├── biometric_identification ├── f_d.mat ├── readAndPreprocessImage_ours.m ├── get_all_features.m ├── preprocessing.m ├── getfeatures.m ├── apply_preprocessing_to_all.m ├── get_fnames_ids.m ├── test.m ├── get_data_identification.m └── training_ID.m ├── gender_classification ├── temp.tiff ├── SVM_d_9.mat ├── SVM_p_2.mat ├── test │ ├── f_d.jpg │ ├── f_d.mat │ ├── f_p.jpg │ ├── f_p.mat │ ├── m_d.jpg │ ├── m_d.mat │ ├── m_p.jpg │ └── m_p.mat ├── readAndPreprocessImage_ours3.m ├── readAndPreprocessImage_ours.m ├── readAndPreprocessImage_ours2.m ├── readAndPreprocessImage_ours1.m ├── preprocessing.m ├── plotTrainingAccuracy.m ├── readAndPreprocessImage.m ├── readAndPreprocessImage_alex.m ├── apply_preprocessing_to_all.m ├── getParam.m ├── license.txt ├── getfeatures.m ├── get_all_features.m ├── demo.m ├── get_data.m ├── twoStream.m ├── getModel.m ├── training_SVM.m └── CNN_training.m ├── .github └── FUNDING.yml ├── LICENSE.md └── README.md /biometric_identification/f_d.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mahmoudnafifi/11K-Hands/HEAD/biometric_identification/f_d.mat -------------------------------------------------------------------------------- /gender_classification/temp.tiff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mahmoudnafifi/11K-Hands/HEAD/gender_classification/temp.tiff -------------------------------------------------------------------------------- /gender_classification/SVM_d_9.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mahmoudnafifi/11K-Hands/HEAD/gender_classification/SVM_d_9.mat -------------------------------------------------------------------------------- /gender_classification/SVM_p_2.mat: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/mahmoudnafifi/11K-Hands/HEAD/gender_classification/SVM_p_2.mat -------------------------------------------------------------------------------- /gender_classification/test/f_d.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mahmoudnafifi/11K-Hands/HEAD/gender_classification/test/f_d.jpg -------------------------------------------------------------------------------- /gender_classification/test/f_d.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mahmoudnafifi/11K-Hands/HEAD/gender_classification/test/f_d.mat -------------------------------------------------------------------------------- /gender_classification/test/f_p.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mahmoudnafifi/11K-Hands/HEAD/gender_classification/test/f_p.jpg -------------------------------------------------------------------------------- /gender_classification/test/f_p.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mahmoudnafifi/11K-Hands/HEAD/gender_classification/test/f_p.mat -------------------------------------------------------------------------------- /gender_classification/test/m_d.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mahmoudnafifi/11K-Hands/HEAD/gender_classification/test/m_d.jpg -------------------------------------------------------------------------------- /gender_classification/test/m_d.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mahmoudnafifi/11K-Hands/HEAD/gender_classification/test/m_d.mat -------------------------------------------------------------------------------- /gender_classification/test/m_p.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/mahmoudnafifi/11K-Hands/HEAD/gender_classification/test/m_p.jpg -------------------------------------------------------------------------------- /gender_classification/test/m_p.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mahmoudnafifi/11K-Hands/HEAD/gender_classification/test/m_p.mat -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] 4 | patreon: MahmoudAfifi 5 | open_collective: # Replace with a single Open Collective username 6 | ko_fi: # Replace with a single Ko-fi username 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | liberapay: # Replace with a single Liberapay username 10 | issuehunt: # Replace with a single IssueHunt username 11 | otechie: # Replace with a single Otechie username 12 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry 13 | custom: https://www.paypal.com/paypalme/MahmoudAfifi 14 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Mahmoud Afifi 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, 
distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /gender_classification/readAndPreprocessImage_ours3.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 
13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | function Iout = readAndPreprocessImage_ours3(filename) 17 | 18 | Iout = imread(filename); 19 | 20 | 21 | end 22 | 23 | -------------------------------------------------------------------------------- /gender_classification/readAndPreprocessImage_ours.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | function Iout = readAndPreprocessImage_ours3(filename) 17 | 18 | I = imread(filename); 19 | Iout=preprocessing(I); 20 | 21 | end 22 | 23 | -------------------------------------------------------------------------------- /gender_classification/readAndPreprocessImage_ours2.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 
5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | function Iout = readAndPreprocessImage_ours2(filename) 17 | 18 | I = imread(filename); 19 | 20 | Iout=I(:,:,4); 21 | 22 | end -------------------------------------------------------------------------------- /biometric_identification/readAndPreprocessImage_ours.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 
12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | 17 | function Iout = readAndPreprocessImage_ours3(filename) 18 | 19 | I = imread(filename); 20 | Iout=preprocessing(I); 21 | 22 | end 23 | 24 | -------------------------------------------------------------------------------- /gender_classification/readAndPreprocessImage_ours1.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 
13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | function Iout = readAndPreprocessImage_ours1(filename) 17 | 18 | I = imread(filename); 19 | 20 | Iout=I(:,:,1:3); 21 | 22 | end -------------------------------------------------------------------------------- /biometric_identification/get_all_features.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 
13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | 17 | %get all features 18 | base='hands'; 19 | 20 | load('o_p_1.mat'); 21 | 22 | images=fullfile(base,'*.tiff'); 23 | for i=1:length(images) 24 | features=getfeatures(imread(fullfile(base,images(i).name)),net,1); 25 | save('features',fullfile(base,strcat(images(i).name(1:end-4),'.mat'))); 26 | 27 | end -------------------------------------------------------------------------------- /gender_classification/preprocessing.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 
13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | function O= preprocessing(I) 17 | 18 | I=im2double(I); 19 | low=imguidedfilter(I,'DegreeOfSmoothing',100,'NeighborhoodSize',[10,10]); 20 | glow=rgb2gray(low); 21 | high=imadjust((rgb2gray(I)+(eps/100))./(glow+(eps/100))); 22 | low=imresize(low,[224,224]); 23 | high=imresize(high,[224,224]); 24 | O=zeros(224,224,4); O(:,:,1:3)=low; O(:,:,4)=high; 25 | O = uint8(O*255); 26 | end -------------------------------------------------------------------------------- /biometric_identification/preprocessing.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 
13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | function O= preprocessing(I) 17 | 18 | I=double(I)/255; 19 | low=imguidedfilter(I,'DegreeOfSmoothing',100,'NeighborhoodSize',[10,10]); 20 | glow=rgb2gray(low); 21 | high=imadjust((rgb2gray(I)+(eps/100))./(glow+(eps/100))); 22 | low=imresize(low,[224,224]); 23 | high=imresize(high,[224,224]); 24 | O=zeros(224,224,4); O(:,:,1:3)=low; O(:,:,4)=high; 25 | O = uint8(O*255); 26 | end -------------------------------------------------------------------------------- /gender_classification/plotTrainingAccuracy.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 
13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | function plotTrainingAccuracy(info) 17 | persistent plotObj 18 | 19 | if info.State == "start" 20 | plotObj = animatedline; 21 | xlabel("Iteration") 22 | ylabel("Training Accuracy") 23 | elseif info.State == "iteration" 24 | addpoints(plotObj,info.Iteration,info.TrainingAccuracy) 25 | drawnow limitrate nocallbacks 26 | end 27 | 28 | end 29 | 30 | 31 | -------------------------------------------------------------------------------- /gender_classification/readAndPreprocessImage.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | function Iout = readAndPreprocessImage(filename) 17 | 18 | I = imread(filename); 19 | 20 | % Some images may be grayscale. Replicate the image 3 times to 21 | % create an RGB image. 22 | if ismatrix(I) 23 | I = cat(3,I,I,I); 24 | end 25 | 26 | % Resize the image as required for the CNN. 
27 | Iout = imresize(I, [224 224]); 28 | 29 | end -------------------------------------------------------------------------------- /gender_classification/readAndPreprocessImage_alex.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | function Iout = readAndPreprocessImage_alex(filename) 17 | 18 | I = imread(filename); 19 | 20 | % Some images may be grayscale. Replicate the image 3 times to 21 | % create an RGB image. 22 | if ismatrix(I) 23 | I = cat(3,I,I,I); 24 | end 25 | 26 | % Resize the image as required for the CNN. 
27 | Iout = imresize(I, [227 227]); 28 | 29 | 30 | end -------------------------------------------------------------------------------- /gender_classification/apply_preprocessing_to_all.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 
13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | function apply_preprocessing_to_all(f, newf) 17 | g={'male','female'}; 18 | mkdir(fullfile(pwd,newf)); 19 | for G=1:2 20 | images=dir(fullfile(f,g{G},'*.jpg')); 21 | mkdir(fullfile(pwd,newf,g{G})); 22 | for i=1:length(images) 23 | I=(imread(fullfile(f,g{G},images(i).name))); 24 | out=preprocessing(I); 25 | imwrite(out,(fullfile(newf,g{G},strcat(images(i).name(1:end-3),'tiff')))); 26 | end 27 | end -------------------------------------------------------------------------------- /gender_classification/getParam.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | function [param] = getParam 17 | 18 | 19 | param.szIn=[224 224]; 20 | param.szOut=531; %because of the output of .. 
lbpFeatures = extractLBPFeatures(I(:,:,4),'CellSize',[64 64]); is 531 (for consistency) 21 | param.DataAugmentation='none'; %'randcrop' | 'randfliplr' | cell array of 'randcrop' and 'randfliplr' 22 | param.Normalization='zerocenter'; %'none' 23 | param.WeightLearnRateFactor=20; 24 | param.BiasLearnRateFactor=20; 25 | param.WeightL2Factor=0.8; 26 | param.numClasses=2; %m and f 27 | end 28 | 29 | -------------------------------------------------------------------------------- /biometric_identification/getfeatures.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 
13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | function features = getfeatures(I,net,withLBP) 17 | 18 | if withLBP==1 19 | features.LBP = extractLBPFeatures(I(:,:,4),'CellSize',[64 64]); 20 | end 21 | 22 | featureLayer = 'fc9_1'; 23 | features.low = activations(net, I, featureLayer,'OutputAs', 'columns')'; 24 | featureLayer = 'fc9_2_2'; 25 | features.high = activations(net, I, featureLayer,'OutputAs', 'columns')'; 26 | featureLayer = 'avpool'; 27 | features.fusion = activations(net, I, featureLayer,'OutputAs', 'columns')'; 28 | 29 | end 30 | 31 | -------------------------------------------------------------------------------- /biometric_identification/apply_preprocessing_to_all.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 
13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | 17 | The Software is provided "as is", without warranty of any kind.function apply_preprocessing_to_all(f, newf) 18 | g={'male','female'}; 19 | mkdir(fullfile(pwd,newf)); 20 | for G=1:2 21 | images=dir(fullfile(f,g{G},'*.jpg')); 22 | mkdir(fullfile(pwd,newf,g{G})); 23 | for i=1:length(images) 24 | I=(imread(fullfile(f,g{G},images(i).name))); 25 | out=preprocessing(I); 26 | imwrite(out,(fullfile(newf,g{G},strcat(images(i).name(1:end-3),'tiff')))); 27 | end 28 | end -------------------------------------------------------------------------------- /gender_classification/license.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2018, mahmoud afifi 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 10 | * Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution 13 | * Neither the name of York University nor the names of its 14 | contributors may be used to endorse or promote products derived from this 15 | software without specific prior written permission. 16 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 17 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 18 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 19 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE 20 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 21 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 22 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 23 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 24 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 25 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 26 | -------------------------------------------------------------------------------- /gender_classification/getfeatures.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% getfeatures - extract deep CNN features (and optionally LBP features) for
% one hand image.
%
% I       : hand image; preprocessing() is applied here, so pass the raw image
% net     : trained two-stream CNN exposing layers 'fc9_1', 'fc9_2_2', 'avpool'
% withLBP : when 1, additionally extract LBP features from channel 4 of the
%           preprocessed image
%
% features: struct with .low (1x531), .high (1x531), .fusion (1x1062),
%           plus .LBP when requested
function features = getfeatures(I, net, withLBP)

I = preprocessing(I);

if withLBP == 1
    % NOTE(review): assumes channel 4 of the preprocessed image is the
    % intended LBP source -- confirm against preprocessing().
    features.LBP = extractLBPFeatures(I(:, :, 4), 'CellSize', [64 64]);
end

% activations() is fed through a datastore, so round-trip via a temp file
imwrite(I, 'temp.tiff');
ds = imageDatastore('temp.tiff');

features.low    = reshape(activations(net, ds, 'fc9_1'),   [1, 531]);
features.high   = reshape(activations(net, ds, 'fc9_2_2'), [1, 531]);
features.fusion = reshape(activations(net, ds, 'avpool'),  [1, 2*531]);
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% get_fnames_ids - list file names and subject ids for one identification split.
%
% fold  : fold number, integer from 1 to 10
% phase : 'training' or 'testing'
% side  : 'dorsal' or 'palmar'
% subj  : number of subjects (80, 100, or 120)
% ext   : 'jpg' or 'mat' (default 'jpg'); use 'mat' if you have extracted and
%         saved all features as .mat files in the same directory
%
% fnames: cell array of file names found in the split directory
% ids   : cell array of subject ids (the token before the first '_')
function [fnames, ids] = get_fnames_ids(fold, phase, side, subj, ext)
if nargin == 4
    ext = 'jpg';
end

splitDir = fullfile('identification', num2str(fold), ...
    strcat(phase, '_', side), num2str(subj));
entries = dir(fullfile(splitDir, strcat('*.', ext)));

n = length(entries);
fnames = cell(n, 1);
ids = cell(n, 1);
for k = 1:n
    fnames{k} = entries(k).name;
    % the subject id is encoded as the file-name prefix before the first '_'
    tokens = strsplit(fnames{k}, '_');
    ids{k} = cell2mat(tokens(1));
end
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% get_all_features - extract and cache features for every image in the
% dataset, saving one .mat file (variable 'features') next to each image.
% Dorsal images use the dorsal CNN (ours_d_1) without LBP; palmar images use
% the palmar CNN (ours_p_7) with LBP.

base = 'hands';
load('HandInfo.mat'); % provides the HandInfo struct array

aspects = {HandInfo.aspectOfHand};
names = {HandInfo.imageName};

% --- dorsal side: CNN features only ---
load('ours_d_1.mat'); % loads the dorsal network as 'net'
ind_d = find(contains(aspects, 'dorsal'));
for k = 1:length(ind_d)
    name = names{ind_d(k)};
    features = getfeatures(imread(fullfile(base, name)), net, 0);
    save(fullfile(base, strcat(name(1:end-4), '.mat')), 'features');
end

% --- palmar side: CNN + LBP features ---
load('ours_p_7.mat'); % loads the palmar network as 'net'
ind_p = find(contains(aspects, 'palm'));
for k = 1:length(ind_p)
    name = names{ind_p(k)};
    features = getfeatures(imread(fullfile(base, name)), net, 1);
    save(fullfile(base, strcat(name(1:end-4), '.mat')), 'features');
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% test - identify the subject ID of a hand image with the two-stream CNN + SVMs.
function ID = test( image, model, classifier, with_LBP )

%image: RGB image.
%model: our two-stream trained CNN model, you can download from the webpage. Note: use the proper CNN. For example, if you are working with dorsal-side hand images, use a CNN that has been trained on the dorsal side.
%classifier: trained SVM classifier
%with_LBP: boolean value, it should be true if the classifier was trained with the LBP features, false otherwise.

% default: no LBP features when the caller omits the flag
if nargin==3
    with_LBP=false;
end

% NOTE(review): the gender_classification version of getfeatures() calls
% preprocessing() on its input itself, which would make this a double
% application of preprocessing -- confirm against the biometric_identification
% copy of getfeatures.m before relying on these scores.
I=preprocessing(image);

features=getfeatures(I,model,with_LBP);

if with_LBP==true
    % feature layout: [LBP(1:531), low(532:1062), high(1063:1594), fusion(1595:end)]
    % -- the same column ranges used when training in training_ID.m
    f=[features.LBP,features.low,features.high,features.fusion];
    [~,scores1] = predict(classifier.low, f(:,532:1062));
    [~,scores2] = predict(classifier.high, f(:,1063:1594));
    [~,scores3] = predict(classifier.fusion, f(:,1595:end));
    [~,scores4] = predict(classifier.lbp, f(:,1:531));
    % late fusion of per-classifier scores (same weighting rule as training_ID.m)
    scores=(((scores1+scores2)/2)+scores3)/3+scores4;
    % pick the class whose fused score equals the row maximum
    inds=max(scores,[],2)==scores;
    response=(find(inds(1,:)));
    ID=classifier.high.ClassNames{response};

else
    % without LBP, a single SVM trained on [low, high, fusion] is used
    f=[features.low,features.high,features.fusion];
    ID = predict(classifier.all,f);
end


end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% demo - gender classification demo: for one palmar and one dorsal test image,
% run the two-stream CNN alone and then the CNN-features + SVM pipeline.
%Please download ours_p_7.mat and ours_d_1.mat from the dataset webpage.

% each row: {input image, CNN model file, SVM model file}
demoCases = { 'test\m_p.jpg', 'ours_p_7.mat', 'SVM_p_2.mat' ; ...
              'test\f_d.jpg', 'ours_d_1.mat', 'SVM_d_9.mat' };

for c = 1:size(demoCases, 1)
    inputImg = demoCases{c, 1};

    load(demoCases{c, 2}); % loads the two-stream CNN as 'net'

    % ----- CNN only -----
    I = imread(inputImg);
    I = preprocessing(I);
    imwrite(I, 'temp.tiff');
    testingImage = imageDatastore('temp.tiff');
    [YPred] = classify(net, testingImage);
    fprintf('CNN result: %s\n', char(YPred));

    % ----- CNN features + SVM -----
    load(demoCases{c, 3}); % loads the trained SVM as 'classifier'
    I = imread(inputImg);
    features = getfeatures(I, net, 0);
    f = [features.low, features.high, features.fusion];
    predictedLabel = predict(classifier, f);
    % label 0 encodes female, anything else male
    if predictedLabel == 0
        YPred = 'Female';
    else
        YPred = 'Male';
    end
    fprintf('CNN + SVM result: %s\n', YPred);
end
% get_data - build the gender-classification training/testing folders for the
% 10 folds by copying each listed image into
% gender/<fold>/<set>_<side>/<male|female>.
% The per-fold .txt list directories must sit next to this script; all images
% live in baseDir below (change accordingly).

baseDir = 'hands'; %you can get it from the webpage of the dataset

mkdir('gender');

sides = {'p', 'd'};
sets = {'training', 'testing'};

for fold = 1:10

    mkdir(fullfile('gender', num2str(fold)));

    for S = 1:2

        newDir_ = sprintf('gender\\%s\\%s_', num2str(fold), sets{S});

        for s = 1:2
            if s == 1
                newDir = strcat(newDir_, 'palmar');
            else
                newDir = strcat(newDir_, 'dorsal');
            end
            mkdir(newDir);
            mkdir(fullfile(newDir, 'male'));
            mkdir(fullfile(newDir, 'female'));

            % image list for this fold/set/side
            fileID = fopen(fullfile('.\', num2str(fold), ...
                sprintf('g_imgs_%s_%s.txt', sets{S}, sides{s})), 'r');
            imageNames = textscan(fileID, '%s\r\n'); %change it to '%s\n' for Linux users
            imageNames = imageNames{1};
            fclose(fileID);

            % matching gender labels, one per image
            fileID = fopen(fullfile('.\', num2str(fold), ...
                sprintf('g_%s_%s.txt', sets{S}, sides{s})), 'r');
            gender = textscan(fileID, '%s\r\n'); %change it to '%s\n' for Linux users
            gender = gender{1};
            fclose(fileID);

            for j = 1:length(imageNames)
                n = imageNames{j};
                %n=strcat(n(1:end-3),'tiff');
                copyfile(fullfile(baseDir, n), fullfile(newDir, gender{j}, n));
            end
        end
    end
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% get_data_identification - build the biometric-identification
% training/testing folders for the 10 folds by copying each listed image,
% renamed to '<id>_<original name>', into
% identification/<fold>/<set>_<side>/<subject count>.
% The per-fold .txt list directories must sit next to this script; all images
% live in baseDir below (change accordingly).
%
%NOTE:
%run get_all_features.m first if you want to extract all features and save them as .mat files besides the original image.
%Then, uncomment the marked line in the copy loop to extract only .mat files to train the SVM classifiers.

baseDir = 'hands'; %you can get it from the webpage of the dataset

mkdir('identification');

sides = {'p', 'd'};
sets = {'training', 'testing'};

for fold = 1:10

    mkdir(fullfile('identification', num2str(fold)));

    for S = 1:2

        newDir_ = sprintf('identification\\%s\\%s_', num2str(fold), sets{S});

        for s = 1:2
            if s == 1
                newDir = strcat(newDir_, 'palmar');
            else
                newDir = strcat(newDir_, 'dorsal');
            end
            mkdir(newDir);

            for subj = [80, 100, 120]
                mkdir(fullfile(newDir, num2str(subj)));

                % image list for this fold/set/side/subject-count
                fileID = fopen(fullfile('.\', num2str(fold), ...
                    sprintf('id_imgs_%s_%s_%s.txt', sets{S}, sides{s}, num2str(subj))), 'r');
                imageNames = textscan(fileID, '%s\r\n'); %change it to '%s\n' for Linux users
                imageNames = imageNames{1};
                fclose(fileID);

                % matching subject ids, one per image
                fileID = fopen(fullfile('.\', num2str(fold), ...
                    sprintf('id_%s_%s_%s.txt', sets{S}, sides{s}, num2str(subj))), 'r');
                ids = textscan(fileID, '%s\r\n'); %change it to '%s\n' for Linux users
                ids = ids{1};
                fclose(fileID);

                for j = 1:length(imageNames)
                    n = imageNames{j};
                    %n=strcat(n(1:end-3),'mat'); % uncomment to copy feature .mat files instead
                    id = ids{j};
                    % prefix the subject id so the id is recoverable from the file name
                    [~, name, ext] = fileparts(n);
                    new_n = strcat(id, '_', name, ext);
                    copyfile(fullfile(baseDir, n), fullfile(newDir, num2str(subj), new_n));
                end
            end
        end
    end
end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% twoStream - assemble a two-stream CNN layer graph from two pretrained
% networks: stream 1 reads the RGB channels of a 4-channel input through
% skipConv1, stream 2 reads the 4th channel through skipConv2, and the two
% streams are fused by depth concatenation + average pooling before a
% fully-connected classification head.
%
% net1  : pretrained network for the 3-channel (RGB) stream
% net2  : pretrained network for the 1-channel stream
% param : struct with szOut, DataAugmentation, Normalization, numClasses,
%         and WeightLearnRateFactor fields
% lgraph: the connected layerGraph, ready for training
function [lgraph]=twoStream(net1,net2,param)

outSize=param.szOut;
% 4-channel input image; the channel split between streams happens in the
% two skip convolutions below
inputlayer = imageInputLayer([224,224,4],...
    'DataAugmentation',param.DataAugmentation,...
    'Normalization',param.Normalization,...
    'Name','inputlayer');


%% conv1 (modified)
conv1_1=net1.Layers(2);
conv1_2=net2.Layers(2);

% stack both pretrained conv1 filter banks into one 4-channel weight tensor:
% channels 1:3 from net1, channel 4 from net2
Weights_=zeros([size(conv1_1.Weights,1),size(conv1_1.Weights,2),...
    size(conv1_1.Weights,3)+1,size(conv1_1.Weights,4)]);
Weights_(:,:,1:3,:)=conv1_1.Weights(:,:,:,:);
Weights_(:,:,4,:)=conv1_2.Weights(:,:,:,:);

% skipConv1: RGB-only (channel 4 zeroed), weights frozen (learn rate 0)
Weights=Weights_;
Weights(:,:,4,:)=0;
skipConv1 = convolution2dLayer(size(Weights,1),size(Weights,4),'Stride',3,'Name','skipConv1',...
    'Padding',2,'WeightL2Factor',1,...
    'WeightLearnRateFactor',0,...
    'BiasLearnRateFactor',0);
skipConv1.Weights = Weights;
skipConv1.Bias = conv1_1.Bias;

% skipConv2: channel-4-only (RGB zeroed), weights frozen (learn rate 0)
Weights=Weights_;
Weights(:,:,1:3,:)=0;
%pattern=repmat([0,1],[2,1,1,96]);
skipConv2 = convolution2dLayer(size(Weights,1),size(Weights,4),'Stride',3,'Name','skipConv2',...
    'Padding',2,'WeightL2Factor',1,...
    'WeightLearnRateFactor',0,...
    'BiasLearnRateFactor',0);

skipConv2.Weights = Weights;
skipConv2.Bias = conv1_2.Bias;

clear conv1_2 conv1_1 Weights_

% concatenate stream 1 (net1) and stream 2 (net2) layer stacks, then rename
% every layer with a _1 / _2 suffix so all names in the graph are unique
layers=[skipConv1;net1.Layers(3:end-3);net2.Layers(3:end-3)];
layers(2).Name='relu1_1';
layers(3).Name='norm1_1';
layers(4).Name='pool1_1';
layers(5).Name='conv2_1';
layers(6).Name='relu2_1';
layers(7).Name='norm2_1';
layers(8).Name='pool2_1';
layers(9).Name='conv3_1';
layers(10).Name='relu3_1';
layers(11).Name='conv4_1';
layers(12).Name='relu4_1';
layers(13).Name='conv5_1';
layers(14).Name='relu5_1';
layers(15).Name='pool5_1';
layers(16).Name='fc6_1';
layers(17).Name='relu6_1';
layers(18).Name='drop6_1';
layers(19).Name='fc7_1';
layers(20).Name='relu7_1';
layers(21).Name='drop7_1';
layers(22).Name='fc8_1';
layers(23).Name='relu8_1';
layers(24).Name='drop8_1';
layers(25).Name='fc9_1';

layers(26).Name='relu1_2';
layers(27).Name='norm1_2';
layers(28).Name='pool1_2';
layers(29).Name='conv2_2';
layers(30).Name='relu2_2';
layers(31).Name='norm2_2';
layers(32).Name='pool2_2';
layers(33).Name='conv3_2';
layers(34).Name='relu3_2';
layers(35).Name='conv4_2';
layers(36).Name='relu4_2';
layers(37).Name='conv5_2';
layers(38).Name='relu5_2';
layers(39).Name='pool5_2';
layers(40).Name='fc6_2';
layers(41).Name='relu6_2';
layers(42).Name='drop6_2';
layers(43).Name='fc7_2';
layers(44).Name='relu7_2';
layers(45).Name='drop7_2';
layers(46).Name='fc8_2';
layers(47).Name='relu8_2';
layers(48).Name='drop8_2';
layers(49).Name='fc9_2';
layers(50).Name='relu9_2';
layers(51).Name='drop9_2';
layers(end).Name='fc9_2_2';
% fusion head, trained with doubled learning rate
fusion=fullyConnectedLayer(param.numClasses,'Name','fc_fusion','WeightLearnRateFactor',param.WeightLearnRateFactor*2);


% append fusion stages: concat both streams' outputs, average-pool, classify
layers=[inputlayer;layers;
    depthConcatenationLayer(2,'Name','append');
    averagePooling2dLayer(1,'Stride',2,'Name','avpool');
    fusion
    softmaxLayer;
    classificationLayer];
layers(end-1).Name='softmax';
layers(end).Name='classificationLayer';
lgraph = layerGraph(layers);


% rewire the (initially linear) graph into two parallel branches:
% input -> skipConv2 -> stream 2, while fc9_1 (stream 1 output) is detached
% from stream 2 and routed into the 'append' concatenation instead
lgraph = addLayers(lgraph,skipConv2);
lgraph = connectLayers(lgraph,'inputlayer','skipConv2');
lgraph = disconnectLayers(lgraph,'fc9_1','relu1_2');
lgraph = connectLayers(lgraph,'skipConv2','relu1_2');
lgraph = connectLayers(lgraph,'fc9_1','append/in2');

% display the layer list and plot the graph for visual inspection
lgraph.Layers

figure
plot(lgraph);
figure;



end
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% getModel - build one of three CNN architectures on top of AlexNet layers.
%
% param : struct with szIn, szOut, numClasses, DataAugmentation,
%         Normalization, WeightL2Factor, WeightLearnRateFactor,
%         BiasLearnRateFactor fields
% M     : model variant -- 1: 3-channel (RGB) single stream,
%         2: 1-channel (grayscale-weighted) single stream,
%         3: two-stream network (delegates to twoStream(net1,net2,param))
% net1,
% net2  : pretrained networks, only used when M==3
% layers: layer array (M==1,2) or layerGraph (M==3)
function [layers]=getModel(param,M,net1,net2)

% net1/net2 are only meaningful for the two-stream variant
if M~=3
    net1=[];
    net2=[];

end

%% input layer
if M==1
    inputlayer = imageInputLayer([param.szIn,3],...
        'DataAugmentation',param.DataAugmentation,...
        'Normalization',param.Normalization,...
        'Name','inputlayer');
elseif M==2
    inputlayer = imageInputLayer([param.szIn,1],...
        'DataAugmentation',param.DataAugmentation,...
        'Normalization',param.Normalization,...
        'Name','inputlayer');
elseif M==3
    % two-stream model is assembled elsewhere
    [layers]=twoStream(net1,net2,param);
    return;
end

%% alexnet (from 2:23)
% NOTE(review): the section comment says 2:23 but the code takes layers
% 2:16 (conv/relu/norm/pool stack without AlexNet's FC head) -- confirm
% which range is intended.
if M==1 || M==2
    net = alexnet;
    layersTransfer = net.Layers(2:16);
    clear net;
else
end

%% conv1 (modified)
conv1=layersTransfer(1);
% NOTE(review): this zero initialization is dead code -- Weights is
% unconditionally overwritten by the next if/elseif block.
if M==1
    Weights=zeros([size(conv1.Weights,1),size(conv1.Weights,2),...
        3,size(conv1.Weights,4)]);
elseif M==2
    Weights=zeros([size(conv1.Weights,1),size(conv1.Weights,2),...
        1,size(conv1.Weights,4)]);
end
Bias=conv1.Bias;
if M==1
    Weights=conv1.Weights;
elseif M==2
    % collapse pretrained RGB filters to one channel with luma weights
    Weights=(0.2989*conv1.Weights(:,:,1,:)+0.5870*conv1.Weights(:,:,2,:)+0.1140*conv1.Weights(:,:,3,:)); %to Y'UV
end
if M==1
    conv1 = convolution2dLayer(size(Weights,1),size(Weights,4),'Stride',3,...
        'Padding',0,'WeightL2Factor',param.WeightL2Factor,'Name','conv1',...
        'WeightLearnRateFactor',param.WeightLearnRateFactor*2,...
        'BiasLearnRateFactor',param.BiasLearnRateFactor);
elseif M==2
    conv1 = convolution2dLayer(size(Weights,1),size(Weights,4),'Stride',3,...
        'Padding',0,'WeightL2Factor',param.WeightL2Factor,'Name','conv1',...
        'WeightLearnRateFactor',param.WeightLearnRateFactor,...
        'BiasLearnRateFactor',param.BiasLearnRateFactor);
end

conv1.Weights = Weights;
conv1.Bias =Bias;
% drop the original conv1; the rebuilt one is inserted below
layersTransfer=layersTransfer(2:end);

%% fully connected layers
% fc6

fc6= fullyConnectedLayer(4096,...
    'WeightL2Factor',param.WeightL2Factor,...
    'WeightLearnRateFactor',param.WeightLearnRateFactor*2,...
    'BiasLearnRateFactor',param.BiasLearnRateFactor*2,...
    'Name','fc6');


relu6 = reluLayer('Name','relu6');
drop6=dropoutLayer(0.5,'Name','drop6');

% fc7
fc7= fullyConnectedLayer(4096,...
    'WeightL2Factor',param.WeightL2Factor,...
    'WeightLearnRateFactor',param.WeightLearnRateFactor*2,...
    'BiasLearnRateFactor',param.BiasLearnRateFactor*2,...
    'Name','fc7');

relu7 = reluLayer('Name','relu7');
drop7=dropoutLayer(0.5,'Name','drop7');

if M==1
    % fc8




    fc8= fullyConnectedLayer(2048,...
        'WeightL2Factor',param.WeightL2Factor,...
        'WeightLearnRateFactor',param.WeightLearnRateFactor*2,...
        'BiasLearnRateFactor',param.BiasLearnRateFactor*2,...
        'Name','fc8');

    relu8 = reluLayer('Name','relu8');

    drop8=dropoutLayer(0.5,'Name','drop8');

    % fc9 -- feature layer of size param.szOut
    outSize=param.szOut;



    fc9= fullyConnectedLayer(outSize,...
        'WeightL2Factor',param.WeightL2Factor,...
        'WeightLearnRateFactor',param.WeightLearnRateFactor*2,...
        'BiasLearnRateFactor',param.BiasLearnRateFactor*2,...
        'Name','fc9');


    fc=[fc6;relu6;drop6;fc7;relu7;drop7;fc8;relu8;drop8;fc9];
elseif M==2

    % fc8



    fc8= fullyConnectedLayer(2048,...
        'WeightL2Factor',param.WeightL2Factor,...
        'WeightLearnRateFactor',param.WeightLearnRateFactor*2,...
        'BiasLearnRateFactor',param.BiasLearnRateFactor*2,...
        'Name','fc8');

    relu8 = reluLayer('Name','relu8');


    drop8=dropoutLayer(0.5,'Name','drop8');

    % fc9 -- the 1-channel variant gets an extra fc9/relu9/drop9 stage
    % before the param.szOut feature layer ('fc9_2')




    fc9= fullyConnectedLayer(2048,...
        'WeightL2Factor',param.WeightL2Factor,...
        'WeightLearnRateFactor',param.WeightLearnRateFactor*2,...
        'BiasLearnRateFactor',param.BiasLearnRateFactor*2,...
        'Name','fc9');
    relu9 = reluLayer('Name','relu9');

    drop9=dropoutLayer(0.5,'Name','drop9');

    outSize=param.szOut;




    fc9_= fullyConnectedLayer(outSize,...
        'WeightL2Factor',param.WeightL2Factor,...
        'WeightLearnRateFactor',param.WeightLearnRateFactor*2,...
        'BiasLearnRateFactor',param.BiasLearnRateFactor*2,...
        'Name','fc9_2');





    fc=[fc6;relu6;drop6;fc7;relu7;drop7;fc8;relu8;drop8;fc9;relu9;drop9;fc9_];

end
%% classification layer
cl=[fullyConnectedLayer(param.numClasses,'WeightLearnRateFactor',...
    param.WeightLearnRateFactor,'BiasLearnRateFactor',...
    param.BiasLearnRateFactor,'Name','fc10')
    softmaxLayer
    classificationLayer];

% no trailing semicolon: the assembled layer array is echoed to the console
layers = [inputlayer;conv1;layersTransfer;fc;cl]
%Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions:

%The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

%Please cite our paper if you use the provided source code, pre-trained models, or the dataset.
%Citation information is provided in the readme file (can be found in the dataset webpage).

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

% Train and evaluate one-vs-all SVM identity classifiers for every
% combination of hand side (palmar/dorsal), subject-count experiment
% (80/100/120 subjects) and fold (1..10).
% Column layout of each per-sample feature vector (see getfeatures.m):
%   1:531      LBP features
%   532:1062   low-level CNN stream features
%   1063:1594  high-level CNN stream features
%   1595:end   fusion features

clear

for side_c={'palmar','dorsal'}
    side=side_c{1};
    for subj=[80,100,120]
        ext='mat';
        for fold=1:10
            % ---- load training features into memory ----
            base=fullfile('identification',num2str(fold),strcat('training','_',side),num2str(subj));
            [fnames,training_response]=get_fnames_ids(fold,'training',side,subj,ext);
            training_data=[];
            for k=1:length(fnames)
                tmp=load(fullfile(base,fnames{k})); %each .mat stores a struct named 'features'
                f=tmp.features;
                training_data=[training_data;f.LBP,f.low,f.high,f.fusion];
            end

            % ---- load testing features into memory ----
            base=fullfile('identification',num2str(fold),strcat('testing','_',side),num2str(subj));
            [fnames,testing_response]=get_fnames_ids(fold,'testing',side,subj,ext);
            testing_data=[];
            for k=1:length(fnames)
                tmp=load(fullfile(base,fnames{k}));
                f=tmp.features;
                testing_data=[testing_data;f.LBP,f.low,f.high,f.fusion];
            end

            model=sprintf('SVM_11K_%s_%s_%s',side,num2str(subj),num2str(fold));
            conf_name=sprintf('SVM_results_%s_%s_%s.mat',side,num2str(subj),num2str(fold));
            conf_name2=sprintf('SVM_results_%s_%s_wo_lbp_%s.mat',side,num2str(subj),num2str(fold));

            tic;

            % ---- one polynomial-kernel SVM per feature group, plus one on all CNN features ----
            SVM_temp = templateSVM('KernelFunction','polynomial','PolynomialOrder',2,...
                'KernelScale','auto','Solver','ISDA','ClipAlphas',false,...
                'IterationLimit',1e7,'Standardize',false);
            options = statset('UseParallel',true);

            classifier_low = fitcecoc(training_data(:,532:1062), training_response,...
                'Learners',SVM_temp, 'Verbose',0,'Coding', 'onevsall','ObservationsIn', 'rows','Options',options);
            classifier_high = fitcecoc(training_data(:,1063:1594), training_response,...
                'Learners',SVM_temp, 'Verbose',0,'Coding', 'onevsall','ObservationsIn', 'rows','Options',options);
            classifier_fusion = fitcecoc(training_data(:,1595:end), training_response,...
                'Learners',SVM_temp, 'Verbose',0,'Coding', 'onevsall','ObservationsIn', 'rows','Options',options);
            classifier_lbp = fitcecoc(training_data(:,1:531), training_response,...
                'Learners',SVM_temp, 'Verbose',0,'Coding', 'onevsall','ObservationsIn', 'rows','Options',options);
            classifier_all = fitcecoc(training_data(:,532:end), training_response,...
                'Learners',SVM_temp, 'Verbose',0,'Coding', 'onevsall','ObservationsIn', 'rows','Options',options);

            Classifier.low=classifier_low;
            Classifier.high=classifier_high;
            Classifier.fusion=classifier_fusion;
            Classifier.lbp=classifier_lbp;
            Classifier.all=classifier_all;
            save(model,'Classifier');

            t_training=toc;

            % ---- shuffle the testing samples ----
            perm=randperm(size(testing_data,1));
            testing_data=testing_data(perm,:);
            testing_response=testing_response(perm,:);

            tic

            % Pass CNN image features to trained classifier
            [L1,scores1] = predict(classifier_low, testing_data(:,532:1062));
            [L2,scores2] = predict(classifier_high, testing_data(:,1063:1594));
            [L3,scores3] = predict(classifier_fusion, testing_data(:,1595:end));
            [L4,scores4] = predict(classifier_lbp, testing_data(:,1:531));
            [L5, scores5] = predict(classifier_all,testing_data(:,532:end));

            % fuse the per-stream scores, then add the LBP scores
            scores=(((scores1+scores2)/2)+scores3)/3+scores4;
            predictedLabels=cell(size(scores,1),1);
            isBest=max(scores,[],2)==scores; %logical mask marking each row's maxima
            for r=1:size(isBest,1)
                winner=(find(isBest(r,:)));
                predictedLabels{r}=classifier_high.ClassNames{winner};
            end

            t_testing=toc;

            % Tabulate the results using a confusion matrix.
            confMat = confusionmat(testing_response, predictedLabels);
            % Convert confusion matrix into percentage form
            confMat = bsxfun(@rdivide,confMat,sum(confMat,2));
            save(conf_name,'confMat');
            sprintf('The result for %s: %f',model,mean(diag(confMat)))

            % Tabulate the results using a confusion matrix.
116 | confMat = confusionmat(testing_response, L5); 117 | % Convert confusion matrix into percentage form 118 | confMat = bsxfun(@rdivide,confMat,sum(confMat,2)); 119 | save(conf_name2,'confMat'); 120 | sprintf('The result for %s without LBP: %f',model,mean(diag(confMat))) 121 | 122 | sprintf('Time training: %f - testing: %f',t_training,(t_testing/length(predictedLabels))) 123 | end 124 | end 125 | end -------------------------------------------------------------------------------- /gender_classification/training_SVM.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | get_all_features 17 | 18 | %get gender classification training and testing data 19 | 20 | %please locate directories of .txt files in the same directory of this code. 21 | 22 | %all images should be located in the following directory (please change the name accordingly). 



% Build the per-fold directory layout for gender classification and copy
% each image's feature file (.mat) into gender/<fold>/<set>_<side>/<gender>/.
% The g_imgs_* and g_* text files list the image names and gender labels
% of each fold.

baseDir='hands'; %you can get it from the webpage of the dataset

mkdir('gender');

for i=1:10 %one directory tree per fold

    mkdir(fullfile('gender',num2str(i)));

    sides={'p','d'}; %file-name suffixes: p=palmar, d=dorsal
    sets={'training','testing'};
    for S=1:2

        %split prefix, e.g. gender/3/training_ (built with fullfile instead
        %of hard-coded '\' separators so the script also runs on Linux/macOS)
        newDir_=fullfile('gender',num2str(i),strcat(sets{S},'_'));

        for s=1:2
            if s==1
                newDir=strcat(newDir_,'palmar');
            else
                newDir=strcat(newDir_,'dorsal');
            end
            mkdir(newDir);
            mkdir(fullfile(newDir,'male'));
            mkdir(fullfile(newDir,'female'));

            %image names of this fold/set/side
            fileID = fopen(fullfile('.',num2str(i),sprintf('g_imgs_%s_%s.txt',sets{S},sides{s})),'r');
            imageNames = textscan(fileID,'%s\r\n'); %change it to '%s\n' for Linux users
            imageNames=imageNames{1};
            fclose(fileID);

            %gender label of each image (same order as imageNames)
            fileID = fopen(fullfile('.',num2str(i),sprintf('g_%s_%s.txt',sets{S},sides{s})),'r');
            gender = textscan(fileID,'%s\r\n'); %change it to '%s\n' for Linux users
            gender=gender{1};
            fclose(fileID);

            %copy each image's feature file into its gender folder
            for j=1:length(imageNames)
                n=imageNames{j};
                n=strcat(n(1:end-3),'mat'); %replace the 3-char image extension with 'mat'
                copyfile(fullfile(baseDir,n),fullfile(newDir,gender{j},n));
            end
        end
    end
end



%%start training SVM using extracted features ....
% Train a multiclass SVM classifier using a fast linear solver; feature
% matrices are arranged one observation per row, so 'ObservationsIn' is
% set to 'rows' below (the previous comment incorrectly said 'columns').




%%% training
% For each hand side and each of the 10 folds: load the per-image CNN
% feature vectors (low/high/fusion), train a linear one-vs-all SVM, then
% shuffle the testing split and predict its labels.
% Responses: male=1, female=0.
for SIDE=1:2
    if SIDE==1
        side_='dorsal'; %'dorsal' or 'palmar'
    else
        side_='palmar'; %'dorsal' or 'palmar'
    end

    for i=1:10 %do the experiments using the 10 folds generated by get_data.m
        %training data
        rootFolder=fullfile('gender',num2str(i),strcat('training_',side_));
        rootFolder2=fullfile('gender',num2str(i),strcat('testing_',side_));
        categories={'male','female'};

        %load training data to memory; rows are gathered in a cell array and
        %concatenated once with vertcat (growing the matrix inside the loop
        %is quadratic in the number of samples)
        training_rows={};
        training_response=[];
        for c=1:2
            data_=dir(fullfile(rootFolder,categories{c},'*.mat'));
            if c==1 %male=1
                training_response=[training_response;ones(length(data_),1)];
            else %female=0
                training_response=[training_response;zeros(length(data_),1)];
            end
            for d=1:length(data_)
                %structured load keeps the workspace clean; each .mat stores
                %a struct named 'features'
                tmp=load(fullfile(rootFolder,categories{c},data_(d).name));
                training_rows{end+1,1}=[tmp.features.low,tmp.features.high,tmp.features.fusion];
            end
        end
        training_data=vertcat(training_rows{:});
        clear training_rows

        if strcmp(side_,'palmar')
            model=strcat('SVM_p_',num2str(i));
            conf_name=strcat('SVM_results_p_',num2str(i),'.mat');
        else
            model=strcat('SVM_d_',num2str(i));
            conf_name=strcat('SVM_results_d_',num2str(i),'.mat');
        end

        tic;
        classifier = fitcecoc(training_data, training_response,...
            'Learners', 'Linear', 'Coding', 'onevsall', 'ObservationsIn', 'rows');

        t_training=toc;

        save(strcat(model,'.mat'),'classifier');
        clear training_data training_response

        %load testing data to memory (same layout as the training data)
        testing_rows={};
        testing_response=[];
        for c=1:2
            data_=dir(fullfile(rootFolder2,categories{c},'*.mat'));
            if c==1 %male=1
                testing_response=[testing_response;ones(length(data_),1)];
            else %female=0
                testing_response=[testing_response;zeros(length(data_),1)];
            end
            for d=1:length(data_)
                tmp=load(fullfile(rootFolder2,categories{c},data_(d).name));
                testing_rows{end+1,1}=[tmp.features.low,tmp.features.high,tmp.features.fusion];
            end
        end
        testing_data=vertcat(testing_rows{:});
        clear testing_rows

        %shuffle the testing samples
        ind=randperm(size(testing_data,1));
        testing_data=testing_data(ind,:);
        testing_response=testing_response(ind,:);

        tic
        % Pass CNN image features to trained classifier
        predictedLabels = predict(classifier, testing_data);
        t_testing=toc;

        % Tabulate the results using a confusion matrix.
154 | confMat = confusionmat(testing_response, predictedLabels); 155 | 156 | % Convert confusion matrix into percentage form 157 | confMat = bsxfun(@rdivide,confMat,sum(confMat,2)); 158 | 159 | save(conf_name,'confMat'); 160 | sprintf('The result for %s %s - number %s: %f',model, side_(1),num2str(i),mean(diag(confMat))) 161 | sprintf('Time training: %f - testing: %f',t_training,(t_testing/length(predictedLabels))) 162 | 163 | end 164 | 165 | end 166 | 167 | 168 | 169 | 170 | 171 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 11K Hands: Gender recognition and biometric identification using a large dataset of hand images 2 | 3 | ![two-stream-arch](https://user-images.githubusercontent.com/37669469/76105201-817cc580-5fa2-11ea-8704-9bb82d24ce30.jpg) 4 | 5 | This is the Matlab implementation of the paper: 6 | Mahmoud Afifi, "11K Hands: Gender recognition and biometric identification using a large dataset of hand images." Multimedia Tools and Applications, 2019. 7 | 8 | [Project webpage](https://sites.google.com/view/11khands) 9 | 10 | 11 | To run this code, you need to download the trained models from our [project webpage](https://sites.google.com/view/11khands). The provided files include models for gender classification and biometric identification. 12 | 13 | Gender classification files are: 14 | 15 | 1. demo.m: a demo to test our model. 16 | 2. get_data.m: extract training and testing images from the main directory of the hand images. 17 | 3. CNN_training.m: train our model, or other CNN architectures (AlexNet, VGG-16, VGG-19, or GoogleNet). 18 | 4. Apply_preprocessing_to_all.m: to apply our preprocessing to all images before train using CNN. In this case, you can comment out lines (23-27) of CNN_training.m. Note: the code will write all images as 4-D images (.TIFF). 19 | 5. getModel.m: return the CNN architecture. 20 | 6. 
getParam.m: returns the training parameters. 21 | 7. plotTrainingAccuracy.m: used to plot the progress during the training process. 22 | 8. preprocessing.m: returns a 4-D image containing the smoothed version of the image in the first 3 layers and the detail layer in the fourth layer of the image. 23 | 9. readAndPreprocessImage_*.m: required to adjust the size of the image before the training process. 24 | 10. training_SVM.m: trains an SVM using the extracted features of our trained model. 25 | 11. twoStream.m: returns the architecture of our two-stream CNN. 26 | 12. getfeatures.m: returns the CNN features and LBP features required for the SVM training process. 27 | 13. get_all_features.m: extracts features from all images to feed the SVM classifier in the SVM training process. 28 | 14. SVM_d_9.mat and SVM_p_2.mat: are the trained SVM classifiers to be used in demo.m 29 | 30 | 31 | Biometric classification files are: 32 | 1. preprocessing.m: returns a 4-D image containing the smoothed version of the image in the first 3 layers and the detail layer in the fourth layer of the image. 33 | 2. Apply_preprocessing_to_all.m: applies our preprocessing to all images before training. 34 | 3. getfeatures.m: returns the CNN features and LBP features required for the training process. 35 | 4. get_all_features.m: extracts features from all images to feed the SVM classifier in the SVM training process. 36 | 5. get_data_identification.m: extracts training and testing images from the main directory of the hand images. You have to first use get_all_features.m to get all features from images, then use this code to extract all training/testing sets of the features, not the images. 37 | 6. training_ID.m: trains the SVM classifiers (it loops through all sets described in the paper). 38 | 7. get_fnames_ids.m: gets file names and IDs for training the SVM classifiers. 39 | 8. test.m: tests a particular classifier. You can download the classifiers from our webpage. 40 | Run steps 2, 4, 5, then 6 to re-train the SVM classifiers. 
41 | 42 | 43 | Note: 44 | Because of the 4-D images, you are going to get an error states the following: 45 | Error using imageInputLayer>iParseInputArguments (line 59) 46 | The value of 'InputSize' is invalid. Expected input image size to be a 2 or 3 element vector. For a 3-element vector, the 47 | third element must be 3 or 1. 48 | 49 | To fix it, do the following: 50 | 51 | a- Open Matlab (run as administrator) 52 | 53 | b- Write: 54 | 55 | `edit imageInputLayer.m` 56 | 57 | c- Replace the following code: 58 | ``` 59 | function tf = iIsValidRGBImageSize(sz) 60 | tf = numel(sz) == 3 && sz(end) == 3; 61 | end 62 | ``` 63 | with the modified function: 64 | ``` 65 | function tf = iIsValidRGBImageSize(sz) 66 | tf = numel(sz) == 3 && (sz(end) == 3 || sz(end) == 4); 67 | end 68 | ``` 69 | d- Save 70 | 71 | 72 | ### How to use with Hub 73 | A simple way of using this dataset is with [Activeloop](https://activeloop.ai)'s python package [Hub](https://github.com/activeloopai/Hub)! 74 | 75 | First, run `pip install hub` (or `pip3 install hub`). 76 | 77 | ```python 78 | import hub 79 | ds = hub.load('hub://activeloop/11k-hands') 80 | 81 | # check out the first image and all of its details! 82 | import matplotlib.pyplot as plt 83 | plt.imshow(ds.images[0].numpy()) 84 | plt.title(f"aspect_of_hand : {ds.aspect_of_hand[0].numpy()}, id : {ds.id[0].numpy()}, age : {ds.age[0].numpy()}, irregularities : {ds.irregularities[0].numpy()}, accessories : {ds.accessories[0].numpy()}, nail_polish : {ds.nail_polish[0].numpy()},gender : {ds.gender[0].numpy()},skin_color : {ds.skin_color[0].numpy()}") 85 | plt.show() 86 | 87 | # train a model in pytorch 88 | for sample in ds.pytorch(): 89 | # ... model code here ... 90 | 91 | # train a model in tensorflow 92 | for sample in ds.tensorflow(): 93 | # ... model code here ... 
94 | ``` 95 | available tensors can be shown by printing dataset: 96 | 97 | ```python 98 | print(ds) 99 | # prints: Dataset(path='hub://activeloop/11k-hands', read_only=True, tensors=['images', 'aspect_of_hand', 'id', 'age', 'irregularities', 'accessories', 'nail_polish', 'gender', 'skin_color']) 100 | ``` 101 | 102 | For more information, check out the [hub documentation](https://docs.activeloop.ai/). 103 | 104 | ### Example of testing biometric identification 105 | Please follow the following steps to re-produce our results for biometric identification. 106 | 1. Download source code for biometric identification. Also can be found [here](https://drive.google.com/file/d/1Fmk1KCbIzSfQVGsISwFUwhp2HykGE43R/view). 107 | 108 | 2. Download the dataset from our [project webpage](https://sites.google.com/view/11khands) (a direct download link is available [here](https://drive.google.com/open?id=1KcMYcNJgtK1zZvfl_9sTqnyBUTri2aP2)). 109 | 110 | 3. Download the trained CNN model. Assume we are interested in dorsal-side hand images, so download the trained model for dorsal images from [here](https://drive.google.com/file/d/0Byh0abzpiSu5ZmNtR1pMeWl3UnM/view). 111 | 112 | 4. Download the SVM trained classifier (it should be for the same side). Here we will use an SVM classifier for dorsal images without LBP features for Experiment #1 for 100 subjects (for more information please read our paper or see the [project webpage](https://sites.google.com/view/11khands)). The SVM model is available [here](https://drive.google.com/file/d/0B6CktEG1p54WTk5EX0RqQlRqS2s/view). 113 | 114 | 5. Download the IDs for experiments from [here](https://drive.google.com/drive/folders/0BwO0RMrZJCioZTNTdThFUGh5bG8). In this example, we are interested in experiment #1 for 100 subjcts. 115 | 116 | 6. Run `get_data_identification.m` code (also is available [here](https://drive.google.com/file/d/0BwO0RMrZJCioWEhLMWhYMVgtdGc/view)) that automatically extracts images for each experiment. 
You can modify it to only extract images for experiment 1. Do not forget to change the directory in the code to the directory you saved hand images in. It will create for you a directory named `identification`. 117 | 118 | Now, we have everything. So, let's test experiment 1 (100 subjects) for dorsal-side images. 119 | 120 | 7. Load trained CNN 121 | 8. Load trained classifier 122 | 9. Run this code: 123 | ``` 124 | base = 'identification\1\testing_dorsal\100'; %that is the base directory for experiment1 testing images 125 | images = dir(fullfile(base,'*.jpg')); %get all image names 126 | images = {images(:).name}; %convert them to cell (just easier in future use) 127 | acc= 0; %set accuracy to 0 128 | for i = 1 : length(images) %for each image 129 | I = imread(fullfile(base,images{i})); %read it 130 | ID = test( I, net, Classifier,false); %get predicted ID 131 | parts = strsplit(images{i},'_'); %each image name has the following format ID_originalImageName.jpg 132 | ID_gt = parts{1} ; %get the ground truth ID (the part before first underscore) 133 | if strcmpi(ID_gt,ID{1}) %if ground truth match predicted ID, increment the accuracy 134 | acc = acc + 1; 135 | end 136 | end 137 | 138 | acc = acc/length(images) %report the accuracy (total true predictions over total number of images) 139 | ``` 140 | 141 | It should give you 0.9646 ~ 0.965 (as reported in the project webpage and our paper). 
142 | 143 | -------------------------------------------------------------------------------------------------- 144 | 145 | Project webpage: https://sites.google.com/view/11khands 146 | 147 | [![View Two stream CNN for biometric recognition on File Exchange](https://www.mathworks.com/matlabcentral/images/matlab-file-exchange.svg)](https://www.mathworks.com/matlabcentral/fileexchange/65065-two-stream-cnn-for-biometric-recognition) 148 | 149 | ### Publication: 150 | 151 | If you use this code or our dataset, please cite our paper: 152 | 153 | 154 | Mahmoud Afifi, "11K Hands: Gender recognition and biometric identification using a large dataset of hand images." Multimedia Tools and Applications, 2019. 155 | 156 | ``` 157 | @article{afifi201911k, 158 | title={11K Hands: gender recognition and biometric identification using a large dataset of hand images}, 159 | author={Afifi, Mahmoud}, 160 | journal={Multimedia Tools and Applications}, 161 | volume={78}, 162 | number={15}, 163 | pages={20835--20854}, 164 | year={2019}, 165 | publisher={Springer} 166 | } 167 | ``` 168 | -------------------------------------------------------------------------------- /gender_classification/CNN_training.m: -------------------------------------------------------------------------------- 1 | %Copyright (c) 2017 Mahmoud Afifi 2 | %York University - Assiut University 3 | 4 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 5 | %Citation information is provided in the readme file (can be found in the dataset webpage). 6 | 7 | %Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software with restriction for its use for research purpose only, subject to the following conditions: 8 | 9 | %The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
10 | 11 | %Please cite our paper if you use the provided source code, pre-trained models, or the dataset. 12 | %Citation information is provided in the readme file (can be found in the dataset webpage). 13 | 14 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 15 | 16 | 17 | %CNN training (transfer learning) 18 | 19 | %% 20 | %If the Neural Network Toolbox Model for CNN Network support package 21 | %is not installed, then the function provides a link to the required support 22 | %package in the Add-On Explorer. To install the support package, click the 23 | %link, and then click Install. Check that the installation is successful by 24 | %typing the model name (e.g. alexnet, vgg16, vgg19, and googlenet) at the 25 | %command line. 26 | %Remember, alexnet, vgg16, and vgg19 require Matlab 2016 or higher. 27 | %Googlenet or our model require Matlab 2017b or higher. 28 | 29 | clear 30 | cnn_model='ours'; %'alexnet', 'vgg16', 'vgg19', 'googlenet', or 'ours' 31 | side_='dorsal'; %'dorsal' or 'palmar' 32 | for i=[1:10] %do the experiments using the 10 folds generated by get_data.m 33 | close all 34 | %training data 35 | rootFolder=fullfile('gender',num2str(i),strcat('training_',side_)); 36 | rootFolder2=fullfile('gender',num2str(i),strcat('testing_',side_)); 37 | 38 | if strcmpi(cnn_model,'ours')==1 39 | disp('Preprocessing .. 
it may take awhile!') 40 | apply_preprocessing_to_all(rootFolder,fullfile(rootFolder,'ours')); %if you are going to repeat the process, you can comment out this line 41 | apply_preprocessing_to_all(rootFolder2,fullfile(rootFolder2,'ours')); %if you are going to repeat the process, you can comment out this line 42 | rootFolder=fullfile('gender',num2str(i),strcat('training_',side_),'ours'); 43 | rootFolder2=fullfile('gender',num2str(i),strcat('testing_',side_),'ours'); 44 | 45 | end 46 | 47 | 48 | 49 | 50 | categories={'male','female'}; 51 | trainingImages = imageDatastore(fullfile(rootFolder, categories), 'LabelSource', 'foldernames'); 52 | testingImages = imageDatastore(fullfile(rootFolder2, categories), 'LabelSource', 'foldernames'); 53 | 54 | if strcmpi(cnn_model,'alexnet')==1 %alexnet 55 | trainingImages.ReadFcn = @(filename)readAndPreprocessImage_alex(filename); 56 | testingImages.ReadFcn = @(filename)readAndPreprocessImage_alex(filename); 57 | elseif strcmpi(cnn_model,'ours')==1 %ours 58 | trainingImages.ReadFcn = @(filename)readAndPreprocessImage_ours1(filename); 59 | testingImages.ReadFcn = @(filename)readAndPreprocessImage_ours1(filename); 60 | else %vgg, googlenet 61 | trainingImages.ReadFcn = @(filename)readAndPreprocessImage(filename); 62 | testingImages.ReadFcn = @(filename)readAndPreprocessImage(filename); 63 | end 64 | 65 | if strcmpi(cnn_model,'googlenet')~=1 && strcmpi(cnn_model,'ours')~=1 66 | switch cnn_model 67 | case 'alexnet' 68 | net = alexnet(); 69 | if strcmp(side_,'palmar') 70 | model=strcat('alexnet_p_',num2str(i)); 71 | conf_name=strcat('alexnet_results_p_',num2str(i),'.mat'); 72 | else 73 | model=strcat('alexnet_d_',num2str(i)); 74 | conf_name=strcat('alexnet_results_d_',num2str(i),'.mat'); 75 | end 76 | 77 | case 'vgg16' 78 | net = vgg16(); 79 | if strcmp(side_,'palmar') 80 | model=strcat('vgg16_p_',num2str(i)); 81 | conf_name=strcat('vgg16_results_p_',num2str(i),'.mat'); 82 | else 83 | model=strcat('vgg16_d_',num2str(i)); 84 | 
conf_name=strcat('vgg16_results_d_',num2str(i),'.mat'); 85 | end 86 | case 'vgg19' 87 | net = vgg19(); 88 | if strcmp(side_,'palmar') 89 | model=strcat('vgg19_p_',num2str(i)); 90 | conf_name=strcat('vgg19_results_p_',num2str(i),'.mat'); 91 | else 92 | model=strcat('vgg19_d_',num2str(i)); 93 | conf_name=strcat('vgg19_results_d_',num2str(i),'.mat'); 94 | end 95 | end 96 | 97 | layersTransfer = net.Layers(1:end-3); 98 | numClasses = numel(unique(trainingImages.Labels)); 99 | layers = [ 100 | layersTransfer 101 | fullyConnectedLayer(numClasses,'WeightLearnRateFactor',20,'BiasLearnRateFactor',20) 102 | softmaxLayer 103 | classificationLayer]; 104 | 105 | miniBatchSize = 60; 106 | numIterationsPerEpoch = floor(numel(trainingImages.Labels)/miniBatchSize); 107 | options = trainingOptions('sgdm',... 108 | 'MiniBatchSize',miniBatchSize,... 109 | 'MaxEpochs',30,... 110 | 'InitialLearnRate',0.0001,'OutputFcn',@plotTrainingAccuracy); 111 | tic; 112 | net = trainNetwork(trainingImages,layers,options); 113 | t_training=toc; 114 | elseif strcmpi(cnn_model,'googlenet')==1 115 | if strcmp(side_,'palmar') 116 | model=strcat('googlenet_p_',num2str(i)); 117 | conf_name=strcat('googlenet_results_p_',num2str(i),'.mat'); 118 | else 119 | model=strcat('googlenet_d_',num2str(i)); 120 | conf_name=strcat('googlenet_results_d_',num2str(i),'.mat'); 121 | end 122 | 123 | net=googlenet(); 124 | lgraph = layerGraph(net); 125 | lgraph = removeLayers(lgraph, {'loss3-classifier','prob','output'}); 126 | numClasses = numel(unique(trainingImages.Labels)); 127 | newLayers = [ 128 | fullyConnectedLayer(numClasses,'Name','fc','WeightLearnRateFactor',20,'BiasLearnRateFactor', 20) 129 | softmaxLayer('Name','softmax') 130 | classificationLayer('Name','classoutput')]; 131 | lgraph = addLayers(lgraph,newLayers); 132 | lgraph = connectLayers(lgraph,'pool5-drop_7x7_s1','fc'); 133 | miniBatchSize = 60; 134 | numIterationsPerEpoch = floor(numel(trainingImages.Labels)/miniBatchSize); 135 | options = 
trainingOptions('sgdm',... 136 | 'MiniBatchSize',miniBatchSize,... 137 | 'MaxEpochs',30,... 138 | 'InitialLearnRate',0.0001,'OutputFcn',@plotTrainingAccuracy); 139 | tic; 140 | net = trainNetwork(trainingImages,lgraph,options); 141 | t_training=toc; 142 | 143 | 144 | elseif strcmpi(cnn_model,'ours')==1 145 | t_training=0; 146 | if strcmp(side_,'palmar') 147 | model=strcat('low_ours_p_',num2str(i)); 148 | conf_name=strcat('low_ours_results_p_',num2str(i),'.mat'); 149 | else 150 | model=strcat('low_ours_d_',num2str(i)); 151 | conf_name=strcat('low_ours_results_d_',num2str(i),'.mat'); 152 | end 153 | [param] = getParam(); 154 | layers=getModel(param,1,[],[]); 155 | miniBatchSize = 60; 156 | numIterationsPerEpoch = floor(numel(trainingImages.Labels)/miniBatchSize); 157 | options = trainingOptions('sgdm',... 158 | 'MiniBatchSize',miniBatchSize,... 159 | 'MaxEpochs',20,... %it was 30 for other models 160 | 'InitialLearnRate',0.0001,'OutputFcn',@plotTrainingAccuracy); 161 | tic; 162 | net1 = trainNetwork(trainingImages,layers,options); 163 | t_training=t_training+toc; 164 | %save(strcat(model,'.mat'),'net1'); 165 | [YPred] = classify(net1,testingImages); 166 | YTest = testingImages.Labels; 167 | % Tabulate the results using a confusion matrix. 
168 | confMat = confusionmat(YTest, YPred); 169 | % Convert confusion matrix into percentage form 170 | confMat = bsxfun(@rdivide,confMat,sum(confMat,2)); 171 | save(conf_name,'confMat'); 172 | 173 | close all 174 | tic; 175 | trainingImages.ReadFcn = @(filename)readAndPreprocessImage_ours2(filename); 176 | testingImages.ReadFcn = @(filename)readAndPreprocessImage_ours2(filename); 177 | 178 | if strcmp(side_,'palmar') 179 | model=strcat('high_ours_p_',num2str(i)); 180 | conf_name=strcat('high_ours_results_p_',num2str(i),'.mat'); 181 | else 182 | model=strcat('high_ours_d_',num2str(i)); 183 | conf_name=strcat('high_ours_results_d_',num2str(i),'.mat'); 184 | end 185 | layers=getModel(param,2,[],[]); 186 | miniBatchSize = 60; 187 | numIterationsPerEpoch = floor(numel(trainingImages.Labels)/miniBatchSize); 188 | options = trainingOptions('sgdm',... 189 | 'MiniBatchSize',miniBatchSize,... 190 | 'MaxEpochs',20,... %it was 30 for other models 191 | 'InitialLearnRate',0.0001,'OutputFcn',@plotTrainingAccuracy); 192 | tic; 193 | net2 = trainNetwork(trainingImages,layers,options); 194 | t_training=t_training+toc; 195 | %save(strcat(model,'.mat'),'net2'); 196 | [YPred] = classify(net2,testingImages); 197 | YTest = testingImages.Labels; 198 | % Tabulate the results using a confusion matrix. 
199 | confMat = confusionmat(YTest, YPred); 200 | % Convert confusion matrix into percentage form 201 | confMat = bsxfun(@rdivide,confMat,sum(confMat,2)); 202 | save(conf_name,'confMat'); 203 | 204 | close all 205 | 206 | trainingImages.ReadFcn = @(filename)readAndPreprocessImage_ours3(filename); 207 | testingImages.ReadFcn = @(filename)readAndPreprocessImage_ours3(filename); 208 | 209 | [lgraph]=getModel(param,3,net1,net2); 210 | 211 | 212 | if strcmp(side_,'palmar') 213 | model=strcat('ours_p_',num2str(i)); 214 | conf_name=strcat('ours_results_p_',num2str(i),'.mat'); 215 | else 216 | model=strcat('ours_d_',num2str(i)); 217 | conf_name=strcat('ours_results_d_',num2str(i),'.mat'); 218 | end 219 | 220 | clear net1 net2; 221 | 222 | 223 | miniBatchSize = 60; 224 | numIterationsPerEpoch = floor(numel(trainingImages.Labels)/miniBatchSize); 225 | options = trainingOptions('sgdm',... 226 | 'MiniBatchSize',miniBatchSize,... 227 | 'MaxEpochs',9,... %it was 30 for other models 228 | 'InitialLearnRate',0.0001,'OutputFcn',@plotTrainingAccuracy); 229 | tic; 230 | net = trainNetwork(trainingImages,lgraph,options); 231 | t_training=t_training+toc; 232 | 233 | 234 | end 235 | 236 | save(strcat(model,'.mat'),'net'); 237 | tic; 238 | [YPred] = classify(net,testingImages); 239 | t_testing=toc; 240 | YTest = testingImages.Labels; 241 | % Tabulate the results using a confusion matrix. 242 | confMat = confusionmat(YTest, YPred); 243 | 244 | % Convert confusion matrix into percentage form 245 | confMat = bsxfun(@rdivide,confMat,sum(confMat,2)); 246 | save(conf_name,'confMat'); 247 | sprintf('The result for %s %s - number %s: %f',cnn_model, side_(1),num2str(i),mean(diag(confMat))) 248 | sprintf('Time training: %f - testing: %f',t_training,(t_testing/length(YTest))) 249 | 250 | clear net 251 | end --------------------------------------------------------------------------------