├── .github └── FUNDING.yml ├── Final presentation.pptx ├── ML_EX9.ppt ├── README.md ├── att_faces └── att_faces.zip ├── pca_code ├── ML_ex8.ipynb ├── ML_project_Final.ipynb └── ex8_data.csv ├── pca_face_detection.ipynb ├── pics_for_presentation ├── Work_Flow.JPG ├── result_With_pca.jpg ├── sample.jpg └── score_Without_pca.jpg └── plot_face_recognition.ipynb /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | # These are supported funding model platforms 2 | 3 | github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] 4 | patreon: # Replace with a single Patreon username 5 | open_collective: # Replace with a single Open Collective username 6 | ko_fi: # Replace with a single Ko-fi username 7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel 8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry 9 | liberapay: # Replace with a single Liberapay username 10 | issuehunt: # Replace with a single IssueHunt username 11 | otechie: # Replace with a single Otechie username 12 | custom: ['https://www.freelancer.com/u/afaqahmad100?w=f'] 13 | -------------------------------------------------------------------------------- /Final presentation.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/afaq-ahmad/PCA-SVM-Features-based-Face-Images-Classification/feb655c6899d94586afa646e69a8c26066dd8367/Final presentation.pptx -------------------------------------------------------------------------------- /ML_EX9.ppt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/afaq-ahmad/PCA-SVM-Features-based-Face-Images-Classification/feb655c6899d94586afa646e69a8c26066dd8367/ML_EX9.ppt -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | # PCA-SVM-Features-based-Face-Images-Classification 2 | 3 | ### Objectives: 4 | 5 | Face recognition on The Database of Faces at a glance. 6 | Data pre-processing. 7 | Dimension reduction using PCA. 8 | Support Vector Machines (for classification) 9 | 10 | ### Project work flow: 11 | 12 | ![Work Flow](pics_for_presentation/Work_Flow.JPG) 13 | 14 | ### Data-Set (The Database of Faces at a Glance): 15 | 16 | The data-set consists of 400 pictures of 40 people. 17 | Each picture has a dimension of 112X92 pixels. 18 | The data set images are in PGM format. 19 | 20 | 21 | Front-facing pictures of a single subject were captured from different perspectives & can be seen by plotting a few examples of the given data-set. 22 | 23 | ![sample](pics_for_presentation/sample.jpg) 24 | 25 | ### Data pre-processing: 26 | 27 | Each image is flattened into a row vector of size 10304. 28 | The size of the whole data set becomes 400X10304. 29 | Then converted into a numpy ndarray. 30 | 31 | ### Dimension reduction using PCA: 32 | 33 | Redundant features are removed using PCA. 34 | Eigenvectors with high variance are kept, while those with low variance are discarded. 35 | The output matrix from PCA will look nothing like the original image. 36 | 37 | ### Support Vector Machine (SVM): 38 | 39 | First of all, for classification, SVM is applied directly without using PCA. 40 | To get the maximum margin, a linear classifier was used. 41 | 100% accuracy was achieved. 42 | 43 | ### Score without using PCA: 44 | 45 | When the original features are used, i.e., dimension reduction isn’t performed, we get the maximum score. 46 | 47 | ![score without pca](pics_for_presentation/score_Without_pca.jpg) 48 | 49 | ### SVM after using PCA: 50 | 51 | >PCA was implemented for K=1,2……10 52 | >For K=1, error was 94%. 53 | >For K=10, error was 1%. 54 | >Significant improvements were made when dimensions were increased. 
55 | 56 | ### Results with PCA: 57 | As we are increasing number of dimensions error is decreasing and score is increasing to one. 58 | 59 | ![Result with Pca](pics_for_presentation/result_With_pca.jpg) 60 | 61 | 62 | 63 | -------------------------------------------------------------------------------- /att_faces/att_faces.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/afaq-ahmad/PCA-SVM-Features-based-Face-Images-Classification/feb655c6899d94586afa646e69a8c26066dd8367/att_faces/att_faces.zip -------------------------------------------------------------------------------- /pca_code/ex8_data.csv: -------------------------------------------------------------------------------- 1 | 5.6702,7.0868,4.2754,5.8189,-2.1269,-3.1249,-1.7641,0.18279,-0.44423,2.4425,6.5677,2.8334,2.2641,1.4804,1.3699,0.48773,-1.1287,1.2632,-0.81249,2.3186 2 | 0.1005,-0.11182,-2.9499,-3.0962,-6.9152,-5.5826,-8.2558,-1.1346,-0.25808,6.991,7.3514,5.6396,4.1069,2.0733,-2.7759,-4.1288,-2.7823,-1.935,-5.5202,-1.4041 3 | 5.2642,7.4971,3.8528,7.1889,-1.5751,-1.9094,-3.0081,0.30877,-1.0281,0.18143,5.2577,2.619,2.3765,1.9385,1.7809,1.1154,-1.0425,1.3218,-0.58202,1.9125 4 | 4.1036,8.1084,3.5166,8.6578,-3.2947,-1.1116,-1.3258,0.57146,-0.44872,2.2365,5.1622,2.359,2.6567,3.8551,1.2246,1.4877,0.57491,-0.79346,-1.0036,-0.12747 5 | -3.4212,-4.2184,-1.5427,-1.7678,-2.1168,-4.3136,-7.5925,-10.649,-10.219,-10.054,-5.4661,-9.7487,-1.4086,0.75779,3.2326,-2.5169,1.9765,-2.1492,-4.7931,-4.9828 6 | 3.8266,-3.5337,-4.1258,-6.8176,-5.5395,-1.0892,1.2045,1.3831,4.6191,5.2413,3.2529,8.7184,6.4596,6.0702,0.53644,1.1044,-6.5138,-6.8114,-1.9105,-3.5512 7 | -4.2564,-3.1434,-1.1365,-0.86318,-3.4535,-5.7632,-6.4497,-10.426,-10.592,-9.3418,-4.0997,-7.6157,-0.83725,1.1274,3.7138,-2.4787,-0.11042,-3.3305,-5.1169,-6.84 8 | 
-3.1641,-3.3783,1.7241,0.18376,3.03,6.2896,5.138,1.071,-3.4468,-5.0117,-9.272,-7.6895,-7.3409,-4.034,1.0999,-2.0415,2.663,0.012598,1.948,-2.3466 9 | 0.75598,2.1829,3.9303,1.7412,-5.0348,-1.7033,-4.4808,-6.77,-3.0245,0.91152,-0.89146,1.9253,2.0591,-1.5978,-0.74324,6.4408,5.429,7.6457,8.7564,6.7568 10 | 5.0231,-0.91134,-5.4199,-5.6389,-6.4044,-3.9186,1.1202,0.27733,3.4337,3.8019,4.3586,8.1833,4.6831,4.7897,0.7455,3.0044,-7.0867,-7.1444,-3.2081,-3.7266 11 | -1.7461,-2.4234,0.71324,1.3896,2.5384,7.0493,4.6731,0.85308,-0.75872,-6.5845,-8.2263,-10.195,-6.4055,-4.6733,-0.83004,-3.3046,0.99158,-0.97267,1.6483,-1.7171 12 | 2.3114,1.9616,3.1564,0.74757,-3.2473,0.90202,-5.6098,-7.1988,-3.9798,1.7371,-1.9124,3.1187,1.5446,0.10935,-1.5825,4.4269,4.7155,8.8329,7.5355,7.2088 13 | 5.1446,7.5504,3.4077,6.4019,-1.126,-2.9551,-0.71928,0.63963,-0.37908,0.14599,4.5813,4.0915,2.6798,1.1416,2.8853,1.1197,-1.3958,1.5159,-1.5965,0.26504 14 | -3.7784,-1.8908,0.7999,1.8625,4.2074,8.1961,6.7951,0.96115,-4.3413,-5.1452,-7.9035,-9.4746,-5.3806,-2.9432,0.4143,-2.4038,1.1335,2.313,0.71695,1.4874 15 | -0.92164,-1.2201,-0.16514,-3.0179,-4.2269,-4.4557,-5.1018,-0.77826,2.2925,5.7828,6.7999,7.1997,3.8331,4.6651,-2.5229,-3.3603,-2.209,-0.82343,-6.6129,-0.15609 16 | 4.5235,-2.0607,-4.198,-7.6186,-7.5609,-2.9409,-0.27772,-0.18441,3.1265,1.4361,4.7205,8.2211,4.3478,4.3249,1.1879,1.8033,-6.5017,-5.866,-3.8339,-2.2448 17 | 0.25023,-1.8738,-0.013144,-4.0927,-5.5185,-4.9498,-4.2623,-1.6546,-0.076045,6.965,3.2319,4.351,3.0665,2.7275,-0.24115,-3.1148,-1.5539,-0.83353,-6.0378,-1.1423 18 | 1.8261,2.8869,2.3006,1.2535,-6.901,2.0077,-4.7352,-6.9201,-1.873,-0.62576,0.55429,1.8196,2.678,1.0295,-0.4979,6.5073,4.6456,9.6467,7.3947,7.8119 19 | 0.78375,-0.45236,-0.29597,-4.6296,-5.2263,-6.7459,-8.4191,-1.61,1.2477,5.8948,5.666,3.39,3.1935,3.523,-2.6418,-3.4045,-4.5781,-1.9564,-5.337,0.82405 20 | 
5.1138,-4.2448,-5.063,-6.7581,-5.3485,-4.1331,0.18789,-0.6097,1.2196,3.4533,3.5121,8.6125,5.9125,4.3193,2.3549,1.8459,-7.7503,-7.8124,-2.8548,-3.2187 21 | 3.924,6.0745,2.4224,5.6372,-3.3653,-2.2243,-0.072397,0.19197,0.0042397,1.7125,5.219,2.5894,2.2393,0.51952,0.96544,0.88984,-0.41677,-1.1892,0.29778,0.33423 22 | 1.1414,-0.3402,0.33209,-1.9817,-5.2532,-5.8149,-6.3357,-1.5046,-0.17798,7.6787,5.4878,5.938,2.8947,5.4643,-0.931,-3.8267,-1.0115,-3.7376,-5.0294,-0.33307 23 | -0.90924,-2.4044,-2.4765,-3.2273,-3.8447,-5.715,-5.9598,-3.5235,2.4124,6.8259,6.1931,3.8925,5.7207,2.7592,-1.8543,-1.9269,-0.86199,-3.0775,-3.6411,-1.3549 24 | -2.9699,-1.3814,1.2978,1.0159,1.1678,6.0035,3.8675,1.7793,-3.2582,-7.5696,-8.2174,-8.4964,-7.4253,-4.7056,1.2429,-2.2965,3.0932,-0.21533,1.3878,-2.6748 25 | 2.2026,2.2076,4.1305,2.9547,-5.1815,-0.40891,-5.6987,-6.6506,-2.1789,0.79846,-0.53472,1.9827,1.0256,-1.4428,1.143,5.8449,5.3252,7.7857,7.4531,6.5485 26 | -0.73761,-0.061129,0.69277,-3.6422,-4.3651,-5.0677,-5.2533,0.61954,1.0842,4.3445,5.2743,4.8348,4.015,4.0494,-2.8066,-4.1183,-1.0796,-2.1017,-6.593,-1.4018 27 | 1.7796,-0.97899,-2.4503,-4.8669,-6.7781,-5.1546,-4.344,-0.9796,1.865,6.5144,5.6996,3.8038,3.0603,3.2236,-0.71928,-1.6669,-2.7761,-1.6834,-6.7733,-0.82479 28 | -2.1276,-2.8981,-2.8701,-1.8786,-3.1449,-4.339,-8.0856,-9.7523,-10.642,-11.484,-8.935,-8.2847,-2.764,1.5989,2.7659,-3.0666,1.4385,-1.7122,-4.8069,-5.3944 29 | 0.27941,-1.343,-1.2465,-2.5231,-5.1131,-3.6366,-4.6812,-1.5933,0.68712,6.8292,5.6642,5.3617,4.5419,4.9903,-0.9134,-2.7833,-3.8851,-2.3167,-4.8109,-0.67418 30 | 4.728,-2.7656,-2.398,-7.1413,-5.625,-4.4108,-0.55399,-0.051588,4.5923,3.7841,3.4324,6.6622,4.333,5.083,0.98785,1.4426,-8.333,-7.47,-5.0198,-4.4463 31 | 3.5144,-1.4353,-4.965,-5.6639,-4.121,-3.8597,0.38921,-1.6658,3.03,4.9336,4.6796,9.2668,5.5436,4.913,3.9265,2.593,-5.4459,-7.4119,-2.1453,-4.5834 32 | 
-2.9124,-0.72564,1.2638,2.922,3.0724,6.7383,4.6414,-0.19341,-4.0315,-6.5173,-11.057,-10.482,-5.5045,-3.4258,1.0528,-2.2782,2.4635,1.2741,2.2672,-1.2853 33 | -2.3226,-1.9728,-1.9743,-1.4005,-2.5978,-1.7899,-7.2886,-12.346,-10.702,-9.5639,-5.9397,-8.3062,-1.5623,1.4938,4.4978,-1.73,0.056698,-2.5352,-4.4069,-6.3253 34 | 2.011,2.0246,3.3437,1.0697,-6.7073,1.1666,-3.8406,-6.8223,-1.6678,1.7336,-0.33956,4.9306,1.0472,-1.659,-0.17574,5.1245,6.9344,9.9653,8.9498,6.3699 35 | -2.9913,-0.77956,2.4317,0.99216,2.928,5.7289,6.4222,0.99995,-2.2128,-9.4704,-9.6673,-9.113,-5.6344,-4.162,3.2907,-2.8035,1.6879,-0.34552,2.2348,-1.0221 36 | 0.007262,0.4818,-0.98634,-4.7141,-5.8058,-4.8235,-8.3313,-1.4674,3.0337,5.9386,7.7398,4.2136,2.4914,2.7613,-0.4809,-2.2869,-3.2326,-3.0126,-4.6824,-1.3129 37 | -3.5721,-1.8965,-1.1226,0.3129,-3.6641,-4.231,-7.7231,-11.048,-9.1909,-10.196,-4.7674,-7.114,-1.3033,0.51217,2.6026,-0.75039,0.49844,-2.0667,-4.9283,-5.2843 38 | 1.6989,1.1597,4.5518,0.60767,-4.2386,1.064,-3.6357,-7.542,-2.5866,1.547,-1.8768,3.9576,0.11349,-3.4745,0.090353,4.5381,6.4711,9.642,8.9714,7.6725 39 | 0.64443,-1.0094,-0.40946,-6.0705,-4.6423,-5.1949,-5.1845,-1.1968,1.8251,6.5857,5.0115,4.8377,3.4365,4.0648,-2.4422,-4.3287,-3.0285,-1.5878,-5.36,-1.3928 40 | -0.23664,3.4908,4.7466,1.6543,-4.0211,1.232,-3.9626,-5.1727,-1.6545,-0.59863,-0.13311,1.1898,-0.93249,-2.0099,0.67434,4.9142,6.7291,10.581,8.4692,7.1072 41 | 4.3901,7.2192,5.885,5.795,-1.4579,-0.021153,-2.5834,-0.65263,-0.23859,2.2943,6.1327,1.4298,2.7096,1.4921,2.8484,0.10899,-0.81141,-1.2834,-0.8698,1.9956 42 | -3.5447,-1.3123,0.02381,2.6765,2.0567,5.4185,6.7481,1.7518,-3.0037,-5.9885,-8.1732,-10.015,-6.5327,-4.9377,-0.41134,-2.3101,1.7243,0.076388,1.2274,-2.2472 43 | 5.9543,6.6741,3.2904,4.6176,0.44456,-2.0463,0.6635,-0.04295,-0.060508,2.0867,6.3863,2.153,3.2966,1.6924,3.1807,0.71196,-2.5028,0.074785,0.85894,-0.34093 44 | 
5.7154,-4.7492,-4.9602,-6.097,-4.8459,-3.0486,0.3819,1.9774,2.9137,3.8439,3.3925,6.2685,2.5269,4.5147,3.0119,1.6556,-8.1386,-7.0058,-3.4528,-4.7184 45 | 5.1085,-2.1572,-3.7429,-6.2702,-3.2794,-4.1817,0.45476,0.78112,4.149,4.5442,4.5873,9.4645,5.1081,5.2121,1.8417,1.5022,-5.8515,-7.9345,-2.7732,-4.2132 46 | 4.8582,5.5981,4.791,5.5473,-0.39614,-2.06,-2.0098,0.48193,-1.7893,1.6084,5.7636,1.5122,2.0225,0.49717,1.6961,-0.52908,-1.7529,1.2506,0.2132,2.0453 47 | -3.111,-3.3222,-2.5967,-0.46489,-4.0466,-1.9385,-6.5577,-10.109,-8.033,-10.049,-5.5445,-6.6135,-0.94858,0.72973,0.78466,-3.6949,-0.85665,-1.9498,-3.2733,-5.6195 48 | -0.36937,0.10734,-0.30833,-3.5326,-2.9102,-3.1239,-7.6705,-1.4282,1.0776,6.0045,5.4083,4.2101,4.5347,3.9591,-3.601,-3.3209,-4.4668,-2.4132,-5.4956,-0.4657 49 | 0.055081,-0.73048,-2.7046,-4.0388,-4.6135,-4.3023,-6.8018,-2.653,1.9138,5.5564,3.8301,4.9801,4.2824,4.3838,-1.9723,-3.1896,-3.3251,-1.8737,-5.8515,0.76053 50 | -1.7355,0.32621,1.1215,1.3793,3.2808,6.423,6.1811,0.19951,-0.98319,-8.0403,-8.253,-11.149,-3.8948,-2.7228,1.644,-2.6081,1.929,-0.27715,3.9963,-1.7488 51 | 5.2746,7.1792,2.2302,6.6941,-2.7072,-2.4722,-1.2045,2.8719,0.91059,1.12,3.8573,2.109,3.4967,1.8524,1.7971,0.22126,-0.89634,1.8051,-0.33365,0.69196 52 | -1.5256,1.3187,-1.6721,-3.0748,-5.9836,-5.6806,-6.0261,-0.15359,2.067,5.5987,5.8912,5.8044,3.9575,3.7969,-1.5265,-2.9259,-3.8768,-2.7342,-5.5845,-0.48289 53 | 2.6682,0.30182,3.1338,0.70901,-2.8345,1.2961,-5.9234,-6.588,-2.365,2.2537,-3.1277,3.557,2.1522,-1.436,2.3379,3.7867,5.3649,7.8331,11.07,6.3273 54 | -2.1174,-2.9616,1.6338,2.4718,2.9726,5.634,6.0542,-0.057914,-2.8344,-5.9991,-9.057,-9.398,-6.4405,-5.1671,1.5168,-4.2241,2.3861,0.23765,2.0194,-0.63097 55 | -2.8266,-1.4481,0.56591,-0.43989,4.018,5.6642,5.8304,0.093227,-3.7123,-6.6375,-9.7858,-11.232,-5.1419,-2.1762,1.123,-1.5199,3.2388,-0.96223,0.24285,0.66341 56 | 
-2.5245,-2.6164,-1.9265,-0.57483,-3.1619,-3.1832,-5.1282,-10.984,-8.7868,-10.022,-5.6947,-8.1168,-1.3098,3.713,1.6802,-1.4626,-0.57968,-1.9181,-4.9628,-5.3016 57 | 6.1171,7.8819,3.7524,5.5151,-1.6904,0.027139,-0.84879,2.0766,-1.563,-0.31814,5.2169,1.7318,2.8281,0.96498,2.8158,0.53535,-3.0481,-0.34486,-1.0674,0.91945 58 | 4.8593,6.9594,4.0581,5.2118,-0.47181,-2.0388,-2.164,1.7365,0.21118,0.34553,4.2384,2.1476,1.834,1.42,1.2565,0.59923,-0.853,0.60179,0.056514,2.4283 59 | 3.5737,7.572,4.8434,6.897,-1.2349,-2.3571,-1.2517,1.1669,-0.68077,1.4987,3.4515,1.9488,2.2387,2.678,3.3114,0.37131,-1.2181,1.8275,-0.97228,-0.48436 60 | -2.7126,-2.6426,-0.72742,-0.29474,-3.4353,-4.4239,-6.3053,-9.7586,-8.092,-9.3951,-4.477,-6.5344,-1.5321,-0.16784,4.0441,-1.3545,-0.96013,-3.1495,-2.7014,-5.3878 61 | -0.3351,0.071796,0.035165,1.0202,2.5212,6.2574,4.9989,1.8309,-2.8852,-7.1096,-9.3809,-10.568,-6.5308,-3.0188,0.40008,-1.7085,2.1183,-0.86014,2.4071,-2.0116 62 | -1.7427,-2.7703,-0.37465,-0.17936,-2.6174,-2.4586,-6.8564,-10.607,-12.505,-10.465,-7.0409,-6.8349,0.3182,-0.022015,4.2284,-1.0622,-1.6616,-1.9144,-4.596,-5.4372 63 | -2.8226,-1.0988,-2.7405,-1.0832,-2.3853,-3.6114,-6.9388,-11.634,-10.378,-11.603,-5.6653,-7.152,-1.467,1.5025,3.4058,-2.0377,0.35814,-2.0964,-4.7657,-6.4183 64 | 3.3676,-4.7311,-5.0158,-6.1347,-5.3841,-3.0226,0.74174,-1.9373,2.3367,2.6835,4.9708,7.4984,6.9503,4.9891,3.1565,0.96826,-6.0146,-9.1364,-1.8965,-2.8794 65 | 4.1834,6.5328,3.6984,5.9664,-2.5883,-2.5147,-1.0605,1.1033,-1.1059,1.5188,7.5288,2.9796,2.0312,1.4649,3.2549,-0.36164,-2.4748,-0.43573,-1.2359,2.2306 66 | 3.3508,-3.6087,-4.5419,-5.7908,-6.3608,-2.0192,-0.35675,1.4368,4.0007,3.9394,4.8522,8.9819,4.2406,4.2798,3.6967,2.1586,-7.5437,-6.8758,-2.2678,-3.316 67 | -4.2785,-2.9193,0.9428,-1.4767,-2.1503,-5.4621,-8.1406,-11.859,-9.6109,-8.564,-6.4026,-8.9161,-1.3703,-0.019189,3.8612,-0.33164,-0.84235,-2.4691,-6.6679,-3.5352 68 | 
5.1546,6.6183,4.2516,5.2941,-0.039847,-1.29,-1.7194,0.79071,-2.0252,0.76121,4.7241,2.174,3.2151,0.29801,2.4359,1.8691,-1.0467,0.3467,-1.5354,1.0926 69 | -2.3889,-2.681,-0.71953,0.20742,2.5901,5.6231,4.8085,1.2761,-3.5888,-6.617,-8.3605,-9.854,-4.3152,-2.2751,3.3206,-1.5268,1.9462,0.091485,1.8083,-2.6958 70 | 4.2974,-4.0245,-1.9718,-7.5647,-4.4561,-3.0798,-0.63382,0.90673,2.5988,5.8078,5.0304,9.5037,5.5524,5.7311,2.5757,1.049,-6.546,-6.2185,-4.0075,-2.3507 71 | 2.3872,2.7543,4.9499,-0.34523,-3.6505,2.0656,-5.4382,-5.5777,-1.8532,1.3298,-0.14985,1.3525,1.1705,-2.2616,-1.0666,4.8248,5.2066,9.0818,9.0707,6.1063 72 | 5.5333,7.6864,5.7266,6.8463,-2.1291,-0.77188,-2.584,1.2696,-0.85665,1.3728,5.3175,2.4739,1.3046,1.7895,2.711,2.5648,-0.050381,-0.33205,0.058248,2.4524 73 | -3.053,-2.9749,1.1723,1.1176,4.7769,8.0193,4.4227,-0.68609,-3.3782,-6.2622,-8.8851,-9.957,-5.005,-3.699,0.14882,-2.6451,1.3974,-0.18179,1.5844,-0.96512 74 | -4.2517,-1.8632,-2.2157,-1.6016,-2.1388,-4.4349,-6.327,-9.3993,-10.024,-11.906,-3.4452,-7.8588,0.93717,1.1404,3.4422,-1.0426,-0.14923,-2.8427,-2.3378,-4.3309 75 | -4.9142,-4.5738,-3.2769,0.52534,-4.3314,-3.8081,-9.8522,-10.68,-8.2685,-10.197,-5.3892,-8.3051,0.23629,3.8624,2.3485,-2.1064,-1.4737,-3.7278,-5.6261,-5.4055 76 | -3.9006,-3.2185,-2.5156,-1.5586,-2.0697,-4.1206,-8.0474,-10.638,-9.7339,-10.026,-5.0015,-6.0666,-0.18951,-0.63044,3.1403,-1.0786,0.94557,-2.8026,-5.0366,-4.9745 77 | 3.4973,-2.7801,-6.7733,-7.0011,-5.0677,-3.524,2.5448,1.1642,4.1988,4.4405,5.131,7.065,5.1632,1.7311,2.7485,1.5939,-7.9115,-5.9363,-5.2678,-3.2301 78 | -1.999,-2.9066,-2.0874,-1.8418,-2.6211,-3.751,-7.3879,-9.8709,-6.7676,-11.147,-4.4716,-9.7185,0.074472,-1.3512,4.375,-3.1488,0.10159,-1.4675,-5.305,-6.769 79 | -0.58674,-0.25011,-1.6367,-3.9307,-6.0332,-4.7485,-6.0741,-2.4174,1.8386,5.5994,5.7097,3.5055,3.4188,3.8612,-1.4751,-2.3515,-2.3135,-2.8833,-4.9651,-0.67212 80 | 
-3.3893,-3.5694,-2.5767,-2.2211,-1.9846,-3.3518,-7.9175,-9.3897,-10.872,-9.6605,-7.429,-8.6621,-0.68806,2.2354,3.6611,-0.89273,-0.78717,-3.4643,-2.8303,-5.5224 81 | -3.5781,-1.1518,1.352,0.56466,3.7057,5.0963,5.9366,-0.80055,-3.3264,-6.475,-8.0366,-9.8232,-4.8573,-2.6858,-0.33862,-1.553,2.5869,-0.75095,0.84227,-1.0972 82 | -1.2266,-1.9666,-0.40575,-2.9749,-3.9053,-4.9112,-7.7613,-11.315,-11.783,-10.118,-5.2367,-9.2565,-1.3956,1.5079,2.0515,-2.1359,1.1859,-0.68861,-3.4476,-4.0961 83 | 3.346,2.4043,5.516,0.61214,-4.8082,-1.1012,-4.446,-6.129,-2.5555,1.3259,-0.89718,2.9964,-2.5999,-2.2089,1.1345,7.1077,6.1683,8.1634,10.51,7.0422 84 | 5.44,-1.2353,-3.9546,-7.7296,-4.6758,-4.54,-1.245,-0.25717,3.6477,4.0955,4.2977,7.3313,4.3458,2.2553,1.362,0.16963,-5.7791,-7.422,-1.8168,-4.7957 85 | 2.9367,3.6,4.3991,2.144,-5.9872,0.36601,-3.0464,-6.3695,-3.2389,3.0786,-0.028188,2.308,0.10877,-1.2934,0.56661,5.2811,4.7064,7.0235,7.5618,5.5683 86 | -4.4874,-2.3592,0.436,0.16846,3.755,7.0424,4.9914,-0.49002,-3.5058,-5.2092,-5.9231,-9.9228,-6.8736,-4.1904,-0.88085,-1.7538,1.7217,-2.317,0.89218,-2.3252 87 | -3.2644,-2.3831,0.54633,1.0013,4.7346,6.5789,4.132,1.3307,-3.5361,-8.3151,-9.6057,-8.7509,-4.6755,-3.0498,1.5975,-1.0493,0.25266,0.13164,1.2152,-3.6798 88 | 5.3803,7.3959,2.5864,4.7554,-0.36824,-1.4587,-2.4804,-0.16805,-0.6486,1.7976,5.7295,3.6173,0.73806,0.1769,2.5151,1.4874,-1.8751,-0.78372,-1.1773,1.6886 89 | 4.7738,-2.3533,-5.0098,-5.8882,-4.1407,-2.48,-0.99608,-1.0355,2.4353,4.9474,1.8975,7.4053,5.8202,2.5946,-0.00619,-0.12726,-9.0655,-7.38,-2.8088,-2.9579 90 | 5.1816,8.5346,4.4263,7.5046,-0.80775,-2.2456,-1.1263,-0.19911,-1.0854,0.10316,4.0491,3.1191,2.9091,-0.057308,2.6441,-0.91945,-1.1649,0.14201,-0.28736,0.67989 91 | -3.6873,-2.2899,0.33499,0.24949,4.7567,8.1628,6.0105,1.3336,-0.82012,-5.6712,-8.2585,-8.7827,-6.83,-3.0304,1.4998,-1.6107,1.278,0.45247,3.0548,-1.824 92 | 
5.1323,-3.0695,-3.5891,-7.1648,-7.6933,-2.8185,0.78808,0.50493,2.6542,4.1175,6.4243,8.2533,6.9934,4.7168,1.9693,3.133,-6.0474,-6.2113,-3.1733,-4.8771 93 | 5.3037,6.8901,5.1732,6.0548,1.011,-3.7318,0.92467,-0.48139,-1.9881,1.6297,5.9813,2.0841,2.0048,0.73085,1.3766,1.0871,-1.3401,0.17931,-2.7296,0.25512 94 | 2.8128,0.88362,5.4152,0.81212,-3.4265,0.10415,-5.1773,-6.9043,-3.3248,1.2609,0.063897,3.0821,1.5405,-1.4336,0.19142,3.5878,8.6018,9.5358,9.746,6.6668 95 | -3.4659,-2.3448,-2.8251,-1.887,-3.6581,-3.0426,-8.1333,-10.73,-10.502,-10.651,-5.6827,-7.9646,-0.87044,0.88969,3.3005,-3.3046,0.38045,-4.1825,-4.7271,-5.4124 96 | -2.3299,-3.8602,-1.1572,-1.4319,-2.0944,-4.954,-5.5522,-8.8077,-11.314,-10.913,-5.0706,-8.2308,-2.5296,-0.69862,3.465,-2.1334,-1.0627,-2.5437,-4.575,-5.7852 97 | -3.1839,-3.8948,1.0676,1.3663,1.4392,5.2842,5.1391,0.61594,-5.1696,-7.0829,-8.0791,-9.4277,-5.2777,-2.0017,1.8401,-4.5863,1.0221,0.23699,2.443,0.18728 98 | -0.68101,-1.2555,-1.9057,-4.3648,-5.5451,-5.575,-5.5778,-2.4919,0.43309,7.0929,5.402,5.1412,6.3384,1.8222,0.19485,-1.7748,-1.5782,-0.82931,-3.2064,-0.99339 99 | -2.7596,-1.667,-2.5816,2.007,-3.4564,-4.2867,-8.7424,-10.587,-9.8011,-10.219,-4.9498,-7.734,-3.0212,2.1093,3.4671,-1.3858,-0.35352,-2.5403,-4.2416,-7.2111 100 | 0.21145,-0.089121,-0.62626,-3.4358,-3.9046,-7.3479,-6.2448,-0.27584,1.7156,6.6145,7.0617,5.9727,2.6119,4.1983,-3.4403,-2.8414,-2.8942,-1.0436,-4.9271,-0.41122 101 | -2.4135,-0.46424,1.5639,-0.34093,5.1168,6.6066,7.2812,0.10956,-2.5749,-7.735,-10.526,-8.9778,-6.6309,-2.2981,0.67905,-2.2099,2.4087,-0.50335,3.0174,-4.2459 102 | -3.4113,-3.0054,0.075609,0.92631,4.0772,5.6752,4.5004,3.2152,-4.1023,-6.0811,-9.8877,-10.767,-4.876,-3.4649,1.1033,-2.7429,4.0682,-1.2155,2.0054,-2.1297 103 | 2.9339,-3.2901,-2.394,-8.3409,-4.7137,-3.1438,-0.55545,-0.1601,3.5202,4.173,3.8938,7.5669,5.0243,5.0987,0.40933,-0.61353,-8.0302,-9.508,-2.8296,-3.0907 104 | 
1.2652,-0.48668,-0.83722,-4.9771,-3.3402,-7.502,-6.627,-2.2514,1.7541,5.175,5.0365,5.2891,2.8812,2.9615,-3.65,-3.9729,-1.2109,-1.2268,-5.4444,-0.33348 105 | 2.7501,-3.7789,-3.4949,-7.171,-4.8036,-3.9517,-0.82751,-0.59148,3.4475,4.725,5.0271,5.3152,6.5322,5.579,2.8301,1.3243,-7.1584,-6.4463,-3.4968,-2.5497 106 | -3.9702,-4.8709,-1.8586,-0.11564,-3.3772,-5.8583,-7.0294,-9.8915,-10.999,-11.661,-5.284,-5.4154,-0.56206,0.66364,2.2954,-1.9956,-0.67803,-1.6114,-4.8547,-5.7167 107 | 0.80023,1.7034,4.0812,0.98528,-4.5261,0.59392,-3.1578,-7.3571,-2.5309,2.7422,-0.21043,1.6597,2.4463,-2.6033,-0.99951,4.9122,6.9239,9.6864,10.053,5.9881 108 | 2.5704,2.5858,4.608,2.8695,-5.1539,-1.1206,-4.5602,-5.2733,-2.705,0.41675,-0.30632,1.693,1.2229,-1.3959,-1.2421,4.9436,7.4424,9.021,7.0733,6.4422 109 | 1.0126,1.9207,5.5149,0.38963,-4.3957,0.7922,-4.2298,-6.6479,-2.027,1.3922,0.41833,0.50997,0.21907,-3.1695,0.24713,5.0637,5.8195,8.6089,10.882,6.1973 110 | -0.74068,-4.0346,-2.304,-1.6108,-5.2075,-4.2649,-6.3769,-2.5465,2.5531,6.6442,4.6338,3.7781,3.5475,3.1163,-1.7174,-4.7646,-1.7989,-1.5905,-6.509,-0.69684 111 | 5.3069,7.0028,3.7542,4.7509,-0.23625,-1.4917,-1.9512,0.77644,0.99348,1.0675,6.2304,1.3381,1.9098,2.1146,2.7074,2.0378,-0.46889,0.71408,-1.5781,1.8279 112 | 5.5122,-2.4645,-6.0214,-6.1828,-5.3678,-2.1207,-0.13149,-0.32364,3.7442,4.6642,4.5877,7.6905,5.5462,5.6981,2.2975,2.0508,-6.0473,-6.1269,-2.1701,-3.3359 113 | 6.5846,7.9232,3.9816,5.1574,-1.4107,-3.613,-0.74582,0.65568,0.55063,0.46928,5.017,4.5717,4.4558,2.4635,3.5487,0.34687,-1.1359,0.89495,-0.28451,1.7288 114 | 3.5749,3.4441,2.1753,2.0286,-4.7297,-0.32285,-7.0277,-6.7184,-1.0163,0.72905,-0.090727,1.8333,-0.12119,-2.0057,-0.47424,5.729,5.5791,8.7199,7.3773,5.9739 115 | 5.8634,8.1807,2.5007,6.1199,-2.3562,-2.1505,-0.27889,1.0389,-1.7534,2.1105,3.0926,2.6983,1.9648,3.3973,3.463,-0.11525,-1.4259,-0.39554,-0.47023,2.2782 116 | 
-2.9082,-1.1836,-2.3108,0.28912,-4.7278,-3.7003,-4.3704,-10.851,-8.7978,-8.5041,-6.5747,-5.7837,-1.8592,0.14522,2.1669,-1.4215,0.34341,-3.047,-6.5478,-4.6794 117 | 1.1386,2.7783,2.7823,1.4102,-6.4874,-0.53339,-6.0971,-6.3828,-3.3155,0.13179,-0.92225,3.0746,1.8763,-2.1335,-1.5246,6.7176,6.9743,6.7251,9.4277,8.8379 118 | 5.7518,7.6967,2.9663,5.8568,-0.97737,-2.7943,-3.0452,1.9593,-2.1198,0.86488,2.9641,4.2865,2.8273,1.5272,4.1608,0.57039,0.51023,-1.0824,-1.3741,1.6751 119 | 1.9367,-0.13477,-0.27112,-4.8044,-4.4436,-6.9792,-6.911,-1.5998,1.8354,6.7848,6.5913,5.8635,4.4722,4.7894,-1.5409,-4.6407,-1.0518,-1.0128,-5.2181,-1.5842 120 | 5.4189,7.273,3.9655,6.3038,-0.3753,-1.3568,-0.94988,2.2852,-0.29546,2.1461,5.7801,2.7523,3.9874,3.0606,1.8482,-0.57112,-1.7774,-0.14822,-1.5419,1.8931 121 | 3.3199,-2.0031,-3.3483,-8.5751,-7.4241,-5.1173,-0.41401,0.81946,1.933,3.6134,4.5934,8.2397,3.2577,2.61,1.3089,-0.14142,-5.8232,-7.4635,-2.8671,-1.7599 122 | 1.6444,-0.14421,2.2043,0.44147,-4.6948,0.43891,-5.3355,-6.9086,-4.6007,-1.1699,0.71482,2.4163,2.6861,-0.68632,0.87139,6.4521,6.3933,9.338,9.3295,7.0699 123 | 1.9058,2.5212,2.3707,-1.4693,-4.7201,0.1088,-5.4257,-4.7793,-1.4148,0.24761,-0.2496,3.9594,1.3509,-0.72301,-1.686,3.6289,4.5766,9.3112,9.3004,7.3424 124 | 2.3937,2.2996,2.9621,2.1941,-5.664,0.15038,-5.2277,-6.7321,-1.5082,0.85387,-0.29853,2.252,1.4074,-1.7122,-0.034248,4.0713,6.4094,9.0262,7.9974,6.2977 125 | 4.7379,7.5662,4.5656,6.8477,-1.3134,-3.5803,-1.8185,1.7715,-0.76667,0.11996,5.1876,3.9893,2.3483,3.9502,1.8386,0.9409,-1.9687,1.4448,0.88214,1.9402 126 | -4.2565,-3.8955,-1.4406,-1.9101,-2.6481,-2.4344,-7.0619,-8.9706,-11.94,-9.748,-5.8438,-6.5022,-2.5781,2.4061,3.5693,-2.4651,2.5221,-2.1828,-5.0669,-5.4678 127 | 5.4714,-2.7784,-4.1604,-7.3768,-5.9706,-1.2879,0.77981,-1.4725,5.2,5.8923,4.7652,7.472,4.3787,3.0724,2.9238,-0.018222,-6.2287,-7.4715,-3.192,-2.218 128 | 
2.5664,2.4991,4.8355,2.8273,-6.3656,1.8763,-5.7279,-6.9839,-3.4559,0.42727,-1.9624,2.3596,0.82161,-3.2773,-0.10737,4.8467,4.6761,7.8966,8.8333,6.8332 129 | 0.61121,2.9654,4.1395,2.2577,-5.147,-0.085429,-4.9021,-6.301,-2.7246,-0.89147,-0.19649,2.5773,0.080773,-3.0251,0.47298,3.8774,9.2526,8.1356,9.8424,8.2968 130 | 1.5005,0.15481,-1.758,-2.4641,-5.4882,-2.8206,-7.7397,-2.4054,1.0463,6.7743,5.1404,7.3441,2.6055,2.4303,-1.7811,-3.878,-1.9818,-2.5148,-4.6002,0.75408 131 | -0.077863,-0.62143,-0.5942,-5.6122,-3.8177,-5.2961,-5.7245,-0.64498,0.50064,7.629,5.8988,5.8082,3.8609,3.6593,-0.16664,-2.6936,-2.3933,-2.1507,-4.9482,-0.91701 132 | 1.9364,2.6566,5.0507,-0.38541,-3.8869,-0.1698,-4.8562,-5.181,-1.9724,-0.16199,-2.5803,2.1526,-0.45203,-1.9983,-0.059852,6.1864,5.4694,9.6545,7.2622,7.4401 133 | -1.1091,-0.36882,-2.2108,-2.7377,-7.2848,-4.8217,-5.488,-1.4512,1.7871,5.763,6.3663,4.6911,3.4997,3.4284,-1.8508,-4.2748,-0.76769,-5.0755,-4.9462,-2.21 134 | -5.6797,-2.0794,-0.76322,-1.5583,-3.6916,-3.6434,-8.0915,-11.293,-9.615,-9.773,-4.0872,-8.7228,-0.087836,1.5958,4.1685,-2.9897,-0.37848,-3.1635,-3.8319,-6.0029 135 | -5.3508,-2.3596,-1.6758,-0.56254,-2.6614,-4.8513,-6.7444,-10.176,-11.855,-11.208,-4.1696,-7.2923,-0.28353,1.3742,2.5555,-3.0457,-0.16976,-1.5796,-3.6016,-5.0359 136 | 0.18945,-1.4437,-0.0032792,-2.1893,-4.859,-3.9289,-6.7643,-1.7804,1.7314,6.1983,5.6431,6.6266,2.5537,3.8592,-1.9958,-3.2116,-2.7943,-4.1803,-5.643,-2.284 137 | 2.9096,0.85044,6.5331,0.91664,-5.2058,-0.62587,-4.3218,-4.9957,-2.3848,-0.31731,-2.4786,0.55137,-0.58349,-1.4812,-2.2046,5.0316,7.4159,10.739,7.7341,7.4059 138 | -0.99567,-1.7555,1.3453,3.659,2.2889,7.0508,3.883,1.2157,-2.4993,-8.0781,-9.2834,-9.6202,-6.3028,-3.4226,2.6106,-1.5222,1.6511,0.66327,3.0808,-2.2435 139 | 6.0041,-2.0928,-4.3812,-7.2167,-5.6933,-4.128,2.048,0.16319,4.2393,6.4617,6.8099,6.3746,6.2729,4.2767,3.0736,1.6104,-6.0782,-8.0449,-2.536,-5.2111 140 | 
0.01845,-0.43068,-2.3484,-3.9872,-5.3396,-7.049,-7.2151,-1.5494,1.0954,6.5402,5.2542,4.5441,2.998,3.4027,-0.084881,-1.5803,-1.8739,-3.9145,-7.3794,0.42157 141 | 4.8419,-2.0668,-4.0175,-8.9822,-5.3681,-3.4554,1.1418,0.31667,3.087,3.9499,4.0242,5.855,3.9211,3.3953,2.475,-0.41174,-7.8385,-6.7474,-2.7959,-2.6819 142 | 5.8886,-1.7779,-4.9876,-6.6161,-5.926,-1.9433,-1.3508,2.4691,1.8845,4.216,2.6985,9.6618,4.8938,3.8325,2.5879,1.1359,-6.7818,-5.7689,-3.7093,-3.2974 143 | -3.497,-2.6518,0.99952,0.34363,1.7813,4.6278,4.9756,1.7293,-1.9795,-7.8152,-7.8165,-9.7913,-6.7221,-4.1339,2.7574,-1.7953,0.7219,-0.28401,1.7403,-2.3299 144 | 5.4651,-4.5171,-4.5519,-7.322,-4.1904,-1.2243,0.39332,0.10938,3.9094,4.0284,1.9671,6.5174,3.1289,4.0938,1.6948,1.0423,-8.2414,-6.7757,-4.4606,-3.1915 145 | 5.2646,7.7541,4.6638,5.8242,-0.88906,-3.4204,-0.24619,1.5425,0.91622,2.4539,4.4878,4.2449,3.7326,0.56484,2.406,0.37411,-0.95136,0.21936,-0.49082,0.86439 146 | -1.6088,-0.024639,-2.1565,-1.505,-3.9789,-3.2807,-7.0274,-9.4118,-10.635,-8.7995,-6.4089,-8.8421,0.5573,0.20669,3.3235,-1.5367,-0.29886,-1.1711,-3.4396,-4.9487 147 | -3.2813,-1.4447,0.14045,2.9208,3.5768,6.3891,5.6653,1.6665,-4.814,-6.9245,-8.0919,-9.0438,-4.9786,-3.2561,2.3644,-2.9423,1.6897,-0.088605,2.1876,-2.9187 148 | -2.0138,-1.4434,1.9788,0.99687,3.0316,6.2499,5.0521,0.54706,-3.4827,-6.4336,-8.2163,-9.2299,-5.9996,-4.0343,1.6307,-0.13254,2.7788,1.0476,2.8963,-1.8059 149 | 4.8726,6.7431,4.337,4.4014,-1.5473,-3.0054,-0.35317,-0.36532,-1.8301,1.3513,6.0298,2.1346,2.496,2.4466,3.2326,0.19674,-2.309,-0.041275,-0.91707,-0.55223 150 | -2.4503,-1.8044,0.54705,1.4181,1.6696,6.9664,2.8635,-0.14472,-1.9308,-5.8916,-9.3533,-9.6037,-7.6557,-2.1726,-0.78374,-2.7581,2.0529,1.1367,1.7701,-1.0996 151 | -3.0442,-2.5916,2.1222,2.2706,2.9544,7.6177,6.1424,0.17748,-3.0323,-7.7357,-9.2848,-8.4232,-5.8258,-2.2512,1.0573,-2.2956,2.2772,-0.27013,2.2375,-2.9442 152 | 
3.2285,1.9109,2.2858,1.073,-4.8216,-1.4801,-6.3924,-7.7178,-2.483,2.8577,-1.4207,4.7027,2.4731,-0.69654,1.5394,4.3174,6.506,10.237,8.5515,7.8067 153 | 4.4939,5.6546,2.5713,6.0786,-2.6656,-1.7412,-0.48204,-0.23471,-0.046765,2.647,3.5924,2.7542,3.245,-0.99916,4.0582,1.3139,0.050048,0.11407,-1.6327,0.92898 154 | 3.5671,-3.7329,-2.7088,-7.6091,-7.0403,-4.7034,-0.81858,1.5387,2.715,5.3712,4.7745,8.1495,4.4299,4.4914,0.9653,-0.4099,-6.1106,-8.7135,-5.0531,-3.1693 155 | -1.2959,-1.8802,-0.82126,-4.7506,-4.483,-5.6801,-8.0824,-1.6626,-0.55018,6.0825,8.3381,4.1401,3.1236,3.2936,-1.492,-4.5096,-0.9457,-3.1004,-5.3486,-0.57293 156 | -2.5342,-1.9138,-0.4836,1.5911,3.6579,6.9728,7.0739,-0.99892,-3.1232,-5.3751,-8.2505,-11.018,-4.8192,-3.5635,2.6016,-3.9255,2.6942,0.069868,2.4621,-1.7168 157 | 2.4963,-1.2457,-3.9322,-7.6067,-5.5506,-3.1835,0.57556,0.52421,2.4575,6.2117,4.2857,9.0817,4.6123,4.7341,2.5585,2.8234,-8.2061,-8.8895,-2.3745,-4.907 158 | -2.6656,-4.547,-2.8728,-1.3123,-3.1403,-5.2762,-6.1654,-11.441,-9.6779,-9.2338,-5.4211,-8.5212,-0.62356,1.5707,1.7836,-0.96932,-0.74035,-1.919,-3.0044,-3.9002 159 | 5.8302,9.3402,4.2424,6.0232,-1.7111,-0.38343,-2.0188,0.57825,-0.70145,0.55353,4.1373,0.87645,2.8403,0.69433,1.0308,-1.9407,-0.6399,-1.251,-1.2883,0.99791 160 | 4.352,-3.4101,-4.2592,-6.368,-5.6335,-2.4591,-0.31865,0.38639,2.3545,5.3092,3.4648,8.2722,4.7351,4.5197,1.0168,2.4687,-8.6295,-7.3842,-3.2674,-3.7665 161 | -0.87371,-0.307,-1.134,-5.227,-4.1243,-6.0718,-8.1259,-2.4193,-0.0088609,5.8976,5.3572,5.5525,5.0545,2.9564,-0.20872,-2.8482,-2.441,-2.8716,-4.1127,-1.2897 162 | -0.30367,0.78645,-1.6913,-3.8715,-5.8315,-5.9702,-7.2581,-2.4004,1.3949,6.5972,4.0593,6.4899,3.8691,3.9895,-2.6555,-3.5745,-0.97986,-2.0234,-6.2252,-0.11419 163 | 1.7749,1.1395,5.0269,2.1392,-4.2715,-1.6647,-4.2649,-6.5261,-3.2285,1.104,-0.015536,2.1418,0.44986,-1.4879,0.8432,5.3886,6.8446,7.4511,8.9008,7.3388 164 | 
1.2868,1.7294,3.1932,1.0566,-4.5795,-1.0551,-5.6469,-7.792,-3.7856,0.19249,-0.13653,2.2035,1.9839,-2.3504,-0.51708,4.6428,6.0103,8.8724,8.7179,6.3548 165 | 5.2648,6.767,4.1612,7.8774,-0.45493,-1.6183,-2.5128,0.80062,-1.7125,2.4213,5.4349,2.9501,0.16754,0.25774,0.62774,2.158,-1.6536,0.67371,-1.293,0.32813 166 | -2.3512,-1.66,0.60657,2.0443,3.4196,7.9718,4.187,2.9183,-0.84759,-6.5048,-7.2617,-7.9869,-4.8957,-3.2217,0.95182,-3.2421,2.9577,0.72338,1.7395,-1.9725 167 | 1.1571,1.0545,-1.9087,-2.8643,-3.42,-4.9748,-5.9255,-1.9397,-0.30415,6.0771,7.0364,6.602,5.3424,1.8384,-3.9388,-0.79148,-0.52335,-1.1491,-5.6695,-0.26766 168 | 3.0031,0.54044,4.9306,0.029632,-5.4675,-0.14514,-5.0073,-7.9996,-2.6556,0.51675,-0.55862,2.2172,0.75923,-3.3227,0.36406,5.5051,5.2617,9.4694,7.8602,6.3978 169 | -3.6195,-1.5899,-1.4327,-0.46734,-2.6229,-5.6205,-6.0147,-11.661,-10.322,-11.51,-6.8116,-6.7129,-2.3099,0.0058969,3.1576,-2.2472,-1.4758,-3.932,-5.7534,-5.0663 170 | -0.37929,-2.5503,1.483,1.3313,3.3632,8.8654,3.6881,0.89659,-2.4825,-4.9386,-8.4391,-10.844,-5.2147,-2.6437,1.1396,-3.3673,0.60143,-2.9904,0.85804,-1.0672 171 | -3.9071,-0.70434,-1.1908,-1.1426,-3.4724,-3.5811,-5.4645,-10.291,-11.005,-9.1921,-5.7183,-6.8471,-2.3699,2.3266,2.9449,-0.36217,0.8194,-0.72686,-5.4403,-6.024 172 | 3.8025,2.0241,4.951,2.9422,-4.0834,-0.94409,-5.428,-4.8405,-3.9021,1.0204,-0.38246,3.6509,1.5222,-1.6301,1.3127,4.5738,6.266,9.3362,7.3834,5.5465 173 | -1.4639,-0.94802,-1.294,-3.7412,-5.1644,-5.7665,-5.3022,-1.6229,1.9267,6.5261,4.6724,5.1279,3.682,1.0398,-1.754,-0.62721,-2.1518,-0.82516,-5.671,0.13656 174 | -4.8377,-1.7614,0.33576,-0.1547,-2.9486,-3.0644,-6.8051,-10.174,-8.2749,-9.5026,-3.7999,-7.3748,-1.1075,2.4045,2.9419,-1.3283,0.31302,-1.4722,-3.5592,-4.4558 175 | -4.156,-1.3109,2.7426,1.9713,3.8857,7.7128,6.3895,0.75738,-3.466,-6.9257,-9.8963,-10.787,-5.572,-2.9198,0.022655,-1.4375,4.072,0.071495,2.4308,-1.5274 176 | 
2.8036,-3.7129,-5.5337,-6.7489,-4.5649,-3.5487,-0.39843,-0.59881,3.0839,4.585,3.3868,7.7417,4.5482,6.3589,3.1168,0.73128,-8.2262,-6.9686,-3.6635,-2.1118 177 | 3.5535,-2.9147,-3.1779,-6.2853,-4.8513,-3.9983,0.96237,1.4731,0.97245,4.9921,4.641,7.284,4.9555,4.5214,4.619,-0.25061,-5.6478,-6.6948,-5.6035,-3.2762 178 | 6.3369,6.5947,3.6565,7.0428,1.0627,-0.42176,-1.7979,-0.95426,0.39479,0.9358,2.9199,2.9744,2.1744,2.8631,3.5269,-0.21575,-0.42164,-0.84606,-1.2805,1.9878 179 | 1.7834,0.29228,3.7312,2.1945,-5.5531,-0.49699,-4.8555,-5.5822,-2.3467,1.6666,-1.2253,2.5183,2.5195,-1.3427,0.72884,3.5916,5.1405,9.4114,9.0326,5.8292 180 | 4.8791,-4.0631,-3.9514,-5.5593,-5.6913,-4.5858,-2.2513,-0.44873,2.2898,4.674,5.0885,8.7238,3.0147,5.6966,3.9254,0.8041,-7.1123,-7.6343,-2.6138,-4.0265 181 | -------------------------------------------------------------------------------- /pics_for_presentation/Work_Flow.JPG: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/afaq-ahmad/PCA-SVM-Features-based-Face-Images-Classification/feb655c6899d94586afa646e69a8c26066dd8367/pics_for_presentation/Work_Flow.JPG -------------------------------------------------------------------------------- /pics_for_presentation/result_With_pca.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/afaq-ahmad/PCA-SVM-Features-based-Face-Images-Classification/feb655c6899d94586afa646e69a8c26066dd8367/pics_for_presentation/result_With_pca.jpg -------------------------------------------------------------------------------- /pics_for_presentation/sample.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/afaq-ahmad/PCA-SVM-Features-based-Face-Images-Classification/feb655c6899d94586afa646e69a8c26066dd8367/pics_for_presentation/sample.jpg -------------------------------------------------------------------------------- 
/pics_for_presentation/score_Without_pca.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/afaq-ahmad/PCA-SVM-Features-based-Face-Images-Classification/feb655c6899d94586afa646e69a8c26066dd8367/pics_for_presentation/score_Without_pca.jpg -------------------------------------------------------------------------------- /plot_face_recognition.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "%matplotlib inline" 10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "\n", 17 | "# Faces recognition example using eigenfaces and SVMs\n", 18 | "\n", 19 | "\n", 20 | "The dataset used in this example is a preprocessed excerpt of the\n", 21 | "\"Labeled Faces in the Wild\", aka LFW_:\n", 22 | "\n", 23 | " http://vis-www.cs.umass.edu/lfw/lfw-funneled.tgz (233MB)\n", 24 | "\n", 25 | "\n", 26 | "Expected results for the top 5 most represented people in the dataset:\n", 27 | "\n", 28 | "================== ============ ======= ========== =======\n", 29 | " precision recall f1-score support\n", 30 | "================== ============ ======= ========== =======\n", 31 | " Ariel Sharon 0.67 0.92 0.77 13\n", 32 | " Colin Powell 0.75 0.78 0.76 60\n", 33 | " Donald Rumsfeld 0.78 0.67 0.72 27\n", 34 | " George W Bush 0.86 0.86 0.86 146\n", 35 | "Gerhard Schroeder 0.76 0.76 0.76 25\n", 36 | " Hugo Chavez 0.67 0.67 0.67 15\n", 37 | " Tony Blair 0.81 0.69 0.75 36\n", 38 | "\n", 39 | " avg / total 0.80 0.80 0.80 322\n", 40 | "================== ============ ======= ========== =======\n", 41 | "\n", 42 | "\n" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 1, 48 | "metadata": {}, 49 | "outputs": [ 50 | { 51 | "name": "stderr", 52 | "output_type": "stream", 53 | "text": [ 54 | "Downloading 
LFW metadata: https://ndownloader.figshare.com/files/5976012\n", 55 | "2019-02-25 15:31:52,718 Downloading LFW metadata: https://ndownloader.figshare.com/files/5976012\n" 56 | ] 57 | }, 58 | { 59 | "name": "stdout", 60 | "output_type": "stream", 61 | "text": [ 62 | "Automatically created module for IPython interactive environment\n" 63 | ] 64 | }, 65 | { 66 | "name": "stderr", 67 | "output_type": "stream", 68 | "text": [ 69 | "Downloading LFW metadata: https://ndownloader.figshare.com/files/5976009\n", 70 | "2019-02-25 15:31:58,561 Downloading LFW metadata: https://ndownloader.figshare.com/files/5976009\n", 71 | "Downloading LFW metadata: https://ndownloader.figshare.com/files/5976006\n", 72 | "2019-02-25 15:32:00,772 Downloading LFW metadata: https://ndownloader.figshare.com/files/5976006\n", 73 | "Downloading LFW data (~200MB): https://ndownloader.figshare.com/files/5976015\n", 74 | "2019-02-25 15:32:06,027 Downloading LFW data (~200MB): https://ndownloader.figshare.com/files/5976015\n" 75 | ] 76 | }, 77 | { 78 | "ename": "ConnectionAbortedError", 79 | "evalue": "[WinError 10053] An established connection was aborted by the software in your host machine", 80 | "output_type": "error", 81 | "traceback": [ 82 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m", 83 | "\u001b[1;31mConnectionAbortedError\u001b[0m Traceback (most recent call last)", 84 | "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m()\u001b[0m\n\u001b[0;32m 23\u001b[0m \u001b[1;31m# Download the data, if not already on disk and load it as numpy arrays\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 24\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 25\u001b[1;33m \u001b[0mlfw_people\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mfetch_lfw_people\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mmin_faces_per_person\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m70\u001b[0m\u001b[1;33m,\u001b[0m 
\u001b[0mresize\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m0.4\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 26\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 27\u001b[0m \u001b[1;31m# introspect the images arrays to find the shapes (for plotting)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 85 | "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\datasets\\lfw.py\u001b[0m in \u001b[0;36mfetch_lfw_people\u001b[1;34m(data_home, funneled, resize, min_faces_per_person, color, slice_, download_if_missing)\u001b[0m\n\u001b[0;32m 322\u001b[0m lfw_home, data_folder_path = check_fetch_lfw(\n\u001b[0;32m 323\u001b[0m \u001b[0mdata_home\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mdata_home\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfunneled\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mfunneled\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 324\u001b[1;33m download_if_missing=download_if_missing)\n\u001b[0m\u001b[0;32m 325\u001b[0m \u001b[0mlogger\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mdebug\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Loading LFW people faces from %s'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mlfw_home\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 326\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n", 86 | "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\datasets\\lfw.py\u001b[0m in \u001b[0;36mcheck_fetch_lfw\u001b[1;34m(data_home, funneled, download_if_missing)\u001b[0m\n\u001b[0;32m 123\u001b[0m logger.info(\"Downloading LFW data (~200MB): %s\",\n\u001b[0;32m 124\u001b[0m archive.url)\n\u001b[1;32m--> 125\u001b[1;33m \u001b[0m_fetch_remote\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0marchive\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mdirname\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mlfw_home\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 126\u001b[0m 
\u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 127\u001b[0m \u001b[1;32mraise\u001b[0m \u001b[0mIOError\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"%s is missing\"\u001b[0m \u001b[1;33m%\u001b[0m \u001b[0marchive_path\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 87 | "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\site-packages\\sklearn\\datasets\\base.py\u001b[0m in \u001b[0;36m_fetch_remote\u001b[1;34m(remote, dirname)\u001b[0m\n\u001b[0;32m 872\u001b[0m file_path = (remote.filename if dirname is None\n\u001b[0;32m 873\u001b[0m else join(dirname, remote.filename))\n\u001b[1;32m--> 874\u001b[1;33m \u001b[0murlretrieve\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mremote\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0murl\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mfile_path\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 875\u001b[0m \u001b[0mchecksum\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0m_sha256\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mfile_path\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 876\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mremote\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mchecksum\u001b[0m \u001b[1;33m!=\u001b[0m \u001b[0mchecksum\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 88 | "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\urllib\\request.py\u001b[0m in \u001b[0;36murlretrieve\u001b[1;34m(url, filename, reporthook, data)\u001b[0m\n\u001b[0;32m 275\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 276\u001b[0m \u001b[1;32mwhile\u001b[0m \u001b[1;32mTrue\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 277\u001b[1;33m \u001b[0mblock\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mfp\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mread\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mbs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 278\u001b[0m \u001b[1;32mif\u001b[0m 
\u001b[1;32mnot\u001b[0m \u001b[0mblock\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 279\u001b[0m \u001b[1;32mbreak\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 89 | "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\http\\client.py\u001b[0m in \u001b[0;36mread\u001b[1;34m(self, amt)\u001b[0m\n\u001b[0;32m 447\u001b[0m \u001b[1;31m# Amount is given, implement using readinto\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 448\u001b[0m \u001b[0mb\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mbytearray\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mamt\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 449\u001b[1;33m \u001b[0mn\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mreadinto\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mb\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 450\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0mmemoryview\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mb\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;33m:\u001b[0m\u001b[0mn\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtobytes\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 451\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 90 | "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\http\\client.py\u001b[0m in \u001b[0;36mreadinto\u001b[1;34m(self, b)\u001b[0m\n\u001b[0;32m 491\u001b[0m \u001b[1;31m# connection, and the user is reading more bytes than will be provided\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 492\u001b[0m \u001b[1;31m# (for example, reading in 1k chunks)\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 493\u001b[1;33m \u001b[0mn\u001b[0m \u001b[1;33m=\u001b[0m 
\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mfp\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mreadinto\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mb\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 494\u001b[0m \u001b[1;32mif\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0mn\u001b[0m \u001b[1;32mand\u001b[0m \u001b[0mb\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 495\u001b[0m \u001b[1;31m# Ideally, we would raise IncompleteRead if the content-length\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 91 | "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\socket.py\u001b[0m in \u001b[0;36mreadinto\u001b[1;34m(self, b)\u001b[0m\n\u001b[0;32m 584\u001b[0m \u001b[1;32mwhile\u001b[0m \u001b[1;32mTrue\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 585\u001b[0m \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 586\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_sock\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mrecv_into\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mb\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 587\u001b[0m \u001b[1;32mexcept\u001b[0m \u001b[0mtimeout\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 588\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_timeout_occurred\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;32mTrue\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 92 | "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\ssl.py\u001b[0m in \u001b[0;36mrecv_into\u001b[1;34m(self, buffer, nbytes, flags)\u001b[0m\n\u001b[0;32m 1007\u001b[0m \u001b[1;34m\"non-zero flags not allowed in calls to recv_into() on %s\"\u001b[0m \u001b[1;33m%\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1008\u001b[0m self.__class__)\n\u001b[1;32m-> 1009\u001b[1;33m \u001b[1;32mreturn\u001b[0m 
\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mread\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mnbytes\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mbuffer\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 1010\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1011\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0msocket\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mrecv_into\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mbuffer\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mnbytes\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mflags\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 93 | "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\ssl.py\u001b[0m in \u001b[0;36mread\u001b[1;34m(self, len, buffer)\u001b[0m\n\u001b[0;32m 869\u001b[0m \u001b[1;32mraise\u001b[0m \u001b[0mValueError\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m\"Read on closed or unwrapped SSL socket.\"\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 870\u001b[0m \u001b[1;32mtry\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 871\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_sslobj\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mread\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mbuffer\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 872\u001b[0m \u001b[1;32mexcept\u001b[0m \u001b[0mSSLError\u001b[0m \u001b[1;32mas\u001b[0m \u001b[0mx\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 873\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mx\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0margs\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;33m==\u001b[0m \u001b[0mSSL_ERROR_EOF\u001b[0m \u001b[1;32mand\u001b[0m 
\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0msuppress_ragged_eofs\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 94 | "\u001b[1;32mC:\\ProgramData\\Anaconda3\\lib\\ssl.py\u001b[0m in \u001b[0;36mread\u001b[1;34m(self, len, buffer)\u001b[0m\n\u001b[0;32m 629\u001b[0m \"\"\"\n\u001b[0;32m 630\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mbuffer\u001b[0m \u001b[1;32mis\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 631\u001b[1;33m \u001b[0mv\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_sslobj\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mread\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mbuffer\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 632\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 633\u001b[0m \u001b[0mv\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_sslobj\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mread\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n", 95 | "\u001b[1;31mConnectionAbortedError\u001b[0m: [WinError 10053] An established connection was aborted by the software in your host machine" 96 | ] 97 | } 98 | ], 99 | "source": [ 100 | "from __future__ import print_function\n", 101 | "\n", 102 | "from time import time\n", 103 | "import logging\n", 104 | "import matplotlib.pyplot as plt\n", 105 | "\n", 106 | "from sklearn.model_selection import train_test_split\n", 107 | "from sklearn.model_selection import GridSearchCV\n", 108 | "from sklearn.datasets import fetch_lfw_people\n", 109 | "from sklearn.metrics import classification_report\n", 110 | "from sklearn.metrics import confusion_matrix\n", 111 | "from sklearn.decomposition import PCA\n", 112 | "from sklearn.svm import SVC\n", 113 | "\n", 114 | "\n", 
115 | "print(__doc__)\n", 116 | "\n", 117 | "# Display progress logs on stdout\n", 118 | "logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')\n", 119 | "\n", 120 | "\n", 121 | "# #############################################################################\n", 122 | "# Download the data, if not already on disk and load it as numpy arrays\n", 123 | "\n", 124 | "lfw_people = fetch_lfw_people(min_faces_per_person=70, resize=0.4)\n", 125 | "\n", 126 | "# introspect the images arrays to find the shapes (for plotting)\n", 127 | "n_samples, h, w = lfw_people.images.shape\n", 128 | "\n", 129 | "# for machine learning we use the 2 data directly (as relative pixel\n", 130 | "# positions info is ignored by this model)\n", 131 | "X = lfw_people.data\n", 132 | "n_features = X.shape[1]\n", 133 | "\n", 134 | "# the label to predict is the id of the person\n", 135 | "y = lfw_people.target\n", 136 | "target_names = lfw_people.target_names\n", 137 | "n_classes = target_names.shape[0]\n", 138 | "\n", 139 | "print(\"Total dataset size:\")\n", 140 | "print(\"n_samples: %d\" % n_samples)\n", 141 | "print(\"n_features: %d\" % n_features)\n", 142 | "print(\"n_classes: %d\" % n_classes)\n", 143 | "\n", 144 | "\n", 145 | "# #############################################################################\n", 146 | "# Split into a training set and a test set using a stratified k fold\n", 147 | "\n", 148 | "# split into a training and testing set\n", 149 | "X_train, X_test, y_train, y_test = train_test_split(\n", 150 | " X, y, test_size=0.25, random_state=42)\n", 151 | "\n", 152 | "\n", 153 | "# #############################################################################\n", 154 | "# Compute a PCA (eigenfaces) on the face dataset (treated as unlabeled\n", 155 | "# dataset): unsupervised feature extraction / dimensionality reduction\n", 156 | "n_components = 150\n", 157 | "\n", 158 | "print(\"Extracting the top %d eigenfaces from %d faces\"\n", 159 | " % (n_components, 
X_train.shape[0]))\n", 160 | "t0 = time()\n", 161 | "pca = PCA(n_components=n_components, svd_solver='randomized',\n", 162 | " whiten=True).fit(X_train)\n", 163 | "print(\"done in %0.3fs\" % (time() - t0))\n", 164 | "\n", 165 | "eigenfaces = pca.components_.reshape((n_components, h, w))\n", 166 | "\n", 167 | "print(\"Projecting the input data on the eigenfaces orthonormal basis\")\n", 168 | "t0 = time()\n", 169 | "X_train_pca = pca.transform(X_train)\n", 170 | "X_test_pca = pca.transform(X_test)\n", 171 | "print(\"done in %0.3fs\" % (time() - t0))\n", 172 | "\n", 173 | "\n", 174 | "# #############################################################################\n", 175 | "# Train a SVM classification model\n", 176 | "\n", 177 | "print(\"Fitting the classifier to the training set\")\n", 178 | "t0 = time()\n", 179 | "param_grid = {'C': [1e3, 5e3, 1e4, 5e4, 1e5],\n", 180 | " 'gamma': [0.0001, 0.0005, 0.001, 0.005, 0.01, 0.1], }\n", 181 | "clf = GridSearchCV(SVC(kernel='rbf', class_weight='balanced'),\n", 182 | " param_grid, cv=5)\n", 183 | "clf = clf.fit(X_train_pca, y_train)\n", 184 | "print(\"done in %0.3fs\" % (time() - t0))\n", 185 | "print(\"Best estimator found by grid search:\")\n", 186 | "print(clf.best_estimator_)\n", 187 | "\n", 188 | "\n", 189 | "# #############################################################################\n", 190 | "# Quantitative evaluation of the model quality on the test set\n", 191 | "\n", 192 | "print(\"Predicting people's names on the test set\")\n", 193 | "t0 = time()\n", 194 | "y_pred = clf.predict(X_test_pca)\n", 195 | "print(\"done in %0.3fs\" % (time() - t0))\n", 196 | "\n", 197 | "print(classification_report(y_test, y_pred, target_names=target_names))\n", 198 | "print(confusion_matrix(y_test, y_pred, labels=range(n_classes)))\n", 199 | "\n", 200 | "\n", 201 | "# #############################################################################\n", 202 | "# Qualitative evaluation of the predictions using matplotlib\n", 203 | "\n", 
204 | "def plot_gallery(images, titles, h, w, n_row=3, n_col=4):\n", 205 | " \"\"\"Helper function to plot a gallery of portraits\"\"\"\n", 206 | " plt.figure(figsize=(1.8 * n_col, 2.4 * n_row))\n", 207 | " plt.subplots_adjust(bottom=0, left=.01, right=.99, top=.90, hspace=.35)\n", 208 | " for i in range(n_row * n_col):\n", 209 | " plt.subplot(n_row, n_col, i + 1)\n", 210 | " plt.imshow(images[i].reshape((h, w)), cmap=plt.cm.gray)\n", 211 | " plt.title(titles[i], size=12)\n", 212 | " plt.xticks(())\n", 213 | " plt.yticks(())\n", 214 | "\n", 215 | "\n", 216 | "# plot the result of the prediction on a portion of the test set\n", 217 | "\n", 218 | "def title(y_pred, y_test, target_names, i):\n", 219 | " pred_name = target_names[y_pred[i]].rsplit(' ', 1)[-1]\n", 220 | " true_name = target_names[y_test[i]].rsplit(' ', 1)[-1]\n", 221 | " return 'predicted: %s\\ntrue: %s' % (pred_name, true_name)\n", 222 | "\n", 223 | "prediction_titles = [title(y_pred, y_test, target_names, i)\n", 224 | " for i in range(y_pred.shape[0])]\n", 225 | "\n", 226 | "plot_gallery(X_test, prediction_titles, h, w)\n", 227 | "\n", 228 | "# plot the gallery of the most significative eigenfaces\n", 229 | "\n", 230 | "eigenface_titles = [\"eigenface %d\" % i for i in range(eigenfaces.shape[0])]\n", 231 | "plot_gallery(eigenfaces, eigenface_titles, h, w)\n", 232 | "\n", 233 | "plt.show()" 234 | ] 235 | }, 236 | { 237 | "cell_type": "code", 238 | "execution_count": null, 239 | "metadata": {}, 240 | "outputs": [], 241 | "source": [] 242 | } 243 | ], 244 | "metadata": { 245 | "kernelspec": { 246 | "display_name": "Python 3", 247 | "language": "python", 248 | "name": "python3" 249 | }, 250 | "language_info": { 251 | "codemirror_mode": { 252 | "name": "ipython", 253 | "version": 3 254 | }, 255 | "file_extension": ".py", 256 | "mimetype": "text/x-python", 257 | "name": "python", 258 | "nbconvert_exporter": "python", 259 | "pygments_lexer": "ipython3", 260 | "version": "3.6.5" 261 | } 262 | }, 263 | 
"nbformat": 4, 264 | "nbformat_minor": 1 265 | } 266 | --------------------------------------------------------------------------------