├── .DS_Store ├── Assignments ├── .DS_Store ├── Assignment 1 Answers │ ├── diabetes.csv │ ├── diabetes.ipynb │ ├── insurance.csv │ └── insurance.ipynb ├── Assignment 1 │ ├── .DS_Store │ ├── Datasets │ │ ├── diabetes.csv │ │ └── insurance.csv │ ├── iAAA - Assignment 1 - Eng.pdf │ └── iAAA - Assignment 1 - Per.pdf ├── Assignment 2 Solution │ └── Assignment_2_Solution.ipynb ├── Assignment 2 │ ├── iAAA - Assignment 2 - En.pdf │ └── iAAA - Assignment 2 - Per.pdf ├── Assignment 3 Solution │ ├── Mall.ipynb │ └── Mall_Customers.csv ├── Assignment 3 │ ├── Mall_Customers.csv │ ├── iAAA - Assignment 3 - En.pdf │ └── iAAA - Assignment 3 - Per.pdf ├── Assignment 4 Solution │ ├── IP-Solution.ipynb │ └── logo.png ├── Assignment 4 │ ├── iAAA - Assignment 4 - En.pdf │ └── iAAA - Assignment 4- Per.pdf ├── Assignment 5 Solution │ └── Assignment 5 Solution.ipynb ├── Assignment 5 │ ├── iAAA - Assignment 5 - En.pdf │ └── iAAA - Assignment 5 - Per.pdf ├── Assignment 6 │ ├── iAAA - Assignment 6 - En.pdf │ └── iAAA - Assignment 6 - Per.pdf ├── Assignment 7 Solution │ └── Assignment_7_Solution.ipynb ├── Assignment 7 │ ├── iAAA - Assignment 7 - En.pdf │ └── iAAA - Assignment 7 - Per.pdf └── Template.ipynb ├── AutoEncoders ├── autoencoder-torch-notebook.ipynb └── variational-autoencoder-pytorch.ipynb ├── CNN ├── .DS_Store ├── Augmentation.ipynb ├── CNN.ipynb └── dogs-vs-cats-augmentation-model.ipynb ├── Classification ├── breast-cancer.csv └── classification_notebook.ipynb ├── Clustering ├── Customers.csv └── clustering.ipynb ├── EDA-Linear Regression ├── housing.csv └── linear_regression2.ipynb ├── Image Processing ├── Advanced_Image_Processing.ipynb ├── Image Processing.ipynb ├── haarcascade_frontalface_default.xml ├── image1.jpg └── image2.jpg ├── Neural Network ├── .DS_Store ├── Cancer Torch │ ├── breast-cancer.csv │ └── cancer_torch.ipynb ├── NN.ipynb └── pytorch_toturial_solution.ipynb ├── PyDicom └── pydicom-notebook.ipynb ├── README.md ├── RNN └── RNN.pdf └── Torch Enhancement ├── 
PyTorch Enhancement.ipynb └── data_tutorial.ipynb /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/.DS_Store -------------------------------------------------------------------------------- /Assignments/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/.DS_Store -------------------------------------------------------------------------------- /Assignments/Assignment 1 Answers/diabetes.csv: -------------------------------------------------------------------------------- 1 | Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome 2 | 6,148,72,35,0,33.6,0.627,50,1 3 | 1,85,66,29,0,26.6,0.351,31,0 4 | 8,183,64,0,0,23.3,0.672,32,1 5 | 1,89,66,23,94,28.1,0.167,21,0 6 | 0,137,40,35,168,43.1,2.288,33,1 7 | 5,116,74,0,0,25.6,0.201,30,0 8 | 3,78,50,32,88,31,0.248,26,1 9 | 10,115,0,0,0,35.3,0.134,29,0 10 | 2,197,70,45,543,30.5,0.158,53,1 11 | 8,125,96,0,0,0,0.232,54,1 12 | 4,110,92,0,0,37.6,0.191,30,0 13 | 10,168,74,0,0,38,0.537,34,1 14 | 10,139,80,0,0,27.1,1.441,57,0 15 | 1,189,60,23,846,30.1,0.398,59,1 16 | 5,166,72,19,175,25.8,0.587,51,1 17 | 7,100,0,0,0,30,0.484,32,1 18 | 0,118,84,47,230,45.8,0.551,31,1 19 | 7,107,74,0,0,29.6,0.254,31,1 20 | 1,103,30,38,83,43.3,0.183,33,0 21 | 1,115,70,30,96,34.6,0.529,32,1 22 | 3,126,88,41,235,39.3,0.704,27,0 23 | 8,99,84,0,0,35.4,0.388,50,0 24 | 7,196,90,0,0,39.8,0.451,41,1 25 | 9,119,80,35,0,29,0.263,29,1 26 | 11,143,94,33,146,36.6,0.254,51,1 27 | 10,125,70,26,115,31.1,0.205,41,1 28 | 7,147,76,0,0,39.4,0.257,43,1 29 | 1,97,66,15,140,23.2,0.487,22,0 30 | 13,145,82,19,110,22.2,0.245,57,0 31 | 5,117,92,0,0,34.1,0.337,38,0 32 | 5,109,75,26,0,36,0.546,60,0 33 | 3,158,76,36,245,31.6,0.851,28,1 34 | 
3,88,58,11,54,24.8,0.267,22,0 35 | 6,92,92,0,0,19.9,0.188,28,0 36 | 10,122,78,31,0,27.6,0.512,45,0 37 | 4,103,60,33,192,24,0.966,33,0 38 | 11,138,76,0,0,33.2,0.42,35,0 39 | 9,102,76,37,0,32.9,0.665,46,1 40 | 2,90,68,42,0,38.2,0.503,27,1 41 | 4,111,72,47,207,37.1,1.39,56,1 42 | 3,180,64,25,70,34,0.271,26,0 43 | 7,133,84,0,0,40.2,0.696,37,0 44 | 7,106,92,18,0,22.7,0.235,48,0 45 | 9,171,110,24,240,45.4,0.721,54,1 46 | 7,159,64,0,0,27.4,0.294,40,0 47 | 0,180,66,39,0,42,1.893,25,1 48 | 1,146,56,0,0,29.7,0.564,29,0 49 | 2,71,70,27,0,28,0.586,22,0 50 | 7,103,66,32,0,39.1,0.344,31,1 51 | 7,105,0,0,0,0,0.305,24,0 52 | 1,103,80,11,82,19.4,0.491,22,0 53 | 1,101,50,15,36,24.2,0.526,26,0 54 | 5,88,66,21,23,24.4,0.342,30,0 55 | 8,176,90,34,300,33.7,0.467,58,1 56 | 7,150,66,42,342,34.7,0.718,42,0 57 | 1,73,50,10,0,23,0.248,21,0 58 | 7,187,68,39,304,37.7,0.254,41,1 59 | 0,100,88,60,110,46.8,0.962,31,0 60 | 0,146,82,0,0,40.5,1.781,44,0 61 | 0,105,64,41,142,41.5,0.173,22,0 62 | 2,84,0,0,0,0,0.304,21,0 63 | 8,133,72,0,0,32.9,0.27,39,1 64 | 5,44,62,0,0,25,0.587,36,0 65 | 2,141,58,34,128,25.4,0.699,24,0 66 | 7,114,66,0,0,32.8,0.258,42,1 67 | 5,99,74,27,0,29,0.203,32,0 68 | 0,109,88,30,0,32.5,0.855,38,1 69 | 2,109,92,0,0,42.7,0.845,54,0 70 | 1,95,66,13,38,19.6,0.334,25,0 71 | 4,146,85,27,100,28.9,0.189,27,0 72 | 2,100,66,20,90,32.9,0.867,28,1 73 | 5,139,64,35,140,28.6,0.411,26,0 74 | 13,126,90,0,0,43.4,0.583,42,1 75 | 4,129,86,20,270,35.1,0.231,23,0 76 | 1,79,75,30,0,32,0.396,22,0 77 | 1,0,48,20,0,24.7,0.14,22,0 78 | 7,62,78,0,0,32.6,0.391,41,0 79 | 5,95,72,33,0,37.7,0.37,27,0 80 | 0,131,0,0,0,43.2,0.27,26,1 81 | 2,112,66,22,0,25,0.307,24,0 82 | 3,113,44,13,0,22.4,0.14,22,0 83 | 2,74,0,0,0,0,0.102,22,0 84 | 7,83,78,26,71,29.3,0.767,36,0 85 | 0,101,65,28,0,24.6,0.237,22,0 86 | 5,137,108,0,0,48.8,0.227,37,1 87 | 2,110,74,29,125,32.4,0.698,27,0 88 | 13,106,72,54,0,36.6,0.178,45,0 89 | 2,100,68,25,71,38.5,0.324,26,0 90 | 15,136,70,32,110,37.1,0.153,43,1 91 | 1,107,68,19,0,26.5,0.165,24,0 92 
| 1,80,55,0,0,19.1,0.258,21,0 93 | 4,123,80,15,176,32,0.443,34,0 94 | 7,81,78,40,48,46.7,0.261,42,0 95 | 4,134,72,0,0,23.8,0.277,60,1 96 | 2,142,82,18,64,24.7,0.761,21,0 97 | 6,144,72,27,228,33.9,0.255,40,0 98 | 2,92,62,28,0,31.6,0.13,24,0 99 | 1,71,48,18,76,20.4,0.323,22,0 100 | 6,93,50,30,64,28.7,0.356,23,0 101 | 1,122,90,51,220,49.7,0.325,31,1 102 | 1,163,72,0,0,39,1.222,33,1 103 | 1,151,60,0,0,26.1,0.179,22,0 104 | 0,125,96,0,0,22.5,0.262,21,0 105 | 1,81,72,18,40,26.6,0.283,24,0 106 | 2,85,65,0,0,39.6,0.93,27,0 107 | 1,126,56,29,152,28.7,0.801,21,0 108 | 1,96,122,0,0,22.4,0.207,27,0 109 | 4,144,58,28,140,29.5,0.287,37,0 110 | 3,83,58,31,18,34.3,0.336,25,0 111 | 0,95,85,25,36,37.4,0.247,24,1 112 | 3,171,72,33,135,33.3,0.199,24,1 113 | 8,155,62,26,495,34,0.543,46,1 114 | 1,89,76,34,37,31.2,0.192,23,0 115 | 4,76,62,0,0,34,0.391,25,0 116 | 7,160,54,32,175,30.5,0.588,39,1 117 | 4,146,92,0,0,31.2,0.539,61,1 118 | 5,124,74,0,0,34,0.22,38,1 119 | 5,78,48,0,0,33.7,0.654,25,0 120 | 4,97,60,23,0,28.2,0.443,22,0 121 | 4,99,76,15,51,23.2,0.223,21,0 122 | 0,162,76,56,100,53.2,0.759,25,1 123 | 6,111,64,39,0,34.2,0.26,24,0 124 | 2,107,74,30,100,33.6,0.404,23,0 125 | 5,132,80,0,0,26.8,0.186,69,0 126 | 0,113,76,0,0,33.3,0.278,23,1 127 | 1,88,30,42,99,55,0.496,26,1 128 | 3,120,70,30,135,42.9,0.452,30,0 129 | 1,118,58,36,94,33.3,0.261,23,0 130 | 1,117,88,24,145,34.5,0.403,40,1 131 | 0,105,84,0,0,27.9,0.741,62,1 132 | 4,173,70,14,168,29.7,0.361,33,1 133 | 9,122,56,0,0,33.3,1.114,33,1 134 | 3,170,64,37,225,34.5,0.356,30,1 135 | 8,84,74,31,0,38.3,0.457,39,0 136 | 2,96,68,13,49,21.1,0.647,26,0 137 | 2,125,60,20,140,33.8,0.088,31,0 138 | 0,100,70,26,50,30.8,0.597,21,0 139 | 0,93,60,25,92,28.7,0.532,22,0 140 | 0,129,80,0,0,31.2,0.703,29,0 141 | 5,105,72,29,325,36.9,0.159,28,0 142 | 3,128,78,0,0,21.1,0.268,55,0 143 | 5,106,82,30,0,39.5,0.286,38,0 144 | 2,108,52,26,63,32.5,0.318,22,0 145 | 10,108,66,0,0,32.4,0.272,42,1 146 | 4,154,62,31,284,32.8,0.237,23,0 147 | 0,102,75,23,0,0,0.572,21,0 
148 | 9,57,80,37,0,32.8,0.096,41,0 149 | 2,106,64,35,119,30.5,1.4,34,0 150 | 5,147,78,0,0,33.7,0.218,65,0 151 | 2,90,70,17,0,27.3,0.085,22,0 152 | 1,136,74,50,204,37.4,0.399,24,0 153 | 4,114,65,0,0,21.9,0.432,37,0 154 | 9,156,86,28,155,34.3,1.189,42,1 155 | 1,153,82,42,485,40.6,0.687,23,0 156 | 8,188,78,0,0,47.9,0.137,43,1 157 | 7,152,88,44,0,50,0.337,36,1 158 | 2,99,52,15,94,24.6,0.637,21,0 159 | 1,109,56,21,135,25.2,0.833,23,0 160 | 2,88,74,19,53,29,0.229,22,0 161 | 17,163,72,41,114,40.9,0.817,47,1 162 | 4,151,90,38,0,29.7,0.294,36,0 163 | 7,102,74,40,105,37.2,0.204,45,0 164 | 0,114,80,34,285,44.2,0.167,27,0 165 | 2,100,64,23,0,29.7,0.368,21,0 166 | 0,131,88,0,0,31.6,0.743,32,1 167 | 6,104,74,18,156,29.9,0.722,41,1 168 | 3,148,66,25,0,32.5,0.256,22,0 169 | 4,120,68,0,0,29.6,0.709,34,0 170 | 4,110,66,0,0,31.9,0.471,29,0 171 | 3,111,90,12,78,28.4,0.495,29,0 172 | 6,102,82,0,0,30.8,0.18,36,1 173 | 6,134,70,23,130,35.4,0.542,29,1 174 | 2,87,0,23,0,28.9,0.773,25,0 175 | 1,79,60,42,48,43.5,0.678,23,0 176 | 2,75,64,24,55,29.7,0.37,33,0 177 | 8,179,72,42,130,32.7,0.719,36,1 178 | 6,85,78,0,0,31.2,0.382,42,0 179 | 0,129,110,46,130,67.1,0.319,26,1 180 | 5,143,78,0,0,45,0.19,47,0 181 | 5,130,82,0,0,39.1,0.956,37,1 182 | 6,87,80,0,0,23.2,0.084,32,0 183 | 0,119,64,18,92,34.9,0.725,23,0 184 | 1,0,74,20,23,27.7,0.299,21,0 185 | 5,73,60,0,0,26.8,0.268,27,0 186 | 4,141,74,0,0,27.6,0.244,40,0 187 | 7,194,68,28,0,35.9,0.745,41,1 188 | 8,181,68,36,495,30.1,0.615,60,1 189 | 1,128,98,41,58,32,1.321,33,1 190 | 8,109,76,39,114,27.9,0.64,31,1 191 | 5,139,80,35,160,31.6,0.361,25,1 192 | 3,111,62,0,0,22.6,0.142,21,0 193 | 9,123,70,44,94,33.1,0.374,40,0 194 | 7,159,66,0,0,30.4,0.383,36,1 195 | 11,135,0,0,0,52.3,0.578,40,1 196 | 8,85,55,20,0,24.4,0.136,42,0 197 | 5,158,84,41,210,39.4,0.395,29,1 198 | 1,105,58,0,0,24.3,0.187,21,0 199 | 3,107,62,13,48,22.9,0.678,23,1 200 | 4,109,64,44,99,34.8,0.905,26,1 201 | 4,148,60,27,318,30.9,0.15,29,1 202 | 0,113,80,16,0,31,0.874,21,0 203 | 
1,138,82,0,0,40.1,0.236,28,0 204 | 0,108,68,20,0,27.3,0.787,32,0 205 | 2,99,70,16,44,20.4,0.235,27,0 206 | 6,103,72,32,190,37.7,0.324,55,0 207 | 5,111,72,28,0,23.9,0.407,27,0 208 | 8,196,76,29,280,37.5,0.605,57,1 209 | 5,162,104,0,0,37.7,0.151,52,1 210 | 1,96,64,27,87,33.2,0.289,21,0 211 | 7,184,84,33,0,35.5,0.355,41,1 212 | 2,81,60,22,0,27.7,0.29,25,0 213 | 0,147,85,54,0,42.8,0.375,24,0 214 | 7,179,95,31,0,34.2,0.164,60,0 215 | 0,140,65,26,130,42.6,0.431,24,1 216 | 9,112,82,32,175,34.2,0.26,36,1 217 | 12,151,70,40,271,41.8,0.742,38,1 218 | 5,109,62,41,129,35.8,0.514,25,1 219 | 6,125,68,30,120,30,0.464,32,0 220 | 5,85,74,22,0,29,1.224,32,1 221 | 5,112,66,0,0,37.8,0.261,41,1 222 | 0,177,60,29,478,34.6,1.072,21,1 223 | 2,158,90,0,0,31.6,0.805,66,1 224 | 7,119,0,0,0,25.2,0.209,37,0 225 | 7,142,60,33,190,28.8,0.687,61,0 226 | 1,100,66,15,56,23.6,0.666,26,0 227 | 1,87,78,27,32,34.6,0.101,22,0 228 | 0,101,76,0,0,35.7,0.198,26,0 229 | 3,162,52,38,0,37.2,0.652,24,1 230 | 4,197,70,39,744,36.7,2.329,31,0 231 | 0,117,80,31,53,45.2,0.089,24,0 232 | 4,142,86,0,0,44,0.645,22,1 233 | 6,134,80,37,370,46.2,0.238,46,1 234 | 1,79,80,25,37,25.4,0.583,22,0 235 | 4,122,68,0,0,35,0.394,29,0 236 | 3,74,68,28,45,29.7,0.293,23,0 237 | 4,171,72,0,0,43.6,0.479,26,1 238 | 7,181,84,21,192,35.9,0.586,51,1 239 | 0,179,90,27,0,44.1,0.686,23,1 240 | 9,164,84,21,0,30.8,0.831,32,1 241 | 0,104,76,0,0,18.4,0.582,27,0 242 | 1,91,64,24,0,29.2,0.192,21,0 243 | 4,91,70,32,88,33.1,0.446,22,0 244 | 3,139,54,0,0,25.6,0.402,22,1 245 | 6,119,50,22,176,27.1,1.318,33,1 246 | 2,146,76,35,194,38.2,0.329,29,0 247 | 9,184,85,15,0,30,1.213,49,1 248 | 10,122,68,0,0,31.2,0.258,41,0 249 | 0,165,90,33,680,52.3,0.427,23,0 250 | 9,124,70,33,402,35.4,0.282,34,0 251 | 1,111,86,19,0,30.1,0.143,23,0 252 | 9,106,52,0,0,31.2,0.38,42,0 253 | 2,129,84,0,0,28,0.284,27,0 254 | 2,90,80,14,55,24.4,0.249,24,0 255 | 0,86,68,32,0,35.8,0.238,25,0 256 | 12,92,62,7,258,27.6,0.926,44,1 257 | 1,113,64,35,0,33.6,0.543,21,1 258 | 
3,111,56,39,0,30.1,0.557,30,0 259 | 2,114,68,22,0,28.7,0.092,25,0 260 | 1,193,50,16,375,25.9,0.655,24,0 261 | 11,155,76,28,150,33.3,1.353,51,1 262 | 3,191,68,15,130,30.9,0.299,34,0 263 | 3,141,0,0,0,30,0.761,27,1 264 | 4,95,70,32,0,32.1,0.612,24,0 265 | 3,142,80,15,0,32.4,0.2,63,0 266 | 4,123,62,0,0,32,0.226,35,1 267 | 5,96,74,18,67,33.6,0.997,43,0 268 | 0,138,0,0,0,36.3,0.933,25,1 269 | 2,128,64,42,0,40,1.101,24,0 270 | 0,102,52,0,0,25.1,0.078,21,0 271 | 2,146,0,0,0,27.5,0.24,28,1 272 | 10,101,86,37,0,45.6,1.136,38,1 273 | 2,108,62,32,56,25.2,0.128,21,0 274 | 3,122,78,0,0,23,0.254,40,0 275 | 1,71,78,50,45,33.2,0.422,21,0 276 | 13,106,70,0,0,34.2,0.251,52,0 277 | 2,100,70,52,57,40.5,0.677,25,0 278 | 7,106,60,24,0,26.5,0.296,29,1 279 | 0,104,64,23,116,27.8,0.454,23,0 280 | 5,114,74,0,0,24.9,0.744,57,0 281 | 2,108,62,10,278,25.3,0.881,22,0 282 | 0,146,70,0,0,37.9,0.334,28,1 283 | 10,129,76,28,122,35.9,0.28,39,0 284 | 7,133,88,15,155,32.4,0.262,37,0 285 | 7,161,86,0,0,30.4,0.165,47,1 286 | 2,108,80,0,0,27,0.259,52,1 287 | 7,136,74,26,135,26,0.647,51,0 288 | 5,155,84,44,545,38.7,0.619,34,0 289 | 1,119,86,39,220,45.6,0.808,29,1 290 | 4,96,56,17,49,20.8,0.34,26,0 291 | 5,108,72,43,75,36.1,0.263,33,0 292 | 0,78,88,29,40,36.9,0.434,21,0 293 | 0,107,62,30,74,36.6,0.757,25,1 294 | 2,128,78,37,182,43.3,1.224,31,1 295 | 1,128,48,45,194,40.5,0.613,24,1 296 | 0,161,50,0,0,21.9,0.254,65,0 297 | 6,151,62,31,120,35.5,0.692,28,0 298 | 2,146,70,38,360,28,0.337,29,1 299 | 0,126,84,29,215,30.7,0.52,24,0 300 | 14,100,78,25,184,36.6,0.412,46,1 301 | 8,112,72,0,0,23.6,0.84,58,0 302 | 0,167,0,0,0,32.3,0.839,30,1 303 | 2,144,58,33,135,31.6,0.422,25,1 304 | 5,77,82,41,42,35.8,0.156,35,0 305 | 5,115,98,0,0,52.9,0.209,28,1 306 | 3,150,76,0,0,21,0.207,37,0 307 | 2,120,76,37,105,39.7,0.215,29,0 308 | 10,161,68,23,132,25.5,0.326,47,1 309 | 0,137,68,14,148,24.8,0.143,21,0 310 | 0,128,68,19,180,30.5,1.391,25,1 311 | 2,124,68,28,205,32.9,0.875,30,1 312 | 6,80,66,30,0,26.2,0.313,41,0 313 | 
0,106,70,37,148,39.4,0.605,22,0 314 | 2,155,74,17,96,26.6,0.433,27,1 315 | 3,113,50,10,85,29.5,0.626,25,0 316 | 7,109,80,31,0,35.9,1.127,43,1 317 | 2,112,68,22,94,34.1,0.315,26,0 318 | 3,99,80,11,64,19.3,0.284,30,0 319 | 3,182,74,0,0,30.5,0.345,29,1 320 | 3,115,66,39,140,38.1,0.15,28,0 321 | 6,194,78,0,0,23.5,0.129,59,1 322 | 4,129,60,12,231,27.5,0.527,31,0 323 | 3,112,74,30,0,31.6,0.197,25,1 324 | 0,124,70,20,0,27.4,0.254,36,1 325 | 13,152,90,33,29,26.8,0.731,43,1 326 | 2,112,75,32,0,35.7,0.148,21,0 327 | 1,157,72,21,168,25.6,0.123,24,0 328 | 1,122,64,32,156,35.1,0.692,30,1 329 | 10,179,70,0,0,35.1,0.2,37,0 330 | 2,102,86,36,120,45.5,0.127,23,1 331 | 6,105,70,32,68,30.8,0.122,37,0 332 | 8,118,72,19,0,23.1,1.476,46,0 333 | 2,87,58,16,52,32.7,0.166,25,0 334 | 1,180,0,0,0,43.3,0.282,41,1 335 | 12,106,80,0,0,23.6,0.137,44,0 336 | 1,95,60,18,58,23.9,0.26,22,0 337 | 0,165,76,43,255,47.9,0.259,26,0 338 | 0,117,0,0,0,33.8,0.932,44,0 339 | 5,115,76,0,0,31.2,0.343,44,1 340 | 9,152,78,34,171,34.2,0.893,33,1 341 | 7,178,84,0,0,39.9,0.331,41,1 342 | 1,130,70,13,105,25.9,0.472,22,0 343 | 1,95,74,21,73,25.9,0.673,36,0 344 | 1,0,68,35,0,32,0.389,22,0 345 | 5,122,86,0,0,34.7,0.29,33,0 346 | 8,95,72,0,0,36.8,0.485,57,0 347 | 8,126,88,36,108,38.5,0.349,49,0 348 | 1,139,46,19,83,28.7,0.654,22,0 349 | 3,116,0,0,0,23.5,0.187,23,0 350 | 3,99,62,19,74,21.8,0.279,26,0 351 | 5,0,80,32,0,41,0.346,37,1 352 | 4,92,80,0,0,42.2,0.237,29,0 353 | 4,137,84,0,0,31.2,0.252,30,0 354 | 3,61,82,28,0,34.4,0.243,46,0 355 | 1,90,62,12,43,27.2,0.58,24,0 356 | 3,90,78,0,0,42.7,0.559,21,0 357 | 9,165,88,0,0,30.4,0.302,49,1 358 | 1,125,50,40,167,33.3,0.962,28,1 359 | 13,129,0,30,0,39.9,0.569,44,1 360 | 12,88,74,40,54,35.3,0.378,48,0 361 | 1,196,76,36,249,36.5,0.875,29,1 362 | 5,189,64,33,325,31.2,0.583,29,1 363 | 5,158,70,0,0,29.8,0.207,63,0 364 | 5,103,108,37,0,39.2,0.305,65,0 365 | 4,146,78,0,0,38.5,0.52,67,1 366 | 4,147,74,25,293,34.9,0.385,30,0 367 | 5,99,54,28,83,34,0.499,30,0 368 | 
6,124,72,0,0,27.6,0.368,29,1 369 | 0,101,64,17,0,21,0.252,21,0 370 | 3,81,86,16,66,27.5,0.306,22,0 371 | 1,133,102,28,140,32.8,0.234,45,1 372 | 3,173,82,48,465,38.4,2.137,25,1 373 | 0,118,64,23,89,0,1.731,21,0 374 | 0,84,64,22,66,35.8,0.545,21,0 375 | 2,105,58,40,94,34.9,0.225,25,0 376 | 2,122,52,43,158,36.2,0.816,28,0 377 | 12,140,82,43,325,39.2,0.528,58,1 378 | 0,98,82,15,84,25.2,0.299,22,0 379 | 1,87,60,37,75,37.2,0.509,22,0 380 | 4,156,75,0,0,48.3,0.238,32,1 381 | 0,93,100,39,72,43.4,1.021,35,0 382 | 1,107,72,30,82,30.8,0.821,24,0 383 | 0,105,68,22,0,20,0.236,22,0 384 | 1,109,60,8,182,25.4,0.947,21,0 385 | 1,90,62,18,59,25.1,1.268,25,0 386 | 1,125,70,24,110,24.3,0.221,25,0 387 | 1,119,54,13,50,22.3,0.205,24,0 388 | 5,116,74,29,0,32.3,0.66,35,1 389 | 8,105,100,36,0,43.3,0.239,45,1 390 | 5,144,82,26,285,32,0.452,58,1 391 | 3,100,68,23,81,31.6,0.949,28,0 392 | 1,100,66,29,196,32,0.444,42,0 393 | 5,166,76,0,0,45.7,0.34,27,1 394 | 1,131,64,14,415,23.7,0.389,21,0 395 | 4,116,72,12,87,22.1,0.463,37,0 396 | 4,158,78,0,0,32.9,0.803,31,1 397 | 2,127,58,24,275,27.7,1.6,25,0 398 | 3,96,56,34,115,24.7,0.944,39,0 399 | 0,131,66,40,0,34.3,0.196,22,1 400 | 3,82,70,0,0,21.1,0.389,25,0 401 | 3,193,70,31,0,34.9,0.241,25,1 402 | 4,95,64,0,0,32,0.161,31,1 403 | 6,137,61,0,0,24.2,0.151,55,0 404 | 5,136,84,41,88,35,0.286,35,1 405 | 9,72,78,25,0,31.6,0.28,38,0 406 | 5,168,64,0,0,32.9,0.135,41,1 407 | 2,123,48,32,165,42.1,0.52,26,0 408 | 4,115,72,0,0,28.9,0.376,46,1 409 | 0,101,62,0,0,21.9,0.336,25,0 410 | 8,197,74,0,0,25.9,1.191,39,1 411 | 1,172,68,49,579,42.4,0.702,28,1 412 | 6,102,90,39,0,35.7,0.674,28,0 413 | 1,112,72,30,176,34.4,0.528,25,0 414 | 1,143,84,23,310,42.4,1.076,22,0 415 | 1,143,74,22,61,26.2,0.256,21,0 416 | 0,138,60,35,167,34.6,0.534,21,1 417 | 3,173,84,33,474,35.7,0.258,22,1 418 | 1,97,68,21,0,27.2,1.095,22,0 419 | 4,144,82,32,0,38.5,0.554,37,1 420 | 1,83,68,0,0,18.2,0.624,27,0 421 | 3,129,64,29,115,26.4,0.219,28,1 422 | 1,119,88,41,170,45.3,0.507,26,0 423 | 
2,94,68,18,76,26,0.561,21,0 424 | 0,102,64,46,78,40.6,0.496,21,0 425 | 2,115,64,22,0,30.8,0.421,21,0 426 | 8,151,78,32,210,42.9,0.516,36,1 427 | 4,184,78,39,277,37,0.264,31,1 428 | 0,94,0,0,0,0,0.256,25,0 429 | 1,181,64,30,180,34.1,0.328,38,1 430 | 0,135,94,46,145,40.6,0.284,26,0 431 | 1,95,82,25,180,35,0.233,43,1 432 | 2,99,0,0,0,22.2,0.108,23,0 433 | 3,89,74,16,85,30.4,0.551,38,0 434 | 1,80,74,11,60,30,0.527,22,0 435 | 2,139,75,0,0,25.6,0.167,29,0 436 | 1,90,68,8,0,24.5,1.138,36,0 437 | 0,141,0,0,0,42.4,0.205,29,1 438 | 12,140,85,33,0,37.4,0.244,41,0 439 | 5,147,75,0,0,29.9,0.434,28,0 440 | 1,97,70,15,0,18.2,0.147,21,0 441 | 6,107,88,0,0,36.8,0.727,31,0 442 | 0,189,104,25,0,34.3,0.435,41,1 443 | 2,83,66,23,50,32.2,0.497,22,0 444 | 4,117,64,27,120,33.2,0.23,24,0 445 | 8,108,70,0,0,30.5,0.955,33,1 446 | 4,117,62,12,0,29.7,0.38,30,1 447 | 0,180,78,63,14,59.4,2.42,25,1 448 | 1,100,72,12,70,25.3,0.658,28,0 449 | 0,95,80,45,92,36.5,0.33,26,0 450 | 0,104,64,37,64,33.6,0.51,22,1 451 | 0,120,74,18,63,30.5,0.285,26,0 452 | 1,82,64,13,95,21.2,0.415,23,0 453 | 2,134,70,0,0,28.9,0.542,23,1 454 | 0,91,68,32,210,39.9,0.381,25,0 455 | 2,119,0,0,0,19.6,0.832,72,0 456 | 2,100,54,28,105,37.8,0.498,24,0 457 | 14,175,62,30,0,33.6,0.212,38,1 458 | 1,135,54,0,0,26.7,0.687,62,0 459 | 5,86,68,28,71,30.2,0.364,24,0 460 | 10,148,84,48,237,37.6,1.001,51,1 461 | 9,134,74,33,60,25.9,0.46,81,0 462 | 9,120,72,22,56,20.8,0.733,48,0 463 | 1,71,62,0,0,21.8,0.416,26,0 464 | 8,74,70,40,49,35.3,0.705,39,0 465 | 5,88,78,30,0,27.6,0.258,37,0 466 | 10,115,98,0,0,24,1.022,34,0 467 | 0,124,56,13,105,21.8,0.452,21,0 468 | 0,74,52,10,36,27.8,0.269,22,0 469 | 0,97,64,36,100,36.8,0.6,25,0 470 | 8,120,0,0,0,30,0.183,38,1 471 | 6,154,78,41,140,46.1,0.571,27,0 472 | 1,144,82,40,0,41.3,0.607,28,0 473 | 0,137,70,38,0,33.2,0.17,22,0 474 | 0,119,66,27,0,38.8,0.259,22,0 475 | 7,136,90,0,0,29.9,0.21,50,0 476 | 4,114,64,0,0,28.9,0.126,24,0 477 | 0,137,84,27,0,27.3,0.231,59,0 478 | 2,105,80,45,191,33.7,0.711,29,1 479 | 
7,114,76,17,110,23.8,0.466,31,0 480 | 8,126,74,38,75,25.9,0.162,39,0 481 | 4,132,86,31,0,28,0.419,63,0 482 | 3,158,70,30,328,35.5,0.344,35,1 483 | 0,123,88,37,0,35.2,0.197,29,0 484 | 4,85,58,22,49,27.8,0.306,28,0 485 | 0,84,82,31,125,38.2,0.233,23,0 486 | 0,145,0,0,0,44.2,0.63,31,1 487 | 0,135,68,42,250,42.3,0.365,24,1 488 | 1,139,62,41,480,40.7,0.536,21,0 489 | 0,173,78,32,265,46.5,1.159,58,0 490 | 4,99,72,17,0,25.6,0.294,28,0 491 | 8,194,80,0,0,26.1,0.551,67,0 492 | 2,83,65,28,66,36.8,0.629,24,0 493 | 2,89,90,30,0,33.5,0.292,42,0 494 | 4,99,68,38,0,32.8,0.145,33,0 495 | 4,125,70,18,122,28.9,1.144,45,1 496 | 3,80,0,0,0,0,0.174,22,0 497 | 6,166,74,0,0,26.6,0.304,66,0 498 | 5,110,68,0,0,26,0.292,30,0 499 | 2,81,72,15,76,30.1,0.547,25,0 500 | 7,195,70,33,145,25.1,0.163,55,1 501 | 6,154,74,32,193,29.3,0.839,39,0 502 | 2,117,90,19,71,25.2,0.313,21,0 503 | 3,84,72,32,0,37.2,0.267,28,0 504 | 6,0,68,41,0,39,0.727,41,1 505 | 7,94,64,25,79,33.3,0.738,41,0 506 | 3,96,78,39,0,37.3,0.238,40,0 507 | 10,75,82,0,0,33.3,0.263,38,0 508 | 0,180,90,26,90,36.5,0.314,35,1 509 | 1,130,60,23,170,28.6,0.692,21,0 510 | 2,84,50,23,76,30.4,0.968,21,0 511 | 8,120,78,0,0,25,0.409,64,0 512 | 12,84,72,31,0,29.7,0.297,46,1 513 | 0,139,62,17,210,22.1,0.207,21,0 514 | 9,91,68,0,0,24.2,0.2,58,0 515 | 2,91,62,0,0,27.3,0.525,22,0 516 | 3,99,54,19,86,25.6,0.154,24,0 517 | 3,163,70,18,105,31.6,0.268,28,1 518 | 9,145,88,34,165,30.3,0.771,53,1 519 | 7,125,86,0,0,37.6,0.304,51,0 520 | 13,76,60,0,0,32.8,0.18,41,0 521 | 6,129,90,7,326,19.6,0.582,60,0 522 | 2,68,70,32,66,25,0.187,25,0 523 | 3,124,80,33,130,33.2,0.305,26,0 524 | 6,114,0,0,0,0,0.189,26,0 525 | 9,130,70,0,0,34.2,0.652,45,1 526 | 3,125,58,0,0,31.6,0.151,24,0 527 | 3,87,60,18,0,21.8,0.444,21,0 528 | 1,97,64,19,82,18.2,0.299,21,0 529 | 3,116,74,15,105,26.3,0.107,24,0 530 | 0,117,66,31,188,30.8,0.493,22,0 531 | 0,111,65,0,0,24.6,0.66,31,0 532 | 2,122,60,18,106,29.8,0.717,22,0 533 | 0,107,76,0,0,45.3,0.686,24,0 534 | 1,86,66,52,65,41.3,0.917,29,0 535 
| 6,91,0,0,0,29.8,0.501,31,0 536 | 1,77,56,30,56,33.3,1.251,24,0 537 | 4,132,0,0,0,32.9,0.302,23,1 538 | 0,105,90,0,0,29.6,0.197,46,0 539 | 0,57,60,0,0,21.7,0.735,67,0 540 | 0,127,80,37,210,36.3,0.804,23,0 541 | 3,129,92,49,155,36.4,0.968,32,1 542 | 8,100,74,40,215,39.4,0.661,43,1 543 | 3,128,72,25,190,32.4,0.549,27,1 544 | 10,90,85,32,0,34.9,0.825,56,1 545 | 4,84,90,23,56,39.5,0.159,25,0 546 | 1,88,78,29,76,32,0.365,29,0 547 | 8,186,90,35,225,34.5,0.423,37,1 548 | 5,187,76,27,207,43.6,1.034,53,1 549 | 4,131,68,21,166,33.1,0.16,28,0 550 | 1,164,82,43,67,32.8,0.341,50,0 551 | 4,189,110,31,0,28.5,0.68,37,0 552 | 1,116,70,28,0,27.4,0.204,21,0 553 | 3,84,68,30,106,31.9,0.591,25,0 554 | 6,114,88,0,0,27.8,0.247,66,0 555 | 1,88,62,24,44,29.9,0.422,23,0 556 | 1,84,64,23,115,36.9,0.471,28,0 557 | 7,124,70,33,215,25.5,0.161,37,0 558 | 1,97,70,40,0,38.1,0.218,30,0 559 | 8,110,76,0,0,27.8,0.237,58,0 560 | 11,103,68,40,0,46.2,0.126,42,0 561 | 11,85,74,0,0,30.1,0.3,35,0 562 | 6,125,76,0,0,33.8,0.121,54,1 563 | 0,198,66,32,274,41.3,0.502,28,1 564 | 1,87,68,34,77,37.6,0.401,24,0 565 | 6,99,60,19,54,26.9,0.497,32,0 566 | 0,91,80,0,0,32.4,0.601,27,0 567 | 2,95,54,14,88,26.1,0.748,22,0 568 | 1,99,72,30,18,38.6,0.412,21,0 569 | 6,92,62,32,126,32,0.085,46,0 570 | 4,154,72,29,126,31.3,0.338,37,0 571 | 0,121,66,30,165,34.3,0.203,33,1 572 | 3,78,70,0,0,32.5,0.27,39,0 573 | 2,130,96,0,0,22.6,0.268,21,0 574 | 3,111,58,31,44,29.5,0.43,22,0 575 | 2,98,60,17,120,34.7,0.198,22,0 576 | 1,143,86,30,330,30.1,0.892,23,0 577 | 1,119,44,47,63,35.5,0.28,25,0 578 | 6,108,44,20,130,24,0.813,35,0 579 | 2,118,80,0,0,42.9,0.693,21,1 580 | 10,133,68,0,0,27,0.245,36,0 581 | 2,197,70,99,0,34.7,0.575,62,1 582 | 0,151,90,46,0,42.1,0.371,21,1 583 | 6,109,60,27,0,25,0.206,27,0 584 | 12,121,78,17,0,26.5,0.259,62,0 585 | 8,100,76,0,0,38.7,0.19,42,0 586 | 8,124,76,24,600,28.7,0.687,52,1 587 | 1,93,56,11,0,22.5,0.417,22,0 588 | 8,143,66,0,0,34.9,0.129,41,1 589 | 6,103,66,0,0,24.3,0.249,29,0 590 | 
3,176,86,27,156,33.3,1.154,52,1 591 | 0,73,0,0,0,21.1,0.342,25,0 592 | 11,111,84,40,0,46.8,0.925,45,1 593 | 2,112,78,50,140,39.4,0.175,24,0 594 | 3,132,80,0,0,34.4,0.402,44,1 595 | 2,82,52,22,115,28.5,1.699,25,0 596 | 6,123,72,45,230,33.6,0.733,34,0 597 | 0,188,82,14,185,32,0.682,22,1 598 | 0,67,76,0,0,45.3,0.194,46,0 599 | 1,89,24,19,25,27.8,0.559,21,0 600 | 1,173,74,0,0,36.8,0.088,38,1 601 | 1,109,38,18,120,23.1,0.407,26,0 602 | 1,108,88,19,0,27.1,0.4,24,0 603 | 6,96,0,0,0,23.7,0.19,28,0 604 | 1,124,74,36,0,27.8,0.1,30,0 605 | 7,150,78,29,126,35.2,0.692,54,1 606 | 4,183,0,0,0,28.4,0.212,36,1 607 | 1,124,60,32,0,35.8,0.514,21,0 608 | 1,181,78,42,293,40,1.258,22,1 609 | 1,92,62,25,41,19.5,0.482,25,0 610 | 0,152,82,39,272,41.5,0.27,27,0 611 | 1,111,62,13,182,24,0.138,23,0 612 | 3,106,54,21,158,30.9,0.292,24,0 613 | 3,174,58,22,194,32.9,0.593,36,1 614 | 7,168,88,42,321,38.2,0.787,40,1 615 | 6,105,80,28,0,32.5,0.878,26,0 616 | 11,138,74,26,144,36.1,0.557,50,1 617 | 3,106,72,0,0,25.8,0.207,27,0 618 | 6,117,96,0,0,28.7,0.157,30,0 619 | 2,68,62,13,15,20.1,0.257,23,0 620 | 9,112,82,24,0,28.2,1.282,50,1 621 | 0,119,0,0,0,32.4,0.141,24,1 622 | 2,112,86,42,160,38.4,0.246,28,0 623 | 2,92,76,20,0,24.2,1.698,28,0 624 | 6,183,94,0,0,40.8,1.461,45,0 625 | 0,94,70,27,115,43.5,0.347,21,0 626 | 2,108,64,0,0,30.8,0.158,21,0 627 | 4,90,88,47,54,37.7,0.362,29,0 628 | 0,125,68,0,0,24.7,0.206,21,0 629 | 0,132,78,0,0,32.4,0.393,21,0 630 | 5,128,80,0,0,34.6,0.144,45,0 631 | 4,94,65,22,0,24.7,0.148,21,0 632 | 7,114,64,0,0,27.4,0.732,34,1 633 | 0,102,78,40,90,34.5,0.238,24,0 634 | 2,111,60,0,0,26.2,0.343,23,0 635 | 1,128,82,17,183,27.5,0.115,22,0 636 | 10,92,62,0,0,25.9,0.167,31,0 637 | 13,104,72,0,0,31.2,0.465,38,1 638 | 5,104,74,0,0,28.8,0.153,48,0 639 | 2,94,76,18,66,31.6,0.649,23,0 640 | 7,97,76,32,91,40.9,0.871,32,1 641 | 1,100,74,12,46,19.5,0.149,28,0 642 | 0,102,86,17,105,29.3,0.695,27,0 643 | 4,128,70,0,0,34.3,0.303,24,0 644 | 6,147,80,0,0,29.5,0.178,50,1 645 | 
4,90,0,0,0,28,0.61,31,0 646 | 3,103,72,30,152,27.6,0.73,27,0 647 | 2,157,74,35,440,39.4,0.134,30,0 648 | 1,167,74,17,144,23.4,0.447,33,1 649 | 0,179,50,36,159,37.8,0.455,22,1 650 | 11,136,84,35,130,28.3,0.26,42,1 651 | 0,107,60,25,0,26.4,0.133,23,0 652 | 1,91,54,25,100,25.2,0.234,23,0 653 | 1,117,60,23,106,33.8,0.466,27,0 654 | 5,123,74,40,77,34.1,0.269,28,0 655 | 2,120,54,0,0,26.8,0.455,27,0 656 | 1,106,70,28,135,34.2,0.142,22,0 657 | 2,155,52,27,540,38.7,0.24,25,1 658 | 2,101,58,35,90,21.8,0.155,22,0 659 | 1,120,80,48,200,38.9,1.162,41,0 660 | 11,127,106,0,0,39,0.19,51,0 661 | 3,80,82,31,70,34.2,1.292,27,1 662 | 10,162,84,0,0,27.7,0.182,54,0 663 | 1,199,76,43,0,42.9,1.394,22,1 664 | 8,167,106,46,231,37.6,0.165,43,1 665 | 9,145,80,46,130,37.9,0.637,40,1 666 | 6,115,60,39,0,33.7,0.245,40,1 667 | 1,112,80,45,132,34.8,0.217,24,0 668 | 4,145,82,18,0,32.5,0.235,70,1 669 | 10,111,70,27,0,27.5,0.141,40,1 670 | 6,98,58,33,190,34,0.43,43,0 671 | 9,154,78,30,100,30.9,0.164,45,0 672 | 6,165,68,26,168,33.6,0.631,49,0 673 | 1,99,58,10,0,25.4,0.551,21,0 674 | 10,68,106,23,49,35.5,0.285,47,0 675 | 3,123,100,35,240,57.3,0.88,22,0 676 | 8,91,82,0,0,35.6,0.587,68,0 677 | 6,195,70,0,0,30.9,0.328,31,1 678 | 9,156,86,0,0,24.8,0.23,53,1 679 | 0,93,60,0,0,35.3,0.263,25,0 680 | 3,121,52,0,0,36,0.127,25,1 681 | 2,101,58,17,265,24.2,0.614,23,0 682 | 2,56,56,28,45,24.2,0.332,22,0 683 | 0,162,76,36,0,49.6,0.364,26,1 684 | 0,95,64,39,105,44.6,0.366,22,0 685 | 4,125,80,0,0,32.3,0.536,27,1 686 | 5,136,82,0,0,0,0.64,69,0 687 | 2,129,74,26,205,33.2,0.591,25,0 688 | 3,130,64,0,0,23.1,0.314,22,0 689 | 1,107,50,19,0,28.3,0.181,29,0 690 | 1,140,74,26,180,24.1,0.828,23,0 691 | 1,144,82,46,180,46.1,0.335,46,1 692 | 8,107,80,0,0,24.6,0.856,34,0 693 | 13,158,114,0,0,42.3,0.257,44,1 694 | 2,121,70,32,95,39.1,0.886,23,0 695 | 7,129,68,49,125,38.5,0.439,43,1 696 | 2,90,60,0,0,23.5,0.191,25,0 697 | 7,142,90,24,480,30.4,0.128,43,1 698 | 3,169,74,19,125,29.9,0.268,31,1 699 | 0,99,0,0,0,25,0.253,22,0 700 | 
4,127,88,11,155,34.5,0.598,28,0 701 | 4,118,70,0,0,44.5,0.904,26,0 702 | 2,122,76,27,200,35.9,0.483,26,0 703 | 6,125,78,31,0,27.6,0.565,49,1 704 | 1,168,88,29,0,35,0.905,52,1 705 | 2,129,0,0,0,38.5,0.304,41,0 706 | 4,110,76,20,100,28.4,0.118,27,0 707 | 6,80,80,36,0,39.8,0.177,28,0 708 | 10,115,0,0,0,0,0.261,30,1 709 | 2,127,46,21,335,34.4,0.176,22,0 710 | 9,164,78,0,0,32.8,0.148,45,1 711 | 2,93,64,32,160,38,0.674,23,1 712 | 3,158,64,13,387,31.2,0.295,24,0 713 | 5,126,78,27,22,29.6,0.439,40,0 714 | 10,129,62,36,0,41.2,0.441,38,1 715 | 0,134,58,20,291,26.4,0.352,21,0 716 | 3,102,74,0,0,29.5,0.121,32,0 717 | 7,187,50,33,392,33.9,0.826,34,1 718 | 3,173,78,39,185,33.8,0.97,31,1 719 | 10,94,72,18,0,23.1,0.595,56,0 720 | 1,108,60,46,178,35.5,0.415,24,0 721 | 5,97,76,27,0,35.6,0.378,52,1 722 | 4,83,86,19,0,29.3,0.317,34,0 723 | 1,114,66,36,200,38.1,0.289,21,0 724 | 1,149,68,29,127,29.3,0.349,42,1 725 | 5,117,86,30,105,39.1,0.251,42,0 726 | 1,111,94,0,0,32.8,0.265,45,0 727 | 4,112,78,40,0,39.4,0.236,38,0 728 | 1,116,78,29,180,36.1,0.496,25,0 729 | 0,141,84,26,0,32.4,0.433,22,0 730 | 2,175,88,0,0,22.9,0.326,22,0 731 | 2,92,52,0,0,30.1,0.141,22,0 732 | 3,130,78,23,79,28.4,0.323,34,1 733 | 8,120,86,0,0,28.4,0.259,22,1 734 | 2,174,88,37,120,44.5,0.646,24,1 735 | 2,106,56,27,165,29,0.426,22,0 736 | 2,105,75,0,0,23.3,0.56,53,0 737 | 4,95,60,32,0,35.4,0.284,28,0 738 | 0,126,86,27,120,27.4,0.515,21,0 739 | 8,65,72,23,0,32,0.6,42,0 740 | 2,99,60,17,160,36.6,0.453,21,0 741 | 1,102,74,0,0,39.5,0.293,42,1 742 | 11,120,80,37,150,42.3,0.785,48,1 743 | 3,102,44,20,94,30.8,0.4,26,0 744 | 1,109,58,18,116,28.5,0.219,22,0 745 | 9,140,94,0,0,32.7,0.734,45,1 746 | 13,153,88,37,140,40.6,1.174,39,0 747 | 12,100,84,33,105,30,0.488,46,0 748 | 1,147,94,41,0,49.3,0.358,27,1 749 | 1,81,74,41,57,46.3,1.096,32,0 750 | 3,187,70,22,200,36.4,0.408,36,1 751 | 6,162,62,0,0,24.3,0.178,50,1 752 | 4,136,70,0,0,31.2,1.182,22,1 753 | 1,121,78,39,74,39,0.261,28,0 754 | 3,108,62,24,0,26,0.223,25,0 755 | 
0,181,88,44,510,43.3,0.222,26,1 756 | 8,154,78,32,0,32.4,0.443,45,1 757 | 1,128,88,39,110,36.5,1.057,37,1 758 | 7,137,90,41,0,32,0.391,39,0 759 | 0,123,72,0,0,36.3,0.258,52,1 760 | 1,106,76,0,0,37.5,0.197,26,0 761 | 6,190,92,0,0,35.5,0.278,66,1 762 | 2,88,58,26,16,28.4,0.766,22,0 763 | 9,170,74,31,0,44,0.403,43,1 764 | 9,89,62,0,0,22.5,0.142,33,0 765 | 10,101,76,48,180,32.9,0.171,63,0 766 | 2,122,70,27,0,36.8,0.34,27,0 767 | 5,121,72,23,112,26.2,0.245,30,0 768 | 1,126,60,0,0,30.1,0.349,47,1 769 | 1,93,70,31,0,30.4,0.315,23,0 770 | -------------------------------------------------------------------------------- /Assignments/Assignment 1 Answers/insurance.csv: -------------------------------------------------------------------------------- 1 | age,sex,bmi,children,smoker,region,charges 2 | 19,female,27.9,0,yes,southwest,16884.924 3 | 18,male,33.77,1,no,southeast,1725.5523 4 | 28,male,33,3,no,southeast,4449.462 5 | 33,male,22.705,0,no,northwest,21984.47061 6 | 32,male,28.88,0,no,northwest,3866.8552 7 | 31,female,25.74,0,no,southeast,3756.6216 8 | 46,female,33.44,1,no,southeast,8240.5896 9 | 37,female,27.74,3,no,northwest,7281.5056 10 | 37,male,29.83,2,no,northeast,6406.4107 11 | 60,female,25.84,0,no,northwest,28923.13692 12 | 25,male,26.22,0,no,northeast,2721.3208 13 | 62,female,26.29,0,yes,southeast,27808.7251 14 | 23,male,34.4,0,no,southwest,1826.843 15 | 56,female,39.82,0,no,southeast,11090.7178 16 | 27,male,42.13,0,yes,southeast,39611.7577 17 | 19,male,24.6,1,no,southwest,1837.237 18 | 52,female,30.78,1,no,northeast,10797.3362 19 | 23,male,23.845,0,no,northeast,2395.17155 20 | 56,male,40.3,0,no,southwest,10602.385 21 | 30,male,35.3,0,yes,southwest,36837.467 22 | 60,female,36.005,0,no,northeast,13228.84695 23 | 30,female,32.4,1,no,southwest,4149.736 24 | 18,male,34.1,0,no,southeast,1137.011 25 | 34,female,31.92,1,yes,northeast,37701.8768 26 | 37,male,28.025,2,no,northwest,6203.90175 27 | 59,female,27.72,3,no,southeast,14001.1338 28 | 
63,female,23.085,0,no,northeast,14451.83515 29 | 55,female,32.775,2,no,northwest,12268.63225 30 | 23,male,17.385,1,no,northwest,2775.19215 31 | 31,male,36.3,2,yes,southwest,38711 32 | 22,male,35.6,0,yes,southwest,35585.576 33 | 18,female,26.315,0,no,northeast,2198.18985 34 | 19,female,28.6,5,no,southwest,4687.797 35 | 63,male,28.31,0,no,northwest,13770.0979 36 | 28,male,36.4,1,yes,southwest,51194.55914 37 | 19,male,20.425,0,no,northwest,1625.43375 38 | 62,female,32.965,3,no,northwest,15612.19335 39 | 26,male,20.8,0,no,southwest,2302.3 40 | 35,male,36.67,1,yes,northeast,39774.2763 41 | 60,male,39.9,0,yes,southwest,48173.361 42 | 24,female,26.6,0,no,northeast,3046.062 43 | 31,female,36.63,2,no,southeast,4949.7587 44 | 41,male,21.78,1,no,southeast,6272.4772 45 | 37,female,30.8,2,no,southeast,6313.759 46 | 38,male,37.05,1,no,northeast,6079.6715 47 | 55,male,37.3,0,no,southwest,20630.28351 48 | 18,female,38.665,2,no,northeast,3393.35635 49 | 28,female,34.77,0,no,northwest,3556.9223 50 | 60,female,24.53,0,no,southeast,12629.8967 51 | 36,male,35.2,1,yes,southeast,38709.176 52 | 18,female,35.625,0,no,northeast,2211.13075 53 | 21,female,33.63,2,no,northwest,3579.8287 54 | 48,male,28,1,yes,southwest,23568.272 55 | 36,male,34.43,0,yes,southeast,37742.5757 56 | 40,female,28.69,3,no,northwest,8059.6791 57 | 58,male,36.955,2,yes,northwest,47496.49445 58 | 58,female,31.825,2,no,northeast,13607.36875 59 | 18,male,31.68,2,yes,southeast,34303.1672 60 | 53,female,22.88,1,yes,southeast,23244.7902 61 | 34,female,37.335,2,no,northwest,5989.52365 62 | 43,male,27.36,3,no,northeast,8606.2174 63 | 25,male,33.66,4,no,southeast,4504.6624 64 | 64,male,24.7,1,no,northwest,30166.61817 65 | 28,female,25.935,1,no,northwest,4133.64165 66 | 20,female,22.42,0,yes,northwest,14711.7438 67 | 19,female,28.9,0,no,southwest,1743.214 68 | 61,female,39.1,2,no,southwest,14235.072 69 | 40,male,26.315,1,no,northwest,6389.37785 70 | 40,female,36.19,0,no,southeast,5920.1041 71 | 
28,male,23.98,3,yes,southeast,17663.1442 72 | 27,female,24.75,0,yes,southeast,16577.7795 73 | 31,male,28.5,5,no,northeast,6799.458 74 | 53,female,28.1,3,no,southwest,11741.726 75 | 58,male,32.01,1,no,southeast,11946.6259 76 | 44,male,27.4,2,no,southwest,7726.854 77 | 57,male,34.01,0,no,northwest,11356.6609 78 | 29,female,29.59,1,no,southeast,3947.4131 79 | 21,male,35.53,0,no,southeast,1532.4697 80 | 22,female,39.805,0,no,northeast,2755.02095 81 | 41,female,32.965,0,no,northwest,6571.02435 82 | 31,male,26.885,1,no,northeast,4441.21315 83 | 45,female,38.285,0,no,northeast,7935.29115 84 | 22,male,37.62,1,yes,southeast,37165.1638 85 | 48,female,41.23,4,no,northwest,11033.6617 86 | 37,female,34.8,2,yes,southwest,39836.519 87 | 45,male,22.895,2,yes,northwest,21098.55405 88 | 57,female,31.16,0,yes,northwest,43578.9394 89 | 56,female,27.2,0,no,southwest,11073.176 90 | 46,female,27.74,0,no,northwest,8026.6666 91 | 55,female,26.98,0,no,northwest,11082.5772 92 | 21,female,39.49,0,no,southeast,2026.9741 93 | 53,female,24.795,1,no,northwest,10942.13205 94 | 59,male,29.83,3,yes,northeast,30184.9367 95 | 35,male,34.77,2,no,northwest,5729.0053 96 | 64,female,31.3,2,yes,southwest,47291.055 97 | 28,female,37.62,1,no,southeast,3766.8838 98 | 54,female,30.8,3,no,southwest,12105.32 99 | 55,male,38.28,0,no,southeast,10226.2842 100 | 56,male,19.95,0,yes,northeast,22412.6485 101 | 38,male,19.3,0,yes,southwest,15820.699 102 | 41,female,31.6,0,no,southwest,6186.127 103 | 30,male,25.46,0,no,northeast,3645.0894 104 | 18,female,30.115,0,no,northeast,21344.8467 105 | 61,female,29.92,3,yes,southeast,30942.1918 106 | 34,female,27.5,1,no,southwest,5003.853 107 | 20,male,28.025,1,yes,northwest,17560.37975 108 | 19,female,28.4,1,no,southwest,2331.519 109 | 26,male,30.875,2,no,northwest,3877.30425 110 | 29,male,27.94,0,no,southeast,2867.1196 111 | 63,male,35.09,0,yes,southeast,47055.5321 112 | 54,male,33.63,1,no,northwest,10825.2537 113 | 55,female,29.7,2,no,southwest,11881.358 114 | 
37,male,30.8,0,no,southwest,4646.759 115 | 21,female,35.72,0,no,northwest,2404.7338 116 | 52,male,32.205,3,no,northeast,11488.31695 117 | 60,male,28.595,0,no,northeast,30259.99556 118 | 58,male,49.06,0,no,southeast,11381.3254 119 | 29,female,27.94,1,yes,southeast,19107.7796 120 | 49,female,27.17,0,no,southeast,8601.3293 121 | 37,female,23.37,2,no,northwest,6686.4313 122 | 44,male,37.1,2,no,southwest,7740.337 123 | 18,male,23.75,0,no,northeast,1705.6245 124 | 20,female,28.975,0,no,northwest,2257.47525 125 | 44,male,31.35,1,yes,northeast,39556.4945 126 | 47,female,33.915,3,no,northwest,10115.00885 127 | 26,female,28.785,0,no,northeast,3385.39915 128 | 19,female,28.3,0,yes,southwest,17081.08 129 | 52,female,37.4,0,no,southwest,9634.538 130 | 32,female,17.765,2,yes,northwest,32734.1863 131 | 38,male,34.7,2,no,southwest,6082.405 132 | 59,female,26.505,0,no,northeast,12815.44495 133 | 61,female,22.04,0,no,northeast,13616.3586 134 | 53,female,35.9,2,no,southwest,11163.568 135 | 19,male,25.555,0,no,northwest,1632.56445 136 | 20,female,28.785,0,no,northeast,2457.21115 137 | 22,female,28.05,0,no,southeast,2155.6815 138 | 19,male,34.1,0,no,southwest,1261.442 139 | 22,male,25.175,0,no,northwest,2045.68525 140 | 54,female,31.9,3,no,southeast,27322.73386 141 | 22,female,36,0,no,southwest,2166.732 142 | 34,male,22.42,2,no,northeast,27375.90478 143 | 26,male,32.49,1,no,northeast,3490.5491 144 | 34,male,25.3,2,yes,southeast,18972.495 145 | 29,male,29.735,2,no,northwest,18157.876 146 | 30,male,28.69,3,yes,northwest,20745.9891 147 | 29,female,38.83,3,no,southeast,5138.2567 148 | 46,male,30.495,3,yes,northwest,40720.55105 149 | 51,female,37.73,1,no,southeast,9877.6077 150 | 53,female,37.43,1,no,northwest,10959.6947 151 | 19,male,28.4,1,no,southwest,1842.519 152 | 35,male,24.13,1,no,northwest,5125.2157 153 | 48,male,29.7,0,no,southeast,7789.635 154 | 32,female,37.145,3,no,northeast,6334.34355 155 | 42,female,23.37,0,yes,northeast,19964.7463 156 | 
40,female,25.46,1,no,northeast,7077.1894 157 | 44,male,39.52,0,no,northwest,6948.7008 158 | 48,male,24.42,0,yes,southeast,21223.6758 159 | 18,male,25.175,0,yes,northeast,15518.18025 160 | 30,male,35.53,0,yes,southeast,36950.2567 161 | 50,female,27.83,3,no,southeast,19749.38338 162 | 42,female,26.6,0,yes,northwest,21348.706 163 | 18,female,36.85,0,yes,southeast,36149.4835 164 | 54,male,39.6,1,no,southwest,10450.552 165 | 32,female,29.8,2,no,southwest,5152.134 166 | 37,male,29.64,0,no,northwest,5028.1466 167 | 47,male,28.215,4,no,northeast,10407.08585 168 | 20,female,37,5,no,southwest,4830.63 169 | 32,female,33.155,3,no,northwest,6128.79745 170 | 19,female,31.825,1,no,northwest,2719.27975 171 | 27,male,18.905,3,no,northeast,4827.90495 172 | 63,male,41.47,0,no,southeast,13405.3903 173 | 49,male,30.3,0,no,southwest,8116.68 174 | 18,male,15.96,0,no,northeast,1694.7964 175 | 35,female,34.8,1,no,southwest,5246.047 176 | 24,female,33.345,0,no,northwest,2855.43755 177 | 63,female,37.7,0,yes,southwest,48824.45 178 | 38,male,27.835,2,no,northwest,6455.86265 179 | 54,male,29.2,1,no,southwest,10436.096 180 | 46,female,28.9,2,no,southwest,8823.279 181 | 41,female,33.155,3,no,northeast,8538.28845 182 | 58,male,28.595,0,no,northwest,11735.87905 183 | 18,female,38.28,0,no,southeast,1631.8212 184 | 22,male,19.95,3,no,northeast,4005.4225 185 | 44,female,26.41,0,no,northwest,7419.4779 186 | 44,male,30.69,2,no,southeast,7731.4271 187 | 36,male,41.895,3,yes,northeast,43753.33705 188 | 26,female,29.92,2,no,southeast,3981.9768 189 | 30,female,30.9,3,no,southwest,5325.651 190 | 41,female,32.2,1,no,southwest,6775.961 191 | 29,female,32.11,2,no,northwest,4922.9159 192 | 61,male,31.57,0,no,southeast,12557.6053 193 | 36,female,26.2,0,no,southwest,4883.866 194 | 25,male,25.74,0,no,southeast,2137.6536 195 | 56,female,26.6,1,no,northwest,12044.342 196 | 18,male,34.43,0,no,southeast,1137.4697 197 | 19,male,30.59,0,no,northwest,1639.5631 198 | 39,female,32.8,0,no,southwest,5649.715 199 | 
45,female,28.6,2,no,southeast,8516.829 200 | 51,female,18.05,0,no,northwest,9644.2525 201 | 64,female,39.33,0,no,northeast,14901.5167 202 | 19,female,32.11,0,no,northwest,2130.6759 203 | 48,female,32.23,1,no,southeast,8871.1517 204 | 60,female,24.035,0,no,northwest,13012.20865 205 | 27,female,36.08,0,yes,southeast,37133.8982 206 | 46,male,22.3,0,no,southwest,7147.105 207 | 28,female,28.88,1,no,northeast,4337.7352 208 | 59,male,26.4,0,no,southeast,11743.299 209 | 35,male,27.74,2,yes,northeast,20984.0936 210 | 63,female,31.8,0,no,southwest,13880.949 211 | 40,male,41.23,1,no,northeast,6610.1097 212 | 20,male,33,1,no,southwest,1980.07 213 | 40,male,30.875,4,no,northwest,8162.71625 214 | 24,male,28.5,2,no,northwest,3537.703 215 | 34,female,26.73,1,no,southeast,5002.7827 216 | 45,female,30.9,2,no,southwest,8520.026 217 | 41,female,37.1,2,no,southwest,7371.772 218 | 53,female,26.6,0,no,northwest,10355.641 219 | 27,male,23.1,0,no,southeast,2483.736 220 | 26,female,29.92,1,no,southeast,3392.9768 221 | 24,female,23.21,0,no,southeast,25081.76784 222 | 34,female,33.7,1,no,southwest,5012.471 223 | 53,female,33.25,0,no,northeast,10564.8845 224 | 32,male,30.8,3,no,southwest,5253.524 225 | 19,male,34.8,0,yes,southwest,34779.615 226 | 42,male,24.64,0,yes,southeast,19515.5416 227 | 55,male,33.88,3,no,southeast,11987.1682 228 | 28,male,38.06,0,no,southeast,2689.4954 229 | 58,female,41.91,0,no,southeast,24227.33724 230 | 41,female,31.635,1,no,northeast,7358.17565 231 | 47,male,25.46,2,no,northeast,9225.2564 232 | 42,female,36.195,1,no,northwest,7443.64305 233 | 59,female,27.83,3,no,southeast,14001.2867 234 | 19,female,17.8,0,no,southwest,1727.785 235 | 59,male,27.5,1,no,southwest,12333.828 236 | 39,male,24.51,2,no,northwest,6710.1919 237 | 40,female,22.22,2,yes,southeast,19444.2658 238 | 18,female,26.73,0,no,southeast,1615.7667 239 | 31,male,38.39,2,no,southeast,4463.2051 240 | 19,male,29.07,0,yes,northwest,17352.6803 241 | 44,male,38.06,1,no,southeast,7152.6714 242 | 
23,female,36.67,2,yes,northeast,38511.6283 243 | 33,female,22.135,1,no,northeast,5354.07465 244 | 55,female,26.8,1,no,southwest,35160.13457 245 | 40,male,35.3,3,no,southwest,7196.867 246 | 63,female,27.74,0,yes,northeast,29523.1656 247 | 54,male,30.02,0,no,northwest,24476.47851 248 | 60,female,38.06,0,no,southeast,12648.7034 249 | 24,male,35.86,0,no,southeast,1986.9334 250 | 19,male,20.9,1,no,southwest,1832.094 251 | 29,male,28.975,1,no,northeast,4040.55825 252 | 18,male,17.29,2,yes,northeast,12829.4551 253 | 63,female,32.2,2,yes,southwest,47305.305 254 | 54,male,34.21,2,yes,southeast,44260.7499 255 | 27,male,30.3,3,no,southwest,4260.744 256 | 50,male,31.825,0,yes,northeast,41097.16175 257 | 55,female,25.365,3,no,northeast,13047.33235 258 | 56,male,33.63,0,yes,northwest,43921.1837 259 | 38,female,40.15,0,no,southeast,5400.9805 260 | 51,male,24.415,4,no,northwest,11520.09985 261 | 19,male,31.92,0,yes,northwest,33750.2918 262 | 58,female,25.2,0,no,southwest,11837.16 263 | 20,female,26.84,1,yes,southeast,17085.2676 264 | 52,male,24.32,3,yes,northeast,24869.8368 265 | 19,male,36.955,0,yes,northwest,36219.40545 266 | 53,female,38.06,3,no,southeast,20462.99766 267 | 46,male,42.35,3,yes,southeast,46151.1245 268 | 40,male,19.8,1,yes,southeast,17179.522 269 | 59,female,32.395,3,no,northeast,14590.63205 270 | 45,male,30.2,1,no,southwest,7441.053 271 | 49,male,25.84,1,no,northeast,9282.4806 272 | 18,male,29.37,1,no,southeast,1719.4363 273 | 50,male,34.2,2,yes,southwest,42856.838 274 | 41,male,37.05,2,no,northwest,7265.7025 275 | 50,male,27.455,1,no,northeast,9617.66245 276 | 25,male,27.55,0,no,northwest,2523.1695 277 | 47,female,26.6,2,no,northeast,9715.841 278 | 19,male,20.615,2,no,northwest,2803.69785 279 | 22,female,24.3,0,no,southwest,2150.469 280 | 59,male,31.79,2,no,southeast,12928.7911 281 | 51,female,21.56,1,no,southeast,9855.1314 282 | 40,female,28.12,1,yes,northeast,22331.5668 283 | 54,male,40.565,3,yes,northeast,48549.17835 284 | 
30,male,27.645,1,no,northeast,4237.12655 285 | 55,female,32.395,1,no,northeast,11879.10405 286 | 52,female,31.2,0,no,southwest,9625.92 287 | 46,male,26.62,1,no,southeast,7742.1098 288 | 46,female,48.07,2,no,northeast,9432.9253 289 | 63,female,26.22,0,no,northwest,14256.1928 290 | 59,female,36.765,1,yes,northeast,47896.79135 291 | 52,male,26.4,3,no,southeast,25992.82104 292 | 28,female,33.4,0,no,southwest,3172.018 293 | 29,male,29.64,1,no,northeast,20277.80751 294 | 25,male,45.54,2,yes,southeast,42112.2356 295 | 22,female,28.82,0,no,southeast,2156.7518 296 | 25,male,26.8,3,no,southwest,3906.127 297 | 18,male,22.99,0,no,northeast,1704.5681 298 | 19,male,27.7,0,yes,southwest,16297.846 299 | 47,male,25.41,1,yes,southeast,21978.6769 300 | 31,male,34.39,3,yes,northwest,38746.3551 301 | 48,female,28.88,1,no,northwest,9249.4952 302 | 36,male,27.55,3,no,northeast,6746.7425 303 | 53,female,22.61,3,yes,northeast,24873.3849 304 | 56,female,37.51,2,no,southeast,12265.5069 305 | 28,female,33,2,no,southeast,4349.462 306 | 57,female,38,2,no,southwest,12646.207 307 | 29,male,33.345,2,no,northwest,19442.3535 308 | 28,female,27.5,2,no,southwest,20177.67113 309 | 30,female,33.33,1,no,southeast,4151.0287 310 | 58,male,34.865,0,no,northeast,11944.59435 311 | 41,female,33.06,2,no,northwest,7749.1564 312 | 50,male,26.6,0,no,southwest,8444.474 313 | 19,female,24.7,0,no,southwest,1737.376 314 | 43,male,35.97,3,yes,southeast,42124.5153 315 | 49,male,35.86,0,no,southeast,8124.4084 316 | 27,female,31.4,0,yes,southwest,34838.873 317 | 52,male,33.25,0,no,northeast,9722.7695 318 | 50,male,32.205,0,no,northwest,8835.26495 319 | 54,male,32.775,0,no,northeast,10435.06525 320 | 44,female,27.645,0,no,northwest,7421.19455 321 | 32,male,37.335,1,no,northeast,4667.60765 322 | 34,male,25.27,1,no,northwest,4894.7533 323 | 26,female,29.64,4,no,northeast,24671.66334 324 | 34,male,30.8,0,yes,southwest,35491.64 325 | 57,male,40.945,0,no,northeast,11566.30055 326 | 29,male,27.2,0,no,southwest,2866.091 327 | 
40,male,34.105,1,no,northeast,6600.20595 328 | 27,female,23.21,1,no,southeast,3561.8889 329 | 45,male,36.48,2,yes,northwest,42760.5022 330 | 64,female,33.8,1,yes,southwest,47928.03 331 | 52,male,36.7,0,no,southwest,9144.565 332 | 61,female,36.385,1,yes,northeast,48517.56315 333 | 52,male,27.36,0,yes,northwest,24393.6224 334 | 61,female,31.16,0,no,northwest,13429.0354 335 | 56,female,28.785,0,no,northeast,11658.37915 336 | 43,female,35.72,2,no,northeast,19144.57652 337 | 64,male,34.5,0,no,southwest,13822.803 338 | 60,male,25.74,0,no,southeast,12142.5786 339 | 62,male,27.55,1,no,northwest,13937.6665 340 | 50,male,32.3,1,yes,northeast,41919.097 341 | 46,female,27.72,1,no,southeast,8232.6388 342 | 24,female,27.6,0,no,southwest,18955.22017 343 | 62,male,30.02,0,no,northwest,13352.0998 344 | 60,female,27.55,0,no,northeast,13217.0945 345 | 63,male,36.765,0,no,northeast,13981.85035 346 | 49,female,41.47,4,no,southeast,10977.2063 347 | 34,female,29.26,3,no,southeast,6184.2994 348 | 33,male,35.75,2,no,southeast,4889.9995 349 | 46,male,33.345,1,no,northeast,8334.45755 350 | 36,female,29.92,1,no,southeast,5478.0368 351 | 19,male,27.835,0,no,northwest,1635.73365 352 | 57,female,23.18,0,no,northwest,11830.6072 353 | 50,female,25.6,0,no,southwest,8932.084 354 | 30,female,27.7,0,no,southwest,3554.203 355 | 33,male,35.245,0,no,northeast,12404.8791 356 | 18,female,38.28,0,no,southeast,14133.03775 357 | 46,male,27.6,0,no,southwest,24603.04837 358 | 46,male,43.89,3,no,southeast,8944.1151 359 | 47,male,29.83,3,no,northwest,9620.3307 360 | 23,male,41.91,0,no,southeast,1837.2819 361 | 18,female,20.79,0,no,southeast,1607.5101 362 | 48,female,32.3,2,no,northeast,10043.249 363 | 35,male,30.5,1,no,southwest,4751.07 364 | 19,female,21.7,0,yes,southwest,13844.506 365 | 21,female,26.4,1,no,southwest,2597.779 366 | 21,female,21.89,2,no,southeast,3180.5101 367 | 49,female,30.78,1,no,northeast,9778.3472 368 | 56,female,32.3,3,no,northeast,13430.265 369 | 42,female,24.985,2,no,northwest,8017.06115 
370 | 44,male,32.015,2,no,northwest,8116.26885 371 | 18,male,30.4,3,no,northeast,3481.868 372 | 61,female,21.09,0,no,northwest,13415.0381 373 | 57,female,22.23,0,no,northeast,12029.2867 374 | 42,female,33.155,1,no,northeast,7639.41745 375 | 26,male,32.9,2,yes,southwest,36085.219 376 | 20,male,33.33,0,no,southeast,1391.5287 377 | 23,female,28.31,0,yes,northwest,18033.9679 378 | 39,female,24.89,3,yes,northeast,21659.9301 379 | 24,male,40.15,0,yes,southeast,38126.2465 380 | 64,female,30.115,3,no,northwest,16455.70785 381 | 62,male,31.46,1,no,southeast,27000.98473 382 | 27,female,17.955,2,yes,northeast,15006.57945 383 | 55,male,30.685,0,yes,northeast,42303.69215 384 | 55,male,33,0,no,southeast,20781.48892 385 | 35,female,43.34,2,no,southeast,5846.9176 386 | 44,male,22.135,2,no,northeast,8302.53565 387 | 19,male,34.4,0,no,southwest,1261.859 388 | 58,female,39.05,0,no,southeast,11856.4115 389 | 50,male,25.365,2,no,northwest,30284.64294 390 | 26,female,22.61,0,no,northwest,3176.8159 391 | 24,female,30.21,3,no,northwest,4618.0799 392 | 48,male,35.625,4,no,northeast,10736.87075 393 | 19,female,37.43,0,no,northwest,2138.0707 394 | 48,male,31.445,1,no,northeast,8964.06055 395 | 49,male,31.35,1,no,northeast,9290.1395 396 | 46,female,32.3,2,no,northeast,9411.005 397 | 46,male,19.855,0,no,northwest,7526.70645 398 | 43,female,34.4,3,no,southwest,8522.003 399 | 21,male,31.02,0,no,southeast,16586.49771 400 | 64,male,25.6,2,no,southwest,14988.432 401 | 18,female,38.17,0,no,southeast,1631.6683 402 | 51,female,20.6,0,no,southwest,9264.797 403 | 47,male,47.52,1,no,southeast,8083.9198 404 | 64,female,32.965,0,no,northwest,14692.66935 405 | 49,male,32.3,3,no,northwest,10269.46 406 | 31,male,20.4,0,no,southwest,3260.199 407 | 52,female,38.38,2,no,northeast,11396.9002 408 | 33,female,24.31,0,no,southeast,4185.0979 409 | 47,female,23.6,1,no,southwest,8539.671 410 | 38,male,21.12,3,no,southeast,6652.5288 411 | 32,male,30.03,1,no,southeast,4074.4537 412 | 
19,male,17.48,0,no,northwest,1621.3402 413 | 44,female,20.235,1,yes,northeast,19594.80965 414 | 26,female,17.195,2,yes,northeast,14455.64405 415 | 25,male,23.9,5,no,southwest,5080.096 416 | 19,female,35.15,0,no,northwest,2134.9015 417 | 43,female,35.64,1,no,southeast,7345.7266 418 | 52,male,34.1,0,no,southeast,9140.951 419 | 36,female,22.6,2,yes,southwest,18608.262 420 | 64,male,39.16,1,no,southeast,14418.2804 421 | 63,female,26.98,0,yes,northwest,28950.4692 422 | 64,male,33.88,0,yes,southeast,46889.2612 423 | 61,male,35.86,0,yes,southeast,46599.1084 424 | 40,male,32.775,1,yes,northeast,39125.33225 425 | 25,male,30.59,0,no,northeast,2727.3951 426 | 48,male,30.2,2,no,southwest,8968.33 427 | 45,male,24.31,5,no,southeast,9788.8659 428 | 38,female,27.265,1,no,northeast,6555.07035 429 | 18,female,29.165,0,no,northeast,7323.734819 430 | 21,female,16.815,1,no,northeast,3167.45585 431 | 27,female,30.4,3,no,northwest,18804.7524 432 | 19,male,33.1,0,no,southwest,23082.95533 433 | 29,female,20.235,2,no,northwest,4906.40965 434 | 42,male,26.9,0,no,southwest,5969.723 435 | 60,female,30.5,0,no,southwest,12638.195 436 | 31,male,28.595,1,no,northwest,4243.59005 437 | 60,male,33.11,3,no,southeast,13919.8229 438 | 22,male,31.73,0,no,northeast,2254.7967 439 | 35,male,28.9,3,no,southwest,5926.846 440 | 52,female,46.75,5,no,southeast,12592.5345 441 | 26,male,29.45,0,no,northeast,2897.3235 442 | 31,female,32.68,1,no,northwest,4738.2682 443 | 33,female,33.5,0,yes,southwest,37079.372 444 | 18,male,43.01,0,no,southeast,1149.3959 445 | 59,female,36.52,1,no,southeast,28287.89766 446 | 56,male,26.695,1,yes,northwest,26109.32905 447 | 45,female,33.1,0,no,southwest,7345.084 448 | 60,male,29.64,0,no,northeast,12730.9996 449 | 56,female,25.65,0,no,northwest,11454.0215 450 | 40,female,29.6,0,no,southwest,5910.944 451 | 35,male,38.6,1,no,southwest,4762.329 452 | 39,male,29.6,4,no,southwest,7512.267 453 | 30,male,24.13,1,no,northwest,4032.2407 454 | 24,male,23.4,0,no,southwest,1969.614 455 | 
20,male,29.735,0,no,northwest,1769.53165 456 | 32,male,46.53,2,no,southeast,4686.3887 457 | 59,male,37.4,0,no,southwest,21797.0004 458 | 55,female,30.14,2,no,southeast,11881.9696 459 | 57,female,30.495,0,no,northwest,11840.77505 460 | 56,male,39.6,0,no,southwest,10601.412 461 | 40,female,33,3,no,southeast,7682.67 462 | 49,female,36.63,3,no,southeast,10381.4787 463 | 42,male,30,0,yes,southwest,22144.032 464 | 62,female,38.095,2,no,northeast,15230.32405 465 | 56,male,25.935,0,no,northeast,11165.41765 466 | 19,male,25.175,0,no,northwest,1632.03625 467 | 30,female,28.38,1,yes,southeast,19521.9682 468 | 60,female,28.7,1,no,southwest,13224.693 469 | 56,female,33.82,2,no,northwest,12643.3778 470 | 28,female,24.32,1,no,northeast,23288.9284 471 | 18,female,24.09,1,no,southeast,2201.0971 472 | 27,male,32.67,0,no,southeast,2497.0383 473 | 18,female,30.115,0,no,northeast,2203.47185 474 | 19,female,29.8,0,no,southwest,1744.465 475 | 47,female,33.345,0,no,northeast,20878.78443 476 | 54,male,25.1,3,yes,southwest,25382.297 477 | 61,male,28.31,1,yes,northwest,28868.6639 478 | 24,male,28.5,0,yes,northeast,35147.52848 479 | 25,male,35.625,0,no,northwest,2534.39375 480 | 21,male,36.85,0,no,southeast,1534.3045 481 | 23,male,32.56,0,no,southeast,1824.2854 482 | 63,male,41.325,3,no,northwest,15555.18875 483 | 49,male,37.51,2,no,southeast,9304.7019 484 | 18,female,31.35,0,no,southeast,1622.1885 485 | 51,female,39.5,1,no,southwest,9880.068 486 | 48,male,34.3,3,no,southwest,9563.029 487 | 31,female,31.065,0,no,northeast,4347.02335 488 | 54,female,21.47,3,no,northwest,12475.3513 489 | 19,male,28.7,0,no,southwest,1253.936 490 | 44,female,38.06,0,yes,southeast,48885.13561 491 | 53,male,31.16,1,no,northwest,10461.9794 492 | 19,female,32.9,0,no,southwest,1748.774 493 | 61,female,25.08,0,no,southeast,24513.09126 494 | 18,female,25.08,0,no,northeast,2196.4732 495 | 61,male,43.4,0,no,southwest,12574.049 496 | 21,male,25.7,4,yes,southwest,17942.106 497 | 20,male,27.93,0,no,northeast,1967.0227 498 | 
31,female,23.6,2,no,southwest,4931.647 499 | 45,male,28.7,2,no,southwest,8027.968 500 | 44,female,23.98,2,no,southeast,8211.1002 501 | 62,female,39.2,0,no,southwest,13470.86 502 | 29,male,34.4,0,yes,southwest,36197.699 503 | 43,male,26.03,0,no,northeast,6837.3687 504 | 51,male,23.21,1,yes,southeast,22218.1149 505 | 19,male,30.25,0,yes,southeast,32548.3405 506 | 38,female,28.93,1,no,southeast,5974.3847 507 | 37,male,30.875,3,no,northwest,6796.86325 508 | 22,male,31.35,1,no,northwest,2643.2685 509 | 21,male,23.75,2,no,northwest,3077.0955 510 | 24,female,25.27,0,no,northeast,3044.2133 511 | 57,female,28.7,0,no,southwest,11455.28 512 | 56,male,32.11,1,no,northeast,11763.0009 513 | 27,male,33.66,0,no,southeast,2498.4144 514 | 51,male,22.42,0,no,northeast,9361.3268 515 | 19,male,30.4,0,no,southwest,1256.299 516 | 39,male,28.3,1,yes,southwest,21082.16 517 | 58,male,35.7,0,no,southwest,11362.755 518 | 20,male,35.31,1,no,southeast,27724.28875 519 | 45,male,30.495,2,no,northwest,8413.46305 520 | 35,female,31,1,no,southwest,5240.765 521 | 31,male,30.875,0,no,northeast,3857.75925 522 | 50,female,27.36,0,no,northeast,25656.57526 523 | 32,female,44.22,0,no,southeast,3994.1778 524 | 51,female,33.915,0,no,northeast,9866.30485 525 | 38,female,37.73,0,no,southeast,5397.6167 526 | 42,male,26.07,1,yes,southeast,38245.59327 527 | 18,female,33.88,0,no,southeast,11482.63485 528 | 19,female,30.59,2,no,northwest,24059.68019 529 | 51,female,25.8,1,no,southwest,9861.025 530 | 46,male,39.425,1,no,northeast,8342.90875 531 | 18,male,25.46,0,no,northeast,1708.0014 532 | 57,male,42.13,1,yes,southeast,48675.5177 533 | 62,female,31.73,0,no,northeast,14043.4767 534 | 59,male,29.7,2,no,southeast,12925.886 535 | 37,male,36.19,0,no,southeast,19214.70553 536 | 64,male,40.48,0,no,southeast,13831.1152 537 | 38,male,28.025,1,no,northeast,6067.12675 538 | 33,female,38.9,3,no,southwest,5972.378 539 | 46,female,30.2,2,no,southwest,8825.086 540 | 46,female,28.05,1,no,southeast,8233.0975 541 | 
53,male,31.35,0,no,southeast,27346.04207 542 | 34,female,38,3,no,southwest,6196.448 543 | 20,female,31.79,2,no,southeast,3056.3881 544 | 63,female,36.3,0,no,southeast,13887.204 545 | 54,female,47.41,0,yes,southeast,63770.42801 546 | 54,male,30.21,0,no,northwest,10231.4999 547 | 49,male,25.84,2,yes,northwest,23807.2406 548 | 28,male,35.435,0,no,northeast,3268.84665 549 | 54,female,46.7,2,no,southwest,11538.421 550 | 25,female,28.595,0,no,northeast,3213.62205 551 | 43,female,46.2,0,yes,southeast,45863.205 552 | 63,male,30.8,0,no,southwest,13390.559 553 | 32,female,28.93,0,no,southeast,3972.9247 554 | 62,male,21.4,0,no,southwest,12957.118 555 | 52,female,31.73,2,no,northwest,11187.6567 556 | 25,female,41.325,0,no,northeast,17878.90068 557 | 28,male,23.8,2,no,southwest,3847.674 558 | 46,male,33.44,1,no,northeast,8334.5896 559 | 34,male,34.21,0,no,southeast,3935.1799 560 | 35,female,34.105,3,yes,northwest,39983.42595 561 | 19,male,35.53,0,no,northwest,1646.4297 562 | 46,female,19.95,2,no,northwest,9193.8385 563 | 54,female,32.68,0,no,northeast,10923.9332 564 | 27,male,30.5,0,no,southwest,2494.022 565 | 50,male,44.77,1,no,southeast,9058.7303 566 | 18,female,32.12,2,no,southeast,2801.2588 567 | 19,female,30.495,0,no,northwest,2128.43105 568 | 38,female,40.565,1,no,northwest,6373.55735 569 | 41,male,30.59,2,no,northwest,7256.7231 570 | 49,female,31.9,5,no,southwest,11552.904 571 | 48,male,40.565,2,yes,northwest,45702.02235 572 | 31,female,29.1,0,no,southwest,3761.292 573 | 18,female,37.29,1,no,southeast,2219.4451 574 | 30,female,43.12,2,no,southeast,4753.6368 575 | 62,female,36.86,1,no,northeast,31620.00106 576 | 57,female,34.295,2,no,northeast,13224.05705 577 | 58,female,27.17,0,no,northwest,12222.8983 578 | 22,male,26.84,0,no,southeast,1664.9996 579 | 31,female,38.095,1,yes,northeast,58571.07448 580 | 52,male,30.2,1,no,southwest,9724.53 581 | 25,female,23.465,0,no,northeast,3206.49135 582 | 59,male,25.46,1,no,northeast,12913.9924 583 | 
19,male,30.59,0,no,northwest,1639.5631 584 | 39,male,45.43,2,no,southeast,6356.2707 585 | 32,female,23.65,1,no,southeast,17626.23951 586 | 19,male,20.7,0,no,southwest,1242.816 587 | 33,female,28.27,1,no,southeast,4779.6023 588 | 21,male,20.235,3,no,northeast,3861.20965 589 | 34,female,30.21,1,yes,northwest,43943.8761 590 | 61,female,35.91,0,no,northeast,13635.6379 591 | 38,female,30.69,1,no,southeast,5976.8311 592 | 58,female,29,0,no,southwest,11842.442 593 | 47,male,19.57,1,no,northwest,8428.0693 594 | 20,male,31.13,2,no,southeast,2566.4707 595 | 21,female,21.85,1,yes,northeast,15359.1045 596 | 41,male,40.26,0,no,southeast,5709.1644 597 | 46,female,33.725,1,no,northeast,8823.98575 598 | 42,female,29.48,2,no,southeast,7640.3092 599 | 34,female,33.25,1,no,northeast,5594.8455 600 | 43,male,32.6,2,no,southwest,7441.501 601 | 52,female,37.525,2,no,northwest,33471.97189 602 | 18,female,39.16,0,no,southeast,1633.0444 603 | 51,male,31.635,0,no,northwest,9174.13565 604 | 56,female,25.3,0,no,southwest,11070.535 605 | 64,female,39.05,3,no,southeast,16085.1275 606 | 19,female,28.31,0,yes,northwest,17468.9839 607 | 51,female,34.1,0,no,southeast,9283.562 608 | 27,female,25.175,0,no,northeast,3558.62025 609 | 59,female,23.655,0,yes,northwest,25678.77845 610 | 28,male,26.98,2,no,northeast,4435.0942 611 | 30,male,37.8,2,yes,southwest,39241.442 612 | 47,female,29.37,1,no,southeast,8547.6913 613 | 38,female,34.8,2,no,southwest,6571.544 614 | 18,female,33.155,0,no,northeast,2207.69745 615 | 34,female,19,3,no,northeast,6753.038 616 | 20,female,33,0,no,southeast,1880.07 617 | 47,female,36.63,1,yes,southeast,42969.8527 618 | 56,female,28.595,0,no,northeast,11658.11505 619 | 49,male,25.6,2,yes,southwest,23306.547 620 | 19,female,33.11,0,yes,southeast,34439.8559 621 | 55,female,37.1,0,no,southwest,10713.644 622 | 30,male,31.4,1,no,southwest,3659.346 623 | 37,male,34.1,4,yes,southwest,40182.246 624 | 49,female,21.3,1,no,southwest,9182.17 625 | 18,male,33.535,0,yes,northeast,34617.84065 626 
| 59,male,28.785,0,no,northwest,12129.61415 627 | 29,female,26.03,0,no,northwest,3736.4647 628 | 36,male,28.88,3,no,northeast,6748.5912 629 | 33,male,42.46,1,no,southeast,11326.71487 630 | 58,male,38,0,no,southwest,11365.952 631 | 44,female,38.95,0,yes,northwest,42983.4585 632 | 53,male,36.1,1,no,southwest,10085.846 633 | 24,male,29.3,0,no,southwest,1977.815 634 | 29,female,35.53,0,no,southeast,3366.6697 635 | 40,male,22.705,2,no,northeast,7173.35995 636 | 51,male,39.7,1,no,southwest,9391.346 637 | 64,male,38.19,0,no,northeast,14410.9321 638 | 19,female,24.51,1,no,northwest,2709.1119 639 | 35,female,38.095,2,no,northeast,24915.04626 640 | 39,male,26.41,0,yes,northeast,20149.3229 641 | 56,male,33.66,4,no,southeast,12949.1554 642 | 33,male,42.4,5,no,southwest,6666.243 643 | 42,male,28.31,3,yes,northwest,32787.45859 644 | 61,male,33.915,0,no,northeast,13143.86485 645 | 23,female,34.96,3,no,northwest,4466.6214 646 | 43,male,35.31,2,no,southeast,18806.14547 647 | 48,male,30.78,3,no,northeast,10141.1362 648 | 39,male,26.22,1,no,northwest,6123.5688 649 | 40,female,23.37,3,no,northeast,8252.2843 650 | 18,male,28.5,0,no,northeast,1712.227 651 | 58,female,32.965,0,no,northeast,12430.95335 652 | 49,female,42.68,2,no,southeast,9800.8882 653 | 53,female,39.6,1,no,southeast,10579.711 654 | 48,female,31.13,0,no,southeast,8280.6227 655 | 45,female,36.3,2,no,southeast,8527.532 656 | 59,female,35.2,0,no,southeast,12244.531 657 | 52,female,25.3,2,yes,southeast,24667.419 658 | 26,female,42.4,1,no,southwest,3410.324 659 | 27,male,33.155,2,no,northwest,4058.71245 660 | 48,female,35.91,1,no,northeast,26392.26029 661 | 57,female,28.785,4,no,northeast,14394.39815 662 | 37,male,46.53,3,no,southeast,6435.6237 663 | 57,female,23.98,1,no,southeast,22192.43711 664 | 32,female,31.54,1,no,northeast,5148.5526 665 | 18,male,33.66,0,no,southeast,1136.3994 666 | 64,female,22.99,0,yes,southeast,27037.9141 667 | 43,male,38.06,2,yes,southeast,42560.4304 668 | 49,male,28.7,1,no,southwest,8703.456 669 | 
40,female,32.775,2,yes,northwest,40003.33225 670 | 62,male,32.015,0,yes,northeast,45710.20785 671 | 40,female,29.81,1,no,southeast,6500.2359 672 | 30,male,31.57,3,no,southeast,4837.5823 673 | 29,female,31.16,0,no,northeast,3943.5954 674 | 36,male,29.7,0,no,southeast,4399.731 675 | 41,female,31.02,0,no,southeast,6185.3208 676 | 44,female,43.89,2,yes,southeast,46200.9851 677 | 45,male,21.375,0,no,northwest,7222.78625 678 | 55,female,40.81,3,no,southeast,12485.8009 679 | 60,male,31.35,3,yes,northwest,46130.5265 680 | 56,male,36.1,3,no,southwest,12363.547 681 | 49,female,23.18,2,no,northwest,10156.7832 682 | 21,female,17.4,1,no,southwest,2585.269 683 | 19,male,20.3,0,no,southwest,1242.26 684 | 39,male,35.3,2,yes,southwest,40103.89 685 | 53,male,24.32,0,no,northwest,9863.4718 686 | 33,female,18.5,1,no,southwest,4766.022 687 | 53,male,26.41,2,no,northeast,11244.3769 688 | 42,male,26.125,2,no,northeast,7729.64575 689 | 40,male,41.69,0,no,southeast,5438.7491 690 | 47,female,24.1,1,no,southwest,26236.57997 691 | 27,male,31.13,1,yes,southeast,34806.4677 692 | 21,male,27.36,0,no,northeast,2104.1134 693 | 47,male,36.2,1,no,southwest,8068.185 694 | 20,male,32.395,1,no,northwest,2362.22905 695 | 24,male,23.655,0,no,northwest,2352.96845 696 | 27,female,34.8,1,no,southwest,3577.999 697 | 26,female,40.185,0,no,northwest,3201.24515 698 | 53,female,32.3,2,no,northeast,29186.48236 699 | 41,male,35.75,1,yes,southeast,40273.6455 700 | 56,male,33.725,0,no,northwest,10976.24575 701 | 23,female,39.27,2,no,southeast,3500.6123 702 | 21,female,34.87,0,no,southeast,2020.5523 703 | 50,female,44.745,0,no,northeast,9541.69555 704 | 53,male,41.47,0,no,southeast,9504.3103 705 | 34,female,26.41,1,no,northwest,5385.3379 706 | 47,female,29.545,1,no,northwest,8930.93455 707 | 33,female,32.9,2,no,southwest,5375.038 708 | 51,female,38.06,0,yes,southeast,44400.4064 709 | 49,male,28.69,3,no,northwest,10264.4421 710 | 31,female,30.495,3,no,northeast,6113.23105 711 | 36,female,27.74,0,no,northeast,5469.0066 
712 | 18,male,35.2,1,no,southeast,1727.54 713 | 50,female,23.54,2,no,southeast,10107.2206 714 | 43,female,30.685,2,no,northwest,8310.83915 715 | 20,male,40.47,0,no,northeast,1984.4533 716 | 24,female,22.6,0,no,southwest,2457.502 717 | 60,male,28.9,0,no,southwest,12146.971 718 | 49,female,22.61,1,no,northwest,9566.9909 719 | 60,male,24.32,1,no,northwest,13112.6048 720 | 51,female,36.67,2,no,northwest,10848.1343 721 | 58,female,33.44,0,no,northwest,12231.6136 722 | 51,female,40.66,0,no,northeast,9875.6804 723 | 53,male,36.6,3,no,southwest,11264.541 724 | 62,male,37.4,0,no,southwest,12979.358 725 | 19,male,35.4,0,no,southwest,1263.249 726 | 50,female,27.075,1,no,northeast,10106.13425 727 | 30,female,39.05,3,yes,southeast,40932.4295 728 | 41,male,28.405,1,no,northwest,6664.68595 729 | 29,female,21.755,1,yes,northeast,16657.71745 730 | 18,female,40.28,0,no,northeast,2217.6012 731 | 41,female,36.08,1,no,southeast,6781.3542 732 | 35,male,24.42,3,yes,southeast,19361.9988 733 | 53,male,21.4,1,no,southwest,10065.413 734 | 24,female,30.1,3,no,southwest,4234.927 735 | 48,female,27.265,1,no,northeast,9447.25035 736 | 59,female,32.1,3,no,southwest,14007.222 737 | 49,female,34.77,1,no,northwest,9583.8933 738 | 37,female,38.39,0,yes,southeast,40419.0191 739 | 26,male,23.7,2,no,southwest,3484.331 740 | 23,male,31.73,3,yes,northeast,36189.1017 741 | 29,male,35.5,2,yes,southwest,44585.45587 742 | 45,male,24.035,2,no,northeast,8604.48365 743 | 27,male,29.15,0,yes,southeast,18246.4955 744 | 53,male,34.105,0,yes,northeast,43254.41795 745 | 31,female,26.62,0,no,southeast,3757.8448 746 | 50,male,26.41,0,no,northwest,8827.2099 747 | 50,female,30.115,1,no,northwest,9910.35985 748 | 34,male,27,2,no,southwest,11737.84884 749 | 19,male,21.755,0,no,northwest,1627.28245 750 | 47,female,36,1,no,southwest,8556.907 751 | 28,male,30.875,0,no,northwest,3062.50825 752 | 37,female,26.4,0,yes,southeast,19539.243 753 | 21,male,28.975,0,no,northwest,1906.35825 754 | 
64,male,37.905,0,no,northwest,14210.53595 755 | 58,female,22.77,0,no,southeast,11833.7823 756 | 24,male,33.63,4,no,northeast,17128.42608 757 | 31,male,27.645,2,no,northeast,5031.26955 758 | 39,female,22.8,3,no,northeast,7985.815 759 | 47,female,27.83,0,yes,southeast,23065.4207 760 | 30,male,37.43,3,no,northeast,5428.7277 761 | 18,male,38.17,0,yes,southeast,36307.7983 762 | 22,female,34.58,2,no,northeast,3925.7582 763 | 23,male,35.2,1,no,southwest,2416.955 764 | 33,male,27.1,1,yes,southwest,19040.876 765 | 27,male,26.03,0,no,northeast,3070.8087 766 | 45,female,25.175,2,no,northeast,9095.06825 767 | 57,female,31.825,0,no,northwest,11842.62375 768 | 47,male,32.3,1,no,southwest,8062.764 769 | 42,female,29,1,no,southwest,7050.642 770 | 64,female,39.7,0,no,southwest,14319.031 771 | 38,female,19.475,2,no,northwest,6933.24225 772 | 61,male,36.1,3,no,southwest,27941.28758 773 | 53,female,26.7,2,no,southwest,11150.78 774 | 44,female,36.48,0,no,northeast,12797.20962 775 | 19,female,28.88,0,yes,northwest,17748.5062 776 | 41,male,34.2,2,no,northwest,7261.741 777 | 51,male,33.33,3,no,southeast,10560.4917 778 | 40,male,32.3,2,no,northwest,6986.697 779 | 45,male,39.805,0,no,northeast,7448.40395 780 | 35,male,34.32,3,no,southeast,5934.3798 781 | 53,male,28.88,0,no,northwest,9869.8102 782 | 30,male,24.4,3,yes,southwest,18259.216 783 | 18,male,41.14,0,no,southeast,1146.7966 784 | 51,male,35.97,1,no,southeast,9386.1613 785 | 50,female,27.6,1,yes,southwest,24520.264 786 | 31,female,29.26,1,no,southeast,4350.5144 787 | 35,female,27.7,3,no,southwest,6414.178 788 | 60,male,36.955,0,no,northeast,12741.16745 789 | 21,male,36.86,0,no,northwest,1917.3184 790 | 29,male,22.515,3,no,northeast,5209.57885 791 | 62,female,29.92,0,no,southeast,13457.9608 792 | 39,female,41.8,0,no,southeast,5662.225 793 | 19,male,27.6,0,no,southwest,1252.407 794 | 22,female,23.18,0,no,northeast,2731.9122 795 | 53,male,20.9,0,yes,southeast,21195.818 796 | 39,female,31.92,2,no,northwest,7209.4918 797 | 
27,male,28.5,0,yes,northwest,18310.742 798 | 30,male,44.22,2,no,southeast,4266.1658 799 | 30,female,22.895,1,no,northeast,4719.52405 800 | 58,female,33.1,0,no,southwest,11848.141 801 | 33,male,24.795,0,yes,northeast,17904.52705 802 | 42,female,26.18,1,no,southeast,7046.7222 803 | 64,female,35.97,0,no,southeast,14313.8463 804 | 21,male,22.3,1,no,southwest,2103.08 805 | 18,female,42.24,0,yes,southeast,38792.6856 806 | 23,male,26.51,0,no,southeast,1815.8759 807 | 45,female,35.815,0,no,northwest,7731.85785 808 | 40,female,41.42,1,no,northwest,28476.73499 809 | 19,female,36.575,0,no,northwest,2136.88225 810 | 18,male,30.14,0,no,southeast,1131.5066 811 | 25,male,25.84,1,no,northeast,3309.7926 812 | 46,female,30.8,3,no,southwest,9414.92 813 | 33,female,42.94,3,no,northwest,6360.9936 814 | 54,male,21.01,2,no,southeast,11013.7119 815 | 28,male,22.515,2,no,northeast,4428.88785 816 | 36,male,34.43,2,no,southeast,5584.3057 817 | 20,female,31.46,0,no,southeast,1877.9294 818 | 24,female,24.225,0,no,northwest,2842.76075 819 | 23,male,37.1,3,no,southwest,3597.596 820 | 47,female,26.125,1,yes,northeast,23401.30575 821 | 33,female,35.53,0,yes,northwest,55135.40209 822 | 45,male,33.7,1,no,southwest,7445.918 823 | 26,male,17.67,0,no,northwest,2680.9493 824 | 18,female,31.13,0,no,southeast,1621.8827 825 | 44,female,29.81,2,no,southeast,8219.2039 826 | 60,male,24.32,0,no,northwest,12523.6048 827 | 64,female,31.825,2,no,northeast,16069.08475 828 | 56,male,31.79,2,yes,southeast,43813.8661 829 | 36,male,28.025,1,yes,northeast,20773.62775 830 | 41,male,30.78,3,yes,northeast,39597.4072 831 | 39,male,21.85,1,no,northwest,6117.4945 832 | 63,male,33.1,0,no,southwest,13393.756 833 | 36,female,25.84,0,no,northwest,5266.3656 834 | 28,female,23.845,2,no,northwest,4719.73655 835 | 58,male,34.39,0,no,northwest,11743.9341 836 | 36,male,33.82,1,no,northwest,5377.4578 837 | 42,male,35.97,2,no,southeast,7160.3303 838 | 36,male,31.5,0,no,southwest,4402.233 839 | 56,female,28.31,0,no,northeast,11657.7189 
840 | 35,female,23.465,2,no,northeast,6402.29135 841 | 59,female,31.35,0,no,northwest,12622.1795 842 | 21,male,31.1,0,no,southwest,1526.312 843 | 59,male,24.7,0,no,northeast,12323.936 844 | 23,female,32.78,2,yes,southeast,36021.0112 845 | 57,female,29.81,0,yes,southeast,27533.9129 846 | 53,male,30.495,0,no,northeast,10072.05505 847 | 60,female,32.45,0,yes,southeast,45008.9555 848 | 51,female,34.2,1,no,southwest,9872.701 849 | 23,male,50.38,1,no,southeast,2438.0552 850 | 27,female,24.1,0,no,southwest,2974.126 851 | 55,male,32.775,0,no,northwest,10601.63225 852 | 37,female,30.78,0,yes,northeast,37270.1512 853 | 61,male,32.3,2,no,northwest,14119.62 854 | 46,female,35.53,0,yes,northeast,42111.6647 855 | 53,female,23.75,2,no,northeast,11729.6795 856 | 49,female,23.845,3,yes,northeast,24106.91255 857 | 20,female,29.6,0,no,southwest,1875.344 858 | 48,female,33.11,0,yes,southeast,40974.1649 859 | 25,male,24.13,0,yes,northwest,15817.9857 860 | 25,female,32.23,1,no,southeast,18218.16139 861 | 57,male,28.1,0,no,southwest,10965.446 862 | 37,female,47.6,2,yes,southwest,46113.511 863 | 38,female,28,3,no,southwest,7151.092 864 | 55,female,33.535,2,no,northwest,12269.68865 865 | 36,female,19.855,0,no,northeast,5458.04645 866 | 51,male,25.4,0,no,southwest,8782.469 867 | 40,male,29.9,2,no,southwest,6600.361 868 | 18,male,37.29,0,no,southeast,1141.4451 869 | 57,male,43.7,1,no,southwest,11576.13 870 | 61,male,23.655,0,no,northeast,13129.60345 871 | 25,female,24.3,3,no,southwest,4391.652 872 | 50,male,36.2,0,no,southwest,8457.818 873 | 26,female,29.48,1,no,southeast,3392.3652 874 | 42,male,24.86,0,no,southeast,5966.8874 875 | 43,male,30.1,1,no,southwest,6849.026 876 | 44,male,21.85,3,no,northeast,8891.1395 877 | 23,female,28.12,0,no,northwest,2690.1138 878 | 49,female,27.1,1,no,southwest,26140.3603 879 | 33,male,33.44,5,no,southeast,6653.7886 880 | 41,male,28.8,1,no,southwest,6282.235 881 | 37,female,29.5,2,no,southwest,6311.952 882 | 22,male,34.8,3,no,southwest,3443.064 883 | 
23,male,27.36,1,no,northwest,2789.0574 884 | 21,female,22.135,0,no,northeast,2585.85065 885 | 51,female,37.05,3,yes,northeast,46255.1125 886 | 25,male,26.695,4,no,northwest,4877.98105 887 | 32,male,28.93,1,yes,southeast,19719.6947 888 | 57,male,28.975,0,yes,northeast,27218.43725 889 | 36,female,30.02,0,no,northwest,5272.1758 890 | 22,male,39.5,0,no,southwest,1682.597 891 | 57,male,33.63,1,no,northwest,11945.1327 892 | 64,female,26.885,0,yes,northwest,29330.98315 893 | 36,female,29.04,4,no,southeast,7243.8136 894 | 54,male,24.035,0,no,northeast,10422.91665 895 | 47,male,38.94,2,yes,southeast,44202.6536 896 | 62,male,32.11,0,no,northeast,13555.0049 897 | 61,female,44,0,no,southwest,13063.883 898 | 43,female,20.045,2,yes,northeast,19798.05455 899 | 19,male,25.555,1,no,northwest,2221.56445 900 | 18,female,40.26,0,no,southeast,1634.5734 901 | 19,female,22.515,0,no,northwest,2117.33885 902 | 49,male,22.515,0,no,northeast,8688.85885 903 | 60,male,40.92,0,yes,southeast,48673.5588 904 | 26,male,27.265,3,no,northeast,4661.28635 905 | 49,male,36.85,0,no,southeast,8125.7845 906 | 60,female,35.1,0,no,southwest,12644.589 907 | 26,female,29.355,2,no,northeast,4564.19145 908 | 27,male,32.585,3,no,northeast,4846.92015 909 | 44,female,32.34,1,no,southeast,7633.7206 910 | 63,male,39.8,3,no,southwest,15170.069 911 | 32,female,24.6,0,yes,southwest,17496.306 912 | 22,male,28.31,1,no,northwest,2639.0429 913 | 18,male,31.73,0,yes,northeast,33732.6867 914 | 59,female,26.695,3,no,northwest,14382.70905 915 | 44,female,27.5,1,no,southwest,7626.993 916 | 33,male,24.605,2,no,northwest,5257.50795 917 | 24,female,33.99,0,no,southeast,2473.3341 918 | 43,female,26.885,0,yes,northwest,21774.32215 919 | 45,male,22.895,0,yes,northeast,35069.37452 920 | 61,female,28.2,0,no,southwest,13041.921 921 | 35,female,34.21,1,no,southeast,5245.2269 922 | 62,female,25,0,no,southwest,13451.122 923 | 62,female,33.2,0,no,southwest,13462.52 924 | 38,male,31,1,no,southwest,5488.262 925 | 
34,male,35.815,0,no,northwest,4320.41085 926 | 43,male,23.2,0,no,southwest,6250.435 927 | 50,male,32.11,2,no,northeast,25333.33284 928 | 19,female,23.4,2,no,southwest,2913.569 929 | 57,female,20.1,1,no,southwest,12032.326 930 | 62,female,39.16,0,no,southeast,13470.8044 931 | 41,male,34.21,1,no,southeast,6289.7549 932 | 26,male,46.53,1,no,southeast,2927.0647 933 | 39,female,32.5,1,no,southwest,6238.298 934 | 46,male,25.8,5,no,southwest,10096.97 935 | 45,female,35.3,0,no,southwest,7348.142 936 | 32,male,37.18,2,no,southeast,4673.3922 937 | 59,female,27.5,0,no,southwest,12233.828 938 | 44,male,29.735,2,no,northeast,32108.66282 939 | 39,female,24.225,5,no,northwest,8965.79575 940 | 18,male,26.18,2,no,southeast,2304.0022 941 | 53,male,29.48,0,no,southeast,9487.6442 942 | 18,male,23.21,0,no,southeast,1121.8739 943 | 50,female,46.09,1,no,southeast,9549.5651 944 | 18,female,40.185,0,no,northeast,2217.46915 945 | 19,male,22.61,0,no,northwest,1628.4709 946 | 62,male,39.93,0,no,southeast,12982.8747 947 | 56,female,35.8,1,no,southwest,11674.13 948 | 42,male,35.8,2,no,southwest,7160.094 949 | 37,male,34.2,1,yes,northeast,39047.285 950 | 42,male,31.255,0,no,northwest,6358.77645 951 | 25,male,29.7,3,yes,southwest,19933.458 952 | 57,male,18.335,0,no,northeast,11534.87265 953 | 51,male,42.9,2,yes,southeast,47462.894 954 | 30,female,28.405,1,no,northwest,4527.18295 955 | 44,male,30.2,2,yes,southwest,38998.546 956 | 34,male,27.835,1,yes,northwest,20009.63365 957 | 31,male,39.49,1,no,southeast,3875.7341 958 | 54,male,30.8,1,yes,southeast,41999.52 959 | 24,male,26.79,1,no,northwest,12609.88702 960 | 43,male,34.96,1,yes,northeast,41034.2214 961 | 48,male,36.67,1,no,northwest,28468.91901 962 | 19,female,39.615,1,no,northwest,2730.10785 963 | 29,female,25.9,0,no,southwest,3353.284 964 | 63,female,35.2,1,no,southeast,14474.675 965 | 46,male,24.795,3,no,northeast,9500.57305 966 | 52,male,36.765,2,no,northwest,26467.09737 967 | 35,male,27.1,1,no,southwest,4746.344 968 | 
51,male,24.795,2,yes,northwest,23967.38305 969 | 44,male,25.365,1,no,northwest,7518.02535 970 | 21,male,25.745,2,no,northeast,3279.86855 971 | 39,female,34.32,5,no,southeast,8596.8278 972 | 50,female,28.16,3,no,southeast,10702.6424 973 | 34,female,23.56,0,no,northeast,4992.3764 974 | 22,female,20.235,0,no,northwest,2527.81865 975 | 19,female,40.5,0,no,southwest,1759.338 976 | 26,male,35.42,0,no,southeast,2322.6218 977 | 29,male,22.895,0,yes,northeast,16138.76205 978 | 48,male,40.15,0,no,southeast,7804.1605 979 | 26,male,29.15,1,no,southeast,2902.9065 980 | 45,female,39.995,3,no,northeast,9704.66805 981 | 36,female,29.92,0,no,southeast,4889.0368 982 | 54,male,25.46,1,no,northeast,25517.11363 983 | 34,male,21.375,0,no,northeast,4500.33925 984 | 31,male,25.9,3,yes,southwest,19199.944 985 | 27,female,30.59,1,no,northeast,16796.41194 986 | 20,male,30.115,5,no,northeast,4915.05985 987 | 44,female,25.8,1,no,southwest,7624.63 988 | 43,male,30.115,3,no,northwest,8410.04685 989 | 45,female,27.645,1,no,northwest,28340.18885 990 | 34,male,34.675,0,no,northeast,4518.82625 991 | 24,female,20.52,0,yes,northeast,14571.8908 992 | 26,female,19.8,1,no,southwest,3378.91 993 | 38,female,27.835,2,no,northeast,7144.86265 994 | 50,female,31.6,2,no,southwest,10118.424 995 | 38,male,28.27,1,no,southeast,5484.4673 996 | 27,female,20.045,3,yes,northwest,16420.49455 997 | 39,female,23.275,3,no,northeast,7986.47525 998 | 39,female,34.1,3,no,southwest,7418.522 999 | 63,female,36.85,0,no,southeast,13887.9685 1000 | 33,female,36.29,3,no,northeast,6551.7501 1001 | 36,female,26.885,0,no,northwest,5267.81815 1002 | 30,male,22.99,2,yes,northwest,17361.7661 1003 | 24,male,32.7,0,yes,southwest,34472.841 1004 | 24,male,25.8,0,no,southwest,1972.95 1005 | 48,male,29.6,0,no,southwest,21232.18226 1006 | 47,male,19.19,1,no,northeast,8627.5411 1007 | 29,male,31.73,2,no,northwest,4433.3877 1008 | 28,male,29.26,2,no,northeast,4438.2634 1009 | 47,male,28.215,3,yes,northwest,24915.22085 1010 | 
25,male,24.985,2,no,northeast,23241.47453 1011 | 51,male,27.74,1,no,northeast,9957.7216 1012 | 48,female,22.8,0,no,southwest,8269.044 1013 | 43,male,20.13,2,yes,southeast,18767.7377 1014 | 61,female,33.33,4,no,southeast,36580.28216 1015 | 48,male,32.3,1,no,northwest,8765.249 1016 | 38,female,27.6,0,no,southwest,5383.536 1017 | 59,male,25.46,0,no,northwest,12124.9924 1018 | 19,female,24.605,1,no,northwest,2709.24395 1019 | 26,female,34.2,2,no,southwest,3987.926 1020 | 54,female,35.815,3,no,northwest,12495.29085 1021 | 21,female,32.68,2,no,northwest,26018.95052 1022 | 51,male,37,0,no,southwest,8798.593 1023 | 22,female,31.02,3,yes,southeast,35595.5898 1024 | 47,male,36.08,1,yes,southeast,42211.1382 1025 | 18,male,23.32,1,no,southeast,1711.0268 1026 | 47,female,45.32,1,no,southeast,8569.8618 1027 | 21,female,34.6,0,no,southwest,2020.177 1028 | 19,male,26.03,1,yes,northwest,16450.8947 1029 | 23,male,18.715,0,no,northwest,21595.38229 1030 | 54,male,31.6,0,no,southwest,9850.432 1031 | 37,female,17.29,2,no,northeast,6877.9801 1032 | 46,female,23.655,1,yes,northwest,21677.28345 1033 | 55,female,35.2,0,yes,southeast,44423.803 1034 | 30,female,27.93,0,no,northeast,4137.5227 1035 | 18,male,21.565,0,yes,northeast,13747.87235 1036 | 61,male,38.38,0,no,northwest,12950.0712 1037 | 54,female,23,3,no,southwest,12094.478 1038 | 22,male,37.07,2,yes,southeast,37484.4493 1039 | 45,female,30.495,1,yes,northwest,39725.51805 1040 | 22,male,28.88,0,no,northeast,2250.8352 1041 | 19,male,27.265,2,no,northwest,22493.65964 1042 | 35,female,28.025,0,yes,northwest,20234.85475 1043 | 18,male,23.085,0,no,northeast,1704.70015 1044 | 20,male,30.685,0,yes,northeast,33475.81715 1045 | 28,female,25.8,0,no,southwest,3161.454 1046 | 55,male,35.245,1,no,northeast,11394.06555 1047 | 43,female,24.7,2,yes,northwest,21880.82 1048 | 43,female,25.08,0,no,northeast,7325.0482 1049 | 22,male,52.58,1,yes,southeast,44501.3982 1050 | 25,female,22.515,1,no,northwest,3594.17085 1051 | 
49,male,30.9,0,yes,southwest,39727.614 1052 | 44,female,36.955,1,no,northwest,8023.13545 1053 | 64,male,26.41,0,no,northeast,14394.5579 1054 | 49,male,29.83,1,no,northeast,9288.0267 1055 | 47,male,29.8,3,yes,southwest,25309.489 1056 | 27,female,21.47,0,no,northwest,3353.4703 1057 | 55,male,27.645,0,no,northwest,10594.50155 1058 | 48,female,28.9,0,no,southwest,8277.523 1059 | 45,female,31.79,0,no,southeast,17929.30337 1060 | 24,female,39.49,0,no,southeast,2480.9791 1061 | 32,male,33.82,1,no,northwest,4462.7218 1062 | 24,male,32.01,0,no,southeast,1981.5819 1063 | 57,male,27.94,1,no,southeast,11554.2236 1064 | 59,male,41.14,1,yes,southeast,48970.2476 1065 | 36,male,28.595,3,no,northwest,6548.19505 1066 | 29,female,25.6,4,no,southwest,5708.867 1067 | 42,female,25.3,1,no,southwest,7045.499 1068 | 48,male,37.29,2,no,southeast,8978.1851 1069 | 39,male,42.655,0,no,northeast,5757.41345 1070 | 63,male,21.66,1,no,northwest,14349.8544 1071 | 54,female,31.9,1,no,southeast,10928.849 1072 | 37,male,37.07,1,yes,southeast,39871.7043 1073 | 63,male,31.445,0,no,northeast,13974.45555 1074 | 21,male,31.255,0,no,northwest,1909.52745 1075 | 54,female,28.88,2,no,northeast,12096.6512 1076 | 60,female,18.335,0,no,northeast,13204.28565 1077 | 32,female,29.59,1,no,southeast,4562.8421 1078 | 47,female,32,1,no,southwest,8551.347 1079 | 21,male,26.03,0,no,northeast,2102.2647 1080 | 28,male,31.68,0,yes,southeast,34672.1472 1081 | 63,male,33.66,3,no,southeast,15161.5344 1082 | 18,male,21.78,2,no,southeast,11884.04858 1083 | 32,male,27.835,1,no,northwest,4454.40265 1084 | 38,male,19.95,1,no,northwest,5855.9025 1085 | 32,male,31.5,1,no,southwest,4076.497 1086 | 62,female,30.495,2,no,northwest,15019.76005 1087 | 39,female,18.3,5,yes,southwest,19023.26 1088 | 55,male,28.975,0,no,northeast,10796.35025 1089 | 57,male,31.54,0,no,northwest,11353.2276 1090 | 52,male,47.74,1,no,southeast,9748.9106 1091 | 56,male,22.1,0,no,southwest,10577.087 1092 | 47,male,36.19,0,yes,southeast,41676.0811 1093 | 
55,female,29.83,0,no,northeast,11286.5387 1094 | 23,male,32.7,3,no,southwest,3591.48 1095 | 22,female,30.4,0,yes,northwest,33907.548 1096 | 50,female,33.7,4,no,southwest,11299.343 1097 | 18,female,31.35,4,no,northeast,4561.1885 1098 | 51,female,34.96,2,yes,northeast,44641.1974 1099 | 22,male,33.77,0,no,southeast,1674.6323 1100 | 52,female,30.875,0,no,northeast,23045.56616 1101 | 25,female,33.99,1,no,southeast,3227.1211 1102 | 33,female,19.095,2,yes,northeast,16776.30405 1103 | 53,male,28.6,3,no,southwest,11253.421 1104 | 29,male,38.94,1,no,southeast,3471.4096 1105 | 58,male,36.08,0,no,southeast,11363.2832 1106 | 37,male,29.8,0,no,southwest,20420.60465 1107 | 54,female,31.24,0,no,southeast,10338.9316 1108 | 49,female,29.925,0,no,northwest,8988.15875 1109 | 50,female,26.22,2,no,northwest,10493.9458 1110 | 26,male,30,1,no,southwest,2904.088 1111 | 45,male,20.35,3,no,southeast,8605.3615 1112 | 54,female,32.3,1,no,northeast,11512.405 1113 | 38,male,38.39,3,yes,southeast,41949.2441 1114 | 48,female,25.85,3,yes,southeast,24180.9335 1115 | 28,female,26.315,3,no,northwest,5312.16985 1116 | 23,male,24.51,0,no,northeast,2396.0959 1117 | 55,male,32.67,1,no,southeast,10807.4863 1118 | 41,male,29.64,5,no,northeast,9222.4026 1119 | 25,male,33.33,2,yes,southeast,36124.5737 1120 | 33,male,35.75,1,yes,southeast,38282.7495 1121 | 30,female,19.95,3,no,northwest,5693.4305 1122 | 23,female,31.4,0,yes,southwest,34166.273 1123 | 46,male,38.17,2,no,southeast,8347.1643 1124 | 53,female,36.86,3,yes,northwest,46661.4424 1125 | 27,female,32.395,1,no,northeast,18903.49141 1126 | 23,female,42.75,1,yes,northeast,40904.1995 1127 | 63,female,25.08,0,no,northwest,14254.6082 1128 | 55,male,29.9,0,no,southwest,10214.636 1129 | 35,female,35.86,2,no,southeast,5836.5204 1130 | 34,male,32.8,1,no,southwest,14358.36437 1131 | 19,female,18.6,0,no,southwest,1728.897 1132 | 39,female,23.87,5,no,southeast,8582.3023 1133 | 27,male,45.9,2,no,southwest,3693.428 1134 | 57,male,40.28,0,no,northeast,20709.02034 1135 
| 52,female,18.335,0,no,northwest,9991.03765 1136 | 28,male,33.82,0,no,northwest,19673.33573 1137 | 50,female,28.12,3,no,northwest,11085.5868 1138 | 44,female,25,1,no,southwest,7623.518 1139 | 26,female,22.23,0,no,northwest,3176.2877 1140 | 33,male,30.25,0,no,southeast,3704.3545 1141 | 19,female,32.49,0,yes,northwest,36898.73308 1142 | 50,male,37.07,1,no,southeast,9048.0273 1143 | 41,female,32.6,3,no,southwest,7954.517 1144 | 52,female,24.86,0,no,southeast,27117.99378 1145 | 39,male,32.34,2,no,southeast,6338.0756 1146 | 50,male,32.3,2,no,southwest,9630.397 1147 | 52,male,32.775,3,no,northwest,11289.10925 1148 | 60,male,32.8,0,yes,southwest,52590.82939 1149 | 20,female,31.92,0,no,northwest,2261.5688 1150 | 55,male,21.5,1,no,southwest,10791.96 1151 | 42,male,34.1,0,no,southwest,5979.731 1152 | 18,female,30.305,0,no,northeast,2203.73595 1153 | 58,female,36.48,0,no,northwest,12235.8392 1154 | 43,female,32.56,3,yes,southeast,40941.2854 1155 | 35,female,35.815,1,no,northwest,5630.45785 1156 | 48,female,27.93,4,no,northwest,11015.1747 1157 | 36,female,22.135,3,no,northeast,7228.21565 1158 | 19,male,44.88,0,yes,southeast,39722.7462 1159 | 23,female,23.18,2,no,northwest,14426.07385 1160 | 20,female,30.59,0,no,northeast,2459.7201 1161 | 32,female,41.1,0,no,southwest,3989.841 1162 | 43,female,34.58,1,no,northwest,7727.2532 1163 | 34,male,42.13,2,no,southeast,5124.1887 1164 | 30,male,38.83,1,no,southeast,18963.17192 1165 | 18,female,28.215,0,no,northeast,2200.83085 1166 | 41,female,28.31,1,no,northwest,7153.5539 1167 | 35,female,26.125,0,no,northeast,5227.98875 1168 | 57,male,40.37,0,no,southeast,10982.5013 1169 | 29,female,24.6,2,no,southwest,4529.477 1170 | 32,male,35.2,2,no,southwest,4670.64 1171 | 37,female,34.105,1,no,northwest,6112.35295 1172 | 18,male,27.36,1,yes,northeast,17178.6824 1173 | 43,female,26.7,2,yes,southwest,22478.6 1174 | 56,female,41.91,0,no,southeast,11093.6229 1175 | 38,male,29.26,2,no,northwest,6457.8434 1176 | 29,male,32.11,2,no,northwest,4433.9159 
1177 | 22,female,27.1,0,no,southwest,2154.361 1178 | 52,female,24.13,1,yes,northwest,23887.6627 1179 | 40,female,27.4,1,no,southwest,6496.886 1180 | 23,female,34.865,0,no,northeast,2899.48935 1181 | 31,male,29.81,0,yes,southeast,19350.3689 1182 | 42,female,41.325,1,no,northeast,7650.77375 1183 | 24,female,29.925,0,no,northwest,2850.68375 1184 | 25,female,30.3,0,no,southwest,2632.992 1185 | 48,female,27.36,1,no,northeast,9447.3824 1186 | 23,female,28.49,1,yes,southeast,18328.2381 1187 | 45,male,23.56,2,no,northeast,8603.8234 1188 | 20,male,35.625,3,yes,northwest,37465.34375 1189 | 62,female,32.68,0,no,northwest,13844.7972 1190 | 43,female,25.27,1,yes,northeast,21771.3423 1191 | 23,female,28,0,no,southwest,13126.67745 1192 | 31,female,32.775,2,no,northwest,5327.40025 1193 | 41,female,21.755,1,no,northeast,13725.47184 1194 | 58,female,32.395,1,no,northeast,13019.16105 1195 | 48,female,36.575,0,no,northwest,8671.19125 1196 | 31,female,21.755,0,no,northwest,4134.08245 1197 | 19,female,27.93,3,no,northwest,18838.70366 1198 | 19,female,30.02,0,yes,northwest,33307.5508 1199 | 41,male,33.55,0,no,southeast,5699.8375 1200 | 40,male,29.355,1,no,northwest,6393.60345 1201 | 31,female,25.8,2,no,southwest,4934.705 1202 | 37,male,24.32,2,no,northwest,6198.7518 1203 | 46,male,40.375,2,no,northwest,8733.22925 1204 | 22,male,32.11,0,no,northwest,2055.3249 1205 | 51,male,32.3,1,no,northeast,9964.06 1206 | 18,female,27.28,3,yes,southeast,18223.4512 1207 | 35,male,17.86,1,no,northwest,5116.5004 1208 | 59,female,34.8,2,no,southwest,36910.60803 1209 | 36,male,33.4,2,yes,southwest,38415.474 1210 | 37,female,25.555,1,yes,northeast,20296.86345 1211 | 59,male,37.1,1,no,southwest,12347.172 1212 | 36,male,30.875,1,no,northwest,5373.36425 1213 | 39,male,34.1,2,no,southeast,23563.01618 1214 | 18,male,21.47,0,no,northeast,1702.4553 1215 | 52,female,33.3,2,no,southwest,10806.839 1216 | 27,female,31.255,1,no,northwest,3956.07145 1217 | 18,male,39.14,0,no,northeast,12890.05765 1218 | 
40,male,25.08,0,no,southeast,5415.6612 1219 | 29,male,37.29,2,no,southeast,4058.1161 1220 | 46,female,34.6,1,yes,southwest,41661.602 1221 | 38,female,30.21,3,no,northwest,7537.1639 1222 | 30,female,21.945,1,no,northeast,4718.20355 1223 | 40,male,24.97,2,no,southeast,6593.5083 1224 | 50,male,25.3,0,no,southeast,8442.667 1225 | 20,female,24.42,0,yes,southeast,26125.67477 1226 | 41,male,23.94,1,no,northeast,6858.4796 1227 | 33,female,39.82,1,no,southeast,4795.6568 1228 | 38,male,16.815,2,no,northeast,6640.54485 1229 | 42,male,37.18,2,no,southeast,7162.0122 1230 | 56,male,34.43,0,no,southeast,10594.2257 1231 | 58,male,30.305,0,no,northeast,11938.25595 1232 | 52,male,34.485,3,yes,northwest,60021.39897 1233 | 20,female,21.8,0,yes,southwest,20167.33603 1234 | 54,female,24.605,3,no,northwest,12479.70895 1235 | 58,male,23.3,0,no,southwest,11345.519 1236 | 45,female,27.83,2,no,southeast,8515.7587 1237 | 26,male,31.065,0,no,northwest,2699.56835 1238 | 63,female,21.66,0,no,northeast,14449.8544 1239 | 58,female,28.215,0,no,northwest,12224.35085 1240 | 37,male,22.705,3,no,northeast,6985.50695 1241 | 25,female,42.13,1,no,southeast,3238.4357 1242 | 52,male,41.8,2,yes,southeast,47269.854 1243 | 64,male,36.96,2,yes,southeast,49577.6624 1244 | 22,female,21.28,3,no,northwest,4296.2712 1245 | 28,female,33.11,0,no,southeast,3171.6149 1246 | 18,male,33.33,0,no,southeast,1135.9407 1247 | 28,male,24.3,5,no,southwest,5615.369 1248 | 45,female,25.7,3,no,southwest,9101.798 1249 | 33,male,29.4,4,no,southwest,6059.173 1250 | 18,female,39.82,0,no,southeast,1633.9618 1251 | 32,male,33.63,1,yes,northeast,37607.5277 1252 | 24,male,29.83,0,yes,northeast,18648.4217 1253 | 19,male,19.8,0,no,southwest,1241.565 1254 | 20,male,27.3,0,yes,southwest,16232.847 1255 | 40,female,29.3,4,no,southwest,15828.82173 1256 | 34,female,27.72,0,no,southeast,4415.1588 1257 | 42,female,37.9,0,no,southwest,6474.013 1258 | 51,female,36.385,3,no,northwest,11436.73815 1259 | 54,female,27.645,1,no,northwest,11305.93455 1260 | 
55,male,37.715,3,no,northwest,30063.58055 1261 | 52,female,23.18,0,no,northeast,10197.7722 1262 | 32,female,20.52,0,no,northeast,4544.2348 1263 | 28,male,37.1,1,no,southwest,3277.161 1264 | 41,female,28.05,1,no,southeast,6770.1925 1265 | 43,female,29.9,1,no,southwest,7337.748 1266 | 49,female,33.345,2,no,northeast,10370.91255 1267 | 64,male,23.76,0,yes,southeast,26926.5144 1268 | 55,female,30.5,0,no,southwest,10704.47 1269 | 24,male,31.065,0,yes,northeast,34254.05335 1270 | 20,female,33.3,0,no,southwest,1880.487 1271 | 45,male,27.5,3,no,southwest,8615.3 1272 | 26,male,33.915,1,no,northwest,3292.52985 1273 | 25,female,34.485,0,no,northwest,3021.80915 1274 | 43,male,25.52,5,no,southeast,14478.33015 1275 | 35,male,27.61,1,no,southeast,4747.0529 1276 | 26,male,27.06,0,yes,southeast,17043.3414 1277 | 57,male,23.7,0,no,southwest,10959.33 1278 | 22,female,30.4,0,no,northeast,2741.948 1279 | 32,female,29.735,0,no,northwest,4357.04365 1280 | 39,male,29.925,1,yes,northeast,22462.04375 1281 | 25,female,26.79,2,no,northwest,4189.1131 1282 | 48,female,33.33,0,no,southeast,8283.6807 1283 | 47,female,27.645,2,yes,northwest,24535.69855 1284 | 18,female,21.66,0,yes,northeast,14283.4594 1285 | 18,male,30.03,1,no,southeast,1720.3537 1286 | 61,male,36.3,1,yes,southwest,47403.88 1287 | 47,female,24.32,0,no,northeast,8534.6718 1288 | 28,female,17.29,0,no,northeast,3732.6251 1289 | 36,female,25.9,1,no,southwest,5472.449 1290 | 20,male,39.4,2,yes,southwest,38344.566 1291 | 44,male,34.32,1,no,southeast,7147.4728 1292 | 38,female,19.95,2,no,northeast,7133.9025 1293 | 19,male,34.9,0,yes,southwest,34828.654 1294 | 21,male,23.21,0,no,southeast,1515.3449 1295 | 46,male,25.745,3,no,northwest,9301.89355 1296 | 58,male,25.175,0,no,northeast,11931.12525 1297 | 20,male,22,1,no,southwest,1964.78 1298 | 18,male,26.125,0,no,northeast,1708.92575 1299 | 28,female,26.51,2,no,southeast,4340.4409 1300 | 33,male,27.455,2,no,northwest,5261.46945 1301 | 19,female,25.745,1,no,northwest,2710.82855 1302 | 
45,male,30.36,0,yes,southeast,62592.87309 1303 | 62,male,30.875,3,yes,northwest,46718.16325 1304 | 25,female,20.8,1,no,southwest,3208.787 1305 | 43,male,27.8,0,yes,southwest,37829.7242 1306 | 42,male,24.605,2,yes,northeast,21259.37795 1307 | 24,female,27.72,0,no,southeast,2464.6188 1308 | 29,female,21.85,0,yes,northeast,16115.3045 1309 | 32,male,28.12,4,yes,northwest,21472.4788 1310 | 25,female,30.2,0,yes,southwest,33900.653 1311 | 41,male,32.2,2,no,southwest,6875.961 1312 | 42,male,26.315,1,no,northwest,6940.90985 1313 | 33,female,26.695,0,no,northwest,4571.41305 1314 | 34,male,42.9,1,no,southwest,4536.259 1315 | 19,female,34.7,2,yes,southwest,36397.576 1316 | 30,female,23.655,3,yes,northwest,18765.87545 1317 | 18,male,28.31,1,no,northeast,11272.33139 1318 | 19,female,20.6,0,no,southwest,1731.677 1319 | 18,male,53.13,0,no,southeast,1163.4627 1320 | 35,male,39.71,4,no,northeast,19496.71917 1321 | 39,female,26.315,2,no,northwest,7201.70085 1322 | 31,male,31.065,3,no,northwest,5425.02335 1323 | 62,male,26.695,0,yes,northeast,28101.33305 1324 | 62,male,38.83,0,no,southeast,12981.3457 1325 | 42,female,40.37,2,yes,southeast,43896.3763 1326 | 31,male,25.935,1,no,northwest,4239.89265 1327 | 61,male,33.535,0,no,northeast,13143.33665 1328 | 42,female,32.87,0,no,northeast,7050.0213 1329 | 51,male,30.03,1,no,southeast,9377.9047 1330 | 23,female,24.225,2,no,northeast,22395.74424 1331 | 52,male,38.6,2,no,southwest,10325.206 1332 | 57,female,25.74,2,no,southeast,12629.1656 1333 | 23,female,33.4,0,no,southwest,10795.93733 1334 | 52,female,44.7,3,no,southwest,11411.685 1335 | 50,male,30.97,3,no,northwest,10600.5483 1336 | 18,female,31.92,0,no,northeast,2205.9808 1337 | 18,female,36.85,0,no,southeast,1629.8335 1338 | 21,female,25.8,0,no,southwest,2007.945 1339 | 61,female,29.07,0,yes,northwest,29141.3603 1340 | -------------------------------------------------------------------------------- /Assignments/Assignment 1/.DS_Store: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 1/.DS_Store -------------------------------------------------------------------------------- /Assignments/Assignment 1/Datasets/diabetes.csv: -------------------------------------------------------------------------------- 1 | Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome 2 | 6,148,72,35,0,33.6,0.627,50,1 3 | 1,85,66,29,0,26.6,0.351,31,0 4 | 8,183,64,0,0,23.3,0.672,32,1 5 | 1,89,66,23,94,28.1,0.167,21,0 6 | 0,137,40,35,168,43.1,2.288,33,1 7 | 5,116,74,0,0,25.6,0.201,30,0 8 | 3,78,50,32,88,31,0.248,26,1 9 | 10,115,0,0,0,35.3,0.134,29,0 10 | 2,197,70,45,543,30.5,0.158,53,1 11 | 8,125,96,0,0,0,0.232,54,1 12 | 4,110,92,0,0,37.6,0.191,30,0 13 | 10,168,74,0,0,38,0.537,34,1 14 | 10,139,80,0,0,27.1,1.441,57,0 15 | 1,189,60,23,846,30.1,0.398,59,1 16 | 5,166,72,19,175,25.8,0.587,51,1 17 | 7,100,0,0,0,30,0.484,32,1 18 | 0,118,84,47,230,45.8,0.551,31,1 19 | 7,107,74,0,0,29.6,0.254,31,1 20 | 1,103,30,38,83,43.3,0.183,33,0 21 | 1,115,70,30,96,34.6,0.529,32,1 22 | 3,126,88,41,235,39.3,0.704,27,0 23 | 8,99,84,0,0,35.4,0.388,50,0 24 | 7,196,90,0,0,39.8,0.451,41,1 25 | 9,119,80,35,0,29,0.263,29,1 26 | 11,143,94,33,146,36.6,0.254,51,1 27 | 10,125,70,26,115,31.1,0.205,41,1 28 | 7,147,76,0,0,39.4,0.257,43,1 29 | 1,97,66,15,140,23.2,0.487,22,0 30 | 13,145,82,19,110,22.2,0.245,57,0 31 | 5,117,92,0,0,34.1,0.337,38,0 32 | 5,109,75,26,0,36,0.546,60,0 33 | 3,158,76,36,245,31.6,0.851,28,1 34 | 3,88,58,11,54,24.8,0.267,22,0 35 | 6,92,92,0,0,19.9,0.188,28,0 36 | 10,122,78,31,0,27.6,0.512,45,0 37 | 4,103,60,33,192,24,0.966,33,0 38 | 11,138,76,0,0,33.2,0.42,35,0 39 | 9,102,76,37,0,32.9,0.665,46,1 40 | 2,90,68,42,0,38.2,0.503,27,1 41 | 4,111,72,47,207,37.1,1.39,56,1 42 | 3,180,64,25,70,34,0.271,26,0 43 | 7,133,84,0,0,40.2,0.696,37,0 44 | 
7,106,92,18,0,22.7,0.235,48,0 45 | 9,171,110,24,240,45.4,0.721,54,1 46 | 7,159,64,0,0,27.4,0.294,40,0 47 | 0,180,66,39,0,42,1.893,25,1 48 | 1,146,56,0,0,29.7,0.564,29,0 49 | 2,71,70,27,0,28,0.586,22,0 50 | 7,103,66,32,0,39.1,0.344,31,1 51 | 7,105,0,0,0,0,0.305,24,0 52 | 1,103,80,11,82,19.4,0.491,22,0 53 | 1,101,50,15,36,24.2,0.526,26,0 54 | 5,88,66,21,23,24.4,0.342,30,0 55 | 8,176,90,34,300,33.7,0.467,58,1 56 | 7,150,66,42,342,34.7,0.718,42,0 57 | 1,73,50,10,0,23,0.248,21,0 58 | 7,187,68,39,304,37.7,0.254,41,1 59 | 0,100,88,60,110,46.8,0.962,31,0 60 | 0,146,82,0,0,40.5,1.781,44,0 61 | 0,105,64,41,142,41.5,0.173,22,0 62 | 2,84,0,0,0,0,0.304,21,0 63 | 8,133,72,0,0,32.9,0.27,39,1 64 | 5,44,62,0,0,25,0.587,36,0 65 | 2,141,58,34,128,25.4,0.699,24,0 66 | 7,114,66,0,0,32.8,0.258,42,1 67 | 5,99,74,27,0,29,0.203,32,0 68 | 0,109,88,30,0,32.5,0.855,38,1 69 | 2,109,92,0,0,42.7,0.845,54,0 70 | 1,95,66,13,38,19.6,0.334,25,0 71 | 4,146,85,27,100,28.9,0.189,27,0 72 | 2,100,66,20,90,32.9,0.867,28,1 73 | 5,139,64,35,140,28.6,0.411,26,0 74 | 13,126,90,0,0,43.4,0.583,42,1 75 | 4,129,86,20,270,35.1,0.231,23,0 76 | 1,79,75,30,0,32,0.396,22,0 77 | 1,0,48,20,0,24.7,0.14,22,0 78 | 7,62,78,0,0,32.6,0.391,41,0 79 | 5,95,72,33,0,37.7,0.37,27,0 80 | 0,131,0,0,0,43.2,0.27,26,1 81 | 2,112,66,22,0,25,0.307,24,0 82 | 3,113,44,13,0,22.4,0.14,22,0 83 | 2,74,0,0,0,0,0.102,22,0 84 | 7,83,78,26,71,29.3,0.767,36,0 85 | 0,101,65,28,0,24.6,0.237,22,0 86 | 5,137,108,0,0,48.8,0.227,37,1 87 | 2,110,74,29,125,32.4,0.698,27,0 88 | 13,106,72,54,0,36.6,0.178,45,0 89 | 2,100,68,25,71,38.5,0.324,26,0 90 | 15,136,70,32,110,37.1,0.153,43,1 91 | 1,107,68,19,0,26.5,0.165,24,0 92 | 1,80,55,0,0,19.1,0.258,21,0 93 | 4,123,80,15,176,32,0.443,34,0 94 | 7,81,78,40,48,46.7,0.261,42,0 95 | 4,134,72,0,0,23.8,0.277,60,1 96 | 2,142,82,18,64,24.7,0.761,21,0 97 | 6,144,72,27,228,33.9,0.255,40,0 98 | 2,92,62,28,0,31.6,0.13,24,0 99 | 1,71,48,18,76,20.4,0.323,22,0 100 | 6,93,50,30,64,28.7,0.356,23,0 101 | 
1,122,90,51,220,49.7,0.325,31,1 102 | 1,163,72,0,0,39,1.222,33,1 103 | 1,151,60,0,0,26.1,0.179,22,0 104 | 0,125,96,0,0,22.5,0.262,21,0 105 | 1,81,72,18,40,26.6,0.283,24,0 106 | 2,85,65,0,0,39.6,0.93,27,0 107 | 1,126,56,29,152,28.7,0.801,21,0 108 | 1,96,122,0,0,22.4,0.207,27,0 109 | 4,144,58,28,140,29.5,0.287,37,0 110 | 3,83,58,31,18,34.3,0.336,25,0 111 | 0,95,85,25,36,37.4,0.247,24,1 112 | 3,171,72,33,135,33.3,0.199,24,1 113 | 8,155,62,26,495,34,0.543,46,1 114 | 1,89,76,34,37,31.2,0.192,23,0 115 | 4,76,62,0,0,34,0.391,25,0 116 | 7,160,54,32,175,30.5,0.588,39,1 117 | 4,146,92,0,0,31.2,0.539,61,1 118 | 5,124,74,0,0,34,0.22,38,1 119 | 5,78,48,0,0,33.7,0.654,25,0 120 | 4,97,60,23,0,28.2,0.443,22,0 121 | 4,99,76,15,51,23.2,0.223,21,0 122 | 0,162,76,56,100,53.2,0.759,25,1 123 | 6,111,64,39,0,34.2,0.26,24,0 124 | 2,107,74,30,100,33.6,0.404,23,0 125 | 5,132,80,0,0,26.8,0.186,69,0 126 | 0,113,76,0,0,33.3,0.278,23,1 127 | 1,88,30,42,99,55,0.496,26,1 128 | 3,120,70,30,135,42.9,0.452,30,0 129 | 1,118,58,36,94,33.3,0.261,23,0 130 | 1,117,88,24,145,34.5,0.403,40,1 131 | 0,105,84,0,0,27.9,0.741,62,1 132 | 4,173,70,14,168,29.7,0.361,33,1 133 | 9,122,56,0,0,33.3,1.114,33,1 134 | 3,170,64,37,225,34.5,0.356,30,1 135 | 8,84,74,31,0,38.3,0.457,39,0 136 | 2,96,68,13,49,21.1,0.647,26,0 137 | 2,125,60,20,140,33.8,0.088,31,0 138 | 0,100,70,26,50,30.8,0.597,21,0 139 | 0,93,60,25,92,28.7,0.532,22,0 140 | 0,129,80,0,0,31.2,0.703,29,0 141 | 5,105,72,29,325,36.9,0.159,28,0 142 | 3,128,78,0,0,21.1,0.268,55,0 143 | 5,106,82,30,0,39.5,0.286,38,0 144 | 2,108,52,26,63,32.5,0.318,22,0 145 | 10,108,66,0,0,32.4,0.272,42,1 146 | 4,154,62,31,284,32.8,0.237,23,0 147 | 0,102,75,23,0,0,0.572,21,0 148 | 9,57,80,37,0,32.8,0.096,41,0 149 | 2,106,64,35,119,30.5,1.4,34,0 150 | 5,147,78,0,0,33.7,0.218,65,0 151 | 2,90,70,17,0,27.3,0.085,22,0 152 | 1,136,74,50,204,37.4,0.399,24,0 153 | 4,114,65,0,0,21.9,0.432,37,0 154 | 9,156,86,28,155,34.3,1.189,42,1 155 | 1,153,82,42,485,40.6,0.687,23,0 156 | 
8,188,78,0,0,47.9,0.137,43,1 157 | 7,152,88,44,0,50,0.337,36,1 158 | 2,99,52,15,94,24.6,0.637,21,0 159 | 1,109,56,21,135,25.2,0.833,23,0 160 | 2,88,74,19,53,29,0.229,22,0 161 | 17,163,72,41,114,40.9,0.817,47,1 162 | 4,151,90,38,0,29.7,0.294,36,0 163 | 7,102,74,40,105,37.2,0.204,45,0 164 | 0,114,80,34,285,44.2,0.167,27,0 165 | 2,100,64,23,0,29.7,0.368,21,0 166 | 0,131,88,0,0,31.6,0.743,32,1 167 | 6,104,74,18,156,29.9,0.722,41,1 168 | 3,148,66,25,0,32.5,0.256,22,0 169 | 4,120,68,0,0,29.6,0.709,34,0 170 | 4,110,66,0,0,31.9,0.471,29,0 171 | 3,111,90,12,78,28.4,0.495,29,0 172 | 6,102,82,0,0,30.8,0.18,36,1 173 | 6,134,70,23,130,35.4,0.542,29,1 174 | 2,87,0,23,0,28.9,0.773,25,0 175 | 1,79,60,42,48,43.5,0.678,23,0 176 | 2,75,64,24,55,29.7,0.37,33,0 177 | 8,179,72,42,130,32.7,0.719,36,1 178 | 6,85,78,0,0,31.2,0.382,42,0 179 | 0,129,110,46,130,67.1,0.319,26,1 180 | 5,143,78,0,0,45,0.19,47,0 181 | 5,130,82,0,0,39.1,0.956,37,1 182 | 6,87,80,0,0,23.2,0.084,32,0 183 | 0,119,64,18,92,34.9,0.725,23,0 184 | 1,0,74,20,23,27.7,0.299,21,0 185 | 5,73,60,0,0,26.8,0.268,27,0 186 | 4,141,74,0,0,27.6,0.244,40,0 187 | 7,194,68,28,0,35.9,0.745,41,1 188 | 8,181,68,36,495,30.1,0.615,60,1 189 | 1,128,98,41,58,32,1.321,33,1 190 | 8,109,76,39,114,27.9,0.64,31,1 191 | 5,139,80,35,160,31.6,0.361,25,1 192 | 3,111,62,0,0,22.6,0.142,21,0 193 | 9,123,70,44,94,33.1,0.374,40,0 194 | 7,159,66,0,0,30.4,0.383,36,1 195 | 11,135,0,0,0,52.3,0.578,40,1 196 | 8,85,55,20,0,24.4,0.136,42,0 197 | 5,158,84,41,210,39.4,0.395,29,1 198 | 1,105,58,0,0,24.3,0.187,21,0 199 | 3,107,62,13,48,22.9,0.678,23,1 200 | 4,109,64,44,99,34.8,0.905,26,1 201 | 4,148,60,27,318,30.9,0.15,29,1 202 | 0,113,80,16,0,31,0.874,21,0 203 | 1,138,82,0,0,40.1,0.236,28,0 204 | 0,108,68,20,0,27.3,0.787,32,0 205 | 2,99,70,16,44,20.4,0.235,27,0 206 | 6,103,72,32,190,37.7,0.324,55,0 207 | 5,111,72,28,0,23.9,0.407,27,0 208 | 8,196,76,29,280,37.5,0.605,57,1 209 | 5,162,104,0,0,37.7,0.151,52,1 210 | 1,96,64,27,87,33.2,0.289,21,0 211 | 
7,184,84,33,0,35.5,0.355,41,1 212 | 2,81,60,22,0,27.7,0.29,25,0 213 | 0,147,85,54,0,42.8,0.375,24,0 214 | 7,179,95,31,0,34.2,0.164,60,0 215 | 0,140,65,26,130,42.6,0.431,24,1 216 | 9,112,82,32,175,34.2,0.26,36,1 217 | 12,151,70,40,271,41.8,0.742,38,1 218 | 5,109,62,41,129,35.8,0.514,25,1 219 | 6,125,68,30,120,30,0.464,32,0 220 | 5,85,74,22,0,29,1.224,32,1 221 | 5,112,66,0,0,37.8,0.261,41,1 222 | 0,177,60,29,478,34.6,1.072,21,1 223 | 2,158,90,0,0,31.6,0.805,66,1 224 | 7,119,0,0,0,25.2,0.209,37,0 225 | 7,142,60,33,190,28.8,0.687,61,0 226 | 1,100,66,15,56,23.6,0.666,26,0 227 | 1,87,78,27,32,34.6,0.101,22,0 228 | 0,101,76,0,0,35.7,0.198,26,0 229 | 3,162,52,38,0,37.2,0.652,24,1 230 | 4,197,70,39,744,36.7,2.329,31,0 231 | 0,117,80,31,53,45.2,0.089,24,0 232 | 4,142,86,0,0,44,0.645,22,1 233 | 6,134,80,37,370,46.2,0.238,46,1 234 | 1,79,80,25,37,25.4,0.583,22,0 235 | 4,122,68,0,0,35,0.394,29,0 236 | 3,74,68,28,45,29.7,0.293,23,0 237 | 4,171,72,0,0,43.6,0.479,26,1 238 | 7,181,84,21,192,35.9,0.586,51,1 239 | 0,179,90,27,0,44.1,0.686,23,1 240 | 9,164,84,21,0,30.8,0.831,32,1 241 | 0,104,76,0,0,18.4,0.582,27,0 242 | 1,91,64,24,0,29.2,0.192,21,0 243 | 4,91,70,32,88,33.1,0.446,22,0 244 | 3,139,54,0,0,25.6,0.402,22,1 245 | 6,119,50,22,176,27.1,1.318,33,1 246 | 2,146,76,35,194,38.2,0.329,29,0 247 | 9,184,85,15,0,30,1.213,49,1 248 | 10,122,68,0,0,31.2,0.258,41,0 249 | 0,165,90,33,680,52.3,0.427,23,0 250 | 9,124,70,33,402,35.4,0.282,34,0 251 | 1,111,86,19,0,30.1,0.143,23,0 252 | 9,106,52,0,0,31.2,0.38,42,0 253 | 2,129,84,0,0,28,0.284,27,0 254 | 2,90,80,14,55,24.4,0.249,24,0 255 | 0,86,68,32,0,35.8,0.238,25,0 256 | 12,92,62,7,258,27.6,0.926,44,1 257 | 1,113,64,35,0,33.6,0.543,21,1 258 | 3,111,56,39,0,30.1,0.557,30,0 259 | 2,114,68,22,0,28.7,0.092,25,0 260 | 1,193,50,16,375,25.9,0.655,24,0 261 | 11,155,76,28,150,33.3,1.353,51,1 262 | 3,191,68,15,130,30.9,0.299,34,0 263 | 3,141,0,0,0,30,0.761,27,1 264 | 4,95,70,32,0,32.1,0.612,24,0 265 | 3,142,80,15,0,32.4,0.2,63,0 266 | 
4,123,62,0,0,32,0.226,35,1 267 | 5,96,74,18,67,33.6,0.997,43,0 268 | 0,138,0,0,0,36.3,0.933,25,1 269 | 2,128,64,42,0,40,1.101,24,0 270 | 0,102,52,0,0,25.1,0.078,21,0 271 | 2,146,0,0,0,27.5,0.24,28,1 272 | 10,101,86,37,0,45.6,1.136,38,1 273 | 2,108,62,32,56,25.2,0.128,21,0 274 | 3,122,78,0,0,23,0.254,40,0 275 | 1,71,78,50,45,33.2,0.422,21,0 276 | 13,106,70,0,0,34.2,0.251,52,0 277 | 2,100,70,52,57,40.5,0.677,25,0 278 | 7,106,60,24,0,26.5,0.296,29,1 279 | 0,104,64,23,116,27.8,0.454,23,0 280 | 5,114,74,0,0,24.9,0.744,57,0 281 | 2,108,62,10,278,25.3,0.881,22,0 282 | 0,146,70,0,0,37.9,0.334,28,1 283 | 10,129,76,28,122,35.9,0.28,39,0 284 | 7,133,88,15,155,32.4,0.262,37,0 285 | 7,161,86,0,0,30.4,0.165,47,1 286 | 2,108,80,0,0,27,0.259,52,1 287 | 7,136,74,26,135,26,0.647,51,0 288 | 5,155,84,44,545,38.7,0.619,34,0 289 | 1,119,86,39,220,45.6,0.808,29,1 290 | 4,96,56,17,49,20.8,0.34,26,0 291 | 5,108,72,43,75,36.1,0.263,33,0 292 | 0,78,88,29,40,36.9,0.434,21,0 293 | 0,107,62,30,74,36.6,0.757,25,1 294 | 2,128,78,37,182,43.3,1.224,31,1 295 | 1,128,48,45,194,40.5,0.613,24,1 296 | 0,161,50,0,0,21.9,0.254,65,0 297 | 6,151,62,31,120,35.5,0.692,28,0 298 | 2,146,70,38,360,28,0.337,29,1 299 | 0,126,84,29,215,30.7,0.52,24,0 300 | 14,100,78,25,184,36.6,0.412,46,1 301 | 8,112,72,0,0,23.6,0.84,58,0 302 | 0,167,0,0,0,32.3,0.839,30,1 303 | 2,144,58,33,135,31.6,0.422,25,1 304 | 5,77,82,41,42,35.8,0.156,35,0 305 | 5,115,98,0,0,52.9,0.209,28,1 306 | 3,150,76,0,0,21,0.207,37,0 307 | 2,120,76,37,105,39.7,0.215,29,0 308 | 10,161,68,23,132,25.5,0.326,47,1 309 | 0,137,68,14,148,24.8,0.143,21,0 310 | 0,128,68,19,180,30.5,1.391,25,1 311 | 2,124,68,28,205,32.9,0.875,30,1 312 | 6,80,66,30,0,26.2,0.313,41,0 313 | 0,106,70,37,148,39.4,0.605,22,0 314 | 2,155,74,17,96,26.6,0.433,27,1 315 | 3,113,50,10,85,29.5,0.626,25,0 316 | 7,109,80,31,0,35.9,1.127,43,1 317 | 2,112,68,22,94,34.1,0.315,26,0 318 | 3,99,80,11,64,19.3,0.284,30,0 319 | 3,182,74,0,0,30.5,0.345,29,1 320 | 3,115,66,39,140,38.1,0.15,28,0 321 | 
6,194,78,0,0,23.5,0.129,59,1 322 | 4,129,60,12,231,27.5,0.527,31,0 323 | 3,112,74,30,0,31.6,0.197,25,1 324 | 0,124,70,20,0,27.4,0.254,36,1 325 | 13,152,90,33,29,26.8,0.731,43,1 326 | 2,112,75,32,0,35.7,0.148,21,0 327 | 1,157,72,21,168,25.6,0.123,24,0 328 | 1,122,64,32,156,35.1,0.692,30,1 329 | 10,179,70,0,0,35.1,0.2,37,0 330 | 2,102,86,36,120,45.5,0.127,23,1 331 | 6,105,70,32,68,30.8,0.122,37,0 332 | 8,118,72,19,0,23.1,1.476,46,0 333 | 2,87,58,16,52,32.7,0.166,25,0 334 | 1,180,0,0,0,43.3,0.282,41,1 335 | 12,106,80,0,0,23.6,0.137,44,0 336 | 1,95,60,18,58,23.9,0.26,22,0 337 | 0,165,76,43,255,47.9,0.259,26,0 338 | 0,117,0,0,0,33.8,0.932,44,0 339 | 5,115,76,0,0,31.2,0.343,44,1 340 | 9,152,78,34,171,34.2,0.893,33,1 341 | 7,178,84,0,0,39.9,0.331,41,1 342 | 1,130,70,13,105,25.9,0.472,22,0 343 | 1,95,74,21,73,25.9,0.673,36,0 344 | 1,0,68,35,0,32,0.389,22,0 345 | 5,122,86,0,0,34.7,0.29,33,0 346 | 8,95,72,0,0,36.8,0.485,57,0 347 | 8,126,88,36,108,38.5,0.349,49,0 348 | 1,139,46,19,83,28.7,0.654,22,0 349 | 3,116,0,0,0,23.5,0.187,23,0 350 | 3,99,62,19,74,21.8,0.279,26,0 351 | 5,0,80,32,0,41,0.346,37,1 352 | 4,92,80,0,0,42.2,0.237,29,0 353 | 4,137,84,0,0,31.2,0.252,30,0 354 | 3,61,82,28,0,34.4,0.243,46,0 355 | 1,90,62,12,43,27.2,0.58,24,0 356 | 3,90,78,0,0,42.7,0.559,21,0 357 | 9,165,88,0,0,30.4,0.302,49,1 358 | 1,125,50,40,167,33.3,0.962,28,1 359 | 13,129,0,30,0,39.9,0.569,44,1 360 | 12,88,74,40,54,35.3,0.378,48,0 361 | 1,196,76,36,249,36.5,0.875,29,1 362 | 5,189,64,33,325,31.2,0.583,29,1 363 | 5,158,70,0,0,29.8,0.207,63,0 364 | 5,103,108,37,0,39.2,0.305,65,0 365 | 4,146,78,0,0,38.5,0.52,67,1 366 | 4,147,74,25,293,34.9,0.385,30,0 367 | 5,99,54,28,83,34,0.499,30,0 368 | 6,124,72,0,0,27.6,0.368,29,1 369 | 0,101,64,17,0,21,0.252,21,0 370 | 3,81,86,16,66,27.5,0.306,22,0 371 | 1,133,102,28,140,32.8,0.234,45,1 372 | 3,173,82,48,465,38.4,2.137,25,1 373 | 0,118,64,23,89,0,1.731,21,0 374 | 0,84,64,22,66,35.8,0.545,21,0 375 | 2,105,58,40,94,34.9,0.225,25,0 376 | 
2,122,52,43,158,36.2,0.816,28,0 377 | 12,140,82,43,325,39.2,0.528,58,1 378 | 0,98,82,15,84,25.2,0.299,22,0 379 | 1,87,60,37,75,37.2,0.509,22,0 380 | 4,156,75,0,0,48.3,0.238,32,1 381 | 0,93,100,39,72,43.4,1.021,35,0 382 | 1,107,72,30,82,30.8,0.821,24,0 383 | 0,105,68,22,0,20,0.236,22,0 384 | 1,109,60,8,182,25.4,0.947,21,0 385 | 1,90,62,18,59,25.1,1.268,25,0 386 | 1,125,70,24,110,24.3,0.221,25,0 387 | 1,119,54,13,50,22.3,0.205,24,0 388 | 5,116,74,29,0,32.3,0.66,35,1 389 | 8,105,100,36,0,43.3,0.239,45,1 390 | 5,144,82,26,285,32,0.452,58,1 391 | 3,100,68,23,81,31.6,0.949,28,0 392 | 1,100,66,29,196,32,0.444,42,0 393 | 5,166,76,0,0,45.7,0.34,27,1 394 | 1,131,64,14,415,23.7,0.389,21,0 395 | 4,116,72,12,87,22.1,0.463,37,0 396 | 4,158,78,0,0,32.9,0.803,31,1 397 | 2,127,58,24,275,27.7,1.6,25,0 398 | 3,96,56,34,115,24.7,0.944,39,0 399 | 0,131,66,40,0,34.3,0.196,22,1 400 | 3,82,70,0,0,21.1,0.389,25,0 401 | 3,193,70,31,0,34.9,0.241,25,1 402 | 4,95,64,0,0,32,0.161,31,1 403 | 6,137,61,0,0,24.2,0.151,55,0 404 | 5,136,84,41,88,35,0.286,35,1 405 | 9,72,78,25,0,31.6,0.28,38,0 406 | 5,168,64,0,0,32.9,0.135,41,1 407 | 2,123,48,32,165,42.1,0.52,26,0 408 | 4,115,72,0,0,28.9,0.376,46,1 409 | 0,101,62,0,0,21.9,0.336,25,0 410 | 8,197,74,0,0,25.9,1.191,39,1 411 | 1,172,68,49,579,42.4,0.702,28,1 412 | 6,102,90,39,0,35.7,0.674,28,0 413 | 1,112,72,30,176,34.4,0.528,25,0 414 | 1,143,84,23,310,42.4,1.076,22,0 415 | 1,143,74,22,61,26.2,0.256,21,0 416 | 0,138,60,35,167,34.6,0.534,21,1 417 | 3,173,84,33,474,35.7,0.258,22,1 418 | 1,97,68,21,0,27.2,1.095,22,0 419 | 4,144,82,32,0,38.5,0.554,37,1 420 | 1,83,68,0,0,18.2,0.624,27,0 421 | 3,129,64,29,115,26.4,0.219,28,1 422 | 1,119,88,41,170,45.3,0.507,26,0 423 | 2,94,68,18,76,26,0.561,21,0 424 | 0,102,64,46,78,40.6,0.496,21,0 425 | 2,115,64,22,0,30.8,0.421,21,0 426 | 8,151,78,32,210,42.9,0.516,36,1 427 | 4,184,78,39,277,37,0.264,31,1 428 | 0,94,0,0,0,0,0.256,25,0 429 | 1,181,64,30,180,34.1,0.328,38,1 430 | 0,135,94,46,145,40.6,0.284,26,0 431 | 
1,95,82,25,180,35,0.233,43,1 432 | 2,99,0,0,0,22.2,0.108,23,0 433 | 3,89,74,16,85,30.4,0.551,38,0 434 | 1,80,74,11,60,30,0.527,22,0 435 | 2,139,75,0,0,25.6,0.167,29,0 436 | 1,90,68,8,0,24.5,1.138,36,0 437 | 0,141,0,0,0,42.4,0.205,29,1 438 | 12,140,85,33,0,37.4,0.244,41,0 439 | 5,147,75,0,0,29.9,0.434,28,0 440 | 1,97,70,15,0,18.2,0.147,21,0 441 | 6,107,88,0,0,36.8,0.727,31,0 442 | 0,189,104,25,0,34.3,0.435,41,1 443 | 2,83,66,23,50,32.2,0.497,22,0 444 | 4,117,64,27,120,33.2,0.23,24,0 445 | 8,108,70,0,0,30.5,0.955,33,1 446 | 4,117,62,12,0,29.7,0.38,30,1 447 | 0,180,78,63,14,59.4,2.42,25,1 448 | 1,100,72,12,70,25.3,0.658,28,0 449 | 0,95,80,45,92,36.5,0.33,26,0 450 | 0,104,64,37,64,33.6,0.51,22,1 451 | 0,120,74,18,63,30.5,0.285,26,0 452 | 1,82,64,13,95,21.2,0.415,23,0 453 | 2,134,70,0,0,28.9,0.542,23,1 454 | 0,91,68,32,210,39.9,0.381,25,0 455 | 2,119,0,0,0,19.6,0.832,72,0 456 | 2,100,54,28,105,37.8,0.498,24,0 457 | 14,175,62,30,0,33.6,0.212,38,1 458 | 1,135,54,0,0,26.7,0.687,62,0 459 | 5,86,68,28,71,30.2,0.364,24,0 460 | 10,148,84,48,237,37.6,1.001,51,1 461 | 9,134,74,33,60,25.9,0.46,81,0 462 | 9,120,72,22,56,20.8,0.733,48,0 463 | 1,71,62,0,0,21.8,0.416,26,0 464 | 8,74,70,40,49,35.3,0.705,39,0 465 | 5,88,78,30,0,27.6,0.258,37,0 466 | 10,115,98,0,0,24,1.022,34,0 467 | 0,124,56,13,105,21.8,0.452,21,0 468 | 0,74,52,10,36,27.8,0.269,22,0 469 | 0,97,64,36,100,36.8,0.6,25,0 470 | 8,120,0,0,0,30,0.183,38,1 471 | 6,154,78,41,140,46.1,0.571,27,0 472 | 1,144,82,40,0,41.3,0.607,28,0 473 | 0,137,70,38,0,33.2,0.17,22,0 474 | 0,119,66,27,0,38.8,0.259,22,0 475 | 7,136,90,0,0,29.9,0.21,50,0 476 | 4,114,64,0,0,28.9,0.126,24,0 477 | 0,137,84,27,0,27.3,0.231,59,0 478 | 2,105,80,45,191,33.7,0.711,29,1 479 | 7,114,76,17,110,23.8,0.466,31,0 480 | 8,126,74,38,75,25.9,0.162,39,0 481 | 4,132,86,31,0,28,0.419,63,0 482 | 3,158,70,30,328,35.5,0.344,35,1 483 | 0,123,88,37,0,35.2,0.197,29,0 484 | 4,85,58,22,49,27.8,0.306,28,0 485 | 0,84,82,31,125,38.2,0.233,23,0 486 | 0,145,0,0,0,44.2,0.63,31,1 487 
| 0,135,68,42,250,42.3,0.365,24,1 488 | 1,139,62,41,480,40.7,0.536,21,0 489 | 0,173,78,32,265,46.5,1.159,58,0 490 | 4,99,72,17,0,25.6,0.294,28,0 491 | 8,194,80,0,0,26.1,0.551,67,0 492 | 2,83,65,28,66,36.8,0.629,24,0 493 | 2,89,90,30,0,33.5,0.292,42,0 494 | 4,99,68,38,0,32.8,0.145,33,0 495 | 4,125,70,18,122,28.9,1.144,45,1 496 | 3,80,0,0,0,0,0.174,22,0 497 | 6,166,74,0,0,26.6,0.304,66,0 498 | 5,110,68,0,0,26,0.292,30,0 499 | 2,81,72,15,76,30.1,0.547,25,0 500 | 7,195,70,33,145,25.1,0.163,55,1 501 | 6,154,74,32,193,29.3,0.839,39,0 502 | 2,117,90,19,71,25.2,0.313,21,0 503 | 3,84,72,32,0,37.2,0.267,28,0 504 | 6,0,68,41,0,39,0.727,41,1 505 | 7,94,64,25,79,33.3,0.738,41,0 506 | 3,96,78,39,0,37.3,0.238,40,0 507 | 10,75,82,0,0,33.3,0.263,38,0 508 | 0,180,90,26,90,36.5,0.314,35,1 509 | 1,130,60,23,170,28.6,0.692,21,0 510 | 2,84,50,23,76,30.4,0.968,21,0 511 | 8,120,78,0,0,25,0.409,64,0 512 | 12,84,72,31,0,29.7,0.297,46,1 513 | 0,139,62,17,210,22.1,0.207,21,0 514 | 9,91,68,0,0,24.2,0.2,58,0 515 | 2,91,62,0,0,27.3,0.525,22,0 516 | 3,99,54,19,86,25.6,0.154,24,0 517 | 3,163,70,18,105,31.6,0.268,28,1 518 | 9,145,88,34,165,30.3,0.771,53,1 519 | 7,125,86,0,0,37.6,0.304,51,0 520 | 13,76,60,0,0,32.8,0.18,41,0 521 | 6,129,90,7,326,19.6,0.582,60,0 522 | 2,68,70,32,66,25,0.187,25,0 523 | 3,124,80,33,130,33.2,0.305,26,0 524 | 6,114,0,0,0,0,0.189,26,0 525 | 9,130,70,0,0,34.2,0.652,45,1 526 | 3,125,58,0,0,31.6,0.151,24,0 527 | 3,87,60,18,0,21.8,0.444,21,0 528 | 1,97,64,19,82,18.2,0.299,21,0 529 | 3,116,74,15,105,26.3,0.107,24,0 530 | 0,117,66,31,188,30.8,0.493,22,0 531 | 0,111,65,0,0,24.6,0.66,31,0 532 | 2,122,60,18,106,29.8,0.717,22,0 533 | 0,107,76,0,0,45.3,0.686,24,0 534 | 1,86,66,52,65,41.3,0.917,29,0 535 | 6,91,0,0,0,29.8,0.501,31,0 536 | 1,77,56,30,56,33.3,1.251,24,0 537 | 4,132,0,0,0,32.9,0.302,23,1 538 | 0,105,90,0,0,29.6,0.197,46,0 539 | 0,57,60,0,0,21.7,0.735,67,0 540 | 0,127,80,37,210,36.3,0.804,23,0 541 | 3,129,92,49,155,36.4,0.968,32,1 542 | 8,100,74,40,215,39.4,0.661,43,1 543 
| 3,128,72,25,190,32.4,0.549,27,1 544 | 10,90,85,32,0,34.9,0.825,56,1 545 | 4,84,90,23,56,39.5,0.159,25,0 546 | 1,88,78,29,76,32,0.365,29,0 547 | 8,186,90,35,225,34.5,0.423,37,1 548 | 5,187,76,27,207,43.6,1.034,53,1 549 | 4,131,68,21,166,33.1,0.16,28,0 550 | 1,164,82,43,67,32.8,0.341,50,0 551 | 4,189,110,31,0,28.5,0.68,37,0 552 | 1,116,70,28,0,27.4,0.204,21,0 553 | 3,84,68,30,106,31.9,0.591,25,0 554 | 6,114,88,0,0,27.8,0.247,66,0 555 | 1,88,62,24,44,29.9,0.422,23,0 556 | 1,84,64,23,115,36.9,0.471,28,0 557 | 7,124,70,33,215,25.5,0.161,37,0 558 | 1,97,70,40,0,38.1,0.218,30,0 559 | 8,110,76,0,0,27.8,0.237,58,0 560 | 11,103,68,40,0,46.2,0.126,42,0 561 | 11,85,74,0,0,30.1,0.3,35,0 562 | 6,125,76,0,0,33.8,0.121,54,1 563 | 0,198,66,32,274,41.3,0.502,28,1 564 | 1,87,68,34,77,37.6,0.401,24,0 565 | 6,99,60,19,54,26.9,0.497,32,0 566 | 0,91,80,0,0,32.4,0.601,27,0 567 | 2,95,54,14,88,26.1,0.748,22,0 568 | 1,99,72,30,18,38.6,0.412,21,0 569 | 6,92,62,32,126,32,0.085,46,0 570 | 4,154,72,29,126,31.3,0.338,37,0 571 | 0,121,66,30,165,34.3,0.203,33,1 572 | 3,78,70,0,0,32.5,0.27,39,0 573 | 2,130,96,0,0,22.6,0.268,21,0 574 | 3,111,58,31,44,29.5,0.43,22,0 575 | 2,98,60,17,120,34.7,0.198,22,0 576 | 1,143,86,30,330,30.1,0.892,23,0 577 | 1,119,44,47,63,35.5,0.28,25,0 578 | 6,108,44,20,130,24,0.813,35,0 579 | 2,118,80,0,0,42.9,0.693,21,1 580 | 10,133,68,0,0,27,0.245,36,0 581 | 2,197,70,99,0,34.7,0.575,62,1 582 | 0,151,90,46,0,42.1,0.371,21,1 583 | 6,109,60,27,0,25,0.206,27,0 584 | 12,121,78,17,0,26.5,0.259,62,0 585 | 8,100,76,0,0,38.7,0.19,42,0 586 | 8,124,76,24,600,28.7,0.687,52,1 587 | 1,93,56,11,0,22.5,0.417,22,0 588 | 8,143,66,0,0,34.9,0.129,41,1 589 | 6,103,66,0,0,24.3,0.249,29,0 590 | 3,176,86,27,156,33.3,1.154,52,1 591 | 0,73,0,0,0,21.1,0.342,25,0 592 | 11,111,84,40,0,46.8,0.925,45,1 593 | 2,112,78,50,140,39.4,0.175,24,0 594 | 3,132,80,0,0,34.4,0.402,44,1 595 | 2,82,52,22,115,28.5,1.699,25,0 596 | 6,123,72,45,230,33.6,0.733,34,0 597 | 0,188,82,14,185,32,0.682,22,1 598 | 
0,67,76,0,0,45.3,0.194,46,0 599 | 1,89,24,19,25,27.8,0.559,21,0 600 | 1,173,74,0,0,36.8,0.088,38,1 601 | 1,109,38,18,120,23.1,0.407,26,0 602 | 1,108,88,19,0,27.1,0.4,24,0 603 | 6,96,0,0,0,23.7,0.19,28,0 604 | 1,124,74,36,0,27.8,0.1,30,0 605 | 7,150,78,29,126,35.2,0.692,54,1 606 | 4,183,0,0,0,28.4,0.212,36,1 607 | 1,124,60,32,0,35.8,0.514,21,0 608 | 1,181,78,42,293,40,1.258,22,1 609 | 1,92,62,25,41,19.5,0.482,25,0 610 | 0,152,82,39,272,41.5,0.27,27,0 611 | 1,111,62,13,182,24,0.138,23,0 612 | 3,106,54,21,158,30.9,0.292,24,0 613 | 3,174,58,22,194,32.9,0.593,36,1 614 | 7,168,88,42,321,38.2,0.787,40,1 615 | 6,105,80,28,0,32.5,0.878,26,0 616 | 11,138,74,26,144,36.1,0.557,50,1 617 | 3,106,72,0,0,25.8,0.207,27,0 618 | 6,117,96,0,0,28.7,0.157,30,0 619 | 2,68,62,13,15,20.1,0.257,23,0 620 | 9,112,82,24,0,28.2,1.282,50,1 621 | 0,119,0,0,0,32.4,0.141,24,1 622 | 2,112,86,42,160,38.4,0.246,28,0 623 | 2,92,76,20,0,24.2,1.698,28,0 624 | 6,183,94,0,0,40.8,1.461,45,0 625 | 0,94,70,27,115,43.5,0.347,21,0 626 | 2,108,64,0,0,30.8,0.158,21,0 627 | 4,90,88,47,54,37.7,0.362,29,0 628 | 0,125,68,0,0,24.7,0.206,21,0 629 | 0,132,78,0,0,32.4,0.393,21,0 630 | 5,128,80,0,0,34.6,0.144,45,0 631 | 4,94,65,22,0,24.7,0.148,21,0 632 | 7,114,64,0,0,27.4,0.732,34,1 633 | 0,102,78,40,90,34.5,0.238,24,0 634 | 2,111,60,0,0,26.2,0.343,23,0 635 | 1,128,82,17,183,27.5,0.115,22,0 636 | 10,92,62,0,0,25.9,0.167,31,0 637 | 13,104,72,0,0,31.2,0.465,38,1 638 | 5,104,74,0,0,28.8,0.153,48,0 639 | 2,94,76,18,66,31.6,0.649,23,0 640 | 7,97,76,32,91,40.9,0.871,32,1 641 | 1,100,74,12,46,19.5,0.149,28,0 642 | 0,102,86,17,105,29.3,0.695,27,0 643 | 4,128,70,0,0,34.3,0.303,24,0 644 | 6,147,80,0,0,29.5,0.178,50,1 645 | 4,90,0,0,0,28,0.61,31,0 646 | 3,103,72,30,152,27.6,0.73,27,0 647 | 2,157,74,35,440,39.4,0.134,30,0 648 | 1,167,74,17,144,23.4,0.447,33,1 649 | 0,179,50,36,159,37.8,0.455,22,1 650 | 11,136,84,35,130,28.3,0.26,42,1 651 | 0,107,60,25,0,26.4,0.133,23,0 652 | 1,91,54,25,100,25.2,0.234,23,0 653 | 
1,117,60,23,106,33.8,0.466,27,0 654 | 5,123,74,40,77,34.1,0.269,28,0 655 | 2,120,54,0,0,26.8,0.455,27,0 656 | 1,106,70,28,135,34.2,0.142,22,0 657 | 2,155,52,27,540,38.7,0.24,25,1 658 | 2,101,58,35,90,21.8,0.155,22,0 659 | 1,120,80,48,200,38.9,1.162,41,0 660 | 11,127,106,0,0,39,0.19,51,0 661 | 3,80,82,31,70,34.2,1.292,27,1 662 | 10,162,84,0,0,27.7,0.182,54,0 663 | 1,199,76,43,0,42.9,1.394,22,1 664 | 8,167,106,46,231,37.6,0.165,43,1 665 | 9,145,80,46,130,37.9,0.637,40,1 666 | 6,115,60,39,0,33.7,0.245,40,1 667 | 1,112,80,45,132,34.8,0.217,24,0 668 | 4,145,82,18,0,32.5,0.235,70,1 669 | 10,111,70,27,0,27.5,0.141,40,1 670 | 6,98,58,33,190,34,0.43,43,0 671 | 9,154,78,30,100,30.9,0.164,45,0 672 | 6,165,68,26,168,33.6,0.631,49,0 673 | 1,99,58,10,0,25.4,0.551,21,0 674 | 10,68,106,23,49,35.5,0.285,47,0 675 | 3,123,100,35,240,57.3,0.88,22,0 676 | 8,91,82,0,0,35.6,0.587,68,0 677 | 6,195,70,0,0,30.9,0.328,31,1 678 | 9,156,86,0,0,24.8,0.23,53,1 679 | 0,93,60,0,0,35.3,0.263,25,0 680 | 3,121,52,0,0,36,0.127,25,1 681 | 2,101,58,17,265,24.2,0.614,23,0 682 | 2,56,56,28,45,24.2,0.332,22,0 683 | 0,162,76,36,0,49.6,0.364,26,1 684 | 0,95,64,39,105,44.6,0.366,22,0 685 | 4,125,80,0,0,32.3,0.536,27,1 686 | 5,136,82,0,0,0,0.64,69,0 687 | 2,129,74,26,205,33.2,0.591,25,0 688 | 3,130,64,0,0,23.1,0.314,22,0 689 | 1,107,50,19,0,28.3,0.181,29,0 690 | 1,140,74,26,180,24.1,0.828,23,0 691 | 1,144,82,46,180,46.1,0.335,46,1 692 | 8,107,80,0,0,24.6,0.856,34,0 693 | 13,158,114,0,0,42.3,0.257,44,1 694 | 2,121,70,32,95,39.1,0.886,23,0 695 | 7,129,68,49,125,38.5,0.439,43,1 696 | 2,90,60,0,0,23.5,0.191,25,0 697 | 7,142,90,24,480,30.4,0.128,43,1 698 | 3,169,74,19,125,29.9,0.268,31,1 699 | 0,99,0,0,0,25,0.253,22,0 700 | 4,127,88,11,155,34.5,0.598,28,0 701 | 4,118,70,0,0,44.5,0.904,26,0 702 | 2,122,76,27,200,35.9,0.483,26,0 703 | 6,125,78,31,0,27.6,0.565,49,1 704 | 1,168,88,29,0,35,0.905,52,1 705 | 2,129,0,0,0,38.5,0.304,41,0 706 | 4,110,76,20,100,28.4,0.118,27,0 707 | 6,80,80,36,0,39.8,0.177,28,0 708 | 
10,115,0,0,0,0,0.261,30,1 709 | 2,127,46,21,335,34.4,0.176,22,0 710 | 9,164,78,0,0,32.8,0.148,45,1 711 | 2,93,64,32,160,38,0.674,23,1 712 | 3,158,64,13,387,31.2,0.295,24,0 713 | 5,126,78,27,22,29.6,0.439,40,0 714 | 10,129,62,36,0,41.2,0.441,38,1 715 | 0,134,58,20,291,26.4,0.352,21,0 716 | 3,102,74,0,0,29.5,0.121,32,0 717 | 7,187,50,33,392,33.9,0.826,34,1 718 | 3,173,78,39,185,33.8,0.97,31,1 719 | 10,94,72,18,0,23.1,0.595,56,0 720 | 1,108,60,46,178,35.5,0.415,24,0 721 | 5,97,76,27,0,35.6,0.378,52,1 722 | 4,83,86,19,0,29.3,0.317,34,0 723 | 1,114,66,36,200,38.1,0.289,21,0 724 | 1,149,68,29,127,29.3,0.349,42,1 725 | 5,117,86,30,105,39.1,0.251,42,0 726 | 1,111,94,0,0,32.8,0.265,45,0 727 | 4,112,78,40,0,39.4,0.236,38,0 728 | 1,116,78,29,180,36.1,0.496,25,0 729 | 0,141,84,26,0,32.4,0.433,22,0 730 | 2,175,88,0,0,22.9,0.326,22,0 731 | 2,92,52,0,0,30.1,0.141,22,0 732 | 3,130,78,23,79,28.4,0.323,34,1 733 | 8,120,86,0,0,28.4,0.259,22,1 734 | 2,174,88,37,120,44.5,0.646,24,1 735 | 2,106,56,27,165,29,0.426,22,0 736 | 2,105,75,0,0,23.3,0.56,53,0 737 | 4,95,60,32,0,35.4,0.284,28,0 738 | 0,126,86,27,120,27.4,0.515,21,0 739 | 8,65,72,23,0,32,0.6,42,0 740 | 2,99,60,17,160,36.6,0.453,21,0 741 | 1,102,74,0,0,39.5,0.293,42,1 742 | 11,120,80,37,150,42.3,0.785,48,1 743 | 3,102,44,20,94,30.8,0.4,26,0 744 | 1,109,58,18,116,28.5,0.219,22,0 745 | 9,140,94,0,0,32.7,0.734,45,1 746 | 13,153,88,37,140,40.6,1.174,39,0 747 | 12,100,84,33,105,30,0.488,46,0 748 | 1,147,94,41,0,49.3,0.358,27,1 749 | 1,81,74,41,57,46.3,1.096,32,0 750 | 3,187,70,22,200,36.4,0.408,36,1 751 | 6,162,62,0,0,24.3,0.178,50,1 752 | 4,136,70,0,0,31.2,1.182,22,1 753 | 1,121,78,39,74,39,0.261,28,0 754 | 3,108,62,24,0,26,0.223,25,0 755 | 0,181,88,44,510,43.3,0.222,26,1 756 | 8,154,78,32,0,32.4,0.443,45,1 757 | 1,128,88,39,110,36.5,1.057,37,1 758 | 7,137,90,41,0,32,0.391,39,0 759 | 0,123,72,0,0,36.3,0.258,52,1 760 | 1,106,76,0,0,37.5,0.197,26,0 761 | 6,190,92,0,0,35.5,0.278,66,1 762 | 2,88,58,26,16,28.4,0.766,22,0 763 | 
9,170,74,31,0,44,0.403,43,1 764 | 9,89,62,0,0,22.5,0.142,33,0 765 | 10,101,76,48,180,32.9,0.171,63,0 766 | 2,122,70,27,0,36.8,0.34,27,0 767 | 5,121,72,23,112,26.2,0.245,30,0 768 | 1,126,60,0,0,30.1,0.349,47,1 769 | 1,93,70,31,0,30.4,0.315,23,0 770 | -------------------------------------------------------------------------------- /Assignments/Assignment 1/iAAA - Assignment 1 - Eng.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 1/iAAA - Assignment 1 - Eng.pdf -------------------------------------------------------------------------------- /Assignments/Assignment 1/iAAA - Assignment 1 - Per.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 1/iAAA - Assignment 1 - Per.pdf -------------------------------------------------------------------------------- /Assignments/Assignment 2/iAAA - Assignment 2 - En.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 2/iAAA - Assignment 2 - En.pdf -------------------------------------------------------------------------------- /Assignments/Assignment 2/iAAA - Assignment 2 - Per.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 2/iAAA - Assignment 2 - Per.pdf -------------------------------------------------------------------------------- /Assignments/Assignment 3 Solution/Mall_Customers.csv: -------------------------------------------------------------------------------- 1 | 
CustomerID,Gender,Age,Annual Income (k$),Spending Score (1-100) 2 | 1,Male,19,15,39 3 | 2,Male,21,15,81 4 | 3,Female,20,16,6 5 | 4,Female,23,16,77 6 | 5,Female,31,17,40 7 | 6,Female,22,17,76 8 | 7,Female,35,18,6 9 | 8,Female,23,18,94 10 | 9,Male,64,19,3 11 | 10,Female,30,19,72 12 | 11,Male,67,19,14 13 | 12,Female,35,19,99 14 | 13,Female,58,20,15 15 | 14,Female,24,20,77 16 | 15,Male,37,20,13 17 | 16,Male,22,20,79 18 | 17,Female,35,21,35 19 | 18,Male,20,21,66 20 | 19,Male,52,23,29 21 | 20,Female,35,23,98 22 | 21,Male,35,24,35 23 | 22,Male,25,24,73 24 | 23,Female,46,25,5 25 | 24,Male,31,25,73 26 | 25,Female,54,28,14 27 | 26,Male,29,28,82 28 | 27,Female,45,28,32 29 | 28,Male,35,28,61 30 | 29,Female,40,29,31 31 | 30,Female,23,29,87 32 | 31,Male,60,30,4 33 | 32,Female,21,30,73 34 | 33,Male,53,33,4 35 | 34,Male,18,33,92 36 | 35,Female,49,33,14 37 | 36,Female,21,33,81 38 | 37,Female,42,34,17 39 | 38,Female,30,34,73 40 | 39,Female,36,37,26 41 | 40,Female,20,37,75 42 | 41,Female,65,38,35 43 | 42,Male,24,38,92 44 | 43,Male,48,39,36 45 | 44,Female,31,39,61 46 | 45,Female,49,39,28 47 | 46,Female,24,39,65 48 | 47,Female,50,40,55 49 | 48,Female,27,40,47 50 | 49,Female,29,40,42 51 | 50,Female,31,40,42 52 | 51,Female,49,42,52 53 | 52,Male,33,42,60 54 | 53,Female,31,43,54 55 | 54,Male,59,43,60 56 | 55,Female,50,43,45 57 | 56,Male,47,43,41 58 | 57,Female,51,44,50 59 | 58,Male,69,44,46 60 | 59,Female,27,46,51 61 | 60,Male,53,46,46 62 | 61,Male,70,46,56 63 | 62,Male,19,46,55 64 | 63,Female,67,47,52 65 | 64,Female,54,47,59 66 | 65,Male,63,48,51 67 | 66,Male,18,48,59 68 | 67,Female,43,48,50 69 | 68,Female,68,48,48 70 | 69,Male,19,48,59 71 | 70,Female,32,48,47 72 | 71,Male,70,49,55 73 | 72,Female,47,49,42 74 | 73,Female,60,50,49 75 | 74,Female,60,50,56 76 | 75,Male,59,54,47 77 | 76,Male,26,54,54 78 | 77,Female,45,54,53 79 | 78,Male,40,54,48 80 | 79,Female,23,54,52 81 | 80,Female,49,54,42 82 | 81,Male,57,54,51 83 | 82,Male,38,54,55 84 | 83,Male,67,54,41 85 | 84,Female,46,54,44 86 | 
85,Female,21,54,57 87 | 86,Male,48,54,46 88 | 87,Female,55,57,58 89 | 88,Female,22,57,55 90 | 89,Female,34,58,60 91 | 90,Female,50,58,46 92 | 91,Female,68,59,55 93 | 92,Male,18,59,41 94 | 93,Male,48,60,49 95 | 94,Female,40,60,40 96 | 95,Female,32,60,42 97 | 96,Male,24,60,52 98 | 97,Female,47,60,47 99 | 98,Female,27,60,50 100 | 99,Male,48,61,42 101 | 100,Male,20,61,49 102 | 101,Female,23,62,41 103 | 102,Female,49,62,48 104 | 103,Male,67,62,59 105 | 104,Male,26,62,55 106 | 105,Male,49,62,56 107 | 106,Female,21,62,42 108 | 107,Female,66,63,50 109 | 108,Male,54,63,46 110 | 109,Male,68,63,43 111 | 110,Male,66,63,48 112 | 111,Male,65,63,52 113 | 112,Female,19,63,54 114 | 113,Female,38,64,42 115 | 114,Male,19,64,46 116 | 115,Female,18,65,48 117 | 116,Female,19,65,50 118 | 117,Female,63,65,43 119 | 118,Female,49,65,59 120 | 119,Female,51,67,43 121 | 120,Female,50,67,57 122 | 121,Male,27,67,56 123 | 122,Female,38,67,40 124 | 123,Female,40,69,58 125 | 124,Male,39,69,91 126 | 125,Female,23,70,29 127 | 126,Female,31,70,77 128 | 127,Male,43,71,35 129 | 128,Male,40,71,95 130 | 129,Male,59,71,11 131 | 130,Male,38,71,75 132 | 131,Male,47,71,9 133 | 132,Male,39,71,75 134 | 133,Female,25,72,34 135 | 134,Female,31,72,71 136 | 135,Male,20,73,5 137 | 136,Female,29,73,88 138 | 137,Female,44,73,7 139 | 138,Male,32,73,73 140 | 139,Male,19,74,10 141 | 140,Female,35,74,72 142 | 141,Female,57,75,5 143 | 142,Male,32,75,93 144 | 143,Female,28,76,40 145 | 144,Female,32,76,87 146 | 145,Male,25,77,12 147 | 146,Male,28,77,97 148 | 147,Male,48,77,36 149 | 148,Female,32,77,74 150 | 149,Female,34,78,22 151 | 150,Male,34,78,90 152 | 151,Male,43,78,17 153 | 152,Male,39,78,88 154 | 153,Female,44,78,20 155 | 154,Female,38,78,76 156 | 155,Female,47,78,16 157 | 156,Female,27,78,89 158 | 157,Male,37,78,1 159 | 158,Female,30,78,78 160 | 159,Male,34,78,1 161 | 160,Female,30,78,73 162 | 161,Female,56,79,35 163 | 162,Female,29,79,83 164 | 163,Male,19,81,5 165 | 164,Female,31,81,93 166 | 165,Male,50,85,26 167 | 
166,Female,36,85,75 168 | 167,Male,42,86,20 169 | 168,Female,33,86,95 170 | 169,Female,36,87,27 171 | 170,Male,32,87,63 172 | 171,Male,40,87,13 173 | 172,Male,28,87,75 174 | 173,Male,36,87,10 175 | 174,Male,36,87,92 176 | 175,Female,52,88,13 177 | 176,Female,30,88,86 178 | 177,Male,58,88,15 179 | 178,Male,27,88,69 180 | 179,Male,59,93,14 181 | 180,Male,35,93,90 182 | 181,Female,37,97,32 183 | 182,Female,32,97,86 184 | 183,Male,46,98,15 185 | 184,Female,29,98,88 186 | 185,Female,41,99,39 187 | 186,Male,30,99,97 188 | 187,Female,54,101,24 189 | 188,Male,28,101,68 190 | 189,Female,41,103,17 191 | 190,Female,36,103,85 192 | 191,Female,34,103,23 193 | 192,Female,32,103,69 194 | 193,Male,33,113,8 195 | 194,Female,38,113,91 196 | 195,Female,47,120,16 197 | 196,Female,35,120,79 198 | 197,Female,45,126,28 199 | 198,Male,32,126,74 200 | 199,Male,32,137,18 201 | 200,Male,30,137,83 202 | -------------------------------------------------------------------------------- /Assignments/Assignment 3/Mall_Customers.csv: -------------------------------------------------------------------------------- 1 | CustomerID,Gender,Age,Annual Income (k$),Spending Score (1-100) 2 | 1,Male,19,15,39 3 | 2,Male,21,15,81 4 | 3,Female,20,16,6 5 | 4,Female,23,16,77 6 | 5,Female,31,17,40 7 | 6,Female,22,17,76 8 | 7,Female,35,18,6 9 | 8,Female,23,18,94 10 | 9,Male,64,19,3 11 | 10,Female,30,19,72 12 | 11,Male,67,19,14 13 | 12,Female,35,19,99 14 | 13,Female,58,20,15 15 | 14,Female,24,20,77 16 | 15,Male,37,20,13 17 | 16,Male,22,20,79 18 | 17,Female,35,21,35 19 | 18,Male,20,21,66 20 | 19,Male,52,23,29 21 | 20,Female,35,23,98 22 | 21,Male,35,24,35 23 | 22,Male,25,24,73 24 | 23,Female,46,25,5 25 | 24,Male,31,25,73 26 | 25,Female,54,28,14 27 | 26,Male,29,28,82 28 | 27,Female,45,28,32 29 | 28,Male,35,28,61 30 | 29,Female,40,29,31 31 | 30,Female,23,29,87 32 | 31,Male,60,30,4 33 | 32,Female,21,30,73 34 | 33,Male,53,33,4 35 | 34,Male,18,33,92 36 | 35,Female,49,33,14 37 | 36,Female,21,33,81 38 | 37,Female,42,34,17 
39 | 38,Female,30,34,73 40 | 39,Female,36,37,26 41 | 40,Female,20,37,75 42 | 41,Female,65,38,35 43 | 42,Male,24,38,92 44 | 43,Male,48,39,36 45 | 44,Female,31,39,61 46 | 45,Female,49,39,28 47 | 46,Female,24,39,65 48 | 47,Female,50,40,55 49 | 48,Female,27,40,47 50 | 49,Female,29,40,42 51 | 50,Female,31,40,42 52 | 51,Female,49,42,52 53 | 52,Male,33,42,60 54 | 53,Female,31,43,54 55 | 54,Male,59,43,60 56 | 55,Female,50,43,45 57 | 56,Male,47,43,41 58 | 57,Female,51,44,50 59 | 58,Male,69,44,46 60 | 59,Female,27,46,51 61 | 60,Male,53,46,46 62 | 61,Male,70,46,56 63 | 62,Male,19,46,55 64 | 63,Female,67,47,52 65 | 64,Female,54,47,59 66 | 65,Male,63,48,51 67 | 66,Male,18,48,59 68 | 67,Female,43,48,50 69 | 68,Female,68,48,48 70 | 69,Male,19,48,59 71 | 70,Female,32,48,47 72 | 71,Male,70,49,55 73 | 72,Female,47,49,42 74 | 73,Female,60,50,49 75 | 74,Female,60,50,56 76 | 75,Male,59,54,47 77 | 76,Male,26,54,54 78 | 77,Female,45,54,53 79 | 78,Male,40,54,48 80 | 79,Female,23,54,52 81 | 80,Female,49,54,42 82 | 81,Male,57,54,51 83 | 82,Male,38,54,55 84 | 83,Male,67,54,41 85 | 84,Female,46,54,44 86 | 85,Female,21,54,57 87 | 86,Male,48,54,46 88 | 87,Female,55,57,58 89 | 88,Female,22,57,55 90 | 89,Female,34,58,60 91 | 90,Female,50,58,46 92 | 91,Female,68,59,55 93 | 92,Male,18,59,41 94 | 93,Male,48,60,49 95 | 94,Female,40,60,40 96 | 95,Female,32,60,42 97 | 96,Male,24,60,52 98 | 97,Female,47,60,47 99 | 98,Female,27,60,50 100 | 99,Male,48,61,42 101 | 100,Male,20,61,49 102 | 101,Female,23,62,41 103 | 102,Female,49,62,48 104 | 103,Male,67,62,59 105 | 104,Male,26,62,55 106 | 105,Male,49,62,56 107 | 106,Female,21,62,42 108 | 107,Female,66,63,50 109 | 108,Male,54,63,46 110 | 109,Male,68,63,43 111 | 110,Male,66,63,48 112 | 111,Male,65,63,52 113 | 112,Female,19,63,54 114 | 113,Female,38,64,42 115 | 114,Male,19,64,46 116 | 115,Female,18,65,48 117 | 116,Female,19,65,50 118 | 117,Female,63,65,43 119 | 118,Female,49,65,59 120 | 119,Female,51,67,43 121 | 120,Female,50,67,57 122 | 121,Male,27,67,56 123 | 
122,Female,38,67,40 124 | 123,Female,40,69,58 125 | 124,Male,39,69,91 126 | 125,Female,23,70,29 127 | 126,Female,31,70,77 128 | 127,Male,43,71,35 129 | 128,Male,40,71,95 130 | 129,Male,59,71,11 131 | 130,Male,38,71,75 132 | 131,Male,47,71,9 133 | 132,Male,39,71,75 134 | 133,Female,25,72,34 135 | 134,Female,31,72,71 136 | 135,Male,20,73,5 137 | 136,Female,29,73,88 138 | 137,Female,44,73,7 139 | 138,Male,32,73,73 140 | 139,Male,19,74,10 141 | 140,Female,35,74,72 142 | 141,Female,57,75,5 143 | 142,Male,32,75,93 144 | 143,Female,28,76,40 145 | 144,Female,32,76,87 146 | 145,Male,25,77,12 147 | 146,Male,28,77,97 148 | 147,Male,48,77,36 149 | 148,Female,32,77,74 150 | 149,Female,34,78,22 151 | 150,Male,34,78,90 152 | 151,Male,43,78,17 153 | 152,Male,39,78,88 154 | 153,Female,44,78,20 155 | 154,Female,38,78,76 156 | 155,Female,47,78,16 157 | 156,Female,27,78,89 158 | 157,Male,37,78,1 159 | 158,Female,30,78,78 160 | 159,Male,34,78,1 161 | 160,Female,30,78,73 162 | 161,Female,56,79,35 163 | 162,Female,29,79,83 164 | 163,Male,19,81,5 165 | 164,Female,31,81,93 166 | 165,Male,50,85,26 167 | 166,Female,36,85,75 168 | 167,Male,42,86,20 169 | 168,Female,33,86,95 170 | 169,Female,36,87,27 171 | 170,Male,32,87,63 172 | 171,Male,40,87,13 173 | 172,Male,28,87,75 174 | 173,Male,36,87,10 175 | 174,Male,36,87,92 176 | 175,Female,52,88,13 177 | 176,Female,30,88,86 178 | 177,Male,58,88,15 179 | 178,Male,27,88,69 180 | 179,Male,59,93,14 181 | 180,Male,35,93,90 182 | 181,Female,37,97,32 183 | 182,Female,32,97,86 184 | 183,Male,46,98,15 185 | 184,Female,29,98,88 186 | 185,Female,41,99,39 187 | 186,Male,30,99,97 188 | 187,Female,54,101,24 189 | 188,Male,28,101,68 190 | 189,Female,41,103,17 191 | 190,Female,36,103,85 192 | 191,Female,34,103,23 193 | 192,Female,32,103,69 194 | 193,Male,33,113,8 195 | 194,Female,38,113,91 196 | 195,Female,47,120,16 197 | 196,Female,35,120,79 198 | 197,Female,45,126,28 199 | 198,Male,32,126,74 200 | 199,Male,32,137,18 201 | 200,Male,30,137,83 202 | 
-------------------------------------------------------------------------------- /Assignments/Assignment 3/iAAA - Assignment 3 - En.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 3/iAAA - Assignment 3 - En.pdf -------------------------------------------------------------------------------- /Assignments/Assignment 3/iAAA - Assignment 3 - Per.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 3/iAAA - Assignment 3 - Per.pdf -------------------------------------------------------------------------------- /Assignments/Assignment 4 Solution/IP-Solution.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Fourth Assignment Solution" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "## Image Rotation" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": 1, 20 | "metadata": {}, 21 | "outputs": [], 22 | "source": [ 23 | "import cv2\n", 24 | "import numpy as np\n", 25 | "\n", 26 | "def rotate_image(image, angle, interpolation=cv2.INTER_LINEAR):\n", 27 | " \"\"\"\n", 28 | " Rotate an input image by a specified angle.\n", 29 | "\n", 30 | " Parameters:\n", 31 | " image: numpy.ndarray\n", 32 | " Input image.\n", 33 | " angle: float\n", 34 | " Angle in degrees by which to rotate the image.\n", 35 | " interpolation: int, optional\n", 36 | " Interpolation method to use. 
Default is cv2.INTER_LINEAR.\n", 37 | "\n", 38 | " Returns:\n", 39 | " numpy.ndarray\n", 40 | " Rotated image.\n", 41 | " \"\"\"\n", 42 | " # Get image dimensions\n", 43 | " height, width = image.shape[:2]\n", 44 | "\n", 45 | " # Calculate rotation matrix\n", 46 | " rotation_matrix = cv2.getRotationMatrix2D((width / 2, height / 2), angle, 1)\n", 47 | "\n", 48 | " # Perform rotation\n", 49 | " rotated_image = cv2.warpAffine(image, rotation_matrix, (width, height), flags=interpolation)\n", 50 | "\n", 51 | " return rotated_image\n", 52 | "\n", 53 | "# Load sample image\n", 54 | "image = cv2.imread('logo.png')\n", 55 | "\n", 56 | "# Test rotation by 45 degrees with bilinear interpolation\n", 57 | "rotated_image_bilinear = rotate_image(image, 45, interpolation=cv2.INTER_LINEAR)\n", 58 | "\n", 59 | "# Test rotation by 90 degrees with nearest neighbor interpolation\n", 60 | "rotated_image_nearest = rotate_image(image, 90, interpolation=cv2.INTER_NEAREST)\n" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "## Image Cropping" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 2, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "def crop_image(image, x, y, width, height):\n", 77 | " \"\"\"\n", 78 | " Crop an input image to a specified region of interest (ROI).\n", 79 | "\n", 80 | " Parameters:\n", 81 | " image: numpy.ndarray\n", 82 | " Input image.\n", 83 | " x: int\n", 84 | " X-coordinate of the top-left corner of the ROI.\n", 85 | " y: int\n", 86 | " Y-coordinate of the top-left corner of the ROI.\n", 87 | " width: int\n", 88 | " Width of the ROI.\n", 89 | " height: int\n", 90 | " Height of the ROI.\n", 91 | "\n", 92 | " Returns:\n", 93 | " numpy.ndarray\n", 94 | " Cropped image.\n", 95 | " \"\"\"\n", 96 | " cropped_image = image[y:y+height, x:x+width]\n", 97 | " return cropped_image\n", 98 | "\n", 99 | "# Load sample image\n", 100 | "image = cv2.imread('logo.png')\n", 101 | "\n", 102 | "# 
Define ROI coordinates and dimensions\n", 103 | "x, y, width, height = 100, 100, 300, 200\n", 104 | "\n", 105 | "# Crop image to the specified ROI\n", 106 | "cropped_image = crop_image(image, x, y, width, height)" 107 | ] 108 | }, 109 | { 110 | "cell_type": "markdown", 111 | "metadata": {}, 112 | "source": [ 113 | "## Affine Transformation" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": 3, 119 | "metadata": {}, 120 | "outputs": [], 121 | "source": [ 122 | "def translate_image(image, tx, ty):\n", 123 | " \"\"\"\n", 124 | " Translate (shift) an input image by the specified translation factors.\n", 125 | "\n", 126 | " Parameters:\n", 127 | " image: numpy.ndarray\n", 128 | " Input image.\n", 129 | " tx: int\n", 130 | " Translation factor along the x-axis.\n", 131 | " ty: int\n", 132 | " Translation factor along the y-axis.\n", 133 | "\n", 134 | " Returns:\n", 135 | " numpy.ndarray\n", 136 | " Translated image.\n", 137 | " \"\"\"\n", 138 | " rows, cols = image.shape[:2]\n", 139 | " translation_matrix = np.float32([[1, 0, tx], [0, 1, ty]])\n", 140 | " translated_image = cv2.warpAffine(image, translation_matrix, (cols, rows))\n", 141 | " return translated_image\n", 142 | "\n", 143 | "def scale_image(image, scale_x, scale_y):\n", 144 | " \"\"\"\n", 145 | " Scale an input image by the specified scale factors.\n", 146 | "\n", 147 | " Parameters:\n", 148 | " image: numpy.ndarray\n", 149 | " Input image.\n", 150 | " scale_x: float\n", 151 | " Scale factor along the x-axis.\n", 152 | " scale_y: float\n", 153 | " Scale factor along the y-axis.\n", 154 | "\n", 155 | " Returns:\n", 156 | " numpy.ndarray\n", 157 | " Scaled image.\n", 158 | " \"\"\"\n", 159 | " scaled_image = cv2.resize(image, None, fx=scale_x, fy=scale_y)\n", 160 | " return scaled_image\n", 161 | "\n", 162 | "def shear_image(image, shear_x, shear_y):\n", 163 | " \"\"\"\n", 164 | " Shear an input image by the specified shear factors.\n", 165 | "\n", 166 | " Parameters:\n", 167 | " 
image: numpy.ndarray\n", 168 | " Input image.\n", 169 | " shear_x: float\n", 170 | " Shear factor along the x-axis.\n", 171 | " shear_y: float\n", 172 | " Shear factor along the y-axis.\n", 173 | "\n", 174 | " Returns:\n", 175 | " numpy.ndarray\n", 176 | " Sheared image.\n", 177 | " \"\"\"\n", 178 | " rows, cols = image.shape[:2]\n", 179 | " shear_matrix = np.float32([[1, shear_x, 0], [shear_y, 1, 0]])\n", 180 | " sheared_image = cv2.warpAffine(image, shear_matrix, (cols, rows))\n", 181 | " return sheared_image\n", 182 | "\n", 183 | "def reflect_image(image, axis):\n", 184 | " \"\"\"\n", 185 | " Reflect (flip) an input image along the specified axis.\n", 186 | "\n", 187 | " Parameters:\n", 188 | " image: numpy.ndarray\n", 189 | " Input image.\n", 190 | " axis: int\n", 191 | " Axis along which to reflect the image. 0 for vertical, 1 for horizontal.\n", 192 | "\n", 193 | " Returns:\n", 194 | " numpy.ndarray\n", 195 | " Reflected image.\n", 196 | " \"\"\"\n", 197 | " reflected_image = cv2.flip(image, axis)\n", 198 | " return reflected_image\n", 199 | "\n", 200 | "# Load sample image\n", 201 | "image = cv2.imread('logo.png')\n", 202 | "\n", 203 | "# Test individual transformations\n", 204 | "translated_image = translate_image(image, 50, 50)\n", 205 | "scaled_image = scale_image(image, 0.5, 0.5)\n", 206 | "sheared_image = shear_image(image, 0.2, 0.2)\n", 207 | "reflected_vertical_image = reflect_image(image, 0)\n", 208 | "reflected_horizontal_image = reflect_image(image, 1)" 209 | ] 210 | }, 211 | { 212 | "cell_type": "markdown", 213 | "metadata": {}, 214 | "source": [ 215 | "## Perspective Transformation" 216 | ] 217 | }, 218 | { 219 | "cell_type": "code", 220 | "execution_count": 4, 221 | "metadata": {}, 222 | "outputs": [], 223 | "source": [ 224 | "def perspective_transform(image, src_points, dst_points):\n", 225 | " \"\"\"\n", 226 | " Perform perspective transformation on an input image to correct distortions caused by the viewpoint of the camera.\n", 227 | "\n", 
228 | " Parameters:\n", 229 | " image: numpy.ndarray\n", 230 | " Input image.\n", 231 | " src_points: list of tuples\n", 232 | " List of four source points defining the region of interest (ROI) in the input image.\n", 233 | " dst_points: list of tuples\n", 234 | " List of four destination points defining the desired perspective-transformed region.\n", 235 | "\n", 236 | " Returns:\n", 237 | " numpy.ndarray\n", 238 | " Perspective-transformed image.\n", 239 | " \"\"\"\n", 240 | " # Convert points to numpy arrays\n", 241 | " src_points = np.array(src_points, dtype=np.float32)\n", 242 | " dst_points = np.array(dst_points, dtype=np.float32)\n", 243 | "\n", 244 | " # Calculate perspective transform matrix\n", 245 | " perspective_matrix = cv2.getPerspectiveTransform(src_points, dst_points)\n", 246 | "\n", 247 | " # Apply perspective transform\n", 248 | " transformed_image = cv2.warpPerspective(image, perspective_matrix, (image.shape[1], image.shape[0]))\n", 249 | "\n", 250 | " return transformed_image\n", 251 | "\n", 252 | "# Load sample image\n", 253 | "image = cv2.imread('logo.png')\n", 254 | "\n", 255 | "# Define source points (ROI) and destination points for perspective transformation\n", 256 | "src_points = [(150, 150), (450, 150), (450, 350), (150, 350)]\n", 257 | "dst_points = [(200, 100), (400, 100), (400, 300), (200, 300)]\n", 258 | "\n", 259 | "# Perform perspective transformation\n", 260 | "transformed_image = perspective_transform(image, src_points, dst_points)" 261 | ] 262 | }, 263 | { 264 | "cell_type": "markdown", 265 | "metadata": {}, 266 | "source": [ 267 | "## Color Indexing" 268 | ] 269 | }, 270 | { 271 | "cell_type": "code", 272 | "execution_count": 5, 273 | "metadata": {}, 274 | "outputs": [], 275 | "source": [ 276 | "from sklearn.cluster import KMeans\n", 277 | "\n", 278 | "def index_colors(image, k):\n", 279 | " \"\"\"\n", 280 | " Index colors in an image using k-means clustering.\n", 281 | "\n", 282 | " Parameters:\n", 283 | " image: 
numpy.ndarray\n", 284 | " Input image.\n", 285 | " k: int\n", 286 | " Number of clusters (colors) to group similar colors into.\n", 287 | "\n", 288 | " Returns:\n", 289 | " numpy.ndarray\n", 290 | " Image with indexed colors.\n", 291 | " \"\"\"\n", 292 | " # Reshape image to a 2D array of pixels (rows) by color channels (columns)\n", 293 | " pixels = image.reshape(-1, 3)\n", 294 | "\n", 295 | " # Perform k-means clustering\n", 296 | " kmeans = KMeans(n_clusters=k)\n", 297 | " kmeans.fit(pixels)\n", 298 | "\n", 299 | " # Get cluster centroids (colors)\n", 300 | " centroids = kmeans.cluster_centers_\n", 301 | "\n", 302 | " # Get labels for each pixel indicating the nearest centroid\n", 303 | " labels = kmeans.labels_\n", 304 | "\n", 305 | " # Replace each pixel with its nearest centroid color\n", 306 | " indexed_image = centroids[labels].reshape(image.shape)\n", 307 | "\n", 308 | " return indexed_image\n", 309 | "\n", 310 | "# Load sample image\n", 311 | "image = cv2.imread('logo.png')\n", 312 | "\n", 313 | "# Apply color indexing with k-means clustering\n", 314 | "k = 8 # Number of clusters\n", 315 | "indexed_image = index_colors(image, k)" 316 | ] 317 | }, 318 | { 319 | "cell_type": "markdown", 320 | "metadata": {}, 321 | "source": [ 322 | "## Color Space Conversion" 323 | ] 324 | }, 325 | { 326 | "cell_type": "code", 327 | "execution_count": 6, 328 | "metadata": {}, 329 | "outputs": [ 330 | { 331 | "ename": "", 332 | "evalue": "", 333 | "output_type": "error", 334 | "traceback": [ 335 | "\u001b[1;31mThe Kernel crashed while executing code in the current cell or a previous cell. \n", 336 | "\u001b[1;31mPlease review the code in the cell(s) to identify a possible cause of the failure. \n", 337 | "\u001b[1;31mClick here for more info. \n", 338 | "\u001b[1;31mView Jupyter log for further details." 
339 | ] 340 | } 341 | ], 342 | "source": [ 343 | "def convert_rgb_to_hsv(image):\n", 344 | " \"\"\"\n", 345 | " Convert an image from RGB color space to HSV color space.\n", 346 | "\n", 347 | " Parameters:\n", 348 | " image: numpy.ndarray\n", 349 | " Input image in RGB color space.\n", 350 | "\n", 351 | " Returns:\n", 352 | " numpy.ndarray\n", 353 | " Image in HSV color space.\n", 354 | " \"\"\"\n", 355 | " hsv_image = cv2.cvtColor(image, cv2.COLOR_RGB2HSV)\n", 356 | " return hsv_image\n", 357 | "\n", 358 | "def convert_rgb_to_lab(image):\n", 359 | " \"\"\"\n", 360 | " Convert an image from RGB color space to LAB color space.\n", 361 | "\n", 362 | " Parameters:\n", 363 | " image: numpy.ndarray\n", 364 | " Input image in RGB color space.\n", 365 | "\n", 366 | " Returns:\n", 367 | " numpy.ndarray\n", 368 | " Image in LAB color space.\n", 369 | " \"\"\"\n", 370 | " lab_image = cv2.cvtColor(image, cv2.COLOR_RGB2LAB)\n", 371 | " return lab_image\n", 372 | "\n", 373 | "def convert_rgb_to_ycbcr(image):\n", 374 | " \"\"\"\n", 375 | " Convert an image from RGB color space to YCbCr color space.\n", 376 | "\n", 377 | " Parameters:\n", 378 | " image: numpy.ndarray\n", 379 | " Input image in RGB color space.\n", 380 | "\n", 381 | " Returns:\n", 382 | " numpy.ndarray\n", 383 | " Image in YCbCr color space.\n", 384 | " \"\"\"\n", 385 | " ycbcr_image = cv2.cvtColor(image, cv2.COLOR_RGB2YCrCb)\n", 386 | " return ycbcr_image\n", 387 | "\n", 388 | "# Load sample image\n", 389 | "image = cv2.imread('logo.png')\n", 390 | "image_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n", 391 | "\n", 392 | "# Convert image to HSV color space\n", 393 | "image_hsv = convert_rgb_to_hsv(image_rgb)\n", 394 | "\n", 395 | "# Convert image to LAB color space\n", 396 | "image_lab = convert_rgb_to_lab(image_rgb)\n", 397 | "\n", 398 | "# Convert image to YCbCr color space\n", 399 | "image_ycbcr = convert_rgb_to_ycbcr(image_rgb)\n", 400 | "\n", 401 | "# Display original image and its converted versions in each 
color space\n", 402 | "cv2.imshow('Original Image', image_rgb)\n", 403 | "cv2.imshow('HSV Color Space', image_hsv)\n", 404 | "cv2.imshow('LAB Color Space', image_lab)\n", 405 | "cv2.imshow('YCbCr Color Space', image_ycbcr)\n", 406 | "cv2.waitKey(0)\n", 407 | "cv2.destroyAllWindows()" 408 | ] 409 | }, 410 | { 411 | "cell_type": "code", 412 | "execution_count": null, 413 | "metadata": {}, 414 | "outputs": [], 415 | "source": [] 416 | } 417 | ], 418 | "metadata": { 419 | "kernelspec": { 420 | "display_name": "DIP", 421 | "language": "python", 422 | "name": "python3" 423 | }, 424 | "language_info": { 425 | "codemirror_mode": { 426 | "name": "ipython", 427 | "version": 3 428 | }, 429 | "file_extension": ".py", 430 | "mimetype": "text/x-python", 431 | "name": "python", 432 | "nbconvert_exporter": "python", 433 | "pygments_lexer": "ipython3", 434 | "version": "3.9.16" 435 | } 436 | }, 437 | "nbformat": 4, 438 | "nbformat_minor": 2 439 | } 440 | -------------------------------------------------------------------------------- /Assignments/Assignment 4 Solution/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 4 Solution/logo.png -------------------------------------------------------------------------------- /Assignments/Assignment 4/iAAA - Assignment 4 - En.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 4/iAAA - Assignment 4 - En.pdf -------------------------------------------------------------------------------- /Assignments/Assignment 4/iAAA - Assignment 4- Per.pdf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 4/iAAA - Assignment 4- Per.pdf -------------------------------------------------------------------------------- /Assignments/Assignment 5 Solution/Assignment 5 Solution.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Assignment 5 Solution" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "## Student Study Performance" 15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": {}, 20 | "source": [ 21 | "### Importing Required Libraries" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": null, 27 | "metadata": {}, 28 | "outputs": [], 29 | "source": [ 30 | "import numpy as np\n", 31 | "import pandas as pd\n", 32 | "\n", 33 | "import seaborn as sns\n", 34 | "import matplotlib.pyplot as plt\n", 35 | "%matplotlib inline\n", 36 | "\n", 37 | "import os\n", 38 | "import matplotlib.pyplot as plt\n", 39 | "from sklearn.model_selection import train_test_split\n", 40 | "from sklearn.preprocessing import StandardScaler, OneHotEncoder\n", 41 | "from sklearn.compose import ColumnTransformer\n", 42 | "from sklearn.metrics import accuracy_score\n", 43 | "\n", 44 | "import torch\n", 45 | "import torch.nn as nn\n", 46 | "import torch.optim as optim\n", 47 | "\n" 48 | ] 49 | }, 50 | { 51 | "cell_type": "markdown", 52 | "metadata": {}, 53 | "source": [ 54 | "### Reading Data" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": null, 60 | "metadata": {}, 61 | "outputs": [], 62 | "source": [ 63 | "Student_Data = pd.read_csv(\"/kaggle/input/student-study-performance/study_performance.csv\")\n", 64 | "Student_Data" 65 | ] 66 | }, 67 | { 68 | "cell_type": "markdown", 69 | "metadata": {}, 70 | "source": [ 71 | "### EDA" 72 | ] 73 | }, 74 | 
{ 75 | "cell_type": "code", 76 | "execution_count": null, 77 | "metadata": {}, 78 | "outputs": [], 79 | "source": [ 80 | "Student_Data.info()" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": null, 86 | "metadata": {}, 87 | "outputs": [], 88 | "source": [ 89 | "# Check Missing values\n", 90 | "Student_Data.isna().sum()" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": null, 96 | "metadata": {}, 97 | "outputs": [], 98 | "source": [ 99 | "# Check Duplicates\n", 100 | "Student_Data.duplicated().sum()" 101 | ] 102 | }, 103 | { 104 | "cell_type": "code", 105 | "execution_count": null, 106 | "metadata": {}, 107 | "outputs": [], 108 | "source": [ 109 | "# Check the number of unique values of each column\n", 110 | "Student_Data.nunique()" 111 | ] 112 | }, 113 | { 114 | "cell_type": "code", 115 | "execution_count": null, 116 | "metadata": {}, 117 | "outputs": [], 118 | "source": [ 119 | "# Check statistics of data set\n", 120 | "Student_Data.describe()" 121 | ] 122 | }, 123 | { 124 | "cell_type": "code", 125 | "execution_count": null, 126 | "metadata": {}, 127 | "outputs": [], 128 | "source": [ 129 | "print(\"Categories in 'gender' variable: \",end=\" \" )\n", 130 | "print(Student_Data['gender'].unique())\n", 131 | "\n", 132 | "print(\"Categories in 'race_ethnicity' variable: \",end=\" \")\n", 133 | "print(Student_Data['race_ethnicity'].unique())\n", 134 | "\n", 135 | "print(\"Categories in'parental level of education' variable:\",end=\" \" )\n", 136 | "print(Student_Data['parental_level_of_education'].unique())\n", 137 | "\n", 138 | "print(\"Categories in 'lunch' variable: \",end=\" \" )\n", 139 | "print(Student_Data['lunch'].unique())\n", 140 | "\n", 141 | "print(\"Categories in 'test preparation course' variable: \",end=\" \" )\n", 142 | "print(Student_Data['test_preparation_course'].unique())" 143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "execution_count": null, 148 | "metadata": {}, 149 | "outputs": [], 150 | 
"source": [ 151 | "# define numerical & categorical columns\n", 152 | "numeric_features = [feature for feature in Student_Data.columns if Student_Data[feature].dtype != 'O']\n", 153 | "categorical_features = [feature for feature in Student_Data.columns if Student_Data[feature].dtype == 'O']\n", 154 | "\n", 155 | "# print columns\n", 156 | "print('We have {} numerical features : {}'.format(len(numeric_features), numeric_features))\n", 157 | "print('\\nWe have {} categorical features : {}'.format(len(categorical_features), categorical_features))" 158 | ] 159 | }, 160 | { 161 | "cell_type": "code", 162 | "execution_count": null, 163 | "metadata": {}, 164 | "outputs": [], 165 | "source": [ 166 | "Student_Data['total score'] = Student_Data['math_score'] + Student_Data['reading_score'] + Student_Data['writing_score']\n", 167 | "Student_Data['average'] = Student_Data['total score']/3\n", 168 | "Student_Data.head()" 169 | ] 170 | }, 171 | { 172 | "cell_type": "code", 173 | "execution_count": null, 174 | "metadata": {}, 175 | "outputs": [], 176 | "source": [ 177 | "reading_full = Student_Data[Student_Data['reading_score'] == 100]['average'].count()\n", 178 | "writing_full = Student_Data[Student_Data['writing_score'] == 100]['average'].count()\n", 179 | "math_full = Student_Data[Student_Data['math_score'] == 100]['average'].count()\n", 180 | "\n", 181 | "print(f'Number of students with full marks in Maths: {math_full}')\n", 182 | "print(f'Number of students with full marks in Writing: {writing_full}')\n", 183 | "print(f'Number of students with full marks in Reading: {reading_full}')" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": null, 189 | "metadata": {}, 190 | "outputs": [], 191 | "source": [ 192 | "reading_less_20 = Student_Data[Student_Data['reading_score'] <= 20]['average'].count()\n", 193 | "writing_less_20 = Student_Data[Student_Data['writing_score'] <= 20]['average'].count()\n", 194 | "math_less_20 = 
Student_Data[Student_Data['math_score'] <= 20]['average'].count()\n", 195 | "\n", 196 | "print(f'Number of students with less than 20 marks in Maths: {math_less_20}')\n", 197 | "print(f'Number of students with less than 20 marks in Writing: {writing_less_20}')\n", 198 | "print(f'Number of students with less than 20 marks in Reading: {reading_less_20}')" 199 | ] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "execution_count": null, 204 | "metadata": {}, 205 | "outputs": [], 206 | "source": [ 207 | "sns.pairplot(Student_Data,hue = 'gender')\n", 208 | "plt.show()" 209 | ] 210 | }, 211 | { 212 | "cell_type": "code", 213 | "execution_count": null, 214 | "metadata": {}, 215 | "outputs": [], 216 | "source": [ 217 | "fig, axs = plt.subplots(1, 2, figsize=(15, 7))\n", 218 | "plt.subplot(121)\n", 219 | "sns.histplot(data=Student_Data,x='average',bins=30,kde=True,color='g')\n", 220 | "plt.subplot(122)\n", 221 | "sns.histplot(data=Student_Data,x='average',kde=True,hue='gender')\n", 222 | "plt.show()" 223 | ] 224 | }, 225 | { 226 | "cell_type": "code", 227 | "execution_count": null, 228 | "metadata": {}, 229 | "outputs": [], 230 | "source": [ 231 | "fig, axs = plt.subplots(1, 2, figsize=(15, 7))\n", 232 | "plt.subplot(121)\n", 233 | "sns.histplot(data=Student_Data,x='total score',bins=30,kde=True,color='g')\n", 234 | "plt.subplot(122)\n", 235 | "sns.histplot(data=Student_Data,x='total score',kde=True,hue='gender')\n", 236 | "plt.show()" 237 | ] 238 | }, 239 | { 240 | "cell_type": "code", 241 | "execution_count": null, 242 | "metadata": {}, 243 | "outputs": [], 244 | "source": [ 245 | "plt.subplots(1,3,figsize=(25,6))\n", 246 | "plt.subplot(141)\n", 247 | "sns.histplot(data=Student_Data,x='average',kde=True,hue='lunch')\n", 248 | "plt.subplot(142)\n", 249 | "sns.histplot(data=Student_Data[Student_Data.gender=='female'],x='average',kde=True,hue='lunch')\n", 250 | "plt.subplot(143)\n", 251 | 
"sns.histplot(data=Student_Data[Student_Data.gender=='male'],x='average',kde=True,hue='lunch')\n", 252 | "plt.show()" 253 | ] 254 | }, 255 | { 256 | "cell_type": "code", 257 | "execution_count": null, 258 | "metadata": {}, 259 | "outputs": [], 260 | "source": [ 261 | "plt.subplots(1,3,figsize=(25,6))\n", 262 | "plt.subplot(141)\n", 263 | "ax =sns.histplot(data=Student_Data,x='average',kde=True,hue='parental_level_of_education')\n", 264 | "plt.subplot(142)\n", 265 | "ax =sns.histplot(data=Student_Data[Student_Data.gender=='male'],x='average',kde=True,hue='parental_level_of_education')\n", 266 | "plt.subplot(143)\n", 267 | "ax =sns.histplot(data=Student_Data[Student_Data.gender=='female'],x='average',kde=True,hue='parental_level_of_education')\n", 268 | "plt.show()" 269 | ] 270 | }, 271 | { 272 | "cell_type": "code", 273 | "execution_count": null, 274 | "metadata": {}, 275 | "outputs": [], 276 | "source": [ 277 | "plt.subplots(1,3,figsize=(25,6))\n", 278 | "plt.subplot(141)\n", 279 | "ax =sns.histplot(data=Student_Data,x='average',kde=True,hue='race_ethnicity')\n", 280 | "plt.subplot(142)\n", 281 | "ax =sns.histplot(data=Student_Data[Student_Data.gender=='female'],x='average',kde=True,hue='race_ethnicity')\n", 282 | "plt.subplot(143)\n", 283 | "ax =sns.histplot(data=Student_Data[Student_Data.gender=='male'],x='average',kde=True,hue='race_ethnicity')\n", 284 | "plt.show()" 285 | ] 286 | }, 287 | { 288 | "cell_type": "code", 289 | "execution_count": null, 290 | "metadata": {}, 291 | "outputs": [], 292 | "source": [ 293 | "plt.figure(figsize=(18,8))\n", 294 | "plt.subplot(1, 4, 1)\n", 295 | "plt.title('MATH SCORES')\n", 296 | "sns.violinplot(y='math_score',data=Student_Data,color='red',linewidth=3)\n", 297 | "plt.subplot(1, 4, 2)\n", 298 | "plt.title('READING SCORES')\n", 299 | "sns.violinplot(y='reading_score',data=Student_Data,color='green',linewidth=3)\n", 300 | "plt.subplot(1, 4, 3)\n", 301 | "plt.title('WRITING SCORES')\n", 302 | 
"sns.violinplot(y='writing_score',data=Student_Data,color='blue',linewidth=3)\n", 303 | "plt.show()" 304 | ] 305 | }, 306 | { 307 | "cell_type": "code", 308 | "execution_count": null, 309 | "metadata": {}, 310 | "outputs": [], 311 | "source": [ 312 | "plt.rcParams['figure.figsize'] = (30, 12)\n", 313 | "\n", 314 | "plt.subplot(1, 5, 1)\n", 315 | "size = Student_Data['gender'].value_counts()\n", 316 | "labels = 'Female', 'Male'\n", 317 | "color = ['red','green']\n", 318 | "\n", 319 | "plt.pie(size, colors = color, labels = labels,autopct = '.%2f%%')\n", 320 | "plt.title('Gender', fontsize = 20)\n", 321 | "plt.axis('off')\n", 322 | "\n", 323 | "plt.subplot(1, 5, 2)\n", 324 | "size = Student_Data['race_ethnicity'].value_counts()\n", 325 | "labels = 'Group C', 'Group D','Group B','Group E','Group A'\n", 326 | "color = ['red', 'green', 'blue', 'cyan','orange']\n", 327 | "\n", 328 | "plt.pie(size, colors = color,labels = labels,autopct = '.%2f%%')\n", 329 | "plt.title('Race/Ethnicity', fontsize = 20)\n", 330 | "plt.axis('off')\n", 331 | "\n", 332 | "plt.subplot(1, 5, 3)\n", 333 | "size = Student_Data['lunch'].value_counts()\n", 334 | "labels = 'Standard', 'Free'\n", 335 | "color = ['red','green']\n", 336 | "\n", 337 | "plt.pie(size, colors = color,labels = labels,autopct = '.%2f%%')\n", 338 | "plt.title('Lunch', fontsize = 20)\n", 339 | "plt.axis('off')\n", 340 | "\n", 341 | "plt.subplot(1, 5, 4)\n", 342 | "size = Student_Data['test_preparation_course'].value_counts()\n", 343 | "labels = 'None', 'Completed'\n", 344 | "color = ['red','green']\n", 345 | "\n", 346 | "plt.pie(size, colors = color,labels = labels,autopct = '.%2f%%')\n", 347 | "plt.title('Test Course', fontsize = 20)\n", 348 | "plt.axis('off')\n", 349 | "\n", 350 | "plt.subplot(1, 5, 5)\n", 351 | "size = Student_Data['parental_level_of_education'].value_counts()\n", 352 | "labels = 'Some College', \"Associate's Degree\",'High School','Some High School',\"Bachelor's Degree\",\"Master's Degree\"\n", 353 | "color 
= ['red', 'green', 'blue', 'cyan','orange','grey']\n", 354 | "\n", 355 | "plt.pie(size, colors = color,labels = labels,autopct = '.%2f%%')\n", 356 | "plt.title('Parental Education', fontsize = 20)\n", 357 | "plt.axis('off')\n", 358 | "\n", 359 | "plt.tight_layout()\n", 360 | "plt.grid()\n", 361 | "\n", 362 | "plt.show()" 363 | ] 364 | }, 365 | { 366 | "cell_type": "markdown", 367 | "metadata": {}, 368 | "source": [ 369 | "### Data Pre-Process" 370 | ] 371 | }, 372 | { 373 | "cell_type": "code", 374 | "execution_count": null, 375 | "metadata": {}, 376 | "outputs": [], 377 | "source": [ 378 | "numeric_features = Student_Data.drop('math_score', axis=1).select_dtypes(include=['int64', 'float64']).columns\n", 379 | "categorical_features = Student_Data.drop('math_score', axis=1).select_dtypes(include=['object']).columns\n", 380 | "X = Student_Data.drop('math_score', axis=1)\n", 381 | "y = Student_Data['math_score']" 382 | ] 383 | }, 384 | { 385 | "cell_type": "code", 386 | "execution_count": null, 387 | "metadata": {}, 388 | "outputs": [], 389 | "source": [ 390 | "num_features = X.select_dtypes(exclude=\"object\").columns\n", 391 | "cat_features = X.select_dtypes(include=\"object\").columns\n", 392 | "\n", 393 | "num_features, cat_features" 394 | ] 395 | }, 396 | { 397 | "cell_type": "markdown", 398 | "metadata": {}, 399 | "source": [ 400 | "### Train Test Split" 401 | ] 402 | }, 403 | { 404 | "cell_type": "code", 405 | "execution_count": null, 406 | "metadata": {}, 407 | "outputs": [], 408 | "source": [ 409 | "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)" 410 | ] 411 | }, 412 | { 413 | "cell_type": "code", 414 | "execution_count": null, 415 | "metadata": {}, 416 | "outputs": [], 417 | "source": [ 418 | "numeric_transformer = StandardScaler()\n", 419 | "categorical_transformer = OneHotEncoder(drop='first', sparse=False)" 420 | ] 421 | }, 422 | { 423 | "cell_type": "code", 424 | "execution_count": null, 425 | "metadata": {}, 
426 | "outputs": [], 427 | "source": [ 428 | "preprocessor = ColumnTransformer(\n", 429 | " transformers=[\n", 430 | " ('num', numeric_transformer, numeric_features),\n", 431 | " ('cat', categorical_transformer, categorical_features)])" 432 | ] 433 | }, 434 | { 435 | "cell_type": "code", 436 | "execution_count": null, 437 | "metadata": {}, 438 | "outputs": [], 439 | "source": [ 440 | "X_train_processed = preprocessor.fit_transform(X_train)\n", 441 | "X_test_processed = preprocessor.transform(X_test)" 442 | ] 443 | }, 444 | { 445 | "cell_type": "code", 446 | "execution_count": null, 447 | "metadata": {}, 448 | "outputs": [], 449 | "source": [ 450 | "X_train_tensor = torch.tensor(X_train_processed, dtype=torch.float32)\n", 451 | "y_train_tensor = torch.tensor(y_train.values, dtype=torch.float32).view(-1, 1) # Reshape to column vector\n", 452 | "X_test_tensor = torch.tensor(X_test_processed, dtype=torch.float32)\n", 453 | "y_test_tensor = torch.tensor(y_test.values, dtype=torch.float32).view(-1, 1) # Reshape to column vector\n" 454 | ] 455 | }, 456 | { 457 | "cell_type": "markdown", 458 | "metadata": {}, 459 | "source": [ 460 | "### Model Architecture" 461 | ] 462 | }, 463 | { 464 | "cell_type": "code", 465 | "execution_count": null, 466 | "metadata": {}, 467 | "outputs": [], 468 | "source": [ 469 | "class MLP(nn.Module):\n", 470 | " def __init__(self, input_dim, hidden_dim1, hidden_dim2, output_dim):\n", 471 | " super(MLP, self).__init__()\n", 472 | " self.fc1 = nn.Linear(input_dim, hidden_dim1)\n", 473 | " self.fc2 = nn.Linear(hidden_dim1, hidden_dim2)\n", 474 | " self.fc3 = nn.Linear(hidden_dim2, output_dim)\n", 475 | " self.relu = nn.ReLU()\n", 476 | "\n", 477 | " def forward(self, x):\n", 478 | " x = self.relu(self.fc1(x))\n", 479 | " x = self.relu(self.fc2(x))\n", 480 | " x = self.fc3(x)\n", 481 | " return x" 482 | ] 483 | }, 484 | { 485 | "cell_type": "code", 486 | "execution_count": null, 487 | "metadata": {}, 488 | "outputs": [], 489 | "source": [ 490 | 
"input_dim = X_train_tensor.shape[1] # Number of features\n", 491 | "hidden_dim1 = 128\n", 492 | "hidden_dim2 = 64\n", 493 | "output_dim = 1 # For regression\n", 494 | "model = MLP(input_dim, hidden_dim1, hidden_dim2, output_dim)" 495 | ] 496 | }, 497 | { 498 | "cell_type": "code", 499 | "execution_count": null, 500 | "metadata": {}, 501 | "outputs": [], 502 | "source": [ 503 | "criterion = nn.MSELoss()\n", 504 | "optimizer = optim.Adam(model.parameters(), lr=0.001)" 505 | ] 506 | }, 507 | { 508 | "cell_type": "markdown", 509 | "metadata": {}, 510 | "source": [ 511 | "### Training Procedure" 512 | ] 513 | }, 514 | { 515 | "cell_type": "code", 516 | "execution_count": null, 517 | "metadata": {}, 518 | "outputs": [], 519 | "source": [ 520 | "num_epochs = 5000\n", 521 | "for epoch in range(num_epochs):\n", 522 | " model.train()\n", 523 | " optimizer.zero_grad()\n", 524 | "# print(f'Input shape: {X_train_tensor.shape}')\n", 525 | " outputs = model(X_train_tensor)\n", 526 | " loss = criterion(outputs, y_train_tensor)\n", 527 | " loss.backward()\n", 528 | " optimizer.step()\n", 529 | " if (epoch+1) % 10 == 0:\n", 530 | " model.eval()\n", 531 | " with torch.no_grad():\n", 532 | " y_pred = model(X_test_tensor)\n", 533 | " test_loss = criterion(y_pred, y_test_tensor)\n", 534 | " print(f'Epoch [{epoch+1}/{num_epochs}] | Train_Loss: {loss.item():.4f} | Test Loss: {test_loss.item():.4f}')" 535 | ] 536 | }, 537 | { 538 | "cell_type": "markdown", 539 | "metadata": {}, 540 | "source": [ 541 | "### Evaluation" 542 | ] 543 | }, 544 | { 545 | "cell_type": "code", 546 | "execution_count": null, 547 | "metadata": {}, 548 | "outputs": [], 549 | "source": [ 550 | "model.eval()\n", 551 | "with torch.no_grad():\n", 552 | " y_pred = model(X_test_tensor)\n", 553 | " test_loss = criterion(y_pred, y_test_tensor)\n", 554 | " print(f'Test Loss: {test_loss.item():.4f}')" 555 | ] 556 | }, 557 | { 558 | "cell_type": "markdown", 559 | "metadata": {}, 560 | "source": [ 561 | "## Obesity Levels" 562 
| ] 563 | }, 564 | { 565 | "cell_type": "markdown", 566 | "metadata": {}, 567 | "source": [ 568 | "### Import Required Libraries" 569 | ] 570 | }, 571 | { 572 | "cell_type": "code", 573 | "execution_count": null, 574 | "metadata": {}, 575 | "outputs": [], 576 | "source": [ 577 | "import warnings\n", 578 | "warnings.filterwarnings(\"ignore\")\n", 579 | "\n", 580 | "import numpy as np\n", 581 | "import pandas as pd\n", 582 | "\n", 583 | "import seaborn as sns\n", 584 | "import matplotlib.pyplot as plt\n", 585 | "%matplotlib inline\n", 586 | "\n", 587 | "\n", 588 | "\n", 589 | "import os\n", 590 | "import matplotlib.pyplot as plt\n", 591 | "from sklearn.preprocessing import PowerTransformer\n", 592 | "from sklearn.model_selection import train_test_split\n", 593 | "from sklearn.preprocessing import StandardScaler, OneHotEncoder ,LabelEncoder\n", 594 | "from sklearn.compose import ColumnTransformer\n", 595 | "from sklearn import metrics\n", 596 | "import plotly.graph_objects as go\n", 597 | "import plotly.express as px\n", 598 | "\n", 599 | "\n", 600 | "from torch.utils import data\n", 601 | "from tqdm.notebook import tqdm\n", 602 | "import torch\n", 603 | "import torch.nn as nn\n", 604 | "import torch.optim as optim\n", 605 | "from torch.utils.data import DataLoader, TensorDataset\n" 606 | ] 607 | }, 608 | { 609 | "cell_type": "markdown", 610 | "metadata": {}, 611 | "source": [ 612 | "### Loading Data" 613 | ] 614 | }, 615 | { 616 | "cell_type": "code", 617 | "execution_count": null, 618 | "metadata": {}, 619 | "outputs": [], 620 | "source": [ 621 | "Obesity_Data = pd.read_csv(\"/kaggle/input/obesity-levels/ObesityDataSet_raw_and_data_sinthetic.csv\")\n", 622 | "Obesity_Data" 623 | ] 624 | }, 625 | { 626 | "cell_type": "code", 627 | "execution_count": null, 628 | "metadata": {}, 629 | "outputs": [], 630 | "source": [ 631 | "Obesity_Data.info()" 632 | ] 633 | }, 634 | { 635 | "cell_type": "code", 636 | "execution_count": null, 637 | "metadata": {}, 638 | "outputs": [], 
639 | "source": [ 640 | "Obesity_Data.isna().sum()" 641 | ] 642 | }, 643 | { 644 | "cell_type": "code", 645 | "execution_count": null, 646 | "metadata": {}, 647 | "outputs": [], 648 | "source": [ 649 | "Obesity_Data.duplicated().sum()" 650 | ] 651 | }, 652 | { 653 | "cell_type": "code", 654 | "execution_count": null, 655 | "metadata": {}, 656 | "outputs": [], 657 | "source": [ 658 | "Obesity_Data.nunique()" 659 | ] 660 | }, 661 | { 662 | "cell_type": "markdown", 663 | "metadata": {}, 664 | "source": [ 665 | "### EDA" 666 | ] 667 | }, 668 | { 669 | "cell_type": "code", 670 | "execution_count": null, 671 | "metadata": {}, 672 | "outputs": [], 673 | "source": [ 674 | "Obesity_Data.describe()" 675 | ] 676 | }, 677 | { 678 | "cell_type": "code", 679 | "execution_count": null, 680 | "metadata": {}, 681 | "outputs": [], 682 | "source": [ 683 | "categorical_features = Obesity_Data.select_dtypes(include=\"object\").columns\n", 684 | "continuous_features = Obesity_Data.select_dtypes(exclude=\"object\").columns" 685 | ] 686 | }, 687 | { 688 | "cell_type": "code", 689 | "execution_count": null, 690 | "metadata": {}, 691 | "outputs": [], 692 | "source": [ 693 | "target_count = Obesity_Data['NObeyesdad'].value_counts()\n", 694 | "target_unique = Obesity_Data['NObeyesdad'].unique()" 695 | ] 696 | }, 697 | { 698 | "cell_type": "code", 699 | "execution_count": null, 700 | "metadata": {}, 701 | "outputs": [], 702 | "source": [ 703 | "fig = px.pie(values= target_count, names = target_count.index,color_discrete_sequence=px.colors.qualitative.Pastel1 ,title = \"the number of people related to each type of obesity level\")\n", 704 | "\n", 705 | "fig.show()" 706 | ] 707 | }, 708 | { 709 | "cell_type": "code", 710 | "execution_count": null, 711 | "metadata": {}, 712 | "outputs": [], 713 | "source": [ 714 | "df_ot = Obesity_Data[Obesity_Data[\"NObeyesdad\"] == 'Obesity_Type_I' ]\n", 715 | "df_ot2 = Obesity_Data[Obesity_Data[\"NObeyesdad\"] == 'Obesity_Type_II']\n", 716 | "df_ot3 = 
Obesity_Data[Obesity_Data[\"NObeyesdad\"] == 'Obesity_Type_III']" 717 | ] 718 | }, 719 | { 720 | "cell_type": "code", 721 | "execution_count": null, 722 | "metadata": {}, 723 | "outputs": [], 724 | "source": [ 725 | "df_ot_final = pd.concat([df_ot,df_ot2,df_ot3])\n", 726 | "df_ot_final.reset_index(drop=True, inplace = True)" 727 | ] 728 | }, 729 | { 730 | "cell_type": "code", 731 | "execution_count": null, 732 | "metadata": {}, 733 | "outputs": [], 734 | "source": [ 735 | "df_ow = Obesity_Data[Obesity_Data[\"NObeyesdad\"]=='Overweight_Level_I']\n", 736 | "df_ow2 = Obesity_Data[Obesity_Data[\"NObeyesdad\"]=='Overweight_Level_II']" 737 | ] 738 | }, 739 | { 740 | "cell_type": "code", 741 | "execution_count": null, 742 | "metadata": {}, 743 | "outputs": [], 744 | "source": [ 745 | "df_ow_final = pd.concat([df_ow,df_ow2])\n", 746 | "df_ow_final.reset_index(drop=True, inplace = True)" 747 | ] 748 | }, 749 | { 750 | "cell_type": "code", 751 | "execution_count": null, 752 | "metadata": {}, 753 | "outputs": [], 754 | "source": [ 755 | "df_n = Obesity_Data[Obesity_Data[\"NObeyesdad\"]=='Normal_Weight']" 756 | ] 757 | }, 758 | { 759 | "cell_type": "code", 760 | "execution_count": null, 761 | "metadata": {}, 762 | "outputs": [], 763 | "source": [ 764 | "df_In = Obesity_Data[Obesity_Data[\"NObeyesdad\"]=='Insufficient_Weight']" 765 | ] 766 | }, 767 | { 768 | "cell_type": "code", 769 | "execution_count": null, 770 | "metadata": {}, 771 | "outputs": [], 772 | "source": [ 773 | "data_list = [df_ot_final, df_ow_final, df_n, df_In]\n", 774 | "\n", 775 | "data_name =[\"obesity_type\", \"over_weight_type\", \"normal\", \"Insufficient_Weight\" ]\n", 776 | "\n", 777 | "fig, axes = plt.subplots(figsize=(10,8),nrows = 2 , ncols =2)\n", 778 | "\n", 779 | "for i in range(2):\n", 780 | "\n", 781 | " sns.histplot(data=data_list[i], x='Gender', hue='NObeyesdad',palette= 'turbo', ax=axes[i, 0], multiple='stack')\n", 782 | " axes[i,0].set_title(f'{data_name[i]} vs Gender')\n", 783 | "\n", 784 | 
" sns.histplot(data=data_list[i+2], x='Gender', hue='NObeyesdad',palette= 'turbo', ax=axes[i, 1], multiple='stack')\n", 785 | " axes[i,1].set_title(f'{data_name[i+2]} vs Gender')\n", 786 | "\n", 787 | "fig.suptitle('Obesity_levels vs Gender')\n", 788 | "plt.tight_layout()\n", 789 | "plt.show()" 790 | ] 791 | }, 792 | { 793 | "cell_type": "code", 794 | "execution_count": null, 795 | "metadata": {}, 796 | "outputs": [], 797 | "source": [ 798 | "data_list = [df_ot_final, df_ow_final, df_n, df_In]\n", 799 | "\n", 800 | "data_name =[\"obesity_type\", \"over_weight_type\", \"normal\", \"Insufficient_Weight\" ]\n", 801 | "\n", 802 | "fig, axes = plt.subplots(figsize=(10,8),nrows = 2 , ncols =2)\n", 803 | "\n", 804 | "for i in range(2):\n", 805 | "\n", 806 | " sns.histplot(data=data_list[i], x='CALC', hue='NObeyesdad',palette= 'turbo', ax=axes[i, 0], multiple='stack')\n", 807 | " axes[i,0].set_title(f'{data_name[i]} vs CALC')\n", 808 | "\n", 809 | " sns.histplot(data=data_list[i+2], x='CALC', hue='NObeyesdad',palette= 'turbo', ax=axes[i, 1], multiple='stack')\n", 810 | " axes[i,1].set_title(f'{data_name[i+2]} vs CALC')\n", 811 | "\n", 812 | "fig.suptitle('Obesity_levels vs CALC')\n", 813 | "plt.tight_layout()\n", 814 | "plt.show()" 815 | ] 816 | }, 817 | { 818 | "cell_type": "code", 819 | "execution_count": null, 820 | "metadata": {}, 821 | "outputs": [], 822 | "source": [ 823 | "data_list = [df_ot_final, df_ow_final, df_n, df_In]\n", 824 | "\n", 825 | "data_name =[\"obesity_type\", \"over_weight_type\", \"normal\", \"Insufficient_Weight\" ]\n", 826 | "\n", 827 | "fig, axes = plt.subplots(figsize=(10,8),nrows = 2 , ncols =2)\n", 828 | "\n", 829 | "for i in range(2):\n", 830 | "\n", 831 | " sns.histplot(data=data_list[i], x='FAVC', hue='NObeyesdad',palette= 'turbo' ,ax=axes[i, 0], multiple='stack')\n", 832 | " axes[i,0].set_title(f'{data_name[i]} vs FAVC')\n", 833 | "\n", 834 | " sns.histplot(data=data_list[i+2], x='FAVC', hue='NObeyesdad', palette= 'turbo',ax=axes[i, 1], 
multiple='stack')\n", 835 | " axes[i,1].set_title(f'{data_name[i+2]} vs FAVC')\n", 836 | "\n", 837 | "fig.suptitle('Obesity_levels vs FAVC')\n", 838 | "plt.tight_layout()\n", 839 | "plt.show()" 840 | ] 841 | }, 842 | { 843 | "cell_type": "code", 844 | "execution_count": null, 845 | "metadata": {}, 846 | "outputs": [], 847 | "source": [ 848 | "data_list = [df_ot_final, df_ow_final, df_n, df_In]\n", 849 | "\n", 850 | "data_name =[\"obesity_type\", \"over_weight_type\", \"normal\", \"Insufficient_Weight\" ]\n", 851 | "\n", 852 | "fig, axes = plt.subplots(figsize=(10,8),nrows = 2 , ncols =2)\n", 853 | "\n", 854 | "for i in range(2):\n", 855 | "\n", 856 | " sns.histplot(data=data_list[i], x='SCC', hue='NObeyesdad',palette= 'turbo', ax=axes[i, 0], multiple='stack')\n", 857 | " axes[i,0].set_title(f'{data_name[i]} vs SCC')\n", 858 | "\n", 859 | " sns.histplot(data=data_list[i+2], x='SCC', hue='NObeyesdad',palette = \"turbo\" ,ax=axes[i, 1], multiple='stack')\n", 860 | " axes[i,1].set_title(f'{data_name[i+2]} vs SCC')\n", 861 | "\n", 862 | "fig.suptitle('Obesity_levels vs SCC')\n", 863 | "plt.tight_layout()\n", 864 | "plt.show()" 865 | ] 866 | }, 867 | { 868 | "cell_type": "code", 869 | "execution_count": null, 870 | "metadata": {}, 871 | "outputs": [], 872 | "source": [ 873 | "data_list = [df_ot_final, df_ow_final, df_n, df_In]\n", 874 | "\n", 875 | "data_name =[\"obesity_type\", \"over_weight_type\", \"normal\", \"Insufficient_Weight\" ]\n", 876 | "\n", 877 | "fig, axes = plt.subplots(figsize=(10,8),nrows = 2 , ncols =2)\n", 878 | "\n", 879 | "for i in range(2):\n", 880 | "\n", 881 | " sns.histplot(data=data_list[i], x='SMOKE', hue='NObeyesdad', palette= 'turbo',ax=axes[i, 0], multiple='stack')\n", 882 | " axes[i,0].set_title(f'{data_name[i]} vs SMOKE')\n", 883 | "\n", 884 | " sns.histplot(data=data_list[i+2], x='SMOKE', hue='NObeyesdad', palette= 'turbo',ax=axes[i, 1], multiple='stack')\n", 885 | " axes[i,1].set_title(f'{data_name[i+2]} vs SMOKE')\n", 886 | "\n", 887 | 
"fig.suptitle('Obesity_levels vs SMOKE')\n", 888 | "plt.tight_layout()\n", 889 | "plt.show()" 890 | ] 891 | }, 892 | { 893 | "cell_type": "code", 894 | "execution_count": null, 895 | "metadata": {}, 896 | "outputs": [], 897 | "source": [ 898 | "data_list = [df_ot_final, df_ow_final, df_n, df_In]\n", 899 | "\n", 900 | "data_name =[\"obesity_type\", \"over_weight_type\", \"normal\", \"Insufficient_Weight\" ]\n", 901 | "\n", 902 | "fig, axes = plt.subplots(figsize=(10,8),nrows = 2 , ncols =2)\n", 903 | "\n", 904 | "for i in range(2):\n", 905 | "\n", 906 | " sns.histplot(data=data_list[i], x='family_history_with_overweight', hue='NObeyesdad',palette= 'turbo', ax=axes[i, 0], multiple='stack')\n", 907 | " axes[i,0].set_title(f'{data_name[i]} vs family_history_with_overweight')\n", 908 | "\n", 909 | " sns.histplot(data=data_list[i+2], x='family_history_with_overweight', hue='NObeyesdad',palette= 'turbo', ax=axes[i, 1], multiple='stack')\n", 910 | " axes[i,1].set_title(f'{data_name[i+2]} vs family_history_with_overweight')\n", 911 | "\n", 912 | "fig.suptitle('Obesity_levels vs family_history_with_overweight')\n", 913 | "plt.tight_layout()\n", 914 | "plt.show()" 915 | ] 916 | }, 917 | { 918 | "cell_type": "code", 919 | "execution_count": null, 920 | "metadata": {}, 921 | "outputs": [], 922 | "source": [ 923 | "data_list = [df_ot_final, df_ow_final, df_n, df_In]\n", 924 | "\n", 925 | "data_name =[\"obesity_type\", \"over_weight_type\", \"normal\", \"Insufficient_Weight\" ]\n", 926 | "\n", 927 | "fig, axes = plt.subplots(figsize=(10,8),nrows = 2 , ncols =2)\n", 928 | "\n", 929 | "for i in range(2):\n", 930 | "\n", 931 | " sns.histplot(data=data_list[i], x='CAEC', hue='NObeyesdad', palette= 'turbo',ax=axes[i, 0], multiple='stack')\n", 932 | " axes[i,0].set_title(f'{data_name[i]} vs CAEC')\n", 933 | "\n", 934 | " sns.histplot(data=data_list[i+2], x='CAEC', hue='NObeyesdad', palette= 'turbo',ax=axes[i, 1], multiple='stack')\n", 935 | " axes[i,1].set_title(f'{data_name[i+2]} vs 
CAEC')\n", 936 | "\n", 937 | "fig.suptitle('Obesity_levels vs CAEC')\n", 938 | "plt.tight_layout()\n", 939 | "plt.show()" 940 | ] 941 | }, 942 | { 943 | "cell_type": "code", 944 | "execution_count": null, 945 | "metadata": {}, 946 | "outputs": [], 947 | "source": [ 948 | "data_list = [df_ot_final, df_ow_final, df_n, df_In]\n", 949 | "\n", 950 | "data_name =[\"obesity_type\", \"over_weight_type\", \"normal\", \"Insufficient_Weight\" ]\n", 951 | "\n", 952 | "fig, axes = plt.subplots(figsize=(15,8),nrows = 2 , ncols =2)\n", 953 | "\n", 954 | "for i in range(2):\n", 955 | "\n", 956 | " sns.histplot(data=data_list[i], x='MTRANS', hue='NObeyesdad', palette= 'turbo',ax=axes[i, 0], multiple='stack')\n", 957 | " axes[i,0].set_title(f'{data_name[i]} vs MTRANS')\n", 958 | "\n", 959 | " sns.histplot(data=data_list[i+2], x='MTRANS', hue='NObeyesdad',palette= 'turbo', ax=axes[i, 1], multiple='stack')\n", 960 | " axes[i,1].set_title(f'{data_name[i+2]} vs MTRANS')\n", 961 | "\n", 962 | "fig.suptitle('Obesity_levels vs MTRANS')\n", 963 | "plt.tight_layout()\n", 964 | "plt.show()" 965 | ] 966 | }, 967 | { 968 | "cell_type": "code", 969 | "execution_count": null, 970 | "metadata": {}, 971 | "outputs": [], 972 | "source": [ 973 | "data_list = [df_ot_final, df_ow_final, df_n, df_In]\n", 974 | "data_name =[\"obesity_type\", \"over_weight_type\", \"normal\", \"Insufficient_Weight\" ]\n", 975 | "\n", 976 | "fig,axes = plt.subplots(nrows = 2, ncols = 2, figsize = (10,8))\n", 977 | "\n", 978 | "for i in range(2):\n", 979 | "\n", 980 | " sns.kdeplot(ax = axes[i,0],data=data_list[i], x=\"Age\", hue=\"NObeyesdad\", fill =True)\n", 981 | " axes[i, 0].set_title(f'{data_name[i]} vs Age')\n", 982 | "\n", 983 | " sns.kdeplot(ax = axes[i,1],data=data_list[i+2], x=\"Age\", hue=\"NObeyesdad\", fill =True)\n", 984 | " axes[i, 1].set_title(f'{data_name[i+2]} vs Age')\n", 985 | "\n", 986 | "\n", 987 | "\n", 988 | "fig.suptitle('Obesity_levels vs Age')\n", 989 | "plt.tight_layout()\n", 990 | "plt.show()" 
991 | ] 992 | }, 993 | { 994 | "cell_type": "code", 995 | "execution_count": null, 996 | "metadata": {}, 997 | "outputs": [], 998 | "source": [ 999 | "data_list = [df_ot_final, df_ow_final, df_n, df_In]\n", 1000 | "data_name =[\"obesity_type\", \"over_weight_type\", \"normal\", \"Insufficient_Weight\" ]\n", 1001 | "\n", 1002 | "fig,axes = plt.subplots(nrows = 2, ncols = 2, figsize = (10,8))\n", 1003 | "\n", 1004 | "for i in range(2):\n", 1005 | "\n", 1006 | " sns.kdeplot(ax = axes[i,0],data=data_list[i], x=\"Height\", hue=\"NObeyesdad\", fill =True)\n", 1007 | " axes[i, 0].set_title(f'{data_name[i]} vs Height')\n", 1008 | "\n", 1009 | " sns.kdeplot(ax = axes[i,1],data=data_list[i+2], x=\"Height\", hue=\"NObeyesdad\", fill =True)\n", 1010 | " axes[i, 1].set_title(f'{data_name[i+2]} vs Height')\n", 1011 | "\n", 1012 | "\n", 1013 | "\n", 1014 | "fig.suptitle('Obesity_levels vs Height')\n", 1015 | "plt.tight_layout()\n", 1016 | "plt.show()" 1017 | ] 1018 | }, 1019 | { 1020 | "cell_type": "code", 1021 | "execution_count": null, 1022 | "metadata": {}, 1023 | "outputs": [], 1024 | "source": [ 1025 | "data_list = [df_ot_final, df_ow_final, df_n, df_In]\n", 1026 | "data_name =[\"obesity_type\", \"over_weight_type\", \"normal\", \"Insufficient_Weight\" ]\n", 1027 | "\n", 1028 | "fig,axes = plt.subplots(nrows = 2, ncols = 2, figsize = (10,8))\n", 1029 | "\n", 1030 | "for i in range(2):\n", 1031 | "\n", 1032 | " sns.kdeplot(ax = axes[i,0],data=data_list[i], x=\"Weight\", hue=\"NObeyesdad\", fill =True)\n", 1033 | " axes[i, 0].set_title(f'{data_name[i]} vs Weight')\n", 1034 | "\n", 1035 | " sns.kdeplot(ax = axes[i,1],data=data_list[i+2], x=\"Weight\", hue=\"NObeyesdad\", fill =True)\n", 1036 | " axes[i, 1].set_title(f'{data_name[i+2]} vs Weight')\n", 1037 | "\n", 1038 | "\n", 1039 | "\n", 1040 | "fig.suptitle('Obesity_levels vs Weight')\n", 1041 | "plt.tight_layout()\n", 1042 | "plt.show()" 1043 | ] 1044 | }, 1045 | { 1046 | "cell_type": "code", 1047 | "execution_count": null, 
1048 | "metadata": {}, 1049 | "outputs": [], 1050 | "source": [ 1051 | "data_list = [df_ot_final, df_ow_final, df_n, df_In]\n", 1052 | "data_name =[\"obesity_type\", \"over_weight_type\", \"normal\", \"Insufficient_Weight\" ]\n", 1053 | "\n", 1054 | "fig,axes = plt.subplots(nrows = 2, ncols = 2, figsize = (10,8))\n", 1055 | "\n", 1056 | "for i in range(2):\n", 1057 | "\n", 1058 | " sns.kdeplot(ax = axes[i,0],data=data_list[i], x=\"FCVC\", hue=\"NObeyesdad\", fill =True)\n", 1059 | " axes[i, 0].set_title(f'{data_name[i]} vs FCVC')\n", 1060 | "\n", 1061 | " sns.kdeplot(ax = axes[i,1],data=data_list[i+2], x=\"FCVC\", hue=\"NObeyesdad\", fill =True)\n", 1062 | " axes[i, 1].set_title(f'{data_name[i+2]} vs FCVC')\n", 1063 | "\n", 1064 | "\n", 1065 | "\n", 1066 | "fig.suptitle('Obesity_levels vs FCVC')\n", 1067 | "plt.tight_layout()\n", 1068 | "plt.show()" 1069 | ] 1070 | }, 1071 | { 1072 | "cell_type": "code", 1073 | "execution_count": null, 1074 | "metadata": {}, 1075 | "outputs": [], 1076 | "source": [ 1077 | "data_list = [df_ot_final, df_ow_final, df_n, df_In]\n", 1078 | "data_name =[\"obesity_type\", \"over_weight_type\", \"normal\", \"Insufficient_Weight\" ]\n", 1079 | "\n", 1080 | "fig,axes = plt.subplots(nrows = 2, ncols = 2, figsize = (10,8))\n", 1081 | "\n", 1082 | "for i in range(2):\n", 1083 | "\n", 1084 | " sns.kdeplot(ax = axes[i,0],data=data_list[i], x=\"NCP\", hue=\"NObeyesdad\", fill =True)\n", 1085 | " axes[i, 0].set_title(f'{data_name[i]} vs NCP')\n", 1086 | "\n", 1087 | " sns.kdeplot(ax = axes[i,1],data=data_list[i+2], x=\"NCP\", hue=\"NObeyesdad\", fill =True)\n", 1088 | " axes[i, 1].set_title(f'{data_name[i+2]} vs NCP')\n", 1089 | "\n", 1090 | "\n", 1091 | "\n", 1092 | "fig.suptitle('Obesity_levels vs NCP')\n", 1093 | "plt.tight_layout()\n", 1094 | "plt.show()" 1095 | ] 1096 | }, 1097 | { 1098 | "cell_type": "markdown", 1099 | "metadata": {}, 1100 | "source": [ 1101 | "### Data Pre-Process" 1102 | ] 1103 | }, 1104 | { 1105 | "cell_type": "code", 1106 
| "execution_count": null, 1107 | "metadata": {}, 1108 | "outputs": [], 1109 | "source": [ 1110 | "df1 = Obesity_Data.copy()" 1111 | ] 1112 | }, 1113 | { 1114 | "cell_type": "code", 1115 | "execution_count": null, 1116 | "metadata": {}, 1117 | "outputs": [], 1118 | "source": [ 1119 | "le = LabelEncoder()\n", 1120 | "le.fit(df1['NObeyesdad'])\n", 1121 | "df1['NObeyesdad'] = le.transform(df1['NObeyesdad'])\n", 1122 | "df1.loc[df1['family_history_with_overweight'] == 'no', 'family_history_with_overweight'] = 0\n", 1123 | "df1.loc[df1['family_history_with_overweight'] == 'yes', 'family_history_with_overweight'] = 1\n", 1124 | "df1.loc[df1['FAVC'] == 'no', 'FAVC'] = 0\n", 1125 | "df1.loc[df1['FAVC'] == 'yes', 'FAVC'] = 1\n", 1126 | "df1.loc[df1['CAEC'] == 'no', 'CAEC'] = 0\n", 1127 | "df1.loc[df1['CAEC'] == 'Sometimes', 'CAEC'] = 1\n", 1128 | "df1.loc[df1['CAEC'] == 'Frequently', 'CAEC'] = 2\n", 1129 | "df1.loc[df1['CAEC'] == 'Always', 'CAEC'] = 3\n", 1130 | "df1.loc[df1['SMOKE'] == 'no', 'SMOKE'] = 0\n", 1131 | "df1.loc[df1['SMOKE'] == 'yes', 'SMOKE'] = 1\n", 1132 | "df1.loc[df1['SCC'] == 'no', 'SCC'] = 0\n", 1133 | "df1.loc[df1['SCC'] == 'yes', 'SCC'] = 1\n", 1134 | "df1.loc[df1['CALC'] == 'no', 'CALC'] = 0\n", 1135 | "df1.loc[df1['CALC'] == 'Sometimes', 'CALC'] = 1\n", 1136 | "df1.loc[df1['CALC'] == 'Frequently', 'CALC'] = 2\n", 1137 | "df1.loc[df1['CALC'] == 'Always', 'CALC'] = 3\n", 1138 | "\n", 1139 | "df1 = pd.get_dummies(df1)\n", 1140 | "df1 = df1.astype('float64')" 1141 | ] 1142 | }, 1143 | { 1144 | "cell_type": "code", 1145 | "execution_count": null, 1146 | "metadata": {}, 1147 | "outputs": [], 1148 | "source": [ 1149 | "plt.figure(figsize=(20,12))\n", 1150 | "sns.heatmap(df1.corr(), annot = True, cmap = \"coolwarm\")\n", 1151 | "plt.title('The correlation among features',y= 1.05)\n", 1152 | "plt.show()" 1153 | ] 1154 | }, 1155 | { 1156 | "cell_type": "code", 1157 | "execution_count": null, 1158 | "metadata": {}, 1159 | "outputs": [], 1160 | "source": [ 1161 | 
"X = df1.drop(columns=['NObeyesdad'])\n", 1162 | "y = df1['NObeyesdad']\n", 1163 | "\n", 1164 | "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)" 1165 | ] 1166 | }, 1167 | { 1168 | "cell_type": "code", 1169 | "execution_count": null, 1170 | "metadata": {}, 1171 | "outputs": [], 1172 | "source": [ 1173 | "scaler = StandardScaler()\n", 1174 | "X_train_scaled = scaler.fit_transform(X_train)\n", 1175 | "X_test_scaled = scaler.transform(X_test)\n" 1176 | ] 1177 | }, 1178 | { 1179 | "cell_type": "code", 1180 | "execution_count": null, 1181 | "metadata": {}, 1182 | "outputs": [], 1183 | "source": [ 1184 | "X_train_tensor = torch.tensor(X_train_scaled, dtype=torch.float32)\n", 1185 | "X_test_tensor = torch.tensor(X_test_scaled, dtype=torch.float32)\n", 1186 | "y_train_tensor = torch.tensor(y_train, dtype=torch.long)\n", 1187 | "y_test_tensor = torch.tensor(y_test.to_numpy(), dtype=torch.long)" 1188 | ] 1189 | }, 1190 | { 1191 | "cell_type": "markdown", 1192 | "metadata": {}, 1193 | "source": [ 1194 | "### Model Architecture" 1195 | ] 1196 | }, 1197 | { 1198 | "cell_type": "code", 1199 | "execution_count": null, 1200 | "metadata": {}, 1201 | "outputs": [], 1202 | "source": [ 1203 | "class Model(nn.Module):\n", 1204 | " def __init__(self, input_size, hidden_size, num_classes):\n", 1205 | " super(Model, self).__init__()\n", 1206 | " self.fc1 = nn.Linear(input_size, hidden_size)\n", 1207 | " self.relu = nn.ReLU()\n", 1208 | " self.dropout1 = nn.Dropout(0.2)\n", 1209 | " self.fc2 = nn.Linear(hidden_size, hidden_size)\n", 1210 | " self.dropout2 = nn.Dropout(0.2)\n", 1211 | " self.fc3 = nn.Linear(hidden_size, num_classes)\n", 1212 | "\n", 1213 | " def forward(self, x):\n", 1214 | " out = self.fc1(x)\n", 1215 | " out = self.relu(out)\n", 1216 | " out = self.dropout1(out)\n", 1217 | " out = self.fc2(out)\n", 1218 | " out = self.relu(out)\n", 1219 | " out = self.dropout2(out)\n", 1220 | " out = self.fc3(out)\n", 1221 | " return out" 1222 | ] 
1223 | }, 1224 | { 1225 | "cell_type": "code", 1226 | "execution_count": null, 1227 | "metadata": {}, 1228 | "outputs": [], 1229 | "source": [ 1230 | "input_size = X_train_tensor.shape[1]\n", 1231 | "hidden_size = 128\n", 1232 | "num_classes = 7\n", 1233 | "learning_rate = 0.001\n", 1234 | "num_epochs = 100\n", 1235 | "batch_size = 256" 1236 | ] 1237 | }, 1238 | { 1239 | "cell_type": "code", 1240 | "execution_count": null, 1241 | "metadata": {}, 1242 | "outputs": [], 1243 | "source": [ 1244 | "model = Model(input_size, hidden_size, num_classes)\n", 1245 | "criterion = nn.CrossEntropyLoss()\n", 1246 | "optimizer = optim.Adam(model.parameters(), lr=learning_rate)" 1247 | ] 1248 | }, 1249 | { 1250 | "cell_type": "code", 1251 | "execution_count": null, 1252 | "metadata": {}, 1253 | "outputs": [], 1254 | "source": [ 1255 | "train_dataset = TensorDataset(X_train_tensor, y_train_tensor)\n", 1256 | "train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)" 1257 | ] 1258 | }, 1259 | { 1260 | "cell_type": "markdown", 1261 | "metadata": {}, 1262 | "source": [ 1263 | "### Training" 1264 | ] 1265 | }, 1266 | { 1267 | "cell_type": "code", 1268 | "execution_count": null, 1269 | "metadata": {}, 1270 | "outputs": [], 1271 | "source": [ 1272 | "for epoch in range(num_epochs):\n    model.train()  # re-enable dropout: model.eval() at the end of the previous epoch would otherwise persist\n", 1273 | "    running_loss = 0.0\n", 1274 | "    for i, (inputs, labels) in enumerate(train_loader, 1):\n", 1275 | "        optimizer.zero_grad()\n", 1276 | "        outputs = model(inputs)\n", 1277 | "        loss = criterion(outputs, labels)\n", 1278 | "        loss.backward()\n", 1279 | "        optimizer.step()\n", 1280 | "\n", 1281 | "        running_loss += loss.item()\n", 1282 | "\n", 1283 | "    epoch_loss = running_loss / len(train_loader)\n", 1284 | "    model.eval()\n", 1285 | "    with torch.no_grad():\n", 1286 | "        outputs = model(X_test_tensor)\n", 1287 | "        test_loss = criterion(outputs, y_test_tensor)\n", 1288 | "    print(f'Epoch [{epoch + 1}/{num_epochs}] | Train Loss: {epoch_loss:.4f} | Test Loss: {test_loss.item():.4f}')" 1289 | 
] 1290 | }, 1291 | { 1292 | "cell_type": "markdown", 1293 | "metadata": {}, 1294 | "source": [ 1295 | "### Evaluation" 1296 | ] 1297 | }, 1298 | { 1299 | "cell_type": "code", 1300 | "execution_count": null, 1301 | "metadata": {}, 1302 | "outputs": [], 1303 | "source": [ 1304 | "with torch.no_grad():\n", 1305 | "    outputs = model(X_test_tensor)\n", 1306 | "    _, predicted = torch.max(outputs, 1)\n", 1307 | "    test_accuracy = metrics.accuracy_score(y_test_tensor.numpy(), predicted.numpy())\n", 1308 | "\n", 1309 | "print(f'Test accuracy: {test_accuracy}')" 1310 | ] 1311 | } 1312 | ], 1313 | "metadata": { 1314 | "language_info": { 1315 | "name": "python" 1316 | } 1317 | }, 1318 | "nbformat": 4, 1319 | "nbformat_minor": 2 1320 | } 1321 | -------------------------------------------------------------------------------- /Assignments/Assignment 5/iAAA - Assignment 5 - En.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 5/iAAA - Assignment 5 - En.pdf -------------------------------------------------------------------------------- /Assignments/Assignment 5/iAAA - Assignment 5 - Per.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 5/iAAA - Assignment 5 - Per.pdf -------------------------------------------------------------------------------- /Assignments/Assignment 6/iAAA - Assignment 6 - En.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 6/iAAA - Assignment 6 - En.pdf -------------------------------------------------------------------------------- /Assignments/Assignment 6/iAAA - Assignment 6 - Per.pdf: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 6/iAAA - Assignment 6 - Per.pdf -------------------------------------------------------------------------------- /Assignments/Assignment 7/iAAA - Assignment 7 - En.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 7/iAAA - Assignment 7 - En.pdf -------------------------------------------------------------------------------- /Assignments/Assignment 7/iAAA - Assignment 7 - Per.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Assignments/Assignment 7/iAAA - Assignment 7 - Per.pdf -------------------------------------------------------------------------------- /Assignments/Template.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Assignment 1\n", 8 | "\n", 9 | "Student Name: " 10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "## Insurance Dataset" 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "metadata": {}, 22 | "source": [ 23 | "### Data Preprocess" 24 | ] 25 | }, 26 | { 27 | "cell_type": "markdown", 28 | "metadata": {}, 29 | "source": [ 30 | "## Diabetes Dataset" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": null, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [] 39 | } 40 | ], 41 | "metadata": { 42 | "language_info": { 43 | "name": "python" 44 | } 45 | }, 46 | "nbformat": 4, 47 | "nbformat_minor": 2 48 | } 49 | 
-------------------------------------------------------------------------------- /CNN/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/CNN/.DS_Store -------------------------------------------------------------------------------- /Image Processing/image1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Image Processing/image1.jpg -------------------------------------------------------------------------------- /Image Processing/image2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Image Processing/image2.jpg -------------------------------------------------------------------------------- /Neural Network/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/Neural Network/.DS_Store -------------------------------------------------------------------------------- /Neural Network/NN.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Neural Networks Notebook\n", 8 | "\n", 9 | "__Design & Develop:__ Mobin Nesari" 10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "---" 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "metadata": {}, 22 | "source": [ 23 | "## Importing Required Libraries" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": 1, 29 | "metadata": {}, 30 | "outputs": [], 31 | "source": [ 32 | "# 
Importing necessary libraries\n", 33 | "import torch\n", 34 | "import torch.nn as nn\n", 35 | "import torch.optim as optim\n", 36 | "from sklearn.datasets import load_iris\n", 37 | "from sklearn.model_selection import train_test_split\n", 38 | "from sklearn.preprocessing import StandardScaler\n", 39 | "from sklearn.metrics import accuracy_score\n" 40 | ] 41 | }, 42 | { 43 | "cell_type": "markdown", 44 | "metadata": {}, 45 | "source": [ 46 | "## Load Iris Dataset" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": 3, 52 | "metadata": {}, 53 | "outputs": [], 54 | "source": [ 55 | "# Load Iris dataset\n", 56 | "iris = load_iris()\n", 57 | "X = iris.data\n", 58 | "y = iris.target" 59 | ] 60 | }, 61 | { 62 | "cell_type": "markdown", 63 | "metadata": {}, 64 | "source": [ 65 | "## Dataset Splitting" 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "execution_count": 4, 71 | "metadata": {}, 72 | "outputs": [], 73 | "source": [ 74 | "# Split dataset into training and testing sets\n", 75 | "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)" 76 | ] 77 | }, 78 | { 79 | "cell_type": "markdown", 80 | "metadata": {}, 81 | "source": [ 82 | "## Scaling" 83 | ] 84 | }, 85 | { 86 | "cell_type": "code", 87 | "execution_count": 5, 88 | "metadata": {}, 89 | "outputs": [], 90 | "source": [ 91 | "# Standardize features\n", 92 | "scaler = StandardScaler()\n", 93 | "X_train = scaler.fit_transform(X_train)\n", 94 | "X_test = scaler.transform(X_test)" 95 | ] 96 | }, 97 | { 98 | "cell_type": "markdown", 99 | "metadata": {}, 100 | "source": [ 101 | "## Converting Numpy arrays to PyTorch Tensors" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": 6, 107 | "metadata": {}, 108 | "outputs": [], 109 | "source": [ 110 | "# Convert numpy arrays to PyTorch tensors\n", 111 | "X_train = torch.tensor(X_train, dtype=torch.float32)\n", 112 | "X_test = torch.tensor(X_test, dtype=torch.float32)\n", 113 | "y_train = 
torch.tensor(y_train, dtype=torch.int64)\n", 114 | "y_test = torch.tensor(y_test, dtype=torch.int64)\n" 115 | ] 116 | }, 117 | { 118 | "cell_type": "markdown", 119 | "metadata": {}, 120 | "source": [ 121 | "## Model Definition" 122 | ] 123 | }, 124 | { 125 | "cell_type": "code", 126 | "execution_count": 7, 127 | "metadata": {}, 128 | "outputs": [], 129 | "source": [ 130 | "# Define MLP model\n", 131 | "class MLP(nn.Module):\n", 132 | " def __init__(self, input_dim, hidden_dim, output_dim):\n", 133 | " super(MLP, self).__init__()\n", 134 | " self.fc1 = nn.Linear(input_dim, hidden_dim)\n", 135 | " self.fc2 = nn.Linear(hidden_dim, output_dim)\n", 136 | "\n", 137 | " def forward(self, x):\n", 138 | " x = torch.relu(self.fc1(x))\n", 139 | " x = self.fc2(x)\n", 140 | " return x" 141 | ] 142 | }, 143 | { 144 | "cell_type": "markdown", 145 | "metadata": {}, 146 | "source": [ 147 | "## Setting Parameters" 148 | ] 149 | }, 150 | { 151 | "cell_type": "code", 152 | "execution_count": 8, 153 | "metadata": {}, 154 | "outputs": [], 155 | "source": [ 156 | "# Initialize model, loss function, and optimizer\n", 157 | "input_dim = X_train.shape[1]\n", 158 | "hidden_dim = 8\n", 159 | "output_dim = 3\n", 160 | "model = MLP(input_dim, hidden_dim, output_dim)\n", 161 | "criterion = nn.CrossEntropyLoss()\n", 162 | "optimizer = optim.Adam(model.parameters(), lr=0.01)" 163 | ] 164 | }, 165 | { 166 | "cell_type": "markdown", 167 | "metadata": {}, 168 | "source": [ 169 | "## Training Phase" 170 | ] 171 | }, 172 | { 173 | "cell_type": "code", 174 | "execution_count": 9, 175 | "metadata": {}, 176 | "outputs": [ 177 | { 178 | "name": "stdout", 179 | "output_type": "stream", 180 | "text": [ 181 | "Epoch [10/100], Loss: 0.9215\n", 182 | "Epoch [20/100], Loss: 0.6476\n", 183 | "Epoch [30/100], Loss: 0.4692\n", 184 | "Epoch [40/100], Loss: 0.3811\n", 185 | "Epoch [50/100], Loss: 0.3331\n", 186 | "Epoch [60/100], Loss: 0.2958\n", 187 | "Epoch [70/100], Loss: 0.2592\n", 188 | "Epoch [80/100], Loss: 
0.2167\n", 189 | "Epoch [90/100], Loss: 0.1668\n", 190 | "Epoch [100/100], Loss: 0.1248\n" 191 | ] 192 | } 193 | ], 194 | "source": [ 195 | "# Training loop\n", 196 | "epochs = 100\n", 197 | "for epoch in range(epochs):\n", 198 | " optimizer.zero_grad()\n", 199 | " outputs = model(X_train)\n", 200 | " loss = criterion(outputs, y_train)\n", 201 | " loss.backward()\n", 202 | " optimizer.step()\n", 203 | " \n", 204 | " if (epoch+1) % 10 == 0:\n", 205 | " print(f'Epoch [{epoch+1}/{epochs}], Loss: {loss.item():.4f}')\n" 206 | ] 207 | }, 208 | { 209 | "cell_type": "markdown", 210 | "metadata": {}, 211 | "source": [ 212 | "## Testing Phase" 213 | ] 214 | }, 215 | { 216 | "cell_type": "code", 217 | "execution_count": 10, 218 | "metadata": {}, 219 | "outputs": [ 220 | { 221 | "name": "stdout", 222 | "output_type": "stream", 223 | "text": [ 224 | "Accuracy on test set: 1.0000\n" 225 | ] 226 | } 227 | ], 228 | "source": [ 229 | "# Evaluation\n", 230 | "with torch.no_grad():\n", 231 | " model.eval()\n", 232 | " outputs = model(X_test)\n", 233 | " _, predicted = torch.max(outputs, 1)\n", 234 | " accuracy = accuracy_score(y_test.numpy(), predicted.numpy())\n", 235 | " print(f'Accuracy on test set: {accuracy:.4f}')" 236 | ] 237 | }, 238 | { 239 | "cell_type": "code", 240 | "execution_count": null, 241 | "metadata": {}, 242 | "outputs": [], 243 | "source": [] 244 | } 245 | ], 246 | "metadata": { 247 | "kernelspec": { 248 | "display_name": "Python 3", 249 | "language": "python", 250 | "name": "python3" 251 | }, 252 | "language_info": { 253 | "codemirror_mode": { 254 | "name": "ipython", 255 | "version": 3 256 | }, 257 | "file_extension": ".py", 258 | "mimetype": "text/x-python", 259 | "name": "python", 260 | "nbconvert_exporter": "python", 261 | "pygments_lexer": "ipython3", 262 | "version": "3.10.7" 263 | } 264 | }, 265 | "nbformat": 4, 266 | "nbformat_minor": 2 267 | } 268 | -------------------------------------------------------------------------------- /README.md: 
-------------------------------------------------------------------------------- 1 | جایزه سالانه هوش مصنوعی ایران، یک مسابقه از سری مسابقات مسئله-‌محور ستاد توسعه علوم اعصاب شناختی و بنیاد ملی نخبگان (در قالب طرح شهید بابایی)،آزمایشگاه ملی نقشه برداری مغز با همکاری چندین و چند نهاد دولتی دیگر است که باهدف ایجاد رقابتی پرنشاط و سالم، آموزش علاقه‌مندان به این فناوری و پرورش آنان در کشور شکل‌گرفته است. این مسابقه در نظر دارد طی یک فرایند برنامه‌ریزی شده، علاوه بر ارائه آموزش‌های لازم به علاقه‌مندان، با طرح موضوعات چالش برانگیز و واقعی حوزه سلامت، کاربرد هوش مصنوعی را به ایشان آموزش ‌دهد. بدین‌وسیله، علاوه بر پیشبرد اهداف ترویجی خود، به حل مسائل کشور نیز کمک می‌کند. در پایان مسابقه، فارغ از جوایز نقدی و غیرنقدی، تیم‌ها می‌توانند یکی از کاربردهای جذاب هوش مصنوعی در حوزه‌‏های مذکور را تجربه کنند و از ظرفیت‌های جانبی فراهم شده نیز استفاده نمایند. از اهداف برگزاری این مسابقه می‌توان به توسعه مهارت‌ در حوزه علم داده، یادگیری ماشین و تحلیل داده، کمک به ورود ابزارهای هوش مصنوعی به حوزه سلامت، شتاب‌دهی به تکنولوژی‌های نوظهور در سلامت، پشتیبانی از دستاوردهای شرکت‌کنندگان تا دستیابی به محصول، هدایت و آموزش تیم‌های شرکت‌کننده به منظور رفع مسائل تجاری، آموزش و توانمندسازی نیروهای مستعد برتر، پرورش علاقه‌مندان برای ورود به بازار کار، ایجاد تیم‌های چند دانشی و افزایش روحیه کارآفرینی اشاره کرد. 
-------------------------------------------------------------------------------- /RNN/RNN.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/iAAA-event/Machine-learning/6a6525852ad04ed6e467763e5b8d9acc4fecf755/RNN/RNN.pdf -------------------------------------------------------------------------------- /Torch Enhancement/data_tutorial.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": false 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# For tips on running notebooks in Google Colab, see\n", 12 | "# https://pytorch.org/tutorials/beginner/colab\n", 13 | "%matplotlib inline" 14 | ] 15 | }, 16 | { 17 | "cell_type": "markdown", 18 | "metadata": {}, 19 | "source": [ 20 | "Datasets & DataLoaders\n", 21 | "======================\n" 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "Code for processing data samples can get messy and hard to maintain; we\n", 29 | "ideally want our dataset code to be decoupled from our model training\n", 30 | "code for better readability and modularity. PyTorch provides two data\n", 31 | "primitives: `torch.utils.data.DataLoader` and `torch.utils.data.Dataset`\n", 32 | "that allow you to use pre-loaded datasets as well as your own data.\n", 33 | "`Dataset` stores the samples and their corresponding labels, and\n", 34 | "`DataLoader` wraps an iterable around the `Dataset` to enable easy\n", 35 | "access to the samples.\n", 36 | "\n", 37 | "PyTorch domain libraries provide a number of pre-loaded datasets (such\n", 38 | "as FashionMNIST) that subclass `torch.utils.data.Dataset` and implement\n", 39 | "functions specific to the particular data. They can be used to prototype\n", 40 | "and benchmark your model. 
You can find them here: [Image\n", 41 | "Datasets](https://pytorch.org/vision/stable/datasets.html), [Text\n", 42 | "Datasets](https://pytorch.org/text/stable/datasets.html), and [Audio\n", 43 | "Datasets](https://pytorch.org/audio/stable/datasets.html)\n" 44 | ] 45 | }, 46 | { 47 | "cell_type": "markdown", 48 | "metadata": {}, 49 | "source": [ 50 | "Loading a Dataset\n", 51 | "=================\n", 52 | "\n", 53 | "Here is an example of how to load the\n", 54 | "[Fashion-MNIST](https://research.zalando.com/project/fashion_mnist/fashion_mnist/)\n", 55 | "dataset from TorchVision. Fashion-MNIST is a dataset of Zalando's\n", 56 | "article images consisting of 60,000 training examples and 10,000 test\n", 57 | "examples. Each example comprises a 28×28 grayscale image and an\n", 58 | "associated label from one of 10 classes.\n", 59 | "\n", 60 | "We load the [FashionMNIST Dataset](https://pytorch.org/vision/stable/datasets.html#fashion-mnist) with the following parameters:\n", 61 | "\n", 62 | ": - `root` is the path where the train/test data is stored,\n", 63 | " - `train` specifies training or test dataset,\n", 64 | " - `download=True` downloads the data from the internet if it\\'s\n", 65 | " not available at `root`.\n", 66 | " - `transform` and `target_transform` specify the feature and label\n", 67 | " transformations\n" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": null, 73 | "metadata": { 74 | "collapsed": false 75 | }, 76 | "outputs": [], 77 | "source": [ 78 | "import torch\n", 79 | "from torch.utils.data import Dataset\n", 80 | "from torchvision import datasets\n", 81 | "from torchvision.transforms import ToTensor\n", 82 | "import matplotlib.pyplot as plt\n", 83 | "\n", 84 | "\n", 85 | "training_data = datasets.FashionMNIST(\n", 86 | " root=\"data\",\n", 87 | " train=True,\n", 88 | " download=True,\n", 89 | " transform=ToTensor()\n", 90 | ")\n", 91 | "\n", 92 | "test_data = datasets.FashionMNIST(\n", 93 | " root=\"data\",\n", 94 | " 
train=False,\n", 95 | " download=True,\n", 96 | " transform=ToTensor()\n", 97 | ")" 98 | ] 99 | }, 100 | { 101 | "cell_type": "markdown", 102 | "metadata": {}, 103 | "source": [ 104 | "Iterating and Visualizing the Dataset\n", 105 | "=====================================\n", 106 | "\n", 107 | "We can index `Datasets` manually like a list: `training_data[index]`. We\n", 108 | "use `matplotlib` to visualize some samples in our training data.\n" 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": null, 114 | "metadata": { 115 | "collapsed": false 116 | }, 117 | "outputs": [], 118 | "source": [ 119 | "labels_map = {\n", 120 | " 0: \"T-Shirt\",\n", 121 | " 1: \"Trouser\",\n", 122 | " 2: \"Pullover\",\n", 123 | " 3: \"Dress\",\n", 124 | " 4: \"Coat\",\n", 125 | " 5: \"Sandal\",\n", 126 | " 6: \"Shirt\",\n", 127 | " 7: \"Sneaker\",\n", 128 | " 8: \"Bag\",\n", 129 | " 9: \"Ankle Boot\",\n", 130 | "}\n", 131 | "figure = plt.figure(figsize=(8, 8))\n", 132 | "cols, rows = 3, 3\n", 133 | "for i in range(1, cols * rows + 1):\n", 134 | " sample_idx = torch.randint(len(training_data), size=(1,)).item()\n", 135 | " img, label = training_data[sample_idx]\n", 136 | " figure.add_subplot(rows, cols, i)\n", 137 | " plt.title(labels_map[label])\n", 138 | " plt.axis(\"off\")\n", 139 | " plt.imshow(img.squeeze(), cmap=\"gray\")\n", 140 | "plt.show()" 141 | ] 142 | }, 143 | { 144 | "cell_type": "markdown", 145 | "metadata": {}, 146 | "source": [ 147 | "------------------------------------------------------------------------\n" 148 | ] 149 | }, 150 | { 151 | "cell_type": "markdown", 152 | "metadata": {}, 153 | "source": [ 154 | "Creating a Custom Dataset for your files\n", 155 | "========================================\n", 156 | "\n", 157 | "A custom Dataset class must implement three functions:\n", 158 | "[\\_\\_init\\_\\_]{.title-ref}, [\\_\\_len\\_\\_]{.title-ref}, and\n", 159 | "[\\_\\_getitem\\_\\_]{.title-ref}. 
Take a look at this implementation; the\n", 160 | "FashionMNIST images are stored in a directory `img_dir`, and their\n", 161 | "labels are stored separately in a CSV file `annotations_file`.\n", 162 | "\n", 163 | "In the next sections, we\\'ll break down what\\'s happening in each of\n", 164 | "these functions.\n" 165 | ] 166 | }, 167 | { 168 | "cell_type": "code", 169 | "execution_count": null, 170 | "metadata": { 171 | "collapsed": false 172 | }, 173 | "outputs": [], 174 | "source": [ 175 | "import os\n", 176 | "import pandas as pd\n", 177 | "from torchvision.io import read_image\n", 178 | "\n", 179 | "class CustomImageDataset(Dataset):\n", 180 | " def __init__(self, annotations_file, img_dir, transform=None, target_transform=None):\n", 181 | " self.img_labels = pd.read_csv(annotations_file)\n", 182 | " self.img_dir = img_dir\n", 183 | " self.transform = transform\n", 184 | " self.target_transform = target_transform\n", 185 | "\n", 186 | " def __len__(self):\n", 187 | " return len(self.img_labels)\n", 188 | "\n", 189 | " def __getitem__(self, idx):\n", 190 | " img_path = os.path.join(self.img_dir, self.img_labels.iloc[idx, 0])\n", 191 | " image = read_image(img_path)\n", 192 | " label = self.img_labels.iloc[idx, 1]\n", 193 | " if self.transform:\n", 194 | " image = self.transform(image)\n", 195 | " if self.target_transform:\n", 196 | " label = self.target_transform(label)\n", 197 | " return image, label" 198 | ] 199 | }, 200 | { 201 | "cell_type": "markdown", 202 | "metadata": {}, 203 | "source": [ 204 | "`__init__`\n", 205 | "==========\n", 206 | "\n", 207 | "The \\_\\_[init]() function is run once when instantiating the Dataset\n", 208 | "object. 
We initialize the directory containing the images, the\n", 209 | "annotations file, and both transforms (covered in more detail in the\n", 210 | "next section).\n", 211 | "\n", 212 | "The labels.csv file looks like: :\n", 213 | "\n", 214 | " tshirt1.jpg, 0\n", 215 | " tshirt2.jpg, 0\n", 216 | " ......\n", 217 | " ankleboot999.jpg, 9\n" 218 | ] 219 | }, 220 | { 221 | "cell_type": "code", 222 | "execution_count": null, 223 | "metadata": { 224 | "collapsed": false 225 | }, 226 | "outputs": [], 227 | "source": [ 228 | "def __init__(self, annotations_file, img_dir, transform=None, target_transform=None):\n", 229 | " self.img_labels = pd.read_csv(annotations_file)\n", 230 | " self.img_dir = img_dir\n", 231 | " self.transform = transform\n", 232 | " self.target_transform = target_transform" 233 | ] 234 | }, 235 | { 236 | "cell_type": "markdown", 237 | "metadata": {}, 238 | "source": [ 239 | "`__len__`\n", 240 | "=========\n", 241 | "\n", 242 | "The \\_\\_[len]() function returns the number of samples in our dataset.\n", 243 | "\n", 244 | "Example:\n" 245 | ] 246 | }, 247 | { 248 | "cell_type": "code", 249 | "execution_count": null, 250 | "metadata": { 251 | "collapsed": false 252 | }, 253 | "outputs": [], 254 | "source": [ 255 | "def __len__(self):\n", 256 | " return len(self.img_labels)" 257 | ] 258 | }, 259 | { 260 | "cell_type": "markdown", 261 | "metadata": {}, 262 | "source": [ 263 | "`__getitem__`\n", 264 | "=============\n", 265 | "\n", 266 | "The \\_\\_[getitem]() function loads and returns a sample from the dataset\n", 267 | "at the given index `idx`. 
Based on the index, it identifies the image\\'s\n", 268 | "location on disk, converts that to a tensor using `read_image`,\n", 269 | "retrieves the corresponding label from the csv data in\n", 270 | "`self.img_labels`, calls the transform functions on them (if\n", 271 | "applicable), and returns the tensor image and corresponding label in a\n", 272 | "tuple.\n" 273 | ] 274 | }, 275 | { 276 | "cell_type": "code", 277 | "execution_count": null, 278 | "metadata": { 279 | "collapsed": false 280 | }, 281 | "outputs": [], 282 | "source": [ 283 | "def __getitem__(self, idx):\n", 284 | " img_path = os.path.join(self.img_dir, self.img_labels.iloc[idx, 0])\n", 285 | " image = read_image(img_path)\n", 286 | " label = self.img_labels.iloc[idx, 1]\n", 287 | " if self.transform:\n", 288 | " image = self.transform(image)\n", 289 | " if self.target_transform:\n", 290 | " label = self.target_transform(label)\n", 291 | " return image, label" 292 | ] 293 | }, 294 | { 295 | "cell_type": "markdown", 296 | "metadata": {}, 297 | "source": [ 298 | "------------------------------------------------------------------------\n" 299 | ] 300 | }, 301 | { 302 | "cell_type": "markdown", 303 | "metadata": {}, 304 | "source": [ 305 | "Preparing your data for training with DataLoaders\n", 306 | "=================================================\n", 307 | "\n", 308 | "The `Dataset` retrieves our dataset\\'s features and labels one sample at\n", 309 | "a time. 
While training a model, we typically want to pass samples in\n", 310 | "\\\"minibatches\\\", reshuffle the data at every epoch to reduce model\n", 311 | "overfitting, and use Python\\'s `multiprocessing` to speed up data\n", 312 | "retrieval.\n", 313 | "\n", 314 | "`DataLoader` is an iterable that abstracts this complexity for us in an\n", 315 | "easy API.\n" 316 | ] 317 | }, 318 | { 319 | "cell_type": "code", 320 | "execution_count": null, 321 | "metadata": { 322 | "collapsed": false 323 | }, 324 | "outputs": [], 325 | "source": [ 326 | "from torch.utils.data import DataLoader\n", 327 | "\n", 328 | "train_dataloader = DataLoader(training_data, batch_size=64, shuffle=True)\n", 329 | "test_dataloader = DataLoader(test_data, batch_size=64, shuffle=True)" 330 | ] 331 | }, 332 | { 333 | "cell_type": "markdown", 334 | "metadata": {}, 335 | "source": [ 336 | "Iterate through the DataLoader\n", 337 | "==============================\n", 338 | "\n", 339 | "We have loaded that dataset into the `DataLoader` and can iterate\n", 340 | "through the dataset as needed. Each iteration below returns a batch of\n", 341 | "`train_features` and `train_labels` (containing `batch_size=64` features\n", 342 | "and labels respectively). 
Because we specified `shuffle=True`, after we\n", 343 | "iterate over all batches the data is shuffled (for finer-grained control\n", 344 | "over the data loading order, take a look at\n", 345 | "[Samplers](https://pytorch.org/docs/stable/data.html#data-loading-order-and-sampler)).\n" 346 | ] 347 | }, 348 | { 349 | "cell_type": "code", 350 | "execution_count": null, 351 | "metadata": { 352 | "collapsed": false 353 | }, 354 | "outputs": [], 355 | "source": [ 356 | "# Display image and label.\n", 357 | "train_features, train_labels = next(iter(train_dataloader))\n", 358 | "print(f\"Feature batch shape: {train_features.size()}\")\n", 359 | "print(f\"Labels batch shape: {train_labels.size()}\")\n", 360 | "img = train_features[0].squeeze()\n", 361 | "label = train_labels[0]\n", 362 | "plt.imshow(img, cmap=\"gray\")\n", 363 | "plt.show()\n", 364 | "print(f\"Label: {label}\")" 365 | ] 366 | }, 367 | { 368 | "cell_type": "markdown", 369 | "metadata": {}, 370 | "source": [ 371 | "------------------------------------------------------------------------\n" 372 | ] 373 | }, 374 | { 375 | "cell_type": "markdown", 376 | "metadata": {}, 377 | "source": [ 378 | "Further Reading\n", 379 | "===============\n", 380 | "\n", 381 | "- [torch.utils.data API](https://pytorch.org/docs/stable/data.html)\n" 382 | ] 383 | } 384 | ], 385 | "metadata": { 386 | "kernelspec": { 387 | "display_name": "Python 3", 388 | "language": "python", 389 | "name": "python3" 390 | }, 391 | "language_info": { 392 | "codemirror_mode": { 393 | "name": "ipython", 394 | "version": 3 395 | }, 396 | "file_extension": ".py", 397 | "mimetype": "text/x-python", 398 | "name": "python", 399 | "nbconvert_exporter": "python", 400 | "pygments_lexer": "ipython3", 401 | "version": "3.10.14" 402 | } 403 | }, 404 | "nbformat": 4, 405 | "nbformat_minor": 0 406 | } 407 | --------------------------------------------------------------------------------