├── ANN_for_Beginners ├── .ipynb_checkpoints │ └── ANN for Beginners (Predict House Price)-checkpoint.ipynb ├── ANN for Beginners (Predict House Price).ipynb ├── Artificial Neural Network for Beginners.pptx └── housepricedata.csv ├── Feature_Selection ├── .ipynb_checkpoints │ └── Feature_Selection_Techniques-checkpoint.ipynb └── Feature_Selection_Techniques.ipynb ├── Logistic_Regression ├── Basic_Logistic_Model_Week_4_Day_2.ipynb ├── logistic_regression.ipynb └── multiclass_logistic_regression.ipynb ├── Model_Optimisation ├── .gitignore ├── data │ ├── loan_prediction.csv │ ├── titanic_clean.csv │ ├── titanic_test.csv │ └── titanic_train.csv ├── images │ ├── rf1.png │ ├── rf2.gif │ ├── rf3.png │ ├── rf4.png │ ├── rf5.png │ ├── rf6.png │ ├── rf7.png │ └── rf8.png └── notebooks │ ├── config.json │ └── random_forest_slides.ipynb ├── Performance_Evaluation ├── .ipynb_checkpoints │ └── Performance_Evaluation-checkpoint.ipynb ├── Performance_Evaluation.ipynb ├── Standard Metropolitan Areas Data - train_data.csv ├── diabetes.txt └── winequality.csv ├── README.md ├── Random_Forest ├── Random_Forest_for_Beginners.ipynb └── model_optimization_with_random_forest.ipynb ├── Titanic_Dataset_Improvised_Model ├── .ipynb_checkpoints │ └── ML_with_titanic_data_improvised_Julian-checkpoint.ipynb └── ML_with_titanic_data_improvised_Julian.ipynb └── logistic_regression.ipynb /ANN_for_Beginners/Artificial Neural Network for Beginners.pptx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aiplanethub/ML_Models/0c3ee0dae4de4091c0867f8398171f6b8d7ad6c9/ANN_for_Beginners/Artificial Neural Network for Beginners.pptx -------------------------------------------------------------------------------- /Logistic_Regression/Basic_Logistic_Model_Week_4_Day_2.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import pandas as pd\n", 10 | "import numpy as np\n", 11 | "from sklearn.model_selection import train_test_split\n", 12 | "from sklearn.linear_model import LogisticRegression\n", 13 | "from sklearn.preprocessing import LabelEncoder\n", 14 | "import warnings\n", 15 | "warnings.filterwarnings(\"ignore\")" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": 3, 21 | "metadata": {}, 22 | "outputs": [], 23 | "source": [ 24 | "liver_data = pd.read_csv('https://raw.githubusercontent.com/dphi-official/Datasets/master/liver_patient_data/indian_liver_patient_dataset.csv')" 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": 4, 30 | "metadata": {}, 31 | "outputs": [ 32 | { 33 | "name": "stdout", 34 | "output_type": "stream", 35 | "text": [ 36 | "\n", 37 | "RangeIndex: 500 entries, 0 to 499\n", 38 | "Data columns (total 11 columns):\n", 39 | " # Column Non-Null Count Dtype \n", 40 | "--- ------ -------------- ----- \n", 41 | " 0 Age 500 non-null int64 \n", 42 | " 1 Gender 500 non-null object \n", 43 | " 2 Total_Bilirubin 500 non-null float64\n", 44 | " 3 Direct_Bilirubin 500 non-null float64\n", 45 | " 4 Alkaline_Phosphotase 500 non-null int64 \n", 46 | " 5 Alamine_Aminotransferase 500 non-null int64 \n", 47 | " 6 Aspartate_Aminotransferase 500 non-null int64 \n", 48 | " 7 Total_Protiens 500 non-null float64\n", 49 | " 8 Albumin 500 non-null float64\n", 50 | " 9 Albumin_and_Globulin_Ratio 496 non-null float64\n", 51 | " 10 Liver_Problem 500 non-null int64 \n", 
52 | "dtypes: float64(5), int64(5), object(1)\n", 53 | "memory usage: 43.1+ KB\n" 54 | ] 55 | } 56 | ], 57 | "source": [ 58 | "liver_data.info()" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": 5, 64 | "metadata": { 65 | "scrolled": false 66 | }, 67 | "outputs": [ 68 | { 69 | "data": { 70 | "text/plain": [ 71 | "(500, 11)" 72 | ] 73 | }, 74 | "execution_count": 5, 75 | "metadata": {}, 76 | "output_type": "execute_result" 77 | } 78 | ], 79 | "source": [ 80 | "liver_data.shape" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": 6, 86 | "metadata": { 87 | "scrolled": true 88 | }, 89 | "outputs": [ 90 | { 91 | "data": { 92 | "text/html": [ 93 | "
\n", 94 | "\n", 107 | "\n", 108 | " \n", 109 | " \n", 110 | " \n", 111 | " \n", 112 | " \n", 113 | " \n", 114 | " \n", 115 | " \n", 116 | " \n", 117 | " \n", 118 | " \n", 119 | " \n", 120 | " \n", 121 | " \n", 122 | " \n", 123 | " \n", 124 | " \n", 125 | " \n", 126 | " \n", 127 | " \n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | " \n", 138 | " \n", 139 | " \n", 140 | " \n", 141 | " \n", 142 | " \n", 143 | " \n", 144 | " \n", 145 | " \n", 146 | " \n", 147 | " \n", 148 | " \n", 149 | " \n", 150 | " \n", 151 | " \n", 152 | " \n", 153 | " \n", 154 | " \n", 155 | " \n", 156 | " \n", 157 | " \n", 158 | " \n", 159 | " \n", 160 | " \n", 161 | " \n", 162 | " \n", 163 | " \n", 164 | " \n", 165 | " \n", 166 | " \n", 167 | " \n", 168 | " \n", 169 | " \n", 170 | " \n", 171 | " \n", 172 | " \n", 173 | " \n", 174 | " \n", 175 | " \n", 176 | " \n", 177 | " \n", 178 | " \n", 179 | " \n", 180 | " \n", 181 | " \n", 182 | " \n", 183 | " \n", 184 | " \n", 185 | " \n", 186 | " \n", 187 | " \n", 188 | " \n", 189 | " \n", 190 | " \n", 191 | " \n", 192 | " \n", 193 | " \n", 194 | " \n", 195 | " \n", 196 | "
AgeGenderTotal_BilirubinDirect_BilirubinAlkaline_PhosphotaseAlamine_AminotransferaseAspartate_AminotransferaseTotal_ProtiensAlbuminAlbumin_and_Globulin_RatioLiver_Problem
065Female0.70.118716186.83.30.901
162Male10.95.5699641007.53.20.741
262Male7.34.149060687.03.30.891
358Male1.00.418214206.83.41.001
472Male3.92.019527597.32.40.401
\n", 197 | "
" 198 | ], 199 | "text/plain": [ 200 | " Age Gender Total_Bilirubin Direct_Bilirubin Alkaline_Phosphotase \\\n", 201 | "0 65 Female 0.7 0.1 187 \n", 202 | "1 62 Male 10.9 5.5 699 \n", 203 | "2 62 Male 7.3 4.1 490 \n", 204 | "3 58 Male 1.0 0.4 182 \n", 205 | "4 72 Male 3.9 2.0 195 \n", 206 | "\n", 207 | " Alamine_Aminotransferase Aspartate_Aminotransferase Total_Protiens \\\n", 208 | "0 16 18 6.8 \n", 209 | "1 64 100 7.5 \n", 210 | "2 60 68 7.0 \n", 211 | "3 14 20 6.8 \n", 212 | "4 27 59 7.3 \n", 213 | "\n", 214 | " Albumin Albumin_and_Globulin_Ratio Liver_Problem \n", 215 | "0 3.3 0.90 1 \n", 216 | "1 3.2 0.74 1 \n", 217 | "2 3.3 0.89 1 \n", 218 | "3 3.4 1.00 1 \n", 219 | "4 2.4 0.40 1 " 220 | ] 221 | }, 222 | "execution_count": 6, 223 | "metadata": {}, 224 | "output_type": "execute_result" 225 | } 226 | ], 227 | "source": [ 228 | "liver_data.head()" 229 | ] 230 | }, 231 | { 232 | "cell_type": "code", 233 | "execution_count": 7, 234 | "metadata": {}, 235 | "outputs": [ 236 | { 237 | "data": { 238 | "text/plain": [ 239 | "1 350\n", 240 | "2 150\n", 241 | "Name: Liver_Problem, dtype: int64" 242 | ] 243 | }, 244 | "execution_count": 7, 245 | "metadata": {}, 246 | "output_type": "execute_result" 247 | } 248 | ], 249 | "source": [ 250 | "liver_data['Liver_Problem'].value_counts()" 251 | ] 252 | }, 253 | { 254 | "cell_type": "code", 255 | "execution_count": 8, 256 | "metadata": {}, 257 | "outputs": [ 258 | { 259 | "data": { 260 | "text/html": [ 261 | "
\n", 262 | "\n", 275 | "\n", 276 | " \n", 277 | " \n", 278 | " \n", 279 | " \n", 280 | " \n", 281 | " \n", 282 | " \n", 283 | " \n", 284 | " \n", 285 | " \n", 286 | " \n", 287 | " \n", 288 | " \n", 289 | " \n", 290 | " \n", 291 | " \n", 292 | " \n", 293 | " \n", 294 | " \n", 295 | " \n", 296 | " \n", 297 | " \n", 298 | " \n", 299 | " \n", 300 | " \n", 301 | " \n", 302 | " \n", 303 | " \n", 304 | " \n", 305 | " \n", 306 | " \n", 307 | " \n", 308 | " \n", 309 | " \n", 310 | " \n", 311 | " \n", 312 | " \n", 313 | " \n", 314 | " \n", 315 | " \n", 316 | " \n", 317 | " \n", 318 | " \n", 319 | " \n", 320 | " \n", 321 | " \n", 322 | " \n", 323 | " \n", 324 | " \n", 325 | " \n", 326 | " \n", 327 | " \n", 328 | " \n", 329 | " \n", 330 | " \n", 331 | " \n", 332 | " \n", 333 | " \n", 334 | " \n", 335 | " \n", 336 | " \n", 337 | " \n", 338 | " \n", 339 | " \n", 340 | " \n", 341 | " \n", 342 | " \n", 343 | " \n", 344 | " \n", 345 | " \n", 346 | " \n", 347 | " \n", 348 | " \n", 349 | " \n", 350 | "
AgeGenderTotal_BilirubinDirect_BilirubinAlkaline_PhosphotaseAlamine_AminotransferaseAspartate_AminotransferaseTotal_ProtiensAlbuminAlbumin_and_Globulin_RatioLiver_Problem
20945Female0.90.318923336.63.9NaN1
24151Male0.80.223024466.53.1NaN1
25335Female0.60.218012155.22.7NaN2
31227Male1.30.610625548.54.8NaN2
\n", 351 | "
" 352 | ], 353 | "text/plain": [ 354 | " Age Gender Total_Bilirubin Direct_Bilirubin Alkaline_Phosphotase \\\n", 355 | "209 45 Female 0.9 0.3 189 \n", 356 | "241 51 Male 0.8 0.2 230 \n", 357 | "253 35 Female 0.6 0.2 180 \n", 358 | "312 27 Male 1.3 0.6 106 \n", 359 | "\n", 360 | " Alamine_Aminotransferase Aspartate_Aminotransferase Total_Protiens \\\n", 361 | "209 23 33 6.6 \n", 362 | "241 24 46 6.5 \n", 363 | "253 12 15 5.2 \n", 364 | "312 25 54 8.5 \n", 365 | "\n", 366 | " Albumin Albumin_and_Globulin_Ratio Liver_Problem \n", 367 | "209 3.9 NaN 1 \n", 368 | "241 3.1 NaN 1 \n", 369 | "253 2.7 NaN 2 \n", 370 | "312 4.8 NaN 2 " 371 | ] 372 | }, 373 | "execution_count": 8, 374 | "metadata": {}, 375 | "output_type": "execute_result" 376 | } 377 | ], 378 | "source": [ 379 | "liver_data[liver_data['Albumin_and_Globulin_Ratio'].isnull()]" 380 | ] 381 | }, 382 | { 383 | "cell_type": "code", 384 | "execution_count": 9, 385 | "metadata": {}, 386 | "outputs": [], 387 | "source": [ 388 | "liver_data.Albumin_and_Globulin_Ratio.fillna(liver_data['Albumin_and_Globulin_Ratio'].mean(), inplace=True)" 389 | ] 390 | }, 391 | { 392 | "cell_type": "code", 393 | "execution_count": 10, 394 | "metadata": { 395 | "scrolled": false 396 | }, 397 | "outputs": [ 398 | { 399 | "data": { 400 | "text/html": [ 401 | "
\n", 402 | "\n", 415 | "\n", 416 | " \n", 417 | " \n", 418 | " \n", 419 | " \n", 420 | " \n", 421 | " \n", 422 | " \n", 423 | " \n", 424 | " \n", 425 | " \n", 426 | " \n", 427 | " \n", 428 | " \n", 429 | " \n", 430 | " \n", 431 | " \n", 432 | " \n", 433 | " \n", 434 | " \n", 435 | " \n", 436 | " \n", 437 | " \n", 438 | " \n", 439 | " \n", 440 | " \n", 441 | " \n", 442 | " \n", 443 | " \n", 444 | " \n", 445 | " \n", 446 | " \n", 447 | " \n", 448 | " \n", 449 | " \n", 450 | " \n", 451 | " \n", 452 | " \n", 453 | " \n", 454 | " \n", 455 | " \n", 456 | " \n", 457 | " \n", 458 | " \n", 459 | " \n", 460 | " \n", 461 | " \n", 462 | " \n", 463 | " \n", 464 | " \n", 465 | " \n", 466 | " \n", 467 | " \n", 468 | " \n", 469 | " \n", 470 | " \n", 471 | " \n", 472 | " \n", 473 | " \n", 474 | " \n", 475 | " \n", 476 | " \n", 477 | " \n", 478 | " \n", 479 | " \n", 480 | " \n", 481 | " \n", 482 | " \n", 483 | " \n", 484 | " \n", 485 | " \n", 486 | " \n", 487 | " \n", 488 | " \n", 489 | " \n", 490 | " \n", 491 | " \n", 492 | " \n", 493 | " \n", 494 | " \n", 495 | " \n", 496 | " \n", 497 | " \n", 498 | " \n", 499 | " \n", 500 | " \n", 501 | " \n", 502 | " \n", 503 | " \n", 504 | "
AgeGenderTotal_BilirubinDirect_BilirubinAlkaline_PhosphotaseAlamine_AminotransferaseAspartate_AminotransferaseTotal_ProtiensAlbuminAlbumin_and_Globulin_RatioLiver_Problem
06500.70.118716186.83.30.901
162110.95.5699641007.53.20.741
26217.34.149060687.03.30.891
35811.00.418214206.83.41.001
47213.92.019527597.32.40.401
\n", 505 | "
" 506 | ], 507 | "text/plain": [ 508 | " Age Gender Total_Bilirubin Direct_Bilirubin Alkaline_Phosphotase \\\n", 509 | "0 65 0 0.7 0.1 187 \n", 510 | "1 62 1 10.9 5.5 699 \n", 511 | "2 62 1 7.3 4.1 490 \n", 512 | "3 58 1 1.0 0.4 182 \n", 513 | "4 72 1 3.9 2.0 195 \n", 514 | "\n", 515 | " Alamine_Aminotransferase Aspartate_Aminotransferase Total_Protiens \\\n", 516 | "0 16 18 6.8 \n", 517 | "1 64 100 7.5 \n", 518 | "2 60 68 7.0 \n", 519 | "3 14 20 6.8 \n", 520 | "4 27 59 7.3 \n", 521 | "\n", 522 | " Albumin Albumin_and_Globulin_Ratio Liver_Problem \n", 523 | "0 3.3 0.90 1 \n", 524 | "1 3.2 0.74 1 \n", 525 | "2 3.3 0.89 1 \n", 526 | "3 3.4 1.00 1 \n", 527 | "4 2.4 0.40 1 " 528 | ] 529 | }, 530 | "execution_count": 10, 531 | "metadata": {}, 532 | "output_type": "execute_result" 533 | } 534 | ], 535 | "source": [ 536 | "le = LabelEncoder()\n", 537 | "liver_data.Gender = le.fit_transform(liver_data.Gender)\n", 538 | "liver_data.head()" 539 | ] 540 | }, 541 | { 542 | "cell_type": "markdown", 543 | "metadata": {}, 544 | "source": [ 545 | "# Explanation about Train and Test Data and its splitting" 546 | ] 547 | }, 548 | { 549 | "cell_type": "markdown", 550 | "metadata": {}, 551 | "source": [ 552 | "Often people as this question:\n", 553 | "\n", 554 | "What the point is to do 'X_train, X_test' etc.?\n", 555 | "\n", 556 | "My answer:\n", 557 | "\n", 558 | "Splitting the data into train and test data is a standard practice in data science. This I believe we know by now.\n", 559 | "\n", 560 | "Now why separate input features and target variable?\n", 561 | "\n", 562 | "For that, I would say it is more of constraint or standard practice in python. If I were to code in R programming there is no need to separate input and target variables, instead, I just separate the dataset into train and test dataset. \n", 563 | "\n", 564 | "Please consider this as something that you should remember like a rule. \n", 565 | "\n", 566 | "In python, you do it as follows as a defacto for splitting your dataset into train and test.\n", 567 | "\n", 568 | "1. You split the dataset into input features (X) and target variable ( y )\n", 569 | "2. Further these X and y are divide into train (X_train, y_train) and test (y_train, y_test).\n", 570 | "\n", 571 | "Technically you are just splitting your dataset into train and test only.\n", 572 | "\n", 573 | "![My Title](https://dphi.tech/wp-content/uploads/2020/06/IMG_20200620_194107-768x1024.jpg)" 574 | ] 575 | }, 576 | { 577 | "cell_type": "markdown", 578 | "metadata": {}, 579 | "source": [ 580 | "## The above image can be read as below as well. Just pick the best of the two whichever you are comfortable to comphrehend the process" 581 | ] 582 | }, 583 | { 584 | "cell_type": "markdown", 585 | "metadata": {}, 586 | "source": [ 587 | "![My Title](https://dphi.tech/wp-content/uploads/2020/06/image-2-1024x552.png)" 588 | ] 589 | }, 590 | { 591 | "cell_type": "markdown", 592 | "metadata": {}, 593 | "source": [ 594 | "![My Title](https://dphi.tech/wp-content/uploads/2020/06/image.png)" 595 | ] 596 | }, 597 | { 598 | "cell_type": "markdown", 599 | "metadata": {}, 600 | "source": [ 601 | "## Separating input and target variables\n", 602 | "\n", 603 | "y has the label or the target variable \"Liver_Problem\" - basically this is what we need to predict. \n", 604 | "\n", 605 | "X has input variables which are used to predict y. 
Basically X has all the columns of our data excluding target variable \"Liver_Problem\"" 606 | ] 607 | }, 608 | { 609 | "cell_type": "code", 610 | "execution_count": 12, 611 | "metadata": {}, 612 | "outputs": [], 613 | "source": [ 614 | "X = liver_data.drop('Liver_Problem', axis = 1) \n", 615 | "\n", 616 | "\n", 617 | "y = liver_data['Liver_Problem']" 618 | ] 619 | }, 620 | { 621 | "cell_type": "markdown", 622 | "metadata": {}, 623 | "source": [ 624 | "![My Title](https://dphi.tech/wp-content/uploads/2020/06/image-1-1024x608.png)" 625 | ] 626 | }, 627 | { 628 | "cell_type": "markdown", 629 | "metadata": {}, 630 | "source": [ 631 | "# Splitting Data into Train and Test" 632 | ] 633 | }, 634 | { 635 | "cell_type": "code", 636 | "execution_count": 13, 637 | "metadata": {}, 638 | "outputs": [], 639 | "source": [ 640 | "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.2, random_state = 101)" 641 | ] 642 | }, 643 | { 644 | "cell_type": "markdown", 645 | "metadata": {}, 646 | "source": [ 647 | "# Building Logistic Regression" 648 | ] 649 | }, 650 | { 651 | "cell_type": "code", 652 | "execution_count": 14, 653 | "metadata": {}, 654 | "outputs": [], 655 | "source": [ 656 | "model = LogisticRegression()" 657 | ] 658 | }, 659 | { 660 | "cell_type": "code", 661 | "execution_count": 15, 662 | "metadata": {}, 663 | "outputs": [ 664 | { 665 | "data": { 666 | "text/plain": [ 667 | "LogisticRegression(C=1.0, class_weight=None, dual=False, fit_intercept=True,\n", 668 | " intercept_scaling=1, l1_ratio=None, max_iter=100,\n", 669 | " multi_class='auto', n_jobs=None, penalty='l2',\n", 670 | " random_state=None, solver='lbfgs', tol=0.0001, verbose=0,\n", 671 | " warm_start=False)" 672 | ] 673 | }, 674 | "execution_count": 15, 675 | "metadata": {}, 676 | "output_type": "execute_result" 677 | } 678 | ], 679 | "source": [ 680 | "model.fit(X_train, y_train)" 681 | ] 682 | }, 683 | { 684 | "cell_type": "code", 685 | "execution_count": 16, 686 | "metadata": {}, 687 | "outputs": [], 688 | "source": [ 689 | "pred = model.predict(X_test)" 690 | ] 691 | }, 692 | { 693 | "cell_type": "code", 694 | "execution_count": 17, 695 | "metadata": {}, 696 | "outputs": [ 697 | { 698 | "name": "stdout", 699 | "output_type": "stream", 700 | "text": [ 701 | "True Positive 5\n", 702 | "True Negative 60\n", 703 | "False Positive 6\n", 704 | "False Negative 29\n" 705 | ] 706 | } 707 | ], 708 | "source": [ 709 | "from sklearn.metrics import confusion_matrix\n", 710 | "tn, fp, fn, tp = confusion_matrix(y_test, pred).ravel()\n", 711 | "print(\"True Positive\", tp)\n", 712 | "print(\"True Negative\", tn)\n", 713 | "print(\"False Positive\", fp)\n", 714 | "print(\"False Negative\", fn)" 715 | ] 716 | }, 717 | { 718 | "cell_type": "code", 719 | "execution_count": 18, 720 | "metadata": {}, 721 | "outputs": [ 722 | { 723 | "data": { 724 | "text/plain": [ 725 | "0.65" 726 | ] 727 | }, 728 | "execution_count": 18, 729 | "metadata": {}, 730 | "output_type": "execute_result" 731 | } 732 | ], 733 | "source": [ 734 | "from sklearn.metrics import accuracy_score\n", 735 | "accuracy_score(y_test, pred)" 736 | ] 737 | }, 738 | { 739 | "cell_type": "code", 740 | "execution_count": 24, 741 | "metadata": {}, 742 | "outputs": [], 743 | "source": [ 744 | "test_new = pd.read_csv('https://raw.githubusercontent.com/dphi-official/Datasets/master/liver_patient_data/indian_liver_patient_new_testdataset.csv')" 745 | ] 746 | }, 747 | { 748 | "cell_type": "code", 749 | "execution_count": 25, 750 | "metadata": {}, 751 | "outputs": [ 752 | { 753 | 
"data": { 754 | "text/html": [ 755 | "
\n", 756 | "\n", 769 | "\n", 770 | " \n", 771 | " \n", 772 | " \n", 773 | " \n", 774 | " \n", 775 | " \n", 776 | " \n", 777 | " \n", 778 | " \n", 779 | " \n", 780 | " \n", 781 | " \n", 782 | " \n", 783 | " \n", 784 | " \n", 785 | " \n", 786 | " \n", 787 | " \n", 788 | " \n", 789 | " \n", 790 | " \n", 791 | " \n", 792 | " \n", 793 | " \n", 794 | " \n", 795 | " \n", 796 | " \n", 797 | " \n", 798 | " \n", 799 | " \n", 800 | " \n", 801 | " \n", 802 | " \n", 803 | " \n", 804 | " \n", 805 | " \n", 806 | " \n", 807 | " \n", 808 | " \n", 809 | " \n", 810 | " \n", 811 | " \n", 812 | " \n", 813 | " \n", 814 | " \n", 815 | " \n", 816 | " \n", 817 | " \n", 818 | " \n", 819 | " \n", 820 | " \n", 821 | " \n", 822 | " \n", 823 | " \n", 824 | " \n", 825 | " \n", 826 | " \n", 827 | " \n", 828 | " \n", 829 | " \n", 830 | " \n", 831 | " \n", 832 | " \n", 833 | " \n", 834 | " \n", 835 | " \n", 836 | " \n", 837 | " \n", 838 | " \n", 839 | " \n", 840 | " \n", 841 | " \n", 842 | " \n", 843 | " \n", 844 | " \n", 845 | " \n", 846 | " \n", 847 | " \n", 848 | " \n", 849 | " \n", 850 | " \n", 851 | " \n", 852 | "
AgeGenderTotal_BilirubinDirect_BilirubinAlkaline_PhosphotaseAlamine_AminotransferaseAspartate_AminotransferaseTotal_ProtiensAlbuminAlbumin_and_Globulin_Ratio
036Male2.81.530528765.92.50.7
142Male0.80.212729304.92.71.2
253Male19.810.4238392218.12.50.4
332Male30.517.121839795.52.70.9
432Male32.614.1219952355.83.11.1
\n", 853 | "
" 854 | ], 855 | "text/plain": [ 856 | " Age Gender Total_Bilirubin Direct_Bilirubin Alkaline_Phosphotase \\\n", 857 | "0 36 Male 2.8 1.5 305 \n", 858 | "1 42 Male 0.8 0.2 127 \n", 859 | "2 53 Male 19.8 10.4 238 \n", 860 | "3 32 Male 30.5 17.1 218 \n", 861 | "4 32 Male 32.6 14.1 219 \n", 862 | "\n", 863 | " Alamine_Aminotransferase Aspartate_Aminotransferase Total_Protiens \\\n", 864 | "0 28 76 5.9 \n", 865 | "1 29 30 4.9 \n", 866 | "2 39 221 8.1 \n", 867 | "3 39 79 5.5 \n", 868 | "4 95 235 5.8 \n", 869 | "\n", 870 | " Albumin Albumin_and_Globulin_Ratio \n", 871 | "0 2.5 0.7 \n", 872 | "1 2.7 1.2 \n", 873 | "2 2.5 0.4 \n", 874 | "3 2.7 0.9 \n", 875 | "4 3.1 1.1 " 876 | ] 877 | }, 878 | "execution_count": 25, 879 | "metadata": {}, 880 | "output_type": "execute_result" 881 | } 882 | ], 883 | "source": [ 884 | "test_new.head()" 885 | ] 886 | }, 887 | { 888 | "cell_type": "code", 889 | "execution_count": 26, 890 | "metadata": {}, 891 | "outputs": [ 892 | { 893 | "data": { 894 | "text/html": [ 895 | "
\n", 896 | "\n", 909 | "\n", 910 | " \n", 911 | " \n", 912 | " \n", 913 | " \n", 914 | " \n", 915 | " \n", 916 | " \n", 917 | " \n", 918 | " \n", 919 | " \n", 920 | " \n", 921 | " \n", 922 | " \n", 923 | " \n", 924 | " \n", 925 | " \n", 926 | " \n", 927 | " \n", 928 | " \n", 929 | " \n", 930 | " \n", 931 | " \n", 932 | " \n", 933 | " \n", 934 | " \n", 935 | " \n", 936 | " \n", 937 | " \n", 938 | " \n", 939 | " \n", 940 | " \n", 941 | " \n", 942 | " \n", 943 | " \n", 944 | " \n", 945 | " \n", 946 | " \n", 947 | " \n", 948 | " \n", 949 | " \n", 950 | " \n", 951 | " \n", 952 | " \n", 953 | " \n", 954 | " \n", 955 | " \n", 956 | " \n", 957 | " \n", 958 | " \n", 959 | " \n", 960 | " \n", 961 | " \n", 962 | " \n", 963 | " \n", 964 | " \n", 965 | " \n", 966 | " \n", 967 | " \n", 968 | " \n", 969 | " \n", 970 | " \n", 971 | " \n", 972 | " \n", 973 | " \n", 974 | " \n", 975 | " \n", 976 | " \n", 977 | " \n", 978 | " \n", 979 | " \n", 980 | " \n", 981 | " \n", 982 | " \n", 983 | " \n", 984 | " \n", 985 | " \n", 986 | " \n", 987 | " \n", 988 | " \n", 989 | " \n", 990 | " \n", 991 | " \n", 992 | "
AgeGenderTotal_BilirubinDirect_BilirubinAlkaline_PhosphotaseAlamine_AminotransferaseAspartate_AminotransferaseTotal_ProtiensAlbuminAlbumin_and_Globulin_Ratio
03612.81.530528765.92.50.7
14210.80.212729304.92.71.2
253119.810.4238392218.12.50.4
332130.517.121839795.52.70.9
432132.614.1219952355.83.11.1
\n", 993 | "
" 994 | ], 995 | "text/plain": [ 996 | " Age Gender Total_Bilirubin Direct_Bilirubin Alkaline_Phosphotase \\\n", 997 | "0 36 1 2.8 1.5 305 \n", 998 | "1 42 1 0.8 0.2 127 \n", 999 | "2 53 1 19.8 10.4 238 \n", 1000 | "3 32 1 30.5 17.1 218 \n", 1001 | "4 32 1 32.6 14.1 219 \n", 1002 | "\n", 1003 | " Alamine_Aminotransferase Aspartate_Aminotransferase Total_Protiens \\\n", 1004 | "0 28 76 5.9 \n", 1005 | "1 29 30 4.9 \n", 1006 | "2 39 221 8.1 \n", 1007 | "3 39 79 5.5 \n", 1008 | "4 95 235 5.8 \n", 1009 | "\n", 1010 | " Albumin Albumin_and_Globulin_Ratio \n", 1011 | "0 2.5 0.7 \n", 1012 | "1 2.7 1.2 \n", 1013 | "2 2.5 0.4 \n", 1014 | "3 2.7 0.9 \n", 1015 | "4 3.1 1.1 " 1016 | ] 1017 | }, 1018 | "execution_count": 26, 1019 | "metadata": {}, 1020 | "output_type": "execute_result" 1021 | } 1022 | ], 1023 | "source": [ 1024 | "le = LabelEncoder()\n", 1025 | "test_new.Gender = le.fit_transform(test_new.Gender)\n", 1026 | "test_new.head()" 1027 | ] 1028 | }, 1029 | { 1030 | "cell_type": "code", 1031 | "execution_count": 27, 1032 | "metadata": {}, 1033 | "outputs": [], 1034 | "source": [ 1035 | "Liver_Problem = model.predict(test_new)" 1036 | ] 1037 | }, 1038 | { 1039 | "cell_type": "markdown", 1040 | "metadata": {}, 1041 | "source": [ 1042 | "# Downloading the prediction file" 1043 | ] 1044 | }, 1045 | { 1046 | "cell_type": "code", 1047 | "execution_count": 29, 1048 | "metadata": {}, 1049 | "outputs": [], 1050 | "source": [ 1051 | "res = pd.DataFrame(Liver_Problem)\n", 1052 | "res.index = test_new.index # its important for comparison\n", 1053 | "res.columns = [\"Liver_Problem\"]\n", 1054 | "res.to_csv(\"prediction_results_Liver_Problem.csv\") # the csv file will be saved locally on the same location where this notebook is located." 1055 | ] 1056 | } 1057 | ], 1058 | "metadata": { 1059 | "kernelspec": { 1060 | "display_name": "Python 3", 1061 | "language": "python", 1062 | "name": "python3" 1063 | }, 1064 | "language_info": { 1065 | "codemirror_mode": { 1066 | "name": "ipython", 1067 | "version": 3 1068 | }, 1069 | "file_extension": ".py", 1070 | "mimetype": "text/x-python", 1071 | "name": "python", 1072 | "nbconvert_exporter": "python", 1073 | "pygments_lexer": "ipython3", 1074 | "version": "3.7.6" 1075 | } 1076 | }, 1077 | "nbformat": 4, 1078 | "nbformat_minor": 2 1079 | } 1080 | -------------------------------------------------------------------------------- /Logistic_Regression/logistic_regression.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "name": "logistic_regression.ipynb", 7 | "provenance": [], 8 | "collapsed_sections": [] 9 | }, 10 | "kernelspec": { 11 | "name": "python3", 12 | "display_name": "Python 3" 13 | } 14 | }, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "metadata": { 19 | "id": "XaoTTFUduK8L", 20 | "colab_type": "text" 21 | }, 22 | "source": [ 23 | "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1kqG3UhIcnscMC3sm4hdEHFmIMpmZWEEG?usp=sharing)" 24 | ] 25 | }, 26 | { 27 | "cell_type": "markdown", 28 | "metadata": { 29 | "id": "laB2JbdN0cvV", 30 | "colab_type": "text" 31 | }, 32 | "source": [ 33 | "# Introducing Logistic Regression\n", 34 | "Logistic Regression is a classification algorithm. It is used to predict a binary outcome (1 / 0, Yes / No, True / False) given a set of independent variable/s. 
You can also think of logistic regression as a special case of linear regression when the outcome variable is categorical. Logistic Regression is a supervised machine learning algorithm/model.\n", 35 | "\n", 36 | "## Agenda\n", 37 | "* About Dataset\n", 38 | "* Loading Libraries\n", 39 | "* Loading Data\n", 40 | "* Understanding Data\n", 41 | "* Separating Input/Independent and Output/Dependent Variables\n", 42 | "* Splitting the data\n", 43 | "* Building Model\n", 44 | "* Prediction\n", 45 | "* Model Performance\n", 46 | "\n", 47 | "## About Dataset\n", 48 | "The dataset has two columns - age (age of the person/customer) and bought_insurance (whether the customer bought insurance or not). If bought_insurance = 1, the customer bought insurance, and if bought_insurance = 0, the customer did not buy the insurance.\n", 49 | "\n", 50 | "Dataset Link: [insurance_data](https://raw.githubusercontent.com/codebasics/py/master/ML/7_logistic_reg/insurance_data.csv)" 51 | ] 52 | }, 53 | { 54 | "cell_type": "markdown", 55 | "metadata": { 56 | "id": "MVGxkd1K3xrW", 57 | "colab_type": "text" 58 | }, 59 | "source": [ 60 | "## Loading Libraries\n", 61 | "Not all Python capabilities are loaded into our working environment by default (even if they are already installed on your system). So, we import each and every library that we want to use.\n", 62 | "\n", 63 | "In data science, numpy and pandas are the most commonly used libraries. Numpy is required for calculations like means, medians, square roots, etc. Pandas is used for data processing and data frames. We choose alias names for our libraries for convenience (numpy --> np and pandas --> pd).\n", 64 | "\n", 65 | "**We can import all the libraries that we think might be needed, or we can import them as we go along.**" 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "metadata": { 71 | "id": "8Zo1haly0Rap", 72 | "colab_type": "code", 73 | "colab": {} 74 | }, 75 | "source": [ 76 | "import pandas as pd\n", 77 | "import matplotlib.pyplot as plt" 78 | ], 79 | "execution_count": 1, 80 | "outputs": [] 81 | }, 82 | { 83 | "cell_type": "markdown", 84 | "metadata": { 85 | "id": "Zp46CaAG4-79", 86 | "colab_type": "text" 87 | }, 88 | "source": [ 89 | "## Loading Data\n", 90 | "The pandas module is used for reading files. We have our data in '.csv' format, so we will use the 'read_csv()' function to load the data." 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "metadata": { 96 | "id": "cDs7FZ-Y4bMN", 97 | "colab_type": "code", 98 | "colab": {} 99 | }, 100 | "source": [ 101 | "# In the read_csv() function, we have passed the raw data link on GitHub\n", 102 | "data_location = \"https://raw.githubusercontent.com/codebasics/py/master/ML/7_logistic_reg/insurance_data.csv\"\n", 103 | "data = pd.read_csv(data_location)" 104 | ], 105 | "execution_count": 2, 106 | "outputs": [] 107 | }, 108 | { 109 | "cell_type": "markdown", 110 | "metadata": { 111 | "id": "sQgP0WXi5pl0", 112 | "colab_type": "text" 113 | }, 114 | "source": [ 115 | "## Understanding Data\n", 116 | "Let's check how our data looks. This can be done using the head() method." 
117 | ] 118 | }, 119 | { 120 | "cell_type": "code", 121 | "metadata": { 122 | "id": "-gwOLzhX5njJ", 123 | "colab_type": "code", 124 | "colab": { 125 | "base_uri": "https://localhost:8080/", 126 | "height": 204 127 | }, 128 | "outputId": "3a53deaf-ec5f-42a1-8222-68c52a68152a" 129 | }, 130 | "source": [ 131 | "data.head()" 132 | ], 133 | "execution_count": 3, 134 | "outputs": [ 135 | { 136 | "output_type": "execute_result", 137 | "data": { 138 | "text/html": [ 139 | "
\n", 140 | "\n", 153 | "\n", 154 | " \n", 155 | " \n", 156 | " \n", 157 | " \n", 158 | " \n", 159 | " \n", 160 | " \n", 161 | " \n", 162 | " \n", 163 | " \n", 164 | " \n", 165 | " \n", 166 | " \n", 167 | " \n", 168 | " \n", 169 | " \n", 170 | " \n", 171 | " \n", 172 | " \n", 173 | " \n", 174 | " \n", 175 | " \n", 176 | " \n", 177 | " \n", 178 | " \n", 179 | " \n", 180 | " \n", 181 | " \n", 182 | " \n", 183 | " \n", 184 | " \n", 185 | " \n", 186 | " \n", 187 | " \n", 188 | "
agebought_insurance
0220
1250
2471
3520
4461
\n", 189 | "
" 190 | ], 191 | "text/plain": [ 192 | " age bought_insurance\n", 193 | "0 22 0\n", 194 | "1 25 0\n", 195 | "2 47 1\n", 196 | "3 52 0\n", 197 | "4 46 1" 198 | ] 199 | }, 200 | "metadata": { 201 | "tags": [] 202 | }, 203 | "execution_count": 3 204 | } 205 | ] 206 | }, 207 | { 208 | "cell_type": "markdown", 209 | "metadata": { 210 | "id": "rA4wZUbx51KM", 211 | "colab_type": "text" 212 | }, 213 | "source": [ 214 | "There are two columns:\n", 215 | "\n", 216 | "* age: The age of the customer\n", 217 | "* bought_insurance: If the customer bought insurance (1) or not (0). This is our target variable which we are interested to know.\n", 218 | "\n", 219 | "Since our target variable has only two different classes/values, we can say it as a binary classification problem. And Logistic Regression is used for binary classification problems." 220 | ] 221 | }, 222 | { 223 | "cell_type": "markdown", 224 | "metadata": { 225 | "id": "qEYG8who6aXb", 226 | "colab_type": "text" 227 | }, 228 | "source": [ 229 | "Looking the relationship between age and bought_insurance using scatter plot." 230 | ] 231 | }, 232 | { 233 | "cell_type": "code", 234 | "metadata": { 235 | "id": "UOyg4-0K5xDv", 236 | "colab_type": "code", 237 | "colab": { 238 | "base_uri": "https://localhost:8080/", 239 | "height": 282 240 | }, 241 | "outputId": "7b994b90-665a-42c3-b803-781bbc289873" 242 | }, 243 | "source": [ 244 | "plt.scatter(data.age,data.bought_insurance,marker='+',color='red')" 245 | ], 246 | "execution_count": 4, 247 | "outputs": [ 248 | { 249 | "output_type": "execute_result", 250 | "data": { 251 | "text/plain": [ 252 | "" 253 | ] 254 | }, 255 | "metadata": { 256 | "tags": [] 257 | }, 258 | "execution_count": 4 259 | }, 260 | { 261 | "output_type": "display_data", 262 | "data": { 263 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAOoUlEQVR4nO3df6zdd13H8eeLlokCUqFXsqyFzljEBmFbbgoEopUfppukTRSXVWeQII0JNRjxx1AzccZEJAEhTnQgokSYdYo2s1rJ3OaPuLlbx6+2Fi9l2FvBXWCbiQRm9e0f51s4uz295/T23J67z3k+kpt7Pj/u+b7vp+e87iffc06/qSokSY9/T5h0AZKk8TDQJakRBrokNcJAl6RGGOiS1Ij1kzrwxo0ba8uWLZM6vCQ9Lh0+fPgLVTUzaGxigb5lyxbm5uYmdXhJelxK8tlzjXnKRZIaYaBLUiMMdElqhIEuSY0w0CWpEUMDPcn7kjyY5JPnGE+SdyWZT/LxJFeNv8zG7NjR+9Las5b+bZarZaVjq3G8cVtLv/dKf25CaznKDv39wM5lxq8GtnZfe4F3X3hZkqTzNfR96FX1d0m2LDNlN/CH1ft/eO9JsiHJpVX1uTHV2I4zf5Xvvvux7bvumkAxeoy19G+zXC0rHVuN443bWvq911Kd52Ec59AvA072tRe6vrMk2ZtkLsnc4uLiGA4tSTojo1zgotuh315Vzxswdjvw61X1D137DuDnq2rZj4HOzs7W1H5S1J352rWW/m2Wq2WlY6txvHFbS7/3Sn9uFdcyyeGqmh00No4d+ilgc197U9cnSbqIxrFD/35gH3AN8ELgXVW1fdh9TvUOXZJWaLkd+tAXRZN8CNgBbEyyAPwy8ESAqvod4CC9MJ8Hvgy8djxlS5LOxyjvctkzZLyAN4ytIknSivhJUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGjFSoCfZmeR4kvkkNwwYf1aSO5Pcn+TjSa4Zf6mSpOUMDfQk64CbgauBbcCeJNuWTPslYH9VXQlcB/z2uAuVJC1vlB36dmC+qk5U1aPArcDuJXMK+Obu9tOA/xhfiZKkUYwS6JcBJ/vaC11fv7cA1ydZAA4CPznojpLsTTKXZG5xcXEF5UqSzmVcL4ruAd5fVZuAa4APJDnrvqvqlqqararZmZmZMR1akgSjBfopYHNfe1PX1+91wH6Aqvon4EnAxnEUKEkazSiBfh+wNcnlSS6h96LngSVz/h14OUCS76QX6J5TkaSLaGigV9VpYB9wCDhG790sR5LclGRXN+1NwOuTfAz4EPBjVVWrVbQk6WzrR5lUVQfpvdjZ33dj3+2jwEvGW5ok6Xz4SVFJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUiJECPcnOJMeTzCe54Rxzrk1yNMmRJB8cb5mSpGHWD5uQZB1wM/BKYAG4L8mBqjraN2cr8GbgJVX1UJJvXa2CJUmDjbJD3w7MV9WJqnoUuBXYvWTO64Gbq+ohgKp6cLxlSpKGGSXQLwNO9rUXur5+zwGek+Qfk9yTZOegO0qyN8lckrnFxcWVVSxJGmhcL4quB7YCO4A9wHuSbFg6qapuqarZqpqdmZkZ06ElSTBaoJ8CNve1N3V9/RaAA1X1P1X1GeBT9AJeknSRjBLo9wFbk1ye5BLgOuDAkjl/Tm93TpKN9E7BnBhjnZKkIYYGelWdBvYBh4BjwP6qOpLkpiS7ummHgC8mOQrcCfxsVX1xtYqWJJ0tVTWRA8/Oztbc3NxEji1Jj1dJDlfV7KAxPykqSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjRgr0JDuTHE8yn+SGZeb9YJJKMju+EiVJoxga6EnWATcDVwPbgD1Jtg2Y91TgjcC94y5SkjTcKDv07cB8VZ2oqkeBW4HdA+b9KvBW4CtjrE+SNKJRAv0y4GRfe6Hr+5okVwGbq+ovl7ujJHuTzCWZW1xcPO9iJUnndsEviiZ5AvB24E3D5lbVLVU1W1WzMzMzF3poSVKfUQL9FLC5r72p6zvjqcDzgLuSPAC8CDjgC6OSdHGNEuj3AVuTXJ7kEuA64MCZwap6pKo2VtWWqtoC3APsqqq5ValYkjTQ0ECvqtPAPuAQcAzYX1VHktyUZNdqFyhJGs36USZV1UHg4JK+G88xd8eFlyVJOl9+UlSSGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqRAT7IzyfEk80luGDD+00mOJvl4kjuSPHv8pUqSljM00JOsA24Grga2AXuSbFsy7X5gtqqeD9wG/Ma4C5UkLW+UHfp2YL6qTlTVo8CtwO7+CVV1Z1V9uWveA2wab5mSpGFGCfTLgJN97YWu71xeB/zVoIEke5PMJZlbXFwcvUpJ0lBjfVE0yfXALPC2QeNVdUtVzVbV7MzMzDgPLUlTb/0Ic04Bm/vam7q+x0jyCuAXge+pqq+OpzxJ0qhG2aHfB2xNcnmSS4DrgAP9E5JcCfwusKuqHhx/mZKkYYYGelWdBvYBh4BjwP6qOpLkpiS7umlvA54C/EmSjyY5cI67kyStklFOuVBVB4GDS/pu7Lv9ijHXJUk6T35SVJIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRowU6El2JjmeZD7JDQPGvyHJH3fj9ybZMu5Cv2bDht7XIDt29L7O13I/t9Kxi3m81ahRulCr8djz8bysoYGeZB1wM3A1sA3Yk2TbkmmvAx6qqm8H3gG8ddyFSpKWt36EOduB+ao6AZ
DkVmA3cLRvzm7gLd3t24DfSpKqqrFVemZX/sgjj20//PDX/2LffXfv+5n2XXctf5/L/dxKxy7m8c4YZ43ShVqNx56P55GMcsrlMuBkX3uh6xs4p6pOA48Az1h6R0n2JplLMre4uLiyiiVJA2XYJjrJq4GdVfXjXftHgRdW1b6+OZ/s5ix07U93c75wrvudnZ2tubm586+4f2e+1Er/ai/3cysdu5jHW40apQu1Go89H88kOVxVs4PGRtmhnwI297U3dX0D5yRZDzwN+OL5lypJWqlRdujrgU8BL6cX3PcBP1xVR/rmvAH4rqr6iSTXAT9QVdcud78r3qFL0hRbboc+9EXRqjqdZB9wCFgHvK+qjiS5CZirqgPA7wEfSDIPfAm4bnzlS5JGMcq7XKiqg8DBJX039t3+CvBD4y1NknQ+/KSoJDXCQJekRhjoktQIA12SGjH0bYurduBkEfjsRT7sRuCcH3aaUq7J2VyTwVyXs01iTZ5dVTODBiYW6JOQZO5c79+cVq7J2VyTwVyXs621NfGUiyQ1wkCXpEZMW6DfMukC1iDX5GyuyWCuy9nW1JpM1Tl0SWrZtO3QJalZBrokNaLZQE+yOcmdSY4mOZLkjV3/05N8JMm/dd+/ZdK1XixJnpTkn5N8rFuTX+n6L+8u7j3fXez7kknXerElWZfk/iS3d+2pXpMkDyT5RJKPJpnr+qb2uQOQZEOS25L8a5JjSV681tak2UAHTgNvqqptwIuAN3QXt74BuKOqtgJ3dO1p8VXgZVX1AuAKYGeSF9G7qPc7uot8P0Tvot/T5o3Asb62awLfW1VX9L3PepqfOwDvBP66qp4LvIDe42VtrUlVTcUX8BfAK4HjwKVd36XA8UnXNqH1+CbgX4AX0vuk2/qu/8XAoUnXd5HXYhO9J+PLgNuBuCY8AGxc0je1zx16V2H7DN0bSdbqmrS8Q/+aJFuAK4F7gWdW1ee6oc8Dz5xQWRPRnVr4KPAg8BHg08DD1bu4Nwy+CHjrfhP4OeD/uvYzcE0K+Jskh5Ps7fqm+blzObAI/H53au69SZ7MGluT5gM9yVOAPwV+qqr+q3+sen9Wp+p9m1X1v1V1Bb1d6XbguRMuaaKSvAp4sKoOT7qWNealVXUVcDW905Xf3T84hc+d9cBVwLur6krgv1lyemUtrEnTgZ7kifTC/I+q6s+67v9Mcmk3fim9nerUqaqHgTvpnU7Y0F07FgZfBLxlLwF2JXkAuJXeaZd3Mt1rQlWd6r4/CHyY3h//aX7uLAALVXVv176NXsCvqTVpNtCThN61To9V1dv7hg4Ar+luv4beufWpkGQmyYbu9jfSe03hGL1gf3U3barWpKreXFWbqmoLvWvh/m1V/QhTvCZJnpzkqWduA98HfJIpfu5U1eeBk0m+o+t6OXCUNbYmzX5SNMlLgb8HPsHXz43+Ar3z6PuBZ9H773uvraovTaTIiyzJ84E/oHex7ycA+6vqpiTfRm93+nTgfuD6qvrq5CqdjCQ7gJ+pqldN85p0v/uHu+Z64INV9WtJnsGUPncAklwBvBe4BDgBvJbuecQaWZNmA12Spk2zp1wkadoY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakR/w+oGVyw4/IRsgAAAABJRU5ErkJggg==\n", 264 | "text/plain": [ 265 | "
" 266 | ] 267 | }, 268 | "metadata": { 269 | "tags": [], 270 | "needs_background": "light" 271 | } 272 | } 273 | ] 274 | }, 275 | { 276 | "cell_type": "markdown", 277 | "metadata": { 278 | "id": "m003O7uL6xc8", 279 | "colab_type": "text" 280 | }, 281 | "source": [ 282 | "We can easily observe from the scatter plot that generally the customer who is of age less than 30 years has not bought the insurance." 283 | ] 284 | }, 285 | { 286 | "cell_type": "markdown", 287 | "metadata": { 288 | "id": "uWzLw-d07EQs", 289 | "colab_type": "text" 290 | }, 291 | "source": [ 292 | "## Separating Input and Output Variables\n", 293 | "Before building any machine learning model, we always separate the input variables and output variables. Input variables are those quantities whose values are changed naturally in an experiment, whereas output variable is the one whose values are dependent on the input variables. So, input variables are also known as independent variables as its values are not dependent on any other quantity, and output variable/s are also known as dependent variables as its values are dependent on other variable i.e. input variables. Like here in this data, we can see that whether a person will buy insurance or not is dependent on the age of that person\n", 294 | "\n", 295 | "By convention input variables are represented with 'X' and output variables are represented with 'y'." 296 | ] 297 | }, 298 | { 299 | "cell_type": "code", 300 | "metadata": { 301 | "id": "BslsxtTY6sd3", 302 | "colab_type": "code", 303 | "colab": {} 304 | }, 305 | "source": [ 306 | "X = data[['age']] # input variable\n", 307 | "\n", 308 | "y = data['bought_insurance'] # output variable" 309 | ], 310 | "execution_count": 5, 311 | "outputs": [] 312 | }, 313 | { 314 | "cell_type": "markdown", 315 | "metadata": { 316 | "id": "UsgEP0tH-ArH", 317 | "colab_type": "text" 318 | }, 319 | "source": [ 320 | "If you notice the above code cell, I have used two square brackets while taking input variables and only one square bracket while taking output variable. Why?\n", 321 | "\n", 322 | "All machine learning algorithm accepts input variables as a 2D array and output variable as 1D array. Using two square brackets while selecting the input variables gives you the shape of input variable/s as 2D, but if you use only one square bracket, the shape will be 1D as you can see in the case of y.\n", 323 | "\n", 324 | "Let's check the shapes of X and y." 325 | ] 326 | }, 327 | { 328 | "cell_type": "code", 329 | "metadata": { 330 | "id": "UnfWV_JW9_Br", 331 | "colab_type": "code", 332 | "colab": { 333 | "base_uri": "https://localhost:8080/", 334 | "height": 51 335 | }, 336 | "outputId": "75232857-9b71-4ce9-85ba-7fd0dd97e516" 337 | }, 338 | "source": [ 339 | "print(\"Shape: \", X.shape, \"Dimension: \", X.ndim)\n", 340 | "print(\"Shape: \", y.shape, \"Dimension: \", y.ndim)" 341 | ], 342 | "execution_count": 6, 343 | "outputs": [ 344 | { 345 | "output_type": "stream", 346 | "text": [ 347 | "Shape: (27, 1) Dimension: 2\n", 348 | "Shape: (27,) Dimension: 1\n" 349 | ], 350 | "name": "stdout" 351 | } 352 | ] 353 | }, 354 | { 355 | "cell_type": "markdown", 356 | "metadata": { 357 | "id": "9J3XXD4A_K61", 358 | "colab_type": "text" 359 | }, 360 | "source": [ 361 | "## Splitting the data into Train and Test Set\n", 362 | "We want to check the performance of the model that we built. 
For this purpose, we always split the given data (both input and output) into a training set, which will be used to train the model, and a test set, which will be used to check how accurately the model predicts outcomes.\n", 363 | "\n", 364 | "For this purpose, we have a function called 'train_test_split' in the 'sklearn.model_selection' module." 365 | ] 366 | }, 367 | { 368 | "cell_type": "code", 369 | "metadata": { 370 | "id": "XQ1snUZP-4_e", 371 | "colab_type": "code", 372 | "colab": {} 373 | }, 374 | "source": [ 375 | "# import train_test_split\n", 376 | "from sklearn.model_selection import train_test_split" 377 | ], 378 | "execution_count": 7, 379 | "outputs": [] 380 | }, 381 | { 382 | "cell_type": "code", 383 | "metadata": { 384 | "id": "ev1mslygAtL5", 385 | "colab_type": "code", 386 | "colab": {} 387 | }, 388 | "source": [ 389 | "# split the data\n", 390 | "X_train, X_test, y_train, y_test = train_test_split(X,y,test_size=0.3, random_state = 42)\n", 391 | "\n", 392 | "# X_train: independent/input feature data for training the model\n", 393 | "# y_train: dependent/output feature data for training the model\n", 394 | "# X_test: independent/input feature data for testing the model; will be used to predict the output values\n", 395 | "# y_test: original dependent/output values of X_test; we will compare these values with our predicted values to check the performance of our built model.\n", 396 | " \n", 397 | "# test_size = 0.30: 30% of the data will go to the test set and 70% of the data will go to the train set\n", 398 | "# random_state = 42: this fixes the split, i.e. there will be the same split each time you run the code" 399 | ], 400 | "execution_count": 8, 401 | "outputs": [] 402 | }, 403 | { 404 | "cell_type": "markdown", 405 | "metadata": { 406 | "id": "J9tTegE3Bn0D", 407 | "colab_type": "text" 408 | }, 409 | "source": [ 410 | "## Building Model\n" 411 | ] 412 | }, 413 | { 414 | "cell_type": "markdown", 415 | "metadata": { 416 | "id": "OiJazgZFjGQu", 417 | "colab_type": "text" 418 | }, 419 | "source": [ 420 | "Now we are finally ready, and we can train the model.\n", 421 | "\n", 422 | "First, we need to import our model - Logistic Regression (again, using the sklearn library).\n", 423 | "\n", 424 | "Then we feed the model both the data (X_train) and the answers for that data (y_train)." 425 | ] 426 | }, 427 | { 428 | "cell_type": "code", 429 | "metadata": { 430 | "id": "nZHXi5CfBlns", 431 | "colab_type": "code", 432 | "colab": {} 433 | }, 434 | "source": [ 435 | "# import Logistic Regression from sklearn.linear_model\n", 436 | "from sklearn.linear_model import LogisticRegression" 437 | ], 438 | "execution_count": 9, 439 | "outputs": [] 440 | }, 441 | { 442 | "cell_type": "code", 443 | "metadata": { 444 | "id": "7FCfCfawjDyK", 445 | "colab_type": "code", 446 | "colab": {} 447 | }, 448 | "source": [ 449 | "log_model = LogisticRegression()" 450 | ], 451 | "execution_count": 10, 452 | "outputs": [] 453 | }, 454 | { 455 | "cell_type": "code", 456 | "metadata": { 457 | "id": "caHe1oKSjuq-", 458 | "colab_type": "code", 459 | "colab": { 460 | "base_uri": "https://localhost:8080/", 461 | "height": 102 462 | }, 463 | "outputId": "0fa6bd81-c446-4be6-eca0-e7528aaec7ad" 464 | }, 465 | "source": [ 466 | "# Fit the model\n", 467 | "log_model.fit(X_train, y_train)" 468 | ], 469 | "execution_count": 11, 470 | "outputs": [ 471 | { 472 | "output_type": "execute_result", 473 | "data": { 474 | "text/plain": [ 475 | "LogisticRegression(C=1.0, class_weight=None, dual=False, 
fit_intercept=True,\n", 476 | " intercept_scaling=1, l1_ratio=None, max_iter=100,\n", 477 | " multi_class='auto', n_jobs=None, penalty='l2',\n", 478 | " random_state=None, solver='lbfgs', tol=0.0001, verbose=0,\n", 479 | " warm_start=False)" 480 | ] 481 | }, 482 | "metadata": { 483 | "tags": [] 484 | }, 485 | "execution_count": 11 486 | } 487 | ] 488 | }, 489 | { 490 | "cell_type": "markdown", 491 | "metadata": { 492 | "id": "pkxVjh8wkB91", 493 | "colab_type": "text" 494 | }, 495 | "source": [ 496 | "The training itself happens in the \"fit\" function." 497 | ] 498 | }, 499 | { 500 | "cell_type": "markdown", 501 | "metadata": { 502 | "id": "UON4736ekMZw", 503 | "colab_type": "text" 504 | }, 505 | "source": [ 506 | "## Prediction\n", 507 | "Now the logistic regression model (i.e. log_model) is trained using the X_train and y_train data. Let's predict the target value (i.e. bought_insurance) for the X_test data. We use the \"predict()\" method for prediction." 508 | ] 509 | }, 510 | { 511 | "cell_type": "code", 512 | "metadata": { 513 | "id": "4Fy1LxR_kA-f", 514 | "colab_type": "code", 515 | "colab": {} 516 | }, 517 | "source": [ 518 | "predictions = log_model.predict(X_test)" 519 | ], 520 | "execution_count": 12, 521 | "outputs": [] 522 | }, 523 | { 524 | "cell_type": "markdown", 525 | "metadata": { 526 | "id": "eiA_FMiwkxN4", 527 | "colab_type": "text" 528 | }, 529 | "source": [ 530 | "We already have the actual target values (i.e. y_test) for X_test. Let's compare y_test with the values our log_model predicted for X_test." 531 | ] 532 | }, 533 | { 534 | "cell_type": "code", 535 | "metadata": { 536 | "id": "3kSld42hkvcc", 537 | "colab_type": "code", 538 | "colab": { 539 | "base_uri": "https://localhost:8080/", 540 | "height": 34 541 | }, 542 | "outputId": "99715ab8-f497-4d4a-eeaa-ab8230728b55" 543 | }, 544 | "source": [ 545 | "y_test.values" 546 | ], 547 | "execution_count": 13, 548 | "outputs": [ 549 | { 550 | "output_type": "execute_result", 551 | "data": { 552 | "text/plain": [ 553 | "array([1, 0, 1, 0, 0, 0, 1, 1, 0])" 554 | ] 555 | }, 556 | "metadata": { 557 | "tags": [] 558 | }, 559 | "execution_count": 13 560 | } 561 | ] 562 | }, 563 | { 564 | "cell_type": "code", 565 | "metadata": { 566 | "id": "Csji5Qu9lJW7", 567 | "colab_type": "code", 568 | "colab": { 569 | "base_uri": "https://localhost:8080/", 570 | "height": 34 571 | }, 572 | "outputId": "16d1610e-cf63-4623-b0f3-528265a984ca" 573 | }, 574 | "source": [ 575 | "predictions" 576 | ], 577 | "execution_count": 14, 578 | "outputs": [ 579 | { 580 | "output_type": "execute_result", 581 | "data": { 582 | "text/plain": [ 583 | "array([1, 0, 1, 0, 0, 0, 0, 1, 0])" 584 | ] 585 | }, 586 | "metadata": { 587 | "tags": [] 588 | }, 589 | "execution_count": 14 590 | } 591 | ] 592 | }, 593 | { 594 | "cell_type": "markdown", 595 | "metadata": { 596 | "id": "aBpanTWzlZmU", 597 | "colab_type": "text" 598 | }, 599 | "source": [ 600 | "There is one person who had actually bought insurance but for whom our model predicted that they had not. So, there is one example misclassified by our model." 601 | ] 602 | }, 603 | { 604 | "cell_type": "markdown", 605 | "metadata": { 606 | "id": "dz0ebyPjlzkp", 607 | "colab_type": "text" 608 | }, 609 | "source": [ 610 | "## Model Performance\n", 611 | "We can also check how accurately our model is performing using the 'accuracy_score' function from 'sklearn.metrics'."
612 | ] 613 | }, 614 | { 615 | "cell_type": "code", 616 | "metadata": { 617 | "id": "cKj5hppYyQ9Y", 618 | "colab_type": "code", 619 | "colab": { 620 | "base_uri": "https://localhost:8080/", 621 | "height": 51 622 | }, 623 | "outputId": "0217bbb6-8299-4f31-be89-82b980a359fc" 624 | }, 625 | "source": [ 626 | "# The confusion matrix\n", 627 | "from sklearn.metrics import confusion_matrix\n", 628 | "confusion_matrix(y_test, predictions) " 629 | ], 630 | "execution_count": 18, 631 | "outputs": [ 632 | { 633 | "output_type": "execute_result", 634 | "data": { 635 | "text/plain": [ 636 | "array([[5, 0],\n", 637 | " [1, 3]])" 638 | ] 639 | }, 640 | "metadata": { 641 | "tags": [] 642 | }, 643 | "execution_count": 18 644 | } 645 | ] 646 | }, 647 | { 648 | "cell_type": "code", 649 | "metadata": { 650 | "id": "ooEvw_39zoB2", 651 | "colab_type": "code", 652 | "colab": { 653 | "base_uri": "https://localhost:8080/", 654 | "height": 34 655 | }, 656 | "outputId": "6751bc08-7647-476c-d16b-3ceafe133a89" 657 | }, 658 | "source": [ 659 | "tn, fp, fn, tp = confusion_matrix(y_test, predictions).ravel() # ravel() will convert the 2D numpy array into 1D.\n", 660 | "print(tn, fp, fn, tp)" 661 | ], 662 | "execution_count": 20, 663 | "outputs": [ 664 | { 665 | "output_type": "stream", 666 | "text": [ 667 | "5 0 1 3\n" 668 | ], 669 | "name": "stdout" 670 | } 671 | ] 672 | }, 673 | { 674 | "cell_type": "code", 675 | "metadata": { 676 | "id": "K2TpjY1NlNlS", 677 | "colab_type": "code", 678 | "colab": {} 679 | }, 680 | "source": [ 681 | "from sklearn.metrics import accuracy_score" 682 | ], 683 | "execution_count": 15, 684 | "outputs": [] 685 | }, 686 | { 687 | "cell_type": "code", 688 | "metadata": { 689 | "id": "v0KrwUNsmLV6", 690 | "colab_type": "code", 691 | "colab": { 692 | "base_uri": "https://localhost:8080/", 693 | "height": 34 694 | }, 695 | "outputId": "7a23c16b-4a52-43cc-a4df-de43692b7e8f" 696 | }, 697 | "source": [ 698 | "accuracy_score(y_test, predictions)" 699 | ], 700 | "execution_count": 16, 701 | "outputs": [ 702 | { 703 | "output_type": "execute_result", 704 | "data": { 705 | "text/plain": [ 706 | "0.8888888888888888" 707 | ] 708 | }, 709 | "metadata": { 710 | "tags": [] 711 | }, 712 | "execution_count": 16 713 | } 714 | ] 715 | }, 716 | { 717 | "cell_type": "markdown", 718 | "metadata": { 719 | "id": "x_MMd2w703S5", 720 | "colab_type": "text" 721 | }, 722 | "source": [ 723 | "**Why accuracy score?**\n", 724 | "\n", 725 | "Accuracy is a great measure when you have symmetric datasets, where the counts of false positives and false negatives are almost the same. As you can see in the confusion matrix above, the false positives (fp = 0) and false negatives (fn = 1) are almost the same, so here the accuracy score is a suitable measure.\n", 726 | "\n", 727 | "Further reading: https://blog.exsilio.com/all/accuracy-precision-recall-f1-score-interpretation-of-performance-measures/" 728 | ] 729 | }, 730 | { 731 | "cell_type": "markdown", 732 | "metadata": { 733 | "id": "lBMN-wOamc7i", 734 | "colab_type": "text" 735 | }, 736 | "source": [ 737 | "Our model is predicting 88.9% of the results correctly." 738 | ] 739 | }, 740 | { 741 | "cell_type": "markdown", 742 | "metadata": { 743 | "id": "dPR3NvaJmSIF", 744 | "colab_type": "text" 745 | }, 746 | "source": [ 747 | "### Thanks for reading the Notebook!!!"
748 | ] 749 | }, 750 | { 751 | "cell_type": "markdown", 752 | "metadata": { 753 | "id": "G2SmEVskR65o", 754 | "colab_type": "text" 755 | }, 756 | "source": [ 757 | "## Exercise\n", 758 | "**Instruction:**\n", 759 | "\n", 760 | "Use the raw data github link: https://raw.githubusercontent.com/dphi-official/Datasets/master/HR_comma_sep.csv \n", 761 | "\n", 762 | "Or you can download it here from [here](https://www.kaggle.com/giripujar/hr-analytics)\n", 763 | "\n", 764 | "**Exercise:**\n", 765 | "\n", 766 | "* Load libraries and data.\n", 767 | "* Do some exploratory data analysis to figure out which variables have direct and clear impact on employee retention (i.e. whether they leave the company or continue to work)\n", 768 | "* Plot bar charts showing impact of employee salaries on retention\n", 769 | "* See the correlation between department and employee retention\n", 770 | "* Separate dependent and independent variables.\n", 771 | "* Split the data into train set and test set\n", 772 | "* Now build Logistic Regression model and do prediction for test data\n", 773 | "* Measure the accuracy of the model\n" 774 | ] 775 | }, 776 | { 777 | "cell_type": "markdown", 778 | "metadata": { 779 | "id": "y2MZWSKNnX6P", 780 | "colab_type": "text" 781 | }, 782 | "source": [ 783 | "**References:**\n", 784 | "\n", 785 | "https://github.com/codebasics/py/blob/master/ML/7_logistic_reg/7_logistic_regression.ipynb" 786 | ] 787 | } 788 | ] 789 | } -------------------------------------------------------------------------------- /Model_Optimisation/.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | 3 | 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 6 | *.py[cod] 7 | *$py.class 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | env/ 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | 31 | # PyInstaller 32 | # Usually these files are written by a python script from a template 33 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
34 | *.manifest 35 | *.spec 36 | 37 | # Installer logs 38 | pip-log.txt 39 | pip-delete-this-directory.txt 40 | 41 | # Unit test / coverage reports 42 | htmlcov/ 43 | .tox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | .hypothesis/ 51 | 52 | # Translations 53 | *.mo 54 | *.pot 55 | 56 | # Django stuff: 57 | *.log 58 | local_settings.py 59 | 60 | # Flask stuff: 61 | instance/ 62 | .webassets-cache 63 | 64 | # Scrapy stuff: 65 | .scrapy 66 | 67 | # Sphinx documentation 68 | docs/_build/ 69 | 70 | # PyBuilder 71 | target/ 72 | 73 | # Jupyter Notebook 74 | .ipynb_checkpoints 75 | 76 | # pyenv 77 | .python-version 78 | 79 | # celery beat schedule file 80 | celerybeat-schedule 81 | 82 | # SageMath parsed files 83 | *.sage.py 84 | 85 | # dotenv 86 | .env 87 | 88 | # virtualenv 89 | .venv 90 | venv/ 91 | ENV/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | -------------------------------------------------------------------------------- /Model_Optimisation/data/loan_prediction.csv: -------------------------------------------------------------------------------- 1 | ApplicantIncome,CoapplicantIncome,LoanAmount,Loan_Amount_Term,Credit_History,Loan_Status 2 | 5849,0.0,0.0,360.0,1.0,1 3 | 4583,1508.0,128.0,360.0,1.0,0 4 | 3000,0.0,66.0,360.0,1.0,1 5 | 2583,2358.0,120.0,360.0,1.0,1 6 | 6000,0.0,141.0,360.0,1.0,1 7 | 5417,4196.0,267.0,360.0,1.0,1 8 | 2333,1516.0,95.0,360.0,1.0,1 9 | 3036,2504.0,158.0,360.0,0.0,0 10 | 4006,1526.0,168.0,360.0,1.0,1 11 | 12841,10968.0,349.0,360.0,1.0,0 12 | 3200,700.0,70.0,360.0,1.0,1 13 | 2500,1840.0,109.0,360.0,1.0,1 14 | 3073,8106.0,200.0,360.0,1.0,1 15 | 1853,2840.0,114.0,360.0,1.0,0 16 | 1299,1086.0,17.0,120.0,1.0,1 17 | 4950,0.0,125.0,360.0,1.0,1 18 | 3596,0.0,100.0,240.0,0.0,1 19 | 3510,0.0,76.0,360.0,0.0,0 20 | 4887,0.0,133.0,360.0,1.0,0 21 | 2600,3500.0,115.0,0.0,1.0,1 22 | 7660,0.0,104.0,360.0,0.0,0 23 | 5955,5625.0,315.0,360.0,1.0,1 24 | 2600,1911.0,116.0,360.0,0.0,0 25 | 3365,1917.0,112.0,360.0,0.0,0 26 | 3717,2925.0,151.0,360.0,0.0,0 27 | 9560,0.0,191.0,360.0,1.0,1 28 | 2799,2253.0,122.0,360.0,1.0,1 29 | 4226,1040.0,110.0,360.0,1.0,1 30 | 1442,0.0,35.0,360.0,1.0,0 31 | 3750,2083.0,120.0,360.0,1.0,1 32 | 4166,3369.0,201.0,360.0,0.0,0 33 | 3167,0.0,74.0,360.0,1.0,0 34 | 4692,0.0,106.0,360.0,1.0,0 35 | 3500,1667.0,114.0,360.0,1.0,1 36 | 12500,3000.0,320.0,360.0,1.0,0 37 | 2275,2067.0,0.0,360.0,1.0,1 38 | 1828,1330.0,100.0,0.0,0.0,0 39 | 3667,1459.0,144.0,360.0,1.0,1 40 | 4166,7210.0,184.0,360.0,1.0,1 41 | 3748,1668.0,110.0,360.0,1.0,1 42 | 3600,0.0,80.0,360.0,1.0,0 43 | 1800,1213.0,47.0,360.0,1.0,1 44 | 2400,0.0,75.0,360.0,0.0,1 45 | 3941,2336.0,134.0,360.0,1.0,1 46 | 4695,0.0,96.0,0.0,1.0,1 47 | 3410,0.0,88.0,0.0,1.0,1 48 | 5649,0.0,44.0,360.0,1.0,1 49 | 5821,0.0,144.0,360.0,1.0,1 50 | 2645,3440.0,120.0,360.0,0.0,0 51 | 4000,2275.0,144.0,360.0,1.0,1 52 | 1928,1644.0,100.0,360.0,1.0,1 53 | 3086,0.0,120.0,360.0,1.0,1 54 | 4230,0.0,112.0,360.0,1.0,0 55 | 4616,0.0,134.0,360.0,1.0,0 56 | 11500,0.0,286.0,360.0,0.0,0 57 | 2708,1167.0,97.0,360.0,1.0,1 58 | 2132,1591.0,96.0,360.0,1.0,1 59 | 3366,2200.0,135.0,360.0,1.0,0 60 | 8080,2250.0,180.0,360.0,1.0,1 61 | 3357,2859.0,144.0,360.0,1.0,1 62 | 2500,3796.0,120.0,360.0,1.0,1 63 | 3029,0.0,99.0,360.0,1.0,1 64 | 2609,3449.0,165.0,180.0,0.0,0 65 | 4945,0.0,0.0,360.0,0.0,0 66 | 4166,0.0,116.0,360.0,0.0,0 67 | 
5726,4595.0,258.0,360.0,1.0,0 68 | 3200,2254.0,126.0,180.0,0.0,0 69 | 10750,0.0,312.0,360.0,1.0,1 70 | 7100,0.0,125.0,60.0,1.0,1 71 | 4300,0.0,136.0,360.0,0.0,0 72 | 3208,3066.0,172.0,360.0,1.0,1 73 | 1875,1875.0,97.0,360.0,1.0,1 74 | 3500,0.0,81.0,300.0,1.0,1 75 | 4755,0.0,95.0,0.0,0.0,0 76 | 5266,1774.0,187.0,360.0,1.0,1 77 | 3750,0.0,113.0,480.0,1.0,0 78 | 3750,4750.0,176.0,360.0,1.0,0 79 | 1000,3022.0,110.0,360.0,1.0,0 80 | 3167,4000.0,180.0,300.0,0.0,0 81 | 3333,2166.0,130.0,360.0,0.0,1 82 | 3846,0.0,111.0,360.0,1.0,1 83 | 2395,0.0,0.0,360.0,1.0,1 84 | 1378,1881.0,167.0,360.0,1.0,0 85 | 6000,2250.0,265.0,360.0,0.0,0 86 | 3988,0.0,50.0,240.0,1.0,1 87 | 2366,2531.0,136.0,360.0,1.0,1 88 | 3333,2000.0,99.0,360.0,0.0,1 89 | 2500,2118.0,104.0,360.0,1.0,1 90 | 8566,0.0,210.0,360.0,1.0,1 91 | 5695,4167.0,175.0,360.0,1.0,1 92 | 2958,2900.0,131.0,360.0,1.0,1 93 | 6250,5654.0,188.0,180.0,1.0,1 94 | 3273,1820.0,81.0,360.0,1.0,1 95 | 4133,0.0,122.0,360.0,1.0,1 96 | 3620,0.0,25.0,120.0,1.0,1 97 | 6782,0.0,0.0,360.0,0.0,0 98 | 2484,2302.0,137.0,360.0,1.0,1 99 | 1977,997.0,50.0,360.0,1.0,1 100 | 4188,0.0,115.0,180.0,1.0,1 101 | 1759,3541.0,131.0,360.0,1.0,1 102 | 4288,3263.0,133.0,180.0,1.0,1 103 | 4843,3806.0,151.0,360.0,1.0,1 104 | 13650,0.0,0.0,360.0,1.0,1 105 | 4652,3583.0,0.0,360.0,1.0,1 106 | 3816,754.0,160.0,360.0,1.0,1 107 | 3052,1030.0,100.0,360.0,1.0,1 108 | 11417,1126.0,225.0,360.0,1.0,1 109 | 7333,0.0,120.0,360.0,1.0,0 110 | 3800,3600.0,216.0,360.0,0.0,0 111 | 2071,754.0,94.0,480.0,1.0,1 112 | 5316,0.0,136.0,360.0,1.0,1 113 | 2929,2333.0,139.0,360.0,1.0,1 114 | 3572,4114.0,152.0,0.0,0.0,0 115 | 7451,0.0,0.0,360.0,1.0,1 116 | 5050,0.0,118.0,360.0,1.0,1 117 | 14583,0.0,185.0,180.0,1.0,1 118 | 3167,2283.0,154.0,360.0,1.0,1 119 | 2214,1398.0,85.0,360.0,0.0,1 120 | 5568,2142.0,175.0,360.0,1.0,0 121 | 10408,0.0,259.0,360.0,1.0,1 122 | 5667,2667.0,180.0,360.0,1.0,1 123 | 4166,0.0,44.0,360.0,1.0,1 124 | 2137,8980.0,137.0,360.0,0.0,1 125 | 2957,0.0,81.0,360.0,1.0,1 126 | 4300,2014.0,194.0,360.0,1.0,1 127 | 3692,0.0,93.0,360.0,0.0,1 128 | 23803,0.0,370.0,360.0,1.0,1 129 | 3865,1640.0,0.0,360.0,1.0,1 130 | 10513,3850.0,160.0,180.0,0.0,0 131 | 6080,2569.0,182.0,360.0,0.0,0 132 | 20166,0.0,650.0,480.0,0.0,1 133 | 2014,1929.0,74.0,360.0,1.0,1 134 | 2718,0.0,70.0,360.0,1.0,1 135 | 3459,0.0,25.0,120.0,1.0,1 136 | 4895,0.0,102.0,360.0,1.0,1 137 | 4000,7750.0,290.0,360.0,1.0,0 138 | 4583,0.0,84.0,360.0,1.0,0 139 | 3316,3500.0,88.0,360.0,1.0,1 140 | 14999,0.0,242.0,360.0,0.0,0 141 | 4200,1430.0,129.0,360.0,1.0,0 142 | 5042,2083.0,185.0,360.0,1.0,0 143 | 5417,0.0,168.0,360.0,1.0,1 144 | 6950,0.0,175.0,180.0,1.0,1 145 | 2698,2034.0,122.0,360.0,1.0,1 146 | 11757,0.0,187.0,180.0,1.0,1 147 | 2330,4486.0,100.0,360.0,1.0,1 148 | 14866,0.0,70.0,360.0,1.0,1 149 | 1538,1425.0,30.0,360.0,1.0,1 150 | 10000,1666.0,225.0,360.0,1.0,0 151 | 4860,830.0,125.0,360.0,1.0,1 152 | 6277,0.0,118.0,360.0,0.0,0 153 | 2577,3750.0,152.0,360.0,1.0,1 154 | 9166,0.0,244.0,360.0,1.0,0 155 | 2281,0.0,113.0,360.0,1.0,0 156 | 3254,0.0,50.0,360.0,1.0,1 157 | 39999,0.0,600.0,180.0,0.0,1 158 | 6000,0.0,160.0,360.0,0.0,1 159 | 9538,0.0,187.0,360.0,1.0,1 160 | 2980,2083.0,120.0,360.0,1.0,1 161 | 4583,5625.0,255.0,360.0,1.0,1 162 | 1863,1041.0,98.0,360.0,1.0,1 163 | 7933,0.0,275.0,360.0,1.0,0 164 | 3089,1280.0,121.0,360.0,0.0,0 165 | 4167,1447.0,158.0,360.0,1.0,1 166 | 9323,0.0,75.0,180.0,1.0,1 167 | 3707,3166.0,182.0,0.0,1.0,1 168 | 4583,0.0,112.0,360.0,1.0,0 169 | 2439,3333.0,129.0,360.0,1.0,1 170 | 2237,0.0,63.0,480.0,0.0,0 171 | 
8000,0.0,200.0,360.0,1.0,1 172 | 1820,1769.0,95.0,360.0,1.0,1 173 | 51763,0.0,700.0,300.0,1.0,1 174 | 3522,0.0,81.0,180.0,1.0,0 175 | 5708,5625.0,187.0,360.0,1.0,1 176 | 4344,736.0,87.0,360.0,1.0,0 177 | 3497,1964.0,116.0,360.0,1.0,1 178 | 2045,1619.0,101.0,360.0,1.0,1 179 | 5516,11300.0,495.0,360.0,0.0,0 180 | 3750,0.0,116.0,360.0,1.0,1 181 | 2333,1451.0,102.0,480.0,0.0,0 182 | 6400,7250.0,180.0,360.0,0.0,0 183 | 1916,5063.0,67.0,360.0,0.0,0 184 | 4600,0.0,73.0,180.0,1.0,1 185 | 33846,0.0,260.0,360.0,1.0,0 186 | 3625,0.0,108.0,360.0,1.0,1 187 | 39147,4750.0,120.0,360.0,1.0,1 188 | 2178,0.0,66.0,300.0,0.0,0 189 | 2383,2138.0,58.0,360.0,0.0,1 190 | 674,5296.0,168.0,360.0,1.0,1 191 | 9328,0.0,188.0,180.0,1.0,1 192 | 4885,0.0,48.0,360.0,1.0,1 193 | 12000,0.0,164.0,360.0,1.0,0 194 | 6033,0.0,160.0,360.0,1.0,0 195 | 3858,0.0,76.0,360.0,1.0,1 196 | 4191,0.0,120.0,360.0,1.0,1 197 | 3125,2583.0,170.0,360.0,1.0,0 198 | 8333,3750.0,187.0,360.0,1.0,1 199 | 1907,2365.0,120.0,0.0,1.0,1 200 | 3416,2816.0,113.0,360.0,0.0,1 201 | 11000,0.0,83.0,360.0,1.0,0 202 | 2600,2500.0,90.0,360.0,1.0,1 203 | 4923,0.0,166.0,360.0,0.0,1 204 | 3992,0.0,0.0,180.0,1.0,0 205 | 3500,1083.0,135.0,360.0,1.0,1 206 | 3917,0.0,124.0,360.0,1.0,1 207 | 4408,0.0,120.0,360.0,1.0,1 208 | 3244,0.0,80.0,360.0,1.0,1 209 | 3975,2531.0,55.0,360.0,1.0,1 210 | 2479,0.0,59.0,360.0,1.0,1 211 | 3418,0.0,127.0,360.0,1.0,0 212 | 10000,0.0,214.0,360.0,1.0,0 213 | 3430,1250.0,128.0,360.0,0.0,0 214 | 7787,0.0,240.0,360.0,1.0,1 215 | 5703,0.0,130.0,360.0,1.0,1 216 | 3173,3021.0,137.0,360.0,1.0,1 217 | 3850,983.0,100.0,360.0,1.0,1 218 | 150,1800.0,135.0,360.0,1.0,0 219 | 3727,1775.0,131.0,360.0,1.0,1 220 | 5000,0.0,72.0,360.0,0.0,0 221 | 4283,2383.0,127.0,360.0,0.0,1 222 | 2221,0.0,60.0,360.0,0.0,0 223 | 4009,1717.0,116.0,360.0,1.0,1 224 | 2971,2791.0,144.0,360.0,1.0,1 225 | 7578,1010.0,175.0,0.0,1.0,1 226 | 6250,0.0,128.0,360.0,1.0,1 227 | 3250,0.0,170.0,360.0,1.0,0 228 | 4735,0.0,138.0,360.0,1.0,0 229 | 6250,1695.0,210.0,360.0,1.0,1 230 | 4758,0.0,158.0,480.0,1.0,1 231 | 6400,0.0,200.0,360.0,1.0,1 232 | 2491,2054.0,104.0,360.0,1.0,1 233 | 3716,0.0,42.0,180.0,1.0,1 234 | 3189,2598.0,120.0,0.0,1.0,1 235 | 8333,0.0,280.0,360.0,1.0,1 236 | 3155,1779.0,140.0,360.0,1.0,1 237 | 5500,1260.0,170.0,360.0,1.0,1 238 | 5746,0.0,255.0,360.0,0.0,0 239 | 3463,0.0,122.0,360.0,0.0,1 240 | 3812,0.0,112.0,360.0,1.0,1 241 | 3315,0.0,96.0,360.0,1.0,1 242 | 5819,5000.0,120.0,360.0,1.0,1 243 | 2510,1983.0,140.0,180.0,1.0,0 244 | 2965,5701.0,155.0,60.0,1.0,1 245 | 6250,1300.0,108.0,360.0,1.0,1 246 | 3406,4417.0,123.0,360.0,1.0,1 247 | 6050,4333.0,120.0,180.0,1.0,0 248 | 9703,0.0,112.0,360.0,1.0,1 249 | 6608,0.0,137.0,180.0,1.0,1 250 | 2882,1843.0,123.0,480.0,1.0,1 251 | 1809,1868.0,90.0,360.0,1.0,1 252 | 1668,3890.0,201.0,360.0,0.0,0 253 | 3427,0.0,138.0,360.0,1.0,0 254 | 2583,2167.0,104.0,360.0,1.0,1 255 | 2661,7101.0,279.0,180.0,1.0,1 256 | 16250,0.0,192.0,360.0,0.0,0 257 | 3083,0.0,255.0,360.0,1.0,1 258 | 6045,0.0,115.0,360.0,0.0,0 259 | 5250,0.0,94.0,360.0,1.0,0 260 | 14683,2100.0,304.0,360.0,1.0,0 261 | 4931,0.0,128.0,360.0,0.0,0 262 | 6083,4250.0,330.0,360.0,0.0,1 263 | 2060,2209.0,134.0,360.0,1.0,1 264 | 3481,0.0,155.0,36.0,1.0,0 265 | 7200,0.0,120.0,360.0,1.0,1 266 | 5166,0.0,128.0,360.0,1.0,1 267 | 4095,3447.0,151.0,360.0,1.0,1 268 | 4708,1387.0,150.0,360.0,1.0,1 269 | 4333,1811.0,160.0,360.0,0.0,1 270 | 3418,0.0,135.0,360.0,1.0,0 271 | 2876,1560.0,90.0,360.0,1.0,1 272 | 3237,0.0,30.0,360.0,1.0,1 273 | 11146,0.0,136.0,360.0,1.0,1 274 | 
2833,1857.0,126.0,360.0,1.0,1 275 | 2620,2223.0,150.0,360.0,1.0,1 276 | 3900,0.0,90.0,360.0,1.0,1 277 | 2750,1842.0,115.0,360.0,1.0,1 278 | 3993,3274.0,207.0,360.0,1.0,1 279 | 3103,1300.0,80.0,360.0,1.0,1 280 | 14583,0.0,436.0,360.0,1.0,1 281 | 4100,0.0,124.0,360.0,0.0,1 282 | 4053,2426.0,158.0,360.0,0.0,0 283 | 3927,800.0,112.0,360.0,1.0,1 284 | 2301,985.7999878,78.0,180.0,1.0,1 285 | 1811,1666.0,54.0,360.0,1.0,1 286 | 20667,0.0,0.0,360.0,1.0,0 287 | 3158,3053.0,89.0,360.0,1.0,1 288 | 2600,1717.0,99.0,300.0,1.0,0 289 | 3704,2000.0,120.0,360.0,1.0,1 290 | 4124,0.0,115.0,360.0,1.0,1 291 | 9508,0.0,187.0,360.0,1.0,1 292 | 3075,2416.0,139.0,360.0,1.0,1 293 | 4400,0.0,127.0,360.0,0.0,0 294 | 3153,1560.0,134.0,360.0,1.0,1 295 | 5417,0.0,143.0,480.0,0.0,0 296 | 2383,3334.0,172.0,360.0,1.0,1 297 | 4416,1250.0,110.0,360.0,1.0,1 298 | 6875,0.0,200.0,360.0,1.0,1 299 | 4666,0.0,135.0,360.0,1.0,1 300 | 5000,2541.0,151.0,480.0,1.0,0 301 | 2014,2925.0,113.0,360.0,1.0,0 302 | 1800,2934.0,93.0,360.0,0.0,0 303 | 2875,1750.0,105.0,360.0,1.0,1 304 | 5000,0.0,132.0,360.0,1.0,1 305 | 1625,1803.0,96.0,360.0,1.0,1 306 | 4000,2500.0,140.0,360.0,1.0,1 307 | 2000,0.0,0.0,360.0,1.0,0 308 | 3762,1666.0,135.0,360.0,1.0,1 309 | 2400,1863.0,104.0,360.0,0.0,0 310 | 20233,0.0,480.0,360.0,1.0,0 311 | 7667,0.0,185.0,360.0,0.0,1 312 | 2917,0.0,84.0,360.0,1.0,1 313 | 2927,2405.0,111.0,360.0,1.0,1 314 | 2507,0.0,56.0,360.0,1.0,1 315 | 5746,0.0,144.0,84.0,0.0,1 316 | 2473,1843.0,159.0,360.0,1.0,0 317 | 3399,1640.0,111.0,180.0,1.0,1 318 | 3717,0.0,120.0,360.0,1.0,1 319 | 2058,2134.0,88.0,360.0,0.0,1 320 | 3541,0.0,112.0,360.0,0.0,1 321 | 10000,0.0,155.0,360.0,1.0,0 322 | 2400,2167.0,115.0,360.0,1.0,1 323 | 4342,189.0,124.0,360.0,1.0,1 324 | 3601,1590.0,0.0,360.0,1.0,1 325 | 3166,2985.0,132.0,360.0,0.0,1 326 | 15000,0.0,300.0,360.0,1.0,1 327 | 8666,4983.0,376.0,360.0,0.0,0 328 | 4917,0.0,130.0,360.0,0.0,1 329 | 5818,2160.0,184.0,360.0,1.0,1 330 | 4333,2451.0,110.0,360.0,1.0,0 331 | 2500,0.0,67.0,360.0,1.0,1 332 | 4384,1793.0,117.0,360.0,1.0,1 333 | 2935,0.0,98.0,360.0,1.0,1 334 | 2833,0.0,71.0,360.0,1.0,1 335 | 63337,0.0,490.0,180.0,1.0,1 336 | 9833,1833.0,182.0,180.0,1.0,1 337 | 5503,4490.0,70.0,0.0,1.0,1 338 | 5250,688.0,160.0,360.0,1.0,1 339 | 2500,4600.0,176.0,360.0,1.0,1 340 | 1830,0.0,0.0,360.0,0.0,0 341 | 4160,0.0,71.0,360.0,1.0,1 342 | 2647,1587.0,173.0,360.0,1.0,0 343 | 2378,0.0,46.0,360.0,1.0,0 344 | 4554,1229.0,158.0,360.0,1.0,1 345 | 3173,0.0,74.0,360.0,1.0,1 346 | 2583,2330.0,125.0,360.0,1.0,1 347 | 2499,2458.0,160.0,360.0,1.0,1 348 | 3523,3230.0,152.0,360.0,0.0,0 349 | 3083,2168.0,126.0,360.0,1.0,1 350 | 6333,4583.0,259.0,360.0,0.0,1 351 | 2625,6250.0,187.0,360.0,1.0,1 352 | 9083,0.0,228.0,360.0,1.0,1 353 | 8750,4167.0,308.0,360.0,1.0,0 354 | 2666,2083.0,95.0,360.0,1.0,1 355 | 5500,0.0,105.0,360.0,0.0,0 356 | 2423,505.0,130.0,360.0,1.0,1 357 | 3813,0.0,116.0,180.0,1.0,1 358 | 8333,3167.0,165.0,360.0,1.0,1 359 | 3875,0.0,67.0,360.0,1.0,0 360 | 3000,1666.0,100.0,480.0,0.0,0 361 | 5167,3167.0,200.0,360.0,1.0,1 362 | 4723,0.0,81.0,360.0,1.0,0 363 | 5000,3667.0,236.0,360.0,1.0,1 364 | 4750,2333.0,130.0,360.0,1.0,1 365 | 3013,3033.0,95.0,300.0,0.0,1 366 | 6822,0.0,141.0,360.0,1.0,1 367 | 6216,0.0,133.0,360.0,1.0,0 368 | 2500,0.0,96.0,480.0,1.0,0 369 | 5124,0.0,124.0,0.0,0.0,0 370 | 6325,0.0,175.0,360.0,1.0,1 371 | 19730,5266.0,570.0,360.0,1.0,0 372 | 15759,0.0,55.0,360.0,1.0,1 373 | 5185,0.0,155.0,360.0,1.0,1 374 | 9323,7873.0,380.0,300.0,1.0,1 375 | 3062,1987.0,111.0,180.0,0.0,0 376 | 2764,1459.0,110.0,360.0,1.0,1 377 | 
4817,923.0,120.0,180.0,1.0,1 378 | 8750,4996.0,130.0,360.0,1.0,1 379 | 4310,0.0,130.0,360.0,0.0,1 380 | 3069,0.0,71.0,480.0,1.0,0 381 | 5391,0.0,130.0,360.0,1.0,1 382 | 3333,2500.0,128.0,360.0,1.0,1 383 | 5941,4232.0,296.0,360.0,1.0,1 384 | 6000,0.0,156.0,360.0,1.0,1 385 | 7167,0.0,128.0,360.0,1.0,1 386 | 4566,0.0,100.0,360.0,1.0,0 387 | 3667,0.0,113.0,180.0,1.0,1 388 | 2346,1600.0,132.0,360.0,1.0,1 389 | 3010,3136.0,0.0,360.0,0.0,0 390 | 2333,2417.0,136.0,360.0,1.0,1 391 | 5488,0.0,125.0,360.0,1.0,1 392 | 9167,0.0,185.0,360.0,1.0,1 393 | 9504,0.0,275.0,360.0,1.0,1 394 | 2583,2115.0,120.0,360.0,0.0,1 395 | 1993,1625.0,113.0,180.0,1.0,1 396 | 3100,1400.0,113.0,360.0,1.0,1 397 | 3276,484.0,135.0,360.0,0.0,1 398 | 3180,0.0,71.0,360.0,0.0,0 399 | 3033,1459.0,95.0,360.0,1.0,1 400 | 3902,1666.0,109.0,360.0,1.0,1 401 | 1500,1800.0,103.0,360.0,0.0,0 402 | 2889,0.0,45.0,180.0,0.0,0 403 | 2755,0.0,65.0,300.0,1.0,0 404 | 2500,20000.0,103.0,360.0,1.0,1 405 | 1963,0.0,53.0,360.0,1.0,1 406 | 7441,0.0,194.0,360.0,1.0,0 407 | 4547,0.0,115.0,360.0,1.0,1 408 | 2167,2400.0,115.0,360.0,1.0,1 409 | 2213,0.0,66.0,360.0,1.0,1 410 | 8300,0.0,152.0,300.0,0.0,0 411 | 81000,0.0,360.0,360.0,0.0,0 412 | 3867,0.0,62.0,360.0,1.0,0 413 | 6256,0.0,160.0,360.0,0.0,1 414 | 6096,0.0,218.0,360.0,0.0,0 415 | 2253,2033.0,110.0,360.0,1.0,1 416 | 2149,3237.0,178.0,360.0,0.0,0 417 | 2995,0.0,60.0,360.0,1.0,1 418 | 2600,0.0,160.0,360.0,1.0,0 419 | 1600,20000.0,239.0,360.0,1.0,0 420 | 1025,2773.0,112.0,360.0,1.0,1 421 | 3246,1417.0,138.0,360.0,1.0,1 422 | 5829,0.0,138.0,360.0,1.0,1 423 | 2720,0.0,80.0,0.0,0.0,0 424 | 1820,1719.0,100.0,360.0,1.0,1 425 | 7250,1667.0,110.0,0.0,0.0,0 426 | 14880,0.0,96.0,360.0,1.0,1 427 | 2666,4300.0,121.0,360.0,1.0,1 428 | 4606,0.0,81.0,360.0,1.0,0 429 | 5935,0.0,133.0,360.0,1.0,1 430 | 2920,16.12000084,87.0,360.0,1.0,1 431 | 2717,0.0,60.0,180.0,1.0,1 432 | 8624,0.0,150.0,360.0,1.0,1 433 | 6500,0.0,105.0,360.0,0.0,0 434 | 12876,0.0,405.0,360.0,1.0,1 435 | 2425,2340.0,143.0,360.0,1.0,1 436 | 3750,0.0,100.0,360.0,1.0,1 437 | 10047,0.0,0.0,240.0,1.0,1 438 | 1926,1851.0,50.0,360.0,1.0,1 439 | 2213,1125.0,0.0,360.0,1.0,1 440 | 10416,0.0,187.0,360.0,0.0,0 441 | 7142,0.0,138.0,360.0,1.0,1 442 | 3660,5064.0,187.0,360.0,1.0,1 443 | 7901,1833.0,180.0,360.0,1.0,1 444 | 4707,1993.0,148.0,360.0,1.0,1 445 | 37719,0.0,152.0,360.0,1.0,1 446 | 7333,8333.0,175.0,300.0,0.0,1 447 | 3466,1210.0,130.0,360.0,1.0,1 448 | 4652,0.0,110.0,360.0,1.0,1 449 | 3539,1376.0,55.0,360.0,1.0,0 450 | 3340,1710.0,150.0,360.0,0.0,0 451 | 2769,1542.0,190.0,360.0,0.0,0 452 | 2309,1255.0,125.0,360.0,0.0,0 453 | 1958,1456.0,60.0,300.0,0.0,1 454 | 3948,1733.0,149.0,360.0,0.0,0 455 | 2483,2466.0,90.0,180.0,0.0,1 456 | 7085,0.0,84.0,360.0,1.0,1 457 | 3859,0.0,96.0,360.0,1.0,1 458 | 4301,0.0,118.0,360.0,1.0,1 459 | 3708,2569.0,173.0,360.0,1.0,0 460 | 4354,0.0,136.0,360.0,1.0,1 461 | 8334,0.0,160.0,360.0,1.0,0 462 | 2083,4083.0,160.0,360.0,0.0,1 463 | 7740,0.0,128.0,180.0,1.0,1 464 | 3015,2188.0,153.0,360.0,1.0,1 465 | 5191,0.0,132.0,360.0,1.0,1 466 | 4166,0.0,98.0,360.0,0.0,0 467 | 6000,0.0,140.0,360.0,1.0,1 468 | 2947,1664.0,70.0,180.0,0.0,0 469 | 16692,0.0,110.0,360.0,1.0,1 470 | 210,2917.0,98.0,360.0,1.0,1 471 | 4333,2451.0,110.0,360.0,1.0,0 472 | 3450,2079.0,162.0,360.0,1.0,1 473 | 2653,1500.0,113.0,180.0,0.0,0 474 | 4691,0.0,100.0,360.0,1.0,1 475 | 2500,0.0,93.0,360.0,0.0,1 476 | 5532,4648.0,162.0,360.0,1.0,1 477 | 16525,1014.0,150.0,360.0,1.0,1 478 | 6700,1750.0,230.0,300.0,1.0,1 479 | 2873,1872.0,132.0,360.0,0.0,0 480 | 
16667,2250.0,86.0,360.0,1.0,1 481 | 2947,1603.0,0.0,360.0,1.0,0 482 | 4350,0.0,154.0,360.0,1.0,1 483 | 3095,0.0,113.0,360.0,1.0,1 484 | 2083,3150.0,128.0,360.0,1.0,1 485 | 10833,0.0,234.0,360.0,1.0,1 486 | 8333,0.0,246.0,360.0,1.0,1 487 | 1958,2436.0,131.0,360.0,1.0,1 488 | 3547,0.0,80.0,360.0,0.0,0 489 | 18333,0.0,500.0,360.0,1.0,0 490 | 4583,2083.0,160.0,360.0,1.0,1 491 | 2435,0.0,75.0,360.0,1.0,0 492 | 2699,2785.0,96.0,360.0,0.0,1 493 | 5333,1131.0,186.0,360.0,0.0,1 494 | 3691,0.0,110.0,360.0,1.0,1 495 | 17263,0.0,225.0,360.0,1.0,1 496 | 3597,2157.0,119.0,360.0,0.0,0 497 | 3326,913.0,105.0,84.0,1.0,1 498 | 2600,1700.0,107.0,360.0,1.0,1 499 | 4625,2857.0,111.0,12.0,0.0,1 500 | 2895,0.0,95.0,360.0,1.0,1 501 | 6283,4416.0,209.0,360.0,0.0,0 502 | 645,3683.0,113.0,480.0,1.0,1 503 | 3159,0.0,100.0,360.0,1.0,1 504 | 4865,5624.0,208.0,360.0,1.0,1 505 | 4050,5302.0,138.0,360.0,0.0,0 506 | 3814,1483.0,124.0,300.0,1.0,1 507 | 3510,4416.0,243.0,360.0,1.0,1 508 | 20833,6667.0,480.0,360.0,0.0,1 509 | 3583,0.0,96.0,360.0,1.0,0 510 | 2479,3013.0,188.0,360.0,1.0,1 511 | 13262,0.0,40.0,360.0,1.0,1 512 | 3598,1287.0,100.0,360.0,1.0,0 513 | 6065,2004.0,250.0,360.0,1.0,1 514 | 3283,2035.0,148.0,360.0,1.0,1 515 | 2130,6666.0,70.0,180.0,1.0,0 516 | 5815,3666.0,311.0,360.0,1.0,0 517 | 3466,3428.0,150.0,360.0,1.0,1 518 | 2031,1632.0,113.0,480.0,1.0,1 519 | 3074,1800.0,123.0,360.0,0.0,0 520 | 4683,1915.0,185.0,360.0,1.0,0 521 | 3400,0.0,95.0,360.0,1.0,0 522 | 2192,1742.0,45.0,360.0,1.0,1 523 | 2500,0.0,55.0,360.0,1.0,1 524 | 5677,1424.0,100.0,360.0,1.0,1 525 | 7948,7166.0,480.0,360.0,1.0,1 526 | 4680,2087.0,0.0,360.0,1.0,0 527 | 17500,0.0,400.0,360.0,1.0,1 528 | 3775,0.0,110.0,360.0,1.0,1 529 | 5285,1430.0,161.0,360.0,0.0,1 530 | 2679,1302.0,94.0,360.0,1.0,1 531 | 6783,0.0,130.0,360.0,1.0,1 532 | 1025,5500.0,216.0,360.0,0.0,1 533 | 4281,0.0,100.0,360.0,1.0,1 534 | 3588,0.0,110.0,360.0,0.0,0 535 | 11250,0.0,196.0,360.0,0.0,0 536 | 18165,0.0,125.0,360.0,1.0,1 537 | 2550,2042.0,126.0,360.0,1.0,1 538 | 6133,3906.0,324.0,360.0,1.0,1 539 | 3617,0.0,107.0,360.0,1.0,1 540 | 2917,536.0,66.0,360.0,1.0,0 541 | 6417,0.0,157.0,180.0,1.0,1 542 | 4608,2845.0,140.0,180.0,1.0,1 543 | 2138,0.0,99.0,360.0,0.0,0 544 | 3652,0.0,95.0,360.0,1.0,1 545 | 2239,2524.0,128.0,360.0,1.0,1 546 | 3017,663.0,102.0,360.0,0.0,1 547 | 2768,1950.0,155.0,360.0,1.0,1 548 | 3358,0.0,80.0,36.0,1.0,0 549 | 2526,1783.0,145.0,360.0,1.0,1 550 | 5000,0.0,103.0,360.0,0.0,0 551 | 2785,2016.0,110.0,360.0,1.0,1 552 | 6633,0.0,0.0,360.0,0.0,0 553 | 2492,2375.0,0.0,360.0,1.0,1 554 | 3333,3250.0,158.0,360.0,1.0,1 555 | 2454,2333.0,181.0,360.0,0.0,0 556 | 3593,4266.0,132.0,180.0,0.0,0 557 | 5468,1032.0,26.0,360.0,1.0,1 558 | 2667,1625.0,84.0,360.0,0.0,1 559 | 10139,0.0,260.0,360.0,1.0,1 560 | 3887,2669.0,162.0,360.0,1.0,1 561 | 4180,2306.0,182.0,360.0,1.0,1 562 | 3675,242.0,108.0,360.0,1.0,1 563 | 19484,0.0,600.0,360.0,1.0,1 564 | 5923,2054.0,211.0,360.0,1.0,1 565 | 5800,0.0,132.0,360.0,1.0,1 566 | 8799,0.0,258.0,360.0,0.0,0 567 | 4467,0.0,120.0,360.0,0.0,1 568 | 3333,0.0,70.0,360.0,1.0,1 569 | 3400,2500.0,123.0,360.0,0.0,0 570 | 2378,0.0,9.0,360.0,1.0,0 571 | 3166,2064.0,104.0,360.0,0.0,0 572 | 3417,1750.0,186.0,360.0,1.0,1 573 | 5116,1451.0,165.0,360.0,0.0,0 574 | 16666,0.0,275.0,360.0,1.0,1 575 | 6125,1625.0,187.0,480.0,1.0,0 576 | 6406,0.0,150.0,360.0,1.0,0 577 | 3159,461.0,108.0,84.0,1.0,1 578 | 3087,2210.0,136.0,360.0,0.0,0 579 | 3229,2739.0,110.0,360.0,1.0,1 580 | 1782,2232.0,107.0,360.0,1.0,1 581 | 3182,2917.0,161.0,360.0,1.0,1 582 | 
6540,0.0,205.0,360.0,1.0,1 583 | 1836,33837.0,90.0,360.0,1.0,0 584 | 3166,0.0,36.0,360.0,1.0,1 585 | 1880,0.0,61.0,360.0,0.0,0 586 | 2787,1917.0,146.0,360.0,0.0,0 587 | 4283,3000.0,172.0,84.0,1.0,0 588 | 2297,1522.0,104.0,360.0,1.0,1 589 | 2165,0.0,70.0,360.0,1.0,1 590 | 4750,0.0,94.0,360.0,1.0,1 591 | 2726,0.0,106.0,360.0,0.0,0 592 | 3000,3416.0,56.0,180.0,1.0,1 593 | 6000,0.0,205.0,240.0,1.0,0 594 | 9357,0.0,292.0,360.0,1.0,1 595 | 3859,3300.0,142.0,180.0,1.0,1 596 | 16120,0.0,260.0,360.0,1.0,1 597 | 3833,0.0,110.0,360.0,1.0,1 598 | 6383,1000.0,187.0,360.0,1.0,0 599 | 2987,0.0,88.0,360.0,0.0,0 600 | 9963,0.0,180.0,360.0,1.0,1 601 | 5780,0.0,192.0,360.0,1.0,1 602 | 416,41667.0,350.0,180.0,0.0,0 603 | 2894,2792.0,155.0,360.0,1.0,1 604 | 5703,0.0,128.0,360.0,1.0,1 605 | 3676,4301.0,172.0,360.0,1.0,1 606 | 12000,0.0,496.0,360.0,1.0,1 607 | 2400,3800.0,0.0,180.0,1.0,0 608 | 3400,2500.0,173.0,360.0,1.0,1 609 | 3987,1411.0,157.0,360.0,1.0,1 610 | 3232,1950.0,108.0,360.0,1.0,1 611 | 2900,0.0,71.0,360.0,1.0,1 612 | 4106,0.0,40.0,180.0,1.0,1 613 | 8072,240.0,253.0,360.0,1.0,1 614 | 7583,0.0,187.0,360.0,1.0,1 615 | 4583,0.0,133.0,360.0,0.0,0 616 | -------------------------------------------------------------------------------- /Model_Optimisation/data/titanic_clean.csv: -------------------------------------------------------------------------------- 1 | ,Survived,Pclass,Sex,Age,Parch,Fare,Embarked,Name_length,Has_Cabin,FamilySize,IsAlone,Title 2 | 0,0,3,1,1,0,0,0,23,0,2,0,1 3 | 1,1,1,0,2,0,3,1,51,1,2,0,3 4 | 2,1,3,0,1,0,1,0,22,0,1,1,2 5 | 3,1,1,0,2,0,3,0,44,1,2,0,3 6 | 4,0,3,1,2,0,1,0,24,0,1,1,1 7 | 5,0,3,1,2,0,1,2,16,0,1,1,1 8 | 6,0,1,1,3,0,3,0,23,1,1,1,1 9 | 7,0,3,1,0,1,2,0,30,0,5,0,4 10 | 8,1,3,0,1,2,1,0,49,0,3,0,3 11 | 9,1,2,0,0,0,2,1,35,0,2,0,3 12 | 10,1,3,0,0,1,2,0,31,1,3,0,2 13 | 11,1,1,0,3,0,2,0,24,1,1,1,2 14 | 12,0,3,1,1,0,1,0,30,0,1,1,1 15 | 13,0,3,1,2,5,3,0,27,0,7,0,1 16 | 14,0,3,0,0,0,0,0,36,0,1,1,2 17 | 15,1,2,0,3,0,2,0,32,0,1,1,3 18 | 16,0,3,1,0,1,2,2,20,0,6,0,4 19 | 17,1,2,1,1,0,1,0,28,0,1,1,1 20 | 18,0,3,0,1,0,2,0,55,0,2,0,3 21 | 19,1,3,0,2,0,0,1,23,0,1,1,3 22 | 20,0,2,1,2,0,2,0,20,0,1,1,1 23 | 21,1,2,1,2,0,1,0,21,1,1,1,1 24 | 22,1,3,0,0,0,1,2,27,0,1,1,2 25 | 23,1,1,1,1,0,3,0,28,1,1,1,1 26 | 24,0,3,0,0,1,2,0,29,0,5,0,2 27 | 25,1,3,0,2,5,3,0,57,0,7,0,3 28 | 26,0,3,1,1,0,0,1,23,0,1,1,1 29 | 27,0,1,1,1,2,3,0,30,1,6,0,1 30 | 28,1,3,0,1,0,0,2,29,0,1,1,2 31 | 29,0,3,1,2,0,0,0,19,0,1,1,1 32 | 30,0,1,1,2,0,2,1,24,0,1,1,5 33 | 31,1,1,0,1,0,3,1,46,1,2,0,3 34 | 32,1,3,0,1,0,0,2,24,0,1,1,2 35 | 33,0,2,1,4,0,1,0,21,0,1,1,1 36 | 34,0,1,1,1,0,3,1,23,0,2,0,1 37 | 35,0,1,1,2,0,3,0,30,0,2,0,1 38 | 36,1,3,1,0,0,0,1,16,0,1,1,1 39 | 37,0,3,1,1,0,1,0,24,0,1,1,1 40 | 38,0,3,0,1,0,2,0,34,0,3,0,2 41 | 39,1,3,0,0,0,1,1,27,0,2,0,2 42 | 40,0,3,0,2,0,1,0,46,0,2,0,3 43 | 41,0,2,0,1,0,2,0,56,0,2,0,3 44 | 42,0,3,1,2,0,0,1,19,0,1,1,1 45 | 43,1,2,0,0,2,3,1,40,0,4,0,2 46 | 44,1,3,0,1,0,0,2,29,0,1,1,2 47 | 45,0,3,1,2,0,1,0,24,0,1,1,1 48 | 46,0,3,1,1,0,2,2,17,0,2,0,1 49 | 47,1,3,0,1,0,0,2,25,0,1,1,2 50 | 48,0,3,1,1,0,2,1,19,0,3,0,1 51 | 49,0,3,0,1,0,2,0,45,0,2,0,3 52 | 50,0,3,1,0,1,3,0,26,0,6,0,4 53 | 51,0,3,1,1,0,0,0,28,0,1,1,1 54 | 52,1,1,0,3,0,3,1,40,1,2,0,3 55 | 53,1,2,0,1,0,2,0,50,0,2,0,3 56 | 54,0,1,1,4,1,3,1,30,1,2,0,1 57 | 55,1,1,1,1,0,3,0,17,1,1,1,1 58 | 56,1,2,0,1,0,1,0,17,0,1,1,2 59 | 57,0,3,1,1,0,0,1,19,0,1,1,1 60 | 58,1,2,0,0,2,2,0,28,0,4,0,2 61 | 59,0,3,1,0,2,3,0,34,0,8,0,4 62 | 60,0,3,1,1,0,0,1,21,0,1,1,1 63 | 61,1,1,0,2,0,3,0,19,1,1,1,2 64 | 62,0,1,1,2,0,3,0,27,1,2,0,1 65 | 63,0,3,1,0,2,2,0,21,0,6,0,4 
66 | 64,0,1,1,2,0,2,1,21,0,1,1,1 67 | 65,1,3,1,1,1,2,1,24,0,3,0,4 68 | 66,1,2,0,1,0,1,0,28,1,1,1,3 69 | 67,0,3,1,1,0,1,0,24,0,1,1,1 70 | 68,1,3,0,1,2,1,0,31,0,7,0,2 71 | 69,0,3,1,1,0,1,0,17,0,3,0,1 72 | 70,0,2,1,1,0,1,0,26,0,1,1,1 73 | 71,0,3,0,0,2,3,0,26,0,8,0,2 74 | 72,0,2,1,1,0,3,0,20,0,1,1,1 75 | 73,0,3,1,1,0,2,1,27,0,2,0,1 76 | 74,1,3,1,1,0,3,0,13,0,1,1,1 77 | 75,0,3,1,1,0,0,0,23,1,1,1,1 78 | 76,0,3,1,2,0,0,0,17,0,1,1,1 79 | 77,0,3,1,1,0,1,0,24,0,1,1,1 80 | 78,1,2,1,0,2,2,0,29,0,3,0,4 81 | 79,1,3,0,1,0,1,0,24,0,1,1,2 82 | 80,0,3,1,1,0,1,0,20,0,1,1,1 83 | 81,1,3,1,1,0,1,0,27,0,1,1,1 84 | 82,1,3,0,1,0,0,2,30,0,1,1,2 85 | 83,0,1,1,1,0,3,0,23,0,1,1,1 86 | 84,1,2,0,1,0,1,0,19,0,1,1,2 87 | 85,1,3,0,2,0,2,0,55,0,4,0,3 88 | 86,0,3,1,0,3,3,0,22,0,5,0,1 89 | 87,0,3,1,1,0,1,0,29,0,1,1,1 90 | 88,1,1,0,1,2,3,0,26,1,6,0,2 91 | 89,0,3,1,1,0,1,0,22,0,1,1,1 92 | 90,0,3,1,1,0,1,0,20,0,1,1,1 93 | 91,0,3,1,1,0,0,0,26,0,1,1,1 94 | 92,0,1,1,2,0,3,0,27,1,2,0,1 95 | 93,0,3,1,1,2,2,0,23,0,4,0,1 96 | 94,0,3,1,3,0,0,0,17,0,1,1,1 97 | 95,0,3,1,2,0,1,0,27,0,1,1,1 98 | 96,0,1,1,4,0,3,1,25,1,1,1,1 99 | 97,1,1,1,1,1,3,1,31,1,2,0,1 100 | 98,1,2,0,2,1,2,0,36,0,2,0,3 101 | 99,0,2,1,2,0,2,0,17,0,2,0,1 102 | 100,0,3,0,1,0,0,0,23,0,1,1,2 103 | 101,0,3,1,1,0,0,0,32,0,1,1,1 104 | 102,0,1,1,1,1,3,0,25,1,2,0,1 105 | 103,0,3,1,2,0,1,0,26,0,1,1,1 106 | 104,0,3,1,2,0,1,0,30,0,3,0,1 107 | 105,0,3,1,1,0,0,0,21,0,1,1,1 108 | 106,1,3,0,1,0,0,0,32,0,1,1,2 109 | 107,1,3,1,2,0,0,0,22,0,1,1,1 110 | 108,0,3,1,2,0,0,0,15,0,1,1,1 111 | 109,1,3,0,1,0,2,2,19,0,2,0,2 112 | 110,0,1,1,2,0,3,0,30,1,1,1,1 113 | 111,0,3,0,0,0,2,1,20,0,2,0,2 114 | 112,0,3,1,1,0,1,0,22,0,1,1,1 115 | 113,0,3,0,1,0,1,0,23,0,2,0,2 116 | 114,0,3,0,1,0,2,1,21,0,1,1,2 117 | 115,0,3,1,1,0,1,0,21,0,1,1,1 118 | 116,0,3,1,4,0,0,2,20,0,1,1,1 119 | 117,0,2,1,1,0,2,0,31,0,2,0,1 120 | 118,0,1,1,1,1,3,1,24,1,2,0,1 121 | 119,0,3,0,0,2,3,0,33,0,7,0,2 122 | 120,0,2,1,1,0,3,0,27,0,3,0,1 123 | 121,0,3,1,1,0,1,0,26,0,1,1,1 124 | 122,0,2,1,1,0,2,1,20,0,2,0,1 125 | 123,1,2,0,1,0,1,0,19,1,1,1,2 126 | 124,0,1,1,3,1,3,0,27,1,2,0,1 127 | 125,1,3,1,0,0,1,1,28,0,2,0,4 128 | 126,0,3,1,2,0,0,2,19,0,1,1,1 129 | 127,1,3,1,1,0,0,0,25,0,1,1,1 130 | 128,1,3,0,1,1,2,1,17,1,3,0,2 131 | 129,0,3,1,2,0,0,0,18,0,1,1,1 132 | 130,0,3,1,2,0,0,1,20,0,1,1,1 133 | 131,0,3,1,1,0,0,0,30,0,1,1,1 134 | 132,0,3,0,2,0,2,0,46,0,2,0,3 135 | 133,1,2,0,1,0,2,0,45,0,2,0,3 136 | 134,0,2,1,1,0,1,0,30,0,1,1,1 137 | 135,0,2,1,1,0,2,1,18,0,1,1,1 138 | 136,1,1,0,1,2,2,0,28,1,3,0,2 139 | 137,0,1,1,2,0,3,0,27,1,2,0,1 140 | 138,0,3,1,0,0,1,0,19,0,1,1,1 141 | 139,0,1,1,1,0,3,1,18,1,1,1,1 142 | 140,0,3,0,1,2,2,1,29,0,3,0,3 143 | 141,1,3,0,1,0,0,0,24,0,1,1,2 144 | 142,1,3,0,1,0,2,0,52,0,2,0,3 145 | 143,0,3,1,1,0,0,2,19,0,1,1,1 146 | 144,0,2,1,1,0,1,0,26,0,1,1,1 147 | 145,0,2,1,1,1,3,0,28,0,3,0,1 148 | 146,1,3,1,1,0,0,0,44,0,1,1,1 149 | 147,0,3,0,0,2,3,0,32,0,5,0,2 150 | 148,0,2,1,2,2,2,0,40,1,3,0,1 151 | 149,0,2,1,2,0,1,0,33,0,1,1,5 152 | 150,0,2,1,3,0,1,0,26,0,1,1,5 153 | 151,1,1,0,1,0,3,0,33,1,2,0,3 154 | 152,0,3,1,3,0,1,0,16,0,1,1,1 155 | 153,0,3,1,2,2,2,0,31,0,3,0,1 156 | 154,0,3,1,1,0,0,0,21,0,1,1,1 157 | 155,0,1,1,3,1,3,1,27,0,2,0,1 158 | 156,1,3,0,0,0,0,2,32,0,1,1,2 159 | 157,0,3,1,1,0,1,0,15,0,1,1,1 160 | 158,0,3,1,1,0,1,0,19,0,1,1,1 161 | 159,0,3,1,1,2,3,0,26,0,11,0,4 162 | 160,0,3,1,2,1,2,0,24,0,2,0,1 163 | 161,1,2,0,2,0,2,0,50,0,1,1,3 164 | 162,0,3,1,1,0,0,0,26,0,1,1,1 165 | 163,0,3,1,1,0,1,0,15,0,1,1,1 166 | 164,0,3,1,0,1,3,0,28,0,6,0,4 167 | 165,1,3,1,0,2,2,0,47,0,3,0,4 168 | 166,1,1,0,2,1,3,0,38,1,2,0,3 169 | 
167,0,3,0,2,4,2,0,47,0,6,0,3 170 | 168,0,1,1,2,0,2,0,19,0,1,1,1 171 | 169,0,3,1,1,0,3,0,13,0,1,1,1 172 | 170,0,1,1,3,0,3,0,25,1,1,1,1 173 | 171,0,3,1,0,1,2,2,20,0,6,0,4 174 | 172,1,3,0,0,1,1,0,28,0,3,0,2 175 | 173,0,3,1,1,0,1,0,25,0,1,1,1 176 | 174,0,1,1,3,0,2,1,23,1,1,1,1 177 | 175,0,3,1,1,1,0,0,22,0,3,0,1 178 | 176,0,3,1,1,1,2,0,29,0,5,0,4 179 | 177,0,1,0,3,0,2,1,26,1,1,1,2 180 | 178,0,2,1,1,0,1,0,18,0,1,1,1 181 | 179,0,3,1,2,0,0,0,19,0,1,1,1 182 | 180,0,3,0,2,2,3,0,28,0,11,0,2 183 | 181,0,2,1,2,0,2,1,16,0,1,1,1 184 | 182,0,3,1,0,2,3,0,37,0,7,0,4 185 | 183,1,2,1,0,1,3,0,25,1,4,0,4 186 | 184,1,3,0,0,2,2,0,35,0,3,0,2 187 | 185,0,1,1,1,0,3,0,21,1,1,1,1 188 | 186,1,3,0,1,0,2,2,47,0,2,0,3 189 | 187,1,1,1,2,0,2,0,45,0,1,1,1 190 | 188,0,3,1,2,1,2,2,16,0,3,0,1 191 | 189,0,3,1,2,0,0,0,19,0,1,1,1 192 | 190,1,2,0,1,0,1,0,19,0,1,1,3 193 | 191,0,2,1,1,0,1,0,21,0,1,1,1 194 | 192,1,3,0,1,0,0,0,47,0,2,0,2 195 | 193,1,2,1,0,1,2,0,26,1,3,0,4 196 | 194,1,1,0,2,0,2,1,41,1,1,1,3 197 | 195,1,1,0,3,0,3,1,20,1,1,1,2 198 | 196,0,3,1,2,0,0,2,19,0,1,1,1 199 | 197,0,3,1,2,1,1,0,32,0,2,0,1 200 | 198,1,3,0,1,0,0,2,32,0,1,1,2 201 | 199,0,2,0,1,0,1,0,38,0,1,1,2 202 | 200,0,3,1,1,0,1,0,30,0,1,1,1 203 | 201,0,3,1,2,2,3,0,19,0,11,0,1 204 | 202,0,3,1,2,0,0,0,26,0,1,1,1 205 | 203,0,3,1,2,0,0,1,20,0,1,1,1 206 | 204,1,3,1,1,0,1,0,24,0,1,1,1 207 | 205,0,3,0,0,1,1,0,26,1,2,0,2 208 | 206,0,3,1,1,0,2,0,26,0,2,0,1 209 | 207,1,3,1,1,0,2,1,27,0,1,1,1 210 | 208,1,3,0,0,0,0,2,25,0,1,1,2 211 | 209,1,1,1,2,0,2,1,16,1,1,1,1 212 | 210,0,3,1,1,0,0,0,14,0,1,1,1 213 | 211,1,2,0,2,0,2,0,26,0,1,1,2 214 | 212,0,3,1,1,0,0,0,22,0,1,1,1 215 | 213,0,2,1,1,0,1,0,27,0,1,1,1 216 | 214,0,3,1,1,0,0,2,19,0,2,0,1 217 | 215,1,1,0,1,0,3,1,23,1,2,0,2 218 | 216,1,3,0,1,0,1,0,22,0,1,1,2 219 | 217,0,2,1,2,0,2,0,28,0,2,0,1 220 | 218,1,1,0,1,0,3,1,21,1,1,1,2 221 | 219,0,2,1,1,0,1,0,18,0,1,1,1 222 | 220,1,3,1,0,0,1,0,30,0,1,1,1 223 | 221,0,2,1,1,0,1,0,20,0,1,1,1 224 | 222,0,3,1,3,0,1,0,23,0,1,1,1 225 | 223,0,3,1,2,0,0,0,20,0,1,1,1 226 | 224,1,1,1,2,0,3,0,28,1,2,0,1 227 | 225,0,3,1,1,0,1,0,28,0,1,1,1 228 | 226,1,2,1,1,0,1,0,25,0,1,1,1 229 | 227,0,3,1,1,0,0,0,31,0,1,1,1 230 | 228,0,2,1,1,0,1,0,25,0,1,1,1 231 | 229,0,3,0,1,1,2,0,23,0,5,0,2 232 | 230,1,1,0,2,0,3,0,44,1,2,0,3 233 | 231,0,3,1,1,0,0,0,24,0,1,1,1 234 | 232,0,2,1,3,0,1,0,25,0,1,1,1 235 | 233,1,3,0,0,2,3,0,30,0,7,0,2 236 | 234,0,2,1,1,0,1,0,33,0,1,1,1 237 | 235,0,3,0,1,0,0,0,28,0,1,1,2 238 | 236,0,2,1,2,0,2,0,17,0,2,0,1 239 | 237,1,2,0,0,2,2,0,32,0,3,0,2 240 | 238,0,2,1,1,0,1,0,31,0,1,1,1 241 | 239,0,2,1,2,0,1,0,22,0,1,1,1 242 | 240,0,3,0,2,0,2,1,21,0,2,0,2 243 | 241,1,3,0,0,0,2,2,30,0,2,0,2 244 | 242,0,2,1,1,0,1,0,31,0,1,1,1 245 | 243,0,3,1,1,0,0,0,29,0,1,1,1 246 | 244,0,3,1,1,0,0,1,20,0,1,1,1 247 | 245,0,1,1,2,0,3,2,27,1,3,0,5 248 | 246,0,3,0,1,0,0,0,37,0,1,1,2 249 | 247,1,2,0,1,2,2,0,31,0,3,0,3 250 | 248,1,1,1,2,1,3,0,29,1,3,0,1 251 | 249,0,2,1,3,0,2,0,29,0,2,0,5 252 | 250,0,3,1,1,0,0,0,22,0,1,1,1 253 | 251,0,3,0,1,1,1,0,42,1,3,0,3 254 | 252,0,1,1,3,0,2,0,25,1,1,1,1 255 | 253,0,3,1,1,0,2,0,24,0,2,0,1 256 | 254,0,3,0,2,2,2,0,40,0,3,0,3 257 | 255,1,3,0,1,2,2,1,39,0,3,0,3 258 | 256,1,1,0,1,0,3,1,30,0,1,1,3 259 | 257,1,1,0,1,0,3,0,20,1,1,1,2 260 | 258,1,1,0,2,0,3,1,16,0,1,1,2 261 | 259,1,2,0,3,1,2,0,27,0,2,0,3 262 | 260,0,3,1,1,0,0,2,17,0,1,1,1 263 | 261,1,3,1,0,2,3,0,33,0,7,0,4 264 | 262,0,1,1,3,1,3,0,17,1,3,0,1 265 | 263,0,1,1,2,0,0,0,21,1,1,1,1 266 | 264,0,3,0,2,0,0,2,18,0,1,1,2 267 | 265,0,2,1,2,0,1,0,17,0,1,1,1 268 | 266,0,3,1,0,1,3,0,25,0,6,0,1 269 | 267,1,3,1,1,0,0,0,24,0,2,0,1 270 | 
268,1,1,0,3,1,3,0,45,1,2,0,3 271 | 269,1,1,0,2,0,3,0,22,1,1,1,2 272 | 270,0,1,1,2,0,2,0,21,0,1,1,1 273 | 271,1,3,1,1,0,0,0,28,0,1,1,1 274 | 272,1,2,0,2,1,2,0,41,0,2,0,3 275 | 273,0,1,1,2,1,2,1,21,1,2,0,1 276 | 274,1,3,0,2,0,0,2,26,0,1,1,2 277 | 275,1,1,0,3,0,3,0,33,1,2,0,2 278 | 276,0,3,0,2,0,0,0,33,0,1,1,2 279 | 277,0,2,1,1,0,0,0,27,0,1,1,1 280 | 278,0,3,1,0,1,2,2,18,0,6,0,4 281 | 279,1,3,0,2,1,2,0,32,0,3,0,3 282 | 280,0,3,1,4,0,0,2,16,0,1,1,1 283 | 281,0,3,1,1,0,0,0,32,0,1,1,1 284 | 282,0,3,1,0,0,1,0,25,0,1,1,1 285 | 283,1,3,1,1,0,1,0,26,0,1,1,1 286 | 284,0,1,1,0,0,2,0,26,1,1,1,1 287 | 285,0,3,1,2,0,1,1,19,0,1,1,1 288 | 286,1,3,1,1,0,1,0,23,0,1,1,1 289 | 287,0,3,1,1,0,0,0,20,0,1,1,1 290 | 288,1,2,1,2,0,1,0,20,0,1,1,1 291 | 289,1,3,0,1,0,0,2,20,0,1,1,2 292 | 290,1,1,0,1,0,3,0,28,0,1,1,2 293 | 291,1,1,0,1,0,3,1,39,1,2,0,3 294 | 292,0,2,1,2,0,1,1,22,1,1,1,1 295 | 293,0,3,0,1,0,1,0,19,0,1,1,2 296 | 294,0,3,1,1,0,0,0,16,0,1,1,1 297 | 295,0,1,1,2,0,2,1,17,0,1,1,1 298 | 296,0,3,1,1,0,0,1,18,0,1,1,1 299 | 297,0,1,0,0,2,3,0,28,1,4,0,2 300 | 298,1,1,1,1,0,2,0,21,1,1,1,1 301 | 299,1,1,0,3,1,3,1,47,1,2,0,3 302 | 300,1,3,0,1,0,0,2,40,0,1,1,2 303 | 301,1,3,1,1,0,2,2,18,0,3,0,1 304 | 302,0,3,1,1,0,0,0,31,0,1,1,1 305 | 303,1,2,0,2,0,1,2,19,1,1,1,2 306 | 304,0,3,1,1,0,1,0,33,0,1,1,1 307 | 305,1,1,1,0,2,3,0,30,1,4,0,4 308 | 306,1,1,0,2,0,3,1,23,0,1,1,2 309 | 307,1,1,0,1,0,3,1,82,1,2,0,3 310 | 308,0,2,1,1,0,2,1,19,0,2,0,1 311 | 309,1,1,0,1,0,3,1,30,1,1,1,2 312 | 310,1,1,0,1,0,3,1,30,1,1,1,2 313 | 311,1,1,0,1,2,3,1,26,1,5,0,2 314 | 312,0,2,0,1,1,2,0,37,0,3,0,3 315 | 313,0,3,1,1,0,0,0,22,0,1,1,1 316 | 314,0,2,1,2,1,2,0,18,0,3,0,1 317 | 315,1,3,0,1,0,0,0,31,0,1,1,2 318 | 316,1,2,0,1,0,2,0,35,0,2,0,3 319 | 317,0,2,1,3,0,1,0,20,0,1,1,5 320 | 318,1,1,0,1,2,3,0,24,1,3,0,2 321 | 319,1,1,0,2,1,3,1,56,1,3,0,3 322 | 320,0,3,1,1,0,0,0,18,0,1,1,1 323 | 321,0,3,1,1,0,0,0,16,0,1,1,1 324 | 322,1,2,0,1,0,1,2,25,0,1,1,2 325 | 323,1,2,0,1,1,2,0,51,0,3,0,3 326 | 324,0,3,1,1,2,3,0,24,0,11,0,1 327 | 325,1,1,0,2,0,3,1,24,1,1,1,2 328 | 326,0,3,1,3,0,0,0,25,0,1,1,1 329 | 327,1,2,0,2,0,1,0,23,1,1,1,3 330 | 328,1,3,0,1,1,2,0,46,0,3,0,3 331 | 329,1,1,0,0,1,3,1,28,1,2,0,2 332 | 330,1,3,0,2,0,2,2,18,0,3,0,2 333 | 331,0,1,1,2,0,2,0,19,1,1,1,1 334 | 332,0,1,1,2,1,3,0,25,1,2,0,1 335 | 333,0,3,1,0,0,2,0,31,0,3,0,1 336 | 334,1,1,0,2,0,3,0,50,0,2,0,3 337 | 335,0,3,1,1,0,0,0,18,0,1,1,1 338 | 336,0,1,1,1,0,3,0,25,1,2,0,1 339 | 337,1,1,0,2,0,3,1,31,1,1,1,2 340 | 338,1,3,1,2,0,1,0,21,0,1,1,1 341 | 339,0,1,1,2,0,3,0,28,1,1,1,1 342 | 340,1,2,1,0,1,2,0,30,1,3,0,4 343 | 341,1,1,0,1,2,3,0,30,1,6,0,2 344 | 342,0,2,1,1,0,1,0,26,0,1,1,1 345 | 343,0,2,1,1,0,1,0,42,0,1,1,1 346 | 344,0,2,1,2,0,1,0,23,0,1,1,1 347 | 345,1,2,0,1,0,1,0,29,1,1,1,2 348 | 346,1,2,0,2,0,1,0,25,0,1,1,2 349 | 347,1,3,0,0,0,2,0,41,0,2,0,3 350 | 348,1,3,1,0,1,2,0,38,0,3,0,4 351 | 349,0,3,1,2,0,1,0,16,0,1,1,1 352 | 350,0,3,1,1,0,1,0,22,0,1,1,1 353 | 351,0,1,1,1,0,3,0,38,1,1,1,1 354 | 352,0,3,1,0,1,0,1,18,0,3,0,1 355 | 353,0,3,1,1,0,2,0,25,0,2,0,1 356 | 354,0,3,1,2,0,0,1,17,0,1,1,1 357 | 355,0,3,1,1,0,1,0,27,0,1,1,1 358 | 356,1,1,0,1,1,3,0,27,1,2,0,2 359 | 357,0,2,0,2,0,1,0,25,0,1,1,2 360 | 358,1,3,0,1,0,0,2,20,0,1,1,2 361 | 359,1,3,0,2,0,0,2,33,0,1,1,2 362 | 360,0,3,1,2,4,2,0,18,0,6,0,1 363 | 361,0,2,1,1,0,2,1,25,0,2,0,1 364 | 362,0,3,0,2,1,2,1,31,0,2,0,3 365 | 363,0,3,1,2,0,0,0,15,0,1,1,1 366 | 364,0,3,1,1,0,2,2,19,0,2,0,1 367 | 365,0,3,1,1,0,0,0,30,0,1,1,1 368 | 366,1,1,0,3,0,3,1,48,1,2,0,3 369 | 367,1,3,0,1,0,0,1,30,0,1,1,3 370 | 368,1,3,0,1,0,0,2,19,0,1,1,2 371 | 
369,1,1,0,1,0,3,1,29,1,1,1,3 372 | 370,1,1,1,1,0,3,1,27,1,2,0,1 373 | 371,0,3,1,1,0,0,0,25,0,2,0,1 374 | 372,0,3,1,1,0,1,0,26,0,1,1,1 375 | 373,0,1,1,1,0,3,1,19,0,1,1,1 376 | 374,0,3,0,0,1,2,0,26,0,5,0,2 377 | 375,1,1,0,1,0,3,1,37,0,2,0,3 378 | 376,1,3,0,1,0,0,0,31,0,1,1,2 379 | 377,0,1,1,1,2,3,1,25,1,3,0,1 380 | 378,0,3,1,1,0,0,1,19,0,1,1,1 381 | 379,0,3,1,1,0,0,0,27,0,1,1,1 382 | 380,1,1,0,2,0,3,1,21,0,1,1,2 383 | 381,1,3,0,0,2,2,1,27,0,3,0,2 384 | 382,0,3,1,1,0,1,0,18,0,1,1,1 385 | 383,1,1,0,2,0,3,0,51,0,2,0,3 386 | 384,0,3,1,2,0,0,0,22,0,1,1,1 387 | 385,0,2,1,1,0,3,0,25,0,1,1,1 388 | 386,0,3,1,0,2,3,0,31,0,8,0,4 389 | 387,1,2,0,2,0,1,0,16,0,1,1,2 390 | 388,0,3,1,1,0,0,2,20,0,1,1,1 391 | 389,1,2,0,1,0,1,1,21,0,1,1,2 392 | 390,1,1,1,2,2,3,0,26,1,4,0,1 393 | 391,1,3,1,1,0,0,0,22,0,1,1,1 394 | 392,0,3,1,1,0,1,0,28,0,3,0,1 395 | 393,1,1,0,1,0,3,1,22,1,2,0,2 396 | 394,1,3,0,1,2,2,0,51,1,3,0,3 397 | 395,0,3,1,1,0,0,0,19,0,1,1,1 398 | 396,0,3,0,1,0,0,0,19,0,1,1,2 399 | 397,0,2,1,2,0,2,0,23,0,1,1,1 400 | 398,0,2,1,1,0,1,0,16,0,1,1,5 401 | 399,1,2,0,1,0,1,0,32,0,1,1,3 402 | 400,1,3,1,2,0,1,0,18,0,1,1,1 403 | 401,0,3,1,1,0,1,0,15,0,1,1,1 404 | 402,0,3,0,1,0,1,0,24,0,2,0,2 405 | 403,0,3,1,1,0,2,0,30,0,2,0,1 406 | 404,0,3,0,1,0,1,0,23,0,1,1,2 407 | 405,0,2,1,2,0,2,0,18,0,2,0,1 408 | 406,0,3,1,3,0,0,0,32,0,1,1,1 409 | 407,1,2,1,0,1,2,0,30,0,3,0,4 410 | 408,0,3,1,1,0,0,0,33,0,1,1,1 411 | 409,0,3,0,1,1,2,0,18,0,5,0,2 412 | 410,0,3,1,2,0,0,0,18,0,1,1,1 413 | 411,0,3,1,2,0,0,2,15,0,1,1,1 414 | 412,1,1,0,2,0,3,2,22,1,2,0,2 415 | 413,0,2,1,2,0,0,0,30,0,1,1,1 416 | 414,1,3,1,2,0,1,0,25,0,1,1,1 417 | 415,0,3,0,0,0,1,0,39,0,1,1,3 418 | 416,1,2,0,2,1,3,0,47,0,3,0,3 419 | 417,1,2,0,1,2,1,0,29,0,3,0,2 420 | 418,0,2,1,1,0,1,0,26,0,1,1,1 421 | 419,0,3,0,0,2,2,0,25,0,3,0,2 422 | 420,0,3,1,0,0,0,1,22,0,1,1,1 423 | 421,0,3,1,1,0,0,2,19,0,1,1,1 424 | 422,0,3,1,1,0,0,0,18,0,1,1,1 425 | 423,0,3,0,1,1,1,0,54,0,3,0,3 426 | 424,0,3,1,1,1,2,0,27,0,3,0,1 427 | 425,0,3,1,1,0,0,0,22,0,1,1,1 428 | 426,1,2,0,1,0,2,0,43,0,2,0,3 429 | 427,1,2,0,1,0,2,0,67,0,1,1,2 430 | 428,0,3,1,0,0,0,2,16,0,1,1,1 431 | 429,1,3,1,1,0,1,0,34,1,1,1,1 432 | 430,1,1,1,1,0,2,0,41,1,1,1,1 433 | 431,1,3,0,1,0,2,0,49,0,2,0,3 434 | 432,1,2,0,2,0,2,0,51,0,2,0,3 435 | 433,0,3,1,1,0,0,0,26,0,1,1,1 436 | 434,0,1,1,3,0,3,0,25,1,2,0,1 437 | 435,1,1,0,0,2,3,0,25,1,4,0,2 438 | 436,0,3,0,1,2,3,0,36,0,5,0,2 439 | 437,1,2,0,1,3,2,0,37,0,6,0,3 440 | 438,0,1,1,3,4,3,0,17,1,6,0,1 441 | 439,0,2,1,1,0,1,0,38,0,1,1,1 442 | 440,1,2,0,2,1,2,0,43,0,3,0,3 443 | 441,0,3,1,1,0,1,0,15,0,1,1,1 444 | 442,0,3,1,1,0,0,0,25,0,2,0,1 445 | 443,1,2,0,1,0,1,0,25,0,1,1,2 446 | 444,1,3,1,1,0,1,0,33,0,1,1,1 447 | 445,1,1,1,0,2,3,0,25,1,3,0,4 448 | 446,1,2,0,0,1,2,0,33,0,2,0,2 449 | 447,1,1,1,2,0,2,0,27,0,1,1,1 450 | 448,1,3,0,0,1,2,1,30,0,4,0,2 451 | 449,1,1,1,3,0,2,0,30,1,1,1,5 452 | 450,0,2,1,2,2,2,0,21,0,4,0,1 453 | 451,0,3,1,2,0,2,0,31,0,2,0,1 454 | 452,0,1,1,1,0,2,1,31,1,1,1,1 455 | 453,1,1,1,3,0,3,1,24,1,2,0,1 456 | 454,0,3,1,2,0,1,0,19,0,1,1,1 457 | 455,1,3,1,1,0,0,1,18,0,1,1,1 458 | 456,0,1,1,4,0,2,0,25,1,1,1,1 459 | 457,1,1,0,1,0,3,0,33,1,2,0,3 460 | 458,1,2,0,3,0,1,0,19,0,1,1,2 461 | 459,0,3,1,1,0,0,2,21,0,1,1,1 462 | 460,1,1,1,2,0,2,0,19,1,1,1,1 463 | 461,0,3,1,2,0,1,0,19,0,1,1,1 464 | 462,0,1,1,2,0,3,0,17,1,1,1,1 465 | 463,0,2,1,2,0,1,0,28,0,1,1,1 466 | 464,0,3,1,2,0,1,0,18,0,1,1,1 467 | 465,0,3,1,2,0,0,0,31,0,1,1,1 468 | 466,0,2,1,2,0,0,0,21,0,1,1,1 469 | 467,0,1,1,3,0,2,0,26,0,1,1,1 470 | 468,0,3,1,1,0,0,2,18,0,1,1,1 471 | 469,1,3,0,0,1,2,1,29,0,4,0,2 472 | 
470,0,3,1,2,0,0,0,17,0,1,1,1 473 | 471,0,3,1,2,0,1,0,15,0,1,1,1 474 | 472,1,2,0,2,2,2,0,39,0,4,0,3 475 | 473,1,2,0,1,0,1,1,44,1,1,1,3 476 | 474,0,3,0,1,0,1,0,27,0,1,1,2 477 | 475,0,1,1,1,0,3,0,27,1,1,1,1 478 | 476,0,2,1,2,0,2,0,23,0,2,0,1 479 | 477,0,3,1,1,0,0,0,25,0,2,0,1 480 | 478,0,3,1,1,0,0,0,25,0,1,1,1 481 | 479,1,3,0,0,1,1,0,24,0,2,0,2 482 | 480,0,3,1,0,2,3,0,30,0,8,0,4 483 | 481,0,2,1,2,0,0,0,32,0,1,1,1 484 | 482,0,3,1,3,0,1,0,24,0,1,1,1 485 | 483,1,3,0,3,0,1,0,22,0,1,1,3 486 | 484,1,1,1,1,0,3,1,23,1,2,0,1 487 | 485,0,3,0,1,1,2,0,22,0,5,0,2 488 | 486,1,1,0,2,0,3,0,47,1,2,0,3 489 | 487,0,1,1,3,0,2,1,23,1,1,1,1 490 | 488,0,3,1,1,0,1,0,29,0,1,1,1 491 | 489,1,3,1,0,1,2,0,37,0,3,0,4 492 | 490,0,3,1,1,0,2,0,36,0,2,0,1 493 | 491,0,3,1,1,0,0,0,19,0,1,1,1 494 | 492,0,1,1,3,0,2,0,26,1,1,1,1 495 | 493,0,1,1,4,0,3,1,23,0,1,1,1 496 | 494,0,3,1,1,0,1,0,26,0,1,1,1 497 | 495,0,3,1,1,0,2,1,21,0,1,1,1 498 | 496,1,1,0,3,0,3,1,30,1,2,0,2 499 | 497,0,3,1,2,0,2,0,31,0,1,1,1 500 | 498,0,1,0,1,2,3,0,47,1,4,0,3 501 | 499,0,3,1,1,0,0,0,18,0,1,1,1 502 | 500,0,3,1,1,0,1,0,16,0,1,1,1 503 | 501,0,3,0,1,0,0,2,19,0,1,1,2 504 | 502,0,3,0,2,0,0,2,30,0,1,1,2 505 | 503,0,3,0,2,0,1,0,30,0,1,1,2 506 | 504,1,1,0,0,0,3,0,21,1,1,1,2 507 | 505,0,1,1,1,0,3,1,42,1,2,0,1 508 | 506,1,2,0,2,2,2,0,45,0,3,0,3 509 | 507,1,1,1,2,0,2,0,45,0,1,1,1 510 | 508,0,3,1,1,0,2,0,24,0,1,1,1 511 | 509,1,3,1,1,0,3,0,14,0,1,1,1 512 | 510,1,3,1,1,0,0,2,24,0,1,1,1 513 | 511,0,3,1,2,0,1,0,17,0,1,1,1 514 | 512,1,1,1,2,0,2,0,25,1,1,1,1 515 | 513,1,1,0,3,0,3,1,46,0,2,0,3 516 | 514,0,3,1,1,0,0,0,17,0,1,1,1 517 | 515,0,1,1,2,0,3,0,28,1,1,1,1 518 | 516,1,2,0,2,0,1,0,28,1,1,1,3 519 | 517,0,3,1,2,0,2,2,17,0,1,1,1 520 | 518,1,2,0,2,0,2,0,52,0,2,0,3 521 | 519,0,3,1,1,0,0,0,19,0,1,1,1 522 | 520,1,1,0,1,0,3,0,21,1,1,1,2 523 | 521,0,3,1,1,0,0,0,15,0,1,1,1 524 | 522,0,3,1,1,0,0,1,18,0,1,1,1 525 | 523,1,1,0,2,1,3,1,47,1,2,0,3 526 | 524,0,3,1,2,0,0,1,17,0,1,1,1 527 | 525,0,3,1,2,0,0,2,18,0,1,1,1 528 | 526,1,2,0,3,0,1,0,20,0,1,1,2 529 | 527,0,1,1,2,0,3,0,18,1,1,1,1 530 | 528,0,3,1,2,0,1,0,25,0,1,1,1 531 | 529,0,2,1,1,1,1,0,27,0,4,0,1 532 | 530,1,2,0,0,1,2,0,24,0,3,0,2 533 | 531,0,3,1,1,0,0,1,17,0,1,1,1 534 | 532,0,3,1,1,1,0,1,20,0,3,0,1 535 | 533,1,3,0,1,2,2,1,38,0,3,0,3 536 | 534,0,3,0,1,0,1,0,19,0,1,1,2 537 | 535,1,2,0,0,2,2,0,22,0,3,0,2 538 | 536,0,1,1,2,0,2,0,33,1,1,1,5 539 | 537,1,1,0,1,0,3,1,19,0,1,1,2 540 | 538,0,3,1,1,0,2,0,24,0,1,1,1 541 | 539,1,1,0,1,2,3,1,34,1,3,0,2 542 | 540,1,1,0,2,2,3,0,23,1,3,0,2 543 | 541,0,3,0,0,2,3,0,36,0,7,0,2 544 | 542,0,3,0,0,2,3,0,33,0,7,0,2 545 | 543,1,2,1,1,0,2,0,17,0,2,0,1 546 | 544,0,1,1,3,0,3,1,26,1,2,0,1 547 | 545,0,1,1,3,0,2,0,28,0,1,1,1 548 | 546,1,2,0,1,0,2,0,33,0,2,0,3 549 | 547,1,2,1,1,0,1,1,26,0,1,1,1 550 | 548,0,3,1,2,1,2,0,25,0,3,0,1 551 | 549,1,2,1,0,1,3,0,30,0,3,0,4 552 | 550,1,1,1,1,2,3,1,27,1,3,0,1 553 | 551,0,2,1,1,0,2,0,27,0,1,1,1 554 | 552,0,3,1,1,0,0,2,20,0,1,1,1 555 | 553,1,3,1,1,0,0,1,33,0,1,1,1 556 | 554,1,3,0,1,0,0,0,18,0,1,1,2 557 | 555,0,1,1,3,0,2,0,18,0,1,1,1 558 | 556,1,1,0,2,0,3,1,65,1,2,0,5 559 | 557,0,1,1,1,0,3,1,19,0,1,1,1 560 | 558,1,1,0,2,1,3,0,38,1,3,0,3 561 | 559,1,3,0,2,0,2,0,44,0,2,0,3 562 | 560,0,3,1,1,0,0,2,24,0,1,1,1 563 | 561,0,3,1,2,0,0,0,17,0,1,1,1 564 | 562,0,2,1,1,0,1,0,26,0,1,1,1 565 | 563,0,3,1,1,0,1,0,17,0,1,1,1 566 | 564,0,3,0,2,0,1,0,30,0,1,1,2 567 | 565,0,3,1,1,0,2,0,20,0,3,0,1 568 | 566,0,3,1,1,0,0,0,20,0,1,1,1 569 | 567,0,3,0,1,4,2,0,43,0,5,0,3 570 | 568,0,3,1,2,0,0,1,19,0,1,1,1 571 | 569,1,3,1,1,0,0,0,17,0,1,1,1 572 | 570,1,2,1,3,0,1,0,18,0,1,1,1 573 | 
571,1,1,0,3,0,3,0,45,1,3,0,3 574 | 572,1,1,1,2,0,2,0,32,1,1,1,1 575 | 573,1,3,0,1,0,0,2,17,0,1,1,2 576 | 574,0,3,1,0,0,1,0,28,0,1,1,1 577 | 575,0,3,1,1,0,2,0,20,0,1,1,1 578 | 576,1,2,0,2,0,1,0,20,0,1,1,2 579 | 577,1,1,0,2,0,3,0,41,1,2,0,3 580 | 578,0,3,0,1,0,2,1,32,0,2,0,3 581 | 579,1,3,1,1,0,1,0,19,0,1,1,1 582 | 580,1,2,0,1,1,2,0,27,0,3,0,2 583 | 581,1,1,0,2,1,3,1,52,1,3,0,3 584 | 582,0,2,1,3,0,2,0,26,0,1,1,1 585 | 583,0,1,1,2,0,3,1,19,1,1,1,1 586 | 584,0,3,1,2,0,1,1,19,0,1,1,1 587 | 585,1,1,0,1,2,3,0,19,1,3,0,2 588 | 586,0,2,1,2,0,2,0,23,0,1,1,1 589 | 587,1,1,1,3,1,3,1,32,1,3,0,1 590 | 588,0,3,1,1,0,1,0,21,0,1,1,1 591 | 589,0,3,1,0,0,1,0,19,0,1,1,1 592 | 590,0,3,1,2,0,0,0,20,0,1,1,1 593 | 591,1,1,0,3,0,3,1,47,1,2,0,3 594 | 592,0,3,1,2,0,0,0,26,0,1,1,1 595 | 593,0,3,0,1,2,0,2,18,0,3,0,2 596 | 594,0,2,1,2,0,2,0,23,0,2,0,1 597 | 595,0,3,1,2,1,2,0,27,0,3,0,1 598 | 596,1,2,0,1,0,3,0,26,0,1,1,2 599 | 597,0,3,1,3,0,0,0,19,0,1,1,1 600 | 598,0,3,1,2,0,0,1,17,0,1,1,1 601 | 599,1,1,1,3,0,3,1,44,1,2,0,5 602 | 600,1,2,0,1,1,2,0,51,0,4,0,3 603 | 601,0,3,1,2,0,0,0,20,0,1,1,1 604 | 602,0,1,1,2,0,3,0,25,0,1,1,1 605 | 603,0,3,1,2,0,1,0,25,0,1,1,1 606 | 604,1,1,1,2,0,2,1,31,0,1,1,1 607 | 605,0,3,1,2,0,2,0,29,0,2,0,1 608 | 606,0,3,1,1,0,0,0,17,0,1,1,1 609 | 607,1,1,1,1,0,2,0,27,0,1,1,1 610 | 608,1,2,0,1,2,3,1,53,0,4,0,3 611 | 609,1,1,0,2,0,3,0,25,1,1,1,2 612 | 610,0,3,0,2,5,3,0,57,0,7,0,3 613 | 611,0,3,1,1,0,0,0,21,0,1,1,1 614 | 612,1,3,0,2,0,2,2,27,0,2,0,2 615 | 613,0,3,1,1,0,0,2,16,0,1,1,1 616 | 614,0,3,1,2,0,1,0,31,0,1,1,1 617 | 615,1,2,0,1,2,3,0,19,0,4,0,2 618 | 616,0,3,1,2,1,1,0,25,0,3,0,1 619 | 617,0,3,0,1,0,2,0,47,0,2,0,3 620 | 618,1,2,0,0,1,3,0,27,1,4,0,2 621 | 619,0,2,1,1,0,1,0,19,0,1,1,1 622 | 620,0,3,1,1,0,2,1,19,0,2,0,1 623 | 621,1,1,1,2,0,3,0,28,1,2,0,1 624 | 622,1,3,1,1,1,2,1,16,0,3,0,1 625 | 623,0,3,1,1,0,0,0,27,0,1,1,1 626 | 624,0,3,1,1,0,2,0,27,0,1,1,1 627 | 625,0,1,1,3,0,3,0,21,1,1,1,1 628 | 626,0,2,1,3,0,1,2,30,0,1,1,5 629 | 627,1,1,0,1,0,3,0,29,1,1,1,2 630 | 628,0,3,1,1,0,0,0,25,0,1,1,1 631 | 629,0,3,1,1,0,0,2,24,0,1,1,1 632 | 630,1,1,1,4,0,2,0,36,1,1,1,1 633 | 631,0,3,1,3,0,0,0,27,0,1,1,1 634 | 632,1,1,1,1,0,2,1,25,1,1,1,5 635 | 633,0,1,1,2,0,0,0,29,0,1,1,1 636 | 634,0,3,0,0,2,2,0,18,0,6,0,2 637 | 635,1,2,0,1,0,1,0,17,0,1,1,2 638 | 636,0,3,1,1,0,1,0,26,0,1,1,1 639 | 637,0,2,1,1,1,2,0,19,0,3,0,1 640 | 638,0,3,0,2,5,3,0,38,0,6,0,3 641 | 639,0,3,1,1,0,2,0,26,0,2,0,1 642 | 640,0,3,1,1,0,0,0,22,0,1,1,1 643 | 641,1,1,0,1,0,3,1,20,1,1,1,2 644 | 642,0,3,0,0,2,2,0,29,0,6,0,2 645 | 643,1,3,1,2,0,3,0,15,0,1,1,1 646 | 644,1,3,0,0,1,2,1,22,0,4,0,2 647 | 645,1,1,1,2,0,3,1,25,1,2,0,1 648 | 646,0,3,1,1,0,0,0,17,0,1,1,1 649 | 647,1,1,1,3,0,3,1,35,1,1,1,5 650 | 648,0,3,1,1,0,0,0,18,0,1,1,1 651 | 649,1,3,0,1,0,0,0,31,0,1,1,2 652 | 650,0,3,1,1,0,0,0,17,0,1,1,1 653 | 651,1,2,0,1,1,2,0,19,0,2,0,2 654 | 652,0,3,1,1,0,1,0,30,0,1,1,1 655 | 653,1,3,0,2,0,0,2,29,0,1,1,2 656 | 654,0,3,0,1,0,0,2,28,0,1,1,2 657 | 655,0,2,1,1,0,3,0,25,0,3,0,1 658 | 656,0,3,1,1,0,0,0,21,0,1,1,1 659 | 657,0,3,0,1,1,2,2,29,0,3,0,3 660 | 658,0,2,1,1,0,1,0,28,0,1,1,1 661 | 659,0,1,1,3,2,3,1,26,1,3,0,1 662 | 660,1,1,1,3,0,3,0,29,0,3,0,5 663 | 661,0,3,1,2,0,0,1,17,0,1,1,1 664 | 662,0,1,1,2,0,2,0,26,1,1,1,1 665 | 663,0,3,1,2,0,0,0,16,0,1,1,1 666 | 664,1,3,1,1,0,1,0,27,0,2,0,1 667 | 665,0,2,1,1,0,3,0,18,0,3,0,1 668 | 666,0,2,1,1,0,1,0,27,0,1,1,1 669 | 667,0,3,1,1,0,0,0,26,0,1,1,1 670 | 668,0,3,1,2,0,1,0,15,0,1,1,1 671 | 669,1,1,0,1,0,3,0,49,1,2,0,3 672 | 670,1,2,0,2,1,3,0,61,0,3,0,3 673 | 671,0,1,1,1,0,3,0,22,1,2,0,1 674 | 
672,0,2,1,4,0,1,0,27,0,1,1,1 675 | 673,1,2,1,1,0,1,0,21,0,1,1,1 676 | 674,0,2,1,2,0,0,0,26,0,1,1,1 677 | 675,0,3,1,1,0,0,0,30,0,1,1,1 678 | 676,0,3,1,1,0,1,0,29,0,1,1,1 679 | 677,1,3,0,1,0,1,0,23,0,1,1,2 680 | 678,0,3,0,2,6,3,0,39,0,8,0,3 681 | 679,1,1,1,2,1,3,1,34,1,2,0,1 682 | 680,0,3,0,1,0,1,2,19,0,1,1,2 683 | 681,1,1,1,1,0,3,1,18,1,1,1,1 684 | 682,0,3,1,1,0,1,0,27,0,1,1,1 685 | 683,0,3,1,0,2,3,0,27,0,8,0,1 686 | 684,0,2,1,3,1,3,0,33,0,3,0,1 687 | 685,0,2,1,1,2,3,1,38,0,4,0,1 688 | 686,0,3,1,0,1,3,0,24,0,6,0,1 689 | 687,0,3,1,1,0,1,0,17,0,1,1,1 690 | 688,0,3,1,1,0,0,0,31,0,1,1,1 691 | 689,1,1,0,0,1,3,0,33,1,2,0,2 692 | 690,1,1,1,1,0,3,0,23,1,2,0,1 693 | 691,1,3,0,0,1,1,1,18,0,2,0,2 694 | 692,1,3,1,1,0,3,0,12,0,1,1,1 695 | 693,0,3,1,1,0,0,1,16,0,1,1,1 696 | 694,0,1,1,3,0,2,0,15,0,1,1,5 697 | 695,0,2,1,3,0,1,0,26,0,1,1,1 698 | 696,0,3,1,2,0,1,0,16,0,1,1,1 699 | 697,1,3,0,2,0,0,2,32,0,1,1,2 700 | 698,0,1,1,3,1,3,1,24,1,3,0,1 701 | 699,0,3,1,2,0,0,0,40,1,1,1,1 702 | 700,1,1,0,1,0,3,1,49,1,2,0,3 703 | 701,1,1,1,2,0,2,0,32,1,1,1,1 704 | 702,0,3,0,1,1,2,1,21,0,2,0,2 705 | 703,0,3,1,1,0,0,2,21,0,1,1,1 706 | 704,0,3,1,1,0,0,0,23,0,2,0,1 707 | 705,0,2,1,2,0,2,0,46,0,1,1,1 708 | 706,1,2,0,2,0,1,0,29,0,1,1,3 709 | 707,1,1,1,2,0,2,0,33,1,1,1,1 710 | 708,1,1,0,1,0,3,0,20,0,1,1,2 711 | 709,1,3,1,1,1,2,1,49,0,3,0,4 712 | 710,1,1,0,1,0,3,1,48,1,1,1,2 713 | 711,0,1,1,1,0,2,0,18,1,1,1,1 714 | 712,1,1,1,2,0,3,0,24,1,2,0,1 715 | 713,0,3,1,1,0,1,0,26,0,1,1,1 716 | 714,0,2,1,3,0,1,0,21,0,1,1,1 717 | 715,0,3,1,1,0,0,0,42,1,1,1,1 718 | 716,1,1,0,2,0,3,1,29,1,1,1,2 719 | 717,1,2,0,1,0,1,0,35,1,1,1,2 720 | 718,0,3,1,2,0,2,2,19,0,1,1,1 721 | 719,0,3,1,2,0,0,0,28,0,1,1,1 722 | 720,1,2,0,0,1,3,0,33,0,2,0,2 723 | 721,0,3,1,1,0,0,0,25,0,2,0,1 724 | 722,0,2,1,2,0,1,0,28,0,1,1,1 725 | 723,0,2,1,3,0,1,0,23,0,1,1,1 726 | 724,1,1,1,1,0,3,0,29,1,2,0,1 727 | 725,0,3,1,1,0,1,0,19,0,1,1,1 728 | 726,1,2,0,1,0,2,0,43,0,4,0,3 729 | 727,1,3,0,1,0,0,2,24,0,1,1,2 730 | 728,0,2,1,1,0,2,0,31,0,2,0,1 731 | 729,0,3,0,1,0,1,0,29,0,2,0,2 732 | 730,1,1,0,1,0,3,0,29,1,1,1,2 733 | 731,0,3,1,0,0,2,1,24,0,1,1,1 734 | 732,0,2,1,1,0,0,0,20,0,1,1,1 735 | 733,0,2,1,1,0,1,0,26,0,1,1,1 736 | 734,0,2,1,1,0,1,0,28,0,1,1,1 737 | 735,0,3,1,1,0,2,0,20,0,1,1,1 738 | 736,0,3,0,2,3,3,0,39,0,5,0,3 739 | 737,1,1,1,2,0,3,1,22,1,1,1,1 740 | 738,0,3,1,2,0,0,0,18,0,1,1,1 741 | 739,0,3,1,2,0,0,0,18,0,1,1,1 742 | 740,1,1,1,2,0,2,0,27,1,1,1,1 743 | 741,0,1,1,2,0,3,0,29,1,2,0,1 744 | 742,1,1,0,1,2,3,1,37,1,5,0,2 745 | 743,0,3,1,1,0,2,0,17,0,2,0,1 746 | 744,1,3,1,1,0,1,0,18,0,1,1,1 747 | 745,0,1,1,4,1,3,0,28,1,3,0,5 748 | 746,0,3,1,0,1,2,0,27,0,3,0,1 749 | 747,1,2,0,1,0,1,0,21,0,1,1,2 750 | 748,0,1,1,1,0,3,0,25,1,2,0,1 751 | 749,0,3,1,1,0,0,2,23,0,1,1,1 752 | 750,1,2,0,0,1,2,0,17,0,3,0,2 753 | 751,1,3,1,0,1,1,0,19,1,2,0,4 754 | 752,0,3,1,2,0,1,0,32,0,1,1,1 755 | 753,0,3,1,1,0,0,0,18,0,1,1,1 756 | 754,1,2,0,2,2,3,0,32,0,4,0,3 757 | 755,1,2,1,0,1,2,0,25,0,3,0,4 758 | 756,0,3,1,1,0,0,0,28,0,1,1,1 759 | 757,0,2,1,1,0,1,0,24,0,1,1,1 760 | 758,0,3,1,2,0,1,0,28,0,1,1,1 761 | 759,1,1,0,2,0,3,0,56,1,1,1,5 762 | 760,0,3,1,1,0,2,0,18,0,1,1,1 763 | 761,0,3,1,2,0,0,0,30,0,1,1,1 764 | 762,1,3,1,1,0,0,1,21,0,1,1,1 765 | 763,1,1,0,2,2,3,0,41,1,4,0,3 766 | 764,0,3,1,0,0,0,0,22,0,1,1,1 767 | 765,1,1,0,3,0,3,0,36,1,2,0,3 768 | 766,0,1,1,2,0,3,1,25,0,1,1,5 769 | 767,0,3,0,1,0,0,2,18,0,1,1,2 770 | 768,0,3,1,1,0,2,2,19,0,2,0,1 771 | 769,0,3,1,1,0,1,0,32,0,1,1,1 772 | 770,0,3,1,1,0,1,0,22,0,1,1,1 773 | 771,0,3,1,2,0,0,0,23,0,1,1,1 774 | 772,0,2,0,3,0,1,0,17,1,1,1,3 775 | 
773,0,3,1,0,0,0,1,15,0,1,1,1 776 | 774,1,2,0,3,3,2,0,37,0,5,0,3 777 | 775,0,3,1,1,0,0,0,39,0,1,1,1 778 | 776,0,3,1,1,0,0,2,16,1,1,1,1 779 | 777,1,3,0,0,0,1,0,29,0,1,1,2 780 | 778,0,3,1,2,0,0,2,23,0,1,1,1 781 | 779,1,1,0,2,1,3,0,53,1,2,0,3 782 | 780,1,3,0,0,0,0,1,20,0,1,1,2 783 | 781,1,1,0,1,0,3,0,41,1,2,0,3 784 | 782,0,1,1,1,0,2,0,22,1,1,1,1 785 | 783,0,3,1,2,2,2,0,22,0,4,0,1 786 | 784,0,3,1,1,0,0,0,16,0,1,1,1 787 | 785,0,3,1,1,0,0,0,34,0,1,1,1 788 | 786,1,3,0,1,0,0,0,25,0,1,1,2 789 | 787,0,3,1,0,1,2,2,25,0,6,0,4 790 | 788,1,3,1,0,2,2,0,26,0,4,0,4 791 | 789,0,1,1,2,0,3,1,24,1,1,1,1 792 | 790,0,3,1,1,0,0,2,24,0,1,1,1 793 | 791,0,2,1,0,0,2,0,19,0,1,1,1 794 | 792,0,3,0,1,2,3,0,23,0,11,0,2 795 | 793,0,1,1,1,0,2,1,24,0,1,1,1 796 | 794,0,3,1,1,0,0,0,21,0,1,1,1 797 | 795,0,2,1,2,0,1,0,18,0,1,1,1 798 | 796,1,1,0,3,0,2,0,27,1,1,1,5 799 | 797,1,3,0,1,0,1,0,16,0,1,1,3 800 | 798,0,3,1,1,0,0,1,28,0,1,1,1 801 | 799,0,3,0,1,1,2,0,52,0,3,0,3 802 | 800,0,2,1,2,0,1,0,20,0,1,1,1 803 | 801,1,2,0,1,1,2,0,43,0,3,0,3 804 | 802,1,1,1,0,2,3,0,35,1,4,0,4 805 | 803,1,3,1,0,1,1,1,31,0,2,0,4 806 | 804,1,3,1,1,0,0,0,23,0,1,1,1 807 | 805,0,3,1,1,0,0,0,25,0,1,1,1 808 | 806,0,1,1,2,0,0,0,22,1,1,1,1 809 | 807,0,3,0,1,0,0,0,31,0,1,1,2 810 | 808,0,2,1,2,0,1,0,17,0,1,1,1 811 | 809,1,1,0,2,0,3,0,46,1,2,0,3 812 | 810,0,3,1,1,0,0,0,22,0,1,1,1 813 | 811,0,3,1,2,0,2,0,17,0,1,1,1 814 | 812,0,2,1,2,0,1,0,25,0,1,1,1 815 | 813,0,3,0,0,2,3,0,34,0,7,0,2 816 | 814,0,3,1,1,0,1,0,26,0,1,1,1 817 | 815,0,1,1,0,0,0,0,16,1,1,1,1 818 | 816,0,3,0,1,0,1,0,28,0,1,1,2 819 | 817,0,2,1,1,1,3,1,18,0,3,0,1 820 | 818,0,3,1,2,0,0,0,32,0,1,1,1 821 | 819,0,3,1,0,2,2,0,28,0,6,0,4 822 | 820,1,1,0,3,1,3,0,50,1,3,0,3 823 | 821,1,3,1,1,0,1,0,17,0,1,1,1 824 | 822,0,1,1,2,0,0,0,31,0,1,1,5 825 | 823,1,3,0,1,1,1,0,18,1,2,0,3 826 | 824,0,3,1,0,1,3,0,28,0,6,0,4 827 | 825,0,3,1,1,0,0,2,15,0,1,1,1 828 | 826,0,3,1,1,0,3,0,12,0,1,1,1 829 | 827,1,2,1,0,2,3,1,21,0,3,0,4 830 | 828,1,3,1,1,0,0,2,28,0,1,1,1 831 | 829,1,1,0,3,0,3,0,41,1,1,1,3 832 | 830,1,3,0,0,0,2,1,39,0,2,0,3 833 | 831,1,2,1,0,1,2,0,31,0,3,0,4 834 | 832,0,3,1,1,0,0,1,14,0,1,1,1 835 | 833,0,3,1,1,0,0,0,22,0,1,1,1 836 | 834,0,3,1,1,0,1,0,22,0,1,1,1 837 | 835,1,1,0,2,1,3,1,27,1,3,0,2 838 | 836,0,3,1,1,0,1,0,16,0,1,1,1 839 | 837,0,3,1,1,0,1,0,19,0,1,1,1 840 | 838,1,3,1,1,0,3,0,15,0,1,1,1 841 | 839,1,1,1,1,0,2,1,20,1,1,1,1 842 | 840,0,3,1,1,0,1,0,27,0,1,1,1 843 | 841,0,2,1,0,0,1,0,24,0,1,1,1 844 | 842,1,1,0,1,0,2,1,23,0,1,1,2 845 | 843,0,3,1,2,0,0,1,26,0,1,1,1 846 | 844,0,3,1,1,0,1,0,19,0,1,1,1 847 | 845,0,3,1,2,0,0,0,19,0,1,1,1 848 | 846,0,3,1,0,2,3,0,24,0,11,0,1 849 | 847,0,3,1,2,0,0,1,18,0,1,1,1 850 | 848,0,2,1,1,1,3,0,17,0,2,0,5 851 | 849,1,1,0,1,0,3,1,44,1,2,0,3 852 | 850,0,3,1,0,2,3,0,39,0,7,0,4 853 | 851,0,3,1,4,0,0,0,19,0,1,1,1 854 | 852,0,3,0,0,1,2,1,23,0,3,0,2 855 | 853,1,1,0,0,1,3,0,25,1,2,0,2 856 | 854,0,2,0,2,0,2,0,45,0,2,0,3 857 | 855,1,3,0,1,1,1,0,26,0,2,0,3 858 | 856,1,1,0,2,1,3,0,42,0,3,0,3 859 | 857,1,1,1,3,0,2,0,22,1,1,1,1 860 | 858,1,3,0,1,3,2,1,37,0,4,0,3 861 | 859,0,3,1,2,0,0,1,16,0,1,1,1 862 | 860,0,3,1,2,0,1,0,23,0,3,0,1 863 | 861,0,2,1,1,0,1,0,27,0,2,0,1 864 | 862,1,1,0,2,0,2,0,51,1,1,1,3 865 | 863,0,3,0,1,2,3,0,33,0,11,0,2 866 | 864,0,2,1,1,0,1,0,22,0,1,1,1 867 | 865,1,2,0,2,0,1,0,24,0,1,1,3 868 | 866,1,2,0,1,0,1,1,28,0,2,0,2 869 | 867,0,1,1,1,0,3,0,36,1,1,1,1 870 | 868,0,3,1,1,0,1,0,27,0,1,1,1 871 | 869,1,3,1,0,1,1,0,31,0,3,0,4 872 | 870,0,3,1,1,0,0,0,17,0,1,1,1 873 | 871,1,1,0,2,1,3,0,48,1,3,0,3 874 | 872,0,1,1,2,0,0,0,24,1,1,1,1 875 | 873,0,3,1,2,0,1,0,27,0,1,1,1 876 | 
874,1,2,0,1,0,2,1,37,0,2,0,3 877 | 875,1,3,0,0,0,0,1,32,0,1,1,2 878 | 876,0,3,1,1,0,1,0,29,0,1,1,1 879 | 877,0,3,1,1,0,0,0,20,0,1,1,1 880 | 878,0,3,1,1,0,0,0,18,0,1,1,1 881 | 879,1,1,0,3,1,3,1,45,1,2,0,3 882 | 880,1,2,0,1,1,2,0,44,0,2,0,3 883 | 881,0,3,1,2,0,0,0,18,0,1,1,1 884 | 882,0,3,0,1,0,1,0,28,0,1,1,2 885 | 883,0,2,1,1,0,1,0,29,0,1,1,1 886 | 884,0,3,1,1,0,0,0,22,0,1,1,1 887 | 885,0,3,0,2,5,2,2,36,0,6,0,3 888 | 886,0,2,1,1,0,1,0,21,0,1,1,5 889 | 887,1,1,0,1,0,2,0,28,1,1,1,2 890 | 888,0,3,0,1,2,2,0,40,0,4,0,2 891 | 889,1,1,1,1,0,2,1,21,1,1,1,1 892 | 890,0,3,1,1,0,0,2,19,0,1,1,1 893 | -------------------------------------------------------------------------------- /Model_Optimisation/data/titanic_test.csv: -------------------------------------------------------------------------------- 1 | PassengerId,Pclass,Name,Sex,Age,SibSp,Parch,Ticket,Fare,Cabin,Embarked 2 | 892,3,"Kelly, Mr. James",male,34.5,0,0,330911,7.8292,,Q 3 | 893,3,"Wilkes, Mrs. James (Ellen Needs)",female,47,1,0,363272,7,,S 4 | 894,2,"Myles, Mr. Thomas Francis",male,62,0,0,240276,9.6875,,Q 5 | 895,3,"Wirz, Mr. Albert",male,27,0,0,315154,8.6625,,S 6 | 896,3,"Hirvonen, Mrs. Alexander (Helga E Lindqvist)",female,22,1,1,3101298,12.2875,,S 7 | 897,3,"Svensson, Mr. Johan Cervin",male,14,0,0,7538,9.225,,S 8 | 898,3,"Connolly, Miss. Kate",female,30,0,0,330972,7.6292,,Q 9 | 899,2,"Caldwell, Mr. Albert Francis",male,26,1,1,248738,29,,S 10 | 900,3,"Abrahim, Mrs. Joseph (Sophie Halaut Easu)",female,18,0,0,2657,7.2292,,C 11 | 901,3,"Davies, Mr. John Samuel",male,21,2,0,A/4 48871,24.15,,S 12 | 902,3,"Ilieff, Mr. Ylio",male,,0,0,349220,7.8958,,S 13 | 903,1,"Jones, Mr. Charles Cresson",male,46,0,0,694,26,,S 14 | 904,1,"Snyder, Mrs. John Pillsbury (Nelle Stevenson)",female,23,1,0,21228,82.2667,B45,S 15 | 905,2,"Howard, Mr. Benjamin",male,63,1,0,24065,26,,S 16 | 906,1,"Chaffee, Mrs. Herbert Fuller (Carrie Constance Toogood)",female,47,1,0,W.E.P. 5734,61.175,E31,S 17 | 907,2,"del Carlo, Mrs. Sebastiano (Argenia Genovesi)",female,24,1,0,SC/PARIS 2167,27.7208,,C 18 | 908,2,"Keane, Mr. Daniel",male,35,0,0,233734,12.35,,Q 19 | 909,3,"Assaf, Mr. Gerios",male,21,0,0,2692,7.225,,C 20 | 910,3,"Ilmakangas, Miss. Ida Livija",female,27,1,0,STON/O2. 3101270,7.925,,S 21 | 911,3,"Assaf Khalil, Mrs. Mariana (Miriam"")""",female,45,0,0,2696,7.225,,C 22 | 912,1,"Rothschild, Mr. Martin",male,55,1,0,PC 17603,59.4,,C 23 | 913,3,"Olsen, Master. Artur Karl",male,9,0,1,C 17368,3.1708,,S 24 | 914,1,"Flegenheim, Mrs. Alfred (Antoinette)",female,,0,0,PC 17598,31.6833,,S 25 | 915,1,"Williams, Mr. Richard Norris II",male,21,0,1,PC 17597,61.3792,,C 26 | 916,1,"Ryerson, Mrs. Arthur Larned (Emily Maria Borie)",female,48,1,3,PC 17608,262.375,B57 B59 B63 B66,C 27 | 917,3,"Robins, Mr. Alexander A",male,50,1,0,A/5. 3337,14.5,,S 28 | 918,1,"Ostby, Miss. Helene Ragnhild",female,22,0,1,113509,61.9792,B36,C 29 | 919,3,"Daher, Mr. Shedid",male,22.5,0,0,2698,7.225,,C 30 | 920,1,"Brady, Mr. John Bertram",male,41,0,0,113054,30.5,A21,S 31 | 921,3,"Samaan, Mr. Elias",male,,2,0,2662,21.6792,,C 32 | 922,2,"Louch, Mr. Charles Alexander",male,50,1,0,SC/AH 3085,26,,S 33 | 923,2,"Jefferys, Mr. Clifford Thomas",male,24,2,0,C.A. 31029,31.5,,S 34 | 924,3,"Dean, Mrs. Bertram (Eva Georgetta Light)",female,33,1,2,C.A. 2315,20.575,,S 35 | 925,3,"Johnston, Mrs. Andrew G (Elizabeth Lily"" Watson)""",female,,1,2,W./C. 6607,23.45,,S 36 | 926,1,"Mock, Mr. Philipp Edmund",male,30,1,0,13236,57.75,C78,C 37 | 927,3,"Katavelas, Mr. 
Vassilios (Catavelas Vassilios"")""",male,18.5,0,0,2682,7.2292,,C 38 | 928,3,"Roth, Miss. Sarah A",female,,0,0,342712,8.05,,S 39 | 929,3,"Cacic, Miss. Manda",female,21,0,0,315087,8.6625,,S 40 | 930,3,"Sap, Mr. Julius",male,25,0,0,345768,9.5,,S 41 | 931,3,"Hee, Mr. Ling",male,,0,0,1601,56.4958,,S 42 | 932,3,"Karun, Mr. Franz",male,39,0,1,349256,13.4167,,C 43 | 933,1,"Franklin, Mr. Thomas Parham",male,,0,0,113778,26.55,D34,S 44 | 934,3,"Goldsmith, Mr. Nathan",male,41,0,0,SOTON/O.Q. 3101263,7.85,,S 45 | 935,2,"Corbett, Mrs. Walter H (Irene Colvin)",female,30,0,0,237249,13,,S 46 | 936,1,"Kimball, Mrs. Edwin Nelson Jr (Gertrude Parsons)",female,45,1,0,11753,52.5542,D19,S 47 | 937,3,"Peltomaki, Mr. Nikolai Johannes",male,25,0,0,STON/O 2. 3101291,7.925,,S 48 | 938,1,"Chevre, Mr. Paul Romaine",male,45,0,0,PC 17594,29.7,A9,C 49 | 939,3,"Shaughnessy, Mr. Patrick",male,,0,0,370374,7.75,,Q 50 | 940,1,"Bucknell, Mrs. William Robert (Emma Eliza Ward)",female,60,0,0,11813,76.2917,D15,C 51 | 941,3,"Coutts, Mrs. William (Winnie Minnie"" Treanor)""",female,36,0,2,C.A. 37671,15.9,,S 52 | 942,1,"Smith, Mr. Lucien Philip",male,24,1,0,13695,60,C31,S 53 | 943,2,"Pulbaum, Mr. Franz",male,27,0,0,SC/PARIS 2168,15.0333,,C 54 | 944,2,"Hocking, Miss. Ellen Nellie""""",female,20,2,1,29105,23,,S 55 | 945,1,"Fortune, Miss. Ethel Flora",female,28,3,2,19950,263,C23 C25 C27,S 56 | 946,2,"Mangiavacchi, Mr. Serafino Emilio",male,,0,0,SC/A.3 2861,15.5792,,C 57 | 947,3,"Rice, Master. Albert",male,10,4,1,382652,29.125,,Q 58 | 948,3,"Cor, Mr. Bartol",male,35,0,0,349230,7.8958,,S 59 | 949,3,"Abelseth, Mr. Olaus Jorgensen",male,25,0,0,348122,7.65,F G63,S 60 | 950,3,"Davison, Mr. Thomas Henry",male,,1,0,386525,16.1,,S 61 | 951,1,"Chaudanson, Miss. Victorine",female,36,0,0,PC 17608,262.375,B61,C 62 | 952,3,"Dika, Mr. Mirko",male,17,0,0,349232,7.8958,,S 63 | 953,2,"McCrae, Mr. Arthur Gordon",male,32,0,0,237216,13.5,,S 64 | 954,3,"Bjorklund, Mr. Ernst Herbert",male,18,0,0,347090,7.75,,S 65 | 955,3,"Bradley, Miss. Bridget Delia",female,22,0,0,334914,7.725,,Q 66 | 956,1,"Ryerson, Master. John Borie",male,13,2,2,PC 17608,262.375,B57 B59 B63 B66,C 67 | 957,2,"Corey, Mrs. Percy C (Mary Phyllis Elizabeth Miller)",female,,0,0,F.C.C. 13534,21,,S 68 | 958,3,"Burns, Miss. Mary Delia",female,18,0,0,330963,7.8792,,Q 69 | 959,1,"Moore, Mr. Clarence Bloomfield",male,47,0,0,113796,42.4,,S 70 | 960,1,"Tucker, Mr. Gilbert Milligan Jr",male,31,0,0,2543,28.5375,C53,C 71 | 961,1,"Fortune, Mrs. Mark (Mary McDougald)",female,60,1,4,19950,263,C23 C25 C27,S 72 | 962,3,"Mulvihill, Miss. Bertha E",female,24,0,0,382653,7.75,,Q 73 | 963,3,"Minkoff, Mr. Lazar",male,21,0,0,349211,7.8958,,S 74 | 964,3,"Nieminen, Miss. Manta Josefina",female,29,0,0,3101297,7.925,,S 75 | 965,1,"Ovies y Rodriguez, Mr. Servando",male,28.5,0,0,PC 17562,27.7208,D43,C 76 | 966,1,"Geiger, Miss. Amalie",female,35,0,0,113503,211.5,C130,C 77 | 967,1,"Keeping, Mr. Edwin",male,32.5,0,0,113503,211.5,C132,C 78 | 968,3,"Miles, Mr. Frank",male,,0,0,359306,8.05,,S 79 | 969,1,"Cornell, Mrs. Robert Clifford (Malvina Helen Lamson)",female,55,2,0,11770,25.7,C101,S 80 | 970,2,"Aldworth, Mr. Charles Augustus",male,30,0,0,248744,13,,S 81 | 971,3,"Doyle, Miss. Elizabeth",female,24,0,0,368702,7.75,,Q 82 | 972,3,"Boulos, Master. Akar",male,6,1,1,2678,15.2458,,C 83 | 973,1,"Straus, Mr. Isidor",male,67,1,0,PC 17483,221.7792,C55 C57,S 84 | 974,1,"Case, Mr. Howard Brown",male,49,0,0,19924,26,,S 85 | 975,3,"Demetri, Mr. Marinko",male,,0,0,349238,7.8958,,S 86 | 976,2,"Lamb, Mr. 
John Joseph",male,,0,0,240261,10.7083,,Q 87 | 977,3,"Khalil, Mr. Betros",male,,1,0,2660,14.4542,,C 88 | 978,3,"Barry, Miss. Julia",female,27,0,0,330844,7.8792,,Q 89 | 979,3,"Badman, Miss. Emily Louisa",female,18,0,0,A/4 31416,8.05,,S 90 | 980,3,"O'Donoghue, Ms. Bridget",female,,0,0,364856,7.75,,Q 91 | 981,2,"Wells, Master. Ralph Lester",male,2,1,1,29103,23,,S 92 | 982,3,"Dyker, Mrs. Adolf Fredrik (Anna Elisabeth Judith Andersson)",female,22,1,0,347072,13.9,,S 93 | 983,3,"Pedersen, Mr. Olaf",male,,0,0,345498,7.775,,S 94 | 984,1,"Davidson, Mrs. Thornton (Orian Hays)",female,27,1,2,F.C. 12750,52,B71,S 95 | 985,3,"Guest, Mr. Robert",male,,0,0,376563,8.05,,S 96 | 986,1,"Birnbaum, Mr. Jakob",male,25,0,0,13905,26,,C 97 | 987,3,"Tenglin, Mr. Gunnar Isidor",male,25,0,0,350033,7.7958,,S 98 | 988,1,"Cavendish, Mrs. Tyrell William (Julia Florence Siegel)",female,76,1,0,19877,78.85,C46,S 99 | 989,3,"Makinen, Mr. Kalle Edvard",male,29,0,0,STON/O 2. 3101268,7.925,,S 100 | 990,3,"Braf, Miss. Elin Ester Maria",female,20,0,0,347471,7.8542,,S 101 | 991,3,"Nancarrow, Mr. William Henry",male,33,0,0,A./5. 3338,8.05,,S 102 | 992,1,"Stengel, Mrs. Charles Emil Henry (Annie May Morris)",female,43,1,0,11778,55.4417,C116,C 103 | 993,2,"Weisz, Mr. Leopold",male,27,1,0,228414,26,,S 104 | 994,3,"Foley, Mr. William",male,,0,0,365235,7.75,,Q 105 | 995,3,"Johansson Palmquist, Mr. Oskar Leander",male,26,0,0,347070,7.775,,S 106 | 996,3,"Thomas, Mrs. Alexander (Thamine Thelma"")""",female,16,1,1,2625,8.5167,,C 107 | 997,3,"Holthen, Mr. Johan Martin",male,28,0,0,C 4001,22.525,,S 108 | 998,3,"Buckley, Mr. Daniel",male,21,0,0,330920,7.8208,,Q 109 | 999,3,"Ryan, Mr. Edward",male,,0,0,383162,7.75,,Q 110 | 1000,3,"Willer, Mr. Aaron (Abi Weller"")""",male,,0,0,3410,8.7125,,S 111 | 1001,2,"Swane, Mr. George",male,18.5,0,0,248734,13,F,S 112 | 1002,2,"Stanton, Mr. Samuel Ward",male,41,0,0,237734,15.0458,,C 113 | 1003,3,"Shine, Miss. Ellen Natalia",female,,0,0,330968,7.7792,,Q 114 | 1004,1,"Evans, Miss. Edith Corse",female,36,0,0,PC 17531,31.6792,A29,C 115 | 1005,3,"Buckley, Miss. Katherine",female,18.5,0,0,329944,7.2833,,Q 116 | 1006,1,"Straus, Mrs. Isidor (Rosalie Ida Blun)",female,63,1,0,PC 17483,221.7792,C55 C57,S 117 | 1007,3,"Chronopoulos, Mr. Demetrios",male,18,1,0,2680,14.4542,,C 118 | 1008,3,"Thomas, Mr. John",male,,0,0,2681,6.4375,,C 119 | 1009,3,"Sandstrom, Miss. Beatrice Irene",female,1,1,1,PP 9549,16.7,G6,S 120 | 1010,1,"Beattie, Mr. Thomson",male,36,0,0,13050,75.2417,C6,C 121 | 1011,2,"Chapman, Mrs. John Henry (Sara Elizabeth Lawry)",female,29,1,0,SC/AH 29037,26,,S 122 | 1012,2,"Watt, Miss. Bertha J",female,12,0,0,C.A. 33595,15.75,,S 123 | 1013,3,"Kiernan, Mr. John",male,,1,0,367227,7.75,,Q 124 | 1014,1,"Schabert, Mrs. Paul (Emma Mock)",female,35,1,0,13236,57.75,C28,C 125 | 1015,3,"Carver, Mr. Alfred John",male,28,0,0,392095,7.25,,S 126 | 1016,3,"Kennedy, Mr. John",male,,0,0,368783,7.75,,Q 127 | 1017,3,"Cribb, Miss. Laura Alice",female,17,0,1,371362,16.1,,S 128 | 1018,3,"Brobeck, Mr. Karl Rudolf",male,22,0,0,350045,7.7958,,S 129 | 1019,3,"McCoy, Miss. Alicia",female,,2,0,367226,23.25,,Q 130 | 1020,2,"Bowenur, Mr. Solomon",male,42,0,0,211535,13,,S 131 | 1021,3,"Petersen, Mr. Marius",male,24,0,0,342441,8.05,,S 132 | 1022,3,"Spinner, Mr. Henry John",male,32,0,0,STON/OQ. 369943,8.05,,S 133 | 1023,1,"Gracie, Col. Archibald IV",male,53,0,0,113780,28.5,C51,C 134 | 1024,3,"Lefebre, Mrs. Frank (Frances)",female,,0,4,4133,25.4667,,S 135 | 1025,3,"Thomas, Mr. Charles P",male,,1,0,2621,6.4375,,C 136 | 1026,3,"Dintcheff, Mr. 
Valtcho",male,43,0,0,349226,7.8958,,S 137 | 1027,3,"Carlsson, Mr. Carl Robert",male,24,0,0,350409,7.8542,,S 138 | 1028,3,"Zakarian, Mr. Mapriededer",male,26.5,0,0,2656,7.225,,C 139 | 1029,2,"Schmidt, Mr. August",male,26,0,0,248659,13,,S 140 | 1030,3,"Drapkin, Miss. Jennie",female,23,0,0,SOTON/OQ 392083,8.05,,S 141 | 1031,3,"Goodwin, Mr. Charles Frederick",male,40,1,6,CA 2144,46.9,,S 142 | 1032,3,"Goodwin, Miss. Jessie Allis",female,10,5,2,CA 2144,46.9,,S 143 | 1033,1,"Daniels, Miss. Sarah",female,33,0,0,113781,151.55,,S 144 | 1034,1,"Ryerson, Mr. Arthur Larned",male,61,1,3,PC 17608,262.375,B57 B59 B63 B66,C 145 | 1035,2,"Beauchamp, Mr. Henry James",male,28,0,0,244358,26,,S 146 | 1036,1,"Lindeberg-Lind, Mr. Erik Gustaf (Mr Edward Lingrey"")""",male,42,0,0,17475,26.55,,S 147 | 1037,3,"Vander Planke, Mr. Julius",male,31,3,0,345763,18,,S 148 | 1038,1,"Hilliard, Mr. Herbert Henry",male,,0,0,17463,51.8625,E46,S 149 | 1039,3,"Davies, Mr. Evan",male,22,0,0,SC/A4 23568,8.05,,S 150 | 1040,1,"Crafton, Mr. John Bertram",male,,0,0,113791,26.55,,S 151 | 1041,2,"Lahtinen, Rev. William",male,30,1,1,250651,26,,S 152 | 1042,1,"Earnshaw, Mrs. Boulton (Olive Potter)",female,23,0,1,11767,83.1583,C54,C 153 | 1043,3,"Matinoff, Mr. Nicola",male,,0,0,349255,7.8958,,C 154 | 1044,3,"Storey, Mr. Thomas",male,60.5,0,0,3701,,,S 155 | 1045,3,"Klasen, Mrs. (Hulda Kristina Eugenia Lofqvist)",female,36,0,2,350405,12.1833,,S 156 | 1046,3,"Asplund, Master. Filip Oscar",male,13,4,2,347077,31.3875,,S 157 | 1047,3,"Duquemin, Mr. Joseph",male,24,0,0,S.O./P.P. 752,7.55,,S 158 | 1048,1,"Bird, Miss. Ellen",female,29,0,0,PC 17483,221.7792,C97,S 159 | 1049,3,"Lundin, Miss. Olga Elida",female,23,0,0,347469,7.8542,,S 160 | 1050,1,"Borebank, Mr. John James",male,42,0,0,110489,26.55,D22,S 161 | 1051,3,"Peacock, Mrs. Benjamin (Edith Nile)",female,26,0,2,SOTON/O.Q. 3101315,13.775,,S 162 | 1052,3,"Smyth, Miss. Julia",female,,0,0,335432,7.7333,,Q 163 | 1053,3,"Touma, Master. Georges Youssef",male,7,1,1,2650,15.2458,,C 164 | 1054,2,"Wright, Miss. Marion",female,26,0,0,220844,13.5,,S 165 | 1055,3,"Pearce, Mr. Ernest",male,,0,0,343271,7,,S 166 | 1056,2,"Peruschitz, Rev. Joseph Maria",male,41,0,0,237393,13,,S 167 | 1057,3,"Kink-Heilmann, Mrs. Anton (Luise Heilmann)",female,26,1,1,315153,22.025,,S 168 | 1058,1,"Brandeis, Mr. Emil",male,48,0,0,PC 17591,50.4958,B10,C 169 | 1059,3,"Ford, Mr. Edward Watson",male,18,2,2,W./C. 6608,34.375,,S 170 | 1060,1,"Cassebeer, Mrs. Henry Arthur Jr (Eleanor Genevieve Fosdick)",female,,0,0,17770,27.7208,,C 171 | 1061,3,"Hellstrom, Miss. Hilda Maria",female,22,0,0,7548,8.9625,,S 172 | 1062,3,"Lithman, Mr. Simon",male,,0,0,S.O./P.P. 251,7.55,,S 173 | 1063,3,"Zakarian, Mr. Ortin",male,27,0,0,2670,7.225,,C 174 | 1064,3,"Dyker, Mr. Adolf Fredrik",male,23,1,0,347072,13.9,,S 175 | 1065,3,"Torfa, Mr. Assad",male,,0,0,2673,7.2292,,C 176 | 1066,3,"Asplund, Mr. Carl Oscar Vilhelm Gustafsson",male,40,1,5,347077,31.3875,,S 177 | 1067,2,"Brown, Miss. Edith Eileen",female,15,0,2,29750,39,,S 178 | 1068,2,"Sincock, Miss. Maude",female,20,0,0,C.A. 33112,36.75,,S 179 | 1069,1,"Stengel, Mr. Charles Emil Henry",male,54,1,0,11778,55.4417,C116,C 180 | 1070,2,"Becker, Mrs. Allen Oliver (Nellie E Baumgardner)",female,36,0,3,230136,39,F4,S 181 | 1071,1,"Compton, Mrs. Alexander Taylor (Mary Eliza Ingersoll)",female,64,0,2,PC 17756,83.1583,E45,C 182 | 1072,2,"McCrie, Mr. James Matthew",male,30,0,0,233478,13,,S 183 | 1073,1,"Compton, Mr. Alexander Taylor Jr",male,37,1,1,PC 17756,83.1583,E52,C 184 | 1074,1,"Marvin, Mrs. 
Daniel Warner (Mary Graham Carmichael Farquarson)",female,18,1,0,113773,53.1,D30,S 185 | 1075,3,"Lane, Mr. Patrick",male,,0,0,7935,7.75,,Q 186 | 1076,1,"Douglas, Mrs. Frederick Charles (Mary Helene Baxter)",female,27,1,1,PC 17558,247.5208,B58 B60,C 187 | 1077,2,"Maybery, Mr. Frank Hubert",male,40,0,0,239059,16,,S 188 | 1078,2,"Phillips, Miss. Alice Frances Louisa",female,21,0,1,S.O./P.P. 2,21,,S 189 | 1079,3,"Davies, Mr. Joseph",male,17,2,0,A/4 48873,8.05,,S 190 | 1080,3,"Sage, Miss. Ada",female,,8,2,CA. 2343,69.55,,S 191 | 1081,2,"Veal, Mr. James",male,40,0,0,28221,13,,S 192 | 1082,2,"Angle, Mr. William A",male,34,1,0,226875,26,,S 193 | 1083,1,"Salomon, Mr. Abraham L",male,,0,0,111163,26,,S 194 | 1084,3,"van Billiard, Master. Walter John",male,11.5,1,1,A/5. 851,14.5,,S 195 | 1085,2,"Lingane, Mr. John",male,61,0,0,235509,12.35,,Q 196 | 1086,2,"Drew, Master. Marshall Brines",male,8,0,2,28220,32.5,,S 197 | 1087,3,"Karlsson, Mr. Julius Konrad Eugen",male,33,0,0,347465,7.8542,,S 198 | 1088,1,"Spedden, Master. Robert Douglas",male,6,0,2,16966,134.5,E34,C 199 | 1089,3,"Nilsson, Miss. Berta Olivia",female,18,0,0,347066,7.775,,S 200 | 1090,2,"Baimbrigge, Mr. Charles Robert",male,23,0,0,C.A. 31030,10.5,,S 201 | 1091,3,"Rasmussen, Mrs. (Lena Jacobsen Solvang)",female,,0,0,65305,8.1125,,S 202 | 1092,3,"Murphy, Miss. Nora",female,,0,0,36568,15.5,,Q 203 | 1093,3,"Danbom, Master. Gilbert Sigvard Emanuel",male,0.33,0,2,347080,14.4,,S 204 | 1094,1,"Astor, Col. John Jacob",male,47,1,0,PC 17757,227.525,C62 C64,C 205 | 1095,2,"Quick, Miss. Winifred Vera",female,8,1,1,26360,26,,S 206 | 1096,2,"Andrew, Mr. Frank Thomas",male,25,0,0,C.A. 34050,10.5,,S 207 | 1097,1,"Omont, Mr. Alfred Fernand",male,,0,0,F.C. 12998,25.7417,,C 208 | 1098,3,"McGowan, Miss. Katherine",female,35,0,0,9232,7.75,,Q 209 | 1099,2,"Collett, Mr. Sidney C Stuart",male,24,0,0,28034,10.5,,S 210 | 1100,1,"Rosenbaum, Miss. Edith Louise",female,33,0,0,PC 17613,27.7208,A11,C 211 | 1101,3,"Delalic, Mr. Redjo",male,25,0,0,349250,7.8958,,S 212 | 1102,3,"Andersen, Mr. Albert Karvin",male,32,0,0,C 4001,22.525,,S 213 | 1103,3,"Finoli, Mr. Luigi",male,,0,0,SOTON/O.Q. 3101308,7.05,,S 214 | 1104,2,"Deacon, Mr. Percy William",male,17,0,0,S.O.C. 14879,73.5,,S 215 | 1105,2,"Howard, Mrs. Benjamin (Ellen Truelove Arman)",female,60,1,0,24065,26,,S 216 | 1106,3,"Andersson, Miss. Ida Augusta Margareta",female,38,4,2,347091,7.775,,S 217 | 1107,1,"Head, Mr. Christopher",male,42,0,0,113038,42.5,B11,S 218 | 1108,3,"Mahon, Miss. Bridget Delia",female,,0,0,330924,7.8792,,Q 219 | 1109,1,"Wick, Mr. George Dennick",male,57,1,1,36928,164.8667,,S 220 | 1110,1,"Widener, Mrs. George Dunton (Eleanor Elkins)",female,50,1,1,113503,211.5,C80,C 221 | 1111,3,"Thomson, Mr. Alexander Morrison",male,,0,0,32302,8.05,,S 222 | 1112,2,"Duran y More, Miss. Florentina",female,30,1,0,SC/PARIS 2148,13.8583,,C 223 | 1113,3,"Reynolds, Mr. Harold J",male,21,0,0,342684,8.05,,S 224 | 1114,2,"Cook, Mrs. (Selena Rogers)",female,22,0,0,W./C. 14266,10.5,F33,S 225 | 1115,3,"Karlsson, Mr. Einar Gervasius",male,21,0,0,350053,7.7958,,S 226 | 1116,1,"Candee, Mrs. Edward (Helen Churchill Hungerford)",female,53,0,0,PC 17606,27.4458,,C 227 | 1117,3,"Moubarek, Mrs. George (Omine Amenia"" Alexander)""",female,,0,2,2661,15.2458,,C 228 | 1118,3,"Asplund, Mr. Johan Charles",male,23,0,0,350054,7.7958,,S 229 | 1119,3,"McNeill, Miss. Bridget",female,,0,0,370368,7.75,,Q 230 | 1120,3,"Everett, Mr. Thomas James",male,40.5,0,0,C.A. 6212,15.1,,S 231 | 1121,2,"Hocking, Mr. 
Samuel James Metcalfe",male,36,0,0,242963,13,,S 232 | 1122,2,"Sweet, Mr. George Frederick",male,14,0,0,220845,65,,S 233 | 1123,1,"Willard, Miss. Constance",female,21,0,0,113795,26.55,,S 234 | 1124,3,"Wiklund, Mr. Karl Johan",male,21,1,0,3101266,6.4958,,S 235 | 1125,3,"Linehan, Mr. Michael",male,,0,0,330971,7.8792,,Q 236 | 1126,1,"Cumings, Mr. John Bradley",male,39,1,0,PC 17599,71.2833,C85,C 237 | 1127,3,"Vendel, Mr. Olof Edvin",male,20,0,0,350416,7.8542,,S 238 | 1128,1,"Warren, Mr. Frank Manley",male,64,1,0,110813,75.25,D37,C 239 | 1129,3,"Baccos, Mr. Raffull",male,20,0,0,2679,7.225,,C 240 | 1130,2,"Hiltunen, Miss. Marta",female,18,1,1,250650,13,,S 241 | 1131,1,"Douglas, Mrs. Walter Donald (Mahala Dutton)",female,48,1,0,PC 17761,106.425,C86,C 242 | 1132,1,"Lindstrom, Mrs. Carl Johan (Sigrid Posse)",female,55,0,0,112377,27.7208,,C 243 | 1133,2,"Christy, Mrs. (Alice Frances)",female,45,0,2,237789,30,,S 244 | 1134,1,"Spedden, Mr. Frederic Oakley",male,45,1,1,16966,134.5,E34,C 245 | 1135,3,"Hyman, Mr. Abraham",male,,0,0,3470,7.8875,,S 246 | 1136,3,"Johnston, Master. William Arthur Willie""""",male,,1,2,W./C. 6607,23.45,,S 247 | 1137,1,"Kenyon, Mr. Frederick R",male,41,1,0,17464,51.8625,D21,S 248 | 1138,2,"Karnes, Mrs. J Frank (Claire Bennett)",female,22,0,0,F.C.C. 13534,21,,S 249 | 1139,2,"Drew, Mr. James Vivian",male,42,1,1,28220,32.5,,S 250 | 1140,2,"Hold, Mrs. Stephen (Annie Margaret Hill)",female,29,1,0,26707,26,,S 251 | 1141,3,"Khalil, Mrs. Betros (Zahie Maria"" Elias)""",female,,1,0,2660,14.4542,,C 252 | 1142,2,"West, Miss. Barbara J",female,0.92,1,2,C.A. 34651,27.75,,S 253 | 1143,3,"Abrahamsson, Mr. Abraham August Johannes",male,20,0,0,SOTON/O2 3101284,7.925,,S 254 | 1144,1,"Clark, Mr. Walter Miller",male,27,1,0,13508,136.7792,C89,C 255 | 1145,3,"Salander, Mr. Karl Johan",male,24,0,0,7266,9.325,,S 256 | 1146,3,"Wenzel, Mr. Linhart",male,32.5,0,0,345775,9.5,,S 257 | 1147,3,"MacKay, Mr. George William",male,,0,0,C.A. 42795,7.55,,S 258 | 1148,3,"Mahon, Mr. John",male,,0,0,AQ/4 3130,7.75,,Q 259 | 1149,3,"Niklasson, Mr. Samuel",male,28,0,0,363611,8.05,,S 260 | 1150,2,"Bentham, Miss. Lilian W",female,19,0,0,28404,13,,S 261 | 1151,3,"Midtsjo, Mr. Karl Albert",male,21,0,0,345501,7.775,,S 262 | 1152,3,"de Messemaeker, Mr. Guillaume Joseph",male,36.5,1,0,345572,17.4,,S 263 | 1153,3,"Nilsson, Mr. August Ferdinand",male,21,0,0,350410,7.8542,,S 264 | 1154,2,"Wells, Mrs. Arthur Henry (Addie"" Dart Trevaskis)""",female,29,0,2,29103,23,,S 265 | 1155,3,"Klasen, Miss. Gertrud Emilia",female,1,1,1,350405,12.1833,,S 266 | 1156,2,"Portaluppi, Mr. Emilio Ilario Giuseppe",male,30,0,0,C.A. 34644,12.7375,,C 267 | 1157,3,"Lyntakoff, Mr. Stanko",male,,0,0,349235,7.8958,,S 268 | 1158,1,"Chisholm, Mr. Roderick Robert Crispin",male,,0,0,112051,0,,S 269 | 1159,3,"Warren, Mr. Charles William",male,,0,0,C.A. 49867,7.55,,S 270 | 1160,3,"Howard, Miss. May Elizabeth",female,,0,0,A. 2. 39186,8.05,,S 271 | 1161,3,"Pokrnic, Mr. Mate",male,17,0,0,315095,8.6625,,S 272 | 1162,1,"McCaffry, Mr. Thomas Francis",male,46,0,0,13050,75.2417,C6,C 273 | 1163,3,"Fox, Mr. Patrick",male,,0,0,368573,7.75,,Q 274 | 1164,1,"Clark, Mrs. Walter Miller (Virginia McDowell)",female,26,1,0,13508,136.7792,C89,C 275 | 1165,3,"Lennon, Miss. Mary",female,,1,0,370371,15.5,,Q 276 | 1166,3,"Saade, Mr. Jean Nassr",male,,0,0,2676,7.225,,C 277 | 1167,2,"Bryhl, Miss. Dagmar Jenny Ingeborg ",female,20,1,0,236853,26,,S 278 | 1168,2,"Parker, Mr. Clifford Richard",male,28,0,0,SC 14888,10.5,,S 279 | 1169,2,"Faunthorpe, Mr. 
Harry",male,40,1,0,2926,26,,S 280 | 1170,2,"Ware, Mr. John James",male,30,1,0,CA 31352,21,,S 281 | 1171,2,"Oxenham, Mr. Percy Thomas",male,22,0,0,W./C. 14260,10.5,,S 282 | 1172,3,"Oreskovic, Miss. Jelka",female,23,0,0,315085,8.6625,,S 283 | 1173,3,"Peacock, Master. Alfred Edward",male,0.75,1,1,SOTON/O.Q. 3101315,13.775,,S 284 | 1174,3,"Fleming, Miss. Honora",female,,0,0,364859,7.75,,Q 285 | 1175,3,"Touma, Miss. Maria Youssef",female,9,1,1,2650,15.2458,,C 286 | 1176,3,"Rosblom, Miss. Salli Helena",female,2,1,1,370129,20.2125,,S 287 | 1177,3,"Dennis, Mr. William",male,36,0,0,A/5 21175,7.25,,S 288 | 1178,3,"Franklin, Mr. Charles (Charles Fardon)",male,,0,0,SOTON/O.Q. 3101314,7.25,,S 289 | 1179,1,"Snyder, Mr. John Pillsbury",male,24,1,0,21228,82.2667,B45,S 290 | 1180,3,"Mardirosian, Mr. Sarkis",male,,0,0,2655,7.2292,F E46,C 291 | 1181,3,"Ford, Mr. Arthur",male,,0,0,A/5 1478,8.05,,S 292 | 1182,1,"Rheims, Mr. George Alexander Lucien",male,,0,0,PC 17607,39.6,,S 293 | 1183,3,"Daly, Miss. Margaret Marcella Maggie""""",female,30,0,0,382650,6.95,,Q 294 | 1184,3,"Nasr, Mr. Mustafa",male,,0,0,2652,7.2292,,C 295 | 1185,1,"Dodge, Dr. Washington",male,53,1,1,33638,81.8583,A34,S 296 | 1186,3,"Wittevrongel, Mr. Camille",male,36,0,0,345771,9.5,,S 297 | 1187,3,"Angheloff, Mr. Minko",male,26,0,0,349202,7.8958,,S 298 | 1188,2,"Laroche, Miss. Louise",female,1,1,2,SC/Paris 2123,41.5792,,C 299 | 1189,3,"Samaan, Mr. Hanna",male,,2,0,2662,21.6792,,C 300 | 1190,1,"Loring, Mr. Joseph Holland",male,30,0,0,113801,45.5,,S 301 | 1191,3,"Johansson, Mr. Nils",male,29,0,0,347467,7.8542,,S 302 | 1192,3,"Olsson, Mr. Oscar Wilhelm",male,32,0,0,347079,7.775,,S 303 | 1193,2,"Malachard, Mr. Noel",male,,0,0,237735,15.0458,D,C 304 | 1194,2,"Phillips, Mr. Escott Robert",male,43,0,1,S.O./P.P. 2,21,,S 305 | 1195,3,"Pokrnic, Mr. Tome",male,24,0,0,315092,8.6625,,S 306 | 1196,3,"McCarthy, Miss. Catherine Katie""""",female,,0,0,383123,7.75,,Q 307 | 1197,1,"Crosby, Mrs. Edward Gifford (Catherine Elizabeth Halstead)",female,64,1,1,112901,26.55,B26,S 308 | 1198,1,"Allison, Mr. Hudson Joshua Creighton",male,30,1,2,113781,151.55,C22 C26,S 309 | 1199,3,"Aks, Master. Philip Frank",male,0.83,0,1,392091,9.35,,S 310 | 1200,1,"Hays, Mr. Charles Melville",male,55,1,1,12749,93.5,B69,S 311 | 1201,3,"Hansen, Mrs. Claus Peter (Jennie L Howard)",female,45,1,0,350026,14.1083,,S 312 | 1202,3,"Cacic, Mr. Jego Grga",male,18,0,0,315091,8.6625,,S 313 | 1203,3,"Vartanian, Mr. David",male,22,0,0,2658,7.225,,C 314 | 1204,3,"Sadowitz, Mr. Harry",male,,0,0,LP 1588,7.575,,S 315 | 1205,3,"Carr, Miss. Jeannie",female,37,0,0,368364,7.75,,Q 316 | 1206,1,"White, Mrs. John Stuart (Ella Holmes)",female,55,0,0,PC 17760,135.6333,C32,C 317 | 1207,3,"Hagardon, Miss. Kate",female,17,0,0,AQ/3. 30631,7.7333,,Q 318 | 1208,1,"Spencer, Mr. William Augustus",male,57,1,0,PC 17569,146.5208,B78,C 319 | 1209,2,"Rogers, Mr. Reginald Harry",male,19,0,0,28004,10.5,,S 320 | 1210,3,"Jonsson, Mr. Nils Hilding",male,27,0,0,350408,7.8542,,S 321 | 1211,2,"Jefferys, Mr. Ernest Wilfred",male,22,2,0,C.A. 31029,31.5,,S 322 | 1212,3,"Andersson, Mr. Johan Samuel",male,26,0,0,347075,7.775,,S 323 | 1213,3,"Krekorian, Mr. Neshan",male,25,0,0,2654,7.2292,F E57,C 324 | 1214,2,"Nesson, Mr. Israel",male,26,0,0,244368,13,F2,S 325 | 1215,1,"Rowe, Mr. Alfred G",male,33,0,0,113790,26.55,,S 326 | 1216,1,"Kreuchen, Miss. Emilie",female,39,0,0,24160,211.3375,,S 327 | 1217,3,"Assam, Mr. Ali",male,23,0,0,SOTON/O.Q. 3101309,7.05,,S 328 | 1218,2,"Becker, Miss. 
Ruth Elizabeth",female,12,2,1,230136,39,F4,S 329 | 1219,1,"Rosenshine, Mr. George (Mr George Thorne"")""",male,46,0,0,PC 17585,79.2,,C 330 | 1220,2,"Clarke, Mr. Charles Valentine",male,29,1,0,2003,26,,S 331 | 1221,2,"Enander, Mr. Ingvar",male,21,0,0,236854,13,,S 332 | 1222,2,"Davies, Mrs. John Morgan (Elizabeth Agnes Mary White) ",female,48,0,2,C.A. 33112,36.75,,S 333 | 1223,1,"Dulles, Mr. William Crothers",male,39,0,0,PC 17580,29.7,A18,C 334 | 1224,3,"Thomas, Mr. Tannous",male,,0,0,2684,7.225,,C 335 | 1225,3,"Nakid, Mrs. Said (Waika Mary"" Mowad)""",female,19,1,1,2653,15.7417,,C 336 | 1226,3,"Cor, Mr. Ivan",male,27,0,0,349229,7.8958,,S 337 | 1227,1,"Maguire, Mr. John Edward",male,30,0,0,110469,26,C106,S 338 | 1228,2,"de Brito, Mr. Jose Joaquim",male,32,0,0,244360,13,,S 339 | 1229,3,"Elias, Mr. Joseph",male,39,0,2,2675,7.2292,,C 340 | 1230,2,"Denbury, Mr. Herbert",male,25,0,0,C.A. 31029,31.5,,S 341 | 1231,3,"Betros, Master. Seman",male,,0,0,2622,7.2292,,C 342 | 1232,2,"Fillbrook, Mr. Joseph Charles",male,18,0,0,C.A. 15185,10.5,,S 343 | 1233,3,"Lundstrom, Mr. Thure Edvin",male,32,0,0,350403,7.5792,,S 344 | 1234,3,"Sage, Mr. John George",male,,1,9,CA. 2343,69.55,,S 345 | 1235,1,"Cardeza, Mrs. James Warburton Martinez (Charlotte Wardle Drake)",female,58,0,1,PC 17755,512.3292,B51 B53 B55,C 346 | 1236,3,"van Billiard, Master. James William",male,,1,1,A/5. 851,14.5,,S 347 | 1237,3,"Abelseth, Miss. Karen Marie",female,16,0,0,348125,7.65,,S 348 | 1238,2,"Botsford, Mr. William Hull",male,26,0,0,237670,13,,S 349 | 1239,3,"Whabee, Mrs. George Joseph (Shawneene Abi-Saab)",female,38,0,0,2688,7.2292,,C 350 | 1240,2,"Giles, Mr. Ralph",male,24,0,0,248726,13.5,,S 351 | 1241,2,"Walcroft, Miss. Nellie",female,31,0,0,F.C.C. 13528,21,,S 352 | 1242,1,"Greenfield, Mrs. Leo David (Blanche Strouse)",female,45,0,1,PC 17759,63.3583,D10 D12,C 353 | 1243,2,"Stokes, Mr. Philip Joseph",male,25,0,0,F.C.C. 13540,10.5,,S 354 | 1244,2,"Dibden, Mr. William",male,18,0,0,S.O.C. 14879,73.5,,S 355 | 1245,2,"Herman, Mr. Samuel",male,49,1,2,220845,65,,S 356 | 1246,3,"Dean, Miss. Elizabeth Gladys Millvina""""",female,0.17,1,2,C.A. 2315,20.575,,S 357 | 1247,1,"Julian, Mr. Henry Forbes",male,50,0,0,113044,26,E60,S 358 | 1248,1,"Brown, Mrs. John Murray (Caroline Lane Lamson)",female,59,2,0,11769,51.4792,C101,S 359 | 1249,3,"Lockyer, Mr. Edward",male,,0,0,1222,7.8792,,S 360 | 1250,3,"O'Keefe, Mr. Patrick",male,,0,0,368402,7.75,,Q 361 | 1251,3,"Lindell, Mrs. Edvard Bengtsson (Elin Gerda Persson)",female,30,1,0,349910,15.55,,S 362 | 1252,3,"Sage, Master. William Henry",male,14.5,8,2,CA. 2343,69.55,,S 363 | 1253,2,"Mallet, Mrs. Albert (Antoinette Magnin)",female,24,1,1,S.C./PARIS 2079,37.0042,,C 364 | 1254,2,"Ware, Mrs. John James (Florence Louise Long)",female,31,0,0,CA 31352,21,,S 365 | 1255,3,"Strilic, Mr. Ivan",male,27,0,0,315083,8.6625,,S 366 | 1256,1,"Harder, Mrs. George Achilles (Dorothy Annan)",female,25,1,0,11765,55.4417,E50,C 367 | 1257,3,"Sage, Mrs. John (Annie Bullen)",female,,1,9,CA. 2343,69.55,,S 368 | 1258,3,"Caram, Mr. Joseph",male,,1,0,2689,14.4583,,C 369 | 1259,3,"Riihivouri, Miss. Susanna Juhantytar Sanni""""",female,22,0,0,3101295,39.6875,,S 370 | 1260,1,"Gibson, Mrs. Leonard (Pauline C Boeson)",female,45,0,1,112378,59.4,,C 371 | 1261,2,"Pallas y Castello, Mr. Emilio",male,29,0,0,SC/PARIS 2147,13.8583,,C 372 | 1262,2,"Giles, Mr. Edgar",male,21,1,0,28133,11.5,,S 373 | 1263,1,"Wilson, Miss. Helen Alice",female,31,0,0,16966,134.5,E39 E41,C 374 | 1264,1,"Ismay, Mr. 
Joseph Bruce",male,49,0,0,112058,0,B52 B54 B56,S 375 | 1265,2,"Harbeck, Mr. William H",male,44,0,0,248746,13,,S 376 | 1266,1,"Dodge, Mrs. Washington (Ruth Vidaver)",female,54,1,1,33638,81.8583,A34,S 377 | 1267,1,"Bowen, Miss. Grace Scott",female,45,0,0,PC 17608,262.375,,C 378 | 1268,3,"Kink, Miss. Maria",female,22,2,0,315152,8.6625,,S 379 | 1269,2,"Cotterill, Mr. Henry Harry""""",male,21,0,0,29107,11.5,,S 380 | 1270,1,"Hipkins, Mr. William Edward",male,55,0,0,680,50,C39,S 381 | 1271,3,"Asplund, Master. Carl Edgar",male,5,4,2,347077,31.3875,,S 382 | 1272,3,"O'Connor, Mr. Patrick",male,,0,0,366713,7.75,,Q 383 | 1273,3,"Foley, Mr. Joseph",male,26,0,0,330910,7.8792,,Q 384 | 1274,3,"Risien, Mrs. Samuel (Emma)",female,,0,0,364498,14.5,,S 385 | 1275,3,"McNamee, Mrs. Neal (Eileen O'Leary)",female,19,1,0,376566,16.1,,S 386 | 1276,2,"Wheeler, Mr. Edwin Frederick""""",male,,0,0,SC/PARIS 2159,12.875,,S 387 | 1277,2,"Herman, Miss. Kate",female,24,1,2,220845,65,,S 388 | 1278,3,"Aronsson, Mr. Ernst Axel Algot",male,24,0,0,349911,7.775,,S 389 | 1279,2,"Ashby, Mr. John",male,57,0,0,244346,13,,S 390 | 1280,3,"Canavan, Mr. Patrick",male,21,0,0,364858,7.75,,Q 391 | 1281,3,"Palsson, Master. Paul Folke",male,6,3,1,349909,21.075,,S 392 | 1282,1,"Payne, Mr. Vivian Ponsonby",male,23,0,0,12749,93.5,B24,S 393 | 1283,1,"Lines, Mrs. Ernest H (Elizabeth Lindsey James)",female,51,0,1,PC 17592,39.4,D28,S 394 | 1284,3,"Abbott, Master. Eugene Joseph",male,13,0,2,C.A. 2673,20.25,,S 395 | 1285,2,"Gilbert, Mr. William",male,47,0,0,C.A. 30769,10.5,,S 396 | 1286,3,"Kink-Heilmann, Mr. Anton",male,29,3,1,315153,22.025,,S 397 | 1287,1,"Smith, Mrs. Lucien Philip (Mary Eloise Hughes)",female,18,1,0,13695,60,C31,S 398 | 1288,3,"Colbert, Mr. Patrick",male,24,0,0,371109,7.25,,Q 399 | 1289,1,"Frolicher-Stehli, Mrs. Maxmillian (Margaretha Emerentia Stehli)",female,48,1,1,13567,79.2,B41,C 400 | 1290,3,"Larsson-Rondberg, Mr. Edvard A",male,22,0,0,347065,7.775,,S 401 | 1291,3,"Conlon, Mr. Thomas Henry",male,31,0,0,21332,7.7333,,Q 402 | 1292,1,"Bonnell, Miss. Caroline",female,30,0,0,36928,164.8667,C7,S 403 | 1293,2,"Gale, Mr. Harry",male,38,1,0,28664,21,,S 404 | 1294,1,"Gibson, Miss. Dorothy Winifred",female,22,0,1,112378,59.4,,C 405 | 1295,1,"Carrau, Mr. Jose Pedro",male,17,0,0,113059,47.1,,S 406 | 1296,1,"Frauenthal, Mr. Isaac Gerald",male,43,1,0,17765,27.7208,D40,C 407 | 1297,2,"Nourney, Mr. Alfred (Baron von Drachstedt"")""",male,20,0,0,SC/PARIS 2166,13.8625,D38,C 408 | 1298,2,"Ware, Mr. William Jeffery",male,23,1,0,28666,10.5,,S 409 | 1299,1,"Widener, Mr. George Dunton",male,50,1,1,113503,211.5,C80,C 410 | 1300,3,"Riordan, Miss. Johanna Hannah""""",female,,0,0,334915,7.7208,,Q 411 | 1301,3,"Peacock, Miss. Treasteall",female,3,1,1,SOTON/O.Q. 3101315,13.775,,S 412 | 1302,3,"Naughton, Miss. Hannah",female,,0,0,365237,7.75,,Q 413 | 1303,1,"Minahan, Mrs. William Edward (Lillian E Thorpe)",female,37,1,0,19928,90,C78,Q 414 | 1304,3,"Henriksson, Miss. Jenny Lovisa",female,28,0,0,347086,7.775,,S 415 | 1305,3,"Spector, Mr. Woolf",male,,0,0,A.5. 3236,8.05,,S 416 | 1306,1,"Oliva y Ocana, Dona. Fermina",female,39,0,0,PC 17758,108.9,C105,C 417 | 1307,3,"Saether, Mr. Simon Sivertsen",male,38.5,0,0,SOTON/O.Q. 3101262,7.25,,S 418 | 1308,3,"Ware, Mr. Frederick",male,,0,0,359309,8.05,,S 419 | 1309,3,"Peter, Master. 
Michael J",male,,1,1,2668,22.3583,,C 420 | -------------------------------------------------------------------------------- /Model_Optimisation/images/rf1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aiplanethub/ML_Models/0c3ee0dae4de4091c0867f8398171f6b8d7ad6c9/Model_Optimisation/images/rf1.png -------------------------------------------------------------------------------- /Model_Optimisation/images/rf2.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aiplanethub/ML_Models/0c3ee0dae4de4091c0867f8398171f6b8d7ad6c9/Model_Optimisation/images/rf2.gif -------------------------------------------------------------------------------- /Model_Optimisation/images/rf3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aiplanethub/ML_Models/0c3ee0dae4de4091c0867f8398171f6b8d7ad6c9/Model_Optimisation/images/rf3.png -------------------------------------------------------------------------------- /Model_Optimisation/images/rf4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aiplanethub/ML_Models/0c3ee0dae4de4091c0867f8398171f6b8d7ad6c9/Model_Optimisation/images/rf4.png -------------------------------------------------------------------------------- /Model_Optimisation/images/rf5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aiplanethub/ML_Models/0c3ee0dae4de4091c0867f8398171f6b8d7ad6c9/Model_Optimisation/images/rf5.png -------------------------------------------------------------------------------- /Model_Optimisation/images/rf6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aiplanethub/ML_Models/0c3ee0dae4de4091c0867f8398171f6b8d7ad6c9/Model_Optimisation/images/rf6.png -------------------------------------------------------------------------------- /Model_Optimisation/images/rf7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aiplanethub/ML_Models/0c3ee0dae4de4091c0867f8398171f6b8d7ad6c9/Model_Optimisation/images/rf7.png -------------------------------------------------------------------------------- /Model_Optimisation/images/rf8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aiplanethub/ML_Models/0c3ee0dae4de4091c0867f8398171f6b8d7ad6c9/Model_Optimisation/images/rf8.png -------------------------------------------------------------------------------- /Model_Optimisation/notebooks/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "notebook": { 3 | "folder": "random_forest_in_class", 4 | "file": "random_forest_slides.ipynb" 5 | } 6 | } 7 | -------------------------------------------------------------------------------- /Performance_Evaluation/Standard Metropolitan Areas Data - train_data.csv: -------------------------------------------------------------------------------- 1 | land_area,percent_city,percent_senior,physicians,hospital_beds,graduates,labor,income,region,crime_rate 2 | 1384,78.1,12.3,25627,69678,50.1,4083.9,72100,1,75.55 3 | 3719,43.9,9.4,13326,43292,53.9,3305.9,54542,2,56.03 4 | 3553,37.4,10.7,9724,33731,50.6,2066.3,33216,1,41.32 5 | 
3916,29.9,8.8,6402,24167,52.2,1966.7,32906,2,67.38 6 | 2480,31.5,10.5,8502,16751,66.1,1514.5,26573,4,80.19 7 | 2815,23.1,6.7,7340,16941,68.3,1541.9,25663,3,58.48 8 | 8360,46.3,8.2,4047,14347,53.6,1321.2,18350,3,72.25 9 | 6794,60.1,6.3,4562,14333,51.7,1272.7,18221,3,64.88 10 | 3049,19.5,12.1,4005,21149,53.4,967.5,15826,1,30.51 11 | 4647,31.5,9.2,3916,12815,65.1,1032.2,14542,2,55.30 12 | 1008,16.6,10.3,4006,16704,55.9,935.5,15953,1,54.16 13 | 1519,31.8,10.5,4094,12545,54.6,906,14684,2,52.73 14 | 4326,23.6,7.3,3064,9976,50.4,867.2,12107,3,58.12 15 | 782,28.4,7.8,3119,8656,70.5,915.2,12591,4,63.20 16 | 4226,38.1,9.8,3285,5392,67.8,699.8,10918,4,64.05 17 | 1456,46.7,10.4,2484,8555,56.8,710.4,10104,2,44.64 18 | 2045,37.2,21.4,1949,8863,50.7,543.2,7989,3,64.68 19 | 2149,29.8,10.6,2530,8354,48.4,617.6,9037,2,49.69 20 | 27293,25.3,12.3,2018,6323,57.4,510.6,7399,4,76.03 21 | 3341,35.8,10.1,2289,7593,59.9,656.3,9106,2,63.04 22 | 9155,53.8,11.1,2280,6450,60.1,575.2,7766,4,85.62 23 | 3072,68,9.3,2181,7497,56,549.6,7736,2,53.65 24 | 1967,51.1,8.8,2520,8467,45.8,460.5,7038,3,61.15 25 | 2460,49.6,8.4,1874,7706,59.9,510.7,6658,2,57.59 26 | 2966,26.9,10.3,2053,6604,56.3,450.4,6966,1,56.55 27 | 3434,28.9,8.3,1844,3215,65.1,422.6,5909,4,77.94 28 | 1392,37.2,9.8,1579,6087,46.5,396.8,5705,3,51.69 29 | 3358,35.1,11.3,1649,5512,44.9,359.1,4941,3,53.15 30 | 2624,30.4,12.2,1532,4730,55.2,356.5,5094,1,38.75 31 | 2187,47,10.2,1098,4342,51.9,355.4,5142,2,59.48 32 | 3214,47.7,9.4,1285,3459,40.3,401.7,4924,3,45.14 33 | 3491,48.5,9.7,1496,5620,59.6,362.3,4798,3,57.88 34 | 4080,59.6,9.9,1597,7496,47.3,380.9,4600,3,43.90 35 | 596,100,6,1260,2819,66,319.9,5181,4,64.98 36 | 3199,80.6,8.7,983,4749,50.8,292.4,4127,3,61.97 37 | 903,37.3,9.6,948,4064,55.6,293.3,4102,2,52.53 38 | 2419,27.8,9.9,1250,2870,57.8,286.8,3860,1,47.65 39 | 938,48.1,7.4,614,3016,50,280.9,4177,2,54.51 40 | 1951,28.4,14.5,696,4843,47.9,271.5,3667,1,23.64 41 | 1490,33.1,11.9,827,3818,47.4,300.2,4144,1,30.59 42 | 5677,55.8,10.5,760,3883,56.2,292,4035,3,52.70 43 | 1525,55.7,8.3,751,3234,44.9,318.5,3777,3,62.09 44 | 2528,19.2,10.2,798,3135,55.4,274.1,3489,3,74.94 45 | 312,19.5,7.5,769,2463,55,298.7,4352,1,48.99 46 | 1537,63.8,8.7,1234,5160,62.7,272.6,3725,2,55.54 47 | 1420,32.6,9.5,833,2950,54,280.8,3553,2,46.26 48 | 47,41.9,11.9,745,3352,36.3,258.9,3915,1,51.70 49 | 2115,19.9,9.1,676,2296,38.8,253.3,2962,3,58.33 50 | 1182,32.4,7.4,518,2515,52.4,216.8,3627,2,68.48 51 | 1165,14.5,8.6,746,4277,54.4,237.1,3724,3,60.77 52 | 1553,50,8,2207,4931,52,257.2,2991,3,49.83 53 | 2023,22.1,21.8,752,2317,55.7,194.2,3283,3,76.35 54 | 2766,67.9,7.7,679,3873,56.3,224,2598,3,63.22 55 | 5966,39.5,9.6,737,1907,52.7,246.6,3007,4,80.94 56 | 1863,50.4,7.7,674,2989,63.8,194.8,2747,4,53.76 57 | 192,60.5,10.8,617,1789,44.1,212.6,3158,1,58.79 58 | 2277,39.5,7.5,512,1788,61.9,221.1,2853,2,44.07 59 | 1057,90.7,6.1,479,2551,51.1,163.4,2012,3,58.97 60 | 1624,13.4,11,832,2938,55.4,207.8,2885,1,39.59 61 | 1676,36.6,9.2,505,3297,60.7,156.3,2689,4,57.37 62 | 2109,41.2,10.3,520,2539,45.2,183.1,2308,3,40.30 63 | 2449,68.4,9.6,681,2864,63.2,207.4,2651,2,63.67 64 | 2618,31.7,6.1,836,2159,48,145.6,1992,3,65.05 65 | 1465,30.3,6.8,598,6456,50.6,164.7,2201,3,70.66 66 | 1704,52.1,10.5,379,2491,55.6,173.2,2662,2,49.60 67 | 1750,49.3,9.7,446,3472,58.2,176.5,2439,2,44.67 68 | 1489,58.8,9.5,911,5720,56.5,175.1,2264,3,70.55 69 | 8152,22.3,9.1,405,1254,51.7,165.6,2257,4,78.10 70 | 655,75.2,6.6,425,3879,51.6,163,2088,3,42.92 71 | 2363,53.1,10.6,565,2717,49.3,146.4,1996,3,53.97 72 | 
946,16.4,11.1,366,1455,43.9,163.8,2178,1,23.32 73 | 1136,58.6,9.7,448,2630,68.1,171.4,2396,2,61.46 74 | 2658,39,12.2,365,5430,49.9,136.9,1862,1,28.52 75 | 1758,56.8,11.5,565,2081,65.3,131.2,1939,4,56.06 76 | 1412,39.2,11.3,436,1837,49.4,154.2,2098,4,82.68 77 | 2071,19.9,11.3,470,2531,58.9,133.1,1782,1,36.47 78 | 862,26.3,13.4,423,1929,43.3,145.5,2010,1,25.49 79 | 1526,71.7,7.7,413,1636,47.1,125.8,1692,3,66.13 80 | 1758,33.2,11.6,296,2652,45.3,114.4,1641,3,41.98 81 | 1651,64.6,8.9,774,5431,56.1,136.9,1724,3,48.88 82 | 1493,64.8,8.9,863,3289,53.7,154.7,1787,3,53.98 83 | 2710,63.7,6.2,357,1277,72.8,110.9,1639,4,63.10 84 | 1975,46.5,12.6,405,2896,51.5,133.8,1853,2,43.94 85 | 1404,38.5,10,299,1766,56.2,138.6,1776,2,40.54 86 | 2737,45,10.5,602,1462,71.3,131.4,1980,4,63.44 87 | 1697,23.8,7.2,338,1610,51,105.9,1354,3,70.50 88 | 813,46,9.8,293,1693,58.4,119.9,1688,1,38.24 89 | 7397,47.3,12.5,355,2042,56.2,113.7,1654,2,45.97 90 | 1148,45.3,11.1,891,5790,54,277,3510,1,49.64 91 | 1509,37.6,12,1087,4900,51.4,319.6,3982,1,45.19 92 | 2013,61.7,9.7,273,1484,50.9,106.7,1412,3,56.87 93 | 711,42.4,6.1,1411,3659,67.5,131,1790,2,64.91 94 | 471,46.3,8.6,219,1128,47.8,105.3,1458,2,53.68 95 | 4552,54.4,9.1,329,719,61.9,118,1386,4,62.63 96 | 1511,38.7,10.7,348,1093,50.4,127.2,1452,4,70.66 97 | 1543,39.6,8.1,159,481,30.3,80.6,769,3,36.36 98 | 1011,37.8,10.5,264,964,70.7,93.2,1337,3,60.16 99 | 813,13.4,10.9,371,4355,58,97,1589,1,36.33 100 | 654,28.8,3.9,140,1296,55.1,66.9,1148,3,68.76 -------------------------------------------------------------------------------- /Performance_Evaluation/diabetes.txt: -------------------------------------------------------------------------------- 1 | 6,148,72,35,0,33.6,0.627,50,1 2 | 1,85,66,29,0,26.6,0.351,31,0 3 | 8,183,64,0,0,23.3,0.672,32,1 4 | 1,89,66,23,94,28.1,0.167,21,0 5 | 0,137,40,35,168,43.1,2.288,33,1 6 | 5,116,74,0,0,25.6,0.201,30,0 7 | 3,78,50,32,88,31.0,0.248,26,1 8 | 10,115,0,0,0,35.3,0.134,29,0 9 | 2,197,70,45,543,30.5,0.158,53,1 10 | 8,125,96,0,0,0.0,0.232,54,1 11 | 4,110,92,0,0,37.6,0.191,30,0 12 | 10,168,74,0,0,38.0,0.537,34,1 13 | 10,139,80,0,0,27.1,1.441,57,0 14 | 1,189,60,23,846,30.1,0.398,59,1 15 | 5,166,72,19,175,25.8,0.587,51,1 16 | 7,100,0,0,0,30.0,0.484,32,1 17 | 0,118,84,47,230,45.8,0.551,31,1 18 | 7,107,74,0,0,29.6,0.254,31,1 19 | 1,103,30,38,83,43.3,0.183,33,0 20 | 1,115,70,30,96,34.6,0.529,32,1 21 | 3,126,88,41,235,39.3,0.704,27,0 22 | 8,99,84,0,0,35.4,0.388,50,0 23 | 7,196,90,0,0,39.8,0.451,41,1 24 | 9,119,80,35,0,29.0,0.263,29,1 25 | 11,143,94,33,146,36.6,0.254,51,1 26 | 10,125,70,26,115,31.1,0.205,41,1 27 | 7,147,76,0,0,39.4,0.257,43,1 28 | 1,97,66,15,140,23.2,0.487,22,0 29 | 13,145,82,19,110,22.2,0.245,57,0 30 | 5,117,92,0,0,34.1,0.337,38,0 31 | 5,109,75,26,0,36.0,0.546,60,0 32 | 3,158,76,36,245,31.6,0.851,28,1 33 | 3,88,58,11,54,24.8,0.267,22,0 34 | 6,92,92,0,0,19.9,0.188,28,0 35 | 10,122,78,31,0,27.6,0.512,45,0 36 | 4,103,60,33,192,24.0,0.966,33,0 37 | 11,138,76,0,0,33.2,0.420,35,0 38 | 9,102,76,37,0,32.9,0.665,46,1 39 | 2,90,68,42,0,38.2,0.503,27,1 40 | 4,111,72,47,207,37.1,1.390,56,1 41 | 3,180,64,25,70,34.0,0.271,26,0 42 | 7,133,84,0,0,40.2,0.696,37,0 43 | 7,106,92,18,0,22.7,0.235,48,0 44 | 9,171,110,24,240,45.4,0.721,54,1 45 | 7,159,64,0,0,27.4,0.294,40,0 46 | 0,180,66,39,0,42.0,1.893,25,1 47 | 1,146,56,0,0,29.7,0.564,29,0 48 | 2,71,70,27,0,28.0,0.586,22,0 49 | 7,103,66,32,0,39.1,0.344,31,1 50 | 7,105,0,0,0,0.0,0.305,24,0 51 | 1,103,80,11,82,19.4,0.491,22,0 52 | 1,101,50,15,36,24.2,0.526,26,0 53 | 5,88,66,21,23,24.4,0.342,30,0 54 | 
8,176,90,34,300,33.7,0.467,58,1 55 | 7,150,66,42,342,34.7,0.718,42,0 56 | 1,73,50,10,0,23.0,0.248,21,0 57 | 7,187,68,39,304,37.7,0.254,41,1 58 | 0,100,88,60,110,46.8,0.962,31,0 59 | 0,146,82,0,0,40.5,1.781,44,0 60 | 0,105,64,41,142,41.5,0.173,22,0 61 | 2,84,0,0,0,0.0,0.304,21,0 62 | 8,133,72,0,0,32.9,0.270,39,1 63 | 5,44,62,0,0,25.0,0.587,36,0 64 | 2,141,58,34,128,25.4,0.699,24,0 65 | 7,114,66,0,0,32.8,0.258,42,1 66 | 5,99,74,27,0,29.0,0.203,32,0 67 | 0,109,88,30,0,32.5,0.855,38,1 68 | 2,109,92,0,0,42.7,0.845,54,0 69 | 1,95,66,13,38,19.6,0.334,25,0 70 | 4,146,85,27,100,28.9,0.189,27,0 71 | 2,100,66,20,90,32.9,0.867,28,1 72 | 5,139,64,35,140,28.6,0.411,26,0 73 | 13,126,90,0,0,43.4,0.583,42,1 74 | 4,129,86,20,270,35.1,0.231,23,0 75 | 1,79,75,30,0,32.0,0.396,22,0 76 | 1,0,48,20,0,24.7,0.140,22,0 77 | 7,62,78,0,0,32.6,0.391,41,0 78 | 5,95,72,33,0,37.7,0.370,27,0 79 | 0,131,0,0,0,43.2,0.270,26,1 80 | 2,112,66,22,0,25.0,0.307,24,0 81 | 3,113,44,13,0,22.4,0.140,22,0 82 | 2,74,0,0,0,0.0,0.102,22,0 83 | 7,83,78,26,71,29.3,0.767,36,0 84 | 0,101,65,28,0,24.6,0.237,22,0 85 | 5,137,108,0,0,48.8,0.227,37,1 86 | 2,110,74,29,125,32.4,0.698,27,0 87 | 13,106,72,54,0,36.6,0.178,45,0 88 | 2,100,68,25,71,38.5,0.324,26,0 89 | 15,136,70,32,110,37.1,0.153,43,1 90 | 1,107,68,19,0,26.5,0.165,24,0 91 | 1,80,55,0,0,19.1,0.258,21,0 92 | 4,123,80,15,176,32.0,0.443,34,0 93 | 7,81,78,40,48,46.7,0.261,42,0 94 | 4,134,72,0,0,23.8,0.277,60,1 95 | 2,142,82,18,64,24.7,0.761,21,0 96 | 6,144,72,27,228,33.9,0.255,40,0 97 | 2,92,62,28,0,31.6,0.130,24,0 98 | 1,71,48,18,76,20.4,0.323,22,0 99 | 6,93,50,30,64,28.7,0.356,23,0 100 | 1,122,90,51,220,49.7,0.325,31,1 101 | 1,163,72,0,0,39.0,1.222,33,1 102 | 1,151,60,0,0,26.1,0.179,22,0 103 | 0,125,96,0,0,22.5,0.262,21,0 104 | 1,81,72,18,40,26.6,0.283,24,0 105 | 2,85,65,0,0,39.6,0.930,27,0 106 | 1,126,56,29,152,28.7,0.801,21,0 107 | 1,96,122,0,0,22.4,0.207,27,0 108 | 4,144,58,28,140,29.5,0.287,37,0 109 | 3,83,58,31,18,34.3,0.336,25,0 110 | 0,95,85,25,36,37.4,0.247,24,1 111 | 3,171,72,33,135,33.3,0.199,24,1 112 | 8,155,62,26,495,34.0,0.543,46,1 113 | 1,89,76,34,37,31.2,0.192,23,0 114 | 4,76,62,0,0,34.0,0.391,25,0 115 | 7,160,54,32,175,30.5,0.588,39,1 116 | 4,146,92,0,0,31.2,0.539,61,1 117 | 5,124,74,0,0,34.0,0.220,38,1 118 | 5,78,48,0,0,33.7,0.654,25,0 119 | 4,97,60,23,0,28.2,0.443,22,0 120 | 4,99,76,15,51,23.2,0.223,21,0 121 | 0,162,76,56,100,53.2,0.759,25,1 122 | 6,111,64,39,0,34.2,0.260,24,0 123 | 2,107,74,30,100,33.6,0.404,23,0 124 | 5,132,80,0,0,26.8,0.186,69,0 125 | 0,113,76,0,0,33.3,0.278,23,1 126 | 1,88,30,42,99,55.0,0.496,26,1 127 | 3,120,70,30,135,42.9,0.452,30,0 128 | 1,118,58,36,94,33.3,0.261,23,0 129 | 1,117,88,24,145,34.5,0.403,40,1 130 | 0,105,84,0,0,27.9,0.741,62,1 131 | 4,173,70,14,168,29.7,0.361,33,1 132 | 9,122,56,0,0,33.3,1.114,33,1 133 | 3,170,64,37,225,34.5,0.356,30,1 134 | 8,84,74,31,0,38.3,0.457,39,0 135 | 2,96,68,13,49,21.1,0.647,26,0 136 | 2,125,60,20,140,33.8,0.088,31,0 137 | 0,100,70,26,50,30.8,0.597,21,0 138 | 0,93,60,25,92,28.7,0.532,22,0 139 | 0,129,80,0,0,31.2,0.703,29,0 140 | 5,105,72,29,325,36.9,0.159,28,0 141 | 3,128,78,0,0,21.1,0.268,55,0 142 | 5,106,82,30,0,39.5,0.286,38,0 143 | 2,108,52,26,63,32.5,0.318,22,0 144 | 10,108,66,0,0,32.4,0.272,42,1 145 | 4,154,62,31,284,32.8,0.237,23,0 146 | 0,102,75,23,0,0.0,0.572,21,0 147 | 9,57,80,37,0,32.8,0.096,41,0 148 | 2,106,64,35,119,30.5,1.400,34,0 149 | 5,147,78,0,0,33.7,0.218,65,0 150 | 2,90,70,17,0,27.3,0.085,22,0 151 | 1,136,74,50,204,37.4,0.399,24,0 152 | 4,114,65,0,0,21.9,0.432,37,0 153 | 
9,156,86,28,155,34.3,1.189,42,1 154 | 1,153,82,42,485,40.6,0.687,23,0 155 | 8,188,78,0,0,47.9,0.137,43,1 156 | 7,152,88,44,0,50.0,0.337,36,1 157 | 2,99,52,15,94,24.6,0.637,21,0 158 | 1,109,56,21,135,25.2,0.833,23,0 159 | 2,88,74,19,53,29.0,0.229,22,0 160 | 17,163,72,41,114,40.9,0.817,47,1 161 | 4,151,90,38,0,29.7,0.294,36,0 162 | 7,102,74,40,105,37.2,0.204,45,0 163 | 0,114,80,34,285,44.2,0.167,27,0 164 | 2,100,64,23,0,29.7,0.368,21,0 165 | 0,131,88,0,0,31.6,0.743,32,1 166 | 6,104,74,18,156,29.9,0.722,41,1 167 | 3,148,66,25,0,32.5,0.256,22,0 168 | 4,120,68,0,0,29.6,0.709,34,0 169 | 4,110,66,0,0,31.9,0.471,29,0 170 | 3,111,90,12,78,28.4,0.495,29,0 171 | 6,102,82,0,0,30.8,0.180,36,1 172 | 6,134,70,23,130,35.4,0.542,29,1 173 | 2,87,0,23,0,28.9,0.773,25,0 174 | 1,79,60,42,48,43.5,0.678,23,0 175 | 2,75,64,24,55,29.7,0.370,33,0 176 | 8,179,72,42,130,32.7,0.719,36,1 177 | 6,85,78,0,0,31.2,0.382,42,0 178 | 0,129,110,46,130,67.1,0.319,26,1 179 | 5,143,78,0,0,45.0,0.190,47,0 180 | 5,130,82,0,0,39.1,0.956,37,1 181 | 6,87,80,0,0,23.2,0.084,32,0 182 | 0,119,64,18,92,34.9,0.725,23,0 183 | 1,0,74,20,23,27.7,0.299,21,0 184 | 5,73,60,0,0,26.8,0.268,27,0 185 | 4,141,74,0,0,27.6,0.244,40,0 186 | 7,194,68,28,0,35.9,0.745,41,1 187 | 8,181,68,36,495,30.1,0.615,60,1 188 | 1,128,98,41,58,32.0,1.321,33,1 189 | 8,109,76,39,114,27.9,0.640,31,1 190 | 5,139,80,35,160,31.6,0.361,25,1 191 | 3,111,62,0,0,22.6,0.142,21,0 192 | 9,123,70,44,94,33.1,0.374,40,0 193 | 7,159,66,0,0,30.4,0.383,36,1 194 | 11,135,0,0,0,52.3,0.578,40,1 195 | 8,85,55,20,0,24.4,0.136,42,0 196 | 5,158,84,41,210,39.4,0.395,29,1 197 | 1,105,58,0,0,24.3,0.187,21,0 198 | 3,107,62,13,48,22.9,0.678,23,1 199 | 4,109,64,44,99,34.8,0.905,26,1 200 | 4,148,60,27,318,30.9,0.150,29,1 201 | 0,113,80,16,0,31.0,0.874,21,0 202 | 1,138,82,0,0,40.1,0.236,28,0 203 | 0,108,68,20,0,27.3,0.787,32,0 204 | 2,99,70,16,44,20.4,0.235,27,0 205 | 6,103,72,32,190,37.7,0.324,55,0 206 | 5,111,72,28,0,23.9,0.407,27,0 207 | 8,196,76,29,280,37.5,0.605,57,1 208 | 5,162,104,0,0,37.7,0.151,52,1 209 | 1,96,64,27,87,33.2,0.289,21,0 210 | 7,184,84,33,0,35.5,0.355,41,1 211 | 2,81,60,22,0,27.7,0.290,25,0 212 | 0,147,85,54,0,42.8,0.375,24,0 213 | 7,179,95,31,0,34.2,0.164,60,0 214 | 0,140,65,26,130,42.6,0.431,24,1 215 | 9,112,82,32,175,34.2,0.260,36,1 216 | 12,151,70,40,271,41.8,0.742,38,1 217 | 5,109,62,41,129,35.8,0.514,25,1 218 | 6,125,68,30,120,30.0,0.464,32,0 219 | 5,85,74,22,0,29.0,1.224,32,1 220 | 5,112,66,0,0,37.8,0.261,41,1 221 | 0,177,60,29,478,34.6,1.072,21,1 222 | 2,158,90,0,0,31.6,0.805,66,1 223 | 7,119,0,0,0,25.2,0.209,37,0 224 | 7,142,60,33,190,28.8,0.687,61,0 225 | 1,100,66,15,56,23.6,0.666,26,0 226 | 1,87,78,27,32,34.6,0.101,22,0 227 | 0,101,76,0,0,35.7,0.198,26,0 228 | 3,162,52,38,0,37.2,0.652,24,1 229 | 4,197,70,39,744,36.7,2.329,31,0 230 | 0,117,80,31,53,45.2,0.089,24,0 231 | 4,142,86,0,0,44.0,0.645,22,1 232 | 6,134,80,37,370,46.2,0.238,46,1 233 | 1,79,80,25,37,25.4,0.583,22,0 234 | 4,122,68,0,0,35.0,0.394,29,0 235 | 3,74,68,28,45,29.7,0.293,23,0 236 | 4,171,72,0,0,43.6,0.479,26,1 237 | 7,181,84,21,192,35.9,0.586,51,1 238 | 0,179,90,27,0,44.1,0.686,23,1 239 | 9,164,84,21,0,30.8,0.831,32,1 240 | 0,104,76,0,0,18.4,0.582,27,0 241 | 1,91,64,24,0,29.2,0.192,21,0 242 | 4,91,70,32,88,33.1,0.446,22,0 243 | 3,139,54,0,0,25.6,0.402,22,1 244 | 6,119,50,22,176,27.1,1.318,33,1 245 | 2,146,76,35,194,38.2,0.329,29,0 246 | 9,184,85,15,0,30.0,1.213,49,1 247 | 10,122,68,0,0,31.2,0.258,41,0 248 | 0,165,90,33,680,52.3,0.427,23,0 249 | 9,124,70,33,402,35.4,0.282,34,0 250 | 
1,111,86,19,0,30.1,0.143,23,0 251 | 9,106,52,0,0,31.2,0.380,42,0 252 | 2,129,84,0,0,28.0,0.284,27,0 253 | 2,90,80,14,55,24.4,0.249,24,0 254 | 0,86,68,32,0,35.8,0.238,25,0 255 | 12,92,62,7,258,27.6,0.926,44,1 256 | 1,113,64,35,0,33.6,0.543,21,1 257 | 3,111,56,39,0,30.1,0.557,30,0 258 | 2,114,68,22,0,28.7,0.092,25,0 259 | 1,193,50,16,375,25.9,0.655,24,0 260 | 11,155,76,28,150,33.3,1.353,51,1 261 | 3,191,68,15,130,30.9,0.299,34,0 262 | 3,141,0,0,0,30.0,0.761,27,1 263 | 4,95,70,32,0,32.1,0.612,24,0 264 | 3,142,80,15,0,32.4,0.200,63,0 265 | 4,123,62,0,0,32.0,0.226,35,1 266 | 5,96,74,18,67,33.6,0.997,43,0 267 | 0,138,0,0,0,36.3,0.933,25,1 268 | 2,128,64,42,0,40.0,1.101,24,0 269 | 0,102,52,0,0,25.1,0.078,21,0 270 | 2,146,0,0,0,27.5,0.240,28,1 271 | 10,101,86,37,0,45.6,1.136,38,1 272 | 2,108,62,32,56,25.2,0.128,21,0 273 | 3,122,78,0,0,23.0,0.254,40,0 274 | 1,71,78,50,45,33.2,0.422,21,0 275 | 13,106,70,0,0,34.2,0.251,52,0 276 | 2,100,70,52,57,40.5,0.677,25,0 277 | 7,106,60,24,0,26.5,0.296,29,1 278 | 0,104,64,23,116,27.8,0.454,23,0 279 | 5,114,74,0,0,24.9,0.744,57,0 280 | 2,108,62,10,278,25.3,0.881,22,0 281 | 0,146,70,0,0,37.9,0.334,28,1 282 | 10,129,76,28,122,35.9,0.280,39,0 283 | 7,133,88,15,155,32.4,0.262,37,0 284 | 7,161,86,0,0,30.4,0.165,47,1 285 | 2,108,80,0,0,27.0,0.259,52,1 286 | 7,136,74,26,135,26.0,0.647,51,0 287 | 5,155,84,44,545,38.7,0.619,34,0 288 | 1,119,86,39,220,45.6,0.808,29,1 289 | 4,96,56,17,49,20.8,0.340,26,0 290 | 5,108,72,43,75,36.1,0.263,33,0 291 | 0,78,88,29,40,36.9,0.434,21,0 292 | 0,107,62,30,74,36.6,0.757,25,1 293 | 2,128,78,37,182,43.3,1.224,31,1 294 | 1,128,48,45,194,40.5,0.613,24,1 295 | 0,161,50,0,0,21.9,0.254,65,0 296 | 6,151,62,31,120,35.5,0.692,28,0 297 | 2,146,70,38,360,28.0,0.337,29,1 298 | 0,126,84,29,215,30.7,0.520,24,0 299 | 14,100,78,25,184,36.6,0.412,46,1 300 | 8,112,72,0,0,23.6,0.840,58,0 301 | 0,167,0,0,0,32.3,0.839,30,1 302 | 2,144,58,33,135,31.6,0.422,25,1 303 | 5,77,82,41,42,35.8,0.156,35,0 304 | 5,115,98,0,0,52.9,0.209,28,1 305 | 3,150,76,0,0,21.0,0.207,37,0 306 | 2,120,76,37,105,39.7,0.215,29,0 307 | 10,161,68,23,132,25.5,0.326,47,1 308 | 0,137,68,14,148,24.8,0.143,21,0 309 | 0,128,68,19,180,30.5,1.391,25,1 310 | 2,124,68,28,205,32.9,0.875,30,1 311 | 6,80,66,30,0,26.2,0.313,41,0 312 | 0,106,70,37,148,39.4,0.605,22,0 313 | 2,155,74,17,96,26.6,0.433,27,1 314 | 3,113,50,10,85,29.5,0.626,25,0 315 | 7,109,80,31,0,35.9,1.127,43,1 316 | 2,112,68,22,94,34.1,0.315,26,0 317 | 3,99,80,11,64,19.3,0.284,30,0 318 | 3,182,74,0,0,30.5,0.345,29,1 319 | 3,115,66,39,140,38.1,0.150,28,0 320 | 6,194,78,0,0,23.5,0.129,59,1 321 | 4,129,60,12,231,27.5,0.527,31,0 322 | 3,112,74,30,0,31.6,0.197,25,1 323 | 0,124,70,20,0,27.4,0.254,36,1 324 | 13,152,90,33,29,26.8,0.731,43,1 325 | 2,112,75,32,0,35.7,0.148,21,0 326 | 1,157,72,21,168,25.6,0.123,24,0 327 | 1,122,64,32,156,35.1,0.692,30,1 328 | 10,179,70,0,0,35.1,0.200,37,0 329 | 2,102,86,36,120,45.5,0.127,23,1 330 | 6,105,70,32,68,30.8,0.122,37,0 331 | 8,118,72,19,0,23.1,1.476,46,0 332 | 2,87,58,16,52,32.7,0.166,25,0 333 | 1,180,0,0,0,43.3,0.282,41,1 334 | 12,106,80,0,0,23.6,0.137,44,0 335 | 1,95,60,18,58,23.9,0.260,22,0 336 | 0,165,76,43,255,47.9,0.259,26,0 337 | 0,117,0,0,0,33.8,0.932,44,0 338 | 5,115,76,0,0,31.2,0.343,44,1 339 | 9,152,78,34,171,34.2,0.893,33,1 340 | 7,178,84,0,0,39.9,0.331,41,1 341 | 1,130,70,13,105,25.9,0.472,22,0 342 | 1,95,74,21,73,25.9,0.673,36,0 343 | 1,0,68,35,0,32.0,0.389,22,0 344 | 5,122,86,0,0,34.7,0.290,33,0 345 | 8,95,72,0,0,36.8,0.485,57,0 346 | 8,126,88,36,108,38.5,0.349,49,0 347 | 
1,139,46,19,83,28.7,0.654,22,0 348 | 3,116,0,0,0,23.5,0.187,23,0 349 | 3,99,62,19,74,21.8,0.279,26,0 350 | 5,0,80,32,0,41.0,0.346,37,1 351 | 4,92,80,0,0,42.2,0.237,29,0 352 | 4,137,84,0,0,31.2,0.252,30,0 353 | 3,61,82,28,0,34.4,0.243,46,0 354 | 1,90,62,12,43,27.2,0.580,24,0 355 | 3,90,78,0,0,42.7,0.559,21,0 356 | 9,165,88,0,0,30.4,0.302,49,1 357 | 1,125,50,40,167,33.3,0.962,28,1 358 | 13,129,0,30,0,39.9,0.569,44,1 359 | 12,88,74,40,54,35.3,0.378,48,0 360 | 1,196,76,36,249,36.5,0.875,29,1 361 | 5,189,64,33,325,31.2,0.583,29,1 362 | 5,158,70,0,0,29.8,0.207,63,0 363 | 5,103,108,37,0,39.2,0.305,65,0 364 | 4,146,78,0,0,38.5,0.520,67,1 365 | 4,147,74,25,293,34.9,0.385,30,0 366 | 5,99,54,28,83,34.0,0.499,30,0 367 | 6,124,72,0,0,27.6,0.368,29,1 368 | 0,101,64,17,0,21.0,0.252,21,0 369 | 3,81,86,16,66,27.5,0.306,22,0 370 | 1,133,102,28,140,32.8,0.234,45,1 371 | 3,173,82,48,465,38.4,2.137,25,1 372 | 0,118,64,23,89,0.0,1.731,21,0 373 | 0,84,64,22,66,35.8,0.545,21,0 374 | 2,105,58,40,94,34.9,0.225,25,0 375 | 2,122,52,43,158,36.2,0.816,28,0 376 | 12,140,82,43,325,39.2,0.528,58,1 377 | 0,98,82,15,84,25.2,0.299,22,0 378 | 1,87,60,37,75,37.2,0.509,22,0 379 | 4,156,75,0,0,48.3,0.238,32,1 380 | 0,93,100,39,72,43.4,1.021,35,0 381 | 1,107,72,30,82,30.8,0.821,24,0 382 | 0,105,68,22,0,20.0,0.236,22,0 383 | 1,109,60,8,182,25.4,0.947,21,0 384 | 1,90,62,18,59,25.1,1.268,25,0 385 | 1,125,70,24,110,24.3,0.221,25,0 386 | 1,119,54,13,50,22.3,0.205,24,0 387 | 5,116,74,29,0,32.3,0.660,35,1 388 | 8,105,100,36,0,43.3,0.239,45,1 389 | 5,144,82,26,285,32.0,0.452,58,1 390 | 3,100,68,23,81,31.6,0.949,28,0 391 | 1,100,66,29,196,32.0,0.444,42,0 392 | 5,166,76,0,0,45.7,0.340,27,1 393 | 1,131,64,14,415,23.7,0.389,21,0 394 | 4,116,72,12,87,22.1,0.463,37,0 395 | 4,158,78,0,0,32.9,0.803,31,1 396 | 2,127,58,24,275,27.7,1.600,25,0 397 | 3,96,56,34,115,24.7,0.944,39,0 398 | 0,131,66,40,0,34.3,0.196,22,1 399 | 3,82,70,0,0,21.1,0.389,25,0 400 | 3,193,70,31,0,34.9,0.241,25,1 401 | 4,95,64,0,0,32.0,0.161,31,1 402 | 6,137,61,0,0,24.2,0.151,55,0 403 | 5,136,84,41,88,35.0,0.286,35,1 404 | 9,72,78,25,0,31.6,0.280,38,0 405 | 5,168,64,0,0,32.9,0.135,41,1 406 | 2,123,48,32,165,42.1,0.520,26,0 407 | 4,115,72,0,0,28.9,0.376,46,1 408 | 0,101,62,0,0,21.9,0.336,25,0 409 | 8,197,74,0,0,25.9,1.191,39,1 410 | 1,172,68,49,579,42.4,0.702,28,1 411 | 6,102,90,39,0,35.7,0.674,28,0 412 | 1,112,72,30,176,34.4,0.528,25,0 413 | 1,143,84,23,310,42.4,1.076,22,0 414 | 1,143,74,22,61,26.2,0.256,21,0 415 | 0,138,60,35,167,34.6,0.534,21,1 416 | 3,173,84,33,474,35.7,0.258,22,1 417 | 1,97,68,21,0,27.2,1.095,22,0 418 | 4,144,82,32,0,38.5,0.554,37,1 419 | 1,83,68,0,0,18.2,0.624,27,0 420 | 3,129,64,29,115,26.4,0.219,28,1 421 | 1,119,88,41,170,45.3,0.507,26,0 422 | 2,94,68,18,76,26.0,0.561,21,0 423 | 0,102,64,46,78,40.6,0.496,21,0 424 | 2,115,64,22,0,30.8,0.421,21,0 425 | 8,151,78,32,210,42.9,0.516,36,1 426 | 4,184,78,39,277,37.0,0.264,31,1 427 | 0,94,0,0,0,0.0,0.256,25,0 428 | 1,181,64,30,180,34.1,0.328,38,1 429 | 0,135,94,46,145,40.6,0.284,26,0 430 | 1,95,82,25,180,35.0,0.233,43,1 431 | 2,99,0,0,0,22.2,0.108,23,0 432 | 3,89,74,16,85,30.4,0.551,38,0 433 | 1,80,74,11,60,30.0,0.527,22,0 434 | 2,139,75,0,0,25.6,0.167,29,0 435 | 1,90,68,8,0,24.5,1.138,36,0 436 | 0,141,0,0,0,42.4,0.205,29,1 437 | 12,140,85,33,0,37.4,0.244,41,0 438 | 5,147,75,0,0,29.9,0.434,28,0 439 | 1,97,70,15,0,18.2,0.147,21,0 440 | 6,107,88,0,0,36.8,0.727,31,0 441 | 0,189,104,25,0,34.3,0.435,41,1 442 | 2,83,66,23,50,32.2,0.497,22,0 443 | 4,117,64,27,120,33.2,0.230,24,0 444 | 8,108,70,0,0,30.5,0.955,33,1 445 | 
4,117,62,12,0,29.7,0.380,30,1 446 | 0,180,78,63,14,59.4,2.420,25,1 447 | 1,100,72,12,70,25.3,0.658,28,0 448 | 0,95,80,45,92,36.5,0.330,26,0 449 | 0,104,64,37,64,33.6,0.510,22,1 450 | 0,120,74,18,63,30.5,0.285,26,0 451 | 1,82,64,13,95,21.2,0.415,23,0 452 | 2,134,70,0,0,28.9,0.542,23,1 453 | 0,91,68,32,210,39.9,0.381,25,0 454 | 2,119,0,0,0,19.6,0.832,72,0 455 | 2,100,54,28,105,37.8,0.498,24,0 456 | 14,175,62,30,0,33.6,0.212,38,1 457 | 1,135,54,0,0,26.7,0.687,62,0 458 | 5,86,68,28,71,30.2,0.364,24,0 459 | 10,148,84,48,237,37.6,1.001,51,1 460 | 9,134,74,33,60,25.9,0.460,81,0 461 | 9,120,72,22,56,20.8,0.733,48,0 462 | 1,71,62,0,0,21.8,0.416,26,0 463 | 8,74,70,40,49,35.3,0.705,39,0 464 | 5,88,78,30,0,27.6,0.258,37,0 465 | 10,115,98,0,0,24.0,1.022,34,0 466 | 0,124,56,13,105,21.8,0.452,21,0 467 | 0,74,52,10,36,27.8,0.269,22,0 468 | 0,97,64,36,100,36.8,0.600,25,0 469 | 8,120,0,0,0,30.0,0.183,38,1 470 | 6,154,78,41,140,46.1,0.571,27,0 471 | 1,144,82,40,0,41.3,0.607,28,0 472 | 0,137,70,38,0,33.2,0.170,22,0 473 | 0,119,66,27,0,38.8,0.259,22,0 474 | 7,136,90,0,0,29.9,0.210,50,0 475 | 4,114,64,0,0,28.9,0.126,24,0 476 | 0,137,84,27,0,27.3,0.231,59,0 477 | 2,105,80,45,191,33.7,0.711,29,1 478 | 7,114,76,17,110,23.8,0.466,31,0 479 | 8,126,74,38,75,25.9,0.162,39,0 480 | 4,132,86,31,0,28.0,0.419,63,0 481 | 3,158,70,30,328,35.5,0.344,35,1 482 | 0,123,88,37,0,35.2,0.197,29,0 483 | 4,85,58,22,49,27.8,0.306,28,0 484 | 0,84,82,31,125,38.2,0.233,23,0 485 | 0,145,0,0,0,44.2,0.630,31,1 486 | 0,135,68,42,250,42.3,0.365,24,1 487 | 1,139,62,41,480,40.7,0.536,21,0 488 | 0,173,78,32,265,46.5,1.159,58,0 489 | 4,99,72,17,0,25.6,0.294,28,0 490 | 8,194,80,0,0,26.1,0.551,67,0 491 | 2,83,65,28,66,36.8,0.629,24,0 492 | 2,89,90,30,0,33.5,0.292,42,0 493 | 4,99,68,38,0,32.8,0.145,33,0 494 | 4,125,70,18,122,28.9,1.144,45,1 495 | 3,80,0,0,0,0.0,0.174,22,0 496 | 6,166,74,0,0,26.6,0.304,66,0 497 | 5,110,68,0,0,26.0,0.292,30,0 498 | 2,81,72,15,76,30.1,0.547,25,0 499 | 7,195,70,33,145,25.1,0.163,55,1 500 | 6,154,74,32,193,29.3,0.839,39,0 501 | 2,117,90,19,71,25.2,0.313,21,0 502 | 3,84,72,32,0,37.2,0.267,28,0 503 | 6,0,68,41,0,39.0,0.727,41,1 504 | 7,94,64,25,79,33.3,0.738,41,0 505 | 3,96,78,39,0,37.3,0.238,40,0 506 | 10,75,82,0,0,33.3,0.263,38,0 507 | 0,180,90,26,90,36.5,0.314,35,1 508 | 1,130,60,23,170,28.6,0.692,21,0 509 | 2,84,50,23,76,30.4,0.968,21,0 510 | 8,120,78,0,0,25.0,0.409,64,0 511 | 12,84,72,31,0,29.7,0.297,46,1 512 | 0,139,62,17,210,22.1,0.207,21,0 513 | 9,91,68,0,0,24.2,0.200,58,0 514 | 2,91,62,0,0,27.3,0.525,22,0 515 | 3,99,54,19,86,25.6,0.154,24,0 516 | 3,163,70,18,105,31.6,0.268,28,1 517 | 9,145,88,34,165,30.3,0.771,53,1 518 | 7,125,86,0,0,37.6,0.304,51,0 519 | 13,76,60,0,0,32.8,0.180,41,0 520 | 6,129,90,7,326,19.6,0.582,60,0 521 | 2,68,70,32,66,25.0,0.187,25,0 522 | 3,124,80,33,130,33.2,0.305,26,0 523 | 6,114,0,0,0,0.0,0.189,26,0 524 | 9,130,70,0,0,34.2,0.652,45,1 525 | 3,125,58,0,0,31.6,0.151,24,0 526 | 3,87,60,18,0,21.8,0.444,21,0 527 | 1,97,64,19,82,18.2,0.299,21,0 528 | 3,116,74,15,105,26.3,0.107,24,0 529 | 0,117,66,31,188,30.8,0.493,22,0 530 | 0,111,65,0,0,24.6,0.660,31,0 531 | 2,122,60,18,106,29.8,0.717,22,0 532 | 0,107,76,0,0,45.3,0.686,24,0 533 | 1,86,66,52,65,41.3,0.917,29,0 534 | 6,91,0,0,0,29.8,0.501,31,0 535 | 1,77,56,30,56,33.3,1.251,24,0 536 | 4,132,0,0,0,32.9,0.302,23,1 537 | 0,105,90,0,0,29.6,0.197,46,0 538 | 0,57,60,0,0,21.7,0.735,67,0 539 | 0,127,80,37,210,36.3,0.804,23,0 540 | 3,129,92,49,155,36.4,0.968,32,1 541 | 8,100,74,40,215,39.4,0.661,43,1 542 | 3,128,72,25,190,32.4,0.549,27,1 543 | 
10,90,85,32,0,34.9,0.825,56,1 544 | 4,84,90,23,56,39.5,0.159,25,0 545 | 1,88,78,29,76,32.0,0.365,29,0 546 | 8,186,90,35,225,34.5,0.423,37,1 547 | 5,187,76,27,207,43.6,1.034,53,1 548 | 4,131,68,21,166,33.1,0.160,28,0 549 | 1,164,82,43,67,32.8,0.341,50,0 550 | 4,189,110,31,0,28.5,0.680,37,0 551 | 1,116,70,28,0,27.4,0.204,21,0 552 | 3,84,68,30,106,31.9,0.591,25,0 553 | 6,114,88,0,0,27.8,0.247,66,0 554 | 1,88,62,24,44,29.9,0.422,23,0 555 | 1,84,64,23,115,36.9,0.471,28,0 556 | 7,124,70,33,215,25.5,0.161,37,0 557 | 1,97,70,40,0,38.1,0.218,30,0 558 | 8,110,76,0,0,27.8,0.237,58,0 559 | 11,103,68,40,0,46.2,0.126,42,0 560 | 11,85,74,0,0,30.1,0.300,35,0 561 | 6,125,76,0,0,33.8,0.121,54,1 562 | 0,198,66,32,274,41.3,0.502,28,1 563 | 1,87,68,34,77,37.6,0.401,24,0 564 | 6,99,60,19,54,26.9,0.497,32,0 565 | 0,91,80,0,0,32.4,0.601,27,0 566 | 2,95,54,14,88,26.1,0.748,22,0 567 | 1,99,72,30,18,38.6,0.412,21,0 568 | 6,92,62,32,126,32.0,0.085,46,0 569 | 4,154,72,29,126,31.3,0.338,37,0 570 | 0,121,66,30,165,34.3,0.203,33,1 571 | 3,78,70,0,0,32.5,0.270,39,0 572 | 2,130,96,0,0,22.6,0.268,21,0 573 | 3,111,58,31,44,29.5,0.430,22,0 574 | 2,98,60,17,120,34.7,0.198,22,0 575 | 1,143,86,30,330,30.1,0.892,23,0 576 | 1,119,44,47,63,35.5,0.280,25,0 577 | 6,108,44,20,130,24.0,0.813,35,0 578 | 2,118,80,0,0,42.9,0.693,21,1 579 | 10,133,68,0,0,27.0,0.245,36,0 580 | 2,197,70,99,0,34.7,0.575,62,1 581 | 0,151,90,46,0,42.1,0.371,21,1 582 | 6,109,60,27,0,25.0,0.206,27,0 583 | 12,121,78,17,0,26.5,0.259,62,0 584 | 8,100,76,0,0,38.7,0.190,42,0 585 | 8,124,76,24,600,28.7,0.687,52,1 586 | 1,93,56,11,0,22.5,0.417,22,0 587 | 8,143,66,0,0,34.9,0.129,41,1 588 | 6,103,66,0,0,24.3,0.249,29,0 589 | 3,176,86,27,156,33.3,1.154,52,1 590 | 0,73,0,0,0,21.1,0.342,25,0 591 | 11,111,84,40,0,46.8,0.925,45,1 592 | 2,112,78,50,140,39.4,0.175,24,0 593 | 3,132,80,0,0,34.4,0.402,44,1 594 | 2,82,52,22,115,28.5,1.699,25,0 595 | 6,123,72,45,230,33.6,0.733,34,0 596 | 0,188,82,14,185,32.0,0.682,22,1 597 | 0,67,76,0,0,45.3,0.194,46,0 598 | 1,89,24,19,25,27.8,0.559,21,0 599 | 1,173,74,0,0,36.8,0.088,38,1 600 | 1,109,38,18,120,23.1,0.407,26,0 601 | 1,108,88,19,0,27.1,0.400,24,0 602 | 6,96,0,0,0,23.7,0.190,28,0 603 | 1,124,74,36,0,27.8,0.100,30,0 604 | 7,150,78,29,126,35.2,0.692,54,1 605 | 4,183,0,0,0,28.4,0.212,36,1 606 | 1,124,60,32,0,35.8,0.514,21,0 607 | 1,181,78,42,293,40.0,1.258,22,1 608 | 1,92,62,25,41,19.5,0.482,25,0 609 | 0,152,82,39,272,41.5,0.270,27,0 610 | 1,111,62,13,182,24.0,0.138,23,0 611 | 3,106,54,21,158,30.9,0.292,24,0 612 | 3,174,58,22,194,32.9,0.593,36,1 613 | 7,168,88,42,321,38.2,0.787,40,1 614 | 6,105,80,28,0,32.5,0.878,26,0 615 | 11,138,74,26,144,36.1,0.557,50,1 616 | 3,106,72,0,0,25.8,0.207,27,0 617 | 6,117,96,0,0,28.7,0.157,30,0 618 | 2,68,62,13,15,20.1,0.257,23,0 619 | 9,112,82,24,0,28.2,1.282,50,1 620 | 0,119,0,0,0,32.4,0.141,24,1 621 | 2,112,86,42,160,38.4,0.246,28,0 622 | 2,92,76,20,0,24.2,1.698,28,0 623 | 6,183,94,0,0,40.8,1.461,45,0 624 | 0,94,70,27,115,43.5,0.347,21,0 625 | 2,108,64,0,0,30.8,0.158,21,0 626 | 4,90,88,47,54,37.7,0.362,29,0 627 | 0,125,68,0,0,24.7,0.206,21,0 628 | 0,132,78,0,0,32.4,0.393,21,0 629 | 5,128,80,0,0,34.6,0.144,45,0 630 | 4,94,65,22,0,24.7,0.148,21,0 631 | 7,114,64,0,0,27.4,0.732,34,1 632 | 0,102,78,40,90,34.5,0.238,24,0 633 | 2,111,60,0,0,26.2,0.343,23,0 634 | 1,128,82,17,183,27.5,0.115,22,0 635 | 10,92,62,0,0,25.9,0.167,31,0 636 | 13,104,72,0,0,31.2,0.465,38,1 637 | 5,104,74,0,0,28.8,0.153,48,0 638 | 2,94,76,18,66,31.6,0.649,23,0 639 | 7,97,76,32,91,40.9,0.871,32,1 640 | 1,100,74,12,46,19.5,0.149,28,0 641 | 
0,102,86,17,105,29.3,0.695,27,0 642 | 4,128,70,0,0,34.3,0.303,24,0 643 | 6,147,80,0,0,29.5,0.178,50,1 644 | 4,90,0,0,0,28.0,0.610,31,0 645 | 3,103,72,30,152,27.6,0.730,27,0 646 | 2,157,74,35,440,39.4,0.134,30,0 647 | 1,167,74,17,144,23.4,0.447,33,1 648 | 0,179,50,36,159,37.8,0.455,22,1 649 | 11,136,84,35,130,28.3,0.260,42,1 650 | 0,107,60,25,0,26.4,0.133,23,0 651 | 1,91,54,25,100,25.2,0.234,23,0 652 | 1,117,60,23,106,33.8,0.466,27,0 653 | 5,123,74,40,77,34.1,0.269,28,0 654 | 2,120,54,0,0,26.8,0.455,27,0 655 | 1,106,70,28,135,34.2,0.142,22,0 656 | 2,155,52,27,540,38.7,0.240,25,1 657 | 2,101,58,35,90,21.8,0.155,22,0 658 | 1,120,80,48,200,38.9,1.162,41,0 659 | 11,127,106,0,0,39.0,0.190,51,0 660 | 3,80,82,31,70,34.2,1.292,27,1 661 | 10,162,84,0,0,27.7,0.182,54,0 662 | 1,199,76,43,0,42.9,1.394,22,1 663 | 8,167,106,46,231,37.6,0.165,43,1 664 | 9,145,80,46,130,37.9,0.637,40,1 665 | 6,115,60,39,0,33.7,0.245,40,1 666 | 1,112,80,45,132,34.8,0.217,24,0 667 | 4,145,82,18,0,32.5,0.235,70,1 668 | 10,111,70,27,0,27.5,0.141,40,1 669 | 6,98,58,33,190,34.0,0.430,43,0 670 | 9,154,78,30,100,30.9,0.164,45,0 671 | 6,165,68,26,168,33.6,0.631,49,0 672 | 1,99,58,10,0,25.4,0.551,21,0 673 | 10,68,106,23,49,35.5,0.285,47,0 674 | 3,123,100,35,240,57.3,0.880,22,0 675 | 8,91,82,0,0,35.6,0.587,68,0 676 | 6,195,70,0,0,30.9,0.328,31,1 677 | 9,156,86,0,0,24.8,0.230,53,1 678 | 0,93,60,0,0,35.3,0.263,25,0 679 | 3,121,52,0,0,36.0,0.127,25,1 680 | 2,101,58,17,265,24.2,0.614,23,0 681 | 2,56,56,28,45,24.2,0.332,22,0 682 | 0,162,76,36,0,49.6,0.364,26,1 683 | 0,95,64,39,105,44.6,0.366,22,0 684 | 4,125,80,0,0,32.3,0.536,27,1 685 | 5,136,82,0,0,0.0,0.640,69,0 686 | 2,129,74,26,205,33.2,0.591,25,0 687 | 3,130,64,0,0,23.1,0.314,22,0 688 | 1,107,50,19,0,28.3,0.181,29,0 689 | 1,140,74,26,180,24.1,0.828,23,0 690 | 1,144,82,46,180,46.1,0.335,46,1 691 | 8,107,80,0,0,24.6,0.856,34,0 692 | 13,158,114,0,0,42.3,0.257,44,1 693 | 2,121,70,32,95,39.1,0.886,23,0 694 | 7,129,68,49,125,38.5,0.439,43,1 695 | 2,90,60,0,0,23.5,0.191,25,0 696 | 7,142,90,24,480,30.4,0.128,43,1 697 | 3,169,74,19,125,29.9,0.268,31,1 698 | 0,99,0,0,0,25.0,0.253,22,0 699 | 4,127,88,11,155,34.5,0.598,28,0 700 | 4,118,70,0,0,44.5,0.904,26,0 701 | 2,122,76,27,200,35.9,0.483,26,0 702 | 6,125,78,31,0,27.6,0.565,49,1 703 | 1,168,88,29,0,35.0,0.905,52,1 704 | 2,129,0,0,0,38.5,0.304,41,0 705 | 4,110,76,20,100,28.4,0.118,27,0 706 | 6,80,80,36,0,39.8,0.177,28,0 707 | 10,115,0,0,0,0.0,0.261,30,1 708 | 2,127,46,21,335,34.4,0.176,22,0 709 | 9,164,78,0,0,32.8,0.148,45,1 710 | 2,93,64,32,160,38.0,0.674,23,1 711 | 3,158,64,13,387,31.2,0.295,24,0 712 | 5,126,78,27,22,29.6,0.439,40,0 713 | 10,129,62,36,0,41.2,0.441,38,1 714 | 0,134,58,20,291,26.4,0.352,21,0 715 | 3,102,74,0,0,29.5,0.121,32,0 716 | 7,187,50,33,392,33.9,0.826,34,1 717 | 3,173,78,39,185,33.8,0.970,31,1 718 | 10,94,72,18,0,23.1,0.595,56,0 719 | 1,108,60,46,178,35.5,0.415,24,0 720 | 5,97,76,27,0,35.6,0.378,52,1 721 | 4,83,86,19,0,29.3,0.317,34,0 722 | 1,114,66,36,200,38.1,0.289,21,0 723 | 1,149,68,29,127,29.3,0.349,42,1 724 | 5,117,86,30,105,39.1,0.251,42,0 725 | 1,111,94,0,0,32.8,0.265,45,0 726 | 4,112,78,40,0,39.4,0.236,38,0 727 | 1,116,78,29,180,36.1,0.496,25,0 728 | 0,141,84,26,0,32.4,0.433,22,0 729 | 2,175,88,0,0,22.9,0.326,22,0 730 | 2,92,52,0,0,30.1,0.141,22,0 731 | 3,130,78,23,79,28.4,0.323,34,1 732 | 8,120,86,0,0,28.4,0.259,22,1 733 | 2,174,88,37,120,44.5,0.646,24,1 734 | 2,106,56,27,165,29.0,0.426,22,0 735 | 2,105,75,0,0,23.3,0.560,53,0 736 | 4,95,60,32,0,35.4,0.284,28,0 737 | 0,126,86,27,120,27.4,0.515,21,0 738 | 
8,65,72,23,0,32.0,0.600,42,0 739 | 2,99,60,17,160,36.6,0.453,21,0 740 | 1,102,74,0,0,39.5,0.293,42,1 741 | 11,120,80,37,150,42.3,0.785,48,1 742 | 3,102,44,20,94,30.8,0.400,26,0 743 | 1,109,58,18,116,28.5,0.219,22,0 744 | 9,140,94,0,0,32.7,0.734,45,1 745 | 13,153,88,37,140,40.6,1.174,39,0 746 | 12,100,84,33,105,30.0,0.488,46,0 747 | 1,147,94,41,0,49.3,0.358,27,1 748 | 1,81,74,41,57,46.3,1.096,32,0 749 | 3,187,70,22,200,36.4,0.408,36,1 750 | 6,162,62,0,0,24.3,0.178,50,1 751 | 4,136,70,0,0,31.2,1.182,22,1 752 | 1,121,78,39,74,39.0,0.261,28,0 753 | 3,108,62,24,0,26.0,0.223,25,0 754 | 0,181,88,44,510,43.3,0.222,26,1 755 | 8,154,78,32,0,32.4,0.443,45,1 756 | 1,128,88,39,110,36.5,1.057,37,1 757 | 7,137,90,41,0,32.0,0.391,39,0 758 | 0,123,72,0,0,36.3,0.258,52,1 759 | 1,106,76,0,0,37.5,0.197,26,0 760 | 6,190,92,0,0,35.5,0.278,66,1 761 | 2,88,58,26,16,28.4,0.766,22,0 762 | 9,170,74,31,0,44.0,0.403,43,1 763 | 9,89,62,0,0,22.5,0.142,33,0 764 | 10,101,76,48,180,32.9,0.171,63,0 765 | 2,122,70,27,0,36.8,0.340,27,0 766 | 5,121,72,23,112,26.2,0.245,30,0 767 | 1,126,60,0,0,30.1,0.349,47,1 768 | 1,93,70,31,0,30.4,0.315,23,0 -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ML_Models -------------------------------------------------------------------------------- /logistic_regression.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "name": "logistic_regression.ipynb", 7 | "provenance": [], 8 | "collapsed_sections": [] 9 | }, 10 | "kernelspec": { 11 | "name": "python3", 12 | "display_name": "Python 3" 13 | } 14 | }, 15 | "cells": [ 16 | { 17 | "cell_type": "markdown", 18 | "metadata": { 19 | "id": "XaoTTFUduK8L", 20 | "colab_type": "text" 21 | }, 22 | "source": [ 23 | "[![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1kqG3UhIcnscMC3sm4hdEHFmIMpmZWEEG?usp=sharing)" 24 | ] 25 | }, 26 | { 27 | "cell_type": "markdown", 28 | "metadata": { 29 | "id": "laB2JbdN0cvV", 30 | "colab_type": "text" 31 | }, 32 | "source": [ 33 | "# Introducing Logistic Regression\n", 34 | "Logistic Regression is a classification algorithm. It is used to predict a binary outcome (1 / 0, Yes / No, True / False) given a set of independent variable/s. You can also think of logistic regression as a special case of linear regression when the outcome variable is categorical. Logistic Regression is a supervised machine learning algorithm/model.\n", 35 | "\n", 36 | "## Agenda\n", 37 | "* About Dataset\n", 38 | "* Loading Libraries\n", 39 | "* Loading Data\n", 40 | "* Understanding Data\n", 41 | "* Separating Input/Independent and Output/Dependent Variables\n", 42 | "* Splitting the data\n", 43 | "* Building Model\n", 44 | "* Prediction\n", 45 | "* Model Performance\n", 46 | "\n", 47 | "## About Dataset\n", 48 | "The dataset has two columns - age (age of the person/customer) and bought_insurance (whether the customer bought insurance or not). 
If bought_insurance = 1, the customer bought insurance, and if bought_insurance = 0, the customer did not buy the insurance.\n", 49 | "\n", 50 | "Dataset Link: [insurance_data](https://raw.githubusercontent.com/codebasics/py/master/ML/7_logistic_reg/insurance_data.csv)" 51 | ] 52 | },
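{ "cell_type": "markdown", "metadata": {}, "source": [ "**A quick look at the math** (a minimal sketch for intuition before we build the model): logistic regression takes a linear combination of the input and passes it through the sigmoid function, which squashes any real number into a probability between 0 and 1:\n", "\n", "$$P(y = 1 \\mid x) = \\frac{1}{1 + e^{-(\\beta_0 + \\beta_1 x)}}$$\n", "\n", "Here $y$ stands for bought_insurance, $x$ for age, and $\\beta_0, \\beta_1$ are coefficients learned from the training data. A common decision rule (and scikit-learn's default) is to predict class 1 whenever this probability exceeds 0.5." ] }, { "cell_type": "code", "metadata": {}, "source": [ "# A tiny, self-contained illustration of the sigmoid (a hypothetical helper for intuition only;\n", "# it is not used in the rest of the notebook)\n", "import numpy as np\n", "\n", "def sigmoid(z):\n", "    # Map any real-valued input into the open interval (0, 1)\n", "    return 1 / (1 + np.exp(-z))\n", "\n", "sigmoid(0), sigmoid(3), sigmoid(-3)  # (0.5, ~0.95, ~0.05)" ], "execution_count": null, "outputs": [] },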
" 190 | ], 191 | "text/plain": [ 192 | " age bought_insurance\n", 193 | "0 22 0\n", 194 | "1 25 0\n", 195 | "2 47 1\n", 196 | "3 52 0\n", 197 | "4 46 1" 198 | ] 199 | }, 200 | "metadata": { 201 | "tags": [] 202 | }, 203 | "execution_count": 3 204 | } 205 | ] 206 | }, 207 | { 208 | "cell_type": "markdown", 209 | "metadata": { 210 | "id": "rA4wZUbx51KM", 211 | "colab_type": "text" 212 | }, 213 | "source": [ 214 | "There are two columns:\n", 215 | "\n", 216 | "* age: The age of the customer\n", 217 | "* bought_insurance: If the customer bought insurance (1) or not (0). This is our target variable which we are interested to know.\n", 218 | "\n", 219 | "Since our target variable has only two different classes/values, we can say it as a binary classification problem. And Logistic Regression is used for binary classification problems." 220 | ] 221 | }, 222 | { 223 | "cell_type": "markdown", 224 | "metadata": { 225 | "id": "qEYG8who6aXb", 226 | "colab_type": "text" 227 | }, 228 | "source": [ 229 | "Looking the relationship between age and bought_insurance using scatter plot." 230 | ] 231 | }, 232 | { 233 | "cell_type": "code", 234 | "metadata": { 235 | "id": "UOyg4-0K5xDv", 236 | "colab_type": "code", 237 | "colab": { 238 | "base_uri": "https://localhost:8080/", 239 | "height": 282 240 | }, 241 | "outputId": "2c5c7a82-ee96-45c6-85dc-f8d154c84580" 242 | }, 243 | "source": [ 244 | "plt.scatter(data.age,data.bought_insurance,marker='+',color='red')" 245 | ], 246 | "execution_count": null, 247 | "outputs": [ 248 | { 249 | "output_type": "execute_result", 250 | "data": { 251 | "text/plain": [ 252 | "" 253 | ] 254 | }, 255 | "metadata": { 256 | "tags": [] 257 | }, 258 | "execution_count": 4 259 | }, 260 | { 261 | "output_type": "display_data", 262 | "data": { 263 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAOoUlEQVR4nO3df6zdd13H8eeLlokCUqFXsqyFzljEBmFbbgoEopUfppukTRSXVWeQII0JNRjxx1AzccZEJAEhTnQgokSYdYo2s1rJ3OaPuLlbx6+2Fi9l2FvBXWCbiQRm9e0f51s4uz295/T23J67z3k+kpt7Pj/u+b7vp+e87iffc06/qSokSY9/T5h0AZKk8TDQJakRBrokNcJAl6RGGOiS1Ij1kzrwxo0ba8uWLZM6vCQ9Lh0+fPgLVTUzaGxigb5lyxbm5uYmdXhJelxK8tlzjXnKRZIaYaBLUiMMdElqhIEuSY0w0CWpEUMDPcn7kjyY5JPnGE+SdyWZT/LxJFeNv8zG7NjR+9Las5b+bZarZaVjq3G8cVtLv/dKf25CaznKDv39wM5lxq8GtnZfe4F3X3hZkqTzNfR96FX1d0m2LDNlN/CH1ft/eO9JsiHJpVX1uTHV2I4zf5Xvvvux7bvumkAxeoy19G+zXC0rHVuN443bWvq911Kd52Ec59AvA072tRe6vrMk2ZtkLsnc4uLiGA4tSTojo1zgotuh315Vzxswdjvw61X1D137DuDnq2rZj4HOzs7W1H5S1J352rWW/m2Wq2WlY6txvHFbS7/3Sn9uFdcyyeGqmh00No4d+ilgc197U9cnSbqIxrFD/35gH3AN8ELgXVW1fdh9TvUOXZJWaLkd+tAXRZN8CNgBbEyyAPwy8ESAqvod4CC9MJ8Hvgy8djxlS5LOxyjvctkzZLyAN4ytIknSivhJUUlqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGjFSoCfZmeR4kvkkNwwYf1aSO5Pcn+TjSa4Zf6mSpOUMDfQk64CbgauBbcCeJNuWTPslYH9VXQlcB/z2uAuVJC1vlB36dmC+qk5U1aPArcDuJXMK+Obu9tOA/xhfiZKkUYwS6JcBJ/vaC11fv7cA1ydZAA4CPznojpLsTTKXZG5xcXEF5UqSzmVcL4ruAd5fVZuAa4APJDnrvqvqlqqararZmZmZMR1akgSjBfopYHNfe1PX1+91wH6Aqvon4EnAxnEUKEkazSiBfh+wNcnlSS6h96LngSVz/h14OUCS76QX6J5TkaSLaGigV9VpYB9wCDhG790sR5LclGRXN+1NwOuTfAz4EPBjVVWrVbQk6WzrR5lUVQfpvdjZ33dj3+2jwEvGW5ok6Xz4SVFJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUiJECPcnOJMeTzCe54Rxzrk1yNMmRJB8cb5mSpGHWD5uQZB1wM/BKYAG4L8mBqjraN2cr8GbgJVX1UJJvXa2CJUmDjbJD3w7MV9WJqnoUuBXYvWTO64Gbq+ohgKp6cLxlSpKGGSXQLwNO9rUXur5+zwGek+Qfk9yTZOegO0qyN8lckrnFxcWVVSxJGmhcL4quB7YCO4A9wHuSbFg6qapuqarZqpqdmZkZ06ElSTBaoJ8CNve1N3V9/RaAA1X1P1X1GeBT9AJeknSRjBLo9wFbk1ye5BLgOuDAkjl/Tm93TpKN9E7BnBhjnZKkIYYGelWdBvYBh4BjwP6qOpLkpiS7ummHgC8mOQrcCfxsVX1xtYqWJJ0tVTWRA8/Oztbc3NxEji1Jj1dJDlfV7KAxPykqSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjRgr0JDuTHE8yn+SGZeb9YJJKMju+EiVJoxga6EnWATcDVwPbgD1Jtg2Y91TgjcC94y5SkjTcKDv07cB8VZ2oqkeBW4HdA+b9KvBW4CtjrE+SNKJRAv0y4GRfe6Hr+5okVwGbq+ovl7ujJHuTzCWZW1xcPO9iJUnndsEviiZ5AvB24E3D5lbVLVU1W1WzMzMzF3poSVKfUQL9FLC5r72p6zvjqcDzgLuSPAC8CDjgC6OSdHGNEuj3AVuTXJ7kEuA64MCZwap6pKo2VtWWqtoC3APsqqq5ValYkjTQ0ECvqtPAPuAQcAzYX1VHktyUZNdqFyhJGs36USZV1UHg4JK+G88xd8eFlyVJOl9+UlSSGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1YqRAT7IzyfEk80luGDD+00mOJvl4kjuSPHv8pUqSljM00JOsA24Grga2AXuSbFsy7X5gtqqeD9wG/Ma4C5UkLW+UHfp2YL6qTlTVo8CtwO7+CVV1Z1V9uWveA2wab5mSpGFGCfTLgJN97YWu71xeB/zVoIEke5PMJZlbXFwcvUpJ0lBjfVE0yfXALPC2QeNVdUtVzVbV7MzMzDgPLUlTb/0Ic04Bm/vam7q+x0jyCuAXge+pqq+OpzxJ0qhG2aHfB2xNcnmSS4DrgAP9E5JcCfwusKuqHhx/mZKkYYYGelWdBvYBh4BjwP6qOpLkpiS7umlvA54C/EmSjyY5cI67kyStklFOuVBVB4GDS/pu7Lv9ijHXJUk6T35SVJIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRhjoktQIA12SGmGgS1IjDHRJaoSBLkmNMNAlqREGuiQ1wkCXpEYY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakRBrokNcJAl6RGGOiS1AgDXZIaYaBLUiMMdElqhIEuSY0w0CWpEQa6JDXCQJekRowU6El2JjmeZD7JDQPGvyHJH3fj9ybZMu5Cv2bDht7XIDt29L7O13I/t9Kxi3m81ahRulCr8djz8bysoYGeZB1wM3A1sA3Yk2TbkmmvAx6qqm8H3gG8ddyFSpKWt36EOduB+ao6AZ
DkVmA3cLRvzm7gLd3t24DfSpKqqrFVemZX/sgjj20//PDX/2LffXfv+5n2XXctf5/L/dxKxy7m8c4YZ43ShVqNx56P55GMcsrlMuBkX3uh6xs4p6pOA48Az1h6R0n2JplLMre4uLiyiiVJA2XYJjrJq4GdVfXjXftHgRdW1b6+OZ/s5ix07U93c75wrvudnZ2tubm586+4f2e+1Er/ai/3cysdu5jHW40apQu1Go89H88kOVxVs4PGRtmhnwI297U3dX0D5yRZDzwN+OL5lypJWqlRdujrgU8BL6cX3PcBP1xVR/rmvAH4rqr6iSTXAT9QVdcud78r3qFL0hRbboc+9EXRqjqdZB9wCFgHvK+qjiS5CZirqgPA7wEfSDIPfAm4bnzlS5JGMcq7XKiqg8DBJX039t3+CvBD4y1NknQ+/KSoJDXCQJekRhjoktQIA12SGjH0bYurduBkEfjsRT7sRuCcH3aaUq7J2VyTwVyXs01iTZ5dVTODBiYW6JOQZO5c79+cVq7J2VyTwVyXs621NfGUiyQ1wkCXpEZMW6DfMukC1iDX5GyuyWCuy9nW1JpM1Tl0SWrZtO3QJalZBrokNaLZQE+yOcmdSY4mOZLkjV3/05N8JMm/dd+/ZdK1XixJnpTkn5N8rFuTX+n6L+8u7j3fXez7kknXerElWZfk/iS3d+2pXpMkDyT5RJKPJpnr+qb2uQOQZEOS25L8a5JjSV681tak2UAHTgNvqqptwIuAN3QXt74BuKOqtgJ3dO1p8VXgZVX1AuAKYGeSF9G7qPc7uot8P0Tvot/T5o3Asb62awLfW1VX9L3PepqfOwDvBP66qp4LvIDe42VtrUlVTcUX8BfAK4HjwKVd36XA8UnXNqH1+CbgX4AX0vuk2/qu/8XAoUnXd5HXYhO9J+PLgNuBuCY8AGxc0je1zx16V2H7DN0bSdbqmrS8Q/+aJFuAK4F7gWdW1ee6oc8Dz5xQWRPRnVr4KPAg8BHg08DD1bu4Nwy+CHjrfhP4OeD/uvYzcE0K+Jskh5Ps7fqm+blzObAI/H53au69SZ7MGluT5gM9yVOAPwV+qqr+q3+sen9Wp+p9m1X1v1V1Bb1d6XbguRMuaaKSvAp4sKoOT7qWNealVXUVcDW905Xf3T84hc+d9cBVwLur6krgv1lyemUtrEnTgZ7kifTC/I+q6s+67v9Mcmk3fim9nerUqaqHgTvpnU7Y0F07FgZfBLxlLwF2JXkAuJXeaZd3Mt1rQlWd6r4/CHyY3h//aX7uLAALVXVv176NXsCvqTVpNtCThN61To9V1dv7hg4Ar+luv4beufWpkGQmyYbu9jfSe03hGL1gf3U3barWpKreXFWbqmoLvWvh/m1V/QhTvCZJnpzkqWduA98HfJIpfu5U1eeBk0m+o+t6OXCUNbYmzX5SNMlLgb8HPsHXz43+Ar3z6PuBZ9H773uvraovTaTIiyzJ84E/oHex7ycA+6vqpiTfRm93+nTgfuD6qvrq5CqdjCQ7gJ+pqldN85p0v/uHu+Z64INV9WtJnsGUPncAklwBvBe4BDgBvJbuecQaWZNmA12Spk2zp1wkadoY6JLUCANdkhphoEtSIwx0SWqEgS5JjTDQJakR/w+oGVyw4/IRsgAAAABJRU5ErkJggg==\n", 264 | "text/plain": [ 265 | "
" 266 | ] 267 | }, 268 | "metadata": { 269 | "tags": [], 270 | "needs_background": "light" 271 | } 272 | } 273 | ] 274 | }, 275 | { 276 | "cell_type": "markdown", 277 | "metadata": { 278 | "id": "m003O7uL6xc8", 279 | "colab_type": "text" 280 | }, 281 | "source": [ 282 | "We can easily observe from the scatter plot that generally the customer who is of age less than 30 years has not bought the insurance." 283 | ] 284 | }, 285 | { 286 | "cell_type": "markdown", 287 | "metadata": { 288 | "id": "uWzLw-d07EQs", 289 | "colab_type": "text" 290 | }, 291 | "source": [ 292 | "## Separating Input and Output Variables\n", 293 | "Before building any machine learning model, we always separate the input variables and output variables. Input variables are those quantities whose values are changed naturally in an experiment, whereas output variable is the one whose values are dependent on the input variables. So, input variables are also known as independent variables as its values are not dependent on any other quantity, and output variable/s are also known as dependent variables as its values are dependent on other variable i.e. input variables. Like here in this data, we can see that whether a person will buy insurance or not is dependent on the age of that person\n", 294 | "\n", 295 | "By convention input variables are represented with 'X' and output variables are represented with 'y'." 296 | ] 297 | }, 298 | { 299 | "cell_type": "code", 300 | "metadata": { 301 | "id": "BslsxtTY6sd3", 302 | "colab_type": "code", 303 | "colab": {} 304 | }, 305 | "source": [ 306 | "X = data[['age']] # input variable\n", 307 | "\n", 308 | "y = data['bought_insurance'] # output variable" 309 | ], 310 | "execution_count": null, 311 | "outputs": [] 312 | }, 313 | { 314 | "cell_type": "markdown", 315 | "metadata": { 316 | "id": "UsgEP0tH-ArH", 317 | "colab_type": "text" 318 | }, 319 | "source": [ 320 | "If you notice the above code cell, I have used two square brackets while taking input variables and only one square bracket while taking output variable. Why?\n", 321 | "\n", 322 | "All machine learning algorithm accepts input variables as a 2D array and output variable as 1D array. Using two square brackets while selecting the input variables gives you the shape of input variable/s as 2D, but if you use only one square bracket, the shape will be 1D as you can see in the case of y.\n", 323 | "\n", 324 | "Let's check the shapes of X and y." 325 | ] 326 | }, 327 | { 328 | "cell_type": "code", 329 | "metadata": { 330 | "id": "UnfWV_JW9_Br", 331 | "colab_type": "code", 332 | "colab": { 333 | "base_uri": "https://localhost:8080/", 334 | "height": 51 335 | }, 336 | "outputId": "2f58a923-c447-4195-ed57-8ea3c40ec8d6" 337 | }, 338 | "source": [ 339 | "print(\"Shape: \", X.shape, \"Dimension: \", X.ndim)\n", 340 | "print(\"Shape: \", y.shape, \"Dimension: \", y.ndim)" 341 | ], 342 | "execution_count": null, 343 | "outputs": [ 344 | { 345 | "output_type": "stream", 346 | "text": [ 347 | "Shape: (27, 1) Dimension: 2\n", 348 | "Shape: (27,) Dimension: 1\n" 349 | ], 350 | "name": "stdout" 351 | } 352 | ] 353 | }, 354 | { 355 | "cell_type": "markdown", 356 | "metadata": { 357 | "id": "9J3XXD4A_K61", 358 | "colab_type": "text" 359 | }, 360 | "source": [ 361 | "## Splitting the data into Train and Test Set\n", 362 | "We want to check the performance of the model that we built. 
354 | { 355 | "cell_type": "markdown", 356 | "metadata": { 357 | "id": "9J3XXD4A_K61", 358 | "colab_type": "text" 359 | }, 360 | "source": [ 361 | "## Splitting the data into Train and Test Set\n", 362 | "We want to check the performance of the model that we build. For this purpose, we always split the given data (both input and output) into a training set, which will be used to train the model, and a test set, which will be used to check how accurately the model predicts outcomes.\n", 363 | "\n", 364 | "For this we have a function called 'train_test_split' in the 'sklearn.model_selection' module." 365 | ] 366 | }, 367 | { 368 | "cell_type": "code", 369 | "metadata": { 370 | "id": "XQ1snUZP-4_e", 371 | "colab_type": "code", 372 | "colab": {} 373 | }, 374 | "source": [ 375 | "# import train_test_split\n", 376 | "from sklearn.model_selection import train_test_split" 377 | ], 378 | "execution_count": null, 379 | "outputs": [] 380 | }, 381 | { 382 | "cell_type": "code", 383 | "metadata": { 384 | "id": "ev1mslygAtL5", 385 | "colab_type": "code", 386 | "colab": {} 387 | }, 388 | "source": [ 389 | "# split the data\n", 390 | "X_train, X_test, y_train, y_test = train_test_split(X,y,test_size=0.3, random_state = 42)\n", 391 | "\n", 392 | "# X_train: independent/input feature data for training the model\n", 393 | "# y_train: dependent/output feature data for training the model\n", 394 | "# X_test: independent/input feature data for testing the model; will be used to predict the output values\n", 395 | "# y_test: original dependent/output values of X_test; we will compare these values with our predicted values to check the performance of the model.\n", 396 | " \n", 397 | "# test_size = 0.30: 30% of the data goes to the test set and 70% to the train set\n", 398 | "# random_state = 42: this fixes the split, i.e. you get the same split each time you run the code" 399 | ], 400 | "execution_count": null, 401 | "outputs": [] 402 | }, 403 | { 404 | "cell_type": "markdown", 405 | "metadata": { 406 | "id": "J9tTegE3Bn0D", 407 | "colab_type": "text" 408 | }, 409 | "source": [ 410 | "## Building Model\n" 411 | ] 412 | }, 413 | { 414 | "cell_type": "markdown", 415 | "metadata": { 416 | "id": "OiJazgZFjGQu", 417 | "colab_type": "text" 418 | }, 419 | "source": [ 420 | "Now we are finally ready, and we can train the model.\n", 421 | "\n", 422 | "First, we need to import our model - Logistic Regression (again, using the sklearn library).\n", 423 | "\n", 424 | "Then we feed the model both the data (X_train) and the answers for that data (y_train)." 425 | ] 426 | }, 427 | { 428 | "cell_type": "code", 429 | "metadata": { 430 | "id": "nZHXi5CfBlns", 431 | "colab_type": "code", 432 | "colab": {} 433 | }, 434 | "source": [ 435 | "# import Logistic Regression from sklearn.linear_model\n", 436 | "from sklearn.linear_model import LogisticRegression" 437 | ], 438 | "execution_count": null, 439 | "outputs": [] 440 | }, 441 | { 442 | "cell_type": "code", 443 | "metadata": { 444 | "id": "7FCfCfawjDyK", 445 | "colab_type": "code", 446 | "colab": {} 447 | }, 448 | "source": [ 449 | "log_model = LogisticRegression()" 450 | ], 451 | "execution_count": null, 452 | "outputs": [] 453 | }, 454 | { 455 | "cell_type": "code", 456 | "metadata": { 457 | "id": "caHe1oKSjuq-", 458 | "colab_type": "code", 459 | "colab": { 460 | "base_uri": "https://localhost:8080/", 461 | "height": 102 462 | }, 463 | "outputId": "2cbc400c-39f4-4997-95da-db909797678a" 464 | }, 465 | "source": [ 466 | "# Fit the model\n", 467 | "log_model.fit(X_train, y_train)" 468 | ], 469 | "execution_count": null, 470 | "outputs": [ 471 | { 472 | "output_type": "execute_result", 473 | "data": { 474 | "text/plain": [ 475 | "LogisticRegression(C=1.0, class_weight=None, dual=False, fit_intercept=True,\n", 476 | " intercept_scaling=1, l1_ratio=None, max_iter=100,\n", 477 | " multi_class='auto', n_jobs=None, penalty='l2',\n", 478 | " random_state=None, solver='lbfgs', tol=0.0001, verbose=0,\n", 479 | " warm_start=False)" 480 | ] 481 | }, 482 | "metadata": { 483 | "tags": [] 484 | }, 485 | "execution_count": 11 486 | } 487 | ] 488 | }, 489 | { 490 | "cell_type": "markdown", 491 | "metadata": { 492 | "id": "pkxVjh8wkB91", 493 | "colab_type": "text" 494 | }, 495 | "source": [ 496 | "The training itself happens in the last cell above, in the call to the \"fit\" method." 497 | ] 498 | },
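What fit() learns here is just one weight for 'age' plus an intercept; the predicted probability is the sigmoid of that linear combination. A minimal sketch of inspecting this by hand (assuming the fitted `log_model` from above; the age value 40 is an arbitrary example):

```python
import numpy as np

w = log_model.coef_[0][0]    # learned weight for the 'age' feature
b = log_model.intercept_[0]  # learned intercept
print("weight:", w, "intercept:", b)

# Manual probability for a 40-year-old: sigmoid(w*age + b)
age = 40
prob = 1 / (1 + np.exp(-(w * age + b)))
print("P(bought_insurance=1 | age=40) =", prob)
```

This reproduces what predict_proba computes internally; predict() then simply thresholds the probability at 0.5.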
499 | { 500 | "cell_type": "markdown", 501 | "metadata": { 502 | "id": "UON4736ekMZw", 503 | "colab_type": "text" 504 | }, 505 | "source": [ 506 | "## Prediction\n", 507 | "Now the logistic regression model (i.e. log_model) is trained using the X_train and y_train data. Let's predict the target value (i.e. bought_insurance) for the X_test data. We use the \"predict()\" method for prediction." 508 | ] 509 | }, 510 | { 511 | "cell_type": "code", 512 | "metadata": { 513 | "id": "4Fy1LxR_kA-f", 514 | "colab_type": "code", 515 | "colab": {} 516 | }, 517 | "source": [ 518 | "predictions = log_model.predict(X_test)" 519 | ], 520 | "execution_count": null, 521 | "outputs": [] 522 | }, 523 | { 524 | "cell_type": "markdown", 525 | "metadata": { 526 | "id": "eiA_FMiwkxN4", 527 | "colab_type": "text" 528 | }, 529 | "source": [ 530 | "We already have the actual target values (i.e. y_test) for X_test. Let's compare y_test with the values our log_model predicted for X_test." 531 | ] 532 | }, 533 | { 534 | "cell_type": "code", 535 | "metadata": { 536 | "id": "3kSld42hkvcc", 537 | "colab_type": "code", 538 | "colab": { 539 | "base_uri": "https://localhost:8080/", 540 | "height": 34 541 | }, 542 | "outputId": "bc3348a3-0ad5-46ee-ba64-77f8fd29ffcf" 543 | }, 544 | "source": [ 545 | "y_test.values" 546 | ], 547 | "execution_count": null, 548 | "outputs": [ 549 | { 550 | "output_type": "execute_result", 551 | "data": { 552 | "text/plain": [ 553 | "array([1, 0, 1, 0, 0, 0, 1, 1, 0])" 554 | ] 555 | }, 556 | "metadata": { 557 | "tags": [] 558 | }, 559 | "execution_count": 15 560 | } 561 | ] 562 | }, 563 | { 564 | "cell_type": "code", 565 | "metadata": { 566 | "id": "Csji5Qu9lJW7", 567 | "colab_type": "code", 568 | "colab": { 569 | "base_uri": "https://localhost:8080/", 570 | "height": 34 571 | }, 572 | "outputId": "6798ae5b-3845-46d9-e724-572cd118cdd3" 573 | }, 574 | "source": [ 575 | "predictions" 576 | ], 577 | "execution_count": null, 578 | "outputs": [ 579 | { 580 | "output_type": "execute_result", 581 | "data": { 582 | "text/plain": [ 583 | "array([1, 0, 1, 0, 0, 0, 0, 1, 0])" 584 | ] 585 | }, 586 | "metadata": { 587 | "tags": [] 588 | }, 589 | "execution_count": 14 590 | } 591 | ] 592 | }, 593 | { 594 | "cell_type": "markdown", 595 | "metadata": { 596 | "id": "aBpanTWzlZmU", 597 | "colab_type": "text" 598 | }, 599 | "source": [ 600 | "There is one person (the seventh in the test set) who actually bought insurance, but our model predicted that they had not. So our model misclassified one example." 601 | ] 602 | },
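Looking at the predicted probabilities rather than the hard 0/1 labels shows how borderline that misclassified person was. A minimal sketch (assuming `log_model`, `X_test`, and `y_test` from the cells above):

```python
# predict_proba returns [P(class 0), P(class 1)] per row;
# predict() thresholds P(class 1) at 0.5
probs = log_model.predict_proba(X_test)[:, 1]
for age, p, actual in zip(X_test['age'], probs, y_test):
    print(f"age={age}: P(buys insurance)={p:.2f}, actual={actual}")
```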
603 | { 604 | "cell_type": "markdown", 605 | "metadata": { 606 | "id": "dz0ebyPjlzkp", 607 | "colab_type": "text" 608 | }, 609 | "source": [ 610 | "## Model Performance\n", 611 | "We can also check how accurately our model is performing using the 'accuracy_score' function from 'sklearn.metrics'." 612 | ] 613 | }, 614 | { 615 | "cell_type": "code", 616 | "metadata": { 617 | "id": "K2TpjY1NlNlS", 618 | "colab_type": "code", 619 | "colab": {} 620 | }, 621 | "source": [ 622 | "from sklearn.metrics import accuracy_score" 623 | ], 624 | "execution_count": null, 625 | "outputs": [] 626 | }, 627 | { 628 | "cell_type": "code", 629 | "metadata": { 630 | "id": "v0KrwUNsmLV6", 631 | "colab_type": "code", 632 | "colab": { 633 | "base_uri": "https://localhost:8080/", 634 | "height": 34 635 | }, 636 | "outputId": "8391adea-f34d-40df-e9dc-ef78ae6ac09f" 637 | }, 638 | "source": [ 639 | "accuracy_score(y_test, predictions)" 640 | ], 641 | "execution_count": null, 642 | "outputs": [ 643 | { 644 | "output_type": "execute_result", 645 | "data": { 646 | "text/plain": [ 647 | "0.8888888888888888" 648 | ] 649 | }, 650 | "metadata": { 651 | "tags": [] 652 | }, 653 | "execution_count": 17 654 | } 655 | ] 656 | }, 657 | { 658 | "cell_type": "markdown", 659 | "metadata": { 660 | "id": "lBMN-wOamc7i", 661 | "colab_type": "text" 662 | }, 663 | "source": [ 664 | "Our model predicts 88.9% of the test cases correctly (8 out of 9)." 665 | ] 666 | }, 667 | { 668 | "cell_type": "markdown", 669 | "metadata": { 670 | "id": "dPR3NvaJmSIF", 671 | "colab_type": "text" 672 | }, 673 | "source": [ 674 | "### Thanks for reading the Notebook!!!" 675 | ] 676 | }, 677 | { 678 | "cell_type": "markdown", 679 | "metadata": { 680 | "id": "G2SmEVskR65o", 681 | "colab_type": "text" 682 | }, 683 | "source": [ 684 | "## Exercise\n", 685 | "**Instruction:**\n", 686 | "\n", 687 | "Use the raw data github link: https://raw.githubusercontent.com/dphi-official/Datasets/master/HR_comma_sep.csv \n", 688 | "\n", 689 | "Or you can download it from [here](https://www.kaggle.com/giripujar/hr-analytics)\n", 690 | "\n", 691 | "**Exercise:**\n", 692 | "\n", 693 | "* Load libraries and data.\n", 694 | "* Do some exploratory data analysis to figure out which variables have a direct and clear impact on employee retention (i.e. whether they leave the company or continue to work)\n", 695 | "* Plot bar charts showing the impact of employee salaries on retention\n", 696 | "* See the correlation between department and employee retention\n", 697 | "* Separate dependent and independent variables.\n", 698 | "* Split the data into train set and test set\n", 699 | "* Now build a Logistic Regression model and make predictions for the test data\n", 700 | "* Measure the accuracy of the model\n", 701 | "\n", 702 | "(A minimal starter sketch for this exercise is given after the references below.)" 703 | ] 704 | }, 705 | { 706 | "cell_type": "markdown", 707 | "metadata": { 708 | "id": "y2MZWSKNnX6P", 709 | "colab_type": "text" 710 | }, 711 | "source": [ 712 | "**References:**\n", 713 | "\n", 714 | "https://github.com/codebasics/py/blob/master/ML/7_logistic_reg/7_logistic_regression.ipynb" 715 | ] 716 | } 717 | ] 718 | }
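As promised in the exercise above, a minimal starter sketch. It assumes (without having inspected the file) that the HR CSV has a binary 'left' column as the target and categorical columns such as 'salary'; check df.columns and adjust before relying on it:

```python
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score

url = "https://raw.githubusercontent.com/dphi-official/Datasets/master/HR_comma_sep.csv"
df = pd.read_csv(url)
print(df.columns)  # verify the assumed column names before proceeding

# One-hot encode the non-numeric columns (e.g. salary, department) so the
# model receives numbers only; drop_first avoids redundant dummy columns
df = pd.get_dummies(df, drop_first=True)

X = df.drop('left', axis=1)  # input variables (assumed target name: 'left')
y = df['left']               # 1 = employee left the company, 0 = stayed

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)

hr_model = LogisticRegression(max_iter=1000)  # extra iterations help convergence
hr_model.fit(X_train, y_train)
print("Test accuracy:", accuracy_score(y_test, hr_model.predict(X_test)))
```
--------------------------------------------------------------------------------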