├── Predicting Heart Disease with Classification Machine Learning Algorithms.ipynb
├── Predicting Heart Disease with Classification Machine Learning Algorithms.py
├── README.md
└── heartDisease.csv


/Predicting Heart Disease with Classification Machine Learning Algorithms.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # coding: utf-8
3 | 
4 | # # Project: Predicting Heart Disease with Classification Machine Learning Algorithms
5 | 
6 | # # Table of Contents
7 | # 1. Introduction:
8 | # Scenario
9 | # Goal
10 | # Features & Predictor
11 | #
12 | # 2. Data Wrangling
13 | #
14 | # 3. Exploratory Data Analysis:
15 | # Correlations
16 | # Violin & Box Plots
17 | # Filtering data by positive & negative Heart Disease patient
18 | #
19 | # 4. Machine Learning + Predictive Analytics:
20 | # Prepare Data for Modeling
21 | # Modeling/Training
22 | # Making the Confusion Matrix
23 | # Feature Importance
24 | # Predictions
25 | #
26 | # 5. Conclusions
27 | #
28 | 
29 | # # 1. Introduction
30 | 
31 | # # Scenario:
32 | 
33 | # You have just been hired at a Hospital with an alarming number of patients coming in reporting various cardiac symptoms. A cardiologist measures vitals & hands you this data to perform Data Analysis and predict whether certain patients have Heart Disease.
34 | #
35 | 
36 | # # Goal:
37 | #
38 | #
39 | #
40 | 
41 | #
42 | # - To predict whether a patient should be diagnosed with Heart Disease. This is a binary outcome:
43 | # Positive (+) = 1, patient diagnosed with Heart Disease
44 | # Negative (-) = 0, patient not diagnosed with Heart Disease
45 | #
46 | # - To experiment with various Classification Models & see which yields the greatest accuracy.
47 | # - To examine trends & correlations within our data.
48 | # - To determine which features are important in determining a Positive/Negative Heart Disease diagnosis.
49 | 
50 | # # Features & Predictor:
51 | 
52 | # Our Predictor (Y, Positive or Negative diagnosis of Heart Disease) is determined by 13 features (X), collected for reuse in the sketch after the imports below:
53 | #
54 | # 1. age (#)
55 | # 2. sex: 1 = Male, 0 = Female (Binary)
56 | # 3. (cp) chest pain type (Ordinal, 4 values): 1 = typical angina, 2 = atypical angina, 3 = non-anginal pain, 4 = asymptomatic
57 | # 4. (trestbps) resting blood pressure (#)
58 | # 5. (chol) serum cholesterol in mg/dl (#)
59 | # 6. (fbs) fasting blood sugar > 120 mg/dl (Binary: 1 = true, 0 = false)
60 | # 7. (restecg) resting electrocardiographic results (values 0, 1, 2)
61 | # 8. (thalach) maximum heart rate achieved (#)
62 | # 9. (exang) exercise-induced angina (Binary: 1 = yes, 0 = no)
63 | # 10. (oldpeak) ST depression induced by exercise relative to rest (#)
64 | # 11. (slope) slope of the peak exercise ST segment (Ordinal): 1 = upsloping, 2 = flat, 3 = downsloping
65 | # 12. (ca) number of major vessels (0-3, Ordinal) colored by fluoroscopy
66 | # 13. (thal) thalassemia result (Ordinal): 3 = normal; 6 = fixed defect; 7 = reversible defect
67 | 
68 | # In[1]:
69 | 
70 | 
71 | import numpy as np
72 | import pandas as pd
73 | import seaborn as sns
74 | import matplotlib.pyplot as plt
75 | 
76 | 
77 | 
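# As referenced in the feature list above, a small convenience cell (an added
# sketch, not part of the original notebook): the 13 feature names in dataset
# order, assuming the Kaggle column names used in heartDisease.csv. Handy
# later, e.g. for labeling feature importances.

# In[ ]:


feature_names = ['age', 'sex', 'cp', 'trestbps', 'chol', 'fbs', 'restecg',
                 'thalach', 'exang', 'oldpeak', 'slope', 'ca', 'thal']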
78 | # # 2. Data Wrangling
79 | 
80 | # In[191]:
81 | 
82 | 
83 | filePath = '/Users/jarar_zaidi/Downloads/datasets-33180-43520-heart.csv'
84 | 
85 | data = pd.read_csv(filePath)
86 | 
87 | data.head(5)
88 | 
89 | 
90 | # In[194]:
91 | 
92 | 
93 | print("(Rows, columns): " + str(data.shape))
94 | data.columns
95 | 
96 | 
97 | # In[195]:
98 | 
99 | 
100 | data.nunique(axis=0)  # returns the number of unique values for each variable
101 | 
102 | 
103 | # In[7]:
104 | 
105 | 
106 | # summarizes the count, mean, standard deviation, min, and max for numeric variables
107 | data.describe()
108 | 
109 | 
110 | # Luckily, we have no missing data to handle!
111 | 
112 | # In[199]:
113 | 
114 | 
115 | # Display the Missing Values
116 | 
117 | print(data.isna().sum())
118 | 
119 | 
120 | # Let's see if there's a good proportion between our positive and negative results. It appears we have a good balance between the two classes.
121 | 
122 | # In[9]:
123 | 
124 | 
125 | data['target'].value_counts()
126 | 
127 | 
128 | # # 3. Exploratory Data Analysis
129 | 
130 | # # Correlations
131 | 
132 | # Correlation Matrix:
133 | # lets you see the correlations between all variables. Within seconds, you can see whether a feature is positively or negatively correlated with our predictor (target).
134 | 
135 | # In[10]:
136 | 
137 | 
138 | # calculate correlation matrix
139 | 
140 | corr = data.corr()
141 | plt.subplots(figsize=(15, 10))
142 | sns.heatmap(corr, xticklabels=corr.columns,
143 |             yticklabels=corr.columns,
144 |             annot=True,
145 |             cmap=sns.diverging_palette(220, 20, as_cmap=True))
146 | 
147 | 
148 | 
149 | # We can see there is a positive correlation between chest pain (cp) & target (our predictor). This makes sense: greater chest pain is associated with a greater chance of having heart disease. Cp (chest pain) is an ordinal feature with 4 values: 1 = typical angina, 2 = atypical angina, 3 = non-anginal pain, 4 = asymptomatic.
150 | #
151 | # In addition, we see a negative correlation between exercise-induced angina (exang) & our predictor. This makes sense because when you exercise, your heart requires more blood, but narrowed arteries slow down blood flow.
152 | #
153 | #
154 | #
155 | 
156 | # Pairplots are also a great way to immediately see the correlations between all variables.
157 | # But you will see me make it with only the continuous columns from our data, because with so many features it can be difficult to read each one.
158 | # So instead I will make a pairplot with only our continuous features.
159 | 
160 | # In[11]:
161 | 
162 | 
163 | subData = data[['age','trestbps','chol','thalach','oldpeak']]
164 | sns.pairplot(subData)
165 | 
166 | 
167 | # I chose to make a smaller pairplot with only the continuous variables to dive deeper into the relationships. It's also a great way to see whether there's a positive or negative correlation!
168 | 
169 | # In[12]:
170 | 
171 | 
172 | sns.catplot(x="target", y="oldpeak", hue="slope", kind="bar", data=data);
173 | 
174 | plt.title('ST depression (induced by exercise relative to rest) vs. Heart Disease', size=25)
175 | plt.xlabel('Heart Disease', size=20)
176 | plt.ylabel('ST depression', size=20)
177 | 
178 | 
179 | # ST segment depression is read while the ventricle is at rest and therefore repolarized. If the trace in the ST segment is abnormally low below the baseline, it can indicate Heart Disease. This supports the plot above: in our data, low ST depression corresponds to the group at greater risk for heart disease, while higher ST depression appears in the healthy group. The "slope" hue refers to the peak exercise ST segment, with values 0 = upsloping, 1 = flat, 2 = downsloping. Both positive & negative heart disease patients exhibit similar distributions across the 3 slope categories.
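# To back up the claim about slope distributions quantitatively, a small added
# sketch (illustrative, not part of the original notebook): the share of each
# slope category within the negative (0) and positive (1) target groups.

# In[ ]:


# normalize='columns' turns counts into within-group proportions
print(pd.crosstab(data['slope'], data['target'], normalize='columns').round(2))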
180 | 
181 | # # Violin & Box Plots
182 | 
183 | # The advantage of Box & Violin plots is that they show the basic statistics of the data as well as its distribution. These plots are often used to compare the distribution of a given variable across categories.
184 | # They show the median, IQR, & Tukey's fences (minimum, first quartile (Q1), median, third quartile (Q3), and maximum). In addition, they can reveal outliers in our data.
185 | 
186 | # In[156]:
187 | 
188 | 
189 | plt.figure(figsize=(12, 8))
190 | sns.violinplot(x='target', y='oldpeak', hue="sex", inner='quartile', data=data)
191 | plt.title("ST Depression Level vs. Heart Disease", fontsize=20)  # this plot shows oldpeak, not thalach
192 | plt.xlabel("Heart Disease Target", fontsize=16)
193 | plt.ylabel("ST Depression Level", fontsize=16)
194 | 
195 | 
196 | # We can see that the overall shape & distribution for negative & positive patients differ vastly. Positive patients exhibit a lower median for ST depression level, & thus a greater share of their data lies between 0 & 2, while negative patients' lies between 1 & 3. In addition, we don't see many differences between male & female target outcomes.
197 | 
198 | # In[14]:
199 | 
200 | 
201 | plt.figure(figsize=(12, 8))
202 | sns.boxplot(x='target', y='thalach', hue="sex", data=data)
203 | plt.title("Thalach Level vs. Heart Disease", fontsize=20)  # this plot shows thalach, not ST depression
204 | plt.xlabel("Heart Disease Target", fontsize=16)
205 | plt.ylabel("Maximum heart rate achieved (thalach)", fontsize=16)
206 | 
207 | 
208 | # Positive patients exhibit a heightened median for maximum heart rate achieved (thalach), while negative patients have lower levels. In addition, we don't see many differences between male & female target outcomes, except that males show slightly larger ranges of thalach.
209 | 
210 | # # Filtering data by positive & negative Heart Disease patient
211 | 
212 | # In[15]:
213 | 
214 | 
215 | # Filtering data by positive Heart Disease patient
216 | pos_data = data[data['target']==1]
217 | pos_data.describe()
218 | 
219 | 
220 | # Filtering data by negative Heart Disease patient
221 | 
222 | # In[16]:
223 | 
224 | 
225 | # Filtering data by negative Heart Disease patient
226 | neg_data = data[data['target']==0]
227 | neg_data.describe()
228 | 
229 | 
230 | # In[17]:
231 | 
232 | 
233 | print("(Positive Patients ST depression): " + str(pos_data['oldpeak'].mean()))
234 | print("(Negative Patients ST depression): " + str(neg_data['oldpeak'].mean()))
235 | 
236 | 
237 | # In[18]:
238 | 
239 | 
240 | print("(Positive Patients thalach): " + str(pos_data['thalach'].mean()))
241 | print("(Negative Patients thalach): " + str(neg_data['thalach'].mean()))
242 | 
243 | 
244 | # From comparing positive and negative patients, we can see there are vast differences in means for many of our features. Examining the details, we observe that positive patients have a heightened average maximum heart rate achieved (thalach). In addition, positive patients exhibit about 1/3 the amount of ST depression induced by exercise relative to rest (oldpeak).
245 | #
246 | 
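# To extend the two mean comparisons above to every feature at once, a short
# added sketch (illustrative, not in the original notebook):

# In[ ]:


# One row per target class; each column is that feature's mean within the class.
print(data.groupby('target').mean())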
247 | # # 4. Machine Learning + Predictive Analytics
248 | 
249 | # # Prepare Data for Modeling
250 | 
251 | # Assign the 13 features to X, & the last column to our classification predictor, y.
252 | 
253 | # In[169]:
254 | 
255 | 
256 | X = data.iloc[:, :-1].values
257 | y = data.iloc[:, -1].values
258 | 
259 | 
260 | # Split the dataset into the Training set and Test set.
261 | 
262 | # In[170]:
263 | 
264 | 
265 | from sklearn.model_selection import train_test_split
266 | x_train, x_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=1)
267 | 
268 | 
269 | # Normalize: standardizing the data transforms each feature so that its distribution has a mean of 0 and a standard deviation of 1. We fit the scaler on the training set only, then apply it to the test set, so no information from the test set leaks into training.
270 | 
271 | # In[200]:
272 | 
273 | 
274 | from sklearn.preprocessing import StandardScaler
275 | sc = StandardScaler()
276 | x_train = sc.fit_transform(x_train)
277 | x_test = sc.transform(x_test)
278 | 
279 | 
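# A quick added sanity check (illustrative): after StandardScaler, every
# training column should have a mean of ~0 and a standard deviation of ~1. The
# test set will be close to, but not exactly, 0 and 1, since it was scaled
# with the training set's statistics.

# In[ ]:


print(x_train.mean(axis=0).round(2))  # ~0 for every feature
print(x_train.std(axis=0).round(2))   # ~1 for every feature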
280 | # # Modeling/Training
281 | 
282 | # We will now train various Classification Models on the Training set & see which yields the highest accuracy.
283 | # We will compare the accuracy of Logistic Regression, K-NN, SVM, Naive Bayes Classifier, Decision Trees, Random Forest, and XGBoost. Note: these are all supervised learning models.
284 | 
285 | # Model 1: Logistic Regression
286 | #
287 | 
288 | # In[172]:
289 | 
290 | 
291 | from sklearn.metrics import classification_report
292 | from sklearn.linear_model import LogisticRegression
293 | 
294 | model1 = LogisticRegression(random_state=1)  # get instance of model
295 | model1.fit(x_train, y_train)  # Train/Fit model
296 | 
297 | y_pred1 = model1.predict(x_test)  # get y predictions
298 | print(classification_report(y_test, y_pred1))  # output accuracy
299 | 
300 | 
301 | # Model 2: K-NN (K-Nearest Neighbors)
302 | 
303 | # In[173]:
304 | 
305 | 
306 | from sklearn.metrics import classification_report
307 | from sklearn.neighbors import KNeighborsClassifier
308 | 
309 | model2 = KNeighborsClassifier()  # get instance of model
310 | model2.fit(x_train, y_train)  # Train/Fit model
311 | 
312 | y_pred2 = model2.predict(x_test)  # get y predictions
313 | print(classification_report(y_test, y_pred2))  # output accuracy
314 | 
315 | 
316 | # Model 3: SVM (Support Vector Machine)
317 | 
318 | # In[174]:
319 | 
320 | 
321 | from sklearn.metrics import classification_report
322 | from sklearn.svm import SVC
323 | 
324 | model3 = SVC(random_state=1)  # get instance of model
325 | model3.fit(x_train, y_train)  # Train/Fit model
326 | 
327 | y_pred3 = model3.predict(x_test)  # get y predictions
328 | print(classification_report(y_test, y_pred3))  # output accuracy
329 | 
330 | 
331 | # Model 4: Naive Bayes Classifier
332 | 
333 | # In[175]:
334 | 
335 | 
336 | from sklearn.metrics import classification_report
337 | from sklearn.naive_bayes import GaussianNB
338 | 
339 | model4 = GaussianNB()  # get instance of model
340 | model4.fit(x_train, y_train)  # Train/Fit model
341 | 
342 | y_pred4 = model4.predict(x_test)  # get y predictions
343 | print(classification_report(y_test, y_pred4))  # output accuracy
344 | 
345 | 
346 | # Model 5: Decision Trees
347 | #
348 | 
349 | # In[176]:
350 | 
351 | 
352 | from sklearn.metrics import classification_report
353 | from sklearn.tree import DecisionTreeClassifier
354 | 
355 | model5 = DecisionTreeClassifier(random_state=1)  # get instance of model
356 | model5.fit(x_train, y_train)  # Train/Fit model
357 | 
358 | y_pred5 = model5.predict(x_test)  # get y predictions
359 | print(classification_report(y_test, y_pred5))  # output accuracy
360 | 
361 | 
362 | # Model 6: Random Forest
363 | #
364 | 
365 | # In[177]:
366 | 
367 | 
368 | from sklearn.metrics import classification_report
369 | from sklearn.ensemble import RandomForestClassifier
370 | 
371 | model6 = RandomForestClassifier(random_state=1)  # get instance of model
372 | model6.fit(x_train, y_train)  # Train/Fit model
373 | 
374 | y_pred6 = model6.predict(x_test)  # get y predictions
375 | print(classification_report(y_test, y_pred6))  # output accuracy
376 | 
377 | 
378 | # Model 7: XGBoost
379 | #
380 | 
381 | # In[178]:
382 | 
383 | 
384 | from xgboost import XGBClassifier
385 | 
386 | model7 = XGBClassifier(random_state=1)
387 | model7.fit(x_train, y_train)
388 | y_pred7 = model7.predict(x_test)
389 | print(classification_report(y_test, y_pred7))
390 | 
391 | 
392 | # From comparing the 7 models, we can conclude that Model 6: Random Forest yields the highest accuracy, at 80%. (A side-by-side recap of all 7 test-set accuracies appears in the sketch just before the Conclusions.)
393 | #
394 | #
395 | # The classification report gives us precision, recall, f1-score and support:
396 | #
397 | # Precision: of the samples predicted to belong to a class, how many are correctly classified.
398 | #
399 | # Recall: how many samples of a class we find out of the total number of elements of that class.
400 | #
401 | # F1-score: harmonic mean of the precision and recall values.
402 | # F1 score reaches its best value at 1 and worst value at 0.
403 | # F1 Score = 2 x ((precision x recall) / (precision + recall))
404 | #
405 | # Support: the number of samples of the true response that lie in that class.
406 | #
407 | #
408 | #
409 | 
410 | # # Making the Confusion Matrix
411 | 
412 | # In[179]:
413 | 
414 | 
415 | from sklearn.metrics import confusion_matrix, accuracy_score
416 | cm = confusion_matrix(y_test, y_pred6)
417 | print(cm)
418 | accuracy_score(y_test, y_pred6)
419 | 
420 | 
421 | # 28 is the number of True Positives in our data, while 21 is the number of True Negatives (scikit-learn lays the matrix out as [[TN, FP], [FN, TP]]).
422 | #
423 | # 9 & 3 are the numbers of errors.
424 | #
425 | # There are 9 Type I errors (False Positives): we predicted positive and it was false.
426 | #
427 | # There are 3 Type II errors (False Negatives): we predicted negative and it was false.
428 | #
429 | # Hence the accuracy is the number of correct predictions divided by the total.
430 | # In other words, with TP, FN, FP and TN representing the numbers of true positives, false negatives, false positives and true negatives:
431 | #
432 | # (TP + TN)/(TP + TN + FP + FN)
433 | # (28+21)/(28+21+9+3) = 0.80 = 80% accuracy
434 | 
435 | # Note: A good rule of thumb is that any accuracy above 70% is considered good, but be careful: if your accuracy is extremely high, it may be too good to be true (a sign of overfitting). Thus, 80% is a solid accuracy; the cross-validation sketch below is one way to check that it isn't an artifact of a single train/test split.
436 | 
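# As a hedged check on the 80% figure (an added sketch, not in the original
# notebook): 5-fold cross-validation on the full dataset, bundling the scaler
# and the Random Forest into a Pipeline so each fold is scaled using only its
# own training portion. Tree ensembles don't strictly need scaling; the
# Pipeline simply mirrors the preprocessing used above.

# In[ ]:


from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

cv_model = make_pipeline(StandardScaler(), RandomForestClassifier(random_state=1))
cv_scores = cross_val_score(cv_model, X, y, cv=5)  # X, y were defined before the split
print(cv_scores.round(3), "mean:", cv_scores.mean().round(3))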
437 | # # Feature Importance
438 | 
439 | # Feature Importance provides a score that indicates how helpful each feature was in our model.
440 | #
441 | # The higher the Feature Score, the more that feature is used to make key decisions & thus the more important it is.
442 | 
443 | # In[135]:
444 | 
445 | 
446 | # get importance
447 | importance = model6.feature_importances_
448 | 
449 | # summarize feature importance
450 | for i, v in enumerate(importance):
451 |     print('Feature: %0d, Score: %.5f' % (i, v))
452 | 
453 | 
454 | # In[154]:
455 | 
456 | 
457 | index = data.columns[:-1]
458 | importance = pd.Series(model6.feature_importances_, index=index)
459 | importance.nlargest(13).plot(kind='barh', colormap='winter')
460 | 
461 | 
462 | # From the Feature Importance graph above, we can conclude that the top 4 significant features were chest pain type (cp), maximum heart rate achieved (thalach), number of major vessels (ca), and ST depression induced by exercise relative to rest (oldpeak).
463 | 
464 | # # Predictions
465 | 
466 | # Scenario: A patient develops cardiac symptoms & you input his vitals into the Machine Learning Algorithm.
467 | #
468 | # He is a 20-year-old male, with a chest pain value of 2 (atypical angina) and a resting blood pressure of 110.
469 | #
470 | # In addition, he has a serum cholesterol of 230 mg/dl.
471 | #
472 | # His fasting blood sugar is > 120 mg/dl.
473 | #
474 | # He has a resting electrocardiographic result of 1.
475 | #
476 | # The patient's maximum heart rate achieved is 140.
477 | #
478 | # Also, he has exercise-induced angina.
479 | #
480 | # His ST depression induced by exercise relative to rest is 2.2.
481 | #
482 | # The slope of the peak exercise ST segment is flat.
483 | #
484 | # He has no major vessels colored by fluoroscopy,
485 | # and his thalassemia result indicates a reversible defect.
486 | #
487 | # Based on this information, can you classify this patient with Heart Disease?
488 | #
489 | 
490 | # In[182]:
491 | 
492 | 
493 | print(model6.predict(sc.transform([[20,1,2,110,230,1,1,140,1,2.2,2,0,2]])))
494 | 
495 | 
496 | # Yes! Our machine learning algorithm has classified this patient with Heart Disease. Now we can properly diagnose him & get him the help he needs to recover. By diagnosing him early, we may prevent worse symptoms from arising later.
497 | 
498 | # Predicting the Test set results:
499 | #
500 | # The first value in each row is our predicted value,
501 | # the second value is the actual value.
502 | #
503 | # If the values match, then we predicted correctly.
504 | # We can see that our results are very accurate!
505 | 
506 | # In[185]:
507 | 
508 | 
509 | y_pred = model6.predict(x_test)
510 | print(np.concatenate((y_pred.reshape(len(y_pred),1), y_test.reshape(len(y_test),1)),1))
511 | 
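# Before concluding, the recap sketch referenced earlier (added, illustrative):
# the test-set accuracy of all 7 fitted models side by side, computed from the
# predictions already generated above.

# In[ ]:


from sklearn.metrics import accuracy_score

names = ['Logistic Regression', 'K-NN', 'SVM', 'Naive Bayes',
         'Decision Tree', 'Random Forest', 'XGBoost']
preds = [y_pred1, y_pred2, y_pred3, y_pred4, y_pred5, y_pred6, y_pred7]
for name, pred in zip(names, preds):
    print('%-20s accuracy: %.3f' % (name, accuracy_score(y_test, pred)))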
512 | 
513 | # # Conclusions
514 | 
515 | # 1. Our Random Forest algorithm yields the highest accuracy, 80%. Any accuracy above 70% is considered good, but be careful: if your accuracy is extremely high, it may be too good to be true (a sign of overfitting). Thus, 80% is a solid result.
516 | #
517 | # 2. Out of the 13 features we examined, the top 4 significant features that helped us classify between a positive & negative diagnosis were chest pain type (cp), maximum heart rate achieved (thalach), number of major vessels (ca), and ST depression induced by exercise relative to rest (oldpeak).
518 | #
519 | # 3. Our machine learning algorithm can now classify patients with Heart Disease. Now we can properly diagnose patients & get them the help they need to recover. By detecting these features early, we may prevent worse symptoms from arising later.
520 | 
521 | # In[ ]:
522 | 
523 | 
524 | 
525 | 
526 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Project-Predicting-Heart-Disease-with-Classification-Machine-Learning-Algorithms
2 | Project: Predicting Heart Disease with Classification Machine Learning Algorithms
3 | 
4 | Author: Jarar Zaidi
5 | 
6 | Date: 6/11/2020
7 | 
8 | Medium Link to project: https://medium.com/@jararzaidi/project-predicting-heart-disease-with-classification-machine-learning-algorithms-fd69e6fdc9d6
9 | 
10 | ---------------
11 | This project is organized as follows:
12 | 
13 | Table of Contents
14 | 
15 | 1. Introduction: Scenario & Goals, Features & Predictor
16 | 2. Data Wrangling
17 | 3. Exploratory Data Analysis: Correlations, Violin & Box Plots, Filtering data by positive & negative Heart Disease patient
18 | 4. Machine Learning + Predictive Analytics: Prepare Data for Modeling, Modeling/Training, Confusion Matrix, Feature Importance, Predictions
19 | 5. Conclusions
20 | 
21 | ---------------
22 | Scenario:
23 | You have just been hired as a Data Scientist at a Hospital with an alarming number of patients coming in reporting various cardiac symptoms.
24 | A cardiologist measures vitals & hands you this data to perform Data Analysis and predict whether certain patients have Heart Disease.
25 | We would like to make a Machine Learning algorithm where we can train our AI to learn & improve from experience.
26 | Thus, we would want to classify patients as either positive or negative for Heart Disease.
27 | 
28 | 
29 | ---------------
30 | Goal:
31 | Predict whether a patient should be diagnosed with Heart Disease. This is a binary outcome.
32 | Positive (+) = 1, patient diagnosed with Heart Disease
33 | Negative (-) = 0, patient not diagnosed with Heart Disease
34 | Experiment with various Classification Models & see which yields greatest accuracy.
35 | Examine trends & correlations within our data
36 | Determine which features are most important to Positive/Negative Heart Disease diagnosis
37 | 
38 | ---------------
39 | Files:
40 | Predicting Heart Disease with Classification Machine Learning Algorithms.ipynb - Jupyter Notebook (.ipynb) version
41 | Predicting Heart Disease with Classification Machine Learning Algorithms.py - the Python (.py) version of the project
42 | heartDisease.csv - Original dataset from Kaggle.com in CSV (.csv) format
43 | 
44 | 
45 | 
46 | ---------------
47 | 
48 | 
--------------------------------------------------------------------------------
/heartDisease.csv:
--------------------------------------------------------------------------------
1 | age,sex,cp,trestbps,chol,fbs,restecg,thalach,exang,oldpeak,slope,ca,thal,target
2 | 63,1,3,145,233,1,0,150,0,2.3,0,0,1,1
3 | 37,1,2,130,250,0,1,187,0,3.5,0,0,2,1
4 | 41,0,1,130,204,0,0,172,0,1.4,2,0,2,1
5 | 56,1,1,120,236,0,1,178,0,0.8,2,0,2,1
6 | 57,0,0,120,354,0,1,163,1,0.6,2,0,2,1
7 | 57,1,0,140,192,0,1,148,0,0.4,1,0,1,1
8 | 56,0,1,140,294,0,0,153,0,1.3,1,0,2,1
9 | 44,1,1,120,263,0,1,173,0,0,2,0,3,1
10 | 52,1,2,172,199,1,1,162,0,0.5,2,0,3,1
11 | 57,1,2,150,168,0,1,174,0,1.6,2,0,2,1
12 | 54,1,0,140,239,0,1,160,0,1.2,2,0,2,1
13 | 48,0,2,130,275,0,1,139,0,0.2,2,0,2,1
14 | 49,1,1,130,266,0,1,171,0,0.6,2,0,2,1
15 | 64,1,3,110,211,0,0,144,1,1.8,1,0,2,1
16 | 58,0,3,150,283,1,0,162,0,1,2,0,2,1
17 | 50,0,2,120,219,0,1,158,0,1.6,1,0,2,1
18 | 58,0,2,120,340,0,1,172,0,0,2,0,2,1
19 | 66,0,3,150,226,0,1,114,0,2.6,0,0,2,1
20 | 43,1,0,150,247,0,1,171,0,1.5,2,0,2,1
21 | 69,0,3,140,239,0,1,151,0,1.8,2,2,2,1
22 | 59,1,0,135,234,0,1,161,0,0.5,1,0,3,1
23 | 44,1,2,130,233,0,1,179,1,0.4,2,0,2,1
24 | 42,1,0,140,226,0,1,178,0,0,2,0,2,1
25 | 61,1,2,150,243,1,1,137,1,1,1,0,2,1
26 | 40,1,3,140,199,0,1,178,1,1.4,2,0,3,1
27 | 71,0,1,160,302,0,1,162,0,0.4,2,2,2,1
28 | 59,1,2,150,212,1,1,157,0,1.6,2,0,2,1
29 | 51,1,2,110,175,0,1,123,0,0.6,2,0,2,1
30 | 65,0,2,140,417,1,0,157,0,0.8,2,1,2,1
31 | 53,1,2,130,197,1,0,152,0,1.2,0,0,2,1
32 | 41,0,1,105,198,0,1,168,0,0,2,1,2,1
33 | 65,1,0,120,177,0,1,140,0,0.4,2,0,3,1
34 | 44,1,1,130,219,0,0,188,0,0,2,0,2,1
35 | 54,1,2,125,273,0,0,152,0,0.5,0,1,2,1
36 | 51,1,3,125,213,0,0,125,1,1.4,2,1,2,1
37 | 46,0,2,142,177,0,0,160,1,1.4,0,0,2,1
38 | 54,0,2,135,304,1,1,170,0,0,2,0,2,1
39 | 54,1,2,150,232,0,0,165,0,1.6,2,0,3,1
40 | 65,0,2,155,269,0,1,148,0,0.8,2,0,2,1
41 | 65,0,2,160,360,0,0,151,0,0.8,2,0,2,1
42 | 51,0,2,140,308,0,0,142,0,1.5,2,1,2,1
43 | 48,1,1,130,245,0,0,180,0,0.2,1,0,2,1
44 | 45,1,0,104,208,0,0,148,1,3,1,0,2,1
45 | 53,0,0,130,264,0,0,143,0,0.4,1,0,2,1
46 | 39,1,2,140,321,0,0,182,0,0,2,0,2,1
47 | 52,1,1,120,325,0,1,172,0,0.2,2,0,2,1
48 | 44,1,2,140,235,0,0,180,0,0,2,0,2,1
49 | 47,1,2,138,257,0,0,156,0,0,2,0,2,1
50 | 53,0,2,128,216,0,0,115,0,0,2,0,0,1
51 | 53,0,0,138,234,0,0,160,0,0,2,0,2,1
52 | 51,0,2,130,256,0,0,149,0,0.5,2,0,2,1
53 | 66,1,0,120,302,0,0,151,0,0.4,1,0,2,1
54 | 62,1,2,130,231,0,1,146,0,1.8,1,3,3,1
55 | 44,0,2,108,141,0,1,175,0,0.6,1,0,2,1
56 | 63,0,2,135,252,0,0,172,0,0,2,0,2,1
57 | 52,1,1,134,201,0,1,158,0,0.8,2,1,2,1
58 | 48,1,0,122,222,0,0,186,0,0,2,0,2,1
59 | 45,1,0,115,260,0,0,185,0,0,2,0,2,1
60 | 34,1,3,118,182,0,0,174,0,0,2,0,2,1
61 | 57,0,0,128,303,0,0,159,0,0,2,1,2,1
62 | 71,0,2,110,265,1,0,130,0,0,2,1,2,1
63 | 54,1,1,108,309,0,1,156,0,0,2,0,3,1
64 | 52,1,3,118,186,0,0,190,0,0,1,0,1,1
65 | 41,1,1,135,203,0,1,132,0,0,1,0,1,1
66 | 58,1,2,140,211,1,0,165,0,0,2,0,2,1
67 | 35,0,0,138,183,0,1,182,0,1.4,2,0,2,1
68 | 
51,1,2,100,222,0,1,143,1,1.2,1,0,2,1 69 | 45,0,1,130,234,0,0,175,0,0.6,1,0,2,1 70 | 44,1,1,120,220,0,1,170,0,0,2,0,2,1 71 | 62,0,0,124,209,0,1,163,0,0,2,0,2,1 72 | 54,1,2,120,258,0,0,147,0,0.4,1,0,3,1 73 | 51,1,2,94,227,0,1,154,1,0,2,1,3,1 74 | 29,1,1,130,204,0,0,202,0,0,2,0,2,1 75 | 51,1,0,140,261,0,0,186,1,0,2,0,2,1 76 | 43,0,2,122,213,0,1,165,0,0.2,1,0,2,1 77 | 55,0,1,135,250,0,0,161,0,1.4,1,0,2,1 78 | 51,1,2,125,245,1,0,166,0,2.4,1,0,2,1 79 | 59,1,1,140,221,0,1,164,1,0,2,0,2,1 80 | 52,1,1,128,205,1,1,184,0,0,2,0,2,1 81 | 58,1,2,105,240,0,0,154,1,0.6,1,0,3,1 82 | 41,1,2,112,250,0,1,179,0,0,2,0,2,1 83 | 45,1,1,128,308,0,0,170,0,0,2,0,2,1 84 | 60,0,2,102,318,0,1,160,0,0,2,1,2,1 85 | 52,1,3,152,298,1,1,178,0,1.2,1,0,3,1 86 | 42,0,0,102,265,0,0,122,0,0.6,1,0,2,1 87 | 67,0,2,115,564,0,0,160,0,1.6,1,0,3,1 88 | 68,1,2,118,277,0,1,151,0,1,2,1,3,1 89 | 46,1,1,101,197,1,1,156,0,0,2,0,3,1 90 | 54,0,2,110,214,0,1,158,0,1.6,1,0,2,1 91 | 58,0,0,100,248,0,0,122,0,1,1,0,2,1 92 | 48,1,2,124,255,1,1,175,0,0,2,2,2,1 93 | 57,1,0,132,207,0,1,168,1,0,2,0,3,1 94 | 52,1,2,138,223,0,1,169,0,0,2,4,2,1 95 | 54,0,1,132,288,1,0,159,1,0,2,1,2,1 96 | 45,0,1,112,160,0,1,138,0,0,1,0,2,1 97 | 53,1,0,142,226,0,0,111,1,0,2,0,3,1 98 | 62,0,0,140,394,0,0,157,0,1.2,1,0,2,1 99 | 52,1,0,108,233,1,1,147,0,0.1,2,3,3,1 100 | 43,1,2,130,315,0,1,162,0,1.9,2,1,2,1 101 | 53,1,2,130,246,1,0,173,0,0,2,3,2,1 102 | 42,1,3,148,244,0,0,178,0,0.8,2,2,2,1 103 | 59,1,3,178,270,0,0,145,0,4.2,0,0,3,1 104 | 63,0,1,140,195,0,1,179,0,0,2,2,2,1 105 | 42,1,2,120,240,1,1,194,0,0.8,0,0,3,1 106 | 50,1,2,129,196,0,1,163,0,0,2,0,2,1 107 | 68,0,2,120,211,0,0,115,0,1.5,1,0,2,1 108 | 69,1,3,160,234,1,0,131,0,0.1,1,1,2,1 109 | 45,0,0,138,236,0,0,152,1,0.2,1,0,2,1 110 | 50,0,1,120,244,0,1,162,0,1.1,2,0,2,1 111 | 50,0,0,110,254,0,0,159,0,0,2,0,2,1 112 | 64,0,0,180,325,0,1,154,1,0,2,0,2,1 113 | 57,1,2,150,126,1,1,173,0,0.2,2,1,3,1 114 | 64,0,2,140,313,0,1,133,0,0.2,2,0,3,1 115 | 43,1,0,110,211,0,1,161,0,0,2,0,3,1 116 | 55,1,1,130,262,0,1,155,0,0,2,0,2,1 117 | 37,0,2,120,215,0,1,170,0,0,2,0,2,1 118 | 41,1,2,130,214,0,0,168,0,2,1,0,2,1 119 | 56,1,3,120,193,0,0,162,0,1.9,1,0,3,1 120 | 46,0,1,105,204,0,1,172,0,0,2,0,2,1 121 | 46,0,0,138,243,0,0,152,1,0,1,0,2,1 122 | 64,0,0,130,303,0,1,122,0,2,1,2,2,1 123 | 59,1,0,138,271,0,0,182,0,0,2,0,2,1 124 | 41,0,2,112,268,0,0,172,1,0,2,0,2,1 125 | 54,0,2,108,267,0,0,167,0,0,2,0,2,1 126 | 39,0,2,94,199,0,1,179,0,0,2,0,2,1 127 | 34,0,1,118,210,0,1,192,0,0.7,2,0,2,1 128 | 47,1,0,112,204,0,1,143,0,0.1,2,0,2,1 129 | 67,0,2,152,277,0,1,172,0,0,2,1,2,1 130 | 52,0,2,136,196,0,0,169,0,0.1,1,0,2,1 131 | 74,0,1,120,269,0,0,121,1,0.2,2,1,2,1 132 | 54,0,2,160,201,0,1,163,0,0,2,1,2,1 133 | 49,0,1,134,271,0,1,162,0,0,1,0,2,1 134 | 42,1,1,120,295,0,1,162,0,0,2,0,2,1 135 | 41,1,1,110,235,0,1,153,0,0,2,0,2,1 136 | 41,0,1,126,306,0,1,163,0,0,2,0,2,1 137 | 49,0,0,130,269,0,1,163,0,0,2,0,2,1 138 | 60,0,2,120,178,1,1,96,0,0,2,0,2,1 139 | 62,1,1,128,208,1,0,140,0,0,2,0,2,1 140 | 57,1,0,110,201,0,1,126,1,1.5,1,0,1,1 141 | 64,1,0,128,263,0,1,105,1,0.2,1,1,3,1 142 | 51,0,2,120,295,0,0,157,0,0.6,2,0,2,1 143 | 43,1,0,115,303,0,1,181,0,1.2,1,0,2,1 144 | 42,0,2,120,209,0,1,173,0,0,1,0,2,1 145 | 67,0,0,106,223,0,1,142,0,0.3,2,2,2,1 146 | 76,0,2,140,197,0,2,116,0,1.1,1,0,2,1 147 | 70,1,1,156,245,0,0,143,0,0,2,0,2,1 148 | 44,0,2,118,242,0,1,149,0,0.3,1,1,2,1 149 | 60,0,3,150,240,0,1,171,0,0.9,2,0,2,1 150 | 44,1,2,120,226,0,1,169,0,0,2,0,2,1 151 | 42,1,2,130,180,0,1,150,0,0,2,0,2,1 152 | 66,1,0,160,228,0,0,138,0,2.3,2,0,1,1 153 | 
71,0,0,112,149,0,1,125,0,1.6,1,0,2,1 154 | 64,1,3,170,227,0,0,155,0,0.6,1,0,3,1 155 | 66,0,2,146,278,0,0,152,0,0,1,1,2,1 156 | 39,0,2,138,220,0,1,152,0,0,1,0,2,1 157 | 58,0,0,130,197,0,1,131,0,0.6,1,0,2,1 158 | 47,1,2,130,253,0,1,179,0,0,2,0,2,1 159 | 35,1,1,122,192,0,1,174,0,0,2,0,2,1 160 | 58,1,1,125,220,0,1,144,0,0.4,1,4,3,1 161 | 56,1,1,130,221,0,0,163,0,0,2,0,3,1 162 | 56,1,1,120,240,0,1,169,0,0,0,0,2,1 163 | 55,0,1,132,342,0,1,166,0,1.2,2,0,2,1 164 | 41,1,1,120,157,0,1,182,0,0,2,0,2,1 165 | 38,1,2,138,175,0,1,173,0,0,2,4,2,1 166 | 38,1,2,138,175,0,1,173,0,0,2,4,2,1 167 | 67,1,0,160,286,0,0,108,1,1.5,1,3,2,0 168 | 67,1,0,120,229,0,0,129,1,2.6,1,2,3,0 169 | 62,0,0,140,268,0,0,160,0,3.6,0,2,2,0 170 | 63,1,0,130,254,0,0,147,0,1.4,1,1,3,0 171 | 53,1,0,140,203,1,0,155,1,3.1,0,0,3,0 172 | 56,1,2,130,256,1,0,142,1,0.6,1,1,1,0 173 | 48,1,1,110,229,0,1,168,0,1,0,0,3,0 174 | 58,1,1,120,284,0,0,160,0,1.8,1,0,2,0 175 | 58,1,2,132,224,0,0,173,0,3.2,2,2,3,0 176 | 60,1,0,130,206,0,0,132,1,2.4,1,2,3,0 177 | 40,1,0,110,167,0,0,114,1,2,1,0,3,0 178 | 60,1,0,117,230,1,1,160,1,1.4,2,2,3,0 179 | 64,1,2,140,335,0,1,158,0,0,2,0,2,0 180 | 43,1,0,120,177,0,0,120,1,2.5,1,0,3,0 181 | 57,1,0,150,276,0,0,112,1,0.6,1,1,1,0 182 | 55,1,0,132,353,0,1,132,1,1.2,1,1,3,0 183 | 65,0,0,150,225,0,0,114,0,1,1,3,3,0 184 | 61,0,0,130,330,0,0,169,0,0,2,0,2,0 185 | 58,1,2,112,230,0,0,165,0,2.5,1,1,3,0 186 | 50,1,0,150,243,0,0,128,0,2.6,1,0,3,0 187 | 44,1,0,112,290,0,0,153,0,0,2,1,2,0 188 | 60,1,0,130,253,0,1,144,1,1.4,2,1,3,0 189 | 54,1,0,124,266,0,0,109,1,2.2,1,1,3,0 190 | 50,1,2,140,233,0,1,163,0,0.6,1,1,3,0 191 | 41,1,0,110,172,0,0,158,0,0,2,0,3,0 192 | 51,0,0,130,305,0,1,142,1,1.2,1,0,3,0 193 | 58,1,0,128,216,0,0,131,1,2.2,1,3,3,0 194 | 54,1,0,120,188,0,1,113,0,1.4,1,1,3,0 195 | 60,1,0,145,282,0,0,142,1,2.8,1,2,3,0 196 | 60,1,2,140,185,0,0,155,0,3,1,0,2,0 197 | 59,1,0,170,326,0,0,140,1,3.4,0,0,3,0 198 | 46,1,2,150,231,0,1,147,0,3.6,1,0,2,0 199 | 67,1,0,125,254,1,1,163,0,0.2,1,2,3,0 200 | 62,1,0,120,267,0,1,99,1,1.8,1,2,3,0 201 | 65,1,0,110,248,0,0,158,0,0.6,2,2,1,0 202 | 44,1,0,110,197,0,0,177,0,0,2,1,2,0 203 | 60,1,0,125,258,0,0,141,1,2.8,1,1,3,0 204 | 58,1,0,150,270,0,0,111,1,0.8,2,0,3,0 205 | 68,1,2,180,274,1,0,150,1,1.6,1,0,3,0 206 | 62,0,0,160,164,0,0,145,0,6.2,0,3,3,0 207 | 52,1,0,128,255,0,1,161,1,0,2,1,3,0 208 | 59,1,0,110,239,0,0,142,1,1.2,1,1,3,0 209 | 60,0,0,150,258,0,0,157,0,2.6,1,2,3,0 210 | 49,1,2,120,188,0,1,139,0,2,1,3,3,0 211 | 59,1,0,140,177,0,1,162,1,0,2,1,3,0 212 | 57,1,2,128,229,0,0,150,0,0.4,1,1,3,0 213 | 61,1,0,120,260,0,1,140,1,3.6,1,1,3,0 214 | 39,1,0,118,219,0,1,140,0,1.2,1,0,3,0 215 | 61,0,0,145,307,0,0,146,1,1,1,0,3,0 216 | 56,1,0,125,249,1,0,144,1,1.2,1,1,2,0 217 | 43,0,0,132,341,1,0,136,1,3,1,0,3,0 218 | 62,0,2,130,263,0,1,97,0,1.2,1,1,3,0 219 | 63,1,0,130,330,1,0,132,1,1.8,2,3,3,0 220 | 65,1,0,135,254,0,0,127,0,2.8,1,1,3,0 221 | 48,1,0,130,256,1,0,150,1,0,2,2,3,0 222 | 63,0,0,150,407,0,0,154,0,4,1,3,3,0 223 | 55,1,0,140,217,0,1,111,1,5.6,0,0,3,0 224 | 65,1,3,138,282,1,0,174,0,1.4,1,1,2,0 225 | 56,0,0,200,288,1,0,133,1,4,0,2,3,0 226 | 54,1,0,110,239,0,1,126,1,2.8,1,1,3,0 227 | 70,1,0,145,174,0,1,125,1,2.6,0,0,3,0 228 | 62,1,1,120,281,0,0,103,0,1.4,1,1,3,0 229 | 35,1,0,120,198,0,1,130,1,1.6,1,0,3,0 230 | 59,1,3,170,288,0,0,159,0,0.2,1,0,3,0 231 | 64,1,2,125,309,0,1,131,1,1.8,1,0,3,0 232 | 47,1,2,108,243,0,1,152,0,0,2,0,2,0 233 | 57,1,0,165,289,1,0,124,0,1,1,3,3,0 234 | 55,1,0,160,289,0,0,145,1,0.8,1,1,3,0 235 | 64,1,0,120,246,0,0,96,1,2.2,0,1,2,0 236 | 70,1,0,130,322,0,0,109,0,2.4,1,3,2,0 237 | 
51,1,0,140,299,0,1,173,1,1.6,2,0,3,0 238 | 58,1,0,125,300,0,0,171,0,0,2,2,3,0 239 | 60,1,0,140,293,0,0,170,0,1.2,1,2,3,0 240 | 77,1,0,125,304,0,0,162,1,0,2,3,2,0 241 | 35,1,0,126,282,0,0,156,1,0,2,0,3,0 242 | 70,1,2,160,269,0,1,112,1,2.9,1,1,3,0 243 | 59,0,0,174,249,0,1,143,1,0,1,0,2,0 244 | 64,1,0,145,212,0,0,132,0,2,1,2,1,0 245 | 57,1,0,152,274,0,1,88,1,1.2,1,1,3,0 246 | 56,1,0,132,184,0,0,105,1,2.1,1,1,1,0 247 | 48,1,0,124,274,0,0,166,0,0.5,1,0,3,0 248 | 56,0,0,134,409,0,0,150,1,1.9,1,2,3,0 249 | 66,1,1,160,246,0,1,120,1,0,1,3,1,0 250 | 54,1,1,192,283,0,0,195,0,0,2,1,3,0 251 | 69,1,2,140,254,0,0,146,0,2,1,3,3,0 252 | 51,1,0,140,298,0,1,122,1,4.2,1,3,3,0 253 | 43,1,0,132,247,1,0,143,1,0.1,1,4,3,0 254 | 62,0,0,138,294,1,1,106,0,1.9,1,3,2,0 255 | 67,1,0,100,299,0,0,125,1,0.9,1,2,2,0 256 | 59,1,3,160,273,0,0,125,0,0,2,0,2,0 257 | 45,1,0,142,309,0,0,147,1,0,1,3,3,0 258 | 58,1,0,128,259,0,0,130,1,3,1,2,3,0 259 | 50,1,0,144,200,0,0,126,1,0.9,1,0,3,0 260 | 62,0,0,150,244,0,1,154,1,1.4,1,0,2,0 261 | 38,1,3,120,231,0,1,182,1,3.8,1,0,3,0 262 | 66,0,0,178,228,1,1,165,1,1,1,2,3,0 263 | 52,1,0,112,230,0,1,160,0,0,2,1,2,0 264 | 53,1,0,123,282,0,1,95,1,2,1,2,3,0 265 | 63,0,0,108,269,0,1,169,1,1.8,1,2,2,0 266 | 54,1,0,110,206,0,0,108,1,0,1,1,2,0 267 | 66,1,0,112,212,0,0,132,1,0.1,2,1,2,0 268 | 55,0,0,180,327,0,2,117,1,3.4,1,0,2,0 269 | 49,1,2,118,149,0,0,126,0,0.8,2,3,2,0 270 | 54,1,0,122,286,0,0,116,1,3.2,1,2,2,0 271 | 56,1,0,130,283,1,0,103,1,1.6,0,0,3,0 272 | 46,1,0,120,249,0,0,144,0,0.8,2,0,3,0 273 | 61,1,3,134,234,0,1,145,0,2.6,1,2,2,0 274 | 67,1,0,120,237,0,1,71,0,1,1,0,2,0 275 | 58,1,0,100,234,0,1,156,0,0.1,2,1,3,0 276 | 47,1,0,110,275,0,0,118,1,1,1,1,2,0 277 | 52,1,0,125,212,0,1,168,0,1,2,2,3,0 278 | 58,1,0,146,218,0,1,105,0,2,1,1,3,0 279 | 57,1,1,124,261,0,1,141,0,0.3,2,0,3,0 280 | 58,0,1,136,319,1,0,152,0,0,2,2,2,0 281 | 61,1,0,138,166,0,0,125,1,3.6,1,1,2,0 282 | 42,1,0,136,315,0,1,125,1,1.8,1,0,1,0 283 | 52,1,0,128,204,1,1,156,1,1,1,0,0,0 284 | 59,1,2,126,218,1,1,134,0,2.2,1,1,1,0 285 | 40,1,0,152,223,0,1,181,0,0,2,0,3,0 286 | 61,1,0,140,207,0,0,138,1,1.9,2,1,3,0 287 | 46,1,0,140,311,0,1,120,1,1.8,1,2,3,0 288 | 59,1,3,134,204,0,1,162,0,0.8,2,2,2,0 289 | 57,1,1,154,232,0,0,164,0,0,2,1,2,0 290 | 57,1,0,110,335,0,1,143,1,3,1,1,3,0 291 | 55,0,0,128,205,0,2,130,1,2,1,1,3,0 292 | 61,1,0,148,203,0,1,161,0,0,2,1,3,0 293 | 58,1,0,114,318,0,2,140,0,4.4,0,3,1,0 294 | 58,0,0,170,225,1,0,146,1,2.8,1,2,1,0 295 | 67,1,2,152,212,0,0,150,0,0.8,1,0,3,0 296 | 44,1,0,120,169,0,1,144,1,2.8,0,0,1,0 297 | 63,1,0,140,187,0,0,144,1,4,2,2,3,0 298 | 63,0,0,124,197,0,1,136,1,0,1,0,2,0 299 | 59,1,0,164,176,1,0,90,0,1,1,2,1,0 300 | 57,0,0,140,241,0,1,123,1,0.2,1,0,3,0 301 | 45,1,3,110,264,0,1,132,0,1.2,1,0,3,0 302 | 68,1,0,144,193,1,1,141,0,3.4,1,2,3,0 303 | 57,1,0,130,131,0,1,115,1,1.2,1,1,3,0 304 | 57,0,1,130,236,0,0,174,0,0,1,1,2,0 305 | --------------------------------------------------------------------------------