├── 01_stats_data.py ├── 02_simple_linear_regression.py ├── 03_loss_functions.py ├── 04_scoring.py ├── 05_multiple_linear_regression.py ├── 06_input_data.csv ├── 06_output_data.csv ├── 06_pandas.py ├── 07_model.py ├── 07_output_salepricemodel.pkl ├── 08_input.json ├── 08_predict.py ├── 09_flask_api.py ├── 10_model_compare.py ├── 11_MV_DV_correlation.csv ├── 11_feature_selection_mvdv.py ├── 11_input_data.csv ├── 11_output_data.csv ├── 12_PV_MVDV_correlation.csv ├── 12_feature_selection_pv_mvdv.py ├── 12_output_data.csv ├── 13_feature_selection_rfe.py ├── 14_input_data.csv ├── 14_trend_parity_plots.py ├── 15_outliers.py ├── 16_univariateplot.py ├── 17_bivariateplot.py ├── 18_multivariateplot.py ├── 19_vectors.py ├── 1stFlrSF.png ├── 20_NLTK.py ├── 21_Kfold.py ├── 22_stepwise_forward_selection.py ├── 23_input_data.csv ├── 23_model_creation.py ├── 23_sens_analysis.csv ├── 23_sensitivity_analysis.py ├── 24_hyperparameter_tuning.py ├── 25_low_salespr_corr_columns.py ├── 26_valleymovingwindow.py ├── 27_features_from_images.py ├── 28_feature_scaling.py ├── 29_gradient_descent.py ├── Algorithmns ├── 01_linear_regression_equation.py ├── 02_cost_function.py ├── 03_contour_plots.py ├── 04_gradient_descent.py ├── 05_multiple_linear_equation.py ├── 06_polynomial_regression.py ├── 07_loan_prediction.py ├── 07_logistic_regression.py ├── 08_confusion_matrix_precision_recall.py ├── 08_precision_recall.py ├── 09_multiclass_classification.py ├── 10_issues_classification.py ├── 11_clustering.py ├── 12_elbow_method.py ├── 13_silhouette_coefficient.py ├── 14_svc_linear.py ├── 15_svm_nonlinear_kernels.py ├── 16_pca.py ├── 17.1_perceptron.py ├── 17.2_perceptron.py ├── 18_neural_networks.py ├── 19_decision_trees.py ├── Consumer_Complaints.csv ├── OCR.py ├── ads.csv ├── anomaly_detection.py ├── currency.csv ├── decisionT_randomF.py ├── decisiontree.png ├── flowers.csv ├── flv.csv ├── matrix.png ├── notes ├── notes.odp ├── plot_svm_non_linear.py ├── randomforest.png ├── spam.csv ├── sss.csv ├── train1.csv └── tree.dot ├── Dockerfile ├── ParallelCoordinates.jpg ├── README.md ├── index.html ├── metrics.csv ├── parallel_coordinates_plot.html └── requirements.txt /01_stats_data.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | 3 | x = [[6], [8], [10], [14], [18], [21]] 4 | y = [[7], [9], [13], [17.5], [18], [21]] 5 | 6 | plt.figure() 7 | plt.title('Pizza Price statistics') 8 | plt.xlabel('Diameter') 9 | plt.ylabel('dollar price') 10 | plt.plot(x,y,'.') 11 | plt.axis([0,25,0,25]) 12 | plt.grid(True) 13 | plt.show() 14 | -------------------------------------------------------------------------------- /02_simple_linear_regression.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | from sklearn.linear_model import LinearRegression 3 | 4 | x = [[6], [8], [10], [14], [18]] 5 | y = [[7], [9], [13], [17.5], [18]] 6 | 7 | model = LinearRegression() 8 | model.fit(x,y) 9 | 10 | plt.figure() 11 | plt.title('Pizza Price Predictions') 12 | plt.xlabel('Diameter') 13 | plt.ylabel('dollar price') 14 | plt.plot(x,y,'.') 15 | plt.plot(x,model.predict(x),'--') 16 | plt.axis([0,25,0,25]) 17 | plt.grid(True) 18 | plt.show() 19 | 20 | print (model.predict([[21]])) 21 | -------------------------------------------------------------------------------- /03_loss_functions.py: -------------------------------------------------------------------------------- 1 | from sklearn.linear_model import 
LinearRegression 2 | import numpy as np 3 | from numpy.linalg import inv,lstsq 4 | from numpy import dot, transpose 5 | 6 | x = [[6], [8], [10], [14], [18]] 7 | y = [[7], [9], [13], [17.5], [18]] 8 | 9 | model = LinearRegression() 10 | model.fit(x,y) 11 | 12 | print ('Residual sum of squares = ',np.mean((model.predict(x)- y) ** 2)) 13 | print ('Variance = ',np.var([6, 8, 10, 14, 18], ddof=1)) 14 | print ('Co-variance = ',np.cov([6, 8, 10, 14, 18], [7, 9, 13, 17.5, 18])[0][1]) 15 | -------------------------------------------------------------------------------- /04_scoring.py: -------------------------------------------------------------------------------- 1 | from sklearn.linear_model import LinearRegression 2 | import numpy as np 3 | from numpy.linalg import inv,lstsq 4 | from numpy import dot, transpose 5 | 6 | x = [[6], [8], [10], [14], [18]] 7 | y = [[7], [9], [13], [17.5], [18]] 8 | model = LinearRegression() 9 | model.fit(x,y) 10 | 11 | x_test = [[8], [9], [11], [16], [12]] 12 | y_test = [[11], [8.5], [15], [18], [11]] 13 | 14 | print ('R-squared score = ',model.score(x_test, y_test)) 15 | -------------------------------------------------------------------------------- /05_multiple_linear_regression.py: -------------------------------------------------------------------------------- 1 | from sklearn.linear_model import LinearRegression 2 | from numpy.linalg import lstsq 3 | 4 | x = [[6, 2], [8, 1], [10, 0], [14, 2], [18, 0]] 5 | y = [[7], [9], [13], [17.5], [18]] 6 | model = LinearRegression() 7 | model.fit(x,y) 8 | 9 | x1 = [[8, 2], [9, 0], [11, 2], [16, 2], [12, 0]] 10 | y1 = [[11], [8.5], [15], [18], [11]] 11 | 12 | predictions = model.predict(x1) 13 | for i, prediction in enumerate(predictions): 14 | print ((prediction, y1[i])) 15 | 16 | print (lstsq(x, y, rcond=None)[0]) 17 | 18 | print ('R-squared score = ',model.score(x1, y1)) 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /06_pandas.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | 3 | # data can be downloaded from the url: https://www.kaggle.com/vikrishnan/boston-house-prices 4 | df = pd.read_csv('./06_input_data.csv') 5 | 6 | # Understanding data 7 | print (df.shape) 8 | print (df.columns) 9 | print(df.head(5)) 10 | print(df.info()) 11 | print(df.describe()) 12 | print(df.groupby('LotShape').size()) 13 | 14 | # Dropping null value columns which cross the threshold 15 | a = df.isnull().sum() 16 | print (a) 17 | b = a[a>(0.05*len(a))] 18 | print (b) 19 | df = df.drop(b.index, axis=1) 20 | print (df.shape) 21 | 22 | # Replacing null value columns (text) with most used value 23 | a1 = df.select_dtypes(include=['object']).isnull().sum() 24 | print (a1) 25 | print (a1.index) 26 | for i in a1.index: 27 | b1 = df[i].value_counts().index.tolist() 28 | print (b1) 29 | df[i] = df[i].fillna(b1[0]) 30 | 31 | # Replacing null value columns (int, float) with most used value 32 | a2 = df.select_dtypes(include=['integer','float']).isnull().sum() 33 | print (a2) 34 | b2 = a2[a2!=0].index 35 | print (b2) 36 | df = df.fillna(df[b2].mode().to_dict(orient='records')[0]) 37 | 38 | # Creating new columns from existing columns 39 | print (df.shape) 40 | a3 = df['YrSold'] - df['YearBuilt'] 41 | b3 = df['YrSold'] - df['YearRemodAdd'] 42 | df['Years Before Sale'] = a3 43 | df['Years Since Remod'] = b3 44 | print (df.shape) 45 | 46 | # Dropping unwanted columns 47 | df = df.drop(["Id", "MoSold", "SaleCondition", "SaleType", "YearBuilt", 
"YearRemodAdd"], axis=1) 48 | print (df.shape) 49 | 50 | # Dropping columns which has correlation with target less than threshold 51 | target='SalePrice' 52 | x = df.select_dtypes(include=['integer','float']).corr()[target].abs() 53 | print (x) 54 | df=df.drop(x[x<0.4].index, axis=1) 55 | print (df.shape) 56 | 57 | # Checking for the necessary features after dropping some columns 58 | l1 = ["PID","MS SubClass","MS Zoning","Street","Alley","Land Contour","Lot Config","Neighborhood","Condition 1","Condition 2","Bldg Type","House Style","Roof Style","Roof Matl","Exterior 1st","Exterior 2nd","Mas Vnr Type","Foundation","Heating","Central Air","Garage Type","Misc Feature","Sale Type","Sale Condition"] 59 | l2 = [] 60 | for i in l1: 61 | if i in df.columns: 62 | l2.append(i) 63 | 64 | # Getting rid of nominal columns with too many unique values 65 | for i in l2: 66 | len(df[i].unique())>10 67 | df=df.drop(i, axis=1) 68 | print (df.columns) 69 | 70 | df.to_csv('06_output_data.csv',index=False) 71 | -------------------------------------------------------------------------------- /07_model.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from sklearn.linear_model import LinearRegression 3 | from sklearn.model_selection import train_test_split,cross_val_score 4 | #from sklearn.externals import joblib 5 | import joblib 6 | from sklearn.metrics import mean_squared_error 7 | import matplotlib.pyplot as plt 8 | from math import sqrt 9 | import os 10 | 11 | df = pd.read_csv('./06_output_data.csv') 12 | 13 | i = list(df.columns.values) 14 | i.pop(i.index('SalePrice')) 15 | df0 = df[i+['SalePrice']] 16 | df = df0.select_dtypes(include=['integer','float']) 17 | print (df.columns) 18 | 19 | X = df[list(df.columns)[:-1]] 20 | y = df['SalePrice'] 21 | X_train, X_test, y_train, y_test = train_test_split(X, y) 22 | regressor = LinearRegression() 23 | regressor.fit(X_train, y_train) 24 | 25 | y_predictions = regressor.predict(X_test) 26 | 27 | meanSquaredError=mean_squared_error(y_test, y_predictions) 28 | rootMeanSquaredError = sqrt(meanSquaredError) 29 | 30 | print("Number of predictions:",len(y_predictions)) 31 | print("Mean Squared Error:", meanSquaredError) 32 | print("Root Mean Squared Error:", rootMeanSquaredError) 33 | print ("Scoring:",regressor.score(X_test, y_test)) 34 | 35 | plt.plot(y_predictions,y_test,'r.') 36 | plt.plot(y_predictions,y_predictions,'k-') 37 | plt.title('Parity Plot - Linear Regression') 38 | plt.show() 39 | 40 | plot = plt.scatter(y_predictions, (y_predictions - y_test), c='b') 41 | plt.hlines(y=0, xmin= 100000, xmax=400000) 42 | plt.title('Residual Plot - Linear Regression') 43 | plt.show() 44 | 45 | joblib.dump(regressor, './07_output_salepricemodel.pkl') 46 | -------------------------------------------------------------------------------- /07_output_salepricemodel.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nithyadurai87/machine_learning_examples/b32ebbfb5f314b08e0e0f4672d1e3ec713110faa/07_output_salepricemodel.pkl -------------------------------------------------------------------------------- /08_input.json: -------------------------------------------------------------------------------- 1 | { 2 | "OverallQual":[7], 3 | "TotalBsmtSF":[856], 4 | "1stFlrSF":[856], 5 | "GrLivArea":[1710], 6 | "FullBath":[2], 7 | "TotRmsAbvGrd":[8], 8 | "Fireplaces":[0], 9 | "GarageCars":[2], 10 | "GarageArea":[548], 11 | "Years Before Sale":[5], 12 | "Years 
Since Remod":[5] 13 | } 14 | -------------------------------------------------------------------------------- /08_predict.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import pandas as pd 4 | import numpy 5 | #from sklearn.externals import joblib 6 | import joblib 7 | 8 | s = pd.read_json('./08_input.json') 9 | p = joblib.load("./07_output_salepricemodel.pkl") 10 | r = p.predict(s) 11 | print (str(r)) 12 | -------------------------------------------------------------------------------- /09_flask_api.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import pandas as pd 4 | import numpy 5 | from flask import Flask, render_template, request, jsonify 6 | from pandas.io.json import json_normalize 7 | #from sklearn.externals import joblib 8 | import joblib 9 | 10 | app = Flask(__name__) 11 | port = int(os.getenv('PORT', 5500)) 12 | 13 | @app.route('/') 14 | def home(): 15 | return render_template('index.html') 16 | 17 | @app.route('/api/salepricemodel', methods=['POST']) 18 | def salepricemodel(): 19 | if request.method == 'POST': 20 | try: 21 | post_data = request.get_json() 22 | json_data = json.dumps(post_data) 23 | s = pd.read_json(json_data) 24 | p = joblib.load("./07_output_salepricemodel.pkl") 25 | r = p.predict(s) 26 | return str(r) 27 | 28 | except Exception as e: 29 | return (e) 30 | 31 | if __name__ == '__main__': 32 | app.run(host='0.0.0.0', port=port, debug=True) 33 | -------------------------------------------------------------------------------- /10_model_compare.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from sklearn.linear_model import LinearRegression, Ridge, Lasso, ElasticNet 3 | from sklearn.ensemble import RandomForestRegressor, AdaBoostRegressor, ExtraTreesRegressor, GradientBoostingRegressor 4 | from sklearn.tree import DecisionTreeRegressor 5 | from sklearn.neural_network import MLPRegressor 6 | from sklearn.preprocessing import StandardScaler 7 | from sklearn.model_selection import train_test_split,cross_val_score 8 | import joblib 9 | from sklearn.metrics import mean_squared_error 10 | import matplotlib.pyplot as plt 11 | from math import sqrt 12 | import numpy as np 13 | import os 14 | 15 | df = pd.read_csv('/content/sample_data/06_output_data.csv') 16 | 17 | i = list(df.columns.values) 18 | i.pop(i.index('SalePrice')) 19 | df0 = df[i+['SalePrice']] 20 | df = df0.select_dtypes(include=['integer','float']) 21 | 22 | X = df[list(df.columns)[:-1]] 23 | y = df['SalePrice'] 24 | X_train, X_test, y_train, y_test = train_test_split(X, y) 25 | 26 | def linear(): 27 | regressor = LinearRegression() 28 | regressor.fit(X_train, y_train) 29 | y_predictions = regressor.predict(X_test) 30 | return (regressor.score(X_test, y_test),sqrt(mean_squared_error(y_test, y_predictions))) 31 | 32 | def ridge(): 33 | scaler = StandardScaler() 34 | X_normalized = scaler.fit_transform(X_train) 35 | ridge = Ridge() 36 | ridge.fit(X_normalized, y_train) 37 | y_predictions = ridge.predict(X_test) 38 | return (ridge.score(X_test, y_test),sqrt(mean_squared_error(y_test, y_predictions))) 39 | 40 | def lasso(): 41 | scaler = StandardScaler() 42 | X_normalized = scaler.fit_transform(X_train) 43 | lasso = Lasso() 44 | lasso.fit(X_normalized, y_train) 45 | y_predictions = lasso.predict(X_test) 46 | return (lasso.score(X_test, y_test),sqrt(mean_squared_error(y_test, y_predictions))) 47 | 48 | def 
elasticnet(): 49 | scaler = StandardScaler() 50 | X_normalized = scaler.fit_transform(X_train) 51 | elasticNet = ElasticNet() 52 | elasticNet.fit(X_normalized, y_train) 53 | y_predictions = elasticNet.predict(scaler.transform(X_test)) 54 | return (elasticNet.score(scaler.transform(X_test), y_test),sqrt(mean_squared_error(y_test, y_predictions))) 55 | 56 | def randomforest(): 57 | regressor = RandomForestRegressor(n_estimators=15,min_samples_split=15,criterion='friedman_mse',max_depth=None) 58 | regressor.fit(X_train, y_train) 59 | y_predictions = regressor.predict(X_test) 60 | print("Selected Features for RandomForest",regressor.feature_importances_) 61 | return (regressor.score(X_test, y_test),sqrt(mean_squared_error(y_test, y_predictions))) 62 | 63 | def perceptron(): 64 | regressor = MLPRegressor(hidden_layer_sizes=(5000,), activation='relu', solver='adam', max_iter=1000) 65 | regressor.fit(X_train, y_train) 66 | y_predictions = regressor.predict(X_test) 67 | print("Coefficients of Perceptron",regressor.coefs_) 68 | return (regressor.score(X_test, y_test),sqrt(mean_squared_error(y_test, y_predictions))) 69 | 70 | def decisiontree(): 71 | regressor = DecisionTreeRegressor(min_samples_split=30,max_depth=None) 72 | regressor.fit(X_train, y_train) 73 | y_predictions = regressor.predict(X_test) 74 | print("Selected Features for DecisionTrees",regressor.feature_importances_) 75 | return (regressor.score(X_test, y_test),sqrt(mean_squared_error(y_test, y_predictions))) 76 | 77 | def adaboost(): 78 | regressor = AdaBoostRegressor(random_state=8, loss='exponential').fit(X_train, y_train) 79 | regressor.fit(X_train, y_train) 80 | y_predictions = regressor.predict(X_test) 81 | print("Selected Features for Adaboost",regressor.feature_importances_) 82 | return (regressor.score(X_test, y_test),sqrt(mean_squared_error(y_test, y_predictions))) 83 | 84 | def extratrees(): 85 | regressor = ExtraTreesRegressor(n_estimators=50).fit(X_train, y_train) 86 | regressor.fit(X_train, y_train) 87 | y_predictions = regressor.predict(X_test) 88 | print("Selected Features for Extratrees",regressor.feature_importances_) 89 | return (regressor.score(X_test, y_test),sqrt(mean_squared_error(y_test, y_predictions))) 90 | 91 | def gradientboosting(): 92 | regressor = GradientBoostingRegressor(loss='quantile',n_estimators=500, min_samples_split=15).fit(X_train, y_train) 93 | regressor.fit(X_train, y_train) 94 | y_predictions = regressor.predict(X_test) 95 | print("Selected Features for Gradientboosting",regressor.feature_importances_) 96 | return (regressor.score(X_test, y_test),sqrt(mean_squared_error(y_test, y_predictions))) 97 | 98 | print ("Score, RMSE values") 99 | print ("Linear = ",linear()) 100 | print ("Ridge = ",ridge()) 101 | print ("Lasso = ",lasso()) 102 | print ("ElasticNet = ",elasticnet()) 103 | print ("RandomForest = ",randomforest()) 104 | print ("Perceptron = ",perceptron()) 105 | print ("DecisionTree = ",decisiontree()) 106 | print ("AdaBoost = ",adaboost()) 107 | print ("ExtraTrees = ",extratrees()) 108 | print ("GradientBoosting = ",gradientboosting()) 109 | -------------------------------------------------------------------------------- /11_MV_DV_correlation.csv: -------------------------------------------------------------------------------- 1 | ,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z 2 | G,1,-0.52,-0.5,-0.19,-0.11,-0.42,0.78,0.53,0.52,-0.16,-0.15,-0.08,-0.44,0.33,0.39,-0.46,-0.08,0.2,0.57,0.76 3 | H,-0.52,1,0.99,0.57,0.85,0.98,-0.77,-0.3,-1,-0.41,0.69,0.72,0.78,-0.6,-0.68,-0.07,0.22,0.58,0.06,-0.64 4 | 
I,-0.5,0.99,1,0.66,0.87,0.99,-0.74,-0.23,-0.99,-0.36,0.74,0.78,0.76,-0.61,-0.67,-0.03,0.29,0.61,0.09,-0.63 5 | J,-0.19,0.57,0.66,1,0.71,0.68,-0.23,0.46,-0.57,0.01,0.88,0.87,0.38,-0.48,-0.38,0.12,0.77,0.71,0.45,-0.33 6 | K,-0.11,0.85,0.87,0.71,1,0.92,-0.34,0.13,-0.85,-0.52,0.8,0.89,0.62,-0.48,-0.54,-0.35,0.33,0.92,0.52,-0.22 7 | L,-0.42,0.98,0.99,0.68,0.92,1,-0.67,-0.12,-0.98,-0.42,0.8,0.83,0.75,-0.62,-0.69,-0.12,0.36,0.71,0.24,-0.56 8 | M,0.78,-0.77,-0.74,-0.23,-0.34,-0.67,1,0.62,0.77,0.18,-0.33,-0.27,-0.6,0.59,0.61,-0.23,-0.07,0.02,0.44,0.91 9 | N,0.53,-0.3,-0.23,0.46,0.13,-0.12,0.62,1,0.3,0.15,0.45,0.42,-0.32,0.02,0.14,-0.11,0.71,0.48,0.82,0.45 10 | O,0.52,-1,-0.99,-0.57,-0.85,-0.98,0.77,0.3,1,0.41,-0.69,-0.72,-0.78,0.6,0.68,0.07,-0.22,-0.58,-0.06,0.64 11 | P,-0.16,-0.41,-0.36,0.01,-0.52,-0.42,0.18,0.15,0.41,1,-0.13,-0.29,-0.03,0.34,0.5,0.9,0.26,-0.49,-0.37,0.04 12 | Q,-0.15,0.69,0.74,0.88,0.8,0.8,-0.33,0.45,-0.69,-0.13,1,0.96,0.51,-0.55,-0.53,0.01,0.8,0.79,0.57,-0.37 13 | R,-0.08,0.72,0.78,0.87,0.89,0.83,-0.27,0.42,-0.72,-0.29,0.96,1,0.46,-0.57,-0.56,-0.17,0.68,0.89,0.63,-0.27 14 | S,-0.44,0.78,0.76,0.38,0.62,0.75,-0.6,-0.32,-0.78,-0.03,0.51,0.46,1,-0.1,-0.21,0.26,0.15,0.36,-0.05,-0.4 15 | T,0.33,-0.6,-0.61,-0.48,-0.48,-0.62,0.59,0.02,0.6,0.34,-0.55,-0.57,-0.1,1,0.86,0.1,-0.38,-0.35,-0.11,0.66 16 | U,0.39,-0.68,-0.67,-0.38,-0.54,-0.69,0.61,0.14,0.68,0.5,-0.53,-0.56,-0.21,0.86,1,0.23,-0.24,-0.39,-0.14,0.67 17 | V,-0.46,-0.07,-0.03,0.12,-0.35,-0.12,-0.23,-0.11,0.07,0.9,0.01,-0.17,0.26,0.1,0.23,1,0.29,-0.48,-0.54,-0.35 18 | W,-0.08,0.22,0.29,0.77,0.33,0.36,-0.07,0.71,-0.22,0.26,0.8,0.68,0.15,-0.38,-0.24,0.29,1,0.44,0.51,-0.24 19 | X,0.2,0.58,0.61,0.71,0.92,0.71,0.02,0.48,-0.58,-0.49,0.79,0.89,0.36,-0.35,-0.39,-0.48,0.44,1,0.8,0.09 20 | Y,0.57,0.06,0.09,0.45,0.52,0.24,0.44,0.82,-0.06,-0.37,0.57,0.63,-0.05,-0.11,-0.14,-0.54,0.51,0.8,1,0.42 21 | Z,0.76,-0.64,-0.63,-0.33,-0.22,-0.56,0.91,0.45,0.64,0.04,-0.37,-0.27,-0.4,0.66,0.67,-0.35,-0.24,0.09,0.42,1 22 | -------------------------------------------------------------------------------- /11_feature_selection_mvdv.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import matplotlib.pyplot as plt 3 | import numpy 4 | from sklearn.linear_model import LinearRegression 5 | from sklearn.model_selection import train_test_split,cross_val_score 6 | from sklearn.metrics import mean_squared_error 7 | from math import sqrt 8 | from sklearn.feature_selection import RFE 9 | from sklearn.datasets import make_friedman1 10 | 11 | df = pd.read_csv('./11_input_data.csv') 12 | 13 | # Dropping all process parameters 14 | df = df.drop(["A","B", "C", "D", "E", "F"], axis=1) 15 | 16 | #finding correlation between manipulated & disturbance variables 17 | correlations = df.corr() 18 | correlations = correlations.round(2) 19 | correlations.to_csv('11_MV_DV_correlation.csv',index=False) 20 | fig = plt.figure() 21 | g = fig.add_subplot(111) 22 | cax = g.matshow(correlations, vmin=-1, vmax=1) 23 | fig.colorbar(cax) 24 | ticks = numpy.arange(0,20,1) 25 | g.set_xticks(ticks) 26 | g.set_yticks(ticks) 27 | g.set_xticklabels(list(df.columns)) 28 | g.set_yticklabels(list(df.columns)) 29 | plt.savefig('11_MV_DV_correlation.png') 30 | 31 | #removing parameters with high correlation 32 | upper = correlations.where(numpy.triu(numpy.ones(correlations.shape), k=1).astype(numpy.bool)) 33 | cols_to_drop = [] 34 | for i in upper.columns: 35 | if (any(upper[i] == -1) or any(upper[i] == -0.98) or any(upper[i] == -0.99) or 
any(upper[i] == 0.98) or any(upper[i] == 0.99) or any(upper[i] == 1)): 36 | cols_to_drop.append(i) 37 | df = df.drop(cols_to_drop, axis=1) 38 | 39 | print (df.shape,df.columns) 40 | df.to_csv('./11_output_data.csv',index=False) 41 | -------------------------------------------------------------------------------- /11_input_data.csv: -------------------------------------------------------------------------------- 1 | A,B,C,D,E,F,G,H,I,J,K,L,M,N,O,P,Q,R,S,T,U,V,W,X,Y,Z 2 | 520.5364431,-10.70307907,41.73186869,38.06617464,37.24473095,-30.8719176025,-10.3597923315,36.32092795,89.28150305,96.33635691,101.9959183,95.20303638,96.0393265,93.90096921,40.16075248,425.5527773,450.6792934,461.643155,440.1028581,414.9348594,406.2267325,429.8263829,449.457238,458.741507,453.6588352,423.6767705 3 | 519.1515788,-9.53942145,41.73064277,38.07933667,37.24698306,-30.044836575,-9.987551893,36.31641622,89.30986123,96.5190334,101.9959183,95.16612791,96.12715439,94.30830023,40.17201722,426.6242566,451.8878496,463.1336372,439.5468695,414.8614382,406.3843101,430.9848323,450.0760819,458.9483142,453.1586749,423.141717 4 | 517.1319702,-10.88559798,41.72941685,38.09249871,37.24923516,-29.2177555475,-10.3015117487,36.3119045,89.29955194,96.69908041,101.9959183,95.12921943,96.21498229,94.71563125,40.18328196,426.8144557,452.1943853,463.3582619,439.3168201,414.788017,406.5418878,431.6118953,450.6974617,459.1551214,453.3271191,422.6066635 5 | 514.7546672,-9.599539989,41.72819093,38.10566074,37.25148727,-28.39067452,-10.6154716043,36.30739277,89.28924266,96.74857913,101.9959183,95.09231096,96.30281018,95.12296228,40.1945467,426.9388267,452.4837652,463.370656,439.5463488,414.7145958,406.6994655,431.6599338,451.3188415,459.3619285,453.8778838,422.6791601 6 | 513.3419373,-8.719461175,41.72696502,38.11882278,37.25373937,-26.89307899,-10.92943146,36.30288104,89.27893337,96.7254402,101.9959183,95.05540249,96.39063807,95.5302933,40.20581144,427.0631978,452.7731451,463.3838499,439.4180642,414.6411746,406.8570432,431.7077225,451.9402214,459.5687357,454.4286486,423.5489292 7 | 512.6266246,-7.855236073,41.7257391,38.13198481,37.25599148,-22.70555745,-9.379569646,36.29836931,89.26862409,96.70230126,101.9959183,95.01849402,96.47846596,95.93762433,40.21707618,427.1875688,453.381317,464.4342087,439.2060534,414.5749804,407.0146208,431.7555112,452.5068446,459.7755429,454.9794133,424.6475086 8 | 512.4457469,-8.295845198,41.72451318,38.14514685,37.25824359,-21.20972728,-8.795914626,36.29385759,89.2583148,96.67916233,101.9959183,94.98158554,96.56629385,96.16554107,40.22834092,427.2279303,453.9248058,464.4941257,438.9940425,414.5513749,406.7194402,431.6819447,452.352861,459.9823501,455.5301781,423.8906973 9 | 512.0665894,-7.494122619,41.72328726,38.15830888,37.26049569,-11.87331532,-9.076298598,36.28934586,89.24800552,96.6560234,101.9959183,94.89729258,96.65412174,96.09135273,40.23960566,426.685419,453.4114419,464.1798853,438.7820317,414.5347402,405.7228558,430.766394,451.9754545,460.1891573,456.0809428,424.2640125 10 | 512.3947245,-6.710852682,41.72206135,38.17147092,37.2627478,-4.896339263,-6.614137975,36.28483413,89.23769623,96.63288447,101.9959183,94.7641696,96.74194963,96.01232602,40.25087041,426.0514994,452.8978168,463.8656449,438.5700209,415.1528795,406.3307509,429.7182315,451.5980479,460.3748681,456.3978612,425.4742478 11 | 
512.9216003,-6.883846149,41.72083543,38.18463295,37.2649999,0.837976755,-6.605268542,36.2803224,89.22738695,96.60974553,101.9959183,94.63104387,96.82977752,95.9332993,40.26213515,425.9766475,452.3841918,463.5514045,438.3628959,415.2470416,407.2345531,429.4619009,451.2206414,460.3594001,456.123684,426.6217905 12 | 514.4390747,-5.876158072,41.71960951,38.19779499,37.26725201,2.825582199,-6.541620902,36.27581068,89.21707766,96.5866066,101.9959183,94.49791814,96.91760541,95.85427259,40.27339989,426.1548666,451.8705667,463.237164,438.1691368,415.2015669,407.4325274,429.5640058,450.8432348,460.2938577,455.8206789,427.0989665 13 | 515.3169902,-7.11667663,41.71838359,38.21095702,37.26950411,4.907841834,-6.197143015,36.27129895,89.20676838,96.56346767,101.9859062,94.36479241,97.00543331,95.77524587,40.28466463,426.3330857,451.3708241,462.9229236,437.9761398,415.1560922,407.4396475,429.6661106,450.4827718,460.2256388,455.5176739,427.394877 14 | 516.0371653,-6.71947513,41.71715768,38.22411906,37.27175622,8.450391087,-8.80952899,36.26678722,89.19645909,96.54032873,101.9166026,94.23166668,97.0932612,95.69621915,40.29592937,426.5113047,451.1156101,462.6055567,437.8529163,416.3595907,407.4467677,429.8206371,450.4207572,459.8890478,455.2146688,427.6907875 15 | 516.9307475,-6.415715172,41.71593176,38.23728109,37.27400832,6.965950993,-7.657526599,36.26227549,89.18614981,96.5171898,101.8375462,94.09854095,97.16500658,95.61719244,40.30719411,426.6895238,450.9499807,462.2155499,438.0279959,416.4299192,407.4538878,430.4253082,450.468081,459.3741462,454.9116637,427.64251 16 | 517.4477763,-7.114941216,41.71470584,38.25044313,37.27626043,8.536462705,-6.140578808,36.25776377,89.17584052,96.49405087,101.7584897,93.96541523,97.1035246,95.53816572,40.31845885,426.8850377,450.7843514,461.7950725,438.2369991,415.7711972,407.461008,431.133309,450.5154049,458.8592446,454.6086586,426.6716729 17 | 518.3179281,-6.423368381,41.71347992,38.26325433,37.27851254,0.5015253,-7.487370939,36.25325204,89.16553124,96.47091194,101.6794333,93.8322895,97.01233292,95.45913901,40.32972359,427.7198385,450.6187221,461.374595,438.4931662,415.519359,407.5592568,431.8413099,450.5627287,458.344343,454.3056535,427.4405636 18 | 518.5651123,-7.681222304,41.71225401,38.26142935,37.28076464,3.117005869,-8.472931745,36.24874031,89.15522195,96.447773,101.6003769,93.69916377,96.92114124,95.38011229,40.34098833,427.6930797,450.4530927,460.9541175,439.3205917,417.7258385,408.9251453,432.0630442,450.6100526,457.8294413,454.0026485,427.8976405 19 | 518.2864909,-7.157406299,41.71102809,38.25189666,37.28301675,2.069512555,-8.548014815,36.24422858,89.14491267,96.42463407,101.5213204,93.56603804,96.82994955,95.30108558,40.35225307,427.4114372,450.2874634,460.5987206,438.5234855,417.1354296,408.521996,431.6765692,450.6573765,457.3145397,453.6996434,425.4529103 20 | 519.2524994,-6.960548533,41.70980217,38.24236397,37.28526885,-6.706039455,-7.482602964,36.23971686,89.13460338,96.40149514,101.442264,93.43291231,96.73875787,95.22205886,40.36351781,427.1297948,450.1218337,460.4096191,437.2969196,415.7923454,407.5967779,431.2882406,450.7047002,456.7996382,453.3966383,424.7842554 21 | 519.1530762,-8.626463904,41.70857625,38.23283129,37.28752096,-7.915531513,-9.836468192,36.23520513,89.1242941,96.3783562,101.3632075,93.29978658,96.64756619,95.14303215,40.37478255,426.8481523,449.3767346,460.2341418,436.0796656,414.4492613,406.6715598,430.899912,450.4320713,456.5758182,453.1778881,424.2770178 22 | 
-------------------------------------------------------------------------------- /11_output_data.csv: -------------------------------------------------------------------------------- 1 | G,H,J,K,M,N,P,Q,R,S,T,U,V,W,X,Y,Z,A 2 | -10.3597923315,36.32092795,96.33635691,101.9959183,96.0393265,93.90096921,425.5527773,450.6792934,461.643155,440.1028581,414.9348594,406.2267325,429.8263829,449.457238,458.741507,453.6588352,423.6767705,520.5364431 3 | -9.987551893,36.31641622,96.5190334,101.9959183,96.12715439,94.30830023,426.6242566,451.8878496,463.1336372,439.5468695,414.8614382,406.3843101,430.9848323,450.0760819,458.9483142,453.1586749,423.141717,519.1515788 4 | -10.3015117487,36.3119045,96.69908041,101.9959183,96.21498229,94.71563125,426.8144557,452.1943853,463.3582619,439.3168201,414.788017,406.5418878,431.6118953,450.6974617,459.1551214,453.3271191,422.6066635,517.1319702 5 | -10.6154716043,36.30739277,96.74857913,101.9959183,96.30281018,95.12296228,426.9388267,452.4837652,463.370656,439.5463488,414.7145958,406.6994655,431.6599338,451.3188415,459.3619285,453.8778838,422.6791601,514.7546672 6 | -10.92943146,36.30288104,96.7254402,101.9959183,96.39063807,95.5302933,427.0631978,452.7731451,463.3838499,439.4180642,414.6411746,406.8570432,431.7077225,451.9402214,459.5687357,454.4286486,423.5489292,513.3419373 7 | -9.379569646,36.29836931,96.70230126,101.9959183,96.47846596,95.93762433,427.1875688,453.381317,464.4342087,439.2060534,414.5749804,407.0146208,431.7555112,452.5068446,459.7755429,454.9794133,424.6475086,512.6266246 8 | -8.795914626,36.29385759,96.67916233,101.9959183,96.56629385,96.16554107,427.2279303,453.9248058,464.4941257,438.9940425,414.5513749,406.7194402,431.6819447,452.352861,459.9823501,455.5301781,423.8906973,512.4457469 9 | -9.076298598,36.28934586,96.6560234,101.9959183,96.65412174,96.09135273,426.685419,453.4114419,464.1798853,438.7820317,414.5347402,405.7228558,430.766394,451.9754545,460.1891573,456.0809428,424.2640125,512.0665894 10 | -6.614137975,36.28483413,96.63288447,101.9959183,96.74194963,96.01232602,426.0514994,452.8978168,463.8656449,438.5700209,415.1528795,406.3307509,429.7182315,451.5980479,460.3748681,456.3978612,425.4742478,512.3947245 11 | -6.605268542,36.2803224,96.60974553,101.9959183,96.82977752,95.9332993,425.9766475,452.3841918,463.5514045,438.3628959,415.2470416,407.2345531,429.4619009,451.2206414,460.3594001,456.123684,426.6217905,512.9216003 12 | -6.541620902,36.27581068,96.5866066,101.9959183,96.91760541,95.85427259,426.1548666,451.8705667,463.237164,438.1691368,415.2015669,407.4325274,429.5640058,450.8432348,460.2938577,455.8206789,427.0989665,514.4390747 13 | -6.197143015,36.27129895,96.56346767,101.9859062,97.00543331,95.77524587,426.3330857,451.3708241,462.9229236,437.9761398,415.1560922,407.4396475,429.6661106,450.4827718,460.2256388,455.5176739,427.394877,515.3169902 14 | -8.80952899,36.26678722,96.54032873,101.9166026,97.0932612,95.69621915,426.5113047,451.1156101,462.6055567,437.8529163,416.3595907,407.4467677,429.8206371,450.4207572,459.8890478,455.2146688,427.6907875,516.0371653 15 | -7.657526599,36.26227549,96.5171898,101.8375462,97.16500658,95.61719244,426.6895238,450.9499807,462.2155499,438.0279959,416.4299192,407.4538878,430.4253082,450.468081,459.3741462,454.9116637,427.64251,516.9307475 16 | -6.140578808,36.25776377,96.49405087,101.7584897,97.1035246,95.53816572,426.8850377,450.7843514,461.7950725,438.2369991,415.7711972,407.461008,431.133309,450.5154049,458.8592446,454.6086586,426.6716729,517.4477763 17 | 
-7.487370939,36.25325204,96.47091194,101.6794333,97.01233292,95.45913901,427.7198385,450.6187221,461.374595,438.4931662,415.519359,407.5592568,431.8413099,450.5627287,458.344343,454.3056535,427.4405636,518.3179281 18 | -8.472931745,36.24874031,96.447773,101.6003769,96.92114124,95.38011229,427.6930797,450.4530927,460.9541175,439.3205917,417.7258385,408.9251453,432.0630442,450.6100526,457.8294413,454.0026485,427.8976405,518.5651123 19 | -8.548014815,36.24422858,96.42463407,101.5213204,96.82994955,95.30108558,427.4114372,450.2874634,460.5987206,438.5234855,417.1354296,408.521996,431.6765692,450.6573765,457.3145397,453.6996434,425.4529103,518.2864909 20 | -7.482602964,36.23971686,96.40149514,101.442264,96.73875787,95.22205886,427.1297948,450.1218337,460.4096191,437.2969196,415.7923454,407.5967779,431.2882406,450.7047002,456.7996382,453.3966383,424.7842554,519.2524994 21 | -9.836468192,36.23520513,96.3783562,101.3632075,96.64756619,95.14303215,426.8481523,449.3767346,460.2341418,436.0796656,414.4492613,406.6715598,430.899912,450.4320713,456.5758182,453.1778881,424.2770178,519.1530762 22 | -------------------------------------------------------------------------------- /12_PV_MVDV_correlation.csv: -------------------------------------------------------------------------------- 1 | 0.14 2 | 0.32 3 | 0.84 4 | 0.62 5 | 0.06 6 | 0.78 7 | 0.14 8 | 0.87 9 | 0.85 10 | 0.14 11 | 0.4 12 | 0.33 13 | 0.16 14 | 0.87 15 | 0.79 16 | 0.81 17 | 0.05 18 | 1.0 19 | -------------------------------------------------------------------------------- /12_feature_selection_pv_mvdv.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import matplotlib.pyplot as plt 3 | import numpy 4 | from sklearn.linear_model import LinearRegression 5 | from sklearn.model_selection import train_test_split,cross_val_score 6 | from sklearn.metrics import mean_squared_error 7 | from math import sqrt 8 | from sklearn.feature_selection import RFE 9 | from sklearn.datasets import make_friedman1 10 | 11 | df = pd.read_csv('./11_output_data.csv') 12 | print (df.shape,df.columns) 13 | 14 | # Dropping columns which has correlation with target less than threshold 15 | target = "A" 16 | correlations = df.corr()[target].abs() 17 | correlations = correlations.round(2) 18 | correlations.to_csv('./12_PV_MVDV_correlation.csv',index=False) 19 | df=df.drop(correlations[correlations<0.06].index, axis=1) 20 | 21 | print (df.shape,df.columns) 22 | df.to_csv('./12_output_data.csv',index=False) 23 | -------------------------------------------------------------------------------- /12_output_data.csv: -------------------------------------------------------------------------------- 1 | G,H,J,K,M,N,P,Q,R,S,T,U,V,W,X,Y,A 2 | -10.3597923315,36.32092795,96.33635691,101.9959183,96.0393265,93.90096921,425.5527773,450.67929339999995,461.643155,440.10285810000005,414.93485939999994,406.2267325,429.8263829,449.45723799999996,458.741507,453.6588352,520.5364431 3 | -9.987551893,36.31641622,96.5190334,101.9959183,96.12715439,94.30830023,426.6242566000001,451.88784960000004,463.13363719999995,439.5468695,414.86143819999995,406.38431010000005,430.9848323,450.0760819,458.9483142,453.1586749,519.1515787999999 4 | -10.3015117487,36.3119045,96.69908041,101.9959183,96.21498229,94.71563125,426.8144557,452.1943853,463.3582619,439.31682010000003,414.78801699999997,406.5418878,431.61189529999996,450.69746169999996,459.1551214,453.32711910000006,517.1319702 5 | 
-10.6154716043,36.30739277,96.74857913,101.9959183,96.30281018,95.12296228,426.9388267,452.4837652,463.37065599999994,439.5463488,414.7145958,406.6994655,431.65993380000003,451.3188415,459.36192850000003,453.87788380000006,514.7546672 6 | -10.92943146,36.30288104,96.7254402,101.9959183,96.39063807,95.5302933,427.0631978,452.7731451,463.3838499,439.4180642,414.6411746,406.8570432,431.7077225,451.9402214,459.5687357,454.42864860000003,513.3419372999999 7 | -9.379569646,36.29836931,96.70230126,101.9959183,96.47846596,95.93762433,427.1875688,453.38131699999997,464.43420870000006,439.2060534,414.57498039999996,407.0146208,431.7555112,452.5068446,459.77554289999995,454.97941330000003,512.6266246 8 | -8.795914626,36.29385759,96.67916233,101.9959183,96.56629385,96.16554107,427.2279303,453.9248058,464.49412570000004,438.9940425,414.5513749,406.71944019999995,431.6819447,452.352861,459.9823501,455.53017810000006,512.4457468999999 9 | -9.076298598,36.28934586,96.65602340000001,101.9959183,96.65412174,96.09135273,426.685419,453.4114419,464.1798853,438.7820317,414.5347402,405.72285580000005,430.76639400000005,451.9754545,460.1891573,456.0809428,512.0665893999999 10 | -6.614137975,36.28483413,96.63288447,101.9959183,96.74194963,96.01232602,426.0514994,452.89781680000004,463.8656449,438.5700209,415.1528795,406.33075089999994,429.71823150000006,451.5980478999999,460.3748681,456.39786119999997,512.3947245 11 | -6.605268542,36.280322399999996,96.60974553,101.9959183,96.82977752,95.9332993,425.97664749999996,452.38419180000005,463.5514045,438.36289589999996,415.24704160000005,407.2345531,429.4619009,451.2206414,460.3594001,456.12368399999997,512.9216003 12 | -6.541620902000001,36.27581068,96.5866066,101.9959183,96.91760541,95.85427259,426.15486660000005,451.8705666999999,463.23716399999995,438.1691368,415.2015669,407.43252739999997,429.5640058,450.8432348,460.29385769999993,455.82067889999996,514.4390747 13 | -6.197143015,36.27129895,96.56346767,101.9859062,97.00543331,95.77524587,426.33308569999997,451.37082410000005,462.92292360000005,437.9761398,415.1560922,407.43964750000004,429.6661106000001,450.4827718,460.2256388,455.51767390000003,515.3169902 14 | -8.80952899,36.26678722,96.54032873,101.91660259999999,97.09326120000001,95.69621915,426.51130470000004,451.11561009999997,462.60555669999997,437.85291629999995,416.35959069999996,407.44676769999995,429.82063710000006,450.42075719999997,459.8890478,455.2146688,516.0371653 15 | -7.657526599,36.26227549,96.5171898,101.8375462,97.16500658,95.61719244,426.6895238,450.9499807,462.2155499,438.0279959,416.4299192,407.4538878,430.42530819999996,450.468081,459.37414620000004,454.9116637,516.9307474999999 16 | -6.140578808,36.25776377,96.49405087,101.7584897,97.10352459999999,95.53816572,426.88503770000005,450.78435139999993,461.79507249999995,438.23699910000005,415.7711972,407.461008,431.133309,450.51540489999996,458.85924460000007,454.6086586,517.4477763 17 | -7.487370939,36.25325204,96.47091194,101.6794333,97.01233292,95.45913901,427.7198385,450.6187221,461.374595,438.49316619999996,415.519359,407.5592568,431.84130989999994,450.5627287,458.344343,454.3056535,518.3179281 18 | -8.472931745,36.24874031,96.447773,101.60037690000001,96.92114124,95.38011229,427.6930797,450.4530927,460.9541175,439.32059169999997,417.72583849999995,408.9251453,432.0630442,450.6100526000001,457.8294413,454.0026485,518.5651123 19 | 
-8.548014815,36.24422858,96.42463407,101.52132040000001,96.82994955,95.30108558,427.41143719999997,450.2874634,460.59872060000004,438.52348550000005,417.1354296,408.521996,431.67656919999996,450.65737649999994,457.3145397,453.69964339999996,518.2864909 20 | -7.482602964,36.23971686,96.40149514,101.442264,96.73875787,95.22205886,427.1297948,450.1218337,460.40961910000004,437.2969196,415.7923454,407.59677789999995,431.28824060000005,450.70470020000005,456.7996382,453.39663830000006,519.2524994 21 | -9.836468192,36.23520513,96.3783562,101.3632075,96.64756619,95.14303215,426.8481523,449.37673459999996,460.23414180000003,436.07966560000006,414.4492613,406.6715598,430.89991200000003,450.4320713,456.5758182,453.1778881,519.1530762 22 | -------------------------------------------------------------------------------- /13_feature_selection_rfe.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import matplotlib.pyplot as plt 3 | import numpy 4 | from sklearn.linear_model import LinearRegression 5 | from sklearn.tree import DecisionTreeRegressor 6 | from sklearn.model_selection import train_test_split,cross_val_score 7 | from sklearn.metrics import mean_squared_error 8 | from math import sqrt 9 | from sklearn.feature_selection import RFE 10 | from sklearn.datasets import make_friedman1 11 | 12 | df = pd.read_csv('./12_output_data.csv') 13 | 14 | X = df[list(df.columns)[:-1]] 15 | y = df['A'] 16 | X_train, X_test, y_train, y_test = train_test_split(X, y) 17 | 18 | regressor = DecisionTreeRegressor(min_samples_split=3,max_depth=None) 19 | regressor.fit(X_train, y_train) 20 | y_predictions = regressor.predict(X_test) 21 | print ("Selected Features for DecisionTree",regressor.feature_importances_) 22 | 23 | # RFE Technique - Recursive Feature Elimination 24 | X, y = make_friedman1(n_samples=20, n_features=17, random_state=0) 25 | selector = RFE(LinearRegression()) 26 | selector = selector.fit(X, y) 27 | print ("Selected Features for LinearRegression",selector.ranking_) 28 | -------------------------------------------------------------------------------- /14_input_data.csv: -------------------------------------------------------------------------------- 1 | OverallQual,TotalBsmtSF,1stFlrSF,GrLivArea,FullBath,TotRmsAbvGrd,Fireplaces,GarageCars,GarageArea,SalePrice 2 | 7,856,856,1710,2,8,0,2,548,208500 3 | 6,1262,1262,1262,2,6,1,2,460,181500 4 | 7,920,920,1786,2,6,1,2,608,223500 5 | 7,756,961,1717,1,7,1,3,642,140000 6 | 8,1145,1145,2198,2,9,1,3,836,250000 7 | 5,796,796,1362,1,5,0,2,480,143000 8 | 8,1686,1694,1694,2,7,1,2,636,307000 9 | 7,1107,1107,2090,2,7,2,2,484,200000 10 | 7,952,1022,1774,2,8,2,2,468,129900 11 | 5,991,1077,1077,1,5,2,1,205,118000 12 | 5,1040,1040,1040,1,5,0,1,384,129500 13 | 9,1175,1182,2324,3,11,2,3,736,345000 14 | 5,912,912,912,1,4,0,1,352,144000 15 | 7,1494,1494,1494,2,7,1,3,840,279500 16 | 6,1253,1253,1253,1,5,1,1,352,157000 17 | 7,832,854,854,1,5,0,2,576,132000 18 | 6,1004,1004,1004,1,5,1,2,480,149000 19 | 4,0,1296,1296,2,6,0,2,516,90000 20 | 5,1114,1114,1114,1,6,0,2,576,159000 21 | 5,1029,1339,1339,1,6,0,1,294,139000 22 | 8,1158,1158,2376,3,9,1,3,853,325300 23 | 7,637,1108,1108,1,6,1,1,280,139400 24 | 8,1777,1795,1795,2,7,1,2,534,230000 25 | 5,1040,1060,1060,1,6,1,2,572,129900 26 | 5,1060,1060,1060,1,6,1,1,270,154000 27 | 8,1566,1600,1600,2,7,1,3,890,256300 28 | 5,900,900,900,1,5,0,2,576,134800 29 | 8,1704,1704,1704,2,7,1,3,772,306000 30 | 5,1484,1600,1600,1,6,2,1,319,207500 31 | 4,520,520,520,1,4,0,1,240,68500 32 | 
4,649,649,1317,1,6,0,1,250,40000 33 | 5,1228,1228,1228,1,6,0,1,271,149350 34 | 8,1234,1234,1234,2,7,0,2,484,179900 35 | 5,1398,1700,1700,1,6,1,2,447,165500 36 | 9,1561,1561,1561,2,6,1,2,556,277500 37 | 8,1117,1132,2452,3,9,1,3,691,309000 38 | 5,1097,1097,1097,1,6,0,2,672,145000 39 | 5,1297,1297,1297,1,5,1,2,498,153000 40 | 5,1057,1057,1057,1,5,0,1,246,109000 41 | 4,0,1152,1152,2,6,0,0,0,82000 42 | 6,1088,1324,1324,2,6,1,2,440,160000 43 | 5,1350,1328,1328,1,5,2,1,308,170000 44 | 5,840,884,884,1,5,0,2,504,144000 45 | 5,938,938,938,1,5,0,1,308,130250 46 | 5,1150,1150,1150,1,6,0,1,300,141000 47 | 9,1752,1752,1752,2,6,1,2,576,319900 48 | 7,1434,1518,2149,1,6,1,2,670,239686 49 | 8,1656,1656,1656,2,7,0,3,826,249700 50 | 4,736,736,1452,2,8,0,0,0,113000 51 | 5,955,955,955,1,6,0,1,386,127000 52 | 6,794,794,1470,2,6,0,2,388,177000 53 | 6,816,816,1176,1,6,1,2,528,114500 54 | 5,816,816,816,1,5,0,2,516,110000 55 | 9,1842,1842,1842,0,5,1,3,894,385000 56 | 5,384,1360,1360,1,6,1,2,572,130000 57 | 6,1425,1425,1425,2,7,1,2,576,180500 58 | 8,970,983,1739,2,7,0,2,480,172500 59 | 7,860,860,1720,2,7,0,2,565,196500 60 | 10,1410,1426,2945,3,10,1,3,641,438780 61 | 5,780,780,780,1,4,0,1,352,124900 62 | 6,1158,1158,1158,1,5,0,2,576,158000 63 | 5,530,581,1111,1,6,0,1,288,101000 64 | 8,1370,1370,1370,2,6,1,2,484,202500 65 | 7,576,902,1710,2,9,0,2,480,140000 66 | 7,1057,1057,2034,2,8,0,2,645,219500 67 | 8,1143,1143,2473,2,9,1,3,852,317000 68 | 7,1947,2207,2207,2,7,1,2,576,180000 69 | 7,1453,1479,1479,2,7,0,2,558,226000 70 | 4,747,747,747,1,4,0,1,220,80000 71 | 7,1304,1304,2287,2,7,1,2,667,225000 72 | 7,2223,2223,2223,2,8,2,2,516,244000 73 | 4,845,845,845,1,4,0,2,360,129500 74 | 7,832,885,1718,2,7,1,2,427,185000 75 | 5,1086,1086,1086,1,6,0,2,490,144900 76 | 3,840,840,1605,2,8,0,1,379,107400 77 | 4,462,526,988,1,5,0,1,297,91000 78 | 4,952,952,952,1,4,0,1,283,135750 79 | 5,672,1072,1285,1,6,0,1,240,127000 80 | 4,1768,1768,1768,2,8,0,0,0,136500 81 | 5,440,682,1230,1,5,0,2,440,110000 82 | 6,896,1182,2142,2,8,1,1,509,193500 83 | 6,1237,1337,1337,2,5,0,2,405,153500 84 | 8,1563,1563,1563,2,6,1,3,758,245000 85 | 5,1065,1065,1065,1,6,0,2,461,126500 86 | 7,384,804,1474,2,7,1,2,400,168500 87 | 8,1288,1301,2417,2,9,1,2,462,260000 88 | 6,684,684,1560,2,6,1,2,400,174000 89 | 6,612,612,1224,2,4,0,2,528,164500 90 | 3,1013,1013,1526,1,6,0,0,0,85000 91 | 4,990,990,990,1,5,0,0,0,123600 92 | 4,0,1040,1040,1,4,0,2,420,109900 93 | 5,1235,1235,1235,1,6,0,2,480,98600 94 | 5,876,964,964,1,5,0,2,432,163500 95 | 6,1214,1260,2291,2,9,1,2,506,133900 96 | 6,824,905,1786,2,7,0,2,684,204750 97 | 6,680,680,1470,2,6,1,2,420,185000 98 | 7,1588,1588,1588,2,6,0,2,472,214000 99 | 4,960,960,960,1,6,0,1,432,94750 100 | 5,458,835,835,1,5,0,1,366,83000 101 | 4,950,1225,1225,1,6,0,0,0,128950 102 | 6,1610,1610,1610,2,6,2,2,480,205000 103 | 6,741,977,1732,2,7,1,2,476,178000 104 | 5,0,1535,1535,2,8,0,2,410,118964 105 | 7,1226,1226,1226,2,6,0,3,740,198900 106 | 7,1040,1226,1818,1,7,2,1,240,169500 107 | 8,1053,1053,1992,2,9,1,2,648,250000 108 | 4,641,1047,1047,1,6,0,1,273,100000 109 | 5,789,789,789,1,5,0,1,250,115000 110 | 5,793,997,1517,2,7,0,0,0,115000 111 | 6,1844,1844,1844,2,7,1,2,546,190000 112 | 6,994,1216,1855,2,7,0,1,325,136900 113 | 7,384,774,1430,2,7,1,2,400,180000 114 | 7,1264,1282,2696,2,10,1,3,792,383970 115 | 6,1809,2259,2259,2,7,2,2,450,217000 116 | 6,1028,1436,2320,2,9,1,1,180,259500 117 | 6,729,729,1458,2,5,1,2,440,176000 118 | 5,1092,1092,1092,1,6,1,1,288,139000 119 | 5,1125,1125,1125,1,5,0,2,430,155000 120 | 7,1673,1699,3222,3,11,2,3,594,320000 121 
| 6,728,728,1456,2,8,1,2,390,163990 122 | 6,938,988,988,1,4,2,2,540,180000 123 | 4,732,772,1123,1,4,0,1,264,100000 124 | 6,1080,1080,1080,1,5,0,1,288,136000 125 | 6,1199,1199,1199,2,5,0,2,530,153900 126 | 6,1362,1586,1586,2,7,1,2,435,181000 127 | 6,520,520,754,1,5,0,0,0,84500 128 | 6,1078,958,958,2,5,1,2,440,128000 129 | 5,672,840,840,1,5,1,0,0,87000 130 | 6,660,660,1348,1,6,1,2,453,155000 131 | 5,1008,1053,1053,1,6,0,2,750,150000 132 | 7,924,1216,2157,2,8,2,2,487,226000 133 | 6,992,1022,2054,2,7,1,2,390,244000 134 | 5,1063,1327,1327,1,7,0,2,624,150750 135 | 8,1267,1296,1296,2,6,0,2,471,220000 136 | 5,1461,1721,1721,2,7,1,2,440,180000 137 | 7,1304,1682,1682,2,7,1,2,530,174000 138 | 5,1214,1214,1214,2,5,1,1,318,143000 139 | 7,1907,1959,1959,3,9,0,3,766,171000 140 | 8,1004,1004,1852,2,7,2,3,660,230000 141 | 6,928,928,1764,2,7,0,2,470,231500 142 | 4,864,864,864,1,5,1,0,0,115000 143 | 7,1734,1734,1734,2,7,0,2,660,260000 144 | 5,910,910,1385,2,6,0,2,720,166000 145 | 7,1490,1501,1501,2,6,0,2,577,204000 146 | 5,1728,1728,1728,2,10,0,2,504,125000 147 | 6,970,970,1709,2,7,0,2,380,130000 148 | 5,715,875,875,1,5,0,1,180,105000 149 | 7,884,884,2035,2,8,1,2,434,222500 150 | 7,1080,1080,1080,1,6,0,0,0,141000 151 | 5,896,896,1344,1,7,0,1,240,115000 152 | 5,969,969,969,1,5,0,2,440,122000 153 | 8,1710,1710,1710,2,6,1,3,866,372402 154 | 6,825,1097,1993,2,8,1,2,495,190000 155 | 6,1602,1252,1252,1,4,1,2,564,235000 156 | 6,1200,1200,1200,1,7,0,1,312,125000 157 | 6,572,572,1096,1,5,0,0,0,79000 158 | 5,0,1040,1040,1,5,0,2,625,109500 159 | 8,774,774,1968,2,8,1,3,680,269500 160 | 7,991,991,1947,2,8,1,2,678,254900 161 | 7,1392,1392,2462,2,9,1,2,576,320000 162 | 6,1232,1232,1232,2,6,0,2,516,162500 163 | 9,1572,1572,2668,2,10,2,3,726,412500 164 | 7,1541,1541,1541,2,7,1,2,532,220000 165 | 4,882,882,882,1,4,0,0,0,103200 166 | 6,1149,1149,1616,2,5,0,1,216,152000 167 | 5,644,808,1355,2,6,0,0,0,127500 168 | 5,1617,1867,1867,1,7,3,1,303,190000 169 | 8,1582,1610,2161,1,8,1,3,789,325624 170 | 7,840,840,1720,2,7,1,2,440,183500 171 | 8,1686,1707,1707,2,6,1,2,511,228000 172 | 5,720,854,1382,1,7,0,2,660,128500 173 | 6,1080,1656,1656,1,7,2,2,528,215000 174 | 7,1064,1064,1767,2,5,1,2,504,239000 175 | 6,1362,1362,1362,1,6,1,2,504,163000 176 | 6,1606,1651,1651,2,7,1,2,616,184000 177 | 6,1202,2158,2158,2,7,1,2,576,243000 178 | 6,1151,1164,2060,2,8,1,2,521,211000 179 | 5,1052,1252,1920,2,8,1,2,451,172500 180 | 9,2216,2234,2234,2,9,1,3,1166,501837 181 | 5,968,968,968,1,5,0,2,480,100000 182 | 6,756,769,1525,2,5,1,2,440,177000 183 | 7,793,901,1802,1,9,1,1,216,200100 184 | 5,0,1340,1340,1,7,1,1,252,120000 185 | 7,1362,1362,2082,2,6,0,2,484,200000 186 | 5,504,936,1252,1,5,0,2,576,127000 187 | 10,1107,1518,3608,2,12,2,3,840,475000 188 | 7,1188,1217,1217,2,6,0,2,497,173000 189 | 5,660,808,1656,2,8,0,1,180,135000 190 | 5,1086,1224,1224,0,6,2,2,528,153337 191 | 8,1593,1593,1593,1,5,1,2,682,286000 192 | 8,853,1549,2727,2,10,2,2,440,315000 193 | 7,725,725,1479,1,7,0,2,484,184000 194 | 7,1431,1431,1431,2,6,0,2,666,192000 195 | 7,970,970,1709,2,7,0,2,380,130000 196 | 5,864,864,864,1,5,0,1,352,127000 197 | 6,855,855,1456,2,7,1,2,440,148500 198 | 7,1726,1726,1726,2,8,1,3,786,311872 199 | 8,1360,1360,3112,2,8,1,2,795,235000 200 | 6,755,929,2229,1,8,0,0,0,104000 201 | 8,1713,1713,1713,2,7,1,3,856,274900 202 | 4,1121,1121,1121,2,5,0,2,440,140000 203 | 6,1196,1279,1279,2,6,2,2,473,171500 204 | 6,617,865,1310,2,6,0,1,398,112000 205 | 6,848,848,848,1,3,1,2,420,149000 206 | 5,720,720,1284,1,5,0,1,240,110000 207 | 7,1424,1442,1442,2,5,0,2,500,180500 208 | 
5,1140,1696,1696,1,8,1,1,349,143900 209 | 4,1100,1100,1100,1,6,1,1,312,141000 210 | 7,1157,1180,2062,2,7,1,2,454,277000 211 | 6,1092,1092,1092,1,6,0,2,504,145000 212 | 5,864,864,864,1,5,0,0,0,98000 213 | 6,1212,1212,1212,2,6,0,2,460,186000 214 | 7,900,932,1852,2,7,1,2,644,252678 215 | 5,990,990,990,1,5,0,2,576,156000 216 | 6,689,689,1392,1,6,0,1,299,161750 217 | 5,1070,1236,1236,1,6,1,1,447,134450 218 | 7,1436,1436,1436,2,8,0,2,484,210000 219 | 4,686,810,1328,1,8,0,1,210,107000 220 | 7,798,1137,1954,1,8,2,2,431,311500 221 | 7,1248,1248,1248,2,5,0,2,438,167240 222 | 7,1498,1498,1498,2,5,0,2,675,204900 223 | 6,1010,1010,2267,2,8,1,2,390,200000 224 | 6,713,811,1552,2,6,1,2,434,179900 225 | 4,864,864,864,1,4,0,2,576,97000 226 | 10,2392,2392,2392,2,8,1,3,968,386250 227 | 5,630,630,1302,2,6,0,1,280,112000 228 | 7,1203,1214,2520,2,9,1,3,721,290000 229 | 6,483,483,987,1,5,0,1,280,106000 230 | 5,912,912,912,1,5,1,1,336,125000 231 | 7,1373,1555,1555,2,7,1,2,430,192500 232 | 6,1194,1194,1194,1,6,0,1,312,148000 233 | 8,1462,1490,2794,2,9,1,3,810,403000 234 | 6,483,483,987,1,5,1,1,288,94500 235 | 5,894,894,894,1,5,0,1,308,128200 236 | 6,860,860,1960,2,8,2,2,440,216500 237 | 6,483,483,987,1,5,0,1,264,89500 238 | 7,1414,1414,1414,2,6,0,2,494,185500 239 | 7,996,1014,1744,2,7,0,2,457,194500 240 | 8,1694,1694,1694,2,7,0,3,818,318000 241 | 6,735,798,1487,1,7,1,1,220,113000 242 | 8,1566,1566,1566,2,7,0,2,750,262500 243 | 5,686,866,866,1,4,0,0,0,110500 244 | 5,540,889,1440,1,6,0,1,352,79000 245 | 6,626,626,1217,1,6,1,1,288,120000 246 | 7,948,1222,2110,2,8,2,2,463,205000 247 | 7,1845,1872,1872,2,6,1,2,604,241500 248 | 6,1020,908,1928,2,9,0,1,440,137000 249 | 6,1367,1375,1375,1,5,1,2,451,140000 250 | 7,840,840,1668,2,8,0,2,500,180000 251 | 6,1444,1444,2144,2,7,2,2,389,277000 252 | 3,728,1306,1306,1,6,0,0,0,76500 253 | 8,1573,1625,1625,2,5,1,2,538,235000 254 | 6,798,798,1640,2,6,0,2,520,173000 255 | 6,1302,1302,1302,2,7,0,1,309,158000 256 | 5,1314,1314,1314,1,5,0,1,294,145000 257 | 7,975,1005,2291,2,8,1,2,429,230000 258 | 6,864,864,1728,2,7,0,2,673,207500 259 | 7,1604,1604,1604,2,8,1,2,660,220000 260 | 7,963,963,1792,2,7,1,2,564,231500 261 | 5,0,882,882,1,4,0,1,308,97000 262 | 6,1362,1382,1382,1,6,1,2,884,176000 263 | 8,1482,1482,2574,2,10,1,3,868,276000 264 | 6,506,1212,1212,1,6,1,2,492,151000 265 | 5,926,926,1316,1,6,0,2,484,130000 266 | 5,680,764,764,1,4,0,2,504,73000 267 | 6,1422,1422,1422,2,7,1,2,576,175500 268 | 6,802,802,1511,2,8,1,2,413,185000 269 | 5,720,1052,2192,2,8,1,1,240,179500 270 | 5,740,778,778,1,4,1,1,924,120500 271 | 6,1143,1113,1113,1,6,1,1,504,148000 272 | 8,1095,1095,1939,2,8,1,3,1053,266000 273 | 7,1385,1363,1363,1,5,2,2,439,241500 274 | 8,1152,1164,2270,2,9,1,3,671,290000 275 | 6,1240,1632,1632,2,6,1,1,338,139000 276 | 5,816,816,816,1,5,0,1,264,124500 277 | 7,952,952,1548,2,5,0,2,672,205000 278 | 7,1560,1560,1560,2,7,0,2,573,201000 279 | 4,864,864,864,1,4,0,2,400,141000 280 | 9,2121,2121,2121,2,8,1,3,732,415298 281 | 7,1160,1156,2022,2,8,1,2,505,192000 282 | 7,807,1175,1982,2,7,1,2,575,228500 283 | 6,1262,1262,1262,2,5,0,2,572,185000 284 | 7,1314,1314,1314,2,6,1,2,626,207500 285 | 8,1468,1468,1468,2,6,1,3,898,244600 286 | 6,1575,1575,1575,2,5,0,2,529,179200 287 | 7,625,625,1250,2,5,0,2,528,164700 288 | 6,912,1085,1734,1,7,1,2,440,159000 289 | 4,858,858,858,1,5,0,0,0,88000 290 | 5,882,900,900,1,5,0,1,280,122000 291 | 6,698,698,1396,1,7,0,1,384,153575 292 | 8,1079,1079,1919,2,8,1,2,685,233230 293 | 5,780,936,1716,2,9,0,0,0,135900 294 | 5,768,1148,1716,1,8,1,1,281,131000 295 | 
7,795,1468,2263,2,9,1,2,539,235000 296 | 6,1416,1644,1644,1,7,2,2,418,167000 297 | 6,1003,1003,1003,1,6,0,2,588,142500 298 | 5,910,910,1558,1,6,0,1,282,152000 299 | 7,975,975,1950,2,7,1,2,576,239000 300 | 6,702,1041,1743,1,7,1,2,539,175000 301 | 6,1092,1152,1152,1,6,1,1,300,158500 302 | 5,1165,1336,1336,1,5,2,1,375,157000 303 | 8,1028,1210,2452,2,9,1,2,683,267000 304 | 7,1541,1541,1541,2,6,1,3,843,205000 305 | 5,894,894,894,1,5,0,2,552,149900 306 | 7,1470,1675,3493,3,10,1,3,870,295000 307 | 8,2000,2000,2000,2,8,0,3,888,305900 308 | 7,700,1122,2243,2,8,1,3,746,225000 309 | 6,319,1035,1406,1,6,0,0,0,89500 310 | 4,861,861,861,1,4,0,2,539,82500 311 | 9,1896,1944,1944,2,8,3,3,708,360000 312 | 6,697,697,1501,2,6,1,2,420,165600 313 | 6,972,972,972,1,5,1,1,240,132000 314 | 5,793,793,1118,1,5,1,2,410,119900 315 | 7,2136,2036,2036,2,8,2,2,513,375000 316 | 7,728,832,1641,1,6,1,2,546,178000 317 | 7,716,716,1432,2,6,1,2,432,188500 318 | 7,845,1153,2353,2,10,1,2,484,260000 319 | 8,1088,1088,1959,2,8,1,3,1025,270000 320 | 7,1347,1372,2646,2,9,1,3,656,260000 321 | 7,1372,1472,1472,2,6,2,2,588,187500 322 | 9,1249,1249,2596,3,9,0,3,840,342643 323 | 8,1136,1136,2468,2,10,1,3,872,354000 324 | 7,1502,1553,2730,2,8,1,2,576,301000 325 | 3,1162,1163,1163,1,6,0,1,220,126175 326 | 7,710,1898,2978,2,11,1,2,564,242000 327 | 5,720,803,803,1,5,0,2,360,87000 328 | 8,1719,1719,1719,1,6,2,2,473,324000 329 | 6,1383,1383,1383,1,7,0,1,292,145250 330 | 6,844,1445,2134,2,10,0,2,441,214500 331 | 5,596,596,1192,1,6,0,1,189,78000 332 | 5,1728,1728,1728,2,10,0,1,352,119000 333 | 5,1056,1056,1056,1,6,0,1,308,139000 334 | 8,3206,1629,1629,2,7,1,3,880,284000 335 | 7,1358,1358,1358,2,6,1,2,484,207000 336 | 6,943,943,1638,2,7,2,2,472,192000 337 | 5,1499,1619,1786,2,7,2,2,529,228950 338 | 9,1922,1922,1922,2,8,1,3,676,377426 339 | 7,1536,1536,1536,2,7,0,2,532,214000 340 | 7,1208,1621,1621,2,8,0,2,440,202500 341 | 6,1215,1215,1215,1,6,0,1,297,155000 342 | 8,967,993,1908,2,9,0,2,431,202900 343 | 4,721,841,841,1,4,0,1,294,82000 344 | 3,0,1040,1040,2,6,0,2,400,87500 345 | 9,1684,1684,1684,2,6,1,2,564,266000 346 | 5,536,536,1112,1,4,0,1,336,85000 347 | 6,972,972,1577,1,6,1,1,312,140200 348 | 6,958,958,958,1,5,0,1,301,151500 349 | 6,1478,1478,1478,2,6,2,2,498,157500 350 | 7,764,764,1626,2,6,0,2,474,154000 351 | 9,1848,1848,2728,2,10,2,3,706,437154 352 | 9,1869,1869,1869,2,6,1,2,617,318061 353 | 7,1453,1453,1453,1,6,1,2,445,190000 354 | 5,616,616,1111,1,5,0,1,200,95000 355 | 6,624,720,720,1,5,0,2,484,105900 356 | 6,940,1192,1595,1,6,2,1,240,140000 357 | 6,1200,1200,1200,2,6,0,2,521,177500 358 | 6,1158,1167,1167,2,6,0,2,400,173000 359 | 5,1142,1142,1142,1,6,1,2,528,134000 360 | 5,1062,1352,1352,1,6,0,1,288,130000 361 | 8,1086,1086,1924,2,7,1,2,592,280000 362 | 6,888,912,912,1,5,1,2,470,156000 363 | 5,883,988,1505,1,8,0,1,240,145000 364 | 7,0,495,1922,3,7,1,2,672,198500 365 | 6,483,483,987,1,5,0,1,264,118000 366 | 6,796,790,1574,2,6,1,2,566,190000 367 | 5,672,672,1344,1,6,0,1,468,147000 368 | 6,1394,1394,1394,1,6,2,2,514,159000 369 | 6,1099,1431,1431,1,6,1,1,296,165000 370 | 5,1268,1268,1268,1,7,1,1,244,132000 371 | 5,1063,1287,1287,1,7,1,2,576,162000 372 | 6,953,953,1664,2,7,1,2,460,172400 373 | 4,0,1120,1588,2,7,1,2,680,134432 374 | 6,744,752,752,1,4,0,1,264,125000 375 | 5,608,1319,1319,1,5,0,1,270,123000 376 | 7,847,847,1928,2,8,1,2,434,219500 377 | 1,683,904,904,0,4,0,0,0,61000 378 | 5,870,914,914,1,5,0,2,576,148000 379 | 8,1580,1580,2466,3,8,1,2,610,340000 380 | 9,1856,1856,1856,1,8,1,3,834,394432 381 | 6,982,1007,1800,2,7,1,2,463,179000 382 
| 5,1026,1026,1691,2,6,1,1,308,127000 383 | 7,1293,1301,1301,2,5,1,2,572,187750 384 | 7,939,939,1797,2,8,0,2,639,213500 385 | 6,784,784,784,1,5,0,2,360,76000 386 | 6,1580,1079,1953,2,9,2,2,501,240000 387 | 8,1256,1269,1269,2,6,1,2,430,192000 388 | 5,658,658,1184,1,8,0,0,0,81000 389 | 6,1041,1125,1125,1,6,1,1,352,125000 390 | 7,1468,1479,1479,2,6,0,2,577,191000 391 | 10,1682,1742,2332,2,9,1,3,846,426000 392 | 5,861,961,1367,1,7,0,1,384,119000 393 | 6,804,804,1961,2,7,1,2,560,215000 394 | 5,0,882,882,1,5,0,1,294,106500 395 | 4,788,788,788,1,4,2,0,0,100000 396 | 5,735,735,1034,1,5,0,1,240,109000 397 | 5,1144,1144,1144,1,6,0,1,596,129000 398 | 5,894,894,894,1,6,0,2,600,123000 399 | 5,864,876,1812,2,8,1,1,264,169500 400 | 5,961,1077,1077,1,6,0,1,338,67000 401 | 7,1092,1112,1550,2,7,0,2,438,241000 402 | 8,1260,1288,1288,1,4,2,2,500,245500 403 | 7,1310,1310,1310,2,6,1,2,400,164990 404 | 5,672,672,672,1,4,0,1,240,108000 405 | 8,1141,1165,2263,2,10,1,2,420,258000 406 | 6,806,806,1572,2,7,1,2,373,168000 407 | 4,1281,1620,1620,2,8,1,2,490,150000 408 | 6,1064,1166,1639,1,6,0,1,240,115000 409 | 6,840,840,1680,2,8,0,1,308,177000 410 | 7,1063,1071,2172,2,9,1,3,947,280000 411 | 8,1034,1050,2078,2,8,1,3,836,339750 412 | 5,1276,1276,1276,1,5,0,1,350,60000 413 | 5,1056,1056,1056,1,5,0,2,572,145000 414 | 7,1470,1478,1478,2,7,1,2,484,222000 415 | 5,1008,1028,1028,1,5,1,2,360,115000 416 | 7,1080,1080,2097,2,9,1,3,678,228000 417 | 7,1340,1340,1340,2,6,0,2,396,181134 418 | 6,672,672,1400,1,6,1,2,440,149500 419 | 6,1370,1370,2624,2,10,1,3,864,239000 420 | 5,756,756,1134,1,7,0,1,240,126000 421 | 5,1056,1056,1056,1,6,1,1,304,142000 422 | 7,1344,1344,1344,2,8,0,4,784,206300 423 | 6,1602,1602,1602,2,8,1,2,529,215000 424 | 5,988,988,988,1,4,0,2,520,113000 425 | 8,1470,1470,2630,2,8,1,3,696,315000 426 | 6,1196,1196,1196,1,6,1,1,297,139000 427 | 7,651,707,1389,1,6,2,1,240,135000 428 | 7,1518,1644,1644,2,5,1,2,569,275000 429 | 4,907,907,907,1,5,0,1,352,109008 430 | 7,1208,1208,1208,2,6,0,2,628,195400 431 | 6,1392,1412,1412,2,6,1,2,576,175000 432 | 6,483,483,987,1,4,0,1,264,85400 433 | 6,901,1088,1198,1,7,0,0,0,79900 434 | 5,765,765,1365,1,6,0,2,440,122500 435 | 6,926,926,1604,2,7,1,2,470,181000 436 | 4,630,630,630,1,3,0,0,0,81000 437 | 7,799,827,1661,2,6,1,2,550,212000 438 | 6,648,734,1118,1,6,0,2,440,116000 439 | 6,884,904,904,1,4,0,1,180,119000 440 | 5,440,694,694,1,4,1,1,352,90350 441 | 6,684,684,1196,1,7,0,2,528,110000 442 | 10,3094,2402,2402,2,10,2,3,672,555000 443 | 4,1440,1440,1440,2,8,0,0,0,118000 444 | 5,1078,1128,1573,2,8,1,2,360,162900 445 | 7,1258,1258,1258,2,6,1,3,648,172500 446 | 7,915,933,1908,2,8,1,2,493,210000 447 | 6,1436,1689,1689,1,7,1,2,480,127500 448 | 6,1517,1888,1888,2,6,1,2,578,190000 449 | 7,930,956,1886,2,10,1,2,431,199900 450 | 6,780,780,1376,2,7,1,1,198,119500 451 | 3,649,679,1183,1,6,0,1,308,120000 452 | 6,813,813,813,1,5,0,1,270,110000 453 | 7,1533,1533,1533,2,5,2,2,576,280000 454 | 6,872,888,1756,2,7,0,2,422,204000 455 | 8,768,786,1590,2,6,0,2,676,210000 456 | 5,1728,1728,1728,2,8,0,2,560,188000 457 | 7,1242,1242,1242,1,6,1,2,528,175500 458 | 5,624,624,1344,1,7,0,3,513,98000 459 | 4,1364,1663,1663,1,6,2,2,529,256000 460 | 8,588,833,1666,1,7,1,1,228,161000 461 | 5,709,979,1203,1,5,1,1,352,110000 462 | 8,832,832,1935,2,8,0,2,552,263435 463 | 7,560,575,1135,1,6,0,2,576,155000 464 | 5,864,864,864,1,5,1,1,360,62383 465 | 6,715,849,1660,1,6,1,1,240,188700 466 | 5,1040,1040,1040,2,5,0,0,0,124000 467 | 7,1375,1414,1414,2,6,1,2,398,178740 468 | 7,1277,1277,1277,1,5,1,2,526,167000 469 | 
5,728,888,1644,1,7,2,1,312,146500 470 | 8,1626,1634,1634,2,7,1,3,866,250000 471 | 6,832,832,1710,2,7,0,2,506,187000 472 | 8,1488,1502,1502,1,4,0,2,528,212000 473 | 7,808,1161,1969,2,8,1,2,534,190000 474 | 6,547,1072,1072,1,5,0,2,525,148000 475 | 8,1976,1976,1976,2,7,1,3,908,440000 476 | 8,1494,1652,1652,2,6,0,2,499,251000 477 | 5,970,970,970,1,5,0,2,624,132500 478 | 6,1478,1493,1493,2,7,1,2,508,208900 479 | 9,2153,2069,2643,2,9,1,3,694,380000 480 | 8,1705,1718,1718,2,7,1,3,826,297000 481 | 4,907,1131,1131,1,7,0,2,672,89471 482 | 9,1833,1850,1850,2,8,1,3,772,326000 483 | 9,1792,1792,1792,2,6,1,3,874,374000 484 | 7,910,916,1826,1,7,1,1,164,155000 485 | 6,1216,1216,1216,2,5,0,2,402,164000 486 | 5,999,999,999,1,6,0,1,264,132500 487 | 5,1113,1113,1113,1,5,1,1,264,147000 488 | 5,1073,1073,1073,1,6,0,2,515,156000 489 | 5,1484,1484,1484,2,7,1,2,487,175000 490 | 5,954,1766,2414,2,10,1,2,520,160000 491 | 4,630,630,630,1,3,0,1,286,86000 492 | 5,264,616,1304,1,4,1,1,336,115000 493 | 6,806,958,1578,1,5,2,1,240,133000 494 | 6,728,728,1456,2,8,0,2,429,172785 495 | 5,1269,1269,1269,1,6,1,1,308,155000 496 | 5,190,886,886,1,4,0,1,273,91300 497 | 4,720,720,720,1,4,0,0,0,34900 498 | 8,3200,3228,3228,3,10,1,2,546,430000 499 | 7,1026,1133,1820,2,8,0,1,240,184000 500 | 5,864,899,899,1,5,0,1,288,130000 501 | 5,912,912,912,1,5,0,1,297,120000 502 | 6,672,672,1218,1,7,0,1,264,113000 503 | 7,866,866,1768,2,7,0,2,603,226700 504 | 5,1214,1214,1214,1,6,0,2,461,140000 505 | 7,1501,1801,1801,2,6,2,2,484,289000 506 | 6,855,855,1322,2,6,1,2,440,147000 507 | 5,960,960,1960,2,10,0,2,400,124500 508 | 8,777,1065,1911,2,8,1,2,471,215000 509 | 6,1218,1218,1218,2,4,0,2,676,208300 510 | 7,689,689,1378,2,7,1,2,360,161000 511 | 5,1041,1041,1041,1,6,0,1,270,124500 512 | 5,1008,1363,1363,1,6,2,1,288,164900 513 | 7,1368,1368,1368,2,6,1,2,474,202665 514 | 5,864,864,864,1,5,0,2,624,129900 515 | 6,1084,1080,1080,1,5,0,2,484,134000 516 | 5,768,789,789,1,5,0,1,200,96500 517 | 10,2006,2020,2020,2,9,1,3,900,402861 518 | 6,689,1378,2119,2,7,1,2,583,158000 519 | 7,1264,1277,2344,2,7,1,3,889,265000 520 | 6,794,882,1796,2,7,0,2,546,211000 521 | 7,1276,1276,2080,1,9,2,1,282,234000 522 | 4,0,694,1294,2,7,0,0,0,106250 523 | 6,1244,1244,1244,1,6,2,1,336,150000 524 | 6,1004,1004,1664,2,7,2,2,420,159000 525 | 10,3138,3138,4676,3,11,1,3,884,184750 526 | 7,1379,1383,2398,2,8,1,3,834,315750 527 | 7,1257,1266,1266,2,6,1,2,453,176000 528 | 5,928,928,928,1,4,0,1,252,132000 529 | 9,1452,1476,2713,2,11,1,3,858,446261 530 | 4,528,605,605,1,5,0,0,0,86000 531 | 6,2035,2515,2515,3,9,2,2,484,200624 532 | 6,1461,1509,1509,2,5,1,2,600,175000 533 | 6,611,751,1362,2,6,0,2,502,128000 534 | 5,0,827,827,1,5,1,1,392,107500 535 | 1,0,334,334,1,2,0,0,0,39300 536 | 8,707,707,1414,2,6,1,2,403,178000 537 | 5,1117,820,1347,1,5,0,0,0,107500 538 | 7,880,880,1724,2,8,0,2,527,188000 539 | 4,864,864,864,1,5,0,2,576,111250 540 | 5,1051,1159,1159,1,7,1,1,336,158000 541 | 8,1581,1601,1601,2,6,1,2,670,272000 542 | 9,1838,1838,1838,2,8,1,3,765,315000 543 | 8,969,997,2285,2,8,1,3,648,248000 544 | 7,1650,1680,1680,2,7,1,2,583,213250 545 | 7,723,767,767,1,4,0,1,367,133000 546 | 7,654,664,1496,2,7,1,2,426,179665 547 | 7,1204,1377,2183,2,9,0,3,786,229000 548 | 6,1065,915,1635,1,6,1,2,440,210000 549 | 5,768,768,768,1,5,0,2,624,129500 550 | 5,825,825,825,1,4,0,2,720,125000 551 | 7,912,912,2094,2,8,1,2,615,263000 552 | 6,1069,1069,1069,2,4,0,2,440,140000 553 | 5,928,928,928,1,5,0,1,288,112500 554 | 8,1709,1717,1717,2,7,1,3,908,255500 555 | 4,0,1126,1126,2,5,0,2,520,108000 556 | 
7,998,1006,2046,2,8,1,3,871,284000 557 | 5,993,1048,1048,1,5,1,1,280,113000 558 | 5,1092,1092,1092,1,6,1,1,299,141000 559 | 4,637,897,1336,1,7,0,1,570,108000 560 | 7,729,729,1446,2,6,1,2,406,175000 561 | 7,1374,1557,1557,2,7,1,2,420,234000 562 | 5,1392,1392,1392,1,5,1,2,528,121500 563 | 5,1389,1389,1389,1,6,1,2,418,170000 564 | 5,996,996,996,1,6,1,0,0,108000 565 | 6,1163,1163,1674,2,8,1,2,396,185000 566 | 7,1095,1166,2295,2,9,1,2,590,268000 567 | 6,806,841,1647,1,6,0,1,216,128000 568 | 9,1122,1134,2504,2,11,1,3,656,325000 569 | 7,1517,1535,1535,2,7,0,2,532,214000 570 | 8,1496,1496,2132,1,5,1,2,612,316600 571 | 5,943,943,943,1,4,2,2,600,135960 572 | 5,1728,1728,1728,2,10,0,2,576,142600 573 | 6,864,864,864,1,4,0,1,288,120000 574 | 7,846,846,1692,2,6,0,2,650,224500 575 | 7,384,774,1430,2,8,1,2,400,170000 576 | 5,372,576,1109,1,5,0,1,288,139000 577 | 5,832,832,1216,1,6,0,1,336,118500 578 | 7,861,877,1477,2,6,1,1,216,145000 579 | 5,1164,1320,1320,1,6,2,2,564,164500 580 | 7,689,703,1392,2,5,0,2,540,146000 581 | 5,1050,1050,1795,2,7,0,1,352,131500 582 | 6,1144,1429,1429,1,7,2,2,572,181900 583 | 8,2042,2042,2042,2,8,1,3,1390,253293 584 | 6,816,816,816,1,5,0,0,0,118500 585 | 10,1237,1521,2775,3,9,1,2,880,325000 586 | 4,884,989,1573,1,6,0,1,240,133000 587 | 8,1868,2028,2028,2,7,2,3,880,369900 588 | 6,816,838,838,1,5,0,1,275,130000 589 | 5,840,860,860,1,4,0,2,528,137000 590 | 5,1437,1473,1473,1,5,2,1,452,143000 591 | 5,742,779,935,1,4,0,1,308,79500 592 | 7,770,770,1582,2,6,0,2,520,185900 593 | 10,1722,1728,2296,2,10,1,3,842,451950 594 | 5,816,816,816,1,4,0,2,816,138000 595 | 6,848,848,848,1,4,0,2,420,140000 596 | 5,924,924,924,1,5,0,1,280,110000 597 | 8,1814,1826,1826,2,7,1,3,758,319000 598 | 6,684,684,1368,1,7,0,1,216,114504 599 | 7,1258,1402,1402,0,7,1,3,648,194201 600 | 5,1430,1647,1647,2,7,1,2,621,208500 601 | 6,716,716,1556,2,6,1,2,452,208501 602 | 8,1058,1058,1904,2,8,1,2,736,208502 603 | 6,780,780,1375,1,6,1,1,544,208503 604 | 8,908,927,1915,2,8,1,2,506,208504 605 | 7,600,600,1200,2,4,0,2,480,208505 606 | 7,1494,1494,1494,2,6,1,2,530,208506 607 | 7,768,1186,1986,2,7,3,2,486,208507 608 | 5,1040,1040,1040,1,6,0,2,576,208508 609 | 5,896,1112,2008,3,8,0,1,230,208509 610 | 8,965,1940,3194,2,10,2,2,380,208510 611 | 4,1029,1029,1029,1,5,0,1,261,208511 612 | 9,1440,1476,2153,2,8,2,3,736,208512 613 | 6,1032,1032,1032,2,6,1,2,564,208513 614 | 8,1299,1299,1872,2,7,1,2,531,208514 615 | 5,1120,1120,1120,1,6,0,0,0,208515 616 | 4,630,630,630,1,3,0,0,0,208516 617 | 6,936,1054,1054,1,6,0,2,480,208517 618 | 6,783,807,1509,2,7,1,2,393,208518 619 | 6,832,832,832,1,4,0,2,528,208519 620 | 9,1822,1828,1828,2,9,1,3,774,208520 621 | 8,1482,1482,2262,2,10,2,3,749,208521 622 | 3,864,864,864,1,5,0,0,0,208522 623 | 6,1522,1548,2614,2,9,1,2,624,208523 624 | 5,980,980,980,1,6,0,2,484,208524 625 | 6,756,756,1512,2,4,1,2,440,208525 626 | 6,732,1012,1790,1,8,1,2,484,208526 627 | 6,1116,1116,1116,1,5,0,2,440,208527 628 | 5,978,1422,1422,1,6,1,1,286,208528 629 | 6,1156,1520,1520,1,7,2,1,364,208529 630 | 5,1040,1040,2080,1,9,2,2,504,208530 631 | 6,1248,1350,1350,1,6,0,2,520,208531 632 | 5,636,1089,1750,1,8,0,1,240,208532 633 | 8,1554,1554,1554,2,6,1,2,627,208533 634 | 7,1386,1411,1411,2,6,1,2,544,208534 635 | 5,1056,1056,1056,1,6,0,1,260,208535 636 | 6,1056,1056,1056,0,4,0,2,576,208536 637 | 6,1440,1440,3395,2,14,0,0,0,208537 638 | 2,264,800,800,1,4,1,0,0,208538 639 | 5,811,811,1387,2,7,0,1,256,208539 640 | 5,796,796,796,1,4,0,0,0,208540 641 | 8,1520,1567,1567,2,7,1,3,648,208541 642 | 8,1518,1518,1518,1,6,1,2,588,208542 643 | 
7,1057,1057,1929,2,7,1,2,650,208543 644 | 8,1952,2000,2704,2,9,3,2,538,208544 645 | 5,780,780,1620,2,7,0,2,462,208545 646 | 9,1766,1766,1766,2,7,1,3,478,208546 647 | 6,981,981,981,1,5,0,2,576,208547 648 | 5,0,1048,1048,1,7,0,2,420,208548 649 | 6,1094,1094,1094,1,5,2,2,495,208549 650 | 6,756,1051,1839,1,7,1,2,442,208550 651 | 4,630,630,630,1,3,0,0,0,208551 652 | 7,813,822,1665,2,7,0,2,562,208552 653 | 4,755,755,1510,1,7,1,1,296,208553 654 | 7,880,909,1716,2,7,1,2,512,208554 655 | 6,756,756,1469,1,7,0,1,216,208555 656 | 8,2109,2113,2113,2,7,1,3,839,208556 657 | 6,525,525,1092,1,6,0,1,264,208557 658 | 5,1053,1053,1053,1,5,0,1,312,208558 659 | 7,776,851,1502,1,6,1,1,270,208559 660 | 6,912,912,1458,1,6,1,1,330,208560 661 | 5,1486,1486,1486,1,7,0,2,480,208561 662 | 7,793,1142,1935,2,7,1,2,550,208562 663 | 8,1629,1686,2448,2,8,1,3,711,208563 664 | 6,1392,1392,1392,1,5,2,2,576,208564 665 | 4,1138,1181,1181,2,6,0,2,588,208565 666 | 8,2077,2097,2097,1,8,1,3,1134,208566 667 | 8,1406,1454,1936,2,7,1,2,504,208567 668 | 6,1021,1465,2380,2,7,1,2,596,208568 669 | 6,1408,1679,1679,2,7,1,2,575,208569 670 | 5,1188,1437,1437,1,6,1,2,576,208570 671 | 4,700,1180,1180,1,5,1,1,252,208571 672 | 6,738,738,1476,2,7,0,2,540,208572 673 | 6,672,697,1369,2,6,0,1,300,208573 674 | 6,1208,1208,1208,1,6,1,2,546,208574 675 | 6,1477,1839,1839,2,7,2,2,416,208575 676 | 6,1136,1136,1136,1,5,1,1,384,208576 677 | 6,855,855,1441,2,7,1,2,440,208577 678 | 4,1095,1095,1774,2,8,0,3,779,208578 679 | 5,768,792,792,1,5,0,1,240,208579 680 | 8,2046,2046,2046,2,7,1,3,834,208580 681 | 5,988,988,988,1,5,0,2,572,208581 682 | 6,923,923,923,2,5,1,1,264,208582 683 | 5,793,848,1520,1,6,0,1,281,208583 684 | 6,1291,1291,1291,1,6,1,2,431,208584 685 | 9,1626,1668,1668,2,7,1,3,702,208585 686 | 7,1195,1195,1839,2,7,0,2,486,208586 687 | 7,1190,1190,2090,2,6,1,2,577,208587 688 | 7,874,874,1761,3,7,0,2,578,208588 689 | 7,551,551,1102,2,4,0,2,480,208589 690 | 8,1419,1419,1419,2,7,1,2,567,208590 691 | 6,1362,1362,1362,2,6,1,2,460,208591 692 | 6,848,848,848,1,3,1,2,420,208592 693 | 10,2444,2444,4316,3,10,2,3,832,208593 694 | 7,1210,1238,2519,2,9,2,2,628,208594 695 | 5,1073,1073,1073,1,4,0,1,326,208595 696 | 5,927,1067,1539,1,5,0,2,576,208596 697 | 6,1112,1137,1137,2,5,1,2,551,208597 698 | 5,616,616,616,1,4,0,1,205,208598 699 | 5,980,1148,1148,1,6,0,1,308,208599 700 | 5,894,894,894,1,5,1,1,336,208600 701 | 7,1391,1391,1391,2,5,0,2,530,208601 702 | 8,1800,1800,1800,2,7,1,3,765,208602 703 | 7,1164,1164,1164,1,6,0,2,528,208603 704 | 8,1234,1264,2576,2,10,1,3,666,208604 705 | 5,360,1032,1812,2,8,1,2,672,208605 706 | 7,1473,1484,1484,2,7,0,2,606,208606 707 | 4,0,372,1092,2,7,0,0,0,208607 708 | 7,1643,1824,1824,2,5,2,2,739,208608 709 | 8,1324,1324,1324,2,6,1,2,550,208609 710 | 7,728,728,1456,2,8,1,2,400,208610 711 | 5,876,904,904,1,6,0,1,408,208611 712 | 3,270,729,729,1,5,0,0,0,208612 713 | 4,859,859,1178,1,7,0,1,384,208613 714 | 8,1228,1228,1228,2,5,1,2,472,208614 715 | 4,960,960,960,1,6,0,2,576,208615 716 | 6,725,725,1479,2,6,0,2,475,208616 717 | 6,1064,1350,1350,2,7,1,2,478,208617 718 | 7,718,1576,2554,1,8,0,2,704,208618 719 | 5,1176,1178,1178,1,5,1,2,439,208619 720 | 7,1311,1325,2418,2,9,1,3,983,208620 721 | 5,971,971,971,1,5,1,1,300,208621 722 | 8,1742,1742,1742,2,5,1,2,564,208622 723 | 6,848,848,848,1,3,0,2,420,208623 724 | 4,864,864,864,1,5,0,2,463,208624 725 | 4,941,997,1470,2,7,0,1,548,208625 726 | 9,1698,1698,1698,2,8,1,3,768,208626 727 | 4,864,864,864,1,5,0,2,660,208627 728 | 6,880,1680,1680,2,5,1,2,540,208628 729 | 7,1232,1232,1232,2,6,0,2,632,208629 730 
| 5,1584,1776,1776,2,9,0,3,888,208630 731 | 4,780,848,1208,1,5,0,2,539,208631 732 | 8,1595,1616,1616,2,5,1,2,608,208632 733 | 7,868,1146,1146,2,6,1,2,438,208633 734 | 7,1153,1153,2031,2,8,1,2,541,208634 735 | 5,864,1144,1144,1,6,1,1,264,208635 736 | 5,948,948,948,1,6,0,1,300,208636 737 | 7,880,880,1768,1,6,2,2,320,208637 738 | 3,0,1040,1040,2,6,0,2,400,208638 739 | 8,893,901,1801,2,8,1,3,800,208639 740 | 5,1200,1200,1200,3,5,0,0,0,208640 741 | 7,864,864,1728,2,7,0,2,572,208641 742 | 5,264,768,1432,2,7,0,2,360,208642 743 | 6,912,912,912,1,5,0,1,288,208643 744 | 7,1349,1349,1349,2,6,0,2,539,208644 745 | 5,520,1464,1464,2,6,1,2,480,208645 746 | 8,1337,1337,1337,2,5,1,2,462,208646 747 | 8,1142,1175,2715,3,11,2,2,831,208647 748 | 7,952,980,2256,2,8,1,2,554,208648 749 | 7,1240,1320,2640,1,8,1,4,864,208649 750 | 7,1720,1720,1720,2,7,1,2,527,208650 751 | 4,0,1088,1529,2,9,0,1,240,208651 752 | 4,576,792,1140,1,7,0,0,0,208652 753 | 7,660,660,1320,2,6,0,2,400,208653 754 | 6,1479,1494,1494,2,6,0,2,576,208654 755 | 8,1030,1038,2098,2,8,1,3,878,208655 756 | 6,1026,1026,1026,1,5,0,2,440,208656 757 | 6,729,742,1471,2,6,0,2,440,208657 758 | 8,866,866,1768,2,7,0,2,578,208658 759 | 6,672,672,1386,2,6,1,2,440,208659 760 | 7,744,757,1501,2,6,0,2,440,208660 761 | 8,1318,1328,2531,2,9,1,3,752,208661 762 | 6,864,864,864,1,5,0,1,300,208662 763 | 5,1145,1301,1301,1,5,0,2,440,208663 764 | 7,756,764,1547,2,7,0,2,614,208664 765 | 8,1252,1268,2365,2,8,1,3,856,208665 766 | 8,1494,1494,1494,1,6,1,2,481,208666 767 | 9,1498,1506,1506,2,6,1,2,592,208667 768 | 7,980,980,1714,2,7,1,2,496,208668 769 | 6,983,983,1750,2,7,0,1,423,208669 770 | 7,1860,1836,1836,2,8,1,2,484,208670 771 | 8,1650,1690,3279,3,12,1,3,841,208671 772 | 5,858,858,858,1,5,0,2,576,208672 773 | 4,836,1220,1220,1,6,0,2,396,208673 774 | 6,1029,1117,1117,1,6,1,2,672,208674 775 | 5,912,912,912,1,5,0,1,275,208675 776 | 8,1935,1973,1973,2,9,1,3,895,208676 777 | 6,1204,1204,1204,2,5,0,2,412,208677 778 | 7,1614,1614,1614,2,7,0,3,865,208678 779 | 5,864,894,894,1,5,1,2,440,208679 780 | 5,0,2020,2020,2,10,2,2,630,208680 781 | 6,975,1004,1004,1,4,0,2,504,208681 782 | 7,1237,1253,1253,2,6,1,2,402,208682 783 | 6,761,810,1603,2,7,0,2,484,208683 784 | 7,1413,1430,1430,2,6,0,2,605,208684 785 | 5,1097,1110,1110,1,4,1,2,602,208685 786 | 6,742,742,1484,2,9,1,0,0,208686 787 | 6,1372,1342,1342,2,7,1,2,457,208687 788 | 5,686,966,1652,2,7,0,1,416,208688 789 | 7,956,956,2084,2,8,0,2,618,208689 790 | 4,901,901,901,1,4,0,1,281,208690 791 | 6,832,976,2087,2,9,0,2,444,208691 792 | 7,1145,1145,1145,2,5,1,2,397,208692 793 | 6,1029,1062,1062,1,5,2,2,539,208693 794 | 7,1117,1127,2013,2,8,1,2,455,208694 795 | 8,1496,1496,1496,2,7,0,2,474,208695 796 | 7,712,1086,1895,2,7,1,2,409,208696 797 | 6,650,888,1564,2,7,1,2,476,208697 798 | 6,660,1285,1285,1,7,1,2,528,208698 799 | 5,773,773,773,1,4,0,1,240,208699 800 | 9,1926,1966,3140,3,11,2,3,820,208700 801 | 5,731,981,1768,1,7,2,1,240,208701 802 | 6,616,616,1688,2,8,0,2,603,208702 803 | 4,1196,1196,1196,1,5,0,2,440,208703 804 | 7,728,728,1456,2,7,1,2,410,208704 805 | 9,1734,1734,2822,3,12,1,3,1020,208705 806 | 5,936,1128,1128,1,5,0,1,286,208706 807 | 7,1417,1428,1428,2,6,0,2,554,208707 808 | 5,980,980,980,2,6,0,1,384,208708 809 | 5,1324,1072,1576,1,6,1,2,528,208709 810 | 5,1024,1086,1086,1,6,1,2,484,208710 811 | 5,849,1075,2138,2,11,0,2,360,208711 812 | 6,1040,1309,1309,1,5,1,2,484,208712 813 | 6,848,848,848,1,4,1,2,420,208713 814 | 5,540,1044,1044,1,4,0,2,504,208714 815 | 6,1442,1442,1442,1,7,0,1,301,208715 816 | 5,686,686,1250,1,7,0,1,280,208716 
817 | 7,1649,1661,1661,2,6,0,2,598,208717 818 | 5,1008,1008,1008,1,4,1,1,275,208718 819 | 8,1568,1689,1689,2,7,2,3,857,208719 820 | 6,1010,1052,1052,1,6,0,2,440,208720 821 | 7,1358,1358,1358,2,6,1,2,484,208721 822 | 7,798,798,1640,2,6,0,2,595,208722 823 | 4,936,936,936,1,4,0,2,576,208723 824 | 7,847,847,1733,2,7,1,2,433,208724 825 | 6,778,944,1489,2,7,1,1,240,208725 826 | 8,1489,1489,1489,2,7,1,2,776,208726 827 | 10,2078,2084,2084,2,7,1,3,1220,208727 828 | 5,784,784,784,1,5,0,0,0,208728 829 | 7,1454,1434,1434,2,6,1,2,527,208729 830 | 5,1013,1160,2126,2,7,0,2,538,208730 831 | 7,600,520,1223,2,4,0,2,480,208731 832 | 6,1392,1392,1392,1,6,2,2,458,208732 833 | 7,600,520,1200,2,4,0,2,480,208733 834 | 7,941,941,1829,2,7,1,2,613,208734 835 | 6,1516,1516,1516,1,6,0,2,472,208735 836 | 5,1144,1144,1144,1,6,0,2,456,208736 837 | 4,1067,1067,1067,2,4,0,2,436,208737 838 | 5,1559,1559,1559,1,5,0,2,812,208738 839 | 6,483,483,987,1,5,0,1,264,208739 840 | 5,1099,1099,1099,1,6,0,1,352,208740 841 | 5,768,768,1200,1,6,0,1,240,208741 842 | 6,672,810,1482,2,7,0,1,400,208742 843 | 5,650,958,1539,2,8,1,2,686,208743 844 | 6,1127,1165,1165,1,6,0,2,490,208744 845 | 5,1800,1800,1800,2,10,0,0,0,208745 846 | 5,876,876,1416,1,7,1,3,720,208746 847 | 5,1390,1701,1701,2,6,2,2,611,208747 848 | 7,740,1006,1775,2,7,1,2,425,208748 849 | 5,864,864,864,1,5,1,1,338,208749 850 | 6,907,1307,2358,3,10,1,2,360,208750 851 | 6,528,1094,1855,2,7,1,2,512,208751 852 | 6,848,848,848,1,3,0,2,420,208752 853 | 8,1273,1456,1456,2,7,1,2,400,208753 854 | 7,918,918,1646,2,7,2,1,240,208754 855 | 6,1127,1445,1445,1,7,1,2,645,208755 856 | 5,1763,1779,1779,1,6,1,2,454,208756 857 | 5,1040,1040,1040,1,6,0,1,260,208757 858 | 6,940,1026,1026,1,5,0,2,576,208758 859 | 6,702,702,1481,2,6,1,2,343,208759 860 | 7,1090,1370,1370,2,6,1,2,479,208760 861 | 6,1054,1512,2654,2,9,1,2,619,208761 862 | 7,912,912,1426,1,7,1,1,216,208762 863 | 5,1039,1039,1039,1,6,0,2,504,208763 864 | 6,1040,1097,1097,2,6,0,2,480,208764 865 | 5,1148,1148,1148,1,6,0,1,672,208765 866 | 7,1372,1372,1372,2,6,0,2,529,208766 867 | 5,1002,1002,1002,1,5,0,2,902,208767 868 | 8,1638,1646,1646,2,6,1,3,870,208768 869 | 4,1040,1120,1120,1,5,0,2,544,208769 870 | 5,0,1547,2320,2,7,1,2,672,208770 871 | 7,1050,1062,1949,2,8,1,2,574,208771 872 | 5,894,894,894,1,5,0,1,308,208772 873 | 6,804,804,1682,2,7,0,2,523,208773 874 | 5,105,910,910,1,5,0,2,414,208774 875 | 5,832,1036,1268,1,6,1,1,288,208775 876 | 5,676,676,1131,1,5,0,1,200,208776 877 | 8,1184,1184,2610,2,11,1,2,550,208777 878 | 4,1064,1040,1040,1,5,0,2,648,208778 879 | 9,1462,1462,2224,2,10,1,3,738,208779 880 | 5,1109,1155,1155,1,6,0,2,576,208780 881 | 5,864,864,864,1,6,0,1,336,208781 882 | 5,1090,1090,1090,1,5,0,2,450,208782 883 | 7,1156,1187,1717,2,7,1,2,400,208783 884 | 6,808,808,1593,2,7,1,2,389,208784 885 | 4,795,954,2230,1,10,0,1,440,208785 886 | 5,892,892,892,1,5,0,1,288,208786 887 | 9,1698,1709,1709,2,5,1,2,506,208787 888 | 5,1626,1712,1712,2,8,0,2,588,208788 889 | 5,816,872,1393,1,8,0,1,300,208789 890 | 8,2217,2217,2217,2,8,1,2,621,208790 891 | 6,1505,1505,1505,1,6,1,2,505,208791 892 | 5,672,672,924,1,6,1,2,576,208792 893 | 6,918,918,1683,2,7,1,2,440,208793 894 | 6,1059,1068,1068,1,6,0,1,264,208794 895 | 5,1383,1383,1383,1,6,1,1,354,208795 896 | 5,0,1535,1535,2,8,0,2,400,208796 897 | 6,780,983,1796,1,8,1,2,483,208797 898 | 4,951,951,951,1,6,0,1,327,208798 899 | 5,0,1120,2240,2,12,0,2,528,208799 900 | 9,2330,2364,2364,2,11,2,3,820,208800 901 | 5,912,1236,1236,1,6,1,1,288,208801 902 | 4,858,858,858,1,4,0,1,684,208802 903 | 
5,992,1306,1306,1,5,0,1,756,208803 904 | 7,783,807,1509,2,8,1,2,393,208804 905 | 7,1670,1670,1670,2,7,1,3,690,208805 906 | 5,876,902,902,1,6,0,1,288,208806 907 | 5,1056,1063,1063,1,6,0,1,280,208807 908 | 8,1623,1636,1636,2,8,1,3,865,208808 909 | 7,1017,1020,2057,1,6,1,1,180,208809 910 | 5,864,902,902,1,5,0,2,484,208810 911 | 6,742,742,1484,2,8,1,2,390,208811 912 | 5,1105,1105,2274,2,12,0,2,480,208812 913 | 5,1268,1268,1268,1,5,0,1,252,208813 914 | 5,768,1015,1015,1,6,0,1,450,208814 915 | 5,1001,1001,2002,2,8,0,3,871,208815 916 | 6,612,612,1224,2,4,0,2,528,208816 917 | 4,546,546,1092,1,6,0,1,286,208817 918 | 2,480,480,480,0,4,0,1,308,208818 919 | 4,1134,1229,1229,1,6,0,1,284,208819 920 | 7,1104,912,2127,2,8,1,3,833,208820 921 | 6,1184,1414,1414,1,6,1,2,601,208821 922 | 6,928,936,1721,2,7,0,2,471,208822 923 | 5,1272,1272,2200,2,9,0,0,0,208823 924 | 6,1316,1316,1316,2,6,1,2,397,208824 925 | 6,1604,1617,1617,2,5,1,2,533,208825 926 | 6,1686,1686,1686,2,7,1,2,612,208826 927 | 5,1126,1126,1126,2,6,0,2,540,208827 928 | 8,1181,1234,2374,2,10,1,3,656,208828 929 | 7,832,1098,1978,2,9,1,2,486,208829 930 | 8,1753,1788,1788,2,7,1,2,522,208830 931 | 7,964,993,2236,2,8,1,2,642,208831 932 | 8,1466,1466,1466,2,7,0,3,610,208832 933 | 5,925,925,925,1,5,0,1,429,208833 934 | 9,1905,1905,1905,2,8,1,3,788,208834 935 | 7,1500,1500,1500,2,6,0,2,570,208835 936 | 7,585,2069,2069,2,9,1,2,505,208836 937 | 4,600,747,747,1,5,0,2,528,208837 938 | 7,1176,1200,1200,2,5,0,2,555,208838 939 | 7,1113,1113,1971,2,8,1,2,689,208839 940 | 7,1391,1391,1962,2,7,0,3,868,208840 941 | 7,1032,1207,2403,2,10,2,1,349,208841 942 | 6,1728,1728,1728,2,8,0,2,574,208842 943 | 7,992,1022,2060,2,8,1,2,390,208843 944 | 4,1440,1440,1440,2,8,0,0,0,208844 945 | 5,1632,1632,1632,2,8,0,2,576,208845 946 | 6,819,1344,1344,1,7,1,2,525,208846 947 | 5,1088,1188,1869,1,7,0,2,456,208847 948 | 5,1144,1144,1144,1,6,1,1,796,208848 949 | 8,1616,1629,1629,2,9,1,3,808,208849 950 | 7,936,936,1776,2,7,1,2,474,208850 951 | 6,1161,1381,1381,1,5,1,2,676,208851 952 | 5,864,864,864,1,5,0,2,720,208852 953 | 5,828,965,965,1,6,0,1,300,208853 954 | 5,768,768,768,1,5,0,1,396,208854 955 | 5,784,1168,1968,2,7,1,2,530,208855 956 | 6,945,980,980,2,4,0,0,0,208856 957 | 6,979,979,1958,2,8,0,2,492,208857 958 | 6,561,561,1229,1,5,1,2,462,208858 959 | 5,1057,1057,1057,1,6,0,2,576,208859 960 | 7,1337,1337,1337,2,6,0,2,531,208860 961 | 7,696,696,1416,2,6,0,2,484,208861 962 | 5,858,858,858,1,4,0,0,0,208862 963 | 6,1330,1542,2872,2,11,1,2,619,208863 964 | 6,804,804,1548,2,7,1,2,440,208864 965 | 9,1800,1800,1800,2,7,0,2,702,208865 966 | 7,817,824,1894,2,8,1,2,510,208866 967 | 6,783,783,1484,2,8,1,2,393,208867 968 | 5,728,976,1308,1,7,2,1,256,208868 969 | 5,1098,1098,1098,1,6,0,1,260,208869 970 | 3,600,600,968,1,6,0,0,0,208870 971 | 6,588,1095,1095,1,6,0,1,264,208871 972 | 4,720,720,1192,1,6,0,0,0,208872 973 | 7,764,764,1626,2,6,0,2,474,208873 974 | 6,918,918,918,2,5,1,1,264,208874 975 | 7,1428,1428,1428,2,6,0,2,480,208875 976 | 7,728,1136,2019,1,8,0,2,532,208876 977 | 7,673,673,1382,2,6,0,2,490,208877 978 | 4,440,869,869,1,4,0,0,0,208878 979 | 7,1241,1241,1241,1,4,0,2,569,208879 980 | 4,894,894,894,1,5,0,2,400,208880 981 | 5,1121,1121,1121,1,5,0,2,480,208881 982 | 7,944,999,999,1,6,0,2,588,208882 983 | 8,1225,1276,2612,2,8,1,3,676,208883 984 | 7,1266,1266,1266,2,6,1,2,388,208884 985 | 8,1128,1149,2290,2,9,1,2,779,208885 986 | 5,0,1302,1734,2,8,0,2,539,208886 987 | 5,1164,1164,1164,1,5,0,1,240,208887 988 | 6,485,1001,1635,1,5,0,1,255,208888 989 | 9,1930,1940,1940,2,8,1,3,606,208889 990 | 
6,848,1118,2030,2,8,1,2,551,208890 991 | 7,770,778,1576,2,6,0,2,614,208891 992 | 8,1396,1407,2392,2,7,1,3,870,208892 993 | 8,916,916,1742,1,8,1,2,424,208893 994 | 6,822,1020,1851,2,7,1,2,440,208894 995 | 6,750,750,1500,2,6,0,2,564,208895 996 | 10,1700,1718,1718,2,7,1,3,786,208896 997 | 4,747,774,1230,1,5,0,1,305,208897 998 | 5,1050,1050,1050,1,6,0,1,368,208898 999 | 6,1442,1442,1442,2,6,1,2,615,208899 1000 | 3,1007,1077,1077,1,6,1,1,210,208900 1001 | 7,1187,1208,1208,2,6,0,2,632,181500 1002 | 3,0,944,944,1,4,0,2,528,142125 1003 | 5,691,691,691,1,4,0,1,216,147500 1004 | 8,1574,1574,1574,2,7,1,3,824,181500 1005 | 5,1680,1680,1680,2,8,0,2,528,136905 1006 | 7,1346,1504,1504,2,7,1,2,457,181000 1007 | 5,985,985,985,2,6,0,1,328,149900 1008 | 6,1657,1657,1657,2,7,1,2,484,163500 1009 | 4,546,546,1092,1,6,0,1,286,181500 1010 | 7,1710,1710,1710,2,7,1,2,550,181500 1011 | 5,1008,1008,1522,2,7,0,0,0,102000 1012 | 5,720,720,1271,2,7,1,1,312,135000 1013 | 5,0,1664,1664,2,8,0,0,0,100000 1014 | 6,602,900,1502,1,7,2,1,180,165000 1015 | 5,1022,1022,1022,1,4,0,1,280,181500 1016 | 6,1082,1082,1082,1,5,1,1,240,119200 1017 | 8,810,810,1665,2,6,0,2,528,181500 1018 | 7,1504,1504,1504,2,6,1,2,478,181500 1019 | 8,1220,1360,1360,1,4,1,2,565,187500 1020 | 7,384,802,1472,2,7,1,2,402,160000 1021 | 7,1362,1506,1506,2,6,1,2,440,181500 1022 | 4,1132,1132,1132,1,5,0,2,451,176000 1023 | 7,1199,1220,1220,2,6,0,2,632,194000 1024 | 5,912,912,1248,1,6,0,1,160,87000 1025 | 7,1346,1504,1504,2,7,1,2,437,191000 1026 | 8,1565,2898,2898,2,10,1,2,665,181500 1027 | 5,882,882,882,1,5,0,2,461,112500 1028 | 5,1268,1264,1264,1,6,2,2,461,167500 1029 | 8,1638,1646,1646,2,7,1,3,800,181500 1030 | 5,768,968,1376,1,6,1,1,240,105000 1031 | 6,672,672,1218,1,7,0,1,264,118000 1032 | 5,686,948,1928,2,10,0,0,0,160000 1033 | 7,824,1687,3082,2,12,2,2,672,197000 1034 | 8,1338,1352,2520,2,10,1,3,796,181500 1035 | 7,1654,1654,1654,2,6,0,3,900,181500 1036 | 5,920,954,954,1,5,1,1,240,119750 1037 | 4,0,845,845,1,5,0,1,290,84000 1038 | 9,1620,1620,1620,2,6,1,3,912,181500 1039 | 8,1055,1055,2263,2,7,1,2,905,181500 1040 | 4,546,798,1344,1,6,1,0,0,97000 1041 | 4,630,630,630,1,3,0,1,286,80000 1042 | 5,1134,1803,1803,2,8,1,2,484,155000 1043 | 6,800,800,1632,1,7,0,2,484,173000 1044 | 6,1306,1306,1306,2,5,1,2,624,196000 1045 | 7,1475,1532,2329,2,10,1,2,514,181500 1046 | 8,2524,2524,2524,2,9,1,2,542,181500 1047 | 3,0,1733,1733,2,8,1,2,452,139600 1048 | 9,1992,1992,2868,3,11,1,3,716,181500 1049 | 5,990,990,990,1,5,0,2,672,145000 1050 | 5,0,1771,1771,1,9,1,2,336,115000 1051 | 4,0,930,930,1,6,0,1,308,84900 1052 | 7,1302,1302,1302,2,6,0,2,436,176485 1053 | 7,1316,1316,1316,2,6,1,2,440,181500 1054 | 6,816,1127,1977,1,9,1,2,540,165000 1055 | 5,1216,1526,1526,1,7,1,1,364,144500 1056 | 8,1065,1091,1989,2,7,1,2,586,181500 1057 | 6,1193,1523,1523,2,7,1,2,478,180000 1058 | 7,1364,1364,1364,2,6,1,2,484,185850 1059 | 7,973,979,1850,2,7,1,2,467,181500 1060 | 9,1104,1130,2184,2,10,1,3,836,181500 1061 | 6,854,1096,1991,1,7,1,2,432,181500 1062 | 8,1338,1338,1338,2,6,0,2,582,181500 1063 | 3,894,894,894,1,6,0,3,1248,81000 1064 | 5,662,1422,2337,2,10,0,2,560,90000 1065 | 6,1103,1103,1103,1,5,1,2,440,90001 1066 | 5,1154,1154,1154,1,6,1,2,480,90002 1067 | 7,1306,1306,2260,2,7,0,2,533,90003 1068 | 6,799,799,1571,2,7,1,2,380,90004 1069 | 6,780,798,1611,1,7,0,2,442,90005 1070 | 6,942,1291,2521,2,10,1,2,576,90006 1071 | 5,845,893,893,1,4,0,2,576,90007 1072 | 5,1048,1048,1048,1,6,0,1,286,90008 1073 | 6,727,829,1556,1,8,0,2,441,90009 1074 | 5,810,1002,1456,1,7,1,1,280,90010 1075 | 
6,690,698,1426,1,6,0,2,440,90011 1076 | 7,1240,1240,1240,2,5,0,3,826,90012 1077 | 7,800,960,1740,1,6,2,1,240,90013 1078 | 5,796,1096,1466,2,7,1,2,566,90014 1079 | 5,1096,1096,1096,1,6,0,1,299,90015 1080 | 6,848,848,848,1,3,1,2,420,90016 1081 | 5,990,990,990,1,5,0,1,299,90017 1082 | 6,1258,1258,1258,2,5,0,2,528,90018 1083 | 5,1040,1040,1040,1,5,0,1,308,133000 1084 | 7,1459,1459,1459,2,6,1,2,527,133001 1085 | 6,1251,1251,1251,1,6,2,1,461,133002 1086 | 6,691,691,1498,2,6,1,2,409,133003 1087 | 6,936,996,996,1,5,0,2,564,133004 1088 | 4,546,546,1092,1,6,0,1,286,133005 1089 | 8,1082,1082,1953,2,9,1,3,1043,133006 1090 | 7,970,970,1709,2,7,0,2,380,133007 1091 | 8,1247,1247,1247,1,4,1,2,550,133008 1092 | 3,0,1040,1040,2,6,0,2,400,133009 1093 | 7,600,624,1252,2,4,0,2,462,133010 1094 | 6,1181,1390,1694,2,7,1,2,576,133011 1095 | 5,864,1200,1200,1,6,0,2,884,133012 1096 | 5,936,936,936,1,4,0,1,308,133013 1097 | 6,1314,1314,1314,2,6,1,2,440,133014 1098 | 6,684,773,1355,1,7,0,0,0,133015 1099 | 8,1074,1088,1088,1,5,0,2,461,133016 1100 | 4,672,757,1324,1,6,0,1,240,133017 1101 | 7,1271,1601,1601,2,7,1,2,478,133018 1102 | 2,290,438,438,1,3,0,1,246,133019 1103 | 5,950,950,950,1,5,0,1,280,133020 1104 | 5,1010,1134,1134,1,6,0,1,254,133021 1105 | 6,655,1194,1194,1,6,1,2,539,133022 1106 | 5,630,630,1302,2,6,0,2,440,133023 1107 | 8,1463,1500,2622,2,9,2,2,712,133024 1108 | 7,910,1442,1442,2,6,1,2,719,133025 1109 | 7,868,887,2021,2,9,1,2,422,133026 1110 | 6,924,948,1690,2,7,1,2,463,133027 1111 | 8,1836,1836,1836,2,7,1,3,862,133028 1112 | 6,773,773,1658,2,8,1,2,431,133029 1113 | 7,803,1098,1964,2,8,1,2,483,133030 1114 | 5,816,816,816,1,5,0,1,308,133031 1115 | 5,1008,1008,1008,1,6,0,1,240,133032 1116 | 5,833,833,833,1,4,0,1,326,133033 1117 | 8,1734,1734,1734,2,7,1,3,928,133034 1118 | 8,408,779,1419,2,7,1,2,527,133035 1119 | 5,894,894,894,1,5,0,2,450,133036 1120 | 5,533,1021,1601,1,6,0,1,300,133037 1121 | 5,1040,1040,1040,1,6,0,1,286,133038 1122 | 6,1012,1012,1012,1,6,1,1,308,133039 1123 | 7,1552,1552,1552,2,7,0,3,782,112000 1124 | 4,672,960,960,1,5,0,1,288,112000 1125 | 5,698,698,698,1,4,0,0,0,118000 1126 | 7,384,812,1482,2,7,1,2,392,163900 1127 | 4,1005,1005,1005,1,5,1,2,672,115000 1128 | 7,1373,1555,1555,2,7,1,3,660,174000 1129 | 7,1530,1530,1530,2,7,1,3,630,112000 1130 | 7,847,847,1959,2,8,1,2,434,112000 1131 | 5,936,936,936,1,4,0,2,672,140000 1132 | 4,1122,1328,1981,2,7,2,2,576,135000 1133 | 5,974,974,974,1,5,0,0,0,93500 1134 | 6,1008,1178,2210,2,8,0,1,205,117500 1135 | 8,1128,1142,2020,2,8,1,2,466,169000 1136 | 6,916,916,1600,2,7,1,2,460,169000 1137 | 6,960,986,986,1,5,1,1,180,102000 1138 | 6,1032,1032,1252,1,6,0,1,288,119000 1139 | 5,780,780,1020,1,6,0,0,0,94000 1140 | 6,1567,1567,1567,2,5,2,2,714,196000 1141 | 5,915,1167,1167,1,6,1,2,495,144000 1142 | 5,952,952,952,1,4,0,2,840,139000 1143 | 5,780,1088,1868,2,9,1,2,484,197500 1144 | 8,1466,1466,2828,3,11,1,3,1052,169000 1145 | 5,1006,1006,1006,1,5,0,0,0,80000 1146 | 4,672,672,924,1,5,0,1,280,80000 1147 | 5,1042,1042,1576,1,8,1,1,225,149000 1148 | 6,1298,1298,1298,2,5,1,2,403,180000 1149 | 7,704,860,1564,1,7,1,1,234,174500 1150 | 7,572,572,1111,1,5,1,1,288,116900 1151 | 7,650,832,1482,1,7,0,2,324,143000 1152 | 6,932,932,932,1,4,1,1,306,124000 1153 | 5,1466,1466,1466,1,6,2,2,528,149900 1154 | 6,1073,1811,1811,1,6,1,2,470,230000 1155 | 6,816,816,816,1,5,0,1,432,120500 1156 | 7,864,902,1820,1,8,2,2,492,169000 1157 | 5,1437,1437,1437,2,6,1,2,528,169000 1158 | 5,1219,1265,1265,2,6,1,2,502,179900 1159 | 7,1314,1314,1314,2,6,1,2,626,169000 1160 | 
8,1580,1580,1580,2,7,0,3,830,169000 1161 | 6,901,943,1876,2,8,1,2,540,185000 1162 | 6,855,855,1456,2,7,1,2,440,146000 1163 | 6,1296,1640,1640,1,7,1,2,924,169000 1164 | 5,894,894,894,1,5,1,2,450,129000 1165 | 4,1198,1258,1258,0,6,0,2,400,108959 1166 | 5,1360,1432,1432,1,5,1,2,588,194000 1167 | 7,1502,1502,1502,2,7,0,2,644,169000 1168 | 8,1694,1694,1694,2,7,0,3,776,169000 1169 | 6,959,959,1671,2,7,1,2,472,169001 1170 | 6,1127,1236,2108,2,7,2,2,540,169002 1171 | 10,1930,1831,3627,3,10,1,3,807,169003 1172 | 6,1096,1118,1118,1,6,1,1,358,169004 1173 | 6,1261,1261,1261,1,6,1,2,433,169005 1174 | 7,625,625,1250,2,5,0,2,625,169006 1175 | 5,1598,1636,3086,3,12,1,0,0,169007 1176 | 6,952,1170,2345,2,9,1,2,360,169008 1177 | 8,1683,2129,2872,2,9,1,2,541,169009 1178 | 5,876,923,923,1,5,0,1,264,169010 1179 | 6,818,818,1224,1,5,0,1,210,169011 1180 | 5,731,820,1343,1,7,1,1,186,169012 1181 | 5,0,1124,1124,1,5,1,0,0,169013 1182 | 7,1216,1298,2514,2,8,0,2,693,169014 1183 | 8,1600,1652,1652,2,5,1,2,482,169015 1184 | 10,2396,2411,4476,3,10,2,3,813,169016 1185 | 5,1120,1130,1130,1,5,1,2,720,169017 1186 | 5,1572,1572,1572,1,5,2,3,995,169018 1187 | 5,784,949,1221,1,7,0,1,392,169019 1188 | 3,978,1014,1699,2,7,0,2,420,169020 1189 | 8,1624,1624,1624,2,5,1,3,757,169021 1190 | 7,831,831,1660,2,7,0,2,493,169022 1191 | 7,994,1028,1804,2,7,1,2,442,169023 1192 | 4,1249,1622,1622,1,7,1,4,1356,169024 1193 | 8,776,764,1441,2,5,0,2,492,169025 1194 | 5,702,842,1472,1,6,0,1,250,169026 1195 | 6,1224,1224,1224,2,5,0,2,402,169027 1196 | 6,663,663,1352,1,7,0,1,299,169028 1197 | 6,728,728,1456,2,8,0,2,400,169029 1198 | 7,879,879,1863,2,9,1,3,660,169030 1199 | 7,815,815,1690,1,7,1,1,225,169031 1200 | 7,1212,1212,1212,2,6,0,2,573,169032 1201 | 4,1051,1382,1382,1,6,1,2,459,169033 1202 | 4,864,864,864,1,5,0,1,280,169034 1203 | 7,866,866,1779,2,6,0,2,546,169035 1204 | 5,884,884,1348,1,5,1,1,216,169036 1205 | 7,1630,1630,1630,2,6,1,2,451,169037 1206 | 5,1056,1074,1074,1,6,0,2,495,169038 1207 | 7,2158,2196,2196,2,7,1,3,701,169039 1208 | 4,1056,1056,1056,1,5,0,1,384,169040 1209 | 6,1682,1700,1700,2,7,0,2,544,169041 1210 | 5,931,1283,1283,1,6,0,2,506,169042 1211 | 8,1660,1660,1660,2,8,1,2,500,169043 1212 | 6,1055,1055,1845,2,8,1,2,462,169044 1213 | 8,559,1080,1752,2,8,0,2,492,169045 1214 | 4,672,672,672,1,4,0,1,234,169046 1215 | 4,648,960,960,0,3,0,1,364,169047 1216 | 5,925,999,999,1,6,0,1,300,169048 1217 | 5,894,894,894,1,5,0,1,384,169049 1218 | 6,0,1318,1902,2,8,0,2,539,169050 1219 | 8,1300,1314,1314,2,6,0,2,552,169051 1220 | 4,0,672,912,1,3,0,0,0,169052 1221 | 6,672,672,1218,1,7,0,0,0,169053 1222 | 5,912,912,912,1,5,0,1,288,169054 1223 | 5,952,1211,1211,1,5,1,1,322,169055 1224 | 6,1040,1168,1846,2,7,1,1,315,169056 1225 | 5,2136,2136,2136,2,7,0,2,528,169057 1226 | 7,788,788,1490,2,8,1,2,388,169058 1227 | 6,588,1138,1138,1,6,0,1,264,169059 1228 | 6,894,894,1933,2,9,1,3,668,169060 1229 | 5,912,912,912,1,5,0,2,576,169061 1230 | 9,1702,1702,1702,1,7,1,3,1052,169062 1231 | 5,1075,1507,1507,2,7,0,1,404,169063 1232 | 5,1361,1361,2620,2,12,1,2,600,169064 1233 | 5,1106,1190,1190,1,6,0,2,540,169065 1234 | 4,0,1224,1224,2,6,0,2,462,169066 1235 | 5,1188,1188,1188,1,6,0,2,531,169067 1236 | 5,940,1024,1964,1,7,0,0,0,169068 1237 | 5,747,892,1784,1,9,0,1,180,169069 1238 | 7,764,764,1626,2,6,0,2,474,169070 1239 | 7,847,847,1948,2,8,1,2,434,169071 1240 | 6,1141,1141,1141,1,6,0,2,484,169072 1241 | 8,1476,1484,1484,2,6,1,2,472,169073 1242 | 7,884,884,1768,2,8,0,2,543,169074 1243 | 7,1689,1689,1689,2,7,0,3,954,169075 1244 | 7,1053,1173,1173,2,6,2,2,528,169076 
1245 | 10,2076,2076,2076,2,7,1,3,850,169077 1246 | 8,792,792,1517,1,7,2,2,400,169078 1247 | 6,585,1140,1868,3,7,1,2,477,169079 1248 | 7,756,756,1553,2,6,0,2,615,169080 1249 | 6,1012,1034,1034,1,6,0,3,888,169081 1250 | 6,735,1134,2058,1,8,1,2,396,169082 1251 | 5,876,988,988,1,6,0,1,276,169083 1252 | 7,2110,2110,2110,2,8,2,2,522,169084 1253 | 7,1405,1405,1405,2,6,1,2,478,169085 1254 | 5,864,874,874,1,5,0,1,288,169086 1255 | 7,1192,1516,2167,2,9,2,2,518,169087 1256 | 7,746,760,1656,2,7,1,2,397,169088 1257 | 6,884,959,1367,1,6,1,1,560,169089 1258 | 8,1986,1987,1987,2,7,1,2,691,169090 1259 | 5,864,864,864,1,4,0,0,0,169091 1260 | 7,856,1166,1166,2,5,0,2,400,169092 1261 | 6,1054,1054,1054,1,6,0,2,460,169093 1262 | 6,841,892,1675,2,7,1,2,502,169094 1263 | 5,1050,1050,1050,1,5,0,1,338,169095 1264 | 4,1104,1104,1788,1,8,2,1,304,169096 1265 | 6,764,1060,1824,1,8,1,2,520,169097 1266 | 6,1405,1337,1337,2,5,0,2,511,169098 1267 | 7,691,713,1452,2,6,0,2,506,169099 1268 | 7,925,964,1889,1,9,1,1,308,169100 1269 | 9,2002,2018,2018,2,10,1,3,746,169101 1270 | 8,728,1968,3447,3,11,2,3,1014,169102 1271 | 5,874,874,1524,1,7,0,1,315,169103 1272 | 7,1332,1332,1524,0,4,1,2,586,169104 1273 | 6,1489,1489,1489,2,7,1,2,462,169105 1274 | 5,935,935,935,1,5,0,1,288,169106 1275 | 6,1019,1357,1357,1,5,1,1,312,169107 1276 | 5,661,661,1250,2,8,1,2,552,169108 1277 | 5,928,928,1920,2,10,0,2,400,169109 1278 | 6,723,735,1395,1,6,1,2,497,169110 1279 | 6,1680,1724,1724,1,7,1,2,480,169111 1280 | 8,1128,1128,2031,2,7,1,2,577,169112 1281 | 4,698,698,1128,1,6,0,2,528,169113 1282 | 7,1573,1573,1573,2,6,0,2,544,169114 1283 | 7,1309,1339,1339,2,6,1,2,484,169115 1284 | 5,1040,1040,1040,2,5,0,2,484,169116 1285 | 6,912,912,1824,2,8,0,0,0,169117 1286 | 6,804,1699,2447,2,10,1,1,336,169118 1287 | 6,780,825,1412,1,6,1,1,280,169119 1288 | 6,1328,1328,1328,1,6,2,2,528,169120 1289 | 5,1624,1582,1582,2,7,0,2,390,169121 1290 | 8,1501,1659,1659,2,5,1,2,499,169122 1291 | 8,1085,1120,1970,2,8,1,3,753,169123 1292 | 5,1152,1152,1152,1,6,1,2,484,169124 1293 | 5,630,630,1302,2,6,0,1,264,169125 1294 | 5,994,1378,2372,2,11,0,1,432,169126 1295 | 7,832,832,1664,2,8,1,2,528,169127 1296 | 5,864,864,864,1,5,0,2,572,169128 1297 | 5,1052,1052,1052,1,5,0,1,288,169129 1298 | 5,1120,1128,1128,2,6,0,2,525,169130 1299 | 6,547,1072,1072,2,5,0,2,525,169131 1300 | 10,6110,4692,5642,2,12,3,2,1418,169132 1301 | 5,1246,1246,1246,1,6,0,1,305,169133 1302 | 7,978,1005,1983,2,9,1,2,490,169134 1303 | 6,771,753,1494,1,7,2,1,213,169135 1304 | 8,1165,1203,2526,2,8,1,3,844,169136 1305 | 7,1616,1616,1616,2,7,0,3,834,169137 1306 | 7,976,976,1708,2,7,0,2,380,169138 1307 | 9,1652,1652,1652,2,6,2,2,840,169139 1308 | 7,1368,1368,1368,2,6,1,2,474,169140 1309 | 5,990,990,990,1,5,0,2,480,169141 1310 | 5,924,1122,1122,1,6,0,2,528,169142 1311 | 6,1278,1294,1294,2,6,0,2,496,169143 1312 | 7,1902,1902,1902,2,7,2,2,567,169144 1313 | 7,1274,1274,1274,2,6,0,2,508,169145 1314 | 8,1453,1453,2810,2,9,1,2,750,169146 1315 | 9,1393,1422,2599,2,10,1,3,779,169147 1316 | 4,948,948,948,1,5,1,1,280,169148 1317 | 6,952,1092,2112,2,9,2,2,576,169149 1318 | 8,1622,1630,1630,2,8,1,3,860,169150 1319 | 7,1352,1352,1352,2,5,1,2,466,169151 1320 | 8,1753,1787,1787,2,7,1,3,748,169152 1321 | 4,864,948,948,1,5,0,1,248,169153 1322 | 6,1478,1478,1478,1,6,2,2,442,169154 1323 | 3,0,720,720,1,4,0,1,287,169155 1324 | 7,750,1061,1923,2,8,1,2,564,169156 1325 | 4,420,708,708,1,5,0,0,0,169157 1326 | 8,1795,1795,1795,2,7,1,3,895,169158 1327 | 4,796,796,796,1,5,0,0,0,169159 1328 | 3,544,774,774,1,6,0,0,0,169160 1329 | 
5,816,816,816,1,5,1,1,264,169161 1330 | 6,1510,1584,2792,2,8,2,2,520,169162 1331 | 7,935,955,1632,2,8,1,2,462,169163 1332 | 8,1588,1588,1588,2,7,1,3,825,169164 1333 | 5,911,954,954,1,6,0,2,576,169165 1334 | 4,816,816,816,1,3,1,1,288,169166 1335 | 5,803,803,1360,1,6,0,1,297,169167 1336 | 5,765,765,1365,1,7,0,2,440,169168 1337 | 6,1350,1334,1334,2,6,1,2,630,169169 1338 | 5,1656,1656,1656,2,8,0,2,506,169170 1339 | 4,693,693,693,1,4,0,0,0,169171 1340 | 7,916,920,1861,2,8,0,2,492,169172 1341 | 5,864,864,864,1,5,0,1,288,169173 1342 | 4,858,872,872,1,5,0,4,480,169174 1343 | 6,1114,1114,1114,1,6,0,2,576,169175 1344 | 8,1284,1284,2169,2,7,1,2,647,169176 1345 | 6,896,1172,1913,1,9,1,2,342,169177 1346 | 7,728,728,1456,2,8,1,2,440,169178 1347 | 4,960,960,960,1,5,0,1,308,169179 1348 | 7,1568,2156,2156,2,9,1,2,508,169180 1349 | 8,1732,1776,1776,2,7,1,3,712,169181 1350 | 7,1482,1494,1494,2,5,1,2,514,169182 1351 | 8,684,938,2358,2,8,0,0,0,169183 1352 | 5,1248,1338,2634,2,12,0,4,968,169184 1353 | 6,858,858,1716,1,8,1,2,490,169185 1354 | 6,698,786,1176,1,4,0,2,624,169186 1355 | 8,2033,2053,3238,2,9,1,3,666,169187 1356 | 7,992,992,1865,2,7,1,3,839,169188 1357 | 7,570,1222,1920,3,8,1,2,487,169189 1358 | 5,864,892,892,1,5,0,1,264,169190 1359 | 5,1078,1078,1078,1,6,1,2,500,169191 1360 | 6,756,769,1573,2,5,0,2,440,169192 1361 | 9,1980,1980,1980,2,8,1,3,770,169193 1362 | 5,612,990,2601,3,8,0,2,621,169194 1363 | 7,1530,1530,1530,2,7,1,2,430,169195 1364 | 4,715,1281,1738,2,7,1,1,368,169196 1365 | 6,616,616,1412,2,6,1,2,432,169197 1366 | 7,600,520,1200,2,4,0,2,480,169198 1367 | 7,814,814,1674,2,7,0,2,663,169199 1368 | 7,873,882,1790,2,7,0,2,588,169200 1369 | 5,757,925,1475,2,6,1,1,336,169201 1370 | 6,848,848,848,1,4,0,2,420,169202 1371 | 8,1657,1668,1668,2,8,1,2,502,169203 1372 | 4,840,840,1374,1,6,0,1,338,169204 1373 | 6,992,1661,1661,1,8,1,1,377,169205 1374 | 7,1108,1108,2097,2,8,1,2,583,169206 1375 | 10,2633,2633,2633,2,8,2,3,804,169207 1376 | 7,1026,1026,1958,2,9,1,3,936,169208 1377 | 8,1571,1571,1571,2,7,1,3,722,169209 1378 | 6,768,790,790,1,4,0,1,160,169210 1379 | 5,984,984,1604,2,6,0,2,660,169211 1380 | 6,483,483,987,1,5,0,1,264,169212 1381 | 5,384,754,1394,2,7,0,2,400,169213 1382 | 3,864,864,864,1,5,0,1,200,169214 1383 | 6,1205,2117,2117,2,7,2,2,550,169215 1384 | 7,596,998,1762,1,8,0,2,576,169216 1385 | 5,816,1416,1416,2,7,0,2,576,169217 1386 | 6,560,698,1258,1,6,0,1,280,169218 1387 | 4,796,796,1154,1,7,0,1,240,169219 1388 | 7,1392,1392,2784,3,12,2,2,564,169220 1389 | 6,714,1664,2526,2,10,1,1,216,169221 1390 | 9,1746,1746,1746,2,7,2,3,758,169222 1391 | 6,735,869,1218,1,6,1,2,440,169223 1392 | 7,1525,1525,1525,2,6,0,2,541,169224 1393 | 5,1584,1584,1584,2,8,0,3,792,169225 1394 | 5,864,900,900,1,6,1,1,288,169226 1395 | 6,482,1221,1912,2,7,1,2,672,169227 1396 | 7,1356,1500,1500,2,6,1,3,648,169228 1397 | 8,1094,1133,2482,2,9,1,3,642,169229 1398 | 5,747,1687,1687,1,7,2,2,572,169230 1399 | 5,939,939,1513,1,8,0,1,180,169231 1400 | 5,1208,1136,1904,1,7,0,1,240,169232 1401 | 6,976,1160,1608,2,7,1,1,216,169233 1402 | 6,862,950,1158,1,5,1,1,208,169234 1403 | 6,839,864,1593,2,8,1,2,398,169235 1404 | 7,1286,1294,1294,2,6,1,2,662,169236 1405 | 8,1485,1464,1464,2,6,0,3,754,169237 1406 | 3,672,694,1214,1,6,0,3,936,169238 1407 | 8,1594,1646,1646,2,5,1,2,482,169239 1408 | 5,768,768,768,1,5,0,2,396,169240 1409 | 5,833,833,833,1,5,0,0,0,169241 1410 | 4,622,741,1363,1,6,0,2,528,169242 1411 | 7,791,1236,2093,2,7,1,2,542,169243 1412 | 7,944,944,1840,2,6,0,2,622,169244 1413 | 6,856,1112,1668,1,6,0,1,271,169245 1414 | 
4,0,1040,1040,2,6,0,2,420,169246 1415 | 8,1844,1844,1844,2,7,1,2,620,169247 1416 | 6,833,1053,1848,1,8,1,2,370,169248 1417 | 7,1386,1569,1569,2,7,1,3,660,169249 1418 | 4,777,1246,2290,2,11,0,2,560,169250 1419 | 8,1284,1310,2450,2,7,1,3,1069,169251 1420 | 5,1144,1144,1144,1,6,0,1,336,169252 1421 | 6,1844,1844,1844,2,7,1,2,540,169253 1422 | 6,708,708,1416,2,7,1,2,776,169254 1423 | 6,1069,1069,1069,2,4,1,2,440,169255 1424 | 6,848,848,848,1,3,0,2,420,169256 1425 | 6,697,1575,2201,2,8,1,2,432,169257 1426 | 5,1024,1344,1344,1,6,1,1,484,169258 1427 | 6,1252,1252,1252,1,7,0,2,528,169259 1428 | 7,1223,1223,2127,2,5,2,2,525,169260 1429 | 5,913,1048,1558,1,6,1,1,288,169261 1430 | 5,788,804,804,1,4,2,1,240,169262 1431 | 6,1440,1440,1440,2,7,1,2,467,169263 1432 | 5,732,734,1838,2,7,1,2,372,169264 1433 | 6,958,958,958,2,5,0,2,440,169265 1434 | 4,656,968,968,2,5,0,1,216,169266 1435 | 6,936,962,1792,2,8,1,2,451,169267 1436 | 5,1126,1126,1126,2,5,1,2,484,169268 1437 | 6,1319,1537,1537,1,7,1,2,462,169269 1438 | 4,864,864,864,1,5,0,2,528,169270 1439 | 8,1932,1932,1932,2,7,1,3,774,169271 1440 | 6,912,1236,1236,1,6,0,2,923,169272 1441 | 7,539,1040,1725,2,6,1,2,550,169273 1442 | 6,588,1423,2555,2,11,1,2,672,169274 1443 | 6,848,848,848,1,3,1,2,420,169275 1444 | 10,1017,1026,2007,2,10,1,3,812,169276 1445 | 6,952,952,952,1,4,1,1,192,169277 1446 | 7,1422,1422,1422,2,7,0,2,626,169278 1447 | 6,814,913,913,1,6,0,1,240,169279 1448 | 5,1188,1188,1188,1,6,0,1,312,169280 1449 | 8,1220,1220,2090,2,8,1,2,556,169281 1450 | 4,560,796,1346,1,6,0,1,384,169282 1451 | 5,630,630,630,1,3,0,0,0,169283 1452 | 5,896,896,1792,2,8,0,0,0,169284 1453 | 8,1573,1578,1578,2,7,1,3,840,169285 1454 | 5,547,1072,1072,1,5,0,2,525,169286 1455 | 5,1140,1140,1140,1,6,0,0,0,169287 1456 | 7,1221,1221,1221,2,6,0,2,400,169288 1457 | 6,953,953,1647,2,7,1,2,460,169289 1458 | 6,1542,2073,2073,2,7,2,2,500,169290 1459 | 7,1152,1188,2340,2,9,2,1,252,169291 1460 | 5,1078,1078,1078,1,5,0,1,240,169292 1461 | 5,1256,1256,1256,1,6,0,1,276,169293 1462 | -------------------------------------------------------------------------------- /14_trend_parity_plots.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from sklearn.linear_model import LinearRegression 3 | from sklearn.model_selection import train_test_split,cross_val_score 4 | from sklearn.externals import joblib 5 | from sklearn.metrics import mean_squared_error 6 | import matplotlib.pyplot as plt 7 | from math import sqrt 8 | import os 9 | 10 | df = pd.read_csv('./14_input_data.csv') 11 | 12 | X = df[list(df.columns)[:-1]] 13 | y = df['SalePrice'] 14 | X_train, X_test, y_train, y_test = train_test_split(X, y) 15 | regressor = LinearRegression() 16 | regressor.fit(X_train, y_train) 17 | 18 | y_predictions = regressor.predict(X_test) 19 | 20 | meanSquaredError=mean_squared_error(y_test, y_predictions) 21 | rootMeanSquaredError = sqrt(meanSquaredError) 22 | 23 | print("Number of predictions:",len(y_predictions)) 24 | print("Mean Squared Error:", meanSquaredError) 25 | print("Root Mean Squared Error:", rootMeanSquaredError) 26 | print ("Scoring:",regressor.score(X_test, y_test)) 27 | 28 | ## TREND PLOT 29 | y_test25 = y_test[:35] 30 | y_predictions25 = y_predictions[:35] 31 | myrange = [i for i in range(1,36)] 32 | fig = plt.figure() 33 | ax = fig.add_subplot(111) 34 | ax.grid() 35 | plt.plot(myrange,y_test25, marker='o') 36 | plt.plot(myrange,y_predictions25, marker='o') 37 | plt.title('Trend between Actual and Predicted - 35 samples') 38 | 
ax.set_xlabel("No. of Data Points") 39 | ax.set_ylabel("Values- SalePrice") 40 | plt.legend(['Actual points','Predicted values']) 41 | plt.savefig('TrendActualvsPredicted.png',dpi=100) 42 | plt.show() 43 | 44 | 45 | ## PARITY PLOT 46 | y_testp = y_test[:]+50000 47 | y_testm = y_test[:]-50000 48 | fig = plt.figure() 49 | ax = fig.add_subplot(111) 50 | ax.grid() 51 | plt.plot(y_test,y_predictions,'r.') 52 | plt.plot(y_test,y_test,color = 'green') 53 | plt.plot(y_test,y_testp,color = 'blue') 54 | plt.plot(y_test,y_testm,color = 'blue') 55 | plt.title('Parity Plot') 56 | ax.set_xlabel("Actual Values") 57 | ax.set_ylabel("Predicted Values") 58 | plt.legend(['Actual vs Predicted points','Actual value line','Threshold of 50000']) 59 | plt.show() 60 | 61 | ## Data Distribution 62 | fig = plt.figure() 63 | plt.plot([i for i in range(1,1461)],y,'r.') 64 | plt.title('Data Distribution') 65 | plt.show() 66 | 67 | a, b = 0 , 0 68 | for i in range(0,1460): 69 | if(y[i]>250000): 70 | a += 1 71 | else: 72 | b +=1 73 | print(a, b) 74 | 75 | #X = X[:600] 76 | #y = y[:600] 77 | 78 | 79 | -------------------------------------------------------------------------------- /15_outliers.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import pylab 3 | import numpy as np 4 | from scipy import stats 5 | from scipy.stats import kurtosis 6 | from scipy.stats import skew 7 | import matplotlib._pylab_helpers 8 | 9 | df = pd.read_csv('./14_input_data.csv') 10 | 11 | # Finding outlier in data 12 | for i in range(len(df.columns)): 13 | pylab.figure() 14 | pylab.boxplot(df[df.columns[i]]) 15 | #pylab.violinplot(df[df.columns[i]]) 16 | pylab.title(df[df.columns[i]].name) 17 | 18 | list1=[] 19 | 20 | for i in matplotlib._pylab_helpers.Gcf.get_all_fig_managers(): 21 | list1.append(i.canvas.figure) 22 | print (list1) 23 | 24 | for i, j in enumerate(list1): 25 | j.savefig(df[df.columns[i]].name) 26 | 27 | # Removing outliers 28 | z = np.abs(stats.zscore(df)) 29 | print(z) 30 | print(np.where(z > 3)) 31 | print(z[53][9]) 32 | df1 = df[(z < 3).all(axis=1)] 33 | print (df.shape) 34 | print (df1.shape) -------------------------------------------------------------------------------- /16_univariateplot.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import matplotlib.pyplot as plt 3 | import seaborn as sns 4 | 5 | df = pd.read_csv("14_input_data.csv") 6 | df = df.fillna(0) 7 | df = df[:100] 8 | 9 | #Histogram 10 | 11 | fig = plt.figure(figsize=(8,2)) 12 | ax = fig.add_subplot(111) 13 | ax.set(title="Histogram", 14 | ylabel='No of Houses', xlabel='Living Sq.Ft') 15 | ax.hist(df['GrLivArea']) 16 | plt.show() 17 | 18 | #DensityPlot 19 | # sns.distplot is deprecated; kdeplot draws the same shaded density curve 20 | sns.kdeplot(df['GrLivArea'], fill = True, linewidth = 3) 21 | plt.show() 22 | 23 | #BoxPlot 24 | fig = plt.figure(figsize=(8,2)) 25 | ax = fig.add_subplot(111) 26 | ax.set(title="Box Plot", 27 | ylabel='No of Houses', xlabel='Living Sq.Ft') 28 | ax.boxplot(df['GrLivArea']) 29 | plt.show() 30 | -------------------------------------------------------------------------------- /17_bivariateplot.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import matplotlib.pyplot as plt 3 | import seaborn as sns 4 | 5 | 6 | df = pd.read_csv("14_input_data.csv") 7 | df = df.fillna(0) 8 | df = df[:500] 9 | 10 | #ScatterPlot 11 | fig = 
plt.figure(figsize=(8,6)) 12 | ax = fig.add_subplot(111) 13 | ax.set(title='Living area vs Price of the house', 14 | xlabel='Price', ylabel='Area') 15 | price = df['SalePrice'].tolist() 16 | area = df['GrLivArea'].tolist() 17 | ax.scatter(price,area) 18 | plt.show() 19 | 20 | #HeatMapSeaborn 21 | df2 = pd.DataFrame() 22 | df2['sale'] = df['SalePrice'] 23 | df2['area'] = df['GrLivArea'] 24 | fig = plt.figure(figsize=(12,12)) 25 | r = sns.heatmap(df2, cmap='BuPu') 26 | plt.show() 27 | 28 | #HeatMapMatplotlib 29 | fig = plt.figure(figsize=(8,6)) 30 | ax = fig.add_subplot(111) 31 | ax.set(title="Living area vs Price of the house", 32 | ylabel='GrLivArea', xlabel='SalePrice') 33 | ax.hist2d(price,area,bins=100) 34 | plt.show() 35 | -------------------------------------------------------------------------------- /18_multivariateplot.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import matplotlib.pyplot as plt 3 | from pandas.plotting import parallel_coordinates 4 | import plotly 5 | import plotly.graph_objs as go 6 | import numpy as np 7 | 8 | df = pd.read_csv("14_input_data.csv") 9 | parallel_coordinates(df, 'SalePrice') 10 | plt.savefig('ParallelCoordinates.jpg') 11 | 12 | desc_data = df.describe() 13 | desc_data.to_csv('./metrics.csv') 14 | 15 | data = [ 16 | go.Parcoords( 17 | line = dict(colorscale = 'Jet', 18 | showscale = True, 19 | reversescale = True, 20 | cmin = -4000, 21 | cmax = -100), 22 | dimensions = list([ 23 | dict(range = [1,10], 24 | label = 'OverallQual', values = df['OverallQual']), 25 | dict(range = [0,6110], 26 | label = 'TotalBsmtSF', values = df['TotalBsmtSF']), 27 | dict(tickvals = [334,4692], 28 | label = '1stFlrSF', values = df['1stFlrSF']), 29 | dict(range = [334,5642], 30 | label = 'GrLivArea', values = df['GrLivArea']), 31 | dict(range = [0,3], 32 | label = 'FullBath', values = df['FullBath']), 33 | dict(range = [2,14], 34 | label = 'TotRmsAbvGrd', values = df['TotRmsAbvGrd']), 35 | dict(range = [0,3], 36 | label = 'Fireplaces', values = df['Fireplaces']), 37 | dict(range = [0,4], 38 | label = 'GarageCars', values = df['GarageCars']), 39 | dict(range = [0,1418], 40 | label = 'GarageArea', values = df['GarageArea']), 41 | dict(range = [34900,555000], 42 | label = 'SalePrice', values = df['SalePrice']) 43 | ]) 44 | ) 45 | ] 46 | plotly.offline.plot(data, filename = './parallel_coordinates_plot.html', auto_open= True) 47 | -------------------------------------------------------------------------------- /19_vectors.py: -------------------------------------------------------------------------------- 1 | from sklearn.feature_extraction import DictVectorizer 2 | from sklearn.feature_extraction.text import CountVectorizer 3 | from sklearn.feature_extraction.text import TfidfVectorizer 4 | from sklearn.feature_extraction.text import HashingVectorizer 5 | from sklearn.metrics.pairwise import euclidean_distances 6 | 7 | corpus1 = [{'Gender': 'Male'},{'Gender': 'Female'},{'Gender': 'Transgender'},{'Gender': 'Male'},{'Gender': 'Female'}] 8 | corpus2 = ['Bird is a Peacock Bird','Peacock dances very well','It eats variety of seeds','Cumin seed was eaten by it once'] 9 | vectors = [[2, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0],[0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 1], 10 | [0, 0, 0, 0, 0, 1, 0, 1, 1, 0, 0, 0, 1, 1, 0, 0, 0],[0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 0, 1, 0, 0, 0, 1, 0]] 11 | 12 | # one-hot encoding 13 | v1 = DictVectorizer() 14 | print (v1.fit_transform(corpus1).toarray()) 15 | print 
(v1.vocabulary_) 16 | 17 | # bag-of-words (term frequencies, binary frequencies) 18 | v2 = CountVectorizer() 19 | print (v2.fit_transform(corpus2).todense()) 20 | print (v2.vocabulary_) 21 | 22 | print (TfidfVectorizer().fit_transform(corpus2).todense()) 23 | 24 | print (HashingVectorizer(n_features=6).transform(corpus2).todense()) 25 | 26 | print (euclidean_distances([vectors[0]],[vectors[1]])) 27 | print (euclidean_distances([vectors[0]],[vectors[2]])) 28 | print (euclidean_distances([vectors[0]],[vectors[3]])) 29 | 30 | """ 31 | 32 | https://gist.github.com/nithyadurai87/f3fff58ab7272279ef069689fc391dec 33 | 34 | https://gist.github.com/nithyadurai87/491e5e6f9c009ebd88912e71ef9363a4 35 | 36 | 37 | print (v1.fit_transform(corpus1).toarray()) 38 | [[0. 1. 0.] 39 | [1. 0. 0.] 40 | [0. 0. 1.] 41 | [0. 1. 0.] 42 | [1. 0. 0.]] 43 | 44 | print (v1.vocabulary_) 45 | {'Gender=Male': 1, 'Gender=Female': 0, 'Gender=Transgender': 2} 46 | 47 | print (v2.fit_transform(corpus2).todense()) 48 | [[2 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0] 49 | [0 0 0 1 0 0 0 0 0 0 1 0 0 0 1 0 1] 50 | [0 0 0 0 0 1 0 1 1 0 0 0 1 1 0 0 0] 51 | [0 1 1 0 1 0 0 1 0 1 0 1 0 0 0 1 0]] 52 | 53 | print (v2.vocabulary_) 54 | {'bird': 0, 'is': 6, 'peacock': 10, 'dances': 3, 'very': 14, 'well': 16, 'it': 7 55 | , 'eats': 5, 'variety': 13, 'of': 8, 'seeds': 12, 'cumin': 2, 'seed': 11, 'was': 56 | 15, 'eaten': 4, 'by': 1, 'once': 9} 57 | 58 | print (TfidfVectorizer().fit_transform(corpus2).todense()) 59 | [[0.84352956 0. 0. 0. 0. 0. 60 | 0.42176478 0. 0. 0. 0.3325242 0. 61 | 0. 0. 0. 0. 0. ] 62 | [0. 0. 0. 0.52547275 0. 0. 63 | 0. 0. 0. 0. 0.41428875 0. 64 | 0. 0. 0.52547275 0. 0.52547275] 65 | [0. 0. 0. 0. 0. 0.46516193 66 | 0. 0.36673901 0.46516193 0. 0. 0. 67 | 0.46516193 0.46516193 0. 0. 0. ] 68 | [0. 0.38861429 0.38861429 0. 0.38861429 0. 69 | 0. 0.30638797 0. 0.38861429 0. 0.38861429 70 | 0. 0. 0. 0.38861429 0. ]] 71 | 72 | print (HashingVectorizer(n_features=6).transform(corpus2).todense()) 73 | [[ 0. -0.70710678 -0.70710678 0. 0. 0. ] 74 | [ 0. 0. -0.81649658 -0.40824829 0.40824829 0. ] 75 | [ 0.75592895 0. -0.37796447 0. -0.37796447 -0.37796447] 76 | [ 0.25819889 0.77459667 0. -0.51639778 0. 
0.25819889]] 77 | 78 | print (euclidean_distances([vectors[0]],[vectors[1]])) 79 | [[2.82842712]] 80 | 81 | print (euclidean_distances([vectors[0]],[vectors[2]])) 82 | [[3.31662479]] 83 | 84 | print (euclidean_distances([vectors[0]],[vectors[3]])) 85 | [[3.60555128]] 86 | """ 87 | 88 | 89 | 90 | -------------------------------------------------------------------------------- /1stFlrSF.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nithyadurai87/machine_learning_examples/b32ebbfb5f314b08e0e0f4672d1e3ec713110faa/1stFlrSF.png -------------------------------------------------------------------------------- /20_NLTK.py: -------------------------------------------------------------------------------- 1 | """ 2 | import nltk 3 | nltk.download() 4 | """ 5 | from sklearn.feature_extraction.text import CountVectorizer 6 | from nltk import word_tokenize 7 | from nltk.stem import PorterStemmer 8 | from nltk.stem.wordnet import WordNetLemmatizer 9 | from nltk import pos_tag 10 | 11 | def lemmatize(token, tag): 12 | if tag[0].lower() in ['n', 'v']: 13 | return WordNetLemmatizer().lemmatize(token, tag[0].lower()) 14 | return token 15 | 16 | corpus = ['Bird is a Peacock Bird','Peacock dances very well','It eats variety of seeds','Cumin seed was eaten by it once'] 17 | 18 | print (CountVectorizer().fit_transform(corpus).todense()) 19 | print (CountVectorizer(stop_words='english').fit_transform(corpus).todense()) 20 | 21 | print (PorterStemmer().stem('seeds')) 22 | 23 | print (WordNetLemmatizer().lemmatize('gathering', 'v')) 24 | print (WordNetLemmatizer().lemmatize('gathering', 'n')) 25 | 26 | s_lines=[] 27 | for document in corpus: 28 | s_words=[] 29 | for token in word_tokenize(document): 30 | s_words.append(PorterStemmer().stem(token)) 31 | s_lines.append(s_words) 32 | print ('Stemmed:',s_lines) 33 | 34 | tagged_corpus=[] 35 | for document in corpus: 36 | tagged_corpus.append(pos_tag(word_tokenize(document))) 37 | 38 | l_lines=[] 39 | for document in tagged_corpus: 40 | l_words=[] 41 | for token, tag in document: 42 | l_words.append(lemmatize(token, tag)) 43 | l_lines.append(l_words) 44 | print ('Lemmatized:',l_lines) 45 | 46 | """ 47 | [[2 0 0 0 0 0 1 0 0 0 1 0 0 0 0 0 0] 48 | [0 0 0 1 0 0 0 0 0 0 1 0 0 0 1 0 1] 49 | [0 0 0 0 0 1 0 1 1 0 0 0 1 1 0 0 0] 50 | [0 1 1 0 1 0 0 1 0 1 0 1 0 0 0 1 0]] 51 | 52 | words like is, very, well, it, of, was, by, once are ignored. 
hence 4*9 53 | [[2 0 0 0 0 1 0 0 0] 54 | [0 0 1 0 0 1 0 0 0] 55 | [0 0 0 0 1 0 0 1 1] 56 | [0 1 0 1 0 0 1 0 0]] 57 | 58 | you could still see that seed and seeds are separate words 59 | print (PorterStemmer().stem('seeds')) 60 | seed 61 | 62 | print (WordNetLemmatizer().lemmatize('gathering', 'v')) 63 | gather 64 | 65 | print (WordNetLemmatizer().lemmatize('gathering', 'n')) 66 | gathering 67 | 68 | print ('Stemmed:',s_lines) 69 | Stemmed: [['bird', 'is', 'a', 'peacock', 'bird'], ['peacock', 'danc', 'veri', 'w 70 | ell'], ['It', 'eat', 'varieti', 'of', 'seed'], ['cumin', 'seed', 'wa', 'eaten', 71 | 'by', 'it', 'onc']] 72 | 73 | print ('Lemmatized:',l_lines) 74 | Lemmatized: [['Bird', 'be', 'a', 'Peacock', 'Bird'], ['Peacock', 'dance', 'very', 'well'], 75 | ['It', 'eat', 'variety', 'of', 'seed'], ['Cumin', 'seed', 'be', 'eat', 'by', 'it', 'once']] 76 | 77 | """ 78 | -------------------------------------------------------------------------------- /21_Kfold.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from sklearn.model_selection import KFold 3 | from sklearn.model_selection import train_test_split 4 | from sklearn.linear_model import LinearRegression 5 | from math import sqrt 6 | from sklearn.metrics import mean_squared_error 7 | 8 | def linear(): 9 | regressor = LinearRegression() 10 | regressor.fit(X_train, y_train) 11 | y_predictions = regressor.predict(X_test) 12 | return (regressor.score(X_test, y_test),sqrt(mean_squared_error(y_test, y_predictions))) 13 | 14 | df = pd.read_csv('./14_input_data.csv') 15 | 16 | X = df[list(df.columns)[:-1]] 17 | y = df['SalePrice'] 18 | 19 | X = X[:600] 20 | y = y[:600] 21 | 22 | X_train, X_test, y_train, y_test = train_test_split(X, y) 23 | print ("linear(Before Kfold) = ",linear()) 24 | 25 | i=X_train 26 | j=y_train 27 | 28 | for k_no, (train_index, test_index) in enumerate(KFold(n_splits=5,shuffle=True, random_state=42).split(i, j)): # split() yields positional indices, so select fold rows with iloc 29 | X_train = i.iloc[train_index] 30 | y_train = j.iloc[train_index] 31 | X_test = i.iloc[test_index] 32 | y_test = j.iloc[test_index] 33 | print ("linear = ",linear()) -------------------------------------------------------------------------------- /22_stepwise_forward_selection.py: -------------------------------------------------------------------------------- 1 | import statsmodels.regression.linear_model as sm 2 | import pandas as pd 3 | 4 | def Stepwise_Forward_Selection(Data, Inputs,Output): 5 | Model_var1=sm.OLS 6 | X=Data[Inputs] 7 | y=Data[Output] 8 | initial_list=[] 9 | threshold_in=0.05 10 | verbose=True 11 | included = list(initial_list) 12 | while True: 13 | changed=False 14 | excluded = list(set(X.columns)-set(included)) 15 | new_pval = pd.Series(index=excluded, dtype=float) 16 | for new_column in excluded: 17 | model = Model_var1(y, sm.add_constant(pd.DataFrame(X[included+[new_column]]))).fit() 18 | new_pval[new_column] = model.pvalues[new_column] 19 | best_pval = new_pval.min() 20 | if best_pval < threshold_in: 21 | best_feature = new_pval.idxmin() # idxmin returns the column label with the smallest p-value 22 | included.append(best_feature) 23 | changed=True 24 | if verbose: 25 | print('Add {:30} with p-value {:.6}'.format(best_feature, best_pval)) 26 | if not changed: 27 | break 28 | return included 29 | 30 | def build_csv(data, columnlist): 31 | newdata = pd.DataFrame() 32 | for i in columnlist: 33 | newdata[i] = data[i] 34 | 35 | newdata.to_csv("Stepwise_selected.csv", index=False) 36 | 37 | def use_package(): 38 | df = pd.read_csv("./14_input_data.csv") 39 | 40 | included = 
Stepwise_Forward_Selection(df,list(df.columns)[:-1],['SalePrice']) 41 | 42 | build_csv(df, included) 43 | 44 | use_package() -------------------------------------------------------------------------------- /23_input_data.csv: -------------------------------------------------------------------------------- 1 | OverallQual,GrLivArea,GarageCars,TotalBsmtSF,SalePrice 2 | 7,1710,2,856,208500 3 | 6,1262,2,1262,181500 4 | 7,1786,2,920,223500 5 | 7,1717,3,756,140000 6 | 8,2198,3,1145,250000 7 | 5,1362,2,796,143000 8 | 8,1694,2,1686,307000 9 | 7,2090,2,1107,200000 10 | 7,1774,2,952,129900 11 | 5,1077,1,991,118000 12 | 5,1040,1,1040,129500 13 | 9,2324,3,1175,345000 14 | 5,912,1,912,144000 15 | 7,1494,3,1494,279500 16 | 6,1253,1,1253,157000 17 | 7,854,2,832,132000 18 | 6,1004,2,1004,149000 19 | 4,1296,2,0,90000 20 | 5,1114,2,1114,159000 21 | 5,1339,1,1029,139000 22 | 8,2376,3,1158,325300 23 | 7,1108,1,637,139400 24 | 8,1795,2,1777,230000 25 | 5,1060,2,1040,129900 26 | 5,1060,1,1060,154000 27 | 8,1600,3,1566,256300 28 | 5,900,2,900,134800 29 | 8,1704,3,1704,306000 30 | 5,1600,1,1484,207500 31 | 4,520,1,520,68500 32 | 4,1317,1,649,40000 33 | 5,1228,1,1228,149350 34 | 8,1234,2,1234,179900 35 | 5,1700,2,1398,165500 36 | 9,1561,2,1561,277500 37 | 8,2452,3,1117,309000 38 | 5,1097,2,1097,145000 39 | 5,1297,2,1297,153000 40 | 5,1057,1,1057,109000 41 | 4,1152,0,0,82000 42 | 6,1324,2,1088,160000 43 | 5,1328,1,1350,170000 44 | 5,884,2,840,144000 45 | 5,938,1,938,130250 46 | 5,1150,1,1150,141000 47 | 9,1752,2,1752,319900 48 | 7,2149,2,1434,239686 49 | 8,1656,3,1656,249700 50 | 4,1452,0,736,113000 51 | 5,955,1,955,127000 52 | 6,1470,2,794,177000 53 | 6,1176,2,816,114500 54 | 5,816,2,816,110000 55 | 9,1842,3,1842,385000 56 | 5,1360,2,384,130000 57 | 6,1425,2,1425,180500 58 | 8,1739,2,970,172500 59 | 7,1720,2,860,196500 60 | 10,2945,3,1410,438780 61 | 5,780,1,780,124900 62 | 6,1158,2,1158,158000 63 | 5,1111,1,530,101000 64 | 8,1370,2,1370,202500 65 | 7,1710,2,576,140000 66 | 7,2034,2,1057,219500 67 | 8,2473,3,1143,317000 68 | 7,2207,2,1947,180000 69 | 7,1479,2,1453,226000 70 | 4,747,1,747,80000 71 | 7,2287,2,1304,225000 72 | 7,2223,2,2223,244000 73 | 4,845,2,845,129500 74 | 7,1718,2,832,185000 75 | 5,1086,2,1086,144900 76 | 3,1605,1,840,107400 77 | 4,988,1,462,91000 78 | 4,952,1,952,135750 79 | 5,1285,1,672,127000 80 | 4,1768,0,1768,136500 81 | 5,1230,2,440,110000 82 | 6,2142,1,896,193500 83 | 6,1337,2,1237,153500 84 | 8,1563,3,1563,245000 85 | 5,1065,2,1065,126500 86 | 7,1474,2,384,168500 87 | 8,2417,2,1288,260000 88 | 6,1560,2,684,174000 89 | 6,1224,2,612,164500 90 | 3,1526,0,1013,85000 91 | 4,990,0,990,123600 92 | 4,1040,2,0,109900 93 | 5,1235,2,1235,98600 94 | 5,964,2,876,163500 95 | 6,2291,2,1214,133900 96 | 6,1786,2,824,204750 97 | 6,1470,2,680,185000 98 | 7,1588,2,1588,214000 99 | 4,960,1,960,94750 100 | 5,835,1,458,83000 101 | 4,1225,0,950,128950 102 | 6,1610,2,1610,205000 103 | 6,1732,2,741,178000 104 | 5,1535,2,0,118964 105 | 7,1226,3,1226,198900 106 | 7,1818,1,1040,169500 107 | 8,1992,2,1053,250000 108 | 4,1047,1,641,100000 109 | 5,789,1,789,115000 110 | 5,1517,0,793,115000 111 | 6,1844,2,1844,190000 112 | 6,1855,1,994,136900 113 | 7,1430,2,384,180000 114 | 7,2696,3,1264,383970 115 | 6,2259,2,1809,217000 116 | 6,2320,1,1028,259500 117 | 6,1458,2,729,176000 118 | 5,1092,1,1092,139000 119 | 5,1125,2,1125,155000 120 | 7,3222,3,1673,320000 121 | 6,1456,2,728,163990 122 | 6,988,2,938,180000 123 | 4,1123,1,732,100000 124 | 6,1080,1,1080,136000 125 | 6,1199,2,1199,153900 126 | 6,1586,2,1362,181000 127 | 
6,754,0,520,84500 128 | 6,958,2,1078,128000 129 | 5,840,0,672,87000 130 | 6,1348,2,660,155000 131 | 5,1053,2,1008,150000 132 | 7,2157,2,924,226000 133 | 6,2054,2,992,244000 134 | 5,1327,2,1063,150750 135 | 8,1296,2,1267,220000 136 | 5,1721,2,1461,180000 137 | 7,1682,2,1304,174000 138 | 5,1214,1,1214,143000 139 | 7,1959,3,1907,171000 140 | 8,1852,3,1004,230000 141 | 6,1764,2,928,231500 142 | 4,864,0,864,115000 143 | 7,1734,2,1734,260000 144 | 5,1385,2,910,166000 145 | 7,1501,2,1490,204000 146 | 5,1728,2,1728,125000 147 | 6,1709,2,970,130000 148 | 5,875,1,715,105000 149 | 7,2035,2,884,222500 150 | 7,1080,0,1080,141000 151 | 5,1344,1,896,115000 152 | 5,969,2,969,122000 153 | 8,1710,3,1710,372402 154 | 6,1993,2,825,190000 155 | 6,1252,2,1602,235000 156 | 6,1200,1,1200,125000 157 | 6,1096,0,572,79000 158 | 5,1040,2,0,109500 159 | 8,1968,3,774,269500 160 | 7,1947,2,991,254900 161 | 7,2462,2,1392,320000 162 | 6,1232,2,1232,162500 163 | 9,2668,3,1572,412500 164 | 7,1541,2,1541,220000 165 | 4,882,0,882,103200 166 | 6,1616,1,1149,152000 167 | 5,1355,0,644,127500 168 | 5,1867,1,1617,190000 169 | 8,2161,3,1582,325624 170 | 7,1720,2,840,183500 171 | 8,1707,2,1686,228000 172 | 5,1382,2,720,128500 173 | 6,1656,2,1080,215000 174 | 7,1767,2,1064,239000 175 | 6,1362,2,1362,163000 176 | 6,1651,2,1606,184000 177 | 6,2158,2,1202,243000 178 | 6,2060,2,1151,211000 179 | 5,1920,2,1052,172500 180 | 9,2234,3,2216,501837 181 | 5,968,2,968,100000 182 | 6,1525,2,756,177000 183 | 7,1802,1,793,200100 184 | 5,1340,1,0,120000 185 | 7,2082,2,1362,200000 186 | 5,1252,2,504,127000 187 | 10,3608,3,1107,475000 188 | 7,1217,2,1188,173000 189 | 5,1656,1,660,135000 190 | 5,1224,2,1086,153337 191 | 8,1593,2,1593,286000 192 | 8,2727,2,853,315000 193 | 7,1479,2,725,184000 194 | 7,1431,2,1431,192000 195 | 7,1709,2,970,130000 196 | 5,864,1,864,127000 197 | 6,1456,2,855,148500 198 | 7,1726,3,1726,311872 199 | 8,3112,2,1360,235000 200 | 6,2229,0,755,104000 201 | 8,1713,3,1713,274900 202 | 4,1121,2,1121,140000 203 | 6,1279,2,1196,171500 204 | 6,1310,1,617,112000 205 | 6,848,2,848,149000 206 | 5,1284,1,720,110000 207 | 7,1442,2,1424,180500 208 | 5,1696,1,1140,143900 209 | 4,1100,1,1100,141000 210 | 7,2062,2,1157,277000 211 | 6,1092,2,1092,145000 212 | 5,864,0,864,98000 213 | 6,1212,2,1212,186000 214 | 7,1852,2,900,252678 215 | 5,990,2,990,156000 216 | 6,1392,1,689,161750 217 | 5,1236,1,1070,134450 218 | 7,1436,2,1436,210000 219 | 4,1328,1,686,107000 220 | 7,1954,2,798,311500 221 | 7,1248,2,1248,167240 222 | 7,1498,2,1498,204900 223 | 6,2267,2,1010,200000 224 | 6,1552,2,713,179900 225 | 4,864,2,864,97000 226 | 10,2392,3,2392,386250 227 | 5,1302,1,630,112000 228 | 7,2520,3,1203,290000 229 | 6,987,1,483,106000 230 | 5,912,1,912,125000 231 | 7,1555,2,1373,192500 232 | 6,1194,1,1194,148000 233 | 8,2794,3,1462,403000 234 | 6,987,1,483,94500 235 | 5,894,1,894,128200 236 | 6,1960,2,860,216500 237 | 6,987,1,483,89500 238 | 7,1414,2,1414,185500 239 | 7,1744,2,996,194500 240 | 8,1694,3,1694,318000 241 | 6,1487,1,735,113000 242 | 8,1566,2,1566,262500 243 | 5,866,0,686,110500 244 | 5,1440,1,540,79000 245 | 6,1217,1,626,120000 246 | 7,2110,2,948,205000 247 | 7,1872,2,1845,241500 248 | 6,1928,1,1020,137000 249 | 6,1375,2,1367,140000 250 | 7,1668,2,840,180000 251 | 6,2144,2,1444,277000 252 | 3,1306,0,728,76500 253 | 8,1625,2,1573,235000 254 | 6,1640,2,798,173000 255 | 6,1302,1,1302,158000 256 | 5,1314,1,1314,145000 257 | 7,2291,2,975,230000 258 | 6,1728,2,864,207500 259 | 7,1604,2,1604,220000 260 | 7,1792,2,963,231500 261 | 5,882,1,0,97000 262 | 
6,1382,2,1362,176000 263 | 8,2574,3,1482,276000 264 | 6,1212,2,506,151000 265 | 5,1316,2,926,130000 266 | 5,764,2,680,73000 267 | 6,1422,2,1422,175500 268 | 6,1511,2,802,185000 269 | 5,2192,1,720,179500 270 | 5,778,1,740,120500 271 | 6,1113,1,1143,148000 272 | 8,1939,3,1095,266000 273 | 7,1363,2,1385,241500 274 | 8,2270,3,1152,290000 275 | 6,1632,1,1240,139000 276 | 5,816,1,816,124500 277 | 7,1548,2,952,205000 278 | 7,1560,2,1560,201000 279 | 4,864,2,864,141000 280 | 9,2121,3,2121,415298 281 | 7,2022,2,1160,192000 282 | 7,1982,2,807,228500 283 | 6,1262,2,1262,185000 284 | 7,1314,2,1314,207500 285 | 8,1468,3,1468,244600 286 | 6,1575,2,1575,179200 287 | 7,1250,2,625,164700 288 | 6,1734,2,912,159000 289 | 4,858,0,858,88000 290 | 5,900,1,882,122000 291 | 6,1396,1,698,153575 292 | 8,1919,2,1079,233230 293 | 5,1716,0,780,135900 294 | 5,1716,1,768,131000 295 | 7,2263,2,795,235000 296 | 6,1644,2,1416,167000 297 | 6,1003,2,1003,142500 298 | 5,1558,1,910,152000 299 | 7,1950,2,975,239000 300 | 6,1743,2,702,175000 301 | 6,1152,1,1092,158500 302 | 5,1336,1,1165,157000 303 | 8,2452,2,1028,267000 304 | 7,1541,3,1541,205000 305 | 5,894,2,894,149900 306 | 7,3493,3,1470,295000 307 | 8,2000,3,2000,305900 308 | 7,2243,3,700,225000 309 | 6,1406,0,319,89500 310 | 4,861,2,861,82500 311 | 9,1944,3,1896,360000 312 | 6,1501,2,697,165600 313 | 6,972,1,972,132000 314 | 5,1118,2,793,119900 315 | 7,2036,2,2136,375000 316 | 7,1641,2,728,178000 317 | 7,1432,2,716,188500 318 | 7,2353,2,845,260000 319 | 8,1959,3,1088,270000 320 | 7,2646,3,1347,260000 321 | 7,1472,2,1372,187500 322 | 9,2596,3,1249,342643 323 | 8,2468,3,1136,354000 324 | 7,2730,2,1502,301000 325 | 3,1163,1,1162,126175 326 | 7,2978,2,710,242000 327 | 5,803,2,720,87000 328 | 8,1719,2,1719,324000 329 | 6,1383,1,1383,145250 330 | 6,2134,2,844,214500 331 | 5,1192,1,596,78000 332 | 5,1728,1,1728,119000 333 | 5,1056,1,1056,139000 334 | 8,1629,3,3206,284000 335 | 7,1358,2,1358,207000 336 | 6,1638,2,943,192000 337 | 5,1786,2,1499,228950 338 | 9,1922,3,1922,377426 339 | 7,1536,2,1536,214000 340 | 7,1621,2,1208,202500 341 | 6,1215,1,1215,155000 342 | 8,1908,2,967,202900 343 | 4,841,1,721,82000 344 | 3,1040,2,0,87500 345 | 9,1684,2,1684,266000 346 | 5,1112,1,536,85000 347 | 6,1577,1,972,140200 348 | 6,958,1,958,151500 349 | 6,1478,2,1478,157500 350 | 7,1626,2,764,154000 351 | 9,2728,3,1848,437154 352 | 9,1869,2,1869,318061 353 | 7,1453,2,1453,190000 354 | 5,1111,1,616,95000 355 | 6,720,2,624,105900 356 | 6,1595,1,940,140000 357 | 6,1200,2,1200,177500 358 | 6,1167,2,1158,173000 359 | 5,1142,2,1142,134000 360 | 5,1352,1,1062,130000 361 | 8,1924,2,1086,280000 362 | 6,912,2,888,156000 363 | 5,1505,1,883,145000 364 | 7,1922,2,0,198500 365 | 6,987,1,483,118000 366 | 6,1574,2,796,190000 367 | 5,1344,1,672,147000 368 | 6,1394,2,1394,159000 369 | 6,1431,1,1099,165000 370 | 5,1268,1,1268,132000 371 | 5,1287,2,1063,162000 372 | 6,1664,2,953,172400 373 | 4,1588,2,0,134432 374 | 6,752,1,744,125000 375 | 5,1319,1,608,123000 376 | 7,1928,2,847,219500 377 | 1,904,0,683,61000 378 | 5,914,2,870,148000 379 | 8,2466,2,1580,340000 380 | 9,1856,3,1856,394432 381 | 6,1800,2,982,179000 382 | 5,1691,1,1026,127000 383 | 7,1301,2,1293,187750 384 | 7,1797,2,939,213500 385 | 6,784,2,784,76000 386 | 6,1953,2,1580,240000 387 | 8,1269,2,1256,192000 388 | 5,1184,0,658,81000 389 | 6,1125,1,1041,125000 390 | 7,1479,2,1468,191000 391 | 10,2332,3,1682,426000 392 | 5,1367,1,861,119000 393 | 6,1961,2,804,215000 394 | 5,882,1,0,106500 395 | 4,788,0,788,100000 396 | 5,1034,1,735,109000 397 | 
5,1144,1,1144,129000 398 | 5,894,2,894,123000 399 | 5,1812,1,864,169500 400 | 5,1077,1,961,67000 401 | 7,1550,2,1092,241000 402 | 8,1288,2,1260,245500 403 | 7,1310,2,1310,164990 404 | 5,672,1,672,108000 405 | 8,2263,2,1141,258000 406 | 6,1572,2,806,168000 407 | 4,1620,2,1281,150000 408 | 6,1639,1,1064,115000 409 | 6,1680,1,840,177000 410 | 7,2172,3,1063,280000 411 | 8,2078,3,1034,339750 412 | 5,1276,1,1276,60000 413 | 5,1056,2,1056,145000 414 | 7,1478,2,1470,222000 415 | 5,1028,2,1008,115000 416 | 7,2097,3,1080,228000 417 | 7,1340,2,1340,181134 418 | 6,1400,2,672,149500 419 | 6,2624,3,1370,239000 420 | 5,1134,1,756,126000 421 | 5,1056,1,1056,142000 422 | 7,1344,4,1344,206300 423 | 6,1602,2,1602,215000 424 | 5,988,2,988,113000 425 | 8,2630,3,1470,315000 426 | 6,1196,1,1196,139000 427 | 7,1389,1,651,135000 428 | 7,1644,2,1518,275000 429 | 4,907,1,907,109008 430 | 7,1208,2,1208,195400 431 | 6,1412,2,1392,175000 432 | 6,987,1,483,85400 433 | 6,1198,0,901,79900 434 | 5,1365,2,765,122500 435 | 6,1604,2,926,181000 436 | 4,630,0,630,81000 437 | 7,1661,2,799,212000 438 | 6,1118,2,648,116000 439 | 6,904,1,884,119000 440 | 5,694,1,440,90350 441 | 6,1196,2,684,110000 442 | 10,2402,3,3094,555000 443 | 4,1440,0,1440,118000 444 | 5,1573,2,1078,162900 445 | 7,1258,3,1258,172500 446 | 7,1908,2,915,210000 447 | 6,1689,2,1436,127500 448 | 6,1888,2,1517,190000 449 | 7,1886,2,930,199900 450 | 6,1376,1,780,119500 451 | 3,1183,1,649,120000 452 | 6,813,1,813,110000 453 | 7,1533,2,1533,280000 454 | 6,1756,2,872,204000 455 | 8,1590,2,768,210000 456 | 5,1728,2,1728,188000 457 | 7,1242,2,1242,175500 458 | 5,1344,3,624,98000 459 | 4,1663,2,1364,256000 460 | 8,1666,1,588,161000 461 | 5,1203,1,709,110000 462 | 8,1935,2,832,263435 463 | 7,1135,2,560,155000 464 | 5,864,1,864,62383 465 | 6,1660,1,715,188700 466 | 5,1040,0,1040,124000 467 | 7,1414,2,1375,178740 468 | 7,1277,2,1277,167000 469 | 5,1644,1,728,146500 470 | 8,1634,3,1626,250000 471 | 6,1710,2,832,187000 472 | 8,1502,2,1488,212000 473 | 7,1969,2,808,190000 474 | 6,1072,2,547,148000 475 | 8,1976,3,1976,440000 476 | 8,1652,2,1494,251000 477 | 5,970,2,970,132500 478 | 6,1493,2,1478,208900 479 | 9,2643,3,2153,380000 480 | 8,1718,3,1705,297000 481 | 4,1131,2,907,89471 482 | 9,1850,3,1833,326000 483 | 9,1792,3,1792,374000 484 | 7,1826,1,910,155000 485 | 6,1216,2,1216,164000 486 | 5,999,1,999,132500 487 | 5,1113,1,1113,147000 488 | 5,1073,2,1073,156000 489 | 5,1484,2,1484,175000 490 | 5,2414,2,954,160000 491 | 4,630,1,630,86000 492 | 5,1304,1,264,115000 493 | 6,1578,1,806,133000 494 | 6,1456,2,728,172785 495 | 5,1269,1,1269,155000 496 | 5,886,1,190,91300 497 | 4,720,0,720,34900 498 | 8,3228,2,3200,430000 499 | 7,1820,1,1026,184000 500 | 5,899,1,864,130000 501 | 5,912,1,912,120000 502 | 6,1218,1,672,113000 503 | 7,1768,2,866,226700 504 | 5,1214,2,1214,140000 505 | 7,1801,2,1501,289000 506 | 6,1322,2,855,147000 507 | 5,1960,2,960,124500 508 | 8,1911,2,777,215000 509 | 6,1218,2,1218,208300 510 | 7,1378,2,689,161000 511 | 5,1041,1,1041,124500 512 | 5,1363,1,1008,164900 513 | 7,1368,2,1368,202665 514 | 5,864,2,864,129900 515 | 6,1080,2,1084,134000 516 | 5,789,1,768,96500 517 | 10,2020,3,2006,402861 518 | 6,2119,2,689,158000 519 | 7,2344,3,1264,265000 520 | 6,1796,2,794,211000 521 | 7,2080,1,1276,234000 522 | 4,1294,0,0,106250 523 | 6,1244,1,1244,150000 524 | 6,1664,2,1004,159000 525 | 10,4676,3,3138,184750 526 | 7,2398,3,1379,315750 527 | 7,1266,2,1257,176000 528 | 5,928,1,928,132000 529 | 9,2713,3,1452,446261 530 | 4,605,0,528,86000 531 | 6,2515,2,2035,200624 532 | 
6,1509,2,1461,175000 533 | 6,1362,2,611,128000 534 | 5,827,1,0,107500 535 | 1,334,0,0,39300 536 | 8,1414,2,707,178000 537 | 5,1347,0,1117,107500 538 | 7,1724,2,880,188000 539 | 4,864,2,864,111250 540 | 5,1159,1,1051,158000 541 | 8,1601,2,1581,272000 542 | 9,1838,3,1838,315000 543 | 8,2285,3,969,248000 544 | 7,1680,2,1650,213250 545 | 7,767,1,723,133000 546 | 7,1496,2,654,179665 547 | 7,2183,3,1204,229000 548 | 6,1635,2,1065,210000 549 | 5,768,2,768,129500 550 | 5,825,2,825,125000 551 | 7,2094,2,912,263000 552 | 6,1069,2,1069,140000 553 | 5,928,1,928,112500 554 | 8,1717,3,1709,255500 555 | 4,1126,2,0,108000 556 | 7,2046,3,998,284000 557 | 5,1048,1,993,113000 558 | 5,1092,1,1092,141000 559 | 4,1336,1,637,108000 560 | 7,1446,2,729,175000 561 | 7,1557,2,1374,234000 562 | 5,1392,2,1392,121500 563 | 5,1389,2,1389,170000 564 | 5,996,0,996,108000 565 | 6,1674,2,1163,185000 566 | 7,2295,2,1095,268000 567 | 6,1647,1,806,128000 568 | 9,2504,3,1122,325000 569 | 7,1535,2,1517,214000 570 | 8,2132,2,1496,316600 571 | 5,943,2,943,135960 572 | 5,1728,2,1728,142600 573 | 6,864,1,864,120000 574 | 7,1692,2,846,224500 575 | 7,1430,2,384,170000 576 | 5,1109,1,372,139000 577 | 5,1216,1,832,118500 578 | 7,1477,1,861,145000 579 | 5,1320,2,1164,164500 580 | 7,1392,2,689,146000 581 | 5,1795,1,1050,131500 582 | 6,1429,2,1144,181900 583 | 8,2042,3,2042,253293 584 | 6,816,0,816,118500 585 | 10,2775,2,1237,325000 586 | 4,1573,1,884,133000 587 | 8,2028,3,1868,369900 588 | 6,838,1,816,130000 589 | 5,860,2,840,137000 590 | 5,1473,1,1437,143000 591 | 5,935,1,742,79500 592 | 7,1582,2,770,185900 593 | 10,2296,3,1722,451950 594 | 5,816,2,816,138000 595 | 6,848,2,848,140000 596 | 5,924,1,924,110000 597 | 8,1826,3,1814,319000 598 | 6,1368,1,684,114504 599 | 7,1402,3,1258,194201 600 | 5,1647,2,1430,217500 601 | 6,1556,2,716,151000 602 | 8,1904,2,1058,275000 603 | 6,1375,1,780,141000 604 | 8,1915,2,908,220000 605 | 7,1200,2,600,151000 606 | 7,1494,2,1494,221000 607 | 7,1986,2,768,205000 608 | 5,1040,2,1040,152000 609 | 5,2008,1,896,225000 610 | 8,3194,2,965,359100 611 | 4,1029,1,1029,118500 612 | 9,2153,3,1440,313000 613 | 6,1032,2,1032,148000 614 | 8,1872,2,1299,261500 615 | 5,1120,0,1120,147000 616 | 4,630,0,630,75500 617 | 6,1054,2,936,137500 618 | 6,1509,2,783,183200 619 | 6,832,2,832,105500 620 | 9,1828,3,1822,314813 621 | 8,2262,3,1482,305000 622 | 3,864,0,864,67000 623 | 6,2614,2,1522,240000 624 | 5,980,2,980,135000 625 | 6,1512,2,756,168500 626 | 6,1790,2,732,165150 627 | 6,1116,2,1116,160000 628 | 5,1422,1,978,139900 629 | 6,1520,1,1156,153000 630 | 5,2080,2,1040,135000 631 | 6,1350,2,1248,168500 632 | 5,1750,1,636,124000 633 | 8,1554,2,1554,209500 634 | 7,1411,2,1386,82500 635 | 5,1056,1,1056,139400 636 | 6,1056,2,1056,144000 637 | 6,3395,0,1440,200000 638 | 2,800,0,264,60000 639 | 5,1387,1,811,93000 640 | 5,796,0,796,85000 641 | 8,1567,3,1520,264561 642 | 8,1518,2,1518,274000 643 | 7,1929,2,1057,226000 644 | 8,2704,2,1952,345000 645 | 5,1620,2,780,152000 646 | 9,1766,3,1766,370878 647 | 6,981,2,981,143250 648 | 5,1048,2,0,98300 649 | 6,1094,2,1094,155000 650 | 6,1839,2,756,155000 651 | 4,630,0,630,84500 652 | 7,1665,2,813,205950 653 | 4,1510,1,755,108000 654 | 7,1716,2,880,191000 655 | 6,1469,1,756,135000 656 | 8,2113,3,2109,350000 657 | 6,1092,1,525,88000 658 | 5,1053,1,1053,145500 659 | 7,1502,1,776,149000 660 | 6,1458,1,912,97500 661 | 5,1486,2,1486,167000 662 | 7,1935,2,793,197900 663 | 8,2448,3,1629,402000 664 | 6,1392,2,1392,110000 665 | 4,1181,2,1138,137500 666 | 8,2097,3,2077,423000 667 | 8,1936,2,1406,230500 
668 | 6,2380,2,1021,129000 669 | 6,1679,2,1408,193500 670 | 5,1437,2,1188,168000 671 | 4,1180,1,700,137500 672 | 6,1476,2,738,173500 673 | 6,1369,1,672,103600 674 | 6,1208,2,1208,165000 675 | 6,1839,2,1477,257500 676 | 6,1136,1,1136,140000 677 | 6,1441,2,855,148500 678 | 4,1774,3,1095,87000 679 | 5,792,1,768,109500 680 | 8,2046,3,2046,372500 681 | 5,988,2,988,128500 682 | 6,923,1,923,143000 683 | 5,1520,1,793,159434 684 | 6,1291,2,1291,173000 685 | 9,1668,3,1626,285000 686 | 7,1839,2,1195,221000 687 | 7,2090,2,1190,207500 688 | 7,1761,2,874,227875 689 | 7,1102,2,551,148800 690 | 8,1419,2,1419,392000 691 | 6,1362,2,1362,194700 692 | 6,848,2,848,141000 693 | 10,4316,3,2444,755000 694 | 7,2519,2,1210,335000 695 | 5,1073,1,1073,108480 696 | 5,1539,2,927,141500 697 | 6,1137,2,1112,176000 698 | 5,616,1,616,89000 699 | 5,1148,1,980,123500 700 | 5,894,1,894,138500 701 | 7,1391,2,1391,196000 702 | 8,1800,3,1800,312500 703 | 7,1164,2,1164,140000 704 | 8,2576,3,1234,361919 705 | 5,1812,2,360,140000 706 | 7,1484,2,1473,213000 707 | 4,1092,0,0,55000 708 | 7,1824,2,1643,302000 709 | 8,1324,2,1324,254000 710 | 7,1456,2,728,179540 711 | 5,904,1,876,109900 712 | 3,729,0,270,52000 713 | 4,1178,1,859,102776 714 | 8,1228,2,1228,189000 715 | 4,960,2,960,129000 716 | 6,1479,2,725,130500 717 | 6,1350,2,1064,165000 718 | 7,2554,2,718,159500 719 | 5,1178,2,1176,157000 720 | 7,2418,3,1311,341000 721 | 5,971,1,971,128500 722 | 8,1742,2,1742,275000 723 | 6,848,2,848,143000 724 | 4,864,2,864,124500 725 | 4,1470,1,941,135000 726 | 9,1698,3,1698,320000 727 | 4,864,2,864,120500 728 | 6,1680,2,880,222000 729 | 7,1232,2,1232,194500 730 | 5,1776,3,1584,110000 731 | 4,1208,2,780,103000 732 | 8,1616,2,1595,236500 733 | 7,1146,2,868,187500 734 | 7,2031,2,1153,222500 735 | 5,1144,1,864,131400 736 | 5,948,1,948,108000 737 | 7,1768,2,880,163000 738 | 3,1040,2,0,93500 739 | 8,1801,3,893,239900 740 | 5,1200,0,1200,179000 741 | 7,1728,2,864,190000 742 | 5,1432,2,264,132000 743 | 6,912,1,912,142000 744 | 7,1349,2,1349,179000 745 | 5,1464,2,520,175000 746 | 8,1337,2,1337,180000 747 | 8,2715,2,1142,299800 748 | 7,2256,2,952,236000 749 | 7,2640,4,1240,265979 750 | 7,1720,2,1720,260400 751 | 4,1529,1,0,98000 752 | 4,1140,0,576,96500 753 | 7,1320,2,660,162000 754 | 6,1494,2,1479,217000 755 | 8,2098,3,1030,275500 756 | 6,1026,2,1026,156000 757 | 6,1471,2,729,172500 758 | 8,1768,2,866,212000 759 | 6,1386,2,672,158900 760 | 7,1501,2,744,179400 761 | 8,2531,3,1318,290000 762 | 6,864,1,864,127500 763 | 5,1301,2,1145,100000 764 | 7,1547,2,756,215200 765 | 8,2365,3,1252,337000 766 | 8,1494,2,1494,270000 767 | 9,1506,2,1498,264132 768 | 7,1714,2,980,196500 769 | 6,1750,1,983,160000 770 | 7,1836,2,1860,216837 771 | 8,3279,3,1650,538000 772 | 5,858,2,858,134900 773 | 4,1220,2,836,102000 774 | 6,1117,2,1029,107000 775 | 5,912,1,912,114500 776 | 8,1973,3,1935,395000 777 | 6,1204,2,1204,162000 778 | 7,1614,3,1614,221500 779 | 5,894,2,864,142500 780 | 5,2020,2,0,144000 781 | 6,1004,2,975,135000 782 | 7,1253,2,1237,176000 783 | 6,1603,2,761,175900 784 | 7,1430,2,1413,187100 785 | 5,1110,2,1097,165500 786 | 6,1484,0,742,128000 787 | 6,1342,2,1372,161500 788 | 5,1652,1,686,139000 789 | 7,2084,2,956,233000 790 | 4,901,1,901,107900 791 | 6,2087,2,832,187500 792 | 7,1145,2,1145,160200 793 | 6,1062,2,1029,146800 794 | 7,2013,2,1117,269790 795 | 8,1496,2,1496,225000 796 | 7,1895,2,712,194500 797 | 6,1564,2,650,171000 798 | 6,1285,2,660,143500 799 | 5,773,1,773,110000 800 | 9,3140,3,1926,485000 801 | 5,1768,1,731,175000 802 | 6,1688,2,616,200000 803 | 
4,1196,2,1196,109900 804 | 7,1456,2,728,189000 805 | 9,2822,3,1734,582933 806 | 5,1128,1,936,118000 807 | 7,1428,2,1417,227680 808 | 5,980,1,980,135500 809 | 5,1576,2,1324,223500 810 | 5,1086,2,1024,159950 811 | 5,2138,2,849,106000 812 | 6,1309,2,1040,181000 813 | 6,848,2,848,144500 814 | 5,1044,2,540,55993 815 | 6,1442,1,1442,157900 816 | 5,1250,1,686,116000 817 | 7,1661,2,1649,224900 818 | 5,1008,1,1008,137000 819 | 8,1689,3,1568,271000 820 | 6,1052,2,1010,155000 821 | 7,1358,2,1358,224000 822 | 7,1640,2,798,183000 823 | 4,936,2,936,93000 824 | 7,1733,2,847,225000 825 | 6,1489,1,778,139500 826 | 8,1489,2,1489,232600 827 | 10,2084,3,2078,385000 828 | 5,784,0,784,109500 829 | 7,1434,2,1454,189000 830 | 5,2126,2,1013,185000 831 | 7,1223,2,600,147400 832 | 6,1392,2,1392,166000 833 | 7,1200,2,600,151000 834 | 7,1829,2,941,237000 835 | 6,1516,2,1516,167000 836 | 5,1144,2,1144,139950 837 | 4,1067,2,1067,128000 838 | 5,1559,2,1559,153500 839 | 6,987,1,483,100000 840 | 5,1099,1,1099,144000 841 | 5,1200,1,768,130500 842 | 6,1482,1,672,140000 843 | 5,1539,2,650,157500 844 | 6,1165,2,1127,174900 845 | 5,1800,0,1800,141000 846 | 5,1416,3,876,153900 847 | 5,1701,2,1390,171000 848 | 7,1775,2,740,213000 849 | 5,864,1,864,133500 850 | 6,2358,2,907,240000 851 | 6,1855,2,528,187000 852 | 6,848,2,848,131500 853 | 8,1456,2,1273,215000 854 | 7,1646,1,918,164000 855 | 6,1445,2,1127,158000 856 | 5,1779,2,1763,170000 857 | 5,1040,1,1040,127000 858 | 6,1026,2,940,147000 859 | 6,1481,2,702,174000 860 | 7,1370,2,1090,152000 861 | 6,2654,2,1054,250000 862 | 7,1426,1,912,189950 863 | 5,1039,2,1039,131500 864 | 6,1097,2,1040,152000 865 | 5,1148,1,1148,132500 866 | 7,1372,2,1372,250580 867 | 5,1002,2,1002,148500 868 | 8,1646,3,1638,248900 869 | 4,1120,2,1040,129000 870 | 5,2320,2,0,169000 871 | 7,1949,2,1050,236000 872 | 5,894,1,894,109500 873 | 6,1682,2,804,200500 874 | 5,910,2,105,116000 875 | 5,1268,1,832,133000 876 | 5,1131,1,676,66500 877 | 8,2610,2,1184,303477 878 | 4,1040,2,1064,132250 879 | 9,2224,3,1462,350000 880 | 5,1155,2,1109,148000 881 | 5,864,1,864,136500 882 | 5,1090,2,1090,157000 883 | 7,1717,2,1156,187500 884 | 6,1593,2,808,178000 885 | 4,2230,1,795,118500 886 | 5,892,1,892,100000 887 | 9,1709,2,1698,328900 888 | 5,1712,2,1626,145000 889 | 5,1393,1,816,135500 890 | 8,2217,2,2217,268000 891 | 6,1505,2,1505,149500 892 | 5,924,2,672,122900 893 | 6,1683,2,918,172500 894 | 6,1068,1,1059,154500 895 | 5,1383,1,1383,165000 896 | 5,1535,2,0,118858 897 | 6,1796,2,780,140000 898 | 4,951,1,951,106500 899 | 5,2240,2,0,142953 900 | 9,2364,3,2330,611657 901 | 5,1236,1,912,135000 902 | 4,858,1,858,110000 903 | 5,1306,1,992,153000 904 | 7,1509,2,783,180000 905 | 7,1670,3,1670,240000 906 | 5,902,1,876,125500 907 | 5,1063,1,1056,128000 908 | 8,1636,3,1623,255000 909 | 7,2057,1,1017,250000 910 | 5,902,2,864,131000 911 | 6,1484,2,742,174000 912 | 5,2274,2,1105,154300 913 | 5,1268,1,1268,143500 914 | 5,1015,1,768,88000 915 | 5,2002,3,1001,145000 916 | 6,1224,2,612,173733 917 | 4,1092,1,546,75000 918 | 2,480,1,480,35311 919 | 4,1229,1,1134,135000 920 | 7,2127,3,1104,238000 921 | 6,1414,2,1184,176500 922 | 6,1721,2,928,201000 923 | 5,2200,0,1272,145900 924 | 6,1316,2,1316,169990 925 | 6,1617,2,1604,193000 926 | 6,1686,2,1686,207500 927 | 5,1126,2,1126,175000 928 | 8,2374,3,1181,285000 929 | 7,1978,2,832,176000 930 | 8,1788,2,1753,236500 931 | 7,2236,2,964,222000 932 | 8,1466,3,1466,201000 933 | 5,925,1,925,117500 934 | 9,1905,3,1905,320000 935 | 7,1500,2,1500,190000 936 | 7,2069,2,585,242000 937 | 4,747,2,600,79900 938 | 
7,1200,2,1176,184900 939 | 7,1971,2,1113,253000 940 | 7,1962,3,1391,239799 941 | 7,2403,1,1032,244400 942 | 6,1728,2,1728,150900 943 | 7,2060,2,992,214000 944 | 4,1440,0,1440,150000 945 | 5,1632,2,1632,143000 946 | 6,1344,2,819,137500 947 | 5,1869,2,1088,124900 948 | 5,1144,1,1144,143000 949 | 8,1629,3,1616,270000 950 | 7,1776,2,936,192500 951 | 6,1381,2,1161,197500 952 | 5,864,2,864,129000 953 | 5,965,1,828,119900 954 | 5,768,1,768,133900 955 | 5,1968,2,784,172000 956 | 6,980,0,945,127500 957 | 6,1958,2,979,145000 958 | 6,1229,2,561,124000 959 | 5,1057,2,1057,132000 960 | 7,1337,2,1337,185000 961 | 7,1416,2,696,155000 962 | 5,858,0,858,116500 963 | 6,2872,2,1330,272000 964 | 6,1548,2,804,155000 965 | 9,1800,2,1800,239000 966 | 7,1894,2,817,214900 967 | 6,1484,2,783,178900 968 | 5,1308,1,728,160000 969 | 5,1098,1,1098,135000 970 | 3,968,0,600,37900 971 | 6,1095,1,588,140000 972 | 4,1192,0,720,135000 973 | 7,1626,2,764,173000 974 | 6,918,1,918,99500 975 | 7,1428,2,1428,182000 976 | 7,2019,2,728,167500 977 | 7,1382,2,673,165000 978 | 4,869,0,440,85500 979 | 7,1241,2,1241,199900 980 | 4,894,2,894,110000 981 | 5,1121,2,1121,139000 982 | 7,999,2,944,178400 983 | 8,2612,3,1225,336000 984 | 7,1266,2,1266,159895 985 | 8,2290,2,1128,255900 986 | 5,1734,2,0,126000 987 | 5,1164,1,1164,125000 988 | 6,1635,1,485,117000 989 | 9,1940,3,1930,395192 990 | 6,2030,2,848,195000 991 | 7,1576,2,770,197000 992 | 8,2392,3,1396,348000 993 | 8,1742,2,916,168000 994 | 6,1851,2,822,187000 995 | 6,1500,2,750,173900 996 | 10,1718,3,1700,337500 997 | 4,1230,1,747,121600 998 | 5,1050,1,1050,136500 999 | 6,1442,2,1442,185000 1000 | 3,1077,1,1007,91000 1001 | 7,1208,2,1187,206000 1002 | 3,944,2,0,82000 1003 | 5,691,1,691,86000 1004 | 8,1574,3,1574,232000 1005 | 5,1680,2,1680,136905 1006 | 7,1504,2,1346,181000 1007 | 5,985,1,985,149900 1008 | 6,1657,2,1657,163500 1009 | 4,1092,1,546,88000 1010 | 7,1710,2,1710,240000 1011 | 5,1522,0,1008,102000 1012 | 5,1271,1,720,135000 1013 | 5,1664,0,0,100000 1014 | 6,1502,1,602,165000 1015 | 5,1022,1,1022,85000 1016 | 6,1082,1,1082,119200 1017 | 8,1665,2,810,227000 1018 | 7,1504,2,1504,203000 1019 | 8,1360,2,1220,187500 1020 | 7,1472,2,384,160000 1021 | 7,1506,2,1362,213490 1022 | 4,1132,2,1132,176000 1023 | 7,1220,2,1199,194000 1024 | 5,1248,1,912,87000 1025 | 7,1504,2,1346,191000 1026 | 8,2898,2,1565,287000 1027 | 5,882,2,882,112500 1028 | 5,1264,2,1268,167500 1029 | 8,1646,3,1638,293077 1030 | 5,1376,1,768,105000 1031 | 6,1218,1,672,118000 1032 | 5,1928,0,686,160000 1033 | 7,3082,2,824,197000 1034 | 8,2520,3,1338,310000 1035 | 7,1654,3,1654,230000 1036 | 5,954,1,920,119750 1037 | 4,845,1,0,84000 1038 | 9,1620,3,1620,315500 1039 | 8,2263,2,1055,287000 1040 | 4,1344,0,546,97000 1041 | 4,630,1,630,80000 1042 | 5,1803,2,1134,155000 1043 | 6,1632,2,800,173000 1044 | 6,1306,2,1306,196000 1045 | 7,2329,2,1475,262280 1046 | 8,2524,2,2524,278000 1047 | 3,1733,2,0,139600 1048 | 9,2868,3,1992,556581 1049 | 5,990,2,990,145000 1050 | 5,1771,2,0,115000 1051 | 4,930,1,0,84900 1052 | 7,1302,2,1302,176485 1053 | 7,1316,2,1316,200141 1054 | 6,1977,2,816,165000 1055 | 5,1526,1,1216,144500 1056 | 8,1989,2,1065,255000 1057 | 6,1523,2,1193,180000 1058 | 7,1364,2,1364,185850 1059 | 7,1850,2,973,248000 1060 | 9,2184,3,1104,335000 1061 | 6,1991,2,854,220000 1062 | 8,1338,2,1338,213500 1063 | 3,894,3,894,81000 1064 | 5,2337,2,662,90000 1065 | 6,1103,2,1103,110500 1066 | 5,1154,2,1154,154000 1067 | 7,2260,2,1306,328000 1068 | 6,1571,2,799,178000 1069 | 6,1611,2,780,167900 1070 | 6,2521,2,942,151400 1071 | 
5,893,2,845,135000 1072 | 5,1048,1,1048,135000 1073 | 6,1556,2,727,154000 1074 | 5,1456,1,810,91500 1075 | 6,1426,2,690,159500 1076 | 7,1240,3,1240,194000 1077 | 7,1740,1,800,219500 1078 | 5,1466,2,796,170000 1079 | 5,1096,1,1096,138800 1080 | 6,848,2,848,155900 1081 | 5,990,1,990,126000 1082 | 6,1258,2,1258,145000 1083 | 5,1040,1,1040,133000 1084 | 7,1459,2,1459,192000 1085 | 6,1251,1,1251,160000 1086 | 6,1498,2,691,187500 1087 | 6,996,2,936,147000 1088 | 4,1092,1,546,83500 1089 | 8,1953,3,1082,252000 1090 | 7,1709,2,970,137500 1091 | 8,1247,2,1247,197000 1092 | 3,1040,2,0,92900 1093 | 7,1252,2,600,160000 1094 | 6,1694,2,1181,136500 1095 | 5,1200,2,864,146000 1096 | 5,936,1,936,129000 1097 | 6,1314,2,1314,176432 1098 | 6,1355,0,684,127000 1099 | 8,1088,2,1074,170000 1100 | 4,1324,1,672,128000 1101 | 7,1601,2,1271,157000 1102 | 2,438,1,290,60000 1103 | 5,950,1,950,119500 1104 | 5,1134,1,1010,135000 1105 | 6,1194,2,655,159500 1106 | 5,1302,2,630,106000 1107 | 8,2622,2,1463,325000 1108 | 7,1442,2,910,179900 1109 | 7,2021,2,868,274725 1110 | 6,1690,2,924,181000 1111 | 8,1836,3,1836,280000 1112 | 6,1658,2,773,188000 1113 | 7,1964,2,803,205000 1114 | 5,816,1,816,129900 1115 | 5,1008,1,1008,134500 1116 | 5,833,1,833,117000 1117 | 8,1734,3,1734,318000 1118 | 8,1419,2,408,184100 1119 | 5,894,2,894,130000 1120 | 5,1601,1,533,140000 1121 | 5,1040,1,1040,133700 1122 | 6,1012,1,1012,118400 1123 | 7,1552,3,1552,212900 1124 | 4,960,1,672,112000 1125 | 5,698,0,698,118000 1126 | 7,1482,2,384,163900 1127 | 4,1005,2,1005,115000 1128 | 7,1555,3,1373,174000 1129 | 7,1530,3,1530,259000 1130 | 7,1959,2,847,215000 1131 | 5,936,2,936,140000 1132 | 4,1981,2,1122,135000 1133 | 5,974,0,974,93500 1134 | 6,2210,1,1008,117500 1135 | 8,2020,2,1128,239500 1136 | 6,1600,2,916,169000 1137 | 6,986,1,960,102000 1138 | 6,1252,1,1032,119000 1139 | 5,1020,0,780,94000 1140 | 6,1567,2,1567,196000 1141 | 5,1167,2,915,144000 1142 | 5,952,2,952,139000 1143 | 5,1868,2,780,197500 1144 | 8,2828,3,1466,424870 1145 | 5,1006,0,1006,80000 1146 | 4,924,1,672,80000 1147 | 5,1576,1,1042,149000 1148 | 6,1298,2,1298,180000 1149 | 7,1564,1,704,174500 1150 | 7,1111,1,572,116900 1151 | 7,1482,2,650,143000 1152 | 6,932,1,932,124000 1153 | 5,1466,2,1466,149900 1154 | 6,1811,2,1073,230000 1155 | 6,816,1,816,120500 1156 | 7,1820,2,864,201800 1157 | 5,1437,2,1437,218000 1158 | 5,1265,2,1219,179900 1159 | 7,1314,2,1314,230000 1160 | 8,1580,3,1580,235128 1161 | 6,1876,2,901,185000 1162 | 6,1456,2,855,146000 1163 | 6,1640,2,1296,224000 1164 | 5,894,2,894,129000 1165 | 4,1258,2,1198,108959 1166 | 5,1432,2,1360,194000 1167 | 7,1502,2,1502,233170 1168 | 8,1694,3,1694,245350 1169 | 6,1671,2,959,173000 1170 | 6,2108,2,1127,235000 1171 | 10,3627,3,1930,625000 1172 | 6,1118,1,1096,171000 1173 | 6,1261,2,1261,163000 1174 | 7,1250,2,625,171900 1175 | 5,3086,0,1598,200500 1176 | 6,2345,2,952,239000 1177 | 8,2872,2,1683,285000 1178 | 5,923,1,876,119500 1179 | 6,1224,1,818,115000 1180 | 5,1343,1,731,154900 1181 | 5,1124,0,0,93000 1182 | 7,2514,2,1216,250000 1183 | 8,1652,2,1600,392500 1184 | 10,4476,3,2396,745000 1185 | 5,1130,2,1120,120000 1186 | 5,1572,3,1572,186700 1187 | 5,1221,1,784,104900 1188 | 3,1699,2,978,95000 1189 | 8,1624,3,1624,262000 1190 | 7,1660,2,831,195000 1191 | 7,1804,2,994,189000 1192 | 4,1622,4,1249,168000 1193 | 8,1441,2,776,174000 1194 | 5,1472,1,702,125000 1195 | 6,1224,2,1224,165000 1196 | 6,1352,1,663,158000 1197 | 6,1456,2,728,176000 1198 | 7,1863,3,879,219210 1199 | 7,1690,1,815,144000 1200 | 7,1212,2,1212,178000 1201 | 
4,1382,2,1051,148000 1202 | 4,864,1,864,116050 1203 | 7,1779,2,866,197900 1204 | 5,1348,1,884,117000 1205 | 7,1630,2,1630,213000 1206 | 5,1074,2,1056,153500 1207 | 7,2196,3,2158,271900 1208 | 4,1056,1,1056,107000 1209 | 6,1700,2,1682,200000 1210 | 5,1283,2,931,140000 1211 | 8,1660,2,1660,290000 1212 | 6,1845,2,1055,189000 1213 | 8,1752,2,559,164000 1214 | 4,672,1,672,113000 1215 | 4,960,1,648,145000 1216 | 5,999,1,925,134500 1217 | 5,894,1,894,125000 1218 | 6,1902,2,0,112000 1219 | 8,1314,2,1300,229456 1220 | 4,912,0,0,80500 1221 | 6,1218,0,672,91500 1222 | 5,912,1,912,115000 1223 | 5,1211,1,952,134000 1224 | 6,1846,1,1040,143000 1225 | 5,2136,2,2136,137900 1226 | 7,1490,2,788,184000 1227 | 6,1138,1,588,145000 1228 | 6,1933,3,894,214000 1229 | 5,912,2,912,147000 1230 | 9,1702,3,1702,367294 1231 | 5,1507,1,1075,127000 1232 | 5,2620,2,1361,190000 1233 | 5,1190,2,1106,132500 1234 | 4,1224,2,0,101800 1235 | 5,1188,2,1188,142000 1236 | 5,1964,0,940,130000 1237 | 5,1784,1,747,138887 1238 | 7,1626,2,764,175500 1239 | 7,1948,2,847,195000 1240 | 6,1141,2,1141,142500 1241 | 8,1484,2,1476,265900 1242 | 7,1768,2,884,224900 1243 | 7,1689,3,1689,248328 1244 | 7,1173,2,1053,170000 1245 | 10,2076,3,2076,465000 1246 | 8,1517,2,792,230000 1247 | 6,1868,2,585,178000 1248 | 7,1553,2,756,186500 1249 | 6,1034,3,1012,169900 1250 | 6,2058,2,735,129500 1251 | 5,988,1,876,119000 1252 | 7,2110,2,2110,244000 1253 | 7,1405,2,1405,171750 1254 | 5,874,1,864,130000 1255 | 7,2167,2,1192,294000 1256 | 7,1656,2,746,165400 1257 | 6,1367,1,884,127500 1258 | 8,1987,2,1986,301500 1259 | 5,864,0,864,99900 1260 | 7,1166,2,856,190000 1261 | 6,1054,2,1054,151000 1262 | 6,1675,2,841,181000 1263 | 5,1050,1,1050,128900 1264 | 4,1788,1,1104,161500 1265 | 6,1824,2,764,180500 1266 | 6,1337,2,1405,181000 1267 | 7,1452,2,691,183900 1268 | 7,1889,1,925,122000 1269 | 9,2018,3,2002,378500 1270 | 8,3447,3,728,381000 1271 | 5,1524,1,874,144000 1272 | 7,1524,2,1332,260000 1273 | 6,1489,2,1489,185750 1274 | 5,935,1,935,137000 1275 | 6,1357,1,1019,177000 1276 | 5,1250,2,661,139000 1277 | 5,1920,2,928,137000 1278 | 6,1395,2,723,162000 1279 | 6,1724,2,1680,197900 1280 | 8,2031,2,1128,237000 1281 | 4,1128,2,698,68400 1282 | 7,1573,2,1573,227000 1283 | 7,1339,2,1309,180000 1284 | 5,1040,2,1040,150500 1285 | 6,1824,0,912,139000 1286 | 6,2447,1,804,169000 1287 | 6,1412,1,780,132500 1288 | 6,1328,2,1328,143000 1289 | 5,1582,2,1624,190000 1290 | 8,1659,2,1501,278000 1291 | 8,1970,3,1085,281000 1292 | 5,1152,2,1152,180500 1293 | 5,1302,1,630,119500 1294 | 5,2372,1,994,107500 1295 | 7,1664,2,832,162900 1296 | 5,864,2,864,115000 1297 | 5,1052,1,1052,138500 1298 | 5,1128,2,1120,155000 1299 | 6,1072,2,547,140000 1300 | 10,5642,2,6110,160000 1301 | 5,1246,1,1246,154000 1302 | 7,1983,2,978,225000 1303 | 6,1494,1,771,177500 1304 | 8,2526,3,1165,290000 1305 | 7,1616,3,1616,232000 1306 | 7,1708,2,976,130000 1307 | 9,1652,2,1652,325000 1308 | 7,1368,2,1368,202500 1309 | 5,990,2,990,138000 1310 | 5,1122,2,924,147000 1311 | 6,1294,2,1278,179200 1312 | 7,1902,2,1902,335000 1313 | 7,1274,2,1274,203000 1314 | 8,2810,2,1453,302000 1315 | 9,2599,3,1393,333168 1316 | 4,948,1,948,119000 1317 | 6,2112,2,952,206900 1318 | 8,1630,3,1622,295493 1319 | 7,1352,2,1352,208900 1320 | 8,1787,3,1753,275000 1321 | 4,948,1,864,111000 1322 | 6,1478,2,1478,156500 1323 | 3,720,1,0,72500 1324 | 7,1923,2,750,190000 1325 | 4,708,0,420,82500 1326 | 8,1795,3,1795,147000 1327 | 4,796,0,796,55000 1328 | 3,774,0,544,79000 1329 | 5,816,1,816,130500 1330 | 6,2792,2,1510,256000 1331 | 
7,1632,2,935,176500 1332 | 8,1588,3,1588,227000 1333 | 5,954,2,911,132500 1334 | 4,816,1,816,100000 1335 | 5,1360,1,803,125500 1336 | 5,1365,2,765,125000 1337 | 6,1334,2,1350,167900 1338 | 5,1656,2,1656,135000 1339 | 4,693,0,693,52500 1340 | 7,1861,2,916,200000 1341 | 5,864,1,864,128500 1342 | 4,872,4,858,123000 1343 | 6,1114,2,1114,155000 1344 | 8,2169,2,1284,228500 1345 | 6,1913,2,896,177000 1346 | 7,1456,2,728,155835 1347 | 4,960,1,960,108500 1348 | 7,2156,2,1568,262500 1349 | 8,1776,3,1732,283463 1350 | 7,1494,2,1482,215000 1351 | 8,2358,0,684,122000 1352 | 5,2634,4,1248,200000 1353 | 6,1716,2,858,171000 1354 | 6,1176,2,698,134900 1355 | 8,3238,3,2033,410000 1356 | 7,1865,3,992,235000 1357 | 7,1920,2,570,170000 1358 | 5,892,1,864,110000 1359 | 5,1078,2,1078,149900 1360 | 6,1573,2,756,177500 1361 | 9,1980,3,1980,315000 1362 | 5,2601,2,612,189000 1363 | 7,1530,2,1530,260000 1364 | 4,1738,1,715,104900 1365 | 6,1412,2,616,156932 1366 | 7,1200,2,600,144152 1367 | 7,1674,2,814,216000 1368 | 7,1790,2,873,193000 1369 | 5,1475,1,757,127000 1370 | 6,848,2,848,144000 1371 | 8,1668,2,1657,232000 1372 | 4,1374,1,840,105000 1373 | 6,1661,1,992,165500 1374 | 7,2097,2,1108,274300 1375 | 10,2633,3,2633,466500 1376 | 7,1958,3,1026,250000 1377 | 8,1571,3,1571,239000 1378 | 6,790,1,768,91000 1379 | 5,1604,2,984,117000 1380 | 6,987,1,483,83000 1381 | 5,1394,2,384,167500 1382 | 3,864,1,864,58500 1383 | 6,2117,2,1205,237500 1384 | 7,1762,2,596,157000 1385 | 5,1416,2,816,112000 1386 | 6,1258,1,560,105000 1387 | 4,1154,1,796,125500 1388 | 7,2784,2,1392,250000 1389 | 6,2526,1,714,136000 1390 | 9,1746,3,1746,377500 1391 | 6,1218,2,735,131000 1392 | 7,1525,2,1525,235000 1393 | 5,1584,3,1584,124000 1394 | 5,900,1,864,123000 1395 | 6,1912,2,482,163000 1396 | 7,1500,3,1356,246578 1397 | 8,2482,3,1094,281213 1398 | 5,1687,2,747,160000 1399 | 5,1513,1,939,137500 1400 | 5,1904,1,1208,138000 1401 | 6,1608,1,976,137450 1402 | 6,1158,1,862,120000 1403 | 6,1593,2,839,193000 1404 | 7,1294,2,1286,193879 1405 | 8,1464,3,1485,282922 1406 | 3,1214,3,672,105000 1407 | 8,1646,2,1594,275000 1408 | 5,768,2,768,133000 1409 | 5,833,0,833,112000 1410 | 4,1363,2,622,125500 1411 | 7,2093,2,791,215000 1412 | 7,1840,2,944,230000 1413 | 6,1668,1,856,140000 1414 | 4,1040,2,0,90000 1415 | 8,1844,2,1844,257000 1416 | 6,1848,2,833,207000 1417 | 7,1569,3,1386,175900 1418 | 4,2290,2,777,122500 1419 | 8,2450,3,1284,340000 1420 | 5,1144,1,1144,124000 1421 | 6,1844,2,1844,223000 1422 | 6,1416,2,708,179900 1423 | 6,1069,2,1069,127500 1424 | 6,848,2,848,136500 1425 | 6,2201,2,697,274970 1426 | 5,1344,1,1024,144000 1427 | 6,1252,2,1252,142000 1428 | 7,2127,2,1223,271000 1429 | 5,1558,1,913,140000 1430 | 5,804,1,788,119000 1431 | 6,1440,2,1440,182900 1432 | 5,1838,2,732,192140 1433 | 6,958,2,958,143750 1434 | 4,968,1,656,64500 1435 | 6,1792,2,936,186500 1436 | 5,1126,2,1126,160000 1437 | 6,1537,2,1319,174000 1438 | 4,864,2,864,120500 1439 | 8,1932,3,1932,394617 1440 | 6,1236,2,912,149700 1441 | 7,1725,2,539,197000 1442 | 6,2555,2,588,191000 1443 | 6,848,2,848,149300 1444 | 10,2007,3,1017,310000 1445 | 6,952,1,952,121000 1446 | 7,1422,2,1422,179600 1447 | 6,913,1,814,129000 1448 | 5,1188,1,1188,157900 1449 | 8,2090,2,1220,240000 1450 | 4,1346,1,560,112000 1451 | 5,630,0,630,92000 1452 | 5,1792,0,896,136000 1453 | 8,1578,3,1573,287090 1454 | 5,1072,2,547,145000 1455 | 5,1140,0,1140,84500 1456 | 7,1221,2,1221,185000 1457 | 6,1647,2,953,175000 1458 | 6,2073,2,1542,210000 1459 | 7,2340,1,1152,266500 1460 | 5,1078,1,1078,142125 1461 | 5,1256,1,1256,147500 
1462 | -------------------------------------------------------------------------------- /23_model_creation.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from sklearn.ensemble import ExtraTreesRegressor 3 | from sklearn.model_selection import train_test_split,cross_val_score 4 | from sklearn.externals import joblib 5 | from sklearn.metrics import mean_squared_error 6 | import matplotlib.pyplot as plt 7 | from math import sqrt 8 | 9 | df = pd.read_csv(r'./23_input_data.csv') 10 | df1 = pd.read_csv(r'./23_sens_analysis.csv') 11 | 12 | X = df[list(df.columns)[:-1]] 13 | y = df['SalePrice'] 14 | X_train, X_test, y_train, y_test = train_test_split(X, y) 15 | regressor = ExtraTreesRegressor(n_estimators=50) 16 | regressor.fit(X_train, y_train) 17 | joblib.dump(regressor, r'./salepriceprediction.pkl') 18 | lin_reg = joblib.load(r"./salepriceprediction.pkl") 19 | y_predictions = regressor.predict(X_test) 20 | meanSquaredError=mean_squared_error(y_test, y_predictions) 21 | rootMeanSquaredError = sqrt(meanSquaredError) 22 | 23 | print("Number of predictions:",len(y_predictions)) 24 | print("Mean Squared Error:", meanSquaredError) 25 | print("Root Mean Squared Error:", rootMeanSquaredError) 26 | print ("Scoring:",regressor.score(X_test, y_test)) 27 | 28 | result = lin_reg.predict(df1) 29 | print (result) -------------------------------------------------------------------------------- /23_sens_analysis.csv: -------------------------------------------------------------------------------- 1 | OverallQual,GrLivArea,GarageCars,TotalBsmtSF 2 | 7,1710,2,856 3 | 6,1262,2,1262 4 | 7,1786,2,920 5 | 7,1717,3,756 6 | 8,2198,3,1145 7 | -------------------------------------------------------------------------------- /23_sensitivity_analysis.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import pandas as pd 3 | import os.path 4 | import os.path as osp 5 | import pickle 6 | from sklearn.externals import joblib 7 | import glob 8 | from sklearn.model_selection import train_test_split 9 | 10 | def Sensitivity_Analysis(df,txt): 11 | mark_list = ['d'] 12 | model = joblib.load("./salepriceprediction.pkl") 13 | d = {} 14 | min_variable = 334 15 | max_variable = 5642 16 | step = 500 17 | for i in range(0,len(df)): 18 | df2=pd.DataFrame(df.iloc[i]) 19 | actual=df.iloc[i]['GrLivArea'] 20 | df2=df2.T 21 | df3=df2.copy() 22 | var_list=[] 23 | curval = actual.copy() 24 | while(curval <= max_variable): 25 | var_list.append(curval) 26 | curval = curval+step 27 | curval = actual 28 | while(curval >= min_variable): 29 | var_list.append(curval) 30 | curval = curval-step 31 | var_list.sort() 32 | var_list = pd.Series(var_list).drop_duplicates().reset_index(drop=True) 33 | num_point = len(var_list) 34 | for j in range(1,num_point): 35 | df2=df2.append(df3) 36 | df2.reset_index(drop=True,inplace=True) 37 | df2['GrLivArea']=var_list 38 | d[str(i)]=df2 39 | del df2 40 | 41 | for key,value in d.items(): 42 | df_sense=d[key] 43 | df_sense=pd.DataFrame(df_sense) 44 | predicted_Sense=model.predict(df_sense) 45 | d222={'Input':list(df_sense['GrLivArea']),'Output':predicted_Sense} 46 | df_sensitivity=pd.DataFrame(data=d222) 47 | df_sensitivity.set_index('Input',inplace=True) 48 | df_sensitivity.sort_index(inplace=True) 49 | plt.plot(list(df_sensitivity.index),list(df_sensitivity['Output'])) 50 | plt.xlabel('GrLivArea') 51 | plt.ylabel('SalePrice') 52 | del predicted_Sense 53 | del df_sense 54 | 
plt.grid() 55 | plt.show() 56 | plt.close() 57 | 58 | 59 | df = pd.read_csv("./23_sens_analysis.csv") 60 | Sensitivity_Analysis(df,'1') -------------------------------------------------------------------------------- /24_hyperparameter_tuning.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from sklearn.ensemble import ExtraTreesRegressor 3 | from sklearn.model_selection import train_test_split,cross_val_score 4 | from sklearn.externals import joblib 5 | from sklearn.metrics import mean_squared_error 6 | from math import sqrt 7 | from sklearn.model_selection import GridSearchCV 8 | 9 | df = pd.read_csv(r'./23_input_data.csv') 10 | 11 | X = df[list(df.columns)[:-1]] 12 | y = df['SalePrice'] 13 | X_train, X_test, y_train, y_test = train_test_split(X, y) 14 | regressor = ExtraTreesRegressor() 15 | 16 | extra_grid = {'n_estimators': [5,10,20,50,100,150,200], 17 | 'max_features' : ['auto', 'sqrt'], 18 | 'min_samples_leaf' : [2,5,6,10], 19 | 'max_depth' : [2,6,7,5] 20 | } 21 | 22 | # 500, 6 7 8, 3 4 23 | 24 | Grid_search = GridSearchCV(regressor, param_grid = extra_grid) 25 | Grid_search.fit(X_train, y_train) 26 | Model_var = Grid_search.best_estimator_ 27 | y_predictions = Model_var.predict(X_test) 28 | meanSquaredError=mean_squared_error(y_test, y_predictions) 29 | rootMeanSquaredError = sqrt(meanSquaredError) 30 | 31 | print("Number of predictions:",len(y_predictions)) 32 | print("Mean Squared Error:", meanSquaredError) 33 | print("Root Mean Squared Error:", rootMeanSquaredError) 34 | print ("Scoring:",Model_var.score(X_test, y_test)) 35 | 36 | -------------------------------------------------------------------------------- /25_low_salespr_corr_columns.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | import matplotlib.pyplot as plt 4 | 5 | df = pd.read_csv('23_input_data.csv', sep = ',') 6 | 7 | cols = df.columns 8 | slopelist = [] 9 | 10 | dfs = df[cols[-1]].tolist() 11 | dfs = [int(i) if int(i)<250000 else None for i in dfs] 12 | 13 | fig = plt.figure() 14 | ax = fig.add_subplot(111) 15 | ax.plot([i for i in range(1460)],dfs , marker="o", color="red", zorder=10) 16 | ax.plot([i for i in range(1460)],df[cols[-1]], marker='o', zorder=0) 17 | ax.set(title="sales price below 250000(Red)") 18 | ax.legend() 19 | plt.savefig('lowsalesprice.jpg') 20 | plt.show() 21 | 22 | for i in df.columns[:-1]: 23 | 24 | name = i + ".jpg" 25 | 26 | fig = plt.figure() 27 | ax = fig.add_subplot(111) 28 | ax.plot([i for i in range(1460)],df[i] , marker="x", color="blue", zorder=10) 29 | plt.savefig(name) 30 | plt.show() -------------------------------------------------------------------------------- /26_valleymovingwindow.py: -------------------------------------------------------------------------------- 1 | import matplotlib 2 | import matplotlib.pyplot as plt 3 | import pandas as pd 4 | 5 | df = pd.read_csv("23_input_data.csv", sep = ";") 6 | 7 | cols = df.columns 8 | 9 | c = df[cols[-1]].tolist() 10 | 11 | if __name__ == '__main__': 12 | # Choose window width and threshold 13 | window = 25 14 | thres = 250000 15 | 16 | # Iterate and collect state changes with regard to previous state 17 | changes = [] 18 | rolling = [None] * window 19 | old_state = None 20 | for i in range(window, len(c) - 1): 21 | slc = c[i - window:i + 1] 22 | mean = sum(slc) / float(len(slc)) 23 | state = 'good' if mean > thres else 'bad' 24 | 25 | rolling.append(mean) 26 | if not old_state or 
old_state != state: 27 | print('Changed to {:>4s} at position {:>3d} ({:5.3f})'.format(state, i, mean)) 28 | changes.append((i, state)) 29 | old_state = state 30 | 31 | # Plot results and state changes 32 | plt.figure(frameon=False, figsize=(10, 8)) 33 | currents, = plt.plot(c, ls='--', label='Current') 34 | rollwndw, = plt.plot(rolling, lw=2, label='Rolling Mean') 35 | plt.axhline(thres, xmin=.0, xmax=1.0, c='grey', ls='-') 36 | plt.text(40, thres, 'Threshold: {:.1f}'.format(thres), horizontalalignment='right') 37 | for c, s in changes: 38 | plt.axvline(c, ymin=.0, ymax=.7, c='red', ls='-') 39 | plt.text(c, 41.5, s, color='red', rotation=90, verticalalignment='bottom') 40 | plt.legend(handles=[currents, rollwndw], fontsize=11) 41 | plt.grid(True) 42 | plt.savefig('plot.png', dpi=72, bbox_inches='tight') 43 | -------------------------------------------------------------------------------- /27_features_from_images.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from skimage.feature import corner_harris, corner_peaks 3 | from skimage.color import rgb2gray 4 | import matplotlib.pyplot as p 5 | import skimage.io as io 6 | from skimage.exposure import equalize_hist 7 | from sklearn import datasets 8 | 9 | x = datasets.load_digits() 10 | print (x.images[9]) 11 | print (x.images[9].reshape(-1, 64)) 12 | print (x.target[9]) 13 | 14 | def show_corners(corners, img): 15 | fig = p.figure() 16 | p.gray() 17 | p.imshow(img) 18 | y_corner, x_corner = zip(*corners) 19 | p.plot(x_corner, y_corner, 'or') 20 | p.xlim(0, img.shape[1]) 21 | p.ylim(img.shape[0], 0) 22 | fig.set_size_inches(np.array(fig.get_size_inches()) * 1.5) 23 | p.show() 24 | 25 | img = io.imread('C:/Users/TCS_1549117/Desktop/Environment/machine_learning_examples/licence.png') 26 | img = equalize_hist(rgb2gray(img)) 27 | corners = corner_peaks(corner_harris(img), min_distance=2) 28 | show_corners(corners, img) 29 | 30 | """ 31 | import mahotas as mh 32 | from mahotas.features import surf 33 | 34 | image = mh.imread('C:/Users/TCS_1549117/Desktop/Environment/machine_learning_examples/licence.png', as_grey=True) 35 | 36 | print ('The first SURF descriptor:\n', surf.surf(image)[0]) 37 | print ('Extracted %s SURF descriptors' % len(surf.surf(image))) 38 | """ -------------------------------------------------------------------------------- /28_feature_scaling.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | from sklearn import preprocessing 3 | from sklearn.preprocessing import StandardScaler 4 | 5 | df = pd.read_csv(r'./23_input_data.csv') 6 | X = df[list(df.columns)[:-1]] 7 | 8 | print (X) 9 | 10 | print (preprocessing.scale(X)) 11 | 12 | scaler = StandardScaler() 13 | print(scaler.fit(X)) 14 | print(scaler.transform(X)) -------------------------------------------------------------------------------- /29_gradient_descent.py: -------------------------------------------------------------------------------- 1 | cost = 3 2 | rate = 0.01 3 | precision = 0.000001 4 | diff = 1 5 | iters = 0 6 | df = lambda x: 2*(x+5) 7 | while diff > precision: 8 | prev_x = cost 9 | cost = cost - rate * df(prev_x) 10 | diff = abs(cost - prev_x) 11 | iters = iters+1 12 | print("Iteration",iters,"\nX value is",cost) 13 | 14 | print("The local minimum occurs at", cost) -------------------------------------------------------------------------------- /Algorithmns/01_linear_regression_equation.py: 
-------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | 3 | x = [1, 2, 3] 4 | y = [1, 2, 3] 5 | 6 | plt.figure() 7 | plt.title('Data - X and Y') 8 | plt.plot(x,y,'*') 9 | plt.xticks([0,1,2,3]) 10 | plt.yticks([0,1,2,3]) 11 | plt.show() 12 | 13 | def linear_regression(theta0,theta1): 14 | predicted_y = [] 15 | for i in x: 16 | predicted_y.append((theta0+(theta1*i))) 17 | plt.figure() 18 | plt.title('Predictions') 19 | plt.plot(x,predicted_y,'.') 20 | plt.xticks([0,1,2,3]) 21 | plt.yticks([0,1,2,3]) 22 | plt.show() 23 | 24 | theta0 = 1.5 25 | theta1 = 0 26 | linear_regression(theta0,theta1) 27 | 28 | theta0a = 0 29 | theta1a = 1.5 30 | linear_regression(theta0a,theta1a) 31 | 32 | theta0b = 1 33 | theta1b = 0.5 34 | linear_regression(theta0b,theta1b) 35 | 36 | -------------------------------------------------------------------------------- /Algorithmns/02_cost_function.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | 3 | x = [1, 2, 3] 4 | y = [1, 2, 3] 5 | 6 | m = len(y) 7 | 8 | def cost_function(theta0,theta1): 9 | predicted_y = [theta0+(theta1*1), theta0+(theta1*2), theta0+(theta1*3)] 10 | sum=0 11 | for i,j in zip(predicted_y,y): 12 | sum = sum+((i-j)**2) 13 | J = 1/(2*m)*sum 14 | return (J) 15 | 16 | theta0 = [1.5,0,1] 17 | theta1 = [0,1.5,0.5] 18 | 19 | for i,j in zip(theta0,theta1): 20 | print ("cost when theta0=%r theta1=%r :"%(i,j), cost_function(i,j)) 21 | 22 | 23 | -------------------------------------------------------------------------------- /Algorithmns/03_contour_plots.py: -------------------------------------------------------------------------------- 1 | from mpl_toolkits.mplot3d.axes3d import Axes3D 2 | import matplotlib.pyplot as plt 3 | import numpy as np 4 | 5 | fig, ax1 = plt.subplots(figsize=(8, 5), 6 | subplot_kw={'projection': '3d'}) 7 | 8 | 9 | values = 2 10 | r = np.linspace(-values,values,100) 11 | 12 | theta0,theta1= np.meshgrid(r,r) 13 | 14 | original_y = [1, 2, 3] 15 | m = len(original_y) 16 | 17 | predicted_y = [theta0+(theta1*1), theta0+(theta1*2), theta0+(theta1*3)] 18 | 19 | sum=0 20 | for i,j in zip(predicted_y,original_y): 21 | sum = sum+((i-j)**2) 22 | J = 1/(2*m)*sum 23 | 24 | ax1.plot_wireframe(theta0,theta1,J) 25 | ax1.set_title("plot") 26 | 27 | plt.show() -------------------------------------------------------------------------------- /Algorithmns/04_gradient_descent.py: -------------------------------------------------------------------------------- 1 | 2 | x = [1, 2, 3] 3 | y = [1, 2, 3] 4 | 5 | m = len(y) 6 | 7 | theta0 = 1 8 | theta1 = 1.5 9 | alpha = 0.01 10 | 11 | def cost_function(theta0,theta1): 12 | predicted_y = [theta0+(theta1*1), theta0+(theta1*2), theta0+(theta1*3)] 13 | sum=0 14 | for i,j in zip(predicted_y,y): 15 | sum = sum+((i-j)**2) 16 | J = 1/(2*m)*sum 17 | return (J) 18 | 19 | def gradientDescent(x, y, theta1, alpha): 20 | J_history = [] 21 | for i in range(50): 22 | for i,j in zip(x,y): 23 | delta=1/m*(i*i*theta1-i*j); 24 | theta1=theta1-alpha*delta; 25 | J_history.append(cost_function(theta0,theta1)) 26 | print (min(J_history)) 27 | 28 | 29 | gradientDescent(x, y, theta1, alpha) -------------------------------------------------------------------------------- /Algorithmns/05_multiple_linear_equation.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import numpy as np 3 | 4 | x = np.array([[1, 800, 2, 15],[1, 1200, 3, 
1],[1, 2400, 5, 5]]) 5 | y = np.array([3000000,2000000,3500000]) 6 | theta = np.array([100, 1000, 10000, 100000]) 7 | 8 | predicted_y = x.dot(theta.transpose()) 9 | print (predicted_y) 10 | 11 | m = y.size 12 | diff = predicted_y - y 13 | squares = np.square(diff) 14 | #sum_of_squares = 5424168464 15 | sum_of_squares = np.sum(squares) 16 | cost_fn = 1/(2*m)*sum_of_squares 17 | print (diff) 18 | print (squares) 19 | print (sum_of_squares) 20 | print (cost_fn) 21 | 22 | -------------------------------------------------------------------------------- /Algorithmns/06_polynomial_regression.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import matplotlib.pyplot as plt 3 | from sklearn.linear_model import LinearRegression 4 | from sklearn.preprocessing import PolynomialFeatures 5 | 6 | X = pd.DataFrame([100,200,300,400,500,600],columns=['sqft']) 7 | y = pd.DataFrame([543543,34543543,35435345,34534,34534534,345345],columns=['Price']) 8 | 9 | lin = LinearRegression() 10 | lin.fit(X, y) 11 | plt.scatter(X, y, color = 'blue') 12 | plt.plot(X, lin.predict(X), color = 'red') 13 | plt.title('Linear Regression') 14 | plt.xlabel('sqft') 15 | plt.ylabel('Price') 16 | plt.show() 17 | 18 | for i in [2,3,4,5]: 19 | poly = PolynomialFeatures(degree = i) 20 | X_poly = poly.fit_transform(X) 21 | poly.fit(X_poly, y) 22 | lin2 = LinearRegression() 23 | lin2.fit(X_poly, y) 24 | plt.scatter(X, y, color = 'blue') 25 | plt.plot(X, lin2.predict(poly.fit_transform(X)), color = 'red') 26 | plt.title('Polynomial Regression') 27 | plt.xlabel('sqft') 28 | plt.ylabel('Price') 29 | plt.show() 30 | 31 | -------------------------------------------------------------------------------- /Algorithmns/07_loan_prediction.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pandas as pd 3 | from sklearn.model_selection import train_test_split, GridSearchCV 4 | 5 | data = pd.read_csv('./train1.csv') 6 | l1 = ['Gender','Married','Dependents','Education','Self_Employed','ApplicantIncome','CoapplicantIncome','LoanAmount','Loan_Amount_Term','Credit_History','Property_Area'] 7 | l2 = ['Loan_Status'] 8 | l1_train, l1_test, l2_train, l2_test = train_test_split(data[l1], data[l2],test_size=0.25, random_state=1) 9 | 10 | l2_train = l2_train.replace({'Y':1, 'N':0}).values 11 | l2_test = l2_test.replace({'Y':1, 'N':0}).values 12 | 13 | print (l2_test) 14 | -------------------------------------------------------------------------------- /Algorithmns/07_logistic_regression.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | from sklearn.feature_extraction.text import TfidfVectorizer 4 | from sklearn.linear_model.logistic import LogisticRegression 5 | from sklearn.model_selection import train_test_split, cross_val_score 6 | 7 | 8 | df = pd.read_csv('./spam.csv', delimiter=',',header=None) 9 | X_train_raw, X_test_raw, y_train, y_test = train_test_split(df[1],df[0]) 10 | 11 | vectorizer = TfidfVectorizer() 12 | X_train = vectorizer.fit_transform(X_train_raw) 13 | X_test = vectorizer.transform(X_test_raw) 14 | 15 | classifier = LogisticRegression() 16 | classifier.fit(X_train, y_train) 17 | predictions = classifier.predict(X_test) 18 | print(predictions) 19 | 20 | 21 | 22 | 23 | -------------------------------------------------------------------------------- /Algorithmns/08_confusion_matrix_precision_recall.py: 
-------------------------------------------------------------------------------- 1 | from sklearn.metrics import precision_recall_fscore_support 2 | from sklearn.metrics import accuracy_score 3 | from sklearn.metrics import confusion_matrix 4 | import matplotlib.pyplot as plt 5 | 6 | y_true = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1] 7 | y_pred = [0, 1, 0, 0, 0, 0, 0, 1, 1, 1] 8 | 9 | print ('Accuracy:', accuracy_score(y_true, y_pred)) 10 | print (confusion_matrix(y_pred,y_true)) 11 | print (precision_recall_fscore_support(y_true, y_pred)) 12 | 13 | plt.matshow(confusion_matrix(y_true, y_pred)) 14 | plt.title('Confusion matrix') 15 | plt.colorbar() 16 | plt.ylabel('True label') 17 | plt.xlabel('Predicted label') 18 | plt.show() 19 | 20 | 21 | -------------------------------------------------------------------------------- /Algorithmns/08_precision_recall.py: -------------------------------------------------------------------------------- 1 | from sklearn.metrics import precision_recall_fscore_support 2 | from sklearn.metrics import accuracy_score 3 | from sklearn.metrics import confusion_matrix 4 | import matplotlib.pyplot as plt 5 | 6 | y_true = [0, 0, 0, 0, 0, 1, 1, 1, 1, 1] 7 | y_pred = [0, 1, 0, 0, 0, 0, 0, 1, 1, 1] 8 | 9 | print ('Accuracy:', accuracy_score(y_true, y_pred)) 10 | print (confusion_matrix(y_true, y_pred)) 11 | print (precision_recall_fscore_support(y_true, y_pred)) 12 | 13 | plt.matshow(confusion_matrix(y_true, y_pred)) 14 | plt.title('Confusion matrix') 15 | plt.colorbar() 16 | plt.ylabel('True label') 17 | plt.xlabel('Predicted label') 18 | plt.show() 19 | 20 | 21 | -------------------------------------------------------------------------------- /Algorithmns/09_multiclass_classification.py: -------------------------------------------------------------------------------- 1 | from sklearn.metrics import confusion_matrix 2 | from sklearn.metrics import precision_recall_fscore_support 3 | import pandas as pd 4 | from sklearn.model_selection import train_test_split 5 | from sklearn.tree import DecisionTreeClassifier 6 | from sklearn.svm import SVC 7 | from sklearn.neighbors import KNeighborsClassifier 8 | from sklearn.naive_bayes import GaussianNB 9 | 10 | df = pd.read_csv('./flowers.csv') 11 | X = df[list(df.columns)[:-1]] 12 | y = df['Flower'] 13 | X_train, X_test, y_train, y_test = train_test_split(X, y, random_state = 0) 14 | 15 | tree = DecisionTreeClassifier(max_depth = 2).fit(X_train, y_train) 16 | tree_predictions = tree.predict(X_test) 17 | print (tree.score(X_test, y_test)) 18 | print (confusion_matrix(y_test, tree_predictions)) 19 | print (precision_recall_fscore_support(y_test, tree_predictions)) 20 | 21 | svc = SVC(kernel = 'linear', C = 1).fit(X_train, y_train) 22 | svc_predictions = svc.predict(X_test) 23 | print (svc.score(X_test, y_test)) 24 | print (confusion_matrix(y_test, svc_predictions)) 25 | print (precision_recall_fscore_support(y_test, svc_predictions)) 26 | 27 | knn = KNeighborsClassifier(n_neighbors = 7).fit(X_train, y_train) 28 | knn_predictions = knn.predict(X_test) 29 | print (knn.score(X_test, y_test)) 30 | print (confusion_matrix(y_test, knn_predictions)) 31 | print (precision_recall_fscore_support(y_test, knn_predictions)) 32 | 33 | gnb = GaussianNB().fit(X_train, y_train) 34 | gnb_predictions = gnb.predict(X_test) 35 | print (gnb.score(X_test, y_test)) 36 | print (confusion_matrix(y_test, gnb_predictions)) 37 | print (precision_recall_fscore_support(y_test, gnb_predictions)) 38 | 39 | 40 | 41 | # Back propagation is the process of finding which weights each unit in our neural network should use so that the errors are reduced. To find the error that occurs at each layer, its partial derivative values are computed from the back towards the front; these are then combined to obtain the cost of the network. In general, the gradient descent algorithm uses this back propagation to set the weights of the neurons in the way that produces the smallest difference between predicted and actual values.
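# A minimal numeric sketch of the back-propagation idea described in the comment above.
# The tiny 1-input / 1-hidden-unit / 1-output network, its starting weights and the
# learning rate below are assumed values chosen only for illustration.
import math

x, y = 1.0, 0.0          # one training example
w1, w2 = 0.5, 0.5        # weight into the hidden unit, weight into the output unit
alpha = 0.1              # learning rate

def sigmoid(z):
    return 1.0 / (1.0 + math.exp(-z))

# forward pass
h = sigmoid(w1 * x)              # hidden activation
o = sigmoid(w2 * h)              # output activation
cost = 0.5 * (o - y) ** 2        # squared-error cost for this single example

# backward pass: partial derivatives computed from the output layer back to the input
d_o = (o - y) * o * (1 - o)      # dCost/d(net input of the output unit)
d_w2 = d_o * h                   # dCost/dw2
d_h = d_o * w2 * h * (1 - h)     # error propagated back to the hidden unit
d_w1 = d_h * x                   # dCost/dw1

# one gradient-descent step using the back-propagated gradients
w1 = w1 - alpha * d_w1
w2 = w2 - alpha * d_w2
print(cost, d_w1, d_w2, w1, w2)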
எடைகளைப் பயன்படுத்தினால், தவறுகளைக் குறைக்கலாம் எனக் கண்டுபிடிப்பதே back propagation ஆகும் . ஒவ்வொரு அடுக்கிலும் நிகழும் தவறைக் கண்டுபிடிக்க அதன் partial derivative மதிப்புகள் பின்னிருந்து முன்னாகக் கணக்கிடப்படுகின்றன. பின்னர் அவைகளை ஒன்று திரட்டி அந்த network-ன் cost கண்டுபிடிக்கப்படுகிறது. பொதுவாக gradient descent algorithm -ஆனது குறைந்த அளவு வேறுபாடு தரக்கூடிய வகையில் neuron-களின் எடையை அமைக்க இந்த back propagation -ஐப் பயன்படுத்துகிறது. 42 | 43 | 44 | 45 | -------------------------------------------------------------------------------- /Algorithmns/10_issues_classification.py: -------------------------------------------------------------------------------- 1 | # https://towardsdatascience.com/multi-class-text-classification-with-scikit-learn-12f1e60e0a9f 2 | # https://towardsdatascience.com/running-chi-square-tests-in-python-with-die-roll-data-b9903817c51b 3 | 4 | import pandas as pd 5 | from io import StringIO 6 | import matplotlib.pyplot as plt 7 | from sklearn.feature_extraction.text import TfidfVectorizer 8 | from sklearn.feature_selection import chi2 9 | import numpy as np 10 | from sklearn.model_selection import train_test_split 11 | from sklearn.feature_extraction.text import CountVectorizer 12 | from sklearn.feature_extraction.text import TfidfTransformer 13 | from sklearn.naive_bayes import MultinomialNB 14 | 15 | df = pd.read_csv('./Consumer_Complaints.csv', sep=',', error_bad_lines=False, index_col=False, dtype='unicode') 16 | df = df[pd.notnull(df['Issue'])] 17 | 18 | fig = plt.figure(figsize=(8,6)) 19 | df.groupby('Product').Issue.count().plot.bar(ylim=0) 20 | plt.show() 21 | 22 | X_train, X_test, y_train, y_test = train_test_split(df['Issue'], df['Product'], random_state = 0) 23 | c = CountVectorizer() 24 | clf = MultinomialNB().fit (TfidfTransformer().fit_transform(c.fit_transform(X_train)), y_train) 25 | 26 | print(clf.predict(c.transform(["This company refuses to provide me verification and validation of debt per my right under the FDCPA. 
I do not believe this debt is mine."]))) 27 | 28 | tfidf = TfidfVectorizer(sublinear_tf=True, min_df=5, norm='l2', encoding='latin-1', ngram_range=(1, 2), stop_words='english') 29 | features = tfidf.fit_transform(df.Issue).toarray() 30 | print (features) 31 | df['category_id'] = df['Product'].factorize()[0] 32 | pro_cat = df[['Product', 'category_id']].drop_duplicates().sort_values('category_id') 33 | print (pro_cat) 34 | for i, j in sorted(dict(pro_cat.values).items()): 35 | indices = np.argsort(chi2(features, df.category_id == j)[0]) 36 | print (indices) 37 | feature_names = np.array(tfidf.get_feature_names())[indices] 38 | unigrams = [i for i in feature_names if len(i.split(' ')) == 1] 39 | bigrams = [i for i in feature_names if len(i.split(' ')) == 2] 40 | print(">",i) 41 | print("unigrams:",','.join(unigrams[:5])) 42 | print("bigrams:",','.join(bigrams[:5])) 43 | 44 | 45 | 46 | """ 47 | 48 | 49 | print (features.shape) 50 | 51 | 52 | df['category_id'] = df['Product'].factorize()[0] 53 | pro_cat = df[['Product', 'category_id']].drop_duplicates().sort_values('category_id') 54 | for i, j in sorted(dict(pro_cat.values).items()): 55 | indices = np.argsort(chi2(features, df.category_id == j)[0]) 56 | feature_names = np.array(tfidf.get_feature_names())[indices] 57 | unigrams = [i for i in feature_names if len(i.split(' ')) == 1] 58 | bigrams = [i for i in feature_names if len(i.split(' ')) == 2] 59 | print(">",i) 60 | print("unigrams:",','.join(unigrams[:5])) 61 | print("bigrams:",','.join(bigrams[:5])) 62 | """ 63 | 64 | 9884823387 65 | 66 | 67 | 68 | -------------------------------------------------------------------------------- /Algorithmns/11_clustering.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import math 3 | 4 | def plots(cluster1_x1,cluster1_x2,cluster2_x1,cluster2_x2): 5 | plt.figure() 6 | plt.plot(cluster1_x1,cluster1_x2,'.') 7 | plt.plot(cluster2_x1,cluster2_x2,'*') 8 | plt.grid(True) 9 | plt.show() 10 | 11 | def round1(c1_x1,c1_x2,c2_x1,c2_x2): 12 | cluster1_x1 = [] 13 | cluster1_x2 = [] 14 | cluster2_x1 = [] 15 | cluster2_x2 = [] 16 | 17 | for i,j in zip(x1,x2): 18 | a = math.sqrt(((i-c1_x1)**2 + (j-c1_x2)**2)) 19 | b = math.sqrt(((i-c2_x1)**2 + (j-c2_x2)**2)) 20 | if a < b: 21 | cluster1_x1.append(i) 22 | cluster1_x2.append(j) 23 | else: 24 | cluster2_x1.append(i) 25 | cluster2_x2.append(j) 26 | 27 | plots(cluster1_x1,cluster1_x2,cluster2_x1,cluster2_x2) 28 | 29 | c1_x1 = sum(cluster1_x1)/len(cluster1_x1) 30 | c1_x2 = sum(cluster1_x2)/len(cluster1_x2) 31 | c2_x1 = sum(cluster2_x1)/len(cluster2_x1) 32 | c2_x2 = sum(cluster2_x2)/len(cluster2_x2) 33 | 34 | round2 (c1_x1,c1_x2,c2_x1,c2_x2) 35 | print ((c1_x1,c1_x2,c2_x1,c2_x2)) 36 | 37 | def round2(c1_x1,c1_x2,c2_x1,c2_x2): 38 | cluster1_x1 = [] 39 | cluster1_x2 = [] 40 | cluster2_x1 = [] 41 | cluster2_x2 = [] 42 | 43 | for i,j in zip(x1,x2): 44 | c = math.sqrt(((i-c1_x1)**2 + (j-c1_x2)**2)) 45 | d = math.sqrt(((i-c2_x1)**2 + (j-c2_x2)**2)) 46 | if c < d: 47 | cluster1_x1.append(i) 48 | cluster1_x2.append(j) 49 | else: 50 | cluster2_x1.append(i) 51 | cluster2_x2.append(j) 52 | 53 | plots(cluster1_x1,cluster1_x2,cluster2_x1,cluster2_x2) 54 | 55 | x1 = [15, 19, 15, 5, 13, 17, 15, 12, 8, 6, 9, 13] 56 | x2 = [13, 16, 17, 6, 17, 14, 15, 13, 7, 6, 10, 12] 57 | 58 | plots(x1,x2,[],[]) 59 | round1(x1[4],x2[4],x1[10],x2[10]) 60 | 61 | -------------------------------------------------------------------------------- /Algorithmns/12_elbow_method.py: 
-------------------------------------------------------------------------------- 1 | from sklearn.cluster import KMeans 2 | from sklearn import metrics 3 | from scipy.spatial.distance import cdist 4 | import numpy as np 5 | import matplotlib.pyplot as plt 6 | 7 | x1 = [15, 19, 15, 5, 13, 17, 15, 12, 8, 6, 9, 13] 8 | x2 = [13, 16, 17, 6, 17, 14, 15, 13, 7, 6, 10, 12] 9 | 10 | X = np.array(list(zip(x1, x2))) 11 | 12 | distortions = [] 13 | K = range(1,8) 14 | for i in K: 15 | print (i) 16 | model = KMeans(n_clusters=i) 17 | model.fit(X) 18 | distortions.append(sum(np.min(cdist(X, model.cluster_centers_, 'euclidean'), axis=1)) / X.shape[0]) 19 | 20 | plt.plot() 21 | plt.plot(K, distortions, 'bx-') 22 | plt.show() 23 | -------------------------------------------------------------------------------- /Algorithmns/13_silhouette_coefficient.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from sklearn.cluster import KMeans 3 | from sklearn import metrics 4 | import matplotlib.pyplot as plt 5 | plt.subplot(3, 2, 1) 6 | 7 | x1 = [15, 19, 15, 5, 13, 17, 15, 12, 8, 6, 9, 13] 8 | x2 = [13, 16, 17, 6, 17, 14, 15, 13, 7, 6, 10, 12] 9 | plt.scatter(x1, x2) 10 | 11 | X = np.array(list(zip(x1, x2))) 12 | 13 | c = ['b', 'g', 'r', 'c', 'm', 'y', 'k', 'b'] 14 | m = ['o', 's', 'D', 'v', '^', 'p', '*', '+'] 15 | 16 | p = 1 17 | for i in [2, 3, 4, 5, 8]: 18 | p += 1 19 | plt.subplot(3, 2, p) 20 | model = KMeans(n_clusters=i).fit(X) 21 | print (model.labels_) 22 | for i, j in enumerate(model.labels_): 23 | plt.plot(x1[i], x2[i], color=c[j], marker=m[j],ls='None') 24 | print (metrics.silhouette_score(X, model.labels_ ,metric='euclidean')) 25 | plt.show() 26 | -------------------------------------------------------------------------------- /Algorithmns/14_svc_linear.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | from sklearn import svm 4 | from sklearn.linear_model.logistic import LogisticRegression 5 | 6 | def classifier(): 7 | xx = np.linspace(1,10) 8 | yy = -regressor.coef_[0][0] / regressor.coef_[0][1] * xx - regressor.intercept_[0] / regressor.coef_[0][1] 9 | plt.plot(xx, yy) 10 | plt.scatter(x1,x2) 11 | plt.show() 12 | 13 | x1 = [2,6,3,9,4,10] 14 | x2 = [3,9,3,10,2,13] 15 | 16 | X = np.array([[2,3],[6,9],[3,3],[9,10],[4,2],[10,13]]) 17 | y = [0,1,0,1,0,1] 18 | 19 | regressor = LogisticRegression() 20 | regressor.fit(X,y) 21 | classifier() 22 | 23 | regressor = svm.SVC(kernel='linear',C = 1.0) 24 | regressor.fit(X,y) 25 | classifier() 26 | 27 | 28 | #http://mlwiki.org/index.php/Support_Vector_Machines 29 | #https://medium.com/data-py-blog/kernel-svm-in-python-a8fae37908b9 30 | -------------------------------------------------------------------------------- /Algorithmns/15_svm_nonlinear_kernels.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | from sklearn import svm 4 | import pandas as pd 5 | from sklearn.metrics import accuracy_score 6 | from sklearn.model_selection import train_test_split 7 | from sklearn.svm import SVC 8 | from sklearn.metrics import classification_report, confusion_matrix 9 | from sklearn.linear_model.logistic import LogisticRegression 10 | 11 | df = pd.read_csv('./flowers.csv') 12 | X = df[list(df.columns)[:-1]] 13 | y = df['Flower'] 14 | X_train, X_test, y_train, y_test = train_test_split(X, y, random_state = 0) 15 | 16 | logistic 
= LogisticRegression() 17 | logistic.fit(X_train, y_train) 18 | y_pred = logistic.predict(X_test) 19 | print ('Accuracy-logistic:', accuracy_score(y_test, y_pred)) 20 | 21 | gaussian = SVC(kernel='rbf') 22 | gaussian.fit(X_train, y_train) 23 | y_pred = gaussian.predict(X_test) 24 | print ('Accuracy-svm:', accuracy_score(y_test, y_pred)) 25 | 26 | -------------------------------------------------------------------------------- /Algorithmns/16_pca.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | import pandas as pd 4 | from sklearn.model_selection import train_test_split 5 | from sklearn.preprocessing import StandardScaler 6 | from sklearn.decomposition import PCA 7 | 8 | df = pd.read_csv('./flowers.csv') 9 | X = df[list(df.columns)[:-1]] 10 | y = df['Flower'] 11 | X_train, X_test, y_train, y_test = train_test_split(X, y, random_state = 0) 12 | 13 | pca = PCA(n_components=2) 14 | x = StandardScaler().fit_transform(X_train) 15 | new_x = pd.DataFrame(data = pca.fit_transform(x), columns = ['x1', 'x2']) 16 | 17 | df2 = pd.concat([new_x, df[['Flower']]], axis = 1) 18 | 19 | fig = plt.figure(figsize = (8,8)) 20 | ax = fig.add_subplot(1,1,1) 21 | ax.set_xlabel('x1', fontsize = 15) 22 | ax.set_ylabel('x2', fontsize = 15) 23 | ax.set_title('2 Components', fontsize = 20) 24 | for i, j in zip(['Rose', 'Jasmin', 'Lotus'],['g', 'b', 'r']): 25 | ax.scatter(df2.loc[df2['Flower'] == i, 'x1'], df2.loc[df2['Flower'] == i, 'x2'], c = j) 26 | ax.legend(['Rose', 'Jasmin', 'Lotus']) 27 | ax.grid() 28 | plt.show() 29 | 30 | print (pca.explained_variance_ratio_) 31 | 32 | print (df.columns) 33 | print (df2.columns) 34 | 35 | #https://github.com/mGalarnyk/Python_Tutorials/blob/master/Sklearn/PCA/PCA_Data_Visualization_Iris_Dataset_Blog.ipynb 36 | 37 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /Algorithmns/17.1_perceptron.py: -------------------------------------------------------------------------------- 1 | def predict(row, weights): 2 | activation = weights[0] 3 | for i in range(len(row)-1): 4 | activation += weights[i + 1] * row[i] 5 | return 1.0 if activation > 0.0 else 0.0 6 | 7 | def train_weights(dataset, l_rate, n_epoch): 8 | weights = [0.0 for i in range(len(dataset[0]))] 9 | for epoch in range(n_epoch): 10 | sum_error = 0.0 11 | for row in dataset: 12 | error = row[-1] - predict(row, weights) 13 | sum_error += error**2 14 | weights[0] = weights[0] + l_rate * error 15 | for i in range(len(row)-1): 16 | weights[i + 1] = weights[i + 1] + l_rate * error * row[i] 17 | print('epoch=%d, error=%.2f' % (epoch, sum_error)) 18 | print (weights) 19 | 20 | dataset = [[0.4,0.3,1], 21 | [0.6,0.8,1], 22 | [0.7,0.5,1], 23 | [0.9,0.2,0]] 24 | 25 | l_rate = 0.1 26 | n_epoch = 6 27 | train_weights(dataset, l_rate, n_epoch) 28 | 29 | """ 30 | http://marubon-ds.blogspot.com/2017/06/title.html 31 | https://stats.stackexchange.com/questions/281623/knn-outperforms-cnn 32 | https://pythonprogramming.net/cnn-tensorflow-convolutional-nerual-network-machine-learning-tutorial/?completed=/convolutional-neural-network-cnn-machine-learning-tutorial/ 33 | 34 | 35 | https://machinelearningmastery.com/implement-perceptron-algorithm-scratch-python/ 36 | 37 | 38 | 39 | 40 | 41 | 42 | """ 43 | 44 | -------------------------------------------------------------------------------- /Algorithmns/17.2_perceptron.py: -------------------------------------------------------------------------------- 1 | 
from sklearn.datasets import fetch_20newsgroups 2 | from sklearn.metrics import f1_score, classification_report 3 | from sklearn.feature_extraction.text import TfidfVectorizer 4 | from sklearn.linear_model import Perceptron 5 | from sklearn.linear_model.logistic import LogisticRegression 6 | 7 | categories = ['rec.sport.hockey', 'rec.sport.baseball', 'rec.autos'] 8 | 9 | x1 = fetch_20newsgroups(subset='train',categories=categories, remove=('headers', 'footers', 'quotes')) 10 | x2 = fetch_20newsgroups(subset='test', categories=categories, remove=('headers', 'footers', 'quotes')) 11 | 12 | vectorizer = TfidfVectorizer() 13 | X_train = vectorizer.fit_transform(x1.data) 14 | X_test = vectorizer.transform(x2.data) 15 | 16 | classifier = LogisticRegression() 17 | classifier.fit(X_train, x1.target) 18 | predictions = classifier.predict(X_test) 19 | print (classification_report(x2.target, predictions)) 20 | 21 | classifier = Perceptron(n_iter=100, eta0=0.1) 22 | classifier.fit(X_train, x1.target ) 23 | predictions = classifier.predict(X_test) 24 | print (classification_report(x2.target, predictions)) 25 | 26 | 27 | 28 | -------------------------------------------------------------------------------- /Algorithmns/18_neural_networks.py: -------------------------------------------------------------------------------- 1 | from mlxtend.classifier import MultiLayerPerceptron as MLP 2 | from mlxtend.plotting import plot_decision_regions 3 | import matplotlib.pyplot as plt 4 | import numpy as np 5 | 6 | X = np.asarray([[6.1,1.4],[7.7,2.3],[6.3,2.4],[6.4,1.8],[6.2,1.8],[6.9,2.1], 7 | [6.7,2.4],[6.9,2.3],[5.8,1.9],[6.8,2.3],[6.7,2.5],[6.7,2.3],[6.3,1.9],[6.5,2.1 ],[6.2,2.3],[5.9,1.8]] ) 8 | 9 | X = (X - X.mean(axis=0)) / X.std(axis=0) 10 | 11 | y = np.asarray([0,2,2,1,2,2,2,2,2,2,2,2,2,2,2,2]) 12 | 13 | nn = MLP(hidden_layers=[50],l2=0.00,l1=0.0,epochs=150,eta=0.05, 14 | momentum=0.1,decrease_const=0.0,minibatches=1,random_seed=1,print_progress=3) 15 | nn = nn.fit(X, y) 16 | 17 | fig = plot_decision_regions(X=X, y=y, clf=nn, legend=2) 18 | plt.show() 19 | print('Accuracy(epochs = 150): %.2f%%' % (100 * nn.score(X, y))) 20 | 21 | nn.epochs = 250 22 | nn = nn.fit(X, y) 23 | fig = plot_decision_regions(X=X, y=y, clf=nn, legend=2) 24 | plt.title('epochs = 250') 25 | plt.show() 26 | print('Accuracy(epochs = 250): %.2f%%' % (100 * nn.score(X, y))) 27 | 28 | plt.plot(range(len(nn.cost_)), nn.cost_) 29 | plt.title('Gradient Descent training (minibatches=1)') 30 | plt.xlabel('Epochs') 31 | plt.ylabel('Cost') 32 | plt.show() 33 | 34 | nn.minibatches = len(y) 35 | nn = nn.fit(X, y) 36 | plt.plot(range(len(nn.cost_)), nn.cost_) 37 | plt.title('Stochastic Gradient Descent (minibatches=no. 
of training examples)') 38 | plt.ylabel('Cost') 39 | plt.xlabel('Epochs') 40 | plt.show() 41 | 42 | #http://rasbt.github.io/mlxtend/user_guide/classifier/MultiLayerPerceptron/ 43 | 44 | 45 | 46 | 47 | -------------------------------------------------------------------------------- /Algorithmns/19_decision_trees.py: -------------------------------------------------------------------------------- 1 | from sklearn.datasets import load_iris 2 | import pandas as pd 3 | import os 4 | from sklearn.tree import DecisionTreeClassifier,export_graphviz 5 | from sklearn.metrics import confusion_matrix,accuracy_score,classification_report 6 | from io import StringIO 7 | import pydotplus 8 | from sklearn.model_selection import train_test_split 9 | from sklearn.ensemble import RandomForestClassifier 10 | from IPython.display import Image 11 | import matplotlib.pyplot as plt 12 | import seaborn as sns 13 | 14 | df = pd.read_csv('./flowers.csv') 15 | X = df[list(df.columns)[:-1]] 16 | y = df['Flower'] 17 | X_train, X_test, y_train, y_test = train_test_split(X, y, random_state = 0) 18 | 19 | a = DecisionTreeClassifier(criterion = "gini", random_state = 100,max_depth=4, min_samples_leaf=5) # entropy for information gain 20 | a.fit(X_train, y_train) 21 | y_pred = a.predict(X_test) 22 | y_train.to_csv('./sss.csv') 23 | print("Confusion Matrix: ", confusion_matrix(y_test, y_pred)) 24 | print ("Accuracy : ", accuracy_score(y_test,y_pred)*100) 25 | print("Report : ", classification_report(y_test, y_pred)) 26 | 27 | dot_data = StringIO() 28 | export_graphviz(a, out_file=dot_data,filled=True, rounded=True,special_characters=True) 29 | graph = pydotplus.graph_from_dot_data(dot_data.getvalue()) 30 | Image(graph.create_png()) 31 | graph.write_png("decisiontree.png") 32 | 33 | b = RandomForestClassifier(max_depth = None, n_estimators=100) 34 | b.fit(X_train,y_train) 35 | y_pred = b.predict(X_test) 36 | print("Confusion Matrix: ", confusion_matrix(y_test, y_pred)) 37 | print ("Accuracy : ", accuracy_score(y_test,y_pred)*100) 38 | print("Report : ", classification_report(y_test, y_pred)) 39 | 40 | export_graphviz(b.estimators_[5], out_file='tree.dot', feature_names = X_train.columns.tolist(), 41 | class_names = ['Lotus', 'Jasmin', 'Rose'], 42 | rounded = True, proportion = False, precision = 2, filled = True) 43 | 44 | os.system ("dot -Tpng tree.dot -o randomforest.png -Gdpi=600") 45 | Image(filename = 'randomforest.png') 46 | f = pd.Series(b.feature_importances_,index=X_train.columns.tolist()).sort_values(ascending=False) 47 | sns.barplot(x=f, y=f.index) 48 | plt.xlabel('Feature Importance Score') 49 | plt.ylabel('Features') 50 | plt.legend() 51 | plt.show() 52 | 53 | 54 | # https://www.kaggle.com/willkoehrsen/visualize-a-decision-tree-w-python-scikit-learn 55 | # https://www.geeksforgeeks.org/decision-tree-implementation-python/ 56 | -------------------------------------------------------------------------------- /Algorithmns/OCR.py: -------------------------------------------------------------------------------- 1 | import pytesseract 2 | import requests 3 | from PIL import Image 4 | from PIL import ImageFilter 5 | from StringIO import StringIO 6 | 7 | 8 | def process_image(url): 9 | image = _get_image(url) 10 | image.filter(ImageFilter.SHARPEN) 11 | return pytesseract.image_to_string(image) 12 | 13 | 14 | def _get_image(url): 15 | return Image.open(StringIO(requests.get(url).content)) 16 | 17 | import sys 18 | import requests 19 | import pytesseract 20 | from PIL import Image 21 | from StringIO import StringIO 22 | 23 
| 24 | def get_image(url): 25 | return Image.open(StringIO(requests.get(url).content)) 26 | 27 | 28 | if __name__ == '__main__': 29 | """Tool to test the raw output of pytesseract with a given input URL""" 30 | sys.stdout.write(""" 31 | ===OOOO=====CCCCC===RRRRRR=====\n 32 | ==OO==OO===CC=======RR===RR====\n 33 | ==OO==OO===CC=======RR===RR====\n 34 | ==OO==OO===CC=======RRRRRR=====\n 35 | ==OO==OO===CC=======RR==RR=====\n 36 | ==OO==OO===CC=======RR== RR====\n 37 | ===OOOO=====CCCCC===RR====RR===\n\n 38 | """) 39 | sys.stdout.write("A simple OCR utility\n") 40 | url = raw_input("What is the url of the image you would like to analyze?\n") 41 | image = get_image(url) 42 | sys.stdout.write("The raw output from tesseract with no processing is:\n\n") 43 | sys.stdout.write("-----------------BEGIN-----------------\n") 44 | sys.stdout.write(pytesseract.image_to_string(image) + "\n") 45 | sys.stdout.write("------------------END------------------\n") 46 | 47 | 48 | """ 49 | https://realpython.com/setting-up-a-simple-ocr-server/ 50 | https://micropyramid.com/blog/extract-text-with-ocr-for-image-files-in-python-using-pytesseract/ 51 | https://www.learnopencv.com/deep-learning-based-text-recognition-ocr-using-tesseract-and-opencv/ 52 | """ 53 | -------------------------------------------------------------------------------- /Algorithmns/ads.csv: -------------------------------------------------------------------------------- 1 | Time,Ads 2017-09-13T00:00:00,80115 2017-09-13T01:00:00,79885 2017-09-13T02:00:00,89325 2017-09-13T03:00:00,101930 2017-09-13T04:00:00,121630 2017-09-13T05:00:00,116475 2017-09-13T06:00:00,106495 2017-09-13T07:00:00,102795 2017-09-13T08:00:00,108055 2017-09-13T09:00:00,116125 2017-09-13T10:00:00,131030 2017-09-13T11:00:00,149020 2017-09-13T12:00:00,157590 2017-09-13T13:00:00,150715 2017-09-13T14:00:00,149295 2017-09-13T15:00:00,150100 2017-09-13T16:00:00,144780 2017-09-13T17:00:00,150690 2017-09-13T18:00:00,163840 2017-09-13T19:00:00,166235 2017-09-13T20:00:00,139520 2017-09-13T21:00:00,105895 2017-09-13T22:00:00,96780 2017-09-13T23:00:00,82520 2017-09-14T00:00:00,80125 2017-09-14T01:00:00,75335 2017-09-14T02:00:00,85105 2017-09-14T03:00:00,102080 2017-09-14T04:00:00,125135 2017-09-14T05:00:00,118030 2017-09-14T06:00:00,109225 2017-09-14T07:00:00,102475 2017-09-14T08:00:00,102240 2017-09-14T09:00:00,115840 2017-09-14T10:00:00,130540 2017-09-14T11:00:00,144325 2017-09-14T12:00:00,148970 2017-09-14T13:00:00,149150 2017-09-14T14:00:00,148040 2017-09-14T15:00:00,148810 2017-09-14T16:00:00,149830 2017-09-14T17:00:00,150570 2017-09-14T18:00:00,149440 2017-09-14T19:00:00,150520 2017-09-14T20:00:00,129130 2017-09-14T21:00:00,103815 2017-09-14T22:00:00,92175 2017-09-14T23:00:00,82765 2017-09-15T00:00:00,76315 2017-09-15T01:00:00,75130 2017-09-15T02:00:00,82640 2017-09-15T03:00:00,88795 2017-09-15T04:00:00,118430 2017-09-15T05:00:00,115190 2017-09-15T06:00:00,110940 2017-09-15T07:00:00,98860 2017-09-15T08:00:00,104185 2017-09-15T09:00:00,108665 2017-09-15T10:00:00,126640 2017-09-15T11:00:00,140435 2017-09-15T12:00:00,152470 2017-09-15T13:00:00,146275 2017-09-15T14:00:00,148020 2017-09-15T15:00:00,147735 2017-09-15T16:00:00,145750 2017-09-15T17:00:00,149285 2017-09-15T18:00:00,159725 2017-09-15T19:00:00,161085 2017-09-15T20:00:00,135520 2017-09-15T21:00:00,112945 2017-09-15T22:00:00,100200 2017-09-15T23:00:00,87615 2017-09-16T00:00:00,87835 2017-09-16T01:00:00,88845 2017-09-16T02:00:00,92350 2017-09-16T03:00:00,104465 2017-09-16T04:00:00,115940 2017-09-16T05:00:00,128950 
2017-09-16T06:00:00,141385 2017-09-16T07:00:00,144440 2017-09-16T08:00:00,143250 2017-09-16T09:00:00,133190 2017-09-16T10:00:00,131240 2017-09-16T11:00:00,142480 2017-09-16T12:00:00,157375 2017-09-16T13:00:00,152500 2017-09-16T14:00:00,153735 2017-09-16T15:00:00,151195 2017-09-16T16:00:00,150645 2017-09-16T17:00:00,147435 2017-09-16T18:00:00,152525 2017-09-16T19:00:00,146875 2017-09-16T20:00:00,125245 2017-09-16T21:00:00,117925 2017-09-16T22:00:00,96700 2017-09-16T23:00:00,93610 2017-09-17T00:00:00,89060 2017-09-17T01:00:00,89345 2017-09-17T02:00:00,90575 2017-09-17T03:00:00,98290 2017-09-17T04:00:00,112570 2017-09-17T05:00:00,129470 2017-09-17T06:00:00,141405 2017-09-17T07:00:00,152560 2017-09-17T08:00:00,152580 2017-09-17T09:00:00,141170 2017-09-17T10:00:00,147550 2017-09-17T11:00:00,161110 2017-09-17T12:00:00,166335 2017-09-17T13:00:00,166780 2017-09-17T14:00:00,163140 2017-09-17T15:00:00,157305 2017-09-17T16:00:00,159055 2017-09-17T17:00:00,160020 2017-09-17T18:00:00,168345 2017-09-17T19:00:00,169900 2017-09-17T20:00:00,142710 2017-09-17T21:00:00,112955 2017-09-17T22:00:00,97345 2017-09-17T23:00:00,81675 2017-09-18T00:00:00,79510 2017-09-18T01:00:00,78350 2017-09-18T02:00:00,88045 2017-09-18T03:00:00,99790 2017-09-18T04:00:00,123780 2017-09-18T05:00:00,111325 2017-09-18T06:00:00,99440 2017-09-18T07:00:00,97655 2017-09-18T08:00:00,97655 2017-09-18T09:00:00,102565 2017-09-18T10:00:00,119930 2017-09-18T11:00:00,135755 2017-09-18T12:00:00,140120 2017-09-18T13:00:00,141730 2017-09-18T14:00:00,142220 2017-09-18T15:00:00,145360 2017-09-18T16:00:00,145335 2017-09-18T17:00:00,150410 2017-09-18T18:00:00,161520 2017-09-18T19:00:00,153415 2017-09-18T20:00:00,134720 2017-09-18T21:00:00,107065 2017-09-18T22:00:00,95045 2017-09-18T23:00:00,79515 2017-09-19T00:00:00,78335 2017-09-19T01:00:00,74670 2017-09-19T02:00:00,81990 2017-09-19T03:00:00,97950 2017-09-19T04:00:00,119345 2017-09-19T05:00:00,113115 2017-09-19T06:00:00,98880 2017-09-19T07:00:00,94000 2017-09-19T08:00:00,93660 2017-09-19T09:00:00,104185 2017-09-19T10:00:00,119750 2017-09-19T11:00:00,135990 2017-09-19T12:00:00,146455 2017-09-19T13:00:00,139165 2017-09-19T14:00:00,147225 2017-09-19T15:00:00,144935 2017-09-19T16:00:00,151370 2017-09-19T17:00:00,156080 2017-09-19T18:00:00,161385 2017-09-19T19:00:00,165010 2017-09-19T20:00:00,134090 2017-09-19T21:00:00,105585 2017-09-19T22:00:00,92855 2017-09-19T23:00:00,79270 2017-09-20T00:00:00,79980 2017-09-20T01:00:00,78110 2017-09-20T02:00:00,85785 2017-09-20T03:00:00,100010 2017-09-20T04:00:00,123880 2017-09-20T05:00:00,116335 2017-09-20T06:00:00,104290 2017-09-20T07:00:00,101440 2017-09-20T08:00:00,97635 2017-09-20T09:00:00,108265 2017-09-20T10:00:00,121250 2017-09-20T11:00:00,140850 2017-09-20T12:00:00,138555 2017-09-20T13:00:00,140990 2017-09-20T14:00:00,141525 2017-09-20T15:00:00,141590 2017-09-20T16:00:00,140610 2017-09-20T17:00:00,139515 2017-09-20T18:00:00,146215 2017-09-20T19:00:00,142425 2017-09-20T20:00:00,123945 2017-09-20T21:00:00,101360 2017-09-20T22:00:00,88170 2017-09-20T23:00:00,76050 2017-09-21T00:00:00,70335 2017-09-21T01:00:00,72150 2017-09-21T02:00:00,80195 2017-09-21T03:00:00,94945 2017-09-21T04:00:00,121910 2017-09-21T05:00:00,113950 2017-09-21T06:00:00,106495 2017-09-21T07:00:00,97290 2017-09-21T08:00:00,98860 2017-09-21T09:00:00,105635 2017-09-21T10:00:00,114380 2017-09-21T11:00:00,132335 2017-09-21T12:00:00,146630 2017-09-21T13:00:00,141995 2017-09-21T14:00:00,142815 2017-09-21T15:00:00,146020 2017-09-21T16:00:00,152120 2017-09-21T17:00:00,151790 2017-09-21T18:00:00,155665 
2017-09-21T19:00:00,155890 2017-09-21T20:00:00,123395 2017-09-21T21:00:00,103080 2017-09-21T22:00:00,95155 2017-09-21T23:00:00,80285 -------------------------------------------------------------------------------- /Algorithmns/anomaly_detection.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import matplotlib.pyplot as plt 3 | import numpy as np 4 | from sklearn.metrics import r2_score, median_absolute_error, mean_absolute_error 5 | 6 | df1 = pd.read_csv('./ads.csv', index_col=['Time'], parse_dates=['Time']) 7 | df1.iloc[-20] = df1.iloc[-20] * 0.2 8 | 9 | def weighted_average(series, weights): 10 | result = 0.0 11 | weights.reverse() 12 | for n in range(len(weights)): 13 | result += series.iloc[-n-1] * weights[n] 14 | return float(result) 15 | 16 | def plotMovingAverage(series, window, plot_intervals=False, scale=1.96, plot_anomalies=False): 17 | rolling_mean = series.rolling(window=window).mean() 18 | 19 | plt.figure(figsize=(15,5)) 20 | plt.title("Moving average\n window size = {}".format(window)) 21 | plt.plot(rolling_mean, "g", label="Rolling mean trend") 22 | 23 | if plot_intervals: 24 | mae = mean_absolute_error(series[window:], rolling_mean[window:]) 25 | deviation = np.std(series[window:] - rolling_mean[window:]) 26 | lower_bond = rolling_mean - (mae + scale * deviation) 27 | upper_bond = rolling_mean + (mae + scale * deviation) 28 | plt.plot(upper_bond, "r--", label="Upper Bond / Lower Bond") 29 | plt.plot(lower_bond, "r--") 30 | 31 | if plot_anomalies: 32 | anomalies = pd.DataFrame(index=series.index, columns=series.columns) 33 | anomalies[series<lower_bond] = series[series<lower_bond] 34 | anomalies[series>upper_bond] = series[series>upper_bond] 35 | plt.plot(anomalies, "ro", markersize=10) 36 | 37 | plt.plot(series[window:], label="Actual values") 38 | plt.legend(loc="upper left") 39 | plt.grid(True) 40 | plt.show() 41 | 42 | def exponential_smoothing(series, alpha): 43 | result = [series[0]] # first value is same as series 44 | for n in range(1, len(series)): 45 | result.append(alpha * series[n] + (1 - alpha) * result[n-1]) 46 | return result 47 | 48 | def plotExponentialSmoothing(series, alphas): 49 | with plt.style.context('seaborn-white'): 50 | plt.figure(figsize=(15, 7)) 51 | for alpha in alphas: 52 | plt.plot(exponential_smoothing(series, alpha), label="Alpha {}".format(alpha)) 53 | plt.plot(series.values, "c", label = "Actual") 54 | plt.legend(loc="best") 55 | plt.axis('tight') 56 | plt.title("Exponential Smoothing") 57 | plt.grid(True) 58 | plt.show() 59 | 60 | def double_exponential_smoothing(series, alpha, beta): 61 | result = [series[0]] 62 | for n in range(1, len(series)+1): 63 | if n == 1: 64 | level, trend = series[0], series[1] - series[0] 65 | if n >= len(series): # forecasting 66 | value = result[-1] 67 | else: 68 | value = series[n] 69 | last_level, level = level, alpha*value + (1-alpha)*(level+trend) 70 | trend = beta*(level-last_level) + (1-beta)*trend 71 | result.append(level+trend) 72 | return result 73 | 74 | def plotDoubleExponentialSmoothing(series, alphas, betas): 75 | with plt.style.context('seaborn-white'): 76 | plt.figure(figsize=(20, 8)) 77 | for alpha in alphas: 78 | for beta in betas: 79 | plt.plot(double_exponential_smoothing(series, alpha, beta), label="Alpha {}, beta {}".format(alpha, beta)) 80 | plt.plot(series.values, label = "Actual") 81 | plt.legend(loc="best") 82 | plt.axis('tight') 83 | plt.title("Double Exponential Smoothing") 84 | plt.grid(True) 85 | plt.show() 86 | 87 | plotMovingAverage(df1, 4) 88 | plotMovingAverage(df1, 4,
plot_intervals=True) 89 | plotMovingAverage(df1, 4, plot_intervals=True, plot_anomalies=True) 90 | 91 | df2 = pd.read_csv('./currency.csv', index_col=['Time'], parse_dates=['Time']) 92 | plt.figure() 93 | plt.plot(df2.GEMS_GEMS_SPENT) 94 | plt.show() 95 | 96 | plotMovingAverage(df2, 4, plot_intervals=True, plot_anomalies=True) 97 | print (weighted_average(df1, [0.6, 0.3, 0.1])) 98 | plotExponentialSmoothing(df1.Ads, [0.3, 0.05]) 99 | plotExponentialSmoothing(df2.GEMS_GEMS_SPENT, [0.3, 0.05]) 100 | 101 | plotDoubleExponentialSmoothing(df1.Ads, alphas=[0.9, 0.02], betas=[0.9, 0.02]) 102 | plotDoubleExponentialSmoothing(df2.GEMS_GEMS_SPENT, alphas=[0.9, 0.02], betas=[0.9, 0.02]) 103 | 104 | 105 | 106 | """ 107 | 108 | from sklearn.metrics import median_absolute_error, mean_squared_error, mean_squared_log_error 109 | 110 | def mean_absolute_percentage_error(y_true, y_pred): 111 | return np.mean(np.abs((y_true - y_pred) / y_true)) * 100 112 | 113 | def moving_average(series, n): 114 | return np.average(series[-n:]) 115 | 116 | moving_average(df1, 24) 117 | 118 | 119 | https://medium.com/open-machine-learning-course/open-machine-learning-course-topic-9-time-series-analysis-in-python-a270cb05e0b3 120 | https://www.kaggle.com/victorambonati/unsupervised-anomaly-detection 121 | https://medium.com/making-sense-of-data/time-series-next-value-prediction-using-regression-over-a-rolling-window-228f0acae363 122 | https://iwringer.wordpress.com/2015/11/17/anomaly-detection-concepts-and-techniques/ 123 | """ 124 | -------------------------------------------------------------------------------- /Algorithmns/currency.csv: -------------------------------------------------------------------------------- 1 | Time,GEMS_GEMS_SPENT 5/1/17,1199436 5/2/17,1045515 5/3/17,586111 5/4/17,856601 5/5/17,793775 5/6/17,606535 5/7/17,1112763 5/8/17,1121218 5/9/17,813844 5/10/17,903343 5/11/17,863465 5/12/17,639224 5/13/17,1030389 5/14/17,1132645 5/15/17,1018672 5/16/17,1726870 5/17/17,1378430 5/18/17,532950 5/19/17,828238 5/20/17,823948 5/21/17,592549 5/22/17,939337 5/23/17,862611 5/24/17,551557 5/25/17,878375 5/26/17,784535 5/27/17,613603 5/28/17,1054658 5/29/17,1026401 5/30/17,682284 5/31/17,986644 6/1/17,924769 6/2/17,633489 6/3/17,1044957 6/4/17,1088685 6/5/17,798582 6/6/17,1139786 6/7/17,1066560 6/8/17,754706 6/9/17,1199406 6/10/17,1186341 6/11/17,958210 6/12/17,1564553 6/13/17,1470865 6/14/17,1201275 6/15/17,2418723 6/16/17,2123070 6/17/17,978338 6/18/17,1536623 6/19/17,1420586 6/20/17,966259 6/21/17,1232735 6/22/17,1090762 6/23/17,763828 6/24/17,1153383 6/25/17,1074039 6/26/17,733943 6/27/17,1103070 6/28/17,1123779 6/29/17,752524 6/30/17,1123866 7/1/17,1051964 7/2/17,756827 7/3/17,1109486 7/4/17,1059961 7/5/17,691291 7/6/17,985221 7/7/17,932805 7/8/17,641340 7/9/17,1038572 7/10/17,1037868 7/11/17,732303 7/12/17,962492 7/13/17,875898 7/14/17,1029902 7/15/17,1917268 7/16/17,1662445 7/17/17,791812 7/18/17,1061339 7/19/17,968767 7/20/17,685321 7/21/17,1020324 7/22/17,995864 7/23/17,785353 7/24/17,1192613 7/25/17,1068292 7/26/17,710820 7/27/17,1048429 7/28/17,991163 7/29/17,701672 7/30/17,1239717 7/31/17,1261953 8/1/17,857930 8/2/17,1208488 8/3/17,1165563 8/4/17,821228 8/5/17,1304837 8/6/17,1385061 8/7/17,1143864 8/8/17,1420747 8/9/17,1082965 8/10/17,896817 8/11/17,1309864 8/12/17,1389726 8/13/17,1336787 8/14/17,2426361 8/15/17,1893231 8/16/17,795571 8/17/17,1228308 8/18/17,1171824 8/19/17,832872 8/20/17,1647205 8/21/17,1675743 8/22/17,1269681 8/23/17,1904623 8/24/17,1594601 8/25/17,1162089 8/26/17,1754296 8/27/17,1746942 
8/28/17,1325245 8/29/17,1614025 8/30/17,1372725 8/31/17,990184 9/1/17,1492608 9/2/17,1426323 9/3/17,1228686 9/4/17,1735632 9/5/17,1470880 9/6/17,1022235 9/7/17,1466472 9/8/17,1381460 9/9/17,1123100 9/10/17,1908051 9/11/17,1925878 9/12/17,1539409 9/13/17,2566462 9/14/17,2046640 9/15/17,949784 9/16/17,1260130 9/17/17,1025393 9/18/17,795070 9/19/17,1005711 9/20/17,934316 9/21/17,672780 9/22/17,977338 9/23/17,952165 9/24/17,799347 9/25/17,1128242 9/26/17,1004572 9/27/17,722362 9/28/17,1017760 9/29/17,969319 9/30/17,732678 10/1/17,1212104 10/2/17,1190644 10/3/17,845058 10/4/17,1125893 10/5/17,1085597 10/6/17,814176 10/7/17,1241656 10/8/17,1458628 10/9/17,1228912 10/10/17,1489464 10/11/17,1445240 10/12/17,1269546 10/13/17,2209056 10/14/17,1872115 10/15/17,1028471 10/16/17,1434433 10/17/17,1274497 10/18/17,918567 10/19/17,1286976 10/20/17,1211880 10/21/17,977253 10/22/17,1491771 10/23/17,1411582 10/24/17,1016348 10/25/17,1447929 10/26/17,1387572 10/27/17,1057798 10/28/17,1505543 10/29/17,1440709 10/30/17,1134088 10/31/17,1434024 11/1/17,1306378 11/2/17,965258 11/3/17,1316822 11/4/17,985845 11/5/17,931888 11/6/17,1290728 11/7/17,1210535 11/8/17,954986 11/9/17,1363671 11/10/17,1413365 11/11/17,1298822 11/12/17,2328862 11/13/17,1880710 11/14/17,898547 11/15/17,1276048 11/16/17,1206946 11/17/17,906266 11/18/17,1365544 11/19/17,1286705 11/20/17,970724 11/21/17,1308139 11/22/17,1223100 11/23/17,925962 11/24/17,1386423 11/25/17,1319390 11/26/17,995244 11/27/17,1365656 11/28/17,1305871 11/29/17,981010 11/30/17,1425631 12/1/17,1334807 12/2/17,1021410 12/3/17,1763160 12/4/17,1765830 12/5/17,1597298 12/6/17,1627717 12/7/17,1589619 12/8/17,1283460 12/9/17,1907036 12/10/17,1940908 12/11/17,1677208 12/12/17,2984329 12/13/17,2524946 12/14/17,1342763 12/15/17,1904900 12/16/17,1815581 12/17/17,1415851 12/18/17,1826552 12/19/17,1708774 12/20/17,1241392 12/21/17,1737367 12/22/17,1913008 12/23/17,1364683 12/24/17,1478238 12/25/17,1342886 12/26/17,1024933 12/27/17,1430514 12/28/17,1372474 12/29/17,1026572 12/30/17,1417844 12/31/17,1679457 1/1/18,1351801 1/2/18,2023723 1/3/18,1543211 1/4/18,1180192 1/5/18,1673992 1/6/18,1707847 1/7/18,1451361 1/8/18,1998262 1/9/18,2162133 1/10/18,2330069 1/11/18,2995029 1/12/18,2421592 1/13/18,1152460 1/14/18,2203248 1/15/18,2135716 1/16/18,1871435 1/17/18,1764628 1/18/18,1985953 1/19/18,1492338 1/20/18,2209397 1/21/18,2136402 1/22/18,1592552 1/23/18,2035135 1/24/18,1806885 1/25/18,1308255 1/26/18,1875120 1/27/18,1722011 1/28/18,1388952 1/29/18,1918963 1/30/18,1783279 1/31/18,1343029 2/1/18,1930747 2/2/18,1826320 2/3/18,1455626 2/4/18,2294555 2/5/18,2172449 2/6/18,1612939 2/7/18,2117962 2/8/18,2164330 2/9/18,1819026 2/10/18,3242154 2/11/18,2894633 2/12/18,1405753 2/13/18,1842610 2/14/18,1707899 2/15/18,1324588 2/16/18,2090271 2/17/18,2119260 2/18/18,1716590 2/19/18,2398088 2/20/18,2166449 2/21/18,1552313 2/22/18,2175548 2/23/18,2122606 2/24/18,1756394 -------------------------------------------------------------------------------- /Algorithmns/decisionT_randomF.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | import seaborn as sns; sns.set() 4 | import pandas as pd 5 | from sklearn.model_selection import train_test_split 6 | 7 | from sklearn.datasets import make_blobs 8 | 9 | X, y = make_blobs(n_samples=8, centers=2, 10 | random_state=0, cluster_std=1.0) 11 | print (X) 12 | plt.scatter(X[:, 0], X[:, 1], c=y, s=50, cmap='rainbow'); 13 | plt.show() 14 | 15 | 16 | def 
visualize_classifier(model, X, y, ax=None, cmap='rainbow'): 17 | ax = ax or plt.gca() 18 | 19 | # Plot the training points 20 | ax.scatter(X[:, 0], X[:, 1], c=y, s=30, cmap=cmap, 21 | clim=(y.min(), y.max()), zorder=3) 22 | ax.axis('tight') 23 | ax.axis('off') 24 | xlim = ax.get_xlim() 25 | ylim = ax.get_ylim() 26 | 27 | # fit the estimator 28 | model.fit(X, y) 29 | xx, yy = np.meshgrid(np.linspace(*xlim, num=200), 30 | np.linspace(*ylim, num=200)) 31 | Z = model.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape) 32 | 33 | # Create a color plot with the results 34 | n_classes = len(np.unique(y)) 35 | contours = ax.contourf(xx, yy, Z, alpha=0.3, 36 | levels=np.arange(n_classes + 1) - 0.5, 37 | cmap=cmap, clim=(y.min(), y.max()), 38 | zorder=1) 39 | 40 | ax.set(xlim=xlim, ylim=ylim) 41 | plt.show() 42 | 43 | from sklearn.tree import DecisionTreeClassifier 44 | tree = DecisionTreeClassifier().fit(X, y) 45 | visualize_classifier(tree, X, y) 46 | 47 | from sklearn.ensemble import BaggingClassifier 48 | bag = BaggingClassifier(tree, n_estimators=100, max_samples=0.8,random_state=1).fit(X, y) 49 | visualize_classifier(bag, X, y) 50 | 51 | from sklearn.ensemble import RandomForestClassifier 52 | rdm = RandomForestClassifier(n_estimators=100, random_state=0).fit(X, y) 53 | visualize_classifier(rdm, X, y); 54 | 55 | """ 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | https://jakevdp.github.io/PythonDataScienceHandbook/05.08-random-forests.html 73 | https://towardsdatascience.com/random-forests-and-decision-trees-from-scratch-in-python-3e4fa5ae4249 74 | """ 75 | 76 | -------------------------------------------------------------------------------- /Algorithmns/decisiontree.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nithyadurai87/machine_learning_examples/b32ebbfb5f314b08e0e0f4672d1e3ec713110faa/Algorithmns/decisiontree.png -------------------------------------------------------------------------------- /Algorithmns/flowers.csv: -------------------------------------------------------------------------------- 1 | Sepal_length,Sepal_width,Petal_length,Petal_width,Flower 2 | 5.1,3.5,1.4,0.2,Rose 3 | 4.9,3,1.4,0.2,Rose 4 | 4.7,3.2,1.3,0.2,Rose 5 | 4.6,3.1,1.5,0.2,Rose 6 | 5,3.6,1.4,0.3,Rose 7 | 5.4,3.9,1.7,0.4,Rose 8 | 4.6,3.4,1.4,0.3,Rose 9 | 5,3.4,1.5,0.2,Rose 10 | 4.4,2.9,1.4,0.2,Rose 11 | 4.9,3.1,1.5,0.1,Rose 12 | 5.4,3.7,1.5,0.2,Rose 13 | 4.8,3.4,1.6,0.2,Rose 14 | 4.8,3,1.4,0.1,Rose 15 | 4.3,3,1.1,0.1,Rose 16 | 5.8,4,1.2,0.2,Rose 17 | 5.7,4.4,1.5,0.4,Rose 18 | 5.4,3.9,1.3,0.4,Rose 19 | 5.1,3.5,1.4,0.3,Rose 20 | 5.7,3.8,1.7,0.3,Rose 21 | 5.1,3.8,1.5,0.3,Rose 22 | 5.4,3.4,1.7,0.2,Rose 23 | 5.1,3.7,1.5,0.4,Rose 24 | 4.6,3.6,1,0.2,Rose 25 | 5.1,3.3,1.7,0.5,Rose 26 | 4.8,3.4,1.9,0.2,Rose 27 | 5,3,1.6,0.2,Rose 28 | 5,3.4,1.6,0.4,Rose 29 | 5.2,3.5,1.5,0.2,Rose 30 | 5.2,3.4,1.4,0.2,Rose 31 | 4.7,3.2,1.6,0.2,Rose 32 | 4.8,3.1,1.6,0.2,Rose 33 | 5.4,3.4,1.5,0.4,Rose 34 | 5.2,4.1,1.5,0.1,Rose 35 | 5.5,4.2,1.4,0.2,Rose 36 | 4.9,3.1,1.5,0.2,Rose 37 | 5,3.2,1.2,0.2,Rose 38 | 5.5,3.5,1.3,0.2,Rose 39 | 4.9,3.6,1.4,0.1,Rose 40 | 4.4,3,1.3,0.2,Rose 41 | 5.1,3.4,1.5,0.2,Rose 42 | 5,3.5,1.3,0.3,Rose 43 | 4.5,2.3,1.3,0.3,Rose 44 | 4.4,3.2,1.3,0.2,Rose 45 | 5,3.5,1.6,0.6,Rose 46 | 5.1,3.8,1.9,0.4,Rose 47 | 4.8,3,1.4,0.3,Rose 48 | 5.1,3.8,1.6,0.2,Rose 49 | 4.6,3.2,1.4,0.2,Rose 50 | 5.3,3.7,1.5,0.2,Rose 51 | 5,3.3,1.4,0.2,Rose 52 | 7,3.2,4.7,1.4,Jasmin 53 | 6.4,3.2,4.5,1.5,Jasmin 54 | 6.9,3.1,4.9,1.5,Jasmin 
55 | 5.5,2.3,4,1.3,Jasmin 56 | 6.5,2.8,4.6,1.5,Jasmin 57 | 5.7,2.8,4.5,1.3,Jasmin 58 | 6.3,3.3,4.7,1.6,Jasmin 59 | 4.9,2.4,3.3,1,Jasmin 60 | 6.6,2.9,4.6,1.3,Jasmin 61 | 5.2,2.7,3.9,1.4,Jasmin 62 | 5,2,3.5,1,Jasmin 63 | 5.9,3,4.2,1.5,Jasmin 64 | 6,2.2,4,1,Jasmin 65 | 6.1,2.9,4.7,1.4,Jasmin 66 | 5.6,2.9,3.6,1.3,Jasmin 67 | 6.7,3.1,4.4,1.4,Jasmin 68 | 5.6,3,4.5,1.5,Jasmin 69 | 5.8,2.7,4.1,1,Jasmin 70 | 6.2,2.2,4.5,1.5,Jasmin 71 | 5.6,2.5,3.9,1.1,Jasmin 72 | 5.9,3.2,4.8,1.8,Jasmin 73 | 6.1,2.8,4,1.3,Jasmin 74 | 6.3,2.5,4.9,1.5,Jasmin 75 | 6.1,2.8,4.7,1.2,Jasmin 76 | 6.4,2.9,4.3,1.3,Jasmin 77 | 6.6,3,4.4,1.4,Jasmin 78 | 6.8,2.8,4.8,1.4,Jasmin 79 | 6.7,3,5,1.7,Jasmin 80 | 6,2.9,4.5,1.5,Jasmin 81 | 5.7,2.6,3.5,1,Jasmin 82 | 5.5,2.4,3.8,1.1,Jasmin 83 | 5.5,2.4,3.7,1,Jasmin 84 | 5.8,2.7,3.9,1.2,Jasmin 85 | 6,2.7,5.1,1.6,Jasmin 86 | 5.4,3,4.5,1.5,Jasmin 87 | 6,3.4,4.5,1.6,Jasmin 88 | 6.7,3.1,4.7,1.5,Jasmin 89 | 6.3,2.3,4.4,1.3,Jasmin 90 | 5.6,3,4.1,1.3,Jasmin 91 | 5.5,2.5,4,1.3,Jasmin 92 | 5.5,2.6,4.4,1.2,Jasmin 93 | 6.1,3,4.6,1.4,Jasmin 94 | 5.8,2.6,4,1.2,Jasmin 95 | 5,2.3,3.3,1,Jasmin 96 | 5.6,2.7,4.2,1.3,Jasmin 97 | 5.7,3,4.2,1.2,Jasmin 98 | 5.7,2.9,4.2,1.3,Jasmin 99 | 6.2,2.9,4.3,1.3,Jasmin 100 | 5.1,2.5,3,1.1,Jasmin 101 | 5.7,2.8,4.1,1.3,Jasmin 102 | 6.3,3.3,6,2.5,Lotus 103 | 5.8,2.7,5.1,1.9,Lotus 104 | 7.1,3,5.9,2.1,Lotus 105 | 6.3,2.9,5.6,1.8,Lotus 106 | 6.5,3,5.8,2.2,Lotus 107 | 7.6,3,6.6,2.1,Lotus 108 | 4.9,2.5,4.5,1.7,Lotus 109 | 7.3,2.9,6.3,1.8,Lotus 110 | 6.7,2.5,5.8,1.8,Lotus 111 | 7.2,3.6,6.1,2.5,Lotus 112 | 6.5,3.2,5.1,2,Lotus 113 | 6.4,2.7,5.3,1.9,Lotus 114 | 6.8,3,5.5,2.1,Lotus 115 | 5.7,2.5,5,2,Lotus 116 | 5.8,2.8,5.1,2.4,Lotus 117 | 6.4,3.2,5.3,2.3,Lotus 118 | 6.5,3,5.5,1.8,Lotus 119 | 7.7,3.8,6.7,2.2,Lotus 120 | 7.7,2.6,6.9,2.3,Lotus 121 | 6,2.2,5,1.5,Lotus 122 | 6.9,3.2,5.7,2.3,Lotus 123 | 5.6,2.8,4.9,2,Lotus 124 | 7.7,2.8,6.7,2,Lotus 125 | 6.3,2.7,4.9,1.8,Lotus 126 | 6.7,3.3,5.7,2.1,Lotus 127 | 7.2,3.2,6,1.8,Lotus 128 | 6.2,2.8,4.8,1.8,Lotus 129 | 6.1,3,4.9,1.8,Lotus 130 | 6.4,2.8,5.6,2.1,Lotus 131 | 7.2,3,5.8,1.6,Lotus 132 | 7.4,2.8,6.1,1.9,Lotus 133 | 7.9,3.8,6.4,2,Lotus 134 | 6.4,2.8,5.6,2.2,Lotus 135 | 6.3,2.8,5.1,1.5,Lotus 136 | 6.1,2.6,5.6,1.4,Lotus 137 | 7.7,3,6.1,2.3,Lotus 138 | 6.3,3.4,5.6,2.4,Lotus 139 | 6.4,3.1,5.5,1.8,Lotus 140 | 6,3,4.8,1.8,Lotus 141 | 6.9,3.1,5.4,2.1,Lotus 142 | 6.7,3.1,5.6,2.4,Lotus 143 | 6.9,3.1,5.1,2.3,Lotus 144 | 5.8,2.7,5.1,1.9,Lotus 145 | 6.8,3.2,5.9,2.3,Lotus 146 | 6.7,3.3,5.7,2.5,Lotus 147 | 6.7,3,5.2,2.3,Lotus 148 | 6.3,2.5,5,1.9,Lotus 149 | 6.5,3,5.2,2,Lotus 150 | 6.2,3.4,5.4,2.3,Lotus 151 | 5.9,3,5.1,1.8,Lotus 152 | -------------------------------------------------------------------------------- /Algorithmns/flv.csv: -------------------------------------------------------------------------------- 1 | Sepal_length,Sepal_width,Petal_length,Petal_width,Flower 2 | 5,3,1,3,Car 3 | 4,3,1,0,Bus 4 | 4,3,1,3,Train 5 | 4,3,1,0,Bus 6 | 5,3,1,0,Train 7 | 4,2,1,3,Bus 8 | 5,4,1,0,Bus 9 | 5,2,4,1,Train 10 | 6,2,4,1,Train 11 | 6,3,5,2,Car 12 | (['Car','Bus','Train','Bus','Train','Bus','Bus','Train','Train','Car'], name='Vehicle') 13 | -------------------------------------------------------------------------------- /Algorithmns/matrix.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nithyadurai87/machine_learning_examples/b32ebbfb5f314b08e0e0f4672d1e3ec713110faa/Algorithmns/matrix.png -------------------------------------------------------------------------------- /Algorithmns/notes: 
-------------------------------------------------------------------------------- 1 | 2 | As an example, let us take the data points (1,1), (2,2) and (3,3) and predict their values with a simple predictor. The equation used for this prediction depends on two key parameters, theta0 and theta1, so different theta values give different sets of predictions: each choice of theta produces its own predicted values. Of these predictions, whichever lies closest to the true values is the one whose theta values we finally adopt; here one choice of theta comes slightly closer to the true values, so for now we treat those theta values as the best ones for prediction. Next we can measure how large the difference between the true values and the predictions is. By collecting these individual differences and averaging them, we can state by how much, on the whole, the predictor is off. While averaging, any negative difference would get subtracted instead of added; to avoid this problem each difference is squared, and the squares are then summed and averaged, which is why this measure is also called the mean squared error. If the difference found this way still seems too large, we have to work out what the theta values should be to reduce it further, and what helps us do that is gradient descent, which we will look at later. In the listing above we simply tried a handful of theta values, computed the cost for each and picked the smallest. Because the number of data points is small, we can easily tell by inspection which value comes closest; but in practice, when there are thousands of data points, it is the cost function that tells us how far off each prediction is. 3 | 4 | The cost has been computed for thetas of various values and drawn here as a graph. To show all three together - theta0, theta1 and the cost computed from them - a three-dimensional plot is used; such plots appear either as a bowl shape or as a set of elongated ellipses. 5 | 6 | Here values are assigned to the thetas, and the different cost values show up as different rings. By locating the centre of the rings we can therefore find the theta values that produce the least difference, and this is exactly the job gradient descent does. First it gives the thetas some starting value and computes the cost for it; then, from that value, it steps downwards a little at a time on every iteration, computing smaller and smaller costs as it goes. When the number of iterations we specified runs out, or when successive iterations keep returning the same values, it means we have found the theta values that give the least difference. The update equation for this is as follows. 7 | 8 | In this update, the values of both thetas must be reduced at the same time at the end of every iteration; this is what is called a simultaneous update. The learning rate indicates at what rate the theta values should be reduced. Whether the cost surface is a bowl (find the bottom of the bowl) or a set of ellipses (find the centre of the ellipses), that is what this procedure accomplishes. If the learning rate is very small, only a tiny step is taken from the current point on each iteration, so an unnecessarily large number of iterations is wasted before the bottom of the bowl is reached. If, on the other hand, it is too large, then even when the current value is already very close to the bottom, the thetas are changed by so much that instead of settling at the centre the point jumps off somewhere else, and on the following iterations it keeps overshooting and never reaches the centre. So the value we choose should be neither too small nor too large, but just right. 9 | 10 | For linear regression the cost function always takes this bowl-like shape, which is what is meant by a convex function. Predicting with just a single factor is simple linear regression, while predicting with more than one factor is multiple linear regression: predicting a house's price from one measurement alone (say, its size) is the former, whereas predicting it from several factors such as the number of rooms and how many years old the house is, is the latter. Because the equation then has many numbers arranged in rows, we need to learn a few basic things about matrices. Numbers arranged in more than one row in this way are called matrices, and so the matrices ... 11 | 12 | (A short worked sketch of the theta / cost / gradient-descent procedure described here appears after the plot_svm_non_linear.py listing below.)
-------------------------------------------------------------------------------- /Algorithmns/notes.odp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nithyadurai87/machine_learning_examples/b32ebbfb5f314b08e0e0f4672d1e3ec713110faa/Algorithmns/notes.odp -------------------------------------------------------------------------------- /Algorithmns/plot_svm_non_linear.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from matplotlib import pyplot as plt 3 | from sklearn import svm 4 | 5 | def nonlinear_model(rseed=42, n_samples=30): 6 | radius = 40 * np.random.random(n_samples) 7 | far_pts = radius > 20 8 | radius[far_pts] *= 1.2 9 | radius[~far_pts] *= 1.1 10 | 11 | theta = np.random.random(n_samples) * np.pi * 2 12 | 13 | data = np.empty((n_samples, 2)) 14 | data[:, 0] = radius * np.cos(theta) 15 | data[:, 1] = radius * np.sin(theta) 16 | 17 | labels = np.ones(n_samples) 18 | labels[far_pts] = -1 19 | 20 | return data, labels 21 | 22 | 23 | X, y = nonlinear_model() 24 | clf = svm.SVC(kernel='rbf', gamma=0.001, coef0=0, degree=3) 25 | clf.fit(X, y) 26 | 27 | plt.figure(figsize=(6, 4)) 28 | ax = plt.subplot(1, 1, 1, xticks=[], yticks=[]) 29 | ax.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.bone, zorder=2) 30 | 31 | ax.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], 32 | s=80, edgecolors="k", facecolors="none") 33 | 34 | delta = 1 35 | y_min, y_max = -50, 50 36 | x_min, x_max = -50, 50 37 | x = np.arange(x_min, x_max + delta, delta) 38 | y = np.arange(y_min, y_max + delta, delta) 39 | X1, X2 = np.meshgrid(x, y) 40 | Z = clf.decision_function(np.c_[X1.ravel(), X2.ravel()]) 41 | Z = Z.reshape(X1.shape) 42 | 43 | ax.contour(X1, X2, Z, [-1.0, 0.0, 1.0], colors='k',linestyles=['dashed', 'solid', 'dotted'], zorder=1) 44 | 45 | plt.show() 46 |
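The notes file above walks through trying different theta values, measuring the squared-error cost, and then letting gradient descent drive that cost down at a chosen learning rate. The following is a minimal, illustrative sketch of that procedure (it is not one of the repository's files), assuming the (1,1), (2,2), (3,3) data the notes mention; the names theta0, theta1 and alpha are placeholders chosen here.

import numpy as np

# the small example dataset from the notes
x = np.array([1.0, 2.0, 3.0])
y = np.array([1.0, 2.0, 3.0])

theta0, theta1 = 0.0, 0.0   # arbitrary starting guess for the parameters
alpha = 0.1                 # learning rate: neither too small nor too large
m = len(x)

for _ in range(100):
    predictions = theta0 + theta1 * x        # hypothesis h(x) = theta0 + theta1 * x
    error = predictions - y
    cost = np.sum(error ** 2) / (2 * m)      # squared-error cost
    # simultaneous update of both parameters, as the notes describe
    theta0 = theta0 - alpha * np.sum(error) / m
    theta1 = theta1 - alpha * np.sum(error * x) / m

print(theta0, theta1, cost)   # theta1 approaches 1 and the cost approaches 0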
-------------------------------------------------------------------------------- /Algorithmns/randomforest.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nithyadurai87/machine_learning_examples/b32ebbfb5f314b08e0e0f4672d1e3ec713110faa/Algorithmns/randomforest.png -------------------------------------------------------------------------------- /Algorithmns/spam.csv: -------------------------------------------------------------------------------- 1 | label,message 2 | ham,What you doing?how are you? 3 | ham,Ok lar... Joking wif u oni... 4 | ham,dun say so early hor... U c already then say... 5 | ham,MY NO. IN LUTON 0125698789 RING ME IF UR AROUND! H* 6 | ham,Siva is in hostel aha:-. 7 | ham,Cos i was out shopping wif darren jus now n i called him 2 ask wat present he wan lor. Then he started guessing who i was wif n he finally guessed darren lor. 8 | spam,FreeMsg: Txt: CALL to No: 86888 & claim your reward of 3 hours talk time to use from your phone now! ubscribe6GBP/ mnth inc 3hrs 16 stop?txtStop 9 | spam,Sunshine Quiz! Win a super Sony DVD recorder if you canname the capital of Australia? Text MQUIZ to 82277. B 10 | spam,URGENT! Your Mobile No 07808726822 was awarded a L2000 Bonus Caller Prize on 02/09/03! This is our 2nd attempt to contact YOU! Call 0871-872-9758 BOX95QU 11 | -------------------------------------------------------------------------------- /Algorithmns/sss.csv: -------------------------------------------------------------------------------- 1 | 61,Jasmin 2 | 92,Jasmin 3 | 112,Lotus 4 | 2,Rose 5 | 141,Lotus 6 | 43,Rose 7 | 10,Rose 8 | 60,Jasmin 9 | 116,Lotus 10 | 144,Lotus 11 | 119,Lotus 12 | 108,Lotus 13 | 69,Jasmin 14 | 135,Lotus 15 | 56,Jasmin 16 | 80,Jasmin 17 | 123,Lotus 18 | 133,Lotus 19 | 106,Lotus 20 | 146,Lotus 21 | 50,Jasmin 22 | 147,Lotus 23 | 85,Jasmin 24 | 30,Rose 25 | 101,Lotus 26 | 94,Jasmin 27 | 64,Jasmin 28 | 89,Jasmin 29 | 91,Jasmin 30 | 125,Lotus 31 | 48,Rose 32 | 13,Rose 33 | 111,Lotus 34 | 95,Jasmin 35 | 20,Rose 36 | 15,Rose 37 | 52,Jasmin 38 | 3,Rose 39 | 149,Lotus 40 | 98,Jasmin 41 | 6,Rose 42 | 68,Jasmin 43 | 109,Lotus 44 | 96,Jasmin 45 | 12,Rose 46 | 102,Lotus 47 | 120,Lotus 48 | 104,Lotus 49 | 128,Lotus 50 | 46,Rose 51 | 11,Rose 52 | 110,Lotus 53 | 124,Lotus 54 | 41,Rose 55 | 148,Lotus 56 | 1,Rose 57 | 113,Lotus 58 | 139,Lotus 59 | 42,Rose 60 | 4,Rose 61 | 129,Lotus 62 | 17,Rose 63 | 38,Rose 64 | 5,Rose 65 | 53,Jasmin 66 | 143,Lotus 67 | 105,Lotus 68 | 0,Rose 69 | 34,Rose 70 | 28,Rose 71 | 55,Jasmin 72 | 75,Jasmin 73 | 35,Rose 74 | 23,Rose 75 | 74,Jasmin 76 | 31,Rose 77 | 118,Lotus 78 | 57,Jasmin 79 | 131,Lotus 80 | 65,Jasmin 81 | 32,Rose 82 | 138,Lotus 83 | 14,Rose 84 | 122,Lotus 85 | 19,Rose 86 | 29,Rose 87 | 130,Lotus 88 | 49,Rose 89 | 136,Lotus 90 | 99,Jasmin 91 | 82,Jasmin 92 | 79,Jasmin 93 | 115,Lotus 94 | 145,Lotus 95 | 72,Jasmin 96 | 77,Jasmin 97 | 25,Rose 98 | 81,Jasmin 99 | 140,Lotus 100 | 142,Lotus 101 | 39,Rose 102 | 58,Jasmin 103 | 88,Jasmin 104 | 70,Jasmin 105 | 87,Jasmin 106 | 36,Rose 107 | 21,Rose 108 | 9,Rose 109 | 103,Lotus 110 | 67,Jasmin 111 | 117,Lotus 112 | 47,Rose 113 | -------------------------------------------------------------------------------- /Algorithmns/train1.csv: -------------------------------------------------------------------------------- 1 | Loan_ID,Gender,Married,Dependents,Education,Self_Employed,ApplicantIncome,CoapplicantIncome,LoanAmount,Loan_Amount_Term,Credit_History,Property_Area,Loan_Status 2 | 
LP001002,Male,No,0,Graduate,No,5849,0,,360,1,Urban,Y 3 | LP001003,Male,Yes,1,Graduate,No,4583,1508,128,360,1,Rural,N 4 | LP001005,Male,Yes,0,Graduate,Yes,3000,0,66,360,1,Urban,Y 5 | LP001006,Male,Yes,0,Not Graduate,No,2583,2358,120,360,1,Urban,Y 6 | LP001008,Male,No,0,Graduate,No,6000,0,141,360,1,Urban,Y 7 | LP001011,Male,Yes,2,Graduate,Yes,5417,4196,267,360,1,Urban,Y 8 | LP001013,Male,Yes,0,Not Graduate,No,2333,1516,95,360,1,Urban,Y 9 | LP001014,Male,Yes,3+,Graduate,No,3036,2504,158,360,0,Semiurban,N 10 | LP001020,Male,Yes,1,Graduate,No,12841,10968,349,360,1,Semiurban,N -------------------------------------------------------------------------------- /Algorithmns/tree.dot: -------------------------------------------------------------------------------- 1 | digraph Tree { 2 | node [shape=box, style="filled, rounded", color="black", fontname=helvetica] ; 3 | edge [fontname=helvetica] ; 4 | 0 [label="Petal_width <= 0.8\ngini = 0.66\nsamples = 64\nvalue = [45, 32, 35]\nclass = Lotus", fillcolor="#e5813921"] ; 5 | 1 [label="gini = 0.0\nsamples = 23\nvalue = [0, 0, 35]\nclass = Rose", fillcolor="#8139e5ff"] ; 6 | 0 -> 1 [labeldistance=2.5, labelangle=45, headlabel="True"] ; 7 | 2 [label="Petal_width <= 1.75\ngini = 0.49\nsamples = 41\nvalue = [45, 32, 0]\nclass = Lotus", fillcolor="#e581394a"] ; 8 | 0 -> 2 [labeldistance=2.5, labelangle=-45, headlabel="False"] ; 9 | 3 [label="Sepal_length <= 7.05\ngini = 0.13\nsamples = 22\nvalue = [41, 3, 0]\nclass = Lotus", fillcolor="#e58139ec"] ; 10 | 2 -> 3 ; 11 | 4 [label="Petal_width <= 1.65\ngini = 0.05\nsamples = 21\nvalue = [41, 1, 0]\nclass = Lotus", fillcolor="#e58139f9"] ; 12 | 3 -> 4 ; 13 | 5 [label="gini = 0.0\nsamples = 19\nvalue = [37, 0, 0]\nclass = Lotus", fillcolor="#e58139ff"] ; 14 | 4 -> 5 ; 15 | 6 [label="Sepal_width <= 2.75\ngini = 0.32\nsamples = 2\nvalue = [4, 1, 0]\nclass = Lotus", fillcolor="#e58139bf"] ; 16 | 4 -> 6 ; 17 | 7 [label="gini = 0.0\nsamples = 1\nvalue = [0, 1, 0]\nclass = Jasmin", fillcolor="#39e581ff"] ; 18 | 6 -> 7 ; 19 | 8 [label="gini = 0.0\nsamples = 1\nvalue = [4, 0, 0]\nclass = Lotus", fillcolor="#e58139ff"] ; 20 | 6 -> 8 ; 21 | 9 [label="gini = 0.0\nsamples = 1\nvalue = [0, 2, 0]\nclass = Jasmin", fillcolor="#39e581ff"] ; 22 | 3 -> 9 ; 23 | 10 [label="Petal_length <= 4.85\ngini = 0.21\nsamples = 19\nvalue = [4, 29, 0]\nclass = Jasmin", fillcolor="#39e581dc"] ; 24 | 2 -> 10 ; 25 | 11 [label="Sepal_width <= 3.1\ngini = 0.32\nsamples = 2\nvalue = [4, 1, 0]\nclass = Lotus", fillcolor="#e58139bf"] ; 26 | 10 -> 11 ; 27 | 12 [label="gini = 0.0\nsamples = 1\nvalue = [0, 1, 0]\nclass = Jasmin", fillcolor="#39e581ff"] ; 28 | 11 -> 12 ; 29 | 13 [label="gini = 0.0\nsamples = 1\nvalue = [4, 0, 0]\nclass = Lotus", fillcolor="#e58139ff"] ; 30 | 11 -> 13 ; 31 | 14 [label="gini = 0.0\nsamples = 17\nvalue = [0, 28, 0]\nclass = Jasmin", fillcolor="#39e581ff"] ; 32 | 10 -> 14 ; 33 | } -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:20.04 2 | COPY . 
/app 3 | WORKDIR /app 4 | RUN apt-get update 5 | RUN apt-get install python3-pip -y 6 | RUN pip3 install -r requirements.txt 7 | ENTRYPOINT ["python3"] 8 | CMD ["09_flask_api.py"] 9 | -------------------------------------------------------------------------------- /ParallelCoordinates.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nithyadurai87/machine_learning_examples/b32ebbfb5f314b08e0e0f4672d1e3ec713110faa/ParallelCoordinates.jpg -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Example code for Machine Learning with scikit-learn python library. 2 | 3 | License : GNU GPL 4 | 5 | -------------------------------------------------------------------------------- /index.html: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nithyadurai87/machine_learning_examples/b32ebbfb5f314b08e0e0f4672d1e3ec713110faa/index.html -------------------------------------------------------------------------------- /metrics.csv: -------------------------------------------------------------------------------- 1 | ,OverallQual,TotalBsmtSF,1stFlrSF,GrLivArea,FullBath,TotRmsAbvGrd,Fireplaces,GarageCars,GarageArea,SalePrice 2 | count,1460.0,1460.0,1460.0,1460.0,1460.0,1460.0,1460.0,1460.0,1460.0,1460.0 3 | mean,6.0993150684931505,1057.4294520547944,1162.626712328767,1515.463698630137,1.5650684931506849,6.517808219178082,0.613013698630137,1.7671232876712328,472.9801369863014,182187.14520547946 4 | std,1.3829965467415923,438.7053244594705,386.5877380410738,525.4803834232027,0.5509158012954318,1.625393290584064,0.6446663863122344,0.7473150101111116,213.80484145338076,55619.719125879055 5 | min,1.0,0.0,334.0,334.0,0.0,2.0,0.0,0.0,0.0,34900.0 6 | 25%,5.0,795.75,882.0,1129.5,1.0,5.0,0.0,1.0,334.5,153459.25 7 | 50%,6.0,991.5,1087.0,1464.0,2.0,6.0,1.0,2.0,480.0,169750.0 8 | 75%,7.0,1298.25,1391.25,1776.75,2.0,7.0,1.0,2.0,576.0,208705.25 9 | max,10.0,6110.0,4692.0,5642.0,3.0,14.0,3.0,4.0,1418.0,555000.0 10 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pandas 2 | numpy 3 | flask 4 | scikit-learn==1.5.0 5 | joblib 6 | --------------------------------------------------------------------------------
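For completeness, here is one way the container defined by the Dockerfile above could be exercised once it is running. This is only an assumed, illustrative client: the route name ("/predict") and the port (5000, Flask's default) are guesses, since they depend on what 09_flask_api.py actually defines; check that file and adjust before using it. Only the standard library is used, so nothing beyond requirements.txt is needed on the client side.

import json
import urllib.request

# Load the sample request body shipped with the repository.
with open("08_input.json", "rb") as f:
    payload = f.read()

# Hypothetical endpoint: adjust the route and port to match 09_flask_api.py.
req = urllib.request.Request(
    "http://localhost:5000/predict",
    data=payload,
    headers={"Content-Type": "application/json"},
)

with urllib.request.urlopen(req) as resp:
    print(json.loads(resp.read().decode("utf-8")))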