├── dimensionality-reduction ├── __init__.py └── pca.py ├── model_selection ├── __init__.py ├── base.py └── crossval.py ├── naive_bayes ├── __init__.py ├── gaussian.py └── multinomial.py ├── tree ├── __init__.py ├── regressors.py └── classifiers.py ├── neighbors ├── __init__.py ├── regressors.py └── classifiers.py ├── linear_model ├── __init__.py ├── classifiers.py └── regressors.py ├── cluster ├── __init__.py ├── partitioning.py ├── densitybased.py └── hierarchical.py ├── preprocessing ├── __init__.py ├── encoder.py └── scaler.py ├── ensemble ├── __init__.py ├── voting.py ├── forest.py └── boosting.py ├── __init__.py ├── metrics ├── __init__.py ├── regression.py ├── cluster.py └── classification.py ├── test ├── buildTest.py ├── data_regression.csv └── data_classification.txt ├── LICENSE ├── .gitignore └── README.md /dimensionality-reduction/__init__.py: -------------------------------------------------------------------------------- 1 | from .pca import pca -------------------------------------------------------------------------------- /model_selection/__init__.py: -------------------------------------------------------------------------------- 1 | from .base import train_test_split 2 | from .crossval import KFold 3 | 4 | __all__ = ['train_test_split','KFold'] -------------------------------------------------------------------------------- /naive_bayes/__init__.py: -------------------------------------------------------------------------------- 1 | from .gaussian import GaussianNB 2 | from .multinomial import MultinomialNB 3 | 4 | __all__ = ['GaussianNB', 'MultinomialNB'] -------------------------------------------------------------------------------- /tree/__init__.py: -------------------------------------------------------------------------------- 1 | from .regressors import DecisionTreeRegressor 2 | from .classifiers import DecisionTreeClassifier 3 | 4 | __all__ = ['DecisionTreeRegressor','DecisionTreeClassifier'] -------------------------------------------------------------------------------- /neighbors/__init__.py: -------------------------------------------------------------------------------- 1 | from .regressors import KNeighborsRegressor 2 | from .classifiers import KNeighborsClassifier 3 | 4 | 5 | __all__ = ['KNeighborsRegressor', 'KNeighborsClassifier'] -------------------------------------------------------------------------------- /linear_model/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | from .regressors import SGDRegressor, LinearRegression 4 | from .classifiers import SGDClassifier 5 | 6 | 7 | __all__ = ['LinearRegression','SGDRegressor','SGDClassifier'] -------------------------------------------------------------------------------- /cluster/__init__.py: -------------------------------------------------------------------------------- 1 | from .partitioning import KMeans 2 | from .hierarchical import AgglomerativeClustering, MeanShift 3 | from .densitybased import DBSCAN 4 | 5 | __all__ = ['KMeans','AgglomerativeClustering','DBSCAN','MeanShift'] -------------------------------------------------------------------------------- /preprocessing/__init__.py: -------------------------------------------------------------------------------- 1 | from .scaler import StandardScaler, MinMaxScaler, RobustScaler 2 | from .encoder import LabelEncoder, OneHotEncoder 3 | 4 | 5 | __all__ = ['StandardScaler', 'MinMaxScaler', 'RobustScaler', 6 | 'LabelEncoder', 'OneHotEncoder']
-------------------------------------------------------------------------------- /ensemble/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | from .forest import RandomForestClassifier, RandomForestRegressor 3 | from .boosting import GradientBoostingRegressor, GradientBoostingClassifier 4 | from .voting import VotingClassifier 5 | 6 | __all__ = ['RandomForestClassifier','RandomForestRegressor','VotingClassifier', 7 | 'GradientBoostingRegressor','GradientBoostingClassifier'] -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | 2 | 3 | from . import linear_model 4 | from . import tree 5 | from . import cluster 6 | from . import naive_bayes 7 | from . import neighbors 8 | from . import ensemble 9 | from . import model_selection 10 | from . import metrics 11 | from . import preprocessing 12 | 13 | 14 | __all__ = [ 'linear_model', 'model_selection', 'metrics', 'tree', 'cluster', 15 | 'neighbors', 'ensemble', 'preprocessing', 'naive_bayes' ] -------------------------------------------------------------------------------- /metrics/__init__.py: -------------------------------------------------------------------------------- 1 | from .regression import root_mean_squared_error, mean_squared_error, mean_absolute_error, r2_score 2 | from .classification import accuracy_score, confusion_matrix, roc_auc_score, roc_curve 3 | from .classification import precision_score, recall_score, sensitivity_score, specificity_score, f1_score 4 | from .cluster import adjusted_rand_score 5 | 6 | 7 | __all__ = ['root_mean_squared_error','mean_squared_error','mean_absolute_error','r2_score','accuracy_score','confusion_matrix', 8 | 'roc_auc_score', 'roc_curve', 'precision_score', 'recall_score', 'sensitivity_score', 'specificity_score', 'f1_score', 9 | 'adjusted_rand_score'] -------------------------------------------------------------------------------- /test/buildTest.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | import os 4 | 5 | def train_test_split(X,Y,test_size=None,seed=5): 6 | """ 7 | Custom Train Test split function 8 | """ 9 | assert test_size is not None, "test_size cannot be None" 10 | np.random.seed(seed) 11 | indexes = np.random.choice([False,True],size=len(X),p=[test_size,1-test_size]) 12 | return X[indexes],X[~indexes],Y[indexes],Y[~indexes] 13 | 14 | def getRegData(path): 15 | reg = pd.read_csv(path) 16 | X = reg.drop('Y',axis=1).values 17 | y = reg.Y.values 18 | return train_test_split(X,y,test_size=0.3,seed=7) 19 | 20 | def getClassiData(path): 21 | cls = pd.read_csv(path) 22 | X = cls.drop('Y',axis=1).values 23 | y = cls.Y.values 24 | return train_test_split(X,y,test_size=0.3,seed=7) -------------------------------------------------------------------------------- /model_selection/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | Author : Aditya Jain 3 | Contact : https://adityajain.me 4 | """ 5 | 6 | 7 | import numpy as np 8 | 9 | def train_test_split(X,Y,test_size=None,random_state=5): 10 | """ 11 | Train Test split function 12 | 13 | Parameters 14 | ---------- 15 | 16 | X : numpy array, independent variables 17 | 18 | Y : numpy array, dependent variables 19 | 20 | test_size : float, fraction of samples to use as the test split 21 | 22 | random_state : integer, random seed 23 | 24 | Returns 25 | ------- 26 | 
X_train, X_test, Y_train, Y_test 27 | 28 | """ 29 | assert test_size is not None, "test_size cannot be None" 30 | np.random.seed(random_state) 31 | indexes = np.random.choice([False,True],size=len(X),p=[test_size,1-test_size]) 32 | return X[indexes],X[~indexes],Y[indexes],Y[~indexes] -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Aditya Jain 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /dimensionality-reduction/pca.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from numpy import linalg as la 3 | 4 | 5 | class pca(): 6 | 7 | """ 8 | PCA is a dimensionality reduction technique.
9 | Linear dimensionality reduction using an eigendecomposition of the covariance matrix of the data to project it to a lower dimensional space 10 | 11 | """ 12 | 13 | def __init__(self): 14 | self.eigval=None 15 | self.eigvct=None 16 | self.covar=None 17 | self.princicomp=None 18 | 19 | def cov(self,x): 20 | 21 | """ 22 | first we have to find the covariance matrix of the given array/dataframe 23 | 24 | """ 25 | self.covar=np.cov(x, rowvar=False) # rows are samples, columns are features 26 | 27 | return self.covar 28 | 29 | def eigen(self,x): 30 | 31 | """ 32 | Second step is to find eigenvalues and eigenvectors of the covariance matrix 33 | 34 | """ 35 | self.eigval, self.eigvct = la.eig(self.covar) 36 | 37 | return self.eigval,self.eigvct 38 | 39 | def ncomp(self,numcomponents): 40 | 41 | """ 42 | Method to choose the number of principal components 43 | 44 | """ 45 | order = np.argsort(self.eigval)[::-1] # eigenvalue indices, largest first 46 | self.princicomp = self.eigvct[:, order[:numcomponents]].T # one component per row 47 | 48 | """ 49 | For example, if numcomponents=2 then this method returns the eigenvectors (one per row) belonging to the 2 largest eigenvalues 50 | 51 | """ 52 | 53 | return self.princicomp 54 | -------------------------------------------------------------------------------- /metrics/regression.py: -------------------------------------------------------------------------------- 1 | """ 2 | Author : Aditya Jain 3 | Contact : https://adityajain.me 4 | """ 5 | 6 | import numpy as np 7 | 8 | def root_mean_squared_error(y_true,y_pred): 9 | """ 10 | Compute root mean square error 11 | 12 | Parameters 13 | ---------- 14 | y_pred : predicted values 15 | 16 | y_true : true values 17 | 18 | Returns 19 | ------- 20 | rmse 21 | 22 | """ 23 | loss = np.square(y_pred-y_true) 24 | cost = np.sqrt( np.mean(loss) ) 25 | return cost 26 | 27 | 28 | def mean_squared_error(y_true,y_pred): 29 | """ 30 | Compute mean square error 31 | 32 | Parameters 33 | ---------- 34 | y_pred : predicted values 35 | 36 | y_true : true values 37 | 38 | Returns 39 | ------- 40 | mse 41 | 42 | """ 43 | loss = np.square(y_pred-y_true) 44 | cost = np.mean(loss) 45 | return cost 46 | 47 | 48 | def mean_absolute_error(y_true,y_pred): 49 | """ 50 | Compute mean absolute error 51 | 52 | Parameters 53 | ---------- 54 | y_pred : predicted values 55 | 56 | y_true : true values 57 | 58 | Returns 59 | ------- 60 | mae 61 | 62 | """ 63 | loss = abs(y_pred-y_true) 64 | cost = np.mean(loss) 65 | return cost 66 | 67 | def r2_score(y_true,y_pred): 68 | """ 69 | Compute Coefficient of Determination, r2 score 70 | 71 | r2_score = 1 - RSS/TSS = 1 - sum((y_true - y_pred)^2) / sum((y_true - y_true.mean())^2) 72 | 73 | Parameters 74 | ---------- 75 | y_pred : predicted values 76 | 77 | y_true : true values 78 | 79 | Returns 80 | ------- 81 | r2_score 82 | """ 83 | return 1-(np.sum((( y_true - y_pred)**2))/np.sum((y_true-np.mean(y_true))**2)) -------------------------------------------------------------------------------- /model_selection/crossval.py: -------------------------------------------------------------------------------- 1 | """ 2 | Author : Aditya Jain 3 | Contact : https://adityajain.me 4 | """ 5 | 6 | import numpy as np 7 | 8 | class KFold(): 9 | """ 10 | K-Folds cross-validator 11 | 12 | Provides train/test indices to split data into train/test sets. Split dataset into k consecutive folds (without shuffling by default). 13 | 14 | Parameters 15 | ---------- 16 | n_splits : int, default=3, Number of folds. Must be at least 2. 17 | 18 | shuffle : boolean, optional Whether to shuffle the data before splitting into batches.
19 | 20 | random_state : int, RandomState instance or None, optional, default=None 21 | 22 | """ 23 | def __init__(self, n_splits=3, shuffle=False, random_state=None): 24 | assert n_splits > 1, "Minimum splits must be greater than 1" 25 | self.__k = n_splits 26 | self.__shuffle = shuffle 27 | np.random.seed(random_state) 28 | 29 | def split(self,X): 30 | """ 31 | Generate indices to split data into training and test set. 32 | 33 | Parameters 34 | ---------- 35 | X : array-like, Training data 36 | 37 | Returns 38 | ------- 39 | train array indices, test array indices 40 | 41 | """ 42 | length = len(X) 43 | indices = np.arange(0,length,1) 44 | if self.__shuffle: np.random.shuffle(indices) 45 | last_index, split_length = 0, int(np.ceil(length/self.__k)) 46 | array = [] 47 | while min(last_index,last_index+split_length) < length: 48 | array.append( indices[ last_index : min(last_index+split_length, length)] ) 49 | last_index = last_index+split_length 50 | for i in range(len(array)): 51 | yield np.concatenate(array[:i]+array[i+1:]), array[i] # all remaining folds form the train indices 52 | 53 | 54 | -------------------------------------------------------------------------------- /metrics/cluster.py: -------------------------------------------------------------------------------- 1 | """ 2 | Author : Aditya Jain 3 | Contact: https://adityajain.me 4 | """ 5 | 6 | import numpy as np 7 | 8 | def adjusted_rand_score(labels_true, labels_pred): 9 | """ 10 | Rand index adjusted for chance. 11 | 12 | The Rand Index computes a similarity measure between two clusterings 13 | by considering all pairs of samples and counting pairs that are 14 | assigned in the same or different clusters in the predicted and 15 | true clusterings. 16 | 17 | ARI = (RI - Expected_RI) / (max(RI) - Expected_RI) 18 | 19 | Note : https://davetang.org/muse/2017/09/21/adjusted-rand-index/ 20 | 21 | Parameters 22 | ---------- 23 | 24 | labels_true : numpy array, true cluster labels 25 | 26 | labels_pred : numpy array, predicted cluster labels 27 | 28 | Returns 29 | ------- 30 | adjusted rand index 31 | 32 | 33 | """ 34 | def factorial(n): 35 | if n<=1: return 1 36 | return n*factorial(n-1) 37 | def comb(n,r): 38 | if n<r: return 0 39 | return factorial(n)/(factorial(r)*factorial(n-r)) -------------------------------------------------------------------------------- /cluster/densitybased.py: -------------------------------------------------------------------------------- 59 | def __get_cores_index(self,X): 60 | cores = ( np.array([ (self.__metric(X,x) < self.__eps).sum() for x in X ]) >= self.__min_samples ) 61 | return np.where(cores)[0] 62 | 63 | def __compute_core_labels(self,X): 64 | cluster_index = np.ones( (len(X),), dtype=int )*-1 65 | curr_index = 0 66 | for i in self.__core_sample_indices: 67 | if cluster_index[i]!=-1: continue 68 | self.__clusters.append( [i] ) 69 | cluster_index[i] = curr_index 70 | new = { i } 71 | while len(new)>0: 72 | pt = X[new.pop()] 73 | indexes = np.where( ((self.__metric(X,pt) < self.__eps) & (cluster_index==-1)) )[0] 74 | for index in indexes: 75 | if index not in self.__core_sample_indices: continue 76 | new.add(index) 77 | self.__clusters [ curr_index ].append( index ) 78 | cluster_index[index] = curr_index 79 | curr_index+=1 80 | self.__clusters = [ np.array([ X[index] for index in cluster ]) for cluster in self.__clusters ] 81 | 82 | 83 | def fit(self,X): 84 | """ 85 | Perform DBSCAN clustering on a feature array.
86 | 87 | Parameters 88 | ---------- 89 | X : array, feature array 90 | 91 | """ 92 | self.__X = X 93 | self.__core_sample_indices = self.__get_cores_index(X) 94 | self.__compute_core_labels(X) 95 | self.__labels = self.predict(X) 96 | 97 | def predict(self,X): 98 | """ 99 | Predict cluster labels of new samples using cores 100 | 101 | Parameters 102 | ---------- 103 | X : numpy array, feature array 104 | 105 | Returns 106 | ------- 107 | cluster labels 108 | """ 109 | labels_ = [] 110 | for index in range( len(X) ): 111 | dists = [] 112 | for cluster in self.__clusters: 113 | dists.append( (self.__metric(cluster, X[index])< self.__eps).sum() ) 114 | cls = np.argmax(dists) 115 | labels_.append( cls if dists[cls]!=0 else -1 ) 116 | return np.array(labels_) 117 | 118 | def fit_predict(self,X): 119 | """ 120 | Perform DBSCAN clustering and predict cluster labels 121 | 122 | Parameters 123 | ---------- 124 | X : numpy array, feature array 125 | 126 | Returns 127 | ------- 128 | cluster labels 129 | """ 130 | self.fit(X) 131 | return self.__labels 132 | 133 | @property 134 | def labels_(self): return self.__labels 135 | 136 | @property 137 | def n_clusters_(self): return len(self.__clusters) 138 | 139 | @property 140 | def core_sample_indices_(self): return self.__core_sample_indices 141 | 142 | @property 143 | def components_(self): return self.__X[self.core_sample_indices_] 144 | -------------------------------------------------------------------------------- /linear_model/classifiers.py: -------------------------------------------------------------------------------- 1 | """ 2 | Author : Aditya Jain 3 | Contact : https://adityajain.me 4 | """ 5 | import numpy as np 6 | import matplotlib.pyplot as plt 7 | 8 | class SGDClassifier(): 9 | """ 10 | SGD classifier model that optimizes using gradient descent 11 | 12 | Note: this implementation is restricted to the binary classification task.
13 | 14 | Parameters 15 | ---------- 16 | learning_rate : float, default 0.01, learning rate while updating weights 17 | 18 | tol : float, default 0.01, stopping criteria 19 | 20 | seed : integer, random seed 21 | 22 | normalize : boolean, normalize X in fit method 23 | 24 | Attributes 25 | ---------- 26 | coef_ : Estimated coefficients of the decision function 27 | 28 | intercept_ : float, bias of the decision function 29 | 30 | """ 31 | def __init__(self, learning_rate=0.01, tol=0.01, seed=None, normalize=False): 32 | np.random.seed(seed if seed is not None else np.random.randint(100)) 33 | self.W = None 34 | self.b = None 35 | self.__lr = learning_rate 36 | self.__tol = tol 37 | self.__length = None 38 | self.__normalize = normalize 39 | self.__m = None 40 | self.__costs = [] 41 | self.__iterations = [] 42 | 43 | def __sigmoid(self,z): return 1/(1+np.exp(-z)) 44 | 45 | def __initialize_weights_and_bais(self): 46 | self.W = np.random.randn(self.__length) #(n,) 47 | self.b = 0 48 | 49 | def __computeCost(self,p,Y): 50 | loss = -( Y*np.log(p) + (1-Y)*np.log(1-p) ) 51 | cost = np.sum(loss)/self.__m 52 | return cost 53 | 54 | def __optimize(self,X,y): 55 | p = self.__sigmoid( np.dot(X,self.W)+self.b ) 56 | dW = np.dot( X.T, (p-y) )/self.__m # (n,) 57 | db = np.sum(p-y)/self.__m 58 | self.W = self.W - self.__lr*dW 59 | self.b = self.b - self.__lr*db 60 | 61 | def __normalizeX(self,X): return (X-self.__mean) / (self.__std) 62 | 63 | def fit(self, X, y, verbose=False): 64 | """ 65 | Fit X using y by optimizing weights and bias 66 | 67 | Parameters 68 | ---------- 69 | X : 2D numpy array, independent variables 70 | 71 | y : 1D numpy array, dependent variable 72 | 73 | verbose : boolean, print out details while optimizing (Default : False) 74 | 75 | """ 76 | if self.__normalize: 77 | self.__mean, self.__std = X.mean(axis=0), X.std(axis=0) 78 | X = self.__normalizeX(X) 79 | self.__m,self.__length = X.shape 80 | self.__initialize_weights_and_bais() 81 | last_cost,i = float('inf'),0 82 | while True: 83 | p = self.__sigmoid( np.dot(X,self.W)+self.b ) 84 | cost = self.__computeCost(p,y) 85 | if verbose: print(f"Iteration: {i}, Cost: {cost:.3f}") 86 | self.__optimize(X,y) 87 | if last_cost-cost < self.__tol: break 88 | else: last_cost,i = cost,i+1 89 | self.__costs.append(cost) 90 | self.__iterations.append(i) 91 | 92 | def predict(self,X): 93 | """ 94 | Predict dependent variable 95 | 96 | Parameters 97 | ---------- 98 | X : numpy array, independent variables 99 | 100 | Returns 101 | ------- 102 | predicted classes 103 | 104 | """ 105 | return self.predict_proba(X)[:,1]>0.5 106 | 107 | def predict_proba(self,X): 108 | """ 109 | Predict probability of all classes 110 | 111 | Parameters 112 | X : numpy array, independent variables 113 | 114 | Returns 115 | ------- 116 | predicted probabilities 117 | 118 | """ 119 | if self.__normalize: X = self.__normalizeX(X) 120 | ones = self.__sigmoid( np.dot(X,self.W)+self.b ) 121 | return np.c_[1-ones,ones] 122 | 123 | def plot(self,figsize=(7,5)): 124 | """ 125 | Plot the optimization graph 126 | """ 127 | plt.figure(figsize=figsize) 128 | plt.plot(self.__iterations,self.__costs) 129 | plt.xlabel('Iterations') 130 | plt.ylabel('Cost') 131 | plt.title("Iterations vs Cost") 132 | plt.show() 133 | 134 | def score(self,X,y): 135 | """ 136 | Calculate accuracy from independent variables 137 | 138 | Parameters 139 | ---------- 140 | X : numpy array, independent variables 141 | y : numpy array, dependent variable 142 | 143 | Returns 144 | ------- 145 | accuracy score 146 | 147 | """ 148 | return (self.predict(X) == y).sum() / len(y) 149 | 150 | @property 151 | def coef_(self): return self.W 152 | 153 | @property 154 | def intercept_(self): return self.b -------------------------------------------------------------------------------- /metrics/classification.py: -------------------------------------------------------------------------------- 1 | """ 2 | Author : Aditya Jain 3 | Contact : https://adityajain.me 4 | """ 5 | 6 | import numpy as np 7 | import matplotlib.pyplot as plt 8 | 9 | def accuracy_score(y_true,y_pred): 10 | """ 11 | Calculate Accuracy 12 | 13 | Parameters 14 | ---------- 15 | y_true : true labels 16 | 17 | y_pred : predicted labels 18 | 19 | Returns 20 | ------- 21 | accuracy 22 | """ 23 | return (y_true==y_pred).sum()/len(y_true) 24 | 25 | 26 | def confusion_matrix(y_true,y_pred): 27 | """ 28 | Calculate confusion matrix 29 | tn | fp 30 | ------- 31 | fn | tp 32 | 33 | Note: this implementation is restricted to the binary classification task. 34 | 35 | Parameters 36 | ---------- 37 | y_true : true labels 38 | 39 | y_pred : predicted labels 40 | 41 | Returns 42 | ------- 43 | confusion matrix 44 | 45 | """ 46 | trues,falses,pos,neg = y_true==y_pred, y_true!=y_pred, y_true==1, y_true==0 47 | tp, tn = (trues & pos).sum(), (trues & neg).sum() 48 | fp, fn = (falses & neg).sum(), (falses & pos).sum() 49 | return np.array( [[tn,fp],[fn,tp]] ) 50 | 51 | def roc_curve(y_true,y_score,plot=False): 52 | """ 53 | Compute Receiver operating characteristic (ROC) and return fpr,tpr or plot curve 54 | 55 | Note: this implementation is restricted to the binary classification task. 56 | 57 | Parameters 58 | ---------- 59 | y_true : true labels 60 | 61 | y_score : predicted probabilities 62 | 63 | plot: boolean (Default False), plot a matplotlib roc curve 64 | 65 | Returns 66 | ------- 67 | fpr,tpr,thresholds 68 | 69 | """ 70 | thres = np.sort(np.unique(y_score))[::-1] 71 | fpr,tpr = [],[] 72 | for th in thres: 73 | y_pred = y_score>=th 74 | positives, falses, trues = y_pred==1, y_pred!=y_true, y_pred==y_true 75 | tpr.append((trues & positives).sum() / y_true.sum()) 76 | fpr.append((falses & positives).sum() / (y_true==0).sum()) 77 | if plot: 78 | plt.plot(fpr,tpr) 79 | plt.plot([0,1],[0,1]) 80 | plt.xlabel('False Positive Rate') 81 | plt.ylabel('True Positive Rate') 82 | return np.array(fpr), np.array(tpr), thres 83 | 84 | 85 | def roc_auc_score(y_true, y_score): 86 | """ 87 | Compute Area under Receiver operating characteristic (ROC) 88 | 89 | Note: this implementation is restricted to the binary classification task.
90 | 91 | Parameters 92 | ---------- 93 | y_true : true labels 94 | 95 | y_score : predicted probabilities 96 | 97 | Returns 98 | ------- 99 | area under curve 100 | 101 | """ 102 | fpr,tpr,_ = roc_curve(y_true,y_score) 103 | fpr_last,tpr_last, area = fpr[0],tpr[0],0 104 | for i in range(1,len(fpr)): 105 | fpr_curr, tpr_curr = fpr[i],tpr[i] 106 | tri_area = 0.5 * (fpr_curr-fpr_last) * (tpr_curr-tpr_last) 107 | rect_area = (fpr_curr-fpr_last) * (tpr_last-0) 108 | area, fpr_last,tpr_last = area+tri_area+rect_area, fpr_curr, tpr_curr 109 | return area 110 | 111 | 112 | def precision_score(y_true,y_pred): 113 | """ 114 | Compute Precision 115 | 116 | Precision = tp/(tp+fp) 117 | 118 | Parameters 119 | ---------- 120 | y_true : true labels 121 | 122 | y_pred : predicted labels 123 | 124 | Returns 125 | ------- 126 | precision score 127 | """ 128 | tn,fp,fn,tp = tuple(confusion_matrix(y_true,y_pred).ravel()) 129 | return tp/(tp+fp) 130 | 131 | def recall_score(y_true,y_pred): 132 | """ 133 | Compute Recall 134 | 135 | Recall = tp/(tp+fn) 136 | 137 | Parameters 138 | ---------- 139 | y_true : true labels 140 | 141 | y_pred : predicted labels 142 | 143 | Returns 144 | ------- 145 | recall score 146 | """ 147 | tn,fp,fn,tp = tuple(confusion_matrix(y_true,y_pred).ravel()) 148 | return tp/(tp+fn) 149 | 150 | def sensitivity_score(y_true,y_pred): 151 | """ 152 | Compute Sensitivity 153 | 154 | Sensitivity = tp/(tp+fn) 155 | 156 | Parameters 157 | ---------- 158 | y_true : true labels 159 | 160 | y_pred : predicted labels 161 | 162 | Returns 163 | ------- 164 | sensitivity score 165 | """ 166 | tn,fp,fn,tp = tuple(confusion_matrix(y_true,y_pred).ravel()) 167 | return tp/(tp+fn) 168 | 169 | def specificity_score(y_true,y_pred): 170 | """ 171 | Compute Specificity 172 | 173 | Specificity = tn/(tn+fp) 174 | 175 | Parameters 176 | ---------- 177 | y_true : true labels 178 | 179 | y_pred : predicted labels 180 | 181 | Returns 182 | ------- 183 | specificity score 184 | """ 185 | tn,fp,fn,tp = tuple(confusion_matrix(y_true,y_pred).ravel()) 186 | return tn/(tn+fp) 187 | 188 | 189 | def f1_score(y_true,y_pred): 190 | """ 191 | Compute f1_score 192 | 193 | f1_score = harmonic mean of precision and recall 194 | f1_score = 2 / ( 1/precision + 1/recall ) = (2 * precision * recall) / ( precision + recall ) 195 | 196 | Note: this implementation is restricted to the binary classification task.
197 | 198 | Parameters 199 | ---------- 200 | y_true : true labels 201 | 202 | y_pred : predicted labels 203 | 204 | Returns 205 | ------- 206 | f1 score 207 | """ 208 | precision = precision_score(y_true,y_pred) 209 | recall = recall_score(y_true,y_pred) 210 | f1_score = 2*(precision*recall)/(precision+recall) 211 | return f1_score -------------------------------------------------------------------------------- /preprocessing/scaler.py: -------------------------------------------------------------------------------- 1 | """ 2 | Author : Aditya Jain 3 | Contact : https://adityajain.me 4 | """ 5 | 6 | import numpy as np 7 | 8 | class StandardScaler(): 9 | """ 10 | Apply Standard Scaling on data 11 | 12 | scaled_data = (data - mean)/ std 13 | 14 | Attributes 15 | ---------- 16 | 17 | mean_ : mean of each column 18 | 19 | std_ : standard deviation of each column 20 | 21 | """ 22 | def __init__(self): 23 | self.__mean = None 24 | self.__std = None 25 | 26 | def fit(self,X): 27 | """ 28 | Fit data to calculate mean and std 29 | 30 | Parameters 31 | ---------- 32 | X : numpy array 33 | 34 | """ 35 | self.__mean = X.mean(axis=0) 36 | self.__std = X.std(axis=0) 37 | 38 | def transform(self,X): 39 | """ 40 | Transform data using mean and std 41 | 42 | Parameters 43 | ---------- 44 | X : numpy array 45 | 46 | Returns 47 | ------- 48 | transformed_X 49 | 50 | """ 51 | return (X-self.__mean)/self.__std 52 | 53 | def fit_transform(self,X): 54 | """ 55 | Fit and transform data at once 56 | 57 | Parameters 58 | ---------- 59 | X : numpy array 60 | 61 | Returns 62 | ------- 63 | transformed_X 64 | 65 | """ 66 | self.fit(X) 67 | return self.transform(X) 68 | 69 | def inverse_transform(self,X): 70 | """ 71 | Convert transformed data into original data 72 | 73 | Parameters 74 | ---------- 75 | X : numpy array, transformed data 76 | 77 | Returns 78 | ------- 79 | Original data 80 | 81 | """ 82 | return (X*self.__std)+self.__mean 83 | 84 | @property 85 | def mean_(self): return self.__mean 86 | 87 | @property 88 | def std_(self): return self.__std 89 | 90 | 91 | class MinMaxScaler(): 92 | """ 93 | Apply Min-Max scaling on data 94 | 95 | scaled_data = (data - min)/ (max - min) 96 | 97 | Attributes 98 | ---------- 99 | 100 | min_ : min of each column 101 | 102 | max_ : max of each column 103 | 104 | """ 105 | def __init__(self): 106 | self.__min = None 107 | self.__max = None 108 | 109 | def fit(self,X): 110 | """ 111 | Fit data to calculate min and max 112 | 113 | Parameters 114 | ---------- 115 | X : numpy array 116 | 117 | """ 118 | self.__min = X.min(axis=0) 119 | self.__max = X.max(axis=0) 120 | 121 | def transform(self,X): 122 | """ 123 | Transform data using min and max 124 | 125 | Parameters 126 | ---------- 127 | X : numpy array 128 | 129 | Returns 130 | ------- 131 | transformed_X 132 | 133 | """ 134 | return (X-self.__min)/(self.__max-self.__min) 135 | 136 | def fit_transform(self,X): 137 | """ 138 | Fit and transform data at once 139 | 140 | Parameters 141 | ---------- 142 | X : numpy array 143 | 144 | Returns 145 | ------- 146 | transformed_X 147 | 148 | """ 149 | self.fit(X) 150 | return self.transform(X) 151 | 152 | def inverse_transform(self,X): 153 | """ 154 | Convert transformed data into original data 155 | 156 | Parameters 157 | ---------- 158 | X : numpy array, transformed data 159 | 160 | Returns 161 | ------- 162 | Original data 163 | 164 | """ 165 | return (X*(self.__max-self.__min))+self.__min 166 | 167 | @property 168 | def min_(self): return self.__min 169 | 170 | @property 171 | 
def max_(self): return self.__max 172 | 173 | 174 | class RobustScaler(): 175 | """ 176 | Apply Robust scaling on data 177 | 178 | scaled_data = (data - median) / (3rd quartile - 1st quartile) 179 | 180 | Attributes 181 | ---------- 182 | 183 | center_ : median for each column 184 | 185 | scale_ : interquartile range for each column 186 | 187 | """ 188 | def __init__(self): 189 | self.__median = None 190 | self.__3rd_quantile = None 191 | self.__1st_quantile = None 192 | 193 | def fit(self,X): 194 | """ 195 | Fit data to calculate the median and the 1st and 3rd quartiles 196 | 197 | Parameters 198 | ---------- 199 | X : numpy array 200 | 201 | """ 202 | self.__median = np.median(X,axis=0) 203 | self.__3rd_quantile = np.percentile(X,75,axis=0) 204 | self.__1st_quantile = np.percentile(X,25,axis=0) 205 | 206 | def transform(self,X): 207 | """ 208 | Transform data using the median and interquartile range 209 | 210 | Parameters 211 | ---------- 212 | X : numpy array 213 | 214 | Returns 215 | ------- 216 | transformed_X 217 | 218 | """ 219 | return (X-self.__median)/(self.__3rd_quantile-self.__1st_quantile) 220 | 221 | def fit_transform(self,X): 222 | """ 223 | Fit and transform data at once 224 | 225 | Parameters 226 | ---------- 227 | X : numpy array 228 | 229 | Returns 230 | ------- 231 | transformed_X 232 | 233 | """ 234 | self.fit(X) 235 | return self.transform(X) 236 | 237 | def inverse_transform(self,X): 238 | """ 239 | Convert transformed data into original data 240 | 241 | Parameters 242 | ---------- 243 | X : numpy array, transformed data 244 | 245 | Returns 246 | ------- 247 | Original data 248 | 249 | """ 250 | return (X*(self.__3rd_quantile-self.__1st_quantile))+self.__median 251 | 252 | @property 253 | def center_(self): return self.__median 254 | 255 | @property 256 | def scale_(self): return self.__3rd_quantile - self.__1st_quantile -------------------------------------------------------------------------------- /tree/regressors.py: -------------------------------------------------------------------------------- 1 | """ 2 | Author : Aditya Jain 3 | Contact : https://adityajain.me 4 | """ 5 | 6 | import numpy as np 7 | 8 | class DecisionTreeRegressor(): 9 | """ 10 | Decision Tree Regressor 11 | 12 | Parameters 13 | ---------- 14 | criterion : ('mse', 'mae', 'std') ( Default 'mse' ) 15 | The function to measure the quality of a split.
16 | 'mse' is mean squared error 17 | 'mae' is mean absolute error 18 | 'std' is standard deviation 19 | 20 | max_depth : integer (Default 'inf'), maximum depth allowed in the decision tree 21 | 22 | min_samples_split : integer (Default 2), minimum number of samples a node must have before it may be split 23 | 24 | Attributes 25 | ---------- 26 | tree_ : dict, dictionary representation of tree 27 | 28 | depth_ : integer, current maximum depth of tree 29 | 30 | """ 31 | def __init__(self, criterion='mse', max_depth=None, min_samples_split=2): 32 | self.__root = None 33 | self.__cost = { 'mse':self.__mse,'std':self.__std,'mae':self.__mae }[criterion] 34 | self.__max_depth = float('inf') if max_depth is None else max_depth 35 | self.__min_samples_split = min_samples_split 36 | self.__depth = 0 37 | 38 | def __std(self,y): 39 | squared_error = (y-y.mean())**2 40 | return np.sqrt( np.sum(squared_error)/len(y) ) 41 | 42 | def __mse(self,y): 43 | squared_error = (y-y.mean())**2 44 | return np.sum( squared_error/len(y) ) 45 | 46 | def __mae(self,y): return np.sum(abs(y-y.mean())/len(y)) 47 | 48 | def __computeCost(self,groups, y): 49 | n_instances = len(groups[0])+len(groups[1]) # count of all samples 50 | weighted_cost = 0.0 # weighted cost across both groups 51 | for indexes in groups: 52 | size = len(indexes) 53 | # avoid divide by zero 54 | if size == 0: continue 55 | weighted_cost += self.__cost(y[indexes]) * (size/n_instances) 56 | return weighted_cost 57 | 58 | def __get_split(self,X,y): 59 | b_index, b_value, b_cost, b_groups = float('inf'), float('inf'), float('inf'), None 60 | for col_ind in range(X.shape[1]): #no of features 61 | for val in np.unique(X[:,col_ind]): #for each unique value in each of the features 62 | 63 | #left_index: indexes where feature < val, right_index: indexes where feature >= val 64 | left_index = np.reshape( np.argwhere(X[:,col_ind]<val), (-1,) ) 65 | right_index = np.reshape( np.argwhere(X[:,col_ind]>=val), (-1,) ) 66 | 67 | #compute weighted split cost 68 | cost = self.__computeCost((left_index,right_index), y) 69 | if cost < b_cost: 70 | b_index, b_value, b_cost, b_groups = col_ind, val, cost, (left_index, right_index) 71 | return {'index':b_index, 'value':b_value, 'groups':b_groups} 72 | 73 | def __to_terminal(self,y): return y.mean() 74 | 75 | def __split(self,node, X, y, depth): 76 | self.__depth = max(depth,self.__depth) 77 | left, right = node.pop('groups') 78 | 79 | # check for a no split 80 | if len(left)==0 or len(right)==0: 81 | node['left'] = node['right'] = self.__to_terminal(y[np.append(left,right)]) 82 | return 83 | 84 | # check for max depth 85 | if depth >= self.__max_depth: 86 | node['left'], node['right'] = self.__to_terminal(y[left]), self.__to_terminal(y[right]) 87 | return 88 | 89 | # process left child 90 | if len(left) <= self.__min_samples_split: 91 | node['left'] = self.__to_terminal(y[left]) 92 | else: 93 | node['left'] = self.__get_split(X[left],y[left]) 94 | self.__split(node['left'], X[left], y[left], depth+1) 95 | 96 | # process right child 97 | if len(right) <= self.__min_samples_split: 98 | node['right'] = self.__to_terminal(y[right]) 99 | else: 100 | node['right'] = self.__get_split(X[right],y[right]) 101 | self.__split(node['right'],X[right],y[right], depth+1) 102 | 103 | def fit(self, X, y): 104 | """ 105 | Fit X using y by optimizing splits costs using given criterion 106 | 107 | Parameters 108 | ---------- 109 | X : 2D numpy array, independent variables 110 | 111 | y : 1D numpy array, dependent variable 112 | 113 | """ 114 | self.__root = self.__get_split(X,y) 115 | self.__split(self.__root, X, y, 1) 116 | 117 | def __predict_row(self,row,node): 118 | if row[node['index']] < node['value']: 119 | if isinstance(node['left'], dict): return self.__predict_row(row,node['left']) 120 | else: return node['left'] 121 | else: 122 | if isinstance(node['right'], dict): return self.__predict_row(row,node['right']) 123 | else: return node['right'] 124 | 125 | def predict(self, X): 126 | """ 127 | Predict dependent variable 128 | 129 | Parameters 130 | ---------- 131 | X : numpy array, independent variables 132 | 133 | Returns 134 | ------- 135 | predicted values 136 | 137 | """ 138 | return np.array( [self.__predict_row(row,self.__root) for row in X] ) 139 | 140 | def score(self,X,y): 141 | """ 142 | Compute the Coefficient of Determination (r-square) 143 | 144 | Parameters 145 | ---------- 146 | X : 2D numpy array, independent variables 147 | 148 | y : numpy array, dependent variables 149 | 150 | Returns 151 | ------- 152 | r2 values 153 | """ 154 | y_pred = self.predict(X) 155 | return 1-( np.sum( (y-y_pred)**2 )/np.sum( (y-y.mean())**2 ) ) 156 | 157 | @property 158 | def depth_(self): return self.__depth 159 | 160 | @property 161 | def tree_(self): return self.__root -------------------------------------------------------------------------------- /tree/classifiers.py: -------------------------------------------------------------------------------- 1 | """ 2 | Author : Aditya Jain 3 | Contact : https://adityajain.me 4 | """ 5 | 6 | import numpy as np 7 | 8 | class DecisionTreeClassifier(): 9 | """ 10 | Decision Tree Classifier 11 | 12 | Parameters 13 | ---------- 14 | max_depth : integer (Default 'inf'), maximum depth allowed in the decision tree 15 | 16 | min_samples_split : integer (Default 2), minimum number of samples a node must have before it may be split 17 | 18 | n_classes : integer (Default None), if None it is inferred from the training dataset labels 19 | 20 | Attributes 21 | ---------- 22 | tree_ : dict, dictionary representation of tree 23 | 24 | depth_ : integer, current maximum depth of tree 25 | 26 | """ 27 | def __init__(self, max_depth=None, min_samples_split=2, n_classes=None): 28 | self.__root = None 29 | self.__max_depth = float('inf') if max_depth is None else max_depth 30 | self.__min_samples_split = min_samples_split 31 | self.__n_classes = n_classes 32 | self.__depth = 0 33 | 34 | def __gini_index(self,groups, y): 35 | n_instances = len(groups[0])+len(groups[1]) # count of all samples 36 | gini = 0.0 # sum weighted Gini index for each group 37 | for indexes in groups: 38 | size = len(indexes) 39 | if size == 0: continue # avoid divide by zero 40 | score = 0.0 41 | # score the group based on the score for each class 42 | for class_val in np.unique(y): 43 | p = (y[indexes]==class_val).sum()/size 44 | score += p*p 45 | # weight the group score by its relative size 46 | gini += (1-score) * (size / n_instances) 47 | return gini 48 | 49 | def __get_split(self,X,y): 50 | b_index, b_value, b_score, b_groups = float('inf'), float('inf'), float('inf'), None 51 | for col_ind in range(X.shape[1]): #for each feature 52 | for val in np.unique(X[:,col_ind]): #for each unique value of that feature 53 | 54 | #left_index: indexes where feature < val, right_index: indexes where feature >= val 55 | left_index = np.reshape( np.argwhere(X[:,col_ind]<val) ,(-1,)) 56 | right_index = np.reshape( np.argwhere(X[:,col_ind]>=val) ,(-1,)) 57 | 58 | #find gini index 59 | gini = self.__gini_index((left_index,right_index), y) 60 | 61 | if gini < b_score: 62 | b_index, b_value, b_score, b_groups = col_ind, val, gini, (left_index, right_index) 63 | 64 | return {'index':b_index, 'value':b_value,
'groups':b_groups} 65 | 66 | def __to_terminal(self,classes): 67 | # Create a terminal node value 68 | cls,cnt = np.unique(classes,return_counts=True) 69 | probs = np.zeros(self.__n_classes) 70 | for cl,cn in zip(cls,cnt): probs[int(cl)]= cn/sum(cnt) 71 | return cls[np.argmax(cnt)], probs 72 | 73 | def __split(self, node, X, y, depth): 74 | self.__depth = max(depth, self.__depth) 75 | left, right = node.pop('groups') 76 | # check for a no split 77 | if len(left)==0 or len(right)==0: 78 | node['left'] = node['right'] = self.__to_terminal(y[np.append(left,right)]) 79 | return 80 | 81 | # check for max depth 82 | if depth >= self.__max_depth: 83 | node['left'], node['right'] = self.__to_terminal(y[left]), self.__to_terminal(y[right]) 84 | return 85 | 86 | # process left child 87 | if len(left) <= self.__min_samples_split: 88 | node['left'] = self.__to_terminal(y[left]) 89 | else: 90 | node['left'] = self.__get_split(X[left],y[left]) 91 | self.__split(node['left'], X[left], y[left], depth+1) 92 | # process right child 93 | if len(right) <= self.__min_samples_split: 94 | node['right'] = self.__to_terminal(y[right]) 95 | else: 96 | node['right'] = self.__get_split(X[right],y[right]) 97 | self.__split(node['right'],X[right],y[right], depth+1) 98 | 99 | def fit(self,X,y): 100 | """ 101 | Fit X using y by optimizing splits costs using gini index 102 | 103 | Parameters 104 | ---------- 105 | X : 2D numpy array, independent variables 106 | 107 | y : 1D numpy array, dependent variable 108 | 109 | """ 110 | self.__n_classes = len(np.unique(y)) if self.__n_classes is None else self.__n_classes 111 | self.__root = self.__get_split(X,y) 112 | self.__split(self.__root, X, y, 1) 113 | 114 | def predict(self,X): 115 | """ 116 | Predict dependent variable 117 | 118 | Parameters 119 | --------- 120 | X : numpy array, independent variables 121 | 122 | Returns 123 | ------- 124 | predicted labels 125 | 126 | """ 127 | return np.array([ self.__predict_row(row,self.__root)[0] for row in X ]) 128 | 129 | def predict_proba(self,X): 130 | """ 131 | Predict probability of all classes 132 | 133 | Parameters 134 | ---------- 135 | X : numpy array, independent variables 136 | 137 | Returns 138 | ------- 139 | probability of each class [ n_samples, n_classes ] 140 | 141 | """ 142 | return np.array([ self.__predict_row(row,self.__root)[1] for row in X ]) 143 | 144 | def __predict_row(self,row,node): 145 | if row[node['index']] < node['value']: 146 | if isinstance(node['left'], dict): return self.__predict_row(row,node['left']) 147 | else: return node['left'] 148 | else: 149 | if isinstance(node['right'], dict): return self.__predict_row(row,node['right']) 150 | else: return node['right'] 151 | 152 | def score(self,X,y): 153 | """ 154 | Calculate accuracy from independent variables 155 | 156 | Parameters 157 | ---------- 158 | X : numpy array, independent variables 159 | 160 | y : numpy array, dependent variable 161 | 162 | Returns 163 | ------- 164 | accuracy score 165 | 166 | """ 167 | return (y==self.predict(X)).sum()/len(y) 168 | 169 | @property 170 | def depth_(self): return self.__depth 171 | 172 | @property 173 | def tree_(self): return self.__root -------------------------------------------------------------------------------- /linear_model/regressors.py: -------------------------------------------------------------------------------- 1 | """ 2 | Author : Aditya Jain 3 | Contact : https://adityajain.me 4 | """ 5 | import numpy as np 6 | import matplotlib.pyplot as plt 7 | 8 | class SGDRegressor(): 9 | """ 10 | 
SGD regressor model that optimizes using gradient descent 11 | 12 | Parameters 13 | ---------- 14 | learning_rate : float, learning rate (Default 0.01) 15 | 16 | tol : float, tolerance as stopping criteria for gradient descent (Default : 0.01) 17 | 18 | seed : integer, random seed 19 | 20 | normalize : boolean, normalize X in fit method 21 | 22 | Attributes 23 | ---------- 24 | coef_ : Estimated coefficients for the linear regression problem 25 | 26 | intercept_ : float, bias for the linear regression problem 27 | 28 | """ 29 | def __init__(self, learning_rate=0.01, tol=0.01, seed=None,normalize=False): 30 | self.W = None 31 | self.b = None 32 | self.__lr = learning_rate 33 | self.__tol = tol 34 | self.__length = None 35 | self.__normalize = normalize 36 | self.__m = None 37 | self.__costs = [] 38 | self.__iterations = [] 39 | np.random.seed(seed if seed is not None else np.random.randint(100)) 40 | 41 | def __initialize_weights_and_bais(self): 42 | self.W = np.random.randn(self.__length) #(n,) 43 | self.b = 0 44 | 45 | def __computeCost(self,h,Y): 46 | loss = np.square(h-Y) 47 | cost = np.sum(loss)/(2*self.__m) 48 | return cost 49 | 50 | def __optimize(self,X,Y): 51 | h = np.dot(X,self.W)+self.b 52 | dW = np.dot( X.T, (h-Y) ) / self.__m 53 | db = np.sum( h-Y ) / self.__m 54 | self.W = self.W - self.__lr*dW 55 | self.b = self.b - self.__lr*db 56 | 57 | def __normalizeX(self,X): return (X-self.__mean) / (self.__std) 58 | 59 | def fit(self, X, y, verbose=False): 60 | """ 61 | Fit X using y by optimizing weights and bias 62 | 63 | Parameters 64 | ---------- 65 | X : 2D numpy array, independent variables 66 | 67 | y : 1D numpy array, dependent variable 68 | 69 | verbose : boolean, print out details while optimizing (Default : False) 70 | 71 | """ 72 | if self.__normalize: 73 | self.__mean, self.__std = X.mean(axis=0), X.std(axis=0) 74 | X = self.__normalizeX(X) 75 | self.__m,self.__length = X.shape 76 | self.__initialize_weights_and_bais() 77 | last_cost,i = float('inf'),0 78 | while True: 79 | h = np.dot(X,self.W)+self.b 80 | cost = self.__computeCost(h,y) 81 | if verbose: print(f"Iteration: {i}, Cost: {cost:.3f}") 82 | self.__optimize(X,y) 83 | if last_cost-cost < self.__tol: break 84 | else: last_cost,i = cost,i+1 85 | self.__costs.append(cost) 86 | self.__iterations.append(i) 87 | 88 | def predict(self,X): 89 | """ 90 | Predict dependent variable 91 | 92 | Parameters 93 | ---------- 94 | X : numpy array, independent variables 95 | 96 | Returns 97 | ------- 98 | predicted values 99 | 100 | """ 101 | if self.__normalize: X = self.__normalizeX(X) 102 | return np.dot(X,self.W)+self.b 103 | 104 | def plot(self,figsize=(7,5)): 105 | """ 106 | Plot the optimization graph 107 | """ 108 | plt.figure(figsize=figsize) 109 | plt.plot(self.__iterations,self.__costs) 110 | plt.xlabel('Iterations') 111 | plt.ylabel('Cost') 112 | plt.title("Iterations vs Cost") 113 | plt.show() 114 | 115 | def score(self,X,y): 116 | """ 117 | Compute the Coefficient of Determination (r-square) 118 | 119 | Parameters 120 | ---------- 121 | X : 2D numpy array, independent variables 122 | 123 | y : numpy array, dependent variables 124 | 125 | Returns 126 | ------- 127 | r2 values 128 | 129 | """ 130 | return 1-(np.sum(((y-self.predict(X))**2))/np.sum((y-np.mean(y))**2)) 131 | 132 | @property 133 | def coef_(self): return self.W 134 | 135 | @property 136 | def intercept_(self): return self.b 137 | 138 | 139 | 140 | class LinearRegression(): 141 | """ 142 | An implementation of OLS regression 143 | 144 | Parameters 145 | ---------- 146 | 
normalize : boolean, standardize X (zero mean, unit variance) before fitting 147 | 148 | Attributes 149 | ---------- 150 | coef_ : coefficients for each of the features 151 | 152 | intercept_ : y intercept 153 | 154 | """ 155 | def __init__(self, normalize=False): 156 | self.__bias = None 157 | self.__normalize = normalize 158 | self.__weights = None 159 | self.__std = None 160 | self.__mean = None 161 | 162 | def __normalizeX(self,X): 163 | return (X-self.__mean)/self.__std 164 | 165 | def fit(self,X,y): 166 | """ 167 | Fit X,y into the model 168 | 169 | Parameters 170 | ---------- 171 | X : numpy array, array of independent variables 172 | y : numpy array, dependent variable 173 | 174 | """ 175 | if self.__normalize: 176 | self.__mean, self.__std = X.mean(axis=0), X.std(axis=0) 177 | X = self.__normalizeX(X) 178 | 179 | #weights = (X'X)^-1 X'Y 180 | self.__weights = np.dot( np.linalg.inv(np.dot(X.T, X)), np.dot( X.T, y )) 181 | self.__bias = y.mean() - np.sum(self.__weights * X.mean(axis=0)) 182 | 183 | def predict(self,X): 184 | """ 185 | Predict dependent variable 186 | 187 | Parameters 188 | ---------- 189 | X : numpy array, independent variables 190 | 191 | Returns 192 | ------- 193 | predicted values 194 | 195 | """ 196 | if self.__normalize: 197 | X = self.__normalizeX(X) 198 | return np.dot(X, self.__weights )+ self.__bias 199 | 200 | def score(self,X,y): 201 | """ 202 | Compute the Coefficient of Determination (r-square) 203 | 204 | Parameters 205 | ---------- 206 | X : 2D numpy array, independent variables 207 | 208 | y : numpy array, dependent variables 209 | 210 | Returns 211 | ------- 212 | r2 values 213 | 214 | """ 215 | return 1-(np.sum(((y-self.predict(X))**2))/np.sum((y-np.mean(y))**2)) 216 | 217 | 218 | @property 219 | def coef_(self): return self.__weights 220 | 221 | @property 222 | def intercept_(self): return self.__bias -------------------------------------------------------------------------------- /cluster/hierarchical.py: -------------------------------------------------------------------------------- 1 | """ 2 | Author : Aditya Jain 3 | Contact : https://adityajain.me 4 | """ 5 | 6 | 7 | import numpy as np 8 | class AgglomerativeClustering(): 9 | """ 10 | Agglomerative Clustering 11 | 12 | Recursively merges the pair of clusters that minimally increases a given linkage distance. 13 | 14 | Parameters 15 | ---------- 16 | n_clusters : int, default=2, 17 | The number of clusters to find. 18 | 19 | affinity : string or callable, default: "euclidean", 20 | Metric used to compute the linkage. Currently "euclidean" is available. 21 | 22 | linkage : {"ward", "complete", "average"}, optional, default: "ward" 23 | Which linkage criterion to use. The linkage criterion determines which 24 | distance to use between sets of observations. The algorithm will merge 25 | the pair of clusters that minimizes this criterion.
26 | 27 | Attributes 28 | ---------- 29 | labels_ : array [n_samples] 30 | cluster labels for each point 31 | 32 | """ 33 | def __init__(self, n_clusters=2, affinity='euclidean', linkage='ward'): 34 | self.__n_clusters = n_clusters 35 | self.__proximity = [] 36 | self.__affinity = { 'euclidean': self.__euclidean }[affinity] 37 | self.__linkage = { 'complete': self.__max_distance, 'average': self.__average_distance, 38 | 'ward':self.__ward_distance}[linkage] 39 | self.__clusters = None 40 | self.__labels_ = None 41 | 42 | def __euclidean(self, X1, X2 ): return np.sqrt( np.sum( (X1-X2)**2, axis=1 ) ) 43 | 44 | def __max_distance(self,distances, c1 ,c2): return max(distances) 45 | def __average_distance(self,distances, c1, c2): return np.sum(distances)/(c1*c2) 46 | def __ward_distance(self,distances, c1, c2 ): return np.sum( np.square(distances) )/(c1*c2) 47 | 48 | def __distances(self, g1, g2 ): 49 | all_distances = np.array([]) 50 | for row in g1: 51 | all_distances = np.append( all_distances, self.__affinity(g2,row) ) 52 | return all_distances 53 | 54 | def __computeProximityMatrix(self,n_clusters): 55 | self.__proximity = np.ones( (n_clusters,n_clusters) )*float('inf') 56 | for i in range(n_clusters-1): 57 | for j in range(i+1,n_clusters): 58 | g1, g2 = self.__clusters[i], self.__clusters[j] 59 | proximity = self.__linkage( self.__distances(g1,g2), len(g1), len(g2) ) 60 | self.__proximity[i][j] = self.__proximity[j][i] = proximity 61 | 62 | def __findMinIndex(self): 63 | minimum, index = float('inf'), (None,None) 64 | size = len(self.__proximity) 65 | for i in range(size): 66 | j = np.argmin( self.__proximity[i] ) 67 | if minimum > self.__proximity[i][j]: 68 | minimum = self.__proximity[i][j] 69 | index = (i,j) 70 | return index 71 | 72 | def __updateProximityMatrixAndClusters(self,i,j,n): 73 | i,j = min(i,j),max(i,j) 74 | new_cluster = np.append( self.__clusters[i], self.__clusters[j], axis=0) 75 | self.__proximity = np.delete(self.__proximity,[i,j],axis=0) 76 | self.__proximity = np.delete(self.__proximity,[i,j],axis=1) 77 | self.__clusters = self.__clusters[:i]+self.__clusters[i+1: j]+self.__clusters[j+1:]+[new_cluster] 78 | infs = np.ones( (n-2,) )*float('inf') 79 | self.__proximity = np.c_[ self.__proximity, infs ] 80 | infs = np.ones( (1,n-1) )*float('inf') 81 | self.__proximity = np.append(self.__proximity, infs, axis=0) 82 | for i in range(n-2): 83 | g = self.__clusters[i] 84 | proximity = self.__linkage( self.__distances( new_cluster, g ), len(new_cluster), len(g) ) 85 | self.__proximity[i][n-2] = self.__proximity[n-2][i] = proximity 86 | 87 | def fit(self,X): 88 | """ 89 | Fit the hierarchical clustering on the data 90 | 91 | Parameters 92 | ---------- 93 | X : array-like, observations 94 | 95 | """ 96 | self.__clusters = list( np.expand_dims( X, axis=1 ) ) 97 | n_clusters = len(self.__clusters) 98 | self.__computeProximityMatrix(n_clusters) 99 | while n_clusters != self.__n_clusters: 100 | i,j = self.__findMinIndex() 101 | self.__updateProximityMatrixAndClusters(i,j,n_clusters) 102 | n_clusters = len(self.__clusters) 103 | self.__labels_ = self.predict(X) 104 | 105 | def predict(self,X): 106 | """ 107 | Predict cluster labels for observations 108 | 109 | Parameters 110 | ---------- 111 | X : array-like, observations 112 | """ 113 | distances = self.__predict_distances(X) 114 | return np.argmin(distances, axis=1) 115 | 116 | def __predict_distances(self,X): 117 | X = np.expand_dims( X, axis=1 ) 118 | distances = [] 119 | for sample in X: 120 | sample_dist = [] 121 | for i in range(self.__n_clusters): 122 | proximity = self.__linkage( self.__distances( sample, self.__clusters[i] ), 123 | 1, len(self.__clusters[i]) ) 124 | sample_dist.append(proximity) 125 | distances.append(sample_dist) 126 | return np.array(distances) 127 | 128 | def fit_predict(self,X): 129 | """ 130 | Fit the hierarchical clustering on the data and predict cluster labels 131 | 132 | Parameters 133 | ---------- 134 | X : array-like, observations 135 | 136 | """ 137 | self.fit(X) 138 | return self.__labels_ 139 | 140 | @property 141 | def labels_(self): return self.__labels_ 142 | 143 | 144 | 145 | 146 | class MeanShift(): 147 | """ 148 | Mean shift clustering is a centroid-based clustering algorithm. 149 | 150 | It is a centroid-based algorithm, which works by updating 151 | candidates for centroids to be the mean of the points within a given 152 | region. These candidates are then filtered in a post-processing stage to 153 | eliminate near-duplicates to form the final set of centroids. 154 | 155 | Parameters 156 | ---------- 157 | bandwidth : float or integer 158 | if None, calculated by a heuristic defined by Aditya Jain 159 | 160 | Attributes 161 | ---------- 162 | labels_ : cluster labels for feature set 163 | 164 | cluster_centers_ : cluster centers being used 165 | 166 | n_clusters_ : number of clusters found 167 | """ 168 | 169 | def __init__(self, bandwidth=None): 170 | self.__bandwidth = bandwidth 171 | self.__centroids = None 172 | self.__labels = None 173 | 174 | def __euclidean(self,X1,X2): return np.sqrt(np.sum((X1-X2)**2, axis=1)) 175 | 176 | def __get_new_centroids(self,X,centroids): 177 | new_centroids = set() 178 | for centroid in centroids: 179 | new_centroids.add( tuple( X[ self.__euclidean(X,centroid) < self.__bandwidth ].mean(axis=0) ) )

-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 🧠 MLfromScratch 2 | 3 |

4 | 5 | ![Python](https://img.shields.io/badge/Python-3.8+-blue?style=flat-square&logo=python&logoColor=white) 6 | ![NumPy](https://img.shields.io/badge/NumPy-Library-green?style=flat-square&logo=numpy&logoColor=white) 7 | ![License](https://img.shields.io/github/license/adityajn105/MLfromScratch?style=flat-square) 8 | ![GitHub contributors](https://img.shields.io/github/contributors/adityajn105/MLfromScratch?style=flat-square) 9 | 10 | 11 | 12 | **MLfromScratch** is a library designed to help you learn and understand machine learning algorithms by building them from scratch using only `NumPy`! No black-box libraries, no hidden magic—just pure Python and math. It's perfect for beginners who want to see what's happening behind the scenes of popular machine learning models. 13 | 14 | 🔗 **[Explore the Documentation](https://github.com/adityajn105/MLfromScratch/wiki)** 15 | 16 | --- 17 | 18 | ## 📦 Package Structure 19 | 20 | Our package structure is designed to look like `scikit-learn`, so if you're familiar with that, you'll feel right at home! 21 | 22 | ### 🔧 Modules and Algorithms (Explained for Beginners)

23 | #### 📈 **1. Linear Models (`linear_model`)** 24 | 25 | - **LinearRegression** ![Linear Regression](https://img.shields.io/badge/Linear%20Regression-blue?style=flat-square&logo=mathworks): Imagine drawing a straight line through a set of points to predict future values. Linear Regression helps in predicting something like house prices based on size. 26 | 27 | - **SGDRegressor** ![SGD](https://img.shields.io/badge/SGD-Fast-blue?style=flat-square&logo=rocket): A fast way to do Linear Regression using Stochastic Gradient Descent. Perfect for large datasets. 28 | 29 | - **SGDClassifier** ![Classifier](https://img.shields.io/badge/SGD-Classifier-yellow?style=flat-square&logo=target): A classification algorithm predicting categories like "spam" or "not spam."
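A minimal usage sketch (the toy data below is invented for illustration, and it assumes the repository root is on your `PYTHONPATH`):

```python
import numpy as np
from linear_model import LinearRegression

# toy data: y = 3x + 4 plus a little noise
rng = np.random.RandomState(0)
X = rng.rand(100, 1) * 10
y = 3 * X[:, 0] + 4 + rng.randn(100) * 0.1

model = LinearRegression(normalize=True)  # weights are learned on standardized X
model.fit(X, y)
print(model.score(X, y))     # r-square, close to 1.0
print(model.predict(X[:3]))  # predictions for the first three rows
```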

30 | 31 | 32 | #### 🌳 **2. Decision Trees (`tree`)** 33 | 34 | - **DecisionTreeClassifier** ![Tree](https://img.shields.io/badge/Tree-Classifier-brightgreen?style=flat-square&logo=leaf): Think of this as playing 20 questions to guess something. A decision tree asks yes/no questions to classify data. 35 | 36 | - **DecisionTreeRegressor** ![Regressor](https://img.shields.io/badge/Tree-Regressor-yellowgreen?style=flat-square&logo=mathworks): Predicts a continuous number (like temperature tomorrow) based on input features.
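A quick sketch of the classifier on a one-feature toy problem (the data is invented for illustration):

```python
import numpy as np
from tree import DecisionTreeClassifier

# class 1 whenever the single feature is above 5
X = np.linspace(0, 10, 100).reshape(-1, 1)
y = (X[:, 0] > 5).astype(int)

clf = DecisionTreeClassifier(max_depth=3)
clf.fit(X, y)
print(clf.predict(np.array([[2.0], [8.0]])))  # -> [0 1]
print(clf.score(X, y))                        # accuracy on the training data
```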

37 | 38 | 39 | #### 👥 **3. K-Nearest Neighbors (`neighbors`)** 40 | 41 | - **KNeighborsClassifier** ![KNN](https://img.shields.io/badge/KNN-Classifier-9cf?style=flat-square&logo=people-arrows): Classifies data by looking at the 'k' nearest neighbors to the new point. 42 | 43 | - **KNeighborsRegressor** ![KNN](https://img.shields.io/badge/KNN-Regressor-lightblue?style=flat-square&logo=chart-bar): Instead of classifying, it predicts a number based on nearby data points.
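A sketch of the intended usage; the constructor parameters are not shown in this listing, so `n_neighbors=3` below is an assumption based on the scikit-learn-style API this package mirrors (check `neighbors/classifiers.py` for the real signature):

```python
import numpy as np
from neighbors import KNeighborsClassifier

X = np.array([[0.0], [1.0], [2.0], [8.0], [9.0], [10.0]])
y = np.array([0, 0, 0, 1, 1, 1])

knn = KNeighborsClassifier(n_neighbors=3)  # n_neighbors is assumed here
knn.fit(X, y)
print(knn.predict(np.array([[1.5], [8.5]])))  # expected: [0 1]
```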

44 | 45 | 46 | #### 🧮 **4. Naive Bayes (`naive_bayes`)** 47 | 48 | - **GaussianNB** ![Gaussian](https://img.shields.io/badge/GaussianNB-fast-brightgreen?style=flat-square&logo=matrix): Works great for data that follows a normal distribution (bell-shaped curve). 49 | 50 | - **MultinomialNB** ![Multinomial](https://img.shields.io/badge/MultinomialNB-text-ff69b4?style=flat-square&logo=alphabetical-order): Ideal for text classification tasks like spam detection.
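A small sketch for `GaussianNB` on bell-shaped data (import path and `fit`/`predict` API assumed as above):

```python
import numpy as np
from MLfromScratch.naive_bayes import GaussianNB

# Two normally distributed clusters, one per class
rng = np.random.RandomState(0)
X = np.vstack([rng.normal(0, 1, (20, 2)), rng.normal(5, 1, (20, 2))])
y = np.array([0] * 20 + [1] * 20)

nb = GaussianNB()
nb.fit(X, y)
print(nb.predict(np.array([[0.5, 0.5], [5.5, 4.5]])))  # expected: [0 1]
```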

51 | 52 | 53 | #### 📊 **5. Clustering (`cluster`)** 54 | 55 | - **KMeans** ![KMeans](https://img.shields.io/badge/KMeans-Clustering-ff69b4?style=flat-square&logo=group): Groups data into 'k' clusters based on similarity. 56 | 57 | - **AgglomerativeClustering** ![Agglomerative](https://img.shields.io/badge/Agglomerative-Hierarchical-blueviolet?style=flat-square&logo=chart-bar): Clusters by merging similar points until a single large cluster is formed. 58 | 59 | - **DBSCAN** ![DBSCAN](https://img.shields.io/badge/DBSCAN-Noise%20Filtering-blue?style=flat-square&logo=waves): Groups points close to each other and filters out noise. No need to specify the number of clusters! 60 | 61 | - **MeanShift** ![MeanShift](https://img.shields.io/badge/MeanShift-Clustering-yellowgreen?style=flat-square&logo=sort-amount-up): Shifts data points toward areas of high density to find clusters.
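A sketch using `AgglomerativeClustering.fit_predict`, which is defined in `cluster/hierarchical.py`; the `n_clusters` constructor argument is an assumption inferred from the attribute the implementation keeps internally:

```python
import numpy as np
from MLfromScratch.cluster import AgglomerativeClustering

# Two well-separated groups of points
X = np.array([[0, 0], [0.5, 0], [0, 0.5], [10, 10], [10.5, 10], [10, 10.5]])

agg = AgglomerativeClustering(n_clusters=2)  # assumed constructor argument
labels = agg.fit_predict(X)                  # fit, then return cluster labels
print(labels)                                # origin points vs. (10, 10) points
```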

62 | 63 | 64 | #### 🌲 **6. Ensemble Methods (`ensemble`)** 65 | 66 | - **RandomForestClassifier** ![RandomForest](https://img.shields.io/badge/Random%20Forest-Classifier-brightgreen?style=flat-square&logo=forest): Combines multiple decision trees to make stronger decisions. 67 | 68 | - **RandomForestRegressor** ![RandomForest](https://img.shields.io/badge/Random%20Forest-Regressor-lightblue?style=flat-square&logo=tree): Predicts continuous values using an ensemble of decision trees. 69 | 70 | - **GradientBoostingClassifier** ![GradientBoosting](https://img.shields.io/badge/Gradient%20Boosting-Classifier-9cf?style=flat-square&logo=chart-line): Builds trees sequentially, each correcting errors made by the last. A **GradientBoostingRegressor** is also exported for continuous targets; see the sketch after this list. 71 | 72 | - **VotingClassifier** ![Voting](https://img.shields.io/badge/Voting-Classifier-orange?style=flat-square&logo=thumbs-up): Combines the results of multiple models to make a final prediction.
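The gradient boosting constructor, `fit`, `predict`, and `score` can be read directly from `ensemble/boosting.py` further down this page; here is a small end-to-end run (only the `MLfromScratch` import path is an assumption):

```python
import numpy as np
from MLfromScratch.ensemble import GradientBoostingRegressor

# Noisy quadratic for the booster to chase
rng = np.random.RandomState(1)
X = rng.uniform(-3, 3, (200, 1))
y = X.ravel() ** 2 + rng.normal(0, 0.3, 200)

gbr = GradientBoostingRegressor(n_estimators=50, learning_rate=0.1, max_depth=3)
gbr.fit(X, y)           # each tree fits the residuals of the previous ones
print(gbr.score(X, y))  # r2 score on the training data
```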

73 | 74 | 75 | #### 📐 **7. Metrics (`metrics`)** 76 | 77 | Measure your model’s performance: 78 | 79 | - **accuracy_score** ![Accuracy](https://img.shields.io/badge/Accuracy-High-brightgreen?style=flat-square&logo=bar-chart): Measures how many predictions your model got right. 80 | 81 | - **f1_score** ![F1 Score](https://img.shields.io/badge/F1_Score-Balance-lightgreen?style=flat-square&logo=scales): Balances precision and recall into a single score. 82 | 83 | - **roc_curve** ![ROC](https://img.shields.io/badge/ROC-Curve-orange?style=flat-square&logo=wave): Shows the trade-off between true positives and false positives.
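The metric names come straight from `metrics/__init__.py`; the `(y_true, y_pred)` argument order is assumed to follow the usual convention:

```python
import numpy as np
from MLfromScratch.metrics import accuracy_score, f1_score

y_true = np.array([1, 0, 1, 1, 0, 1])
y_pred = np.array([1, 0, 0, 1, 0, 1])

print(accuracy_score(y_true, y_pred))  # 5 of 6 correct, ~0.833
print(f1_score(y_true, y_pred))        # balances precision and recall
```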

84 | 85 | 86 | #### ⚙️ **8. Model Selection (`model_selection`)** 87 | 88 | - **train_test_split** ![TrainTestSplit](https://img.shields.io/badge/Train_Test_Split-blueviolet?style=flat-square&logo=arrows): Splits your data into training and test sets. 89 | 90 | - **KFold** ![KFold](https://img.shields.io/badge/KFold-CrossValidation-blue?style=flat-square&logo=matrix): Splits the data into 'k' folds and rotates which fold is held out, so every sample is used for both training and validation.
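The `train_test_split(X, Y, test_size, random_state)` signature appears in `model_selection/base.py`; the order of the returned arrays is assumed to follow the usual `X_train, X_test, y_train, y_test` convention:

```python
import numpy as np
from MLfromScratch.model_selection import train_test_split

X = np.arange(20).reshape(10, 2)
y = np.arange(10)

# test_size is required and is the fraction of samples held out for testing
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=7)
print(len(X_train), len(X_test))
```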

91 | 92 | 93 | #### 🔍 **9. Preprocessing (`preprocessing`)** 94 | 95 | - **StandardScaler** ![StandardScaler](https://img.shields.io/badge/StandardScaler-Normalization-ff69b4?style=flat-square&logo=arrows-v): Standardizes your data so it has a mean of 0 and a standard deviation of 1. 96 | 97 | - **LabelEncoder** ![LabelEncoder](https://img.shields.io/badge/LabelEncoder-Classification-yellow?style=flat-square&logo=code): Converts text labels into numerical labels (e.g., "cat", "dog").
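A sketch of the preprocessing workflow; the `fit`/`transform` pair is an assumption based on the scikit-learn-style API the package mirrors:

```python
import numpy as np
from MLfromScratch.preprocessing import StandardScaler, LabelEncoder

X = np.array([[1.0, 200.0], [2.0, 300.0], [3.0, 400.0]])
scaler = StandardScaler()
scaler.fit(X)               # learn per-column mean and standard deviation
print(scaler.transform(X))  # columns now have mean 0 and std 1

le = LabelEncoder()
le.fit(np.array(['cat', 'dog', 'cat']))        # learn the label vocabulary
print(le.transform(np.array(['dog', 'cat'])))  # labels as integers
```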

98 | 99 | 100 | #### 🧩 **10. Dimensionality Reduction (`dimensionality-reduction`)** 101 | 102 | Dimensionality Reduction helps in simplifying data while retaining most of its valuable information. By reducing the number of features (dimensions) in a dataset, it makes data easier to visualize and speeds up machine learning algorithms. 103 | 104 | - **PCA (Principal Component Analysis)** ![PCA](https://img.shields.io/badge/PCA-PrincipalComponentAnalysis-orange?style=flat-square&logo=chart-line): PCA reduces the number of dimensions by finding new uncorrelated variables called principal components. It projects your data onto a lower-dimensional space while retaining as much variance as possible.

105 | - **How It Works**: PCA finds the axes (principal components) that maximize the variance in your data. The first principal component captures the most variance, and each subsequent component captures progressively less. 106 | - **Use Case**: Use PCA when you have many features, and you want to simplify your dataset for better visualization or faster computation. It is particularly useful when features are highly correlated. 107 | 108 | --- 109 | 110 | ## 🎯 Why Use This Library? 111 | 112 | - **Learning-First Approach**: If you're a beginner and want to *understand* machine learning, this is the library for you. No hidden complexity, just code. 113 | - **No Hidden Magic**: Everything is written from scratch, so you can see exactly how each algorithm works. 114 | - **Lightweight**: Uses only `NumPy`, making it fast and easy to run.

115 | 116 | ## 🚀 Getting Started 117 | 118 | ```bash 119 | # Clone the repository 120 | git clone https://github.com/adityajn105/MLfromScratch.git 121 | 122 | # Navigate to the project directory 123 | cd MLfromScratch 124 | 125 | # Install the dependencies (NumPy for the library, pandas for the test scripts) 126 | pip install numpy pandas 127 | ``` 128 |
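Once the cloned folder is importable as a package (e.g. run Python from its parent directory), a typical session looks like this sketch; method names beyond those visible in the sources on this page are assumed to follow scikit-learn conventions:

```python
import numpy as np
from MLfromScratch.model_selection import train_test_split
from MLfromScratch.linear_model import LinearRegression
from MLfromScratch.metrics import mean_squared_error

# Synthetic regression problem: y = 2.5x + 1 plus noise
rng = np.random.RandomState(0)
X = rng.uniform(0, 10, (100, 1))
y = 2.5 * X.ravel() + 1 + rng.normal(0, 0.5, 100)

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3)
model = LinearRegression()
model.fit(X_train, y_train)
print(mean_squared_error(y_test, model.predict(X_test)))  # should be small
```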

129 | 130 | ## 👨‍💻 Author 131 | This project is maintained by [Aditya Jain](https://adityajain.me/).

132 | 133 | ## 🧑‍💻 Contributors 134 | Contributor: [Subrahmanya Gaonkar](https://github.com/negativenagesh) 135 | 136 | We welcome contributions from everyone, especially beginners! If you're new to open-source, don't worry: feel free to ask questions, open issues, or submit a pull request.

137 | 138 | ## 🤝 How to Contribute 139 | 1. Fork the repository. 140 | 2. Create a new branch (`git checkout -b feature-branch`). 141 | 3. Make your changes and commit (`git commit -m "Added new feature"`). 142 | 4. Push the changes (`git push origin feature-branch`). 143 | 5. Submit a pull request and explain your changes.

144 | 145 | ## 📄 License 146 | This project is licensed under the [MIT License](https://github.com/adityajn105/MLfromScratch/blob/master/LICENSE) - see the LICENSE file for details. -------------------------------------------------------------------------------- /ensemble/boosting.py: -------------------------------------------------------------------------------- 1 | """ 2 | Author : Aditya Jain 3 | Contact : https://adityajain.me 4 | """ 5 | 6 | import numpy as np 7 | from ..tree import DecisionTreeRegressor 8 | 9 | class GradientBoostingRegressor(): 10 | """ 11 | Gradient Boosting for regression. 12 | 13 | GB builds an additive model in a forward stage-wise fashion; 14 | it allows for the optimization of arbitrary differentiable loss functions. 15 | In each stage a regression tree is fit on the negative gradient of the 16 | given loss function. 17 | 18 | Parameters 19 | ---------- 20 | 21 | loss : 'str', default 'ls' 22 | Loss to be optimized. 'ls' refers to least squares 23 | regression. 24 | Currently only least squares regression is available 25 | 26 | learning_rate : float, default 0.1 27 | learning rate shrinks the contribution of each tree by `learning_rate` 28 | 29 | n_estimators : int, default 100 30 | Number of base estimators to train 31 | 32 | criterion : str, Default 'mse', ('mse', 'mae', 'std') 33 | The function to measure the quality of a split. 34 | 'mse' is mean squared error 35 | 'mae' is mean absolute error 36 | 'std' is standard deviation 37 | 38 | max_depth : int, default : None i.e. 'inf' 39 | The maximum depth allowed for each base regressor 40 | 41 | min_samples_split : int, default 2 42 | The minimum number of samples required to split an internal node 43 | 44 | max_features : int, float, string or None, (default None) 45 | The number of features to consider when looking for the best split 46 | - If int, then consider `max_features` features at each split. 47 | - If float, then `max_features` is a percentage of n_features 48 | - If "auto", then `max_features=sqrt(n_features)` (same as "sqrt"). 49 | - If "sqrt", then `max_features=sqrt(n_features)`. 50 | - If "log2", then `max_features=log2(n_features)`. 51 | - If None, then `max_features=n_features`. 52 | 53 | verbose : boolean, default False 54 | Enable verbose output, print loss once in a while.
55 | 56 | """ 57 | def __init__(self, loss='ls', learning_rate = 0.1, n_estimators=100, criterion='mse', 58 | max_depth=None, min_samples_split=2, max_features=None, verbose=False): 59 | self.__lr = learning_rate 60 | self.__n_estimators = n_estimators 61 | self.__criterion = criterion 62 | self.__max_depth = max_depth 63 | self.__min_samples_split = min_samples_split 64 | self.__max_features = None 65 | if isinstance(max_features,str): 66 | self.__max_features = { 67 | 'auto': lambda x: int(np.sqrt(x)), 'sqrt': lambda x: int(np.sqrt(x)), 68 | 'log2': lambda x: int(np.log2(x)), 'max_features': lambda x: x }[max_features] 69 | elif isinstance(max_features, int): 70 | self.__max_features = lambda x: max_features 71 | elif isinstance(max_features, float): 72 | self.__max_features = lambda x: int(max_features*x) 73 | else: 74 | self.__max_features = lambda x: x 75 | 76 | self.__n_features = None 77 | self.__trees = [] 78 | self.__verbose = verbose 79 | self.__f0 = None 80 | 81 | def __mse(self,y_pred,y_true): 82 | return np.mean( (y_true-y_pred)**2 ) 83 | 84 | def __negative_least_squares_gradient(self,y_pred,y_true): 85 | grad = -(y_true - y_pred) 86 | return -1 * grad # i.e. the residual (y_true - y_pred) 87 | def __get_feature_index(self): 88 | return np.random.choice( np.arange(0,self.__n_features,1), 89 | size=self.__max_features(self.__n_features), replace=False) 90 | 91 | def fit(self, X, y): 92 | """ 93 | Fit decision trees to build GB model in additive fashion 94 | 95 | Parameters 96 | ---------- 97 | X : numpy array, feature observations 98 | 99 | y : numpy array, target values 100 | """ 101 | self.__n_features = X.shape[1] 102 | y_ = self.__f0 = y.mean() 103 | for i in range(0,self.__n_estimators): 104 | dt = DecisionTreeRegressor(criterion=self.__criterion, 105 | max_depth=self.__max_depth, 106 | min_samples_split=self.__min_samples_split) 107 | feature_index = self.__get_feature_index() 108 | h = self.__negative_least_squares_gradient(y_,y) 109 | dt.fit(X[:,feature_index], h) 110 | self.__trees.append( (dt.tree_,feature_index) ) 111 | y_ = self.predict(X) 112 | if self.__verbose and i%5==0: 113 | print( f"MSE after trees {i+1} : {self.__mse(y_,y)}" ) 114 | 115 | def predict(self, X): 116 | """ 117 | Predict labels for observations using GB model 118 | 119 | Parameters 120 | ---------- 121 | X : numpy array, features 122 | 123 | Returns 124 | ------- 125 | y : numpy array, labels 126 | 127 | """ 128 | predictions = np.ones( len(X) ) * self.__f0 129 | for i in range(1,len(self.__trees)+1): 130 | root, features = self.__trees[i-1] 131 | predictions += self.__lr * np.array([ self.__predict_row(row,root) for row in X[:,features] ]) 132 | return predictions 133 | 134 | def __predict_row(self,row,node): 135 | if row[node['index']] < node['value']: 136 | if isinstance(node['left'], dict): return self.__predict_row(row,node['left']) 137 | else: return node['left'] 138 | else: 139 | if isinstance(node['right'], dict): return self.__predict_row(row,node['right']) 140 | else: return node['right'] 141 | 142 | def score(self,X,y): 143 | """ 144 | Compute coefficient of determination (r2 score) 145 | 146 | r2 score = 1 - RSS/TSS = 1 - sum((y_true - y_pred)^2) / sum((y_true - y_true.mean())^2) 147 | 148 | Parameters 149 | ---------- 150 | X : numpy array, features 151 | 152 | y : numpy array, labels 153 | 154 | Returns 155 | ------- 156 | r2_score 157 | """ 158 | y_pred = self.predict(X) 159 | return 1 - np.sum(np.square(y-y_pred))/np.sum(np.square(y-y.mean())) 160 | 161 | 162 | 163 | class GradientBoostingClassifier(): 164 |
""" 165 | Gradient Boosting for regression. 166 | 167 | GB builds an additive model in a forward stage-wise fashion; 168 | it allows for the optimization of arbitrary differentiable loss functions. 169 | In each stage n_classes_ regression trees are fit on the negative gradient of the 170 | binomial or multinomial deviance loss function. 171 | 172 | Parameters 173 | ---------- 174 | 175 | loss : 'str', default 'deviance' 176 | Loss to be optimized. 'deviance' refers to cross entropy 177 | regression. 178 | Currently only binomial deviance is available 179 | 180 | learning_rate : float, default 0.1 181 | learning rate shrinks the contribution of each tree by `learning_rate` 182 | 183 | n_estimators : int, default 100 184 | Number of base estimators to train 185 | 186 | criterion : str, Default 'mse', ('mse', 'mae', 'std') 187 | The function to measure the quality of a split. 188 | 'mse' is mean squared error 189 | 'mae' is mean absolute error 190 | 'std' is standard deviation 191 | 192 | max_depth : int, default : None i.e. 'inf' 193 | The maximum depth allowed for each base regressor 194 | 195 | min_samples_split : int, default 2 196 | The minimum number of samples required to split an internal node 197 | 198 | max_features : int, float, string or None, (default None) 199 | The number of features to consider when looking for the best split 200 | - If int, then consider `max_features` features at each split. 201 | - If float, then `max_features` is a percentage of n_features 202 | - If "auto", then `max_features=n_features`. 203 | - If "sqrt", then `max_features=sqrt(n_features)`. 204 | - If "log2", then `max_features=log2(n_features)`. 205 | - If None, then `max_features=n_features`. 206 | 207 | verbose : boolean, default False 208 | Enable verbose output, print loss once in a while. 
209 | 210 | """ 211 | def __init__(self, loss='deviance', learning_rate = 0.1, n_estimators=100, criterion='mse', 212 | max_depth=None, min_samples_split=2, max_features=None, verbose=False): 213 | self.__lr = learning_rate 214 | self.__n_estimators = n_estimators 215 | self.__criterion = criterion 216 | self.__max_depth = max_depth 217 | self.__min_samples_split = min_samples_split 218 | self.__max_features = None 219 | if isinstance(max_features,str): 220 | self.__max_features = { 221 | 'auto': lambda x: int(np.sqrt(x)), 'sqrt': lambda x: int(np.sqrt(x)), 222 | 'log2': lambda x: int(np.log2(x)), 'max_features': lambda x: x }[max_features] 223 | elif isinstance(max_features, int): 224 | self.__max_features = lambda x: max_features 225 | elif isinstance(max_features, float): 226 | self.__max_features = lambda x: int(max_features*x) 227 | else: 228 | self.__max_features = lambda x: x 229 | 230 | self.__n_features = None 231 | self.__trees = [] 232 | self.__verbose = verbose 233 | self.__f0 = None 234 | 235 | def __binomial_deviance(self,p_pred,y_true): 236 | return np.sum(-y_true*np.log(p_pred) - (1-y_true)*np.log(1-p_pred)) 237 | 238 | def __negative_binomial_deviance_gradient(self,p_pred,y_true): 239 | grad = -1 * (y_true - p_pred) 240 | return -1 * grad 241 | 242 | def __get_feature_index(self): 243 | return np.random.choice( np.arange(0,self.__n_features,1), 244 | size=self.__max_features(self.__n_features), replace=False) 245 | 246 | def fit(self, X, y): 247 | """ 248 | Fit the X and y to estimators 249 | 250 | Parameters 251 | ---------- 252 | X : numpy array, independent variables 253 | 254 | y : numpy array, target variable 255 | 256 | """ 257 | self.__n_features = X.shape[1] 258 | p = self.__f0 = max( (y==1).sum(), (y==0).sum()) / len(y) 259 | if self.__verbose: 260 | print( f"Binomial Deviance Loss, Accuracy after trees {0} : {self.__binomial_deviance(p,y)}, {self.score(X,y)}" ) 261 | for i in range(0,self.__n_estimators): 262 | dt = DecisionTreeRegressor(criterion=self.__criterion, 263 | max_depth=self.__max_depth, 264 | min_samples_split=self.__min_samples_split) 265 | feature_index = self.__get_feature_index() 266 | h = self.__negative_binomial_deviance_gradient(p,y) 267 | dt.fit(X[:,feature_index], h) 268 | self.__trees.append( (dt.tree_,feature_index) ) 269 | p = self.predict_proba(X)[:,1] 270 | if self.__verbose and (i+1)%5==0: 271 | print( f"Binomial Deviance Loss, Accuracy after trees {i+1} : {self.__binomial_deviance(p,y)}, {self.score(X,y)}" ) 272 | 273 | def predict_proba(self,X): 274 | """ 275 | Predict probability of each class using all estimators 276 | 277 | Parameters 278 | ---------- 279 | X : numpy array, independent variables 280 | 281 | Returns 282 | ------- 283 | probability of each class [ n_samples, n_classes ] 284 | 285 | """ 286 | predictions = np.ones( len(X) ) * self.__f0 287 | for i in range(1,len(self.__trees)+1): 288 | root, features = self.__trees[i-1] 289 | predictions += self.__lr * np.array([ self.__predict_row(row,root) for row in X[:,features] ]) 290 | proba = np.zeros( (len(X),2) ) 291 | proba[:,0] = (1-predictions) 292 | proba[:,1] = predictions 293 | return proba 294 | 295 | 296 | def predict(self, X): 297 | """ 298 | Predict labels using all estimators 299 | 300 | Parameters 301 | ---------- 302 | X : numpy array, independent variables 303 | 304 | Returns 305 | ------- 306 | predicted labels 307 | 308 | """ 309 | proba = self.predict_proba(X) 310 | return (proba[:,1]>0.5)*1 311 | 312 | def __predict_row(self,row,node): 313 | if
row[node['index']] < node['value']: 314 | if isinstance(node['left'], dict): return self.__predict_row(row,node['left']) 315 | else: return node['left'] 316 | else: 317 | if isinstance(node['right'], dict): return self.__predict_row(row,node['right']) 318 | else: return node['right'] 319 | 320 | def score(self,X,y): 321 | """ 322 | Calculate accuracy from independent variables 323 | 324 | Parameters 325 | ---------- 326 | X : numpy array, independent variables 327 | 328 | y : numpy array, dependent variable 329 | 330 | Returns 331 | ------- 332 | accuracy score 333 | 334 | """ 335 | y_pred = self.predict(X) 336 | return (y_pred==y).sum()/len(y) -------------------------------------------------------------------------------- /test/data_regression.csv: -------------------------------------------------------------------------------- 1 | "feature1","feature2","feature3","feature4","feature5","feature6","feature7","feature8","feature9","feature10","feature11","feature12","feature13","Y" 2 | 0.00632,18,2.31,"0",0.538,6.575,65.2,4.09,1,296,15.3,396.9,4.98,24 3 | 0.02731,0,7.07,"0",0.469,6.421,78.9,4.9671,2,242,17.8,396.9,9.14,21.6 4 | 0.02729,0,7.07,"0",0.469,7.185,61.1,4.9671,2,242,17.8,392.83,4.03,34.7 5 | 0.03237,0,2.18,"0",0.458,6.998,45.8,6.0622,3,222,18.7,394.63,2.94,33.4 6 | 0.06905,0,2.18,"0",0.458,7.147,54.2,6.0622,3,222,18.7,396.9,5.33,36.2 7 | 0.02985,0,2.18,"0",0.458,6.43,58.7,6.0622,3,222,18.7,394.12,5.21,28.7 8 | 0.08829,12.5,7.87,"0",0.524,6.012,66.6,5.5605,5,311,15.2,395.6,12.43,22.9 9 | 0.14455,12.5,7.87,"0",0.524,6.172,96.1,5.9505,5,311,15.2,396.9,19.15,27.1 10 | 0.21124,12.5,7.87,"0",0.524,5.631,100,6.0821,5,311,15.2,386.63,29.93,16.5 11 | 0.17004,12.5,7.87,"0",0.524,6.004,85.9,6.5921,5,311,15.2,386.71,17.1,18.9 12 | 0.22489,12.5,7.87,"0",0.524,6.377,94.3,6.3467,5,311,15.2,392.52,20.45,15 13 | 0.11747,12.5,7.87,"0",0.524,6.009,82.9,6.2267,5,311,15.2,396.9,13.27,18.9 14 | 0.09378,12.5,7.87,"0",0.524,5.889,39,5.4509,5,311,15.2,390.5,15.71,21.7 15 | 0.62976,0,8.14,"0",0.538,5.949,61.8,4.7075,4,307,21,396.9,8.26,20.4 16 | 0.63796,0,8.14,"0",0.538,6.096,84.5,4.4619,4,307,21,380.02,10.26,18.2 17 | 0.62739,0,8.14,"0",0.538,5.834,56.5,4.4986,4,307,21,395.62,8.47,19.9 18 | 1.05393,0,8.14,"0",0.538,5.935,29.3,4.4986,4,307,21,386.85,6.58,23.1 19 | 0.7842,0,8.14,"0",0.538,5.99,81.7,4.2579,4,307,21,386.75,14.67,17.5 20 | 0.80271,0,8.14,"0",0.538,5.456,36.6,3.7965,4,307,21,288.99,11.69,20.2 21 | 0.7258,0,8.14,"0",0.538,5.727,69.5,3.7965,4,307,21,390.95,11.28,18.2 22 | 1.25179,0,8.14,"0",0.538,5.57,98.1,3.7979,4,307,21,376.57,21.02,13.6 23 | 0.85204,0,8.14,"0",0.538,5.965,89.2,4.0123,4,307,21,392.53,13.83,19.6 24 | 1.23247,0,8.14,"0",0.538,6.142,91.7,3.9769,4,307,21,396.9,18.72,15.2 25 | 0.98843,0,8.14,"0",0.538,5.813,100,4.0952,4,307,21,394.54,19.88,14.5 26 | 0.75026,0,8.14,"0",0.538,5.924,94.1,4.3996,4,307,21,394.33,16.3,15.6 27 | 0.84054,0,8.14,"0",0.538,5.599,85.7,4.4546,4,307,21,303.42,16.51,13.9 28 | 0.67191,0,8.14,"0",0.538,5.813,90.3,4.682,4,307,21,376.88,14.81,16.6 29 | 0.95577,0,8.14,"0",0.538,6.047,88.8,4.4534,4,307,21,306.38,17.28,14.8 30 | 0.77299,0,8.14,"0",0.538,6.495,94.4,4.4547,4,307,21,387.94,12.8,18.4 31 | 1.00245,0,8.14,"0",0.538,6.674,87.3,4.239,4,307,21,380.23,11.98,21 32 | 1.13081,0,8.14,"0",0.538,5.713,94.1,4.233,4,307,21,360.17,22.6,12.7 33 | 1.35472,0,8.14,"0",0.538,6.072,100,4.175,4,307,21,376.73,13.04,14.5 34 | 1.38799,0,8.14,"0",0.538,5.95,82,3.99,4,307,21,232.6,27.71,13.2 35 | 1.15172,0,8.14,"0",0.538,5.701,95,3.7872,4,307,21,358.77,18.35,13.1 36 | 
1.61282,0,8.14,"0",0.538,6.096,96.9,3.7598,4,307,21,248.31,20.34,13.5 37 | 0.06417,0,5.96,"0",0.499,5.933,68.2,3.3603,5,279,19.2,396.9,9.68,18.9 38 | 0.09744,0,5.96,"0",0.499,5.841,61.4,3.3779,5,279,19.2,377.56,11.41,20 39 | 0.08014,0,5.96,"0",0.499,5.85,41.5,3.9342,5,279,19.2,396.9,8.77,21 40 | 0.17505,0,5.96,"0",0.499,5.966,30.2,3.8473,5,279,19.2,393.43,10.13,24.7 41 | 0.02763,75,2.95,"0",0.428,6.595,21.8,5.4011,3,252,18.3,395.63,4.32,30.8 42 | 0.03359,75,2.95,"0",0.428,7.024,15.8,5.4011,3,252,18.3,395.62,1.98,34.9 43 | 0.12744,0,6.91,"0",0.448,6.77,2.9,5.7209,3,233,17.9,385.41,4.84,26.6 44 | 0.1415,0,6.91,"0",0.448,6.169,6.6,5.7209,3,233,17.9,383.37,5.81,25.3 45 | 0.15936,0,6.91,"0",0.448,6.211,6.5,5.7209,3,233,17.9,394.46,7.44,24.7 46 | 0.12269,0,6.91,"0",0.448,6.069,40,5.7209,3,233,17.9,389.39,9.55,21.2 47 | 0.17142,0,6.91,"0",0.448,5.682,33.8,5.1004,3,233,17.9,396.9,10.21,19.3 48 | 0.18836,0,6.91,"0",0.448,5.786,33.3,5.1004,3,233,17.9,396.9,14.15,20 49 | 0.22927,0,6.91,"0",0.448,6.03,85.5,5.6894,3,233,17.9,392.74,18.8,16.6 50 | 0.25387,0,6.91,"0",0.448,5.399,95.3,5.87,3,233,17.9,396.9,30.81,14.4 51 | 0.21977,0,6.91,"0",0.448,5.602,62,6.0877,3,233,17.9,396.9,16.2,19.4 52 | 0.08873,21,5.64,"0",0.439,5.963,45.7,6.8147,4,243,16.8,395.56,13.45,19.7 53 | 0.04337,21,5.64,"0",0.439,6.115,63,6.8147,4,243,16.8,393.97,9.43,20.5 54 | 0.0536,21,5.64,"0",0.439,6.511,21.1,6.8147,4,243,16.8,396.9,5.28,25 55 | 0.04981,21,5.64,"0",0.439,5.998,21.4,6.8147,4,243,16.8,396.9,8.43,23.4 56 | 0.0136,75,4,"0",0.41,5.888,47.6,7.3197,3,469,21.1,396.9,14.8,18.9 57 | 0.01311,90,1.22,"0",0.403,7.249,21.9,8.6966,5,226,17.9,395.93,4.81,35.4 58 | 0.02055,85,0.74,"0",0.41,6.383,35.7,9.1876,2,313,17.3,396.9,5.77,24.7 59 | 0.01432,100,1.32,"0",0.411,6.816,40.5,8.3248,5,256,15.1,392.9,3.95,31.6 60 | 0.15445,25,5.13,"0",0.453,6.145,29.2,7.8148,8,284,19.7,390.68,6.86,23.3 61 | 0.10328,25,5.13,"0",0.453,5.927,47.2,6.932,8,284,19.7,396.9,9.22,19.6 62 | 0.14932,25,5.13,"0",0.453,5.741,66.2,7.2254,8,284,19.7,395.11,13.15,18.7 63 | 0.17171,25,5.13,"0",0.453,5.966,93.4,6.8185,8,284,19.7,378.08,14.44,16 64 | 0.11027,25,5.13,"0",0.453,6.456,67.8,7.2255,8,284,19.7,396.9,6.73,22.2 65 | 0.1265,25,5.13,"0",0.453,6.762,43.4,7.9809,8,284,19.7,395.58,9.5,25 66 | 0.01951,17.5,1.38,"0",0.4161,7.104,59.5,9.2229,3,216,18.6,393.24,8.05,33 67 | 0.03584,80,3.37,"0",0.398,6.29,17.8,6.6115,4,337,16.1,396.9,4.67,23.5 68 | 0.04379,80,3.37,"0",0.398,5.787,31.1,6.6115,4,337,16.1,396.9,10.24,19.4 69 | 0.05789,12.5,6.07,"0",0.409,5.878,21.4,6.498,4,345,18.9,396.21,8.1,22 70 | 0.13554,12.5,6.07,"0",0.409,5.594,36.8,6.498,4,345,18.9,396.9,13.09,17.4 71 | 0.12816,12.5,6.07,"0",0.409,5.885,33,6.498,4,345,18.9,396.9,8.79,20.9 72 | 0.08826,0,10.81,"0",0.413,6.417,6.6,5.2873,4,305,19.2,383.73,6.72,24.2 73 | 0.15876,0,10.81,"0",0.413,5.961,17.5,5.2873,4,305,19.2,376.94,9.88,21.7 74 | 0.09164,0,10.81,"0",0.413,6.065,7.8,5.2873,4,305,19.2,390.91,5.52,22.8 75 | 0.19539,0,10.81,"0",0.413,6.245,6.2,5.2873,4,305,19.2,377.17,7.54,23.4 76 | 0.07896,0,12.83,"0",0.437,6.273,6,4.2515,5,398,18.7,394.92,6.78,24.1 77 | 0.09512,0,12.83,"0",0.437,6.286,45,4.5026,5,398,18.7,383.23,8.94,21.4 78 | 0.10153,0,12.83,"0",0.437,6.279,74.5,4.0522,5,398,18.7,373.66,11.97,20 79 | 0.08707,0,12.83,"0",0.437,6.14,45.8,4.0905,5,398,18.7,386.96,10.27,20.8 80 | 0.05646,0,12.83,"0",0.437,6.232,53.7,5.0141,5,398,18.7,386.4,12.34,21.2 81 | 0.08387,0,12.83,"0",0.437,5.874,36.6,4.5026,5,398,18.7,396.06,9.1,20.3 82 | 0.04113,25,4.86,"0",0.426,6.727,33.5,5.4007,4,281,19,396.9,5.29,28 83 | 
0.04462,25,4.86,"0",0.426,6.619,70.4,5.4007,4,281,19,395.63,7.22,23.9 84 | 0.03659,25,4.86,"0",0.426,6.302,32.2,5.4007,4,281,19,396.9,6.72,24.8 85 | 0.03551,25,4.86,"0",0.426,6.167,46.7,5.4007,4,281,19,390.64,7.51,22.9 86 | 0.05059,0,4.49,"0",0.449,6.389,48,4.7794,3,247,18.5,396.9,9.62,23.9 87 | 0.05735,0,4.49,"0",0.449,6.63,56.1,4.4377,3,247,18.5,392.3,6.53,26.6 88 | 0.05188,0,4.49,"0",0.449,6.015,45.1,4.4272,3,247,18.5,395.99,12.86,22.5 89 | 0.07151,0,4.49,"0",0.449,6.121,56.8,3.7476,3,247,18.5,395.15,8.44,22.2 90 | 0.0566,0,3.41,"0",0.489,7.007,86.3,3.4217,2,270,17.8,396.9,5.5,23.6 91 | 0.05302,0,3.41,"0",0.489,7.079,63.1,3.4145,2,270,17.8,396.06,5.7,28.7 92 | 0.04684,0,3.41,"0",0.489,6.417,66.1,3.0923,2,270,17.8,392.18,8.81,22.6 93 | 0.03932,0,3.41,"0",0.489,6.405,73.9,3.0921,2,270,17.8,393.55,8.2,22 94 | 0.04203,28,15.04,"0",0.464,6.442,53.6,3.6659,4,270,18.2,395.01,8.16,22.9 95 | 0.02875,28,15.04,"0",0.464,6.211,28.9,3.6659,4,270,18.2,396.33,6.21,25 96 | 0.04294,28,15.04,"0",0.464,6.249,77.3,3.615,4,270,18.2,396.9,10.59,20.6 97 | 0.12204,0,2.89,"0",0.445,6.625,57.8,3.4952,2,276,18,357.98,6.65,28.4 98 | 0.11504,0,2.89,"0",0.445,6.163,69.6,3.4952,2,276,18,391.83,11.34,21.4 99 | 0.12083,0,2.89,"0",0.445,8.069,76,3.4952,2,276,18,396.9,4.21,38.7 100 | 0.08187,0,2.89,"0",0.445,7.82,36.9,3.4952,2,276,18,393.53,3.57,43.8 101 | 0.0686,0,2.89,"0",0.445,7.416,62.5,3.4952,2,276,18,396.9,6.19,33.2 102 | 0.14866,0,8.56,"0",0.52,6.727,79.9,2.7778,5,384,20.9,394.76,9.42,27.5 103 | 0.11432,0,8.56,"0",0.52,6.781,71.3,2.8561,5,384,20.9,395.58,7.67,26.5 104 | 0.22876,0,8.56,"0",0.52,6.405,85.4,2.7147,5,384,20.9,70.8,10.63,18.6 105 | 0.21161,0,8.56,"0",0.52,6.137,87.4,2.7147,5,384,20.9,394.47,13.44,19.3 106 | 0.1396,0,8.56,"0",0.52,6.167,90,2.421,5,384,20.9,392.69,12.33,20.1 107 | 0.13262,0,8.56,"0",0.52,5.851,96.7,2.1069,5,384,20.9,394.05,16.47,19.5 108 | 0.1712,0,8.56,"0",0.52,5.836,91.9,2.211,5,384,20.9,395.67,18.66,19.5 109 | 0.13117,0,8.56,"0",0.52,6.127,85.2,2.1224,5,384,20.9,387.69,14.09,20.4 110 | 0.12802,0,8.56,"0",0.52,6.474,97.1,2.4329,5,384,20.9,395.24,12.27,19.8 111 | 0.26363,0,8.56,"0",0.52,6.229,91.2,2.5451,5,384,20.9,391.23,15.55,19.4 112 | 0.10793,0,8.56,"0",0.52,6.195,54.4,2.7778,5,384,20.9,393.49,13,21.7 113 | 0.10084,0,10.01,"0",0.547,6.715,81.6,2.6775,6,432,17.8,395.59,10.16,22.8 114 | 0.12329,0,10.01,"0",0.547,5.913,92.9,2.3534,6,432,17.8,394.95,16.21,18.8 115 | 0.22212,0,10.01,"0",0.547,6.092,95.4,2.548,6,432,17.8,396.9,17.09,18.7 116 | 0.14231,0,10.01,"0",0.547,6.254,84.2,2.2565,6,432,17.8,388.74,10.45,18.5 117 | 0.17134,0,10.01,"0",0.547,5.928,88.2,2.4631,6,432,17.8,344.91,15.76,18.3 118 | 0.13158,0,10.01,"0",0.547,6.176,72.5,2.7301,6,432,17.8,393.3,12.04,21.2 119 | 0.15098,0,10.01,"0",0.547,6.021,82.6,2.7474,6,432,17.8,394.51,10.3,19.2 120 | 0.13058,0,10.01,"0",0.547,5.872,73.1,2.4775,6,432,17.8,338.63,15.37,20.4 121 | 0.14476,0,10.01,"0",0.547,5.731,65.2,2.7592,6,432,17.8,391.5,13.61,19.3 122 | 0.06899,0,25.65,"0",0.581,5.87,69.7,2.2577,2,188,19.1,389.15,14.37,22 123 | 0.07165,0,25.65,"0",0.581,6.004,84.1,2.1974,2,188,19.1,377.67,14.27,20.3 124 | 0.09299,0,25.65,"0",0.581,5.961,92.9,2.0869,2,188,19.1,378.09,17.93,20.5 125 | 0.15038,0,25.65,"0",0.581,5.856,97,1.9444,2,188,19.1,370.31,25.41,17.3 126 | 0.09849,0,25.65,"0",0.581,5.879,95.8,2.0063,2,188,19.1,379.38,17.58,18.8 127 | 0.16902,0,25.65,"0",0.581,5.986,88.4,1.9929,2,188,19.1,385.02,14.81,21.4 128 | 0.38735,0,25.65,"0",0.581,5.613,95.6,1.7572,2,188,19.1,359.29,27.26,15.7 129 | 
0.25915,0,21.89,"0",0.624,5.693,96,1.7883,4,437,21.2,392.11,17.19,16.2 130 | 0.32543,0,21.89,"0",0.624,6.431,98.8,1.8125,4,437,21.2,396.9,15.39,18 131 | 0.88125,0,21.89,"0",0.624,5.637,94.7,1.9799,4,437,21.2,396.9,18.34,14.3 132 | 0.34006,0,21.89,"0",0.624,6.458,98.9,2.1185,4,437,21.2,395.04,12.6,19.2 133 | 1.19294,0,21.89,"0",0.624,6.326,97.7,2.271,4,437,21.2,396.9,12.26,19.6 134 | 0.59005,0,21.89,"0",0.624,6.372,97.9,2.3274,4,437,21.2,385.76,11.12,23 135 | 0.32982,0,21.89,"0",0.624,5.822,95.4,2.4699,4,437,21.2,388.69,15.03,18.4 136 | 0.97617,0,21.89,"0",0.624,5.757,98.4,2.346,4,437,21.2,262.76,17.31,15.6 137 | 0.55778,0,21.89,"0",0.624,6.335,98.2,2.1107,4,437,21.2,394.67,16.96,18.1 138 | 0.32264,0,21.89,"0",0.624,5.942,93.5,1.9669,4,437,21.2,378.25,16.9,17.4 139 | 0.35233,0,21.89,"0",0.624,6.454,98.4,1.8498,4,437,21.2,394.08,14.59,17.1 140 | 0.2498,0,21.89,"0",0.624,5.857,98.2,1.6686,4,437,21.2,392.04,21.32,13.3 141 | 0.54452,0,21.89,"0",0.624,6.151,97.9,1.6687,4,437,21.2,396.9,18.46,17.8 142 | 0.2909,0,21.89,"0",0.624,6.174,93.6,1.6119,4,437,21.2,388.08,24.16,14 143 | 1.62864,0,21.89,"0",0.624,5.019,100,1.4394,4,437,21.2,396.9,34.41,14.4 144 | 3.32105,0,19.58,"1",0.871,5.403,100,1.3216,5,403,14.7,396.9,26.82,13.4 145 | 4.0974,0,19.58,"0",0.871,5.468,100,1.4118,5,403,14.7,396.9,26.42,15.6 146 | 2.77974,0,19.58,"0",0.871,4.903,97.8,1.3459,5,403,14.7,396.9,29.29,11.8 147 | 2.37934,0,19.58,"0",0.871,6.13,100,1.4191,5,403,14.7,172.91,27.8,13.8 148 | 2.15505,0,19.58,"0",0.871,5.628,100,1.5166,5,403,14.7,169.27,16.65,15.6 149 | 2.36862,0,19.58,"0",0.871,4.926,95.7,1.4608,5,403,14.7,391.71,29.53,14.6 150 | 2.33099,0,19.58,"0",0.871,5.186,93.8,1.5296,5,403,14.7,356.99,28.32,17.8 151 | 2.73397,0,19.58,"0",0.871,5.597,94.9,1.5257,5,403,14.7,351.85,21.45,15.4 152 | 1.6566,0,19.58,"0",0.871,6.122,97.3,1.618,5,403,14.7,372.8,14.1,21.5 153 | 1.49632,0,19.58,"0",0.871,5.404,100,1.5916,5,403,14.7,341.6,13.28,19.6 154 | 1.12658,0,19.58,"1",0.871,5.012,88,1.6102,5,403,14.7,343.28,12.12,15.3 155 | 2.14918,0,19.58,"0",0.871,5.709,98.5,1.6232,5,403,14.7,261.95,15.79,19.4 156 | 1.41385,0,19.58,"1",0.871,6.129,96,1.7494,5,403,14.7,321.02,15.12,17 157 | 3.53501,0,19.58,"1",0.871,6.152,82.6,1.7455,5,403,14.7,88.01,15.02,15.6 158 | 2.44668,0,19.58,"0",0.871,5.272,94,1.7364,5,403,14.7,88.63,16.14,13.1 159 | 1.22358,0,19.58,"0",0.605,6.943,97.4,1.8773,5,403,14.7,363.43,4.59,41.3 160 | 1.34284,0,19.58,"0",0.605,6.066,100,1.7573,5,403,14.7,353.89,6.43,24.3 161 | 1.42502,0,19.58,"0",0.871,6.51,100,1.7659,5,403,14.7,364.31,7.39,23.3 162 | 1.27346,0,19.58,"1",0.605,6.25,92.6,1.7984,5,403,14.7,338.92,5.5,27 163 | 1.46336,0,19.58,"0",0.605,7.489,90.8,1.9709,5,403,14.7,374.43,1.73,50 164 | 1.83377,0,19.58,"1",0.605,7.802,98.2,2.0407,5,403,14.7,389.61,1.92,50 165 | 1.51902,0,19.58,"1",0.605,8.375,93.9,2.162,5,403,14.7,388.45,3.32,50 166 | 2.24236,0,19.58,"0",0.605,5.854,91.8,2.422,5,403,14.7,395.11,11.64,22.7 167 | 2.924,0,19.58,"0",0.605,6.101,93,2.2834,5,403,14.7,240.16,9.81,25 168 | 2.01019,0,19.58,"0",0.605,7.929,96.2,2.0459,5,403,14.7,369.3,3.7,50 169 | 1.80028,0,19.58,"0",0.605,5.877,79.2,2.4259,5,403,14.7,227.61,12.14,23.8 170 | 2.3004,0,19.58,"0",0.605,6.319,96.1,2.1,5,403,14.7,297.09,11.1,23.8 171 | 2.44953,0,19.58,"0",0.605,6.402,95.2,2.2625,5,403,14.7,330.04,11.32,22.3 172 | 1.20742,0,19.58,"0",0.605,5.875,94.6,2.4259,5,403,14.7,292.29,14.43,17.4 173 | 2.3139,0,19.58,"0",0.605,5.88,97.3,2.3887,5,403,14.7,348.13,12.03,19.1 174 | 0.13914,0,4.05,"0",0.51,5.572,88.5,2.5961,5,296,16.6,396.9,14.69,23.1 175 | 
0.09178,0,4.05,"0",0.51,6.416,84.1,2.6463,5,296,16.6,395.5,9.04,23.6 176 | 0.08447,0,4.05,"0",0.51,5.859,68.7,2.7019,5,296,16.6,393.23,9.64,22.6 177 | 0.06664,0,4.05,"0",0.51,6.546,33.1,3.1323,5,296,16.6,390.96,5.33,29.4 178 | 0.07022,0,4.05,"0",0.51,6.02,47.2,3.5549,5,296,16.6,393.23,10.11,23.2 179 | 0.05425,0,4.05,"0",0.51,6.315,73.4,3.3175,5,296,16.6,395.6,6.29,24.6 180 | 0.06642,0,4.05,"0",0.51,6.86,74.4,2.9153,5,296,16.6,391.27,6.92,29.9 181 | 0.0578,0,2.46,"0",0.488,6.98,58.4,2.829,3,193,17.8,396.9,5.04,37.2 182 | 0.06588,0,2.46,"0",0.488,7.765,83.3,2.741,3,193,17.8,395.56,7.56,39.8 183 | 0.06888,0,2.46,"0",0.488,6.144,62.2,2.5979,3,193,17.8,396.9,9.45,36.2 184 | 0.09103,0,2.46,"0",0.488,7.155,92.2,2.7006,3,193,17.8,394.12,4.82,37.9 185 | 0.10008,0,2.46,"0",0.488,6.563,95.6,2.847,3,193,17.8,396.9,5.68,32.5 186 | 0.08308,0,2.46,"0",0.488,5.604,89.8,2.9879,3,193,17.8,391,13.98,26.4 187 | 0.06047,0,2.46,"0",0.488,6.153,68.8,3.2797,3,193,17.8,387.11,13.15,29.6 188 | 0.05602,0,2.46,"0",0.488,7.831,53.6,3.1992,3,193,17.8,392.63,4.45,50 189 | 0.07875,45,3.44,"0",0.437,6.782,41.1,3.7886,5,398,15.2,393.87,6.68,32 190 | 0.12579,45,3.44,"0",0.437,6.556,29.1,4.5667,5,398,15.2,382.84,4.56,29.8 191 | 0.0837,45,3.44,"0",0.437,7.185,38.9,4.5667,5,398,15.2,396.9,5.39,34.9 192 | 0.09068,45,3.44,"0",0.437,6.951,21.5,6.4798,5,398,15.2,377.68,5.1,37 193 | 0.06911,45,3.44,"0",0.437,6.739,30.8,6.4798,5,398,15.2,389.71,4.69,30.5 194 | 0.08664,45,3.44,"0",0.437,7.178,26.3,6.4798,5,398,15.2,390.49,2.87,36.4 195 | 0.02187,60,2.93,"0",0.401,6.8,9.9,6.2196,1,265,15.6,393.37,5.03,31.1 196 | 0.01439,60,2.93,"0",0.401,6.604,18.8,6.2196,1,265,15.6,376.7,4.38,29.1 197 | 0.01381,80,0.46,"0",0.422,7.875,32,5.6484,4,255,14.4,394.23,2.97,50 198 | 0.04011,80,1.52,"0",0.404,7.287,34.1,7.309,2,329,12.6,396.9,4.08,33.3 199 | 0.04666,80,1.52,"0",0.404,7.107,36.6,7.309,2,329,12.6,354.31,8.61,30.3 200 | 0.03768,80,1.52,"0",0.404,7.274,38.3,7.309,2,329,12.6,392.2,6.62,34.6 201 | 0.0315,95,1.47,"0",0.403,6.975,15.3,7.6534,3,402,17,396.9,4.56,34.9 202 | 0.01778,95,1.47,"0",0.403,7.135,13.9,7.6534,3,402,17,384.3,4.45,32.9 203 | 0.03445,82.5,2.03,"0",0.415,6.162,38.4,6.27,2,348,14.7,393.77,7.43,24.1 204 | 0.02177,82.5,2.03,"0",0.415,7.61,15.7,6.27,2,348,14.7,395.38,3.11,42.3 205 | 0.0351,95,2.68,"0",0.4161,7.853,33.2,5.118,4,224,14.7,392.78,3.81,48.5 206 | 0.02009,95,2.68,"0",0.4161,8.034,31.9,5.118,4,224,14.7,390.55,2.88,50 207 | 0.13642,0,10.59,"0",0.489,5.891,22.3,3.9454,4,277,18.6,396.9,10.87,22.6 208 | 0.22969,0,10.59,"0",0.489,6.326,52.5,4.3549,4,277,18.6,394.87,10.97,24.4 209 | 0.25199,0,10.59,"0",0.489,5.783,72.7,4.3549,4,277,18.6,389.43,18.06,22.5 210 | 0.13587,0,10.59,"1",0.489,6.064,59.1,4.2392,4,277,18.6,381.32,14.66,24.4 211 | 0.43571,0,10.59,"1",0.489,5.344,100,3.875,4,277,18.6,396.9,23.09,20 212 | 0.17446,0,10.59,"1",0.489,5.96,92.1,3.8771,4,277,18.6,393.25,17.27,21.7 213 | 0.37578,0,10.59,"1",0.489,5.404,88.6,3.665,4,277,18.6,395.24,23.98,19.3 214 | 0.21719,0,10.59,"1",0.489,5.807,53.8,3.6526,4,277,18.6,390.94,16.03,22.4 215 | 0.14052,0,10.59,"0",0.489,6.375,32.3,3.9454,4,277,18.6,385.81,9.38,28.1 216 | 0.28955,0,10.59,"0",0.489,5.412,9.8,3.5875,4,277,18.6,348.93,29.55,23.7 217 | 0.19802,0,10.59,"0",0.489,6.182,42.4,3.9454,4,277,18.6,393.63,9.47,25 218 | 0.0456,0,13.89,"1",0.55,5.888,56,3.1121,5,276,16.4,392.8,13.51,23.3 219 | 0.07013,0,13.89,"0",0.55,6.642,85.1,3.4211,5,276,16.4,392.78,9.69,28.7 220 | 0.11069,0,13.89,"1",0.55,5.951,93.8,2.8893,5,276,16.4,396.9,17.92,21.5 221 | 
0.11425,0,13.89,"1",0.55,6.373,92.4,3.3633,5,276,16.4,393.74,10.5,23 222 | 0.35809,0,6.2,"1",0.507,6.951,88.5,2.8617,8,307,17.4,391.7,9.71,26.7 223 | 0.40771,0,6.2,"1",0.507,6.164,91.3,3.048,8,307,17.4,395.24,21.46,21.7 224 | 0.62356,0,6.2,"1",0.507,6.879,77.7,3.2721,8,307,17.4,390.39,9.93,27.5 225 | 0.6147,0,6.2,"0",0.507,6.618,80.8,3.2721,8,307,17.4,396.9,7.6,30.1 226 | 0.31533,0,6.2,"0",0.504,8.266,78.3,2.8944,8,307,17.4,385.05,4.14,44.8 227 | 0.52693,0,6.2,"0",0.504,8.725,83,2.8944,8,307,17.4,382,4.63,50 228 | 0.38214,0,6.2,"0",0.504,8.04,86.5,3.2157,8,307,17.4,387.38,3.13,37.6 229 | 0.41238,0,6.2,"0",0.504,7.163,79.9,3.2157,8,307,17.4,372.08,6.36,31.6 230 | 0.29819,0,6.2,"0",0.504,7.686,17,3.3751,8,307,17.4,377.51,3.92,46.7 231 | 0.44178,0,6.2,"0",0.504,6.552,21.4,3.3751,8,307,17.4,380.34,3.76,31.5 232 | 0.537,0,6.2,"0",0.504,5.981,68.1,3.6715,8,307,17.4,378.35,11.65,24.3 233 | 0.46296,0,6.2,"0",0.504,7.412,76.9,3.6715,8,307,17.4,376.14,5.25,31.7 234 | 0.57529,0,6.2,"0",0.507,8.337,73.3,3.8384,8,307,17.4,385.91,2.47,41.7 235 | 0.33147,0,6.2,"0",0.507,8.247,70.4,3.6519,8,307,17.4,378.95,3.95,48.3 236 | 0.44791,0,6.2,"1",0.507,6.726,66.5,3.6519,8,307,17.4,360.2,8.05,29 237 | 0.33045,0,6.2,"0",0.507,6.086,61.5,3.6519,8,307,17.4,376.75,10.88,24 238 | 0.52058,0,6.2,"1",0.507,6.631,76.5,4.148,8,307,17.4,388.45,9.54,25.1 239 | 0.51183,0,6.2,"0",0.507,7.358,71.6,4.148,8,307,17.4,390.07,4.73,31.5 240 | 0.08244,30,4.93,"0",0.428,6.481,18.5,6.1899,6,300,16.6,379.41,6.36,23.7 241 | 0.09252,30,4.93,"0",0.428,6.606,42.2,6.1899,6,300,16.6,383.78,7.37,23.3 242 | 0.11329,30,4.93,"0",0.428,6.897,54.3,6.3361,6,300,16.6,391.25,11.38,22 243 | 0.10612,30,4.93,"0",0.428,6.095,65.1,6.3361,6,300,16.6,394.62,12.4,20.1 244 | 0.1029,30,4.93,"0",0.428,6.358,52.9,7.0355,6,300,16.6,372.75,11.22,22.2 245 | 0.12757,30,4.93,"0",0.428,6.393,7.8,7.0355,6,300,16.6,374.71,5.19,23.7 246 | 0.20608,22,5.86,"0",0.431,5.593,76.5,7.9549,7,330,19.1,372.49,12.5,17.6 247 | 0.19133,22,5.86,"0",0.431,5.605,70.2,7.9549,7,330,19.1,389.13,18.46,18.5 248 | 0.33983,22,5.86,"0",0.431,6.108,34.9,8.0555,7,330,19.1,390.18,9.16,24.3 249 | 0.19657,22,5.86,"0",0.431,6.226,79.2,8.0555,7,330,19.1,376.14,10.15,20.5 250 | 0.16439,22,5.86,"0",0.431,6.433,49.1,7.8265,7,330,19.1,374.71,9.52,24.5 251 | 0.19073,22,5.86,"0",0.431,6.718,17.5,7.8265,7,330,19.1,393.74,6.56,26.2 252 | 0.1403,22,5.86,"0",0.431,6.487,13,7.3967,7,330,19.1,396.28,5.9,24.4 253 | 0.21409,22,5.86,"0",0.431,6.438,8.9,7.3967,7,330,19.1,377.07,3.59,24.8 254 | 0.08221,22,5.86,"0",0.431,6.957,6.8,8.9067,7,330,19.1,386.09,3.53,29.6 255 | 0.36894,22,5.86,"0",0.431,8.259,8.4,8.9067,7,330,19.1,396.9,3.54,42.8 256 | 0.04819,80,3.64,"0",0.392,6.108,32,9.2203,1,315,16.4,392.89,6.57,21.9 257 | 0.03548,80,3.64,"0",0.392,5.876,19.1,9.2203,1,315,16.4,395.18,9.25,20.9 258 | 0.01538,90,3.75,"0",0.394,7.454,34.2,6.3361,3,244,15.9,386.34,3.11,44 259 | 0.61154,20,3.97,"0",0.647,8.704,86.9,1.801,5,264,13,389.7,5.12,50 260 | 0.66351,20,3.97,"0",0.647,7.333,100,1.8946,5,264,13,383.29,7.79,36 261 | 0.65665,20,3.97,"0",0.647,6.842,100,2.0107,5,264,13,391.93,6.9,30.1 262 | 0.54011,20,3.97,"0",0.647,7.203,81.8,2.1121,5,264,13,392.8,9.59,33.8 263 | 0.53412,20,3.97,"0",0.647,7.52,89.4,2.1398,5,264,13,388.37,7.26,43.1 264 | 0.52014,20,3.97,"0",0.647,8.398,91.5,2.2885,5,264,13,386.86,5.91,48.8 265 | 0.82526,20,3.97,"0",0.647,7.327,94.5,2.0788,5,264,13,393.42,11.25,31 266 | 0.55007,20,3.97,"0",0.647,7.206,91.6,1.9301,5,264,13,387.89,8.1,36.5 267 | 
0.76162,20,3.97,"0",0.647,5.56,62.8,1.9865,5,264,13,392.4,10.45,22.8 268 | 0.7857,20,3.97,"0",0.647,7.014,84.6,2.1329,5,264,13,384.07,14.79,30.7 269 | 0.57834,20,3.97,"0",0.575,8.297,67,2.4216,5,264,13,384.54,7.44,50 270 | 0.5405,20,3.97,"0",0.575,7.47,52.6,2.872,5,264,13,390.3,3.16,43.5 271 | 0.09065,20,6.96,"1",0.464,5.92,61.5,3.9175,3,223,18.6,391.34,13.65,20.7 272 | 0.29916,20,6.96,"0",0.464,5.856,42.1,4.429,3,223,18.6,388.65,13,21.1 273 | 0.16211,20,6.96,"0",0.464,6.24,16.3,4.429,3,223,18.6,396.9,6.59,25.2 274 | 0.1146,20,6.96,"0",0.464,6.538,58.7,3.9175,3,223,18.6,394.96,7.73,24.4 275 | 0.22188,20,6.96,"1",0.464,7.691,51.8,4.3665,3,223,18.6,390.77,6.58,35.2 276 | 0.05644,40,6.41,"1",0.447,6.758,32.9,4.0776,4,254,17.6,396.9,3.53,32.4 277 | 0.09604,40,6.41,"0",0.447,6.854,42.8,4.2673,4,254,17.6,396.9,2.98,32 278 | 0.10469,40,6.41,"1",0.447,7.267,49,4.7872,4,254,17.6,389.25,6.05,33.2 279 | 0.06127,40,6.41,"1",0.447,6.826,27.6,4.8628,4,254,17.6,393.45,4.16,33.1 280 | 0.07978,40,6.41,"0",0.447,6.482,32.1,4.1403,4,254,17.6,396.9,7.19,29.1 281 | 0.21038,20,3.33,"0",0.4429,6.812,32.2,4.1007,5,216,14.9,396.9,4.85,35.1 282 | 0.03578,20,3.33,"0",0.4429,7.82,64.5,4.6947,5,216,14.9,387.31,3.76,45.4 283 | 0.03705,20,3.33,"0",0.4429,6.968,37.2,5.2447,5,216,14.9,392.23,4.59,35.4 284 | 0.06129,20,3.33,"1",0.4429,7.645,49.7,5.2119,5,216,14.9,377.07,3.01,46 285 | 0.01501,90,1.21,"1",0.401,7.923,24.8,5.885,1,198,13.6,395.52,3.16,50 286 | 0.00906,90,2.97,"0",0.4,7.088,20.8,7.3073,1,285,15.3,394.72,7.85,32.2 287 | 0.01096,55,2.25,"0",0.389,6.453,31.9,7.3073,1,300,15.3,394.72,8.23,22 288 | 0.01965,80,1.76,"0",0.385,6.23,31.5,9.0892,1,241,18.2,341.6,12.93,20.1 289 | 0.03871,52.5,5.32,"0",0.405,6.209,31.3,7.3172,6,293,16.6,396.9,7.14,23.2 290 | 0.0459,52.5,5.32,"0",0.405,6.315,45.6,7.3172,6,293,16.6,396.9,7.6,22.3 291 | 0.04297,52.5,5.32,"0",0.405,6.565,22.9,7.3172,6,293,16.6,371.72,9.51,24.8 292 | 0.03502,80,4.95,"0",0.411,6.861,27.9,5.1167,4,245,19.2,396.9,3.33,28.5 293 | 0.07886,80,4.95,"0",0.411,7.148,27.7,5.1167,4,245,19.2,396.9,3.56,37.3 294 | 0.03615,80,4.95,"0",0.411,6.63,23.4,5.1167,4,245,19.2,396.9,4.7,27.9 295 | 0.08265,0,13.92,"0",0.437,6.127,18.4,5.5027,4,289,16,396.9,8.58,23.9 296 | 0.08199,0,13.92,"0",0.437,6.009,42.3,5.5027,4,289,16,396.9,10.4,21.7 297 | 0.12932,0,13.92,"0",0.437,6.678,31.1,5.9604,4,289,16,396.9,6.27,28.6 298 | 0.05372,0,13.92,"0",0.437,6.549,51,5.9604,4,289,16,392.85,7.39,27.1 299 | 0.14103,0,13.92,"0",0.437,5.79,58,6.32,4,289,16,396.9,15.84,20.3 300 | 0.06466,70,2.24,"0",0.4,6.345,20.1,7.8278,5,358,14.8,368.24,4.97,22.5 301 | 0.05561,70,2.24,"0",0.4,7.041,10,7.8278,5,358,14.8,371.58,4.74,29 302 | 0.04417,70,2.24,"0",0.4,6.871,47.4,7.8278,5,358,14.8,390.86,6.07,24.8 303 | 0.03537,34,6.09,"0",0.433,6.59,40.4,5.4917,7,329,16.1,395.75,9.5,22 304 | 0.09266,34,6.09,"0",0.433,6.495,18.4,5.4917,7,329,16.1,383.61,8.67,26.4 305 | 0.1,34,6.09,"0",0.433,6.982,17.7,5.4917,7,329,16.1,390.43,4.86,33.1 306 | 0.05515,33,2.18,"0",0.472,7.236,41.1,4.022,7,222,18.4,393.68,6.93,36.1 307 | 0.05479,33,2.18,"0",0.472,6.616,58.1,3.37,7,222,18.4,393.36,8.93,28.4 308 | 0.07503,33,2.18,"0",0.472,7.42,71.9,3.0992,7,222,18.4,396.9,6.47,33.4 309 | 0.04932,33,2.18,"0",0.472,6.849,70.3,3.1827,7,222,18.4,396.9,7.53,28.2 310 | 0.49298,0,9.9,"0",0.544,6.635,82.5,3.3175,4,304,18.4,396.9,4.54,22.8 311 | 0.3494,0,9.9,"0",0.544,5.972,76.7,3.1025,4,304,18.4,396.24,9.97,20.3 312 | 2.63548,0,9.9,"0",0.544,4.973,37.8,2.5194,4,304,18.4,350.45,12.64,16.1 313 | 
0.79041,0,9.9,"0",0.544,6.122,52.8,2.6403,4,304,18.4,396.9,5.98,22.1 314 | 0.26169,0,9.9,"0",0.544,6.023,90.4,2.834,4,304,18.4,396.3,11.72,19.4 315 | 0.26938,0,9.9,"0",0.544,6.266,82.8,3.2628,4,304,18.4,393.39,7.9,21.6 316 | 0.3692,0,9.9,"0",0.544,6.567,87.3,3.6023,4,304,18.4,395.69,9.28,23.8 317 | 0.25356,0,9.9,"0",0.544,5.705,77.7,3.945,4,304,18.4,396.42,11.5,16.2 318 | 0.31827,0,9.9,"0",0.544,5.914,83.2,3.9986,4,304,18.4,390.7,18.33,17.8 319 | 0.24522,0,9.9,"0",0.544,5.782,71.7,4.0317,4,304,18.4,396.9,15.94,19.8 320 | 0.40202,0,9.9,"0",0.544,6.382,67.2,3.5325,4,304,18.4,395.21,10.36,23.1 321 | 0.47547,0,9.9,"0",0.544,6.113,58.8,4.0019,4,304,18.4,396.23,12.73,21 322 | 0.1676,0,7.38,"0",0.493,6.426,52.3,4.5404,5,287,19.6,396.9,7.2,23.8 323 | 0.18159,0,7.38,"0",0.493,6.376,54.3,4.5404,5,287,19.6,396.9,6.87,23.1 324 | 0.35114,0,7.38,"0",0.493,6.041,49.9,4.7211,5,287,19.6,396.9,7.7,20.4 325 | 0.28392,0,7.38,"0",0.493,5.708,74.3,4.7211,5,287,19.6,391.13,11.74,18.5 326 | 0.34109,0,7.38,"0",0.493,6.415,40.1,4.7211,5,287,19.6,396.9,6.12,25 327 | 0.19186,0,7.38,"0",0.493,6.431,14.7,5.4159,5,287,19.6,393.68,5.08,24.6 328 | 0.30347,0,7.38,"0",0.493,6.312,28.9,5.4159,5,287,19.6,396.9,6.15,23 329 | 0.24103,0,7.38,"0",0.493,6.083,43.7,5.4159,5,287,19.6,396.9,12.79,22.2 330 | 0.06617,0,3.24,"0",0.46,5.868,25.8,5.2146,4,430,16.9,382.44,9.97,19.3 331 | 0.06724,0,3.24,"0",0.46,6.333,17.2,5.2146,4,430,16.9,375.21,7.34,22.6 332 | 0.04544,0,3.24,"0",0.46,6.144,32.2,5.8736,4,430,16.9,368.57,9.09,19.8 333 | 0.05023,35,6.06,"0",0.4379,5.706,28.4,6.6407,1,304,16.9,394.02,12.43,17.1 334 | 0.03466,35,6.06,"0",0.4379,6.031,23.3,6.6407,1,304,16.9,362.25,7.83,19.4 335 | 0.05083,0,5.19,"0",0.515,6.316,38.1,6.4584,5,224,20.2,389.71,5.68,22.2 336 | 0.03738,0,5.19,"0",0.515,6.31,38.5,6.4584,5,224,20.2,389.4,6.75,20.7 337 | 0.03961,0,5.19,"0",0.515,6.037,34.5,5.9853,5,224,20.2,396.9,8.01,21.1 338 | 0.03427,0,5.19,"0",0.515,5.869,46.3,5.2311,5,224,20.2,396.9,9.8,19.5 339 | 0.03041,0,5.19,"0",0.515,5.895,59.6,5.615,5,224,20.2,394.81,10.56,18.5 340 | 0.03306,0,5.19,"0",0.515,6.059,37.3,4.8122,5,224,20.2,396.14,8.51,20.6 341 | 0.05497,0,5.19,"0",0.515,5.985,45.4,4.8122,5,224,20.2,396.9,9.74,19 342 | 0.06151,0,5.19,"0",0.515,5.968,58.5,4.8122,5,224,20.2,396.9,9.29,18.7 343 | 0.01301,35,1.52,"0",0.442,7.241,49.3,7.0379,1,284,15.5,394.74,5.49,32.7 344 | 0.02498,0,1.89,"0",0.518,6.54,59.7,6.2669,1,422,15.9,389.96,8.65,16.5 345 | 0.02543,55,3.78,"0",0.484,6.696,56.4,5.7321,5,370,17.6,396.9,7.18,23.9 346 | 0.03049,55,3.78,"0",0.484,6.874,28.1,6.4654,5,370,17.6,387.97,4.61,31.2 347 | 0.03113,0,4.39,"0",0.442,6.014,48.5,8.0136,3,352,18.8,385.64,10.53,17.5 348 | 0.06162,0,4.39,"0",0.442,5.898,52.3,8.0136,3,352,18.8,364.61,12.67,17.2 349 | 0.0187,85,4.15,"0",0.429,6.516,27.7,8.5353,4,351,17.9,392.43,6.36,23.1 350 | 0.01501,80,2.01,"0",0.435,6.635,29.7,8.344,4,280,17,390.94,5.99,24.5 351 | 0.02899,40,1.25,"0",0.429,6.939,34.5,8.7921,1,335,19.7,389.85,5.89,26.6 352 | 0.06211,40,1.25,"0",0.429,6.49,44.4,8.7921,1,335,19.7,396.9,5.98,22.9 353 | 0.0795,60,1.69,"0",0.411,6.579,35.9,10.7103,4,411,18.3,370.78,5.49,24.1 354 | 0.07244,60,1.69,"0",0.411,5.884,18.5,10.7103,4,411,18.3,392.33,7.79,18.6 355 | 0.01709,90,2.02,"0",0.41,6.728,36.1,12.1265,5,187,17,384.46,4.5,30.1 356 | 0.04301,80,1.91,"0",0.413,5.663,21.9,10.5857,4,334,22,382.8,8.05,18.2 357 | 0.10659,80,1.91,"0",0.413,5.936,19.5,10.5857,4,334,22,376.04,5.57,20.6 358 | 8.98296,0,18.1,"1",0.77,6.212,97.4,2.1222,24,666,20.2,377.73,17.6,17.8 359 | 
3.8497,0,18.1,"1",0.77,6.395,91,2.5052,24,666,20.2,391.34,13.27,21.7 360 | 5.20177,0,18.1,"1",0.77,6.127,83.4,2.7227,24,666,20.2,395.43,11.48,22.7 361 | 4.26131,0,18.1,"0",0.77,6.112,81.3,2.5091,24,666,20.2,390.74,12.67,22.6 362 | 4.54192,0,18.1,"0",0.77,6.398,88,2.5182,24,666,20.2,374.56,7.79,25 363 | 3.83684,0,18.1,"0",0.77,6.251,91.1,2.2955,24,666,20.2,350.65,14.19,19.9 364 | 3.67822,0,18.1,"0",0.77,5.362,96.2,2.1036,24,666,20.2,380.79,10.19,20.8 365 | 4.22239,0,18.1,"1",0.77,5.803,89,1.9047,24,666,20.2,353.04,14.64,16.8 366 | 3.47428,0,18.1,"1",0.718,8.78,82.9,1.9047,24,666,20.2,354.55,5.29,21.9 367 | 4.55587,0,18.1,"0",0.718,3.561,87.9,1.6132,24,666,20.2,354.7,7.12,27.5 368 | 3.69695,0,18.1,"0",0.718,4.963,91.4,1.7523,24,666,20.2,316.03,14,21.9 369 | 13.5222,0,18.1,"0",0.631,3.863,100,1.5106,24,666,20.2,131.42,13.33,23.1 370 | 4.89822,0,18.1,"0",0.631,4.97,100,1.3325,24,666,20.2,375.52,3.26,50 371 | 5.66998,0,18.1,"1",0.631,6.683,96.8,1.3567,24,666,20.2,375.33,3.73,50 372 | 6.53876,0,18.1,"1",0.631,7.016,97.5,1.2024,24,666,20.2,392.05,2.96,50 373 | 9.2323,0,18.1,"0",0.631,6.216,100,1.1691,24,666,20.2,366.15,9.53,50 374 | 8.26725,0,18.1,"1",0.668,5.875,89.6,1.1296,24,666,20.2,347.88,8.88,50 375 | 11.1081,0,18.1,"0",0.668,4.906,100,1.1742,24,666,20.2,396.9,34.77,13.8 376 | 18.4982,0,18.1,"0",0.668,4.138,100,1.137,24,666,20.2,396.9,37.97,13.8 377 | 19.6091,0,18.1,"0",0.671,7.313,97.9,1.3163,24,666,20.2,396.9,13.44,15 378 | 15.288,0,18.1,"0",0.671,6.649,93.3,1.3449,24,666,20.2,363.02,23.24,13.9 379 | 9.82349,0,18.1,"0",0.671,6.794,98.8,1.358,24,666,20.2,396.9,21.24,13.3 380 | 23.6482,0,18.1,"0",0.671,6.38,96.2,1.3861,24,666,20.2,396.9,23.69,13.1 381 | 17.8667,0,18.1,"0",0.671,6.223,100,1.3861,24,666,20.2,393.74,21.78,10.2 382 | 88.9762,0,18.1,"0",0.671,6.968,91.9,1.4165,24,666,20.2,396.9,17.21,10.4 383 | 15.8744,0,18.1,"0",0.671,6.545,99.1,1.5192,24,666,20.2,396.9,21.08,10.9 384 | 9.18702,0,18.1,"0",0.7,5.536,100,1.5804,24,666,20.2,396.9,23.6,11.3 385 | 7.99248,0,18.1,"0",0.7,5.52,100,1.5331,24,666,20.2,396.9,24.56,12.3 386 | 20.0849,0,18.1,"0",0.7,4.368,91.2,1.4395,24,666,20.2,285.83,30.63,8.8 387 | 16.8118,0,18.1,"0",0.7,5.277,98.1,1.4261,24,666,20.2,396.9,30.81,7.2 388 | 24.3938,0,18.1,"0",0.7,4.652,100,1.4672,24,666,20.2,396.9,28.28,10.5 389 | 22.5971,0,18.1,"0",0.7,5,89.5,1.5184,24,666,20.2,396.9,31.99,7.4 390 | 14.3337,0,18.1,"0",0.7,4.88,100,1.5895,24,666,20.2,372.92,30.62,10.2 391 | 8.15174,0,18.1,"0",0.7,5.39,98.9,1.7281,24,666,20.2,396.9,20.85,11.5 392 | 6.96215,0,18.1,"0",0.7,5.713,97,1.9265,24,666,20.2,394.43,17.11,15.1 393 | 5.29305,0,18.1,"0",0.7,6.051,82.5,2.1678,24,666,20.2,378.38,18.76,23.2 394 | 11.5779,0,18.1,"0",0.7,5.036,97,1.77,24,666,20.2,396.9,25.68,9.7 395 | 8.64476,0,18.1,"0",0.693,6.193,92.6,1.7912,24,666,20.2,396.9,15.17,13.8 396 | 13.3598,0,18.1,"0",0.693,5.887,94.7,1.7821,24,666,20.2,396.9,16.35,12.7 397 | 8.71675,0,18.1,"0",0.693,6.471,98.8,1.7257,24,666,20.2,391.98,17.12,13.1 398 | 5.87205,0,18.1,"0",0.693,6.405,96,1.6768,24,666,20.2,396.9,19.37,12.5 399 | 7.67202,0,18.1,"0",0.693,5.747,98.9,1.6334,24,666,20.2,393.1,19.92,8.5 400 | 38.3518,0,18.1,"0",0.693,5.453,100,1.4896,24,666,20.2,396.9,30.59,5 401 | 9.91655,0,18.1,"0",0.693,5.852,77.8,1.5004,24,666,20.2,338.16,29.97,6.3 402 | 25.0461,0,18.1,"0",0.693,5.987,100,1.5888,24,666,20.2,396.9,26.77,5.6 403 | 14.2362,0,18.1,"0",0.693,6.343,100,1.5741,24,666,20.2,396.9,20.32,7.2 404 | 9.59571,0,18.1,"0",0.693,6.404,100,1.639,24,666,20.2,376.11,20.31,12.1 405 | 
24.8017,0,18.1,"0",0.693,5.349,96,1.7028,24,666,20.2,396.9,19.77,8.3 406 | 41.5292,0,18.1,"0",0.693,5.531,85.4,1.6074,24,666,20.2,329.46,27.38,8.5 407 | 67.9208,0,18.1,"0",0.693,5.683,100,1.4254,24,666,20.2,384.97,22.98,5 408 | 20.7162,0,18.1,"0",0.659,4.138,100,1.1781,24,666,20.2,370.22,23.34,11.9 409 | 11.9511,0,18.1,"0",0.659,5.608,100,1.2852,24,666,20.2,332.09,12.13,27.9 410 | 7.40389,0,18.1,"0",0.597,5.617,97.9,1.4547,24,666,20.2,314.64,26.4,17.2 411 | 14.4383,0,18.1,"0",0.597,6.852,100,1.4655,24,666,20.2,179.36,19.78,27.5 412 | 51.1358,0,18.1,"0",0.597,5.757,100,1.413,24,666,20.2,2.6,10.11,15 413 | 14.0507,0,18.1,"0",0.597,6.657,100,1.5275,24,666,20.2,35.05,21.22,17.2 414 | 18.811,0,18.1,"0",0.597,4.628,100,1.5539,24,666,20.2,28.79,34.37,17.9 415 | 28.6558,0,18.1,"0",0.597,5.155,100,1.5894,24,666,20.2,210.97,20.08,16.3 416 | 45.7461,0,18.1,"0",0.693,4.519,100,1.6582,24,666,20.2,88.27,36.98,7 417 | 18.0846,0,18.1,"0",0.679,6.434,100,1.8347,24,666,20.2,27.25,29.05,7.2 418 | 10.8342,0,18.1,"0",0.679,6.782,90.8,1.8195,24,666,20.2,21.57,25.79,7.5 419 | 25.9406,0,18.1,"0",0.679,5.304,89.1,1.6475,24,666,20.2,127.36,26.64,10.4 420 | 73.5341,0,18.1,"0",0.679,5.957,100,1.8026,24,666,20.2,16.45,20.62,8.8 421 | 11.8123,0,18.1,"0",0.718,6.824,76.5,1.794,24,666,20.2,48.45,22.74,8.4 422 | 11.0874,0,18.1,"0",0.718,6.411,100,1.8589,24,666,20.2,318.75,15.02,16.7 423 | 7.02259,0,18.1,"0",0.718,6.006,95.3,1.8746,24,666,20.2,319.98,15.7,14.2 424 | 12.0482,0,18.1,"0",0.614,5.648,87.6,1.9512,24,666,20.2,291.55,14.1,20.8 425 | 7.05042,0,18.1,"0",0.614,6.103,85.1,2.0218,24,666,20.2,2.52,23.29,13.4 426 | 8.79212,0,18.1,"0",0.584,5.565,70.6,2.0635,24,666,20.2,3.65,17.16,11.7 427 | 15.8603,0,18.1,"0",0.679,5.896,95.4,1.9096,24,666,20.2,7.68,24.39,8.3 428 | 12.2472,0,18.1,"0",0.584,5.837,59.7,1.9976,24,666,20.2,24.65,15.69,10.2 429 | 37.6619,0,18.1,"0",0.679,6.202,78.7,1.8629,24,666,20.2,18.82,14.52,10.9 430 | 7.36711,0,18.1,"0",0.679,6.193,78.1,1.9356,24,666,20.2,96.73,21.52,11 431 | 9.33889,0,18.1,"0",0.679,6.38,95.6,1.9682,24,666,20.2,60.72,24.08,9.5 432 | 8.49213,0,18.1,"0",0.584,6.348,86.1,2.0527,24,666,20.2,83.45,17.64,14.5 433 | 10.0623,0,18.1,"0",0.584,6.833,94.3,2.0882,24,666,20.2,81.33,19.69,14.1 434 | 6.44405,0,18.1,"0",0.584,6.425,74.8,2.2004,24,666,20.2,97.95,12.03,16.1 435 | 5.58107,0,18.1,"0",0.713,6.436,87.9,2.3158,24,666,20.2,100.19,16.22,14.3 436 | 13.9134,0,18.1,"0",0.713,6.208,95,2.2222,24,666,20.2,100.63,15.17,11.7 437 | 11.1604,0,18.1,"0",0.74,6.629,94.6,2.1247,24,666,20.2,109.85,23.27,13.4 438 | 14.4208,0,18.1,"0",0.74,6.461,93.3,2.0026,24,666,20.2,27.49,18.05,9.6 439 | 15.1772,0,18.1,"0",0.74,6.152,100,1.9142,24,666,20.2,9.32,26.45,8.7 440 | 13.6781,0,18.1,"0",0.74,5.935,87.9,1.8206,24,666,20.2,68.95,34.02,8.4 441 | 9.39063,0,18.1,"0",0.74,5.627,93.9,1.8172,24,666,20.2,396.9,22.88,12.8 442 | 22.0511,0,18.1,"0",0.74,5.818,92.4,1.8662,24,666,20.2,391.45,22.11,10.5 443 | 9.72418,0,18.1,"0",0.74,6.406,97.2,2.0651,24,666,20.2,385.96,19.52,17.1 444 | 5.66637,0,18.1,"0",0.74,6.219,100,2.0048,24,666,20.2,395.69,16.59,18.4 445 | 9.96654,0,18.1,"0",0.74,6.485,100,1.9784,24,666,20.2,386.73,18.85,15.4 446 | 12.8023,0,18.1,"0",0.74,5.854,96.6,1.8956,24,666,20.2,240.52,23.79,10.8 447 | 10.6718,0,18.1,"0",0.74,6.459,94.8,1.9879,24,666,20.2,43.06,23.98,11.8 448 | 6.28807,0,18.1,"0",0.74,6.341,96.4,2.072,24,666,20.2,318.01,17.79,14.9 449 | 9.92485,0,18.1,"0",0.74,6.251,96.6,2.198,24,666,20.2,388.52,16.44,12.6 450 | 9.32909,0,18.1,"0",0.713,6.185,98.7,2.2616,24,666,20.2,396.9,18.13,14.1 451 | 
7.52601,0,18.1,"0",0.713,6.417,98.3,2.185,24,666,20.2,304.21,19.31,13 452 | 6.71772,0,18.1,"0",0.713,6.749,92.6,2.3236,24,666,20.2,0.32,17.44,13.4 453 | 5.44114,0,18.1,"0",0.713,6.655,98.2,2.3552,24,666,20.2,355.29,17.73,15.2 454 | 5.09017,0,18.1,"0",0.713,6.297,91.8,2.3682,24,666,20.2,385.09,17.27,16.1 455 | 8.24809,0,18.1,"0",0.713,7.393,99.3,2.4527,24,666,20.2,375.87,16.74,17.8 456 | 9.51363,0,18.1,"0",0.713,6.728,94.1,2.4961,24,666,20.2,6.68,18.71,14.9 457 | 4.75237,0,18.1,"0",0.713,6.525,86.5,2.4358,24,666,20.2,50.92,18.13,14.1 458 | 4.66883,0,18.1,"0",0.713,5.976,87.9,2.5806,24,666,20.2,10.48,19.01,12.7 459 | 8.20058,0,18.1,"0",0.713,5.936,80.3,2.7792,24,666,20.2,3.5,16.94,13.5 460 | 7.75223,0,18.1,"0",0.713,6.301,83.7,2.7831,24,666,20.2,272.21,16.23,14.9 461 | 6.80117,0,18.1,"0",0.713,6.081,84.4,2.7175,24,666,20.2,396.9,14.7,20 462 | 4.81213,0,18.1,"0",0.713,6.701,90,2.5975,24,666,20.2,255.23,16.42,16.4 463 | 3.69311,0,18.1,"0",0.713,6.376,88.4,2.5671,24,666,20.2,391.43,14.65,17.7 464 | 6.65492,0,18.1,"0",0.713,6.317,83,2.7344,24,666,20.2,396.9,13.99,19.5 465 | 5.82115,0,18.1,"0",0.713,6.513,89.9,2.8016,24,666,20.2,393.82,10.29,20.2 466 | 7.83932,0,18.1,"0",0.655,6.209,65.4,2.9634,24,666,20.2,396.9,13.22,21.4 467 | 3.1636,0,18.1,"0",0.655,5.759,48.2,3.0665,24,666,20.2,334.4,14.13,19.9 468 | 3.77498,0,18.1,"0",0.655,5.952,84.7,2.8715,24,666,20.2,22.01,17.15,19 469 | 4.42228,0,18.1,"0",0.584,6.003,94.5,2.5403,24,666,20.2,331.29,21.32,19.1 470 | 15.5757,0,18.1,"0",0.58,5.926,71,2.9084,24,666,20.2,368.74,18.13,19.1 471 | 13.0751,0,18.1,"0",0.58,5.713,56.7,2.8237,24,666,20.2,396.9,14.76,20.1 472 | 4.34879,0,18.1,"0",0.58,6.167,84,3.0334,24,666,20.2,396.9,16.29,19.9 473 | 4.03841,0,18.1,"0",0.532,6.229,90.7,3.0993,24,666,20.2,395.33,12.87,19.6 474 | 3.56868,0,18.1,"0",0.58,6.437,75,2.8965,24,666,20.2,393.37,14.36,23.2 475 | 4.64689,0,18.1,"0",0.614,6.98,67.6,2.5329,24,666,20.2,374.68,11.66,29.8 476 | 8.05579,0,18.1,"0",0.584,5.427,95.4,2.4298,24,666,20.2,352.58,18.14,13.8 477 | 6.39312,0,18.1,"0",0.584,6.162,97.4,2.206,24,666,20.2,302.76,24.1,13.3 478 | 4.87141,0,18.1,"0",0.614,6.484,93.6,2.3053,24,666,20.2,396.21,18.68,16.7 479 | 15.0234,0,18.1,"0",0.614,5.304,97.3,2.1007,24,666,20.2,349.48,24.91,12 480 | 10.233,0,18.1,"0",0.614,6.185,96.7,2.1705,24,666,20.2,379.7,18.03,14.6 481 | 14.3337,0,18.1,"0",0.614,6.229,88,1.9512,24,666,20.2,383.32,13.11,21.4 482 | 5.82401,0,18.1,"0",0.532,6.242,64.7,3.4242,24,666,20.2,396.9,10.74,23 483 | 5.70818,0,18.1,"0",0.532,6.75,74.9,3.3317,24,666,20.2,393.07,7.74,23.7 484 | 5.73116,0,18.1,"0",0.532,7.061,77,3.4106,24,666,20.2,395.28,7.01,25 485 | 2.81838,0,18.1,"0",0.532,5.762,40.3,4.0983,24,666,20.2,392.92,10.42,21.8 486 | 2.37857,0,18.1,"0",0.583,5.871,41.9,3.724,24,666,20.2,370.73,13.34,20.6 487 | 3.67367,0,18.1,"0",0.583,6.312,51.9,3.9917,24,666,20.2,388.62,10.58,21.2 488 | 5.69175,0,18.1,"0",0.583,6.114,79.8,3.5459,24,666,20.2,392.68,14.98,19.1 489 | 4.83567,0,18.1,"0",0.583,5.905,53.2,3.1523,24,666,20.2,388.22,11.45,20.6 490 | 0.15086,0,27.74,"0",0.609,5.454,92.7,1.8209,4,711,20.1,395.09,18.06,15.2 491 | 0.18337,0,27.74,"0",0.609,5.414,98.3,1.7554,4,711,20.1,344.05,23.97,7 492 | 0.20746,0,27.74,"0",0.609,5.093,98,1.8226,4,711,20.1,318.43,29.68,8.1 493 | 0.10574,0,27.74,"0",0.609,5.983,98.8,1.8681,4,711,20.1,390.11,18.07,13.6 494 | 0.11132,0,27.74,"0",0.609,5.983,83.5,2.1099,4,711,20.1,396.9,13.35,20.1 495 | 0.17331,0,9.69,"0",0.585,5.707,54,2.3817,6,391,19.2,396.9,12.01,21.8 496 | 
0.27957,0,9.69,"0",0.585,5.926,42.6,2.3817,6,391,19.2,396.9,13.59,24.5 497 | 0.17899,0,9.69,"0",0.585,5.67,28.8,2.7986,6,391,19.2,393.29,17.6,23.1 498 | 0.2896,0,9.69,"0",0.585,5.39,72.9,2.7986,6,391,19.2,396.9,21.14,19.7 499 | 0.26838,0,9.69,"0",0.585,5.794,70.6,2.8927,6,391,19.2,396.9,14.1,18.3 500 | 0.23912,0,9.69,"0",0.585,6.019,65.3,2.4091,6,391,19.2,396.9,12.92,21.2 501 | 0.17783,0,9.69,"0",0.585,5.569,73.5,2.3999,6,391,19.2,395.77,15.1,17.5 502 | 0.22438,0,9.69,"0",0.585,6.027,79.7,2.4982,6,391,19.2,396.9,14.33,16.8 503 | 0.06263,0,11.93,"0",0.573,6.593,69.1,2.4786,1,273,21,391.99,9.67,22.4 504 | 0.04527,0,11.93,"0",0.573,6.12,76.7,2.2875,1,273,21,396.9,9.08,20.6 505 | 0.06076,0,11.93,"0",0.573,6.976,91,2.1675,1,273,21,396.9,5.64,23.9 506 | 0.10959,0,11.93,"0",0.573,6.794,89.3,2.3889,1,273,21,393.45,6.48,22 507 | 0.04741,0,11.93,"0",0.573,6.03,80.8,2.505,1,273,21,396.9,7.88,11.9 -------------------------------------------------------------------------------- /test/data_classification.txt: -------------------------------------------------------------------------------- 1 | Variance of Wavelet Transformed Image,Skewnes of Wavelet Transformed Image,Curtosis of Wavelet Transformed Image,Entropy of Image,Y 2 | 3.6216,8.6661,-2.8073,-0.44699,0 3 | 4.5459,8.1674,-2.4586,-1.4621,0 4 | 3.866,-2.6383,1.9242,0.10645,0 5 | 3.4566,9.5228,-4.0112,-3.5944,0 6 | 0.32924,-4.4552,4.5718,-0.9888,0 7 | 4.3684,9.6718,-3.9606,-3.1625,0 8 | 3.5912,3.0129,0.72888,0.56421,0 9 | 2.0922,-6.81,8.4636,-0.60216,0 10 | 3.2032,5.7588,-0.75345,-0.61251,0 11 | 1.5356,9.1772,-2.2718,-0.73535,0 12 | 1.2247,8.7779,-2.2135,-0.80647,0 13 | 3.9899,-2.7066,2.3946,0.86291,0 14 | 1.8993,7.6625,0.15394,-3.1108,0 15 | -1.5768,10.843,2.5462,-2.9362,0 16 | 3.404,8.7261,-2.9915,-0.57242,0 17 | 4.6765,-3.3895,3.4896,1.4771,0 18 | 2.6719,3.0646,0.37158,0.58619,0 19 | 0.80355,2.8473,4.3439,0.6017,0 20 | 1.4479,-4.8794,8.3428,-2.1086,0 21 | 5.2423,11.0272,-4.353,-4.1013,0 22 | 5.7867,7.8902,-2.6196,-0.48708,0 23 | 0.3292,-4.4552,4.5718,-0.9888,0 24 | 3.9362,10.1622,-3.8235,-4.0172,0 25 | 0.93584,8.8855,-1.6831,-1.6599,0 26 | 4.4338,9.887,-4.6795,-3.7483,0 27 | 0.7057,-5.4981,8.3368,-2.8715,0 28 | 1.1432,-3.7413,5.5777,-0.63578,0 29 | -0.38214,8.3909,2.1624,-3.7405,0 30 | 6.5633,9.8187,-4.4113,-3.2258,0 31 | 4.8906,-3.3584,3.4202,1.0905,0 32 | -0.24811,-0.17797,4.9068,0.15429,0 33 | 1.4884,3.6274,3.308,0.48921,0 34 | 4.2969,7.617,-2.3874,-0.96164,0 35 | -0.96511,9.4111,1.7305,-4.8629,0 36 | -1.6162,0.80908,8.1628,0.60817,0 37 | 2.4391,6.4417,-0.80743,-0.69139,0 38 | 2.6881,6.0195,-0.46641,-0.69268,0 39 | 3.6289,0.81322,1.6277,0.77627,0 40 | 4.5679,3.1929,-2.1055,0.29653,0 41 | 3.4805,9.7008,-3.7541,-3.4379,0 42 | 4.1711,8.722,-3.0224,-0.59699,0 43 | -0.2062,9.2207,-3.7044,-6.8103,0 44 | -0.0068919,9.2931,-0.41243,-1.9638,0 45 | 0.96441,5.8395,2.3235,0.066365,0 46 | 2.8561,6.9176,-0.79372,0.48403,0 47 | -0.7869,9.5663,-3.7867,-7.5034,0 48 | 2.0843,6.6258,0.48382,-2.2134,0 49 | -0.7869,9.5663,-3.7867,-7.5034,0 50 | 3.9102,6.065,-2.4534,-0.68234,0 51 | 1.6349,3.286,2.8753,0.087054,0 52 | 4.3239,-4.8835,3.4356,-0.5776,0 53 | 5.262,3.9834,-1.5572,1.0103,0 54 | 3.1452,5.825,-0.51439,-1.4944,0 55 | 2.549,6.1499,-1.1605,-1.2371,0 56 | 4.9264,5.496,-2.4774,-0.50648,0 57 | 4.8265,0.80287,1.6371,1.1875,0 58 | 2.5635,6.7769,-0.61979,0.38576,0 59 | 5.807,5.0097,-2.2384,0.43878,0 60 | 3.1377,-4.1096,4.5701,0.98963,0 61 | -0.78289,11.3603,-0.37644,-7.0495,0 62 | 2.888,0.44696,4.5907,-0.24398,0 63 | 0.49665,5.527,1.7785,-0.47156,0 64 | 
4.2586,11.2962,-4.0943,-4.3457,0 65 | 1.7939,-1.1174,1.5454,-0.26079,0 66 | 5.4021,3.1039,-1.1536,1.5651,0 67 | 2.5367,2.599,2.0938,0.20085,0 68 | 4.6054,-4.0765,2.7587,0.31981,0 69 | 2.4235,9.5332,-3.0789,-2.7746,0 70 | 1.0009,7.7846,-0.28219,-2.6608,0 71 | 0.12326,8.9848,-0.9351,-2.4332,0 72 | 3.9529,-2.3548,2.3792,0.48274,0 73 | 4.1373,0.49248,1.093,1.8276,0 74 | 4.7181,10.0153,-3.9486,-3.8582,0 75 | 4.1654,-3.4495,3.643,1.0879,0 76 | 4.4069,10.9072,-4.5775,-4.4271,0 77 | 2.3066,3.5364,0.57551,0.41938,0 78 | 3.7935,7.9853,-2.5477,-1.872,0 79 | 0.049175,6.1437,1.7828,-0.72113,0 80 | 0.24835,7.6439,0.9885,-0.87371,0 81 | 1.1317,3.9647,3.3979,0.84351,0 82 | 2.8033,9.0862,-3.3668,-1.0224,0 83 | 4.4682,2.2907,0.95766,0.83058,0 84 | 5.0185,8.5978,-2.9375,-1.281,0 85 | 1.8664,7.7763,-0.23849,-2.9634,0 86 | 3.245,6.63,-0.63435,0.86937,0 87 | 4.0296,2.6756,0.80685,0.71679,0 88 | -1.1313,1.9037,7.5339,1.022,0 89 | 0.87603,6.8141,0.84198,-0.17156,0 90 | 4.1197,-2.7956,2.0707,0.67412,0 91 | 3.8027,0.81529,2.1041,1.0245,0 92 | 1.4806,7.6377,-2.7876,-1.0341,0 93 | 4.0632,3.584,0.72545,0.39481,0 94 | 4.3064,8.2068,-2.7824,-1.4336,0 95 | 2.4486,-6.3175,7.9632,0.20602,0 96 | 3.2718,1.7837,2.1161,0.61334,0 97 | -0.64472,-4.6062,8.347,-2.7099,0 98 | 2.9543,1.076,0.64577,0.89394,0 99 | 2.1616,-6.8804,8.1517,-0.081048,0 100 | 3.82,10.9279,-4.0112,-5.0284,0 101 | -2.7419,11.4038,2.5394,-5.5793,0 102 | 3.3669,-5.1856,3.6935,-1.1427,0 103 | 4.5597,-2.4211,2.6413,1.6168,0 104 | 5.1129,-0.49871,0.62863,1.1189,0 105 | 3.3397,-4.6145,3.9823,-0.23751,0 106 | 4.2027,0.22761,0.96108,0.97282,0 107 | 3.5438,1.2395,1.997,2.1547,0 108 | 2.3136,10.6651,-3.5288,-4.7672,0 109 | -1.8584,7.886,-1.6643,-1.8384,0 110 | 3.106,9.5414,-4.2536,-4.003,0 111 | 2.9163,10.8306,-3.3437,-4.122,0 112 | 3.9922,-4.4676,3.7304,-0.1095,0 113 | 1.518,5.6946,0.094818,-0.026738,0 114 | 3.2351,9.647,-3.2074,-2.5948,0 115 | 4.2188,6.8162,-1.2804,0.76076,0 116 | 1.7819,6.9176,-1.2744,-1.5759,0 117 | 2.5331,2.9135,-0.822,-0.12243,0 118 | 3.8969,7.4163,-1.8245,0.14007,0 119 | 2.108,6.7955,-0.1708,0.4905,0 120 | 2.8969,0.70768,2.29,1.8663,0 121 | 0.9297,-3.7971,4.6429,-0.2957,0 122 | 3.4642,10.6878,-3.4071,-4.109,0 123 | 4.0713,10.4023,-4.1722,-4.7582,0 124 | -1.4572,9.1214,1.7425,-5.1241,0 125 | -1.5075,1.9224,7.1466,0.89136,0 126 | -0.91718,9.9884,1.1804,-5.2263,0 127 | 2.994,7.2011,-1.2153,0.3211,0 128 | -2.343,12.9516,3.3285,-5.9426,0 129 | 3.7818,-2.8846,2.2558,-0.15734,0 130 | 4.6689,1.3098,0.055404,1.909,0 131 | 3.4663,1.1112,1.7425,1.3388,0 132 | 3.2697,-4.3414,3.6884,-0.29829,0 133 | 5.1302,8.6703,-2.8913,-1.5086,0 134 | 2.0139,6.1416,0.37929,0.56938,0 135 | 0.4339,5.5395,2.033,-0.40432,0 136 | -1.0401,9.3987,0.85998,-5.3336,0 137 | 4.1605,11.2196,-3.6136,-4.0819,0 138 | 5.438,9.4669,-4.9417,-3.9202,0 139 | 5.032,8.2026,-2.6256,-1.0341,0 140 | 5.2418,10.5388,-4.1174,-4.2797,0 141 | -0.2062,9.2207,-3.7044,-6.8103,0 142 | 2.0911,0.94358,4.5512,1.234,0 143 | 1.7317,-0.34765,4.1905,-0.99138,0 144 | 4.1736,3.3336,-1.4244,0.60429,0 145 | 3.9232,-3.2467,3.4579,0.83705,0 146 | 3.8481,10.1539,-3.8561,-4.2228,0 147 | 0.5195,-3.2633,3.0895,-0.9849,0 148 | 3.8584,0.78425,1.1033,1.7008,0 149 | 1.7496,-0.1759,5.1827,1.2922,0 150 | 3.6277,0.9829,0.68861,0.63403,0 151 | 2.7391,7.4018,0.071684,-2.5302,0 152 | 4.5447,8.2274,-2.4166,-1.5875,0 153 | -1.7599,11.9211,2.6756,-3.3241,0 154 | 5.0691,0.21313,0.20278,1.2095,0 155 | 3.4591,11.112,-4.2039,-5.0931,0 156 | 1.9358,8.1654,-0.023425,-2.2586,0 157 | 2.486,-0.99533,5.3404,-0.15475,0 158 | 
2.4226,-4.5752,5.947,0.21507,0 159 | 3.9479,-3.7723,2.883,0.019813,0 160 | 2.2634,-4.4862,3.6558,-0.61251,0 161 | 1.3566,4.2358,2.1341,0.3211,0 162 | 5.0452,3.8964,-1.4304,0.86291,0 163 | 3.5499,8.6165,-3.2794,-1.2009,0 164 | 0.17346,7.8695,0.26876,-3.7883,0 165 | 2.4008,9.3593,-3.3565,-3.3526,0 166 | 4.8851,1.5995,-0.00029081,1.6401,0 167 | 4.1927,-3.2674,2.5839,0.21766,0 168 | 1.1166,8.6496,-0.96252,-1.8112,0 169 | 1.0235,6.901,-2.0062,-2.7125,0 170 | -1.803,11.8818,2.0458,-5.2728,0 171 | 0.11739,6.2761,-1.5495,-2.4746,0 172 | 0.5706,-0.0248,1.2421,-0.5621,0 173 | 4.0552,-2.4583,2.2806,1.0323,0 174 | -1.6952,1.0657,8.8294,0.94955,0 175 | -1.1193,10.7271,2.0938,-5.6504,0 176 | 1.8799,2.4707,2.4931,0.37671,0 177 | 3.583,-3.7971,3.4391,-0.12501,0 178 | 0.19081,9.1297,-3.725,-5.8224,0 179 | 3.6582,5.6864,-1.7157,-0.23751,0 180 | -0.13144,-1.7775,8.3316,0.35214,0 181 | 2.3925,9.798,-3.0361,-2.8224,0 182 | 1.6426,3.0149,0.22849,-0.147,0 183 | -0.11783,-1.5789,8.03,-0.028031,0 184 | -0.69572,8.6165,1.8419,-4.3289,0 185 | 2.9421,7.4101,-0.97709,-0.88406,0 186 | -1.7559,11.9459,3.0946,-4.8978,0 187 | -1.2537,10.8803,1.931,-4.3237,0 188 | 3.2585,-4.4614,3.8024,-0.15087,0 189 | 1.8314,6.3672,-0.036278,0.049554,0 190 | 4.5645,-3.6275,2.8684,0.27714,0 191 | 2.7365,-5.0325,6.6608,-0.57889,0 192 | 0.9297,-3.7971,4.6429,-0.2957,0 193 | 3.9663,10.1684,-4.1131,-4.6056,0 194 | 1.4578,-0.08485,4.1785,0.59136,0 195 | 4.8272,3.0687,0.68604,0.80731,0 196 | -2.341,12.3784,0.70403,-7.5836,0 197 | -1.8584,7.886,-1.6643,-1.8384,0 198 | 4.1454,7.257,-1.9153,-0.86078,0 199 | 1.9157,6.0816,0.23705,-2.0116,0 200 | 4.0215,-2.1914,2.4648,1.1409,0 201 | 5.8862,5.8747,-2.8167,-0.30087,0 202 | -2.0897,10.8265,2.3603,-3.4198,0 203 | 4.0026,-3.5943,3.5573,0.26809,0 204 | -0.78689,9.5663,-3.7867,-7.5034,0 205 | 4.1757,10.2615,-3.8552,-4.3056,0 206 | 0.83292,7.5404,0.65005,-0.92544,0 207 | 4.8077,2.2327,-0.26334,1.5534,0 208 | 5.3063,5.2684,-2.8904,-0.52716,0 209 | 2.5605,9.2683,-3.5913,-1.356,0 210 | 2.1059,7.6046,-0.47755,-1.8461,0 211 | 2.1721,-0.73874,5.4672,-0.72371,0 212 | 4.2899,9.1814,-4.6067,-4.3263,0 213 | 3.5156,10.1891,-4.2759,-4.978,0 214 | 2.614,8.0081,-3.7258,-1.3069,0 215 | 0.68087,2.3259,4.9085,0.54998,0 216 | 4.1962,0.74493,0.83256,0.753,0 217 | 6.0919,2.9673,-1.3267,1.4551,0 218 | 1.3234,3.2964,0.2362,-0.11984,0 219 | 1.3264,1.0326,5.6566,-0.41337,0 220 | -0.16735,7.6274,1.2061,-3.6241,0 221 | -1.3,10.2678,-2.953,-5.8638,0 222 | -2.2261,12.5398,2.9438,-3.5258,0 223 | 2.4196,6.4665,-0.75688,0.228,0 224 | 1.0987,0.6394,5.989,-0.58277,0 225 | 4.6464,10.5326,-4.5852,-4.206,0 226 | -0.36038,4.1158,3.1143,-0.37199,0 227 | 1.3562,3.2136,4.3465,0.78662,0 228 | 0.5706,-0.0248,1.2421,-0.5621,0 229 | -2.6479,10.1374,-1.331,-5.4707,0 230 | 3.1219,-3.137,1.9259,-0.37458,0 231 | 5.4944,1.5478,0.041694,1.9284,0 232 | -1.3389,1.552,7.0806,1.031,0 233 | -2.3361,11.9604,3.0835,-5.4435,0 234 | 2.2596,-0.033118,4.7355,-0.2776,0 235 | 0.46901,-0.63321,7.3848,0.36507,0 236 | 2.7296,2.8701,0.51124,0.5099,0 237 | 2.0466,2.03,2.1761,-0.083634,0 238 | -1.3274,9.498,2.4408,-5.2689,0 239 | 3.8905,-2.1521,2.6302,1.1047,0 240 | 3.9994,0.90427,1.1693,1.6892,0 241 | 2.3952,9.5083,-3.1783,-3.0086,0 242 | 3.2704,6.9321,-1.0456,0.23447,0 243 | -1.3931,1.5664,7.5382,0.78403,0 244 | 1.6406,3.5488,1.3964,-0.36424,0 245 | 2.7744,6.8576,-1.0671,0.075416,0 246 | 2.4287,9.3821,-3.2477,-1.4543,0 247 | 4.2134,-2.806,2.0116,0.67412,0 248 | 1.6472,0.48213,4.7449,1.225,0 249 | 2.0597,-0.99326,5.2119,-0.29312,0 250 | 0.3798,0.7098,0.7572,-0.4444,0 251 | 
1.0135,8.4551,-1.672,-2.0815,0 252 | 4.5691,-4.4552,3.1769,0.0042961,0 253 | 0.57461,10.1105,-1.6917,-4.3922,0 254 | 0.5734,9.1938,-0.9094,-1.872,0 255 | 5.2868,3.257,-1.3721,1.1668,0 256 | 4.0102,10.6568,-4.1388,-5.0646,0 257 | 4.1425,-3.6792,3.8281,1.6297,0 258 | 3.0934,-2.9177,2.2232,0.22283,0 259 | 2.2034,5.9947,0.53009,0.84998,0 260 | 3.744,0.79459,0.95851,1.0077,0 261 | 3.0329,2.2948,2.1135,0.35084,0 262 | 3.7731,7.2073,-1.6814,-0.94742,0 263 | 3.1557,2.8908,0.59693,0.79825,0 264 | 1.8114,7.6067,-0.9788,-2.4668,0 265 | 4.988,7.2052,-3.2846,-1.1608,0 266 | 2.483,6.6155,-0.79287,-0.90863,0 267 | 1.594,4.7055,1.3758,0.081882,0 268 | -0.016103,9.7484,0.15394,-1.6134,0 269 | 3.8496,9.7939,-4.1508,-4.4582,0 270 | 0.9297,-3.7971,4.6429,-0.2957,0 271 | 4.9342,2.4107,-0.17594,1.6245,0 272 | 3.8417,10.0215,-4.2699,-4.9159,0 273 | 5.3915,9.9946,-3.8081,-3.3642,0 274 | 4.4072,-0.070365,2.0416,1.1319,0 275 | 2.6946,6.7976,-0.40301,0.44912,0 276 | 5.2756,0.13863,0.12138,1.1435,0 277 | 3.4312,6.2637,-1.9513,-0.36165,0 278 | 4.052,-0.16555,0.45383,0.51248,0 279 | 1.3638,-4.7759,8.4182,-1.8836,0 280 | 0.89566,7.7763,-2.7473,-1.9353,0 281 | 1.9265,7.7557,-0.16823,-3.0771,0 282 | 0.20977,-0.46146,7.7267,0.90946,0 283 | 4.068,-2.9363,2.1992,0.50084,0 284 | 2.877,-4.0599,3.6259,-0.32544,0 285 | 0.3223,-0.89808,8.0883,0.69222,0 286 | -1.3,10.2678,-2.953,-5.8638,0 287 | 1.7747,-6.4334,8.15,-0.89828,0 288 | 1.3419,-4.4221,8.09,-1.7349,0 289 | 0.89606,10.5471,-1.4175,-4.0327,0 290 | 0.44125,2.9487,4.3225,0.7155,0 291 | 3.2422,6.2265,0.12224,-1.4466,0 292 | 2.5678,3.5136,0.61406,-0.40691,0 293 | -2.2153,11.9625,0.078538,-7.7853,0 294 | 4.1349,6.1189,-2.4294,-0.19613,0 295 | 1.934,-9.2828e-06,4.816,-0.33967,0 296 | 2.5068,1.1588,3.9249,0.12585,0 297 | 2.1464,6.0795,-0.5778,-2.2302,0 298 | 0.051979,7.0521,-2.0541,-3.1508,0 299 | 1.2706,8.035,-0.19651,-2.1888,0 300 | 1.143,0.83391,5.4552,-0.56984,0 301 | 2.2928,9.0386,-3.2417,-1.2991,0 302 | 0.3292,-4.4552,4.5718,-0.9888,0 303 | 2.9719,6.8369,-0.2702,0.71291,0 304 | 1.6849,8.7489,-1.2641,-1.3858,0 305 | -1.9177,11.6894,2.5454,-3.2763,0 306 | 2.3729,10.4726,-3.0087,-3.2013,0 307 | 1.0284,9.767,-1.3687,-1.7853,0 308 | 0.27451,9.2186,-3.2863,-4.8448,0 309 | 1.6032,-4.7863,8.5193,-2.1203,0 310 | 4.616,10.1788,-4.2185,-4.4245,0 311 | 4.2478,7.6956,-2.7696,-1.0767,0 312 | 4.0215,-2.7004,2.4957,0.36636,0 313 | 5.0297,-4.9704,3.5025,-0.23751,0 314 | 1.5902,2.2948,3.2403,0.18404,0 315 | 2.1274,5.1939,-1.7971,-1.1763,0 316 | 1.1811,8.3847,-2.0567,-0.90345,0 317 | 0.3292,-4.4552,4.5718,-0.9888,0 318 | 5.7353,5.2808,-2.2598,0.075416,0 319 | 2.6718,5.6574,0.72974,-1.4892,0 320 | 1.5799,-4.7076,7.9186,-1.5487,0 321 | 2.9499,2.2493,1.3458,-0.037083,0 322 | 0.5195,-3.2633,3.0895,-0.9849,0 323 | 3.7352,9.5911,-3.9032,-3.3487,0 324 | -1.7344,2.0175,7.7618,0.93532,0 325 | 3.884,10.0277,-3.9298,-4.0819,0 326 | 3.5257,1.2829,1.9276,1.7991,0 327 | 4.4549,2.4976,1.0313,0.96894,0 328 | -0.16108,-6.4624,8.3573,-1.5216,0 329 | 4.2164,9.4607,-4.9288,-5.2366,0 330 | 3.5152,6.8224,-0.67377,-0.46898,0 331 | 1.6988,2.9094,2.9044,0.11033,0 332 | 1.0607,2.4542,2.5188,-0.17027,0 333 | 2.0421,1.2436,4.2171,0.90429,0 334 | 3.5594,1.3078,1.291,1.6556,0 335 | 3.0009,5.8126,-2.2306,-0.66553,0 336 | 3.9294,1.4112,1.8076,0.89782,0 337 | 3.4667,-4.0724,4.2882,1.5418,0 338 | 3.966,3.9213,0.70574,0.33662,0 339 | 1.0191,2.33,4.9334,0.82929,0 340 | 0.96414,5.616,2.2138,-0.12501,0 341 | 1.8205,6.7562,0.0099913,0.39481,0 342 | 4.9923,7.8653,-2.3515,-0.71984,0 343 | -1.1804,11.5093,0.15565,-6.8194,0 344 | 
4.0329,0.23175,0.89082,1.1823,0 345 | 0.66018,10.3878,-1.4029,-3.9151,0 346 | 3.5982,7.1307,-1.3035,0.21248,0 347 | -1.8584,7.886,-1.6643,-1.8384,0 348 | 4.0972,0.46972,1.6671,0.91593,0 349 | 3.3299,0.91254,1.5806,0.39352,0 350 | 3.1088,3.1122,0.80857,0.4336,0 351 | -4.2859,8.5234,3.1392,-0.91639,0 352 | -1.2528,10.2036,2.1787,-5.6038,0 353 | 0.5195,-3.2633,3.0895,-0.9849,0 354 | 0.3292,-4.4552,4.5718,-0.9888,0 355 | 0.88872,5.3449,2.045,-0.19355,0 356 | 3.5458,9.3718,-4.0351,-3.9564,0 357 | -0.21661,8.0329,1.8848,-3.8853,0 358 | 2.7206,9.0821,-3.3111,-0.96811,0 359 | 3.2051,8.6889,-2.9033,-0.7819,0 360 | 2.6917,10.8161,-3.3,-4.2888,0 361 | -2.3242,11.5176,1.8231,-5.375,0 362 | 2.7161,-4.2006,4.1914,0.16981,0 363 | 3.3848,3.2674,0.90967,0.25128,0 364 | 1.7452,4.8028,2.0878,0.62627,0 365 | 2.805,0.57732,1.3424,1.2133,0 366 | 5.7823,5.5788,-2.4089,-0.056479,0 367 | 3.8999,1.734,1.6011,0.96765,0 368 | 3.5189,6.332,-1.7791,-0.020273,0 369 | 3.2294,7.7391,-0.37816,-2.5405,0 370 | 3.4985,3.1639,0.22677,-0.1651,0 371 | 2.1948,1.3781,1.1582,0.85774,0 372 | 2.2526,9.9636,-3.1749,-2.9944,0 373 | 4.1529,-3.9358,2.8633,-0.017686,0 374 | 0.74307,11.17,-1.3824,-4.0728,0 375 | 1.9105,8.871,-2.3386,-0.75604,0 376 | -1.5055,0.070346,6.8681,-0.50648,0 377 | 0.58836,10.7727,-1.3884,-4.3276,0 378 | 3.2303,7.8384,-3.5348,-1.2151,0 379 | -1.9922,11.6542,2.6542,-5.2107,0 380 | 2.8523,9.0096,-3.761,-3.3371,0 381 | 4.2772,2.4955,0.48554,0.36119,0 382 | 1.5099,0.039307,6.2332,-0.30346,0 383 | 5.4188,10.1457,-4.084,-3.6991,0 384 | 0.86202,2.6963,4.2908,0.54739,0 385 | 3.8117,10.1457,-4.0463,-4.5629,0 386 | 0.54777,10.3754,-1.5435,-4.1633,0 387 | 2.3718,7.4908,0.015989,-1.7414,0 388 | -2.4953,11.1472,1.9353,-3.4638,0 389 | 4.6361,-2.6611,2.8358,1.1991,0 390 | -2.2527,11.5321,2.5899,-3.2737,0 391 | 3.7982,10.423,-4.1602,-4.9728,0 392 | -0.36279,8.2895,-1.9213,-3.3332,0 393 | 2.1265,6.8783,0.44784,-2.2224,0 394 | 0.86736,5.5643,1.6765,-0.16769,0 395 | 3.7831,10.0526,-3.8869,-3.7366,0 396 | -2.2623,12.1177,0.28846,-7.7581,0 397 | 1.2616,4.4303,-1.3335,-1.7517,0 398 | 2.6799,3.1349,0.34073,0.58489,0 399 | -0.39816,5.9781,1.3912,-1.1621,0 400 | 4.3937,0.35798,2.0416,1.2004,0 401 | 2.9695,5.6222,0.27561,-1.1556,0 402 | 1.3049,-0.15521,6.4911,-0.75346,0 403 | 2.2123,-5.8395,7.7687,-0.85302,0 404 | 1.9647,6.9383,0.57722,0.66377,0 405 | 3.0864,-2.5845,2.2309,0.30947,0 406 | 0.3798,0.7098,0.7572,-0.4444,0 407 | 0.58982,7.4266,1.2353,-2.9595,0 408 | 0.14783,7.946,1.0742,-3.3409,0 409 | -0.062025,6.1975,1.099,-1.131,0 410 | 4.223,1.1319,0.72202,0.96118,0 411 | 0.64295,7.1018,0.3493,-0.41337,0 412 | 1.941,0.46351,4.6472,1.0879,0 413 | 4.0047,0.45937,1.3621,1.6181,0 414 | 3.7767,9.7794,-3.9075,-3.5323,0 415 | 3.4769,-0.15314,2.53,2.4495,0 416 | 1.9818,9.2621,-3.521,-1.872,0 417 | 3.8023,-3.8696,4.044,0.95343,0 418 | 4.3483,11.1079,-4.0857,-4.2539,0 419 | 1.1518,1.3864,5.2727,-0.43536,0 420 | -1.2576,1.5892,7.0078,0.42455,0 421 | 1.9572,-5.1153,8.6127,-1.4297,0 422 | -2.484,12.1611,2.8204,-3.7418,0 423 | -1.1497,1.2954,7.701,0.62627,0 424 | 4.8368,10.0132,-4.3239,-4.3276,0 425 | -0.12196,8.8068,0.94566,-4.2267,0 426 | 1.9429,6.3961,0.092248,0.58102,0 427 | 1.742,-4.809,8.2142,-2.0659,0 428 | -1.5222,10.8409,2.7827,-4.0974,0 429 | -1.3,10.2678,-2.953,-5.8638,0 430 | 3.4246,-0.14693,0.80342,0.29136,0 431 | 2.5503,-4.9518,6.3729,-0.41596,0 432 | 1.5691,6.3465,-0.1828,-2.4099,0 433 | 1.3087,4.9228,2.0013,0.22024,0 434 | 5.1776,8.2316,-3.2511,-1.5694,0 435 | 2.229,9.6325,-3.1123,-2.7164,0 436 | 5.6272,10.0857,-4.2931,-3.8142,0 437 | 
1.2138,8.7986,-2.1672,-0.74182,0 438 | 0.3798,0.7098,0.7572,-0.4444,0 439 | 0.5415,6.0319,1.6825,-0.46122,0 440 | 4.0524,5.6802,-1.9693,0.026279,0 441 | 4.7285,2.1065,-0.28305,1.5625,0 442 | 3.4359,0.66216,2.1041,1.8922,0 443 | 0.86816,10.2429,-1.4912,-4.0082,0 444 | 3.359,9.8022,-3.8209,-3.7133,0 445 | 3.6702,2.9942,0.85141,0.30688,0 446 | 1.3349,6.1189,0.46497,0.49826,0 447 | 3.1887,-3.4143,2.7742,-0.2026,0 448 | 2.4527,2.9653,0.20021,-0.056479,0 449 | 3.9121,2.9735,0.92852,0.60558,0 450 | 3.9364,10.5885,-3.725,-4.3133,0 451 | 3.9414,-3.2902,3.1674,1.0866,0 452 | 3.6922,-3.9585,4.3439,1.3517,0 453 | 5.681,7.795,-2.6848,-0.92544,0 454 | 0.77124,9.0862,-1.2281,-1.4996,0 455 | 3.5761,9.7753,-3.9795,-3.4638,0 456 | 1.602,6.1251,0.52924,0.47886,0 457 | 2.6682,10.216,-3.4414,-4.0069,0 458 | 2.0007,1.8644,2.6491,0.47369,0 459 | 0.64215,3.1287,4.2933,0.64696,0 460 | 4.3848,-3.0729,3.0423,1.2741,0 461 | 0.77445,9.0552,-2.4089,-1.3884,0 462 | 0.96574,8.393,-1.361,-1.4659,0 463 | 3.0948,8.7324,-2.9007,-0.96682,0 464 | 4.9362,7.6046,-2.3429,-0.85302,0 465 | -1.9458,11.2217,1.9079,-3.4405,0 466 | 5.7403,-0.44284,0.38015,1.3763,0 467 | -2.6989,12.1984,0.67661,-8.5482,0 468 | 1.1472,3.5985,1.9387,-0.43406,0 469 | 2.9742,8.96,-2.9024,-1.0379,0 470 | 4.5707,7.2094,-3.2794,-1.4944,0 471 | 0.1848,6.5079,2.0133,-0.87242,0 472 | 0.87256,9.2931,-0.7843,-2.1978,0 473 | 0.39559,6.8866,1.0588,-0.67587,0 474 | 3.8384,6.1851,-2.0439,-0.033204,0 475 | 2.8209,7.3108,-0.81857,-1.8784,0 476 | 2.5817,9.7546,-3.1749,-2.9957,0 477 | 3.8213,0.23175,2.0133,2.0564,0 478 | 0.3798,0.7098,0.7572,-0.4444,0 479 | 3.4893,6.69,-1.2042,-0.38751,0 480 | -1.7781,0.8546,7.1303,0.027572,0 481 | 2.0962,2.4769,1.9379,-0.040962,0 482 | 0.94732,-0.57113,7.1903,-0.67587,0 483 | 2.8261,9.4007,-3.3034,-1.0509,0 484 | 0.0071249,8.3661,0.50781,-3.8155,0 485 | 0.96788,7.1907,1.2798,-2.4565,0 486 | 4.7432,2.1086,0.1368,1.6543,0 487 | 3.6575,7.2797,-2.2692,-1.144,0 488 | 3.8832,6.4023,-2.432,-0.98363,0 489 | 3.4776,8.811,-3.1886,-0.92285,0 490 | 1.1315,7.9212,1.093,-2.8444,0 491 | 2.8237,2.8597,0.19678,0.57196,0 492 | 1.9321,6.0423,0.26019,-2.053,0 493 | 3.0632,-3.3315,5.1305,0.8267,0 494 | -1.8411,10.8306,2.769,-3.0901,0 495 | 2.8084,11.3045,-3.3394,-4.4194,0 496 | 2.5698,-4.4076,5.9856,0.078002,0 497 | -0.12624,10.3216,-3.7121,-6.1185,0 498 | 3.3756,-4.0951,4.367,1.0698,0 499 | -0.048008,-1.6037,8.4756,0.75558,0 500 | 0.5706,-0.0248,1.2421,-0.5621,0 501 | 0.88444,6.5906,0.55837,-0.44182,0 502 | 3.8644,3.7061,0.70403,0.35214,0 503 | 1.2999,2.5762,2.0107,-0.18967,0 504 | 2.0051,-6.8638,8.132,-0.2401,0 505 | 4.9294,0.27727,0.20792,0.33662,0 506 | 2.8297,6.3485,-0.73546,-0.58665,0 507 | 2.565,8.633,-2.9941,-1.3082,0 508 | 2.093,8.3061,0.022844,-3.2724,0 509 | 4.6014,5.6264,-2.1235,0.19309,0 510 | 5.0617,-0.35799,0.44698,0.99868,0 511 | -0.2951,9.0489,-0.52725,-2.0789,0 512 | 3.577,2.4004,1.8908,0.73231,0 513 | 3.9433,2.5017,1.5215,0.903,0 514 | 2.6648,10.754,-3.3994,-4.1685,0 515 | 5.9374,6.1664,-2.5905,-0.36553,0 516 | 2.0153,1.8479,3.1375,0.42843,0 517 | 5.8782,5.9409,-2.8544,-0.60863,0 518 | -2.3983,12.606,2.9464,-5.7888,0 519 | 1.762,4.3682,2.1384,0.75429,0 520 | 4.2406,-2.4852,1.608,0.7155,0 521 | 3.4669,6.87,-1.0568,-0.73147,0 522 | 3.1896,5.7526,-0.18537,-0.30087,0 523 | 0.81356,9.1566,-2.1492,-4.1814,0 524 | 0.52855,0.96427,4.0243,-1.0483,0 525 | 2.1319,-2.0403,2.5574,-0.061652,0 526 | 0.33111,4.5731,2.057,-0.18967,0 527 | 1.2746,8.8172,-1.5323,-1.7957,0 528 | 2.2091,7.4556,-1.3284,-3.3021,0 529 | 2.5328,7.528,-0.41929,-2.6478,0 530 | 
3.6244,1.4609,1.3501,1.9284,0 531 | -1.3885,12.5026,0.69118,-7.5487,0 532 | 5.7227,5.8312,-2.4097,-0.24527,0 533 | 3.3583,10.3567,-3.7301,-3.6991,0 534 | 2.5227,2.2369,2.7236,0.79438,0 535 | 0.045304,6.7334,1.0708,-0.9332,0 536 | 4.8278,7.7598,-2.4491,-1.2216,0 537 | 1.9476,-4.7738,8.527,-1.8668,0 538 | 2.7659,0.66216,4.1494,-0.28406,0 539 | -0.10648,-0.76771,7.7575,0.64179,0 540 | 0.72252,-0.053811,5.6703,-1.3509,0 541 | 4.2475,1.4816,-0.48355,0.95343,0 542 | 3.9772,0.33521,2.2566,2.1625,0 543 | 3.6667,4.302,0.55923,0.33791,0 544 | 2.8232,10.8513,-3.1466,-3.9784,0 545 | -1.4217,11.6542,-0.057699,-7.1025,0 546 | 4.2458,1.1981,0.66633,0.94696,0 547 | 4.1038,-4.8069,3.3491,-0.49225,0 548 | 1.4507,8.7903,-2.2324,-0.65259,0 549 | 3.4647,-3.9172,3.9746,0.36119,0 550 | 1.8533,6.1458,1.0176,-2.0401,0 551 | 3.5288,0.71596,1.9507,1.9375,0 552 | 3.9719,1.0367,0.75973,1.0013,0 553 | 3.534,9.3614,-3.6316,-1.2461,0 554 | 3.6894,9.887,-4.0788,-4.3664,0 555 | 3.0672,-4.4117,3.8238,-0.81682,0 556 | 2.6463,-4.8152,6.3549,0.003003,0 557 | 2.2893,3.733,0.6312,-0.39786,0 558 | 1.5673,7.9274,-0.056842,-2.1694,0 559 | 4.0405,0.51524,1.0279,1.106,0 560 | 4.3846,-4.8794,3.3662,-0.029324,0 561 | 2.0165,-0.25246,5.1707,1.0763,0 562 | 4.0446,11.1741,-4.3582,-4.7401,0 563 | -0.33729,-0.64976,7.6659,0.72326,0 564 | -2.4604,12.7302,0.91738,-7.6418,0 565 | 4.1195,10.9258,-3.8929,-4.1802,0 566 | 2.0193,0.82356,4.6369,1.4202,0 567 | 1.5701,7.9129,0.29018,-2.1953,0 568 | 2.6415,7.586,-0.28562,-1.6677,0 569 | 5.0214,8.0764,-3.0515,-1.7155,0 570 | 4.3435,3.3295,0.83598,0.64955,0 571 | 1.8238,-6.7748,8.3873,-0.54139,0 572 | 3.9382,0.9291,0.78543,0.6767,0 573 | 2.2517,-5.1422,4.2916,-1.2487,0 574 | 5.504,10.3671,-4.413,-4.0211,0 575 | 2.8521,9.171,-3.6461,-1.2047,0 576 | 1.1676,9.1566,-2.0867,-0.80647,0 577 | 2.6104,8.0081,-0.23592,-1.7608,0 578 | 0.32444,10.067,-1.1982,-4.1284,0 579 | 3.8962,-4.7904,3.3954,-0.53751,0 580 | 2.1752,-0.8091,5.1022,-0.67975,0 581 | 1.1588,8.9331,-2.0807,-1.1272,0 582 | 4.7072,8.2957,-2.5605,-1.4905,0 583 | -1.9667,11.8052,-0.40472,-7.8719,0 584 | 4.0552,0.40143,1.4563,0.65343,0 585 | 2.3678,-6.839,8.4207,-0.44829,0 586 | 0.33565,6.8369,0.69718,-0.55691,0 587 | 4.3398,-5.3036,3.8803,-0.70432,0 588 | 1.5456,8.5482,0.4187,-2.1784,0 589 | 1.4276,8.3847,-2.0995,-1.9677,0 590 | -0.27802,8.1881,-3.1338,-2.5276,0 591 | 0.93611,8.6413,-1.6351,-1.3043,0 592 | 4.6352,-3.0087,2.6773,1.212,0 593 | 1.5268,-5.5871,8.6564,-1.722,0 594 | 0.95626,2.4728,4.4578,0.21636,0 595 | -2.7914,1.7734,6.7756,-0.39915,0 596 | 5.2032,3.5116,-1.2538,1.0129,0 597 | 3.1836,7.2321,-1.0713,-2.5909,0 598 | 0.65497,5.1815,1.0673,-0.42113,0 599 | 5.6084,10.3009,-4.8003,-4.3534,0 600 | 1.105,7.4432,0.41099,-3.0332,0 601 | 3.9292,-2.9156,2.2129,0.30817,0 602 | 1.1558,6.4003,1.5506,0.6961,0 603 | 2.5581,2.6218,1.8513,0.40257,0 604 | 2.7831,10.9796,-3.557,-4.4039,0 605 | 3.7635,2.7811,0.66119,0.34179,0 606 | -2.6479,10.1374,-1.331,-5.4707,0 607 | 1.0652,8.3682,-1.4004,-1.6509,0 608 | -1.4275,11.8797,0.41613,-6.9978,0 609 | 5.7456,10.1808,-4.7857,-4.3366,0 610 | 5.086,3.2798,-1.2701,1.1189,0 611 | 3.4092,5.4049,-2.5228,-0.89958,0 612 | -0.2361,9.3221,2.1307,-4.3793,0 613 | 3.8197,8.9951,-4.383,-4.0327,0 614 | -1.1391,1.8127,6.9144,0.70127,0 615 | 4.9249,0.68906,0.77344,1.2095,0 616 | 2.5089,6.841,-0.029423,0.44912,0 617 | -0.2062,9.2207,-3.7044,-6.8103,0 618 | 3.946,6.8514,-1.5443,-0.5582,0 619 | -0.278,8.1881,-3.1338,-2.5276,0 620 | 1.8592,3.2074,-0.15966,-0.26208,0 621 | 0.56953,7.6294,1.5754,-3.2233,0 622 | 
3.4626,-4.449,3.5427,0.15429,0 623 | 3.3951,1.1484,2.1401,2.0862,0 624 | 5.0429,-0.52974,0.50439,1.106,0 625 | 3.7758,7.1783,-1.5195,0.40128,0 626 | 4.6562,7.6398,-2.4243,-1.2384,0 627 | 4.0948,-2.9674,2.3689,0.75429,0 628 | 1.8384,6.063,0.54723,0.51248,0 629 | 2.0153,0.43661,4.5864,-0.3151,0 630 | 3.5251,0.7201,1.6928,0.64438,0 631 | 3.757,-5.4236,3.8255,-1.2526,0 632 | 2.5989,3.5178,0.7623,0.81119,0 633 | 1.8994,0.97462,4.2265,0.81377,0 634 | 3.6941,-3.9482,4.2625,1.1577,0 635 | 4.4295,-2.3507,1.7048,0.90946,0 636 | 6.8248,5.2187,-2.5425,0.5461,0 637 | 1.8967,-2.5163,2.8093,-0.79742,0 638 | 2.1526,-6.1665,8.0831,-0.34355,0 639 | 3.3004,7.0811,-1.3258,0.22283,0 640 | 2.7213,7.05,-0.58808,0.41809,0 641 | 3.8846,-3.0336,2.5334,0.20214,0 642 | 4.1665,-0.4449,0.23448,0.27843,0 643 | 0.94225,5.8561,1.8762,-0.32544,0 644 | 5.1321,-0.031048,0.32616,1.1151,0 645 | 0.38251,6.8121,1.8128,-0.61251,0 646 | 3.0333,-2.5928,2.3183,0.303,0 647 | 2.9233,6.0464,-0.11168,-0.58665,0 648 | 1.162,10.2926,-1.2821,-4.0392,0 649 | 3.7791,2.5762,1.3098,0.5655,0 650 | 0.77765,5.9781,1.1941,-0.3526,0 651 | -0.38388,-1.0471,8.0514,0.49567,0 652 | 0.21084,9.4359,-0.094543,-1.859,0 653 | 2.9571,-4.5938,5.9068,0.57196,0 654 | 4.6439,-3.3729,2.5976,0.55257,0 655 | 3.3577,-4.3062,6.0241,0.18274,0 656 | 3.5127,2.9073,1.0579,0.40774,0 657 | 2.6562,10.7044,-3.3085,-4.0767,0 658 | -1.3612,10.694,1.7022,-2.9026,0 659 | -0.278,8.1881,-3.1338,-2.5276,0 660 | 1.04,-6.9321,8.2888,-1.2991,0 661 | 2.1881,2.7356,1.3278,-0.1832,0 662 | 4.2756,-2.6528,2.1375,0.94437,0 663 | -0.11996,6.8741,0.91995,-0.6694,0 664 | 2.9736,8.7944,-3.6359,-1.3754,0 665 | 3.7798,-3.3109,2.6491,0.066365,0 666 | 5.3586,3.7557,-1.7345,1.0789,0 667 | 1.8373,6.1292,0.84027,0.55257,0 668 | 1.2262,0.89599,5.7568,-0.11596,0 669 | -0.048008,-0.56078,7.7215,0.453,0 670 | 0.5706,-0.024841,1.2421,-0.56208,0 671 | 4.3634,0.46351,1.4281,2.0202,0 672 | 3.482,-4.1634,3.5008,-0.078462,0 673 | 0.51947,-3.2633,3.0895,-0.98492,0 674 | 2.3164,-2.628,3.1529,-0.08622,0 675 | -1.8348,11.0334,3.1863,-4.8888,0 676 | 1.3754,8.8793,-1.9136,-0.53751,0 677 | -0.16682,5.8974,0.49839,-0.70044,0 678 | 0.29961,7.1328,-0.31475,-1.1828,0 679 | 0.25035,9.3262,-3.6873,-6.2543,0 680 | 2.4673,1.3926,1.7125,0.41421,0 681 | 0.77805,6.6424,-1.1425,-1.0573,0 682 | 3.4465,2.9508,1.0271,0.5461,0 683 | 2.2429,-4.1427,5.2333,-0.40173,0 684 | 3.7321,-3.884,3.3577,-0.0060486,0 685 | 4.3365,-3.584,3.6884,0.74912,0 686 | -2.0759,10.8223,2.6439,-4.837,0 687 | 4.0715,7.6398,-2.0824,-1.1698,0 688 | 0.76163,5.8209,1.1959,-0.64613,0 689 | -0.53966,7.3273,0.46583,-1.4543,0 690 | 2.6213,5.7919,0.065686,-1.5759,0 691 | 3.0242,-3.3378,2.5865,-0.54785,0 692 | 5.8519,5.3905,-2.4037,-0.061652,0 693 | 0.5706,-0.0248,1.2421,-0.5621,0 694 | 3.9771,11.1513,-3.9272,-4.3444,0 695 | 1.5478,9.1814,-1.6326,-1.7375,0 696 | 0.74054,0.36625,2.1992,0.48403,0 697 | 0.49571,10.2243,-1.097,-4.0159,0 698 | 1.645,7.8612,-0.87598,-3.5569,0 699 | 3.6077,6.8576,-1.1622,0.28231,0 700 | 3.2403,-3.7082,5.2804,0.41291,0 701 | 3.9166,10.2491,-4.0926,-4.4659,0 702 | 3.9262,6.0299,-2.0156,-0.065531,0 703 | 5.591,10.4643,-4.3839,-4.3379,0 704 | 3.7522,-3.6978,3.9943,1.3051,0 705 | 1.3114,4.5462,2.2935,0.22541,0 706 | 3.7022,6.9942,-1.8511,-0.12889,0 707 | 4.364,-3.1039,2.3757,0.78532,0 708 | 3.5829,1.4423,1.0219,1.4008,0 709 | 4.65,-4.8297,3.4553,-0.25174,0 710 | 5.1731,3.9606,-1.983,0.40774,0 711 | 3.2692,3.4184,0.20706,-0.066824,0 712 | 2.4012,1.6223,3.0312,0.71679,0 713 | 1.7257,-4.4697,8.2219,-1.8073,0 714 | 4.7965,6.9859,-1.9967,-0.35001,0 715 
| 4.0962,10.1891,-3.9323,-4.1827,0 716 | 2.5559,3.3605,2.0321,0.26809,0 717 | 3.4916,8.5709,-3.0326,-0.59182,0 718 | 0.5195,-3.2633,3.0895,-0.9849,0 719 | 2.9856,7.2673,-0.409,-2.2431,0 720 | 4.0932,5.4132,-1.8219,0.23576,0 721 | 1.7748,-0.76978,5.5854,1.3039,0 722 | 5.2012,0.32694,0.17965,1.1797,0 723 | -0.45062,-1.3678,7.0858,-0.40303,0 724 | 4.8451,8.1116,-2.9512,-1.4724,0 725 | 0.74841,7.2756,1.1504,-0.5388,0 726 | 5.1213,8.5565,-3.3917,-1.5474,0 727 | 3.6181,-3.7454,2.8273,-0.71208,0 728 | 0.040498,8.5234,1.4461,-3.9306,0 729 | -2.6479,10.1374,-1.331,-5.4707,0 730 | 0.37984,0.70975,0.75716,-0.44441,0 731 | -0.95923,0.091039,6.2204,-1.4828,0 732 | 2.8672,10.0008,-3.2049,-3.1095,0 733 | 1.0182,9.109,-0.62064,-1.7129,0 734 | -2.7143,11.4535,2.1092,-3.9629,0 735 | 3.8244,-3.1081,2.4537,0.52024,0 736 | 2.7961,2.121,1.8385,0.38317,0 737 | 3.5358,6.7086,-0.81857,0.47886,0 738 | -0.7056,8.7241,2.2215,-4.5965,0 739 | 4.1542,7.2756,-2.4766,-1.2099,0 740 | 0.92703,9.4318,-0.66263,-1.6728,0 741 | 1.8216,-6.4748,8.0514,-0.41855,0 742 | -2.4473,12.6247,0.73573,-7.6612,0 743 | 3.5862,-3.0957,2.8093,0.24481,0 744 | 0.66191,9.6594,-0.28819,-1.6638,0 745 | 4.7926,1.7071,-0.051701,1.4926,0 746 | 4.9852,8.3516,-2.5425,-1.2823,0 747 | 0.75736,3.0294,2.9164,-0.068117,0 748 | 4.6499,7.6336,-1.9427,-0.37458,0 749 | -0.023579,7.1742,0.78457,-0.75734,0 750 | 0.85574,0.0082678,6.6042,-0.53104,0 751 | 0.88298,0.66009,6.0096,-0.43277,0 752 | 4.0422,-4.391,4.7466,1.137,0 753 | 2.2546,8.0992,-0.24877,-3.2698,0 754 | 0.38478,6.5989,-0.3336,-0.56466,0 755 | 3.1541,-5.1711,6.5991,0.57455,0 756 | 2.3969,0.23589,4.8477,1.437,0 757 | 4.7114,2.0755,-0.2702,1.2379,0 758 | 4.0127,10.1477,-3.9366,-4.0728,0 759 | 2.6606,3.1681,1.9619,0.18662,0 760 | 3.931,1.8541,-0.023425,1.2314,0 761 | 0.01727,8.693,1.3989,-3.9668,0 762 | 3.2414,0.40971,1.4015,1.1952,0 763 | 2.2504,3.5757,0.35273,0.2836,0 764 | -1.3971,3.3191,-1.3927,-1.9948,1 765 | 0.39012,-0.14279,-0.031994,0.35084,1 766 | -1.6677,-7.1535,7.8929,0.96765,1 767 | -3.8483,-12.8047,15.6824,-1.281,1 768 | -3.5681,-8.213,10.083,0.96765,1 769 | -2.2804,-0.30626,1.3347,1.3763,1 770 | -1.7582,2.7397,-2.5323,-2.234,1 771 | -0.89409,3.1991,-1.8219,-2.9452,1 772 | 0.3434,0.12415,-0.28733,0.14654,1 773 | -0.9854,-6.661,5.8245,0.5461,1 774 | -2.4115,-9.1359,9.3444,-0.65259,1 775 | -1.5252,-6.2534,5.3524,0.59912,1 776 | -0.61442,-0.091058,-0.31818,0.50214,1 777 | -0.36506,2.8928,-3.6461,-3.0603,1 778 | -5.9034,6.5679,0.67661,-6.6797,1 779 | -1.8215,2.7521,-0.72261,-2.353,1 780 | -0.77461,-1.8768,2.4023,1.1319,1 781 | -1.8187,-9.0366,9.0162,-0.12243,1 782 | -3.5801,-12.9309,13.1779,-2.5677,1 783 | -1.8219,-6.8824,5.4681,0.057313,1 784 | -0.3481,-0.38696,-0.47841,0.62627,1 785 | 0.47368,3.3605,-4.5064,-4.0431,1 786 | -3.4083,4.8587,-0.76888,-4.8668,1 787 | -1.6662,-0.30005,1.4238,0.024986,1 788 | -2.0962,-7.1059,6.6188,-0.33708,1 789 | -2.6685,-10.4519,9.1139,-1.7323,1 790 | -0.47465,-4.3496,1.9901,0.7517,1 791 | 1.0552,1.1857,-2.6411,0.11033,1 792 | 1.1644,3.8095,-4.9408,-4.0909,1 793 | -4.4779,7.3708,-0.31218,-6.7754,1 794 | -2.7338,0.45523,2.4391,0.21766,1 795 | -2.286,-5.4484,5.8039,0.88231,1 796 | -1.6244,-6.3444,4.6575,0.16981,1 797 | 0.50813,0.47799,-1.9804,0.57714,1 798 | 1.6408,4.2503,-4.9023,-2.6621,1 799 | 0.81583,4.84,-5.2613,-6.0823,1 800 | -5.4901,9.1048,-0.38758,-5.9763,1 801 | -3.2238,2.7935,0.32274,-0.86078,1 802 | -2.0631,-1.5147,1.219,0.44524,1 803 | -0.91318,-2.0113,-0.19565,0.066365,1 804 | 0.6005,1.9327,-3.2888,-0.32415,1 805 | 0.91315,3.3377,-4.0557,-1.6741,1 806 | 
-0.28015,3.0729,-3.3857,-2.9155,1 807 | -3.6085,3.3253,-0.51954,-3.5737,1 808 | -6.2003,8.6806,0.0091344,-3.703,1 809 | -4.2932,3.3419,0.77258,-0.99785,1 810 | -3.0265,-0.062088,0.68604,-0.055186,1 811 | -1.7015,-0.010356,-0.99337,-0.53104,1 812 | -0.64326,2.4748,-2.9452,-1.0276,1 813 | -0.86339,1.9348,-2.3729,-1.0897,1 814 | -2.0659,1.0512,-0.46298,-1.0974,1 815 | -2.1333,1.5685,-0.084261,-1.7453,1 816 | -1.2568,-1.4733,2.8718,0.44653,1 817 | -3.1128,-6.841,10.7402,-1.0172,1 818 | -4.8554,-5.9037,10.9818,-0.82199,1 819 | -2.588,3.8654,-0.3336,-1.2797,1 820 | 0.24394,1.4733,-1.4192,-0.58535,1 821 | -1.5322,-5.0966,6.6779,0.17498,1 822 | -4.0025,-13.4979,17.6772,-3.3202,1 823 | -4.0173,-8.3123,12.4547,-1.4375,1 824 | -3.0731,-0.53181,2.3877,0.77627,1 825 | -1.979,3.2301,-1.3575,-2.5819,1 826 | -0.4294,-0.14693,0.044265,-0.15605,1 827 | -2.234,-7.0314,7.4936,0.61334,1 828 | -4.211,-12.4736,14.9704,-1.3884,1 829 | -3.8073,-8.0971,10.1772,0.65084,1 830 | -2.5912,-0.10554,1.2798,1.0414,1 831 | -2.2482,3.0915,-2.3969,-2.6711,1 832 | -1.4427,3.2922,-1.9702,-3.4392,1 833 | -0.39416,-0.020702,-0.066267,-0.44699,1 834 | -1.522,-6.6383,5.7491,-0.10691,1 835 | -2.8267,-9.0407,9.0694,-0.98233,1 836 | -1.7263,-6.0237,5.2419,0.29524,1 837 | -0.94255,0.039307,-0.24192,0.31593,1 838 | -0.89569,3.0025,-3.6067,-3.4457,1 839 | -6.2815,6.6651,0.52581,-7.0107,1 840 | -2.3211,3.166,-1.0002,-2.7151,1 841 | -1.3414,-2.0776,2.8093,0.60688,1 842 | -2.258,-9.3263,9.3727,-0.85949,1 843 | -3.8858,-12.8461,12.7957,-3.1353,1 844 | -1.8969,-6.7893,5.2761,-0.32544,1 845 | -0.52645,-0.24832,-0.45613,0.41938,1 846 | 0.0096613,3.5612,-4.407,-4.4103,1 847 | -3.8826,4.898,-0.92311,-5.0801,1 848 | -2.1405,-0.16762,1.321,-0.20906,1 849 | -2.4824,-7.3046,6.839,-0.59053,1 850 | -2.9098,-10.0712,8.4156,-1.9948,1 851 | -0.60975,-4.002,1.8471,0.6017,1 852 | 0.83625,1.1071,-2.4706,-0.062945,1 853 | 0.60731,3.9544,-4.772,-4.4853,1 854 | -4.8861,7.0542,-0.17252,-6.959,1 855 | -3.1366,0.42212,2.6225,-0.064238,1 856 | -2.5754,-5.6574,6.103,0.65214,1 857 | -1.8782,-6.5865,4.8486,-0.021566,1 858 | 0.24261,0.57318,-1.9402,0.44007,1 859 | 1.296,4.2855,-4.8457,-2.9013,1 860 | 0.25943,5.0097,-5.0394,-6.3862,1 861 | -5.873,9.1752,-0.27448,-6.0422,1 862 | -3.4605,2.6901,0.16165,-1.0224,1 863 | -2.3797,-1.4402,1.1273,0.16076,1 864 | -1.2424,-1.7175,-0.52553,-0.21036,1 865 | 0.20216,1.9182,-3.2828,-0.61768,1 866 | 0.59823,3.5012,-3.9795,-1.7841,1 867 | -0.77995,3.2322,-3.282,-3.1004,1 868 | -4.1409,3.4619,-0.47841,-3.8879,1 869 | -6.5084,8.7696,0.23191,-3.937,1 870 | -4.4996,3.4288,0.56265,-1.1672,1 871 | -3.3125,0.10139,0.55323,-0.2957,1 872 | -1.9423,0.3766,-1.2898,-0.82458,1 873 | -0.75793,2.5349,-3.0464,-1.2629,1 874 | -0.95403,1.9824,-2.3163,-1.1957,1 875 | -2.2173,1.4671,-0.72689,-1.1724,1 876 | -2.799,1.9679,-0.42357,-2.1125,1 877 | -1.8629,-0.84841,2.5377,0.097399,1 878 | -3.5916,-6.2285,10.2389,-1.1543,1 879 | -5.1216,-5.3118,10.3846,-1.0612,1 880 | -3.2854,4.0372,-0.45356,-1.8228,1 881 | -0.56877,1.4174,-1.4252,-1.1246,1 882 | -2.3518,-4.8359,6.6479,-0.060358,1 883 | -4.4861,-13.2889,17.3087,-3.2194,1 884 | -4.3876,-7.7267,11.9655,-1.4543,1 885 | -3.3604,-0.32696,2.1324,0.6017,1 886 | -1.0112,2.9984,-1.1664,-1.6185,1 887 | 0.030219,-1.0512,1.4024,0.77369,1 888 | -1.6514,-8.4985,9.1122,1.2379,1 889 | -3.2692,-12.7406,15.5573,-0.14182,1 890 | -2.5701,-6.8452,8.9999,2.1353,1 891 | -1.3066,0.25244,0.7623,1.7758,1 892 | -1.6637,3.2881,-2.2701,-2.2224,1 893 | -0.55008,2.8659,-1.6488,-2.4319,1 894 | 0.21431,-0.69529,0.87711,0.29653,1 895 | 
-0.77288,-7.4473,6.492,0.36119,1 896 | -1.8391,-9.0883,9.2416,-0.10432,1 897 | -0.63298,-5.1277,4.5624,1.4797,1 898 | 0.0040545,0.62905,-0.64121,0.75817,1 899 | -0.28696,3.1784,-3.5767,-3.1896,1 900 | -5.2406,6.6258,-0.19908,-6.8607,1 901 | -1.4446,2.1438,-0.47241,-1.6677,1 902 | -0.65767,-2.8018,3.7115,0.99739,1 903 | -1.5449,-10.1498,9.6152,-1.2332,1 904 | -2.8957,-12.0205,11.9149,-2.7552,1 905 | -0.81479,-5.7381,4.3919,0.3211,1 906 | 0.50225,0.65388,-1.1793,0.39998,1 907 | 0.74521,3.6357,-4.4044,-4.1414,1 908 | -2.9146,4.0537,-0.45699,-4.0327,1 909 | -1.3907,-1.3781,2.3055,-0.021566,1 910 | -1.786,-8.1157,7.0858,-1.2112,1 911 | -1.7322,-9.2828,7.719,-1.7168,1 912 | 0.55298,-3.4619,1.7048,1.1008,1 913 | 2.031,1.852,-3.0121,0.003003,1 914 | 1.2279,4.0309,-4.6435,-3.9125,1 915 | -4.2249,6.2699,0.15822,-5.5457,1 916 | -2.5346,-0.77392,3.3602,0.00171,1 917 | -1.749,-6.332,6.0987,0.14266,1 918 | -0.539,-5.167,3.4399,0.052141,1 919 | 1.5631,0.89599,-1.9702,0.65472,1 920 | 2.3917,4.5565,-4.9888,-2.8987,1 921 | 0.89512,4.7738,-4.8431,-5.5909,1 922 | -5.4808,8.1819,0.27818,-5.0323,1 923 | -2.8833,1.7713,0.68946,-0.4638,1 924 | -1.4174,-2.2535,1.518,0.61981,1 925 | 0.4283,-0.94981,-1.0731,0.3211,1 926 | 1.5904,2.2121,-3.1183,-0.11725,1 927 | 1.7425,3.6833,-4.0129,-1.7207,1 928 | -0.23356,3.2405,-3.0669,-2.7784,1 929 | -3.6227,3.9958,-0.35845,-3.9047,1 930 | -6.1536,7.9295,0.61663,-3.2646,1 931 | -3.9172,2.6652,0.78886,-0.7819,1 932 | -2.2214,-0.23798,0.56008,0.05602,1 933 | -0.49241,0.89392,-1.6283,-0.56854,1 934 | 0.26517,2.4066,-2.8416,-0.59958,1 935 | -0.10234,1.8189,-2.2169,-0.56725,1 936 | -1.6176,1.0926,-0.35502,-0.59958,1 937 | -1.8448,1.254,0.27218,-1.0728,1 938 | -1.2786,-2.4087,4.5735,0.47627,1 939 | -2.902,-7.6563,11.8318,-0.84268,1 940 | -4.3773,-5.5167,10.939,-0.4082,1 941 | -2.0529,3.8385,-0.79544,-1.2138,1 942 | 0.18868,0.70148,-0.51182,0.0055892,1 943 | -1.7279,-6.841,8.9494,0.68058,1 944 | -3.3793,-13.7731,17.9274,-2.0323,1 945 | -3.1273,-7.1121,11.3897,-0.083634,1 946 | -2.121,-0.05588,1.949,1.353,1 947 | -1.7697,3.4329,-1.2144,-2.3789,1 948 | -0.0012852,0.13863,-0.19651,0.0081754,1 949 | -1.682,-6.8121,7.1398,1.3323,1 950 | -3.4917,-12.1736,14.3689,-0.61639,1 951 | -3.1158,-8.6289,10.4403,0.97153,1 952 | -2.0891,-0.48422,1.704,1.7435,1 953 | -1.6936,2.7852,-2.1835,-1.9276,1 954 | -1.2846,3.2715,-1.7671,-3.2608,1 955 | -0.092194,0.39315,-0.32846,-0.13794,1 956 | -1.0292,-6.3879,5.5255,0.79955,1 957 | -2.2083,-9.1069,8.9991,-0.28406,1 958 | -1.0744,-6.3113,5.355,0.80472,1 959 | -0.51003,-0.23591,0.020273,0.76334,1 960 | -0.36372,3.0439,-3.4816,-2.7836,1 961 | -6.3979,6.4479,1.0836,-6.6176,1 962 | -2.2501,3.3129,-0.88369,-2.8974,1 963 | -1.1859,-1.2519,2.2635,0.77239,1 964 | -1.8076,-8.8131,8.7086,-0.21682,1 965 | -3.3863,-12.9889,13.0545,-2.7202,1 966 | -1.4106,-7.108,5.6454,0.31335,1 967 | -0.21394,-0.68287,0.096532,1.1965,1 968 | 0.48797,3.5674,-4.3882,-3.8116,1 969 | -3.8167,5.1401,-0.65063,-5.4306,1 970 | -1.9555,0.20692,1.2473,-0.3707,1 971 | -2.1786,-6.4479,6.0344,-0.20777,1 972 | -2.3299,-9.9532,8.4756,-1.8733,1 973 | 0.0031201,-4.0061,1.7956,0.91722,1 974 | 1.3518,1.0595,-2.3437,0.39998,1 975 | 1.2309,3.8923,-4.8277,-4.0069,1 976 | -5.0301,7.5032,-0.13396,-7.5034,1 977 | -3.0799,0.60836,2.7039,-0.23751,1 978 | -2.2987,-5.227,5.63,0.91722,1 979 | -1.239,-6.541,4.8151,-0.033204,1 980 | 0.75896,0.29176,-1.6506,0.83834,1 981 | 1.6799,4.2068,-4.5398,-2.3931,1 982 | 0.63655,5.2022,-5.2159,-6.1211,1 983 | -6.0598,9.2952,-0.43642,-6.3694,1 984 | -3.518,2.8763,0.1548,-1.2086,1 985 
| -2.0336,-1.4092,1.1582,0.36507,1 986 | -0.69745,-1.7672,-0.34474,-0.12372,1 987 | 0.75108,1.9161,-3.1098,-0.20518,1 988 | 0.84546,3.4826,-3.6307,-1.3961,1 989 | -0.55648,3.2136,-3.3085,-2.7965,1 990 | -3.6817,3.2239,-0.69347,-3.4004,1 991 | -6.7526,8.8172,-0.061983,-3.725,1 992 | -4.577,3.4515,0.66719,-0.94742,1 993 | -2.9883,0.31245,0.45041,0.068951,1 994 | -1.4781,0.14277,-1.1622,-0.48579,1 995 | -0.46651,2.3383,-2.9812,-1.0431,1 996 | -0.8734,1.6533,-2.1964,-0.78061,1 997 | -2.1234,1.1815,-0.55552,-0.81165,1 998 | -2.3142,2.0838,-0.46813,-1.6767,1 999 | -1.4233,-0.98912,2.3586,0.39481,1 1000 | -3.0866,-6.6362,10.5405,-0.89182,1 1001 | -4.7331,-6.1789,11.388,-1.0741,1 1002 | -2.8829,3.8964,-0.1888,-1.1672,1 1003 | -0.036127,1.525,-1.4089,-0.76121,1 1004 | -1.7104,-4.778,6.2109,0.3974,1 1005 | -3.8203,-13.0551,16.9583,-2.3052,1 1006 | -3.7181,-8.5089,12.363,-0.95518,1 1007 | -2.899,-0.60424,2.6045,1.3776,1 1008 | -0.98193,2.7956,-1.2341,-1.5668,1 1009 | -0.17296,-1.1816,1.3818,0.7336,1 1010 | -1.9409,-8.6848,9.155,0.94049,1 1011 | -3.5713,-12.4922,14.8881,-0.47027,1 1012 | -2.9915,-6.6258,8.6521,1.8198,1 1013 | -1.8483,0.31038,0.77344,1.4189,1 1014 | -2.2677,3.2964,-2.2563,-2.4642,1 1015 | -0.50816,2.868,-1.8108,-2.2612,1 1016 | 0.14329,-1.0885,1.0039,0.48791,1 1017 | -0.90784,-7.9026,6.7807,0.34179,1 1018 | -2.0042,-9.3676,9.3333,-0.10303,1 1019 | -0.93587,-5.1008,4.5367,1.3866,1 1020 | -0.40804,0.54214,-0.52725,0.6586,1 1021 | -0.8172,3.3812,-3.6684,-3.456,1 1022 | -4.8392,6.6755,-0.24278,-6.5775,1 1023 | -1.2792,2.1376,-0.47584,-1.3974,1 1024 | -0.66008,-3.226,3.8058,1.1836,1 1025 | -1.7713,-10.7665,10.2184,-1.0043,1 1026 | -3.0061,-12.2377,11.9552,-2.1603,1 1027 | -1.1022,-5.8395,4.5641,0.68705,1 1028 | 0.11806,0.39108,-0.98223,0.42843,1 1029 | 0.11686,3.735,-4.4379,-4.3741,1 1030 | -2.7264,3.9213,-0.49212,-3.6371,1 1031 | -1.2369,-1.6906,2.518,0.51636,1 1032 | -1.8439,-8.6475,7.6796,-0.66682,1 1033 | -1.8554,-9.6035,7.7764,-0.97716,1 1034 | 0.16358,-3.3584,1.3749,1.3569,1 1035 | 1.5077,1.9596,-3.0584,-0.12243,1 1036 | 0.67886,4.1199,-4.569,-4.1414,1 1037 | -3.9934,5.8333,0.54723,-4.9379,1 1038 | -2.3898,-0.78427,3.0141,0.76205,1 1039 | -1.7976,-6.7686,6.6753,0.89912,1 1040 | -0.70867,-5.5602,4.0483,0.903,1 1041 | 1.0194,1.1029,-2.3,0.59395,1 1042 | 1.7875,4.78,-5.1362,-3.2362,1 1043 | 0.27331,4.8773,-4.9194,-5.8198,1 1044 | -5.1661,8.0433,0.044265,-4.4983,1 1045 | -2.7028,1.6327,0.83598,-0.091393,1 1046 | -1.4904,-2.2183,1.6054,0.89394,1 1047 | -0.014902,-1.0243,-0.94024,0.64955,1 1048 | 0.88992,2.2638,-3.1046,-0.11855,1 1049 | 1.0637,3.6957,-4.1594,-1.9379,1 1050 | -0.8471,3.1329,-3.0112,-2.9388,1 1051 | -3.9594,4.0289,-0.35845,-3.8957,1 1052 | -5.8818,7.6584,0.5558,-2.9155,1 1053 | -3.7747,2.5162,0.83341,-0.30993,1 1054 | -2.4198,-0.24418,0.70146,0.41809,1 1055 | -0.83535,0.80494,-1.6411,-0.19225,1 1056 | -0.30432,2.6528,-2.7756,-0.65647,1 1057 | -0.60254,1.7237,-2.1501,-0.77027,1 1058 | -2.1059,1.1815,-0.53324,-0.82716,1 1059 | -2.0441,1.2271,0.18564,-1.091,1 1060 | -1.5621,-2.2121,4.2591,0.27972,1 1061 | -3.2305,-7.2135,11.6433,-0.94613,1 1062 | -4.8426,-4.9932,10.4052,-0.53104,1 1063 | -2.3147,3.6668,-0.6969,-1.2474,1 1064 | -0.11716,0.60422,-0.38587,-0.059065,1 1065 | -2.0066,-6.719,9.0162,0.099985,1 1066 | -3.6961,-13.6779,17.5795,-2.6181,1 1067 | -3.6012,-6.5389,10.5234,-0.48967,1 1068 | -2.6286,0.18002,1.7956,0.97282,1 1069 | -0.82601,2.9611,-1.2864,-1.4647,1 1070 | 0.31803,-0.99326,1.0947,0.88619,1 1071 | -1.4454,-8.4385,8.8483,0.96894,1 1072 | 
-3.1423,-13.0365,15.6773,-0.66165,1 1073 | -2.5373,-6.959,8.8054,1.5289,1 1074 | -1.366,0.18416,0.90539,1.5806,1 1075 | -1.7064,3.3088,-2.2829,-2.1978,1 1076 | -0.41965,2.9094,-1.7859,-2.2069,1 1077 | 0.37637,-0.82358,0.78543,0.74524,1 1078 | -0.55355,-7.9233,6.7156,0.74394,1 1079 | -1.6001,-9.5828,9.4044,0.081882,1 1080 | -0.37013,-5.554,4.7749,1.547,1 1081 | 0.12126,0.22347,-0.47327,0.97024,1 1082 | -0.27068,3.2674,-3.5562,-3.0888,1 1083 | -5.119,6.6486,-0.049987,-6.5206,1 1084 | -1.3946,2.3134,-0.44499,-1.4905,1 1085 | -0.69879,-3.3771,4.1211,1.5043,1 1086 | -1.48,-10.5244,9.9176,-0.5026,1 1087 | -2.6649,-12.813,12.6689,-1.9082,1 1088 | -0.62684,-6.301,4.7843,1.106,1 1089 | 0.518,0.25865,-0.84085,0.96118,1 1090 | 0.64376,3.764,-4.4738,-4.0483,1 1091 | -2.9821,4.1986,-0.5898,-3.9642,1 1092 | -1.4628,-1.5706,2.4357,0.49826,1 1093 | -1.7101,-8.7903,7.9735,-0.45475,1 1094 | -1.5572,-9.8808,8.1088,-1.0806,1 1095 | 0.74428,-3.7723,1.6131,1.5754,1 1096 | 2.0177,1.7982,-2.9581,0.2099,1 1097 | 1.164,3.913,-4.5544,-3.8672,1 1098 | -4.3667,6.0692,0.57208,-5.4668,1 1099 | -2.5919,-1.0553,3.8949,0.77757,1 1100 | -1.8046,-6.8141,6.7019,1.1681,1 1101 | -0.71868,-5.7154,3.8298,1.0233,1 1102 | 1.4378,0.66837,-2.0267,1.0271,1 1103 | 2.1943,4.5503,-4.976,-2.7254,1 1104 | 0.7376,4.8525,-4.7986,-5.6659,1 1105 | -5.637,8.1261,0.13081,-5.0142,1 1106 | -3.0193,1.7775,0.73745,-0.45346,1 1107 | -1.6706,-2.09,1.584,0.71162,1 1108 | -0.1269,-1.1505,-0.95138,0.57843,1 1109 | 1.2198,2.0982,-3.1954,0.12843,1 1110 | 1.4501,3.6067,-4.0557,-1.5966,1 1111 | -0.40857,3.0977,-2.9607,-2.6892,1 1112 | -3.8952,3.8157,-0.31304,-3.8194,1 1113 | -6.3679,8.0102,0.4247,-3.2207,1 1114 | -4.1429,2.7749,0.68261,-0.71984,1 1115 | -2.6864,-0.097265,0.61663,0.061192,1 1116 | -1.0555,0.79459,-1.6968,-0.46768,1 1117 | -0.29858,2.4769,-2.9512,-0.66165,1 1118 | -0.49948,1.7734,-2.2469,-0.68104,1 1119 | -1.9881,0.99945,-0.28562,-0.70044,1 1120 | -1.9389,1.5706,0.045979,-1.122,1 1121 | -1.4375,-1.8624,4.026,0.55127,1 1122 | -3.1875,-7.5756,11.8678,-0.57889,1 1123 | -4.6765,-5.6636,10.969,-0.33449,1 1124 | -2.0285,3.8468,-0.63435,-1.175,1 1125 | 0.26637,0.73252,-0.67891,0.03533,1 1126 | -1.7589,-6.4624,8.4773,0.31981,1 1127 | -3.5985,-13.6593,17.6052,-2.4927,1 1128 | -3.3582,-7.2404,11.4419,-0.57113,1 1129 | -2.3629,-0.10554,1.9336,1.1358,1 1130 | -2.1802,3.3791,-1.2256,-2.6621,1 1131 | -0.40951,-0.15521,0.060545,-0.088807,1 1132 | -2.2918,-7.257,7.9597,0.9211,1 1133 | -4.0214,-12.8006,15.6199,-0.95647,1 1134 | -3.3884,-8.215,10.3315,0.98187,1 1135 | -2.0046,-0.49457,1.333,1.6543,1 1136 | -1.7063,2.7956,-2.378,-2.3491,1 1137 | -1.6386,3.3584,-1.7302,-3.5646,1 1138 | -0.41645,0.32487,-0.33617,-0.36036,1 1139 | -1.5877,-6.6072,5.8022,0.31593,1 1140 | -2.5961,-9.349,9.7942,-0.28018,1 1141 | -1.5228,-6.4789,5.7568,0.87325,1 1142 | -0.53072,-0.097265,-0.21793,1.0426,1 1143 | -0.49081,2.8452,-3.6436,-3.1004,1 1144 | -6.5773,6.8017,0.85483,-7.5344,1 1145 | -2.4621,2.7645,-0.62578,-2.8573,1 1146 | -1.3995,-1.9162,2.5154,0.59912,1 1147 | -2.3221,-9.3304,9.233,-0.79871,1 1148 | -3.73,-12.9723,12.9817,-2.684,1 1149 | -1.6988,-7.1163,5.7902,0.16723,1 1150 | -0.26654,-0.64562,-0.42014,0.89136,1 1151 | 0.33325,3.3108,-4.5081,-4.012,1 1152 | -4.2091,4.7283,-0.49126,-5.2159,1 1153 | -2.3142,-0.68494,1.9833,-0.44829,1 1154 | -2.4835,-7.4494,6.8964,-0.64484,1 1155 | -2.7611,-10.5099,9.0239,-1.9547,1 1156 | -0.36025,-4.449,2.1067,0.94308,1 1157 | 1.0117,0.9022,-2.3506,0.42714,1 1158 | 0.96708,3.8426,-4.9314,-4.1323,1 1159 | -5.2049,7.259,0.070827,-7.3004,1 1160 
| -3.3203,-0.02691,2.9618,-0.44958,1 1161 | -2.565,-5.7899,6.0122,0.046968,1 1162 | -1.5951,-6.572,4.7689,-0.94354,1 1163 | 0.7049,0.17174,-1.7859,0.36119,1 1164 | 1.7331,3.9544,-4.7412,-2.5017,1 1165 | 0.6818,4.8504,-5.2133,-6.1043,1 1166 | -6.3364,9.2848,0.014275,-6.7844,1 1167 | -3.8053,2.4273,0.6809,-1.0871,1 1168 | -2.1979,-2.1252,1.7151,0.45171,1 1169 | -0.87874,-2.2121,-0.051701,0.099985,1 1170 | 0.74067,1.7299,-3.1963,-0.1457,1 1171 | 0.98296,3.4226,-3.9692,-1.7116,1 1172 | -0.3489,3.1929,-3.4054,-3.1832,1 1173 | -3.8552,3.5219,-0.38415,-3.8608,1 1174 | -6.9599,8.9931,0.2182,-4.572,1 1175 | -4.7462,3.1205,1.075,-1.2966,1 1176 | -3.2051,-0.14279,0.97565,0.045675,1 1177 | -1.7549,-0.080711,-0.75774,-0.3707,1 1178 | -0.59587,2.4811,-2.8673,-0.89828,1 1179 | -0.89542,2.0279,-2.3652,-1.2746,1 1180 | -2.0754,1.2767,-0.64206,-1.2642,1 1181 | -3.2778,1.8023,0.1805,-2.3931,1 1182 | -2.2183,-1.254,2.9986,0.36378,1 1183 | -3.5895,-6.572,10.5251,-0.16381,1 1184 | -5.0477,-5.8023,11.244,-0.3901,1 1185 | -3.5741,3.944,-0.07912,-2.1203,1 1186 | -0.7351,1.7361,-1.4938,-1.1582,1 1187 | -2.2617,-4.7428,6.3489,0.11162,1 1188 | -4.244,-13.0634,17.1116,-2.8017,1 1189 | -4.0218,-8.304,12.555,-1.5099,1 1190 | -3.0201,-0.67253,2.7056,0.85774,1 1191 | -2.4941,3.5447,-1.3721,-2.8483,1 1192 | -0.83121,0.039307,0.05369,-0.23105,1 1193 | -2.5665,-6.8824,7.5416,0.70774,1 1194 | -4.4018,-12.9371,15.6559,-1.6806,1 1195 | -3.7573,-8.2916,10.3032,0.38059,1 1196 | -2.4725,-0.40145,1.4855,1.1189,1 1197 | -1.9725,2.8825,-2.3086,-2.3724,1 1198 | -2.0149,3.6874,-1.9385,-3.8918,1 1199 | -0.82053,0.65181,-0.48869,-0.52716,1 1200 | -1.7886,-6.3486,5.6154,0.42584,1 1201 | -2.9138,-9.4711,9.7668,-0.60216,1 1202 | -1.8343,-6.5907,5.6429,0.54998,1 1203 | -0.8734,-0.033118,-0.20165,0.55774,1 1204 | -0.70346,2.957,-3.5947,-3.1457,1 1205 | -6.7387,6.9879,0.67833,-7.5887,1 1206 | -2.7723,3.2777,-0.9351,-3.1457,1 1207 | -1.6641,-1.3678,1.997,0.52283,1 1208 | -2.4349,-9.2497,8.9922,-0.50001,1 1209 | -3.793,-12.7095,12.7957,-2.825,1 1210 | -1.9551,-6.9756,5.5383,-0.12889,1 1211 | -0.69078,-0.50077,-0.35417,0.47498,1 1212 | 0.025013,3.3998,-4.4327,-4.2655,1 1213 | -4.3967,4.9601,-0.64892,-5.4719,1 1214 | -2.456,-0.24418,1.4041,-0.45863,1 1215 | -2.62,-6.8555,6.2169,-0.62285,1 1216 | -2.9662,-10.3257,8.784,-2.1138,1 1217 | -0.71494,-4.4448,2.2241,0.49826,1 1218 | 0.6005,0.99945,-2.2126,0.097399,1 1219 | 0.61652,3.8944,-4.7275,-4.3948,1 1220 | -5.4414,7.2363,0.10938,-7.5642,1 1221 | -3.5798,0.45937,2.3457,-0.45734,1 1222 | -2.7769,-5.6967,5.9179,0.37671,1 1223 | -1.8356,-6.7562,5.0585,-0.55044,1 1224 | 0.30081,0.17381,-1.7542,0.48921,1 1225 | 1.3403,4.1323,-4.7018,-2.5987,1 1226 | 0.26877,4.987,-5.1508,-6.3913,1 1227 | -6.5235,9.6014,-0.25392,-6.9642,1 1228 | -4.0679,2.4955,0.79571,-1.1039,1 1229 | -2.564,-1.7051,1.5026,0.32757,1 1230 | -1.3414,-1.9162,-0.15538,-0.11984,1 1231 | 0.23874,2.0879,-3.3522,-0.66553,1 1232 | 0.6212,3.6771,-4.0771,-2.0711,1 1233 | -0.77848,3.4019,-3.4859,-3.5569,1 1234 | -4.1244,3.7909,-0.6532,-4.1802,1 1235 | -7.0421,9.2,0.25933,-4.6832,1 1236 | -4.9462,3.5716,0.82742,-1.4957,1 1237 | -3.5359,0.30417,0.6569,-0.2957,1 1238 | -2.0662,0.16967,-1.0054,-0.82975,1 1239 | -0.88728,2.808,-3.1432,-1.2035,1 1240 | -1.0941,2.3072,-2.5237,-1.4453,1 1241 | -2.4458,1.6285,-0.88541,-1.4802,1 1242 | -3.551,1.8955,0.1865,-2.4409,1 1243 | -2.2811,-0.85669,2.7185,0.044382,1 1244 | -3.6053,-5.974,10.0916,-0.82846,1 1245 | -5.0676,-5.1877,10.4266,-0.86725,1 1246 | -3.9204,4.0723,-0.23678,-2.1151,1 1247 | 
-1.1306,1.8458,-1.3575,-1.3806,1 1248 | -2.4561,-4.5566,6.4534,-0.056479,1 1249 | -4.4775,-13.0303,17.0834,-3.0345,1 1250 | -4.1958,-8.1819,12.1291,-1.6017,1 1251 | -3.38,-0.7077,2.5325,0.71808,1 1252 | -2.4365,3.6026,-1.4166,-2.8948,1 1253 | -0.77688,0.13036,-0.031137,-0.35389,1 1254 | -2.7083,-6.8266,7.5339,0.59007,1 1255 | -4.5531,-12.5854,15.4417,-1.4983,1 1256 | -3.8894,-7.8322,9.8208,0.47498,1 1257 | -2.5084,-0.22763,1.488,1.2069,1 1258 | -2.1652,3.0211,-2.4132,-2.4241,1 1259 | -1.8974,3.5074,-1.7842,-3.8491,1 1260 | -0.62043,0.5587,-0.38587,-0.66423,1 1261 | -1.8387,-6.301,5.6506,0.19567,1 1262 | -3,-9.1566,9.5766,-0.73018,1 1263 | -1.9116,-6.1603,5.606,0.48533,1 1264 | -1.005,0.084831,-0.2462,0.45688,1 1265 | -0.87834,3.257,-3.6778,-3.2944,1 1266 | -6.651,6.7934,0.68604,-7.5887,1 1267 | -2.5463,3.1101,-0.83228,-3.0358,1 1268 | -1.4377,-1.432,2.1144,0.42067,1 1269 | -2.4554,-9.0407,8.862,-0.86983,1 1270 | -3.9411,-12.8792,13.0597,-3.3125,1 1271 | -2.1241,-6.8969,5.5992,-0.47156,1 1272 | -0.74324,-0.32902,-0.42785,0.23317,1 1273 | -0.071503,3.7412,-4.5415,-4.2526,1 1274 | -4.2333,4.9166,-0.49212,-5.3207,1 1275 | -2.3675,-0.43663,1.692,-0.43018,1 1276 | -2.5526,-7.3625,6.9255,-0.66811,1 1277 | -3.0986,-10.4602,8.9717,-2.3427,1 1278 | -0.89809,-4.4862,2.2009,0.50731,1 1279 | 0.56232,1.0015,-2.2726,-0.0060486,1 1280 | 0.53936,3.8944,-4.8166,-4.3418,1 1281 | -5.3012,7.3915,0.029699,-7.3987,1 1282 | -3.3553,0.35591,2.6473,-0.37846,1 1283 | -2.7908,-5.7133,5.953,0.45946,1 1284 | -1.9983,-6.6072,4.8254,-0.41984,1 1285 | 0.15423,0.11794,-1.6823,0.59524,1 1286 | 1.208,4.0744,-4.7635,-2.6129,1 1287 | 0.2952,4.8856,-5.149,-6.2323,1 1288 | -6.4247,9.5311,0.022844,-6.8517,1 1289 | -3.9933,2.6218,0.62863,-1.1595,1 1290 | -2.659,-1.6058,1.3647,0.16464,1 1291 | -1.4094,-2.1252,-0.10397,-0.19225,1 1292 | 0.11032,1.9741,-3.3668,-0.65259,1 1293 | 0.52374,3.644,-4.0746,-1.9909,1 1294 | -0.76794,3.4598,-3.4405,-3.4276,1 1295 | -3.9698,3.6812,-0.60008,-4.0133,1 1296 | -7.0364,9.2931,0.16594,-4.5396,1 1297 | -4.9447,3.3005,1.063,-1.444,1 1298 | -3.5933,0.22968,0.7126,-0.3332,1 1299 | -2.1674,0.12415,-1.0465,-0.86208,1 1300 | -0.9607,2.6963,-3.1226,-1.3121,1 1301 | -1.0802,2.1996,-2.5862,-1.2759,1 1302 | -2.3277,1.4381,-0.82114,-1.2862,1 1303 | -3.7244,1.9037,-0.035421,-2.5095,1 1304 | -2.5724,-0.95602,2.7073,-0.16639,1 1305 | -3.9297,-6.0816,10.0958,-1.0147,1 1306 | -5.2943,-5.1463,10.3332,-1.1181,1 1307 | -3.8953,4.0392,-0.3019,-2.1836,1 1308 | -1.2244,1.7485,-1.4801,-1.4181,1 1309 | -2.6406,-4.4159,5.983,-0.13924,1 1310 | -4.6338,-12.7509,16.7166,-3.2168,1 1311 | -4.2887,-7.8633,11.8387,-1.8978,1 1312 | -3.3458,-0.50491,2.6328,0.53705,1 1313 | -1.1188,3.3357,-1.3455,-1.9573,1 1314 | 0.55939,-0.3104,0.18307,0.44653,1 1315 | -1.5078,-7.3191,7.8981,1.2289,1 1316 | -3.506,-12.5667,15.1606,-0.75216,1 1317 | -2.9498,-8.273,10.2646,1.1629,1 1318 | -1.6029,-0.38903,1.62,1.9103,1 1319 | -1.2667,2.8183,-2.426,-1.8862,1 1320 | -0.49281,3.0605,-1.8356,-2.834,1 1321 | 0.66365,-0.045533,-0.18794,0.23447,1 1322 | -0.72068,-6.7583,5.8408,0.62369,1 1323 | -1.9966,-9.5001,9.682,-0.12889,1 1324 | -0.97325,-6.4168,5.6026,1.0323,1 1325 | -0.025314,-0.17383,-0.11339,1.2198,1 1326 | 0.062525,2.9301,-3.5467,-2.6737,1 1327 | -5.525,6.3258,0.89768,-6.6241,1 1328 | -1.2943,2.6735,-0.84085,-2.0323,1 1329 | -0.24037,-1.7837,2.135,1.2418,1 1330 | -1.3968,-9.6698,9.4652,-0.34872,1 1331 | -2.9672,-13.2869,13.4727,-2.6271,1 1332 | -1.1005,-7.2508,6.0139,0.36895,1 1333 | 0.22432,-0.52147,-0.40386,1.2017,1 1334 | 
0.90407,3.3708,-4.4987,-3.6965,1 1335 | -2.8619,4.5193,-0.58123,-4.2629,1 1336 | -1.0833,-0.31247,1.2815,0.41291,1 1337 | -1.5681,-7.2446,6.5537,-0.1276,1 1338 | -2.0545,-10.8679,9.4926,-1.4116,1 1339 | 0.2346,-4.5152,2.1195,1.4448,1 1340 | 1.581,0.86909,-2.3138,0.82412,1 1341 | 1.5514,3.8013,-4.9143,-3.7483,1 1342 | -4.1479,7.1225,-0.083404,-6.4172,1 1343 | -2.2625,-0.099335,2.8127,0.48662,1 1344 | -1.7479,-5.823,5.8699,1.212,1 1345 | -0.95923,-6.7128,4.9857,0.32886,1 1346 | 1.3451,0.23589,-1.8785,1.3258,1 1347 | 2.2279,4.0951,-4.8037,-2.1112,1 1348 | 1.2572,4.8731,-5.2861,-5.8741,1 1349 | -5.3857,9.1214,-0.41929,-5.9181,1 1350 | -2.9786,2.3445,0.52667,-0.40173,1 1351 | -1.5851,-2.1562,1.7082,0.9017,1 1352 | -0.21888,-2.2038,-0.0954,0.56421,1 1353 | 1.3183,1.9017,-3.3111,0.065071,1 1354 | 1.4896,3.4288,-4.0309,-1.4259,1 1355 | 0.11592,3.2219,-3.4302,-2.8457,1 1356 | -3.3924,3.3564,-0.72004,-3.5233,1 1357 | -6.1632,8.7096,-0.21621,-3.6345,1 1358 | -4.0786,2.9239,0.87026,-0.65389,1 1359 | -2.5899,-0.3911,0.93452,0.42972,1 1360 | -1.0116,-0.19038,-0.90597,0.003003,1 1361 | 0.066129,2.4914,-2.9401,-0.62156,1 1362 | -0.24745,1.9368,-2.4697,-0.80518,1 1363 | -1.5732,1.0636,-0.71232,-0.8388,1 1364 | -2.1668,1.5933,0.045122,-1.678,1 1365 | -1.1667,-1.4237,2.9241,0.66119,1 1366 | -2.8391,-6.63,10.4849,-0.42113,1 1367 | -4.5046,-5.8126,10.8867,-0.52846,1 1368 | -2.41,3.7433,-0.40215,-1.2953,1 1369 | 0.40614,1.3492,-1.4501,-0.55949,1 1370 | -1.3887,-4.8773,6.4774,0.34179,1 1371 | -3.7503,-13.4586,17.5932,-2.7771,1 1372 | -3.5637,-8.3827,12.393,-1.2823,1 1373 | -2.5419,-0.65804,2.6842,1.1952,1 --------------------------------------------------------------------------------