├── Loaders.py
├── LogisticRegression.py
├── NewSolution.py
├── README.md
├── Sigmoid.py
├── Solution.py
├── iris-data.txt
└── mod-iris.txt
/Loaders.py:
--------------------------------------------------------------------------------
1 | '''
2 | Helper to read the comma-separated iris data files.
3 | '''
4 | import numpy as np
5 | 
6 | def readFile(filename, mode = False):
7 |     # mode=True prepends a constant 1 to every row (bias/intercept column)
8 |     with open(filename, 'r') as f:
9 |         data = []
10 |         labels = []
11 |         for eachLine in f:
12 |             eachLine = eachLine.strip()
13 |             if(len(eachLine) == 0 or eachLine.startswith("#")):
14 |                 continue
15 |             linedata = eachLine.split(",")
16 |             if(mode == True):
17 |                 data.append([1, float(linedata[0]), float(linedata[1]), float(linedata[2]), float(linedata[3])])
18 |             else:
19 |                 data.append([float(linedata[0]), float(linedata[1]), float(linedata[2]), float(linedata[3])])
20 |             labels.append(int(linedata[4]))
21 |     data = np.array(data)
22 |     labels = np.array(labels)
23 |     return data, labels
--------------------------------------------------------------------------------
/LogisticRegression.py:
--------------------------------------------------------------------------------
1 | '''
2 | Logistic Regression classifier class
3 | '''
4 | from __future__ import division
5 | import numpy as np
6 | import scipy
7 | import Loaders
8 | import math
9 | from scipy.optimize import fmin_bfgs
10 | # import bigfloat
11 | 
12 | class LogisticRegression:
13 |     def __init__(self, data, labels, alpha = 1, num_iters = 100, regularized = False, debug = False, normalization = 'l2'):
14 |         '''
15 |         constructor takes the training data and labels, plus the learning rate alpha, the number of gradient descent iterations, and the regularization options.
16 |         '''
17 |         self.normalization_mode = normalization
18 |         self.regularized = regularized
19 |         self.debug = debug
20 |         self.num_iters = num_iters
21 |         self.alpha = alpha
22 |         assert(len(np.unique(labels)) >= 2)
23 |         pass
24 | 
25 | 
26 |     def train(self, data, Olabels, unique_classes):
27 |         '''
28 |         train the classifier: one binary (one-vs-rest) classifier per unique label.
29 |         '''
30 |         print 'training....'
31 |         debug = self.debug
32 |         regularized = self.regularized
33 |         #print 'train regularized', regularized
34 | 
35 |         num_iters = self.num_iters
36 |         m,n = data.shape
37 | 
38 |         # map the original labels to program-friendly labels 0..(num_classes - 1)
39 |         labels = np.zeros(Olabels.shape)
40 | 
41 |         uniq_Olabel_names = np.unique(Olabels)
42 | 
43 |         uniq_label_list = range(len(uniq_Olabel_names))
44 | 
45 |         for each in zip(uniq_Olabel_names, uniq_label_list):
46 |             o_label_name = each[0]
47 |             new_label_name = each[1]
48 |             labels[np.where(Olabels == o_label_name)] = new_label_name
49 | 
50 |         labels = labels.reshape((len(labels),1))
51 |         # now the labels variable contains labels from 0 to (num_classes - 1)
52 |         #print unique_classes
53 |         num_classes = len(unique_classes)
54 | 
55 |         Init_Thetas = [] # to hold initial values of theta
56 | 
57 |         Thetas = [] # to hold final values of theta to return
58 | 
59 |         Cost_Thetas = [] # cost associated with each theta
60 | 
61 |         Cost_History_Theta = [] # contains list of varying cost thetas
62 | 
63 |         # if num_classes == 2, then Thetas will contain only 1 theta
64 |         # if num_classes > 2, then Thetas will contain num_classes thetas (one per one-vs-rest classifier)
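        # For example (illustrative): with the iris labels
        #   Olabels = [1, 1, 2, 2, 3, 3]
        # the remapping above yields
        #   labels  = [0, 0, 1, 1, 2, 2]
        # and three thetas are learnt, one per one-vs-rest classifier;
        # classify() later picks the class whose sigmoid(X*theta) score is largest.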
65 | 
66 | 
67 |         if(num_classes == 2):
68 |             theta_init = np.zeros((n,1))
69 |             Init_Thetas.append(theta_init)
70 | 
71 |             # we need only 1 theta to separate class A from class B
72 |             #local_labels = np.zeros(labels.shape)
73 |             #local_labels[np.where(labels == 2)] = 1
74 | 
75 | 
76 | 
77 | 
78 |             local_labels = labels
79 | 
80 |             assert(len(np.unique(labels)) == 2)
81 | 
82 |             assert(len(local_labels) == len(labels))
83 | 
84 |             init_theta = Init_Thetas[0]
85 | 
86 |             new_theta, final_cost = self.computeGradient(data, local_labels, init_theta)
87 | 
88 |             Thetas.append(new_theta)
89 |             Cost_Thetas.append(final_cost)
90 | 
91 |         elif(num_classes > 2):
92 |             for eachInitTheta in range(num_classes):
93 |                 theta_init = np.zeros((n,1))
94 |                 Init_Thetas.append(theta_init)
95 |                 pass
96 | 
97 |             for eachClass in range(num_classes):
98 |                 # build the local labels for this classifier:
99 |                 # the positive class is 1 and the rest are zeros
100 |                 # (it's a one vs all classifier)
101 | 
102 |                 local_labels = np.zeros(labels.shape)
103 | 
104 | 
105 |                 local_labels[np.where(labels == eachClass)] = 1
106 | 
107 | 
108 |                 # assert to make sure the local labels really are binary
109 |                 assert(len(np.unique(local_labels)) == 2)
110 |                 assert(len(local_labels) == len(labels))
111 | 
112 | 
113 |                 init_theta = Init_Thetas[eachClass]
114 | 
115 | 
116 |                 new_theta, final_cost = self.computeGradient(data, local_labels, init_theta)
117 |                 #print final_cost
118 |                 Thetas.append(new_theta)
119 |                 Cost_Thetas.append(final_cost)
120 | 
121 |         return Thetas, Cost_Thetas
122 | 
123 | 
124 |     def classify(self, data, Thetas):
125 |         '''
126 |         classify the given sample and return the single predicted class label
127 |         '''
128 |         # since it is a one vs all classifier, score with every classifier and pick the most likely,
129 |         # i.e. the one which gives the max value for sigmoid(X*theta)
130 |         debug = self.debug
131 |         assert(len(Thetas) > 0)
132 | 
133 |         if(len(Thetas) > 1):
134 |             mvals = []
135 |             for eachTheta in Thetas:
136 |                 mvals.append(self.sigmoidCalc(np.dot(data, eachTheta)))
137 | 
138 |             # pick the class with the largest score; +1 maps the 0-based index back to the original 1-based labels
139 |             return mvals.index(max(mvals))+1
140 | 
141 |         elif(len(Thetas) == 1):
142 |             # the sigmoid output is close to either 0 or 1;
143 |             # round it (threshold at 0.5) and add 1 to recover the original labels 1 and 2
144 | 
145 | 
146 | 
147 | 
148 |             cval = round(self.sigmoidCalc(np.dot(data, Thetas[0]))) + 1.0
149 |             #print 'classification output: ', cval
150 |             return cval
151 | 
152 | 
153 |     def sigmoidCalc(self, data):
154 |         '''
155 |         calculate the sigmoid of the given data
156 |         '''
157 | 
158 | 
159 |         debug = self.debug
160 |         data = np.array(data, dtype = np.longdouble)
161 | 
162 |         g = 1/(1+np.exp(-data))
163 | 
164 |         return g
165 | 
166 |     def computeCost(self, data, labels, init_theta):
167 |         '''
168 |         compute the regularized logistic regression cost for the given value of theta
169 |         '''
170 |         debug = self.debug
171 |         regularized = self.regularized
172 |         if(regularized == True):
173 |             llambda = 1.0
174 |             #print 'using llambda', llambda
175 |         else:
176 |             llambda = 0
177 | 
178 |         m,n = data.shape
179 | 
180 |         J = 0
181 | 
182 | 
183 |         # exclude the bias term (theta[0]) from regularization
184 |         theta2 = init_theta[range(1,init_theta.shape[0]),:]
185 |         if(self.normalization_mode == "l1"):
186 |             regularized_parameter = np.dot(llambda/(2*m), np.sum( np.abs(theta2)))
187 | 
188 | 
189 | 
190 |         else:
191 |             # l2 normalization (the default)
192 |             regularized_parameter = np.dot(llambda/(2*m), np.sum( theta2 * theta2))
193 | 
194 | 
195 | 
196 | 
197 | 
198 |         J = (-1.0/m) * ( np.sum( np.log(self.sigmoidCalc( np.dot(data, init_theta))) * labels + ( np.log( 1 - self.sigmoidCalc(np.dot(data, init_theta)) ) * ( 1 - labels ) )))
199 | 
200 |         J = J + regularized_parameter
201 |         #print 'llambda, regularized parameter: ', llambda, regularized_parameter
202 |         return J
203 | 
204 |     def computeGradient(self, data, labels, init_theta):
205 |         alpha = self.alpha
206 |         debug = self.debug
207 |         num_iters = self.num_iters
208 |         m,n = data.shape
209 |         regularized = self.regularized
210 | 
211 |         #print 'incoming regularized', regularized
212 |         if(regularized == True):
213 |             llambda = 1
214 |         else:
215 |             llambda = 0
216 | 
217 |         for eachIteration in range(num_iters):
218 |             cost = self.computeCost(data, labels, init_theta)
219 |             if(debug):
220 |                 print 'iteration: ', eachIteration
221 |                 print 'cost: ', cost
222 | 
223 |             # compute the gradient, vectorized: grad = (1/m) * X' * (sigmoid(X*theta) - y)
224 | 
225 |             B = self.sigmoidCalc(np.dot(data, init_theta)) - labels
226 | 
227 |             A = (1/m)*np.transpose(data)
228 | 
229 |             grad = np.dot(A,B)
230 | 
231 | 
232 |             A = (self.sigmoidCalc(np.dot(data, init_theta)) - labels)
233 |             B = data[:,0].reshape((data.shape[0],1))
234 |             # the bias term is not regularized
235 |             grad[0] = (1/m) * np.sum(A*B)
236 | 
237 | 
238 | 
239 |             # the remaining terms get the regularization contribution (llambda/m)*theta
240 |             for i in range(1, len(grad)):
241 |                 A = (self.sigmoidCalc(np.dot(data,init_theta)) - labels)
242 |                 B = (data[:,i].reshape((data[:,i].shape[0],1)))
243 |                 grad[i] = (1/m)*np.sum(A*B) + ((llambda/m)*init_theta[i])
244 | 
245 | 
246 | 
247 |             init_theta = init_theta - (np.dot((alpha/m), grad))
248 | 
249 |         return init_theta, cost
250 | 
251 |     def mapper(self):
252 |         # placeholder, currently unused
253 |         return None
--------------------------------------------------------------------------------
/NewSolution.py:
--------------------------------------------------------------------------------
1 | '''
2 | Program to test logistic regression using the scikit-learn toolkit
3 | '''
4 | from __future__ import division
5 | import numpy as np
6 | import scipy
7 | 
8 | from scipy import linalg as la
9 | import math
10 | from sklearn.linear_model import LogisticRegression
11 | import Loaders
12 | 
13 | data, labels = Loaders.readFile("iris-data.txt", True)
14 | 
15 | lrclassifier = LogisticRegression(penalty='l2', dual=False, tol=0.0001, C=10.0, fit_intercept=True, intercept_scaling=1)
16 | classifier = lrclassifier.fit(data, labels)
17 | predicted_set = classifier.predict(data)
18 | 
19 | for i in zip(labels, predicted_set):
20 |     print i[0], i[1], i[0] == i[1]
21 | 
22 | print 'Accuracy on training data: ', (np.sum(predicted_set == labels)/len(labels))*100, '%'
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | LogisticRegression
2 | ==================
3 | 
4 | This program can be used for multi-class classification problems (one vs rest classifier).
5 | This is a basic implementation of Logistic Regression.
6 | 
7 | It includes a basic implementation of batch gradient descent that can be tweaked with a custom learning rate (a usage sketch follows below).
8 | 
9 | Logistic Regression in Python.
10 | 
11 | This program works for both multi-class and 2-class classification tasks.
12 | 
13 | Things to do:
14 | ==================
15 | 1) Include support for Stochastic Gradient Descent.
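Usage
==================

A minimal sketch of training and classifying with this implementation. It mirrors Solution.py; the alpha and iteration values are illustrative, not tuned:

```python
from __future__ import division
import numpy as np
import Loaders
import LogisticRegression

# load the iris data; True prepends a column of ones for the intercept term
data, labels = Loaders.readFile("iris-data.txt", True)

# alpha = 1.0, 8000 batch gradient descent iterations, L2-regularized
lr = LogisticRegression.LogisticRegression(data, labels, 1.0, 8000, regularized=True, normalization='l2')
thetas, costs = lr.train(data, labels, np.unique(labels))

# classify the training samples and report accuracy
predicted = np.array([lr.classify(x, thetas) for x in data])
print 'Accuracy on training data: ', (np.sum(predicted == labels)/len(labels))*100, '%'
```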
16 | 
--------------------------------------------------------------------------------
/Sigmoid.py:
--------------------------------------------------------------------------------
1 | '''
2 | to compute sigmoid of an array
3 | '''
4 | from __future__ import division
5 | import numpy as np
6 | import scipy
7 | import math
8 | 
9 | def sigmoidCalc(data):
10 |     '''
11 |     calculate the sigmoid of the given data
12 |     '''
13 |     g = 1.0 / (1.0 + np.exp(-data))
14 |     return g
--------------------------------------------------------------------------------
/Solution.py:
--------------------------------------------------------------------------------
1 | '''
2 | Program to test LogisticRegression classifier
3 | '''
4 | from __future__ import division
5 | import numpy as np
6 | import scipy
7 | 
8 | import Loaders
9 | import LogisticRegression
10 | import Sigmoid
11 | 
12 | # from scipy import linalg as la
13 | # import math
14 | # from sklearn.linear_model import LogisticRegression
15 | 
16 | 
17 | ## load data
18 | ## add 1 column vector to entire data
19 | ## initialize parameters
20 | ## perform gradient descent
21 | ## {
22 | ##     until number of iterations is satisfied
23 | ##         calculate cost
24 | ##         adjust parameters
25 | ## }
26 | ## predict using final parameters
27 | ## calculate accuracy
28 | 
29 | #############################################################################################################################
30 | #################################### Multiclass or 3 class data classification #############################################
31 | #############################################################################################################################
32 | 
33 | data, labels = Loaders.readFile("iris-data.txt", True)
34 | m,n = data.shape
35 | # lr1 = LogisticRegression.LogisticRegression(data, labels, 1.0, 8000, regularized=True, normalization = 'l1')
36 | lr1 = LogisticRegression.LogisticRegression(data, labels, 1.0, 8000, regularized=True, normalization = 'l2')
37 | learntParameters, final_costs = lr1.train(data, labels, np.unique(labels))
38 | print 'Number of classes', len(np.unique(labels))
39 | print 'learntParameters (one per class): ', learntParameters
40 | print 'final_costs: ', final_costs
41 | #print len(learntParameters)
42 | classifedLabels = []
43 | for eachData in data:
44 |     classifedLabels.append(lr1.classify(eachData, learntParameters))
45 | classifedLabels = np.array(classifedLabels)
46 | 
47 | # print 'original label', 'classifedLabels'
48 | # for each in zip(labels, classifedLabels):
49 | #     print each[0], ', ', each[1], ', ', each[0] == each[1]
50 | 
51 | print 'Accuracy on training data: ', (np.sum(classifedLabels == labels)/len(labels))*100, '%'
52 | 
53 | #############################################################################################################################
54 | #################################### 2 class data classification ###########################################################
55 | #############################################################################################################################
56 | 
57 | data, labels = Loaders.readFile("mod-iris.txt", True)
58 | m,n = data.shape
59 | lr1 = LogisticRegression.LogisticRegression(data, labels, 2.0, 100)
60 | learntParameters, final_costs = lr1.train(data, labels, np.unique(labels))
61 | print 'Number of classes', len(np.unique(labels))
62 | print 'learntParameters (only 1 learnt parameter): ', learntParameters
63 | print 'final_costs: ', final_costs
64 | 
classifedLabels = [] 65 | for eachData in data: 66 | classifedLabels.append(lr1.classify(eachData, learntParameters)) 67 | classifedLabels = np.array(classifedLabels) 68 | # print 'original label', 'classifedLabels' 69 | # for each in zip(labels, classifedLabels): 70 | # print each[0],', ', each[1],', ', each[0]==each[1] 71 | 72 | print (np.sum(classifedLabels == labels)/len(labels))*100,'%' -------------------------------------------------------------------------------- /iris-data.txt: -------------------------------------------------------------------------------- 1 | 5.1,3.5,1.4,0.2,1 2 | 4.9,3.0,1.4,0.2,1 3 | 4.7,3.2,1.3,0.2,1 4 | 4.6,3.1,1.5,0.2,1 5 | 5.0,3.6,1.4,0.2,1 6 | 5.4,3.9,1.7,0.4,1 7 | 4.6,3.4,1.4,0.3,1 8 | 5.0,3.4,1.5,0.2,1 9 | 4.4,2.9,1.4,0.2,1 10 | 4.9,3.1,1.5,0.1,1 11 | 5.4,3.7,1.5,0.2,1 12 | 4.8,3.4,1.6,0.2,1 13 | 4.8,3.0,1.4,0.1,1 14 | 4.3,3.0,1.1,0.1,1 15 | 5.8,4.0,1.2,0.2,1 16 | 5.7,4.4,1.5,0.4,1 17 | 5.4,3.9,1.3,0.4,1 18 | 5.1,3.5,1.4,0.3,1 19 | 5.7,3.8,1.7,0.3,1 20 | 5.1,3.8,1.5,0.3,1 21 | 5.4,3.4,1.7,0.2,1 22 | 5.1,3.7,1.5,0.4,1 23 | 4.6,3.6,1.0,0.2,1 24 | 5.1,3.3,1.7,0.5,1 25 | 4.8,3.4,1.9,0.2,1 26 | 5.0,3.0,1.6,0.2,1 27 | 5.0,3.4,1.6,0.4,1 28 | 5.2,3.5,1.5,0.2,1 29 | 5.2,3.4,1.4,0.2,1 30 | 4.7,3.2,1.6,0.2,1 31 | 4.8,3.1,1.6,0.2,1 32 | 5.4,3.4,1.5,0.4,1 33 | 5.2,4.1,1.5,0.1,1 34 | 5.5,4.2,1.4,0.2,1 35 | 4.9,3.1,1.5,0.1,1 36 | 5.0,3.2,1.2,0.2,1 37 | 5.5,3.5,1.3,0.2,1 38 | 4.9,3.1,1.5,0.1,1 39 | 4.4,3.0,1.3,0.2,1 40 | 5.1,3.4,1.5,0.2,1 41 | 5.0,3.5,1.3,0.3,1 42 | 4.5,2.3,1.3,0.3,1 43 | 4.4,3.2,1.3,0.2,1 44 | 5.0,3.5,1.6,0.6,1 45 | 5.1,3.8,1.9,0.4,1 46 | 4.8,3.0,1.4,0.3,1 47 | 5.1,3.8,1.6,0.2,1 48 | 4.6,3.2,1.4,0.2,1 49 | 5.3,3.7,1.5,0.2,1 50 | 5.0,3.3,1.4,0.2,1 51 | 7.0,3.2,4.7,1.4,2 52 | 6.4,3.2,4.5,1.5,2 53 | 6.9,3.1,4.9,1.5,2 54 | 5.5,2.3,4.0,1.3,2 55 | 6.5,2.8,4.6,1.5,2 56 | 5.7,2.8,4.5,1.3,2 57 | 6.3,3.3,4.7,1.6,2 58 | 4.9,2.4,3.3,1.0,2 59 | 6.6,2.9,4.6,1.3,2 60 | 5.2,2.7,3.9,1.4,2 61 | 5.0,2.0,3.5,1.0,2 62 | 5.9,3.0,4.2,1.5,2 63 | 6.0,2.2,4.0,1.0,2 64 | 6.1,2.9,4.7,1.4,2 65 | 5.6,2.9,3.6,1.3,2 66 | 6.7,3.1,4.4,1.4,2 67 | 5.6,3.0,4.5,1.5,2 68 | 5.8,2.7,4.1,1.0,2 69 | 6.2,2.2,4.5,1.5,2 70 | 5.6,2.5,3.9,1.1,2 71 | 5.9,3.2,4.8,1.8,2 72 | 6.1,2.8,4.0,1.3,2 73 | 6.3,2.5,4.9,1.5,2 74 | 6.1,2.8,4.7,1.2,2 75 | 6.4,2.9,4.3,1.3,2 76 | 6.6,3.0,4.4,1.4,2 77 | 6.8,2.8,4.8,1.4,2 78 | 6.7,3.0,5.0,1.7,2 79 | 6.0,2.9,4.5,1.5,2 80 | 5.7,2.6,3.5,1.0,2 81 | 5.5,2.4,3.8,1.1,2 82 | 5.5,2.4,3.7,1.0,2 83 | 5.8,2.7,3.9,1.2,2 84 | 6.0,2.7,5.1,1.6,2 85 | 5.4,3.0,4.5,1.5,2 86 | 6.0,3.4,4.5,1.6,2 87 | 6.7,3.1,4.7,1.5,2 88 | 6.3,2.3,4.4,1.3,2 89 | 5.6,3.0,4.1,1.3,2 90 | 5.5,2.5,4.0,1.3,2 91 | 5.5,2.6,4.4,1.2,2 92 | 6.1,3.0,4.6,1.4,2 93 | 5.8,2.6,4.0,1.2,2 94 | 5.0,2.3,3.3,1.0,2 95 | 5.6,2.7,4.2,1.3,2 96 | 5.7,3.0,4.2,1.2,2 97 | 5.7,2.9,4.2,1.3,2 98 | 6.2,2.9,4.3,1.3,2 99 | 5.1,2.5,3.0,1.1,2 100 | 5.7,2.8,4.1,1.3,2 101 | 6.3,3.3,6.0,2.5,3 102 | 5.8,2.7,5.1,1.9,3 103 | 7.1,3.0,5.9,2.1,3 104 | 6.3,2.9,5.6,1.8,3 105 | 6.5,3.0,5.8,2.2,3 106 | 7.6,3.0,6.6,2.1,3 107 | 4.9,2.5,4.5,1.7,3 108 | 7.3,2.9,6.3,1.8,3 109 | 6.7,2.5,5.8,1.8,3 110 | 7.2,3.6,6.1,2.5,3 111 | 6.5,3.2,5.1,2.0,3 112 | 6.4,2.7,5.3,1.9,3 113 | 6.8,3.0,5.5,2.1,3 114 | 5.7,2.5,5.0,2.0,3 115 | 5.8,2.8,5.1,2.4,3 116 | 6.4,3.2,5.3,2.3,3 117 | 6.5,3.0,5.5,1.8,3 118 | 7.7,3.8,6.7,2.2,3 119 | 7.7,2.6,6.9,2.3,3 120 | 6.0,2.2,5.0,1.5,3 121 | 6.9,3.2,5.7,2.3,3 122 | 5.6,2.8,4.9,2.0,3 123 | 7.7,2.8,6.7,2.0,3 124 | 6.3,2.7,4.9,1.8,3 125 | 6.7,3.3,5.7,2.1,3 126 | 7.2,3.2,6.0,1.8,3 127 | 6.2,2.8,4.8,1.8,3 128 | 6.1,3.0,4.9,1.8,3 129 | 
6.4,2.8,5.6,2.1,3 130 | 7.2,3.0,5.8,1.6,3 131 | 7.4,2.8,6.1,1.9,3 132 | 7.9,3.8,6.4,2.0,3 133 | 6.4,2.8,5.6,2.2,3 134 | 6.3,2.8,5.1,1.5,3 135 | 6.1,2.6,5.6,1.4,3 136 | 7.7,3.0,6.1,2.3,3 137 | 6.3,3.4,5.6,2.4,3 138 | 6.4,3.1,5.5,1.8,3 139 | 6.0,3.0,4.8,1.8,3 140 | 6.9,3.1,5.4,2.1,3 141 | 6.7,3.1,5.6,2.4,3 142 | 6.9,3.1,5.1,2.3,3 143 | 5.8,2.7,5.1,1.9,3 144 | 6.8,3.2,5.9,2.3,3 145 | 6.7,3.3,5.7,2.5,3 146 | 6.7,3.0,5.2,2.3,3 147 | 6.3,2.5,5.0,1.9,3 148 | 6.5,3.0,5.2,2.0,3 149 | 6.2,3.4,5.4,2.3,3 150 | 5.9,3.0,5.1,1.8,3 -------------------------------------------------------------------------------- /mod-iris.txt: -------------------------------------------------------------------------------- 1 | # modified iris data 2 | 5.1,3.5,1.4,0.2,1 3 | 4.9,3.0,1.4,0.2,1 4 | 4.7,3.2,1.3,0.2,1 5 | 4.6,3.1,1.5,0.2,1 6 | 5.0,3.6,1.4,0.2,1 7 | 5.4,3.9,1.7,0.4,1 8 | 4.6,3.4,1.4,0.3,1 9 | 5.0,3.4,1.5,0.2,1 10 | 4.4,2.9,1.4,0.2,1 11 | 4.9,3.1,1.5,0.1,1 12 | 5.4,3.7,1.5,0.2,1 13 | 4.8,3.4,1.6,0.2,1 14 | 4.8,3.0,1.4,0.1,1 15 | 4.3,3.0,1.1,0.1,1 16 | 5.8,4.0,1.2,0.2,1 17 | 5.7,4.4,1.5,0.4,1 18 | 5.4,3.9,1.3,0.4,1 19 | 5.1,3.5,1.4,0.3,1 20 | 5.7,3.8,1.7,0.3,1 21 | 5.1,3.8,1.5,0.3,1 22 | 5.4,3.4,1.7,0.2,1 23 | 5.1,3.7,1.5,0.4,1 24 | 4.6,3.6,1.0,0.2,1 25 | 5.1,3.3,1.7,0.5,1 26 | 4.8,3.4,1.9,0.2,1 27 | 5.0,3.0,1.6,0.2,1 28 | 5.0,3.4,1.6,0.4,1 29 | 5.2,3.5,1.5,0.2,1 30 | 5.2,3.4,1.4,0.2,1 31 | 4.7,3.2,1.6,0.2,1 32 | 4.8,3.1,1.6,0.2,1 33 | 5.4,3.4,1.5,0.4,1 34 | 5.2,4.1,1.5,0.1,1 35 | 5.5,4.2,1.4,0.2,1 36 | 4.9,3.1,1.5,0.1,1 37 | 5.0,3.2,1.2,0.2,1 38 | 5.5,3.5,1.3,0.2,1 39 | 4.9,3.1,1.5,0.1,1 40 | 4.4,3.0,1.3,0.2,1 41 | 5.1,3.4,1.5,0.2,1 42 | 5.0,3.5,1.3,0.3,1 43 | 4.5,2.3,1.3,0.3,1 44 | 4.4,3.2,1.3,0.2,1 45 | 5.0,3.5,1.6,0.6,1 46 | 5.1,3.8,1.9,0.4,1 47 | 4.8,3.0,1.4,0.3,1 48 | 5.1,3.8,1.6,0.2,1 49 | 4.6,3.2,1.4,0.2,1 50 | 5.3,3.7,1.5,0.2,1 51 | 5.0,3.3,1.4,0.2,1 52 | 7.0,3.2,4.7,1.4,2 53 | 6.4,3.2,4.5,1.5,2 54 | 6.9,3.1,4.9,1.5,2 55 | 5.5,2.3,4.0,1.3,2 56 | 6.5,2.8,4.6,1.5,2 57 | 5.7,2.8,4.5,1.3,2 58 | 6.3,3.3,4.7,1.6,2 59 | 4.9,2.4,3.3,1.0,2 60 | 6.6,2.9,4.6,1.3,2 61 | 5.2,2.7,3.9,1.4,2 62 | 5.0,2.0,3.5,1.0,2 63 | 5.9,3.0,4.2,1.5,2 64 | 6.0,2.2,4.0,1.0,2 65 | 6.1,2.9,4.7,1.4,2 66 | 5.6,2.9,3.6,1.3,2 67 | 6.7,3.1,4.4,1.4,2 68 | 5.6,3.0,4.5,1.5,2 69 | 5.8,2.7,4.1,1.0,2 70 | 6.2,2.2,4.5,1.5,2 71 | 5.6,2.5,3.9,1.1,2 72 | 5.9,3.2,4.8,1.8,2 73 | 6.1,2.8,4.0,1.3,2 74 | 6.3,2.5,4.9,1.5,2 75 | 6.1,2.8,4.7,1.2,2 76 | 6.4,2.9,4.3,1.3,2 77 | 6.6,3.0,4.4,1.4,2 78 | 6.8,2.8,4.8,1.4,2 79 | 6.7,3.0,5.0,1.7,2 80 | 6.0,2.9,4.5,1.5,2 81 | 5.7,2.6,3.5,1.0,2 82 | 5.5,2.4,3.8,1.1,2 83 | 5.5,2.4,3.7,1.0,2 84 | 5.8,2.7,3.9,1.2,2 85 | 6.0,2.7,5.1,1.6,2 86 | 5.4,3.0,4.5,1.5,2 87 | 6.0,3.4,4.5,1.6,2 88 | 6.7,3.1,4.7,1.5,2 89 | 6.3,2.3,4.4,1.3,2 90 | 5.6,3.0,4.1,1.3,2 91 | 5.5,2.5,4.0,1.3,2 92 | 5.5,2.6,4.4,1.2,2 93 | 6.1,3.0,4.6,1.4,2 94 | 5.8,2.6,4.0,1.2,2 95 | 5.0,2.3,3.3,1.0,2 96 | 5.6,2.7,4.2,1.3,2 97 | 5.7,3.0,4.2,1.2,2 98 | 5.7,2.9,4.2,1.3,2 99 | 6.2,2.9,4.3,1.3,2 100 | 5.1,2.5,3.0,1.1,2 101 | 5.7,2.8,4.1,1.3,2 --------------------------------------------------------------------------------