├── Code
│   ├── ANN.py
│   ├── Hawk.py
│   ├── NeuralNetwork.py
│   ├── Neural_network.py
│   ├── SSA.py
│   ├── genetic.py
│   ├── main-woa.py
│   ├── main.py
│   ├── main_Hawk&WOA.py
│   ├── main_Hawk.py
│   ├── main_SSA.py
│   ├── main_WOA.py
│   ├── main_pso.py
│   ├── pso.py
│   └── testCode
├── Data Sets
│   ├── australian.csv
│   ├── banknote_authentication.csv
│   ├── blood.csv
│   ├── breast_cancer.csv
│   ├── breast_wisconsin.csv
│   ├── cancer_patient.csv
│   ├── cervical_cancer.csv
│   ├── coimbra.csv
│   ├── creditcard.csv
│   ├── label_australian.csv
│   ├── label_banknote.csv
│   ├── label_blood.csv
│   ├── label_breast.csv
│   ├── label_cancer.csv
│   ├── label_cervical.csv
│   ├── label_coimbra.csv
│   ├── label_creditcard.csv
│   ├── label_wisconsin.csv
│   └── test
├── Presentation and Graphs
│   ├── Breast_cancer.png
│   ├── E_value.png
│   ├── HHA+WOA.png
│   ├── Low_resources.png
│   ├── Project presentation.pptx
│   ├── credit_card.png
│   ├── limited_resource.png
│   ├── mean_vs_max1.png
│   ├── mean_vs_max2.png
│   ├── mean_vs_max3.png
│   ├── mean_vs_max4.png
│   ├── mean_vs_max5.png
│   └── testGraph
├── README.md
└── User Manual
    ├── User Manual.pdf
    └── test
/Code/ANN.py:
--------------------------------------------------------------------------------
1 | import numpy
2 | 
3 | def sigmoid(inpt):
4 |     return 1.0 / (1.0 + numpy.exp(-1 * inpt))
5 | 
6 | def relu(inpt):
7 |     result = inpt
8 |     result[inpt < 0] = 0
9 |     return result
10 | 
11 | # Find the min and max values for each column
12 | def dataset_minmax(dataset):
13 |     minmax = [[min(column), max(column)] for column in zip(*dataset)]
14 |     return minmax
15 | 
16 | # Rescale dataset columns into (0, 1] (the +1 offset guards against division by zero on constant columns)
17 | def normalize_dataset(dataset, minmax):
18 |     for row in dataset:
19 |         for i in range(len(row)):
20 |             row[i] = (row[i] - minmax[i][0] + 1) / (minmax[i][1] - minmax[i][0] + 1)
21 | 
22 | def predict_outputs(weights_mat, data_inputs, data_outputs, activation="relu"):
23 |     predictions = numpy.zeros(shape=(data_inputs.shape[0]))
24 |     for sample_idx in range(data_inputs.shape[0]):
25 |         r1 = data_inputs[sample_idx, :]
26 |         for curr_weights in weights_mat:
27 |             r1 = numpy.matmul(r1, curr_weights)
28 |             if activation == "relu":
29 |                 r1 = relu(r1)
30 |             elif activation == "sigmoid":
31 |                 r1 = sigmoid(r1)
32 |             elif activation == 'tanh':
33 |                 r1 = numpy.tanh(r1)  # was (1 - r1 ** 2), which is tanh's derivative, not tanh
34 |         predicted_label = numpy.where(r1 == numpy.max(r1))[0][0]
35 |         predictions[sample_idx] = predicted_label
36 |     predictions = predictions.reshape(data_outputs.shape[0], 1)
37 | 
38 |     correct_predictions = numpy.where(predictions == data_outputs)[0].size
39 |     accuracy = (correct_predictions / data_outputs.size) * 100
40 |     return accuracy, predictions
41 | 
42 | def fitness(weights_mat, data_inputs, data_outputs, activation="relu"):
43 |     accuracy = numpy.empty(shape=(weights_mat.shape[0]))
44 |     for sol_idx in range(weights_mat.shape[0]):
45 |         curr_sol_mat = weights_mat[sol_idx, :]
46 |         accuracy[sol_idx], _ = predict_outputs(curr_sol_mat, data_inputs, data_outputs, activation=activation)
47 |     return accuracy
48 | 
--------------------------------------------------------------------------------
/Code/Hawk.py:
--------------------------------------------------------------------------------
1 | import ANN
2 | import random
3 | import numpy
4 | 
5 | 
6 | def update_WOA(E, J, rand_hawk, hawk):
7 |     D = abs(J*rand_hawk - hawk)
8 |     return (rand_hawk - E*D)
9 | 
10 | def update_hawk1(hawk, rabbit, mean_hawk, rand_hawk, LB, UB):
11 |     q = random.random()
12 |     r1 = random.random()
13 |     r2 = random.random()
14 |     if(q >= 0.5):
15 |         return (rand_hawk - r1*(rand_hawk - 2 * r2 * hawk))
16 |     else:
17 |         return ((rabbit - mean_hawk) - r1*(LB + r2*(UB - LB)))
18 | 
19 | def
update_hawk2(hawk, rabbit, J, E): 20 | return ((rabbit - hawk) - E*(J*rabbit - hawk)) 21 | 22 | def update_hawk3(hawk, rabbit, E): 23 | return (rabbit - E*abs(rabbit - hawk)) #is absolute needed? 24 | 25 | def update_hawk4(hawk, rabbit, J, E, sigma, S, data_inputs, data_outputs): 26 | Y = numpy.array(rabbit - E*(abs(J*rabbit - hawk))) 27 | LF = 0.01*random.random()*sigma/(pow(random.random(),2/3)) 28 | Z = numpy.array(Y + S[0]*LF) 29 | fitness_Y = ANN.fitness(numpy.array([Y]), data_inputs,data_outputs, activation="sigmoid") 30 | fitness_Z = ANN.fitness(numpy.array([Z]), data_inputs,data_outputs, activation="sigmoid") 31 | fitness_hawk = ANN.fitness(numpy.array([hawk]), data_inputs,data_outputs, activation="sigmoid") 32 | if(fitness_Y > fitness_hawk): 33 | return Y 34 | elif(fitness_Z > fitness_hawk): 35 | return Z 36 | elif(fitness_Y > fitness_Z): 37 | return Y 38 | return Z 39 | 40 | def update_hawk5(hawk, rabbit, mean_hawk, J, E, sigma, S, data_inputs, data_outputs): 41 | Y = numpy.array(rabbit - E*(abs(J*rabbit - mean_hawk))) 42 | LF = 0.01*random.random()*sigma/(pow(random.random(),2/3)) 43 | Z = numpy.array(Y + S[0]*LF) 44 | fitness_Y = ANN.fitness(numpy.array([Y]), data_inputs,data_outputs, activation="sigmoid") 45 | fitness_Z = ANN.fitness(numpy.array([Z]), data_inputs,data_outputs, activation="sigmoid") 46 | fitness_hawk = ANN.fitness(numpy.array([hawk]), data_inputs,data_outputs, activation="sigmoid") 47 | if(fitness_Y > fitness_hawk): 48 | return Y 49 | elif(fitness_Z > fitness_hawk): 50 | return Z 51 | elif(fitness_Y > fitness_Z): 52 | return Y 53 | return Z 54 | -------------------------------------------------------------------------------- /Code/NeuralNetwork.py: -------------------------------------------------------------------------------- 1 | from random import seed 2 | import random 3 | from csv import reader 4 | from math import exp 5 | import numpy 6 | # Evaluate an algorithm using a cross validation split 7 | # def evaluate_algorithm(dataset, algorithm, n_folds, *args): 8 | # folds = cross_validation_split(dataset, n_folds) 9 | # scores = list() 10 | # for fold in folds: 11 | # train_set = list(folds) 12 | # train_set.remove(fold) 13 | # train_set = sum(train_set, []) 14 | # # for t in train_set: 15 | # # print(t) 16 | # # print() 17 | # test_set = list() 18 | # for row in fold: 19 | # row_copy = list(row) 20 | # test_set.append(row_copy) 21 | # row_copy[-1] = None 22 | # # for i in test_set: 23 | # # print(i) 24 | # predicted = algorithm(train_set, test_set, *args) 25 | # actual = [row[-1] for row in fold] 26 | # accuracy = accuracy_metric(actual, predicted) 27 | # scores.append(accuracy) 28 | # return scores 29 | 30 | # Load a CSV file 31 | def load_csv(filename): 32 | dataset = list() 33 | with open(filename, 'r') as file: 34 | csv_reader = reader(file) 35 | for row in csv_reader: 36 | if not row: 37 | continue 38 | dataset.append(row) 39 | return dataset 40 | 41 | # Convert string column to float 42 | def str_column_to_float(dataset, column): 43 | for row in dataset: 44 | row[column] = float(row[column].strip()) 45 | 46 | # Convert string column to integer 47 | def str_column_to_int(dataset, column): 48 | class_values = [row[column] for row in dataset] 49 | unique = set(class_values) 50 | lookup = dict() 51 | for i, value in enumerate(unique): 52 | lookup[value] = i 53 | for row in dataset: 54 | row[column] = lookup[row[column]] 55 | return lookup 56 | 57 | # Find the min and max values for each column 58 | def dataset_minmax(dataset): 59 | minmax = [[min(column), 
max(column)] for column in zip(*dataset)]
60 |     return minmax
61 | 
62 | # Rescale dataset columns to the range 0-1
63 | def normalize_dataset(dataset, minmax):
64 |     for row in dataset:
65 |         for i in range(len(row)-1):
66 |             row[i] = (row[i] - minmax[i][0] + 1) / (minmax[i][1] - minmax[i][0] + 1)
67 | 
68 | # Split a dataset into k folds
69 | def cross_validation_split(dataset, n_folds):
70 |     dataset_split = list()
71 |     dataset_copy = list(dataset)
72 |     fold_size = int(len(dataset) / n_folds)
73 |     for i in range(n_folds):
74 |         fold = list()
75 |         while len(fold) < fold_size:
76 |             index = random.randrange(len(dataset_copy))
77 |             fold.append(dataset_copy.pop(index))
78 |         dataset_split.append(fold)
79 |     return dataset_split
80 | 
81 | # Calculate accuracy percentage
82 | def accuracy_metric(actual, predicted):
83 |     correct = 0
84 |     for i in range(len(actual)):
85 |         if actual[i] == predicted[i]:
86 |             correct += 1
87 |     return correct / float(len(actual)) * 100.0
88 | 
89 | 
90 | def dataset_to_test(dataset):
91 |     test_set = list()
92 |     for row in dataset:
93 |         row_copy = list(row)
94 |         test_set.append(row_copy)
95 |         row_copy[-1] = None
96 |     return test_set
97 | 
98 | def cal_fitness(network, dataset):
99 |     actual = [row[-1] for row in dataset]
100 |     dataset = dataset_to_test(dataset)
101 |     prediction = list()
102 |     for row in dataset:
103 |         predicted = predict(network, row)
104 |         prediction.append(predicted)
105 |     # print(prediction)
106 |     accuracy = accuracy_metric(actual, prediction)
107 |     return accuracy
108 | 
109 | # Calculate neuron activation for an input
110 | def activate(weights, inputs):
111 |     activation = weights[-1]
112 |     for i in range(len(weights)-1):
113 |         activation += weights[i] * inputs[i]
114 |     return activation
115 | 
116 | # Transfer neuron activation
117 | def transfer(activation):
118 |     # return 1.0 / (1.0 + exp(-activation))
119 |     return numpy.tanh(activation)
120 | 
121 | # Forward propagate input to a network output
122 | def forward_propagate(network, row):
123 |     inputs = row
124 |     for layer in network:
125 |         new_inputs = []
126 |         for neuron in layer:
127 |             activation = activate(neuron['weights'], inputs)
128 |             neuron['output'] = transfer(activation)
129 |             new_inputs.append(neuron['output'])
130 |         inputs = new_inputs
131 |     return inputs
132 | 
133 | # Calculate the derivative of a neuron output
134 | def transfer_derivative(output):
135 |     return 1.0 - output ** 2  # tanh derivative; output * (1.0 - output) only matches the commented-out sigmoid transfer
136 | 
137 | # Backpropagate error and store in neurons
138 | def backward_propagate_error(network, expected):
139 |     for i in reversed(range(len(network))):
140 |         layer = network[i]
141 |         errors = list()
142 |         if i != len(network)-1:
143 |             for j in range(len(layer)):
144 |                 error = 0.0
145 |                 for neuron in network[i + 1]:
146 |                     error += (neuron['weights'][j] * neuron['delta'])
147 |                 errors.append(error)
148 |         else:
149 |             for j in range(len(layer)):
150 |                 neuron = layer[j]
151 |                 errors.append(expected[j] - neuron['output'])
152 |         for j in range(len(layer)):
153 |             neuron = layer[j]
154 |             neuron['delta'] = errors[j] * transfer_derivative(neuron['output'])
155 | 
156 | # Update network weights with error
157 | def update_weights(network, row, l_rate):
158 |     for i in range(len(network)):
159 |         inputs = row[:-1]
160 |         if i != 0:
161 |             inputs = [neuron['output'] for neuron in network[i - 1]]
162 |         for neuron in network[i]:
163 |             for j in range(len(inputs)):
164 |                 neuron['weights'][j] += l_rate * neuron['delta'] * inputs[j]
165 |             neuron['weights'][-1] += l_rate * neuron['delta']
166 | 
167 | # Train a network for a fixed number of epochs
168 | def train_network(network, train, l_rate, n_epoch, n_outputs):
169 |     for epoch in range(n_epoch):
170 |         for row in train:
171 |             outputs = forward_propagate(network, row)
172 |             expected = [0 for i in range(n_outputs)]
173 |             expected[row[-1]] = 1
174 |             backward_propagate_error(network, expected)
175 |             update_weights(network, row, l_rate)
176 | 
177 | # Initialize a network
178 | def initialize_network(n_inputs, n_hidden, n_outputs, w1, w2):
179 |     network = list()
180 |     if w1 < 0 :
181 |         prob = [0,1,0,0,1,1,0,0,1,1,1,0,0,0,1,0,0,1,0,1,0,0,1,0,0,1,1,1,0]
182 |         hidden_layer = [{'weights':[random.random()*-1 if random.choice(prob) else random.random() for j in range(n_inputs + 1)]} for k in range(n_hidden[0])]
183 |         network.append(hidden_layer)
184 |         i = 0
185 |         while(i<(len(n_hidden)-1)):
186 |             hidden_layer = [{'weights':[random.random()*-1 if random.choice(prob) else random.random() for j in range(n_hidden[i] + 1)]} for k in range(n_hidden[i+1])]
187 |             network.append(hidden_layer)
188 |             i = i + 1
189 |         output_layer = [{'weights':[random.random()*-1 if random.choice(prob) else random.random() for j in range(n_hidden[i] + 1)]} for k in range(n_outputs)]
190 |         network.append(output_layer)
191 |     else :
192 |         hidden_layer = [{'weights':[random.random() for j in range(n_inputs + 1)]} for k in range(n_hidden[0])]
193 |         network.append(hidden_layer)
194 |         i = 0
195 |         while(i<(len(n_hidden)-1)):
196 |             hidden_layer = [{'weights':[random.random() for j in range(n_hidden[i] + 1)]} for k in range(n_hidden[i+1])]
197 |             network.append(hidden_layer)
198 |             i = i + 1
199 |         output_layer = [{'weights':[random.random() for j in range(n_hidden[i] + 1)]} for k in range(n_outputs)]
200 |         network.append(output_layer)
201 |     # print(network)
202 |     # print()
203 |     # print("Hello")
204 |     # print(network[0])
205 |     return network
206 | 
207 | # Make a prediction with a network
208 | def predict(network, row):
209 |     outputs = forward_propagate(network, row)
210 |     return outputs.index(max(outputs))
211 | 
212 | # Backpropagation Algorithm With Stochastic Gradient Descent
213 | def back_propagation(train, test, l_rate, n_epoch, n_hidden):
214 |     n_inputs = len(train[0]) - 1
215 |     n_outputs = len(set([row[-1] for row in train]))
216 |     network = initialize_network(n_inputs, n_hidden, n_outputs, -1, 1)  # w1/w2 were missing from this call; -1..1 matches the weight range used by the main scripts
217 |     train_network(network, train, l_rate, n_epoch, n_outputs)
218 |     predictions = list()
219 |     for row in test:
220 |         prediction = predict(network, row)
221 |         predictions.append(prediction)
222 |     return(predictions)
223 | 
224 | # Test Backprop on Seeds dataset
225 | # seed(1)
226 | # # load and prepare data
227 | # filename = 'datasets/data_banknote_authentication.csv'
228 | # dataset = load_csv(filename)
229 | # dataset = dataset[:30]
230 | # for i in range(len(dataset[0])-1):
231 | #     str_column_to_float(dataset, i)
232 | # # convert class column to integers
233 | # str_column_to_int(dataset, len(dataset[0])-1)
234 | # # normalize input variables
235 | # minmax = dataset_minmax(dataset)
236 | # normalize_dataset(dataset, minmax)
237 | #
238 | # # evaluate algorithm
239 | # n_folds = 2
240 | # l_rate = 0.3
241 | # n_epoch = 100
242 | # n_hidden = 60
243 | # scores = evaluate_algorithm(dataset, back_propagation, n_folds, l_rate, n_epoch, n_hidden)
244 | # print('Scores: %s' % scores)
245 | # print('Mean Accuracy: %.3f%%' % (sum(scores)/float(len(scores))))
246 | 
--------------------------------------------------------------------------------
/Code/Neural_network.py:
-------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | np.random.seed(100) 4 | 5 | 6 | class Layer: 7 | 8 | def __init__(self, n_input, n_neurons, activation=None, weights=None, bias=None): 9 | 10 | self.weights = weights if weights is not None else np.random.rand(n_input, n_neurons) 11 | self.activation = activation 12 | self.bias = bias if bias is not None else np.random.rand(n_neurons) 13 | self.last_activation = None 14 | self.error = None 15 | self.delta = None 16 | 17 | def activate(self, x): 18 | 19 | r = np.dot(x, self.weights) + self.bias 20 | self.last_activation = self._apply_activation(r) 21 | return self.last_activation 22 | 23 | def _apply_activation(self, r): 24 | 25 | if self.activation is None: 26 | return r 27 | 28 | # tanh 29 | if self.activation == 'tanh': 30 | return np.tanh(r) 31 | 32 | # sigmoid 33 | if self.activation == 'sigmoid': 34 | return (1) / (1 + np.exp(-r)) 35 | 36 | return r 37 | 38 | def apply_activation_derivative(self, r): 39 | 40 | if self.activation is None: 41 | return r 42 | 43 | if self.activation == 'tanh': 44 | return 1 - r ** 2 45 | 46 | if self.activation == 'sigmoid': 47 | return r * (1 - r) 48 | 49 | return r 50 | 51 | 52 | class NeuralNetwork: 53 | 54 | def __init__(self): 55 | self._layers = [] 56 | 57 | def add_layer(self, layer): 58 | 59 | self._layers.append(layer) 60 | 61 | def feed_forward(self, X): 62 | for layer in self._layers: 63 | X = layer.activate(X) 64 | 65 | return X 66 | 67 | def predict(self, X): 68 | 69 | ff = self.feed_forward(X) 70 | 71 | # One row 72 | if ff.ndim == 1: 73 | return np.argmax(ff) 74 | 75 | # Multiple rows 76 | return np.argmax(ff, axis=1) 77 | 78 | def backpropagation(self, X, y, learning_rate): 79 | 80 | output = self.feed_forward(X) 81 | 82 | for i in reversed(range(len(self._layers))): 83 | layer = self._layers[i] 84 | 85 | if layer == self._layers[-1]: 86 | layer.error = y - output 87 | layer.delta = layer.error * layer.apply_activation_derivative(output) 88 | else: 89 | next_layer = self._layers[i + 1] 90 | layer.error = np.dot(next_layer.weights, next_layer.delta) 91 | layer.delta = layer.error * layer.apply_activation_derivative(layer.last_activation) 92 | 93 | # Update the weights 94 | for i in range(len(self._layers)): 95 | layer = self._layers[i] 96 | input_to_use = np.atleast_2d(X if i == 0 else self._layers[i - 1].last_activation) 97 | layer.weights += layer.delta * input_to_use.T * learning_rate 98 | 99 | def train(self, X, y, learning_rate, max_epochs): 100 | 101 | mses = [] 102 | 103 | for i in range(max_epochs): 104 | for j in range(len(X)): 105 | self.backpropagation(X[j], y[j], learning_rate) 106 | if i % 10 == 0: 107 | mse = np.mean(np.square(y - self.feed_forward(X))) 108 | mses.append(mse) 109 | print('Epoch: #%s, MSE: %f' % (i, float(mse))) 110 | 111 | return mses 112 | 113 | @staticmethod 114 | def accuracy(y_pred, y_true): 115 | 116 | print(y_pred,"\n",y_true) 117 | return (y_pred == y_true).mean() 118 | -------------------------------------------------------------------------------- /Code/SSA.py: -------------------------------------------------------------------------------- 1 | import random 2 | import ANN 3 | import numpy 4 | def update_leader(F,lb,ub,c1): 5 | for i in range(len(F)): 6 | for j in range(len(F[i])): 7 | c3 = random.uniform(-1,1) 8 | c2 = random.uniform(0,1) 9 | if c3 >= 0: 10 | new_f = F[i][j] + c1*((ub-lb)*c2 + lb) 11 | else: 12 | new_f = F[i][j] - c1*((ub-lb)*c2 + lb) 13 | F[i][j] = new_f 14 | return F 15 
| 16 | def update_leader_val(F,lb,ub,c1): 17 | c3 = random.uniform(-1,1) 18 | c2 = random.uniform(0,1) 19 | if c3 >= 0: 20 | F = F + c1*((ub-lb)*c2 + lb) 21 | else: 22 | F = F - c1*((ub-lb)*c2 + lb) 23 | return F 24 | 25 | 26 | def update_follower(pop_weights_mat,fitness,data_inputs,data_outputs): 27 | for i in range(len(pop_weights_mat)): 28 | if i != 0: 29 | pop_weights_mat[i] = 0.5*(pop_weights_mat[i] + pop_weights_mat[i-1]) 30 | # new_fitness = ANN.fitness(numpy.array([new_wts]),data_inputs,data_outputs,activation = 'sigmoid') 31 | # if fitness[i] < new_fitness[0]: 32 | # pop_weights_mat[i] = new_wts 33 | return pop_weights_mat 34 | 35 | def update_follower_val(pop_weights_mat,weight_range_1,weight_range_2): 36 | for i in range(len(pop_weights_mat)): 37 | if i != 0: 38 | pop_weights_mat[i] = 0.5*(pop_weights_mat[i] + pop_weights_mat[i-1]) 39 | if (pop_weights_mat[i] < weight_range_1) or (pop_weights_mat[i]>weight_range_2): 40 | pop_weights_mat[i] = random.uniform(weight_range_1,weight_range_2) 41 | return pop_weights_mat 42 | -------------------------------------------------------------------------------- /Code/genetic.py: -------------------------------------------------------------------------------- 1 | import numpy 2 | import random 3 | 4 | def mat_to_vector(mat_pop_weights): 5 | pop_weights_vector = [] 6 | for sol_idx in range(mat_pop_weights.shape[0]): 7 | curr_vector = [] 8 | for layer_idx in range(mat_pop_weights.shape[1]): 9 | vector_weights = numpy.reshape(mat_pop_weights[sol_idx, layer_idx], newshape=(mat_pop_weights[sol_idx, layer_idx].size)) 10 | curr_vector.extend(vector_weights) 11 | pop_weights_vector.append(curr_vector) 12 | return numpy.array(pop_weights_vector) 13 | 14 | def vector_to_mat(vector_pop_weights, mat_pop_weights): 15 | mat_weights = [] 16 | for sol_idx in range(mat_pop_weights.shape[0]): 17 | start = 0 18 | end = 0 19 | for layer_idx in range(mat_pop_weights.shape[1]): 20 | end = end + mat_pop_weights[sol_idx, layer_idx].size 21 | curr_vector = vector_pop_weights[sol_idx, start:end] 22 | mat_layer_weights = numpy.reshape(curr_vector, newshape=(mat_pop_weights[sol_idx, layer_idx].shape)) 23 | mat_weights.append(mat_layer_weights) 24 | start = end 25 | return numpy.reshape(mat_weights, newshape=mat_pop_weights.shape) 26 | 27 | def select_mating_pool(pop, fitness, num_parents): 28 | parents = numpy.empty((num_parents, pop.shape[1])) 29 | for parent_num in range(num_parents): 30 | max_fitness_idx = numpy.where(fitness == numpy.max(fitness)) 31 | max_fitness_idx = max_fitness_idx[0][0] 32 | parents[parent_num, :] = pop[max_fitness_idx, :] 33 | fitness[max_fitness_idx] = -99999999999 34 | return parents 35 | 36 | def crossover(parents, offspring_size): 37 | offspring = numpy.empty(offspring_size) 38 | crossover_point = numpy.uint32(offspring_size[1]/2) 39 | for k in range(offspring_size[0]): 40 | parent1_idx = k%parents.shape[0] 41 | parent2_idx = (k+1)%parents.shape[0] 42 | offspring[k, 0:crossover_point] = parents[parent1_idx, 0:crossover_point] 43 | offspring[k, crossover_point:] = parents[parent2_idx, crossover_point:] 44 | return offspring 45 | 46 | 47 | def mutation(offspring_crossover, mutation_percent): 48 | num_mutations = numpy.uint32((mutation_percent*offspring_crossover.shape[1])/100) 49 | mutation_indices = numpy.array(random.sample(range(0, offspring_crossover.shape[1]), num_mutations)) 50 | for idx in range(offspring_crossover.shape[0]): 51 | random_value = numpy.random.uniform(-1.0, 1.0, 1) 52 | offspring_crossover[idx, mutation_indices] = 
offspring_crossover[idx, mutation_indices] + random_value 53 | return offspring_crossover 54 | -------------------------------------------------------------------------------- /Code/main-woa.py: -------------------------------------------------------------------------------- 1 | import numpy 2 | import genetic as ga 3 | import ANN 4 | import csv 5 | import Neural_network as neural 6 | import random 7 | from sklearn.model_selection import train_test_split 8 | import matplotlib as plt 9 | import SSA 10 | import time 11 | import math 12 | 13 | def best(fitness): 14 | max_fitness = 0 15 | pos = -1 16 | for i in range(len(fitness)): 17 | if fitness[i] > max_fitness: 18 | max_fitness = fitness[i] 19 | pos = i 20 | return pos 21 | 22 | def Calculate_mean(pop_weights_mat): 23 | mean = pop_weights_mat[0] 24 | for i in range(1,len(pop_weights_mat)): 25 | mean += pop_weights_mat[i] 26 | return mean/len(pop_weights_mat) 27 | 28 | 29 | def load_data(): 30 | data_inputs = [] 31 | with open('datasets/blood.csv','r') as csvfile: 32 | rows = csv.reader(csvfile) 33 | for row in rows: 34 | row = numpy.array(row,dtype = float) 35 | data_inputs.append(row) 36 | data_outputs = [] 37 | with open('datasets/label_blood.csv','r') as csvfile: 38 | rows = csv.reader(csvfile) 39 | for row in rows: 40 | row = numpy.array(row,dtype = float) 41 | data_outputs.append(row) 42 | return data_inputs,data_outputs 43 | 44 | data_inputs, data_outputs = load_data() 45 | minmax = ANN.dataset_minmax(data_inputs) 46 | ANN.normalize_dataset(data_inputs, minmax) 47 | 48 | data_inputs = numpy.array(data_inputs) 49 | print(data_inputs.shape) 50 | data_outputs = numpy.array(data_outputs) 51 | data_inputs, X, data_outputs, y = train_test_split(data_inputs, data_outputs,test_size = 0.2, random_state = 1) 52 | 53 | # print(data_inputs.shape) 54 | 55 | sol_per_pop = 12 56 | num_parents_mating = 6 57 | num_generations = 200 58 | mutation_percent = 20 59 | 60 | HL1_neurons = data_inputs.shape[1] * 5 61 | HL2_neurons = int(data_inputs.shape[1]) 62 | output_neurons = 2 63 | 64 | weight_range_1 = -1 65 | weight_range_2 = 1 66 | 67 | initial_pop_weights = [] 68 | for curr_sol in numpy.arange(0, sol_per_pop): 69 | 70 | input_HL1_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(data_inputs.shape[1], HL1_neurons)) 71 | HL1_HL2_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL1_neurons, HL2_neurons)) 72 | # HL2_HL3_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL2_neurons, HL3_neurons)) 73 | HL2_output_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL2_neurons, output_neurons)) 74 | initial_pop_weights.append(numpy.array([input_HL1_weights, HL1_HL2_weights , HL2_output_weights])) 75 | 76 | pop_weights_mat = numpy.array(initial_pop_weights) 77 | pop_weights_vector = ga.mat_to_vector(pop_weights_mat) 78 | mean_finess = [] 79 | best_outputs = [] 80 | accuracies = {} 81 | F = pop_weights_mat[0] 82 | F_acc = 0 83 | for generation in range(100): 84 | print("Generation : ", generation) 85 | fitness = ANN.fitness(pop_weights_mat, data_inputs,data_outputs, activation="tanh") 86 | fitness = numpy.array(fitness) 87 | mean_finess.append(Calculate_mean(fitness)) 88 | pop_weights_mat = numpy.array(pop_weights_mat) 89 | indices = fitness.argsort() 90 | pop_weights_mat = pop_weights_mat[indices] 91 | pop_weights_mat = pop_weights_mat[::-1] 92 | fitness = numpy.sort(fitness) 93 | fitness = fitness[::-1] 94 | F = pop_weights_mat[0] 95 | c1 = 
2*math.exp(-(pow((4*(generation+1)/num_generations),2))) 96 | pop_weights_mat[0] = SSA.update_leader(F,weight_range_1,weight_range_2,c1) 97 | pop_weights_mat = SSA.update_follower(pop_weights_mat, fitness,data_inputs,data_outputs) 98 | accuracies[generation] = fitness 99 | print("Fitness") 100 | print(fitness) 101 | 102 | 103 | for generation in range(100): 104 | print("Generation : ", generation) 105 | pop_weights_mat = ga.vector_to_mat(pop_weights_vector, pop_weights_mat) 106 | 107 | fitness = ANN.fitness(pop_weights_mat, data_inputs,data_outputs, activation="sigmoid") 108 | mean_finess.append(Calculate_mean(fitness)) 109 | 110 | print("Fitness") 111 | print(fitness) 112 | 113 | parents = ga.select_mating_pool(pop_weights_vector, fitness.copy(), num_parents_mating) 114 | 115 | offspring_crossover = ga.crossover(parents, offspring_size=(pop_weights_vector.shape[0]-parents.shape[0], pop_weights_vector.shape[1])) 116 | 117 | offspring_mutation = ga.mutation(offspring_crossover, mutation_percent=mutation_percent) 118 | 119 | pop_weights_vector[0:parents.shape[0], :] = parents 120 | pop_weights_vector[parents.shape[0]:, :] = offspring_mutation 121 | 122 | 123 | import matplotlib.pyplot as plt 124 | 125 | plt.plot(mean_finess) 126 | plt.show() 127 | pop_weights_mat = ga.vector_to_mat(pop_weights_vector, pop_weights_mat) 128 | best_weights = pop_weights_mat [0, :] 129 | 130 | acc, predictions = ANN.predict_outputs(best_weights, X, y, activation="sigmoid") 131 | print(acc) 132 | print(predictions) 133 | print(y) 134 | -------------------------------------------------------------------------------- /Code/main.py: -------------------------------------------------------------------------------- 1 | import numpy 2 | import genetic as ga 3 | import ANN 4 | import csv 5 | import Neural_network as neural 6 | from sklearn.model_selection import train_test_split 7 | import matplotlib as plt 8 | 9 | 10 | 11 | def load_data(): 12 | data_inputs = [] 13 | with open('datasets/blood.csv','r') as csvfile: 14 | rows = csv.reader(csvfile) 15 | for row in rows: 16 | row = numpy.array(row,dtype = float) 17 | data_inputs.append(row) 18 | data_outputs = [] 19 | with open('datasets/label_blood.csv','r') as csvfile: 20 | rows = csv.reader(csvfile) 21 | for row in rows: 22 | row = numpy.array(row,dtype = float) 23 | data_outputs.append(row) 24 | return data_inputs,data_outputs 25 | 26 | data_inputs, data_outputs = load_data() 27 | minmax = ANN.dataset_minmax(data_inputs) 28 | ANN.normalize_dataset(data_inputs, minmax) 29 | 30 | data_inputs = numpy.array(data_inputs) 31 | print(data_inputs.shape) 32 | data_outputs = numpy.array(data_outputs) 33 | data_inputs, X, data_outputs, y = train_test_split(data_inputs, data_outputs,test_size = 0.2, random_state = 1) 34 | 35 | # print(data_inputs.shape) 36 | 37 | sol_per_pop = 12 38 | num_parents_mating = 6 39 | num_generations = 200 40 | mutation_percent = 20 41 | 42 | HL1_neurons = data_inputs.shape[1] * 5 43 | HL2_neurons = int(data_inputs.shape[1]) 44 | output_neurons = 2 45 | 46 | weight_range_1 = -1 47 | weight_range_2 = 1 48 | 49 | initial_pop_weights = [] 50 | for curr_sol in numpy.arange(0, sol_per_pop): 51 | 52 | input_HL1_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(data_inputs.shape[1], HL1_neurons)) 53 | HL1_HL2_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL1_neurons, HL2_neurons)) 54 | # HL2_HL3_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL2_neurons, HL3_neurons)) 55 
| HL2_output_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL2_neurons, output_neurons))
56 |     initial_pop_weights.append(numpy.array([input_HL1_weights, HL1_HL2_weights , HL2_output_weights]))
57 | 
58 | pop_weights_mat = numpy.array(initial_pop_weights)
59 | pop_weights_vector = ga.mat_to_vector(pop_weights_mat)
60 | 
61 | best_outputs = []
62 | accuracies = numpy.empty(shape=(num_generations))
63 | 
64 | for generation in range(num_generations):
65 |     print("Generation : ", generation)
66 |     pop_weights_mat = ga.vector_to_mat(pop_weights_vector, pop_weights_mat)
67 | 
68 |     fitness = ANN.fitness(pop_weights_mat, data_inputs,data_outputs, activation="relu")
69 | 
70 |     accuracies[generation] = numpy.max(fitness)  # was commented out, leaving accuracies uninitialized
71 |     if generation >= 20 and accuracies[generation] == accuracies[generation-20]:  # stop if the best fitness has not improved in 20 generations
72 |         break
73 |     print("Fitness")
74 |     print(fitness)
75 | 
76 |     parents = ga.select_mating_pool(pop_weights_vector, fitness.copy(), num_parents_mating)
77 | 
78 |     offspring_crossover = ga.crossover(parents, offspring_size=(pop_weights_vector.shape[0]-parents.shape[0], pop_weights_vector.shape[1]))
79 | 
80 |     offspring_mutation = ga.mutation(offspring_crossover, mutation_percent=mutation_percent)
81 | 
82 |     pop_weights_vector[0:parents.shape[0], :] = parents
83 |     pop_weights_vector[parents.shape[0]:, :] = offspring_mutation
84 | 
85 | pop_weights_mat = ga.vector_to_mat(pop_weights_vector, pop_weights_mat)
86 | best_weights = pop_weights_mat [0, :]
87 | 
88 | acc, predictions = ANN.predict_outputs(best_weights, X, y, activation="relu")
89 | print(acc)
90 | print(predictions)
91 | print(y)
92 |
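The GA phase above manipulates the networks as flat gene vectors: genetic.mat_to_vector concatenates each solution's per-layer weight matrices into one row, and vector_to_mat later reshapes that row back using the population's layer shapes as a template. A standalone numpy sketch of the same round trip (the 3-4-2 layer sizes here are illustrative only, not the script's real dimensions):

import numpy

w1 = numpy.random.uniform(-1, 1, (3, 4))            # input -> hidden weights
w2 = numpy.random.uniform(-1, 1, (4, 2))            # hidden -> output weights
vec = numpy.concatenate([w1.ravel(), w2.ravel()])   # 20 "genes" for crossover/mutation
w1_back = vec[:w1.size].reshape(w1.shape)           # shapes recovered from the originals
w2_back = vec[w1.size:].reshape(w2.shape)
assert (w1_back == w1).all() and (w2_back == w2).all()

Crossover and mutation only ever see the flat vector, which is why main.py converts back with ga.vector_to_mat at the top of every generation.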
--------------------------------------------------------------------------------
/Code/main_Hawk&WOA.py:
--------------------------------------------------------------------------------
1 | import numpy
2 | import ANN
3 | import csv
4 | import random
5 | import Hawk
6 | from sklearn.model_selection import train_test_split
7 | 
8 | def best(fitness):
9 |     max_fitness = 0
10 |     pos = -1
11 |     for i in range(len(fitness)):
12 |         if fitness[i] > max_fitness:
13 |             max_fitness = fitness[i]
14 |             pos = i
15 |     return pos
16 | 
17 | def Calculate_mean(pop_weights_mat):
18 |     mean = pop_weights_mat[0]
19 |     for i in range(1,len(pop_weights_mat)):
20 |         mean += pop_weights_mat[i]
21 |     return mean/len(pop_weights_mat)
22 | 
23 | def load_data():
24 |     data_inputs = []
25 |     with open('datasets/cancer_patient.csv','r') as csvfile:
26 |         rows = csv.reader(csvfile)
27 |         for row in rows:
28 |             row = numpy.array(row,dtype = float)
29 |             data_inputs.append(row)
30 |     # data_output = [row[-1] for row in data_inputs]
31 |     # data_input = [row[:-1] for row in data_inputs]
32 |     data_outputs = []
33 |     with open('datasets/label_cancer.csv','r') as csvfile:
34 |         rows = csv.reader(csvfile)
35 |         for row in rows:
36 |             row = numpy.array(row,dtype = float)
37 |             data_outputs.append(row)
38 |     return data_inputs,data_outputs
39 | 
40 | data_inputs, data_outputs = load_data()
41 | 
42 | minmax = ANN.dataset_minmax(data_inputs)
43 | ANN.normalize_dataset(data_inputs, minmax)
44 | data_inputs = numpy.array(data_inputs)
45 | data_outputs = numpy.array(data_outputs)
46 | data_inputs, X, data_outputs, y = train_test_split(data_inputs, data_outputs,test_size = 0.15, random_state = 1)
47 | 
48 | print(data_inputs.shape)
49 | 
50 | sol_per_pop = 12
51 | num_generations = 100
52 | 
53 | 
54 | # HL1_neurons = data_inputs.shape[1] * 2 #for cancer_patient
55 | # HL2_neurons = int(data_inputs.shape[1]/2)
56 | # output_neurons = 3
57 | 
58 | HL1_neurons = data_inputs.shape[1] * 2 #for cancer_patient
59 | HL2_neurons = int(data_inputs.shape[1]/2)
60 | output_neurons = 3
61 | 
62 | weight_range_1 = -1
63 | weight_range_2 = 1
64 | 
65 | initial_pop_weights = []
66 | for curr_sol in numpy.arange(0, sol_per_pop):
67 |     input_HL1_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(data_inputs.shape[1], HL1_neurons))
68 |     HL1_HL2_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL1_neurons, HL2_neurons))
69 |     HL2_output_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL2_neurons, output_neurons))
70 |     initial_pop_weights.append(numpy.array([input_HL1_weights, HL1_HL2_weights, HL2_output_weights]))
71 | 
72 | pop_weights_mat = numpy.array(initial_pop_weights)
73 | 
74 | fitness = ANN.fitness(pop_weights_mat, data_inputs,data_outputs, activation="tanh")
75 | best_index = best(fitness)
76 | rabbit = pop_weights_mat[best_index]
77 | E_value = []
78 | best_result = []
79 | best_pop = []
80 | mean_fitness = []
81 | max_f = []
82 | accuracies = numpy.empty(shape=(num_generations))
83 | sigma = pow(((1.33*.707)/(.9064*1.5*pow(2,.25))),2/3) #Levy flights
84 | for generation in range(num_generations):
85 |     print("Generation : ", generation)
86 |     fitness = ANN.fitness(pop_weights_mat, data_inputs,data_outputs, activation="tanh")
87 |     print(fitness)
88 |     best_index = best(fitness)
89 |     max_f.append(fitness[best_index])
90 |     rabbit = pop_weights_mat[best_index]
91 | 
92 |     accuracies[generation] = fitness[best_index]
93 |     Mean_pop_weight = Calculate_mean(pop_weights_mat)
94 |     mean_fitness.append(Calculate_mean(fitness))
95 |     for i in range(len(pop_weights_mat)):
96 |         if i == best_index:
97 |             continue
98 |         input_HL1_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(data_inputs.shape[1], HL1_neurons))
99 |         HL1_HL2_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL1_neurons, HL2_neurons))
100 |         HL2_output_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL2_neurons, output_neurons))
101 |         S = []
102 |         S.append(numpy.array([input_HL1_weights, HL1_HL2_weights, HL2_output_weights]))
103 |         S = numpy.array(S)
104 |         p = random.random()
105 |         base_E = 2*random.random() - 1
106 |         J = 2*(1 - random.random())
107 |         r = random.random()
108 |         E = 2*base_E*(1 - generation/num_generations)
109 |         E_value.append(E)
110 |         if abs(E) >= 1:
111 |             if p < 0.3:
112 |                 rand_hawk_index = random.randrange(0,sol_per_pop)
113 |                 while(rand_hawk_index == i):  # was !=, which kept redrawing until it picked hawk i itself
114 |                     rand_hawk_index = random.randrange(0,sol_per_pop)
115 |                 rand_hawk = pop_weights_mat[rand_hawk_index]
116 |                 pop_weights_mat[i] = Hawk.update_hawk1(pop_weights_mat[i], rabbit, Mean_pop_weight, rand_hawk, weight_range_1, weight_range_2) #update using eq 1
117 |             else:
118 |                 rand_hawk_index = random.randrange(0,sol_per_pop)
119 |                 while(rand_hawk_index == i):  # was !=
120 |                     rand_hawk_index = random.randrange(0,sol_per_pop)
121 |                 rand_hawk = pop_weights_mat[rand_hawk_index]
122 |                 pop_weights_mat[i] = Hawk.update_WOA(E, J, S, pop_weights_mat[i])
123 |         else:
124 |             if r >= .5 and abs(E) >= .5:
125 |                 pop_weights_mat[i] = Hawk.update_hawk2(pop_weights_mat[i], rabbit, J, E) #update using eq 4
126 |             elif r >= .5 and abs(E) < .5:
127 |                 pop_weights_mat[i] = Hawk.update_hawk3(pop_weights_mat[i], rabbit, E) #update using eq6
128 |             elif r < .5 and abs(E) >= .5:
129 |                 pop_weights_mat[i] = Hawk.update_hawk4(pop_weights_mat[i], rabbit, J, E, sigma, S, data_inputs, data_outputs) #update using eq10
130 |             else:
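                # HHO "hard besiege with progressive rapid dives" (the eq. 11 case: |E| < 0.5, r < 0.5):
                # update_hawk5 dives toward the rabbit relative to the population mean, tries a
                # Levy-flight jump built from sigma and the fresh random solution S, and keeps
                # whichever candidate position scores the better fitness.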
pop_weights_mat[i] = Hawk.update_hawk5(pop_weights_mat[i], rabbit, Mean_pop_weight, J, E, sigma, S, data_inputs, data_outputs) #update using eq11 132 | 133 | fitness = ANN.fitness(pop_weights_mat, data_inputs,data_outputs, activation="tanh") 134 | accuracies[generation] = fitness[0] 135 | print(fitness) 136 | best_index = best(fitness) 137 | rabbit = pop_weights_mat[best_index] 138 | 139 | import matplotlib.pyplot as plt 140 | 141 | plt.plot(E_value) 142 | plt.show() 143 | x = [i for i in range(0,100)] 144 | fig, ax = plt.subplots() 145 | ax.plot(x,mean_fitness) 146 | ax.plot(x, max_f) 147 | plt.show() 148 | best_weights = rabbit 149 | acc, predictions = ANN.predict_outputs(best_weights, X, y, activation="tanh") 150 | print(acc) 151 | -------------------------------------------------------------------------------- /Code/main_Hawk.py: -------------------------------------------------------------------------------- 1 | import numpy 2 | import ANN 3 | import csv 4 | import random 5 | import Hawk 6 | from sklearn.model_selection import train_test_split 7 | 8 | def best(fitness): 9 | max_fitness = 0 10 | pos = -1 11 | for i in range(len(fitness)): 12 | if fitness[i] > max_fitness: 13 | max_fitness = fitness[i] 14 | pos = i 15 | return pos 16 | 17 | def Calculate_mean(pop_weights_mat): 18 | mean = pop_weights_mat[0] 19 | for i in range(1,len(pop_weights_mat)): 20 | mean += pop_weights_mat[i] 21 | return mean/len(pop_weights_mat) 22 | 23 | def load_data(): 24 | data_inputs = [] 25 | with open('datasets/breast_wisconsin.csv','r') as csvfile: 26 | rows = csv.reader(csvfile) 27 | for row in rows: 28 | row = numpy.array(row,dtype = float) 29 | data_inputs.append(row) 30 | # data_output = [row[-1] for row in data_inputs] 31 | # data_input = [row[:-1] for row in data_inputs] 32 | data_outputs = [] 33 | with open('datasets/label_wisconsin.csv','r') as csvfile: 34 | rows = csv.reader(csvfile) 35 | for row in rows: 36 | row = numpy.array(row,dtype = float) 37 | data_outputs.append(row) 38 | return data_inputs,data_outputs 39 | 40 | data_inputs, data_outputs = load_data() 41 | 42 | minmax = ANN.dataset_minmax(data_inputs) 43 | ANN.normalize_dataset(data_inputs, minmax) 44 | data_inputs = numpy.array(data_inputs) 45 | data_outputs = numpy.array(data_outputs) 46 | data_inputs, X, data_outputs, y = train_test_split(data_inputs, data_outputs,test_size = 0.15, random_state = 1) 47 | 48 | print(data_inputs.shape) 49 | 50 | sol_per_pop = 12 51 | num_generations = 100 52 | 53 | # HL1_neurons = data_inputs.shape[1] * 2 #for cancer_patient 54 | # HL2_neurons = int(data_inputs.shape[1]/2) 55 | # output_neurons = 3 56 | 57 | HL1_neurons = data_inputs.shape[1] * 2 #for cancer_patient 58 | HL2_neurons = int(data_inputs.shape[1]/2) 59 | output_neurons = 2 60 | 61 | weight_range_1 = -1 62 | weight_range_2 = 1 63 | 64 | initial_pop_weights = [] 65 | for curr_sol in numpy.arange(0, sol_per_pop): 66 | input_HL1_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(data_inputs.shape[1], HL1_neurons)) 67 | HL1_HL2_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL1_neurons, HL2_neurons)) 68 | HL2_output_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL2_neurons, output_neurons)) 69 | initial_pop_weights.append(numpy.array([input_HL1_weights, HL1_HL2_weights, HL2_output_weights])) 70 | 71 | pop_weights_mat = numpy.array(initial_pop_weights) 72 | 73 | fitness = ANN.fitness(pop_weights_mat, data_inputs,data_outputs, activation="tanh") 74 | 
best_index = best(fitness)
75 | rabbit = pop_weights_mat[best_index]
76 | E_value = []
77 | best_result = []
78 | best_pop = []
79 | mean_fitness = []
80 | max_f = []
81 | accuracies = numpy.empty(shape=(num_generations))
82 | sigma = pow(((1.33*.707)/(.9064*1.5*pow(2,.25))),2/3)
83 | for generation in range(num_generations):
84 |     print("Generation : ", generation)
85 |     fitness = ANN.fitness(pop_weights_mat, data_inputs,data_outputs, activation="tanh")
86 |     print(fitness)
87 |     best_index = best(fitness)
88 |     max_f.append(fitness[best_index])
89 |     rabbit = pop_weights_mat[best_index]
90 | 
91 |     accuracies[generation] = fitness[best_index]
92 |     Mean_pop_weight = Calculate_mean(pop_weights_mat)
93 |     mean_fitness.append(Calculate_mean(fitness))
94 |     for i in range(len(pop_weights_mat)):
95 |         if i == best_index:
96 |             continue
97 |         input_HL1_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(data_inputs.shape[1], HL1_neurons))
98 |         HL1_HL2_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL1_neurons, HL2_neurons))
99 |         HL2_output_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL2_neurons, output_neurons))
100 |         S = []
101 |         S.append(numpy.array([input_HL1_weights, HL1_HL2_weights, HL2_output_weights]))
102 |         S = numpy.array(S)
103 |         base_E = 2*random.random() - 1
104 |         J = 2*(1 - random.random())
105 |         r = random.random()
106 |         E = 2*base_E*(1 - generation/num_generations)
107 |         E_value.append(E)
108 |         if abs(E) >= 1:
109 |             rand_hawk_index = random.randrange(0,sol_per_pop)
110 |             while(rand_hawk_index == i):  # was !=, which kept redrawing until it picked hawk i itself
111 |                 rand_hawk_index = random.randrange(0,sol_per_pop)
112 |             rand_hawk = pop_weights_mat[rand_hawk_index]
113 |             pop_weights_mat[i] = Hawk.update_hawk1(pop_weights_mat[i], rabbit, Mean_pop_weight, rand_hawk, weight_range_1, weight_range_2) #update using eq 1
114 |         else:
115 |             if r >= .5 and abs(E) >= .5:
116 |                 pop_weights_mat[i] = Hawk.update_hawk2(pop_weights_mat[i], rabbit, J, E) #update using eq 4
117 |             elif r >= .5 and abs(E) < .5:
118 |                 pop_weights_mat[i] = Hawk.update_hawk3(pop_weights_mat[i], rabbit, E) #update using eq6
119 |             elif r < .5 and abs(E) >= .5:
120 |                 pop_weights_mat[i] = Hawk.update_hawk4(pop_weights_mat[i], rabbit, J, E, sigma, S, data_inputs, data_outputs) #update using eq10
121 |             else:
122 |                 pop_weights_mat[i] = Hawk.update_hawk5(pop_weights_mat[i], rabbit, Mean_pop_weight, J, E, sigma, S, data_inputs, data_outputs) #update using eq11
123 | 
124 | 
125 | 
126 | import matplotlib.pyplot as plt
127 | 
128 | plt.plot(E_value)
129 | plt.show()
130 | x = [i for i in range(0,100)]
131 | fig, ax = plt.subplots()
132 | ax.plot(x,mean_fitness)
133 | ax.plot(x, max_f)
134 | plt.show()
135 | best_weights = rabbit
136 | acc, predictions = ANN.predict_outputs(best_weights, X, y, activation="tanh")
137 | # print(fitness[best_index])
138 | print(acc)
--------------------------------------------------------------------------------
/Code/main_SSA.py:
--------------------------------------------------------------------------------
1 | import numpy
2 | import genetic as ga
3 | import ANN
4 | import csv
5 | import math
6 | import time
7 | import SSA
8 | import Neural_network as neural
9 | from sklearn.model_selection import train_test_split
10 | from sklearn.metrics import f1_score
11 | 
12 | 
13 | 
14 | def load_data():
15 |     data_inputs = []
16 |     with open('datasets/dataR2.csv','r') as csvfile:
17 |         rows = csv.reader(csvfile)
18 |         for row in rows:
19 |             row = numpy.array(row,dtype = float)
20 |
data_inputs.append(row) 21 | data_outputs = [] 22 | with open('datasets/label_dataR2.csv','r') as csvfile: 23 | rows = csv.reader(csvfile) 24 | for row in rows: 25 | row = numpy.array(row,dtype = float) 26 | data_outputs.append(row) 27 | return data_inputs,data_outputs 28 | 29 | data_inputs, data_outputs = load_data() 30 | minmax = ANN.dataset_minmax(data_inputs) 31 | ANN.normalize_dataset(data_inputs, minmax) 32 | 33 | data_inputs = numpy.array(data_inputs) 34 | data_outputs = numpy.array(data_outputs) 35 | inp = data_inputs 36 | out = data_outputs 37 | data_inputs, X, data_outputs, y = train_test_split(data_inputs, data_outputs,test_size = 0.15, random_state = 0) 38 | 39 | sol_per_pop = 12 40 | num_generations = 200 41 | 42 | HL1_neurons = data_inputs.shape[1] * 2 43 | HL2_neurons = int(data_inputs.shape[1] / 2) 44 | output_neurons = 2 45 | 46 | weight_range_1 = -1 47 | weight_range_2 = 1 48 | 49 | initial_pop_weights = [] 50 | for curr_sol in numpy.arange(0, sol_per_pop): 51 | 52 | input_HL1_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(data_inputs.shape[1], HL1_neurons)) 53 | HL1_HL2_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL1_neurons, HL2_neurons)) 54 | HL2_output_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL2_neurons, output_neurons)) 55 | initial_pop_weights.append(numpy.array([input_HL1_weights, HL1_HL2_weights, HL2_output_weights])) 56 | 57 | pop_weights_mat = numpy.array(initial_pop_weights) 58 | 59 | best_outputs = [] 60 | accuracies = {} 61 | F = pop_weights_mat[0] 62 | F_acc = 0 63 | for generation in range(num_generations): 64 | print("Generation : ", generation) 65 | fitness = ANN.fitness(pop_weights_mat, data_inputs,data_outputs, activation="tanh") 66 | fitness = numpy.array(fitness) 67 | pop_weights_mat = numpy.array(pop_weights_mat) 68 | indices = fitness.argsort() 69 | pop_weights_mat = pop_weights_mat[indices] 70 | pop_weights_mat = pop_weights_mat[::-1] 71 | fitness = numpy.sort(fitness) 72 | fitness = fitness[::-1] 73 | # if F_acc < fitness[0]: 74 | # F = pop_weights_mat[0] 75 | # F_acc = fitness[0] 76 | F = pop_weights_mat[0] 77 | c1 = 2*math.exp(-(pow((4*(generation+1)/num_generations),2))) 78 | pop_weights_mat[0] = SSA.update_leader(F,weight_range_1,weight_range_2,c1) 79 | # leader_fitness = ANN.fitness(numpy.array([new_leader]), data_inputs,data_outputs, activation="sigmoid") 80 | # if fitness[0] < leader_fitness[0]: 81 | # pop_weights_mat[0] = new_leader 82 | # print(pop_weights_mat[0]) 83 | # time.sleep(15) 84 | pop_weights_mat = SSA.update_follower(pop_weights_mat, fitness,data_inputs,data_outputs) 85 | 86 | accuracies[generation] = fitness 87 | print("Fitness") 88 | print(fitness) 89 | 90 | 91 | best_weights = F 92 | acc, predictions = ANN.predict_outputs(best_weights, inp, out, activation="tanh") 93 | print(predictions) 94 | print(out) 95 | f = f1_score(out,predictions) 96 | print(f) 97 | -------------------------------------------------------------------------------- /Code/main_WOA.py: -------------------------------------------------------------------------------- 1 | import numpy 2 | import ANN 3 | import time 4 | import csv 5 | import random 6 | import WOA 7 | from sklearn.model_selection import train_test_split 8 | from csv import reader 9 | import NeuralNetwork 10 | 11 | def best_index(fitness): 12 | max_fitness = 0 13 | pos = -1 14 | for i in range(len(fitness)): 15 | if fitness[i] > max_fitness: 16 | max_fitness = fitness[i] 17 | pos = i 18 | 
return pos 19 | 20 | def load_csv(filename): 21 | dataset = list() 22 | with open(filename, 'r') as file: 23 | csv_reader = reader(file) 24 | for row in csv_reader: 25 | if not row: 26 | continue 27 | m = [] 28 | for r in row: 29 | m.append(float(r)) 30 | dataset.append(m) 31 | return dataset 32 | 33 | def load_data(): 34 | data_inputs = [] 35 | with open('datasets/banknote_authentication.csv','r') as csvfile: 36 | rows = csv.reader(csvfile) 37 | for row in rows: 38 | row = numpy.array(row,dtype = float) 39 | data_inputs.append(row) 40 | return data_inputs 41 | 42 | filename = 'datasets/banknote_authentication.csv' 43 | dataset = load_csv(filename) #checked 44 | random.shuffle(dataset) #checked 45 | random.shuffle(dataset) 46 | # dataset = load_data() 47 | minmax = NeuralNetwork.dataset_minmax(dataset) #checked 48 | 49 | NeuralNetwork.normalize_dataset(dataset,minmax) #checked 50 | 51 | data_input = [row[:-1] for row in dataset] 52 | data_output = [row[-1] for row in dataset] 53 | 54 | data_inputs, X, data_outputs, y = train_test_split(data_input, data_output,test_size = 0.15, random_state = 0) 55 | train_dataset = [] 56 | print(len(data_inputs),len(data_outputs)) 57 | # print(data_inputs.shape, data_outputs.shape) 58 | for i in range(len(data_inputs)): 59 | data_inputs[i].append(data_outputs[i]) 60 | train_dataset.append(data_inputs[i]) #checked 61 | # for i in train_dataset: 62 | # print(i) 63 | test_dataset = [] 64 | for i in range(len(X)): 65 | X[i].append(y[i]) 66 | test_dataset.append(X[i]) 67 | 68 | sol_per_pop = 7 69 | num_generations = 100 70 | n_inputs = len(dataset[0]) - 1 71 | HL1_neurons = int(n_inputs*2) 72 | HL2_neurons = int(n_inputs/2) 73 | # h_neuron = [HL1_neurons, HL2_neurons] 74 | h_neuron = [HL1_neurons] 75 | n_outputs = len(set([row[-1] for row in dataset])) 76 | # n_outputs = 1 77 | train_dataset = train_dataset[:5] 78 | weight_range_1 = -1 79 | weight_range_2 = 1 80 | 81 | initial_pop_weights = [] 82 | 83 | for i in range(sol_per_pop): 84 | initial_pop_weights.append(NeuralNetwork.initialize_network(n_inputs, h_neuron, n_outputs, weight_range_1, weight_range_2)) 85 | for i in initial_pop_weights: 86 | print() 87 | print(i) 88 | fitness = [] 89 | accuracies = numpy.empty(shape=(num_generations)) 90 | for i in range(sol_per_pop): 91 | fitness.append(NeuralNetwork.cal_fitness(initial_pop_weights[i],train_dataset)) 92 | print(fitness) 93 | best = best_index(fitness) 94 | best_agent = initial_pop_weights[best] 95 | a = 2 96 | rand_pop = [i for i in range(0,sol_per_pop)] 97 | print(rand_pop, best) 98 | for generation in range(num_generations): 99 | fitness = [] 100 | print("Generation : ", generation) 101 | a = a - 2/num_generations 102 | for i in range(0,sol_per_pop): 103 | if i != best: 104 | r = random.random() 105 | A = 2*a*r - a 106 | C = 2*r 107 | prob = [0,1,0,0,1,1,0,0,1,1,1,0,0,0,1,0,0,1,0,1,0,0,1,0,0,1,1,1,0] 108 | l = random.random()*-1 if random.choice(prob) else random.random() #for value between -1, 1 109 | p = random.random() 110 | if(p < 0.5): 111 | if(abs(A) < 1): 112 | parent = list(initial_pop_weights[i]) 113 | new_agent = list(best_agent) 114 | initial_pop_weights[i] = WOA.update_WOA1(parent, new_agent, C, A) 115 | else: 116 | rand_pop.remove(i) 117 | parent = list(initial_pop_weights[i]) 118 | rand_agent = list(initial_pop_weights[random.choice(rand_pop)]) 119 | initial_pop_weights[i] = WOA.update_WOA1(parent, rand_agent, C, A) 120 | rand_pop.append(i) 121 | else: 122 | parent = list(initial_pop_weights[i]) 123 | agent = list(best_agent) 124 | 
initial_pop_weights[i] = WOA.update_WOA3(agent, parent, C, l)
125 | 
126 |     print(initial_pop_weights[i])
127 |     time.sleep(3)
128 |     for i in range(sol_per_pop):
129 |         fitness.append(NeuralNetwork.cal_fitness(initial_pop_weights[i],train_dataset))
130 |     for i in initial_pop_weights:
131 |         print()
132 |         print(i)
133 |     for i in train_dataset:
134 |         print()
135 |         print(i)
136 |     time.sleep(15)
137 |     print(fitness)
138 |     best = best_index(fitness)
139 |     best_agent = initial_pop_weights[best]
140 | 
141 | best_weights = best_agent
142 | print(best_agent)
143 | acc = NeuralNetwork.cal_fitness(best_weights, test_dataset)  # was ANN.predict_outputs(best_weights, X, y, ...): ANN expects weight matrices, not this dict-based network, and test_dataset already carries the labels
144 | print(acc)
145 |
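WOA.py is imported above but missing from this snapshot's Code/ tree. For reference, a minimal sketch of what update_WOA1 and update_WOA3 could look like under the standard Whale Optimization Algorithm update rules, applied element-wise to the dict-based networks that NeuralNetwork.initialize_network builds. This is an assumed reconstruction matching the call sites above, not the original file:

import math
import copy

def _combine(net_a, net_b, fn):
    # Apply fn element-wise across two dict-based networks of identical shape.
    result = copy.deepcopy(net_a)
    for layer_r, layer_b in zip(result, net_b):
        for neuron_r, neuron_b in zip(layer_r, layer_b):
            neuron_r['weights'] = [fn(wa, wb) for wa, wb in zip(neuron_r['weights'], neuron_b['weights'])]
    return result

def update_WOA1(parent, target, C, A):
    # Encircling / exploration: X' = X_t - A * |C * X_t - X|
    return _combine(parent, target, lambda w, wt: wt - A * abs(C * wt - w))

def update_WOA3(agent, parent, C, l, b=1):
    # Spiral bubble-net attack: X' = |X* - X| * e^(b*l) * cos(2*pi*l) + X*
    # (C is accepted to mirror the call site but unused in the textbook spiral form)
    return _combine(parent, agent, lambda w, ws: abs(ws - w) * math.exp(b * l) * math.cos(2 * math.pi * l) + ws)

main_WOA.py's draws A = 2*a*r - a and C = 2*r, with a decaying from 2 to 0 over the generations, match the exploration/exploitation schedule these equations expect.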
--------------------------------------------------------------------------------
/Code/main_pso.py:
--------------------------------------------------------------------------------
1 | import numpy
2 | import ANN
3 | import csv
4 | import genetic as ga
5 | import Neural_network as neural
6 | from sklearn.model_selection import train_test_split
7 | import pso
8 | 
9 | 
10 | def load_data():
11 |     data_inputs = []
12 |     with open('datasets/No_weighting.csv','r') as csvfile:
13 |         rows = csv.reader(csvfile)
14 |         for row in rows:
15 |             row = numpy.array(row,dtype = float)
16 |             data_inputs.append(row)
17 |     data_outputs = []
18 |     with open('datasets/label_weight.csv','r') as csvfile:
19 |         rows = csv.reader(csvfile)
20 |         for row in rows:
21 |             row = numpy.array(row,dtype = float)
22 |             data_outputs.append(row)
23 |     return data_inputs,data_outputs
24 | 
25 | data_inputs, data_outputs = load_data()
26 | # if(len(data_inputs)>1000):
27 | #     data_inputs = numpy.array(data_inputs[:,0:1000])
28 | #     data_outputs = numpy.array(data_outputs[:,0:1000])
29 | data_inputs = numpy.array(data_inputs)
30 | data_outputs = numpy.array(data_outputs)
31 | data_inputs, X, data_outputs, y = train_test_split(data_inputs, data_outputs,test_size = 0.15, random_state = 1)
32 | 
33 | print(data_inputs.shape)
34 | 
35 | sol_per_pop = 12
36 | num_generations = 200
37 | 
38 | HL1_neurons = 150
39 | HL2_neurons = 60
40 | output_neurons = 2
41 | 
42 | weight_range_1 = -5
43 | weight_range_2 = 5
44 | 
45 | initial_pop_weights = []
46 | for curr_sol in numpy.arange(0, sol_per_pop):
47 | 
48 |     input_HL1_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(data_inputs.shape[1], HL1_neurons))
49 |     HL1_HL2_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL1_neurons, HL2_neurons))
50 |     HL2_output_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL2_neurons, output_neurons))
51 |     initial_pop_weights.append(numpy.array([input_HL1_weights, HL1_HL2_weights, HL2_output_weights]))
52 | 
53 | pop_weights_mat = numpy.array(initial_pop_weights)
54 | pop_weights_vector = ga.mat_to_vector(pop_weights_mat)
55 | 
56 | weight_range_1 = 0
57 | weight_range_2 = 0
58 | velocity = []
59 | 
60 | for curr_sol in numpy.arange(0, sol_per_pop):
61 |     input_HL1_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(data_inputs.shape[1], HL1_neurons))
62 |     HL1_HL2_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL1_neurons, HL2_neurons))
63 |     HL2_output_weights = numpy.random.uniform(low=weight_range_1, high=weight_range_2, size=(HL2_neurons, output_neurons))
64 |     velocity.append(numpy.array([input_HL1_weights, HL1_HL2_weights, HL2_output_weights]))
65 | velocity = numpy.array(velocity)
66 | 
67 | best_outputs = []
68 | accuracies = numpy.empty(shape=(num_generations))
69 | personal_best_wt = pop_weights_mat
70 | personal_best_acc = ANN.fitness(pop_weights_mat, data_inputs,data_outputs, activation="sigmoid")
71 | global_best_wt = []
72 | for generation in range(num_generations):
73 |     print("Generation : ", generation)
74 | 
75 |     fitness = ANN.fitness(pop_weights_mat, data_inputs,data_outputs, activation="sigmoid")
76 |     accuracies[generation] = numpy.max(fitness)  # fitness is a per-solution array; store its best value
77 |     if generation >= 10 and accuracies[generation] == accuracies[generation-10]:  # stop if no improvement in 10 generations
78 |         break
79 |     print("personal_best")
80 |     print(personal_best_acc)
81 | 
82 |     global_best_index = pso.global_best(fitness)
83 |     global_best_wt = pop_weights_mat[global_best_index]
84 | 
85 |     personal_best_acc, personal_best_wt = pso.personal_best(fitness,personal_best_acc,personal_best_wt,pop_weights_mat)
86 |     pop_weights_mat, velocity = pso.update_position(global_best_wt,personal_best_wt,pop_weights_mat,velocity)
87 | 
88 | best_weights = global_best_wt
89 | acc, predictions = ANN.predict_outputs(best_weights, X, y, activation="sigmoid")
90 | print(acc)
91 | 
--------------------------------------------------------------------------------
/Code/pso.py:
--------------------------------------------------------------------------------
1 | import numpy
2 | import random
3 | def global_best(fitness):
4 |     max_fitness = 0
5 |     pos = -1
6 |     for i in range(len(fitness)):
7 |         if fitness[i]>max_fitness:
8 |             max_fitness = fitness[i]
9 |             pos = i
10 |     return pos
11 | 
12 | def personal_best(fitness,personal_best_acc,personal_best_wt,current_pop_weights):
13 |     for i in range(len(personal_best_acc)):
14 |         if personal_best_acc[i] < fitness[i]:
15 |             personal_best_acc[i] = fitness[i]
16 |             personal_best_wt[i] = current_pop_weights[i]
17 |     return personal_best_acc, personal_best_wt
18 | 
19 | def update_position(global_best_wt,personal_best_wt,pop_weights_mat,velocity):
20 |     for i in range(len(pop_weights_mat)):
21 |         velocity[i] = velocity[i] + 2*random.random()*(personal_best_wt[i] - pop_weights_mat[i]) + 2*random.random()*(global_best_wt - pop_weights_mat[i])
22 |         pop_weights_mat[i] = pop_weights_mat[i] + velocity[i]
23 |     return pop_weights_mat,velocity
24 | 
25 | def update_position_val(global_best_wt,personal_best_wt,pop_weights_mat,velocity,weight_range_1,weight_range_2):
26 |     for i in range(len(pop_weights_mat)):
27 |         velocity[i] = velocity[i] + 2*random.random()*(personal_best_wt[i] - pop_weights_mat[i]) + 2*random.random()*(global_best_wt - pop_weights_mat[i])
28 |         pop_weights_mat[i] = pop_weights_mat[i] + velocity[i]
29 |         if (pop_weights_mat[i] < weight_range_1) or (pop_weights_mat[i] > weight_range_2):
30 |             pop_weights_mat[i] = random.uniform(weight_range_1,weight_range_2)
31 |     return pop_weights_mat,velocity
32 | 
33 | # function testing
34 | # datasets boosting german, australian, banknote, credit card
35 | # pso optimising
36 | #
37 | 
--------------------------------------------------------------------------------
/Code/testCode:
--------------------------------------------------------------------------------
1 | This is test code
2 | 
--------------------------------------------------------------------------------
/Data Sets/blood.csv:
--------------------------------------------------------------------------------
1 | 2,50,12500,98
2 | 0,13,3250,28
3 | 1,16,4000,35
4 | 2,20,5000,45
5 | 1,24,6000,77
6 | 4,4,1000,4
7 | 2,7,1750,14
8 | 1,12,3000,35
9 | 2,9,2250,22
10 | 5,46,11500,98
11 | 4,23,5750,58
12 | 0,3,750,4
13 | 2,10,2500,28
14 | 1,13,3250,47
15 | 2,6,1500,15
16 | 2,5,1250,11
17 | 2,14,3500,48
18 | 2,15,3750,49
19 | 2,6,1500,15
20 | 2,3,750,4
21 | 2,3,750,4
22 | 4,11,2750,28
23 | 2,6,1500,16
24 | 2,6,1500,16
25 |
9,9,2250,16 26 | 4,14,3500,40 27 | 4,6,1500,14 28 | 4,12,3000,34 29 | 4,5,1250,11 30 | 4,8,2000,21 31 | 1,14,3500,58 32 | 4,10,2500,28 33 | 4,10,2500,28 34 | 4,9,2250,26 35 | 2,16,4000,64 36 | 2,8,2000,28 37 | 2,12,3000,47 38 | 4,6,1500,16 39 | 2,14,3500,57 40 | 4,7,1750,22 41 | 2,13,3250,53 42 | 2,5,1250,16 43 | 2,5,1250,16 44 | 2,5,1250,16 45 | 4,20,5000,69 46 | 4,9,2250,28 47 | 2,9,2250,36 48 | 2,2,500,2 49 | 2,2,500,2 50 | 2,2,500,2 51 | 2,11,2750,46 52 | 2,11,2750,46 53 | 2,6,1500,22 54 | 2,12,3000,52 55 | 4,5,1250,14 56 | 4,19,4750,69 57 | 4,8,2000,26 58 | 2,7,1750,28 59 | 2,16,4000,81 60 | 3,6,1500,21 61 | 2,7,1750,29 62 | 2,8,2000,35 63 | 2,10,2500,49 64 | 4,5,1250,16 65 | 2,3,750,9 66 | 3,16,4000,74 67 | 2,4,1000,14 68 | 0,2,500,4 69 | 4,7,1750,25 70 | 1,9,2250,51 71 | 2,4,1000,16 72 | 2,4,1000,16 73 | 4,17,4250,71 74 | 2,2,500,4 75 | 2,2,500,4 76 | 2,2,500,4 77 | 2,4,1000,16 78 | 2,2,500,4 79 | 2,2,500,4 80 | 2,2,500,4 81 | 4,6,1500,23 82 | 2,4,1000,16 83 | 2,4,1000,16 84 | 2,4,1000,16 85 | 2,6,1500,28 86 | 2,6,1500,28 87 | 4,2,500,4 88 | 4,2,500,4 89 | 4,2,500,4 90 | 2,7,1750,35 91 | 4,2,500,4 92 | 4,2,500,4 93 | 4,2,500,4 94 | 4,2,500,4 95 | 12,11,2750,23 96 | 4,7,1750,28 97 | 3,17,4250,86 98 | 4,9,2250,38 99 | 4,4,1000,14 100 | 5,7,1750,26 101 | 4,8,2000,34 102 | 2,13,3250,76 103 | 4,9,2250,40 104 | 2,5,1250,26 105 | 2,5,1250,26 106 | 6,17,4250,70 107 | 0,8,2000,59 108 | 3,5,1250,26 109 | 2,3,750,14 110 | 2,10,2500,64 111 | 4,5,1250,23 112 | 4,9,2250,46 113 | 4,5,1250,23 114 | 4,8,2000,40 115 | 2,12,3000,82 116 | 11,24,6000,64 117 | 2,7,1750,46 118 | 4,11,2750,61 119 | 1,7,1750,57 120 | 2,11,2750,79 121 | 2,3,750,16 122 | 4,5,1250,26 123 | 2,6,1500,41 124 | 2,5,1250,33 125 | 2,4,1000,26 126 | 2,5,1250,34 127 | 4,8,2000,46 128 | 2,4,1000,26 129 | 4,8,2000,48 130 | 2,2,500,10 131 | 4,5,1250,28 132 | 2,12,3000,95 133 | 2,2,500,10 134 | 4,6,1500,35 135 | 2,11,2750,88 136 | 2,3,750,19 137 | 2,5,1250,37 138 | 2,12,3000,98 139 | 9,5,1250,19 140 | 2,2,500,11 141 | 2,9,2250,74 142 | 5,14,3500,86 143 | 4,3,750,16 144 | 4,3,750,16 145 | 4,2,500,9 146 | 4,3,750,16 147 | 6,3,750,14 148 | 2,2,500,11 149 | 2,2,500,11 150 | 2,2,500,11 151 | 2,7,1750,58 152 | 4,6,1500,39 153 | 4,11,2750,78 154 | 2,1,250,2 155 | 2,1,250,2 156 | 2,1,250,2 157 | 2,1,250,2 158 | 2,1,250,2 159 | 2,1,250,2 160 | 2,1,250,2 161 | 2,1,250,2 162 | 2,1,250,2 163 | 2,1,250,2 164 | 2,1,250,2 165 | 2,1,250,2 166 | 2,1,250,2 167 | 2,1,250,2 168 | 2,1,250,2 169 | 2,1,250,2 170 | 2,1,250,2 171 | 2,1,250,2 172 | 2,1,250,2 173 | 2,1,250,2 174 | 2,1,250,2 175 | 2,1,250,2 176 | 11,10,2500,35 177 | 11,4,1000,16 178 | 4,5,1250,33 179 | 4,6,1500,41 180 | 2,3,750,22 181 | 4,4,1000,26 182 | 10,4,1000,16 183 | 2,4,1000,35 184 | 4,12,3000,88 185 | 13,8,2000,26 186 | 11,9,2250,33 187 | 4,5,1250,34 188 | 4,4,1000,26 189 | 8,15,3750,77 190 | 4,5,1250,35 191 | 4,7,1750,52 192 | 4,7,1750,52 193 | 2,4,1000,35 194 | 11,11,2750,42 195 | 2,2,500,14 196 | 2,5,1250,47 197 | 9,8,2000,38 198 | 4,6,1500,47 199 | 11,7,1750,29 200 | 9,9,2250,45 201 | 4,6,1500,52 202 | 4,7,1750,58 203 | 6,2,500,11 204 | 4,7,1750,58 205 | 11,9,2250,38 206 | 11,6,1500,26 207 | 2,2,500,16 208 | 2,7,1750,76 209 | 11,6,1500,27 210 | 11,3,750,14 211 | 4,1,250,4 212 | 4,1,250,4 213 | 4,1,250,4 214 | 4,1,250,4 215 | 4,1,250,4 216 | 4,1,250,4 217 | 4,1,250,4 218 | 4,1,250,4 219 | 4,1,250,4 220 | 4,1,250,4 221 | 4,1,250,4 222 | 4,1,250,4 223 | 4,1,250,4 224 | 4,1,250,4 225 | 4,1,250,4 226 | 4,1,250,4 227 | 4,1,250,4 228 | 4,3,750,24 229 | 4,1,250,4 230 | 4,1,250,4 231 | 4,1,250,4 
232 | 4,1,250,4 233 | 4,1,250,4 234 | 10,8,2000,39 235 | 14,7,1750,26 236 | 8,10,2500,63 237 | 11,3,750,15 238 | 4,2,500,14 239 | 2,4,1000,43 240 | 8,9,2250,58 241 | 8,8,2000,52 242 | 11,22,5500,98 243 | 4,3,750,25 244 | 11,17,4250,79 245 | 9,2,500,11 246 | 4,5,1250,46 247 | 11,12,3000,58 248 | 7,12,3000,86 249 | 11,2,500,11 250 | 11,2,500,11 251 | 11,2,500,11 252 | 2,6,1500,75 253 | 11,8,2000,41 254 | 11,3,750,16 255 | 12,13,3250,59 256 | 2,3,750,35 257 | 16,8,2000,28 258 | 11,7,1750,37 259 | 4,3,750,28 260 | 12,12,3000,58 261 | 4,4,1000,41 262 | 11,14,3500,73 263 | 2,2,500,23 264 | 2,3,750,38 265 | 4,5,1250,58 266 | 4,4,1000,43 267 | 3,2,500,23 268 | 11,8,2000,46 269 | 4,7,1750,82 270 | 13,4,1000,21 271 | 16,11,2750,40 272 | 16,7,1750,28 273 | 7,2,500,16 274 | 4,5,1250,58 275 | 4,5,1250,58 276 | 4,4,1000,46 277 | 14,13,3250,57 278 | 4,3,750,34 279 | 14,18,4500,78 280 | 11,8,2000,48 281 | 14,16,4000,70 282 | 14,4,1000,22 283 | 14,5,1250,26 284 | 8,2,500,16 285 | 11,5,1250,33 286 | 11,2,500,14 287 | 4,2,500,23 288 | 9,2,500,16 289 | 14,5,1250,28 290 | 14,3,750,19 291 | 14,4,1000,23 292 | 16,12,3000,50 293 | 11,4,1000,28 294 | 11,5,1250,35 295 | 11,5,1250,35 296 | 2,4,1000,70 297 | 14,5,1250,28 298 | 14,2,500,14 299 | 14,2,500,14 300 | 14,2,500,14 301 | 14,2,500,14 302 | 14,2,500,14 303 | 14,2,500,14 304 | 2,3,750,52 305 | 14,6,1500,34 306 | 11,5,1250,37 307 | 4,5,1250,74 308 | 11,3,750,23 309 | 16,4,1000,23 310 | 16,3,750,19 311 | 11,5,1250,38 312 | 11,2,500,16 313 | 12,9,2250,60 314 | 9,1,250,9 315 | 9,1,250,9 316 | 4,2,500,29 317 | 11,2,500,17 318 | 14,4,1000,26 319 | 11,9,2250,72 320 | 11,5,1250,41 321 | 15,16,4000,82 322 | 9,5,1250,51 323 | 11,4,1000,34 324 | 14,8,2000,50 325 | 16,7,1750,38 326 | 14,2,500,16 327 | 2,2,500,41 328 | 14,16,4000,98 329 | 14,4,1000,28 330 | 16,7,1750,39 331 | 14,7,1750,47 332 | 16,6,1500,35 333 | 16,6,1500,35 334 | 11,7,1750,62 335 | 16,2,500,16 336 | 16,3,750,21 337 | 11,3,750,28 338 | 11,7,1750,64 339 | 11,1,250,11 340 | 9,3,750,34 341 | 14,4,1000,30 342 | 23,38,9500,98 343 | 11,6,1500,58 344 | 11,1,250,11 345 | 11,1,250,11 346 | 11,1,250,11 347 | 11,1,250,11 348 | 11,1,250,11 349 | 11,1,250,11 350 | 11,1,250,11 351 | 11,1,250,11 352 | 11,2,500,21 353 | 11,5,1250,50 354 | 11,2,500,21 355 | 16,4,1000,28 356 | 4,2,500,41 357 | 16,6,1500,40 358 | 14,3,750,26 359 | 9,2,500,26 360 | 21,16,4000,64 361 | 14,6,1500,51 362 | 11,2,500,24 363 | 4,3,750,71 364 | 21,13,3250,57 365 | 11,6,1500,71 366 | 14,2,500,21 367 | 23,15,3750,57 368 | 14,4,1000,38 369 | 11,2,500,26 370 | 16,5,1250,40 371 | 4,2,500,51 372 | 14,3,750,31 373 | 4,2,500,52 374 | 9,4,1000,65 375 | 14,4,1000,40 376 | 11,3,750,40 377 | 14,5,1250,50 378 | 14,1,250,14 379 | 14,1,250,14 380 | 14,1,250,14 381 | 14,1,250,14 382 | 14,1,250,14 383 | 14,1,250,14 384 | 14,1,250,14 385 | 14,1,250,14 386 | 14,7,1750,72 387 | 14,1,250,14 388 | 14,1,250,14 389 | 9,3,750,52 390 | 14,7,1750,73 391 | 11,4,1000,58 392 | 11,4,1000,59 393 | 4,2,500,59 394 | 11,4,1000,61 395 | 16,4,1000,40 396 | 16,10,2500,89 397 | 21,2,500,21 398 | 21,3,750,26 399 | 16,8,2000,76 400 | 21,3,750,26 401 | 18,2,500,23 402 | 23,5,1250,33 403 | 23,8,2000,46 404 | 16,3,750,34 405 | 14,5,1250,64 406 | 14,3,750,41 407 | 16,1,250,16 408 | 16,1,250,16 409 | 16,1,250,16 410 | 16,1,250,16 411 | 16,1,250,16 412 | 16,1,250,16 413 | 16,1,250,16 414 | 16,4,1000,45 415 | 16,1,250,16 416 | 16,1,250,16 417 | 16,1,250,16 418 | 16,1,250,16 419 | 16,1,250,16 420 | 16,2,500,26 421 | 21,2,500,23 422 | 16,2,500,27 423 | 21,2,500,23 424 | 21,2,500,23 425 | 
14,4,1000,57 426 | 16,5,1250,60 427 | 23,2,500,23 428 | 14,5,1250,74 429 | 23,3,750,28 430 | 16,3,750,40 431 | 9,2,500,52 432 | 9,2,500,52 433 | 16,7,1750,87 434 | 14,4,1000,64 435 | 14,2,500,35 436 | 16,7,1750,93 437 | 21,2,500,25 438 | 14,3,750,52 439 | 23,14,3500,93 440 | 18,8,2000,95 441 | 16,3,750,46 442 | 11,3,750,76 443 | 11,2,500,52 444 | 11,3,750,76 445 | 23,12,3000,86 446 | 21,3,750,35 447 | 23,2,500,26 448 | 23,2,500,26 449 | 23,8,2000,64 450 | 16,3,750,50 451 | 23,3,750,33 452 | 21,3,750,38 453 | 23,2,500,28 454 | 21,1,250,21 455 | 21,1,250,21 456 | 21,1,250,21 457 | 21,1,250,21 458 | 21,1,250,21 459 | 21,1,250,21 460 | 21,1,250,21 461 | 21,1,250,21 462 | 21,1,250,21 463 | 21,1,250,21 464 | 21,1,250,21 465 | 21,1,250,21 466 | 21,5,1250,60 467 | 23,4,1000,45 468 | 21,4,1000,52 469 | 22,1,250,22 470 | 11,2,500,70 471 | 23,5,1250,58 472 | 23,3,750,40 473 | 23,3,750,41 474 | 14,3,750,83 475 | 21,2,500,35 476 | 26,5,1250,49 477 | 23,6,1500,70 478 | 23,1,250,23 479 | 23,1,250,23 480 | 23,1,250,23 481 | 23,1,250,23 482 | 23,1,250,23 483 | 23,1,250,23 484 | 23,1,250,23 485 | 23,1,250,23 486 | 23,4,1000,53 487 | 21,6,1500,86 488 | 23,3,750,48 489 | 21,2,500,41 490 | 21,3,750,64 491 | 16,2,500,70 492 | 21,3,750,70 493 | 23,4,1000,87 494 | 23,3,750,89 495 | 23,2,500,87 496 | 35,3,750,64 497 | 38,1,250,38 498 | 38,1,250,38 499 | 40,1,250,40 500 | 74,1,250,74 501 | 2,43,10750,86 502 | 6,22,5500,28 503 | 2,34,8500,77 504 | 2,44,11000,98 505 | 0,26,6500,76 506 | 2,41,10250,98 507 | 3,21,5250,42 508 | 2,11,2750,23 509 | 2,21,5250,52 510 | 2,13,3250,32 511 | 4,4,1000,4 512 | 2,11,2750,26 513 | 2,11,2750,28 514 | 3,14,3500,35 515 | 4,16,4000,38 516 | 4,6,1500,14 517 | 3,5,1250,12 518 | 4,33,8250,98 519 | 3,10,2500,33 520 | 4,10,2500,28 521 | 2,11,2750,40 522 | 2,11,2750,41 523 | 4,13,3250,39 524 | 1,10,2500,43 525 | 4,9,2250,28 526 | 2,4,1000,11 527 | 2,5,1250,16 528 | 2,15,3750,64 529 | 5,24,6000,79 530 | 2,6,1500,22 531 | 4,5,1250,16 532 | 2,4,1000,14 533 | 4,8,2000,28 534 | 2,4,1000,14 535 | 2,6,1500,26 536 | 4,5,1250,16 537 | 2,7,1750,32 538 | 2,6,1500,26 539 | 2,8,2000,38 540 | 2,2,500,4 541 | 2,6,1500,28 542 | 2,10,2500,52 543 | 4,16,4000,70 544 | 4,2,500,4 545 | 1,14,3500,95 546 | 4,2,500,4 547 | 7,14,3500,48 548 | 2,3,750,11 549 | 2,12,3000,70 550 | 4,7,1750,32 551 | 4,4,1000,16 552 | 2,6,1500,35 553 | 4,6,1500,28 554 | 2,3,750,14 555 | 2,4,1000,23 556 | 4,4,1000,18 557 | 5,6,1500,28 558 | 4,6,1500,30 559 | 14,5,1250,14 560 | 3,8,2000,50 561 | 4,11,2750,64 562 | 4,9,2250,52 563 | 4,16,4000,98 564 | 7,10,2500,47 565 | 4,14,3500,86 566 | 2,9,2250,75 567 | 4,6,1500,35 568 | 4,9,2250,55 569 | 4,6,1500,35 570 | 2,6,1500,45 571 | 2,6,1500,47 572 | 4,2,500,9 573 | 2,2,500,11 574 | 2,2,500,11 575 | 2,2,500,11 576 | 4,6,1500,38 577 | 3,4,1000,29 578 | 9,9,2250,38 579 | 11,5,1250,18 580 | 2,3,750,21 581 | 2,1,250,2 582 | 2,1,250,2 583 | 2,1,250,2 584 | 2,1,250,2 585 | 2,1,250,2 586 | 2,1,250,2 587 | 2,1,250,2 588 | 2,1,250,2 589 | 2,1,250,2 590 | 2,1,250,2 591 | 2,1,250,2 592 | 11,11,2750,38 593 | 2,3,750,22 594 | 9,11,2750,49 595 | 5,11,2750,75 596 | 3,5,1250,38 597 | 3,1,250,3 598 | 4,6,1500,43 599 | 2,3,750,24 600 | 12,11,2750,39 601 | 2,2,500,14 602 | 4,6,1500,46 603 | 9,3,750,14 604 | 14,8,2000,26 605 | 4,2,500,13 606 | 4,11,2750,95 607 | 2,7,1750,77 608 | 2,7,1750,77 609 | 4,1,250,4 610 | 4,1,250,4 611 | 4,1,250,4 612 | 4,1,250,4 613 | 4,1,250,4 614 | 4,1,250,4 615 | 4,1,250,4 616 | 4,1,250,4 617 | 4,1,250,4 618 | 4,1,250,4 619 | 4,1,250,4 620 | 4,1,250,4 621 | 4,7,1750,62 622 | 4,1,250,4 
623 | 4,4,1000,34 624 | 11,6,1500,28 625 | 13,3,750,14 626 | 7,5,1250,35 627 | 9,9,2250,54 628 | 11,2,500,11 629 | 2,5,1250,63 630 | 7,11,2750,89 631 | 8,9,2250,64 632 | 2,2,500,22 633 | 6,3,750,26 634 | 12,15,3750,71 635 | 13,3,750,16 636 | 11,16,4000,89 637 | 4,5,1250,58 638 | 14,7,1750,35 639 | 11,4,1000,27 640 | 7,9,2250,89 641 | 11,8,2000,52 642 | 7,5,1250,52 643 | 11,6,1500,41 644 | 10,5,1250,38 645 | 14,2,500,14 646 | 14,2,500,14 647 | 14,2,500,14 648 | 2,2,500,33 649 | 11,3,750,23 650 | 14,8,2000,46 651 | 9,1,250,9 652 | 16,5,1250,27 653 | 14,4,1000,26 654 | 4,2,500,30 655 | 14,3,750,21 656 | 16,16,4000,77 657 | 4,2,500,31 658 | 14,8,2000,50 659 | 11,3,750,26 660 | 14,7,1750,45 661 | 15,5,1250,33 662 | 16,2,500,16 663 | 16,3,750,21 664 | 11,8,2000,72 665 | 11,1,250,11 666 | 11,1,250,11 667 | 11,1,250,11 668 | 11,1,250,11 669 | 11,1,250,11 670 | 2,3,750,75 671 | 2,3,750,77 672 | 16,4,1000,28 673 | 16,15,3750,87 674 | 16,14,3500,83 675 | 16,10,2500,62 676 | 16,3,750,23 677 | 14,3,750,26 678 | 23,19,4750,62 679 | 11,7,1750,75 680 | 14,3,750,28 681 | 20,14,3500,69 682 | 4,2,500,46 683 | 11,2,500,25 684 | 11,3,750,37 685 | 16,4,1000,33 686 | 21,7,1750,38 687 | 13,7,1750,76 688 | 16,6,1500,50 689 | 14,3,750,33 690 | 14,1,250,14 691 | 14,1,250,14 692 | 14,1,250,14 693 | 14,1,250,14 694 | 14,1,250,14 695 | 14,1,250,14 696 | 17,7,1750,58 697 | 14,3,750,35 698 | 14,3,750,35 699 | 16,7,1750,64 700 | 21,2,500,21 701 | 16,3,750,35 702 | 16,1,250,16 703 | 16,1,250,16 704 | 16,1,250,16 705 | 16,1,250,16 706 | 16,1,250,16 707 | 14,2,500,29 708 | 11,4,1000,74 709 | 11,2,500,38 710 | 21,6,1500,48 711 | 23,2,500,23 712 | 23,6,1500,45 713 | 14,2,500,35 714 | 16,6,1500,81 715 | 16,4,1000,58 716 | 16,5,1250,71 717 | 21,2,500,26 718 | 21,3,750,35 719 | 21,3,750,35 720 | 23,8,2000,69 721 | 21,3,750,38 722 | 23,3,750,35 723 | 21,3,750,40 724 | 23,2,500,28 725 | 21,1,250,21 726 | 21,1,250,21 727 | 25,6,1500,50 728 | 21,1,250,21 729 | 21,1,250,21 730 | 23,3,750,39 731 | 21,2,500,33 732 | 14,3,750,79 733 | 23,1,250,23 734 | 23,1,250,23 735 | 23,1,250,23 736 | 23,1,250,23 737 | 23,1,250,23 738 | 23,1,250,23 739 | 23,1,250,23 740 | 23,4,1000,52 741 | 23,1,250,23 742 | 23,7,1750,88 743 | 16,3,750,86 744 | 23,2,500,38 745 | 21,2,500,52 746 | 23,3,750,62 747 | 39,1,250,39 748 | 72,1,250,72 749 | -------------------------------------------------------------------------------- /Data Sets/breast_cancer.csv: -------------------------------------------------------------------------------- 1 | 5,1,1,1,2,1,3,1,1,0 2 | 5,4,4,5,7,10,3,2,1,0 3 | 3,1,1,1,2,2,3,1,1,0 4 | 6,8,8,1,3,4,3,7,1,0 5 | 4,1,1,3,2,1,3,1,1,0 6 | 8,10,10,8,7,10,9,7,1,1 7 | 1,1,1,1,2,10,3,1,1,0 8 | 2,1,2,1,2,1,3,1,1,0 9 | 2,1,1,1,2,1,1,1,5,0 10 | 4,2,1,1,2,1,2,1,1,0 11 | 1,1,1,1,1,1,3,1,1,0 12 | 2,1,1,1,2,1,2,1,1,0 13 | 5,3,3,3,2,3,4,4,1,1 14 | 1,1,1,1,2,3,3,1,1,0 15 | 8,7,5,10,7,9,5,5,4,1 16 | 7,4,6,4,6,1,4,3,1,1 17 | 4,1,1,1,2,1,2,1,1,0 18 | 4,1,1,1,2,1,3,1,1,0 19 | 10,7,7,6,4,10,4,1,2,1 20 | 6,1,1,1,2,1,3,1,1,0 21 | 7,3,2,10,5,10,5,4,4,1 22 | 10,5,5,3,6,7,7,10,1,1 23 | 3,1,1,1,2,1,2,1,1,0 24 | 8,4,5,1,2,1,7,3,1,1 25 | 1,1,1,1,2,1,3,1,1,0 26 | 5,2,3,4,2,7,3,6,1,1 27 | 3,2,1,1,1,1,2,1,1,0 28 | 5,1,1,1,2,1,2,1,1,0 29 | 2,1,1,1,2,1,2,1,1,0 30 | 1,1,3,1,2,1,1,1,1,0 31 | 3,1,1,1,1,1,2,1,1,0 32 | 2,1,1,1,2,1,3,1,1,0 33 | 10,7,7,3,8,5,7,4,3,1 34 | 2,1,1,2,2,1,3,1,1,0 35 | 3,1,2,1,2,1,2,1,1,0 36 | 2,1,1,1,2,1,2,1,1,0 37 | 10,10,10,8,6,1,8,9,1,1 38 | 6,2,1,1,1,1,7,1,1,0 39 | 5,4,4,9,2,10,5,6,1,1 40 | 2,5,3,3,6,7,7,5,1,1 41 | 6,6,6,9,6,7,7,8,1,0 42 | 
10,4,3,1,3,3,6,5,2,1 43 | 6,10,10,2,8,10,7,3,3,1 44 | 5,6,5,6,10,1,3,1,1,1 45 | 10,10,10,4,8,1,8,10,1,1 46 | 1,1,1,1,2,1,2,1,2,0 47 | 3,7,7,4,4,9,4,8,1,1 48 | 1,1,1,1,2,1,2,1,1,0 49 | 4,1,1,3,2,1,3,1,1,0 50 | 7,8,7,2,4,8,3,8,2,1 51 | 9,5,8,1,2,3,2,1,5,1 52 | 5,3,3,4,2,4,3,4,1,1 53 | 10,3,6,2,3,5,4,10,2,1 54 | 5,5,5,8,10,8,7,3,7,1 55 | 10,5,5,6,8,8,7,1,1,1 56 | 10,6,6,3,4,5,3,6,1,1 57 | 8,10,10,1,3,6,3,9,1,1 58 | 8,2,4,1,5,1,5,4,4,1 59 | 5,2,3,1,6,10,5,1,1,1 60 | 9,5,5,2,2,2,5,1,1,1 61 | 5,3,5,5,3,3,4,10,1,1 62 | 1,1,1,1,2,2,2,1,1,0 63 | 9,10,10,1,10,8,3,3,1,1 64 | 6,3,4,1,5,2,3,9,1,1 65 | 1,1,1,1,2,1,2,1,1,0 66 | 10,4,2,1,3,2,4,3,10,1 67 | 4,1,1,1,2,1,3,1,1,0 68 | 5,3,4,1,8,10,4,9,1,1 69 | 8,3,8,3,4,9,8,9,8,1 70 | 1,1,1,1,2,1,3,2,1,0 71 | 5,1,3,1,2,1,2,1,1,0 72 | 6,10,2,8,10,2,7,8,10,1 73 | 1,3,3,2,2,1,7,2,1,0 74 | 9,4,5,10,6,10,4,8,1,1 75 | 10,6,4,1,3,4,3,2,3,1 76 | 1,1,2,1,2,2,4,2,1,0 77 | 1,1,4,1,2,1,2,1,1,0 78 | 5,3,1,2,2,1,2,1,1,0 79 | 3,1,1,1,2,3,3,1,1,0 80 | 2,1,1,1,3,1,2,1,1,0 81 | 2,2,2,1,1,1,7,1,1,0 82 | 4,1,1,2,2,1,2,1,1,0 83 | 5,2,1,1,2,1,3,1,1,0 84 | 3,1,1,1,2,2,7,1,1,0 85 | 3,5,7,8,8,9,7,10,7,1 86 | 5,10,6,1,10,4,4,10,10,1 87 | 3,3,6,4,5,8,4,4,1,1 88 | 3,6,6,6,5,10,6,8,3,1 89 | 4,1,1,1,2,1,3,1,1,0 90 | 2,1,1,2,3,1,2,1,1,0 91 | 1,1,1,1,2,1,3,1,1,0 92 | 3,1,1,2,2,1,1,1,1,0 93 | 4,1,1,1,2,1,3,1,1,0 94 | 1,1,1,1,2,1,2,1,1,0 95 | 2,1,1,1,2,1,3,1,1,0 96 | 1,1,1,1,2,1,3,1,1,0 97 | 2,1,1,2,2,1,1,1,1,0 98 | 5,1,1,1,2,1,3,1,1,0 99 | 9,6,9,2,10,6,2,9,10,1 100 | 7,5,6,10,5,10,7,9,4,1 101 | 10,3,5,1,10,5,3,10,2,1 102 | 2,3,4,4,2,5,2,5,1,1 103 | 4,1,2,1,2,1,3,1,1,0 104 | 8,2,3,1,6,3,7,1,1,1 105 | 10,10,10,10,10,1,8,8,8,1 106 | 7,3,4,4,3,3,3,2,7,1 107 | 10,10,10,8,2,10,4,1,1,1 108 | 1,6,8,10,8,10,5,7,1,1 109 | 1,1,1,1,2,1,2,3,1,0 110 | 6,5,4,4,3,9,7,8,3,1 111 | 1,3,1,2,2,2,5,3,2,0 112 | 8,6,4,3,5,9,3,1,1,1 113 | 10,3,3,10,2,10,7,3,3,1 114 | 10,10,10,3,10,8,8,1,1,1 115 | 3,3,2,1,2,3,3,1,1,0 116 | 1,1,1,1,2,5,1,1,1,0 117 | 8,3,3,1,2,2,3,2,1,0 118 | 4,5,5,10,4,10,7,5,8,1 119 | 1,1,1,1,4,3,1,1,1,0 120 | 3,2,1,1,2,2,3,1,1,0 121 | 1,1,2,2,2,1,3,1,1,0 122 | 4,2,1,1,2,2,3,1,1,0 123 | 10,10,10,2,10,10,5,3,3,1 124 | 5,3,5,1,8,10,5,3,1,1 125 | 5,4,6,7,9,7,8,10,1,1 126 | 1,1,1,1,2,1,2,1,1,0 127 | 7,5,3,7,4,10,7,5,5,1 128 | 3,1,1,1,2,1,3,1,1,0 129 | 8,3,5,4,5,10,1,6,2,1 130 | 1,1,1,1,10,1,1,1,1,0 131 | 5,1,3,1,2,1,2,1,1,0 132 | 2,1,1,1,2,1,3,1,1,0 133 | 5,10,8,10,8,10,3,6,3,1 134 | 3,1,1,1,2,1,2,2,1,0 135 | 3,1,1,1,3,1,2,1,1,0 136 | 5,1,1,1,2,2,3,3,1,0 137 | 4,1,1,1,2,1,2,1,1,0 138 | 3,1,1,1,2,1,1,1,1,0 139 | 4,1,2,1,2,1,2,1,1,0 140 | 1,1,1,1,1,1,2,1,1,0 141 | 3,1,1,1,2,1,1,1,1,0 142 | 2,1,1,1,2,1,1,1,1,0 143 | 9,5,5,4,4,5,4,3,3,1 144 | 1,1,1,1,2,5,1,1,1,0 145 | 2,1,1,1,2,1,2,1,1,0 146 | 1,1,3,1,2,1,2,1,1,0 147 | 3,4,5,2,6,8,4,1,1,1 148 | 1,1,1,1,3,2,2,1,1,0 149 | 3,1,1,3,8,1,5,8,1,0 150 | 8,8,7,4,10,10,7,8,7,1 151 | 1,1,1,1,1,1,3,1,1,0 152 | 7,2,4,1,6,10,5,4,3,1 153 | 10,10,8,6,4,5,8,10,1,1 154 | 4,1,1,1,2,3,1,1,1,0 155 | 1,1,1,1,2,1,1,1,1,0 156 | 5,5,5,6,3,10,3,1,1,1 157 | 1,2,2,1,2,1,2,1,1,0 158 | 2,1,1,1,2,1,3,1,1,0 159 | 1,1,2,1,3,1,1,1,1,0 160 | 9,9,10,3,6,10,7,10,6,1 161 | 10,7,7,4,5,10,5,7,2,1 162 | 4,1,1,1,2,1,3,2,1,0 163 | 3,1,1,1,2,1,3,1,1,0 164 | 1,1,1,2,1,3,1,1,7,0 165 | 5,1,1,1,2,3,3,1,1,0 166 | 4,1,1,1,2,2,3,2,1,0 167 | 5,6,7,8,8,10,3,10,3,1 168 | 10,8,10,10,6,1,3,1,10,1 169 | 3,1,1,1,2,1,3,1,1,0 170 | 1,1,1,2,1,1,1,1,1,0 171 | 3,1,1,1,2,1,1,1,1,0 172 | 1,1,1,1,2,1,3,1,1,0 173 | 1,1,1,1,2,1,2,1,1,0 174 | 6,10,10,10,8,10,10,10,7,1 175 | 8,6,5,4,3,10,6,1,1,1 176 | 5,8,7,7,10,10,5,7,1,1 177 
| 2,1,1,1,2,1,3,1,1,0 178 | 5,10,10,3,8,1,5,10,3,1 179 | 4,1,1,1,2,1,3,1,1,0 180 | 5,3,3,3,6,10,3,1,1,1 181 | 1,1,1,1,1,1,3,1,1,0 182 | 1,1,1,1,2,1,1,1,1,0 183 | 6,1,1,1,2,1,3,1,1,0 184 | 5,8,8,8,5,10,7,8,1,1 185 | 8,7,6,4,4,10,5,1,1,1 186 | 2,1,1,1,1,1,3,1,1,0 187 | 1,5,8,6,5,8,7,10,1,1 188 | 10,5,6,10,6,10,7,7,10,1 189 | 5,8,4,10,5,8,9,10,1,1 190 | 1,2,3,1,2,1,3,1,1,0 191 | 10,10,10,8,6,8,7,10,1,1 192 | 7,5,10,10,10,10,4,10,3,1 193 | 5,1,1,1,2,1,2,1,1,0 194 | 1,1,1,1,2,1,3,1,1,0 195 | 3,1,1,1,2,1,3,1,1,0 196 | 4,1,1,1,2,1,3,1,1,0 197 | 8,4,4,5,4,7,7,8,2,0 198 | 5,1,1,4,2,1,3,1,1,0 199 | 1,1,1,1,2,1,1,1,1,0 200 | 3,1,1,1,2,1,2,1,1,0 201 | 9,7,7,5,5,10,7,8,3,1 202 | 10,8,8,4,10,10,8,1,1,1 203 | 1,1,1,1,2,1,3,1,1,0 204 | 5,1,1,1,2,1,3,1,1,0 205 | 1,1,1,1,2,1,3,1,1,0 206 | 5,10,10,9,6,10,7,10,5,1 207 | 10,10,9,3,7,5,3,5,1,1 208 | 1,1,1,1,1,1,3,1,1,0 209 | 1,1,1,1,1,1,3,1,1,0 210 | 5,1,1,1,1,1,3,1,1,0 211 | 8,10,10,10,5,10,8,10,6,1 212 | 8,10,8,8,4,8,7,7,1,1 213 | 1,1,1,1,2,1,3,1,1,0 214 | 10,10,10,10,7,10,7,10,4,1 215 | 10,10,10,10,3,10,10,6,1,1 216 | 8,7,8,7,5,5,5,10,2,1 217 | 1,1,1,1,2,1,2,1,1,0 218 | 1,1,1,1,2,1,3,1,1,0 219 | 6,10,7,7,6,4,8,10,2,1 220 | 6,1,3,1,2,1,3,1,1,0 221 | 1,1,1,2,2,1,3,1,1,0 222 | 10,6,4,3,10,10,9,10,1,1 223 | 4,1,1,3,1,5,2,1,1,1 224 | 7,5,6,3,3,8,7,4,1,1 225 | 10,5,5,6,3,10,7,9,2,1 226 | 1,1,1,1,2,1,2,1,1,0 227 | 10,5,7,4,4,10,8,9,1,1 228 | 8,9,9,5,3,5,7,7,1,1 229 | 1,1,1,1,1,1,3,1,1,0 230 | 10,10,10,3,10,10,9,10,1,1 231 | 7,4,7,4,3,7,7,6,1,1 232 | 6,8,7,5,6,8,8,9,2,1 233 | 8,4,6,3,3,1,4,3,1,0 234 | 10,4,5,5,5,10,4,1,1,1 235 | 3,3,2,1,3,1,3,6,1,0 236 | 3,1,4,1,2,1,3,1,1,0 237 | 10,8,8,2,8,10,4,8,10,1 238 | 9,8,8,5,6,2,4,10,4,1 239 | 8,10,10,8,6,9,3,10,10,1 240 | 10,4,3,2,3,10,5,3,2,1 241 | 5,1,3,3,2,2,2,3,1,0 242 | 3,1,1,3,1,1,3,1,1,0 243 | 2,1,1,1,2,1,3,1,1,0 244 | 1,1,1,1,2,5,5,1,1,0 245 | 1,1,1,1,2,1,3,1,1,0 246 | 5,1,1,2,2,2,3,1,1,0 247 | 8,10,10,8,5,10,7,8,1,1 248 | 8,4,4,1,2,9,3,3,1,1 249 | 4,1,1,1,2,1,3,6,1,0 250 | 3,1,1,1,2,1,3,1,1,0 251 | 1,2,2,1,2,1,1,1,1,0 252 | 10,4,4,10,2,10,5,3,3,1 253 | 6,3,3,5,3,10,3,5,3,0 254 | 6,10,10,2,8,10,7,3,3,1 255 | 9,10,10,1,10,8,3,3,1,1 256 | 5,6,6,2,4,10,3,6,1,1 257 | 3,1,1,1,2,1,1,1,1,0 258 | 3,1,1,1,2,1,2,1,1,0 259 | 3,1,1,1,2,1,3,1,1,0 260 | 5,7,7,1,5,8,3,4,1,0 261 | 10,5,8,10,3,10,5,1,3,1 262 | 5,10,10,6,10,10,10,6,5,1 263 | 8,8,9,4,5,10,7,8,1,1 264 | 10,4,4,10,6,10,5,5,1,1 265 | 7,9,4,10,10,3,5,3,3,1 266 | 5,1,4,1,2,1,3,2,1,0 267 | 10,10,6,3,3,10,4,3,2,1 268 | 3,3,5,2,3,10,7,1,1,1 269 | 10,8,8,2,3,4,8,7,8,1 270 | 1,1,1,1,2,1,3,1,1,0 271 | 8,4,7,1,3,10,3,9,2,1 272 | 5,1,1,1,2,1,3,1,1,0 273 | 3,3,5,2,3,10,7,1,1,1 274 | 7,2,4,1,3,4,3,3,1,1 275 | 3,1,1,1,2,1,3,2,1,0 276 | 3,1,3,1,2,1,2,1,1,0 277 | 3,1,1,1,2,1,2,1,1,0 278 | 1,1,1,1,2,1,2,1,1,0 279 | 1,1,1,1,2,1,3,1,1,0 280 | 10,5,7,3,3,7,3,3,8,1 281 | 3,1,1,1,2,1,3,1,1,0 282 | 2,1,1,2,2,1,3,1,1,0 283 | 1,4,3,10,4,10,5,6,1,1 284 | 10,4,6,1,2,10,5,3,1,1 285 | 7,4,5,10,2,10,3,8,2,1 286 | 8,10,10,10,8,10,10,7,3,1 287 | 10,10,10,10,10,10,4,10,10,1 288 | 3,1,1,1,3,1,2,1,1,0 289 | 6,1,3,1,4,5,5,10,1,1 290 | 5,6,6,8,6,10,4,10,4,1 291 | 1,1,1,1,2,1,1,1,1,0 292 | 1,1,1,1,2,1,3,1,1,0 293 | 8,8,8,1,2,1,6,10,1,1 294 | 10,4,4,6,2,10,2,3,1,1 295 | 1,1,1,1,2,10,2,1,1,0 296 | 5,5,7,8,6,10,7,4,1,1 297 | 5,3,4,3,4,5,4,7,1,0 298 | 5,4,3,1,2,5,2,3,1,0 299 | 8,2,1,1,5,1,1,1,1,0 300 | 9,1,2,6,4,10,7,7,2,1 301 | 8,4,10,5,4,4,7,10,1,1 302 | 1,1,1,1,2,1,3,1,1,0 303 | 10,10,10,7,9,10,7,10,10,1 304 | 1,1,1,1,2,1,3,1,1,0 305 | 8,3,4,9,3,10,3,3,1,1 306 | 10,8,4,4,4,10,3,10,4,1 307 | 
1,1,1,1,2,1,3,1,1,0 308 | 1,1,1,1,2,1,3,1,1,0 309 | 7,8,7,6,4,3,8,8,4,1 310 | 3,1,1,1,2,5,5,1,1,0 311 | 2,1,1,1,3,1,2,1,1,0 312 | 1,1,1,1,2,1,1,1,1,0 313 | 8,6,4,10,10,1,3,5,1,1 314 | 1,1,1,1,2,1,1,1,1,0 315 | 1,1,1,1,1,1,2,1,1,0 316 | 4,6,5,6,7,1,4,9,1,0 317 | 5,5,5,2,5,10,4,3,1,1 318 | 6,8,7,8,6,8,8,9,1,1 319 | 1,1,1,1,5,1,3,1,1,0 320 | 4,4,4,4,6,5,7,3,1,0 321 | 7,6,3,2,5,10,7,4,6,1 322 | 3,1,1,1,2,1,3,1,1,0 323 | 3,1,1,1,2,1,3,1,1,0 324 | 5,4,6,10,2,10,4,1,1,1 325 | 1,1,1,1,2,1,3,1,1,0 326 | 3,2,2,1,2,1,2,3,1,0 327 | 10,1,1,1,2,10,5,4,1,1 328 | 1,1,1,1,2,1,2,1,1,0 329 | 8,10,3,2,6,4,3,10,1,1 330 | 10,4,6,4,5,10,7,1,1,1 331 | 10,4,7,2,2,8,6,1,1,1 332 | 5,1,1,1,2,1,3,1,2,0 333 | 5,2,2,2,2,1,2,2,1,0 334 | 5,4,6,6,4,10,4,3,1,1 335 | 8,6,7,3,3,10,3,4,2,1 336 | 1,1,1,1,2,1,1,1,1,0 337 | 6,5,5,8,4,10,3,4,1,1 338 | 1,1,1,1,2,1,3,1,1,0 339 | 1,1,1,1,1,1,2,1,1,0 340 | 8,5,5,5,2,10,4,3,1,1 341 | 10,3,3,1,2,10,7,6,1,1 342 | 1,1,1,1,2,1,3,1,1,0 343 | 2,1,1,1,2,1,1,1,1,0 344 | 1,1,1,1,2,1,1,1,1,0 345 | 7,6,4,8,10,10,9,5,3,1 346 | 1,1,1,1,2,1,1,1,1,0 347 | 5,2,2,2,3,1,1,3,1,0 348 | 1,1,1,1,1,1,1,3,1,0 349 | 3,4,4,10,5,1,3,3,1,1 350 | 4,2,3,5,3,8,7,6,1,1 351 | 5,1,1,3,2,1,1,1,1,0 352 | 2,1,1,1,2,1,3,1,1,0 353 | 3,4,5,3,7,3,4,6,1,0 354 | 2,7,10,10,7,10,4,9,4,1 355 | 1,1,1,1,2,1,2,1,1,0 356 | 4,1,1,1,3,1,2,2,1,0 357 | 5,3,3,1,3,3,3,3,3,1 358 | 8,10,10,7,10,10,7,3,8,1 359 | 8,10,5,3,8,4,4,10,3,1 360 | 10,3,5,4,3,7,3,5,3,1 361 | 6,10,10,10,10,10,8,10,10,1 362 | 3,10,3,10,6,10,5,1,4,1 363 | 3,2,2,1,4,3,2,1,1,0 364 | 4,4,4,2,2,3,2,1,1,0 365 | 2,1,1,1,2,1,3,1,1,0 366 | 2,1,1,1,2,1,2,1,1,0 367 | 6,10,10,10,8,10,7,10,7,1 368 | 5,8,8,10,5,10,8,10,3,1 369 | 1,1,3,1,2,1,1,1,1,0 370 | 1,1,3,1,1,1,2,1,1,0 371 | 4,3,2,1,3,1,2,1,1,0 372 | 1,1,3,1,2,1,1,1,1,0 373 | 4,1,2,1,2,1,2,1,1,0 374 | 5,1,1,2,2,1,2,1,1,0 375 | 3,1,2,1,2,1,2,1,1,0 376 | 1,1,1,1,2,1,1,1,1,0 377 | 1,1,1,1,2,1,2,1,1,0 378 | 1,1,1,1,1,1,2,1,1,0 379 | 3,1,1,4,3,1,2,2,1,0 380 | 5,3,4,1,4,1,3,1,1,0 381 | 1,1,1,1,2,1,1,1,1,0 382 | 10,6,3,6,4,10,7,8,4,1 383 | 3,2,2,2,2,1,3,2,1,0 384 | 2,1,1,1,2,1,1,1,1,0 385 | 2,1,1,1,2,1,1,1,1,0 386 | 3,3,2,2,3,1,1,2,3,0 387 | 7,6,6,3,2,10,7,1,1,1 388 | 5,3,3,2,3,1,3,1,1,0 389 | 2,1,1,1,2,1,2,2,1,0 390 | 5,1,1,1,3,2,2,2,1,0 391 | 1,1,1,2,2,1,2,1,1,0 392 | 10,8,7,4,3,10,7,9,1,1 393 | 3,1,1,1,2,1,2,1,1,0 394 | 1,1,1,1,1,1,1,1,1,0 395 | 1,2,3,1,2,1,2,1,1,0 396 | 3,1,1,1,2,1,2,1,1,0 397 | 3,1,1,1,2,1,3,1,1,0 398 | 4,1,1,1,2,1,1,1,1,0 399 | 3,2,1,1,2,1,2,2,1,0 400 | 1,2,3,1,2,1,1,1,1,0 401 | 3,10,8,7,6,9,9,3,8,1 402 | 3,1,1,1,2,1,1,1,1,0 403 | 5,3,3,1,2,1,2,1,1,0 404 | 3,1,1,1,2,4,1,1,1,0 405 | 1,2,1,3,2,1,1,2,1,0 406 | 1,1,1,1,2,1,2,1,1,0 407 | 4,2,2,1,2,1,2,1,1,0 408 | 1,1,1,1,2,1,2,1,1,0 409 | 2,3,2,2,2,2,3,1,1,0 410 | 3,1,2,1,2,1,2,1,1,0 411 | 1,1,1,1,2,1,2,1,1,0 412 | 1,1,1,1,1,1,2,1,1,0 413 | 10,10,10,6,8,4,8,5,1,1 414 | 5,1,2,1,2,1,3,1,1,0 415 | 8,5,6,2,3,10,6,6,1,1 416 | 3,3,2,6,3,3,3,5,1,0 417 | 8,7,8,5,10,10,7,2,1,1 418 | 1,1,1,1,2,1,2,1,1,0 419 | 5,2,2,2,2,2,3,2,2,0 420 | 2,3,1,1,5,1,1,1,1,0 421 | 3,2,2,3,2,3,3,1,1,0 422 | 10,10,10,7,10,10,8,2,1,1 423 | 4,3,3,1,2,1,3,3,1,0 424 | 5,1,3,1,2,1,2,1,1,0 425 | 3,1,1,1,2,1,1,1,1,0 426 | 9,10,10,10,10,10,10,10,1,1 427 | 5,3,6,1,2,1,1,1,1,0 428 | 8,7,8,2,4,2,5,10,1,1 429 | 1,1,1,1,2,1,2,1,1,0 430 | 2,1,1,1,2,1,2,1,1,0 431 | 1,3,1,1,2,1,2,2,1,0 432 | 5,1,1,3,4,1,3,2,1,0 433 | 5,1,1,1,2,1,2,2,1,0 434 | 3,2,2,3,2,1,1,1,1,0 435 | 6,9,7,5,5,8,4,2,1,0 436 | 10,8,10,1,3,10,5,1,1,1 437 | 10,10,10,1,6,1,2,8,1,1 438 | 4,1,1,1,2,1,1,1,1,0 439 | 4,1,3,3,2,1,1,1,1,0 440 | 
5,1,1,1,2,1,1,1,1,0 441 | 10,4,3,10,4,10,10,1,1,1 442 | 5,2,2,4,2,4,1,1,1,0 443 | 1,1,1,3,2,3,1,1,1,0 444 | 1,1,1,1,2,2,1,1,1,0 445 | 5,1,1,6,3,1,2,1,1,0 446 | 2,1,1,1,2,1,1,1,1,0 447 | 1,1,1,1,2,1,1,1,1,0 448 | 5,1,1,1,2,1,1,1,1,0 449 | 1,1,1,1,1,1,1,1,1,0 450 | 5,7,9,8,6,10,8,10,1,1 451 | 4,1,1,3,1,1,2,1,1,0 452 | 5,1,1,1,2,1,1,1,1,0 453 | 3,1,1,3,2,1,1,1,1,0 454 | 4,5,5,8,6,10,10,7,1,1 455 | 2,3,1,1,3,1,1,1,1,0 456 | 10,2,2,1,2,6,1,1,2,1 457 | 10,6,5,8,5,10,8,6,1,1 458 | 8,8,9,6,6,3,10,10,1,1 459 | 5,1,2,1,2,1,1,1,1,0 460 | 5,1,3,1,2,1,1,1,1,0 461 | 5,1,1,3,2,1,1,1,1,0 462 | 3,1,1,1,2,5,1,1,1,0 463 | 6,1,1,3,2,1,1,1,1,0 464 | 4,1,1,1,2,1,1,2,1,0 465 | 4,1,1,1,2,1,1,1,1,0 466 | 10,9,8,7,6,4,7,10,3,1 467 | 10,6,6,2,4,10,9,7,1,1 468 | 6,6,6,5,4,10,7,6,2,1 469 | 4,1,1,1,2,1,1,1,1,0 470 | 1,1,2,1,2,1,2,1,1,0 471 | 3,1,1,1,1,1,2,1,1,0 472 | 6,1,1,3,2,1,1,1,1,0 473 | 6,1,1,1,1,1,1,1,1,0 474 | 4,1,1,1,2,1,1,1,1,0 475 | 5,1,1,1,2,1,1,1,1,0 476 | 3,1,1,1,2,1,1,1,1,0 477 | 4,1,2,1,2,1,1,1,1,0 478 | 4,1,1,1,2,1,1,1,1,0 479 | 5,2,1,1,2,1,1,1,1,0 480 | 4,8,7,10,4,10,7,5,1,1 481 | 5,1,1,1,1,1,1,1,1,0 482 | 5,3,2,4,2,1,1,1,1,0 483 | 9,10,10,10,10,5,10,10,10,1 484 | 8,7,8,5,5,10,9,10,1,1 485 | 5,1,2,1,2,1,1,1,1,0 486 | 1,1,1,3,1,3,1,1,1,0 487 | 3,1,1,1,1,1,2,1,1,0 488 | 10,10,10,10,6,10,8,1,5,1 489 | 3,6,4,10,3,3,3,4,1,1 490 | 6,3,2,1,3,4,4,1,1,1 491 | 1,1,1,1,2,1,1,1,1,0 492 | 5,8,9,4,3,10,7,1,1,1 493 | 4,1,1,1,1,1,2,1,1,0 494 | 5,10,10,10,6,10,6,5,2,1 495 | 5,1,2,10,4,5,2,1,1,0 496 | 3,1,1,1,1,1,2,1,1,0 497 | 1,1,1,1,1,1,1,1,1,0 498 | 4,2,1,1,2,1,1,1,1,0 499 | 4,1,1,1,2,1,2,1,1,0 500 | 4,1,1,1,2,1,2,1,1,0 501 | 6,1,1,1,2,1,3,1,1,0 502 | 4,1,1,1,2,1,2,1,1,0 503 | 4,1,1,2,2,1,2,1,1,0 504 | 4,1,1,1,2,1,3,1,1,0 505 | 1,1,1,1,2,1,1,1,1,0 506 | 3,3,1,1,2,1,1,1,1,0 507 | 8,10,10,10,7,5,4,8,7,1 508 | 1,1,1,1,2,4,1,1,1,0 509 | 5,1,1,1,2,1,1,1,1,0 510 | 2,1,1,1,2,1,1,1,1,0 511 | 1,1,1,1,2,1,1,1,1,0 512 | 5,1,1,1,2,1,2,1,1,0 513 | 5,1,1,1,2,1,1,1,1,0 514 | 3,1,1,1,1,1,2,1,1,0 515 | 6,6,7,10,3,10,8,10,2,1 516 | 4,10,4,7,3,10,9,10,1,1 517 | 1,1,1,1,1,1,1,1,1,0 518 | 1,1,1,1,1,1,2,1,1,0 519 | 3,1,2,2,2,1,1,1,1,0 520 | 4,7,8,3,4,10,9,1,1,1 521 | 1,1,1,1,3,1,1,1,1,0 522 | 4,1,1,1,3,1,1,1,1,0 523 | 10,4,5,4,3,5,7,3,1,1 524 | 7,5,6,10,4,10,5,3,1,1 525 | 3,1,1,1,2,1,2,1,1,0 526 | 3,1,1,2,2,1,1,1,1,0 527 | 4,1,1,1,2,1,1,1,1,0 528 | 4,1,1,1,2,1,3,1,1,0 529 | 6,1,3,2,2,1,1,1,1,0 530 | 4,1,1,1,1,1,2,1,1,0 531 | 7,4,4,3,4,10,6,9,1,1 532 | 4,2,2,1,2,1,2,1,1,0 533 | 1,1,1,1,1,1,3,1,1,0 534 | 3,1,1,1,2,1,2,1,1,0 535 | 2,1,1,1,2,1,2,1,1,0 536 | 1,1,3,2,2,1,3,1,1,0 537 | 5,1,1,1,2,1,3,1,1,0 538 | 5,1,2,1,2,1,3,1,1,0 539 | 4,1,1,1,2,1,2,1,1,0 540 | 6,1,1,1,2,1,2,1,1,0 541 | 5,1,1,1,2,2,2,1,1,0 542 | 3,1,1,1,2,1,1,1,1,0 543 | 5,3,1,1,2,1,1,1,1,0 544 | 4,1,1,1,2,1,2,1,1,0 545 | 2,1,3,2,2,1,2,1,1,0 546 | 5,1,1,1,2,1,2,1,1,0 547 | 6,10,10,10,4,10,7,10,1,1 548 | 2,1,1,1,1,1,1,1,1,0 549 | 3,1,1,1,1,1,1,1,1,0 550 | 7,8,3,7,4,5,7,8,2,1 551 | 3,1,1,1,2,1,2,1,1,0 552 | 1,1,1,1,2,1,3,1,1,0 553 | 3,2,2,2,2,1,4,2,1,0 554 | 4,4,2,1,2,5,2,1,2,0 555 | 3,1,1,1,2,1,1,1,1,0 556 | 4,3,1,1,2,1,4,8,1,0 557 | 5,2,2,2,1,1,2,1,1,0 558 | 5,1,1,3,2,1,1,1,1,0 559 | 2,1,1,1,2,1,2,1,1,0 560 | 5,1,1,1,2,1,2,1,1,0 561 | 5,1,1,1,2,1,3,1,1,0 562 | 5,1,1,1,2,1,3,1,1,0 563 | 1,1,1,1,2,1,3,1,1,0 564 | 3,1,1,1,2,1,2,1,1,0 565 | 4,1,1,1,2,1,3,2,1,0 566 | 5,7,10,10,5,10,10,10,1,1 567 | 3,1,2,1,2,1,3,1,1,0 568 | 4,1,1,1,2,3,2,1,1,0 569 | 8,4,4,1,6,10,2,5,2,1 570 | 10,10,8,10,6,5,10,3,1,1 571 | 8,10,4,4,8,10,8,2,1,1 572 | 7,6,10,5,3,10,9,10,2,1 573 | 3,1,1,1,2,1,2,1,1,0 
574 | 1,1,1,1,2,1,2,1,1,0 575 | 10,9,7,3,4,2,7,7,1,1 576 | 5,1,2,1,2,1,3,1,1,0 577 | 5,1,1,1,2,1,2,1,1,0 578 | 1,1,1,1,2,1,2,1,1,0 579 | 1,1,1,1,2,1,2,1,1,0 580 | 1,1,1,1,2,1,3,1,1,0 581 | 5,1,2,1,2,1,2,1,1,0 582 | 5,7,10,6,5,10,7,5,1,1 583 | 6,10,5,5,4,10,6,10,1,1 584 | 3,1,1,1,2,1,1,1,1,0 585 | 5,1,1,6,3,1,1,1,1,0 586 | 1,1,1,1,2,1,1,1,1,0 587 | 8,10,10,10,6,10,10,10,1,1 588 | 5,1,1,1,2,1,2,2,1,0 589 | 9,8,8,9,6,3,4,1,1,1 590 | 5,1,1,1,2,1,1,1,1,0 591 | 4,10,8,5,4,1,10,1,1,1 592 | 2,5,7,6,4,10,7,6,1,1 593 | 10,3,4,5,3,10,4,1,1,1 594 | 5,1,2,1,2,1,1,1,1,0 595 | 4,8,6,3,4,10,7,1,1,1 596 | 5,1,1,1,2,1,2,1,1,0 597 | 4,1,2,1,2,1,2,1,1,0 598 | 5,1,3,1,2,1,3,1,1,0 599 | 3,1,1,1,2,1,2,1,1,0 600 | 5,2,4,1,1,1,1,1,1,0 601 | 3,1,1,1,2,1,2,1,1,0 602 | 1,1,1,1,1,1,2,1,1,0 603 | 4,1,1,1,2,1,2,1,1,0 604 | 5,4,6,8,4,1,8,10,1,1 605 | 5,3,2,8,5,10,8,1,2,1 606 | 10,5,10,3,5,8,7,8,3,1 607 | 4,1,1,2,2,1,1,1,1,0 608 | 1,1,1,1,2,1,1,1,1,0 609 | 5,10,10,10,10,10,10,1,1,1 610 | 5,1,1,1,2,1,1,1,1,0 611 | 10,4,3,10,3,10,7,1,2,1 612 | 5,10,10,10,5,2,8,5,1,1 613 | 8,10,10,10,6,10,10,10,10,1 614 | 2,3,1,1,2,1,2,1,1,0 615 | 2,1,1,1,1,1,2,1,1,0 616 | 4,1,3,1,2,1,2,1,1,0 617 | 3,1,1,1,2,1,2,1,1,0 618 | 1,1,1,1,1,1,1,1,1,0 619 | 4,1,1,1,2,1,2,1,1,0 620 | 5,1,1,1,2,1,2,1,1,0 621 | 3,1,1,1,2,1,2,1,1,0 622 | 6,3,3,3,3,2,6,1,1,0 623 | 7,1,2,3,2,1,2,1,1,0 624 | 1,1,1,1,2,1,1,1,1,0 625 | 5,1,1,2,1,1,2,1,1,0 626 | 3,1,3,1,3,4,1,1,1,0 627 | 4,6,6,5,7,6,7,7,3,1 628 | 2,1,1,1,2,5,1,1,1,0 629 | 2,1,1,1,2,1,1,1,1,0 630 | 4,1,1,1,2,1,1,1,1,0 631 | 6,2,3,1,2,1,1,1,1,0 632 | 5,1,1,1,2,1,2,1,1,0 633 | 1,1,1,1,2,1,1,1,1,0 634 | 8,7,4,4,5,3,5,10,1,1 635 | 3,1,1,1,2,1,1,1,1,0 636 | 3,1,4,1,2,1,1,1,1,0 637 | 10,10,7,8,7,1,10,10,3,1 638 | 4,2,4,3,2,2,2,1,1,0 639 | 4,1,1,1,2,1,1,1,1,0 640 | 5,1,1,3,2,1,1,1,1,0 641 | 4,1,1,3,2,1,1,1,1,0 642 | 3,1,1,1,2,1,2,1,1,0 643 | 3,1,1,1,2,1,2,1,1,0 644 | 1,1,1,1,2,1,1,1,1,0 645 | 2,1,1,1,2,1,1,1,1,0 646 | 3,1,1,1,2,1,2,1,1,0 647 | 1,2,2,1,2,1,1,1,1,0 648 | 1,1,1,3,2,1,1,1,1,0 649 | 5,10,10,10,10,2,10,10,10,1 650 | 3,1,1,1,2,1,2,1,1,0 651 | 3,1,1,2,3,4,1,1,1,0 652 | 1,2,1,3,2,1,2,1,1,0 653 | 5,1,1,1,2,1,2,2,1,0 654 | 4,1,1,1,2,1,2,1,1,0 655 | 3,1,1,1,2,1,3,1,1,0 656 | 3,1,1,1,2,1,2,1,1,0 657 | 5,1,1,1,2,1,2,1,1,0 658 | 5,4,5,1,8,1,3,6,1,0 659 | 7,8,8,7,3,10,7,2,3,1 660 | 1,1,1,1,2,1,1,1,1,0 661 | 1,1,1,1,2,1,2,1,1,0 662 | 4,1,1,1,2,1,3,1,1,0 663 | 1,1,3,1,2,1,2,1,1,0 664 | 1,1,3,1,2,1,2,1,1,0 665 | 3,1,1,3,2,1,2,1,1,0 666 | 1,1,1,1,2,1,1,1,1,0 667 | 5,2,2,2,2,1,1,1,2,0 668 | 3,1,1,1,2,1,3,1,1,0 669 | 5,7,4,1,6,1,7,10,3,1 670 | 5,10,10,8,5,5,7,10,1,1 671 | 3,10,7,8,5,8,7,4,1,1 672 | 3,2,1,2,2,1,3,1,1,0 673 | 2,1,1,1,2,1,3,1,1,0 674 | 5,3,2,1,3,1,1,1,1,0 675 | 1,1,1,1,2,1,2,1,1,0 676 | 4,1,4,1,2,1,1,1,1,0 677 | 1,1,2,1,2,1,2,1,1,0 678 | 5,1,1,1,2,1,1,1,1,0 679 | 1,1,1,1,2,1,1,1,1,0 680 | 2,1,1,1,2,1,1,1,1,0 681 | 10,10,10,10,5,10,10,10,7,1 682 | 5,10,10,10,4,10,5,6,3,1 683 | 5,1,1,1,2,1,3,2,1,0 684 | 1,1,1,1,2,1,1,1,1,0 685 | 1,1,1,1,2,1,1,1,1,0 686 | 1,1,1,1,2,1,1,1,1,0 687 | 1,1,1,1,2,1,1,1,1,0 688 | 3,1,1,1,2,1,2,3,1,0 689 | 4,1,1,1,2,1,1,1,1,0 690 | 1,1,1,1,2,1,1,1,8,0 691 | 1,1,1,3,2,1,1,1,1,0 692 | 5,10,10,5,4,5,4,4,1,1 693 | 3,1,1,1,2,1,1,1,1,0 694 | 3,1,1,1,2,1,2,1,2,0 695 | 3,1,1,1,3,2,1,1,1,0 696 | 2,1,1,1,2,1,1,1,1,0 697 | 5,10,10,3,7,3,8,10,2,1 698 | 4,8,6,4,3,4,10,6,1,1 699 | 4,8,8,5,4,5,10,4,1,1 700 | -------------------------------------------------------------------------------- /Data Sets/breast_wisconsin.csv: -------------------------------------------------------------------------------- 
1 | 5,1,1,1,2,1,3,1,1 2 | 5,4,4,5,7,10,3,2,1 3 | 3,1,1,1,2,2,3,1,1 4 | 6,8,8,1,3,4,3,7,1 5 | 4,1,1,3,2,1,3,1,1 6 | 8,10,10,8,7,10,9,7,1 7 | 1,1,1,1,2,10,3,1,1 8 | 2,1,2,1,2,1,3,1,1 9 | 2,1,1,1,2,1,1,1,5 10 | 4,2,1,1,2,1,2,1,1 11 | 1,1,1,1,1,1,3,1,1 12 | 2,1,1,1,2,1,2,1,1 13 | 5,3,3,3,2,3,4,4,1 14 | 1,1,1,1,2,3,3,1,1 15 | 8,7,5,10,7,9,5,5,4 16 | 7,4,6,4,6,1,4,3,1 17 | 4,1,1,1,2,1,2,1,1 18 | 4,1,1,1,2,1,3,1,1 19 | 10,7,7,6,4,10,4,1,2 20 | 6,1,1,1,2,1,3,1,1 21 | 7,3,2,10,5,10,5,4,4 22 | 10,5,5,3,6,7,7,10,1 23 | 3,1,1,1,2,1,2,1,1 24 | 8,4,5,1,2,1,7,3,1 25 | 1,1,1,1,2,1,3,1,1 26 | 5,2,3,4,2,7,3,6,1 27 | 3,2,1,1,1,1,2,1,1 28 | 5,1,1,1,2,1,2,1,1 29 | 2,1,1,1,2,1,2,1,1 30 | 1,1,3,1,2,1,1,1,1 31 | 3,1,1,1,1,1,2,1,1 32 | 2,1,1,1,2,1,3,1,1 33 | 10,7,7,3,8,5,7,4,3 34 | 2,1,1,2,2,1,3,1,1 35 | 3,1,2,1,2,1,2,1,1 36 | 2,1,1,1,2,1,2,1,1 37 | 10,10,10,8,6,1,8,9,1 38 | 6,2,1,1,1,1,7,1,1 39 | 5,4,4,9,2,10,5,6,1 40 | 2,5,3,3,6,7,7,5,1 41 | 6,6,6,9,6,1,7,8,1 42 | 10,4,3,1,3,3,6,5,2 43 | 6,10,10,2,8,10,7,3,3 44 | 5,6,5,6,10,1,3,1,1 45 | 10,10,10,4,8,1,8,10,1 46 | 1,1,1,1,2,1,2,1,2 47 | 3,7,7,4,4,9,4,8,1 48 | 1,1,1,1,2,1,2,1,1 49 | 4,1,1,3,2,1,3,1,1 50 | 7,8,7,2,4,8,3,8,2 51 | 9,5,8,1,2,3,2,1,5 52 | 5,3,3,4,2,4,3,4,1 53 | 10,3,6,2,3,5,4,10,2 54 | 5,5,5,8,10,8,7,3,7 55 | 10,5,5,6,8,8,7,1,1 56 | 10,6,6,3,4,5,3,6,1 57 | 8,10,10,1,3,6,3,9,1 58 | 8,2,4,1,5,1,5,4,4 59 | 5,2,3,1,6,10,5,1,1 60 | 9,5,5,2,2,2,5,1,1 61 | 5,3,5,5,3,3,4,10,1 62 | 1,1,1,1,2,2,2,1,1 63 | 9,10,10,1,10,8,3,3,1 64 | 6,3,4,1,5,2,3,9,1 65 | 1,1,1,1,2,1,2,1,1 66 | 10,4,2,1,3,2,4,3,10 67 | 4,1,1,1,2,1,3,1,1 68 | 5,3,4,1,8,10,4,9,1 69 | 8,3,8,3,4,9,8,9,8 70 | 1,1,1,1,2,1,3,2,1 71 | 5,1,3,1,2,1,2,1,1 72 | 6,10,2,8,10,2,7,8,10 73 | 1,3,3,2,2,1,7,2,1 74 | 9,4,5,10,6,10,4,8,1 75 | 10,6,4,1,3,4,3,2,3 76 | 1,1,2,1,2,2,4,2,1 77 | 1,1,4,1,2,1,2,1,1 78 | 5,3,1,2,2,1,2,1,1 79 | 3,1,1,1,2,3,3,1,1 80 | 2,1,1,1,3,1,2,1,1 81 | 2,2,2,1,1,1,7,1,1 82 | 4,1,1,2,2,1,2,1,1 83 | 5,2,1,1,2,1,3,1,1 84 | 3,1,1,1,2,2,7,1,1 85 | 3,5,7,8,8,9,7,10,7 86 | 5,10,6,1,10,4,4,10,10 87 | 3,3,6,4,5,8,4,4,1 88 | 3,6,6,6,5,10,6,8,3 89 | 4,1,1,1,2,1,3,1,1 90 | 2,1,1,2,3,1,2,1,1 91 | 1,1,1,1,2,1,3,1,1 92 | 3,1,1,2,2,1,1,1,1 93 | 4,1,1,1,2,1,3,1,1 94 | 1,1,1,1,2,1,2,1,1 95 | 2,1,1,1,2,1,3,1,1 96 | 1,1,1,1,2,1,3,1,1 97 | 2,1,1,2,2,1,1,1,1 98 | 5,1,1,1,2,1,3,1,1 99 | 9,6,9,2,10,6,2,9,10 100 | 7,5,6,10,5,10,7,9,4 101 | 10,3,5,1,10,5,3,10,2 102 | 2,3,4,4,2,5,2,5,1 103 | 4,1,2,1,2,1,3,1,1 104 | 8,2,3,1,6,3,7,1,1 105 | 10,10,10,10,10,1,8,8,8 106 | 7,3,4,4,3,3,3,2,7 107 | 10,10,10,8,2,10,4,1,1 108 | 1,6,8,10,8,10,5,7,1 109 | 1,1,1,1,2,1,2,3,1 110 | 6,5,4,4,3,9,7,8,3 111 | 1,3,1,2,2,2,5,3,2 112 | 8,6,4,3,5,9,3,1,1 113 | 10,3,3,10,2,10,7,3,3 114 | 10,10,10,3,10,8,8,1,1 115 | 3,3,2,1,2,3,3,1,1 116 | 1,1,1,1,2,5,1,1,1 117 | 8,3,3,1,2,2,3,2,1 118 | 4,5,5,10,4,10,7,5,8 119 | 1,1,1,1,4,3,1,1,1 120 | 3,2,1,1,2,2,3,1,1 121 | 1,1,2,2,2,1,3,1,1 122 | 4,2,1,1,2,2,3,1,1 123 | 10,10,10,2,10,10,5,3,3 124 | 5,3,5,1,8,10,5,3,1 125 | 5,4,6,7,9,7,8,10,1 126 | 1,1,1,1,2,1,2,1,1 127 | 7,5,3,7,4,10,7,5,5 128 | 3,1,1,1,2,1,3,1,1 129 | 8,3,5,4,5,10,1,6,2 130 | 1,1,1,1,10,1,1,1,1 131 | 5,1,3,1,2,1,2,1,1 132 | 2,1,1,1,2,1,3,1,1 133 | 5,10,8,10,8,10,3,6,3 134 | 3,1,1,1,2,1,2,2,1 135 | 3,1,1,1,3,1,2,1,1 136 | 5,1,1,1,2,2,3,3,1 137 | 4,1,1,1,2,1,2,1,1 138 | 3,1,1,1,2,1,1,1,1 139 | 4,1,2,1,2,1,2,1,1 140 | 1,1,1,1,1,1,2,1,1 141 | 3,1,1,1,2,1,1,1,1 142 | 2,1,1,1,2,1,1,1,1 143 | 9,5,5,4,4,5,4,3,3 144 | 1,1,1,1,2,5,1,1,1 145 | 2,1,1,1,2,1,2,1,1 146 | 1,1,3,1,2,1,2,1,1 147 | 3,4,5,2,6,8,4,1,1 148 | 1,1,1,1,3,2,2,1,1 149 | 
3,1,1,3,8,1,5,8,1 150 | 8,8,7,4,10,10,7,8,7 151 | 1,1,1,1,1,1,3,1,1 152 | 7,2,4,1,6,10,5,4,3 153 | 10,10,8,6,4,5,8,10,1 154 | 4,1,1,1,2,3,1,1,1 155 | 1,1,1,1,2,1,1,1,1 156 | 5,5,5,6,3,10,3,1,1 157 | 1,2,2,1,2,1,2,1,1 158 | 2,1,1,1,2,1,3,1,1 159 | 1,1,2,1,3,1,1,1,1 160 | 9,9,10,3,6,10,7,10,6 161 | 10,7,7,4,5,10,5,7,2 162 | 4,1,1,1,2,1,3,2,1 163 | 3,1,1,1,2,1,3,1,1 164 | 1,1,1,2,1,3,1,1,7 165 | 5,1,1,1,2,1,3,1,1 166 | 4,1,1,1,2,2,3,2,1 167 | 5,6,7,8,8,10,3,10,3 168 | 10,8,10,10,6,1,3,1,10 169 | 3,1,1,1,2,1,3,1,1 170 | 1,1,1,2,1,1,1,1,1 171 | 3,1,1,1,2,1,1,1,1 172 | 1,1,1,1,2,1,3,1,1 173 | 1,1,1,1,2,1,2,1,1 174 | 6,10,10,10,8,10,10,10,7 175 | 8,6,5,4,3,10,6,1,1 176 | 5,8,7,7,10,10,5,7,1 177 | 2,1,1,1,2,1,3,1,1 178 | 5,10,10,3,8,1,5,10,3 179 | 4,1,1,1,2,1,3,1,1 180 | 5,3,3,3,6,10,3,1,1 181 | 1,1,1,1,1,1,3,1,1 182 | 1,1,1,1,2,1,1,1,1 183 | 6,1,1,1,2,1,3,1,1 184 | 5,8,8,8,5,10,7,8,1 185 | 8,7,6,4,4,10,5,1,1 186 | 2,1,1,1,1,1,3,1,1 187 | 1,5,8,6,5,8,7,10,1 188 | 10,5,6,10,6,10,7,7,10 189 | 5,8,4,10,5,8,9,10,1 190 | 1,2,3,1,2,1,3,1,1 191 | 10,10,10,8,6,8,7,10,1 192 | 7,5,10,10,10,10,4,10,3 193 | 5,1,1,1,2,1,2,1,1 194 | 1,1,1,1,2,1,3,1,1 195 | 3,1,1,1,2,1,3,1,1 196 | 4,1,1,1,2,1,3,1,1 197 | 8,4,4,5,4,7,7,8,2 198 | 5,1,1,4,2,1,3,1,1 199 | 1,1,1,1,2,1,1,1,1 200 | 3,1,1,1,2,1,2,1,1 201 | 9,7,7,5,5,10,7,8,3 202 | 10,8,8,4,10,10,8,1,1 203 | 1,1,1,1,2,1,3,1,1 204 | 5,1,1,1,2,1,3,1,1 205 | 1,1,1,1,2,1,3,1,1 206 | 5,10,10,9,6,10,7,10,5 207 | 10,10,9,3,7,5,3,5,1 208 | 1,1,1,1,1,1,3,1,1 209 | 1,1,1,1,1,1,3,1,1 210 | 5,1,1,1,1,1,3,1,1 211 | 8,10,10,10,5,10,8,10,6 212 | 8,10,8,8,4,8,7,7,1 213 | 1,1,1,1,2,1,3,1,1 214 | 10,10,10,10,7,10,7,10,4 215 | 10,10,10,10,3,10,10,6,1 216 | 8,7,8,7,5,5,5,10,2 217 | 1,1,1,1,2,1,2,1,1 218 | 1,1,1,1,2,1,3,1,1 219 | 6,10,7,7,6,4,8,10,2 220 | 6,1,3,1,2,1,3,1,1 221 | 1,1,1,2,2,1,3,1,1 222 | 10,6,4,3,10,10,9,10,1 223 | 4,1,1,3,1,5,2,1,1 224 | 7,5,6,3,3,8,7,4,1 225 | 10,5,5,6,3,10,7,9,2 226 | 1,1,1,1,2,1,2,1,1 227 | 10,5,7,4,4,10,8,9,1 228 | 8,9,9,5,3,5,7,7,1 229 | 1,1,1,1,1,1,3,1,1 230 | 10,10,10,3,10,10,9,10,1 231 | 7,4,7,4,3,7,7,6,1 232 | 6,8,7,5,6,8,8,9,2 233 | 8,4,6,3,3,1,4,3,1 234 | 10,4,5,5,5,10,4,1,1 235 | 3,3,2,1,3,1,3,6,1 236 | 3,1,4,1,2,1,3,1,1 237 | 10,8,8,2,8,10,4,8,10 238 | 9,8,8,5,6,2,4,10,4 239 | 8,10,10,8,6,9,3,10,10 240 | 10,4,3,2,3,10,5,3,2 241 | 5,1,3,3,2,2,2,3,1 242 | 3,1,1,3,1,1,3,1,1 243 | 2,1,1,1,2,1,3,1,1 244 | 1,1,1,1,2,5,5,1,1 245 | 1,1,1,1,2,1,3,1,1 246 | 5,1,1,2,2,2,3,1,1 247 | 8,10,10,8,5,10,7,8,1 248 | 8,4,4,1,2,9,3,3,1 249 | 4,1,1,1,2,1,3,6,1 250 | 3,1,1,1,2,1,3,1,1 251 | 1,2,2,1,2,1,1,1,1 252 | 10,4,4,10,2,10,5,3,3 253 | 6,3,3,5,3,10,3,5,3 254 | 6,10,10,2,8,10,7,3,3 255 | 9,10,10,1,10,8,3,3,1 256 | 5,6,6,2,4,10,3,6,1 257 | 3,1,1,1,2,1,1,1,1 258 | 3,1,1,1,2,1,2,1,1 259 | 3,1,1,1,2,1,3,1,1 260 | 5,7,7,1,5,8,3,4,1 261 | 10,5,8,10,3,10,5,1,3 262 | 5,10,10,6,10,10,10,6,5 263 | 8,8,9,4,5,10,7,8,1 264 | 10,4,4,10,6,10,5,5,1 265 | 7,9,4,10,10,3,5,3,3 266 | 5,1,4,1,2,1,3,2,1 267 | 10,10,6,3,3,10,4,3,2 268 | 3,3,5,2,3,10,7,1,1 269 | 10,8,8,2,3,4,8,7,8 270 | 1,1,1,1,2,1,3,1,1 271 | 8,4,7,1,3,10,3,9,2 272 | 5,1,1,1,2,1,3,1,1 273 | 3,3,5,2,3,10,7,1,1 274 | 7,2,4,1,3,4,3,3,1 275 | 3,1,1,1,2,1,3,2,1 276 | 3,1,3,1,2,1,2,1,1 277 | 3,1,1,1,2,1,2,1,1 278 | 1,1,1,1,2,1,2,1,1 279 | 1,1,1,1,2,1,3,1,1 280 | 10,5,7,3,3,7,3,3,8 281 | 3,1,1,1,2,1,3,1,1 282 | 2,1,1,2,2,1,3,1,1 283 | 1,4,3,10,4,10,5,6,1 284 | 10,4,6,1,2,10,5,3,1 285 | 7,4,5,10,2,10,3,8,2 286 | 8,10,10,10,8,10,10,7,3 287 | 10,10,10,10,10,10,4,10,10 288 | 3,1,1,1,3,1,2,1,1 289 | 6,1,3,1,4,5,5,10,1 290 | 
5,6,6,8,6,10,4,10,4 291 | 1,1,1,1,2,1,1,1,1 292 | 1,1,1,1,2,1,3,1,1 293 | 8,8,8,1,2,1,6,10,1 294 | 10,4,4,6,2,10,2,3,1 295 | 1,1,1,1,2,1,2,1,1 296 | 5,5,7,8,6,10,7,4,1 297 | 5,3,4,3,4,5,4,7,1 298 | 5,4,3,1,2,1,2,3,1 299 | 8,2,1,1,5,1,1,1,1 300 | 9,1,2,6,4,10,7,7,2 301 | 8,4,10,5,4,4,7,10,1 302 | 1,1,1,1,2,1,3,1,1 303 | 10,10,10,7,9,10,7,10,10 304 | 1,1,1,1,2,1,3,1,1 305 | 8,3,4,9,3,10,3,3,1 306 | 10,8,4,4,4,10,3,10,4 307 | 1,1,1,1,2,1,3,1,1 308 | 1,1,1,1,2,1,3,1,1 309 | 7,8,7,6,4,3,8,8,4 310 | 3,1,1,1,2,5,5,1,1 311 | 2,1,1,1,3,1,2,1,1 312 | 1,1,1,1,2,1,1,1,1 313 | 8,6,4,10,10,1,3,5,1 314 | 1,1,1,1,2,1,1,1,1 315 | 1,1,1,1,1,1,2,1,1 316 | 4,6,5,6,7,1,4,9,1 317 | 5,5,5,2,5,10,4,3,1 318 | 6,8,7,8,6,8,8,9,1 319 | 1,1,1,1,5,1,3,1,1 320 | 4,4,4,4,6,5,7,3,1 321 | 7,6,3,2,5,10,7,4,6 322 | 3,1,1,1,2,1,3,1,1 323 | 3,1,1,1,2,1,3,1,1 324 | 5,4,6,10,2,10,4,1,1 325 | 1,1,1,1,2,1,3,1,1 326 | 3,2,2,1,2,1,2,3,1 327 | 10,1,1,1,2,10,5,4,1 328 | 1,1,1,1,2,1,2,1,1 329 | 8,10,3,2,6,4,3,10,1 330 | 10,4,6,4,5,10,7,1,1 331 | 10,4,7,2,2,8,6,1,1 332 | 5,1,1,1,2,1,3,1,2 333 | 5,2,2,2,2,1,2,2,1 334 | 5,4,6,6,4,10,4,3,1 335 | 8,6,7,3,3,10,3,4,2 336 | 1,1,1,1,2,1,1,1,1 337 | 6,5,5,8,4,10,3,4,1 338 | 1,1,1,1,2,1,3,1,1 339 | 1,1,1,1,1,1,2,1,1 340 | 8,5,5,5,2,10,4,3,1 341 | 10,3,3,1,2,10,7,6,1 342 | 1,1,1,1,2,1,3,1,1 343 | 2,1,1,1,2,1,1,1,1 344 | 1,1,1,1,2,1,1,1,1 345 | 7,6,4,8,10,10,9,5,3 346 | 1,1,1,1,2,1,1,1,1 347 | 5,2,2,2,3,1,1,3,1 348 | 1,1,1,1,1,1,1,3,1 349 | 3,4,4,10,5,1,3,3,1 350 | 4,2,3,5,3,8,7,6,1 351 | 5,1,1,3,2,1,1,1,1 352 | 2,1,1,1,2,1,3,1,1 353 | 3,4,5,3,7,3,4,6,1 354 | 2,7,10,10,7,10,4,9,4 355 | 1,1,1,1,2,1,2,1,1 356 | 4,1,1,1,3,1,2,2,1 357 | 5,3,3,1,3,3,3,3,3 358 | 8,10,10,7,10,10,7,3,8 359 | 8,10,5,3,8,4,4,10,3 360 | 10,3,5,4,3,7,3,5,3 361 | 6,10,10,10,10,10,8,10,10 362 | 3,10,3,10,6,10,5,1,4 363 | 3,2,2,1,4,3,2,1,1 364 | 4,4,4,2,2,3,2,1,1 365 | 2,1,1,1,2,1,3,1,1 366 | 2,1,1,1,2,1,2,1,1 367 | 6,10,10,10,8,10,7,10,7 368 | 5,8,8,10,5,10,8,10,3 369 | 1,1,3,1,2,1,1,1,1 370 | 1,1,3,1,1,1,2,1,1 371 | 4,3,2,1,3,1,2,1,1 372 | 1,1,3,1,2,1,1,1,1 373 | 4,1,2,1,2,1,2,1,1 374 | 5,1,1,2,2,1,2,1,1 375 | 3,1,2,1,2,1,2,1,1 376 | 1,1,1,1,2,1,1,1,1 377 | 1,1,1,1,2,1,2,1,1 378 | 1,1,1,1,1,1,2,1,1 379 | 3,1,1,4,3,1,2,2,1 380 | 5,3,4,1,4,1,3,1,1 381 | 1,1,1,1,2,1,1,1,1 382 | 10,6,3,6,4,10,7,8,4 383 | 3,2,2,2,2,1,3,2,1 384 | 2,1,1,1,2,1,1,1,1 385 | 2,1,1,1,2,1,1,1,1 386 | 3,3,2,2,3,1,1,2,3 387 | 7,6,6,3,2,10,7,1,1 388 | 5,3,3,2,3,1,3,1,1 389 | 2,1,1,1,2,1,2,2,1 390 | 5,1,1,1,3,2,2,2,1 391 | 1,1,1,2,2,1,2,1,1 392 | 10,8,7,4,3,10,7,9,1 393 | 3,1,1,1,2,1,2,1,1 394 | 1,1,1,1,1,1,1,1,1 395 | 1,2,3,1,2,1,2,1,1 396 | 3,1,1,1,2,1,2,1,1 397 | 3,1,1,1,2,1,3,1,1 398 | 4,1,1,1,2,1,1,1,1 399 | 3,2,1,1,2,1,2,2,1 400 | 1,2,3,1,2,1,1,1,1 401 | 3,10,8,7,6,9,9,3,8 402 | 3,1,1,1,2,1,1,1,1 403 | 5,3,3,1,2,1,2,1,1 404 | 3,1,1,1,2,4,1,1,1 405 | 1,2,1,3,2,1,1,2,1 406 | 1,1,1,1,2,1,2,1,1 407 | 4,2,2,1,2,1,2,1,1 408 | 1,1,1,1,2,1,2,1,1 409 | 2,3,2,2,2,2,3,1,1 410 | 3,1,2,1,2,1,2,1,1 411 | 1,1,1,1,2,1,2,1,1 412 | 1,1,1,1,1,1,2,1,1 413 | 10,10,10,6,8,4,8,5,1 414 | 5,1,2,1,2,1,3,1,1 415 | 8,5,6,2,3,10,6,6,1 416 | 3,3,2,6,3,3,3,5,1 417 | 8,7,8,5,10,10,7,2,1 418 | 1,1,1,1,2,1,2,1,1 419 | 5,2,2,2,2,2,3,2,2 420 | 2,3,1,1,5,1,1,1,1 421 | 3,2,2,3,2,3,3,1,1 422 | 10,10,10,7,10,10,8,2,1 423 | 4,3,3,1,2,1,3,3,1 424 | 5,1,3,1,2,1,2,1,1 425 | 3,1,1,1,2,1,1,1,1 426 | 9,10,10,10,10,10,10,10,1 427 | 5,3,6,1,2,1,1,1,1 428 | 8,7,8,2,4,2,5,10,1 429 | 1,1,1,1,2,1,2,1,1 430 | 2,1,1,1,2,1,2,1,1 431 | 1,3,1,1,2,1,2,2,1 432 | 5,1,1,3,4,1,3,2,1 433 | 5,1,1,1,2,1,2,2,1 434 | 
3,2,2,3,2,1,1,1,1 435 | 6,9,7,5,5,8,4,2,1 436 | 10,8,10,1,3,10,5,1,1 437 | 10,10,10,1,6,1,2,8,1 438 | 4,1,1,1,2,1,1,1,1 439 | 4,1,3,3,2,1,1,1,1 440 | 5,1,1,1,2,1,1,1,1 441 | 10,4,3,10,4,10,10,1,1 442 | 5,2,2,4,2,4,1,1,1 443 | 1,1,1,3,2,3,1,1,1 444 | 1,1,1,1,2,2,1,1,1 445 | 5,1,1,6,3,1,2,1,1 446 | 2,1,1,1,2,1,1,1,1 447 | 1,1,1,1,2,1,1,1,1 448 | 5,1,1,1,2,1,1,1,1 449 | 1,1,1,1,1,1,1,1,1 450 | 5,7,9,8,6,10,8,10,1 451 | 4,1,1,3,1,1,2,1,1 452 | 5,1,1,1,2,1,1,1,1 453 | 3,1,1,3,2,1,1,1,1 454 | 4,5,5,8,6,10,10,7,1 455 | 2,3,1,1,3,1,1,1,1 456 | 10,2,2,1,2,6,1,1,2 457 | 10,6,5,8,5,10,8,6,1 458 | 8,8,9,6,6,3,10,10,1 459 | 5,1,2,1,2,1,1,1,1 460 | 5,1,3,1,2,1,1,1,1 461 | 5,1,1,3,2,1,1,1,1 462 | 3,1,1,1,2,5,1,1,1 463 | 6,1,1,3,2,1,1,1,1 464 | 4,1,1,1,2,1,1,2,1 465 | 4,1,1,1,2,1,1,1,1 466 | 10,9,8,7,6,4,7,10,3 467 | 10,6,6,2,4,10,9,7,1 468 | 6,6,6,5,4,10,7,6,2 469 | 4,1,1,1,2,1,1,1,1 470 | 1,1,2,1,2,1,2,1,1 471 | 3,1,1,1,1,1,2,1,1 472 | 6,1,1,3,2,1,1,1,1 473 | 6,1,1,1,1,1,1,1,1 474 | 4,1,1,1,2,1,1,1,1 475 | 5,1,1,1,2,1,1,1,1 476 | 3,1,1,1,2,1,1,1,1 477 | 4,1,2,1,2,1,1,1,1 478 | 4,1,1,1,2,1,1,1,1 479 | 5,2,1,1,2,1,1,1,1 480 | 4,8,7,10,4,10,7,5,1 481 | 5,1,1,1,1,1,1,1,1 482 | 5,3,2,4,2,1,1,1,1 483 | 9,10,10,10,10,5,10,10,10 484 | 8,7,8,5,5,10,9,10,1 485 | 5,1,2,1,2,1,1,1,1 486 | 1,1,1,3,1,3,1,1,1 487 | 3,1,1,1,1,1,2,1,1 488 | 10,10,10,10,6,10,8,1,5 489 | 3,6,4,10,3,3,3,4,1 490 | 6,3,2,1,3,4,4,1,1 491 | 1,1,1,1,2,1,1,1,1 492 | 5,8,9,4,3,10,7,1,1 493 | 4,1,1,1,1,1,2,1,1 494 | 5,10,10,10,6,10,6,5,2 495 | 5,1,2,10,4,5,2,1,1 496 | 3,1,1,1,1,1,2,1,1 497 | 1,1,1,1,1,1,1,1,1 498 | 4,2,1,1,2,1,1,1,1 499 | 4,1,1,1,2,1,2,1,1 500 | 4,1,1,1,2,1,2,1,1 501 | 6,1,1,1,2,1,3,1,1 502 | 4,1,1,1,2,1,2,1,1 503 | 4,1,1,2,2,1,2,1,1 504 | 4,1,1,1,2,1,3,1,1 505 | 1,1,1,1,2,1,1,1,1 506 | 3,3,1,1,2,1,1,1,1 507 | 8,10,10,10,7,5,4,8,7 508 | 1,1,1,1,2,4,1,1,1 509 | 5,1,1,1,2,1,1,1,1 510 | 2,1,1,1,2,1,1,1,1 511 | 1,1,1,1,2,1,1,1,1 512 | 5,1,1,1,2,1,2,1,1 513 | 5,1,1,1,2,1,1,1,1 514 | 3,1,1,1,1,1,2,1,1 515 | 6,6,7,10,3,10,8,10,2 516 | 4,10,4,7,3,10,9,10,1 517 | 1,1,1,1,1,1,1,1,1 518 | 1,1,1,1,1,1,2,1,1 519 | 3,1,2,2,2,1,1,1,1 520 | 4,7,8,3,4,10,9,1,1 521 | 1,1,1,1,3,1,1,1,1 522 | 4,1,1,1,3,1,1,1,1 523 | 10,4,5,4,3,5,7,3,1 524 | 7,5,6,10,4,10,5,3,1 525 | 3,1,1,1,2,1,2,1,1 526 | 3,1,1,2,2,1,1,1,1 527 | 4,1,1,1,2,1,1,1,1 528 | 4,1,1,1,2,1,3,1,1 529 | 6,1,3,2,2,1,1,1,1 530 | 4,1,1,1,1,1,2,1,1 531 | 7,4,4,3,4,10,6,9,1 532 | 4,2,2,1,2,1,2,1,1 533 | 1,1,1,1,1,1,3,1,1 534 | 3,1,1,1,2,1,2,1,1 535 | 2,1,1,1,2,1,2,1,1 536 | 1,1,3,2,2,1,3,1,1 537 | 5,1,1,1,2,1,3,1,1 538 | 5,1,2,1,2,1,3,1,1 539 | 4,1,1,1,2,1,2,1,1 540 | 6,1,1,1,2,1,2,1,1 541 | 5,1,1,1,2,2,2,1,1 542 | 3,1,1,1,2,1,1,1,1 543 | 5,3,1,1,2,1,1,1,1 544 | 4,1,1,1,2,1,2,1,1 545 | 2,1,3,2,2,1,2,1,1 546 | 5,1,1,1,2,1,2,1,1 547 | 6,10,10,10,4,10,7,10,1 548 | 2,1,1,1,1,1,1,1,1 549 | 3,1,1,1,1,1,1,1,1 550 | 7,8,3,7,4,5,7,8,2 551 | 3,1,1,1,2,1,2,1,1 552 | 1,1,1,1,2,1,3,1,1 553 | 3,2,2,2,2,1,4,2,1 554 | 4,4,2,1,2,5,2,1,2 555 | 3,1,1,1,2,1,1,1,1 556 | 4,3,1,1,2,1,4,8,1 557 | 5,2,2,2,1,1,2,1,1 558 | 5,1,1,3,2,1,1,1,1 559 | 2,1,1,1,2,1,2,1,1 560 | 5,1,1,1,2,1,2,1,1 561 | 5,1,1,1,2,1,3,1,1 562 | 5,1,1,1,2,1,3,1,1 563 | 1,1,1,1,2,1,3,1,1 564 | 3,1,1,1,2,1,2,1,1 565 | 4,1,1,1,2,1,3,2,1 566 | 5,7,10,10,5,10,10,10,1 567 | 3,1,2,1,2,1,3,1,1 568 | 4,1,1,1,2,3,2,1,1 569 | 8,4,4,1,6,10,2,5,2 570 | 10,10,8,10,6,5,10,3,1 571 | 8,10,4,4,8,10,8,2,1 572 | 7,6,10,5,3,10,9,10,2 573 | 3,1,1,1,2,1,2,1,1 574 | 1,1,1,1,2,1,2,1,1 575 | 10,9,7,3,4,2,7,7,1 576 | 5,1,2,1,2,1,3,1,1 577 | 5,1,1,1,2,1,2,1,1 578 | 
1,1,1,1,2,1,2,1,1 579 | 1,1,1,1,2,1,2,1,1 580 | 1,1,1,1,2,1,3,1,1 581 | 5,1,2,1,2,1,2,1,1 582 | 5,7,10,6,5,10,7,5,1 583 | 6,10,5,5,4,10,6,10,1 584 | 3,1,1,1,2,1,1,1,1 585 | 5,1,1,6,3,1,1,1,1 586 | 1,1,1,1,2,1,1,1,1 587 | 8,10,10,10,6,10,10,10,1 588 | 5,1,1,1,2,1,2,2,1 589 | 9,8,8,9,6,3,4,1,1 590 | 5,1,1,1,2,1,1,1,1 591 | 4,10,8,5,4,1,10,1,1 592 | 2,5,7,6,4,10,7,6,1 593 | 10,3,4,5,3,10,4,1,1 594 | 5,1,2,1,2,1,1,1,1 595 | 4,8,6,3,4,10,7,1,1 596 | 5,1,1,1,2,1,2,1,1 597 | 4,1,2,1,2,1,2,1,1 598 | 5,1,3,1,2,1,3,1,1 599 | 3,1,1,1,2,1,2,1,1 600 | 5,2,4,1,1,1,1,1,1 601 | 3,1,1,1,2,1,2,1,1 602 | 1,1,1,1,1,1,2,1,1 603 | 4,1,1,1,2,1,2,1,1 604 | 5,4,6,8,4,1,8,10,1 605 | 5,3,2,8,5,10,8,1,2 606 | 10,5,10,3,5,8,7,8,3 607 | 4,1,1,2,2,1,1,1,1 608 | 1,1,1,1,2,1,1,1,1 609 | 5,10,10,10,10,10,10,1,1 610 | 5,1,1,1,2,1,1,1,1 611 | 10,4,3,10,3,10,7,1,2 612 | 5,10,10,10,5,2,8,5,1 613 | 8,10,10,10,6,10,10,10,10 614 | 2,3,1,1,2,1,2,1,1 615 | 2,1,1,1,1,1,2,1,1 616 | 4,1,3,1,2,1,2,1,1 617 | 3,1,1,1,2,1,2,1,1 618 | 1,1,1,1,1,1,1,1,1 619 | 4,1,1,1,2,1,2,1,1 620 | 5,1,1,1,2,1,2,1,1 621 | 3,1,1,1,2,1,2,1,1 622 | 6,3,3,3,3,2,6,1,1 623 | 7,1,2,3,2,1,2,1,1 624 | 1,1,1,1,2,1,1,1,1 625 | 5,1,1,2,1,1,2,1,1 626 | 3,1,3,1,3,4,1,1,1 627 | 4,6,6,5,7,6,7,7,3 628 | 2,1,1,1,2,5,1,1,1 629 | 2,1,1,1,2,1,1,1,1 630 | 4,1,1,1,2,1,1,1,1 631 | 6,2,3,1,2,1,1,1,1 632 | 5,1,1,1,2,1,2,1,1 633 | 1,1,1,1,2,1,1,1,1 634 | 8,7,4,4,5,3,5,10,1 635 | 3,1,1,1,2,1,1,1,1 636 | 3,1,4,1,2,1,1,1,1 637 | 10,10,7,8,7,1,10,10,3 638 | 4,2,4,3,2,2,2,1,1 639 | 4,1,1,1,2,1,1,1,1 640 | 5,1,1,3,2,1,1,1,1 641 | 4,1,1,3,2,1,1,1,1 642 | 3,1,1,1,2,1,2,1,1 643 | 3,1,1,1,2,1,2,1,1 644 | 1,1,1,1,2,1,1,1,1 645 | 2,1,1,1,2,1,1,1,1 646 | 3,1,1,1,2,1,2,1,1 647 | 1,2,2,1,2,1,1,1,1 648 | 1,1,1,3,2,1,1,1,1 649 | 5,10,10,10,10,2,10,10,10 650 | 3,1,1,1,2,1,2,1,1 651 | 3,1,1,2,3,4,1,1,1 652 | 1,2,1,3,2,1,2,1,1 653 | 5,1,1,1,2,1,2,2,1 654 | 4,1,1,1,2,1,2,1,1 655 | 3,1,1,1,2,1,3,1,1 656 | 3,1,1,1,2,1,2,1,1 657 | 5,1,1,1,2,1,2,1,1 658 | 5,4,5,1,8,1,3,6,1 659 | 7,8,8,7,3,10,7,2,3 660 | 1,1,1,1,2,1,1,1,1 661 | 1,1,1,1,2,1,2,1,1 662 | 4,1,1,1,2,1,3,1,1 663 | 1,1,3,1,2,1,2,1,1 664 | 1,1,3,1,2,1,2,1,1 665 | 3,1,1,3,2,1,2,1,1 666 | 1,1,1,1,2,1,1,1,1 667 | 5,2,2,2,2,1,1,1,2 668 | 3,1,1,1,2,1,3,1,1 669 | 5,7,4,1,6,1,7,10,3 670 | 5,10,10,8,5,5,7,10,1 671 | 3,10,7,8,5,8,7,4,1 672 | 3,2,1,2,2,1,3,1,1 673 | 2,1,1,1,2,1,3,1,1 674 | 5,3,2,1,3,1,1,1,1 675 | 1,1,1,1,2,1,2,1,1 676 | 4,1,4,1,2,1,1,1,1 677 | 1,1,2,1,2,1,2,1,1 678 | 5,1,1,1,2,1,1,1,1 679 | 1,1,1,1,2,1,1,1,1 680 | 2,1,1,1,2,1,1,1,1 681 | 10,10,10,10,5,10,10,10,7 682 | 5,10,10,10,4,10,5,6,3 683 | 5,1,1,1,2,1,3,2,1 684 | 1,1,1,1,2,1,1,1,1 685 | 1,1,1,1,2,1,1,1,1 686 | 1,1,1,1,2,1,1,1,1 687 | 1,1,1,1,2,1,1,1,1 688 | 3,1,1,1,2,1,2,3,1 689 | 4,1,1,1,2,1,1,1,1 690 | 1,1,1,1,2,1,1,1,8 691 | 1,1,1,3,2,1,1,1,1 692 | 5,10,10,5,4,5,4,4,1 693 | 3,1,1,1,2,1,1,1,1 694 | 3,1,1,1,2,1,2,1,2 695 | 3,1,1,1,3,2,1,1,1 696 | 2,1,1,1,2,1,1,1,1 697 | 5,10,10,3,7,3,8,10,2 698 | 4,8,6,4,3,4,10,6,1 699 | 4,8,8,5,4,5,10,4,1 700 | -------------------------------------------------------------------------------- /Data Sets/coimbra.csv: -------------------------------------------------------------------------------- 1 | 48,23.5,70,2.707,0.467408667,8.8071,9.7024,7.99585,417.114 2 | 83,20.69049454,92,3.115,0.706897333,8.8438,5.429285,4.06405,468.786 3 | 82,23.12467037,91,4.498,1.009651067,17.9393,22.43204,9.27715,554.697 4 | 68,21.36752137,77,3.226,0.612724933,9.8827,7.16956,12.766,928.22 5 | 86,21.11111111,92,3.549,0.8053864,6.6994,4.81924,10.57635,773.92 6 | 
49,22.85445769,92,3.226,0.732086933,6.8317,13.67975,10.3176,530.41 7 | 89,22.7,77,4.69,0.890787333,6.964,5.589865,12.9361,1256.083 8 | 76,23.8,118,6.47,1.883201333,4.311,13.25132,5.1042,280.694 9 | 73,22,97,3.35,0.801543333,4.47,10.358725,6.28445,136.855 10 | 75,23,83,4.952,1.013839467,17.127,11.57899,7.0913,318.302 11 | 34,21.47,78,3.469,0.6674356,14.57,13.11,6.92,354.6 12 | 29,23.01,82,5.663,1.145436133,35.59,26.72,4.58,174.8 13 | 25,22.86,82,4.09,0.827270667,20.45,23.67,5.14,313.73 14 | 24,18.67,88,6.107,1.33,8.88,36.06,6.85,632.22 15 | 38,23.34,75,5.782,1.06967,15.26,17.95,9.35,165.02 16 | 44,20.76,86,7.553,1.6,14.09,20.32,7.64,63.61 17 | 47,22.03,84,2.869,0.59,26.65,38.04,3.32,191.72 18 | 61,32.03895937,85,18.077,3.790144333,30.7729,7.780255,13.68392,444.395 19 | 64,34.5297228,95,4.427,1.037393667,21.2117,5.46262,6.70188,252.449 20 | 32,36.51263743,87,14.026,3.0099796,49.3727,5.1,17.10223,588.46 21 | 36,28.57667585,86,4.345,0.921719333,15.1248,8.6,9.1539,534.224 22 | 34,31.97501487,87,4.53,0.972138,28.7502,7.64276,5.62592,572.783 23 | 29,32.27078777,84,5.81,1.203832,45.6196,6.209635,24.6033,904.981 24 | 35,30.27681661,84,4.376,0.9067072,39.2134,9.048185,16.43706,733.797 25 | 54,30.48315806,90,5.537,1.229214,12.331,9.73138,10.19299,1227.91 26 | 45,37.03560819,83,6.76,1.383997333,39.9802,4.617125,8.70448,586.173 27 | 50,38.57875854,106,6.703,1.752611067,46.6401,4.667645,11.78388,887.16 28 | 66,31.44654088,90,9.245,2.05239,45.9624,10.35526,23.3819,1102.11 29 | 35,35.2507611,90,6.817,1.513374,50.6094,6.966895,22.03703,667.928 30 | 36,34.17489,80,6.59,1.300426667,10.2809,5.065915,15.72187,581.313 31 | 66,36.21227888,101,15.533,3.869788067,74.7069,7.53955,22.32024,864.968 32 | 53,36.7901662,101,10.175,2.534931667,27.1841,20.03,10.26309,695.754 33 | 28,35.85581466,87,8.576,1.8404096,68.5102,4.7942,21.44366,358.624 34 | 43,34.42217362,89,23.194,5.091856133,31.2128,8.300955,6.71026,960.246 35 | 51,27.68877813,77,3.855,0.732193,20.092,3.19209,10.37518,473.859 36 | 67,29.60676726,79,5.819,1.133929133,21.9033,2.19428,4.2075,585.307 37 | 66,31.2385898,82,4.181,0.845676933,16.2247,4.267105,3.29175,634.602 38 | 69,35.09270153,101,5.646,1.4066068,83.4821,6.796985,82.1,263.499 39 | 60,26.34929208,103,5.138,1.305394533,24.2998,2.19428,20.2535,378.996 40 | 77,35.58792924,76,3.881,0.727558133,21.7863,8.12555,17.2615,618.272 41 | 76,29.2184076,83,5.376,1.1006464,28.562,7.36996,8.04375,698.789 42 | 76,27.2,94,14.07,3.262364,35.891,9.34663,8.4156,377.227 43 | 75,27.3,85,5.197,1.089637667,10.39,9.000805,7.5767,335.393 44 | 69,32.5,93,5.43,1.245642,15.145,11.78796,11.78796,270.142 45 | 71,30.3,102,8.34,2.098344,56.502,8.13,4.2989,200.976 46 | 66,27.7,90,6.042,1.341324,24.846,7.652055,6.7052,225.88 47 | 75,25.7,94,8.079,1.8732508,65.926,3.74122,4.49685,206.802 48 | 78,25.3,60,3.508,0.519184,6.633,10.567295,4.6638,209.749 49 | 69,29.4,89,10.704,2.3498848,45.272,8.2863,4.53,215.769 50 | 85,26.6,96,4.462,1.0566016,7.85,7.9317,9.6135,232.006 51 | 76,27.1,110,26.211,7.111918,21.778,4.935635,8.49395,45.843 52 | 77,25.9,85,4.58,0.960273333,13.74,9.75326,11.774,488.829 53 | 45,21.30394858,102,13.852,3.4851632,7.6476,21.056625,23.03408,552.444 54 | 45,20.82999519,74,4.56,0.832352,7.7529,8.237405,28.0323,382.955 55 | 49,20.9566075,94,12.305,2.853119333,11.2406,8.412175,23.1177,573.63 56 | 34,24.24242424,92,21.699,4.9242264,16.7353,21.823745,12.06534,481.949 57 | 42,21.35991456,93,2.999,0.6879706,19.0826,8.462915,17.37615,321.919 58 | 68,21.08281329,102,6.2,1.55992,9.6994,8.574655,13.74244,448.799 59 | 
51,19.13265306,93,4.364,1.0011016,11.0816,5.80762,5.57055,90.6 60 | 62,22.65625,92,3.482,0.790181867,9.8648,11.236235,10.69548,703.973 61 | 38,22.4996371,95,5.261,1.232827667,8.438,4.77192,15.73606,199.055 62 | 69,21.51385851,112,6.683,1.846290133,32.58,4.138025,15.69876,713.239 63 | 49,21.36752137,78,2.64,0.507936,6.3339,3.886145,22.94254,737.672 64 | 51,22.89281998,103,2.74,0.696142667,8.0163,9.349775,11.55492,359.232 65 | 59,22.83287935,98,6.862,1.658774133,14.9037,4.230105,8.2049,355.31 66 | 45,23.14049587,116,4.902,1.4026256,17.9973,4.294705,5.2633,518.586 67 | 54,24.21875,86,3.73,0.791257333,8.6874,3.70523,10.34455,635.049 68 | 64,22.22222222,98,5.7,1.37788,12.1905,4.783985,13.91245,395.976 69 | 46,20.83,88,3.42,0.742368,12.87,18.55,13.56,301.21 70 | 44,19.56,114,15.89,4.468268,13.08,20.37,4.62,220.66 71 | 45,20.26,92,3.44,0.780650667,7.65,16.67,7.84,193.87 72 | 44,24.74,106,58.46,15.28534133,18.16,16.1,5.31,244.75 73 | 51,18.37,105,6.03,1.56177,9.62,12.76,3.21,513.66 74 | 72,23.62,105,4.42,1.14478,21.78,17.86,4.82,195.94 75 | 46,22.21,86,36.94,7.836205333,10.16,9.76,5.68,312 76 | 43,26.5625,101,10.555,2.629602333,9.8,6.420295,16.1,806.724 77 | 55,31.97501487,92,16.635,3.775036,37.2234,11.018455,7.16514,483.377 78 | 43,31.25,103,4.328,1.099600533,25.7816,12.71896,38.6531,775.322 79 | 86,26.66666667,201,41.611,20.6307338,47.647,5.357135,24.3701,1698.44 80 | 41,26.6727633,97,22.033,5.271762467,44.7059,13.494865,27.8325,783.796 81 | 59,28.67262608,77,3.188,0.605507467,17.022,16.44048,31.6904,910.489 82 | 81,31.64036818,100,9.669,2.38502,38.8066,10.636525,29.5583,426.175 83 | 48,32.46191136,99,28.677,7.0029234,46.076,21.57,10.15726,738.034 84 | 71,25.51020408,112,10.395,2.871792,19.0653,5.4861,42.7447,799.898 85 | 42,29.296875,98,4.172,1.008511467,12.2617,6.695585,53.6717,1041.843 86 | 65,29.666548,85,14.649,3.071407,26.5166,7.28287,19.46324,1698.44 87 | 48,28.125,90,2.54,0.56388,15.5325,10.22231,16.11032,1698.44 88 | 85,27.68877813,196,51.814,25.05034187,70.8824,7.901685,55.2153,1078.359 89 | 48,31.25,199,12.162,5.9699204,18.1314,4.104105,53.6308,1698.44 90 | 58,29.15451895,139,16.582,5.685415067,22.8884,10.26266,13.97399,923.886 91 | 40,30.83653053,128,41.894,13.22733227,31.0385,6.160995,17.55503,638.261 92 | 82,31.21748179,100,18.077,4.458993333,31.6453,9.92365,19.94687,994.316 93 | 52,30.8012487,87,30.212,6.4834952,29.2739,6.26854,24.24591,764.667 94 | 49,32.46191136,134,24.887,8.225983067,42.3914,10.79394,5.768,656.393 95 | 60,31.23140988,131,30.13,9.736007333,37.843,8.40443,11.50005,396.021 96 | 49,29.77777778,70,8.396,1.449709333,51.3387,10.73174,20.76801,602.486 97 | 44,27.88761707,99,9.208,2.2485936,12.6757,5.47817,23.03306,407.206 98 | 40,27.63605442,103,2.432,0.617890133,14.3224,6.78387,26.0136,293.123 99 | 71,27.91551882,104,18.2,4.668906667,53.4997,1.65602,49.24184,256.001 100 | 69,28.44444444,108,8.808,2.3464512,14.7485,5.288025,16.48508,353.568 101 | 74,28.65013774,88,3.012,0.6538048,31.1233,7.65222,18.35574,572.401 102 | 66,26.5625,89,6.524,1.432235467,14.9084,8.42996,14.91922,269.487 103 | 65,30.91557669,97,10.491,2.5101466,44.0217,3.71009,20.4685,396.648 104 | 72,29.13631634,83,10.949,2.241625267,26.8081,2.78491,14.76966,232.018 105 | 57,34.83814777,95,12.548,2.940414667,33.1612,2.36495,9.9542,655.834 106 | 73,37.109375,134,5.636,1.862885867,41.4064,3.335665,6.89235,788.902 107 | 45,29.38475666,90,4.713,1.046286,23.8479,6.644245,15.55625,621.273 108 | 46,33.18,92,5.75,1.304866667,18.69,9.16,8.89,209.19 109 | 68,35.56,131,8.15,2.633536667,17.87,11.9,4.19,198.4 110 | 
75,30.48,152,7.01,2.628282667,50.53,10.06,11.73,99.45 111 | 54,36.05,119,11.91,3.495982,89.27,8.01,5.06,218.28 112 | 45,26.85,92,3.33,0.755688,54.68,12.1,10.96,268.23 113 | 62,26.84,100,4.53,1.1174,12.45,21.42,7.32,330.16 114 | 65,32.05,97,5.73,1.370998,61.48,22.54,10.33,314.05 115 | 72,25.59,82,2.82,0.570392,24.96,33.75,3.27,392.46 116 | 86,27.18,138,19.91,6.777364,90.28,14.11,4.35,90.09 117 | -------------------------------------------------------------------------------- /Data Sets/label_australian.csv: -------------------------------------------------------------------------------- 1 | 1.000000000000000000e+00 2 | 1.000000000000000000e+00 3 | 1.000000000000000000e+00 4 | 1.000000000000000000e+00 5 | 1.000000000000000000e+00 6 | 1.000000000000000000e+00 7 | 1.000000000000000000e+00 8 | 1.000000000000000000e+00 9 | 1.000000000000000000e+00 10 | 1.000000000000000000e+00 11 | 1.000000000000000000e+00 12 | 1.000000000000000000e+00 13 | 1.000000000000000000e+00 14 | 1.000000000000000000e+00 15 | 1.000000000000000000e+00 16 | 1.000000000000000000e+00 17 | 1.000000000000000000e+00 18 | 1.000000000000000000e+00 19 | 1.000000000000000000e+00 20 | 1.000000000000000000e+00 21 | 1.000000000000000000e+00 22 | 1.000000000000000000e+00 23 | 1.000000000000000000e+00 24 | 1.000000000000000000e+00 25 | 1.000000000000000000e+00 26 | 1.000000000000000000e+00 27 | 1.000000000000000000e+00 28 | 1.000000000000000000e+00 29 | 1.000000000000000000e+00 30 | 1.000000000000000000e+00 31 | 1.000000000000000000e+00 32 | 1.000000000000000000e+00 33 | 1.000000000000000000e+00 34 | 1.000000000000000000e+00 35 | 1.000000000000000000e+00 36 | 1.000000000000000000e+00 37 | 1.000000000000000000e+00 38 | 1.000000000000000000e+00 39 | 1.000000000000000000e+00 40 | 1.000000000000000000e+00 41 | 1.000000000000000000e+00 42 | 1.000000000000000000e+00 43 | 1.000000000000000000e+00 44 | 1.000000000000000000e+00 45 | 1.000000000000000000e+00 46 | 1.000000000000000000e+00 47 | 1.000000000000000000e+00 48 | 1.000000000000000000e+00 49 | 1.000000000000000000e+00 50 | 1.000000000000000000e+00 51 | 1.000000000000000000e+00 52 | 1.000000000000000000e+00 53 | 1.000000000000000000e+00 54 | 1.000000000000000000e+00 55 | 1.000000000000000000e+00 56 | 1.000000000000000000e+00 57 | 1.000000000000000000e+00 58 | 1.000000000000000000e+00 59 | 1.000000000000000000e+00 60 | 1.000000000000000000e+00 61 | 1.000000000000000000e+00 62 | 1.000000000000000000e+00 63 | 1.000000000000000000e+00 64 | 1.000000000000000000e+00 65 | 1.000000000000000000e+00 66 | 1.000000000000000000e+00 67 | 1.000000000000000000e+00 68 | 1.000000000000000000e+00 69 | 1.000000000000000000e+00 70 | 1.000000000000000000e+00 71 | 1.000000000000000000e+00 72 | 1.000000000000000000e+00 73 | 1.000000000000000000e+00 74 | 1.000000000000000000e+00 75 | 1.000000000000000000e+00 76 | 1.000000000000000000e+00 77 | 1.000000000000000000e+00 78 | 1.000000000000000000e+00 79 | 1.000000000000000000e+00 80 | 1.000000000000000000e+00 81 | 1.000000000000000000e+00 82 | 1.000000000000000000e+00 83 | 1.000000000000000000e+00 84 | 1.000000000000000000e+00 85 | 1.000000000000000000e+00 86 | 1.000000000000000000e+00 87 | 1.000000000000000000e+00 88 | 1.000000000000000000e+00 89 | 1.000000000000000000e+00 90 | 1.000000000000000000e+00 91 | 1.000000000000000000e+00 92 | 1.000000000000000000e+00 93 | 1.000000000000000000e+00 94 | 1.000000000000000000e+00 95 | 1.000000000000000000e+00 96 | 1.000000000000000000e+00 97 | 1.000000000000000000e+00 98 | 1.000000000000000000e+00 99 | 1.000000000000000000e+00 
100 | 1.000000000000000000e+00 101 | 1.000000000000000000e+00 102 | 1.000000000000000000e+00 103 | 1.000000000000000000e+00 104 | 1.000000000000000000e+00 105 | 1.000000000000000000e+00 106 | 1.000000000000000000e+00 107 | 1.000000000000000000e+00 108 | 1.000000000000000000e+00 109 | 1.000000000000000000e+00 110 | 1.000000000000000000e+00 111 | 1.000000000000000000e+00 112 | 1.000000000000000000e+00 113 | 1.000000000000000000e+00 114 | 1.000000000000000000e+00 115 | 1.000000000000000000e+00 116 | 1.000000000000000000e+00 117 | 1.000000000000000000e+00 118 | 1.000000000000000000e+00 119 | 1.000000000000000000e+00 120 | 1.000000000000000000e+00 121 | 1.000000000000000000e+00 122 | 1.000000000000000000e+00 123 | 1.000000000000000000e+00 124 | 1.000000000000000000e+00 125 | 1.000000000000000000e+00 126 | 1.000000000000000000e+00 127 | 1.000000000000000000e+00 128 | 1.000000000000000000e+00 129 | 1.000000000000000000e+00 130 | 1.000000000000000000e+00 131 | 1.000000000000000000e+00 132 | 1.000000000000000000e+00 133 | 1.000000000000000000e+00 134 | 1.000000000000000000e+00 135 | 1.000000000000000000e+00 136 | 1.000000000000000000e+00 137 | 1.000000000000000000e+00 138 | 1.000000000000000000e+00 139 | 1.000000000000000000e+00 140 | 1.000000000000000000e+00 141 | 1.000000000000000000e+00 142 | 1.000000000000000000e+00 143 | 1.000000000000000000e+00 144 | 1.000000000000000000e+00 145 | 1.000000000000000000e+00 146 | 1.000000000000000000e+00 147 | 1.000000000000000000e+00 148 | 1.000000000000000000e+00 149 | 1.000000000000000000e+00 150 | 1.000000000000000000e+00 151 | 1.000000000000000000e+00 152 | 1.000000000000000000e+00 153 | 1.000000000000000000e+00 154 | 1.000000000000000000e+00 155 | 1.000000000000000000e+00 156 | 1.000000000000000000e+00 157 | 1.000000000000000000e+00 158 | 1.000000000000000000e+00 159 | 1.000000000000000000e+00 160 | 1.000000000000000000e+00 161 | 1.000000000000000000e+00 162 | 1.000000000000000000e+00 163 | 1.000000000000000000e+00 164 | 1.000000000000000000e+00 165 | 1.000000000000000000e+00 166 | 1.000000000000000000e+00 167 | 1.000000000000000000e+00 168 | 1.000000000000000000e+00 169 | 1.000000000000000000e+00 170 | 1.000000000000000000e+00 171 | 1.000000000000000000e+00 172 | 1.000000000000000000e+00 173 | 1.000000000000000000e+00 174 | 1.000000000000000000e+00 175 | 1.000000000000000000e+00 176 | 1.000000000000000000e+00 177 | 1.000000000000000000e+00 178 | 1.000000000000000000e+00 179 | 1.000000000000000000e+00 180 | 1.000000000000000000e+00 181 | 1.000000000000000000e+00 182 | 1.000000000000000000e+00 183 | 1.000000000000000000e+00 184 | 1.000000000000000000e+00 185 | 1.000000000000000000e+00 186 | 1.000000000000000000e+00 187 | 1.000000000000000000e+00 188 | 1.000000000000000000e+00 189 | 1.000000000000000000e+00 190 | 1.000000000000000000e+00 191 | 1.000000000000000000e+00 192 | 1.000000000000000000e+00 193 | 1.000000000000000000e+00 194 | 1.000000000000000000e+00 195 | 1.000000000000000000e+00 196 | 1.000000000000000000e+00 197 | 1.000000000000000000e+00 198 | 1.000000000000000000e+00 199 | 1.000000000000000000e+00 200 | 1.000000000000000000e+00 201 | 1.000000000000000000e+00 202 | 1.000000000000000000e+00 203 | 1.000000000000000000e+00 204 | 1.000000000000000000e+00 205 | 1.000000000000000000e+00 206 | 1.000000000000000000e+00 207 | 1.000000000000000000e+00 208 | 1.000000000000000000e+00 209 | 1.000000000000000000e+00 210 | 1.000000000000000000e+00 211 | 1.000000000000000000e+00 212 | 1.000000000000000000e+00 213 | 1.000000000000000000e+00 214 | 
1.000000000000000000e+00 215 | 1.000000000000000000e+00 216 | 1.000000000000000000e+00 217 | 1.000000000000000000e+00 218 | 1.000000000000000000e+00 219 | 1.000000000000000000e+00 220 | 1.000000000000000000e+00 221 | 1.000000000000000000e+00 222 | 1.000000000000000000e+00 223 | 1.000000000000000000e+00 224 | 1.000000000000000000e+00 225 | 1.000000000000000000e+00 226 | 1.000000000000000000e+00 227 | 1.000000000000000000e+00 228 | 1.000000000000000000e+00 229 | 1.000000000000000000e+00 230 | 1.000000000000000000e+00 231 | 1.000000000000000000e+00 232 | 1.000000000000000000e+00 233 | 1.000000000000000000e+00 234 | 1.000000000000000000e+00 235 | 1.000000000000000000e+00 236 | 1.000000000000000000e+00 237 | 1.000000000000000000e+00 238 | 1.000000000000000000e+00 239 | 1.000000000000000000e+00 240 | 1.000000000000000000e+00 241 | 1.000000000000000000e+00 242 | 1.000000000000000000e+00 243 | 1.000000000000000000e+00 244 | 1.000000000000000000e+00 245 | 1.000000000000000000e+00 246 | 1.000000000000000000e+00 247 | 1.000000000000000000e+00 248 | 1.000000000000000000e+00 249 | 1.000000000000000000e+00 250 | 1.000000000000000000e+00 251 | 1.000000000000000000e+00 252 | 1.000000000000000000e+00 253 | 1.000000000000000000e+00 254 | 1.000000000000000000e+00 255 | 1.000000000000000000e+00 256 | 1.000000000000000000e+00 257 | 1.000000000000000000e+00 258 | 1.000000000000000000e+00 259 | 1.000000000000000000e+00 260 | 1.000000000000000000e+00 261 | 1.000000000000000000e+00 262 | 1.000000000000000000e+00 263 | 1.000000000000000000e+00 264 | 1.000000000000000000e+00 265 | 1.000000000000000000e+00 266 | 1.000000000000000000e+00 267 | 1.000000000000000000e+00 268 | 1.000000000000000000e+00 269 | 1.000000000000000000e+00 270 | 1.000000000000000000e+00 271 | 1.000000000000000000e+00 272 | 1.000000000000000000e+00 273 | 1.000000000000000000e+00 274 | 1.000000000000000000e+00 275 | 1.000000000000000000e+00 276 | 1.000000000000000000e+00 277 | 1.000000000000000000e+00 278 | 1.000000000000000000e+00 279 | 1.000000000000000000e+00 280 | 1.000000000000000000e+00 281 | 1.000000000000000000e+00 282 | 1.000000000000000000e+00 283 | 1.000000000000000000e+00 284 | 1.000000000000000000e+00 285 | 1.000000000000000000e+00 286 | 1.000000000000000000e+00 287 | 1.000000000000000000e+00 288 | 1.000000000000000000e+00 289 | 1.000000000000000000e+00 290 | 1.000000000000000000e+00 291 | 1.000000000000000000e+00 292 | 1.000000000000000000e+00 293 | 1.000000000000000000e+00 294 | 1.000000000000000000e+00 295 | 1.000000000000000000e+00 296 | 1.000000000000000000e+00 297 | 1.000000000000000000e+00 298 | 1.000000000000000000e+00 299 | 1.000000000000000000e+00 300 | 1.000000000000000000e+00 301 | 1.000000000000000000e+00 302 | 1.000000000000000000e+00 303 | 1.000000000000000000e+00 304 | 1.000000000000000000e+00 305 | 1.000000000000000000e+00 306 | 1.000000000000000000e+00 307 | 1.000000000000000000e+00 308 | 1.000000000000000000e+00 309 | 1.000000000000000000e+00 310 | 1.000000000000000000e+00 311 | 1.000000000000000000e+00 312 | 1.000000000000000000e+00 313 | 1.000000000000000000e+00 314 | 1.000000000000000000e+00 315 | 1.000000000000000000e+00 316 | 1.000000000000000000e+00 317 | 1.000000000000000000e+00 318 | 1.000000000000000000e+00 319 | 1.000000000000000000e+00 320 | 1.000000000000000000e+00 321 | 1.000000000000000000e+00 322 | 1.000000000000000000e+00 323 | 1.000000000000000000e+00 324 | 1.000000000000000000e+00 325 | 1.000000000000000000e+00 326 | 1.000000000000000000e+00 327 | 1.000000000000000000e+00 328 | 
1.000000000000000000e+00 329 | 1.000000000000000000e+00 330 | 1.000000000000000000e+00 331 | 1.000000000000000000e+00 332 | 1.000000000000000000e+00 333 | 1.000000000000000000e+00 334 | 1.000000000000000000e+00 335 | 1.000000000000000000e+00 336 | 1.000000000000000000e+00 337 | 1.000000000000000000e+00 338 | 1.000000000000000000e+00 339 | 1.000000000000000000e+00 340 | 1.000000000000000000e+00 341 | 1.000000000000000000e+00 342 | 1.000000000000000000e+00 343 | 1.000000000000000000e+00 344 | 1.000000000000000000e+00 345 | 1.000000000000000000e+00 346 | 1.000000000000000000e+00 347 | 1.000000000000000000e+00 348 | 1.000000000000000000e+00 349 | 1.000000000000000000e+00 350 | 1.000000000000000000e+00 351 | 1.000000000000000000e+00 352 | 1.000000000000000000e+00 353 | 1.000000000000000000e+00 354 | 1.000000000000000000e+00 355 | 1.000000000000000000e+00 356 | 1.000000000000000000e+00 357 | 1.000000000000000000e+00 358 | 1.000000000000000000e+00 359 | 1.000000000000000000e+00 360 | 1.000000000000000000e+00 361 | 1.000000000000000000e+00 362 | 1.000000000000000000e+00 363 | 1.000000000000000000e+00 364 | 1.000000000000000000e+00 365 | 1.000000000000000000e+00 366 | 1.000000000000000000e+00 367 | 1.000000000000000000e+00 368 | 1.000000000000000000e+00 369 | 1.000000000000000000e+00 370 | 1.000000000000000000e+00 371 | 1.000000000000000000e+00 372 | 1.000000000000000000e+00 373 | 1.000000000000000000e+00 374 | 1.000000000000000000e+00 375 | 1.000000000000000000e+00 376 | 1.000000000000000000e+00 377 | 1.000000000000000000e+00 378 | 1.000000000000000000e+00 379 | 1.000000000000000000e+00 380 | 1.000000000000000000e+00 381 | 1.000000000000000000e+00 382 | 1.000000000000000000e+00 383 | 1.000000000000000000e+00 384 | 1.000000000000000000e+00 385 | 1.000000000000000000e+00 386 | 1.000000000000000000e+00 387 | 1.000000000000000000e+00 388 | 1.000000000000000000e+00 389 | 1.000000000000000000e+00 390 | 1.000000000000000000e+00 391 | 1.000000000000000000e+00 392 | 1.000000000000000000e+00 393 | 1.000000000000000000e+00 394 | 1.000000000000000000e+00 395 | 1.000000000000000000e+00 396 | 1.000000000000000000e+00 397 | 1.000000000000000000e+00 398 | 1.000000000000000000e+00 399 | 1.000000000000000000e+00 400 | 1.000000000000000000e+00 401 | 1.000000000000000000e+00 402 | 1.000000000000000000e+00 403 | 1.000000000000000000e+00 404 | 1.000000000000000000e+00 405 | 1.000000000000000000e+00 406 | 1.000000000000000000e+00 407 | 1.000000000000000000e+00 408 | 1.000000000000000000e+00 409 | 1.000000000000000000e+00 410 | 1.000000000000000000e+00 411 | 1.000000000000000000e+00 412 | 1.000000000000000000e+00 413 | 1.000000000000000000e+00 414 | 1.000000000000000000e+00 415 | 1.000000000000000000e+00 416 | 1.000000000000000000e+00 417 | 1.000000000000000000e+00 418 | 1.000000000000000000e+00 419 | 1.000000000000000000e+00 420 | 1.000000000000000000e+00 421 | 1.000000000000000000e+00 422 | 1.000000000000000000e+00 423 | 1.000000000000000000e+00 424 | 1.000000000000000000e+00 425 | 1.000000000000000000e+00 426 | 1.000000000000000000e+00 427 | 1.000000000000000000e+00 428 | 1.000000000000000000e+00 429 | 1.000000000000000000e+00 430 | 1.000000000000000000e+00 431 | 1.000000000000000000e+00 432 | 1.000000000000000000e+00 433 | 1.000000000000000000e+00 434 | 1.000000000000000000e+00 435 | 1.000000000000000000e+00 436 | 1.000000000000000000e+00 437 | 1.000000000000000000e+00 438 | 1.000000000000000000e+00 439 | 1.000000000000000000e+00 440 | 1.000000000000000000e+00 441 | 1.000000000000000000e+00 442 | 
1.000000000000000000e+00 443 | 1.000000000000000000e+00 444 | 1.000000000000000000e+00 445 | 1.000000000000000000e+00 446 | 1.000000000000000000e+00 447 | 1.000000000000000000e+00 448 | 1.000000000000000000e+00 449 | 1.000000000000000000e+00 450 | 1.000000000000000000e+00 451 | 1.000000000000000000e+00 452 | 1.000000000000000000e+00 453 | 1.000000000000000000e+00 454 | 1.000000000000000000e+00 455 | 1.000000000000000000e+00 456 | 1.000000000000000000e+00 457 | 1.000000000000000000e+00 458 | 1.000000000000000000e+00 459 | 1.000000000000000000e+00 460 | 1.000000000000000000e+00 461 | 1.000000000000000000e+00 462 | 1.000000000000000000e+00 463 | 1.000000000000000000e+00 464 | 1.000000000000000000e+00 465 | 1.000000000000000000e+00 466 | 1.000000000000000000e+00 467 | 1.000000000000000000e+00 468 | 1.000000000000000000e+00 469 | 1.000000000000000000e+00 470 | 1.000000000000000000e+00 471 | 1.000000000000000000e+00 472 | 1.000000000000000000e+00 473 | 1.000000000000000000e+00 474 | 1.000000000000000000e+00 475 | 1.000000000000000000e+00 476 | 1.000000000000000000e+00 477 | 1.000000000000000000e+00 478 | 1.000000000000000000e+00 479 | 1.000000000000000000e+00 480 | 1.000000000000000000e+00 481 | 1.000000000000000000e+00 482 | 1.000000000000000000e+00 483 | 1.000000000000000000e+00 484 | 1.000000000000000000e+00 485 | 1.000000000000000000e+00 486 | 1.000000000000000000e+00 487 | 1.000000000000000000e+00 488 | 1.000000000000000000e+00 489 | 1.000000000000000000e+00 490 | 1.000000000000000000e+00 491 | 1.000000000000000000e+00 492 | 1.000000000000000000e+00 493 | 1.000000000000000000e+00 494 | 1.000000000000000000e+00 495 | 1.000000000000000000e+00 496 | 1.000000000000000000e+00 497 | 1.000000000000000000e+00 498 | 1.000000000000000000e+00 499 | 1.000000000000000000e+00 500 | 1.000000000000000000e+00 501 | 1.000000000000000000e+00 502 | 1.000000000000000000e+00 503 | 1.000000000000000000e+00 504 | 1.000000000000000000e+00 505 | 1.000000000000000000e+00 506 | 1.000000000000000000e+00 507 | 1.000000000000000000e+00 508 | 1.000000000000000000e+00 509 | 1.000000000000000000e+00 510 | 1.000000000000000000e+00 511 | 1.000000000000000000e+00 512 | 1.000000000000000000e+00 513 | 1.000000000000000000e+00 514 | 1.000000000000000000e+00 515 | 1.000000000000000000e+00 516 | 1.000000000000000000e+00 517 | 1.000000000000000000e+00 518 | 1.000000000000000000e+00 519 | 1.000000000000000000e+00 520 | 1.000000000000000000e+00 521 | 1.000000000000000000e+00 522 | 1.000000000000000000e+00 523 | 1.000000000000000000e+00 524 | 1.000000000000000000e+00 525 | 1.000000000000000000e+00 526 | 1.000000000000000000e+00 527 | 1.000000000000000000e+00 528 | 1.000000000000000000e+00 529 | 1.000000000000000000e+00 530 | 1.000000000000000000e+00 531 | 1.000000000000000000e+00 532 | 1.000000000000000000e+00 533 | 1.000000000000000000e+00 534 | 1.000000000000000000e+00 535 | 1.000000000000000000e+00 536 | 1.000000000000000000e+00 537 | 1.000000000000000000e+00 538 | 1.000000000000000000e+00 539 | 1.000000000000000000e+00 540 | 1.000000000000000000e+00 541 | 1.000000000000000000e+00 542 | 1.000000000000000000e+00 543 | 1.000000000000000000e+00 544 | 1.000000000000000000e+00 545 | 1.000000000000000000e+00 546 | 1.000000000000000000e+00 547 | 1.000000000000000000e+00 548 | 1.000000000000000000e+00 549 | 1.000000000000000000e+00 550 | 1.000000000000000000e+00 551 | 1.000000000000000000e+00 552 | 1.000000000000000000e+00 553 | 1.000000000000000000e+00 554 | 1.000000000000000000e+00 555 | 1.000000000000000000e+00 556 | 
1.000000000000000000e+00 557 | 1.000000000000000000e+00 558 | 1.000000000000000000e+00 559 | 1.000000000000000000e+00 560 | 1.000000000000000000e+00 561 | 1.000000000000000000e+00 562 | 1.000000000000000000e+00 563 | 1.000000000000000000e+00 564 | 1.000000000000000000e+00 565 | 1.000000000000000000e+00 566 | 1.000000000000000000e+00 567 | 1.000000000000000000e+00 568 | 1.000000000000000000e+00 569 | 1.000000000000000000e+00 570 | 1.000000000000000000e+00 571 | 1.000000000000000000e+00 572 | 1.000000000000000000e+00 573 | 1.000000000000000000e+00 574 | 1.000000000000000000e+00 575 | 1.000000000000000000e+00 576 | 1.000000000000000000e+00 577 | 1.000000000000000000e+00 578 | 1.000000000000000000e+00 579 | 1.000000000000000000e+00 580 | 1.000000000000000000e+00 581 | 1.000000000000000000e+00 582 | 1.000000000000000000e+00 583 | 1.000000000000000000e+00 584 | 1.000000000000000000e+00 585 | 1.000000000000000000e+00 586 | 1.000000000000000000e+00 587 | 1.000000000000000000e+00 588 | 1.000000000000000000e+00 589 | 1.000000000000000000e+00 590 | 1.000000000000000000e+00 591 | 1.000000000000000000e+00 592 | 1.000000000000000000e+00 593 | 1.000000000000000000e+00 594 | 1.000000000000000000e+00 595 | 1.000000000000000000e+00 596 | 1.000000000000000000e+00 597 | 1.000000000000000000e+00 598 | 1.000000000000000000e+00 599 | 1.000000000000000000e+00 600 | 1.000000000000000000e+00 601 | 1.000000000000000000e+00 602 | 1.000000000000000000e+00 603 | 1.000000000000000000e+00 604 | 1.000000000000000000e+00 605 | 1.000000000000000000e+00 606 | 1.000000000000000000e+00 607 | 1.000000000000000000e+00 608 | 1.000000000000000000e+00 609 | 1.000000000000000000e+00 610 | 1.000000000000000000e+00 611 | 1.000000000000000000e+00 612 | 1.000000000000000000e+00 613 | 1.000000000000000000e+00 614 | 1.000000000000000000e+00 615 | 0.000000000000000000e+00 616 | 0.000000000000000000e+00 617 | 0.000000000000000000e+00 618 | 0.000000000000000000e+00 619 | 0.000000000000000000e+00 620 | 0.000000000000000000e+00 621 | 0.000000000000000000e+00 622 | 0.000000000000000000e+00 623 | 0.000000000000000000e+00 624 | 0.000000000000000000e+00 625 | 0.000000000000000000e+00 626 | 0.000000000000000000e+00 627 | 0.000000000000000000e+00 628 | 0.000000000000000000e+00 629 | 0.000000000000000000e+00 630 | 0.000000000000000000e+00 631 | 0.000000000000000000e+00 632 | 0.000000000000000000e+00 633 | 0.000000000000000000e+00 634 | 0.000000000000000000e+00 635 | 0.000000000000000000e+00 636 | 0.000000000000000000e+00 637 | 0.000000000000000000e+00 638 | 0.000000000000000000e+00 639 | 0.000000000000000000e+00 640 | 0.000000000000000000e+00 641 | 0.000000000000000000e+00 642 | 0.000000000000000000e+00 643 | 0.000000000000000000e+00 644 | 0.000000000000000000e+00 645 | 0.000000000000000000e+00 646 | 0.000000000000000000e+00 647 | 0.000000000000000000e+00 648 | 0.000000000000000000e+00 649 | 0.000000000000000000e+00 650 | 0.000000000000000000e+00 651 | 0.000000000000000000e+00 652 | 0.000000000000000000e+00 653 | 0.000000000000000000e+00 654 | 0.000000000000000000e+00 655 | 0.000000000000000000e+00 656 | 0.000000000000000000e+00 657 | 0.000000000000000000e+00 658 | 0.000000000000000000e+00 659 | 0.000000000000000000e+00 660 | 0.000000000000000000e+00 661 | 0.000000000000000000e+00 662 | 0.000000000000000000e+00 663 | 0.000000000000000000e+00 664 | 0.000000000000000000e+00 665 | 0.000000000000000000e+00 666 | 0.000000000000000000e+00 667 | 0.000000000000000000e+00 668 | 0.000000000000000000e+00 669 | 0.000000000000000000e+00 670 | 
0.000000000000000000e+00 671 | 0.000000000000000000e+00 672 | 0.000000000000000000e+00 673 | 0.000000000000000000e+00 674 | 0.000000000000000000e+00 675 | 0.000000000000000000e+00 676 | 0.000000000000000000e+00 677 | 0.000000000000000000e+00 678 | 0.000000000000000000e+00 679 | 0.000000000000000000e+00 680 | 0.000000000000000000e+00 681 | 0.000000000000000000e+00 682 | 0.000000000000000000e+00 683 | 0.000000000000000000e+00 684 | 0.000000000000000000e+00 685 | 0.000000000000000000e+00 686 | 0.000000000000000000e+00 687 | 0.000000000000000000e+00 688 | 0.000000000000000000e+00 689 | 0.000000000000000000e+00 690 | 0.000000000000000000e+00 691 | 0.000000000000000000e+00 692 | 0.000000000000000000e+00 693 | 0.000000000000000000e+00 694 | 0.000000000000000000e+00 695 | 0.000000000000000000e+00 696 | 0.000000000000000000e+00 697 | 0.000000000000000000e+00 698 | 0.000000000000000000e+00 699 | 0.000000000000000000e+00 700 | 0.000000000000000000e+00 701 | 0.000000000000000000e+00 702 | 0.000000000000000000e+00 703 | 0.000000000000000000e+00 704 | 0.000000000000000000e+00 705 | 0.000000000000000000e+00 706 | 0.000000000000000000e+00 707 | 0.000000000000000000e+00 708 | 0.000000000000000000e+00 709 | 0.000000000000000000e+00 710 | 0.000000000000000000e+00 711 | 0.000000000000000000e+00 712 | 0.000000000000000000e+00 713 | 0.000000000000000000e+00 714 | 0.000000000000000000e+00 715 | 0.000000000000000000e+00 716 | 0.000000000000000000e+00 717 | 0.000000000000000000e+00 718 | 0.000000000000000000e+00 719 | 0.000000000000000000e+00 720 | 0.000000000000000000e+00 721 | 0.000000000000000000e+00 722 | 0.000000000000000000e+00 723 | 0.000000000000000000e+00 724 | 0.000000000000000000e+00 725 | 0.000000000000000000e+00 726 | 0.000000000000000000e+00 727 | 0.000000000000000000e+00 728 | 0.000000000000000000e+00 729 | 0.000000000000000000e+00 730 | 0.000000000000000000e+00 731 | 0.000000000000000000e+00 732 | 0.000000000000000000e+00 733 | 0.000000000000000000e+00 734 | 0.000000000000000000e+00 735 | 0.000000000000000000e+00 736 | 0.000000000000000000e+00 737 | 0.000000000000000000e+00 738 | 0.000000000000000000e+00 739 | 0.000000000000000000e+00 740 | 0.000000000000000000e+00 741 | 0.000000000000000000e+00 742 | 0.000000000000000000e+00 743 | 0.000000000000000000e+00 744 | 0.000000000000000000e+00 745 | 0.000000000000000000e+00 746 | 0.000000000000000000e+00 747 | 0.000000000000000000e+00 748 | 0.000000000000000000e+00 749 | 0.000000000000000000e+00 750 | 0.000000000000000000e+00 751 | 0.000000000000000000e+00 752 | 0.000000000000000000e+00 753 | 0.000000000000000000e+00 754 | 0.000000000000000000e+00 755 | 0.000000000000000000e+00 756 | 0.000000000000000000e+00 757 | 0.000000000000000000e+00 758 | 0.000000000000000000e+00 759 | 0.000000000000000000e+00 760 | 0.000000000000000000e+00 761 | 0.000000000000000000e+00 762 | 0.000000000000000000e+00 763 | 0.000000000000000000e+00 764 | 0.000000000000000000e+00 765 | 0.000000000000000000e+00 766 | 0.000000000000000000e+00 767 | 0.000000000000000000e+00 768 | 0.000000000000000000e+00 769 | 0.000000000000000000e+00 770 | 0.000000000000000000e+00 771 | 0.000000000000000000e+00 772 | 0.000000000000000000e+00 773 | 0.000000000000000000e+00 774 | 0.000000000000000000e+00 775 | 0.000000000000000000e+00 776 | 0.000000000000000000e+00 777 | 0.000000000000000000e+00 778 | 0.000000000000000000e+00 779 | 0.000000000000000000e+00 780 | 0.000000000000000000e+00 781 | 0.000000000000000000e+00 782 | 0.000000000000000000e+00 783 | 0.000000000000000000e+00 784 | 
0.000000000000000000e+00 785 | 0.000000000000000000e+00 786 | 0.000000000000000000e+00 787 | 0.000000000000000000e+00 788 | 0.000000000000000000e+00 789 | 0.000000000000000000e+00 790 | 0.000000000000000000e+00 791 | 0.000000000000000000e+00 792 | 0.000000000000000000e+00 793 | 0.000000000000000000e+00 794 | 0.000000000000000000e+00 795 | 0.000000000000000000e+00 796 | 0.000000000000000000e+00 797 | 0.000000000000000000e+00 798 | 0.000000000000000000e+00 799 | 0.000000000000000000e+00 800 | 0.000000000000000000e+00 801 | 0.000000000000000000e+00 802 | 0.000000000000000000e+00 803 | 0.000000000000000000e+00 804 | 0.000000000000000000e+00 805 | 0.000000000000000000e+00 806 | 0.000000000000000000e+00 807 | 0.000000000000000000e+00 808 | 0.000000000000000000e+00 809 | 0.000000000000000000e+00 810 | 0.000000000000000000e+00 811 | 0.000000000000000000e+00 812 | 0.000000000000000000e+00 813 | 0.000000000000000000e+00 814 | 0.000000000000000000e+00 815 | 0.000000000000000000e+00 816 | 0.000000000000000000e+00 817 | 0.000000000000000000e+00 818 | 0.000000000000000000e+00 819 | 0.000000000000000000e+00 820 | 0.000000000000000000e+00 821 | 0.000000000000000000e+00 822 | 0.000000000000000000e+00 823 | 0.000000000000000000e+00 824 | 0.000000000000000000e+00 825 | 0.000000000000000000e+00 826 | 0.000000000000000000e+00 827 | 0.000000000000000000e+00 828 | 0.000000000000000000e+00 829 | 0.000000000000000000e+00 830 | 0.000000000000000000e+00 831 | 0.000000000000000000e+00 832 | 0.000000000000000000e+00 833 | 0.000000000000000000e+00 834 | 0.000000000000000000e+00 835 | 0.000000000000000000e+00 836 | 0.000000000000000000e+00 837 | 0.000000000000000000e+00 838 | 0.000000000000000000e+00 839 | 0.000000000000000000e+00 840 | 0.000000000000000000e+00 841 | 0.000000000000000000e+00 842 | 0.000000000000000000e+00 843 | 0.000000000000000000e+00 844 | 0.000000000000000000e+00 845 | 0.000000000000000000e+00 846 | 0.000000000000000000e+00 847 | 0.000000000000000000e+00 848 | 0.000000000000000000e+00 849 | 0.000000000000000000e+00 850 | 0.000000000000000000e+00 851 | 0.000000000000000000e+00 852 | 0.000000000000000000e+00 853 | 0.000000000000000000e+00 854 | 0.000000000000000000e+00 855 | 0.000000000000000000e+00 856 | 0.000000000000000000e+00 857 | 0.000000000000000000e+00 858 | 0.000000000000000000e+00 859 | 0.000000000000000000e+00 860 | 0.000000000000000000e+00 861 | 0.000000000000000000e+00 862 | 0.000000000000000000e+00 863 | 0.000000000000000000e+00 864 | 0.000000000000000000e+00 865 | 0.000000000000000000e+00 866 | 0.000000000000000000e+00 867 | 0.000000000000000000e+00 868 | 0.000000000000000000e+00 869 | 0.000000000000000000e+00 870 | 0.000000000000000000e+00 871 | 0.000000000000000000e+00 872 | 0.000000000000000000e+00 873 | 0.000000000000000000e+00 874 | 0.000000000000000000e+00 875 | 0.000000000000000000e+00 876 | 0.000000000000000000e+00 877 | 0.000000000000000000e+00 878 | 0.000000000000000000e+00 879 | 0.000000000000000000e+00 880 | 0.000000000000000000e+00 881 | 0.000000000000000000e+00 882 | 0.000000000000000000e+00 883 | 0.000000000000000000e+00 884 | 0.000000000000000000e+00 885 | 0.000000000000000000e+00 886 | 0.000000000000000000e+00 887 | 0.000000000000000000e+00 888 | 0.000000000000000000e+00 889 | 0.000000000000000000e+00 890 | 0.000000000000000000e+00 891 | 0.000000000000000000e+00 892 | 0.000000000000000000e+00 893 | 0.000000000000000000e+00 894 | 0.000000000000000000e+00 895 | 0.000000000000000000e+00 896 | 0.000000000000000000e+00 897 | 0.000000000000000000e+00 898 | 
0.000000000000000000e+00 899 | 0.000000000000000000e+00 900 | 0.000000000000000000e+00 901 | 0.000000000000000000e+00 902 | 0.000000000000000000e+00 903 | 0.000000000000000000e+00 904 | 0.000000000000000000e+00 905 | 0.000000000000000000e+00 906 | 0.000000000000000000e+00 907 | 0.000000000000000000e+00 908 | 0.000000000000000000e+00 909 | 0.000000000000000000e+00 910 | 0.000000000000000000e+00 911 | 0.000000000000000000e+00 912 | 0.000000000000000000e+00 913 | 0.000000000000000000e+00 914 | 0.000000000000000000e+00 915 | 0.000000000000000000e+00 916 | 0.000000000000000000e+00 917 | 0.000000000000000000e+00 918 | 0.000000000000000000e+00 919 | 0.000000000000000000e+00 920 | 0.000000000000000000e+00 921 | 0.000000000000000000e+00 922 | 0.000000000000000000e+00 923 | 0.000000000000000000e+00 924 | 0.000000000000000000e+00 925 | 0.000000000000000000e+00 926 | 0.000000000000000000e+00 927 | 0.000000000000000000e+00 928 | 0.000000000000000000e+00 929 | 0.000000000000000000e+00 930 | 0.000000000000000000e+00 931 | 0.000000000000000000e+00 932 | 0.000000000000000000e+00 933 | 0.000000000000000000e+00 934 | 0.000000000000000000e+00 935 | 0.000000000000000000e+00 936 | 0.000000000000000000e+00 937 | 0.000000000000000000e+00 938 | 0.000000000000000000e+00 939 | 0.000000000000000000e+00 940 | 0.000000000000000000e+00 941 | 0.000000000000000000e+00 942 | 0.000000000000000000e+00 943 | 0.000000000000000000e+00 944 | 0.000000000000000000e+00 945 | 0.000000000000000000e+00 946 | 0.000000000000000000e+00 947 | 0.000000000000000000e+00 948 | 0.000000000000000000e+00 949 | 0.000000000000000000e+00 950 | 0.000000000000000000e+00 951 | 0.000000000000000000e+00 952 | 0.000000000000000000e+00 953 | 0.000000000000000000e+00 954 | 0.000000000000000000e+00 955 | 0.000000000000000000e+00 956 | 0.000000000000000000e+00 957 | 0.000000000000000000e+00 958 | 0.000000000000000000e+00 959 | 0.000000000000000000e+00 960 | 0.000000000000000000e+00 961 | 0.000000000000000000e+00 962 | 0.000000000000000000e+00 963 | 0.000000000000000000e+00 964 | 0.000000000000000000e+00 965 | 0.000000000000000000e+00 966 | 0.000000000000000000e+00 967 | 0.000000000000000000e+00 968 | 0.000000000000000000e+00 969 | 0.000000000000000000e+00 970 | 0.000000000000000000e+00 971 | 0.000000000000000000e+00 972 | 0.000000000000000000e+00 973 | 0.000000000000000000e+00 974 | 0.000000000000000000e+00 975 | 0.000000000000000000e+00 976 | 0.000000000000000000e+00 977 | 0.000000000000000000e+00 978 | 0.000000000000000000e+00 979 | 0.000000000000000000e+00 980 | 0.000000000000000000e+00 981 | 0.000000000000000000e+00 982 | 0.000000000000000000e+00 983 | 0.000000000000000000e+00 984 | 0.000000000000000000e+00 985 | 0.000000000000000000e+00 986 | 0.000000000000000000e+00 987 | 0.000000000000000000e+00 988 | 0.000000000000000000e+00 989 | 0.000000000000000000e+00 990 | 0.000000000000000000e+00 991 | 0.000000000000000000e+00 992 | 0.000000000000000000e+00 993 | 0.000000000000000000e+00 994 | 0.000000000000000000e+00 995 | 0.000000000000000000e+00 996 | 0.000000000000000000e+00 997 | 0.000000000000000000e+00 998 | 0.000000000000000000e+00 999 | 0.000000000000000000e+00 1000 | 0.000000000000000000e+00 1001 | 0.000000000000000000e+00 1002 | 0.000000000000000000e+00 1003 | 0.000000000000000000e+00 1004 | 0.000000000000000000e+00 1005 | 0.000000000000000000e+00 1006 | 0.000000000000000000e+00 1007 | 0.000000000000000000e+00 1008 | 0.000000000000000000e+00 1009 | 0.000000000000000000e+00 1010 | 0.000000000000000000e+00 1011 | 0.000000000000000000e+00 1012 | 
0.000000000000000000e+00 1013 | 0.000000000000000000e+00 1014 | 0.000000000000000000e+00 1015 | 0.000000000000000000e+00 1016 | 0.000000000000000000e+00 1017 | 0.000000000000000000e+00 1018 | 0.000000000000000000e+00 1019 | 0.000000000000000000e+00 1020 | 0.000000000000000000e+00 1021 | 0.000000000000000000e+00 1022 | 0.000000000000000000e+00 1023 | 0.000000000000000000e+00 1024 | 0.000000000000000000e+00 1025 | 0.000000000000000000e+00 1026 | 0.000000000000000000e+00 1027 | 0.000000000000000000e+00 1028 | 0.000000000000000000e+00 1029 | 0.000000000000000000e+00 1030 | 0.000000000000000000e+00 1031 | 0.000000000000000000e+00 1032 | 0.000000000000000000e+00 1033 | 0.000000000000000000e+00 1034 | 0.000000000000000000e+00 1035 | 0.000000000000000000e+00 1036 | 0.000000000000000000e+00 1037 | 0.000000000000000000e+00 1038 | 0.000000000000000000e+00 1039 | 0.000000000000000000e+00 1040 | 0.000000000000000000e+00 1041 | 0.000000000000000000e+00 1042 | 0.000000000000000000e+00 1043 | 0.000000000000000000e+00 1044 | 0.000000000000000000e+00 1045 | 0.000000000000000000e+00 1046 | 0.000000000000000000e+00 1047 | 0.000000000000000000e+00 1048 | 0.000000000000000000e+00 1049 | 0.000000000000000000e+00 1050 | 0.000000000000000000e+00 1051 | 0.000000000000000000e+00 1052 | 0.000000000000000000e+00 1053 | 0.000000000000000000e+00 1054 | 0.000000000000000000e+00 1055 | 0.000000000000000000e+00 1056 | 0.000000000000000000e+00 1057 | 0.000000000000000000e+00 1058 | 0.000000000000000000e+00 1059 | 0.000000000000000000e+00 1060 | 0.000000000000000000e+00 1061 | 0.000000000000000000e+00 1062 | 0.000000000000000000e+00 1063 | 0.000000000000000000e+00 1064 | 0.000000000000000000e+00 1065 | 0.000000000000000000e+00 1066 | 0.000000000000000000e+00 1067 | 0.000000000000000000e+00 1068 | 0.000000000000000000e+00 1069 | 0.000000000000000000e+00 1070 | 0.000000000000000000e+00 1071 | 0.000000000000000000e+00 1072 | 0.000000000000000000e+00 1073 | 0.000000000000000000e+00 1074 | 0.000000000000000000e+00 1075 | 0.000000000000000000e+00 1076 | 0.000000000000000000e+00 1077 | 0.000000000000000000e+00 1078 | 0.000000000000000000e+00 1079 | 0.000000000000000000e+00 1080 | 0.000000000000000000e+00 1081 | 0.000000000000000000e+00 1082 | 0.000000000000000000e+00 1083 | 0.000000000000000000e+00 1084 | 0.000000000000000000e+00 1085 | 0.000000000000000000e+00 1086 | 0.000000000000000000e+00 1087 | 0.000000000000000000e+00 1088 | 0.000000000000000000e+00 1089 | 0.000000000000000000e+00 1090 | 0.000000000000000000e+00 1091 | 0.000000000000000000e+00 1092 | 0.000000000000000000e+00 1093 | 0.000000000000000000e+00 1094 | 0.000000000000000000e+00 1095 | 0.000000000000000000e+00 1096 | 0.000000000000000000e+00 1097 | 0.000000000000000000e+00 1098 | 0.000000000000000000e+00 1099 | 0.000000000000000000e+00 1100 | 0.000000000000000000e+00 1101 | 0.000000000000000000e+00 1102 | 0.000000000000000000e+00 1103 | 0.000000000000000000e+00 1104 | 0.000000000000000000e+00 1105 | 0.000000000000000000e+00 1106 | 0.000000000000000000e+00 1107 | 0.000000000000000000e+00 1108 | 0.000000000000000000e+00 1109 | 0.000000000000000000e+00 1110 | 0.000000000000000000e+00 1111 | 0.000000000000000000e+00 1112 | 0.000000000000000000e+00 1113 | 0.000000000000000000e+00 1114 | 0.000000000000000000e+00 1115 | 0.000000000000000000e+00 1116 | 0.000000000000000000e+00 1117 | 0.000000000000000000e+00 1118 | 0.000000000000000000e+00 1119 | 0.000000000000000000e+00 1120 | 0.000000000000000000e+00 1121 | 0.000000000000000000e+00 1122 | 0.000000000000000000e+00 1123 | 
0.000000000000000000e+00 1124 | 0.000000000000000000e+00 1125 | 0.000000000000000000e+00 1126 | 0.000000000000000000e+00 1127 | 0.000000000000000000e+00 1128 | 0.000000000000000000e+00 1129 | 0.000000000000000000e+00 1130 | 0.000000000000000000e+00 1131 | 0.000000000000000000e+00 1132 | 0.000000000000000000e+00 1133 | 0.000000000000000000e+00 1134 | 0.000000000000000000e+00 1135 | 0.000000000000000000e+00 1136 | 0.000000000000000000e+00 1137 | 0.000000000000000000e+00 1138 | 0.000000000000000000e+00 1139 | 0.000000000000000000e+00 1140 | 0.000000000000000000e+00 1141 | 0.000000000000000000e+00 1142 | 0.000000000000000000e+00 1143 | 0.000000000000000000e+00 1144 | 0.000000000000000000e+00 1145 | 0.000000000000000000e+00 1146 | 0.000000000000000000e+00 1147 | 0.000000000000000000e+00 1148 | 0.000000000000000000e+00 1149 | 0.000000000000000000e+00 1150 | 0.000000000000000000e+00 1151 | 0.000000000000000000e+00 1152 | 0.000000000000000000e+00 1153 | 0.000000000000000000e+00 1154 | 0.000000000000000000e+00 1155 | 0.000000000000000000e+00 1156 | 0.000000000000000000e+00 1157 | 0.000000000000000000e+00 1158 | 0.000000000000000000e+00 1159 | 0.000000000000000000e+00 1160 | 0.000000000000000000e+00 1161 | 0.000000000000000000e+00 1162 | 0.000000000000000000e+00 1163 | 0.000000000000000000e+00 1164 | 0.000000000000000000e+00 1165 | 0.000000000000000000e+00 1166 | 0.000000000000000000e+00 1167 | 0.000000000000000000e+00 1168 | 0.000000000000000000e+00 1169 | 0.000000000000000000e+00 1170 | 0.000000000000000000e+00 1171 | 0.000000000000000000e+00 1172 | 0.000000000000000000e+00 1173 | 0.000000000000000000e+00 1174 | 0.000000000000000000e+00 1175 | 0.000000000000000000e+00 1176 | 0.000000000000000000e+00 1177 | 0.000000000000000000e+00 1178 | 0.000000000000000000e+00 1179 | 0.000000000000000000e+00 1180 | 0.000000000000000000e+00 1181 | 0.000000000000000000e+00 1182 | 0.000000000000000000e+00 1183 | 0.000000000000000000e+00 1184 | 0.000000000000000000e+00 1185 | 0.000000000000000000e+00 1186 | 0.000000000000000000e+00 1187 | 0.000000000000000000e+00 1188 | 0.000000000000000000e+00 1189 | 0.000000000000000000e+00 1190 | 0.000000000000000000e+00 1191 | 0.000000000000000000e+00 1192 | 0.000000000000000000e+00 1193 | 0.000000000000000000e+00 1194 | 0.000000000000000000e+00 1195 | 0.000000000000000000e+00 1196 | 0.000000000000000000e+00 1197 | 0.000000000000000000e+00 1198 | 0.000000000000000000e+00 1199 | 0.000000000000000000e+00 1200 | 0.000000000000000000e+00 1201 | 0.000000000000000000e+00 1202 | 0.000000000000000000e+00 1203 | 0.000000000000000000e+00 1204 | 0.000000000000000000e+00 1205 | 0.000000000000000000e+00 1206 | 0.000000000000000000e+00 1207 | 0.000000000000000000e+00 1208 | 0.000000000000000000e+00 1209 | 0.000000000000000000e+00 1210 | 0.000000000000000000e+00 1211 | 0.000000000000000000e+00 1212 | 0.000000000000000000e+00 1213 | 0.000000000000000000e+00 1214 | 0.000000000000000000e+00 1215 | 0.000000000000000000e+00 1216 | 0.000000000000000000e+00 1217 | 0.000000000000000000e+00 1218 | 0.000000000000000000e+00 1219 | 0.000000000000000000e+00 1220 | 0.000000000000000000e+00 1221 | 0.000000000000000000e+00 1222 | 0.000000000000000000e+00 1223 | 0.000000000000000000e+00 1224 | 0.000000000000000000e+00 1225 | 0.000000000000000000e+00 1226 | 0.000000000000000000e+00 1227 | 0.000000000000000000e+00 1228 | 0.000000000000000000e+00 1229 | 0.000000000000000000e+00 1230 | 0.000000000000000000e+00 1231 | 0.000000000000000000e+00 1232 | 0.000000000000000000e+00 1233 | 0.000000000000000000e+00 1234 | 
0.000000000000000000e+00 1235 | 0.000000000000000000e+00 1236 | 0.000000000000000000e+00 1237 | 0.000000000000000000e+00 1238 | 0.000000000000000000e+00 1239 | 0.000000000000000000e+00 1240 | 0.000000000000000000e+00 1241 | 0.000000000000000000e+00 1242 | 0.000000000000000000e+00 1243 | 0.000000000000000000e+00 1244 | 0.000000000000000000e+00 1245 | 0.000000000000000000e+00 1246 | 0.000000000000000000e+00 1247 | 0.000000000000000000e+00 1248 | 0.000000000000000000e+00 1249 | 0.000000000000000000e+00 1250 | 0.000000000000000000e+00 1251 | 0.000000000000000000e+00 1252 | 0.000000000000000000e+00 1253 | 0.000000000000000000e+00 1254 | 0.000000000000000000e+00 1255 | 0.000000000000000000e+00 1256 | 0.000000000000000000e+00 1257 | 0.000000000000000000e+00 1258 | 0.000000000000000000e+00 1259 | 0.000000000000000000e+00 1260 | 0.000000000000000000e+00 1261 | 0.000000000000000000e+00 1262 | 0.000000000000000000e+00 1263 | 0.000000000000000000e+00 1264 | 0.000000000000000000e+00 1265 | 0.000000000000000000e+00 1266 | 0.000000000000000000e+00 1267 | 0.000000000000000000e+00 1268 | 0.000000000000000000e+00 1269 | 0.000000000000000000e+00 1270 | 0.000000000000000000e+00 1271 | 0.000000000000000000e+00 1272 | 0.000000000000000000e+00 1273 | 0.000000000000000000e+00 1274 | 0.000000000000000000e+00 1275 | 0.000000000000000000e+00 1276 | 0.000000000000000000e+00 1277 | 0.000000000000000000e+00 1278 | 0.000000000000000000e+00 1279 | 0.000000000000000000e+00 1280 | 0.000000000000000000e+00 1281 | 0.000000000000000000e+00 1282 | 0.000000000000000000e+00 1283 | 0.000000000000000000e+00 1284 | 0.000000000000000000e+00 1285 | 0.000000000000000000e+00 1286 | 0.000000000000000000e+00 1287 | 0.000000000000000000e+00 1288 | 0.000000000000000000e+00 1289 | 0.000000000000000000e+00 1290 | 0.000000000000000000e+00 1291 | 0.000000000000000000e+00 1292 | 0.000000000000000000e+00 1293 | 0.000000000000000000e+00 1294 | 0.000000000000000000e+00 1295 | 0.000000000000000000e+00 1296 | 0.000000000000000000e+00 1297 | 0.000000000000000000e+00 1298 | 0.000000000000000000e+00 1299 | 0.000000000000000000e+00 1300 | 0.000000000000000000e+00 1301 | 0.000000000000000000e+00 1302 | 0.000000000000000000e+00 1303 | 0.000000000000000000e+00 1304 | 0.000000000000000000e+00 1305 | 0.000000000000000000e+00 1306 | 0.000000000000000000e+00 1307 | 0.000000000000000000e+00 1308 | 0.000000000000000000e+00 1309 | 0.000000000000000000e+00 1310 | 0.000000000000000000e+00 1311 | 0.000000000000000000e+00 1312 | 0.000000000000000000e+00 1313 | 0.000000000000000000e+00 1314 | 0.000000000000000000e+00 1315 | 0.000000000000000000e+00 1316 | 0.000000000000000000e+00 1317 | 0.000000000000000000e+00 1318 | 0.000000000000000000e+00 1319 | 0.000000000000000000e+00 1320 | 0.000000000000000000e+00 1321 | 0.000000000000000000e+00 1322 | 0.000000000000000000e+00 1323 | 0.000000000000000000e+00 1324 | 0.000000000000000000e+00 1325 | 0.000000000000000000e+00 1326 | 0.000000000000000000e+00 1327 | 0.000000000000000000e+00 1328 | 0.000000000000000000e+00 1329 | 0.000000000000000000e+00 1330 | 0.000000000000000000e+00 1331 | 0.000000000000000000e+00 1332 | 0.000000000000000000e+00 1333 | 0.000000000000000000e+00 1334 | 0.000000000000000000e+00 1335 | 0.000000000000000000e+00 1336 | 0.000000000000000000e+00 1337 | 0.000000000000000000e+00 1338 | 0.000000000000000000e+00 1339 | 0.000000000000000000e+00 1340 | 0.000000000000000000e+00 1341 | 0.000000000000000000e+00 1342 | 0.000000000000000000e+00 1343 | 0.000000000000000000e+00 1344 | 0.000000000000000000e+00 1345 | 
0.000000000000000000e+00 1346 | 0.000000000000000000e+00 1347 | 0.000000000000000000e+00 1348 | 0.000000000000000000e+00 1349 | 0.000000000000000000e+00 1350 | 0.000000000000000000e+00 1351 | 0.000000000000000000e+00 1352 | 0.000000000000000000e+00 1353 | 0.000000000000000000e+00 1354 | 0.000000000000000000e+00 1355 | 0.000000000000000000e+00 1356 | 0.000000000000000000e+00 1357 | 0.000000000000000000e+00 1358 | 0.000000000000000000e+00 1359 | 0.000000000000000000e+00 1360 | 0.000000000000000000e+00 1361 | 0.000000000000000000e+00 1362 | 0.000000000000000000e+00 1363 | 0.000000000000000000e+00 1364 | 0.000000000000000000e+00 1365 | 0.000000000000000000e+00 1366 | 0.000000000000000000e+00 1367 | 0.000000000000000000e+00 1368 | 0.000000000000000000e+00 1369 | 0.000000000000000000e+00 1370 | 0.000000000000000000e+00 1371 | 0.000000000000000000e+00 1372 | 0.000000000000000000e+00 1373 | 0.000000000000000000e+00 1374 | 0.000000000000000000e+00 1375 | 0.000000000000000000e+00 1376 | 0.000000000000000000e+00 1377 | 0.000000000000000000e+00 1378 | 0.000000000000000000e+00 1379 | 0.000000000000000000e+00 1380 | 0.000000000000000000e+00 1381 | -------------------------------------------------------------------------------- /Data Sets/label_banknote.csv: -------------------------------------------------------------------------------- 1 | 0 2 | 0 3 | 0 4 | 0 5 | 0 6 | 0 7 | 0 8 | 0 9 | 0 10 | 0 11 | 0 12 | 0 13 | 0 14 | 0 15 | 0 16 | 0 17 | 0 18 | 0 19 | 0 20 | 0 21 | 0 22 | 0 23 | 0 24 | 0 25 | 0 26 | 0 27 | 0 28 | 0 29 | 0 30 | 0 31 | 0 32 | 0 33 | 0 34 | 0 35 | 0 36 | 0 37 | 0 38 | 0 39 | 0 40 | 0 41 | 0 42 | 0 43 | 0 44 | 0 45 | 0 46 | 0 47 | 0 48 | 0 49 | 0 50 | 0 51 | 0 52 | 0 53 | 0 54 | 0 55 | 0 56 | 0 57 | 0 58 | 0 59 | 0 60 | 0 61 | 0 62 | 0 63 | 0 64 | 0 65 | 0 66 | 0 67 | 0 68 | 0 69 | 0 70 | 0 71 | 0 72 | 0 73 | 0 74 | 0 75 | 0 76 | 0 77 | 0 78 | 0 79 | 0 80 | 0 81 | 0 82 | 0 83 | 0 84 | 0 85 | 0 86 | 0 87 | 0 88 | 0 89 | 0 90 | 0 91 | 0 92 | 0 93 | 0 94 | 0 95 | 0 96 | 0 97 | 0 98 | 0 99 | 0 100 | 0 101 | 0 102 | 0 103 | 0 104 | 0 105 | 0 106 | 0 107 | 0 108 | 0 109 | 0 110 | 0 111 | 0 112 | 0 113 | 0 114 | 0 115 | 0 116 | 0 117 | 0 118 | 0 119 | 0 120 | 0 121 | 0 122 | 0 123 | 0 124 | 0 125 | 0 126 | 0 127 | 0 128 | 0 129 | 0 130 | 0 131 | 0 132 | 0 133 | 0 134 | 0 135 | 0 136 | 0 137 | 0 138 | 0 139 | 0 140 | 0 141 | 0 142 | 0 143 | 0 144 | 0 145 | 0 146 | 0 147 | 0 148 | 0 149 | 0 150 | 0 151 | 0 152 | 0 153 | 0 154 | 0 155 | 0 156 | 0 157 | 0 158 | 0 159 | 0 160 | 0 161 | 0 162 | 0 163 | 0 164 | 0 165 | 0 166 | 0 167 | 0 168 | 0 169 | 0 170 | 0 171 | 0 172 | 0 173 | 0 174 | 0 175 | 0 176 | 0 177 | 0 178 | 0 179 | 0 180 | 0 181 | 0 182 | 0 183 | 0 184 | 0 185 | 0 186 | 0 187 | 0 188 | 0 189 | 0 190 | 0 191 | 0 192 | 0 193 | 0 194 | 0 195 | 0 196 | 0 197 | 0 198 | 0 199 | 0 200 | 0 201 | 0 202 | 0 203 | 0 204 | 0 205 | 0 206 | 0 207 | 0 208 | 0 209 | 0 210 | 0 211 | 0 212 | 0 213 | 0 214 | 0 215 | 0 216 | 0 217 | 0 218 | 0 219 | 0 220 | 0 221 | 0 222 | 0 223 | 0 224 | 0 225 | 0 226 | 0 227 | 0 228 | 0 229 | 0 230 | 0 231 | 0 232 | 0 233 | 0 234 | 0 235 | 0 236 | 0 237 | 0 238 | 0 239 | 0 240 | 0 241 | 0 242 | 0 243 | 0 244 | 0 245 | 0 246 | 0 247 | 0 248 | 0 249 | 0 250 | 0 251 | 0 252 | 0 253 | 0 254 | 0 255 | 0 256 | 0 257 | 0 258 | 0 259 | 0 260 | 0 261 | 0 262 | 0 263 | 0 264 | 0 265 | 0 266 | 0 267 | 0 268 | 0 269 | 0 270 | 0 271 | 0 272 | 0 273 | 0 274 | 0 275 | 0 276 | 0 277 | 0 278 | 0 279 | 0 280 | 0 281 | 0 282 | 0 283 | 0 284 | 0 285 | 0 286 | 0 287 | 0 288 | 0 289 | 0 290 | 
0 291 | 0 292 | 0 293 | 0 294 | 0 295 | 0 296 | 0 297 | 0 298 | 0 299 | 0 300 | 0 301 | 0 302 | 0 303 | 0 304 | 0 305 | 0 306 | 0 307 | 0 308 | 0 309 | 0 310 | 0 311 | 0 312 | 0 313 | 0 314 | 0 315 | 0 316 | 0 317 | 0 318 | 0 319 | 0 320 | 0 321 | 0 322 | 0 323 | 0 324 | 0 325 | 0 326 | 0 327 | 0 328 | 0 329 | 0 330 | 0 331 | 0 332 | 0 333 | 0 334 | 0 335 | 0 336 | 0 337 | 0 338 | 0 339 | 0 340 | 0 341 | 0 342 | 0 343 | 0 344 | 0 345 | 0 346 | 0 347 | 0 348 | 0 349 | 0 350 | 0 351 | 0 352 | 0 353 | 0 354 | 0 355 | 0 356 | 0 357 | 0 358 | 0 359 | 0 360 | 0 361 | 0 362 | 0 363 | 0 364 | 0 365 | 0 366 | 0 367 | 0 368 | 0 369 | 0 370 | 0 371 | 0 372 | 0 373 | 0 374 | 0 375 | 0 376 | 0 377 | 0 378 | 0 379 | 0 380 | 0 381 | 0 382 | 0 383 | 0 384 | 0 385 | 0 386 | 0 387 | 0 388 | 0 389 | 0 390 | 0 391 | 0 392 | 0 393 | 0 394 | 0 395 | 0 396 | 0 397 | 0 398 | 0 399 | 0 400 | 0 401 | 0 402 | 0 403 | 0 404 | 0 405 | 0 406 | 0 407 | 0 408 | 0 409 | 0 410 | 0 411 | 0 412 | 0 413 | 0 414 | 0 415 | 0 416 | 0 417 | 0 418 | 0 419 | 0 420 | 0 421 | 0 422 | 0 423 | 0 424 | 0 425 | 0 426 | 0 427 | 0 428 | 0 429 | 0 430 | 0 431 | 0 432 | 0 433 | 0 434 | 0 435 | 0 436 | 0 437 | 0 438 | 0 439 | 0 440 | 0 441 | 0 442 | 0 443 | 0 444 | 0 445 | 0 446 | 0 447 | 0 448 | 0 449 | 0 450 | 0 451 | 0 452 | 0 453 | 0 454 | 0 455 | 0 456 | 0 457 | 0 458 | 0 459 | 0 460 | 0 461 | 0 462 | 0 463 | 0 464 | 0 465 | 0 466 | 0 467 | 0 468 | 0 469 | 0 470 | 0 471 | 0 472 | 0 473 | 0 474 | 0 475 | 0 476 | 0 477 | 0 478 | 0 479 | 0 480 | 0 481 | 0 482 | 0 483 | 0 484 | 0 485 | 0 486 | 0 487 | 0 488 | 0 489 | 0 490 | 0 491 | 0 492 | 0 493 | 0 494 | 0 495 | 0 496 | 0 497 | 0 498 | 0 499 | 0 500 | 0 501 | 0 502 | 0 503 | 0 504 | 0 505 | 0 506 | 0 507 | 0 508 | 0 509 | 0 510 | 0 511 | 0 512 | 0 513 | 0 514 | 0 515 | 0 516 | 0 517 | 0 518 | 0 519 | 0 520 | 0 521 | 0 522 | 0 523 | 0 524 | 0 525 | 0 526 | 0 527 | 0 528 | 0 529 | 0 530 | 0 531 | 0 532 | 0 533 | 0 534 | 0 535 | 0 536 | 0 537 | 0 538 | 0 539 | 0 540 | 0 541 | 0 542 | 0 543 | 0 544 | 0 545 | 0 546 | 0 547 | 0 548 | 0 549 | 0 550 | 0 551 | 0 552 | 0 553 | 0 554 | 0 555 | 0 556 | 0 557 | 0 558 | 0 559 | 0 560 | 0 561 | 0 562 | 0 563 | 0 564 | 0 565 | 0 566 | 0 567 | 0 568 | 0 569 | 0 570 | 0 571 | 0 572 | 0 573 | 0 574 | 0 575 | 0 576 | 0 577 | 0 578 | 0 579 | 0 580 | 0 581 | 0 582 | 0 583 | 0 584 | 0 585 | 0 586 | 0 587 | 0 588 | 0 589 | 0 590 | 0 591 | 0 592 | 0 593 | 0 594 | 0 595 | 0 596 | 0 597 | 0 598 | 0 599 | 0 600 | 0 601 | 0 602 | 0 603 | 0 604 | 0 605 | 0 606 | 0 607 | 0 608 | 0 609 | 0 610 | 0 611 | 0 612 | 0 613 | 0 614 | 0 615 | 0 616 | 0 617 | 0 618 | 0 619 | 0 620 | 0 621 | 0 622 | 0 623 | 0 624 | 0 625 | 0 626 | 0 627 | 0 628 | 0 629 | 0 630 | 0 631 | 0 632 | 0 633 | 0 634 | 0 635 | 0 636 | 0 637 | 0 638 | 0 639 | 0 640 | 0 641 | 0 642 | 0 643 | 0 644 | 0 645 | 0 646 | 0 647 | 0 648 | 0 649 | 0 650 | 0 651 | 0 652 | 0 653 | 0 654 | 0 655 | 0 656 | 0 657 | 0 658 | 0 659 | 0 660 | 0 661 | 0 662 | 0 663 | 0 664 | 0 665 | 0 666 | 0 667 | 0 668 | 0 669 | 0 670 | 0 671 | 0 672 | 0 673 | 0 674 | 0 675 | 0 676 | 0 677 | 0 678 | 0 679 | 0 680 | 0 681 | 0 682 | 0 683 | 0 684 | 0 685 | 0 686 | 0 687 | 0 688 | 0 689 | 0 690 | 0 691 | 0 692 | 0 693 | 0 694 | 0 695 | 0 696 | 0 697 | 0 698 | 0 699 | 0 700 | 0 701 | 0 702 | 0 703 | 0 704 | 0 705 | 0 706 | 0 707 | 0 708 | 0 709 | 0 710 | 0 711 | 0 712 | 0 713 | 0 714 | 0 715 | 0 716 | 0 717 | 0 718 | 0 719 | 0 720 | 0 721 | 0 722 | 0 723 | 0 724 | 0 725 | 0 726 | 0 727 | 0 728 | 0 729 | 0 730 | 0 731 | 0 732 | 0 733 | 0 734 | 0 
735 | 0 736 | 0 737 | 0 738 | 0 739 | 0 740 | 0 741 | 0 742 | 0 743 | 0 744 | 0 745 | 0 746 | 0 747 | 0 748 | 0 749 | 0 750 | 0 751 | 0 752 | 0 753 | 0 754 | 0 755 | 0 756 | 0 757 | 0 758 | 0 759 | 0 760 | 0 761 | 0 762 | 0 763 | 1 764 | 1 765 | 1 766 | 1 767 | 1 768 | 1 769 | 1 770 | 1 771 | 1 772 | 1 773 | 1 774 | 1 775 | 1 776 | 1 777 | 1 778 | 1 779 | 1 780 | 1 781 | 1 782 | 1 783 | 1 784 | 1 785 | 1 786 | 1 787 | 1 788 | 1 789 | 1 790 | 1 791 | 1 792 | 1 793 | 1 794 | 1 795 | 1 796 | 1 797 | 1 798 | 1 799 | 1 800 | 1 801 | 1 802 | 1 803 | 1 804 | 1 805 | 1 806 | 1 807 | 1 808 | 1 809 | 1 810 | 1 811 | 1 812 | 1 813 | 1 814 | 1 815 | 1 816 | 1 817 | 1 818 | 1 819 | 1 820 | 1 821 | 1 822 | 1 823 | 1 824 | 1 825 | 1 826 | 1 827 | 1 828 | 1 829 | 1 830 | 1 831 | 1 832 | 1 833 | 1 834 | 1 835 | 1 836 | 1 837 | 1 838 | 1 839 | 1 840 | 1 841 | 1 842 | 1 843 | 1 844 | 1 845 | 1 846 | 1 847 | 1 848 | 1 849 | 1 850 | 1 851 | 1 852 | 1 853 | 1 854 | 1 855 | 1 856 | 1 857 | 1 858 | 1 859 | 1 860 | 1 861 | 1 862 | 1 863 | 1 864 | 1 865 | 1 866 | 1 867 | 1 868 | 1 869 | 1 870 | 1 871 | 1 872 | 1 873 | 1 874 | 1 875 | 1 876 | 1 877 | 1 878 | 1 879 | 1 880 | 1 881 | 1 882 | 1 883 | 1 884 | 1 885 | 1 886 | 1 887 | 1 888 | 1 889 | 1 890 | 1 891 | 1 892 | 1 893 | 1 894 | 1 895 | 1 896 | 1 897 | 1 898 | 1 899 | 1 900 | 1 901 | 1 902 | 1 903 | 1 904 | 1 905 | 1 906 | 1 907 | 1 908 | 1 909 | 1 910 | 1 911 | 1 912 | 1 913 | 1 914 | 1 915 | 1 916 | 1 917 | 1 918 | 1 919 | 1 920 | 1 921 | 1 922 | 1 923 | 1 924 | 1 925 | 1 926 | 1 927 | 1 928 | 1 929 | 1 930 | 1 931 | 1 932 | 1 933 | 1 934 | 1 935 | 1 936 | 1 937 | 1 938 | 1 939 | 1 940 | 1 941 | 1 942 | 1 943 | 1 944 | 1 945 | 1 946 | 1 947 | 1 948 | 1 949 | 1 950 | 1 951 | 1 952 | 1 953 | 1 954 | 1 955 | 1 956 | 1 957 | 1 958 | 1 959 | 1 960 | 1 961 | 1 962 | 1 963 | 1 964 | 1 965 | 1 966 | 1 967 | 1 968 | 1 969 | 1 970 | 1 971 | 1 972 | 1 973 | 1 974 | 1 975 | 1 976 | 1 977 | 1 978 | 1 979 | 1 980 | 1 981 | 1 982 | 1 983 | 1 984 | 1 985 | 1 986 | 1 987 | 1 988 | 1 989 | 1 990 | 1 991 | 1 992 | 1 993 | 1 994 | 1 995 | 1 996 | 1 997 | 1 998 | 1 999 | 1 1000 | 1 1001 | 1 1002 | 1 1003 | 1 1004 | 1 1005 | 1 1006 | 1 1007 | 1 1008 | 1 1009 | 1 1010 | 1 1011 | 1 1012 | 1 1013 | 1 1014 | 1 1015 | 1 1016 | 1 1017 | 1 1018 | 1 1019 | 1 1020 | 1 1021 | 1 1022 | 1 1023 | 1 1024 | 1 1025 | 1 1026 | 1 1027 | 1 1028 | 1 1029 | 1 1030 | 1 1031 | 1 1032 | 1 1033 | 1 1034 | 1 1035 | 1 1036 | 1 1037 | 1 1038 | 1 1039 | 1 1040 | 1 1041 | 1 1042 | 1 1043 | 1 1044 | 1 1045 | 1 1046 | 1 1047 | 1 1048 | 1 1049 | 1 1050 | 1 1051 | 1 1052 | 1 1053 | 1 1054 | 1 1055 | 1 1056 | 1 1057 | 1 1058 | 1 1059 | 1 1060 | 1 1061 | 1 1062 | 1 1063 | 1 1064 | 1 1065 | 1 1066 | 1 1067 | 1 1068 | 1 1069 | 1 1070 | 1 1071 | 1 1072 | 1 1073 | 1 1074 | 1 1075 | 1 1076 | 1 1077 | 1 1078 | 1 1079 | 1 1080 | 1 1081 | 1 1082 | 1 1083 | 1 1084 | 1 1085 | 1 1086 | 1 1087 | 1 1088 | 1 1089 | 1 1090 | 1 1091 | 1 1092 | 1 1093 | 1 1094 | 1 1095 | 1 1096 | 1 1097 | 1 1098 | 1 1099 | 1 1100 | 1 1101 | 1 1102 | 1 1103 | 1 1104 | 1 1105 | 1 1106 | 1 1107 | 1 1108 | 1 1109 | 1 1110 | 1 1111 | 1 1112 | 1 1113 | 1 1114 | 1 1115 | 1 1116 | 1 1117 | 1 1118 | 1 1119 | 1 1120 | 1 1121 | 1 1122 | 1 1123 | 1 1124 | 1 1125 | 1 1126 | 1 1127 | 1 1128 | 1 1129 | 1 1130 | 1 1131 | 1 1132 | 1 1133 | 1 1134 | 1 1135 | 1 1136 | 1 1137 | 1 1138 | 1 1139 | 1 1140 | 1 1141 | 1 1142 | 1 1143 | 1 1144 | 1 1145 | 1 1146 | 1 1147 | 1 1148 | 1 1149 | 1 1150 | 1 1151 | 1 1152 | 1 1153 | 1 1154 | 1 1155 | 1 1156 | 1 1157 | 1 1158 | 1 
1159 | 1 1160 | 1 1161 | 1 1162 | 1 1163 | 1 1164 | 1 1165 | 1 1166 | 1 1167 | 1 1168 | 1 1169 | 1 1170 | 1 1171 | 1 1172 | 1 1173 | 1 1174 | 1 1175 | 1 1176 | 1 1177 | 1 1178 | 1 1179 | 1 1180 | 1 1181 | 1 1182 | 1 1183 | 1 1184 | 1 1185 | 1 1186 | 1 1187 | 1 1188 | 1 1189 | 1 1190 | 1 1191 | 1 1192 | 1 1193 | 1 1194 | 1 1195 | 1 1196 | 1 1197 | 1 1198 | 1 1199 | 1 1200 | 1 1201 | 1 1202 | 1 1203 | 1 1204 | 1 1205 | 1 1206 | 1 1207 | 1 1208 | 1 1209 | 1 1210 | 1 1211 | 1 1212 | 1 1213 | 1 1214 | 1 1215 | 1 1216 | 1 1217 | 1 1218 | 1 1219 | 1 1220 | 1 1221 | 1 1222 | 1 1223 | 1 1224 | 1 1225 | 1 1226 | 1 1227 | 1 1228 | 1 1229 | 1 1230 | 1 1231 | 1 1232 | 1 1233 | 1 1234 | 1 1235 | 1 1236 | 1 1237 | 1 1238 | 1 1239 | 1 1240 | 1 1241 | 1 1242 | 1 1243 | 1 1244 | 1 1245 | 1 1246 | 1 1247 | 1 1248 | 1 1249 | 1 1250 | 1 1251 | 1 1252 | 1 1253 | 1 1254 | 1 1255 | 1 1256 | 1 1257 | 1 1258 | 1 1259 | 1 1260 | 1 1261 | 1 1262 | 1 1263 | 1 1264 | 1 1265 | 1 1266 | 1 1267 | 1 1268 | 1 1269 | 1 1270 | 1 1271 | 1 1272 | 1 1273 | 1 1274 | 1 1275 | 1 1276 | 1 1277 | 1 1278 | 1 1279 | 1 1280 | 1 1281 | 1 1282 | 1 1283 | 1 1284 | 1 1285 | 1 1286 | 1 1287 | 1 1288 | 1 1289 | 1 1290 | 1 1291 | 1 1292 | 1 1293 | 1 1294 | 1 1295 | 1 1296 | 1 1297 | 1 1298 | 1 1299 | 1 1300 | 1 1301 | 1 1302 | 1 1303 | 1 1304 | 1 1305 | 1 1306 | 1 1307 | 1 1308 | 1 1309 | 1 1310 | 1 1311 | 1 1312 | 1 1313 | 1 1314 | 1 1315 | 1 1316 | 1 1317 | 1 1318 | 1 1319 | 1 1320 | 1 1321 | 1 1322 | 1 1323 | 1 1324 | 1 1325 | 1 1326 | 1 1327 | 1 1328 | 1 1329 | 1 1330 | 1 1331 | 1 1332 | 1 1333 | 1 1334 | 1 1335 | 1 1336 | 1 1337 | 1 1338 | 1 1339 | 1 1340 | 1 1341 | 1 1342 | 1 1343 | 1 1344 | 1 1345 | 1 1346 | 1 1347 | 1 1348 | 1 1349 | 1 1350 | 1 1351 | 1 1352 | 1 1353 | 1 1354 | 1 1355 | 1 1356 | 1 1357 | 1 1358 | 1 1359 | 1 1360 | 1 1361 | 1 1362 | 1 1363 | 1 1364 | 1 1365 | 1 1366 | 1 1367 | 1 1368 | 1 1369 | 1 1370 | 1 1371 | 1 1372 | 1 1373 | -------------------------------------------------------------------------------- /Data Sets/label_blood.csv: -------------------------------------------------------------------------------- 1 | 1 2 | 1 3 | 1 4 | 1 5 | 0 6 | 0 7 | 1 8 | 0 9 | 1 10 | 1 11 | 0 12 | 0 13 | 1 14 | 0 15 | 1 16 | 1 17 | 1 18 | 1 19 | 1 20 | 1 21 | 1 22 | 0 23 | 1 24 | 1 25 | 0 26 | 0 27 | 0 28 | 1 29 | 1 30 | 0 31 | 0 32 | 1 33 | 1 34 | 1 35 | 0 36 | 1 37 | 1 38 | 1 39 | 1 40 | 1 41 | 1 42 | 0 43 | 1 44 | 0 45 | 1 46 | 1 47 | 0 48 | 0 49 | 0 50 | 0 51 | 0 52 | 1 53 | 0 54 | 0 55 | 1 56 | 1 57 | 1 58 | 1 59 | 0 60 | 0 61 | 0 62 | 1 63 | 0 64 | 1 65 | 1 66 | 0 67 | 1 68 | 0 69 | 0 70 | 0 71 | 0 72 | 0 73 | 1 74 | 0 75 | 1 76 | 1 77 | 1 78 | 0 79 | 0 80 | 0 81 | 1 82 | 0 83 | 0 84 | 0 85 | 1 86 | 0 87 | 0 88 | 0 89 | 0 90 | 1 91 | 1 92 | 0 93 | 0 94 | 0 95 | 0 96 | 0 97 | 0 98 | 1 99 | 1 100 | 1 101 | 1 102 | 1 103 | 0 104 | 0 105 | 0 106 | 0 107 | 0 108 | 0 109 | 0 110 | 0 111 | 1 112 | 0 113 | 0 114 | 1 115 | 0 116 | 0 117 | 1 118 | 0 119 | 0 120 | 1 121 | 1 122 | 1 123 | 1 124 | 1 125 | 0 126 | 0 127 | 1 128 | 0 129 | 1 130 | 1 131 | 0 132 | 0 133 | 0 134 | 0 135 | 0 136 | 0 137 | 0 138 | 0 139 | 0 140 | 0 141 | 0 142 | 0 143 | 0 144 | 0 145 | 1 146 | 1 147 | 0 148 | 0 149 | 1 150 | 0 151 | 1 152 | 0 153 | 0 154 | 1 155 | 0 156 | 0 157 | 0 158 | 0 159 | 0 160 | 0 161 | 0 162 | 0 163 | 0 164 | 1 165 | 1 166 | 1 167 | 0 168 | 0 169 | 0 170 | 0 171 | 0 172 | 0 173 | 0 174 | 0 175 | 0 176 | 0 177 | 1 178 | 1 179 | 1 180 | 0 181 | 1 182 | 0 183 | 0 184 | 0 185 | 0 186 | 0 187 | 0 188 | 0 189 | 0 190 | 1 191 | 0 192 | 0 193 
| 0 194 | 0 195 | 0 196 | 1 197 | 1 198 | 0 199 | 0 200 | 0 201 | 0 202 | 0 203 | 1 204 | 0 205 | 0 206 | 0 207 | 0 208 | 0 209 | 0 210 | 0 211 | 0 212 | 0 213 | 0 214 | 0 215 | 0 216 | 1 217 | 0 218 | 0 219 | 0 220 | 0 221 | 0 222 | 1 223 | 1 224 | 0 225 | 1 226 | 1 227 | 0 228 | 0 229 | 0 230 | 0 231 | 0 232 | 1 233 | 0 234 | 0 235 | 0 236 | 0 237 | 0 238 | 0 239 | 0 240 | 0 241 | 1 242 | 0 243 | 1 244 | 1 245 | 0 246 | 0 247 | 0 248 | 0 249 | 0 250 | 0 251 | 0 252 | 0 253 | 1 254 | 1 255 | 0 256 | 0 257 | 0 258 | 0 259 | 0 260 | 0 261 | 0 262 | 1 263 | 0 264 | 1 265 | 0 266 | 1 267 | 0 268 | 0 269 | 0 270 | 0 271 | 0 272 | 0 273 | 0 274 | 0 275 | 0 276 | 0 277 | 0 278 | 0 279 | 0 280 | 0 281 | 0 282 | 1 283 | 0 284 | 0 285 | 0 286 | 0 287 | 0 288 | 1 289 | 1 290 | 1 291 | 1 292 | 0 293 | 0 294 | 0 295 | 0 296 | 0 297 | 0 298 | 0 299 | 0 300 | 0 301 | 0 302 | 0 303 | 0 304 | 0 305 | 0 306 | 1 307 | 0 308 | 0 309 | 0 310 | 0 311 | 0 312 | 0 313 | 0 314 | 0 315 | 0 316 | 0 317 | 0 318 | 0 319 | 1 320 | 0 321 | 0 322 | 1 323 | 0 324 | 1 325 | 0 326 | 0 327 | 0 328 | 0 329 | 1 330 | 0 331 | 0 332 | 0 333 | 1 334 | 1 335 | 0 336 | 1 337 | 0 338 | 0 339 | 1 340 | 0 341 | 0 342 | 0 343 | 0 344 | 0 345 | 0 346 | 0 347 | 0 348 | 0 349 | 0 350 | 0 351 | 0 352 | 0 353 | 0 354 | 0 355 | 0 356 | 0 357 | 0 358 | 0 359 | 0 360 | 0 361 | 0 362 | 0 363 | 0 364 | 0 365 | 0 366 | 1 367 | 0 368 | 0 369 | 0 370 | 1 371 | 1 372 | 0 373 | 0 374 | 0 375 | 0 376 | 1 377 | 0 378 | 0 379 | 0 380 | 0 381 | 0 382 | 0 383 | 0 384 | 0 385 | 0 386 | 0 387 | 0 388 | 0 389 | 0 390 | 0 391 | 0 392 | 0 393 | 0 394 | 0 395 | 0 396 | 0 397 | 1 398 | 0 399 | 0 400 | 1 401 | 0 402 | 0 403 | 0 404 | 0 405 | 0 406 | 0 407 | 0 408 | 0 409 | 0 410 | 0 411 | 0 412 | 0 413 | 0 414 | 0 415 | 0 416 | 0 417 | 0 418 | 0 419 | 0 420 | 0 421 | 0 422 | 0 423 | 0 424 | 0 425 | 0 426 | 0 427 | 0 428 | 0 429 | 0 430 | 0 431 | 0 432 | 0 433 | 1 434 | 0 435 | 0 436 | 0 437 | 0 438 | 0 439 | 0 440 | 0 441 | 0 442 | 0 443 | 0 444 | 0 445 | 0 446 | 0 447 | 0 448 | 0 449 | 0 450 | 0 451 | 0 452 | 0 453 | 0 454 | 0 455 | 0 456 | 0 457 | 0 458 | 0 459 | 0 460 | 0 461 | 0 462 | 0 463 | 1 464 | 0 465 | 0 466 | 0 467 | 0 468 | 0 469 | 1 470 | 0 471 | 0 472 | 0 473 | 0 474 | 0 475 | 0 476 | 1 477 | 0 478 | 0 479 | 0 480 | 0 481 | 0 482 | 0 483 | 0 484 | 0 485 | 0 486 | 0 487 | 0 488 | 0 489 | 0 490 | 0 491 | 0 492 | 0 493 | 0 494 | 0 495 | 0 496 | 0 497 | 0 498 | 0 499 | 0 500 | 0 501 | 1 502 | 1 503 | 1 504 | 0 505 | 1 506 | 1 507 | 1 508 | 0 509 | 1 510 | 1 511 | 1 512 | 0 513 | 0 514 | 0 515 | 1 516 | 0 517 | 1 518 | 1 519 | 1 520 | 1 521 | 1 522 | 1 523 | 1 524 | 1 525 | 0 526 | 0 527 | 1 528 | 0 529 | 0 530 | 1 531 | 1 532 | 1 533 | 0 534 | 0 535 | 0 536 | 1 537 | 1 538 | 1 539 | 1 540 | 1 541 | 1 542 | 0 543 | 1 544 | 1 545 | 0 546 | 1 547 | 0 548 | 0 549 | 1 550 | 1 551 | 0 552 | 1 553 | 1 554 | 0 555 | 0 556 | 0 557 | 0 558 | 0 559 | 0 560 | 0 561 | 1 562 | 0 563 | 1 564 | 0 565 | 0 566 | 0 567 | 0 568 | 0 569 | 1 570 | 0 571 | 0 572 | 0 573 | 1 574 | 0 575 | 1 576 | 1 577 | 1 578 | 0 579 | 0 580 | 0 581 | 0 582 | 1 583 | 0 584 | 0 585 | 0 586 | 0 587 | 1 588 | 0 589 | 0 590 | 0 591 | 0 592 | 0 593 | 0 594 | 1 595 | 0 596 | 0 597 | 1 598 | 0 599 | 0 600 | 0 601 | 0 602 | 0 603 | 0 604 | 0 605 | 0 606 | 0 607 | 0 608 | 0 609 | 0 610 | 0 611 | 0 612 | 0 613 | 1 614 | 0 615 | 0 616 | 0 617 | 0 618 | 0 619 | 1 620 | 0 621 | 0 622 | 0 623 | 1 624 | 0 625 | 1 626 | 0 627 | 0 628 | 0 629 | 0 630 | 0 631 | 0 632 | 0 633 | 0 634 | 0 635 | 0 636 | 0 637 | 
0 638 | 0 639 | 0 640 | 1 641 | 1 642 | 0 643 | 0 644 | 0 645 | 1 646 | 0 647 | 0 648 | 0 649 | 0 650 | 0 651 | 0 652 | 0 653 | 0 654 | 0 655 | 0 656 | 0 657 | 0 658 | 0 659 | 0 660 | 0 661 | 0 662 | 0 663 | 0 664 | 0 665 | 0 666 | 0 667 | 0 668 | 1 669 | 0 670 | 1 671 | 0 672 | 0 673 | 0 674 | 0 675 | 0 676 | 0 677 | 0 678 | 0 679 | 0 680 | 0 681 | 1 682 | 0 683 | 0 684 | 0 685 | 0 686 | 0 687 | 0 688 | 0 689 | 0 690 | 0 691 | 0 692 | 0 693 | 0 694 | 0 695 | 0 696 | 1 697 | 0 698 | 0 699 | 0 700 | 0 701 | 0 702 | 0 703 | 0 704 | 0 705 | 0 706 | 0 707 | 0 708 | 0 709 | 1 710 | 0 711 | 0 712 | 0 713 | 1 714 | 0 715 | 0 716 | 0 717 | 0 718 | 0 719 | 0 720 | 0 721 | 0 722 | 0 723 | 0 724 | 0 725 | 0 726 | 0 727 | 0 728 | 0 729 | 0 730 | 0 731 | 0 732 | 0 733 | 1 734 | 0 735 | 0 736 | 0 737 | 0 738 | 0 739 | 0 740 | 0 741 | 0 742 | 0 743 | 0 744 | 0 745 | 0 746 | 0 747 | 0 748 | 0 749 | -------------------------------------------------------------------------------- /Data Sets/label_breast.csv: -------------------------------------------------------------------------------- 1 | 0 2 | 0 3 | 0 4 | 0 5 | 0 6 | 1 7 | 0 8 | 0 9 | 0 10 | 0 11 | 0 12 | 0 13 | 1 14 | 0 15 | 1 16 | 1 17 | 0 18 | 0 19 | 1 20 | 0 21 | 1 22 | 1 23 | 0 24 | 1 25 | 0 26 | 1 27 | 0 28 | 0 29 | 0 30 | 0 31 | 0 32 | 0 33 | 1 34 | 0 35 | 0 36 | 0 37 | 1 38 | 0 39 | 1 40 | 1 41 | 0 42 | 1 43 | 1 44 | 1 45 | 1 46 | 0 47 | 1 48 | 0 49 | 0 50 | 1 51 | 1 52 | 1 53 | 1 54 | 1 55 | 1 56 | 1 57 | 1 58 | 1 59 | 1 60 | 1 61 | 1 62 | 0 63 | 1 64 | 1 65 | 0 66 | 1 67 | 0 68 | 1 69 | 1 70 | 0 71 | 0 72 | 1 73 | 0 74 | 1 75 | 1 76 | 0 77 | 0 78 | 0 79 | 0 80 | 0 81 | 0 82 | 0 83 | 0 84 | 0 85 | 1 86 | 1 87 | 1 88 | 1 89 | 0 90 | 0 91 | 0 92 | 0 93 | 0 94 | 0 95 | 0 96 | 0 97 | 0 98 | 0 99 | 1 100 | 1 101 | 1 102 | 1 103 | 0 104 | 1 105 | 1 106 | 1 107 | 1 108 | 1 109 | 0 110 | 1 111 | 0 112 | 1 113 | 1 114 | 1 115 | 0 116 | 0 117 | 0 118 | 1 119 | 0 120 | 0 121 | 0 122 | 0 123 | 1 124 | 1 125 | 1 126 | 0 127 | 1 128 | 0 129 | 1 130 | 0 131 | 0 132 | 0 133 | 1 134 | 0 135 | 0 136 | 0 137 | 0 138 | 0 139 | 0 140 | 0 141 | 0 142 | 0 143 | 1 144 | 0 145 | 0 146 | 0 147 | 1 148 | 0 149 | 0 150 | 1 151 | 0 152 | 1 153 | 1 154 | 0 155 | 0 156 | 1 157 | 0 158 | 0 159 | 0 160 | 1 161 | 1 162 | 0 163 | 0 164 | 0 165 | 0 166 | 0 167 | 1 168 | 1 169 | 0 170 | 0 171 | 0 172 | 0 173 | 0 174 | 1 175 | 1 176 | 1 177 | 0 178 | 1 179 | 0 180 | 1 181 | 0 182 | 0 183 | 0 184 | 1 185 | 1 186 | 0 187 | 1 188 | 1 189 | 1 190 | 0 191 | 1 192 | 1 193 | 0 194 | 0 195 | 0 196 | 0 197 | 0 198 | 0 199 | 0 200 | 0 201 | 1 202 | 1 203 | 0 204 | 0 205 | 0 206 | 1 207 | 1 208 | 0 209 | 0 210 | 0 211 | 1 212 | 1 213 | 0 214 | 1 215 | 1 216 | 1 217 | 0 218 | 0 219 | 1 220 | 0 221 | 0 222 | 1 223 | 1 224 | 1 225 | 1 226 | 0 227 | 1 228 | 1 229 | 0 230 | 1 231 | 1 232 | 1 233 | 0 234 | 1 235 | 0 236 | 0 237 | 1 238 | 1 239 | 1 240 | 1 241 | 0 242 | 0 243 | 0 244 | 0 245 | 0 246 | 0 247 | 1 248 | 1 249 | 0 250 | 0 251 | 0 252 | 1 253 | 0 254 | 1 255 | 1 256 | 1 257 | 0 258 | 0 259 | 0 260 | 0 261 | 1 262 | 1 263 | 1 264 | 1 265 | 1 266 | 0 267 | 1 268 | 1 269 | 1 270 | 0 271 | 1 272 | 0 273 | 1 274 | 1 275 | 0 276 | 0 277 | 0 278 | 0 279 | 0 280 | 1 281 | 0 282 | 0 283 | 1 284 | 1 285 | 1 286 | 1 287 | 1 288 | 0 289 | 1 290 | 1 291 | 0 292 | 0 293 | 1 294 | 1 295 | 0 296 | 1 297 | 0 298 | 0 299 | 0 300 | 1 301 | 1 302 | 0 303 | 1 304 | 0 305 | 1 306 | 1 307 | 0 308 | 0 309 | 1 310 | 0 311 | 0 312 | 0 313 | 1 314 | 0 315 | 0 316 | 0 317 | 1 318 | 1 319 | 0 320 | 0 321 | 1 322 | 0 
323 | 0 324 | 1 325 | 0 326 | 0 327 | 1 328 | 0 329 | 1 330 | 1 331 | 1 332 | 0 333 | 0 334 | 1 335 | 1 336 | 0 337 | 1 338 | 0 339 | 0 340 | 1 341 | 1 342 | 0 343 | 0 344 | 0 345 | 1 346 | 0 347 | 0 348 | 0 349 | 1 350 | 1 351 | 0 352 | 0 353 | 0 354 | 1 355 | 0 356 | 0 357 | 1 358 | 1 359 | 1 360 | 1 361 | 1 362 | 1 363 | 0 364 | 0 365 | 0 366 | 0 367 | 1 368 | 1 369 | 0 370 | 0 371 | 0 372 | 0 373 | 0 374 | 0 375 | 0 376 | 0 377 | 0 378 | 0 379 | 0 380 | 0 381 | 0 382 | 1 383 | 0 384 | 0 385 | 0 386 | 0 387 | 1 388 | 0 389 | 0 390 | 0 391 | 0 392 | 1 393 | 0 394 | 0 395 | 0 396 | 0 397 | 0 398 | 0 399 | 0 400 | 0 401 | 1 402 | 0 403 | 0 404 | 0 405 | 0 406 | 0 407 | 0 408 | 0 409 | 0 410 | 0 411 | 0 412 | 0 413 | 1 414 | 0 415 | 1 416 | 0 417 | 1 418 | 0 419 | 0 420 | 0 421 | 0 422 | 1 423 | 0 424 | 0 425 | 0 426 | 1 427 | 0 428 | 1 429 | 0 430 | 0 431 | 0 432 | 0 433 | 0 434 | 0 435 | 0 436 | 1 437 | 1 438 | 0 439 | 0 440 | 0 441 | 1 442 | 0 443 | 0 444 | 0 445 | 0 446 | 0 447 | 0 448 | 0 449 | 0 450 | 1 451 | 0 452 | 0 453 | 0 454 | 1 455 | 0 456 | 1 457 | 1 458 | 1 459 | 0 460 | 0 461 | 0 462 | 0 463 | 0 464 | 0 465 | 0 466 | 1 467 | 1 468 | 1 469 | 0 470 | 0 471 | 0 472 | 0 473 | 0 474 | 0 475 | 0 476 | 0 477 | 0 478 | 0 479 | 0 480 | 1 481 | 0 482 | 0 483 | 1 484 | 1 485 | 0 486 | 0 487 | 0 488 | 1 489 | 1 490 | 1 491 | 0 492 | 1 493 | 0 494 | 1 495 | 0 496 | 0 497 | 0 498 | 0 499 | 0 500 | 0 501 | 0 502 | 0 503 | 0 504 | 0 505 | 0 506 | 0 507 | 1 508 | 0 509 | 0 510 | 0 511 | 0 512 | 0 513 | 0 514 | 0 515 | 1 516 | 1 517 | 0 518 | 0 519 | 0 520 | 1 521 | 0 522 | 0 523 | 1 524 | 1 525 | 0 526 | 0 527 | 0 528 | 0 529 | 0 530 | 0 531 | 1 532 | 0 533 | 0 534 | 0 535 | 0 536 | 0 537 | 0 538 | 0 539 | 0 540 | 0 541 | 0 542 | 0 543 | 0 544 | 0 545 | 0 546 | 0 547 | 1 548 | 0 549 | 0 550 | 1 551 | 0 552 | 0 553 | 0 554 | 0 555 | 0 556 | 0 557 | 0 558 | 0 559 | 0 560 | 0 561 | 0 562 | 0 563 | 0 564 | 0 565 | 0 566 | 1 567 | 0 568 | 0 569 | 1 570 | 1 571 | 1 572 | 1 573 | 0 574 | 0 575 | 1 576 | 0 577 | 0 578 | 0 579 | 0 580 | 0 581 | 0 582 | 1 583 | 1 584 | 0 585 | 0 586 | 0 587 | 1 588 | 0 589 | 1 590 | 0 591 | 1 592 | 1 593 | 1 594 | 0 595 | 1 596 | 0 597 | 0 598 | 0 599 | 0 600 | 0 601 | 0 602 | 0 603 | 0 604 | 1 605 | 1 606 | 1 607 | 0 608 | 0 609 | 1 610 | 0 611 | 1 612 | 1 613 | 1 614 | 0 615 | 0 616 | 0 617 | 0 618 | 0 619 | 0 620 | 0 621 | 0 622 | 0 623 | 0 624 | 0 625 | 0 626 | 0 627 | 1 628 | 0 629 | 0 630 | 0 631 | 0 632 | 0 633 | 0 634 | 1 635 | 0 636 | 0 637 | 1 638 | 0 639 | 0 640 | 0 641 | 0 642 | 0 643 | 0 644 | 0 645 | 0 646 | 0 647 | 0 648 | 0 649 | 1 650 | 0 651 | 0 652 | 0 653 | 0 654 | 0 655 | 0 656 | 0 657 | 0 658 | 0 659 | 1 660 | 0 661 | 0 662 | 0 663 | 0 664 | 0 665 | 0 666 | 0 667 | 0 668 | 0 669 | 1 670 | 1 671 | 1 672 | 0 673 | 0 674 | 0 675 | 0 676 | 0 677 | 0 678 | 0 679 | 0 680 | 0 681 | 1 682 | 1 683 | 0 684 | 0 685 | 0 686 | 0 687 | 0 688 | 0 689 | 0 690 | 0 691 | 0 692 | 1 693 | 0 694 | 0 695 | 0 696 | 0 697 | 1 698 | 1 699 | 1 700 | -------------------------------------------------------------------------------- /Data Sets/label_cancer.csv: -------------------------------------------------------------------------------- 1 | 0 2 | 1 3 | 2 4 | 2 5 | 2 6 | 2 7 | 0 8 | 0 9 | 1 10 | 1 11 | 2 12 | 2 13 | 1 14 | 2 15 | 0 16 | 1 17 | 1 18 | 2 19 | 2 20 | 1 21 | 2 22 | 2 23 | 2 24 | 2 25 | 2 26 | 1 27 | 1 28 | 1 29 | 2 30 | 1 31 | 2 32 | 2 33 | 1 34 | 0 35 | 0 36 | 1 37 | 0 38 | 0 39 | 0 40 | 0 41 | 0 42 | 0 43 | 0 44 | 0 45 | 0 46 | 0 47 | 2 48 | 0 49 | 0 50 | 0 
51 | 0 52 | 0 53 | 0 54 | 0 55 | 0 56 | 0 57 | 0 58 | 2 59 | 0 60 | 0 61 | 0 62 | 0 63 | 0 64 | 1 65 | 1 66 | 1 67 | 1 68 | 1 69 | 2 70 | 1 71 | 1 72 | 1 73 | 1 74 | 1 75 | 1 76 | 1 77 | 1 78 | 1 79 | 1 80 | 2 81 | 1 82 | 1 83 | 1 84 | 1 85 | 1 86 | 1 87 | 1 88 | 2 89 | 2 90 | 2 91 | 1 92 | 2 93 | 2 94 | 2 95 | 2 96 | 2 97 | 2 98 | 2 99 | 2 100 | 2 101 | 2 102 | 1 103 | 2 104 | 2 105 | 2 106 | 2 107 | 2 108 | 2 109 | 2 110 | 2 111 | 2 112 | 2 113 | 0 114 | 1 115 | 2 116 | 2 117 | 2 118 | 0 119 | 0 120 | 1 121 | 1 122 | 2 123 | 2 124 | 1 125 | 2 126 | 0 127 | 1 128 | 1 129 | 2 130 | 2 131 | 1 132 | 2 133 | 2 134 | 2 135 | 2 136 | 1 137 | 1 138 | 1 139 | 1 140 | 2 141 | 1 142 | 2 143 | 2 144 | 1 145 | 0 146 | 0 147 | 2 148 | 0 149 | 0 150 | 0 151 | 0 152 | 0 153 | 0 154 | 0 155 | 0 156 | 0 157 | 0 158 | 2 159 | 0 160 | 0 161 | 0 162 | 0 163 | 0 164 | 0 165 | 0 166 | 0 167 | 0 168 | 0 169 | 1 170 | 0 171 | 0 172 | 0 173 | 0 174 | 0 175 | 1 176 | 1 177 | 1 178 | 1 179 | 1 180 | 0 181 | 1 182 | 1 183 | 1 184 | 1 185 | 1 186 | 1 187 | 1 188 | 1 189 | 1 190 | 1 191 | 0 192 | 1 193 | 1 194 | 1 195 | 1 196 | 1 197 | 1 198 | 1 199 | 2 200 | 2 201 | 2 202 | 0 203 | 2 204 | 2 205 | 2 206 | 2 207 | 2 208 | 2 209 | 2 210 | 2 211 | 2 212 | 2 213 | 0 214 | 2 215 | 2 216 | 2 217 | 2 218 | 2 219 | 2 220 | 2 221 | 2 222 | 2 223 | 2 224 | 1 225 | 0 226 | 2 227 | 2 228 | 2 229 | 0 230 | 0 231 | 1 232 | 1 233 | 2 234 | 2 235 | 1 236 | 0 237 | 0 238 | 1 239 | 1 240 | 2 241 | 2 242 | 1 243 | 2 244 | 2 245 | 2 246 | 2 247 | 0 248 | 1 249 | 1 250 | 1 251 | 2 252 | 1 253 | 2 254 | 2 255 | 1 256 | 0 257 | 0 258 | 0 259 | 0 260 | 0 261 | 0 262 | 0 263 | 0 264 | 0 265 | 0 266 | 0 267 | 0 268 | 0 269 | 0 270 | 0 271 | 0 272 | 0 273 | 0 274 | 0 275 | 0 276 | 0 277 | 0 278 | 0 279 | 0 280 | 0 281 | 0 282 | 0 283 | 0 284 | 0 285 | 0 286 | 1 287 | 1 288 | 1 289 | 1 290 | 1 291 | 0 292 | 1 293 | 1 294 | 1 295 | 1 296 | 1 297 | 1 298 | 1 299 | 1 300 | 1 301 | 1 302 | 0 303 | 1 304 | 1 305 | 1 306 | 1 307 | 1 308 | 1 309 | 1 310 | 2 311 | 2 312 | 2 313 | 0 314 | 2 315 | 2 316 | 2 317 | 2 318 | 2 319 | 2 320 | 2 321 | 2 322 | 2 323 | 2 324 | 0 325 | 2 326 | 2 327 | 2 328 | 2 329 | 2 330 | 2 331 | 2 332 | 2 333 | 2 334 | 2 335 | 1 336 | 0 337 | 2 338 | 2 339 | 2 340 | 0 341 | 0 342 | 1 343 | 1 344 | 2 345 | 2 346 | 1 347 | 0 348 | 0 349 | 1 350 | 1 351 | 2 352 | 2 353 | 1 354 | 2 355 | 2 356 | 2 357 | 2 358 | 0 359 | 1 360 | 1 361 | 1 362 | 2 363 | 1 364 | 2 365 | 2 366 | 1 367 | 0 368 | 0 369 | 0 370 | 0 371 | 0 372 | 0 373 | 0 374 | 0 375 | 0 376 | 0 377 | 0 378 | 0 379 | 0 380 | 0 381 | 0 382 | 0 383 | 0 384 | 0 385 | 0 386 | 0 387 | 0 388 | 0 389 | 0 390 | 0 391 | 0 392 | 0 393 | 0 394 | 0 395 | 0 396 | 0 397 | 1 398 | 1 399 | 1 400 | 1 401 | 1 402 | 0 403 | 1 404 | 1 405 | 1 406 | 1 407 | 1 408 | 1 409 | 1 410 | 1 411 | 1 412 | 1 413 | 0 414 | 1 415 | 1 416 | 1 417 | 1 418 | 1 419 | 1 420 | 1 421 | 2 422 | 2 423 | 2 424 | 0 425 | 2 426 | 2 427 | 2 428 | 2 429 | 2 430 | 2 431 | 2 432 | 2 433 | 2 434 | 2 435 | 0 436 | 2 437 | 2 438 | 2 439 | 2 440 | 2 441 | 2 442 | 2 443 | 2 444 | 2 445 | 2 446 | 2 447 | 0 448 | 2 449 | 2 450 | 2 451 | 0 452 | 0 453 | 1 454 | 1 455 | 2 456 | 2 457 | 1 458 | 0 459 | 0 460 | 1 461 | 1 462 | 2 463 | 2 464 | 1 465 | 2 466 | 2 467 | 2 468 | 2 469 | 0 470 | 1 471 | 1 472 | 1 473 | 2 474 | 1 475 | 2 476 | 2 477 | 1 478 | 0 479 | 0 480 | 1 481 | 0 482 | 0 483 | 0 484 | 0 485 | 0 486 | 0 487 | 0 488 | 0 489 | 0 490 | 0 491 | 1 492 | 0 493 | 0 494 | 0 495 | 0 496 | 0 497 | 0 498 | 0 499 | 0 500 | 0 501 
| 0 502 | 1 503 | 0 504 | 0 505 | 0 506 | 0 507 | 0 508 | 1 509 | 1 510 | 1 511 | 1 512 | 1 513 | 1 514 | 1 515 | 1 516 | 1 517 | 1 518 | 1 519 | 1 520 | 1 521 | 1 522 | 1 523 | 1 524 | 1 525 | 1 526 | 1 527 | 1 528 | 1 529 | 1 530 | 1 531 | 1 532 | 2 533 | 2 534 | 2 535 | 1 536 | 2 537 | 2 538 | 2 539 | 2 540 | 2 541 | 2 542 | 2 543 | 2 544 | 2 545 | 2 546 | 1 547 | 2 548 | 2 549 | 2 550 | 2 551 | 2 552 | 2 553 | 2 554 | 2 555 | 2 556 | 2 557 | 2 558 | 1 559 | 2 560 | 2 561 | 2 562 | 0 563 | 0 564 | 1 565 | 1 566 | 2 567 | 2 568 | 1 569 | 1 570 | 0 571 | 1 572 | 1 573 | 2 574 | 2 575 | 1 576 | 2 577 | 2 578 | 2 579 | 2 580 | 1 581 | 1 582 | 1 583 | 1 584 | 2 585 | 1 586 | 2 587 | 2 588 | 1 589 | 0 590 | 0 591 | 1 592 | 0 593 | 0 594 | 0 595 | 0 596 | 0 597 | 0 598 | 0 599 | 0 600 | 0 601 | 0 602 | 1 603 | 0 604 | 0 605 | 0 606 | 0 607 | 0 608 | 0 609 | 0 610 | 0 611 | 0 612 | 0 613 | 1 614 | 0 615 | 0 616 | 0 617 | 0 618 | 0 619 | 1 620 | 1 621 | 1 622 | 1 623 | 1 624 | 1 625 | 1 626 | 1 627 | 1 628 | 1 629 | 1 630 | 1 631 | 1 632 | 1 633 | 1 634 | 1 635 | 1 636 | 1 637 | 1 638 | 1 639 | 1 640 | 1 641 | 1 642 | 1 643 | 2 644 | 2 645 | 2 646 | 1 647 | 2 648 | 2 649 | 2 650 | 2 651 | 2 652 | 2 653 | 2 654 | 2 655 | 2 656 | 2 657 | 1 658 | 2 659 | 2 660 | 2 661 | 2 662 | 2 663 | 2 664 | 2 665 | 2 666 | 2 667 | 2 668 | 1 669 | 1 670 | 2 671 | 2 672 | 2 673 | 0 674 | 0 675 | 1 676 | 1 677 | 2 678 | 2 679 | 1 680 | 1 681 | 0 682 | 1 683 | 1 684 | 2 685 | 2 686 | 1 687 | 2 688 | 2 689 | 2 690 | 2 691 | 1 692 | 1 693 | 1 694 | 1 695 | 2 696 | 1 697 | 2 698 | 2 699 | 1 700 | 0 701 | 0 702 | 1 703 | 0 704 | 0 705 | 0 706 | 0 707 | 0 708 | 0 709 | 0 710 | 0 711 | 0 712 | 0 713 | 1 714 | 0 715 | 0 716 | 0 717 | 0 718 | 0 719 | 0 720 | 0 721 | 0 722 | 0 723 | 0 724 | 2 725 | 0 726 | 0 727 | 0 728 | 0 729 | 0 730 | 1 731 | 1 732 | 1 733 | 1 734 | 1 735 | 2 736 | 1 737 | 1 738 | 1 739 | 1 740 | 1 741 | 1 742 | 1 743 | 1 744 | 1 745 | 1 746 | 2 747 | 1 748 | 1 749 | 1 750 | 1 751 | 1 752 | 1 753 | 1 754 | 2 755 | 2 756 | 2 757 | 2 758 | 2 759 | 2 760 | 2 761 | 2 762 | 2 763 | 2 764 | 2 765 | 2 766 | 2 767 | 2 768 | 2 769 | 2 770 | 2 771 | 2 772 | 2 773 | 2 774 | 2 775 | 2 776 | 2 777 | 2 778 | 2 779 | 0 780 | 2 781 | 2 782 | 2 783 | 2 784 | 0 785 | 0 786 | 1 787 | 1 788 | 2 789 | 2 790 | 1 791 | 2 792 | 0 793 | 1 794 | 1 795 | 2 796 | 2 797 | 1 798 | 2 799 | 2 800 | 2 801 | 2 802 | 2 803 | 1 804 | 1 805 | 1 806 | 2 807 | 1 808 | 2 809 | 2 810 | 1 811 | 0 812 | 0 813 | 2 814 | 0 815 | 0 816 | 0 817 | 0 818 | 0 819 | 0 820 | 0 821 | 0 822 | 0 823 | 0 824 | 2 825 | 0 826 | 0 827 | 0 828 | 0 829 | 0 830 | 0 831 | 0 832 | 0 833 | 0 834 | 0 835 | 2 836 | 0 837 | 0 838 | 0 839 | 0 840 | 0 841 | 1 842 | 1 843 | 1 844 | 1 845 | 1 846 | 2 847 | 1 848 | 1 849 | 1 850 | 1 851 | 1 852 | 1 853 | 1 854 | 1 855 | 1 856 | 1 857 | 2 858 | 1 859 | 1 860 | 1 861 | 1 862 | 1 863 | 1 864 | 1 865 | 2 866 | 2 867 | 2 868 | 2 869 | 2 870 | 2 871 | 2 872 | 2 873 | 2 874 | 2 875 | 2 876 | 2 877 | 2 878 | 2 879 | 2 880 | 2 881 | 2 882 | 2 883 | 2 884 | 2 885 | 2 886 | 2 887 | 2 888 | 2 889 | 2 890 | 1 891 | 2 892 | 2 893 | 2 894 | 2 895 | 0 896 | 0 897 | 1 898 | 1 899 | 2 900 | 2 901 | 1 902 | 2 903 | 0 904 | 1 905 | 1 906 | 2 907 | 2 908 | 1 909 | 2 910 | 2 911 | 2 912 | 2 913 | 2 914 | 1 915 | 1 916 | 1 917 | 2 918 | 1 919 | 2 920 | 2 921 | 1 922 | 0 923 | 0 924 | 2 925 | 0 926 | 0 927 | 0 928 | 0 929 | 0 930 | 0 931 | 0 932 | 0 933 | 0 934 | 0 935 | 2 936 | 0 937 | 0 938 | 0 939 | 0 940 | 0 941 | 0 942 | 0 943 | 0 944 | 0 945 | 
0 946 | 2 947 | 0 948 | 0 949 | 0 950 | 0 951 | 0 952 | 1 953 | 1 954 | 1 955 | 1 956 | 1 957 | 2 958 | 1 959 | 1 960 | 1 961 | 1 962 | 1 963 | 1 964 | 1 965 | 1 966 | 1 967 | 1 968 | 2 969 | 1 970 | 1 971 | 1 972 | 1 973 | 1 974 | 1 975 | 1 976 | 2 977 | 0 978 | 0 979 | 2 980 | 0 981 | 2 982 | 2 983 | 2 984 | 1 985 | 1 986 | 2 987 | 2 988 | 2 989 | 2 990 | 2 991 | 2 992 | 2 993 | 2 994 | 2 995 | 2 996 | 2 997 | 2 998 | 2 999 | 2 1000 | 2 1001 | -------------------------------------------------------------------------------- /Data Sets/label_cervical.csv: -------------------------------------------------------------------------------- 1 | 0 2 | 0 3 | 0 4 | 0 5 | 0 6 | 0 7 | 1 8 | 0 9 | 0 10 | 0 11 | 0 12 | 0 13 | 0 14 | 0 15 | 0 16 | 0 17 | 0 18 | 0 19 | 0 20 | 0 21 | 0 22 | 0 23 | 1 24 | 1 25 | 0 26 | 0 27 | 0 28 | 0 29 | 0 30 | 0 31 | 0 32 | 0 33 | 0 34 | 0 35 | 1 36 | 0 37 | 0 38 | 0 39 | 0 40 | 0 41 | 0 42 | 1 43 | 0 44 | 0 45 | 1 46 | 0 47 | 0 48 | 0 49 | 0 50 | 0 51 | 0 52 | 0 53 | 0 54 | 0 55 | 0 56 | 0 57 | 0 58 | 1 59 | 1 60 | 0 61 | 0 62 | 0 63 | 0 64 | 0 65 | 1 66 | 0 67 | 0 68 | 0 69 | 0 70 | 0 71 | 0 72 | 0 73 | 0 74 | 0 75 | 0 76 | 0 77 | 0 78 | 0 79 | 1 80 | 0 81 | 0 82 | 0 83 | 0 84 | 0 85 | 0 86 | 0 87 | 0 88 | 0 89 | 0 90 | 0 91 | 0 92 | 0 93 | 0 94 | 1 95 | 0 96 | 0 97 | 1 98 | 0 99 | 0 100 | 0 101 | 0 102 | 0 103 | 0 104 | 1 105 | 0 106 | 0 107 | 0 108 | 0 109 | 0 110 | 0 111 | 0 112 | 0 113 | 0 114 | 0 115 | 0 116 | 0 117 | 0 118 | 0 119 | 0 120 | 0 121 | 0 122 | 0 123 | 0 124 | 0 125 | 0 126 | 0 127 | 0 128 | 0 129 | 0 130 | 0 131 | 0 132 | 1 133 | 0 134 | 0 135 | 0 136 | 0 137 | 0 138 | 0 139 | 0 140 | 0 141 | 0 142 | 0 143 | 0 144 | 0 145 | 0 146 | 0 147 | 0 148 | 0 149 | 0 150 | 0 151 | 0 152 | 0 153 | 1 154 | 0 155 | 0 156 | 0 157 | 0 158 | 0 159 | 0 160 | 0 161 | 0 162 | 0 163 | 0 164 | 0 165 | 0 166 | 0 167 | 0 168 | 0 169 | 0 170 | 0 171 | 0 172 | 0 173 | 0 174 | 0 175 | 0 176 | 0 177 | 0 178 | 0 179 | 0 180 | 0 181 | 0 182 | 0 183 | 0 184 | 0 185 | 0 186 | 1 187 | 0 188 | 0 189 | 0 190 | 0 191 | 0 192 | 1 193 | 0 194 | 0 195 | 0 196 | 0 197 | 0 198 | 0 199 | 0 200 | 0 201 | 0 202 | 0 203 | 0 204 | 1 205 | 0 206 | 1 207 | 0 208 | 0 209 | 0 210 | 0 211 | 0 212 | 0 213 | 0 214 | 0 215 | 1 216 | 0 217 | 1 218 | 0 219 | 0 220 | 0 221 | 0 222 | 0 223 | 0 224 | 0 225 | 1 226 | 0 227 | 0 228 | 1 229 | 0 230 | 0 231 | 0 232 | 0 233 | 0 234 | 0 235 | 0 236 | 0 237 | 0 238 | 0 239 | 0 240 | 0 241 | 0 242 | 0 243 | 0 244 | 0 245 | 0 246 | 0 247 | 0 248 | 0 249 | 0 250 | 0 251 | 0 252 | 1 253 | 0 254 | 0 255 | 0 256 | 0 257 | 0 258 | 1 259 | 0 260 | 0 261 | 0 262 | 0 263 | 0 264 | 0 265 | 1 266 | 0 267 | 0 268 | 0 269 | 0 270 | 0 271 | 0 272 | 0 273 | 0 274 | 0 275 | 0 276 | 1 277 | 0 278 | 0 279 | 0 280 | 0 281 | 0 282 | 0 283 | 0 284 | 0 285 | 0 286 | 0 287 | 0 288 | 0 289 | 0 290 | 0 291 | 0 292 | 0 293 | 0 294 | 0 295 | 0 296 | 0 297 | 0 298 | 0 299 | 0 300 | 0 301 | 0 302 | 0 303 | 0 304 | 0 305 | 0 306 | 0 307 | 0 308 | 0 309 | 0 310 | 0 311 | 0 312 | 0 313 | 0 314 | 0 315 | 1 316 | 0 317 | 0 318 | 0 319 | 0 320 | 0 321 | 1 322 | 0 323 | 0 324 | 0 325 | 0 326 | 0 327 | 0 328 | 0 329 | 0 330 | 0 331 | 0 332 | 0 333 | 0 334 | 0 335 | 0 336 | 1 337 | 0 338 | 0 339 | 0 340 | 0 341 | 0 342 | 0 343 | 0 344 | 0 345 | 0 346 | 1 347 | 0 348 | 0 349 | 0 350 | 0 351 | 0 352 | 0 353 | 0 354 | 0 355 | 1 356 | 0 357 | 0 358 | 0 359 | 0 360 | 0 361 | 0 362 | 0 363 | 0 364 | 0 365 | 0 366 | 0 367 | 0 368 | 0 369 | 0 370 | 0 371 | 1 372 | 0 373 | 0 374 | 0 375 | 0 376 | 0 377 | 0 378 
| 0 379 | 0 380 | 0 381 | 0 382 | 0 383 | 0 384 | 1 385 | 0 386 | 1 387 | 0 388 | 0 389 | 0 390 | 0 391 | 0 392 | 0 393 | 0 394 | 0 395 | 0 396 | 0 397 | 0 398 | 0 399 | 0 400 | 0 401 | 0 402 | 0 403 | 0 404 | 0 405 | 0 406 | 0 407 | 0 408 | 1 409 | 0 410 | 0 411 | 0 412 | 1 413 | 0 414 | 0 415 | 0 416 | 0 417 | 0 418 | 0 419 | 0 420 | 0 421 | 0 422 | 1 423 | 0 424 | 0 425 | 0 426 | 0 427 | 0 428 | 0 429 | 0 430 | 0 431 | 0 432 | 0 433 | 0 434 | 1 435 | 0 436 | 0 437 | 0 438 | 0 439 | 0 440 | 0 441 | 0 442 | 0 443 | 0 444 | 0 445 | 0 446 | 0 447 | 0 448 | 0 449 | 0 450 | 0 451 | 0 452 | 0 453 | 0 454 | 0 455 | 0 456 | 0 457 | 0 458 | 0 459 | 0 460 | 0 461 | 0 462 | 0 463 | 0 464 | 0 465 | 1 466 | 0 467 | 0 468 | 0 469 | 0 470 | 0 471 | 0 472 | 0 473 | 1 474 | 0 475 | 0 476 | 0 477 | 0 478 | 0 479 | 0 480 | 0 481 | 0 482 | 0 483 | 1 484 | 0 485 | 0 486 | 0 487 | 0 488 | 0 489 | 0 490 | 0 491 | 0 492 | 0 493 | 0 494 | 0 495 | 0 496 | 0 497 | 1 498 | 0 499 | 0 500 | 0 501 | 0 502 | 0 503 | 0 504 | 0 505 | 0 506 | 0 507 | 0 508 | 0 509 | 0 510 | 0 511 | 0 512 | 0 513 | 0 514 | 0 515 | 0 516 | 0 517 | 0 518 | 0 519 | 0 520 | 0 521 | 0 522 | 0 523 | 0 524 | 0 525 | 0 526 | 0 527 | 0 528 | 0 529 | 0 530 | 0 531 | 1 532 | 0 533 | 0 534 | 0 535 | 0 536 | 0 537 | 0 538 | 0 539 | 0 540 | 1 541 | 0 542 | 0 543 | 0 544 | 0 545 | 0 546 | 0 547 | 0 548 | 0 549 | 0 550 | 1 551 | 0 552 | 0 553 | 0 554 | 0 555 | 0 556 | 0 557 | 0 558 | 0 559 | 0 560 | 0 561 | 0 562 | 0 563 | 0 564 | 0 565 | 0 566 | 0 567 | 0 568 | 0 569 | 0 570 | 0 571 | 0 572 | 0 573 | 0 574 | 0 575 | 0 576 | 0 577 | 0 578 | 1 579 | 1 580 | 0 581 | 0 582 | 0 583 | 0 584 | 0 585 | 0 586 | 0 587 | 0 588 | 0 589 | 0 590 | 0 591 | 0 592 | 0 593 | 0 594 | 0 595 | 0 596 | 0 597 | 0 598 | 0 599 | 1 600 | 0 601 | 0 602 | 0 603 | 0 604 | 0 605 | 0 606 | 0 607 | 0 608 | 0 609 | 0 610 | 0 611 | 1 612 | 0 613 | 0 614 | 0 615 | 1 616 | 0 617 | 0 618 | 0 619 | 0 620 | 0 621 | 0 622 | 0 623 | 0 624 | 0 625 | 0 626 | 0 627 | 0 628 | 0 629 | 0 630 | 0 631 | 0 632 | 0 633 | 0 634 | 0 635 | 0 636 | 0 637 | 0 638 | 0 639 | 0 640 | 0 641 | 0 642 | 0 643 | 0 644 | 0 645 | 1 646 | 0 647 | 1 648 | 0 649 | 0 650 | 0 651 | 0 652 | 0 653 | 0 654 | 0 655 | 0 656 | 0 657 | 0 658 | 0 659 | 0 660 | 0 661 | 0 662 | 0 663 | 0 664 | 0 665 | 1 666 | 0 667 | 0 668 | 0 669 | 1 670 | 1 671 | 1 672 | 1 673 | 0 674 | 1 675 | 0 676 | 0 677 | 0 678 | 0 679 | 0 680 | 0 681 | 0 682 | 0 683 | 0 684 | 0 685 | 0 686 | 0 687 | 1 688 | 1 689 | 1 690 | 0 691 | 0 692 | 1 693 | 0 694 | 1 695 | 0 696 | 0 697 | 0 698 | 0 699 | 0 700 | 0 701 | 0 702 | 0 703 | 0 704 | 0 705 | 1 706 | 0 707 | 0 708 | 0 709 | 0 710 | 0 711 | 0 712 | 0 713 | 0 714 | 0 715 | 0 716 | 0 717 | 0 718 | 0 719 | 0 720 | 1 721 | 0 722 | 0 723 | 0 724 | 0 725 | 0 726 | 0 727 | 0 728 | 0 729 | 0 730 | 0 731 | 0 732 | 0 733 | 0 734 | 0 735 | 0 736 | 0 737 | 0 738 | 0 739 | 0 740 | 1 741 | 0 742 | 0 743 | 0 744 | 0 745 | 0 746 | 0 747 | 0 748 | 0 749 | 0 750 | 0 751 | 1 752 | 0 753 | 0 754 | 1 755 | 1 756 | 0 757 | 1 758 | 0 759 | 0 760 | 0 761 | 0 762 | 0 763 | 0 764 | 0 765 | 0 766 | 0 767 | 0 768 | 0 769 | 0 770 | 0 771 | 0 772 | 0 773 | 0 774 | 0 775 | 0 776 | 0 777 | 0 778 | 0 779 | 0 780 | 0 781 | 0 782 | 1 783 | 0 784 | 0 785 | 0 786 | 0 787 | 1 788 | 0 789 | 0 790 | 0 791 | 0 792 | 0 793 | 0 794 | 0 795 | 0 796 | 0 797 | 0 798 | 1 799 | 0 800 | 0 801 | 0 802 | 0 803 | 0 804 | 0 805 | 0 806 | 0 807 | 0 808 | 0 809 | 0 810 | 0 811 | 0 812 | 0 813 | 0 814 | 0 815 | 0 816 | 0 817 | 0 818 | 0 819 | 0 820 | 0 821 | 0 822 | 
0 823 | 0 824 | 0 825 | 0 826 | 0 827 | 0 828 | 0 829 | 0 830 | 0 831 | 0 832 | 0 833 | 0 834 | 0 835 | 0 836 | 0 837 | 0 838 | 0 839 | 0 840 | 0 841 | 0 842 | 0 843 | 0 844 | 0 845 | 0 846 | 0 847 | 0 848 | 0 849 | 0 850 | 0 851 | 0 852 | 0 853 | 0 854 | 0 855 | 0 856 | 0 857 | 0 858 | 0 859 | -------------------------------------------------------------------------------- /Data Sets/label_coimbra.csv: -------------------------------------------------------------------------------- 1 | 1 2 | 1 3 | 1 4 | 1 5 | 1 6 | 1 7 | 1 8 | 1 9 | 1 10 | 1 11 | 1 12 | 1 13 | 1 14 | 1 15 | 1 16 | 1 17 | 1 18 | 1 19 | 1 20 | 1 21 | 1 22 | 1 23 | 1 24 | 1 25 | 1 26 | 1 27 | 1 28 | 1 29 | 1 30 | 1 31 | 1 32 | 1 33 | 1 34 | 1 35 | 1 36 | 1 37 | 1 38 | 1 39 | 1 40 | 1 41 | 1 42 | 1 43 | 1 44 | 1 45 | 1 46 | 1 47 | 1 48 | 1 49 | 1 50 | 1 51 | 1 52 | 1 53 | 0 54 | 0 55 | 0 56 | 0 57 | 0 58 | 0 59 | 0 60 | 0 61 | 0 62 | 0 63 | 0 64 | 0 65 | 0 66 | 0 67 | 0 68 | 0 69 | 0 70 | 0 71 | 0 72 | 0 73 | 0 74 | 0 75 | 0 76 | 0 77 | 0 78 | 0 79 | 0 80 | 0 81 | 0 82 | 0 83 | 0 84 | 0 85 | 0 86 | 0 87 | 0 88 | 0 89 | 0 90 | 0 91 | 0 92 | 0 93 | 0 94 | 0 95 | 0 96 | 0 97 | 0 98 | 0 99 | 0 100 | 0 101 | 0 102 | 0 103 | 0 104 | 0 105 | 0 106 | 0 107 | 0 108 | 0 109 | 0 110 | 0 111 | 0 112 | 0 113 | 0 114 | 0 115 | 0 116 | 0 117 | -------------------------------------------------------------------------------- /Data Sets/label_wisconsin.csv: -------------------------------------------------------------------------------- 1 | 0 2 | 0 3 | 0 4 | 0 5 | 0 6 | 1 7 | 0 8 | 0 9 | 0 10 | 0 11 | 0 12 | 0 13 | 1 14 | 0 15 | 1 16 | 1 17 | 0 18 | 0 19 | 1 20 | 0 21 | 1 22 | 1 23 | 0 24 | 1 25 | 0 26 | 1 27 | 0 28 | 0 29 | 0 30 | 0 31 | 0 32 | 0 33 | 1 34 | 0 35 | 0 36 | 0 37 | 1 38 | 0 39 | 1 40 | 1 41 | 0 42 | 1 43 | 1 44 | 1 45 | 1 46 | 0 47 | 1 48 | 0 49 | 0 50 | 1 51 | 1 52 | 1 53 | 1 54 | 1 55 | 1 56 | 1 57 | 1 58 | 1 59 | 1 60 | 1 61 | 1 62 | 0 63 | 1 64 | 1 65 | 0 66 | 1 67 | 0 68 | 1 69 | 1 70 | 0 71 | 0 72 | 1 73 | 0 74 | 1 75 | 1 76 | 0 77 | 0 78 | 0 79 | 0 80 | 0 81 | 0 82 | 0 83 | 0 84 | 0 85 | 1 86 | 1 87 | 1 88 | 1 89 | 0 90 | 0 91 | 0 92 | 0 93 | 0 94 | 0 95 | 0 96 | 0 97 | 0 98 | 0 99 | 1 100 | 1 101 | 1 102 | 1 103 | 0 104 | 1 105 | 1 106 | 1 107 | 1 108 | 1 109 | 0 110 | 1 111 | 0 112 | 1 113 | 1 114 | 1 115 | 0 116 | 0 117 | 0 118 | 1 119 | 0 120 | 0 121 | 0 122 | 0 123 | 1 124 | 1 125 | 1 126 | 0 127 | 1 128 | 0 129 | 1 130 | 0 131 | 0 132 | 0 133 | 1 134 | 0 135 | 0 136 | 0 137 | 0 138 | 0 139 | 0 140 | 0 141 | 0 142 | 0 143 | 1 144 | 0 145 | 0 146 | 0 147 | 1 148 | 0 149 | 0 150 | 1 151 | 0 152 | 1 153 | 1 154 | 0 155 | 0 156 | 1 157 | 0 158 | 0 159 | 0 160 | 1 161 | 1 162 | 0 163 | 0 164 | 0 165 | 0 166 | 0 167 | 1 168 | 1 169 | 0 170 | 0 171 | 0 172 | 0 173 | 0 174 | 1 175 | 1 176 | 1 177 | 0 178 | 1 179 | 0 180 | 1 181 | 0 182 | 0 183 | 0 184 | 1 185 | 1 186 | 0 187 | 1 188 | 1 189 | 1 190 | 0 191 | 1 192 | 1 193 | 0 194 | 0 195 | 0 196 | 0 197 | 0 198 | 0 199 | 0 200 | 0 201 | 1 202 | 1 203 | 0 204 | 0 205 | 0 206 | 1 207 | 1 208 | 0 209 | 0 210 | 0 211 | 1 212 | 1 213 | 0 214 | 1 215 | 1 216 | 1 217 | 0 218 | 0 219 | 1 220 | 0 221 | 0 222 | 1 223 | 1 224 | 1 225 | 1 226 | 0 227 | 1 228 | 1 229 | 0 230 | 1 231 | 1 232 | 1 233 | 0 234 | 1 235 | 0 236 | 0 237 | 1 238 | 1 239 | 1 240 | 1 241 | 0 242 | 0 243 | 0 244 | 0 245 | 0 246 | 0 247 | 1 248 | 1 249 | 0 250 | 0 251 | 0 252 | 1 253 | 0 254 | 1 255 | 1 256 | 1 257 | 0 258 | 0 259 | 0 260 | 0 261 | 1 262 | 1 263 | 1 264 | 1 265 | 1 266 | 0 267 | 1 268 | 1 269 | 1 
270 | 0 271 | 1 272 | 0 273 | 1 274 | 1 275 | 0 276 | 0 277 | 0 278 | 0 279 | 0 280 | 1 281 | 0 282 | 0 283 | 1 284 | 1 285 | 1 286 | 1 287 | 1 288 | 0 289 | 1 290 | 1 291 | 0 292 | 0 293 | 1 294 | 1 295 | 0 296 | 1 297 | 0 298 | 0 299 | 0 300 | 1 301 | 1 302 | 0 303 | 1 304 | 0 305 | 1 306 | 1 307 | 0 308 | 0 309 | 1 310 | 0 311 | 0 312 | 0 313 | 1 314 | 0 315 | 0 316 | 0 317 | 1 318 | 1 319 | 0 320 | 0 321 | 1 322 | 0 323 | 0 324 | 1 325 | 0 326 | 0 327 | 1 328 | 0 329 | 1 330 | 1 331 | 1 332 | 0 333 | 0 334 | 1 335 | 1 336 | 0 337 | 1 338 | 0 339 | 0 340 | 1 341 | 1 342 | 0 343 | 0 344 | 0 345 | 1 346 | 0 347 | 0 348 | 0 349 | 1 350 | 1 351 | 0 352 | 0 353 | 0 354 | 1 355 | 0 356 | 0 357 | 1 358 | 1 359 | 1 360 | 1 361 | 1 362 | 1 363 | 0 364 | 0 365 | 0 366 | 0 367 | 1 368 | 1 369 | 0 370 | 0 371 | 0 372 | 0 373 | 0 374 | 0 375 | 0 376 | 0 377 | 0 378 | 0 379 | 0 380 | 0 381 | 0 382 | 1 383 | 0 384 | 0 385 | 0 386 | 0 387 | 1 388 | 0 389 | 0 390 | 0 391 | 0 392 | 1 393 | 0 394 | 0 395 | 0 396 | 0 397 | 0 398 | 0 399 | 0 400 | 0 401 | 1 402 | 0 403 | 0 404 | 0 405 | 0 406 | 0 407 | 0 408 | 0 409 | 0 410 | 0 411 | 0 412 | 0 413 | 1 414 | 0 415 | 1 416 | 0 417 | 1 418 | 0 419 | 0 420 | 0 421 | 0 422 | 1 423 | 0 424 | 0 425 | 0 426 | 1 427 | 0 428 | 1 429 | 0 430 | 0 431 | 0 432 | 0 433 | 0 434 | 0 435 | 0 436 | 1 437 | 1 438 | 0 439 | 0 440 | 0 441 | 1 442 | 0 443 | 0 444 | 0 445 | 0 446 | 0 447 | 0 448 | 0 449 | 0 450 | 1 451 | 0 452 | 0 453 | 0 454 | 1 455 | 0 456 | 1 457 | 1 458 | 1 459 | 0 460 | 0 461 | 0 462 | 0 463 | 0 464 | 0 465 | 0 466 | 1 467 | 1 468 | 1 469 | 0 470 | 0 471 | 0 472 | 0 473 | 0 474 | 0 475 | 0 476 | 0 477 | 0 478 | 0 479 | 0 480 | 1 481 | 0 482 | 0 483 | 1 484 | 1 485 | 0 486 | 0 487 | 0 488 | 1 489 | 1 490 | 1 491 | 0 492 | 1 493 | 0 494 | 1 495 | 0 496 | 0 497 | 0 498 | 0 499 | 0 500 | 0 501 | 0 502 | 0 503 | 0 504 | 0 505 | 0 506 | 0 507 | 1 508 | 0 509 | 0 510 | 0 511 | 0 512 | 0 513 | 0 514 | 0 515 | 1 516 | 1 517 | 0 518 | 0 519 | 0 520 | 1 521 | 0 522 | 0 523 | 1 524 | 1 525 | 0 526 | 0 527 | 0 528 | 0 529 | 0 530 | 0 531 | 1 532 | 0 533 | 0 534 | 0 535 | 0 536 | 0 537 | 0 538 | 0 539 | 0 540 | 0 541 | 0 542 | 0 543 | 0 544 | 0 545 | 0 546 | 0 547 | 1 548 | 0 549 | 0 550 | 1 551 | 0 552 | 0 553 | 0 554 | 0 555 | 0 556 | 0 557 | 0 558 | 0 559 | 0 560 | 0 561 | 0 562 | 0 563 | 0 564 | 0 565 | 0 566 | 1 567 | 0 568 | 0 569 | 1 570 | 1 571 | 1 572 | 1 573 | 0 574 | 0 575 | 1 576 | 0 577 | 0 578 | 0 579 | 0 580 | 0 581 | 0 582 | 1 583 | 1 584 | 0 585 | 0 586 | 0 587 | 1 588 | 0 589 | 1 590 | 0 591 | 1 592 | 1 593 | 1 594 | 0 595 | 1 596 | 0 597 | 0 598 | 0 599 | 0 600 | 0 601 | 0 602 | 0 603 | 0 604 | 1 605 | 1 606 | 1 607 | 0 608 | 0 609 | 1 610 | 0 611 | 1 612 | 1 613 | 1 614 | 0 615 | 0 616 | 0 617 | 0 618 | 0 619 | 0 620 | 0 621 | 0 622 | 0 623 | 0 624 | 0 625 | 0 626 | 0 627 | 1 628 | 0 629 | 0 630 | 0 631 | 0 632 | 0 633 | 0 634 | 1 635 | 0 636 | 0 637 | 1 638 | 0 639 | 0 640 | 0 641 | 0 642 | 0 643 | 0 644 | 0 645 | 0 646 | 0 647 | 0 648 | 0 649 | 1 650 | 0 651 | 0 652 | 0 653 | 0 654 | 0 655 | 0 656 | 0 657 | 0 658 | 0 659 | 1 660 | 0 661 | 0 662 | 0 663 | 0 664 | 0 665 | 0 666 | 0 667 | 0 668 | 0 669 | 1 670 | 1 671 | 1 672 | 0 673 | 0 674 | 0 675 | 0 676 | 0 677 | 0 678 | 0 679 | 0 680 | 0 681 | 1 682 | 1 683 | 0 684 | 0 685 | 0 686 | 0 687 | 0 688 | 0 689 | 0 690 | 0 691 | 0 692 | 1 693 | 0 694 | 0 695 | 0 696 | 0 697 | 1 698 | 1 699 | 1 700 | -------------------------------------------------------------------------------- /Data Sets/test: 
--------------------------------------------------------------------------------
1 | This is a test dataset
2 |
--------------------------------------------------------------------------------
/Presentation and Graphs/Breast_cancer.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iinaimaf/Hybridized-Harris-hawk-whale-optimization-algorithm/48c178e7ccb9e506ce6a29256aadb4d72c7ab374/Presentation and Graphs/Breast_cancer.png
--------------------------------------------------------------------------------
/Presentation and Graphs/E_value.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iinaimaf/Hybridized-Harris-hawk-whale-optimization-algorithm/48c178e7ccb9e506ce6a29256aadb4d72c7ab374/Presentation and Graphs/E_value.png
--------------------------------------------------------------------------------
/Presentation and Graphs/HHA+WOA.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iinaimaf/Hybridized-Harris-hawk-whale-optimization-algorithm/48c178e7ccb9e506ce6a29256aadb4d72c7ab374/Presentation and Graphs/HHA+WOA.png
--------------------------------------------------------------------------------
/Presentation and Graphs/Low_resources.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iinaimaf/Hybridized-Harris-hawk-whale-optimization-algorithm/48c178e7ccb9e506ce6a29256aadb4d72c7ab374/Presentation and Graphs/Low_resources.png
--------------------------------------------------------------------------------
/Presentation and Graphs/Project presentation.pptx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iinaimaf/Hybridized-Harris-hawk-whale-optimization-algorithm/48c178e7ccb9e506ce6a29256aadb4d72c7ab374/Presentation and Graphs/Project presentation.pptx
--------------------------------------------------------------------------------
/Presentation and Graphs/credit_card.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iinaimaf/Hybridized-Harris-hawk-whale-optimization-algorithm/48c178e7ccb9e506ce6a29256aadb4d72c7ab374/Presentation and Graphs/credit_card.png
--------------------------------------------------------------------------------
/Presentation and Graphs/limited_resource.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iinaimaf/Hybridized-Harris-hawk-whale-optimization-algorithm/48c178e7ccb9e506ce6a29256aadb4d72c7ab374/Presentation and Graphs/limited_resource.png
--------------------------------------------------------------------------------
/Presentation and Graphs/mean_vs_max1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iinaimaf/Hybridized-Harris-hawk-whale-optimization-algorithm/48c178e7ccb9e506ce6a29256aadb4d72c7ab374/Presentation and Graphs/mean_vs_max1.png
--------------------------------------------------------------------------------
/Presentation and Graphs/mean_vs_max2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iinaimaf/Hybridized-Harris-hawk-whale-optimization-algorithm/48c178e7ccb9e506ce6a29256aadb4d72c7ab374/Presentation and Graphs/mean_vs_max2.png
--------------------------------------------------------------------------------
/Presentation and Graphs/mean_vs_max3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iinaimaf/Hybridized-Harris-hawk-whale-optimization-algorithm/48c178e7ccb9e506ce6a29256aadb4d72c7ab374/Presentation and Graphs/mean_vs_max3.png
--------------------------------------------------------------------------------
/Presentation and Graphs/mean_vs_max4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iinaimaf/Hybridized-Harris-hawk-whale-optimization-algorithm/48c178e7ccb9e506ce6a29256aadb4d72c7ab374/Presentation and Graphs/mean_vs_max4.png
--------------------------------------------------------------------------------
/Presentation and Graphs/mean_vs_max5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iinaimaf/Hybridized-Harris-hawk-whale-optimization-algorithm/48c178e7ccb9e506ce6a29256aadb4d72c7ab374/Presentation and Graphs/mean_vs_max5.png
--------------------------------------------------------------------------------
/Presentation and Graphs/testGraph:
--------------------------------------------------------------------------------
1 | Test graph
2 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Hybridized-Harris-hawk-whale-optimization-algorithm
2 | Uses the Harris Hawk and Whale nature-inspired algorithms to train the weights of a neural network. We propose an approach that adjusts neural-network connection weights with a hybrid of Harris Hawk Optimization (HHO) and the Whale Optimization Algorithm (WOA). The hybrid trains neural networks successfully: no single algorithm dominates the others, but the proposed hybrid is a competitive alternative to other P-metaheuristic (population-based) algorithms. We used it to improve classification efficiency on fraud-detection and cancer datasets; our anomaly-detection method is supervised and classification-based. The Harris Hawk/Whale hybrid performs well, with promising results that recommend it for other optimization applications such as scheduling.
3 |
4 | Problem Statement: Our goal is to improve on the backpropagation algorithm for training neural networks. Although many approaches to optimizing neural-network weights have been proposed, there is always room for improvement. The classical model, which tunes the weights by backpropagation, has several drawbacks, so techniques are needed that can optimize the weights, the number of nodes in each hidden layer, the number of hidden layers, and the network topology, all of which directly affect model accuracy. We tested several nature-inspired algorithms hybridized with neural networks, studying particle swarm optimization, the bat algorithm, the firefly algorithm, ant colony optimization, the artificial bee colony algorithm, the salp swarm algorithm, the whale optimization algorithm, the Harris hawk algorithm, and others. We propose two new algorithms: 1) a hybrid of Harris hawk optimization with neural networks, and 2) a hybrid of Harris hawk optimization and whale optimization to train the weights of neural networks.
5 |
6 | Overall Description of the Project
7 | Training artificial neural networks is one of the most demanding challenges researchers face. The core difficulty is the nonlinear nature of the problem and the unknown controlling parameters, namely the weights and biases. The main drawbacks of traditional training algorithms are slow convergence and getting trapped in local optima. This report proposes hybridizing the Harris hawk optimization algorithm, a metaheuristic evolutionary algorithm, with neural networks: we use Harris hawk optimization to optimize the network weights, and we then combine the exploration phase of Harris hawk optimization with the exploitation phase of the whale optimization algorithm. We also present a comparative study of different evolutionary algorithms hybridized with neural networks and compare them with our proposed algorithm. The proposed algorithm is validated on several fraud and cancer datasets, and its statistics are compared with those of rival optimization algorithms; the procured results show that it performs better than the other evolutionary methods tested.
8 | Multilayer feedforward networks have many attributes that suit nonlinear optimization well. Supervised training methods for multilayer perceptrons fall into two major classes: gradient-based and stochastic. Backpropagation, the standard gradient-based method, can train neural-network weights to good results, but it has drawbacks: it can get stuck in local optima and thus produce unreliable outputs, it is strongly dependent on the input data and can be quite sensitive to noise, and its convergence is slow. We therefore use metaheuristic algorithms, which try to preserve a balance between global and local search. They are popular among optimization techniques because they are easy to implement, escape local optima more effectively, and require no gradient information. A simplified sketch of the resulting hybrid training loop is given below.
9 |
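To make the hybrid concrete, here is a minimal Python sketch, not the exact code in this repository's Code/ directory. It assumes a `fitness` callback that takes a population matrix (one flat candidate weight vector per row) and returns one score per row, in the spirit of how `ANN.fitness` scores candidates by classification accuracy; the function name `hybrid_hho_woa`, the flat weight encoding, and the default population size and bounds are illustrative assumptions.

```python
import numpy

def hybrid_hho_woa(fitness, n_weights, n_hawks=20, iters=100, lb=-1.0, ub=1.0):
    # Population of candidate weight vectors ("hawks"), one row per candidate.
    # NOTE: the flat-vector encoding and defaults here are illustrative only.
    hawks = numpy.random.uniform(lb, ub, size=(n_hawks, n_weights))
    scores = fitness(hawks)
    best = int(numpy.argmax(scores))
    rabbit, rabbit_score = hawks[best].copy(), scores[best]  # best-so-far ("rabbit")

    for t in range(iters):
        E0 = 2 * numpy.random.rand() - 1      # initial escape energy in [-1, 1]
        E = 2 * E0 * (1 - t / iters)          # |E| decays as iterations progress
        for i in range(n_hawks):
            if abs(E) >= 1:
                # Exploration (Harris hawk perching): move relative to a random hawk.
                rand_hawk = hawks[numpy.random.randint(n_hawks)]
                r1, r2 = numpy.random.rand(), numpy.random.rand()
                hawks[i] = rand_hawk - r1 * numpy.abs(rand_hawk - 2.0 * r2 * hawks[i])
            else:
                # Exploitation (whale-style encircling of the best solution).
                J = 2.0 * (1.0 - numpy.random.rand())  # random jump strength of the prey
                hawks[i] = rabbit - E * numpy.abs(J * rabbit - hawks[i])
            hawks[i] = numpy.clip(hawks[i], lb, ub)
        scores = fitness(hawks)
        best = int(numpy.argmax(scores))
        if scores[best] > rabbit_score:       # greedy replacement of the rabbit
            rabbit, rabbit_score = hawks[best].copy(), scores[best]
    return rabbit, rabbit_score
```

The design point is the single switch on the escape energy |E|: while |E| >= 1 the population follows the Harris hawk exploration rules, and once it drops below 1 the update is handed to the whale-style encircling of the best solution, which is the hybridization described above.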
10 |
11 |
--------------------------------------------------------------------------------
/User Manual/User Manual.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/iinaimaf/Hybridized-Harris-hawk-whale-optimization-algorithm/48c178e7ccb9e506ce6a29256aadb4d72c7ab374/User Manual/User Manual.pdf
--------------------------------------------------------------------------------
/User Manual/test:
--------------------------------------------------------------------------------
1 | Test
2 |
--------------------------------------------------------------------------------