├── ExampleOutput.PNG
├── NeuralNetwork.cpp
├── NeuralNetwork.h
├── README.md
├── main.cpp
└── seeds_dataset.csv

/ExampleOutput.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Cr4ckC4t/neural-network-from-scratch/86e38de81140be7257d74dabb6d990438be0f973/ExampleOutput.PNG
--------------------------------------------------------------------------------
/NeuralNetwork.cpp:
--------------------------------------------------------------------------------
#include "NeuralNetwork.h"

#include <cmath>
#include <cstdlib>
#include <ctime>
#include <iostream>
#include <algorithm>

/* NEURON */

/*
 * Neuron Constructor
 */
Neuron::Neuron(int n_weights) {
    this->initWeights(n_weights);
    m_nWeights = n_weights;
    m_activation = 0;
    m_output = 0;
    m_delta = 0;
}

/*
 * Neuron Destructor
 */
Neuron::~Neuron() {
    /*pass*/
}

/*
 * Initialize weights
 */
void Neuron::initWeights(int n_weights) {
    // add random small weights (between 0 and 1)
    for (int w = 0; w < n_weights; w++) {
        m_weights.push_back(static_cast<float>(std::rand()) / static_cast<float>(RAND_MAX));
    }
}

/*
 * Calculate the activation of a neuron for a given input
 */
void Neuron::activate(std::vector<float> inputs) {
    // the last weight is assumed to be the bias
    m_activation = m_weights[m_nWeights - 1];

    // accumulate all weighted inputs
    for (size_t i = 0; i < m_nWeights - 1; i++)
    {
        m_activation += m_weights[i] * inputs[i];
    }
}

/*
 * Transfer the activation of the neuron to an actual output
 */
void Neuron::transfer() {
    m_output = 1.0f / (1.0f + std::exp(-m_activation));
}

/* LAYER */

/*
 * Layer Constructor
 */
Layer::Layer(int n_neurons, int n_weights) {
    this->initNeurons(n_neurons, n_weights);
}

/*
 * Layer Destructor
 */
Layer::~Layer() {
    /*pass*/
}

void Layer::initNeurons(int n_neurons, int n_weights) {
    for (int n = 0; n < n_neurons; n++) {
        m_neurons.push_back(Neuron(n_weights));
    }
}


/* NETWORK */

/*
 * Network Constructor
 */
Network::Network() {
    // initialize prng
    std::srand(static_cast<unsigned int>(std::time(nullptr)));

    m_nLayers = 0;
}

/*
 * Network Destructor
 */
Network::~Network() {
    /*pass*/
}

/*
 * Initialize a network manually
 */
void Network::initialize_network(int n_inputs, int n_hidden, int n_outputs) {

    // add a hidden layer (n_hidden neurons are each connected to all inputs)
    this->add_layer(n_hidden, n_inputs + 1);

    // add an output layer (one neuron for each output is connected to all neurons from the previous layer)
    this->add_layer(n_outputs, n_hidden + 1);
}

/*
 * Add another layer to the network
 */
void Network::add_layer(int n_neurons, int n_weights) {
    m_layers.push_back(Layer(n_neurons, n_weights));
    m_nLayers++;
}

/*
 * One forward propagation of an input
 */
std::vector<float> Network::forward_propagate(std::vector<float> inputs) {
    std::vector<float> new_inputs;
    for (size_t i = 0; i < m_nLayers; i++)
    {
        new_inputs.clear();

        // reference the layer neurons directly
        std::vector<Neuron>& layer_neurons = m_layers[i].get_neurons();

        for (size_t n = 0; n < layer_neurons.size(); n++)
        {
            layer_neurons[n].activate(inputs);
            layer_neurons[n].transfer();
            new_inputs.push_back(layer_neurons[n].get_output());
        }
        inputs = new_inputs;
    }
    return inputs;
}
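/*
 * How the forward pass works: every neuron computes
 *   activation = w[0]*x[0] + ... + w[n-2]*x[n-2] + bias
 * and squashes it through the sigmoid
 *   output = 1 / (1 + exp(-activation)).
 * The outputs of one layer become the inputs of the next layer, which is
 * why `inputs` is replaced by `new_inputs` after each iteration above.
 */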
/*
 * Propagate the deviation from an expected output backwards through the network
 */
void Network::backward_propagate_error(std::vector<float> expected) {
    // reverse traverse the layers
    for (size_t i = m_nLayers; i --> 0;)
    {
        // get a reference to the neurons of this layer
        std::vector<Neuron>& layer_neurons = m_layers[i].get_neurons();

        // iterate over each neuron in this layer
        for (size_t n = 0; n < layer_neurons.size(); n++)
        {
            float error = 0.0f;
            // feed the expected result to the output layer
            if (i == m_nLayers - 1)
            {
                error = expected[n] - layer_neurons[n].get_output();
            }
            else {
                for (auto& neu : m_layers[i + 1].get_neurons()) {
                    error += (neu.get_weights()[n] * neu.get_delta());
                }
            }
            // update the delta value of the neuron
            layer_neurons[n].set_delta(error * layer_neurons[n].transfer_derivative());
        }
    }
}

/*
 * Update weights of a network after an error back propagation
 */
void Network::update_weights(std::vector<float> inputs, float l_rate) {
    // iterate over the layers
    for (size_t i = 0; i < m_nLayers; i++)
    {
        std::vector<float> new_inputs = {};
        if (i != 0) {
            // grab the outputs from the previous layer (except for the first layer)
            for (auto& neuron : m_layers[i - 1].get_neurons())
            {
                new_inputs.push_back(neuron.get_output());
            }
        }
        else {
            // use the original input for the first layer (ignore the bias input / last element)
            new_inputs = std::vector<float>(inputs.begin(), inputs.end() - 1);
        }

        // get a reference to the neurons of this layer
        std::vector<Neuron>& layer_neurons = m_layers[i].get_neurons();

        for (size_t n = 0; n < layer_neurons.size(); n++)
        {
            // get a reference to the weights of the neuron
            std::vector<float>& weights = layer_neurons[n].get_weights();
            // update weights
            for (size_t j = 0; j < new_inputs.size(); j++)
            {
                weights[j] += l_rate * layer_neurons[n].get_delta() * new_inputs[j];
            }
            // update bias
            weights.back() += l_rate * layer_neurons[n].get_delta();
        }
    }
}

/*
 * Train the network with training data
 */
void Network::train(std::vector<std::vector<float>> trainings_data, float l_rate, size_t n_epoch, size_t n_outputs) {
    for (size_t e = 0; e < n_epoch; e++)
    {
        float sum_error = 0;

        for (const auto& row : trainings_data)
        {
            std::vector<float> outputs = this->forward_propagate(row);
            // one-hot encode the expected class (the class index is the last element of the row)
            std::vector<float> expected(n_outputs, 0.0f);
            expected[static_cast<size_t>(row.back())] = 1.0f;
            for (size_t x = 0; x < n_outputs; x++)
            {
                sum_error += static_cast<float>(std::pow((expected[x] - outputs[x]), 2));
            }
            this->backward_propagate_error(expected);
            this->update_weights(row, l_rate);
        }
        std::cout << "[>] epoch=" << e << ", l_rate=" << l_rate << ", error=" << sum_error << std::endl;
    }
}
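/*
 * A note on the math: transfer_derivative() returns output * (1 - output),
 * the derivative of the sigmoid, so every neuron stores
 *   delta = error * output * (1 - output)
 * and update_weights() then nudges each weight by
 *   weight += l_rate * delta * input
 * where the bias uses a constant input of 1.
 */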
/*
 * Make a prediction for an input (one forward propagation)
 */
int Network::predict(std::vector<float> input) {
    std::vector<float> outputs = this->forward_propagate(input);
    // return the index of the output neuron with the highest output
    return static_cast<int>(std::max_element(outputs.begin(), outputs.end()) - outputs.begin());
}

/*
 * Display the network in a human readable format
 */
void Network::display_human() {
    std::cout << "[Network] (Layers: " << m_nLayers << ")" << std::endl;

    std::cout << "{" << std::endl;
    for (size_t l = 0; l < m_layers.size(); l++)
    {
        Layer layer = m_layers[l];
        std::cout << "\t (Layer " << l << "): {";
        for (size_t i = 0; i < layer.get_neurons().size(); i++)
        {
            Neuron neuron = layer.get_neurons()[i];
            std::cout << "<(Neuron " << i << "): [ weights={";
            std::vector<float> weights = neuron.get_weights();
            for (size_t w = 0; w < weights.size(); ++w)
            {
                std::cout << weights[w];
                if (w < weights.size() - 1) {
                    std::cout << ", ";
                }
            }
            std::cout << "}, output=" << neuron.get_output() << ", activation=" << neuron.get_activation() << ", delta=" << neuron.get_delta();
            std::cout << "]>";
            if (i < layer.get_neurons().size() - 1) {
                std::cout << ", ";
            }
        }
        std::cout << "}";
        if (l < m_layers.size() - 1) {
            std::cout << ", ";
        }
        std::cout << std::endl;
    }
    std::cout << "}" << std::endl;
}
--------------------------------------------------------------------------------
/NeuralNetwork.h:
--------------------------------------------------------------------------------
#pragma once

#include <cstddef>
#include <vector>

class Neuron {
public:
    Neuron(int n_weights);
    ~Neuron();

    void activate(std::vector<float> inputs);
    void transfer();
    float transfer_derivative() { return static_cast<float>(m_output * (1.0 - m_output)); };

    // return mutable reference to the neuron weights
    std::vector<float>& get_weights(void) { return m_weights; };

    float get_output(void) { return m_output; };
    float get_activation(void) { return m_activation; };
    float get_delta(void) { return m_delta; };

    void set_delta(float delta) { m_delta = delta; };

private:
    size_t m_nWeights;
    std::vector<float> m_weights;
    float m_activation;
    float m_output;
    float m_delta;

private:
    void initWeights(int n_weights);
};

class Layer {
public:
    Layer(int n_neurons, int n_weights);
    ~Layer();

    // return mutable reference to the neurons
    std::vector<Neuron>& get_neurons(void) { return m_neurons; };

private:
    void initNeurons(int n_neurons, int n_weights);

    std::vector<Neuron> m_neurons;
};

class Network {
public:
    Network();
    ~Network();

    void initialize_network(int n_inputs, int n_hidden, int n_outputs);

    void add_layer(int n_neurons, int n_weights);
    std::vector<float> forward_propagate(std::vector<float> inputs);
    void backward_propagate_error(std::vector<float> expected);
    void update_weights(std::vector<float> inputs, float l_rate);
    void train(std::vector<std::vector<float>> trainings_data, float l_rate, size_t n_epoch, size_t n_outputs);
    int predict(std::vector<float> input);

    void display_human();

private:
    size_t m_nLayers;
    std::vector<Layer> m_layers;

};
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Implementation of a Neural Network from Scratch in C++

> Only using the C++ STL (standard template library).
## Disclaimer

This was not my idea. I merely followed up on [this great tutorial](https://machinelearningmastery.com/implement-backpropagation-algorithm-scratch-python/), written by Jason Brownlee, where he explains the steps of programming a neural network from scratch in **Python** without the use of any library.

## Details

Porting Jason Brownlee's Python code to C++ is a great exercise to freshen up on object-oriented programming (using classes to represent neurons and layers) and vector handling. It also helps a great deal with understanding how neural networks work on the inside. However, there are many ways a network like this can be built in C++, and I chose to dive into it without much planning. So the code might not be very beautiful, perfectly encapsulated or easy to comprehend (as a matter of fact, I know it isn't 😔), but it was fun nevertheless.

## Compilation & Usage

This code was developed in Visual Studio (2019 v16.9.4) and compiles with `g++ 8.1.0` (using C++14).

You can either import the code into an empty console project in Visual Studio or compile it directly on the command line with:

```
g++ -Wall -Wpedantic main.cpp NeuralNetwork.cpp
```

Simply running the resulting binary will read the provided `csv` file from the same directory, train the network and attempt predictions afterwards. Based on the prediction results, the accuracy is calculated and printed to `stdout`.
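You can also drive the `Network` class from your own code. Below is a minimal sketch using the public API from `NeuralNetwork.h`; the two data rows are made-up toy values that merely follow the convention used in `main.cpp` (normalized features first, the class index as the last element):

```
#include <iostream>
#include <vector>

#include "NeuralNetwork.h"

int main() {
    // two toy samples: two features each, class index (0 or 1) last
    std::vector<std::vector<float>> data = {
        {0.1f, 0.9f, 0.0f},
        {0.8f, 0.2f, 1.0f},
    };

    Network network;
    network.initialize_network(2, 3, 2); // 2 inputs, 3 hidden neurons, 2 outputs
    network.train(data, 0.3f, 100, 2);   // l_rate=0.3, 100 epochs, 2 output classes
    std::cout << network.predict(data[0]) << std::endl; // index of the predicted class
    return 0;
}
```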
### Result

Using this network on the given seeds dataset (with cross-validation) we are able to achieve an accuracy of more than 90%.

The following result was achieved using a learning rate of 0.3 with 500 epochs and 5 neurons in the first hidden layer on a network with one hidden and one output layer.

![Screenshot of example output](ExampleOutput.PNG)
--------------------------------------------------------------------------------
/main.cpp:
--------------------------------------------------------------------------------
#include <iostream>
#include <fstream>
#include <string>
#include <vector>
#include <map>
#include <set>
#include <regex>
#include <algorithm>
#include <numeric>
#include <ctime>
#include <cstdlib>
#include "NeuralNetwork.h"

std::vector<std::vector<float>> load_csv_data(std::string filename);
std::vector<float> evaluate_network(std::vector<std::vector<float>> dataset, int n_folds, float l_rate, int n_epoch, int n_hidden);
float accuracy_metric(std::vector<int> expect, std::vector<int> predict);


/*
 * This main function will load a csv-dataset and normalize the data. Subsequently, a network
 * for this data will be initialized, trained and evaluated using cross-validation.
 *
 * Feel free to play around with the folds, learning rate, epochs and hidden neurons.
 * If you want to modify the network itself (activation function, additional layers, etc.)
 * you will want to look at NeuralNetwork.cpp.
 *
 * (See the bottom of this file for a second main function that displays and tests a very small network.)
 */
int main(int argc, char* argv[]) {
    std::cout << "Neural Network with Backpropagation in C++ from scratch" << std::endl;

    std::vector<std::vector<float>> csv_data;
    csv_data = load_csv_data("seeds_dataset.csv");

    /*
     * Normalize the last column (turning the outputs into values starting from 0 for the one-hot encoding in the end)
     */
    std::map<int, int> lookup = {};
    int index = 0;
    for (auto& vec : csv_data) {
        std::pair<std::map<int, int>::iterator, bool> ret;
        // insert unique values
        ret = lookup.insert(std::pair<int, int>(static_cast<int>(vec.back()), index));
        // update the vector with the new index
        vec.back() = static_cast<float>(ret.first->second);
        // if an actual new value was found, increase the index
        if (ret.second) {
            index++;
        }
    }

    int n_folds = 5;     // how many folds you want to create from the given dataset
    float l_rate = 0.3f; // how much of an impact shall an error have on a weight
    int n_epoch = 500;   // how many times should weights be updated
    int n_hidden = 5;    // how many neurons you want in the first layer

    // test the implemented neural network
    std::vector<float> scores = evaluate_network(csv_data, n_folds, l_rate, n_epoch, n_hidden);

    // calculate the mean of the accuracy scores over all cross-validation folds
    float mean = std::accumulate(scores.begin(), scores.end(), decltype(scores)::value_type(0)) / static_cast<float>(scores.size());

    std::cout << "Mean accuracy: " << mean << std::endl;

    return 0;
}
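/*
 * Evaluate the network with k-fold cross-validation: the dataset is split
 * into n_folds random folds; each fold serves once as the test set while
 * the remaining folds are merged into the training set. This yields one
 * accuracy score per fold.
 */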
std::vector<float> evaluate_network(std::vector<std::vector<float>> dataset, int n_folds, float l_rate, int n_epoch, int n_hidden) {

    /* Split dataset into k folds */

    std::vector<std::vector<std::vector<float>>> dataset_splits;
    // initialize prng
    std::srand(static_cast<unsigned int>(std::time(nullptr)));

    std::vector<float> scores;

    size_t fold_size = static_cast<size_t>(dataset.size() / n_folds);
    for (int f = 0; f < n_folds; f++)
    {
        std::vector<std::vector<float>> fold;
        while (fold.size() < fold_size) {
            size_t n = std::rand() % dataset.size(); // get a random index

            // add the chosen element to the fold and remove it from the dataset
            std::swap(dataset[n], dataset.back());
            fold.push_back(dataset.back());
            dataset.pop_back();
        }

        dataset_splits.push_back(fold);
    }

    /* Iterate over folds */
    // choose one as test and the rest as training sets
    for (size_t i = 0; i < dataset_splits.size(); i++)
    {
        std::vector<std::vector<std::vector<float>>> train_sets = dataset_splits;
        std::swap(train_sets[i], train_sets.back());
        std::vector<std::vector<float>> test_set = train_sets.back();
        train_sets.pop_back();

        // merge the multiple train_sets into one train set
        std::vector<std::vector<float>> train_set;
        for (auto& s : train_sets)
        {
            for (auto& row : s) {
                train_set.push_back(row);
            }
        }

        // store the expected results
        std::vector<int> expected;
        for (auto& row : test_set)
        {
            expected.push_back(static_cast<int>(row.back()));
            // just ensure that the actual result is not saved in the test data
            row.back() = 42;
        }

        std::vector<int> predicted;

        std::set<float> results;
        for (const auto& r : train_set) {
            results.insert(r.back());
        }
        int n_outputs = static_cast<int>(results.size());
        int n_inputs = static_cast<int>(train_set[0].size()) - 1;

        /* Backpropagation with stochastic gradient descent */
        Network* network = new Network();
        network->initialize_network(n_inputs, n_hidden, n_outputs);
        network->train(train_set, l_rate, n_epoch, n_outputs);

        for (const auto& row : test_set)
        {
            predicted.push_back(network->predict(row));
        }

        scores.push_back(accuracy_metric(expected, predicted));
        delete network; // free the network before evaluating the next fold
    }

    return scores;
}

/*
 * Calculate the percentage of correct predictions
 */
float accuracy_metric(std::vector<int> expect, std::vector<int> predict) {
    int correct = 0;

    for (size_t i = 0; i < predict.size(); i++)
    {
        if (predict[i] == expect[i]) {
            correct++;
        }
    }
    return static_cast<float>(correct * 100.0f / predict.size());
}
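/*
 * Feature columns are scaled to the range [0, 1] with min-max normalization:
 *   x' = (x - min) / (max - min)
 * using the per-column minima and maxima collected while reading the file.
 * For example, a value of 15.26 in the first column of the seeds dataset
 * (which ranges from 10.59 to 21.18) becomes 4.67 / 10.59 ≈ 0.44.
 */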
/*
 * Load comma separated values from file and normalize the values
 */
std::vector<std::vector<float>> load_csv_data(std::string filename) {
    const std::regex comma(",");

    std::ifstream csv_file(filename);

    std::vector<std::vector<float>> data;

    std::string line;

    std::vector<float> mins;
    std::vector<float> maxs;
    bool first = true;

    while (csv_file && std::getline(csv_file, line)) {
        // split line by commas
        std::vector<std::string> srow{ std::sregex_token_iterator(line.begin(), line.end(), comma, -1), std::sregex_token_iterator() };
        // create float vector
        std::vector<float> row(srow.size());
        // transform the strings to floats
        std::transform(srow.begin(), srow.end(), row.begin(), [](std::string const& val) { return std::stof(val); });

        // keep track of the min and max value for each column for subsequent normalization
        if (first) {
            mins = row;
            maxs = row;
            first = false;
        }
        else {
            for (size_t t = 0; t < row.size(); t++)
            {
                if (row[t] > maxs[t]) {
                    maxs[t] = row[t];
                }
                else if (row[t] < mins[t]) {
                    mins[t] = row[t];
                }
            }
        }

        data.push_back(row);
    }

    // normalize values
    for (auto& vec : data) {
        // ignore the last column (the output)
        for (size_t i = 0; i < vec.size() - 1; i++)
        {
            vec[i] = (vec[i] - mins[i]) / (maxs[i] - mins[i]);
        }
    }

    return data;
}


/*
 * // Swap this main function in (and comment out the one above) to test the
 * // network on a very small dataset and visualize it
 *
int main(int argc, char* argv[]) {
    std::cout << "Neural Network with Backpropagation in C++ from scratch (development-phase)" << std::endl;

    // define a set of training data
    // each row has two inputs and one result
    // the result is either one or zero (binary classification)
    std::vector<std::vector<float>> traindata {
        {2.7810836, 2.550537003, 0},
        {1.465489372, 2.362125076, 0},
        {3.396561688, 4.400293529, 0},
        {1.38807019, 1.850220317, 0},
        {3.06407232, 3.005305973, 0},
        {7.627531214, 2.759262235, 1},
        {5.332441248, 2.088626775, 1},
        {6.922596716, 1.77106367, 1},
        {8.675418651, -0.242068655, 1},
        {7.673756466, 3.508563011, 1}
    };

    // get the amount of possible outputs (binary classification => 2 possible outputs in this case)
    std::set<float> results;
    for (const auto& r : traindata) {
        results.insert(r[r.size() - 1]);
    }
    int n_outputs = static_cast<int>(results.size());
    int n_inputs = static_cast<int>(traindata[0].size()) - 1;

    // we can experiment with these values
    float learn_rate = 0.4f; // the learn rate specifies how much the error will influence a weight
    int epochs = 50;         // the epochs specify how often an error will be back propagated through the network

    // initialize a network with 2 neurons in the first hidden layer
    Network* network = new Network();
    network->initialize_network(n_inputs, 2, n_outputs);

    // train the network (forward propagation, backward propagation and weight updating)
    network->train(traindata, learn_rate, epochs, n_outputs);

    // display the created network (in an understandable format) for visualization purposes
    network->display_human();

    // make a prediction on the same data we trained with
    std::cout << "[Prediction]" << std::endl;

    for (const auto& data : traindata) {
        int prediction = network->predict(data);
        std::cout << "\t[>] Expected=" << data.back() << ", Got=" << prediction << std::endl;
    }

    delete network;
    return 0;
}*/
--------------------------------------------------------------------------------
/seeds_dataset.csv:
--------------------------------------------------------------------------------
15.26,14.84,0.871,5.763,3.312,2.221,5.22,1
14.88,14.57,0.8811,5.554,3.333,1.018,4.956,1
14.29,14.09,0.905,5.291,3.337,2.699,4.825,1
13.84,13.94,0.8955,5.324,3.379,2.259,4.805,1
16.14,14.99,0.9034,5.658,3.562,1.355,5.175,1
14.38,14.21,0.8951,5.386,3.312,2.462,4.956,1
14.69,14.49,0.8799,5.563,3.259,3.586,5.219,1
14.11,14.1,0.8911,5.42,3.302,2.70,5.00,1
16.63,15.46,0.8747,6.053,3.465,2.04,5.877,1
16.44,15.25,0.888,5.884,3.505,1.969,5.533,1
15.26,14.85,0.8696,5.714,3.242,4.543,5.314,1
14.03,14.16,0.8796,5.438,3.201,1.717,5.001,1
13.89,14.02,0.888,5.439,3.199,3.986,4.738,1
13.78,14.06,0.8759,5.479,3.156,3.136,4.872,1
13.74,14.05,0.8744,5.482,3.114,2.932,4.825,1
14.59,14.28,0.8993,5.351,3.333,4.185,4.781,1
13.99,13.83,0.9183,5.119,3.383,5.234,4.781,1
15.69,14.75,0.9058,5.527,3.514,1.599,5.046,1
14.7,14.21,0.9153,5.205,3.466,1.767,4.649,1
12.72,13.57,0.8686,5.226,3.049,4.102,4.914,1
14.16,14.4,0.8584,5.658,3.129,3.072,5.176,1
14.11,14.26,0.8722,5.52,3.168,2.688,5.219,1
15.88,14.9,0.8988,5.618,3.507,0.7651,5.091,1
12.08,13.23,0.8664,5.099,2.936,1.415,4.961,1
15.01,14.76,0.8657,5.789,3.245,1.791,5.001,1
16.19,15.16,0.8849,5.833,3.421,0.903,5.307,1
13.02,13.76,0.8641,5.395,3.026,3.373,4.825,1
12.74,13.67,0.8564,5.395,2.956,2.504,4.869,1
14.11,14.18,0.882,5.541,3.221,2.754,5.038,1
13.45,14.02,0.8604,5.516,3.065,3.531,5.097,1
13.16,13.82,0.8662,5.454,2.975,0.8551,5.056,1
15.49,14.94,0.8724,5.757,3.371,3.412,5.228,1
14.09,14.41,0.8529,5.717,3.186,3.92,5.299,1
13.94,14.17,0.8728,5.585,3.15,2.124,5.012,1
15.05,14.68,0.8779,5.712,3.328,2.129,5.36,1
16.12,15.00,0.90,5.709,3.485,2.27,5.443,1
16.2,15.27,0.8734,5.826,3.464,2.823,5.527,1
17.08,15.38,0.9079,5.832,3.683,2.956,5.484,1
14.8,14.52,0.8823,5.656,3.288,3.112,5.309,1
14.28,14.17,0.8944,5.397,3.298,6.685,5.001,1
13.54,13.85,0.8871,5.348,3.156,2.587,5.178,1
13.5,13.85,0.8852,5.351,3.158,2.249,5.176,1
13.16,13.55,0.9009,5.138,3.201,2.461,4.783,1
15.5,14.86,0.882,5.877,3.396,4.711,5.528,1
15.11,14.54,0.8986,5.579,3.462,3.128,5.18,1
13.8,14.04,0.8794,5.376,3.155,1.56,4.961,1
15.36,14.76,0.8861,5.701,3.393,1.367,5.132,1
14.99,14.56,0.8883,5.57,3.377,2.958,5.175,1
14.79,14.52,0.8819,5.545,3.291,2.704,5.111,1
14.86,14.67,0.8676,5.678,3.258,2.129,5.351,1
14.43,14.4,0.8751,5.585,3.272,3.975,5.144,1
15.78,14.91,0.8923,5.674,3.434,5.593,5.136,1
14.49,14.61,0.8538,5.715,3.113,4.116,5.396,1
14.33,14.28,0.8831,5.504,3.199,3.328,5.224,1
14.52,14.6,0.8557,5.741,3.113,1.481,5.487,1
15.03,14.77,0.8658,5.702,3.212,1.933,5.439,1
14.46,14.35,0.8818,5.388,3.377,2.802,5.044,1
14.92,14.43,0.9006,5.384,3.412,1.142,5.088,1
15.38,14.77,0.8857,5.662,3.419,1.999,5.222,1
12.11,13.47,0.8392,5.159,3.032,1.502,4.519,1
11.42,12.86,0.8683,5.008,2.85,2.7,4.607,1
11.23,12.63,0.884,4.902,2.879,2.269,4.703,1
12.36,13.19,0.8923,5.076,3.042,3.22,4.605,1
13.22,13.84,0.868,5.395,3.07,4.157,5.088,1
12.78,13.57,0.8716,5.262,3.026,1.176,4.782,1
12.88,13.5,0.8879,5.139,3.119,2.352,4.607,1
14.34,14.37,0.8726,5.63,3.19,1.313,5.15,1
14.01,14.29,0.8625,5.609,3.158,2.217,5.132,1
14.37,14.39,0.8726,5.569,3.153,1.464,5.3,1
12.73,13.75,0.8458,5.412,2.882,3.533,5.067,1
17.63,15.98,0.8673,6.191,3.561,4.076,6.06,2
16.84,15.67,0.8623,5.998,3.484,4.675,5.877,2
17.26,15.73,0.8763,5.978,3.594,4.539,5.791,2
19.11,16.26,0.9081,6.154,3.93,2.936,6.079,2
16.82,15.51,0.8786,6.017,3.486,4.004,5.841,2
16.77,15.62,0.8638,5.927,3.438,4.92,5.795,2
17.32,15.91,0.8599,6.064,3.403,3.824,5.922,2
20.71,17.23,0.8763,6.579,3.814,4.451,6.451,2
18.94,16.49,0.875,6.445,3.639,5.064,6.362,2
17.12,15.55,0.8892,5.85,3.566,2.858,5.746,2
16.53,15.34,0.8823,5.875,3.467,5.532,5.88,2
18.72,16.19,0.8977,6.006,3.857,5.324,5.879,2
20.2,16.89,0.8894,6.285,3.864,5.173,6.187,2
19.57,16.74,0.8779,6.384,3.772,1.472,6.273,2
19.51,16.71,0.878,6.366,3.801,2.962,6.185,2
18.27,16.09,0.887,6.173,3.651,2.443,6.197,2
18.88,16.26,0.8969,6.084,3.764,1.649,6.109,2
18.98,16.66,0.859,6.549,3.67,3.691,6.498,2
21.18,17.21,0.8989,6.573,4.033,5.78,6.231,2
20.88,17.05,0.9031,6.45,4.032,5.016,6.321,2
20.1,16.99,0.8746,6.581,3.785,1.955,6.449,2
18.76,16.2,0.8984,6.172,3.796,3.12,6.053,2
18.81,16.29,0.8906,6.272,3.693,3.237,6.053,2
18.59,16.05,0.9066,6.037,3.86,6.001,5.877,2
18.36,16.52,0.8452,6.666,3.485,4.933,6.448,2
16.87,15.65,0.8648,6.139,3.463,3.696,5.967,2
19.31,16.59,0.8815,6.341,3.81,3.477,6.238,2
18.98,16.57,0.8687,6.449,3.552,2.144,6.453,2
18.17,16.26,0.8637,6.271,3.512,2.853,6.273,2
18.72,16.34,0.881,6.219,3.684,2.188,6.097,2
16.41,15.25,0.8866,5.718,3.525,4.217,5.618,2
17.99,15.86,0.8992,5.89,3.694,2.068,5.837,2
19.46,16.5,0.8985,6.113,3.892,4.308,6.009,2
19.18,16.63,0.8717,6.369,3.681,3.357,6.229,2
18.95,16.42,0.8829,6.248,3.755,3.368,6.148,2
18.83,16.29,0.8917,6.037,3.786,2.553,5.879,2
18.85,16.17,0.9056,6.152,3.806,2.843,6.2,2
17.63,15.86,0.88,6.033,3.573,3.747,5.929,2
19.94,16.92,0.8752,6.675,3.763,3.252,6.55,2
18.55,16.22,0.8865,6.153,3.674,1.738,5.894,2
18.45,16.12,0.8921,6.107,3.769,2.235,5.794,2
19.38,16.72,0.8716,6.303,3.791,3.678,5.965,2
19.13,16.31,0.9035,6.183,3.902,2.109,5.924,2
19.14,16.61,0.8722,6.259,3.737,6.682,6.053,2
20.97,17.25,0.8859,6.563,3.991,4.677,6.316,2
19.06,16.45,0.8854,6.416,3.719,2.248,6.163,2
18.96,16.2,0.9077,6.051,3.897,4.334,5.75,2
19.15,16.45,0.889,6.245,3.815,3.084,6.185,2
18.89,16.23,0.9008,6.227,3.769,3.639,5.966,2
20.03,16.9,0.8811,6.493,3.857,3.063,6.32,2
20.24,16.91,0.8897,6.315,3.962,5.901,6.188,2
18.14,16.12,0.8772,6.059,3.563,3.619,6.011,2
16.17,15.38,0.8588,5.762,3.387,4.286,5.703,2
18.43,15.97,0.9077,5.98,3.771,2.984,5.905,2
15.99,14.89,0.9064,5.363,3.582,3.336,5.144,2
18.75,16.18,0.8999,6.111,3.869,4.188,5.992,2
18.65,16.41,0.8698,6.285,3.594,4.391,6.102,2
17.98,15.85,0.8993,5.979,3.687,2.257,5.919,2
20.16,17.03,0.8735,6.513,3.773,1.91,6.185,2
17.55,15.66,0.8991,5.791,3.69,5.366,5.661,2
18.3,15.89,0.9108,5.979,3.755,2.837,5.962,2
18.94,16.32,0.8942,6.144,3.825,2.908,5.949,2
15.38,14.9,0.8706,5.884,3.268,4.462,5.795,2
16.16,15.33,0.8644,5.845,3.395,4.266,5.795,2
15.56,14.89,0.8823,5.776,3.408,4.972,5.847,2
15.38,14.66,0.899,5.477,3.465,3.6,5.439,2
17.36,15.76,0.8785,6.145,3.574,3.526,5.971,2
15.57,15.15,0.8527,5.92,3.231,2.64,5.879,2
15.6,15.11,0.858,5.832,3.286,2.725,5.752,2
16.23,15.18,0.885,5.872,3.472,3.769,5.922,2
13.07,13.92,0.848,5.472,2.994,5.304,5.395,3
13.32,13.94,0.8613,5.541,3.073,7.035,5.44,3
13.34,13.95,0.862,5.389,3.074,5.995,5.307,3
12.22,13.32,0.8652,5.224,2.967,5.469,5.221,3
11.82,13.4,0.8274,5.314,2.777,4.471,5.178,3
11.21,13.13,0.8167,5.279,2.687,6.169,5.275,3
11.43,13.13,0.8335,5.176,2.719,2.221,5.132,3
12.49,13.46,0.8658,5.267,2.967,4.421,5.002,3
12.7,13.71,0.8491,5.386,2.911,3.26,5.316,3
10.79,12.93,0.8107,5.317,2.648,5.462,5.194,3
11.83,13.23,0.8496,5.263,2.84,5.195,5.307,3
12.01,13.52,0.8249,5.405,2.776,6.992,5.27,3
12.26,13.6,0.8333,5.408,2.833,4.756,5.36,3
11.18,13.04,0.8266,5.22,2.693,3.332,5.001,3
11.36,13.05,0.8382,5.175,2.755,4.048,5.263,3
11.19,13.05,0.8253,5.25,2.675,5.813,5.219,3
11.34,12.87,0.8596,5.053,2.849,3.347,5.003,3
12.13,13.73,0.8081,5.394,2.745,4.825,5.22,3
11.75,13.52,0.8082,5.444,2.678,4.378,5.31,3
11.49,13.22,0.8263,5.304,2.695,5.388,5.31,3
12.54,13.67,0.8425,5.451,2.879,3.082,5.491,3
12.02,13.33,0.8503,5.35,2.81,4.271,5.308,3
12.05,13.41,0.8416,5.267,2.847,4.988,5.046,3
12.55,13.57,0.8558,5.333,2.968,4.419,5.176,3
11.14,12.79,0.8558,5.011,2.794,6.388,5.049,3
12.1,13.15,0.8793,5.105,2.941,2.201,5.056,3
12.44,13.59,0.8462,5.319,2.897,4.924,5.27,3
12.15,13.45,0.8443,5.417,2.837,3.638,5.338,3
11.35,13.12,0.8291,5.176,2.668,4.337,5.132,3
11.24,13,0.8359,5.09,2.715,3.521,5.088,3
11.02,13,0.8189,5.325,2.701,6.735,5.163,3
11.55,13.1,0.8455,5.167,2.845,6.715,4.956,3
11.27,12.97,0.8419,5.088,2.763,4.309,5.00,3
11.4,13.08,0.8375,5.136,2.763,5.588,5.089,3
10.83,12.96,0.8099,5.278,2.641,5.182,5.185,3
10.8,12.57,0.859,4.981,2.821,4.773,5.063,3
11.26,13.01,0.8355,5.186,2.71,5.335,5.092,3
10.74,12.73,0.8329,5.145,2.642,4.702,4.963,3
11.48,13.05,0.8473,5.18,2.758,5.876,5.002,3
12.21,13.47,0.8453,5.357,2.893,1.661,5.178,3
11.41,12.95,0.856,5.09,2.775,4.957,4.825,3
12.46,13.41,0.8706,5.236,3.017,4.987,5.147,3
12.19,13.36,0.8579,5.24,2.909,4.857,5.158,3
11.65,13.07,0.8575,5.108,2.85,5.209,5.135,3
12.89,13.77,0.8541,5.495,3.026,6.185,5.316,3
11.56,13.31,0.8198,5.363,2.683,4.062,5.182,3
11.81,13.45,0.8198,5.413,2.716,4.898,5.352,3
10.91,12.8,0.8372,5.088,2.675,4.179,4.956,3
11.23,12.82,0.8594,5.089,2.821,7.524,4.957,3
10.59,12.41,0.8648,4.899,2.787,4.975,4.794,3
10.93,12.8,0.839,5.046,2.717,5.398,5.045,3
11.27,12.86,0.8563,5.091,2.804,3.985,5.001,3
11.87,13.02,0.8795,5.132,2.953,3.597,5.132,3
10.82,12.83,0.8256,5.18,2.63,4.853,5.089,3
12.11,13.27,0.8639,5.236,2.975,4.132,5.012,3
12.8,13.47,0.886,5.16,3.126,4.873,4.914,3
12.79,13.53,0.8786,5.224,3.054,5.483,4.958,3
13.37,13.78,0.8849,5.32,3.128,4.67,5.091,3
12.62,13.67,0.8481,5.41,2.911,3.306,5.231,3
12.76,13.38,0.8964,5.073,3.155,2.828,4.83,3
12.38,13.44,0.8609,5.219,2.989,5.472,5.045,3
12.67,13.32,0.8977,4.984,3.135,2.3,4.745,3
11.18,12.72,0.868,5.009,2.81,4.051,4.828,3
12.7,13.41,0.8874,5.183,3.091,8.456,5.00,3
12.37,13.47,0.8567,5.204,2.96,3.919,5.001,3
12.19,13.2,0.8783,5.137,2.981,3.631,4.87,3
11.23,12.88,0.8511,5.14,2.795,4.325,5.003,3
13.2,13.66,0.8883,5.236,3.232,8.315,5.056,3
11.84,13.21,0.8521,5.175,2.836,3.598,5.044,3
12.3,13.34,0.8684,5.243,2.974,5.637,5.063,3
--------------------------------------------------------------------------------