├── images
│   └── NN.png
├── .gitignore
├── LICENSE
├── README.md
├── src
│   ├── utils.py
│   └── NN.py
├── NN_scratch.py
└── data
    └── seeds_dataset.csv
/images/NN.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ankonzoid/NN-scratch/HEAD/images/NN.png
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /target/
2 | .settings/org.scala*
3 | *.log
4 | .cache*
5 | .idea
6 | *.iml
7 | log.txt
8 | .DS_Store
9 | *.pyc
10 | __pycache__/
11 | *.swp
12 | *.swo
13 | .ipynb_checkpoints/
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2017 Anson Wong
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Coding up a Neural Network classifier from scratch
2 |
3 | ![Neural network schematic](images/NN.png)
4 |
5 | We train a multi-layer fully-connected neural network from scratch to classify
6 | the seeds dataset (https://archive.ics.uci.edu/ml/datasets/seeds).
7 | The model assumes an L2 loss function, sigmoid activations, and no bias terms.
8 | The weights are optimized by per-sample gradient descent via the delta rule
9 | (see the update-rule sketch at the end of this README).
10 |
11 | ### Usage
12 |
13 | Run:
14 | ```
15 | python3 NN_scratch.py
16 | ```
17 |
18 | The output should look like:
19 |
20 | ```
21 | Reading 'data/seeds_dataset.csv'...
22 | -> X.shape = (210, 7), y.shape = (210,), n_classes = 3
23 |
24 | Neural network model:
25 | input_dim = 7
26 | hidden_layers = [5]
27 | output_dim = 3
28 | eta = 0.1
29 | n_epochs = 400
30 | n_folds = 4
31 | seed_crossval = 1
32 | seed_weights = 1
33 |
34 | Cross-validating with 4 folds...
35 | Fold 1/4: acc_train = 98.10%, acc_valid = 94.23% (n_train = 158, n_valid = 52)
36 | Fold 2/4: acc_train = 98.10%, acc_valid = 98.08% (n_train = 158, n_valid = 52)
37 | Fold 3/4: acc_train = 98.73%, acc_valid = 96.15% (n_train = 158, n_valid = 52)
38 | Fold 4/4: acc_train = 98.73%, acc_valid = 94.23% (n_train = 158, n_valid = 52)
39 | -> acc_train_avg = 98.42%, acc_valid_avg = 95.67%
40 | ```
41 |
42 | ### Required libraries
43 |
44 | * numpy, pandas
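45 |
46 | ### Appendix: the delta rule, briefly
47 |
48 | A rough sketch (the helper below is hypothetical, written to mirror the
49 | conventions of `src/NN.py`, not code taken from it): for a sigmoid unit with
50 | output `o = sigmoid(w·x)` and squared error `E = (o - t)^2 / 2` against a
51 | target `t`, the chain rule gives `dE/dw_j = (o - t) * o * (1 - o) * x_j`.
52 | The delta rule folds the first two factors into `delta = (o - t) * o * (1 - o)`
53 | and steps each weight downhill:
54 |
55 | ```python
56 | def delta_rule_update(w, x, o, t, eta):
57 |     # delta = (output - target) * sigmoid'(z), with sigmoid'(z) = o * (1 - o)
58 |     delta = (o - t) * o * (1.0 - o)
59 |     # gradient-descent step: w_j <- w_j - eta * delta * x_j
60 |     return [w_j - eta * delta * x_j for w_j, x_j in zip(w, x)]
61 | ```
62 |
63 | Hidden units use the same update, except their error term is the weighted sum
64 | of the deltas from the layer above; that is the backward pass in `src/NN.py`.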
--------------------------------------------------------------------------------
/src/utils.py:
--------------------------------------------------------------------------------
1 | """
2 |
3 | utils.py (author: Anson Wong / git: ankonzoid)
4 |
5 | """
6 | import numpy as np
7 | import pandas as pd
8 |
9 | # Read a CSV file and return (X, y, n_classes)
10 | def read_csv(csv_filename, target_name="y", normalize=False):
11 | df = pd.read_csv(csv_filename, delimiter=",", dtype={target_name: str})
12 | if list(df.columns.values).count(target_name) != 1: # check target
13 | raise Exception("Need exactly 1 count of '{}' in {}".format(target_name, csv_filename))
14 | target2idx = {target: idx for idx, target in enumerate(sorted(list(set(df[target_name].values))))}
15 | X = df.drop([target_name], axis=1).values
16 | y = np.vectorize(lambda x: target2idx[x])(df[target_name].values)
17 | n_classes = len(target2idx.keys())
18 | if X.shape[0] != y.shape[0]:
19 | raise Exception("X.shape = {} and y.shape = {} are inconsistent!".format(X.shape, y.shape))
20 | if normalize:
21 | X = (X - X.mean(axis=0)) / X.std(axis=0)
22 | return X, y, n_classes
23 |
24 | # Randomly permute indices 0..N-1 and split them into n_folds folds of N // n_folds indices each (any remainder is dropped)
25 | def crossval_folds(N, n_folds, seed=1):
26 | np.random.seed(seed)
27 | idx_all_permute = np.random.permutation(N)
28 |     N_fold = N // n_folds  # integer fold size
29 | idx_folds = []
30 | for i in range(n_folds):
31 | start = i*N_fold
32 |         end = min((i+1)*N_fold, N)
33 | idx_folds.append(idx_all_permute[start:end])
34 | return idx_folds
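35 |
36 | # A minimal usage sketch (hypothetical; the real caller is NN_scratch.py):
37 | if __name__ == "__main__":
38 |     folds = crossval_folds(10, 3, seed=1)
39 |     # 10 // 3 == 3 indices per fold; the leftover 10th index is dropped
40 |     assert [len(f) for f in folds] == [3, 3, 3]
41 |     print("folds:", folds)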
--------------------------------------------------------------------------------
/NN_scratch.py:
--------------------------------------------------------------------------------
1 | """
2 |
3 | NN_scratch.py (author: Anson Wong / git: ankonzoid)
4 |
5 | We train a multi-layer fully-connected neural network from scratch to classify
6 | the seeds dataset (https://archive.ics.uci.edu/ml/datasets/seeds). An L2 loss
7 | function, sigmoid activation, and no bias terms are assumed. The weight
8 | optimization is gradient descent via the delta rule.
9 |
10 | """
11 | import numpy as np
12 | from src.NN import NN
13 | import src.utils as utils
14 |
15 | # Settings
16 | csv_filename = "data/seeds_dataset.csv"
17 | hidden_layers = [5] # number of nodes in hidden layers i.e. [layer1, layer2, ...]
18 | eta = 0.1 # learning rate
19 | n_epochs = 400 # number of training epochs
20 | n_folds = 4 # number of folds for cross-validation
21 | seed_crossval = 1 # seed for cross-validation
22 | seed_weights = 1 # seed for NN weight initialization
23 |
24 | # Read csv data + normalize features
25 | print("Reading '{}'...".format(csv_filename))
26 | X, y, n_classes = utils.read_csv(csv_filename, target_name="y", normalize=True)
27 | print(" -> X.shape = {}, y.shape = {}, n_classes = {}\n".format(X.shape, y.shape, n_classes))
28 | N, d = X.shape
29 |
30 | print("Neural network model:")
31 | print(" input_dim = {}".format(d))
32 | print(" hidden_layers = {}".format(hidden_layers))
33 | print(" output_dim = {}".format(n_classes))
34 | print(" eta = {}".format(eta))
35 | print(" n_epochs = {}".format(n_epochs))
36 | print(" n_folds = {}".format(n_folds))
37 | print(" seed_crossval = {}".format(seed_crossval))
38 | print(" seed_weights = {}\n".format(seed_weights))
39 |
40 | # Create cross-validation folds
41 | idx_all = np.arange(0, N)
42 | idx_folds = utils.crossval_folds(N, n_folds, seed=seed_crossval)  # list of index arrays, one per fold
43 |
44 | # Train/evaluate the model on each fold
45 | acc_train, acc_valid = list(), list()
46 | print("Cross-validating with {} folds...".format(len(idx_folds)))
47 | for i, idx_valid in enumerate(idx_folds):
48 |
49 |     # Collect training and validation data (train = all indices outside this fold)
50 | idx_train = np.delete(idx_all, idx_valid)
51 | X_train, y_train = X[idx_train], y[idx_train]
52 | X_valid, y_valid = X[idx_valid], y[idx_valid]
53 |
54 | # Build neural network classifier model and train
55 | model = NN(input_dim=d, output_dim=n_classes,
56 | hidden_layers=hidden_layers, seed=seed_weights)
57 | model.train(X_train, y_train, eta=eta, n_epochs=n_epochs)
58 |
59 |     # Make predictions for the training and validation data
60 | ypred_train = model.predict(X_train)
61 | ypred_valid = model.predict(X_valid)
62 |
63 |     # Compute training/validation accuracy from the predicted labels
64 | acc_train.append(100*np.sum(y_train==ypred_train)/len(y_train))
65 | acc_valid.append(100*np.sum(y_valid==ypred_valid)/len(y_valid))
66 |
67 | # Print cross-validation result
68 | print(" Fold {}/{}: acc_train = {:.2f}%, acc_valid = {:.2f}% (n_train = {}, n_valid = {})".format(
69 | i+1, n_folds, acc_train[-1], acc_valid[-1], len(X_train), len(X_valid)))
70 |
71 | # Print results
72 | print(" -> acc_train_avg = {:.2f}%, acc_valid_avg = {:.2f}%".format(
73 | sum(acc_train)/float(len(acc_train)), sum(acc_valid)/float(len(acc_valid))))
--------------------------------------------------------------------------------
/src/NN.py:
--------------------------------------------------------------------------------
1 | """
2 |
3 | NN.py (author: Anson Wong / git: ankonzoid)
4 |
5 | """
6 | import math, random
7 | import numpy as np
8 |
9 | class NN:
10 |
11 | def __init__(self, input_dim=None, output_dim=None, hidden_layers=None, seed=1):
12 | if (input_dim is None) or (output_dim is None) or (hidden_layers is None):
13 | raise Exception("Invalid arguments given!")
14 | self.input_dim = input_dim # number of input nodes
15 | self.output_dim = output_dim # number of output nodes
16 | self.hidden_layers = hidden_layers # number of hidden nodes @ each layer
17 | self.network = self._build_network(seed=seed)
18 |
19 | # Train network
20 | def train(self, X, y, eta=0.5, n_epochs=200):
21 | for epoch in range(n_epochs):
22 | for (x_, y_) in zip(X, y):
23 | self._forward_pass(x_) # forward pass (update node["output"])
24 | yhot_ = self._one_hot_encoding(y_, self.output_dim) # one-hot target
25 | self._backward_pass(yhot_) # backward pass error (update node["delta"])
26 | self._update_weights(x_, eta) # update weights (update node["weight"])
27 |
28 |     # Predict class as the argmax of the output-layer activations
29 | def predict(self, X):
30 |         ypred = np.array([np.argmax(self._forward_pass(x_)) for x_ in X], dtype=int)  # np.int was removed in NumPy >= 1.24
31 | return ypred
32 |
33 | # ==============================
34 | #
35 | # Internal functions
36 | #
37 | # ==============================
38 |
39 | # Build fully-connected neural network (no bias terms)
40 | def _build_network(self, seed=1):
41 | random.seed(seed)
42 |
43 | # Create a single fully-connected layer
44 | def _layer(input_dim, output_dim):
45 | layer = []
46 | for i in range(output_dim):
47 |                 weights = [random.random() for _ in range(input_dim)]  # sample Uniform(0,1)
48 | node = {"weights": weights, # list of weights
49 | "output": None, # scalar
50 | "delta": None} # scalar
51 | layer.append(node)
52 | return layer
53 |
54 | # Stack layers (input -> hidden -> output)
55 | network = []
56 | if len(self.hidden_layers) == 0:
57 | network.append(_layer(self.input_dim, self.output_dim))
58 | else:
59 | network.append(_layer(self.input_dim, self.hidden_layers[0]))
60 | for i in range(1, len(self.hidden_layers)):
61 | network.append(_layer(self.hidden_layers[i-1], self.hidden_layers[i]))
62 | network.append(_layer(self.hidden_layers[-1], self.output_dim))
63 |
64 | return network
65 |
66 | # Forward-pass (updates node['output'])
67 | def _forward_pass(self, x):
68 | transfer = self._sigmoid
69 | x_in = x
70 | for layer in self.network:
71 | x_out = []
72 | for node in layer:
73 | node['output'] = transfer(self._dotprod(node['weights'], x_in))
74 | x_out.append(node['output'])
75 | x_in = x_out # set output as next input
76 | return x_in
77 |
78 | # Backward-pass (updates node['delta'], L2 loss is assumed)
79 | def _backward_pass(self, yhot):
80 |         transfer_derivative = self._sigmoid_derivative  # sigmoid' expressed via the sigmoid output: s*(1-s)
81 | n_layers = len(self.network)
82 | for i in reversed(range(n_layers)): # traverse backwards
83 | if i == n_layers - 1:
84 |                 # Output layer: error = activation minus one-hot target
85 | for j, node in enumerate(self.network[i]):
86 | err = node['output'] - yhot[j]
87 | node['delta'] = err * transfer_derivative(node['output'])
88 | else:
89 | # Weighted sum of deltas from upper layer
90 | for j, node in enumerate(self.network[i]):
91 | err = sum([node_['weights'][j] * node_['delta'] for node_ in self.network[i+1]])
92 | node['delta'] = err * transfer_derivative(node['output'])
93 |
94 |     # Update weights (updates node['weights'])
95 | def _update_weights(self, x, eta):
96 | for i, layer in enumerate(self.network):
97 | # Grab input values
98 | if i == 0: inputs = x
99 | else: inputs = [node_['output'] for node_ in self.network[i-1]]
100 | # Update weights
101 | for node in layer:
102 |                 for j, x_j in enumerate(inputs):  # avoid shadowing the built-in 'input'
103 |                     # dw = - learning_rate * (error * transfer') * input
104 |                     node['weights'][j] += - eta * node['delta'] * x_j
105 |
106 | # Dot product
107 | def _dotprod(self, a, b):
108 | return sum([a_ * b_ for (a_, b_) in zip(a, b)])
109 |
110 | # Sigmoid (activation function)
111 | def _sigmoid(self, x):
112 | return 1.0/(1.0+math.exp(-x))
113 |
114 | # Sigmoid derivative
115 | def _sigmoid_derivative(self, sigmoid):
116 | return sigmoid*(1.0-sigmoid)
117 |
118 | # One-hot encoding
119 | def _one_hot_encoding(self, idx, output_dim):
120 |         x = np.zeros(output_dim, dtype=int)
121 | x[idx] = 1
122 | return x
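123 |
124 | # A minimal smoke test (hypothetical; the real entry point is NN_scratch.py):
125 | if __name__ == "__main__":
126 |     np.random.seed(0)
127 |     X = np.random.rand(20, 4)            # 20 samples, 4 features in [0, 1)
128 |     y = (X[:, 0] > 0.5).astype(int)      # binary labels from the first feature
129 |     model = NN(input_dim=4, output_dim=2, hidden_layers=[3], seed=1)
130 |     model.train(X, y, eta=0.5, n_epochs=100)
131 |     acc = 100 * np.mean(model.predict(X) == y)
132 |     print("train accuracy = {:.1f}%".format(acc))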
--------------------------------------------------------------------------------
/data/seeds_dataset.csv:
--------------------------------------------------------------------------------
1 | x1,x2,x3,x4,x5,x6,x7,y
2 | 15.26,14.84,0.871,5.763,3.312,2.221,5.22,1
3 | 14.88,14.57,0.8811,5.554,3.333,1.018,4.956,1
4 | 14.29,14.09,0.905,5.291,3.337,2.699,4.825,1
5 | 13.84,13.94,0.8955,5.324,3.379,2.259,4.805,1
6 | 16.14,14.99,0.9034,5.658,3.562,1.355,5.175,1
7 | 14.38,14.21,0.8951,5.386,3.312,2.462,4.956,1
8 | 14.69,14.49,0.8799,5.563,3.259,3.586,5.219,1
9 | 14.11,14.1,0.8911,5.42,3.302,2.7,5,1
10 | 16.63,15.46,0.8747,6.053,3.465,2.04,5.877,1
11 | 16.44,15.25,0.888,5.884,3.505,1.969,5.533,1
12 | 15.26,14.85,0.8696,5.714,3.242,4.543,5.314,1
13 | 14.03,14.16,0.8796,5.438,3.201,1.717,5.001,1
14 | 13.89,14.02,0.888,5.439,3.199,3.986,4.738,1
15 | 13.78,14.06,0.8759,5.479,3.156,3.136,4.872,1
16 | 13.74,14.05,0.8744,5.482,3.114,2.932,4.825,1
17 | 14.59,14.28,0.8993,5.351,3.333,4.185,4.781,1
18 | 13.99,13.83,0.9183,5.119,3.383,5.234,4.781,1
19 | 15.69,14.75,0.9058,5.527,3.514,1.599,5.046,1
20 | 14.7,14.21,0.9153,5.205,3.466,1.767,4.649,1
21 | 12.72,13.57,0.8686,5.226,3.049,4.102,4.914,1
22 | 14.16,14.4,0.8584,5.658,3.129,3.072,5.176,1
23 | 14.11,14.26,0.8722,5.52,3.168,2.688,5.219,1
24 | 15.88,14.9,0.8988,5.618,3.507,0.7651,5.091,1
25 | 12.08,13.23,0.8664,5.099,2.936,1.415,4.961,1
26 | 15.01,14.76,0.8657,5.789,3.245,1.791,5.001,1
27 | 16.19,15.16,0.8849,5.833,3.421,0.903,5.307,1
28 | 13.02,13.76,0.8641,5.395,3.026,3.373,4.825,1
29 | 12.74,13.67,0.8564,5.395,2.956,2.504,4.869,1
30 | 14.11,14.18,0.882,5.541,3.221,2.754,5.038,1
31 | 13.45,14.02,0.8604,5.516,3.065,3.531,5.097,1
32 | 13.16,13.82,0.8662,5.454,2.975,0.8551,5.056,1
33 | 15.49,14.94,0.8724,5.757,3.371,3.412,5.228,1
34 | 14.09,14.41,0.8529,5.717,3.186,3.92,5.299,1
35 | 13.94,14.17,0.8728,5.585,3.15,2.124,5.012,1
36 | 15.05,14.68,0.8779,5.712,3.328,2.129,5.36,1
37 | 16.12,15,0.9,5.709,3.485,2.27,5.443,1
38 | 16.2,15.27,0.8734,5.826,3.464,2.823,5.527,1
39 | 17.08,15.38,0.9079,5.832,3.683,2.956,5.484,1
40 | 14.8,14.52,0.8823,5.656,3.288,3.112,5.309,1
41 | 14.28,14.17,0.8944,5.397,3.298,6.685,5.001,1
42 | 13.54,13.85,0.8871,5.348,3.156,2.587,5.178,1
43 | 13.5,13.85,0.8852,5.351,3.158,2.249,5.176,1
44 | 13.16,13.55,0.9009,5.138,3.201,2.461,4.783,1
45 | 15.5,14.86,0.882,5.877,3.396,4.711,5.528,1
46 | 15.11,14.54,0.8986,5.579,3.462,3.128,5.18,1
47 | 13.8,14.04,0.8794,5.376,3.155,1.56,4.961,1
48 | 15.36,14.76,0.8861,5.701,3.393,1.367,5.132,1
49 | 14.99,14.56,0.8883,5.57,3.377,2.958,5.175,1
50 | 14.79,14.52,0.8819,5.545,3.291,2.704,5.111,1
51 | 14.86,14.67,0.8676,5.678,3.258,2.129,5.351,1
52 | 14.43,14.4,0.8751,5.585,3.272,3.975,5.144,1
53 | 15.78,14.91,0.8923,5.674,3.434,5.593,5.136,1
54 | 14.49,14.61,0.8538,5.715,3.113,4.116,5.396,1
55 | 14.33,14.28,0.8831,5.504,3.199,3.328,5.224,1
56 | 14.52,14.6,0.8557,5.741,3.113,1.481,5.487,1
57 | 15.03,14.77,0.8658,5.702,3.212,1.933,5.439,1
58 | 14.46,14.35,0.8818,5.388,3.377,2.802,5.044,1
59 | 14.92,14.43,0.9006,5.384,3.412,1.142,5.088,1
60 | 15.38,14.77,0.8857,5.662,3.419,1.999,5.222,1
61 | 12.11,13.47,0.8392,5.159,3.032,1.502,4.519,1
62 | 11.42,12.86,0.8683,5.008,2.85,2.7,4.607,1
63 | 11.23,12.63,0.884,4.902,2.879,2.269,4.703,1
64 | 12.36,13.19,0.8923,5.076,3.042,3.22,4.605,1
65 | 13.22,13.84,0.868,5.395,3.07,4.157,5.088,1
66 | 12.78,13.57,0.8716,5.262,3.026,1.176,4.782,1
67 | 12.88,13.5,0.8879,5.139,3.119,2.352,4.607,1
68 | 14.34,14.37,0.8726,5.63,3.19,1.313,5.15,1
69 | 14.01,14.29,0.8625,5.609,3.158,2.217,5.132,1
70 | 14.37,14.39,0.8726,5.569,3.153,1.464,5.3,1
71 | 12.73,13.75,0.8458,5.412,2.882,3.533,5.067,1
72 | 17.63,15.98,0.8673,6.191,3.561,4.076,6.06,2
73 | 16.84,15.67,0.8623,5.998,3.484,4.675,5.877,2
74 | 17.26,15.73,0.8763,5.978,3.594,4.539,5.791,2
75 | 19.11,16.26,0.9081,6.154,3.93,2.936,6.079,2
76 | 16.82,15.51,0.8786,6.017,3.486,4.004,5.841,2
77 | 16.77,15.62,0.8638,5.927,3.438,4.92,5.795,2
78 | 17.32,15.91,0.8599,6.064,3.403,3.824,5.922,2
79 | 20.71,17.23,0.8763,6.579,3.814,4.451,6.451,2
80 | 18.94,16.49,0.875,6.445,3.639,5.064,6.362,2
81 | 17.12,15.55,0.8892,5.85,3.566,2.858,5.746,2
82 | 16.53,15.34,0.8823,5.875,3.467,5.532,5.88,2
83 | 18.72,16.19,0.8977,6.006,3.857,5.324,5.879,2
84 | 20.2,16.89,0.8894,6.285,3.864,5.173,6.187,2
85 | 19.57,16.74,0.8779,6.384,3.772,1.472,6.273,2
86 | 19.51,16.71,0.878,6.366,3.801,2.962,6.185,2
87 | 18.27,16.09,0.887,6.173,3.651,2.443,6.197,2
88 | 18.88,16.26,0.8969,6.084,3.764,1.649,6.109,2
89 | 18.98,16.66,0.859,6.549,3.67,3.691,6.498,2
90 | 21.18,17.21,0.8989,6.573,4.033,5.78,6.231,2
91 | 20.88,17.05,0.9031,6.45,4.032,5.016,6.321,2
92 | 20.1,16.99,0.8746,6.581,3.785,1.955,6.449,2
93 | 18.76,16.2,0.8984,6.172,3.796,3.12,6.053,2
94 | 18.81,16.29,0.8906,6.272,3.693,3.237,6.053,2
95 | 18.59,16.05,0.9066,6.037,3.86,6.001,5.877,2
96 | 18.36,16.52,0.8452,6.666,3.485,4.933,6.448,2
97 | 16.87,15.65,0.8648,6.139,3.463,3.696,5.967,2
98 | 19.31,16.59,0.8815,6.341,3.81,3.477,6.238,2
99 | 18.98,16.57,0.8687,6.449,3.552,2.144,6.453,2
100 | 18.17,16.26,0.8637,6.271,3.512,2.853,6.273,2
101 | 18.72,16.34,0.881,6.219,3.684,2.188,6.097,2
102 | 16.41,15.25,0.8866,5.718,3.525,4.217,5.618,2
103 | 17.99,15.86,0.8992,5.89,3.694,2.068,5.837,2
104 | 19.46,16.5,0.8985,6.113,3.892,4.308,6.009,2
105 | 19.18,16.63,0.8717,6.369,3.681,3.357,6.229,2
106 | 18.95,16.42,0.8829,6.248,3.755,3.368,6.148,2
107 | 18.83,16.29,0.8917,6.037,3.786,2.553,5.879,2
108 | 18.85,16.17,0.9056,6.152,3.806,2.843,6.2,2
109 | 17.63,15.86,0.88,6.033,3.573,3.747,5.929,2
110 | 19.94,16.92,0.8752,6.675,3.763,3.252,6.55,2
111 | 18.55,16.22,0.8865,6.153,3.674,1.738,5.894,2
112 | 18.45,16.12,0.8921,6.107,3.769,2.235,5.794,2
113 | 19.38,16.72,0.8716,6.303,3.791,3.678,5.965,2
114 | 19.13,16.31,0.9035,6.183,3.902,2.109,5.924,2
115 | 19.14,16.61,0.8722,6.259,3.737,6.682,6.053,2
116 | 20.97,17.25,0.8859,6.563,3.991,4.677,6.316,2
117 | 19.06,16.45,0.8854,6.416,3.719,2.248,6.163,2
118 | 18.96,16.2,0.9077,6.051,3.897,4.334,5.75,2
119 | 19.15,16.45,0.889,6.245,3.815,3.084,6.185,2
120 | 18.89,16.23,0.9008,6.227,3.769,3.639,5.966,2
121 | 20.03,16.9,0.8811,6.493,3.857,3.063,6.32,2
122 | 20.24,16.91,0.8897,6.315,3.962,5.901,6.188,2
123 | 18.14,16.12,0.8772,6.059,3.563,3.619,6.011,2
124 | 16.17,15.38,0.8588,5.762,3.387,4.286,5.703,2
125 | 18.43,15.97,0.9077,5.98,3.771,2.984,5.905,2
126 | 15.99,14.89,0.9064,5.363,3.582,3.336,5.144,2
127 | 18.75,16.18,0.8999,6.111,3.869,4.188,5.992,2
128 | 18.65,16.41,0.8698,6.285,3.594,4.391,6.102,2
129 | 17.98,15.85,0.8993,5.979,3.687,2.257,5.919,2
130 | 20.16,17.03,0.8735,6.513,3.773,1.91,6.185,2
131 | 17.55,15.66,0.8991,5.791,3.69,5.366,5.661,2
132 | 18.3,15.89,0.9108,5.979,3.755,2.837,5.962,2
133 | 18.94,16.32,0.8942,6.144,3.825,2.908,5.949,2
134 | 15.38,14.9,0.8706,5.884,3.268,4.462,5.795,2
135 | 16.16,15.33,0.8644,5.845,3.395,4.266,5.795,2
136 | 15.56,14.89,0.8823,5.776,3.408,4.972,5.847,2
137 | 15.38,14.66,0.899,5.477,3.465,3.6,5.439,2
138 | 17.36,15.76,0.8785,6.145,3.574,3.526,5.971,2
139 | 15.57,15.15,0.8527,5.92,3.231,2.64,5.879,2
140 | 15.6,15.11,0.858,5.832,3.286,2.725,5.752,2
141 | 16.23,15.18,0.885,5.872,3.472,3.769,5.922,2
142 | 13.07,13.92,0.848,5.472,2.994,5.304,5.395,3
143 | 13.32,13.94,0.8613,5.541,3.073,7.035,5.44,3
144 | 13.34,13.95,0.862,5.389,3.074,5.995,5.307,3
145 | 12.22,13.32,0.8652,5.224,2.967,5.469,5.221,3
146 | 11.82,13.4,0.8274,5.314,2.777,4.471,5.178,3
147 | 11.21,13.13,0.8167,5.279,2.687,6.169,5.275,3
148 | 11.43,13.13,0.8335,5.176,2.719,2.221,5.132,3
149 | 12.49,13.46,0.8658,5.267,2.967,4.421,5.002,3
150 | 12.7,13.71,0.8491,5.386,2.911,3.26,5.316,3
151 | 10.79,12.93,0.8107,5.317,2.648,5.462,5.194,3
152 | 11.83,13.23,0.8496,5.263,2.84,5.195,5.307,3
153 | 12.01,13.52,0.8249,5.405,2.776,6.992,5.27,3
154 | 12.26,13.6,0.8333,5.408,2.833,4.756,5.36,3
155 | 11.18,13.04,0.8266,5.22,2.693,3.332,5.001,3
156 | 11.36,13.05,0.8382,5.175,2.755,4.048,5.263,3
157 | 11.19,13.05,0.8253,5.25,2.675,5.813,5.219,3
158 | 11.34,12.87,0.8596,5.053,2.849,3.347,5.003,3
159 | 12.13,13.73,0.8081,5.394,2.745,4.825,5.22,3
160 | 11.75,13.52,0.8082,5.444,2.678,4.378,5.31,3
161 | 11.49,13.22,0.8263,5.304,2.695,5.388,5.31,3
162 | 12.54,13.67,0.8425,5.451,2.879,3.082,5.491,3
163 | 12.02,13.33,0.8503,5.35,2.81,4.271,5.308,3
164 | 12.05,13.41,0.8416,5.267,2.847,4.988,5.046,3
165 | 12.55,13.57,0.8558,5.333,2.968,4.419,5.176,3
166 | 11.14,12.79,0.8558,5.011,2.794,6.388,5.049,3
167 | 12.1,13.15,0.8793,5.105,2.941,2.201,5.056,3
168 | 12.44,13.59,0.8462,5.319,2.897,4.924,5.27,3
169 | 12.15,13.45,0.8443,5.417,2.837,3.638,5.338,3
170 | 11.35,13.12,0.8291,5.176,2.668,4.337,5.132,3
171 | 11.24,13,0.8359,5.09,2.715,3.521,5.088,3
172 | 11.02,13,0.8189,5.325,2.701,6.735,5.163,3
173 | 11.55,13.1,0.8455,5.167,2.845,6.715,4.956,3
174 | 11.27,12.97,0.8419,5.088,2.763,4.309,5,3
175 | 11.4,13.08,0.8375,5.136,2.763,5.588,5.089,3
176 | 10.83,12.96,0.8099,5.278,2.641,5.182,5.185,3
177 | 10.8,12.57,0.859,4.981,2.821,4.773,5.063,3
178 | 11.26,13.01,0.8355,5.186,2.71,5.335,5.092,3
179 | 10.74,12.73,0.8329,5.145,2.642,4.702,4.963,3
180 | 11.48,13.05,0.8473,5.18,2.758,5.876,5.002,3
181 | 12.21,13.47,0.8453,5.357,2.893,1.661,5.178,3
182 | 11.41,12.95,0.856,5.09,2.775,4.957,4.825,3
183 | 12.46,13.41,0.8706,5.236,3.017,4.987,5.147,3
184 | 12.19,13.36,0.8579,5.24,2.909,4.857,5.158,3
185 | 11.65,13.07,0.8575,5.108,2.85,5.209,5.135,3
186 | 12.89,13.77,0.8541,5.495,3.026,6.185,5.316,3
187 | 11.56,13.31,0.8198,5.363,2.683,4.062,5.182,3
188 | 11.81,13.45,0.8198,5.413,2.716,4.898,5.352,3
189 | 10.91,12.8,0.8372,5.088,2.675,4.179,4.956,3
190 | 11.23,12.82,0.8594,5.089,2.821,7.524,4.957,3
191 | 10.59,12.41,0.8648,4.899,2.787,4.975,4.794,3
192 | 10.93,12.8,0.839,5.046,2.717,5.398,5.045,3
193 | 11.27,12.86,0.8563,5.091,2.804,3.985,5.001,3
194 | 11.87,13.02,0.8795,5.132,2.953,3.597,5.132,3
195 | 10.82,12.83,0.8256,5.18,2.63,4.853,5.089,3
196 | 12.11,13.27,0.8639,5.236,2.975,4.132,5.012,3
197 | 12.8,13.47,0.886,5.16,3.126,4.873,4.914,3
198 | 12.79,13.53,0.8786,5.224,3.054,5.483,4.958,3
199 | 13.37,13.78,0.8849,5.32,3.128,4.67,5.091,3
200 | 12.62,13.67,0.8481,5.41,2.911,3.306,5.231,3
201 | 12.76,13.38,0.8964,5.073,3.155,2.828,4.83,3
202 | 12.38,13.44,0.8609,5.219,2.989,5.472,5.045,3
203 | 12.67,13.32,0.8977,4.984,3.135,2.3,4.745,3
204 | 11.18,12.72,0.868,5.009,2.81,4.051,4.828,3
205 | 12.7,13.41,0.8874,5.183,3.091,8.456,5,3
206 | 12.37,13.47,0.8567,5.204,2.96,3.919,5.001,3
207 | 12.19,13.2,0.8783,5.137,2.981,3.631,4.87,3
208 | 11.23,12.88,0.8511,5.14,2.795,4.325,5.003,3
209 | 13.2,13.66,0.8883,5.236,3.232,8.315,5.056,3
210 | 11.84,13.21,0.8521,5.175,2.836,3.598,5.044,3
211 | 12.3,13.34,0.8684,5.243,2.974,5.637,5.063,3
--------------------------------------------------------------------------------