├── Datasets
│   ├── pkmn.csv
│   ├── seeds_dataset.csv
│   ├── winequality-red.csv
│   └── winequality-white.csv
├── README.md
└── src
    ├── DataPreparation.py
    ├── Test.py
    ├── __pycache__
    │   ├── ClassificationEvaluator.cpython-36.pyc
    │   ├── DataPreparation.cpython-36.pyc
    │   ├── MultilayerFeedForwardClassifier.cpython-36.pyc
    │   ├── MultilayerFeedForwardRegressor.cpython-36.pyc
    │   ├── MultilayerNnClassifier.cpython-36.pyc
    │   ├── MultilayerNnRegressor.cpython-36.pyc
    │   └── RegressionEvaluator.cpython-36.pyc
    ├── evaluation
    │   ├── ClassificationEvaluator.py
    │   ├── RegressionEvaluator.py
    │   ├── Splitting.py
    │   ├── __init__.py
    │   └── __pycache__
    │       ├── ClassificationEvaluator.cpython-36.pyc
    │       ├── Splitting.cpython-36.pyc
    │       └── __init__.cpython-36.pyc
    └── ml
        ├── MultilayerNnClassifier.py
        ├── MultilayerNnRegressor.py
        ├── __init__.py
        ├── __pycache__
        │   └── __init__.cpython-36.pyc
        └── activation
            ├── ActivationFunction.py
            ├── Linear.py
            ├── ReLU.py
            ├── Sigmoid.py
            ├── Tanh.py
            └── __init__.py
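
The first file dumped below, seeds_dataset.csv, matches the UCI Seeds data: each headerless row is seven real-valued wheat-kernel measurements (area, perimeter, compactness, kernel length, kernel width, asymmetry coefficient, kernel groove length) followed by a variety label in {1, 2, 3}. A minimal, illustrative loading sketch in plain Python follows; the repo's own DataPreparation.py is not reproduced in this dump, so this is not its actual API:

import csv

def load_csv(path):
    """Read a headerless comma-separated file into features X and targets y."""
    X, y = [], []
    with open(path, newline="") as f:
        for row in csv.reader(f):
            if not row:                              # skip blank lines
                continue
            X.append([float(v) for v in row[:-1]])   # all but last column
            y.append(float(row[-1]))                 # last column = label/score
    return X, y

X, y = load_csv("Datasets/seeds_dataset.csv")
print(len(X), "samples,", len(X[0]), "features")     # 210 samples, 7 features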
/Datasets/seeds_dataset.csv:
--------------------------------------------------------------------------------
1 | 15.26,14.84,0.871,5.763,3.312,2.221,5.22,1
2 | 14.88,14.57,0.8811,5.554,3.333,1.018,4.956,1
3 | 14.29,14.09,0.905,5.291,3.337,2.699,4.825,1
4 | 13.84,13.94,0.8955,5.324,3.379,2.259,4.805,1
5 | 16.14,14.99,0.9034,5.658,3.562,1.355,5.175,1
6 | 14.38,14.21,0.8951,5.386,3.312,2.462,4.956,1
7 | 14.69,14.49,0.8799,5.563,3.259,3.586,5.219,1
8 | 14.11,14.1,0.8911,5.42,3.302,2.7,5,1
9 | 16.63,15.46,0.8747,6.053,3.465,2.04,5.877,1
10 | 16.44,15.25,0.888,5.884,3.505,1.969,5.533,1
11 | 15.26,14.85,0.8696,5.714,3.242,4.543,5.314,1
12 | 14.03,14.16,0.8796,5.438,3.201,1.717,5.001,1
13 | 13.89,14.02,0.888,5.439,3.199,3.986,4.738,1
14 | 13.78,14.06,0.8759,5.479,3.156,3.136,4.872,1
15 | 13.74,14.05,0.8744,5.482,3.114,2.932,4.825,1
16 | 14.59,14.28,0.8993,5.351,3.333,4.185,4.781,1
17 | 13.99,13.83,0.9183,5.119,3.383,5.234,4.781,1
18 | 15.69,14.75,0.9058,5.527,3.514,1.599,5.046,1
19 | 14.7,14.21,0.9153,5.205,3.466,1.767,4.649,1
20 | 12.72,13.57,0.8686,5.226,3.049,4.102,4.914,1
21 | 14.16,14.4,0.8584,5.658,3.129,3.072,5.176,1
22 | 14.11,14.26,0.8722,5.52,3.168,2.688,5.219,1
23 | 15.88,14.9,0.8988,5.618,3.507,0.7651,5.091,1
24 | 12.08,13.23,0.8664,5.099,2.936,1.415,4.961,1
25 | 15.01,14.76,0.8657,5.789,3.245,1.791,5.001,1
26 | 16.19,15.16,0.8849,5.833,3.421,0.903,5.307,1
27 | 13.02,13.76,0.8641,5.395,3.026,3.373,4.825,1
28 | 12.74,13.67,0.8564,5.395,2.956,2.504,4.869,1
29 | 14.11,14.18,0.882,5.541,3.221,2.754,5.038,1
30 | 13.45,14.02,0.8604,5.516,3.065,3.531,5.097,1
31 | 13.16,13.82,0.8662,5.454,2.975,0.8551,5.056,1
32 | 15.49,14.94,0.8724,5.757,3.371,3.412,5.228,1
33 | 14.09,14.41,0.8529,5.717,3.186,3.92,5.299,1
34 | 13.94,14.17,0.8728,5.585,3.15,2.124,5.012,1
35 | 15.05,14.68,0.8779,5.712,3.328,2.129,5.36,1
36 | 16.12,15,0.9,5.709,3.485,2.27,5.443,1
37 | 16.2,15.27,0.8734,5.826,3.464,2.823,5.527,1
38 | 17.08,15.38,0.9079,5.832,3.683,2.956,5.484,1
39 | 14.8,14.52,0.8823,5.656,3.288,3.112,5.309,1
40 | 14.28,14.17,0.8944,5.397,3.298,6.685,5.001,1
41 | 13.54,13.85,0.8871,5.348,3.156,2.587,5.178,1
42 | 13.5,13.85,0.8852,5.351,3.158,2.249,5.176,1
43 | 13.16,13.55,0.9009,5.138,3.201,2.461,4.783,1
44 | 15.5,14.86,0.882,5.877,3.396,4.711,5.528,1
45 | 15.11,14.54,0.8986,5.579,3.462,3.128,5.18,1
46 | 13.8,14.04,0.8794,5.376,3.155,1.56,4.961,1
47 | 15.36,14.76,0.8861,5.701,3.393,1.367,5.132,1
48 | 14.99,14.56,0.8883,5.57,3.377,2.958,5.175,1
49 | 14.79,14.52,0.8819,5.545,3.291,2.704,5.111,1
50 | 14.86,14.67,0.8676,5.678,3.258,2.129,5.351,1
51 | 14.43,14.4,0.8751,5.585,3.272,3.975,5.144,1
52 | 15.78,14.91,0.8923,5.674,3.434,5.593,5.136,1
53 | 14.49,14.61,0.8538,5.715,3.113,4.116,5.396,1
54 | 14.33,14.28,0.8831,5.504,3.199,3.328,5.224,1
55 | 14.52,14.6,0.8557,5.741,3.113,1.481,5.487,1
56 | 15.03,14.77,0.8658,5.702,3.212,1.933,5.439,1
57 | 14.46,14.35,0.8818,5.388,3.377,2.802,5.044,1
58 | 14.92,14.43,0.9006,5.384,3.412,1.142,5.088,1
59 | 15.38,14.77,0.8857,5.662,3.419,1.999,5.222,1
60 | 12.11,13.47,0.8392,5.159,3.032,1.502,4.519,1
61 | 11.42,12.86,0.8683,5.008,2.85,2.7,4.607,1
62 | 11.23,12.63,0.884,4.902,2.879,2.269,4.703,1
63 | 12.36,13.19,0.8923,5.076,3.042,3.22,4.605,1
64 | 13.22,13.84,0.868,5.395,3.07,4.157,5.088,1
65 | 12.78,13.57,0.8716,5.262,3.026,1.176,4.782,1
66 | 12.88,13.5,0.8879,5.139,3.119,2.352,4.607,1
67 | 14.34,14.37,0.8726,5.63,3.19,1.313,5.15,1
68 | 14.01,14.29,0.8625,5.609,3.158,2.217,5.132,1
69 | 14.37,14.39,0.8726,5.569,3.153,1.464,5.3,1
70 | 12.73,13.75,0.8458,5.412,2.882,3.533,5.067,1
71 | 17.63,15.98,0.8673,6.191,3.561,4.076,6.06,2
72 | 16.84,15.67,0.8623,5.998,3.484,4.675,5.877,2
73 | 17.26,15.73,0.8763,5.978,3.594,4.539,5.791,2
74 | 19.11,16.26,0.9081,6.154,3.93,2.936,6.079,2
75 | 16.82,15.51,0.8786,6.017,3.486,4.004,5.841,2
76 | 16.77,15.62,0.8638,5.927,3.438,4.92,5.795,2
77 | 17.32,15.91,0.8599,6.064,3.403,3.824,5.922,2
78 | 20.71,17.23,0.8763,6.579,3.814,4.451,6.451,2
79 | 18.94,16.49,0.875,6.445,3.639,5.064,6.362,2
80 | 17.12,15.55,0.8892,5.85,3.566,2.858,5.746,2
81 | 16.53,15.34,0.8823,5.875,3.467,5.532,5.88,2
82 | 18.72,16.19,0.8977,6.006,3.857,5.324,5.879,2
83 | 20.2,16.89,0.8894,6.285,3.864,5.173,6.187,2
84 | 19.57,16.74,0.8779,6.384,3.772,1.472,6.273,2
85 | 19.51,16.71,0.878,6.366,3.801,2.962,6.185,2
86 | 18.27,16.09,0.887,6.173,3.651,2.443,6.197,2
87 | 18.88,16.26,0.8969,6.084,3.764,1.649,6.109,2
88 | 18.98,16.66,0.859,6.549,3.67,3.691,6.498,2
89 | 21.18,17.21,0.8989,6.573,4.033,5.78,6.231,2
90 | 20.88,17.05,0.9031,6.45,4.032,5.016,6.321,2
91 | 20.1,16.99,0.8746,6.581,3.785,1.955,6.449,2
92 | 18.76,16.2,0.8984,6.172,3.796,3.12,6.053,2
93 | 18.81,16.29,0.8906,6.272,3.693,3.237,6.053,2
94 | 18.59,16.05,0.9066,6.037,3.86,6.001,5.877,2
95 | 18.36,16.52,0.8452,6.666,3.485,4.933,6.448,2
96 | 16.87,15.65,0.8648,6.139,3.463,3.696,5.967,2
97 | 19.31,16.59,0.8815,6.341,3.81,3.477,6.238,2
98 | 18.98,16.57,0.8687,6.449,3.552,2.144,6.453,2
99 | 18.17,16.26,0.8637,6.271,3.512,2.853,6.273,2
100 | 18.72,16.34,0.881,6.219,3.684,2.188,6.097,2
101 | 16.41,15.25,0.8866,5.718,3.525,4.217,5.618,2
102 | 17.99,15.86,0.8992,5.89,3.694,2.068,5.837,2
103 | 19.46,16.5,0.8985,6.113,3.892,4.308,6.009,2
104 | 19.18,16.63,0.8717,6.369,3.681,3.357,6.229,2
105 | 18.95,16.42,0.8829,6.248,3.755,3.368,6.148,2
106 | 18.83,16.29,0.8917,6.037,3.786,2.553,5.879,2
107 | 18.85,16.17,0.9056,6.152,3.806,2.843,6.2,2
108 | 17.63,15.86,0.88,6.033,3.573,3.747,5.929,2
109 | 19.94,16.92,0.8752,6.675,3.763,3.252,6.55,2
110 | 18.55,16.22,0.8865,6.153,3.674,1.738,5.894,2
111 | 18.45,16.12,0.8921,6.107,3.769,2.235,5.794,2
112 | 19.38,16.72,0.8716,6.303,3.791,3.678,5.965,2
113 | 19.13,16.31,0.9035,6.183,3.902,2.109,5.924,2
114 | 19.14,16.61,0.8722,6.259,3.737,6.682,6.053,2
115 | 20.97,17.25,0.8859,6.563,3.991,4.677,6.316,2
116 | 19.06,16.45,0.8854,6.416,3.719,2.248,6.163,2
117 | 18.96,16.2,0.9077,6.051,3.897,4.334,5.75,2
118 | 19.15,16.45,0.889,6.245,3.815,3.084,6.185,2
119 | 18.89,16.23,0.9008,6.227,3.769,3.639,5.966,2
120 | 20.03,16.9,0.8811,6.493,3.857,3.063,6.32,2
121 | 20.24,16.91,0.8897,6.315,3.962,5.901,6.188,2
122 | 18.14,16.12,0.8772,6.059,3.563,3.619,6.011,2
123 | 16.17,15.38,0.8588,5.762,3.387,4.286,5.703,2
124 | 18.43,15.97,0.9077,5.98,3.771,2.984,5.905,2
125 | 15.99,14.89,0.9064,5.363,3.582,3.336,5.144,2
126 | 18.75,16.18,0.8999,6.111,3.869,4.188,5.992,2
127 | 18.65,16.41,0.8698,6.285,3.594,4.391,6.102,2
128 | 17.98,15.85,0.8993,5.979,3.687,2.257,5.919,2
129 | 20.16,17.03,0.8735,6.513,3.773,1.91,6.185,2
130 | 17.55,15.66,0.8991,5.791,3.69,5.366,5.661,2
131 | 18.3,15.89,0.9108,5.979,3.755,2.837,5.962,2
132 | 18.94,16.32,0.8942,6.144,3.825,2.908,5.949,2
133 | 15.38,14.9,0.8706,5.884,3.268,4.462,5.795,2
134 | 16.16,15.33,0.8644,5.845,3.395,4.266,5.795,2
135 | 15.56,14.89,0.8823,5.776,3.408,4.972,5.847,2
136 | 15.38,14.66,0.899,5.477,3.465,3.6,5.439,2
137 | 17.36,15.76,0.8785,6.145,3.574,3.526,5.971,2
138 | 15.57,15.15,0.8527,5.92,3.231,2.64,5.879,2
139 | 15.6,15.11,0.858,5.832,3.286,2.725,5.752,2
140 | 16.23,15.18,0.885,5.872,3.472,3.769,5.922,2
141 | 13.07,13.92,0.848,5.472,2.994,5.304,5.395,3
142 | 13.32,13.94,0.8613,5.541,3.073,7.035,5.44,3
143 | 13.34,13.95,0.862,5.389,3.074,5.995,5.307,3
144 | 12.22,13.32,0.8652,5.224,2.967,5.469,5.221,3
145 | 11.82,13.4,0.8274,5.314,2.777,4.471,5.178,3
146 | 11.21,13.13,0.8167,5.279,2.687,6.169,5.275,3
147 | 11.43,13.13,0.8335,5.176,2.719,2.221,5.132,3
148 | 12.49,13.46,0.8658,5.267,2.967,4.421,5.002,3
149 | 12.7,13.71,0.8491,5.386,2.911,3.26,5.316,3
150 | 10.79,12.93,0.8107,5.317,2.648,5.462,5.194,3
151 | 11.83,13.23,0.8496,5.263,2.84,5.195,5.307,3
152 | 12.01,13.52,0.8249,5.405,2.776,6.992,5.27,3
153 | 12.26,13.6,0.8333,5.408,2.833,4.756,5.36,3
154 | 11.18,13.04,0.8266,5.22,2.693,3.332,5.001,3
155 | 11.36,13.05,0.8382,5.175,2.755,4.048,5.263,3
156 | 11.19,13.05,0.8253,5.25,2.675,5.813,5.219,3
157 | 11.34,12.87,0.8596,5.053,2.849,3.347,5.003,3
158 | 12.13,13.73,0.8081,5.394,2.745,4.825,5.22,3
159 | 11.75,13.52,0.8082,5.444,2.678,4.378,5.31,3
160 | 11.49,13.22,0.8263,5.304,2.695,5.388,5.31,3
161 | 12.54,13.67,0.8425,5.451,2.879,3.082,5.491,3
162 | 12.02,13.33,0.8503,5.35,2.81,4.271,5.308,3
163 | 12.05,13.41,0.8416,5.267,2.847,4.988,5.046,3
164 | 12.55,13.57,0.8558,5.333,2.968,4.419,5.176,3
165 | 11.14,12.79,0.8558,5.011,2.794,6.388,5.049,3
166 | 12.1,13.15,0.8793,5.105,2.941,2.201,5.056,3
167 | 12.44,13.59,0.8462,5.319,2.897,4.924,5.27,3
168 | 12.15,13.45,0.8443,5.417,2.837,3.638,5.338,3
169 | 11.35,13.12,0.8291,5.176,2.668,4.337,5.132,3
170 | 11.24,13,0.8359,5.09,2.715,3.521,5.088,3
171 | 11.02,13,0.8189,5.325,2.701,6.735,5.163,3
172 | 11.55,13.1,0.8455,5.167,2.845,6.715,4.956,3
173 | 11.27,12.97,0.8419,5.088,2.763,4.309,5,3
174 | 11.4,13.08,0.8375,5.136,2.763,5.588,5.089,3
175 | 10.83,12.96,0.8099,5.278,2.641,5.182,5.185,3
176 | 10.8,12.57,0.859,4.981,2.821,4.773,5.063,3
177 | 11.26,13.01,0.8355,5.186,2.71,5.335,5.092,3
178 | 10.74,12.73,0.8329,5.145,2.642,4.702,4.963,3
179 | 11.48,13.05,0.8473,5.18,2.758,5.876,5.002,3
180 | 12.21,13.47,0.8453,5.357,2.893,1.661,5.178,3
181 | 11.41,12.95,0.856,5.09,2.775,4.957,4.825,3
182 | 12.46,13.41,0.8706,5.236,3.017,4.987,5.147,3
183 | 12.19,13.36,0.8579,5.24,2.909,4.857,5.158,3
184 | 11.65,13.07,0.8575,5.108,2.85,5.209,5.135,3
185 | 12.89,13.77,0.8541,5.495,3.026,6.185,5.316,3
186 | 11.56,13.31,0.8198,5.363,2.683,4.062,5.182,3
187 | 11.81,13.45,0.8198,5.413,2.716,4.898,5.352,3
188 | 10.91,12.8,0.8372,5.088,2.675,4.179,4.956,3
189 | 11.23,12.82,0.8594,5.089,2.821,7.524,4.957,3
190 | 10.59,12.41,0.8648,4.899,2.787,4.975,4.794,3
191 | 10.93,12.8,0.839,5.046,2.717,5.398,5.045,3
192 | 11.27,12.86,0.8563,5.091,2.804,3.985,5.001,3
193 | 11.87,13.02,0.8795,5.132,2.953,3.597,5.132,3
194 | 10.82,12.83,0.8256,5.18,2.63,4.853,5.089,3
195 | 12.11,13.27,0.8639,5.236,2.975,4.132,5.012,3
196 | 12.8,13.47,0.886,5.16,3.126,4.873,4.914,3
197 | 12.79,13.53,0.8786,5.224,3.054,5.483,4.958,3
198 | 13.37,13.78,0.8849,5.32,3.128,4.67,5.091,3
199 | 12.62,13.67,0.8481,5.41,2.911,3.306,5.231,3
200 | 12.76,13.38,0.8964,5.073,3.155,2.828,4.83,3
201 | 12.38,13.44,0.8609,5.219,2.989,5.472,5.045,3
202 | 12.67,13.32,0.8977,4.984,3.135,2.3,4.745,3
203 | 11.18,12.72,0.868,5.009,2.81,4.051,4.828,3
204 | 12.7,13.41,0.8874,5.183,3.091,8.456,5,3
205 | 12.37,13.47,0.8567,5.204,2.96,3.919,5.001,3
206 | 12.19,13.2,0.8783,5.137,2.981,3.631,4.87,3
207 | 11.23,12.88,0.8511,5.14,2.795,4.325,5.003,3
208 | 13.2,13.66,0.8883,5.236,3.232,8.315,5.056,3
209 | 11.84,13.21,0.8521,5.175,2.836,3.598,5.044,3
210 | 12.3,13.34,0.8684,5.243,2.974,5.637,5.063,3
--------------------------------------------------------------------------------
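
The next dump, winequality-red.csv, follows the UCI red-wine-quality layout: eleven physicochemical measurements per headerless row (fixed acidity, volatile acidity, citric acid, residual sugar, chlorides, free sulfur dioxide, total sulfur dioxide, density, pH, sulphates, alcohol) with an integer quality score as the final column. Reusing load_csv from the sketch above, here is a hedged illustration of a holdout split of the kind evaluation/Splitting.py presumably provides; its real interface is not shown in this dump:

import random

def holdout_split(X, y, test_ratio=0.2, seed=0):
    """Shuffle row indices once, then carve off the last test_ratio share for testing."""
    idx = list(range(len(X)))
    random.Random(seed).shuffle(idx)                 # deterministic shuffle
    cut = int(len(idx) * (1 - test_ratio))
    tr, te = idx[:cut], idx[cut:]
    return ([X[i] for i in tr], [y[i] for i in tr],
            [X[i] for i in te], [y[i] for i in te])

X, y = load_csv("Datasets/winequality-red.csv")      # quality score in y
X_tr, y_tr, X_te, y_te = holdout_split(X, y)
print(len(X_tr), "train rows /", len(X_te), "test rows")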
/Datasets/winequality-red.csv:
--------------------------------------------------------------------------------
1 | 7.4,0.7,0,1.9,0.076,11,34,0.9978,3.51,0.56,9.4,5
2 | 7.8,0.88,0,2.6,0.098,25,67,0.9968,3.2,0.68,9.8,5
3 | 7.8,0.76,0.04,2.3,0.092,15,54,0.997,3.26,0.65,9.8,5
4 | 11.2,0.28,0.56,1.9,0.075,17,60,0.998,3.16,0.58,9.8,6
5 | 7.4,0.7,0,1.9,0.076,11,34,0.9978,3.51,0.56,9.4,5
6 | 7.4,0.66,0,1.8,0.075,13,40,0.9978,3.51,0.56,9.4,5
7 | 7.9,0.6,0.06,1.6,0.069,15,59,0.9964,3.3,0.46,9.4,5
8 | 7.3,0.65,0,1.2,0.065,15,21,0.9946,3.39,0.47,10,7
9 | 7.8,0.58,0.02,2,0.073,9,18,0.9968,3.36,0.57,9.5,7
10 | 7.5,0.5,0.36,6.1,0.071,17,102,0.9978,3.35,0.8,10.5,5
11 | 6.7,0.58,0.08,1.8,0.097,15,65,0.9959,3.28,0.54,9.2,5
12 | 7.5,0.5,0.36,6.1,0.071,17,102,0.9978,3.35,0.8,10.5,5
13 | 5.6,0.615,0,1.6,0.089,16,59,0.9943,3.58,0.52,9.9,5
14 | 7.8,0.61,0.29,1.6,0.114,9,29,0.9974,3.26,1.56,9.1,5
15 | 8.9,0.62,0.18,3.8,0.176,52,145,0.9986,3.16,0.88,9.2,5
16 | 8.9,0.62,0.19,3.9,0.17,51,148,0.9986,3.17,0.93,9.2,5
17 | 8.5,0.28,0.56,1.8,0.092,35,103,0.9969,3.3,0.75,10.5,7
18 | 8.1,0.56,0.28,1.7,0.368,16,56,0.9968,3.11,1.28,9.3,5
19 | 7.4,0.59,0.08,4.4,0.086,6,29,0.9974,3.38,0.5,9,4
20 | 7.9,0.32,0.51,1.8,0.341,17,56,0.9969,3.04,1.08,9.2,6
21 | 8.9,0.22,0.48,1.8,0.077,29,60,0.9968,3.39,0.53,9.4,6
22 | 7.6,0.39,0.31,2.3,0.082,23,71,0.9982,3.52,0.65,9.7,5
23 | 7.9,0.43,0.21,1.6,0.106,10,37,0.9966,3.17,0.91,9.5,5
24 | 8.5,0.49,0.11,2.3,0.084,9,67,0.9968,3.17,0.53,9.4,5
25 | 6.9,0.4,0.14,2.4,0.085,21,40,0.9968,3.43,0.63,9.7,6
26 | 6.3,0.39,0.16,1.4,0.08,11,23,0.9955,3.34,0.56,9.3,5
27 | 7.6,0.41,0.24,1.8,0.08,4,11,0.9962,3.28,0.59,9.5,5
28 | 7.9,0.43,0.21,1.6,0.106,10,37,0.9966,3.17,0.91,9.5,5
29 | 7.1,0.71,0,1.9,0.08,14,35,0.9972,3.47,0.55,9.4,5
30 | 7.8,0.645,0,2,0.082,8,16,0.9964,3.38,0.59,9.8,6
31 | 6.7,0.675,0.07,2.4,0.089,17,82,0.9958,3.35,0.54,10.1,5
32 | 6.9,0.685,0,2.5,0.105,22,37,0.9966,3.46,0.57,10.6,6
33 | 8.3,0.655,0.12,2.3,0.083,15,113,0.9966,3.17,0.66,9.8,5
34 | 6.9,0.605,0.12,10.7,0.073,40,83,0.9993,3.45,0.52,9.4,6
35 | 5.2,0.32,0.25,1.8,0.103,13,50,0.9957,3.38,0.55,9.2,5
36 | 7.8,0.645,0,5.5,0.086,5,18,0.9986,3.4,0.55,9.6,6
37 | 7.8,0.6,0.14,2.4,0.086,3,15,0.9975,3.42,0.6,10.8,6
38 | 8.1,0.38,0.28,2.1,0.066,13,30,0.9968,3.23,0.73,9.7,7
39 | 5.7,1.13,0.09,1.5,0.172,7,19,0.994,3.5,0.48,9.8,4
40 | 7.3,0.45,0.36,5.9,0.074,12,87,0.9978,3.33,0.83,10.5,5
41 | 7.3,0.45,0.36,5.9,0.074,12,87,0.9978,3.33,0.83,10.5,5
42 | 8.8,0.61,0.3,2.8,0.088,17,46,0.9976,3.26,0.51,9.3,4
43 | 7.5,0.49,0.2,2.6,0.332,8,14,0.9968,3.21,0.9,10.5,6
44 | 8.1,0.66,0.22,2.2,0.069,9,23,0.9968,3.3,1.2,10.3,5
45 | 6.8,0.67,0.02,1.8,0.05,5,11,0.9962,3.48,0.52,9.5,5
46 | 4.6,0.52,0.15,2.1,0.054,8,65,0.9934,3.9,0.56,13.1,4
47 | 7.7,0.935,0.43,2.2,0.114,22,114,0.997,3.25,0.73,9.2,5
48 | 8.7,0.29,0.52,1.6,0.113,12,37,0.9969,3.25,0.58,9.5,5
49 | 6.4,0.4,0.23,1.6,0.066,5,12,0.9958,3.34,0.56,9.2,5
50 | 5.6,0.31,0.37,1.4,0.074,12,96,0.9954,3.32,0.58,9.2,5
51 | 8.8,0.66,0.26,1.7,0.074,4,23,0.9971,3.15,0.74,9.2,5
52 | 6.6,0.52,0.04,2.2,0.069,8,15,0.9956,3.4,0.63,9.4,6
53 | 6.6,0.5,0.04,2.1,0.068,6,14,0.9955,3.39,0.64,9.4,6
54 | 8.6,0.38,0.36,3,0.081,30,119,0.997,3.2,0.56,9.4,5
55 | 7.6,0.51,0.15,2.8,0.11,33,73,0.9955,3.17,0.63,10.2,6
56 | 7.7,0.62,0.04,3.8,0.084,25,45,0.9978,3.34,0.53,9.5,5
57 | 10.2,0.42,0.57,3.4,0.07,4,10,0.9971,3.04,0.63,9.6,5
58 | 7.5,0.63,0.12,5.1,0.111,50,110,0.9983,3.26,0.77,9.4,5
59 | 7.8,0.59,0.18,2.3,0.076,17,54,0.9975,3.43,0.59,10,5
60 | 7.3,0.39,0.31,2.4,0.074,9,46,0.9962,3.41,0.54,9.4,6
61 | 8.8,0.4,0.4,2.2,0.079,19,52,0.998,3.44,0.64,9.2,5
62 | 7.7,0.69,0.49,1.8,0.115,20,112,0.9968,3.21,0.71,9.3,5
63 | 7.5,0.52,0.16,1.9,0.085,12,35,0.9968,3.38,0.62,9.5,7
64 | 7,0.735,0.05,2,0.081,13,54,0.9966,3.39,0.57,9.8,5
65 | 7.2,0.725,0.05,4.65,0.086,4,11,0.9962,3.41,0.39,10.9,5
66 | 7.2,0.725,0.05,4.65,0.086,4,11,0.9962,3.41,0.39,10.9,5
67 | 7.5,0.52,0.11,1.5,0.079,11,39,0.9968,3.42,0.58,9.6,5
68 | 6.6,0.705,0.07,1.6,0.076,6,15,0.9962,3.44,0.58,10.7,5
69 | 9.3,0.32,0.57,2,0.074,27,65,0.9969,3.28,0.79,10.7,5
70 | 8,0.705,0.05,1.9,0.074,8,19,0.9962,3.34,0.95,10.5,6
71 | 7.7,0.63,0.08,1.9,0.076,15,27,0.9967,3.32,0.54,9.5,6
72 | 7.7,0.67,0.23,2.1,0.088,17,96,0.9962,3.32,0.48,9.5,5
73 | 7.7,0.69,0.22,1.9,0.084,18,94,0.9961,3.31,0.48,9.5,5
74 | 8.3,0.675,0.26,2.1,0.084,11,43,0.9976,3.31,0.53,9.2,4
75 | 9.7,0.32,0.54,2.5,0.094,28,83,0.9984,3.28,0.82,9.6,5
76 | 8.8,0.41,0.64,2.2,0.093,9,42,0.9986,3.54,0.66,10.5,5
77 | 8.8,0.41,0.64,2.2,0.093,9,42,0.9986,3.54,0.66,10.5,5
78 | 6.8,0.785,0,2.4,0.104,14,30,0.9966,3.52,0.55,10.7,6
79 | 6.7,0.75,0.12,2,0.086,12,80,0.9958,3.38,0.52,10.1,5
80 | 8.3,0.625,0.2,1.5,0.08,27,119,0.9972,3.16,1.12,9.1,4
81 | 6.2,0.45,0.2,1.6,0.069,3,15,0.9958,3.41,0.56,9.2,5
82 | 7.8,0.43,0.7,1.9,0.464,22,67,0.9974,3.13,1.28,9.4,5
83 | 7.4,0.5,0.47,2,0.086,21,73,0.997,3.36,0.57,9.1,5
84 | 7.3,0.67,0.26,1.8,0.401,16,51,0.9969,3.16,1.14,9.4,5
85 | 6.3,0.3,0.48,1.8,0.069,18,61,0.9959,3.44,0.78,10.3,6
86 | 6.9,0.55,0.15,2.2,0.076,19,40,0.9961,3.41,0.59,10.1,5
87 | 8.6,0.49,0.28,1.9,0.11,20,136,0.9972,2.93,1.95,9.9,6
88 | 7.7,0.49,0.26,1.9,0.062,9,31,0.9966,3.39,0.64,9.6,5
89 | 9.3,0.39,0.44,2.1,0.107,34,125,0.9978,3.14,1.22,9.5,5
90 | 7,0.62,0.08,1.8,0.076,8,24,0.9978,3.48,0.53,9,5
91 | 7.9,0.52,0.26,1.9,0.079,42,140,0.9964,3.23,0.54,9.5,5
92 | 8.6,0.49,0.28,1.9,0.11,20,136,0.9972,2.93,1.95,9.9,6
93 | 8.6,0.49,0.29,2,0.11,19,133,0.9972,2.93,1.98,9.8,5
94 | 7.7,0.49,0.26,1.9,0.062,9,31,0.9966,3.39,0.64,9.6,5
95 | 5,1.02,0.04,1.4,0.045,41,85,0.9938,3.75,0.48,10.5,4
96 | 4.7,0.6,0.17,2.3,0.058,17,106,0.9932,3.85,0.6,12.9,6
97 | 6.8,0.775,0,3,0.102,8,23,0.9965,3.45,0.56,10.7,5
98 | 7,0.5,0.25,2,0.07,3,22,0.9963,3.25,0.63,9.2,5
99 | 7.6,0.9,0.06,2.5,0.079,5,10,0.9967,3.39,0.56,9.8,5
100 | 8.1,0.545,0.18,1.9,0.08,13,35,0.9972,3.3,0.59,9,6
101 | 8.3,0.61,0.3,2.1,0.084,11,50,0.9972,3.4,0.61,10.2,6
102 | 7.8,0.5,0.3,1.9,0.075,8,22,0.9959,3.31,0.56,10.4,6
103 | 8.1,0.545,0.18,1.9,0.08,13,35,0.9972,3.3,0.59,9,6
104 | 8.1,0.575,0.22,2.1,0.077,12,65,0.9967,3.29,0.51,9.2,5
105 | 7.2,0.49,0.24,2.2,0.07,5,36,0.996,3.33,0.48,9.4,5
106 | 8.1,0.575,0.22,2.1,0.077,12,65,0.9967,3.29,0.51,9.2,5
107 | 7.8,0.41,0.68,1.7,0.467,18,69,0.9973,3.08,1.31,9.3,5
108 | 6.2,0.63,0.31,1.7,0.088,15,64,0.9969,3.46,0.79,9.3,5
109 | 8,0.33,0.53,2.5,0.091,18,80,0.9976,3.37,0.8,9.6,6
110 | 8.1,0.785,0.52,2,0.122,37,153,0.9969,3.21,0.69,9.3,5
111 | 7.8,0.56,0.19,1.8,0.104,12,47,0.9964,3.19,0.93,9.5,5
112 | 8.4,0.62,0.09,2.2,0.084,11,108,0.9964,3.15,0.66,9.8,5
113 | 8.4,0.6,0.1,2.2,0.085,14,111,0.9964,3.15,0.66,9.8,5
114 | 10.1,0.31,0.44,2.3,0.08,22,46,0.9988,3.32,0.67,9.7,6
115 | 7.8,0.56,0.19,1.8,0.104,12,47,0.9964,3.19,0.93,9.5,5
116 | 9.4,0.4,0.31,2.2,0.09,13,62,0.9966,3.07,0.63,10.5,6
117 | 8.3,0.54,0.28,1.9,0.077,11,40,0.9978,3.39,0.61,10,6
118 | 7.8,0.56,0.12,2,0.082,7,28,0.997,3.37,0.5,9.4,6
119 | 8.8,0.55,0.04,2.2,0.119,14,56,0.9962,3.21,0.6,10.9,6
120 | 7,0.69,0.08,1.8,0.097,22,89,0.9959,3.34,0.54,9.2,6
121 | 7.3,1.07,0.09,1.7,0.178,10,89,0.9962,3.3,0.57,9,5
122 | 8.8,0.55,0.04,2.2,0.119,14,56,0.9962,3.21,0.6,10.9,6
123 | 7.3,0.695,0,2.5,0.075,3,13,0.998,3.49,0.52,9.2,5
124 | 8,0.71,0,2.6,0.08,11,34,0.9976,3.44,0.53,9.5,5
125 | 7.8,0.5,0.17,1.6,0.082,21,102,0.996,3.39,0.48,9.5,5
126 | 9,0.62,0.04,1.9,0.146,27,90,0.9984,3.16,0.7,9.4,5
127 | 8.2,1.33,0,1.7,0.081,3,12,0.9964,3.53,0.49,10.9,5
128 | 8.1,1.33,0,1.8,0.082,3,12,0.9964,3.54,0.48,10.9,5
129 | 8,0.59,0.16,1.8,0.065,3,16,0.9962,3.42,0.92,10.5,7
130 | 6.1,0.38,0.15,1.8,0.072,6,19,0.9955,3.42,0.57,9.4,5
131 | 8,0.745,0.56,2,0.118,30,134,0.9968,3.24,0.66,9.4,5
132 | 5.6,0.5,0.09,2.3,0.049,17,99,0.9937,3.63,0.63,13,5
133 | 5.6,0.5,0.09,2.3,0.049,17,99,0.9937,3.63,0.63,13,5
134 | 6.6,0.5,0.01,1.5,0.06,17,26,0.9952,3.4,0.58,9.8,6
135 | 7.9,1.04,0.05,2.2,0.084,13,29,0.9959,3.22,0.55,9.9,6
136 | 8.4,0.745,0.11,1.9,0.09,16,63,0.9965,3.19,0.82,9.6,5
137 | 8.3,0.715,0.15,1.8,0.089,10,52,0.9968,3.23,0.77,9.5,5
138 | 7.2,0.415,0.36,2,0.081,13,45,0.9972,3.48,0.64,9.2,5
139 | 7.8,0.56,0.19,2.1,0.081,15,105,0.9962,3.33,0.54,9.5,5
140 | 7.8,0.56,0.19,2,0.081,17,108,0.9962,3.32,0.54,9.5,5
141 | 8.4,0.745,0.11,1.9,0.09,16,63,0.9965,3.19,0.82,9.6,5
142 | 8.3,0.715,0.15,1.8,0.089,10,52,0.9968,3.23,0.77,9.5,5
143 | 5.2,0.34,0,1.8,0.05,27,63,0.9916,3.68,0.79,14,6
144 | 6.3,0.39,0.08,1.7,0.066,3,20,0.9954,3.34,0.58,9.4,5
145 | 5.2,0.34,0,1.8,0.05,27,63,0.9916,3.68,0.79,14,6
146 | 8.1,0.67,0.55,1.8,0.117,32,141,0.9968,3.17,0.62,9.4,5
147 | 5.8,0.68,0.02,1.8,0.087,21,94,0.9944,3.54,0.52,10,5
148 | 7.6,0.49,0.26,1.6,0.236,10,88,0.9968,3.11,0.8,9.3,5
149 | 6.9,0.49,0.1,2.3,0.074,12,30,0.9959,3.42,0.58,10.2,6
150 | 8.2,0.4,0.44,2.8,0.089,11,43,0.9975,3.53,0.61,10.5,6
151 | 7.3,0.33,0.47,2.1,0.077,5,11,0.9958,3.33,0.53,10.3,6
152 | 9.2,0.52,1,3.4,0.61,32,69,0.9996,2.74,2.0,9.4,4
153 | 7.5,0.6,0.03,1.8,0.095,25,99,0.995,3.35,0.54,10.1,5
154 | 7.5,0.6,0.03,1.8,0.095,25,99,0.995,3.35,0.54,10.1,5
155 | 7.1,0.43,0.42,5.5,0.07,29,129,0.9973,3.42,0.72,10.5,5
156 | 7.1,0.43,0.42,5.5,0.071,28,128,0.9973,3.42,0.71,10.5,5
157 | 7.1,0.43,0.42,5.5,0.07,29,129,0.9973,3.42,0.72,10.5,5
158 | 7.1,0.43,0.42,5.5,0.071,28,128,0.9973,3.42,0.71,10.5,5
159 | 7.1,0.68,0,2.2,0.073,12,22,0.9969,3.48,0.5,9.3,5
160 | 6.8,0.6,0.18,1.9,0.079,18,86,0.9968,3.59,0.57,9.3,6
161 | 7.6,0.95,0.03,2,0.09,7,20,0.9959,3.2,0.56,9.6,5
162 | 7.6,0.68,0.02,1.3,0.072,9,20,0.9965,3.17,1.08,9.2,4
163 | 7.8,0.53,0.04,1.7,0.076,17,31,0.9964,3.33,0.56,10,6
164 | 7.4,0.6,0.26,7.3,0.07,36,121,0.9982,3.37,0.49,9.4,5
165 | 7.3,0.59,0.26,7.2,0.07,35,121,0.9981,3.37,0.49,9.4,5
166 | 7.8,0.63,0.48,1.7,0.1,14,96,0.9961,3.19,0.62,9.5,5
167 | 6.8,0.64,0.1,2.1,0.085,18,101,0.9956,3.34,0.52,10.2,5
168 | 7.3,0.55,0.03,1.6,0.072,17,42,0.9956,3.37,0.48,9,4
169 | 6.8,0.63,0.07,2.1,0.089,11,44,0.9953,3.47,0.55,10.4,6
170 | 7.5,0.705,0.24,1.8,0.36,15,63,0.9964,3,1.59,9.5,5
171 | 7.9,0.885,0.03,1.8,0.058,4,8,0.9972,3.36,0.33,9.1,4
172 | 8,0.42,0.17,2,0.073,6,18,0.9972,3.29,0.61,9.2,6
173 | 8,0.42,0.17,2,0.073,6,18,0.9972,3.29,0.61,9.2,6
174 | 7.4,0.62,0.05,1.9,0.068,24,42,0.9961,3.42,0.57,11.5,6
175 | 7.3,0.38,0.21,2,0.08,7,35,0.9961,3.33,0.47,9.5,5
176 | 6.9,0.5,0.04,1.5,0.085,19,49,0.9958,3.35,0.78,9.5,5
177 | 7.3,0.38,0.21,2,0.08,7,35,0.9961,3.33,0.47,9.5,5
178 | 7.5,0.52,0.42,2.3,0.087,8,38,0.9972,3.58,0.61,10.5,6
179 | 7,0.805,0,2.5,0.068,7,20,0.9969,3.48,0.56,9.6,5
180 | 8.8,0.61,0.14,2.4,0.067,10,42,0.9969,3.19,0.59,9.5,5
181 | 8.8,0.61,0.14,2.4,0.067,10,42,0.9969,3.19,0.59,9.5,5
182 | 8.9,0.61,0.49,2,0.27,23,110,0.9972,3.12,1.02,9.3,5
183 | 7.2,0.73,0.02,2.5,0.076,16,42,0.9972,3.44,0.52,9.3,5
184 | 6.8,0.61,0.2,1.8,0.077,11,65,0.9971,3.54,0.58,9.3,5
185 | 6.7,0.62,0.21,1.9,0.079,8,62,0.997,3.52,0.58,9.3,6
186 | 8.9,0.31,0.57,2,0.111,26,85,0.9971,3.26,0.53,9.7,5
187 | 7.4,0.39,0.48,2,0.082,14,67,0.9972,3.34,0.55,9.2,5
188 | 7.7,0.705,0.1,2.6,0.084,9,26,0.9976,3.39,0.49,9.7,5
189 | 7.9,0.5,0.33,2,0.084,15,143,0.9968,3.2,0.55,9.5,5
190 | 7.9,0.49,0.32,1.9,0.082,17,144,0.9968,3.2,0.55,9.5,5
191 | 8.2,0.5,0.35,2.9,0.077,21,127,0.9976,3.23,0.62,9.4,5
192 | 6.4,0.37,0.25,1.9,0.074,21,49,0.9974,3.57,0.62,9.8,6
193 | 6.8,0.63,0.12,3.8,0.099,16,126,0.9969,3.28,0.61,9.5,5
194 | 7.6,0.55,0.21,2.2,0.071,7,28,0.9964,3.28,0.55,9.7,5
195 | 7.6,0.55,0.21,2.2,0.071,7,28,0.9964,3.28,0.55,9.7,5
196 | 7.8,0.59,0.33,2,0.074,24,120,0.9968,3.25,0.54,9.4,5
197 | 7.3,0.58,0.3,2.4,0.074,15,55,0.9968,3.46,0.59,10.2,5
198 | 11.5,0.3,0.6,2,0.067,12,27,0.9981,3.11,0.97,10.1,6
199 | 5.4,0.835,0.08,1.2,0.046,13,93,0.9924,3.57,0.85,13,7
200 | 6.9,1.09,0.06,2.1,0.061,12,31,0.9948,3.51,0.43,11.4,4
201 | 9.6,0.32,0.47,1.4,0.056,9,24,0.99695,3.22,0.82,10.3,7
202 | 8.8,0.37,0.48,2.1,0.097,39,145,0.9975,3.04,1.03,9.3,5
203 | 6.8,0.5,0.11,1.5,0.075,16,49,0.99545,3.36,0.79,9.5,5
204 | 7,0.42,0.35,1.6,0.088,16,39,0.9961,3.34,0.55,9.2,5
205 | 7,0.43,0.36,1.6,0.089,14,37,0.99615,3.34,0.56,9.2,6
206 | 12.8,0.3,0.74,2.6,0.095,9,28,0.9994,3.2,0.77,10.8,7
207 | 12.8,0.3,0.74,2.6,0.095,9,28,0.9994,3.2,0.77,10.8,7
208 | 7.8,0.57,0.31,1.8,0.069,26,120,0.99625,3.29,0.53,9.3,5
209 | 7.8,0.44,0.28,2.7,0.1,18,95,0.9966,3.22,0.67,9.4,5
210 | 11,0.3,0.58,2.1,0.054,7,19,0.998,3.31,0.88,10.5,7
211 | 9.7,0.53,0.6,2,0.039,5,19,0.99585,3.3,0.86,12.4,6
212 | 8,0.725,0.24,2.8,0.083,10,62,0.99685,3.35,0.56,10,6
213 | 11.6,0.44,0.64,2.1,0.059,5,15,0.998,3.21,0.67,10.2,6
214 | 8.2,0.57,0.26,2.2,0.06,28,65,0.9959,3.3,0.43,10.1,5
215 | 7.8,0.735,0.08,2.4,0.092,10,41,0.9974,3.24,0.71,9.8,6
216 | 7,0.49,0.49,5.6,0.06,26,121,0.9974,3.34,0.76,10.5,5
217 | 8.7,0.625,0.16,2,0.101,13,49,0.9962,3.14,0.57,11,5
218 | 8.1,0.725,0.22,2.2,0.072,11,41,0.9967,3.36,0.55,9.1,5
219 | 7.5,0.49,0.19,1.9,0.076,10,44,0.9957,3.39,0.54,9.7,5
220 | 7.8,0.53,0.33,2.4,0.08,24,144,0.99655,3.3,0.6,9.5,5
221 | 7.8,0.34,0.37,2,0.082,24,58,0.9964,3.34,0.59,9.4,6
222 | 7.4,0.53,0.26,2,0.101,16,72,0.9957,3.15,0.57,9.4,5
223 | 6.8,0.61,0.04,1.5,0.057,5,10,0.99525,3.42,0.6,9.5,5
224 | 8.6,0.645,0.25,2,0.083,8,28,0.99815,3.28,0.6,10,6
225 | 8.4,0.635,0.36,2,0.089,15,55,0.99745,3.31,0.57,10.4,4
226 | 7.7,0.43,0.25,2.6,0.073,29,63,0.99615,3.37,0.58,10.5,6
227 | 8.9,0.59,0.5,2,0.337,27,81,0.9964,3.04,1.61,9.5,6
228 | 9,0.82,0.14,2.6,0.089,9,23,0.9984,3.39,0.63,9.8,5
229 | 7.7,0.43,0.25,2.6,0.073,29,63,0.99615,3.37,0.58,10.5,6
230 | 6.9,0.52,0.25,2.6,0.081,10,37,0.99685,3.46,0.5,11,5
231 | 5.2,0.48,0.04,1.6,0.054,19,106,0.9927,3.54,0.62,12.2,7
232 | 8,0.38,0.06,1.8,0.078,12,49,0.99625,3.37,0.52,9.9,6
233 | 8.5,0.37,0.2,2.8,0.09,18,58,0.998,3.34,0.7,9.6,6
234 | 6.9,0.52,0.25,2.6,0.081,10,37,0.99685,3.46,0.5,11,5
235 | 8.2,1,0.09,2.3,0.065,7,37,0.99685,3.32,0.55,9,6
236 | 7.2,0.63,0,1.9,0.097,14,38,0.99675,3.37,0.58,9,6
237 | 7.2,0.63,0,1.9,0.097,14,38,0.99675,3.37,0.58,9,6
238 | 7.2,0.645,0,1.9,0.097,15,39,0.99675,3.37,0.58,9.2,6
239 | 7.2,0.63,0,1.9,0.097,14,38,0.99675,3.37,0.58,9,6
240 | 8.2,1,0.09,2.3,0.065,7,37,0.99685,3.32,0.55,9,6
241 | 8.9,0.635,0.37,1.7,0.263,5,62,0.9971,3,1.09,9.3,5
242 | 12,0.38,0.56,2.1,0.093,6,24,0.99925,3.14,0.71,10.9,6
243 | 7.7,0.58,0.1,1.8,0.102,28,109,0.99565,3.08,0.49,9.8,6
244 | 15,0.21,0.44,2.2,0.075,10,24,1.00005,3.07,0.84,9.2,7
245 | 15,0.21,0.44,2.2,0.075,10,24,1.00005,3.07,0.84,9.2,7
246 | 7.3,0.66,0,2,0.084,6,23,0.9983,3.61,0.96,9.9,6
247 | 7.1,0.68,0.07,1.9,0.075,16,51,0.99685,3.38,0.52,9.5,5
248 | 8.2,0.6,0.17,2.3,0.072,11,73,0.9963,3.2,0.45,9.3,5
249 | 7.7,0.53,0.06,1.7,0.074,9,39,0.99615,3.35,0.48,9.8,6
250 | 7.3,0.66,0,2,0.084,6,23,0.9983,3.61,0.96,9.9,6
251 | 10.8,0.32,0.44,1.6,0.063,16,37,0.9985,3.22,0.78,10,6
252 | 7.1,0.6,0,1.8,0.074,16,34,0.9972,3.47,0.7,9.9,6
253 | 11.1,0.35,0.48,3.1,0.09,5,21,0.9986,3.17,0.53,10.5,5
254 | 7.7,0.775,0.42,1.9,0.092,8,86,0.9959,3.23,0.59,9.5,5
255 | 7.1,0.6,0,1.8,0.074,16,34,0.9972,3.47,0.7,9.9,6
256 | 8,0.57,0.23,3.2,0.073,17,119,0.99675,3.26,0.57,9.3,5
257 | 9.4,0.34,0.37,2.2,0.075,5,13,0.998,3.22,0.62,9.2,5
258 | 6.6,0.695,0,2.1,0.075,12,56,0.9968,3.49,0.67,9.2,5
259 | 7.7,0.41,0.76,1.8,0.611,8,45,0.9968,3.06,1.26,9.4,5
260 | 10,0.31,0.47,2.6,0.085,14,33,0.99965,3.36,0.8,10.5,7
261 | 7.9,0.33,0.23,1.7,0.077,18,45,0.99625,3.29,0.65,9.3,5
262 | 7,0.975,0.04,2,0.087,12,67,0.99565,3.35,0.6,9.4,4
263 | 8,0.52,0.03,1.7,0.07,10,35,0.99575,3.34,0.57,10,5
264 | 7.9,0.37,0.23,1.8,0.077,23,49,0.9963,3.28,0.67,9.3,5
265 | 12.5,0.56,0.49,2.4,0.064,5,27,0.9999,3.08,0.87,10.9,5
266 | 11.8,0.26,0.52,1.8,0.071,6,10,0.9968,3.2,0.72,10.2,7
267 | 8.1,0.87,0,3.3,0.096,26,61,1.00025,3.6,0.72,9.8,4
268 | 7.9,0.35,0.46,3.6,0.078,15,37,0.9973,3.35,0.86,12.8,8
269 | 6.9,0.54,0.04,3,0.077,7,27,0.9987,3.69,0.91,9.4,6
270 | 11.5,0.18,0.51,4,0.104,4,23,0.9996,3.28,0.97,10.1,6
271 | 7.9,0.545,0.06,4,0.087,27,61,0.9965,3.36,0.67,10.7,6
272 | 11.5,0.18,0.51,4,0.104,4,23,0.9996,3.28,0.97,10.1,6
273 | 10.9,0.37,0.58,4,0.071,17,65,0.99935,3.22,0.78,10.1,5
274 | 8.4,0.715,0.2,2.4,0.076,10,38,0.99735,3.31,0.64,9.4,5
275 | 7.5,0.65,0.18,7,0.088,27,94,0.99915,3.38,0.77,9.4,5
276 | 7.9,0.545,0.06,4,0.087,27,61,0.9965,3.36,0.67,10.7,6
277 | 6.9,0.54,0.04,3,0.077,7,27,0.9987,3.69,0.91,9.4,6
278 | 11.5,0.18,0.51,4,0.104,4,23,0.9996,3.28,0.97,10.1,6
279 | 10.3,0.32,0.45,6.4,0.073,5,13,0.9976,3.23,0.82,12.6,8
280 | 8.9,0.4,0.32,5.6,0.087,10,47,0.9991,3.38,0.77,10.5,7
281 | 11.4,0.26,0.44,3.6,0.071,6,19,0.9986,3.12,0.82,9.3,6
282 | 7.7,0.27,0.68,3.5,0.358,5,10,0.9972,3.25,1.08,9.9,7
283 | 7.6,0.52,0.12,3,0.067,12,53,0.9971,3.36,0.57,9.1,5
284 | 8.9,0.4,0.32,5.6,0.087,10,47,0.9991,3.38,0.77,10.5,7
285 | 9.9,0.59,0.07,3.4,0.102,32,71,1.00015,3.31,0.71,9.8,5
286 | 9.9,0.59,0.07,3.4,0.102,32,71,1.00015,3.31,0.71,9.8,5
287 | 12,0.45,0.55,2,0.073,25,49,0.9997,3.1,0.76,10.3,6
288 | 7.5,0.4,0.12,3,0.092,29,53,0.9967,3.37,0.7,10.3,6
289 | 8.7,0.52,0.09,2.5,0.091,20,49,0.9976,3.34,0.86,10.6,7
290 | 11.6,0.42,0.53,3.3,0.105,33,98,1.001,3.2,0.95,9.2,5
291 | 8.7,0.52,0.09,2.5,0.091,20,49,0.9976,3.34,0.86,10.6,7
292 | 11,0.2,0.48,2,0.343,6,18,0.9979,3.3,0.71,10.5,5
293 | 10.4,0.55,0.23,2.7,0.091,18,48,0.9994,3.22,0.64,10.3,6
294 | 6.9,0.36,0.25,2.4,0.098,5,16,0.9964,3.41,0.6,10.1,6
295 | 13.3,0.34,0.52,3.2,0.094,17,53,1.0014,3.05,0.81,9.5,6
296 | 10.8,0.5,0.46,2.5,0.073,5,27,1.0001,3.05,0.64,9.5,5
297 | 10.6,0.83,0.37,2.6,0.086,26,70,0.9981,3.16,0.52,9.9,5
298 | 7.1,0.63,0.06,2,0.083,8,29,0.99855,3.67,0.73,9.6,5
299 | 7.2,0.65,0.02,2.3,0.094,5,31,0.9993,3.67,0.8,9.7,5
300 | 6.9,0.67,0.06,2.1,0.08,8,33,0.99845,3.68,0.71,9.6,5
301 | 7.5,0.53,0.06,2.6,0.086,20,44,0.9965,3.38,0.59,10.7,6
302 | 11.1,0.18,0.48,1.5,0.068,7,15,0.9973,3.22,0.64,10.1,6
303 | 8.3,0.705,0.12,2.6,0.092,12,28,0.9994,3.51,0.72,10,5
304 | 7.4,0.67,0.12,1.6,0.186,5,21,0.996,3.39,0.54,9.5,5
305 | 8.4,0.65,0.6,2.1,0.112,12,90,0.9973,3.2,0.52,9.2,5
306 | 10.3,0.53,0.48,2.5,0.063,6,25,0.9998,3.12,0.59,9.3,6
307 | 7.6,0.62,0.32,2.2,0.082,7,54,0.9966,3.36,0.52,9.4,5
308 | 10.3,0.41,0.42,2.4,0.213,6,14,0.9994,3.19,0.62,9.5,6
309 | 10.3,0.43,0.44,2.4,0.214,5,12,0.9994,3.19,0.63,9.5,6
310 | 7.4,0.29,0.38,1.7,0.062,9,30,0.9968,3.41,0.53,9.5,6
311 | 10.3,0.53,0.48,2.5,0.063,6,25,0.9998,3.12,0.59,9.3,6
312 | 7.9,0.53,0.24,2,0.072,15,105,0.996,3.27,0.54,9.4,6
313 | 9,0.46,0.31,2.8,0.093,19,98,0.99815,3.32,0.63,9.5,6
314 | 8.6,0.47,0.3,3,0.076,30,135,0.9976,3.3,0.53,9.4,5
315 | 7.4,0.36,0.29,2.6,0.087,26,72,0.99645,3.39,0.68,11,5
316 | 7.1,0.35,0.29,2.5,0.096,20,53,0.9962,3.42,0.65,11,6
317 | 9.6,0.56,0.23,3.4,0.102,37,92,0.9996,3.3,0.65,10.1,5
318 | 9.6,0.77,0.12,2.9,0.082,30,74,0.99865,3.3,0.64,10.4,6
319 | 9.8,0.66,0.39,3.2,0.083,21,59,0.9989,3.37,0.71,11.5,7
320 | 9.6,0.77,0.12,2.9,0.082,30,74,0.99865,3.3,0.64,10.4,6
321 | 9.8,0.66,0.39,3.2,0.083,21,59,0.9989,3.37,0.71,11.5,7
322 | 9.3,0.61,0.26,3.4,0.09,25,87,0.99975,3.24,0.62,9.7,5
323 | 7.8,0.62,0.05,2.3,0.079,6,18,0.99735,3.29,0.63,9.3,5
324 | 10.3,0.59,0.42,2.8,0.09,35,73,0.999,3.28,0.7,9.5,6
325 | 10,0.49,0.2,11,0.071,13,50,1.0015,3.16,0.69,9.2,6
326 | 10,0.49,0.2,11,0.071,13,50,1.0015,3.16,0.69,9.2,6
327 | 11.6,0.53,0.66,3.65,0.121,6,14,0.9978,3.05,0.74,11.5,7
328 | 10.3,0.44,0.5,4.5,0.107,5,13,0.998,3.28,0.83,11.5,5
329 | 13.4,0.27,0.62,2.6,0.082,6,21,1.0002,3.16,0.67,9.7,6
330 | 10.7,0.46,0.39,2,0.061,7,15,0.9981,3.18,0.62,9.5,5
331 | 10.2,0.36,0.64,2.9,0.122,10,41,0.998,3.23,0.66,12.5,6
332 | 10.2,0.36,0.64,2.9,0.122,10,41,0.998,3.23,0.66,12.5,6
333 | 8,0.58,0.28,3.2,0.066,21,114,0.9973,3.22,0.54,9.4,6
334 | 8.4,0.56,0.08,2.1,0.105,16,44,0.9958,3.13,0.52,11,5
335 | 7.9,0.65,0.01,2.5,0.078,17,38,0.9963,3.34,0.74,11.7,7
336 | 11.9,0.695,0.53,3.4,0.128,7,21,0.9992,3.17,0.84,12.2,7
337 | 8.9,0.43,0.45,1.9,0.052,6,16,0.9948,3.35,0.7,12.5,6
338 | 7.8,0.43,0.32,2.8,0.08,29,58,0.9974,3.31,0.64,10.3,5
339 | 12.4,0.49,0.58,3,0.103,28,99,1.0008,3.16,1,11.5,6
340 | 12.5,0.28,0.54,2.3,0.082,12,29,0.9997,3.11,1.36,9.8,7
341 | 12.2,0.34,0.5,2.4,0.066,10,21,1,3.12,1.18,9.2,6
342 | 10.6,0.42,0.48,2.7,0.065,5,18,0.9972,3.21,0.87,11.3,6
343 | 10.9,0.39,0.47,1.8,0.118,6,14,0.9982,3.3,0.75,9.8,6
344 | 10.9,0.39,0.47,1.8,0.118,6,14,0.9982,3.3,0.75,9.8,6
345 | 11.9,0.57,0.5,2.6,0.082,6,32,1.0006,3.12,0.78,10.7,6
346 | 7,0.685,0,1.9,0.067,40,63,0.9979,3.6,0.81,9.9,5
347 | 6.6,0.815,0.02,2.7,0.072,17,34,0.9955,3.58,0.89,12.3,7
348 | 13.8,0.49,0.67,3,0.093,6,15,0.9986,3.02,0.93,12,6
349 | 9.6,0.56,0.31,2.8,0.089,15,46,0.9979,3.11,0.92,10,6
350 | 9.1,0.785,0,2.6,0.093,11,28,0.9994,3.36,0.86,9.4,6
351 | 10.7,0.67,0.22,2.7,0.107,17,34,1.0004,3.28,0.98,9.9,6
352 | 9.1,0.795,0,2.6,0.096,11,26,0.9994,3.35,0.83,9.4,6
353 | 7.7,0.665,0,2.4,0.09,8,19,0.9974,3.27,0.73,9.3,5
354 | 13.5,0.53,0.79,4.8,0.12,23,77,1.0018,3.18,0.77,13,5
355 | 6.1,0.21,0.4,1.4,0.066,40.5,165,0.9912,3.25,0.59,11.9,6
356 | 6.7,0.75,0.01,2.4,0.078,17,32,0.9955,3.55,0.61,12.8,6
357 | 11.5,0.41,0.52,3,0.08,29,55,1.0001,3.26,0.88,11,5
358 | 10.5,0.42,0.66,2.95,0.116,12,29,0.997,3.24,0.75,11.7,7
359 | 11.9,0.43,0.66,3.1,0.109,10,23,1,3.15,0.85,10.4,7
360 | 12.6,0.38,0.66,2.6,0.088,10,41,1.001,3.17,0.68,9.8,6
361 | 8.2,0.7,0.23,2,0.099,14,81,0.9973,3.19,0.7,9.4,5
362 | 8.6,0.45,0.31,2.6,0.086,21,50,0.9982,3.37,0.91,9.9,6
363 | 11.9,0.58,0.66,2.5,0.072,6,37,0.9992,3.05,0.56,10,5
364 | 12.5,0.46,0.63,2,0.071,6,15,0.9988,2.99,0.87,10.2,5
365 | 12.8,0.615,0.66,5.8,0.083,7,42,1.0022,3.07,0.73,10,7
366 | 10,0.42,0.5,3.4,0.107,7,21,0.9979,3.26,0.93,11.8,6
367 | 12.8,0.615,0.66,5.8,0.083,7,42,1.0022,3.07,0.73,10,7
368 | 10.4,0.575,0.61,2.6,0.076,11,24,1,3.16,0.69,9,5
369 | 10.3,0.34,0.52,2.8,0.159,15,75,0.9998,3.18,0.64,9.4,5
370 | 9.4,0.27,0.53,2.4,0.074,6,18,0.9962,3.2,1.13,12,7
371 | 6.9,0.765,0.02,2.3,0.063,35,63,0.9975,3.57,0.78,9.9,5
372 | 7.9,0.24,0.4,1.6,0.056,11,25,0.9967,3.32,0.87,8.7,6
373 | 9.1,0.28,0.48,1.8,0.067,26,46,0.9967,3.32,1.04,10.6,6
374 | 7.4,0.55,0.22,2.2,0.106,12,72,0.9959,3.05,0.63,9.2,5
375 | 14,0.41,0.63,3.8,0.089,6,47,1.0014,3.01,0.81,10.8,6
376 | 11.5,0.54,0.71,4.4,0.124,6,15,0.9984,3.01,0.83,11.8,7
377 | 11.5,0.45,0.5,3,0.078,19,47,1.0003,3.26,1.11,11,6
378 | 9.4,0.27,0.53,2.4,0.074,6,18,0.9962,3.2,1.13,12,7
379 | 11.4,0.625,0.66,6.2,0.088,6,24,0.9988,3.11,0.99,13.3,6
380 | 8.3,0.42,0.38,2.5,0.094,24,60,0.9979,3.31,0.7,10.8,6
381 | 8.3,0.26,0.42,2,0.08,11,27,0.9974,3.21,0.8,9.4,6
382 | 13.7,0.415,0.68,2.9,0.085,17,43,1.0014,3.06,0.8,10,6
383 | 8.3,0.26,0.42,2,0.08,11,27,0.9974,3.21,0.8,9.4,6
384 | 8.3,0.26,0.42,2,0.08,11,27,0.9974,3.21,0.8,9.4,6
385 | 7.7,0.51,0.28,2.1,0.087,23,54,0.998,3.42,0.74,9.2,5
386 | 7.4,0.63,0.07,2.4,0.09,11,37,0.9979,3.43,0.76,9.7,6
387 | 7.8,0.54,0.26,2,0.088,23,48,0.9981,3.41,0.74,9.2,6
388 | 8.3,0.66,0.15,1.9,0.079,17,42,0.9972,3.31,0.54,9.6,6
389 | 7.8,0.46,0.26,1.9,0.088,23,53,0.9981,3.43,0.74,9.2,6
390 | 9.6,0.38,0.31,2.5,0.096,16,49,0.9982,3.19,0.7,10,7
391 | 5.6,0.85,0.05,1.4,0.045,12,88,0.9924,3.56,0.82,12.9,8
392 | 13.7,0.415,0.68,2.9,0.085,17,43,1.0014,3.06,0.8,10,6
393 | 9.5,0.37,0.52,2,0.082,6,26,0.998,3.18,0.51,9.5,5
394 | 8.4,0.665,0.61,2,0.112,13,95,0.997,3.16,0.54,9.1,5
395 | 12.7,0.6,0.65,2.3,0.063,6,25,0.9997,3.03,0.57,9.9,5
396 | 12,0.37,0.76,4.2,0.066,7,38,1.0004,3.22,0.6,13,7
397 | 6.6,0.735,0.02,7.9,0.122,68,124,0.9994,3.47,0.53,9.9,5
398 | 11.5,0.59,0.59,2.6,0.087,13,49,0.9988,3.18,0.65,11,6
399 | 11.5,0.59,0.59,2.6,0.087,13,49,0.9988,3.18,0.65,11,6
400 | 8.7,0.765,0.22,2.3,0.064,9,42,0.9963,3.1,0.55,9.4,5
401 | 6.6,0.735,0.02,7.9,0.122,68,124,0.9994,3.47,0.53,9.9,5
402 | 7.7,0.26,0.3,1.7,0.059,20,38,0.9949,3.29,0.47,10.8,6
403 | 12.2,0.48,0.54,2.6,0.085,19,64,1,3.1,0.61,10.5,6
404 | 11.4,0.6,0.49,2.7,0.085,10,41,0.9994,3.15,0.63,10.5,6
405 | 7.7,0.69,0.05,2.7,0.075,15,27,0.9974,3.26,0.61,9.1,5
406 | 8.7,0.31,0.46,1.4,0.059,11,25,0.9966,3.36,0.76,10.1,6
407 | 9.8,0.44,0.47,2.5,0.063,9,28,0.9981,3.24,0.65,10.8,6
408 | 12,0.39,0.66,3,0.093,12,30,0.9996,3.18,0.63,10.8,7
409 | 10.4,0.34,0.58,3.7,0.174,6,16,0.997,3.19,0.7,11.3,6
410 | 12.5,0.46,0.49,4.5,0.07,26,49,0.9981,3.05,0.57,9.6,4
411 | 9,0.43,0.34,2.5,0.08,26,86,0.9987,3.38,0.62,9.5,6
412 | 9.1,0.45,0.35,2.4,0.08,23,78,0.9987,3.38,0.62,9.5,5
413 | 7.1,0.735,0.16,1.9,0.1,15,77,0.9966,3.27,0.64,9.3,5
414 | 9.9,0.4,0.53,6.7,0.097,6,19,0.9986,3.27,0.82,11.7,7
415 | 8.8,0.52,0.34,2.7,0.087,24,122,0.9982,3.26,0.61,9.5,5
416 | 8.6,0.725,0.24,6.6,0.117,31,134,1.0014,3.32,1.07,9.3,5
417 | 10.6,0.48,0.64,2.2,0.111,6,20,0.997,3.26,0.66,11.7,6
418 | 7,0.58,0.12,1.9,0.091,34,124,0.9956,3.44,0.48,10.5,5
419 | 11.9,0.38,0.51,2,0.121,7,20,0.9996,3.24,0.76,10.4,6
420 | 6.8,0.77,0,1.8,0.066,34,52,0.9976,3.62,0.68,9.9,5
421 | 9.5,0.56,0.33,2.4,0.089,35,67,0.9972,3.28,0.73,11.8,7
422 | 6.6,0.84,0.03,2.3,0.059,32,48,0.9952,3.52,0.56,12.3,7
423 | 7.7,0.96,0.2,2,0.047,15,60,0.9955,3.36,0.44,10.9,5
424 | 10.5,0.24,0.47,2.1,0.066,6,24,0.9978,3.15,0.9,11,7
425 | 7.7,0.96,0.2,2,0.047,15,60,0.9955,3.36,0.44,10.9,5
426 | 6.6,0.84,0.03,2.3,0.059,32,48,0.9952,3.52,0.56,12.3,7
427 | 6.4,0.67,0.08,2.1,0.045,19,48,0.9949,3.49,0.49,11.4,6
428 | 9.5,0.78,0.22,1.9,0.077,6,32,0.9988,3.26,0.56,10.6,6
429 | 9.1,0.52,0.33,1.3,0.07,9,30,0.9978,3.24,0.6,9.3,5
430 | 12.8,0.84,0.63,2.4,0.088,13,35,0.9997,3.1,0.6,10.4,6
431 | 10.5,0.24,0.47,2.1,0.066,6,24,0.9978,3.15,0.9,11,7
432 | 7.8,0.55,0.35,2.2,0.074,21,66,0.9974,3.25,0.56,9.2,5
433 | 11.9,0.37,0.69,2.3,0.078,12,24,0.9958,3,0.65,12.8,6
434 | 12.3,0.39,0.63,2.3,0.091,6,18,1.0004,3.16,0.49,9.5,5
435 | 10.4,0.41,0.55,3.2,0.076,22,54,0.9996,3.15,0.89,9.9,6
436 | 12.3,0.39,0.63,2.3,0.091,6,18,1.0004,3.16,0.49,9.5,5
437 | 8,0.67,0.3,2,0.06,38,62,0.9958,3.26,0.56,10.2,6
438 | 11.1,0.45,0.73,3.2,0.066,6,22,0.9986,3.17,0.66,11.2,6
439 | 10.4,0.41,0.55,3.2,0.076,22,54,0.9996,3.15,0.89,9.9,6
440 | 7,0.62,0.18,1.5,0.062,7,50,0.9951,3.08,0.6,9.3,5
441 | 12.6,0.31,0.72,2.2,0.072,6,29,0.9987,2.88,0.82,9.8,8
442 | 11.9,0.4,0.65,2.15,0.068,7,27,0.9988,3.06,0.68,11.3,6
443 | 15.6,0.685,0.76,3.7,0.1,6,43,1.0032,2.95,0.68,11.2,7
444 | 10,0.44,0.49,2.7,0.077,11,19,0.9963,3.23,0.63,11.6,7
445 | 5.3,0.57,0.01,1.7,0.054,5,27,0.9934,3.57,0.84,12.5,7
446 | 9.5,0.735,0.1,2.1,0.079,6,31,0.9986,3.23,0.56,10.1,6
447 | 12.5,0.38,0.6,2.6,0.081,31,72,0.9996,3.1,0.73,10.5,5
448 | 9.3,0.48,0.29,2.1,0.127,6,16,0.9968,3.22,0.72,11.2,5
449 | 8.6,0.53,0.22,2,0.1,7,27,0.9967,3.2,0.56,10.2,6
450 | 11.9,0.39,0.69,2.8,0.095,17,35,0.9994,3.1,0.61,10.8,6
451 | 11.9,0.39,0.69,2.8,0.095,17,35,0.9994,3.1,0.61,10.8,6
452 | 8.4,0.37,0.53,1.8,0.413,9,26,0.9979,3.06,1.06,9.1,6
453 | 6.8,0.56,0.03,1.7,0.084,18,35,0.9968,3.44,0.63,10,6
454 | 10.4,0.33,0.63,2.8,0.084,5,22,0.9998,3.26,0.74,11.2,7
455 | 7,0.23,0.4,1.6,0.063,21,67,0.9952,3.5,0.63,11.1,5
456 | 11.3,0.62,0.67,5.2,0.086,6,19,0.9988,3.22,0.69,13.4,8
457 | 8.9,0.59,0.39,2.3,0.095,5,22,0.9986,3.37,0.58,10.3,5
458 | 9.2,0.63,0.21,2.7,0.097,29,65,0.9988,3.28,0.58,9.6,5
459 | 10.4,0.33,0.63,2.8,0.084,5,22,0.9998,3.26,0.74,11.2,7
460 | 11.6,0.58,0.66,2.2,0.074,10,47,1.0008,3.25,0.57,9,3
461 | 9.2,0.43,0.52,2.3,0.083,14,23,0.9976,3.35,0.61,11.3,6
462 | 8.3,0.615,0.22,2.6,0.087,6,19,0.9982,3.26,0.61,9.3,5
463 | 11,0.26,0.68,2.55,0.085,10,25,0.997,3.18,0.61,11.8,5
464 | 8.1,0.66,0.7,2.2,0.098,25,129,0.9972,3.08,0.53,9,5
465 | 11.5,0.315,0.54,2.1,0.084,5,15,0.9987,2.98,0.7,9.2,6
466 | 10,0.29,0.4,2.9,0.098,10,26,1.0006,3.48,0.91,9.7,5
467 | 10.3,0.5,0.42,2,0.069,21,51,0.9982,3.16,0.72,11.5,6
468 | 8.8,0.46,0.45,2.6,0.065,7,18,0.9947,3.32,0.79,14,6
469 | 11.4,0.36,0.69,2.1,0.09,6,21,1,3.17,0.62,9.2,6
470 | 8.7,0.82,0.02,1.2,0.07,36,48,0.9952,3.2,0.58,9.8,5
471 | 13,0.32,0.65,2.6,0.093,15,47,0.9996,3.05,0.61,10.6,5
472 | 9.6,0.54,0.42,2.4,0.081,25,52,0.997,3.2,0.71,11.4,6
473 | 12.5,0.37,0.55,2.6,0.083,25,68,0.9995,3.15,0.82,10.4,6
474 | 9.9,0.35,0.55,2.1,0.062,5,14,0.9971,3.26,0.79,10.6,5
475 | 10.5,0.28,0.51,1.7,0.08,10,24,0.9982,3.2,0.89,9.4,6
476 | 9.6,0.68,0.24,2.2,0.087,5,28,0.9988,3.14,0.6,10.2,5
477 | 9.3,0.27,0.41,2,0.091,6,16,0.998,3.28,0.7,9.7,5
478 | 10.4,0.24,0.49,1.8,0.075,6,20,0.9977,3.18,1.06,11,6
479 | 9.6,0.68,0.24,2.2,0.087,5,28,0.9988,3.14,0.6,10.2,5
480 | 9.4,0.685,0.11,2.7,0.077,6,31,0.9984,3.19,0.7,10.1,6
481 | 10.6,0.28,0.39,15.5,0.069,6,23,1.0026,3.12,0.66,9.2,5
482 | 9.4,0.3,0.56,2.8,0.08,6,17,0.9964,3.15,0.92,11.7,8
483 | 10.6,0.36,0.59,2.2,0.152,6,18,0.9986,3.04,1.05,9.4,5
484 | 10.6,0.36,0.6,2.2,0.152,7,18,0.9986,3.04,1.06,9.4,5
485 | 10.6,0.44,0.68,4.1,0.114,6,24,0.997,3.06,0.66,13.4,6
486 | 10.2,0.67,0.39,1.9,0.054,6,17,0.9976,3.17,0.47,10,5
487 | 10.2,0.67,0.39,1.9,0.054,6,17,0.9976,3.17,0.47,10,5
488 | 10.2,0.645,0.36,1.8,0.053,5,14,0.9982,3.17,0.42,10,6
489 | 11.6,0.32,0.55,2.8,0.081,35,67,1.0002,3.32,0.92,10.8,7
490 | 9.3,0.39,0.4,2.6,0.073,10,26,0.9984,3.34,0.75,10.2,6
491 | 9.3,0.775,0.27,2.8,0.078,24,56,0.9984,3.31,0.67,10.6,6
492 | 9.2,0.41,0.5,2.5,0.055,12,25,0.9952,3.34,0.79,13.3,7
493 | 8.9,0.4,0.51,2.6,0.052,13,27,0.995,3.32,0.9,13.4,7
494 | 8.7,0.69,0.31,3,0.086,23,81,1.0002,3.48,0.74,11.6,6
495 | 6.5,0.39,0.23,8.3,0.051,28,91,0.9952,3.44,0.55,12.1,6
496 | 10.7,0.35,0.53,2.6,0.07,5,16,0.9972,3.15,0.65,11,8
497 | 7.8,0.52,0.25,1.9,0.081,14,38,0.9984,3.43,0.65,9,6
498 | 7.2,0.34,0.32,2.5,0.09,43,113,0.9966,3.32,0.79,11.1,5
499 | 10.7,0.35,0.53,2.6,0.07,5,16,0.9972,3.15,0.65,11,8
500 | 8.7,0.69,0.31,3,0.086,23,81,1.0002,3.48,0.74,11.6,6
501 | 7.8,0.52,0.25,1.9,0.081,14,38,0.9984,3.43,0.65,9,6
502 | 10.4,0.44,0.73,6.55,0.074,38,76,0.999,3.17,0.85,12,7
503 | 10.4,0.44,0.73,6.55,0.074,38,76,0.999,3.17,0.85,12,7
504 | 10.5,0.26,0.47,1.9,0.078,6,24,0.9976,3.18,1.04,10.9,7
505 | 10.5,0.24,0.42,1.8,0.077,6,22,0.9976,3.21,1.05,10.8,7
506 | 10.2,0.49,0.63,2.9,0.072,10,26,0.9968,3.16,0.78,12.5,7
507 | 10.4,0.24,0.46,1.8,0.075,6,21,0.9976,3.25,1.02,10.8,7
508 | 11.2,0.67,0.55,2.3,0.084,6,13,1,3.17,0.71,9.5,6
509 | 10,0.59,0.31,2.2,0.09,26,62,0.9994,3.18,0.63,10.2,6
510 | 13.3,0.29,0.75,2.8,0.084,23,43,0.9986,3.04,0.68,11.4,7
511 | 12.4,0.42,0.49,4.6,0.073,19,43,0.9978,3.02,0.61,9.5,5
512 | 10,0.59,0.31,2.2,0.09,26,62,0.9994,3.18,0.63,10.2,6
513 | 10.7,0.4,0.48,2.1,0.125,15,49,0.998,3.03,0.81,9.7,6
514 | 10.5,0.51,0.64,2.4,0.107,6,15,0.9973,3.09,0.66,11.8,7
515 | 10.5,0.51,0.64,2.4,0.107,6,15,0.9973,3.09,0.66,11.8,7
516 | 8.5,0.655,0.49,6.1,0.122,34,151,1.001,3.31,1.14,9.3,5
517 | 12.5,0.6,0.49,4.3,0.1,5,14,1.001,3.25,0.74,11.9,6
518 | 10.4,0.61,0.49,2.1,0.2,5,16,0.9994,3.16,0.63,8.4,3
519 | 10.9,0.21,0.49,2.8,0.088,11,32,0.9972,3.22,0.68,11.7,6
520 | 7.3,0.365,0.49,2.5,0.088,39,106,0.9966,3.36,0.78,11,5
521 | 9.8,0.25,0.49,2.7,0.088,15,33,0.9982,3.42,0.9,10,6
522 | 7.6,0.41,0.49,2,0.088,16,43,0.998,3.48,0.64,9.1,5
523 | 8.2,0.39,0.49,2.3,0.099,47,133,0.9979,3.38,0.99,9.8,5
524 | 9.3,0.4,0.49,2.5,0.085,38,142,0.9978,3.22,0.55,9.4,5
525 | 9.2,0.43,0.49,2.4,0.086,23,116,0.9976,3.23,0.64,9.5,5
526 | 10.4,0.64,0.24,2.8,0.105,29,53,0.9998,3.24,0.67,9.9,5
527 | 7.3,0.365,0.49,2.5,0.088,39,106,0.9966,3.36,0.78,11,5
528 | 7,0.38,0.49,2.5,0.097,33,85,0.9962,3.39,0.77,11.4,6
529 | 8.2,0.42,0.49,2.6,0.084,32,55,0.9988,3.34,0.75,8.7,6
530 | 9.9,0.63,0.24,2.4,0.077,6,33,0.9974,3.09,0.57,9.4,5
531 | 9.1,0.22,0.24,2.1,0.078,1,28,0.999,3.41,0.87,10.3,6
532 | 11.9,0.38,0.49,2.7,0.098,12,42,1.0004,3.16,0.61,10.3,5
533 | 11.9,0.38,0.49,2.7,0.098,12,42,1.0004,3.16,0.61,10.3,5
534 | 10.3,0.27,0.24,2.1,0.072,15,33,0.9956,3.22,0.66,12.8,6
535 | 10,0.48,0.24,2.7,0.102,13,32,1,3.28,0.56,10,6
536 | 9.1,0.22,0.24,2.1,0.078,1,28,0.999,3.41,0.87,10.3,6
537 | 9.9,0.63,0.24,2.4,0.077,6,33,0.9974,3.09,0.57,9.4,5
538 | 8.1,0.825,0.24,2.1,0.084,5,13,0.9972,3.37,0.77,10.7,6
539 | 12.9,0.35,0.49,5.8,0.066,5,35,1.0014,3.2,0.66,12,7
540 | 11.2,0.5,0.74,5.15,0.1,5,17,0.9996,3.22,0.62,11.2,5
541 | 9.2,0.59,0.24,3.3,0.101,20,47,0.9988,3.26,0.67,9.6,5
542 | 9.5,0.46,0.49,6.3,0.064,5,17,0.9988,3.21,0.73,11,6
543 | 9.3,0.715,0.24,2.1,0.07,5,20,0.9966,3.12,0.59,9.9,5
544 | 11.2,0.66,0.24,2.5,0.085,16,53,0.9993,3.06,0.72,11,6
545 | 14.3,0.31,0.74,1.8,0.075,6,15,1.0008,2.86,0.79,8.4,6
546 | 9.1,0.47,0.49,2.6,0.094,38,106,0.9982,3.08,0.59,9.1,5
547 | 7.5,0.55,0.24,2,0.078,10,28,0.9983,3.45,0.78,9.5,6
548 | 10.6,0.31,0.49,2.5,0.067,6,21,0.9987,3.26,0.86,10.7,6
549 | 12.4,0.35,0.49,2.6,0.079,27,69,0.9994,3.12,0.75,10.4,6
550 | 9,0.53,0.49,1.9,0.171,6,25,0.9975,3.27,0.61,9.4,6
551 | 6.8,0.51,0.01,2.1,0.074,9,25,0.9958,3.33,0.56,9.5,6
552 | 9.4,0.43,0.24,2.8,0.092,14,45,0.998,3.19,0.73,10,6
553 | 9.5,0.46,0.24,2.7,0.092,14,44,0.998,3.12,0.74,10,6
554 | 5,1.04,0.24,1.6,0.05,32,96,0.9934,3.74,0.62,11.5,5
555 | 15.5,0.645,0.49,4.2,0.095,10,23,1.00315,2.92,0.74,11.1,5
556 | 15.5,0.645,0.49,4.2,0.095,10,23,1.00315,2.92,0.74,11.1,5
557 | 10.9,0.53,0.49,4.6,0.118,10,17,1.0002,3.07,0.56,11.7,6
558 | 15.6,0.645,0.49,4.2,0.095,10,23,1.00315,2.92,0.74,11.1,5
559 | 10.9,0.53,0.49,4.6,0.118,10,17,1.0002,3.07,0.56,11.7,6
560 | 13,0.47,0.49,4.3,0.085,6,47,1.0021,3.3,0.68,12.7,6
561 | 12.7,0.6,0.49,2.8,0.075,5,19,0.9994,3.14,0.57,11.4,5
562 | 9,0.44,0.49,2.4,0.078,26,121,0.9978,3.23,0.58,9.2,5
563 | 9,0.54,0.49,2.9,0.094,41,110,0.9982,3.08,0.61,9.2,5
564 | 7.6,0.29,0.49,2.7,0.092,25,60,0.9971,3.31,0.61,10.1,6
565 | 13,0.47,0.49,4.3,0.085,6,47,1.0021,3.3,0.68,12.7,6
566 | 12.7,0.6,0.49,2.8,0.075,5,19,0.9994,3.14,0.57,11.4,5
567 | 8.7,0.7,0.24,2.5,0.226,5,15,0.9991,3.32,0.6,9,6
568 | 8.7,0.7,0.24,2.5,0.226,5,15,0.9991,3.32,0.6,9,6
569 | 9.8,0.5,0.49,2.6,0.25,5,20,0.999,3.31,0.79,10.7,6
570 | 6.2,0.36,0.24,2.2,0.095,19,42,0.9946,3.57,0.57,11.7,6
571 | 11.5,0.35,0.49,3.3,0.07,10,37,1.0003,3.32,0.91,11,6
572 | 6.2,0.36,0.24,2.2,0.095,19,42,0.9946,3.57,0.57,11.7,6
573 | 10.2,0.24,0.49,2.4,0.075,10,28,0.9978,3.14,0.61,10.4,5
574 | 10.5,0.59,0.49,2.1,0.07,14,47,0.9991,3.3,0.56,9.6,4
575 | 10.6,0.34,0.49,3.2,0.078,20,78,0.9992,3.19,0.7,10,6
576 | 12.3,0.27,0.49,3.1,0.079,28,46,0.9993,3.2,0.8,10.2,6
577 | 9.9,0.5,0.24,2.3,0.103,6,14,0.9978,3.34,0.52,10,4
578 | 8.8,0.44,0.49,2.8,0.083,18,111,0.9982,3.3,0.6,9.5,5
579 | 8.8,0.47,0.49,2.9,0.085,17,110,0.9982,3.29,0.6,9.8,5
580 | 10.6,0.31,0.49,2.2,0.063,18,40,0.9976,3.14,0.51,9.8,6
581 | 12.3,0.5,0.49,2.2,0.089,5,14,1.0002,3.19,0.44,9.6,5
582 | 12.3,0.5,0.49,2.2,0.089,5,14,1.0002,3.19,0.44,9.6,5
583 | 11.7,0.49,0.49,2.2,0.083,5,15,1,3.19,0.43,9.2,5
584 | 12,0.28,0.49,1.9,0.074,10,21,0.9976,2.98,0.66,9.9,7
585 | 11.8,0.33,0.49,3.4,0.093,54,80,1.0002,3.3,0.76,10.7,7
586 | 7.6,0.51,0.24,2.4,0.091,8,38,0.998,3.47,0.66,9.6,6
587 | 11.1,0.31,0.49,2.7,0.094,16,47,0.9986,3.12,1.02,10.6,7
588 | 7.3,0.73,0.24,1.9,0.108,18,102,0.9967,3.26,0.59,9.3,5
589 | 5,0.42,0.24,2,0.06,19,50,0.9917,3.72,0.74,14,8
590 | 10.2,0.29,0.49,2.6,0.059,5,13,0.9976,3.05,0.74,10.5,7
591 | 9,0.45,0.49,2.6,0.084,21,75,0.9987,3.35,0.57,9.7,5
592 | 6.6,0.39,0.49,1.7,0.07,23,149,0.9922,3.12,0.5,11.5,6
593 | 9,0.45,0.49,2.6,0.084,21,75,0.9987,3.35,0.57,9.7,5
594 | 9.9,0.49,0.58,3.5,0.094,9,43,1.0004,3.29,0.58,9,5
595 | 7.9,0.72,0.17,2.6,0.096,20,38,0.9978,3.4,0.53,9.5,5
596 | 8.9,0.595,0.41,7.9,0.086,30,109,0.9998,3.27,0.57,9.3,5
597 | 12.4,0.4,0.51,2,0.059,6,24,0.9994,3.04,0.6,9.3,6
598 | 11.9,0.58,0.58,1.9,0.071,5,18,0.998,3.09,0.63,10,6
599 | 8.5,0.585,0.18,2.1,0.078,5,30,0.9967,3.2,0.48,9.8,6
600 | 12.7,0.59,0.45,2.3,0.082,11,22,1,3,0.7,9.3,6
601 | 8.2,0.915,0.27,2.1,0.088,7,23,0.9962,3.26,0.47,10,4
602 | 13.2,0.46,0.52,2.2,0.071,12,35,1.0006,3.1,0.56,9,6
603 | 7.7,0.835,0,2.6,0.081,6,14,0.9975,3.3,0.52,9.3,5
604 | 13.2,0.46,0.52,2.2,0.071,12,35,1.0006,3.1,0.56,9,6
605 | 8.3,0.58,0.13,2.9,0.096,14,63,0.9984,3.17,0.62,9.1,6
606 | 8.3,0.6,0.13,2.6,0.085,6,24,0.9984,3.31,0.59,9.2,6
607 | 9.4,0.41,0.48,4.6,0.072,10,20,0.9973,3.34,0.79,12.2,7
608 | 8.8,0.48,0.41,3.3,0.092,26,52,0.9982,3.31,0.53,10.5,6
609 | 10.1,0.65,0.37,5.1,0.11,11,65,1.0026,3.32,0.64,10.4,6
610 | 6.3,0.36,0.19,3.2,0.075,15,39,0.9956,3.56,0.52,12.7,6
611 | 8.8,0.24,0.54,2.5,0.083,25,57,0.9983,3.39,0.54,9.2,5
612 | 13.2,0.38,0.55,2.7,0.081,5,16,1.0006,2.98,0.54,9.4,5
613 | 7.5,0.64,0,2.4,0.077,18,29,0.9965,3.32,0.6,10,6
614 | 8.2,0.39,0.38,1.5,0.058,10,29,0.9962,3.26,0.74,9.8,5
615 | 9.2,0.755,0.18,2.2,0.148,10,103,0.9969,2.87,1.36,10.2,6
616 | 9.6,0.6,0.5,2.3,0.079,28,71,0.9997,3.5,0.57,9.7,5
617 | 9.6,0.6,0.5,2.3,0.079,28,71,0.9997,3.5,0.57,9.7,5
618 | 11.5,0.31,0.51,2.2,0.079,14,28,0.9982,3.03,0.93,9.8,6
619 | 11.4,0.46,0.5,2.7,0.122,4,17,1.0006,3.13,0.7,10.2,5
620 | 11.3,0.37,0.41,2.3,0.088,6,16,0.9988,3.09,0.8,9.3,5
621 | 8.3,0.54,0.24,3.4,0.076,16,112,0.9976,3.27,0.61,9.4,5
622 | 8.2,0.56,0.23,3.4,0.078,14,104,0.9976,3.28,0.62,9.4,5
623 | 10,0.58,0.22,1.9,0.08,9,32,0.9974,3.13,0.55,9.5,5
624 | 7.9,0.51,0.25,2.9,0.077,21,45,0.9974,3.49,0.96,12.1,6
625 | 6.8,0.69,0,5.6,0.124,21,58,0.9997,3.46,0.72,10.2,5
626 | 6.8,0.69,0,5.6,0.124,21,58,0.9997,3.46,0.72,10.2,5
627 | 8.8,0.6,0.29,2.2,0.098,5,15,0.9988,3.36,0.49,9.1,5
628 | 8.8,0.6,0.29,2.2,0.098,5,15,0.9988,3.36,0.49,9.1,5
629 | 8.7,0.54,0.26,2.5,0.097,7,31,0.9976,3.27,0.6,9.3,6
630 | 7.6,0.685,0.23,2.3,0.111,20,84,0.9964,3.21,0.61,9.3,5
631 | 8.7,0.54,0.26,2.5,0.097,7,31,0.9976,3.27,0.6,9.3,6
632 | 10.4,0.28,0.54,2.7,0.105,5,19,0.9988,3.25,0.63,9.5,5
633 | 7.6,0.41,0.14,3,0.087,21,43,0.9964,3.32,0.57,10.5,6
634 | 10.1,0.935,0.22,3.4,0.105,11,86,1.001,3.43,0.64,11.3,4
635 | 7.9,0.35,0.21,1.9,0.073,46,102,0.9964,3.27,0.58,9.5,5
636 | 8.7,0.84,0,1.4,0.065,24,33,0.9954,3.27,0.55,9.7,5
637 | 9.6,0.88,0.28,2.4,0.086,30,147,0.9979,3.24,0.53,9.4,5
638 | 9.5,0.885,0.27,2.3,0.084,31,145,0.9978,3.24,0.53,9.4,5
639 | 7.7,0.915,0.12,2.2,0.143,7,23,0.9964,3.35,0.65,10.2,7
640 | 8.9,0.29,0.35,1.9,0.067,25,57,0.997,3.18,1.36,10.3,6
641 | 9.9,0.54,0.45,2.3,0.071,16,40,0.9991,3.39,0.62,9.4,5
642 | 9.5,0.59,0.44,2.3,0.071,21,68,0.9992,3.46,0.63,9.5,5
643 | 9.9,0.54,0.45,2.3,0.071,16,40,0.9991,3.39,0.62,9.4,5
644 | 9.5,0.59,0.44,2.3,0.071,21,68,0.9992,3.46,0.63,9.5,5
645 | 9.9,0.54,0.45,2.3,0.071,16,40,0.9991,3.39,0.62,9.4,5
646 | 7.8,0.64,0.1,6,0.115,5,11,0.9984,3.37,0.69,10.1,7
647 | 7.3,0.67,0.05,3.6,0.107,6,20,0.9972,3.4,0.63,10.1,5
648 | 8.3,0.845,0.01,2.2,0.07,5,14,0.9967,3.32,0.58,11,4
649 | 8.7,0.48,0.3,2.8,0.066,10,28,0.9964,3.33,0.67,11.2,7
650 | 6.7,0.42,0.27,8.6,0.068,24,148,0.9948,3.16,0.57,11.3,6
651 | 10.7,0.43,0.39,2.2,0.106,8,32,0.9986,2.89,0.5,9.6,5
652 | 9.8,0.88,0.25,2.5,0.104,35,155,1.001,3.41,0.67,11.2,5
653 | 15.9,0.36,0.65,7.5,0.096,22,71,0.9976,2.98,0.84,14.9,5
654 | 9.4,0.33,0.59,2.8,0.079,9,30,0.9976,3.12,0.54,12,6
655 | 8.6,0.47,0.47,2.4,0.074,7,29,0.9979,3.08,0.46,9.5,5
656 | 9.7,0.55,0.17,2.9,0.087,20,53,1.0004,3.14,0.61,9.4,5
657 | 10.7,0.43,0.39,2.2,0.106,8,32,0.9986,2.89,0.5,9.6,5
658 | 12,0.5,0.59,1.4,0.073,23,42,0.998,2.92,0.68,10.5,7
659 | 7.2,0.52,0.07,1.4,0.074,5,20,0.9973,3.32,0.81,9.6,6
660 | 7.1,0.84,0.02,4.4,0.096,5,13,0.997,3.41,0.57,11,4
661 | 7.2,0.52,0.07,1.4,0.074,5,20,0.9973,3.32,0.81,9.6,6
662 | 7.5,0.42,0.31,1.6,0.08,15,42,0.9978,3.31,0.64,9,5
663 | 7.2,0.57,0.06,1.6,0.076,9,27,0.9972,3.36,0.7,9.6,6
664 | 10.1,0.28,0.46,1.8,0.05,5,13,0.9974,3.04,0.79,10.2,6
665 | 12.1,0.4,0.52,2,0.092,15,54,1,3.03,0.66,10.2,5
666 | 9.4,0.59,0.14,2,0.084,25,48,0.9981,3.14,0.56,9.7,5
667 | 8.3,0.49,0.36,1.8,0.222,6,16,0.998,3.18,0.6,9.5,6
668 | 11.3,0.34,0.45,2,0.082,6,15,0.9988,2.94,0.66,9.2,6
669 | 10,0.73,0.43,2.3,0.059,15,31,0.9966,3.15,0.57,11,5
670 | 11.3,0.34,0.45,2,0.082,6,15,0.9988,2.94,0.66,9.2,6
671 | 6.9,0.4,0.24,2.5,0.083,30,45,0.9959,3.26,0.58,10,5
672 | 8.2,0.73,0.21,1.7,0.074,5,13,0.9968,3.2,0.52,9.5,5
673 | 9.8,1.24,0.34,2,0.079,32,151,0.998,3.15,0.53,9.5,5
674 | 8.2,0.73,0.21,1.7,0.074,5,13,0.9968,3.2,0.52,9.5,5
675 | 10.8,0.4,0.41,2.2,0.084,7,17,0.9984,3.08,0.67,9.3,6
676 | 9.3,0.41,0.39,2.2,0.064,12,31,0.9984,3.26,0.65,10.2,5
677 | 10.8,0.4,0.41,2.2,0.084,7,17,0.9984,3.08,0.67,9.3,6
678 | 8.6,0.8,0.11,2.3,0.084,12,31,0.9979,3.4,0.48,9.9,5
679 | 8.3,0.78,0.1,2.6,0.081,45,87,0.9983,3.48,0.53,10,5
680 | 10.8,0.26,0.45,3.3,0.06,20,49,0.9972,3.13,0.54,9.6,5
681 | 13.3,0.43,0.58,1.9,0.07,15,40,1.0004,3.06,0.49,9,5
682 | 8,0.45,0.23,2.2,0.094,16,29,0.9962,3.21,0.49,10.2,6
683 | 8.5,0.46,0.31,2.25,0.078,32,58,0.998,3.33,0.54,9.8,5
684 | 8.1,0.78,0.23,2.6,0.059,5,15,0.997,3.37,0.56,11.3,5
685 | 9.8,0.98,0.32,2.3,0.078,35,152,0.998,3.25,0.48,9.4,5
686 | 8.1,0.78,0.23,2.6,0.059,5,15,0.997,3.37,0.56,11.3,5
687 | 7.1,0.65,0.18,1.8,0.07,13,40,0.997,3.44,0.6,9.1,5
688 | 9.1,0.64,0.23,3.1,0.095,13,38,0.9998,3.28,0.59,9.7,5
689 | 7.7,0.66,0.04,1.6,0.039,4,9,0.9962,3.4,0.47,9.4,5
690 | 8.1,0.38,0.48,1.8,0.157,5,17,0.9976,3.3,1.05,9.4,5
691 | 7.4,1.185,0,4.25,0.097,5,14,0.9966,3.63,0.54,10.7,3
692 | 9.2,0.92,0.24,2.6,0.087,12,93,0.9998,3.48,0.54,9.8,5
693 | 8.6,0.49,0.51,2,0.422,16,62,0.9979,3.03,1.17,9,5
694 | 9,0.48,0.32,2.8,0.084,21,122,0.9984,3.32,0.62,9.4,5
695 | 9,0.47,0.31,2.7,0.084,24,125,0.9984,3.31,0.61,9.4,5
696 | 5.1,0.47,0.02,1.3,0.034,18,44,0.9921,3.9,0.62,12.8,6
697 | 7,0.65,0.02,2.1,0.066,8,25,0.9972,3.47,0.67,9.5,6
698 | 7,0.65,0.02,2.1,0.066,8,25,0.9972,3.47,0.67,9.5,6
699 | 9.4,0.615,0.28,3.2,0.087,18,72,1.0001,3.31,0.53,9.7,5
700 | 11.8,0.38,0.55,2.1,0.071,5,19,0.9986,3.11,0.62,10.8,6
701 | 10.6,1.02,0.43,2.9,0.076,26,88,0.9984,3.08,0.57,10.1,6
702 | 7,0.65,0.02,2.1,0.066,8,25,0.9972,3.47,0.67,9.5,6
703 | 7,0.64,0.02,2.1,0.067,9,23,0.997,3.47,0.67,9.4,6
704 | 7.5,0.38,0.48,2.6,0.073,22,84,0.9972,3.32,0.7,9.6,4
705 | 9.1,0.765,0.04,1.6,0.078,4,14,0.998,3.29,0.54,9.7,4
706 | 8.4,1.035,0.15,6,0.073,11,54,0.999,3.37,0.49,9.9,5
707 | 7,0.78,0.08,2,0.093,10,19,0.9956,3.4,0.47,10,5
708 | 7.4,0.49,0.19,3,0.077,16,37,0.9966,3.37,0.51,10.5,5
709 | 7.8,0.545,0.12,2.5,0.068,11,35,0.996,3.34,0.61,11.6,6
710 | 9.7,0.31,0.47,1.6,0.062,13,33,0.9983,3.27,0.66,10,6
711 | 10.6,1.025,0.43,2.8,0.08,21,84,0.9985,3.06,0.57,10.1,5
712 | 8.9,0.565,0.34,3,0.093,16,112,0.9998,3.38,0.61,9.5,5
713 | 8.7,0.69,0,3.2,0.084,13,33,0.9992,3.36,0.45,9.4,5
714 | 8,0.43,0.36,2.3,0.075,10,48,0.9976,3.34,0.46,9.4,5
715 | 9.9,0.74,0.28,2.6,0.078,21,77,0.998,3.28,0.51,9.8,5
716 | 7.2,0.49,0.18,2.7,0.069,13,34,0.9967,3.29,0.48,9.2,6
717 | 8,0.43,0.36,2.3,0.075,10,48,0.9976,3.34,0.46,9.4,5
718 | 7.6,0.46,0.11,2.6,0.079,12,49,0.9968,3.21,0.57,10,5
719 | 8.4,0.56,0.04,2,0.082,10,22,0.9976,3.22,0.44,9.6,5
720 | 7.1,0.66,0,3.9,0.086,17,45,0.9976,3.46,0.54,9.5,5
721 | 8.4,0.56,0.04,2,0.082,10,22,0.9976,3.22,0.44,9.6,5
722 | 8.9,0.48,0.24,2.85,0.094,35,106,0.9982,3.1,0.53,9.2,5
723 | 7.6,0.42,0.08,2.7,0.084,15,48,0.9968,3.21,0.59,10,5
724 | 7.1,0.31,0.3,2.2,0.053,36,127,0.9965,2.94,1.62,9.5,5
725 | 7.5,1.115,0.1,3.1,0.086,5,12,0.9958,3.54,0.6,11.2,4
726 | 9,0.66,0.17,3,0.077,5,13,0.9976,3.29,0.55,10.4,5
727 | 8.1,0.72,0.09,2.8,0.084,18,49,0.9994,3.43,0.72,11.1,6
728 | 6.4,0.57,0.02,1.8,0.067,4,11,0.997,3.46,0.68,9.5,5
729 | 6.4,0.57,0.02,1.8,0.067,4,11,0.997,3.46,0.68,9.5,5
730 | 6.4,0.865,0.03,3.2,0.071,27,58,0.995,3.61,0.49,12.7,6
731 | 9.5,0.55,0.66,2.3,0.387,12,37,0.9982,3.17,0.67,9.6,5
732 | 8.9,0.875,0.13,3.45,0.088,4,14,0.9994,3.44,0.52,11.5,5
733 | 7.3,0.835,0.03,2.1,0.092,10,19,0.9966,3.39,0.47,9.6,5
734 | 7,0.45,0.34,2.7,0.082,16,72,0.998,3.55,0.6,9.5,5
735 | 7.7,0.56,0.2,2,0.075,9,39,0.9987,3.48,0.62,9.3,5
736 | 7.7,0.965,0.1,2.1,0.112,11,22,0.9963,3.26,0.5,9.5,5
737 | 7.7,0.965,0.1,2.1,0.112,11,22,0.9963,3.26,0.5,9.5,5
738 | 8.2,0.59,0,2.5,0.093,19,58,1.0002,3.5,0.65,9.3,6
739 | 9,0.46,0.23,2.8,0.092,28,104,0.9983,3.1,0.56,9.2,5
740 | 9,0.69,0,2.4,0.088,19,38,0.999,3.35,0.6,9.3,5
741 | 8.3,0.76,0.29,4.2,0.075,12,16,0.9965,3.45,0.68,11.5,6
742 | 9.2,0.53,0.24,2.6,0.078,28,139,0.99788,3.21,0.57,9.5,5
743 | 6.5,0.615,0,1.9,0.065,9,18,0.9972,3.46,0.65,9.2,5
744 | 11.6,0.41,0.58,2.8,0.096,25,101,1.00024,3.13,0.53,10,5
745 | 11.1,0.39,0.54,2.7,0.095,21,101,1.0001,3.13,0.51,9.5,5
746 | 7.3,0.51,0.18,2.1,0.07,12,28,0.99768,3.52,0.73,9.5,6
747 | 8.2,0.34,0.38,2.5,0.08,12,57,0.9978,3.3,0.47,9,6
748 | 8.6,0.33,0.4,2.6,0.083,16,68,0.99782,3.3,0.48,9.4,5
749 | 7.2,0.5,0.18,2.1,0.071,12,31,0.99761,3.52,0.72,9.6,6
750 | 7.3,0.51,0.18,2.1,0.07,12,28,0.99768,3.52,0.73,9.5,6
751 | 8.3,0.65,0.1,2.9,0.089,17,40,0.99803,3.29,0.55,9.5,5
752 | 8.3,0.65,0.1,2.9,0.089,17,40,0.99803,3.29,0.55,9.5,5
753 | 7.6,0.54,0.13,2.5,0.097,24,66,0.99785,3.39,0.61,9.4,5
754 | 8.3,0.65,0.1,2.9,0.089,17,40,0.99803,3.29,0.55,9.5,5
755 | 7.8,0.48,0.68,1.7,0.415,14,32,0.99656,3.09,1.06,9.1,6
756 | 7.8,0.91,0.07,1.9,0.058,22,47,0.99525,3.51,0.43,10.7,6
757 | 6.3,0.98,0.01,2,0.057,15,33,0.99488,3.6,0.46,11.2,6
758 | 8.1,0.87,0,2.2,0.084,10,31,0.99656,3.25,0.5,9.8,5
759 | 8.1,0.87,0,2.2,0.084,10,31,0.99656,3.25,0.5,9.8,5
760 | 8.8,0.42,0.21,2.5,0.092,33,88,0.99823,3.19,0.52,9.2,5
761 | 9,0.58,0.25,2.8,0.075,9,104,0.99779,3.23,0.57,9.7,5
762 | 9.3,0.655,0.26,2,0.096,5,35,0.99738,3.25,0.42,9.6,5
763 | 8.8,0.7,0,1.7,0.069,8,19,0.99701,3.31,0.53,10,6
764 | 9.3,0.655,0.26,2,0.096,5,35,0.99738,3.25,0.42,9.6,5
765 | 9.1,0.68,0.11,2.8,0.093,11,44,0.99888,3.31,0.55,9.5,6
766 | 9.2,0.67,0.1,3,0.091,12,48,0.99888,3.31,0.54,9.5,6
767 | 8.8,0.59,0.18,2.9,0.089,12,74,0.99738,3.14,0.54,9.4,5
768 | 7.5,0.6,0.32,2.7,0.103,13,98,0.99938,3.45,0.62,9.5,5
769 | 7.1,0.59,0.02,2.3,0.082,24,94,0.99744,3.55,0.53,9.7,6
770 | 7.9,0.72,0.01,1.9,0.076,7,32,0.99668,3.39,0.54,9.6,5
771 | 7.1,0.59,0.02,2.3,0.082,24,94,0.99744,3.55,0.53,9.7,6
772 | 9.4,0.685,0.26,2.4,0.082,23,143,0.9978,3.28,0.55,9.4,5
773 | 9.5,0.57,0.27,2.3,0.082,23,144,0.99782,3.27,0.55,9.4,5
774 | 7.9,0.4,0.29,1.8,0.157,1,44,0.9973,3.3,0.92,9.5,6
775 | 7.9,0.4,0.3,1.8,0.157,2,45,0.99727,3.31,0.91,9.5,6
776 | 7.2,1,0,3,0.102,7,16,0.99586,3.43,0.46,10,5
777 | 6.9,0.765,0.18,2.4,0.243,5.5,48,0.99612,3.4,0.6,10.3,6
778 | 6.9,0.635,0.17,2.4,0.241,6,18,0.9961,3.4,0.59,10.3,6
779 | 8.3,0.43,0.3,3.4,0.079,7,34,0.99788,3.36,0.61,10.5,5
780 | 7.1,0.52,0.03,2.6,0.076,21,92,0.99745,3.5,0.6,9.8,5
781 | 7,0.57,0,2,0.19,12,45,0.99676,3.31,0.6,9.4,6
782 | 6.5,0.46,0.14,2.4,0.114,9,37,0.99732,3.66,0.65,9.8,5
783 | 9,0.82,0.05,2.4,0.081,26,96,0.99814,3.36,0.53,10,5
784 | 6.5,0.46,0.14,2.4,0.114,9,37,0.99732,3.66,0.65,9.8,5
785 | 7.1,0.59,0.01,2.5,0.077,20,85,0.99746,3.55,0.59,9.8,5
786 | 9.9,0.35,0.41,2.3,0.083,11,61,0.9982,3.21,0.5,9.5,5
787 | 9.9,0.35,0.41,2.3,0.083,11,61,0.9982,3.21,0.5,9.5,5
788 | 10,0.56,0.24,2.2,0.079,19,58,0.9991,3.18,0.56,10.1,6
789 | 10,0.56,0.24,2.2,0.079,19,58,0.9991,3.18,0.56,10.1,6
790 | 8.6,0.63,0.17,2.9,0.099,21,119,0.998,3.09,0.52,9.3,5
791 | 7.4,0.37,0.43,2.6,0.082,18,82,0.99708,3.33,0.68,9.7,6
792 | 8.8,0.64,0.17,2.9,0.084,25,130,0.99818,3.23,0.54,9.6,5
793 | 7.1,0.61,0.02,2.5,0.081,17,87,0.99745,3.48,0.6,9.7,6
794 | 7.7,0.6,0,2.6,0.055,7,13,0.99639,3.38,0.56,10.8,5
795 | 10.1,0.27,0.54,2.3,0.065,7,26,0.99531,3.17,0.53,12.5,6
796 | 10.8,0.89,0.3,2.6,0.132,7,60,0.99786,2.99,1.18,10.2,5
797 | 8.7,0.46,0.31,2.5,0.126,24,64,0.99746,3.1,0.74,9.6,5
798 | 9.3,0.37,0.44,1.6,0.038,21,42,0.99526,3.24,0.81,10.8,7
799 | 9.4,0.5,0.34,3.6,0.082,5,14,0.9987,3.29,0.52,10.7,6
800 | 9.4,0.5,0.34,3.6,0.082,5,14,0.9987,3.29,0.52,10.7,6
801 | 7.2,0.61,0.08,4,0.082,26,108,0.99641,3.25,0.51,9.4,5
802 | 8.6,0.55,0.09,3.3,0.068,8,17,0.99735,3.23,0.44,10,5
803 | 5.1,0.585,0,1.7,0.044,14,86,0.99264,3.56,0.94,12.9,7
804 | 7.7,0.56,0.08,2.5,0.114,14,46,0.9971,3.24,0.66,9.6,6
805 | 8.4,0.52,0.22,2.7,0.084,4,18,0.99682,3.26,0.57,9.9,6
806 | 8.2,0.28,0.4,2.4,0.052,4,10,0.99356,3.33,0.7,12.8,7
807 | 8.4,0.25,0.39,2,0.041,4,10,0.99386,3.27,0.71,12.5,7
808 | 8.2,0.28,0.4,2.4,0.052,4,10,0.99356,3.33,0.7,12.8,7
809 | 7.4,0.53,0.12,1.9,0.165,4,12,0.99702,3.26,0.86,9.2,5
810 | 7.6,0.48,0.31,2.8,0.07,4,15,0.99693,3.22,0.55,10.3,6
811 | 7.3,0.49,0.1,2.6,0.068,4,14,0.99562,3.3,0.47,10.5,5
812 | 12.9,0.5,0.55,2.8,0.072,7,24,1.00012,3.09,0.68,10.9,6
813 | 10.8,0.45,0.33,2.5,0.099,20,38,0.99818,3.24,0.71,10.8,5
814 | 6.9,0.39,0.24,2.1,0.102,4,7,0.99462,3.44,0.58,11.4,4
815 | 12.6,0.41,0.54,2.8,0.103,19,41,0.99939,3.21,0.76,11.3,6
816 | 10.8,0.45,0.33,2.5,0.099,20,38,0.99818,3.24,0.71,10.8,5
817 | 9.8,0.51,0.19,3.2,0.081,8,30,0.9984,3.23,0.58,10.5,6
818 | 10.8,0.29,0.42,1.6,0.084,19,27,0.99545,3.28,0.73,11.9,6
819 | 7.1,0.715,0,2.35,0.071,21,47,0.99632,3.29,0.45,9.4,5
820 | 9.1,0.66,0.15,3.2,0.097,9,59,0.99976,3.28,0.54,9.6,5
821 | 7,0.685,0,1.9,0.099,9,22,0.99606,3.34,0.6,9.7,5
822 | 4.9,0.42,0,2.1,0.048,16,42,0.99154,3.71,0.74,14,7
823 | 6.7,0.54,0.13,2,0.076,15,36,0.9973,3.61,0.64,9.8,5
824 | 6.7,0.54,0.13,2,0.076,15,36,0.9973,3.61,0.64,9.8,5
825 | 7.1,0.48,0.28,2.8,0.068,6,16,0.99682,3.24,0.53,10.3,5
826 | 7.1,0.46,0.14,2.8,0.076,15,37,0.99624,3.36,0.49,10.7,5
827 | 7.5,0.27,0.34,2.3,0.05,4,8,0.9951,3.4,0.64,11,7
828 | 7.1,0.46,0.14,2.8,0.076,15,37,0.99624,3.36,0.49,10.7,5
829 | 7.8,0.57,0.09,2.3,0.065,34,45,0.99417,3.46,0.74,12.7,8
830 | 5.9,0.61,0.08,2.1,0.071,16,24,0.99376,3.56,0.77,11.1,6
831 | 7.5,0.685,0.07,2.5,0.058,5,9,0.99632,3.38,0.55,10.9,4
832 | 5.9,0.61,0.08,2.1,0.071,16,24,0.99376,3.56,0.77,11.1,6
833 | 10.4,0.44,0.42,1.5,0.145,34,48,0.99832,3.38,0.86,9.9,3
834 | 11.6,0.47,0.44,1.6,0.147,36,51,0.99836,3.38,0.86,9.9,4
835 | 8.8,0.685,0.26,1.6,0.088,16,23,0.99694,3.32,0.47,9.4,5
836 | 7.6,0.665,0.1,1.5,0.066,27,55,0.99655,3.39,0.51,9.3,5
837 | 6.7,0.28,0.28,2.4,0.012,36,100,0.99064,3.26,0.39,11.7,7
838 | 6.7,0.28,0.28,2.4,0.012,36,100,0.99064,3.26,0.39,11.7,7
839 | 10.1,0.31,0.35,1.6,0.075,9,28,0.99672,3.24,0.83,11.2,7
840 | 6,0.5,0.04,2.2,0.092,13,26,0.99647,3.46,0.47,10,5
841 | 11.1,0.42,0.47,2.65,0.085,9,34,0.99736,3.24,0.77,12.1,7
842 | 6.6,0.66,0,3,0.115,21,31,0.99629,3.45,0.63,10.3,5
843 | 10.6,0.5,0.45,2.6,0.119,34,68,0.99708,3.23,0.72,10.9,6
844 | 7.1,0.685,0.35,2,0.088,9,92,0.9963,3.28,0.62,9.4,5
845 | 9.9,0.25,0.46,1.7,0.062,26,42,0.9959,3.18,0.83,10.6,6
846 | 6.4,0.64,0.21,1.8,0.081,14,31,0.99689,3.59,0.66,9.8,5
847 | 6.4,0.64,0.21,1.8,0.081,14,31,0.99689,3.59,0.66,9.8,5
848 | 7.4,0.68,0.16,1.8,0.078,12,39,0.9977,3.5,0.7,9.9,6
849 | 6.4,0.64,0.21,1.8,0.081,14,31,0.99689,3.59,0.66,9.8,5
850 | 6.4,0.63,0.21,1.6,0.08,12,32,0.99689,3.58,0.66,9.8,5
851 | 9.3,0.43,0.44,1.9,0.085,9,22,0.99708,3.28,0.55,9.5,5
852 | 9.3,0.43,0.44,1.9,0.085,9,22,0.99708,3.28,0.55,9.5,5
853 | 8,0.42,0.32,2.5,0.08,26,122,0.99801,3.22,1.07,9.7,5
854 | 9.3,0.36,0.39,1.5,0.08,41,55,0.99652,3.47,0.73,10.9,6
855 | 9.3,0.36,0.39,1.5,0.08,41,55,0.99652,3.47,0.73,10.9,6
856 | 7.6,0.735,0.02,2.5,0.071,10,14,0.99538,3.51,0.71,11.7,7
857 | 9.3,0.36,0.39,1.5,0.08,41,55,0.99652,3.47,0.73,10.9,6
858 | 8.2,0.26,0.34,2.5,0.073,16,47,0.99594,3.4,0.78,11.3,7
859 | 11.7,0.28,0.47,1.7,0.054,17,32,0.99686,3.15,0.67,10.6,7
860 | 6.8,0.56,0.22,1.8,0.074,15,24,0.99438,3.4,0.82,11.2,6
861 | 7.2,0.62,0.06,2.7,0.077,15,85,0.99746,3.51,0.54,9.5,5
862 | 5.8,1.01,0.66,2,0.039,15,88,0.99357,3.66,0.6,11.5,6
863 | 7.5,0.42,0.32,2.7,0.067,7,25,0.99628,3.24,0.44,10.4,5
864 | 7.2,0.62,0.06,2.5,0.078,17,84,0.99746,3.51,0.53,9.7,5
865 | 7.2,0.62,0.06,2.7,0.077,15,85,0.99746,3.51,0.54,9.5,5
866 | 7.2,0.635,0.07,2.6,0.077,16,86,0.99748,3.51,0.54,9.7,5
867 | 6.8,0.49,0.22,2.3,0.071,13,24,0.99438,3.41,0.83,11.3,6
868 | 6.9,0.51,0.23,2,0.072,13,22,0.99438,3.4,0.84,11.2,6
869 | 6.8,0.56,0.22,1.8,0.074,15,24,0.99438,3.4,0.82,11.2,6
870 | 7.6,0.63,0.03,2,0.08,27,43,0.99578,3.44,0.64,10.9,6
871 | 7.7,0.715,0.01,2.1,0.064,31,43,0.99371,3.41,0.57,11.8,6
872 | 6.9,0.56,0.03,1.5,0.086,36,46,0.99522,3.53,0.57,10.6,5
873 | 7.3,0.35,0.24,2,0.067,28,48,0.99576,3.43,0.54,10,4
874 | 9.1,0.21,0.37,1.6,0.067,6,10,0.99552,3.23,0.58,11.1,7
875 | 10.4,0.38,0.46,2.1,0.104,6,10,0.99664,3.12,0.65,11.8,7
876 | 8.8,0.31,0.4,2.8,0.109,7,16,0.99614,3.31,0.79,11.8,7
877 | 7.1,0.47,0,2.2,0.067,7,14,0.99517,3.4,0.58,10.9,4
878 | 7.7,0.715,0.01,2.1,0.064,31,43,0.99371,3.41,0.57,11.8,6
879 | 8.8,0.61,0.19,4,0.094,30,69,0.99787,3.22,0.5,10,6
880 | 7.2,0.6,0.04,2.5,0.076,18,88,0.99745,3.53,0.55,9.5,5
881 | 9.2,0.56,0.18,1.6,0.078,10,21,0.99576,3.15,0.49,9.9,5
882 | 7.6,0.715,0,2.1,0.068,30,35,0.99533,3.48,0.65,11.4,6
883 | 8.4,0.31,0.29,3.1,0.194,14,26,0.99536,3.22,0.78,12,6
884 | 7.2,0.6,0.04,2.5,0.076,18,88,0.99745,3.53,0.55,9.5,5
885 | 8.8,0.61,0.19,4,0.094,30,69,0.99787,3.22,0.5,10,6
886 | 8.9,0.75,0.14,2.5,0.086,9,30,0.99824,3.34,0.64,10.5,5
887 | 9,0.8,0.12,2.4,0.083,8,28,0.99836,3.33,0.65,10.4,6
888 | 10.7,0.52,0.38,2.6,0.066,29,56,0.99577,3.15,0.79,12.1,7
889 | 6.8,0.57,0,2.5,0.072,32,64,0.99491,3.43,0.56,11.2,6
890 | 10.7,0.9,0.34,6.6,0.112,23,99,1.00289,3.22,0.68,9.3,5
891 | 7.2,0.34,0.24,2,0.071,30,52,0.99576,3.44,0.58,10.1,5
892 | 7.2,0.66,0.03,2.3,0.078,16,86,0.99743,3.53,0.57,9.7,5
893 | 10.1,0.45,0.23,1.9,0.082,10,18,0.99774,3.22,0.65,9.3,6
894 | 7.2,0.66,0.03,2.3,0.078,16,86,0.99743,3.53,0.57,9.7,5
895 | 7.2,0.63,0.03,2.2,0.08,17,88,0.99745,3.53,0.58,9.8,6
896 | 7.1,0.59,0.01,2.3,0.08,27,43,0.9955,3.42,0.58,10.7,6
897 | 8.3,0.31,0.39,2.4,0.078,17,43,0.99444,3.31,0.77,12.5,7
898 | 7.1,0.59,0.01,2.3,0.08,27,43,0.9955,3.42,0.58,10.7,6
899 | 8.3,0.31,0.39,2.4,0.078,17,43,0.99444,3.31,0.77,12.5,7
900 | 8.3,1.02,0.02,3.4,0.084,6,11,0.99892,3.48,0.49,11,3
901 | 8.9,0.31,0.36,2.6,0.056,10,39,0.99562,3.4,0.69,11.8,5
902 | 7.4,0.635,0.1,2.4,0.08,16,33,0.99736,3.58,0.69,10.8,7
903 | 7.4,0.635,0.1,2.4,0.08,16,33,0.99736,3.58,0.69,10.8,7
904 | 6.8,0.59,0.06,6,0.06,11,18,0.9962,3.41,0.59,10.8,7
905 | 6.8,0.59,0.06,6,0.06,11,18,0.9962,3.41,0.59,10.8,7
906 | 9.2,0.58,0.2,3,0.081,15,115,0.998,3.23,0.59,9.5,5
907 | 7.2,0.54,0.27,2.6,0.084,12,78,0.9964,3.39,0.71,11,5
908 | 6.1,0.56,0,2.2,0.079,6,9,0.9948,3.59,0.54,11.5,6
909 | 7.4,0.52,0.13,2.4,0.078,34,61,0.99528,3.43,0.59,10.8,6
910 | 7.3,0.305,0.39,1.2,0.059,7,11,0.99331,3.29,0.52,11.5,6
911 | 9.3,0.38,0.48,3.8,0.132,3,11,0.99577,3.23,0.57,13.2,6
912 | 9.1,0.28,0.46,9,0.114,3,9,0.99901,3.18,0.6,10.9,6
913 | 10,0.46,0.44,2.9,0.065,4,8,0.99674,3.33,0.62,12.2,6
914 | 9.4,0.395,0.46,4.6,0.094,3,10,0.99639,3.27,0.64,12.2,7
915 | 7.3,0.305,0.39,1.2,0.059,7,11,0.99331,3.29,0.52,11.5,6
916 | 8.6,0.315,0.4,2.2,0.079,3,6,0.99512,3.27,0.67,11.9,6
917 | 5.3,0.715,0.19,1.5,0.161,7,62,0.99395,3.62,0.61,11,5
918 | 6.8,0.41,0.31,8.8,0.084,26,45,0.99824,3.38,0.64,10.1,6
919 | 8.4,0.36,0.32,2.2,0.081,32,79,0.9964,3.3,0.72,11,6
920 | 8.4,0.62,0.12,1.8,0.072,38,46,0.99504,3.38,0.89,11.8,6
921 | 9.6,0.41,0.37,2.3,0.091,10,23,0.99786,3.24,0.56,10.5,5
922 | 8.4,0.36,0.32,2.2,0.081,32,79,0.9964,3.3,0.72,11,6
923 | 8.4,0.62,0.12,1.8,0.072,38,46,0.99504,3.38,0.89,11.8,6
924 | 6.8,0.41,0.31,8.8,0.084,26,45,0.99824,3.38,0.64,10.1,6
925 | 8.6,0.47,0.27,2.3,0.055,14,28,0.99516,3.18,0.8,11.2,5
926 | 8.6,0.22,0.36,1.9,0.064,53,77,0.99604,3.47,0.87,11,7
927 | 9.4,0.24,0.33,2.3,0.061,52,73,0.99786,3.47,0.9,10.2,6
928 | 8.4,0.67,0.19,2.2,0.093,11,75,0.99736,3.2,0.59,9.2,4
929 | 8.6,0.47,0.27,2.3,0.055,14,28,0.99516,3.18,0.8,11.2,5
930 | 8.7,0.33,0.38,3.3,0.063,10,19,0.99468,3.3,0.73,12,7
931 | 6.6,0.61,0.01,1.9,0.08,8,25,0.99746,3.69,0.73,10.5,5
932 | 7.4,0.61,0.01,2,0.074,13,38,0.99748,3.48,0.65,9.8,5
933 | 7.6,0.4,0.29,1.9,0.078,29,66,0.9971,3.45,0.59,9.5,6
934 | 7.4,0.61,0.01,2,0.074,13,38,0.99748,3.48,0.65,9.8,5
935 | 6.6,0.61,0.01,1.9,0.08,8,25,0.99746,3.69,0.73,10.5,5
936 | 8.8,0.3,0.38,2.3,0.06,19,72,0.99543,3.39,0.72,11.8,6
937 | 8.8,0.3,0.38,2.3,0.06,19,72,0.99543,3.39,0.72,11.8,6
938 | 12,0.63,0.5,1.4,0.071,6,26,0.99791,3.07,0.6,10.4,4
939 | 7.2,0.38,0.38,2.8,0.068,23,42,0.99356,3.34,0.72,12.9,7
940 | 6.2,0.46,0.17,1.6,0.073,7,11,0.99425,3.61,0.54,11.4,5
941 | 9.6,0.33,0.52,2.2,0.074,13,25,0.99509,3.36,0.76,12.4,7
942 | 9.9,0.27,0.49,5,0.082,9,17,0.99484,3.19,0.52,12.5,7
943 | 10.1,0.43,0.4,2.6,0.092,13,52,0.99834,3.22,0.64,10,7
944 | 9.8,0.5,0.34,2.3,0.094,10,45,0.99864,3.24,0.6,9.7,7
945 | 8.3,0.3,0.49,3.8,0.09,11,24,0.99498,3.27,0.64,12.1,7
946 | 10.2,0.44,0.42,2,0.071,7,20,0.99566,3.14,0.79,11.1,7
947 | 10.2,0.44,0.58,4.1,0.092,11,24,0.99745,3.29,0.99,12,7
948 | 8.3,0.28,0.48,2.1,0.093,6,12,0.99408,3.26,0.62,12.4,7
949 | 8.9,0.12,0.45,1.8,0.075,10,21,0.99552,3.41,0.76,11.9,7
950 | 8.9,0.12,0.45,1.8,0.075,10,21,0.99552,3.41,0.76,11.9,7
951 | 8.9,0.12,0.45,1.8,0.075,10,21,0.99552,3.41,0.76,11.9,7
952 | 8.3,0.28,0.48,2.1,0.093,6,12,0.99408,3.26,0.62,12.4,7
953 | 8.2,0.31,0.4,2.2,0.058,6,10,0.99536,3.31,0.68,11.2,7
954 | 10.2,0.34,0.48,2.1,0.052,5,9,0.99458,3.2,0.69,12.1,7
955 | 7.6,0.43,0.4,2.7,0.082,6,11,0.99538,3.44,0.54,12.2,6
956 | 8.5,0.21,0.52,1.9,0.09,9,23,0.99648,3.36,0.67,10.4,5
957 | 9,0.36,0.52,2.1,0.111,5,10,0.99568,3.31,0.62,11.3,6
958 | 9.5,0.37,0.52,2,0.088,12,51,0.99613,3.29,0.58,11.1,6
959 | 6.4,0.57,0.12,2.3,0.12,25,36,0.99519,3.47,0.71,11.3,7
960 | 8,0.59,0.05,2,0.089,12,32,0.99735,3.36,0.61,10,5
961 | 8.5,0.47,0.27,1.9,0.058,18,38,0.99518,3.16,0.85,11.1,6
962 | 7.1,0.56,0.14,1.6,0.078,7,18,0.99592,3.27,0.62,9.3,5
963 | 6.6,0.57,0.02,2.1,0.115,6,16,0.99654,3.38,0.69,9.5,5
964 | 8.8,0.27,0.39,2,0.1,20,27,0.99546,3.15,0.69,11.2,6
965 | 8.5,0.47,0.27,1.9,0.058,18,38,0.99518,3.16,0.85,11.1,6
966 | 8.3,0.34,0.4,2.4,0.065,24,48,0.99554,3.34,0.86,11,6
967 | 9,0.38,0.41,2.4,0.103,6,10,0.99604,3.13,0.58,11.9,7
968 | 8.5,0.66,0.2,2.1,0.097,23,113,0.99733,3.13,0.48,9.2,5
969 | 9,0.4,0.43,2.4,0.068,29,46,0.9943,3.2,0.6,12.2,6
970 | 6.7,0.56,0.09,2.9,0.079,7,22,0.99669,3.46,0.61,10.2,5
971 | 10.4,0.26,0.48,1.9,0.066,6,10,0.99724,3.33,0.87,10.9,6
972 | 10.4,0.26,0.48,1.9,0.066,6,10,0.99724,3.33,0.87,10.9,6
973 | 10.1,0.38,0.5,2.4,0.104,6,13,0.99643,3.22,0.65,11.6,7
974 | 8.5,0.34,0.44,1.7,0.079,6,12,0.99605,3.52,0.63,10.7,5
975 | 8.8,0.33,0.41,5.9,0.073,7,13,0.99658,3.3,0.62,12.1,7
976 | 7.2,0.41,0.3,2.1,0.083,35,72,0.997,3.44,0.52,9.4,5
977 | 7.2,0.41,0.3,2.1,0.083,35,72,0.997,3.44,0.52,9.4,5
978 | 8.4,0.59,0.29,2.6,0.109,31,119,0.99801,3.15,0.5,9.1,5
979 | 7,0.4,0.32,3.6,0.061,9,29,0.99416,3.28,0.49,11.3,7
980 | 12.2,0.45,0.49,1.4,0.075,3,6,0.9969,3.13,0.63,10.4,5
981 | 9.1,0.5,0.3,1.9,0.065,8,17,0.99774,3.32,0.71,10.5,6
982 | 9.5,0.86,0.26,1.9,0.079,13,28,0.99712,3.25,0.62,10,5
983 | 7.3,0.52,0.32,2.1,0.07,51,70,0.99418,3.34,0.82,12.9,6
984 | 9.1,0.5,0.3,1.9,0.065,8,17,0.99774,3.32,0.71,10.5,6
985 | 12.2,0.45,0.49,1.4,0.075,3,6,0.9969,3.13,0.63,10.4,5
986 | 7.4,0.58,0,2,0.064,7,11,0.99562,3.45,0.58,11.3,6
987 | 9.8,0.34,0.39,1.4,0.066,3,7,0.9947,3.19,0.55,11.4,7
988 | 7.1,0.36,0.3,1.6,0.08,35,70,0.99693,3.44,0.5,9.4,5
989 | 7.7,0.39,0.12,1.7,0.097,19,27,0.99596,3.16,0.49,9.4,5
990 | 9.7,0.295,0.4,1.5,0.073,14,21,0.99556,3.14,0.51,10.9,6
991 | 7.7,0.39,0.12,1.7,0.097,19,27,0.99596,3.16,0.49,9.4,5
992 | 7.1,0.34,0.28,2,0.082,31,68,0.99694,3.45,0.48,9.4,5
993 | 6.5,0.4,0.1,2,0.076,30,47,0.99554,3.36,0.48,9.4,6
994 | 7.1,0.34,0.28,2,0.082,31,68,0.99694,3.45,0.48,9.4,5
995 | 10,0.35,0.45,2.5,0.092,20,88,0.99918,3.15,0.43,9.4,5
996 | 7.7,0.6,0.06,2,0.079,19,41,0.99697,3.39,0.62,10.1,6
997 | 5.6,0.66,0,2.2,0.087,3,11,0.99378,3.71,0.63,12.8,7
998 | 5.6,0.66,0,2.2,0.087,3,11,0.99378,3.71,0.63,12.8,7
999 | 8.9,0.84,0.34,1.4,0.05,4,10,0.99554,3.12,0.48,9.1,6
1000 | 6.4,0.69,0,1.65,0.055,7,12,0.99162,3.47,0.53,12.9,6
1001 | 7.5,0.43,0.3,2.2,0.062,6,12,0.99495,3.44,0.72,11.5,7
1002 | 9.9,0.35,0.38,1.5,0.058,31,47,0.99676,3.26,0.82,10.6,7
1003 | 9.1,0.29,0.33,2.05,0.063,13,27,0.99516,3.26,0.84,11.7,7
1004 | 6.8,0.36,0.32,1.8,0.067,4,8,0.9928,3.36,0.55,12.8,7
1005 | 8.2,0.43,0.29,1.6,0.081,27,45,0.99603,3.25,0.54,10.3,5
1006 | 6.8,0.36,0.32,1.8,0.067,4,8,0.9928,3.36,0.55,12.8,7
1007 | 9.1,0.29,0.33,2.05,0.063,13,27,0.99516,3.26,0.84,11.7,7
1008 | 9.1,0.3,0.34,2,0.064,12,25,0.99516,3.26,0.84,11.7,7
1009 | 8.9,0.35,0.4,3.6,0.11,12,24,0.99549,3.23,0.7,12,7
1010 | 9.6,0.5,0.36,2.8,0.116,26,55,0.99722,3.18,0.68,10.9,5
1011 | 8.9,0.28,0.45,1.7,0.067,7,12,0.99354,3.25,0.55,12.3,7
1012 | 8.9,0.32,0.31,2,0.088,12,19,0.9957,3.17,0.55,10.4,6
1013 | 7.7,1.005,0.15,2.1,0.102,11,32,0.99604,3.23,0.48,10,5
1014 | 7.5,0.71,0,1.6,0.092,22,31,0.99635,3.38,0.58,10,6
1015 | 8,0.58,0.16,2,0.12,3,7,0.99454,3.22,0.58,11.2,6
1016 | 10.5,0.39,0.46,2.2,0.075,14,27,0.99598,3.06,0.84,11.4,6
1017 | 8.9,0.38,0.4,2.2,0.068,12,28,0.99486,3.27,0.75,12.6,7
1018 | 8,0.18,0.37,0.9,0.049,36,109,0.99007,2.89,0.44,12.7,6
1019 | 8,0.18,0.37,0.9,0.049,36,109,0.99007,2.89,0.44,12.7,6
1020 | 7,0.5,0.14,1.8,0.078,10,23,0.99636,3.53,0.61,10.4,5
1021 | 11.3,0.36,0.66,2.4,0.123,3,8,0.99642,3.2,0.53,11.9,6
1022 | 11.3,0.36,0.66,2.4,0.123,3,8,0.99642,3.2,0.53,11.9,6
1023 | 7,0.51,0.09,2.1,0.062,4,9,0.99584,3.35,0.54,10.5,5
1024 | 8.2,0.32,0.42,2.3,0.098,3,9,0.99506,3.27,0.55,12.3,6
1025 | 7.7,0.58,0.01,1.8,0.088,12,18,0.99568,3.32,0.56,10.5,7
1026 | 8.6,0.83,0,2.8,0.095,17,43,0.99822,3.33,0.6,10.4,6
1027 | 7.9,0.31,0.32,1.9,0.066,14,36,0.99364,3.41,0.56,12.6,6
1028 | 6.4,0.795,0,2.2,0.065,28,52,0.99378,3.49,0.52,11.6,5
1029 | 7.2,0.34,0.21,2.5,0.075,41,68,0.99586,3.37,0.54,10.1,6
1030 | 7.7,0.58,0.01,1.8,0.088,12,18,0.99568,3.32,0.56,10.5,7
1031 | 7.1,0.59,0,2.1,0.091,9,14,0.99488,3.42,0.55,11.5,7
1032 | 7.3,0.55,0.01,1.8,0.093,9,15,0.99514,3.35,0.58,11,7
1033 | 8.1,0.82,0,4.1,0.095,5,14,0.99854,3.36,0.53,9.6,5
1034 | 7.5,0.57,0.08,2.6,0.089,14,27,0.99592,3.3,0.59,10.4,6
1035 | 8.9,0.745,0.18,2.5,0.077,15,48,0.99739,3.2,0.47,9.7,6
1036 | 10.1,0.37,0.34,2.4,0.085,5,17,0.99683,3.17,0.65,10.6,7
1037 | 7.6,0.31,0.34,2.5,0.082,26,35,0.99356,3.22,0.59,12.5,7
1038 | 7.3,0.91,0.1,1.8,0.074,20,56,0.99672,3.35,0.56,9.2,5
1039 | 8.7,0.41,0.41,6.2,0.078,25,42,0.9953,3.24,0.77,12.6,7
1040 | 8.9,0.5,0.21,2.2,0.088,21,39,0.99692,3.33,0.83,11.1,6
1041 | 7.4,0.965,0,2.2,0.088,16,32,0.99756,3.58,0.67,10.2,5
1042 | 6.9,0.49,0.19,1.7,0.079,13,26,0.99547,3.38,0.64,9.8,6
1043 | 8.9,0.5,0.21,2.2,0.088,21,39,0.99692,3.33,0.83,11.1,6
1044 | 9.5,0.39,0.41,8.9,0.069,18,39,0.99859,3.29,0.81,10.9,7
1045 | 6.4,0.39,0.33,3.3,0.046,12,53,0.99294,3.36,0.62,12.2,6
1046 | 6.9,0.44,0,1.4,0.07,32,38,0.99438,3.32,0.58,11.4,6
1047 | 7.6,0.78,0,1.7,0.076,33,45,0.99612,3.31,0.62,10.7,6
1048 | 7.1,0.43,0.17,1.8,0.082,27,51,0.99634,3.49,0.64,10.4,5
1049 | 9.3,0.49,0.36,1.7,0.081,3,14,0.99702,3.27,0.78,10.9,6
1050 | 9.3,0.5,0.36,1.8,0.084,6,17,0.99704,3.27,0.77,10.8,6
1051 | 7.1,0.43,0.17,1.8,0.082,27,51,0.99634,3.49,0.64,10.4,5
1052 | 8.5,0.46,0.59,1.4,0.414,16,45,0.99702,3.03,1.34,9.2,5
1053 | 5.6,0.605,0.05,2.4,0.073,19,25,0.99258,3.56,0.55,12.9,5
1054 | 8.3,0.33,0.42,2.3,0.07,9,20,0.99426,3.38,0.77,12.7,7
1055 | 8.2,0.64,0.27,2,0.095,5,77,0.99747,3.13,0.62,9.1,6
1056 | 8.2,0.64,0.27,2,0.095,5,77,0.99747,3.13,0.62,9.1,6
1057 | 8.9,0.48,0.53,4,0.101,3,10,0.99586,3.21,0.59,12.1,7
1058 | 7.6,0.42,0.25,3.9,0.104,28,90,0.99784,3.15,0.57,9.1,5
1059 | 9.9,0.53,0.57,2.4,0.093,30,52,0.9971,3.19,0.76,11.6,7
1060 | 8.9,0.48,0.53,4,0.101,3,10,0.99586,3.21,0.59,12.1,7
1061 | 11.6,0.23,0.57,1.8,0.074,3,8,0.9981,3.14,0.7,9.9,6
1062 | 9.1,0.4,0.5,1.8,0.071,7,16,0.99462,3.21,0.69,12.5,8
1063 | 8,0.38,0.44,1.9,0.098,6,15,0.9956,3.3,0.64,11.4,6
1064 | 10.2,0.29,0.65,2.4,0.075,6,17,0.99565,3.22,0.63,11.8,6
1065 | 8.2,0.74,0.09,2,0.067,5,10,0.99418,3.28,0.57,11.8,6
1066 | 7.7,0.61,0.18,2.4,0.083,6,20,0.9963,3.29,0.6,10.2,6
1067 | 6.6,0.52,0.08,2.4,0.07,13,26,0.99358,3.4,0.72,12.5,7
1068 | 11.1,0.31,0.53,2.2,0.06,3,10,0.99572,3.02,0.83,10.9,7
1069 | 11.1,0.31,0.53,2.2,0.06,3,10,0.99572,3.02,0.83,10.9,7
1070 | 8,0.62,0.35,2.8,0.086,28,52,0.997,3.31,0.62,10.8,5
1071 | 9.3,0.33,0.45,1.5,0.057,19,37,0.99498,3.18,0.89,11.1,7
1072 | 7.5,0.77,0.2,8.1,0.098,30,92,0.99892,3.2,0.58,9.2,5
1073 | 7.2,0.35,0.26,1.8,0.083,33,75,0.9968,3.4,0.58,9.5,6
1074 | 8,0.62,0.33,2.7,0.088,16,37,0.9972,3.31,0.58,10.7,6
1075 | 7.5,0.77,0.2,8.1,0.098,30,92,0.99892,3.2,0.58,9.2,5
1076 | 9.1,0.25,0.34,2,0.071,45,67,0.99769,3.44,0.86,10.2,7
1077 | 9.9,0.32,0.56,2,0.073,3,8,0.99534,3.15,0.73,11.4,6
1078 | 8.6,0.37,0.65,6.4,0.08,3,8,0.99817,3.27,0.58,11,5
1079 | 8.6,0.37,0.65,6.4,0.08,3,8,0.99817,3.27,0.58,11,5
1080 | 7.9,0.3,0.68,8.3,0.05,37.5,278,0.99316,3.01,0.51,12.3,7
1081 | 10.3,0.27,0.56,1.4,0.047,3,8,0.99471,3.16,0.51,11.8,6
1082 | 7.9,0.3,0.68,8.3,0.05,37.5,289,0.99316,3.01,0.51,12.3,7
1083 | 7.2,0.38,0.3,1.8,0.073,31,70,0.99685,3.42,0.59,9.5,6
1084 | 8.7,0.42,0.45,2.4,0.072,32,59,0.99617,3.33,0.77,12,6
1085 | 7.2,0.38,0.3,1.8,0.073,31,70,0.99685,3.42,0.59,9.5,6
1086 | 6.8,0.48,0.08,1.8,0.074,40,64,0.99529,3.12,0.49,9.6,5
1087 | 8.5,0.34,0.4,4.7,0.055,3,9,0.99738,3.38,0.66,11.6,7
1088 | 7.9,0.19,0.42,1.6,0.057,18,30,0.994,3.29,0.69,11.2,6
1089 | 11.6,0.41,0.54,1.5,0.095,22,41,0.99735,3.02,0.76,9.9,7
1090 | 11.6,0.41,0.54,1.5,0.095,22,41,0.99735,3.02,0.76,9.9,7
1091 | 10,0.26,0.54,1.9,0.083,42,74,0.99451,2.98,0.63,11.8,8
1092 | 7.9,0.34,0.42,2,0.086,8,19,0.99546,3.35,0.6,11.4,6
1093 | 7,0.54,0.09,2,0.081,10,16,0.99479,3.43,0.59,11.5,6
1094 | 9.2,0.31,0.36,2.2,0.079,11,31,0.99615,3.33,0.86,12,7
1095 | 6.6,0.725,0.09,5.5,0.117,9,17,0.99655,3.35,0.49,10.8,6
1096 | 9.4,0.4,0.47,2.5,0.087,6,20,0.99772,3.15,0.5,10.5,5
1097 | 6.6,0.725,0.09,5.5,0.117,9,17,0.99655,3.35,0.49,10.8,6
1098 | 8.6,0.52,0.38,1.5,0.096,5,18,0.99666,3.2,0.52,9.4,5
1099 | 8,0.31,0.45,2.1,0.216,5,16,0.99358,3.15,0.81,12.5,7
1100 | 8.6,0.52,0.38,1.5,0.096,5,18,0.99666,3.2,0.52,9.4,5
1101 | 8.4,0.34,0.42,2.1,0.072,23,36,0.99392,3.11,0.78,12.4,6
1102 | 7.4,0.49,0.27,2.1,0.071,14,25,0.99388,3.35,0.63,12,6
1103 | 6.1,0.48,0.09,1.7,0.078,18,30,0.99402,3.45,0.54,11.2,6
1104 | 7.4,0.49,0.27,2.1,0.071,14,25,0.99388,3.35,0.63,12,6
1105 | 8,0.48,0.34,2.2,0.073,16,25,0.9936,3.28,0.66,12.4,6
1106 | 6.3,0.57,0.28,2.1,0.048,13,49,0.99374,3.41,0.6,12.8,5
1107 | 8.2,0.23,0.42,1.9,0.069,9,17,0.99376,3.21,0.54,12.3,6
1108 | 9.1,0.3,0.41,2,0.068,10,24,0.99523,3.27,0.85,11.7,7
1109 | 8.1,0.78,0.1,3.3,0.09,4,13,0.99855,3.36,0.49,9.5,5
1110 | 10.8,0.47,0.43,2.1,0.171,27,66,0.9982,3.17,0.76,10.8,6
1111 | 8.3,0.53,0,1.4,0.07,6,14,0.99593,3.25,0.64,10,6
1112 | 5.4,0.42,0.27,2,0.092,23,55,0.99471,3.78,0.64,12.3,7
1113 | 7.9,0.33,0.41,1.5,0.056,6,35,0.99396,3.29,0.71,11,6
1114 | 8.9,0.24,0.39,1.6,0.074,3,10,0.99698,3.12,0.59,9.5,6
1115 | 5,0.4,0.5,4.3,0.046,29,80,0.9902,3.49,0.66,13.6,6
1116 | 7,0.69,0.07,2.5,0.091,15,21,0.99572,3.38,0.6,11.3,6
1117 | 7,0.69,0.07,2.5,0.091,15,21,0.99572,3.38,0.6,11.3,6
1118 | 7,0.69,0.07,2.5,0.091,15,21,0.99572,3.38,0.6,11.3,6
1119 | 7.1,0.39,0.12,2.1,0.065,14,24,0.99252,3.3,0.53,13.3,6
1120 | 5.6,0.66,0,2.5,0.066,7,15,0.99256,3.52,0.58,12.9,5
1121 | 7.9,0.54,0.34,2.5,0.076,8,17,0.99235,3.2,0.72,13.1,8
1122 | 6.6,0.5,0,1.8,0.062,21,28,0.99352,3.44,0.55,12.3,6
1123 | 6.3,0.47,0,1.4,0.055,27,33,0.9922,3.45,0.48,12.3,6
1124 | 10.7,0.4,0.37,1.9,0.081,17,29,0.99674,3.12,0.65,11.2,6
1125 | 6.5,0.58,0,2.2,0.096,3,13,0.99557,3.62,0.62,11.5,4
1126 | 8.8,0.24,0.35,1.7,0.055,13,27,0.99394,3.14,0.59,11.3,7
1127 | 5.8,0.29,0.26,1.7,0.063,3,11,0.9915,3.39,0.54,13.5,6
1128 | 6.3,0.76,0,2.9,0.072,26,52,0.99379,3.51,0.6,11.5,6
1129 | 10,0.43,0.33,2.7,0.095,28,89,0.9984,3.22,0.68,10,5
1130 | 10.5,0.43,0.35,3.3,0.092,24,70,0.99798,3.21,0.69,10.5,6
1131 | 9.1,0.6,0,1.9,0.058,5,10,0.9977,3.18,0.63,10.4,6
1132 | 5.9,0.19,0.21,1.7,0.045,57,135,0.99341,3.32,0.44,9.5,5
1133 | 7.4,0.36,0.34,1.8,0.075,18,38,0.9933,3.38,0.88,13.6,7
1134 | 7.2,0.48,0.07,5.5,0.089,10,18,0.99684,3.37,0.68,11.2,7
1135 | 8.5,0.28,0.35,1.7,0.061,6,15,0.99524,3.3,0.74,11.8,7
1136 | 8,0.25,0.43,1.7,0.067,22,50,0.9946,3.38,0.6,11.9,6
1137 | 10.4,0.52,0.45,2,0.08,6,13,0.99774,3.22,0.76,11.4,6
1138 | 10.4,0.52,0.45,2,0.08,6,13,0.99774,3.22,0.76,11.4,6
1139 | 7.5,0.41,0.15,3.7,0.104,29,94,0.99786,3.14,0.58,9.1,5
1140 | 8.2,0.51,0.24,2,0.079,16,86,0.99764,3.34,0.64,9.5,6
1141 | 7.3,0.4,0.3,1.7,0.08,33,79,0.9969,3.41,0.65,9.5,6
1142 | 8.2,0.38,0.32,2.5,0.08,24,71,0.99624,3.27,0.85,11,6
1143 | 6.9,0.45,0.11,2.4,0.043,6,12,0.99354,3.3,0.65,11.4,6
1144 | 7,0.22,0.3,1.8,0.065,16,20,0.99672,3.61,0.82,10,6
1145 | 7.3,0.32,0.23,2.3,0.066,35,70,0.99588,3.43,0.62,10.1,5
1146 | 8.2,0.2,0.43,2.5,0.076,31,51,0.99672,3.53,0.81,10.4,6
1147 | 7.8,0.5,0.12,1.8,0.178,6,21,0.996,3.28,0.87,9.8,6
1148 | 10,0.41,0.45,6.2,0.071,6,14,0.99702,3.21,0.49,11.8,7
1149 | 7.8,0.39,0.42,2,0.086,9,21,0.99526,3.39,0.66,11.6,6
1150 | 10,0.35,0.47,2,0.061,6,11,0.99585,3.23,0.52,12,6
1151 | 8.2,0.33,0.32,2.8,0.067,4,12,0.99473,3.3,0.76,12.8,7
1152 | 6.1,0.58,0.23,2.5,0.044,16,70,0.99352,3.46,0.65,12.5,6
1153 | 8.3,0.6,0.25,2.2,0.118,9,38,0.99616,3.15,0.53,9.8,5
1154 | 9.6,0.42,0.35,2.1,0.083,17,38,0.99622,3.23,0.66,11.1,6
1155 | 6.6,0.58,0,2.2,0.1,50,63,0.99544,3.59,0.68,11.4,6
1156 | 8.3,0.6,0.25,2.2,0.118,9,38,0.99616,3.15,0.53,9.8,5
1157 | 8.5,0.18,0.51,1.75,0.071,45,88,0.99524,3.33,0.76,11.8,7
1158 | 5.1,0.51,0.18,2.1,0.042,16,101,0.9924,3.46,0.87,12.9,7
1159 | 6.7,0.41,0.43,2.8,0.076,22,54,0.99572,3.42,1.16,10.6,6
1160 | 10.2,0.41,0.43,2.2,0.11,11,37,0.99728,3.16,0.67,10.8,5
1161 | 10.6,0.36,0.57,2.3,0.087,6,20,0.99676,3.14,0.72,11.1,7
1162 | 8.8,0.45,0.43,1.4,0.076,12,21,0.99551,3.21,0.75,10.2,6
1163 | 8.5,0.32,0.42,2.3,0.075,12,19,0.99434,3.14,0.71,11.8,7
1164 | 9,0.785,0.24,1.7,0.078,10,21,0.99692,3.29,0.67,10,5
1165 | 9,0.785,0.24,1.7,0.078,10,21,0.99692,3.29,0.67,10,5
1166 | 8.5,0.44,0.5,1.9,0.369,15,38,0.99634,3.01,1.1,9.4,5
1167 | 9.9,0.54,0.26,2,0.111,7,60,0.99709,2.94,0.98,10.2,5
1168 | 8.2,0.33,0.39,2.5,0.074,29,48,0.99528,3.32,0.88,12.4,7
1169 | 6.5,0.34,0.27,2.8,0.067,8,44,0.99384,3.21,0.56,12,6
1170 | 7.6,0.5,0.29,2.3,0.086,5,14,0.99502,3.32,0.62,11.5,6
1171 | 9.2,0.36,0.34,1.6,0.062,5,12,0.99667,3.2,0.67,10.5,6
1172 | 7.1,0.59,0,2.2,0.078,26,44,0.99522,3.42,0.68,10.8,6
1173 | 9.7,0.42,0.46,2.1,0.074,5,16,0.99649,3.27,0.74,12.3,6
1174 | 7.6,0.36,0.31,1.7,0.079,26,65,0.99716,3.46,0.62,9.5,6
1175 | 7.6,0.36,0.31,1.7,0.079,26,65,0.99716,3.46,0.62,9.5,6
1176 | 6.5,0.61,0,2.2,0.095,48,59,0.99541,3.61,0.7,11.5,6
1177 | 6.5,0.88,0.03,5.6,0.079,23,47,0.99572,3.58,0.5,11.2,4
1178 | 7.1,0.66,0,2.4,0.052,6,11,0.99318,3.35,0.66,12.7,7
1179 | 5.6,0.915,0,2.1,0.041,17,78,0.99346,3.68,0.73,11.4,5
1180 | 8.2,0.35,0.33,2.4,0.076,11,47,0.99599,3.27,0.81,11,6
1181 | 8.2,0.35,0.33,2.4,0.076,11,47,0.99599,3.27,0.81,11,6
1182 | 9.8,0.39,0.43,1.65,0.068,5,11,0.99478,3.19,0.46,11.4,5
1183 | 10.2,0.4,0.4,2.5,0.068,41,54,0.99754,3.38,0.86,10.5,6
1184 | 6.8,0.66,0.07,1.6,0.07,16,61,0.99572,3.29,0.6,9.3,5
1185 | 6.7,0.64,0.23,2.1,0.08,11,119,0.99538,3.36,0.7,10.9,5
1186 | 7,0.43,0.3,2,0.085,6,39,0.99346,3.33,0.46,11.9,6
1187 | 6.6,0.8,0.03,7.8,0.079,6,12,0.9963,3.52,0.5,12.2,5
1188 | 7,0.43,0.3,2,0.085,6,39,0.99346,3.33,0.46,11.9,6
1189 | 6.7,0.64,0.23,2.1,0.08,11,119,0.99538,3.36,0.7,10.9,5
1190 | 8.8,0.955,0.05,1.8,0.075,5,19,0.99616,3.3,0.44,9.6,4
1191 | 9.1,0.4,0.57,4.6,0.08,6,20,0.99652,3.28,0.57,12.5,6
1192 | 6.5,0.885,0,2.3,0.166,6,12,0.99551,3.56,0.51,10.8,5
1193 | 7.2,0.25,0.37,2.5,0.063,11,41,0.99439,3.52,0.8,12.4,7
1194 | 6.4,0.885,0,2.3,0.166,6,12,0.99551,3.56,0.51,10.8,5
1195 | 7,0.745,0.12,1.8,0.114,15,64,0.99588,3.22,0.59,9.5,6
1196 | 6.2,0.43,0.22,1.8,0.078,21,56,0.99633,3.52,0.6,9.5,6
1197 | 7.9,0.58,0.23,2.3,0.076,23,94,0.99686,3.21,0.58,9.5,6
1198 | 7.7,0.57,0.21,1.5,0.069,4,9,0.99458,3.16,0.54,9.8,6
1199 | 7.7,0.26,0.26,2,0.052,19,77,0.9951,3.15,0.79,10.9,6
1200 | 7.9,0.58,0.23,2.3,0.076,23,94,0.99686,3.21,0.58,9.5,6
1201 | 7.7,0.57,0.21,1.5,0.069,4,9,0.99458,3.16,0.54,9.8,6
1202 | 7.9,0.34,0.36,1.9,0.065,5,10,0.99419,3.27,0.54,11.2,7
1203 | 8.6,0.42,0.39,1.8,0.068,6,12,0.99516,3.35,0.69,11.7,8
1204 | 9.9,0.74,0.19,5.8,0.111,33,76,0.99878,3.14,0.55,9.4,5
1205 | 7.2,0.36,0.46,2.1,0.074,24,44,0.99534,3.4,0.85,11,7
1206 | 7.2,0.36,0.46,2.1,0.074,24,44,0.99534,3.4,0.85,11,7
1207 | 7.2,0.36,0.46,2.1,0.074,24,44,0.99534,3.4,0.85,11,7
1208 | 9.9,0.72,0.55,1.7,0.136,24,52,0.99752,3.35,0.94,10,5
1209 | 7.2,0.36,0.46,2.1,0.074,24,44,0.99534,3.4,0.85,11,7
1210 | 6.2,0.39,0.43,2,0.071,14,24,0.99428,3.45,0.87,11.2,7
1211 | 6.8,0.65,0.02,2.1,0.078,8,15,0.99498,3.35,0.62,10.4,6
1212 | 6.6,0.44,0.15,2.1,0.076,22,53,0.9957,3.32,0.62,9.3,5
1213 | 6.8,0.65,0.02,2.1,0.078,8,15,0.99498,3.35,0.62,10.4,6
1214 | 9.6,0.38,0.42,1.9,0.071,5,13,0.99659,3.15,0.75,10.5,6
1215 | 10.2,0.33,0.46,1.9,0.081,6,9,0.99628,3.1,0.48,10.4,6
1216 | 8.8,0.27,0.46,2.1,0.095,20,29,0.99488,3.26,0.56,11.3,6
1217 | 7.9,0.57,0.31,2,0.079,10,79,0.99677,3.29,0.69,9.5,6
1218 | 8.2,0.34,0.37,1.9,0.057,43,74,0.99408,3.23,0.81,12,6
1219 | 8.2,0.4,0.31,1.9,0.082,8,24,0.996,3.24,0.69,10.6,6
1220 | 9,0.39,0.4,1.3,0.044,25,50,0.99478,3.2,0.83,10.9,6
1221 | 10.9,0.32,0.52,1.8,0.132,17,44,0.99734,3.28,0.77,11.5,6
1222 | 10.9,0.32,0.52,1.8,0.132,17,44,0.99734,3.28,0.77,11.5,6
1223 | 8.1,0.53,0.22,2.2,0.078,33,89,0.99678,3.26,0.46,9.6,6
1224 | 10.5,0.36,0.47,2.2,0.074,9,23,0.99638,3.23,0.76,12,6
1225 | 12.6,0.39,0.49,2.5,0.08,8,20,0.9992,3.07,0.82,10.3,6
1226 | 9.2,0.46,0.23,2.6,0.091,18,77,0.99922,3.15,0.51,9.4,5
1227 | 7.5,0.58,0.03,4.1,0.08,27,46,0.99592,3.02,0.47,9.2,5
1228 | 9,0.58,0.25,2,0.104,8,21,0.99769,3.27,0.72,9.6,5
1229 | 5.1,0.42,0,1.8,0.044,18,88,0.99157,3.68,0.73,13.6,7
1230 | 7.6,0.43,0.29,2.1,0.075,19,66,0.99718,3.4,0.64,9.5,5
1231 | 7.7,0.18,0.34,2.7,0.066,15,58,0.9947,3.37,0.78,11.8,6
1232 | 7.8,0.815,0.01,2.6,0.074,48,90,0.99621,3.38,0.62,10.8,5
1233 | 7.6,0.43,0.29,2.1,0.075,19,66,0.99718,3.4,0.64,9.5,5
1234 | 10.2,0.23,0.37,2.2,0.057,14,36,0.99614,3.23,0.49,9.3,4
1235 | 7.1,0.75,0.01,2.2,0.059,11,18,0.99242,3.39,0.4,12.8,6
1236 | 6,0.33,0.32,12.9,0.054,6,113,0.99572,3.3,0.56,11.5,4
1237 | 7.8,0.55,0,1.7,0.07,7,17,0.99659,3.26,0.64,9.4,6
1238 | 7.1,0.75,0.01,2.2,0.059,11,18,0.99242,3.39,0.4,12.8,6
1239 | 8.1,0.73,0,2.5,0.081,12,24,0.99798,3.38,0.46,9.6,4
1240 | 6.5,0.67,0,4.3,0.057,11,20,0.99488,3.45,0.56,11.8,4
1241 | 7.5,0.61,0.2,1.7,0.076,36,60,0.99494,3.1,0.4,9.3,5
1242 | 9.8,0.37,0.39,2.5,0.079,28,65,0.99729,3.16,0.59,9.8,5
1243 | 9,0.4,0.41,2,0.058,15,40,0.99414,3.22,0.6,12.2,6
1244 | 8.3,0.56,0.22,2.4,0.082,10,86,0.9983,3.37,0.62,9.5,5
1245 | 5.9,0.29,0.25,13.4,0.067,72,160,0.99721,3.33,0.54,10.3,6
1246 | 7.4,0.55,0.19,1.8,0.082,15,34,0.99655,3.49,0.68,10.5,5
1247 | 7.4,0.74,0.07,1.7,0.086,15,48,0.99502,3.12,0.48,10,5
1248 | 7.4,0.55,0.19,1.8,0.082,15,34,0.99655,3.49,0.68,10.5,5
1249 | 6.9,0.41,0.33,2.2,0.081,22,36,0.9949,3.41,0.75,11.1,6
1250 | 7.1,0.6,0.01,2.3,0.079,24,37,0.99514,3.4,0.61,10.9,6
1251 | 7.1,0.6,0.01,2.3,0.079,24,37,0.99514,3.4,0.61,10.9,6
1252 | 7.5,0.58,0.14,2.2,0.077,27,60,0.9963,3.28,0.59,9.8,5
1253 | 7.1,0.72,0,1.8,0.123,6,14,0.99627,3.45,0.58,9.8,5
1254 | 7.9,0.66,0,1.4,0.096,6,13,0.99569,3.43,0.58,9.5,5
1255 | 7.8,0.7,0.06,1.9,0.079,20,35,0.99628,3.4,0.69,10.9,5
1256 | 6.1,0.64,0.02,2.4,0.069,26,46,0.99358,3.47,0.45,11,5
1257 | 7.5,0.59,0.22,1.8,0.082,43,60,0.99499,3.1,0.42,9.2,5
1258 | 7,0.58,0.28,4.8,0.085,12,69,0.99633,3.32,0.7,11,6
1259 | 6.8,0.64,0,2.7,0.123,15,33,0.99538,3.44,0.63,11.3,6
1260 | 6.8,0.64,0,2.7,0.123,15,33,0.99538,3.44,0.63,11.3,6
1261 | 8.6,0.635,0.68,1.8,0.403,19,56,0.99632,3.02,1.15,9.3,5
1262 | 6.3,1.02,0,2,0.083,17,24,0.99437,3.59,0.55,11.2,4
1263 | 9.8,0.45,0.38,2.5,0.081,34,66,0.99726,3.15,0.58,9.8,5
1264 | 8.2,0.78,0,2.2,0.089,13,26,0.9978,3.37,0.46,9.6,4
1265 | 8.5,0.37,0.32,1.8,0.066,26,51,0.99456,3.38,0.72,11.8,6
1266 | 7.2,0.57,0.05,2.3,0.081,16,36,0.99564,3.38,0.6,10.3,6
1267 | 7.2,0.57,0.05,2.3,0.081,16,36,0.99564,3.38,0.6,10.3,6
1268 | 10.4,0.43,0.5,2.3,0.068,13,19,0.996,3.1,0.87,11.4,6
1269 | 6.9,0.41,0.31,2,0.079,21,51,0.99668,3.47,0.55,9.5,6
1270 | 5.5,0.49,0.03,1.8,0.044,28,87,0.9908,3.5,0.82,14,8
1271 | 5,0.38,0.01,1.6,0.048,26,60,0.99084,3.7,0.75,14,6
1272 | 7.3,0.44,0.2,1.6,0.049,24,64,0.9935,3.38,0.57,11.7,6
1273 | 5.9,0.46,0,1.9,0.077,25,44,0.99385,3.5,0.53,11.2,5
1274 | 7.5,0.58,0.2,2,0.073,34,44,0.99494,3.1,0.43,9.3,5
1275 | 7.8,0.58,0.13,2.1,0.102,17,36,0.9944,3.24,0.53,11.2,6
1276 | 8,0.715,0.22,2.3,0.075,13,81,0.99688,3.24,0.54,9.5,6
1277 | 8.5,0.4,0.4,6.3,0.05,3,10,0.99566,3.28,0.56,12,4
1278 | 7,0.69,0,1.9,0.114,3,10,0.99636,3.35,0.6,9.7,6
1279 | 8,0.715,0.22,2.3,0.075,13,81,0.99688,3.24,0.54,9.5,6
1280 | 9.8,0.3,0.39,1.7,0.062,3,9,0.9948,3.14,0.57,11.5,7
1281 | 7.1,0.46,0.2,1.9,0.077,28,54,0.9956,3.37,0.64,10.4,6
1282 | 7.1,0.46,0.2,1.9,0.077,28,54,0.9956,3.37,0.64,10.4,6
1283 | 7.9,0.765,0,2,0.084,9,22,0.99619,3.33,0.68,10.9,6
1284 | 8.7,0.63,0.28,2.7,0.096,17,69,0.99734,3.26,0.63,10.2,6
1285 | 7,0.42,0.19,2.3,0.071,18,36,0.99476,3.39,0.56,10.9,5
1286 | 11.3,0.37,0.5,1.8,0.09,20,47,0.99734,3.15,0.57,10.5,5
1287 | 7.1,0.16,0.44,2.5,0.068,17,31,0.99328,3.35,0.54,12.4,6
1288 | 8,0.6,0.08,2.6,0.056,3,7,0.99286,3.22,0.37,13,5
1289 | 7,0.6,0.3,4.5,0.068,20,110,0.99914,3.3,1.17,10.2,5
1290 | 7,0.6,0.3,4.5,0.068,20,110,0.99914,3.3,1.17,10.2,5
1291 | 7.6,0.74,0,1.9,0.1,6,12,0.99521,3.36,0.59,11,5
1292 | 8.2,0.635,0.1,2.1,0.073,25,60,0.99638,3.29,0.75,10.9,6
1293 | 5.9,0.395,0.13,2.4,0.056,14,28,0.99362,3.62,0.67,12.4,6
1294 | 7.5,0.755,0,1.9,0.084,6,12,0.99672,3.34,0.49,9.7,4
1295 | 8.2,0.635,0.1,2.1,0.073,25,60,0.99638,3.29,0.75,10.9,6
1296 | 6.6,0.63,0,4.3,0.093,51,77.5,0.99558,3.2,0.45,9.5,5
1297 | 6.6,0.63,0,4.3,0.093,51,77.5,0.99558,3.2,0.45,9.5,5
1298 | 7.2,0.53,0.14,2.1,0.064,15,29,0.99323,3.35,0.61,12.1,6
1299 | 5.7,0.6,0,1.4,0.063,11,18,0.99191,3.45,0.56,12.2,6
1300 | 7.6,1.58,0,2.1,0.137,5,9,0.99476,3.5,0.4,10.9,3
1301 | 5.2,0.645,0,2.15,0.08,15,28,0.99444,3.78,0.61,12.5,6
1302 | 6.7,0.86,0.07,2,0.1,20,57,0.99598,3.6,0.74,11.7,6
1303 | 9.1,0.37,0.32,2.1,0.064,4,15,0.99576,3.3,0.8,11.2,6
1304 | 8,0.28,0.44,1.8,0.081,28,68,0.99501,3.36,0.66,11.2,5
1305 | 7.6,0.79,0.21,2.3,0.087,21,68,0.9955,3.12,0.44,9.2,5
1306 | 7.5,0.61,0.26,1.9,0.073,24,88,0.99612,3.3,0.53,9.8,5
1307 | 9.7,0.69,0.32,2.5,0.088,22,91,0.9979,3.29,0.62,10.1,5
1308 | 6.8,0.68,0.09,3.9,0.068,15,29,0.99524,3.41,0.52,11.1,4
1309 | 9.7,0.69,0.32,2.5,0.088,22,91,0.9979,3.29,0.62,10.1,5
1310 | 7,0.62,0.1,1.4,0.071,27,63,0.996,3.28,0.61,9.2,5
1311 | 7.5,0.61,0.26,1.9,0.073,24,88,0.99612,3.3,0.53,9.8,5
1312 | 6.5,0.51,0.15,3,0.064,12,27,0.9929,3.33,0.59,12.8,6
1313 | 8,1.18,0.21,1.9,0.083,14,41,0.99532,3.34,0.47,10.5,5
1314 | 7,0.36,0.21,2.3,0.086,20,65,0.99558,3.4,0.54,10.1,6
1315 | 7,0.36,0.21,2.4,0.086,24,69,0.99556,3.4,0.53,10.1,6
1316 | 7.5,0.63,0.27,2,0.083,17,91,0.99616,3.26,0.58,9.8,6
1317 | 5.4,0.74,0,1.2,0.041,16,46,0.99258,4.01,0.59,12.5,6
1318 | 9.9,0.44,0.46,2.2,0.091,10,41,0.99638,3.18,0.69,11.9,6
1319 | 7.5,0.63,0.27,2,0.083,17,91,0.99616,3.26,0.58,9.8,6
1320 | 9.1,0.76,0.68,1.7,0.414,18,64,0.99652,2.9,1.33,9.1,6
1321 | 9.7,0.66,0.34,2.6,0.094,12,88,0.99796,3.26,0.66,10.1,5
1322 | 5,0.74,0,1.2,0.041,16,46,0.99258,4.01,0.59,12.5,6
1323 | 9.1,0.34,0.42,1.8,0.058,9,18,0.99392,3.18,0.55,11.4,5
1324 | 9.1,0.36,0.39,1.8,0.06,21,55,0.99495,3.18,0.82,11,7
1325 | 6.7,0.46,0.24,1.7,0.077,18,34,0.9948,3.39,0.6,10.6,6
1326 | 6.7,0.46,0.24,1.7,0.077,18,34,0.9948,3.39,0.6,10.6,6
1327 | 6.7,0.46,0.24,1.7,0.077,18,34,0.9948,3.39,0.6,10.6,6
1328 | 6.7,0.46,0.24,1.7,0.077,18,34,0.9948,3.39,0.6,10.6,6
1329 | 6.5,0.52,0.11,1.8,0.073,13,38,0.9955,3.34,0.52,9.3,5
1330 | 7.4,0.6,0.26,2.1,0.083,17,91,0.99616,3.29,0.56,9.8,6
1331 | 7.4,0.6,0.26,2.1,0.083,17,91,0.99616,3.29,0.56,9.8,6
1332 | 7.8,0.87,0.26,3.8,0.107,31,67,0.99668,3.26,0.46,9.2,5
1333 | 8.4,0.39,0.1,1.7,0.075,6,25,0.99581,3.09,0.43,9.7,6
1334 | 9.1,0.775,0.22,2.2,0.079,12,48,0.9976,3.18,0.51,9.6,5
1335 | 7.2,0.835,0,2,0.166,4,11,0.99608,3.39,0.52,10,5
1336 | 6.6,0.58,0.02,2.4,0.069,19,40,0.99387,3.38,0.66,12.6,6
1337 | 6,0.5,0,1.4,0.057,15,26,0.99448,3.36,0.45,9.5,5
1338 | 6,0.5,0,1.4,0.057,15,26,0.99448,3.36,0.45,9.5,5
1339 | 6,0.5,0,1.4,0.057,15,26,0.99448,3.36,0.45,9.5,5
1340 | 7.5,0.51,0.02,1.7,0.084,13,31,0.99538,3.36,0.54,10.5,6
1341 | 7.5,0.51,0.02,1.7,0.084,13,31,0.99538,3.36,0.54,10.5,6
1342 | 7.5,0.51,0.02,1.7,0.084,13,31,0.99538,3.36,0.54,10.5,6
1343 | 7.6,0.54,0.02,1.7,0.085,17,31,0.99589,3.37,0.51,10.4,6
1344 | 7.5,0.51,0.02,1.7,0.084,13,31,0.99538,3.36,0.54,10.5,6
1345 | 11.5,0.42,0.48,2.6,0.077,8,20,0.99852,3.09,0.53,11,5
1346 | 8.2,0.44,0.24,2.3,0.063,10,28,0.99613,3.25,0.53,10.2,6
1347 | 6.1,0.59,0.01,2.1,0.056,5,13,0.99472,3.52,0.56,11.4,5
1348 | 7.2,0.655,0.03,1.8,0.078,7,12,0.99587,3.34,0.39,9.5,5
1349 | 7.2,0.655,0.03,1.8,0.078,7,12,0.99587,3.34,0.39,9.5,5
1350 | 6.9,0.57,0,2.8,0.081,21,41,0.99518,3.41,0.52,10.8,5
1351 | 9,0.6,0.29,2,0.069,32,73,0.99654,3.34,0.57,10,5
1352 | 7.2,0.62,0.01,2.3,0.065,8,46,0.99332,3.32,0.51,11.8,6
1353 | 7.6,0.645,0.03,1.9,0.086,14,57,0.9969,3.37,0.46,10.3,5
1354 | 7.6,0.645,0.03,1.9,0.086,14,57,0.9969,3.37,0.46,10.3,5
1355 | 7.2,0.58,0.03,2.3,0.077,7,28,0.99568,3.35,0.52,10,5
1356 | 6.1,0.32,0.25,1.8,0.086,5,32,0.99464,3.36,0.44,10.1,5
1357 | 6.1,0.34,0.25,1.8,0.084,4,28,0.99464,3.36,0.44,10.1,5
1358 | 7.3,0.43,0.24,2.5,0.078,27,67,0.99648,3.6,0.59,11.1,6
1359 | 7.4,0.64,0.17,5.4,0.168,52,98,0.99736,3.28,0.5,9.5,5
1360 | 11.6,0.475,0.4,1.4,0.091,6,28,0.99704,3.07,0.65,10.0333333333333,6
1361 | 9.2,0.54,0.31,2.3,0.112,11,38,0.99699,3.24,0.56,10.9,5
1362 | 8.3,0.85,0.14,2.5,0.093,13,54,0.99724,3.36,0.54,10.1,5
1363 | 11.6,0.475,0.4,1.4,0.091,6,28,0.99704,3.07,0.65,10.0333333333333,6
1364 | 8,0.83,0.27,2,0.08,11,63,0.99652,3.29,0.48,9.8,4
1365 | 7.2,0.605,0.02,1.9,0.096,10,31,0.995,3.46,0.53,11.8,6
1366 | 7.8,0.5,0.09,2.2,0.115,10,42,0.9971,3.18,0.62,9.5,5
1367 | 7.3,0.74,0.08,1.7,0.094,10,45,0.99576,3.24,0.5,9.8,5
1368 | 6.9,0.54,0.3,2.2,0.088,9,105,0.99725,3.25,1.18,10.5,6
1369 | 8,0.77,0.32,2.1,0.079,16,74,0.99656,3.27,0.5,9.8,6
1370 | 6.6,0.61,0,1.6,0.069,4,8,0.99396,3.33,0.37,10.4,4
1371 | 8.7,0.78,0.51,1.7,0.415,12,66,0.99623,3,1.17,9.2,5
1372 | 7.5,0.58,0.56,3.1,0.153,5,14,0.99476,3.21,1.03,11.6,6
1373 | 8.7,0.78,0.51,1.7,0.415,12,66,0.99623,3,1.17,9.2,5
1374 | 7.7,0.75,0.27,3.8,0.11,34,89,0.99664,3.24,0.45,9.3,5
1375 | 6.8,0.815,0,1.2,0.267,16,29,0.99471,3.32,0.51,9.8,3
1376 | 7.2,0.56,0.26,2,0.083,13,100,0.99586,3.26,0.52,9.9,5
1377 | 8.2,0.885,0.2,1.4,0.086,7,31,0.9946,3.11,0.46,10,5
1378 | 5.2,0.49,0.26,2.3,0.09,23,74,0.9953,3.71,0.62,12.2,6
1379 | 7.2,0.45,0.15,2,0.078,10,28,0.99609,3.29,0.51,9.9,6
1380 | 7.5,0.57,0.02,2.6,0.077,11,35,0.99557,3.36,0.62,10.8,6
1381 | 7.5,0.57,0.02,2.6,0.077,11,35,0.99557,3.36,0.62,10.8,6
1382 | 6.8,0.83,0.09,1.8,0.074,4,25,0.99534,3.38,0.45,9.6,5
1383 | 8,0.6,0.22,2.1,0.08,25,105,0.99613,3.3,0.49,9.9,5
1384 | 8,0.6,0.22,2.1,0.08,25,105,0.99613,3.3,0.49,9.9,5
1385 | 7.1,0.755,0.15,1.8,0.107,20,84,0.99593,3.19,0.5,9.5,5
1386 | 8,0.81,0.25,3.4,0.076,34,85,0.99668,3.19,0.42,9.2,5
1387 | 7.4,0.64,0.07,1.8,0.1,8,23,0.9961,3.3,0.58,9.6,5
1388 | 7.4,0.64,0.07,1.8,0.1,8,23,0.9961,3.3,0.58,9.6,5
1389 | 6.6,0.64,0.31,6.1,0.083,7,49,0.99718,3.35,0.68,10.3,5
1390 | 6.7,0.48,0.02,2.2,0.08,36,111,0.99524,3.1,0.53,9.7,5
1391 | 6,0.49,0,2.3,0.068,15,33,0.99292,3.58,0.59,12.5,6
1392 | 8,0.64,0.22,2.4,0.094,5,33,0.99612,3.37,0.58,11,5
1393 | 7.1,0.62,0.06,1.3,0.07,5,12,0.9942,3.17,0.48,9.8,5
1394 | 8,0.52,0.25,2,0.078,19,59,0.99612,3.3,0.48,10.2,5
1395 | 6.4,0.57,0.14,3.9,0.07,27,73,0.99669,3.32,0.48,9.2,5
1396 | 8.6,0.685,0.1,1.6,0.092,3,12,0.99745,3.31,0.65,9.55,6
1397 | 8.7,0.675,0.1,1.6,0.09,4,11,0.99745,3.31,0.65,9.55,5
1398 | 7.3,0.59,0.26,2,0.08,17,104,0.99584,3.28,0.52,9.9,5
1399 | 7,0.6,0.12,2.2,0.083,13,28,0.9966,3.52,0.62,10.2,7
1400 | 7.2,0.67,0,2.2,0.068,10,24,0.9956,3.42,0.72,11.1,6
1401 | 7.9,0.69,0.21,2.1,0.08,33,141,0.9962,3.25,0.51,9.9,5
1402 | 7.9,0.69,0.21,2.1,0.08,33,141,0.9962,3.25,0.51,9.9,5
1403 | 7.6,0.3,0.42,2,0.052,6,24,0.9963,3.44,0.82,11.9,6
1404 | 7.2,0.33,0.33,1.7,0.061,3,13,0.996,3.23,1.1,10,8
1405 | 8,0.5,0.39,2.6,0.082,12,46,0.9985,3.43,0.62,10.7,6
1406 | 7.7,0.28,0.3,2,0.062,18,34,0.9952,3.28,0.9,11.3,7
1407 | 8.2,0.24,0.34,5.1,0.062,8,22,0.9974,3.22,0.94,10.9,6
1408 | 6,0.51,0,2.1,0.064,40,54,0.995,3.54,0.93,10.7,6
1409 | 8.1,0.29,0.36,2.2,0.048,35,53,0.995,3.27,1.01,12.4,7
1410 | 6,0.51,0,2.1,0.064,40,54,0.995,3.54,0.93,10.7,6
1411 | 6.6,0.96,0,1.8,0.082,5,16,0.9936,3.5,0.44,11.9,6
1412 | 6.4,0.47,0.4,2.4,0.071,8,19,0.9963,3.56,0.73,10.6,6
1413 | 8.2,0.24,0.34,5.1,0.062,8,22,0.9974,3.22,0.94,10.9,6
1414 | 9.9,0.57,0.25,2,0.104,12,89,0.9963,3.04,0.9,10.1,5
1415 | 10,0.32,0.59,2.2,0.077,3,15,0.9994,3.2,0.78,9.6,5
1416 | 6.2,0.58,0,1.6,0.065,8,18,0.9966,3.56,0.84,9.4,5
1417 | 10,0.32,0.59,2.2,0.077,3,15,0.9994,3.2,0.78,9.6,5
1418 | 7.3,0.34,0.33,2.5,0.064,21,37,0.9952,3.35,0.77,12.1,7
1419 | 7.8,0.53,0.01,1.6,0.077,3,19,0.995,3.16,0.46,9.8,5
1420 | 7.7,0.64,0.21,2.2,0.077,32,133,0.9956,3.27,0.45,9.9,5
1421 | 7.8,0.53,0.01,1.6,0.077,3,19,0.995,3.16,0.46,9.8,5
1422 | 7.5,0.4,0.18,1.6,0.079,24,58,0.9965,3.34,0.58,9.4,5
1423 | 7,0.54,0,2.1,0.079,39,55,0.9956,3.39,0.84,11.4,6
1424 | 6.4,0.53,0.09,3.9,0.123,14,31,0.9968,3.5,0.67,11,4
1425 | 8.3,0.26,0.37,1.4,0.076,8,23,0.9974,3.26,0.7,9.6,6
1426 | 8.3,0.26,0.37,1.4,0.076,8,23,0.9974,3.26,0.7,9.6,6
1427 | 7.7,0.23,0.37,1.8,0.046,23,60,0.9971,3.41,0.71,12.1,6
1428 | 7.6,0.41,0.33,2.5,0.078,6,23,0.9957,3.3,0.58,11.2,5
1429 | 7.8,0.64,0,1.9,0.072,27,55,0.9962,3.31,0.63,11,5
1430 | 7.9,0.18,0.4,2.2,0.049,38,67,0.996,3.33,0.93,11.3,5
1431 | 7.4,0.41,0.24,1.8,0.066,18,47,0.9956,3.37,0.62,10.4,5
1432 | 7.6,0.43,0.31,2.1,0.069,13,74,0.9958,3.26,0.54,9.9,6
1433 | 5.9,0.44,0,1.6,0.042,3,11,0.9944,3.48,0.85,11.7,6
1434 | 6.1,0.4,0.16,1.8,0.069,11,25,0.9955,3.42,0.74,10.1,7
1435 | 10.2,0.54,0.37,15.4,0.214,55,95,1.00369,3.18,0.77,9,6
1436 | 10.2,0.54,0.37,15.4,0.214,55,95,1.00369,3.18,0.77,9,6
1437 | 10,0.38,0.38,1.6,0.169,27,90,0.99914,3.15,0.65,8.5,5
1438 | 6.8,0.915,0.29,4.8,0.07,15,39,0.99577,3.53,0.54,11.1,5
1439 | 7,0.59,0,1.7,0.052,3,8,0.996,3.41,0.47,10.3,5
1440 | 7.3,0.67,0.02,2.2,0.072,31,92,0.99566,3.32,0.68,11.0666666666667,6
1441 | 7.2,0.37,0.32,2,0.062,15,28,0.9947,3.23,0.73,11.3,7
1442 | 7.4,0.785,0.19,5.2,0.094,19,98,0.99713,3.16,0.52,9.56666666666667,6
1443 | 6.9,0.63,0.02,1.9,0.078,18,30,0.99712,3.4,0.75,9.8,5
1444 | 6.9,0.58,0.2,1.75,0.058,8,22,0.99322,3.38,0.49,11.7,5
1445 | 7.3,0.67,0.02,2.2,0.072,31,92,0.99566,3.32,0.68,11.1,6
1446 | 7.4,0.785,0.19,5.2,0.094,19,98,0.99713,3.16,0.52,9.6,6
1447 | 6.9,0.63,0.02,1.9,0.078,18,30,0.99712,3.4,0.75,9.8,5
1448 | 6.8,0.67,0,1.9,0.08,22,39,0.99701,3.4,0.74,9.7,5
1449 | 6.9,0.58,0.01,1.9,0.08,40,54,0.99683,3.4,0.73,9.7,5
1450 | 7.2,0.38,0.31,2,0.056,15,29,0.99472,3.23,0.76,11.3,8
1451 | 7.2,0.37,0.32,2,0.062,15,28,0.9947,3.23,0.73,11.3,7
1452 | 7.8,0.32,0.44,2.7,0.104,8,17,0.99732,3.33,0.78,11,7
1453 | 6.6,0.58,0.02,2,0.062,37,53,0.99374,3.35,0.76,11.6,7
1454 | 7.6,0.49,0.33,1.9,0.074,27,85,0.99706,3.41,0.58,9,5
1455 | 11.7,0.45,0.63,2.2,0.073,7,23,0.99974,3.21,0.69,10.9,6
1456 | 6.5,0.9,0,1.6,0.052,9,17,0.99467,3.5,0.63,10.9,6
1457 | 6,0.54,0.06,1.8,0.05,38,89,0.99236,3.3,0.5,10.55,6
1458 | 7.6,0.49,0.33,1.9,0.074,27,85,0.99706,3.41,0.58,9,5
1459 | 8.4,0.29,0.4,1.7,0.067,8,20,0.99603,3.39,0.6,10.5,5
1460 | 7.9,0.2,0.35,1.7,0.054,7,15,0.99458,3.32,0.8,11.9,7
1461 | 6.4,0.42,0.09,2.3,0.054,34,64,0.99724,3.41,0.68,10.4,6
1462 | 6.2,0.785,0,2.1,0.06,6,13,0.99664,3.59,0.61,10,4
1463 | 6.8,0.64,0.03,2.3,0.075,14,31,0.99545,3.36,0.58,10.4,6
1464 | 6.9,0.63,0.01,2.4,0.076,14,39,0.99522,3.34,0.53,10.8,6
1465 | 6.8,0.59,0.1,1.7,0.063,34,53,0.9958,3.41,0.67,9.7,5
1466 | 6.8,0.59,0.1,1.7,0.063,34,53,0.9958,3.41,0.67,9.7,5
1467 | 7.3,0.48,0.32,2.1,0.062,31,54,0.99728,3.3,0.65,10,7
1468 | 6.7,1.04,0.08,2.3,0.067,19,32,0.99648,3.52,0.57,11,4
1469 | 7.3,0.48,0.32,2.1,0.062,31,54,0.99728,3.3,0.65,10,7
1470 | 7.3,0.98,0.05,2.1,0.061,20,49,0.99705,3.31,0.55,9.7,3
1471 | 10,0.69,0.11,1.4,0.084,8,24,0.99578,2.88,0.47,9.7,5
1472 | 6.7,0.7,0.08,3.75,0.067,8,16,0.99334,3.43,0.52,12.6,5
1473 | 7.6,0.35,0.6,2.6,0.073,23,44,0.99656,3.38,0.79,11.1,6
1474 | 6.1,0.6,0.08,1.8,0.071,14,45,0.99336,3.38,0.54,11,5
1475 | 9.9,0.5,0.5,13.8,0.205,48,82,1.00242,3.16,0.75,8.8,5
1476 | 5.3,0.47,0.11,2.2,0.048,16,89,0.99182,3.54,0.88,13.5666666666667,7
1477 | 9.9,0.5,0.5,13.8,0.205,48,82,1.00242,3.16,0.75,8.8,5
1478 | 5.3,0.47,0.11,2.2,0.048,16,89,0.99182,3.54,0.88,13.6,7
1479 | 7.1,0.875,0.05,5.7,0.082,3,14,0.99808,3.4,0.52,10.2,3
1480 | 8.2,0.28,0.6,3,0.104,10,22,0.99828,3.39,0.68,10.6,5
1481 | 5.6,0.62,0.03,1.5,0.08,6,13,0.99498,3.66,0.62,10.1,4
1482 | 8.2,0.28,0.6,3,0.104,10,22,0.99828,3.39,0.68,10.6,5
1483 | 7.2,0.58,0.54,2.1,0.114,3,9,0.99719,3.33,0.57,10.3,4
1484 | 8.1,0.33,0.44,1.5,0.042,6,12,0.99542,3.35,0.61,10.7,5
1485 | 6.8,0.91,0.06,2,0.06,4,11,0.99592,3.53,0.64,10.9,4
1486 | 7,0.655,0.16,2.1,0.074,8,25,0.99606,3.37,0.55,9.7,5
1487 | 6.8,0.68,0.21,2.1,0.07,9,23,0.99546,3.38,0.6,10.3,5
1488 | 6,0.64,0.05,1.9,0.066,9,17,0.99496,3.52,0.78,10.6,5
1489 | 5.6,0.54,0.04,1.7,0.049,5,13,0.9942,3.72,0.58,11.4,5
1490 | 6.2,0.57,0.1,2.1,0.048,4,11,0.99448,3.44,0.76,10.8,6
1491 | 7.1,0.22,0.49,1.8,0.039,8,18,0.99344,3.39,0.56,12.4,6
1492 | 5.6,0.54,0.04,1.7,0.049,5,13,0.9942,3.72,0.58,11.4,5
1493 | 6.2,0.65,0.06,1.6,0.05,6,18,0.99348,3.57,0.54,11.95,5
1494 | 7.7,0.54,0.26,1.9,0.089,23,147,0.99636,3.26,0.59,9.7,5
1495 | 6.4,0.31,0.09,1.4,0.066,15,28,0.99459,3.42,0.7,10,7
1496 | 7,0.43,0.02,1.9,0.08,15,28,0.99492,3.35,0.81,10.6,6
1497 | 7.7,0.54,0.26,1.9,0.089,23,147,0.99636,3.26,0.59,9.7,5
1498 | 6.9,0.74,0.03,2.3,0.054,7,16,0.99508,3.45,0.63,11.5,6
1499 | 6.6,0.895,0.04,2.3,0.068,7,13,0.99582,3.53,0.58,10.8,6
1500 | 6.9,0.74,0.03,2.3,0.054,7,16,0.99508,3.45,0.63,11.5,6
1501 | 7.5,0.725,0.04,1.5,0.076,8,15,0.99508,3.26,0.53,9.6,5
1502 | 7.8,0.82,0.29,4.3,0.083,21,64,0.99642,3.16,0.53,9.4,5
1503 | 7.3,0.585,0.18,2.4,0.078,15,60,0.99638,3.31,0.54,9.8,5
1504 | 6.2,0.44,0.39,2.5,0.077,6,14,0.99555,3.51,0.69,11,6
1505 | 7.5,0.38,0.57,2.3,0.106,5,12,0.99605,3.36,0.55,11.4,6
1506 | 6.7,0.76,0.02,1.8,0.078,6,12,0.996,3.55,0.63,9.95,3
1507 | 6.8,0.81,0.05,2,0.07,6,14,0.99562,3.51,0.66,10.8,6
1508 | 7.5,0.38,0.57,2.3,0.106,5,12,0.99605,3.36,0.55,11.4,6
1509 | 7.1,0.27,0.6,2.1,0.074,17,25,0.99814,3.38,0.72,10.6,6
1510 | 7.9,0.18,0.4,1.8,0.062,7,20,0.9941,3.28,0.7,11.1,5
1511 | 6.4,0.36,0.21,2.2,0.047,26,48,0.99661,3.47,0.77,9.7,6
1512 | 7.1,0.69,0.04,2.1,0.068,19,27,0.99712,3.44,0.67,9.8,5
1513 | 6.4,0.79,0.04,2.2,0.061,11,17,0.99588,3.53,0.65,10.4,6
1514 | 6.4,0.56,0.15,1.8,0.078,17,65,0.99294,3.33,0.6,10.5,6
1515 | 6.9,0.84,0.21,4.1,0.074,16,65,0.99842,3.53,0.72,9.23333333333333,6
1516 | 6.9,0.84,0.21,4.1,0.074,16,65,0.99842,3.53,0.72,9.25,6
1517 | 6.1,0.32,0.25,2.3,0.071,23,58,0.99633,3.42,0.97,10.6,5
1518 | 6.5,0.53,0.06,2,0.063,29,44,0.99489,3.38,0.83,10.3,6
1519 | 7.4,0.47,0.46,2.2,0.114,7,20,0.99647,3.32,0.63,10.5,5
1520 | 6.6,0.7,0.08,2.6,0.106,14,27,0.99665,3.44,0.58,10.2,5
1521 | 6.5,0.53,0.06,2,0.063,29,44,0.99489,3.38,0.83,10.3,6
1522 | 6.9,0.48,0.2,1.9,0.082,9,23,0.99585,3.39,0.43,9.05,4
1523 | 6.1,0.32,0.25,2.3,0.071,23,58,0.99633,3.42,0.97,10.6,5
1524 | 6.8,0.48,0.25,2,0.076,29,61,0.9953,3.34,0.6,10.4,5
1525 | 6,0.42,0.19,2,0.075,22,47,0.99522,3.39,0.78,10,6
1526 | 6.7,0.48,0.08,2.1,0.064,18,34,0.99552,3.33,0.64,9.7,5
1527 | 6.8,0.47,0.08,2.2,0.064,18,38,0.99553,3.3,0.65,9.6,6
1528 | 7.1,0.53,0.07,1.7,0.071,15,24,0.9951,3.29,0.66,10.8,6
1529 | 7.9,0.29,0.49,2.2,0.096,21,59,0.99714,3.31,0.67,10.1,6
1530 | 7.1,0.69,0.08,2.1,0.063,42,52,0.99608,3.42,0.6,10.2,6
1531 | 6.6,0.44,0.09,2.2,0.063,9,18,0.99444,3.42,0.69,11.3,6
1532 | 6.1,0.705,0.1,2.8,0.081,13,28,0.99631,3.6,0.66,10.2,5
1533 | 7.2,0.53,0.13,2,0.058,18,22,0.99573,3.21,0.68,9.9,6
1534 | 8,0.39,0.3,1.9,0.074,32,84,0.99717,3.39,0.61,9,5
1535 | 6.6,0.56,0.14,2.4,0.064,13,29,0.99397,3.42,0.62,11.7,7
1536 | 7,0.55,0.13,2.2,0.075,15,35,0.9959,3.36,0.59,9.7,6
1537 | 6.1,0.53,0.08,1.9,0.077,24,45,0.99528,3.6,0.68,10.3,6
1538 | 5.4,0.58,0.08,1.9,0.059,20,31,0.99484,3.5,0.64,10.2,6
1539 | 6.2,0.64,0.09,2.5,0.081,15,26,0.99538,3.57,0.63,12,5
1540 | 7.2,0.39,0.32,1.8,0.065,34,60,0.99714,3.46,0.78,9.9,5
1541 | 6.2,0.52,0.08,4.4,0.071,11,32,0.99646,3.56,0.63,11.6,6
1542 | 7.4,0.25,0.29,2.2,0.054,19,49,0.99666,3.4,0.76,10.9,7
1543 | 6.7,0.855,0.02,1.9,0.064,29,38,0.99472,3.3,0.56,10.75,6
1544 | 11.1,0.44,0.42,2.2,0.064,14,19,0.99758,3.25,0.57,10.4,6
1545 | 8.4,0.37,0.43,2.3,0.063,12,19,0.9955,3.17,0.81,11.2,7
1546 | 6.5,0.63,0.33,1.8,0.059,16,28,0.99531,3.36,0.64,10.1,6
1547 | 7,0.57,0.02,2,0.072,17,26,0.99575,3.36,0.61,10.2,5
1548 | 6.3,0.6,0.1,1.6,0.048,12,26,0.99306,3.55,0.51,12.1,5
1549 | 11.2,0.4,0.5,2,0.099,19,50,0.99783,3.1,0.58,10.4,5
1550 | 7.4,0.36,0.3,1.8,0.074,17,24,0.99419,3.24,0.7,11.4,8
1551 | 7.1,0.68,0,2.3,0.087,17,26,0.99783,3.45,0.53,9.5,5
1552 | 7.1,0.67,0,2.3,0.083,18,27,0.99768,3.44,0.54,9.4,5
1553 | 6.3,0.68,0.01,3.7,0.103,32,54,0.99586,3.51,0.66,11.3,6
1554 | 7.3,0.735,0,2.2,0.08,18,28,0.99765,3.41,0.6,9.4,5
1555 | 6.6,0.855,0.02,2.4,0.062,15,23,0.99627,3.54,0.6,11,6
1556 | 7,0.56,0.17,1.7,0.065,15,24,0.99514,3.44,0.68,10.55,7
1557 | 6.6,0.88,0.04,2.2,0.066,12,20,0.99636,3.53,0.56,9.9,5
1558 | 6.6,0.855,0.02,2.4,0.062,15,23,0.99627,3.54,0.6,11,6
1559 | 6.9,0.63,0.33,6.7,0.235,66,115,0.99787,3.22,0.56,9.5,5
1560 | 7.8,0.6,0.26,2,0.08,31,131,0.99622,3.21,0.52,9.9,5
1561 | 7.8,0.6,0.26,2,0.08,31,131,0.99622,3.21,0.52,9.9,5
1562 | 7.8,0.6,0.26,2,0.08,31,131,0.99622,3.21,0.52,9.9,5
1563 | 7.2,0.695,0.13,2,0.076,12,20,0.99546,3.29,0.54,10.1,5
1564 | 7.2,0.695,0.13,2,0.076,12,20,0.99546,3.29,0.54,10.1,5
1565 | 7.2,0.695,0.13,2,0.076,12,20,0.99546,3.29,0.54,10.1,5
1566 | 6.7,0.67,0.02,1.9,0.061,26,42,0.99489,3.39,0.82,10.9,6
1567 | 6.7,0.16,0.64,2.1,0.059,24,52,0.99494,3.34,0.71,11.2,6
1568 | 7.2,0.695,0.13,2,0.076,12,20,0.99546,3.29,0.54,10.1,5
1569 | 7,0.56,0.13,1.6,0.077,25,42,0.99629,3.34,0.59,9.2,5
1570 | 6.2,0.51,0.14,1.9,0.056,15,34,0.99396,3.48,0.57,11.5,6
1571 | 6.4,0.36,0.53,2.2,0.23,19,35,0.9934,3.37,0.93,12.4,6
1572 | 6.4,0.38,0.14,2.2,0.038,15,25,0.99514,3.44,0.65,11.1,6
1573 | 7.3,0.69,0.32,2.2,0.069,35,104,0.99632,3.33,0.51,9.5,5
1574 | 6,0.58,0.2,2.4,0.075,15,50,0.99467,3.58,0.67,12.5,6
1575 | 5.6,0.31,0.78,13.9,0.074,23,92,0.99677,3.39,0.48,10.5,6
1576 | 7.5,0.52,0.4,2.2,0.06,12,20,0.99474,3.26,0.64,11.8,6
1577 | 8,0.3,0.63,1.6,0.081,16,29,0.99588,3.3,0.78,10.8,6
1578 | 6.2,0.7,0.15,5.1,0.076,13,27,0.99622,3.54,0.6,11.9,6
1579 | 6.8,0.67,0.15,1.8,0.118,13,20,0.9954,3.42,0.67,11.3,6
1580 | 6.2,0.56,0.09,1.7,0.053,24,32,0.99402,3.54,0.6,11.3,5
1581 | 7.4,0.35,0.33,2.4,0.068,9,26,0.9947,3.36,0.6,11.9,6
1582 | 6.2,0.56,0.09,1.7,0.053,24,32,0.99402,3.54,0.6,11.3,5
1583 | 6.1,0.715,0.1,2.6,0.053,13,27,0.99362,3.57,0.5,11.9,5
1584 | 6.2,0.46,0.29,2.1,0.074,32,98,0.99578,3.33,0.62,9.8,5
1585 | 6.7,0.32,0.44,2.4,0.061,24,34,0.99484,3.29,0.8,11.6,7
1586 | 7.2,0.39,0.44,2.6,0.066,22,48,0.99494,3.3,0.84,11.5,6
1587 | 7.5,0.31,0.41,2.4,0.065,34,60,0.99492,3.34,0.85,11.4,6
1588 | 5.8,0.61,0.11,1.8,0.066,18,28,0.99483,3.55,0.66,10.9,6
1589 | 7.2,0.66,0.33,2.5,0.068,34,102,0.99414,3.27,0.78,12.8,6
1590 | 6.6,0.725,0.2,7.8,0.073,29,79,0.9977,3.29,0.54,9.2,5
1591 | 6.3,0.55,0.15,1.8,0.077,26,35,0.99314,3.32,0.82,11.6,6
1592 | 5.4,0.74,0.09,1.7,0.089,16,26,0.99402,3.67,0.56,11.6,6
1593 | 6.3,0.51,0.13,2.3,0.076,29,40,0.99574,3.42,0.75,11,6
1594 | 6.8,0.62,0.08,1.9,0.068,28,38,0.99651,3.42,0.82,9.5,6
1595 | 6.2,0.6,0.08,2,0.09,32,44,0.9949,3.45,0.58,10.5,5
1596 | 5.9,0.55,0.1,2.2,0.062,39,51,0.99512,3.52,0.76,11.2,6
1597 | 6.3,0.51,0.13,2.3,0.076,29,40,0.99574,3.42,0.75,11,6
1598 | 5.9,0.645,0.12,2,0.075,32,44,0.99547,3.57,0.71,10.2,5
1599 | 6,0.31,0.47,3.6,0.067,18,42,0.99549,3.39,0.66,11,6
1600 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Minimalistic Multiple Layer Neural Network from Scratch in Python
2 | * Author: Umberto Griffo
3 |
4 | Inspired by [1] and [2], I implemented a **Minimalistic Multiple Layer Neural Network** from Scratch in Python.
5 | You can use it to better understand the core concepts of neural networks.
6 |
7 | ## Software Environment
8 | * Python 3.0 - 3.5
9 |
10 | ## Features
11 | - Backpropagation algorithm with **Stochastic Gradient Descent**. During training, each forward/backward pass uses a single training example.
12 | - Supporting multiple hidden layers.
13 | - **Classification** (MultilayerNnClassifier.py).
14 | - **Regression** (MultilayerNnRegressor.py).
15 | - **Activation Function**: Linear, ReLU, Sigmoid, Tanh.
16 | - **Classification Evaluator**: Accuracy.
17 | - **Regression Evaluator**: Mean Squared Error (MSE), Root Mean Squared Error (RMSE), Coefficient of Determination (R^2); see the sketch below.
18 |
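A minimal sketch of those three regression metrics, assuming plain Python lists of actual and predicted values (not necessarily the exact code in RegressionEvaluator.py):

```python
from math import sqrt

def regression_metrics(actual, predicted):
    n = len(actual)
    # Mean Squared Error: average of the squared residuals
    mse = sum((a - p) ** 2 for a, p in zip(actual, predicted)) / n
    # Root Mean Squared Error: back in the units of the target variable
    rmse = sqrt(mse)
    # Coefficient of Determination: 1 - residual variance / total variance
    mean_actual = sum(actual) / n
    ss_res = sum((a - p) ** 2 for a, p in zip(actual, predicted))
    ss_tot = sum((a - mean_actual) ** 2 for a in actual)
    r2 = 1 - ss_res / ss_tot
    return mse, rmse, r2
```
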
19 | ## Demo
20 | If you run Test.py, you will see the following textual menu:
21 | ```
22 | Please enter one of following numbers:
23 | 0 - Classification on Seed Dataset
24 | 1 - Classification on Wine Red Dataset
25 | 2 - Classification on Pokemon Dataset
26 | 3 - Regression on Wine White Dataset
27 | 4 - Regression on Wine Red Dataset
28 | ```
29 | If you choose 2, a classification task is performed on the Pokemon dataset:
30 | ```
31 | 2
32 | You entered 2
33 | >epoch=0, lrate=0.100, error=0.396
34 | >epoch=100, lrate=0.100, error=0.087
35 | >epoch=200, lrate=0.100, error=0.083
36 | >epoch=300, lrate=0.100, error=0.081
37 | >epoch=400, lrate=0.100, error=0.081
38 | >epoch=500, lrate=0.100, error=0.080
39 | >accuracy=95.450
40 | >epoch=0, lrate=0.100, error=0.353
41 | >epoch=100, lrate=0.100, error=0.092
42 | >epoch=200, lrate=0.100, error=0.085
43 | >epoch=300, lrate=0.100, error=0.083
44 | >epoch=400, lrate=0.100, error=0.082
45 | >epoch=500, lrate=0.100, error=0.081
46 | >accuracy=95.400
47 | >epoch=0, lrate=0.100, error=0.415
48 | >epoch=100, lrate=0.100, error=0.087
49 | >epoch=200, lrate=0.100, error=0.083
50 | >epoch=300, lrate=0.100, error=0.082
51 | >epoch=400, lrate=0.100, error=0.081
52 | >epoch=500, lrate=0.100, error=0.080
53 | >accuracy=95.520
54 | >epoch=0, lrate=0.100, error=0.401
55 | >epoch=100, lrate=0.100, error=0.089
56 | >epoch=200, lrate=0.100, error=0.084
57 | >epoch=300, lrate=0.100, error=0.083
58 | >epoch=400, lrate=0.100, error=0.082
59 | >epoch=500, lrate=0.100, error=0.081
60 | >accuracy=95.280
61 | >epoch=0, lrate=0.100, error=0.395
62 | >epoch=100, lrate=0.100, error=0.093
63 | >epoch=200, lrate=0.100, error=0.087
64 | >epoch=300, lrate=0.100, error=0.085
65 | >epoch=400, lrate=0.100, error=0.084
66 | >epoch=500, lrate=0.100, error=0.083
67 | >accuracy=94.900
68 | Scores: [95.45, 95.39999999999999, 95.52000000000001, 95.28, 94.89999999999999]
69 | Mean Accuracy: 95.310%
70 | ```
71 |
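The scores are per-fold accuracies from 5-fold cross-validation, and the reported mean accuracy is their plain average:

```python
scores = [95.45, 95.39999999999999, 95.52000000000001, 95.28, 94.89999999999999]
print('Mean Accuracy: %.3f%%' % (sum(scores) / len(scores)))  # Mean Accuracy: 95.310%
```
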
72 | ## Possible Extensions:
73 | - **Early stopping**.
74 | - Experiment with different **weight initialization techniques** (such as small random numbers).
75 | - **Batch Gradient Descent**. Change the training procedure from online to batch gradient descent
76 | and update the weights only at the end of each epoch.
77 | - **Mini-Batch Gradient Descent**. More info [here](http://cs231n.github.io/optimization-1/#gd).
78 | - **Momentum** (a sketch follows this list). More info [here](http://cs231n.github.io/neural-networks-3/#update).
79 | - **Annealing the learning rate**. More info [here](http://cs231n.github.io/neural-networks-3/#anneal).
80 | - **Dropout Regularization**, **Batch Normalization**. More info [here](http://cs231n.github.io/neural-networks-2/).
81 | - **Model Ensembles**. More info [here](http://cs231n.github.io/neural-networks-3/).
82 |
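For instance, the classical momentum update keeps a velocity term that accumulates a decaying average of past gradients; a minimal sketch (the `momentum_step` helper is hypothetical, not part of this repository):

```python
def momentum_step(weight, velocity, gradient, l_rate, mu=0.9):
    # mu is the momentum coefficient; 0.9 is a common default
    velocity = mu * velocity - l_rate * gradient  # accumulate past gradients
    weight = weight + velocity                    # move along the velocity
    return weight, velocity
```
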
83 | ## References:
84 | - [1] How to Implement Backpropagation Algorithm from scratch in Python [here](https://machinelearningmastery.com/implement-backpropagation-algorithm-scratch-python/).
85 | - [2] Implementing Multiple Layer Neural Network from Scratch [here](https://github.com/pangolulu/neural-network-from-scratch).
86 | - [3] Andrew Ng Lecture on *Gradient Descent* [here](http://cs229.stanford.edu/notes/cs229-notes1.pdf).
87 | - [4] Andrew Ng Lecture on *Backpropagation Algorithm* [here](http://cs229.stanford.edu/notes/cs229-notes-backprop.pdf).
88 | - [5] P. Cortez, A. Cerdeira, F. Almeida, T. Matos and J. Reis.
89 | Modeling wine preferences by data mining from physicochemical properties. Decision Support Systems, Elsevier, 47(4):547-553, 2009. [here](https://archive.ics.uci.edu/ml/datasets/wine+quality)
90 | - [6] Seeds Data Set [here](http://archive.ics.uci.edu/ml/datasets/seeds)
91 |
92 |
93 |
--------------------------------------------------------------------------------
/src/DataPreparation.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 09 Jan 2018
3 |
4 | @author: Umberto Griffo
5 | '''
6 |
7 | from csv import reader
8 |
9 | class DataPreparation:
10 |
11 | def load_csv(self, filename):
12 | '''
13 | Load a CSV file
14 | '''
15 | dataset = list()
16 | with open(filename, 'r') as file:
17 | csv_reader = reader(file)
18 | for row in csv_reader:
19 | if not row:
20 | continue
21 | dataset.append(row)
22 | return dataset
23 |
24 | def str_column_to_float(self, dataset, column):
25 | '''
26 | Convert string column to float
27 | '''
28 | for row in dataset:
29 | row[column] = float(row[column].strip())
30 |
31 | def str_column_to_int(self, dataset, column):
32 | '''
33 | Convert string column to integer
34 | '''
35 | class_values = [row[column] for row in dataset]
36 | unique = set(class_values)
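        # note: iterating a set gives an arbitrary order (which can vary
        # between runs), so the value-to-integer mapping is not stable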
37 | lookup = dict()
38 | for i, value in enumerate(unique):
39 | lookup[value] = i
40 | for row in dataset:
41 | row[column] = lookup[row[column]]
42 | return lookup
43 |
44 | def dataset_minmax(self, dataset):
45 | '''
46 | Find the min and max values for each column
47 | '''
48 | stats = [[min(column), max(column)] for column in zip(*dataset)]
49 | self.stats = stats
50 | return stats
51 |
52 | def normalize_dataset_classification(self, dataset, minmax):
53 | '''
54 | Rescale dataset columns to the range 0-1
55 | '''
56 | for row in dataset:
57 | for i in range(len(row) - 1):
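                # e.g. with minmax[i] == [2, 12], a raw value of 4 maps to
                # (4 - 2) / (12 - 2) = 0.2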
58 | row[i] = (row[i] - minmax[i][0]) / (minmax[i][1] - minmax[i][0])
59 |
60 | def denormalize_dataset_classification(self, dataset, minmax):
61 | '''
62 | Rescale dataset columns to the original range
63 | '''
64 | for row in dataset:
65 | for i in range(len(row) - 1):
66 |             row[i] = (row[i] * (minmax[i][1] - minmax[i][0])) + minmax[i][0]
67 |
68 | def normalize_dataset_regression(self, dataset, minmax):
69 | '''
70 | Rescale dataset columns to the range 0-1
71 | '''
72 | for row in dataset:
73 | for i in range(len(row)):
74 | row[i] = (row[i] - minmax[i][0]) / (minmax[i][1] - minmax[i][0])
75 |
76 | def denormalize_dataset_regression(self, dataset, minmax):
77 | '''
78 | Rescale dataset columns to the original range
79 | '''
80 | for row in dataset:
81 | for i in range(len(row)):
82 | row[i] = (row[i] * (minmax[i][1] - minmax[i][0])) + minmax[i][0]
--------------------------------------------------------------------------------
/src/Test.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 09 Jan 2018
3 |
4 | @author: Umberto
5 | '''
6 |
7 | from random import seed
8 |
9 | from ml.MultilayerNnRegressor import MultilayerNnRegressor
10 | from ml.MultilayerNnClassifier import MultilayerNnClassifier
11 | from ml.activation.Sigmoid import Sigmoid
12 | from DataPreparation import DataPreparation
13 | from evaluation.ClassificationEvaluator import ClassificationEvaluator
14 | from evaluation.RegressionEvaluator import RegressionEvaluator
15 | from evaluation.Splitting import Splitting
16 |
17 | def classificationSeed():
18 | '''
19 | Test Classification on Seeds dataset
20 | '''
21 |
22 | seed(1)
23 |
24 | n_folds = 5
25 | l_rate = 0.3
26 | n_epoch = 1000
27 | n_hidden = [10]
28 |
29 | mlp = MultilayerNnClassifier()
30 | activationFunction = Sigmoid()
31 | dp = DataPreparation()
32 | evaluator = ClassificationEvaluator()
33 | splitting = Splitting()
34 |
35 | # load and prepare data
36 | filename = '../Datasets/seeds_dataset.csv'
37 | dataset = dp.load_csv(filename)
38 | for i in range(len(dataset[0]) - 1):
39 | dp.str_column_to_float(dataset, i)
40 | # convert class column to integers
41 | dp.str_column_to_int(dataset, len(dataset[0]) - 1)
42 | # normalize input variables
43 | minmax = dp.dataset_minmax(dataset)
44 | dp.normalize_dataset_classification(dataset, minmax)
45 | # evaluate algorithm
46 | scores = evaluator.evaluate_algorithm(dataset, splitting, mlp.back_propagation, n_folds, l_rate, n_epoch, n_hidden, activationFunction)
47 | print_classification_scores(scores)
48 |
49 | def classificationWineRed():
50 | '''
51 | Test Classification on WineRed dataset
52 | '''
53 |
54 | seed(1)
55 |
56 | n_folds = 5
57 | l_rate = 0.3
58 | n_epoch = 1000
59 | n_hidden = [10]
60 |
61 | mlp = MultilayerNnClassifier()
62 | activationFunction = Sigmoid()
63 | dp = DataPreparation()
64 | evaluator = ClassificationEvaluator()
65 | splitting = Splitting()
66 |
67 | # Test Backprop on Wine Red dataset
68 | # load and prepare data
69 | filename = '../Datasets/winequality-red.csv'
70 | dataset = dp.load_csv(filename)
71 | for i in range(len(dataset[0]) - 1):
72 | dp.str_column_to_float(dataset, i)
73 | # convert class column to integers
74 | dp.str_column_to_int(dataset, len(dataset[0]) - 1)
75 | # normalize input variables
76 | minmax = dp.dataset_minmax(dataset)
77 | dp.normalize_dataset_classification(dataset, minmax)
78 | # evaluate algorithm
79 | scores = evaluator.evaluate_algorithm(dataset, splitting, mlp.back_propagation, n_folds, l_rate, n_epoch, n_hidden, activationFunction)
80 | print_classification_scores(scores)
81 |
82 | def classificationPokemon():
83 | '''
84 | Test Classification on Pokemon dataset
85 | id_combat pk1_ID pk1_Name pk1_Type1 pk1_Type2 pk1_HP pk1_Attack pk1_Defense pk1_SpAtk
86 | pk1_SpDef pk1_Speed pk1_Generation pk1_Legendary pk1_Grass pk1_Fire pk1_Water pk1_Bug
87 | pk1_Normal pk1_Poison pk1_Electric pk1_Ground pk1_Fairy pk1_Fighting pk1_Psychic pk1_Rock
88 | pk1_Ghost pk1_Ice pk1_Dragon pk1_Dark pk1_Steel pk1_Flying ID pk2_Name pk2_Type1 pk2_Type2
89 | pk2_HP pk2_Attack pk2_Defense pk2_SpAtk pk2_SpDef pk2_Speed pk2_Generation pk2_Legendary
90 | pk2_Grass pk2_Fire pk2_Water pk2_Bug pk2_Normal pk2_Poison pk2_Electric pk2_Ground pk2_Fairy
91 | pk2_Fighting pk2_Psychic pk2_Rock pk2_Ghost pk2_Ice pk2_Dragon pk2_Dark pk2_Steel pk2_Flying winner
92 | '''
93 |
94 | seed(1)
95 |
96 | n_folds = 5
97 | l_rate = 0.1
98 | n_epoch = 500
99 | n_hidden = [5]
100 |
101 | mlp = MultilayerNnClassifier()
102 | activationFunction = Sigmoid()
103 | dp = DataPreparation()
104 | evaluator = ClassificationEvaluator()
105 | splitting = Splitting()
106 |
107 | # load and prepare data
108 | filename = '../Datasets/pkmn.csv'
109 | dataset = dp.load_csv(filename)
110 | for i in range(len(dataset[0]) - 1):
111 | dp.str_column_to_float(dataset, i)
112 | # convert class column to integers
113 | dp.str_column_to_int(dataset, len(dataset[0]) - 1)
114 | # normalize input variables
115 | minmax = dp.dataset_minmax(dataset)
116 | dp.normalize_dataset_classification(dataset, minmax)
117 | # evaluate algorithm
118 | scores = evaluator.evaluate_algorithm(dataset, splitting, mlp.back_propagation, n_folds, l_rate, n_epoch, n_hidden, activationFunction)
119 | print_classification_scores(scores)
120 |
121 | def regressionWineRed():
122 | '''
123 | Test Regression on WineRed dataset
124 | '''
125 |
126 | seed(1)
127 |
128 | n_folds = 5
129 | l_rate = 0.3
130 | n_epoch = 1000
131 | n_hidden = [20,10]
132 |
133 | mlp = MultilayerNnRegressor()
134 | activationFunction = Sigmoid()
135 | dp = DataPreparation()
136 | evaluator = RegressionEvaluator()
137 | splitting = Splitting()
138 |
139 | # load and prepare data
140 | filename = '../Datasets/winequality-red.csv'
141 | dataset = dp.load_csv(filename)
142 | for i in range(len(dataset[0])):
143 | dp.str_column_to_float(dataset, i)
144 | # normalize input variables including the target
145 | minmax = dp.dataset_minmax(dataset)
146 | target_minmax = minmax[-1]
147 | dp.normalize_dataset_regression(dataset, minmax)
148 | # evaluate algorithm (target_minmax appears twice: once consumed by the evaluator to rescale predictions, once forwarded through *args to back_propagation)
149 | scores = evaluator.evaluate_algorithm(dataset, splitting, mlp.back_propagation, n_folds, target_minmax, l_rate, n_epoch, n_hidden, activationFunction, target_minmax)
150 | print_regression_scores(scores)
151 |
152 | def regressionWineWhite():
153 | '''
154 | Test Regression on WineWhite dataset
155 | '''
156 |
157 | seed(1)
158 |
159 | n_folds = 5
160 | l_rate = 0.3
161 | n_epoch = 1000
162 | n_hidden = [10,5]
163 |
164 | mlp = MultilayerNnRegressor()
165 | activationFunction = Sigmoid()
166 | dp = DataPreparation()
167 | evaluator = RegressionEvaluator()
168 | splitting = Splitting()
169 |
170 | # load and prepare data
171 | filename = '../Datasets/winequality-white.csv'
172 | dataset = dp.load_csv(filename)
173 | for i in range(len(dataset[0])):
174 | dp.str_column_to_float(dataset, i)
175 | # normalize input variables including the target
176 | minmax = dp.dataset_minmax(dataset)
177 | target_minmax = minmax[-1]
178 | dp.normalize_dataset_regression(dataset, minmax)
179 | # evaluate algorithm (target_minmax appears twice: once consumed by the evaluator to rescale predictions, once forwarded through *args to back_propagation)
180 | scores = evaluator.evaluate_algorithm(dataset, splitting, mlp.back_propagation, n_folds, target_minmax, l_rate, n_epoch, n_hidden, activationFunction, target_minmax)
181 | print_regression_scores(scores)
182 |
183 | def print_regression_scores(scores):
184 | print('Scores: %s' % scores)
185 | sum_mse = 0
186 | sum_rmse = 0
187 | sum_r2 = 0
188 | for score in scores:
189 | sum_mse += score[0]
190 | sum_rmse += score[1]
191 | sum_r2 += score[2]
192 | print('Mean MSE: %.3f' % (sum_mse / float(len(scores))))
193 | print('Mean RMSE: %.3f' % (sum_rmse / float(len(scores))))
194 | print('Mean R^2: %.3f' % (sum_r2 / float(len(scores))))
195 |
196 | def print_classification_scores(scores):
197 | print('Scores: %s' % scores)
198 | print('Mean Accuracy: %.3f%%' % (sum(scores) / float(len(scores))))
199 |
200 | if __name__ == '__main__':
201 |
202 | options = {
203 | 0 : classificationSeed,
204 | 1 : classificationWineRed,
205 | 2 : classificationPokemon,
206 | 3 : regressionWineRed,
207 | 4 : regressionWineWhite
208 | }
209 |
210 | var = input("Please enter one of the following numbers: \n 0 - Classification on Seed Dataset\n 1 - Classification on Wine Red Dataset\n 2 - Classification on Pokemon Dataset\n 3 - Regression on Wine Red Dataset\n 4 - Regression on Wine White Dataset\n")
211 | print("You entered " + str(var))
212 | if int(var) >= len(options) or int(var) < 0:
213 | raise Exception('You have entered an invalid number: ' + str(var))
214 | options[int(var)]()
215 |
--------------------------------------------------------------------------------
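
The argument threading in these test functions can be easy to miss: everything after `n_folds` (plus `minmax`, in the regression case) is captured by the evaluator's `*args` and forwarded verbatim to `back_propagation`. A stripped-down sketch of the same pattern, with hypothetical names:

```python
def evaluate_algorithm(dataset, algorithm, n_folds, *args):
    # the evaluator never inspects *args; it just forwards them to the model
    return algorithm('train_set', 'test_set', *args)

def back_propagation(train, test, l_rate, n_epoch, n_hidden, activation):
    return (l_rate, n_epoch, n_hidden, activation)

# mirrors the calls in Test.py: the trailing positionals ride along via *args
print(evaluate_algorithm(None, back_propagation, 5, 0.3, 1000, [10], 'sigmoid'))
# -> (0.3, 1000, [10], 'sigmoid')
```
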
/src/__pycache__/ClassificationEvaluator.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umbertogriffo/Minimalistic-Multiple-Layer-Neural-Network-from-Scratch-in-Python/9743c77b17f3c2920eb6c891386986693ad94c26/src/__pycache__/ClassificationEvaluator.cpython-36.pyc
--------------------------------------------------------------------------------
/src/__pycache__/DataPreparation.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umbertogriffo/Minimalistic-Multiple-Layer-Neural-Network-from-Scratch-in-Python/9743c77b17f3c2920eb6c891386986693ad94c26/src/__pycache__/DataPreparation.cpython-36.pyc
--------------------------------------------------------------------------------
/src/__pycache__/MultilayerFeedForwardClassifier.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umbertogriffo/Minimalistic-Multiple-Layer-Neural-Network-from-Scratch-in-Python/9743c77b17f3c2920eb6c891386986693ad94c26/src/__pycache__/MultilayerFeedForwardClassifier.cpython-36.pyc
--------------------------------------------------------------------------------
/src/__pycache__/MultilayerFeedForwardRegressor.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umbertogriffo/Minimalistic-Multiple-Layer-Neural-Network-from-Scratch-in-Python/9743c77b17f3c2920eb6c891386986693ad94c26/src/__pycache__/MultilayerFeedForwardRegressor.cpython-36.pyc
--------------------------------------------------------------------------------
/src/__pycache__/MultilayerNnClassifier.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umbertogriffo/Minimalistic-Multiple-Layer-Neural-Network-from-Scratch-in-Python/9743c77b17f3c2920eb6c891386986693ad94c26/src/__pycache__/MultilayerNnClassifier.cpython-36.pyc
--------------------------------------------------------------------------------
/src/__pycache__/MultilayerNnRegressor.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umbertogriffo/Minimalistic-Multiple-Layer-Neural-Network-from-Scratch-in-Python/9743c77b17f3c2920eb6c891386986693ad94c26/src/__pycache__/MultilayerNnRegressor.cpython-36.pyc
--------------------------------------------------------------------------------
/src/__pycache__/RegressionEvaluator.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umbertogriffo/Minimalistic-Multiple-Layer-Neural-Network-from-Scratch-in-Python/9743c77b17f3c2920eb6c891386986693ad94c26/src/__pycache__/RegressionEvaluator.cpython-36.pyc
--------------------------------------------------------------------------------
/src/evaluation/ClassificationEvaluator.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 09 gen 2018
3 |
4 | @author: Umberto
5 |
6 | '''
7 | class ClassificationEvaluator:
8 |
9 | def accuracy_metric(self, actual, predicted):
10 | '''
11 | Calculate accuracy percentage
12 | '''
13 | correct = 0
14 | for i in range(len(actual)):
15 | if actual[i] == predicted[i]:
16 | correct += 1
17 | return correct / float(len(actual)) * 100.0
18 |
19 | def evaluate_algorithm(self, dataset, splitting, algorithm, n_folds, *args):
20 | '''
21 | Evaluate an algorithm using a cross validation split
22 | '''
23 | folds = splitting.cross_validation_split(dataset, n_folds)
24 | scores = list()
25 | for fold in folds:
26 | train_set = list(folds)
27 | train_set.remove(fold)
28 | train_set = sum(train_set, [])
29 | test_set = list()
30 | for row in fold:
31 | row_copy = list(row)
32 | test_set.append(row_copy)
33 | row_copy[-1] = None
34 | print('>train size=%d' % (len(train_set)))
35 | print('>test size=%d' % (len(test_set)))
36 | predicted = algorithm(train_set, test_set, *args)
37 | actual = [row[-1] for row in fold]
38 | accuracy = self.accuracy_metric(actual, predicted)
39 | print('>accuracy=%.3f' % (accuracy))
40 | scores.append(accuracy)
41 | return scores
42 |
--------------------------------------------------------------------------------
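
A worked example of the accuracy metric (run from `src/` so the package import resolves, as in `Test.py`):

```python
from evaluation.ClassificationEvaluator import ClassificationEvaluator

evaluator = ClassificationEvaluator()
actual    = [0, 1, 1, 0, 1]
predicted = [0, 1, 0, 0, 1]
# 4 of 5 labels match; the metric returns a percentage, not a fraction
print(evaluator.accuracy_metric(actual, predicted))  # 80.0
```
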
/src/evaluation/RegressionEvaluator.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 09 gen 2018
3 |
4 | @author: Umberto
5 |
6 | '''
7 | from math import sqrt
8 |
9 | class RegressionEvaluator:
10 |
11 | def mse_metric(self, actual, predicted):
12 | '''
13 | Calculate mse
14 | '''
15 | s = 0
16 | for i in range(len(actual)):
17 | a = actual[i]
18 | p = predicted[i]
19 | s += (a - p) * (a - p)
20 | return s / float(len(actual))
21 |
22 | def rmse_metric(self, actual, predicted):
23 | '''
24 | Calculate rmse
25 | '''
26 | s = 0
27 | for i in range(len(actual)):
28 | a = actual[i]
29 | p = predicted[i]
30 | s += (a - p) * (a - p)
31 | return sqrt(s / float(len(actual)))
32 |
33 | def r2_metric(self, actual, predicted):
34 | '''
35 | Calculate r2
36 | '''
37 | s = 0
38 | sumMean = 0
39 | mean = sum(actual) / float(len(actual))
40 | for i in range(len(actual)):
41 | a = actual[i]
42 | p = predicted[i]
43 | s += (a - p) * (a - p)
44 | sumMean += (a - mean) * (a - mean)
45 | return 1 - (s / sumMean)
46 |
47 | def evaluate_algorithm(self, dataset, splitting, algorithm, n_folds, minmax, *args):
48 | '''
49 | Evaluate an algorithm using a cross validation split
50 | '''
51 | folds = splitting.cross_validation_split(dataset, n_folds)
52 | scores = list()
53 | for fold in folds:
54 | train_set = list(folds)
55 | train_set.remove(fold)
56 | train_set = sum(train_set, [])
57 | test_set = list()
58 | for row in fold:
59 | row_copy = list(row)
60 | test_set.append(row_copy)
61 | row_copy[-1] = None
62 | print('>train size=%d' % (len(train_set)))
63 | print('>test size=%d' % (len(test_set)))
64 | predicted = algorithm(train_set, test_set, *args)
65 | # Rescale the predictions
66 | self.denormalize_target(predicted, minmax)
67 |
68 | actual = [row[-1] for row in fold]
69 | # Rescale the actuals
70 | self.denormalize_target(actual, minmax)
71 |
72 | mse = self.mse_metric(actual, predicted)
73 | rmse = self.rmse_metric(actual, predicted)
74 | r2 = self.r2_metric(actual, predicted)
75 |
76 | print('>mse=%.3f, rmse=%.3f, r2=%.3f' % (mse, rmse, r2))
77 | scores.append((mse, rmse, r2))
78 | return scores
79 |
80 | def denormalize_target(self, target, target_minmax):
81 | '''
82 | Rescale predicted Value to the original range
83 | '''
84 | for i in range(len(target)):
85 | target[i] = (target[i] * (target_minmax[1] - target_minmax[0])) + target_minmax[0]
86 |
--------------------------------------------------------------------------------
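
The same arithmetic the three metrics perform, worked by hand on a made-up example:

```python
from math import sqrt

actual    = [3.0, 5.0, 7.0]
predicted = [2.0, 5.0, 9.0]

squared_errors = [(a - p) ** 2 for a, p in zip(actual, predicted)]  # [1, 0, 4]
mse  = sum(squared_errors) / len(actual)        # 5/3 ~ 1.667
rmse = sqrt(mse)                                # ~1.291
mean = sum(actual) / len(actual)                # 5.0
ss_tot = sum((a - mean) ** 2 for a in actual)   # 8.0
r2 = 1 - sum(squared_errors) / ss_tot           # 1 - 5/8 = 0.375
print(mse, rmse, r2)
```
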
/src/evaluation/Splitting.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 09 gen 2018
3 |
4 | @author: Umberto Griffo
5 |
6 | '''
7 |
8 | from random import randrange
9 |
10 | class Splitting:
11 |
12 | def cross_validation_split(self, dataset, n_folds):
13 | '''
14 | Split a dataset into k folds
15 | '''
16 | dataset_split = list()
17 | dataset_copy = list(dataset)
18 | fold_size = int(len(dataset) / n_folds)
19 | for i in range(n_folds):
20 | fold = list()
21 | while len(fold) < fold_size:
22 | index = randrange(len(dataset_copy))
23 | fold.append(dataset_copy.pop(index))
24 | dataset_split.append(fold)
25 | return dataset_split
--------------------------------------------------------------------------------
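
Note that `fold_size` is truncated with `int()`, so when the dataset size is not divisible by `n_folds` the leftover rows are silently dropped. A quick sketch (run from `src/`):

```python
from random import seed
from evaluation.Splitting import Splitting

seed(1)
dataset = [[i] for i in range(11)]  # 11 rows
folds = Splitting().cross_validation_split(dataset, 5)
# fold_size = int(11 / 5) = 2, so one row never lands in any fold
print([len(fold) for fold in folds])  # [2, 2, 2, 2, 2]
```
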
/src/evaluation/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umbertogriffo/Minimalistic-Multiple-Layer-Neural-Network-from-Scratch-in-Python/9743c77b17f3c2920eb6c891386986693ad94c26/src/evaluation/__init__.py
--------------------------------------------------------------------------------
/src/evaluation/__pycache__/ClassificationEvaluator.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umbertogriffo/Minimalistic-Multiple-Layer-Neural-Network-from-Scratch-in-Python/9743c77b17f3c2920eb6c891386986693ad94c26/src/evaluation/__pycache__/ClassificationEvaluator.cpython-36.pyc
--------------------------------------------------------------------------------
/src/evaluation/__pycache__/Splitting.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umbertogriffo/Minimalistic-Multiple-Layer-Neural-Network-from-Scratch-in-Python/9743c77b17f3c2920eb6c891386986693ad94c26/src/evaluation/__pycache__/Splitting.cpython-36.pyc
--------------------------------------------------------------------------------
/src/evaluation/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umbertogriffo/Minimalistic-Multiple-Layer-Neural-Network-from-Scratch-in-Python/9743c77b17f3c2920eb6c891386986693ad94c26/src/evaluation/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/src/ml/MultilayerNnClassifier.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 09 gen 2018
3 |
4 | @author: Umberto Griffo
5 |
6 | '''
7 |
8 | from random import random
9 | from random import seed
10 | from ml.activation.Tanh import Tanh
11 |
12 | class MultilayerNnClassifier:
13 |
14 | def initialize_network_old(self, n_inputs, n_hidden, n_outputs):
15 | '''
16 | Initialize a new neural network ready for training.
17 | It accepts three parameters, the number of inputs, the number of neurons
18 | to have in the hidden layer and the number of outputs.
19 | '''
20 | network = list()
21 | # hidden layer has 'n_hidden' neuron with 'n_inputs' input weights plus the bias
22 | hidden_layer = [{'weights':[random() for i in range(n_inputs + 1)]} for i in range(n_hidden)]
23 | network.append(hidden_layer)
24 | output_layer = [{'weights':[random() for i in range(n_hidden + 1)]} for i in range(n_outputs)]
25 | network.append(output_layer)
26 | return network
27 |
28 | def initialize_network(self, n_inputs, n_hidden, n_outputs):
29 | '''
30 | Initialize a new neural network ready for training.
31 | It accepts three parameters, the number of inputs, the hidden layers and the number of outputs.
32 | '''
33 | network = list()
34 | h = 0
35 | for hidden in n_hidden:
36 | if h == 0:
37 | # the first hidden layer has 'hidden' neurons, each with 'n_inputs' input weights plus the bias
38 | hidden_layer = [{'weights':[random() for i in range(n_inputs + 1)]} for i in range(hidden)]
39 | else:
40 | # later hidden layers have 'hidden' neurons, each with 'n_hidden[h-1]' weights (one per neuron of the previous layer) plus the bias
41 | hidden_layer = [{'weights':[random() for i in range(n_hidden[h-1] + 1)]} for i in range(hidden)]
42 | network.append(hidden_layer)
43 | h += 1
44 | # the output layer has 'n_outputs' neurons, each with 'n_hidden[-1]' weights plus the bias
45 | output_layer = [{'weights':[random() for i in range(n_hidden[-1] + 1)]} for i in range(n_outputs)]
46 | network.append(output_layer)
47 | return network
48 |
49 | def activate(self, weights, inputs):
50 | '''
51 | Calculating the neuron activation for an input is the first step of forward propagation:
52 | activation = sum(weight_i * input_i) + bias.
53 | '''
54 | activation = weights[-1] # Bias
55 | for i in range(len(weights) - 1):
56 | activation += weights[i] * inputs[i]
57 | return activation
58 |
59 | def forward_propagate(self, network, activation_function, row):
60 | '''
61 | Forward propagate input to a network output.
62 | The function returns the outputs from the last layer also called the output layer.
63 | '''
64 | inputs = row
65 | for layer in network:
66 | new_inputs = []
67 | for neuron in layer:
68 | activation = self.activate(neuron['weights'], inputs)
69 | neuron['output'] = activation_function.transfer(activation)
70 | new_inputs.append(neuron['output'])
71 | inputs = new_inputs
72 | return inputs
73 |
74 | def backward_propagate_error(self, network, activation_function, expected):
75 | '''
76 | Backpropagate error and store in neurons.
77 |
78 | The error for a given neuron can be calculated as follows:
79 |
80 | error = (expected - output) * transfer_derivative(output)
81 |
82 | Where expected is the expected output value for the neuron,
83 | output is the output value for the neuron and transfer_derivative()
84 | calculates the slope of the neuron's output value.
85 |
86 | The error signal for a neuron in the hidden layer is calculated as:
87 |
88 | error = (weight_k * error_j) * transfer_derivative(output)
89 |
90 | Where error_j is the error signal from the jth neuron in the output layer,
91 | weight_k is the weight that connects the kth neuron to the current neuron
92 | and output is the output for the current neuron.
93 | '''
94 | for i in reversed(range(len(network))):
95 | layer = network[i]
96 | errors = list()
97 | if i != len(network) - 1:
98 | for j in range(len(layer)):
99 | error = 0.0
100 | for neuron in network[i + 1]:
101 | error += (neuron['weights'][j] * neuron['delta'])
102 | errors.append(error)
103 | else:
104 | for j in range(len(layer)):
105 | neuron = layer[j]
106 | errors.append(expected[j] - neuron['output'])
107 | for j in range(len(layer)):
108 | neuron = layer[j]
109 | neuron['delta'] = errors[j] * activation_function.transfer_derivative(neuron['output'])
110 |
111 | def update_weights(self, network, row, l_rate):
112 | '''
113 | Updates the weights for a network given an input row of data, a learning rate
114 | and assume that a forward and backward propagation have already been performed.
115 |
116 | weight = weight + learning_rate * error * input
117 |
118 | Where weight is a given weight, learning_rate is a parameter that you must specify,
119 | error is the error calculated by the back-propagation procedure for the neuron and
120 | input is the input value that caused the error.
121 | '''
122 | for i in range(len(network)):
123 | inputs = row[:-1]
124 | if i != 0:
125 | inputs = [neuron['output'] for neuron in network[i - 1]]
126 | for neuron in network[i]:
127 | for j in range(len(inputs)):
128 | neuron['weights'][j] += l_rate * neuron['delta'] * inputs[j]
129 | neuron['weights'][-1] += l_rate * neuron['delta']
130 |
131 | def train_network(self, network, activation_function, train, l_rate, n_epoch, n_outputs):
132 | '''
133 | Train a network for a fixed number of epochs.
134 | The network is updated using stochastic gradient descent.
135 | '''
136 | for epoch in range(n_epoch + 1):
137 | sum_error = 0
138 | for row in train:
139 | # Calculate Loss
140 | outputs = self.forward_propagate(network, activation_function, row)
141 | expected = [0 for i in range(n_outputs)]
142 | expected[row[-1]] = 1 # One-hot encode the class label
143 | sum_error += sum([(expected[i] - outputs[i]) ** 2 for i in range(len(expected))])
144 | self.backward_propagate_error(network, activation_function, expected)
145 | self.update_weights(network, row, l_rate)
146 | if (epoch % 100 == 0):
147 | print('>epoch=%d, lrate=%.3f, error=%.3f' % (epoch, l_rate, sum_error/float(len(train))))
148 |
149 | def predict(self, network, activationFunction, row):
150 | '''
151 | Make a prediction with a network.
152 | We can use the output values themselves directly as the probability of a pattern belonging to each output class.
153 | It may be more useful to turn this output back into a crisp class prediction.
154 | We can do this by selecting the class value with the largest probability.
155 | This is also called the arg max function.
156 | '''
157 | outputs = self.forward_propagate(network, activationFunction, row)
158 | return outputs.index(max(outputs))
159 |
160 | def back_propagation(self, train, test, l_rate, n_epoch, n_hidden, activationFunction):
161 | '''
162 | Backpropagation Algorithm With Stochastic Gradient Descent
163 | '''
164 | n_inputs = len(train[0]) - 1
165 | n_outputs = len(set([row[-1] for row in train]))
166 | network = self.initialize_network(n_inputs, n_hidden, n_outputs)
167 | self.train_network(network, activationFunction, train, l_rate, n_epoch, n_outputs)
168 | predictions = list()
169 | for row in test:
170 | prediction = self.predict(network, activationFunction, row)
171 | predictions.append(prediction)
172 | return predictions
173 |
174 | if __name__ == '__main__':
175 |
176 | seed(1)
177 | mlp = MultilayerNnClassifier()
178 | activationFunction = Tanh()
179 | network = mlp.initialize_network(2, [10,5], 2)
180 | for layer in network:
181 | print(layer)
182 |
183 | # Test forward_propagate
184 | print("Test Forward")
185 | row = [1, 0, None]
186 | output = mlp.forward_propagate(network, activationFunction, row)
187 | print(output)
188 |
189 | # Test backward_propagate_error
190 | print("Test backpropagation of error")
191 | network = [[{'output': 0.7105668883115941, 'weights': [0.13436424411240122, 0.8474337369372327, 0.763774618976614]}],
192 | [{'output': 0.6213859615555266, 'weights': [0.2550690257394217, 0.49543508709194095]}, {'output': 0.6573693455986976, 'weights': [0.4494910647887381, 0.651592972722763]}]]
193 | expected = [0, 1]
194 | mlp.backward_propagate_error(network, activationFunction, expected)
195 | for layer in network:
196 | print(layer)
197 |
198 | # Test training backprop algorithm
199 | print("Test training backprop algorithm")
200 | seed(1)
201 | dataset = [[2.7810836, 2.550537003, 0],
202 | [1.465489372, 2.362125076, 0],
203 | [3.396561688, 4.400293529, 0],
204 | [1.38807019, 1.850220317, 0],
205 | [3.06407232, 3.005305973, 0],
206 | [7.627531214, 2.759262235, 1],
207 | [5.332441248, 2.088626775, 1],
208 | [6.922596716, 1.77106367, 1],
209 | [8.675418651, -0.242068655, 1],
210 | [7.673756466, 3.508563011, 1]]
211 | n_inputs = len(dataset[0]) - 1
212 | n_outputs = len(set([row[-1] for row in dataset]))
213 | network = mlp.initialize_network(n_inputs, [2], n_outputs)
214 | mlp.train_network(network, activationFunction, dataset, 0.5, 20, n_outputs)
215 | for layer in network:
216 | print(layer)
217 | for row in dataset:
218 | prediction = mlp.predict(network, activationFunction, row)
219 | print('Expected=%d, Got=%d' % (row[-1], prediction))
220 |
--------------------------------------------------------------------------------
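
How the classifier turns a class label into a training target and the output layer back into a label, with made-up numbers:

```python
n_outputs = 3
label = 1
expected = [0 for i in range(n_outputs)]
expected[label] = 1                       # one-hot target: [0, 1, 0]

outputs = [0.12, 0.81, 0.33]              # hypothetical output-layer activations
prediction = outputs.index(max(outputs))  # arg max -> class 1
print(expected, prediction)
```
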
/src/ml/MultilayerNnRegressor.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 09 gen 2018
3 |
4 | @author: Umberto Griffo
5 |
6 | '''
7 |
8 | from random import random
9 | from math import sqrt
10 | from ml.activation.Linear import Linear
11 |
12 | class MultilayerNnRegressor:
13 |
14 | def initialize_network_old(self, n_inputs, n_hidden, n_outputs):
15 | '''
16 | Initialize a new neural network ready for training.
17 | It accepts three parameters, the number of inputs, the number of neurons
18 | to have in the hidden layer and the number of outputs.
19 | '''
20 | network = list()
21 | # hidden layer has 'n_hidden' neuron with 'n_inputs' input weights plus the bias
22 | hidden_layer = [{'weights':[random() for i in range(n_inputs + 1)]} for i in range(n_hidden)]
23 | network.append(hidden_layer)
24 | output_layer = [{'weights':[random() for i in range(n_hidden + 1)]} for i in range(n_outputs)]
25 | network.append(output_layer)
26 | return network
27 |
28 | def initialize_network(self, n_inputs, n_hidden, n_outputs):
29 | '''
30 | Initialize a new neural network ready for training.
31 | It accepts three parameters, the number of inputs, the hidden layers and the number of outputs.
32 | '''
33 | network = list()
34 | h = 0
35 | for hidden in n_hidden:
36 | if h == 0:
37 | # the first hidden layer has 'hidden' neurons, each with 'n_inputs' input weights plus the bias
38 | hidden_layer = [{'weights':[random() for i in range(n_inputs + 1)]} for i in range(hidden)]
39 | else:
40 | # later hidden layers have 'hidden' neurons, each with 'n_hidden[h-1]' weights (one per neuron of the previous layer) plus the bias
41 | hidden_layer = [{'weights':[random() for i in range(n_hidden[h-1] + 1)]} for i in range(hidden)]
42 | network.append(hidden_layer)
43 | h += 1
44 | # the output layer has 'n_outputs' neurons, each with 'n_hidden[-1]' weights plus the bias
45 | output_layer = [{'weights':[random() for i in range(n_hidden[-1] + 1)]} for i in range(n_outputs)]
46 | network.append(output_layer)
47 | return network
48 |
49 | def activate(self, weights, inputs):
50 | '''
51 | Calculating the neuron activation for an input is the first step of forward propagation:
52 | activation = sum(weight_i * input_i) + bias.
53 | '''
54 | activation = weights[-1] # Bias
55 | for i in range(len(weights) - 1):
56 | activation += weights[i] * inputs[i]
57 | return activation
58 |
59 | def forward_propagate(self, network, activation_function, row):
60 | '''
61 | Forward propagate input to a network output.
62 | The function returns the single output of the last layer, also called the output layer.
63 | '''
64 | inputs = row
65 | i = 1
66 | for layer in network:
67 | new_inputs = []
68 | for neuron in layer:
69 | activation = self.activate(neuron['weights'], inputs)
70 | if i != len(network): # hidden layers use the chosen activation; the output layer is linear
71 | neuron['output'] = activation_function.transfer(activation)
72 | else:
73 | neuron['output'] = Linear().transfer(activation)
74 | new_inputs.append(neuron['output'])
75 | inputs = new_inputs
76 | i += 1
77 | return inputs[0]
78 |
79 | def backward_propagate_error(self, network, activation_function, expected):
80 | '''
81 | Backpropagate error and store in neurons.
82 |
83 | The error for a given neuron can be calculated as follows:
84 |
85 | error = (expected - output) * transfer_derivative(output)
86 |
87 | Where expected is the expected output value for the neuron,
88 | output is the output value for the neuron and transfer_derivative()
89 | calculates the slope of the neuron's output value.
90 |
91 | The error signal for a neuron in the hidden layer is calculated as:
92 |
93 | error = (weight_k * error_j) * transfer_derivative(output)
94 |
95 | Where error_j is the error signal from the jth neuron in the output layer,
96 | weight_k is the weight that connects the kth neuron to the current neuron
97 | and output is the output for the current neuron.
98 | '''
99 | for i in reversed(range(len(network))):
100 | layer = network[i]
101 | errors = list()
102 | if i != len(network) - 1:
103 | for j in range(len(layer)):
104 | error = 0.0
105 | for neuron in network[i + 1]:
106 | error += (neuron['weights'][j] * neuron['delta'])
107 | errors.append(error)
108 | else:
109 | for j in range(len(layer)):
110 | neuron = layer[j]
111 | errors.append(expected - neuron['output'])
112 | for j in range(len(layer)):
113 | neuron = layer[j]
114 | neuron['delta'] = errors[j] * activation_function.transfer_derivative(neuron['output'])
115 |
116 | def update_weights(self, network, row, l_rate):
117 | '''
118 | Updates the weights for a network given an input row of data, a learning rate
119 | and assume that a forward and backward propagation have already been performed.
120 |
121 | weight = weight + learning_rate * error * input
122 |
123 | Where weight is a given weight, learning_rate is a parameter that you must specify,
124 | error is the error calculated by the back-propagation procedure for the neuron and
125 | input is the input value that caused the error.
126 | '''
127 | for i in range(len(network)):
128 | inputs = row[:-1]
129 | if i != 0:
130 | inputs = [neuron['output'] for neuron in network[i - 1]]
131 | for neuron in network[i]:
132 | for j in range(len(inputs)):
133 | neuron['weights'][j] += l_rate * neuron['delta'] * inputs[j]
134 | neuron['weights'][-1] += l_rate * neuron['delta']
135 |
136 | def train_network(self, network, activation_function, train, l_rate, n_epoch, n_outputs, target_minmax):
137 | '''
138 | Train a network for a fixed number of epochs.
139 | The network is updated using stochastic gradient descent.
140 | '''
141 | for epoch in range(n_epoch + 1):
142 | sum_error = 0
143 | for row in train:
144 | # Forward propagation returns a single value
145 | output = self.forward_propagate(network, activation_function, row)
146 | # Calculate loss
147 | output_rescaled = self.rescale_target(output, target_minmax)
148 | expected = row[len(row)-1]
149 | expected_rescaled = self.rescale_target(expected, target_minmax)
150 | sum_error += (expected_rescaled - output_rescaled) * (expected_rescaled - output_rescaled)
151 | # Back Propagate Error
152 | self.backward_propagate_error(network, activation_function, expected)
153 | self.update_weights(network, row, l_rate)
154 | if (epoch % 100 == 0):
155 | print('>epoch=%d, lrate=%.3f, error(MSE)=%.3f, error(RMSE)=%.3f' % (epoch, l_rate, sum_error/float(len(train)),sqrt(sum_error/float(len(train)))))
156 |
157 | def predict(self, network, activationFunction, row):
158 | '''
159 | Make a prediction with a network.
160 | We can use the output values themselves directly as the value of the scaled output.
161 | '''
162 | outputs = self.forward_propagate(network, activationFunction, row)
163 | return outputs
164 |
165 | def back_propagation(self, train, test, l_rate, n_epoch, n_hidden, activationFunction, target_minmax):
166 | '''
167 | Backpropagation Algorithm With Stochastic Gradient Descent
168 | '''
169 | n_inputs = len(train[0]) - 1
170 | network = self.initialize_network(n_inputs, n_hidden, 1)
171 | self.train_network(network, activationFunction, train, l_rate, n_epoch, 1, target_minmax)
172 | predictions = list()
173 | for row in test:
174 | prediction = self.predict(network, activationFunction, row)
175 | predictions.append(prediction)
176 | return predictions
177 |
178 | def rescale_target(self, target_value, target_minmax):
179 | '''
180 | Rescale target to the original range
181 | '''
182 | return (target_value * (target_minmax[1] - target_minmax[0])) + target_minmax[0]
183 |
--------------------------------------------------------------------------------
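
The regressor trains on 0-1 targets but reports its loss on the original scale via `rescale_target`. A round trip under an assumed target range of [3, 8] (roughly the wine-quality scores):

```python
target_minmax = [3.0, 8.0]

def normalize(y):
    return (y - target_minmax[0]) / (target_minmax[1] - target_minmax[0])

def rescale(y):
    # same formula as MultilayerNnRegressor.rescale_target
    return y * (target_minmax[1] - target_minmax[0]) + target_minmax[0]

quality = 6.0
scaled = normalize(quality)  # 0.6
print(rescale(scaled))       # 6.0 again
```
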
/src/ml/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umbertogriffo/Minimalistic-Multiple-Layer-Neural-Network-from-Scratch-in-Python/9743c77b17f3c2920eb6c891386986693ad94c26/src/ml/__init__.py
--------------------------------------------------------------------------------
/src/ml/__pycache__/__init__.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umbertogriffo/Minimalistic-Multiple-Layer-Neural-Network-from-Scratch-in-Python/9743c77b17f3c2920eb6c891386986693ad94c26/src/ml/__pycache__/__init__.cpython-36.pyc
--------------------------------------------------------------------------------
/src/ml/activation/ActivationFunction.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 09 gen 2018
3 |
4 | @author: Umberto Griffo
5 | '''
6 | from abc import ABCMeta, abstractmethod
7 |
8 | class ActivationFunction:
9 | '''
10 | classdocs
11 | '''
12 | __metaclass__ = ABCMeta
13 |
14 | @abstractmethod
15 | def transfer(self, activation):
16 | '''
17 | Transferring the neuron activation is the second step of forward propagation.
18 | Once a neuron is activated, we need to transfer the activation to see what the neuron output actually is.
19 | '''
20 | raise NotImplementedError()
21 |
22 | @abstractmethod
23 | def transfer_derivative(self, output):
24 | '''
25 | Calculate the derivative of a neuron's output.
26 | Given an output value from a neuron, we need to calculate its slope.
27 | '''
28 | raise NotImplementedError()
--------------------------------------------------------------------------------
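
Adding a new activation only requires subclassing `ActivationFunction` and implementing the two methods. A sketch of a leaky ReLU that is not part of this repository (note that `transfer_derivative` receives the neuron's output, which for this function has the same sign as the activation):

```python
from ml.activation import ActivationFunction

class LeakyReLU(ActivationFunction.ActivationFunction):

    def transfer(self, activation):
        return activation if activation >= 0 else 0.01 * activation

    def transfer_derivative(self, output):
        return 1.0 if output >= 0 else 0.01
```
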
/src/ml/activation/Linear.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 09 gen 2018
3 |
4 | @author: Umberto Griffo
5 | '''
6 |
7 | from ml.activation import ActivationFunction
8 |
9 | class Linear(ActivationFunction.ActivationFunction):
10 |
11 | def transfer(self, activation):
12 | '''
13 | Linear activation function.
14 | '''
15 | return activation
16 |
17 | def transfer_derivative(self, output):
18 | '''
19 | We are using the linear transfer function, the derivative of which can be calculated as follows:
20 | derivative = 1.0
21 | '''
22 | return 1.0
--------------------------------------------------------------------------------
/src/ml/activation/ReLU.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 09 gen 2018
3 |
4 | @author: Umberto Griffo
5 | '''
6 |
7 | from ml.activation import ActivationFunction
8 |
9 | class ReLU(ActivationFunction.ActivationFunction):
10 |
11 | def transfer(self, activation):
12 | '''
13 | Rectified Linear Unit activation function.
14 | '''
15 | if activation < 0:
16 | return 0
17 | else:
18 | return activation
19 |
20 | def transfer_derivative(self, output):
21 | '''
22 | We are using the Rectified Linear Unit transfer function, the derivative of which can be calculated as follows:
23 | derivative = 0 for x<0 ; 1 for x>=0
24 | '''
25 | if output < 0:
26 | return 0
27 | else:
28 | return 1
29 |
--------------------------------------------------------------------------------
/src/ml/activation/Sigmoid.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 09 gen 2018
3 |
4 | @author: Umberto Griffo
5 | '''
6 |
7 | from ml.activation import ActivationFunction
8 | from math import exp
9 |
10 | class Sigmoid(ActivationFunction.ActivationFunction):
11 |
12 | def transfer(self, activation):
13 | '''
14 | Sigmoid activation function.
15 | '''
16 | return 1.0 / (1.0 + exp(-activation))
17 |
18 | def transfer_derivative(self, output):
19 | '''
20 | We are using the sigmoid transfer function, the derivative of which can be calculated as follows:
21 | derivative = output * (1.0 - output)
22 | '''
23 | return output * (1.0 - output)
--------------------------------------------------------------------------------
/src/ml/activation/Tanh.py:
--------------------------------------------------------------------------------
1 | '''
2 | Created on 09 gen 2018
3 |
4 | @author: Umberto Griffo
5 | '''
6 |
7 | from ml.activation import ActivationFunction
8 | from math import exp
9 |
10 | class Tanh(ActivationFunction.ActivationFunction):
11 |
12 | def transfer(self, activation):
13 | '''
14 | Tanh activation function.
15 | '''
16 | return (exp(activation) - exp(-activation))/(exp(activation) + exp(-activation))
17 |
18 | def transfer_derivative(self, output):
19 | '''
20 | We are using the tanh transfer function, the derivative of which can be calculated as follows:
21 | derivative = 1 - (output * output)
22 | '''
23 | return 1 - (output * output)
--------------------------------------------------------------------------------
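
A quick comparison of the four activations on the same input, assuming the interpreter is started from `src/` so the imports resolve (values rounded):

```python
from ml.activation.Sigmoid import Sigmoid
from ml.activation.Tanh import Tanh
from ml.activation.ReLU import ReLU
from ml.activation.Linear import Linear

for f in (Sigmoid(), Tanh(), ReLU(), Linear()):
    out = f.transfer(0.5)
    print(type(f).__name__, round(out, 4), round(f.transfer_derivative(out), 4))
# Sigmoid 0.6225 0.235    output * (1 - output)
# Tanh    0.4621 0.7864   1 - output^2
# ReLU    0.5    1
# Linear  0.5    1.0
```
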
/src/ml/activation/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/umbertogriffo/Minimalistic-Multiple-Layer-Neural-Network-from-Scratch-in-Python/9743c77b17f3c2920eb6c891386986693ad94c26/src/ml/activation/__init__.py
--------------------------------------------------------------------------------