├── CI_Workshop_Teplitskiy_Classification.ipynb
├── CI_Workshop_Teplitskiy_Classification.py
├── CI_Workshop_Teplitskiy_Introduction.ipynb
├── CI_Workshop_Teplitskiy_Introduction.py
├── CI_Workshop_Teplitskiy_Regression.ipynb
├── CI_Workshop_Teplitskiy_Regression.py
├── README.md
├── boston_housing_data.csv
└── europarl.txt
/CI_Workshop_Teplitskiy_Classification.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "metadata": {
3 | "name": "",
4 | "signature": "sha256:6cb82d3bd23627f843d520cbd67b75cc4a902633b7bc755611565ee14d6b20b0"
5 | },
6 | "nbformat": 3,
7 | "nbformat_minor": 0,
8 | "worksheets": [
9 | {
10 | "cells": [
11 | {
12 | "cell_type": "markdown",
13 | "metadata": {},
14 | "source": [
15 | "#Classification: Building a Language Detector\n",
16 | "\n",
17 | "\n",
18 | "- inspired by http://bugra.github.io/work/notes/2014-12-26/language-detector-via-scikit-learn/"
19 | ]
20 | },
21 | {
22 | "cell_type": "markdown",
23 | "metadata": {},
24 | "source": [
25 | "#Overfitting\n",
26 | "\n",
27 | "### = Big difference between social science stats and machine learning\n",
28 | "\n",
29 | "
\n",
30 | "\n",
31 | "###Solution: Split data into training part and testing part\n",
32 | "\n",
33 | "- \"testing\" set also called \"validation set,\" \"held-out set\"\n",
34 | "\n",
35 | "###Result: 2 sets of accuracies, 2 sets of errors\n",
36 | "- One for training set <--- no one cares about\n",
37 | "- One for test set <--- everyone cares about, also called \"generalization error\"\n",
38 | "\n",
39 | "
"
40 | ]
41 | },
42 | {
43 | "cell_type": "code",
44 | "collapsed": false,
45 | "input": [
46 | "%matplotlib inline"
47 | ],
48 | "language": "python",
49 | "metadata": {},
50 | "outputs": [],
51 | "prompt_number": 4
52 | },
53 | {
54 | "cell_type": "code",
55 | "collapsed": false,
56 | "input": [
57 | "import matplotlib.pyplot as plt\n",
58 | "import pandas as pd\n",
59 | "import numpy as np"
60 | ],
61 | "language": "python",
62 | "metadata": {},
63 | "outputs": [],
64 | "prompt_number": 5
65 | },
66 | {
67 | "cell_type": "markdown",
68 | "metadata": {},
69 | "source": [
70 | "##Data Description"
71 | ]
72 | },
73 | {
74 | "cell_type": "markdown",
75 | "metadata": {},
76 | "source": [
77 | "European Parliament Proceedings corpus\n",
78 | "- https://language-detection.googlecode.com/git-history/packages/packages/europarl-test.zip\n",
79 | "- 21 languages, 1000 sentences each \n"
80 | ]
81 | },
82 | {
83 | "cell_type": "markdown",
84 | "metadata": {},
85 | "source": [
86 | "##Import data and put it in pandas dataframe"
87 | ]
88 | },
89 | {
90 | "cell_type": "code",
91 | "collapsed": false,
92 | "input": [
93 | "import codecs\n",
94 | "lines = codecs.open('europarl.txt', 'r', 'utf-8').readlines()\n",
95 | "lines = [l.split('\\t') for l in lines]"
96 | ],
97 | "language": "python",
98 | "metadata": {},
99 | "outputs": [],
100 | "prompt_number": 7
101 | },
102 | {
103 | "cell_type": "code",
104 | "collapsed": false,
105 | "input": [
106 | "df = pd.DataFrame(lines, columns=['language', 'text'])\n",
107 | "df.head()"
108 | ],
109 | "language": "python",
110 | "metadata": {},
111 | "outputs": [
112 | {
113 | "html": [
114 | "
\n",
115 | "
\n",
116 | " \n",
117 | " \n",
118 | " | \n",
119 | " language | \n",
120 | " text | \n",
121 | "
\n",
122 | " \n",
123 | " \n",
124 | " \n",
125 | " 0 | \n",
126 | " bg | \n",
127 | " \"\u0415\u0432\u0440\u043e\u043f\u0430 2020\" \u043d\u0435 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0441\u0442\u0430\u0440\u0442\u0438\u0440\u0430 \u043d\u043e\u0432 \u043a\u043e\u043d\u043a\u0443\u0440... | \n",
128 | "
\n",
129 | " \n",
130 | " 1 | \n",
131 | " bg | \n",
132 | " (CS) \u041d\u0430\u0439-\u0433\u043e\u043b\u044f\u043c\u0430\u0442\u0430 \u043d\u0435\u0441\u043f\u0440\u0430\u0432\u0435\u0434\u043b\u0438\u0432\u043e\u0441\u0442 \u043d\u0430 \u0441\u0435\u0433\u0430\u0448\u043d\u0430\u0442\u0430... | \n",
133 | "
\n",
134 | " \n",
135 | " 2 | \n",
136 | " bg | \n",
137 | " (DE) \u0413-\u0436\u043e \u043f\u0440\u0435\u0434\u0441\u0435\u0434\u0430\u0442\u0435\u043b, \u0433-\u043d \u0447\u043b\u0435\u043d \u043d\u0430 \u041a\u043e\u043c\u0438\u0441\u0438\u044f\u0442\u0430, ... | \n",
138 | "
\n",
139 | " \n",
140 | " 3 | \n",
141 | " bg | \n",
142 | " (DE) \u0413-\u043d \u043f\u0440\u0435\u0434\u0441\u0435\u0434\u0430\u0442\u0435\u043b, \u0431\u0438\u0445 \u0438\u0441\u043a\u0430\u043b \u0434\u0430 \u0437\u0430\u043f\u043e\u0447\u043d\u0430 \u0441 \u043a... | \n",
143 | "
\n",
144 | " \n",
145 | " 4 | \n",
146 | " bg | \n",
147 | " (DE) \u0413-\u043d \u043f\u0440\u0435\u0434\u0441\u0435\u0434\u0430\u0442\u0435\u043b, \u0432\u044a\u043f\u0440\u043e\u0441\u044a\u0442 \u0437\u0430 \u043f\u0440\u0430\u0432\u0430\u0442\u0430 \u043d\u0430 \u0447... | \n",
148 | "
\n",
149 | " \n",
150 | "
\n",
151 | "
"
152 | ],
153 | "metadata": {},
154 | "output_type": "pyout",
155 | "prompt_number": 9,
156 | "text": [
157 | " language text\n",
158 | "0 bg \"\u0415\u0432\u0440\u043e\u043f\u0430 2020\" \u043d\u0435 \u0442\u0440\u044f\u0431\u0432\u0430 \u0434\u0430 \u0441\u0442\u0430\u0440\u0442\u0438\u0440\u0430 \u043d\u043e\u0432 \u043a\u043e\u043d\u043a\u0443\u0440...\n",
159 | "1 bg (CS) \u041d\u0430\u0439-\u0433\u043e\u043b\u044f\u043c\u0430\u0442\u0430 \u043d\u0435\u0441\u043f\u0440\u0430\u0432\u0435\u0434\u043b\u0438\u0432\u043e\u0441\u0442 \u043d\u0430 \u0441\u0435\u0433\u0430\u0448\u043d\u0430\u0442\u0430...\n",
160 | "2 bg (DE) \u0413-\u0436\u043e \u043f\u0440\u0435\u0434\u0441\u0435\u0434\u0430\u0442\u0435\u043b, \u0433-\u043d \u0447\u043b\u0435\u043d \u043d\u0430 \u041a\u043e\u043c\u0438\u0441\u0438\u044f\u0442\u0430, ...\n",
161 | "3 bg (DE) \u0413-\u043d \u043f\u0440\u0435\u0434\u0441\u0435\u0434\u0430\u0442\u0435\u043b, \u0431\u0438\u0445 \u0438\u0441\u043a\u0430\u043b \u0434\u0430 \u0437\u0430\u043f\u043e\u0447\u043d\u0430 \u0441 \u043a...\n",
162 | "4 bg (DE) \u0413-\u043d \u043f\u0440\u0435\u0434\u0441\u0435\u0434\u0430\u0442\u0435\u043b, \u0432\u044a\u043f\u0440\u043e\u0441\u044a\u0442 \u0437\u0430 \u043f\u0440\u0430\u0432\u0430\u0442\u0430 \u043d\u0430 \u0447..."
163 | ]
164 | }
165 | ],
166 | "prompt_number": 9
167 | },
168 | {
169 | "cell_type": "code",
170 | "collapsed": false,
171 | "input": [
172 | "# how many of each language\n",
173 | "df.language.value_counts()"
174 | ],
175 | "language": "python",
176 | "metadata": {},
177 | "outputs": [
178 | {
179 | "metadata": {},
180 | "output_type": "pyout",
181 | "prompt_number": 10,
182 | "text": [
183 | "hu 1000\n",
184 | "nl 1000\n",
185 | "fi 1000\n",
186 | "ro 1000\n",
187 | "de 1000\n",
188 | "da 1000\n",
189 | "pt 1000\n",
190 | "pl 1000\n",
191 | "it 1000\n",
192 | "bg 1000\n",
193 | "sv 1000\n",
194 | "cs 1000\n",
195 | "sk 1000\n",
196 | "sl 1000\n",
197 | "lv 1000\n",
198 | "lt 1000\n",
199 | "el 1000\n",
200 | "en 1000\n",
201 | "et 1000\n",
202 | "es 1000\n",
203 | "fr 1000\n",
204 | "dtype: int64"
205 | ]
206 | }
207 | ],
208 | "prompt_number": 10
209 | },
210 | {
211 | "cell_type": "code",
212 | "collapsed": false,
213 | "input": [
214 | "# let's consider just two languages; first, a peek at english (en)\n",
215 | "df[df.language=='en'].head()"
216 | ],
217 | "language": "python",
218 | "metadata": {},
219 | "outputs": [
220 | {
221 | "html": [
222 | "\n",
223 | "
\n",
224 | " \n",
225 | " \n",
226 | " | \n",
227 | " language | \n",
228 | " text | \n",
229 | "
\n",
230 | " \n",
231 | " \n",
232 | " \n",
233 | " 5000 | \n",
234 | " en | \n",
235 | " (BG) Thank you, Mr President.\\n | \n",
236 | "
\n",
237 | " \n",
238 | " 5001 | \n",
239 | " en | \n",
240 | " (EL) Madam President, I agree and recognise Tu... | \n",
241 | "
\n",
242 | " \n",
243 | " 5002 | \n",
244 | " en | \n",
245 | " (FI) Madam President, firstly, I would like to... | \n",
246 | "
\n",
247 | " \n",
248 | " 5003 | \n",
249 | " en | \n",
250 | " (FI) Mr President, the Treaty of Lisbon will r... | \n",
251 | "
\n",
252 | " \n",
253 | " 5004 | \n",
254 | " en | \n",
255 | " (FR) Madam President, one of the priorities of... | \n",
256 | "
\n",
257 | " \n",
258 | "
\n",
259 | "
"
260 | ],
261 | "metadata": {},
262 | "output_type": "pyout",
263 | "prompt_number": 11,
264 | "text": [
265 | " language text\n",
266 | "5000 en (BG) Thank you, Mr President.\\n\n",
267 | "5001 en (EL) Madam President, I agree and recognise Tu...\n",
268 | "5002 en (FI) Madam President, firstly, I would like to...\n",
269 | "5003 en (FI) Mr President, the Treaty of Lisbon will r...\n",
270 | "5004 en (FR) Madam President, one of the priorities of..."
271 | ]
272 | }
273 | ],
274 | "prompt_number": 11
275 | },
276 | {
277 | "cell_type": "code",
278 | "collapsed": false,
279 | "input": [
280 | "df_sub = df[df.language.isin(('lt', 'lv'))]"
281 | ],
282 | "language": "python",
283 | "metadata": {},
284 | "outputs": [],
285 | "prompt_number": 12
286 | },
287 | {
288 | "cell_type": "markdown",
289 | "metadata": {},
290 | "source": [
291 | "##Build classifier"
292 | ]
293 | },
294 | {
295 | "cell_type": "code",
296 | "collapsed": false,
297 | "input": [
298 | "from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer\n",
299 | "from sklearn.linear_model import LogisticRegression\n",
300 | "from sklearn.cross_validation import train_test_split"
301 | ],
302 | "language": "python",
303 | "metadata": {},
304 | "outputs": [],
305 | "prompt_number": 14
306 | },
307 | {
308 | "cell_type": "markdown",
309 | "metadata": {},
310 | "source": [
311 | "###1. Engineer features we will use to predict"
312 | ]
313 | },
314 | {
315 | "cell_type": "code",
316 | "collapsed": false,
317 | "input": [
318 | "# vectorizer = TfidfVectorizer(ngram_range=(1,3),\n",
319 | "# # analyzer='char',\n",
320 | "# max_features=50)\n",
321 | "# # use_idf=False)\n",
322 | "\n",
323 | "vectorizer = CountVectorizer(max_features=50,\n",
324 | " analyzer='char',\n",
325 | " )\n",
326 | "\n",
327 | "\n",
328 | "X_features = vectorizer.fit_transform(df_sub.text) # fit_transform() is like calling fit() and then transform()\n",
329 | "print X_features.shape, type(X_features)"
330 | ],
331 | "language": "python",
332 | "metadata": {},
333 | "outputs": [
334 | {
335 | "output_type": "stream",
336 | "stream": "stdout",
337 | "text": [
338 | "(2000, 50) \n"
339 | ]
340 | }
341 | ],
342 | "prompt_number": 15
343 | },
344 | {
345 | "cell_type": "markdown",
346 | "metadata": {},
347 | "source": [
348 | "###2. Split into train and test sets"
349 | ]
350 | },
351 | {
352 | "cell_type": "code",
353 | "collapsed": false,
354 | "input": [
355 | "y = df_sub.language.values\n",
356 | "X_train, X_test, y_train, y_test = train_test_split(X_features, y, test_size=0.2) \n",
357 | "#setting random_state=0 in train_test_split would make sure we all get the same answer"
358 | ],
359 | "language": "python",
360 | "metadata": {},
361 | "outputs": [
362 | {
363 | "output_type": "stream",
364 | "stream": "stdout",
365 | "text": [
366 | "\n"
367 | ]
368 | }
369 | ],
370 | "prompt_number": 17
371 | },
372 | {
373 | "cell_type": "code",
374 | "collapsed": false,
375 | "input": [
376 | "#composition of train and test sets\n",
377 | "print 'Composition of train set:', np.unique(y_train, return_counts=True)\n",
378 | "print 'Composition of test set:', np.unique(y_test, return_counts=True)"
379 | ],
380 | "language": "python",
381 | "metadata": {},
382 | "outputs": [
383 | {
384 | "output_type": "stream",
385 | "stream": "stdout",
386 | "text": [
387 | "Composition of train set: (array([u'lt', u'lv'], dtype=object), array([812, 788]))\n",
388 | "Composition of test set: (array([u'lt', u'lv'], dtype=object), array([188, 212]))\n"
389 | ]
390 | }
391 | ],
392 | "prompt_number": 18
393 | },
394 | {
395 | "cell_type": "markdown",
396 | "metadata": {},
397 | "source": [
398 | "###3. Train model"
399 | ]
400 | },
401 | {
402 | "cell_type": "code",
403 | "collapsed": false,
404 | "input": [
405 | "clf = LogisticRegression()\n",
406 | "clf.fit(X_train, y_train)"
407 | ],
408 | "language": "python",
409 | "metadata": {},
410 | "outputs": [
411 | {
412 | "metadata": {},
413 | "output_type": "pyout",
414 | "prompt_number": 19,
415 | "text": [
416 | "LogisticRegression(C=1.0, class_weight=None, dual=False, fit_intercept=True,\n",
417 | " intercept_scaling=1, penalty='l2', random_state=None, tol=0.0001)"
418 | ]
419 | }
420 | ],
421 | "prompt_number": 19
422 | },
423 | {
424 | "cell_type": "markdown",
425 | "metadata": {},
426 | "source": [
427 | "###4. Evaluate model\n",
428 | "\n",
429 | "*Test it on the held-out test set*\n",
430 | "\n",
431 | "* **accuracy**: percent correct\n",
432 | "\n",
433 | "\n",
434 | "* When especially interested in a particular class, say \"positive,\"\n",
435 | " - **precision**: of the things you called \"positive,\" what percent were correct?\n",
436 | " - **recall**: of all positive cases, what percent did you find?"
437 | ]
438 | },
439 | {
440 | "cell_type": "code",
441 | "collapsed": false,
442 | "input": [
443 | "y_predicted = clf.predict(X_test)"
444 | ],
445 | "language": "python",
446 | "metadata": {},
447 | "outputs": [],
448 | "prompt_number": 20
449 | },
450 | {
451 | "cell_type": "code",
452 | "collapsed": false,
453 | "input": [
454 | "from sklearn import metrics\n",
455 | "print 'Accuracy:', metrics.accuracy_score(y_test, y_predicted)\n",
456 | "print\n",
457 | "print metrics.classification_report(y_test, y_predicted)\n",
458 | "print\n",
459 | "print 'confusion matrix'\n",
460 | "print\n",
461 | "print pd.DataFrame(metrics.confusion_matrix(y_test, y_predicted))"
462 | ],
463 | "language": "python",
464 | "metadata": {},
465 | "outputs": [
466 | {
467 | "output_type": "stream",
468 | "stream": "stdout",
469 | "text": [
470 | "Accuracy: 0.955\n",
471 | "\n",
472 | " precision recall f1-score support\n",
473 | "\n",
474 | " lt 0.91 1.00 0.95 188\n",
475 | " lv 1.00 0.92 0.96 212\n",
476 | "\n",
477 | "avg / total 0.96 0.95 0.96 400\n",
478 | "\n",
479 | "\n",
480 | "confusion matrix\n",
481 | "\n",
482 | " 0 1\n",
483 | "0 188 0\n",
484 | "1 18 194\n"
485 | ]
486 | }
487 | ],
488 | "prompt_number": 21
489 | },
490 | {
491 | "cell_type": "markdown",
492 | "metadata": {},
493 | "source": [
494 | "###Out of curiosity, how well did we do on the training set?"
495 | ]
496 | },
497 | {
498 | "cell_type": "code",
499 | "collapsed": false,
500 | "input": [
501 | "print 'Accuracy:', metrics.accuracy_score(y_train, clf.predict(X_train))"
502 | ],
503 | "language": "python",
504 | "metadata": {},
505 | "outputs": [
506 | {
507 | "output_type": "stream",
508 | "stream": "stdout",
509 | "text": [
510 | "Accuracy: 0.9575\n"
511 | ]
512 | }
513 | ],
514 | "prompt_number": 22
515 | },
516 | {
517 | "cell_type": "markdown",
518 | "metadata": {},
519 | "source": [
520 | "##ROC curve\n",
521 | "\n",
522 | "x-axis: What percent of negative things did you falsely call positive?\n",
523 | "\n",
524 | "y-axis: Of the positive examples, what percent did you find?"
525 | ]
526 | },
527 | {
528 | "cell_type": "code",
529 | "collapsed": false,
530 | "input": [
531 | "from sklearn.metrics import roc_curve, roc_auc_score\n",
532 | "\n",
533 | "y_label_test = np.asarray(y_test == 'lv', dtype=int)\n",
534 | "proba = clf.predict_proba(X_test)\n",
535 | "proba_label = proba[:,1]\n",
536 | "fpr, tpr, roc_thresholds = roc_curve(y_label_test, proba_label)\n",
537 | "\n",
538 | "plt.plot(fpr, tpr, '-', linewidth=5)\n",
539 | "plt.plot([0, 1], [0, 1], 'k--')\n",
540 | "plt.xlabel('False Positive Rate (\"Cost\")')\n",
541 | "plt.ylabel('True Positive Rate (\"Benefit\")')\n",
542 | "plt.title('Receiver operating characteristic example')\n",
543 | "plt.legend(loc=\"lower right\") \n",
544 | " "
545 | ],
546 | "language": "python",
547 | "metadata": {},
548 | "outputs": [
549 | {
550 | "output_type": "stream",
551 | "stream": "stderr",
552 | "text": [
553 | "C:\\WinPython27\\python-2.7.5\\lib\\site-packages\\matplotlib\\axes.py:4752: UserWarning: No labeled objects found. Use label='...' kwarg on individual plots.\n",
554 | " warnings.warn(\"No labeled objects found. \"\n"
555 | ]
556 | },
557 | {
558 | "metadata": {},
559 | "output_type": "display_data",
560 | "png": "iVBORw0KGgoAAAANSUhEUgAAAYYAAAEZCAYAAACTsIJzAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3XuclGX9//HXh+WkAp7QVFBMJUvzmKAm6pokqFmWp68o\niJn2rewgeEj7qnwz7Nc3PKQoiiZ0MDHTb1/AA5m5ZXlKdlcwgdQixWOiIYm6Ap/fH9c1cO84O3vv\nsjP3zM77+XjsY+ee+/SZe2buz1z3dd3XZe6OiIhITo+sAxARkcqixCAiIq0oMYiISCtKDCIi0ooS\ng4iItKLEICIirSgxVDEze8rMDsk6jqyZ2TQz+68y73OmmV1Wzn2WipmdYmbzOrlut/0MmtlaM9sp\n6ziyYLqPoWuY2VJga2AN8DZwP/A1d38ry7i6GzMbD5zh7gdnHMcM4AV3vyTjOCYBO7v72DLsaybh\nNV9c6n1VAjNbC+zi7n/LOpZyU4mh6zjwGXfvD+wF7AGU9VdsVzCznrW47yyZWV0t7lsqmLvrrwv+\ngL8Dn0pM/w9wd2L6AOBh4E2gGTg0MW8LYAbwIvAG8L+JeZ+Jy78J/AnYIzFvKfApYDtgFbB5Yt4+\nwD+Bujj9ReDpuP37gB0Sy64Fvgo8AzzXxuv7LPCXGMeDwEfz4vh2nP8GcAvQpwOv4XxgAfAOUBe3\n9SzwVtzmsXHZj8VlVgMrgTfi8zOBy+LjemAZMAF4FXgJGJ/Y35bAHGAF8DjwPeChIu/riMT79jww\nLj4/A5gKzI1xPgrslFjvR3H5FcATwIjEvEnAr4CfxflfBIYBj8T9vARcC/RKrLM7oRS6HHgFuBAY\nBbwHtMTj0RSX3RT4cdzOMuAyoEecNz6+B1cCr8d543PHADDgqnjsVsT3ZXfgrLif9+K+/i/x/h0e\nH9cBFyXeuyeAwW0c14LfB+CThM/t4Di9F+Ez9ZE4XfCzUeC1vRmX+yRwenwvXs29f4nPzQ3Ab+L2\nGvjg92Kn+LgPMAX4Rzz+04C+WZ93SnY+yzqA7vJHSAy5L8jg+IW6JE4Pil/C0XF6ZJzeMk7fDdwW\nv9A9gYPj8/vED/Ow+IUdF/fTK7HPT8XHDwBfSsTzQ+D6+PhzhJP+roRS4neAPyWWXQvMAzYjcUJP\nzP8I8G/g8PjlPy9ur2ecvzS+3kHA5sAfWX+ibu81LAUa47p94nPHA9vExyfGfX8oTp9G3omccJL+\nbnxcD7xPOPnWAUcSLu1tGufPAn4B9CUkmueBP7Txng6JJ4yT4ra2APaK82bG93C/OO/nwG2JdU+J\nx6IHIUm9DPSO8yYRTrKfjdN9gX2B4XH5IYQk/s04v39c/xygN9APGB7nXQr8NC/u/yWcuDYCtgIe\nA86K88bH4/O1uK++tE4Mowgn9AFxetfEe7HuOOd97nOfwfPi52BonN4D2KLAcW3v+/A9wud5I2Ah\n8NXEusU+G7nXdhrhs3YZITFeC/QCPh3fz40T7+FbhOTfG7iaxGeL1onhKuDXhO9IP2A2cHnW552S\nnc+yDqC7/BFOcCvjB21t/HLmfqVdUODLex/hJLktoV5i0wLbnFbgi7iY9Ykj+aU8A3ggPjbCCW9E\nnL4X+GJiGz0IJ8vt4/RaoL7Ia7sYmJWYtviFOyQRx1mJ+UcCz3bgNYxv59g2sf4kOp7CiSFZYliV\nO/bxuVcJJ906wgl5aGLeZfnbS8y7ELizjXkzgOl5r3lRkdfwBrGkREgMDe285m8Bd8XHJwPz21hu\nEvCzxPSHgHdJ/JqN6/8ucfz+kbeNdceUUAJdAuyfPIb5xznxXPIzuAQ4JsV3pc3vQ3zck5CcFgL3\ndPCz8dfEvD3iZ3urxHOvA3vGxzOBXyTmbUIojQ5KfC92Inze/03rEuGBwN/ae63V+qc6hq7jwOfc\nfQDh5PQpwq9JCL8ATzCzN3N/wEHANsD2hEsi
KwpscwgwMW+9wYRLR/nuAg40s22AQ4C17v7HxHZ+\nlNjG8vj8oMT6LxR5bdsSEk14oeGb8UKR9Z9PxJjmNbTat5mNM7OmxPIfJ1wCSmu5u69NTK8i/Mrb\ninDSSe5vWZHtDAaKVTy+mnj8TtwHAGZ2rpk9bWb/iq9hU2BgW/s1s4+Y2Vwze9nMVgCTWf+at28n\njqQhhF/HLyeO3w2E157T5nvt7r8jXCK7DnjVzG40s/4p9z0YeC5ljG19H3D31cBPCJewrkiumOKz\nkf+e4O7/zHsu9z45iffB3d8mJPD879dWwMbA/MR+76X1+9mtKDGUgLv/gVB8/UF86nnCr7rNE3/9\n3f1/CF/SLcxs0wKbeh6YnLdeP3e/vcA+3yRcKz0JGEO4NJXczll529nE3R9NbqLIS3qJ8GUGwMyM\ncLJ6MbHMDnmPc/PSvIZ1+zazIcB0wqWOLdx9c+Apwq+2YnEWiz/nn4RfhNsnntu+jWUhvDc7p9hu\nK2Z2MOGyygnuvll8DStY/xrgg/FOI1w+2sXdNyVc7st9P58n/HItZG3e9AuEeoAtE8d7U3ffo8i+\nW3H3a919P2A3wmXE89KsF/e9SzvLQPHvA2Y2CLiEUFd1pZn1js+399noqNznmLj9foTLhS/lLfc6\nIaHsloh3s/gjsFtSYiidq4HhZrY/4frzMWZ2hJnVmVlfM6s3s0Hu/jLh18f1ZraZmfVKtAu/CfhP\nMxtuwSZmdnT8ABfyC8L11ePi45wbgIvMbDcAM9vUzE7owGv5JXC0mX3KzHoBEwmXKx6O8w34qpkN\nMrMtCCe13Im/o69hE8IJ6HWgh5mdTvhVmPMqMDjGkWOkODm4+xpCyWqSmW1kZh8FxtL2Ce9WYKSZ\nnWBmPc1sSzPbK7HPtvQnJKDXzay3mV0CtHcS6Ue4FLkqxvWVxLy7gW3N7Jtm1sfM+pvZ8DjvVWDH\nmKyJn6ffEE6o/c2sh5ntnPZeAzPbz8z2j8d3FeF9XpPYV7F2/TcDl5nZLvG93jN+HvK1+X2Ir2Mm\ncLO7f4lQt5K7X6S9z0ZnHGVmB8XkcxnwiLsnf/AQS583AVeb2VYQkpeZHbGB+65YSgwl4u6vE4rD\nF7j7MkIF8EXAa4RfTBNZf/zHEirNFhO+fN+I25gPnEko2r9BqPAdR9snstmEX2wvu/vCRCy/JpRe\nZsXLFAsJlYzrFmnntfwVOJVQCvoncDThWvLqxPq/IJyQnotxfq8zr8HdnyZcPniE0Prj44TK7JwH\nCK1RXjGz1xL7T26v2Os5m3BZ5xXC+3Mbod6hUCwvAEcR3qvlhOvZe7axz+R+74t/fyXUPb1D4lJc\nG+ueSyjpvUX4VTwrt4y7ryRUnB5DOFH+lXC5EuCO+H+5mT0RH48jVKbmWqHdQbxMUyTu3HMD4v7f\niLG/TmjIAKGl027xcspdfNCVhB8RvyGUkG4iVG633lnx78M3CJdocvdKnA6cbmYHpfhsFHtPCsl9\nbi8lvL/7ED7nhda9gNDK6dH4HbqfUJrqlkp6g5uZ3UI4ibyWV5RNLnMNoeJuFaESsqlkAUlJmNnf\nCTed/S7rWDrKzH4AbO3up2cdi5SXhZsUl3mN3LDXEaUuMcwARrc108yOIlxTHUpoJz2txPFIjTOz\nXeMlDouXY75IaEEmtaezdRPdXkkTg7s/RLjRpC2fJRTncffHgM3M7EOljElqXn/gTkLzw1nAFHef\nnW1IkpFCl56E0HQvS4P4YNPBwbRuciYVzt0/nHUMabn7E8DQrOOQ7OnyYdsqofI5vzinDC4ikqGs\nSwwv0rod+WBat40HYJdddvHnnktz34yIiCQ85+5p7i1pJesSw2xC0zrM7ADgX+7+gctIzz33XElu\n+37/fefOO539989daqyGv0srIIZK+dOx0LHQsVj/10hoTX004fe1Qydu0IQSlxjM7DbgUGCgmb1A\neMd6Abj7
je5+j5kdZWbPEvruKcs1vzfegJtvhuuug+efb3/5zTeHb3wDxo2Dvh9olV1eV1wBEydm\nG0Ol0LFYT8divVo8FtOnX8XUqd/n4ouncPzxY4n3OzJoUDsrtqGkicHdT06xzNmljCFp4UK49lr4\n+c/hnXfaX3677cIH7KyzoF9b9+mWWf/+IS7RsUjSsVivFo/FyJHDOOusZrbroheedR1Dya1ZA3Pn\nwjXXwO9S3n61yy5wwQUwdiz06VPa+Dqqvr4+6xAqho7FejoW69XisRgxYkSXbq8qhvY0M+9onG++\nCbfcAlOnwtKl6dbZe2+48EI47jio07hWIlLlzAx37/CNfN2uxPD00+Fy0U9/CqtWtb+8GXzmM6EO\n4fDDw7SISKVpaWlh8uTJbLbZZpxzzjkl3Ve3SAxr1sA994TLRb/9bbp1BgyAM86Ar30Ndu5Uvb2I\nSHk0NTUxfvx4tt9+e6ZPn17y/VV1YlixAmbMCCWEv6UcxmTXXde3MKqUCmURkUJypYRp06YxZcoU\nxo5d3+KolKomMTQ2hmamEEoIc+bAzJnw9tvp1j/66JAQRo6EHlnfvSEiksK3vvUtnn/+eZqbu67F\nURpVU/kcb9bokP794fTT4eyzYah6xxGRKrNy5Ur69evX6VJCZyufu2ViGDoUvv51OO20UJcgIlKL\n1CoJGD06XC4aNUqXi0SkerS0tLBy5Uq23HLLrEMBsu8raYP16xcuFS1eDPfeC0ceqaQgItWjqamJ\nYcOGcf3112cdyjpVWWLo2zeUDg47LFwu2nTTrCMSEemYQi2OKkVVJoY//hE+8YmsoxAR6ZzkfQnl\nbnGURtVVPm+3HTz3XPa9nIqIdNaVV17JwIEDS35fQk20SurVC+6/Hw49NOuIREQqX2cTQ1VV0557\nrpKCiEipVVViqJCWXCIiqTQ1NfHggw9mHUaHVVViEBGpBi0tLVx66aWMGjWK5cuXZx1Oh1VlqyQR\nkUpV6S2O0lCJQUSki0ydOpVRo0YxceJE5syZU5VJAVRiEBHpMgceeGDVlhKSlBhERLrIJ7rJnbe6\nlCQiIq2oxCAi0gG5Po569OjBpZdemnU4JaESg4hISrmeUOfPn8+ZZ56ZdTglo8QgItKO5H0J1d7i\nKA1dShIRacd3vvMdFi1a1C1aHKWhxCAi0o7vfve79O3bt6Q9oVYSJQYRkXZstNFGWYdQVqpjEBGJ\nWlpaeOWVV7IOI3NKDCIirG9xdM0112QdSuaUGESkpuW3OJo8eXLWIWVOdQwiUrO6Q0+opaDEICI1\na/78+UycOLHkYy9XGyUGEalZX/rSl7IOoSKpjkFERFpRYhCRbq+pqYm5c+dmHUbVSJ0YzKyvmfXp\nyMbNbLSZLTazZ8zsggLzB5rZfWbWbGZPmdn4jmxfRKSYZIujt99+O+twqkabdQxm1gM4FjgZ+CQh\niZiZrQEeAW4Ffu3u3sb6dcBUYCTwIvBnM5vt7osSi50NNLn7hWY2EFhiZj9399Vd8NpEpIapxVHn\nFSsxNACfAKYAO7n7tu6+DbBTfG4Y8Psi6w8HnnX3pe7+PjAL+FzeMi8DA+LjAcByJQUR2VDTp0+v\nmZ5QS6FYq6RPu/t7+U/G5x4FHm3n0tIg4IXE9DJg/7xlbgJ+Z2YvAf2BE1NFLSJSxIgRI1RK2ABt\nJoZCSaGDyxS8xJTnIqDZ3evNbGfgfjPby91XfnDRScybBytXQn19PfX19Sk2LyK1aLfddss6hEw0\nNDTQ0NCwwduxNqoIMLOlhJP7a+6e/0u//Q2bHQBMcvfRcfpCYK27/yCxzD3AZHf/U5x+ALjA3Z/I\n25aDM2UKTJzY0UhEpDtzd92c1gYzw907fHCKXUo6NP5f07mQeAIYamY7Ai8BJxEqspMWEyqn/2Rm\nHwJ2Bf7Wyf2JSA3Jjb28cuVKrrzyyqzD6VaKJYaZ8f8bwHEd3bC7rzazs4
F5QB3wY3dfZGZfjvNv\nBC4HZpjZk4SK8PPd/Y2O7ktEakuyxdH06dOzDqfbKVbHcBiE+xfy55lZX3d/t72Nu/u9wL15z92Y\nePw6cExHAhaR2pUrJUybNo0pU6aoj6MSSdNX0sPAvimeExEpqcsvv5z58+erxVGJFbvBbVtgO2Bj\nM9sXMEJl9ABg4/KEJyKy3kUXXUSvXr1USiixYiWGUcBphPsRrkg8v5LQzFREpKx69+6ddQg1oVgd\nw0xgppkd5+53li8kEal1ubGXd9hhh6xDqUltdolhZmPjwx3NbELib6KZTShTfCJSY3JjL1999dVZ\nh1Kzil1KytUj9CfdXcwiIp1WqMWRZKNYYtg5/n/a3X9ZjmBEpDapJ9TKUqx31aMsVP1fWK5gRKQ2\nPfPMM+oJtYIUKzHcC7wJ9DOz/E7t3N0HFFhHRKTDTjxRHStXkjZLDO5+nrtvBtzj7v3z/pQURES6\nqXaH9nT3z5rZEDMbCWBmG5tZ/9KHJiLdTVNTE7Nmzco6DGlHu4nBzM4CfgXk+jgaDPy6lEGJSPeS\nHHt57dq1WYcj7UjTV9LXCMN0Pgrg7n81s61LGpWIdBtqcVR92i0xAO8lR2ozs57ovgYRSWHmzJka\ne7kKpSkx/N7MvkPoTO/TwFeBOaUNS0S6g0MOOUSlhCqUJjF8GzgDWAh8GbgHuLmUQYlI97DTTjtl\nHYJ0QruJwd3XANPjn4hIQRp7uftI0ypphJndb2bPmNnf45/GZRYRYH2LozPPPDPrUKSLpLmU9GPg\nW0AjsKa04YhINdHYy91TmsTwrzh2s4gIoLGXu7s0ieFBM/shcBewrtmquzeWLCoRqWjXXnutxl7u\nxsy9+C0JZtZAgfsW3P2wEsVUKAYHZ8oUmDixXHsVkbasXr2auro6lRIqnJnh7h1+k9K0SqrvVEQi\n0m317JnmYoNUqzStkrYxsx+b2X1xejczO6P0oYlI1lpaWnjmmWeyDkPKLE2XGDOB3wC5C4nPAOeU\nKiARqQy5sZevuuqqrEORMkuTGAa6++3Epqru/j6wuqRRiUhmWlpauOSSSxg1ahTnnnsu1113XdYh\nSZmluVD4bzPbMjdhZgcAK0oXkohkpampidNOO40hQ4aoxVENS5MYJhI6zdvJzB4GtgKOL2lUIpKJ\nV155hfPOO49TTz1VLY5qWJpWSfPN7FBg1/jUkng5SUS6mSOPPDLrEKQCFK1jiEN6DoyJoD9wJPCZ\nskQmIiKZaDMxmNklwO+Ax8zse8BVwJbAN8zsR2WKT0RKoLGxkZtvVu/5UlixEsPJwG7AfsA3gcPd\n/dvAp4GRZYhNRLpYrsXR6NGj2WijjbIORypUsTqGd+OQnu+Z2bPu/jaAu682s5byhCciXaWxsZHx\n48erxZG0q1hi2NTMvgBY4jG56ZJHJiJd5tZbb+Wcc87hiiuuUIsjaVexxPAH4JgCjwF+X7KIRKTL\nHXbYYSolSGptJgZ3H7+hGzez0cDVQB1ws7v/oMAy9YSK7V7A6+q0T6TrKSFIR7SZGMzstPhwlbvf\n0dENm1kdMJVQUf0i8Gczm+3uixLLbAZcB4xy92VmNrCj+xGR1tauXUuPHml6uxEprNinZ8f4t0Mn\ntz0ceNbdl8b7IGYBn8tbZgxwp7svA3D31zu5L5Gal2txNGbMmKxDkSpXyp8Vg4AXEtPL4nNJQ4Et\nzOxBM3vCzMaWMB6RbquxsZH99tuPxsZGrrzyyqzDkSpXrPL5H4SR297p5LaLDw0X9AL2BQ4HNgYe\nMbNH3V0dwIukoLGXpRSKVT7P3MBtvwhsn5jenlBqSHqBUOH8DvCOmf0B2Isw5kOeScybBytXQn19\nPfX19RsYnkj1u+WWWzT2sqzT0NBAQ0PDBm+nzTGfzewWYJq7/7mN+fsD/+nup7cxvyewhFAaeAl4\nHDg5r/L5o4QK6lFAH+Ax4CR3fzpvWx
rzWaSAtWvXYmYqJUhBpRjz+SrgvDj+whLgZcLNbdsQelp9\nGJjS1srxDumzgXmE5qo/dvdFZvblOP9Gd18chwxdAKwFbspPCiLSNrU+klJos8SwbgGzPsA+wBBC\nvcE/gCfd/d3Sh7cuBpUYpKblxl7efffdsw5FqkhnSwzt/txw9/fc/VF3v93df+nuj5UzKYjUuubm\nZoYPH67WRlI2KoeKVKiWlhYuvfRSjjjiCCZMmKBusqVs0gztKSJltmDBAsaNG8fgwYPV4kjKLnVi\nMLON3X1VKYMRkWDFihVMmDBB9yVIJtq9lGRmnzSzpwktkzCzvc3s+pJHJlLDDj74YMaNG6ekIJlI\nU8dwNTAaeB3A3ZuBQ0sZlIiIZCdV5bO7P5/31OoSxCJSc5qbm7n66quzDkOklTSJ4XkzOwjAzHqb\n2bnAonbWEZEiki2Ottxyy6zDEWklTeXzV4AfEXpGfRH4DfC1UgYl0p01Nzczfvx4tTiSipWmxPAR\ndx/j7lu7+1bufgrw0VIHJtId3XnnnevuS5gzZ46SglSkNCWGqYQuMdp7TkTaUV9fr1KCVLxiQ3se\nCHwS2MrMJhA60APoj+6YFukU1SdINShWYuhNSAJ18X/OW8DxpQxKpDtYs2YNdXV1WYch0mHFBur5\nPfB7M5vp7kvLF5JIdcuNqvbEE09w9913Zx2OSIelqWNYZWZTgN2AjeJz7u6fKl1YItUp2eLopptu\nyjockU5JU1dwK7AY2AmYBCwFnihdSCLVJ78nVLU4kmqWpsSwpbvfbGbfSFxeUmIQSbjjjjs09rJ0\nG2kSQ0v8/4qZfYYwfvPmpQtJpPqMGTOGMWPGqNM76RbSJIbJZrYZMBG4FhgAnFPSqESqjBKCdCdp\nhvac4+7/cveF7l7v7vsCr5QhNpGK09LSQmNjY9ZhiJRUm4nBzHqY2XFmdr6ZHRWf28/MfgNML1uE\nIhVCYy9LrShWYpgOfJVQn/BfZnYn8BPgetQdhtSQ/BZHP/vZz7IOSaSkitUxHADs6e5rzawv4fLR\nzu6+vDyhiWRv4cKFjB07Vj2hSk0plhjed/e1AO7+rpn9XUlBas2aNWs09rLUnGKJ4aNmtjAxvXNi\n2t19zxLGJVIR9t57b/bee++swxApq2KJ4WNli0JERCpGsU70lpYxDpFMNTc3M2fOHC6++OKsQxHJ\nnMZVkJqWbHE0ZMiQrMMRqQhp7nwW6ZY09rJIYalKDGa2sZntWupgRMrl7rvvVk+oIm1ot8RgZp8F\nfgj0AXY0s32A/3b3z5Y6OJFSOeSQQ1RKEGlDmhLDJGB/4E0Ad28ijM0gUrX69++vpCDShjSJ4X13\n/1fec2tLEYxIKbz//vtZhyBSVdIkhr+Y2SlATzMbambXAg+XOC6RDZZrcTRy5EjcPetwRKpGmsTw\ndWB34D3gNuAt4FulDEpkQ+V6Qp0/fz633XaburMQ6YA0zVV3dfeLgItKHYzIhmppaWHy5MlMmzaN\nKVOmqI8jkU5IU2K40swWm9llZvbxjmzczEbHdZ8xswuKLDfMzFab2Rc6sn2RfPPmzVs39vK4ceOU\nFEQ6wdJcezWzbYET498A4Jfuflk769QBS4CRwIvAn4GT3X1RgeXuB1YBM9z9zgLbcnCmTIGJE1O9\nLqlRuc+zEoJI+B64e4e/DKlucHP3l939R8B/Ak8Cl6RYbTjwrLsvdff3gVnA5wos93XgV8A/04Us\n0jYzU1IQ2UDtJgYz283MJpnZU8BUQoukQSm2PQh4ITG9LH89MxtESBbT4lNqOiKptLS08PDDahwn\nUgppKp9vIfzaH+XuL3Zg22lO8lcD33Z3t/AzTz/1pF25Po6GDh3KgQceqBKCSBdrNzG4+wGd3PaL\nwPaJ6e0JpYakTwCz4hd7IHCkmb3v7rM/uLlJzJsHK1dCfX099fX1nQxLqpVaHIkU19DQQENDwwZv\np8
3KZzO7w91PyBvFLafdEdzMrCeh8vlw4CXgcQpUPieWnwHMcfe7CsxT5XONe/rppxkzZgyDBw9m\n+vTp6s5CJIXOVj4XKzF8M/7/DB+8xNPuZSJ3X21mZwPzgDrgx+6+yMy+HOff2NFgpXb17t1bYy+L\nlEm7zVXN7AfufkF7z5WSSgwiIh1XyuaqRxR47qiO7khERKpDm4nBzL4S6xd2NbOFib+lwIKyRSg1\npbm5mfPOO0+d3olkqFiJ4RfAMcBsQj3DMfHvE+5+ShlikxqSHHt5jz32yDockZpWrPLZ3X2pmX2N\nvMpmM9vC3d8obWhSKzT2skhlKZYYbgOOBuZTuBXSh0sSkdSUBx54gJNPPln3JYhUkDYTg7sfHf/v\nWLZopOaMGDFCpQSRCpOmr6SDzKxffDzWzK40syGlD01qQZ8+fZQURCpMmuaqNwCrzGwvYALwN+Cn\nJY1KuqV333036xBEJIU0iWG1u68FjgWuc/epQP/ShiXdSa7F0YgRI9QMVaQKpEkMK83sIuBUYG4c\nWKdXacOS7iI59vLs2bNVuSxSBdIkhpOA94AvuvsrhDEVfljSqKTqJe9LmDBhAnPmzFFdgkiVSNPt\n9stmdiswzMw+Azzu7qpjkKIeeeQRGhsb1eJIpAqlaZV0IvAYcAJhzOfHzeyEUgcm1e3QQw9VKUGk\nSqUZwe2/gGHu/hqAmW0FPADcUcrAREQkG2nqGAz4Z2J6ORqCU6KWlhYeeOCBrMMQkS6UJjHcB8wz\ns/FmdjpwD3BvacOSapBrcTR16lTWrl2bdTgi0kXaTQzufh7hJrc9gT2AG939/FIHJpUrv8XRXXfd\nRY8eaX5jiEg1aLOOwcw+QmiWugth/IXz3H1ZuQKTyrR48WL+4z/+Qz2hinRjxX7m3QLMBY4DGoFr\nyhKRVLQBAwYwceJEtTgS6caKtUrq5+43xceLzaypHAFJZdtuu+0YO3Zs1mGISAkVSwx9zWzf+NiA\njeK0EQbxaSx5dCIiUnbFEsMrwBVFpg8rSURSEZqbm7nhhhu4/vrrVbEsUmOKDdRTX8Y4pEK0tLQw\nefJkpk2bxpQpU9TpnUgNSnPns9QIjb0sIpDuBjepAQ8//LB6QhURQCUGifbff38WLFjANttsk3Uo\nIpKxNL2r9ohjPV8Sp3cws+GlD03Kqa6uTklBRIB0l5KuBw4ExsTpf8fnpEq9/fbbWYcgIhUsTWLY\n392/CrwD4O5voKE9q1Kuj6Phw4ezZs2arMMRkQqVJjG0xHGegXXjMagrzSqTHHv5/vvvp66urv2V\nRKQmpUnrnoHRAAAO9UlEQVQM1wL/C2xtZpcDfwK+X9KopMto7GUR6ag0Yz7/3MzmA4fHpz7n7otK\nG5Z0lYULF9Lc3Kz7EkQkNXP34guY7ZB7GP87gLs/X8K48mNwcKZMgYkTy7VXEZHqZma4e4e7L0hz\nH8M9xGQA9AU+DCwBdu/ozkREpPKlGcHt4+6+R/wbCgwHHi19aNIRLS0tzJ07N+swRKQb6HCXGLG7\n7f1LEIt0Uq7F0fTp01m9enXW4YhIlWv3UpKZJa/q9wD2BV5MuwMzGw1cDdQBN7v7D/LmnwKcT6jD\nWAl8xd0XpN1+LcvvCXXs2LHqDVVENliaOoZ+icerCcN93plm4/H+h6nASEIy+bOZzc5r1fQ34BB3\nXxGTyHTggDTbr2XPPvssxx9/vHpCFZEuVzQxxBP7AHfvbFug4cCz7r40bm8W8DlgXWJw90cSyz8G\nDO7kvmrKlltuyfnnn8/JJ5+sUoKIdKk26xjMrKe7rwEOss6feQYBLySml8Xn2nIGoRWUtGPzzTdn\nzJgxSgoi0uWKlRgeJ9QnNAP/Z2Z3AKviPHf3u1Jsv/hNEglmdhjwReCgwktMYt48WLkS6uvrqa+v\nT7tpEZGa0NDQQENDwwZvp80b3Mysyd33MbOZFDjBu/vp7W7c7ABg
kruPjtMXAmsLVEDvCdwFjHb3\nZwtsp2ZvcGtubmbKlCnMmDGDXr3Ud6GIpNfZG9yKNVfdyswmAAuBpwr8pfEEMNTMdjSz3sBJwOy8\nwHcgJIVTCyWFWpXs4+iII46gZ0+NqSQi5VHsbFMH9N+Qjbv7ajM7G5gXt/djd19kZl+O828ELgE2\nB6bF6+Xvu3tNDwSksZdFJEvtXkoqczwF1dKlpKamJkaNGqX7EkRkg5WyryQpo7333pu//OUvbLXV\nVlmHIiI1qlgdw8iyRSHrmJmSgohkqs3E4O7LyxlILVqxYkXWIYiIfECHO9GTDZdrcbTvvvvS0tKS\ndTgiIq0oMZRZU1MTw4YNY/78+Tz00EP07t0765BERFpRYiiTXClh1KhRnHvuuRp7WUQqllollclz\nzz3HU089pfsSRKTiKTGUycc+9jHuvDNVb+UiIpnSpSQREWlFiaGLtbS0cMcdd2QdhohIpykxdKFc\ni6Of/vSnvPfee1mHIyLSKUoMXSC/xdHs2bPp06dP1mGJiHSKKp830N///neOPfZYdthhB7U4EpFu\nQYlhA2299dZcdNFFnHjiieoJVUS6BSWGDbTJJptw0kknZR2GiEiXUR2DiIi0osSQUlNTE1/4whd4\n9913sw5FRKSklBjakWxx9PnPf16tjUSk21MdQxFNTU2MHz9eLY5EpKYoMbRhyZIljBo1iiuuuIJT\nTz1VLY5EpGYoMbRh1113ZcmSJWy++eZZhyIiUlaqYyhCSUFEapESA7B8uYa3FhHJqenEkGtxtM8+\n+7Bq1aqswxERqQg1mxhyPaE2Njby6KOPsvHGG2cdkohIRai5xFCoJ1Q1QxURWa/mWiW9/PLLLF68\nWPcliIi0oeYSw5AhQ7j99tuzDkNEpGLV3KUkEREprtsmhpaWFn7yk5/g7lmHIiJSVbplYsi1OPrV\nr36lZqgiIh3UrRJDoRZHm2yySdZhiYhUlaqpfP74x2HgwLbnL1u2jKOPPlo9oYqIbCCrhmvwZubt\nxdnS0sLcuXP5/Oc/r55QRUQAM8PdO3xC7DaJQUREWutsYihpHYOZjTazxWb2jJld0MYy18T5T5rZ\nPqWMR0RE2leyxGBmdcBUYDSwG3CymX0sb5mjgF3cfShwFjCtve02NTVx5JFH8tZbb5Ug6srX0NCQ\ndQgVQ8diPR2L9XQsNlwpSwzDgWfdfam7vw/MAj6Xt8xngZ8AuPtjwGZm9qFCG0u2OBozZgz9+/cv\nYeiVSx/69XQs1tOxWE/HYsOVslXSIOCFxPQyYP8UywwGXs3f2LBhw9TiSESkDEqZGNLWFudXjBRc\nb+LEiYwdO1YtjkRESqxkrZLM7ABgkruPjtMXAmvd/QeJZW4AGtx9VpxeDBzq7q/mbetZYOeSBCoi\n0n095+67dHSlUpYYngCGmtmOwEvAScDJecvMBs4GZsVE8q/8pADQmRcmIiKdU7LE4O6rzexsYB5Q\nB/zY3ReZ2Zfj/Bvd/R4zOyqWCN4GTi9VPCIikk5V3OAmIiLlU1Gd6OmGuPXaOxZmdko8BgvM7E9m\ntmcWcZZDms9FXG6Yma02sy+UM75ySfn9qDezJjN7yswayhxi2aT4fgw0s/vMrDkei/EZhFkWZnaL\nmb1qZguLLNOx86a7V8Qf4XLTs8COQC+gGfhY3jJHAffEx/sDj2Ydd4bH4kBg0/h4dC0fi8RyvwPm\nAsdlHXdGn4nNgL8Ag+P0wKzjzvBYTAK+nzsOwHKgZ9axl+h4HAzsAyxsY36Hz5uVVGLo0hviqly7\nx8LdH3H3FXHyMcL9H91Rms8FwNeBXwH/LGdwZZTmOIwB7nT3ZQDu/nqZYyyXNMfiZWBAfDwAWO7u\nq8sYY9m4+0PAm0UW6fB5s5ISQ6Gb3QalWKY7nhDTHIukM4B7ShpRdto9FmY2iHBiyHWp0h0rztJ8\nJoYCW5jZg2b2hJmNLVt05ZXm
WNwE7G5mLwFPAt8sU2yVqMPnzUoaj6FLb4ircqlfk5kdBnwROKh0\n4WQqzbG4Gvi2u7uFOyC7412QaY5DL2Bf4HBgY+ARM3vU3Z8paWTll+ZYXAQ0u3u9me0M3G9me7n7\nyhLHVqk6dN6spMTwIrB9Ynp7QmYrtszg+Fx3k+ZYECucbwJGu3uxomQ1S3MsPkG4FwbC9eQjzex9\nd59dnhDLIs1xeAF43d3fAd4xsz8AewHdLTGkORafBCYDuPtzZvZ3YFfC/VW1psPnzUq6lLTuhjgz\n6024IS7/iz0bGAfr7qwueENcN9DusTCzHYC7gFPd/dkMYiyXdo+Fu+/k7h929w8T6hm+0s2SAqT7\nfvwfMMLM6sxsY0JF49NljrMc0hyLxcBIgHg9fVfgb2WNsnJ0+LxZMSUG1w1x66Q5FsAlwObAtPhL\n+X13H55VzKWS8lh0eym/H4vN7D5gAbAWuMndu11iSPmZuByYYWZPEn4An+/ub2QWdAmZ2W3AocBA\nM3sBuJRwWbHT503d4CYiIq1U0qUkERGpAEoMIiLSihKDiIi0osQgIiKtKDGIiEgrSgwiItKKEoN0\nipmtid075/52KLLsv7tgfzPN7G9xX/PjjTod3cZNZvbR+PiivHl/2tAY43Zyx2WBmd1lZv3aWX4v\nMzuyE/vZ2szujo/rzWyGmZ1mZpcmlhlnZgtjLI1mNrET+znUzA5MTF8a9zPDzA6Nz/3SzD7c0W1L\n5VJikM5a5e77JP6eL7JsV9ws48C57r4P8G2gwze2ufuZ7r44Tl6YN6+r+prKHZc9gbeAL7ez/D6E\nbpE76mxgZnz8geMbk803gU/HWA4AVuQvl8JhhO4lirkJOKcT25YKpcQgXcLMNjGz38Zf8wvM7LMF\nltnWzP4Qf1EvNLMR8fkjzOzhuO4vzWyTtnYT/z8E7BLXnRC3tdDMvpmI5e44SMtCMzshPt9gZp8w\ns/8HbBTj+Fmc9+/4f5aZrTtRx5LKF8ysh5n90MwetzDYyVkpDssjwM5xO8Pja2y0MLDSR2J3Dt8F\nToqxnBBjv8XMHovLfuA4RscDd8fHLYST/jtArpO4C4GJ7v4KgLu3uPvNMZa9zezR+DruMrPN4vPf\nMLO/xOd/YWZDCIntnBjfCODfcT8rgPfivhroXHKTSpX1IBP6q84/YDXQFP/uJHRN0D/OGwg8k1h2\nZfw/EbgoPu4B9IvL/h7YKD5/AXBxgf3NIA7AA5xAOOnuS+j+YSNgE+ApYG/gOGB6Yt0B8f+DwL7J\nmArEeCwwMz7uDTwP9AHOAr4Tn+8D/BnYsUCcue3UxePy1TjdH6iLj0cCv4qPTwOuSax/OXBKfLwZ\nsATYOG8f29DGoCyJZZbn3o8C8xYAB8fH/w1cFR+/CPTKO2aXAhNSfB5+T4EBlPRXnX8V01eSVJ13\nPFzWAcDMegHfN7ODCf30bGdmW7v7a4l1Hgduicv+2t2fNLN6YDfg4djnU2/g4QL7M+CHZvZfwGuE\nMSg+DdzloTdRzOwuwmhW9wFTYslgrrv/sQOv6z7gR/HX/JHA7939PTM7AtjDzI6Pyw0glFqW5q2/\nkZk1EfrAXwrcEJ/fDPipme1CuPST++7ldxN+BHCMmZ0bp/sQesZcklhmCGEgmg4zs00JI/89FJ/6\nCXBHfLwA+IWZ/Rr4dXK1FJt+iTCi2qLOxCWVRZeSpKucQvj1v29MGK8BfZMLxJPRwYRfpjNt/UAy\n9/v6uord3f3MAttfV8fg7qM8dA7ntD5pWdiNP0Mc6hD4npldnPZFuPu7hEsjo4ATgdsTs89OxLmz\nu/+2wCZyCXMI8C7rRxa7DHjA3fcAjiGUctryhcR+dnT3JQWWae9k/Rdgv3aWyd/O0cB1hJLYn82s\nLsX6ye2s7cDyUsGUGKSrDABec/c1FgYPGpK/QGy59E8P17pvJpy8HwUOsjCYSq5+YGgb+8g/GT
4E\nHGtmG8V6iWOBh8xsW+Bdd78VmBL3k+99M2urxHw7YfCjXOkDQk+eX82tE+sINm5jfWIp5hvAZAtF\noQGEX9XQunfLtwiXmXLmxfWI+ykU+z8Il5OK+T6hhPWhuJ3eZnaGh+Fg38zV7wBjgYYY4w7u3kCo\n3N+UcKlvZV58bdk2xiXdgBKDdFZ+S5hbgf3MbAHhZLOowLKHAc1m1kj4Nf4jD+MSjwdus9BF8sOE\nvvPb3ae7NxFa5jxOSDA3ufuTwB7AY/GSziXA9wpsazqwIFf5nLft3wCHEEoyuXGCbyaMbdBoZgsJ\nw4gWSizrtuPuzYRB608E/odwqa2RUP+QW+5BYLdc5TOhZNErVuA/RagDaL2DUKHcs0glPe5+LzAV\n+G3cznzWn+BPIySNJ4E9CRXgPYGfxfevkfDerADmAJ+P8RVsuRUvDQ729S2+pMqp222RKmRmk4BF\n7n57e8uWIZYjgKPdvZbHVe5WVGIQqU7XEX75V4IvAVdlHYR0HZUYRESkFZUYRESkFSUGERFpRYlB\nRERaUWIQEZFWlBhERKQVJQYREWnl/wOXz5j71NmaVgAAAABJRU5ErkJggg==\n",
561 | "text": [
562 | ""
563 | ]
564 | }
565 | ],
566 | "prompt_number": 24
567 | },
568 | {
569 | "cell_type": "markdown",
570 | "metadata": {},
571 | "source": [
572 | "#Examine the coefficients"
573 | ]
574 | },
575 | {
576 | "cell_type": "code",
577 | "collapsed": false,
578 | "input": [
579 | "pd.DataFrame(zip(vectorizer.get_feature_names(), np.exp(clf.coef_[0]))).sort(1)"
580 | ],
581 | "language": "python",
582 | "metadata": {},
583 | "outputs": [
584 | {
585 | "html": [
586 | "\n",
587 | "
\n",
588 | " \n",
589 | " \n",
590 | " | \n",
591 | " 0 | \n",
592 | " 1 | \n",
593 | "
\n",
594 | " \n",
595 | " \n",
596 | " \n",
597 | " 47 | \n",
598 | " yra | \n",
599 | " 0.132292 | \n",
600 | "
\n",
601 | " \n",
602 | " 36 | \n",
603 | " su | \n",
604 | " 0.137260 | \n",
605 | "
\n",
606 | " \n",
607 | " 37 | \n",
608 | " tai | \n",
609 | " 0.145047 | \n",
610 | "
\n",
611 | " \n",
612 | " 10 | \n",
613 | " europos | \n",
614 | " 0.166072 | \n",
615 | "
\n",
616 | " \n",
617 | " 7 | \n",
618 | " d\u0117l | \n",
619 | " 0.174451 | \n",
620 | "
\n",
621 | " \n",
622 | " 16 | \n",
623 | " kad | \n",
624 | " 0.214095 | \n",
625 | "
\n",
626 | " \n",
627 | " 24 | \n",
628 | " mes | \n",
629 | " 0.219679 | \n",
630 | "
\n",
631 | " \n",
632 | " 12 | \n",
633 | " i\u0161 | \n",
634 | " 0.230045 | \n",
635 | "
\n",
636 | " \n",
637 | " 5 | \n",
638 | " b\u016bti | \n",
639 | " 0.236922 | \n",
640 | "
\n",
641 | " \n",
642 | " 17 | \n",
643 | " kaip | \n",
644 | " 0.237122 | \n",
645 | "
\n",
646 | " \n",
647 | " 34 | \n",
648 | " pirmininke | \n",
649 | " 0.240614 | \n",
650 | "
\n",
651 | " \n",
652 | " 38 | \n",
653 | " taip | \n",
654 | " 0.241801 | \n",
655 | "
\n",
656 | " \n",
657 | " 4 | \n",
658 | " buvo | \n",
659 | " 0.248400 | \n",
660 | "
\n",
661 | " \n",
662 | " 35 | \n",
663 | " savo | \n",
664 | " 0.299503 | \n",
665 | "
\n",
666 | " \n",
667 | " 0 | \n",
668 | " apie | \n",
669 | " 0.334180 | \n",
670 | "
\n",
671 | " \n",
672 | " 42 | \n",
673 | " turi | \n",
674 | " 0.341146 | \n",
675 | "
\n",
676 | " \n",
677 | " 28 | \n",
678 | " m\u016bs\u0173 | \n",
679 | " 0.389030 | \n",
680 | "
\n",
681 | " \n",
682 | " 6 | \n",
683 | " b\u016bt\u0173 | \n",
684 | " 0.394295 | \n",
685 | "
\n",
686 | " \n",
687 | " 40 | \n",
688 | " tik | \n",
689 | " 0.459583 | \n",
690 | "
\n",
691 | " \n",
692 | " 20 | \n",
693 | " komisija | \n",
694 | " 0.531496 | \n",
695 | "
\n",
696 | " \n",
697 | " 33 | \n",
698 | " pat | \n",
699 | " 0.739714 | \n",
700 | "
\n",
701 | " \n",
702 | " 3 | \n",
703 | " bet | \n",
704 | " 0.855985 | \n",
705 | "
\n",
706 | " \n",
707 | " 11 | \n",
708 | " ir | \n",
709 | " 0.984499 | \n",
710 | "
\n",
711 | " \n",
712 | " 30 | \n",
713 | " ne | \n",
714 | " 1.088736 | \n",
715 | "
\n",
716 | " \n",
717 | " 41 | \n",
718 | " to | \n",
719 | " 1.269904 | \n",
720 | "
\n",
721 | " \n",
722 | " 14 | \n",
723 | " jo | \n",
724 | " 1.299818 | \n",
725 | "
\n",
726 | " \n",
727 | " 1 | \n",
728 | " ar | \n",
729 | " 1.669845 | \n",
730 | "
\n",
731 | " \n",
732 | " 18 | \n",
733 | " kas | \n",
734 | " 1.879318 | \n",
735 | "
\n",
736 | " \n",
737 | " 25 | \n",
738 | " mums | \n",
739 | " 2.019102 | \n",
740 | "
\n",
741 | " \n",
742 | " 19 | \n",
743 | " ko | \n",
744 | " 2.301956 | \n",
745 | "
\n",
746 | " \n",
747 | " 13 | \n",
748 | " ja | \n",
749 | " 2.590359 | \n",
750 | "
\n",
751 | " \n",
752 | " 9 | \n",
753 | " es | \n",
754 | " 3.274294 | \n",
755 | "
\n",
756 | " \n",
757 | " 29 | \n",
758 | " nav | \n",
759 | " 3.316575 | \n",
760 | "
\n",
761 | " \n",
762 | " 27 | \n",
763 | " m\u016bsu | \n",
764 | " 3.328093 | \n",
765 | "
\n",
766 | " \n",
767 | " 23 | \n",
768 | " lai | \n",
769 | " 3.810891 | \n",
770 | "
\n",
771 | " \n",
772 | " 39 | \n",
773 | " tas | \n",
774 | " 3.846516 | \n",
775 | "
\n",
776 | " \n",
777 | " 48 | \n",
778 | " \u013coti | \n",
779 | " 3.864085 | \n",
780 | "
\n",
781 | " \n",
782 | " 46 | \n",
783 | " vai | \n",
784 | " 4.200415 | \n",
785 | "
\n",
786 | " \n",
787 | " 2 | \n",
788 | " ar\u012b | \n",
789 | " 4.425187 | \n",
790 | "
\n",
791 | " \n",
792 | " 49 | \n",
793 | " \u0161o | \n",
794 | " 4.436995 | \n",
795 | "
\n",
796 | " \n",
797 | " 43 | \n",
798 | " t\u0101 | \n",
799 | " 5.567311 | \n",
800 | "
\n",
801 | " \n",
802 | " 22 | \n",
803 | " k\u0101 | \n",
804 | " 5.645506 | \n",
805 | "
\n",
806 | " \n",
807 | " 31 | \n",
808 | " no | \n",
809 | " 6.474617 | \n",
810 | "
\n",
811 | " \n",
812 | " 21 | \n",
813 | " kungs | \n",
814 | " 6.480887 | \n",
815 | "
\n",
816 | " \n",
817 | " 45 | \n",
818 | " uz | \n",
819 | " 7.243644 | \n",
820 | "
\n",
821 | " \n",
822 | " 8 | \n",
823 | " eiropas | \n",
824 | " 9.303418 | \n",
825 | "
\n",
826 | " \n",
827 | " 26 | \n",
828 | " m\u0113s | \n",
829 | " 9.333379 | \n",
830 | "
\n",
831 | " \n",
832 | " 15 | \n",
833 | " ka | \n",
834 | " 11.846539 | \n",
835 | "
\n",
836 | " \n",
837 | " 32 | \n",
838 | " par | \n",
839 | " 16.106159 | \n",
840 | "
\n",
841 | " \n",
842 | " 44 | \n",
843 | " un | \n",
844 | " 20.055701 | \n",
845 | "
\n",
846 | " \n",
847 | "
\n",
848 | "
"
849 | ],
850 | "metadata": {},
851 | "output_type": "pyout",
852 | "prompt_number": 26,
853 | "text": [
854 | " 0 1\n",
855 | "47 yra 0.132292\n",
856 | "36 su 0.137260\n",
857 | "37 tai 0.145047\n",
858 | "10 europos 0.166072\n",
859 | "7 d\u0117l 0.174451\n",
860 | "16 kad 0.214095\n",
861 | "24 mes 0.219679\n",
862 | "12 i\u0161 0.230045\n",
863 | "5 b\u016bti 0.236922\n",
864 | "17 kaip 0.237122\n",
865 | "34 pirmininke 0.240614\n",
866 | "38 taip 0.241801\n",
867 | "4 buvo 0.248400\n",
868 | "35 savo 0.299503\n",
869 | "0 apie 0.334180\n",
870 | "42 turi 0.341146\n",
871 | "28 m\u016bs\u0173 0.389030\n",
872 | "6 b\u016bt\u0173 0.394295\n",
873 | "40 tik 0.459583\n",
874 | "20 komisija 0.531496\n",
875 | "33 pat 0.739714\n",
876 | "3 bet 0.855985\n",
877 | "11 ir 0.984499\n",
878 | "30 ne 1.088736\n",
879 | "41 to 1.269904\n",
880 | "14 jo 1.299818\n",
881 | "1 ar 1.669845\n",
882 | "18 kas 1.879318\n",
883 | "25 mums 2.019102\n",
884 | "19 ko 2.301956\n",
885 | "13 ja 2.590359\n",
886 | "9 es 3.274294\n",
887 | "29 nav 3.316575\n",
888 | "27 m\u016bsu 3.328093\n",
889 | "23 lai 3.810891\n",
890 | "39 tas 3.846516\n",
891 | "48 \u013coti 3.864085\n",
892 | "46 vai 4.200415\n",
893 | "2 ar\u012b 4.425187\n",
894 | "49 \u0161o 4.436995\n",
895 | "43 t\u0101 5.567311\n",
896 | "22 k\u0101 5.645506\n",
897 | "31 no 6.474617\n",
898 | "21 kungs 6.480887\n",
899 | "45 uz 7.243644\n",
900 | "8 eiropas 9.303418\n",
901 | "26 m\u0113s 9.333379\n",
902 | "15 ka 11.846539\n",
903 | "32 par 16.106159\n",
904 | "44 un 20.055701"
905 | ]
906 | }
907 | ],
908 | "prompt_number": 26
909 | },
910 | {
911 | "cell_type": "markdown",
912 | "metadata": {},
913 | "source": [
914 | "#Exercise\n",
915 | "\n",
916 | "##Create a classifier for *all* 21 languages\n",
917 | "i.e. Given a sentence, output its most probable language\n",
918 | "\n",
919 |        "**hint**: Create 21 classifiers which classify *language x* vs. *all other languages* and choose the language with highest probability"
920 | ]
921 | }
922 | ],
923 | "metadata": {}
924 | }
925 | ]
926 | }
--------------------------------------------------------------------------------
/CI_Workshop_Teplitskiy_Classification.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # 3.0
3 |
4 | #
5 |
6 | # #Classification: Building a Language Detector
7 | #
8 | #
9 | # - inspired by http://bugra.github.io/work/notes/2014-12-26/language-detector-via-scikit-learn/
10 |
11 | #
12 |
13 | # #Overfitting
14 | #
15 | # ### = Big difference between social science stats and machine learning
16 | #
17 | #
18 | #
19 | # ###Solution: Split data into training part and testing part
20 | #
21 | # - "testing" set also called "validation set," "held-out set"
22 | #
23 | # ###Result: 2 sets of accuracies, 2 sets of errors
24 | # - One for training set <--- no one cares about
25 | # - One for test set <--- everyone cares about, also called "generalization error"
26 | #
27 | #
28 |
29 | #
30 |
31 | %matplotlib inline
32 |
33 | #
34 |
35 | import matplotlib.pyplot as plt
36 | import pandas as pd
37 | import numpy as np
38 |
39 | #
40 |
41 | # ##Data Description
42 |
43 | #
44 |
45 | # European Parliament Proceedings corpus
46 | # - https://language-detection.googlecode.com/git-history/packages/packages/europarl-test.zip
47 | # - 21 languages, 1000 sentences each
48 |
49 | #
50 |
51 | # ##Import data and put it in pandas dataframe
52 |
53 | #
54 |
55 | import codecs
56 | lines = codecs.open('europarl.txt', 'r', 'utf-8').readlines()
57 | lines = [l.split('\t') for l in lines]
58 |
59 | #
60 |
61 | df = pd.DataFrame(lines, columns=['language', 'text'])
62 | df.head()
63 |
64 | #
65 |
66 | # how many of each language
67 | df.language.value_counts()
68 |
69 | #
70 |
71 | # let's look at one language's sentences first; below we'll classify lithuanian (lt) vs. latvian (lv)
72 | df[df.language=='en'].head()
73 |
74 | #
75 |
76 | df_sub = df[df.language.isin(('lt', 'lv'))]
77 |
78 | #
79 |
80 | # ##Build classifier
81 |
82 | #
83 |
84 | from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
85 | from sklearn.linear_model import LogisticRegression
86 | from sklearn.cross_validation import train_test_split
87 |
88 | #
89 |
90 | # ###1. Engineer features we will use to predict
91 |
92 | #
93 |
94 | # vectorizer = TfidfVectorizer(ngram_range=(1,3),
95 | # # analyzer='char',
96 | # max_features=50)
97 | # # use_idf=False)
98 |
99 | vectorizer = CountVectorizer(max_features=50,
100 | analyzer='char',
101 | )
102 |
103 |
104 | X_features = vectorizer.fit_transform(df_sub.text) # fit_transform() is like calling fit() and then transform()
105 | print X_features.shape, type(X_features)
106 |
107 | #
108 |
109 | # ###2. Split into train and test sets
110 |
111 | #
112 |
113 | y = df_sub.language.values
114 | X_train, X_test, y_train, y_test = train_test_split(X_features, y, test_size=0.2)
115 | # pass random_state=0 to train_test_split above to make sure we all get the same answer
116 |
117 | #
118 |
119 | #composition of train and test sets
120 | print 'Composition of train set:', np.unique(y_train, return_counts=True)
121 | print 'Composition of test set:', np.unique(y_test, return_counts=True)
122 |
123 | #
124 |
125 | # ###3. Train model
126 |
127 | #
128 |
129 | clf = LogisticRegression()
130 | clf.fit(X_train, y_train)
131 |
132 | #
133 |
134 | # ###4. Evaluate model
135 | #
136 | # *Test it on the held-out test set*
137 | #
138 | # * **accuracy**: percent correct
139 | #
140 | #
141 | # * When especially interested in a particular class, say "positive,"
142 | # - **precision**: of the things you called "positive," what percent were correct?
143 | # - **recall**: of all positive cases, what percent did you find?
144 |
145 | #
146 |
147 | y_predicted = clf.predict(X_test)
148 |
149 | #
150 |
151 | from sklearn import metrics
152 | print 'Accuracy:', metrics.accuracy_score(y_test, y_predicted)
153 | print
154 | print metrics.classification_report(y_test, y_predicted)
155 | print
156 | print 'confusion matrix'
157 | print
158 | print pd.DataFrame(metrics.confusion_matrix(y_test, y_predicted))
159 |
160 | #
161 |
162 | # ###Out of curiosity, how well did we do on the training set?
163 |
164 | #
165 |
166 | print 'Accuracy:', metrics.accuracy_score(y_train, clf.predict(X_train))
167 |
168 | #
169 |
170 | # ##ROC curve
171 | #
172 | # x-axis: What percent of negative things did you falsely call positive?
173 | #
174 | # y-axis: Of the positive examples, what percent did you find?
175 |
176 | #
177 |
178 | from sklearn.metrics import roc_curve, roc_auc_score
179 |
180 | y_label_test = np.asarray(y_test == 'lv', dtype=int)
181 | proba = clf.predict_proba(X_test)
182 | proba_label = proba[:,1]
183 | fpr, tpr, roc_thresholds = roc_curve(y_label_test, proba_label)
184 |
185 | plt.plot(fpr, tpr, '-', linewidth=5)
186 | plt.plot([0, 1], [0, 1], 'k--')
187 | plt.xlabel('False Positive Rate ("Cost")')
188 | plt.ylabel('True Positive Rate ("Benefit")')
189 | plt.title('Receiver operating characteristic example')
190 | plt.legend(loc="lower right")
191 |
192 |
193 | #
194 |
195 | # #Examine the coefficients
196 |
197 | #
198 |
199 | pd.DataFrame(zip(vectorizer.get_feature_names(), np.exp(clf.coef_[0]))).sort(1)
200 |
201 | #
202 |
203 | # #Exercise
204 | #
205 | # ##Create a classifier for *all* 21 languages
206 | # i.e. Given a sentence, output its most probable language
207 | #
208 | # **hint**: Create 21 classifiers which classify *language x* vs. *all other languages* and choose the language with highest probability
209 |
210 |
--------------------------------------------------------------------------------
/CI_Workshop_Teplitskiy_Introduction.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # 3.0
3 |
4 | #
5 |
6 | # #Statistical Learning with Python
7 | #
8 | # Agenda:
9 | #
10 | # **Intro and Plugs**
11 | # - Sociology / computational social science
12 | # - www.knowledgelab.org
13 | # - www.dssg.io
14 | #
15 | # **If you couldn't get your environment set up!**
16 | # - Use: https://wakari.io/
17 | #
18 | # **IPython Notebook** as your IDE
19 | # - Advantages/disadvantages
20 | # - notebook: markdown, code, inline images
21 | # - server
22 | # - "--script"
23 | # - Sharing is caring: http://nbviewer.ipython.org/
24 | # - Keyboard shortcuts
25 |
26 | #
27 |
28 | #
29 |
30 | #
31 |
32 | # **Pandas**
33 |
34 | #
35 |
36 | # - Creating Series and DataFrames
37 | # - Setting column names, index, datatypes
38 | # - Indexing
39 | # - By index, by label
40 | # - Subsetting
41 | # - Missing values
42 |
43 | #
44 |
45 | #
46 |
47 | #
48 |
49 | # **Matplotlib**
50 | # - scatter, plot, hist
51 | # - useful plot customization
52 | # - plots inside of pandas
53 | #
54 | # **Regression Example**
55 | #
56 | # **Classification Example**
57 | #
58 | # * 2 examples, 2 research communities
59 | # * Understanding vs. predicting
60 |
61 | #
62 |
63 | #
64 |
65 | #
66 |
67 | %matplotlib inline
68 |
69 | #
70 |
71 | # #Pandas
72 |
73 | #
74 |
75 | import pandas as pd
76 |
77 | #
78 |
79 | # Provides a crucial 2-d data structure: the ``pandas.DataFrame``
80 | # - pandas.Series is 1-d analogue
81 | # - Like the ``R`` data frames
82 | #
83 | # ``numpy`` does too, BUT ``pandas``
84 | #
85 | # 1. can hold *heterogeneous data*; each column can have its own data type,
86 | # 2. the axes of a DataFrame are *labeled* with column names and row indices,
87 | #
88 | # Perfect for data-wrangling: can take subsets, apply functions, join with other DataFrames, etc.
89 |
90 | #
91 |
92 | # Load car dataset
93 | df = pd.read_csv('http://www-bcf.usc.edu/~gareth/ISL/Auto.csv')
94 | df.head() # print the first lines
95 |
96 | #
97 |
98 | print 'Shape of DataFrame:', df.shape
99 | print '\nColumns:', df.columns
100 | print '\nIndex:', df.index[:10]
101 |
102 | #
103 |
104 | # ###Get the ``df`` nice and cozy
105 |
106 | #
107 |
108 | df.index = df.name
109 | del df['name']
110 | df.head()
111 |
112 | #
113 |
114 | # ###Accessing, adding data
115 | # You can use the dot ``.`` or bracket ``[]`` notation to access columns of the dataset. To add new columns you have to use the bracket ``[]`` notation
116 |
117 | #
118 |
119 | mpg = df.mpg # get mpg column using '.'
120 | weight = df['weight'] # get weight column using brackets
121 | df['mpg_per_weight'] = mpg / weight # note the element-wise division
122 |
123 | print df[['mpg', 'weight', 'mpg_per_weight']].head() # get a bunch of columns at the same time
124 |
125 | #
126 |
127 | # ##Looking at data
128 |
129 | #
130 |
131 | # ###Pandas indexing is really smart!
132 |
133 | #
134 |
135 | # To look at all the Fords, create array of length = #rows of True and False, where True if 'ford' in string
136 | arr_for_indexing = ['ford' in name for name in df.index]
137 | df[arr_for_indexing].head()
138 |
139 | #
140 |
141 | # ###But it can get confused: Indexing by "label" and by "location", ``.loc`` vs ``.iloc`` vs ``.ix``
142 |
143 | #
144 |
145 | # ``.loc`` -- by label
146 | #
147 | # ``.iloc`` -- by location
148 | #
149 | # ``.ix`` -- by a mix
150 |
151 | #
152 |
153 | df.ix[0:5, ['weight', 'mpg']] # select the first 5 rows and two columns weight and mpg
154 |
155 | #
156 |
157 | # useful function!: value_counts()
158 | df.year.value_counts()
159 |
160 | #
161 |
162 | # ###Let's change year from "70" to "1970"
163 |
164 | #
165 |
166 | df.year.apply(lambda x: '19' + str(x)) # this spits out the Series we like
167 | df.year = df.year.apply(lambda x: '19' + str(x))
168 |
169 | #
170 |
171 | #Uh oh, let's change it back!
172 | df.year.str[-2:]
173 |
174 | #
175 |
176 | # #Visualizing data
177 |
178 | #
179 |
180 | # Most popular library: ``matplotlib``
181 | #
182 | # Others:
183 | # - ``seaborn``
184 | # - ``ggplot``
185 | # - ``prettyplotlib``
186 | # - ``bokeh``
187 |
188 | #
189 |
190 | # ### common matplotlib plots
191 | # - plt.hist <-- histograms
192 | # - plt.scatter <-- scatter plot
193 | # - plt.plot <-- most others
194 |
195 | #
196 |
197 | import matplotlib.pyplot as plt
198 | plt.hist(df.weight)
199 |
200 | #
201 |
202 | # ### common plot features to tweak
203 | # - plt.title('Sk00l Rox', fontsize=20)
204 | # - plt.xlabel('')
205 | # - plt.ylabel('')
206 | # - plt.xlim(min, max)
207 | # - plt.legend()
208 |
209 | #
210 |
211 | # ###We can also use pandas' ``plot`` and other plotting functions!!!
212 |
213 | #
214 |
215 | df.weight.hist()
216 | plt.title('OMG THERES A TITLE!!!11', fontsize=20)
217 |
218 | # let's add decoration
219 | plt.xlabel('weight')
220 | plt.ylabel('frequency')
221 | plt.xlim(0, df.weight.max())
222 | plt.legend()
223 |
224 | #
225 |
226 | plt.scatter(df.year.astype(int), df.weight)
227 |
228 | #
229 |
230 | df.boxplot('weight')
231 | # df.boxplot('weight', 'year')
232 |
233 | #
234 |
235 | from pandas.tools.plotting import scatter_matrix
236 | _ = scatter_matrix(df[['mpg', 'cylinders', 'displacement']], figsize=(14, 10))
237 |
238 | #
239 |
240 | # #Regression next. But first...
241 |
242 | #
243 |
244 | plt.xkcd()
245 |
246 | #
247 |
248 | #
249 |
250 | #
251 |
252 | df.weight.hist()
253 | plt.title('WOT, THERES AN XKCD STYLE???', fontsize=18)
254 | plt.xlabel('weight')
255 | plt.ylabel('freq.')
256 |
257 |
--------------------------------------------------------------------------------
/CI_Workshop_Teplitskiy_Regression.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # 3.0
3 |
4 | #
5 |
6 | # #Regression Example
7 | #
8 | # - Regression = continuous outcome variable (e.g. "price")
9 | # - Classification = discrete outcome variable (e.g. "class")
10 | #
11 | # ###Goal: Find "best" line
12 | #
13 | # Example with only one predictor, "Year"
14 | #
15 | #
16 | #
17 | # ###Solution:
18 | # - Minimize squared distances ("errors")
19 | #
20 | # ###Evaluation
21 | # - model fit: R^2
22 | #
23 | #
24 | # ###Coefficients
25 | #
26 | # size? statistical significance?
27 | # - Usually calculate: **If** the actual coefficient was 0, how often would we see a coefficient *estimate* this large or larger?
28 | # - *"How consistent is the evidence with the null hypothesis?"*
29 | # - Not exactly what we want
30 | # - In practice, good enough if p < 0.05
31 |
32 | #
33 |
34 | %matplotlib inline
35 | import matplotlib.pyplot as plt
36 | import numpy as np
37 | import pandas as pd
38 | import statsmodels.formula.api as smf
39 |
40 | #
41 |
42 | # ##Data: Boston Housing Prices
43 |
44 | #
45 |
46 | #
47 |
48 | #
49 |
50 | # ###How much do people care about pollution?
51 |
52 | #
53 |
54 | from sklearn.datasets import load_boston
55 | boston = load_boston()
56 |
57 | #
58 |
59 | print boston.DESCR
60 |
61 | #
62 |
63 | # ###Set up our df
64 |
65 | #
66 |
67 | column_names = [
68 | 'CRIM', #per capita crime rate by town
69 | 'ZN', #proportion of residential land zoned for lots over 25,000 sq.ft.
70 | 'INDUS', #proportion of non-retail business acres per town
71 | 'CHAS', #Charles River dummy variable (= 1 if tract bounds river; 0 otherwise)
72 | 'NOX', #nitric oxides concentration (parts per 10 million)
73 | 'RM', #average number of rooms per dwelling
74 | 'AGE', #proportion of owner-occupied units built prior to 1940
75 | 'DIS', #weighted distances to five Boston employment centres
76 | 'RAD', #index of accessibility to radial highways
77 | 'TAX', #full-value property-tax rate per $10,000
78 | 'PTRATIO', #pupil-teacher ratio by town
79 | 'B', # 1000(Bk - 0.63)^2 where Bk is the proportion of blacks by town
80 | 'LSTAT'] # % lower status of the population
81 | # 'MEDV'] # Median value of owner-occupied homes in $1000's
82 |
83 | df = pd.DataFrame(boston.data, columns=column_names)
84 | df['MEDV'] = boston.target
85 |
86 | #
87 |
88 | df.columns
89 |
90 | #
91 |
92 | # ### Let's look at value and crime
93 |
94 | #
95 |
96 | plt.scatter(df.CRIM, df.MEDV)
97 |
98 | #
99 |
100 | results = smf.ols(formula='standardize(MEDV) ~ C(CRIM)', data=df).fit()
101 | results.summary()
102 |
103 | #
104 |
105 | results.params
106 |
107 | #
108 |
109 | plt.scatter(df.CRIM, df.MEDV)
110 | xs = np.linspace(0, df.CRIM.max(), 100)
111 | plt.plot(xs, xs*results.params[0], 'r--')
112 |
113 | #
114 |
115 | #
116 |
117 | #
118 |
119 | # ###Let's add an intercept
120 |
121 | #
122 |
123 | results = smf.ols(formula='MEDV ~ CRIM', data=df).fit()
124 | results.summary()
125 |
126 | #
127 |
128 | plt.scatter(df.CRIM, df.MEDV)
129 | xs = np.linspace(0, df.CRIM.max(), 100)
130 | plt.plot(xs, results.params[0] + xs*results.params[1], 'r--')
131 |
132 | #
133 |
134 | # ##Handling categorical variables, standardizing
135 |
136 | #
137 |
138 | # ###by using the R-style formula
139 | # - C(var_name)
140 | # - standardize(var_name)
141 |
142 | #
143 |
144 | # #Exercise:
145 | # Predict house prices as well as you can!
146 |
147 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Statistical_Learning_With_Python
2 | Introduction to Statistical Learning with Python -- a workshop I gave at the Computation Institute (U of Chicago) on 2015-01-21
3 |
4 | ###Note/warning:
5 | **You will need the latest version of IPython Notebook to open these! Make sure to update your IPython if the files don't open. (Or just use the .py files instead)**
6 |
7 | ##Relevant files
8 | There are 3 IPython Notebooks that were used at the workshop:
9 | - Introduction
10 | - Regression (uses the boston housing data)
11 | - Classification (uses the europarl data)
12 |
13 | There is a .py version of each Notebook included too.
14 |
15 | Enjoy!
16 |
--------------------------------------------------------------------------------
/boston_housing_data.csv:
--------------------------------------------------------------------------------
1 | ,CRIM,ZN,INDUS,CHAS,NOX,RM,AGE,DIS,RAD,TAX,PTRATIO,B,LSTAT,MEDV
2 | 0,0.00632,18.0,2.31,0.0,0.538,6.575,65.2,4.09,1.0,296.0,15.3,396.9,4.98,24.0
3 | 1,0.02731,0.0,7.07,0.0,0.469,6.421,78.9,4.9671,2.0,242.0,17.8,396.9,9.14,21.6
4 | 2,0.02729,0.0,7.07,0.0,0.469,7.185,61.1,4.9671,2.0,242.0,17.8,392.83,4.03,34.7
5 | 3,0.03237,0.0,2.18,0.0,0.458,6.998,45.8,6.0622,3.0,222.0,18.7,394.63,2.94,33.4
6 | 4,0.06905,0.0,2.18,0.0,0.458,7.147,54.2,6.0622,3.0,222.0,18.7,396.9,5.33,36.2
7 | 5,0.02985,0.0,2.18,0.0,0.458,6.43,58.7,6.0622,3.0,222.0,18.7,394.12,5.21,28.7
8 | 6,0.08829,12.5,7.87,0.0,0.524,6.012,66.6,5.5605,5.0,311.0,15.2,395.6,12.43,22.9
9 | 7,0.14455,12.5,7.87,0.0,0.524,6.172,96.1,5.9505,5.0,311.0,15.2,396.9,19.15,27.1
10 | 8,0.21124,12.5,7.87,0.0,0.524,5.631,100.0,6.0821,5.0,311.0,15.2,386.63,29.93,16.5
11 | 9,0.17004,12.5,7.87,0.0,0.524,6.004,85.9,6.5921,5.0,311.0,15.2,386.71,17.1,18.9
12 | 10,0.22489,12.5,7.87,0.0,0.524,6.377,94.3,6.3467,5.0,311.0,15.2,392.52,20.45,15.0
13 | 11,0.11747,12.5,7.87,0.0,0.524,6.009,82.9,6.2267,5.0,311.0,15.2,396.9,13.27,18.9
14 | 12,0.09378,12.5,7.87,0.0,0.524,5.889,39.0,5.4509,5.0,311.0,15.2,390.5,15.71,21.7
15 | 13,0.62976,0.0,8.14,0.0,0.538,5.949,61.8,4.7075,4.0,307.0,21.0,396.9,8.26,20.4
16 | 14,0.63796,0.0,8.14,0.0,0.538,6.096,84.5,4.4619,4.0,307.0,21.0,380.02,10.26,18.2
17 | 15,0.62739,0.0,8.14,0.0,0.538,5.834,56.5,4.4986,4.0,307.0,21.0,395.62,8.47,19.9
18 | 16,1.05393,0.0,8.14,0.0,0.538,5.935,29.3,4.4986,4.0,307.0,21.0,386.85,6.58,23.1
19 | 17,0.7842,0.0,8.14,0.0,0.538,5.99,81.7,4.2579,4.0,307.0,21.0,386.75,14.67,17.5
20 | 18,0.80271,0.0,8.14,0.0,0.538,5.456,36.6,3.7965,4.0,307.0,21.0,288.99,11.69,20.2
21 | 19,0.7258,0.0,8.14,0.0,0.538,5.727,69.5,3.7965,4.0,307.0,21.0,390.95,11.28,18.2
22 | 20,1.25179,0.0,8.14,0.0,0.538,5.57,98.1,3.7979,4.0,307.0,21.0,376.57,21.02,13.6
23 | 21,0.85204,0.0,8.14,0.0,0.538,5.965,89.2,4.0123,4.0,307.0,21.0,392.53,13.83,19.6
24 | 22,1.23247,0.0,8.14,0.0,0.538,6.142,91.7,3.9769,4.0,307.0,21.0,396.9,18.72,15.2
25 | 23,0.98843,0.0,8.14,0.0,0.538,5.813,100.0,4.0952,4.0,307.0,21.0,394.54,19.88,14.5
26 | 24,0.75026,0.0,8.14,0.0,0.538,5.924,94.1,4.3996,4.0,307.0,21.0,394.33,16.3,15.6
27 | 25,0.84054,0.0,8.14,0.0,0.538,5.599,85.7,4.4546,4.0,307.0,21.0,303.42,16.51,13.9
28 | 26,0.67191,0.0,8.14,0.0,0.538,5.813,90.3,4.682,4.0,307.0,21.0,376.88,14.81,16.6
29 | 27,0.95577,0.0,8.14,0.0,0.538,6.047,88.8,4.4534,4.0,307.0,21.0,306.38,17.28,14.8
30 | 28,0.77299,0.0,8.14,0.0,0.538,6.495,94.4,4.4547,4.0,307.0,21.0,387.94,12.8,18.4
31 | 29,1.00245,0.0,8.14,0.0,0.538,6.674,87.3,4.239,4.0,307.0,21.0,380.23,11.98,21.0
32 | 30,1.13081,0.0,8.14,0.0,0.538,5.713,94.1,4.233,4.0,307.0,21.0,360.17,22.6,12.7
33 | 31,1.35472,0.0,8.14,0.0,0.538,6.072,100.0,4.175,4.0,307.0,21.0,376.73,13.04,14.5
34 | 32,1.38799,0.0,8.14,0.0,0.538,5.95,82.0,3.99,4.0,307.0,21.0,232.6,27.71,13.2
35 | 33,1.15172,0.0,8.14,0.0,0.538,5.701,95.0,3.7872,4.0,307.0,21.0,358.77,18.35,13.1
36 | 34,1.61282,0.0,8.14,0.0,0.538,6.096,96.9,3.7598,4.0,307.0,21.0,248.31,20.34,13.5
37 | 35,0.06417,0.0,5.96,0.0,0.499,5.933,68.2,3.3603,5.0,279.0,19.2,396.9,9.68,18.9
38 | 36,0.09744,0.0,5.96,0.0,0.499,5.841,61.4,3.3779,5.0,279.0,19.2,377.56,11.41,20.0
39 | 37,0.08014,0.0,5.96,0.0,0.499,5.85,41.5,3.9342,5.0,279.0,19.2,396.9,8.77,21.0
40 | 38,0.17505,0.0,5.96,0.0,0.499,5.966,30.2,3.8473,5.0,279.0,19.2,393.43,10.13,24.7
41 | 39,0.02763,75.0,2.95,0.0,0.428,6.595,21.8,5.4011,3.0,252.0,18.3,395.63,4.32,30.8
42 | 40,0.03359,75.0,2.95,0.0,0.428,7.024,15.8,5.4011,3.0,252.0,18.3,395.62,1.98,34.9
43 | 41,0.12744,0.0,6.91,0.0,0.448,6.77,2.9,5.7209,3.0,233.0,17.9,385.41,4.84,26.6
44 | 42,0.1415,0.0,6.91,0.0,0.448,6.169,6.6,5.7209,3.0,233.0,17.9,383.37,5.81,25.3
45 | 43,0.15936,0.0,6.91,0.0,0.448,6.211,6.5,5.7209,3.0,233.0,17.9,394.46,7.44,24.7
46 | 44,0.12269,0.0,6.91,0.0,0.448,6.069,40.0,5.7209,3.0,233.0,17.9,389.39,9.55,21.2
47 | 45,0.17142,0.0,6.91,0.0,0.448,5.682,33.8,5.1004,3.0,233.0,17.9,396.9,10.21,19.3
48 | 46,0.18836,0.0,6.91,0.0,0.448,5.786,33.3,5.1004,3.0,233.0,17.9,396.9,14.15,20.0
49 | 47,0.22927,0.0,6.91,0.0,0.448,6.03,85.5,5.6894,3.0,233.0,17.9,392.74,18.8,16.6
50 | 48,0.25387,0.0,6.91,0.0,0.448,5.399,95.3,5.87,3.0,233.0,17.9,396.9,30.81,14.4
51 | 49,0.21977,0.0,6.91,0.0,0.448,5.602,62.0,6.0877,3.0,233.0,17.9,396.9,16.2,19.4
52 | 50,0.08873,21.0,5.64,0.0,0.439,5.963,45.7,6.8147,4.0,243.0,16.8,395.56,13.45,19.7
53 | 51,0.04337,21.0,5.64,0.0,0.439,6.115,63.0,6.8147,4.0,243.0,16.8,393.97,9.43,20.5
54 | 52,0.0536,21.0,5.64,0.0,0.439,6.511,21.1,6.8147,4.0,243.0,16.8,396.9,5.28,25.0
55 | 53,0.04981,21.0,5.64,0.0,0.439,5.998,21.4,6.8147,4.0,243.0,16.8,396.9,8.43,23.4
56 | 54,0.0136,75.0,4.0,0.0,0.41,5.888,47.6,7.3197,3.0,469.0,21.1,396.9,14.8,18.9
57 | 55,0.01311,90.0,1.22,0.0,0.403,7.249,21.9,8.6966,5.0,226.0,17.9,395.93,4.81,35.4
58 | 56,0.02055,85.0,0.74,0.0,0.41,6.383,35.7,9.1876,2.0,313.0,17.3,396.9,5.77,24.7
59 | 57,0.01432,100.0,1.32,0.0,0.411,6.816,40.5,8.3248,5.0,256.0,15.1,392.9,3.95,31.6
60 | 58,0.15445,25.0,5.13,0.0,0.453,6.145,29.2,7.8148,8.0,284.0,19.7,390.68,6.86,23.3
61 | 59,0.10328,25.0,5.13,0.0,0.453,5.927,47.2,6.932,8.0,284.0,19.7,396.9,9.22,19.6
62 | 60,0.14932,25.0,5.13,0.0,0.453,5.741,66.2,7.2254,8.0,284.0,19.7,395.11,13.15,18.7
63 | 61,0.17171,25.0,5.13,0.0,0.453,5.966,93.4,6.8185,8.0,284.0,19.7,378.08,14.44,16.0
64 | 62,0.11027,25.0,5.13,0.0,0.453,6.456,67.8,7.2255,8.0,284.0,19.7,396.9,6.73,22.2
65 | 63,0.1265,25.0,5.13,0.0,0.453,6.762,43.4,7.9809,8.0,284.0,19.7,395.58,9.5,25.0
66 | 64,0.01951,17.5,1.38,0.0,0.4161,7.104,59.5,9.2229,3.0,216.0,18.6,393.24,8.05,33.0
67 | 65,0.03584,80.0,3.37,0.0,0.398,6.29,17.8,6.6115,4.0,337.0,16.1,396.9,4.67,23.5
68 | 66,0.04379,80.0,3.37,0.0,0.398,5.787,31.1,6.6115,4.0,337.0,16.1,396.9,10.24,19.4
69 | 67,0.05789,12.5,6.07,0.0,0.409,5.878,21.4,6.498,4.0,345.0,18.9,396.21,8.1,22.0
70 | 68,0.13554,12.5,6.07,0.0,0.409,5.594,36.8,6.498,4.0,345.0,18.9,396.9,13.09,17.4
71 | 69,0.12816,12.5,6.07,0.0,0.409,5.885,33.0,6.498,4.0,345.0,18.9,396.9,8.79,20.9
72 | 70,0.08826,0.0,10.81,0.0,0.413,6.417,6.6,5.2873,4.0,305.0,19.2,383.73,6.72,24.2
73 | 71,0.15876,0.0,10.81,0.0,0.413,5.961,17.5,5.2873,4.0,305.0,19.2,376.94,9.88,21.7
74 | 72,0.09164,0.0,10.81,0.0,0.413,6.065,7.8,5.2873,4.0,305.0,19.2,390.91,5.52,22.8
75 | 73,0.19539,0.0,10.81,0.0,0.413,6.245,6.2,5.2873,4.0,305.0,19.2,377.17,7.54,23.4
76 | 74,0.07896,0.0,12.83,0.0,0.437,6.273,6.0,4.2515,5.0,398.0,18.7,394.92,6.78,24.1
77 | 75,0.09512,0.0,12.83,0.0,0.437,6.286,45.0,4.5026,5.0,398.0,18.7,383.23,8.94,21.4
78 | 76,0.10153,0.0,12.83,0.0,0.437,6.279,74.5,4.0522,5.0,398.0,18.7,373.66,11.97,20.0
79 | 77,0.08707,0.0,12.83,0.0,0.437,6.14,45.8,4.0905,5.0,398.0,18.7,386.96,10.27,20.8
80 | 78,0.05646,0.0,12.83,0.0,0.437,6.232,53.7,5.0141,5.0,398.0,18.7,386.4,12.34,21.2
81 | 79,0.08387,0.0,12.83,0.0,0.437,5.874,36.6,4.5026,5.0,398.0,18.7,396.06,9.1,20.3
82 | 80,0.04113,25.0,4.86,0.0,0.426,6.727,33.5,5.4007,4.0,281.0,19.0,396.9,5.29,28.0
83 | 81,0.04462,25.0,4.86,0.0,0.426,6.619,70.4,5.4007,4.0,281.0,19.0,395.63,7.22,23.9
84 | 82,0.03659,25.0,4.86,0.0,0.426,6.302,32.2,5.4007,4.0,281.0,19.0,396.9,6.72,24.8
85 | 83,0.03551,25.0,4.86,0.0,0.426,6.167,46.7,5.4007,4.0,281.0,19.0,390.64,7.51,22.9
86 | 84,0.05059,0.0,4.49,0.0,0.449,6.389,48.0,4.7794,3.0,247.0,18.5,396.9,9.62,23.9
87 | 85,0.05735,0.0,4.49,0.0,0.449,6.63,56.1,4.4377,3.0,247.0,18.5,392.3,6.53,26.6
88 | 86,0.05188,0.0,4.49,0.0,0.449,6.015,45.1,4.4272,3.0,247.0,18.5,395.99,12.86,22.5
89 | 87,0.07151,0.0,4.49,0.0,0.449,6.121,56.8,3.7476,3.0,247.0,18.5,395.15,8.44,22.2
90 | 88,0.0566,0.0,3.41,0.0,0.489,7.007,86.3,3.4217,2.0,270.0,17.8,396.9,5.5,23.6
91 | 89,0.05302,0.0,3.41,0.0,0.489,7.079,63.1,3.4145,2.0,270.0,17.8,396.06,5.7,28.7
92 | 90,0.04684,0.0,3.41,0.0,0.489,6.417,66.1,3.0923,2.0,270.0,17.8,392.18,8.81,22.6
93 | 91,0.03932,0.0,3.41,0.0,0.489,6.405,73.9,3.0921,2.0,270.0,17.8,393.55,8.2,22.0
94 | 92,0.04203,28.0,15.04,0.0,0.464,6.442,53.6,3.6659,4.0,270.0,18.2,395.01,8.16,22.9
95 | 93,0.02875,28.0,15.04,0.0,0.464,6.211,28.9,3.6659,4.0,270.0,18.2,396.33,6.21,25.0
96 | 94,0.04294,28.0,15.04,0.0,0.464,6.249,77.3,3.615,4.0,270.0,18.2,396.9,10.59,20.6
97 | 95,0.12204,0.0,2.89,0.0,0.445,6.625,57.8,3.4952,2.0,276.0,18.0,357.98,6.65,28.4
98 | 96,0.11504,0.0,2.89,0.0,0.445,6.163,69.6,3.4952,2.0,276.0,18.0,391.83,11.34,21.4
99 | 97,0.12083,0.0,2.89,0.0,0.445,8.069,76.0,3.4952,2.0,276.0,18.0,396.9,4.21,38.7
100 | 98,0.08187,0.0,2.89,0.0,0.445,7.82,36.9,3.4952,2.0,276.0,18.0,393.53,3.57,43.8
101 | 99,0.0686,0.0,2.89,0.0,0.445,7.416,62.5,3.4952,2.0,276.0,18.0,396.9,6.19,33.2
102 | 100,0.14866,0.0,8.56,0.0,0.52,6.727,79.9,2.7778,5.0,384.0,20.9,394.76,9.42,27.5
103 | 101,0.11432,0.0,8.56,0.0,0.52,6.781,71.3,2.8561,5.0,384.0,20.9,395.58,7.67,26.5
104 | 102,0.22876,0.0,8.56,0.0,0.52,6.405,85.4,2.7147,5.0,384.0,20.9,70.8,10.63,18.6
105 | 103,0.21161,0.0,8.56,0.0,0.52,6.137,87.4,2.7147,5.0,384.0,20.9,394.47,13.44,19.3
106 | 104,0.1396,0.0,8.56,0.0,0.52,6.167,90.0,2.421,5.0,384.0,20.9,392.69,12.33,20.1
107 | 105,0.13262,0.0,8.56,0.0,0.52,5.851,96.7,2.1069,5.0,384.0,20.9,394.05,16.47,19.5
108 | 106,0.1712,0.0,8.56,0.0,0.52,5.836,91.9,2.211,5.0,384.0,20.9,395.67,18.66,19.5
109 | 107,0.13117,0.0,8.56,0.0,0.52,6.127,85.2,2.1224,5.0,384.0,20.9,387.69,14.09,20.4
110 | 108,0.12802,0.0,8.56,0.0,0.52,6.474,97.1,2.4329,5.0,384.0,20.9,395.24,12.27,19.8
111 | 109,0.26363,0.0,8.56,0.0,0.52,6.229,91.2,2.5451,5.0,384.0,20.9,391.23,15.55,19.4
112 | 110,0.10793,0.0,8.56,0.0,0.52,6.195,54.4,2.7778,5.0,384.0,20.9,393.49,13.0,21.7
113 | 111,0.10084,0.0,10.01,0.0,0.547,6.715,81.6,2.6775,6.0,432.0,17.8,395.59,10.16,22.8
114 | 112,0.12329,0.0,10.01,0.0,0.547,5.913,92.9,2.3534,6.0,432.0,17.8,394.95,16.21,18.8
115 | 113,0.22212,0.0,10.01,0.0,0.547,6.092,95.4,2.548,6.0,432.0,17.8,396.9,17.09,18.7
116 | 114,0.14231,0.0,10.01,0.0,0.547,6.254,84.2,2.2565,6.0,432.0,17.8,388.74,10.45,18.5
117 | 115,0.17134,0.0,10.01,0.0,0.547,5.928,88.2,2.4631,6.0,432.0,17.8,344.91,15.76,18.3
118 | 116,0.13158,0.0,10.01,0.0,0.547,6.176,72.5,2.7301,6.0,432.0,17.8,393.3,12.04,21.2
119 | 117,0.15098,0.0,10.01,0.0,0.547,6.021,82.6,2.7474,6.0,432.0,17.8,394.51,10.3,19.2
120 | 118,0.13058,0.0,10.01,0.0,0.547,5.872,73.1,2.4775,6.0,432.0,17.8,338.63,15.37,20.4
121 | 119,0.14476,0.0,10.01,0.0,0.547,5.731,65.2,2.7592,6.0,432.0,17.8,391.5,13.61,19.3
122 | 120,0.06899,0.0,25.65,0.0,0.581,5.87,69.7,2.2577,2.0,188.0,19.1,389.15,14.37,22.0
123 | 121,0.07165,0.0,25.65,0.0,0.581,6.004,84.1,2.1974,2.0,188.0,19.1,377.67,14.27,20.3
124 | 122,0.09299,0.0,25.65,0.0,0.581,5.961,92.9,2.0869,2.0,188.0,19.1,378.09,17.93,20.5
125 | 123,0.15038,0.0,25.65,0.0,0.581,5.856,97.0,1.9444,2.0,188.0,19.1,370.31,25.41,17.3
126 | 124,0.09849,0.0,25.65,0.0,0.581,5.879,95.8,2.0063,2.0,188.0,19.1,379.38,17.58,18.8
127 | 125,0.16902,0.0,25.65,0.0,0.581,5.986,88.4,1.9929,2.0,188.0,19.1,385.02,14.81,21.4
128 | 126,0.38735,0.0,25.65,0.0,0.581,5.613,95.6,1.7572,2.0,188.0,19.1,359.29,27.26,15.7
129 | 127,0.25915,0.0,21.89,0.0,0.624,5.693,96.0,1.7883,4.0,437.0,21.2,392.11,17.19,16.2
130 | 128,0.32543,0.0,21.89,0.0,0.624,6.431,98.8,1.8125,4.0,437.0,21.2,396.9,15.39,18.0
131 | 129,0.88125,0.0,21.89,0.0,0.624,5.637,94.7,1.9799,4.0,437.0,21.2,396.9,18.34,14.3
132 | 130,0.34006,0.0,21.89,0.0,0.624,6.458,98.9,2.1185,4.0,437.0,21.2,395.04,12.6,19.2
133 | 131,1.19294,0.0,21.89,0.0,0.624,6.326,97.7,2.271,4.0,437.0,21.2,396.9,12.26,19.6
134 | 132,0.59005,0.0,21.89,0.0,0.624,6.372,97.9,2.3274,4.0,437.0,21.2,385.76,11.12,23.0
135 | 133,0.32982,0.0,21.89,0.0,0.624,5.822,95.4,2.4699,4.0,437.0,21.2,388.69,15.03,18.4
136 | 134,0.97617,0.0,21.89,0.0,0.624,5.757,98.4,2.346,4.0,437.0,21.2,262.76,17.31,15.6
137 | 135,0.55778,0.0,21.89,0.0,0.624,6.335,98.2,2.1107,4.0,437.0,21.2,394.67,16.96,18.1
138 | 136,0.32264,0.0,21.89,0.0,0.624,5.942,93.5,1.9669,4.0,437.0,21.2,378.25,16.9,17.4
139 | 137,0.35233,0.0,21.89,0.0,0.624,6.454,98.4,1.8498,4.0,437.0,21.2,394.08,14.59,17.1
140 | 138,0.2498,0.0,21.89,0.0,0.624,5.857,98.2,1.6686,4.0,437.0,21.2,392.04,21.32,13.3
141 | 139,0.54452,0.0,21.89,0.0,0.624,6.151,97.9,1.6687,4.0,437.0,21.2,396.9,18.46,17.8
142 | 140,0.2909,0.0,21.89,0.0,0.624,6.174,93.6,1.6119,4.0,437.0,21.2,388.08,24.16,14.0
143 | 141,1.62864,0.0,21.89,0.0,0.624,5.019,100.0,1.4394,4.0,437.0,21.2,396.9,34.41,14.4
144 | 142,3.32105,0.0,19.58,1.0,0.871,5.403,100.0,1.3216,5.0,403.0,14.7,396.9,26.82,13.4
145 | 143,4.0974,0.0,19.58,0.0,0.871,5.468,100.0,1.4118,5.0,403.0,14.7,396.9,26.42,15.6
146 | 144,2.77974,0.0,19.58,0.0,0.871,4.903,97.8,1.3459,5.0,403.0,14.7,396.9,29.29,11.8
147 | 145,2.37934,0.0,19.58,0.0,0.871,6.13,100.0,1.4191,5.0,403.0,14.7,172.91,27.8,13.8
148 | 146,2.15505,0.0,19.58,0.0,0.871,5.628,100.0,1.5166,5.0,403.0,14.7,169.27,16.65,15.6
149 | 147,2.36862,0.0,19.58,0.0,0.871,4.926,95.7,1.4608,5.0,403.0,14.7,391.71,29.53,14.6
150 | 148,2.33099,0.0,19.58,0.0,0.871,5.186,93.8,1.5296,5.0,403.0,14.7,356.99,28.32,17.8
151 | 149,2.73397,0.0,19.58,0.0,0.871,5.597,94.9,1.5257,5.0,403.0,14.7,351.85,21.45,15.4
152 | 150,1.6566,0.0,19.58,0.0,0.871,6.122,97.3,1.618,5.0,403.0,14.7,372.8,14.1,21.5
153 | 151,1.49632,0.0,19.58,0.0,0.871,5.404,100.0,1.5916,5.0,403.0,14.7,341.6,13.28,19.6
154 | 152,1.12658,0.0,19.58,1.0,0.871,5.012,88.0,1.6102,5.0,403.0,14.7,343.28,12.12,15.3
155 | 153,2.14918,0.0,19.58,0.0,0.871,5.709,98.5,1.6232,5.0,403.0,14.7,261.95,15.79,19.4
156 | 154,1.41385,0.0,19.58,1.0,0.871,6.129,96.0,1.7494,5.0,403.0,14.7,321.02,15.12,17.0
157 | 155,3.53501,0.0,19.58,1.0,0.871,6.152,82.6,1.7455,5.0,403.0,14.7,88.01,15.02,15.6
158 | 156,2.44668,0.0,19.58,0.0,0.871,5.272,94.0,1.7364,5.0,403.0,14.7,88.63,16.14,13.1
159 | 157,1.22358,0.0,19.58,0.0,0.605,6.943,97.4,1.8773,5.0,403.0,14.7,363.43,4.59,41.3
160 | 158,1.34284,0.0,19.58,0.0,0.605,6.066,100.0,1.7573,5.0,403.0,14.7,353.89,6.43,24.3
161 | 159,1.42502,0.0,19.58,0.0,0.871,6.51,100.0,1.7659,5.0,403.0,14.7,364.31,7.39,23.3
162 | 160,1.27346,0.0,19.58,1.0,0.605,6.25,92.6,1.7984,5.0,403.0,14.7,338.92,5.5,27.0
163 | 161,1.46336,0.0,19.58,0.0,0.605,7.489,90.8,1.9709,5.0,403.0,14.7,374.43,1.73,50.0
164 | 162,1.83377,0.0,19.58,1.0,0.605,7.802,98.2,2.0407,5.0,403.0,14.7,389.61,1.92,50.0
165 | 163,1.51902,0.0,19.58,1.0,0.605,8.375,93.9,2.162,5.0,403.0,14.7,388.45,3.32,50.0
166 | 164,2.24236,0.0,19.58,0.0,0.605,5.854,91.8,2.422,5.0,403.0,14.7,395.11,11.64,22.7
167 | 165,2.924,0.0,19.58,0.0,0.605,6.101,93.0,2.2834,5.0,403.0,14.7,240.16,9.81,25.0
168 | 166,2.01019,0.0,19.58,0.0,0.605,7.929,96.2,2.0459,5.0,403.0,14.7,369.3,3.7,50.0
169 | 167,1.80028,0.0,19.58,0.0,0.605,5.877,79.2,2.4259,5.0,403.0,14.7,227.61,12.14,23.8
170 | 168,2.3004,0.0,19.58,0.0,0.605,6.319,96.1,2.1,5.0,403.0,14.7,297.09,11.1,23.8
171 | 169,2.44953,0.0,19.58,0.0,0.605,6.402,95.2,2.2625,5.0,403.0,14.7,330.04,11.32,22.3
172 | 170,1.20742,0.0,19.58,0.0,0.605,5.875,94.6,2.4259,5.0,403.0,14.7,292.29,14.43,17.4
173 | 171,2.3139,0.0,19.58,0.0,0.605,5.88,97.3,2.3887,5.0,403.0,14.7,348.13,12.03,19.1
174 | 172,0.13914,0.0,4.05,0.0,0.51,5.572,88.5,2.5961,5.0,296.0,16.6,396.9,14.69,23.1
175 | 173,0.09178,0.0,4.05,0.0,0.51,6.416,84.1,2.6463,5.0,296.0,16.6,395.5,9.04,23.6
176 | 174,0.08447,0.0,4.05,0.0,0.51,5.859,68.7,2.7019,5.0,296.0,16.6,393.23,9.64,22.6
177 | 175,0.06664,0.0,4.05,0.0,0.51,6.546,33.1,3.1323,5.0,296.0,16.6,390.96,5.33,29.4
178 | 176,0.07022,0.0,4.05,0.0,0.51,6.02,47.2,3.5549,5.0,296.0,16.6,393.23,10.11,23.2
179 | 177,0.05425,0.0,4.05,0.0,0.51,6.315,73.4,3.3175,5.0,296.0,16.6,395.6,6.29,24.6
180 | 178,0.06642,0.0,4.05,0.0,0.51,6.86,74.4,2.9153,5.0,296.0,16.6,391.27,6.92,29.9
181 | 179,0.0578,0.0,2.46,0.0,0.488,6.98,58.4,2.829,3.0,193.0,17.8,396.9,5.04,37.2
182 | 180,0.06588,0.0,2.46,0.0,0.488,7.765,83.3,2.741,3.0,193.0,17.8,395.56,7.56,39.8
183 | 181,0.06888,0.0,2.46,0.0,0.488,6.144,62.2,2.5979,3.0,193.0,17.8,396.9,9.45,36.2
184 | 182,0.09103,0.0,2.46,0.0,0.488,7.155,92.2,2.7006,3.0,193.0,17.8,394.12,4.82,37.9
185 | 183,0.10008,0.0,2.46,0.0,0.488,6.563,95.6,2.847,3.0,193.0,17.8,396.9,5.68,32.5
186 | 184,0.08308,0.0,2.46,0.0,0.488,5.604,89.8,2.9879,3.0,193.0,17.8,391.0,13.98,26.4
187 | 185,0.06047,0.0,2.46,0.0,0.488,6.153,68.8,3.2797,3.0,193.0,17.8,387.11,13.15,29.6
188 | 186,0.05602,0.0,2.46,0.0,0.488,7.831,53.6,3.1992,3.0,193.0,17.8,392.63,4.45,50.0
189 | 187,0.07875,45.0,3.44,0.0,0.437,6.782,41.1,3.7886,5.0,398.0,15.2,393.87,6.68,32.0
190 | 188,0.12579,45.0,3.44,0.0,0.437,6.556,29.1,4.5667,5.0,398.0,15.2,382.84,4.56,29.8
191 | 189,0.0837,45.0,3.44,0.0,0.437,7.185,38.9,4.5667,5.0,398.0,15.2,396.9,5.39,34.9
192 | 190,0.09068,45.0,3.44,0.0,0.437,6.951,21.5,6.4798,5.0,398.0,15.2,377.68,5.1,37.0
193 | 191,0.06911,45.0,3.44,0.0,0.437,6.739,30.8,6.4798,5.0,398.0,15.2,389.71,4.69,30.5
194 | 192,0.08664,45.0,3.44,0.0,0.437,7.178,26.3,6.4798,5.0,398.0,15.2,390.49,2.87,36.4
195 | 193,0.02187,60.0,2.93,0.0,0.401,6.8,9.9,6.2196,1.0,265.0,15.6,393.37,5.03,31.1
196 | 194,0.01439,60.0,2.93,0.0,0.401,6.604,18.8,6.2196,1.0,265.0,15.6,376.7,4.38,29.1
197 | 195,0.01381,80.0,0.46,0.0,0.422,7.875,32.0,5.6484,4.0,255.0,14.4,394.23,2.97,50.0
198 | 196,0.04011,80.0,1.52,0.0,0.404,7.287,34.1,7.309,2.0,329.0,12.6,396.9,4.08,33.3
199 | 197,0.04666,80.0,1.52,0.0,0.404,7.107,36.6,7.309,2.0,329.0,12.6,354.31,8.61,30.3
200 | 198,0.03768,80.0,1.52,0.0,0.404,7.274,38.3,7.309,2.0,329.0,12.6,392.2,6.62,34.6
201 | 199,0.0315,95.0,1.47,0.0,0.403,6.975,15.3,7.6534,3.0,402.0,17.0,396.9,4.56,34.9
202 | 200,0.01778,95.0,1.47,0.0,0.403,7.135,13.9,7.6534,3.0,402.0,17.0,384.3,4.45,32.9
203 | 201,0.03445,82.5,2.03,0.0,0.415,6.162,38.4,6.27,2.0,348.0,14.7,393.77,7.43,24.1
204 | 202,0.02177,82.5,2.03,0.0,0.415,7.61,15.7,6.27,2.0,348.0,14.7,395.38,3.11,42.3
205 | 203,0.0351,95.0,2.68,0.0,0.4161,7.853,33.2,5.118,4.0,224.0,14.7,392.78,3.81,48.5
206 | 204,0.02009,95.0,2.68,0.0,0.4161,8.034,31.9,5.118,4.0,224.0,14.7,390.55,2.88,50.0
207 | 205,0.13642,0.0,10.59,0.0,0.489,5.891,22.3,3.9454,4.0,277.0,18.6,396.9,10.87,22.6
208 | 206,0.22969,0.0,10.59,0.0,0.489,6.326,52.5,4.3549,4.0,277.0,18.6,394.87,10.97,24.4
209 | 207,0.25199,0.0,10.59,0.0,0.489,5.783,72.7,4.3549,4.0,277.0,18.6,389.43,18.06,22.5
210 | 208,0.13587,0.0,10.59,1.0,0.489,6.064,59.1,4.2392,4.0,277.0,18.6,381.32,14.66,24.4
211 | 209,0.43571,0.0,10.59,1.0,0.489,5.344,100.0,3.875,4.0,277.0,18.6,396.9,23.09,20.0
212 | 210,0.17446,0.0,10.59,1.0,0.489,5.96,92.1,3.8771,4.0,277.0,18.6,393.25,17.27,21.7
213 | 211,0.37578,0.0,10.59,1.0,0.489,5.404,88.6,3.665,4.0,277.0,18.6,395.24,23.98,19.3
214 | 212,0.21719,0.0,10.59,1.0,0.489,5.807,53.8,3.6526,4.0,277.0,18.6,390.94,16.03,22.4
215 | 213,0.14052,0.0,10.59,0.0,0.489,6.375,32.3,3.9454,4.0,277.0,18.6,385.81,9.38,28.1
216 | 214,0.28955,0.0,10.59,0.0,0.489,5.412,9.8,3.5875,4.0,277.0,18.6,348.93,29.55,23.7
217 | 215,0.19802,0.0,10.59,0.0,0.489,6.182,42.4,3.9454,4.0,277.0,18.6,393.63,9.47,25.0
218 | 216,0.0456,0.0,13.89,1.0,0.55,5.888,56.0,3.1121,5.0,276.0,16.4,392.8,13.51,23.3
219 | 217,0.07013,0.0,13.89,0.0,0.55,6.642,85.1,3.4211,5.0,276.0,16.4,392.78,9.69,28.7
220 | 218,0.11069,0.0,13.89,1.0,0.55,5.951,93.8,2.8893,5.0,276.0,16.4,396.9,17.92,21.5
221 | 219,0.11425,0.0,13.89,1.0,0.55,6.373,92.4,3.3633,5.0,276.0,16.4,393.74,10.5,23.0
222 | 220,0.35809,0.0,6.2,1.0,0.507,6.951,88.5,2.8617,8.0,307.0,17.4,391.7,9.71,26.7
223 | 221,0.40771,0.0,6.2,1.0,0.507,6.164,91.3,3.048,8.0,307.0,17.4,395.24,21.46,21.7
224 | 222,0.62356,0.0,6.2,1.0,0.507,6.879,77.7,3.2721,8.0,307.0,17.4,390.39,9.93,27.5
225 | 223,0.6147,0.0,6.2,0.0,0.507,6.618,80.8,3.2721,8.0,307.0,17.4,396.9,7.6,30.1
226 | 224,0.31533,0.0,6.2,0.0,0.504,8.266,78.3,2.8944,8.0,307.0,17.4,385.05,4.14,44.8
227 | 225,0.52693,0.0,6.2,0.0,0.504,8.725,83.0,2.8944,8.0,307.0,17.4,382.0,4.63,50.0
228 | 226,0.38214,0.0,6.2,0.0,0.504,8.04,86.5,3.2157,8.0,307.0,17.4,387.38,3.13,37.6
229 | 227,0.41238,0.0,6.2,0.0,0.504,7.163,79.9,3.2157,8.0,307.0,17.4,372.08,6.36,31.6
230 | 228,0.29819,0.0,6.2,0.0,0.504,7.686,17.0,3.3751,8.0,307.0,17.4,377.51,3.92,46.7
231 | 229,0.44178,0.0,6.2,0.0,0.504,6.552,21.4,3.3751,8.0,307.0,17.4,380.34,3.76,31.5
232 | 230,0.537,0.0,6.2,0.0,0.504,5.981,68.1,3.6715,8.0,307.0,17.4,378.35,11.65,24.3
233 | 231,0.46296,0.0,6.2,0.0,0.504,7.412,76.9,3.6715,8.0,307.0,17.4,376.14,5.25,31.7
234 | 232,0.57529,0.0,6.2,0.0,0.507,8.337,73.3,3.8384,8.0,307.0,17.4,385.91,2.47,41.7
235 | 233,0.33147,0.0,6.2,0.0,0.507,8.247,70.4,3.6519,8.0,307.0,17.4,378.95,3.95,48.3
236 | 234,0.44791,0.0,6.2,1.0,0.507,6.726,66.5,3.6519,8.0,307.0,17.4,360.2,8.05,29.0
237 | 235,0.33045,0.0,6.2,0.0,0.507,6.086,61.5,3.6519,8.0,307.0,17.4,376.75,10.88,24.0
238 | 236,0.52058,0.0,6.2,1.0,0.507,6.631,76.5,4.148,8.0,307.0,17.4,388.45,9.54,25.1
239 | 237,0.51183,0.0,6.2,0.0,0.507,7.358,71.6,4.148,8.0,307.0,17.4,390.07,4.73,31.5
240 | 238,0.08244,30.0,4.93,0.0,0.428,6.481,18.5,6.1899,6.0,300.0,16.6,379.41,6.36,23.7
241 | 239,0.09252,30.0,4.93,0.0,0.428,6.606,42.2,6.1899,6.0,300.0,16.6,383.78,7.37,23.3
242 | 240,0.11329,30.0,4.93,0.0,0.428,6.897,54.3,6.3361,6.0,300.0,16.6,391.25,11.38,22.0
243 | 241,0.10612,30.0,4.93,0.0,0.428,6.095,65.1,6.3361,6.0,300.0,16.6,394.62,12.4,20.1
244 | 242,0.1029,30.0,4.93,0.0,0.428,6.358,52.9,7.0355,6.0,300.0,16.6,372.75,11.22,22.2
245 | 243,0.12757,30.0,4.93,0.0,0.428,6.393,7.8,7.0355,6.0,300.0,16.6,374.71,5.19,23.7
246 | 244,0.20608,22.0,5.86,0.0,0.431,5.593,76.5,7.9549,7.0,330.0,19.1,372.49,12.5,17.6
247 | 245,0.19133,22.0,5.86,0.0,0.431,5.605,70.2,7.9549,7.0,330.0,19.1,389.13,18.46,18.5
248 | 246,0.33983,22.0,5.86,0.0,0.431,6.108,34.9,8.0555,7.0,330.0,19.1,390.18,9.16,24.3
249 | 247,0.19657,22.0,5.86,0.0,0.431,6.226,79.2,8.0555,7.0,330.0,19.1,376.14,10.15,20.5
250 | 248,0.16439,22.0,5.86,0.0,0.431,6.433,49.1,7.8265,7.0,330.0,19.1,374.71,9.52,24.5
251 | 249,0.19073,22.0,5.86,0.0,0.431,6.718,17.5,7.8265,7.0,330.0,19.1,393.74,6.56,26.2
252 | 250,0.1403,22.0,5.86,0.0,0.431,6.487,13.0,7.3967,7.0,330.0,19.1,396.28,5.9,24.4
253 | 251,0.21409,22.0,5.86,0.0,0.431,6.438,8.9,7.3967,7.0,330.0,19.1,377.07,3.59,24.8
254 | 252,0.08221,22.0,5.86,0.0,0.431,6.957,6.8,8.9067,7.0,330.0,19.1,386.09,3.53,29.6
255 | 253,0.36894,22.0,5.86,0.0,0.431,8.259,8.4,8.9067,7.0,330.0,19.1,396.9,3.54,42.8
256 | 254,0.04819,80.0,3.64,0.0,0.392,6.108,32.0,9.2203,1.0,315.0,16.4,392.89,6.57,21.9
257 | 255,0.03548,80.0,3.64,0.0,0.392,5.876,19.1,9.2203,1.0,315.0,16.4,395.18,9.25,20.9
258 | 256,0.01538,90.0,3.75,0.0,0.394,7.454,34.2,6.3361,3.0,244.0,15.9,386.34,3.11,44.0
259 | 257,0.61154,20.0,3.97,0.0,0.647,8.704,86.9,1.801,5.0,264.0,13.0,389.7,5.12,50.0
260 | 258,0.66351,20.0,3.97,0.0,0.647,7.333,100.0,1.8946,5.0,264.0,13.0,383.29,7.79,36.0
261 | 259,0.65665,20.0,3.97,0.0,0.647,6.842,100.0,2.0107,5.0,264.0,13.0,391.93,6.9,30.1
262 | 260,0.54011,20.0,3.97,0.0,0.647,7.203,81.8,2.1121,5.0,264.0,13.0,392.8,9.59,33.8
263 | 261,0.53412,20.0,3.97,0.0,0.647,7.52,89.4,2.1398,5.0,264.0,13.0,388.37,7.26,43.1
264 | 262,0.52014,20.0,3.97,0.0,0.647,8.398,91.5,2.2885,5.0,264.0,13.0,386.86,5.91,48.8
265 | 263,0.82526,20.0,3.97,0.0,0.647,7.327,94.5,2.0788,5.0,264.0,13.0,393.42,11.25,31.0
266 | 264,0.55007,20.0,3.97,0.0,0.647,7.206,91.6,1.9301,5.0,264.0,13.0,387.89,8.1,36.5
267 | 265,0.76162,20.0,3.97,0.0,0.647,5.56,62.8,1.9865,5.0,264.0,13.0,392.4,10.45,22.8
268 | 266,0.7857,20.0,3.97,0.0,0.647,7.014,84.6,2.1329,5.0,264.0,13.0,384.07,14.79,30.7
269 | 267,0.57834,20.0,3.97,0.0,0.575,8.297,67.0,2.4216,5.0,264.0,13.0,384.54,7.44,50.0
270 | 268,0.5405,20.0,3.97,0.0,0.575,7.47,52.6,2.872,5.0,264.0,13.0,390.3,3.16,43.5
271 | 269,0.09065,20.0,6.96,1.0,0.464,5.92,61.5,3.9175,3.0,223.0,18.6,391.34,13.65,20.7
272 | 270,0.29916,20.0,6.96,0.0,0.464,5.856,42.1,4.429,3.0,223.0,18.6,388.65,13.0,21.1
273 | 271,0.16211,20.0,6.96,0.0,0.464,6.24,16.3,4.429,3.0,223.0,18.6,396.9,6.59,25.2
274 | 272,0.1146,20.0,6.96,0.0,0.464,6.538,58.7,3.9175,3.0,223.0,18.6,394.96,7.73,24.4
275 | 273,0.22188,20.0,6.96,1.0,0.464,7.691,51.8,4.3665,3.0,223.0,18.6,390.77,6.58,35.2
276 | 274,0.05644,40.0,6.41,1.0,0.447,6.758,32.9,4.0776,4.0,254.0,17.6,396.9,3.53,32.4
277 | 275,0.09604,40.0,6.41,0.0,0.447,6.854,42.8,4.2673,4.0,254.0,17.6,396.9,2.98,32.0
278 | 276,0.10469,40.0,6.41,1.0,0.447,7.267,49.0,4.7872,4.0,254.0,17.6,389.25,6.05,33.2
279 | 277,0.06127,40.0,6.41,1.0,0.447,6.826,27.6,4.8628,4.0,254.0,17.6,393.45,4.16,33.1
280 | 278,0.07978,40.0,6.41,0.0,0.447,6.482,32.1,4.1403,4.0,254.0,17.6,396.9,7.19,29.1
281 | 279,0.21038,20.0,3.33,0.0,0.4429,6.812,32.2,4.1007,5.0,216.0,14.9,396.9,4.85,35.1
282 | 280,0.03578,20.0,3.33,0.0,0.4429,7.82,64.5,4.6947,5.0,216.0,14.9,387.31,3.76,45.4
283 | 281,0.03705,20.0,3.33,0.0,0.4429,6.968,37.2,5.2447,5.0,216.0,14.9,392.23,4.59,35.4
284 | 282,0.06129,20.0,3.33,1.0,0.4429,7.645,49.7,5.2119,5.0,216.0,14.9,377.07,3.01,46.0
285 | 283,0.01501,90.0,1.21,1.0,0.401,7.923,24.8,5.885,1.0,198.0,13.6,395.52,3.16,50.0
286 | 284,0.00906,90.0,2.97,0.0,0.4,7.088,20.8,7.3073,1.0,285.0,15.3,394.72,7.85,32.2
287 | 285,0.01096,55.0,2.25,0.0,0.389,6.453,31.9,7.3073,1.0,300.0,15.3,394.72,8.23,22.0
288 | 286,0.01965,80.0,1.76,0.0,0.385,6.23,31.5,9.0892,1.0,241.0,18.2,341.6,12.93,20.1
289 | 287,0.03871,52.5,5.32,0.0,0.405,6.209,31.3,7.3172,6.0,293.0,16.6,396.9,7.14,23.2
290 | 288,0.0459,52.5,5.32,0.0,0.405,6.315,45.6,7.3172,6.0,293.0,16.6,396.9,7.6,22.3
291 | 289,0.04297,52.5,5.32,0.0,0.405,6.565,22.9,7.3172,6.0,293.0,16.6,371.72,9.51,24.8
292 | 290,0.03502,80.0,4.95,0.0,0.411,6.861,27.9,5.1167,4.0,245.0,19.2,396.9,3.33,28.5
293 | 291,0.07886,80.0,4.95,0.0,0.411,7.148,27.7,5.1167,4.0,245.0,19.2,396.9,3.56,37.3
294 | 292,0.03615,80.0,4.95,0.0,0.411,6.63,23.4,5.1167,4.0,245.0,19.2,396.9,4.7,27.9
295 | 293,0.08265,0.0,13.92,0.0,0.437,6.127,18.4,5.5027,4.0,289.0,16.0,396.9,8.58,23.9
296 | 294,0.08199,0.0,13.92,0.0,0.437,6.009,42.3,5.5027,4.0,289.0,16.0,396.9,10.4,21.7
297 | 295,0.12932,0.0,13.92,0.0,0.437,6.678,31.1,5.9604,4.0,289.0,16.0,396.9,6.27,28.6
298 | 296,0.05372,0.0,13.92,0.0,0.437,6.549,51.0,5.9604,4.0,289.0,16.0,392.85,7.39,27.1
299 | 297,0.14103,0.0,13.92,0.0,0.437,5.79,58.0,6.32,4.0,289.0,16.0,396.9,15.84,20.3
300 | 298,0.06466,70.0,2.24,0.0,0.4,6.345,20.1,7.8278,5.0,358.0,14.8,368.24,4.97,22.5
301 | 299,0.05561,70.0,2.24,0.0,0.4,7.041,10.0,7.8278,5.0,358.0,14.8,371.58,4.74,29.0
302 | 300,0.04417,70.0,2.24,0.0,0.4,6.871,47.4,7.8278,5.0,358.0,14.8,390.86,6.07,24.8
303 | 301,0.03537,34.0,6.09,0.0,0.433,6.59,40.4,5.4917,7.0,329.0,16.1,395.75,9.5,22.0
304 | 302,0.09266,34.0,6.09,0.0,0.433,6.495,18.4,5.4917,7.0,329.0,16.1,383.61,8.67,26.4
305 | 303,0.1,34.0,6.09,0.0,0.433,6.982,17.7,5.4917,7.0,329.0,16.1,390.43,4.86,33.1
306 | 304,0.05515,33.0,2.18,0.0,0.472,7.236,41.1,4.022,7.0,222.0,18.4,393.68,6.93,36.1
307 | 305,0.05479,33.0,2.18,0.0,0.472,6.616,58.1,3.37,7.0,222.0,18.4,393.36,8.93,28.4
308 | 306,0.07503,33.0,2.18,0.0,0.472,7.42,71.9,3.0992,7.0,222.0,18.4,396.9,6.47,33.4
309 | 307,0.04932,33.0,2.18,0.0,0.472,6.849,70.3,3.1827,7.0,222.0,18.4,396.9,7.53,28.2
310 | 308,0.49298,0.0,9.9,0.0,0.544,6.635,82.5,3.3175,4.0,304.0,18.4,396.9,4.54,22.8
311 | 309,0.3494,0.0,9.9,0.0,0.544,5.972,76.7,3.1025,4.0,304.0,18.4,396.24,9.97,20.3
312 | 310,2.63548,0.0,9.9,0.0,0.544,4.973,37.8,2.5194,4.0,304.0,18.4,350.45,12.64,16.1
313 | 311,0.79041,0.0,9.9,0.0,0.544,6.122,52.8,2.6403,4.0,304.0,18.4,396.9,5.98,22.1
314 | 312,0.26169,0.0,9.9,0.0,0.544,6.023,90.4,2.834,4.0,304.0,18.4,396.3,11.72,19.4
315 | 313,0.26938,0.0,9.9,0.0,0.544,6.266,82.8,3.2628,4.0,304.0,18.4,393.39,7.9,21.6
316 | 314,0.3692,0.0,9.9,0.0,0.544,6.567,87.3,3.6023,4.0,304.0,18.4,395.69,9.28,23.8
317 | 315,0.25356,0.0,9.9,0.0,0.544,5.705,77.7,3.945,4.0,304.0,18.4,396.42,11.5,16.2
318 | 316,0.31827,0.0,9.9,0.0,0.544,5.914,83.2,3.9986,4.0,304.0,18.4,390.7,18.33,17.8
319 | 317,0.24522,0.0,9.9,0.0,0.544,5.782,71.7,4.0317,4.0,304.0,18.4,396.9,15.94,19.8
320 | 318,0.40202,0.0,9.9,0.0,0.544,6.382,67.2,3.5325,4.0,304.0,18.4,395.21,10.36,23.1
321 | 319,0.47547,0.0,9.9,0.0,0.544,6.113,58.8,4.0019,4.0,304.0,18.4,396.23,12.73,21.0
322 | 320,0.1676,0.0,7.38,0.0,0.493,6.426,52.3,4.5404,5.0,287.0,19.6,396.9,7.2,23.8
323 | 321,0.18159,0.0,7.38,0.0,0.493,6.376,54.3,4.5404,5.0,287.0,19.6,396.9,6.87,23.1
324 | 322,0.35114,0.0,7.38,0.0,0.493,6.041,49.9,4.7211,5.0,287.0,19.6,396.9,7.7,20.4
325 | 323,0.28392,0.0,7.38,0.0,0.493,5.708,74.3,4.7211,5.0,287.0,19.6,391.13,11.74,18.5
326 | 324,0.34109,0.0,7.38,0.0,0.493,6.415,40.1,4.7211,5.0,287.0,19.6,396.9,6.12,25.0
327 | 325,0.19186,0.0,7.38,0.0,0.493,6.431,14.7,5.4159,5.0,287.0,19.6,393.68,5.08,24.6
328 | 326,0.30347,0.0,7.38,0.0,0.493,6.312,28.9,5.4159,5.0,287.0,19.6,396.9,6.15,23.0
329 | 327,0.24103,0.0,7.38,0.0,0.493,6.083,43.7,5.4159,5.0,287.0,19.6,396.9,12.79,22.2
330 | 328,0.06617,0.0,3.24,0.0,0.46,5.868,25.8,5.2146,4.0,430.0,16.9,382.44,9.97,19.3
331 | 329,0.06724,0.0,3.24,0.0,0.46,6.333,17.2,5.2146,4.0,430.0,16.9,375.21,7.34,22.6
332 | 330,0.04544,0.0,3.24,0.0,0.46,6.144,32.2,5.8736,4.0,430.0,16.9,368.57,9.09,19.8
333 | 331,0.05023,35.0,6.06,0.0,0.4379,5.706,28.4,6.6407,1.0,304.0,16.9,394.02,12.43,17.1
334 | 332,0.03466,35.0,6.06,0.0,0.4379,6.031,23.3,6.6407,1.0,304.0,16.9,362.25,7.83,19.4
335 | 333,0.05083,0.0,5.19,0.0,0.515,6.316,38.1,6.4584,5.0,224.0,20.2,389.71,5.68,22.2
336 | 334,0.03738,0.0,5.19,0.0,0.515,6.31,38.5,6.4584,5.0,224.0,20.2,389.4,6.75,20.7
337 | 335,0.03961,0.0,5.19,0.0,0.515,6.037,34.5,5.9853,5.0,224.0,20.2,396.9,8.01,21.1
338 | 336,0.03427,0.0,5.19,0.0,0.515,5.869,46.3,5.2311,5.0,224.0,20.2,396.9,9.8,19.5
339 | 337,0.03041,0.0,5.19,0.0,0.515,5.895,59.6,5.615,5.0,224.0,20.2,394.81,10.56,18.5
340 | 338,0.03306,0.0,5.19,0.0,0.515,6.059,37.3,4.8122,5.0,224.0,20.2,396.14,8.51,20.6
341 | 339,0.05497,0.0,5.19,0.0,0.515,5.985,45.4,4.8122,5.0,224.0,20.2,396.9,9.74,19.0
342 | 340,0.06151,0.0,5.19,0.0,0.515,5.968,58.5,4.8122,5.0,224.0,20.2,396.9,9.29,18.7
343 | 341,0.01301,35.0,1.52,0.0,0.442,7.241,49.3,7.0379,1.0,284.0,15.5,394.74,5.49,32.7
344 | 342,0.02498,0.0,1.89,0.0,0.518,6.54,59.7,6.2669,1.0,422.0,15.9,389.96,8.65,16.5
345 | 343,0.02543,55.0,3.78,0.0,0.484,6.696,56.4,5.7321,5.0,370.0,17.6,396.9,7.18,23.9
346 | 344,0.03049,55.0,3.78,0.0,0.484,6.874,28.1,6.4654,5.0,370.0,17.6,387.97,4.61,31.2
347 | 345,0.03113,0.0,4.39,0.0,0.442,6.014,48.5,8.0136,3.0,352.0,18.8,385.64,10.53,17.5
348 | 346,0.06162,0.0,4.39,0.0,0.442,5.898,52.3,8.0136,3.0,352.0,18.8,364.61,12.67,17.2
349 | 347,0.0187,85.0,4.15,0.0,0.429,6.516,27.7,8.5353,4.0,351.0,17.9,392.43,6.36,23.1
350 | 348,0.01501,80.0,2.01,0.0,0.435,6.635,29.7,8.344,4.0,280.0,17.0,390.94,5.99,24.5
351 | 349,0.02899,40.0,1.25,0.0,0.429,6.939,34.5,8.7921,1.0,335.0,19.7,389.85,5.89,26.6
352 | 350,0.06211,40.0,1.25,0.0,0.429,6.49,44.4,8.7921,1.0,335.0,19.7,396.9,5.98,22.9
353 | 351,0.0795,60.0,1.69,0.0,0.411,6.579,35.9,10.7103,4.0,411.0,18.3,370.78,5.49,24.1
354 | 352,0.07244,60.0,1.69,0.0,0.411,5.884,18.5,10.7103,4.0,411.0,18.3,392.33,7.79,18.6
355 | 353,0.01709,90.0,2.02,0.0,0.41,6.728,36.1,12.1265,5.0,187.0,17.0,384.46,4.5,30.1
356 | 354,0.04301,80.0,1.91,0.0,0.413,5.663,21.9,10.5857,4.0,334.0,22.0,382.8,8.05,18.2
357 | 355,0.10659,80.0,1.91,0.0,0.413,5.936,19.5,10.5857,4.0,334.0,22.0,376.04,5.57,20.6
358 | 356,8.98296,0.0,18.1,1.0,0.77,6.212,97.4,2.1222,24.0,666.0,20.2,377.73,17.6,17.8
359 | 357,3.8497,0.0,18.1,1.0,0.77,6.395,91.0,2.5052,24.0,666.0,20.2,391.34,13.27,21.7
360 | 358,5.20177,0.0,18.1,1.0,0.77,6.127,83.4,2.7227,24.0,666.0,20.2,395.43,11.48,22.7
361 | 359,4.26131,0.0,18.1,0.0,0.77,6.112,81.3,2.5091,24.0,666.0,20.2,390.74,12.67,22.6
362 | 360,4.54192,0.0,18.1,0.0,0.77,6.398,88.0,2.5182,24.0,666.0,20.2,374.56,7.79,25.0
363 | 361,3.83684,0.0,18.1,0.0,0.77,6.251,91.1,2.2955,24.0,666.0,20.2,350.65,14.19,19.9
364 | 362,3.67822,0.0,18.1,0.0,0.77,5.362,96.2,2.1036,24.0,666.0,20.2,380.79,10.19,20.8
365 | 363,4.22239,0.0,18.1,1.0,0.77,5.803,89.0,1.9047,24.0,666.0,20.2,353.04,14.64,16.8
366 | 364,3.47428,0.0,18.1,1.0,0.718,8.78,82.9,1.9047,24.0,666.0,20.2,354.55,5.29,21.9
367 | 365,4.55587,0.0,18.1,0.0,0.718,3.561,87.9,1.6132,24.0,666.0,20.2,354.7,7.12,27.5
368 | 366,3.69695,0.0,18.1,0.0,0.718,4.963,91.4,1.7523,24.0,666.0,20.2,316.03,14.0,21.9
369 | 367,13.5222,0.0,18.1,0.0,0.631,3.863,100.0,1.5106,24.0,666.0,20.2,131.42,13.33,23.1
370 | 368,4.89822,0.0,18.1,0.0,0.631,4.97,100.0,1.3325,24.0,666.0,20.2,375.52,3.26,50.0
371 | 369,5.66998,0.0,18.1,1.0,0.631,6.683,96.8,1.3567,24.0,666.0,20.2,375.33,3.73,50.0
372 | 370,6.53876,0.0,18.1,1.0,0.631,7.016,97.5,1.2024,24.0,666.0,20.2,392.05,2.96,50.0
373 | 371,9.2323,0.0,18.1,0.0,0.631,6.216,100.0,1.1691,24.0,666.0,20.2,366.15,9.53,50.0
374 | 372,8.26725,0.0,18.1,1.0,0.668,5.875,89.6,1.1296,24.0,666.0,20.2,347.88,8.88,50.0
375 | 373,11.1081,0.0,18.1,0.0,0.668,4.906,100.0,1.1742,24.0,666.0,20.2,396.9,34.77,13.8
376 | 374,18.4982,0.0,18.1,0.0,0.668,4.138,100.0,1.137,24.0,666.0,20.2,396.9,37.97,13.8
377 | 375,19.6091,0.0,18.1,0.0,0.671,7.313,97.9,1.3163,24.0,666.0,20.2,396.9,13.44,15.0
378 | 376,15.288,0.0,18.1,0.0,0.671,6.649,93.3,1.3449,24.0,666.0,20.2,363.02,23.24,13.9
379 | 377,9.82349,0.0,18.1,0.0,0.671,6.794,98.8,1.358,24.0,666.0,20.2,396.9,21.24,13.3
380 | 378,23.6482,0.0,18.1,0.0,0.671,6.38,96.2,1.3861,24.0,666.0,20.2,396.9,23.69,13.1
381 | 379,17.8667,0.0,18.1,0.0,0.671,6.223,100.0,1.3861,24.0,666.0,20.2,393.74,21.78,10.2
382 | 380,88.9762,0.0,18.1,0.0,0.671,6.968,91.9,1.4165,24.0,666.0,20.2,396.9,17.21,10.4
383 | 381,15.8744,0.0,18.1,0.0,0.671,6.545,99.1,1.5192,24.0,666.0,20.2,396.9,21.08,10.9
384 | 382,9.18702,0.0,18.1,0.0,0.7,5.536,100.0,1.5804,24.0,666.0,20.2,396.9,23.6,11.3
385 | 383,7.99248,0.0,18.1,0.0,0.7,5.52,100.0,1.5331,24.0,666.0,20.2,396.9,24.56,12.3
386 | 384,20.0849,0.0,18.1,0.0,0.7,4.368,91.2,1.4395,24.0,666.0,20.2,285.83,30.63,8.8
387 | 385,16.8118,0.0,18.1,0.0,0.7,5.277,98.1,1.4261,24.0,666.0,20.2,396.9,30.81,7.2
388 | 386,24.3938,0.0,18.1,0.0,0.7,4.652,100.0,1.4672,24.0,666.0,20.2,396.9,28.28,10.5
389 | 387,22.5971,0.0,18.1,0.0,0.7,5.0,89.5,1.5184,24.0,666.0,20.2,396.9,31.99,7.4
390 | 388,14.3337,0.0,18.1,0.0,0.7,4.88,100.0,1.5895,24.0,666.0,20.2,372.92,30.62,10.2
391 | 389,8.15174,0.0,18.1,0.0,0.7,5.39,98.9,1.7281,24.0,666.0,20.2,396.9,20.85,11.5
392 | 390,6.96215,0.0,18.1,0.0,0.7,5.713,97.0,1.9265,24.0,666.0,20.2,394.43,17.11,15.1
393 | 391,5.29305,0.0,18.1,0.0,0.7,6.051,82.5,2.1678,24.0,666.0,20.2,378.38,18.76,23.2
394 | 392,11.5779,0.0,18.1,0.0,0.7,5.036,97.0,1.77,24.0,666.0,20.2,396.9,25.68,9.7
395 | 393,8.64476,0.0,18.1,0.0,0.693,6.193,92.6,1.7912,24.0,666.0,20.2,396.9,15.17,13.8
396 | 394,13.3598,0.0,18.1,0.0,0.693,5.887,94.7,1.7821,24.0,666.0,20.2,396.9,16.35,12.7
397 | 395,8.71675,0.0,18.1,0.0,0.693,6.471,98.8,1.7257,24.0,666.0,20.2,391.98,17.12,13.1
398 | 396,5.87205,0.0,18.1,0.0,0.693,6.405,96.0,1.6768,24.0,666.0,20.2,396.9,19.37,12.5
399 | 397,7.67202,0.0,18.1,0.0,0.693,5.747,98.9,1.6334,24.0,666.0,20.2,393.1,19.92,8.5
400 | 398,38.3518,0.0,18.1,0.0,0.693,5.453,100.0,1.4896,24.0,666.0,20.2,396.9,30.59,5.0
401 | 399,9.91655,0.0,18.1,0.0,0.693,5.852,77.8,1.5004,24.0,666.0,20.2,338.16,29.97,6.3
402 | 400,25.0461,0.0,18.1,0.0,0.693,5.987,100.0,1.5888,24.0,666.0,20.2,396.9,26.77,5.6
403 | 401,14.2362,0.0,18.1,0.0,0.693,6.343,100.0,1.5741,24.0,666.0,20.2,396.9,20.32,7.2
404 | 402,9.59571,0.0,18.1,0.0,0.693,6.404,100.0,1.639,24.0,666.0,20.2,376.11,20.31,12.1
405 | 403,24.8017,0.0,18.1,0.0,0.693,5.349,96.0,1.7028,24.0,666.0,20.2,396.9,19.77,8.3
406 | 404,41.5292,0.0,18.1,0.0,0.693,5.531,85.4,1.6074,24.0,666.0,20.2,329.46,27.38,8.5
407 | 405,67.9208,0.0,18.1,0.0,0.693,5.683,100.0,1.4254,24.0,666.0,20.2,384.97,22.98,5.0
408 | 406,20.7162,0.0,18.1,0.0,0.659,4.138,100.0,1.1781,24.0,666.0,20.2,370.22,23.34,11.9
409 | 407,11.9511,0.0,18.1,0.0,0.659,5.608,100.0,1.2852,24.0,666.0,20.2,332.09,12.13,27.9
410 | 408,7.40389,0.0,18.1,0.0,0.597,5.617,97.9,1.4547,24.0,666.0,20.2,314.64,26.4,17.2
411 | 409,14.4383,0.0,18.1,0.0,0.597,6.852,100.0,1.4655,24.0,666.0,20.2,179.36,19.78,27.5
412 | 410,51.1358,0.0,18.1,0.0,0.597,5.757,100.0,1.413,24.0,666.0,20.2,2.6,10.11,15.0
413 | 411,14.0507,0.0,18.1,0.0,0.597,6.657,100.0,1.5275,24.0,666.0,20.2,35.05,21.22,17.2
414 | 412,18.811,0.0,18.1,0.0,0.597,4.628,100.0,1.5539,24.0,666.0,20.2,28.79,34.37,17.9
415 | 413,28.6558,0.0,18.1,0.0,0.597,5.155,100.0,1.5894,24.0,666.0,20.2,210.97,20.08,16.3
416 | 414,45.7461,0.0,18.1,0.0,0.693,4.519,100.0,1.6582,24.0,666.0,20.2,88.27,36.98,7.0
417 | 415,18.0846,0.0,18.1,0.0,0.679,6.434,100.0,1.8347,24.0,666.0,20.2,27.25,29.05,7.2
418 | 416,10.8342,0.0,18.1,0.0,0.679,6.782,90.8,1.8195,24.0,666.0,20.2,21.57,25.79,7.5
419 | 417,25.9406,0.0,18.1,0.0,0.679,5.304,89.1,1.6475,24.0,666.0,20.2,127.36,26.64,10.4
420 | 418,73.5341,0.0,18.1,0.0,0.679,5.957,100.0,1.8026,24.0,666.0,20.2,16.45,20.62,8.8
421 | 419,11.8123,0.0,18.1,0.0,0.718,6.824,76.5,1.794,24.0,666.0,20.2,48.45,22.74,8.4
422 | 420,11.0874,0.0,18.1,0.0,0.718,6.411,100.0,1.8589,24.0,666.0,20.2,318.75,15.02,16.7
423 | 421,7.02259,0.0,18.1,0.0,0.718,6.006,95.3,1.8746,24.0,666.0,20.2,319.98,15.7,14.2
424 | 422,12.0482,0.0,18.1,0.0,0.614,5.648,87.6,1.9512,24.0,666.0,20.2,291.55,14.1,20.8
425 | 423,7.05042,0.0,18.1,0.0,0.614,6.103,85.1,2.0218,24.0,666.0,20.2,2.52,23.29,13.4
426 | 424,8.79212,0.0,18.1,0.0,0.584,5.565,70.6,2.0635,24.0,666.0,20.2,3.65,17.16,11.7
427 | 425,15.8603,0.0,18.1,0.0,0.679,5.896,95.4,1.9096,24.0,666.0,20.2,7.68,24.39,8.3
428 | 426,12.2472,0.0,18.1,0.0,0.584,5.837,59.7,1.9976,24.0,666.0,20.2,24.65,15.69,10.2
429 | 427,37.6619,0.0,18.1,0.0,0.679,6.202,78.7,1.8629,24.0,666.0,20.2,18.82,14.52,10.9
430 | 428,7.36711,0.0,18.1,0.0,0.679,6.193,78.1,1.9356,24.0,666.0,20.2,96.73,21.52,11.0
431 | 429,9.33889,0.0,18.1,0.0,0.679,6.38,95.6,1.9682,24.0,666.0,20.2,60.72,24.08,9.5
432 | 430,8.49213,0.0,18.1,0.0,0.584,6.348,86.1,2.0527,24.0,666.0,20.2,83.45,17.64,14.5
433 | 431,10.0623,0.0,18.1,0.0,0.584,6.833,94.3,2.0882,24.0,666.0,20.2,81.33,19.69,14.1
434 | 432,6.44405,0.0,18.1,0.0,0.584,6.425,74.8,2.2004,24.0,666.0,20.2,97.95,12.03,16.1
435 | 433,5.58107,0.0,18.1,0.0,0.713,6.436,87.9,2.3158,24.0,666.0,20.2,100.19,16.22,14.3
436 | 434,13.9134,0.0,18.1,0.0,0.713,6.208,95.0,2.2222,24.0,666.0,20.2,100.63,15.17,11.7
437 | 435,11.1604,0.0,18.1,0.0,0.74,6.629,94.6,2.1247,24.0,666.0,20.2,109.85,23.27,13.4
438 | 436,14.4208,0.0,18.1,0.0,0.74,6.461,93.3,2.0026,24.0,666.0,20.2,27.49,18.05,9.6
439 | 437,15.1772,0.0,18.1,0.0,0.74,6.152,100.0,1.9142,24.0,666.0,20.2,9.32,26.45,8.7
440 | 438,13.6781,0.0,18.1,0.0,0.74,5.935,87.9,1.8206,24.0,666.0,20.2,68.95,34.02,8.4
441 | 439,9.39063,0.0,18.1,0.0,0.74,5.627,93.9,1.8172,24.0,666.0,20.2,396.9,22.88,12.8
442 | 440,22.0511,0.0,18.1,0.0,0.74,5.818,92.4,1.8662,24.0,666.0,20.2,391.45,22.11,10.5
443 | 441,9.72418,0.0,18.1,0.0,0.74,6.406,97.2,2.0651,24.0,666.0,20.2,385.96,19.52,17.1
444 | 442,5.66637,0.0,18.1,0.0,0.74,6.219,100.0,2.0048,24.0,666.0,20.2,395.69,16.59,18.4
445 | 443,9.96654,0.0,18.1,0.0,0.74,6.485,100.0,1.9784,24.0,666.0,20.2,386.73,18.85,15.4
446 | 444,12.8023,0.0,18.1,0.0,0.74,5.854,96.6,1.8956,24.0,666.0,20.2,240.52,23.79,10.8
447 | 445,0.6718,0.0,18.1,0.0,0.74,6.459,94.8,1.9879,24.0,666.0,20.2,43.06,23.98,11.8
448 | 446,6.28807,0.0,18.1,0.0,0.74,6.341,96.4,2.072,24.0,666.0,20.2,318.01,17.79,14.9
449 | 447,9.92485,0.0,18.1,0.0,0.74,6.251,96.6,2.198,24.0,666.0,20.2,388.52,16.44,12.6
450 | 448,9.32909,0.0,18.1,0.0,0.713,6.185,98.7,2.2616,24.0,666.0,20.2,396.9,18.13,14.1
451 | 449,7.52601,0.0,18.1,0.0,0.713,6.417,98.3,2.185,24.0,666.0,20.2,304.21,19.31,13.0
452 | 450,6.71772,0.0,18.1,0.0,0.713,6.749,92.6,2.3236,24.0,666.0,20.2,0.32,17.44,13.4
453 | 451,5.44114,0.0,18.1,0.0,0.713,6.655,98.2,2.3552,24.0,666.0,20.2,355.29,17.73,15.2
454 | 452,5.09017,0.0,18.1,0.0,0.713,6.297,91.8,2.3682,24.0,666.0,20.2,385.09,17.27,16.1
455 | 453,8.24809,0.0,18.1,0.0,0.713,7.393,99.3,2.4527,24.0,666.0,20.2,375.87,16.74,17.8
456 | 454,9.51363,0.0,18.1,0.0,0.713,6.728,94.1,2.4961,24.0,666.0,20.2,6.68,18.71,14.9
457 | 455,4.75237,0.0,18.1,0.0,0.713,6.525,86.5,2.4358,24.0,666.0,20.2,50.92,18.13,14.1
458 | 456,4.66883,0.0,18.1,0.0,0.713,5.976,87.9,2.5806,24.0,666.0,20.2,10.48,19.01,12.7
459 | 457,8.20058,0.0,18.1,0.0,0.713,5.936,80.3,2.7792,24.0,666.0,20.2,3.5,16.94,13.5
460 | 458,7.75223,0.0,18.1,0.0,0.713,6.301,83.7,2.7831,24.0,666.0,20.2,272.21,16.23,14.9
461 | 459,6.80117,0.0,18.1,0.0,0.713,6.081,84.4,2.7175,24.0,666.0,20.2,396.9,14.7,20.0
462 | 460,4.81213,0.0,18.1,0.0,0.713,6.701,90.0,2.5975,24.0,666.0,20.2,255.23,16.42,16.4
463 | 461,3.69311,0.0,18.1,0.0,0.713,6.376,88.4,2.5671,24.0,666.0,20.2,391.43,14.65,17.7
464 | 462,6.65492,0.0,18.1,0.0,0.713,6.317,83.0,2.7344,24.0,666.0,20.2,396.9,13.99,19.5
465 | 463,5.82115,0.0,18.1,0.0,0.713,6.513,89.9,2.8016,24.0,666.0,20.2,393.82,10.29,20.2
466 | 464,7.83932,0.0,18.1,0.0,0.655,6.209,65.4,2.9634,24.0,666.0,20.2,396.9,13.22,21.4
467 | 465,3.1636,0.0,18.1,0.0,0.655,5.759,48.2,3.0665,24.0,666.0,20.2,334.4,14.13,19.9
468 | 466,3.77498,0.0,18.1,0.0,0.655,5.952,84.7,2.8715,24.0,666.0,20.2,22.01,17.15,19.0
469 | 467,4.42228,0.0,18.1,0.0,0.584,6.003,94.5,2.5403,24.0,666.0,20.2,331.29,21.32,19.1
470 | 468,15.5757,0.0,18.1,0.0,0.58,5.926,71.0,2.9084,24.0,666.0,20.2,368.74,18.13,19.1
471 | 469,13.0751,0.0,18.1,0.0,0.58,5.713,56.7,2.8237,24.0,666.0,20.2,396.9,14.76,20.1
472 | 470,4.34879,0.0,18.1,0.0,0.58,6.167,84.0,3.0334,24.0,666.0,20.2,396.9,16.29,19.9
473 | 471,4.03841,0.0,18.1,0.0,0.532,6.229,90.7,3.0993,24.0,666.0,20.2,395.33,12.87,19.6
474 | 472,3.56868,0.0,18.1,0.0,0.58,6.437,75.0,2.8965,24.0,666.0,20.2,393.37,14.36,23.2
475 | 473,4.64689,0.0,18.1,0.0,0.614,6.98,67.6,2.5329,24.0,666.0,20.2,374.68,11.66,29.8
476 | 474,8.05579,0.0,18.1,0.0,0.584,5.427,95.4,2.4298,24.0,666.0,20.2,352.58,18.14,13.8
477 | 475,6.39312,0.0,18.1,0.0,0.584,6.162,97.4,2.206,24.0,666.0,20.2,302.76,24.1,13.3
478 | 476,4.87141,0.0,18.1,0.0,0.614,6.484,93.6,2.3053,24.0,666.0,20.2,396.21,18.68,16.7
479 | 477,15.0234,0.0,18.1,0.0,0.614,5.304,97.3,2.1007,24.0,666.0,20.2,349.48,24.91,12.0
480 | 478,10.233,0.0,18.1,0.0,0.614,6.185,96.7,2.1705,24.0,666.0,20.2,379.7,18.03,14.6
481 | 479,14.3337,0.0,18.1,0.0,0.614,6.229,88.0,1.9512,24.0,666.0,20.2,383.32,13.11,21.4
482 | 480,5.82401,0.0,18.1,0.0,0.532,6.242,64.7,3.4242,24.0,666.0,20.2,396.9,10.74,23.0
483 | 481,5.70818,0.0,18.1,0.0,0.532,6.75,74.9,3.3317,24.0,666.0,20.2,393.07,7.74,23.7
484 | 482,5.73116,0.0,18.1,0.0,0.532,7.061,77.0,3.4106,24.0,666.0,20.2,395.28,7.01,25.0
485 | 483,2.81838,0.0,18.1,0.0,0.532,5.762,40.3,4.0983,24.0,666.0,20.2,392.92,10.42,21.8
486 | 484,2.37857,0.0,18.1,0.0,0.583,5.871,41.9,3.724,24.0,666.0,20.2,370.73,13.34,20.6
487 | 485,3.67367,0.0,18.1,0.0,0.583,6.312,51.9,3.9917,24.0,666.0,20.2,388.62,10.58,21.2
488 | 486,5.69175,0.0,18.1,0.0,0.583,6.114,79.8,3.5459,24.0,666.0,20.2,392.68,14.98,19.1
489 | 487,4.83567,0.0,18.1,0.0,0.583,5.905,53.2,3.1523,24.0,666.0,20.2,388.22,11.45,20.6
490 | 488,0.15086,0.0,27.74,0.0,0.609,5.454,92.7,1.8209,4.0,711.0,20.1,395.09,18.06,15.2
491 | 489,0.18337,0.0,27.74,0.0,0.609,5.414,98.3,1.7554,4.0,711.0,20.1,344.05,23.97,7.0
492 | 490,0.20746,0.0,27.74,0.0,0.609,5.093,98.0,1.8226,4.0,711.0,20.1,318.43,29.68,8.1
493 | 491,0.10574,0.0,27.74,0.0,0.609,5.983,98.8,1.8681,4.0,711.0,20.1,390.11,18.07,13.6
494 | 492,0.11132,0.0,27.74,0.0,0.609,5.983,83.5,2.1099,4.0,711.0,20.1,396.9,13.35,20.1
495 | 493,0.17331,0.0,9.69,0.0,0.585,5.707,54.0,2.3817,6.0,391.0,19.2,396.9,12.01,21.8
496 | 494,0.27957,0.0,9.69,0.0,0.585,5.926,42.6,2.3817,6.0,391.0,19.2,396.9,13.59,24.5
497 | 495,0.17899,0.0,9.69,0.0,0.585,5.67,28.8,2.7986,6.0,391.0,19.2,393.29,17.6,23.1
498 | 496,0.2896,0.0,9.69,0.0,0.585,5.39,72.9,2.7986,6.0,391.0,19.2,396.9,21.14,19.7
499 | 497,0.26838,0.0,9.69,0.0,0.585,5.794,70.6,2.8927,6.0,391.0,19.2,396.9,14.1,18.3
500 | 498,0.23912,0.0,9.69,0.0,0.585,6.019,65.3,2.4091,6.0,391.0,19.2,396.9,12.92,21.2
501 | 499,0.17783,0.0,9.69,0.0,0.585,5.569,73.5,2.3999,6.0,391.0,19.2,395.77,15.1,17.5
502 | 500,0.22438,0.0,9.69,0.0,0.585,6.027,79.7,2.4982,6.0,391.0,19.2,396.9,14.33,16.8
503 | 501,0.06263,0.0,11.93,0.0,0.573,6.593,69.1,2.4786,1.0,273.0,21.0,391.99,9.67,22.4
504 | 502,0.04527,0.0,11.93,0.0,0.573,6.12,76.7,2.2875,1.0,273.0,21.0,396.9,9.08,20.6
505 | 503,0.06076,0.0,11.93,0.0,0.573,6.976,91.0,2.1675,1.0,273.0,21.0,396.9,5.64,23.9
506 | 504,0.10959,0.0,11.93,0.0,0.573,6.794,89.3,2.3889,1.0,273.0,21.0,393.45,6.48,22.0
507 | 505,0.04741,0.0,11.93,0.0,0.573,6.03,80.8,2.505,1.0,273.0,21.0,396.9,7.88,11.9
508 |
--------------------------------------------------------------------------------