├── README.md
└── deeplearning
    ├── .idea
    │   ├── deeplearning.iml
    │   ├── misc.xml
    │   ├── modules.xml
    │   └── workspace.xml
    ├── dataset
    │   ├── ThoraricSurgery.csv
    │   ├── housing.csv
    │   ├── iris.csv
    │   ├── pima-indians-diabetes.csv
    │   ├── sonar.csv
    │   └── wine.csv
    ├── deep_class
    │   ├── 01_Linear_Regression.py
    │   ├── 02_Data_preparation.py
    │   ├── 02_RMSE.py
    │   ├── 03_Gradient_Descent.py
    │   ├── 04_Multi-Linear-Regression.py
    │   ├── 05_3D_Graph.py
    │   ├── 06_Logistic_Regression.py
    │   ├── 07_Multi_Logistic_Regression.py
    │   ├── 08_XOR.py
    │   └── 09_XOR-backpropagation.py
    └── deep_code
        ├── 01_My_First_Deeplearning.py
        ├── 02_Data_preparation.py
        ├── 02_Pima_Indian.py
        ├── 03_Iris_Multi_Classfication.py
        ├── 04-Sonar.py
        ├── 05_Sonar_Train_Test.py
        ├── 06-Sonar-Save-Model.py
        ├── 07_Sonar-K-fold.py
        ├── 08_Wine.py
        ├── 09_Wine_Checkpoint.py
        ├── 10_Wine_Overfit_Graph.py
        ├── 11_Wine_Early_Stop.py
        ├── 12_Wine_Check_and_Stop.py
        ├── 13_Boston.py
        ├── 14_MNIST_Data.py
        ├── 15_MNIST_Simple.py
        ├── 16_MNIST_Deep.py
        ├── 17_RNN1_Reuters.py
        └── 17_RNN2_imdb_lstm.py
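Judging by their names, the deep_class scripts appear to walk through the underlying techniques (linear regression, gradient descent, logistic regression, XOR/backpropagation) in plain Python, while the deep_code scripts appear to be Keras examples built on the CSV files in dataset/. As an illustration of the former, here is a minimal NumPy gradient-descent sketch for simple linear regression, the topic named by deep_class/03_Gradient_Descent.py; the toy data and hyperparameters are made up for illustration and this is not the repository's own code.

```python
# Illustrative sketch only -- not the repository's code.
# Plain-NumPy gradient descent for simple linear regression.
import numpy as np

# Hypothetical toy data: study hours vs. exam score.
x = np.array([2, 4, 6, 8], dtype=float)
y = np.array([81, 93, 91, 97], dtype=float)

a, b = 0.0, 0.0      # slope and intercept, initialized to zero
lr = 0.03            # learning rate
epochs = 2001        # number of gradient-descent steps

for i in range(epochs):
    y_pred = a * x + b                             # current predictions
    error = y - y_pred                             # residuals
    a_grad = -(2 / len(x)) * np.sum(x * error)     # d(MSE)/da
    b_grad = -(2 / len(x)) * np.sum(error)         # d(MSE)/db
    a -= lr * a_grad
    b -= lr * b_grad
    if i % 500 == 0:
        print(f"epoch={i}, a={a:.4f}, b={b:.4f}")
```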
/README.md:
--------------------------------------------------------------------------------
1 | # 006958
2 | Example source code for 모두의 딥러닝 (Deep Learning for Everyone)
3 |
4 | Click the Clone or Download button to download the files
5 |
--------------------------------------------------------------------------------
/deeplearning/dataset/ThoraricSurgery.csv:
--------------------------------------------------------------------------------
1 | 293,1,3.8,2.8,0,0,0,0,0,0,12,0,0,0,1,0,62,0
2 | 1,2,2.88,2.16,1,0,0,0,1,1,14,0,0,0,1,0,60,0
3 | 8,2,3.19,2.5,1,0,0,0,1,0,11,0,0,1,1,0,66,1
4 | 14,2,3.98,3.06,2,0,0,0,1,1,14,0,0,0,1,0,80,1
5 | 17,2,2.21,1.88,0,0,1,0,0,0,12,0,0,0,1,0,56,0
6 | 18,2,2.96,1.67,0,0,0,0,0,0,12,0,0,0,1,0,61,0
7 | 35,2,2.76,2.2,1,0,0,0,1,0,11,0,0,0,0,0,76,0
8 | 42,2,3.24,2.52,1,0,0,0,1,0,12,0,0,0,1,0,63,1
9 | 65,2,3.15,2.76,1,0,1,0,1,0,12,0,0,0,1,0,59,0
10 | 111,2,4.48,4.2,0,0,0,0,0,0,12,0,0,0,1,0,55,0
11 | 121,2,3.84,2.56,1,0,0,0,1,0,11,0,0,0,0,0,59,0
12 | 123,2,2.8,2.12,1,0,0,1,1,0,13,0,0,0,1,0,80,0
13 | 130,2,5.6,4.64,1,0,0,0,1,0,11,0,0,0,1,0,45,0
14 | 132,2,2.12,1.72,1,0,0,0,0,0,12,0,0,0,1,0,74,0
15 | 133,2,2.5,71.1,0,0,0,1,0,0,13,0,0,0,1,0,64,1
16 | 137,2,3.76,3.08,1,0,0,0,1,0,13,0,0,0,1,0,54,0
17 | 141,2,2.16,1.56,1,0,0,0,1,0,11,0,0,0,1,0,63,0
18 | 145,2,3.64,2.48,2,0,0,0,1,1,11,0,0,0,1,0,70,0
19 | 164,2,2.4,1.96,1,0,0,0,1,0,12,0,0,0,0,0,73,0
20 | 165,2,3,2.4,1,0,0,0,1,0,14,0,0,0,1,0,58,0
21 | 167,2,3.4,2.12,1,0,0,0,1,1,11,0,0,0,1,0,62,0
22 | 172,2,2.88,2.2,0,0,0,0,0,0,12,1,0,0,1,0,62,0
23 | 173,2,3.16,2.56,1,0,1,1,1,0,12,0,0,1,1,0,62,0
24 | 193,2,3.08,2.48,1,0,0,0,1,0,11,0,0,0,0,0,49,0
25 | 203,2,4.08,2.56,1,1,1,0,0,0,13,0,0,0,1,0,54,0
26 | 204,2,3.6,3.92,0,0,0,0,0,0,12,0,0,0,1,0,56,0
27 | 210,2,2.8,1.6,1,0,1,0,1,1,12,0,0,0,1,0,53,1
28 | 216,2,2.66,8.56,1,0,1,0,1,0,12,0,0,0,1,0,61,0
29 | 217,2,3.24,1.88,1,0,0,0,1,0,12,0,0,0,1,0,61,0
30 | 243,2,4.88,3.44,0,0,1,0,1,0,14,0,0,0,1,0,75,1
31 | 275,2,4.04,2.76,1,0,0,0,1,0,12,0,0,0,1,0,55,1
32 | 284,2,2.32,1.68,1,0,1,0,1,0,12,0,0,0,1,0,64,0
33 | 295,2,2.64,1.92,1,0,0,0,1,0,11,1,0,0,1,0,63,0
34 | 316,2,3.4,2.76,1,0,1,0,1,0,12,0,0,0,1,0,56,0
35 | 324,2,2.58,1.64,2,0,1,0,1,1,12,0,0,0,1,0,63,0
36 | 331,2,2.94,76,1,0,1,1,1,0,12,0,0,0,0,0,61,0
37 | 335,2,4,3.12,1,0,0,0,1,0,12,0,0,0,1,0,67,1
38 | 346,2,3.12,2.72,2,0,0,0,1,1,14,0,0,0,1,0,70,0
39 | 347,2,3.48,2.84,1,0,0,0,0,1,11,0,0,0,1,0,58,0
40 | 349,2,4.2,3.6,1,0,0,0,0,1,11,0,0,0,1,0,39,1
41 | 390,2,3.8,2.67,1,0,0,0,1,0,14,0,0,0,1,0,48,0
42 | 392,2,1.84,1.36,1,0,1,0,1,0,12,0,0,0,1,0,57,0
43 | 399,2,2.96,2.33,1,0,0,0,1,0,11,0,0,0,1,0,72,0
44 | 405,2,2.96,2.24,0,0,0,0,1,0,12,0,0,0,1,0,57,1
45 | 408,2,2.72,2.08,0,0,0,0,0,0,12,0,0,0,1,0,67,0
46 | 411,2,2.48,2,1,0,0,0,1,0,12,0,0,0,1,0,60,1
47 | 414,2,2.48,2.08,1,0,1,0,0,0,12,0,0,0,1,0,60,0
48 | 419,2,2.6,2.04,0,1,1,0,0,0,12,0,0,0,0,0,70,0
49 | 422,2,3.76,2.96,1,0,0,0,1,0,14,1,0,0,0,0,64,1
50 | 442,2,4.44,3.64,0,0,0,0,0,0,12,0,0,0,0,0,62,0
51 | 443,2,4.08,2.24,1,0,0,1,1,0,12,0,0,0,0,0,61,0
52 | 448,2,4.4,3.72,1,0,0,0,1,1,12,0,0,0,1,0,52,0
53 | 466,2,3.88,2.12,1,0,0,0,1,0,13,0,0,0,1,0,63,0
54 | 2,3,3.4,1.88,0,0,0,0,0,0,12,0,0,0,1,0,51,0
55 | 3,3,2.76,2.08,1,0,0,0,1,0,11,0,0,0,1,0,59,0
56 | 4,3,3.68,3.04,0,0,0,0,0,0,11,0,0,0,0,0,54,0
57 | 5,3,2.44,0.96,2,0,1,0,1,1,11,0,0,0,1,0,73,1
58 | 6,3,2.48,1.88,1,0,0,0,1,0,11,0,0,0,0,0,51,0
59 | 7,3,4.36,3.28,1,0,0,0,1,0,12,1,0,0,1,0,59,1
60 | 9,3,3.16,2.64,2,0,0,0,1,1,11,0,0,0,1,0,68,0
61 | 10,3,2.32,2.16,1,0,0,0,1,0,11,0,0,0,1,0,54,0
62 | 11,3,2.56,2.32,0,0,1,0,1,0,12,0,0,0,0,0,60,0
63 | 12,3,4.28,4.44,1,0,0,0,0,0,12,0,0,0,1,0,58,0
64 | 13,3,3,2.36,1,0,0,0,1,1,11,0,0,0,1,0,68,0
65 | 15,3,1.96,1.4,1,0,0,0,1,0,11,0,0,0,1,0,77,0
66 | 16,3,4.68,4.16,1,0,0,0,1,0,12,0,0,0,1,0,62,0
67 | 19,3,2.6,1.68,1,0,0,0,1,0,12,0,0,0,1,0,70,0
68 | 20,3,2.88,2.48,0,0,0,0,0,0,11,0,0,0,1,0,71,0
69 | 21,3,4.48,3.48,0,0,0,0,0,0,12,0,0,0,1,0,51,0
70 | 23,3,2.36,1.68,0,0,0,0,0,0,12,0,0,0,1,0,62,0
71 | 24,3,3.68,2.32,0,0,0,0,0,0,11,0,0,0,1,0,62,0
72 | 27,3,3.24,3.08,1,0,0,0,1,0,11,0,0,0,1,0,60,0
73 | 28,3,3.4,3.06,1,0,0,0,1,1,11,0,0,0,1,0,68,1
74 | 29,3,3.16,2.69,1,0,0,0,1,1,11,0,0,0,1,0,56,0
75 | 31,3,3.24,2.4,1,1,1,0,0,0,14,0,0,0,1,0,55,1
76 | 32,3,4.44,3.48,1,0,0,0,1,0,12,0,0,0,0,0,52,0
77 | 34,3,1.81,1.4,1,0,0,0,1,0,12,1,0,0,0,0,68,0
78 | 36,3,2.36,1.6,0,0,0,0,0,0,11,0,0,0,1,0,58,0
79 | 37,3,2.2,1.96,1,0,0,0,1,0,12,0,0,0,1,0,71,0
80 | 38,3,3.68,2.44,1,0,1,1,0,0,12,1,0,0,0,0,61,0
81 | 39,3,4.2,3.08,0,0,0,0,0,0,11,0,0,0,1,0,56,0
82 | 40,3,4.6,3.52,1,0,0,0,1,0,11,0,0,0,1,0,52,0
83 | 43,3,3.2,2.82,1,0,0,0,1,0,12,0,0,0,1,0,68,0
84 | 45,3,3.56,2.68,1,1,0,0,1,0,12,0,0,0,1,0,60,0
85 | 46,3,2.48,2.08,0,0,0,0,0,0,11,0,0,0,1,0,60,0
86 | 47,3,4.16,3.28,1,0,0,0,1,0,12,0,0,0,1,0,67,0
87 | 48,3,2.64,2.12,1,0,0,0,1,0,12,0,0,0,1,0,72,1
88 | 49,3,4.44,3.12,2,0,0,0,1,1,12,0,0,0,1,0,59,0
89 | 50,3,4.56,3.92,0,0,0,0,0,0,12,0,0,0,0,0,55,0
90 | 51,3,2.52,1.96,1,0,0,0,1,0,12,0,0,0,0,0,79,0
91 | 52,3,4,2.88,1,0,0,0,1,0,11,0,0,0,1,0,69,0
92 | 53,3,3.2,2.52,2,1,1,1,1,0,12,0,0,0,1,0,68,0
93 | 55,3,3.68,3.08,1,0,0,0,1,0,12,0,0,0,1,0,63,0
94 | 57,3,3.72,2.88,1,0,0,1,1,0,11,0,0,0,0,0,37,0
95 | 58,3,3.4,2.8,1,1,0,0,1,1,11,1,0,0,1,0,64,1
96 | 60,3,3.84,3.72,0,0,0,0,0,0,12,0,0,0,1,0,58,0
97 | 61,3,3.52,2.28,0,0,0,0,0,0,13,0,0,0,1,0,51,1
98 | 62,3,3.04,2.04,2,0,0,0,1,1,12,0,0,0,1,0,77,0
99 | 63,3,4.96,3.6,0,0,0,0,0,0,11,0,0,0,1,0,56,0
100 | 64,3,3.72,2.84,0,0,0,0,0,0,11,1,0,0,0,0,55,0
101 | 66,3,2.88,2.6,1,0,0,0,1,0,12,0,0,0,0,0,54,0
102 | 67,3,2.36,2,0,0,0,0,0,0,11,0,0,0,0,0,39,0
103 | 69,3,2.72,2.2,1,0,0,0,1,0,12,0,0,0,1,0,61,0
104 | 70,3,3.08,1.8,1,0,1,0,1,0,12,0,0,0,1,0,70,0
105 | 71,3,3.48,2.72,1,0,1,0,0,0,11,0,0,0,0,0,53,0
106 | 72,3,3.6,2.6,1,0,0,0,1,0,12,1,0,0,1,0,71,0
107 | 73,3,3.52,2.92,0,0,0,0,0,0,11,0,0,0,1,0,63,0
108 | 75,3,4.6,3.28,1,0,0,0,1,0,11,0,0,0,1,0,55,0
109 | 76,3,3.4,2.8,1,0,0,0,1,0,14,0,0,0,1,0,41,1
110 | 77,3,1.84,1.28,1,0,0,0,1,1,11,0,0,0,1,0,66,0
111 | 78,3,3.04,3.6,1,0,0,0,1,0,12,0,0,0,1,0,62,1
112 | 79,3,2.2,1.44,1,0,0,0,1,0,12,0,0,0,1,0,54,0
113 | 80,3,3.04,2.16,1,0,0,0,1,0,12,0,0,0,0,0,78,0
114 | 81,3,3.68,2.88,1,0,0,0,1,0,12,0,0,0,1,0,58,0
115 | 82,3,1.96,1.68,1,0,0,0,1,0,14,0,0,0,1,0,59,0
116 | 83,3,3.24,1.64,1,0,0,0,1,0,12,0,0,0,1,0,63,0
117 | 84,3,2.84,2.36,1,0,0,0,1,0,11,1,0,0,0,0,62,0
118 | 85,3,4.28,3.28,0,0,0,0,0,0,12,0,0,0,1,0,51,0
119 | 86,3,3.76,2.72,1,0,0,0,1,0,12,0,0,0,1,0,58,0
120 | 87,3,4.9,4.19,0,0,0,1,1,0,12,0,0,0,0,0,52,0
121 | 88,3,2.36,2,1,0,0,1,0,0,12,0,0,1,1,0,67,0
122 | 90,3,2.83,66.4,1,1,1,1,1,0,12,0,0,0,1,0,75,0
123 | 92,3,2.6,2,1,0,0,0,1,0,11,0,0,0,1,0,73,0
124 | 93,3,3.6,2.48,1,0,0,0,1,0,12,0,0,0,1,0,60,1
125 | 94,3,6.08,4.92,0,0,0,0,0,0,11,0,0,0,1,0,50,0
126 | 95,3,1.88,1.44,2,0,0,0,1,1,12,0,0,0,1,0,87,0
127 | 96,3,4.56,3.6,1,0,0,0,1,0,11,0,0,0,1,0,54,0
128 | 99,3,2.63,67.3,1,0,0,1,1,0,11,0,0,0,1,0,54,0
129 | 100,3,4.6,2.92,1,0,1,1,1,0,12,0,0,0,1,0,57,1
130 | 101,3,3.36,2.67,1,0,0,0,1,0,11,0,0,0,1,0,72,0
131 | 102,3,1.84,1.64,1,0,0,0,1,1,12,1,0,0,1,0,72,0
132 | 104,3,2.35,1.64,1,0,0,0,1,0,11,0,0,0,0,1,59,0
133 | 105,3,2.84,1.88,1,0,0,0,1,0,11,0,0,0,0,0,53,0
134 | 107,3,2.48,2.08,1,0,0,0,1,0,12,0,0,0,1,0,55,0
135 | 108,3,3.6,2.6,1,0,0,0,1,0,12,0,0,0,1,0,54,0
136 | 109,3,3.16,2.96,0,0,0,0,0,0,11,0,0,0,0,0,63,0
137 | 110,3,3.24,2.36,1,0,0,1,1,0,12,0,0,0,1,0,74,0
138 | 112,3,4,2.6,1,0,0,0,1,0,12,1,0,0,1,0,58,0
139 | 113,3,3.68,64.1,0,0,0,0,0,0,12,0,0,0,1,0,60,0
140 | 114,3,4.68,3.48,0,0,0,0,0,0,11,0,0,0,1,0,52,0
141 | 115,3,4.52,3.32,0,0,0,0,1,0,12,0,0,0,1,0,58,0
142 | 118,3,2.84,2.16,1,0,0,0,1,0,12,1,0,0,1,0,53,0
143 | 120,3,2.56,1.6,1,0,0,0,1,1,12,0,0,0,1,0,75,0
144 | 122,3,3.56,2.76,1,0,0,0,1,0,12,0,0,0,1,0,74,0
145 | 125,3,3.36,2.8,1,0,0,0,1,1,12,0,0,0,1,0,76,0
146 | 126,3,2.83,1.96,1,0,0,0,1,0,12,0,0,0,1,0,71,0
147 | 127,3,4.56,2.68,1,0,0,0,1,0,11,0,0,0,1,0,62,0
148 | 128,3,2,1,1,0,1,0,1,1,11,1,0,0,1,0,73,1
149 | 131,3,3.32,2.87,1,0,0,0,1,0,11,0,0,0,1,0,63,0
150 | 134,3,2,1.44,0,0,0,0,0,0,11,0,0,0,1,0,63,0
151 | 135,3,4.84,3.48,1,0,0,0,1,0,12,0,0,0,1,0,56,0
152 | 136,3,2.92,2.28,1,0,0,0,1,0,11,0,0,0,1,0,63,0
153 | 138,3,2.08,1.52,1,0,0,0,1,0,14,0,0,0,1,0,49,1
154 | 139,3,2.44,2.08,1,0,0,0,1,0,12,0,0,0,1,0,57,0
155 | 140,3,3.72,3.12,1,0,1,0,0,0,12,0,0,0,0,0,52,0
156 | 142,3,4.2,3.24,1,0,1,0,1,0,12,0,0,0,1,0,73,0
157 | 143,3,5.17,4.3,1,0,0,0,0,0,11,0,0,0,0,0,47,0
158 | 146,3,3.96,2.96,1,0,0,0,1,0,12,0,0,0,1,0,60,0
159 | 147,3,3.92,3.08,1,0,0,0,1,0,11,0,0,0,0,0,70,0
160 | 148,3,2.92,2.2,1,0,0,0,1,0,12,0,0,0,1,0,68,0
161 | 149,3,3.64,2.76,1,0,0,0,1,0,12,0,0,0,1,0,74,0
162 | 150,3,2.72,2.36,0,0,0,0,0,0,11,0,0,0,1,0,71,0
163 | 151,3,2.6,2.24,0,0,0,0,0,0,12,0,0,0,0,0,56,0
164 | 152,3,3.88,2.84,1,0,1,0,1,0,11,0,0,0,1,0,66,1
165 | 153,3,2.72,2.04,1,1,0,0,0,0,12,0,0,0,0,0,76,1
166 | 154,3,3.44,3.13,1,0,0,0,1,1,12,0,0,0,1,0,78,0
167 | 155,3,3.12,3.24,1,0,0,0,1,0,12,0,0,0,1,0,68,0
168 | 156,3,2.6,2.32,1,0,0,0,0,1,12,1,0,0,0,0,66,0
169 | 157,3,3.28,2.32,1,0,0,1,1,0,13,0,0,0,1,0,67,0
170 | 158,3,2.76,1.6,1,0,0,0,1,1,12,0,1,0,1,0,60,0
171 | 159,3,3.08,2.32,1,0,1,0,1,1,12,0,0,1,1,0,61,0
172 | 160,3,2.2,1.7,1,0,0,0,1,0,11,0,0,0,1,0,58,0
173 | 161,3,2.92,1.88,0,0,0,0,0,0,12,0,0,0,0,0,76,0
174 | 162,3,2.88,2.36,0,0,0,0,0,0,11,0,0,0,1,0,56,0
175 | 163,3,3.2,2.28,1,1,1,0,1,0,12,0,0,0,1,0,67,0
176 | 166,3,3.2,2.21,1,0,1,1,1,0,12,0,0,0,1,0,54,0
177 | 168,3,2.57,1.72,1,0,0,0,1,1,11,0,0,0,1,0,81,0
178 | 169,3,2.28,2.08,0,0,0,0,0,0,11,1,0,0,1,0,56,0
179 | 170,3,2.44,1.96,1,0,1,1,1,0,13,0,0,0,0,0,60,1
180 | 171,3,4.04,1.88,1,0,0,0,1,0,12,0,0,0,1,0,66,0
181 | 174,3,2.6,2.36,1,0,0,0,1,0,11,0,0,0,1,0,55,1
182 | 175,3,1.44,1.04,1,0,0,0,1,1,11,0,0,0,1,0,62,0
183 | 176,3,3.68,2.36,0,0,0,1,1,0,12,0,0,0,1,0,71,1
184 | 177,3,3.2,2.72,2,0,0,0,1,0,14,0,0,0,1,0,52,0
185 | 178,3,3.04,2.32,1,0,0,0,1,0,12,0,0,0,1,0,59,0
186 | 179,3,4.32,4.32,1,0,1,0,1,1,12,0,0,0,1,0,48,0
187 | 180,3,3,2.36,2,0,0,0,1,1,12,0,0,0,1,0,60,0
188 | 181,3,3.64,2.88,1,0,0,0,1,1,12,0,0,0,1,0,61,0
189 | 182,3,5.08,4.08,1,0,0,0,1,0,12,0,0,0,1,0,59,0
190 | 183,3,3.16,2.36,1,0,0,0,1,0,11,0,0,0,1,0,64,0
191 | 184,3,2.8,3.36,1,0,0,0,1,0,12,0,0,0,1,0,56,0
192 | 185,3,2.52,2.08,0,0,0,0,0,0,11,0,0,0,0,0,58,0
193 | 187,3,3.32,2.15,1,0,0,0,1,0,11,0,0,0,1,0,64,0
194 | 189,3,2.28,1.24,1,0,0,0,1,0,11,0,0,0,1,0,72,0
195 | 191,3,2.6,1.56,0,0,0,0,0,0,12,0,0,0,1,1,61,0
196 | 192,3,2.68,2.4,0,0,0,0,0,0,11,0,0,0,1,0,60,1
197 | 194,3,3.84,3.36,0,0,0,0,0,0,12,0,0,0,1,0,53,0
198 | 195,3,3.52,2.8,0,0,0,0,0,0,11,0,0,0,1,0,58,0
199 | 196,3,2.73,2.11,1,0,1,0,1,0,12,0,0,0,1,0,61,1
200 | 197,3,2.84,2.24,1,1,1,0,0,0,12,0,0,0,1,0,68,1
201 | 198,3,2.98,2.64,1,0,0,0,1,0,12,0,0,0,0,0,60,0
202 | 199,3,3.52,2.72,1,0,0,0,0,0,11,0,0,0,1,0,72,0
203 | 201,3,2.36,2.08,1,0,0,0,1,0,12,0,0,0,1,0,57,0
204 | 202,3,2.76,2.28,0,0,0,0,0,0,11,0,0,0,1,0,51,0
205 | 205,3,3.12,2.9,0,0,0,0,0,0,12,0,0,0,0,0,77,0
206 | 206,3,2.24,1.76,0,0,0,0,0,0,12,0,0,0,1,0,64,0
207 | 207,3,3.96,2.88,0,0,0,0,0,0,11,0,0,0,1,0,57,0
208 | 208,3,2.6,1.92,1,0,0,0,1,0,11,0,0,0,1,0,66,0
209 | 209,3,4.2,3.24,0,0,0,0,0,0,12,0,0,0,1,0,70,0
210 | 211,3,4.72,4.56,0,0,0,0,0,0,11,0,0,0,1,0,51,0
211 | 212,3,3.58,2.64,1,0,0,0,1,0,12,0,0,0,1,0,58,1
212 | 213,3,2.44,2.12,1,1,1,1,0,0,11,0,0,0,1,0,58,0
213 | 214,3,2.22,1.36,0,0,0,0,0,0,12,1,0,0,1,0,63,1
214 | 215,3,2.96,2.32,0,0,0,0,0,0,11,0,0,0,1,0,51,0
215 | 218,3,4.52,3.6,1,0,0,0,1,0,12,0,0,0,1,0,76,0
216 | 219,3,4,3.08,1,0,0,0,1,0,11,0,0,0,1,0,71,0
217 | 220,3,2.84,2.12,0,0,0,0,0,0,11,0,0,0,1,0,69,0
218 | 223,3,4.8,3.41,1,0,0,1,1,0,12,0,0,0,1,0,54,0
219 | 224,3,3.72,3.04,0,0,0,0,0,0,11,0,0,1,1,0,63,0
220 | 225,3,4.96,3.48,1,0,0,0,1,0,12,0,0,0,1,0,47,0
221 | 227,3,2.96,2.44,1,0,0,0,1,1,12,0,0,0,1,0,65,0
222 | 228,3,2.64,2.44,1,0,0,0,1,0,12,0,0,0,1,0,63,1
223 | 229,3,2.4,1.64,0,0,0,0,0,0,11,0,0,0,0,0,64,0
224 | 230,3,2.64,2.08,1,0,0,0,1,0,12,1,0,0,1,0,65,1
225 | 231,3,4.76,3.31,1,0,0,1,1,0,11,0,0,0,1,0,51,0
226 | 233,3,2.32,1.76,1,0,0,0,1,0,11,0,0,0,1,0,70,0
227 | 234,3,2.6,2,1,0,0,0,1,0,12,0,0,0,1,0,58,0
228 | 235,3,2.46,1.76,1,0,0,0,1,1,11,0,0,0,1,0,67,0
229 | 236,3,4.16,3.64,1,0,0,0,1,0,12,0,0,0,1,0,62,0
230 | 237,3,3.2,1.8,1,0,0,0,1,1,12,0,0,0,1,0,74,0
231 | 238,3,3.24,2.64,0,0,0,0,1,0,11,0,0,0,1,0,69,0
232 | 240,3,3.52,2.52,1,0,0,0,1,0,12,0,0,0,1,0,60,1
233 | 241,3,4.36,3.76,0,0,0,0,0,0,11,0,0,0,1,0,72,0
234 | 242,3,5.52,3.56,1,0,0,0,1,0,12,0,0,0,1,0,64,0
235 | 244,3,4.36,3.92,1,0,0,0,0,0,11,0,0,0,1,0,47,0
236 | 245,3,3.56,2.64,1,0,0,0,1,0,11,0,1,0,1,0,57,0
237 | 246,3,5.49,2.97,1,0,0,0,1,0,12,0,0,0,1,0,56,0
238 | 248,3,4.08,3.2,0,0,0,0,1,0,12,0,0,0,1,0,55,0
239 | 250,3,2.56,1.8,1,0,0,0,1,0,12,0,0,0,1,0,73,0
240 | 251,3,3.8,2.82,1,0,0,0,1,0,12,0,0,0,1,0,68,0
241 | 252,3,3.04,2.24,2,0,0,0,1,1,11,0,0,0,1,0,75,1
242 | 253,3,3.81,2.94,1,0,0,0,1,0,12,0,0,0,1,0,63,0
243 | 254,3,3.92,2.36,1,0,0,0,1,0,12,0,0,0,1,0,61,0
244 | 255,3,3.44,3.52,1,1,0,0,0,0,11,0,0,0,1,0,62,0
245 | 256,3,3.72,78.3,0,1,0,0,1,0,12,0,0,0,1,0,44,0
246 | 257,3,2.8,1.88,1,0,0,0,1,0,11,0,0,0,1,0,56,0
247 | 258,3,2.92,2.32,0,0,0,0,0,0,11,0,0,0,1,0,54,0
248 | 259,3,3.72,2.48,1,0,1,0,1,0,11,0,0,0,1,0,57,0
249 | 260,3,3.64,2.52,0,0,0,0,0,0,12,0,0,0,1,0,56,0
250 | 261,3,2.72,2.09,0,0,0,0,0,0,14,0,0,0,0,0,69,1
251 | 262,3,1.84,1.12,1,0,0,0,1,0,12,0,0,0,1,0,72,0
252 | 263,3,2.96,1.72,0,0,1,0,1,0,11,0,0,0,1,0,59,0
253 | 265,3,2.6,1.92,1,0,0,0,1,0,11,0,0,0,1,0,64,0
254 | 266,3,2.92,2.52,0,0,0,0,0,0,12,0,0,0,1,0,61,0
255 | 267,3,3.8,2.84,1,0,0,0,1,0,12,0,0,0,1,0,72,0
256 | 268,3,3.32,2.92,2,0,0,0,1,1,13,0,0,0,1,0,63,0
257 | 269,3,2.52,1.72,2,0,0,1,1,1,12,0,0,0,1,0,74,1
258 | 270,3,4.28,3.28,1,1,0,0,1,0,11,0,0,0,1,0,71,0
259 | 271,3,2.52,1.72,1,0,0,0,1,1,12,0,0,0,1,0,71,1
260 | 273,3,2.07,1.6,0,0,1,0,0,0,12,0,0,0,0,0,77,0
261 | 276,3,1.7,1.36,1,0,0,0,0,1,12,0,0,0,1,0,65,0
262 | 277,3,3.04,2.04,1,0,0,0,1,0,12,0,0,0,1,0,67,0
263 | 278,3,3.36,2.64,1,0,0,0,1,0,12,1,0,0,1,0,69,0
264 | 279,3,4.57,4.57,1,0,0,0,1,0,11,0,0,0,0,0,55,0
265 | 280,3,4.12,2.32,1,0,0,0,1,0,11,0,0,0,1,0,51,0
266 | 281,3,2,1.36,1,0,0,0,1,0,11,0,0,0,1,0,64,0
267 | 282,3,3.8,3.68,0,0,0,0,0,0,12,0,0,0,0,0,63,0
268 | 283,3,3.16,2.6,1,1,0,0,1,0,12,0,0,0,0,0,69,0
269 | 285,3,2.32,1.92,0,0,0,0,0,0,11,0,0,0,1,0,59,0
270 | 286,3,2.48,1.4,1,0,0,0,1,0,11,0,0,0,1,0,73,0
271 | 288,3,2.96,2.2,1,0,0,0,1,0,12,0,0,0,1,0,63,0
272 | 289,3,2.96,1.88,1,0,0,0,1,1,14,0,0,0,1,0,60,0
273 | 290,3,3.52,2.36,1,0,0,0,1,0,12,0,0,0,1,0,74,0
274 | 291,3,4.12,3.16,1,0,0,0,1,1,12,0,0,0,1,0,65,0
275 | 292,3,2.68,2.32,1,0,0,0,1,1,11,1,0,0,1,0,79,0
276 | 294,3,4.12,2.88,1,0,0,0,1,0,12,0,0,1,1,0,71,0
277 | 296,3,3.68,2.96,1,0,1,0,1,0,12,0,0,0,1,0,67,0
278 | 297,3,2.48,1.84,1,0,0,0,1,0,12,0,0,0,1,0,55,1
279 | 298,3,4.36,3.24,1,1,0,1,1,0,12,0,0,0,1,0,54,1
280 | 299,3,4.32,2.72,2,0,1,0,1,1,11,0,0,0,1,0,77,0
281 | 300,3,3.4,1.92,0,0,0,0,0,0,12,0,0,0,1,0,58,0
282 | 301,3,4.24,3.04,1,0,1,0,1,1,12,0,0,0,1,0,64,0
283 | 302,3,3.28,1.96,0,0,0,0,0,0,12,0,0,0,0,0,61,0
284 | 303,3,4.59,3.02,2,1,0,0,1,1,13,0,0,0,0,0,62,1
285 | 304,3,4.16,3.44,1,0,1,0,1,1,12,0,0,0,1,0,67,0
286 | 305,3,5.16,4.28,0,0,0,0,0,0,12,0,0,0,1,0,56,0
287 | 306,3,2.76,1.8,1,0,0,0,1,0,12,0,0,0,1,0,70,1
288 | 308,3,2.8,2.32,1,0,0,0,1,0,12,0,0,0,1,0,57,0
289 | 309,3,2.32,1.96,1,0,0,0,1,0,11,0,0,0,1,0,61,0
290 | 310,3,1.98,1.57,1,0,0,0,0,1,11,0,0,0,1,0,77,0
291 | 312,3,2.4,1.64,1,0,1,0,1,1,12,0,0,0,0,0,62,0
292 | 313,3,3.12,2.52,1,1,0,0,1,0,12,0,0,0,1,0,59,1
293 | 314,3,2.6,1.84,1,0,0,0,1,0,12,0,0,0,1,0,70,0
294 | 317,3,3.6,2.64,1,0,0,0,1,0,12,0,0,0,0,0,57,0
295 | 318,3,2.48,2.12,1,0,0,1,1,0,12,0,0,0,1,0,78,0
296 | 319,3,2.4,1.96,1,0,0,0,1,0,11,0,0,0,1,0,64,0
297 | 320,3,2.1,69.1,0,0,0,0,0,0,11,0,0,0,1,0,62,0
298 | 321,3,5.12,4,1,0,0,0,1,0,14,0,0,0,1,0,49,0
299 | 322,3,4.65,3.78,1,0,0,0,1,0,12,0,0,0,1,0,77,1
300 | 323,3,2.72,2.36,1,0,0,0,1,0,11,0,0,0,1,0,64,0
301 | 327,3,3.2,2.52,1,0,0,0,1,1,12,0,0,0,1,0,75,0
302 | 328,3,2.52,1.92,2,0,1,0,1,1,11,0,0,0,1,0,70,0
303 | 329,3,1.96,1.48,1,0,0,0,1,0,12,0,0,0,1,0,59,0
304 | 332,3,3.52,3.12,0,0,0,0,0,0,11,0,0,0,1,0,64,0
305 | 333,3,2.6,1.92,0,0,0,0,0,0,11,0,0,0,1,0,59,0
306 | 336,3,2.4,1.8,1,0,0,0,1,0,11,0,0,0,1,0,64,0
307 | 337,3,2.32,1.32,1,0,1,0,1,1,11,0,0,0,1,0,68,0
308 | 339,3,4,3.08,0,0,0,0,0,0,11,0,0,0,1,0,64,0
309 | 340,3,2.96,2,1,0,0,0,1,0,12,0,0,0,1,0,59,0
310 | 341,3,3.88,2.92,0,0,0,0,0,0,11,0,0,0,1,0,67,1
311 | 342,3,2.36,1.76,1,0,1,0,0,0,12,0,0,0,1,0,74,0
312 | 344,3,2.96,2.44,1,0,0,0,1,1,11,0,0,0,1,0,60,0
313 | 345,3,3.64,3.12,1,0,0,0,1,0,12,0,0,0,1,0,64,0
314 | 348,3,4.16,3.44,1,1,0,0,1,0,13,0,0,0,1,0,59,0
315 | 351,3,2.64,2.16,1,0,1,0,1,0,12,0,0,0,1,0,71,1
316 | 352,3,3.05,1.3,1,0,0,0,1,0,11,0,0,0,1,0,70,0
317 | 353,3,2.94,73.3,1,0,1,1,0,0,12,0,0,0,0,0,60,0
318 | 354,3,3.24,52.3,0,0,0,0,0,0,12,1,0,0,1,0,55,0
319 | 355,3,4.28,3.52,1,0,0,0,1,0,11,0,0,0,1,0,60,0
320 | 356,3,3.68,3.2,1,0,0,0,1,0,12,0,0,0,1,0,55,0
321 | 357,3,2.8,2.44,1,0,0,1,1,0,12,0,0,0,1,0,55,0
322 | 358,3,2,1.36,0,0,0,0,0,0,12,0,0,0,1,0,70,1
323 | 359,3,2.4,2.04,1,0,0,0,1,0,12,0,0,0,1,0,63,0
324 | 361,3,2.6,2.12,1,0,0,0,1,0,12,0,0,0,1,0,55,0
325 | 362,3,2.84,2.4,1,0,0,0,1,0,11,0,0,0,1,0,49,0
326 | 363,3,3.08,1.72,1,0,0,0,1,1,12,1,0,0,1,0,58,1
327 | 364,3,2.2,1.6,1,0,1,0,1,0,12,0,0,0,1,0,59,0
328 | 365,3,2.32,1.72,2,0,0,0,1,1,11,0,0,0,1,0,56,0
329 | 366,3,2.04,1.8,0,0,0,0,0,0,12,0,0,0,1,0,64,0
330 | 367,3,2.56,2.2,1,0,0,0,1,0,11,0,0,0,1,0,62,0
331 | 370,3,3.8,3.16,0,0,0,0,0,0,12,0,0,0,1,0,59,0
332 | 371,3,2.88,2.16,0,0,0,0,0,0,12,0,0,0,1,0,59,0
333 | 372,3,2.32,1.76,0,0,0,0,0,0,12,0,0,0,0,0,55,0
334 | 373,3,2.92,2.4,1,0,0,0,1,0,11,0,0,0,1,0,46,0
335 | 374,3,2,1.52,0,0,1,0,1,0,14,1,0,0,1,0,60,0
336 | 375,3,2.4,2.16,1,0,0,0,1,0,12,0,0,0,1,0,69,0
337 | 376,3,4.56,3.84,0,0,0,0,0,0,12,0,0,0,1,0,74,0
338 | 377,3,4.03,3.09,1,0,0,0,1,0,11,0,0,0,1,0,59,0
339 | 378,3,2.16,1.88,0,0,0,0,0,0,12,0,0,0,1,0,63,0
340 | 379,3,4.52,3.36,1,0,0,0,0,1,12,0,0,0,1,0,63,0
341 | 381,3,3.76,1,0,0,1,0,0,0,12,0,0,0,1,0,52,0
342 | 382,3,5,3.88,0,0,0,0,0,0,11,0,0,0,1,0,51,0
343 | 384,3,2.4,1.88,1,0,0,0,1,0,11,0,0,0,0,0,53,0
344 | 385,3,2,1.64,1,0,0,0,1,0,12,0,0,0,0,0,61,0
345 | 386,3,2.52,1.96,1,0,0,0,1,0,12,1,0,0,1,0,72,0
346 | 387,3,4.4,3.56,1,0,0,1,1,1,11,0,0,0,1,0,60,1
347 | 389,3,1.96,1.4,1,0,0,0,1,0,13,0,0,0,1,0,69,0
348 | 391,3,2.92,2.28,1,0,0,0,1,0,12,0,0,0,1,0,75,0
349 | 394,3,3.72,3,1,0,0,0,1,0,12,0,0,0,1,0,61,0
350 | 397,3,2.76,2.08,0,0,0,0,0,0,12,0,0,0,0,0,21,0
351 | 398,3,4.56,3.48,1,0,0,0,1,0,12,0,0,0,1,0,60,0
352 | 400,3,2.7,1.9,1,0,0,0,1,0,11,0,0,0,1,0,65,0
353 | 401,3,2.48,1.6,0,0,0,0,0,0,11,0,0,0,0,0,61,0
354 | 402,3,3.56,2.8,0,0,0,0,0,0,12,0,0,0,0,0,69,0
355 | 403,3,2.96,2.2,1,0,0,0,1,0,12,0,0,0,1,0,53,0
356 | 404,3,4.04,2.56,1,0,1,0,1,0,12,0,0,0,1,0,55,0
357 | 407,3,3.44,2.92,1,0,0,0,1,0,11,0,0,0,1,0,56,0
358 | 409,3,3.08,2.24,1,0,0,0,1,0,12,1,0,0,1,0,59,0
359 | 410,3,2.64,2.15,0,0,0,0,0,0,11,0,0,0,1,0,59,0
360 | 412,3,4.64,4.16,1,1,0,0,1,0,13,0,0,0,1,0,56,0
361 | 413,3,3.32,2.52,0,0,0,0,0,0,11,0,0,0,0,0,56,0
362 | 415,3,1.46,1,1,0,1,0,1,0,11,0,0,0,1,0,68,0
363 | 416,3,3.4,2.39,0,0,0,0,0,0,11,0,0,0,0,0,63,0
364 | 417,3,3.44,2.4,1,0,0,0,1,1,11,1,0,0,1,0,77,0
365 | 418,3,5.16,4.28,1,0,0,0,0,0,12,0,0,0,1,0,52,0
366 | 423,3,2.68,2.16,0,0,0,0,0,0,12,0,0,0,1,0,70,0
367 | 424,3,5,4.04,0,0,1,0,1,0,12,0,0,0,0,0,60,0
368 | 426,3,3.18,2.73,1,0,0,0,1,0,12,0,0,0,1,0,47,0
369 | 427,3,2.48,2.08,1,0,0,0,1,0,13,0,0,0,1,0,54,1
370 | 428,3,3.44,2.72,1,1,1,0,1,0,11,0,0,0,0,0,73,0
371 | 429,3,3.12,2.12,1,0,0,0,1,1,12,0,0,0,1,0,62,0
372 | 430,3,3.48,2.52,1,0,0,0,1,0,14,1,0,0,1,0,72,0
373 | 431,3,3.87,2.68,0,0,0,0,0,0,12,0,0,0,1,0,63,0
374 | 432,3,1.44,1.2,1,0,0,0,1,0,11,0,0,0,1,0,58,0
375 | 433,3,2.28,1.82,0,0,0,0,0,0,11,1,0,0,0,0,69,0
376 | 434,3,4.28,2.72,1,1,1,0,1,0,11,0,0,0,1,0,66,0
377 | 435,3,3.08,2.28,1,0,0,0,1,0,11,0,0,0,1,0,57,0
378 | 436,3,2.96,2.04,1,0,0,0,1,0,11,0,0,0,1,0,56,0
379 | 437,3,4.8,3.32,1,0,0,1,1,0,12,0,0,0,1,0,54,0
380 | 438,3,4.08,3.2,1,0,0,0,1,0,12,0,0,0,1,0,40,0
381 | 440,3,2.36,1.6,1,0,0,0,1,1,11,0,0,1,1,0,54,0
382 | 441,3,3,2.44,1,0,0,0,1,1,12,0,0,0,1,0,65,0
383 | 444,3,4.12,3.2,2,0,0,0,1,1,11,0,0,0,0,0,76,0
384 | 445,3,2.56,60.9,0,0,0,0,0,0,11,0,0,0,1,0,50,0
385 | 446,3,2.72,1.76,0,0,0,0,0,0,11,0,0,0,1,0,63,0
386 | 449,3,2.96,2.24,0,0,0,0,0,0,12,0,0,0,1,0,69,0
387 | 450,3,2.84,1.88,1,0,0,0,1,0,12,0,0,0,1,0,53,1
388 | 451,3,2.28,1.68,2,0,0,0,1,1,11,0,0,0,0,0,77,0
389 | 453,3,2.8,2.24,1,1,0,0,1,0,13,0,0,0,1,0,70,0
390 | 454,3,2.84,2.32,1,0,1,0,1,1,11,0,0,0,1,0,72,0
391 | 455,3,3.24,2.76,0,0,0,0,0,0,11,0,0,0,1,0,70,0
392 | 457,3,2.4,1.24,1,0,0,0,1,0,12,0,0,0,1,0,62,0
393 | 458,3,4.56,3.2,0,0,0,0,1,0,11,0,0,0,1,0,61,0
394 | 459,3,3.6,3,1,0,0,0,1,0,11,0,0,0,0,0,46,0
395 | 460,3,4.28,3.16,0,0,0,0,1,0,12,0,0,0,0,0,66,0
396 | 462,3,1.84,1.56,1,1,1,0,1,0,12,0,0,0,0,0,72,0
397 | 463,3,2.12,1.68,2,1,1,0,0,0,11,0,0,0,1,0,74,0
398 | 465,3,3.08,2.16,1,0,0,0,1,1,13,0,0,0,1,0,79,0
399 | 467,3,3.76,3.12,0,0,0,0,0,0,11,0,0,0,1,0,61,0
400 | 468,3,3.04,2.08,1,0,0,0,1,0,13,0,0,0,0,0,52,0
401 | 469,3,1.96,1.68,1,0,0,0,1,1,12,0,0,0,1,0,79,0
402 | 470,3,4.72,3.56,0,0,0,0,0,0,12,0,0,0,1,0,51,0
403 | 22,4,3.32,2.84,0,0,0,0,0,0,12,0,0,0,1,0,62,0
404 | 54,4,3.76,2.52,1,0,0,0,1,0,12,0,0,0,1,0,75,0
405 | 56,4,3.28,2.36,1,0,0,0,1,0,12,0,0,0,1,0,65,0
406 | 59,4,5.12,4.28,0,0,0,0,0,0,12,0,0,0,1,0,62,0
407 | 68,4,2.32,1.76,1,0,1,0,1,1,11,0,0,0,1,0,62,1
408 | 74,4,6.3,5.48,0,0,0,0,0,0,11,0,0,0,0,0,45,0
409 | 91,4,3.52,2.72,1,0,1,0,1,0,12,0,0,0,1,0,80,0
410 | 97,4,2.68,2,1,0,0,0,1,0,12,0,0,0,1,0,70,1
411 | 103,4,4.32,3.24,1,0,0,0,1,0,12,0,0,0,1,0,76,0
412 | 116,4,2.76,1.76,1,0,1,0,1,0,11,1,0,0,1,0,61,1
413 | 117,4,2.88,2.24,1,0,0,0,1,1,12,0,0,0,1,0,73,0
414 | 119,4,3.48,2.56,1,0,0,0,0,0,11,0,0,0,1,0,57,0
415 | 124,4,3.3,2.56,0,0,0,0,0,0,11,0,0,0,1,0,67,0
416 | 129,4,3.31,2,2,0,0,1,1,0,12,0,0,0,1,0,81,1
417 | 144,4,2.08,1.76,0,0,0,0,0,0,12,0,0,0,1,0,69,1
418 | 188,4,3.28,1.64,1,0,0,0,1,0,11,0,0,0,1,0,62,0
419 | 190,4,4.92,3.72,0,0,0,0,0,0,12,0,0,0,1,0,60,0
420 | 200,4,2.44,1.64,1,0,0,0,1,1,11,0,0,0,1,0,72,0
421 | 222,4,4.24,3.68,1,0,0,0,1,0,12,0,0,0,1,0,67,0
422 | 226,4,2.76,2.16,1,1,0,0,0,0,12,1,0,0,1,0,62,0
423 | 247,4,5.56,4.32,0,0,0,0,0,0,12,0,0,0,1,0,68,0
424 | 249,4,4.56,3.68,1,0,0,0,1,0,12,0,0,0,1,0,62,0
425 | 264,4,3.04,2.88,0,0,0,0,0,0,11,0,0,0,0,0,70,0
426 | 274,4,3.36,2.72,2,0,0,0,1,1,11,1,0,0,1,0,72,0
427 | 287,4,4.9,3.96,1,0,0,0,1,0,12,0,0,0,1,0,55,0
428 | 311,4,3.4,2.92,0,0,0,0,0,0,11,0,0,0,0,0,63,0
429 | 315,4,2.12,1.36,1,0,0,0,1,0,12,0,0,0,1,0,71,0
430 | 325,4,5.16,4.96,1,0,0,0,0,0,11,0,0,0,1,0,54,0
431 | 326,4,5.03,79.3,1,0,0,1,0,0,11,0,0,0,0,0,38,0
432 | 330,4,2.08,1.84,0,0,0,0,0,0,12,0,0,0,0,0,77,0
433 | 334,4,2.2,1.8,0,0,0,0,0,0,11,0,0,0,0,0,71,0
434 | 338,4,3.24,2.6,1,0,0,0,1,0,12,0,0,0,1,0,69,0
435 | 343,4,2.5,1.4,1,0,1,0,1,0,11,0,0,0,1,0,77,0
436 | 350,4,1.82,86.3,0,0,0,0,0,0,12,0,0,0,0,0,67,0
437 | 360,4,2.84,2.12,0,0,0,0,0,0,12,0,0,0,0,0,64,0
438 | 380,4,2.72,2.04,1,0,0,0,1,0,11,0,0,0,1,0,75,0
439 | 383,4,3.4,2.16,1,1,1,0,1,0,12,0,0,0,0,0,68,0
440 | 388,4,4.2,3.32,0,0,0,0,0,0,12,0,0,0,1,0,58,0
441 | 393,4,3.56,2.6,1,0,0,0,1,0,13,0,0,0,1,0,68,0
442 | 395,4,3.96,2.44,1,0,0,0,1,1,11,0,0,0,1,0,44,0
443 | 396,4,3.04,3.68,1,0,0,0,1,0,11,1,0,0,1,0,64,0
444 | 420,4,2.44,2.08,2,0,0,0,1,1,12,0,0,0,1,0,72,1
445 | 425,4,2.81,2.31,1,1,0,0,0,0,12,0,0,0,1,0,58,0
446 | 452,4,3.04,2.36,1,0,0,0,1,0,12,0,0,0,1,0,59,0
447 | 456,4,2.92,1.92,1,0,0,0,1,0,12,0,0,0,1,0,70,0
448 | 461,4,4.65,3.78,1,0,0,0,1,0,12,0,0,0,0,0,55,0
449 | 464,4,3.44,2.16,1,0,0,0,1,1,12,1,0,0,1,0,57,1
450 | 26,5,4.56,72.8,0,1,1,0,1,0,12,0,0,0,1,0,57,0
451 | 33,5,2.48,1.95,1,1,0,0,0,0,12,1,0,0,0,0,72,0
452 | 41,5,3.8,2.98,1,0,0,0,1,0,11,0,0,0,1,0,60,1
453 | 44,5,2.68,2.12,0,0,0,0,1,0,12,0,0,0,1,0,51,1
454 | 89,5,2.68,1.76,2,0,1,0,1,1,11,0,0,0,1,0,76,0
455 | 106,5,4.95,4.12,1,0,0,0,0,1,11,0,0,0,0,0,57,0
456 | 186,5,3.52,2.56,0,0,0,1,0,0,12,0,0,0,0,0,81,1
457 | 221,5,2.87,2.08,1,0,0,0,1,0,13,0,0,0,1,0,56,1
458 | 232,5,2.88,2.52,1,0,0,0,1,0,12,0,0,0,1,0,56,0
459 | 239,5,3.4,2.08,1,0,0,0,0,1,11,0,0,0,1,0,55,1
460 | 272,5,3,2.16,0,0,0,0,0,0,11,0,0,0,1,0,72,0
461 | 307,5,3.3,2.4,1,0,0,0,1,1,12,0,0,0,1,0,70,0
462 | 368,5,2.38,1.72,1,0,1,0,1,0,12,1,0,1,1,0,87,1
463 | 421,5,4.96,4.16,1,0,0,0,1,0,11,0,0,0,1,0,62,1
464 | 439,5,3.67,76.8,0,1,1,0,1,0,12,0,0,0,0,0,61,0
465 | 30,6,3.96,3.28,0,0,0,0,0,0,11,0,0,0,1,0,61,0
466 | 98,6,3.04,2.4,2,0,0,0,1,0,11,0,0,0,1,0,76,0
467 | 369,6,3.88,2.72,1,0,0,0,1,0,12,0,0,0,1,0,77,0
468 | 406,6,5.36,3.96,1,0,0,0,1,0,12,0,0,0,0,0,62,0
469 | 25,8,4.32,3.2,0,0,0,0,0,0,11,0,0,0,0,0,58,1
470 | 447,8,5.2,4.1,0,0,0,0,0,0,12,0,0,0,0,0,49,0
471 |
--------------------------------------------------------------------------------
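Each row of ThoraricSurgery.csv has 18 comma-separated numeric fields with no header. Going by how the deep_code examples are named, the first 17 fields are presumably patient attributes and the last field a 0/1 outcome label. Below is a minimal loading-and-training sketch under that assumption; it is not necessarily identical to 01_My_First_Deeplearning.py.

```python
# Minimal sketch (assumptions: comma-separated, no header row, last of the
# 18 columns is a 0/1 outcome label). Not necessarily the repository's script.
import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense

data = np.loadtxt("deeplearning/dataset/ThoraricSurgery.csv", delimiter=",")
X = data[:, 0:17]   # 17 attributes per patient
Y = data[:, 17]     # outcome label (0 or 1)

model = Sequential([
    Dense(30, input_dim=17, activation="relu"),
    Dense(1, activation="sigmoid"),
])
model.compile(loss="binary_crossentropy", optimizer="adam", metrics=["accuracy"])
model.fit(X, Y, epochs=30, batch_size=10)
```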
/deeplearning/dataset/housing.csv:
--------------------------------------------------------------------------------
1 | 0.00632 18.00 2.310 0 0.5380 6.5750 65.20 4.0900 1 296.0 15.30 396.90 4.98 24.00
2 | 0.02731 0.00 7.070 0 0.4690 6.4210 78.90 4.9671 2 242.0 17.80 396.90 9.14 21.60
3 | 0.02729 0.00 7.070 0 0.4690 7.1850 61.10 4.9671 2 242.0 17.80 392.83 4.03 34.70
4 | 0.03237 0.00 2.180 0 0.4580 6.9980 45.80 6.0622 3 222.0 18.70 394.63 2.94 33.40
5 | 0.06905 0.00 2.180 0 0.4580 7.1470 54.20 6.0622 3 222.0 18.70 396.90 5.33 36.20
6 | 0.02985 0.00 2.180 0 0.4580 6.4300 58.70 6.0622 3 222.0 18.70 394.12 5.21 28.70
7 | 0.08829 12.50 7.870 0 0.5240 6.0120 66.60 5.5605 5 311.0 15.20 395.60 12.43 22.90
8 | 0.14455 12.50 7.870 0 0.5240 6.1720 96.10 5.9505 5 311.0 15.20 396.90 19.15 27.10
9 | 0.21124 12.50 7.870 0 0.5240 5.6310 100.00 6.0821 5 311.0 15.20 386.63 29.93 16.50
10 | 0.17004 12.50 7.870 0 0.5240 6.0040 85.90 6.5921 5 311.0 15.20 386.71 17.10 18.90
11 | 0.22489 12.50 7.870 0 0.5240 6.3770 94.30 6.3467 5 311.0 15.20 392.52 20.45 15.00
12 | 0.11747 12.50 7.870 0 0.5240 6.0090 82.90 6.2267 5 311.0 15.20 396.90 13.27 18.90
13 | 0.09378 12.50 7.870 0 0.5240 5.8890 39.00 5.4509 5 311.0 15.20 390.50 15.71 21.70
14 | 0.62976 0.00 8.140 0 0.5380 5.9490 61.80 4.7075 4 307.0 21.00 396.90 8.26 20.40
15 | 0.63796 0.00 8.140 0 0.5380 6.0960 84.50 4.4619 4 307.0 21.00 380.02 10.26 18.20
16 | 0.62739 0.00 8.140 0 0.5380 5.8340 56.50 4.4986 4 307.0 21.00 395.62 8.47 19.90
17 | 1.05393 0.00 8.140 0 0.5380 5.9350 29.30 4.4986 4 307.0 21.00 386.85 6.58 23.10
18 | 0.78420 0.00 8.140 0 0.5380 5.9900 81.70 4.2579 4 307.0 21.00 386.75 14.67 17.50
19 | 0.80271 0.00 8.140 0 0.5380 5.4560 36.60 3.7965 4 307.0 21.00 288.99 11.69 20.20
20 | 0.72580 0.00 8.140 0 0.5380 5.7270 69.50 3.7965 4 307.0 21.00 390.95 11.28 18.20
21 | 1.25179 0.00 8.140 0 0.5380 5.5700 98.10 3.7979 4 307.0 21.00 376.57 21.02 13.60
22 | 0.85204 0.00 8.140 0 0.5380 5.9650 89.20 4.0123 4 307.0 21.00 392.53 13.83 19.60
23 | 1.23247 0.00 8.140 0 0.5380 6.1420 91.70 3.9769 4 307.0 21.00 396.90 18.72 15.20
24 | 0.98843 0.00 8.140 0 0.5380 5.8130 100.00 4.0952 4 307.0 21.00 394.54 19.88 14.50
25 | 0.75026 0.00 8.140 0 0.5380 5.9240 94.10 4.3996 4 307.0 21.00 394.33 16.30 15.60
26 | 0.84054 0.00 8.140 0 0.5380 5.5990 85.70 4.4546 4 307.0 21.00 303.42 16.51 13.90
27 | 0.67191 0.00 8.140 0 0.5380 5.8130 90.30 4.6820 4 307.0 21.00 376.88 14.81 16.60
28 | 0.95577 0.00 8.140 0 0.5380 6.0470 88.80 4.4534 4 307.0 21.00 306.38 17.28 14.80
29 | 0.77299 0.00 8.140 0 0.5380 6.4950 94.40 4.4547 4 307.0 21.00 387.94 12.80 18.40
30 | 1.00245 0.00 8.140 0 0.5380 6.6740 87.30 4.2390 4 307.0 21.00 380.23 11.98 21.00
31 | 1.13081 0.00 8.140 0 0.5380 5.7130 94.10 4.2330 4 307.0 21.00 360.17 22.60 12.70
32 | 1.35472 0.00 8.140 0 0.5380 6.0720 100.00 4.1750 4 307.0 21.00 376.73 13.04 14.50
33 | 1.38799 0.00 8.140 0 0.5380 5.9500 82.00 3.9900 4 307.0 21.00 232.60 27.71 13.20
34 | 1.15172 0.00 8.140 0 0.5380 5.7010 95.00 3.7872 4 307.0 21.00 358.77 18.35 13.10
35 | 1.61282 0.00 8.140 0 0.5380 6.0960 96.90 3.7598 4 307.0 21.00 248.31 20.34 13.50
36 | 0.06417 0.00 5.960 0 0.4990 5.9330 68.20 3.3603 5 279.0 19.20 396.90 9.68 18.90
37 | 0.09744 0.00 5.960 0 0.4990 5.8410 61.40 3.3779 5 279.0 19.20 377.56 11.41 20.00
38 | 0.08014 0.00 5.960 0 0.4990 5.8500 41.50 3.9342 5 279.0 19.20 396.90 8.77 21.00
39 | 0.17505 0.00 5.960 0 0.4990 5.9660 30.20 3.8473 5 279.0 19.20 393.43 10.13 24.70
40 | 0.02763 75.00 2.950 0 0.4280 6.5950 21.80 5.4011 3 252.0 18.30 395.63 4.32 30.80
41 | 0.03359 75.00 2.950 0 0.4280 7.0240 15.80 5.4011 3 252.0 18.30 395.62 1.98 34.90
42 | 0.12744 0.00 6.910 0 0.4480 6.7700 2.90 5.7209 3 233.0 17.90 385.41 4.84 26.60
43 | 0.14150 0.00 6.910 0 0.4480 6.1690 6.60 5.7209 3 233.0 17.90 383.37 5.81 25.30
44 | 0.15936 0.00 6.910 0 0.4480 6.2110 6.50 5.7209 3 233.0 17.90 394.46 7.44 24.70
45 | 0.12269 0.00 6.910 0 0.4480 6.0690 40.00 5.7209 3 233.0 17.90 389.39 9.55 21.20
46 | 0.17142 0.00 6.910 0 0.4480 5.6820 33.80 5.1004 3 233.0 17.90 396.90 10.21 19.30
47 | 0.18836 0.00 6.910 0 0.4480 5.7860 33.30 5.1004 3 233.0 17.90 396.90 14.15 20.00
48 | 0.22927 0.00 6.910 0 0.4480 6.0300 85.50 5.6894 3 233.0 17.90 392.74 18.80 16.60
49 | 0.25387 0.00 6.910 0 0.4480 5.3990 95.30 5.8700 3 233.0 17.90 396.90 30.81 14.40
50 | 0.21977 0.00 6.910 0 0.4480 5.6020 62.00 6.0877 3 233.0 17.90 396.90 16.20 19.40
51 | 0.08873 21.00 5.640 0 0.4390 5.9630 45.70 6.8147 4 243.0 16.80 395.56 13.45 19.70
52 | 0.04337 21.00 5.640 0 0.4390 6.1150 63.00 6.8147 4 243.0 16.80 393.97 9.43 20.50
53 | 0.05360 21.00 5.640 0 0.4390 6.5110 21.10 6.8147 4 243.0 16.80 396.90 5.28 25.00
54 | 0.04981 21.00 5.640 0 0.4390 5.9980 21.40 6.8147 4 243.0 16.80 396.90 8.43 23.40
55 | 0.01360 75.00 4.000 0 0.4100 5.8880 47.60 7.3197 3 469.0 21.10 396.90 14.80 18.90
56 | 0.01311 90.00 1.220 0 0.4030 7.2490 21.90 8.6966 5 226.0 17.90 395.93 4.81 35.40
57 | 0.02055 85.00 0.740 0 0.4100 6.3830 35.70 9.1876 2 313.0 17.30 396.90 5.77 24.70
58 | 0.01432 100.00 1.320 0 0.4110 6.8160 40.50 8.3248 5 256.0 15.10 392.90 3.95 31.60
59 | 0.15445 25.00 5.130 0 0.4530 6.1450 29.20 7.8148 8 284.0 19.70 390.68 6.86 23.30
60 | 0.10328 25.00 5.130 0 0.4530 5.9270 47.20 6.9320 8 284.0 19.70 396.90 9.22 19.60
61 | 0.14932 25.00 5.130 0 0.4530 5.7410 66.20 7.2254 8 284.0 19.70 395.11 13.15 18.70
62 | 0.17171 25.00 5.130 0 0.4530 5.9660 93.40 6.8185 8 284.0 19.70 378.08 14.44 16.00
63 | 0.11027 25.00 5.130 0 0.4530 6.4560 67.80 7.2255 8 284.0 19.70 396.90 6.73 22.20
64 | 0.12650 25.00 5.130 0 0.4530 6.7620 43.40 7.9809 8 284.0 19.70 395.58 9.50 25.00
65 | 0.01951 17.50 1.380 0 0.4161 7.1040 59.50 9.2229 3 216.0 18.60 393.24 8.05 33.00
66 | 0.03584 80.00 3.370 0 0.3980 6.2900 17.80 6.6115 4 337.0 16.10 396.90 4.67 23.50
67 | 0.04379 80.00 3.370 0 0.3980 5.7870 31.10 6.6115 4 337.0 16.10 396.90 10.24 19.40
68 | 0.05789 12.50 6.070 0 0.4090 5.8780 21.40 6.4980 4 345.0 18.90 396.21 8.10 22.00
69 | 0.13554 12.50 6.070 0 0.4090 5.5940 36.80 6.4980 4 345.0 18.90 396.90 13.09 17.40
70 | 0.12816 12.50 6.070 0 0.4090 5.8850 33.00 6.4980 4 345.0 18.90 396.90 8.79 20.90
71 | 0.08826 0.00 10.810 0 0.4130 6.4170 6.60 5.2873 4 305.0 19.20 383.73 6.72 24.20
72 | 0.15876 0.00 10.810 0 0.4130 5.9610 17.50 5.2873 4 305.0 19.20 376.94 9.88 21.70
73 | 0.09164 0.00 10.810 0 0.4130 6.0650 7.80 5.2873 4 305.0 19.20 390.91 5.52 22.80
74 | 0.19539 0.00 10.810 0 0.4130 6.2450 6.20 5.2873 4 305.0 19.20 377.17 7.54 23.40
75 | 0.07896 0.00 12.830 0 0.4370 6.2730 6.00 4.2515 5 398.0 18.70 394.92 6.78 24.10
76 | 0.09512 0.00 12.830 0 0.4370 6.2860 45.00 4.5026 5 398.0 18.70 383.23 8.94 21.40
77 | 0.10153 0.00 12.830 0 0.4370 6.2790 74.50 4.0522 5 398.0 18.70 373.66 11.97 20.00
78 | 0.08707 0.00 12.830 0 0.4370 6.1400 45.80 4.0905 5 398.0 18.70 386.96 10.27 20.80
79 | 0.05646 0.00 12.830 0 0.4370 6.2320 53.70 5.0141 5 398.0 18.70 386.40 12.34 21.20
80 | 0.08387 0.00 12.830 0 0.4370 5.8740 36.60 4.5026 5 398.0 18.70 396.06 9.10 20.30
81 | 0.04113 25.00 4.860 0 0.4260 6.7270 33.50 5.4007 4 281.0 19.00 396.90 5.29 28.00
82 | 0.04462 25.00 4.860 0 0.4260 6.6190 70.40 5.4007 4 281.0 19.00 395.63 7.22 23.90
83 | 0.03659 25.00 4.860 0 0.4260 6.3020 32.20 5.4007 4 281.0 19.00 396.90 6.72 24.80
84 | 0.03551 25.00 4.860 0 0.4260 6.1670 46.70 5.4007 4 281.0 19.00 390.64 7.51 22.90
85 | 0.05059 0.00 4.490 0 0.4490 6.3890 48.00 4.7794 3 247.0 18.50 396.90 9.62 23.90
86 | 0.05735 0.00 4.490 0 0.4490 6.6300 56.10 4.4377 3 247.0 18.50 392.30 6.53 26.60
87 | 0.05188 0.00 4.490 0 0.4490 6.0150 45.10 4.4272 3 247.0 18.50 395.99 12.86 22.50
88 | 0.07151 0.00 4.490 0 0.4490 6.1210 56.80 3.7476 3 247.0 18.50 395.15 8.44 22.20
89 | 0.05660 0.00 3.410 0 0.4890 7.0070 86.30 3.4217 2 270.0 17.80 396.90 5.50 23.60
90 | 0.05302 0.00 3.410 0 0.4890 7.0790 63.10 3.4145 2 270.0 17.80 396.06 5.70 28.70
91 | 0.04684 0.00 3.410 0 0.4890 6.4170 66.10 3.0923 2 270.0 17.80 392.18 8.81 22.60
92 | 0.03932 0.00 3.410 0 0.4890 6.4050 73.90 3.0921 2 270.0 17.80 393.55 8.20 22.00
93 | 0.04203 28.00 15.040 0 0.4640 6.4420 53.60 3.6659 4 270.0 18.20 395.01 8.16 22.90
94 | 0.02875 28.00 15.040 0 0.4640 6.2110 28.90 3.6659 4 270.0 18.20 396.33 6.21 25.00
95 | 0.04294 28.00 15.040 0 0.4640 6.2490 77.30 3.6150 4 270.0 18.20 396.90 10.59 20.60
96 | 0.12204 0.00 2.890 0 0.4450 6.6250 57.80 3.4952 2 276.0 18.00 357.98 6.65 28.40
97 | 0.11504 0.00 2.890 0 0.4450 6.1630 69.60 3.4952 2 276.0 18.00 391.83 11.34 21.40
98 | 0.12083 0.00 2.890 0 0.4450 8.0690 76.00 3.4952 2 276.0 18.00 396.90 4.21 38.70
99 | 0.08187 0.00 2.890 0 0.4450 7.8200 36.90 3.4952 2 276.0 18.00 393.53 3.57 43.80
100 | 0.06860 0.00 2.890 0 0.4450 7.4160 62.50 3.4952 2 276.0 18.00 396.90 6.19 33.20
101 | 0.14866 0.00 8.560 0 0.5200 6.7270 79.90 2.7778 5 384.0 20.90 394.76 9.42 27.50
102 | 0.11432 0.00 8.560 0 0.5200 6.7810 71.30 2.8561 5 384.0 20.90 395.58 7.67 26.50
103 | 0.22876 0.00 8.560 0 0.5200 6.4050 85.40 2.7147 5 384.0 20.90 70.80 10.63 18.60
104 | 0.21161 0.00 8.560 0 0.5200 6.1370 87.40 2.7147 5 384.0 20.90 394.47 13.44 19.30
105 | 0.13960 0.00 8.560 0 0.5200 6.1670 90.00 2.4210 5 384.0 20.90 392.69 12.33 20.10
106 | 0.13262 0.00 8.560 0 0.5200 5.8510 96.70 2.1069 5 384.0 20.90 394.05 16.47 19.50
107 | 0.17120 0.00 8.560 0 0.5200 5.8360 91.90 2.2110 5 384.0 20.90 395.67 18.66 19.50
108 | 0.13117 0.00 8.560 0 0.5200 6.1270 85.20 2.1224 5 384.0 20.90 387.69 14.09 20.40
109 | 0.12802 0.00 8.560 0 0.5200 6.4740 97.10 2.4329 5 384.0 20.90 395.24 12.27 19.80
110 | 0.26363 0.00 8.560 0 0.5200 6.2290 91.20 2.5451 5 384.0 20.90 391.23 15.55 19.40
111 | 0.10793 0.00 8.560 0 0.5200 6.1950 54.40 2.7778 5 384.0 20.90 393.49 13.00 21.70
112 | 0.10084 0.00 10.010 0 0.5470 6.7150 81.60 2.6775 6 432.0 17.80 395.59 10.16 22.80
113 | 0.12329 0.00 10.010 0 0.5470 5.9130 92.90 2.3534 6 432.0 17.80 394.95 16.21 18.80
114 | 0.22212 0.00 10.010 0 0.5470 6.0920 95.40 2.5480 6 432.0 17.80 396.90 17.09 18.70
115 | 0.14231 0.00 10.010 0 0.5470 6.2540 84.20 2.2565 6 432.0 17.80 388.74 10.45 18.50
116 | 0.17134 0.00 10.010 0 0.5470 5.9280 88.20 2.4631 6 432.0 17.80 344.91 15.76 18.30
117 | 0.13158 0.00 10.010 0 0.5470 6.1760 72.50 2.7301 6 432.0 17.80 393.30 12.04 21.20
118 | 0.15098 0.00 10.010 0 0.5470 6.0210 82.60 2.7474 6 432.0 17.80 394.51 10.30 19.20
119 | 0.13058 0.00 10.010 0 0.5470 5.8720 73.10 2.4775 6 432.0 17.80 338.63 15.37 20.40
120 | 0.14476 0.00 10.010 0 0.5470 5.7310 65.20 2.7592 6 432.0 17.80 391.50 13.61 19.30
121 | 0.06899 0.00 25.650 0 0.5810 5.8700 69.70 2.2577 2 188.0 19.10 389.15 14.37 22.00
122 | 0.07165 0.00 25.650 0 0.5810 6.0040 84.10 2.1974 2 188.0 19.10 377.67 14.27 20.30
123 | 0.09299 0.00 25.650 0 0.5810 5.9610 92.90 2.0869 2 188.0 19.10 378.09 17.93 20.50
124 | 0.15038 0.00 25.650 0 0.5810 5.8560 97.00 1.9444 2 188.0 19.10 370.31 25.41 17.30
125 | 0.09849 0.00 25.650 0 0.5810 5.8790 95.80 2.0063 2 188.0 19.10 379.38 17.58 18.80
126 | 0.16902 0.00 25.650 0 0.5810 5.9860 88.40 1.9929 2 188.0 19.10 385.02 14.81 21.40
127 | 0.38735 0.00 25.650 0 0.5810 5.6130 95.60 1.7572 2 188.0 19.10 359.29 27.26 15.70
128 | 0.25915 0.00 21.890 0 0.6240 5.6930 96.00 1.7883 4 437.0 21.20 392.11 17.19 16.20
129 | 0.32543 0.00 21.890 0 0.6240 6.4310 98.80 1.8125 4 437.0 21.20 396.90 15.39 18.00
130 | 0.88125 0.00 21.890 0 0.6240 5.6370 94.70 1.9799 4 437.0 21.20 396.90 18.34 14.30
131 | 0.34006 0.00 21.890 0 0.6240 6.4580 98.90 2.1185 4 437.0 21.20 395.04 12.60 19.20
132 | 1.19294 0.00 21.890 0 0.6240 6.3260 97.70 2.2710 4 437.0 21.20 396.90 12.26 19.60
133 | 0.59005 0.00 21.890 0 0.6240 6.3720 97.90 2.3274 4 437.0 21.20 385.76 11.12 23.00
134 | 0.32982 0.00 21.890 0 0.6240 5.8220 95.40 2.4699 4 437.0 21.20 388.69 15.03 18.40
135 | 0.97617 0.00 21.890 0 0.6240 5.7570 98.40 2.3460 4 437.0 21.20 262.76 17.31 15.60
136 | 0.55778 0.00 21.890 0 0.6240 6.3350 98.20 2.1107 4 437.0 21.20 394.67 16.96 18.10
137 | 0.32264 0.00 21.890 0 0.6240 5.9420 93.50 1.9669 4 437.0 21.20 378.25 16.90 17.40
138 | 0.35233 0.00 21.890 0 0.6240 6.4540 98.40 1.8498 4 437.0 21.20 394.08 14.59 17.10
139 | 0.24980 0.00 21.890 0 0.6240 5.8570 98.20 1.6686 4 437.0 21.20 392.04 21.32 13.30
140 | 0.54452 0.00 21.890 0 0.6240 6.1510 97.90 1.6687 4 437.0 21.20 396.90 18.46 17.80
141 | 0.29090 0.00 21.890 0 0.6240 6.1740 93.60 1.6119 4 437.0 21.20 388.08 24.16 14.00
142 | 1.62864 0.00 21.890 0 0.6240 5.0190 100.00 1.4394 4 437.0 21.20 396.90 34.41 14.40
143 | 3.32105 0.00 19.580 1 0.8710 5.4030 100.00 1.3216 5 403.0 14.70 396.90 26.82 13.40
144 | 4.09740 0.00 19.580 0 0.8710 5.4680 100.00 1.4118 5 403.0 14.70 396.90 26.42 15.60
145 | 2.77974 0.00 19.580 0 0.8710 4.9030 97.80 1.3459 5 403.0 14.70 396.90 29.29 11.80
146 | 2.37934 0.00 19.580 0 0.8710 6.1300 100.00 1.4191 5 403.0 14.70 172.91 27.80 13.80
147 | 2.15505 0.00 19.580 0 0.8710 5.6280 100.00 1.5166 5 403.0 14.70 169.27 16.65 15.60
148 | 2.36862 0.00 19.580 0 0.8710 4.9260 95.70 1.4608 5 403.0 14.70 391.71 29.53 14.60
149 | 2.33099 0.00 19.580 0 0.8710 5.1860 93.80 1.5296 5 403.0 14.70 356.99 28.32 17.80
150 | 2.73397 0.00 19.580 0 0.8710 5.5970 94.90 1.5257 5 403.0 14.70 351.85 21.45 15.40
151 | 1.65660 0.00 19.580 0 0.8710 6.1220 97.30 1.6180 5 403.0 14.70 372.80 14.10 21.50
152 | 1.49632 0.00 19.580 0 0.8710 5.4040 100.00 1.5916 5 403.0 14.70 341.60 13.28 19.60
153 | 1.12658 0.00 19.580 1 0.8710 5.0120 88.00 1.6102 5 403.0 14.70 343.28 12.12 15.30
154 | 2.14918 0.00 19.580 0 0.8710 5.7090 98.50 1.6232 5 403.0 14.70 261.95 15.79 19.40
155 | 1.41385 0.00 19.580 1 0.8710 6.1290 96.00 1.7494 5 403.0 14.70 321.02 15.12 17.00
156 | 3.53501 0.00 19.580 1 0.8710 6.1520 82.60 1.7455 5 403.0 14.70 88.01 15.02 15.60
157 | 2.44668 0.00 19.580 0 0.8710 5.2720 94.00 1.7364 5 403.0 14.70 88.63 16.14 13.10
158 | 1.22358 0.00 19.580 0 0.6050 6.9430 97.40 1.8773 5 403.0 14.70 363.43 4.59 41.30
159 | 1.34284 0.00 19.580 0 0.6050 6.0660 100.00 1.7573 5 403.0 14.70 353.89 6.43 24.30
160 | 1.42502 0.00 19.580 0 0.8710 6.5100 100.00 1.7659 5 403.0 14.70 364.31 7.39 23.30
161 | 1.27346 0.00 19.580 1 0.6050 6.2500 92.60 1.7984 5 403.0 14.70 338.92 5.50 27.00
162 | 1.46336 0.00 19.580 0 0.6050 7.4890 90.80 1.9709 5 403.0 14.70 374.43 1.73 50.00
163 | 1.83377 0.00 19.580 1 0.6050 7.8020 98.20 2.0407 5 403.0 14.70 389.61 1.92 50.00
164 | 1.51902 0.00 19.580 1 0.6050 8.3750 93.90 2.1620 5 403.0 14.70 388.45 3.32 50.00
165 | 2.24236 0.00 19.580 0 0.6050 5.8540 91.80 2.4220 5 403.0 14.70 395.11 11.64 22.70
166 | 2.92400 0.00 19.580 0 0.6050 6.1010 93.00 2.2834 5 403.0 14.70 240.16 9.81 25.00
167 | 2.01019 0.00 19.580 0 0.6050 7.9290 96.20 2.0459 5 403.0 14.70 369.30 3.70 50.00
168 | 1.80028 0.00 19.580 0 0.6050 5.8770 79.20 2.4259 5 403.0 14.70 227.61 12.14 23.80
169 | 2.30040 0.00 19.580 0 0.6050 6.3190 96.10 2.1000 5 403.0 14.70 297.09 11.10 23.80
170 | 2.44953 0.00 19.580 0 0.6050 6.4020 95.20 2.2625 5 403.0 14.70 330.04 11.32 22.30
171 | 1.20742 0.00 19.580 0 0.6050 5.8750 94.60 2.4259 5 403.0 14.70 292.29 14.43 17.40
172 | 2.31390 0.00 19.580 0 0.6050 5.8800 97.30 2.3887 5 403.0 14.70 348.13 12.03 19.10
173 | 0.13914 0.00 4.050 0 0.5100 5.5720 88.50 2.5961 5 296.0 16.60 396.90 14.69 23.10
174 | 0.09178 0.00 4.050 0 0.5100 6.4160 84.10 2.6463 5 296.0 16.60 395.50 9.04 23.60
175 | 0.08447 0.00 4.050 0 0.5100 5.8590 68.70 2.7019 5 296.0 16.60 393.23 9.64 22.60
176 | 0.06664 0.00 4.050 0 0.5100 6.5460 33.10 3.1323 5 296.0 16.60 390.96 5.33 29.40
177 | 0.07022 0.00 4.050 0 0.5100 6.0200 47.20 3.5549 5 296.0 16.60 393.23 10.11 23.20
178 | 0.05425 0.00 4.050 0 0.5100 6.3150 73.40 3.3175 5 296.0 16.60 395.60 6.29 24.60
179 | 0.06642 0.00 4.050 0 0.5100 6.8600 74.40 2.9153 5 296.0 16.60 391.27 6.92 29.90
180 | 0.05780 0.00 2.460 0 0.4880 6.9800 58.40 2.8290 3 193.0 17.80 396.90 5.04 37.20
181 | 0.06588 0.00 2.460 0 0.4880 7.7650 83.30 2.7410 3 193.0 17.80 395.56 7.56 39.80
182 | 0.06888 0.00 2.460 0 0.4880 6.1440 62.20 2.5979 3 193.0 17.80 396.90 9.45 36.20
183 | 0.09103 0.00 2.460 0 0.4880 7.1550 92.20 2.7006 3 193.0 17.80 394.12 4.82 37.90
184 | 0.10008 0.00 2.460 0 0.4880 6.5630 95.60 2.8470 3 193.0 17.80 396.90 5.68 32.50
185 | 0.08308 0.00 2.460 0 0.4880 5.6040 89.80 2.9879 3 193.0 17.80 391.00 13.98 26.40
186 | 0.06047 0.00 2.460 0 0.4880 6.1530 68.80 3.2797 3 193.0 17.80 387.11 13.15 29.60
187 | 0.05602 0.00 2.460 0 0.4880 7.8310 53.60 3.1992 3 193.0 17.80 392.63 4.45 50.00
188 | 0.07875 45.00 3.440 0 0.4370 6.7820 41.10 3.7886 5 398.0 15.20 393.87 6.68 32.00
189 | 0.12579 45.00 3.440 0 0.4370 6.5560 29.10 4.5667 5 398.0 15.20 382.84 4.56 29.80
190 | 0.08370 45.00 3.440 0 0.4370 7.1850 38.90 4.5667 5 398.0 15.20 396.90 5.39 34.90
191 | 0.09068 45.00 3.440 0 0.4370 6.9510 21.50 6.4798 5 398.0 15.20 377.68 5.10 37.00
192 | 0.06911 45.00 3.440 0 0.4370 6.7390 30.80 6.4798 5 398.0 15.20 389.71 4.69 30.50
193 | 0.08664 45.00 3.440 0 0.4370 7.1780 26.30 6.4798 5 398.0 15.20 390.49 2.87 36.40
194 | 0.02187 60.00 2.930 0 0.4010 6.8000 9.90 6.2196 1 265.0 15.60 393.37 5.03 31.10
195 | 0.01439 60.00 2.930 0 0.4010 6.6040 18.80 6.2196 1 265.0 15.60 376.70 4.38 29.10
196 | 0.01381 80.00 0.460 0 0.4220 7.8750 32.00 5.6484 4 255.0 14.40 394.23 2.97 50.00
197 | 0.04011 80.00 1.520 0 0.4040 7.2870 34.10 7.3090 2 329.0 12.60 396.90 4.08 33.30
198 | 0.04666 80.00 1.520 0 0.4040 7.1070 36.60 7.3090 2 329.0 12.60 354.31 8.61 30.30
199 | 0.03768 80.00 1.520 0 0.4040 7.2740 38.30 7.3090 2 329.0 12.60 392.20 6.62 34.60
200 | 0.03150 95.00 1.470 0 0.4030 6.9750 15.30 7.6534 3 402.0 17.00 396.90 4.56 34.90
201 | 0.01778 95.00 1.470 0 0.4030 7.1350 13.90 7.6534 3 402.0 17.00 384.30 4.45 32.90
202 | 0.03445 82.50 2.030 0 0.4150 6.1620 38.40 6.2700 2 348.0 14.70 393.77 7.43 24.10
203 | 0.02177 82.50 2.030 0 0.4150 7.6100 15.70 6.2700 2 348.0 14.70 395.38 3.11 42.30
204 | 0.03510 95.00 2.680 0 0.4161 7.8530 33.20 5.1180 4 224.0 14.70 392.78 3.81 48.50
205 | 0.02009 95.00 2.680 0 0.4161 8.0340 31.90 5.1180 4 224.0 14.70 390.55 2.88 50.00
206 | 0.13642 0.00 10.590 0 0.4890 5.8910 22.30 3.9454 4 277.0 18.60 396.90 10.87 22.60
207 | 0.22969 0.00 10.590 0 0.4890 6.3260 52.50 4.3549 4 277.0 18.60 394.87 10.97 24.40
208 | 0.25199 0.00 10.590 0 0.4890 5.7830 72.70 4.3549 4 277.0 18.60 389.43 18.06 22.50
209 | 0.13587 0.00 10.590 1 0.4890 6.0640 59.10 4.2392 4 277.0 18.60 381.32 14.66 24.40
210 | 0.43571 0.00 10.590 1 0.4890 5.3440 100.00 3.8750 4 277.0 18.60 396.90 23.09 20.00
211 | 0.17446 0.00 10.590 1 0.4890 5.9600 92.10 3.8771 4 277.0 18.60 393.25 17.27 21.70
212 | 0.37578 0.00 10.590 1 0.4890 5.4040 88.60 3.6650 4 277.0 18.60 395.24 23.98 19.30
213 | 0.21719 0.00 10.590 1 0.4890 5.8070 53.80 3.6526 4 277.0 18.60 390.94 16.03 22.40
214 | 0.14052 0.00 10.590 0 0.4890 6.3750 32.30 3.9454 4 277.0 18.60 385.81 9.38 28.10
215 | 0.28955 0.00 10.590 0 0.4890 5.4120 9.80 3.5875 4 277.0 18.60 348.93 29.55 23.70
216 | 0.19802 0.00 10.590 0 0.4890 6.1820 42.40 3.9454 4 277.0 18.60 393.63 9.47 25.00
217 | 0.04560 0.00 13.890 1 0.5500 5.8880 56.00 3.1121 5 276.0 16.40 392.80 13.51 23.30
218 | 0.07013 0.00 13.890 0 0.5500 6.6420 85.10 3.4211 5 276.0 16.40 392.78 9.69 28.70
219 | 0.11069 0.00 13.890 1 0.5500 5.9510 93.80 2.8893 5 276.0 16.40 396.90 17.92 21.50
220 | 0.11425 0.00 13.890 1 0.5500 6.3730 92.40 3.3633 5 276.0 16.40 393.74 10.50 23.00
221 | 0.35809 0.00 6.200 1 0.5070 6.9510 88.50 2.8617 8 307.0 17.40 391.70 9.71 26.70
222 | 0.40771 0.00 6.200 1 0.5070 6.1640 91.30 3.0480 8 307.0 17.40 395.24 21.46 21.70
223 | 0.62356 0.00 6.200 1 0.5070 6.8790 77.70 3.2721 8 307.0 17.40 390.39 9.93 27.50
224 | 0.61470 0.00 6.200 0 0.5070 6.6180 80.80 3.2721 8 307.0 17.40 396.90 7.60 30.10
225 | 0.31533 0.00 6.200 0 0.5040 8.2660 78.30 2.8944 8 307.0 17.40 385.05 4.14 44.80
226 | 0.52693 0.00 6.200 0 0.5040 8.7250 83.00 2.8944 8 307.0 17.40 382.00 4.63 50.00
227 | 0.38214 0.00 6.200 0 0.5040 8.0400 86.50 3.2157 8 307.0 17.40 387.38 3.13 37.60
228 | 0.41238 0.00 6.200 0 0.5040 7.1630 79.90 3.2157 8 307.0 17.40 372.08 6.36 31.60
229 | 0.29819 0.00 6.200 0 0.5040 7.6860 17.00 3.3751 8 307.0 17.40 377.51 3.92 46.70
230 | 0.44178 0.00 6.200 0 0.5040 6.5520 21.40 3.3751 8 307.0 17.40 380.34 3.76 31.50
231 | 0.53700 0.00 6.200 0 0.5040 5.9810 68.10 3.6715 8 307.0 17.40 378.35 11.65 24.30
232 | 0.46296 0.00 6.200 0 0.5040 7.4120 76.90 3.6715 8 307.0 17.40 376.14 5.25 31.70
233 | 0.57529 0.00 6.200 0 0.5070 8.3370 73.30 3.8384 8 307.0 17.40 385.91 2.47 41.70
234 | 0.33147 0.00 6.200 0 0.5070 8.2470 70.40 3.6519 8 307.0 17.40 378.95 3.95 48.30
235 | 0.44791 0.00 6.200 1 0.5070 6.7260 66.50 3.6519 8 307.0 17.40 360.20 8.05 29.00
236 | 0.33045 0.00 6.200 0 0.5070 6.0860 61.50 3.6519 8 307.0 17.40 376.75 10.88 24.00
237 | 0.52058 0.00 6.200 1 0.5070 6.6310 76.50 4.1480 8 307.0 17.40 388.45 9.54 25.10
238 | 0.51183 0.00 6.200 0 0.5070 7.3580 71.60 4.1480 8 307.0 17.40 390.07 4.73 31.50
239 | 0.08244 30.00 4.930 0 0.4280 6.4810 18.50 6.1899 6 300.0 16.60 379.41 6.36 23.70
240 | 0.09252 30.00 4.930 0 0.4280 6.6060 42.20 6.1899 6 300.0 16.60 383.78 7.37 23.30
241 | 0.11329 30.00 4.930 0 0.4280 6.8970 54.30 6.3361 6 300.0 16.60 391.25 11.38 22.00
242 | 0.10612 30.00 4.930 0 0.4280 6.0950 65.10 6.3361 6 300.0 16.60 394.62 12.40 20.10
243 | 0.10290 30.00 4.930 0 0.4280 6.3580 52.90 7.0355 6 300.0 16.60 372.75 11.22 22.20
244 | 0.12757 30.00 4.930 0 0.4280 6.3930 7.80 7.0355 6 300.0 16.60 374.71 5.19 23.70
245 | 0.20608 22.00 5.860 0 0.4310 5.5930 76.50 7.9549 7 330.0 19.10 372.49 12.50 17.60
246 | 0.19133 22.00 5.860 0 0.4310 5.6050 70.20 7.9549 7 330.0 19.10 389.13 18.46 18.50
247 | 0.33983 22.00 5.860 0 0.4310 6.1080 34.90 8.0555 7 330.0 19.10 390.18 9.16 24.30
248 | 0.19657 22.00 5.860 0 0.4310 6.2260 79.20 8.0555 7 330.0 19.10 376.14 10.15 20.50
249 | 0.16439 22.00 5.860 0 0.4310 6.4330 49.10 7.8265 7 330.0 19.10 374.71 9.52 24.50
250 | 0.19073 22.00 5.860 0 0.4310 6.7180 17.50 7.8265 7 330.0 19.10 393.74 6.56 26.20
251 | 0.14030 22.00 5.860 0 0.4310 6.4870 13.00 7.3967 7 330.0 19.10 396.28 5.90 24.40
252 | 0.21409 22.00 5.860 0 0.4310 6.4380 8.90 7.3967 7 330.0 19.10 377.07 3.59 24.80
253 | 0.08221 22.00 5.860 0 0.4310 6.9570 6.80 8.9067 7 330.0 19.10 386.09 3.53 29.60
254 | 0.36894 22.00 5.860 0 0.4310 8.2590 8.40 8.9067 7 330.0 19.10 396.90 3.54 42.80
255 | 0.04819 80.00 3.640 0 0.3920 6.1080 32.00 9.2203 1 315.0 16.40 392.89 6.57 21.90
256 | 0.03548 80.00 3.640 0 0.3920 5.8760 19.10 9.2203 1 315.0 16.40 395.18 9.25 20.90
257 | 0.01538 90.00 3.750 0 0.3940 7.4540 34.20 6.3361 3 244.0 15.90 386.34 3.11 44.00
258 | 0.61154 20.00 3.970 0 0.6470 8.7040 86.90 1.8010 5 264.0 13.00 389.70 5.12 50.00
259 | 0.66351 20.00 3.970 0 0.6470 7.3330 100.00 1.8946 5 264.0 13.00 383.29 7.79 36.00
260 | 0.65665 20.00 3.970 0 0.6470 6.8420 100.00 2.0107 5 264.0 13.00 391.93 6.90 30.10
261 | 0.54011 20.00 3.970 0 0.6470 7.2030 81.80 2.1121 5 264.0 13.00 392.80 9.59 33.80
262 | 0.53412 20.00 3.970 0 0.6470 7.5200 89.40 2.1398 5 264.0 13.00 388.37 7.26 43.10
263 | 0.52014 20.00 3.970 0 0.6470 8.3980 91.50 2.2885 5 264.0 13.00 386.86 5.91 48.80
264 | 0.82526 20.00 3.970 0 0.6470 7.3270 94.50 2.0788 5 264.0 13.00 393.42 11.25 31.00
265 | 0.55007 20.00 3.970 0 0.6470 7.2060 91.60 1.9301 5 264.0 13.00 387.89 8.10 36.50
266 | 0.76162 20.00 3.970 0 0.6470 5.5600 62.80 1.9865 5 264.0 13.00 392.40 10.45 22.80
267 | 0.78570 20.00 3.970 0 0.6470 7.0140 84.60 2.1329 5 264.0 13.00 384.07 14.79 30.70
268 | 0.57834 20.00 3.970 0 0.5750 8.2970 67.00 2.4216 5 264.0 13.00 384.54 7.44 50.00
269 | 0.54050 20.00 3.970 0 0.5750 7.4700 52.60 2.8720 5 264.0 13.00 390.30 3.16 43.50
270 | 0.09065 20.00 6.960 1 0.4640 5.9200 61.50 3.9175 3 223.0 18.60 391.34 13.65 20.70
271 | 0.29916 20.00 6.960 0 0.4640 5.8560 42.10 4.4290 3 223.0 18.60 388.65 13.00 21.10
272 | 0.16211 20.00 6.960 0 0.4640 6.2400 16.30 4.4290 3 223.0 18.60 396.90 6.59 25.20
273 | 0.11460 20.00 6.960 0 0.4640 6.5380 58.70 3.9175 3 223.0 18.60 394.96 7.73 24.40
274 | 0.22188 20.00 6.960 1 0.4640 7.6910 51.80 4.3665 3 223.0 18.60 390.77 6.58 35.20
275 | 0.05644 40.00 6.410 1 0.4470 6.7580 32.90 4.0776 4 254.0 17.60 396.90 3.53 32.40
276 | 0.09604 40.00 6.410 0 0.4470 6.8540 42.80 4.2673 4 254.0 17.60 396.90 2.98 32.00
277 | 0.10469 40.00 6.410 1 0.4470 7.2670 49.00 4.7872 4 254.0 17.60 389.25 6.05 33.20
278 | 0.06127 40.00 6.410 1 0.4470 6.8260 27.60 4.8628 4 254.0 17.60 393.45 4.16 33.10
279 | 0.07978 40.00 6.410 0 0.4470 6.4820 32.10 4.1403 4 254.0 17.60 396.90 7.19 29.10
280 | 0.21038 20.00 3.330 0 0.4429 6.8120 32.20 4.1007 5 216.0 14.90 396.90 4.85 35.10
281 | 0.03578 20.00 3.330 0 0.4429 7.8200 64.50 4.6947 5 216.0 14.90 387.31 3.76 45.40
282 | 0.03705 20.00 3.330 0 0.4429 6.9680 37.20 5.2447 5 216.0 14.90 392.23 4.59 35.40
283 | 0.06129 20.00 3.330 1 0.4429 7.6450 49.70 5.2119 5 216.0 14.90 377.07 3.01 46.00
284 | 0.01501 90.00 1.210 1 0.4010 7.9230 24.80 5.8850 1 198.0 13.60 395.52 3.16 50.00
285 | 0.00906 90.00 2.970 0 0.4000 7.0880 20.80 7.3073 1 285.0 15.30 394.72 7.85 32.20
286 | 0.01096 55.00 2.250 0 0.3890 6.4530 31.90 7.3073 1 300.0 15.30 394.72 8.23 22.00
287 | 0.01965 80.00 1.760 0 0.3850 6.2300 31.50 9.0892 1 241.0 18.20 341.60 12.93 20.10
288 | 0.03871 52.50 5.320 0 0.4050 6.2090 31.30 7.3172 6 293.0 16.60 396.90 7.14 23.20
289 | 0.04590 52.50 5.320 0 0.4050 6.3150 45.60 7.3172 6 293.0 16.60 396.90 7.60 22.30
290 | 0.04297 52.50 5.320 0 0.4050 6.5650 22.90 7.3172 6 293.0 16.60 371.72 9.51 24.80
291 | 0.03502 80.00 4.950 0 0.4110 6.8610 27.90 5.1167 4 245.0 19.20 396.90 3.33 28.50
292 | 0.07886 80.00 4.950 0 0.4110 7.1480 27.70 5.1167 4 245.0 19.20 396.90 3.56 37.30
293 | 0.03615 80.00 4.950 0 0.4110 6.6300 23.40 5.1167 4 245.0 19.20 396.90 4.70 27.90
294 | 0.08265 0.00 13.920 0 0.4370 6.1270 18.40 5.5027 4 289.0 16.00 396.90 8.58 23.90
295 | 0.08199 0.00 13.920 0 0.4370 6.0090 42.30 5.5027 4 289.0 16.00 396.90 10.40 21.70
296 | 0.12932 0.00 13.920 0 0.4370 6.6780 31.10 5.9604 4 289.0 16.00 396.90 6.27 28.60
297 | 0.05372 0.00 13.920 0 0.4370 6.5490 51.00 5.9604 4 289.0 16.00 392.85 7.39 27.10
298 | 0.14103 0.00 13.920 0 0.4370 5.7900 58.00 6.3200 4 289.0 16.00 396.90 15.84 20.30
299 | 0.06466 70.00 2.240 0 0.4000 6.3450 20.10 7.8278 5 358.0 14.80 368.24 4.97 22.50
300 | 0.05561 70.00 2.240 0 0.4000 7.0410 10.00 7.8278 5 358.0 14.80 371.58 4.74 29.00
301 | 0.04417 70.00 2.240 0 0.4000 6.8710 47.40 7.8278 5 358.0 14.80 390.86 6.07 24.80
302 | 0.03537 34.00 6.090 0 0.4330 6.5900 40.40 5.4917 7 329.0 16.10 395.75 9.50 22.00
303 | 0.09266 34.00 6.090 0 0.4330 6.4950 18.40 5.4917 7 329.0 16.10 383.61 8.67 26.40
304 | 0.10000 34.00 6.090 0 0.4330 6.9820 17.70 5.4917 7 329.0 16.10 390.43 4.86 33.10
305 | 0.05515 33.00 2.180 0 0.4720 7.2360 41.10 4.0220 7 222.0 18.40 393.68 6.93 36.10
306 | 0.05479 33.00 2.180 0 0.4720 6.6160 58.10 3.3700 7 222.0 18.40 393.36 8.93 28.40
307 | 0.07503 33.00 2.180 0 0.4720 7.4200 71.90 3.0992 7 222.0 18.40 396.90 6.47 33.40
308 | 0.04932 33.00 2.180 0 0.4720 6.8490 70.30 3.1827 7 222.0 18.40 396.90 7.53 28.20
309 | 0.49298 0.00 9.900 0 0.5440 6.6350 82.50 3.3175 4 304.0 18.40 396.90 4.54 22.80
310 | 0.34940 0.00 9.900 0 0.5440 5.9720 76.70 3.1025 4 304.0 18.40 396.24 9.97 20.30
311 | 2.63548 0.00 9.900 0 0.5440 4.9730 37.80 2.5194 4 304.0 18.40 350.45 12.64 16.10
312 | 0.79041 0.00 9.900 0 0.5440 6.1220 52.80 2.6403 4 304.0 18.40 396.90 5.98 22.10
313 | 0.26169 0.00 9.900 0 0.5440 6.0230 90.40 2.8340 4 304.0 18.40 396.30 11.72 19.40
314 | 0.26938 0.00 9.900 0 0.5440 6.2660 82.80 3.2628 4 304.0 18.40 393.39 7.90 21.60
315 | 0.36920 0.00 9.900 0 0.5440 6.5670 87.30 3.6023 4 304.0 18.40 395.69 9.28 23.80
316 | 0.25356 0.00 9.900 0 0.5440 5.7050 77.70 3.9450 4 304.0 18.40 396.42 11.50 16.20
317 | 0.31827 0.00 9.900 0 0.5440 5.9140 83.20 3.9986 4 304.0 18.40 390.70 18.33 17.80
318 | 0.24522 0.00 9.900 0 0.5440 5.7820 71.70 4.0317 4 304.0 18.40 396.90 15.94 19.80
319 | 0.40202 0.00 9.900 0 0.5440 6.3820 67.20 3.5325 4 304.0 18.40 395.21 10.36 23.10
320 | 0.47547 0.00 9.900 0 0.5440 6.1130 58.80 4.0019 4 304.0 18.40 396.23 12.73 21.00
321 | 0.16760 0.00 7.380 0 0.4930 6.4260 52.30 4.5404 5 287.0 19.60 396.90 7.20 23.80
322 | 0.18159 0.00 7.380 0 0.4930 6.3760 54.30 4.5404 5 287.0 19.60 396.90 6.87 23.10
323 | 0.35114 0.00 7.380 0 0.4930 6.0410 49.90 4.7211 5 287.0 19.60 396.90 7.70 20.40
324 | 0.28392 0.00 7.380 0 0.4930 5.7080 74.30 4.7211 5 287.0 19.60 391.13 11.74 18.50
325 | 0.34109 0.00 7.380 0 0.4930 6.4150 40.10 4.7211 5 287.0 19.60 396.90 6.12 25.00
326 | 0.19186 0.00 7.380 0 0.4930 6.4310 14.70 5.4159 5 287.0 19.60 393.68 5.08 24.60
327 | 0.30347 0.00 7.380 0 0.4930 6.3120 28.90 5.4159 5 287.0 19.60 396.90 6.15 23.00
328 | 0.24103 0.00 7.380 0 0.4930 6.0830 43.70 5.4159 5 287.0 19.60 396.90 12.79 22.20
329 | 0.06617 0.00 3.240 0 0.4600 5.8680 25.80 5.2146 4 430.0 16.90 382.44 9.97 19.30
330 | 0.06724 0.00 3.240 0 0.4600 6.3330 17.20 5.2146 4 430.0 16.90 375.21 7.34 22.60
331 | 0.04544 0.00 3.240 0 0.4600 6.1440 32.20 5.8736 4 430.0 16.90 368.57 9.09 19.80
332 | 0.05023 35.00 6.060 0 0.4379 5.7060 28.40 6.6407 1 304.0 16.90 394.02 12.43 17.10
333 | 0.03466 35.00 6.060 0 0.4379 6.0310 23.30 6.6407 1 304.0 16.90 362.25 7.83 19.40
334 | 0.05083 0.00 5.190 0 0.5150 6.3160 38.10 6.4584 5 224.0 20.20 389.71 5.68 22.20
335 | 0.03738 0.00 5.190 0 0.5150 6.3100 38.50 6.4584 5 224.0 20.20 389.40 6.75 20.70
336 | 0.03961 0.00 5.190 0 0.5150 6.0370 34.50 5.9853 5 224.0 20.20 396.90 8.01 21.10
337 | 0.03427 0.00 5.190 0 0.5150 5.8690 46.30 5.2311 5 224.0 20.20 396.90 9.80 19.50
338 | 0.03041 0.00 5.190 0 0.5150 5.8950 59.60 5.6150 5 224.0 20.20 394.81 10.56 18.50
339 | 0.03306 0.00 5.190 0 0.5150 6.0590 37.30 4.8122 5 224.0 20.20 396.14 8.51 20.60
340 | 0.05497 0.00 5.190 0 0.5150 5.9850 45.40 4.8122 5 224.0 20.20 396.90 9.74 19.00
341 | 0.06151 0.00 5.190 0 0.5150 5.9680 58.50 4.8122 5 224.0 20.20 396.90 9.29 18.70
342 | 0.01301 35.00 1.520 0 0.4420 7.2410 49.30 7.0379 1 284.0 15.50 394.74 5.49 32.70
343 | 0.02498 0.00 1.890 0 0.5180 6.5400 59.70 6.2669 1 422.0 15.90 389.96 8.65 16.50
344 | 0.02543 55.00 3.780 0 0.4840 6.6960 56.40 5.7321 5 370.0 17.60 396.90 7.18 23.90
345 | 0.03049 55.00 3.780 0 0.4840 6.8740 28.10 6.4654 5 370.0 17.60 387.97 4.61 31.20
346 | 0.03113 0.00 4.390 0 0.4420 6.0140 48.50 8.0136 3 352.0 18.80 385.64 10.53 17.50
347 | 0.06162 0.00 4.390 0 0.4420 5.8980 52.30 8.0136 3 352.0 18.80 364.61 12.67 17.20
348 | 0.01870 85.00 4.150 0 0.4290 6.5160 27.70 8.5353 4 351.0 17.90 392.43 6.36 23.10
349 | 0.01501 80.00 2.010 0 0.4350 6.6350 29.70 8.3440 4 280.0 17.00 390.94 5.99 24.50
350 | 0.02899 40.00 1.250 0 0.4290 6.9390 34.50 8.7921 1 335.0 19.70 389.85 5.89 26.60
351 | 0.06211 40.00 1.250 0 0.4290 6.4900 44.40 8.7921 1 335.0 19.70 396.90 5.98 22.90
352 | 0.07950 60.00 1.690 0 0.4110 6.5790 35.90 10.7103 4 411.0 18.30 370.78 5.49 24.10
353 | 0.07244 60.00 1.690 0 0.4110 5.8840 18.50 10.7103 4 411.0 18.30 392.33 7.79 18.60
354 | 0.01709 90.00 2.020 0 0.4100 6.7280 36.10 12.1265 5 187.0 17.00 384.46 4.50 30.10
355 | 0.04301 80.00 1.910 0 0.4130 5.6630 21.90 10.5857 4 334.0 22.00 382.80 8.05 18.20
356 | 0.10659 80.00 1.910 0 0.4130 5.9360 19.50 10.5857 4 334.0 22.00 376.04 5.57 20.60
357 | 8.98296 0.00 18.100 1 0.7700 6.2120 97.40 2.1222 24 666.0 20.20 377.73 17.60 17.80
358 | 3.84970 0.00 18.100 1 0.7700 6.3950 91.00 2.5052 24 666.0 20.20 391.34 13.27 21.70
359 | 5.20177 0.00 18.100 1 0.7700 6.1270 83.40 2.7227 24 666.0 20.20 395.43 11.48 22.70
360 | 4.26131 0.00 18.100 0 0.7700 6.1120 81.30 2.5091 24 666.0 20.20 390.74 12.67 22.60
361 | 4.54192 0.00 18.100 0 0.7700 6.3980 88.00 2.5182 24 666.0 20.20 374.56 7.79 25.00
362 | 3.83684 0.00 18.100 0 0.7700 6.2510 91.10 2.2955 24 666.0 20.20 350.65 14.19 19.90
363 | 3.67822 0.00 18.100 0 0.7700 5.3620 96.20 2.1036 24 666.0 20.20 380.79 10.19 20.80
364 | 4.22239 0.00 18.100 1 0.7700 5.8030 89.00 1.9047 24 666.0 20.20 353.04 14.64 16.80
365 | 3.47428 0.00 18.100 1 0.7180 8.7800 82.90 1.9047 24 666.0 20.20 354.55 5.29 21.90
366 | 4.55587 0.00 18.100 0 0.7180 3.5610 87.90 1.6132 24 666.0 20.20 354.70 7.12 27.50
367 | 3.69695 0.00 18.100 0 0.7180 4.9630 91.40 1.7523 24 666.0 20.20 316.03 14.00 21.90
368 | 13.52220 0.00 18.100 0 0.6310 3.8630 100.00 1.5106 24 666.0 20.20 131.42 13.33 23.10
369 | 4.89822 0.00 18.100 0 0.6310 4.9700 100.00 1.3325 24 666.0 20.20 375.52 3.26 50.00
370 | 5.66998 0.00 18.100 1 0.6310 6.6830 96.80 1.3567 24 666.0 20.20 375.33 3.73 50.00
371 | 6.53876 0.00 18.100 1 0.6310 7.0160 97.50 1.2024 24 666.0 20.20 392.05 2.96 50.00
372 | 9.23230 0.00 18.100 0 0.6310 6.2160 100.00 1.1691 24 666.0 20.20 366.15 9.53 50.00
373 | 8.26725 0.00 18.100 1 0.6680 5.8750 89.60 1.1296 24 666.0 20.20 347.88 8.88 50.00
374 | 11.10810 0.00 18.100 0 0.6680 4.9060 100.00 1.1742 24 666.0 20.20 396.90 34.77 13.80
375 | 18.49820 0.00 18.100 0 0.6680 4.1380 100.00 1.1370 24 666.0 20.20 396.90 37.97 13.80
376 | 19.60910 0.00 18.100 0 0.6710 7.3130 97.90 1.3163 24 666.0 20.20 396.90 13.44 15.00
377 | 15.28800 0.00 18.100 0 0.6710 6.6490 93.30 1.3449 24 666.0 20.20 363.02 23.24 13.90
378 | 9.82349 0.00 18.100 0 0.6710 6.7940 98.80 1.3580 24 666.0 20.20 396.90 21.24 13.30
379 | 23.64820 0.00 18.100 0 0.6710 6.3800 96.20 1.3861 24 666.0 20.20 396.90 23.69 13.10
380 | 17.86670 0.00 18.100 0 0.6710 6.2230 100.00 1.3861 24 666.0 20.20 393.74 21.78 10.20
381 | 88.97620 0.00 18.100 0 0.6710 6.9680 91.90 1.4165 24 666.0 20.20 396.90 17.21 10.40
382 | 15.87440 0.00 18.100 0 0.6710 6.5450 99.10 1.5192 24 666.0 20.20 396.90 21.08 10.90
383 | 9.18702 0.00 18.100 0 0.7000 5.5360 100.00 1.5804 24 666.0 20.20 396.90 23.60 11.30
384 | 7.99248 0.00 18.100 0 0.7000 5.5200 100.00 1.5331 24 666.0 20.20 396.90 24.56 12.30
385 | 20.08490 0.00 18.100 0 0.7000 4.3680 91.20 1.4395 24 666.0 20.20 285.83 30.63 8.80
386 | 16.81180 0.00 18.100 0 0.7000 5.2770 98.10 1.4261 24 666.0 20.20 396.90 30.81 7.20
387 | 24.39380 0.00 18.100 0 0.7000 4.6520 100.00 1.4672 24 666.0 20.20 396.90 28.28 10.50
388 | 22.59710 0.00 18.100 0 0.7000 5.0000 89.50 1.5184 24 666.0 20.20 396.90 31.99 7.40
389 | 14.33370 0.00 18.100 0 0.7000 4.8800 100.00 1.5895 24 666.0 20.20 372.92 30.62 10.20
390 | 8.15174 0.00 18.100 0 0.7000 5.3900 98.90 1.7281 24 666.0 20.20 396.90 20.85 11.50
391 | 6.96215 0.00 18.100 0 0.7000 5.7130 97.00 1.9265 24 666.0 20.20 394.43 17.11 15.10
392 | 5.29305 0.00 18.100 0 0.7000 6.0510 82.50 2.1678 24 666.0 20.20 378.38 18.76 23.20
393 | 11.57790 0.00 18.100 0 0.7000 5.0360 97.00 1.7700 24 666.0 20.20 396.90 25.68 9.70
394 | 8.64476 0.00 18.100 0 0.6930 6.1930 92.60 1.7912 24 666.0 20.20 396.90 15.17 13.80
395 | 13.35980 0.00 18.100 0 0.6930 5.8870 94.70 1.7821 24 666.0 20.20 396.90 16.35 12.70
396 | 8.71675 0.00 18.100 0 0.6930 6.4710 98.80 1.7257 24 666.0 20.20 391.98 17.12 13.10
397 | 5.87205 0.00 18.100 0 0.6930 6.4050 96.00 1.6768 24 666.0 20.20 396.90 19.37 12.50
398 | 7.67202 0.00 18.100 0 0.6930 5.7470 98.90 1.6334 24 666.0 20.20 393.10 19.92 8.50
399 | 38.35180 0.00 18.100 0 0.6930 5.4530 100.00 1.4896 24 666.0 20.20 396.90 30.59 5.00
400 | 9.91655 0.00 18.100 0 0.6930 5.8520 77.80 1.5004 24 666.0 20.20 338.16 29.97 6.30
401 | 25.04610 0.00 18.100 0 0.6930 5.9870 100.00 1.5888 24 666.0 20.20 396.90 26.77 5.60
402 | 14.23620 0.00 18.100 0 0.6930 6.3430 100.00 1.5741 24 666.0 20.20 396.90 20.32 7.20
403 | 9.59571 0.00 18.100 0 0.6930 6.4040 100.00 1.6390 24 666.0 20.20 376.11 20.31 12.10
404 | 24.80170 0.00 18.100 0 0.6930 5.3490 96.00 1.7028 24 666.0 20.20 396.90 19.77 8.30
405 | 41.52920 0.00 18.100 0 0.6930 5.5310 85.40 1.6074 24 666.0 20.20 329.46 27.38 8.50
406 | 67.92080 0.00 18.100 0 0.6930 5.6830 100.00 1.4254 24 666.0 20.20 384.97 22.98 5.00
407 | 20.71620 0.00 18.100 0 0.6590 4.1380 100.00 1.1781 24 666.0 20.20 370.22 23.34 11.90
408 | 11.95110 0.00 18.100 0 0.6590 5.6080 100.00 1.2852 24 666.0 20.20 332.09 12.13 27.90
409 | 7.40389 0.00 18.100 0 0.5970 5.6170 97.90 1.4547 24 666.0 20.20 314.64 26.40 17.20
410 | 14.43830 0.00 18.100 0 0.5970 6.8520 100.00 1.4655 24 666.0 20.20 179.36 19.78 27.50
411 | 51.13580 0.00 18.100 0 0.5970 5.7570 100.00 1.4130 24 666.0 20.20 2.60 10.11 15.00
412 | 14.05070 0.00 18.100 0 0.5970 6.6570 100.00 1.5275 24 666.0 20.20 35.05 21.22 17.20
413 | 18.81100 0.00 18.100 0 0.5970 4.6280 100.00 1.5539 24 666.0 20.20 28.79 34.37 17.90
414 | 28.65580 0.00 18.100 0 0.5970 5.1550 100.00 1.5894 24 666.0 20.20 210.97 20.08 16.30
415 | 45.74610 0.00 18.100 0 0.6930 4.5190 100.00 1.6582 24 666.0 20.20 88.27 36.98 7.00
416 | 18.08460 0.00 18.100 0 0.6790 6.4340 100.00 1.8347 24 666.0 20.20 27.25 29.05 7.20
417 | 10.83420 0.00 18.100 0 0.6790 6.7820 90.80 1.8195 24 666.0 20.20 21.57 25.79 7.50
418 | 25.94060 0.00 18.100 0 0.6790 5.3040 89.10 1.6475 24 666.0 20.20 127.36 26.64 10.40
419 | 73.53410 0.00 18.100 0 0.6790 5.9570 100.00 1.8026 24 666.0 20.20 16.45 20.62 8.80
420 | 11.81230 0.00 18.100 0 0.7180 6.8240 76.50 1.7940 24 666.0 20.20 48.45 22.74 8.40
421 | 11.08740 0.00 18.100 0 0.7180 6.4110 100.00 1.8589 24 666.0 20.20 318.75 15.02 16.70
422 | 7.02259 0.00 18.100 0 0.7180 6.0060 95.30 1.8746 24 666.0 20.20 319.98 15.70 14.20
423 | 12.04820 0.00 18.100 0 0.6140 5.6480 87.60 1.9512 24 666.0 20.20 291.55 14.10 20.80
424 | 7.05042 0.00 18.100 0 0.6140 6.1030 85.10 2.0218 24 666.0 20.20 2.52 23.29 13.40
425 | 8.79212 0.00 18.100 0 0.5840 5.5650 70.60 2.0635 24 666.0 20.20 3.65 17.16 11.70
426 | 15.86030 0.00 18.100 0 0.6790 5.8960 95.40 1.9096 24 666.0 20.20 7.68 24.39 8.30
427 | 12.24720 0.00 18.100 0 0.5840 5.8370 59.70 1.9976 24 666.0 20.20 24.65 15.69 10.20
428 | 37.66190 0.00 18.100 0 0.6790 6.2020 78.70 1.8629 24 666.0 20.20 18.82 14.52 10.90
429 | 7.36711 0.00 18.100 0 0.6790 6.1930 78.10 1.9356 24 666.0 20.20 96.73 21.52 11.00
430 | 9.33889 0.00 18.100 0 0.6790 6.3800 95.60 1.9682 24 666.0 20.20 60.72 24.08 9.50
431 | 8.49213 0.00 18.100 0 0.5840 6.3480 86.10 2.0527 24 666.0 20.20 83.45 17.64 14.50
432 | 10.06230 0.00 18.100 0 0.5840 6.8330 94.30 2.0882 24 666.0 20.20 81.33 19.69 14.10
433 | 6.44405 0.00 18.100 0 0.5840 6.4250 74.80 2.2004 24 666.0 20.20 97.95 12.03 16.10
434 | 5.58107 0.00 18.100 0 0.7130 6.4360 87.90 2.3158 24 666.0 20.20 100.19 16.22 14.30
435 | 13.91340 0.00 18.100 0 0.7130 6.2080 95.00 2.2222 24 666.0 20.20 100.63 15.17 11.70
436 | 11.16040 0.00 18.100 0 0.7400 6.6290 94.60 2.1247 24 666.0 20.20 109.85 23.27 13.40
437 | 14.42080 0.00 18.100 0 0.7400 6.4610 93.30 2.0026 24 666.0 20.20 27.49 18.05 9.60
438 | 15.17720 0.00 18.100 0 0.7400 6.1520 100.00 1.9142 24 666.0 20.20 9.32 26.45 8.70
439 | 13.67810 0.00 18.100 0 0.7400 5.9350 87.90 1.8206 24 666.0 20.20 68.95 34.02 8.40
440 | 9.39063 0.00 18.100 0 0.7400 5.6270 93.90 1.8172 24 666.0 20.20 396.90 22.88 12.80
441 | 22.05110 0.00 18.100 0 0.7400 5.8180 92.40 1.8662 24 666.0 20.20 391.45 22.11 10.50
442 | 9.72418 0.00 18.100 0 0.7400 6.4060 97.20 2.0651 24 666.0 20.20 385.96 19.52 17.10
443 | 5.66637 0.00 18.100 0 0.7400 6.2190 100.00 2.0048 24 666.0 20.20 395.69 16.59 18.40
444 | 9.96654 0.00 18.100 0 0.7400 6.4850 100.00 1.9784 24 666.0 20.20 386.73 18.85 15.40
445 | 12.80230 0.00 18.100 0 0.7400 5.8540 96.60 1.8956 24 666.0 20.20 240.52 23.79 10.80
446 | 10.67180 0.00 18.100 0 0.7400 6.4590 94.80 1.9879 24 666.0 20.20 43.06 23.98 11.80
447 | 6.28807 0.00 18.100 0 0.7400 6.3410 96.40 2.0720 24 666.0 20.20 318.01 17.79 14.90
448 | 9.92485 0.00 18.100 0 0.7400 6.2510 96.60 2.1980 24 666.0 20.20 388.52 16.44 12.60
449 | 9.32909 0.00 18.100 0 0.7130 6.1850 98.70 2.2616 24 666.0 20.20 396.90 18.13 14.10
450 | 7.52601 0.00 18.100 0 0.7130 6.4170 98.30 2.1850 24 666.0 20.20 304.21 19.31 13.00
451 | 6.71772 0.00 18.100 0 0.7130 6.7490 92.60 2.3236 24 666.0 20.20 0.32 17.44 13.40
452 | 5.44114 0.00 18.100 0 0.7130 6.6550 98.20 2.3552 24 666.0 20.20 355.29 17.73 15.20
453 | 5.09017 0.00 18.100 0 0.7130 6.2970 91.80 2.3682 24 666.0 20.20 385.09 17.27 16.10
454 | 8.24809 0.00 18.100 0 0.7130 7.3930 99.30 2.4527 24 666.0 20.20 375.87 16.74 17.80
455 | 9.51363 0.00 18.100 0 0.7130 6.7280 94.10 2.4961 24 666.0 20.20 6.68 18.71 14.90
456 | 4.75237 0.00 18.100 0 0.7130 6.5250 86.50 2.4358 24 666.0 20.20 50.92 18.13 14.10
457 | 4.66883 0.00 18.100 0 0.7130 5.9760 87.90 2.5806 24 666.0 20.20 10.48 19.01 12.70
458 | 8.20058 0.00 18.100 0 0.7130 5.9360 80.30 2.7792 24 666.0 20.20 3.50 16.94 13.50
459 | 7.75223 0.00 18.100 0 0.7130 6.3010 83.70 2.7831 24 666.0 20.20 272.21 16.23 14.90
460 | 6.80117 0.00 18.100 0 0.7130 6.0810 84.40 2.7175 24 666.0 20.20 396.90 14.70 20.00
461 | 4.81213 0.00 18.100 0 0.7130 6.7010 90.00 2.5975 24 666.0 20.20 255.23 16.42 16.40
462 | 3.69311 0.00 18.100 0 0.7130 6.3760 88.40 2.5671 24 666.0 20.20 391.43 14.65 17.70
463 | 6.65492 0.00 18.100 0 0.7130 6.3170 83.00 2.7344 24 666.0 20.20 396.90 13.99 19.50
464 | 5.82115 0.00 18.100 0 0.7130 6.5130 89.90 2.8016 24 666.0 20.20 393.82 10.29 20.20
465 | 7.83932 0.00 18.100 0 0.6550 6.2090 65.40 2.9634 24 666.0 20.20 396.90 13.22 21.40
466 | 3.16360 0.00 18.100 0 0.6550 5.7590 48.20 3.0665 24 666.0 20.20 334.40 14.13 19.90
467 | 3.77498 0.00 18.100 0 0.6550 5.9520 84.70 2.8715 24 666.0 20.20 22.01 17.15 19.00
468 | 4.42228 0.00 18.100 0 0.5840 6.0030 94.50 2.5403 24 666.0 20.20 331.29 21.32 19.10
469 | 15.57570 0.00 18.100 0 0.5800 5.9260 71.00 2.9084 24 666.0 20.20 368.74 18.13 19.10
470 | 13.07510 0.00 18.100 0 0.5800 5.7130 56.70 2.8237 24 666.0 20.20 396.90 14.76 20.10
471 | 4.34879 0.00 18.100 0 0.5800 6.1670 84.00 3.0334 24 666.0 20.20 396.90 16.29 19.90
472 | 4.03841 0.00 18.100 0 0.5320 6.2290 90.70 3.0993 24 666.0 20.20 395.33 12.87 19.60
473 | 3.56868 0.00 18.100 0 0.5800 6.4370 75.00 2.8965 24 666.0 20.20 393.37 14.36 23.20
474 | 4.64689 0.00 18.100 0 0.6140 6.9800 67.60 2.5329 24 666.0 20.20 374.68 11.66 29.80
475 | 8.05579 0.00 18.100 0 0.5840 5.4270 95.40 2.4298 24 666.0 20.20 352.58 18.14 13.80
476 | 6.39312 0.00 18.100 0 0.5840 6.1620 97.40 2.2060 24 666.0 20.20 302.76 24.10 13.30
477 | 4.87141 0.00 18.100 0 0.6140 6.4840 93.60 2.3053 24 666.0 20.20 396.21 18.68 16.70
478 | 15.02340 0.00 18.100 0 0.6140 5.3040 97.30 2.1007 24 666.0 20.20 349.48 24.91 12.00
479 | 10.23300 0.00 18.100 0 0.6140 6.1850 96.70 2.1705 24 666.0 20.20 379.70 18.03 14.60
480 | 14.33370 0.00 18.100 0 0.6140 6.2290 88.00 1.9512 24 666.0 20.20 383.32 13.11 21.40
481 | 5.82401 0.00 18.100 0 0.5320 6.2420 64.70 3.4242 24 666.0 20.20 396.90 10.74 23.00
482 | 5.70818 0.00 18.100 0 0.5320 6.7500 74.90 3.3317 24 666.0 20.20 393.07 7.74 23.70
483 | 5.73116 0.00 18.100 0 0.5320 7.0610 77.00 3.4106 24 666.0 20.20 395.28 7.01 25.00
484 | 2.81838 0.00 18.100 0 0.5320 5.7620 40.30 4.0983 24 666.0 20.20 392.92 10.42 21.80
485 | 2.37857 0.00 18.100 0 0.5830 5.8710 41.90 3.7240 24 666.0 20.20 370.73 13.34 20.60
486 | 3.67367 0.00 18.100 0 0.5830 6.3120 51.90 3.9917 24 666.0 20.20 388.62 10.58 21.20
487 | 5.69175 0.00 18.100 0 0.5830 6.1140 79.80 3.5459 24 666.0 20.20 392.68 14.98 19.10
488 | 4.83567 0.00 18.100 0 0.5830 5.9050 53.20 3.1523 24 666.0 20.20 388.22 11.45 20.60
489 | 0.15086 0.00 27.740 0 0.6090 5.4540 92.70 1.8209 4 711.0 20.10 395.09 18.06 15.20
490 | 0.18337 0.00 27.740 0 0.6090 5.4140 98.30 1.7554 4 711.0 20.10 344.05 23.97 7.00
491 | 0.20746 0.00 27.740 0 0.6090 5.0930 98.00 1.8226 4 711.0 20.10 318.43 29.68 8.10
492 | 0.10574 0.00 27.740 0 0.6090 5.9830 98.80 1.8681 4 711.0 20.10 390.11 18.07 13.60
493 | 0.11132 0.00 27.740 0 0.6090 5.9830 83.50 2.1099 4 711.0 20.10 396.90 13.35 20.10
494 | 0.17331 0.00 9.690 0 0.5850 5.7070 54.00 2.3817 6 391.0 19.20 396.90 12.01 21.80
495 | 0.27957 0.00 9.690 0 0.5850 5.9260 42.60 2.3817 6 391.0 19.20 396.90 13.59 24.50
496 | 0.17899 0.00 9.690 0 0.5850 5.6700 28.80 2.7986 6 391.0 19.20 393.29 17.60 23.10
497 | 0.28960 0.00 9.690 0 0.5850 5.3900 72.90 2.7986 6 391.0 19.20 396.90 21.14 19.70
498 | 0.26838 0.00 9.690 0 0.5850 5.7940 70.60 2.8927 6 391.0 19.20 396.90 14.10 18.30
499 | 0.23912 0.00 9.690 0 0.5850 6.0190 65.30 2.4091 6 391.0 19.20 396.90 12.92 21.20
500 | 0.17783 0.00 9.690 0 0.5850 5.5690 73.50 2.3999 6 391.0 19.20 395.77 15.10 17.50
501 | 0.22438 0.00 9.690 0 0.5850 6.0270 79.70 2.4982 6 391.0 19.20 396.90 14.33 16.80
502 | 0.06263 0.00 11.930 0 0.5730 6.5930 69.10 2.4786 1 273.0 21.00 391.99 9.67 22.40
503 | 0.04527 0.00 11.930 0 0.5730 6.1200 76.70 2.2875 1 273.0 21.00 396.90 9.08 20.60
504 | 0.06076 0.00 11.930 0 0.5730 6.9760 91.00 2.1675 1 273.0 21.00 396.90 5.64 23.90
505 | 0.10959 0.00 11.930 0 0.5730 6.7940 89.30 2.3889 1 273.0 21.00 393.45 6.48 22.00
506 | 0.04741 0.00 11.930 0 0.5730 6.0300 80.80 2.5050 1 273.0 21.00 396.90 7.88 11.90
507 |
--------------------------------------------------------------------------------
/deeplearning/dataset/iris.csv:
--------------------------------------------------------------------------------
1 | 5.1,3.5,1.4,0.2,Iris-setosa
2 | 4.9,3.0,1.4,0.2,Iris-setosa
3 | 4.7,3.2,1.3,0.2,Iris-setosa
4 | 4.6,3.1,1.5,0.2,Iris-setosa
5 | 5.0,3.6,1.4,0.2,Iris-setosa
6 | 5.4,3.9,1.7,0.4,Iris-setosa
7 | 4.6,3.4,1.4,0.3,Iris-setosa
8 | 5.0,3.4,1.5,0.2,Iris-setosa
9 | 4.4,2.9,1.4,0.2,Iris-setosa
10 | 4.9,3.1,1.5,0.1,Iris-setosa
11 | 5.4,3.7,1.5,0.2,Iris-setosa
12 | 4.8,3.4,1.6,0.2,Iris-setosa
13 | 4.8,3.0,1.4,0.1,Iris-setosa
14 | 4.3,3.0,1.1,0.1,Iris-setosa
15 | 5.8,4.0,1.2,0.2,Iris-setosa
16 | 5.7,4.4,1.5,0.4,Iris-setosa
17 | 5.4,3.9,1.3,0.4,Iris-setosa
18 | 5.1,3.5,1.4,0.3,Iris-setosa
19 | 5.7,3.8,1.7,0.3,Iris-setosa
20 | 5.1,3.8,1.5,0.3,Iris-setosa
21 | 5.4,3.4,1.7,0.2,Iris-setosa
22 | 5.1,3.7,1.5,0.4,Iris-setosa
23 | 4.6,3.6,1.0,0.2,Iris-setosa
24 | 5.1,3.3,1.7,0.5,Iris-setosa
25 | 4.8,3.4,1.9,0.2,Iris-setosa
26 | 5.0,3.0,1.6,0.2,Iris-setosa
27 | 5.0,3.4,1.6,0.4,Iris-setosa
28 | 5.2,3.5,1.5,0.2,Iris-setosa
29 | 5.2,3.4,1.4,0.2,Iris-setosa
30 | 4.7,3.2,1.6,0.2,Iris-setosa
31 | 4.8,3.1,1.6,0.2,Iris-setosa
32 | 5.4,3.4,1.5,0.4,Iris-setosa
33 | 5.2,4.1,1.5,0.1,Iris-setosa
34 | 5.5,4.2,1.4,0.2,Iris-setosa
35 | 4.9,3.1,1.5,0.1,Iris-setosa
36 | 5.0,3.2,1.2,0.2,Iris-setosa
37 | 5.5,3.5,1.3,0.2,Iris-setosa
38 | 4.9,3.1,1.5,0.1,Iris-setosa
39 | 4.4,3.0,1.3,0.2,Iris-setosa
40 | 5.1,3.4,1.5,0.2,Iris-setosa
41 | 5.0,3.5,1.3,0.3,Iris-setosa
42 | 4.5,2.3,1.3,0.3,Iris-setosa
43 | 4.4,3.2,1.3,0.2,Iris-setosa
44 | 5.0,3.5,1.6,0.6,Iris-setosa
45 | 5.1,3.8,1.9,0.4,Iris-setosa
46 | 4.8,3.0,1.4,0.3,Iris-setosa
47 | 5.1,3.8,1.6,0.2,Iris-setosa
48 | 4.6,3.2,1.4,0.2,Iris-setosa
49 | 5.3,3.7,1.5,0.2,Iris-setosa
50 | 5.0,3.3,1.4,0.2,Iris-setosa
51 | 7.0,3.2,4.7,1.4,Iris-versicolor
52 | 6.4,3.2,4.5,1.5,Iris-versicolor
53 | 6.9,3.1,4.9,1.5,Iris-versicolor
54 | 5.5,2.3,4.0,1.3,Iris-versicolor
55 | 6.5,2.8,4.6,1.5,Iris-versicolor
56 | 5.7,2.8,4.5,1.3,Iris-versicolor
57 | 6.3,3.3,4.7,1.6,Iris-versicolor
58 | 4.9,2.4,3.3,1.0,Iris-versicolor
59 | 6.6,2.9,4.6,1.3,Iris-versicolor
60 | 5.2,2.7,3.9,1.4,Iris-versicolor
61 | 5.0,2.0,3.5,1.0,Iris-versicolor
62 | 5.9,3.0,4.2,1.5,Iris-versicolor
63 | 6.0,2.2,4.0,1.0,Iris-versicolor
64 | 6.1,2.9,4.7,1.4,Iris-versicolor
65 | 5.6,2.9,3.6,1.3,Iris-versicolor
66 | 6.7,3.1,4.4,1.4,Iris-versicolor
67 | 5.6,3.0,4.5,1.5,Iris-versicolor
68 | 5.8,2.7,4.1,1.0,Iris-versicolor
69 | 6.2,2.2,4.5,1.5,Iris-versicolor
70 | 5.6,2.5,3.9,1.1,Iris-versicolor
71 | 5.9,3.2,4.8,1.8,Iris-versicolor
72 | 6.1,2.8,4.0,1.3,Iris-versicolor
73 | 6.3,2.5,4.9,1.5,Iris-versicolor
74 | 6.1,2.8,4.7,1.2,Iris-versicolor
75 | 6.4,2.9,4.3,1.3,Iris-versicolor
76 | 6.6,3.0,4.4,1.4,Iris-versicolor
77 | 6.8,2.8,4.8,1.4,Iris-versicolor
78 | 6.7,3.0,5.0,1.7,Iris-versicolor
79 | 6.0,2.9,4.5,1.5,Iris-versicolor
80 | 5.7,2.6,3.5,1.0,Iris-versicolor
81 | 5.5,2.4,3.8,1.1,Iris-versicolor
82 | 5.5,2.4,3.7,1.0,Iris-versicolor
83 | 5.8,2.7,3.9,1.2,Iris-versicolor
84 | 6.0,2.7,5.1,1.6,Iris-versicolor
85 | 5.4,3.0,4.5,1.5,Iris-versicolor
86 | 6.0,3.4,4.5,1.6,Iris-versicolor
87 | 6.7,3.1,4.7,1.5,Iris-versicolor
88 | 6.3,2.3,4.4,1.3,Iris-versicolor
89 | 5.6,3.0,4.1,1.3,Iris-versicolor
90 | 5.5,2.5,4.0,1.3,Iris-versicolor
91 | 5.5,2.6,4.4,1.2,Iris-versicolor
92 | 6.1,3.0,4.6,1.4,Iris-versicolor
93 | 5.8,2.6,4.0,1.2,Iris-versicolor
94 | 5.0,2.3,3.3,1.0,Iris-versicolor
95 | 5.6,2.7,4.2,1.3,Iris-versicolor
96 | 5.7,3.0,4.2,1.2,Iris-versicolor
97 | 5.7,2.9,4.2,1.3,Iris-versicolor
98 | 6.2,2.9,4.3,1.3,Iris-versicolor
99 | 5.1,2.5,3.0,1.1,Iris-versicolor
100 | 5.7,2.8,4.1,1.3,Iris-versicolor
101 | 6.3,3.3,6.0,2.5,Iris-virginica
102 | 5.8,2.7,5.1,1.9,Iris-virginica
103 | 7.1,3.0,5.9,2.1,Iris-virginica
104 | 6.3,2.9,5.6,1.8,Iris-virginica
105 | 6.5,3.0,5.8,2.2,Iris-virginica
106 | 7.6,3.0,6.6,2.1,Iris-virginica
107 | 4.9,2.5,4.5,1.7,Iris-virginica
108 | 7.3,2.9,6.3,1.8,Iris-virginica
109 | 6.7,2.5,5.8,1.8,Iris-virginica
110 | 7.2,3.6,6.1,2.5,Iris-virginica
111 | 6.5,3.2,5.1,2.0,Iris-virginica
112 | 6.4,2.7,5.3,1.9,Iris-virginica
113 | 6.8,3.0,5.5,2.1,Iris-virginica
114 | 5.7,2.5,5.0,2.0,Iris-virginica
115 | 5.8,2.8,5.1,2.4,Iris-virginica
116 | 6.4,3.2,5.3,2.3,Iris-virginica
117 | 6.5,3.0,5.5,1.8,Iris-virginica
118 | 7.7,3.8,6.7,2.2,Iris-virginica
119 | 7.7,2.6,6.9,2.3,Iris-virginica
120 | 6.0,2.2,5.0,1.5,Iris-virginica
121 | 6.9,3.2,5.7,2.3,Iris-virginica
122 | 5.6,2.8,4.9,2.0,Iris-virginica
123 | 7.7,2.8,6.7,2.0,Iris-virginica
124 | 6.3,2.7,4.9,1.8,Iris-virginica
125 | 6.7,3.3,5.7,2.1,Iris-virginica
126 | 7.2,3.2,6.0,1.8,Iris-virginica
127 | 6.2,2.8,4.8,1.8,Iris-virginica
128 | 6.1,3.0,4.9,1.8,Iris-virginica
129 | 6.4,2.8,5.6,2.1,Iris-virginica
130 | 7.2,3.0,5.8,1.6,Iris-virginica
131 | 7.4,2.8,6.1,1.9,Iris-virginica
132 | 7.9,3.8,6.4,2.0,Iris-virginica
133 | 6.4,2.8,5.6,2.2,Iris-virginica
134 | 6.3,2.8,5.1,1.5,Iris-virginica
135 | 6.1,2.6,5.6,1.4,Iris-virginica
136 | 7.7,3.0,6.1,2.3,Iris-virginica
137 | 6.3,3.4,5.6,2.4,Iris-virginica
138 | 6.4,3.1,5.5,1.8,Iris-virginica
139 | 6.0,3.0,4.8,1.8,Iris-virginica
140 | 6.9,3.1,5.4,2.1,Iris-virginica
141 | 6.7,3.1,5.6,2.4,Iris-virginica
142 | 6.9,3.1,5.1,2.3,Iris-virginica
143 | 5.8,2.7,5.1,1.9,Iris-virginica
144 | 6.8,3.2,5.9,2.3,Iris-virginica
145 | 6.7,3.3,5.7,2.5,Iris-virginica
146 | 6.7,3.0,5.2,2.3,Iris-virginica
147 | 6.3,2.5,5.0,1.9,Iris-virginica
148 | 6.5,3.0,5.2,2.0,Iris-virginica
149 | 6.2,3.4,5.4,2.3,Iris-virginica
150 | 5.9,3.0,5.1,1.8,Iris-virginica
151 |
152 |
--------------------------------------------------------------------------------
/deeplearning/dataset/pima-indians-diabetes.csv:
--------------------------------------------------------------------------------
1 | 6,148,72,35,0,33.6,0.627,50,1
2 | 1,85,66,29,0,26.6,0.351,31,0
3 | 8,183,64,0,0,23.3,0.672,32,1
4 | 1,89,66,23,94,28.1,0.167,21,0
5 | 0,137,40,35,168,43.1,2.288,33,1
6 | 5,116,74,0,0,25.6,0.201,30,0
7 | 3,78,50,32,88,31.0,0.248,26,1
8 | 10,115,0,0,0,35.3,0.134,29,0
9 | 2,197,70,45,543,30.5,0.158,53,1
10 | 8,125,96,0,0,0.0,0.232,54,1
11 | 4,110,92,0,0,37.6,0.191,30,0
12 | 10,168,74,0,0,38.0,0.537,34,1
13 | 10,139,80,0,0,27.1,1.441,57,0
14 | 1,189,60,23,846,30.1,0.398,59,1
15 | 5,166,72,19,175,25.8,0.587,51,1
16 | 7,100,0,0,0,30.0,0.484,32,1
17 | 0,118,84,47,230,45.8,0.551,31,1
18 | 7,107,74,0,0,29.6,0.254,31,1
19 | 1,103,30,38,83,43.3,0.183,33,0
20 | 1,115,70,30,96,34.6,0.529,32,1
21 | 3,126,88,41,235,39.3,0.704,27,0
22 | 8,99,84,0,0,35.4,0.388,50,0
23 | 7,196,90,0,0,39.8,0.451,41,1
24 | 9,119,80,35,0,29.0,0.263,29,1
25 | 11,143,94,33,146,36.6,0.254,51,1
26 | 10,125,70,26,115,31.1,0.205,41,1
27 | 7,147,76,0,0,39.4,0.257,43,1
28 | 1,97,66,15,140,23.2,0.487,22,0
29 | 13,145,82,19,110,22.2,0.245,57,0
30 | 5,117,92,0,0,34.1,0.337,38,0
31 | 5,109,75,26,0,36.0,0.546,60,0
32 | 3,158,76,36,245,31.6,0.851,28,1
33 | 3,88,58,11,54,24.8,0.267,22,0
34 | 6,92,92,0,0,19.9,0.188,28,0
35 | 10,122,78,31,0,27.6,0.512,45,0
36 | 4,103,60,33,192,24.0,0.966,33,0
37 | 11,138,76,0,0,33.2,0.420,35,0
38 | 9,102,76,37,0,32.9,0.665,46,1
39 | 2,90,68,42,0,38.2,0.503,27,1
40 | 4,111,72,47,207,37.1,1.390,56,1
41 | 3,180,64,25,70,34.0,0.271,26,0
42 | 7,133,84,0,0,40.2,0.696,37,0
43 | 7,106,92,18,0,22.7,0.235,48,0
44 | 9,171,110,24,240,45.4,0.721,54,1
45 | 7,159,64,0,0,27.4,0.294,40,0
46 | 0,180,66,39,0,42.0,1.893,25,1
47 | 1,146,56,0,0,29.7,0.564,29,0
48 | 2,71,70,27,0,28.0,0.586,22,0
49 | 7,103,66,32,0,39.1,0.344,31,1
50 | 7,105,0,0,0,0.0,0.305,24,0
51 | 1,103,80,11,82,19.4,0.491,22,0
52 | 1,101,50,15,36,24.2,0.526,26,0
53 | 5,88,66,21,23,24.4,0.342,30,0
54 | 8,176,90,34,300,33.7,0.467,58,1
55 | 7,150,66,42,342,34.7,0.718,42,0
56 | 1,73,50,10,0,23.0,0.248,21,0
57 | 7,187,68,39,304,37.7,0.254,41,1
58 | 0,100,88,60,110,46.8,0.962,31,0
59 | 0,146,82,0,0,40.5,1.781,44,0
60 | 0,105,64,41,142,41.5,0.173,22,0
61 | 2,84,0,0,0,0.0,0.304,21,0
62 | 8,133,72,0,0,32.9,0.270,39,1
63 | 5,44,62,0,0,25.0,0.587,36,0
64 | 2,141,58,34,128,25.4,0.699,24,0
65 | 7,114,66,0,0,32.8,0.258,42,1
66 | 5,99,74,27,0,29.0,0.203,32,0
67 | 0,109,88,30,0,32.5,0.855,38,1
68 | 2,109,92,0,0,42.7,0.845,54,0
69 | 1,95,66,13,38,19.6,0.334,25,0
70 | 4,146,85,27,100,28.9,0.189,27,0
71 | 2,100,66,20,90,32.9,0.867,28,1
72 | 5,139,64,35,140,28.6,0.411,26,0
73 | 13,126,90,0,0,43.4,0.583,42,1
74 | 4,129,86,20,270,35.1,0.231,23,0
75 | 1,79,75,30,0,32.0,0.396,22,0
76 | 1,0,48,20,0,24.7,0.140,22,0
77 | 7,62,78,0,0,32.6,0.391,41,0
78 | 5,95,72,33,0,37.7,0.370,27,0
79 | 0,131,0,0,0,43.2,0.270,26,1
80 | 2,112,66,22,0,25.0,0.307,24,0
81 | 3,113,44,13,0,22.4,0.140,22,0
82 | 2,74,0,0,0,0.0,0.102,22,0
83 | 7,83,78,26,71,29.3,0.767,36,0
84 | 0,101,65,28,0,24.6,0.237,22,0
85 | 5,137,108,0,0,48.8,0.227,37,1
86 | 2,110,74,29,125,32.4,0.698,27,0
87 | 13,106,72,54,0,36.6,0.178,45,0
88 | 2,100,68,25,71,38.5,0.324,26,0
89 | 15,136,70,32,110,37.1,0.153,43,1
90 | 1,107,68,19,0,26.5,0.165,24,0
91 | 1,80,55,0,0,19.1,0.258,21,0
92 | 4,123,80,15,176,32.0,0.443,34,0
93 | 7,81,78,40,48,46.7,0.261,42,0
94 | 4,134,72,0,0,23.8,0.277,60,1
95 | 2,142,82,18,64,24.7,0.761,21,0
96 | 6,144,72,27,228,33.9,0.255,40,0
97 | 2,92,62,28,0,31.6,0.130,24,0
98 | 1,71,48,18,76,20.4,0.323,22,0
99 | 6,93,50,30,64,28.7,0.356,23,0
100 | 1,122,90,51,220,49.7,0.325,31,1
101 | 1,163,72,0,0,39.0,1.222,33,1
102 | 1,151,60,0,0,26.1,0.179,22,0
103 | 0,125,96,0,0,22.5,0.262,21,0
104 | 1,81,72,18,40,26.6,0.283,24,0
105 | 2,85,65,0,0,39.6,0.930,27,0
106 | 1,126,56,29,152,28.7,0.801,21,0
107 | 1,96,122,0,0,22.4,0.207,27,0
108 | 4,144,58,28,140,29.5,0.287,37,0
109 | 3,83,58,31,18,34.3,0.336,25,0
110 | 0,95,85,25,36,37.4,0.247,24,1
111 | 3,171,72,33,135,33.3,0.199,24,1
112 | 8,155,62,26,495,34.0,0.543,46,1
113 | 1,89,76,34,37,31.2,0.192,23,0
114 | 4,76,62,0,0,34.0,0.391,25,0
115 | 7,160,54,32,175,30.5,0.588,39,1
116 | 4,146,92,0,0,31.2,0.539,61,1
117 | 5,124,74,0,0,34.0,0.220,38,1
118 | 5,78,48,0,0,33.7,0.654,25,0
119 | 4,97,60,23,0,28.2,0.443,22,0
120 | 4,99,76,15,51,23.2,0.223,21,0
121 | 0,162,76,56,100,53.2,0.759,25,1
122 | 6,111,64,39,0,34.2,0.260,24,0
123 | 2,107,74,30,100,33.6,0.404,23,0
124 | 5,132,80,0,0,26.8,0.186,69,0
125 | 0,113,76,0,0,33.3,0.278,23,1
126 | 1,88,30,42,99,55.0,0.496,26,1
127 | 3,120,70,30,135,42.9,0.452,30,0
128 | 1,118,58,36,94,33.3,0.261,23,0
129 | 1,117,88,24,145,34.5,0.403,40,1
130 | 0,105,84,0,0,27.9,0.741,62,1
131 | 4,173,70,14,168,29.7,0.361,33,1
132 | 9,122,56,0,0,33.3,1.114,33,1
133 | 3,170,64,37,225,34.5,0.356,30,1
134 | 8,84,74,31,0,38.3,0.457,39,0
135 | 2,96,68,13,49,21.1,0.647,26,0
136 | 2,125,60,20,140,33.8,0.088,31,0
137 | 0,100,70,26,50,30.8,0.597,21,0
138 | 0,93,60,25,92,28.7,0.532,22,0
139 | 0,129,80,0,0,31.2,0.703,29,0
140 | 5,105,72,29,325,36.9,0.159,28,0
141 | 3,128,78,0,0,21.1,0.268,55,0
142 | 5,106,82,30,0,39.5,0.286,38,0
143 | 2,108,52,26,63,32.5,0.318,22,0
144 | 10,108,66,0,0,32.4,0.272,42,1
145 | 4,154,62,31,284,32.8,0.237,23,0
146 | 0,102,75,23,0,0.0,0.572,21,0
147 | 9,57,80,37,0,32.8,0.096,41,0
148 | 2,106,64,35,119,30.5,1.400,34,0
149 | 5,147,78,0,0,33.7,0.218,65,0
150 | 2,90,70,17,0,27.3,0.085,22,0
151 | 1,136,74,50,204,37.4,0.399,24,0
152 | 4,114,65,0,0,21.9,0.432,37,0
153 | 9,156,86,28,155,34.3,1.189,42,1
154 | 1,153,82,42,485,40.6,0.687,23,0
155 | 8,188,78,0,0,47.9,0.137,43,1
156 | 7,152,88,44,0,50.0,0.337,36,1
157 | 2,99,52,15,94,24.6,0.637,21,0
158 | 1,109,56,21,135,25.2,0.833,23,0
159 | 2,88,74,19,53,29.0,0.229,22,0
160 | 17,163,72,41,114,40.9,0.817,47,1
161 | 4,151,90,38,0,29.7,0.294,36,0
162 | 7,102,74,40,105,37.2,0.204,45,0
163 | 0,114,80,34,285,44.2,0.167,27,0
164 | 2,100,64,23,0,29.7,0.368,21,0
165 | 0,131,88,0,0,31.6,0.743,32,1
166 | 6,104,74,18,156,29.9,0.722,41,1
167 | 3,148,66,25,0,32.5,0.256,22,0
168 | 4,120,68,0,0,29.6,0.709,34,0
169 | 4,110,66,0,0,31.9,0.471,29,0
170 | 3,111,90,12,78,28.4,0.495,29,0
171 | 6,102,82,0,0,30.8,0.180,36,1
172 | 6,134,70,23,130,35.4,0.542,29,1
173 | 2,87,0,23,0,28.9,0.773,25,0
174 | 1,79,60,42,48,43.5,0.678,23,0
175 | 2,75,64,24,55,29.7,0.370,33,0
176 | 8,179,72,42,130,32.7,0.719,36,1
177 | 6,85,78,0,0,31.2,0.382,42,0
178 | 0,129,110,46,130,67.1,0.319,26,1
179 | 5,143,78,0,0,45.0,0.190,47,0
180 | 5,130,82,0,0,39.1,0.956,37,1
181 | 6,87,80,0,0,23.2,0.084,32,0
182 | 0,119,64,18,92,34.9,0.725,23,0
183 | 1,0,74,20,23,27.7,0.299,21,0
184 | 5,73,60,0,0,26.8,0.268,27,0
185 | 4,141,74,0,0,27.6,0.244,40,0
186 | 7,194,68,28,0,35.9,0.745,41,1
187 | 8,181,68,36,495,30.1,0.615,60,1
188 | 1,128,98,41,58,32.0,1.321,33,1
189 | 8,109,76,39,114,27.9,0.640,31,1
190 | 5,139,80,35,160,31.6,0.361,25,1
191 | 3,111,62,0,0,22.6,0.142,21,0
192 | 9,123,70,44,94,33.1,0.374,40,0
193 | 7,159,66,0,0,30.4,0.383,36,1
194 | 11,135,0,0,0,52.3,0.578,40,1
195 | 8,85,55,20,0,24.4,0.136,42,0
196 | 5,158,84,41,210,39.4,0.395,29,1
197 | 1,105,58,0,0,24.3,0.187,21,0
198 | 3,107,62,13,48,22.9,0.678,23,1
199 | 4,109,64,44,99,34.8,0.905,26,1
200 | 4,148,60,27,318,30.9,0.150,29,1
201 | 0,113,80,16,0,31.0,0.874,21,0
202 | 1,138,82,0,0,40.1,0.236,28,0
203 | 0,108,68,20,0,27.3,0.787,32,0
204 | 2,99,70,16,44,20.4,0.235,27,0
205 | 6,103,72,32,190,37.7,0.324,55,0
206 | 5,111,72,28,0,23.9,0.407,27,0
207 | 8,196,76,29,280,37.5,0.605,57,1
208 | 5,162,104,0,0,37.7,0.151,52,1
209 | 1,96,64,27,87,33.2,0.289,21,0
210 | 7,184,84,33,0,35.5,0.355,41,1
211 | 2,81,60,22,0,27.7,0.290,25,0
212 | 0,147,85,54,0,42.8,0.375,24,0
213 | 7,179,95,31,0,34.2,0.164,60,0
214 | 0,140,65,26,130,42.6,0.431,24,1
215 | 9,112,82,32,175,34.2,0.260,36,1
216 | 12,151,70,40,271,41.8,0.742,38,1
217 | 5,109,62,41,129,35.8,0.514,25,1
218 | 6,125,68,30,120,30.0,0.464,32,0
219 | 5,85,74,22,0,29.0,1.224,32,1
220 | 5,112,66,0,0,37.8,0.261,41,1
221 | 0,177,60,29,478,34.6,1.072,21,1
222 | 2,158,90,0,0,31.6,0.805,66,1
223 | 7,119,0,0,0,25.2,0.209,37,0
224 | 7,142,60,33,190,28.8,0.687,61,0
225 | 1,100,66,15,56,23.6,0.666,26,0
226 | 1,87,78,27,32,34.6,0.101,22,0
227 | 0,101,76,0,0,35.7,0.198,26,0
228 | 3,162,52,38,0,37.2,0.652,24,1
229 | 4,197,70,39,744,36.7,2.329,31,0
230 | 0,117,80,31,53,45.2,0.089,24,0
231 | 4,142,86,0,0,44.0,0.645,22,1
232 | 6,134,80,37,370,46.2,0.238,46,1
233 | 1,79,80,25,37,25.4,0.583,22,0
234 | 4,122,68,0,0,35.0,0.394,29,0
235 | 3,74,68,28,45,29.7,0.293,23,0
236 | 4,171,72,0,0,43.6,0.479,26,1
237 | 7,181,84,21,192,35.9,0.586,51,1
238 | 0,179,90,27,0,44.1,0.686,23,1
239 | 9,164,84,21,0,30.8,0.831,32,1
240 | 0,104,76,0,0,18.4,0.582,27,0
241 | 1,91,64,24,0,29.2,0.192,21,0
242 | 4,91,70,32,88,33.1,0.446,22,0
243 | 3,139,54,0,0,25.6,0.402,22,1
244 | 6,119,50,22,176,27.1,1.318,33,1
245 | 2,146,76,35,194,38.2,0.329,29,0
246 | 9,184,85,15,0,30.0,1.213,49,1
247 | 10,122,68,0,0,31.2,0.258,41,0
248 | 0,165,90,33,680,52.3,0.427,23,0
249 | 9,124,70,33,402,35.4,0.282,34,0
250 | 1,111,86,19,0,30.1,0.143,23,0
251 | 9,106,52,0,0,31.2,0.380,42,0
252 | 2,129,84,0,0,28.0,0.284,27,0
253 | 2,90,80,14,55,24.4,0.249,24,0
254 | 0,86,68,32,0,35.8,0.238,25,0
255 | 12,92,62,7,258,27.6,0.926,44,1
256 | 1,113,64,35,0,33.6,0.543,21,1
257 | 3,111,56,39,0,30.1,0.557,30,0
258 | 2,114,68,22,0,28.7,0.092,25,0
259 | 1,193,50,16,375,25.9,0.655,24,0
260 | 11,155,76,28,150,33.3,1.353,51,1
261 | 3,191,68,15,130,30.9,0.299,34,0
262 | 3,141,0,0,0,30.0,0.761,27,1
263 | 4,95,70,32,0,32.1,0.612,24,0
264 | 3,142,80,15,0,32.4,0.200,63,0
265 | 4,123,62,0,0,32.0,0.226,35,1
266 | 5,96,74,18,67,33.6,0.997,43,0
267 | 0,138,0,0,0,36.3,0.933,25,1
268 | 2,128,64,42,0,40.0,1.101,24,0
269 | 0,102,52,0,0,25.1,0.078,21,0
270 | 2,146,0,0,0,27.5,0.240,28,1
271 | 10,101,86,37,0,45.6,1.136,38,1
272 | 2,108,62,32,56,25.2,0.128,21,0
273 | 3,122,78,0,0,23.0,0.254,40,0
274 | 1,71,78,50,45,33.2,0.422,21,0
275 | 13,106,70,0,0,34.2,0.251,52,0
276 | 2,100,70,52,57,40.5,0.677,25,0
277 | 7,106,60,24,0,26.5,0.296,29,1
278 | 0,104,64,23,116,27.8,0.454,23,0
279 | 5,114,74,0,0,24.9,0.744,57,0
280 | 2,108,62,10,278,25.3,0.881,22,0
281 | 0,146,70,0,0,37.9,0.334,28,1
282 | 10,129,76,28,122,35.9,0.280,39,0
283 | 7,133,88,15,155,32.4,0.262,37,0
284 | 7,161,86,0,0,30.4,0.165,47,1
285 | 2,108,80,0,0,27.0,0.259,52,1
286 | 7,136,74,26,135,26.0,0.647,51,0
287 | 5,155,84,44,545,38.7,0.619,34,0
288 | 1,119,86,39,220,45.6,0.808,29,1
289 | 4,96,56,17,49,20.8,0.340,26,0
290 | 5,108,72,43,75,36.1,0.263,33,0
291 | 0,78,88,29,40,36.9,0.434,21,0
292 | 0,107,62,30,74,36.6,0.757,25,1
293 | 2,128,78,37,182,43.3,1.224,31,1
294 | 1,128,48,45,194,40.5,0.613,24,1
295 | 0,161,50,0,0,21.9,0.254,65,0
296 | 6,151,62,31,120,35.5,0.692,28,0
297 | 2,146,70,38,360,28.0,0.337,29,1
298 | 0,126,84,29,215,30.7,0.520,24,0
299 | 14,100,78,25,184,36.6,0.412,46,1
300 | 8,112,72,0,0,23.6,0.840,58,0
301 | 0,167,0,0,0,32.3,0.839,30,1
302 | 2,144,58,33,135,31.6,0.422,25,1
303 | 5,77,82,41,42,35.8,0.156,35,0
304 | 5,115,98,0,0,52.9,0.209,28,1
305 | 3,150,76,0,0,21.0,0.207,37,0
306 | 2,120,76,37,105,39.7,0.215,29,0
307 | 10,161,68,23,132,25.5,0.326,47,1
308 | 0,137,68,14,148,24.8,0.143,21,0
309 | 0,128,68,19,180,30.5,1.391,25,1
310 | 2,124,68,28,205,32.9,0.875,30,1
311 | 6,80,66,30,0,26.2,0.313,41,0
312 | 0,106,70,37,148,39.4,0.605,22,0
313 | 2,155,74,17,96,26.6,0.433,27,1
314 | 3,113,50,10,85,29.5,0.626,25,0
315 | 7,109,80,31,0,35.9,1.127,43,1
316 | 2,112,68,22,94,34.1,0.315,26,0
317 | 3,99,80,11,64,19.3,0.284,30,0
318 | 3,182,74,0,0,30.5,0.345,29,1
319 | 3,115,66,39,140,38.1,0.150,28,0
320 | 6,194,78,0,0,23.5,0.129,59,1
321 | 4,129,60,12,231,27.5,0.527,31,0
322 | 3,112,74,30,0,31.6,0.197,25,1
323 | 0,124,70,20,0,27.4,0.254,36,1
324 | 13,152,90,33,29,26.8,0.731,43,1
325 | 2,112,75,32,0,35.7,0.148,21,0
326 | 1,157,72,21,168,25.6,0.123,24,0
327 | 1,122,64,32,156,35.1,0.692,30,1
328 | 10,179,70,0,0,35.1,0.200,37,0
329 | 2,102,86,36,120,45.5,0.127,23,1
330 | 6,105,70,32,68,30.8,0.122,37,0
331 | 8,118,72,19,0,23.1,1.476,46,0
332 | 2,87,58,16,52,32.7,0.166,25,0
333 | 1,180,0,0,0,43.3,0.282,41,1
334 | 12,106,80,0,0,23.6,0.137,44,0
335 | 1,95,60,18,58,23.9,0.260,22,0
336 | 0,165,76,43,255,47.9,0.259,26,0
337 | 0,117,0,0,0,33.8,0.932,44,0
338 | 5,115,76,0,0,31.2,0.343,44,1
339 | 9,152,78,34,171,34.2,0.893,33,1
340 | 7,178,84,0,0,39.9,0.331,41,1
341 | 1,130,70,13,105,25.9,0.472,22,0
342 | 1,95,74,21,73,25.9,0.673,36,0
343 | 1,0,68,35,0,32.0,0.389,22,0
344 | 5,122,86,0,0,34.7,0.290,33,0
345 | 8,95,72,0,0,36.8,0.485,57,0
346 | 8,126,88,36,108,38.5,0.349,49,0
347 | 1,139,46,19,83,28.7,0.654,22,0
348 | 3,116,0,0,0,23.5,0.187,23,0
349 | 3,99,62,19,74,21.8,0.279,26,0
350 | 5,0,80,32,0,41.0,0.346,37,1
351 | 4,92,80,0,0,42.2,0.237,29,0
352 | 4,137,84,0,0,31.2,0.252,30,0
353 | 3,61,82,28,0,34.4,0.243,46,0
354 | 1,90,62,12,43,27.2,0.580,24,0
355 | 3,90,78,0,0,42.7,0.559,21,0
356 | 9,165,88,0,0,30.4,0.302,49,1
357 | 1,125,50,40,167,33.3,0.962,28,1
358 | 13,129,0,30,0,39.9,0.569,44,1
359 | 12,88,74,40,54,35.3,0.378,48,0
360 | 1,196,76,36,249,36.5,0.875,29,1
361 | 5,189,64,33,325,31.2,0.583,29,1
362 | 5,158,70,0,0,29.8,0.207,63,0
363 | 5,103,108,37,0,39.2,0.305,65,0
364 | 4,146,78,0,0,38.5,0.520,67,1
365 | 4,147,74,25,293,34.9,0.385,30,0
366 | 5,99,54,28,83,34.0,0.499,30,0
367 | 6,124,72,0,0,27.6,0.368,29,1
368 | 0,101,64,17,0,21.0,0.252,21,0
369 | 3,81,86,16,66,27.5,0.306,22,0
370 | 1,133,102,28,140,32.8,0.234,45,1
371 | 3,173,82,48,465,38.4,2.137,25,1
372 | 0,118,64,23,89,0.0,1.731,21,0
373 | 0,84,64,22,66,35.8,0.545,21,0
374 | 2,105,58,40,94,34.9,0.225,25,0
375 | 2,122,52,43,158,36.2,0.816,28,0
376 | 12,140,82,43,325,39.2,0.528,58,1
377 | 0,98,82,15,84,25.2,0.299,22,0
378 | 1,87,60,37,75,37.2,0.509,22,0
379 | 4,156,75,0,0,48.3,0.238,32,1
380 | 0,93,100,39,72,43.4,1.021,35,0
381 | 1,107,72,30,82,30.8,0.821,24,0
382 | 0,105,68,22,0,20.0,0.236,22,0
383 | 1,109,60,8,182,25.4,0.947,21,0
384 | 1,90,62,18,59,25.1,1.268,25,0
385 | 1,125,70,24,110,24.3,0.221,25,0
386 | 1,119,54,13,50,22.3,0.205,24,0
387 | 5,116,74,29,0,32.3,0.660,35,1
388 | 8,105,100,36,0,43.3,0.239,45,1
389 | 5,144,82,26,285,32.0,0.452,58,1
390 | 3,100,68,23,81,31.6,0.949,28,0
391 | 1,100,66,29,196,32.0,0.444,42,0
392 | 5,166,76,0,0,45.7,0.340,27,1
393 | 1,131,64,14,415,23.7,0.389,21,0
394 | 4,116,72,12,87,22.1,0.463,37,0
395 | 4,158,78,0,0,32.9,0.803,31,1
396 | 2,127,58,24,275,27.7,1.600,25,0
397 | 3,96,56,34,115,24.7,0.944,39,0
398 | 0,131,66,40,0,34.3,0.196,22,1
399 | 3,82,70,0,0,21.1,0.389,25,0
400 | 3,193,70,31,0,34.9,0.241,25,1
401 | 4,95,64,0,0,32.0,0.161,31,1
402 | 6,137,61,0,0,24.2,0.151,55,0
403 | 5,136,84,41,88,35.0,0.286,35,1
404 | 9,72,78,25,0,31.6,0.280,38,0
405 | 5,168,64,0,0,32.9,0.135,41,1
406 | 2,123,48,32,165,42.1,0.520,26,0
407 | 4,115,72,0,0,28.9,0.376,46,1
408 | 0,101,62,0,0,21.9,0.336,25,0
409 | 8,197,74,0,0,25.9,1.191,39,1
410 | 1,172,68,49,579,42.4,0.702,28,1
411 | 6,102,90,39,0,35.7,0.674,28,0
412 | 1,112,72,30,176,34.4,0.528,25,0
413 | 1,143,84,23,310,42.4,1.076,22,0
414 | 1,143,74,22,61,26.2,0.256,21,0
415 | 0,138,60,35,167,34.6,0.534,21,1
416 | 3,173,84,33,474,35.7,0.258,22,1
417 | 1,97,68,21,0,27.2,1.095,22,0
418 | 4,144,82,32,0,38.5,0.554,37,1
419 | 1,83,68,0,0,18.2,0.624,27,0
420 | 3,129,64,29,115,26.4,0.219,28,1
421 | 1,119,88,41,170,45.3,0.507,26,0
422 | 2,94,68,18,76,26.0,0.561,21,0
423 | 0,102,64,46,78,40.6,0.496,21,0
424 | 2,115,64,22,0,30.8,0.421,21,0
425 | 8,151,78,32,210,42.9,0.516,36,1
426 | 4,184,78,39,277,37.0,0.264,31,1
427 | 0,94,0,0,0,0.0,0.256,25,0
428 | 1,181,64,30,180,34.1,0.328,38,1
429 | 0,135,94,46,145,40.6,0.284,26,0
430 | 1,95,82,25,180,35.0,0.233,43,1
431 | 2,99,0,0,0,22.2,0.108,23,0
432 | 3,89,74,16,85,30.4,0.551,38,0
433 | 1,80,74,11,60,30.0,0.527,22,0
434 | 2,139,75,0,0,25.6,0.167,29,0
435 | 1,90,68,8,0,24.5,1.138,36,0
436 | 0,141,0,0,0,42.4,0.205,29,1
437 | 12,140,85,33,0,37.4,0.244,41,0
438 | 5,147,75,0,0,29.9,0.434,28,0
439 | 1,97,70,15,0,18.2,0.147,21,0
440 | 6,107,88,0,0,36.8,0.727,31,0
441 | 0,189,104,25,0,34.3,0.435,41,1
442 | 2,83,66,23,50,32.2,0.497,22,0
443 | 4,117,64,27,120,33.2,0.230,24,0
444 | 8,108,70,0,0,30.5,0.955,33,1
445 | 4,117,62,12,0,29.7,0.380,30,1
446 | 0,180,78,63,14,59.4,2.420,25,1
447 | 1,100,72,12,70,25.3,0.658,28,0
448 | 0,95,80,45,92,36.5,0.330,26,0
449 | 0,104,64,37,64,33.6,0.510,22,1
450 | 0,120,74,18,63,30.5,0.285,26,0
451 | 1,82,64,13,95,21.2,0.415,23,0
452 | 2,134,70,0,0,28.9,0.542,23,1
453 | 0,91,68,32,210,39.9,0.381,25,0
454 | 2,119,0,0,0,19.6,0.832,72,0
455 | 2,100,54,28,105,37.8,0.498,24,0
456 | 14,175,62,30,0,33.6,0.212,38,1
457 | 1,135,54,0,0,26.7,0.687,62,0
458 | 5,86,68,28,71,30.2,0.364,24,0
459 | 10,148,84,48,237,37.6,1.001,51,1
460 | 9,134,74,33,60,25.9,0.460,81,0
461 | 9,120,72,22,56,20.8,0.733,48,0
462 | 1,71,62,0,0,21.8,0.416,26,0
463 | 8,74,70,40,49,35.3,0.705,39,0
464 | 5,88,78,30,0,27.6,0.258,37,0
465 | 10,115,98,0,0,24.0,1.022,34,0
466 | 0,124,56,13,105,21.8,0.452,21,0
467 | 0,74,52,10,36,27.8,0.269,22,0
468 | 0,97,64,36,100,36.8,0.600,25,0
469 | 8,120,0,0,0,30.0,0.183,38,1
470 | 6,154,78,41,140,46.1,0.571,27,0
471 | 1,144,82,40,0,41.3,0.607,28,0
472 | 0,137,70,38,0,33.2,0.170,22,0
473 | 0,119,66,27,0,38.8,0.259,22,0
474 | 7,136,90,0,0,29.9,0.210,50,0
475 | 4,114,64,0,0,28.9,0.126,24,0
476 | 0,137,84,27,0,27.3,0.231,59,0
477 | 2,105,80,45,191,33.7,0.711,29,1
478 | 7,114,76,17,110,23.8,0.466,31,0
479 | 8,126,74,38,75,25.9,0.162,39,0
480 | 4,132,86,31,0,28.0,0.419,63,0
481 | 3,158,70,30,328,35.5,0.344,35,1
482 | 0,123,88,37,0,35.2,0.197,29,0
483 | 4,85,58,22,49,27.8,0.306,28,0
484 | 0,84,82,31,125,38.2,0.233,23,0
485 | 0,145,0,0,0,44.2,0.630,31,1
486 | 0,135,68,42,250,42.3,0.365,24,1
487 | 1,139,62,41,480,40.7,0.536,21,0
488 | 0,173,78,32,265,46.5,1.159,58,0
489 | 4,99,72,17,0,25.6,0.294,28,0
490 | 8,194,80,0,0,26.1,0.551,67,0
491 | 2,83,65,28,66,36.8,0.629,24,0
492 | 2,89,90,30,0,33.5,0.292,42,0
493 | 4,99,68,38,0,32.8,0.145,33,0
494 | 4,125,70,18,122,28.9,1.144,45,1
495 | 3,80,0,0,0,0.0,0.174,22,0
496 | 6,166,74,0,0,26.6,0.304,66,0
497 | 5,110,68,0,0,26.0,0.292,30,0
498 | 2,81,72,15,76,30.1,0.547,25,0
499 | 7,195,70,33,145,25.1,0.163,55,1
500 | 6,154,74,32,193,29.3,0.839,39,0
501 | 2,117,90,19,71,25.2,0.313,21,0
502 | 3,84,72,32,0,37.2,0.267,28,0
503 | 6,0,68,41,0,39.0,0.727,41,1
504 | 7,94,64,25,79,33.3,0.738,41,0
505 | 3,96,78,39,0,37.3,0.238,40,0
506 | 10,75,82,0,0,33.3,0.263,38,0
507 | 0,180,90,26,90,36.5,0.314,35,1
508 | 1,130,60,23,170,28.6,0.692,21,0
509 | 2,84,50,23,76,30.4,0.968,21,0
510 | 8,120,78,0,0,25.0,0.409,64,0
511 | 12,84,72,31,0,29.7,0.297,46,1
512 | 0,139,62,17,210,22.1,0.207,21,0
513 | 9,91,68,0,0,24.2,0.200,58,0
514 | 2,91,62,0,0,27.3,0.525,22,0
515 | 3,99,54,19,86,25.6,0.154,24,0
516 | 3,163,70,18,105,31.6,0.268,28,1
517 | 9,145,88,34,165,30.3,0.771,53,1
518 | 7,125,86,0,0,37.6,0.304,51,0
519 | 13,76,60,0,0,32.8,0.180,41,0
520 | 6,129,90,7,326,19.6,0.582,60,0
521 | 2,68,70,32,66,25.0,0.187,25,0
522 | 3,124,80,33,130,33.2,0.305,26,0
523 | 6,114,0,0,0,0.0,0.189,26,0
524 | 9,130,70,0,0,34.2,0.652,45,1
525 | 3,125,58,0,0,31.6,0.151,24,0
526 | 3,87,60,18,0,21.8,0.444,21,0
527 | 1,97,64,19,82,18.2,0.299,21,0
528 | 3,116,74,15,105,26.3,0.107,24,0
529 | 0,117,66,31,188,30.8,0.493,22,0
530 | 0,111,65,0,0,24.6,0.660,31,0
531 | 2,122,60,18,106,29.8,0.717,22,0
532 | 0,107,76,0,0,45.3,0.686,24,0
533 | 1,86,66,52,65,41.3,0.917,29,0
534 | 6,91,0,0,0,29.8,0.501,31,0
535 | 1,77,56,30,56,33.3,1.251,24,0
536 | 4,132,0,0,0,32.9,0.302,23,1
537 | 0,105,90,0,0,29.6,0.197,46,0
538 | 0,57,60,0,0,21.7,0.735,67,0
539 | 0,127,80,37,210,36.3,0.804,23,0
540 | 3,129,92,49,155,36.4,0.968,32,1
541 | 8,100,74,40,215,39.4,0.661,43,1
542 | 3,128,72,25,190,32.4,0.549,27,1
543 | 10,90,85,32,0,34.9,0.825,56,1
544 | 4,84,90,23,56,39.5,0.159,25,0
545 | 1,88,78,29,76,32.0,0.365,29,0
546 | 8,186,90,35,225,34.5,0.423,37,1
547 | 5,187,76,27,207,43.6,1.034,53,1
548 | 4,131,68,21,166,33.1,0.160,28,0
549 | 1,164,82,43,67,32.8,0.341,50,0
550 | 4,189,110,31,0,28.5,0.680,37,0
551 | 1,116,70,28,0,27.4,0.204,21,0
552 | 3,84,68,30,106,31.9,0.591,25,0
553 | 6,114,88,0,0,27.8,0.247,66,0
554 | 1,88,62,24,44,29.9,0.422,23,0
555 | 1,84,64,23,115,36.9,0.471,28,0
556 | 7,124,70,33,215,25.5,0.161,37,0
557 | 1,97,70,40,0,38.1,0.218,30,0
558 | 8,110,76,0,0,27.8,0.237,58,0
559 | 11,103,68,40,0,46.2,0.126,42,0
560 | 11,85,74,0,0,30.1,0.300,35,0
561 | 6,125,76,0,0,33.8,0.121,54,1
562 | 0,198,66,32,274,41.3,0.502,28,1
563 | 1,87,68,34,77,37.6,0.401,24,0
564 | 6,99,60,19,54,26.9,0.497,32,0
565 | 0,91,80,0,0,32.4,0.601,27,0
566 | 2,95,54,14,88,26.1,0.748,22,0
567 | 1,99,72,30,18,38.6,0.412,21,0
568 | 6,92,62,32,126,32.0,0.085,46,0
569 | 4,154,72,29,126,31.3,0.338,37,0
570 | 0,121,66,30,165,34.3,0.203,33,1
571 | 3,78,70,0,0,32.5,0.270,39,0
572 | 2,130,96,0,0,22.6,0.268,21,0
573 | 3,111,58,31,44,29.5,0.430,22,0
574 | 2,98,60,17,120,34.7,0.198,22,0
575 | 1,143,86,30,330,30.1,0.892,23,0
576 | 1,119,44,47,63,35.5,0.280,25,0
577 | 6,108,44,20,130,24.0,0.813,35,0
578 | 2,118,80,0,0,42.9,0.693,21,1
579 | 10,133,68,0,0,27.0,0.245,36,0
580 | 2,197,70,99,0,34.7,0.575,62,1
581 | 0,151,90,46,0,42.1,0.371,21,1
582 | 6,109,60,27,0,25.0,0.206,27,0
583 | 12,121,78,17,0,26.5,0.259,62,0
584 | 8,100,76,0,0,38.7,0.190,42,0
585 | 8,124,76,24,600,28.7,0.687,52,1
586 | 1,93,56,11,0,22.5,0.417,22,0
587 | 8,143,66,0,0,34.9,0.129,41,1
588 | 6,103,66,0,0,24.3,0.249,29,0
589 | 3,176,86,27,156,33.3,1.154,52,1
590 | 0,73,0,0,0,21.1,0.342,25,0
591 | 11,111,84,40,0,46.8,0.925,45,1
592 | 2,112,78,50,140,39.4,0.175,24,0
593 | 3,132,80,0,0,34.4,0.402,44,1
594 | 2,82,52,22,115,28.5,1.699,25,0
595 | 6,123,72,45,230,33.6,0.733,34,0
596 | 0,188,82,14,185,32.0,0.682,22,1
597 | 0,67,76,0,0,45.3,0.194,46,0
598 | 1,89,24,19,25,27.8,0.559,21,0
599 | 1,173,74,0,0,36.8,0.088,38,1
600 | 1,109,38,18,120,23.1,0.407,26,0
601 | 1,108,88,19,0,27.1,0.400,24,0
602 | 6,96,0,0,0,23.7,0.190,28,0
603 | 1,124,74,36,0,27.8,0.100,30,0
604 | 7,150,78,29,126,35.2,0.692,54,1
605 | 4,183,0,0,0,28.4,0.212,36,1
606 | 1,124,60,32,0,35.8,0.514,21,0
607 | 1,181,78,42,293,40.0,1.258,22,1
608 | 1,92,62,25,41,19.5,0.482,25,0
609 | 0,152,82,39,272,41.5,0.270,27,0
610 | 1,111,62,13,182,24.0,0.138,23,0
611 | 3,106,54,21,158,30.9,0.292,24,0
612 | 3,174,58,22,194,32.9,0.593,36,1
613 | 7,168,88,42,321,38.2,0.787,40,1
614 | 6,105,80,28,0,32.5,0.878,26,0
615 | 11,138,74,26,144,36.1,0.557,50,1
616 | 3,106,72,0,0,25.8,0.207,27,0
617 | 6,117,96,0,0,28.7,0.157,30,0
618 | 2,68,62,13,15,20.1,0.257,23,0
619 | 9,112,82,24,0,28.2,1.282,50,1
620 | 0,119,0,0,0,32.4,0.141,24,1
621 | 2,112,86,42,160,38.4,0.246,28,0
622 | 2,92,76,20,0,24.2,1.698,28,0
623 | 6,183,94,0,0,40.8,1.461,45,0
624 | 0,94,70,27,115,43.5,0.347,21,0
625 | 2,108,64,0,0,30.8,0.158,21,0
626 | 4,90,88,47,54,37.7,0.362,29,0
627 | 0,125,68,0,0,24.7,0.206,21,0
628 | 0,132,78,0,0,32.4,0.393,21,0
629 | 5,128,80,0,0,34.6,0.144,45,0
630 | 4,94,65,22,0,24.7,0.148,21,0
631 | 7,114,64,0,0,27.4,0.732,34,1
632 | 0,102,78,40,90,34.5,0.238,24,0
633 | 2,111,60,0,0,26.2,0.343,23,0
634 | 1,128,82,17,183,27.5,0.115,22,0
635 | 10,92,62,0,0,25.9,0.167,31,0
636 | 13,104,72,0,0,31.2,0.465,38,1
637 | 5,104,74,0,0,28.8,0.153,48,0
638 | 2,94,76,18,66,31.6,0.649,23,0
639 | 7,97,76,32,91,40.9,0.871,32,1
640 | 1,100,74,12,46,19.5,0.149,28,0
641 | 0,102,86,17,105,29.3,0.695,27,0
642 | 4,128,70,0,0,34.3,0.303,24,0
643 | 6,147,80,0,0,29.5,0.178,50,1
644 | 4,90,0,0,0,28.0,0.610,31,0
645 | 3,103,72,30,152,27.6,0.730,27,0
646 | 2,157,74,35,440,39.4,0.134,30,0
647 | 1,167,74,17,144,23.4,0.447,33,1
648 | 0,179,50,36,159,37.8,0.455,22,1
649 | 11,136,84,35,130,28.3,0.260,42,1
650 | 0,107,60,25,0,26.4,0.133,23,0
651 | 1,91,54,25,100,25.2,0.234,23,0
652 | 1,117,60,23,106,33.8,0.466,27,0
653 | 5,123,74,40,77,34.1,0.269,28,0
654 | 2,120,54,0,0,26.8,0.455,27,0
655 | 1,106,70,28,135,34.2,0.142,22,0
656 | 2,155,52,27,540,38.7,0.240,25,1
657 | 2,101,58,35,90,21.8,0.155,22,0
658 | 1,120,80,48,200,38.9,1.162,41,0
659 | 11,127,106,0,0,39.0,0.190,51,0
660 | 3,80,82,31,70,34.2,1.292,27,1
661 | 10,162,84,0,0,27.7,0.182,54,0
662 | 1,199,76,43,0,42.9,1.394,22,1
663 | 8,167,106,46,231,37.6,0.165,43,1
664 | 9,145,80,46,130,37.9,0.637,40,1
665 | 6,115,60,39,0,33.7,0.245,40,1
666 | 1,112,80,45,132,34.8,0.217,24,0
667 | 4,145,82,18,0,32.5,0.235,70,1
668 | 10,111,70,27,0,27.5,0.141,40,1
669 | 6,98,58,33,190,34.0,0.430,43,0
670 | 9,154,78,30,100,30.9,0.164,45,0
671 | 6,165,68,26,168,33.6,0.631,49,0
672 | 1,99,58,10,0,25.4,0.551,21,0
673 | 10,68,106,23,49,35.5,0.285,47,0
674 | 3,123,100,35,240,57.3,0.880,22,0
675 | 8,91,82,0,0,35.6,0.587,68,0
676 | 6,195,70,0,0,30.9,0.328,31,1
677 | 9,156,86,0,0,24.8,0.230,53,1
678 | 0,93,60,0,0,35.3,0.263,25,0
679 | 3,121,52,0,0,36.0,0.127,25,1
680 | 2,101,58,17,265,24.2,0.614,23,0
681 | 2,56,56,28,45,24.2,0.332,22,0
682 | 0,162,76,36,0,49.6,0.364,26,1
683 | 0,95,64,39,105,44.6,0.366,22,0
684 | 4,125,80,0,0,32.3,0.536,27,1
685 | 5,136,82,0,0,0.0,0.640,69,0
686 | 2,129,74,26,205,33.2,0.591,25,0
687 | 3,130,64,0,0,23.1,0.314,22,0
688 | 1,107,50,19,0,28.3,0.181,29,0
689 | 1,140,74,26,180,24.1,0.828,23,0
690 | 1,144,82,46,180,46.1,0.335,46,1
691 | 8,107,80,0,0,24.6,0.856,34,0
692 | 13,158,114,0,0,42.3,0.257,44,1
693 | 2,121,70,32,95,39.1,0.886,23,0
694 | 7,129,68,49,125,38.5,0.439,43,1
695 | 2,90,60,0,0,23.5,0.191,25,0
696 | 7,142,90,24,480,30.4,0.128,43,1
697 | 3,169,74,19,125,29.9,0.268,31,1
698 | 0,99,0,0,0,25.0,0.253,22,0
699 | 4,127,88,11,155,34.5,0.598,28,0
700 | 4,118,70,0,0,44.5,0.904,26,0
701 | 2,122,76,27,200,35.9,0.483,26,0
702 | 6,125,78,31,0,27.6,0.565,49,1
703 | 1,168,88,29,0,35.0,0.905,52,1
704 | 2,129,0,0,0,38.5,0.304,41,0
705 | 4,110,76,20,100,28.4,0.118,27,0
706 | 6,80,80,36,0,39.8,0.177,28,0
707 | 10,115,0,0,0,0.0,0.261,30,1
708 | 2,127,46,21,335,34.4,0.176,22,0
709 | 9,164,78,0,0,32.8,0.148,45,1
710 | 2,93,64,32,160,38.0,0.674,23,1
711 | 3,158,64,13,387,31.2,0.295,24,0
712 | 5,126,78,27,22,29.6,0.439,40,0
713 | 10,129,62,36,0,41.2,0.441,38,1
714 | 0,134,58,20,291,26.4,0.352,21,0
715 | 3,102,74,0,0,29.5,0.121,32,0
716 | 7,187,50,33,392,33.9,0.826,34,1
717 | 3,173,78,39,185,33.8,0.970,31,1
718 | 10,94,72,18,0,23.1,0.595,56,0
719 | 1,108,60,46,178,35.5,0.415,24,0
720 | 5,97,76,27,0,35.6,0.378,52,1
721 | 4,83,86,19,0,29.3,0.317,34,0
722 | 1,114,66,36,200,38.1,0.289,21,0
723 | 1,149,68,29,127,29.3,0.349,42,1
724 | 5,117,86,30,105,39.1,0.251,42,0
725 | 1,111,94,0,0,32.8,0.265,45,0
726 | 4,112,78,40,0,39.4,0.236,38,0
727 | 1,116,78,29,180,36.1,0.496,25,0
728 | 0,141,84,26,0,32.4,0.433,22,0
729 | 2,175,88,0,0,22.9,0.326,22,0
730 | 2,92,52,0,0,30.1,0.141,22,0
731 | 3,130,78,23,79,28.4,0.323,34,1
732 | 8,120,86,0,0,28.4,0.259,22,1
733 | 2,174,88,37,120,44.5,0.646,24,1
734 | 2,106,56,27,165,29.0,0.426,22,0
735 | 2,105,75,0,0,23.3,0.560,53,0
736 | 4,95,60,32,0,35.4,0.284,28,0
737 | 0,126,86,27,120,27.4,0.515,21,0
738 | 8,65,72,23,0,32.0,0.600,42,0
739 | 2,99,60,17,160,36.6,0.453,21,0
740 | 1,102,74,0,0,39.5,0.293,42,1
741 | 11,120,80,37,150,42.3,0.785,48,1
742 | 3,102,44,20,94,30.8,0.400,26,0
743 | 1,109,58,18,116,28.5,0.219,22,0
744 | 9,140,94,0,0,32.7,0.734,45,1
745 | 13,153,88,37,140,40.6,1.174,39,0
746 | 12,100,84,33,105,30.0,0.488,46,0
747 | 1,147,94,41,0,49.3,0.358,27,1
748 | 1,81,74,41,57,46.3,1.096,32,0
749 | 3,187,70,22,200,36.4,0.408,36,1
750 | 6,162,62,0,0,24.3,0.178,50,1
751 | 4,136,70,0,0,31.2,1.182,22,1
752 | 1,121,78,39,74,39.0,0.261,28,0
753 | 3,108,62,24,0,26.0,0.223,25,0
754 | 0,181,88,44,510,43.3,0.222,26,1
755 | 8,154,78,32,0,32.4,0.443,45,1
756 | 1,128,88,39,110,36.5,1.057,37,1
757 | 7,137,90,41,0,32.0,0.391,39,0
758 | 0,123,72,0,0,36.3,0.258,52,1
759 | 1,106,76,0,0,37.5,0.197,26,0
760 | 6,190,92,0,0,35.5,0.278,66,1
761 | 2,88,58,26,16,28.4,0.766,22,0
762 | 9,170,74,31,0,44.0,0.403,43,1
763 | 9,89,62,0,0,22.5,0.142,33,0
764 | 10,101,76,48,180,32.9,0.171,63,0
765 | 2,122,70,27,0,36.8,0.340,27,0
766 | 5,121,72,23,112,26.2,0.245,30,0
767 | 1,126,60,0,0,30.1,0.349,47,1
768 | 1,93,70,31,0,30.4,0.315,23,0
769 |
--------------------------------------------------------------------------------
/deeplearning/deep_class/01_Linear_Regression.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | import numpy as np
3 |
4 | # x and y values
5 | x=[2, 4, 6, 8]
6 | y=[81, 93, 91, 97]
7 |
8 | # Mean of x and mean of y
9 | mx = np.mean(x)
10 | my = np.mean(y)
11 | print("Mean of x:", mx)
12 | print("Mean of y:", my)
13 |
14 | # Denominator of the slope formula
15 | divisor = sum([(mx - i)**2 for i in x])
16 |
17 | # Numerator of the slope formula
18 | def top(x, mx, y, my):
19 |     d = 0
20 |     for i in range(len(x)):
21 |         d += (x[i] - mx) * (y[i] - my)
22 |     return d
23 | dividend = top(x, mx, y, my)
24 |
25 | print("Denominator:", divisor)
26 | print("Numerator:", dividend)
27 |
28 | # Compute the slope a and the y-intercept b
29 | a = dividend / divisor
30 | b = my - (mx*a)
31 |
32 | # Check the results
33 | print("Slope a =", a)
34 | print("y-intercept b =", b)
35 |
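36 | # Editor's note (added; not part of the original example): as a minimal
37 | # cross-check, the closed-form result above can be verified with NumPy's
38 | # least-squares polynomial fit, which should also give a = 2.3 and b = 79.0
39 | # for this data.
40 | slope, intercept = np.polyfit(x, y, 1)
41 | print("np.polyfit check: a =", slope, ", b =", intercept)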
--------------------------------------------------------------------------------
/deeplearning/deep_class/02_Data_preparation.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # The coding declaration above allows Korean (UTF-8) text inside this file.
3 |
4 | # Import the pandas library, plus matplotlib and seaborn for plotting.
5 | import pandas as pd
6 | import matplotlib.pyplot as plt
7 | import seaborn as sns
8 |
9 | # Load the Pima Indians diabetes dataset, assigning a name to each column.
10 | df = pd.read_csv('../dataset/pima-indians-diabetes.csv',
11 | names = ["pregnant", "plasma", "pressure", "thickness", "insulin", "BMI", "pedigree", "age", "class"])
12 |
13 | # Look at the first 5 rows.
14 | print(df.head(5))
15 |
16 | # Check the overall information about the data.
17 | print(df.info())
18 |
19 | # Print more detailed statistics for each column.
20 | print(df.describe())
21 |
22 | # Print only the plasma (glucose) and class columns.
23 | print(df[['plasma', 'class']])
24 |
25 | # Visualize the correlations between the columns as a heat map.
26 |
27 | colormap = plt.cm.gist_heat # Choose the color scheme for the plot.
28 | plt.figure(figsize=(12,12)) # Set the size of the figure.
29 |
30 | # Set the heat map properties. vmax is set to 0.5 so that values closer to 0.5 are drawn in brighter colors.
31 | sns.heatmap(df.corr(),linewidths=0.1,vmax=0.5, cmap=colormap, linecolor='white', annot=True)
32 | plt.show()
33 |
34 | grid = sns.FacetGrid(df, col='class')
35 | grid.map(plt.hist, 'plasma', bins=10)
36 | plt.show()
37 |
--------------------------------------------------------------------------------
/deeplearning/deep_class/02_RMSE.py:
--------------------------------------------------------------------------------
1 | #-*- coding: utf-8 -*-
2 |
3 | import numpy as np
4 |
5 | # Slope a and y-intercept b
6 | ab=[3,76]
7 |
8 | # x, y data values
9 | data = [[2, 81], [4, 93], [6, 91], [8, 97]]
10 | x = [i[0] for i in data]
11 | y = [i[1] for i in data]
12 |
13 | # Function that plugs a and b into y = ax + b and returns the prediction
14 | def predict(x):
15 |     return ab[0]*x + ab[1]
16 |
17 | # RMSE function
18 | def rmse(p, a):
19 |     return np.sqrt(((p - a) ** 2).mean())
20 |
21 | # Function that computes the final RMSE from the predictions and the actual y values
22 | def rmse_val(predict_result,y):
23 |     return rmse(np.array(predict_result), np.array(y))
24 |
25 | # Empty list that will hold the predictions
26 | predict_result = []
27 |
28 | # Plug in every x value once to fill the predict_result list.
29 | for i in range(len(x)):
30 |     predict_result.append(predict(x[i]))
31 |     print("study_hours=%.f, actual_score=%.f, predicted_score=%.f" % (x[i], y[i], predict(x[i])))
32 |
33 | # Print the final RMSE
34 | print("Final RMSE: " + str(rmse_val(predict_result,y)))
35 |
36 |
--------------------------------------------------------------------------------
/deeplearning/deep_class/03_Gradient_Descent.py:
--------------------------------------------------------------------------------
1 | import tensorflow as tf
2 |
3 | # x, y data values
4 | data = [[2, 81], [4, 93], [6, 91], [8, 97]]
5 | x_data = [x_row[0] for x_row in data]
6 | y_data = [y_row[1] for y_row in data]
7 |
8 | # Initialize the slope a and the y-intercept b with random values.
9 | # The slope is drawn from the range 0-10 and the y-intercept from 0-100.
10 | a = tf.Variable(tf.random_uniform([1], 0, 10, dtype = tf.float64, seed = 0))
11 | b = tf.Variable(tf.random_uniform([1], 0, 100, dtype = tf.float64, seed = 0))
12 |
13 | # Set up the linear equation y = ax + b.
14 | y = a * x_data + b
15 |
16 | # RMSE expressed with TensorFlow ops
17 | rmse = tf.sqrt(tf.reduce_mean(tf.square( y - y_data )))
18 |
19 | # Learning rate
20 | learning_rate = 0.1
21 |
22 | # Find the values that minimize the RMSE
23 | gradient_descent = tf.train.GradientDescentOptimizer(learning_rate).minimize(rmse)
24 |
25 | # Training with TensorFlow
26 | with tf.Session() as sess:
27 |     # Initialize the variables
28 |     sess.run(tf.global_variables_initializer())
29 |     # Run 2001 steps (including step 0)
30 |     for step in range(2001):
31 |         sess.run(gradient_descent)
32 |         # Print the results every 100 steps
33 |         if step % 100 == 0:
34 |             print("Epoch: %.f, RMSE = %.04f, slope a = %.4f, y-intercept b = %.4f" % (step,sess.run(rmse),sess.run(a),sess.run(b)))
35 |
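36 | # Editor's note (added; not part of the original script): this example, like the
37 | # other TensorFlow scripts in this repository, targets the TensorFlow 1.x graph
38 | # API (tf.Session, tf.train.GradientDescentOptimizer, ...). If only TensorFlow 2.x
39 | # is installed, it can usually be run unchanged through the v1 compatibility layer,
40 | # e.g. by replacing the plain "import tensorflow as tf" at the top of the file with:
41 | #
42 | #     import tensorflow.compat.v1 as tf
43 | #     tf.disable_eager_execution()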
--------------------------------------------------------------------------------
/deeplearning/deep_class/04_Multi-Linear-Regression.py:
--------------------------------------------------------------------------------
1 | #-*- coding: utf-8 -*-
2 | import tensorflow as tf
3 |
4 | # x1, x2, y의 데이터 값
5 |
6 | data = [[2, 0, 81], [4, 4, 93], [6, 2, 91], [8, 3, 97]]
7 | x1 = [x_row1[0] for x_row1 in data]
8 | x2 = [x_row2[1] for x_row2 in data] # 새로 추가되는 값
9 | y_data = [y_row[2] for y_row in data]
10 |
11 | # 기울기 a와 y절편 b의 값을 임의로 정함. 단 기울기의 범위는 0-10 사이, y 절편은 0-100사이에서 변하게 함
12 | a1 = tf.Variable(tf.random_uniform([1], 0, 10, dtype=tf.float64, seed=0))
13 | a2 = tf.Variable(tf.random_uniform([1], 0, 10, dtype=tf.float64, seed=0))
14 | b = tf.Variable(tf.random_uniform([1], 0, 100, dtype=tf.float64, seed=0))
15 |
16 | # 새로운 방정식
17 | y = a1 * x1 + a2 * x2+ b
18 |
19 | # 텐서플로 RMSE 함수
20 | rmse = tf.sqrt(tf.reduce_mean(tf.square( y - y_data )))
21 |
22 | # 학습률 값
23 | learning_rate = 0.1
24 |
25 | # RMSE 값을 최소로 하는 값 찾기
26 | gradient_decent = tf.train.GradientDescentOptimizer(learning_rate).minimize(rmse)
27 |
28 | # 학습이 진행되는 부분
29 | with tf.Session() as sess:
30 | sess.run(tf.global_variables_initializer())
31 |
32 | for step in range(2001):
33 | sess.run(gradient_decent)
34 | if step % 100 == 0:
35 | print("Epoch: %.f, RMSE = %.04f, 기울기 a1 = %.4f, 기울기 a2 = %.4f, y절편 b = %.4f" % (step,sess.run(rmse),sess.run(a1),sess.run(a2),sess.run(b)))
36 |
37 |
--------------------------------------------------------------------------------
/deeplearning/deep_class/05_3D_Graph.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import statsmodels.api as statm
3 | import statsmodels.formula.api as statfa
4 | import pandas as pd
5 | import matplotlib.pyplot as plt
6 | from mpl_toolkits.mplot3d import Axes3D
7 |
8 | data = [[2, 0, 81], [4, 4, 93], [6, 2, 91], [8, 3, 97]]
9 | X = [i[0:2] for i in data]
10 | Y = [i[2] for i in data]
11 |
12 | X_1=statm.add_constant(X)
13 | results=statm.OLS(Y,X_1).fit()
14 |
15 | hour_class=pd.DataFrame(X,columns=['study_hours','private_class'])
16 | hour_class['Score']=pd.Series(Y)
17 |
18 | model = statfa.ols(formula='Score ~ study_hours + private_class', data=hour_class)
19 |
20 | results_formula = model.fit()
21 |
22 | a, b = np.meshgrid(np.linspace(hour_class.study_hours.min(),hour_class.study_hours.max(),100),
23 | np.linspace(hour_class.private_class.min(),hour_class.private_class.max(),100))
24 |
25 | X_ax = pd.DataFrame({'study_hours': a.ravel(), 'private_class': b.ravel()})
26 | fittedY=results_formula.predict(exog=X_ax)
27 | fig = plt.figure()
28 |
29 | graph = fig.add_subplot(111, projection='3d')
30 |
31 | graph.scatter(hour_class['study_hours'],hour_class['private_class'],hour_class['Score'],
32 | c='blue',marker='o', alpha=1)
33 | graph.plot_surface(a,b,fittedY.values.reshape(a.shape),
34 | rstride=1, cstride=1, color='none', alpha=0.4)
35 | graph.set_xlabel('study_hours')
36 | graph.set_ylabel('private_class')
37 | graph.set_zlabel('Score')
38 |
39 | plt.show()
40 |
41 |
--------------------------------------------------------------------------------
/deeplearning/deep_class/06_Logistic_Regression.py:
--------------------------------------------------------------------------------
1 | #-*- coding: utf-8 -*-
2 | import tensorflow as tf
3 | import numpy as np
4 |
5 | # x,y의 데이터 값
6 | data = [[2, 0], [4, 0], [6, 0], [8, 1], [10, 1], [12, 1], [14, 1]]
7 | x_data = [x_row[0] for x_row in data]
8 | y_data = [y_row[1] for y_row in data]
9 |
10 | # a와 b의 값을 임의로 정함
11 | a = tf.Variable(tf.random_normal([1], dtype=tf.float64, seed=0))
12 | b = tf.Variable(tf.random_normal([1], dtype=tf.float64, seed=0))
13 |
14 | # y 시그모이드 함수의 방정식을 세움
15 | y = 1/(1 + np.e**(a * x_data + b))
16 |
17 | # loss를 구하는 함수
18 | loss = -tf.reduce_mean(np.array(y_data) * tf.log(y) + (1 - np.array(y_data)) * tf.log(1 - y))
19 |
20 | # 학습률 값
21 | learning_rate=0.5
22 |
23 | # loss를 최소로 하는 값 찾기
24 | gradient_decent = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss)
25 |
26 | # 학습
27 | with tf.Session() as sess:
28 | sess.run(tf.global_variables_initializer())
29 |
30 | for i in range(60001):
31 | sess.run(gradient_decent)
32 | if i % 6000 == 0:
33 | print("Epoch: %.f, loss = %.4f, 기울기 a = %.4f, 바이어스 b = %.4f" % (i, sess.run(loss), sess.run(a), sess.run(b)))
34 |
35 |
--------------------------------------------------------------------------------
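
Note that the exponent in the listing above is written as +(a * x_data + b); the conventional sigmoid uses a negative exponent, so the fitted a simply comes out with the opposite sign of the usual convention while the predictions themselves are unchanged. For comparison, a minimal NumPy sketch of the conventional form:

import numpy as np

def sigmoid(z):
    # conventional form: 1 / (1 + e^(-z))
    return 1 / (1 + np.exp(-z))

print(sigmoid(np.array([-2.0, 0.0, 2.0])))   # ≈ [0.119, 0.5, 0.881]
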
/deeplearning/deep_class/07_Multi_Logistic_Regression.py:
--------------------------------------------------------------------------------
1 | #-*- coding: utf-8 -*-
2 | import tensorflow as tf
3 | import numpy as np
4 |
5 | # 실행할 때마다 같은 결과를 출력하기 위한 seed 값 설정
6 | seed = 0
7 | np.random.seed(seed)
8 | tf.set_random_seed(seed)
9 |
10 | # x,y의 데이터 값
11 | x_data = np.array([[2, 3],[4, 3],[6, 4],[8, 6],[10, 7],[12, 8],[14, 9]])
12 | y_data = np.array([0, 0, 0, 1, 1, 1,1]).reshape(7, 1)
13 |
14 | # 입력 값을 플래이스 홀더에 저장
15 | X = tf.placeholder(tf.float64, shape=[None, 2])
16 | Y = tf.placeholder(tf.float64, shape=[None, 1])
17 |
18 | # 기울기 a와 bias b의 값을 임의로 정함.
19 | a = tf.Variable(tf.random_uniform([2,1], dtype=tf.float64)) # [2,1] 의미: 들어오는 값은 2개, 나가는 값은 1개
20 | b = tf.Variable(tf.random_uniform([1], dtype=tf.float64))
21 |
22 | # y 시그모이드 함수의 방정식을 세움
23 | y = tf.sigmoid(tf.matmul(X, a) + b)
24 |
25 | # 오차를 구하는 함수
26 | loss = -tf.reduce_mean(Y * tf.log(y) + (1 - Y) * tf.log(1 - y))
27 |
28 | # 학습률 값
29 | learning_rate=0.1
30 |
31 | # 오차를 최소로 하는 값 찾기
32 | gradient_decent = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss)
33 |
34 | predicted = tf.cast(y > 0.5, dtype=tf.float64)
35 | accuracy = tf.reduce_mean(tf.cast(tf.equal(predicted, Y), dtype=tf.float64))
36 |
37 | # 학습
38 | with tf.Session() as sess:
39 | sess.run(tf.global_variables_initializer())
40 |
41 | for i in range(3001):
42 | a_, b_, loss_, _ = sess.run([a, b, loss, gradient_decent], feed_dict={X: x_data, Y: y_data})
43 | if (i + 1) % 300 == 0:
44 | print("step=%d, a1=%.4f, a2=%.4f, b=%.4f, loss=%.4f" % (i + 1, a_[0], a_[1], b_, loss_))
45 |
46 |
47 | # 어떻게 활용하는가
48 | new_x = np.array([7, 6.]).reshape(1, 2) #[7, 6]은 각각 공부 시간과 과외 수업수.
49 | new_y = sess.run(y, feed_dict={X: new_x})
50 |
51 | print("공부 시간: %d, 개인 과외 수: %d" % (new_x[:,0], new_x[:,1]))
52 | print("합격 가능성: %6.2f %%" % (new_y*100))
53 |
--------------------------------------------------------------------------------
/deeplearning/deep_class/08_XOR.py:
--------------------------------------------------------------------------------
1 | #-*- coding: utf-8 -*-
2 |
3 | import numpy as np
4 |
5 | # 가중치와 바이어스
6 | w11 = np.array([-2, -2])
7 | w12 = np.array([2, 2])
8 | w2 = np.array([1, 1])
9 | b1 = 3
10 | b2 = -1
11 | b3 = -1
12 |
13 | # 퍼셉트론
14 | def MLP(x, w, b):
15 | y = np.sum(w * x) + b
16 | if y <= 0:
17 | return 0
18 | else:
19 | return 1
20 |
21 | # NAND 게이트
22 | def NAND(x1,x2):
23 | return MLP(np.array([x1, x2]), w11, b1)
24 |
25 | # OR 게이트
26 | def OR(x1,x2):
27 | return MLP(np.array([x1, x2]), w12, b2)
28 |
29 | # AND 게이트
30 | def AND(x1,x2):
31 | return MLP(np.array([x1, x2]), w2, b3)
32 |
33 | # XOR 게이트
34 | def XOR(x1,x2):
35 | return AND(NAND(x1, x2),OR(x1,x2))
36 |
37 |
38 | # x1, x2 값을 번갈아 대입해 가며 최종값 출력
39 | if __name__ == '__main__':
40 | for x in [(0, 0), (1, 0), (0, 1), (1, 1)]:
41 | y = XOR(x[0], x[1])
42 | print("입력 값: " + str(x) + " 출력 값: " + str(y))
43 |
44 |
--------------------------------------------------------------------------------
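
As a quick check of the gate composition above, take the input (1, 1): NAND(1, 1) = 0 because -2 - 2 + 3 = -1 ≤ 0, OR(1, 1) = 1 because 2 + 2 - 1 = 3 > 0, and AND(0, 1) = 0 because 0 + 1 - 1 = 0 ≤ 0, so XOR(1, 1) = 0. A one-line sketch that can be appended to the script to show the intermediate gate outputs:

print(NAND(1, 1), OR(1, 1), AND(NAND(1, 1), OR(1, 1)))   # 0 1 0
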
/deeplearning/deep_class/09_XOR-backpropagation.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import random
4 | import numpy as np
5 |
6 | random.seed(777)
7 |
8 | # 환경 변수 지정
9 |
10 | # 입력값 및 타겟값
11 | data = [
12 | [[0, 0], [0]],
13 | [[0, 1], [1]],
14 | [[1, 0], [1]],
15 | [[1, 1], [0]]
16 | ]
17 |
18 | # 실행 횟수(iterations), 학습률(lr), 모멘텀 계수(mo) 설정
19 | iterations=5000
20 | lr=0.1
21 | mo=0.9
22 |
23 | # 활성화 함수 - 1. 시그모이드
24 | # 미분할 때와 아닐 때의 각각의 값
25 | def sigmoid(x, derivative=False):
26 | if (derivative == True):
27 | return x * (1 - x)
28 | return 1 / (1 + np.exp(-x))
29 |
30 | # 활성화 함수 - 2. tanh
31 | # tanh 함수의 미분은 1 - (활성화 함수 출력의 제곱)
32 | def tanh(x, derivative=False):
33 | if (derivative == True):
34 | return 1 - x ** 2
35 | return np.tanh(x)
36 |
37 | # 가중치 배열 만드는 함수
38 | def makeMatrix(i, j, fill=0.0):
39 | mat = []
40 | for i in range(i):
41 | mat.append([fill] * j)
42 | return mat
43 |
44 | # 신경망의 실행
45 | class NeuralNetwork:
46 |
47 | # 초깃값의 지정
48 | def __init__(self, num_x, num_yh, num_yo, bias=1):
49 |
50 | # 입력값(num_x), 은닉층 초깃값(num_yh), 출력층 초깃값(num_yo), 바이어스
51 | self.num_x = num_x + bias # 바이어스는 1로 지정(본문 참조)
52 | self.num_yh = num_yh
53 | self.num_yo = num_yo
54 |
55 | # 활성화 함수 초깃값
56 | self.activation_input = [1.0] * self.num_x
57 | self.activation_hidden = [1.0] * self.num_yh
58 | self.activation_out = [1.0] * self.num_yo
59 |
60 | # 가중치 입력 초깃값
61 | self.weight_in = makeMatrix(self.num_x, self.num_yh)
62 | for i in range(self.num_x):
63 | for j in range(self.num_yh):
64 | self.weight_in[i][j] = random.random()
65 |
66 | # 가중치 출력 초깃값
67 | self.weight_out = makeMatrix(self.num_yh, self.num_yo)
68 | for j in range(self.num_yh):
69 | for k in range(self.num_yo):
70 | self.weight_out[j][k] = random.random()
71 |
72 | # 모멘텀 SGD를 위한 이전 가중치 초깃값
73 | self.gradient_in = makeMatrix(self.num_x, self.num_yh)
74 | self.gradient_out = makeMatrix(self.num_yh, self.num_yo)
75 |
76 | # 업데이트 함수
77 | def update(self, inputs):
78 |
79 | # 입력 레이어의 활성화 함수
80 | for i in range(self.num_x - 1):
81 | self.activation_input[i] = inputs[i]
82 |
83 | # 은닉층의 활성화 함수
84 | for j in range(self.num_yh):
85 | sum = 0.0
86 | for i in range(self.num_x):
87 | sum = sum + self.activation_input[i] * self.weight_in[i][j]
88 | # 시그모이드와 tanh 중에서 활성화 함수 선택
89 | self.activation_hidden[j] = tanh(sum, False)
90 |
91 | # 출력층의 활성화 함수
92 | for k in range(self.num_yo):
93 | sum = 0.0
94 | for j in range(self.num_yh):
95 | sum = sum + self.activation_hidden[j] * self.weight_out[j][k]
96 | # 시그모이드와 tanh 중에서 활성화 함수 선택
97 | self.activation_out[k] = tanh(sum, False)
98 |
99 | return self.activation_out[:]
100 |
101 | # 역전파의 실행
102 | def backPropagate(self, targets):
103 |
104 | # 델타 출력 계산
105 | output_deltas = [0.0] * self.num_yo
106 | for k in range(self.num_yo):
107 | error = targets[k] - self.activation_out[k]
108 | # 시그모이드와 tanh 중에서 활성화 함수 선택, 미분 적용
109 | output_deltas[k] = tanh(self.activation_out[k], True) * error
110 |
111 | # 은닉 노드의 오차 함수
112 | hidden_deltas = [0.0] * self.num_yh
113 | for j in range(self.num_yh):
114 | error = 0.0
115 | for k in range(self.num_yo):
116 | error = error + output_deltas[k] * self.weight_out[j][k]
117 | # 시그모이드와 tanh 중에서 활성화 함수 선택, 미분 적용
118 | hidden_deltas[j] = tanh(self.activation_hidden[j], True) * error
119 |
120 | # 출력 가중치 업데이트
121 | for j in range(self.num_yh):
122 | for k in range(self.num_yo):
123 | gradient = output_deltas[k] * self.activation_hidden[j]
124 | v = mo * self.gradient_out[j][k] - lr * gradient
125 | self.weight_out[j][k] += v
126 | self.gradient_out[j][k] = gradient
127 |
128 | # 입력 가중치 업데이트
129 | for i in range(self.num_x):
130 | for j in range(self.num_yh):
131 | gradient = hidden_deltas[j] * self.activation_input[i]
132 | v = mo*self.gradient_in[i][j] - lr * gradient
133 | self.weight_in[i][j] += v
134 | self.gradient_in[i][j] = gradient
135 |
136 | # 오차의 계산(최소 제곱법)
137 | error = 0.0
138 | for k in range(len(targets)):
139 | error = error + 0.5 * (targets[k] - self.activation_out[k]) ** 2
140 | return error
141 |
142 | # 학습 실행
143 | def train(self, patterns):
144 | for i in range(iterations):
145 | error = 0.0
146 | for p in patterns:
147 | inputs = p[0]
148 | targets = p[1]
149 | self.update(inputs)
150 | error = error + self.backPropagate(targets)
151 | if i % 500 == 0:
152 | print('error: %-.5f' % error)
153 | # 결괏값 출력
154 | def result(self, patterns):
155 | for p in patterns:
156 | print('Input: %s, Predict: %s' % (p[0], self.update(p[0])))
157 |
158 | if __name__ == '__main__':
159 |
160 | # 입력 노드 두 개, 은닉층 노드 두 개, 출력 노드 한 개를 갖도록 설정
161 | n = NeuralNetwork(2, 2, 1)
162 |
163 | # 학습 실행
164 | n.train(data)
165 |
166 | # 결괏값 출력
167 | n.result(data)
168 |
169 |
170 | # Reference: http://arctrix.com/nas/python/bpnn.py (Neil Schemenauer)
171 |
172 |
--------------------------------------------------------------------------------
/deeplearning/deep_code/01_My_First_Deeplearning.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # 코드 내부에 한글을 사용가능 하게 해주는 부분입니다.
3 |
4 | # 딥러닝을 구동하는 데 필요한 케라스 함수를 불러옵니다.
5 | from keras.models import Sequential
6 | from keras.layers import Dense
7 |
8 | # 필요한 라이브러리를 불러옵니다.
9 | import numpy
10 | import tensorflow as tf
11 |
12 | # 실행할 때마다 같은 결과를 출력하기 위해 설정하는 부분입니다.
13 | seed = 0
14 | numpy.random.seed(seed)
15 | tf.set_random_seed(seed)
16 |
17 | # 준비된 수술 환자 데이터를 불러들입니다.
18 | Data_set = numpy.loadtxt("../dataset/ThoraricSurgery.csv", delimiter=",")
19 |
20 | # 환자의 기록과 수술 결과를 X와 Y로 구분하여 저장합니다.
21 | X = Data_set[:,0:17]
22 | Y = Data_set[:,17]
23 |
24 | # 딥러닝 구조를 결정합니다(모델을 설정하고 실행하는 부분입니다).
25 | model = Sequential()
26 | model.add(Dense(30, input_dim=17, activation='relu'))
27 | model.add(Dense(1, activation='sigmoid'))
28 |
29 | # 딥러닝을 실행합니다.
30 | model.compile(loss='mean_squared_error', optimizer='adam', metrics=['accuracy'])
31 | model.fit(X, Y, epochs=30, batch_size=10)
32 |
33 | # 결과를 출력합니다.
34 | print("\n Accuracy: %.4f" % (model.evaluate(X, Y)[1]))
35 |
36 |
37 |
--------------------------------------------------------------------------------
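
One detail worth noting in the listing above: the output layer is a sigmoid unit, but the model is compiled with mean_squared_error, whereas the later binary-classification scripts in this repository (02_Pima_Indian.py, 08_Wine.py) use cross-entropy for the same kind of output. A one-line sketch of that alternative compile call, should it be preferred here as well:

model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
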
/deeplearning/deep_code/02_Data_preparation.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # 코드 내부에 한글을 사용가능 하게 해주는 부분입니다.
3 |
4 | # pandas 라이브러리를 불러옵니다.
5 | import pandas as pd
6 | import matplotlib.pyplot as plt
7 | import seaborn as sns
8 |
9 | # 피마 인디언 당뇨병 데이터셋을 불러옵니다. 불러올 때 각 컬럼에 해당하는 이름을 지정합니다.
10 | df = pd.read_csv('../dataset/pima-indians-diabetes.csv',
11 | names = ["pregnant", "plasma", "pressure", "thickness", "insulin", "BMI", "pedigree", "age", "class"])
12 |
13 | # 처음 5줄을 봅니다.
14 | print(df.head(5))
15 |
16 | # 데이터의 전반적인 정보를 확인해 봅니다.
17 | print(df.info())
18 |
19 | # 각 정보별 특징을 좀더 자세히 출력합니다.
20 | print(df.describe())
21 |
22 | # 데이터 중 혈당 수치(plasma)와 클래스만을 출력해 봅니다.
23 | print(df[['plasma', 'class']])
24 |
25 | # 데이터 간의 상관관계를 그래프로 표현해 봅니다.
26 |
27 | colormap = plt.cm.gist_heat #그래프의 색상 구성을 정합니다.
28 | plt.figure(figsize=(12,12)) #그래프의 크기를 정합니다.
29 |
30 | # 그래프의 속성을 결정합니다. vmax의 값을 0.5로 지정해 0.5에 가까울 수록 밝은 색으로 표시되게 합니다.
31 | sns.heatmap(df.corr(),linewidths=0.1,vmax=0.5, cmap=colormap, linecolor='white', annot=True)
32 | plt.show()
33 |
34 | grid = sns.FacetGrid(df, col='class')
35 | grid.map(plt.hist, 'plasma', bins=10)
36 | plt.show()
37 |
--------------------------------------------------------------------------------
/deeplearning/deep_code/02_Pima_Indian.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential
2 | from keras.layers import Dense
3 | import numpy
4 | import tensorflow as tf
5 |
6 | # seed 값 생성
7 | seed = 0
8 | numpy.random.seed(seed)
9 | tf.set_random_seed(seed)
10 |
11 | # 데이터 로드
12 | dataset = numpy.loadtxt("../dataset/pima-indians-diabetes.csv", delimiter=",")
13 | X = dataset[:,0:8]
14 | Y = dataset[:,8]
15 |
16 | # 모델의 설정
17 | model = Sequential()
18 | model.add(Dense(12, input_dim=8, activation='relu'))
19 | model.add(Dense(8, activation='relu'))
20 | model.add(Dense(1, activation='sigmoid'))
21 |
22 | # 모델 컴파일
23 | model.compile(loss='binary_crossentropy',
24 | optimizer='adam',
25 | metrics=['accuracy'])
26 |
27 | # 모델 실행
28 | model.fit(X, Y, epochs=200, batch_size=10)
29 |
30 | # 결과 출력
31 | print("\n Accuracy: %.4f" % (model.evaluate(X, Y)[1]))
32 |
33 |
34 |
--------------------------------------------------------------------------------
/deeplearning/deep_code/03_Iris_Multi_Classfication.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential
2 | from keras.layers.core import Dense
3 | from keras.utils import np_utils
4 | from sklearn.preprocessing import LabelEncoder
5 |
6 | import pandas as pd
7 | import seaborn as sns
8 | import matplotlib.pyplot as plt
9 | import numpy
10 | import tensorflow as tf
11 |
12 | # seed 값 설정
13 | seed = 0
14 | numpy.random.seed(seed)
15 | tf.set_random_seed(seed)
16 |
17 | # 데이터 입력
18 | df = pd.read_csv('../dataset/iris.csv', names = ["sepal_length", "sepal_width", "petal_length", "petal_width", "species"])
19 |
20 | # 그래프로 확인
21 | sns.pairplot(df, hue='species');
22 | plt.show()
23 |
24 | # 데이터 분류
25 | dataset = df.values
26 | X = dataset[:,0:4].astype(float)
27 | Y_obj = dataset[:,4]
28 |
29 | # 문자열을 숫자로 변환
30 | e = LabelEncoder()
31 | e.fit(Y_obj)
32 | Y = e.transform(Y_obj)
33 | Y_encoded = np_utils.to_categorical(Y)
34 |
35 | # 모델의 설정
36 | model = Sequential()
37 | model.add(Dense(16, input_dim=4, activation='relu'))
38 | model.add(Dense(3, activation='softmax'))
39 |
40 | # 모델 컴파일
41 | model.compile(loss='categorical_crossentropy',
42 | optimizer='adam',
43 | metrics=['accuracy'])
44 |
45 | # 모델 실행
46 | model.fit(X, Y_encoded, epochs=50, batch_size=1)
47 |
48 | # 결과 출력
49 | print("\n Accuracy: %.4f" % (model.evaluate(X, Y_encoded)[1]))
--------------------------------------------------------------------------------
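
The LabelEncoder / to_categorical pair above first maps the three species strings to the integers 0-2 and then expands them into one-hot vectors. A minimal sketch of what to_categorical produces:

from keras.utils import np_utils

print(np_utils.to_categorical([0, 1, 2]))
# [[1. 0. 0.]
#  [0. 1. 0.]
#  [0. 0. 1.]]
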
/deeplearning/deep_code/04-Sonar.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential
2 | from keras.layers.core import Dense
3 | from sklearn.preprocessing import LabelEncoder
4 |
5 | import pandas as pd
6 | import numpy
7 | import tensorflow as tf
8 |
9 | # seed 값 설정
10 | seed = 0
11 | numpy.random.seed(seed)
12 | tf.set_random_seed(seed)
13 |
14 | # 데이터 입력
15 | df = pd.read_csv('../dataset/sonar.csv', header=None)
16 | '''
17 | # 데이터 개괄 보기
18 | print(df.info())
19 |
20 | # 데이터의 일부분 미리 보기
21 | print(df.head())
22 | '''
23 | dataset = df.values
24 | X = dataset[:,0:60]
25 | Y_obj = dataset[:,60]
26 |
27 | # 문자열 변환
28 | e = LabelEncoder()
29 | e.fit(Y_obj)
30 | Y = e.transform(Y_obj)
31 |
32 | # 모델 설정
33 | model = Sequential()
34 | model.add(Dense(24, input_dim=60, activation='relu'))
35 | model.add(Dense(10, activation='relu'))
36 | model.add(Dense(1, activation='sigmoid'))
37 |
38 | # 모델 컴파일
39 | model.compile(loss='mean_squared_error',
40 | optimizer='adam',
41 | metrics=['accuracy'])
42 |
43 | # 모델 실행
44 | model.fit(X, Y, epochs=200, batch_size=5)
45 |
46 | # 결과 출력
47 | print("\n Accuracy: %.4f" % (model.evaluate(X, Y)[1]))
48 |
--------------------------------------------------------------------------------
/deeplearning/deep_code/05_Sonar_Train_Test.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential
2 | from keras.layers.core import Dense
3 | from sklearn.preprocessing import LabelEncoder
4 | from sklearn.model_selection import train_test_split
5 |
6 | import pandas as pd
7 | import numpy
8 | import tensorflow as tf
9 |
10 | # seed 값 설정
11 | seed = 0
12 | numpy.random.seed(seed)
13 | tf.set_random_seed(seed)
14 |
15 | df = pd.read_csv('../dataset/sonar.csv', header=None)
16 |
17 | print(df.info())
18 | print(df.head())
19 |
20 | dataset = df.values
21 | X = dataset[:,0:60]
22 | Y_obj = dataset[:,60]
23 |
24 | e = LabelEncoder()
25 | e.fit(Y_obj)
26 | Y = e.transform(Y_obj)
27 |
28 | # 학습 셋과 테스트 셋의 구분
29 | X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.3, random_state=seed)
30 |
31 | model = Sequential()
32 | model.add(Dense(24, input_dim=60, activation='relu'))
33 | model.add(Dense(10, activation='relu'))
34 | model.add(Dense(1, activation='sigmoid'))
35 |
36 | model.compile(loss='mean_squared_error',
37 | optimizer='adam',
38 | metrics=['accuracy'])
39 |
40 | model.fit(X_train, Y_train, epochs=130, batch_size=5)
41 |
42 | # 테스트셋에 모델 적용
43 | print("\n Test Accuracy: %.4f" % (model.evaluate(X_test, Y_test)[1]))
44 |
45 |
--------------------------------------------------------------------------------
/deeplearning/deep_code/06-Sonar-Save-Model.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential, load_model
2 | from keras.layers.core import Dense
3 | from sklearn.preprocessing import LabelEncoder
4 |
5 | import pandas as pd
6 | import numpy
7 | import tensorflow as tf
8 |
9 | # seed 값 설정
10 | seed = 0
11 | numpy.random.seed(seed)
12 | tf.set_random_seed(seed)
13 |
14 | df = pd.read_csv('../dataset/sonar.csv', header=None)
15 | '''
16 | print(df.info())
17 | print(df.head())
18 | '''
19 | dataset = df.values
20 | X = dataset[:,0:60]
21 | Y_obj = dataset[:,60]
22 |
23 | e = LabelEncoder()
24 | e.fit(Y_obj)
25 | Y = e.transform(Y_obj)
26 | # 학습셋과 테스트셋을 나눔
27 | from sklearn.model_selection import train_test_split
28 |
29 | X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.3, random_state=seed)
30 |
31 | model = Sequential()
32 | model.add(Dense(24, input_dim=60, activation='relu'))
33 | model.add(Dense(10, activation='relu'))
34 | model.add(Dense(1, activation='sigmoid'))
35 |
36 | model.compile(loss='mean_squared_error',
37 | optimizer='adam',
38 | metrics=['accuracy'])
39 |
40 | model.fit(X_train, Y_train, epochs=130, batch_size=5)
41 | model.save('my_model.h5') # 모델을 컴퓨터에 저장
42 |
43 | del model # 테스트를 위해 메모리 내의 모델을 삭제
44 | model = load_model('my_model.h5') # 모델을 새로 불러옴
45 |
46 | print("\n Test Accuracy: %.4f" % (model.evaluate(X_test, Y_test)[1])) # 불러온 모델로 테스트 실행
47 |
48 |
--------------------------------------------------------------------------------
/deeplearning/deep_code/07_Sonar-K-fold.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential
2 | from keras.layers.core import Dense
3 | from sklearn.preprocessing import LabelEncoder
4 | from sklearn.model_selection import StratifiedKFold
5 |
6 | import numpy
7 | import pandas as pd
8 | import tensorflow as tf
9 |
10 | # seed 값 설정
11 | seed = 0
12 | numpy.random.seed(seed)
13 | tf.set_random_seed(seed)
14 |
15 | df = pd.read_csv('../dataset/sonar.csv', header=None)
16 |
17 | dataset = df.values
18 | X = dataset[:,0:60]
19 | Y_obj = dataset[:,60]
20 |
21 | e = LabelEncoder()
22 | e.fit(Y_obj)
23 | Y = e.transform(Y_obj)
24 |
25 | # 10개의 파일로 쪼갬
26 | n_fold = 10
27 | skf = StratifiedKFold(n_splits=n_fold, shuffle=True, random_state=seed)
28 |
29 | # 빈 accuracy 배열
30 | accuracy = []
31 |
32 | # 모델의 설정, 컴파일, 실행
33 | for train, test in skf.split(X, Y):
34 | model = Sequential()
35 | model.add(Dense(24, input_dim=60, activation='relu'))
36 | model.add(Dense(10, activation='relu'))
37 | model.add(Dense(1, activation='sigmoid'))
38 | model.compile(loss='mean_squared_error',
39 | optimizer='adam',
40 | metrics=['accuracy'])
41 | model.fit(X[train], Y[train], epochs=100, batch_size=5)
42 | k_accuracy = "%.4f" % (model.evaluate(X[test], Y[test])[1])
43 | accuracy.append(k_accuracy)
44 |
45 | # 결과 출력
46 | print("\n %.f fold accuracy:" % n_fold, accuracy)
47 |
--------------------------------------------------------------------------------
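
The accuracy list above collects each fold's result as a formatted string. A small sketch that could be appended after the loop to report the average over the folds:

# Convert the stored strings back to numbers and average them.
mean_accuracy = sum(float(acc) for acc in accuracy) / len(accuracy)
print("mean accuracy over %d folds: %.4f" % (n_fold, mean_accuracy))
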
/deeplearning/deep_code/08_Wine.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential
2 | from keras.layers import Dense
3 | from keras.callbacks import ModelCheckpoint, EarlyStopping
4 |
5 | import pandas as pd
6 | import numpy
7 | import tensorflow as tf
8 | import matplotlib.pyplot as plt
9 |
10 | # seed 값 설정
11 | seed = 0
12 | numpy.random.seed(seed)
13 | tf.set_random_seed(seed)
14 |
15 | # 데이터 입력
16 | df_pre = pd.read_csv('../dataset/wine.csv', header=None)
17 | df = df_pre.sample(frac=1)
18 |
19 | dataset = df.values
20 | X = dataset[:,0:12]
21 | Y = dataset[:,12]
22 |
23 | # 모델 설정
24 | model = Sequential()
25 | model.add(Dense(30, input_dim=12, activation='relu'))
26 | model.add(Dense(12, activation='relu'))
27 | model.add(Dense(8, activation='relu'))
28 | model.add(Dense(1, activation='sigmoid'))
29 |
30 | #모델 컴파일
31 | model.compile(loss='binary_crossentropy',
32 | optimizer='adam',
33 | metrics=['accuracy'])
34 |
35 | # 모델 실행
36 | model.fit(X, Y, epochs=200, batch_size=200)
37 |
38 | # 결과 출력
39 | print("\n Accuracy: %.4f" % (model.evaluate(X, Y)[1]))
40 |
--------------------------------------------------------------------------------
/deeplearning/deep_code/09_Wine_Checkpoint.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential
2 | from keras.layers import Dense
3 | from keras.callbacks import ModelCheckpoint
4 |
5 | import pandas as pd
6 | import numpy
7 | import os
8 | import tensorflow as tf
9 |
10 | # seed 값 설정
11 | seed = 0
12 | numpy.random.seed(seed)
13 | tf.set_random_seed(seed)
14 |
15 | df_pre = pd.read_csv('../dataset/wine.csv', header=None)
16 | df = df_pre.sample(frac=1)
17 |
18 | dataset = df.values
19 | X = dataset[:,0:12]
20 | Y = dataset[:,12]
21 |
22 | # 모델의 설정
23 | model = Sequential()
24 | model.add(Dense(30, input_dim=12, activation='relu'))
25 | model.add(Dense(12, activation='relu'))
26 | model.add(Dense(8, activation='relu'))
27 | model.add(Dense(1, activation='sigmoid'))
28 |
29 | # 모델 컴파일
30 | model.compile(loss='binary_crossentropy',
31 | optimizer='adam',
32 | metrics=['accuracy'])
33 |
34 | # 모델 저장 폴더 설정
35 | MODEL_DIR = './model/'
36 | if not os.path.exists(MODEL_DIR):
37 | os.mkdir(MODEL_DIR)
38 |
39 | # 모델 저장 조건 설정
40 | modelpath="./model/{epoch:02d}-{val_loss:.4f}.hdf5"
41 | checkpointer = ModelCheckpoint(filepath=modelpath, monitor='val_loss', verbose=1, save_best_only=True)
42 |
43 | # 모델 실행 및 저장
44 | model.fit(X, Y, validation_split=0.2, epochs=200, batch_size=200, verbose=0, callbacks=[checkpointer])
45 |
--------------------------------------------------------------------------------
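
The checkpoints written above are named after the pattern {epoch:02d}-{val_loss:.4f}.hdf5, and a saved file can be restored the same way 06-Sonar-Save-Model.py reloads its model. A sketch, where the file name is hypothetical and should be replaced by one the run actually produced:

from keras.models import load_model

best_model = load_model('./model/24-0.0612.hdf5')   # hypothetical checkpoint name
print("\n Accuracy: %.4f" % (best_model.evaluate(X, Y)[1]))
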
/deeplearning/deep_code/10_Wine_Overfit_Graph.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential
2 | from keras.layers import Dense
3 | from keras.callbacks import ModelCheckpoint
4 |
5 | import pandas as pd
6 | import numpy
7 | import os
8 | import matplotlib.pyplot as plt
9 | import tensorflow as tf
10 |
11 | # seed 값 설정
12 | seed = 0
13 | numpy.random.seed(seed)
14 | tf.set_random_seed(seed)
15 |
16 | df_pre = pd.read_csv('../dataset/wine.csv', header=None)
17 | df = df_pre.sample(frac=0.15)
18 |
19 | dataset = df.values
20 | X = dataset[:,0:12]
21 | Y = dataset[:,12]
22 |
23 | # 모델의 설정
24 | model = Sequential()
25 | model.add(Dense(30, input_dim=12, activation='relu'))
26 | model.add(Dense(12, activation='relu'))
27 | model.add(Dense(8, activation='relu'))
28 | model.add(Dense(1, activation='sigmoid'))
29 |
30 | # 모델 컴파일
31 | model.compile(loss='binary_crossentropy',
32 | optimizer='adam',
33 | metrics=['accuracy'])
34 |
35 | # 모델 저장 폴더 설정
36 | MODEL_DIR = './model/'
37 | if not os.path.exists(MODEL_DIR):
38 | os.mkdir(MODEL_DIR)
39 |
40 | # 모델 저장 조건 설정
41 | modelpath="./model/{epoch:02d}-{val_loss:.4f}.hdf5"
42 | checkpointer = ModelCheckpoint(filepath=modelpath, monitor='val_loss', verbose=1, save_best_only=True)
43 |
44 | # 모델 실행(학습 결과를 history에 저장)
45 | history = model.fit(X, Y, validation_split=0.33, epochs=3500, batch_size=500)
46 |
47 | # y_vloss에 검증셋(validation_split)으로 측정한 오차 값을 저장
48 | y_vloss=history.history['val_loss']
49 |
50 | # y_acc 에 학습 셋으로 측정한 정확도의 값을 저장
51 | y_acc=history.history['acc']
52 |
53 | # x값을 지정하고 정확도를 파란색으로, 오차를 빨간색으로 표시
54 | x_len = numpy.arange(len(y_acc))
55 | plt.plot(x_len, y_vloss, "o", c="red", markersize=3)
56 | plt.plot(x_len, y_acc, "o", c="blue", markersize=3)
57 |
58 | plt.show()
59 |
--------------------------------------------------------------------------------
/deeplearning/deep_code/11_Wine_Early_Stop.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential
2 | from keras.layers import Dense
3 | from keras.callbacks import EarlyStopping
4 |
5 | import pandas as pd
6 | import numpy
7 | import tensorflow as tf
8 |
9 | # seed 값 설정
10 | seed = 0
11 | numpy.random.seed(seed)
12 | tf.set_random_seed(seed)
13 |
14 | df_pre = pd.read_csv('../dataset/wine.csv', header=None)
15 | df = df_pre.sample(frac=0.15)
16 |
17 | dataset = df.values
18 | X = dataset[:,0:12]
19 | Y = dataset[:,12]
20 |
21 | model = Sequential()
22 | model.add(Dense(30, input_dim=12, activation='relu'))
23 | model.add(Dense(12, activation='relu'))
24 | model.add(Dense(8, activation='relu'))
25 | model.add(Dense(1, activation='sigmoid'))
26 |
27 | model.compile(loss='binary_crossentropy',
28 | optimizer='adam',
29 | metrics=['accuracy'])
30 |
31 | # 자동 중단 설정
32 | early_stopping_callback = EarlyStopping(monitor='val_loss', patience=100)
33 |
34 | # 모델 실행
35 | model.fit(X, Y, validation_split=0.2, epochs=2000, batch_size=500, callbacks=[early_stopping_callback])
36 |
37 | # 결과 출력
38 | print("\n Accuracy: %.4f" % (model.evaluate(X, Y)[1]))
39 |
--------------------------------------------------------------------------------
/deeplearning/deep_code/12_Wine_Check_and_Stop.py:
--------------------------------------------------------------------------------
1 | from keras.models import Sequential
2 | from keras.layers import Dense
3 | from keras.callbacks import ModelCheckpoint,EarlyStopping
4 |
5 | import pandas as pd
6 | import numpy
7 | import os
8 | import tensorflow as tf
9 |
10 | # seed 값 설정
11 | seed = 0
12 | numpy.random.seed(seed)
13 | tf.set_random_seed(seed)
14 |
15 | df_pre = pd.read_csv('../dataset/wine.csv', header=None)
16 | df = df_pre.sample(frac=0.15)
17 |
18 | dataset = df.values
19 | X = dataset[:,0:12]
20 | Y = dataset[:,12]
21 |
22 | model = Sequential()
23 | model.add(Dense(30, input_dim=12, activation='relu'))
24 | model.add(Dense(12, activation='relu'))
25 | model.add(Dense(8, activation='relu'))
26 | model.add(Dense(1, activation='sigmoid'))
27 |
28 | model.compile(loss='binary_crossentropy',
29 | optimizer='adam',
30 | metrics=['accuracy'])
31 |
32 | # 모델 저장 폴더 만들기
33 | MODEL_DIR = './model/'
34 | if not os.path.exists(MODEL_DIR):
35 | os.mkdir(MODEL_DIR)
36 |
37 | modelpath="./model/{epoch:02d}-{val_loss:.4f}.hdf5"
38 |
39 | # 모델 업데이트 및 저장
40 | checkpointer = ModelCheckpoint(filepath=modelpath, monitor='val_loss', verbose=1, save_best_only=True)
41 |
42 | # 학습 자동 중단 설정
43 | early_stopping_callback = EarlyStopping(monitor='val_loss', patience=100)
44 |
45 | model.fit(X, Y, validation_split=0.2, epochs=3500, batch_size=500, verbose=0, callbacks=[early_stopping_callback,checkpointer])
46 |
47 |
--------------------------------------------------------------------------------
/deeplearning/deep_code/13_Boston.py:
--------------------------------------------------------------------------------
1 | #-*- coding: utf-8 -*-
2 | from keras.models import Sequential
3 | from keras.layers import Dense
4 | from sklearn.model_selection import train_test_split
5 |
6 | import numpy
7 | import pandas as pd
8 | import tensorflow as tf
9 |
10 | # seed 값 설정
11 | seed = 0
12 | numpy.random.seed(seed)
13 | tf.set_random_seed(seed)
14 |
15 | df = pd.read_csv("../dataset/housing.csv", delim_whitespace=True, header=None)
16 | '''
17 | print(df.info())
18 | print(df.head())
19 | '''
20 | dataset = df.values
21 | X = dataset[:,0:13]
22 | Y = dataset[:,13]
23 |
24 | X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.3, random_state=seed)
25 |
26 | model = Sequential()
27 | model.add(Dense(30, input_dim=13, activation='relu'))
28 | model.add(Dense(6, activation='relu'))
29 | model.add(Dense(1))
30 |
31 | model.compile(loss='mean_squared_error',
32 | optimizer='adam')
33 |
34 | model.fit(X_train, Y_train, epochs=200, batch_size=10)
35 |
36 | # 예측 값과 실제 값의 비교
37 | Y_prediction = model.predict(X_test).flatten()
38 | for i in range(10):
39 | label = Y_test[i]
40 | prediction = Y_prediction[i]
41 | print("실제가격: {:.3f}, 예상가격: {:.3f}".format(label, prediction))
42 |
43 |
--------------------------------------------------------------------------------
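
Because this is a regression model compiled with only a loss and no accuracy metric, the accuracy line used in the classification scripts does not apply; the held-out error can be reported instead. A small sketch that could be appended after training:

# model.evaluate returns the scalar test-set MSE here, since no metrics were compiled in.
test_mse = model.evaluate(X_test, Y_test)
print("Test MSE: %.3f" % test_mse)
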
/deeplearning/deep_code/14_MNIST_Data.py:
--------------------------------------------------------------------------------
1 | #-*- coding: utf-8 -*-
2 |
3 | from keras.datasets import mnist
4 | from keras.utils import np_utils
5 |
6 | import numpy
7 | import sys
8 | import tensorflow as tf
9 |
10 | # seed 값 설정
11 | seed = 0
12 | numpy.random.seed(seed)
13 | tf.set_random_seed(seed)
14 |
15 | # MNIST데이터셋 불러오기
16 | (X_train, Y_class_train), (X_test, Y_class_test) = mnist.load_data()
17 |
18 | print("학습셋 이미지 수 : %d 개" % (X_train.shape[0]))
19 | print("테스트셋 이미지 수 : %d 개" % (X_test.shape[0]))
20 |
21 | # 그래프로 확인
22 | import matplotlib.pyplot as plt
23 | plt.imshow(X_train[0], cmap='Greys')
24 | plt.show()
25 |
26 | # 코드로 확인
27 | for x in X_train[0]:
28 | for i in x:
29 | sys.stdout.write('%d\t' % i)
30 | sys.stdout.write('\n')
31 |
32 | # 차원 변환 과정
33 | X_train = X_train.reshape(X_train.shape[0], 784)
34 | X_train = X_train.astype('float64')
35 | X_train = X_train / 255
36 |
37 | X_test = X_test.reshape(X_test.shape[0], 784).astype('float64') / 255
38 |
39 | #print(X_train[0])
40 |
41 | # 클래스 값 확인
42 | print("class : %d " % (Y_class_train[0]))
43 |
44 | # 바이너리화 과정
45 | Y_train = np_utils.to_categorical(Y_class_train, 10)
46 | Y_test = np_utils.to_categorical(Y_class_test, 10)
47 |
48 | print(Y_train[0])
49 |
50 |
51 |
52 |
--------------------------------------------------------------------------------
/deeplearning/deep_code/15_MNIST_Simple.py:
--------------------------------------------------------------------------------
1 | #-*- coding: utf-8 -*-
2 |
3 | from keras.datasets import mnist
4 | from keras.utils import np_utils
5 | from keras.models import Sequential
6 | from keras.layers import Dense
7 | from keras.callbacks import ModelCheckpoint,EarlyStopping
8 |
9 | import matplotlib.pyplot as plt
10 | import numpy
11 | import os
12 | import tensorflow as tf
13 |
14 | # seed 값 설정
15 | seed = 0
16 | numpy.random.seed(seed)
17 | tf.set_random_seed(seed)
18 |
19 | # MNIST 데이터 불러오기
20 | (X_train, Y_train), (X_test, Y_test) = mnist.load_data()
21 |
22 | X_train = X_train.reshape(X_train.shape[0], 784).astype('float32') / 255
23 | X_test = X_test.reshape(X_test.shape[0], 784).astype('float32') / 255
24 |
25 | Y_train = np_utils.to_categorical(Y_train, 10)
26 | Y_test = np_utils.to_categorical(Y_test, 10)
27 |
28 | # 모델 프레임 설정
29 | model = Sequential()
30 | model.add(Dense(512, input_dim=784, activation='relu'))
31 | model.add(Dense(10, activation='softmax'))
32 |
33 | # 모델 실행 환경 설정
34 | model.compile(loss='categorical_crossentropy',
35 | optimizer='adam',
36 | metrics=['accuracy'])
37 |
38 | # 모델 최적화 설정
39 | MODEL_DIR = './model/'
40 | if not os.path.exists(MODEL_DIR):
41 | os.mkdir(MODEL_DIR)
42 |
43 | modelpath="./model/{epoch:02d}-{val_loss:.4f}.hdf5"
44 | checkpointer = ModelCheckpoint(filepath=modelpath, monitor='val_loss', verbose=1, save_best_only=True)
45 | early_stopping_callback = EarlyStopping(monitor='val_loss', patience=10)
46 |
47 | # 모델의 실행
48 | history = model.fit(X_train, Y_train, validation_data=(X_test, Y_test), epochs=30, batch_size=200, verbose=0, callbacks=[early_stopping_callback,checkpointer])
49 |
50 | # 테스트 정확도 출력
51 | print("\n Test Accuracy: %.4f" % (model.evaluate(X_test, Y_test)[1]))
52 |
53 | # 테스트 셋의 오차
54 | y_vloss = history.history['val_loss']
55 |
56 | # 학습셋의 오차
57 | y_loss = history.history['loss']
58 |
59 | # 그래프로 표현
60 | x_len = numpy.arange(len(y_loss))
61 | plt.plot(x_len, y_vloss, marker='.', c="red", label='Testset_loss')
62 | plt.plot(x_len, y_loss, marker='.', c="blue", label='Trainset_loss')
63 |
64 | # 그래프에 그리드를 주고 레이블을 표시
65 | plt.legend(loc='upper right')
66 | # plt.axis([0, 20, 0, 0.35])
67 | plt.grid()
68 | plt.xlabel('epoch')
69 | plt.ylabel('loss')
70 | plt.show()
71 |
--------------------------------------------------------------------------------
/deeplearning/deep_code/16_MNIST_Deep.py:
--------------------------------------------------------------------------------
1 | #-*- coding: utf-8 -*-
2 |
3 | from keras.datasets import mnist
4 | from keras.utils import np_utils
5 | from keras.models import Sequential
6 | from keras.layers import Dense, Dropout, Flatten, Conv2D, MaxPooling2D
7 | from keras.callbacks import ModelCheckpoint,EarlyStopping
8 |
9 | import matplotlib.pyplot as plt
10 | import numpy
11 | import os
12 | import tensorflow as tf
13 |
14 | # seed 값 설정
15 | seed = 0
16 | numpy.random.seed(seed)
17 | tf.set_random_seed(seed)
18 |
19 | # 데이터 불러오기
20 |
21 | (X_train, Y_train), (X_test, Y_test) = mnist.load_data()
22 | X_train = X_train.reshape(X_train.shape[0], 28, 28, 1).astype('float32') / 255
23 | X_test = X_test.reshape(X_test.shape[0], 28, 28, 1).astype('float32') / 255
24 | Y_train = np_utils.to_categorical(Y_train)
25 | Y_test = np_utils.to_categorical(Y_test)
26 |
27 | # 컨볼루션 신경망의 설정
28 | model = Sequential()
29 | model.add(Conv2D(32, kernel_size=(3, 3), input_shape=(28, 28, 1), activation='relu'))
30 | model.add(Conv2D(64, (3, 3), activation='relu'))
31 | model.add(MaxPooling2D(pool_size=2))
32 | model.add(Dropout(0.25))
33 | model.add(Flatten())
34 | model.add(Dense(128, activation='relu'))
35 | model.add(Dropout(0.5))
36 | model.add(Dense(10, activation='softmax'))
37 |
38 | model.compile(loss='categorical_crossentropy',
39 | optimizer='adam',
40 | metrics=['accuracy'])
41 |
42 | # 모델 최적화 설정
43 | MODEL_DIR = './model/'
44 | if not os.path.exists(MODEL_DIR):
45 | os.mkdir(MODEL_DIR)
46 |
47 | modelpath="./model/{epoch:02d}-{val_loss:.4f}.hdf5"
48 | checkpointer = ModelCheckpoint(filepath=modelpath, monitor='val_loss', verbose=1, save_best_only=True)
49 | early_stopping_callback = EarlyStopping(monitor='val_loss', patience=10)
50 |
51 | # 모델의 실행
52 | history = model.fit(X_train, Y_train, validation_data=(X_test, Y_test), epochs=30, batch_size=200, verbose=0, callbacks=[early_stopping_callback,checkpointer])
53 |
54 | # 테스트 정확도 출력
55 | print("\n Test Accuracy: %.4f" % (model.evaluate(X_test, Y_test)[1]))
56 |
57 | # 테스트 셋의 오차
58 | y_vloss = history.history['val_loss']
59 |
60 | # 학습셋의 오차
61 | y_loss = history.history['loss']
62 |
63 | # 그래프로 표현
64 | x_len = numpy.arange(len(y_loss))
65 | plt.plot(x_len, y_vloss, marker='.', c="red", label='Testset_loss')
66 | plt.plot(x_len, y_loss, marker='.', c="blue", label='Trainset_loss')
67 |
68 | # 그래프에 그리드를 주고 레이블을 표시
69 | plt.legend(loc='upper right')
70 | plt.grid()
71 | plt.xlabel('epoch')
72 | plt.ylabel('loss')
73 | plt.show()
74 |
--------------------------------------------------------------------------------
/deeplearning/deep_code/17_RNN1_Reuters.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # 코드 내부에 한글을 사용가능 하게 해주는 부분입니다.
3 |
4 | # 로이터 뉴스 데이터셋 불러오기
5 | from keras.datasets import reuters
6 | from keras.models import Sequential
7 | from keras.layers import Dense, LSTM, Embedding
8 | from keras.preprocessing import sequence
9 | from keras.utils import np_utils
10 |
11 | import numpy
12 | import tensorflow as tf
13 | import matplotlib.pyplot as plt
14 |
15 | # seed 값 설정
16 | seed = 0
17 | numpy.random.seed(seed)
18 | tf.set_random_seed(seed)
19 |
20 | # 불러온 데이터를 학습셋, 테스트셋으로 나누기
21 | (X_train, Y_train), (X_test, Y_test) = reuters.load_data(num_words=1000, test_split=0.2)
22 |
23 | # 데이터 확인하기
24 | category = numpy.max(Y_train) + 1
25 | print(category, '카테고리')
26 | print(len(X_train), '학습용 뉴스 기사')
27 | print(len(X_test), '테스트용 뉴스 기사')
28 | print(X_train[0])
29 |
30 | # 데이터 전처리
31 | x_train = sequence.pad_sequences(X_train, maxlen=100)
32 | x_test = sequence.pad_sequences(X_test, maxlen=100)
33 | y_train = np_utils.to_categorical(Y_train)
34 | y_test = np_utils.to_categorical(Y_test)
35 |
36 | # 모델의 설정
37 | model = Sequential()
38 | model.add(Embedding(1000, 100))
39 | model.add(LSTM(100, activation='tanh'))
40 | model.add(Dense(46, activation='softmax'))
41 |
42 | # 모델의 컴파일
43 | model.compile(loss='categorical_crossentropy',
44 | optimizer='adam',
45 | metrics=['accuracy'])
46 |
47 | # 모델의 실행
48 | history = model.fit(x_train, y_train, batch_size=100, epochs=20, validation_data=(x_test, y_test))
49 |
50 | # 테스트 정확도 출력
51 | print("\n Test Accuracy: %.4f" % (model.evaluate(x_test, y_test)[1]))
52 |
53 |
54 | # 테스트 셋의 오차
55 | y_vloss = history.history['val_loss']
56 |
57 | # 학습셋의 오차
58 | y_loss = history.history['loss']
59 |
60 | # 그래프로 표현
61 | x_len = numpy.arange(len(y_loss))
62 | plt.plot(x_len, y_vloss, marker='.', c="red", label='Testset_loss')
63 | plt.plot(x_len, y_loss, marker='.', c="blue", label='Trainset_loss')
64 |
65 | # 그래프에 그리드를 주고 레이블을 표시
66 | plt.legend(loc='upper right')
67 | plt.grid()
68 | plt.xlabel('epoch')
69 | plt.ylabel('loss')
70 | plt.show()
71 |
--------------------------------------------------------------------------------
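
The pad_sequences call above makes every article exactly 100 tokens long, truncating longer ones and left-padding shorter ones with zeros. A minimal sketch of that behaviour:

from keras.preprocessing import sequence

print(sequence.pad_sequences([[1, 2, 3]], maxlen=5))   # [[0 0 1 2 3]]
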
/deeplearning/deep_code/17_RNN2_imdb_lstm.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | # 코드 내부에 한글을 사용가능 하게 해주는 부분입니다.
3 |
4 | from keras.preprocessing import sequence
5 | from keras.models import Sequential
6 | from keras.layers import Dense, Dropout, Activation
7 | from keras.layers import Embedding
8 | from keras.layers import LSTM
9 | from keras.layers import Conv1D, MaxPooling1D
10 | from keras.datasets import imdb
11 |
12 | import numpy
13 | import tensorflow as tf
14 | import matplotlib.pyplot as plt
15 |
16 | # seed 값 설정
17 | seed = 0
18 | numpy.random.seed(seed)
19 | tf.set_random_seed(seed)
20 |
21 | # 학습셋, 테스트셋 지정하기
22 | (x_train, y_train), (x_test, y_test) = imdb.load_data(num_words=5000)
23 |
24 | # 데이터 전처리
25 | x_train = sequence.pad_sequences(x_train, maxlen=100)
26 | x_test = sequence.pad_sequences(x_test, maxlen=100)
27 |
28 | # 모델의 설정
29 | model = Sequential()
30 | model.add(Embedding(5000, 100))
31 | model.add(Dropout(0.5))
32 | model.add(Conv1D(64, 5, padding='valid', activation='relu',strides=1))
33 | model.add(MaxPooling1D(pool_size=4))
34 | model.add(LSTM(55))
35 | model.add(Dense(1))
36 | model.add(Activation('sigmoid'))
37 | model.summary()
38 |
39 | # 모델의 컴파일
40 | model.compile(loss='binary_crossentropy',
41 | optimizer='adam',
42 | metrics=['accuracy'])
43 |
44 | # 모델의 실행
45 | history = model.fit(x_train, y_train, batch_size=100, epochs=5, validation_data=(x_test, y_test))
46 |
47 | # 테스트 정확도 출력
48 | print("\n Test Accuracy: %.4f" % (model.evaluate(x_test, y_test)[1]))
49 |
50 |
51 | # 테스트 셋의 오차
52 | y_vloss = history.history['val_loss']
53 |
54 | # 학습셋의 오차
55 | y_loss = history.history['loss']
56 |
57 | # 그래프로 표현
58 | x_len = numpy.arange(len(y_loss))
59 | plt.plot(x_len, y_vloss, marker='.', c="red", label='Testset_loss')
60 | plt.plot(x_len, y_loss, marker='.', c="blue", label='Trainset_loss')
61 |
62 | # 그래프에 그리드를 주고 레이블을 표시
63 | plt.legend(loc='upper right')
64 | plt.grid()
65 | plt.xlabel('epoch')
66 | plt.ylabel('loss')
67 | plt.show()
68 |
--------------------------------------------------------------------------------