├── .gitattributes
├── .idea
│   ├── keras.iml
│   ├── misc.xml
│   ├── modules.xml
│   ├── vcs.xml
│   └── workspace.xml
├── .ipynb_checkpoints
│   ├── 1. predict_iris_data-checkpoint.ipynb
│   ├── 2. predict_redwine_whitewine-checkpoint.ipynb
│   ├── 3. predict_boston_house-checkpoint.ipynb
│   ├── 4. predict_MNIST_with_MLP-checkpoint.ipynb
│   ├── 5. predict_MNIST_with_CNN-checkpoint.ipynb
│   ├── 6. predict_binary_img_with_CNN-checkpoint.ipynb
│   ├── 7. predict_multi_img_with_CNN-checkpoint.ipynb
│   ├── 8. predict_spam_or_ham_with_LSTM-checkpoint.ipynb
│   ├── 9. predict_korea_news_category_with_LSTM-checkpoint.ipynb
│   ├── keras_cpu_test-checkpoint.ipynb
│   └── keras_gpu_test-checkpoint.ipynb
├── 1. predict_iris_data.ipynb
├── 10. predict_korea_movie_review_classification.ipynb
├── 11. seq2seq model (translate).ipynb
├── 2. predict_redwine_whitewine.ipynb
├── 3. predict_boston_house.ipynb
├── 4. predict_MNIST_with_MLP.ipynb
├── 5. predict_MNIST_with_CNN.ipynb
├── 6. predict_binary_img_with_CNN.ipynb
├── 7. predict_multi_img_with_CNN.ipynb
├── 8. predict_spam_or_ham_with_LSTM.ipynb
├── 9. predict_korea_news_category_with_LSTM.ipynb
├── README.md
├── bfg-1.13.0.jar
├── dataset
│   ├── ThoraricSurgery.csv
│   ├── ThoraricSurgery_test.csv
│   ├── housing.csv
│   ├── iris-test.csv
│   ├── iris.csv
│   ├── pima-indians-diabetes-test.csv
│   ├── pima-indians-diabetes.csv
│   ├── sonar.csv
│   ├── spam.csv
│   ├── test.csv
│   └── wine.csv
├── keras_cpu_test.ipynb
├── keras_gpu_test.ipynb
├── model
│   ├── cnn_mnist.model
│   ├── dog_cat_classify.model
│   ├── iris.model
│   ├── mnist_mlp.model
│   ├── model_day4.model
│   ├── multi_img_classification.model
│   ├── predict_korea_news_LSTM.model
│   └── spam_ham_LSTM.model
└── numpy_data
    ├── binary_image_data.npy
    ├── image_data.npy
    └── multi_image_data.npy
/.gitattributes:
--------------------------------------------------------------------------------
1 | *.npy filter=lfs diff=lfs merge=lfs -text
2 |
--------------------------------------------------------------------------------
/.idea/keras.iml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/misc.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.idea/workspace.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.ipynb_checkpoints/3. predict_boston_house-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "name": "stderr",
10 | "output_type": "stream",
11 | "text": [
12 | "C:\\Users\\leesoojin\\Anaconda3\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
13 | " from ._conv import register_converters as _register_converters\n",
14 | "Using TensorFlow backend.\n"
15 | ]
16 | }
17 | ],
18 | "source": [
19 | "from keras.models import Sequential\n",
20 | "from keras.layers import Dense\n",
21 | "from keras.utils import np_utils\n",
22 | "from sklearn.model_selection import train_test_split\n",
23 | "from sklearn.preprocessing import LabelEncoder\n",
24 | "from keras.callbacks import ModelCheckpoint, EarlyStopping\n",
25 | "import keras.backend.tensorflow_backend as K\n",
26 | "import tensorflow as tf\n",
27 | "import pandas as pd\n",
28 | "import numpy as np\n",
29 | "import matplotlib.pyplot as plt\n",
30 | "import matplotlib as mpl\n",
31 | "import seaborn as sns\n",
32 | "import os\n",
33 | "config = tf.ConfigProto()\n",
34 | "config.gpu_options.allow_growth = True\n",
35 | "session = tf.Session(config=config)"
36 | ]
37 | },
38 | {
39 | "cell_type": "code",
40 | "execution_count": 2,
41 | "metadata": {},
42 | "outputs": [
43 | {
44 | "name": "stdout",
45 | "output_type": "stream",
46 | "text": [
47 | "\n",
48 | "RangeIndex: 506 entries, 0 to 505\n",
49 | "Data columns (total 14 columns):\n",
50 | "0 506 non-null float64\n",
51 | "1 506 non-null float64\n",
52 | "2 506 non-null float64\n",
53 | "3 506 non-null int64\n",
54 | "4 506 non-null float64\n",
55 | "5 506 non-null float64\n",
56 | "6 506 non-null float64\n",
57 | "7 506 non-null float64\n",
58 | "8 506 non-null int64\n",
59 | "9 506 non-null float64\n",
60 | "10 506 non-null float64\n",
61 | "11 506 non-null float64\n",
62 | "12 506 non-null float64\n",
63 | "13 506 non-null float64\n",
64 | "dtypes: float64(12), int64(2)\n",
65 | "memory usage: 55.4 KB\n",
66 | "None\n"
67 | ]
68 | }
69 | ],
70 | "source": [
71 | "df = pd.read_csv('./dataset/housing.csv', delim_whitespace=True, header=None)\n",
72 | "\n",
73 | "print(df.info())"
74 | ]
75 | },
76 | {
77 | "cell_type": "code",
78 | "execution_count": 3,
79 | "metadata": {},
80 | "outputs": [
81 | {
82 | "name": "stdout",
83 | "output_type": "stream",
84 | "text": [
85 | " 0 1 2 3 4 5 6 7 8 9 10 \\\n",
86 | "0 0.00632 18.0 2.31 0 0.538 6.575 65.2 4.0900 1 296.0 15.3 \n",
87 | "1 0.02731 0.0 7.07 0 0.469 6.421 78.9 4.9671 2 242.0 17.8 \n",
88 | "2 0.02729 0.0 7.07 0 0.469 7.185 61.1 4.9671 2 242.0 17.8 \n",
89 | "3 0.03237 0.0 2.18 0 0.458 6.998 45.8 6.0622 3 222.0 18.7 \n",
90 | "4 0.06905 0.0 2.18 0 0.458 7.147 54.2 6.0622 3 222.0 18.7 \n",
91 | "\n",
92 | " 11 12 13 \n",
93 | "0 396.90 4.98 24.0 \n",
94 | "1 396.90 9.14 21.6 \n",
95 | "2 392.83 4.03 34.7 \n",
96 | "3 394.63 2.94 33.4 \n",
97 | "4 396.90 5.33 36.2 \n"
98 | ]
99 | }
100 | ],
101 | "source": [
102 | "print(df.head())"
103 | ]
104 | },
105 | {
106 | "cell_type": "code",
107 | "execution_count": 4,
108 | "metadata": {},
109 | "outputs": [],
110 | "source": [
111 | "X = df.iloc[:, 0:13].values\n",
112 | "y = df.iloc[:, 13].values\n",
113 | "\n",
114 | "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=3)"
115 | ]
116 | },
117 | {
118 | "cell_type": "code",
119 | "execution_count": 5,
120 | "metadata": {},
121 | "outputs": [
122 | {
123 | "name": "stdout",
124 | "output_type": "stream",
125 | "text": [
126 | "Epoch 1/200\n",
127 | "404/404 [==============================] - 0s 1ms/step - loss: 2122.1331\n",
128 | "Epoch 2/200\n",
129 | "404/404 [==============================] - 0s 227us/step - loss: 347.9923\n",
130 | "Epoch 3/200\n",
131 | "404/404 [==============================] - 0s 254us/step - loss: 259.2522\n",
132 | "Epoch 4/200\n",
133 | "404/404 [==============================] - 0s 289us/step - loss: 208.1025\n",
134 | "Epoch 5/200\n",
135 | "404/404 [==============================] - 0s 276us/step - loss: 166.0465\n",
136 | "Epoch 6/200\n",
137 | "404/404 [==============================] - 0s 296us/step - loss: 134.6875\n",
138 | "Epoch 7/200\n",
139 | "404/404 [==============================] - 0s 232us/step - loss: 115.4015\n",
140 | "Epoch 8/200\n",
141 | "404/404 [==============================] - 0s 237us/step - loss: 102.1796\n",
142 | "Epoch 9/200\n",
143 | "404/404 [==============================] - ETA: 0s - loss: 98.38 - 0s 222us/step - loss: 97.7743\n",
144 | "Epoch 10/200\n",
145 | "404/404 [==============================] - 0s 237us/step - loss: 87.8853\n",
146 | "Epoch 11/200\n",
147 | "404/404 [==============================] - 0s 239us/step - loss: 85.4755\n",
148 | "Epoch 12/200\n",
149 | "404/404 [==============================] - 0s 227us/step - loss: 81.4273\n",
150 | "Epoch 13/200\n",
151 | "404/404 [==============================] - 0s 237us/step - loss: 79.4530\n",
152 | "Epoch 14/200\n",
153 | "404/404 [==============================] - 0s 232us/step - loss: 75.6931\n",
154 | "Epoch 15/200\n",
155 | "404/404 [==============================] - 0s 232us/step - loss: 73.4265\n",
156 | "Epoch 16/200\n",
157 | "404/404 [==============================] - 0s 309us/step - loss: 69.5513\n",
158 | "Epoch 17/200\n",
159 | "404/404 [==============================] - 0s 244us/step - loss: 68.2290\n",
160 | "Epoch 18/200\n",
161 | "404/404 [==============================] - 0s 227us/step - loss: 66.7893\n",
162 | "Epoch 19/200\n",
163 | "404/404 [==============================] - 0s 235us/step - loss: 64.6461\n",
164 | "Epoch 20/200\n",
165 | "404/404 [==============================] - 0s 227us/step - loss: 63.4591\n",
166 | "Epoch 21/200\n",
167 | "404/404 [==============================] - 0s 232us/step - loss: 63.5708\n",
168 | "Epoch 22/200\n",
169 | "404/404 [==============================] - 0s 235us/step - loss: 59.4259\n",
170 | "Epoch 23/200\n",
171 | "404/404 [==============================] - 0s 235us/step - loss: 61.8058\n",
172 | "Epoch 24/200\n",
173 | "404/404 [==============================] - 0s 237us/step - loss: 56.5482\n",
174 | "Epoch 25/200\n",
175 | "404/404 [==============================] - 0s 242us/step - loss: 57.9695\n",
176 | "Epoch 26/200\n",
177 | "404/404 [==============================] - 0s 230us/step - loss: 56.6245\n",
178 | "Epoch 27/200\n",
179 | "404/404 [==============================] - 0s 237us/step - loss: 54.3742\n",
180 | "Epoch 28/200\n",
181 | "404/404 [==============================] - 0s 227us/step - loss: 52.7804\n",
182 | "Epoch 29/200\n",
183 | "404/404 [==============================] - 0s 232us/step - loss: 53.0014\n",
184 | "Epoch 30/200\n",
185 | "404/404 [==============================] - 0s 249us/step - loss: 50.2627\n",
186 | "Epoch 31/200\n",
187 | "404/404 [==============================] - 0s 247us/step - loss: 49.8172\n",
188 | "Epoch 32/200\n",
189 | "404/404 [==============================] - 0s 227us/step - loss: 51.2612\n",
190 | "Epoch 33/200\n",
191 | "404/404 [==============================] - 0s 262us/step - loss: 48.2356\n",
192 | "Epoch 34/200\n",
193 | "404/404 [==============================] - 0s 242us/step - loss: 48.4007\n",
194 | "Epoch 35/200\n",
195 | "404/404 [==============================] - 0s 242us/step - loss: 46.9832\n",
196 | "Epoch 36/200\n",
197 | "404/404 [==============================] - 0s 254us/step - loss: 45.6215\n",
198 | "Epoch 37/200\n",
199 | "404/404 [==============================] - 0s 252us/step - loss: 46.2905\n",
200 | "Epoch 38/200\n",
201 | "404/404 [==============================] - 0s 232us/step - loss: 44.1845\n",
202 | "Epoch 39/200\n",
203 | "404/404 [==============================] - 0s 235us/step - loss: 43.7748\n",
204 | "Epoch 40/200\n",
205 | "404/404 [==============================] - 0s 230us/step - loss: 43.3959\n",
206 | "Epoch 41/200\n",
207 | "404/404 [==============================] - 0s 237us/step - loss: 42.5955\n",
208 | "Epoch 42/200\n",
209 | "404/404 [==============================] - 0s 232us/step - loss: 47.3782\n",
210 | "Epoch 43/200\n",
211 | "404/404 [==============================] - 0s 235us/step - loss: 43.7336\n",
212 | "Epoch 44/200\n",
213 | "404/404 [==============================] - 0s 257us/step - loss: 40.7421\n",
214 | "Epoch 45/200\n",
215 | "404/404 [==============================] - 0s 254us/step - loss: 40.1482\n",
216 | "Epoch 46/200\n",
217 | "404/404 [==============================] - 0s 230us/step - loss: 39.9290\n",
218 | "Epoch 47/200\n",
219 | "404/404 [==============================] - 0s 230us/step - loss: 41.5608\n",
220 | "Epoch 48/200\n",
221 | "404/404 [==============================] - 0s 252us/step - loss: 41.5198\n",
222 | "Epoch 49/200\n",
223 | "404/404 [==============================] - 0s 257us/step - loss: 42.8822\n",
224 | "Epoch 50/200\n",
225 | "404/404 [==============================] - 0s 240us/step - loss: 38.1835\n",
226 | "Epoch 51/200\n",
227 | "404/404 [==============================] - 0s 237us/step - loss: 39.6656\n",
228 | "Epoch 52/200\n",
229 | "404/404 [==============================] - 0s 254us/step - loss: 37.2524\n",
230 | "Epoch 53/200\n",
231 | "404/404 [==============================] - 0s 237us/step - loss: 37.2365\n",
232 | "Epoch 54/200\n",
233 | "404/404 [==============================] - 0s 235us/step - loss: 36.6913\n",
234 | "Epoch 55/200\n",
235 | "404/404 [==============================] - 0s 247us/step - loss: 36.9208\n",
236 | "Epoch 56/200\n",
237 | "404/404 [==============================] - 0s 244us/step - loss: 36.7544\n",
238 | "Epoch 57/200\n",
239 | "404/404 [==============================] - 0s 230us/step - loss: 36.5409\n",
240 | "Epoch 58/200\n",
241 | "404/404 [==============================] - 0s 232us/step - loss: 39.3795\n",
242 | "Epoch 59/200\n",
243 | "404/404 [==============================] - 0s 225us/step - loss: 45.0863\n",
244 | "Epoch 60/200\n",
245 | "404/404 [==============================] - 0s 244us/step - loss: 35.1973\n",
246 | "Epoch 61/200\n",
247 | "404/404 [==============================] - 0s 301us/step - loss: 34.8124\n",
248 | "Epoch 62/200\n",
249 | "404/404 [==============================] - 0s 242us/step - loss: 33.5994\n",
250 | "Epoch 63/200\n",
251 | "404/404 [==============================] - 0s 232us/step - loss: 36.3246\n",
252 | "Epoch 64/200\n",
253 | "404/404 [==============================] - 0s 232us/step - loss: 35.3689\n",
254 | "Epoch 65/200\n",
255 | "404/404 [==============================] - 0s 230us/step - loss: 34.7354\n",
256 | "Epoch 66/200\n",
257 | "404/404 [==============================] - 0s 232us/step - loss: 33.9282\n",
258 | "Epoch 67/200\n",
259 | "404/404 [==============================] - 0s 222us/step - loss: 34.5711\n",
260 | "Epoch 68/200\n",
261 | "404/404 [==============================] - 0s 237us/step - loss: 32.7263\n",
262 | "Epoch 69/200\n",
263 | "404/404 [==============================] - 0s 234us/step - loss: 35.4556\n",
264 | "Epoch 70/200\n",
265 | "404/404 [==============================] - 0s 235us/step - loss: 33.0661\n",
266 | "Epoch 71/200\n",
267 | "404/404 [==============================] - 0s 239us/step - loss: 34.8677\n",
268 | "Epoch 72/200\n",
269 | "404/404 [==============================] - 0s 235us/step - loss: 37.3278\n",
270 | "Epoch 73/200\n",
271 | "404/404 [==============================] - 0s 227us/step - loss: 33.6334\n",
272 | "Epoch 74/200\n",
273 | "404/404 [==============================] - 0s 230us/step - loss: 32.1562\n",
274 | "Epoch 75/200\n",
275 | "404/404 [==============================] - 0s 232us/step - loss: 32.4006\n",
276 | "Epoch 76/200\n",
277 | "404/404 [==============================] - 0s 222us/step - loss: 33.8208\n",
278 | "Epoch 77/200\n",
279 | "404/404 [==============================] - 0s 234us/step - loss: 32.7471\n",
280 | "Epoch 78/200\n",
281 | "404/404 [==============================] - ETA: 0s - loss: 30.57 - 0s 237us/step - loss: 32.6656\n",
282 | "Epoch 79/200\n",
283 | "404/404 [==============================] - 0s 242us/step - loss: 33.6035\n",
284 | "Epoch 80/200\n",
285 | "404/404 [==============================] - 0s 230us/step - loss: 33.1556\n",
286 | "Epoch 81/200\n",
287 | "404/404 [==============================] - 0s 230us/step - loss: 32.4329\n",
288 | "Epoch 82/200\n",
289 | "404/404 [==============================] - 0s 235us/step - loss: 34.3232\n",
290 | "Epoch 83/200\n",
291 | "404/404 [==============================] - 0s 309us/step - loss: 31.9703\n",
292 | "Epoch 84/200\n",
293 | "404/404 [==============================] - 0s 232us/step - loss: 32.8131\n",
294 | "Epoch 85/200\n",
295 | "404/404 [==============================] - 0s 237us/step - loss: 31.2383\n",
296 | "Epoch 86/200\n",
297 | "404/404 [==============================] - 0s 232us/step - loss: 32.0186\n",
298 | "Epoch 87/200\n",
299 | "404/404 [==============================] - 0s 237us/step - loss: 32.7700\n",
300 | "Epoch 88/200\n",
301 | "404/404 [==============================] - 0s 232us/step - loss: 32.1824\n",
302 | "Epoch 89/200\n",
303 | "404/404 [==============================] - 0s 237us/step - loss: 32.2091\n",
304 | "Epoch 90/200\n",
305 | "404/404 [==============================] - 0s 230us/step - loss: 30.2764\n",
306 | "Epoch 91/200\n",
307 | "404/404 [==============================] - 0s 232us/step - loss: 31.4422\n",
308 | "Epoch 92/200\n",
309 | "404/404 [==============================] - 0s 230us/step - loss: 32.3985\n",
310 | "Epoch 93/200\n",
311 | "404/404 [==============================] - 0s 262us/step - loss: 30.2015\n",
312 | "Epoch 94/200\n",
313 | "404/404 [==============================] - 0s 299us/step - loss: 30.5581\n",
314 | "Epoch 95/200\n",
315 | "404/404 [==============================] - 0s 254us/step - loss: 32.1337\n",
316 | "Epoch 96/200\n",
317 | "404/404 [==============================] - 0s 230us/step - loss: 34.5846\n",
318 | "Epoch 97/200\n",
319 | "404/404 [==============================] - 0s 210us/step - loss: 31.3126\n",
320 | "Epoch 98/200\n",
321 | "404/404 [==============================] - 0s 212us/step - loss: 34.9719\n",
322 | "Epoch 99/200\n",
323 | "404/404 [==============================] - 0s 212us/step - loss: 31.4271\n",
324 | "Epoch 100/200\n",
325 | "404/404 [==============================] - 0s 215us/step - loss: 29.2816\n",
326 | "Epoch 101/200\n",
327 | "404/404 [==============================] - 0s 271us/step - loss: 31.5813\n",
328 | "Epoch 102/200\n",
329 | "404/404 [==============================] - 0s 232us/step - loss: 28.8050\n",
330 | "Epoch 103/200\n",
331 | "404/404 [==============================] - 0s 230us/step - loss: 28.1895\n",
332 | "Epoch 104/200\n",
333 | "404/404 [==============================] - 0s 232us/step - loss: 29.4792\n",
334 | "Epoch 105/200\n",
335 | "404/404 [==============================] - 0s 235us/step - loss: 30.8975\n",
336 | "Epoch 106/200\n",
337 | "404/404 [==============================] - 0s 230us/step - loss: 29.5941\n",
338 | "Epoch 107/200\n",
339 | "404/404 [==============================] - 0s 225us/step - loss: 33.2024\n",
340 | "Epoch 108/200\n",
341 | "404/404 [==============================] - 0s 230us/step - loss: 29.2707\n",
342 | "Epoch 109/200\n",
343 | "404/404 [==============================] - 0s 227us/step - loss: 29.9632\n",
344 | "Epoch 110/200\n",
345 | "404/404 [==============================] - 0s 234us/step - loss: 29.0034\n",
346 | "Epoch 111/200\n",
347 | "404/404 [==============================] - 0s 240us/step - loss: 27.7416\n",
348 | "Epoch 112/200\n",
349 | "404/404 [==============================] - 0s 227us/step - loss: 28.6736\n",
350 | "Epoch 113/200\n",
351 | "404/404 [==============================] - ETA: 0s - loss: 27.21 - 0s 232us/step - loss: 30.4598\n",
352 | "Epoch 114/200\n",
353 | "404/404 [==============================] - 0s 234us/step - loss: 28.4916\n",
354 | "Epoch 115/200\n",
355 | "404/404 [==============================] - 0s 227us/step - loss: 30.8706\n",
356 | "Epoch 116/200\n",
357 | "404/404 [==============================] - 0s 237us/step - loss: 28.7015\n",
358 | "Epoch 117/200\n",
359 | "404/404 [==============================] - 0s 235us/step - loss: 27.0651\n",
360 | "Epoch 118/200\n",
361 | "404/404 [==============================] - 0s 227us/step - loss: 29.1501\n",
362 | "Epoch 119/200\n",
363 | "404/404 [==============================] - 0s 227us/step - loss: 32.5826\n",
364 | "Epoch 120/200\n",
365 | "404/404 [==============================] - 0s 232us/step - loss: 29.9393\n",
366 | "Epoch 121/200\n",
367 | "404/404 [==============================] - 0s 225us/step - loss: 29.9413\n",
368 | "Epoch 122/200\n",
369 | "404/404 [==============================] - 0s 242us/step - loss: 26.3597\n",
370 | "Epoch 123/200\n",
371 | "404/404 [==============================] - 0s 244us/step - loss: 27.2816\n",
372 | "Epoch 124/200\n",
373 | "404/404 [==============================] - 0s 235us/step - loss: 28.6836\n",
374 | "Epoch 125/200\n",
375 | "404/404 [==============================] - 0s 230us/step - loss: 27.6494\n",
376 | "Epoch 126/200\n",
377 | "404/404 [==============================] - 0s 242us/step - loss: 31.7421\n",
378 | "Epoch 127/200\n",
379 | "404/404 [==============================] - 0s 235us/step - loss: 30.7558\n",
380 | "Epoch 128/200\n",
381 | "404/404 [==============================] - 0s 237us/step - loss: 27.4953\n",
382 | "Epoch 129/200\n",
383 | "404/404 [==============================] - 0s 234us/step - loss: 25.6598\n",
384 | "Epoch 130/200\n",
385 | "404/404 [==============================] - 0s 239us/step - loss: 28.6597\n",
386 | "Epoch 131/200\n",
387 | "404/404 [==============================] - 0s 286us/step - loss: 33.7057\n",
388 | "Epoch 132/200\n",
389 | "404/404 [==============================] - 0s 242us/step - loss: 27.8660\n",
390 | "Epoch 133/200\n",
391 | "404/404 [==============================] - 0s 242us/step - loss: 26.1220\n",
392 | "Epoch 134/200\n",
393 | "404/404 [==============================] - 0s 230us/step - loss: 26.6636\n",
394 | "Epoch 135/200\n",
395 | "404/404 [==============================] - 0s 239us/step - loss: 27.3979\n",
396 | "Epoch 136/200\n",
397 | "404/404 [==============================] - 0s 232us/step - loss: 25.3598\n",
398 | "Epoch 137/200\n",
399 | "404/404 [==============================] - 0s 244us/step - loss: 27.29710s - loss: 25.51\n",
400 | "Epoch 138/200\n",
401 | "404/404 [==============================] - 0s 242us/step - loss: 26.6476\n",
402 | "Epoch 139/200\n",
403 | "404/404 [==============================] - 0s 230us/step - loss: 28.6654\n",
404 | "Epoch 140/200\n",
405 | "404/404 [==============================] - ETA: 0s - loss: 25.21 - 0s 239us/step - loss: 26.0402\n",
406 | "Epoch 141/200\n",
407 | "404/404 [==============================] - 0s 229us/step - loss: 26.3300\n",
408 | "Epoch 142/200\n",
409 | "404/404 [==============================] - 0s 232us/step - loss: 28.5133\n",
410 | "Epoch 143/200\n",
411 | "404/404 [==============================] - 0s 234us/step - loss: 27.3029\n",
412 | "Epoch 144/200\n",
413 | "404/404 [==============================] - 0s 237us/step - loss: 25.7936\n",
414 | "Epoch 145/200\n",
415 | "404/404 [==============================] - 0s 237us/step - loss: 24.6531\n",
416 | "Epoch 146/200\n",
417 | "404/404 [==============================] - 0s 234us/step - loss: 29.3940\n",
418 | "Epoch 147/200\n",
419 | "404/404 [==============================] - 0s 234us/step - loss: 26.0954\n",
420 | "Epoch 148/200\n",
421 | "404/404 [==============================] - 0s 244us/step - loss: 27.1548\n",
422 | "Epoch 149/200\n",
423 | "404/404 [==============================] - ETA: 0s - loss: 26.57 - 0s 234us/step - loss: 25.9111\n",
424 | "Epoch 150/200\n",
425 | "404/404 [==============================] - 0s 284us/step - loss: 25.7981\n",
426 | "Epoch 151/200\n",
427 | "404/404 [==============================] - 0s 281us/step - loss: 26.0544\n",
428 | "Epoch 152/200\n",
429 | "404/404 [==============================] - 0s 239us/step - loss: 25.7746\n",
430 | "Epoch 153/200\n",
431 | "404/404 [==============================] - 0s 232us/step - loss: 25.2964\n",
432 | "Epoch 154/200\n",
433 | "404/404 [==============================] - 0s 252us/step - loss: 26.9285\n",
434 | "Epoch 155/200\n",
435 | "404/404 [==============================] - 0s 244us/step - loss: 27.8392\n",
436 | "Epoch 156/200\n",
437 | "404/404 [==============================] - 0s 234us/step - loss: 26.6262\n",
438 | "Epoch 157/200\n",
439 | "404/404 [==============================] - 0s 237us/step - loss: 25.4286\n",
440 | "Epoch 158/200\n",
441 | "404/404 [==============================] - 0s 239us/step - loss: 25.9537\n",
442 | "Epoch 159/200\n",
443 | "404/404 [==============================] - 0s 237us/step - loss: 25.9483\n",
444 | "Epoch 160/200\n",
445 | "404/404 [==============================] - 0s 242us/step - loss: 26.6073\n",
446 | "Epoch 161/200\n",
447 | "404/404 [==============================] - 0s 235us/step - loss: 26.3330\n",
448 | "Epoch 162/200\n",
449 | "404/404 [==============================] - 0s 289us/step - loss: 25.0448\n",
450 | "Epoch 163/200\n",
451 | "404/404 [==============================] - 0s 249us/step - loss: 25.8895\n",
452 | "Epoch 164/200\n",
453 | "404/404 [==============================] - 0s 242us/step - loss: 25.0078\n",
454 | "Epoch 165/200\n",
455 | "404/404 [==============================] - 0s 232us/step - loss: 23.8572\n",
456 | "Epoch 166/200\n",
457 | "404/404 [==============================] - 0s 227us/step - loss: 26.8393\n",
458 | "Epoch 167/200\n",
459 | "404/404 [==============================] - 0s 239us/step - loss: 24.7674\n",
460 | "Epoch 168/200\n",
461 | "404/404 [==============================] - 0s 242us/step - loss: 29.7234\n",
462 | "Epoch 169/200\n",
463 | "404/404 [==============================] - 0s 227us/step - loss: 25.9919\n",
464 | "Epoch 170/200\n",
465 | "404/404 [==============================] - 0s 230us/step - loss: 23.7709\n",
466 | "Epoch 171/200\n",
467 | "404/404 [==============================] - ETA: 0s - loss: 22.95 - 0s 237us/step - loss: 24.0283\n",
468 | "Epoch 172/200\n",
469 | "404/404 [==============================] - 0s 237us/step - loss: 24.3686\n",
470 | "Epoch 173/200\n",
471 | "404/404 [==============================] - 0s 237us/step - loss: 25.7648\n",
472 | "Epoch 174/200\n",
473 | "404/404 [==============================] - 0s 235us/step - loss: 24.6780\n",
474 | "Epoch 175/200\n",
475 | "404/404 [==============================] - 0s 234us/step - loss: 24.5750\n",
476 | "Epoch 176/200\n",
477 | "404/404 [==============================] - 0s 230us/step - loss: 24.4545\n",
478 | "Epoch 177/200\n",
479 | "404/404 [==============================] - 0s 235us/step - loss: 23.2645\n",
480 | "Epoch 178/200\n",
481 | "404/404 [==============================] - 0s 239us/step - loss: 23.5010\n",
482 | "Epoch 179/200\n",
483 | "404/404 [==============================] - 0s 244us/step - loss: 25.2118\n",
484 | "Epoch 180/200\n",
485 | "404/404 [==============================] - 0s 239us/step - loss: 24.6237\n",
486 | "Epoch 181/200\n",
487 | "404/404 [==============================] - 0s 237us/step - loss: 25.8541\n",
488 | "Epoch 182/200\n",
489 | "404/404 [==============================] - 0s 237us/step - loss: 24.4790\n",
490 | "Epoch 183/200\n",
491 | "404/404 [==============================] - 0s 237us/step - loss: 24.2495\n",
492 | "Epoch 184/200\n",
493 | "404/404 [==============================] - 0s 230us/step - loss: 25.2081\n",
494 | "Epoch 185/200\n",
495 | "404/404 [==============================] - 0s 239us/step - loss: 22.2362\n",
496 | "Epoch 186/200\n",
497 | "404/404 [==============================] - 0s 242us/step - loss: 24.2375\n",
498 | "Epoch 187/200\n",
499 | "404/404 [==============================] - 0s 237us/step - loss: 22.1582\n",
500 | "Epoch 188/200\n"
501 | ]
502 | },
503 | {
504 | "name": "stdout",
505 | "output_type": "stream",
506 | "text": [
507 | "404/404 [==============================] - 0s 244us/step - loss: 23.7005\n",
508 | "Epoch 189/200\n",
509 | "404/404 [==============================] - 0s 242us/step - loss: 24.0025\n",
510 | "Epoch 190/200\n",
511 | "404/404 [==============================] - ETA: 0s - loss: 24.14 - 0s 232us/step - loss: 25.4144\n",
512 | "Epoch 191/200\n",
513 | "404/404 [==============================] - 0s 237us/step - loss: 30.1352\n",
514 | "Epoch 192/200\n",
515 | "404/404 [==============================] - 0s 235us/step - loss: 23.3425\n",
516 | "Epoch 193/200\n",
517 | "404/404 [==============================] - 0s 235us/step - loss: 22.6613\n",
518 | "Epoch 194/200\n",
519 | "404/404 [==============================] - 0s 247us/step - loss: 24.7335\n",
520 | "Epoch 195/200\n",
521 | "404/404 [==============================] - 0s 252us/step - loss: 22.1079\n",
522 | "Epoch 196/200\n",
523 | "404/404 [==============================] - 0s 235us/step - loss: 24.2412\n",
524 | "Epoch 197/200\n",
525 | "404/404 [==============================] - 0s 239us/step - loss: 23.6584\n",
526 | "Epoch 198/200\n",
527 | "404/404 [==============================] - 0s 239us/step - loss: 24.2968\n",
528 | "Epoch 199/200\n",
529 | "404/404 [==============================] - 0s 237us/step - loss: 26.1472\n",
530 | "Epoch 200/200\n",
531 | "404/404 [==============================] - 0s 279us/step - loss: 23.6032\n"
532 | ]
533 | }
534 | ],
535 | "source": [
536 | "with K.tf_ops.device('/device:GPU:0'):\n",
537 | " model = Sequential()\n",
538 | " model.add(Dense(30, input_dim=X.shape[1], activation='relu'))\n",
539 | " model.add(Dense(7, activation='relu'))\n",
540 | "    model.add(Dense(1)) # Linear regression: no activation function on the final output layer.\n",
541 | " \n",
542 | " model.compile(loss='mean_squared_error', optimizer='adam')\n",
543 | " \n",
544 | " model.fit(X_train, y_train, epochs=200, batch_size=10)"
545 | ]
546 | },
547 | {
548 | "cell_type": "code",
549 | "execution_count": 6,
550 | "metadata": {},
551 | "outputs": [
552 | {
553 | "name": "stdout",
554 | "output_type": "stream",
555 | "text": [
556 | "Actual price: 44.800, Predicted price: 29.597\n",
557 | "Actual price: 17.100, Predicted price: 17.374\n",
558 | "Actual price: 17.800, Predicted price: 17.432\n",
559 | "Actual price: 33.100, Predicted price: 29.175\n",
560 | "Actual price: 21.900, Predicted price: 22.060\n",
561 | "Actual price: 21.000, Predicted price: 22.157\n",
562 | "Actual price: 18.400, Predicted price: 16.429\n",
563 | "Actual price: 10.400, Predicted price: 9.389\n",
564 | "Actual price: 23.100, Predicted price: 22.239\n",
565 | "Actual price: 20.000, Predicted price: 16.864\n"
566 | ]
567 | }
568 | ],
569 | "source": [
570 | "predict = model.predict(X_test).flatten()\n",
571 | "for i in range(10):\n",
572 | " label = y_test[i]\n",
573 | " prediction= predict[i]\n",
574 | "    print(\"Actual price: {:.3f}, Predicted price: {:.3f}\".format(label, prediction))"
575 | ]
576 | },
577 | {
578 | "cell_type": "code",
579 | "execution_count": null,
580 | "metadata": {},
581 | "outputs": [],
582 | "source": []
583 | }
584 | ],
585 | "metadata": {
586 | "kernelspec": {
587 | "display_name": "Python 3",
588 | "language": "python",
589 | "name": "python3"
590 | },
591 | "language_info": {
592 | "codemirror_mode": {
593 | "name": "ipython",
594 | "version": 3
595 | },
596 | "file_extension": ".py",
597 | "mimetype": "text/x-python",
598 | "name": "python",
599 | "nbconvert_exporter": "python",
600 | "pygments_lexer": "ipython3",
601 | "version": "3.6.5"
602 | }
603 | },
604 | "nbformat": 4,
605 | "nbformat_minor": 2
606 | }
607 |
--------------------------------------------------------------------------------
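Note: the notebook above was written against standalone Keras on a TensorFlow 1.x backend; tf.ConfigProto, tf.Session, and keras.backend.tensorflow_backend no longer exist in TensorFlow 2.x. As a point of reference only, here is a minimal sketch of the same regression pipeline in tf.keras 2.x. The CSV path and hyperparameters come from the notebook; everything else is an assumption, not the author's code.

# Minimal tf.keras 2.x sketch of the Boston-housing regression above.
# Assumes ./dataset/housing.csv exists in the whitespace-delimited,
# headerless layout the notebook uses.
import pandas as pd
import tensorflow as tf
from sklearn.model_selection import train_test_split

df = pd.read_csv('./dataset/housing.csv', delim_whitespace=True, header=None)
X = df.iloc[:, 0:13].values
y = df.iloc[:, 13].values
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=3)

model = tf.keras.Sequential([
    tf.keras.layers.Dense(30, activation='relu', input_shape=(X.shape[1],)),
    tf.keras.layers.Dense(7, activation='relu'),
    tf.keras.layers.Dense(1),  # linear output: no activation for regression
])
model.compile(loss='mean_squared_error', optimizer='adam')
model.fit(X_train, y_train, epochs=200, batch_size=10)

print(model.predict(X_test[:10]).flatten())  # first ten predicted prices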
/.ipynb_checkpoints/keras_cpu_test-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "**Skipped this one because it takes way too long....**\n",
8 | "\n",
9 | "The GPU run finishes in just 6~7 seconds, but this one takes several minutes per epoch."
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 2,
15 | "metadata": {},
16 | "outputs": [
17 | {
18 | "name": "stdout",
19 | "output_type": "stream",
20 | "text": [
21 | "x_train shape: (60000, 28, 28, 1)\n",
22 | "60000 train samples\n",
23 | "10000 test samples\n",
24 | "Train on 60000 samples, validate on 10000 samples\n",
25 | "Epoch 1/12\n",
26 | " 384/60000 [..............................] - ETA: 27:43 - loss: 2.2585 - acc: 0.1380"
27 | ]
28 | },
29 | {
30 | "ename": "KeyboardInterrupt",
31 | "evalue": "",
32 | "output_type": "error",
33 | "traceback": [
34 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
35 | "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
36 | "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m()\u001b[0m\n\u001b[0;32m 68\u001b[0m \u001b[0mepochs\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mepochs\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 69\u001b[0m \u001b[0mverbose\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 70\u001b[1;33m validation_data=(x_test, y_test))\n\u001b[0m\u001b[0;32m 71\u001b[0m \u001b[0mscore\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mevaluate\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mx_test\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0my_test\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mverbose\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 72\u001b[0m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Test loss:'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mscore\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
37 | "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras\\engine\\training.py\u001b[0m in \u001b[0;36mfit\u001b[1;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, **kwargs)\u001b[0m\n\u001b[0;32m 1040\u001b[0m \u001b[0minitial_epoch\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0minitial_epoch\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1041\u001b[0m \u001b[0msteps_per_epoch\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0msteps_per_epoch\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1042\u001b[1;33m validation_steps=validation_steps)\n\u001b[0m\u001b[0;32m 1043\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1044\u001b[0m def evaluate(self, x=None, y=None,\n",
38 | "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras\\engine\\training_arrays.py\u001b[0m in \u001b[0;36mfit_loop\u001b[1;34m(model, f, ins, out_labels, batch_size, epochs, verbose, callbacks, val_f, val_ins, shuffle, callback_metrics, initial_epoch, steps_per_epoch, validation_steps)\u001b[0m\n\u001b[0;32m 197\u001b[0m \u001b[0mins_batch\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mi\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mins_batch\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mi\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtoarray\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 198\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 199\u001b[1;33m \u001b[0mouts\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mf\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mins_batch\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 200\u001b[0m \u001b[1;32mif\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mouts\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mlist\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 201\u001b[0m \u001b[0mouts\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[0mouts\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
39 | "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras\\backend\\tensorflow_backend.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, inputs)\u001b[0m\n\u001b[0;32m 2659\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_legacy_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2660\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 2661\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 2662\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2663\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mpy_any\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mis_tensor\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mx\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mx\u001b[0m \u001b[1;32min\u001b[0m \u001b[0minputs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
40 | "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras\\backend\\tensorflow_backend.py\u001b[0m in \u001b[0;36m_call\u001b[1;34m(self, inputs)\u001b[0m\n\u001b[0;32m 2629\u001b[0m \u001b[0msymbol_vals\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2630\u001b[0m session)\n\u001b[1;32m-> 2631\u001b[1;33m \u001b[0mfetched\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_callable_fn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0marray_vals\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 2632\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0mfetched\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;33m:\u001b[0m\u001b[0mlen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0moutputs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2633\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
41 | "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 1449\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_created_with_new_api\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1450\u001b[0m return tf_session.TF_SessionRunCallable(\n\u001b[1;32m-> 1451\u001b[1;33m self._session._session, self._handle, args, status, None)\n\u001b[0m\u001b[0;32m 1452\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1453\u001b[0m return tf_session.TF_DeprecatedSessionRunCallable(\n",
42 | "\u001b[1;31mKeyboardInterrupt\u001b[0m: "
43 | ]
44 | }
45 | ],
46 | "source": [
47 | "import keras\n",
48 | "from keras.datasets import mnist\n",
49 | "from keras.models import Sequential\n",
50 | "from keras.layers import Dense, Dropout, Flatten\n",
51 | "from keras.layers import Conv2D, MaxPooling2D\n",
52 | "import keras.backend.tensorflow_backend as kk\n",
53 | "from keras import backend as K\n",
54 | "'''\n",
55 | "CPU test\n",
56 | "It takes a really long time.\n",
57 | "'''\n",
58 | "batch_size = 128\n",
59 | "num_classes = 10\n",
60 | "epochs = 12\n",
61 | "\n",
62 | "# input image dimensions\n",
63 | "img_rows, img_cols = 28, 28\n",
64 | "\n",
65 | "# the data, split between train and test sets\n",
66 | "(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
67 | "\n",
68 | "if K.image_data_format() == 'channels_first':\n",
69 | " x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)\n",
70 | " x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)\n",
71 | " input_shape = (1, img_rows, img_cols)\n",
72 | "else:\n",
73 | " x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)\n",
74 | " x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)\n",
75 | " input_shape = (img_rows, img_cols, 1)\n",
76 | "\n",
77 | "x_train = x_train.astype('float32')\n",
78 | "x_test = x_test.astype('float32')\n",
79 | "x_train /= 255\n",
80 | "x_test /= 255\n",
81 | "print('x_train shape:', x_train.shape)\n",
82 | "print(x_train.shape[0], 'train samples')\n",
83 | "print(x_test.shape[0], 'test samples')\n",
84 | "\n",
85 | "# convert class vectors to binary class matrices\n",
86 | "y_train = keras.utils.to_categorical(y_train, num_classes)\n",
87 | "y_test = keras.utils.to_categorical(y_test, num_classes)\n",
88 | "\n",
89 | "import tensorflow as tf\n",
90 | "config = tf.ConfigProto()\n",
91 | "config.gpu_options.allow_growth = True\n",
92 | "session = tf.Session(config=config)\n",
93 | "\n",
94 | "\n",
95 | "with kk.tf_ops.device('/device:CPU:0'):\n",
96 | " model = Sequential()\n",
97 | " model.add(Conv2D(32, kernel_size=(3, 3),\n",
98 | " activation='relu',\n",
99 | " input_shape=input_shape))\n",
100 | " model.add(Conv2D(64, (3, 3), activation='relu'))\n",
101 | " model.add(MaxPooling2D(pool_size=(2, 2)))\n",
102 | " model.add(Dropout(0.25))\n",
103 | " model.add(Flatten())\n",
104 | " model.add(Dense(128, activation='relu'))\n",
105 | " model.add(Dropout(0.5))\n",
106 | " model.add(Dense(num_classes, activation='softmax'))\n",
107 | "\n",
108 | " model.compile(loss=keras.losses.categorical_crossentropy,\n",
109 | " optimizer=keras.optimizers.Adadelta(),\n",
110 | " metrics=['accuracy'])\n",
111 | "\n",
112 | " model.fit(x_train, y_train,\n",
113 | " batch_size=batch_size,\n",
114 | " epochs=epochs,\n",
115 | " verbose=1,\n",
116 | " validation_data=(x_test, y_test))\n",
117 | "score = model.evaluate(x_test, y_test, verbose=0)\n",
118 | "print('Test loss:', score[0])\n",
119 | "print('Test accuracy:', score[1])"
120 | ]
121 | },
122 | {
123 | "cell_type": "code",
124 | "execution_count": null,
125 | "metadata": {},
126 | "outputs": [],
127 | "source": []
128 | }
129 | ],
130 | "metadata": {
131 | "kernelspec": {
132 | "display_name": "Python 3",
133 | "language": "python",
134 | "name": "python3"
135 | },
136 | "language_info": {
137 | "codemirror_mode": {
138 | "name": "ipython",
139 | "version": 3
140 | },
141 | "file_extension": ".py",
142 | "mimetype": "text/x-python",
143 | "name": "python",
144 | "nbconvert_exporter": "python",
145 | "pygments_lexer": "ipython3",
146 | "version": "3.6.5"
147 | }
148 | },
149 | "nbformat": 4,
150 | "nbformat_minor": 2
151 | }
152 |
--------------------------------------------------------------------------------
/.ipynb_checkpoints/keras_gpu_test-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "name": "stderr",
10 | "output_type": "stream",
11 | "text": [
12 | "C:\\Users\\leesoojin\\Anaconda3\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
13 | " from ._conv import register_converters as _register_converters\n",
14 | "Using TensorFlow backend.\n"
15 | ]
16 | },
17 | {
18 | "name": "stdout",
19 | "output_type": "stream",
20 | "text": [
21 | "x_train shape: (60000, 28, 28, 1)\n",
22 | "60000 train samples\n",
23 | "10000 test samples\n",
24 | "Train on 60000 samples, validate on 10000 samples\n",
25 | "Epoch 1/12\n",
26 | "60000/60000 [==============================] - 8s 132us/step - loss: 0.2549 - acc: 0.9224 - val_loss: 0.0562 - val_acc: 0.9813\n",
27 | "Epoch 2/12\n",
28 | "60000/60000 [==============================] - 6s 108us/step - loss: 0.0863 - acc: 0.9749 - val_loss: 0.0370 - val_acc: 0.9865\n",
29 | "Epoch 3/12\n",
30 | "60000/60000 [==============================] - 6s 108us/step - loss: 0.0637 - acc: 0.9811 - val_loss: 0.0361 - val_acc: 0.9881\n",
31 | "Epoch 4/12\n",
32 | "60000/60000 [==============================] - 6s 108us/step - loss: 0.0540 - acc: 0.9838 - val_loss: 0.0324 - val_acc: 0.9890\n",
33 | "Epoch 5/12\n",
34 | "60000/60000 [==============================] - 7s 108us/step - loss: 0.0449 - acc: 0.9862 - val_loss: 0.0368 - val_acc: 0.9885\n",
35 | "Epoch 6/12\n",
36 | "60000/60000 [==============================] - 6s 108us/step - loss: 0.0401 - acc: 0.9875 - val_loss: 0.0270 - val_acc: 0.9916\n",
37 | "Epoch 7/12\n",
38 | "60000/60000 [==============================] - 6s 106us/step - loss: 0.0367 - acc: 0.9889 - val_loss: 0.0284 - val_acc: 0.9906\n",
39 | "Epoch 8/12\n",
40 | "60000/60000 [==============================] - 6s 107us/step - loss: 0.0324 - acc: 0.9902 - val_loss: 0.0306 - val_acc: 0.9903\n",
41 | "Epoch 9/12\n",
42 | "60000/60000 [==============================] - 6s 107us/step - loss: 0.0301 - acc: 0.9904 - val_loss: 0.0273 - val_acc: 0.9915\n",
43 | "Epoch 10/12\n",
44 | "60000/60000 [==============================] - 6s 107us/step - loss: 0.0295 - acc: 0.9912 - val_loss: 0.0303 - val_acc: 0.9902\n",
45 | "Epoch 11/12\n",
46 | "60000/60000 [==============================] - 6s 107us/step - loss: 0.0272 - acc: 0.9916 - val_loss: 0.0289 - val_acc: 0.9917\n",
47 | "Epoch 12/12\n",
48 | "60000/60000 [==============================] - 6s 108us/step - loss: 0.0267 - acc: 0.9913 - val_loss: 0.0244 - val_acc: 0.9919\n",
49 | "Test loss: 0.02435819875159068\n",
50 | "Test accuracy: 0.9919\n"
51 | ]
52 | }
53 | ],
54 | "source": [
55 | "import keras\n",
56 | "from keras.datasets import mnist\n",
57 | "from keras.models import Sequential\n",
58 | "from keras.layers import Dense, Dropout, Flatten\n",
59 | "from keras.layers import Conv2D, MaxPooling2D\n",
60 | "import keras.backend.tensorflow_backend as kk\n",
61 | "from keras import backend as K\n",
62 | "'''\n",
63 | "GPU test\n",
64 | "About 6 seconds per epoch.\n",
65 | "\n",
66 | "'''\n",
67 | "batch_size = 128\n",
68 | "num_classes = 10\n",
69 | "epochs = 12\n",
70 | "\n",
71 | "# input image dimensions\n",
72 | "img_rows, img_cols = 28, 28\n",
73 | "\n",
74 | "# the data, split between train and test sets\n",
75 | "(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
76 | "\n",
77 | "if K.image_data_format() == 'channels_first':\n",
78 | " x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)\n",
79 | " x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)\n",
80 | " input_shape = (1, img_rows, img_cols)\n",
81 | "else:\n",
82 | " x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)\n",
83 | " x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)\n",
84 | " input_shape = (img_rows, img_cols, 1)\n",
85 | "\n",
86 | "x_train = x_train.astype('float32')\n",
87 | "x_test = x_test.astype('float32')\n",
88 | "x_train /= 255\n",
89 | "x_test /= 255\n",
90 | "print('x_train shape:', x_train.shape)\n",
91 | "print(x_train.shape[0], 'train samples')\n",
92 | "print(x_test.shape[0], 'test samples')\n",
93 | "\n",
94 | "# convert class vectors to binary class matrices\n",
95 | "y_train = keras.utils.to_categorical(y_train, num_classes)\n",
96 | "y_test = keras.utils.to_categorical(y_test, num_classes)\n",
97 | "\n",
98 | "import tensorflow as tf\n",
99 | "config = tf.ConfigProto()\n",
100 | "config.gpu_options.allow_growth = True\n",
101 | "session = tf.Session(config=config)\n",
102 | "\n",
103 | "\n",
104 | "with kk.tf_ops.device('/device:GPU:0'):\n",
105 | " model = Sequential()\n",
106 | " model.add(Conv2D(32, kernel_size=(3, 3),\n",
107 | " activation='relu',\n",
108 | " input_shape=input_shape))\n",
109 | " model.add(Conv2D(64, (3, 3), activation='relu'))\n",
110 | " model.add(MaxPooling2D(pool_size=(2, 2)))\n",
111 | " model.add(Dropout(0.25))\n",
112 | " model.add(Flatten())\n",
113 | " model.add(Dense(128, activation='relu'))\n",
114 | " model.add(Dropout(0.5))\n",
115 | " model.add(Dense(num_classes, activation='softmax'))\n",
116 | "\n",
117 | " model.compile(loss=keras.losses.categorical_crossentropy,\n",
118 | " optimizer=keras.optimizers.Adadelta(),\n",
119 | " metrics=['accuracy'])\n",
120 | "\n",
121 | " model.fit(x_train, y_train,\n",
122 | " batch_size=batch_size,\n",
123 | " epochs=epochs,\n",
124 | " verbose=1,\n",
125 | " validation_data=(x_test, y_test))\n",
126 | "score = model.evaluate(x_test, y_test, verbose=0)\n",
127 | "print('Test loss:', score[0])\n",
128 | "print('Test accuracy:', score[1])"
129 | ]
130 | },
131 | {
132 | "cell_type": "code",
133 | "execution_count": 2,
134 | "metadata": {},
135 | "outputs": [
136 | {
137 | "name": "stdout",
138 | "output_type": "stream",
139 | "text": [
140 | "x_train shape: (60000, 28, 28)\n",
141 | "60000 train samples\n",
142 | "10000 test samples\n"
143 | ]
144 | },
145 | {
146 | "ename": "ValueError",
147 | "evalue": "Error when checking input: expected conv2d_3_input to have 4 dimensions, but got array with shape (60000, 28, 28)",
148 | "output_type": "error",
149 | "traceback": [
150 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
151 | "\u001b[1;31mValueError\u001b[0m Traceback (most recent call last)",
152 | "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m()\u001b[0m\n\u001b[0;32m 57\u001b[0m \u001b[0mepochs\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mepochs\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 58\u001b[0m \u001b[0mverbose\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 59\u001b[1;33m validation_data=(x_test, y_test))\n\u001b[0m\u001b[0;32m 60\u001b[0m \u001b[0mscore\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mevaluate\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mx_test\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0my_test\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mverbose\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 61\u001b[0m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Test loss:'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mscore\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
153 | "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras\\engine\\training.py\u001b[0m in \u001b[0;36mfit\u001b[1;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, **kwargs)\u001b[0m\n\u001b[0;32m 953\u001b[0m \u001b[0msample_weight\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0msample_weight\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 954\u001b[0m \u001b[0mclass_weight\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mclass_weight\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 955\u001b[1;33m batch_size=batch_size)\n\u001b[0m\u001b[0;32m 956\u001b[0m \u001b[1;31m# Prepare validation data.\u001b[0m\u001b[1;33m\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 957\u001b[0m \u001b[0mdo_validation\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;32mFalse\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
154 | "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras\\engine\\training.py\u001b[0m in \u001b[0;36m_standardize_user_data\u001b[1;34m(self, x, y, sample_weight, class_weight, check_array_lengths, batch_size)\u001b[0m\n\u001b[0;32m 752\u001b[0m \u001b[0mfeed_input_shapes\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 753\u001b[0m \u001b[0mcheck_batch_axis\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;32mFalse\u001b[0m\u001b[1;33m,\u001b[0m \u001b[1;31m# Don't enforce the batch size.\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 754\u001b[1;33m exception_prefix='input')\n\u001b[0m\u001b[0;32m 755\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 756\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0my\u001b[0m \u001b[1;32mis\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[1;32mNone\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
155 | "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras\\engine\\training_utils.py\u001b[0m in \u001b[0;36mstandardize_input_data\u001b[1;34m(data, names, shapes, check_batch_axis, exception_prefix)\u001b[0m\n\u001b[0;32m 124\u001b[0m \u001b[1;34m': expected '\u001b[0m \u001b[1;33m+\u001b[0m \u001b[0mnames\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mi\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;33m+\u001b[0m \u001b[1;34m' to have '\u001b[0m \u001b[1;33m+\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 125\u001b[0m \u001b[0mstr\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mshape\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;33m+\u001b[0m \u001b[1;34m' dimensions, but got array '\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 126\u001b[1;33m 'with shape ' + str(data_shape))\n\u001b[0m\u001b[0;32m 127\u001b[0m \u001b[1;32mif\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0mcheck_batch_axis\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 128\u001b[0m \u001b[0mdata_shape\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mdata_shape\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
156 | "\u001b[1;31mValueError\u001b[0m: Error when checking input: expected conv2d_3_input to have 4 dimensions, but got array with shape (60000, 28, 28)"
157 | ]
158 | }
159 | ],
160 | "source": [
161 | "import keras\n",
162 | "from keras.datasets import mnist\n",
163 | "from keras.models import Sequential\n",
164 | "from keras.layers import Dense, Dropout, Flatten\n",
165 | "from keras.layers import Conv2D, MaxPooling2D\n",
166 | "import keras.backend.tensorflow_backend as kk\n",
167 | "from keras import backend as K\n",
168 | "'''\n",
169 | "GPU test\n",
170 | "About 6 seconds per epoch.\n",
171 | "\n",
172 | "'''\n",
173 | "batch_size = 128\n",
174 | "num_classes = 10\n",
175 | "epochs = 12\n",
176 | "\n",
177 | "# input image dimensions\n",
178 | "img_rows, img_cols = 28, 28\n",
179 | "\n",
180 | "# the data, split between train and test sets\n",
181 | "(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
182 | "\n",
183 | "if K.image_data_format() == 'channels_first':\n",
184 | " x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)\n",
185 | " x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)\n",
186 | " input_shape = (1, img_rows, img_cols)\n",
187 | "else:\n",
188 | " x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)\n",
189 | " x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)\n",
190 | " input_shape = (img_rows, img_cols, 1)\n",
191 | "\n",
192 | "x_train = x_train.astype('float32')\n",
193 | "x_test = x_test.astype('float32')\n",
194 | "x_train /= 255\n",
195 | "x_test /= 255\n",
196 | "print('x_train shape:', x_train.shape)\n",
197 | "print(x_train.shape[0], 'train samples')\n",
198 | "print(x_test.shape[0], 'test samples')\n",
199 | "\n",
200 | "# convert class vectors to binary class matrices\n",
201 | "y_train = keras.utils.to_categorical(y_train, num_classes)\n",
202 | "y_test = keras.utils.to_categorical(y_test, num_classes)\n",
203 | "\n",
204 | "\n",
205 | "\n",
206 | "with kk.tf_ops.device('/device:GPU:0'):\n",
207 | " model = Sequential()\n",
208 | " model.add(Conv2D(32, kernel_size=(3, 3),\n",
209 | " activation='relu',\n",
210 | " input_shape=input_shape))\n",
211 | " model.add(Conv2D(64, (3, 3), activation='relu'))\n",
212 | " model.add(MaxPooling2D(pool_size=(2, 2)))\n",
213 | " model.add(Dropout(0.25))\n",
214 | " model.add(Flatten())\n",
215 | " model.add(Dense(128, activation='relu'))\n",
216 | " model.add(Dropout(0.5))\n",
217 | " model.add(Dense(num_classes, activation='softmax'))\n",
218 | "\n",
219 | " model.compile(loss=keras.losses.categorical_crossentropy,\n",
220 | " optimizer=keras.optimizers.Adadelta(),\n",
221 | " metrics=['accuracy'])\n",
222 | "\n",
223 | " model.fit(x_train, y_train,\n",
224 | " batch_size=batch_size,\n",
225 | " epochs=epochs,\n",
226 | " verbose=1,\n",
227 | " validation_data=(x_test, y_test))\n",
228 | "score = model.evaluate(x_test, y_test, verbose=0)\n",
229 | "print('Test loss:', score[0])\n",
230 | "print('Test accuracy:', score[1])"
231 | ]
232 | },
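
The ValueError in the output above is worth a note: Conv2D expects 4-D input of shape (samples, rows, cols, channels), while mnist.load_data() returns 3-D arrays of shape (60000, 28, 28); the image_data_format() branch in the cell adds the missing channel axis. A minimal sketch of the same fix, assuming the TensorFlow channels-last layout:

```python
import numpy as np
from keras.datasets import mnist

(x_train, _), (x_test, _) = mnist.load_data()
# Append the channel axis so Conv2D receives (samples, 28, 28, 1).
x_train = np.expand_dims(x_train, axis=-1)
x_test = np.expand_dims(x_test, axis=-1)
print(x_train.shape)  # (60000, 28, 28, 1)
```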
233 | {
234 | "cell_type": "code",
235 | "execution_count": null,
236 | "metadata": {},
237 | "outputs": [],
238 | "source": []
239 | }
240 | ],
241 | "metadata": {
242 | "kernelspec": {
243 | "display_name": "Python 3",
244 | "language": "python",
245 | "name": "python3"
246 | },
247 | "language_info": {
248 | "codemirror_mode": {
249 | "name": "ipython",
250 | "version": 3
251 | },
252 | "file_extension": ".py",
253 | "mimetype": "text/x-python",
254 | "name": "python",
255 | "nbconvert_exporter": "python",
256 | "pygments_lexer": "ipython3",
257 | "version": "3.6.5"
258 | }
259 | },
260 | "nbformat": 4,
261 | "nbformat_minor": 2
262 | }
263 |
--------------------------------------------------------------------------------
/10. predict_korea_movie_review_classification.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Review Data Classification\n",
8 | "\n",
9 | "The process is as follows.\n",
10 | "\n",
11 | "1. The data is provided\n",
12 | "\n",
13 | "2. Morphological analysis (mecab version)\n",
14 | "\n",
15 | "    2-1. Build a user dictionary from the words in the train set\n",
16 | "\n",
17 | "    2-2. Use data that was already morphologically analyzed (mecab runs inside a VMware VM, so this was done for speed)\n",
18 | "\n",
19 | "\n",
20 | "3. Modeling\n",
21 | "\n",
22 | "4. Training\n",
23 | "\n",
24 | "5. Evaluation\n",
25 | "\n",
26 | "6. Extras (experiments with other approaches, beyond just accuracy)\n",
27 | "\n",
28 | "    6-1. Machine learning\n",
29 | "\n",
30 | "    6-2. Analysis with the Kakao morphological analyzer (khaiii); installation notes and a review are posted on my blog, and the links are in this notebook\n",
31 | "\n",
32 | "\n",
33 | "(screenshot omitted: the user dictionary)\n",
34 | "\n",
35 | "First, build the user dictionary as shown above.\n",
36 | "\n",
37 | "(screenshots omitted: the morphological-analysis script)\n",
40 | "Then implement a Python program that runs the morphological analysis, as shown above.\n",
41 | "\n",
42 | "(screenshot omitted: the script run to completion)\n",
43 | "\n",
44 | "Done.\n",
45 | "\n",
46 | "(screenshot omitted: the analyzed data)\n",
47 | "\n",
48 | "The data after morphological analysis is complete.\n",
49 | "\n",
50 | "\n",
51 | "Now we proceed with this completed data."
52 | ]
53 | },
54 | {
55 | "cell_type": "code",
56 | "execution_count": 1,
57 | "metadata": {},
58 | "outputs": [
59 | {
60 | "name": "stderr",
61 | "output_type": "stream",
62 | "text": [
63 | "C:\\Users\\leesoojin\\Anaconda3\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
64 | " from ._conv import register_converters as _register_converters\n",
65 | "Using TensorFlow backend.\n"
66 | ]
67 | }
68 | ],
69 | "source": [
70 | "import os, json, glob, sys, numpy as np\n",
71 | "import pandas as pd\n",
72 | "import matplotlib.pyplot as plt\n",
73 | "import matplotlib as mpl\n",
74 | "import keras.backend.tensorflow_backend as K\n",
75 | "import tensorflow as tf\n",
76 | "from keras.preprocessing.text import Tokenizer\n",
77 | "from keras.preprocessing.sequence import pad_sequences\n",
78 | "from keras.preprocessing import sequence\n",
79 | "from keras.models import Sequential, Model\n",
80 | "from keras.layers import Dense, Embedding, LSTM, Flatten, Dropout, Input, Conv1D, MaxPooling1D, Activation, GlobalMaxPooling1D, GlobalMaxPool1D\n",
81 | "from keras.utils import np_utils\n",
82 | "from keras.callbacks import EarlyStopping, ModelCheckpoint\n",
83 | "from sklearn.model_selection import train_test_split\n",
84 | "from keras.layers.normalization import BatchNormalization\n",
85 | "from keras.layers.wrappers import Bidirectional\n",
86 | "config = tf.ConfigProto()\n",
87 | "config.gpu_options.allow_growth = True\n",
88 | "session = tf.Session(config=config)"
89 | ]
90 | },
91 | {
92 | "cell_type": "markdown",
93 | "metadata": {},
94 | "source": [
95 | "The files have no header row, so load them with header=None.\n"
96 | ]
97 | },
98 | {
99 | "cell_type": "code",
100 | "execution_count": 2,
101 | "metadata": {},
102 | "outputs": [],
103 | "source": [
104 | "data = pd.read_csv('./data_after_tokenizer/ratings_train_after_prepro.txt', header=None)\n",
105 | "data_test = pd.read_csv('./data_after_tokenizer/ratings_test_after_prepro.txt', header=None)"
106 | ]
107 | },
108 | {
109 | "cell_type": "markdown",
110 | "metadata": {},
111 | "source": [
112 | "Check the dataset sizes: 150,000 rows in train and 50,000 in test."
113 | ]
114 | },
115 | {
116 | "cell_type": "code",
117 | "execution_count": 3,
118 | "metadata": {},
119 | "outputs": [
120 | {
121 | "name": "stdout",
122 | "output_type": "stream",
123 | "text": [
124 | "150000\n",
125 | "(150000, 3)\n",
126 | "(50000, 3)\n"
127 | ]
128 | }
129 | ],
130 | "source": [
131 | "print(len(data.iloc[:, 0]))\n",
132 | "print(data.shape)\n",
133 | "print(data_test.shape)"
134 | ]
135 | },
136 | {
137 | "cell_type": "markdown",
138 | "metadata": {},
139 | "source": [
140 | "Shuffling is not strictly necessary here, but shuffle anyway."
141 | ]
142 | },
143 | {
144 | "cell_type": "code",
145 | "execution_count": 4,
146 | "metadata": {},
147 | "outputs": [
148 | {
149 | "name": "stdout",
150 | "output_type": "stream",
151 | "text": [
152 | "0 0\n",
153 | "1 1\n",
154 | "2 0\n",
155 | "3 1\n",
156 | "4 1\n",
157 | "5 1\n",
158 | "6 1\n",
159 | "7 0\n",
160 | "8 0\n",
161 | "9 1\n",
162 | "Name: 2, dtype: int64\n"
163 | ]
164 | }
165 | ],
166 | "source": [
167 | "df = data.sample(frac=1).reset_index(drop=True)\n",
168 | "print(df.iloc[0:10,2])"
169 | ]
170 | },
171 | {
172 | "cell_type": "markdown",
173 | "metadata": {},
174 | "source": [
175 | "Also, some reviews contain no text, so those rows are dropped."
176 | ]
177 | },
178 | {
179 | "cell_type": "code",
180 | "execution_count": 5,
181 | "metadata": {},
182 | "outputs": [],
183 | "source": [
184 | "df = df.dropna(how='any')\n",
185 | "data_test = data_test.dropna(how='any')"
186 | ]
187 | },
188 | {
189 | "cell_type": "markdown",
190 | "metadata": {},
191 | "source": [
192 | "Separate the features and labels for the train and test sets."
193 | ]
194 | },
195 | {
196 | "cell_type": "code",
197 | "execution_count": 6,
198 | "metadata": {},
199 | "outputs": [],
200 | "source": [
201 | "X_train = df.iloc[:, 1].values\n",
202 | "y_train = df.iloc[:, 2].values\n",
203 | "X_test = data_test.iloc[:, 1].values\n",
204 | "y_test = data_test.iloc[:, 2].values\n"
205 | ]
206 | },
207 | {
208 | "cell_type": "code",
209 | "execution_count": 7,
210 | "metadata": {},
211 | "outputs": [
212 | {
213 | "name": "stdout",
214 | "output_type": "stream",
215 | "text": [
216 | "['잔잔 흐름 속 피식피식 ' '아주 재밌 고요 저 만화 봐요 ' '시시 전개 시시 결말 촌 전 영화 '\n",
217 | " '영화 최고 한 번 같 다 다른 세 네 번 최고 배우 ' '쉴새없이 몰아치 는 액션 눈 즐겁 다 년 전 영화 라 믿 어려울 정도 ']\n",
218 | "['굳 크 ' '뭐 이 평점 .. .. 나쁘 않 지만 10점 더더욱 잖아 '\n",
219 | " '지루 지 않 은데 완전 막장 돈 주 고 보 .. .. '\n",
220 | " '었 어도 별 개 줬 을 텐데 .. 왜 나와서 심기 불편 게 하 죠 ? ? ' '음악 주가 된 최고 음악 영화 ']\n"
221 | ]
222 | }
223 | ],
224 | "source": [
225 | "print(X_train[:5])\n",
226 | "print(X_test[:5])\n"
227 | ]
228 | },
229 | {
230 | "cell_type": "markdown",
231 | "metadata": {},
232 | "source": [
233 | "**My usual approach is to keep only the top 10-20% of words, but online reviewers use such a varied vocabulary that as many words as possible are kept here.**\n",
234 | "\n",
235 | "The maximum review length was 37 tokens, if I remember correctly, so the length is set to a reasonable 35.\n",
236 | "\n",
237 | "Then the words are tokenized with Tokenizer."
238 | ]
239 | },
240 | {
241 | "cell_type": "code",
242 | "execution_count": 8,
243 | "metadata": {},
244 | "outputs": [
245 | {
246 | "name": "stdout",
247 | "output_type": "stream",
248 | "text": [
249 | "42251\n"
250 | ]
251 | }
252 | ],
253 | "source": [
254 | "max_word = 35000\n",
255 | "max_len = 35\n",
256 | "\n",
257 | "tok = Tokenizer(num_words = max_word)\n",
258 | "tok.fit_on_texts(X_train)\n",
259 | "print(len(tok.word_index))\n",
260 | "#print(tok.word_index)"
261 | ]
262 | },
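
One detail about Tokenizer: word_index always keeps the full vocabulary (42,251 entries here), and num_words only caps which indices texts_to_sequences will emit, so words ranked below the top max_word are silently dropped. A small sketch illustrating the behavior:

```python
from keras.preprocessing.text import Tokenizer

toy = Tokenizer(num_words=3)  # keeps only indices 1 and 2 when converting text
toy.fit_on_texts(['a a a b b c'])
print(toy.word_index)                     # {'a': 1, 'b': 2, 'c': 3} -- full vocabulary
print(toy.texts_to_sequences(['a b c']))  # [[1, 2]] -- 'c' falls outside num_words
```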
263 | {
264 | "cell_type": "markdown",
265 | "metadata": {},
266 | "source": [
267 | "Convert text to integer sequences."
268 | ]
269 | },
270 | {
271 | "cell_type": "code",
272 | "execution_count": 9,
273 | "metadata": {},
274 | "outputs": [
275 | {
276 | "name": "stdout",
277 | "output_type": "stream",
278 | "text": [
279 | "4\n",
280 | "[278, 1095, 160, 9804]\n"
281 | ]
282 | }
283 | ],
284 | "source": [
285 | "sequences = tok.texts_to_sequences(X_train)\n",
286 | "print(len(sequences[0]))\n",
287 | "print(sequences[0])"
288 | ]
289 | },
290 | {
291 | "cell_type": "markdown",
292 | "metadata": {},
293 | "source": [
294 | "Convert to a padded sequence matrix."
295 | ]
296 | },
297 | {
298 | "cell_type": "code",
299 | "execution_count": 10,
300 | "metadata": {},
301 | "outputs": [
302 | {
303 | "name": "stdout",
304 | "output_type": "stream",
305 | "text": [
306 | "148915\n",
307 | "[ 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n",
308 | " 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n",
309 | " 0 0 0 278 1095 160 9804]\n"
310 | ]
311 | }
312 | ],
313 | "source": [
314 | "sequences_matrix = sequence.pad_sequences(sequences, maxlen=max_len)\n",
315 | "print(len(sequences_matrix))\n",
316 | "print(sequences_matrix[0])"
317 | ]
318 | },
319 | {
320 | "cell_type": "code",
321 | "execution_count": 11,
322 | "metadata": {},
323 | "outputs": [],
324 | "source": [
325 | "sequences_test = tok.texts_to_sequences(X_test)\n",
326 | "sequences_test_matrix = sequence.pad_sequences(sequences_test, maxlen=max_len)"
327 | ]
328 | },
329 | {
330 | "cell_type": "markdown",
331 | "metadata": {},
332 | "source": []
333 | },
334 | {
335 | "cell_type": "markdown",
336 | "metadata": {},
337 | "source": [
338 | "# 모델 구축"
339 | ]
340 | },
341 | {
342 | "cell_type": "code",
343 | "execution_count": 12,
344 | "metadata": {},
345 | "outputs": [],
346 | "source": [
347 | "with K.tf_ops.device('device:GPU:0'):\n",
348 | " model = Sequential()\n",
349 | "    # Embed the max_word vocabulary (35,000 words) into 50 dimensions; sentence length is max_len\n",
350 | " model.add(Embedding(max_word, 50, input_length=max_len))\n",
351 | " model.add(LSTM(64))\n",
352 | " model.add(Dense(256, activation='relu'))\n",
353 | " model.add(Dropout(0.5))\n",
354 | " model.add(Dense(1, activation='sigmoid'))\n",
355 | " \n",
356 | " model.compile(loss='binary_crossentropy',optimizer='adam',metrics=['accuracy'])\n",
357 | " model_dir = './model'\n",
358 | " if not os.path.exists(model_dir):\n",
359 | " os.mkdir(model_dir)\n",
360 | " model_path = model_dir + \"/review_lstm_soojin.model\"\n",
361 | " \n",
362 | " checkpoint = ModelCheckpoint(filepath=model_path, monitor='val_loss', verbose=1, save_best_only=True)\n",
363 | " early_stopping = EarlyStopping(monitor='val_loss', patience=3)"
364 | ]
365 | },
366 | {
367 | "cell_type": "code",
368 | "execution_count": 13,
369 | "metadata": {},
370 | "outputs": [
371 | {
372 | "name": "stdout",
373 | "output_type": "stream",
374 | "text": [
375 | "_________________________________________________________________\n",
376 | "Layer (type) Output Shape Param # \n",
377 | "=================================================================\n",
378 | "embedding_1 (Embedding) (None, 35, 50) 1750000 \n",
379 | "_________________________________________________________________\n",
380 | "lstm_1 (LSTM) (None, 64) 29440 \n",
381 | "_________________________________________________________________\n",
382 | "dense_1 (Dense) (None, 256) 16640 \n",
383 | "_________________________________________________________________\n",
384 | "dropout_1 (Dropout) (None, 256) 0 \n",
385 | "_________________________________________________________________\n",
386 | "dense_2 (Dense) (None, 1) 257 \n",
387 | "=================================================================\n",
388 | "Total params: 1,796,337\n",
389 | "Trainable params: 1,796,337\n",
390 | "Non-trainable params: 0\n",
391 | "_________________________________________________________________\n"
392 | ]
393 | }
394 | ],
395 | "source": [
396 | "model.summary()"
397 | ]
398 | },
399 | {
400 | "cell_type": "markdown",
401 | "metadata": {},
402 | "source": [
403 | "**Training**"
404 | ]
405 | },
406 | {
407 | "cell_type": "code",
408 | "execution_count": 14,
409 | "metadata": {},
410 | "outputs": [
411 | {
412 | "name": "stdout",
413 | "output_type": "stream",
414 | "text": [
415 | "Train on 119132 samples, validate on 29783 samples\n",
416 | "Epoch 1/15\n",
417 | "119132/119132 [==============================] - 61s 509us/step - loss: 0.3904 - acc: 0.8225 - val_loss: 0.3387 - val_acc: 0.8537\n",
418 | "\n",
419 | "Epoch 00001: val_loss improved from inf to 0.33870, saving model to ./model/review_lstm_soojin.model\n",
420 | "Epoch 2/15\n",
421 | "119132/119132 [==============================] - 58s 489us/step - loss: 0.2915 - acc: 0.8775 - val_loss: 0.3407 - val_acc: 0.8548\n",
422 | "\n",
423 | "Epoch 00002: val_loss did not improve from 0.33870\n",
424 | "Epoch 3/15\n",
425 | "119132/119132 [==============================] - 58s 491us/step - loss: 0.2368 - acc: 0.9021 - val_loss: 0.3784 - val_acc: 0.8535\n",
426 | "\n",
427 | "Epoch 00003: val_loss did not improve from 0.33870\n",
428 | "Epoch 4/15\n",
429 | "119132/119132 [==============================] - 58s 489us/step - loss: 0.1957 - acc: 0.9209 - val_loss: 0.3961 - val_acc: 0.8499\n",
430 | "\n",
431 | "Epoch 00004: val_loss did not improve from 0.33870\n"
432 | ]
433 | }
434 | ],
435 | "source": [
436 | "hist = model.fit(sequences_matrix, y_train, batch_size=128, epochs=15, validation_split=0.2, callbacks=[checkpoint, early_stopping])"
437 | ]
438 | },
439 | {
440 | "cell_type": "markdown",
441 | "metadata": {},
442 | "source": [
443 | "**Accuracy**"
444 | ]
445 | },
446 | {
447 | "cell_type": "code",
448 | "execution_count": 15,
449 | "metadata": {},
450 | "outputs": [
451 | {
452 | "name": "stdout",
453 | "output_type": "stream",
454 | "text": [
455 | "49631/49631 [==============================] - 36s 716us/step\n",
456 | "Accuracy: 0.8460\n"
457 | ]
458 | }
459 | ],
460 | "source": [
461 | "print(\"Accuracy: %.4f\" % (model.evaluate(sequences_test_matrix, y_test)[1]))"
462 | ]
463 | },
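
A caveat on this number: evaluate() here scores the weights from the last epoch (epoch 4), while ModelCheckpoint saved the epoch-1 weights that had the best val_loss. To score the checkpointed model instead, it can be reloaded first; a sketch using the model_path defined above:

```python
from keras.models import load_model

best_model = load_model('./model/review_lstm_soojin.model')
loss, acc = best_model.evaluate(sequences_test_matrix, y_test, verbose=0)
print("Accuracy (best checkpoint): %.4f" % acc)
```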
464 | {
465 | "cell_type": "markdown",
466 | "metadata": {},
467 | "source": [
468 | "# Extras\n",
469 | "\n",
470 | "**Performance comparison with scikit-learn and with the Kakao morphological analyzer**\n",
471 | "\n",
472 | "**Installation instructions and a review of the Kakao analyzer are posted on my blog; see the links below.**\n",
473 | "\n",
474 | "**Installation and quick review**\n",
475 | "https://lsjsj92.tistory.com/408\n",
476 | "\n",
477 | "**Model comparison and performance evaluation**\n",
478 | "https://lsjsj92.tistory.com/410\n"
479 | ]
480 | },
481 | {
482 | "cell_type": "markdown",
483 | "metadata": {},
484 | "source": [
485 | "# scikit-learn\n",
486 | "\n",
487 | "Using TfidfVectorizer and LogisticRegression."
488 | ]
489 | },
490 | {
491 | "cell_type": "code",
492 | "execution_count": 16,
493 | "metadata": {},
494 | "outputs": [],
495 | "source": [
496 | "from sklearn.feature_extraction.text import TfidfVectorizer\n",
497 | "from sklearn.model_selection import train_test_split, GridSearchCV, cross_val_score\n",
498 | "from sklearn.linear_model import LogisticRegression, LinearRegression, Ridge\n",
499 | "from sklearn.pipeline import make_pipeline"
500 | ]
501 | },
502 | {
503 | "cell_type": "code",
504 | "execution_count": null,
505 | "metadata": {},
506 | "outputs": [],
507 | "source": []
508 | },
509 | {
510 | "cell_type": "code",
511 | "execution_count": 17,
512 | "metadata": {},
513 | "outputs": [
514 | {
515 | "name": "stdout",
516 | "output_type": "stream",
517 | "text": [
518 | "Best cross-validation score: 0.82\n"
519 | ]
520 | }
521 | ],
522 | "source": [
523 | "pipe = make_pipeline(TfidfVectorizer(min_df=5), LogisticRegression())\n",
524 | "param_grid = {\n",
525 | " 'logisticregression__C' : [0.001, 0.01, 0.1, 1, 10]\n",
526 | "}\n",
527 | "\n",
528 | "grid = GridSearchCV(pipe, param_grid, cv=5)\n",
529 | "grid.fit(X_train, y_train)\n",
530 | "print(\"Best cross-validation score: %.2f\" % (grid.best_score_))"
531 | ]
532 | },
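
To see which C the grid search actually picked, the fitted GridSearchCV object exposes it directly; a small sketch:

```python
print(grid.best_params_)  # e.g. {'logisticregression__C': 1}, matching the estimator printed below
print(grid.best_score_)   # mean cross-validation accuracy of that setting
```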
533 | {
534 | "cell_type": "code",
535 | "execution_count": 18,
536 | "metadata": {},
537 | "outputs": [
538 | {
539 | "name": "stdout",
540 | "output_type": "stream",
541 | "text": [
542 | "Test score: 0.82\n"
543 | ]
544 | }
545 | ],
546 | "source": [
547 | "print(\"Test score: %.2f\" % (grid.score(X_test, y_test)))"
548 | ]
549 | },
550 | {
551 | "cell_type": "code",
552 | "execution_count": 19,
553 | "metadata": {},
554 | "outputs": [],
555 | "source": [
556 | "vector = grid.best_estimator_.named_steps[\"tfidfvectorizer\"]"
557 | ]
558 | },
559 | {
560 | "cell_type": "code",
561 | "execution_count": 20,
562 | "metadata": {},
563 | "outputs": [],
564 | "source": [
565 | "indices = np.argsort(vector.idf_)[::-1]\n",
566 | "features = vector.get_feature_names()\n",
567 | "top_n = 2\n",
568 | "top_features = [features[i] for i in indices[:top_n]]"
569 | ]
570 | },
571 | {
572 | "cell_type": "code",
573 | "execution_count": 21,
574 | "metadata": {},
575 | "outputs": [
576 | {
577 | "name": "stdout",
578 | "output_type": "stream",
579 | "text": [
580 | "Top 100 features by tf-idf: \n",
581 | " ['승부' '승리' '승려' '습니당' '습니다' '습격' '슬플' '슬프' '슬펐' '캐리' '슬퍼요' '슬퍼' '캐릭터'\n",
582 | " '스피드' '스포일러' '스페인' '시간대' '스릴러' '친절' '친일파' '씨발' '씁쓸' '쓴다' '쓰레기통' '충무로'\n",
583 | " '충분' '충분히' '씨엔블루' '쓰레기' '쓰래기' '쏘우' '쏘리' '썸머' '썰전' '썰렁' '취미' '쓰레' '아가'\n",
584 | " '아기자기' '아까' '아래' '아라' '아들' '아드레날린' '아동' '아담' '아니하' '아니야' '아니' '아놔' '아나'\n",
585 | " '아깝' '출연료' '아까웠' '출연진' '아까워' '아까운' '써요' '신데렐라' '쌍둥이' '싸이코' '실질' '실은' '실수'\n",
586 | " '실사' '실망' '치즈' '치킨' '실패' '치히로' '친구' '신하균' '친다' '신파' '신선' '신부' '신발' '칙칙'\n",
587 | " '실패작' '실화' '심각' '싸이' '취한다' '싸움' '싸우' '취향' '싸구려' '싱크로' '싱겁' '심형래' '심했'\n",
588 | " '심한데' '심하' '심장' '심심풀이' '치밀' '심심' '심리' '취지' '0개']\n"
589 | ]
590 | }
591 | ],
592 | "source": [
593 | "X_train_tf = vector.transform(X_train)\n",
594 | "feature_names = np.array(vector.get_feature_names())\n",
595 | "max_value = X_train_tf.max(axis=0).toarray().ravel()\n",
596 | "sorted_by_tfidf = max_value.argsort()\n",
597 | "print(\"Top 100 features by tf-idf: \\n \", feature_names[sorted_by_tfidf[-100:]])"
598 | ]
599 | },
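
A version note: TfidfVectorizer.get_feature_names(), used in the two cells above, was deprecated in scikit-learn 1.0 and removed in 1.2. On newer versions the equivalent call is get_feature_names_out():

```python
import numpy as np

# scikit-learn >= 1.0: get_feature_names_out() replaces get_feature_names()
feature_names = np.array(vector.get_feature_names_out())
```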
600 | {
601 | "cell_type": "code",
602 | "execution_count": 22,
603 | "metadata": {},
604 | "outputs": [
605 | {
606 | "name": "stdout",
607 | "output_type": "stream",
608 | "text": [
609 | "LogisticRegression(C=1, class_weight=None, dual=False, fit_intercept=True,\n",
610 | " intercept_scaling=1, max_iter=100, multi_class='ovr', n_jobs=1,\n",
611 | " penalty='l2', random_state=None, solver='liblinear', tol=0.0001,\n",
612 | " verbose=0, warm_start=False)\n"
613 | ]
614 | }
615 | ],
616 | "source": [
617 | "print(grid.best_estimator_.named_steps[\"logisticregression\"])"
618 | ]
619 | },
620 | {
621 | "cell_type": "code",
622 | "execution_count": null,
623 | "metadata": {},
624 | "outputs": [],
625 | "source": []
626 | },
627 | {
628 | "cell_type": "markdown",
629 | "metadata": {},
630 | "source": [
631 | "# Results with the Kakao morphological analyzer\n",
632 | "\n",
633 | "Performance is noticeably lower. There are likely several reasons, but the most probable is that no user dictionary was built, so the analyzer fails to recognize slang and other informal words."
634 | ]
635 | },
636 | {
637 | "cell_type": "code",
638 | "execution_count": 23,
639 | "metadata": {},
640 | "outputs": [],
641 | "source": [
642 | "data = pd.read_csv('./data_after_tokenizer_kakao/ratings_train_after_prepro.txt', header=None)\n",
643 | "data_test = pd.read_csv('./data_after_tokenizer_kakao/ratings_test_after_prepro.txt', header=None)"
644 | ]
645 | },
646 | {
647 | "cell_type": "code",
648 | "execution_count": 24,
649 | "metadata": {},
650 | "outputs": [
651 | {
652 | "name": "stdout",
653 | "output_type": "stream",
654 | "text": [
655 | "0 0\n",
656 | "1 0\n",
657 | "2 0\n",
658 | "3 1\n",
659 | "4 0\n",
660 | "5 1\n",
661 | "6 0\n",
662 | "7 1\n",
663 | "8 1\n",
664 | "9 0\n",
665 | "Name: 2, dtype: int64\n"
666 | ]
667 | }
668 | ],
669 | "source": [
670 | "df = data.sample(frac=1).reset_index(drop=True)\n",
671 | "print(df.iloc[0:10,2])"
672 | ]
673 | },
674 | {
675 | "cell_type": "code",
676 | "execution_count": 25,
677 | "metadata": {},
678 | "outputs": [],
679 | "source": [
680 | "df = df.dropna(how='any')\n",
681 | "data_test = data_test.dropna(how='any')"
682 | ]
683 | },
684 | {
685 | "cell_type": "code",
686 | "execution_count": 26,
687 | "metadata": {},
688 | "outputs": [],
689 | "source": [
690 | "X_train = df.iloc[:, 1].values\n",
691 | "y_train = df.iloc[:, 2].values\n",
692 | "X_test = data_test.iloc[:, 1].values\n",
693 | "y_test = data_test.iloc[:, 2].values"
694 | ]
695 | },
696 | {
697 | "cell_type": "code",
698 | "execution_count": 27,
699 | "metadata": {},
700 | "outputs": [
701 | {
702 | "name": "stdout",
703 | "output_type": "stream",
704 | "text": [
705 | "34388\n"
706 | ]
707 | }
708 | ],
709 | "source": [
710 | "max_word = 32000\n",
711 | "max_len = 35\n",
712 | "\n",
713 | "tok = Tokenizer(num_words = max_word)\n",
714 | "tok.fit_on_texts(X_train)\n",
715 | "print(len(tok.word_index))\n",
716 | "#print(tok.word_index)"
717 | ]
718 | },
719 | {
720 | "cell_type": "code",
721 | "execution_count": 28,
722 | "metadata": {},
723 | "outputs": [],
724 | "source": [
725 | "sequences = tok.texts_to_sequences(X_train)\n",
726 | "sequences_matrix = sequence.pad_sequences(sequences, maxlen=max_len)"
727 | ]
728 | },
729 | {
730 | "cell_type": "code",
731 | "execution_count": 29,
732 | "metadata": {},
733 | "outputs": [],
734 | "source": [
735 | "sequences_test = tok.texts_to_sequences(X_test)\n",
736 | "sequences_test_matrix = sequence.pad_sequences(sequences_test, maxlen=max_len)"
737 | ]
738 | },
739 | {
740 | "cell_type": "code",
741 | "execution_count": 30,
742 | "metadata": {},
743 | "outputs": [],
744 | "source": [
745 | "with K.tf_ops.device('device:GPU:0'):\n",
746 | " model = Sequential()\n",
747 | "    # Embed the max_word vocabulary (32,000 words) into 50 dimensions; sentence length is max_len\n",
748 | " model.add(Embedding(max_word, 50, input_length=max_len))\n",
749 | " model.add(LSTM(64))\n",
750 | " model.add(Dense(256, activation='relu'))\n",
751 | " model.add(Dropout(0.5))\n",
752 | " model.add(Dense(1, activation='sigmoid'))\n",
753 | " \n",
754 | " model.compile(loss='binary_crossentropy',optimizer='adam',metrics=['accuracy'])\n",
755 | " model_dir = './model'\n",
756 | " if not os.path.exists(model_dir):\n",
757 | " os.mkdir(model_dir)\n",
758 | " model_path = model_dir + \"/review_lstm_soojin.model\"\n",
759 | " \n",
760 | " checkpoint = ModelCheckpoint(filepath=model_path, monitor='val_loss', verbose=1, save_best_only=True)\n",
761 | " early_stopping = EarlyStopping(monitor='val_loss', patience=3)"
762 | ]
763 | },
764 | {
765 | "cell_type": "code",
766 | "execution_count": 31,
767 | "metadata": {},
768 | "outputs": [
769 | {
770 | "name": "stdout",
771 | "output_type": "stream",
772 | "text": [
773 | "Train on 119996 samples, validate on 29999 samples\n",
774 | "Epoch 1/15\n",
775 | "119996/119996 [==============================] - 61s 505us/step - loss: 0.5612 - acc: 0.6961 - val_loss: 0.5305 - val_acc: 0.7175\n",
776 | "\n",
777 | "Epoch 00001: val_loss improved from inf to 0.53049, saving model to ./model/review_lstm_soojin.model\n",
778 | "Epoch 2/15\n",
779 | "119996/119996 [==============================] - 59s 489us/step - loss: 0.4865 - acc: 0.7620 - val_loss: 0.5324 - val_acc: 0.7183\n",
780 | "\n",
781 | "Epoch 00002: val_loss did not improve from 0.53049\n",
782 | "Epoch 3/15\n",
783 | "119996/119996 [==============================] - 58s 484us/step - loss: 0.4489 - acc: 0.7785 - val_loss: 0.5504 - val_acc: 0.7172\n",
784 | "\n",
785 | "Epoch 00003: val_loss did not improve from 0.53049\n",
786 | "Epoch 4/15\n",
787 | "119996/119996 [==============================] - 58s 486us/step - loss: 0.4270 - acc: 0.7872 - val_loss: 0.5546 - val_acc: 0.7211\n",
788 | "\n",
789 | "Epoch 00004: val_loss did not improve from 0.53049\n"
790 | ]
791 | }
792 | ],
793 | "source": [
794 | "hist = model.fit(sequences_matrix, y_train, batch_size=128, epochs=15, validation_split=0.2, callbacks=[checkpoint, early_stopping])"
795 | ]
796 | },
797 | {
798 | "cell_type": "code",
799 | "execution_count": 32,
800 | "metadata": {},
801 | "outputs": [
802 | {
803 | "name": "stdout",
804 | "output_type": "stream",
805 | "text": [
806 | "49997/49997 [==============================] - 35s 702us/step\n",
807 | "Accuracy: 0.7188\n"
808 | ]
809 | }
810 | ],
811 | "source": [
812 | "print(\"Accuracy: %.4f\" % (model.evaluate(sequences_test_matrix, y_test)[1]))"
813 | ]
814 | },
815 | {
816 | "cell_type": "code",
817 | "execution_count": null,
818 | "metadata": {},
819 | "outputs": [],
820 | "source": []
821 | }
822 | ],
823 | "metadata": {
824 | "kernelspec": {
825 | "display_name": "Python 3",
826 | "language": "python",
827 | "name": "python3"
828 | },
829 | "language_info": {
830 | "codemirror_mode": {
831 | "name": "ipython",
832 | "version": 3
833 | },
834 | "file_extension": ".py",
835 | "mimetype": "text/x-python",
836 | "name": "python",
837 | "nbconvert_exporter": "python",
838 | "pygments_lexer": "ipython3",
839 | "version": "3.6.5"
840 | }
841 | },
842 | "nbformat": 4,
843 | "nbformat_minor": 2
844 | }
845 |
--------------------------------------------------------------------------------
/3. predict_boston_house.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "name": "stderr",
10 | "output_type": "stream",
11 | "text": [
12 | "C:\\Users\\leesoojin\\Anaconda3\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
13 | " from ._conv import register_converters as _register_converters\n",
14 | "Using TensorFlow backend.\n"
15 | ]
16 | }
17 | ],
18 | "source": [
19 | "from keras.models import Sequential\n",
20 | "from keras.layers import Dense\n",
21 | "from keras.utils import np_utils\n",
22 | "from sklearn.model_selection import train_test_split\n",
23 | "from sklearn.preprocessing import LabelEncoder\n",
24 | "from keras.callbacks import ModelCheckpoint, EarlyStopping\n",
25 | "import keras.backend.tensorflow_backend as K\n",
26 | "import tensorflow as tf\n",
27 | "import pandas as pd\n",
28 | "import numpy as np\n",
29 | "import matplotlib.pyplot as plt\n",
30 | "import matplotlib as mpl\n",
31 | "import seaborn as sns\n",
32 | "import os\n",
33 | "config = tf.ConfigProto()\n",
34 | "config.gpu_options.allow_growth = True\n",
35 | "session = tf.Session(config=config)"
36 | ]
37 | },
38 | {
39 | "cell_type": "code",
40 | "execution_count": 2,
41 | "metadata": {},
42 | "outputs": [
43 | {
44 | "name": "stdout",
45 | "output_type": "stream",
46 | "text": [
47 | "\n",
48 | "RangeIndex: 506 entries, 0 to 505\n",
49 | "Data columns (total 14 columns):\n",
50 | "0 506 non-null float64\n",
51 | "1 506 non-null float64\n",
52 | "2 506 non-null float64\n",
53 | "3 506 non-null int64\n",
54 | "4 506 non-null float64\n",
55 | "5 506 non-null float64\n",
56 | "6 506 non-null float64\n",
57 | "7 506 non-null float64\n",
58 | "8 506 non-null int64\n",
59 | "9 506 non-null float64\n",
60 | "10 506 non-null float64\n",
61 | "11 506 non-null float64\n",
62 | "12 506 non-null float64\n",
63 | "13 506 non-null float64\n",
64 | "dtypes: float64(12), int64(2)\n",
65 | "memory usage: 55.4 KB\n",
66 | "None\n"
67 | ]
68 | }
69 | ],
70 | "source": [
71 | "df = pd.read_csv('./dataset/housing.csv', delim_whitespace=True, header=None)\n",
72 | "\n",
73 | "print(df.info())"
74 | ]
75 | },
76 | {
77 | "cell_type": "code",
78 | "execution_count": 3,
79 | "metadata": {},
80 | "outputs": [
81 | {
82 | "name": "stdout",
83 | "output_type": "stream",
84 | "text": [
85 | " 0 1 2 3 4 5 6 7 8 9 10 \\\n",
86 | "0 0.00632 18.0 2.31 0 0.538 6.575 65.2 4.0900 1 296.0 15.3 \n",
87 | "1 0.02731 0.0 7.07 0 0.469 6.421 78.9 4.9671 2 242.0 17.8 \n",
88 | "2 0.02729 0.0 7.07 0 0.469 7.185 61.1 4.9671 2 242.0 17.8 \n",
89 | "3 0.03237 0.0 2.18 0 0.458 6.998 45.8 6.0622 3 222.0 18.7 \n",
90 | "4 0.06905 0.0 2.18 0 0.458 7.147 54.2 6.0622 3 222.0 18.7 \n",
91 | "\n",
92 | " 11 12 13 \n",
93 | "0 396.90 4.98 24.0 \n",
94 | "1 396.90 9.14 21.6 \n",
95 | "2 392.83 4.03 34.7 \n",
96 | "3 394.63 2.94 33.4 \n",
97 | "4 396.90 5.33 36.2 \n"
98 | ]
99 | }
100 | ],
101 | "source": [
102 | "print(df.head())"
103 | ]
104 | },
105 | {
106 | "cell_type": "code",
107 | "execution_count": 4,
108 | "metadata": {},
109 | "outputs": [],
110 | "source": [
111 | "X = df.iloc[:, 0:13].values\n",
112 | "y = df.iloc[:, 13].values\n",
113 | "\n",
114 | "X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=3)"
115 | ]
116 | },
117 | {
118 | "cell_type": "code",
119 | "execution_count": 5,
120 | "metadata": {},
121 | "outputs": [
122 | {
123 | "name": "stdout",
124 | "output_type": "stream",
125 | "text": [
126 | "Epoch 1/200\n",
127 | "404/404 [==============================] - 0s 1ms/step - loss: 2122.1331\n",
128 | "Epoch 2/200\n",
129 | "404/404 [==============================] - 0s 227us/step - loss: 347.9923\n",
130 | "Epoch 3/200\n",
131 | "404/404 [==============================] - 0s 254us/step - loss: 259.2522\n",
132 | "Epoch 4/200\n",
133 | "404/404 [==============================] - 0s 289us/step - loss: 208.1025\n",
134 | "Epoch 5/200\n",
135 | "404/404 [==============================] - 0s 276us/step - loss: 166.0465\n",
136 | "Epoch 6/200\n",
137 | "404/404 [==============================] - 0s 296us/step - loss: 134.6875\n",
138 | "Epoch 7/200\n",
139 | "404/404 [==============================] - 0s 232us/step - loss: 115.4015\n",
140 | "Epoch 8/200\n",
141 | "404/404 [==============================] - 0s 237us/step - loss: 102.1796\n",
142 | "Epoch 9/200\n",
143 | "404/404 [==============================] - ETA: 0s - loss: 98.38 - 0s 222us/step - loss: 97.7743\n",
144 | "Epoch 10/200\n",
145 | "404/404 [==============================] - 0s 237us/step - loss: 87.8853\n",
146 | "Epoch 11/200\n",
147 | "404/404 [==============================] - 0s 239us/step - loss: 85.4755\n",
148 | "Epoch 12/200\n",
149 | "404/404 [==============================] - 0s 227us/step - loss: 81.4273\n",
150 | "Epoch 13/200\n",
151 | "404/404 [==============================] - 0s 237us/step - loss: 79.4530\n",
152 | "Epoch 14/200\n",
153 | "404/404 [==============================] - 0s 232us/step - loss: 75.6931\n",
154 | "Epoch 15/200\n",
155 | "404/404 [==============================] - 0s 232us/step - loss: 73.4265\n",
156 | "Epoch 16/200\n",
157 | "404/404 [==============================] - 0s 309us/step - loss: 69.5513\n",
158 | "Epoch 17/200\n",
159 | "404/404 [==============================] - 0s 244us/step - loss: 68.2290\n",
160 | "Epoch 18/200\n",
161 | "404/404 [==============================] - 0s 227us/step - loss: 66.7893\n",
162 | "Epoch 19/200\n",
163 | "404/404 [==============================] - 0s 235us/step - loss: 64.6461\n",
164 | "Epoch 20/200\n",
165 | "404/404 [==============================] - 0s 227us/step - loss: 63.4591\n",
166 | "Epoch 21/200\n",
167 | "404/404 [==============================] - 0s 232us/step - loss: 63.5708\n",
168 | "Epoch 22/200\n",
169 | "404/404 [==============================] - 0s 235us/step - loss: 59.4259\n",
170 | "Epoch 23/200\n",
171 | "404/404 [==============================] - 0s 235us/step - loss: 61.8058\n",
172 | "Epoch 24/200\n",
173 | "404/404 [==============================] - 0s 237us/step - loss: 56.5482\n",
174 | "Epoch 25/200\n",
175 | "404/404 [==============================] - 0s 242us/step - loss: 57.9695\n",
176 | "Epoch 26/200\n",
177 | "404/404 [==============================] - 0s 230us/step - loss: 56.6245\n",
178 | "Epoch 27/200\n",
179 | "404/404 [==============================] - 0s 237us/step - loss: 54.3742\n",
180 | "Epoch 28/200\n",
181 | "404/404 [==============================] - 0s 227us/step - loss: 52.7804\n",
182 | "Epoch 29/200\n",
183 | "404/404 [==============================] - 0s 232us/step - loss: 53.0014\n",
184 | "Epoch 30/200\n",
185 | "404/404 [==============================] - 0s 249us/step - loss: 50.2627\n",
186 | "Epoch 31/200\n",
187 | "404/404 [==============================] - 0s 247us/step - loss: 49.8172\n",
188 | "Epoch 32/200\n",
189 | "404/404 [==============================] - 0s 227us/step - loss: 51.2612\n",
190 | "Epoch 33/200\n",
191 | "404/404 [==============================] - 0s 262us/step - loss: 48.2356\n",
192 | "Epoch 34/200\n",
193 | "404/404 [==============================] - 0s 242us/step - loss: 48.4007\n",
194 | "Epoch 35/200\n",
195 | "404/404 [==============================] - 0s 242us/step - loss: 46.9832\n",
196 | "Epoch 36/200\n",
197 | "404/404 [==============================] - 0s 254us/step - loss: 45.6215\n",
198 | "Epoch 37/200\n",
199 | "404/404 [==============================] - 0s 252us/step - loss: 46.2905\n",
200 | "Epoch 38/200\n",
201 | "404/404 [==============================] - 0s 232us/step - loss: 44.1845\n",
202 | "Epoch 39/200\n",
203 | "404/404 [==============================] - 0s 235us/step - loss: 43.7748\n",
204 | "Epoch 40/200\n",
205 | "404/404 [==============================] - 0s 230us/step - loss: 43.3959\n",
206 | "Epoch 41/200\n",
207 | "404/404 [==============================] - 0s 237us/step - loss: 42.5955\n",
208 | "Epoch 42/200\n",
209 | "404/404 [==============================] - 0s 232us/step - loss: 47.3782\n",
210 | "Epoch 43/200\n",
211 | "404/404 [==============================] - 0s 235us/step - loss: 43.7336\n",
212 | "Epoch 44/200\n",
213 | "404/404 [==============================] - 0s 257us/step - loss: 40.7421\n",
214 | "Epoch 45/200\n",
215 | "404/404 [==============================] - 0s 254us/step - loss: 40.1482\n",
216 | "Epoch 46/200\n",
217 | "404/404 [==============================] - 0s 230us/step - loss: 39.9290\n",
218 | "Epoch 47/200\n",
219 | "404/404 [==============================] - 0s 230us/step - loss: 41.5608\n",
220 | "Epoch 48/200\n",
221 | "404/404 [==============================] - 0s 252us/step - loss: 41.5198\n",
222 | "Epoch 49/200\n",
223 | "404/404 [==============================] - 0s 257us/step - loss: 42.8822\n",
224 | "Epoch 50/200\n",
225 | "404/404 [==============================] - 0s 240us/step - loss: 38.1835\n",
226 | "Epoch 51/200\n",
227 | "404/404 [==============================] - 0s 237us/step - loss: 39.6656\n",
228 | "Epoch 52/200\n",
229 | "404/404 [==============================] - 0s 254us/step - loss: 37.2524\n",
230 | "Epoch 53/200\n",
231 | "404/404 [==============================] - 0s 237us/step - loss: 37.2365\n",
232 | "Epoch 54/200\n",
233 | "404/404 [==============================] - 0s 235us/step - loss: 36.6913\n",
234 | "Epoch 55/200\n",
235 | "404/404 [==============================] - 0s 247us/step - loss: 36.9208\n",
236 | "Epoch 56/200\n",
237 | "404/404 [==============================] - 0s 244us/step - loss: 36.7544\n",
238 | "Epoch 57/200\n",
239 | "404/404 [==============================] - 0s 230us/step - loss: 36.5409\n",
240 | "Epoch 58/200\n",
241 | "404/404 [==============================] - 0s 232us/step - loss: 39.3795\n",
242 | "Epoch 59/200\n",
243 | "404/404 [==============================] - 0s 225us/step - loss: 45.0863\n",
244 | "Epoch 60/200\n",
245 | "404/404 [==============================] - 0s 244us/step - loss: 35.1973\n",
246 | "Epoch 61/200\n",
247 | "404/404 [==============================] - 0s 301us/step - loss: 34.8124\n",
248 | "Epoch 62/200\n",
249 | "404/404 [==============================] - 0s 242us/step - loss: 33.5994\n",
250 | "Epoch 63/200\n",
251 | "404/404 [==============================] - 0s 232us/step - loss: 36.3246\n",
252 | "Epoch 64/200\n",
253 | "404/404 [==============================] - 0s 232us/step - loss: 35.3689\n",
254 | "Epoch 65/200\n",
255 | "404/404 [==============================] - 0s 230us/step - loss: 34.7354\n",
256 | "Epoch 66/200\n",
257 | "404/404 [==============================] - 0s 232us/step - loss: 33.9282\n",
258 | "Epoch 67/200\n",
259 | "404/404 [==============================] - 0s 222us/step - loss: 34.5711\n",
260 | "Epoch 68/200\n",
261 | "404/404 [==============================] - 0s 237us/step - loss: 32.7263\n",
262 | "Epoch 69/200\n",
263 | "404/404 [==============================] - 0s 234us/step - loss: 35.4556\n",
264 | "Epoch 70/200\n",
265 | "404/404 [==============================] - 0s 235us/step - loss: 33.0661\n",
266 | "Epoch 71/200\n",
267 | "404/404 [==============================] - 0s 239us/step - loss: 34.8677\n",
268 | "Epoch 72/200\n",
269 | "404/404 [==============================] - 0s 235us/step - loss: 37.3278\n",
270 | "Epoch 73/200\n",
271 | "404/404 [==============================] - 0s 227us/step - loss: 33.6334\n",
272 | "Epoch 74/200\n",
273 | "404/404 [==============================] - 0s 230us/step - loss: 32.1562\n",
274 | "Epoch 75/200\n",
275 | "404/404 [==============================] - 0s 232us/step - loss: 32.4006\n",
276 | "Epoch 76/200\n",
277 | "404/404 [==============================] - 0s 222us/step - loss: 33.8208\n",
278 | "Epoch 77/200\n",
279 | "404/404 [==============================] - 0s 234us/step - loss: 32.7471\n",
280 | "Epoch 78/200\n",
281 | "404/404 [==============================] - ETA: 0s - loss: 30.57 - 0s 237us/step - loss: 32.6656\n",
282 | "Epoch 79/200\n",
283 | "404/404 [==============================] - 0s 242us/step - loss: 33.6035\n",
284 | "Epoch 80/200\n",
285 | "404/404 [==============================] - 0s 230us/step - loss: 33.1556\n",
286 | "Epoch 81/200\n",
287 | "404/404 [==============================] - 0s 230us/step - loss: 32.4329\n",
288 | "Epoch 82/200\n",
289 | "404/404 [==============================] - 0s 235us/step - loss: 34.3232\n",
290 | "Epoch 83/200\n",
291 | "404/404 [==============================] - 0s 309us/step - loss: 31.9703\n",
292 | "Epoch 84/200\n",
293 | "404/404 [==============================] - 0s 232us/step - loss: 32.8131\n",
294 | "Epoch 85/200\n",
295 | "404/404 [==============================] - 0s 237us/step - loss: 31.2383\n",
296 | "Epoch 86/200\n",
297 | "404/404 [==============================] - 0s 232us/step - loss: 32.0186\n",
298 | "Epoch 87/200\n",
299 | "404/404 [==============================] - 0s 237us/step - loss: 32.7700\n",
300 | "Epoch 88/200\n",
301 | "404/404 [==============================] - 0s 232us/step - loss: 32.1824\n",
302 | "Epoch 89/200\n",
303 | "404/404 [==============================] - 0s 237us/step - loss: 32.2091\n",
304 | "Epoch 90/200\n",
305 | "404/404 [==============================] - 0s 230us/step - loss: 30.2764\n",
306 | "Epoch 91/200\n",
307 | "404/404 [==============================] - 0s 232us/step - loss: 31.4422\n",
308 | "Epoch 92/200\n",
309 | "404/404 [==============================] - 0s 230us/step - loss: 32.3985\n",
310 | "Epoch 93/200\n",
311 | "404/404 [==============================] - 0s 262us/step - loss: 30.2015\n",
312 | "Epoch 94/200\n",
313 | "404/404 [==============================] - 0s 299us/step - loss: 30.5581\n",
314 | "Epoch 95/200\n",
315 | "404/404 [==============================] - 0s 254us/step - loss: 32.1337\n",
316 | "Epoch 96/200\n",
317 | "404/404 [==============================] - 0s 230us/step - loss: 34.5846\n",
318 | "Epoch 97/200\n",
319 | "404/404 [==============================] - 0s 210us/step - loss: 31.3126\n",
320 | "Epoch 98/200\n",
321 | "404/404 [==============================] - 0s 212us/step - loss: 34.9719\n",
322 | "Epoch 99/200\n",
323 | "404/404 [==============================] - 0s 212us/step - loss: 31.4271\n",
324 | "Epoch 100/200\n",
325 | "404/404 [==============================] - 0s 215us/step - loss: 29.2816\n",
326 | "Epoch 101/200\n",
327 | "404/404 [==============================] - 0s 271us/step - loss: 31.5813\n",
328 | "Epoch 102/200\n",
329 | "404/404 [==============================] - 0s 232us/step - loss: 28.8050\n",
330 | "Epoch 103/200\n",
331 | "404/404 [==============================] - 0s 230us/step - loss: 28.1895\n",
332 | "Epoch 104/200\n",
333 | "404/404 [==============================] - 0s 232us/step - loss: 29.4792\n",
334 | "Epoch 105/200\n",
335 | "404/404 [==============================] - 0s 235us/step - loss: 30.8975\n",
336 | "Epoch 106/200\n",
337 | "404/404 [==============================] - 0s 230us/step - loss: 29.5941\n",
338 | "Epoch 107/200\n",
339 | "404/404 [==============================] - 0s 225us/step - loss: 33.2024\n",
340 | "Epoch 108/200\n",
341 | "404/404 [==============================] - 0s 230us/step - loss: 29.2707\n",
342 | "Epoch 109/200\n",
343 | "404/404 [==============================] - 0s 227us/step - loss: 29.9632\n",
344 | "Epoch 110/200\n",
345 | "404/404 [==============================] - 0s 234us/step - loss: 29.0034\n",
346 | "Epoch 111/200\n",
347 | "404/404 [==============================] - 0s 240us/step - loss: 27.7416\n",
348 | "Epoch 112/200\n",
349 | "404/404 [==============================] - 0s 227us/step - loss: 28.6736\n",
350 | "Epoch 113/200\n",
351 | "404/404 [==============================] - ETA: 0s - loss: 27.21 - 0s 232us/step - loss: 30.4598\n",
352 | "Epoch 114/200\n",
353 | "404/404 [==============================] - 0s 234us/step - loss: 28.4916\n",
354 | "Epoch 115/200\n",
355 | "404/404 [==============================] - 0s 227us/step - loss: 30.8706\n",
356 | "Epoch 116/200\n",
357 | "404/404 [==============================] - 0s 237us/step - loss: 28.7015\n",
358 | "Epoch 117/200\n",
359 | "404/404 [==============================] - 0s 235us/step - loss: 27.0651\n",
360 | "Epoch 118/200\n",
361 | "404/404 [==============================] - 0s 227us/step - loss: 29.1501\n",
362 | "Epoch 119/200\n",
363 | "404/404 [==============================] - 0s 227us/step - loss: 32.5826\n",
364 | "Epoch 120/200\n",
365 | "404/404 [==============================] - 0s 232us/step - loss: 29.9393\n",
366 | "Epoch 121/200\n",
367 | "404/404 [==============================] - 0s 225us/step - loss: 29.9413\n",
368 | "Epoch 122/200\n",
369 | "404/404 [==============================] - 0s 242us/step - loss: 26.3597\n",
370 | "Epoch 123/200\n",
371 | "404/404 [==============================] - 0s 244us/step - loss: 27.2816\n",
372 | "Epoch 124/200\n",
373 | "404/404 [==============================] - 0s 235us/step - loss: 28.6836\n",
374 | "Epoch 125/200\n",
375 | "404/404 [==============================] - 0s 230us/step - loss: 27.6494\n",
376 | "Epoch 126/200\n",
377 | "404/404 [==============================] - 0s 242us/step - loss: 31.7421\n",
378 | "Epoch 127/200\n",
379 | "404/404 [==============================] - 0s 235us/step - loss: 30.7558\n",
380 | "Epoch 128/200\n",
381 | "404/404 [==============================] - 0s 237us/step - loss: 27.4953\n",
382 | "Epoch 129/200\n",
383 | "404/404 [==============================] - 0s 234us/step - loss: 25.6598\n",
384 | "Epoch 130/200\n",
385 | "404/404 [==============================] - 0s 239us/step - loss: 28.6597\n",
386 | "Epoch 131/200\n",
387 | "404/404 [==============================] - 0s 286us/step - loss: 33.7057\n",
388 | "Epoch 132/200\n",
389 | "404/404 [==============================] - 0s 242us/step - loss: 27.8660\n",
390 | "Epoch 133/200\n",
391 | "404/404 [==============================] - 0s 242us/step - loss: 26.1220\n",
392 | "Epoch 134/200\n",
393 | "404/404 [==============================] - 0s 230us/step - loss: 26.6636\n",
394 | "Epoch 135/200\n",
395 | "404/404 [==============================] - 0s 239us/step - loss: 27.3979\n",
396 | "Epoch 136/200\n",
397 | "404/404 [==============================] - 0s 232us/step - loss: 25.3598\n",
398 | "Epoch 137/200\n",
399 | "404/404 [==============================] - 0s 244us/step - loss: 27.29710s - loss: 25.51\n",
400 | "Epoch 138/200\n",
401 | "404/404 [==============================] - 0s 242us/step - loss: 26.6476\n",
402 | "Epoch 139/200\n",
403 | "404/404 [==============================] - 0s 230us/step - loss: 28.6654\n",
404 | "Epoch 140/200\n",
405 | "404/404 [==============================] - ETA: 0s - loss: 25.21 - 0s 239us/step - loss: 26.0402\n",
406 | "Epoch 141/200\n",
407 | "404/404 [==============================] - 0s 229us/step - loss: 26.3300\n",
408 | "Epoch 142/200\n",
409 | "404/404 [==============================] - 0s 232us/step - loss: 28.5133\n",
410 | "Epoch 143/200\n",
411 | "404/404 [==============================] - 0s 234us/step - loss: 27.3029\n",
412 | "Epoch 144/200\n",
413 | "404/404 [==============================] - 0s 237us/step - loss: 25.7936\n",
414 | "Epoch 145/200\n",
415 | "404/404 [==============================] - 0s 237us/step - loss: 24.6531\n",
416 | "Epoch 146/200\n",
417 | "404/404 [==============================] - 0s 234us/step - loss: 29.3940\n",
418 | "Epoch 147/200\n",
419 | "404/404 [==============================] - 0s 234us/step - loss: 26.0954\n",
420 | "Epoch 148/200\n",
421 | "404/404 [==============================] - 0s 244us/step - loss: 27.1548\n",
422 | "Epoch 149/200\n",
423 | "404/404 [==============================] - ETA: 0s - loss: 26.57 - 0s 234us/step - loss: 25.9111\n",
424 | "Epoch 150/200\n",
425 | "404/404 [==============================] - 0s 284us/step - loss: 25.7981\n",
426 | "Epoch 151/200\n",
427 | "404/404 [==============================] - 0s 281us/step - loss: 26.0544\n",
428 | "Epoch 152/200\n",
429 | "404/404 [==============================] - 0s 239us/step - loss: 25.7746\n",
430 | "Epoch 153/200\n",
431 | "404/404 [==============================] - 0s 232us/step - loss: 25.2964\n",
432 | "Epoch 154/200\n",
433 | "404/404 [==============================] - 0s 252us/step - loss: 26.9285\n",
434 | "Epoch 155/200\n",
435 | "404/404 [==============================] - 0s 244us/step - loss: 27.8392\n",
436 | "Epoch 156/200\n",
437 | "404/404 [==============================] - 0s 234us/step - loss: 26.6262\n",
438 | "Epoch 157/200\n",
439 | "404/404 [==============================] - 0s 237us/step - loss: 25.4286\n",
440 | "Epoch 158/200\n",
441 | "404/404 [==============================] - 0s 239us/step - loss: 25.9537\n",
442 | "Epoch 159/200\n",
443 | "404/404 [==============================] - 0s 237us/step - loss: 25.9483\n",
444 | "Epoch 160/200\n",
445 | "404/404 [==============================] - 0s 242us/step - loss: 26.6073\n",
446 | "Epoch 161/200\n",
447 | "404/404 [==============================] - 0s 235us/step - loss: 26.3330\n",
448 | "Epoch 162/200\n",
449 | "404/404 [==============================] - 0s 289us/step - loss: 25.0448\n",
450 | "Epoch 163/200\n",
451 | "404/404 [==============================] - 0s 249us/step - loss: 25.8895\n",
452 | "Epoch 164/200\n",
453 | "404/404 [==============================] - 0s 242us/step - loss: 25.0078\n",
454 | "Epoch 165/200\n",
455 | "404/404 [==============================] - 0s 232us/step - loss: 23.8572\n",
456 | "Epoch 166/200\n",
457 | "404/404 [==============================] - 0s 227us/step - loss: 26.8393\n",
458 | "Epoch 167/200\n",
459 | "404/404 [==============================] - 0s 239us/step - loss: 24.7674\n",
460 | "Epoch 168/200\n",
461 | "404/404 [==============================] - 0s 242us/step - loss: 29.7234\n",
462 | "Epoch 169/200\n",
463 | "404/404 [==============================] - 0s 227us/step - loss: 25.9919\n",
464 | "Epoch 170/200\n",
465 | "404/404 [==============================] - 0s 230us/step - loss: 23.7709\n",
466 | "Epoch 171/200\n",
467 | "404/404 [==============================] - ETA: 0s - loss: 22.95 - 0s 237us/step - loss: 24.0283\n",
468 | "Epoch 172/200\n",
469 | "404/404 [==============================] - 0s 237us/step - loss: 24.3686\n",
470 | "Epoch 173/200\n",
471 | "404/404 [==============================] - 0s 237us/step - loss: 25.7648\n",
472 | "Epoch 174/200\n",
473 | "404/404 [==============================] - 0s 235us/step - loss: 24.6780\n",
474 | "Epoch 175/200\n",
475 | "404/404 [==============================] - 0s 234us/step - loss: 24.5750\n",
476 | "Epoch 176/200\n",
477 | "404/404 [==============================] - 0s 230us/step - loss: 24.4545\n",
478 | "Epoch 177/200\n",
479 | "404/404 [==============================] - 0s 235us/step - loss: 23.2645\n",
480 | "Epoch 178/200\n",
481 | "404/404 [==============================] - 0s 239us/step - loss: 23.5010\n",
482 | "Epoch 179/200\n",
483 | "404/404 [==============================] - 0s 244us/step - loss: 25.2118\n",
484 | "Epoch 180/200\n",
485 | "404/404 [==============================] - 0s 239us/step - loss: 24.6237\n",
486 | "Epoch 181/200\n",
487 | "404/404 [==============================] - 0s 237us/step - loss: 25.8541\n",
488 | "Epoch 182/200\n",
489 | "404/404 [==============================] - 0s 237us/step - loss: 24.4790\n",
490 | "Epoch 183/200\n",
491 | "404/404 [==============================] - 0s 237us/step - loss: 24.2495\n",
492 | "Epoch 184/200\n",
493 | "404/404 [==============================] - 0s 230us/step - loss: 25.2081\n",
494 | "Epoch 185/200\n",
495 | "404/404 [==============================] - 0s 239us/step - loss: 22.2362\n",
496 | "Epoch 186/200\n",
497 | "404/404 [==============================] - 0s 242us/step - loss: 24.2375\n",
498 | "Epoch 187/200\n",
499 | "404/404 [==============================] - 0s 237us/step - loss: 22.1582\n",
500 | "Epoch 188/200\n"
501 | ]
502 | },
503 | {
504 | "name": "stdout",
505 | "output_type": "stream",
506 | "text": [
507 | "404/404 [==============================] - 0s 244us/step - loss: 23.7005\n",
508 | "Epoch 189/200\n",
509 | "404/404 [==============================] - 0s 242us/step - loss: 24.0025\n",
510 | "Epoch 190/200\n",
511 | "404/404 [==============================] - ETA: 0s - loss: 24.14 - 0s 232us/step - loss: 25.4144\n",
512 | "Epoch 191/200\n",
513 | "404/404 [==============================] - 0s 237us/step - loss: 30.1352\n",
514 | "Epoch 192/200\n",
515 | "404/404 [==============================] - 0s 235us/step - loss: 23.3425\n",
516 | "Epoch 193/200\n",
517 | "404/404 [==============================] - 0s 235us/step - loss: 22.6613\n",
518 | "Epoch 194/200\n",
519 | "404/404 [==============================] - 0s 247us/step - loss: 24.7335\n",
520 | "Epoch 195/200\n",
521 | "404/404 [==============================] - 0s 252us/step - loss: 22.1079\n",
522 | "Epoch 196/200\n",
523 | "404/404 [==============================] - 0s 235us/step - loss: 24.2412\n",
524 | "Epoch 197/200\n",
525 | "404/404 [==============================] - 0s 239us/step - loss: 23.6584\n",
526 | "Epoch 198/200\n",
527 | "404/404 [==============================] - 0s 239us/step - loss: 24.2968\n",
528 | "Epoch 199/200\n",
529 | "404/404 [==============================] - 0s 237us/step - loss: 26.1472\n",
530 | "Epoch 200/200\n",
531 | "404/404 [==============================] - 0s 279us/step - loss: 23.6032\n"
532 | ]
533 | }
534 | ],
535 | "source": [
536 | "with K.tf_ops.device('/device:GPU:0'):\n",
537 | " model = Sequential()\n",
538 | " model.add(Dense(30, input_dim=X.shape[1], activation='relu'))\n",
539 | " model.add(Dense(7, activation='relu'))\n",
540 | "    model.add(Dense(1)) # For regression, the output layer has no activation function.\n",
541 | " \n",
542 | " model.compile(loss='mean_squared_error', optimizer='adam')\n",
543 | " \n",
544 | " model.fit(X_train, y_train, epochs=200, batch_size=10)"
545 | ]
546 | },
547 | {
548 | "cell_type": "code",
549 | "execution_count": 6,
550 | "metadata": {},
551 | "outputs": [
552 | {
553 | "name": "stdout",
554 | "output_type": "stream",
555 | "text": [
556 | "Actual price: 44.800, predicted price: 29.597\n",
557 | "Actual price: 17.100, predicted price: 17.374\n",
558 | "Actual price: 17.800, predicted price: 17.432\n",
559 | "Actual price: 33.100, predicted price: 29.175\n",
560 | "Actual price: 21.900, predicted price: 22.060\n",
561 | "Actual price: 21.000, predicted price: 22.157\n",
562 | "Actual price: 18.400, predicted price: 16.429\n",
563 | "Actual price: 10.400, predicted price: 9.389\n",
564 | "Actual price: 23.100, predicted price: 22.239\n",
565 | "Actual price: 20.000, predicted price: 16.864\n"
566 | ]
567 | }
568 | ],
569 | "source": [
570 | "predict = model.predict(X_test).flatten()\n",
571 | "for i in range(10):\n",
572 | " label = y_test[i]\n",
573 | " prediction= predict[i]\n",
574 | "    print(\"Actual price: {:.3f}, predicted price: {:.3f}\".format(label, prediction))"
575 | ]
576 | },
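
Printing ten sample predictions gives a feel for the fit, but a single summary number helps too. Since the model was compiled with loss='mean_squared_error', evaluate() returns the test MSE directly; a minimal sketch:

```python
test_mse = model.evaluate(X_test, y_test, verbose=0)
print("Test MSE: %.3f, RMSE: %.3f" % (test_mse, test_mse ** 0.5))
```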
577 | {
578 | "cell_type": "code",
579 | "execution_count": null,
580 | "metadata": {},
581 | "outputs": [],
582 | "source": []
583 | }
584 | ],
585 | "metadata": {
586 | "kernelspec": {
587 | "display_name": "Python 3",
588 | "language": "python",
589 | "name": "python3"
590 | },
591 | "language_info": {
592 | "codemirror_mode": {
593 | "name": "ipython",
594 | "version": 3
595 | },
596 | "file_extension": ".py",
597 | "mimetype": "text/x-python",
598 | "name": "python",
599 | "nbconvert_exporter": "python",
600 | "pygments_lexer": "ipython3",
601 | "version": "3.6.5"
602 | }
603 | },
604 | "nbformat": 4,
605 | "nbformat_minor": 2
606 | }
607 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # keras
2 | Machine learning and deep learning with Keras
3 | 
4 | A Keras primer
5 | 
6 | - How to use the GPU (keras_gpu_test.ipynb)
7 | - Multi-class classification of the iris data with a multilayer perceptron (1. predict_iris_data.ipynb)
8 | - Binary classification of red vs. white wine with a multilayer perceptron (2. predict_redwine_whitewine.ipynb)
9 | - Boston house price regression with a multilayer perceptron (3. predict_boston_house.ipynb)
10 | - MNIST digit prediction with a multilayer perceptron (4. predict_MNIST_with_MLP.ipynb)
11 | - MNIST prediction with a CNN (5. predict_MNIST_with_CNN.ipynb)
12 | - Binary image classification (dogs vs. cats) with a CNN (6. predict_binary_img_with_CNN.ipynb)
13 | - Multi-class image classification with a CNN (7. predict_multi_img_with_CNN.ipynb)
14 | - Spam mail prediction with an RNN (LSTM) (8. predict_spam_or_ham_with_LSTM.ipynb)
15 | - Korean news category classification with an RNN (LSTM) (9. predict_korea_news_category_with_LSTM.ipynb)
16 | - Korean movie review text classification (10. predict_korea_movie_review_classification.ipynb)
17 |
--------------------------------------------------------------------------------
/bfg-1.13.0.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lsjsj92/keras_basic/afa26e7b830cab23c7ffb2e1806660070279a982/bfg-1.13.0.jar
--------------------------------------------------------------------------------
/dataset/ThoraricSurgery.csv:
--------------------------------------------------------------------------------
1 | 293,1,3.8,2.8,0,0,0,0,0,0,12,0,0,0,1,0,62,0
2 | 1,2,2.88,2.16,1,0,0,0,1,1,14,0,0,0,1,0,60,0
3 | 8,2,3.19,2.5,1,0,0,0,1,0,11,0,0,1,1,0,66,1
4 | 14,2,3.98,3.06,2,0,0,0,1,1,14,0,0,0,1,0,80,1
5 | 17,2,2.21,1.88,0,0,1,0,0,0,12,0,0,0,1,0,56,0
6 | 18,2,2.96,1.67,0,0,0,0,0,0,12,0,0,0,1,0,61,0
7 | 35,2,2.76,2.2,1,0,0,0,1,0,11,0,0,0,0,0,76,0
8 | 42,2,3.24,2.52,1,0,0,0,1,0,12,0,0,0,1,0,63,1
9 | 65,2,3.15,2.76,1,0,1,0,1,0,12,0,0,0,1,0,59,0
10 | 111,2,4.48,4.2,0,0,0,0,0,0,12,0,0,0,1,0,55,0
11 | 121,2,3.84,2.56,1,0,0,0,1,0,11,0,0,0,0,0,59,0
12 | 123,2,2.8,2.12,1,0,0,1,1,0,13,0,0,0,1,0,80,0
13 | 130,2,5.6,4.64,1,0,0,0,1,0,11,0,0,0,1,0,45,0
14 | 132,2,2.12,1.72,1,0,0,0,0,0,12,0,0,0,1,0,74,0
15 | 133,2,2.5,71.1,0,0,0,1,0,0,13,0,0,0,1,0,64,1
16 | 137,2,3.76,3.08,1,0,0,0,1,0,13,0,0,0,1,0,54,0
17 | 141,2,2.16,1.56,1,0,0,0,1,0,11,0,0,0,1,0,63,0
18 | 145,2,3.64,2.48,2,0,0,0,1,1,11,0,0,0,1,0,70,0
19 | 164,2,2.4,1.96,1,0,0,0,1,0,12,0,0,0,0,0,73,0
20 | 165,2,3,2.4,1,0,0,0,1,0,14,0,0,0,1,0,58,0
21 | 167,2,3.4,2.12,1,0,0,0,1,1,11,0,0,0,1,0,62,0
22 | 172,2,2.88,2.2,0,0,0,0,0,0,12,1,0,0,1,0,62,0
23 | 173,2,3.16,2.56,1,0,1,1,1,0,12,0,0,1,1,0,62,0
24 | 193,2,3.08,2.48,1,0,0,0,1,0,11,0,0,0,0,0,49,0
25 | 203,2,4.08,2.56,1,1,1,0,0,0,13,0,0,0,1,0,54,0
26 | 204,2,3.6,3.92,0,0,0,0,0,0,12,0,0,0,1,0,56,0
27 | 210,2,2.8,1.6,1,0,1,0,1,1,12,0,0,0,1,0,53,1
28 | 216,2,2.66,8.56,1,0,1,0,1,0,12,0,0,0,1,0,61,0
29 | 217,2,3.24,1.88,1,0,0,0,1,0,12,0,0,0,1,0,61,0
30 | 243,2,4.88,3.44,0,0,1,0,1,0,14,0,0,0,1,0,75,1
31 | 275,2,4.04,2.76,1,0,0,0,1,0,12,0,0,0,1,0,55,1
32 | 284,2,2.32,1.68,1,0,1,0,1,0,12,0,0,0,1,0,64,0
33 | 295,2,2.64,1.92,1,0,0,0,1,0,11,1,0,0,1,0,63,0
34 | 316,2,3.4,2.76,1,0,1,0,1,0,12,0,0,0,1,0,56,0
35 | 324,2,2.58,1.64,2,0,1,0,1,1,12,0,0,0,1,0,63,0
36 | 331,2,2.94,76,1,0,1,1,1,0,12,0,0,0,0,0,61,0
37 | 335,2,4,3.12,1,0,0,0,1,0,12,0,0,0,1,0,67,1
38 | 346,2,3.12,2.72,2,0,0,0,1,1,14,0,0,0,1,0,70,0
39 | 347,2,3.48,2.84,1,0,0,0,0,1,11,0,0,0,1,0,58,0
40 | 349,2,4.2,3.6,1,0,0,0,0,1,11,0,0,0,1,0,39,1
41 | 390,2,3.8,2.67,1,0,0,0,1,0,14,0,0,0,1,0,48,0
42 | 392,2,1.84,1.36,1,0,1,0,1,0,12,0,0,0,1,0,57,0
43 | 399,2,2.96,2.33,1,0,0,0,1,0,11,0,0,0,1,0,72,0
44 | 405,2,2.96,2.24,0,0,0,0,1,0,12,0,0,0,1,0,57,1
45 | 408,2,2.72,2.08,0,0,0,0,0,0,12,0,0,0,1,0,67,0
46 | 411,2,2.48,2,1,0,0,0,1,0,12,0,0,0,1,0,60,1
47 | 414,2,2.48,2.08,1,0,1,0,0,0,12,0,0,0,1,0,60,0
48 | 419,2,2.6,2.04,0,1,1,0,0,0,12,0,0,0,0,0,70,0
49 | 422,2,3.76,2.96,1,0,0,0,1,0,14,1,0,0,0,0,64,1
50 | 442,2,4.44,3.64,0,0,0,0,0,0,12,0,0,0,0,0,62,0
51 | 443,2,4.08,2.24,1,0,0,1,1,0,12,0,0,0,0,0,61,0
52 | 448,2,4.4,3.72,1,0,0,0,1,1,12,0,0,0,1,0,52,0
53 | 466,2,3.88,2.12,1,0,0,0,1,0,13,0,0,0,1,0,63,0
54 | 2,3,3.4,1.88,0,0,0,0,0,0,12,0,0,0,1,0,51,0
55 | 3,3,2.76,2.08,1,0,0,0,1,0,11,0,0,0,1,0,59,0
56 | 4,3,3.68,3.04,0,0,0,0,0,0,11,0,0,0,0,0,54,0
57 | 5,3,2.44,0.96,2,0,1,0,1,1,11,0,0,0,1,0,73,1
58 | 6,3,2.48,1.88,1,0,0,0,1,0,11,0,0,0,0,0,51,0
59 | 7,3,4.36,3.28,1,0,0,0,1,0,12,1,0,0,1,0,59,1
60 | 9,3,3.16,2.64,2,0,0,0,1,1,11,0,0,0,1,0,68,0
61 | 10,3,2.32,2.16,1,0,0,0,1,0,11,0,0,0,1,0,54,0
62 | 11,3,2.56,2.32,0,0,1,0,1,0,12,0,0,0,0,0,60,0
63 | 12,3,4.28,4.44,1,0,0,0,0,0,12,0,0,0,1,0,58,0
64 | 13,3,3,2.36,1,0,0,0,1,1,11,0,0,0,1,0,68,0
65 | 15,3,1.96,1.4,1,0,0,0,1,0,11,0,0,0,1,0,77,0
66 | 16,3,4.68,4.16,1,0,0,0,1,0,12,0,0,0,1,0,62,0
67 | 19,3,2.6,1.68,1,0,0,0,1,0,12,0,0,0,1,0,70,0
68 | 20,3,2.88,2.48,0,0,0,0,0,0,11,0,0,0,1,0,71,0
69 | 21,3,4.48,3.48,0,0,0,0,0,0,12,0,0,0,1,0,51,0
70 | 23,3,2.36,1.68,0,0,0,0,0,0,12,0,0,0,1,0,62,0
71 | 24,3,3.68,2.32,0,0,0,0,0,0,11,0,0,0,1,0,62,0
72 | 27,3,3.24,3.08,1,0,0,0,1,0,11,0,0,0,1,0,60,0
73 | 28,3,3.4,3.06,1,0,0,0,1,1,11,0,0,0,1,0,68,1
74 | 29,3,3.16,2.69,1,0,0,0,1,1,11,0,0,0,1,0,56,0
75 | 31,3,3.24,2.4,1,1,1,0,0,0,14,0,0,0,1,0,55,1
76 | 32,3,4.44,3.48,1,0,0,0,1,0,12,0,0,0,0,0,52,0
77 | 34,3,1.81,1.4,1,0,0,0,1,0,12,1,0,0,0,0,68,0
78 | 36,3,2.36,1.6,0,0,0,0,0,0,11,0,0,0,1,0,58,0
79 | 37,3,2.2,1.96,1,0,0,0,1,0,12,0,0,0,1,0,71,0
80 | 38,3,3.68,2.44,1,0,1,1,0,0,12,1,0,0,0,0,61,0
81 | 39,3,4.2,3.08,0,0,0,0,0,0,11,0,0,0,1,0,56,0
82 | 40,3,4.6,3.52,1,0,0,0,1,0,11,0,0,0,1,0,52,0
83 | 43,3,3.2,2.82,1,0,0,0,1,0,12,0,0,0,1,0,68,0
84 | 45,3,3.56,2.68,1,1,0,0,1,0,12,0,0,0,1,0,60,0
85 | 46,3,2.48,2.08,0,0,0,0,0,0,11,0,0,0,1,0,60,0
86 | 47,3,4.16,3.28,1,0,0,0,1,0,12,0,0,0,1,0,67,0
87 | 48,3,2.64,2.12,1,0,0,0,1,0,12,0,0,0,1,0,72,1
88 | 49,3,4.44,3.12,2,0,0,0,1,1,12,0,0,0,1,0,59,0
89 | 50,3,4.56,3.92,0,0,0,0,0,0,12,0,0,0,0,0,55,0
90 | 51,3,2.52,1.96,1,0,0,0,1,0,12,0,0,0,0,0,79,0
91 | 52,3,4,2.88,1,0,0,0,1,0,11,0,0,0,1,0,69,0
92 | 53,3,3.2,2.52,2,1,1,1,1,0,12,0,0,0,1,0,68,0
93 | 55,3,3.68,3.08,1,0,0,0,1,0,12,0,0,0,1,0,63,0
94 | 57,3,3.72,2.88,1,0,0,1,1,0,11,0,0,0,0,0,37,0
95 | 58,3,3.4,2.8,1,1,0,0,1,1,11,1,0,0,1,0,64,1
96 | 60,3,3.84,3.72,0,0,0,0,0,0,12,0,0,0,1,0,58,0
97 | 61,3,3.52,2.28,0,0,0,0,0,0,13,0,0,0,1,0,51,1
98 | 62,3,3.04,2.04,2,0,0,0,1,1,12,0,0,0,1,0,77,0
99 | 63,3,4.96,3.6,0,0,0,0,0,0,11,0,0,0,1,0,56,0
100 | 64,3,3.72,2.84,0,0,0,0,0,0,11,1,0,0,0,0,55,0
101 | 66,3,2.88,2.6,1,0,0,0,1,0,12,0,0,0,0,0,54,0
102 | 67,3,2.36,2,0,0,0,0,0,0,11,0,0,0,0,0,39,0
103 | 69,3,2.72,2.2,1,0,0,0,1,0,12,0,0,0,1,0,61,0
104 | 70,3,3.08,1.8,1,0,1,0,1,0,12,0,0,0,1,0,70,0
105 | 71,3,3.48,2.72,1,0,1,0,0,0,11,0,0,0,0,0,53,0
106 | 72,3,3.6,2.6,1,0,0,0,1,0,12,1,0,0,1,0,71,0
107 | 73,3,3.52,2.92,0,0,0,0,0,0,11,0,0,0,1,0,63,0
108 | 75,3,4.6,3.28,1,0,0,0,1,0,11,0,0,0,1,0,55,0
109 | 76,3,3.4,2.8,1,0,0,0,1,0,14,0,0,0,1,0,41,1
110 | 77,3,1.84,1.28,1,0,0,0,1,1,11,0,0,0,1,0,66,0
111 | 78,3,3.04,3.6,1,0,0,0,1,0,12,0,0,0,1,0,62,1
112 | 79,3,2.2,1.44,1,0,0,0,1,0,12,0,0,0,1,0,54,0
113 | 80,3,3.04,2.16,1,0,0,0,1,0,12,0,0,0,0,0,78,0
114 | 81,3,3.68,2.88,1,0,0,0,1,0,12,0,0,0,1,0,58,0
115 | 82,3,1.96,1.68,1,0,0,0,1,0,14,0,0,0,1,0,59,0
116 | 83,3,3.24,1.64,1,0,0,0,1,0,12,0,0,0,1,0,63,0
117 | 84,3,2.84,2.36,1,0,0,0,1,0,11,1,0,0,0,0,62,0
118 | 85,3,4.28,3.28,0,0,0,0,0,0,12,0,0,0,1,0,51,0
119 | 86,3,3.76,2.72,1,0,0,0,1,0,12,0,0,0,1,0,58,0
120 | 87,3,4.9,4.19,0,0,0,1,1,0,12,0,0,0,0,0,52,0
121 | 88,3,2.36,2,1,0,0,1,0,0,12,0,0,1,1,0,67,0
122 | 90,3,2.83,66.4,1,1,1,1,1,0,12,0,0,0,1,0,75,0
123 | 92,3,2.6,2,1,0,0,0,1,0,11,0,0,0,1,0,73,0
124 | 93,3,3.6,2.48,1,0,0,0,1,0,12,0,0,0,1,0,60,1
125 | 94,3,6.08,4.92,0,0,0,0,0,0,11,0,0,0,1,0,50,0
126 | 95,3,1.88,1.44,2,0,0,0,1,1,12,0,0,0,1,0,87,0
127 | 96,3,4.56,3.6,1,0,0,0,1,0,11,0,0,0,1,0,54,0
128 | 99,3,2.63,67.3,1,0,0,1,1,0,11,0,0,0,1,0,54,0
129 | 100,3,4.6,2.92,1,0,1,1,1,0,12,0,0,0,1,0,57,1
130 | 101,3,3.36,2.67,1,0,0,0,1,0,11,0,0,0,1,0,72,0
131 | 102,3,1.84,1.64,1,0,0,0,1,1,12,1,0,0,1,0,72,0
132 | 104,3,2.35,1.64,1,0,0,0,1,0,11,0,0,0,0,1,59,0
133 | 105,3,2.84,1.88,1,0,0,0,1,0,11,0,0,0,0,0,53,0
134 | 107,3,2.48,2.08,1,0,0,0,1,0,12,0,0,0,1,0,55,0
135 | 108,3,3.6,2.6,1,0,0,0,1,0,12,0,0,0,1,0,54,0
136 | 109,3,3.16,2.96,0,0,0,0,0,0,11,0,0,0,0,0,63,0
137 | 110,3,3.24,2.36,1,0,0,1,1,0,12,0,0,0,1,0,74,0
138 | 112,3,4,2.6,1,0,0,0,1,0,12,1,0,0,1,0,58,0
139 | 113,3,3.68,64.1,0,0,0,0,0,0,12,0,0,0,1,0,60,0
140 | 114,3,4.68,3.48,0,0,0,0,0,0,11,0,0,0,1,0,52,0
141 | 115,3,4.52,3.32,0,0,0,0,1,0,12,0,0,0,1,0,58,0
142 | 118,3,2.84,2.16,1,0,0,0,1,0,12,1,0,0,1,0,53,0
143 | 120,3,2.56,1.6,1,0,0,0,1,1,12,0,0,0,1,0,75,0
144 | 122,3,3.56,2.76,1,0,0,0,1,0,12,0,0,0,1,0,74,0
145 | 125,3,3.36,2.8,1,0,0,0,1,1,12,0,0,0,1,0,76,0
146 | 126,3,2.83,1.96,1,0,0,0,1,0,12,0,0,0,1,0,71,0
147 | 127,3,4.56,2.68,1,0,0,0,1,0,11,0,0,0,1,0,62,0
148 | 128,3,2,1,1,0,1,0,1,1,11,1,0,0,1,0,73,1
149 | 131,3,3.32,2.87,1,0,0,0,1,0,11,0,0,0,1,0,63,0
150 | 134,3,2,1.44,0,0,0,0,0,0,11,0,0,0,1,0,63,0
151 | 135,3,4.84,3.48,1,0,0,0,1,0,12,0,0,0,1,0,56,0
152 | 136,3,2.92,2.28,1,0,0,0,1,0,11,0,0,0,1,0,63,0
153 | 138,3,2.08,1.52,1,0,0,0,1,0,14,0,0,0,1,0,49,1
154 | 139,3,2.44,2.08,1,0,0,0,1,0,12,0,0,0,1,0,57,0
155 | 140,3,3.72,3.12,1,0,1,0,0,0,12,0,0,0,0,0,52,0
156 | 142,3,4.2,3.24,1,0,1,0,1,0,12,0,0,0,1,0,73,0
157 | 143,3,5.17,4.3,1,0,0,0,0,0,11,0,0,0,0,0,47,0
158 | 146,3,3.96,2.96,1,0,0,0,1,0,12,0,0,0,1,0,60,0
159 | 147,3,3.92,3.08,1,0,0,0,1,0,11,0,0,0,0,0,70,0
160 | 148,3,2.92,2.2,1,0,0,0,1,0,12,0,0,0,1,0,68,0
161 | 149,3,3.64,2.76,1,0,0,0,1,0,12,0,0,0,1,0,74,0
162 | 150,3,2.72,2.36,0,0,0,0,0,0,11,0,0,0,1,0,71,0
163 | 151,3,2.6,2.24,0,0,0,0,0,0,12,0,0,0,0,0,56,0
164 | 152,3,3.88,2.84,1,0,1,0,1,0,11,0,0,0,1,0,66,1
165 | 153,3,2.72,2.04,1,1,0,0,0,0,12,0,0,0,0,0,76,1
166 | 154,3,3.44,3.13,1,0,0,0,1,1,12,0,0,0,1,0,78,0
167 | 155,3,3.12,3.24,1,0,0,0,1,0,12,0,0,0,1,0,68,0
168 | 156,3,2.6,2.32,1,0,0,0,0,1,12,1,0,0,0,0,66,0
169 | 157,3,3.28,2.32,1,0,0,1,1,0,13,0,0,0,1,0,67,0
170 | 158,3,2.76,1.6,1,0,0,0,1,1,12,0,1,0,1,0,60,0
171 | 159,3,3.08,2.32,1,0,1,0,1,1,12,0,0,1,1,0,61,0
172 | 160,3,2.2,1.7,1,0,0,0,1,0,11,0,0,0,1,0,58,0
173 | 161,3,2.92,1.88,0,0,0,0,0,0,12,0,0,0,0,0,76,0
174 | 162,3,2.88,2.36,0,0,0,0,0,0,11,0,0,0,1,0,56,0
175 | 163,3,3.2,2.28,1,1,1,0,1,0,12,0,0,0,1,0,67,0
176 | 166,3,3.2,2.21,1,0,1,1,1,0,12,0,0,0,1,0,54,0
177 | 168,3,2.57,1.72,1,0,0,0,1,1,11,0,0,0,1,0,81,0
178 | 169,3,2.28,2.08,0,0,0,0,0,0,11,1,0,0,1,0,56,0
179 | 170,3,2.44,1.96,1,0,1,1,1,0,13,0,0,0,0,0,60,1
180 | 171,3,4.04,1.88,1,0,0,0,1,0,12,0,0,0,1,0,66,0
181 | 174,3,2.6,2.36,1,0,0,0,1,0,11,0,0,0,1,0,55,1
182 | 175,3,1.44,1.04,1,0,0,0,1,1,11,0,0,0,1,0,62,0
183 | 176,3,3.68,2.36,0,0,0,1,1,0,12,0,0,0,1,0,71,1
184 | 177,3,3.2,2.72,2,0,0,0,1,0,14,0,0,0,1,0,52,0
185 | 178,3,3.04,2.32,1,0,0,0,1,0,12,0,0,0,1,0,59,0
186 | 179,3,4.32,4.32,1,0,1,0,1,1,12,0,0,0,1,0,48,0
187 | 180,3,3,2.36,2,0,0,0,1,1,12,0,0,0,1,0,60,0
188 | 181,3,3.64,2.88,1,0,0,0,1,1,12,0,0,0,1,0,61,0
189 | 182,3,5.08,4.08,1,0,0,0,1,0,12,0,0,0,1,0,59,0
190 | 183,3,3.16,2.36,1,0,0,0,1,0,11,0,0,0,1,0,64,0
191 | 184,3,2.8,3.36,1,0,0,0,1,0,12,0,0,0,1,0,56,0
192 | 185,3,2.52,2.08,0,0,0,0,0,0,11,0,0,0,0,0,58,0
193 | 187,3,3.32,2.15,1,0,0,0,1,0,11,0,0,0,1,0,64,0
194 | 189,3,2.28,1.24,1,0,0,0,1,0,11,0,0,0,1,0,72,0
195 | 191,3,2.6,1.56,0,0,0,0,0,0,12,0,0,0,1,1,61,0
196 | 192,3,2.68,2.4,0,0,0,0,0,0,11,0,0,0,1,0,60,1
197 | 194,3,3.84,3.36,0,0,0,0,0,0,12,0,0,0,1,0,53,0
198 | 195,3,3.52,2.8,0,0,0,0,0,0,11,0,0,0,1,0,58,0
199 | 196,3,2.73,2.11,1,0,1,0,1,0,12,0,0,0,1,0,61,1
200 | 197,3,2.84,2.24,1,1,1,0,0,0,12,0,0,0,1,0,68,1
201 | 198,3,2.98,2.64,1,0,0,0,1,0,12,0,0,0,0,0,60,0
202 | 199,3,3.52,2.72,1,0,0,0,0,0,11,0,0,0,1,0,72,0
203 | 201,3,2.36,2.08,1,0,0,0,1,0,12,0,0,0,1,0,57,0
204 | 202,3,2.76,2.28,0,0,0,0,0,0,11,0,0,0,1,0,51,0
205 | 205,3,3.12,2.9,0,0,0,0,0,0,12,0,0,0,0,0,77,0
206 | 206,3,2.24,1.76,0,0,0,0,0,0,12,0,0,0,1,0,64,0
207 | 207,3,3.96,2.88,0,0,0,0,0,0,11,0,0,0,1,0,57,0
208 | 208,3,2.6,1.92,1,0,0,0,1,0,11,0,0,0,1,0,66,0
209 | 209,3,4.2,3.24,0,0,0,0,0,0,12,0,0,0,1,0,70,0
210 | 211,3,4.72,4.56,0,0,0,0,0,0,11,0,0,0,1,0,51,0
211 | 212,3,3.58,2.64,1,0,0,0,1,0,12,0,0,0,1,0,58,1
212 | 213,3,2.44,2.12,1,1,1,1,0,0,11,0,0,0,1,0,58,0
213 | 214,3,2.22,1.36,0,0,0,0,0,0,12,1,0,0,1,0,63,1
214 | 215,3,2.96,2.32,0,0,0,0,0,0,11,0,0,0,1,0,51,0
215 | 218,3,4.52,3.6,1,0,0,0,1,0,12,0,0,0,1,0,76,0
216 | 219,3,4,3.08,1,0,0,0,1,0,11,0,0,0,1,0,71,0
217 | 220,3,2.84,2.12,0,0,0,0,0,0,11,0,0,0,1,0,69,0
218 | 223,3,4.8,3.41,1,0,0,1,1,0,12,0,0,0,1,0,54,0
219 | 224,3,3.72,3.04,0,0,0,0,0,0,11,0,0,1,1,0,63,0
220 | 225,3,4.96,3.48,1,0,0,0,1,0,12,0,0,0,1,0,47,0
221 | 227,3,2.96,2.44,1,0,0,0,1,1,12,0,0,0,1,0,65,0
222 | 228,3,2.64,2.44,1,0,0,0,1,0,12,0,0,0,1,0,63,1
223 | 229,3,2.4,1.64,0,0,0,0,0,0,11,0,0,0,0,0,64,0
224 | 230,3,2.64,2.08,1,0,0,0,1,0,12,1,0,0,1,0,65,1
225 | 231,3,4.76,3.31,1,0,0,1,1,0,11,0,0,0,1,0,51,0
226 | 233,3,2.32,1.76,1,0,0,0,1,0,11,0,0,0,1,0,70,0
227 | 234,3,2.6,2,1,0,0,0,1,0,12,0,0,0,1,0,58,0
228 | 235,3,2.46,1.76,1,0,0,0,1,1,11,0,0,0,1,0,67,0
229 | 236,3,4.16,3.64,1,0,0,0,1,0,12,0,0,0,1,0,62,0
230 | 237,3,3.2,1.8,1,0,0,0,1,1,12,0,0,0,1,0,74,0
231 | 238,3,3.24,2.64,0,0,0,0,1,0,11,0,0,0,1,0,69,0
232 | 240,3,3.52,2.52,1,0,0,0,1,0,12,0,0,0,1,0,60,1
233 | 241,3,4.36,3.76,0,0,0,0,0,0,11,0,0,0,1,0,72,0
234 | 242,3,5.52,3.56,1,0,0,0,1,0,12,0,0,0,1,0,64,0
235 | 244,3,4.36,3.92,1,0,0,0,0,0,11,0,0,0,1,0,47,0
236 | 245,3,3.56,2.64,1,0,0,0,1,0,11,0,1,0,1,0,57,0
237 | 246,3,5.49,2.97,1,0,0,0,1,0,12,0,0,0,1,0,56,0
238 | 248,3,4.08,3.2,0,0,0,0,1,0,12,0,0,0,1,0,55,0
239 | 250,3,2.56,1.8,1,0,0,0,1,0,12,0,0,0,1,0,73,0
240 | 251,3,3.8,2.82,1,0,0,0,1,0,12,0,0,0,1,0,68,0
241 | 252,3,3.04,2.24,2,0,0,0,1,1,11,0,0,0,1,0,75,1
242 | 253,3,3.81,2.94,1,0,0,0,1,0,12,0,0,0,1,0,63,0
243 | 254,3,3.92,2.36,1,0,0,0,1,0,12,0,0,0,1,0,61,0
244 | 255,3,3.44,3.52,1,1,0,0,0,0,11,0,0,0,1,0,62,0
245 | 256,3,3.72,78.3,0,1,0,0,1,0,12,0,0,0,1,0,44,0
246 | 257,3,2.8,1.88,1,0,0,0,1,0,11,0,0,0,1,0,56,0
247 | 258,3,2.92,2.32,0,0,0,0,0,0,11,0,0,0,1,0,54,0
248 | 259,3,3.72,2.48,1,0,1,0,1,0,11,0,0,0,1,0,57,0
249 | 260,3,3.64,2.52,0,0,0,0,0,0,12,0,0,0,1,0,56,0
250 | 261,3,2.72,2.09,0,0,0,0,0,0,14,0,0,0,0,0,69,1
251 | 262,3,1.84,1.12,1,0,0,0,1,0,12,0,0,0,1,0,72,0
252 | 263,3,2.96,1.72,0,0,1,0,1,0,11,0,0,0,1,0,59,0
253 | 265,3,2.6,1.92,1,0,0,0,1,0,11,0,0,0,1,0,64,0
254 | 266,3,2.92,2.52,0,0,0,0,0,0,12,0,0,0,1,0,61,0
255 | 267,3,3.8,2.84,1,0,0,0,1,0,12,0,0,0,1,0,72,0
256 | 268,3,3.32,2.92,2,0,0,0,1,1,13,0,0,0,1,0,63,0
257 | 269,3,2.52,1.72,2,0,0,1,1,1,12,0,0,0,1,0,74,1
258 | 270,3,4.28,3.28,1,1,0,0,1,0,11,0,0,0,1,0,71,0
259 | 271,3,2.52,1.72,1,0,0,0,1,1,12,0,0,0,1,0,71,1
260 | 273,3,2.07,1.6,0,0,1,0,0,0,12,0,0,0,0,0,77,0
261 | 276,3,1.7,1.36,1,0,0,0,0,1,12,0,0,0,1,0,65,0
262 | 277,3,3.04,2.04,1,0,0,0,1,0,12,0,0,0,1,0,67,0
263 | 278,3,3.36,2.64,1,0,0,0,1,0,12,1,0,0,1,0,69,0
264 | 279,3,4.57,4.57,1,0,0,0,1,0,11,0,0,0,0,0,55,0
265 | 280,3,4.12,2.32,1,0,0,0,1,0,11,0,0,0,1,0,51,0
266 | 281,3,2,1.36,1,0,0,0,1,0,11,0,0,0,1,0,64,0
267 | 282,3,3.8,3.68,0,0,0,0,0,0,12,0,0,0,0,0,63,0
268 | 283,3,3.16,2.6,1,1,0,0,1,0,12,0,0,0,0,0,69,0
269 | 285,3,2.32,1.92,0,0,0,0,0,0,11,0,0,0,1,0,59,0
270 | 286,3,2.48,1.4,1,0,0,0,1,0,11,0,0,0,1,0,73,0
271 | 288,3,2.96,2.2,1,0,0,0,1,0,12,0,0,0,1,0,63,0
272 | 289,3,2.96,1.88,1,0,0,0,1,1,14,0,0,0,1,0,60,0
273 | 290,3,3.52,2.36,1,0,0,0,1,0,12,0,0,0,1,0,74,0
274 | 291,3,4.12,3.16,1,0,0,0,1,1,12,0,0,0,1,0,65,0
275 | 292,3,2.68,2.32,1,0,0,0,1,1,11,1,0,0,1,0,79,0
276 | 294,3,4.12,2.88,1,0,0,0,1,0,12,0,0,1,1,0,71,0
277 | 296,3,3.68,2.96,1,0,1,0,1,0,12,0,0,0,1,0,67,0
278 | 297,3,2.48,1.84,1,0,0,0,1,0,12,0,0,0,1,0,55,1
279 | 298,3,4.36,3.24,1,1,0,1,1,0,12,0,0,0,1,0,54,1
280 | 299,3,4.32,2.72,2,0,1,0,1,1,11,0,0,0,1,0,77,0
281 | 300,3,3.4,1.92,0,0,0,0,0,0,12,0,0,0,1,0,58,0
282 | 301,3,4.24,3.04,1,0,1,0,1,1,12,0,0,0,1,0,64,0
283 | 302,3,3.28,1.96,0,0,0,0,0,0,12,0,0,0,0,0,61,0
284 | 303,3,4.59,3.02,2,1,0,0,1,1,13,0,0,0,0,0,62,1
285 | 304,3,4.16,3.44,1,0,1,0,1,1,12,0,0,0,1,0,67,0
286 | 305,3,5.16,4.28,0,0,0,0,0,0,12,0,0,0,1,0,56,0
287 | 306,3,2.76,1.8,1,0,0,0,1,0,12,0,0,0,1,0,70,1
288 | 308,3,2.8,2.32,1,0,0,0,1,0,12,0,0,0,1,0,57,0
289 | 309,3,2.32,1.96,1,0,0,0,1,0,11,0,0,0,1,0,61,0
290 | 310,3,1.98,1.57,1,0,0,0,0,1,11,0,0,0,1,0,77,0
291 | 312,3,2.4,1.64,1,0,1,0,1,1,12,0,0,0,0,0,62,0
292 | 313,3,3.12,2.52,1,1,0,0,1,0,12,0,0,0,1,0,59,1
293 | 314,3,2.6,1.84,1,0,0,0,1,0,12,0,0,0,1,0,70,0
294 | 317,3,3.6,2.64,1,0,0,0,1,0,12,0,0,0,0,0,57,0
295 | 318,3,2.48,2.12,1,0,0,1,1,0,12,0,0,0,1,0,78,0
296 | 319,3,2.4,1.96,1,0,0,0,1,0,11,0,0,0,1,0,64,0
297 | 320,3,2.1,69.1,0,0,0,0,0,0,11,0,0,0,1,0,62,0
298 | 321,3,5.12,4,1,0,0,0,1,0,14,0,0,0,1,0,49,0
299 | 322,3,4.65,3.78,1,0,0,0,1,0,12,0,0,0,1,0,77,1
300 | 323,3,2.72,2.36,1,0,0,0,1,0,11,0,0,0,1,0,64,0
301 | 327,3,3.2,2.52,1,0,0,0,1,1,12,0,0,0,1,0,75,0
302 | 328,3,2.52,1.92,2,0,1,0,1,1,11,0,0,0,1,0,70,0
303 | 329,3,1.96,1.48,1,0,0,0,1,0,12,0,0,0,1,0,59,0
304 | 332,3,3.52,3.12,0,0,0,0,0,0,11,0,0,0,1,0,64,0
305 | 333,3,2.6,1.92,0,0,0,0,0,0,11,0,0,0,1,0,59,0
306 | 336,3,2.4,1.8,1,0,0,0,1,0,11,0,0,0,1,0,64,0
307 | 337,3,2.32,1.32,1,0,1,0,1,1,11,0,0,0,1,0,68,0
308 | 339,3,4,3.08,0,0,0,0,0,0,11,0,0,0,1,0,64,0
309 | 340,3,2.96,2,1,0,0,0,1,0,12,0,0,0,1,0,59,0
310 | 341,3,3.88,2.92,0,0,0,0,0,0,11,0,0,0,1,0,67,1
311 | 342,3,2.36,1.76,1,0,1,0,0,0,12,0,0,0,1,0,74,0
312 | 344,3,2.96,2.44,1,0,0,0,1,1,11,0,0,0,1,0,60,0
313 | 345,3,3.64,3.12,1,0,0,0,1,0,12,0,0,0,1,0,64,0
314 | 348,3,4.16,3.44,1,1,0,0,1,0,13,0,0,0,1,0,59,0
315 | 351,3,2.64,2.16,1,0,1,0,1,0,12,0,0,0,1,0,71,1
316 | 352,3,3.05,1.3,1,0,0,0,1,0,11,0,0,0,1,0,70,0
317 | 353,3,2.94,73.3,1,0,1,1,0,0,12,0,0,0,0,0,60,0
318 | 354,3,3.24,52.3,0,0,0,0,0,0,12,1,0,0,1,0,55,0
319 | 355,3,4.28,3.52,1,0,0,0,1,0,11,0,0,0,1,0,60,0
320 | 356,3,3.68,3.2,1,0,0,0,1,0,12,0,0,0,1,0,55,0
321 | 357,3,2.8,2.44,1,0,0,1,1,0,12,0,0,0,1,0,55,0
322 | 358,3,2,1.36,0,0,0,0,0,0,12,0,0,0,1,0,70,1
323 | 359,3,2.4,2.04,1,0,0,0,1,0,12,0,0,0,1,0,63,0
324 | 361,3,2.6,2.12,1,0,0,0,1,0,12,0,0,0,1,0,55,0
325 | 362,3,2.84,2.4,1,0,0,0,1,0,11,0,0,0,1,0,49,0
326 | 363,3,3.08,1.72,1,0,0,0,1,1,12,1,0,0,1,0,58,1
327 | 364,3,2.2,1.6,1,0,1,0,1,0,12,0,0,0,1,0,59,0
328 | 365,3,2.32,1.72,2,0,0,0,1,1,11,0,0,0,1,0,56,0
329 | 366,3,2.04,1.8,0,0,0,0,0,0,12,0,0,0,1,0,64,0
330 | 367,3,2.56,2.2,1,0,0,0,1,0,11,0,0,0,1,0,62,0
331 | 370,3,3.8,3.16,0,0,0,0,0,0,12,0,0,0,1,0,59,0
332 | 371,3,2.88,2.16,0,0,0,0,0,0,12,0,0,0,1,0,59,0
333 | 372,3,2.32,1.76,0,0,0,0,0,0,12,0,0,0,0,0,55,0
334 | 373,3,2.92,2.4,1,0,0,0,1,0,11,0,0,0,1,0,46,0
335 | 374,3,2,1.52,0,0,1,0,1,0,14,1,0,0,1,0,60,0
336 | 375,3,2.4,2.16,1,0,0,0,1,0,12,0,0,0,1,0,69,0
337 | 376,3,4.56,3.84,0,0,0,0,0,0,12,0,0,0,1,0,74,0
338 | 377,3,4.03,3.09,1,0,0,0,1,0,11,0,0,0,1,0,59,0
339 | 378,3,2.16,1.88,0,0,0,0,0,0,12,0,0,0,1,0,63,0
340 | 379,3,4.52,3.36,1,0,0,0,0,1,12,0,0,0,1,0,63,0
341 | 381,3,3.76,1,0,0,1,0,0,0,12,0,0,0,1,0,52,0
342 | 382,3,5,3.88,0,0,0,0,0,0,11,0,0,0,1,0,51,0
343 | 384,3,2.4,1.88,1,0,0,0,1,0,11,0,0,0,0,0,53,0
344 | 385,3,2,1.64,1,0,0,0,1,0,12,0,0,0,0,0,61,0
345 | 386,3,2.52,1.96,1,0,0,0,1,0,12,1,0,0,1,0,72,0
346 | 387,3,4.4,3.56,1,0,0,1,1,1,11,0,0,0,1,0,60,1
347 | 389,3,1.96,1.4,1,0,0,0,1,0,13,0,0,0,1,0,69,0
348 | 391,3,2.92,2.28,1,0,0,0,1,0,12,0,0,0,1,0,75,0
349 | 394,3,3.72,3,1,0,0,0,1,0,12,0,0,0,1,0,61,0
350 | 397,3,2.76,2.08,0,0,0,0,0,0,12,0,0,0,0,0,21,0
351 | 398,3,4.56,3.48,1,0,0,0,1,0,12,0,0,0,1,0,60,0
352 | 400,3,2.7,1.9,1,0,0,0,1,0,11,0,0,0,1,0,65,0
353 | 401,3,2.48,1.6,0,0,0,0,0,0,11,0,0,0,0,0,61,0
354 | 402,3,3.56,2.8,0,0,0,0,0,0,12,0,0,0,0,0,69,0
355 | 403,3,2.96,2.2,1,0,0,0,1,0,12,0,0,0,1,0,53,0
356 | 404,3,4.04,2.56,1,0,1,0,1,0,12,0,0,0,1,0,55,0
357 | 407,3,3.44,2.92,1,0,0,0,1,0,11,0,0,0,1,0,56,0
358 | 409,3,3.08,2.24,1,0,0,0,1,0,12,1,0,0,1,0,59,0
359 | 410,3,2.64,2.15,0,0,0,0,0,0,11,0,0,0,1,0,59,0
360 | 412,3,4.64,4.16,1,1,0,0,1,0,13,0,0,0,1,0,56,0
361 | 413,3,3.32,2.52,0,0,0,0,0,0,11,0,0,0,0,0,56,0
362 | 415,3,1.46,1,1,0,1,0,1,0,11,0,0,0,1,0,68,0
363 | 416,3,3.4,2.39,0,0,0,0,0,0,11,0,0,0,0,0,63,0
364 | 417,3,3.44,2.4,1,0,0,0,1,1,11,1,0,0,1,0,77,0
365 | 418,3,5.16,4.28,1,0,0,0,0,0,12,0,0,0,1,0,52,0
366 | 423,3,2.68,2.16,0,0,0,0,0,0,12,0,0,0,1,0,70,0
367 | 424,3,5,4.04,0,0,1,0,1,0,12,0,0,0,0,0,60,0
368 | 426,3,3.18,2.73,1,0,0,0,1,0,12,0,0,0,1,0,47,0
369 | 427,3,2.48,2.08,1,0,0,0,1,0,13,0,0,0,1,0,54,1
370 | 428,3,3.44,2.72,1,1,1,0,1,0,11,0,0,0,0,0,73,0
371 | 429,3,3.12,2.12,1,0,0,0,1,1,12,0,0,0,1,0,62,0
372 | 430,3,3.48,2.52,1,0,0,0,1,0,14,1,0,0,1,0,72,0
373 | 431,3,3.87,2.68,0,0,0,0,0,0,12,0,0,0,1,0,63,0
374 | 432,3,1.44,1.2,1,0,0,0,1,0,11,0,0,0,1,0,58,0
375 | 433,3,2.28,1.82,0,0,0,0,0,0,11,1,0,0,0,0,69,0
376 | 434,3,4.28,2.72,1,1,1,0,1,0,11,0,0,0,1,0,66,0
377 | 435,3,3.08,2.28,1,0,0,0,1,0,11,0,0,0,1,0,57,0
378 | 436,3,2.96,2.04,1,0,0,0,1,0,11,0,0,0,1,0,56,0
379 | 437,3,4.8,3.32,1,0,0,1,1,0,12,0,0,0,1,0,54,0
380 | 438,3,4.08,3.2,1,0,0,0,1,0,12,0,0,0,1,0,40,0
381 | 440,3,2.36,1.6,1,0,0,0,1,1,11,0,0,1,1,0,54,0
382 | 441,3,3,2.44,1,0,0,0,1,1,12,0,0,0,1,0,65,0
383 | 444,3,4.12,3.2,2,0,0,0,1,1,11,0,0,0,0,0,76,0
384 | 445,3,2.56,60.9,0,0,0,0,0,0,11,0,0,0,1,0,50,0
385 | 446,3,2.72,1.76,0,0,0,0,0,0,11,0,0,0,1,0,63,0
386 | 449,3,2.96,2.24,0,0,0,0,0,0,12,0,0,0,1,0,69,0
387 | 450,3,2.84,1.88,1,0,0,0,1,0,12,0,0,0,1,0,53,1
388 | 451,3,2.28,1.68,2,0,0,0,1,1,11,0,0,0,0,0,77,0
389 | 453,3,2.8,2.24,1,1,0,0,1,0,13,0,0,0,1,0,70,0
390 | 454,3,2.84,2.32,1,0,1,0,1,1,11,0,0,0,1,0,72,0
391 | 455,3,3.24,2.76,0,0,0,0,0,0,11,0,0,0,1,0,70,0
392 | 457,3,2.4,1.24,1,0,0,0,1,0,12,0,0,0,1,0,62,0
393 | 458,3,4.56,3.2,0,0,0,0,1,0,11,0,0,0,1,0,61,0
394 | 459,3,3.6,3,1,0,0,0,1,0,11,0,0,0,0,0,46,0
395 | 460,3,4.28,3.16,0,0,0,0,1,0,12,0,0,0,0,0,66,0
396 | 462,3,1.84,1.56,1,1,1,0,1,0,12,0,0,0,0,0,72,0
397 | 463,3,2.12,1.68,2,1,1,0,0,0,11,0,0,0,1,0,74,0
398 | 465,3,3.08,2.16,1,0,0,0,1,1,13,0,0,0,1,0,79,0
399 | 467,3,3.76,3.12,0,0,0,0,0,0,11,0,0,0,1,0,61,0
400 | 468,3,3.04,2.08,1,0,0,0,1,0,13,0,0,0,0,0,52,0
401 | 469,3,1.96,1.68,1,0,0,0,1,1,12,0,0,0,1,0,79,0
402 | 470,3,4.72,3.56,0,0,0,0,0,0,12,0,0,0,1,0,51,0
403 | 22,4,3.32,2.84,0,0,0,0,0,0,12,0,0,0,1,0,62,0
404 | 54,4,3.76,2.52,1,0,0,0,1,0,12,0,0,0,1,0,75,0
405 | 56,4,3.28,2.36,1,0,0,0,1,0,12,0,0,0,1,0,65,0
406 | 59,4,5.12,4.28,0,0,0,0,0,0,12,0,0,0,1,0,62,0
407 | 68,4,2.32,1.76,1,0,1,0,1,1,11,0,0,0,1,0,62,1
408 | 74,4,6.3,5.48,0,0,0,0,0,0,11,0,0,0,0,0,45,0
409 | 91,4,3.52,2.72,1,0,1,0,1,0,12,0,0,0,1,0,80,0
410 | 97,4,2.68,2,1,0,0,0,1,0,12,0,0,0,1,0,70,1
411 | 103,4,4.32,3.24,1,0,0,0,1,0,12,0,0,0,1,0,76,0
412 | 116,4,2.76,1.76,1,0,1,0,1,0,11,1,0,0,1,0,61,1
413 | 117,4,2.88,2.24,1,0,0,0,1,1,12,0,0,0,1,0,73,0
414 | 119,4,3.48,2.56,1,0,0,0,0,0,11,0,0,0,1,0,57,0
415 | 124,4,3.3,2.56,0,0,0,0,0,0,11,0,0,0,1,0,67,0
416 | 129,4,3.31,2,2,0,0,1,1,0,12,0,0,0,1,0,81,1
417 | 144,4,2.08,1.76,0,0,0,0,0,0,12,0,0,0,1,0,69,1
418 | 188,4,3.28,1.64,1,0,0,0,1,0,11,0,0,0,1,0,62,0
419 | 190,4,4.92,3.72,0,0,0,0,0,0,12,0,0,0,1,0,60,0
420 | 200,4,2.44,1.64,1,0,0,0,1,1,11,0,0,0,1,0,72,0
421 | 222,4,4.24,3.68,1,0,0,0,1,0,12,0,0,0,1,0,67,0
422 | 226,4,2.76,2.16,1,1,0,0,0,0,12,1,0,0,1,0,62,0
423 | 247,4,5.56,4.32,0,0,0,0,0,0,12,0,0,0,1,0,68,0
424 | 249,4,4.56,3.68,1,0,0,0,1,0,12,0,0,0,1,0,62,0
425 | 264,4,3.04,2.88,0,0,0,0,0,0,11,0,0,0,0,0,70,0
426 | 274,4,3.36,2.72,2,0,0,0,1,1,11,1,0,0,1,0,72,0
427 | 287,4,4.9,3.96,1,0,0,0,1,0,12,0,0,0,1,0,55,0
428 | 311,4,3.4,2.92,0,0,0,0,0,0,11,0,0,0,0,0,63,0
429 | 315,4,2.12,1.36,1,0,0,0,1,0,12,0,0,0,1,0,71,0
430 | 325,4,5.16,4.96,1,0,0,0,0,0,11,0,0,0,1,0,54,0
431 | 326,4,5.03,79.3,1,0,0,1,0,0,11,0,0,0,0,0,38,0
432 | 330,4,2.08,1.84,0,0,0,0,0,0,12,0,0,0,0,0,77,0
433 | 334,4,2.2,1.8,0,0,0,0,0,0,11,0,0,0,0,0,71,0
434 | 338,4,3.24,2.6,1,0,0,0,1,0,12,0,0,0,1,0,69,0
435 | 343,4,2.5,1.4,1,0,1,0,1,0,11,0,0,0,1,0,77,0
436 | 350,4,1.82,86.3,0,0,0,0,0,0,12,0,0,0,0,0,67,0
437 | 360,4,2.84,2.12,0,0,0,0,0,0,12,0,0,0,0,0,64,0
438 | 380,4,2.72,2.04,1,0,0,0,1,0,11,0,0,0,1,0,75,0
439 | 383,4,3.4,2.16,1,1,1,0,1,0,12,0,0,0,0,0,68,0
440 | 388,4,4.2,3.32,0,0,0,0,0,0,12,0,0,0,1,0,58,0
441 | 393,4,3.56,2.6,1,0,0,0,1,0,13,0,0,0,1,0,68,0
442 | 395,4,3.96,2.44,1,0,0,0,1,1,11,0,0,0,1,0,44,0
443 | 396,4,3.04,3.68,1,0,0,0,1,0,11,1,0,0,1,0,64,0
444 | 420,4,2.44,2.08,2,0,0,0,1,1,12,0,0,0,1,0,72,1
445 | 425,4,2.81,2.31,1,1,0,0,0,0,12,0,0,0,1,0,58,0
446 | 452,4,3.04,2.36,1,0,0,0,1,0,12,0,0,0,1,0,59,0
447 | 456,4,2.92,1.92,1,0,0,0,1,0,12,0,0,0,1,0,70,0
448 | 461,4,4.65,3.78,1,0,0,0,1,0,12,0,0,0,0,0,55,0
449 | 464,4,3.44,2.16,1,0,0,0,1,1,12,1,0,0,1,0,57,1
450 | 26,5,4.56,72.8,0,1,1,0,1,0,12,0,0,0,1,0,57,0
451 | 33,5,2.48,1.95,1,1,0,0,0,0,12,1,0,0,0,0,72,0
452 | 41,5,3.8,2.98,1,0,0,0,1,0,11,0,0,0,1,0,60,1
453 | 44,5,2.68,2.12,0,0,0,0,1,0,12,0,0,0,1,0,51,1
454 | 89,5,2.68,1.76,2,0,1,0,1,1,11,0,0,0,1,0,76,0
455 | 106,5,4.95,4.12,1,0,0,0,0,1,11,0,0,0,0,0,57,0
456 | 186,5,3.52,2.56,0,0,0,1,0,0,12,0,0,0,0,0,81,1
457 | 221,5,2.87,2.08,1,0,0,0,1,0,13,0,0,0,1,0,56,1
458 | 232,5,2.88,2.52,1,0,0,0,1,0,12,0,0,0,1,0,56,0
459 | 239,5,3.4,2.08,1,0,0,0,0,1,11,0,0,0,1,0,55,1
460 | 272,5,3,2.16,0,0,0,0,0,0,11,0,0,0,1,0,72,0
461 | 307,5,3.3,2.4,1,0,0,0,1,1,12,0,0,0,1,0,70,0
462 | 368,5,2.38,1.72,1,0,1,0,1,0,12,1,0,1,1,0,87,1
463 | 421,5,4.96,4.16,1,0,0,0,1,0,11,0,0,0,1,0,62,1
464 | 439,5,3.67,76.8,0,1,1,0,1,0,12,0,0,0,0,0,61,0
465 | 30,6,3.96,3.28,0,0,0,0,0,0,11,0,0,0,1,0,61,0
466 | 98,6,3.04,2.4,2,0,0,0,1,0,11,0,0,0,1,0,76,0
467 | 369,6,3.88,2.72,1,0,0,0,1,0,12,0,0,0,1,0,77,0
468 | 406,6,5.36,3.96,1,0,0,0,1,0,12,0,0,0,0,0,62,0
469 | 25,8,4.32,3.2,0,0,0,0,0,0,11,0,0,0,0,0,58,1
470 | 447,8,5.2,4.1,0,0,0,0,0,0,12,0,0,0,0,0,49,0
471 |
--------------------------------------------------------------------------------
/dataset/ThoraricSurgery_test.csv:
--------------------------------------------------------------------------------
1 | 293,1,3.8,2.8,0,0,0,0,0,0,12,0,0,0,1,0,62,0
2 | 1,2,2.88,2.16,1,0,0,0,1,1,14,0,0,0,1,0,60,0
3 | 8,2,3.19,2.5,1,0,0,0,1,0,11,0,0,1,1,0,66,1
4 | 14,2,3.98,3.06,2,0,0,0,1,1,14,0,0,0,1,0,80,1
5 | 17,2,2.21,1.88,0,0,1,0,0,0,12,0,0,0,1,0,56,0
--------------------------------------------------------------------------------
/dataset/iris-test.csv:
--------------------------------------------------------------------------------
1 | 5.1,3.5,1.4,0.2,Iris-setosa
2 | 4.9,3.0,1.4,0.2,Iris-setosa
3 | 4.7,3.2,1.3,0.2,Iris-setosa
4 | 6.9,3.1,4.9,1.5,Iris-versicolor
5 | 5.5,2.3,4.0,1.3,Iris-versicolor
6 | 5.7,2.8,4.1,1.3,Iris-versicolor
7 | 6.3,3.3,6.0,2.5,Iris-virginica
8 | 5.8,2.7,5.1,1.9,Iris-virginica
--------------------------------------------------------------------------------
/dataset/iris.csv:
--------------------------------------------------------------------------------
1 | 5.1,3.5,1.4,0.2,Iris-setosa
2 | 4.9,3.0,1.4,0.2,Iris-setosa
3 | 4.7,3.2,1.3,0.2,Iris-setosa
4 | 4.6,3.1,1.5,0.2,Iris-setosa
5 | 5.0,3.6,1.4,0.2,Iris-setosa
6 | 5.4,3.9,1.7,0.4,Iris-setosa
7 | 4.6,3.4,1.4,0.3,Iris-setosa
8 | 5.0,3.4,1.5,0.2,Iris-setosa
9 | 4.4,2.9,1.4,0.2,Iris-setosa
10 | 4.9,3.1,1.5,0.1,Iris-setosa
11 | 5.4,3.7,1.5,0.2,Iris-setosa
12 | 4.8,3.4,1.6,0.2,Iris-setosa
13 | 4.8,3.0,1.4,0.1,Iris-setosa
14 | 4.3,3.0,1.1,0.1,Iris-setosa
15 | 5.8,4.0,1.2,0.2,Iris-setosa
16 | 5.7,4.4,1.5,0.4,Iris-setosa
17 | 5.4,3.9,1.3,0.4,Iris-setosa
18 | 5.1,3.5,1.4,0.3,Iris-setosa
19 | 5.7,3.8,1.7,0.3,Iris-setosa
20 | 5.1,3.8,1.5,0.3,Iris-setosa
21 | 5.4,3.4,1.7,0.2,Iris-setosa
22 | 5.1,3.7,1.5,0.4,Iris-setosa
23 | 4.6,3.6,1.0,0.2,Iris-setosa
24 | 5.1,3.3,1.7,0.5,Iris-setosa
25 | 4.8,3.4,1.9,0.2,Iris-setosa
26 | 5.0,3.0,1.6,0.2,Iris-setosa
27 | 5.0,3.4,1.6,0.4,Iris-setosa
28 | 5.2,3.5,1.5,0.2,Iris-setosa
29 | 5.2,3.4,1.4,0.2,Iris-setosa
30 | 4.7,3.2,1.6,0.2,Iris-setosa
31 | 4.8,3.1,1.6,0.2,Iris-setosa
32 | 5.4,3.4,1.5,0.4,Iris-setosa
33 | 5.2,4.1,1.5,0.1,Iris-setosa
34 | 5.5,4.2,1.4,0.2,Iris-setosa
35 | 4.9,3.1,1.5,0.1,Iris-setosa
36 | 5.0,3.2,1.2,0.2,Iris-setosa
37 | 5.5,3.5,1.3,0.2,Iris-setosa
38 | 4.9,3.1,1.5,0.1,Iris-setosa
39 | 4.4,3.0,1.3,0.2,Iris-setosa
40 | 5.1,3.4,1.5,0.2,Iris-setosa
41 | 5.0,3.5,1.3,0.3,Iris-setosa
42 | 4.5,2.3,1.3,0.3,Iris-setosa
43 | 4.4,3.2,1.3,0.2,Iris-setosa
44 | 5.0,3.5,1.6,0.6,Iris-setosa
45 | 5.1,3.8,1.9,0.4,Iris-setosa
46 | 4.8,3.0,1.4,0.3,Iris-setosa
47 | 5.1,3.8,1.6,0.2,Iris-setosa
48 | 4.6,3.2,1.4,0.2,Iris-setosa
49 | 5.3,3.7,1.5,0.2,Iris-setosa
50 | 5.0,3.3,1.4,0.2,Iris-setosa
51 | 7.0,3.2,4.7,1.4,Iris-versicolor
52 | 6.4,3.2,4.5,1.5,Iris-versicolor
53 | 6.9,3.1,4.9,1.5,Iris-versicolor
54 | 5.5,2.3,4.0,1.3,Iris-versicolor
55 | 6.5,2.8,4.6,1.5,Iris-versicolor
56 | 5.7,2.8,4.5,1.3,Iris-versicolor
57 | 6.3,3.3,4.7,1.6,Iris-versicolor
58 | 4.9,2.4,3.3,1.0,Iris-versicolor
59 | 6.6,2.9,4.6,1.3,Iris-versicolor
60 | 5.2,2.7,3.9,1.4,Iris-versicolor
61 | 5.0,2.0,3.5,1.0,Iris-versicolor
62 | 5.9,3.0,4.2,1.5,Iris-versicolor
63 | 6.0,2.2,4.0,1.0,Iris-versicolor
64 | 6.1,2.9,4.7,1.4,Iris-versicolor
65 | 5.6,2.9,3.6,1.3,Iris-versicolor
66 | 6.7,3.1,4.4,1.4,Iris-versicolor
67 | 5.6,3.0,4.5,1.5,Iris-versicolor
68 | 5.8,2.7,4.1,1.0,Iris-versicolor
69 | 6.2,2.2,4.5,1.5,Iris-versicolor
70 | 5.6,2.5,3.9,1.1,Iris-versicolor
71 | 5.9,3.2,4.8,1.8,Iris-versicolor
72 | 6.1,2.8,4.0,1.3,Iris-versicolor
73 | 6.3,2.5,4.9,1.5,Iris-versicolor
74 | 6.1,2.8,4.7,1.2,Iris-versicolor
75 | 6.4,2.9,4.3,1.3,Iris-versicolor
76 | 6.6,3.0,4.4,1.4,Iris-versicolor
77 | 6.8,2.8,4.8,1.4,Iris-versicolor
78 | 6.7,3.0,5.0,1.7,Iris-versicolor
79 | 6.0,2.9,4.5,1.5,Iris-versicolor
80 | 5.7,2.6,3.5,1.0,Iris-versicolor
81 | 5.5,2.4,3.8,1.1,Iris-versicolor
82 | 5.5,2.4,3.7,1.0,Iris-versicolor
83 | 5.8,2.7,3.9,1.2,Iris-versicolor
84 | 6.0,2.7,5.1,1.6,Iris-versicolor
85 | 5.4,3.0,4.5,1.5,Iris-versicolor
86 | 6.0,3.4,4.5,1.6,Iris-versicolor
87 | 6.7,3.1,4.7,1.5,Iris-versicolor
88 | 6.3,2.3,4.4,1.3,Iris-versicolor
89 | 5.6,3.0,4.1,1.3,Iris-versicolor
90 | 5.5,2.5,4.0,1.3,Iris-versicolor
91 | 5.5,2.6,4.4,1.2,Iris-versicolor
92 | 6.1,3.0,4.6,1.4,Iris-versicolor
93 | 5.8,2.6,4.0,1.2,Iris-versicolor
94 | 5.0,2.3,3.3,1.0,Iris-versicolor
95 | 5.6,2.7,4.2,1.3,Iris-versicolor
96 | 5.7,3.0,4.2,1.2,Iris-versicolor
97 | 5.7,2.9,4.2,1.3,Iris-versicolor
98 | 6.2,2.9,4.3,1.3,Iris-versicolor
99 | 5.1,2.5,3.0,1.1,Iris-versicolor
100 | 5.7,2.8,4.1,1.3,Iris-versicolor
101 | 6.3,3.3,6.0,2.5,Iris-virginica
102 | 5.8,2.7,5.1,1.9,Iris-virginica
103 | 7.1,3.0,5.9,2.1,Iris-virginica
104 | 6.3,2.9,5.6,1.8,Iris-virginica
105 | 6.5,3.0,5.8,2.2,Iris-virginica
106 | 7.6,3.0,6.6,2.1,Iris-virginica
107 | 4.9,2.5,4.5,1.7,Iris-virginica
108 | 7.3,2.9,6.3,1.8,Iris-virginica
109 | 6.7,2.5,5.8,1.8,Iris-virginica
110 | 7.2,3.6,6.1,2.5,Iris-virginica
111 | 6.5,3.2,5.1,2.0,Iris-virginica
112 | 6.4,2.7,5.3,1.9,Iris-virginica
113 | 6.8,3.0,5.5,2.1,Iris-virginica
114 | 5.7,2.5,5.0,2.0,Iris-virginica
115 | 5.8,2.8,5.1,2.4,Iris-virginica
116 | 6.4,3.2,5.3,2.3,Iris-virginica
117 | 6.5,3.0,5.5,1.8,Iris-virginica
118 | 7.7,3.8,6.7,2.2,Iris-virginica
119 | 7.7,2.6,6.9,2.3,Iris-virginica
120 | 6.0,2.2,5.0,1.5,Iris-virginica
121 | 6.9,3.2,5.7,2.3,Iris-virginica
122 | 5.6,2.8,4.9,2.0,Iris-virginica
123 | 7.7,2.8,6.7,2.0,Iris-virginica
124 | 6.3,2.7,4.9,1.8,Iris-virginica
125 | 6.7,3.3,5.7,2.1,Iris-virginica
126 | 7.2,3.2,6.0,1.8,Iris-virginica
127 | 6.2,2.8,4.8,1.8,Iris-virginica
128 | 6.1,3.0,4.9,1.8,Iris-virginica
129 | 6.4,2.8,5.6,2.1,Iris-virginica
130 | 7.2,3.0,5.8,1.6,Iris-virginica
131 | 7.4,2.8,6.1,1.9,Iris-virginica
132 | 7.9,3.8,6.4,2.0,Iris-virginica
133 | 6.4,2.8,5.6,2.2,Iris-virginica
134 | 6.3,2.8,5.1,1.5,Iris-virginica
135 | 6.1,2.6,5.6,1.4,Iris-virginica
136 | 7.7,3.0,6.1,2.3,Iris-virginica
137 | 6.3,3.4,5.6,2.4,Iris-virginica
138 | 6.4,3.1,5.5,1.8,Iris-virginica
139 | 6.0,3.0,4.8,1.8,Iris-virginica
140 | 6.9,3.1,5.4,2.1,Iris-virginica
141 | 6.7,3.1,5.6,2.4,Iris-virginica
142 | 6.9,3.1,5.1,2.3,Iris-virginica
143 | 5.8,2.7,5.1,1.9,Iris-virginica
144 | 6.8,3.2,5.9,2.3,Iris-virginica
145 | 6.7,3.3,5.7,2.5,Iris-virginica
146 | 6.7,3.0,5.2,2.3,Iris-virginica
147 | 6.3,2.5,5.0,1.9,Iris-virginica
148 | 6.5,3.0,5.2,2.0,Iris-virginica
149 | 6.2,3.4,5.4,2.3,Iris-virginica
150 | 5.9,3.0,5.1,1.8,Iris-virginica
151 |
152 |
--------------------------------------------------------------------------------
/dataset/pima-indians-diabetes-test.csv:
--------------------------------------------------------------------------------
1 | 6,148,72,35,0,33.6,0.627,50,1
2 | 1,85,66,29,0,26.6,0.351,31,0
3 | 8,183,64,0,0,23.3,0.672,32,1
4 | 1,89,66,23,94,28.1,0.167,21,0
5 | 0,137,40,35,168,43.1,2.288,33,1
6 | 5,116,74,0,0,25.6,0.201,30,0
--------------------------------------------------------------------------------
/dataset/pima-indians-diabetes.csv:
--------------------------------------------------------------------------------
1 | 6,148,72,35,0,33.6,0.627,50,1
2 | 1,85,66,29,0,26.6,0.351,31,0
3 | 8,183,64,0,0,23.3,0.672,32,1
4 | 1,89,66,23,94,28.1,0.167,21,0
5 | 0,137,40,35,168,43.1,2.288,33,1
6 | 5,116,74,0,0,25.6,0.201,30,0
7 | 3,78,50,32,88,31.0,0.248,26,1
8 | 10,115,0,0,0,35.3,0.134,29,0
9 | 2,197,70,45,543,30.5,0.158,53,1
10 | 8,125,96,0,0,0.0,0.232,54,1
11 | 4,110,92,0,0,37.6,0.191,30,0
12 | 10,168,74,0,0,38.0,0.537,34,1
13 | 10,139,80,0,0,27.1,1.441,57,0
14 | 1,189,60,23,846,30.1,0.398,59,1
15 | 5,166,72,19,175,25.8,0.587,51,1
16 | 7,100,0,0,0,30.0,0.484,32,1
17 | 0,118,84,47,230,45.8,0.551,31,1
18 | 7,107,74,0,0,29.6,0.254,31,1
19 | 1,103,30,38,83,43.3,0.183,33,0
20 | 1,115,70,30,96,34.6,0.529,32,1
21 | 3,126,88,41,235,39.3,0.704,27,0
22 | 8,99,84,0,0,35.4,0.388,50,0
23 | 7,196,90,0,0,39.8,0.451,41,1
24 | 9,119,80,35,0,29.0,0.263,29,1
25 | 11,143,94,33,146,36.6,0.254,51,1
26 | 10,125,70,26,115,31.1,0.205,41,1
27 | 7,147,76,0,0,39.4,0.257,43,1
28 | 1,97,66,15,140,23.2,0.487,22,0
29 | 13,145,82,19,110,22.2,0.245,57,0
30 | 5,117,92,0,0,34.1,0.337,38,0
31 | 5,109,75,26,0,36.0,0.546,60,0
32 | 3,158,76,36,245,31.6,0.851,28,1
33 | 3,88,58,11,54,24.8,0.267,22,0
34 | 6,92,92,0,0,19.9,0.188,28,0
35 | 10,122,78,31,0,27.6,0.512,45,0
36 | 4,103,60,33,192,24.0,0.966,33,0
37 | 11,138,76,0,0,33.2,0.420,35,0
38 | 9,102,76,37,0,32.9,0.665,46,1
39 | 2,90,68,42,0,38.2,0.503,27,1
40 | 4,111,72,47,207,37.1,1.390,56,1
41 | 3,180,64,25,70,34.0,0.271,26,0
42 | 7,133,84,0,0,40.2,0.696,37,0
43 | 7,106,92,18,0,22.7,0.235,48,0
44 | 9,171,110,24,240,45.4,0.721,54,1
45 | 7,159,64,0,0,27.4,0.294,40,0
46 | 0,180,66,39,0,42.0,1.893,25,1
47 | 1,146,56,0,0,29.7,0.564,29,0
48 | 2,71,70,27,0,28.0,0.586,22,0
49 | 7,103,66,32,0,39.1,0.344,31,1
50 | 7,105,0,0,0,0.0,0.305,24,0
51 | 1,103,80,11,82,19.4,0.491,22,0
52 | 1,101,50,15,36,24.2,0.526,26,0
53 | 5,88,66,21,23,24.4,0.342,30,0
54 | 8,176,90,34,300,33.7,0.467,58,1
55 | 7,150,66,42,342,34.7,0.718,42,0
56 | 1,73,50,10,0,23.0,0.248,21,0
57 | 7,187,68,39,304,37.7,0.254,41,1
58 | 0,100,88,60,110,46.8,0.962,31,0
59 | 0,146,82,0,0,40.5,1.781,44,0
60 | 0,105,64,41,142,41.5,0.173,22,0
61 | 2,84,0,0,0,0.0,0.304,21,0
62 | 8,133,72,0,0,32.9,0.270,39,1
63 | 5,44,62,0,0,25.0,0.587,36,0
64 | 2,141,58,34,128,25.4,0.699,24,0
65 | 7,114,66,0,0,32.8,0.258,42,1
66 | 5,99,74,27,0,29.0,0.203,32,0
67 | 0,109,88,30,0,32.5,0.855,38,1
68 | 2,109,92,0,0,42.7,0.845,54,0
69 | 1,95,66,13,38,19.6,0.334,25,0
70 | 4,146,85,27,100,28.9,0.189,27,0
71 | 2,100,66,20,90,32.9,0.867,28,1
72 | 5,139,64,35,140,28.6,0.411,26,0
73 | 13,126,90,0,0,43.4,0.583,42,1
74 | 4,129,86,20,270,35.1,0.231,23,0
75 | 1,79,75,30,0,32.0,0.396,22,0
76 | 1,0,48,20,0,24.7,0.140,22,0
77 | 7,62,78,0,0,32.6,0.391,41,0
78 | 5,95,72,33,0,37.7,0.370,27,0
79 | 0,131,0,0,0,43.2,0.270,26,1
80 | 2,112,66,22,0,25.0,0.307,24,0
81 | 3,113,44,13,0,22.4,0.140,22,0
82 | 2,74,0,0,0,0.0,0.102,22,0
83 | 7,83,78,26,71,29.3,0.767,36,0
84 | 0,101,65,28,0,24.6,0.237,22,0
85 | 5,137,108,0,0,48.8,0.227,37,1
86 | 2,110,74,29,125,32.4,0.698,27,0
87 | 13,106,72,54,0,36.6,0.178,45,0
88 | 2,100,68,25,71,38.5,0.324,26,0
89 | 15,136,70,32,110,37.1,0.153,43,1
90 | 1,107,68,19,0,26.5,0.165,24,0
91 | 1,80,55,0,0,19.1,0.258,21,0
92 | 4,123,80,15,176,32.0,0.443,34,0
93 | 7,81,78,40,48,46.7,0.261,42,0
94 | 4,134,72,0,0,23.8,0.277,60,1
95 | 2,142,82,18,64,24.7,0.761,21,0
96 | 6,144,72,27,228,33.9,0.255,40,0
97 | 2,92,62,28,0,31.6,0.130,24,0
98 | 1,71,48,18,76,20.4,0.323,22,0
99 | 6,93,50,30,64,28.7,0.356,23,0
100 | 1,122,90,51,220,49.7,0.325,31,1
101 | 1,163,72,0,0,39.0,1.222,33,1
102 | 1,151,60,0,0,26.1,0.179,22,0
103 | 0,125,96,0,0,22.5,0.262,21,0
104 | 1,81,72,18,40,26.6,0.283,24,0
105 | 2,85,65,0,0,39.6,0.930,27,0
106 | 1,126,56,29,152,28.7,0.801,21,0
107 | 1,96,122,0,0,22.4,0.207,27,0
108 | 4,144,58,28,140,29.5,0.287,37,0
109 | 3,83,58,31,18,34.3,0.336,25,0
110 | 0,95,85,25,36,37.4,0.247,24,1
111 | 3,171,72,33,135,33.3,0.199,24,1
112 | 8,155,62,26,495,34.0,0.543,46,1
113 | 1,89,76,34,37,31.2,0.192,23,0
114 | 4,76,62,0,0,34.0,0.391,25,0
115 | 7,160,54,32,175,30.5,0.588,39,1
116 | 4,146,92,0,0,31.2,0.539,61,1
117 | 5,124,74,0,0,34.0,0.220,38,1
118 | 5,78,48,0,0,33.7,0.654,25,0
119 | 4,97,60,23,0,28.2,0.443,22,0
120 | 4,99,76,15,51,23.2,0.223,21,0
121 | 0,162,76,56,100,53.2,0.759,25,1
122 | 6,111,64,39,0,34.2,0.260,24,0
123 | 2,107,74,30,100,33.6,0.404,23,0
124 | 5,132,80,0,0,26.8,0.186,69,0
125 | 0,113,76,0,0,33.3,0.278,23,1
126 | 1,88,30,42,99,55.0,0.496,26,1
127 | 3,120,70,30,135,42.9,0.452,30,0
128 | 1,118,58,36,94,33.3,0.261,23,0
129 | 1,117,88,24,145,34.5,0.403,40,1
130 | 0,105,84,0,0,27.9,0.741,62,1
131 | 4,173,70,14,168,29.7,0.361,33,1
132 | 9,122,56,0,0,33.3,1.114,33,1
133 | 3,170,64,37,225,34.5,0.356,30,1
134 | 8,84,74,31,0,38.3,0.457,39,0
135 | 2,96,68,13,49,21.1,0.647,26,0
136 | 2,125,60,20,140,33.8,0.088,31,0
137 | 0,100,70,26,50,30.8,0.597,21,0
138 | 0,93,60,25,92,28.7,0.532,22,0
139 | 0,129,80,0,0,31.2,0.703,29,0
140 | 5,105,72,29,325,36.9,0.159,28,0
141 | 3,128,78,0,0,21.1,0.268,55,0
142 | 5,106,82,30,0,39.5,0.286,38,0
143 | 2,108,52,26,63,32.5,0.318,22,0
144 | 10,108,66,0,0,32.4,0.272,42,1
145 | 4,154,62,31,284,32.8,0.237,23,0
146 | 0,102,75,23,0,0.0,0.572,21,0
147 | 9,57,80,37,0,32.8,0.096,41,0
148 | 2,106,64,35,119,30.5,1.400,34,0
149 | 5,147,78,0,0,33.7,0.218,65,0
150 | 2,90,70,17,0,27.3,0.085,22,0
151 | 1,136,74,50,204,37.4,0.399,24,0
152 | 4,114,65,0,0,21.9,0.432,37,0
153 | 9,156,86,28,155,34.3,1.189,42,1
154 | 1,153,82,42,485,40.6,0.687,23,0
155 | 8,188,78,0,0,47.9,0.137,43,1
156 | 7,152,88,44,0,50.0,0.337,36,1
157 | 2,99,52,15,94,24.6,0.637,21,0
158 | 1,109,56,21,135,25.2,0.833,23,0
159 | 2,88,74,19,53,29.0,0.229,22,0
160 | 17,163,72,41,114,40.9,0.817,47,1
161 | 4,151,90,38,0,29.7,0.294,36,0
162 | 7,102,74,40,105,37.2,0.204,45,0
163 | 0,114,80,34,285,44.2,0.167,27,0
164 | 2,100,64,23,0,29.7,0.368,21,0
165 | 0,131,88,0,0,31.6,0.743,32,1
166 | 6,104,74,18,156,29.9,0.722,41,1
167 | 3,148,66,25,0,32.5,0.256,22,0
168 | 4,120,68,0,0,29.6,0.709,34,0
169 | 4,110,66,0,0,31.9,0.471,29,0
170 | 3,111,90,12,78,28.4,0.495,29,0
171 | 6,102,82,0,0,30.8,0.180,36,1
172 | 6,134,70,23,130,35.4,0.542,29,1
173 | 2,87,0,23,0,28.9,0.773,25,0
174 | 1,79,60,42,48,43.5,0.678,23,0
175 | 2,75,64,24,55,29.7,0.370,33,0
176 | 8,179,72,42,130,32.7,0.719,36,1
177 | 6,85,78,0,0,31.2,0.382,42,0
178 | 0,129,110,46,130,67.1,0.319,26,1
179 | 5,143,78,0,0,45.0,0.190,47,0
180 | 5,130,82,0,0,39.1,0.956,37,1
181 | 6,87,80,0,0,23.2,0.084,32,0
182 | 0,119,64,18,92,34.9,0.725,23,0
183 | 1,0,74,20,23,27.7,0.299,21,0
184 | 5,73,60,0,0,26.8,0.268,27,0
185 | 4,141,74,0,0,27.6,0.244,40,0
186 | 7,194,68,28,0,35.9,0.745,41,1
187 | 8,181,68,36,495,30.1,0.615,60,1
188 | 1,128,98,41,58,32.0,1.321,33,1
189 | 8,109,76,39,114,27.9,0.640,31,1
190 | 5,139,80,35,160,31.6,0.361,25,1
191 | 3,111,62,0,0,22.6,0.142,21,0
192 | 9,123,70,44,94,33.1,0.374,40,0
193 | 7,159,66,0,0,30.4,0.383,36,1
194 | 11,135,0,0,0,52.3,0.578,40,1
195 | 8,85,55,20,0,24.4,0.136,42,0
196 | 5,158,84,41,210,39.4,0.395,29,1
197 | 1,105,58,0,0,24.3,0.187,21,0
198 | 3,107,62,13,48,22.9,0.678,23,1
199 | 4,109,64,44,99,34.8,0.905,26,1
200 | 4,148,60,27,318,30.9,0.150,29,1
201 | 0,113,80,16,0,31.0,0.874,21,0
202 | 1,138,82,0,0,40.1,0.236,28,0
203 | 0,108,68,20,0,27.3,0.787,32,0
204 | 2,99,70,16,44,20.4,0.235,27,0
205 | 6,103,72,32,190,37.7,0.324,55,0
206 | 5,111,72,28,0,23.9,0.407,27,0
207 | 8,196,76,29,280,37.5,0.605,57,1
208 | 5,162,104,0,0,37.7,0.151,52,1
209 | 1,96,64,27,87,33.2,0.289,21,0
210 | 7,184,84,33,0,35.5,0.355,41,1
211 | 2,81,60,22,0,27.7,0.290,25,0
212 | 0,147,85,54,0,42.8,0.375,24,0
213 | 7,179,95,31,0,34.2,0.164,60,0
214 | 0,140,65,26,130,42.6,0.431,24,1
215 | 9,112,82,32,175,34.2,0.260,36,1
216 | 12,151,70,40,271,41.8,0.742,38,1
217 | 5,109,62,41,129,35.8,0.514,25,1
218 | 6,125,68,30,120,30.0,0.464,32,0
219 | 5,85,74,22,0,29.0,1.224,32,1
220 | 5,112,66,0,0,37.8,0.261,41,1
221 | 0,177,60,29,478,34.6,1.072,21,1
222 | 2,158,90,0,0,31.6,0.805,66,1
223 | 7,119,0,0,0,25.2,0.209,37,0
224 | 7,142,60,33,190,28.8,0.687,61,0
225 | 1,100,66,15,56,23.6,0.666,26,0
226 | 1,87,78,27,32,34.6,0.101,22,0
227 | 0,101,76,0,0,35.7,0.198,26,0
228 | 3,162,52,38,0,37.2,0.652,24,1
229 | 4,197,70,39,744,36.7,2.329,31,0
230 | 0,117,80,31,53,45.2,0.089,24,0
231 | 4,142,86,0,0,44.0,0.645,22,1
232 | 6,134,80,37,370,46.2,0.238,46,1
233 | 1,79,80,25,37,25.4,0.583,22,0
234 | 4,122,68,0,0,35.0,0.394,29,0
235 | 3,74,68,28,45,29.7,0.293,23,0
236 | 4,171,72,0,0,43.6,0.479,26,1
237 | 7,181,84,21,192,35.9,0.586,51,1
238 | 0,179,90,27,0,44.1,0.686,23,1
239 | 9,164,84,21,0,30.8,0.831,32,1
240 | 0,104,76,0,0,18.4,0.582,27,0
241 | 1,91,64,24,0,29.2,0.192,21,0
242 | 4,91,70,32,88,33.1,0.446,22,0
243 | 3,139,54,0,0,25.6,0.402,22,1
244 | 6,119,50,22,176,27.1,1.318,33,1
245 | 2,146,76,35,194,38.2,0.329,29,0
246 | 9,184,85,15,0,30.0,1.213,49,1
247 | 10,122,68,0,0,31.2,0.258,41,0
248 | 0,165,90,33,680,52.3,0.427,23,0
249 | 9,124,70,33,402,35.4,0.282,34,0
250 | 1,111,86,19,0,30.1,0.143,23,0
251 | 9,106,52,0,0,31.2,0.380,42,0
252 | 2,129,84,0,0,28.0,0.284,27,0
253 | 2,90,80,14,55,24.4,0.249,24,0
254 | 0,86,68,32,0,35.8,0.238,25,0
255 | 12,92,62,7,258,27.6,0.926,44,1
256 | 1,113,64,35,0,33.6,0.543,21,1
257 | 3,111,56,39,0,30.1,0.557,30,0
258 | 2,114,68,22,0,28.7,0.092,25,0
259 | 1,193,50,16,375,25.9,0.655,24,0
260 | 11,155,76,28,150,33.3,1.353,51,1
261 | 3,191,68,15,130,30.9,0.299,34,0
262 | 3,141,0,0,0,30.0,0.761,27,1
263 | 4,95,70,32,0,32.1,0.612,24,0
264 | 3,142,80,15,0,32.4,0.200,63,0
265 | 4,123,62,0,0,32.0,0.226,35,1
266 | 5,96,74,18,67,33.6,0.997,43,0
267 | 0,138,0,0,0,36.3,0.933,25,1
268 | 2,128,64,42,0,40.0,1.101,24,0
269 | 0,102,52,0,0,25.1,0.078,21,0
270 | 2,146,0,0,0,27.5,0.240,28,1
271 | 10,101,86,37,0,45.6,1.136,38,1
272 | 2,108,62,32,56,25.2,0.128,21,0
273 | 3,122,78,0,0,23.0,0.254,40,0
274 | 1,71,78,50,45,33.2,0.422,21,0
275 | 13,106,70,0,0,34.2,0.251,52,0
276 | 2,100,70,52,57,40.5,0.677,25,0
277 | 7,106,60,24,0,26.5,0.296,29,1
278 | 0,104,64,23,116,27.8,0.454,23,0
279 | 5,114,74,0,0,24.9,0.744,57,0
280 | 2,108,62,10,278,25.3,0.881,22,0
281 | 0,146,70,0,0,37.9,0.334,28,1
282 | 10,129,76,28,122,35.9,0.280,39,0
283 | 7,133,88,15,155,32.4,0.262,37,0
284 | 7,161,86,0,0,30.4,0.165,47,1
285 | 2,108,80,0,0,27.0,0.259,52,1
286 | 7,136,74,26,135,26.0,0.647,51,0
287 | 5,155,84,44,545,38.7,0.619,34,0
288 | 1,119,86,39,220,45.6,0.808,29,1
289 | 4,96,56,17,49,20.8,0.340,26,0
290 | 5,108,72,43,75,36.1,0.263,33,0
291 | 0,78,88,29,40,36.9,0.434,21,0
292 | 0,107,62,30,74,36.6,0.757,25,1
293 | 2,128,78,37,182,43.3,1.224,31,1
294 | 1,128,48,45,194,40.5,0.613,24,1
295 | 0,161,50,0,0,21.9,0.254,65,0
296 | 6,151,62,31,120,35.5,0.692,28,0
297 | 2,146,70,38,360,28.0,0.337,29,1
298 | 0,126,84,29,215,30.7,0.520,24,0
299 | 14,100,78,25,184,36.6,0.412,46,1
300 | 8,112,72,0,0,23.6,0.840,58,0
301 | 0,167,0,0,0,32.3,0.839,30,1
302 | 2,144,58,33,135,31.6,0.422,25,1
303 | 5,77,82,41,42,35.8,0.156,35,0
304 | 5,115,98,0,0,52.9,0.209,28,1
305 | 3,150,76,0,0,21.0,0.207,37,0
306 | 2,120,76,37,105,39.7,0.215,29,0
307 | 10,161,68,23,132,25.5,0.326,47,1
308 | 0,137,68,14,148,24.8,0.143,21,0
309 | 0,128,68,19,180,30.5,1.391,25,1
310 | 2,124,68,28,205,32.9,0.875,30,1
311 | 6,80,66,30,0,26.2,0.313,41,0
312 | 0,106,70,37,148,39.4,0.605,22,0
313 | 2,155,74,17,96,26.6,0.433,27,1
314 | 3,113,50,10,85,29.5,0.626,25,0
315 | 7,109,80,31,0,35.9,1.127,43,1
316 | 2,112,68,22,94,34.1,0.315,26,0
317 | 3,99,80,11,64,19.3,0.284,30,0
318 | 3,182,74,0,0,30.5,0.345,29,1
319 | 3,115,66,39,140,38.1,0.150,28,0
320 | 6,194,78,0,0,23.5,0.129,59,1
321 | 4,129,60,12,231,27.5,0.527,31,0
322 | 3,112,74,30,0,31.6,0.197,25,1
323 | 0,124,70,20,0,27.4,0.254,36,1
324 | 13,152,90,33,29,26.8,0.731,43,1
325 | 2,112,75,32,0,35.7,0.148,21,0
326 | 1,157,72,21,168,25.6,0.123,24,0
327 | 1,122,64,32,156,35.1,0.692,30,1
328 | 10,179,70,0,0,35.1,0.200,37,0
329 | 2,102,86,36,120,45.5,0.127,23,1
330 | 6,105,70,32,68,30.8,0.122,37,0
331 | 8,118,72,19,0,23.1,1.476,46,0
332 | 2,87,58,16,52,32.7,0.166,25,0
333 | 1,180,0,0,0,43.3,0.282,41,1
334 | 12,106,80,0,0,23.6,0.137,44,0
335 | 1,95,60,18,58,23.9,0.260,22,0
336 | 0,165,76,43,255,47.9,0.259,26,0
337 | 0,117,0,0,0,33.8,0.932,44,0
338 | 5,115,76,0,0,31.2,0.343,44,1
339 | 9,152,78,34,171,34.2,0.893,33,1
340 | 7,178,84,0,0,39.9,0.331,41,1
341 | 1,130,70,13,105,25.9,0.472,22,0
342 | 1,95,74,21,73,25.9,0.673,36,0
343 | 1,0,68,35,0,32.0,0.389,22,0
344 | 5,122,86,0,0,34.7,0.290,33,0
345 | 8,95,72,0,0,36.8,0.485,57,0
346 | 8,126,88,36,108,38.5,0.349,49,0
347 | 1,139,46,19,83,28.7,0.654,22,0
348 | 3,116,0,0,0,23.5,0.187,23,0
349 | 3,99,62,19,74,21.8,0.279,26,0
350 | 5,0,80,32,0,41.0,0.346,37,1
351 | 4,92,80,0,0,42.2,0.237,29,0
352 | 4,137,84,0,0,31.2,0.252,30,0
353 | 3,61,82,28,0,34.4,0.243,46,0
354 | 1,90,62,12,43,27.2,0.580,24,0
355 | 3,90,78,0,0,42.7,0.559,21,0
356 | 9,165,88,0,0,30.4,0.302,49,1
357 | 1,125,50,40,167,33.3,0.962,28,1
358 | 13,129,0,30,0,39.9,0.569,44,1
359 | 12,88,74,40,54,35.3,0.378,48,0
360 | 1,196,76,36,249,36.5,0.875,29,1
361 | 5,189,64,33,325,31.2,0.583,29,1
362 | 5,158,70,0,0,29.8,0.207,63,0
363 | 5,103,108,37,0,39.2,0.305,65,0
364 | 4,146,78,0,0,38.5,0.520,67,1
365 | 4,147,74,25,293,34.9,0.385,30,0
366 | 5,99,54,28,83,34.0,0.499,30,0
367 | 6,124,72,0,0,27.6,0.368,29,1
368 | 0,101,64,17,0,21.0,0.252,21,0
369 | 3,81,86,16,66,27.5,0.306,22,0
370 | 1,133,102,28,140,32.8,0.234,45,1
371 | 3,173,82,48,465,38.4,2.137,25,1
372 | 0,118,64,23,89,0.0,1.731,21,0
373 | 0,84,64,22,66,35.8,0.545,21,0
374 | 2,105,58,40,94,34.9,0.225,25,0
375 | 2,122,52,43,158,36.2,0.816,28,0
376 | 12,140,82,43,325,39.2,0.528,58,1
377 | 0,98,82,15,84,25.2,0.299,22,0
378 | 1,87,60,37,75,37.2,0.509,22,0
379 | 4,156,75,0,0,48.3,0.238,32,1
380 | 0,93,100,39,72,43.4,1.021,35,0
381 | 1,107,72,30,82,30.8,0.821,24,0
382 | 0,105,68,22,0,20.0,0.236,22,0
383 | 1,109,60,8,182,25.4,0.947,21,0
384 | 1,90,62,18,59,25.1,1.268,25,0
385 | 1,125,70,24,110,24.3,0.221,25,0
386 | 1,119,54,13,50,22.3,0.205,24,0
387 | 5,116,74,29,0,32.3,0.660,35,1
388 | 8,105,100,36,0,43.3,0.239,45,1
389 | 5,144,82,26,285,32.0,0.452,58,1
390 | 3,100,68,23,81,31.6,0.949,28,0
391 | 1,100,66,29,196,32.0,0.444,42,0
392 | 5,166,76,0,0,45.7,0.340,27,1
393 | 1,131,64,14,415,23.7,0.389,21,0
394 | 4,116,72,12,87,22.1,0.463,37,0
395 | 4,158,78,0,0,32.9,0.803,31,1
396 | 2,127,58,24,275,27.7,1.600,25,0
397 | 3,96,56,34,115,24.7,0.944,39,0
398 | 0,131,66,40,0,34.3,0.196,22,1
399 | 3,82,70,0,0,21.1,0.389,25,0
400 | 3,193,70,31,0,34.9,0.241,25,1
401 | 4,95,64,0,0,32.0,0.161,31,1
402 | 6,137,61,0,0,24.2,0.151,55,0
403 | 5,136,84,41,88,35.0,0.286,35,1
404 | 9,72,78,25,0,31.6,0.280,38,0
405 | 5,168,64,0,0,32.9,0.135,41,1
406 | 2,123,48,32,165,42.1,0.520,26,0
407 | 4,115,72,0,0,28.9,0.376,46,1
408 | 0,101,62,0,0,21.9,0.336,25,0
409 | 8,197,74,0,0,25.9,1.191,39,1
410 | 1,172,68,49,579,42.4,0.702,28,1
411 | 6,102,90,39,0,35.7,0.674,28,0
412 | 1,112,72,30,176,34.4,0.528,25,0
413 | 1,143,84,23,310,42.4,1.076,22,0
414 | 1,143,74,22,61,26.2,0.256,21,0
415 | 0,138,60,35,167,34.6,0.534,21,1
416 | 3,173,84,33,474,35.7,0.258,22,1
417 | 1,97,68,21,0,27.2,1.095,22,0
418 | 4,144,82,32,0,38.5,0.554,37,1
419 | 1,83,68,0,0,18.2,0.624,27,0
420 | 3,129,64,29,115,26.4,0.219,28,1
421 | 1,119,88,41,170,45.3,0.507,26,0
422 | 2,94,68,18,76,26.0,0.561,21,0
423 | 0,102,64,46,78,40.6,0.496,21,0
424 | 2,115,64,22,0,30.8,0.421,21,0
425 | 8,151,78,32,210,42.9,0.516,36,1
426 | 4,184,78,39,277,37.0,0.264,31,1
427 | 0,94,0,0,0,0.0,0.256,25,0
428 | 1,181,64,30,180,34.1,0.328,38,1
429 | 0,135,94,46,145,40.6,0.284,26,0
430 | 1,95,82,25,180,35.0,0.233,43,1
431 | 2,99,0,0,0,22.2,0.108,23,0
432 | 3,89,74,16,85,30.4,0.551,38,0
433 | 1,80,74,11,60,30.0,0.527,22,0
434 | 2,139,75,0,0,25.6,0.167,29,0
435 | 1,90,68,8,0,24.5,1.138,36,0
436 | 0,141,0,0,0,42.4,0.205,29,1
437 | 12,140,85,33,0,37.4,0.244,41,0
438 | 5,147,75,0,0,29.9,0.434,28,0
439 | 1,97,70,15,0,18.2,0.147,21,0
440 | 6,107,88,0,0,36.8,0.727,31,0
441 | 0,189,104,25,0,34.3,0.435,41,1
442 | 2,83,66,23,50,32.2,0.497,22,0
443 | 4,117,64,27,120,33.2,0.230,24,0
444 | 8,108,70,0,0,30.5,0.955,33,1
445 | 4,117,62,12,0,29.7,0.380,30,1
446 | 0,180,78,63,14,59.4,2.420,25,1
447 | 1,100,72,12,70,25.3,0.658,28,0
448 | 0,95,80,45,92,36.5,0.330,26,0
449 | 0,104,64,37,64,33.6,0.510,22,1
450 | 0,120,74,18,63,30.5,0.285,26,0
451 | 1,82,64,13,95,21.2,0.415,23,0
452 | 2,134,70,0,0,28.9,0.542,23,1
453 | 0,91,68,32,210,39.9,0.381,25,0
454 | 2,119,0,0,0,19.6,0.832,72,0
455 | 2,100,54,28,105,37.8,0.498,24,0
456 | 14,175,62,30,0,33.6,0.212,38,1
457 | 1,135,54,0,0,26.7,0.687,62,0
458 | 5,86,68,28,71,30.2,0.364,24,0
459 | 10,148,84,48,237,37.6,1.001,51,1
460 | 9,134,74,33,60,25.9,0.460,81,0
461 | 9,120,72,22,56,20.8,0.733,48,0
462 | 1,71,62,0,0,21.8,0.416,26,0
463 | 8,74,70,40,49,35.3,0.705,39,0
464 | 5,88,78,30,0,27.6,0.258,37,0
465 | 10,115,98,0,0,24.0,1.022,34,0
466 | 0,124,56,13,105,21.8,0.452,21,0
467 | 0,74,52,10,36,27.8,0.269,22,0
468 | 0,97,64,36,100,36.8,0.600,25,0
469 | 8,120,0,0,0,30.0,0.183,38,1
470 | 6,154,78,41,140,46.1,0.571,27,0
471 | 1,144,82,40,0,41.3,0.607,28,0
472 | 0,137,70,38,0,33.2,0.170,22,0
473 | 0,119,66,27,0,38.8,0.259,22,0
474 | 7,136,90,0,0,29.9,0.210,50,0
475 | 4,114,64,0,0,28.9,0.126,24,0
476 | 0,137,84,27,0,27.3,0.231,59,0
477 | 2,105,80,45,191,33.7,0.711,29,1
478 | 7,114,76,17,110,23.8,0.466,31,0
479 | 8,126,74,38,75,25.9,0.162,39,0
480 | 4,132,86,31,0,28.0,0.419,63,0
481 | 3,158,70,30,328,35.5,0.344,35,1
482 | 0,123,88,37,0,35.2,0.197,29,0
483 | 4,85,58,22,49,27.8,0.306,28,0
484 | 0,84,82,31,125,38.2,0.233,23,0
485 | 0,145,0,0,0,44.2,0.630,31,1
486 | 0,135,68,42,250,42.3,0.365,24,1
487 | 1,139,62,41,480,40.7,0.536,21,0
488 | 0,173,78,32,265,46.5,1.159,58,0
489 | 4,99,72,17,0,25.6,0.294,28,0
490 | 8,194,80,0,0,26.1,0.551,67,0
491 | 2,83,65,28,66,36.8,0.629,24,0
492 | 2,89,90,30,0,33.5,0.292,42,0
493 | 4,99,68,38,0,32.8,0.145,33,0
494 | 4,125,70,18,122,28.9,1.144,45,1
495 | 3,80,0,0,0,0.0,0.174,22,0
496 | 6,166,74,0,0,26.6,0.304,66,0
497 | 5,110,68,0,0,26.0,0.292,30,0
498 | 2,81,72,15,76,30.1,0.547,25,0
499 | 7,195,70,33,145,25.1,0.163,55,1
500 | 6,154,74,32,193,29.3,0.839,39,0
501 | 2,117,90,19,71,25.2,0.313,21,0
502 | 3,84,72,32,0,37.2,0.267,28,0
503 | 6,0,68,41,0,39.0,0.727,41,1
504 | 7,94,64,25,79,33.3,0.738,41,0
505 | 3,96,78,39,0,37.3,0.238,40,0
506 | 10,75,82,0,0,33.3,0.263,38,0
507 | 0,180,90,26,90,36.5,0.314,35,1
508 | 1,130,60,23,170,28.6,0.692,21,0
509 | 2,84,50,23,76,30.4,0.968,21,0
510 | 8,120,78,0,0,25.0,0.409,64,0
511 | 12,84,72,31,0,29.7,0.297,46,1
512 | 0,139,62,17,210,22.1,0.207,21,0
513 | 9,91,68,0,0,24.2,0.200,58,0
514 | 2,91,62,0,0,27.3,0.525,22,0
515 | 3,99,54,19,86,25.6,0.154,24,0
516 | 3,163,70,18,105,31.6,0.268,28,1
517 | 9,145,88,34,165,30.3,0.771,53,1
518 | 7,125,86,0,0,37.6,0.304,51,0
519 | 13,76,60,0,0,32.8,0.180,41,0
520 | 6,129,90,7,326,19.6,0.582,60,0
521 | 2,68,70,32,66,25.0,0.187,25,0
522 | 3,124,80,33,130,33.2,0.305,26,0
523 | 6,114,0,0,0,0.0,0.189,26,0
524 | 9,130,70,0,0,34.2,0.652,45,1
525 | 3,125,58,0,0,31.6,0.151,24,0
526 | 3,87,60,18,0,21.8,0.444,21,0
527 | 1,97,64,19,82,18.2,0.299,21,0
528 | 3,116,74,15,105,26.3,0.107,24,0
529 | 0,117,66,31,188,30.8,0.493,22,0
530 | 0,111,65,0,0,24.6,0.660,31,0
531 | 2,122,60,18,106,29.8,0.717,22,0
532 | 0,107,76,0,0,45.3,0.686,24,0
533 | 1,86,66,52,65,41.3,0.917,29,0
534 | 6,91,0,0,0,29.8,0.501,31,0
535 | 1,77,56,30,56,33.3,1.251,24,0
536 | 4,132,0,0,0,32.9,0.302,23,1
537 | 0,105,90,0,0,29.6,0.197,46,0
538 | 0,57,60,0,0,21.7,0.735,67,0
539 | 0,127,80,37,210,36.3,0.804,23,0
540 | 3,129,92,49,155,36.4,0.968,32,1
541 | 8,100,74,40,215,39.4,0.661,43,1
542 | 3,128,72,25,190,32.4,0.549,27,1
543 | 10,90,85,32,0,34.9,0.825,56,1
544 | 4,84,90,23,56,39.5,0.159,25,0
545 | 1,88,78,29,76,32.0,0.365,29,0
546 | 8,186,90,35,225,34.5,0.423,37,1
547 | 5,187,76,27,207,43.6,1.034,53,1
548 | 4,131,68,21,166,33.1,0.160,28,0
549 | 1,164,82,43,67,32.8,0.341,50,0
550 | 4,189,110,31,0,28.5,0.680,37,0
551 | 1,116,70,28,0,27.4,0.204,21,0
552 | 3,84,68,30,106,31.9,0.591,25,0
553 | 6,114,88,0,0,27.8,0.247,66,0
554 | 1,88,62,24,44,29.9,0.422,23,0
555 | 1,84,64,23,115,36.9,0.471,28,0
556 | 7,124,70,33,215,25.5,0.161,37,0
557 | 1,97,70,40,0,38.1,0.218,30,0
558 | 8,110,76,0,0,27.8,0.237,58,0
559 | 11,103,68,40,0,46.2,0.126,42,0
560 | 11,85,74,0,0,30.1,0.300,35,0
561 | 6,125,76,0,0,33.8,0.121,54,1
562 | 0,198,66,32,274,41.3,0.502,28,1
563 | 1,87,68,34,77,37.6,0.401,24,0
564 | 6,99,60,19,54,26.9,0.497,32,0
565 | 0,91,80,0,0,32.4,0.601,27,0
566 | 2,95,54,14,88,26.1,0.748,22,0
567 | 1,99,72,30,18,38.6,0.412,21,0
568 | 6,92,62,32,126,32.0,0.085,46,0
569 | 4,154,72,29,126,31.3,0.338,37,0
570 | 0,121,66,30,165,34.3,0.203,33,1
571 | 3,78,70,0,0,32.5,0.270,39,0
572 | 2,130,96,0,0,22.6,0.268,21,0
573 | 3,111,58,31,44,29.5,0.430,22,0
574 | 2,98,60,17,120,34.7,0.198,22,0
575 | 1,143,86,30,330,30.1,0.892,23,0
576 | 1,119,44,47,63,35.5,0.280,25,0
577 | 6,108,44,20,130,24.0,0.813,35,0
578 | 2,118,80,0,0,42.9,0.693,21,1
579 | 10,133,68,0,0,27.0,0.245,36,0
580 | 2,197,70,99,0,34.7,0.575,62,1
581 | 0,151,90,46,0,42.1,0.371,21,1
582 | 6,109,60,27,0,25.0,0.206,27,0
583 | 12,121,78,17,0,26.5,0.259,62,0
584 | 8,100,76,0,0,38.7,0.190,42,0
585 | 8,124,76,24,600,28.7,0.687,52,1
586 | 1,93,56,11,0,22.5,0.417,22,0
587 | 8,143,66,0,0,34.9,0.129,41,1
588 | 6,103,66,0,0,24.3,0.249,29,0
589 | 3,176,86,27,156,33.3,1.154,52,1
590 | 0,73,0,0,0,21.1,0.342,25,0
591 | 11,111,84,40,0,46.8,0.925,45,1
592 | 2,112,78,50,140,39.4,0.175,24,0
593 | 3,132,80,0,0,34.4,0.402,44,1
594 | 2,82,52,22,115,28.5,1.699,25,0
595 | 6,123,72,45,230,33.6,0.733,34,0
596 | 0,188,82,14,185,32.0,0.682,22,1
597 | 0,67,76,0,0,45.3,0.194,46,0
598 | 1,89,24,19,25,27.8,0.559,21,0
599 | 1,173,74,0,0,36.8,0.088,38,1
600 | 1,109,38,18,120,23.1,0.407,26,0
601 | 1,108,88,19,0,27.1,0.400,24,0
602 | 6,96,0,0,0,23.7,0.190,28,0
603 | 1,124,74,36,0,27.8,0.100,30,0
604 | 7,150,78,29,126,35.2,0.692,54,1
605 | 4,183,0,0,0,28.4,0.212,36,1
606 | 1,124,60,32,0,35.8,0.514,21,0
607 | 1,181,78,42,293,40.0,1.258,22,1
608 | 1,92,62,25,41,19.5,0.482,25,0
609 | 0,152,82,39,272,41.5,0.270,27,0
610 | 1,111,62,13,182,24.0,0.138,23,0
611 | 3,106,54,21,158,30.9,0.292,24,0
612 | 3,174,58,22,194,32.9,0.593,36,1
613 | 7,168,88,42,321,38.2,0.787,40,1
614 | 6,105,80,28,0,32.5,0.878,26,0
615 | 11,138,74,26,144,36.1,0.557,50,1
616 | 3,106,72,0,0,25.8,0.207,27,0
617 | 6,117,96,0,0,28.7,0.157,30,0
618 | 2,68,62,13,15,20.1,0.257,23,0
619 | 9,112,82,24,0,28.2,1.282,50,1
620 | 0,119,0,0,0,32.4,0.141,24,1
621 | 2,112,86,42,160,38.4,0.246,28,0
622 | 2,92,76,20,0,24.2,1.698,28,0
623 | 6,183,94,0,0,40.8,1.461,45,0
624 | 0,94,70,27,115,43.5,0.347,21,0
625 | 2,108,64,0,0,30.8,0.158,21,0
626 | 4,90,88,47,54,37.7,0.362,29,0
627 | 0,125,68,0,0,24.7,0.206,21,0
628 | 0,132,78,0,0,32.4,0.393,21,0
629 | 5,128,80,0,0,34.6,0.144,45,0
630 | 4,94,65,22,0,24.7,0.148,21,0
631 | 7,114,64,0,0,27.4,0.732,34,1
632 | 0,102,78,40,90,34.5,0.238,24,0
633 | 2,111,60,0,0,26.2,0.343,23,0
634 | 1,128,82,17,183,27.5,0.115,22,0
635 | 10,92,62,0,0,25.9,0.167,31,0
636 | 13,104,72,0,0,31.2,0.465,38,1
637 | 5,104,74,0,0,28.8,0.153,48,0
638 | 2,94,76,18,66,31.6,0.649,23,0
639 | 7,97,76,32,91,40.9,0.871,32,1
640 | 1,100,74,12,46,19.5,0.149,28,0
641 | 0,102,86,17,105,29.3,0.695,27,0
642 | 4,128,70,0,0,34.3,0.303,24,0
643 | 6,147,80,0,0,29.5,0.178,50,1
644 | 4,90,0,0,0,28.0,0.610,31,0
645 | 3,103,72,30,152,27.6,0.730,27,0
646 | 2,157,74,35,440,39.4,0.134,30,0
647 | 1,167,74,17,144,23.4,0.447,33,1
648 | 0,179,50,36,159,37.8,0.455,22,1
649 | 11,136,84,35,130,28.3,0.260,42,1
650 | 0,107,60,25,0,26.4,0.133,23,0
651 | 1,91,54,25,100,25.2,0.234,23,0
652 | 1,117,60,23,106,33.8,0.466,27,0
653 | 5,123,74,40,77,34.1,0.269,28,0
654 | 2,120,54,0,0,26.8,0.455,27,0
655 | 1,106,70,28,135,34.2,0.142,22,0
656 | 2,155,52,27,540,38.7,0.240,25,1
657 | 2,101,58,35,90,21.8,0.155,22,0
658 | 1,120,80,48,200,38.9,1.162,41,0
659 | 11,127,106,0,0,39.0,0.190,51,0
660 | 3,80,82,31,70,34.2,1.292,27,1
661 | 10,162,84,0,0,27.7,0.182,54,0
662 | 1,199,76,43,0,42.9,1.394,22,1
663 | 8,167,106,46,231,37.6,0.165,43,1
664 | 9,145,80,46,130,37.9,0.637,40,1
665 | 6,115,60,39,0,33.7,0.245,40,1
666 | 1,112,80,45,132,34.8,0.217,24,0
667 | 4,145,82,18,0,32.5,0.235,70,1
668 | 10,111,70,27,0,27.5,0.141,40,1
669 | 6,98,58,33,190,34.0,0.430,43,0
670 | 9,154,78,30,100,30.9,0.164,45,0
671 | 6,165,68,26,168,33.6,0.631,49,0
672 | 1,99,58,10,0,25.4,0.551,21,0
673 | 10,68,106,23,49,35.5,0.285,47,0
674 | 3,123,100,35,240,57.3,0.880,22,0
675 | 8,91,82,0,0,35.6,0.587,68,0
676 | 6,195,70,0,0,30.9,0.328,31,1
677 | 9,156,86,0,0,24.8,0.230,53,1
678 | 0,93,60,0,0,35.3,0.263,25,0
679 | 3,121,52,0,0,36.0,0.127,25,1
680 | 2,101,58,17,265,24.2,0.614,23,0
681 | 2,56,56,28,45,24.2,0.332,22,0
682 | 0,162,76,36,0,49.6,0.364,26,1
683 | 0,95,64,39,105,44.6,0.366,22,0
684 | 4,125,80,0,0,32.3,0.536,27,1
685 | 5,136,82,0,0,0.0,0.640,69,0
686 | 2,129,74,26,205,33.2,0.591,25,0
687 | 3,130,64,0,0,23.1,0.314,22,0
688 | 1,107,50,19,0,28.3,0.181,29,0
689 | 1,140,74,26,180,24.1,0.828,23,0
690 | 1,144,82,46,180,46.1,0.335,46,1
691 | 8,107,80,0,0,24.6,0.856,34,0
692 | 13,158,114,0,0,42.3,0.257,44,1
693 | 2,121,70,32,95,39.1,0.886,23,0
694 | 7,129,68,49,125,38.5,0.439,43,1
695 | 2,90,60,0,0,23.5,0.191,25,0
696 | 7,142,90,24,480,30.4,0.128,43,1
697 | 3,169,74,19,125,29.9,0.268,31,1
698 | 0,99,0,0,0,25.0,0.253,22,0
699 | 4,127,88,11,155,34.5,0.598,28,0
700 | 4,118,70,0,0,44.5,0.904,26,0
701 | 2,122,76,27,200,35.9,0.483,26,0
702 | 6,125,78,31,0,27.6,0.565,49,1
703 | 1,168,88,29,0,35.0,0.905,52,1
704 | 2,129,0,0,0,38.5,0.304,41,0
705 | 4,110,76,20,100,28.4,0.118,27,0
706 | 6,80,80,36,0,39.8,0.177,28,0
707 | 10,115,0,0,0,0.0,0.261,30,1
708 | 2,127,46,21,335,34.4,0.176,22,0
709 | 9,164,78,0,0,32.8,0.148,45,1
710 | 2,93,64,32,160,38.0,0.674,23,1
711 | 3,158,64,13,387,31.2,0.295,24,0
712 | 5,126,78,27,22,29.6,0.439,40,0
713 | 10,129,62,36,0,41.2,0.441,38,1
714 | 0,134,58,20,291,26.4,0.352,21,0
715 | 3,102,74,0,0,29.5,0.121,32,0
716 | 7,187,50,33,392,33.9,0.826,34,1
717 | 3,173,78,39,185,33.8,0.970,31,1
718 | 10,94,72,18,0,23.1,0.595,56,0
719 | 1,108,60,46,178,35.5,0.415,24,0
720 | 5,97,76,27,0,35.6,0.378,52,1
721 | 4,83,86,19,0,29.3,0.317,34,0
722 | 1,114,66,36,200,38.1,0.289,21,0
723 | 1,149,68,29,127,29.3,0.349,42,1
724 | 5,117,86,30,105,39.1,0.251,42,0
725 | 1,111,94,0,0,32.8,0.265,45,0
726 | 4,112,78,40,0,39.4,0.236,38,0
727 | 1,116,78,29,180,36.1,0.496,25,0
728 | 0,141,84,26,0,32.4,0.433,22,0
729 | 2,175,88,0,0,22.9,0.326,22,0
730 | 2,92,52,0,0,30.1,0.141,22,0
731 | 3,130,78,23,79,28.4,0.323,34,1
732 | 8,120,86,0,0,28.4,0.259,22,1
733 | 2,174,88,37,120,44.5,0.646,24,1
734 | 2,106,56,27,165,29.0,0.426,22,0
735 | 2,105,75,0,0,23.3,0.560,53,0
736 | 4,95,60,32,0,35.4,0.284,28,0
737 | 0,126,86,27,120,27.4,0.515,21,0
738 | 8,65,72,23,0,32.0,0.600,42,0
739 | 2,99,60,17,160,36.6,0.453,21,0
740 | 1,102,74,0,0,39.5,0.293,42,1
741 | 11,120,80,37,150,42.3,0.785,48,1
742 | 3,102,44,20,94,30.8,0.400,26,0
743 | 1,109,58,18,116,28.5,0.219,22,0
744 | 9,140,94,0,0,32.7,0.734,45,1
745 | 13,153,88,37,140,40.6,1.174,39,0
746 | 12,100,84,33,105,30.0,0.488,46,0
747 | 1,147,94,41,0,49.3,0.358,27,1
748 | 1,81,74,41,57,46.3,1.096,32,0
749 | 3,187,70,22,200,36.4,0.408,36,1
750 | 6,162,62,0,0,24.3,0.178,50,1
751 | 4,136,70,0,0,31.2,1.182,22,1
752 | 1,121,78,39,74,39.0,0.261,28,0
753 | 3,108,62,24,0,26.0,0.223,25,0
754 | 0,181,88,44,510,43.3,0.222,26,1
755 | 8,154,78,32,0,32.4,0.443,45,1
756 | 1,128,88,39,110,36.5,1.057,37,1
757 | 7,137,90,41,0,32.0,0.391,39,0
758 | 0,123,72,0,0,36.3,0.258,52,1
759 | 1,106,76,0,0,37.5,0.197,26,0
760 | 6,190,92,0,0,35.5,0.278,66,1
761 | 2,88,58,26,16,28.4,0.766,22,0
762 | 9,170,74,31,0,44.0,0.403,43,1
763 | 9,89,62,0,0,22.5,0.142,33,0
764 | 10,101,76,48,180,32.9,0.171,63,0
765 | 2,122,70,27,0,36.8,0.340,27,0
766 | 5,121,72,23,112,26.2,0.245,30,0
767 | 1,126,60,0,0,30.1,0.349,47,1
768 | 1,93,70,31,0,30.4,0.315,23,0
769 |
--------------------------------------------------------------------------------
/dataset/spam.csv:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lsjsj92/keras_basic/afa26e7b830cab23c7ffb2e1806660070279a982/dataset/spam.csv
--------------------------------------------------------------------------------
/dataset/test.csv:
--------------------------------------------------------------------------------
1 | dog,강아지
2 | cat,고양이
3 | soojin,수진
4 | 가슴,가슴
5 | 슴가,가슴
6 | 부심,진리
7 | 물방울,수술
8 | 가슴,가슴
9 | soojin,수진
10 | jinso,진수
11 | sojin,수진
12 | dog,강아지
--------------------------------------------------------------------------------
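test.csv above is a 12-row toy file of English-to-Korean word pairs (dog / 강아지, cat / 고양이, plus a few name romanizations such as soojin / 수진), presumably scratch data for the seq2seq translation notebook. A hedged sketch of reading it into (source, target) pairs; the UTF-8 encoding is an assumption:

    import csv

    pairs = []
    with open('dataset/test.csv', encoding='utf-8') as f:
        for src, tgt in csv.reader(f):
            pairs.append((src, tgt))

    print(pairs[0])  # ('dog', '강아지')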
/keras_cpu_test.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "** 너무 오래걸려서 패스.... **\n",
8 | "\n",
9 | "gpu는 그냥 6~7초안에 끝나는데 얘는 무슨 몇분씩걸린다."
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": 2,
15 | "metadata": {},
16 | "outputs": [
17 | {
18 | "name": "stdout",
19 | "output_type": "stream",
20 | "text": [
21 | "x_train shape: (60000, 28, 28, 1)\n",
22 | "60000 train samples\n",
23 | "10000 test samples\n",
24 | "Train on 60000 samples, validate on 10000 samples\n",
25 | "Epoch 1/12\n",
26 | " 384/60000 [..............................] - ETA: 27:43 - loss: 2.2585 - acc: 0.1380"
27 | ]
28 | },
29 | {
30 | "ename": "KeyboardInterrupt",
31 | "evalue": "",
32 | "traceback": [
33 | "\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
34 | "\u001b[1;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
35 | "\u001b[1;32m\u001b[0m in \u001b[0;36m\u001b[1;34m()\u001b[0m\n\u001b[0;32m 68\u001b[0m \u001b[0mepochs\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0mepochs\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 69\u001b[0m \u001b[0mverbose\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m1\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m---> 70\u001b[1;33m validation_data=(x_test, y_test))\n\u001b[0m\u001b[0;32m 71\u001b[0m \u001b[0mscore\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mmodel\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mevaluate\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mx_test\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0my_test\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mverbose\u001b[0m\u001b[1;33m=\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 72\u001b[0m \u001b[0mprint\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;34m'Test loss:'\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mscore\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;36m0\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
36 | "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras\\engine\\training.py\u001b[0m in \u001b[0;36mfit\u001b[1;34m(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, steps_per_epoch, validation_steps, **kwargs)\u001b[0m\n\u001b[0;32m 1040\u001b[0m \u001b[0minitial_epoch\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0minitial_epoch\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1041\u001b[0m \u001b[0msteps_per_epoch\u001b[0m\u001b[1;33m=\u001b[0m\u001b[0msteps_per_epoch\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 1042\u001b[1;33m validation_steps=validation_steps)\n\u001b[0m\u001b[0;32m 1043\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1044\u001b[0m def evaluate(self, x=None, y=None,\n",
37 | "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras\\engine\\training_arrays.py\u001b[0m in \u001b[0;36mfit_loop\u001b[1;34m(model, f, ins, out_labels, batch_size, epochs, verbose, callbacks, val_f, val_ins, shuffle, callback_metrics, initial_epoch, steps_per_epoch, validation_steps)\u001b[0m\n\u001b[0;32m 197\u001b[0m \u001b[0mins_batch\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mi\u001b[0m\u001b[1;33m]\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mins_batch\u001b[0m\u001b[1;33m[\u001b[0m\u001b[0mi\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0mtoarray\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 198\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m--> 199\u001b[1;33m \u001b[0mouts\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mf\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mins_batch\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 200\u001b[0m \u001b[1;32mif\u001b[0m \u001b[1;32mnot\u001b[0m \u001b[0misinstance\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mouts\u001b[0m\u001b[1;33m,\u001b[0m \u001b[0mlist\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 201\u001b[0m \u001b[0mouts\u001b[0m \u001b[1;33m=\u001b[0m \u001b[1;33m[\u001b[0m\u001b[0mouts\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
38 | "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras\\backend\\tensorflow_backend.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, inputs)\u001b[0m\n\u001b[0;32m 2659\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_legacy_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2660\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n\u001b[1;32m-> 2661\u001b[1;33m \u001b[1;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_call\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 2662\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2663\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mpy_any\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mis_tensor\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mx\u001b[0m\u001b[1;33m)\u001b[0m \u001b[1;32mfor\u001b[0m \u001b[0mx\u001b[0m \u001b[1;32min\u001b[0m \u001b[0minputs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n",
39 | "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\keras\\backend\\tensorflow_backend.py\u001b[0m in \u001b[0;36m_call\u001b[1;34m(self, inputs)\u001b[0m\n\u001b[0;32m 2629\u001b[0m \u001b[0msymbol_vals\u001b[0m\u001b[1;33m,\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2630\u001b[0m session)\n\u001b[1;32m-> 2631\u001b[1;33m \u001b[0mfetched\u001b[0m \u001b[1;33m=\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_callable_fn\u001b[0m\u001b[1;33m(\u001b[0m\u001b[1;33m*\u001b[0m\u001b[0marray_vals\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0m\u001b[0;32m 2632\u001b[0m \u001b[1;32mreturn\u001b[0m \u001b[0mfetched\u001b[0m\u001b[1;33m[\u001b[0m\u001b[1;33m:\u001b[0m\u001b[0mlen\u001b[0m\u001b[1;33m(\u001b[0m\u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0moutputs\u001b[0m\u001b[1;33m)\u001b[0m\u001b[1;33m]\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 2633\u001b[0m \u001b[1;33m\u001b[0m\u001b[0m\n",
40 | "\u001b[1;32m~\\Anaconda3\\lib\\site-packages\\tensorflow\\python\\client\\session.py\u001b[0m in \u001b[0;36m__call__\u001b[1;34m(self, *args)\u001b[0m\n\u001b[0;32m 1449\u001b[0m \u001b[1;32mif\u001b[0m \u001b[0mself\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[1;33m.\u001b[0m\u001b[0m_created_with_new_api\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1450\u001b[0m return tf_session.TF_SessionRunCallable(\n\u001b[1;32m-> 1451\u001b[1;33m self._session._session, self._handle, args, status, None)\n\u001b[0m\u001b[0;32m 1452\u001b[0m \u001b[1;32melse\u001b[0m\u001b[1;33m:\u001b[0m\u001b[1;33m\u001b[0m\u001b[0m\n\u001b[0;32m 1453\u001b[0m return tf_session.TF_DeprecatedSessionRunCallable(\n",
41 | "\u001b[1;31mKeyboardInterrupt\u001b[0m: "
42 | ],
43 | "output_type": "error"
44 | }
45 | ],
46 | "source": [
47 | "import keras\n",
48 | "from keras.datasets import mnist\n",
49 | "from keras.models import Sequential\n",
50 | "from keras.layers import Dense, Dropout, Flatten\n",
51 | "from keras.layers import Conv2D, MaxPooling2D\n",
52 | "import keras.backend.tensorflow_backend as kk\n",
53 | "from keras import backend as K\n",
54 | "'''\n",
55 | "cpu테스트\n",
56 | "정말 오래걸린다.\n",
57 | "'''\n",
58 | "batch_size = 128\n",
59 | "num_classes = 10\n",
60 | "epochs = 12\n",
61 | "\n",
62 | "# input image dimensions\n",
63 | "img_rows, img_cols = 28, 28\n",
64 | "\n",
65 | "# the data, split between train and test sets\n",
66 | "(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
67 | "\n",
68 | "if K.image_data_format() == 'channels_first':\n",
69 | " x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)\n",
70 | " x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)\n",
71 | " input_shape = (1, img_rows, img_cols)\n",
72 | "else:\n",
73 | " x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)\n",
74 | " x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)\n",
75 | " input_shape = (img_rows, img_cols, 1)\n",
76 | "\n",
77 | "x_train = x_train.astype('float32')\n",
78 | "x_test = x_test.astype('float32')\n",
79 | "x_train /= 255\n",
80 | "x_test /= 255\n",
81 | "print('x_train shape:', x_train.shape)\n",
82 | "print(x_train.shape[0], 'train samples')\n",
83 | "print(x_test.shape[0], 'test samples')\n",
84 | "\n",
85 | "# convert class vectors to binary class matrices\n",
86 | "y_train = keras.utils.to_categorical(y_train, num_classes)\n",
87 | "y_test = keras.utils.to_categorical(y_test, num_classes)\n",
88 | "\n",
89 | "import tensorflow as tf\n",
90 | "config = tf.ConfigProto()\n",
91 | "config.gpu_options.allow_growth = True\n",
92 | "session = tf.Session(config=config)\n",
93 | "\n",
94 | "\n",
95 | "with kk.tf_ops.device('/device:CPU:0'):\n",
96 | " model = Sequential()\n",
97 | " model.add(Conv2D(32, kernel_size=(3, 3),\n",
98 | " activation='relu',\n",
99 | " input_shape=input_shape))\n",
100 | " model.add(Conv2D(64, (3, 3), activation='relu'))\n",
101 | " model.add(MaxPooling2D(pool_size=(2, 2)))\n",
102 | " model.add(Dropout(0.25))\n",
103 | " model.add(Flatten())\n",
104 | " model.add(Dense(128, activation='relu'))\n",
105 | " model.add(Dropout(0.5))\n",
106 | " model.add(Dense(num_classes, activation='softmax'))\n",
107 | "\n",
108 | " model.compile(loss=keras.losses.categorical_crossentropy,\n",
109 | " optimizer=keras.optimizers.Adadelta(),\n",
110 | " metrics=['accuracy'])\n",
111 | "\n",
112 | " model.fit(x_train, y_train,\n",
113 | " batch_size=batch_size,\n",
114 | " epochs=epochs,\n",
115 | " verbose=1,\n",
116 | " validation_data=(x_test, y_test))\n",
117 | "score = model.evaluate(x_test, y_test, verbose=0)\n",
118 | "print('Test loss:', score[0])\n",
119 | "print('Test accuracy:', score[1])"
120 | ]
121 | },
122 | {
123 | "cell_type": "code",
124 | "execution_count": null,
125 | "metadata": {},
126 | "outputs": [],
127 | "source": []
128 | }
129 | ],
130 | "metadata": {
131 | "kernelspec": {
132 | "display_name": "Python 3",
133 | "language": "python",
134 | "name": "python3"
135 | },
136 | "language_info": {
137 | "codemirror_mode": {
138 | "name": "ipython",
139 | "version": 3
140 | },
141 | "file_extension": ".py",
142 | "mimetype": "text/x-python",
143 | "name": "python",
144 | "nbconvert_exporter": "python",
145 | "pygments_lexer": "ipython3",
146 | "version": "3.6.5"
147 | }
148 | },
149 | "nbformat": 4,
150 | "nbformat_minor": 2
151 | }
152 |
--------------------------------------------------------------------------------
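Two observations on the CPU notebook above. First, `kk.tf_ops.device` works only because `keras.backend.tensorflow_backend` happens to re-export TensorFlow's `ops` module; the public `tf.device` context manager does the same job without touching a private attribute. Second, the run was interrupted after one partial epoch (ETA around 28 minutes), which is the whole point of the CPU-vs-GPU comparison. A minimal sketch of the public-API device pinning on the same Keras 2.x / TF 1.x stack; the tiny model is illustrative only, not the notebook's CNN:

    import tensorflow as tf
    from keras import backend as K
    from keras.models import Sequential
    from keras.layers import Dense

    # Register a session up front so its ConfigProto options actually apply.
    K.set_session(tf.Session(config=tf.ConfigProto(allow_soft_placement=True)))

    # Pin graph construction to the CPU via the public API.
    with tf.device('/device:CPU:0'):
        model = Sequential([Dense(10, activation='softmax', input_shape=(784,))])
        model.compile(loss='categorical_crossentropy', optimizer='adam')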
/keras_gpu_test.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "name": "stderr",
10 | "output_type": "stream",
11 | "text": [
12 | "C:\\Users\\leesoojin\\Anaconda3\\lib\\site-packages\\h5py\\__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n",
13 | " from ._conv import register_converters as _register_converters\n",
14 | "Using TensorFlow backend.\n"
15 | ]
16 | },
17 | {
18 | "name": "stdout",
19 | "output_type": "stream",
20 | "text": [
21 | "x_train shape: (60000, 28, 28, 1)\n",
22 | "60000 train samples\n",
23 | "10000 test samples\n",
24 | "Train on 60000 samples, validate on 10000 samples\n",
25 | "Epoch 1/12\n",
26 | "60000/60000 [==============================] - 8s 132us/step - loss: 0.2549 - acc: 0.9224 - val_loss: 0.0562 - val_acc: 0.9813\n",
27 | "Epoch 2/12\n",
28 | "60000/60000 [==============================] - 6s 108us/step - loss: 0.0863 - acc: 0.9749 - val_loss: 0.0370 - val_acc: 0.9865\n",
29 | "Epoch 3/12\n",
30 | "60000/60000 [==============================] - 6s 108us/step - loss: 0.0637 - acc: 0.9811 - val_loss: 0.0361 - val_acc: 0.9881\n",
31 | "Epoch 4/12\n",
32 | "60000/60000 [==============================] - 6s 108us/step - loss: 0.0540 - acc: 0.9838 - val_loss: 0.0324 - val_acc: 0.9890\n",
33 | "Epoch 5/12\n",
34 | "60000/60000 [==============================] - 7s 108us/step - loss: 0.0449 - acc: 0.9862 - val_loss: 0.0368 - val_acc: 0.9885\n",
35 | "Epoch 6/12\n",
36 | "60000/60000 [==============================] - 6s 108us/step - loss: 0.0401 - acc: 0.9875 - val_loss: 0.0270 - val_acc: 0.9916\n",
37 | "Epoch 7/12\n",
38 | "60000/60000 [==============================] - 6s 106us/step - loss: 0.0367 - acc: 0.9889 - val_loss: 0.0284 - val_acc: 0.9906\n",
39 | "Epoch 8/12\n",
40 | "60000/60000 [==============================] - 6s 107us/step - loss: 0.0324 - acc: 0.9902 - val_loss: 0.0306 - val_acc: 0.9903\n",
41 | "Epoch 9/12\n",
42 | "60000/60000 [==============================] - 6s 107us/step - loss: 0.0301 - acc: 0.9904 - val_loss: 0.0273 - val_acc: 0.9915\n",
43 | "Epoch 10/12\n",
44 | "60000/60000 [==============================] - 6s 107us/step - loss: 0.0295 - acc: 0.9912 - val_loss: 0.0303 - val_acc: 0.9902\n",
45 | "Epoch 11/12\n",
46 | "60000/60000 [==============================] - 6s 107us/step - loss: 0.0272 - acc: 0.9916 - val_loss: 0.0289 - val_acc: 0.9917\n",
47 | "Epoch 12/12\n",
48 | "60000/60000 [==============================] - 6s 108us/step - loss: 0.0267 - acc: 0.9913 - val_loss: 0.0244 - val_acc: 0.9919\n",
49 | "Test loss: 0.02435819875159068\n",
50 | "Test accuracy: 0.9919\n"
51 | ]
52 | }
53 | ],
54 | "source": [
55 | "import keras\n",
56 | "from keras.datasets import mnist\n",
57 | "from keras.models import Sequential\n",
58 | "from keras.layers import Dense, Dropout, Flatten\n",
59 | "from keras.layers import Conv2D, MaxPooling2D\n",
60 | "import keras.backend.tensorflow_backend as kk\n",
61 | "from keras import backend as K\n",
62 | "'''\n",
63 | "gpu테스트\n",
64 | "한 에포크당 6초.\n",
65 | "\n",
66 | "'''\n",
67 | "batch_size = 128\n",
68 | "num_classes = 10\n",
69 | "epochs = 12\n",
70 | "\n",
71 | "# input image dimensions\n",
72 | "img_rows, img_cols = 28, 28\n",
73 | "\n",
74 | "# the data, split between train and test sets\n",
75 | "(x_train, y_train), (x_test, y_test) = mnist.load_data()\n",
76 | "\n",
77 | "if K.image_data_format() == 'channels_first':\n",
78 | " x_train = x_train.reshape(x_train.shape[0], 1, img_rows, img_cols)\n",
79 | " x_test = x_test.reshape(x_test.shape[0], 1, img_rows, img_cols)\n",
80 | " input_shape = (1, img_rows, img_cols)\n",
81 | "else:\n",
82 | " x_train = x_train.reshape(x_train.shape[0], img_rows, img_cols, 1)\n",
83 | " x_test = x_test.reshape(x_test.shape[0], img_rows, img_cols, 1)\n",
84 | " input_shape = (img_rows, img_cols, 1)\n",
85 | "\n",
86 | "x_train = x_train.astype('float32')\n",
87 | "x_test = x_test.astype('float32')\n",
88 | "x_train /= 255\n",
89 | "x_test /= 255\n",
90 | "print('x_train shape:', x_train.shape)\n",
91 | "print(x_train.shape[0], 'train samples')\n",
92 | "print(x_test.shape[0], 'test samples')\n",
93 | "\n",
94 | "# convert class vectors to binary class matrices\n",
95 | "y_train = keras.utils.to_categorical(y_train, num_classes)\n",
96 | "y_test = keras.utils.to_categorical(y_test, num_classes)\n",
97 | "\n",
98 | "import tensorflow as tf\n",
99 | "config = tf.ConfigProto()\n",
100 | "config.gpu_options.allow_growth = True\n",
101 | "session = tf.Session(config=config)\n",
102 | "\n",
103 | "\n",
104 | "with kk.tf_ops.device('/device:GPU:0'):\n",
105 | " model = Sequential()\n",
106 | " model.add(Conv2D(32, kernel_size=(3, 3),\n",
107 | " activation='relu',\n",
108 | " input_shape=input_shape))\n",
109 | " model.add(Conv2D(64, (3, 3), activation='relu'))\n",
110 | " model.add(MaxPooling2D(pool_size=(2, 2)))\n",
111 | " model.add(Dropout(0.25))\n",
112 | " model.add(Flatten())\n",
113 | " model.add(Dense(128, activation='relu'))\n",
114 | " model.add(Dropout(0.5))\n",
115 | " model.add(Dense(num_classes, activation='softmax'))\n",
116 | "\n",
117 | " model.compile(loss=keras.losses.categorical_crossentropy,\n",
118 | " optimizer=keras.optimizers.Adadelta(),\n",
119 | " metrics=['accuracy'])\n",
120 | "\n",
121 | " model.fit(x_train, y_train,\n",
122 | " batch_size=batch_size,\n",
123 | " epochs=epochs,\n",
124 | " verbose=1,\n",
125 | " validation_data=(x_test, y_test))\n",
126 | "score = model.evaluate(x_test, y_test, verbose=0)\n",
127 | "print('Test loss:', score[0])\n",
128 | "print('Test accuracy:', score[1])"
129 | ]
130 | },
131 | {
132 | "cell_type": "code",
133 | "execution_count": null,
134 | "metadata": {},
135 | "outputs": [],
136 | "source": []
137 | }
138 | ],
139 | "metadata": {
140 | "kernelspec": {
141 | "display_name": "Python 3",
142 | "language": "python",
143 | "name": "python3"
144 | },
145 | "language_info": {
146 | "codemirror_mode": {
147 | "name": "ipython",
148 | "version": 3
149 | },
150 | "file_extension": ".py",
151 | "mimetype": "text/x-python",
152 | "name": "python",
153 | "nbconvert_exporter": "python",
154 | "pygments_lexer": "ipython3",
155 | "version": "3.6.5"
156 | }
157 | },
158 | "nbformat": 4,
159 | "nbformat_minor": 2
160 | }
161 |
--------------------------------------------------------------------------------
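The GPU notebook is the same model with the device string swapped to `/device:GPU:0`, and it completes all 12 epochs at roughly 6-7 seconds each, reaching the 0.9919 test accuracy shown in the output. The `allow_growth = True` option matters here: by default TF 1.x reserves nearly all GPU memory at process start, while allow_growth allocates it on demand. On TF 2.x the same behavior is requested per physical device; a short sketch:

    import tensorflow as tf

    # TF 2.x equivalent of ConfigProto gpu_options.allow_growth = True.
    for gpu in tf.config.list_physical_devices('GPU'):
        tf.config.experimental.set_memory_growth(gpu, True)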
/model/cnn_mnist.model:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lsjsj92/keras_basic/afa26e7b830cab23c7ffb2e1806660070279a982/model/cnn_mnist.model
--------------------------------------------------------------------------------
/model/dog_cat_classify.model:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lsjsj92/keras_basic/afa26e7b830cab23c7ffb2e1806660070279a982/model/dog_cat_classify.model
--------------------------------------------------------------------------------
/model/iris.model:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lsjsj92/keras_basic/afa26e7b830cab23c7ffb2e1806660070279a982/model/iris.model
--------------------------------------------------------------------------------
/model/mnist_mlp.model:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lsjsj92/keras_basic/afa26e7b830cab23c7ffb2e1806660070279a982/model/mnist_mlp.model
--------------------------------------------------------------------------------
/model/model_day4.model:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lsjsj92/keras_basic/afa26e7b830cab23c7ffb2e1806660070279a982/model/model_day4.model
--------------------------------------------------------------------------------
/model/multi_img_classification.model:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lsjsj92/keras_basic/afa26e7b830cab23c7ffb2e1806660070279a982/model/multi_img_classification.model
--------------------------------------------------------------------------------
/model/predict_korea_news_LSTM.model:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lsjsj92/keras_basic/afa26e7b830cab23c7ffb2e1806660070279a982/model/predict_korea_news_LSTM.model
--------------------------------------------------------------------------------
/model/spam_ham_LSTM.model:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lsjsj92/keras_basic/afa26e7b830cab23c7ffb2e1806660070279a982/model/spam_ham_LSTM.model
--------------------------------------------------------------------------------
/numpy_data/binary_image_data.npy:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:68373a9f6e9d40691c43aa6872c08665d7944596b0c60cc32080cb845784b14f
3 | size 428987886
4 |
--------------------------------------------------------------------------------
/numpy_data/image_data.npy:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:0c3905e654cd7e160d2644f8ed669f15982e74d3fce37efac6f89244708be452
3 | size 428987886
4 |
--------------------------------------------------------------------------------
/numpy_data/multi_image_data.npy:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:f3598fd1f0bc261eb9b963997205e842b72e6b8e4a82de6487c68ecdfcebe454
3 | size 65455034
4 |
--------------------------------------------------------------------------------
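The three .npy entries above are Git LFS pointer files rather than the arrays themselves: a spec version line, a sha256 object id, and the payload size in bytes (about 429 MB for each of the two image dumps, about 65 MB for the multi-class one). A bare clone contains only this pointer text; `git lfs pull` replaces it with the real content, after which the files load with plain NumPy. A hedged sketch, since the pointers record nothing about array shape or dtype:

    import numpy as np

    # Run `git lfs pull` first; without it np.load would see only pointer text.
    # allow_pickle covers the case where a tuple of arrays was saved (an assumption).
    data = np.load('numpy_data/multi_image_data.npy', allow_pickle=True)
    print(type(data), getattr(data, 'shape', None))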