├── README.md └── Deap.ipynb /README.md: -------------------------------------------------------------------------------- 1 | # EEG-signal-classification-using-Deep-Learning 2 | 3 | In this repository I have implemented EEG signal classification using a 1D convolutional neural network (CNN). 4 | I have used the DEAP dataset, which is publicly available. Each trial carries four labels (valence, arousal, dominance and liking); the notebook classifies the valence ratings. 5 | I have applied the FFT to extract band-power features, which also removes NaN values. 6 | If you want to know more, you can read my blog post on Medium: https://tinyurl.com/ybrlylwf 7 | -------------------------------------------------------------------------------- /Deap.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [ 8 | { 9 | "name": "stdout", 10 | "output_type": "stream", 11 | "text": [ 12 | "Collecting git+https://github.com/forrestbao/pyeeg.git\n", 13 | " Cloning https://github.com/forrestbao/pyeeg.git to c:\\users\\faizan\\appdata\\local\\temp\\pip-req-build-k3x1tf5l\n", 14 | "Requirement already satisfied: numpy>=1.9.2 in c:\\users\\faizan\\anaconda3\\lib\\site-packages (from pyeeg==0.4.4) (1.16.2)\n", 15 | "Building wheels for collected packages: pyeeg\n", 16 | " Building wheel for pyeeg (setup.py): started\n", 17 | " Building wheel for pyeeg (setup.py): finished with status 'done'\n", 18 | " Stored in directory: C:\\Users\\faizan\\AppData\\Local\\Temp\\pip-ephem-wheel-cache-yttppi6f\\wheels\\2d\\3f\\ad\\106d4fc80b61d1ea1fc18e76e7439fd98aa043d83d58eae741\n", 19 | "Successfully built pyeeg\n", 20 | "Installing collected packages: pyeeg\n", 21 | "Successfully installed pyeeg-0.4.4\n", 22 | "Note: you may need to restart the kernel to use updated packages.\n" 23 | ] 24 | } 25 | ], 26 | "source": [ 27 | "pip install git+https://github.com/forrestbao/pyeeg.git" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 3, 33 | "metadata": {}, 34 | "outputs": [], 35 | "source": [ 36 | "import numpy as np\n", 37 | "import pyeeg as pe\n", 38 | "import pickle\n", 39 | "import pandas as pd\n", 40 | "import math\n", 41 | "\n", 42 | "from sklearn import svm\n", 43 | "from sklearn.preprocessing import normalize\n", 44 | "\n", 45 | "import os\n", 46 | "#import tensorflow as tf\n", 47 | "import time" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": 4, 53 | "metadata": {}, 54 | "outputs": [], 55 | "source": [ 56 | "channel = [1,2,3,4,6,11,13,17,19,20,21,25,29,31] #14 channels chosen to match the Emotiv EPOC+ montage\n", 57 | "band = [4,8,12,16,25,45] #Band boundaries in Hz: theta, alpha, low beta, high beta, gamma\n", 58 | "window_size = 256 #Average band power over a 2 sec window (256 samples)\n", 59 | "step_size = 16 #Slide the window forward by 0.125 sec (16 samples)\n", 60 | "sample_rate = 128 #Sampling rate of 128 Hz\n", 61 | "subjectList = ['01','02','03']\n", 62 | "#List of subjects to process" 63 | ] 64 | }, 65 | { 66 | "cell_type": "code", 67 | "execution_count": 17, 68 | "metadata": {}, 69 | "outputs": [], 70 | "source": [ 71 | "def FFT_Processing(sub, channel, band, window_size, step_size, sample_rate):\n", 72 | " '''\n", 73 | " arguments: sub: subject id as a string\n", 74 | " channel: list of channel indices\n", 75 | " band: list of band boundaries in Hz\n", 76 | " window_size: int window size for FFT\n", 77 | " step_size: int step size for FFT\n", 78 | " sample_rate: int sample rate for FFT\n", 79 | " return: void (the extracted features are saved to an .npy file)\n", 80 | " '''\n", 81 | " meta = []\n", 82 | " with open('C:/Users/faizan/Downloads/data_preprocessed_python/data_preprocessed_python/s' + sub + '.dat', 'rb') as file:\n", 
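" # Per the DEAP documentation, each preprocessed subject file holds a 'data' array of\n", " # shape (40 trials, 40 channels, 8064 samples at 128 Hz) and a 'labels' array of\n", " # shape (40, 4) with the valence, arousal, dominance and liking ratings (1-9).\n", 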
83 | "\n", 84 | " subject = pickle.load(file, encoding='latin1') #resolve the python 2 data problem by encoding : latin1\n", 85 | "\n", 86 | " for i in range (0,40):\n", 87 | " # loop over 0-39 trails\n", 88 | " data = subject[\"data\"][i]\n", 89 | " labels = subject[\"labels\"][i]\n", 90 | " start = 0;\n", 91 | "\n", 92 | " while start + window_size < data.shape[1]:\n", 93 | " meta_array = []\n", 94 | " meta_data = [] #meta vector for analysis\n", 95 | " for j in channel:\n", 96 | " X = data[j][start : start + window_size] #Slice raw data over 2 sec, at interval of 0.125 sec\n", 97 | " Y = pe.bin_power(X, band, sample_rate) #FFT over 2 sec of channel j, in seq of theta, alpha, low beta, high beta, gamma\n", 98 | " meta_data = meta_data + list(Y[0])\n", 99 | "\n", 100 | " meta_array.append(np.array(meta_data))\n", 101 | " meta_array.append(labels)\n", 102 | "\n", 103 | " meta.append(np.array(meta_array)) \n", 104 | " start = start + step_size\n", 105 | " \n", 106 | " meta = np.array(meta)\n", 107 | " np.save('C:/Users/faizan/Downloads/data_preprocessed_python/data_preprocessed_python/s' + sub, meta, allow_pickle=True, fix_imports=True)\n" 108 | ] 109 | }, 110 | { 111 | "cell_type": "code", 112 | "execution_count": 18, 113 | "metadata": {}, 114 | "outputs": [], 115 | "source": [ 116 | "for subjects in subjectList:\n", 117 | " FFT_Processing (subjects, channel, band, window_size, step_size, sample_rate)" 118 | ] 119 | }, 120 | { 121 | "cell_type": "code", 122 | "execution_count": 22, 123 | "metadata": {}, 124 | "outputs": [ 125 | { 126 | "name": "stdout", 127 | "output_type": "stream", 128 | "text": [ 129 | "training dataset: (51240, 70) (51240, 4)\n", 130 | "testing dataset: (7320, 70) (7320, 4)\n" 131 | ] 132 | } 133 | ], 134 | "source": [ 135 | "data_training = []\n", 136 | "label_training = []\n", 137 | "data_testing = []\n", 138 | "label_testing = []\n", 139 | "\n", 140 | "for subjects in subjectList:\n", 141 | "\n", 142 | " with open('C:/Users/faizan/Downloads/data_preprocessed_python/data_preprocessed_python/s' + subjects + '.npy', 'rb') as file:\n", 143 | " sub = np.load(file)\n", 144 | " for i in range (0,sub.shape[0]):\n", 145 | " if i % 8 == 0:\n", 146 | " data_testing.append(sub[i][0])\n", 147 | " label_testing.append(sub[i][1])\n", 148 | " else:\n", 149 | " data_training.append(sub[i][0])\n", 150 | " label_training.append(sub[i][1])\n", 151 | " \n", 152 | "np.save('C:/Users/faizan/Downloads/data_preprocessed_python/data_preprocessed_python/data_training', np.array(data_training), allow_pickle=True, fix_imports=True)\n", 153 | "np.save('C:/Users/faizan/Downloads/data_preprocessed_python/data_preprocessed_python/label_training', np.array(label_training), allow_pickle=True, fix_imports=True)\n", 154 | "print(\"training dataset:\", np.array(data_training).shape, np.array(label_training).shape)\n", 155 | "\n", 156 | "np.save('C:/Users/faizan/Downloads/data_preprocessed_python/data_preprocessed_python/data_testing', np.array(data_testing), allow_pickle=True, fix_imports=True)\n", 157 | "np.save('C:/Users/faizan/Downloads/data_preprocessed_python/data_preprocessed_python/label_testing', np.array(label_testing), allow_pickle=True, fix_imports=True)\n", 158 | "print(\"testing dataset:\", np.array(data_testing).shape, np.array(label_testing).shape)\n", 159 | "\n" 160 | ] 161 | }, 162 | { 163 | "cell_type": "code", 164 | "execution_count": 32, 165 | "metadata": {}, 166 | "outputs": [], 167 | "source": [ 168 | "with 
open('C:/Users/faizan/Downloads/data_preprocessed_python/data_preprocessed_python/data_training.npy', 'rb') as fileTrain:\n", 169 | " X = np.load(fileTrain)\n", 170 | " \n", 171 | "with open('C:/Users/faizan/Downloads/data_preprocessed_python/data_preprocessed_python/label_training.npy', 'rb') as fileTrainL:\n", 172 | " Y = np.load(fileTrainL)\n", 173 | " \n", 174 | "X = normalize(X) # L2-normalize each 70-dim feature vector\n", 175 | "Z = np.ravel(Y[:, [1]]) # valence ratings (1-9), used below as the classification target\n", 176 | "\n", 177 | "Arousal_Train = np.ravel(Y[:, [0]])\n", 178 | "Valence_Train = np.ravel(Y[:, [1]])\n", 179 | "Dominance_Train = np.ravel(Y[:, [2]])\n", 180 | "Like_Train = np.ravel(Y[:, [3]])" 181 | ] 182 | }, 183 | { 184 | "cell_type": "code", 185 | "execution_count": null, 186 | "metadata": {}, 187 | "outputs": [], 188 | "source": [] 189 | }, 190 | { 191 | "cell_type": "code", 192 | "execution_count": 33, 193 | "metadata": {}, 194 | "outputs": [], 195 | "source": [ 196 | "# Consolidated imports (the original cell repeated several of these)\n", 197 | "import numpy as np\n", 198 | "import pandas as pd\n", 199 | "\n", 200 | "import keras\n", 201 | "import keras.backend as K\n", 202 | "from keras.models import Sequential\n", 203 | "from keras.layers import Dense, Dropout, Flatten\n", 204 | "from keras.layers.convolutional import Conv1D, MaxPooling1D\n", 205 | "from keras.utils import to_categorical\n", 206 | "\n", 207 | "import warnings\n", 208 | "warnings.filterwarnings('ignore')\n" 223 | ] 224 | }, 225 | { 226 | "cell_type": "code", 227 | "execution_count": 50, 228 | "metadata": {}, 229 | "outputs": [ 230 | { 231 | "data": { 232 | "text/plain": [ 233 | "array([[0., 0., 0., ..., 1., 0., 0.],\n", 234 | " [0., 0., 0., ..., 1., 0., 0.],\n", 235 | " [0., 0., 0., ..., 1., 0., 0.],\n", 236 | " ...,\n", 237 | " [0., 0., 0., ..., 0., 0., 0.],\n", 238 | " [0., 0., 0., ..., 0., 0., 0.],\n", 239 | " [0., 0., 0., ..., 0., 0., 0.]], dtype=float32)" 240 | ] 241 | }, 242 | "execution_count": 50, 243 | "metadata": {}, 244 | "output_type": "execute_result" 245 | } 246 | ], 247 | "source": [ 248 | "from keras.utils import to_categorical\n", 249 | "y_train = to_categorical(Z)\n", 250 | "y_train" 251 | ] 252 | }, 253 | { 254 | "cell_type": "code", 255 | "execution_count": 51, 256 | "metadata": {}, 257 | "outputs": [], 258 | "source": [ 259 | "x_train = np.array(X[:])" 260 | ] 261 | }, 262 | { 263 | "cell_type": "code", 264 | "execution_count": 52, 265 | "metadata": {}, 266 | "outputs": [], 267 | "source": [ 268 | "with open('C:/Users/faizan/Downloads/data_preprocessed_python/data_preprocessed_python/data_testing.npy', 'rb') as fileTrain:\n", 269 | " M = np.load(fileTrain)\n", 270 | " \n", 271 | "with open('C:/Users/faizan/Downloads/data_preprocessed_python/data_preprocessed_python/label_testing.npy', 'rb') as fileTrainL:\n", 272 | " N = np.load(fileTrainL)\n", 273 | "\n", 274 | "M = normalize(M)\n", 275 | "L = 
np.ravel(N[:, [1]]) # valence ratings for the test split\n", 276 | "\n", 277 | "Arousal_Test = np.ravel(N[:, [0]])\n", 278 | "Valence_Test = np.ravel(N[:, [1]])\n", 279 | "Dominance_Test = np.ravel(N[:, [2]])\n", 280 | "Like_Test = np.ravel(N[:, [3]])" 281 | ] 282 | }, 283 | { 284 | "cell_type": "code", 285 | "execution_count": 53, 286 | "metadata": {}, 287 | "outputs": [], 288 | "source": [ 289 | "x_test = np.array(M[:])" 290 | ] 291 | }, 292 | { 293 | "cell_type": "code", 294 | "execution_count": 54, 295 | "metadata": {}, 296 | "outputs": [ 297 | { 298 | "data": { 299 | "text/plain": [ 300 | "array([[0., 0., 0., ..., 1., 0., 0.],\n", 301 | " [0., 0., 0., ..., 1., 0., 0.],\n", 302 | " [0., 0., 0., ..., 1., 0., 0.],\n", 303 | " ...,\n", 304 | " [0., 0., 0., ..., 0., 0., 0.],\n", 305 | " [0., 0., 0., ..., 0., 0., 0.],\n", 306 | " [0., 0., 0., ..., 0., 0., 0.]], dtype=float32)" 307 | ] 308 | }, 309 | "execution_count": 54, 310 | "metadata": {}, 311 | "output_type": "execute_result" 312 | } 313 | ], 314 | "source": [ 315 | "from keras.utils import to_categorical\n", 316 | "y_test = to_categorical(L)\n", 317 | "y_test" 318 | ] 319 | }, 320 | { 321 | "cell_type": "code", 322 | "execution_count": 55, 323 | "metadata": {}, 324 | "outputs": [], 325 | "source": [ 326 | "from sklearn.preprocessing import StandardScaler\n", 327 | "scaler = StandardScaler()\n", 328 | "x_train = scaler.fit_transform(x_train)\n", 329 | "x_test = scaler.transform(x_test) # reuse the training-set statistics; refitting on the test set would leak information" 330 | ] 331 | }, 332 | { 333 | "cell_type": "code", 334 | "execution_count": 56, 335 | "metadata": {}, 336 | "outputs": [], 337 | "source": [ 338 | "x_train = x_train.reshape(x_train.shape[0],x_train.shape[1], 1)\n", 339 | "x_test = x_test.reshape(x_test.shape[0],x_test.shape[1], 1)" 340 | ] 341 | }, 342 | { 343 | "cell_type": "code", 344 | "execution_count": 57, 345 | "metadata": {}, 346 | "outputs": [ 347 | { 348 | "data": { 349 | "text/plain": [ 350 | "(51240, 70, 1)" 351 | ] 352 | }, 353 | "execution_count": 57, 354 | "metadata": {}, 355 | "output_type": "execute_result" 356 | } 357 | ], 358 | "source": [ 359 | "x_train.shape" 360 | ] 361 | }, 362 | { 363 | "cell_type": "code", 364 | "execution_count": 71, 365 | "metadata": {}, 366 | "outputs": [], 367 | "source": [ 368 | "batch_size = 256\n", 369 | "num_classes = 10 # to_categorical maps the 1-9 ratings (cast to int) onto 10 one-hot columns (indices 0-9)\n", 370 | "epochs = 200\n", 371 | "input_shape=(x_train.shape[1], 1)" 372 | ] 373 | }, 374 | { 375 | "cell_type": "code", 376 | "execution_count": 72, 377 | "metadata": {}, 378 | "outputs": [ 379 | { 380 | "name": "stdout", 381 | "output_type": "stream", 382 | "text": [ 383 | "(70, 1)\n" 384 | ] 385 | } 386 | ], 387 | "source": [ 388 | "print(input_shape)" 389 | ] 390 | }, 391 | { 392 | "cell_type": "code", 393 | "execution_count": 73, 394 | "metadata": {}, 395 | "outputs": [], 396 | "source": [ 397 | "from keras.layers import Convolution1D, ZeroPadding1D, MaxPooling1D, BatchNormalization, Activation, Dropout, Flatten, Dense\n", 398 | "from keras.regularizers import l2" 399 | ] 400 | }, 401 | { 402 | "cell_type": "code", 403 | "execution_count": 74, 404 | "metadata": {}, 405 | "outputs": [ 406 | { 407 | "name": "stdout", 408 | "output_type": "stream", 409 | "text": [ 410 | "Model: \"sequential_4\"\n", 411 | "_________________________________________________________________\n", 412 | "Layer (type) Output Shape Param # \n", 413 | "=================================================================\n", 414 | "conv1d_7 (Conv1D) (None, 70, 128) 512 \n", 415 | "_________________________________________________________________\n", 416 | "batch_normalization_7 (Batch (None, 70, 
128) 512 \n", 417 | "_________________________________________________________________\n", 418 | "max_pooling1d_7 (MaxPooling1 (None, 35, 128) 0 \n", 419 | "_________________________________________________________________\n", 420 | "conv1d_8 (Conv1D) (None, 35, 128) 49280 \n", 421 | "_________________________________________________________________\n", 422 | "batch_normalization_8 (Batch (None, 35, 128) 512 \n", 423 | "_________________________________________________________________\n", 424 | "max_pooling1d_8 (MaxPooling1 (None, 17, 128) 0 \n", 425 | "_________________________________________________________________\n", 426 | "flatten_4 (Flatten) (None, 2176) 0 \n", 427 | "_________________________________________________________________\n", 428 | "dense_13 (Dense) (None, 64) 139328 \n", 429 | "_________________________________________________________________\n", 430 | "dropout_10 (Dropout) (None, 64) 0 \n", 431 | "_________________________________________________________________\n", 432 | "dense_14 (Dense) (None, 32) 2080 \n", 433 | "_________________________________________________________________\n", 434 | "dropout_11 (Dropout) (None, 32) 0 \n", 435 | "_________________________________________________________________\n", 436 | "dense_15 (Dense) (None, 16) 528 \n", 437 | "_________________________________________________________________\n", 438 | "dropout_12 (Dropout) (None, 16) 0 \n", 439 | "_________________________________________________________________\n", 440 | "dense_16 (Dense) (None, 10) 170 \n", 441 | "=================================================================\n", 442 | "Total params: 192,922\n", 443 | "Trainable params: 192,410\n", 444 | "Non-trainable params: 512\n", 445 | "_________________________________________________________________\n" 446 | ] 447 | } 448 | ], 449 | "source": [ 450 | "model = Sequential()\n", 451 | "input_shape=(x_train.shape[1], 1)\n", 452 | "model.add(Conv1D(128, kernel_size=3,padding = 'same',activation='relu', input_shape=input_shape))\n", 453 | "model.add(BatchNormalization())\n", 454 | "model.add(MaxPooling1D(pool_size=(2)))\n", 455 | "model.add(Conv1D(128,kernel_size=3,padding = 'same', activation='relu'))\n", 456 | "model.add(BatchNormalization())\n", 457 | "model.add(MaxPooling1D(pool_size=(2)))\n", 458 | "#model.add(Conv1D(64,kernel_size=3,padding = 'same', activation='relu'))\n", 459 | "#model.add(MaxPooling1D(pool_size=(2)))\n", 460 | "model.add(Flatten())\n", 461 | "model.add(Dense(64, activation='tanh'))\n", 462 | "model.add(Dropout(0.2))\n", 463 | "model.add(Dense(32, activation='tanh'))\n", 464 | "model.add(Dropout(0.2))\n", 465 | "model.add(Dense(16, activation='relu'))\n", 466 | "model.add(Dropout(0.2))\n", 467 | "model.add(Dense(num_classes, activation='softmax'))\n", 468 | "model.summary()" 469 | ] 470 | }, 471 | { 472 | "cell_type": "code", 473 | "execution_count": 75, 474 | "metadata": {}, 475 | "outputs": [], 476 | "source": [ 477 | "model.compile(loss=keras.losses.categorical_crossentropy,\n", 478 | " optimizer='adam',\n", 479 | " metrics=['accuracy'])" 480 | ] 481 | }, 482 | { 483 | "cell_type": "code", 484 | "execution_count": 76, 485 | "metadata": {}, 486 | "outputs": [ 487 | { 488 | "name": "stdout", 489 | "output_type": "stream", 490 | "text": [ 491 | "Epoch 1/200\n", 492 | "51240/51240 [==============================] - 69s 1ms/step - loss: 2.0102 - accuracy: 0.2650\n", 493 | "Epoch 2/200\n", 494 | "51240/51240 [==============================] - 72s 1ms/step - loss: 1.7687 - accuracy: 0.3254\n", 495 | "Epoch 
3/200\n", 496 | "51240/51240 [==============================] - 67s 1ms/step - loss: 1.6625 - accuracy: 0.3671\n", 497 | "Epoch 4/200\n", 498 | "51240/51240 [==============================] - 59s 1ms/step - loss: 1.5719 - accuracy: 0.4089\n", 499 | "Epoch 5/200\n", 500 | "51240/51240 [==============================] - 61s 1ms/step - loss: 1.5110 - accuracy: 0.4345\n", 501 | "Epoch 6/200\n", 502 | "51240/51240 [==============================] - 58s 1ms/step - loss: 1.4653 - accuracy: 0.4547\n", 503 | "Epoch 7/200\n", 504 | "51240/51240 [==============================] - 57s 1ms/step - loss: 1.4126 - accuracy: 0.4731\n", 505 | "Epoch 8/200\n", 506 | "51240/51240 [==============================] - 57s 1ms/step - loss: 1.3686 - accuracy: 0.4884\n", 507 | "Epoch 9/200\n", 508 | "51240/51240 [==============================] - 59s 1ms/step - loss: 1.3357 - accuracy: 0.5065\n", 509 | "Epoch 10/200\n", 510 | "51240/51240 [==============================] - 57s 1ms/step - loss: 1.2997 - accuracy: 0.5186\n", 511 | "Epoch 11/200\n", 512 | "51240/51240 [==============================] - 61s 1ms/step - loss: 1.2654 - accuracy: 0.5365\n", 513 | "Epoch 12/200\n", 514 | "51240/51240 [==============================] - 57s 1ms/step - loss: 1.2421 - accuracy: 0.5465\n", 515 | "Epoch 13/200\n", 516 | "51240/51240 [==============================] - 57s 1ms/step - loss: 1.2192 - accuracy: 0.5528\n", 517 | "Epoch 14/200\n", 518 | "51240/51240 [==============================] - 57s 1ms/step - loss: 1.1848 - accuracy: 0.5688\n", 519 | "Epoch 15/200\n", 520 | "51240/51240 [==============================] - 58s 1ms/step - loss: 1.1670 - accuracy: 0.5774\n", 521 | "Epoch 16/200\n", 522 | "51240/51240 [==============================] - 61s 1ms/step - loss: 1.1465 - accuracy: 0.5850\n", 523 | "Epoch 17/200\n", 524 | "51240/51240 [==============================] - 57s 1ms/step - loss: 1.1193 - accuracy: 0.5957\n", 525 | "Epoch 18/200\n", 526 | "51240/51240 [==============================] - 57s 1ms/step - loss: 1.1068 - accuracy: 0.6003\n", 527 | "Epoch 19/200\n", 528 | "51240/51240 [==============================] - 57s 1ms/step - loss: 1.0901 - accuracy: 0.6074\n", 529 | "Epoch 20/200\n", 530 | "51240/51240 [==============================] - 58s 1ms/step - loss: 1.0641 - accuracy: 0.6159\n", 531 | "Epoch 21/200\n", 532 | "51240/51240 [==============================] - 58s 1ms/step - loss: 1.0437 - accuracy: 0.6238\n", 533 | "Epoch 22/200\n", 534 | "51240/51240 [==============================] - 57s 1ms/step - loss: 1.0307 - accuracy: 0.6270\n", 535 | "Epoch 23/200\n", 536 | "51240/51240 [==============================] - 58s 1ms/step - loss: 1.0258 - accuracy: 0.6300\n", 537 | "Epoch 24/200\n", 538 | "51240/51240 [==============================] - 60s 1ms/step - loss: 1.0153 - accuracy: 0.6343\n", 539 | "Epoch 25/200\n", 540 | "51240/51240 [==============================] - 59s 1ms/step - loss: 1.0025 - accuracy: 0.6375\n", 541 | "Epoch 26/200\n", 542 | "51240/51240 [==============================] - 63s 1ms/step - loss: 0.9860 - accuracy: 0.6463\n", 543 | "Epoch 27/200\n", 544 | "51240/51240 [==============================] - 64s 1ms/step - loss: 0.9744 - accuracy: 0.6487\n", 545 | "Epoch 28/200\n", 546 | "51240/51240 [==============================] - 70s 1ms/step - loss: 0.9665 - accuracy: 0.6508\n", 547 | "Epoch 29/200\n", 548 | "51240/51240 [==============================] - 68s 1ms/step - loss: 0.9577 - accuracy: 0.6566\n", 549 | "Epoch 30/200\n", 550 | "51240/51240 [==============================] - 68s 1ms/step 
- loss: 0.9482 - accuracy: 0.6601\n", 551 | "Epoch 31/200\n", 552 | "51240/51240 [==============================] - 67s 1ms/step - loss: 0.9291 - accuracy: 0.6659\n", 553 | "Epoch 32/200\n", 554 | "51240/51240 [==============================] - 69s 1ms/step - loss: 0.9301 - accuracy: 0.6661\n", 555 | "Epoch 33/200\n", 556 | "51240/51240 [==============================] - 68s 1ms/step - loss: 0.9167 - accuracy: 0.6715\n", 557 | "Epoch 34/200\n", 558 | "51240/51240 [==============================] - 69s 1ms/step - loss: 0.9165 - accuracy: 0.6738\n", 559 | "Epoch 35/200\n", 560 | "51240/51240 [==============================] - 64s 1ms/step - loss: 0.9116 - accuracy: 0.6730\n", 561 | "Epoch 36/200\n", 562 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.8941 - accuracy: 0.6802\n", 563 | "Epoch 37/200\n", 564 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.8885 - accuracy: 0.6817\n", 565 | "Epoch 38/200\n", 566 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.8740 - accuracy: 0.6883\n", 567 | "Epoch 39/200\n", 568 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.8709 - accuracy: 0.6883\n", 569 | "Epoch 40/200\n", 570 | "51240/51240 [==============================] - 60s 1ms/step - loss: 0.8577 - accuracy: 0.6928\n", 571 | "Epoch 41/200\n", 572 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.8601 - accuracy: 0.6927\n", 573 | "Epoch 42/200\n", 574 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.8698 - accuracy: 0.6878\n", 575 | "Epoch 43/200\n", 576 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.8550 - accuracy: 0.6952\n", 577 | "Epoch 44/200\n", 578 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.8470 - accuracy: 0.6980\n", 579 | "Epoch 45/200\n", 580 | "51240/51240 [==============================] - 60s 1ms/step - loss: 0.8473 - accuracy: 0.6976\n", 581 | "Epoch 46/200\n", 582 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.8320 - accuracy: 0.7025\n", 583 | "Epoch 47/200\n", 584 | "51240/51240 [==============================] - 63s 1ms/step - loss: 0.8411 - accuracy: 0.6988\n", 585 | "Epoch 48/200\n", 586 | "51240/51240 [==============================] - 73s 1ms/step - loss: 0.8306 - accuracy: 0.7030\n", 587 | "Epoch 49/200\n", 588 | "51240/51240 [==============================] - 75s 1ms/step - loss: 0.8226 - accuracy: 0.7083\n", 589 | "Epoch 50/200\n", 590 | "51240/51240 [==============================] - 70s 1ms/step - loss: 0.8191 - accuracy: 0.7069\n", 591 | "Epoch 51/200\n", 592 | "51240/51240 [==============================] - 65s 1ms/step - loss: 0.8154 - accuracy: 0.7090\n", 593 | "Epoch 52/200\n", 594 | "51240/51240 [==============================] - 68s 1ms/step - loss: 0.8069 - accuracy: 0.7108\n", 595 | "Epoch 53/200\n", 596 | "51240/51240 [==============================] - 71s 1ms/step - loss: 0.8045 - accuracy: 0.7146\n", 597 | "Epoch 54/200\n", 598 | "51240/51240 [==============================] - 69s 1ms/step - loss: 0.7926 - accuracy: 0.7167\n", 599 | "Epoch 55/200\n", 600 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.7917 - accuracy: 0.7156\n", 601 | "Epoch 56/200\n", 602 | "51240/51240 [==============================] - 61s 1ms/step - loss: 0.8009 - accuracy: 0.7140\n", 603 | "Epoch 57/200\n", 604 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.7891 - accuracy: 0.7218\n", 605 | "Epoch 58/200\n", 606 | 
"51240/51240 [==============================] - 59s 1ms/step - loss: 0.7722 - accuracy: 0.7256\n", 607 | "Epoch 59/200\n", 608 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.7782 - accuracy: 0.7234\n", 609 | "Epoch 60/200\n", 610 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.7745 - accuracy: 0.7231\n", 611 | "Epoch 61/200\n", 612 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.7703 - accuracy: 0.7251\n", 613 | "Epoch 62/200\n", 614 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.7751 - accuracy: 0.7243\n", 615 | "Epoch 63/200\n", 616 | "51240/51240 [==============================] - 60s 1ms/step - loss: 0.7634 - accuracy: 0.7276\n", 617 | "Epoch 64/200\n", 618 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.7606 - accuracy: 0.7302\n", 619 | "Epoch 65/200\n", 620 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.7559 - accuracy: 0.7312\n", 621 | "Epoch 66/200\n", 622 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.7505 - accuracy: 0.7340\n", 623 | "Epoch 67/200\n", 624 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.7544 - accuracy: 0.7315\n", 625 | "Epoch 68/200\n", 626 | "51240/51240 [==============================] - 56s 1ms/step - loss: 0.7600 - accuracy: 0.7282\n", 627 | "Epoch 69/200\n", 628 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.7497 - accuracy: 0.7329\n", 629 | "Epoch 70/200\n", 630 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.7396 - accuracy: 0.7355\n", 631 | "Epoch 71/200\n", 632 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.7258 - accuracy: 0.7400\n", 633 | "Epoch 72/200\n", 634 | "51240/51240 [==============================] - 56s 1ms/step - loss: 0.7348 - accuracy: 0.7376\n", 635 | "Epoch 73/200\n", 636 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.7395 - accuracy: 0.7385\n", 637 | "Epoch 74/200\n", 638 | "51240/51240 [==============================] - 61s 1ms/step - loss: 0.7355 - accuracy: 0.7373\n", 639 | "Epoch 75/200\n", 640 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.7296 - accuracy: 0.7410\n", 641 | "Epoch 76/200\n", 642 | "51240/51240 [==============================] - 60s 1ms/step - loss: 0.7214 - accuracy: 0.7447\n", 643 | "Epoch 77/200\n", 644 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.7165 - accuracy: 0.7443\n", 645 | "Epoch 78/200\n", 646 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.7180 - accuracy: 0.7447\n", 647 | "Epoch 79/200\n", 648 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.7292 - accuracy: 0.7408\n", 649 | "Epoch 80/200\n", 650 | "51240/51240 [==============================] - 60s 1ms/step - loss: 0.7131 - accuracy: 0.7455\n", 651 | "Epoch 81/200\n", 652 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.7031 - accuracy: 0.7493\n", 653 | "Epoch 82/200\n", 654 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.7158 - accuracy: 0.7466\n", 655 | "Epoch 83/200\n", 656 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.7042 - accuracy: 0.7501\n", 657 | "Epoch 84/200\n", 658 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.7010 - accuracy: 0.7522\n", 659 | "Epoch 85/200\n", 660 | "51240/51240 [==============================] - 59s 1ms/step - loss: 
0.6964 - accuracy: 0.7516\n", 661 | "Epoch 86/200\n", 662 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.7067 - accuracy: 0.7516\n", 663 | "Epoch 87/200\n", 664 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.6972 - accuracy: 0.7513\n", 665 | "Epoch 88/200\n", 666 | "51240/51240 [==============================] - 60s 1ms/step - loss: 0.6934 - accuracy: 0.7541\n", 667 | "Epoch 89/200\n", 668 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.7019 - accuracy: 0.7495\n", 669 | "Epoch 90/200\n", 670 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.6872 - accuracy: 0.7550\n", 671 | "Epoch 91/200\n", 672 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.6883 - accuracy: 0.7547\n", 673 | "Epoch 92/200\n", 674 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.6740 - accuracy: 0.7593\n", 675 | "Epoch 93/200\n", 676 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.6941 - accuracy: 0.7529\n", 677 | "Epoch 94/200\n", 678 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.6846 - accuracy: 0.7566\n", 679 | "Epoch 95/200\n", 680 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.6852 - accuracy: 0.7558\n", 681 | "Epoch 96/200\n", 682 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.6777 - accuracy: 0.7577\n", 683 | "Epoch 97/200\n", 684 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.6785 - accuracy: 0.7591\n", 685 | "Epoch 98/200\n", 686 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.6792 - accuracy: 0.7583\n", 687 | "Epoch 99/200\n", 688 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.6705 - accuracy: 0.7620\n", 689 | "Epoch 100/200\n", 690 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.6730 - accuracy: 0.7622\n", 691 | "Epoch 101/200\n", 692 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.6612 - accuracy: 0.7641\n", 693 | "Epoch 102/200\n", 694 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.6568 - accuracy: 0.7661\n", 695 | "Epoch 103/200\n", 696 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.6667 - accuracy: 0.7635\n", 697 | "Epoch 104/200\n", 698 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.6600 - accuracy: 0.7632\n", 699 | "Epoch 105/200\n", 700 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.6578 - accuracy: 0.7655\n", 701 | "Epoch 106/200\n", 702 | "51240/51240 [==============================] - 68s 1ms/step - loss: 0.6525 - accuracy: 0.7666\n", 703 | "Epoch 107/200\n", 704 | "51240/51240 [==============================] - 63s 1ms/step - loss: 0.6493 - accuracy: 0.7678\n", 705 | "Epoch 108/200\n", 706 | "51240/51240 [==============================] - 62s 1ms/step - loss: 0.6527 - accuracy: 0.7691\n", 707 | "Epoch 109/200\n", 708 | "51240/51240 [==============================] - 63s 1ms/step - loss: 0.6579 - accuracy: 0.7663\n", 709 | "Epoch 110/200\n", 710 | "51240/51240 [==============================] - 62s 1ms/step - loss: 0.6431 - accuracy: 0.7733\n", 711 | "Epoch 111/200\n", 712 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.6436 - accuracy: 0.7700\n", 713 | "Epoch 112/200\n", 714 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.6607 - accuracy: 0.7678\n", 715 | "Epoch 113/200\n", 716 
| "51240/51240 [==============================] - 57s 1ms/step - loss: 0.6452 - accuracy: 0.7709\n", 717 | "Epoch 114/200\n", 718 | "51240/51240 [==============================] - 60s 1ms/step - loss: 0.6387 - accuracy: 0.7734\n", 719 | "Epoch 115/200\n", 720 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.6408 - accuracy: 0.7720\n", 721 | "Epoch 116/200\n", 722 | "51240/51240 [==============================] - 61s 1ms/step - loss: 0.6346 - accuracy: 0.7736\n", 723 | "Epoch 117/200\n", 724 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.6341 - accuracy: 0.7731\n", 725 | "Epoch 118/200\n", 726 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.6367 - accuracy: 0.7735\n", 727 | "Epoch 119/200\n", 728 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.6354 - accuracy: 0.7730\n", 729 | "Epoch 120/200\n", 730 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.6283 - accuracy: 0.7771\n", 731 | "Epoch 121/200\n", 732 | "51240/51240 [==============================] - 64s 1ms/step - loss: 0.6329 - accuracy: 0.7757\n", 733 | "Epoch 122/200\n", 734 | "51240/51240 [==============================] - 63s 1ms/step - loss: 0.6301 - accuracy: 0.7769\n", 735 | "Epoch 123/200\n", 736 | "51240/51240 [==============================] - 56s 1ms/step - loss: 0.6210 - accuracy: 0.7806\n", 737 | "Epoch 124/200\n", 738 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.6349 - accuracy: 0.7763\n", 739 | "Epoch 125/200\n", 740 | "51240/51240 [==============================] - 56s 1ms/step - loss: 0.6213 - accuracy: 0.7802\n", 741 | "Epoch 126/200\n", 742 | "51240/51240 [==============================] - 56s 1ms/step - loss: 0.6308 - accuracy: 0.7759\n", 743 | "Epoch 127/200\n", 744 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.6281 - accuracy: 0.7777\n", 745 | "Epoch 128/200\n", 746 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.6135 - accuracy: 0.7817\n", 747 | "Epoch 129/200\n", 748 | "51240/51240 [==============================] - 56s 1ms/step - loss: 0.6143 - accuracy: 0.7816\n", 749 | "Epoch 130/200\n", 750 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.6184 - accuracy: 0.7803\n", 751 | "Epoch 131/200\n", 752 | "51240/51240 [==============================] - 66s 1ms/step - loss: 0.6122 - accuracy: 0.7829\n", 753 | "Epoch 132/200\n", 754 | "51240/51240 [==============================] - 68s 1ms/step - loss: 0.6210 - accuracy: 0.7814\n", 755 | "Epoch 133/200\n", 756 | "51240/51240 [==============================] - 69s 1ms/step - loss: 0.6002 - accuracy: 0.7868\n", 757 | "Epoch 134/200\n", 758 | "51240/51240 [==============================] - 60s 1ms/step - loss: 0.6039 - accuracy: 0.7845\n", 759 | "Epoch 135/200\n", 760 | "51240/51240 [==============================] - 70s 1ms/step - loss: 0.6049 - accuracy: 0.7853\n", 761 | "Epoch 136/200\n", 762 | "51240/51240 [==============================] - 69s 1ms/step - loss: 0.6024 - accuracy: 0.7863\n", 763 | "Epoch 137/200\n", 764 | "51240/51240 [==============================] - 61s 1ms/step - loss: 0.6039 - accuracy: 0.7873\n", 765 | "Epoch 138/200\n", 766 | "51240/51240 [==============================] - 60s 1ms/step - loss: 0.6029 - accuracy: 0.7864\n", 767 | "Epoch 139/200\n", 768 | "51240/51240 [==============================] - 61s 1ms/step - loss: 0.5975 - accuracy: 0.7880\n", 769 | "Epoch 140/200\n", 770 | "51240/51240 
[==============================] - 61s 1ms/step - loss: 0.6005 - accuracy: 0.7877\n", 771 | "Epoch 141/200\n", 772 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.6042 - accuracy: 0.7869\n", 773 | "Epoch 142/200\n", 774 | "51240/51240 [==============================] - 60s 1ms/step - loss: 0.5909 - accuracy: 0.7899\n", 775 | "Epoch 143/200\n", 776 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5862 - accuracy: 0.7919\n", 777 | "Epoch 144/200\n", 778 | "51240/51240 [==============================] - 56s 1ms/step - loss: 0.5928 - accuracy: 0.7911\n", 779 | "Epoch 145/200\n", 780 | "51240/51240 [==============================] - 61s 1ms/step - loss: 0.5937 - accuracy: 0.7913\n", 781 | "Epoch 146/200\n", 782 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5877 - accuracy: 0.7925\n", 783 | "Epoch 147/200\n", 784 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.5867 - accuracy: 0.7940\n", 785 | "Epoch 148/200\n", 786 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5905 - accuracy: 0.7917\n", 787 | "Epoch 149/200\n", 788 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5830 - accuracy: 0.7934\n", 789 | "Epoch 150/200\n", 790 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5870 - accuracy: 0.7925\n", 791 | "Epoch 151/200\n", 792 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5879 - accuracy: 0.7927\n", 793 | "Epoch 152/200\n", 794 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5852 - accuracy: 0.7931\n", 795 | "Epoch 153/200\n", 796 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.5773 - accuracy: 0.7939\n", 797 | "Epoch 154/200\n", 798 | "51240/51240 [==============================] - 56s 1ms/step - loss: 0.5818 - accuracy: 0.7940\n", 799 | "Epoch 155/200\n", 800 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.5748 - accuracy: 0.7958\n", 801 | "Epoch 156/200\n", 802 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.5708 - accuracy: 0.7987\n", 803 | "Epoch 157/200\n", 804 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.5742 - accuracy: 0.7975\n", 805 | "Epoch 158/200\n", 806 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5906 - accuracy: 0.7919\n", 807 | "Epoch 159/200\n", 808 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5747 - accuracy: 0.7955\n", 809 | "Epoch 160/200\n", 810 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5703 - accuracy: 0.7990\n", 811 | "Epoch 161/200\n", 812 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5701 - accuracy: 0.7975\n", 813 | "Epoch 162/200\n", 814 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5630 - accuracy: 0.8012\n", 815 | "Epoch 163/200\n", 816 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.5662 - accuracy: 0.7998\n", 817 | "Epoch 164/200\n", 818 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5632 - accuracy: 0.8010\n", 819 | "Epoch 165/200\n", 820 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.5669 - accuracy: 0.8014\n", 821 | "Epoch 166/200\n", 822 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5641 - accuracy: 0.8015\n", 823 | "Epoch 167/200\n", 824 | "51240/51240 [==============================] - 57s 
1ms/step - loss: 0.5633 - accuracy: 0.8019\n", 825 | "Epoch 168/200\n", 826 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.5617 - accuracy: 0.8022\n", 827 | "Epoch 169/200\n", 828 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.5561 - accuracy: 0.8048\n", 829 | "Epoch 170/200\n", 830 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5597 - accuracy: 0.8032\n", 831 | "Epoch 171/200\n", 832 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5642 - accuracy: 0.8019\n", 833 | "Epoch 172/200\n", 834 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5607 - accuracy: 0.8034\n", 835 | "Epoch 173/200\n", 836 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.5547 - accuracy: 0.8044\n", 837 | "Epoch 174/200\n", 838 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5626 - accuracy: 0.8029\n", 839 | "Epoch 175/200\n", 840 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5510 - accuracy: 0.8049\n", 841 | "Epoch 176/200\n", 842 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5449 - accuracy: 0.8085\n", 843 | "Epoch 177/200\n", 844 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5520 - accuracy: 0.8065\n", 845 | "Epoch 178/200\n", 846 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.5394 - accuracy: 0.8086\n", 847 | "Epoch 179/200\n", 848 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5531 - accuracy: 0.8057\n", 849 | "Epoch 180/200\n", 850 | "51240/51240 [==============================] - 57s 1ms/step - loss: 0.5446 - accuracy: 0.8086\n", 851 | "Epoch 181/200\n", 852 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5403 - accuracy: 0.8096\n", 853 | "Epoch 182/200\n", 854 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5373 - accuracy: 0.8128\n", 855 | "Epoch 183/200\n", 856 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.5445 - accuracy: 0.8095\n", 857 | "Epoch 184/200\n", 858 | "51240/51240 [==============================] - 64s 1ms/step - loss: 0.5397 - accuracy: 0.8099\n", 859 | "Epoch 185/200\n", 860 | "51240/51240 [==============================] - 62s 1ms/step - loss: 0.5461 - accuracy: 0.8083\n", 861 | "Epoch 186/200\n", 862 | "51240/51240 [==============================] - 63s 1ms/step - loss: 0.5368 - accuracy: 0.8124\n", 863 | "Epoch 187/200\n", 864 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5395 - accuracy: 0.8115\n", 865 | "Epoch 188/200\n", 866 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5426 - accuracy: 0.8098\n", 867 | "Epoch 189/200\n", 868 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5356 - accuracy: 0.8126\n", 869 | "Epoch 190/200\n", 870 | "51240/51240 [==============================] - 60s 1ms/step - loss: 0.5385 - accuracy: 0.8123\n", 871 | "Epoch 191/200\n", 872 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5364 - accuracy: 0.8113\n", 873 | "Epoch 192/200\n", 874 | "51240/51240 [==============================] - 60s 1ms/step - loss: 0.5290 - accuracy: 0.8146\n", 875 | "Epoch 193/200\n", 876 | "51240/51240 [==============================] - 66s 1ms/step - loss: 0.5300 - accuracy: 0.8156\n", 877 | "Epoch 194/200\n", 878 | "51240/51240 [==============================] - 65s 1ms/step - loss: 0.5402 - accuracy: 
0.8114\n", 879 | "Epoch 195/200\n", 880 | "51240/51240 [==============================] - 64s 1ms/step - loss: 0.5303 - accuracy: 0.8148\n", 881 | "Epoch 196/200\n", 882 | "51240/51240 [==============================] - 61s 1ms/step - loss: 0.5335 - accuracy: 0.8129\n", 883 | "Epoch 197/200\n", 884 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5268 - accuracy: 0.8142\n", 885 | "Epoch 198/200\n", 886 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5274 - accuracy: 0.8154\n", 887 | "Epoch 199/200\n", 888 | "51240/51240 [==============================] - 58s 1ms/step - loss: 0.5279 - accuracy: 0.8161\n", 889 | "Epoch 200/200\n", 890 | "51240/51240 [==============================] - 59s 1ms/step - loss: 0.5205 - accuracy: 0.8173\n" 891 | ] 892 | }, 893 | { 894 | "data": { 895 | "text/plain": [ 896 | "" 897 | ] 898 | }, 899 | "execution_count": 76, 900 | "metadata": {}, 901 | "output_type": "execute_result" 902 | } 903 | ], 904 | "source": [ 905 | "model.fit(x_train, y_train,\n", 906 | " batch_size=batch_size,\n", 907 | " epochs=epochs,\n", 908 | " verbose=1)" 909 | ] 910 | }, 911 | { 912 | "cell_type": "code", 913 | "execution_count": 77, 914 | "metadata": {}, 915 | "outputs": [ 916 | { 917 | "name": "stdout", 918 | "output_type": "stream", 919 | "text": [ 920 | "7320/7320 [==============================] - 3s 420us/step\n", 921 | "Test loss: 0.5328557654867607\n", 922 | "Test accuracy: 0.8245901465415955\n" 923 | ] 924 | } 925 | ], 926 | "source": [ 927 | "score = model.evaluate(x_test, y_test, verbose=1)\n", 928 | "print('Test loss:', score[0])\n", 929 | "print('Test accuracy:', score[1])" 930 | ] 931 | }, 932 | { 933 | "cell_type": "code", 934 | "execution_count": null, 935 | "metadata": {}, 936 | "outputs": [], 937 | "source": [] 938 | } 939 | ], 940 | "metadata": { 941 | "kernelspec": { 942 | "display_name": "Python 3", 943 | "language": "python", 944 | "name": "python3" 945 | }, 946 | "language_info": { 947 | "codemirror_mode": { 948 | "name": "ipython", 949 | "version": 3 950 | }, 951 | "file_extension": ".py", 952 | "mimetype": "text/x-python", 953 | "name": "python", 954 | "nbconvert_exporter": "python", 955 | "pygments_lexer": "ipython3", 956 | "version": "3.7.3" 957 | } 958 | }, 959 | "nbformat": 4, 960 | "nbformat_minor": 2 961 | } 962 | --------------------------------------------------------------------------------