├── README.md ├── bestmodel ├── interface.py ├── model_training.ipynb └── requirements.txt /README.md: -------------------------------------------------------------------------------- 1 | # Handwritten-Digit-Recognition-using-Deep_Learning 2 | This project recognizes the handwritten numerical digits (0-9) that are drawn on the drawing window. 3 | 4 | # How to use the project 5 | Simply clone the repository, install all the packages in the requirements.txt file, and run the interface.py file using a Python interpreter. 6 | 7 | # Dataset 8 | The dataset that I used is MNIST as it has a good number of training examples that allowed me to build a powerful and accurate model. 9 | 10 | ![image](https://user-images.githubusercontent.com/73419491/211145461-cc2aee2e-e397-418c-9aea-26ab4c7923c6.png) 11 | 12 | # Interface 13 | - The interface is designed using pygame to create an interactive window on which the user can draw multiple digits one by one. 14 | - Pressing the key c clears the window. 15 | 16 | ![screenshot 1](https://user-images.githubusercontent.com/73419491/211146205-c206ea5f-f2ef-4e91-aa98-127c2d2a1a33.PNG) 17 | 18 | # Details on the model 19 | 1) The model is built using Convolutional Neural Networks with a total of 3 layers including the output layer and Dropout (to prevent overfitting). 20 | 2) For the hidden layers the activation used is 'ReLU' and for the output/classification the activation is 'softmax'. 21 | 3) The model is trained using the Keras Sequential API with a training accuracy of 99.84% and a testing accuracy of 98.98%. 22 | 4) It can classify most of the digits pretty accurately even with a few irregularities (as shown below). 
"""Digit Board — draw handwritten digits with the mouse and classify them.

A 640x480 pygame window acts as a canvas.  Strokes are recorded while a
mouse button is held; on release the bounding box of the stroke is
cropped, rescaled to the 28x28 MNIST input format and fed to the CNN
stored in ``bestmodel``.  The predicted label is rendered next to the
drawing.  Press ``c`` to clear the canvas; close the window to quit.
"""
import pygame, sys
import numpy as np
from pygame.locals import *
from keras.models import load_model
import cv2


WINDOWSIZEX = 640
WINDOWSIZEY = 480

BOUNDRYINC = 5                 # extra margin (px) around the drawn strokes when cropping
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
RED = (255, 0, 0)

IMAGESAVE = False              # set True to dump each cropped digit to disk for debugging

MODEL = load_model('bestmodel')

LABELS = {0: 'Zero', 1: 'One', 2: 'Two', 3: 'Three', 4: 'Four',
          5: 'Five', 6: 'Six', 7: 'Seven', 8: 'Eight', 9: 'Nine'}


# Initialize pygame
pygame.init()

FONT = pygame.font.Font('freesansbold.ttf', 18)

DISPLAYSURF = pygame.display.set_mode((WINDOWSIZEX, WINDOWSIZEY))

pygame.display.set_caption('Digit Board')

iswriting = False              # True while a mouse button is held down

number_xcord = []              # x coordinates of the stroke in progress
number_ycord = []              # y coordinates of the stroke in progress
image_cnt = 1                  # running counter for saved crop filenames
PREDICT = True

while True:

    for event in pygame.event.get():
        if event.type == QUIT:
            pygame.quit()
            sys.exit()

        if event.type == KEYDOWN:
            if event.key == K_c:
                # 'c' wipes the canvas
                DISPLAYSURF.fill(BLACK)

        if event.type == MOUSEMOTION and iswriting:
            xcord, ycord = event.pos
            pygame.draw.circle(DISPLAYSURF, WHITE, (xcord, ycord), 4, 0)

            number_xcord.append(xcord)
            number_ycord.append(ycord)

        if event.type == MOUSEBUTTONDOWN:
            iswriting = True

        if event.type == MOUSEBUTTONUP:
            iswriting = False

            # BUG FIX: a click without any mouse motion records no
            # coordinates; skip it instead of raising IndexError on
            # number_xcord[0].
            if number_xcord and number_ycord:
                number_xcord = sorted(number_xcord)
                number_ycord = sorted(number_ycord)

                # Bounding box of the stroke, padded by BOUNDRYINC and clamped
                # to the window.
                # BUG FIX: the y extent is clamped to WINDOWSIZEY (the
                # original clamped it to WINDOWSIZEX).
                rect_min_x = max(number_xcord[0] - BOUNDRYINC, 0)
                rect_max_x = min(WINDOWSIZEX, number_xcord[-1] + BOUNDRYINC)
                rect_min_y = max(number_ycord[0] - BOUNDRYINC, 0)
                rect_max_y = min(WINDOWSIZEY, number_ycord[-1] + BOUNDRYINC)

                number_xcord = []
                number_ycord = []

                # PixelArray is indexed (x, y); transpose to the conventional
                # (row, col) image layout before further processing.
                img_arr = np.array(pygame.PixelArray(DISPLAYSURF))[
                    rect_min_x:rect_max_x, rect_min_y:rect_max_y].T.astype(np.float32)

                if IMAGESAVE:
                    # BUG FIX: cv2.imwrite requires the image argument (the
                    # original call omitted it and would raise TypeError), and
                    # a unique per-capture filename so saves are not
                    # overwritten.
                    cv2.imwrite(f'image_{image_cnt}.png', img_arr)
                    image_cnt += 1

                if PREDICT:
                    # Shrink the crop, pad a black border to roughly centre the
                    # digit (MNIST digits are centred), then rescale to the
                    # model's 28x28 input and normalize to [0, 1].
                    image = cv2.resize(img_arr, (28, 28))
                    image = np.pad(image, (10, 10), 'constant', constant_values=0)
                    image = cv2.resize(image, (28, 28)) / 255

                    label = str(LABELS[np.argmax(MODEL.predict(image.reshape(1, 28, 28, 1)))])
                    textSurface = FONT.render(label, True, RED, WHITE)
                    textRecObj = textSurface.get_rect()
                    # Anchor the label at the bottom-left corner of the crop.
                    textRecObj.left, textRecObj.bottom = rect_min_x, rect_max_y
                    DISPLAYSURF.blit(textSurface, textRecObj)

    pygame.display.update()
from https://storage.googleapis.com/tensorflow/tf-keras-datasets/mnist.npz\n", 37 | "11490434/11490434 [==============================] - 1s 0us/step\n" 38 | ] 39 | } 40 | ], 41 | "source": [ 42 | "(x_train, y_train), (x_test, y_test) = mnist.load_data()" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 5, 48 | "id": "301b6c9c", 49 | "metadata": {}, 50 | "outputs": [ 51 | { 52 | "data": { 53 | "text/plain": [ 54 | "((60000, 28, 28), (10000, 28, 28), (60000,), (10000,))" 55 | ] 56 | }, 57 | "execution_count": 5, 58 | "metadata": {}, 59 | "output_type": "execute_result" 60 | } 61 | ], 62 | "source": [ 63 | "x_train.shape, x_test.shape, y_train.shape, y_test.shape\n" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": 8, 69 | "id": "025643bf", 70 | "metadata": {}, 71 | "outputs": [], 72 | "source": [ 73 | "def plot(i):\n", 74 | " plt.imshow(x_train[i] , cmap = 'binary')\n", 75 | " plt.title(y_train[i])\n", 76 | " plt.axis('off')\n", 77 | " plt.show()" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": 9, 83 | "id": "7c39cebd", 84 | "metadata": {}, 85 | "outputs": [ 86 | { 87 | "data": { 88 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYUAAAGZCAYAAABmNy2oAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/NK7nSAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAKGElEQVR4nO3cX2hX9R/H8c9sDMTIEkVoZTESFITyZtiFN10I0dW6yqiLSIykIsToZjcFXrSLgvJmEXijiIyu7KIg8Uaoi6T1Bw1x/dnFiuGfYYoZw9PdC37Sr/ic3M6+2+Nx/+K8EeHJ2RdOX9M0TQGAUsqqrg8AYOkQBQBCFAAIUQAgRAGAEAUAQhQACFEAIEQBgBAFVqzJycny1FNPlU2bNpXVq1eXdevWlccff7wcOXKk69OgM/1dHwBdmZubKw8++GDZvXt3GRwcLNevXy9Hjx4tzz//fPn555/L6Oho1yfCouvz7SP4Xzt27CgzMzNlenq661Ng0fnzEdxm/fr1pb/fSzQrk//5rHi3bt0qt27dKleuXCkTExPls88+K4cOHer6LOiEKLDi7du3r4yPj5dSShkYGCjvv/9+eemllzq+CrrhNwVWvOnp6TI7O1tmZ2fLiRMnyocffljeeeedcuDAga5Pg0UnCnCbl19+uXz00UdlZmambNiwoetzYFH5oRluMzw8XObn58uPP/7Y9Smw6EQBbnPq1KmyatWqMjQ01PUpsOj80MyKtXfv3nLPPfeU4eHhsnHjxnLx4sUyMTFRjh8/Xt544w1/OmJF8psCK9bhw4fL4cOHy7lz58rc3Fy5++67y6OPPlr27NlTnnvuua7Pg06IAgDhNwUAQhQACFEAIEQBgBAFAEIUAAhRACBEAYAQBQBCFAAIUQAgRAGAEAUAQhQACFEAIEQBgBAFAEIUAAhRACBEAYAQBQBCFAAIUQAgRAGAEAUAQhQACFEAIEQBgBAFAEIUAAhRACBEAYAQBQBCFAAIUQAgRAGAEAUAQhQACFEAIEQBgBAFAEIUAAhRACBEAYAQBQBCFAAIUQAgRAGAEAUAQhQACFEAIEQBgBAFAEIUAAhRACBEAYAQBQBCFAAIUQAg+rs+gJXj7NmzrXaffPJJ9WZ8fLx6Mzw8XL3Zvn179aat119/vXozMDBw5w9hWfOmAECIAgAhCgCEKAAQogBAiAIAIQoAhCgAEKIAQIgCACEKAIQoABB9TdM0XR9B72nzwbkDBw60eta1a9da7ZabkydPVm+eeOKJBbiE5cybAgAhCgCEKAAQogBAiAIAIQoAhCgAEKIAQIgCACEKAIQoABCiAED4IB6tXL58uXqzdevWVs+anZ1ttVtu7r333urN8ePHqze7du2q3rB8eFMAIEQBgBAFAEIUAAhRACBEAYAQBQBCFAAIUQAgRAGAEAUAQhQAiP6uD6A3rVu3rnrz1ltvtXrW/v37qzc3btyo3mzatKl6Mz09Xb1pa25urnrz6aefVm98EG9l86YAQIgCACEKAIQoABCiAECIAgAhCgCEKAAQogBAiAIAIQoAhCgAEKIAQPQ1TdN0fQT8k8cee6x6880331Rvtm3bVr35/vvvqzeLaWpqqnozNDS0AJfQK7wpABCiAECIAgAhCgCEKAAQogBAiAIAIQoAhCgAEKIAQIgCACEKAER/1wfAvxkdHa3eHDx4sHozOTlZvVnqbt682fUJ9BhvCgCEKAAQogBAiAIAIQoAhCgAEKIAQIgCACEKAIQoABCiAECIAgDR1zRN0/URcKf99ttv1Ztdu3ZVb7777rvqzWJ6+umnqzcff/zxAlxCr/CmAECIAgAhCgCEKAAQogBAiAIAIQoAhCgAEKIAQIgCACEKAIQoABD9XR8A/+bIkSPVm2+//bZ6s9Q/btfGzp07uz6BHuNNAYAQBQBCFAAIUQAgRAGAEAUAQhQACFEAIEQBgBAFAEIUAAhRACBEAYDoa5qm6foIes8PP/xQvRkZGWn1rAsXLlR
v5ufnWz1ruZmamqreDA0NLcAl9ApvCgCEKAAQogBAiAIAIQoAhCgAEKIAQIgCACEKAIQoABCiAECIAgDR3/UB9KZz585Vb3766adWz/Jxu/bee++96s0HH3ywAJfQK7wpABCiAECIAgAhCgCEKAAQogBAiAIAIQoAhCgAEKIAQIgCACEKAIQP4tHKyMhI9WZsbKzVs958883qzR9//NHqWcvNzMxM1yfQY7wpABCiAECIAgAhCgCEKAAQogBAiAIAIQoAhCgAEKIAQIgCACEKAIQP4rFoXnvttVa7zZs3V2/m5uZaPavW/Px89eaVV15p9ayrV6+22kENbwoAhCgAEKIAQIgCACEKAIQoABCiAECIAgAhCgCEKAAQogBAiAIA4YN4LHlPPvlk1yf8X03TVG8uXLjQ6llvv/129WZycrJ688svv1RvHnrooeoNS5M3BQBCFAAIUQAgRAGAEAUAQhQACFEAIEQBgBAFAEIUAAhRACBEAYAQBQDCV1LhP/jzzz+rN22+dtrWwMBA9eauu+5agEvoFd4UAAhRACBEAYAQBQBCFAAIUQAgRAGAEAUAQhQACFEAIEQBgBAFAMIH8eA/GB0d7fqEf/Tiiy9Wbx544IEFuIRe4U0BgBAFAEIUAAhRACBEAYAQBQBCFAAIUQAgRAGAEAUAQhQACFEAIPqapmm6PmK5u3TpUvXmhRdeaPWsZ555pnrz7LPPtnrWcvPrr79Wb7Zs2VK9uXr1avWmrampqerN0NDQAlxCr/CmAECIAgAhCgCEKAAQogBAiAIAIQoAhCgAEKIAQIgCACEKAIQoABD9XR+wErz66qvVmxMnTrR61vnz56s3g4ODi7J55JFHqjellHLmzJnqTZt/h7GxserNYn7cbv/+/dWb+++/fwEuYTnzpgBAiAIAIQoAhCgAEKIAQIgCACEKAIQoABCiAECIAgAhCgCEKAAQfU3TNF0fsdx98cUX1Zs2Hz8rpZQvv/yy1a7Www8/XL3ZunVrq2edPn26evP777+3etZi2LJlS6vdV199Vb1Zs2ZNq2excnlTACBEAYAQBQBCFAAIUQAgRAGAEAUAQhQACFEAIEQBgBAFAEIUAAhRACB8JXWJavuV1M2bN1dv9u3b1+pZlHLfffdVby5fvrwAl8Cd4U0BgBAFAEIUAAhRACBEAYAQBQBCFAAIUQAgRAGAEAUAQhQACFEAIPq7PoC/9+6777ba3bx5s3pz7dq1Vs+q9fXXX7faHTt27A5f8vfWrl1bvfn8888X4BLojjcFAEIUAAhRACBEAYAQBQBCFAAIUQAgRAGAEAUAQhQACFEAIEQBgOhrmqbp+ggAlgZvCgCEKAAQogBAiAIAIQoAhCgAEKIAQIgCACEKAIQoABCiAECIAgAhCgCEKAAQogBAiAIAIQoAhCgAEKIAQIgCACEKAIQoABCiAECIAgAhCgCEKAAQogBAiAIAIQoAhCgAEKIAQIgCACEKAIQoABCiAECIAgAhCgCEKAAQogBAiAIAIQoAhCgAEKIAQIgCACEKAIQoABCiAECIAgAhCgCEKAAQogBAiAIAIQoAhCgAEKIAQIgCACEKAIQoABCiAECIAgAhCgCEKAAQfwHj1HUHgWRTVAAAAABJRU5ErkJggg==\n", 89 | "text/plain": [ 90 | "
" 91 | ] 92 | }, 93 | "metadata": {}, 94 | "output_type": "display_data" 95 | } 96 | ], 97 | "source": [ 98 | "plot(10)" 99 | ] 100 | }, 101 | { 102 | "cell_type": "code", 103 | "execution_count": 10, 104 | "id": "09618a19", 105 | "metadata": {}, 106 | "outputs": [], 107 | "source": [ 108 | "#preprocessing" 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": 11, 114 | "id": "1cbc3461", 115 | "metadata": {}, 116 | "outputs": [], 117 | "source": [ 118 | "#normalization\n", 119 | "x_train = x_train.astype(np.float32)/255\n", 120 | "x_test = x_test.astype(np.float32)/255" 121 | ] 122 | }, 123 | { 124 | "cell_type": "code", 125 | "execution_count": 12, 126 | "id": "abc5bfd4", 127 | "metadata": {}, 128 | "outputs": [ 129 | { 130 | "data": { 131 | "text/plain": [ 132 | "(60000, 28, 28)" 133 | ] 134 | }, 135 | "execution_count": 12, 136 | "metadata": {}, 137 | "output_type": "execute_result" 138 | } 139 | ], 140 | "source": [ 141 | "x_train.shape\n" 142 | ] 143 | }, 144 | { 145 | "cell_type": "code", 146 | "execution_count": 13, 147 | "id": "5fb658e0", 148 | "metadata": {}, 149 | "outputs": [], 150 | "source": [ 151 | "#reshape to (28,28,1) for input\n", 152 | "x_train = np.expand_dims(x_train, -1)\n", 153 | "x_test = np.expand_dims(x_test, -1)" 154 | ] 155 | }, 156 | { 157 | "cell_type": "code", 158 | "execution_count": 14, 159 | "id": "18293485", 160 | "metadata": {}, 161 | "outputs": [ 162 | { 163 | "data": { 164 | "text/plain": [ 165 | "(60000, 28, 28, 1)" 166 | ] 167 | }, 168 | "execution_count": 14, 169 | "metadata": {}, 170 | "output_type": "execute_result" 171 | } 172 | ], 173 | "source": [ 174 | "x_train.shape\n" 175 | ] 176 | }, 177 | { 178 | "cell_type": "code", 179 | "execution_count": 16, 180 | "id": "87e66af8", 181 | "metadata": {}, 182 | "outputs": [], 183 | "source": [ 184 | "#convert classes to one hot vectors\n", 185 | "y_train = keras.utils.to_categorical(y_train)\n", 186 | "y_test = keras.utils.to_categorical(y_test)" 187 | ] 
188 | }, 189 | { 190 | "cell_type": "code", 191 | "execution_count": 17, 192 | "id": "74045121", 193 | "metadata": {}, 194 | "outputs": [ 195 | { 196 | "data": { 197 | "text/plain": [ 198 | "array([[0., 0., 0., ..., 0., 0., 0.],\n", 199 | " [1., 0., 0., ..., 0., 0., 0.],\n", 200 | " [0., 0., 0., ..., 0., 0., 0.],\n", 201 | " ...,\n", 202 | " [0., 0., 0., ..., 0., 0., 0.],\n", 203 | " [0., 0., 0., ..., 0., 0., 0.],\n", 204 | " [0., 0., 0., ..., 0., 1., 0.]], dtype=float32)" 205 | ] 206 | }, 207 | "execution_count": 17, 208 | "metadata": {}, 209 | "output_type": "execute_result" 210 | } 211 | ], 212 | "source": [ 213 | "y_train" 214 | ] 215 | }, 216 | { 217 | "cell_type": "code", 218 | "execution_count": 18, 219 | "id": "317a3d8d", 220 | "metadata": {}, 221 | "outputs": [], 222 | "source": [ 223 | "from keras.models import Sequential\n", 224 | "from keras.layers import Dense, Conv2D, MaxPool2D, Flatten, Dropout" 225 | ] 226 | }, 227 | { 228 | "cell_type": "code", 229 | "execution_count": 19, 230 | "id": "3005343d", 231 | "metadata": {}, 232 | "outputs": [], 233 | "source": [ 234 | "#model building\n", 235 | "model = Sequential()\n", 236 | "\n", 237 | "model.add(Conv2D(32, (3,3), input_shape=(28,28,1), activation = 'relu'))\n", 238 | "model.add(MaxPool2D((2,2)))\n", 239 | "\n", 240 | "model.add(Conv2D(64, (3,3), activation = 'relu'))\n", 241 | "model.add(MaxPool2D((2,2)))\n", 242 | "\n", 243 | "model.add(Flatten())\n", 244 | "\n", 245 | "model.add(Dropout(0.25)) #prevent overfitting\n", 246 | "\n", 247 | "model.add(Dense(10, activation = 'softmax')) #for classification" 248 | ] 249 | }, 250 | { 251 | "cell_type": "code", 252 | "execution_count": 20, 253 | "id": "54017be6", 254 | "metadata": {}, 255 | "outputs": [ 256 | { 257 | "name": "stdout", 258 | "output_type": "stream", 259 | "text": [ 260 | "Model: \"sequential\"\n", 261 | "_________________________________________________________________\n", 262 | " Layer (type) Output Shape Param # \n", 263 | 
"=================================================================\n", 264 | " conv2d (Conv2D) (None, 26, 26, 32) 320 \n", 265 | " \n", 266 | " max_pooling2d (MaxPooling2D (None, 13, 13, 32) 0 \n", 267 | " ) \n", 268 | " \n", 269 | " conv2d_1 (Conv2D) (None, 11, 11, 64) 18496 \n", 270 | " \n", 271 | " max_pooling2d_1 (MaxPooling (None, 5, 5, 64) 0 \n", 272 | " 2D) \n", 273 | " \n", 274 | " flatten (Flatten) (None, 1600) 0 \n", 275 | " \n", 276 | " dropout (Dropout) (None, 1600) 0 \n", 277 | " \n", 278 | " dense (Dense) (None, 10) 16010 \n", 279 | " \n", 280 | "=================================================================\n", 281 | "Total params: 34,826\n", 282 | "Trainable params: 34,826\n", 283 | "Non-trainable params: 0\n", 284 | "_________________________________________________________________\n" 285 | ] 286 | } 287 | ], 288 | "source": [ 289 | "model.summary()" 290 | ] 291 | }, 292 | { 293 | "cell_type": "code", 294 | "execution_count": 21, 295 | "id": "dbe3752b", 296 | "metadata": {}, 297 | "outputs": [], 298 | "source": [ 299 | "model.compile(optimizer= 'adam', loss = keras.losses.categorical_crossentropy, metrics = ['accuracy'])" 300 | ] 301 | }, 302 | { 303 | "cell_type": "code", 304 | "execution_count": 22, 305 | "id": "b85518fa", 306 | "metadata": {}, 307 | "outputs": [], 308 | "source": [ 309 | "#Callbacks\n", 310 | "from keras.callbacks import EarlyStopping, ModelCheckpoint\n", 311 | "\n", 312 | "#EarlyStopping\n", 313 | "es = EarlyStopping(monitor='val_acc', min_delta= 0.01, patience=4, verbose=1)\n", 314 | "\n", 315 | "#ModelCheckPoint\n", 316 | "mc = ModelCheckpoint('./bestmodel.h5', monitor='val_acc', verbose=1, save_best_only=True)\n", 317 | "\n", 318 | "cb = [es,mc]" 319 | ] 320 | }, 321 | { 322 | "cell_type": "code", 323 | "execution_count": 23, 324 | "id": "12c7b206", 325 | "metadata": { 326 | "scrolled": true 327 | }, 328 | "outputs": [ 329 | { 330 | "name": "stdout", 331 | "output_type": "stream", 332 | "text": [ 333 | "Epoch 1/50\n", 334 
| "1313/1313 [==============================] - 16s 11ms/step - loss: 0.2220 - accuracy: 0.9311 - val_loss: 0.0840 - val_accuracy: 0.9741\n", 335 | "Epoch 2/50\n", 336 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0792 - accuracy: 0.9758 - val_loss: 0.0606 - val_accuracy: 0.9817\n", 337 | "Epoch 3/50\n", 338 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0588 - accuracy: 0.9818 - val_loss: 0.0533 - val_accuracy: 0.9836\n", 339 | "Epoch 4/50\n", 340 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0493 - accuracy: 0.9848 - val_loss: 0.0504 - val_accuracy: 0.9848\n", 341 | "Epoch 5/50\n", 342 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0397 - accuracy: 0.9874 - val_loss: 0.0518 - val_accuracy: 0.9843\n", 343 | "Epoch 6/50\n", 344 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0347 - accuracy: 0.9890 - val_loss: 0.0411 - val_accuracy: 0.9878\n", 345 | "Epoch 7/50\n", 346 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0297 - accuracy: 0.9903 - val_loss: 0.0395 - val_accuracy: 0.9887\n", 347 | "Epoch 8/50\n", 348 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0267 - accuracy: 0.9915 - val_loss: 0.0430 - val_accuracy: 0.9879\n", 349 | "Epoch 9/50\n", 350 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0237 - accuracy: 0.9922 - val_loss: 0.0410 - val_accuracy: 0.9884\n", 351 | "Epoch 10/50\n", 352 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0209 - accuracy: 0.9927 - val_loss: 0.0456 - val_accuracy: 0.9882\n", 353 | "Epoch 11/50\n", 354 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0186 - accuracy: 0.9937 - val_loss: 0.0392 - val_accuracy: 0.9891\n", 355 | "Epoch 12/50\n", 356 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0173 - accuracy: 0.9939 - val_loss: 0.0368 - val_accuracy: 0.9895\n", 357 | 
"Epoch 13/50\n", 358 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0166 - accuracy: 0.9944 - val_loss: 0.0411 - val_accuracy: 0.9894\n", 359 | "Epoch 14/50\n", 360 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0133 - accuracy: 0.9955 - val_loss: 0.0480 - val_accuracy: 0.9879\n", 361 | "Epoch 15/50\n", 362 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0146 - accuracy: 0.9951 - val_loss: 0.0423 - val_accuracy: 0.9892\n", 363 | "Epoch 16/50\n", 364 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0121 - accuracy: 0.9955 - val_loss: 0.0420 - val_accuracy: 0.9898\n", 365 | "Epoch 17/50\n", 366 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0123 - accuracy: 0.9958 - val_loss: 0.0380 - val_accuracy: 0.9906\n", 367 | "Epoch 18/50\n", 368 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0120 - accuracy: 0.9958 - val_loss: 0.0482 - val_accuracy: 0.9883\n", 369 | "Epoch 19/50\n", 370 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0112 - accuracy: 0.9960 - val_loss: 0.0451 - val_accuracy: 0.9906\n", 371 | "Epoch 20/50\n", 372 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0103 - accuracy: 0.9962 - val_loss: 0.0456 - val_accuracy: 0.9891\n", 373 | "Epoch 21/50\n", 374 | "1313/1313 [==============================] - 14s 10ms/step - loss: 0.0086 - accuracy: 0.9970 - val_loss: 0.0455 - val_accuracy: 0.9898\n", 375 | "Epoch 22/50\n", 376 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0098 - accuracy: 0.9965 - val_loss: 0.0472 - val_accuracy: 0.9899\n", 377 | "Epoch 23/50\n", 378 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0094 - accuracy: 0.9969 - val_loss: 0.0429 - val_accuracy: 0.9903\n", 379 | "Epoch 24/50\n", 380 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0083 - accuracy: 0.9969 - val_loss: 0.0471 - 
val_accuracy: 0.9898\n", 381 | "Epoch 25/50\n", 382 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0093 - accuracy: 0.9969 - val_loss: 0.0447 - val_accuracy: 0.9907\n", 383 | "Epoch 26/50\n", 384 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0085 - accuracy: 0.9970 - val_loss: 0.0474 - val_accuracy: 0.9899\n", 385 | "Epoch 27/50\n", 386 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0086 - accuracy: 0.9968 - val_loss: 0.0443 - val_accuracy: 0.9903\n", 387 | "Epoch 28/50\n", 388 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0085 - accuracy: 0.9971 - val_loss: 0.0445 - val_accuracy: 0.9900\n", 389 | "Epoch 29/50\n", 390 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0088 - accuracy: 0.9972 - val_loss: 0.0502 - val_accuracy: 0.9901\n", 391 | "Epoch 30/50\n", 392 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0065 - accuracy: 0.9979 - val_loss: 0.0497 - val_accuracy: 0.9900\n", 393 | "Epoch 31/50\n", 394 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0080 - accuracy: 0.9972 - val_loss: 0.0493 - val_accuracy: 0.9892\n", 395 | "Epoch 32/50\n", 396 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0075 - accuracy: 0.9975 - val_loss: 0.0547 - val_accuracy: 0.9893\n", 397 | "Epoch 33/50\n", 398 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0055 - accuracy: 0.9980 - val_loss: 0.0479 - val_accuracy: 0.9902\n", 399 | "Epoch 34/50\n", 400 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0065 - accuracy: 0.9977 - val_loss: 0.0535 - val_accuracy: 0.9896\n", 401 | "Epoch 35/50\n", 402 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0058 - accuracy: 0.9981 - val_loss: 0.0518 - val_accuracy: 0.9906\n", 403 | "Epoch 36/50\n", 404 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0069 - accuracy: 
0.9976 - val_loss: 0.0529 - val_accuracy: 0.9902\n", 405 | "Epoch 37/50\n", 406 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0066 - accuracy: 0.9980 - val_loss: 0.0514 - val_accuracy: 0.9902\n", 407 | "Epoch 38/50\n", 408 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0066 - accuracy: 0.9976 - val_loss: 0.0541 - val_accuracy: 0.9903\n", 409 | "Epoch 39/50\n", 410 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0061 - accuracy: 0.9979 - val_loss: 0.0554 - val_accuracy: 0.9907\n", 411 | "Epoch 40/50\n", 412 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0063 - accuracy: 0.9979 - val_loss: 0.0553 - val_accuracy: 0.9907\n", 413 | "Epoch 41/50\n", 414 | "1313/1313 [==============================] - 14s 10ms/step - loss: 0.0072 - accuracy: 0.9977 - val_loss: 0.0559 - val_accuracy: 0.9902\n", 415 | "Epoch 42/50\n", 416 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0048 - accuracy: 0.9985 - val_loss: 0.0551 - val_accuracy: 0.9907\n", 417 | "Epoch 43/50\n", 418 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0060 - accuracy: 0.9979 - val_loss: 0.0493 - val_accuracy: 0.9906\n", 419 | "Epoch 44/50\n", 420 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0062 - accuracy: 0.9978 - val_loss: 0.0548 - val_accuracy: 0.9901\n", 421 | "Epoch 45/50\n", 422 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0066 - accuracy: 0.9980 - val_loss: 0.0495 - val_accuracy: 0.9903\n", 423 | "Epoch 46/50\n", 424 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0043 - accuracy: 0.9985 - val_loss: 0.0513 - val_accuracy: 0.9909\n", 425 | "Epoch 47/50\n", 426 | "1313/1313 [==============================] - 13s 10ms/step - loss: 0.0055 - accuracy: 0.9982 - val_loss: 0.0605 - val_accuracy: 0.9909\n", 427 | "Epoch 48/50\n", 428 | "1313/1313 [==============================] - 15s 
11ms/step - loss: 0.0070 - accuracy: 0.9976 - val_loss: 0.0503 - val_accuracy: 0.9909\n", 429 | "Epoch 49/50\n", 430 | "1313/1313 [==============================] - 15s 12ms/step - loss: 0.0050 - accuracy: 0.9983 - val_loss: 0.0555 - val_accuracy: 0.9904\n", 431 | "Epoch 50/50\n", 432 | "1313/1313 [==============================] - 15s 12ms/step - loss: 0.0055 - accuracy: 0.9984 - val_loss: 0.0553 - val_accuracy: 0.9899\n" 433 | ] 434 | } 435 | ], 436 | "source": [ 437 | "#Model Training\n", 438 | "\n", 439 | "his = model.fit(x_train, y_train, epochs= 50, validation_split= 0.3)" 440 | ] 441 | }, 442 | { 443 | "cell_type": "code", 444 | "execution_count": 28, 445 | "id": "af916ecc", 446 | "metadata": {}, 447 | "outputs": [], 448 | "source": [ 449 | "#save model\n", 450 | "model.save('bestmodel', save_format='h5')\n", 451 | "\n", 452 | "#load model\n", 453 | "model_S = keras.models.load_model('D://super ultimate pro//bestmodel')" 454 | ] 455 | }, 456 | { 457 | "cell_type": "code", 458 | "execution_count": 29, 459 | "id": "cd962aef", 460 | "metadata": {}, 461 | "outputs": [ 462 | { 463 | "name": "stdout", 464 | "output_type": "stream", 465 | "text": [ 466 | "313/313 [==============================] - 1s 3ms/step - loss: 0.0504 - accuracy: 0.9899\n", 467 | "Model Accuracy = 0.9898999929428101\n" 468 | ] 469 | } 470 | ], 471 | "source": [ 472 | "score = model_S.evaluate(x_test, y_test)\n", 473 | "print(f'Model Accuracy = {score[1]}') #score[0] = loss, score[1] = accuracy\n" 474 | ] 475 | }, 476 | { 477 | "cell_type": "code", 478 | "execution_count": null, 479 | "id": "b2ef15a6", 480 | "metadata": {}, 481 | "outputs": [], 482 | "source": [] 483 | } 484 | ], 485 | "metadata": { 486 | "kernelspec": { 487 | "display_name": "Python 3 (ipykernel)", 488 | "language": "python", 489 | "name": "python3" 490 | }, 491 | "language_info": { 492 | "codemirror_mode": { 493 | "name": "ipython", 494 | "version": 3 495 | }, 496 | "file_extension": ".py", 497 | "mimetype": "text/x-python", 
498 | "name": "python", 499 | "nbconvert_exporter": "python", 500 | "pygments_lexer": "ipython3", 501 | "version": "3.9.13" 502 | } 503 | }, 504 | "nbformat": 4, 505 | "nbformat_minor": 5 506 | } 507 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | opencv-python 3 | pygame 4 | tensorflow 5 | keras 6 | --------------------------------------------------------------------------------