├── Emotion_Anaysis.ipynb ├── example_submission.csv ├── fer.h5 ├── fer.json ├── fer2013.bib ├── train.py └── videoTester.py /Emotion_Anaysis.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "name": "Emotion_Anaysis.ipynb", 7 | "version": "0.3.2", 8 | "provenance": [], 9 | "collapsed_sections": [] 10 | }, 11 | "kernelspec": { 12 | "name": "python3", 13 | "display_name": "Python 3" 14 | }, 15 | "accelerator": "GPU" 16 | }, 17 | "cells": [ 18 | { 19 | "cell_type": "code", 20 | "metadata": { 21 | "id": "kN1sqLpV1bqR", 22 | "colab_type": "code", 23 | "outputId": "81341a62-f37d-4f3a-df18-77cb58fd8dd7", 24 | "colab": { 25 | "base_uri": "https://localhost:8080/", 26 | "height": 122 27 | } 28 | }, 29 | "source": [ 30 | "from google.colab import drive\n", 31 | "drive.mount('/content/gdrive')" 32 | ], 33 | "execution_count": 0, 34 | "outputs": [ 35 | { 36 | "output_type": "stream", 37 | "text": [ 38 | "Go to this URL in a browser: https://accounts.google.com/o/oauth2\n", 39 | "Enter your authorization code:\n", 40 | "··········\n", 41 | "Mounted at /content/gdrive\n" 42 | ], 43 | "name": "stdout" 44 | } 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "metadata": { 50 | "id": "jbrkxkK1LvY0", 51 | "colab_type": "code", 52 | "outputId": "023e8d3e-4336-4388-9f9f-0887f8ecb6bc", 53 | "colab": { 54 | "base_uri": "https://localhost:8080/", 55 | "height": 6562 56 | } 57 | }, 58 | "source": [ 59 | "import sys, os\n", 60 | "import pandas as pd\n", 61 | "import numpy as np\n", 62 | "\n", 63 | "from keras.models import Sequential\n", 64 | "from keras.layers import Dense, Dropout, Activation, Flatten\n", 65 | "from keras.layers import Conv2D, MaxPooling2D, BatchNormalization,AveragePooling2D\n", 66 | "from keras.losses import categorical_crossentropy\n", 67 | "from keras.optimizers import Adam\n", 68 | "from keras.regularizers import l2\n", 69 | "from keras.utils import np_utils\n", 70 | "# pd.set_option('display.max_rows', 500)\n", 71 | "# pd.set_option('display.max_columns', 500)\n", 72 | "# pd.set_option('display.width', 1000)\n", 73 | "\n", 74 | "df=pd.read_csv('gdrive/My Drive/fer2013.csv')\n", 75 | "\n", 76 | "# print(df.info())\n", 77 | "# print(df[\"Usage\"].value_counts())\n", 78 | "\n", 79 | "# print(df.head())\n", 80 | "X_train,train_y,X_test,test_y=[],[],[],[]\n", 81 | "\n", 82 | "for index, row in df.iterrows():\n", 83 | " val=row['pixels'].split(\" \")\n", 84 | " try:\n", 85 | " if 'Training' in row['Usage']:\n", 86 | " X_train.append(np.array(val,'float32'))\n", 87 | " train_y.append(row['emotion'])\n", 88 | " elif 'PublicTest' in row['Usage']:\n", 89 | " X_test.append(np.array(val,'float32'))\n", 90 | " test_y.append(row['emotion'])\n", 91 | " except Exception:\n", 92 | " print(f\"error occurred at index: {index} and row: {row}\")\n", 93 | "\n", 94 | "\n", 95 | "num_features = 64\n", 96 | "num_labels = 7\n", 97 | "batch_size = 64\n", 98 | "epochs = 200\n", 99 | "width, height = 48, 48\n", 100 | "\n", 101 | "\n", 102 | "X_train = np.array(X_train,'float32')\n", 103 | "train_y = np.array(train_y,'float32')\n", 104 | "X_test = np.array(X_test,'float32')\n", 105 | "test_y = np.array(test_y,'float32')\n", 106 | "\n", 107 | "train_y=np_utils.to_categorical(train_y, num_classes=num_labels)\n", 108 | "test_y=np_utils.to_categorical(test_y, num_classes=num_labels)\n", 109 | "\n", 110 | "# standardizing the data\n", 111 | "# (zero mean, unit variance per pixel)\n", 112 | "X_train -= np.mean(X_train, 
axis=0)\n", 113 | "X_train /= np.std(X_train, axis=0)\n", 114 | "\n", 115 | "X_test -= np.mean(X_test, axis=0)\n", 116 | "X_test /= np.std(X_test, axis=0)\n", 117 | "\n", 118 | "X_train = X_train.reshape(X_train.shape[0], 48, 48, 1)\n", 119 | "\n", 120 | "X_test = X_test.reshape(X_test.shape[0], 48, 48, 1)\n", 121 | "\n", 122 | "# print(f\"shape:{X_train.shape}\")\n", 123 | "## Designing the CNN\n", 124 | "# 1st convolution layer\n", 125 | "model = Sequential()\n", 126 | "\n", 127 | "model.add(Conv2D(64, kernel_size=(3, 3), activation='relu', input_shape=(X_train.shape[1:])))\n", 128 | "model.add(Conv2D(64, kernel_size=(3, 3), activation='relu'))\n", 129 | "# model.add(BatchNormalization())\n", 130 | "model.add(MaxPooling2D(pool_size=(2,2), strides=(2, 2)))\n", 131 | "model.add(Dropout(0.5))\n", 132 | "\n", 133 | "# 2nd convolution layer\n", 134 | "model.add(Conv2D(64, (3, 3), activation='relu'))\n", 135 | "model.add(Conv2D(64, (3, 3), activation='relu'))\n", 136 | "# model.add(BatchNormalization())\n", 137 | "model.add(MaxPooling2D(pool_size=(2,2), strides=(2, 2)))\n", 138 | "model.add(Dropout(0.5))\n", 139 | "\n", 140 | "# 3rd convolution layer\n", 141 | "model.add(Conv2D(128, (3, 3), activation='relu'))\n", 142 | "model.add(Conv2D(128, (3, 3), activation='relu'))\n", 143 | "# model.add(BatchNormalization())\n", 144 | "model.add(MaxPooling2D(pool_size=(2,2), strides=(2, 2)))\n", 145 | "\n", 146 | "model.add(Flatten())\n", 147 | "\n", 148 | "# fully connected layers\n", 149 | "model.add(Dense(1024, activation='relu'))\n", 150 | "model.add(Dropout(0.2))\n", 151 | "model.add(Dense(1024, activation='relu'))\n", 152 | "model.add(Dropout(0.2))\n", 153 | "\n", 154 | "model.add(Dense(num_labels, activation='softmax'))\n", 155 | "\n", 156 | "# model.summary()\n", 157 | "\n", 158 | "# Compiling the model\n", 159 | "model.compile(loss=categorical_crossentropy,\n", 160 | " optimizer=Adam(),\n", 161 | " metrics=['accuracy'])\n", 162 | "\n", 163 | "# Training the model\n", 164 | "model.fit(X_train, train_y,\n", 165 | " batch_size=batch_size,\n", 166 | " epochs=epochs,\n", 167 | " verbose=1,\n", 168 | " validation_data=(X_test, test_y),\n", 169 | " shuffle=True)\n", 170 | "\n", 171 | "\n", 172 | "# Saving the model architecture and weights for later use\n", 173 | "fer_json = model.to_json()\n", 174 | "with open(\"fer.json\", \"w\") as json_file:\n", 175 | " json_file.write(fer_json)\n", 176 | "model.save_weights(\"fer.h5\")\n", 177 | "\n", 178 | "\n" 179 | ], 180 | "execution_count": 0, 181 | "outputs": [ 182 | { 183 | "output_type": "stream", 184 | "text": [ 185 | "Train on 28709 samples, validate on 3589 samples\n", 186 | "Epoch 1/200\n", 187 | "28709/28709 [==============================] - 11s 369us/step - loss: 1.7274 - acc: 0.2937 - val_loss: 1.5473 - val_acc: 0.3862\n", 188 | "Epoch 2/200\n", 189 | "28709/28709 [==============================] - 9s 328us/step - loss: 1.5182 - acc: 0.4059 - val_loss: 1.4057 - val_acc: 0.4377\n", 190 | "Epoch 3/200\n", 191 | "28709/28709 [==============================] - 9s 330us/step - loss: 1.4109 - acc: 0.4553 - val_loss: 1.3337 - val_acc: 0.4806\n", 192 | "Epoch 4/200\n", 193 | "28709/28709 [==============================] - 9s 329us/step - loss: 1.3445 - acc: 0.4830 - val_loss: 1.2841 - val_acc: 0.4999\n", 194 | "Epoch 5/200\n", 195 | "28709/28709 [==============================] - 9s 330us/step - loss: 1.3077 - acc: 0.4946 - val_loss: 1.2569 - val_acc: 0.5121\n", 196 | "Epoch 6/200\n", 197 | "28709/28709 [==============================] - 10s 331us/step - loss: 1.2674 - acc: 
0.5167 - val_loss: 1.2304 - val_acc: 0.5252\n", 198 | "Epoch 7/200\n", 199 | "28709/28709 [==============================] - 9s 330us/step - loss: 1.2401 - acc: 0.5230 - val_loss: 1.2394 - val_acc: 0.5244\n", 200 | "Epoch 8/200\n", 201 | "28709/28709 [==============================] - 9s 330us/step - loss: 1.2177 - acc: 0.5355 - val_loss: 1.2105 - val_acc: 0.5386\n", 202 | "Epoch 9/200\n", 203 | "28709/28709 [==============================] - 9s 330us/step - loss: 1.1868 - acc: 0.5462 - val_loss: 1.1963 - val_acc: 0.5400\n", 204 | "Epoch 10/200\n", 205 | "28709/28709 [==============================] - 10s 331us/step - loss: 1.1722 - acc: 0.5528 - val_loss: 1.1963 - val_acc: 0.5411\n", 206 | "Epoch 11/200\n", 207 | "28709/28709 [==============================] - 9s 330us/step - loss: 1.1479 - acc: 0.5605 - val_loss: 1.1797 - val_acc: 0.5489\n", 208 | "Epoch 12/200\n", 209 | "28709/28709 [==============================] - 10s 332us/step - loss: 1.1372 - acc: 0.5668 - val_loss: 1.1629 - val_acc: 0.5464\n", 210 | "Epoch 13/200\n", 211 | "28709/28709 [==============================] - 10s 332us/step - loss: 1.1145 - acc: 0.5729 - val_loss: 1.1523 - val_acc: 0.5603\n", 212 | "Epoch 14/200\n", 213 | "28709/28709 [==============================] - 9s 331us/step - loss: 1.0974 - acc: 0.5786 - val_loss: 1.1678 - val_acc: 0.5500\n", 214 | "Epoch 15/200\n", 215 | "28709/28709 [==============================] - 10s 331us/step - loss: 1.0796 - acc: 0.5877 - val_loss: 1.1758 - val_acc: 0.5492\n", 216 | "Epoch 16/200\n", 217 | "28709/28709 [==============================] - 9s 330us/step - loss: 1.0705 - acc: 0.5898 - val_loss: 1.1373 - val_acc: 0.5748\n", 218 | "Epoch 17/200\n", 219 | "28709/28709 [==============================] - 9s 331us/step - loss: 1.0452 - acc: 0.5995 - val_loss: 1.1369 - val_acc: 0.5782\n", 220 | "Epoch 18/200\n", 221 | "28709/28709 [==============================] - 9s 331us/step - loss: 1.0413 - acc: 0.6012 - val_loss: 1.1479 - val_acc: 0.5692\n", 222 | "Epoch 19/200\n", 223 | "28709/28709 [==============================] - 10s 332us/step - loss: 1.0221 - acc: 0.6118 - val_loss: 1.1457 - val_acc: 0.5709\n", 224 | "Epoch 20/200\n", 225 | "28709/28709 [==============================] - 10s 332us/step - loss: 1.0037 - acc: 0.6189 - val_loss: 1.1904 - val_acc: 0.5539\n", 226 | "Epoch 21/200\n", 227 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.9915 - acc: 0.6260 - val_loss: 1.1616 - val_acc: 0.5634\n", 228 | "Epoch 22/200\n", 229 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.9759 - acc: 0.6304 - val_loss: 1.1437 - val_acc: 0.5729\n", 230 | "Epoch 23/200\n", 231 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.9606 - acc: 0.6351 - val_loss: 1.1516 - val_acc: 0.5759\n", 232 | "Epoch 24/200\n", 233 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.9515 - acc: 0.6381 - val_loss: 1.1345 - val_acc: 0.5768\n", 234 | "Epoch 25/200\n", 235 | "28709/28709 [==============================] - 10s 333us/step - loss: 0.9292 - acc: 0.6441 - val_loss: 1.1738 - val_acc: 0.5662\n", 236 | "Epoch 26/200\n", 237 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.9239 - acc: 0.6481 - val_loss: 1.1826 - val_acc: 0.5790\n", 238 | "Epoch 27/200\n", 239 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.9038 - acc: 0.6580 - val_loss: 1.2064 - val_acc: 0.5687\n", 240 | "Epoch 28/200\n", 241 | "28709/28709 [==============================] - 10s 331us/step - 
loss: 0.8991 - acc: 0.6570 - val_loss: 1.1781 - val_acc: 0.5695\n", 242 | "Epoch 29/200\n", 243 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.8785 - acc: 0.6683 - val_loss: 1.1882 - val_acc: 0.5840\n", 244 | "Epoch 30/200\n", 245 | "28709/28709 [==============================] - 10s 333us/step - loss: 0.8652 - acc: 0.6698 - val_loss: 1.2141 - val_acc: 0.5695\n", 246 | "Epoch 31/200\n", 247 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.8572 - acc: 0.6771 - val_loss: 1.2313 - val_acc: 0.5651\n", 248 | "Epoch 32/200\n", 249 | "28709/28709 [==============================] - 10s 333us/step - loss: 0.8416 - acc: 0.6844 - val_loss: 1.2080 - val_acc: 0.5617\n", 250 | "Epoch 33/200\n", 251 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.8379 - acc: 0.6876 - val_loss: 1.2147 - val_acc: 0.5704\n", 252 | "Epoch 34/200\n", 253 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.8231 - acc: 0.6870 - val_loss: 1.2161 - val_acc: 0.5698\n", 254 | "Epoch 35/200\n", 255 | "28709/28709 [==============================] - 10s 333us/step - loss: 0.8032 - acc: 0.6972 - val_loss: 1.1971 - val_acc: 0.5848\n", 256 | "Epoch 36/200\n", 257 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.8068 - acc: 0.6952 - val_loss: 1.2326 - val_acc: 0.5734\n", 258 | "Epoch 37/200\n", 259 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.7877 - acc: 0.7051 - val_loss: 1.2494 - val_acc: 0.5818\n", 260 | "Epoch 38/200\n", 261 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.7820 - acc: 0.7052 - val_loss: 1.2611 - val_acc: 0.5731\n", 262 | "Epoch 39/200\n", 263 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.7634 - acc: 0.7130 - val_loss: 1.2440 - val_acc: 0.5726\n", 264 | "Epoch 40/200\n", 265 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.7549 - acc: 0.7173 - val_loss: 1.2870 - val_acc: 0.5765\n", 266 | "Epoch 41/200\n", 267 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.7390 - acc: 0.7220 - val_loss: 1.2617 - val_acc: 0.5773\n", 268 | "Epoch 42/200\n", 269 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.7424 - acc: 0.7213 - val_loss: 1.2903 - val_acc: 0.5626\n", 270 | "Epoch 43/200\n", 271 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.7380 - acc: 0.7229 - val_loss: 1.2547 - val_acc: 0.5692\n", 272 | "Epoch 44/200\n", 273 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.7154 - acc: 0.7356 - val_loss: 1.3186 - val_acc: 0.5829\n", 274 | "Epoch 45/200\n", 275 | "28709/28709 [==============================] - 10s 334us/step - loss: 0.7089 - acc: 0.7347 - val_loss: 1.2719 - val_acc: 0.5762\n", 276 | "Epoch 46/200\n", 277 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.6960 - acc: 0.7418 - val_loss: 1.2883 - val_acc: 0.5756\n", 278 | "Epoch 47/200\n", 279 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.6930 - acc: 0.7425 - val_loss: 1.2938 - val_acc: 0.5793\n", 280 | "Epoch 48/200\n", 281 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.6795 - acc: 0.7477 - val_loss: 1.3409 - val_acc: 0.5762\n", 282 | "Epoch 49/200\n", 283 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.6664 - acc: 0.7528 - val_loss: 1.3276 - val_acc: 0.5651\n", 284 | "Epoch 50/200\n", 285 | "28709/28709 
[==============================] - 10s 331us/step - loss: 0.6754 - acc: 0.7499 - val_loss: 1.3510 - val_acc: 0.5784\n", 286 | "Epoch 51/200\n", 287 | "28709/28709 [==============================] - 10s 333us/step - loss: 0.6598 - acc: 0.7547 - val_loss: 1.3616 - val_acc: 0.5720\n", 288 | "Epoch 52/200\n", 289 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.6559 - acc: 0.7571 - val_loss: 1.3348 - val_acc: 0.5851\n", 290 | "Epoch 53/200\n", 291 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.6407 - acc: 0.7600 - val_loss: 1.3386 - val_acc: 0.5829\n", 292 | "Epoch 54/200\n", 293 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.6458 - acc: 0.7618 - val_loss: 1.3742 - val_acc: 0.5729\n", 294 | "Epoch 55/200\n", 295 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.6309 - acc: 0.7666 - val_loss: 1.3392 - val_acc: 0.5723\n", 296 | "Epoch 56/200\n", 297 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.6296 - acc: 0.7678 - val_loss: 1.3955 - val_acc: 0.5701\n", 298 | "Epoch 57/200\n", 299 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.6087 - acc: 0.7732 - val_loss: 1.3747 - val_acc: 0.5690\n", 300 | "Epoch 58/200\n", 301 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.6030 - acc: 0.7781 - val_loss: 1.4097 - val_acc: 0.5762\n", 302 | "Epoch 59/200\n", 303 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.6031 - acc: 0.7796 - val_loss: 1.4175 - val_acc: 0.5723\n", 304 | "Epoch 60/200\n", 305 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.5877 - acc: 0.7823 - val_loss: 1.4252 - val_acc: 0.5754\n", 306 | "Epoch 61/200\n", 307 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.5887 - acc: 0.7843 - val_loss: 1.4415 - val_acc: 0.5795\n", 308 | "Epoch 62/200\n", 309 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.5753 - acc: 0.7865 - val_loss: 1.3601 - val_acc: 0.5784\n", 310 | "Epoch 63/200\n", 311 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.5728 - acc: 0.7901 - val_loss: 1.4199 - val_acc: 0.5815\n", 312 | "Epoch 64/200\n", 313 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.5752 - acc: 0.7896 - val_loss: 1.4053 - val_acc: 0.5765\n", 314 | "Epoch 65/200\n", 315 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.5518 - acc: 0.7971 - val_loss: 1.5581 - val_acc: 0.5642\n", 316 | "Epoch 66/200\n", 317 | "28709/28709 [==============================] - 10s 333us/step - loss: 0.5536 - acc: 0.7989 - val_loss: 1.4523 - val_acc: 0.5795\n", 318 | "Epoch 67/200\n", 319 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.5690 - acc: 0.7932 - val_loss: 1.4408 - val_acc: 0.5751\n", 320 | "Epoch 68/200\n", 321 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.5530 - acc: 0.7970 - val_loss: 1.4322 - val_acc: 0.5832\n", 322 | "Epoch 69/200\n", 323 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.5528 - acc: 0.7999 - val_loss: 1.4578 - val_acc: 0.5829\n", 324 | "Epoch 70/200\n", 325 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.5347 - acc: 0.8080 - val_loss: 1.4719 - val_acc: 0.5676\n", 326 | "Epoch 71/200\n", 327 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.5215 - acc: 0.8106 - val_loss: 1.5091 - val_acc: 0.5670\n", 328 | "Epoch 
72/200\n", 329 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.5323 - acc: 0.8058 - val_loss: 1.4915 - val_acc: 0.5701\n", 330 | "Epoch 73/200\n", 331 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.5139 - acc: 0.8135 - val_loss: 1.4910 - val_acc: 0.5745\n", 332 | "Epoch 74/200\n", 333 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.5185 - acc: 0.8105 - val_loss: 1.5253 - val_acc: 0.5759\n", 334 | "Epoch 75/200\n", 335 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.5214 - acc: 0.8094 - val_loss: 1.4636 - val_acc: 0.5795\n", 336 | "Epoch 76/200\n", 337 | "28709/28709 [==============================] - 10s 333us/step - loss: 0.5134 - acc: 0.8147 - val_loss: 1.4954 - val_acc: 0.5698\n", 338 | "Epoch 77/200\n", 339 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.5117 - acc: 0.8141 - val_loss: 1.4484 - val_acc: 0.5798\n", 340 | "Epoch 78/200\n", 341 | "28709/28709 [==============================] - 10s 333us/step - loss: 0.5071 - acc: 0.8159 - val_loss: 1.4882 - val_acc: 0.5751\n", 342 | "Epoch 79/200\n", 343 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.5074 - acc: 0.8172 - val_loss: 1.5797 - val_acc: 0.5770\n", 344 | "Epoch 80/200\n", 345 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.4962 - acc: 0.8221 - val_loss: 1.5605 - val_acc: 0.5823\n", 346 | "Epoch 81/200\n", 347 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.4892 - acc: 0.8245 - val_loss: 1.5344 - val_acc: 0.5712\n", 348 | "Epoch 82/200\n", 349 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.4834 - acc: 0.8263 - val_loss: 1.5772 - val_acc: 0.5706\n", 350 | "Epoch 83/200\n", 351 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.4838 - acc: 0.8266 - val_loss: 1.5195 - val_acc: 0.5673\n", 352 | "Epoch 84/200\n", 353 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.4790 - acc: 0.8271 - val_loss: 1.5393 - val_acc: 0.5751\n", 354 | "Epoch 85/200\n", 355 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.4687 - acc: 0.8314 - val_loss: 1.5671 - val_acc: 0.5798\n", 356 | "Epoch 86/200\n", 357 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.4554 - acc: 0.8391 - val_loss: 1.6085 - val_acc: 0.5712\n", 358 | "Epoch 87/200\n", 359 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.4731 - acc: 0.8316 - val_loss: 1.6153 - val_acc: 0.5748\n", 360 | "Epoch 88/200\n", 361 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.4589 - acc: 0.8335 - val_loss: 1.6353 - val_acc: 0.5729\n", 362 | "Epoch 89/200\n", 363 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.4676 - acc: 0.8330 - val_loss: 1.5874 - val_acc: 0.5765\n", 364 | "Epoch 90/200\n", 365 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.4436 - acc: 0.8390 - val_loss: 1.6443 - val_acc: 0.5695\n", 366 | "Epoch 91/200\n", 367 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.4584 - acc: 0.8386 - val_loss: 1.6240 - val_acc: 0.5784\n", 368 | "Epoch 92/200\n", 369 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.4514 - acc: 0.8396 - val_loss: 1.6563 - val_acc: 0.5639\n", 370 | "Epoch 93/200\n", 371 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.4431 - acc: 0.8417 - val_loss: 1.6037 - val_acc: 
0.5639\n", 372 | "Epoch 94/200\n", 373 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.4421 - acc: 0.8411 - val_loss: 1.5579 - val_acc: 0.5720\n", 374 | "Epoch 95/200\n", 375 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.4364 - acc: 0.8408 - val_loss: 1.5404 - val_acc: 0.5695\n", 376 | "Epoch 96/200\n", 377 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.4245 - acc: 0.8501 - val_loss: 1.6775 - val_acc: 0.5720\n", 378 | "Epoch 97/200\n", 379 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.4300 - acc: 0.8464 - val_loss: 1.6338 - val_acc: 0.5712\n", 380 | "Epoch 98/200\n", 381 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.4229 - acc: 0.8503 - val_loss: 1.6549 - val_acc: 0.5701\n", 382 | "Epoch 99/200\n", 383 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.4283 - acc: 0.8474 - val_loss: 1.6329 - val_acc: 0.5770\n", 384 | "Epoch 100/200\n", 385 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.4192 - acc: 0.8508 - val_loss: 1.6416 - val_acc: 0.5684\n", 386 | "Epoch 101/200\n", 387 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.4247 - acc: 0.8496 - val_loss: 1.6028 - val_acc: 0.5779\n", 388 | "Epoch 102/200\n", 389 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.4169 - acc: 0.8526 - val_loss: 1.6196 - val_acc: 0.5745\n", 390 | "Epoch 103/200\n", 391 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.4155 - acc: 0.8527 - val_loss: 1.6845 - val_acc: 0.5678\n", 392 | "Epoch 104/200\n", 393 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.4220 - acc: 0.8510 - val_loss: 1.6650 - val_acc: 0.5692\n", 394 | "Epoch 105/200\n", 395 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.4110 - acc: 0.8553 - val_loss: 1.6100 - val_acc: 0.5620\n", 396 | "Epoch 106/200\n", 397 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.4232 - acc: 0.8521 - val_loss: 1.6323 - val_acc: 0.5656\n", 398 | "Epoch 107/200\n", 399 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.4117 - acc: 0.8546 - val_loss: 1.6696 - val_acc: 0.5759\n", 400 | "Epoch 108/200\n", 401 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.3984 - acc: 0.8591 - val_loss: 1.7669 - val_acc: 0.5642\n", 402 | "Epoch 109/200\n", 403 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3955 - acc: 0.8600 - val_loss: 1.6740 - val_acc: 0.5782\n", 404 | "Epoch 110/200\n", 405 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.4014 - acc: 0.8593 - val_loss: 1.7157 - val_acc: 0.5709\n", 406 | "Epoch 111/200\n", 407 | "28709/28709 [==============================] - 10s 333us/step - loss: 0.3938 - acc: 0.8631 - val_loss: 1.7009 - val_acc: 0.5795\n", 408 | "Epoch 112/200\n", 409 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.4044 - acc: 0.8573 - val_loss: 1.7021 - val_acc: 0.5690\n", 410 | "Epoch 113/200\n", 411 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3923 - acc: 0.8630 - val_loss: 1.7301 - val_acc: 0.5729\n", 412 | "Epoch 114/200\n", 413 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3825 - acc: 0.8681 - val_loss: 1.8037 - val_acc: 0.5639\n", 414 | "Epoch 115/200\n", 415 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3918 - acc: 
0.8634 - val_loss: 1.6887 - val_acc: 0.5748\n", 416 | "Epoch 116/200\n", 417 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3813 - acc: 0.8649 - val_loss: 1.8227 - val_acc: 0.5695\n", 418 | "Epoch 117/200\n", 419 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3810 - acc: 0.8667 - val_loss: 1.7369 - val_acc: 0.5720\n", 420 | "Epoch 118/200\n", 421 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3886 - acc: 0.8650 - val_loss: 1.6428 - val_acc: 0.5734\n", 422 | "Epoch 119/200\n", 423 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3735 - acc: 0.8714 - val_loss: 1.7403 - val_acc: 0.5687\n", 424 | "Epoch 120/200\n", 425 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3794 - acc: 0.8678 - val_loss: 1.6621 - val_acc: 0.5748\n", 426 | "Epoch 121/200\n", 427 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3756 - acc: 0.8712 - val_loss: 1.7351 - val_acc: 0.5690\n", 428 | "Epoch 122/200\n", 429 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3713 - acc: 0.8713 - val_loss: 1.7025 - val_acc: 0.5717\n", 430 | "Epoch 123/200\n", 431 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3777 - acc: 0.8684 - val_loss: 1.6702 - val_acc: 0.5662\n", 432 | "Epoch 124/200\n", 433 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3656 - acc: 0.8726 - val_loss: 1.7618 - val_acc: 0.5662\n", 434 | "Epoch 125/200\n", 435 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.3736 - acc: 0.8716 - val_loss: 1.7357 - val_acc: 0.5784\n", 436 | "Epoch 126/200\n", 437 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3745 - acc: 0.8709 - val_loss: 1.7745 - val_acc: 0.5631\n", 438 | "Epoch 127/200\n", 439 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.3704 - acc: 0.8738 - val_loss: 1.6782 - val_acc: 0.5720\n", 440 | "Epoch 128/200\n", 441 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3681 - acc: 0.8711 - val_loss: 1.7745 - val_acc: 0.5768\n", 442 | "Epoch 129/200\n", 443 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.3652 - acc: 0.8764 - val_loss: 1.7043 - val_acc: 0.5698\n", 444 | "Epoch 130/200\n", 445 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3575 - acc: 0.8789 - val_loss: 1.7609 - val_acc: 0.5659\n", 446 | "Epoch 131/200\n", 447 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3628 - acc: 0.8746 - val_loss: 1.7452 - val_acc: 0.5662\n", 448 | "Epoch 132/200\n", 449 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3650 - acc: 0.8739 - val_loss: 1.7913 - val_acc: 0.5653\n", 450 | "Epoch 133/200\n", 451 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3570 - acc: 0.8778 - val_loss: 1.7720 - val_acc: 0.5684\n", 452 | "Epoch 134/200\n", 453 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3539 - acc: 0.8776 - val_loss: 1.8067 - val_acc: 0.5768\n", 454 | "Epoch 135/200\n", 455 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3478 - acc: 0.8788 - val_loss: 1.8158 - val_acc: 0.5656\n", 456 | "Epoch 136/200\n", 457 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3607 - acc: 0.8773 - val_loss: 1.8087 - val_acc: 0.5706\n", 458 | "Epoch 137/200\n", 459 | "28709/28709 [==============================] 
- 9s 331us/step - loss: 0.3531 - acc: 0.8778 - val_loss: 1.8008 - val_acc: 0.5692\n", 460 | "Epoch 138/200\n", 461 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3560 - acc: 0.8782 - val_loss: 1.7446 - val_acc: 0.5793\n", 462 | "Epoch 139/200\n", 463 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3442 - acc: 0.8815 - val_loss: 1.7783 - val_acc: 0.5692\n", 464 | "Epoch 140/200\n", 465 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3486 - acc: 0.8818 - val_loss: 1.8196 - val_acc: 0.5726\n", 466 | "Epoch 141/200\n", 467 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3368 - acc: 0.8845 - val_loss: 1.8001 - val_acc: 0.5706\n", 468 | "Epoch 142/200\n", 469 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3447 - acc: 0.8815 - val_loss: 1.8024 - val_acc: 0.5676\n", 470 | "Epoch 143/200\n", 471 | "28709/28709 [==============================] - 10s 333us/step - loss: 0.3390 - acc: 0.8818 - val_loss: 1.7858 - val_acc: 0.5692\n", 472 | "Epoch 144/200\n", 473 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.3513 - acc: 0.8807 - val_loss: 1.8013 - val_acc: 0.5776\n", 474 | "Epoch 145/200\n", 475 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3389 - acc: 0.8830 - val_loss: 1.8794 - val_acc: 0.5709\n", 476 | "Epoch 146/200\n", 477 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3366 - acc: 0.8824 - val_loss: 1.8769 - val_acc: 0.5717\n", 478 | "Epoch 147/200\n", 479 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3298 - acc: 0.8852 - val_loss: 1.7870 - val_acc: 0.5748\n", 480 | "Epoch 148/200\n", 481 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3360 - acc: 0.8853 - val_loss: 1.8689 - val_acc: 0.5804\n", 482 | "Epoch 149/200\n", 483 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3256 - acc: 0.8862 - val_loss: 1.8716 - val_acc: 0.5706\n", 484 | "Epoch 150/200\n", 485 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3356 - acc: 0.8855 - val_loss: 1.7906 - val_acc: 0.5773\n", 486 | "Epoch 151/200\n", 487 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3362 - acc: 0.8850 - val_loss: 1.8127 - val_acc: 0.5756\n", 488 | "Epoch 152/200\n", 489 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3231 - acc: 0.8931 - val_loss: 1.8828 - val_acc: 0.5690\n", 490 | "Epoch 153/200\n", 491 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3256 - acc: 0.8898 - val_loss: 1.8626 - val_acc: 0.5648\n", 492 | "Epoch 154/200\n", 493 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3262 - acc: 0.8885 - val_loss: 1.8027 - val_acc: 0.5717\n", 494 | "Epoch 155/200\n", 495 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3296 - acc: 0.8874 - val_loss: 1.9014 - val_acc: 0.5740\n", 496 | "Epoch 156/200\n", 497 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3266 - acc: 0.8879 - val_loss: 1.7987 - val_acc: 0.5653\n", 498 | "Epoch 157/200\n", 499 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3249 - acc: 0.8895 - val_loss: 1.8893 - val_acc: 0.5592\n", 500 | "Epoch 158/200\n", 501 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3270 - acc: 0.8884 - val_loss: 1.7996 - val_acc: 0.5659\n", 502 | "Epoch 159/200\n", 503 | 
"28709/28709 [==============================] - 9s 330us/step - loss: 0.3291 - acc: 0.8897 - val_loss: 1.7796 - val_acc: 0.5667\n", 504 | "Epoch 160/200\n", 505 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3385 - acc: 0.8862 - val_loss: 1.8301 - val_acc: 0.5715\n", 506 | "Epoch 161/200\n", 507 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.3316 - acc: 0.8862 - val_loss: 1.8239 - val_acc: 0.5704\n", 508 | "Epoch 162/200\n", 509 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3125 - acc: 0.8938 - val_loss: 1.8945 - val_acc: 0.5561\n", 510 | "Epoch 163/200\n", 511 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3243 - acc: 0.8885 - val_loss: 1.8810 - val_acc: 0.5645\n", 512 | "Epoch 164/200\n", 513 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3150 - acc: 0.8923 - val_loss: 1.8962 - val_acc: 0.5648\n", 514 | "Epoch 165/200\n", 515 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3243 - acc: 0.8905 - val_loss: 1.8643 - val_acc: 0.5687\n", 516 | "Epoch 166/200\n", 517 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3238 - acc: 0.8909 - val_loss: 1.8839 - val_acc: 0.5670\n", 518 | "Epoch 167/200\n", 519 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3221 - acc: 0.8921 - val_loss: 1.9199 - val_acc: 0.5698\n", 520 | "Epoch 168/200\n", 521 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3211 - acc: 0.8922 - val_loss: 1.8677 - val_acc: 0.5712\n", 522 | "Epoch 169/200\n", 523 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3125 - acc: 0.8931 - val_loss: 1.8150 - val_acc: 0.5695\n", 524 | "Epoch 170/200\n", 525 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3018 - acc: 0.8967 - val_loss: 1.8350 - val_acc: 0.5651\n", 526 | "Epoch 171/200\n", 527 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3157 - acc: 0.8938 - val_loss: 1.9005 - val_acc: 0.5776\n", 528 | "Epoch 172/200\n", 529 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3101 - acc: 0.8968 - val_loss: 1.7376 - val_acc: 0.5784\n", 530 | "Epoch 173/200\n", 531 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.2926 - acc: 0.9015 - val_loss: 1.9054 - val_acc: 0.5645\n", 532 | "Epoch 174/200\n", 533 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3104 - acc: 0.8948 - val_loss: 1.8862 - val_acc: 0.5589\n", 534 | "Epoch 175/200\n", 535 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3100 - acc: 0.8945 - val_loss: 1.9087 - val_acc: 0.5793\n", 536 | "Epoch 176/200\n", 537 | "28709/28709 [==============================] - 10s 332us/step - loss: 0.3101 - acc: 0.8964 - val_loss: 1.8904 - val_acc: 0.5743\n", 538 | "Epoch 177/200\n", 539 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3114 - acc: 0.8952 - val_loss: 1.9100 - val_acc: 0.5687\n", 540 | "Epoch 178/200\n", 541 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3087 - acc: 0.8956 - val_loss: 1.8440 - val_acc: 0.5751\n", 542 | "Epoch 179/200\n", 543 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3186 - acc: 0.8949 - val_loss: 1.8768 - val_acc: 0.5642\n", 544 | "Epoch 180/200\n", 545 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3044 - acc: 0.8986 - val_loss: 1.9380 - val_acc: 
0.5662\n", 546 | "Epoch 181/200\n", 547 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.2971 - acc: 0.9001 - val_loss: 1.9293 - val_acc: 0.5698\n", 548 | "Epoch 182/200\n", 549 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3056 - acc: 0.8958 - val_loss: 1.9033 - val_acc: 0.5631\n", 550 | "Epoch 183/200\n", 551 | "28709/28709 [==============================] - 10s 331us/step - loss: 0.3002 - acc: 0.8966 - val_loss: 1.9461 - val_acc: 0.5748\n", 552 | "Epoch 184/200\n", 553 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3042 - acc: 0.8990 - val_loss: 1.8951 - val_acc: 0.5717\n", 554 | "Epoch 185/200\n", 555 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3005 - acc: 0.8986 - val_loss: 1.9418 - val_acc: 0.5656\n", 556 | "Epoch 186/200\n", 557 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3086 - acc: 0.8991 - val_loss: 1.8882 - val_acc: 0.5659\n", 558 | "Epoch 187/200\n", 559 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3154 - acc: 0.8964 - val_loss: 1.9496 - val_acc: 0.5595\n", 560 | "Epoch 188/200\n", 561 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3068 - acc: 0.8993 - val_loss: 1.9107 - val_acc: 0.5603\n", 562 | "Epoch 189/200\n", 563 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.3056 - acc: 0.8986 - val_loss: 1.8563 - val_acc: 0.5681\n", 564 | "Epoch 190/200\n", 565 | "28709/28709 [==============================] - 9s 330us/step - loss: 0.2929 - acc: 0.9026 - val_loss: 1.9133 - val_acc: 0.5731\n", 566 | "Epoch 191/200\n", 567 | "28709/28709 [==============================] - 9s 331us/step - loss: 0.3019 - acc: 0.8995 - val_loss: 1.9807 - val_acc: 0.5673\n", 568 | "Epoch 192/200\n", 569 | "17344/28709 [=================>............] 
- ETA: 3s - loss: 0.2899 - acc: 0.9020" 570 | ], 571 | "name": "stdout" 572 | } 573 | ] 574 | } 575 | ] 576 | } 577 | -------------------------------------------------------------------------------- /example_submission.csv: -------------------------------------------------------------------------------- 1 | 3 2 | 4 3 | 0 4 | 4 5 | 3 6 | 3 7 | 4 8 | 6 9 | 4 10 | 2 11 | 3 12 | 2 13 | 2 14 | 2 15 | 6 16 | 2 17 | 5 18 | 5 19 | 5 20 | 3 21 | 6 22 | 5 23 | 4 24 | 3 25 | 6 26 | 4 27 | 6 28 | 0 29 | 5 30 | 5 31 | 3 32 | 6 33 | 3 34 | 6 35 | 0 36 | 3 37 | 6 38 | 2 39 | 0 40 | 3 41 | 0 42 | 4 43 | 3 44 | 3 45 | 3 46 | 3 47 | 4 48 | 3 49 | 5 50 | 6 51 | 6 52 | 3 53 | 5 54 | 4 55 | 0 56 | 3 57 | 2 58 | 3 59 | 5 60 | 3 61 | 6 62 | 3 63 | 3 64 | 5 65 | 5 66 | 3 67 | 5 68 | 6 69 | 6 70 | 3 71 | 4 72 | 5 73 | 6 74 | 6 75 | 3 76 | 6 77 | 5 78 | 6 79 | 6 80 | 4 81 | 0 82 | 4 83 | 6 84 | 6 85 | 5 86 | 0 87 | 3 88 | 3 89 | 5 90 | 3 91 | 6 92 | 3 93 | 3 94 | 4 95 | 0 96 | 0 97 | 2 98 | 3 99 | 3 100 | 0 101 | 3 102 | 6 103 | 4 104 | 3 105 | 0 106 | 3 107 | 6 108 | 6 109 | 5 110 | 3 111 | 3 112 | 0 113 | 0 114 | 2 115 | 3 116 | 2 117 | 3 118 | 6 119 | 3 120 | 3 121 | 5 122 | 4 123 | 6 124 | 2 125 | 3 126 | 3 127 | 3 128 | 0 129 | 5 130 | 5 131 | 5 132 | 0 133 | 4 134 | 6 135 | 3 136 | 5 137 | 5 138 | 3 139 | 5 140 | 6 141 | 6 142 | 3 143 | 3 144 | 0 145 | 6 146 | 0 147 | 3 148 | 3 149 | 6 150 | 2 151 | 6 152 | 0 153 | 5 154 | 3 155 | 5 156 | 5 157 | 2 158 | 5 159 | 0 160 | 3 161 | 6 162 | 3 163 | 3 164 | 3 165 | 4 166 | 6 167 | 4 168 | 0 169 | 5 170 | 6 171 | 0 172 | 3 173 | 5 174 | 3 175 | 0 176 | 6 177 | 0 178 | 4 179 | 0 180 | 5 181 | 4 182 | 3 183 | 2 184 | 3 185 | 3 186 | 4 187 | 3 188 | 3 189 | 0 190 | 3 191 | 4 192 | 3 193 | 5 194 | 5 195 | 3 196 | 6 197 | 5 198 | 6 199 | 0 200 | 4 201 | 3 202 | 3 203 | 5 204 | 3 205 | 4 206 | 5 207 | 4 208 | 4 209 | 0 210 | 3 211 | 6 212 | 4 213 | 0 214 | 4 215 | 6 216 | 2 217 | 5 218 | 4 219 | 4 220 | 3 221 | 3 222 | 4 223 | 5 224 | 3 225 | 2 226 | 3 227 | 5 228 | 4 229 | 3 230 | 1 231 | 3 232 | 6 233 | 6 234 | 3 235 | 3 236 | 0 237 | 3 238 | 0 239 | 0 240 | 6 241 | 3 242 | 6 243 | 6 244 | 0 245 | 1 246 | 3 247 | 3 248 | 3 249 | 0 250 | 6 251 | 0 252 | 4 253 | 0 254 | 3 255 | 0 256 | 6 257 | 6 258 | 3 259 | 4 260 | 6 261 | 3 262 | 6 263 | 2 264 | 4 265 | 2 266 | 0 267 | 5 268 | 3 269 | 3 270 | 4 271 | 3 272 | 6 273 | 5 274 | 3 275 | 5 276 | 3 277 | 3 278 | 5 279 | 6 280 | 0 281 | 3 282 | 3 283 | 6 284 | 2 285 | 3 286 | 5 287 | 0 288 | 6 289 | 4 290 | 6 291 | 6 292 | 6 293 | 6 294 | 2 295 | 3 296 | 4 297 | 4 298 | 6 299 | 0 300 | 3 301 | 4 302 | 4 303 | 3 304 | 3 305 | 0 306 | 3 307 | 5 308 | 3 309 | 3 310 | 3 311 | 3 312 | 0 313 | 6 314 | 2 315 | 6 316 | 3 317 | 4 318 | 3 319 | 2 320 | 3 321 | 0 322 | 5 323 | 0 324 | 2 325 | 4 326 | 2 327 | 2 328 | 3 329 | 3 330 | 6 331 | 3 332 | 6 333 | 0 334 | 3 335 | 2 336 | 3 337 | 3 338 | 2 339 | 3 340 | 3 341 | 0 342 | 6 343 | 0 344 | 4 345 | 6 346 | 6 347 | 3 348 | 3 349 | 4 350 | 4 351 | 6 352 | 0 353 | 6 354 | 3 355 | 3 356 | 4 357 | 3 358 | 0 359 | 3 360 | 0 361 | 4 362 | 5 363 | 6 364 | 6 365 | 5 366 | 3 367 | 3 368 | 0 369 | 3 370 | 0 371 | 3 372 | 4 373 | 4 374 | 6 375 | 6 376 | 0 377 | 6 378 | 5 379 | 3 380 | 4 381 | 4 382 | 5 383 | 3 384 | 3 385 | 3 386 | 6 387 | 3 388 | 5 389 | 6 390 | 2 391 | 2 392 | 3 393 | 3 394 | 2 395 | 4 396 | 5 397 | 4 398 | 5 399 | 0 400 | 5 401 | 0 402 | 5 403 | 3 404 | 3 405 | 2 406 | 0 407 | 4 408 | 5 409 | 5 410 | 5 411 | 0 412 | 5 413 | 6 414 | 3 415 | 3 416 | 3 417 | 2 418 | 3 419 | 5 420 | 
4 421 | 3 422 | 3 423 | 5 424 | 3 425 | 6 426 | 0 427 | 3 428 | 3 429 | 3 430 | 4 431 | 3 432 | 6 433 | 2 434 | 6 435 | 3 436 | 2 437 | 4 438 | 5 439 | 4 440 | 0 441 | 6 442 | 0 443 | 6 444 | 4 445 | 4 446 | 4 447 | 0 448 | 3 449 | 5 450 | 6 451 | 4 452 | 3 453 | 6 454 | 6 455 | 4 456 | 5 457 | 3 458 | 4 459 | 0 460 | 5 461 | 5 462 | 5 463 | 5 464 | 3 465 | 6 466 | 4 467 | 3 468 | 4 469 | 3 470 | 3 471 | 0 472 | 5 473 | 2 474 | 2 475 | 6 476 | 1 477 | 3 478 | 6 479 | 6 480 | 4 481 | 0 482 | 2 483 | 6 484 | 4 485 | 4 486 | 4 487 | 6 488 | 5 489 | 3 490 | 6 491 | 3 492 | 6 493 | 3 494 | 3 495 | 4 496 | 3 497 | 0 498 | 2 499 | 3 500 | 4 501 | 6 502 | 4 503 | 3 504 | 2 505 | 0 506 | 4 507 | 2 508 | 6 509 | 6 510 | 4 511 | 2 512 | 3 513 | 0 514 | 3 515 | 4 516 | 3 517 | 5 518 | 4 519 | 3 520 | 4 521 | 0 522 | 6 523 | 6 524 | 6 525 | 3 526 | 3 527 | 3 528 | 6 529 | 0 530 | 3 531 | 3 532 | 3 533 | 5 534 | 2 535 | 0 536 | 3 537 | 4 538 | 6 539 | 5 540 | 4 541 | 2 542 | 6 543 | 6 544 | 3 545 | 3 546 | 6 547 | 0 548 | 4 549 | 3 550 | 3 551 | 6 552 | 4 553 | 4 554 | 3 555 | 3 556 | 4 557 | 6 558 | 0 559 | 6 560 | 4 561 | 2 562 | 3 563 | 5 564 | 4 565 | 5 566 | 5 567 | 3 568 | 2 569 | 4 570 | 0 571 | 6 572 | 3 573 | 5 574 | 6 575 | 5 576 | 2 577 | 2 578 | 3 579 | 4 580 | 4 581 | 4 582 | 0 583 | 3 584 | 3 585 | 5 586 | 0 587 | 4 588 | 4 589 | 6 590 | 3 591 | 6 592 | 3 593 | 4 594 | 6 595 | 3 596 | 4 597 | 4 598 | 4 599 | 4 600 | 6 601 | 2 602 | 3 603 | 2 604 | 3 605 | 3 606 | 4 607 | 2 608 | 4 609 | 6 610 | 4 611 | 5 612 | 5 613 | 3 614 | 3 615 | 2 616 | 6 617 | 6 618 | 6 619 | 4 620 | 2 621 | 3 622 | 3 623 | 6 624 | 3 625 | 3 626 | 3 627 | 0 628 | 6 629 | 0 630 | 3 631 | 6 632 | 6 633 | 3 634 | 6 635 | 6 636 | 4 637 | 3 638 | 3 639 | 0 640 | 0 641 | 5 642 | 4 643 | 3 644 | 6 645 | 3 646 | 4 647 | 3 648 | 6 649 | 5 650 | 3 651 | 2 652 | 4 653 | 3 654 | 3 655 | 3 656 | 2 657 | 3 658 | 4 659 | 6 660 | 5 661 | 6 662 | 3 663 | 3 664 | 3 665 | 3 666 | 5 667 | 6 668 | 6 669 | 6 670 | 0 671 | 0 672 | 2 673 | 3 674 | 0 675 | 6 676 | 6 677 | 6 678 | 0 679 | 2 680 | 6 681 | 2 682 | 4 683 | 6 684 | 3 685 | 2 686 | 6 687 | 6 688 | 3 689 | 6 690 | 3 691 | 3 692 | 2 693 | 4 694 | 0 695 | 4 696 | 3 697 | 5 698 | 3 699 | 0 700 | 4 701 | 2 702 | 3 703 | 2 704 | 6 705 | 0 706 | 4 707 | 3 708 | 0 709 | 3 710 | 3 711 | 3 712 | 3 713 | 2 714 | 3 715 | 2 716 | 3 717 | 6 718 | 4 719 | 4 720 | 6 721 | 0 722 | 3 723 | 3 724 | 0 725 | 3 726 | 0 727 | 6 728 | 2 729 | 0 730 | 3 731 | 3 732 | 2 733 | 2 734 | 6 735 | 2 736 | 6 737 | 6 738 | 0 739 | 2 740 | 0 741 | 0 742 | 0 743 | 6 744 | 3 745 | 4 746 | 3 747 | 0 748 | 3 749 | 2 750 | 6 751 | 4 752 | 4 753 | 0 754 | 2 755 | 6 756 | 3 757 | 2 758 | 4 759 | 3 760 | 3 761 | 4 762 | 0 763 | 6 764 | 3 765 | 6 766 | 3 767 | 4 768 | 3 769 | 2 770 | 3 771 | 3 772 | 5 773 | 4 774 | 6 775 | 0 776 | 5 777 | 3 778 | 6 779 | 3 780 | 2 781 | 3 782 | 6 783 | 6 784 | 3 785 | 0 786 | 6 787 | 0 788 | 2 789 | 4 790 | 2 791 | 3 792 | 5 793 | 4 794 | 4 795 | 0 796 | 3 797 | 6 798 | 2 799 | 0 800 | 5 801 | 3 802 | 3 803 | 5 804 | 3 805 | 5 806 | 4 807 | 5 808 | 2 809 | 3 810 | 4 811 | 3 812 | 2 813 | 4 814 | 0 815 | 0 816 | 4 817 | 6 818 | 5 819 | 4 820 | 2 821 | 3 822 | 5 823 | 2 824 | 0 825 | 5 826 | 6 827 | 5 828 | 4 829 | 5 830 | 4 831 | 0 832 | 5 833 | 5 834 | 5 835 | 5 836 | 0 837 | 3 838 | 0 839 | 3 840 | 0 841 | 6 842 | 4 843 | 5 844 | 4 845 | 4 846 | 0 847 | 3 848 | 6 849 | 4 850 | 5 851 | 6 852 | 3 853 | 4 854 | 0 855 | 0 856 | 3 857 | 4 858 | 3 859 | 6 860 | 5 861 | 0 862 | 0 863 | 0 864 | 2 
865 | 6 866 | 3 867 | 6 868 | 4 869 | 3 870 | 3 871 | 3 872 | 3 873 | 0 874 | 2 875 | 3 876 | 0 877 | 4 878 | 5 879 | 3 880 | 0 881 | 3 882 | 4 883 | 6 884 | 6 885 | 5 886 | 5 887 | 3 888 | 3 889 | 6 890 | 3 891 | 4 892 | 5 893 | 4 894 | 4 895 | 5 896 | 6 897 | 1 898 | 6 899 | 6 900 | 6 901 | 6 902 | 3 903 | 3 904 | 6 905 | 5 906 | 3 907 | 0 908 | 5 909 | 4 910 | 5 911 | 5 912 | 4 913 | 3 914 | 0 915 | 4 916 | 0 917 | 4 918 | 4 919 | 0 920 | 4 921 | 6 922 | 2 923 | 3 924 | 3 925 | 6 926 | 3 927 | 2 928 | 4 929 | 6 930 | 0 931 | 3 932 | 3 933 | 6 934 | 4 935 | 0 936 | 6 937 | 6 938 | 6 939 | 3 940 | 3 941 | 3 942 | 3 943 | 3 944 | 4 945 | 0 946 | 4 947 | 4 948 | 6 949 | 3 950 | 3 951 | 6 952 | 6 953 | 4 954 | 2 955 | 2 956 | 2 957 | 4 958 | 3 959 | 4 960 | 5 961 | 2 962 | 6 963 | 6 964 | 3 965 | 2 966 | 0 967 | 3 968 | 2 969 | 3 970 | 3 971 | 3 972 | 3 973 | 6 974 | 0 975 | 3 976 | 0 977 | 6 978 | 3 979 | 6 980 | 0 981 | 3 982 | 5 983 | 2 984 | 6 985 | 2 986 | 4 987 | 3 988 | 0 989 | 3 990 | 5 991 | 6 992 | 3 993 | 2 994 | 3 995 | 0 996 | 4 997 | 3 998 | 0 999 | 4 1000 | 3 1001 | 2 1002 | 4 1003 | 0 1004 | 4 1005 | 2 1006 | 6 1007 | 4 1008 | 6 1009 | 2 1010 | 2 1011 | 3 1012 | 0 1013 | 5 1014 | 3 1015 | 4 1016 | 4 1017 | 0 1018 | 0 1019 | 3 1020 | 4 1021 | 6 1022 | 3 1023 | 6 1024 | 6 1025 | 4 1026 | 4 1027 | 4 1028 | 2 1029 | 4 1030 | 6 1031 | 4 1032 | 0 1033 | 3 1034 | 3 1035 | 0 1036 | 3 1037 | 4 1038 | 0 1039 | 5 1040 | 3 1041 | 3 1042 | 6 1043 | 3 1044 | 6 1045 | 3 1046 | 6 1047 | 3 1048 | 4 1049 | 6 1050 | 6 1051 | 4 1052 | 4 1053 | 4 1054 | 3 1055 | 5 1056 | 3 1057 | 4 1058 | 0 1059 | 3 1060 | 4 1061 | 3 1062 | 3 1063 | 3 1064 | 3 1065 | 3 1066 | 6 1067 | 3 1068 | 6 1069 | 4 1070 | 3 1071 | 2 1072 | 6 1073 | 2 1074 | 3 1075 | 3 1076 | 6 1077 | 2 1078 | 2 1079 | 4 1080 | 0 1081 | 3 1082 | 3 1083 | 5 1084 | 5 1085 | 4 1086 | 6 1087 | 3 1088 | 5 1089 | 5 1090 | 4 1091 | 0 1092 | 3 1093 | 3 1094 | 3 1095 | 6 1096 | 3 1097 | 3 1098 | 3 1099 | 6 1100 | 6 1101 | 6 1102 | 0 1103 | 3 1104 | 2 1105 | 6 1106 | 2 1107 | 0 1108 | 4 1109 | 0 1110 | 0 1111 | 3 1112 | 6 1113 | 3 1114 | 4 1115 | 5 1116 | 6 1117 | 3 1118 | 3 1119 | 4 1120 | 3 1121 | 5 1122 | 4 1123 | 0 1124 | 3 1125 | 5 1126 | 3 1127 | 6 1128 | 0 1129 | 4 1130 | 6 1131 | 6 1132 | 0 1133 | 3 1134 | 3 1135 | 3 1136 | 2 1137 | 5 1138 | 4 1139 | 3 1140 | 4 1141 | 5 1142 | 2 1143 | 3 1144 | 0 1145 | 6 1146 | 3 1147 | 0 1148 | 3 1149 | 6 1150 | 6 1151 | 6 1152 | 4 1153 | 6 1154 | 0 1155 | 2 1156 | 5 1157 | 3 1158 | 3 1159 | 3 1160 | 0 1161 | 4 1162 | 6 1163 | 4 1164 | 2 1165 | 0 1166 | 5 1167 | 6 1168 | 3 1169 | 3 1170 | 0 1171 | 2 1172 | 4 1173 | 3 1174 | 5 1175 | 3 1176 | 3 1177 | 3 1178 | 6 1179 | 3 1180 | 2 1181 | 3 1182 | 3 1183 | 6 1184 | 4 1185 | 6 1186 | 5 1187 | 3 1188 | 2 1189 | 4 1190 | 3 1191 | 3 1192 | 0 1193 | 3 1194 | 6 1195 | 3 1196 | 5 1197 | 2 1198 | 6 1199 | 3 1200 | 3 1201 | 5 1202 | 6 1203 | 2 1204 | 6 1205 | 6 1206 | 6 1207 | 3 1208 | 6 1209 | 0 1210 | 6 1211 | 3 1212 | 5 1213 | 6 1214 | 3 1215 | 3 1216 | 4 1217 | 2 1218 | 3 1219 | 5 1220 | 6 1221 | 6 1222 | 3 1223 | 5 1224 | 3 1225 | 3 1226 | 3 1227 | 4 1228 | 4 1229 | 3 1230 | 4 1231 | 6 1232 | 6 1233 | 6 1234 | 4 1235 | 6 1236 | 6 1237 | 0 1238 | 3 1239 | 3 1240 | 3 1241 | 4 1242 | 0 1243 | 0 1244 | 5 1245 | 6 1246 | 3 1247 | 5 1248 | 4 1249 | 5 1250 | 3 1251 | 3 1252 | 6 1253 | 4 1254 | 2 1255 | 0 1256 | 6 1257 | 4 1258 | 4 1259 | 3 1260 | 4 1261 | 6 1262 | 4 1263 | 0 1264 | 4 1265 | 4 1266 | 3 1267 | 3 1268 | 2 1269 | 4 1270 | 6 1271 | 3 1272 | 4 1273 | 0 1274 | 0 
1275 | 4 1276 | 5 1277 | 3 1278 | 4 1279 | 3 1280 | 0 1281 | 0 1282 | 4 1283 | 3 1284 | 3 1285 | 3 1286 | 4 1287 | 3 1288 | 6 1289 | 5 1290 | 0 1291 | 3 1292 | 4 1293 | 4 1294 | 3 1295 | 3 1296 | 0 1297 | 5 1298 | 3 1299 | 4 1300 | 5 1301 | 4 1302 | 0 1303 | 3 1304 | 6 1305 | 6 1306 | 5 1307 | 3 1308 | 3 1309 | 6 1310 | 4 1311 | 6 1312 | 4 1313 | 3 1314 | 2 1315 | 5 1316 | 0 1317 | 4 1318 | 3 1319 | 3 1320 | 0 1321 | 2 1322 | 6 1323 | 3 1324 | 4 1325 | 3 1326 | 3 1327 | 4 1328 | 6 1329 | 5 1330 | 4 1331 | 6 1332 | 6 1333 | 6 1334 | 6 1335 | 3 1336 | 0 1337 | 4 1338 | 4 1339 | 0 1340 | 5 1341 | 3 1342 | 4 1343 | 3 1344 | 6 1345 | 3 1346 | 6 1347 | 3 1348 | 4 1349 | 3 1350 | 6 1351 | 2 1352 | 4 1353 | 3 1354 | 6 1355 | 4 1356 | 3 1357 | 6 1358 | 3 1359 | 5 1360 | 6 1361 | 4 1362 | 6 1363 | 5 1364 | 6 1365 | 5 1366 | 4 1367 | 3 1368 | 5 1369 | 2 1370 | 5 1371 | 3 1372 | 0 1373 | 5 1374 | 3 1375 | 3 1376 | 0 1377 | 4 1378 | 3 1379 | 5 1380 | 0 1381 | 3 1382 | 4 1383 | 3 1384 | 2 1385 | 6 1386 | 0 1387 | 3 1388 | 6 1389 | 3 1390 | 6 1391 | 3 1392 | 6 1393 | 3 1394 | 3 1395 | 6 1396 | 4 1397 | 3 1398 | 3 1399 | 3 1400 | 5 1401 | 6 1402 | 6 1403 | 2 1404 | 5 1405 | 4 1406 | 3 1407 | 3 1408 | 3 1409 | 6 1410 | 3 1411 | 3 1412 | 2 1413 | 4 1414 | 2 1415 | 4 1416 | 6 1417 | 6 1418 | 2 1419 | 0 1420 | 6 1421 | 3 1422 | 4 1423 | 3 1424 | 6 1425 | 5 1426 | 4 1427 | 3 1428 | 6 1429 | 6 1430 | 3 1431 | 4 1432 | 4 1433 | 4 1434 | 5 1435 | 4 1436 | 6 1437 | 2 1438 | 2 1439 | 3 1440 | 6 1441 | 4 1442 | 4 1443 | 2 1444 | 5 1445 | 3 1446 | 6 1447 | 3 1448 | 5 1449 | 6 1450 | 3 1451 | 4 1452 | 6 1453 | 6 1454 | 4 1455 | 3 1456 | 3 1457 | 4 1458 | 3 1459 | 3 1460 | 3 1461 | 6 1462 | 0 1463 | 3 1464 | 3 1465 | 2 1466 | 3 1467 | 3 1468 | 3 1469 | 6 1470 | 3 1471 | 4 1472 | 3 1473 | 2 1474 | 3 1475 | 0 1476 | 2 1477 | 4 1478 | 0 1479 | 3 1480 | 6 1481 | 3 1482 | 5 1483 | 2 1484 | 2 1485 | 3 1486 | 6 1487 | 6 1488 | 3 1489 | 4 1490 | 4 1491 | 4 1492 | 3 1493 | 3 1494 | 0 1495 | 6 1496 | 5 1497 | 0 1498 | 6 1499 | 2 1500 | 6 1501 | 2 1502 | 6 1503 | 6 1504 | 5 1505 | 2 1506 | 6 1507 | 6 1508 | 6 1509 | 3 1510 | 4 1511 | 2 1512 | 6 1513 | 3 1514 | 2 1515 | 3 1516 | 4 1517 | 5 1518 | 5 1519 | 5 1520 | 3 1521 | 4 1522 | 0 1523 | 6 1524 | 5 1525 | 4 1526 | 6 1527 | 5 1528 | 2 1529 | 2 1530 | 4 1531 | 5 1532 | 6 1533 | 3 1534 | 5 1535 | 5 1536 | 2 1537 | 3 1538 | 6 1539 | 2 1540 | 3 1541 | 3 1542 | 2 1543 | 2 1544 | 0 1545 | 0 1546 | 3 1547 | 6 1548 | 3 1549 | 3 1550 | 4 1551 | 0 1552 | 3 1553 | 3 1554 | 0 1555 | 4 1556 | 3 1557 | 4 1558 | 6 1559 | 3 1560 | 0 1561 | 6 1562 | 4 1563 | 6 1564 | 5 1565 | 4 1566 | 4 1567 | 6 1568 | 4 1569 | 4 1570 | 4 1571 | 5 1572 | 3 1573 | 6 1574 | 0 1575 | 6 1576 | 3 1577 | 4 1578 | 3 1579 | 6 1580 | 2 1581 | 4 1582 | 6 1583 | 6 1584 | 4 1585 | 3 1586 | 6 1587 | 4 1588 | 6 1589 | 6 1590 | 4 1591 | 0 1592 | 5 1593 | 6 1594 | 0 1595 | 4 1596 | 5 1597 | 3 1598 | 3 1599 | 0 1600 | 3 1601 | 0 1602 | 4 1603 | 6 1604 | 5 1605 | 3 1606 | 6 1607 | 6 1608 | 5 1609 | 0 1610 | 2 1611 | 5 1612 | 6 1613 | 5 1614 | 3 1615 | 3 1616 | 3 1617 | 3 1618 | 4 1619 | 0 1620 | 3 1621 | 3 1622 | 3 1623 | 4 1624 | 3 1625 | 2 1626 | 3 1627 | 6 1628 | 3 1629 | 3 1630 | 3 1631 | 0 1632 | 5 1633 | 3 1634 | 3 1635 | 3 1636 | 2 1637 | 6 1638 | 6 1639 | 0 1640 | 4 1641 | 2 1642 | 0 1643 | 3 1644 | 0 1645 | 6 1646 | 3 1647 | 5 1648 | 3 1649 | 2 1650 | 4 1651 | 3 1652 | 5 1653 | 3 1654 | 4 1655 | 0 1656 | 3 1657 | 0 1658 | 6 1659 | 3 1660 | 2 1661 | 2 1662 | 3 1663 | 4 1664 | 2 1665 | 6 1666 | 6 1667 | 5 1668 | 2 1669 | 0 
1670 | 4 1671 | 6 1672 | 3 1673 | 4 1674 | 3 1675 | 2 1676 | 4 1677 | 6 1678 | 5 1679 | 5 1680 | 5 1681 | 4 1682 | 5 1683 | 3 1684 | 4 1685 | 5 1686 | 0 1687 | 6 1688 | 4 1689 | 0 1690 | 4 1691 | 0 1692 | 3 1693 | 3 1694 | 4 1695 | 3 1696 | 0 1697 | 1 1698 | 3 1699 | 4 1700 | 3 1701 | 6 1702 | 2 1703 | 3 1704 | 0 1705 | 6 1706 | 3 1707 | 0 1708 | 3 1709 | 3 1710 | 2 1711 | 4 1712 | 6 1713 | 6 1714 | 0 1715 | 3 1716 | 3 1717 | 3 1718 | 6 1719 | 5 1720 | 4 1721 | 4 1722 | 5 1723 | 5 1724 | 4 1725 | 6 1726 | 6 1727 | 4 1728 | 3 1729 | 6 1730 | 6 1731 | 6 1732 | 3 1733 | 6 1734 | 2 1735 | 3 1736 | 6 1737 | 3 1738 | 6 1739 | 5 1740 | 5 1741 | 3 1742 | 6 1743 | 5 1744 | 3 1745 | 0 1746 | 0 1747 | 0 1748 | 4 1749 | 4 1750 | 3 1751 | 4 1752 | 4 1753 | 3 1754 | 4 1755 | 3 1756 | 2 1757 | 6 1758 | 3 1759 | 6 1760 | 4 1761 | 0 1762 | 4 1763 | 4 1764 | 2 1765 | 6 1766 | 6 1767 | 2 1768 | 3 1769 | 4 1770 | 4 1771 | 2 1772 | 4 1773 | 3 1774 | 4 1775 | 6 1776 | 0 1777 | 6 1778 | 0 1779 | 0 1780 | 2 1781 | 5 1782 | 6 1783 | 3 1784 | 2 1785 | 2 1786 | 3 1787 | 0 1788 | 4 1789 | 0 1790 | 3 1791 | 0 1792 | 3 1793 | 4 1794 | 5 1795 | 4 1796 | 6 1797 | 3 1798 | 0 1799 | 5 1800 | 0 1801 | 3 1802 | 3 1803 | 3 1804 | 6 1805 | 2 1806 | 3 1807 | 6 1808 | 3 1809 | 3 1810 | 5 1811 | 0 1812 | 2 1813 | 0 1814 | 6 1815 | 2 1816 | 4 1817 | 6 1818 | 0 1819 | 6 1820 | 2 1821 | 4 1822 | 0 1823 | 3 1824 | 0 1825 | 3 1826 | 6 1827 | 4 1828 | 6 1829 | 5 1830 | 4 1831 | 6 1832 | 2 1833 | 3 1834 | 0 1835 | 3 1836 | 4 1837 | 0 1838 | 3 1839 | 3 1840 | 0 1841 | 6 1842 | 1 1843 | 6 1844 | 3 1845 | 6 1846 | 4 1847 | 4 1848 | 4 1849 | 6 1850 | 3 1851 | 0 1852 | 3 1853 | 6 1854 | 2 1855 | 6 1856 | 2 1857 | 2 1858 | 3 1859 | 5 1860 | 5 1861 | 3 1862 | 4 1863 | 3 1864 | 6 1865 | 2 1866 | 4 1867 | 4 1868 | 3 1869 | 3 1870 | 0 1871 | 2 1872 | 5 1873 | 6 1874 | 3 1875 | 6 1876 | 5 1877 | 3 1878 | 5 1879 | 3 1880 | 0 1881 | 0 1882 | 5 1883 | 5 1884 | 6 1885 | 6 1886 | 5 1887 | 4 1888 | 2 1889 | 5 1890 | 5 1891 | 5 1892 | 6 1893 | 1 1894 | 0 1895 | 4 1896 | 3 1897 | 3 1898 | 6 1899 | 3 1900 | 3 1901 | 6 1902 | 6 1903 | 4 1904 | 0 1905 | 4 1906 | 6 1907 | 6 1908 | 6 1909 | 0 1910 | 5 1911 | 3 1912 | 5 1913 | 4 1914 | 3 1915 | 5 1916 | 6 1917 | 5 1918 | 3 1919 | 0 1920 | 5 1921 | 2 1922 | 2 1923 | 4 1924 | 2 1925 | 4 1926 | 4 1927 | 6 1928 | 3 1929 | 3 1930 | 4 1931 | 0 1932 | 5 1933 | 6 1934 | 4 1935 | 6 1936 | 6 1937 | 0 1938 | 3 1939 | 6 1940 | 6 1941 | 3 1942 | 4 1943 | 3 1944 | 3 1945 | 6 1946 | 6 1947 | 4 1948 | 4 1949 | 6 1950 | 5 1951 | 3 1952 | 6 1953 | 6 1954 | 3 1955 | 3 1956 | 3 1957 | 4 1958 | 4 1959 | 3 1960 | 3 1961 | 5 1962 | 2 1963 | 4 1964 | 3 1965 | 6 1966 | 3 1967 | 3 1968 | 3 1969 | 6 1970 | 3 1971 | 3 1972 | 4 1973 | 2 1974 | 0 1975 | 3 1976 | 3 1977 | 2 1978 | 6 1979 | 0 1980 | 3 1981 | 3 1982 | 3 1983 | 3 1984 | 0 1985 | 4 1986 | 2 1987 | 4 1988 | 0 1989 | 3 1990 | 3 1991 | 6 1992 | 3 1993 | 6 1994 | 4 1995 | 6 1996 | 3 1997 | 0 1998 | 0 1999 | 6 2000 | 3 2001 | 0 2002 | 3 2003 | 6 2004 | 4 2005 | 6 2006 | 5 2007 | 6 2008 | 2 2009 | 2 2010 | 3 2011 | 3 2012 | 4 2013 | 2 2014 | 6 2015 | 2 2016 | 6 2017 | 5 2018 | 5 2019 | 0 2020 | 3 2021 | 2 2022 | 2 2023 | 2 2024 | 4 2025 | 0 2026 | 3 2027 | 5 2028 | 3 2029 | 3 2030 | 5 2031 | 5 2032 | 5 2033 | 4 2034 | 4 2035 | 2 2036 | 4 2037 | 2 2038 | 0 2039 | 5 2040 | 4 2041 | 3 2042 | 0 2043 | 5 2044 | 4 2045 | 0 2046 | 4 2047 | 3 2048 | 2 2049 | 0 2050 | 2 2051 | 3 2052 | 3 2053 | 3 2054 | 4 2055 | 6 2056 | 2 2057 | 0 2058 | 4 2059 | 6 2060 | 3 2061 | 5 2062 | 2 2063 | 3 2064 | 0 
[... rows 2065-3589 of example_submission.csv truncated: one predicted emotion label per line, an integer 0-6 (0 = angry, 1 = disgust, 2 = fear, 3 = happy, 4 = sad, 5 = surprise, 6 = neutral), matching the size of the PublicTest split ...]
--------------------------------------------------------------------------------
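A file in this format can be regenerated from a trained model. The sketch below is a hypothetical helper, not part of the repository; it assumes the fitted `model` and the preprocessed `X_test` array from train.py are in scope:

import numpy as np

predictions = model.predict(X_test)      # shape (num_images, 7): class probabilities
labels = np.argmax(predictions, axis=1)  # most probable emotion index per image

# write one integer label (0-6) per line, matching the layout above
with open("example_submission.csv", "w") as f:
    for label in labels:
        f.write(f"{label}\n")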
/fer.h5:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Dhanush45/Realtime-emotion-detectionusing-python/ebd3ce4a1af9fc271f745a419da251855155508a/fer.h5
--------------------------------------------------------------------------------
/fer.json:
--------------------------------------------------------------------------------
{"class_name": "Sequential", "config": {"name": "sequential_1", "layers": [{"class_name": "Conv2D", "config": {"name": "conv2d_1", "trainable": true, "batch_input_shape": [null, 48, 48, 1], "dtype": "float32", "filters": 64, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Conv2D", "config": {"name": "conv2d_2", "trainable": true, "filters": 64, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "MaxPooling2D", "config": {"name": "max_pooling2d_1", "trainable": true, "pool_size": [2, 2], "padding": "valid", "strides": [2, 2], "data_format": "channels_last"}}, {"class_name": "Dropout", "config": {"name": "dropout_1", "trainable": true, "rate": 0.5, "noise_shape": null, "seed": null}}, {"class_name": "Conv2D", "config": {"name": "conv2d_3", "trainable": true, "filters": 64, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Conv2D", "config": {"name": "conv2d_4", "trainable": true, "filters": 64, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "MaxPooling2D", "config": {"name": "max_pooling2d_2", "trainable": true, "pool_size": [2, 2], "padding": "valid", "strides": [2, 2], "data_format": "channels_last"}}, {"class_name": "Dropout", "config": {"name": "dropout_2", "trainable": true, "rate": 0.5, "noise_shape": null, "seed": null}}, {"class_name": "Conv2D", "config": {"name": "conv2d_5", "trainable": true, "filters": 128, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", 
"config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Conv2D", "config": {"name": "conv2d_6", "trainable": true, "filters": 128, "kernel_size": [3, 3], "strides": [1, 1], "padding": "valid", "data_format": "channels_last", "dilation_rate": [1, 1], "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "MaxPooling2D", "config": {"name": "max_pooling2d_3", "trainable": true, "pool_size": [2, 2], "padding": "valid", "strides": [2, 2], "data_format": "channels_last"}}, {"class_name": "Flatten", "config": {"name": "flatten_1", "trainable": true, "data_format": "channels_last"}}, {"class_name": "Dense", "config": {"name": "dense_1", "trainable": true, "units": 1024, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dropout", "config": {"name": "dropout_3", "trainable": true, "rate": 0.2, "noise_shape": null, "seed": null}}, {"class_name": "Dense", "config": {"name": "dense_2", "trainable": true, "units": 1024, "activation": "relu", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}, {"class_name": "Dropout", "config": {"name": "dropout_4", "trainable": true, "rate": 0.2, "noise_shape": null, "seed": null}}, {"class_name": "Dense", "config": {"name": "dense_3", "trainable": true, "units": 7, "activation": "softmax", "use_bias": true, "kernel_initializer": {"class_name": "VarianceScaling", "config": {"scale": 1.0, "mode": "fan_avg", "distribution": "uniform", "seed": null}}, "bias_initializer": {"class_name": "Zeros", "config": {}}, "kernel_regularizer": null, "bias_regularizer": null, "activity_regularizer": null, "kernel_constraint": null, "bias_constraint": null}}]}, "keras_version": "2.2.4", "backend": "tensorflow"} -------------------------------------------------------------------------------- /fer2013.bib: -------------------------------------------------------------------------------- 1 | 2 | @MISC{Goodfeli-et-al-2013, 3 | author = {Goodfellow, Ian and Erhan, Dumitru and Carrier, Pierre-Luc and Courville, Aaron and Mirza, Mehdi and Hamner, Ben and Cukierski, Will and Tang, Yichuan and Thaler, David and Lee, Dong-Hyun and Zhou, Yingbo and Ramaiah, Chetan and Feng, Fangxiang and Li, Ruifan and Wang, Xiaojie and Athanasakis, Dimitris and Shawe-Taylor, John and Milakov, Maxim and Park, John and Ionescu, Radu and Popescu, Marius and Grozea, Cristian and Bergstra, James and Xie, Jingjing and Romaszko, Lukasz and Xu, Bing and Chuang, Zhang and Bengio, Yoshua}, 4 | keywords = {competition, dataset, 
/fer2013.bib:
--------------------------------------------------------------------------------
@MISC{Goodfeli-et-al-2013,
       author = {Goodfellow, Ian and Erhan, Dumitru and Carrier, Pierre-Luc and Courville, Aaron and Mirza, Mehdi and Hamner, Ben and Cukierski, Will and Tang, Yichuan and Thaler, David and Lee, Dong-Hyun and Zhou, Yingbo and Ramaiah, Chetan and Feng, Fangxiang and Li, Ruifan and Wang, Xiaojie and Athanasakis, Dimitris and Shawe-Taylor, John and Milakov, Maxim and Park, John and Ionescu, Radu and Popescu, Marius and Grozea, Cristian and Bergstra, James and Xie, Jingjing and Romaszko, Lukasz and Xu, Bing and Chuang, Zhang and Bengio, Yoshua},
     keywords = {competition, dataset, representation learning},
        title = {Challenges in Representation Learning: A report on three machine learning contests},
         year = {2013},
  institution = {Unicer},
          url = {http://arxiv.org/abs/1307.0414},
     abstract = {The ICML 2013 Workshop on Challenges in Representation Learning focused on three challenges: the black box learning challenge, the facial expression recognition challenge, and the multimodal learning challenge. We describe the datasets created for these challenges and summarize the results of the competitions. We provide suggestions for organizers of future challenges and some comments on what kind of knowledge can be gained from machine learning competitions.

http://deeplearning.net/icml2013-workshop-competition}
}
--------------------------------------------------------------------------------
/train.py:
--------------------------------------------------------------------------------
import sys, os
import pandas as pd
import numpy as np

from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras.layers import Conv2D, MaxPooling2D, BatchNormalization, AveragePooling2D
from keras.losses import categorical_crossentropy
from keras.optimizers import Adam
from keras.regularizers import l2
from keras.utils import np_utils
# pd.set_option('display.max_rows', 500)
# pd.set_option('display.max_columns', 500)
# pd.set_option('display.width', 1000)

df = pd.read_csv('fer2013.csv')

# print(df.info())
# print(df["Usage"].value_counts())
# print(df.head())

X_train, train_y, X_test, test_y = [], [], [], []

# split each pixel string into an array; 'Training' rows feed the train set,
# 'PublicTest' rows feed the validation/test set
for index, row in df.iterrows():
    val = row['pixels'].split(" ")
    try:
        if 'Training' in row['Usage']:
            X_train.append(np.array(val, 'float32'))
            train_y.append(row['emotion'])
        elif 'PublicTest' in row['Usage']:
            X_test.append(np.array(val, 'float32'))
            test_y.append(row['emotion'])
    except Exception:
        print(f"error occurred at index: {index} and row: {row}")


num_features = 64
num_labels = 7
batch_size = 64
epochs = 30
width, height = 48, 48


X_train = np.array(X_train, 'float32')
train_y = np.array(train_y, 'float32')
X_test = np.array(X_test, 'float32')
test_y = np.array(test_y, 'float32')

# one-hot encode the 7 emotion labels
train_y = np_utils.to_categorical(train_y, num_classes=num_labels)
test_y = np_utils.to_categorical(test_y, num_classes=num_labels)

# standardize each pixel to zero mean and unit variance
# (runs are not exactly reproducible since no random seed is set)
X_train -= np.mean(X_train, axis=0)
X_train /= np.std(X_train, axis=0)

X_test -= np.mean(X_test, axis=0)
X_test /= np.std(X_test, axis=0)

X_train = X_train.reshape(X_train.shape[0], width, height, 1)

X_test = X_test.reshape(X_test.shape[0], width, height, 1)

# print(f"shape: {X_train.shape}")

## designing the CNN
# 1st convolution block
model = Sequential()

model.add(Conv2D(64, kernel_size=(3, 3), activation='relu', input_shape=X_train.shape[1:]))
model.add(Conv2D(64, kernel_size=(3, 3), activation='relu'))
# model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
model.add(Dropout(0.5))

# 2nd convolution block
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(Conv2D(64, (3, 3), activation='relu'))
# model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
model.add(Dropout(0.5))

# 3rd convolution block
model.add(Conv2D(128, (3, 3), activation='relu'))
model.add(Conv2D(128, (3, 3), activation='relu'))
# model.add(BatchNormalization())
model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))

model.add(Flatten())

# fully connected layers
model.add(Dense(1024, activation='relu'))
model.add(Dropout(0.2))
model.add(Dense(1024, activation='relu'))
model.add(Dropout(0.2))

model.add(Dense(num_labels, activation='softmax'))

# model.summary()

# compiling the model
model.compile(loss=categorical_crossentropy,
              optimizer=Adam(),
              metrics=['accuracy'])

# training the model
model.fit(X_train, train_y,
          batch_size=batch_size,
          epochs=epochs,
          verbose=1,
          validation_data=(X_test, test_y),
          shuffle=True)


# saving the architecture and weights for later use
fer_json = model.to_json()
with open("fer.json", "w") as json_file:
    json_file.write(fer_json)
model.save_weights("fer.h5")
--------------------------------------------------------------------------------
/videoTester.py:
--------------------------------------------------------------------------------
import os
import cv2
import numpy as np
from keras.models import model_from_json
from keras.preprocessing import image

# load model architecture
model = model_from_json(open("fer.json", "r").read())
# load weights
model.load_weights('fer.h5')

# use the cascade bundled with opencv-python; no local copy of the XML is needed
face_haar_cascade = cv2.CascadeClassifier(
    cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')

cap = cv2.VideoCapture(0)

while True:
    # capture a frame; ret is False when no frame could be read
    ret, test_img = cap.read()
    if not ret:
        continue
    gray_img = cv2.cvtColor(test_img, cv2.COLOR_BGR2GRAY)

    faces_detected = face_haar_cascade.detectMultiScale(gray_img, 1.32, 5)

    for (x, y, w, h) in faces_detected:
        cv2.rectangle(test_img, (x, y), (x + w, y + h), (255, 0, 0), thickness=7)
        # crop the region of interest, i.e. the face area: rows y:y+h, columns x:x+w
        roi_gray = gray_img[y:y + h, x:x + w]
        roi_gray = cv2.resize(roi_gray, (48, 48))
        img_pixels = image.img_to_array(roi_gray)
        img_pixels = np.expand_dims(img_pixels, axis=0)
        img_pixels /= 255

        predictions = model.predict(img_pixels)

        # index of the highest-probability class
        max_index = np.argmax(predictions[0])

        emotions = ('angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral')
        predicted_emotion = emotions[max_index]

        cv2.putText(test_img, predicted_emotion, (int(x), int(y)),
                    cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)

    resized_img = cv2.resize(test_img, (1000, 700))
    cv2.imshow('Facial emotion analysis', resized_img)

    if cv2.waitKey(10) & 0xFF == ord('q'):  # exit when 'q' is pressed
        break

cap.release()
cv2.destroyAllWindows()
--------------------------------------------------------------------------------
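Note that videoTester.py scales pixels to [0, 1] while train.py standardizes them to zero mean and unit variance, so inference preprocessing does not strictly match training; matching it exactly would require saving and reusing the training-set mean and standard deviation. For a quick smoke test without a webcam, the same load-and-predict path can be exercised on a single still image. A minimal sketch (hypothetical, not part of the repository; "face.jpg" is a placeholder for any image already cropped to a face):

import cv2
import numpy as np
from keras.models import model_from_json

model = model_from_json(open("fer.json", "r").read())
model.load_weights("fer.h5")

emotions = ('angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral')

img = cv2.imread("face.jpg", cv2.IMREAD_GRAYSCALE)  # placeholder path
img = cv2.resize(img, (48, 48))

# same preprocessing as videoTester.py: add batch and channel dims, scale to [0, 1]
pixels = img.astype("float32").reshape(1, 48, 48, 1) / 255

predictions = model.predict(pixels)
print(emotions[int(np.argmax(predictions[0]))])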