├── README.md ├── AR_LSTM.ipynb ├── models └── Models.ipynb └── AR.ipynb /README.md: -------------------------------------------------------------------------------- 1 | # Action_Recognition 2 | -------------------------------------------------------------------------------- /AR_LSTM.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "name": "AR_LSTM.ipynb", 7 | "provenance": [], 8 | "include_colab_link": true 9 | }, 10 | "kernelspec": { 11 | "name": "python3", 12 | "display_name": "Python 3" 13 | }, 14 | "accelerator": "GPU" 15 | }, 16 | "cells": [ 17 | { 18 | "cell_type": "markdown", 19 | "metadata": { 20 | "id": "view-in-github", 21 | "colab_type": "text" 22 | }, 23 | "source": [ 24 | "\"Open" 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "metadata": { 30 | "id": "NwA2iKQjRvAZ", 31 | "colab_type": "code", 32 | "outputId": "c9092e01-5259-4001-ebe2-cc6e0bd56120", 33 | "colab": { 34 | "base_uri": "https://localhost:8080/", 35 | "height": 1000 36 | } 37 | }, 38 | "source": [ 39 | "from keras.models import Sequential\n", 40 | "from keras.layers import Dropout, Flatten, Dense\n", 41 | "from keras import applications\n", 42 | "from keras.optimizers import SGD\n", 43 | "from sklearn.utils import shuffle\n", 44 | "from keras.preprocessing.image import ImageDataGenerator\n", 45 | "from keras.callbacks import EarlyStopping, ModelCheckpoint\n", 46 | "from keras.applications.vgg16 import VGG16\n", 47 | "from keras.layers import LSTM\n", 48 | "import numpy as np\n", 49 | "import glob,os\n", 50 | "#from scipy.misc import imread,imresize\n", 51 | "from keras.applications import resnet50\n", 52 | "from keras.layers import LSTM\n", 53 | "\n", 54 | "batch_size = 128\n", 55 | "\n", 56 | "resnet_model = resnet50.ResNet50(weights='imagenet')\n", 57 | "from keras.layers import Conv2D, Dense, Dropout, TimeDistributed\n", 58 | "from keras.models import 
Model\n", 59 | "resnet_model.layers.pop()\n", 60 | "resnet_model.layers.pop()\n", 61 | "resnet_model.summary()" 62 | ], 63 | "execution_count": 5, 64 | "outputs": [ 65 | { 66 | "output_type": "stream", 67 | "text": [ 68 | "Model: \"resnet50\"\n", 69 | "__________________________________________________________________________________________________\n", 70 | "Layer (type) Output Shape Param # Connected to \n", 71 | "==================================================================================================\n", 72 | "input_5 (InputLayer) (None, 224, 224, 3) 0 \n", 73 | "__________________________________________________________________________________________________\n", 74 | "conv1_pad (ZeroPadding2D) (None, 230, 230, 3) 0 input_5[0][0] \n", 75 | "__________________________________________________________________________________________________\n", 76 | "conv1 (Conv2D) (None, 112, 112, 64) 9472 conv1_pad[0][0] \n", 77 | "__________________________________________________________________________________________________\n", 78 | "bn_conv1 (BatchNormalization) (None, 112, 112, 64) 256 conv1[0][0] \n", 79 | "__________________________________________________________________________________________________\n", 80 | "activation_197 (Activation) (None, 112, 112, 64) 0 bn_conv1[0][0] \n", 81 | "__________________________________________________________________________________________________\n", 82 | "pool1_pad (ZeroPadding2D) (None, 114, 114, 64) 0 activation_197[0][0] \n", 83 | "__________________________________________________________________________________________________\n", 84 | "max_pooling2d_5 (MaxPooling2D) (None, 56, 56, 64) 0 pool1_pad[0][0] \n", 85 | "__________________________________________________________________________________________________\n", 86 | "res2a_branch2a (Conv2D) (None, 56, 56, 64) 4160 max_pooling2d_5[0][0] \n", 87 | "__________________________________________________________________________________________________\n", 88 | 
"bn2a_branch2a (BatchNormalizati (None, 56, 56, 64) 256 res2a_branch2a[0][0] \n", 89 | "__________________________________________________________________________________________________\n", 90 | "activation_198 (Activation) (None, 56, 56, 64) 0 bn2a_branch2a[0][0] \n", 91 | "__________________________________________________________________________________________________\n", 92 | "res2a_branch2b (Conv2D) (None, 56, 56, 64) 36928 activation_198[0][0] \n", 93 | "__________________________________________________________________________________________________\n", 94 | "bn2a_branch2b (BatchNormalizati (None, 56, 56, 64) 256 res2a_branch2b[0][0] \n", 95 | "__________________________________________________________________________________________________\n", 96 | "activation_199 (Activation) (None, 56, 56, 64) 0 bn2a_branch2b[0][0] \n", 97 | "__________________________________________________________________________________________________\n", 98 | "res2a_branch2c (Conv2D) (None, 56, 56, 256) 16640 activation_199[0][0] \n", 99 | "__________________________________________________________________________________________________\n", 100 | "res2a_branch1 (Conv2D) (None, 56, 56, 256) 16640 max_pooling2d_5[0][0] \n", 101 | "__________________________________________________________________________________________________\n", 102 | "bn2a_branch2c (BatchNormalizati (None, 56, 56, 256) 1024 res2a_branch2c[0][0] \n", 103 | "__________________________________________________________________________________________________\n", 104 | "bn2a_branch1 (BatchNormalizatio (None, 56, 56, 256) 1024 res2a_branch1[0][0] \n", 105 | "__________________________________________________________________________________________________\n", 106 | "add_65 (Add) (None, 56, 56, 256) 0 bn2a_branch2c[0][0] \n", 107 | " bn2a_branch1[0][0] \n", 108 | "__________________________________________________________________________________________________\n", 109 | "activation_200 (Activation) (None, 56, 56, 
256) 0 add_65[0][0] \n", 110 | "__________________________________________________________________________________________________\n", 111 | "res2b_branch2a (Conv2D) (None, 56, 56, 64) 16448 activation_200[0][0] \n", 112 | "__________________________________________________________________________________________________\n", 113 | "bn2b_branch2a (BatchNormalizati (None, 56, 56, 64) 256 res2b_branch2a[0][0] \n", 114 | "__________________________________________________________________________________________________\n", 115 | "activation_201 (Activation) (None, 56, 56, 64) 0 bn2b_branch2a[0][0] \n", 116 | "__________________________________________________________________________________________________\n", 117 | "res2b_branch2b (Conv2D) (None, 56, 56, 64) 36928 activation_201[0][0] \n", 118 | "__________________________________________________________________________________________________\n", 119 | "bn2b_branch2b (BatchNormalizati (None, 56, 56, 64) 256 res2b_branch2b[0][0] \n", 120 | "__________________________________________________________________________________________________\n", 121 | "activation_202 (Activation) (None, 56, 56, 64) 0 bn2b_branch2b[0][0] \n", 122 | "__________________________________________________________________________________________________\n", 123 | "res2b_branch2c (Conv2D) (None, 56, 56, 256) 16640 activation_202[0][0] \n", 124 | "__________________________________________________________________________________________________\n", 125 | "bn2b_branch2c (BatchNormalizati (None, 56, 56, 256) 1024 res2b_branch2c[0][0] \n", 126 | "__________________________________________________________________________________________________\n", 127 | "add_66 (Add) (None, 56, 56, 256) 0 bn2b_branch2c[0][0] \n", 128 | " activation_200[0][0] \n", 129 | "__________________________________________________________________________________________________\n", 130 | "activation_203 (Activation) (None, 56, 56, 256) 0 add_66[0][0] \n", 131 | 
"__________________________________________________________________________________________________\n", 132 | "res2c_branch2a (Conv2D) (None, 56, 56, 64) 16448 activation_203[0][0] \n", 133 | "__________________________________________________________________________________________________\n", 134 | "bn2c_branch2a (BatchNormalizati (None, 56, 56, 64) 256 res2c_branch2a[0][0] \n", 135 | "__________________________________________________________________________________________________\n", 136 | "activation_204 (Activation) (None, 56, 56, 64) 0 bn2c_branch2a[0][0] \n", 137 | "__________________________________________________________________________________________________\n", 138 | "res2c_branch2b (Conv2D) (None, 56, 56, 64) 36928 activation_204[0][0] \n", 139 | "__________________________________________________________________________________________________\n", 140 | "bn2c_branch2b (BatchNormalizati (None, 56, 56, 64) 256 res2c_branch2b[0][0] \n", 141 | "__________________________________________________________________________________________________\n", 142 | "activation_205 (Activation) (None, 56, 56, 64) 0 bn2c_branch2b[0][0] \n", 143 | "__________________________________________________________________________________________________\n", 144 | "res2c_branch2c (Conv2D) (None, 56, 56, 256) 16640 activation_205[0][0] \n", 145 | "__________________________________________________________________________________________________\n", 146 | "bn2c_branch2c (BatchNormalizati (None, 56, 56, 256) 1024 res2c_branch2c[0][0] \n", 147 | "__________________________________________________________________________________________________\n", 148 | "add_67 (Add) (None, 56, 56, 256) 0 bn2c_branch2c[0][0] \n", 149 | " activation_203[0][0] \n", 150 | "__________________________________________________________________________________________________\n", 151 | "activation_206 (Activation) (None, 56, 56, 256) 0 add_67[0][0] \n", 152 | 
"__________________________________________________________________________________________________\n", 153 | "res3a_branch2a (Conv2D) (None, 28, 28, 128) 32896 activation_206[0][0] \n", 154 | "__________________________________________________________________________________________________\n", 155 | "bn3a_branch2a (BatchNormalizati (None, 28, 28, 128) 512 res3a_branch2a[0][0] \n", 156 | "__________________________________________________________________________________________________\n", 157 | "activation_207 (Activation) (None, 28, 28, 128) 0 bn3a_branch2a[0][0] \n", 158 | "__________________________________________________________________________________________________\n", 159 | "res3a_branch2b (Conv2D) (None, 28, 28, 128) 147584 activation_207[0][0] \n", 160 | "__________________________________________________________________________________________________\n", 161 | "bn3a_branch2b (BatchNormalizati (None, 28, 28, 128) 512 res3a_branch2b[0][0] \n", 162 | "__________________________________________________________________________________________________\n", 163 | "activation_208 (Activation) (None, 28, 28, 128) 0 bn3a_branch2b[0][0] \n", 164 | "__________________________________________________________________________________________________\n", 165 | "res3a_branch2c (Conv2D) (None, 28, 28, 512) 66048 activation_208[0][0] \n", 166 | "__________________________________________________________________________________________________\n", 167 | "res3a_branch1 (Conv2D) (None, 28, 28, 512) 131584 activation_206[0][0] \n", 168 | "__________________________________________________________________________________________________\n", 169 | "bn3a_branch2c (BatchNormalizati (None, 28, 28, 512) 2048 res3a_branch2c[0][0] \n", 170 | "__________________________________________________________________________________________________\n", 171 | "bn3a_branch1 (BatchNormalizatio (None, 28, 28, 512) 2048 res3a_branch1[0][0] \n", 172 | 
"__________________________________________________________________________________________________\n", 173 | "add_68 (Add) (None, 28, 28, 512) 0 bn3a_branch2c[0][0] \n", 174 | " bn3a_branch1[0][0] \n", 175 | "__________________________________________________________________________________________________\n", 176 | "activation_209 (Activation) (None, 28, 28, 512) 0 add_68[0][0] \n", 177 | "__________________________________________________________________________________________________\n", 178 | "res3b_branch2a (Conv2D) (None, 28, 28, 128) 65664 activation_209[0][0] \n", 179 | "__________________________________________________________________________________________________\n", 180 | "bn3b_branch2a (BatchNormalizati (None, 28, 28, 128) 512 res3b_branch2a[0][0] \n", 181 | "__________________________________________________________________________________________________\n", 182 | "activation_210 (Activation) (None, 28, 28, 128) 0 bn3b_branch2a[0][0] \n", 183 | "__________________________________________________________________________________________________\n", 184 | "res3b_branch2b (Conv2D) (None, 28, 28, 128) 147584 activation_210[0][0] \n", 185 | "__________________________________________________________________________________________________\n", 186 | "bn3b_branch2b (BatchNormalizati (None, 28, 28, 128) 512 res3b_branch2b[0][0] \n", 187 | "__________________________________________________________________________________________________\n", 188 | "activation_211 (Activation) (None, 28, 28, 128) 0 bn3b_branch2b[0][0] \n", 189 | "__________________________________________________________________________________________________\n", 190 | "res3b_branch2c (Conv2D) (None, 28, 28, 512) 66048 activation_211[0][0] \n", 191 | "__________________________________________________________________________________________________\n", 192 | "bn3b_branch2c (BatchNormalizati (None, 28, 28, 512) 2048 res3b_branch2c[0][0] \n", 193 | 
"__________________________________________________________________________________________________\n", 194 | "add_69 (Add) (None, 28, 28, 512) 0 bn3b_branch2c[0][0] \n", 195 | " activation_209[0][0] \n", 196 | "__________________________________________________________________________________________________\n", 197 | "activation_212 (Activation) (None, 28, 28, 512) 0 add_69[0][0] \n", 198 | "__________________________________________________________________________________________________\n", 199 | "res3c_branch2a (Conv2D) (None, 28, 28, 128) 65664 activation_212[0][0] \n", 200 | "__________________________________________________________________________________________________\n", 201 | "bn3c_branch2a (BatchNormalizati (None, 28, 28, 128) 512 res3c_branch2a[0][0] \n", 202 | "__________________________________________________________________________________________________\n", 203 | "activation_213 (Activation) (None, 28, 28, 128) 0 bn3c_branch2a[0][0] \n", 204 | "__________________________________________________________________________________________________\n", 205 | "res3c_branch2b (Conv2D) (None, 28, 28, 128) 147584 activation_213[0][0] \n", 206 | "__________________________________________________________________________________________________\n", 207 | "bn3c_branch2b (BatchNormalizati (None, 28, 28, 128) 512 res3c_branch2b[0][0] \n", 208 | "__________________________________________________________________________________________________\n", 209 | "activation_214 (Activation) (None, 28, 28, 128) 0 bn3c_branch2b[0][0] \n", 210 | "__________________________________________________________________________________________________\n", 211 | "res3c_branch2c (Conv2D) (None, 28, 28, 512) 66048 activation_214[0][0] \n", 212 | "__________________________________________________________________________________________________\n", 213 | "bn3c_branch2c (BatchNormalizati (None, 28, 28, 512) 2048 res3c_branch2c[0][0] \n", 214 | 
"__________________________________________________________________________________________________\n", 215 | "add_70 (Add) (None, 28, 28, 512) 0 bn3c_branch2c[0][0] \n", 216 | " activation_212[0][0] \n", 217 | "__________________________________________________________________________________________________\n", 218 | "activation_215 (Activation) (None, 28, 28, 512) 0 add_70[0][0] \n", 219 | "__________________________________________________________________________________________________\n", 220 | "res3d_branch2a (Conv2D) (None, 28, 28, 128) 65664 activation_215[0][0] \n", 221 | "__________________________________________________________________________________________________\n", 222 | "bn3d_branch2a (BatchNormalizati (None, 28, 28, 128) 512 res3d_branch2a[0][0] \n", 223 | "__________________________________________________________________________________________________\n", 224 | "activation_216 (Activation) (None, 28, 28, 128) 0 bn3d_branch2a[0][0] \n", 225 | "__________________________________________________________________________________________________\n", 226 | "res3d_branch2b (Conv2D) (None, 28, 28, 128) 147584 activation_216[0][0] \n", 227 | "__________________________________________________________________________________________________\n", 228 | "bn3d_branch2b (BatchNormalizati (None, 28, 28, 128) 512 res3d_branch2b[0][0] \n", 229 | "__________________________________________________________________________________________________\n", 230 | "activation_217 (Activation) (None, 28, 28, 128) 0 bn3d_branch2b[0][0] \n", 231 | "__________________________________________________________________________________________________\n", 232 | "res3d_branch2c (Conv2D) (None, 28, 28, 512) 66048 activation_217[0][0] \n", 233 | "__________________________________________________________________________________________________\n", 234 | "bn3d_branch2c (BatchNormalizati (None, 28, 28, 512) 2048 res3d_branch2c[0][0] \n", 235 | 
"__________________________________________________________________________________________________\n", 236 | "add_71 (Add) (None, 28, 28, 512) 0 bn3d_branch2c[0][0] \n", 237 | " activation_215[0][0] \n", 238 | "__________________________________________________________________________________________________\n", 239 | "activation_218 (Activation) (None, 28, 28, 512) 0 add_71[0][0] \n", 240 | "__________________________________________________________________________________________________\n", 241 | "res4a_branch2a (Conv2D) (None, 14, 14, 256) 131328 activation_218[0][0] \n", 242 | "__________________________________________________________________________________________________\n", 243 | "bn4a_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4a_branch2a[0][0] \n", 244 | "__________________________________________________________________________________________________\n", 245 | "activation_219 (Activation) (None, 14, 14, 256) 0 bn4a_branch2a[0][0] \n", 246 | "__________________________________________________________________________________________________\n", 247 | "res4a_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_219[0][0] \n", 248 | "__________________________________________________________________________________________________\n", 249 | "bn4a_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4a_branch2b[0][0] \n", 250 | "__________________________________________________________________________________________________\n", 251 | "activation_220 (Activation) (None, 14, 14, 256) 0 bn4a_branch2b[0][0] \n", 252 | "__________________________________________________________________________________________________\n", 253 | "res4a_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_220[0][0] \n", 254 | "__________________________________________________________________________________________________\n", 255 | "res4a_branch1 (Conv2D) (None, 14, 14, 1024) 525312 activation_218[0][0] \n", 256 | 
"__________________________________________________________________________________________________\n", 257 | "bn4a_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4a_branch2c[0][0] \n", 258 | "__________________________________________________________________________________________________\n", 259 | "bn4a_branch1 (BatchNormalizatio (None, 14, 14, 1024) 4096 res4a_branch1[0][0] \n", 260 | "__________________________________________________________________________________________________\n", 261 | "add_72 (Add) (None, 14, 14, 1024) 0 bn4a_branch2c[0][0] \n", 262 | " bn4a_branch1[0][0] \n", 263 | "__________________________________________________________________________________________________\n", 264 | "activation_221 (Activation) (None, 14, 14, 1024) 0 add_72[0][0] \n", 265 | "__________________________________________________________________________________________________\n", 266 | "res4b_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_221[0][0] \n", 267 | "__________________________________________________________________________________________________\n", 268 | "bn4b_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4b_branch2a[0][0] \n", 269 | "__________________________________________________________________________________________________\n", 270 | "activation_222 (Activation) (None, 14, 14, 256) 0 bn4b_branch2a[0][0] \n", 271 | "__________________________________________________________________________________________________\n", 272 | "res4b_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_222[0][0] \n", 273 | "__________________________________________________________________________________________________\n", 274 | "bn4b_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4b_branch2b[0][0] \n", 275 | "__________________________________________________________________________________________________\n", 276 | "activation_223 (Activation) (None, 14, 14, 256) 0 bn4b_branch2b[0][0] \n", 277 | 
"__________________________________________________________________________________________________\n", 278 | "res4b_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_223[0][0] \n", 279 | "__________________________________________________________________________________________________\n", 280 | "bn4b_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4b_branch2c[0][0] \n", 281 | "__________________________________________________________________________________________________\n", 282 | "add_73 (Add) (None, 14, 14, 1024) 0 bn4b_branch2c[0][0] \n", 283 | " activation_221[0][0] \n", 284 | "__________________________________________________________________________________________________\n", 285 | "activation_224 (Activation) (None, 14, 14, 1024) 0 add_73[0][0] \n", 286 | "__________________________________________________________________________________________________\n", 287 | "res4c_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_224[0][0] \n", 288 | "__________________________________________________________________________________________________\n", 289 | "bn4c_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4c_branch2a[0][0] \n", 290 | "__________________________________________________________________________________________________\n", 291 | "activation_225 (Activation) (None, 14, 14, 256) 0 bn4c_branch2a[0][0] \n", 292 | "__________________________________________________________________________________________________\n", 293 | "res4c_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_225[0][0] \n", 294 | "__________________________________________________________________________________________________\n", 295 | "bn4c_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4c_branch2b[0][0] \n", 296 | "__________________________________________________________________________________________________\n", 297 | "activation_226 (Activation) (None, 14, 14, 256) 0 bn4c_branch2b[0][0] \n", 298 | 
"__________________________________________________________________________________________________\n", 299 | "res4c_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_226[0][0] \n", 300 | "__________________________________________________________________________________________________\n", 301 | "bn4c_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4c_branch2c[0][0] \n", 302 | "__________________________________________________________________________________________________\n", 303 | "add_74 (Add) (None, 14, 14, 1024) 0 bn4c_branch2c[0][0] \n", 304 | " activation_224[0][0] \n", 305 | "__________________________________________________________________________________________________\n", 306 | "activation_227 (Activation) (None, 14, 14, 1024) 0 add_74[0][0] \n", 307 | "__________________________________________________________________________________________________\n", 308 | "res4d_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_227[0][0] \n", 309 | "__________________________________________________________________________________________________\n", 310 | "bn4d_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4d_branch2a[0][0] \n", 311 | "__________________________________________________________________________________________________\n", 312 | "activation_228 (Activation) (None, 14, 14, 256) 0 bn4d_branch2a[0][0] \n", 313 | "__________________________________________________________________________________________________\n", 314 | "res4d_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_228[0][0] \n", 315 | "__________________________________________________________________________________________________\n", 316 | "bn4d_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4d_branch2b[0][0] \n", 317 | "__________________________________________________________________________________________________\n", 318 | "activation_229 (Activation) (None, 14, 14, 256) 0 bn4d_branch2b[0][0] \n", 319 | 
"__________________________________________________________________________________________________\n", 320 | "res4d_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_229[0][0] \n", 321 | "__________________________________________________________________________________________________\n", 322 | "bn4d_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4d_branch2c[0][0] \n", 323 | "__________________________________________________________________________________________________\n", 324 | "add_75 (Add) (None, 14, 14, 1024) 0 bn4d_branch2c[0][0] \n", 325 | " activation_227[0][0] \n", 326 | "__________________________________________________________________________________________________\n", 327 | "activation_230 (Activation) (None, 14, 14, 1024) 0 add_75[0][0] \n", 328 | "__________________________________________________________________________________________________\n", 329 | "res4e_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_230[0][0] \n", 330 | "__________________________________________________________________________________________________\n", 331 | "bn4e_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4e_branch2a[0][0] \n", 332 | "__________________________________________________________________________________________________\n", 333 | "activation_231 (Activation) (None, 14, 14, 256) 0 bn4e_branch2a[0][0] \n", 334 | "__________________________________________________________________________________________________\n", 335 | "res4e_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_231[0][0] \n", 336 | "__________________________________________________________________________________________________\n", 337 | "bn4e_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4e_branch2b[0][0] \n", 338 | "__________________________________________________________________________________________________\n", 339 | "activation_232 (Activation) (None, 14, 14, 256) 0 bn4e_branch2b[0][0] \n", 340 | 
"__________________________________________________________________________________________________\n", 341 | "res4e_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_232[0][0] \n", 342 | "__________________________________________________________________________________________________\n", 343 | "bn4e_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4e_branch2c[0][0] \n", 344 | "__________________________________________________________________________________________________\n", 345 | "add_76 (Add) (None, 14, 14, 1024) 0 bn4e_branch2c[0][0] \n", 346 | " activation_230[0][0] \n", 347 | "__________________________________________________________________________________________________\n", 348 | "activation_233 (Activation) (None, 14, 14, 1024) 0 add_76[0][0] \n", 349 | "__________________________________________________________________________________________________\n", 350 | "res4f_branch2a (Conv2D) (None, 14, 14, 256) 262400 activation_233[0][0] \n", 351 | "__________________________________________________________________________________________________\n", 352 | "bn4f_branch2a (BatchNormalizati (None, 14, 14, 256) 1024 res4f_branch2a[0][0] \n", 353 | "__________________________________________________________________________________________________\n", 354 | "activation_234 (Activation) (None, 14, 14, 256) 0 bn4f_branch2a[0][0] \n", 355 | "__________________________________________________________________________________________________\n", 356 | "res4f_branch2b (Conv2D) (None, 14, 14, 256) 590080 activation_234[0][0] \n", 357 | "__________________________________________________________________________________________________\n", 358 | "bn4f_branch2b (BatchNormalizati (None, 14, 14, 256) 1024 res4f_branch2b[0][0] \n", 359 | "__________________________________________________________________________________________________\n", 360 | "activation_235 (Activation) (None, 14, 14, 256) 0 bn4f_branch2b[0][0] \n", 361 | 
"__________________________________________________________________________________________________\n", 362 | "res4f_branch2c (Conv2D) (None, 14, 14, 1024) 263168 activation_235[0][0] \n", 363 | "__________________________________________________________________________________________________\n", 364 | "bn4f_branch2c (BatchNormalizati (None, 14, 14, 1024) 4096 res4f_branch2c[0][0] \n", 365 | "__________________________________________________________________________________________________\n", 366 | "add_77 (Add) (None, 14, 14, 1024) 0 bn4f_branch2c[0][0] \n", 367 | " activation_233[0][0] \n", 368 | "__________________________________________________________________________________________________\n", 369 | "activation_236 (Activation) (None, 14, 14, 1024) 0 add_77[0][0] \n", 370 | "__________________________________________________________________________________________________\n", 371 | "res5a_branch2a (Conv2D) (None, 7, 7, 512) 524800 activation_236[0][0] \n", 372 | "__________________________________________________________________________________________________\n", 373 | "bn5a_branch2a (BatchNormalizati (None, 7, 7, 512) 2048 res5a_branch2a[0][0] \n", 374 | "__________________________________________________________________________________________________\n", 375 | "activation_237 (Activation) (None, 7, 7, 512) 0 bn5a_branch2a[0][0] \n", 376 | "__________________________________________________________________________________________________\n", 377 | "res5a_branch2b (Conv2D) (None, 7, 7, 512) 2359808 activation_237[0][0] \n", 378 | "__________________________________________________________________________________________________\n", 379 | "bn5a_branch2b (BatchNormalizati (None, 7, 7, 512) 2048 res5a_branch2b[0][0] \n", 380 | "__________________________________________________________________________________________________\n", 381 | "activation_238 (Activation) (None, 7, 7, 512) 0 bn5a_branch2b[0][0] \n", 382 | 
"__________________________________________________________________________________________________\n", 383 | "res5a_branch2c (Conv2D) (None, 7, 7, 2048) 1050624 activation_238[0][0] \n", 384 | "__________________________________________________________________________________________________\n", 385 | "res5a_branch1 (Conv2D) (None, 7, 7, 2048) 2099200 activation_236[0][0] \n", 386 | "__________________________________________________________________________________________________\n", 387 | "bn5a_branch2c (BatchNormalizati (None, 7, 7, 2048) 8192 res5a_branch2c[0][0] \n", 388 | "__________________________________________________________________________________________________\n", 389 | "bn5a_branch1 (BatchNormalizatio (None, 7, 7, 2048) 8192 res5a_branch1[0][0] \n", 390 | "__________________________________________________________________________________________________\n", 391 | "add_78 (Add) (None, 7, 7, 2048) 0 bn5a_branch2c[0][0] \n", 392 | " bn5a_branch1[0][0] \n", 393 | "__________________________________________________________________________________________________\n", 394 | "activation_239 (Activation) (None, 7, 7, 2048) 0 add_78[0][0] \n", 395 | "__________________________________________________________________________________________________\n", 396 | "res5b_branch2a (Conv2D) (None, 7, 7, 512) 1049088 activation_239[0][0] \n", 397 | "__________________________________________________________________________________________________\n", 398 | "bn5b_branch2a (BatchNormalizati (None, 7, 7, 512) 2048 res5b_branch2a[0][0] \n", 399 | "__________________________________________________________________________________________________\n", 400 | "activation_240 (Activation) (None, 7, 7, 512) 0 bn5b_branch2a[0][0] \n", 401 | "__________________________________________________________________________________________________\n", 402 | "res5b_branch2b (Conv2D) (None, 7, 7, 512) 2359808 activation_240[0][0] \n", 403 | 
"__________________________________________________________________________________________________\n", 404 | "bn5b_branch2b (BatchNormalizati (None, 7, 7, 512) 2048 res5b_branch2b[0][0] \n", 405 | "__________________________________________________________________________________________________\n", 406 | "activation_241 (Activation) (None, 7, 7, 512) 0 bn5b_branch2b[0][0] \n", 407 | "__________________________________________________________________________________________________\n", 408 | "res5b_branch2c (Conv2D) (None, 7, 7, 2048) 1050624 activation_241[0][0] \n", 409 | "__________________________________________________________________________________________________\n", 410 | "bn5b_branch2c (BatchNormalizati (None, 7, 7, 2048) 8192 res5b_branch2c[0][0] \n", 411 | "__________________________________________________________________________________________________\n", 412 | "add_79 (Add) (None, 7, 7, 2048) 0 bn5b_branch2c[0][0] \n", 413 | " activation_239[0][0] \n", 414 | "__________________________________________________________________________________________________\n", 415 | "activation_242 (Activation) (None, 7, 7, 2048) 0 add_79[0][0] \n", 416 | "__________________________________________________________________________________________________\n", 417 | "res5c_branch2a (Conv2D) (None, 7, 7, 512) 1049088 activation_242[0][0] \n", 418 | "__________________________________________________________________________________________________\n", 419 | "bn5c_branch2a (BatchNormalizati (None, 7, 7, 512) 2048 res5c_branch2a[0][0] \n", 420 | "__________________________________________________________________________________________________\n", 421 | "activation_243 (Activation) (None, 7, 7, 512) 0 bn5c_branch2a[0][0] \n", 422 | "__________________________________________________________________________________________________\n", 423 | "res5c_branch2b (Conv2D) (None, 7, 7, 512) 2359808 activation_243[0][0] \n", 424 | 
"__________________________________________________________________________________________________\n", 425 | "bn5c_branch2b (BatchNormalizati (None, 7, 7, 512) 2048 res5c_branch2b[0][0] \n", 426 | "__________________________________________________________________________________________________\n", 427 | "activation_244 (Activation) (None, 7, 7, 512) 0 bn5c_branch2b[0][0] \n", 428 | "__________________________________________________________________________________________________\n", 429 | "res5c_branch2c (Conv2D) (None, 7, 7, 2048) 1050624 activation_244[0][0] \n", 430 | "__________________________________________________________________________________________________\n", 431 | "bn5c_branch2c (BatchNormalizati (None, 7, 7, 2048) 8192 res5c_branch2c[0][0] \n", 432 | "__________________________________________________________________________________________________\n", 433 | "add_80 (Add) (None, 7, 7, 2048) 0 bn5c_branch2c[0][0] \n", 434 | " activation_242[0][0] \n", 435 | "__________________________________________________________________________________________________\n", 436 | "activation_245 (Activation) (None, 7, 7, 2048) 0 add_80[0][0] \n", 437 | "==================================================================================================\n", 438 | "Total params: 23,587,712\n", 439 | "Trainable params: 23,534,592\n", 440 | "Non-trainable params: 53,120\n", 441 | "__________________________________________________________________________________________________\n" 442 | ], 443 | "name": "stdout" 444 | } 445 | ] 446 | }, 447 | { 448 | "cell_type": "code", 449 | "metadata": { 450 | "id": "eDWnhtJz-HGa", 451 | "colab_type": "code", 452 | "colab": {} 453 | }, 454 | "source": [ 455 | "from keras.layers.recurrent import GRU\n", 456 | "from keras.layers import Reshape, Lambda\n", 457 | "from keras import backend as K\n", 458 | "from keras.layers.merge import add, concatenate\n", 459 | "from keras.layers import Reshape, Lambda, BatchNormalization\n", 460 | 
"\n", 461 | "inner = Reshape(target_shape=((7, 14336)), name='reshape')(resnet_model.layers[-1].output) # (None, 32, 2048)\n", 462 | "inner = Dense(64, activation='relu', kernel_initializer='he_normal', name='dense1')(inner)\n", 463 | "gru_1 = GRU(256, return_sequences=True, kernel_initializer='he_normal', name='gru1')(inner) # (None, 32, 512)\n", 464 | "gru_1b = GRU(256, return_sequences=True, go_backwards=True, kernel_initializer='he_normal', name='gru1_b')(inner)\n", 465 | "reversed_gru_1b = Lambda(lambda inputTensor: K.reverse(inputTensor, axes=1)) (gru_1b)\n", 466 | "\n", 467 | "gru1_merged = add([gru_1, reversed_gru_1b]) # (None, 32, 512)\n", 468 | "gru1_merged = BatchNormalization()(gru1_merged)\n", 469 | "\n", 470 | "gru_2 = GRU(256, return_sequences=True, kernel_initializer='he_normal', name='gru2')(gru1_merged)\n", 471 | "gru_2b = GRU(256, return_sequences=True, go_backwards=True, kernel_initializer='he_normal', name='gru2_b')(gru1_merged)\n", 472 | "reversed_gru_2b= Lambda(lambda inputTensor: K.reverse(inputTensor, axes=1)) (gru_2b)\n", 473 | "\n", 474 | "gru2_merged = concatenate([gru_2, reversed_gru_2b]) # (None, 32, 1024)\n", 475 | "gru2_merged = BatchNormalization()(gru2_merged)\n", 476 | "\n", 477 | "out = Dense(7, kernel_initializer='he_normal',name='dense2', activation=\"softmax\")(gru2_merged)" 478 | ], 479 | "execution_count": 0, 480 | "outputs": [] 481 | }, 482 | { 483 | "cell_type": "code", 484 | "metadata": { 485 | "id": "BFr1I4Yz-HB9", 486 | "colab_type": "code", 487 | "colab": { 488 | "base_uri": "https://localhost:8080/", 489 | "height": 71 490 | }, 491 | "outputId": "b23ba487-71bc-474c-9198-997d8d88cc4f" 492 | }, 493 | "source": [ 494 | "model = Model(input=resnet_model.layers[0].input, output=[out])" 495 | ], 496 | "execution_count": 33, 497 | "outputs": [ 498 | { 499 | "output_type": "stream", 500 | "text": [ 501 | "/usr/local/lib/python3.6/dist-packages/ipykernel_launcher.py:1: UserWarning: Update your `Model` call to the Keras 2 API: 
`Model(inputs=Tensor(\"in..., outputs=[\"Open" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "source": [ 32 | "# Models\n", 33 | "\n", 34 | "In the previous lesson we have covered the basics of Langchain. Here is the link to the first lesson https://github.com/SamurAIGPT/langchain-course/blob/main/getting-started/Introduction.ipynb\n", 35 | "\n", 36 | "In this lesson we will talk about different kinds of models including LLMs, Chat Models and Embedding Models\n", 37 | "\n", 38 | "### What is a model ?\n", 39 | "\n", 40 | "A model is a program which is trained to complete a specific task. Since our course is about language tasks the models we will be using are language models. A model is trained on a huge corpus of data\n", 41 | "\n", 42 | "We are not going to cover the process of training as we are going to use already trained models. These pre-trained models are trained on large amounts of data and require a lot of compute to run and thus are called Large Language Models (LLM)" 43 | ], 44 | "metadata": { 45 | "id": "-dBjFwloRjWm" 46 | } 47 | }, 48 | { 49 | "cell_type": "markdown", 50 | "source": [ 51 | "# 1. LLM (Large Language Model)\n", 52 | "\n", 53 | "Now let's talk about LLM. LLM are trained to do language tasks like text generation. There are various LLM in the market but we are going to cover only OpenAI LLM since they are the most popular. 
OpenAI has 4 such models Davinci, Curie, Ada and Babbage\n", 54 | "\n", 55 | "##### Here is how you can use OpenAI LLM in langchain\n", 56 | "\n", 57 | "##### Let's install necessary libraries\n", 58 | "---\n", 59 | "\n" 60 | ], 61 | "metadata": { 62 | "id": "D39TvmO0XwVq" 63 | } 64 | }, 65 | { 66 | "cell_type": "code", 67 | "source": [ 68 | "!pip install langchain\n", 69 | "!pip install openai\n", 70 | "!pip install tiktoken" 71 | ], 72 | "metadata": { 73 | "colab": { 74 | "base_uri": "https://localhost:8080/" 75 | }, 76 | "id": "hxpekH8sY51F", 77 | "outputId": "5612d231-3814-4cff-a58e-45261c85237f" 78 | }, 79 | "execution_count": 8, 80 | "outputs": [ 81 | { 82 | "output_type": "stream", 83 | "name": "stdout", 84 | "text": [ 85 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", 86 | "Requirement already satisfied: langchain in /usr/local/lib/python3.10/dist-packages (0.0.180)\n", 87 | "Requirement already satisfied: PyYAML>=5.4.1 in /usr/local/lib/python3.10/dist-packages (from langchain) (6.0)\n", 88 | "Requirement already satisfied: SQLAlchemy<3,>=1.4 in /usr/local/lib/python3.10/dist-packages (from langchain) (2.0.10)\n", 89 | "Requirement already satisfied: aiohttp<4.0.0,>=3.8.3 in /usr/local/lib/python3.10/dist-packages (from langchain) (3.8.4)\n", 90 | "Requirement already satisfied: async-timeout<5.0.0,>=4.0.0 in /usr/local/lib/python3.10/dist-packages (from langchain) (4.0.2)\n", 91 | "Requirement already satisfied: dataclasses-json<0.6.0,>=0.5.7 in /usr/local/lib/python3.10/dist-packages (from langchain) (0.5.7)\n", 92 | "Requirement already satisfied: numexpr<3.0.0,>=2.8.4 in /usr/local/lib/python3.10/dist-packages (from langchain) (2.8.4)\n", 93 | "Requirement already satisfied: numpy<2,>=1 in /usr/local/lib/python3.10/dist-packages (from langchain) (1.22.4)\n", 94 | "Requirement already satisfied: openapi-schema-pydantic<2.0,>=1.2 in /usr/local/lib/python3.10/dist-packages (from langchain) 
(1.2.4)\n", 95 | "Requirement already satisfied: pydantic<2,>=1 in /usr/local/lib/python3.10/dist-packages (from langchain) (1.10.7)\n", 96 | "Requirement already satisfied: requests<3,>=2 in /usr/local/lib/python3.10/dist-packages (from langchain) (2.27.1)\n", 97 | "Requirement already satisfied: tenacity<9.0.0,>=8.1.0 in /usr/local/lib/python3.10/dist-packages (from langchain) (8.2.2)\n", 98 | "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (23.1.0)\n", 99 | "Requirement already satisfied: charset-normalizer<4.0,>=2.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (2.0.12)\n", 100 | "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (6.0.4)\n", 101 | "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (1.9.2)\n", 102 | "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (1.3.3)\n", 103 | "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp<4.0.0,>=3.8.3->langchain) (1.3.1)\n", 104 | "Requirement already satisfied: marshmallow<4.0.0,>=3.3.0 in /usr/local/lib/python3.10/dist-packages (from dataclasses-json<0.6.0,>=0.5.7->langchain) (3.19.0)\n", 105 | "Requirement already satisfied: marshmallow-enum<2.0.0,>=1.5.1 in /usr/local/lib/python3.10/dist-packages (from dataclasses-json<0.6.0,>=0.5.7->langchain) (1.5.1)\n", 106 | "Requirement already satisfied: typing-inspect>=0.4.0 in /usr/local/lib/python3.10/dist-packages (from dataclasses-json<0.6.0,>=0.5.7->langchain) (0.9.0)\n", 107 | "Requirement already satisfied: typing-extensions>=4.2.0 in /usr/local/lib/python3.10/dist-packages (from pydantic<2,>=1->langchain) (4.5.0)\n", 108 | "Requirement already satisfied: 
urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2->langchain) (1.26.15)\n", 109 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2->langchain) (2022.12.7)\n", 110 | "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests<3,>=2->langchain) (3.4)\n", 111 | "Requirement already satisfied: greenlet!=0.4.17 in /usr/local/lib/python3.10/dist-packages (from SQLAlchemy<3,>=1.4->langchain) (2.0.2)\n", 112 | "Requirement already satisfied: packaging>=17.0 in /usr/local/lib/python3.10/dist-packages (from marshmallow<4.0.0,>=3.3.0->dataclasses-json<0.6.0,>=0.5.7->langchain) (23.1)\n", 113 | "Requirement already satisfied: mypy-extensions>=0.3.0 in /usr/local/lib/python3.10/dist-packages (from typing-inspect>=0.4.0->dataclasses-json<0.6.0,>=0.5.7->langchain) (1.0.0)\n", 114 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", 115 | "Requirement already satisfied: openai in /usr/local/lib/python3.10/dist-packages (0.27.7)\n", 116 | "Requirement already satisfied: requests>=2.20 in /usr/local/lib/python3.10/dist-packages (from openai) (2.27.1)\n", 117 | "Requirement already satisfied: tqdm in /usr/local/lib/python3.10/dist-packages (from openai) (4.65.0)\n", 118 | "Requirement already satisfied: aiohttp in /usr/local/lib/python3.10/dist-packages (from openai) (3.8.4)\n", 119 | "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests>=2.20->openai) (1.26.15)\n", 120 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests>=2.20->openai) (2022.12.7)\n", 121 | "Requirement already satisfied: charset-normalizer~=2.0.0 in /usr/local/lib/python3.10/dist-packages (from requests>=2.20->openai) (2.0.12)\n", 122 | "Requirement already satisfied: idna<4,>=2.5 in 
/usr/local/lib/python3.10/dist-packages (from requests>=2.20->openai) (3.4)\n", 123 | "Requirement already satisfied: attrs>=17.3.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->openai) (23.1.0)\n", 124 | "Requirement already satisfied: multidict<7.0,>=4.5 in /usr/local/lib/python3.10/dist-packages (from aiohttp->openai) (6.0.4)\n", 125 | "Requirement already satisfied: async-timeout<5.0,>=4.0.0a3 in /usr/local/lib/python3.10/dist-packages (from aiohttp->openai) (4.0.2)\n", 126 | "Requirement already satisfied: yarl<2.0,>=1.0 in /usr/local/lib/python3.10/dist-packages (from aiohttp->openai) (1.9.2)\n", 127 | "Requirement already satisfied: frozenlist>=1.1.1 in /usr/local/lib/python3.10/dist-packages (from aiohttp->openai) (1.3.3)\n", 128 | "Requirement already satisfied: aiosignal>=1.1.2 in /usr/local/lib/python3.10/dist-packages (from aiohttp->openai) (1.3.1)\n", 129 | "Looking in indexes: https://pypi.org/simple, https://us-python.pkg.dev/colab-wheels/public/simple/\n", 130 | "Collecting tiktoken\n", 131 | " Downloading tiktoken-0.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.7 MB)\n", 132 | "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.7/1.7 MB\u001b[0m \u001b[31m19.8 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", 133 | "\u001b[?25hRequirement already satisfied: regex>=2022.1.18 in /usr/local/lib/python3.10/dist-packages (from tiktoken) (2022.10.31)\n", 134 | "Requirement already satisfied: requests>=2.26.0 in /usr/local/lib/python3.10/dist-packages (from tiktoken) (2.27.1)\n", 135 | "Requirement already satisfied: urllib3<1.27,>=1.21.1 in /usr/local/lib/python3.10/dist-packages (from requests>=2.26.0->tiktoken) (1.26.15)\n", 136 | "Requirement already satisfied: certifi>=2017.4.17 in /usr/local/lib/python3.10/dist-packages (from requests>=2.26.0->tiktoken) (2022.12.7)\n", 137 | "Requirement already satisfied: charset-normalizer~=2.0.0 in /usr/local/lib/python3.10/dist-packages (from 
requests>=2.26.0->tiktoken) (2.0.12)\n", 138 | "Requirement already satisfied: idna<4,>=2.5 in /usr/local/lib/python3.10/dist-packages (from requests>=2.26.0->tiktoken) (3.4)\n", 139 | "Installing collected packages: tiktoken\n", 140 | "Successfully installed tiktoken-0.4.0\n" 141 | ] 142 | } 143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "source": [ 148 | "from langchain.llms import OpenAI\n", 149 | "llm = OpenAI(temperature=1, openai_api_key=\"openai-key\")\n", 150 | "print(llm(\"Tell me a joke\"))" 151 | ], 152 | "metadata": { 153 | "colab": { 154 | "base_uri": "https://localhost:8080/" 155 | }, 156 | "id": "DY2ySwxrR6FA", 157 | "outputId": "ef7623ac-3d45-4fde-9c52-505145cad3b1" 158 | }, 159 | "execution_count": 6, 160 | "outputs": [ 161 | { 162 | "output_type": "stream", 163 | "name": "stdout", 164 | "text": [ 165 | "\n", 166 | "\n", 167 | "Q: What did the fish say when he hit the wall?\n", 168 | "A: Dam!\n" 169 | ] 170 | } 171 | ] 172 | }, 173 | { 174 | "cell_type": "markdown", 175 | "source": [ 176 | "### Estimating number of tokens\n", 177 | "\n", 178 | "OpenAI models have a context length limiting the size of input data which can be sent to the model. Thus we need to make sure the input text is below that limit before sending to the model. 
We can do that calculation using the code below" 179 | ], 180 | "metadata": { 181 | "id": "rE2dyU_7jFdQ" 182 | } 183 | }, 184 | { 185 | "cell_type": "code", 186 | "source": [ 187 | "llm.get_num_tokens(\"what a joke\")" 188 | ], 189 | "metadata": { 190 | "id": "aJ8nKBryZJ05", 191 | "colab": { 192 | "base_uri": "https://localhost:8080/" 193 | }, 194 | "outputId": "8517fd34-08a1-4bd2-cb25-38fbe30b5bbf" 195 | }, 196 | "execution_count": 9, 197 | "outputs": [ 198 | { 199 | "output_type": "execute_result", 200 | "data": { 201 | "text/plain": [ 202 | "3" 203 | ] 204 | }, 205 | "metadata": {}, 206 | "execution_count": 9 207 | } 208 | ] 209 | }, 210 | { 211 | "cell_type": "markdown", 212 | "source": [ 213 | "### Streaming\n", 214 | "\n", 215 | "Streaming is a major concept in LLM which allows you to display output on the go instead of waiting for the full output. Even in the ChatGPT interface you will see content streamed instead of waiting till entire output is generated\n", 216 | "\n", 217 | "Here is a code example for the same. 
We handle streaming in langchain using a callback handler" 218 | ], 219 | "metadata": { 220 | "id": "39MZ8q1vkRmy" 221 | } 222 | }, 223 | { 224 | "cell_type": "code", 225 | "source": [ 226 | "from langchain.llms import OpenAI\n", 227 | "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n", 228 | "llm = OpenAI(streaming=True, callbacks=[StreamingStdOutCallbackHandler()], temperature=0, openai_api_key=\"openai-key\")\n", 229 | "resp = llm(\"Write me a poem on beauty.\")" 230 | ], 231 | "metadata": { 232 | "colab": { 233 | "base_uri": "https://localhost:8080/" 234 | }, 235 | "id": "wg_l58n4jelZ", 236 | "outputId": "ab2c4ab4-0ee6-4cb1-b171-8bef5e9a65ba" 237 | }, 238 | "execution_count": 12, 239 | "outputs": [ 240 | { 241 | "output_type": "stream", 242 | "name": "stdout", 243 | "text": [ 244 | "\n", 245 | "\n", 246 | "Beauty is a thing of wonder,\n", 247 | "A sight that can't be denied.\n", 248 | "It's a feeling that can't be measured,\n", 249 | "A feeling that can't be denied.\n", 250 | "\n", 251 | "It's a thing of beauty that can't be seen,\n", 252 | "But can be felt in the heart and soul.\n", 253 | "It's a thing of beauty that can't be touched,\n", 254 | "But can be seen in the eyes of the beholder.\n", 255 | "\n", 256 | "Beauty is a thing of joy,\n", 257 | "A thing that can't be taken away.\n", 258 | "It's a thing of beauty that can't be bought,\n", 259 | "But can be found in the simplest of things.\n", 260 | "\n", 261 | "Beauty is a thing of love,\n", 262 | "A thing that can't be denied.\n", 263 | "It's a thing of beauty that can't be measured,\n", 264 | "But can be seen in the love of others.\n", 265 | "\n", 266 | "Beauty is a thing of life,\n", 267 | "A thing that can't be taken away.\n", 268 | "It's a thing of beauty that can't be bought,\n", 269 | "But can be found in the simplest of things." 270 | ] 271 | } 272 | ] 273 | }, 274 | { 275 | "cell_type": "markdown", 276 | "source": [ 277 | "# 2. 
Chat Models\n", 278 | "\n", 279 | "The second set of models we are going to cover are the chat models. The famous ChatGPT model GPT-3.5 comes under this. The main difference between the previous LLM models and the Chat Models are\n", 280 | "\n", 281 | "* Chat Models are 10x cheaper for api calls\n", 282 | "* You can hold a conversation with a chat model like you can with a human which is not possible with the previous LLMs\n", 283 | "\n", 284 | "Since Chat Models can hold a conversation they take a list of chat messages as input instead of plain text like a LLM\n", 285 | "\n", 286 | "Now let's discuss how we can use these Chat Models. Let's do the necessary imports" 287 | ], 288 | "metadata": { 289 | "id": "a6POXNT5lISE" 290 | } 291 | }, 292 | { 293 | "cell_type": "code", 294 | "source": [ 295 | "from langchain.chat_models import ChatOpenAI\n", 296 | "from langchain import PromptTemplate, LLMChain\n", 297 | "from langchain.prompts.chat import (\n", 298 | " ChatPromptTemplate,\n", 299 | " SystemMessagePromptTemplate,\n", 300 | " AIMessagePromptTemplate,\n", 301 | " HumanMessagePromptTemplate,\n", 302 | ")\n", 303 | "from langchain.schema import (\n", 304 | " AIMessage,\n", 305 | " HumanMessage,\n", 306 | " SystemMessage\n", 307 | ")" 308 | ], 309 | "metadata": { 310 | "id": "-vt0WitNkGbO" 311 | }, 312 | "execution_count": 13, 313 | "outputs": [] 314 | }, 315 | { 316 | "cell_type": "markdown", 317 | "source": [ 318 | "Instead of using a OpenAI class we will be using a ChatOpenAI class to create our chat LLM" 319 | ], 320 | "metadata": { 321 | "id": "PQCg5peknI4v" 322 | } 323 | }, 324 | { 325 | "cell_type": "code", 326 | "source": [ 327 | "chat = ChatOpenAI(temperature=0, openai_api_key=\"openai-key\")" 328 | ], 329 | "metadata": { 330 | "id": "HvNAJfqJnETc" 331 | }, 332 | "execution_count": 15, 333 | "outputs": [] 334 | }, 335 | { 336 | "cell_type": "markdown", 337 | "source": [ 338 | "Input is a bunch of messages. 
Messages are classified into 3 types\n", 339 | "\n", 340 | "* System Message - This is an initial prompt sent to the model to control the behavior of the model\n", 341 | "\n", 342 | "* Human Message - Input message of the user\n", 343 | "\n", 344 | "* AI Message - Message response given by ChatGPT\n", 345 | "\n", 346 | "ChatGPT needs a list of all these messages in the conversation to be able to understand the content and converse further\n", 347 | "\n", 348 | "Now let's see an example where we define the system message and the message input of the user and pass to the chat model. The output generated will be an AI message\n", 349 | "\n", 350 | "We are using a System Prompt to let the model do the task of paraphrasing. This technique of providing the model a prompt to make it perform a task is called Prompt Engineering and can be part of another lesson" 351 | ], 352 | "metadata": { 353 | "id": "tJwiLHVWnbvj" 354 | } 355 | }, 356 | { 357 | "cell_type": "code", 358 | "source": [ 359 | "messages = [\n", 360 | " SystemMessage(content=\"You are a helpful assistant that paraphrases the sentence.\"),\n", 361 | " HumanMessage(content=\"I love programming.\")\n", 362 | "]\n", 363 | "chat(messages)" 364 | ], 365 | "metadata": { 366 | "colab": { 367 | "base_uri": "https://localhost:8080/" 368 | }, 369 | "id": "qhyJTCHJnPb2", 370 | "outputId": "0a2e7b23-239a-466a-8df8-e0f4868f796f" 371 | }, 372 | "execution_count": 18, 373 | "outputs": [ 374 | { 375 | "output_type": "execute_result", 376 | "data": { 377 | "text/plain": [ 378 | "AIMessage(content='Programming is something that I have a great passion for.', additional_kwargs={}, example=False)" 379 | ] 380 | }, 381 | "metadata": {}, 382 | "execution_count": 18 383 | } 384 | ] 385 | }, 386 | { 387 | "cell_type": "markdown", 388 | "source": [ 389 | "# Templates in Chat Models\n", 390 | "\n", 391 | "We have discussed templates in lesson 1 which helps us to create dynamic inputs. We can do the same with Chat Models as well. 
Let's discuss the code for that\n", 392 | "\n", 393 | "We define a system message with input variable task. This task can be dynamically changed to do various tasks. For this example we will follow the task of paraphrasing" 394 | ], 395 | "metadata": { 396 | "id": "2WldCk-so6O0" 397 | } 398 | }, 399 | { 400 | "cell_type": "code", 401 | "source": [ 402 | "template=\"You are a helpful assistant that {task}.\"\n", 403 | "system_message_prompt = SystemMessagePromptTemplate.from_template(template)\n", 404 | "human_template=\"{text}\"\n", 405 | "human_message_prompt = HumanMessagePromptTemplate.from_template(human_template)\n", 406 | "chat_prompt = ChatPromptTemplate.from_messages([system_message_prompt, human_message_prompt])\n", 407 | "chat(chat_prompt.format_prompt(task=\"paraphrases the sentence\", text=\"I love programming.\").to_messages())" 408 | ], 409 | "metadata": { 410 | "colab": { 411 | "base_uri": "https://localhost:8080/" 412 | }, 413 | "id": "m1JWLOxwoIYE", 414 | "outputId": "d0748e8f-63a2-4124-a12f-f51cdcc00c9d" 415 | }, 416 | "execution_count": 19, 417 | "outputs": [ 418 | { 419 | "output_type": "execute_result", 420 | "data": { 421 | "text/plain": [ 422 | "AIMessage(content='Programming is something that I have a great passion for.', additional_kwargs={}, example=False)" 423 | ] 424 | }, 425 | "metadata": {}, 426 | "execution_count": 19 427 | } 428 | ] 429 | }, 430 | { 431 | "cell_type": "markdown", 432 | "source": [ 433 | "Just like how we used the LLMChain or SequentialChain for LLMs in lesson 1, we can do the same for Chat Models. 
Thus all the benefits we talked about, like chaining multiple tasks for LLMs, can be achieved with Chat Models as well. Here is an example" 434 | ], 435 | "metadata": { 436 | "id": "2b-ipRktpzc8" 437 | } 438 | }, 439 | { 440 | "cell_type": "code", 441 | "source": [ 442 | "chain = LLMChain(llm=chat, prompt=chat_prompt)\n", 443 | "chain.run(task=\"paraphrases the sentence\", text=\"I love programming.\")" 444 | ], 445 | "metadata": { 446 | "colab": { 447 | "base_uri": "https://localhost:8080/", 448 | "height": 35 449 | }, 450 | "id": "hBTlMb7XpZ_x", 451 | "outputId": "ee9a7415-5394-492d-da95-a8c379bc7f8b" 452 | }, 453 | "execution_count": 20, 454 | "outputs": [ 455 | { 456 | "output_type": "execute_result", 457 | "data": { 458 | "text/plain": [ 459 | "'Programming is something that I have a great passion for.'" 460 | ], 461 | "application/vnd.google.colaboratory.intrinsic+json": { 462 | "type": "string" 463 | } 464 | }, 465 | "metadata": {}, 466 | "execution_count": 20 467 | } 468 | ] 469 | }, 470 | { 471 | "cell_type": "markdown", 472 | "source": [ 473 | "### Streaming with Chat Models\n", 474 | "\n", 475 | "We discussed how streaming is useful in the above section of LLMs. 
Now let's see how we can do the same with Chat Models" 476 | ], 477 | "metadata": { 478 | "id": "1gnDNh7tqRi0" 479 | } 480 | }, 481 | { 482 | "cell_type": "code", 483 | "source": [ 484 | "from langchain.callbacks.streaming_stdout import StreamingStdOutCallbackHandler\n", 485 | "chat = ChatOpenAI(streaming=True, callbacks=[StreamingStdOutCallbackHandler()], temperature=0, openai_api_key=\"openai-key\")\n", 486 | "resp = chat([HumanMessage(content=\"Write me a poem on beauty.\")])" 487 | ], 488 | "metadata": { 489 | "colab": { 490 | "base_uri": "https://localhost:8080/" 491 | }, 492 | "id": "BElza6zoqL5k", 493 | "outputId": "107d9bcb-a852-4e8b-83d0-02ee5d51e3e2" 494 | }, 495 | "execution_count": 23, 496 | "outputs": [ 497 | { 498 | "output_type": "stream", 499 | "name": "stdout", 500 | "text": [ 501 | "Beauty is a thing of wonder,\n", 502 | "A sight that fills the heart with thunder,\n", 503 | "It's the sparkle in a lover's eye,\n", 504 | "Or the colors of a sunset sky.\n", 505 | "\n", 506 | "It's the way a flower blooms,\n", 507 | "Or the way a bird takes flight,\n", 508 | "It's the sound of a baby's coos,\n", 509 | "Or the stars that shine so bright.\n", 510 | "\n", 511 | "Beauty is in the laughter of a child,\n", 512 | "Or the way a mother's love is mild,\n", 513 | "It's the way a friend lends a hand,\n", 514 | "Or the way a stranger understands.\n", 515 | "\n", 516 | "It's the way the ocean waves crash,\n", 517 | "Or the way a mountain stands so tall,\n", 518 | "It's the way a painting can flash,\n", 519 | "Or the way a melody can enthrall.\n", 520 | "\n", 521 | "Beauty is all around us,\n", 522 | "In every moment, every place,\n", 523 | "It's up to us to see it thus,\n", 524 | "And let it fill our hearts with grace." 
525 | ] 526 | } 527 | ] 528 | }, 529 | { 530 | "cell_type": "markdown", 531 | "source": [ 532 | "# Embedding models\n", 533 | "\n", 534 | "So far we have talked about text generation models but now we are going to talk about a completely different kind of model called Embedding models\n", 535 | "\n", 536 | "### Embeddings\n", 537 | "\n", 538 | "First we need to understand what is an embedding. An embedding is generally associated with a piece of text and it represents the properties of text\n", 539 | "\n", 540 | "Just to give an example, let's consider the words good, best, bad. If we find the embeddings of these words we observe that embeddings of good and best are close while embedding of bad is far. The reason being embedding of a word has knowledge of the meaning of the word. Thus words with similar meanings have similar embeddings\n", 541 | "\n", 542 | "Embeddings also have an interesting property, as can be seen below. Let's consider E(x) as Embedding of word x\n", 543 | "\n", 544 | "E(king) - E(male) + E(female) ~= E(queen)\n", 545 | "\n", 546 | "What this represents is if we subtract the embedding of word male from word king and add the embedding of word female it will be quite close to embedding of word queen. As humans we can understand this intuitively as removing male gender and adding female gender to king makes it a queen but now machines have the capability to understand such complex relations" 547 | ], 548 | "metadata": { 549 | "id": "I8Jm6gWhq5wG" 550 | } 551 | }, 552 | { 553 | "cell_type": "markdown", 554 | "source": [ 555 | "### Use-cases\n", 556 | "\n", 557 | "Now that we have an idea of what embeddings are, the task of an embeddings model is to create these embeddings for the text input provided. 
A model which generates embeddings which can show properties like the ones we discussed above and more is considered a good model.\n", 558 | "\n", 559 | "Once these embeddings are generated, we can use them to perform tasks like semantic search similar to how apps like Chatbase, PDF.ai, SiteGPT work. You can create embeddings for all your documents or webpages and when a user asks a query you can fetch the relevant pieces and send them to the user\n", 560 | "\n", 561 | "Now let's discuss it with the help of an example" 562 | ], 563 | "metadata": { 564 | "id": "BIeDJtVKtLN6" 565 | } 566 | }, 567 | { 568 | "cell_type": "code", 569 | "source": [ 570 | "from langchain.embeddings import OpenAIEmbeddings\n", 571 | "embeddings = OpenAIEmbeddings(openai_api_key=\"openai-key\")\n", 572 | "text = \"This is dummy content.\"\n", 573 | "doc_result = embeddings.embed_documents([text])" 574 | ], 575 | "metadata": { 576 | "id": "BXNAymAqqiY3" 577 | }, 578 | "execution_count": 25, 579 | "outputs": [] 580 | }, 581 | { 582 | "cell_type": "markdown", 583 | "source": [ 584 | "As you can see, the output is a vector which is a representation of text \"This is dummy content\". To identify if two sentences are similar, we can calculate the distance between these vectors. 
If the distance is small, then the words are of similar meanings" 585 | ], 586 | "metadata": { 587 | "id": "lLnrb5VJu3Gx" 588 | } 589 | }, 590 | { 591 | "cell_type": "code", 592 | "source": [ 593 | "doc_result" 594 | ], 595 | "metadata": { 596 | "colab": { 597 | "base_uri": "https://localhost:8080/" 598 | }, 599 | "id": "3gKfYPHIuoLg", 600 | "outputId": "de4666a5-281b-47a4-c9d5-1b800003bb03" 601 | }, 602 | "execution_count": 26, 603 | "outputs": [ 604 | { 605 | "output_type": "execute_result", 606 | "data": { 607 | "text/plain": [ 608 | "[[0.009644212425631677,\n", 609 | " 0.00568438101747335,\n", 610 | " -0.02505903376829762,\n", 611 | " -0.009684009702816118,\n", 612 | " -0.011945822554121088,\n", 613 | " 0.014194370577687172,\n", 614 | " -0.03528693772998923,\n", 615 | " -0.025881510830109394,\n", 616 | " -0.0183200244397753,\n", 617 | " -0.038019687821944544,\n", 618 | " 0.004019527323934989,\n", 619 | " 0.0072829059157043085,\n", 620 | " -0.015587276210465188,\n", 621 | " 0.009504921955486136,\n", 622 | " -0.00734260183148097,\n", 623 | " -0.006304555719258846,\n", 624 | " 0.017643468864994617,\n", 625 | " -0.022007907321511987,\n", 626 | " 0.01074527135905713,\n", 627 | " -0.017988380463238297,\n", 628 | " -0.0033147834081325555,\n", 629 | " 0.0051736488279450635,\n", 630 | " -0.009664111064223898,\n", 631 | " 0.017988380463238297,\n", 632 | " -0.013318829548306883,\n", 633 | " -0.007959460093501097,\n", 634 | " 0.01968639762310776,\n", 635 | " -0.027247884013441855,\n", 636 | " 0.014141307541441252,\n", 637 | " -0.00493154872507305,\n", 638 | " 0.03128067849342222,\n", 639 | " -0.020734392588964694,\n", 640 | " -0.005962961957764433,\n", 641 | " -0.03711761620576389,\n", 642 | " 0.011481520986969281,\n", 643 | " 0.000909534129521696,\n", 644 | " 0.004513677314636424,\n", 645 | " -0.03669311191579652,\n", 646 | " 0.016966915152859127,\n", 647 | " 0.0019119285724737535,\n", 648 | " -0.020880315938640977,\n", 649 | " 0.007229842879458388,\n", 650 | " 
0.0031953915765792337,\n", 651 | " -0.04255658114626115,\n", 652 | " -0.014884190048884142,\n", 653 | " -0.009299302690033193,\n", 654 | " -0.0013398424801167712,\n", 655 | " -0.030723514750194858,\n", 656 | " -0.0012395200607990026,\n", 657 | " 0.016874054839428766,\n", 658 | " 0.01578626259638739,\n", 659 | " -0.006709161370633992,\n", 660 | " -0.033429731461382006,\n", 661 | " -0.010473322366974189,\n", 662 | " -0.001961675285369629,\n", 663 | " 0.011866227999752206,\n", 664 | " -0.013836195083027207,\n", 665 | " 0.020853784420518016,\n", 666 | " 0.01049985388509715,\n", 667 | " -0.0003886455178349516,\n", 668 | " -0.01821389836728346,\n", 669 | " -0.005031041918034151,\n", 670 | " -0.0070374893730669255,\n", 671 | " -0.02216709643024975,\n", 672 | " 0.0009153378991110936,\n", 673 | " -0.009491656196424655,\n", 674 | " -0.004284842505164593,\n", 675 | " -0.006390783153158467,\n", 676 | " 0.0016640245835970178,\n", 677 | " 0.00736250047007319,\n", 678 | " 0.018598605380066385,\n", 679 | " 0.00916664509941839,\n", 680 | " -0.003730996831517146,\n", 681 | " -0.02820302052851362,\n", 682 | " 0.02471412403269914,\n", 683 | " 0.0015836009192867942,\n", 684 | " -0.02488657890049838,\n", 685 | " 0.010327399017297907,\n", 686 | " 0.0075813854945876126,\n", 687 | " -0.011030485178878953,\n", 688 | " 0.022525271924909714,\n", 689 | " -0.04616486761098376,\n", 690 | " -0.025191691358912426,\n", 691 | " 0.018465947789451584,\n", 692 | " 0.03141333608403702,\n", 693 | " 0.01814756957197606,\n", 694 | " 0.02227322250274159,\n", 695 | " 0.030909237239700772,\n", 696 | " -0.003624870759025304,\n", 697 | " -0.012874426619747296,\n", 698 | " -0.01581279411451035,\n", 699 | " 0.017325090647519092,\n", 700 | " 0.017444482479072414,\n", 701 | " 0.013584144729536486,\n", 702 | " -0.011448356589315579,\n", 703 | " 0.035764508781492906,\n", 704 | " 0.001513126457857356,\n", 705 | " 0.019341487887509275,\n", 706 | " -0.014950518844191543,\n", 707 | " -0.047120002263410336,\n", 708 | " 
-0.01235706201634957,\n", 709 | " 0.00044025764078735406,\n", 710 | " -0.01476479821733082,\n", 711 | " 0.0032484546128251543,\n", 712 | " -0.029954100724629007,\n", 713 | " -0.016728131489752485,\n", 714 | " -0.000562551444446868,\n", 715 | " -0.0025719004350283664,\n", 716 | " 0.028574461782235067,\n", 717 | " 0.013690270802028328,\n", 718 | " -0.004543525272524755,\n", 719 | " 0.0043345891016451435,\n", 720 | " -0.004629752706424376,\n", 721 | " -0.04215860464912636,\n", 722 | " -0.004725929459620107,\n", 723 | " 0.01578626259638739,\n", 724 | " -0.011083548215124875,\n", 725 | " -0.006261442002309036,\n", 726 | " 0.003691199554332705,\n", 727 | " -0.012118277421920329,\n", 728 | " 0.0026912924994123375,\n", 729 | " 0.02361306416795109,\n", 730 | " 0.015971983223248113,\n", 731 | " -0.023533469613582207,\n", 732 | " 0.0075813854945876126,\n", 733 | " 0.0071568812046202464,\n", 734 | " -0.0062349104841860755,\n", 735 | " -0.022512006165848232,\n", 736 | " 0.01306014724660802,\n", 737 | " -0.004364437059533474,\n", 738 | " 0.018014911981361258,\n", 739 | " 0.016661802694445085,\n", 740 | " 0.0072829059157043085,\n", 741 | " 0.026080497216031596,\n", 742 | " -0.01627709568166216,\n", 743 | " 0.034623649776915226,\n", 744 | " -0.01955374003249296,\n", 745 | " 0.007760473707578893,\n", 746 | " -0.0248733131414369,\n", 747 | " 0.006301239279493476,\n", 748 | " -0.003158910739160163,\n", 749 | " 0.027354010085933696,\n", 750 | " -0.025258020154219827,\n", 751 | " -0.008695709721413248,\n", 752 | " 0.007654347169425754,\n", 753 | " 0.015534213174219267,\n", 754 | " -0.008781937155312869,\n", 755 | " -0.012549415522741032,\n", 756 | " 0.000390096460232301,\n", 757 | " -0.023188559877983725,\n", 758 | " -0.010844764552018231,\n", 759 | " 0.016011780500432553,\n", 760 | " 0.021689529104036463,\n", 761 | " -0.012297366100572908,\n", 762 | " 0.007508423819749472,\n", 763 | " 0.03212968707335695,\n", 764 | " 0.012244303064326988,\n", 765 | " -0.00911358206317247,\n", 766 | " 
-0.002314876353212188,\n", 767 | " -0.005478761752020406,\n", 768 | " -0.007269640156642829,\n", 769 | " 0.009239606774256531,\n", 770 | " 0.0006748958997064407,\n", 771 | " -0.013637207765782408,\n", 772 | " 0.024966173454867263,\n", 773 | " 0.022485474647725275,\n", 774 | " 0.02549680381732647,\n", 775 | " 0.002528786718078556,\n", 776 | " -0.016754663007875446,\n", 777 | " -0.022724258310831916,\n", 778 | " -0.02348040657733629,\n", 779 | " 0.020283356780874368,\n", 780 | " -0.018903717838480428,\n", 781 | " 0.023812050553873292,\n", 782 | " -0.029290812771554996,\n", 783 | " 0.007448727903972811,\n", 784 | " 0.005870101644334072,\n", 785 | " 0.007309437433827269,\n", 786 | " -0.016661802694445085,\n", 787 | " -0.011660608734299263,\n", 788 | " -0.032580722881447276,\n", 789 | " 0.0001352486319410751,\n", 790 | " 0.0058502030057418514,\n", 791 | " 0.02937040732592388,\n", 792 | " -0.009067151906457288,\n", 793 | " 0.010247804462929026,\n", 794 | " 0.012920856776462477,\n", 795 | " -0.004765726736804548,\n", 796 | " -0.011939189674590346,\n", 797 | " -0.0008805152815747081,\n", 798 | " 0.029264281253432035,\n", 799 | " 0.028282615082882503,\n", 800 | " -0.00655992158119234,\n", 801 | " -0.005243294063017835,\n", 802 | " -0.6910934012993952,\n", 803 | " -0.002527128498195871,\n", 804 | " 0.009856464570615361,\n", 805 | " -0.01623729840447772,\n", 806 | " 0.00823804103379218,\n", 807 | " 0.02743360464030258,\n", 808 | " 0.007879865539132216,\n", 809 | " 0.0024160277660559746,\n", 810 | " -0.021331353609376498,\n", 811 | " 0.011813164963506284,\n", 812 | " -0.010911093347325632,\n", 813 | " -0.009491656196424655,\n", 814 | " 0.013637207765782408,\n", 815 | " -0.005435648035070595,\n", 816 | " 0.01149478674603076,\n", 817 | " -0.015918920187002195,\n", 818 | " 0.0017842456415070066,\n", 819 | " -0.006079037349552385,\n", 820 | " -0.018545542343820463,\n", 821 | " 0.031890903410250304,\n", 822 | " 0.005193547466537284,\n", 823 | " 0.010181475667621625,\n", 824 | " 
-0.020416014371489172,\n", 825 | " 0.007515056699280212,\n", 826 | " -0.009723806980000558,\n", 827 | " 0.003145644980098683,\n", 828 | " -0.0014542596520220378,\n", 829 | " -0.00490501720695009,\n", 830 | " 0.009286036930971712,\n", 831 | " 0.027619327129808493,\n", 832 | " -0.016462816308522882,\n", 833 | " 0.009027354629272849,\n", 834 | " -0.006609668177672891,\n", 835 | " 0.003143986760215998,\n", 836 | " 0.04123000151482275,\n", 837 | " 0.012808097824439896,\n", 838 | " -0.010911093347325632,\n", 839 | " -0.005485394631551147,\n", 840 | " 0.0014418230029019001,\n", 841 | " 0.010605980888911588,\n", 842 | " -0.03032554197835045,\n", 843 | " 0.003551909084187163,\n", 844 | " 0.024196757566656218,\n", 845 | " -0.00890796186639693,\n", 846 | " -0.027128492181888533,\n", 847 | " -0.01309994452379246,\n", 848 | " 0.0009136796792284086,\n", 849 | " 0.006672680533214921,\n", 850 | " 0.0048818021285925,\n", 851 | " 0.015627073487649627,\n", 852 | " -0.0014700127409075455,\n", 853 | " -0.003137353880685258,\n", 854 | " -0.0020876999964536905,\n", 855 | " -0.0037243639519864056,\n", 856 | " -0.01541482041134335,\n", 857 | " 0.004032793082996469,\n", 858 | " 0.02102623928831726,\n", 859 | " 0.012071847265205148,\n", 860 | " 0.016184235368231797,\n", 861 | " 0.012788199185847676,\n", 862 | " -0.010188108547152365,\n", 863 | " 0.005826987927384261,\n", 864 | " -0.01808124077666866,\n", 865 | " -0.0036414529578521542,\n", 866 | " -0.0014260699140163924,\n", 867 | " -0.020110901913075126,\n", 868 | " -0.0021656363309398865,\n", 869 | " 0.018200632608221978,\n", 870 | " 0.012788199185847676,\n", 871 | " -0.022817118624262278,\n", 872 | " -0.0013431589198821413,\n", 873 | " 0.035340000766235155,\n", 874 | " -0.006891565557729345,\n", 875 | " -0.002367939389458109,\n", 876 | " -0.009518187714547615,\n", 877 | " 0.020681329552718775,\n", 878 | " 0.030431668050842294,\n", 879 | " -0.006085670229083125,\n", 880 | " -0.022074236116819388,\n", 881 | " 0.019845586731845524,\n", 882 | 
" 0.009975856402168682,\n", 883 | " -0.006937996180105824,\n", 884 | " -0.013902523878334608,\n", 885 | " -0.010101881113252744,\n", 886 | " 0.020734392588964694,\n", 887 | " -0.0183200244397753,\n", 888 | " -0.033695046642611616,\n", 889 | " -0.0024326099648828248,\n", 890 | " 0.023201825637045204,\n", 891 | " 0.0031108223625622975,\n", 892 | " 0.02576211899855607,\n", 893 | " 0.002309901693564133,\n", 894 | " -0.011653975854768522,\n", 895 | " -0.016701599971629524,\n", 896 | " 0.012947388294585438,\n", 897 | " 0.021238493295946136,\n", 898 | " 0.0074818923016265115,\n", 899 | " 0.016993446670982088,\n", 900 | " 0.017205698815965773,\n", 901 | " -0.011262635962454857,\n", 902 | " -0.01077180287718009,\n", 903 | " 0.010400360692136049,\n", 904 | " -0.015494415897034828,\n", 905 | " 0.009697275461877599,\n", 906 | " 0.034544055222546347,\n", 907 | " 0.016927117875674687,\n", 908 | " -0.0012975577566929784,\n", 909 | " 0.027725453202300333,\n", 910 | " 0.018545542343820463,\n", 911 | " -0.02464779523739174,\n", 912 | " 0.004709347260793257,\n", 913 | " -0.012556048402271772,\n", 914 | " 0.004971346002257491,\n", 915 | " 0.011660608734299263,\n", 916 | " -0.02227322250274159,\n", 917 | " -0.03581757181773883,\n", 918 | " 0.012648908715702133,\n", 919 | " -0.0020064472222021245,\n", 920 | " 0.010393727812605308,\n", 921 | " -0.0019434348666600935,\n", 922 | " 0.028680587854726908,\n", 923 | " 0.024024302698856977,\n", 924 | " 0.02363959568607405,\n", 925 | " 0.007660980048956494,\n", 926 | " -0.0182536956444679,\n", 927 | " 0.020548671962103974,\n", 928 | " 0.021410948163745377,\n", 929 | " 0.002522153838547816,\n", 930 | " -0.02883977696346467,\n", 931 | " -0.0037940091870591764,\n", 932 | " -0.0044771964772173534,\n", 933 | " -0.0030892655040873924,\n", 934 | " 0.03128067849342222,\n", 935 | " -0.012894325258339516,\n", 936 | " 0.025828447793863476,\n", 937 | " 0.01472500094014638,\n", 938 | " -0.007302804554296529,\n", 939 | " 0.0178557210099783,\n", 940 | " 
0.03170518278338959,\n", 941 | " -0.0014202661444269948,\n", 942 | " -0.019142501501587073,\n", 943 | " 0.00042616274268070025,\n", 944 | " -0.003774110548466956,\n", 945 | " -0.013557613211413527,\n", 946 | " -0.0015222467836274484,\n", 947 | " -0.03711761620576389,\n", 948 | " -0.026385609674445643,\n", 949 | " 0.011634077216176302,\n", 950 | " 0.02220689370743419,\n", 951 | " 0.0022170411473031223,\n", 952 | " -0.021848718212774225,\n", 953 | " 0.014844392771699702,\n", 954 | " -0.007939561454908876,\n", 955 | " 0.010725372720464909,\n", 956 | " -0.020694595311780254,\n", 957 | " -0.004032793082996469,\n", 958 | " -0.0010537993175229554,\n", 959 | " -0.020615000757411375,\n", 960 | " 0.0018920299338815332,\n", 961 | " 0.00916001221988765,\n", 962 | " 0.006831869641952684,\n", 963 | " 0.019421082441878155,\n", 964 | " -0.009252872533318012,\n", 965 | " 0.010314133258236427,\n", 966 | " -0.023891645108242175,\n", 967 | " 0.013418322741267984,\n", 968 | " -0.008980924472557668,\n", 969 | " 0.0012420073906230303,\n", 970 | " 0.0009791794228021294,\n", 971 | " -0.037382931386993494,\n", 972 | " -0.007269640156642829,\n", 973 | " 0.0034490992186300426,\n", 974 | " 0.00284716516838473,\n", 975 | " 0.00035320103873872803,\n", 976 | " -0.006420631111046798,\n", 977 | " 0.009518187714547615,\n", 978 | " -0.027327478567810735,\n", 979 | " -0.0027012418187084474,\n", 980 | " 0.002664760981289377,\n", 981 | " -0.005996126355418133,\n", 982 | " -0.0075813854945876126,\n", 983 | " 0.019076172706279673,\n", 984 | " -0.00890132898686619,\n", 985 | " 0.0010695524064084632,\n", 986 | " 0.014260699372994573,\n", 987 | " 0.009756971377654258,\n", 988 | " -0.002749330195306313,\n", 989 | " 0.019978244322460325,\n", 990 | " -0.01049985388509715,\n", 991 | " 0.0013066779660477462,\n", 992 | " 0.009100316304110989,\n", 993 | " 0.011693773131952964,\n", 994 | " 0.0022933194947372827,\n", 995 | " 0.011925923915528866,\n", 996 | " -0.0183200244397753,\n", 997 | " 0.0033960361823841215,\n", 
998 | " -0.03510121710312852,\n", 999 | " 0.01968639762310776,\n", 1000 | " 0.005939746879406843,\n", 1001 | " 0.004878485688827129,\n", 1002 | " 0.02107930232456318,\n", 1003 | " 0.0055683056256853975,\n", 1004 | " 0.023533469613582207,\n", 1005 | " -0.02053540620304249,\n", 1006 | " -0.0019550424058388887,\n", 1007 | " -0.036799237988288366,\n", 1008 | " -0.010075349595129783,\n", 1009 | " -0.0004721783735290408,\n", 1010 | " 0.01577299683732591,\n", 1011 | " 0.004258310987041632,\n", 1012 | " -0.0005695988789482793,\n", 1013 | " -0.015826059873571833,\n", 1014 | " 0.018744528729742665,\n", 1015 | " 0.0040725903601809095,\n", 1016 | " -0.0039366163298007375,\n", 1017 | " -0.0043246397823490336,\n", 1018 | " -0.007415563506319111,\n", 1019 | " 0.011700406011483703,\n", 1020 | " -0.004613170507597525,\n", 1021 | " -0.00406927392041554,\n", 1022 | " 0.01577299683732591,\n", 1023 | " -0.0343052715594397,\n", 1024 | " -0.003064392205847117,\n", 1025 | " -0.006977793457290264,\n", 1026 | " 0.004692765061966407,\n", 1027 | " -0.00015307451023185468,\n", 1028 | " 0.01574646531920295,\n", 1029 | " 0.025894776589170877,\n", 1030 | " 0.02143747968186834,\n", 1031 | " -0.03669311191579652,\n", 1032 | " 0.0003869872979522665,\n", 1033 | " 0.022963041973938562,\n", 1034 | " -0.0016540752643009076,\n", 1035 | " -0.00740229774725763,\n", 1036 | " -0.0025321031578439263,\n", 1037 | " -0.005375952119293935,\n", 1038 | " 0.010420259330728269,\n", 1039 | " -0.00575070981278075,\n", 1040 | " 0.024541669164899894,\n", 1041 | " -0.010148311269967925,\n", 1042 | " -0.008125282081769599,\n", 1043 | " 0.020668063793657293,\n", 1044 | " 0.036799237988288366,\n", 1045 | " 0.016529145103830283,\n", 1046 | " 0.033721578160734574,\n", 1047 | " 0.007966092973031837,\n", 1048 | " 0.01415457330050273,\n", 1049 | " 0.006689262732041772,\n", 1050 | " 0.01081823303389527,\n", 1051 | " 0.027407073122179618,\n", 1052 | " -0.013531081693290566,\n", 1053 | " 0.01046668948744345,\n", 1054 | " 
4.757021198835776e-05,\n", 1055 | " 0.00028189755467944086,\n", 1056 | " 0.01895678087472635,\n", 1057 | " -0.030590857159580057,\n", 1058 | " -0.009557984991732056,\n", 1059 | " -0.009365631485340593,\n", 1060 | " 0.024554934923961377,\n", 1061 | " 0.016025046259494035,\n", 1062 | " 0.022963041973938562,\n", 1063 | " -0.016330158717908078,\n", 1064 | " 0.014552546072347136,\n", 1065 | " -0.0147780639763923,\n", 1066 | " 0.03483590192189891,\n", 1067 | " 0.003198708016344604,\n", 1068 | " -0.02475392130988358,\n", 1069 | " 0.006937996180105824,\n", 1070 | " 0.01150805250509224,\n", 1071 | " -0.020044573117767726,\n", 1072 | " -0.02210076763494235,\n", 1073 | " 0.004573373230413085,\n", 1074 | " 0.01051311964415863,\n", 1075 | " -0.018041443499484215,\n", 1076 | " 0.004228463029153302,\n", 1077 | " 0.0076079170127105735,\n", 1078 | " 0.012018784228959228,\n", 1079 | " 0.016966915152859127,\n", 1080 | " 0.011090181094655614,\n", 1081 | " 0.021941578526204587,\n", 1082 | " -0.027513199194671458,\n", 1083 | " -0.03711761620576389,\n", 1084 | " -0.004526943073697904,\n", 1085 | " 0.01243002369118771,\n", 1086 | " 0.0011458306374272987,\n", 1087 | " -0.002248547557904787,\n", 1088 | " -0.03205009251898807,\n", 1089 | " -0.006301239279493476,\n", 1090 | " -0.030909237239700772,\n", 1091 | " 0.0011018876941208211,\n", 1092 | " 0.001536341652630271,\n", 1093 | " 0.014234167854871612,\n", 1094 | " 0.014313762409240493,\n", 1095 | " -0.015971983223248113,\n", 1096 | " -0.0010297551292240226,\n", 1097 | " 0.021318087850315016,\n", 1098 | " 0.003415934820976342,\n", 1099 | " 0.007349234711011709,\n", 1100 | " -0.023453875059213328,\n", 1101 | " -0.00576397557184223,\n", 1102 | " 0.01074527135905713,\n", 1103 | " 0.005691013897004089,\n", 1104 | " 0.018094506535730137,\n", 1105 | " -0.00573081117418853,\n", 1106 | " 0.009186543738010611,\n", 1107 | " 0.016794460285059886,\n", 1108 | " -0.00733596895195023,\n", 1109 | " 0.00734260183148097,\n", 1110 | " 0.00244421750406162,\n", 
1111 | " 0.01211164454238959,\n", 1112 | " 0.00037848889194967467,\n", 1113 | " 0.016038312018555514,\n", 1114 | " -0.00818497799754626,\n", 1115 | " -4.684473715170417e-05,\n", 1116 | " 0.012224404425734766,\n", 1117 | " -0.0036547187169136343,\n", 1118 | " -0.0008680786324545705,\n", 1119 | " 0.007103818168374326,\n", 1120 | " 0.014950518844191543,\n", 1121 | " 0.005256559822079315,\n", 1122 | " -0.012290733221042169,\n", 1123 | " -0.008443660299245124,\n", 1124 | " -0.021119101464392814,\n", 1125 | " 0.015971983223248113,\n", 1126 | " 0.05492027231685107,\n", 1127 | " 0.017285293370334652,\n", 1128 | " 0.003180467597635069,\n", 1129 | " 0.010181475667621625,\n", 1130 | " 0.008781937155312869,\n", 1131 | " -0.014923987326068583,\n", 1132 | " -0.027486667676548497,\n", 1133 | " 0.004815473333285098,\n", 1134 | " 0.0025536600163188314,\n", 1135 | " -0.007461993663034292,\n", 1136 | " -0.01577299683732591,\n", 1137 | " 0.012781566306316935,\n", 1138 | " 0.01077843575671083,\n", 1139 | " -0.012861160860685816,\n", 1140 | " 0.021769123658405342,\n", 1141 | " -0.013371892584552804,\n", 1142 | " -0.003347947805786256,\n", 1143 | " -0.008085484804585158,\n", 1144 | " -0.00818497799754626,\n", 1145 | " -0.018691465693496747,\n", 1146 | " -0.0025719004350283664,\n", 1147 | " 0.006656098334388071,\n", 1148 | " -0.0007884840198780271,\n", 1149 | " -0.0010446791081681877,\n", 1150 | " 0.021105835705331335,\n", 1151 | " -0.005614735782400578,\n", 1152 | " 0.018731262970681187,\n", 1153 | " 0.01051975252368937,\n", 1154 | " 0.010990687901694513,\n", 1155 | " -0.01955374003249296,\n", 1156 | " -0.01247645384790289,\n", 1157 | " 0.008012523129747018,\n", 1158 | " 0.011083548215124875,\n", 1159 | " 0.02277732134707784,\n", 1160 | " -0.018797591765988587,\n", 1161 | " 0.0196598661049848,\n", 1162 | " 0.002323167452625613,\n", 1163 | " -0.0082579396723844,\n", 1164 | " 0.016688334212568046,\n", 1165 | " -0.008569685010329186,\n", 1166 | " 0.0021689527707052566,\n", 1167 | " 
0.01963333458686184,\n", 1168 | " 9.48813198440882e-05,\n", 1169 | " -0.013411689861737245,\n", 1170 | " 0.02952959643466164,\n", 1171 | " -0.022114033394003828,\n", 1172 | " -0.003502162487706612,\n", 1173 | " 0.026425406951630082,\n", 1174 | " -0.005157066629118213,\n", 1175 | " -0.0197925236955996,\n", 1176 | " 0.010327399017297907,\n", 1177 | " -0.010977422142633033,\n", 1178 | " -0.012914223896931738,\n", 1179 | " -0.013491284416106126,\n", 1180 | " 0.021172164500638736,\n", 1181 | " 0.010479955246504928,\n", 1182 | " -0.005226711864190985,\n", 1183 | " 0.009279404051440973,\n", 1184 | " -0.016011780500432553,\n", 1185 | " -0.02937040732592388,\n", 1186 | " -0.01621076688635476,\n", 1187 | " -0.005196863906302654,\n", 1188 | " -0.009312568449094673,\n", 1189 | " 0.0048818021285925,\n", 1190 | " -0.026544798783183405,\n", 1191 | " -0.037807435676960856,\n", 1192 | " 0.0026365710104530827,\n", 1193 | " -0.004759093857273807,\n", 1194 | " -0.019155767260648552,\n", 1195 | " 0.010632512407034549,\n", 1196 | " 0.0020230294210289747,\n", 1197 | " -0.0038006420665899166,\n", 1198 | " 0.0063443529964432865,\n", 1199 | " 0.0008937810406361884,\n", 1200 | " 0.01243665657071845,\n", 1201 | " -0.004689448622201037,\n", 1202 | " -0.003873603974258707,\n", 1203 | " -0.01079170151577231,\n", 1204 | " 0.0026398876830491017,\n", 1205 | " 0.014406622722670855,\n", 1206 | " 0.0001868607548934776,\n", 1207 | " -0.053699822483194895,\n", 1208 | " -0.008191610877077,\n", 1209 | " -0.02560292988981831,\n", 1210 | " -0.0046032211883014155,\n", 1211 | " 0.015282162820728548,\n", 1212 | " -0.004294791824460703,\n", 1213 | " 0.005170332388179694,\n", 1214 | " -0.0069114646619828635,\n", 1215 | " 0.021915047008081626,\n", 1216 | " -0.0026398876830491017,\n", 1217 | " 0.008947760074903967,\n", 1218 | " 0.0053792685590593045,\n", 1219 | " -0.015799528355448872,\n", 1220 | " 0.022896713178631158,\n", 1221 | " 0.014247433613933092,\n", 1222 | " 0.021265024814069097,\n", 1223 | " 
-0.012861160860685816,\n", 1224 | " 0.01046668948744345,\n", 1225 | " 0.008470191817368083,\n", 1226 | " -0.011096813974186355,\n", 1227 | " -0.010592715129850107,\n", 1228 | " 0.00031423284239179885,\n", 1229 | " 0.005535141228031697,\n", 1230 | " 0.01642301903133844,\n", 1231 | " -0.016860789080367287,\n", 1232 | " 0.0006446333286397767,\n", 1233 | " -0.0009410403072927115,\n", 1234 | " 0.0006081524912207062,\n", 1235 | " -0.009790135775307959,\n", 1236 | " -0.014698469422023418,\n", 1237 | " -0.00736250047007319,\n", 1238 | " -0.008762038516720649,\n", 1239 | " -0.008516621974083264,\n", 1240 | " 0.00490833364671546,\n", 1241 | " -0.005157066629118213,\n", 1242 | " 0.0056512166198196484,\n", 1243 | " 0.01969966338216924,\n", 1244 | " -0.014300496650179014,\n", 1245 | " -0.016542410862891762,\n", 1246 | " -0.003472314529818282,\n", 1247 | " -0.0331644162801524,\n", 1248 | " 0.013756599597335729,\n", 1249 | " 0.010924359106387112,\n", 1250 | " 0.002809026111082974,\n", 1251 | " 0.012954021174116178,\n", 1252 | " -0.019381285164693715,\n", 1253 | " -0.019182298778771513,\n", 1254 | " -0.01406171298707237,\n", 1255 | " 0.02292324469675412,\n", 1256 | " -0.025921308107293834,\n", 1257 | " 0.0010670650765844357,\n", 1258 | " -0.012914223896931738,\n", 1259 | " -0.01902310967003375,\n", 1260 | " -0.014963784603253023,\n", 1261 | " 0.009717174100469819,\n", 1262 | " -0.003920034130973887,\n", 1263 | " 0.010473322366974189,\n", 1264 | " -0.007700777326140934,\n", 1265 | " -0.015507681656096306,\n", 1266 | " -0.0033363402666074606,\n", 1267 | " 0.009412061642055774,\n", 1268 | " 0.012675440233825094,\n", 1269 | " -0.021477276959052778,\n", 1270 | " -0.005959645517999063,\n", 1271 | " -0.030935768757823733,\n", 1272 | " -0.012814730703970635,\n", 1273 | " -0.008078851925054418,\n", 1274 | " -0.009365631485340593,\n", 1275 | " 0.006785439485237504,\n", 1276 | " 0.026385609674445643,\n", 1277 | " 0.008815101552966569,\n", 1278 | " -0.00984319881155388,\n", 1279 | " 
-0.02830914660100546,\n", 1280 | " 0.006112202212867383,\n", 1281 | " -0.02225995674368011,\n", 1282 | " -0.0068782997986678645,\n", 1283 | " 0.0029632405601726815,\n", 1284 | " 0.03382770423322642,\n", 1285 | " 0.02562946140794127,\n", 1286 | " 0.03990342560867473,\n", 1287 | " -0.011315698998700777,\n", 1288 | " 0.026558064542244884,\n", 1289 | " 0.022498740406786753,\n", 1290 | " -0.0011508052970753539,\n", 1291 | " -0.0011856279146117393,\n", 1292 | " 0.01627709568166216,\n", 1293 | " -0.0003453244942959742,\n", 1294 | " -0.029980632242751968,\n", 1295 | " 0.006755591527349173,\n", 1296 | " 0.003198708016344604,\n", 1297 | " 0.02149054271811426,\n", 1298 | " 0.011083548215124875,\n", 1299 | " 0.01537502313415891,\n", 1300 | " -0.0044771964772173534,\n", 1301 | " 0.007216577120396908,\n", 1302 | " 0.0019732828245484237,\n", 1303 | " -0.004394285017421804,\n", 1304 | " -0.017577140069687216,\n", 1305 | " -0.022087501875880867,\n", 1306 | " -0.027725453202300333,\n", 1307 | " 0.0013945637362453769,\n", 1308 | " -0.008947760074903967,\n", 1309 | " 0.02952959643466164,\n", 1310 | " -0.023904910867303654,\n", 1311 | " -0.01570666804201851,\n", 1312 | " -0.010254437342459765,\n", 1313 | " 0.034490992186300425,\n", 1314 | " 0.012715237511009534,\n", 1315 | " -0.005956329078233693,\n", 1316 | " 0.015865857150756273,\n", 1317 | " -0.016701599971629524,\n", 1318 | " 0.004404234336717915,\n", 1319 | " -0.0035087953672373524,\n", 1320 | " -0.00922634101519505,\n", 1321 | " -0.02228648826180307,\n", 1322 | " 0.01045342372838197,\n", 1323 | " -0.006221644725124595,\n", 1324 | " -0.0033728211040265316,\n", 1325 | " 0.007488525181157252,\n", 1326 | " -0.0011731912654916017,\n", 1327 | " -0.0071568812046202464,\n", 1328 | " 0.0015811135894627667,\n", 1329 | " 0.007117083927435806,\n", 1330 | " -0.004822106212815838,\n", 1331 | " 0.0015471200818677237,\n", 1332 | " -0.020734392588964694,\n", 1333 | " -0.011906025276936646,\n", 1334 | " 0.0009153378991110936,\n", 1335 | " 
-0.01053965116228159,\n", 1336 | " -0.014472951517978255,\n", 1337 | " -0.02696930307315077,\n", 1338 | " -0.014711735181084899,\n", 1339 | " -0.027698921684177372,\n", 1340 | " 0.0020959910958671156,\n", 1341 | " 0.018651668416312304,\n", 1342 | " -0.015574010451403709,\n", 1343 | " 0.009266138292379492,\n", 1344 | " 0.002596773733268642,\n", 1345 | " -0.027048897627519653,\n", 1346 | " -0.010234538703867545,\n", 1347 | " -0.009989122161230162,\n", 1348 | " 0.053487570338211214,\n", 1349 | " 0.015308694338851509,\n", 1350 | " 0.019248627574078914,\n", 1351 | " 0.022604866479278594,\n", 1352 | " -0.020721126829903215,\n", 1353 | " -0.02219362794837271,\n", 1354 | " 0.0033048340888364456,\n", 1355 | " -0.006685946292276402,\n", 1356 | " -0.004314690463052923,\n", 1357 | " 0.023162028359860764,\n", 1358 | " 0.020561937721165453,\n", 1359 | " -0.018837389043173027,\n", 1360 | " 0.005783874210434451,\n", 1361 | " -0.016011780500432553,\n", 1362 | " 0.01774959493748646,\n", 1363 | " 0.016131172331985875,\n", 1364 | " -0.023069168046430402,\n", 1365 | " -0.011348863396354478,\n", 1366 | " 0.014658672144838979,\n", 1367 | " 0.0016474423847701676,\n", 1368 | " -0.004022843763700359,\n", 1369 | " -0.011017219419817474,\n", 1370 | " -0.033986893341964176,\n", 1371 | " 0.009889628968269061,\n", 1372 | " -0.02097317625207134,\n", 1373 | " -0.01700671243004357,\n", 1374 | " -0.021424213922806856,\n", 1375 | " -0.0005555039517377944,\n", 1376 | " -0.032368470736463595,\n", 1377 | " 0.003608288560198454,\n", 1378 | " -0.016568942381014723,\n", 1379 | " 0.024634529478330256,\n", 1380 | " -0.003989679366046659,\n", 1381 | " -0.021158898741577253,\n", 1382 | " -0.025364146226711667,\n", 1383 | " -0.029184686699063156,\n", 1384 | " -0.023931442385426615,\n", 1385 | " 0.001633347515767345,\n", 1386 | " 0.006669364093449552,\n", 1387 | " 0.018704731452558226,\n", 1388 | " 0.0037873763075284366,\n", 1389 | " -0.008065586165992938,\n", 1390 | " 0.015295428579790028,\n", 1391 | " 
0.003069366865495172,\n", 1392 | " -0.002936709042049721,\n", 1393 | " 0.008509989094552525,\n", 1394 | " -0.009949324884045721,\n", 1395 | " 0.0248733131414369,\n", 1396 | " -0.004643018465485856,\n", 1397 | " -0.005860152325037961,\n", 1398 | " 0.008284471190507361,\n", 1399 | " -0.008622748046575106,\n", 1400 | " 0.015441352860788906,\n", 1401 | " -0.008609482287513626,\n", 1402 | " -0.01049985388509715,\n", 1403 | " -0.0028455069485020445,\n", 1404 | " -0.028043831419775858,\n", 1405 | " -0.006009392114479614,\n", 1406 | " 0.002774203493546589,\n", 1407 | " -0.004251678107510893,\n", 1408 | " -0.013484651536575385,\n", 1409 | " -0.008145180720361819,\n", 1410 | " -0.012297366100572908,\n", 1411 | " -0.011169775649024495,\n", 1412 | " -0.020707861070841736,\n", 1413 | " 0.002394470907581069,\n", 1414 | " -0.013279032271122442,\n", 1415 | " -0.039505452836830324,\n", 1416 | " -0.0183200244397753,\n", 1417 | " -0.011302433239639297,\n", 1418 | " 0.00027153365130882834,\n", 1419 | " 0.010062083836068303,\n", 1420 | " -0.0036348200783214144,\n", 1421 | " -0.00736250047007319,\n", 1422 | " 0.019938447045275885,\n", 1423 | " 0.02679684820535153,\n", 1424 | " -0.027698921684177372,\n", 1425 | " 0.0039366163298007375,\n", 1426 | " -0.014234167854871612,\n", 1427 | " 0.007694144446610195,\n", 1428 | " -0.0049381816046037905,\n", 1429 | " 0.00974370561859278,\n", 1430 | " 0.005943063319172213,\n", 1431 | " -0.008801835793905088,\n", 1432 | " -0.0026846596198815973,\n", 1433 | " -0.010446790848851228,\n", 1434 | " -0.015653605005772588,\n", 1435 | " -0.03770130960446902,\n", 1436 | " 0.007322703192888749,\n", 1437 | " 0.005419065836243745,\n", 1438 | " -0.023838582071996253,\n", 1439 | " -0.010280968860582726,\n", 1440 | " 0.02690297427784337,\n", 1441 | " 0.003893502612850927,\n", 1442 | " 0.007939561454908876,\n", 1443 | " 0.015653605005772588,\n", 1444 | " -0.015030113398560424,\n", 1445 | " 0.016847523321305808,\n", 1446 | " 0.0016930434315440058,\n", 1447 | " 
-0.019288424851263353,\n", 1448 | " 0.007561486855995393,\n", 1449 | " -0.030458199568965255,\n", 1450 | " -0.0011143243432409587,\n", 1451 | " 0.004812156893519728,\n", 1452 | " -0.0012362036210336327,\n", 1453 | " 0.020455811648673612,\n", 1454 | " -0.023241622914229643,\n", 1455 | " -0.020031307358706243,\n", 1456 | " -0.009551352112201317,\n", 1457 | " 0.016542410862891762,\n", 1458 | " 0.014910721567007103,\n", 1459 | " -0.011554482661807421,\n", 1460 | " -0.027062163386581132,\n", 1461 | " -0.014977050362314503,\n", 1462 | " -0.015003581880437464,\n", 1463 | " 0.010274335981051987,\n", 1464 | " 0.0030229364759493424,\n", 1465 | " 0.006805338123829724,\n", 1466 | " 0.004609854067832156,\n", 1467 | " 0.004420816535544765,\n", 1468 | " -0.02284365014238524,\n", 1469 | " 0.017364887924703535,\n", 1470 | " -0.0010471664379922154,\n", 1471 | " 0.0017212331695496512,\n", 1472 | " -0.014128041782379771,\n", 1473 | " -0.021371150886560938,\n", 1474 | " -0.027937705347284018,\n", 1475 | " -0.004162134233845901,\n", 1476 | " 0.003595022801136974,\n", 1477 | " 0.01835982171695974,\n", 1478 | " 0.021941578526204587,\n", 1479 | " -0.02499270497299022,\n", 1480 | " -0.012257568823388468,\n", 1481 | " -0.002195484288828217,\n", 1482 | " -0.030909237239700772,\n", 1483 | " -0.01640975327227696,\n", 1484 | " -0.0182536956444679,\n", 1485 | " -0.010579449370788627,\n", 1486 | " 0.01849247930757454,\n", 1487 | " -0.005999442795183504,\n", 1488 | " -0.0019898650233752743,\n", 1489 | " 0.03608288699896843,\n", 1490 | " 0.012038682867551448,\n", 1491 | " 0.0215966687906061,\n", 1492 | " 0.0012071847730866446,\n", 1493 | " -0.00410243831806924,\n", 1494 | " 0.012071847265205148,\n", 1495 | " 0.0017328408251437708,\n", 1496 | " 0.004613170507597525,\n", 1497 | " 0.011123345492309315,\n", 1498 | " -0.00406927392041554,\n", 1499 | " -0.020681329552718775,\n", 1500 | " 0.03560531594746476,\n", 1501 | " 0.02154360575436018,\n", 1502 | " 0.0081584464794233,\n", 1503 | " 
0.013577511850005747,\n", 1504 | " 0.00203463696020777,\n", 1505 | " -0.01147488810743854,\n", 1506 | " -0.01051975252368937,\n", 1507 | " 0.0009219707786418338,\n", 1508 | " -0.007826802502886294,\n", 1509 | " 0.009889628968269061,\n", 1510 | " 0.019500676996247038,\n", 1511 | " -0.014406622722670855,\n", 1512 | " -0.010088615354191264,\n", 1513 | " 0.0010007362812770345,\n", 1514 | " 0.01782918949185534,\n", 1515 | " -0.002671393860820117,\n", 1516 | " 0.007879865539132216,\n", 1517 | " -0.002669735640937432,\n", 1518 | " 0.009047253267865069,\n", 1519 | " -0.0017195749496669661,\n", 1520 | " -0.03019288438773565,\n", 1521 | " -0.0028537980479154696,\n", 1522 | " -0.028574461782235067,\n", 1523 | " -0.020747658348026176,\n", 1524 | " 0.003134037440919888,\n", 1525 | " 0.003611604999963824,\n", 1526 | " -0.030511262605211174,\n", 1527 | " 0.007667612928487234,\n", 1528 | " 0.018067975017607176,\n", 1529 | " -0.01648934782664584,\n", 1530 | " -0.008589583648921406,\n", 1531 | " 0.015401554652281869,\n", 1532 | " -0.02044254588961213,\n", 1533 | " -0.020628266516472853,\n", 1534 | " -0.001264393359039278,\n", 1535 | " -0.02218036218931123,\n", 1536 | " 0.005452230233897446,\n", 1537 | " -0.00651349142447716,\n", 1538 | " 0.010997320781225253,\n", 1539 | " -0.015030113398560424,\n", 1540 | " -0.0019500677461908335,\n", 1541 | " -0.018744528729742665,\n", 1542 | " -0.0028372158490886194,\n", 1543 | " -0.00407590679994628,\n", 1544 | " 0.013365259705022064,\n", 1545 | " -0.006871666919137124,\n", 1546 | " -0.015295428579790028,\n", 1547 | " -0.012350429136818828,\n", 1548 | " -0.011985619831305527,\n", 1549 | " 0.007946194334439617,\n", 1550 | " 0.005634634420992798,\n", 1551 | " -0.005263192701610055,\n", 1552 | " -0.00575402625254612,\n", 1553 | " -0.024568200683022855,\n", 1554 | " -0.004039425962527209,\n", 1555 | " 0.012469820968372151,\n", 1556 | " -0.03502162254875963,\n", 1557 | " -0.04576689483913936,\n", 1558 | " 0.014884190048884142,\n", 1559 | " 
0.012522884004618071,\n", 1560 | " -0.021517074236237218,\n", 1561 | " 0.029635722507153482,\n", 1562 | " 0.21639118102015723,\n", 1563 | " -0.0001705894722946308,\n", 1564 | " 0.006493592785884939,\n", 1565 | " 0.024541669164899894,\n", 1566 | " -4.1559161275645656e-05,\n", 1567 | " 0.025961105384478277,\n", 1568 | " -0.01079170151577231,\n", 1569 | " -0.006334403677147177,\n", 1570 | " -0.013272399391591702,\n", 1571 | " 0.00739566486772689,\n", 1572 | " 0.01252951688414881,\n", 1573 | " 0.01768326614217906,\n", 1574 | " -0.01316627331909986,\n", 1575 | " -0.0024574832631231,\n", 1576 | " -0.007190045602273948,\n", 1577 | " -0.009644212425631677,\n", 1578 | " -0.015295428579790028,\n", 1579 | " -0.00911358206317247,\n", 1580 | " -0.04629752520159856,\n", 1581 | " -0.025098831045482065,\n", 1582 | " -0.001875447735054683,\n", 1583 | " 0.005455546673662816,\n", 1584 | " 0.016874054839428766,\n", 1585 | " -0.00572749473442316,\n", 1586 | " 0.01311984316238468,\n", 1587 | " -0.021861983971835704,\n", 1588 | " -0.028017299901652897,\n", 1589 | " 0.0017676634426801565,\n", 1590 | " 0.03345626297950497,\n", 1591 | " 0.034411397631931545,\n", 1592 | " -0.010161577029029405,\n", 1593 | " -0.0006566554227892431,\n", 1594 | " 0.003402669061914862,\n", 1595 | " 0.012343796257288089,\n", 1596 | " -0.012502985366025851,\n", 1597 | " 0.01623729840447772,\n", 1598 | " 0.00824467391332292,\n", 1599 | " -0.0014907404894411082,\n", 1600 | " 0.04146878517792939,\n", 1601 | " 0.008569685010329186,\n", 1602 | " 0.007966092973031837,\n", 1603 | " -0.02030988829899733,\n", 1604 | " 0.011149877010432276,\n", 1605 | " -0.007263007277112089,\n", 1606 | " -0.008383964383468463,\n", 1607 | " 0.01240349217306475,\n", 1608 | " ...]]" 1609 | ] 1610 | }, 1611 | "metadata": {}, 1612 | "execution_count": 26 1613 | } 1614 | ] 1615 | }, 1616 | { 1617 | "cell_type": "code", 1618 | "source": [], 1619 | "metadata": { 1620 | "id": "8amwnhjNuvfN" 1621 | }, 1622 | "execution_count": null, 1623 | 
"outputs": [] 1624 | } 1625 | ] 1626 | } -------------------------------------------------------------------------------- /AR.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "name": "AR.ipynb", 7 | "provenance": [], 8 | "include_colab_link": true 9 | }, 10 | "kernelspec": { 11 | "name": "python3", 12 | "display_name": "Python 3" 13 | }, 14 | "accelerator": "GPU" 15 | }, 16 | "cells": [ 17 | { 18 | "cell_type": "markdown", 19 | "metadata": { 20 | "id": "view-in-github", 21 | "colab_type": "text" 22 | }, 23 | "source": [ 24 | "\"Open" 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "metadata": { 30 | "id": "zS2HkfSWZSLq", 31 | "colab_type": "code", 32 | "outputId": "6e4ed316-f1bd-465a-8477-c1296b74f63e", 33 | "colab": { 34 | "base_uri": "https://localhost:8080/", 35 | "height": 34 36 | } 37 | }, 38 | "source": [ 39 | "from google.colab import drive\n", 40 | "drive.mount('/content/drive', force_remount=True)\n", 41 | "#drive.unmount('/content/drive')#, force_remount=True)" 42 | ], 43 | "execution_count": 0, 44 | "outputs": [ 45 | { 46 | "output_type": "stream", 47 | "text": [ 48 | "Mounted at /content/drive\n" 49 | ], 50 | "name": "stdout" 51 | } 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "metadata": { 57 | "id": "Cd0WZtmF0ozu", 58 | "colab_type": "code", 59 | "colab": {} 60 | }, 61 | "source": [ 62 | "!cp /content/drive/'My Drive'/full_data.zip ." 63 | ], 64 | "execution_count": 0, 65 | "outputs": [] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "metadata": { 70 | "id": "biOS1FHb2bIU", 71 | "colab_type": "code", 72 | "colab": {} 73 | }, 74 | "source": [ 75 | "!cp /content/drive/'My Drive'/extras.zip ." 
76 | ], 77 | "execution_count": 0, 78 | "outputs": [] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "metadata": { 83 | "id": "Astt9GGu2eyr", 84 | "colab_type": "code", 85 | "colab": {} 86 | }, 87 | "source": [ 88 | "!unzip -qq extras.zip" 89 | ], 90 | "execution_count": 0, 91 | "outputs": [] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "metadata": { 96 | "id": "75fLRq5i2nCy", 97 | "colab_type": "code", 98 | "colab": {} 99 | }, 100 | "source": [ 101 | "!cp /content/drive/'My Drive'/action_data2.zip ." 102 | ], 103 | "execution_count": 0, 104 | "outputs": [] 105 | }, 106 | { 107 | "cell_type": "code", 108 | "metadata": { 109 | "id": "sQ-P1K1x20v3", 110 | "colab_type": "code", 111 | "colab": {} 112 | }, 113 | "source": [ 114 | "!unzip -qq action_data2.zip " 115 | ], 116 | "execution_count": 0, 117 | "outputs": [] 118 | }, 119 | { 120 | "cell_type": "code", 121 | "metadata": { 122 | "id": "Eo7byTgj0vpj", 123 | "colab_type": "code", 124 | "colab": {} 125 | }, 126 | "source": [ 127 | "!unzip -qq full_data.zip" 128 | ], 129 | "execution_count": 0, 130 | "outputs": [] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "metadata": { 135 | "id": "CyWe0-GM2-YB", 136 | "colab_type": "code", 137 | "colab": {} 138 | }, 139 | "source": [ 140 | "!mkdir valid\n", 141 | "!mkdir valid/burpees\n", 142 | "!mkdir valid/mcs\n", 143 | "!mkdir valid/squats\n", 144 | "!mkdir valid/jj\n", 145 | "!mkdir valid/planks\n", 146 | "!mkdir valid/pushups" 147 | ], 148 | "execution_count": 0, 149 | "outputs": [] 150 | }, 151 | { 152 | "cell_type": "code", 153 | "metadata": { 154 | "id": "eRC0Jk7r3WRW", 155 | "colab_type": "code", 156 | "colab": {} 157 | }, 158 | "source": [ 159 | "!mv action_data_2/squats/squat2_1_* valid/squats/\n", 160 | "!mv action_data_2/jj/jumping_jacks2_1_* valid/jj/\n", 161 | "!mv action_data_2/mcs/mountain_climber2_1_* valid/mcs/\n", 162 | "!mv action_data_2/planks/plank2_1_* valid/planks/\n", 163 | "!mv action_data_2/burpees/burpee2_1_* valid/burpees/\n", 164 | "!mv 
action_data_2/pushups/pushup2_1_* valid/pushups/" 165 | ], 166 | "execution_count": 0, 167 | "outputs": [] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "metadata": { 172 | "id": "hUhp9-VR4aDL", 173 | "colab_type": "code", 174 | "colab": {} 175 | }, 176 | "source": [ 177 | "!mv action_data_2/squats/* full_data/train/squats/\n", 178 | "!mv action_data_2/jj/* full_data/train/jj/\n", 179 | "!mv action_data_2/mcs/* full_data/train/mcs/\n", 180 | "!mv action_data_2/planks/* full_data/train/planks/\n", 181 | "!mv action_data_2/burpees/* full_data/train/burpees/\n", 182 | "!mv action_data_2/pushups/* full_data/train/pushups/" 183 | ], 184 | "execution_count": 0, 185 | "outputs": [] 186 | }, 187 | { 188 | "cell_type": "code", 189 | "metadata": { 190 | "id": "EOyCISgZ45Oh", 191 | "colab_type": "code", 192 | "colab": {} 193 | }, 194 | "source": [ 195 | "!mkdir valid/extras\n", 196 | "!mkdir full_data/train/extras" 197 | ], 198 | "execution_count": 0, 199 | "outputs": [] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "metadata": { 204 | "id": "td2oXScl4zW6", 205 | "colab_type": "code", 206 | "colab": {} 207 | }, 208 | "source": [ 209 | "!mv extras/extra_1* valid/extras/\n", 210 | "!mv extras/* full_data/train/extras/" 211 | ], 212 | "execution_count": 0, 213 | "outputs": [] 214 | }, 215 | { 216 | "cell_type": "code", 217 | "metadata": { 218 | "id": "vvKf5Nbp5Jj6", 219 | "colab_type": "code", 220 | "colab": {} 221 | }, 222 | "source": [ 223 | "!mv valid full_data/" 224 | ], 225 | "execution_count": 0, 226 | "outputs": [] 227 | }, 228 | { 229 | "cell_type": "code", 230 | "metadata": { 231 | "id": "dn2-zxxP5Pcn", 232 | "colab_type": "code", 233 | "colab": {} 234 | }, 235 | "source": [ 236 | "!zip -qr full_data_new.zip full_data" 237 | ], 238 | "execution_count": 0, 239 | "outputs": [] 240 | }, 241 | { 242 | "cell_type": "code", 243 | "metadata": { 244 | "id": "gtab2LhP5WZp", 245 | "colab_type": "code", 246 | "outputId": "d86389a3-4ead-48d2-e645-fcc8ba0437c4", 247 | 
"colab": { 248 | "base_uri": "https://localhost:8080/", 249 | "height": 34 250 | } 251 | }, 252 | "source": [ 253 | "!cp full_data_new.zip /content/drive/'My Drive'/" 254 | ], 255 | "execution_count": 0, 256 | "outputs": [ 257 | { 258 | "output_type": "stream", 259 | "text": [ 260 | "cp: cannot create regular file '/content/drive/My Drive/': Not a directory\n" 261 | ], 262 | "name": "stdout" 263 | } 264 | ] 265 | }, 266 | { 267 | "cell_type": "code", 268 | "metadata": { 269 | "id": "U5djeJek3jzh", 270 | "colab_type": "code", 271 | "colab": {} 272 | }, 273 | "source": [ 274 | "!ls action_data_2/squats" 275 | ], 276 | "execution_count": 0, 277 | "outputs": [] 278 | }, 279 | { 280 | "cell_type": "code", 281 | "metadata": { 282 | "id": "naK3Lsj4ZZkf", 283 | "colab_type": "code", 284 | "outputId": "83b576af-2d99-4ec2-923a-8fc918c2ec24", 285 | "colab": { 286 | "base_uri": "https://localhost:8080/", 287 | "height": 1000 288 | } 289 | }, 290 | "source": [ 291 | "import cv2, os\n", 292 | "print(cv2.__version__)\n", 293 | "videos = os.listdir(\"/content/drive/My Drive/excercise\")\n", 294 | "for video_name in videos:\n", 295 | " print(video_name)\n", 296 | " vidcap = cv2.VideoCapture(\"/content/drive/My Drive/excercise/\"+video_name)\n", 297 | " success,image = vidcap.read()\n", 298 | " count = 0\n", 299 | " success = True\n", 300 | " print(video_name)\n", 301 | " os.mkdir(video_name)\n", 302 | " while success:\n", 303 | " if count%10000 == 0: \n", 304 | " cv2.imwrite(video_name+\"/frame%d.jpg\" % count, image) # save frame as JPEG file\n", 305 | " success,image = vidcap.read()\n", 306 | " #print('Read a new frame: ', success)\n", 307 | " count += 1" 308 | ], 309 | "execution_count": 0, 310 | "outputs": [ 311 | { 312 | "output_type": "stream", 313 | "text": [ 314 | "3.4.3\n", 315 | "mountain_climber_5.mp4\n", 316 | "mountain_climber_5.mp4\n", 317 | "burpee_1.mp4\n", 318 | "burpee_1.mp4\n", 319 | "plank_3.mp4\n", 320 | "plank_3.mp4\n", 321 | "plank_6.mp4\n", 322 | 
"plank_6.mp4\n", 323 | "Thumbs.db\n", 324 | "Thumbs.db\n", 325 | "burpee_2.mp4\n", 326 | "burpee_2.mp4\n", 327 | "burpee_3.mp4\n", 328 | "burpee_3.mp4\n", 329 | "jumping_jacks_1.mp4\n", 330 | "jumping_jacks_1.mp4\n", 331 | "jumping_jacks_2.mp4\n", 332 | "jumping_jacks_2.mp4\n", 333 | "jumping_jacks_3.mp4\n", 334 | "jumping_jacks_3.mp4\n", 335 | "jumping_jacks_4.mp4\n", 336 | "jumping_jacks_4.mp4\n", 337 | "jumping_jacks_5.mp4\n", 338 | "jumping_jacks_5.mp4\n", 339 | "jumping_jacks_6.mp4\n", 340 | "jumping_jacks_6.mp4\n", 341 | "jumping_jacks_7.mp4\n", 342 | "jumping_jacks_7.mp4\n", 343 | "mountain_climber_1.mp4\n", 344 | "mountain_climber_1.mp4\n", 345 | "mountain_climber_2.mp4\n", 346 | "mountain_climber_2.mp4\n", 347 | "plank_1.mp4\n", 348 | "plank_1.mp4\n", 349 | "mountain_climber_4.mp4\n", 350 | "mountain_climber_4.mp4\n", 351 | "mountain_climber_3.mp4\n", 352 | "mountain_climber_3.mp4\n", 353 | "plank_4.mp4\n", 354 | "plank_4.mp4\n", 355 | "plank_2.mp4\n", 356 | "plank_2.mp4\n", 357 | "plank_5.mp4\n", 358 | "plank_5.mp4\n", 359 | "squat_2.mp4\n", 360 | "squat_2.mp4\n", 361 | "squat_1.mp4\n", 362 | "squat_1.mp4\n", 363 | "pushup_1.mp4\n", 364 | "pushup_1.mp4\n", 365 | "squat_6.mp4\n", 366 | "squat_6.mp4\n", 367 | "squat_5.mp4\n", 368 | "squat_5.mp4\n", 369 | "squat_4.mp4\n", 370 | "squat_4.mp4\n", 371 | "squat_3.mp4\n", 372 | "squat_3.mp4\n" 373 | ], 374 | "name": "stdout" 375 | } 376 | ] 377 | }, 378 | { 379 | "cell_type": "code", 380 | "metadata": { 381 | "id": "qB700QbFaO_m", 382 | "colab_type": "code", 383 | "colab": {} 384 | }, 385 | "source": [ 386 | "!rm -rf *mp4 Thumbs.db pushups mcs jj planks squats burpees" 387 | ], 388 | "execution_count": 0, 389 | "outputs": [] 390 | }, 391 | { 392 | "cell_type": "code", 393 | "metadata": { 394 | "id": "2I18In1RZpur", 395 | "colab_type": "code", 396 | "outputId": "bc428b5f-e086-4232-d90c-d2a3d314db42", 397 | "colab": { 398 | "base_uri": "https://localhost:8080/", 399 | "height": 241 400 | } 401 | }, 402 | "source": 
[ 403 | "import os, shutil\n", 404 | "def copy_dir(name, f1):\n", 405 | " print(f1)\n", 406 | " fnames = [x for x in os.listdir(\".\") if name in x]\n", 407 | " print(fnames)\n", 408 | " os.mkdir(f1)\n", 409 | " count = 0\n", 410 | " for x in fnames:\n", 411 | " fx = os.listdir(x)\n", 412 | " for f in fx:\n", 413 | " #print(x+\"/\"+f)\n", 414 | " shutil.copy(x+\"/\"+f, f1+\"/\"+x.split(\".\")[0]+\"_\"+f)\n", 415 | "copy_dir(\"pushup\", \"pushups\")\n", 416 | "copy_dir(\"mountain_climber\", \"mcs\")\n", 417 | "copy_dir(\"squat\", \"squats\")\n", 418 | "copy_dir(\"plank\", \"planks\")\n", 419 | "copy_dir(\"jumping\", \"jj\")\n", 420 | "copy_dir(\"burpee\", \"burpees\")" 421 | ], 422 | "execution_count": 0, 423 | "outputs": [ 424 | { 425 | "output_type": "stream", 426 | "text": [ 427 | "pushups\n", 428 | "['pushup_1.mp4']\n", 429 | "mcs\n", 430 | "['mountain_climber_3.mp4', 'mountain_climber_5.mp4', 'mountain_climber_4.mp4', 'mountain_climber_1.mp4', 'mountain_climber_2.mp4']\n", 431 | "squats\n", 432 | "['squat_2.mp4', 'squat_5.mp4', 'squat_1.mp4', 'squat_4.mp4', 'squat_6.mp4', 'squat_3.mp4']\n", 433 | "planks\n", 434 | "['plank_2.mp4', 'plank_1.mp4', 'plank_4.mp4', 'plank_6.mp4', 'plank_5.mp4', 'plank_3.mp4']\n", 435 | "jj\n", 436 | "['jumping_jacks_4.mp4', 'jumping_jacks_1.mp4', 'jumping_jacks_2.mp4', 'jumping_jacks_6.mp4', 'jumping_jacks_7.mp4', 'jumping_jacks_3.mp4', 'jumping_jacks_5.mp4']\n", 437 | "burpees\n", 438 | "['burpee_3.mp4', 'burpee_2.mp4', 'burpee_1.mp4']\n" 439 | ], 440 | "name": "stdout" 441 | } 442 | ] 443 | }, 444 | { 445 | "cell_type": "code", 446 | "metadata": { 447 | "id": "tFaGqNI2Z392", 448 | "colab_type": "code", 449 | "colab": {} 450 | }, 451 | "source": [ 452 | "!mkdir action_data\n", 453 | "!mv pushups action_data/\n", 454 | "!mv jj action_data/\n", 455 | "!mv mcs action_data/\n", 456 | "!mv squats action_data/\n", 457 | "!mv planks action_data/\n", 458 | "!mv burpees action_data/" 459 | ], 460 | "execution_count": 0, 461 | "outputs": [] 
462 | }, 463 | { 464 | "cell_type": "code", 465 | "metadata": { 466 | "id": "Y1MP2ZihZ6w-", 467 | "colab_type": "code", 468 | "colab": {} 469 | }, 470 | "source": [ 471 | "!zip -qr action_data.zip action_data" 472 | ], 473 | "execution_count": 0, 474 | "outputs": [] 475 | }, 476 | { 477 | "cell_type": "code", 478 | "metadata": { 479 | "id": "aG8G9AhpZ9Dc", 480 | "colab_type": "code", 481 | "colab": {} 482 | }, 483 | "source": [ 484 | "!cp action_data.zip /content/drive/'My Drive'/" 485 | ], 486 | "execution_count": 0, 487 | "outputs": [] 488 | }, 489 | { 490 | "cell_type": "code", 491 | "metadata": { 492 | "id": "rpJauBediLyU", 493 | "colab_type": "code", 494 | "colab": {} 495 | }, 496 | "source": [ 497 | "import keras\n", 498 | "import numpy as np\n", 499 | "from keras.applications import resnet50\n", 500 | " \n", 501 | "#Load the ResNet50 model\n", 502 | "resnet_model = resnet50.ResNet50(weights='imagenet')" 503 | ], 504 | "execution_count": 0, 505 | "outputs": [] 506 | }, 507 | { 508 | "cell_type": "code", 509 | "metadata": { 510 | "id": "VY14dbf2ioii", 511 | "colab_type": "code", 512 | "colab": {} 513 | }, 514 | "source": [ 515 | "from keras.layers import Conv2D, Dense, Dropout\n", 516 | "from keras.models import Model\n", 517 | "resnet_model.layers.pop()\n", 518 | "dense_1 = Dense(400, activation='relu', name='dense_1')(resnet_model.layers[-1].output)\n", 519 | "dense_1 = Dropout(0.2)(dense_1)\n", 520 | "final = Dense(7, activation='softmax', name='final')(dense_1)\n", 521 | "model = Model(input=resnet_model.layers[0].input, output=[final])\n", 522 | "model.summary()" 523 | ], 524 | "execution_count": 0, 525 | "outputs": [] 526 | }, 527 | { 528 | "cell_type": "code", 529 | "metadata": { 530 | "id": "tlt0JUF3jNBL", 531 | "colab_type": "code", 532 | "colab": {} 533 | }, 534 | "source": [ 535 | "from keras.applications.resnet50 import preprocess_input, decode_predictions\n", 536 | "from keras.preprocessing.image import ImageDataGenerator\n", 537 | "train_datagen 
= ImageDataGenerator(\n", 538 | " shear_range=0,\n", 539 | " zoom_range=0,\n", 540 | " preprocessing_function = preprocess_input,\n", 541 | " horizontal_flip=False,\n", 542 | " width_shift_range=0, # randomly shift images horizontally (fraction of total width)\n", 543 | " height_shift_range=0)\n", 544 | "test_datagen = ImageDataGenerator(\n", 545 | " shear_range=0,\n", 546 | " zoom_range=0,\n", 547 | " preprocessing_function = preprocess_input,\n", 548 | " horizontal_flip=False,\n", 549 | " width_shift_range=0, # randomly shift images horizontally (fraction of total width)\n", 550 | " height_shift_range=0)" 551 | ], 552 | "execution_count": 0, 553 | "outputs": [] 554 | }, 555 | { 556 | "cell_type": "code", 557 | "metadata": { 558 | "id": "mVIPSNbGjUUP", 559 | "colab_type": "code", 560 | "outputId": "9dfd4b1e-5c02-4187-8cff-d98a4c596f96", 561 | "colab": { 562 | "base_uri": "https://localhost:8080/", 563 | "height": 51 564 | } 565 | }, 566 | "source": [ 567 | "from keras.callbacks import ModelCheckpoint\n", 568 | "\n", 569 | "train_generator = train_datagen.flow_from_directory(\n", 570 | " directory=r\"./full_data/train/\",\n", 571 | " target_size=(224, 224),\n", 572 | " color_mode=\"rgb\",\n", 573 | " batch_size=32,\n", 574 | " class_mode=\"categorical\",\n", 575 | " shuffle=True,\n", 576 | " seed=42\n", 577 | ")\n", 578 | "test_generator = test_datagen.flow_from_directory(\n", 579 | " directory=r\"./full_data/valid/\",\n", 580 | " target_size=(224, 224),\n", 581 | " color_mode=\"rgb\",\n", 582 | " batch_size=32,\n", 583 | " class_mode=\"categorical\",\n", 584 | " shuffle=True,\n", 585 | " seed=42\n", 586 | ")\n", 587 | "\n", 588 | "fileName = '/content/drive/My Drive/cult_model.hdf5'\n", 589 | "\n", 590 | "checkpoint = ModelCheckpoint(fileName,monitor='val_acc', verbose=1, save_best_only=True, mode='auto')" 591 | ], 592 | "execution_count": 0, 593 | "outputs": [ 594 | { 595 | "output_type": "stream", 596 | "text": [ 597 | "Found 98338 images belonging to 7 
classes.\n", 598 | "Found 12457 images belonging to 7 classes.\n" 599 | ], 600 | "name": "stdout" 601 | } 602 | ] 603 | }, 604 | { 605 | "cell_type": "code", 606 | "metadata": { 607 | "id": "68ieBPnrNlLN", 608 | "colab_type": "code", 609 | "outputId": "82a1e933-b9c4-44e5-c7f0-413cf644b2b0", 610 | "colab": { 611 | "base_uri": "https://localhost:8080/", 612 | "height": 34 613 | } 614 | }, 615 | "source": [ 616 | "" 617 | ], 618 | "execution_count": 0, 619 | "outputs": [ 620 | { 621 | "output_type": "stream", 622 | "text": [ 623 | "ls: cannot access 'content/My Drive': No such file or directory\n" 624 | ], 625 | "name": "stdout" 626 | } 627 | ] 628 | }, 629 | { 630 | "cell_type": "code", 631 | "metadata": { 632 | "id": "iGCgZrzEjWUz", 633 | "colab_type": "code", 634 | "colab": {} 635 | }, 636 | "source": [ 637 | "#for layer in model.layers[:-2]:\n", 638 | " #layer.trainable = False" 639 | ], 640 | "execution_count": 0, 641 | "outputs": [] 642 | }, 643 | { 644 | "cell_type": "code", 645 | "metadata": { 646 | "id": "zOja0a_rB5Uo", 647 | "colab_type": "code", 648 | "colab": {} 649 | }, 650 | "source": [ 651 | "!cp full_data_new.zip drive/'My Drive'/" 652 | ], 653 | "execution_count": 0, 654 | "outputs": [] 655 | }, 656 | { 657 | "cell_type": "code", 658 | "metadata": { 659 | "id": "i0_Ptm_vjh1s", 660 | "colab_type": "code", 661 | "outputId": "8542e2e1-e264-417c-efee-338b7c6cc045", 662 | "colab": { 663 | "base_uri": "https://localhost:8080/", 664 | "height": 1000 665 | } 666 | }, 667 | "source": [ 668 | "from keras.optimizers import SGD\n", 669 | "batch_size = 256\n", 670 | "learning_rate = 0.0001\n", 671 | "sgd = SGD(lr=learning_rate, decay=0.0001, momentum=0.9, nesterov=True)\n", 672 | "model.compile(loss='categorical_crossentropy',\n", 673 | " optimizer=sgd,\n", 674 | " metrics=['accuracy'])\n", 675 | "model.fit_generator(\n", 676 | " train_generator,\n", 677 | " steps_per_epoch=8000 // batch_size,\n", 678 | " epochs=100,\n", 679 | " validation_data=test_generator,\n", 
680 | " validation_steps=400 // batch_size,\n", 681 | " callbacks = [checkpoint])" 682 | ], 683 | "execution_count": 92, 684 | "outputs": [ 685 | { 686 | "output_type": "stream", 687 | "text": [ 688 | "Epoch 1/100\n", 689 | "31/31 [==============================] - 58s 2s/step - loss: 1.4419 - acc: 0.5292 - val_loss: 2.0851 - val_acc: 0.1875\n", 690 | "\n", 691 | "Epoch 00001: val_acc improved from 0.15625 to 0.18750, saving model to /content/drive/My Drive/cult_model.hdf5\n", 692 | "Epoch 2/100\n", 693 | "31/31 [==============================] - 26s 844ms/step - loss: 1.3036 - acc: 0.5696 - val_loss: 2.2486 - val_acc: 0.1250\n", 694 | "\n", 695 | "Epoch 00002: val_acc did not improve from 0.18750\n", 696 | "Epoch 3/100\n", 697 | "31/31 [==============================] - 26s 847ms/step - loss: 1.1711 - acc: 0.6129 - val_loss: 2.3670 - val_acc: 0.0938\n", 698 | "\n", 699 | "Epoch 00003: val_acc did not improve from 0.18750\n", 700 | "Epoch 4/100\n", 701 | "31/31 [==============================] - 26s 848ms/step - loss: 1.0708 - acc: 0.6754 - val_loss: 2.0778 - val_acc: 0.2188\n", 702 | "\n", 703 | "Epoch 00004: val_acc improved from 0.18750 to 0.21875, saving model to /content/drive/My Drive/cult_model.hdf5\n", 704 | "Epoch 5/100\n", 705 | "31/31 [==============================] - 26s 848ms/step - loss: 0.9312 - acc: 0.7147 - val_loss: 1.7252 - val_acc: 0.4062\n", 706 | "\n", 707 | "Epoch 00005: val_acc improved from 0.21875 to 0.40625, saving model to /content/drive/My Drive/cult_model.hdf5\n", 708 | "Epoch 6/100\n", 709 | "31/31 [==============================] - 26s 849ms/step - loss: 0.8826 - acc: 0.7470 - val_loss: 1.9953 - val_acc: 0.2188\n", 710 | "\n", 711 | "Epoch 00006: val_acc did not improve from 0.40625\n", 712 | "Epoch 7/100\n", 713 | "31/31 [==============================] - 26s 849ms/step - loss: 0.7890 - acc: 0.7712 - val_loss: 2.0271 - val_acc: 0.2500\n", 714 | "\n", 715 | "Epoch 00007: val_acc did not improve from 0.40625\n", 716 | "Epoch 
8/100\n", 717 | "31/31 [==============================] - 26s 850ms/step - loss: 0.7795 - acc: 0.7893 - val_loss: 1.9588 - val_acc: 0.1562\n", 718 | "\n", 719 | "Epoch 00008: val_acc did not improve from 0.40625\n", 720 | "Epoch 9/100\n", 721 | "31/31 [==============================] - 26s 849ms/step - loss: 0.7137 - acc: 0.8034 - val_loss: 1.6125 - val_acc: 0.4062\n", 722 | "\n", 723 | "Epoch 00009: val_acc did not improve from 0.40625\n", 724 | "Epoch 10/100\n", 725 | "31/31 [==============================] - 26s 850ms/step - loss: 0.6887 - acc: 0.7984 - val_loss: 2.1470 - val_acc: 0.2188\n", 726 | "\n", 727 | "Epoch 00010: val_acc did not improve from 0.40625\n", 728 | "Epoch 11/100\n", 729 | "31/31 [==============================] - 26s 850ms/step - loss: 0.6243 - acc: 0.8266 - val_loss: 2.0430 - val_acc: 0.3125\n", 730 | "\n", 731 | "Epoch 00011: val_acc did not improve from 0.40625\n", 732 | "Epoch 12/100\n", 733 | "31/31 [==============================] - 26s 854ms/step - loss: 0.6179 - acc: 0.8246 - val_loss: 1.9148 - val_acc: 0.2500\n", 734 | "\n", 735 | "Epoch 00012: val_acc did not improve from 0.40625\n", 736 | "Epoch 13/100\n", 737 | "31/31 [==============================] - 27s 861ms/step - loss: 0.5800 - acc: 0.8347 - val_loss: 2.0870 - val_acc: 0.3750\n", 738 | "\n", 739 | "Epoch 00013: val_acc did not improve from 0.40625\n", 740 | "Epoch 14/100\n", 741 | "31/31 [==============================] - 27s 856ms/step - loss: 0.5634 - acc: 0.8417 - val_loss: 1.8217 - val_acc: 0.3125\n", 742 | "\n", 743 | "Epoch 00014: val_acc did not improve from 0.40625\n", 744 | "Epoch 15/100\n", 745 | "31/31 [==============================] - 26s 853ms/step - loss: 0.4933 - acc: 0.8488 - val_loss: 1.9873 - val_acc: 0.2188\n", 746 | "\n", 747 | "Epoch 00015: val_acc did not improve from 0.40625\n", 748 | "Epoch 16/100\n", 749 | "31/31 [==============================] - 26s 849ms/step - loss: 0.4615 - acc: 0.8750 - val_loss: 1.7904 - val_acc: 0.4375\n", 750 | "\n", 751 | 
"Epoch 00016: val_acc improved from 0.40625 to 0.43750, saving model to /content/drive/My Drive/cult_model.hdf5\n", 752 | "Epoch 17/100\n", 753 | "31/31 [==============================] - 26s 830ms/step - loss: 0.4910 - acc: 0.8456 - val_loss: 1.6268 - val_acc: 0.4375\n", 754 | "\n", 755 | "Epoch 00017: val_acc did not improve from 0.43750\n", 756 | "Epoch 18/100\n", 757 | "31/31 [==============================] - 26s 848ms/step - loss: 0.4653 - acc: 0.8669 - val_loss: 2.3230 - val_acc: 0.2812\n", 758 | "\n", 759 | "Epoch 00018: val_acc did not improve from 0.43750\n", 760 | "Epoch 19/100\n", 761 | "31/31 [==============================] - 26s 850ms/step - loss: 0.4006 - acc: 0.8911 - val_loss: 2.0437 - val_acc: 0.2500\n", 762 | "\n", 763 | "Epoch 00019: val_acc did not improve from 0.43750\n", 764 | "Epoch 20/100\n", 765 | "31/31 [==============================] - 26s 848ms/step - loss: 0.4310 - acc: 0.8770 - val_loss: 1.8405 - val_acc: 0.3750\n", 766 | "\n", 767 | "Epoch 00020: val_acc did not improve from 0.43750\n", 768 | "Epoch 21/100\n", 769 | "31/31 [==============================] - 26s 848ms/step - loss: 0.4395 - acc: 0.8700 - val_loss: 2.2436 - val_acc: 0.2812\n", 770 | "\n", 771 | "Epoch 00021: val_acc did not improve from 0.43750\n", 772 | "Epoch 22/100\n", 773 | "31/31 [==============================] - 26s 848ms/step - loss: 0.4121 - acc: 0.8740 - val_loss: 1.7271 - val_acc: 0.4375\n", 774 | "\n", 775 | "Epoch 00022: val_acc did not improve from 0.43750\n", 776 | "Epoch 23/100\n", 777 | "31/31 [==============================] - 26s 850ms/step - loss: 0.4005 - acc: 0.8901 - val_loss: 2.0739 - val_acc: 0.3750\n", 778 | "\n", 779 | "Epoch 00023: val_acc did not improve from 0.43750\n", 780 | "Epoch 24/100\n", 781 | "31/31 [==============================] - 26s 851ms/step - loss: 0.3647 - acc: 0.8881 - val_loss: 1.8255 - val_acc: 0.4375\n", 782 | "\n", 783 | "Epoch 00024: val_acc did not improve from 0.43750\n", 784 | "Epoch 25/100\n", 785 | "31/31 
[==============================] - 26s 850ms/step - loss: 0.3787 - acc: 0.8891 - val_loss: 1.8199 - val_acc: 0.4375\n", 786 | "\n", 787 | "Epoch 00025: val_acc did not improve from 0.43750\n", 788 | "Epoch 26/100\n", 789 | "31/31 [==============================] - 26s 851ms/step - loss: 0.3357 - acc: 0.9002 - val_loss: 2.0819 - val_acc: 0.2812\n", 790 | "\n", 791 | "Epoch 00026: val_acc did not improve from 0.43750\n", 792 | "Epoch 27/100\n", 793 | "31/31 [==============================] - 26s 852ms/step - loss: 0.3182 - acc: 0.9103 - val_loss: 2.2176 - val_acc: 0.2812\n", 794 | "\n", 795 | "Epoch 00027: val_acc did not improve from 0.43750\n", 796 | "Epoch 28/100\n", 797 | "31/31 [==============================] - 27s 855ms/step - loss: 0.3088 - acc: 0.9183 - val_loss: 1.9996 - val_acc: 0.3750\n", 798 | "\n", 799 | "Epoch 00028: val_acc did not improve from 0.43750\n", 800 | "Epoch 29/100\n", 801 | "31/31 [==============================] - 27s 856ms/step - loss: 0.3135 - acc: 0.9163 - val_loss: 2.1852 - val_acc: 0.3438\n", 802 | "\n", 803 | "Epoch 00029: val_acc did not improve from 0.43750\n", 804 | "Epoch 30/100\n", 805 | "31/31 [==============================] - 27s 856ms/step - loss: 0.3105 - acc: 0.9083 - val_loss: 1.6828 - val_acc: 0.4688\n", 806 | "\n", 807 | "Epoch 00030: val_acc improved from 0.43750 to 0.46875, saving model to /content/drive/My Drive/cult_model.hdf5\n", 808 | "Epoch 31/100\n", 809 | "31/31 [==============================] - 27s 863ms/step - loss: 0.3077 - acc: 0.9224 - val_loss: 1.9741 - val_acc: 0.3750\n", 810 | "\n", 811 | "Epoch 00031: val_acc did not improve from 0.46875\n", 812 | "Epoch 32/100\n", 813 | "31/31 [==============================] - 27s 856ms/step - loss: 0.2918 - acc: 0.9052 - val_loss: 2.3005 - val_acc: 0.2812\n", 814 | "\n", 815 | "Epoch 00032: val_acc did not improve from 0.46875\n", 816 | "Epoch 33/100\n", 817 | "31/31 [==============================] - 26s 851ms/step - loss: 0.2644 - acc: 0.9254 - val_loss: 2.3277 
- val_acc: 0.3750\n", 818 | "\n", 819 | "Epoch 00033: val_acc did not improve from 0.46875\n", 820 | "Epoch 34/100\n", 821 | "31/31 [==============================] - 26s 850ms/step - loss: 0.2825 - acc: 0.9234 - val_loss: 2.3653 - val_acc: 0.2812\n", 822 | "\n", 823 | "Epoch 00034: val_acc did not improve from 0.46875\n", 824 | "Epoch 35/100\n", 825 | "31/31 [==============================] - 26s 850ms/step - loss: 0.2734 - acc: 0.9224 - val_loss: 2.0368 - val_acc: 0.4375\n", 826 | "\n", 827 | "Epoch 00035: val_acc did not improve from 0.46875\n", 828 | "Epoch 36/100\n", 829 | "31/31 [==============================] - 26s 847ms/step - loss: 0.2814 - acc: 0.9173 - val_loss: 2.5873 - val_acc: 0.2812\n", 830 | "\n", 831 | "Epoch 00036: val_acc did not improve from 0.46875\n", 832 | "Epoch 37/100\n", 833 | "31/31 [==============================] - 26s 849ms/step - loss: 0.2900 - acc: 0.9103 - val_loss: 2.2650 - val_acc: 0.3438\n", 834 | "\n", 835 | "Epoch 00037: val_acc did not improve from 0.46875\n", 836 | "Epoch 38/100\n", 837 | "31/31 [==============================] - 26s 850ms/step - loss: 0.2736 - acc: 0.9103 - val_loss: 1.6496 - val_acc: 0.4375\n", 838 | "\n", 839 | "Epoch 00038: val_acc did not improve from 0.46875\n", 840 | "Epoch 39/100\n", 841 | "31/31 [==============================] - 26s 851ms/step - loss: 0.2793 - acc: 0.9123 - val_loss: 2.8526 - val_acc: 0.2188\n", 842 | "\n", 843 | "Epoch 00039: val_acc did not improve from 0.46875\n", 844 | "Epoch 40/100\n", 845 | "31/31 [==============================] - 27s 855ms/step - loss: 0.2548 - acc: 0.9274 - val_loss: 1.9245 - val_acc: 0.4062\n", 846 | "\n", 847 | "Epoch 00040: val_acc did not improve from 0.46875\n", 848 | "Epoch 41/100\n", 849 | "31/31 [==============================] - 27s 855ms/step - loss: 0.2378 - acc: 0.9254 - val_loss: 2.1761 - val_acc: 0.3750\n", 850 | "\n", 851 | "Epoch 00041: val_acc did not improve from 0.46875\n", 852 | "Epoch 42/100\n", 853 | "31/31 
[==============================] - 27s 855ms/step - loss: 0.2029 - acc: 0.9345 - val_loss: 2.0032 - val_acc: 0.3750\n", 854 | "\n", 855 | "Epoch 00042: val_acc did not improve from 0.46875\n", 856 | "Epoch 43/100\n", 857 | "31/31 [==============================] - 27s 857ms/step - loss: 0.2157 - acc: 0.9446 - val_loss: 1.8191 - val_acc: 0.5312\n", 858 | "\n", 859 | "Epoch 00043: val_acc improved from 0.46875 to 0.53125, saving model to /content/drive/My Drive/cult_model.hdf5\n", 860 | "Epoch 44/100\n", 861 | "31/31 [==============================] - 27s 863ms/step - loss: 0.2568 - acc: 0.9173 - val_loss: 1.8486 - val_acc: 0.4062\n", 862 | "\n", 863 | "Epoch 00044: val_acc did not improve from 0.53125\n", 864 | "Epoch 45/100\n", 865 | "31/31 [==============================] - 27s 857ms/step - loss: 0.2241 - acc: 0.9365 - val_loss: 1.8875 - val_acc: 0.4688\n", 866 | "\n", 867 | "Epoch 00045: val_acc did not improve from 0.53125\n", 868 | "Epoch 46/100\n", 869 | "31/31 [==============================] - 27s 856ms/step - loss: 0.2194 - acc: 0.9405 - val_loss: 2.1556 - val_acc: 0.3750\n", 870 | "\n", 871 | "Epoch 00046: val_acc did not improve from 0.53125\n", 872 | "Epoch 47/100\n", 873 | "31/31 [==============================] - 26s 854ms/step - loss: 0.2004 - acc: 0.9385 - val_loss: 2.3135 - val_acc: 0.3750\n", 874 | "\n", 875 | "Epoch 00047: val_acc did not improve from 0.53125\n", 876 | "Epoch 48/100\n", 877 | "31/31 [==============================] - 26s 851ms/step - loss: 0.2333 - acc: 0.9405 - val_loss: 1.5268 - val_acc: 0.5000\n", 878 | "\n", 879 | "Epoch 00048: val_acc did not improve from 0.53125\n", 880 | "Epoch 49/100\n", 881 | "31/31 [==============================] - 26s 852ms/step - loss: 0.2132 - acc: 0.9405 - val_loss: 1.9070 - val_acc: 0.5000\n", 882 | "\n", 883 | "Epoch 00049: val_acc did not improve from 0.53125\n", 884 | "Epoch 50/100\n", 885 | "31/31 [==============================] - 26s 852ms/step - loss: 0.1854 - acc: 0.9425 - val_loss: 2.7580 
- val_acc: 0.3438\n", 886 | "\n", 887 | "Epoch 00050: val_acc did not improve from 0.53125\n", 888 | "Epoch 51/100\n", 889 | "31/31 [==============================] - 26s 849ms/step - loss: 0.1940 - acc: 0.9466 - val_loss: 2.6318 - val_acc: 0.2812\n", 890 | "\n", 891 | "Epoch 00051: val_acc did not improve from 0.53125\n", 892 | "Epoch 52/100\n", 893 | "31/31 [==============================] - 26s 850ms/step - loss: 0.2030 - acc: 0.9375 - val_loss: 2.3249 - val_acc: 0.4062\n", 894 | "\n", 895 | "Epoch 00052: val_acc did not improve from 0.53125\n", 896 | "Epoch 53/100\n", 897 | "31/31 [==============================] - 26s 849ms/step - loss: 0.1855 - acc: 0.9546 - val_loss: 2.4357 - val_acc: 0.3438\n", 898 | "\n", 899 | "Epoch 00053: val_acc did not improve from 0.53125\n", 900 | "Epoch 54/100\n", 901 | "31/31 [==============================] - 26s 850ms/step - loss: 0.1993 - acc: 0.9375 - val_loss: 2.0547 - val_acc: 0.4062\n", 902 | "\n", 903 | "Epoch 00054: val_acc did not improve from 0.53125\n", 904 | "Epoch 55/100\n", 905 | "31/31 [==============================] - 26s 852ms/step - loss: 0.2016 - acc: 0.9325 - val_loss: 2.2949 - val_acc: 0.4062\n", 906 | "\n", 907 | "Epoch 00055: val_acc did not improve from 0.53125\n", 908 | "Epoch 56/100\n", 909 | "31/31 [==============================] - 26s 850ms/step - loss: 0.1877 - acc: 0.9496 - val_loss: 1.9696 - val_acc: 0.4375\n", 910 | "\n", 911 | "Epoch 00056: val_acc did not improve from 0.53125\n", 912 | "Epoch 57/100\n", 913 | "31/31 [==============================] - 26s 848ms/step - loss: 0.1637 - acc: 0.9567 - val_loss: 1.0719 - val_acc: 0.7188\n", 914 | "\n", 915 | "Epoch 00057: val_acc improved from 0.53125 to 0.71875, saving model to /content/drive/My Drive/cult_model.hdf5\n", 916 | "Epoch 58/100\n", 917 | "31/31 [==============================] - 26s 851ms/step - loss: 0.1768 - acc: 0.9435 - val_loss: 2.0257 - val_acc: 0.4062\n", 918 | "\n", 919 | "Epoch 00058: val_acc did not improve from 0.71875\n", 920 
| "Epoch 59/100\n", 921 | "31/31 [==============================] - 27s 860ms/step - loss: 0.1717 - acc: 0.9476 - val_loss: 1.5655 - val_acc: 0.6562\n", 922 | "\n", 923 | "Epoch 00059: val_acc did not improve from 0.71875\n", 924 | "Epoch 60/100\n", 925 | "31/31 [==============================] - 27s 862ms/step - loss: 0.1877 - acc: 0.9415 - val_loss: 2.0849 - val_acc: 0.4062\n", 926 | "\n", 927 | "Epoch 00060: val_acc did not improve from 0.71875\n", 928 | "Epoch 61/100\n", 929 | "31/31 [==============================] - 27s 857ms/step - loss: 0.1855 - acc: 0.9365 - val_loss: 2.1887 - val_acc: 0.3125\n", 930 | "\n", 931 | "Epoch 00061: val_acc did not improve from 0.71875\n", 932 | "Epoch 62/100\n", 933 | "31/31 [==============================] - 26s 853ms/step - loss: 0.2009 - acc: 0.9395 - val_loss: 1.8982 - val_acc: 0.3438\n", 934 | "\n", 935 | "Epoch 00062: val_acc did not improve from 0.71875\n", 936 | "Epoch 63/100\n", 937 | "31/31 [==============================] - 26s 850ms/step - loss: 0.1463 - acc: 0.9556 - val_loss: 2.3746 - val_acc: 0.4375\n", 938 | "\n", 939 | "Epoch 00063: val_acc did not improve from 0.71875\n", 940 | "Epoch 64/100\n", 941 | "31/31 [==============================] - 26s 850ms/step - loss: 0.1642 - acc: 0.9536 - val_loss: 2.4013 - val_acc: 0.3438\n", 942 | "\n", 943 | "Epoch 00064: val_acc did not improve from 0.71875\n", 944 | "Epoch 65/100\n", 945 | "31/31 [==============================] - 26s 850ms/step - loss: 0.1516 - acc: 0.9577 - val_loss: 1.9631 - val_acc: 0.5625\n", 946 | "\n", 947 | "Epoch 00065: val_acc did not improve from 0.71875\n", 948 | "Epoch 66/100\n", 949 | "31/31 [==============================] - 26s 850ms/step - loss: 0.1744 - acc: 0.9506 - val_loss: 2.4781 - val_acc: 0.3438\n", 950 | "\n", 951 | "Epoch 00066: val_acc did not improve from 0.71875\n", 952 | "Epoch 67/100\n", 953 | "31/31 [==============================] - 26s 850ms/step - loss: 0.1534 - acc: 0.9556 - val_loss: 2.1707 - val_acc: 0.4688\n", 954 | 
"\n", 955 | "Epoch 00067: val_acc did not improve from 0.71875\n", 956 | "Epoch 68/100\n", 957 | "31/31 [==============================] - 26s 851ms/step - loss: 0.1640 - acc: 0.9415 - val_loss: 1.8293 - val_acc: 0.4688\n", 958 | "\n", 959 | "Epoch 00068: val_acc did not improve from 0.71875\n", 960 | "Epoch 69/100\n", 961 | "31/31 [==============================] - 26s 853ms/step - loss: 0.1713 - acc: 0.9456 - val_loss: 2.4750 - val_acc: 0.4375\n", 962 | "\n", 963 | "Epoch 00069: val_acc did not improve from 0.71875\n", 964 | "Epoch 70/100\n", 965 | "31/31 [==============================] - 27s 857ms/step - loss: 0.1692 - acc: 0.9546 - val_loss: 2.4391 - val_acc: 0.3125\n", 966 | "\n", 967 | "Epoch 00070: val_acc did not improve from 0.71875\n", 968 | "Epoch 71/100\n", 969 | "31/31 [==============================] - 27s 856ms/step - loss: 0.1595 - acc: 0.9435 - val_loss: 2.4760 - val_acc: 0.4062\n", 970 | "\n", 971 | "Epoch 00071: val_acc did not improve from 0.71875\n", 972 | "Epoch 72/100\n", 973 | "31/31 [==============================] - 27s 859ms/step - loss: 0.1419 - acc: 0.9607 - val_loss: 2.7476 - val_acc: 0.3125\n", 974 | "\n", 975 | "Epoch 00072: val_acc did not improve from 0.71875\n", 976 | "Epoch 73/100\n", 977 | "31/31 [==============================] - 27s 858ms/step - loss: 0.1524 - acc: 0.9506 - val_loss: 2.1075 - val_acc: 0.4375\n", 978 | "\n", 979 | "Epoch 00073: val_acc did not improve from 0.71875\n", 980 | "Epoch 74/100\n", 981 | "31/31 [==============================] - 27s 858ms/step - loss: 0.1604 - acc: 0.9536 - val_loss: 2.1605 - val_acc: 0.4062\n", 982 | "\n", 983 | "Epoch 00074: val_acc did not improve from 0.71875\n", 984 | "Epoch 75/100\n", 985 | "31/31 [==============================] - 26s 851ms/step - loss: 0.1312 - acc: 0.9587 - val_loss: 2.2271 - val_acc: 0.4375\n", 986 | "\n", 987 | "Epoch 00075: val_acc did not improve from 0.71875\n", 988 | "Epoch 76/100\n", 989 | "31/31 [==============================] - 26s 851ms/step - 
loss: 0.1408 - acc: 0.9567 - val_loss: 2.1830 - val_acc: 0.5312\n", 990 | "\n", 991 | "Epoch 00076: val_acc did not improve from 0.71875\n", 992 | "Epoch 77/100\n", 993 | "31/31 [==============================] - 26s 853ms/step - loss: 0.1444 - acc: 0.9546 - val_loss: 2.3005 - val_acc: 0.4375\n", 994 | "\n", 995 | "Epoch 00077: val_acc did not improve from 0.71875\n", 996 | "Epoch 78/100\n", 997 | "31/31 [==============================] - 26s 852ms/step - loss: 0.1336 - acc: 0.9546 - val_loss: 2.2758 - val_acc: 0.4062\n", 998 | "\n", 999 | "Epoch 00078: val_acc did not improve from 0.71875\n", 1000 | "Epoch 79/100\n", 1001 | "31/31 [==============================] - 26s 851ms/step - loss: 0.1235 - acc: 0.9617 - val_loss: 3.0044 - val_acc: 0.3125\n", 1002 | "\n", 1003 | "Epoch 00079: val_acc did not improve from 0.71875\n", 1004 | "Epoch 80/100\n", 1005 | "31/31 [==============================] - 26s 849ms/step - loss: 0.1208 - acc: 0.9647 - val_loss: 2.4776 - val_acc: 0.4062\n", 1006 | "\n", 1007 | "Epoch 00080: val_acc did not improve from 0.71875\n", 1008 | "Epoch 81/100\n", 1009 | "31/31 [==============================] - 26s 849ms/step - loss: 0.1228 - acc: 0.9607 - val_loss: 2.0290 - val_acc: 0.3750\n", 1010 | "\n", 1011 | "Epoch 00081: val_acc did not improve from 0.71875\n", 1012 | "Epoch 82/100\n", 1013 | "31/31 [==============================] - 26s 850ms/step - loss: 0.1409 - acc: 0.9556 - val_loss: 2.6174 - val_acc: 0.3438\n", 1014 | "\n", 1015 | "Epoch 00082: val_acc did not improve from 0.71875\n", 1016 | "Epoch 83/100\n", 1017 | "31/31 [==============================] - 26s 850ms/step - loss: 0.1375 - acc: 0.9617 - val_loss: 4.1044 - val_acc: 0.1562\n", 1018 | "\n", 1019 | "Epoch 00083: val_acc did not improve from 0.71875\n", 1020 | "Epoch 84/100\n", 1021 | "31/31 [==============================] - 26s 849ms/step - loss: 0.1312 - acc: 0.9627 - val_loss: 2.5817 - val_acc: 0.3125\n", 1022 | "\n", 1023 | "Epoch 00084: val_acc did not improve from 
0.71875\n", 1024 | "Epoch 85/100\n", 1025 | "31/31 [==============================] - 26s 851ms/step - loss: 0.1598 - acc: 0.9476 - val_loss: 2.8712 - val_acc: 0.3438\n", 1026 | "\n", 1027 | "Epoch 00085: val_acc did not improve from 0.71875\n", 1028 | "Epoch 86/100\n", 1029 | "31/31 [==============================] - 26s 848ms/step - loss: 0.1348 - acc: 0.9587 - val_loss: 1.8507 - val_acc: 0.5938\n", 1030 | "\n", 1031 | "Epoch 00086: val_acc did not improve from 0.71875\n", 1032 | "Epoch 87/100\n", 1033 | "31/31 [==============================] - 26s 852ms/step - loss: 0.1351 - acc: 0.9516 - val_loss: 2.4836 - val_acc: 0.4375\n", 1034 | "\n", 1035 | "Epoch 00087: val_acc did not improve from 0.71875\n", 1036 | "Epoch 88/100\n", 1037 | "31/31 [==============================] - 27s 858ms/step - loss: 0.1518 - acc: 0.9476 - val_loss: 2.3129 - val_acc: 0.4375\n", 1038 | "\n", 1039 | "Epoch 00088: val_acc did not improve from 0.71875\n", 1040 | "Epoch 89/100\n", 1041 | "31/31 [==============================] - 27s 858ms/step - loss: 0.1154 - acc: 0.9667 - val_loss: 2.1990 - val_acc: 0.4688\n", 1042 | "\n", 1043 | "Epoch 00089: val_acc did not improve from 0.71875\n", 1044 | "Epoch 90/100\n", 1045 | "31/31 [==============================] - 26s 854ms/step - loss: 0.1248 - acc: 0.9617 - val_loss: 1.5832 - val_acc: 0.6250\n", 1046 | "\n", 1047 | "Epoch 00090: val_acc did not improve from 0.71875\n", 1048 | "Epoch 91/100\n", 1049 | "31/31 [==============================] - 26s 849ms/step - loss: 0.1287 - acc: 0.9627 - val_loss: 2.8468 - val_acc: 0.3438\n", 1050 | "\n", 1051 | "Epoch 00091: val_acc did not improve from 0.71875\n", 1052 | "Epoch 92/100\n", 1053 | "31/31 [==============================] - 26s 850ms/step - loss: 0.1108 - acc: 0.9647 - val_loss: 2.7019 - val_acc: 0.3438\n", 1054 | "\n", 1055 | "Epoch 00092: val_acc did not improve from 0.71875\n", 1056 | "Epoch 93/100\n", 1057 | "31/31 [==============================] - 26s 850ms/step - loss: 0.1383 - acc: 
0.9567 - val_loss: 2.7887 - val_acc: 0.4062\n", 1058 | "\n", 1059 | "Epoch 00093: val_acc did not improve from 0.71875\n", 1060 | "Epoch 94/100\n", 1061 | "31/31 [==============================] - 26s 846ms/step - loss: 0.1341 - acc: 0.9607 - val_loss: 2.5627 - val_acc: 0.5000\n", 1062 | "\n", 1063 | "Epoch 00094: val_acc did not improve from 0.71875\n", 1064 | "Epoch 95/100\n", 1065 | "31/31 [==============================] - 26s 845ms/step - loss: 0.1133 - acc: 0.9738 - val_loss: 3.1483 - val_acc: 0.2812\n", 1066 | "\n", 1067 | "Epoch 00095: val_acc did not improve from 0.71875\n", 1068 | "Epoch 96/100\n", 1069 | "31/31 [==============================] - 26s 847ms/step - loss: 0.1290 - acc: 0.9546 - val_loss: 2.5955 - val_acc: 0.3750\n", 1070 | "\n", 1071 | "Epoch 00096: val_acc did not improve from 0.71875\n", 1072 | "Epoch 97/100\n", 1073 | "31/31 [==============================] - 26s 848ms/step - loss: 0.1151 - acc: 0.9677 - val_loss: 2.1688 - val_acc: 0.5000\n", 1074 | "\n", 1075 | "Epoch 00097: val_acc did not improve from 0.71875\n", 1076 | "Epoch 98/100\n", 1077 | "31/31 [==============================] - 26s 848ms/step - loss: 0.1280 - acc: 0.9597 - val_loss: 2.4794 - val_acc: 0.4375\n", 1078 | "\n", 1079 | "Epoch 00098: val_acc did not improve from 0.71875\n", 1080 | "Epoch 99/100\n", 1081 | "31/31 [==============================] - 26s 849ms/step - loss: 0.1315 - acc: 0.9536 - val_loss: 2.1666 - val_acc: 0.5000\n", 1082 | "\n", 1083 | "Epoch 00099: val_acc did not improve from 0.71875\n", 1084 | "Epoch 100/100\n", 1085 | "31/31 [==============================] - 26s 850ms/step - loss: 0.1005 - acc: 0.9688 - val_loss: 3.6812 - val_acc: 0.1562\n", 1086 | "\n", 1087 | "Epoch 00100: val_acc did not improve from 0.71875\n" 1088 | ], 1089 | "name": "stdout" 1090 | }, 1091 | { 1092 | "output_type": "execute_result", 1093 | "data": { 1094 | "text/plain": [ 1095 | "" 1096 | ] 1097 | }, 1098 | "metadata": { 1099 | "tags": [] 1100 | }, 1101 | "execution_count": 
92 1102 | } 1103 | ] 1104 | }, 1105 | { 1106 | "cell_type": "code", 1107 | "metadata": { 1108 | "id": "RhDSAEdaPW72", 1109 | "colab_type": "code", 1110 | "colab": {} 1111 | }, 1112 | "source": [ 1113 | "# import the necessary packages\n", 1114 | "from keras.models import load_model\n", 1115 | "from collections import deque\n", 1116 | "import numpy as np\n", 1117 | "import cv2\n", 1118 | "\n", 1119 | "model = load_model(fileName)  # NOTE(review): assumes fileName (checkpoint path) was set by an earlier cell -- confirm\n", 1120 | "\n", 1121 | "Q = deque(maxlen=10)\n", 1122 | "\n", 1123 | "# initialize the video stream, pointer to output video file, and\n", 1124 | "# frame dimensions\n", 1125 | "vs = cv2.VideoCapture(\"GroupWorkoutVideo(0).m4v\")  # FIXME: was cv2.VideoCapture() with no source, which never opens, so read() always fails; confirm this input path\n", 1126 | "writer = None\n", 1127 | "(W, H) = (None, None)\n", 1128 | " \n", 1129 | "# loop over frames from the video file stream\n", 1130 | "while True:\n", 1131 | "\t# read the next frame from the file\n", 1132 | "\t(grabbed, frame) = vs.read()\n", 1133 | " \n", 1134 | "\t# if the frame was not grabbed, then we have reached the end\n", 1135 | "\t# of the stream\n", 1136 | "\tif not grabbed:\n", 1137 | "\t\tbreak\n", 1138 | " \n", 1139 | "\t# if the frame dimensions are empty, grab them\n", 1140 | "\tif W is None or H is None:\n", 1141 | "\t\t(H, W) = frame.shape[:2]" 1142 | ], 1143 | "execution_count": 0, 1144 | "outputs": [] 1145 | }, 1146 | { 1147 | "cell_type": "code", 1148 | "metadata": { 1149 | "id": "SuFDIxOA2BQ0", 1150 | "colab_type": "code", 1151 | "colab": {} 1152 | }, 1153 | "source": [ 1154 | "loss, acc = model.evaluate_generator(test_generator, steps=100, verbose=0)" 1155 | ], 1156 | "execution_count": 0, 1157 | "outputs": [] 1158 | }, 1159 | { 1160 | "cell_type": "code", 1161 | "metadata": { 1162 | "id": "gIMDPk4k9GVC", 1163 | "colab_type": "code", 1164 | "outputId": "17c5486e-dcf0-418c-b3e0-557a49da51d2", 1165 | "colab": { 1166 | "base_uri": "https://localhost:8080/", 1167 | "height": 258 1168 | } 1169 | }, 1170 | "source": [ 1171 | "# memory footprint support libraries/code\n", 1172 | "!ln -sf /opt/bin/nvidia-smi 
/usr/bin/nvidia-smi\n", 1173 | "!pip install gputil\n", 1174 | "!pip install psutil\n", 1175 | "!pip install humanize\n", 1176 | "\n", 1177 | "import psutil\n", 1178 | "import humanize\n", 1179 | "import os\n", 1180 | "import GPUtil as GPU\n", 1181 | "GPUs = GPU.getGPUs()\n", 1182 | "\n", 1183 | "# XXX: only one GPU on Colab and isn’t guaranteed\n", 1184 | "gpu = GPUs[0]\n", 1185 | "def printm():\n", 1186 | " process = psutil.Process(os.getpid())\n", 1187 | " print(\"Gen RAM Free: \" + humanize.naturalsize( psutil.virtual_memory().available ), \" | Proc size: \" + humanize.naturalsize( process.memory_info().rss))\n", 1188 | " print(\"GPU RAM Free: {0:.0f}MB | Used: {1:.0f}MB | Util {2:3.0f}% | Total {3:.0f}MB\".format(gpu.memoryFree, gpu.memoryUsed, gpu.memoryUtil*100, gpu.memoryTotal))\n", 1189 | "\n", 1190 | "printm()" 1191 | ], 1192 | "execution_count": 0, 1193 | "outputs": [ 1194 | { 1195 | "output_type": "stream", 1196 | "text": [ 1197 | "Collecting gputil\n", 1198 | " Downloading https://files.pythonhosted.org/packages/ed/0e/5c61eedde9f6c87713e89d794f01e378cfd9565847d4576fa627d758c554/GPUtil-1.4.0.tar.gz\n", 1199 | "Building wheels for collected packages: gputil\n", 1200 | " Building wheel for gputil (setup.py) ... 
\u001b[?25l\u001b[?25hdone\n", 1201 | " Created wheel for gputil: filename=GPUtil-1.4.0-cp36-none-any.whl size=7410 sha256=68dcb84d83b014db1c20348c1b73245fabfb64ed58e92e25660dc8f921b6931a\n", 1202 | " Stored in directory: /root/.cache/pip/wheels/3d/77/07/80562de4bb0786e5ea186911a2c831fdd0018bda69beab71fd\n", 1203 | "Successfully built gputil\n", 1204 | "Installing collected packages: gputil\n", 1205 | "Successfully installed gputil-1.4.0\n", 1206 | "Requirement already satisfied: psutil in /usr/local/lib/python3.6/dist-packages (5.4.8)\n", 1207 | "Requirement already satisfied: humanize in /usr/local/lib/python3.6/dist-packages (0.5.1)\n", 1208 | "Gen RAM Free: 10.8 GB | Proc size: 3.1 GB\n", 1209 | "GPU RAM Free: 3021MB | Used: 8420MB | Util 74% | Total 11441MB\n" 1210 | ], 1211 | "name": "stdout" 1212 | } 1213 | ] 1214 | }, 1215 | { 1216 | "cell_type": "markdown", 1217 | "metadata": { 1218 | "id": "w598p8kqTVBM", 1219 | "colab_type": "text", 1220 | "colab": {} 1221 | }, 1222 | "source": [ 1223 | "## Inference" 1224 | ] 1227 | }, 1228 | { 1229 | "cell_type": "code", 1230 | "metadata": { 1231 | "id": "IowR7DtKOvyX", 1232 | "colab_type": "code", 1233 | "colab": { 1234 | "base_uri": "https://localhost:8080/", 1235 | "height": 54 1236 | }, 1237 | "outputId": "b9a4c969-17e8-483e-c39c-51ac16076b3f" 1238 | }, 1239 | "source": [ 1240 | "import cv2\n", 1241 | "def most_frequent(List): \n", 1242 | " counter = 0\n", 1243 | " num = List[0] \n", 1244 | " \n", 1245 | " for i in List: \n", 1246 | " curr_frequency = List.count(i) \n", 1247 | " if(curr_frequency> counter): \n", 1248 | " counter = curr_frequency \n", 1249 | " num = i \n", 1250 | " \n", 1251 | " return num \n", 1252 | "#all_images = [x for x in os.listdir(\"full_data/valid/extras/\") if \"extra_1\" in x]\n", 1253 | "all_images = [x for x in os.listdir(\"GroupWorkoutVideo(0).m4v\")]\n", 1254 | "\n", 1255 | "import numpy as np\n", 1256 | "preds = []\n", 1257 | "for i in 
range(len(all_images)):\n", 1258 | " #image = cv2.imread(\"full_data/valid/extras/\"+all_images[i])\n", 1259 | " image = cv2.imread(\"GroupWorkoutVideo(0).m4v/\"+all_images[i]) \n", 1260 | " image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)\n", 1261 | " image = preprocess_input(image)\n", 1262 | " image = cv2.resize(image, (224, 224)).astype(\"float32\")\n", 1263 | " image = np.expand_dims(image, axis=0)\n", 1264 | " #print(val.shape)\n", 1265 | " pred = np.argmax(model.predict(image))\n", 1266 | " preds.append(pred)\n", 1267 | "\n", 1268 | "out = []\n", 1269 | "for i in range(len(preds[10:])):\n", 1270 | " out.append(most_frequent(preds[i:i+10]))\n", 1271 | " #out.append(preds[i])\n", 1272 | "print(out) \n" 1273 | ], 1274 | "execution_count": 125, 1275 | "outputs": [ 1276 | { 1277 | "output_type": "stream", 1278 | "text": [ 1279 | "[2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2]\n" 1280 | ], 1281 | "name": "stdout" 1282 | } 1283 | ] 1284 | }, 1285 | { 1286 | "cell_type": "code", 1287 | "metadata": { 1288 | "id": "ZwC-r-6KluWH", 1289 | "colab_type": "code", 1290 | "colab": { 1291 | "base_uri": "https://localhost:8080/", 1292 | "height": 34 1293 | }, 1294 | "outputId": "73847f1e-77f9-43a3-904d-f65aea227ab7" 1295 | }, 1296 | "source": [ 1297 | "from collections import Counter\n", 1298 | "a = Counter(out)\n", 1299 | "actions = []\n", 1300 | "for key in a:\n", 1301 | " if a[key] > 20:\n", 1302 | " actions.append(key)\n", 1303 | "for key in train_generator.class_indices:\n", 1304 | " if train_generator.class_indices[key] in actions:\n", 1305 | " print(key)" 1306 | ], 1307 | "execution_count": 
132, 1308 | "outputs": [ 1309 | { 1310 | "output_type": "stream", 1311 | "text": [ 1312 | "jj\n" 1313 | ], 1314 | "name": "stdout" 1315 | } 1316 | ] 1317 | }, 1318 | { 1319 | "cell_type": "code", 1320 | "metadata": { 1321 | "id": "Dpe99JC5dBUT", 1322 | "colab_type": "code", 1323 | "colab": { 1324 | "base_uri": "https://localhost:8080/", 1325 | "height": 1000 1326 | }, 1327 | "outputId": "60af2150-f0e2-4525-dd45-9e9639e42d84" 1328 | }, 1329 | "source": [ 1330 | "!ls full_data/valid/squats" 1331 | ], 1332 | "execution_count": 111, 1333 | "outputs": [ 1334 | { 1335 | "output_type": "stream", 1336 | "text": [ 1337 | "squat2_1_frame0.jpg\t squat2_1_frame380000.jpg squat2_1_frame6620000.jpg\n", 1338 | "squat2_1_frame1000000.jpg squat2_1_frame3810000.jpg squat2_1_frame6630000.jpg\n", 1339 | "squat2_1_frame100000.jpg squat2_1_frame3820000.jpg squat2_1_frame6640000.jpg\n", 1340 | "squat2_1_frame10000.jpg squat2_1_frame3830000.jpg squat2_1_frame6650000.jpg\n", 1341 | "squat2_1_frame1010000.jpg squat2_1_frame3840000.jpg squat2_1_frame6660000.jpg\n", 1342 | "squat2_1_frame1020000.jpg squat2_1_frame3850000.jpg squat2_1_frame6670000.jpg\n", 1343 | "squat2_1_frame1030000.jpg squat2_1_frame3860000.jpg squat2_1_frame6680000.jpg\n", 1344 | "squat2_1_frame1040000.jpg squat2_1_frame3870000.jpg squat2_1_frame6690000.jpg\n", 1345 | "squat2_1_frame1050000.jpg squat2_1_frame3880000.jpg squat2_1_frame6700000.jpg\n", 1346 | "squat2_1_frame1060000.jpg squat2_1_frame3890000.jpg squat2_1_frame670000.jpg\n", 1347 | "squat2_1_frame1070000.jpg squat2_1_frame3900000.jpg squat2_1_frame6710000.jpg\n", 1348 | "squat2_1_frame1080000.jpg squat2_1_frame390000.jpg squat2_1_frame6720000.jpg\n", 1349 | "squat2_1_frame1090000.jpg squat2_1_frame3910000.jpg squat2_1_frame6730000.jpg\n", 1350 | "squat2_1_frame1100000.jpg squat2_1_frame3920000.jpg squat2_1_frame6740000.jpg\n", 1351 | "squat2_1_frame110000.jpg squat2_1_frame3930000.jpg squat2_1_frame6750000.jpg\n", 1352 | "squat2_1_frame1110000.jpg 
squat2_1_frame3940000.jpg squat2_1_frame6760000.jpg\n", 1353 | "squat2_1_frame1120000.jpg squat2_1_frame3950000.jpg squat2_1_frame6770000.jpg\n", 1354 | "squat2_1_frame1130000.jpg squat2_1_frame3960000.jpg squat2_1_frame6780000.jpg\n", 1355 | "squat2_1_frame1140000.jpg squat2_1_frame3970000.jpg squat2_1_frame6790000.jpg\n", 1356 | "squat2_1_frame1150000.jpg squat2_1_frame3980000.jpg squat2_1_frame6800000.jpg\n", 1357 | "squat2_1_frame1160000.jpg squat2_1_frame3990000.jpg squat2_1_frame680000.jpg\n", 1358 | "squat2_1_frame1170000.jpg squat2_1_frame4000000.jpg squat2_1_frame6810000.jpg\n", 1359 | "squat2_1_frame1180000.jpg squat2_1_frame400000.jpg squat2_1_frame6820000.jpg\n", 1360 | "squat2_1_frame1190000.jpg squat2_1_frame40000.jpg squat2_1_frame6830000.jpg\n", 1361 | "squat2_1_frame1200000.jpg squat2_1_frame4010000.jpg squat2_1_frame6840000.jpg\n", 1362 | "squat2_1_frame120000.jpg squat2_1_frame4020000.jpg squat2_1_frame6850000.jpg\n", 1363 | "squat2_1_frame1210000.jpg squat2_1_frame4030000.jpg squat2_1_frame6860000.jpg\n", 1364 | "squat2_1_frame1220000.jpg squat2_1_frame4040000.jpg squat2_1_frame6870000.jpg\n", 1365 | "squat2_1_frame1230000.jpg squat2_1_frame4050000.jpg squat2_1_frame6880000.jpg\n", 1366 | "squat2_1_frame1240000.jpg squat2_1_frame4060000.jpg squat2_1_frame6890000.jpg\n", 1367 | "squat2_1_frame1250000.jpg squat2_1_frame4070000.jpg squat2_1_frame6900000.jpg\n", 1368 | "squat2_1_frame1260000.jpg squat2_1_frame4080000.jpg squat2_1_frame690000.jpg\n", 1369 | "squat2_1_frame1270000.jpg squat2_1_frame4090000.jpg squat2_1_frame6910000.jpg\n", 1370 | "squat2_1_frame1280000.jpg squat2_1_frame4100000.jpg squat2_1_frame6920000.jpg\n", 1371 | "squat2_1_frame1290000.jpg squat2_1_frame410000.jpg squat2_1_frame6930000.jpg\n", 1372 | "squat2_1_frame1300000.jpg squat2_1_frame4110000.jpg squat2_1_frame6940000.jpg\n", 1373 | "squat2_1_frame130000.jpg squat2_1_frame4120000.jpg squat2_1_frame6950000.jpg\n", 1374 | "squat2_1_frame1310000.jpg squat2_1_frame4130000.jpg 
squat2_1_frame6960000.jpg\n", 1375 | "squat2_1_frame1320000.jpg squat2_1_frame4140000.jpg squat2_1_frame6970000.jpg\n", 1376 | "squat2_1_frame1330000.jpg squat2_1_frame4150000.jpg squat2_1_frame6980000.jpg\n", 1377 | "squat2_1_frame1340000.jpg squat2_1_frame4160000.jpg squat2_1_frame6990000.jpg\n", 1378 | "squat2_1_frame1350000.jpg squat2_1_frame4170000.jpg squat2_1_frame7000000.jpg\n", 1379 | "squat2_1_frame1360000.jpg squat2_1_frame4180000.jpg squat2_1_frame700000.jpg\n", 1380 | "squat2_1_frame1370000.jpg squat2_1_frame4190000.jpg squat2_1_frame70000.jpg\n", 1381 | "squat2_1_frame1380000.jpg squat2_1_frame4200000.jpg squat2_1_frame7010000.jpg\n", 1382 | "squat2_1_frame1390000.jpg squat2_1_frame420000.jpg squat2_1_frame7020000.jpg\n", 1383 | "squat2_1_frame1400000.jpg squat2_1_frame4210000.jpg squat2_1_frame7030000.jpg\n", 1384 | "squat2_1_frame140000.jpg squat2_1_frame4220000.jpg squat2_1_frame7040000.jpg\n", 1385 | "squat2_1_frame1410000.jpg squat2_1_frame4230000.jpg squat2_1_frame7050000.jpg\n", 1386 | "squat2_1_frame1420000.jpg squat2_1_frame4240000.jpg squat2_1_frame7060000.jpg\n", 1387 | "squat2_1_frame1430000.jpg squat2_1_frame4250000.jpg squat2_1_frame7070000.jpg\n", 1388 | "squat2_1_frame1440000.jpg squat2_1_frame4260000.jpg squat2_1_frame7080000.jpg\n", 1389 | "squat2_1_frame1450000.jpg squat2_1_frame4270000.jpg squat2_1_frame7090000.jpg\n", 1390 | "squat2_1_frame1460000.jpg squat2_1_frame4280000.jpg squat2_1_frame7100000.jpg\n", 1391 | "squat2_1_frame1470000.jpg squat2_1_frame4290000.jpg squat2_1_frame710000.jpg\n", 1392 | "squat2_1_frame1480000.jpg squat2_1_frame4300000.jpg squat2_1_frame7110000.jpg\n", 1393 | "squat2_1_frame1490000.jpg squat2_1_frame430000.jpg squat2_1_frame7120000.jpg\n", 1394 | "squat2_1_frame1500000.jpg squat2_1_frame4310000.jpg squat2_1_frame7130000.jpg\n", 1395 | "squat2_1_frame150000.jpg squat2_1_frame4320000.jpg squat2_1_frame7140000.jpg\n", 1396 | "squat2_1_frame1510000.jpg squat2_1_frame4330000.jpg 
squat2_1_frame7150000.jpg\n", 1397 | "squat2_1_frame1520000.jpg squat2_1_frame4340000.jpg squat2_1_frame7160000.jpg\n", 1398 | "squat2_1_frame1530000.jpg squat2_1_frame4350000.jpg squat2_1_frame7170000.jpg\n", 1399 | "squat2_1_frame1540000.jpg squat2_1_frame4360000.jpg squat2_1_frame7180000.jpg\n", 1400 | "squat2_1_frame1550000.jpg squat2_1_frame4370000.jpg squat2_1_frame7190000.jpg\n", 1401 | "squat2_1_frame1560000.jpg squat2_1_frame4380000.jpg squat2_1_frame7200000.jpg\n", 1402 | "squat2_1_frame1570000.jpg squat2_1_frame4390000.jpg squat2_1_frame720000.jpg\n", 1403 | "squat2_1_frame1580000.jpg squat2_1_frame4400000.jpg squat2_1_frame7210000.jpg\n", 1404 | "squat2_1_frame1590000.jpg squat2_1_frame440000.jpg squat2_1_frame7220000.jpg\n", 1405 | "squat2_1_frame1600000.jpg squat2_1_frame4410000.jpg squat2_1_frame7230000.jpg\n", 1406 | "squat2_1_frame160000.jpg squat2_1_frame4420000.jpg squat2_1_frame7240000.jpg\n", 1407 | "squat2_1_frame1610000.jpg squat2_1_frame4430000.jpg squat2_1_frame7250000.jpg\n", 1408 | "squat2_1_frame1620000.jpg squat2_1_frame4440000.jpg squat2_1_frame7260000.jpg\n", 1409 | "squat2_1_frame1630000.jpg squat2_1_frame4450000.jpg squat2_1_frame7270000.jpg\n", 1410 | "squat2_1_frame1640000.jpg squat2_1_frame4460000.jpg squat2_1_frame7280000.jpg\n", 1411 | "squat2_1_frame1650000.jpg squat2_1_frame4470000.jpg squat2_1_frame7290000.jpg\n", 1412 | "squat2_1_frame1660000.jpg squat2_1_frame4480000.jpg squat2_1_frame7300000.jpg\n", 1413 | "squat2_1_frame1670000.jpg squat2_1_frame4490000.jpg squat2_1_frame730000.jpg\n", 1414 | "squat2_1_frame1680000.jpg squat2_1_frame4500000.jpg squat2_1_frame7310000.jpg\n", 1415 | "squat2_1_frame1690000.jpg squat2_1_frame450000.jpg squat2_1_frame7320000.jpg\n", 1416 | "squat2_1_frame1700000.jpg squat2_1_frame4510000.jpg squat2_1_frame7330000.jpg\n", 1417 | "squat2_1_frame170000.jpg squat2_1_frame4520000.jpg squat2_1_frame7340000.jpg\n", 1418 | "squat2_1_frame1710000.jpg squat2_1_frame4530000.jpg 
squat2_1_frame7350000.jpg\n", 1419 | "squat2_1_frame1720000.jpg squat2_1_frame4540000.jpg squat2_1_frame7360000.jpg\n", 1420 | "squat2_1_frame1730000.jpg squat2_1_frame4550000.jpg squat2_1_frame7370000.jpg\n", 1421 | "squat2_1_frame1740000.jpg squat2_1_frame4560000.jpg squat2_1_frame7380000.jpg\n", 1422 | "squat2_1_frame1750000.jpg squat2_1_frame4570000.jpg squat2_1_frame7390000.jpg\n", 1423 | "squat2_1_frame1760000.jpg squat2_1_frame4580000.jpg squat2_1_frame7400000.jpg\n", 1424 | "squat2_1_frame1770000.jpg squat2_1_frame4590000.jpg squat2_1_frame740000.jpg\n", 1425 | "squat2_1_frame1780000.jpg squat2_1_frame4600000.jpg squat2_1_frame7410000.jpg\n", 1426 | "squat2_1_frame1790000.jpg squat2_1_frame460000.jpg squat2_1_frame7420000.jpg\n", 1427 | "squat2_1_frame1800000.jpg squat2_1_frame4610000.jpg squat2_1_frame7430000.jpg\n", 1428 | "squat2_1_frame180000.jpg squat2_1_frame4620000.jpg squat2_1_frame7440000.jpg\n", 1429 | "squat2_1_frame1810000.jpg squat2_1_frame4630000.jpg squat2_1_frame7450000.jpg\n", 1430 | "squat2_1_frame1820000.jpg squat2_1_frame4640000.jpg squat2_1_frame7460000.jpg\n", 1431 | "squat2_1_frame1830000.jpg squat2_1_frame4650000.jpg squat2_1_frame7470000.jpg\n", 1432 | "squat2_1_frame1840000.jpg squat2_1_frame4660000.jpg squat2_1_frame7480000.jpg\n", 1433 | "squat2_1_frame1850000.jpg squat2_1_frame4670000.jpg squat2_1_frame7490000.jpg\n", 1434 | "squat2_1_frame1860000.jpg squat2_1_frame4680000.jpg squat2_1_frame7500000.jpg\n", 1435 | "squat2_1_frame1870000.jpg squat2_1_frame4690000.jpg squat2_1_frame750000.jpg\n", 1436 | "squat2_1_frame1880000.jpg squat2_1_frame4700000.jpg squat2_1_frame7510000.jpg\n", 1437 | "squat2_1_frame1890000.jpg squat2_1_frame470000.jpg squat2_1_frame7520000.jpg\n", 1438 | "squat2_1_frame1900000.jpg squat2_1_frame4710000.jpg squat2_1_frame7530000.jpg\n", 1439 | "squat2_1_frame190000.jpg squat2_1_frame4720000.jpg squat2_1_frame7540000.jpg\n", 1440 | "squat2_1_frame1910000.jpg squat2_1_frame4730000.jpg 
squat2_1_frame7550000.jpg\n", 1441 | "squat2_1_frame1920000.jpg squat2_1_frame4740000.jpg squat2_1_frame7560000.jpg\n", 1442 | "squat2_1_frame1930000.jpg squat2_1_frame4750000.jpg squat2_1_frame7570000.jpg\n", 1443 | "squat2_1_frame1940000.jpg squat2_1_frame4760000.jpg squat2_1_frame7580000.jpg\n", 1444 | "squat2_1_frame1950000.jpg squat2_1_frame4770000.jpg squat2_1_frame7590000.jpg\n", 1445 | "squat2_1_frame1960000.jpg squat2_1_frame4780000.jpg squat2_1_frame7600000.jpg\n", 1446 | "squat2_1_frame1970000.jpg squat2_1_frame4790000.jpg squat2_1_frame760000.jpg\n", 1447 | "squat2_1_frame1980000.jpg squat2_1_frame4800000.jpg squat2_1_frame7610000.jpg\n", 1448 | "squat2_1_frame1990000.jpg squat2_1_frame480000.jpg squat2_1_frame7620000.jpg\n", 1449 | "squat2_1_frame2000000.jpg squat2_1_frame4810000.jpg squat2_1_frame7630000.jpg\n", 1450 | "squat2_1_frame200000.jpg squat2_1_frame4820000.jpg squat2_1_frame7640000.jpg\n", 1451 | "squat2_1_frame20000.jpg squat2_1_frame4830000.jpg squat2_1_frame7650000.jpg\n", 1452 | "squat2_1_frame2010000.jpg squat2_1_frame4840000.jpg squat2_1_frame7660000.jpg\n", 1453 | "squat2_1_frame2020000.jpg squat2_1_frame4850000.jpg squat2_1_frame7670000.jpg\n", 1454 | "squat2_1_frame2030000.jpg squat2_1_frame4860000.jpg squat2_1_frame7680000.jpg\n", 1455 | "squat2_1_frame2040000.jpg squat2_1_frame4870000.jpg squat2_1_frame7690000.jpg\n", 1456 | "squat2_1_frame2050000.jpg squat2_1_frame4880000.jpg squat2_1_frame7700000.jpg\n", 1457 | "squat2_1_frame2060000.jpg squat2_1_frame4890000.jpg squat2_1_frame770000.jpg\n", 1458 | "squat2_1_frame2070000.jpg squat2_1_frame4900000.jpg squat2_1_frame7710000.jpg\n", 1459 | "squat2_1_frame2080000.jpg squat2_1_frame490000.jpg squat2_1_frame7720000.jpg\n", 1460 | "squat2_1_frame2090000.jpg squat2_1_frame4910000.jpg squat2_1_frame7730000.jpg\n", 1461 | "squat2_1_frame2100000.jpg squat2_1_frame4920000.jpg squat2_1_frame7740000.jpg\n", 1462 | "squat2_1_frame210000.jpg squat2_1_frame4930000.jpg 
squat2_1_frame7750000.jpg\n", 1463 | "squat2_1_frame2110000.jpg squat2_1_frame4940000.jpg squat2_1_frame7760000.jpg\n", 1464 | "squat2_1_frame2120000.jpg squat2_1_frame4950000.jpg squat2_1_frame7770000.jpg\n", 1465 | "squat2_1_frame2130000.jpg squat2_1_frame4960000.jpg squat2_1_frame7780000.jpg\n", 1466 | "squat2_1_frame2140000.jpg squat2_1_frame4970000.jpg squat2_1_frame7790000.jpg\n", 1467 | "squat2_1_frame2150000.jpg squat2_1_frame4980000.jpg squat2_1_frame7800000.jpg\n", 1468 | "squat2_1_frame2160000.jpg squat2_1_frame4990000.jpg squat2_1_frame780000.jpg\n", 1469 | "squat2_1_frame2170000.jpg squat2_1_frame5000000.jpg squat2_1_frame7810000.jpg\n", 1470 | "squat2_1_frame2180000.jpg squat2_1_frame500000.jpg squat2_1_frame7820000.jpg\n", 1471 | "squat2_1_frame2190000.jpg squat2_1_frame50000.jpg squat2_1_frame7830000.jpg\n", 1472 | "squat2_1_frame2200000.jpg squat2_1_frame5010000.jpg squat2_1_frame7840000.jpg\n", 1473 | "squat2_1_frame220000.jpg squat2_1_frame5020000.jpg squat2_1_frame7850000.jpg\n", 1474 | "squat2_1_frame2210000.jpg squat2_1_frame5030000.jpg squat2_1_frame7860000.jpg\n", 1475 | "squat2_1_frame2220000.jpg squat2_1_frame5040000.jpg squat2_1_frame7870000.jpg\n", 1476 | "squat2_1_frame2230000.jpg squat2_1_frame5050000.jpg squat2_1_frame7880000.jpg\n", 1477 | "squat2_1_frame2240000.jpg squat2_1_frame5060000.jpg squat2_1_frame7890000.jpg\n", 1478 | "squat2_1_frame2250000.jpg squat2_1_frame5070000.jpg squat2_1_frame7900000.jpg\n", 1479 | "squat2_1_frame2260000.jpg squat2_1_frame5080000.jpg squat2_1_frame790000.jpg\n", 1480 | "squat2_1_frame2270000.jpg squat2_1_frame5090000.jpg squat2_1_frame7910000.jpg\n", 1481 | "squat2_1_frame2280000.jpg squat2_1_frame5100000.jpg squat2_1_frame7920000.jpg\n", 1482 | "squat2_1_frame2290000.jpg squat2_1_frame510000.jpg squat2_1_frame7930000.jpg\n", 1483 | "squat2_1_frame2300000.jpg squat2_1_frame5110000.jpg squat2_1_frame7940000.jpg\n", 1484 | "squat2_1_frame230000.jpg squat2_1_frame5120000.jpg 
squat2_1_frame7950000.jpg\n", 1485 | "squat2_1_frame2310000.jpg squat2_1_frame5130000.jpg squat2_1_frame7960000.jpg\n", 1486 | "squat2_1_frame2320000.jpg squat2_1_frame5140000.jpg squat2_1_frame7970000.jpg\n", 1487 | "squat2_1_frame2330000.jpg squat2_1_frame5150000.jpg squat2_1_frame7980000.jpg\n", 1488 | "squat2_1_frame2340000.jpg squat2_1_frame5160000.jpg squat2_1_frame7990000.jpg\n", 1489 | "squat2_1_frame2350000.jpg squat2_1_frame5170000.jpg squat2_1_frame8000000.jpg\n", 1490 | "squat2_1_frame2360000.jpg squat2_1_frame5180000.jpg squat2_1_frame800000.jpg\n", 1491 | "squat2_1_frame2370000.jpg squat2_1_frame5190000.jpg squat2_1_frame80000.jpg\n", 1492 | "squat2_1_frame2380000.jpg squat2_1_frame5200000.jpg squat2_1_frame8010000.jpg\n", 1493 | "squat2_1_frame2390000.jpg squat2_1_frame520000.jpg squat2_1_frame8020000.jpg\n", 1494 | "squat2_1_frame2400000.jpg squat2_1_frame5210000.jpg squat2_1_frame8030000.jpg\n", 1495 | "squat2_1_frame240000.jpg squat2_1_frame5220000.jpg squat2_1_frame8040000.jpg\n", 1496 | "squat2_1_frame2410000.jpg squat2_1_frame5230000.jpg squat2_1_frame8050000.jpg\n", 1497 | "squat2_1_frame2420000.jpg squat2_1_frame5240000.jpg squat2_1_frame8060000.jpg\n", 1498 | "squat2_1_frame2430000.jpg squat2_1_frame5250000.jpg squat2_1_frame8070000.jpg\n", 1499 | "squat2_1_frame2440000.jpg squat2_1_frame5260000.jpg squat2_1_frame8080000.jpg\n", 1500 | "squat2_1_frame2450000.jpg squat2_1_frame5270000.jpg squat2_1_frame8090000.jpg\n", 1501 | "squat2_1_frame2460000.jpg squat2_1_frame5280000.jpg squat2_1_frame8100000.jpg\n", 1502 | "squat2_1_frame2470000.jpg squat2_1_frame5290000.jpg squat2_1_frame810000.jpg\n", 1503 | "squat2_1_frame2480000.jpg squat2_1_frame5300000.jpg squat2_1_frame8110000.jpg\n", 1504 | "squat2_1_frame2490000.jpg squat2_1_frame530000.jpg squat2_1_frame8120000.jpg\n", 1505 | "squat2_1_frame2500000.jpg squat2_1_frame5310000.jpg squat2_1_frame8130000.jpg\n", 1506 | "squat2_1_frame250000.jpg squat2_1_frame5320000.jpg 
squat2_1_frame8140000.jpg\n", 1507 | "squat2_1_frame2510000.jpg squat2_1_frame5330000.jpg squat2_1_frame8150000.jpg\n", 1508 | "squat2_1_frame2520000.jpg squat2_1_frame5340000.jpg squat2_1_frame8160000.jpg\n", 1509 | "squat2_1_frame2530000.jpg squat2_1_frame5350000.jpg squat2_1_frame8170000.jpg\n", 1510 | "squat2_1_frame2540000.jpg squat2_1_frame5360000.jpg squat2_1_frame8180000.jpg\n", 1511 | "squat2_1_frame2550000.jpg squat2_1_frame5370000.jpg squat2_1_frame8190000.jpg\n", 1512 | "squat2_1_frame2560000.jpg squat2_1_frame5380000.jpg squat2_1_frame8200000.jpg\n", 1513 | "squat2_1_frame2570000.jpg squat2_1_frame5390000.jpg squat2_1_frame820000.jpg\n", 1514 | "squat2_1_frame2580000.jpg squat2_1_frame5400000.jpg squat2_1_frame8210000.jpg\n", 1515 | "squat2_1_frame2590000.jpg squat2_1_frame540000.jpg squat2_1_frame8220000.jpg\n", 1516 | "squat2_1_frame2600000.jpg squat2_1_frame5410000.jpg squat2_1_frame8230000.jpg\n", 1517 | "squat2_1_frame260000.jpg squat2_1_frame5420000.jpg squat2_1_frame8240000.jpg\n", 1518 | "squat2_1_frame2610000.jpg squat2_1_frame5430000.jpg squat2_1_frame8250000.jpg\n", 1519 | "squat2_1_frame2620000.jpg squat2_1_frame5440000.jpg squat2_1_frame8260000.jpg\n", 1520 | "squat2_1_frame2630000.jpg squat2_1_frame5450000.jpg squat2_1_frame8270000.jpg\n", 1521 | "squat2_1_frame2640000.jpg squat2_1_frame5460000.jpg squat2_1_frame8280000.jpg\n", 1522 | "squat2_1_frame2650000.jpg squat2_1_frame5470000.jpg squat2_1_frame8290000.jpg\n", 1523 | "squat2_1_frame2660000.jpg squat2_1_frame5480000.jpg squat2_1_frame8300000.jpg\n", 1524 | "squat2_1_frame2670000.jpg squat2_1_frame5490000.jpg squat2_1_frame830000.jpg\n", 1525 | "squat2_1_frame2680000.jpg squat2_1_frame5500000.jpg squat2_1_frame8310000.jpg\n", 1526 | "squat2_1_frame2690000.jpg squat2_1_frame550000.jpg squat2_1_frame8320000.jpg\n", 1527 | "squat2_1_frame2700000.jpg squat2_1_frame5510000.jpg squat2_1_frame8330000.jpg\n", 1528 | "squat2_1_frame270000.jpg squat2_1_frame5520000.jpg 
squat2_1_frame8340000.jpg\n", 1529 | "squat2_1_frame2710000.jpg squat2_1_frame5530000.jpg squat2_1_frame8350000.jpg\n", 1530 | "squat2_1_frame2720000.jpg squat2_1_frame5540000.jpg squat2_1_frame8360000.jpg\n", 1531 | "squat2_1_frame2730000.jpg squat2_1_frame5550000.jpg squat2_1_frame8370000.jpg\n", 1532 | "squat2_1_frame2740000.jpg squat2_1_frame5560000.jpg squat2_1_frame8380000.jpg\n", 1533 | "squat2_1_frame2750000.jpg squat2_1_frame5570000.jpg squat2_1_frame8390000.jpg\n", 1534 | "squat2_1_frame2760000.jpg squat2_1_frame5580000.jpg squat2_1_frame8400000.jpg\n", 1535 | "squat2_1_frame2770000.jpg squat2_1_frame5590000.jpg squat2_1_frame840000.jpg\n", 1536 | "squat2_1_frame2780000.jpg squat2_1_frame5600000.jpg squat2_1_frame8410000.jpg\n", 1537 | "squat2_1_frame2790000.jpg squat2_1_frame560000.jpg squat2_1_frame8420000.jpg\n", 1538 | "squat2_1_frame2800000.jpg squat2_1_frame5610000.jpg squat2_1_frame8430000.jpg\n", 1539 | "squat2_1_frame280000.jpg squat2_1_frame5620000.jpg squat2_1_frame8440000.jpg\n", 1540 | "squat2_1_frame2810000.jpg squat2_1_frame5630000.jpg squat2_1_frame8450000.jpg\n", 1541 | "squat2_1_frame2820000.jpg squat2_1_frame5640000.jpg squat2_1_frame8460000.jpg\n", 1542 | "squat2_1_frame2830000.jpg squat2_1_frame5650000.jpg squat2_1_frame8470000.jpg\n", 1543 | "squat2_1_frame2840000.jpg squat2_1_frame5660000.jpg squat2_1_frame8480000.jpg\n", 1544 | "squat2_1_frame2850000.jpg squat2_1_frame5670000.jpg squat2_1_frame8490000.jpg\n", 1545 | "squat2_1_frame2860000.jpg squat2_1_frame5680000.jpg squat2_1_frame8500000.jpg\n", 1546 | "squat2_1_frame2870000.jpg squat2_1_frame5690000.jpg squat2_1_frame850000.jpg\n", 1547 | "squat2_1_frame2880000.jpg squat2_1_frame5700000.jpg squat2_1_frame8510000.jpg\n", 1548 | "squat2_1_frame2890000.jpg squat2_1_frame570000.jpg squat2_1_frame8520000.jpg\n", 1549 | "squat2_1_frame2900000.jpg squat2_1_frame5710000.jpg squat2_1_frame8530000.jpg\n", 1550 | "squat2_1_frame290000.jpg squat2_1_frame5720000.jpg 
squat2_1_frame8540000.jpg\n", 1551 | "squat2_1_frame2910000.jpg squat2_1_frame5730000.jpg squat2_1_frame8550000.jpg\n", 1552 | "squat2_1_frame2920000.jpg squat2_1_frame5740000.jpg squat2_1_frame8560000.jpg\n", 1553 | "squat2_1_frame2930000.jpg squat2_1_frame5750000.jpg squat2_1_frame8570000.jpg\n", 1554 | "squat2_1_frame2940000.jpg squat2_1_frame5760000.jpg squat2_1_frame8580000.jpg\n", 1555 | "squat2_1_frame2950000.jpg squat2_1_frame5770000.jpg squat2_1_frame8590000.jpg\n", 1556 | "squat2_1_frame2960000.jpg squat2_1_frame5780000.jpg squat2_1_frame8600000.jpg\n", 1557 | "squat2_1_frame2970000.jpg squat2_1_frame5790000.jpg squat2_1_frame860000.jpg\n", 1558 | "squat2_1_frame2980000.jpg squat2_1_frame5800000.jpg squat2_1_frame8610000.jpg\n", 1559 | "squat2_1_frame2990000.jpg squat2_1_frame580000.jpg squat2_1_frame8620000.jpg\n", 1560 | "squat2_1_frame3000000.jpg squat2_1_frame5810000.jpg squat2_1_frame8630000.jpg\n", 1561 | "squat2_1_frame300000.jpg squat2_1_frame5820000.jpg squat2_1_frame8640000.jpg\n", 1562 | "squat2_1_frame30000.jpg squat2_1_frame5830000.jpg squat2_1_frame8650000.jpg\n", 1563 | "squat2_1_frame3010000.jpg squat2_1_frame5840000.jpg squat2_1_frame8660000.jpg\n", 1564 | "squat2_1_frame3020000.jpg squat2_1_frame5850000.jpg squat2_1_frame8670000.jpg\n", 1565 | "squat2_1_frame3030000.jpg squat2_1_frame5860000.jpg squat2_1_frame8680000.jpg\n", 1566 | "squat2_1_frame3040000.jpg squat2_1_frame5870000.jpg squat2_1_frame8690000.jpg\n", 1567 | "squat2_1_frame3050000.jpg squat2_1_frame5880000.jpg squat2_1_frame8700000.jpg\n", 1568 | "squat2_1_frame3060000.jpg squat2_1_frame5890000.jpg squat2_1_frame870000.jpg\n", 1569 | "squat2_1_frame3070000.jpg squat2_1_frame5900000.jpg squat2_1_frame8710000.jpg\n", 1570 | "squat2_1_frame3080000.jpg squat2_1_frame590000.jpg squat2_1_frame8720000.jpg\n", 1571 | "squat2_1_frame3090000.jpg squat2_1_frame5910000.jpg squat2_1_frame8730000.jpg\n", 1572 | "squat2_1_frame3100000.jpg squat2_1_frame5920000.jpg 
squat2_1_frame8740000.jpg\n", 1573 | "squat2_1_frame310000.jpg squat2_1_frame5930000.jpg squat2_1_frame8750000.jpg\n", 1574 | "squat2_1_frame3110000.jpg squat2_1_frame5940000.jpg squat2_1_frame8760000.jpg\n", 1575 | "squat2_1_frame3120000.jpg squat2_1_frame5950000.jpg squat2_1_frame8770000.jpg\n", 1576 | "squat2_1_frame3130000.jpg squat2_1_frame5960000.jpg squat2_1_frame8780000.jpg\n", 1577 | "squat2_1_frame3140000.jpg squat2_1_frame5970000.jpg squat2_1_frame8790000.jpg\n", 1578 | "squat2_1_frame3150000.jpg squat2_1_frame5980000.jpg squat2_1_frame8800000.jpg\n", 1579 | "squat2_1_frame3160000.jpg squat2_1_frame5990000.jpg squat2_1_frame880000.jpg\n", 1580 | "squat2_1_frame3170000.jpg squat2_1_frame6000000.jpg squat2_1_frame8810000.jpg\n", 1581 | "squat2_1_frame3180000.jpg squat2_1_frame600000.jpg squat2_1_frame8820000.jpg\n", 1582 | "squat2_1_frame3190000.jpg squat2_1_frame60000.jpg squat2_1_frame8830000.jpg\n", 1583 | "squat2_1_frame3200000.jpg squat2_1_frame6010000.jpg squat2_1_frame8840000.jpg\n", 1584 | "squat2_1_frame320000.jpg squat2_1_frame6020000.jpg squat2_1_frame8850000.jpg\n", 1585 | "squat2_1_frame3210000.jpg squat2_1_frame6030000.jpg squat2_1_frame8860000.jpg\n", 1586 | "squat2_1_frame3220000.jpg squat2_1_frame6040000.jpg squat2_1_frame8870000.jpg\n", 1587 | "squat2_1_frame3230000.jpg squat2_1_frame6050000.jpg squat2_1_frame8880000.jpg\n", 1588 | "squat2_1_frame3240000.jpg squat2_1_frame6060000.jpg squat2_1_frame8890000.jpg\n", 1589 | "squat2_1_frame3250000.jpg squat2_1_frame6070000.jpg squat2_1_frame8900000.jpg\n", 1590 | "squat2_1_frame3260000.jpg squat2_1_frame6080000.jpg squat2_1_frame890000.jpg\n", 1591 | "squat2_1_frame3270000.jpg squat2_1_frame6090000.jpg squat2_1_frame8910000.jpg\n", 1592 | "squat2_1_frame3280000.jpg squat2_1_frame6100000.jpg squat2_1_frame8920000.jpg\n", 1593 | "squat2_1_frame3290000.jpg squat2_1_frame610000.jpg squat2_1_frame8930000.jpg\n", 1594 | "squat2_1_frame3300000.jpg squat2_1_frame6110000.jpg 
squat2_1_frame8940000.jpg\n", 1595 | "squat2_1_frame330000.jpg squat2_1_frame6120000.jpg squat2_1_frame8950000.jpg\n", 1596 | "squat2_1_frame3310000.jpg squat2_1_frame6130000.jpg squat2_1_frame8960000.jpg\n", 1597 | "squat2_1_frame3320000.jpg squat2_1_frame6140000.jpg squat2_1_frame8970000.jpg\n", 1598 | "squat2_1_frame3330000.jpg squat2_1_frame6150000.jpg squat2_1_frame8980000.jpg\n", 1599 | "squat2_1_frame3340000.jpg squat2_1_frame6160000.jpg squat2_1_frame8990000.jpg\n", 1600 | "squat2_1_frame3350000.jpg squat2_1_frame6170000.jpg squat2_1_frame9000000.jpg\n", 1601 | "squat2_1_frame3360000.jpg squat2_1_frame6180000.jpg squat2_1_frame900000.jpg\n", 1602 | "squat2_1_frame3370000.jpg squat2_1_frame6190000.jpg squat2_1_frame90000.jpg\n", 1603 | "squat2_1_frame3380000.jpg squat2_1_frame6200000.jpg squat2_1_frame9010000.jpg\n", 1604 | "squat2_1_frame3390000.jpg squat2_1_frame620000.jpg squat2_1_frame9020000.jpg\n", 1605 | "squat2_1_frame3400000.jpg squat2_1_frame6210000.jpg squat2_1_frame9030000.jpg\n", 1606 | "squat2_1_frame340000.jpg squat2_1_frame6220000.jpg squat2_1_frame9040000.jpg\n", 1607 | "squat2_1_frame3410000.jpg squat2_1_frame6230000.jpg squat2_1_frame9050000.jpg\n", 1608 | "squat2_1_frame3420000.jpg squat2_1_frame6240000.jpg squat2_1_frame9060000.jpg\n", 1609 | "squat2_1_frame3430000.jpg squat2_1_frame6250000.jpg squat2_1_frame9070000.jpg\n", 1610 | "squat2_1_frame3440000.jpg squat2_1_frame6260000.jpg squat2_1_frame9080000.jpg\n", 1611 | "squat2_1_frame3450000.jpg squat2_1_frame6270000.jpg squat2_1_frame9090000.jpg\n", 1612 | "squat2_1_frame3460000.jpg squat2_1_frame6280000.jpg squat2_1_frame9100000.jpg\n", 1613 | "squat2_1_frame3470000.jpg squat2_1_frame6290000.jpg squat2_1_frame910000.jpg\n", 1614 | "squat2_1_frame3480000.jpg squat2_1_frame6300000.jpg squat2_1_frame9110000.jpg\n", 1615 | "squat2_1_frame3490000.jpg squat2_1_frame630000.jpg squat2_1_frame9120000.jpg\n", 1616 | "squat2_1_frame3500000.jpg squat2_1_frame6310000.jpg 
squat2_1_frame9130000.jpg\n", 1617 | "squat2_1_frame350000.jpg squat2_1_frame6320000.jpg squat2_1_frame9140000.jpg\n", 1618 | "squat2_1_frame3510000.jpg squat2_1_frame6330000.jpg squat2_1_frame9150000.jpg\n", 1619 | "squat2_1_frame3520000.jpg squat2_1_frame6340000.jpg squat2_1_frame9160000.jpg\n", 1620 | "squat2_1_frame3530000.jpg squat2_1_frame6350000.jpg squat2_1_frame9170000.jpg\n", 1621 | "squat2_1_frame3540000.jpg squat2_1_frame6360000.jpg squat2_1_frame9180000.jpg\n", 1622 | "squat2_1_frame3550000.jpg squat2_1_frame6370000.jpg squat2_1_frame9190000.jpg\n", 1623 | "squat2_1_frame3560000.jpg squat2_1_frame6380000.jpg squat2_1_frame9200000.jpg\n", 1624 | "squat2_1_frame3570000.jpg squat2_1_frame6390000.jpg squat2_1_frame920000.jpg\n", 1625 | "squat2_1_frame3580000.jpg squat2_1_frame6400000.jpg squat2_1_frame9210000.jpg\n", 1626 | "squat2_1_frame3590000.jpg squat2_1_frame640000.jpg squat2_1_frame9220000.jpg\n", 1627 | "squat2_1_frame3600000.jpg squat2_1_frame6410000.jpg squat2_1_frame9230000.jpg\n", 1628 | "squat2_1_frame360000.jpg squat2_1_frame6420000.jpg squat2_1_frame9240000.jpg\n", 1629 | "squat2_1_frame3610000.jpg squat2_1_frame6430000.jpg squat2_1_frame9250000.jpg\n", 1630 | "squat2_1_frame3620000.jpg squat2_1_frame6440000.jpg squat2_1_frame9260000.jpg\n", 1631 | "squat2_1_frame3630000.jpg squat2_1_frame6450000.jpg squat2_1_frame9270000.jpg\n", 1632 | "squat2_1_frame3640000.jpg squat2_1_frame6460000.jpg squat2_1_frame9280000.jpg\n", 1633 | "squat2_1_frame3650000.jpg squat2_1_frame6470000.jpg squat2_1_frame9290000.jpg\n", 1634 | "squat2_1_frame3660000.jpg squat2_1_frame6480000.jpg squat2_1_frame9300000.jpg\n", 1635 | "squat2_1_frame3670000.jpg squat2_1_frame6490000.jpg squat2_1_frame930000.jpg\n", 1636 | "squat2_1_frame3680000.jpg squat2_1_frame6500000.jpg squat2_1_frame9310000.jpg\n", 1637 | "squat2_1_frame3690000.jpg squat2_1_frame650000.jpg squat2_1_frame9320000.jpg\n", 1638 | "squat2_1_frame3700000.jpg squat2_1_frame6510000.jpg 
squat2_1_frame9330000.jpg\n", 1639 | "squat2_1_frame370000.jpg squat2_1_frame6520000.jpg squat2_1_frame9340000.jpg\n", 1640 | "squat2_1_frame3710000.jpg squat2_1_frame6530000.jpg squat2_1_frame9350000.jpg\n", 1641 | "squat2_1_frame3720000.jpg squat2_1_frame6540000.jpg squat2_1_frame9360000.jpg\n", 1642 | "squat2_1_frame3730000.jpg squat2_1_frame6550000.jpg squat2_1_frame940000.jpg\n", 1643 | "squat2_1_frame3740000.jpg squat2_1_frame6560000.jpg squat2_1_frame950000.jpg\n", 1644 | "squat2_1_frame3750000.jpg squat2_1_frame6570000.jpg squat2_1_frame960000.jpg\n", 1645 | "squat2_1_frame3760000.jpg squat2_1_frame6580000.jpg squat2_1_frame970000.jpg\n", 1646 | "squat2_1_frame3770000.jpg squat2_1_frame6590000.jpg squat2_1_frame980000.jpg\n", 1647 | "squat2_1_frame3780000.jpg squat2_1_frame6600000.jpg squat2_1_frame990000.jpg\n", 1648 | "squat2_1_frame3790000.jpg squat2_1_frame660000.jpg\n", 1649 | "squat2_1_frame3800000.jpg squat2_1_frame6610000.jpg\n" 1650 | ], 1651 | "name": "stdout" 1652 | } 1653 | ] 1654 | }, 1655 | { 1656 | "cell_type": "code", 1657 | "metadata": { 1658 | "id": "4qhrW-SSabEW", 1659 | "colab_type": "code", 1660 | "colab": { 1661 | "base_uri": "https://localhost:8080/", 1662 | "height": 136 1663 | }, 1664 | "outputId": "6a67635c-8f1d-47be-9eaa-bd0f9fd1e7b2" 1665 | }, 1666 | "source": [ 1667 | "train_generator.class_indices" 1668 | ], 1669 | "execution_count": 106, 1670 | "outputs": [ 1671 | { 1672 | "output_type": "execute_result", 1673 | "data": { 1674 | "text/plain": [ 1675 | "{'burpees': 0,\n", 1676 | " 'extras': 1,\n", 1677 | " 'jj': 2,\n", 1678 | " 'mcs': 3,\n", 1679 | " 'planks': 4,\n", 1680 | " 'pushups': 5,\n", 1681 | " 'squats': 6}" 1682 | ] 1683 | }, 1684 | "metadata": { 1685 | "tags": [] 1686 | }, 1687 | "execution_count": 106 1688 | } 1689 | ] 1690 | }, 1691 | { 1692 | "cell_type": "code", 1693 | "metadata": { 1694 | "id": "yfd6Mzo8czOa", 1695 | "colab_type": "code", 1696 | "colab": { 1697 | "base_uri": "https://localhost:8080/", 1698 | 
"height": 238 1699 | }, 1700 | "outputId": "09845248-7bd5-48f8-9417-1607937fad84" 1701 | }, 1702 | "source": [ 1703 | "import cv2, os\n", 1704 | "print(cv2.__version__)\n", 1705 | "videos = os.listdir(\"/content/drive/My Drive/exc3\")\n", 1706 | "for video_name in videos:\n", 1707 | " print(video_name)\n", 1708 | " vidcap = cv2.VideoCapture(\"/content/drive/My Drive/exc3/\"+video_name)\n", 1709 | " success,image = vidcap.read()\n", 1710 | " count = 0\n", 1711 | " success = True\n", 1712 | " print(video_name)\n", 1713 | " os.mkdir(video_name)\n", 1714 | " while success:\n", 1715 | " if count%10000 == 0: \n", 1716 | " cv2.imwrite(video_name+\"/frame%d.jpg\" % count, image) # save frame as JPEG file\n", 1717 | " success,image = vidcap.read()\n", 1718 | " #print('Read a new frame: ', success)\n", 1719 | " count += 1" 1720 | ], 1721 | "execution_count": 123, 1722 | "outputs": [ 1723 | { 1724 | "output_type": "stream", 1725 | "text": [ 1726 | "3.4.3\n", 1727 | "Thumbs.db\n", 1728 | "Thumbs.db\n", 1729 | "GroupWorkoutSideView(0).m4v\n", 1730 | "GroupWorkoutSideView(0).m4v\n", 1731 | "GroupWorkoutVideo(1).m4v\n", 1732 | "GroupWorkoutVideo(1).m4v\n", 1733 | "GroupWorkoutVideo(0).m4v\n", 1734 | "GroupWorkoutVideo(0).m4v\n", 1735 | "SingleWorkoutVideo(0).m4v\n", 1736 | "SingleWorkoutVideo(0).m4v\n", 1737 | "SingleWorkoutSideView(0).m4v\n", 1738 | "SingleWorkoutSideView(0).m4v\n" 1739 | ], 1740 | "name": "stdout" 1741 | } 1742 | ] 1743 | }, 1744 | { 1745 | "cell_type": "code", 1746 | "metadata": { 1747 | "id": "QLSI7-BHiTtx", 1748 | "colab_type": "code", 1749 | "colab": {} 1750 | }, 1751 | "source": [ 1752 | "" 1753 | ], 1754 | "execution_count": 0, 1755 | "outputs": [] 1756 | } 1757 | ] 1758 | } --------------------------------------------------------------------------------