├── readme.md ├── 000_TensorFlow_Installation_M1.md ├── 013_CNN_006_Increasing_Model_Complexity.ipynb ├── 001_TensorFlow_Test.ipynb ├── 015_CNN_008_Transfer_Learning.ipynb ├── 006_Callbacks.ipynb ├── 005_Optimize_Neural_Network_Architecture.ipynb └── 007_Custom_Callbacks.ipynb /readme.md: -------------------------------------------------------------------------------- 1 | # Better Data Science | TensorFlow 2 | - A repo containing notebooks and examples of TensorFlow 2.5+ -------------------------------------------------------------------------------- /000_TensorFlow_Installation_M1.md: -------------------------------------------------------------------------------- 1 | # How to Install TensorFlow 2.5.0 on the M1 Mac 2 | 3 | 1. Install Homebrew: 4 | ``` 5 | /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" 6 | ``` 7 | 2. Install Xcode build tools: 8 | ``` 9 | xcode-select --install 10 | ``` 11 | 3. Download and install Miniforge from the following URL: 12 | - https://github.com/conda-forge/miniforge 13 | - Download the ARM64 version 14 | 4. Create a new virtual environment and activate it: 15 | ``` 16 | conda create --name env_tensorflow python=3.9 17 | conda activate env_tensorflow 18 | ``` 19 | 5. Install TensorFlow: 20 | ``` 21 | conda install -c apple tensorflow-deps 22 | pip install tensorflow-macos # it should fail 23 | pip install tensorflow-metal 24 | pip install tensorflow-macos --no-dependencies 25 | pip install flatbuffers --no-dependencies 26 | ``` 27 | 6. 
Verify the installation was successful: 28 | ``` 29 | python 30 | 31 | >>> import tensorflow as tf 32 | >>> tf.__version__ 33 | 2.5.0 34 | ``` -------------------------------------------------------------------------------- /013_CNN_006_Increasing_Model_Complexity.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "560ebc68-d9ad-4020-8d46-b2877b748710", 6 | "metadata": {}, 7 | "source": [ 8 | "# CNN 6 - Do Larger Model Lead to Better Performance?\n", 9 | "- Dataset:\n", 10 | " - https://www.kaggle.com/shaunthesheep/microsoft-catsvsdogs-dataset\n", 11 | "- The dataset isn't deep-learning-compatible by default, here's how to preprocess it:\n", 12 | " - Video: https://www.youtube.com/watch?v=O7EV2BjOXus&ab_channel=BetterDataScience\n", 13 | " - Article: https://towardsdatascience.com/tensorflow-for-image-classification-top-3-prerequisites-for-deep-learning-projects-34c549c89e42\n", 14 | " - Code: https://github.com/better-data-science/TensorFlow/blob/main/008_CNN_001_Working_With_Image_Data.ipynb\n", 15 | "\n", 16 | "**What you should know by now:**\n", 17 | "- How to preprocess image data\n", 18 | "- How to load image data from a directory\n", 19 | "- What's a convolution, pooling, and a fully-connected layer\n", 20 | "- Categorical vs. binary classification\n", 21 | "\n", 22 | "
\n", 23 | "\n", 24 | "- First things first, let's import the libraries\n", 25 | "- The models we'll declare today will have more layers than the ones before\n", 26 | " - We'll implement individual classes from TensorFlow" 27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "execution_count": 1, 32 | "id": "d1148f14-6117-4ab2-8d46-96807f6c8061", 33 | "metadata": {}, 34 | "outputs": [], 35 | "source": [ 36 | "import os\n", 37 | "os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'\n", 38 | "\n", 39 | "import warnings\n", 40 | "warnings.filterwarnings('ignore')\n", 41 | "\n", 42 | "import tensorflow as tf\n", 43 | "from tensorflow.keras import Sequential\n", 44 | "from tensorflow.keras.layers import Conv2D, MaxPool2D, Flatten, Dense, Dropout\n", 45 | "from tensorflow.keras.preprocessing.image import ImageDataGenerator\n", 46 | "from tensorflow.keras.losses import categorical_crossentropy\n", 47 | "from tensorflow.keras.optimizers import Adam\n", 48 | "from tensorflow.keras.metrics import BinaryAccuracy\n", 49 | "\n", 50 | "tf.random.set_seed(42)\n", 51 | "physical_devices = tf.config.list_physical_devices('GPU')\n", 52 | "try:\n", 53 | " tf.config.experimental.set_memory_growth(physical_devices[0], True)\n", 54 | "except:\n", 55 | " pass" 56 | ] 57 | }, 58 | { 59 | "cell_type": "markdown", 60 | "id": "28bf5a38-a5d4-46f3-bf99-76028a0a0581", 61 | "metadata": {}, 62 | "source": [ 63 | "- I'm using Nvidia RTX 3060 TI" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": 2, 69 | "id": "699c796a-945c-41f8-9c91-8f051243bdaa", 70 | "metadata": {}, 71 | "outputs": [ 72 | { 73 | "data": { 74 | "text/plain": [ 75 | "[PhysicalDevice(name='/physical_device:GPU:0', device_type='GPU')]" 76 | ] 77 | }, 78 | "execution_count": 2, 79 | "metadata": {}, 80 | "output_type": "execute_result" 81 | } 82 | ], 83 | "source": [ 84 | "physical_devices" 85 | ] 86 | }, 87 | { 88 | "cell_type": "markdown", 89 | "id": "7f09eb01-fa4b-44df-b666-e07b9e5eea9f", 90 | "metadata": {}, 91 | 
"source": [ 92 | "
\n", 93 | "\n", 94 | "## Load in the data\n", 95 | "- Use `ImageDataGenerator` to convert image matrices to 0-1 range\n", 96 | "- Load in the images from directories and convert them to 224x224x3\n", 97 | "- For memory concerns, we'll lower the batch size:" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": 3, 103 | "id": "25fd6610-ba6d-4ffb-8add-cb7e3120eb40", 104 | "metadata": {}, 105 | "outputs": [ 106 | { 107 | "name": "stdout", 108 | "output_type": "stream", 109 | "text": [ 110 | "Found 20030 images belonging to 2 classes.\n", 111 | "Found 2478 images belonging to 2 classes.\n" 112 | ] 113 | } 114 | ], 115 | "source": [ 116 | "train_datagen = ImageDataGenerator(rescale=1/255.0)\n", 117 | "valid_datagen = ImageDataGenerator(rescale=1/255.0)\n", 118 | "\n", 119 | "train_data = train_datagen.flow_from_directory(\n", 120 | " directory='data/train/',\n", 121 | " target_size=(224, 224),\n", 122 | " class_mode='categorical',\n", 123 | " batch_size=32,\n", 124 | " shuffle=True,\n", 125 | " seed=42\n", 126 | ")\n", 127 | "\n", 128 | "valid_data = valid_datagen.flow_from_directory(\n", 129 | " directory='data/validation/',\n", 130 | " target_size=(224, 224),\n", 131 | " class_mode='categorical',\n", 132 | " batch_size=32,\n", 133 | " seed=42\n", 134 | ")" 135 | ] 136 | }, 137 | { 138 | "cell_type": "markdown", 139 | "id": "b13f854f-bb13-4927-80ee-61f6e9117bd2", 140 | "metadata": {}, 141 | "source": [ 142 | "
\n", 143 | "\n", 144 | "## Model 1\n", 145 | "- Block 1: Conv, Conv, Pool\n", 146 | "- Block 2: Conv, Conv, Pool\n", 147 | "- Block 3: Flatten, Dense\n", 148 | "- Output\n", 149 | "\n", 150 | "
\n", 151 | "\n", 152 | "- We won't mess with the hyperparameters today" 153 | ] 154 | }, 155 | { 156 | "cell_type": "code", 157 | "execution_count": 5, 158 | "id": "0e293a9c-7fcf-4cbc-b5fd-55a81e1394f7", 159 | "metadata": {}, 160 | "outputs": [ 161 | { 162 | "name": "stdout", 163 | "output_type": "stream", 164 | "text": [ 165 | "Epoch 1/10\n", 166 | "626/626 [==============================] - 40s 61ms/step - loss: 0.6586 - accuracy: 0.6149 - val_loss: 0.6115 - val_accuracy: 0.6804\n", 167 | "Epoch 2/10\n", 168 | "626/626 [==============================] - 37s 59ms/step - loss: 0.5223 - accuracy: 0.7422 - val_loss: 0.5265 - val_accuracy: 0.7554\n", 169 | "Epoch 3/10\n", 170 | "626/626 [==============================] - 38s 60ms/step - loss: 0.4073 - accuracy: 0.8125 - val_loss: 0.5061 - val_accuracy: 0.7571\n", 171 | "Epoch 4/10\n", 172 | "626/626 [==============================] - 38s 61ms/step - loss: 0.2476 - accuracy: 0.8942 - val_loss: 0.6336 - val_accuracy: 0.7672\n", 173 | "Epoch 5/10\n", 174 | "626/626 [==============================] - 38s 61ms/step - loss: 0.1004 - accuracy: 0.9625 - val_loss: 1.0141 - val_accuracy: 0.7571\n", 175 | "Epoch 6/10\n", 176 | "626/626 [==============================] - 39s 62ms/step - loss: 0.0419 - accuracy: 0.9863 - val_loss: 1.3990 - val_accuracy: 0.7700\n", 177 | "Epoch 7/10\n", 178 | "626/626 [==============================] - 38s 61ms/step - loss: 0.0352 - accuracy: 0.9894 - val_loss: 1.2963 - val_accuracy: 0.7680\n", 179 | "Epoch 8/10\n", 180 | "626/626 [==============================] - 39s 62ms/step - loss: 0.0263 - accuracy: 0.9932 - val_loss: 1.4017 - val_accuracy: 0.7684\n", 181 | "Epoch 9/10\n", 182 | "626/626 [==============================] - 38s 61ms/step - loss: 0.0263 - accuracy: 0.9940 - val_loss: 1.3149 - val_accuracy: 0.7780\n", 183 | "Epoch 10/10\n", 184 | "626/626 [==============================] - 38s 61ms/step - loss: 0.0237 - accuracy: 0.9940 - val_loss: 1.6602 - val_accuracy: 0.7482\n" 185 | ] 186 | } 
187 | ], 188 | "source": [ 189 | "model_1 = tf.keras.Sequential([\n", 190 | " Conv2D(filters=32, kernel_size=(3, 3), input_shape=(224, 224, 3), activation='relu'),\n", 191 | " Conv2D(filters=32, kernel_size=(3, 3), activation='relu'),\n", 192 | " MaxPool2D(pool_size=(2, 2), padding='same'),\n", 193 | " \n", 194 | " Conv2D(filters=64, kernel_size=(3, 3), activation='relu'),\n", 195 | " Conv2D(filters=64, kernel_size=(3, 3), activation='relu'),\n", 196 | " MaxPool2D(pool_size=(2, 2), padding='same'),\n", 197 | " \n", 198 | " Flatten(),\n", 199 | " Dense(units=128, activation='relu'),\n", 200 | " Dense(units=2, activation='softmax')\n", 201 | "])\n", 202 | "\n", 203 | "\n", 204 | "model_1.compile(\n", 205 | " loss=categorical_crossentropy,\n", 206 | " optimizer=Adam(),\n", 207 | " metrics=[BinaryAccuracy(name='accuracy')]\n", 208 | ")\n", 209 | "model_1_history = model_1.fit(\n", 210 | " train_data,\n", 211 | " validation_data=valid_data,\n", 212 | " epochs=10\n", 213 | ")" 214 | ] 215 | }, 216 | { 217 | "cell_type": "markdown", 218 | "id": "f516c509-7d70-4fdc-9661-f4a180b1a64d", 219 | "metadata": {}, 220 | "source": [ 221 | "
\n", 222 | "\n", 223 | "- Not bad, but we got 75% accuracy on the validation set in notebook 010\n", 224 | "- Will adding complexity to the model increase the accuracy?\n", 225 | "\n", 226 | "## Model 2\n", 227 | "- Block 1: Conv, Conv, Pool\n", 228 | "- Block 2: Conv, Conv, Pool\n", 229 | "- Block 3: Conv, Conv, Pool\n", 230 | "- Block 4: Flatten, Dense\n", 231 | "- Output\n", 232 | "\n", 233 | "
\n", 234 | "\n", 235 | "- This architecture is a bit of an overkill for our dataset\n", 236 | "- The model isn't learning at all:" 237 | ] 238 | }, 239 | { 240 | "cell_type": "code", 241 | "execution_count": 6, 242 | "id": "7cb9fb77-1ead-426f-bf9f-2a959763f7e9", 243 | "metadata": {}, 244 | "outputs": [ 245 | { 246 | "name": "stdout", 247 | "output_type": "stream", 248 | "text": [ 249 | "Epoch 1/10\n", 250 | "626/626 [==============================] - 39s 62ms/step - loss: 0.7040 - accuracy: 0.4955 - val_loss: 0.6932 - val_accuracy: 0.5000\n", 251 | "Epoch 2/10\n", 252 | "626/626 [==============================] - 39s 62ms/step - loss: 0.6932 - accuracy: 0.4959 - val_loss: 0.6932 - val_accuracy: 0.5000\n", 253 | "Epoch 3/10\n", 254 | "626/626 [==============================] - 39s 62ms/step - loss: 0.6932 - accuracy: 0.4987 - val_loss: 0.6932 - val_accuracy: 0.5000\n", 255 | "Epoch 4/10\n", 256 | "626/626 [==============================] - 39s 62ms/step - loss: 0.6932 - accuracy: 0.4993 - val_loss: 0.6932 - val_accuracy: 0.5000\n", 257 | "Epoch 5/10\n", 258 | "626/626 [==============================] - 39s 62ms/step - loss: 0.6932 - accuracy: 0.5006 - val_loss: 0.6932 - val_accuracy: 0.5000\n", 259 | "Epoch 6/10\n", 260 | "626/626 [==============================] - 40s 64ms/step - loss: 0.6932 - accuracy: 0.4924 - val_loss: 0.6932 - val_accuracy: 0.5000\n", 261 | "Epoch 7/10\n", 262 | "626/626 [==============================] - 40s 64ms/step - loss: 0.6932 - accuracy: 0.5020 - val_loss: 0.6932 - val_accuracy: 0.5000\n", 263 | "Epoch 8/10\n", 264 | "626/626 [==============================] - 40s 63ms/step - loss: 0.6932 - accuracy: 0.5023 - val_loss: 0.6932 - val_accuracy: 0.5000\n", 265 | "Epoch 9/10\n", 266 | "626/626 [==============================] - 40s 64ms/step - loss: 0.6932 - accuracy: 0.5003 - val_loss: 0.6932 - val_accuracy: 0.5000\n", 267 | "Epoch 10/10\n", 268 | "626/626 [==============================] - 40s 64ms/step - loss: 0.6932 - accuracy: 0.5034 
- val_loss: 0.6932 - val_accuracy: 0.5000\n" 269 | ] 270 | } 271 | ], 272 | "source": [ 273 | "model_2 = Sequential([\n", 274 | " Conv2D(filters=32, kernel_size=(3, 3), input_shape=(224, 224, 3), activation='relu'),\n", 275 | " Conv2D(filters=32, kernel_size=(3, 3), activation='relu'),\n", 276 | " MaxPool2D(pool_size=(2, 2), padding='same'),\n", 277 | " \n", 278 | " Conv2D(filters=64, kernel_size=(3, 3), activation='relu'),\n", 279 | " Conv2D(filters=64, kernel_size=(3, 3), activation='relu'),\n", 280 | " MaxPool2D(pool_size=(2, 2), padding='same'),\n", 281 | " \n", 282 | " Conv2D(filters=128, kernel_size=(3, 3), activation='relu'),\n", 283 | " Conv2D(filters=128, kernel_size=(3, 3), activation='relu'),\n", 284 | " MaxPool2D(pool_size=(2, 2), padding='same'),\n", 285 | " \n", 286 | " Flatten(),\n", 287 | " Dense(units=128, activation='relu'),\n", 288 | " Dense(units=2, activation='softmax')\n", 289 | "])\n", 290 | "\n", 291 | "\n", 292 | "model_2.compile(\n", 293 | " loss=categorical_crossentropy,\n", 294 | " optimizer=Adam(),\n", 295 | " metrics=[BinaryAccuracy(name='accuracy')]\n", 296 | ")\n", 297 | "model_2_history = model_2.fit(\n", 298 | " train_data,\n", 299 | " validation_data=valid_data,\n", 300 | " epochs=10\n", 301 | ")" 302 | ] 303 | }, 304 | { 305 | "cell_type": "markdown", 306 | "id": "12dea366-50b9-4310-94ea-f23a9d3a551d", 307 | "metadata": {}, 308 | "source": [ 309 | "
\n", 310 | "\n", 311 | "- When that happens, you can try experimenting with the learning rate and other parameters\n", 312 | "- Let's dial it down a bit next\n", 313 | "\n", 314 | "
\n", 315 | "\n", 316 | "## Model 3 \n", 317 | "- Block 1: Conv, Conv, Pool\n", 318 | "- Block 2: Conv, Conv, Pool\n", 319 | "- Block 3: Flatten, Dense, Dropout, Dense\n", 320 | "- Output\n", 321 | "\n", 322 | "
\n", 323 | "\n", 324 | "- The first model was better than the second\n", 325 | "- We can try adding a dropout layer as a regularizer and tweaking the fully connected layers:" 326 | ] 327 | }, 328 | { 329 | "cell_type": "code", 330 | "execution_count": 7, 331 | "id": "3710c286-833c-40e5-8522-57411c146e57", 332 | "metadata": {}, 333 | "outputs": [ 334 | { 335 | "name": "stdout", 336 | "output_type": "stream", 337 | "text": [ 338 | "Epoch 1/10\n", 339 | "626/626 [==============================] - 39s 62ms/step - loss: 0.7498 - accuracy: 0.5622 - val_loss: 0.6580 - val_accuracy: 0.6295\n", 340 | "Epoch 2/10\n", 341 | "626/626 [==============================] - 39s 62ms/step - loss: 0.6101 - accuracy: 0.6744 - val_loss: 0.5645 - val_accuracy: 0.7159\n", 342 | "Epoch 3/10\n", 343 | "626/626 [==============================] - 39s 62ms/step - loss: 0.5007 - accuracy: 0.7562 - val_loss: 0.5734 - val_accuracy: 0.7070\n", 344 | "Epoch 4/10\n", 345 | "626/626 [==============================] - 39s 63ms/step - loss: 0.3297 - accuracy: 0.8585 - val_loss: 0.7222 - val_accuracy: 0.7038\n", 346 | "Epoch 5/10\n", 347 | "626/626 [==============================] - 40s 64ms/step - loss: 0.1246 - accuracy: 0.9556 - val_loss: 1.1581 - val_accuracy: 0.6965\n", 348 | "Epoch 6/10\n", 349 | "626/626 [==============================] - 39s 63ms/step - loss: 0.0786 - accuracy: 0.9786 - val_loss: 0.8357 - val_accuracy: 0.6832\n", 350 | "Epoch 7/10\n", 351 | "626/626 [==============================] - 40s 64ms/step - loss: 0.0425 - accuracy: 0.9877 - val_loss: 1.3557 - val_accuracy: 0.7006\n", 352 | "Epoch 8/10\n", 353 | "626/626 [==============================] - 40s 64ms/step - loss: 0.0277 - accuracy: 0.9934 - val_loss: 2.0383 - val_accuracy: 0.6780\n", 354 | "Epoch 9/10\n", 355 | "626/626 [==============================] - 40s 64ms/step - loss: 0.0334 - accuracy: 0.9926 - val_loss: 1.0312 - val_accuracy: 0.6913\n", 356 | "Epoch 10/10\n", 357 | "626/626 [==============================] - 40s 
64ms/step - loss: 0.0298 - accuracy: 0.9925 - val_loss: 1.5798 - val_accuracy: 0.6985\n" 358 | ] 359 | } 360 | ], 361 | "source": [ 362 | "model_3 = tf.keras.Sequential([\n", 363 | " Conv2D(filters=32, kernel_size=(3, 3), input_shape=(224, 224, 3), activation='relu'),\n", 364 | " Conv2D(filters=32, kernel_size=(3, 3), activation='relu'),\n", 365 | " MaxPool2D(pool_size=(2, 2), padding='same'),\n", 366 | " \n", 367 | " Conv2D(filters=64, kernel_size=(3, 3), activation='relu'),\n", 368 | " Conv2D(filters=64, kernel_size=(3, 3), activation='relu'),\n", 369 | " MaxPool2D(pool_size=(2, 2), padding='same'),\n", 370 | " \n", 371 | " Flatten(),\n", 372 | " Dense(units=512, activation='relu'),\n", 373 | " Dropout(rate=0.3),\n", 374 | " Dense(units=128),\n", 375 | " Dense(units=2, activation='softmax')\n", 376 | "])\n", 377 | "\n", 378 | "model_3.compile(\n", 379 | " loss=categorical_crossentropy,\n", 380 | " optimizer=Adam(),\n", 381 | " metrics=[BinaryAccuracy(name='accuracy')]\n", 382 | ")\n", 383 | "\n", 384 | "model_3_history = model_3.fit(\n", 385 | " train_data,\n", 386 | " validation_data=valid_data,\n", 387 | " epochs=10\n", 388 | ")" 389 | ] 390 | }, 391 | { 392 | "cell_type": "markdown", 393 | "id": "4f336697-5e77-47f1-a8b2-a572a720ec47", 394 | "metadata": {}, 395 | "source": [ 396 | "
\n", 397 | "\n", 398 | "- It made the model worse\n", 399 | "- More complex models don't necessarily lead to an increase in performance\n", 400 | "\n", 401 | "
\n", 402 | "\n", 403 | "## Conclusion\n", 404 | "- There you have it - we've been focusing on the wrong thing from the start\n", 405 | "- Our model architecture in the notebook 010 was solid\n", 406 | " - Adding more layers and complexity decreases the predictive power\n", 407 | "- We should shift our focus to improving the dataset quality\n", 408 | "- The following notebook will teach you all about **data augmentation**, and you'll see how it increases the power of our model\n", 409 | "- After that you'll take your models to new heights with **transfer learning**, and you'll see why coming up with custom architectures is a waste of time in most cases" 410 | ] 411 | } 412 | ], 413 | "metadata": { 414 | "kernelspec": { 415 | "display_name": "Python 3 (ipykernel)", 416 | "language": "python", 417 | "name": "python3" 418 | }, 419 | "language_info": { 420 | "codemirror_mode": { 421 | "name": "ipython", 422 | "version": 3 423 | }, 424 | "file_extension": ".py", 425 | "mimetype": "text/x-python", 426 | "name": "python", 427 | "nbconvert_exporter": "python", 428 | "pygments_lexer": "ipython3", 429 | "version": "3.9.4" 430 | } 431 | }, 432 | "nbformat": 4, 433 | "nbformat_minor": 5 434 | } 435 | -------------------------------------------------------------------------------- /001_TensorFlow_Test.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "3c28503e-18ad-49ba-8bb3-d019016dbf16", 6 | "metadata": {}, 7 | "source": [ 8 | "# 001 - TensorFlow Installation Test\n", 9 | "***\n", 10 | "\n", 11 | "## Imports" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": 1, 17 | "id": "f95104a3-71c6-49a1-bb2d-8af9c1a233fb", 18 | "metadata": {}, 19 | "outputs": [ 20 | { 21 | "name": "stdout", 22 | "output_type": "stream", 23 | "text": [ 24 | "Init Plugin\n" 25 | ] 26 | }, 27 | { 28 | "data": { 29 | "text/plain": [ 30 | "'2.5.0'" 31 | ] 32 | }, 33 | "execution_count": 
1, 34 | "metadata": {}, 35 | "output_type": "execute_result" 36 | }, 37 | { 38 | "name": "stdout", 39 | "output_type": "stream", 40 | "text": [ 41 | "Init Graph Optimizer\n", 42 | "Init Kernel\n" 43 | ] 44 | } 45 | ], 46 | "source": [ 47 | "import tensorflow as tf\n", 48 | "tf.__version__" 49 | ] 50 | }, 51 | { 52 | "cell_type": "markdown", 53 | "id": "c14914e3-f00d-4a06-8dfb-ce9dfd0ebf31", 54 | "metadata": {}, 55 | "source": [ 56 | "
\n", 57 | "\n", 58 | "## Available devices for training" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": 2, 64 | "id": "32e6b04e-9af3-4841-9f08-a203d6bdd0c1", 65 | "metadata": {}, 66 | "outputs": [ 67 | { 68 | "data": { 69 | "text/plain": [ 70 | "[PhysicalDevice(name='/physical_device:CPU:0', device_type='CPU'),\n", 71 | " PhysicalDevice(name='/physical_device:GPU:0', device_type='GPU')]" 72 | ] 73 | }, 74 | "execution_count": 2, 75 | "metadata": {}, 76 | "output_type": "execute_result" 77 | } 78 | ], 79 | "source": [ 80 | "tf.config.list_physical_devices()" 81 | ] 82 | }, 83 | { 84 | "cell_type": "markdown", 85 | "id": "847e8e9d-2ba1-4fa1-a6e1-1d053d275982", 86 | "metadata": {}, 87 | "source": [ 88 | "
\n", 89 | "\n", 90 | "## Dummy dataset" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 12, 96 | "id": "9f05f334-09b5-48f1-92dc-514c2c893ec2", 97 | "metadata": {}, 98 | "outputs": [ 99 | { 100 | "name": "stdout", 101 | "output_type": "stream", 102 | "text": [ 103 | "Metal device set to: Apple M1\n" 104 | ] 105 | }, 106 | { 107 | "name": "stderr", 108 | "output_type": "stream", 109 | "text": [ 110 | "2021-09-28 07:37:00.881092: I tensorflow/core/common_runtime/pluggable_device/pluggable_device_factory.cc:305] Could not identify NUMA node of platform GPU ID 0, defaulting to 0. Your kernel may not have been built with NUMA support.\n", 111 | "2021-09-28 07:37:00.881867: I tensorflow/core/common_runtime/pluggable_device/pluggable_device_factory.cc:271] Created TensorFlow device (/job:localhost/replica:0/task:0/device:GPU:0 with 0 MB memory) -> physical PluggableDevice (device: 0, name: METAL, pci bus id: )\n" 112 | ] 113 | } 114 | ], 115 | "source": [ 116 | "import numpy as np\n", 117 | "\n", 118 | "X = np.arange(1, 101, step=0.1)\n", 119 | "y = [x**2 for x in X]\n", 120 | "\n", 121 | "X = tf.cast(tf.constant(X), dtype=tf.float32)\n", 122 | "y = tf.cast(tf.constant(y), dtype=tf.float32)" 123 | ] 124 | }, 125 | { 126 | "cell_type": "markdown", 127 | "id": "c78722a1-8cb9-41b8-b114-7cc90af4e4a3", 128 | "metadata": {}, 129 | "source": [ 130 | "
\n", 131 | "\n", 132 | "## Model and training" 133 | ] 134 | }, 135 | { 136 | "cell_type": "code", 137 | "execution_count": 16, 138 | "id": "0324cb21-9d45-4342-93d8-b589afe08a9e", 139 | "metadata": {}, 140 | "outputs": [ 141 | { 142 | "name": "stdout", 143 | "output_type": "stream", 144 | "text": [ 145 | "Epoch 1/100\n", 146 | "32/32 [==============================] - 0s 4ms/step - loss: 1397.4844 - mean_absolute_error: 1397.4844\n", 147 | "Epoch 2/100\n", 148 | " 1/32 [..............................] - ETA: 0s - loss: 1186.0442 - mean_absolute_error: 1186.0442" 149 | ] 150 | }, 151 | { 152 | "name": "stderr", 153 | "output_type": "stream", 154 | "text": [ 155 | "2021-09-28 07:40:28.525871: I tensorflow/core/grappler/optimizers/custom_graph_optimizer_registry.cc:112] Plugin optimizer for device_type GPU is enabled.\n" 156 | ] 157 | }, 158 | { 159 | "name": "stdout", 160 | "output_type": "stream", 161 | "text": [ 162 | "32/32 [==============================] - 0s 4ms/step - loss: 932.5007 - mean_absolute_error: 932.5007\n", 163 | "Epoch 3/100\n", 164 | "32/32 [==============================] - 0s 3ms/step - loss: 723.8837 - mean_absolute_error: 723.8837\n", 165 | "Epoch 4/100\n", 166 | "32/32 [==============================] - 0s 3ms/step - loss: 539.7209 - mean_absolute_error: 539.7209\n", 167 | "Epoch 5/100\n", 168 | "32/32 [==============================] - 0s 4ms/step - loss: 430.1161 - mean_absolute_error: 430.1161\n", 169 | "Epoch 6/100\n", 170 | "32/32 [==============================] - 0s 4ms/step - loss: 371.5009 - mean_absolute_error: 371.5009\n", 171 | "Epoch 7/100\n", 172 | "32/32 [==============================] - 0s 4ms/step - loss: 288.6484 - mean_absolute_error: 288.6484\n", 173 | "Epoch 8/100\n", 174 | "32/32 [==============================] - 0s 3ms/step - loss: 244.9536 - mean_absolute_error: 244.9536\n", 175 | "Epoch 9/100\n", 176 | "32/32 [==============================] - 0s 4ms/step - loss: 184.6690 - mean_absolute_error: 184.6690\n", 177 | 
"Epoch 10/100\n", 178 | "32/32 [==============================] - 0s 4ms/step - loss: 270.8843 - mean_absolute_error: 270.8843\n", 179 | "Epoch 11/100\n", 180 | "32/32 [==============================] - 0s 4ms/step - loss: 251.4684 - mean_absolute_error: 251.4684\n", 181 | "Epoch 12/100\n", 182 | "32/32 [==============================] - 0s 4ms/step - loss: 264.9601 - mean_absolute_error: 264.9601\n", 183 | "Epoch 13/100\n", 184 | "32/32 [==============================] - 0s 4ms/step - loss: 212.7753 - mean_absolute_error: 212.7753\n", 185 | "Epoch 14/100\n", 186 | "32/32 [==============================] - 0s 4ms/step - loss: 266.6929 - mean_absolute_error: 266.6929\n", 187 | "Epoch 15/100\n", 188 | "32/32 [==============================] - 0s 4ms/step - loss: 177.0505 - mean_absolute_error: 177.0505\n", 189 | "Epoch 16/100\n", 190 | "32/32 [==============================] - 0s 4ms/step - loss: 316.8542 - mean_absolute_error: 316.8542\n", 191 | "Epoch 17/100\n", 192 | "32/32 [==============================] - 0s 4ms/step - loss: 369.1585 - mean_absolute_error: 369.1585\n", 193 | "Epoch 18/100\n", 194 | "32/32 [==============================] - 0s 4ms/step - loss: 203.2409 - mean_absolute_error: 203.2409\n", 195 | "Epoch 19/100\n", 196 | "32/32 [==============================] - 0s 4ms/step - loss: 222.8471 - mean_absolute_error: 222.8471\n", 197 | "Epoch 20/100\n", 198 | "32/32 [==============================] - 0s 4ms/step - loss: 185.4706 - mean_absolute_error: 185.4706\n", 199 | "Epoch 21/100\n", 200 | "32/32 [==============================] - 0s 4ms/step - loss: 151.8288 - mean_absolute_error: 151.8288\n", 201 | "Epoch 22/100\n", 202 | "32/32 [==============================] - 0s 4ms/step - loss: 308.0082 - mean_absolute_error: 308.0082\n", 203 | "Epoch 23/100\n", 204 | "32/32 [==============================] - 0s 4ms/step - loss: 260.7388 - mean_absolute_error: 260.7388\n", 205 | "Epoch 24/100\n", 206 | "32/32 [==============================] - 0s 4ms/step - 
loss: 198.2821 - mean_absolute_error: 198.2821\n", 207 | "Epoch 25/100\n", 208 | "32/32 [==============================] - 0s 4ms/step - loss: 228.0280 - mean_absolute_error: 228.0280\n", 209 | "Epoch 26/100\n", 210 | "32/32 [==============================] - 0s 4ms/step - loss: 137.4012 - mean_absolute_error: 137.4012\n", 211 | "Epoch 27/100\n", 212 | "32/32 [==============================] - 0s 4ms/step - loss: 257.5800 - mean_absolute_error: 257.5800\n", 213 | "Epoch 28/100\n", 214 | "32/32 [==============================] - 0s 4ms/step - loss: 414.0735 - mean_absolute_error: 414.0735\n", 215 | "Epoch 29/100\n", 216 | "32/32 [==============================] - 0s 4ms/step - loss: 193.3530 - mean_absolute_error: 193.3530\n", 217 | "Epoch 30/100\n", 218 | "32/32 [==============================] - 0s 4ms/step - loss: 215.4861 - mean_absolute_error: 215.4861\n", 219 | "Epoch 31/100\n", 220 | "32/32 [==============================] - 0s 3ms/step - loss: 180.7227 - mean_absolute_error: 180.7227\n", 221 | "Epoch 32/100\n", 222 | "32/32 [==============================] - 0s 4ms/step - loss: 274.8415 - mean_absolute_error: 274.8415\n", 223 | "Epoch 33/100\n", 224 | "32/32 [==============================] - 0s 4ms/step - loss: 222.8818 - mean_absolute_error: 222.8818\n", 225 | "Epoch 34/100\n", 226 | "32/32 [==============================] - 0s 4ms/step - loss: 224.6321 - mean_absolute_error: 224.6321\n", 227 | "Epoch 35/100\n", 228 | "32/32 [==============================] - 0s 4ms/step - loss: 294.0774 - mean_absolute_error: 294.0774\n", 229 | "Epoch 36/100\n", 230 | "32/32 [==============================] - 0s 4ms/step - loss: 233.9464 - mean_absolute_error: 233.9464\n", 231 | "Epoch 37/100\n", 232 | "32/32 [==============================] - 0s 4ms/step - loss: 259.1333 - mean_absolute_error: 259.1333\n", 233 | "Epoch 38/100\n", 234 | "32/32 [==============================] - 0s 4ms/step - loss: 196.5505 - mean_absolute_error: 196.5505\n", 235 | "Epoch 39/100\n", 236 | 
"32/32 [==============================] - 0s 4ms/step - loss: 140.2408 - mean_absolute_error: 140.2408\n", 237 | "Epoch 40/100\n", 238 | "32/32 [==============================] - 0s 4ms/step - loss: 126.6778 - mean_absolute_error: 126.6778\n", 239 | "Epoch 41/100\n", 240 | "32/32 [==============================] - 0s 4ms/step - loss: 171.8206 - mean_absolute_error: 171.8206\n", 241 | "Epoch 42/100\n", 242 | "32/32 [==============================] - 0s 4ms/step - loss: 154.8684 - mean_absolute_error: 154.8684\n", 243 | "Epoch 43/100\n", 244 | "32/32 [==============================] - 0s 4ms/step - loss: 183.5418 - mean_absolute_error: 183.5418\n", 245 | "Epoch 44/100\n", 246 | "32/32 [==============================] - 0s 4ms/step - loss: 170.9957 - mean_absolute_error: 170.9957\n", 247 | "Epoch 45/100\n", 248 | "32/32 [==============================] - 0s 4ms/step - loss: 250.7556 - mean_absolute_error: 250.7556\n", 249 | "Epoch 46/100\n", 250 | "32/32 [==============================] - 0s 4ms/step - loss: 130.4983 - mean_absolute_error: 130.4983\n", 251 | "Epoch 47/100\n", 252 | "32/32 [==============================] - 0s 4ms/step - loss: 192.2448 - mean_absolute_error: 192.2448\n", 253 | "Epoch 48/100\n", 254 | "32/32 [==============================] - 0s 4ms/step - loss: 127.2329 - mean_absolute_error: 127.2329\n", 255 | "Epoch 49/100\n", 256 | "32/32 [==============================] - 0s 4ms/step - loss: 167.2814 - mean_absolute_error: 167.2814\n", 257 | "Epoch 50/100\n", 258 | "32/32 [==============================] - 0s 4ms/step - loss: 251.3058 - mean_absolute_error: 251.3058\n", 259 | "Epoch 51/100\n", 260 | "32/32 [==============================] - 0s 4ms/step - loss: 172.5872 - mean_absolute_error: 172.5872\n", 261 | "Epoch 52/100\n", 262 | "32/32 [==============================] - 0s 4ms/step - loss: 140.7612 - mean_absolute_error: 140.7612\n", 263 | "Epoch 53/100\n", 264 | "32/32 [==============================] - 0s 4ms/step - loss: 250.3051 - 
mean_absolute_error: 250.3051\n", 265 | "Epoch 54/100\n", 266 | "32/32 [==============================] - 0s 4ms/step - loss: 103.1959 - mean_absolute_error: 103.1959\n", 267 | "Epoch 55/100\n", 268 | "32/32 [==============================] - 0s 4ms/step - loss: 148.3423 - mean_absolute_error: 148.3423\n", 269 | "Epoch 56/100\n", 270 | "32/32 [==============================] - 0s 4ms/step - loss: 130.6254 - mean_absolute_error: 130.6254\n", 271 | "Epoch 57/100\n", 272 | "32/32 [==============================] - 0s 4ms/step - loss: 74.8769 - mean_absolute_error: 74.8769\n", 273 | "Epoch 58/100\n", 274 | "32/32 [==============================] - 0s 4ms/step - loss: 156.9543 - mean_absolute_error: 156.9543\n", 275 | "Epoch 59/100\n", 276 | "32/32 [==============================] - 0s 4ms/step - loss: 143.6268 - mean_absolute_error: 143.6268\n", 277 | "Epoch 60/100\n", 278 | "32/32 [==============================] - 0s 4ms/step - loss: 130.9978 - mean_absolute_error: 130.9978\n", 279 | "Epoch 61/100\n", 280 | "32/32 [==============================] - 0s 4ms/step - loss: 202.3431 - mean_absolute_error: 202.3431\n", 281 | "Epoch 62/100\n", 282 | "32/32 [==============================] - 0s 4ms/step - loss: 173.4388 - mean_absolute_error: 173.4388\n", 283 | "Epoch 63/100\n", 284 | "32/32 [==============================] - 0s 4ms/step - loss: 90.0105 - mean_absolute_error: 90.0105\n", 285 | "Epoch 64/100\n", 286 | "32/32 [==============================] - 0s 4ms/step - loss: 110.1699 - mean_absolute_error: 110.1699\n", 287 | "Epoch 65/100\n", 288 | "32/32 [==============================] - 0s 4ms/step - loss: 166.2449 - mean_absolute_error: 166.2449\n", 289 | "Epoch 66/100\n", 290 | "32/32 [==============================] - 0s 4ms/step - loss: 103.2946 - mean_absolute_error: 103.2946\n", 291 | "Epoch 67/100\n", 292 | "32/32 [==============================] - 0s 4ms/step - loss: 185.7347 - mean_absolute_error: 185.7347\n", 293 | "Epoch 68/100\n", 294 | "32/32 
[==============================] - 0s 4ms/step - loss: 114.4777 - mean_absolute_error: 114.4777\n", 295 | "Epoch 69/100\n", 296 | "32/32 [==============================] - 0s 4ms/step - loss: 143.8170 - mean_absolute_error: 143.8170\n", 297 | "Epoch 70/100\n", 298 | "32/32 [==============================] - 0s 4ms/step - loss: 240.2648 - mean_absolute_error: 240.2648\n", 299 | "Epoch 71/100\n", 300 | "32/32 [==============================] - 0s 4ms/step - loss: 221.5770 - mean_absolute_error: 221.5770\n", 301 | "Epoch 72/100\n", 302 | "32/32 [==============================] - 0s 4ms/step - loss: 98.1613 - mean_absolute_error: 98.1613\n", 303 | "Epoch 73/100\n", 304 | "32/32 [==============================] - 0s 4ms/step - loss: 144.0810 - mean_absolute_error: 144.0810\n", 305 | "Epoch 74/100\n", 306 | "32/32 [==============================] - 0s 4ms/step - loss: 148.9324 - mean_absolute_error: 148.9324\n", 307 | "Epoch 75/100\n", 308 | "32/32 [==============================] - 0s 4ms/step - loss: 124.8470 - mean_absolute_error: 124.8470\n", 309 | "Epoch 76/100\n", 310 | "32/32 [==============================] - 0s 4ms/step - loss: 242.0774 - mean_absolute_error: 242.0774\n", 311 | "Epoch 77/100\n", 312 | "32/32 [==============================] - 0s 4ms/step - loss: 121.6172 - mean_absolute_error: 121.6172\n", 313 | "Epoch 78/100\n", 314 | "32/32 [==============================] - 0s 4ms/step - loss: 101.0091 - mean_absolute_error: 101.0091\n", 315 | "Epoch 79/100\n", 316 | "32/32 [==============================] - 0s 4ms/step - loss: 187.4833 - mean_absolute_error: 187.4833\n", 317 | "Epoch 80/100\n", 318 | "32/32 [==============================] - 0s 4ms/step - loss: 184.7905 - mean_absolute_error: 184.7905\n", 319 | "Epoch 81/100\n", 320 | "32/32 [==============================] - 0s 4ms/step - loss: 161.8695 - mean_absolute_error: 161.8695\n", 321 | "Epoch 82/100\n", 322 | "32/32 [==============================] - 0s 4ms/step - loss: 274.4011 - 
mean_absolute_error: 274.4011\n", 323 | "Epoch 83/100\n", 324 | "32/32 [==============================] - 0s 4ms/step - loss: 90.2962 - mean_absolute_error: 90.2962\n", 325 | "Epoch 84/100\n", 326 | "32/32 [==============================] - 0s 3ms/step - loss: 161.3433 - mean_absolute_error: 161.3433\n", 327 | "Epoch 85/100\n", 328 | "32/32 [==============================] - 0s 4ms/step - loss: 239.6779 - mean_absolute_error: 239.6779\n", 329 | "Epoch 86/100\n", 330 | "32/32 [==============================] - 0s 4ms/step - loss: 124.4336 - mean_absolute_error: 124.4336\n", 331 | "Epoch 87/100\n", 332 | "32/32 [==============================] - 0s 4ms/step - loss: 122.3127 - mean_absolute_error: 122.3127\n", 333 | "Epoch 88/100\n", 334 | "32/32 [==============================] - 0s 4ms/step - loss: 128.0379 - mean_absolute_error: 128.0379\n", 335 | "Epoch 89/100\n", 336 | "32/32 [==============================] - 0s 4ms/step - loss: 93.0046 - mean_absolute_error: 93.0046\n", 337 | "Epoch 90/100\n", 338 | "32/32 [==============================] - 0s 4ms/step - loss: 77.4748 - mean_absolute_error: 77.4748\n", 339 | "Epoch 91/100\n", 340 | "32/32 [==============================] - 0s 4ms/step - loss: 167.8816 - mean_absolute_error: 167.8816\n", 341 | "Epoch 92/100\n", 342 | "32/32 [==============================] - 0s 4ms/step - loss: 112.9497 - mean_absolute_error: 112.9497\n", 343 | "Epoch 93/100\n", 344 | "32/32 [==============================] - 0s 4ms/step - loss: 73.7069 - mean_absolute_error: 73.7069\n", 345 | "Epoch 94/100\n", 346 | "32/32 [==============================] - 0s 4ms/step - loss: 122.9201 - mean_absolute_error: 122.9201\n", 347 | "Epoch 95/100\n", 348 | "32/32 [==============================] - 0s 4ms/step - loss: 170.7848 - mean_absolute_error: 170.7848\n", 349 | "Epoch 96/100\n", 350 | "32/32 [==============================] - 0s 4ms/step - loss: 165.6123 - mean_absolute_error: 165.6123\n", 351 | "Epoch 97/100\n", 352 | "32/32 
[==============================] - 0s 4ms/step - loss: 102.5798 - mean_absolute_error: 102.5798\n", 353 | "Epoch 98/100\n", 354 | "32/32 [==============================] - 0s 4ms/step - loss: 85.1246 - mean_absolute_error: 85.1246\n", 355 | "Epoch 99/100\n", 356 | "32/32 [==============================] - 0s 4ms/step - loss: 195.0565 - mean_absolute_error: 195.0565\n", 357 | "Epoch 100/100\n", 358 | "32/32 [==============================] - 0s 4ms/step - loss: 131.4377 - mean_absolute_error: 131.4377\n" 359 | ] 360 | }, 361 | { 362 | "data": { 363 | "text/plain": [ 364 | "" 365 | ] 366 | }, 367 | "execution_count": 16, 368 | "metadata": {}, 369 | "output_type": "execute_result" 370 | } 371 | ], 372 | "source": [ 373 | "model = tf.keras.Sequential([\n", 374 | " tf.keras.layers.Dense(64, activation='relu'),\n", 375 | " tf.keras.layers.Dense(64, activation='relu'),\n", 376 | " tf.keras.layers.Dense(1)\n", 377 | "])\n", 378 | "\n", 379 | "model.compile(\n", 380 | " loss=tf.keras.losses.mean_absolute_error,\n", 381 | " optimizer=tf.keras.optimizers.Adam(learning_rate=0.1),\n", 382 | " metrics=['mean_absolute_error']\n", 383 | ")\n", 384 | "\n", 385 | "model.fit(X, y, epochs=100)" 386 | ] 387 | }, 388 | { 389 | "cell_type": "markdown", 390 | "id": "c5b54992-1059-4c68-889b-0bad1a79dc5d", 391 | "metadata": {}, 392 | "source": [ 393 | "
\n", 394 | "\n", 395 | "## Evaluation" 396 | ] 397 | }, 398 | { 399 | "cell_type": "code", 400 | "execution_count": 18, 401 | "id": "33bcd457-9d52-42b2-a3cf-0f024d53d0e5", 402 | "metadata": {}, 403 | "outputs": [ 404 | { 405 | "data": { 406 | "text/plain": [ 407 | "array([[ 94.41813],\n", 408 | " [412.36084],\n", 409 | " [911.89355]], dtype=float32)" 410 | ] 411 | }, 412 | "execution_count": 18, 413 | "metadata": {}, 414 | "output_type": "execute_result" 415 | } 416 | ], 417 | "source": [ 418 | "model.predict([10, 20, 30])" 419 | ] 420 | } 421 | ], 422 | "metadata": { 423 | "kernelspec": { 424 | "display_name": "Python 3 (ipykernel)", 425 | "language": "python", 426 | "name": "python3" 427 | }, 428 | "language_info": { 429 | "codemirror_mode": { 430 | "name": "ipython", 431 | "version": 3 432 | }, 433 | "file_extension": ".py", 434 | "mimetype": "text/x-python", 435 | "name": "python", 436 | "nbconvert_exporter": "python", 437 | "pygments_lexer": "ipython3", 438 | "version": "3.9.4" 439 | } 440 | }, 441 | "nbformat": 4, 442 | "nbformat_minor": 5 443 | } 444 | -------------------------------------------------------------------------------- /015_CNN_008_Transfer_Learning.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "862b4503-f95d-489c-9bf2-2ba9a23e6333", 6 | "metadata": {}, 7 | "source": [ 8 | "# CNN 8 - Transfer Learning\n", 9 | "- Dataset:\n", 10 | " - https://www.kaggle.com/shaunthesheep/microsoft-catsvsdogs-dataset\n", 11 | "- The dataset isn't deep-learning-compatible by default, here's how to preprocess it:\n", 12 | " - Video: https://www.youtube.com/watch?v=O7EV2BjOXus&ab_channel=BetterDataScience\n", 13 | " - Article: https://betterdatascience.com/top-3-prerequisites-for-deep-learning-projects\n", 14 | " - Code: https://github.com/better-data-science/TensorFlow/blob/main/008_CNN_001_Working_With_Image_Data.ipynb\n", 15 | " \n", 16 | "**What you should 
know by now:**\n", 17 | "- How to preprocess image data\n", 18 | "- How to load image data from a directory\n", 19 | "- What's a convolution, pooling, and a fully-connected layer\n", 20 | "- Categorical vs. binary classification\n", 21 | "- What is data augmentation and why is it useful\n", 22 | "\n", 23 | "**Let's start**\n", 24 | "- We'll import the libraries first:" 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": 1, 30 | "id": "9a4d1c6a-724a-4558-8fa6-2450a49ebaf9", 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "import os\n", 35 | "os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' \n", 36 | "\n", 37 | "import warnings\n", 38 | "warnings.filterwarnings('ignore')\n", 39 | "\n", 40 | "import numpy as np\n", 41 | "import tensorflow as tf" 42 | ] 43 | }, 44 | { 45 | "cell_type": "markdown", 46 | "id": "7c57301c-e591-4c61-beac-a99cf52b680c", 47 | "metadata": {}, 48 | "source": [ 49 | "- We'll have to load training and validation data from different directories throughout the notebook\n", 50 | "- The best practice is to declare a function for that\n", 51 | "- The function will also apply data augmentation to the training dataset:" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": 2, 57 | "id": "30b3ebed-d201-4794-bbd4-c331e53aa5b5", 58 | "metadata": {}, 59 | "outputs": [], 60 | "source": [ 61 | "def init_data(train_dir: str, valid_dir: str) -> tuple:\n", 62 | " train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(\n", 63 | " rescale=1/255.0,\n", 64 | " rotation_range=20,\n", 65 | " width_shift_range=0.2,\n", 66 | " height_shift_range=0.2,\n", 67 | " shear_range=0.2,\n", 68 | " zoom_range=0.2,\n", 69 | " horizontal_flip=True,\n", 70 | " fill_mode='nearest'\n", 71 | " )\n", 72 | " valid_datagen = tf.keras.preprocessing.image.ImageDataGenerator(\n", 73 | " rescale=1/255.0\n", 74 | " )\n", 75 | " \n", 76 | " train_data = train_datagen.flow_from_directory(\n", 77 | " directory=train_dir,\n", 78 | " 
target_size=(224, 224),\n", 79 | " class_mode='categorical',\n", 80 | " batch_size=64,\n", 81 | " seed=42\n", 82 | " )\n", 83 | " valid_data = valid_datagen.flow_from_directory(\n", 84 | " directory=valid_dir,\n", 85 | " target_size=(224, 224),\n", 86 | " class_mode='categorical',\n", 87 | " batch_size=64,\n", 88 | " seed=42\n", 89 | " )\n", 90 | " \n", 91 | " return train_data, valid_data" 92 | ] 93 | }, 94 | { 95 | "cell_type": "markdown", 96 | "id": "c8726f17-16f6-4994-8f92-7d2ddc8383d5", 97 | "metadata": {}, 98 | "source": [ 99 | "- Let's now load our dogs and cats dataset:" 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": 3, 105 | "id": "b4e8f623-bb24-4cba-914e-5b4e2b053a18", 106 | "metadata": {}, 107 | "outputs": [ 108 | { 109 | "name": "stdout", 110 | "output_type": "stream", 111 | "text": [ 112 | "Found 20030 images belonging to 2 classes.\n", 113 | "Found 2488 images belonging to 2 classes.\n" 114 | ] 115 | } 116 | ], 117 | "source": [ 118 | "train_data, valid_data = init_data(\n", 119 | " train_dir='data/train/', \n", 120 | " valid_dir='data/validation/'\n", 121 | ")" 122 | ] 123 | }, 124 | { 125 | "cell_type": "markdown", 126 | "id": "6c93f33b-d859-4c53-95cf-10d775b23b22", 127 | "metadata": {}, 128 | "source": [ 129 | "
\n", 130 | "\n", 131 | "## Transfer Learning in TensorFlow\n", 132 | "- With transfer learning, we're basically loading a huge pretrained model without the top clasification layer\n", 133 | "- That way, we can freeze the learned weights and only add the output layer to match our case\n", 134 | "- For example, most pretrained models were trained on ImageNet dataset which has 1000 classes\n", 135 | " - We only have two classes (cat and dog), so we'll need to specify that\n", 136 | "- We'll also add a couple of additional layers to prevent overfitting:" 137 | ] 138 | }, 139 | { 140 | "cell_type": "code", 141 | "execution_count": 4, 142 | "id": "debc1ec9-769b-4880-9341-e01c4ae6a7e3", 143 | "metadata": {}, 144 | "outputs": [], 145 | "source": [ 146 | "def build_transfer_learning_model(base_model):\n", 147 | " # `base_model` stands for the pretrained model\n", 148 | " # We want to use the learned weights, and to do so we must freeze them\n", 149 | " for layer in base_model.layers:\n", 150 | " layer.trainable = False\n", 151 | " \n", 152 | " # Declare a sequential model that combines the base model with custom layers\n", 153 | " model = tf.keras.Sequential([\n", 154 | " base_model,\n", 155 | " tf.keras.layers.GlobalAveragePooling2D(),\n", 156 | " tf.keras.layers.BatchNormalization(),\n", 157 | " tf.keras.layers.Dropout(rate=0.2),\n", 158 | " tf.keras.layers.Dense(units=2, activation='softmax')\n", 159 | " ])\n", 160 | " \n", 161 | " # Compile the model\n", 162 | " model.compile(\n", 163 | " loss='categorical_crossentropy',\n", 164 | " optimizer=tf.keras.optimizers.Adam(),\n", 165 | " metrics=['accuracy']\n", 166 | " )\n", 167 | " \n", 168 | " return model" 169 | ] 170 | }, 171 | { 172 | "cell_type": "code", 173 | "execution_count": 5, 174 | "id": "d17b0e46-fd0e-4fbf-b673-4462028777d1", 175 | "metadata": {}, 176 | "outputs": [ 177 | { 178 | "name": "stdout", 179 | "output_type": "stream", 180 | "text": [ 181 | "Metal device set to: Apple M1 Pro\n", 182 | "Epoch 1/10\n", 
183 | "313/313 [==============================] - 160s 510ms/step - loss: 0.3786 - accuracy: 0.8258 - val_loss: 0.3144 - val_accuracy: 0.8943\n", 184 | "Epoch 2/10\n", 185 | "313/313 [==============================] - 160s 510ms/step - loss: 0.2897 - accuracy: 0.8712 - val_loss: 0.1988 - val_accuracy: 0.9224\n", 186 | "Epoch 3/10\n", 187 | "313/313 [==============================] - 160s 510ms/step - loss: 0.2751 - accuracy: 0.8800 - val_loss: 0.1944 - val_accuracy: 0.9216\n", 188 | "Epoch 4/10\n", 189 | "313/313 [==============================] - 160s 510ms/step - loss: 0.2717 - accuracy: 0.8812 - val_loss: 0.1820 - val_accuracy: 0.9264\n", 190 | "Epoch 5/10\n", 191 | "313/313 [==============================] - 160s 511ms/step - loss: 0.2699 - accuracy: 0.8829 - val_loss: 0.1809 - val_accuracy: 0.9268\n", 192 | "Epoch 6/10\n", 193 | "313/313 [==============================] - 160s 511ms/step - loss: 0.2709 - accuracy: 0.8822 - val_loss: 0.1792 - val_accuracy: 0.9297\n", 194 | "Epoch 7/10\n", 195 | "313/313 [==============================] - 160s 511ms/step - loss: 0.2668 - accuracy: 0.8852 - val_loss: 0.1763 - val_accuracy: 0.9236\n", 196 | "Epoch 8/10\n", 197 | "313/313 [==============================] - 162s 516ms/step - loss: 0.2688 - accuracy: 0.8817 - val_loss: 0.1889 - val_accuracy: 0.9212\n", 198 | "Epoch 9/10\n", 199 | "313/313 [==============================] - 160s 511ms/step - loss: 0.2667 - accuracy: 0.8857 - val_loss: 0.1760 - val_accuracy: 0.9264\n", 200 | "Epoch 10/10\n", 201 | "313/313 [==============================] - 160s 511ms/step - loss: 0.2685 - accuracy: 0.8836 - val_loss: 0.1802 - val_accuracy: 0.9281\n" 202 | ] 203 | } 204 | ], 205 | "source": [ 206 | "# Let's use a simple and well-known architecture - VGG16\n", 207 | "from tensorflow.keras.applications.vgg16 import VGG16\n", 208 | "\n", 209 | "# We'll specify it as a base model\n", 210 | "# `include_top=False` means we don't want the top classification layer\n", 211 | "# Specify the 
`input_shape` to match our image size\n", 212 | "# Specify the `weights` accordingly\n", 213 | "vgg_model = build_transfer_learning_model(\n", 214 | " base_model=VGG16(include_top=False, input_shape=(224, 224, 3), weights='imagenet')\n", 215 | ")\n", 216 | "\n", 217 | "# Train the model for 10 epochs\n", 218 | "vgg_hist = vgg_model.fit(\n", 219 | " train_data,\n", 220 | " validation_data=valid_data,\n", 221 | " epochs=10\n", 222 | ")" 223 | ] 224 | }, 225 | { 226 | "cell_type": "markdown", 227 | "id": "b9c82bc5-5df0-4fc4-8781-599b462789ff", 228 | "metadata": {}, 229 | "source": [ 230 | "- We got amazing accuracy right from the start!\n", 231 | "- We couldn't surpass 77% accuracy on the validation set with the custom architecture, and we're at 93% with the VGG16 model\n", 232 | "- The beauty of transfer learning isn't only that it yields highly accurate models - you can also train models with less data, as the model doesn't have to learn as much\n", 233 | "\n", 234 | "
\n", 235 | "\n", 236 | "## Transfer Learning on a 20 times smaller subset\n", 237 | "- We want to see if reducing the dataset size negatively effects the predictive power\n", 238 | "- To do so, we'll create a new directory structure for training and validation images:" 239 | ] 240 | }, 241 | { 242 | "cell_type": "code", 243 | "execution_count": 6, 244 | "id": "dcca0baf-a035-49a8-b554-373c6b74ece2", 245 | "metadata": {}, 246 | "outputs": [], 247 | "source": [ 248 | "import random\n", 249 | "import pathlib\n", 250 | "import shutil\n", 251 | "\n", 252 | "random.seed(42)\n", 253 | "\n", 254 | "\n", 255 | "dir_data = pathlib.Path.cwd().joinpath('data_small')\n", 256 | "dir_train = dir_data.joinpath('train')\n", 257 | "dir_valid = dir_data.joinpath('validation')\n", 258 | "\n", 259 | "if not dir_data.exists(): dir_data.mkdir()\n", 260 | "if not dir_train.exists(): dir_train.mkdir()\n", 261 | "if not dir_valid.exists(): dir_valid.mkdir()\n", 262 | "\n", 263 | "for cls in ['cat', 'dog']:\n", 264 | " if not dir_train.joinpath(cls).exists(): dir_train.joinpath(cls).mkdir()\n", 265 | " if not dir_valid.joinpath(cls).exists(): dir_valid.joinpath(cls).mkdir()" 266 | ] 267 | }, 268 | { 269 | "cell_type": "markdown", 270 | "id": "5d98c155-5ca3-41ae-a807-98b809214d4f", 271 | "metadata": {}, 272 | "source": [ 273 | "- Here's the directory structure printed:" 274 | ] 275 | }, 276 | { 277 | "cell_type": "code", 278 | "execution_count": 9, 279 | "id": "c59561b3-7ffd-4ef3-b46b-d45616de32ca", 280 | "metadata": {}, 281 | "outputs": [ 282 | { 283 | "name": "stdout", 284 | "output_type": "stream", 285 | "text": [ 286 | " |-train\n", 287 | " |---cat\n", 288 | " |---dog\n", 289 | " |-validation\n", 290 | " |---cat\n", 291 | " |---dog\n" 292 | ] 293 | } 294 | ], 295 | "source": [ 296 | "!ls -R data_small | grep \":$\" | sed -e 's/:$//' -e 's/[^-][^\\/]*\\//--/g' -e 's/^/ /' -e 's/-/|/'" 297 | ] 298 | }, 299 | { 300 | "cell_type": "markdown", 301 | "id": "fdb8b502-b28a-4a45-86d4-643bdd619790", 
302 | "metadata": {}, 303 | "source": [ 304 | "- Now, we'll copy only a sample of images to the new folders\n", 305 | "- We'll declare a `copy_sample()` function whcih takes `n` images from the `src_folder` and copies them to the `tgt_folder`\n", 306 | "- We'll keep `n` to 500 by default, which is a pretty small number:" 307 | ] 308 | }, 309 | { 310 | "cell_type": "code", 311 | "execution_count": 10, 312 | "id": "7273b861-43e1-4f12-8c96-1ac2582e5a5b", 313 | "metadata": {}, 314 | "outputs": [], 315 | "source": [ 316 | "def copy_sample(src_folder: pathlib.PosixPath, tgt_folder: pathlib.PosixPath, n: int = 500):\n", 317 | " imgs = random.sample(list(src_folder.iterdir()), n)\n", 318 | "\n", 319 | " for img in imgs:\n", 320 | " img_name = str(img).split('/')[-1]\n", 321 | " \n", 322 | " shutil.copy(\n", 323 | " src=img,\n", 324 | " dst=f'{tgt_folder}/{img_name}'\n", 325 | " )" 326 | ] 327 | }, 328 | { 329 | "cell_type": "markdown", 330 | "id": "1ebf15c4-35a2-4977-bfd6-1b21a13a2c89", 331 | "metadata": {}, 332 | "source": [ 333 | "- Let's now copy the training and validation images\n", 334 | "- For the validation set, we'll copy only 100 images per class" 335 | ] 336 | }, 337 | { 338 | "cell_type": "code", 339 | "execution_count": 11, 340 | "id": "289fd65c-d52a-4bdb-afea-ec6c66614082", 341 | "metadata": {}, 342 | "outputs": [], 343 | "source": [ 344 | "# Train - cat\n", 345 | "copy_sample(\n", 346 | " src_folder=pathlib.Path.cwd().joinpath('data/train/cat/'), \n", 347 | " tgt_folder=pathlib.Path.cwd().joinpath('data_small/train/cat/'), \n", 348 | ")\n", 349 | "\n", 350 | "# Train - dog\n", 351 | "copy_sample(\n", 352 | " src_folder=pathlib.Path.cwd().joinpath('data/train/dog/'), \n", 353 | " tgt_folder=pathlib.Path.cwd().joinpath('data_small/train/dog/'), \n", 354 | ")\n", 355 | "\n", 356 | "# Valid - cat\n", 357 | "copy_sample(\n", 358 | " src_folder=pathlib.Path.cwd().joinpath('data/validation/cat/'), \n", 359 | " 
tgt_folder=pathlib.Path.cwd().joinpath('data_small/validation/cat/'),\n", 360 | " n=100\n", 361 | ")\n", 362 | "\n", 363 | "# Valid - dog\n", 364 | "copy_sample(\n", 365 | " src_folder=pathlib.Path.cwd().joinpath('data/validation/dog/'), \n", 366 | " tgt_folder=pathlib.Path.cwd().joinpath('data_small/validation/dog/'),\n", 367 | " n=100\n", 368 | ")" 369 | ] 370 | }, 371 | { 372 | "cell_type": "markdown", 373 | "id": "e569e212-5ded-4e58-9a00-b2b7587844a2", 374 | "metadata": {}, 375 | "source": [ 376 | "- Let's count the number of files in each folder to verify the images were copied successfully:" 377 | ] 378 | }, 379 | { 380 | "cell_type": "code", 381 | "execution_count": 12, 382 | "id": "9cb7ae6a-f27e-4d7d-afd7-d48393b5277b", 383 | "metadata": {}, 384 | "outputs": [ 385 | { 386 | "name": "stdout", 387 | "output_type": "stream", 388 | "text": [ 389 | " 500\n" 390 | ] 391 | } 392 | ], 393 | "source": [ 394 | "!ls data_small/train/cat/ | wc -l" 395 | ] 396 | }, 397 | { 398 | "cell_type": "code", 399 | "execution_count": 13, 400 | "id": "4a4d1ce4-b863-4584-a379-0176f9be159f", 401 | "metadata": {}, 402 | "outputs": [ 403 | { 404 | "name": "stdout", 405 | "output_type": "stream", 406 | "text": [ 407 | " 100\n" 408 | ] 409 | } 410 | ], 411 | "source": [ 412 | "!ls data_small/validation/cat/ | wc -l" 413 | ] 414 | }, 415 | { 416 | "cell_type": "code", 417 | "execution_count": 14, 418 | "id": "f506056c-8147-43cf-9a3c-9eb106cc3026", 419 | "metadata": {}, 420 | "outputs": [ 421 | { 422 | "name": "stdout", 423 | "output_type": "stream", 424 | "text": [ 425 | " 500\n" 426 | ] 427 | } 428 | ], 429 | "source": [ 430 | "!ls data_small/train/dog/ | wc -l" 431 | ] 432 | }, 433 | { 434 | "cell_type": "code", 435 | "execution_count": 15, 436 | "id": "8ac6b096-c29d-4b98-8fa6-9e923f3da51b", 437 | "metadata": {}, 438 | "outputs": [ 439 | { 440 | "name": "stdout", 441 | "output_type": "stream", 442 | "text": [ 443 | " 100\n" 444 | ] 445 | } 446 | ], 447 | "source": [ 448 | "!ls 
data_small/validation/dog/ | wc -l" 449 | ] 450 | }, 451 | { 452 | "cell_type": "markdown", 453 | "id": "b2fbad0b-12b6-4dfb-84e4-1ccde96fb03b", 454 | "metadata": {}, 455 | "source": [ 456 | "- Now use `init_data()` to load in the images again:" 457 | ] 458 | }, 459 | { 460 | "cell_type": "code", 461 | "execution_count": 6, 462 | "id": "3ac0ff9f-a164-4159-b7ce-96fc240d230d", 463 | "metadata": {}, 464 | "outputs": [ 465 | { 466 | "name": "stdout", 467 | "output_type": "stream", 468 | "text": [ 469 | "Found 1000 images belonging to 2 classes.\n", 470 | "Found 200 images belonging to 2 classes.\n" 471 | ] 472 | } 473 | ], 474 | "source": [ 475 | "train_data, valid_data = init_data(\n", 476 | " train_dir='data_small/train/', \n", 477 | " valid_dir='data_small/validation/'\n", 478 | ")" 479 | ] 480 | }, 481 | { 482 | "cell_type": "markdown", 483 | "id": "401b7470-3697-4080-93e7-7d6e69344699", 484 | "metadata": {}, 485 | "source": [ 486 | "- There's total of 1000 training images\n", 487 | "- It will be interesting to see if we can get a decent model out of a dataset this small\n", 488 | "- Model architecture is the same, but we'll train for more epochs just because the dataset is smaller\n", 489 | " - Also, we can afford to train for longer since the training time per epoch is reduced:" 490 | ] 491 | }, 492 | { 493 | "cell_type": "code", 494 | "execution_count": 8, 495 | "id": "728db929-7ed2-4db3-a274-2c4f1334152e", 496 | "metadata": {}, 497 | "outputs": [ 498 | { 499 | "name": "stdout", 500 | "output_type": "stream", 501 | "text": [ 502 | "Epoch 1/20\n", 503 | "16/16 [==============================] - 9s 572ms/step - loss: 0.8472 - accuracy: 0.5740 - val_loss: 0.7049 - val_accuracy: 0.5100\n", 504 | "Epoch 2/20\n", 505 | "16/16 [==============================] - 9s 551ms/step - loss: 0.6389 - accuracy: 0.6840 - val_loss: 0.6876 - val_accuracy: 0.5150\n", 506 | "Epoch 3/20\n", 507 | "16/16 [==============================] - 9s 551ms/step - loss: 0.4936 - accuracy: 0.7800 
- val_loss: 0.6461 - val_accuracy: 0.5300\n", 508 | "Epoch 4/20\n", 509 | "16/16 [==============================] - 9s 552ms/step - loss: 0.4318 - accuracy: 0.8020 - val_loss: 0.6082 - val_accuracy: 0.5850\n", 510 | "Epoch 5/20\n", 511 | "16/16 [==============================] - 9s 552ms/step - loss: 0.3935 - accuracy: 0.8270 - val_loss: 0.5831 - val_accuracy: 0.6450\n", 512 | "Epoch 6/20\n", 513 | "16/16 [==============================] - 9s 551ms/step - loss: 0.3945 - accuracy: 0.8100 - val_loss: 0.5638 - val_accuracy: 0.7000\n", 514 | "Epoch 7/20\n", 515 | "16/16 [==============================] - 9s 545ms/step - loss: 0.3444 - accuracy: 0.8300 - val_loss: 0.5374 - val_accuracy: 0.7350\n", 516 | "Epoch 8/20\n", 517 | "16/16 [==============================] - 9s 553ms/step - loss: 0.3490 - accuracy: 0.8510 - val_loss: 0.5064 - val_accuracy: 0.8100\n", 518 | "Epoch 9/20\n", 519 | "16/16 [==============================] - 9s 552ms/step - loss: 0.3523 - accuracy: 0.8330 - val_loss: 0.4810 - val_accuracy: 0.8500\n", 520 | "Epoch 10/20\n", 521 | "16/16 [==============================] - 9s 553ms/step - loss: 0.3317 - accuracy: 0.8610 - val_loss: 0.4618 - val_accuracy: 0.8650\n", 522 | "Epoch 11/20\n", 523 | "16/16 [==============================] - 9s 552ms/step - loss: 0.3084 - accuracy: 0.8740 - val_loss: 0.4410 - val_accuracy: 0.8800\n", 524 | "Epoch 12/20\n", 525 | "16/16 [==============================] - 9s 551ms/step - loss: 0.2890 - accuracy: 0.8740 - val_loss: 0.4182 - val_accuracy: 0.8850\n", 526 | "Epoch 13/20\n", 527 | "16/16 [==============================] - 9s 552ms/step - loss: 0.2823 - accuracy: 0.8780 - val_loss: 0.3945 - val_accuracy: 0.9200\n", 528 | "Epoch 14/20\n", 529 | "16/16 [==============================] - 9s 552ms/step - loss: 0.3029 - accuracy: 0.8610 - val_loss: 0.3769 - val_accuracy: 0.9100\n", 530 | "Epoch 15/20\n", 531 | "16/16 [==============================] - 9s 552ms/step - loss: 0.2998 - accuracy: 0.8590 - val_loss: 0.3614 - 
val_accuracy: 0.9150\n", 532 | "Epoch 16/20\n", 533 | "16/16 [==============================] - 9s 552ms/step - loss: 0.2905 - accuracy: 0.8790 - val_loss: 0.3403 - val_accuracy: 0.9300\n", 534 | "Epoch 17/20\n", 535 | "16/16 [==============================] - 9s 555ms/step - loss: 0.2736 - accuracy: 0.8740 - val_loss: 0.3255 - val_accuracy: 0.9400\n", 536 | "Epoch 18/20\n", 537 | "16/16 [==============================] - 9s 553ms/step - loss: 0.2956 - accuracy: 0.8780 - val_loss: 0.3126 - val_accuracy: 0.9200\n", 538 | "Epoch 19/20\n", 539 | "16/16 [==============================] - 9s 563ms/step - loss: 0.2556 - accuracy: 0.8920 - val_loss: 0.2992 - val_accuracy: 0.9150\n", 540 | "Epoch 20/20\n", 541 | "16/16 [==============================] - 9s 561ms/step - loss: 0.2718 - accuracy: 0.8820 - val_loss: 0.2887 - val_accuracy: 0.9150\n" 542 | ] 543 | } 544 | ], 545 | "source": [ 546 | "vgg_model = build_transfer_learning_model(\n", 547 | " base_model=VGG16(include_top=False, input_shape=(224, 224, 3), weights='imagenet')\n", 548 | ")\n", 549 | "\n", 550 | "vgg_hist = vgg_model.fit(\n", 551 | " train_data,\n", 552 | " validation_data=valid_data,\n", 553 | " epochs=20\n", 554 | ")" 555 | ] 556 | }, 557 | { 558 | "cell_type": "markdown", 559 | "id": "06ee2c27-bbbe-4941-84fd-1efe5a1e1afc", 560 | "metadata": {}, 561 | "source": [ 562 | "- It looks like we got roughly the same validation accuracy as with the model trained on 25K images, which is amazing!\n", 563 | "\n", 564 | "**Homework:**\n", 565 | "- Use both models to predict the entire test set directory\n", 566 | "- How do the accuracies compare?" 
567 | ] 568 | } 569 | ], 570 | "metadata": { 571 | "kernelspec": { 572 | "display_name": "Python 3 (ipykernel)", 573 | "language": "python", 574 | "name": "python3" 575 | }, 576 | "language_info": { 577 | "codemirror_mode": { 578 | "name": "ipython", 579 | "version": 3 580 | }, 581 | "file_extension": ".py", 582 | "mimetype": "text/x-python", 583 | "name": "python", 584 | "nbconvert_exporter": "python", 585 | "pygments_lexer": "ipython3", 586 | "version": "3.9.9" 587 | } 588 | }, 589 | "nbformat": 4, 590 | "nbformat_minor": 5 591 | } 592 | -------------------------------------------------------------------------------- /006_Callbacks.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "bf010558-7354-4f8d-b33a-b5d2ff761628", 6 | "metadata": {}, 7 | "source": [ 8 | "# Dataset import and exploration\n", 9 | "- https://www.kaggle.com/shelvigarg/wine-quality-dataset\n", 10 | "- Refer to https://github.com/better-data-science/TensorFlow/blob/main/003_TensorFlow_Classification.ipynb for detailed preparation instructions" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 1, 16 | "id": "ca51a1b1-bba1-4832-b177-e0be1f3aee9e", 17 | "metadata": {}, 18 | "outputs": [ 19 | { 20 | "data": { 21 | "text/html": [ 22 | "
\n", 23 | "\n", 36 | "\n", 37 | " \n", 38 | " \n", 39 | " \n", 40 | " \n", 41 | " \n", 42 | " \n", 43 | " \n", 44 | " \n", 45 | " \n", 46 | " \n", 47 | " \n", 48 | " \n", 49 | " \n", 50 | " \n", 51 | " \n", 52 | " \n", 53 | " \n", 54 | " \n", 55 | " \n", 56 | " \n", 57 | " \n", 58 | " \n", 59 | " \n", 60 | " \n", 61 | " \n", 62 | " \n", 63 | " \n", 64 | " \n", 65 | " \n", 66 | " \n", 67 | " \n", 68 | " \n", 69 | " \n", 70 | " \n", 71 | " \n", 72 | " \n", 73 | " \n", 74 | " \n", 75 | " \n", 76 | " \n", 77 | " \n", 78 | " \n", 79 | " \n", 80 | " \n", 81 | " \n", 82 | " \n", 83 | " \n", 84 | " \n", 85 | " \n", 86 | " \n", 87 | " \n", 88 | " \n", 89 | " \n", 90 | " \n", 91 | " \n", 92 | " \n", 93 | " \n", 94 | " \n", 95 | " \n", 96 | " \n", 97 | " \n", 98 | " \n", 99 | " \n", 100 | " \n", 101 | " \n", 102 | " \n", 103 | " \n", 104 | " \n", 105 | " \n", 106 | " \n", 107 | " \n", 108 | " \n", 109 | " \n", 110 | " \n", 111 | " \n", 112 | " \n", 113 | " \n", 114 | " \n", 115 | " \n", 116 | " \n", 117 | " \n", 118 | " \n", 119 | " \n", 120 | " \n", 121 | " \n", 122 | " \n", 123 | " \n", 124 | " \n", 125 | " \n", 126 | " \n", 127 | " \n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | "
typefixed acidityvolatile aciditycitric acidresidual sugarchloridesfree sulfur dioxidetotal sulfur dioxidedensitypHsulphatesalcoholquality
673white6.70.310.302.10.03818.0130.00.992803.360.6310.66
2652white7.30.220.312.30.01845.080.00.989363.060.3412.97
5574red10.80.400.412.20.0847.017.00.998403.080.679.36
6416red7.40.470.462.20.1147.020.00.996473.320.6310.55
3837white8.00.270.331.20.05041.0103.00.990023.000.4512.46
\n", 138 | "
" 139 | ], 140 | "text/plain": [ 141 | " type fixed acidity volatile acidity citric acid residual sugar \\\n", 142 | "673 white 6.7 0.31 0.30 2.1 \n", 143 | "2652 white 7.3 0.22 0.31 2.3 \n", 144 | "5574 red 10.8 0.40 0.41 2.2 \n", 145 | "6416 red 7.4 0.47 0.46 2.2 \n", 146 | "3837 white 8.0 0.27 0.33 1.2 \n", 147 | "\n", 148 | " chlorides free sulfur dioxide total sulfur dioxide density pH \\\n", 149 | "673 0.038 18.0 130.0 0.99280 3.36 \n", 150 | "2652 0.018 45.0 80.0 0.98936 3.06 \n", 151 | "5574 0.084 7.0 17.0 0.99840 3.08 \n", 152 | "6416 0.114 7.0 20.0 0.99647 3.32 \n", 153 | "3837 0.050 41.0 103.0 0.99002 3.00 \n", 154 | "\n", 155 | " sulphates alcohol quality \n", 156 | "673 0.63 10.6 6 \n", 157 | "2652 0.34 12.9 7 \n", 158 | "5574 0.67 9.3 6 \n", 159 | "6416 0.63 10.5 5 \n", 160 | "3837 0.45 12.4 6 " 161 | ] 162 | }, 163 | "execution_count": 1, 164 | "metadata": {}, 165 | "output_type": "execute_result" 166 | } 167 | ], 168 | "source": [ 169 | "import os\n", 170 | "import numpy as np\n", 171 | "import pandas as pd\n", 172 | "import warnings\n", 173 | "os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' \n", 174 | "warnings.filterwarnings('ignore')\n", 175 | "\n", 176 | "df = pd.read_csv('data/winequalityN.csv')\n", 177 | "df.sample(5)" 178 | ] 179 | }, 180 | { 181 | "cell_type": "code", 182 | "execution_count": 2, 183 | "id": "5f8ccfbf-d0f9-4128-ab3b-d6ab1489f205", 184 | "metadata": {}, 185 | "outputs": [], 186 | "source": [ 187 | "from sklearn.model_selection import train_test_split\n", 188 | "from sklearn.preprocessing import StandardScaler\n", 189 | "\n", 190 | "\n", 191 | "# Prepare the data\n", 192 | "df = df.dropna()\n", 193 | "df['is_white_wine'] = [1 if typ == 'white' else 0 for typ in df['type']]\n", 194 | "df['is_good_wine'] = [1 if quality >= 6 else 0 for quality in df['quality']]\n", 195 | "df.drop(['type', 'quality'], axis=1, inplace=True)\n", 196 | "\n", 197 | "# Train/test split\n", 198 | "X = df.drop('is_good_wine', axis=1)\n", 199 | "y = 
df['is_good_wine']\n", 200 | "X_train, X_test, y_train, y_test = train_test_split(\n", 201 | " X, y, \n", 202 | " test_size=0.2, random_state=42\n", 203 | ")\n", 204 | "\n", 205 | "# Scaling\n", 206 | "scaler = StandardScaler()\n", 207 | "X_train_scaled = scaler.fit_transform(X_train)\n", 208 | "X_test_scaled = scaler.transform(X_test)" 209 | ] 210 | }, 211 | { 212 | "cell_type": "markdown", 213 | "id": "d6e70cb7-d3e7-49b0-aeb7-aa2a3ca19a41", 214 | "metadata": {}, 215 | "source": [ 216 | "
\n", 217 | "\n", 218 | "# Modelling" 219 | ] 220 | }, 221 | { 222 | "cell_type": "code", 223 | "execution_count": 3, 224 | "id": "8e42d1f6-1c30-4334-8397-b8772f958979", 225 | "metadata": {}, 226 | "outputs": [ 227 | { 228 | "name": "stdout", 229 | "output_type": "stream", 230 | "text": [ 231 | "Init Plugin\n", 232 | "Init Graph Optimizer\n", 233 | "Init Kernel\n" 234 | ] 235 | } 236 | ], 237 | "source": [ 238 | "import tensorflow as tf\n", 239 | "tf.random.set_seed(42)" 240 | ] 241 | }, 242 | { 243 | "cell_type": "markdown", 244 | "id": "83bed9be-de94-4a6f-bb1d-5fd571f697a4", 245 | "metadata": {}, 246 | "source": [ 247 | "
\n", 248 | "\n", 249 | "## Callbacks list\n", 250 | "- I like to declare it beforehand\n", 251 | "\n", 252 | "### `ModelCheckpoint`\n", 253 | "- It will save the model locally on the current epoch if it beats the performance on the previous one\n", 254 | "- The configuration below saves it to a `hdf5` file in the following format:\n", 255 | " - `/model--.hdf5`\n", 256 | "- Model is saved only if the validation accuracy is higher than on the previous epoch" 257 | ] 258 | }, 259 | { 260 | "cell_type": "code", 261 | "execution_count": 4, 262 | "id": "ae4dc04b-530f-4320-831e-d31b311f69dd", 263 | "metadata": {}, 264 | "outputs": [], 265 | "source": [ 266 | "cb_checkpoint = tf.keras.callbacks.ModelCheckpoint(\n", 267 | " filepath='checkpoints/model-{epoch:02d}-{val_accuracy:.2f}.hdf5',\n", 268 | " monitor='val_accuracy',\n", 269 | " mode='max',\n", 270 | " save_best_only=True,\n", 271 | " verbose=1\n", 272 | ")" 273 | ] 274 | }, 275 | { 276 | "cell_type": "markdown", 277 | "id": "7509f244-b817-44ba-9221-44606ac21d4f", 278 | "metadata": {}, 279 | "source": [ 280 | "### `ReduceLROnPlateau`\n", 281 | "- Basically if a metric (validation loss) doesn't decrease for a number of epochs (10), reduce the learning rate\n", 282 | "- New learning rate = old learning rate * factor (0.1)\n", 283 | " - nlr = 0.01 * 0.1 = 0.001\n", 284 | "- You can also set the minimum learning rate below the model won't go" 285 | ] 286 | }, 287 | { 288 | "cell_type": "code", 289 | "execution_count": 5, 290 | "id": "be8e7d24-ea3a-48a1-8deb-8373b1978213", 291 | "metadata": {}, 292 | "outputs": [], 293 | "source": [ 294 | "cb_reducelr = tf.keras.callbacks.ReduceLROnPlateau(\n", 295 | " monitor='val_loss',\n", 296 | " mode='min',\n", 297 | " factor=0.1,\n", 298 | " patience=10,\n", 299 | " verbose=1,\n", 300 | " min_lr=0.00001\n", 301 | ")" 302 | ] 303 | }, 304 | { 305 | "cell_type": "markdown", 306 | "id": "ffe6748b-df60-4a4e-b3cc-9e51f0ed66b7", 307 | "metadata": {}, 308 | "source": [ 309 | "### 
`EarlyStopping`\n", 310 | "- If a metric (validation accuracy) doesn't increase by some minimum delta (0.001) for a given number of epochs (10) - kill the training process\n" 311 | ] 312 | }, 313 | { 314 | "cell_type": "code", 315 | "execution_count": 6, 316 | "id": "c438c4ed-34e7-4ba0-8564-20601d46f80f", 317 | "metadata": {}, 318 | "outputs": [], 319 | "source": [ 320 | "cb_earlystop = tf.keras.callbacks.EarlyStopping(\n", 321 | " monitor='val_accuracy',\n", 322 | " mode='max',\n", 323 | " min_delta=0.001,\n", 324 | " patience=10,\n", 325 | " verbose=1\n", 326 | ")" 327 | ] 328 | }, 329 | { 330 | "cell_type": "markdown", 331 | "id": "9b94763b-b620-4aac-9475-5925d87b87a5", 332 | "metadata": {}, 333 | "source": [ 334 | "### `CSVLogger`\n", 335 | "- Captures model training history and dumps it to a CSV file\n", 336 | "- Useful for analyzing the performance later" 337 | ] 338 | }, 339 | { 340 | "cell_type": "code", 341 | "execution_count": 7, 342 | "id": "0a82de6e-fa77-4701-a18f-82e0bd403f6f", 343 | "metadata": {}, 344 | "outputs": [], 345 | "source": [ 346 | "cb_csvlogger = tf.keras.callbacks.CSVLogger(\n", 347 | " filename='training_log.csv',\n", 348 | " separator=',',\n", 349 | " append=False\n", 350 | ")" 351 | ] 352 | }, 353 | { 354 | "cell_type": "markdown", 355 | "id": "5aa112e1-97f3-4e81-a598-d7f45795d769", 356 | "metadata": {}, 357 | "source": [ 358 | "
\n", 359 | "\n", 360 | "- For simplicity's sake we'll treat test set as a validation set\n", 361 | "- In real deep learning projects you'll want to have 3 sets: training, validation, and test\n", 362 | "- We'll tell the model to train for 1000 epochs, but the `EarlyStopping` callback will kill it way before\n", 363 | "- Specify callbacks in the `fit()` function" 364 | ] 365 | }, 366 | { 367 | "cell_type": "code", 368 | "execution_count": 8, 369 | "id": "3b611c9b-7da2-4b16-9161-272bf69abecf", 370 | "metadata": {}, 371 | "outputs": [ 372 | { 373 | "name": "stdout", 374 | "output_type": "stream", 375 | "text": [ 376 | "Metal device set to: Apple M1\n", 377 | "Epoch 1/1000\n", 378 | "162/162 [==============================] - 1s 7ms/step - loss: 0.5375 - accuracy: 0.7306 - val_loss: 0.4913 - val_accuracy: 0.7649\n", 379 | "\n", 380 | "Epoch 00001: val_accuracy improved from -inf to 0.76489, saving model to checkpoints/model-01-0.76.hdf5\n", 381 | "Epoch 2/1000\n", 382 | "162/162 [==============================] - 1s 6ms/step - loss: 0.4982 - accuracy: 0.7580 - val_loss: 0.5027 - val_accuracy: 0.7401\n", 383 | "\n", 384 | "Epoch 00002: val_accuracy did not improve from 0.76489\n", 385 | "Epoch 3/1000\n", 386 | "162/162 [==============================] - 1s 6ms/step - loss: 0.4880 - accuracy: 0.7689 - val_loss: 0.4774 - val_accuracy: 0.7703\n", 387 | "\n", 388 | "Epoch 00003: val_accuracy improved from 0.76489 to 0.77030, saving model to checkpoints/model-03-0.77.hdf5\n", 389 | "Epoch 4/1000\n", 390 | "162/162 [==============================] - 1s 6ms/step - loss: 0.4739 - accuracy: 0.7749 - val_loss: 0.4809 - val_accuracy: 0.7564\n", 391 | "\n", 392 | "Epoch 00004: val_accuracy did not improve from 0.77030\n", 393 | "Epoch 5/1000\n", 394 | "162/162 [==============================] - 1s 6ms/step - loss: 0.4681 - accuracy: 0.7791 - val_loss: 0.4767 - val_accuracy: 0.7657\n", 395 | "\n", 396 | "Epoch 00005: val_accuracy did not improve from 0.77030\n", 397 | "Epoch 
6/1000\n", 398 | "162/162 [==============================] - 1s 7ms/step - loss: 0.4595 - accuracy: 0.7824 - val_loss: 0.4660 - val_accuracy: 0.7688\n", 399 | "\n", 400 | "Epoch 00006: val_accuracy did not improve from 0.77030\n", 401 | "Epoch 7/1000\n", 402 | "162/162 [==============================] - 1s 7ms/step - loss: 0.4533 - accuracy: 0.7872 - val_loss: 0.4647 - val_accuracy: 0.7780\n", 403 | "\n", 404 | "Epoch 00007: val_accuracy improved from 0.77030 to 0.77804, saving model to checkpoints/model-07-0.78.hdf5\n", 405 | "Epoch 8/1000\n", 406 | "162/162 [==============================] - 1s 6ms/step - loss: 0.4464 - accuracy: 0.7894 - val_loss: 0.4621 - val_accuracy: 0.7804\n", 407 | "\n", 408 | "Epoch 00008: val_accuracy improved from 0.77804 to 0.78036, saving model to checkpoints/model-08-0.78.hdf5\n", 409 | "Epoch 9/1000\n", 410 | "162/162 [==============================] - 1s 6ms/step - loss: 0.4398 - accuracy: 0.7956 - val_loss: 0.4605 - val_accuracy: 0.7780\n", 411 | "\n", 412 | "Epoch 00009: val_accuracy did not improve from 0.78036\n", 413 | "Epoch 10/1000\n", 414 | "162/162 [==============================] - 1s 6ms/step - loss: 0.4359 - accuracy: 0.7986 - val_loss: 0.4728 - val_accuracy: 0.7773\n", 415 | "\n", 416 | "Epoch 00010: val_accuracy did not improve from 0.78036\n", 417 | "Epoch 11/1000\n", 418 | "162/162 [==============================] - 1s 7ms/step - loss: 0.4287 - accuracy: 0.8035 - val_loss: 0.4670 - val_accuracy: 0.7811\n", 419 | "\n", 420 | "Epoch 00011: val_accuracy improved from 0.78036 to 0.78113, saving model to checkpoints/model-11-0.78.hdf5\n", 421 | "Epoch 12/1000\n", 422 | "162/162 [==============================] - 1s 6ms/step - loss: 0.4218 - accuracy: 0.8095 - val_loss: 0.4602 - val_accuracy: 0.7842\n", 423 | "\n", 424 | "Epoch 00012: val_accuracy improved from 0.78113 to 0.78422, saving model to checkpoints/model-12-0.78.hdf5\n", 425 | "Epoch 13/1000\n", 426 | "162/162 [==============================] - 1s 6ms/step - 
loss: 0.4149 - accuracy: 0.8135 - val_loss: 0.4902 - val_accuracy: 0.7711\n", 427 | "\n", 428 | "Epoch 00013: val_accuracy did not improve from 0.78422\n", 429 | "Epoch 14/1000\n", 430 | "162/162 [==============================] - 1s 6ms/step - loss: 0.4150 - accuracy: 0.8064 - val_loss: 0.4626 - val_accuracy: 0.7796\n", 431 | "\n", 432 | "Epoch 00014: val_accuracy did not improve from 0.78422\n", 433 | "Epoch 15/1000\n", 434 | "162/162 [==============================] - 1s 7ms/step - loss: 0.4054 - accuracy: 0.8164 - val_loss: 0.4704 - val_accuracy: 0.7858\n", 435 | "\n", 436 | "Epoch 00015: val_accuracy improved from 0.78422 to 0.78577, saving model to checkpoints/model-15-0.79.hdf5\n", 437 | "Epoch 16/1000\n", 438 | "162/162 [==============================] - 1s 6ms/step - loss: 0.4026 - accuracy: 0.8188 - val_loss: 0.4716 - val_accuracy: 0.7734\n", 439 | "\n", 440 | "Epoch 00016: val_accuracy did not improve from 0.78577\n", 441 | "Epoch 17/1000\n", 442 | "162/162 [==============================] - 1s 7ms/step - loss: 0.3943 - accuracy: 0.8215 - val_loss: 0.4634 - val_accuracy: 0.7796\n", 443 | "\n", 444 | "Epoch 00017: val_accuracy did not improve from 0.78577\n", 445 | "Epoch 18/1000\n", 446 | "162/162 [==============================] - 1s 7ms/step - loss: 0.3902 - accuracy: 0.8203 - val_loss: 0.4661 - val_accuracy: 0.7881\n", 447 | "\n", 448 | "Epoch 00018: val_accuracy improved from 0.78577 to 0.78809, saving model to checkpoints/model-18-0.79.hdf5\n", 449 | "Epoch 19/1000\n", 450 | "162/162 [==============================] - 1s 6ms/step - loss: 0.3837 - accuracy: 0.8267 - val_loss: 0.4740 - val_accuracy: 0.7819\n", 451 | "\n", 452 | "Epoch 00019: val_accuracy did not improve from 0.78809\n", 453 | "Epoch 20/1000\n", 454 | "162/162 [==============================] - 1s 6ms/step - loss: 0.3793 - accuracy: 0.8273 - val_loss: 0.4659 - val_accuracy: 0.7850\n", 455 | "\n", 456 | "Epoch 00020: val_accuracy did not improve from 0.78809\n", 457 | "Epoch 21/1000\n", 
458 | "162/162 [==============================] - 1s 6ms/step - loss: 0.3738 - accuracy: 0.8338 - val_loss: 0.4632 - val_accuracy: 0.7873\n", 459 | "\n", 460 | "Epoch 00021: val_accuracy did not improve from 0.78809\n", 461 | "Epoch 22/1000\n", 462 | "162/162 [==============================] - 1s 6ms/step - loss: 0.3691 - accuracy: 0.8306 - val_loss: 0.4632 - val_accuracy: 0.7842\n", 463 | "\n", 464 | "Epoch 00022: val_accuracy did not improve from 0.78809\n", 465 | "\n", 466 | "Epoch 00022: ReduceLROnPlateau reducing learning rate to 0.00010000000474974513.\n", 467 | "Epoch 23/1000\n", 468 | "162/162 [==============================] - 1s 7ms/step - loss: 0.3395 - accuracy: 0.8516 - val_loss: 0.4615 - val_accuracy: 0.7920\n", 469 | "\n", 470 | "Epoch 00023: val_accuracy improved from 0.78809 to 0.79196, saving model to checkpoints/model-23-0.79.hdf5\n", 471 | "Epoch 24/1000\n", 472 | "162/162 [==============================] - 1s 6ms/step - loss: 0.3334 - accuracy: 0.8578 - val_loss: 0.4618 - val_accuracy: 0.7958\n", 473 | "\n", 474 | "Epoch 00024: val_accuracy improved from 0.79196 to 0.79582, saving model to checkpoints/model-24-0.80.hdf5\n", 475 | "Epoch 25/1000\n", 476 | "162/162 [==============================] - 1s 6ms/step - loss: 0.3311 - accuracy: 0.8586 - val_loss: 0.4614 - val_accuracy: 0.7981\n", 477 | "\n", 478 | "Epoch 00025: val_accuracy improved from 0.79582 to 0.79814, saving model to checkpoints/model-25-0.80.hdf5\n", 479 | "Epoch 26/1000\n", 480 | "162/162 [==============================] - 1s 7ms/step - loss: 0.3290 - accuracy: 0.8602 - val_loss: 0.4648 - val_accuracy: 0.7966\n", 481 | "\n", 482 | "Epoch 00026: val_accuracy did not improve from 0.79814\n", 483 | "Epoch 27/1000\n", 484 | "162/162 [==============================] - 1s 6ms/step - loss: 0.3281 - accuracy: 0.8590 - val_loss: 0.4631 - val_accuracy: 0.7912\n", 485 | "\n", 486 | "Epoch 00027: val_accuracy did not improve from 0.79814\n", 487 | "Epoch 28/1000\n", 488 | "162/162 
[==============================] - 1s 7ms/step - loss: 0.3265 - accuracy: 0.8594 - val_loss: 0.4626 - val_accuracy: 0.7935\n", 489 | "\n", 490 | "Epoch 00028: val_accuracy did not improve from 0.79814\n", 491 | "Epoch 29/1000\n", 492 | "162/162 [==============================] - 1s 6ms/step - loss: 0.3252 - accuracy: 0.8627 - val_loss: 0.4637 - val_accuracy: 0.7958\n", 493 | "\n", 494 | "Epoch 00029: val_accuracy did not improve from 0.79814\n", 495 | "Epoch 30/1000\n", 496 | "162/162 [==============================] - 1s 6ms/step - loss: 0.3239 - accuracy: 0.8609 - val_loss: 0.4650 - val_accuracy: 0.7935\n", 497 | "\n", 498 | "Epoch 00030: val_accuracy did not improve from 0.79814\n", 499 | "Epoch 31/1000\n", 500 | "162/162 [==============================] - 1s 6ms/step - loss: 0.3230 - accuracy: 0.8623 - val_loss: 0.4667 - val_accuracy: 0.7943\n", 501 | "\n", 502 | "Epoch 00031: val_accuracy did not improve from 0.79814\n", 503 | "Epoch 32/1000\n", 504 | "162/162 [==============================] - 1s 7ms/step - loss: 0.3218 - accuracy: 0.8634 - val_loss: 0.4705 - val_accuracy: 0.7920\n", 505 | "\n", 506 | "Epoch 00032: val_accuracy did not improve from 0.79814\n", 507 | "\n", 508 | "Epoch 00032: ReduceLROnPlateau reducing learning rate to 1.0000000474974514e-05.\n", 509 | "Epoch 33/1000\n", 510 | "162/162 [==============================] - 1s 7ms/step - loss: 0.3187 - accuracy: 0.8613 - val_loss: 0.4664 - val_accuracy: 0.7951\n", 511 | "\n", 512 | "Epoch 00033: val_accuracy did not improve from 0.79814\n", 513 | "Epoch 34/1000\n", 514 | "162/162 [==============================] - 1s 6ms/step - loss: 0.3176 - accuracy: 0.8652 - val_loss: 0.4655 - val_accuracy: 0.7958\n", 515 | "\n", 516 | "Epoch 00034: val_accuracy did not improve from 0.79814\n", 517 | "Epoch 35/1000\n", 518 | "162/162 [==============================] - 1s 6ms/step - loss: 0.3174 - accuracy: 0.8665 - val_loss: 0.4651 - val_accuracy: 0.7958\n", 519 | "\n", 520 | "Epoch 00035: val_accuracy did not 
improve from 0.79814\n", 521 | "Epoch 00035: early stopping\n" 522 | ] 523 | } 524 | ], 525 | "source": [ 526 | "model = tf.keras.Sequential([\n", 527 | " tf.keras.layers.Dense(64, activation='relu'),\n", 528 | " tf.keras.layers.Dense(64, activation='relu'),\n", 529 | " tf.keras.layers.Dense(64, activation='relu'),\n", 530 | " tf.keras.layers.Dense(1, activation='sigmoid')\n", 531 | "])\n", 532 | "\n", 533 | "model.compile(\n", 534 | " loss=tf.keras.losses.binary_crossentropy,\n", 535 | " optimizer=tf.keras.optimizers.Adam(),\n", 536 | " metrics=[tf.keras.metrics.BinaryAccuracy(name='accuracy')]\n", 537 | ")\n", 538 | "\n", 539 | "history = model.fit(\n", 540 | " X_train_scaled, \n", 541 | " y_train, \n", 542 | " epochs=1000,\n", 543 | " validation_data=(X_test_scaled, y_test),\n", 544 | " callbacks=[cb_checkpoint, cb_reducelr, cb_earlystop, cb_csvlogger]\n", 545 | ")" 546 | ] 547 | }, 548 | { 549 | "cell_type": "markdown", 550 | "id": "98ea578c-ef61-44eb-a243-e4f5092a2e20", 551 | "metadata": {}, 552 | "source": [ 553 | "
\n", 554 | "\n", 555 | "## Final evaluation\n", 556 | "- You can now load the best model - it will be the one with the highest epoch number" 557 | ] 558 | }, 559 | { 560 | "cell_type": "code", 561 | "execution_count": 9, 562 | "id": "970d3a38-1915-457f-b9bb-e683aa4782d7", 563 | "metadata": {}, 564 | "outputs": [], 565 | "source": [ 566 | "best_model = tf.keras.models.load_model('checkpoints/model-25-0.80.hdf5')" 567 | ] 568 | }, 569 | { 570 | "cell_type": "markdown", 571 | "id": "d946ea80-32c5-4a0d-9ab9-195461f77167", 572 | "metadata": {}, 573 | "source": [ 574 | "- Save yourself some time by calling `predict_classes()` instead of `predict()`\n", 575 | "- It assigns the classes automatically - you don't have to calculate them from probabilities" 576 | ] 577 | }, 578 | { 579 | "cell_type": "code", 580 | "execution_count": 10, 581 | "id": "2e1d1c69-33ef-4749-b3c8-6ede188a1f56", 582 | "metadata": {}, 583 | "outputs": [ 584 | { 585 | "data": { 586 | "text/plain": [ 587 | "array([1, 1, 0, ..., 1, 0, 1], dtype=int32)" 588 | ] 589 | }, 590 | "execution_count": 10, 591 | "metadata": {}, 592 | "output_type": "execute_result" 593 | } 594 | ], 595 | "source": [ 596 | "best_model_preds = np.ravel(best_model.predict_classes(X_test_scaled))\n", 597 | "best_model_preds" 598 | ] 599 | }, 600 | { 601 | "cell_type": "markdown", 602 | "id": "bc6d6581-8b03-4e7b-b591-dd9444d6a3b1", 603 | "metadata": {}, 604 | "source": [ 605 | "- Evaluate as you normally would" 606 | ] 607 | }, 608 | { 609 | "cell_type": "code", 610 | "execution_count": 11, 611 | "id": "30bacbff-f634-43af-b79c-171559aca9d7", 612 | "metadata": {}, 613 | "outputs": [ 614 | { 615 | "name": "stdout", 616 | "output_type": "stream", 617 | "text": [ 618 | "0.7981438515081206\n" 619 | ] 620 | } 621 | ], 622 | "source": [ 623 | "from sklearn.metrics import accuracy_score\n", 624 | "\n", 625 | "print(accuracy_score(y_test, best_model_preds))" 626 | ] 627 | } 628 | ], 629 | "metadata": { 630 | "kernelspec": { 631 | 
"display_name": "tf", 632 | "language": "python", 633 | "name": "env_tensorflow" 634 | }, 635 | "language_info": { 636 | "codemirror_mode": { 637 | "name": "ipython", 638 | "version": 3 639 | }, 640 | "file_extension": ".py", 641 | "mimetype": "text/x-python", 642 | "name": "python", 643 | "nbconvert_exporter": "python", 644 | "pygments_lexer": "ipython3", 645 | "version": "3.9.7" 646 | } 647 | }, 648 | "nbformat": 4, 649 | "nbformat_minor": 5 650 | } 651 | -------------------------------------------------------------------------------- /005_Optimize_Neural_Network_Architecture.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "3e2f0867-f2d1-4fff-81fd-b3b1c0b7f045", 6 | "metadata": {}, 7 | "source": [ 8 | "# Data preparation\n", 9 | "- https://www.kaggle.com/shelvigarg/wine-quality-dataset\n", 10 | "- Refer to https://github.com/better-data-science/TensorFlow/blob/main/003_TensorFlow_Classification.ipynb for detailed preparation instructions" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 1, 16 | "id": "7d02ea72-99ff-42f8-a7f6-533ba60c833c", 17 | "metadata": {}, 18 | "outputs": [ 19 | { 20 | "data": { 21 | "text/html": [ 22 | "
\n", 23 | "\n", 36 | "\n", 37 | " \n", 38 | " \n", 39 | " \n", 40 | " \n", 41 | " \n", 42 | " \n", 43 | " \n", 44 | " \n", 45 | " \n", 46 | " \n", 47 | " \n", 48 | " \n", 49 | " \n", 50 | " \n", 51 | " \n", 52 | " \n", 53 | " \n", 54 | " \n", 55 | " \n", 56 | " \n", 57 | " \n", 58 | " \n", 59 | " \n", 60 | " \n", 61 | " \n", 62 | " \n", 63 | " \n", 64 | " \n", 65 | " \n", 66 | " \n", 67 | " \n", 68 | " \n", 69 | " \n", 70 | " \n", 71 | " \n", 72 | " \n", 73 | " \n", 74 | " \n", 75 | " \n", 76 | " \n", 77 | " \n", 78 | " \n", 79 | " \n", 80 | " \n", 81 | " \n", 82 | " \n", 83 | " \n", 84 | " \n", 85 | " \n", 86 | " \n", 87 | " \n", 88 | " \n", 89 | " \n", 90 | " \n", 91 | " \n", 92 | " \n", 93 | " \n", 94 | " \n", 95 | " \n", 96 | " \n", 97 | " \n", 98 | " \n", 99 | " \n", 100 | " \n", 101 | " \n", 102 | " \n", 103 | " \n", 104 | " \n", 105 | " \n", 106 | " \n", 107 | " \n", 108 | " \n", 109 | " \n", 110 | " \n", 111 | " \n", 112 | " \n", 113 | " \n", 114 | " \n", 115 | " \n", 116 | " \n", 117 | " \n", 118 | " \n", 119 | " \n", 120 | " \n", 121 | " \n", 122 | " \n", 123 | " \n", 124 | " \n", 125 | " \n", 126 | " \n", 127 | " \n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | "
typefixed acidityvolatile aciditycitric acidresidual sugarchloridesfree sulfur dioxidetotal sulfur dioxidedensitypHsulphatesalcoholquality
930white7.70.170.525.90.01721.084.00.992903.140.4011.97
2161white6.80.370.284.00.03029.079.00.990003.230.4612.47
5293red12.00.370.764.20.0667.038.01.000403.220.6013.07
2323white7.60.200.341.80.04142.0148.00.993353.350.6611.16
2864white5.30.160.391.00.02840.0101.00.991563.570.5910.66
\n", 138 | "
" 139 | ], 140 | "text/plain": [ 141 | " type fixed acidity volatile acidity citric acid residual sugar \\\n", 142 | "930 white 7.7 0.17 0.52 5.9 \n", 143 | "2161 white 6.8 0.37 0.28 4.0 \n", 144 | "5293 red 12.0 0.37 0.76 4.2 \n", 145 | "2323 white 7.6 0.20 0.34 1.8 \n", 146 | "2864 white 5.3 0.16 0.39 1.0 \n", 147 | "\n", 148 | " chlorides free sulfur dioxide total sulfur dioxide density pH \\\n", 149 | "930 0.017 21.0 84.0 0.99290 3.14 \n", 150 | "2161 0.030 29.0 79.0 0.99000 3.23 \n", 151 | "5293 0.066 7.0 38.0 1.00040 3.22 \n", 152 | "2323 0.041 42.0 148.0 0.99335 3.35 \n", 153 | "2864 0.028 40.0 101.0 0.99156 3.57 \n", 154 | "\n", 155 | " sulphates alcohol quality \n", 156 | "930 0.40 11.9 7 \n", 157 | "2161 0.46 12.4 7 \n", 158 | "5293 0.60 13.0 7 \n", 159 | "2323 0.66 11.1 6 \n", 160 | "2864 0.59 10.6 6 " 161 | ] 162 | }, 163 | "execution_count": 1, 164 | "metadata": {}, 165 | "output_type": "execute_result" 166 | } 167 | ], 168 | "source": [ 169 | "import os\n", 170 | "import numpy as np\n", 171 | "import pandas as pd\n", 172 | "import itertools\n", 173 | "import warnings\n", 174 | "os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' \n", 175 | "warnings.filterwarnings('ignore')\n", 176 | "\n", 177 | "df = pd.read_csv('data/winequalityN.csv')\n", 178 | "df.sample(5)" 179 | ] 180 | }, 181 | { 182 | "cell_type": "code", 183 | "execution_count": 2, 184 | "id": "cfcaaa20-6c4a-433f-b634-2cef2ef667e5", 185 | "metadata": {}, 186 | "outputs": [], 187 | "source": [ 188 | "from sklearn.model_selection import train_test_split\n", 189 | "from sklearn.preprocessing import StandardScaler\n", 190 | "\n", 191 | "\n", 192 | "# Prepare the data\n", 193 | "df = df.dropna()\n", 194 | "df['is_white_wine'] = [1 if typ == 'white' else 0 for typ in df['type']]\n", 195 | "df['is_good_wine'] = [1 if quality >= 6 else 0 for quality in df['quality']]\n", 196 | "df.drop(['type', 'quality'], axis=1, inplace=True)\n", 197 | "\n", 198 | "# Train/test split\n", 199 | "X = df.drop('is_good_wine', 
axis=1)\n", 200 | "y = df['is_good_wine']\n", 201 | "X_train, X_test, y_train, y_test = train_test_split(\n", 202 | " X, y, \n", 203 | " test_size=0.2, random_state=42\n", 204 | ")\n", 205 | "\n", 206 | "# Scaling\n", 207 | "scaler = StandardScaler()\n", 208 | "X_train_scaled = scaler.fit_transform(X_train)\n", 209 | "X_test_scaled = scaler.transform(X_test)" 210 | ] 211 | }, 212 | { 213 | "cell_type": "markdown", 214 | "id": "177edf80-99c0-4395-aef4-7b44fa3e11ac", 215 | "metadata": {}, 216 | "source": [ 217 | "
\n", 218 | "\n", 219 | "# How will we approach optimization" 220 | ] 221 | }, 222 | { 223 | "cell_type": "code", 224 | "execution_count": 3, 225 | "id": "44c10656-7d55-44c2-a4d6-f3cd152d9408", 226 | "metadata": {}, 227 | "outputs": [ 228 | { 229 | "name": "stdout", 230 | "output_type": "stream", 231 | "text": [ 232 | "Init Plugin\n", 233 | "Init Graph Optimizer\n", 234 | "Init Kernel\n" 235 | ] 236 | } 237 | ], 238 | "source": [ 239 | "import tensorflow as tf\n", 240 | "tf.random.set_seed(42)" 241 | ] 242 | }, 243 | { 244 | "cell_type": "markdown", 245 | "id": "7defa96c-6231-44af-b798-c181959c4f15", 246 | "metadata": {}, 247 | "source": [ 248 | "- Let's declare some constants\n", 249 | " - We want to optimize a network with 3 hidden layers\n", 250 | " - Each hidden layer can have from 64 to 256 nodes\n", 251 | " - The step size between nodes is 64\n", 252 | " - So the possibilities are: 64, 128, 192, 256" 253 | ] 254 | }, 255 | { 256 | "cell_type": "code", 257 | "execution_count": 4, 258 | "id": "81c3c263-e03b-44c2-a401-e986eda4af6c", 259 | "metadata": {}, 260 | "outputs": [], 261 | "source": [ 262 | "num_layers = 3\n", 263 | "min_nodes_per_layer, max_nodes_per_layer = 64, 256\n", 264 | "node_step_size = 64" 265 | ] 266 | }, 267 | { 268 | "cell_type": "markdown", 269 | "id": "af1b807e-1d23-4b86-a469-99e902a12289", 270 | "metadata": {}, 271 | "source": [ 272 | "- Possibilities:" 273 | ] 274 | }, 275 | { 276 | "cell_type": "code", 277 | "execution_count": 5, 278 | "id": "b085f18a-7b3c-4cdc-b77e-50bed49e8d4e", 279 | "metadata": {}, 280 | "outputs": [ 281 | { 282 | "data": { 283 | "text/plain": [ 284 | "[64, 128, 192, 256]" 285 | ] 286 | }, 287 | "execution_count": 5, 288 | "metadata": {}, 289 | "output_type": "execute_result" 290 | } 291 | ], 292 | "source": [ 293 | "node_options = list(range(\n", 294 | " min_nodes_per_layer, \n", 295 | " max_nodes_per_layer + 1, \n", 296 | " node_step_size\n", 297 | "))\n", 298 | "node_options" 299 | ] 300 | }, 301 | { 302 | 
"cell_type": "markdown", 303 | "id": "09825b27-1194-44c2-8ff6-b3538036053b", 304 | "metadata": {}, 305 | "source": [ 306 | "- Taking them to two layers:" 307 | ] 308 | }, 309 | { 310 | "cell_type": "code", 311 | "execution_count": 6, 312 | "id": "8e81a831-0431-45bc-b38f-49dc5ba62441", 313 | "metadata": {}, 314 | "outputs": [ 315 | { 316 | "data": { 317 | "text/plain": [ 318 | "[[64, 128, 192, 256], [64, 128, 192, 256]]" 319 | ] 320 | }, 321 | "execution_count": 6, 322 | "metadata": {}, 323 | "output_type": "execute_result" 324 | } 325 | ], 326 | "source": [ 327 | "two_layer_possibilities = [node_options, node_options]\n", 328 | "two_layer_possibilities" 329 | ] 330 | }, 331 | { 332 | "cell_type": "markdown", 333 | "id": "4df52357-5bf9-4df8-8797-ff4c09455be3", 334 | "metadata": {}, 335 | "source": [ 336 | "- And now it's just a task of calculating all permutations between these two lists:" 337 | ] 338 | }, 339 | { 340 | "cell_type": "code", 341 | "execution_count": 7, 342 | "id": "a442b96c-53a7-40d2-9997-cd12b771b8b6", 343 | "metadata": {}, 344 | "outputs": [ 345 | { 346 | "data": { 347 | "text/plain": [ 348 | "[(64, 64),\n", 349 | " (64, 128),\n", 350 | " (64, 192),\n", 351 | " (64, 256),\n", 352 | " (128, 64),\n", 353 | " (128, 128),\n", 354 | " (128, 192),\n", 355 | " (128, 256),\n", 356 | " (192, 64),\n", 357 | " (192, 128),\n", 358 | " (192, 192),\n", 359 | " (192, 256),\n", 360 | " (256, 64),\n", 361 | " (256, 128),\n", 362 | " (256, 192),\n", 363 | " (256, 256)]" 364 | ] 365 | }, 366 | "execution_count": 7, 367 | "metadata": {}, 368 | "output_type": "execute_result" 369 | } 370 | ], 371 | "source": [ 372 | "list(itertools.product(*two_layer_possibilities))" 373 | ] 374 | }, 375 | { 376 | "cell_type": "markdown", 377 | "id": "02ecc297-4c38-455c-ba07-16408fcf026c", 378 | "metadata": {}, 379 | "source": [ 380 | "- We want to optimize a 3-layer-deep neural network, so we'll have a bit more possibilities:" 381 | ] 382 | }, 383 | { 384 | "cell_type": "code", 385 | 
"execution_count": 8, 386 | "id": "22b7259c-796c-48df-b722-89e0c6d752cd", 387 | "metadata": {}, 388 | "outputs": [ 389 | { 390 | "data": { 391 | "text/plain": [ 392 | "[[64, 128, 192, 256], [64, 128, 192, 256], [64, 128, 192, 256]]" 393 | ] 394 | }, 395 | "execution_count": 8, 396 | "metadata": {}, 397 | "output_type": "execute_result" 398 | } 399 | ], 400 | "source": [ 401 | "layer_possibilities = [node_options] * num_layers\n", 402 | "layer_possibilities" 403 | ] 404 | }, 405 | { 406 | "cell_type": "markdown", 407 | "id": "6910711c-5f28-4596-a144-256dfef759ce", 408 | "metadata": {}, 409 | "source": [ 410 | "- Here are the permutations:" 411 | ] 412 | }, 413 | { 414 | "cell_type": "code", 415 | "execution_count": 9, 416 | "id": "ef365068-da6f-4653-9130-0d1c543dd6bf", 417 | "metadata": {}, 418 | "outputs": [ 419 | { 420 | "data": { 421 | "text/plain": [ 422 | "[(64, 64, 64),\n", 423 | " (64, 64, 128),\n", 424 | " (64, 64, 192),\n", 425 | " (64, 64, 256),\n", 426 | " (64, 128, 64),\n", 427 | " (64, 128, 128),\n", 428 | " (64, 128, 192),\n", 429 | " (64, 128, 256),\n", 430 | " (64, 192, 64),\n", 431 | " (64, 192, 128),\n", 432 | " (64, 192, 192),\n", 433 | " (64, 192, 256),\n", 434 | " (64, 256, 64),\n", 435 | " (64, 256, 128),\n", 436 | " (64, 256, 192),\n", 437 | " (64, 256, 256),\n", 438 | " (128, 64, 64),\n", 439 | " (128, 64, 128),\n", 440 | " (128, 64, 192),\n", 441 | " (128, 64, 256),\n", 442 | " (128, 128, 64),\n", 443 | " (128, 128, 128),\n", 444 | " (128, 128, 192),\n", 445 | " (128, 128, 256),\n", 446 | " (128, 192, 64),\n", 447 | " (128, 192, 128),\n", 448 | " (128, 192, 192),\n", 449 | " (128, 192, 256),\n", 450 | " (128, 256, 64),\n", 451 | " (128, 256, 128),\n", 452 | " (128, 256, 192),\n", 453 | " (128, 256, 256),\n", 454 | " (192, 64, 64),\n", 455 | " (192, 64, 128),\n", 456 | " (192, 64, 192),\n", 457 | " (192, 64, 256),\n", 458 | " (192, 128, 64),\n", 459 | " (192, 128, 128),\n", 460 | " (192, 128, 192),\n", 461 | " (192, 128, 256),\n", 462 | " 
(192, 192, 64),\n", 463 | " (192, 192, 128),\n", 464 | " (192, 192, 192),\n", 465 | " (192, 192, 256),\n", 466 | " (192, 256, 64),\n", 467 | " (192, 256, 128),\n", 468 | " (192, 256, 192),\n", 469 | " (192, 256, 256),\n", 470 | " (256, 64, 64),\n", 471 | " (256, 64, 128),\n", 472 | " (256, 64, 192),\n", 473 | " (256, 64, 256),\n", 474 | " (256, 128, 64),\n", 475 | " (256, 128, 128),\n", 476 | " (256, 128, 192),\n", 477 | " (256, 128, 256),\n", 478 | " (256, 192, 64),\n", 479 | " (256, 192, 128),\n", 480 | " (256, 192, 192),\n", 481 | " (256, 192, 256),\n", 482 | " (256, 256, 64),\n", 483 | " (256, 256, 128),\n", 484 | " (256, 256, 192),\n", 485 | " (256, 256, 256)]" 486 | ] 487 | }, 488 | "execution_count": 9, 489 | "metadata": {}, 490 | "output_type": "execute_result" 491 | } 492 | ], 493 | "source": [ 494 | "layer_node_permutations = list(itertools.product(*layer_possibilities))\n", 495 | "layer_node_permutations" 496 | ] 497 | }, 498 | { 499 | "cell_type": "markdown", 500 | "id": "25982c3a-f544-4165-bb25-b8fd8c473886", 501 | "metadata": {}, 502 | "source": [ 503 | "We'll iterate over the permutations and then iterate again over the values of individual permutation to get the node count for each hidden layer:" 504 | ] 505 | }, 506 | { 507 | "cell_type": "code", 508 | "execution_count": 10, 509 | "id": "fe0e2757-9d3a-4705-9ff5-c5827c29460b", 510 | "metadata": {}, 511 | "outputs": [ 512 | { 513 | "name": "stdout", 514 | "output_type": "stream", 515 | "text": [ 516 | "64\n", 517 | "64\n", 518 | "64\n", 519 | "\n", 520 | "64\n", 521 | "64\n", 522 | "128\n", 523 | "\n" 524 | ] 525 | } 526 | ], 527 | "source": [ 528 | "for permutation in layer_node_permutations[:2]:\n", 529 | " for nodes_at_layer in permutation:\n", 530 | " print(nodes_at_layer)\n", 531 | " print()" 532 | ] 533 | }, 534 | { 535 | "cell_type": "markdown", 536 | "id": "3921118a-050f-4d69-8518-df588b0de4f3", 537 | "metadata": {}, 538 | "source": [ 539 | "- We'll create a new `Sequential` model at each 
iteration\n", 540 |     "  - And add an `InputLayer` to it with a shape of `(12,)` (the number of columns in our dataset)\n", 541 |     "- Then, we'll iterate over the items in a single permutation and add a `Dense` layer to the model with the current number of nodes\n", 542 |     "- Finally, we'll add a `Dense` output layer\n", 543 |     "- We'll also set a name for the model so it's easier to compare them later:" 544 |    ] 545 |   }, 546 |   { 547 |    "cell_type": "code", 548 |    "execution_count": 11, 549 |    "id": "d664c48c-1698-4c5b-aa54-a420ff6d734a", 550 |    "metadata": {}, 551 |    "outputs": [ 552 |     { 553 |      "name": "stdout", 554 |      "output_type": "stream", 555 |      "text": [ 556 |       "Metal device set to: Apple M1\n", 557 |       "\n", 558 |       "systemMemory: 8.00 GB\n", 559 |       "maxCacheSize: 2.67 GB\n", 560 |       "\n" 561 |      ] 562 |     } 563 |    ], 564 |    "source": [ 565 |     "models = []\n", 566 |     "\n", 567 |     "for permutation in layer_node_permutations:\n", 568 |     "    model = tf.keras.Sequential()\n", 569 |     "    model.add(tf.keras.layers.InputLayer(input_shape=(12,)))\n", 570 |     "    model_name = ''\n", 571 |     "    \n", 572 |     "    for nodes_at_layer in permutation:\n", 573 |     "        model.add(tf.keras.layers.Dense(nodes_at_layer, activation='relu'))\n", 574 |     "        model_name += f'dense{nodes_at_layer}_'\n", 575 |     "    \n", 576 |     "    model.add(tf.keras.layers.Dense(1, activation='sigmoid'))\n", 577 |     "    model._name = model_name[:-1]\n", 578 |     "    \n", 579 |     "    models.append(model)" 580 |    ] 581 |   }, 582 |   { 583 |    "cell_type": "markdown", 584 |    "id": "a7e85ed4-0bef-4969-9f33-86d6b8eb416e", 585 |    "metadata": {}, 586 |    "source": [ 587 |     "- Here's what a single model looks like:" 588 |    ] 589 |   }, 590 |   { 591 |    "cell_type": "code", 592 |    "execution_count": 12, 593 |    "id": "bcc34acb-44c5-4b06-ab11-2de3a83f0c0a", 594 |    "metadata": {}, 595 |    "outputs": [ 596 |     { 597 |      "name": "stdout", 598 |      "output_type": "stream", 599 |      "text": [ 600 |       "Model: \"dense64_dense64_dense64\"\n", 601 |       "_________________________________________________________________\n", 602 |       
"Layer (type) Output Shape Param # \n", 603 | "=================================================================\n", 604 | "dense (Dense) (None, 64) 832 \n", 605 | "_________________________________________________________________\n", 606 | "dense_1 (Dense) (None, 64) 4160 \n", 607 | "_________________________________________________________________\n", 608 | "dense_2 (Dense) (None, 64) 4160 \n", 609 | "_________________________________________________________________\n", 610 | "dense_3 (Dense) (None, 1) 65 \n", 611 | "=================================================================\n", 612 | "Total params: 9,217\n", 613 | "Trainable params: 9,217\n", 614 | "Non-trainable params: 0\n", 615 | "_________________________________________________________________\n" 616 | ] 617 | } 618 | ], 619 | "source": [ 620 | "models[0].summary()" 621 | ] 622 | }, 623 | { 624 | "cell_type": "markdown", 625 | "id": "46c3464d-f777-49f2-a1de-21f1dc0f6c5c", 626 | "metadata": {}, 627 | "source": [ 628 | "- Not too bad, right?\n", 629 | "- Let's wrap all this logic into a single function next.\n", 630 | "\n", 631 | "

\n", 632 | "\n", 633 | "# Get architecture possibilities from a function\n", 634 | "- This one will have a lot of parameters\n", 635 | "- But it doesn't do anything we haven't discussed so far:" 636 | ] 637 | }, 638 | { 639 | "cell_type": "code", 640 | "execution_count": 13, 641 | "id": "0ca95622-7d80-4081-af90-3e5202d08535", 642 | "metadata": {}, 643 | "outputs": [], 644 | "source": [ 645 | "def get_models(num_layers: int,\n", 646 | " min_nodes_per_layer: int,\n", 647 | " max_nodes_per_layer: int,\n", 648 | " node_step_size: int,\n", 649 | " input_shape: tuple,\n", 650 | " hidden_layer_activation: str = 'relu',\n", 651 | " num_nodes_at_output: int = 1,\n", 652 | " output_layer_activation: str = 'sigmoid') -> list:\n", 653 | " \n", 654 | " node_options = list(range(min_nodes_per_layer, max_nodes_per_layer + 1, node_step_size))\n", 655 | " layer_possibilities = [node_options] * num_layers\n", 656 | " layer_node_permutations = list(itertools.product(*layer_possibilities))\n", 657 | " \n", 658 | " models = []\n", 659 | " for permutation in layer_node_permutations:\n", 660 | " model = tf.keras.Sequential()\n", 661 | " model.add(tf.keras.layers.InputLayer(input_shape=input_shape))\n", 662 | " model_name = ''\n", 663 | "\n", 664 | " for nodes_at_layer in permutation:\n", 665 | " model.add(tf.keras.layers.Dense(nodes_at_layer, activation=hidden_layer_activation))\n", 666 | " model_name += f'dense{nodes_at_layer}_'\n", 667 | "\n", 668 | " model.add(tf.keras.layers.Dense(num_nodes_at_output, activation=output_layer_activation))\n", 669 | " model._name = model_name[:-1]\n", 670 | " models.append(model)\n", 671 | " \n", 672 | " return models" 673 | ] 674 | }, 675 | { 676 | "cell_type": "markdown", 677 | "id": "970923d9-85ed-40d4-8733-5d0dd74f6ecc", 678 | "metadata": {}, 679 | "source": [ 680 | "- Let's test it:" 681 | ] 682 | }, 683 | { 684 | "cell_type": "code", 685 | "execution_count": 14, 686 | "id": "714f55ab-8555-4fcd-b5fb-7ed875fa52b2", 687 | "metadata": {}, 688 | 
"outputs": [], 689 | "source": [ 690 | "all_models = get_models(\n", 691 | " num_layers=3, \n", 692 | " min_nodes_per_layer=64, \n", 693 | " max_nodes_per_layer=256, \n", 694 | " node_step_size=64, \n", 695 | " input_shape=(12,)\n", 696 | ")" 697 | ] 698 | }, 699 | { 700 | "cell_type": "markdown", 701 | "id": "3f798f64-f3bd-4047-8a25-cc57b4c17da5", 702 | "metadata": {}, 703 | "source": [ 704 | "- Let's print the names and the count:" 705 | ] 706 | }, 707 | { 708 | "cell_type": "code", 709 | "execution_count": 15, 710 | "id": "be548fc2-940e-44e8-ac9f-a5684d33e5a0", 711 | "metadata": {}, 712 | "outputs": [ 713 | { 714 | "name": "stdout", 715 | "output_type": "stream", 716 | "text": [ 717 | "#Models = 64\n", 718 | "\n", 719 | "dense64_dense64_dense64\n", 720 | "dense64_dense64_dense128\n", 721 | "dense64_dense64_dense192\n", 722 | "dense64_dense64_dense256\n", 723 | "dense64_dense128_dense64\n", 724 | "dense64_dense128_dense128\n", 725 | "dense64_dense128_dense192\n", 726 | "dense64_dense128_dense256\n", 727 | "dense64_dense192_dense64\n", 728 | "dense64_dense192_dense128\n", 729 | "dense64_dense192_dense192\n", 730 | "dense64_dense192_dense256\n", 731 | "dense64_dense256_dense64\n", 732 | "dense64_dense256_dense128\n", 733 | "dense64_dense256_dense192\n", 734 | "dense64_dense256_dense256\n", 735 | "dense128_dense64_dense64\n", 736 | "dense128_dense64_dense128\n", 737 | "dense128_dense64_dense192\n", 738 | "dense128_dense64_dense256\n", 739 | "dense128_dense128_dense64\n", 740 | "dense128_dense128_dense128\n", 741 | "dense128_dense128_dense192\n", 742 | "dense128_dense128_dense256\n", 743 | "dense128_dense192_dense64\n", 744 | "dense128_dense192_dense128\n", 745 | "dense128_dense192_dense192\n", 746 | "dense128_dense192_dense256\n", 747 | "dense128_dense256_dense64\n", 748 | "dense128_dense256_dense128\n", 749 | "dense128_dense256_dense192\n", 750 | "dense128_dense256_dense256\n", 751 | "dense192_dense64_dense64\n", 752 | "dense192_dense64_dense128\n", 753 | 
"dense192_dense64_dense192\n", 754 | "dense192_dense64_dense256\n", 755 | "dense192_dense128_dense64\n", 756 | "dense192_dense128_dense128\n", 757 | "dense192_dense128_dense192\n", 758 | "dense192_dense128_dense256\n", 759 | "dense192_dense192_dense64\n", 760 | "dense192_dense192_dense128\n", 761 | "dense192_dense192_dense192\n", 762 | "dense192_dense192_dense256\n", 763 | "dense192_dense256_dense64\n", 764 | "dense192_dense256_dense128\n", 765 | "dense192_dense256_dense192\n", 766 | "dense192_dense256_dense256\n", 767 | "dense256_dense64_dense64\n", 768 | "dense256_dense64_dense128\n", 769 | "dense256_dense64_dense192\n", 770 | "dense256_dense64_dense256\n", 771 | "dense256_dense128_dense64\n", 772 | "dense256_dense128_dense128\n", 773 | "dense256_dense128_dense192\n", 774 | "dense256_dense128_dense256\n", 775 | "dense256_dense192_dense64\n", 776 | "dense256_dense192_dense128\n", 777 | "dense256_dense192_dense192\n", 778 | "dense256_dense192_dense256\n", 779 | "dense256_dense256_dense64\n", 780 | "dense256_dense256_dense128\n", 781 | "dense256_dense256_dense192\n", 782 | "dense256_dense256_dense256\n" 783 | ] 784 | } 785 | ], 786 | "source": [ 787 | "print(f'#Models = {len(all_models)}')\n", 788 | "print()\n", 789 | "\n", 790 | "for model in all_models:\n", 791 | " print(model.name)" 792 | ] 793 | }, 794 | { 795 | "cell_type": "markdown", 796 | "id": "5433d59e-e36a-4866-8887-391dcb497bdd", 797 | "metadata": {}, 798 | "source": [ 799 | "- So we have 64 models in total\n", 800 | "- It will take some time to optimize\n", 801 | "- Let's declare another function for that\n", 802 | "\n", 803 | "

\n", 804 | "\n", 805 | "# Model optimization function\n", 806 | "- This one will accept the list of models, training and testing sets (both features and the target), and optionally a number of epochs and verbosity\n", 807 | " - It's advised to set verbosity to 0 so you don't get overwhelmed with the console output" 808 | ] 809 | }, 810 | { 811 | "cell_type": "code", 812 | "execution_count": 16, 813 | "id": "a7ad3417-a53b-44ac-a37d-5a911ffc7b65", 814 | "metadata": {}, 815 | "outputs": [], 816 | "source": [ 817 | "from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score" 818 | ] 819 | }, 820 | { 821 | "cell_type": "code", 822 | "execution_count": 18, 823 | "id": "aa4c1af5-8c67-4c19-9fa8-63926ea2b6c6", 824 | "metadata": {}, 825 | "outputs": [], 826 | "source": [ 827 | "def optimize(models: list,\n", 828 | " X_train: np.array,\n", 829 | " y_train: np.array,\n", 830 | " X_test: np.array,\n", 831 | " y_test: np.array,\n", 832 | " epochs: int = 50,\n", 833 | " verbose: int = 0) -> pd.DataFrame:\n", 834 | " \n", 835 | " # We'll store the results here\n", 836 | " results = []\n", 837 | " \n", 838 | " def train(model: tf.keras.Sequential) -> dict:\n", 839 | " # Change this however you want\n", 840 | " model.compile(\n", 841 | " loss=tf.keras.losses.binary_crossentropy,\n", 842 | " optimizer=tf.keras.optimizers.Adam(),\n", 843 | " metrics=[\n", 844 | " tf.keras.metrics.BinaryAccuracy(name='accuracy')\n", 845 | " ]\n", 846 | " )\n", 847 | " \n", 848 | " # Train the model\n", 849 | " model.fit(\n", 850 | " X_train,\n", 851 | " y_train,\n", 852 | " epochs=epochs,\n", 853 | " verbose=verbose\n", 854 | " )\n", 855 | " \n", 856 | " # Make predictions on the test set\n", 857 | " preds = model.predict(X_test)\n", 858 | " prediction_classes = [1 if prob > 0.5 else 0 for prob in np.ravel(preds)]\n", 859 | " \n", 860 | " # Return evaluation metrics on the test set\n", 861 | " return {\n", 862 | " 'model_name': model.name,\n", 863 | " 'test_accuracy': 
accuracy_score(y_test, prediction_classes),\n", 864 | " 'test_precision': precision_score(y_test, prediction_classes),\n", 865 | " 'test_recall': recall_score(y_test, prediction_classes),\n", 866 | " 'test_f1': f1_score(y_test, prediction_classes)\n", 867 | " }\n", 868 | " \n", 869 | " # Train every model and save results\n", 870 | " for model in models:\n", 871 | " try:\n", 872 | " print(model.name, end=' ... ')\n", 873 | " res = train(model=model)\n", 874 | " results.append(res)\n", 875 | " except Exception as e:\n", 876 | " print(f'{model.name} --> {str(e)}')\n", 877 | " \n", 878 | " return pd.DataFrame(results)" 879 | ] 880 | }, 881 | { 882 | "cell_type": "markdown", 883 | "id": "9f7817f8-e353-45b9-925b-4f9c4a158c01", 884 | "metadata": {}, 885 | "source": [ 886 | "- Let's optimize the architecture!\n", 887 | "- It will take some time" 888 | ] 889 | }, 890 | { 891 | "cell_type": "code", 892 | "execution_count": 20, 893 | "id": "d4617681-a35b-45e0-9e83-04e9d913f599", 894 | "metadata": {}, 895 | "outputs": [ 896 | { 897 | "name": "stdout", 898 | "output_type": "stream", 899 | "text": [ 900 | "dense64_dense64_dense64 ... dense64_dense64_dense128 ... dense64_dense64_dense192 ... dense64_dense64_dense256 ... dense64_dense128_dense64 ... dense64_dense128_dense128 ... dense64_dense128_dense192 ... dense64_dense128_dense256 ... dense64_dense192_dense64 ... dense64_dense192_dense128 ... dense64_dense192_dense192 ... dense64_dense192_dense256 ... dense64_dense256_dense64 ... dense64_dense256_dense128 ... dense64_dense256_dense192 ... dense64_dense256_dense256 ... dense128_dense64_dense64 ... dense128_dense64_dense128 ... dense128_dense64_dense192 ... dense128_dense64_dense256 ... dense128_dense128_dense64 ... dense128_dense128_dense128 ... dense128_dense128_dense192 ... dense128_dense128_dense256 ... dense128_dense192_dense64 ... dense128_dense192_dense128 ... dense128_dense192_dense192 ... dense128_dense192_dense256 ... dense128_dense256_dense64 ... 
dense128_dense256_dense128 ... dense128_dense256_dense192 ... dense128_dense256_dense256 ... dense192_dense64_dense64 ... dense192_dense64_dense128 ... dense192_dense64_dense192 ... dense192_dense64_dense256 ... dense192_dense128_dense64 ... dense192_dense128_dense128 ... dense192_dense128_dense192 ... dense192_dense128_dense256 ... dense192_dense192_dense64 ... dense192_dense192_dense128 ... dense192_dense192_dense192 ... dense192_dense192_dense256 ... dense192_dense256_dense64 ... dense192_dense256_dense128 ... dense192_dense256_dense192 ... dense192_dense256_dense256 ... dense256_dense64_dense64 ... dense256_dense64_dense128 ... dense256_dense64_dense192 ... dense256_dense64_dense256 ... dense256_dense128_dense64 ... dense256_dense128_dense128 ... dense256_dense128_dense192 ... dense256_dense128_dense256 ... dense256_dense192_dense64 ... dense256_dense192_dense128 ... dense256_dense192_dense192 ... dense256_dense192_dense256 ... dense256_dense256_dense64 ... dense256_dense256_dense128 ... dense256_dense256_dense192 ... dense256_dense256_dense256 ... " 901 | ] 902 | } 903 | ], 904 | "source": [ 905 | "optimization_results = optimize(\n", 906 | " models=models,\n", 907 | " X_train=X_train_scaled,\n", 908 | " y_train=y_train,\n", 909 | " X_test=X_test_scaled,\n", 910 | " y_test=y_test\n", 911 | ")" 912 | ] 913 | }, 914 | { 915 | "cell_type": "code", 916 | "execution_count": 22, 917 | "id": "3d266ebe-21c3-4227-a3f2-7c144384ce86", 918 | "metadata": {}, 919 | "outputs": [ 920 | { 921 | "data": { 922 | "text/html": [ 923 | "
\n", 924 | "\n", 937 | "\n", 938 | " \n", 939 | " \n", 940 | " \n", 941 | " \n", 942 | " \n", 943 | " \n", 944 | " \n", 945 | " \n", 946 | " \n", 947 | " \n", 948 | " \n", 949 | " \n", 950 | " \n", 951 | " \n", 952 | " \n", 953 | " \n", 954 | " \n", 955 | " \n", 956 | " \n", 957 | " \n", 958 | " \n", 959 | " \n", 960 | " \n", 961 | " \n", 962 | " \n", 963 | " \n", 964 | " \n", 965 | " \n", 966 | " \n", 967 | " \n", 968 | " \n", 969 | " \n", 970 | " \n", 971 | " \n", 972 | " \n", 973 | " \n", 974 | " \n", 975 | " \n", 976 | " \n", 977 | " \n", 978 | " \n", 979 | " \n", 980 | " \n", 981 | " \n", 982 | " \n", 983 | " \n", 984 | " \n", 985 | " \n", 986 | " \n", 987 | " \n", 988 | " \n", 989 | " \n", 990 | " \n", 991 | " \n", 992 | " \n", 993 | " \n", 994 | " \n", 995 | " \n", 996 | " \n", 997 | " \n", 998 | " \n", 999 | " \n", 1000 | " \n", 1001 | " \n", 1002 | " \n", 1003 | " \n", 1004 | " \n", 1005 | " \n", 1006 | " \n", 1007 | " \n", 1008 | " \n", 1009 | " \n", 1010 | " \n", 1011 | " \n", 1012 | " \n", 1013 | " \n", 1014 | " \n", 1015 | " \n", 1016 | " \n", 1017 | " \n", 1018 | " \n", 1019 | " \n", 1020 | " \n", 1021 | " \n", 1022 | " \n", 1023 | " \n", 1024 | " \n", 1025 | " \n", 1026 | " \n", 1027 | " \n", 1028 | " \n", 1029 | " \n", 1030 | " \n", 1031 | " \n", 1032 | " \n", 1033 | " \n", 1034 | " \n", 1035 | " \n", 1036 | " \n", 1037 | " \n", 1038 | "
model_nametest_accuracytest_precisiontest_recalltest_f1
0dense64_dense64_dense640.8105180.8493830.8483350.848859
51dense256_dense64_dense2560.8035580.8351380.8557340.845311
28dense128_dense256_dense640.8027840.8365620.8520350.844227
39dense192_dense128_dense2560.8027840.8333330.8569670.844985
46dense192_dense256_dense1920.8020110.8299640.8606660.845036
..................
38dense192_dense128_dense1920.7780360.8299750.8125770.821184
49dense256_dense64_dense1280.7764890.8389610.7965470.817204
6dense64_dense128_dense1920.7764890.8198530.8249080.822372
9dense64_dense192_dense1280.7703020.8244950.8051790.814722
55dense256_dense128_dense2560.7672080.8203520.8051790.812694
\n", 1039 | "

64 rows × 5 columns

\n", 1040 | "
" 1041 | ], 1042 | "text/plain": [ 1043 | " model_name test_accuracy test_precision test_recall \\\n", 1044 | "0 dense64_dense64_dense64 0.810518 0.849383 0.848335 \n", 1045 | "51 dense256_dense64_dense256 0.803558 0.835138 0.855734 \n", 1046 | "28 dense128_dense256_dense64 0.802784 0.836562 0.852035 \n", 1047 | "39 dense192_dense128_dense256 0.802784 0.833333 0.856967 \n", 1048 | "46 dense192_dense256_dense192 0.802011 0.829964 0.860666 \n", 1049 | ".. ... ... ... ... \n", 1050 | "38 dense192_dense128_dense192 0.778036 0.829975 0.812577 \n", 1051 | "49 dense256_dense64_dense128 0.776489 0.838961 0.796547 \n", 1052 | "6 dense64_dense128_dense192 0.776489 0.819853 0.824908 \n", 1053 | "9 dense64_dense192_dense128 0.770302 0.824495 0.805179 \n", 1054 | "55 dense256_dense128_dense256 0.767208 0.820352 0.805179 \n", 1055 | "\n", 1056 | " test_f1 \n", 1057 | "0 0.848859 \n", 1058 | "51 0.845311 \n", 1059 | "28 0.844227 \n", 1060 | "39 0.844985 \n", 1061 | "46 0.845036 \n", 1062 | ".. ... \n", 1063 | "38 0.821184 \n", 1064 | "49 0.817204 \n", 1065 | "6 0.822372 \n", 1066 | "9 0.814722 \n", 1067 | "55 0.812694 \n", 1068 | "\n", 1069 | "[64 rows x 5 columns]" 1070 | ] 1071 | }, 1072 | "execution_count": 22, 1073 | "metadata": {}, 1074 | "output_type": "execute_result" 1075 | } 1076 | ], 1077 | "source": [ 1078 | "optimization_results.sort_values(by='test_accuracy', ascending=False)" 1079 | ] 1080 | }, 1081 | { 1082 | "cell_type": "markdown", 1083 | "id": "fb132b54-13d5-4c25-8f4b-6feb6a873c55", 1084 | "metadata": {}, 1085 | "source": [ 1086 | "- And there you have it!" 
1087 | ] 1088 | } 1089 | ], 1090 | "metadata": { 1091 | "kernelspec": { 1092 | "display_name": "Python 3.9.7 64-bit ('env_tensorflow': conda)", 1093 | "language": "python", 1094 | "name": "python397jvsc74a57bd02525e10832b7ba92743f6be5b80dfb53422b22d81bf31403f095ad9684056cbf" 1095 | }, 1096 | "language_info": { 1097 | "codemirror_mode": { 1098 | "name": "ipython", 1099 | "version": 3 1100 | }, 1101 | "file_extension": ".py", 1102 | "mimetype": "text/x-python", 1103 | "name": "python", 1104 | "nbconvert_exporter": "python", 1105 | "pygments_lexer": "ipython3", 1106 | "version": "3.9.7" 1107 | } 1108 | }, 1109 | "nbformat": 4, 1110 | "nbformat_minor": 5 1111 | } 1112 | -------------------------------------------------------------------------------- /007_Custom_Callbacks.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "b2e8fac7-5e9c-4096-a4ac-0b833f531ced", 6 | "metadata": {}, 7 | "source": [ 8 | "## Dataset import and exploration\n", 9 | "- https://www.kaggle.com/shelvigarg/wine-quality-dataset\n", 10 | "- Refer to https://github.com/better-data-science/TensorFlow/blob/main/003_TensorFlow_Classification.ipynb for detailed preparation instructions" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 1, 16 | "id": "da8d92c7-2972-4552-bf94-d01bb0e075b5", 17 | "metadata": {}, 18 | "outputs": [ 19 | { 20 | "data": { 21 | "text/html": [ 22 | "
\n", 23 | "\n", 36 | "\n", 37 | " \n", 38 | " \n", 39 | " \n", 40 | " \n", 41 | " \n", 42 | " \n", 43 | " \n", 44 | " \n", 45 | " \n", 46 | " \n", 47 | " \n", 48 | " \n", 49 | " \n", 50 | " \n", 51 | " \n", 52 | " \n", 53 | " \n", 54 | " \n", 55 | " \n", 56 | " \n", 57 | " \n", 58 | " \n", 59 | " \n", 60 | " \n", 61 | " \n", 62 | " \n", 63 | " \n", 64 | " \n", 65 | " \n", 66 | " \n", 67 | " \n", 68 | " \n", 69 | " \n", 70 | " \n", 71 | " \n", 72 | " \n", 73 | " \n", 74 | " \n", 75 | " \n", 76 | " \n", 77 | " \n", 78 | " \n", 79 | " \n", 80 | " \n", 81 | " \n", 82 | " \n", 83 | " \n", 84 | " \n", 85 | " \n", 86 | " \n", 87 | " \n", 88 | " \n", 89 | " \n", 90 | " \n", 91 | " \n", 92 | " \n", 93 | " \n", 94 | " \n", 95 | " \n", 96 | " \n", 97 | " \n", 98 | " \n", 99 | " \n", 100 | " \n", 101 | " \n", 102 | " \n", 103 | " \n", 104 | " \n", 105 | " \n", 106 | " \n", 107 | " \n", 108 | " \n", 109 | " \n", 110 | " \n", 111 | " \n", 112 | " \n", 113 | " \n", 114 | " \n", 115 | " \n", 116 | " \n", 117 | " \n", 118 | " \n", 119 | " \n", 120 | " \n", 121 | " \n", 122 | " \n", 123 | " \n", 124 | " \n", 125 | " \n", 126 | " \n", 127 | " \n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | "
typefixed acidityvolatile aciditycitric acidresidual sugarchloridesfree sulfur dioxidetotal sulfur dioxidedensitypHsulphatesalcoholquality
3015white7.20.3200.308.250.02014.0104.00.993622.990.4411.46
3406white5.60.3200.337.400.03725.095.00.992683.250.4911.16
3966white5.40.1850.197.100.04836.0110.00.994383.260.419.56
946white8.20.3451.0018.200.04755.0205.00.999652.960.439.65
256white6.30.3500.305.700.0358.097.00.992703.270.4111.07
\n", 138 | "
" 139 | ], 140 | "text/plain": [ 141 | " type fixed acidity volatile acidity citric acid residual sugar \\\n", 142 | "3015 white 7.2 0.320 0.30 8.25 \n", 143 | "3406 white 5.6 0.320 0.33 7.40 \n", 144 | "3966 white 5.4 0.185 0.19 7.10 \n", 145 | "946 white 8.2 0.345 1.00 18.20 \n", 146 | "256 white 6.3 0.350 0.30 5.70 \n", 147 | "\n", 148 | " chlorides free sulfur dioxide total sulfur dioxide density pH \\\n", 149 | "3015 0.020 14.0 104.0 0.99362 2.99 \n", 150 | "3406 0.037 25.0 95.0 0.99268 3.25 \n", 151 | "3966 0.048 36.0 110.0 0.99438 3.26 \n", 152 | "946 0.047 55.0 205.0 0.99965 2.96 \n", 153 | "256 0.035 8.0 97.0 0.99270 3.27 \n", 154 | "\n", 155 | " sulphates alcohol quality \n", 156 | "3015 0.44 11.4 6 \n", 157 | "3406 0.49 11.1 6 \n", 158 | "3966 0.41 9.5 6 \n", 159 | "946 0.43 9.6 5 \n", 160 | "256 0.41 11.0 7 " 161 | ] 162 | }, 163 | "execution_count": 1, 164 | "metadata": {}, 165 | "output_type": "execute_result" 166 | } 167 | ], 168 | "source": [ 169 | "import os\n", 170 | "import numpy as np\n", 171 | "import pandas as pd\n", 172 | "import warnings\n", 173 | "from datetime import datetime\n", 174 | "import matplotlib.pyplot as plt\n", 175 | "plt.rcParams['figure.figsize'] = (24, 6)\n", 176 | "plt.rcParams['axes.spines.top'] = False\n", 177 | "plt.rcParams['axes.spines.right'] = False\n", 178 | "\n", 179 | "os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2' \n", 180 | "warnings.filterwarnings('ignore')\n", 181 | "\n", 182 | "df = pd.read_csv('data/winequalityN.csv')\n", 183 | "df.sample(5)" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": 2, 189 | "id": "a529677d-9ec9-42a1-b872-8f95602af3a9", 190 | "metadata": {}, 191 | "outputs": [], 192 | "source": [ 193 | "from sklearn.model_selection import train_test_split\n", 194 | "from sklearn.preprocessing import StandardScaler\n", 195 | "\n", 196 | "\n", 197 | "# Prepare the data\n", 198 | "df = df.dropna()\n", 199 | "df['is_white_wine'] = [1 if typ == 'white' else 0 for typ in 
df['type']]\n", 200 | "df['is_good_wine'] = [1 if quality >= 6 else 0 for quality in df['quality']]\n", 201 | "df.drop(['type', 'quality'], axis=1, inplace=True)\n", 202 | "\n", 203 | "# Train/test split\n", 204 | "X = df.drop('is_good_wine', axis=1)\n", 205 | "y = df['is_good_wine']\n", 206 | "X_train, X_test, y_train, y_test = train_test_split(\n", 207 | " X, y, \n", 208 | " test_size=0.2, random_state=42\n", 209 | ")\n", 210 | "\n", 211 | "# Scaling\n", 212 | "scaler = StandardScaler()\n", 213 | "X_train_scaled = scaler.fit_transform(X_train)\n", 214 | "X_test_scaled = scaler.transform(X_test)" 215 | ] 216 | }, 217 | { 218 | "cell_type": "markdown", 219 | "id": "d6c6b768-2339-4612-bc1f-e44fdb68fe6e", 220 | "metadata": {}, 221 | "source": [ 222 | "
\n", 223 | "\n", 224 | "## Modelling\n", 225 | "- Let's declare a function that builds and trains the model\n", 226 | "- We're doing this because we'll train the exact same model multiple times" 227 | ] 228 | }, 229 | { 230 | "cell_type": "code", 231 | "execution_count": 3, 232 | "id": "2b0cae3b-3c4f-497e-8e22-c425c5dc8d0c", 233 | "metadata": {}, 234 | "outputs": [ 235 | { 236 | "name": "stdout", 237 | "output_type": "stream", 238 | "text": [ 239 | "Init Plugin\n", 240 | "Init Graph Optimizer\n", 241 | "Init Kernel\n" 242 | ] 243 | } 244 | ], 245 | "source": [ 246 | "import tensorflow as tf\n", 247 | "tf.random.set_seed(42)" 248 | ] 249 | }, 250 | { 251 | "cell_type": "code", 252 | "execution_count": 7, 253 | "id": "f7d11393-3eae-437d-9e90-67ed11274894", 254 | "metadata": {}, 255 | "outputs": [], 256 | "source": [ 257 | "def build_and_train(callbacks: list, num_epochs: int = 5) -> tf.keras.Sequential:\n", 258 | " model = tf.keras.Sequential([\n", 259 | " tf.keras.layers.Dense(64, activation='relu'),\n", 260 | " tf.keras.layers.Dense(64, activation='relu'),\n", 261 | " tf.keras.layers.Dense(64, activation='relu'),\n", 262 | " tf.keras.layers.Dense(1, activation='sigmoid')\n", 263 | " ])\n", 264 | "\n", 265 | " model.compile(\n", 266 | " loss=tf.keras.losses.binary_crossentropy,\n", 267 | " optimizer=tf.keras.optimizers.Adam(),\n", 268 | " metrics=[tf.keras.metrics.BinaryAccuracy(name='accuracy')]\n", 269 | " )\n", 270 | "\n", 271 | " model.fit(\n", 272 | " X_train_scaled, \n", 273 | " y_train, \n", 274 | " epochs=num_epochs,\n", 275 | " validation_data=(X_test_scaled, y_test),\n", 276 | " callbacks=callbacks,\n", 277 | " verbose=0\n", 278 | " )\n", 279 | " \n", 280 | " return model" 281 | ] 282 | }, 283 | { 284 | "cell_type": "markdown", 285 | "id": "24e6cc25-c6df-43d1-b6a6-6602dad60a01", 286 | "metadata": {}, 287 | "source": [ 288 | "
\n", 289 | "\n", 290 | "## Basic custom callback\n", 291 | "- We'll define what happens on:\n", 292 | " - **Train begin** - we'll just print the time at which the training started\n", 293 | " - **Train end** - we'll print the time at which the training finsihed, how much time did the training last, and evaluation metrics (accuracy, precision, recall, f1) on the test set" 294 | ] 295 | }, 296 | { 297 | "cell_type": "code", 298 | "execution_count": 8, 299 | "id": "80448e44-45c0-4ee9-8dc1-94e69602f128", 300 | "metadata": {}, 301 | "outputs": [], 302 | "source": [ 303 | "class MyCallback(tf.keras.callbacks.Callback):\n", 304 | " def __init__(self):\n", 305 | " self.time_started = None\n", 306 | " self.time_finished = None\n", 307 | " \n", 308 | " def on_train_begin(self, logs=None):\n", 309 | " self.time_started = datetime.now()\n", 310 | " print(f'TRAINING STARTED | {self.time_started}\\n')\n", 311 | " \n", 312 | " def on_train_end(self, logs=None):\n", 313 | " self.time_finished = datetime.now()\n", 314 | " train_duration = str(self.time_finished - self.time_started)\n", 315 | " print(f'\\nTRAINING FINISHED | {self.time_finished} | Duration: {train_duration}')\n", 316 | " \n", 317 | " tl = f\"Training loss: {logs['loss']:.5f}\"\n", 318 | " ta = f\"Training accuracy: {logs['accuracy']:.5f}\"\n", 319 | " vl = f\"Validation loss: {logs['val_loss']:.5f}\"\n", 320 | " va = f\"Validation accuracy: {logs['val_accuracy']:.5f}\"\n", 321 | " \n", 322 | " print('\\n'.join([tl, vl, ta, va]))" 323 | ] 324 | }, 325 | { 326 | "cell_type": "markdown", 327 | "id": "efe2a6b8-c7f9-46c7-b32d-18b050292ff5", 328 | "metadata": {}, 329 | "source": [ 330 | "- Pass in the callback like this:" 331 | ] 332 | }, 333 | { 334 | "cell_type": "code", 335 | "execution_count": 9, 336 | "id": "31343da9-95de-4e23-b83f-e6d1bb9d396c", 337 | "metadata": {}, 338 | "outputs": [ 339 | { 340 | "name": "stdout", 341 | "output_type": "stream", 342 | "text": [ 343 | "TRAINING STARTED | 2021-10-29 
15:49:21.494512\n", 344 | "\n", 345 | "\n", 346 | "TRAINING FINISHED | 2021-10-29 15:49:26.210987 | Duration: 0:00:04.716475\n", 347 | "Training loss: 0.46859\n", 348 | "Validation loss: 0.46578\n", 349 | "Training accuracy: 0.77988\n", 350 | "Validation accuracy: 0.77726\n" 351 | ] 352 | } 353 | ], 354 | "source": [ 355 | "model = build_and_train(\n", 356 | " callbacks=[MyCallback()]\n", 357 | ")" 358 | ] 359 | }, 360 | { 361 | "cell_type": "markdown", 362 | "id": "634ca4bb-cae9-4e41-af2c-a8218369d67b", 363 | "metadata": {}, 364 | "source": [ 365 | "
\n", 366 | "\n", 367 | "## Extending the callback functionality\n", 368 | "- We'll also modify the behavior for a single epoch:\n", 369 | " - **Epoch begin** - just save the time to the constructor\n", 370 | " - **Epoch end** - Calculate epoch duration and keep track of the training and validation metrics. We'll print them in a somewhat of a visually apealing way" 371 | ] 372 | }, 373 | { 374 | "cell_type": "code", 375 | "execution_count": 10, 376 | "id": "e9f8cf2b-353e-4f7b-8a77-f0c31e2b439d", 377 | "metadata": {}, 378 | "outputs": [], 379 | "source": [ 380 | "class MyCallback(tf.keras.callbacks.Callback):\n", 381 | " def __init__(self):\n", 382 | " self.time_started = None\n", 383 | " self.time_finished = None\n", 384 | " self.time_curr_epoch = None\n", 385 | " \n", 386 | " def on_train_begin(self, logs=None):\n", 387 | " self.time_started = datetime.now()\n", 388 | " print(f'TRAINING STARTED | {self.time_started}\\n')\n", 389 | " \n", 390 | " def on_train_end(self, logs=None):\n", 391 | " self.time_finished = datetime.now()\n", 392 | " train_duration = str(self.time_finished - self.time_started)\n", 393 | " print(f'\\nTRAINING FINISHED | {self.time_finished} | Duration: {train_duration}')\n", 394 | " \n", 395 | " tl = f\"Training loss: {logs['loss']:.5f}\"\n", 396 | " ta = f\"Training accuracy: {logs['accuracy']:.5f}\"\n", 397 | " vl = f\"Validation loss: {logs['val_loss']:.5f}\"\n", 398 | " va = f\"Validation accuracy: {logs['val_accuracy']:.5f}\"\n", 399 | " \n", 400 | " print('\\n'.join([tl, vl, ta, va]))\n", 401 | " \n", 402 | " def on_epoch_begin(self, epoch, logs=None):\n", 403 | " self.time_curr_epoch = datetime.now()\n", 404 | " \n", 405 | " def on_epoch_end(self, epoch, logs=None):\n", 406 | " epoch_dur = (datetime.now() - self.time_curr_epoch).total_seconds()\n", 407 | " tl = logs['loss']\n", 408 | " ta = logs['accuracy']\n", 409 | " vl = logs['val_loss']\n", 410 | " va = logs['val_accuracy']\n", 411 | " \n", 412 | " train_metrics = f\"train_loss: 
{tl:.5f}, train_accuracy: {ta:.5f}\"\n", 413 | " valid_metrics = f\"valid_loss: {vl:.5f}, valid_accuracy: {va:.5f}\"\n", 414 | " \n", 415 | " print(f\"Epoch: {epoch:4} | Runtime: {epoch_dur:.3f}s | {train_metrics} | {valid_metrics}\")" 416 | ] 417 | }, 418 | { 419 | "cell_type": "code", 420 | "execution_count": 11, 421 | "id": "d9461421-34db-4962-bb81-8fd455003d62", 422 | "metadata": {}, 423 | "outputs": [ 424 | { 425 | "name": "stdout", 426 | "output_type": "stream", 427 | "text": [ 428 | "TRAINING STARTED | 2021-10-29 15:49:34.048506\n", 429 | "\n", 430 | "Epoch: 0 | Runtime: 1.187s | train_loss: 0.53959, train_accuracy: 0.73095 | valid_loss: 0.49353, valid_accuracy: 0.75793\n", 431 | "Epoch: 1 | Runtime: 0.898s | train_loss: 0.49826, train_accuracy: 0.76035 | valid_loss: 0.50824, valid_accuracy: 0.74865\n", 432 | "Epoch: 2 | Runtime: 0.888s | train_loss: 0.48819, train_accuracy: 0.76402 | valid_loss: 0.47304, valid_accuracy: 0.76643\n", 433 | "Epoch: 3 | Runtime: 0.874s | train_loss: 0.47488, train_accuracy: 0.77350 | valid_loss: 0.47708, valid_accuracy: 0.75870\n", 434 | "Epoch: 4 | Runtime: 0.879s | train_loss: 0.46941, train_accuracy: 0.78085 | valid_loss: 0.47244, valid_accuracy: 0.76025\n", 435 | "\n", 436 | "TRAINING FINISHED | 2021-10-29 15:49:38.785216 | Duration: 0:00:04.736710\n", 437 | "Training loss: 0.46941\n", 438 | "Validation loss: 0.47244\n", 439 | "Training accuracy: 0.78085\n", 440 | "Validation accuracy: 0.76025\n" 441 | ] 442 | } 443 | ], 444 | "source": [ 445 | "model = build_and_train(\n", 446 | " callbacks=[MyCallback()]\n", 447 | ")" 448 | ] 449 | }, 450 | { 451 | "cell_type": "markdown", 452 | "id": "a0ac5e00-2e85-4d4d-bc5d-75a73caaf4e2", 453 | "metadata": {}, 454 | "source": [ 455 | "
\n", 456 | "\n", 457 | "## Tweaking the functionality even further\n", 458 | "- We'll declare a function that plots training loss vs. validation loss and training accuracy vs. validation accuracy (`_plot_model_performance()``\n", 459 | "- We'll plot ot on training end" 460 | ] 461 | }, 462 | { 463 | "cell_type": "code", 464 | "execution_count": 12, 465 | "id": "661e0cfe-7419-4e39-a513-399851a2e7f5", 466 | "metadata": {}, 467 | "outputs": [], 468 | "source": [ 469 | "class MyCallback(tf.keras.callbacks.Callback):\n", 470 | " def __init__(self):\n", 471 | " self.time_started = None\n", 472 | " self.time_finished = None\n", 473 | " self.time_curr_epoch = None\n", 474 | " self.num_epochs = 0\n", 475 | " self._loss, self._acc, self._val_loss, self._val_acc = [], [], [], []\n", 476 | " \n", 477 | " def _plot_model_performance(self):\n", 478 | " fig, (ax1, ax2) = plt.subplots(1, 2)\n", 479 | " fig.suptitle('Model performance', size=20)\n", 480 | " \n", 481 | " ax1.plot(range(self.num_epochs), self._loss, label='Training loss')\n", 482 | " ax1.plot(range(self.num_epochs), self._val_loss, label='Validation loss')\n", 483 | " ax1.set_xlabel('Epoch', size=14)\n", 484 | " ax1.set_ylabel('Loss', size=14)\n", 485 | " ax1.legend()\n", 486 | " \n", 487 | " ax2.plot(range(self.num_epochs), self._acc, label='Training accuracy')\n", 488 | " ax2.plot(range(self.num_epochs), self._val_acc, label='Validation Accuracy')\n", 489 | " ax2.set_xlabel('Epoch', size=14)\n", 490 | " ax2.set_ylabel('Accuracy', size=14)\n", 491 | " ax2.legend()\n", 492 | " \n", 493 | " def on_train_begin(self, logs=None):\n", 494 | " self.time_started = datetime.now()\n", 495 | " print(f'TRAINING STARTED | {self.time_started}\\n')\n", 496 | " \n", 497 | " def on_train_end(self, logs=None):\n", 498 | " self.time_finished = datetime.now()\n", 499 | " train_duration = str(self.time_finished - self.time_started)\n", 500 | " print(f'\\nTRAINING FINISHED | {self.time_finished} | Duration: {train_duration}')\n", 501 | " 
\n", 502 | " tl = f\"Training loss: {logs['loss']:.5f}\"\n", 503 | " ta = f\"Training accuracy: {logs['accuracy']:.5f}\"\n", 504 | " vl = f\"Validation loss: {logs['val_loss']:.5f}\"\n", 505 | " va = f\"Validation accuracy: {logs['val_accuracy']:.5f}\"\n", 506 | " \n", 507 | " print('\\n'.join([tl, vl, ta, va]))\n", 508 | " self._plot_model_performance()\n", 509 | " \n", 510 | " def on_epoch_begin(self, epoch, logs=None):\n", 511 | " self.time_curr_epoch = datetime.now()\n", 512 | " \n", 513 | " def on_epoch_end(self, epoch, logs=None):\n", 514 | " self.num_epochs += 1\n", 515 | " epoch_dur = (datetime.now() - self.time_curr_epoch).total_seconds()\n", 516 | " tl = logs['loss']\n", 517 | " ta = logs['accuracy']\n", 518 | " vl = logs['val_loss']\n", 519 | " va = logs['val_accuracy']\n", 520 | " \n", 521 | " self._loss.append(tl); self._acc.append(ta); self._val_loss.append(vl); self._val_acc.append(va)\n", 522 | " \n", 523 | " train_metrics = f\"train_loss: {tl:.5f}, train_accuracy: {ta:.5f}\"\n", 524 | " valid_metrics = f\"valid_loss: {vl:.5f}, valid_accuracy: {va:.5f}\"\n", 525 | " \n", 526 | " print(f\"Epoch: {epoch:4} | Runtime: {epoch_dur:.3f}s | {train_metrics} | {valid_metrics}\")" 527 | ] 528 | }, 529 | { 530 | "cell_type": "code", 531 | "execution_count": 15, 532 | "id": "3a2df21c-3ba6-4727-bd26-339e7df2b30e", 533 | "metadata": {}, 534 | "outputs": [ 535 | { 536 | "name": "stdout", 537 | "output_type": "stream", 538 | "text": [ 539 | "TRAINING STARTED | 2021-10-29 15:50:07.255394\n", 540 | "\n", 541 | "Epoch: 0 | Runtime: 1.156s | train_loss: 0.54984, train_accuracy: 0.71412 | valid_loss: 0.49403, valid_accuracy: 0.75483\n", 542 | "Epoch: 1 | Runtime: 0.863s | train_loss: 0.49869, train_accuracy: 0.75706 | valid_loss: 0.49263, valid_accuracy: 0.74710\n", 543 | "Epoch: 2 | Runtime: 0.867s | train_loss: 0.48524, train_accuracy: 0.77234 | valid_loss: 0.46720, valid_accuracy: 0.77185\n", 544 | "Epoch: 3 | Runtime: 0.865s | train_loss: 0.47193, train_accuracy: 
0.77776 | valid_loss: 0.47483, valid_accuracy: 0.75638\n", 545 | "Epoch: 4 | Runtime: 0.877s | train_loss: 0.46571, train_accuracy: 0.78414 | valid_loss: 0.46983, valid_accuracy: 0.76875\n", 546 | "Epoch: 5 | Runtime: 0.873s | train_loss: 0.45538, train_accuracy: 0.78665 | valid_loss: 0.46185, valid_accuracy: 0.77108\n", 547 | "Epoch: 6 | Runtime: 0.869s | train_loss: 0.45035, train_accuracy: 0.79052 | valid_loss: 0.46432, valid_accuracy: 0.77340\n", 548 | "Epoch: 7 | Runtime: 0.866s | train_loss: 0.44260, train_accuracy: 0.79342 | valid_loss: 0.45941, valid_accuracy: 0.77494\n", 549 | "Epoch: 8 | Runtime: 0.878s | train_loss: 0.43329, train_accuracy: 0.80116 | valid_loss: 0.46422, valid_accuracy: 0.77262\n", 550 | "Epoch: 9 | Runtime: 0.923s | train_loss: 0.43049, train_accuracy: 0.80174 | valid_loss: 0.47019, valid_accuracy: 0.77494\n", 551 | "Epoch: 10 | Runtime: 0.867s | train_loss: 0.42353, train_accuracy: 0.80580 | valid_loss: 0.46703, valid_accuracy: 0.78113\n", 552 | "Epoch: 11 | Runtime: 0.860s | train_loss: 0.41527, train_accuracy: 0.81296 | valid_loss: 0.47062, valid_accuracy: 0.77185\n", 553 | "Epoch: 12 | Runtime: 0.868s | train_loss: 0.41093, train_accuracy: 0.81122 | valid_loss: 0.48872, valid_accuracy: 0.77340\n", 554 | "Epoch: 13 | Runtime: 0.861s | train_loss: 0.40597, train_accuracy: 0.80928 | valid_loss: 0.46695, valid_accuracy: 0.77417\n", 555 | "Epoch: 14 | Runtime: 0.863s | train_loss: 0.39769, train_accuracy: 0.81857 | valid_loss: 0.48053, valid_accuracy: 0.77726\n", 556 | "Epoch: 15 | Runtime: 0.868s | train_loss: 0.39267, train_accuracy: 0.82186 | valid_loss: 0.47627, valid_accuracy: 0.77108\n", 557 | "Epoch: 16 | Runtime: 0.870s | train_loss: 0.38474, train_accuracy: 0.83037 | valid_loss: 0.48007, valid_accuracy: 0.77572\n", 558 | "Epoch: 17 | Runtime: 0.862s | train_loss: 0.38112, train_accuracy: 0.82708 | valid_loss: 0.47731, valid_accuracy: 0.78268\n", 559 | "Epoch: 18 | Runtime: 0.868s | train_loss: 0.37659, train_accuracy: 0.82592 | 
valid_loss: 0.48525, valid_accuracy: 0.77881\n", 560 | "Epoch: 19 | Runtime: 0.868s | train_loss: 0.36965, train_accuracy: 0.83656 | valid_loss: 0.48052, valid_accuracy: 0.77804\n", 561 | "Epoch: 20 | Runtime: 0.875s | train_loss: 0.36277, train_accuracy: 0.84217 | valid_loss: 0.48497, valid_accuracy: 0.78809\n", 562 | "Epoch: 21 | Runtime: 0.863s | train_loss: 0.36157, train_accuracy: 0.83752 | valid_loss: 0.48956, valid_accuracy: 0.78113\n", 563 | "Epoch: 22 | Runtime: 0.874s | train_loss: 0.35574, train_accuracy: 0.83849 | valid_loss: 0.48658, valid_accuracy: 0.77572\n", 564 | "Epoch: 23 | Runtime: 0.866s | train_loss: 0.34575, train_accuracy: 0.85203 | valid_loss: 0.49739, valid_accuracy: 0.78036\n", 565 | "Epoch: 24 | Runtime: 0.863s | train_loss: 0.34246, train_accuracy: 0.84874 | valid_loss: 0.48785, valid_accuracy: 0.78654\n", 566 | "Epoch: 25 | Runtime: 0.870s | train_loss: 0.33659, train_accuracy: 0.85029 | valid_loss: 0.51253, valid_accuracy: 0.78886\n", 567 | "Epoch: 26 | Runtime: 0.868s | train_loss: 0.32992, train_accuracy: 0.85300 | valid_loss: 0.52090, valid_accuracy: 0.77726\n", 568 | "Epoch: 27 | Runtime: 0.870s | train_loss: 0.32881, train_accuracy: 0.85513 | valid_loss: 0.53420, valid_accuracy: 0.77572\n", 569 | "Epoch: 28 | Runtime: 0.878s | train_loss: 0.32202, train_accuracy: 0.86402 | valid_loss: 0.53723, valid_accuracy: 0.77881\n", 570 | "Epoch: 29 | Runtime: 0.877s | train_loss: 0.31739, train_accuracy: 0.86035 | valid_loss: 0.51675, valid_accuracy: 0.79273\n", 571 | "Epoch: 30 | Runtime: 0.877s | train_loss: 0.31142, train_accuracy: 0.86325 | valid_loss: 0.52563, valid_accuracy: 0.78036\n", 572 | "Epoch: 31 | Runtime: 0.888s | train_loss: 0.30740, train_accuracy: 0.86809 | valid_loss: 0.53693, valid_accuracy: 0.79505\n", 573 | "Epoch: 32 | Runtime: 0.883s | train_loss: 0.30584, train_accuracy: 0.86151 | valid_loss: 0.52875, valid_accuracy: 0.77417\n", 574 | "Epoch: 33 | Runtime: 0.878s | train_loss: 0.30174, train_accuracy: 0.87137 | 
valid_loss: 0.52646, valid_accuracy: 0.78036\n", 575 | "Epoch: 34 | Runtime: 0.874s | train_loss: 0.29382, train_accuracy: 0.86983 | valid_loss: 0.54965, valid_accuracy: 0.77804\n", 576 | "Epoch: 35 | Runtime: 0.873s | train_loss: 0.29185, train_accuracy: 0.87350 | valid_loss: 0.52804, valid_accuracy: 0.78113\n", 577 | "Epoch: 36 | Runtime: 0.872s | train_loss: 0.28378, train_accuracy: 0.87872 | valid_loss: 0.53974, valid_accuracy: 0.78422\n", 578 | "Epoch: 37 | Runtime: 0.865s | train_loss: 0.27826, train_accuracy: 0.88066 | valid_loss: 0.53486, valid_accuracy: 0.78036\n", 579 | "Epoch: 38 | Runtime: 0.867s | train_loss: 0.27347, train_accuracy: 0.88607 | valid_loss: 0.57144, valid_accuracy: 0.77881\n", 580 | "Epoch: 39 | Runtime: 0.874s | train_loss: 0.27613, train_accuracy: 0.88395 | valid_loss: 0.55132, valid_accuracy: 0.79118\n", 581 | "Epoch: 40 | Runtime: 0.878s | train_loss: 0.26357, train_accuracy: 0.89033 | valid_loss: 0.55898, valid_accuracy: 0.78886\n", 582 | "Epoch: 41 | Runtime: 0.872s | train_loss: 0.26624, train_accuracy: 0.88298 | valid_loss: 0.55616, valid_accuracy: 0.77958\n", 583 | "Epoch: 42 | Runtime: 0.880s | train_loss: 0.26508, train_accuracy: 0.88839 | valid_loss: 0.57052, valid_accuracy: 0.78422\n", 584 | "Epoch: 43 | Runtime: 0.895s | train_loss: 0.25255, train_accuracy: 0.89246 | valid_loss: 0.58852, valid_accuracy: 0.78577\n", 585 | "Epoch: 44 | Runtime: 0.880s | train_loss: 0.24786, train_accuracy: 0.89845 | valid_loss: 0.58164, valid_accuracy: 0.79196\n", 586 | "Epoch: 45 | Runtime: 0.881s | train_loss: 0.24558, train_accuracy: 0.89807 | valid_loss: 0.57115, valid_accuracy: 0.78500\n", 587 | "Epoch: 46 | Runtime: 0.877s | train_loss: 0.23848, train_accuracy: 0.90000 | valid_loss: 0.58779, valid_accuracy: 0.77881\n", 588 | "Epoch: 47 | Runtime: 0.879s | train_loss: 0.23460, train_accuracy: 0.90058 | valid_loss: 0.59602, valid_accuracy: 0.78422\n", 589 | "Epoch: 48 | Runtime: 0.876s | train_loss: 0.23146, train_accuracy: 0.90812 | 
valid_loss: 0.59239, valid_accuracy: 0.78809\n", 590 | "Epoch: 49 | Runtime: 0.885s | train_loss: 0.22946, train_accuracy: 0.90967 | valid_loss: 0.60174, valid_accuracy: 0.78190\n", 591 | "\n", 592 | "TRAINING FINISHED | 2021-10-29 15:50:51.267975 | Duration: 0:00:44.012581\n", 593 | "Training loss: 0.22946\n", 594 | "Validation loss: 0.60174\n", 595 | "Training accuracy: 0.90967\n", 596 | "Validation accuracy: 0.78190\n" 597 | ] 598 | }, 599 | { 600 | "data": { 601 | "image/png": "iVBORw0KGgoAAAANSUhEUgAABXgAAAGiCAYAAABd4a5tAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjQuMywgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/MnkTPAAAACXBIWXMAAAsTAAALEwEAmpwYAADQSElEQVR4nOzdd3xUxfrH8c+kQCAJJSH03kEgAULoTVSqNAtgQUCxYr0WFO+Vn16vXstVsaAgKGJBUUFUBKVJ773XAKFDIAmE1J3fH2fBgAESSLIp3/frta9kz5kz85wNkOHZ2WeMtRYRERERERERERERyXu8PB2AiIiIiIiIiIiIiFwdJXhFRERERERERERE8igleEVERERERERERETyKCV4RURERERERERERPIoJXhFRERERERERERE8igleEVERERERERERETyKCV4RURERAowY0wHY4w1xoy8xn4GufsZlDWR5RxjzABjzBpjTJz7Ht71dEwiIiIiIhmlBK+IiIhIDnInEK0xxmWMqXGZdnPTtB2UgyEWKMaYlsBXQCAwGvg/YIZHgxIRERERyQQfTwcgIiIiUgCl4MzD7gVeuPikMaYW0D5NO8k+3QEDDLTWLvZ0MCIiIiIimaUVvCIiIiI57wiwEhhsjEkvgXsfTtLxlxyNqmAq7/560KNRiIiIiIhcJSV4RURERDxjLFAW6JH2oDHGF7gHWAxsutTFxphaxpgvjDEHjDFJxpiD7ue1LtG+jDFmnDHmiDHmrDFmrTHmnssFaIwJMsa8ZozZ4r4mxhgz2xhzU6bv9u99j3SXn+hgjLnHXQP3rDHmqDFmvDGm7LXGlLYusDGmizFmnrv9uWMWGOxuvidNSYyqafpoaoz5wR1XojFmrzHmI2NMuXTG+9x9fXVjzKPGmPXuGOdddL6aMWaYMWazMSbBGBNpjHnBGGPc7W4zxiw3xpxxj/uBMcYvnfF6G2O+NMZsd7c9bYxZZYx5zBjzt3l+mvGrGmMeMMZscI9/xBgzxhhT/BKveUVjzChjzA53+2h3fP+8RNsPjDG73a/XCWPMNGNMs/T6FhEREZFrp4/8iYiIiHjGN8D/cFbrTk1zvCdQBhgO1EzvQneybBZO3dhpwGagLnAn0MsY08lauzJN+2CchHF1YKH7UQ74GPj9EmNUAeYBVYEFOHVp/XES0jOMMQ9Ya8dm+q7/7kngJuBb9xhtcJKuHYwxza21x7IgpluBLsBvOPdcFViLU2+3NxAKvAeccrc/5R6vB/ADzmrq74G9QFPgIZzXubW1NjKd8d4D2gK/AtOB1IvOvwV0AH7Gef17Aq8ChYwx0cDrOH8mFgA3Ao8A3u5x03odcAHLgANAceB69/jNgLvTiQ3gDaBzmvE7AkNx/rxdn7ahMSYcmAkEAfOBH4GiQH1gJPBKmrZN3P0Fua/5ESiF8xovNMb0sdZOv0RMIiIiInKVlOAVERER8QBrbZwxZhIwyBhT0Vob5T41FIgFviP9+rwG+AIoBtxlr
f0qzbl+wCTgS2NMfWuty33qNZzk7rvW2ifTtP8AWHKJECcAVYAB1tpJaa4pgZNkHWWMmWatPZLpm79QV6C5tXZNmjHeAZ7ASWDemwUxdQO6WWsv3jxtrXu1bijOaxOZps8A4HOc+XIHa+2CNOeec8c2Bic5fbEmQGNr7Z5L3HNToJG19oC7v5HATuAZIB5oaq3d4j5XGFgDDDHGvGStPZqmn+7W2l1pO3av3P0MGGiM+cBauyyd8VsADa21+9zX+ABzgI7GmAhr7XL38ULAZJyE7Z3W2q8vGqtSmu99cP7MBgAdrbV/pjlXHlgBjDPGVLXWJl7idRERERGRq6ASDSIiIiKeMxZnZeYQOL9C9UbgK2tt/CWuaYWzWndJ2uQugLX2W5zVuXVwVsKeK/lwJxCHs+IybfuVwAV9uK8Jxdnk7Ye0iVT3NaeAlwA/4JYM3+mlTUyb3HUbCcQAd7gTnNca00/pJHevpBcQDHybNrnr9jYQCdxojKmczrVvXCa5C/DKueQunI9/Gs7K2NHnkrvuc4k4q5sLAfXSdnJxctd9zIWzghecVbrpeflcctd9TQpOUhggIk27m3FWO0+7OLnrvm5/mqfdgRrA+2mTu+52B3FWDZcFOl0iJhERERG5SlrBKyIiIuIh1tplxpgNOKsz/41TrsELJ/F7KU3cX+dc4vwcnORuY5yP1NfFSRwusNbGpNN+Hk7N37Raur8Wd68uvViI+2u9dM5l1p8XH7DWxhhj1uIkdOvhlFO4lpiWX0Vcl3ydrbUpxpj5OMnPxsC+i5pcabyV6Rw7t8nbqnTOnUsGV0x70F164xmcFcrVccpVpFUhE+OfS9aWTHOshfvrb5foJ61zP58ql/j5nKsNXQ+nbIWIiIiIZBEleEVEREQ8aywwCqdG7GBgVTorWtM6txHWoUucP3e8xEXtL1VK4XA6x4LdX290Py4l4DLnMupKcZ2L/1piSu8erySzr3Nmxksv0Z6SgXO+5w64y1KsAKrhJJS/AKLdbUsAjwOFLzH+qcuM4Z3mWAn31wNc2bmfz21XaJcVf2ZEREREJA0leEVEREQ8ayLwX+ATnBWXL1+h/bkEYNlLnC93UbtzX8tcon16/Zy75nFr7agrxHOtrhTXxfdxNTHZTEeV+df5WsfLrPtwkrv/Z60dmfaEMaYlToL3Wp1yf73USuC0zr0Ovay107JgbBERERHJINXgFREREfEgd/3V73E+fn8G+OYKl5xb3dvhEufPHV/t/roVZ+OuMGNM8cu0T2up+2vbK8SSFdpffMAdZxiQAJyrR5uTMcFlXmf3hmJt3E9XX3w+h9R0f/0hnXN/e02v0rnXvGsm2ubUz0dERERE3JTgFREREfG8F4E+QGdrbdwV2i4CtgFtjDG3pj3hft4O2I6z2RrW2mScjdQCuWiTNWNMOM4GbBdwb762AOhrjBmSXhDGmIbGmNJXvLMru9sY0/iiYyNxSiR8495kLKdjApiKU/JggDGmxUXnnsCpeTsr7WZlOSzS/bVD2oPu1/L5LBrjZ/c4PY0xAy4+aYxJu7L3J2AX8Igxplt6nRljWhpjimZRbCIiIiLiphINIiIiIh7mThJmKFForbXGmHuAP4BvjTE/4azSrQP0BuKAgdZaV5rLXgA6AU+4k7oLcUoM9MPZ8KpnOkPdgbPB2DhjzGPAMpyP7FcEGgENcDbWOpqZe03Hb8AiY8x3OHVt27gfkcBwD8WEtfa0O5E8GfjTGDMZ52fUFLgJp87uA9c6zjX4AmeDtXeNMR2BHTgbmfUAfsT52V4Ta22SMeY24Hfga2PMAzgrdf1wNkvrhPv/E9baZGNMX2Am8KsxZjHO5njxQCWgGU5SvJz7mIiIiIhkESV4RURERPIYa+0yY0wznJW/NwA3A8dxyju8Yq3ddlH748aY1sB/3G3DcVYBP4R7hWY6Y0QZY5oCjwK34Kz09cZJbG4G3gc2ZMHtvANMwVkV2w84DXwOvGCtvSBRm4Mxn
RvvJ/fr9gLQGWdV8WHgY5zX+WBWjXUVsR00xrQFXsdJiHfGSfQ/DMwiCxK87nFWGmPCcJLtXYFWOG8i7AReuqjtemNMKPAUTqJ5MODCSdyvcbc/nhVxiYiIiMhfjLU5sQeEiIiIiMhfjDEjcRJ+Ha218zwbjYiIiIhI3qUavCIiIiIiIiIiIiJ5lBK8IiIiIiIiIiIiInmUErwiIiIiIiIiIiIieZRq8IqIiIiIiIiIiIjkUVrBKyIiIiIiIiIiIpJHKcErIiIiIiIiIiIikkcpwSsiIiIiIiIiIiKSRynBKyIiIiIiIiIiIpJHKcErIiIiIiIiIiIikkcpwSsiIiIiIiIiIiKSRynBKyIiIiIiIiIiIpJHKcErIiIiIiIiIiIikkcpwSsiIiIiIiIiIiKSRynBKyIiIiIiIiIiIpJHKcErIiIiIiIiIiIikkcpwSsiIiIiIiIiIiKSRynBKyIiIiIiIiIiIpJHKcErIiIiIiIiIiIikkcpwSsiIiIiIiIiIiKSRynBKyIiIiIiIiIiIpJHKcErIiIiIiIiIiIikkcpwSsiIiIiIiIiIiKSRynBKyIiIiIiIiIiIpJHKcErIiIiIiIiIiIikkcpwSsiIiIiIiIiIiKSRynBKyIiIiIiIiIiIpJHKcErIiIiIiIiIiIikkf5eDqA7NSlSxc7Y8YMT4chIiIiUpAYTwcg2U/zbBERERGPSHeuna9X8B4/ftzTIYiIiIiI5DuaZ4uIiIjkHvk6wSsiIiIiIiIiIiKSnynBKyIiIiIiIiIiIpJHKcErIiIiIiIiIiIikkfl603W0pOcnExUVBQJCQmeDkWuwM/Pj4oVK+Lr6+vpUERERETkCjTPlvRoTi8iIpL9ClyCNyoqisDAQKpWrYox2uQ5t7LWcuLECaKioqhWrZqnwxERERGRK9A8Wy6mOb2IiEjOKHAlGhISEggODtakM5czxhAcHKwVICIiIiJ5hObZcjHN6UVERHJGgUvwApp05hH6OYmIiIjkLZq/ycX0Z0JERCT7FcgEr6ecOHGCsLAwwsLCKFu2LBUqVDj/PCkp6bLXrly5kscee+yKY7Rq1SpLYp03bx49evTIkr5ERERERLJbXppri4iIiGSlAleD15OCg4NZu3YtACNHjiQgIICnn376/PmUlBR8fNL/kYSHhxMeHn7FMRYvXpwlsYqIiIiI5CWaa6cvNTUVb29vT4chIiIi2UgreD1s0KBBPPXUU3Ts2JHnnnuO5cuX06pVKxo3bkyrVq3Ytm0bcOGK2pEjRzJkyBA6dOhA9erVGTVq1Pn+AgICzrfv0KEDt956K3Xr1uXOO+/EWgvA9OnTqVu3Lm3atOGxxx674krd6OhoevfuTaNGjWjRogXr168H4M8//zy/KqJx48bExcVx6NAh2rVrR1hYGA0aNGDBggVZ/pqJiIiIiGREbp1rR0ZG0rZtW5o0aUKTJk0uSBy/8cYbNGzYkNDQUIYPHw7Azp07ueGGGwgNDaVJkybs2rXrb5+4GzZsGJ9//jkAVatW5eWXX6ZNmzZMnjyZsWPH0qxZM0JDQ7nllluIj48H4MiRI/Tp04fQ0FBCQ0NZvHgx//znP3nvvffO9ztixIgLXgMRERHJfQr0Ct7/+3kTmw/GZmmf9csX46Wbr8vUNdu3b2fWrFl4e3sTGxvL/Pnz8fHxYdasWbzwwgv88MMPf7tm69atzJ07l7i4OOrUqcNDDz2Er6/vBW3WrFnDpk2bKF++PK1bt2bRokWEh4fzwAMPMH/+fKpVq8aAAQOuGN9LL71E48aNmTp1KnPmzGHgwIGsXbuWt956iw8//JDWrVtz+vRp/Pz8GDNmDJ07d2bEiBGkpqaenzyKiIiISMGRW+bZkDvn2qVLl+aPP/7Az8+PHTt2MGDAAFauXMlvv/3G1KlTWbZsGUWLFiU6OhqAO++8k+HDh9OnTx8SEhJwuVzs37//svft5+fHwoULAad8xdChQwF48cUXGTduH
I8++iiPPfYY7du3Z8qUKaSmpnL69GnKly9P3759efzxx3G5XEyaNInly5dn+nUXERGRnJOjCV5jTBfgPcAb+NRa+3o6bToA7wK+wHFrbfuMXptX3Xbbbec/NhUTE8M999zDjh07MMaQnJyc7jXdu3encOHCFC5cmNKlS3PkyBEqVqx4QZuIiIjzx8LCwoiMjCQgIIDq1atTrVo1AAYMGMCYMWMuG9/ChQvPT3yvv/56Tpw4QUxMDK1bt+app57izjvvpG/fvlSsWJFmzZoxZMgQkpOT6d27N2FhYdfy0oiIiMi1sBa2TIOaN0Khop6ORsQjcuNcOzk5mWHDhrF27Vq8vb3Zvn07ALNmzWLw4MEULer8fQ0KCiIuLo4DBw7Qp08fwEncZkS/fv3Of79x40ZefPFFTp06xenTp+ncuTMAc+bM4YsvvgDA29ub4sWLU7x4cYKDg1mzZg1HjhyhcePGBAcHZ2hMERGRgiIl1cWOo6eJPpNE65qlPB1OziV4jTHewIfAjUAUsMIYM81auzlNmxLAR0AXa+0+Y0zpjF57Na5mBUB28Pf3P//9P//5Tzp27MiUKVOIjIykQ4cO6V5TuHDh8997e3uTkpKSoTbnPjqWGeldY4xh+PDhdO/enenTp9OiRQtmzZpFu3btmD9/Pr/++it33303zzzzDAMHDsz0mCIiInKN4g7DL0/CtunQ+T/Q8hFPRyQFSG6ZZ0PunGu/8847lClThnXr1uFyuc4nba21GGMuaHupPn18fHC5XOefJyQkXHA+7X0PGjSIqVOnEhoayueff868efMuG999993H559/zuHDhxkyZEiG7klERCS/stay90Q866JOsT4qhvVRp9h4IJazyalUKFGERcOv93SIOVqDNwLYaa3dba1NAiYBvS5qcwfwo7V2H4C19mgmrs0XYmJiqFChAsD5GlpZqW7duuzevZvIyEgAvv322yte065dO7766ivAqTdWqlQpihUrxq5du2jYsCHPPfcc4eHhbN26lb1791K6dGmGDh3Kvffey+rVq7P8HkREROQyrIW1X8OHEbBrDtz0b2j+oKejEskVcstcOyYmhnLlyuHl5cXEiRNJTU0F4KabbmL8+PHny5xFR0dTrFgxKlasyNSpUwFITEwkPj6eKlWqsHnzZhITE4mJiWH27NmXjCsuLo5y5cqRnJx8fl4P0KlTJ0aPHg04m7HFxjplNfr06cOMGTNYsWLF+dW+IiIiBcWR2AR+33SYt2Zu4+5xywh7+Q86vDWPxyet5cule0l1WfpHVOLdfmFMvDfC0+ECOVuioQKQtlBUFND8oja1AV9jzDwgEHjPWvtFBq8FwBhzP3A/QOXKlbMk8Jz07LPPcs899/C///2P66/P+ncAihQpwkcffUSXLl0oVaoUERFX/oM4cuRIBg8eTKNGjShatCgTJkwA4N1332Xu3Ll4e3tTv359unbtyqRJk3jzzTfx9fUlICDg/Ee+REREJAfERMHPT8DOP6ByS+j5AZSq6emoRHKN3DLXfvjhh7nllluYPHkyHTt2PL/atkuXLqxdu5bw8HAKFSpEt27d+M9//sPEiRN54IEH+Ne//oWvry+TJ0+mevXq3H777TRq1IhatWrRuHHjS8b1yiuv0Lx5c6pUqULDhg2Ji4sD4L333uP+++9n3LhxeHt7M3r0aFq2bEmhQoXo2LEjJUqUOF/eQkREJL87GpvAkAkr2HjAecPT28tQp0wg3RqWpVHFEjSqWJzaZQLx9c7J9bIZY67mI/tXNZAxtwGdrbX3uZ/fDURYax9N0+YDIBzoBBQBlgDdgdArXZue8PBwu3LlyguObdmyhXr16mXZfeVFp0+fJiAgAGstjzzyCLVq1eLJJ5/0dFjp0s9LREQkA6yF1RNg5otgU+GGkdBsKHh5ZPJprtxE8jrNsy8tL821L8XlctGkSRMmT55MrVq1rrk//dkQEZHc7lhcIv3HLOFQTAJP3lCbJlVKUL9cc
YoUynVvdKY7187JFbxRQKU0zysCB9Npc9xaewY4Y4yZj5Pczci1kkFjx45lwoQJJCUl0bhxYx544AFPhyQiIlJwJcbB4vfh1H6o0gqqtYOSVTJ+/cm98PNjsHseVG0LPd+HoGrZFq6IXF5en2tv3ryZHj160KdPnyxJ7oqIiOR2x08ncsfYpRw8lcDng5vRvHre21w0J1fw+gDbcVbnHgBWAHdYazelaVMP+ADoDBQClgP9ga1XujY9WlmQ9+nnJSIi+ZbLBesnwayRcPoIFCkJZ08650pUdhK9VdtBtbZQrHz6168cB3+8BMbAjS9D08GeWrWbllbwFgCaZ0tm6M+GiIjkVidOJ3LH2GXsjT7DZ4MiaFkj1yd3PbuC11qbYowZBswEvIHx1tpNxpgH3ec/ttZuMcbMANYDLuBTa+1GgPSuzanYRURERLJU1Er47Vk4sAoqNIX+Xztfj26ByAWwZz5s+QXWfOm0D67prM6t1s75mhgL0x6DvQuhxvVw8ygoUenyY4qIiIiIyHknzyRx56fLiDxxhvGDmuWF5O4l5WSJBqy104HpFx37+KLnbwJvZuRaERERkTwl9pCzYnf9JAgoC30+gYa3/7Xqtkx959H8AXClwpGNTrJ3zwLY8D2s+sxp5+UDvv7OJmqN73JW8IqIiIiISIacineSu7uPn+HTgeG0rlnK0yFdkxxN8IqIiIgUSMkJsPRDmP82uJKhzVPQ9ikoHHjpa7y8oVyo82j1KKSmwKG1TsL3zDHnWHqlG0RERERE5JJi4pO5a9wydh49zZiBTWlXO8TTIV0zJXhFREREsou1sPVX+H0EnIyEuj3gplcgqHrm+/L2gYrhzkNERERERDIt5mwyd49fxvbDp/nk7qZ0qFPa0yFlCY/vwlHQdOjQgZkzZ15w7N133+Xhhx++7DXnNrHo1q0bp06d+lubkSNH8tZbb1127KlTp7J58+bzz//1r38xa9asTESfvnnz5tGjR49r7kdEROSauVwwc4R747Kjno3l6FaY2Bu+vRN8isDdU6H/V1eX3BWRK8qP8+xzHn/8cSpUqIDL5cqyPkVERAqa2IRkBo5fzpZDsYy+qwkd6+aP5C4owZvjBgwYwKRJky44NmnSJAYMGJCh66dPn06JEiWuauyLJ54vv/wyN9xww1X1JSIikisteBuWfAAL34F3G8Jvz0HMgZyPI2oljGkPB9dC1zfhwYVQo2POxyFSgOTXebbL5WLKlClUqlSJ+fPnZ0mf6UlNTc22vkVERDwtLiGZe8YvZ9OBGD68owmd6pXxdEhZSgneHHbrrbfyyy+/kJiYCEBkZCQHDx6kTZs2PPTQQ4SHh3Pdddfx0ksvpXt91apVOX78OACvvvoqderU4YYbbmDbtm3n24wdO5ZmzZoRGhrKLbfcQnx8PIsXL2batGk888wzhIWFsWvXLgYNGsT3338PwOzZs2ncuDENGzZkyJAh5+OrWrUqL730Ek2aNKFhw4Zs3br1svcXHR1N7969adSoES1atGD9+vUA/Pnnn4SFhREWFkbjxo2Ji4vj0KFDtGvXjrCwMBo0aMCCBQuu7cUVEZGCbccfMPdVaNQPhq2CBrfA8rEwKgx+fsIpkZATTu6Fb/pDQBl4ZDk0v98pryCSBYwxXYwx24wxO40xw9M5X9IYM8UYs94Ys9wY0+BK1xpjgowxfxhjdri/lsyp+8lK+XWePXfuXBo0aMBDDz3EN998c/74kSNH6NOnD6GhoYSGhrJ48WIAvvjiCxo1akRoaCh33303wAXxAAQEBADOJ/E6duzIHXfcQcOGDQHo3bs3TZs25brrrmPMmDHnr5kxYwZNmjQhNDSUTp064XK5qFWrFseOHQOcRHTNmjXPv4YiIiK5xenEFAZ/toL1UTF8cEdjbrqurKdDynIF+38bvw2Hwxuyts+yDaHr65c8HRwcTEREBDNmzKBXr15MmjSJfv36YYzh1VdfJSgoiNTUVDp16sT69etp1
KhRuv2sWrWKSZMmsWbNGlJSUmjSpAlNmzYFoG/fvgwdOhSAF198kXHjxvHoo4/Ss2dPevTowa233npBXwkJCQwaNIjZs2dTu3ZtBg4cyOjRo3niiScAKFWqFKtXr+ajjz7irbfe4tNPP73k/b300ks0btyYqVOnMmfOHAYOHMjatWt56623+PDDD2ndujWnT5/Gz8+PMWPG0LlzZ0aMGEFqairx8fGZeaVFRET+Er0HfrgPyjSAHu9CoaLQ+yNo/xwsehfWfAmrv3CSv22fglK1sieOhBj4uh+kJMGgXyEwf60MEM8yxngDHwI3AlHACmPMNGvt5jTNXgDWWmv7GGPqutt3usK1w4HZ1trX3Ynf4cBz1xSs5tlA1syzv/nmGwYMGECvXr144YUXSE5OxtfXl8cee4z27dszZcoUUlNTOX36NJs2beLVV19l0aJFlCpViujo6Cu+rMuXL2fjxo1Uq1YNgPHjxxMUFMTZs2dp1qwZt9xyCy6Xi6FDhzJ//nyqVatGdHQ0Xl5e3HXXXXz11Vc88cQTzJo1i9DQUEqVytu7kIuISN6VkJxK9Jkkos8kcTLe/fVMEtPWHWRdVAzvD2hMlwblPB1mttAKXg9I+/GxtB8b++6772jSpAmNGzdm06ZNF3zM62ILFiygT58+FC1alGLFitGzZ8/z5zZu3Ejbtm1p2LAhX331FZs2bbpsPNu2baNatWrUrl0bgHvuueeCj3/17dsXgKZNmxIZGXnZvhYuXHh+pcD111/PiRMniImJoXXr1jz11FOMGjWKU6dO4ePjQ7Nmzfjss88YOXIkGzZsIDDwMjuJi4iIXEpSPHx7N2Ch30QnuXtOySrQ4x14fB00fwA2TYEPmsHkwXDk8r8fMy01xen3xA7o9wWE1Mna/kUgAthprd1trU0CJgG9LmpTH5gNYK3dClQ1xpS5wrW9gAnu7ycAvbP1LrJRfptnJyUlMX36dHr37k2xYsVo3rw5v//+OwBz5szhoYceAsDb25vixYszZ84cbr311vNJ1qCgoMvGBxAREXE+uQswatQoQkNDadGiBfv372fHjh0sXbqUdu3anW93rt8hQ4bwxRdfAE5iePDgwVccT0RE5Gqluizro07x8Z+7eHzSGu4et4we7y+g9etzqPfPGdT95wxavT6HHu8v5O5xy3l80lpG/ryZzYdiebdfGN0a5s/kLhT0FbyXWQGQnXr37s1TTz3F6tWrOXv2LE2aNGHPnj289dZbrFixgpIlSzJo0CASEhIu248xJt3jgwYNYurUqYSGhvL5558zb968y/Zjrb3s+cKFCwPOxDElJSXTfRljGD58ON27d2f69Om0aNGCWbNm0a5dO+bPn8+vv/7K3XffzTPPPMPAgQMv27+IiMgFrIVfnoAjG+HOyRBULf12xcpDl9egzVOw9EOndMOmH6FOd+jwHJQLvfY4fnsWds2Gm0dB9Q7X1p9I+ioA+9M8jwKaX9RmHdAXWGiMiQCqABWvcG0Za+0hAGvtIWNMujuOGGPuB+4HqFy58uUj1TwbuPZ59owZM4iJiTlfPiE+Pp6iRYvSvXv3S46XXuw+Pj7nN2iz1pKUlHT+nL+///nv582bx6xZs1iyZAlFixalQ4cOJCQkXLLfSpUqUaZMGebMmcOyZcv46quvLnu/IiIimWGtZefR0yzedYJFO4+zdPcJYhOc35cVSxahVEBhSgf6UadMMYL8fSnpX4igooWcr/6FKFnU+Vq8iC/eXun/bs8vtILXAwICAujQoQNDhgw5v6ogNjYWf39/ihcvzpEjR/jtt98u20e7du2YMmUKZ8+eJS4ujp9//vn8ubi4OMqVK0dycvIFk6zAwEDi4uL+1lfdunWJjIxk586dAEycOJH27dtf1b21a9fu/Jjz5s2jVKlSFCtWjF27dtGwYUOee+45wsPD2bp1K3v37qV06dIMHTqUe++9l9WrV1/VmCIiUoAtHwvrv4WOL0CtG6/cPiAEbhgJT
2yA9sNh70IY09HZnO1adqdf9jGsHAetH4em91x9PyKXl97/TC7OIL4OlDTGrAUeBdYAKRm89rKstWOsteHW2vCQkJDMXJpj8ts8+5tvvuHTTz8lMjKSyMhI9uzZw++//058fDydOnVi9OjRgLNBWmxsLJ06deK7777jxIkTAOdLNFStWpVVq1YB8NNPP5GcnJzueDExMZQsWZKiRYuydetWli5dCkDLli35888/2bNnzwX9Atx3333cdddd3H777Xh7e2f43kREJP/beCCGFZHRbD4Yy/7oeKLPJJGYcvlNPaNOxvPdiv08PmkNEf+ZzY3vzOelaZvYfCiWrg3K8V7/MJaP6MTC565n6iOtGT+oGW/fHsqI7vV5uENN+kdUpvN1ZWlWNYiapQMI8i+U75O7UNBX8HrQgAED6Nu37/mPkIWGhtK4cWOuu+46qlevTuvWrS97fZMmTejXrx9hYWFUqVKFtm3bnj/3yiuv0Lx5c6pUqULDhg3PTzb79+/P0KFDGTVq1AWbLPj5+fHZZ59x2223kZKSQrNmzXjwwQev6r5GjhzJ4MGDadSoEUWLFmXCBOfTfu+++y5z587F29ub+vXr07VrVyZNmsSbb76Jr68vAQEB5z/eJSIikiF7l8DM56F2V2j7dOauLRoEHZ+HFg/BL0/C7JchciH0+QQC0l28eGnbfoMZz0O9m6HTyMxdK5I5UUClNM8rAgfTNrDWxgKDAYyz5HKP+1H0MtceMcaUc6/eLQcczZ7wc0Z+mWfHx8czc+ZMPvnkk/PH/P39adOmDT///DPvvfce999/P+PGjcPb25vRo0fTsmVLRowYQfv27fH29qZx48Z8/vnnDB06lF69ehEREUGnTp0uWLWbVpcuXfj4449p1KgRderUoUWLFgCEhIQwZswY+vbti8vlonTp0vzxxx8A9OzZk8GDB6s8g4iInJec6uLfv2xmwpK96Z4v5O2Ff2Fv/Av7EOB++Bf2Yc/xM+yLdvZnKhVQmFY1gmlVI5jWNUtRKahoun2Jw1zpY0N5WXh4uF25cuUFx7Zs2UK9evU8FJFkln5eIiKSrrjD8Ek7KBQA988Fv+JX35e1sHoC/Pac00/fMRkvsXBoPYzvAiG1YdD0C+v/Flz5f4mEhxhjfIDtQCfgALACuMNauylNmxJAvLU2yRgzFGhrrR14uWuNMW8CJ9JsshZkrX32crFoni3nrFy5kieffJIFCxZcso3+bIiIFBzHTyfy8FerWb4nmvvaVKN9nRDOJKYQl5DCmcQUziSlnv/+tPtx7vvSgX60rukkdGuVDrhkyaQCLt0XRSt4RUREJG9JSYLv7oHEOLh76rUldwGMgaaDoKJ787UvekO7p50SDt6XmSrFHoKv+0GREjBgkpK7ku2stSnGmGHATMAbGO9O0D7oPv8xUA/4whiTCmwG7r3cte6uXwe+M8bcC+wDbsvJ+5K86/XXX2f06NGqvSsiIgBsiIrhgYkrOXEmiff6h9ErrIKnQyowlOAVERGRvOX3EbB/Kdw6HsrUz7p+y1znrAae/izMfxMiF8Etn0LxdCamSWfgm36QGAtDZkBg2ayLQ+QyrLXTgekXHfs4zfdLgFoZvdZ9/ATOyl6RTBk+fDjDhw/3dBgiIpILTF1zgOd+WE+pgML88FArGlS4xkUYkinaZE1ERETyjnWTYPkYaDkMGtyS9f0X8ofeH0KfMXBoHXzcBrbNuLCNKxV+GAqHNzhJ5rINsz4OEREREZE8IMVdb/eJb9cSVqkE04a1VnLXAwpkgjc/1x3OT/RzEhGRCxxaBz8/DlXbwg3/l71jhfaDB+Y7q3e/6QczRzilIQBmvQTbfoUur0Ptztkbh0geo/mbXEx/JkRE8q+TZ5IY9NkKPl24h0GtqvLlfc0JDijs6bAKpAJXosHPz48TJ04QHBysYs25mLWWEydO4Ofn5+lQRETyn6T4vFcvNj4avr0LigTBrZ9dvjZuVilVE+6dBb+/CEs+g
L2LoU5XWPw+RNwPzR/I/hhE8hDNs+VimtOLiORfWw7Fcv/ElRyJSeSNWxtxe3glT4dUoBW4BG/FihWJiori2LFjng5FrsDPz4+KFSt6OgwRkfxl809OeYE7J0P19p6OJmNcqfDDfRB3GAb/BgEhOTe2rx90fwuqtYWfHoW5r0LNG6HzazkXg0geoXm2pEdzehGR/OfX9Yd4evI6ihXx4dsHWtC4cklPh1TgFbgEr6+vL9WqVfN0GCIiIp6x7BNITYQpD8LDi6FILp+MuVKdWHfNhpvfg4rhnomjfi8oFwbrv3NW7ubECmKRPEbzbBERkfwt1WV5+/dtfDRvF02rlGT0XU0oHahPaeQGBbIGr4iISIF0fCfsXeRsTnbmKPzyJOTm2oiuVPjpEdjwHXT6FzQd5Nl4SlaB9s+AXzHPxiEiIiIikoOstczddpTuoxbw0bxdDIiozDdDWyi5m4to+YmIiEhBsWYiGG+46VUocx3Mfhlqd4HQ/p6O7O9cLpj2GKz7Bjq+CG3/4emIREREREQKnA1RMbz22xYW7zpB5aCifHhHE7o3KufpsOQiSvCKiIgUBKkpTrK01k1QrBy0fgJ2zIJfn4bKLaBkVU9H+BeXC355HNZ+Ce2HO6tmRUREREQkx+yPjufNmduYtu4gQf6FGHlzfe5oXoVCPioGkBvppyIiIlIQ7PgdTh+BJnc7z728oc/HYAz8+IBTDiE3sBZ+fQpWfwHtnoEOwz0dkYiIiIiIR01euZ/2b85lwY7s38j05JkkXv55M9e/PY/fNx9mWMea/PlMBwa1rqbkbi6mFbwiIiIFwZqJ4F/aWcF7Tskq0O0tmHI/LHwH2j3tufjASe5OfxpWfQZtnoSOI5wEtIiIiIhIAeRyWf73x3Y+mLuTwj5e3DthJWPubkqHOqWzfKyE5FTGL9rD6Lm7OJOUwm1NK/HkjbUpW1x1dvMCpd5FRETyu7jDsH0mhN0B3r4Xnmt0u7Pp2rzX4MBqz8QHTnL3t+dgxafQ6jHo9JKSuyIiIiJSYCUkp/LYpDV8MHcn/ZtVYsFzHakZEsD9X6xi7tajWTZOqsvy3cr9dHxrHm/M2EZEtSBmPNGO/97aSMndPEQJXhERkfxu7ddgU6Hx3X8/Zwx0fxsCysKPQyHpTM7HZy3MfAGWfwIth8GNLyu5KyIiIiIF1onTidwxdim/rD/E8K51ea1vQ0oH+vH10ObUKRvIAxNXMWvzkWseZ9+JeG4ZvZhnv19P6cDCTLq/BeMGNaN2mcAsuAvJSUrwioiI5GfWwpovoXIrKFUz/TZFSkKf0XBiF8wccfVjnTkB0budMTMT3x//hKUfQfOH4KZ/K7krIiIiIgXWzqOn6fPRYjYdjGX0nU14sH0NjHt+XKJoIb68tzn1ygXy0Fer+H3T4ase56e1B+g2agG7jp3m3X5hTH2kNS2qB2fVbUgOUw1eERGR/GzvYojedeX6utXaQatHYfEoqN0Z6nTN+BhnT8Gid2HpaEhJgMLFoVwjKBcK5RtDuTAIqg5eF72vbC3MGgmL34dmQ6HLa0ruioiIiEiBtXjXcR6cuIpCPl5Mur8FjSuX/Fub4kV9mXhfcwaOW87DX63mgzsa06VBuQyPcSYxhZembeL7VVGEVynJu/3DqFiyaFbehniAErwiIiL52ZqJULgY1O915bbXvwi758JPw+DhJRBwhc0bkhOcmrkL3nKSvA1vg6qt4dA6OLgWlo+F1ESnbeFiULYRlA9zEr7lw2DdN05iOHwIdHtTyV0RERERybOstczYeJiElFTa1QohOKBwpq6fvHI/z/+4gWql/Bk/qBmVgi6ddC3m58vEeyMY9NkKHvl6DaP6Q/dGV07ybjwQw6PfrCHyxBkeu74mj3WqhY+3PtyfHyjBKyIikl8lxMCmqRDaHwr5X7m9T2Ho+ymMaQ8/PQJ3fJd+0tWVCuu/g7mvQsx+qNEJbhjprNpNKzUZjm6BQ2udhO+htRcmfQGa3APd3
lZyV0RERETyrFPxSTz7/Xp+d9fFNQYaVShO+zql6VgnhEYVS+Dtlf581+Wy/O+P7XwwdydtapbiwzubULyIb7pt0wr082XCkAgGf7acxyatIdVaeoaWT7ettZZxC/fw3xlbCfYvzNf3taBlDZVjyE+U4BUREcmvNnwPKWehSTqbq11K6brOJme/Peuszo0Y+tc5a2HHH05ZhaObnJW4vT6A6h3S78vb112qoRE0GegcS02GY1udhC9A2J1/L90gIiIiIpJHLNt9gie+Xcvx04m82L0eEdWCmLftGPO2HeWDOTsYNXsHJYv60q52CB3qhFywujchOZWnJ6/jl/WH6N+sEq/0boBvJlbUBhT24fPBEQz+fAVPTFqDy2Xp3bjCBW2On07kmcnrmLvtGDfWL8MbtzSipH+hLH0NxPOMzcxGKNc6mDFdgPcAb+BTa+3rF53vAPwE7HEf+tFa+7L7XCQQB6QCKdba8CuNFx4ebleuXJlV4YuIiOQtYzpAShI8tChzK2SthS9vgb2L4IH5EFIHolbBrJcgcgGUrAad/gn1+yg5K+nRcuwCQPNsEREp6FJSXYyas5MP5uygSrA/7w9oTIMKxS9oc/JMEvN3HOPPbcf4c/sxTpxJumB176Kdx1m19yTDu9blgXbVz2+mllnxSSnc+/lKlu45wVu3hnJL04oALNhxjKe+W0fM2WRe7F6Pu1tUueoxJNdI9weYYyt4jTHewIfAjUAUsMIYM81au/mipgustT0u0U1Ha+3x7IxTREQkXzi8AQ6ugS7/zXz5A2Og90fwUUv44V5ng7TNP4F/CHR7yymr4KN3/UVERESkYDpw6ixPTFrDisiT3NKkIi/3ug7/wn9PsZX0L0SvsAr0CquAy2XZeDDmgtW9vt5ejL6zCV0bZnyTtPQULeTD+EHNuO+LFTz9/ToSU1zsjT7DJ3/upmbpAL4YEkG9csWuaQzJ3XKyREMEsNNauxvAGDMJ6AVcnODNkxKSU/Hz9fZ0GCIiIo7VE8G7EDS6/equDywLPd+Hb++EE7uhw/PQ8hEoHJi1cYqIiIiI5CG/bTjEcz+sx2Xh3X5hfyuJcCleXoZGFUvQqGIJHutUi5NnkrBAUBaVSyhSyJtx9zRj6BcreWHKBgAGRFTmXz3qU6SQ8lX5XU4meCsA+9M8jwKap9OupTFmHXAQeNpau8l93AK/G2Ms8Im1dky2RpsJ78/eweeLI1k+4oZLFs0WEZF8aPtM2DINerzr1JvNLZITYP23ULcHFA26+n7q9YBBv0Kp2hBQOuviExERERHJY84mpfLKr5v5etk+QisWZ9SAxlQJzsBGxpeQHXVw/Xy9GTswnLdmbiO8akm6NLi2lcGSd+Rkgje9zOfFBYBXA1WstaeNMd2AqUAt97nW1tqDxpjSwB/GmK3W2vl/G8SY+4H7ASpXrpxlwV9OlVL+nDiTxPqoUzSuXDJHxhQREQ+Lj4YpD8LZaChRFdo/4+mI/rL1F0g4lbnN1S6laptr70NEREREJA/bejiWR79ew46jp3mgfXX+cWMdCvnkzr0o/Hy9ebFHfU+HITksJ/80RgGV0jyviLNK9zxrbay19rT7++mArzGmlPv5QffXo8AUnJIPf2OtHWOtDbfWhoeEhGT9XaSjTc1SGAPzt6s8sIhIgTFrJCTEQJXW8Od/4egWT0f0lzUToXhlqNbB05GIiIiIiORZcQnJfLZoDz0/WMTJ+GS+GBLB813r5drkrhRcObmCdwVQyxhTDTgA9AfuSNvAGFMWOGKttcaYCJwE9AljjD/gZa2Nc39/E/ByDsZ+WUH+hWhYoTjzdxzj8RtqXfkCERHJ2/avgNUToOUwaPMkfBgBUx+Ge/8A75z81ZqOk5Gwex50eAG8NPEUEREREcmohORUVu89yaJdx1m86wTro2JIdVna1w7h7dtDKRVQ2NMhiqQrx/4Xaq1NMcYMA2YC3sB4a+0mY8yD7vMfA7cCDxljUoCzQH93srcMM
MU4u4D7AF9ba2fkVOwZ0a5WCKP/3EXM2WSKF8lFdRhFRCRrpabAr09CYHnoMNzZdKzrG/DDvbD0I2j9mGfjW/MVYCDsjis2FREREREpyFJSXayLimHJruMs2nmCVftOkpTiwtvLEFqxOA93qEGrGqVoXi0IL+25JLlYji4zcpddmH7RsY/TfP8B8EE61+0GQrM9wGvQrnYIH8zdyeKdx+naUEWsRUTyrRVj4fAGuP0LJ7kL0OAW2PgjzH0V6nSDUjU9E5srFdZ+BTWuhxKVrtxeRERERKSAOXkmiR9WR7F41wmW74nmdGIKAPXLFWNgiyq0qhlMRLVgAgp7+JN5IpmgP61ZpHHlEgQU9mH+jmNK8IqI5Fexh2DOq1DzBqjX86/jxkD3t+Gj5jBtGAya7pnyCLvmQuwB6PyfnB9bRERERCSX+3P7MZ6evI5jcYlUL+VPr7DytK5ZihbVgwnyL+Tp8ESumhK8WcTX24tWNYKZv/041lrc5SRERCQ/mfkCpCZBtzedpG5axcpB59fgp4dh5TiIGJrz8a35AooGO6uIRUREREQEcGrrvjZ9CxOW7KV2mQA+G9SMBhWKezoskSyj3VeyULvaIRw4dZZdx854OhQREclqu+bAph+h7T8gqHr6bcLugBqd4I+X4OTeqx/r5F74ojd8eQus/RoSYq58zZnjsHU6NOoPPlp9ICIiIiICsPFADD3eX8iEJXsZ0roa04a1UXJX8h0leLNQ+9ohAMzffszDkYiISJZKToBfn4agGtD68Uu3MwZufs/5+vNjYG3mx4pcBGM7woHVcGw7TH0I3qwFk+506vwmxad/3bpJ4EqGJndnfkwRyTOMMV2MMduMMTuNMcPTOV/cGPOzMWadMWaTMWaw+3gdY8zaNI9YY8wT7nMjjTEH0pzTxwBERCTPS3VZPpq3kz4fLSIuIZkv723Ov26uj5+vt6dDE8lyKtGQhSoFFaVaKX/m7zjGkDbVPB2OiEjWOHMCDqyCAyshaiXERMHtE6B0Pc/FtH8FbPgOmj8IwTWyf7zFoyB6F9w9BXz9Lt+2RCW48WX49SlYMxGaDMz4OKsmwK//gJJVYMC3zr1FrYCNP8CmKbD1F/D1hzpdnY3danYCn8JOInnNRKjYzLM/FxHJVsYYb+BD4EYgClhhjJlmrd2cptkjwGZr7c3GmBBgmzHmK2vtNiAsTT8HgClprnvHWvtWTtyHiIhIdtsfHc8/vlvH8shoujcsx6t9GlCiqD7lJvmXErxZrF2tUny7cj8Jyal6V0hE8p7kBDi84a9k7oGVcDLSOWe8oHR9OHMUfhwK983xTCmAleNh+rPOatWVn0GLh6DdM+BXLHvGi94N89+C6/pCjeszdk3Twc5q25kjnA3ZipW/fPvUFPh9BCz72CnxcOt4KFLCOVcpwnl0/g/sXeQkezf/BBu/h8LFod7NULYhHNsKN4+6plsVkVwvAthprd0NYIyZBPQC0iZ4LRBonA0hAoBoIOWifjoBu6y111BLRkREJPex1vLj6gO8NG0TAP+7PZQ+jStonyTJ95TgzWLtaocwYcleVkaepE2tUp4OR0Tkyo5shlWfOQndwxucxClAYHmo2NRJVlYMh3JhUDjAqfM6aQD8+Tp0+lfOxZmSCNOfgdUToOaN0PlVWDTKWV277hsnlrA7wSsL31yz1hnTu5CTYM0oLy/oOQpGt4ZfnoQBk/6+Kds5Z0/C5MGwey60eBhufAW80/n17OUN1do5j25vwe55fyV7137prOxt0PeqblNE8owKwP40z6OA5he1+QCYBhwEAoF+1lrXRW36A99cdGyYMWYgsBL4h7X25MWDG2PuB+4HqFy58tXeg4iISLY4FZ/EiCkb+XXDISKqBvH27aFUCirq6bBEcoQSvFmsRfVgfL0N83ccU4JXRHK/vYvh637gSoEKTaHlI04yt0LTS686rdsNGt8FC9+BWp2h8sW5hWwQexC+vdtZUdz2aej4gpPw7P0hN
LsXZgyHaY/Cik+hy3+hSsusGXfLNNg5C7q8DsXKZe7a4BrQ6Z8w8wXYMBka3f73Nsd3OK//qX3Q84OM18/19oVaNzqPHgmw8w8oHOg8RCQ/S++doouLfXcG1gLXAzWAP4wxC6y1sQDGmEJAT+D5NNeMBl5x9/UK8DYw5G8DWTsGGAMQHh5+FUXGRUREsp61ltlbjjJi6gaizyTxXJe63N+uOt5eWrUrBYcSvFnMv7AP4VWCmL/9GC90Ux1EEcnFdvwB394FxSvBwKlQvGLGr+3yOuxZAFPuhwcXOSt7s8veJfDdQEiOh9snQv2eF56v0ASGzHRWs/7xL/isi1NO4caXnXq4VysxDn4b7pQ/aDb06vpo/qBTO/e3Z6F6Bwgo/de5nbNg8hAnWXvPz1eflPb1c8o0iEhBEAWk/YetIs5K3bQGA69bay2w0xizB6gLLHef7wqsttYeOXdB2u+NMWOBX7IhdhERkSx1LrE7as4O1kfFULN0AOPuaUaDCsU9HZpIjvPydAD5UbvaIWw9HMeR2ARPhyIikr6NP8I3/aFUbRgyI3PJXXBWivb5GE7udWrHZgdrYflYmNDDqa973+y/J3fPMQYa3grDVkD74bBtOnwQDnP/A0lnrm78ea9D3EHo/k76JRMywssben3oxDD96b/ua8lH8NVtUKIy3D8361Yci0h+twKoZYyp5l6J2x+nHENa+3Bq7GKMKQPUAXanOT+Ai8ozGGPSfkShD7Axi+MWERHJMi6XZeamw/R4fyH3fbGSk/FJ/PeWhkx/rK2Su1JgKcGbDdrVdkozzN9+zMORiIikY9UE+H4IVGwGg34B/6ssJ1OlFbR+DFZ9DttmZGmIJCfAT8OcpGjNG2DoHChd98rXFfKHjs/DsJVQtzv8+V/4oBmsnwzJZzM+/pFNsHQ0NLkHKjW7+vsACKkDHYY7tXLXT4Zpw2Dm81Cnm5NcL6E6liKSMdbaFGAYMBPYAnxnrd1kjHnQGPOgu9krQCtjzAZgNvCctfY4gDGmKHAj8ONFXb9hjNlgjFkPdASezIHbERGRAsZay44jcSQkp17V9S6XZfqGQ3QbtYAHJq7iTGIKb97aiDn/6EC/ZpUp5KMUlxRcxvn0Vv4UHh5uV65cmePjulyWiP/MpmWNYN4f0DjHxxcRuaTF78PvLzpJ09snQqFr3HQgJRHGXg+nj8LDS64+WZxWTJRTOuLgGmc1bvvnnE3LrsbexU593kPrAAMlq0CpOk7SNaQOhNR1VjH7FfvrGpcLPusKJ3Y4ieKiQdd+T6nJ8GkndxxAu2ehw/NXf18iuZsK3hUAnppni4hI3vW/37cxas5OfL0NdcoGElqxBKEVS9CoUnFqlQ68ZM3cVHdi9/05O9h+5DTVQ/x59Pqa3NyoPD7emk9LgZPuXxTV4M0GXl6GdrVKMXfbUVJdVoW9RcTzrIW5r8L8N6F+b+g7FnwKXXu/PoWhzycwtiP88oSTNDbX8G9e5EL47h4ncdz/G2dDt2tRpRUMnQvbZ8Lh9XBsKxzbDrvnQmrSX+0Cy0NIbSf5a12wf6lTWiErkrvg1Nnt9RFMeRDaPgkNbsmafkVERERE8oD524/x/tyddL6uDNVDAli3/xTT1h7kq2X7ACji602DCsVoVLEEjSoWJ6xSCSqUKMKvGw7x/pyd7Dx6mpqlA3ivfxg9GpVXnkXkIkrwZpN2tUP4cc0BNh6IIbRSCU+HIyIFmcsFM56D5WOg8d1w83tObdisUrYBXP+is8HZukkQNiDzfaSmwJL3YfYrEFwD+n3lJFyzgpe3kyhOmyxOTYFTe+HYNifpe3y78/3aryDpNFRpDaF3ZM3455RtAA8tzNo+RURERERyucMxCTzx7Vpqlw7k3X6NKVLI+b+Iy2XZc+IM66NOsW5/DOuiTjFx6V6SUlwAFPL2IinVRZ0ygXx4RxO6NiiLlxK7IulSgjebtKn1Vx1eJXhFxGNSU+CnR2D9JGg5DG7697WtsL2Ul
sOcOrzTn4GqrTNXVzZqFfz8OBzZAPV6Oitn05ZMyA7ePk4iObjGhYlfayH2IBQpqfIJIiIiIiLXKCXVxaPfrCYhOZUP72xyPrkLzqefa4QEUCMkgD6NnU2fk1NdbDscx/qoGLYdjqVF9WA6X6fErsiVKMGbTUoFFKZBhWLM33GMRzvV8nQ4IlIQJSfAD/fC1l+cFbZtn86e5C44q2T7fAyjW8PUh2HgtCsnSBNiYc4rsHwsBJaDfl9C3R7ZF2NGGAPFK3hufBERERERDzt46ixB/oXw8732T/299ft2VkSe5L3+YdQsHXDF9r7eXjSoUJwGFYpf89giBYmWJ2WjdrVCWL3vFLEJyZ4ORUQKmsTT8PXtTnK365vQ7pnsT5yWrAJdX4fIBbD0o0u3sxY2/wQfRjjJ3eYPwCPLoN7Nnk3uioiIiIgUYAnJqbwxYytt35hL7w8XcSjm7DX1N2frET7+cxcDIirTK0yLKESykxK82ahd7RBSXZbFO094OhQRKUiiVsJnXZwNy3p/DM3vz7mxw+6EOt1h9v/Bkc1/P39qH3zTH74bCP6lYOhs6Prf7C/JICIiIiIil7QyMppuoxbw0bxd3FS/DFEnz9Lnw8VsPRx7Vf0dOHWWp75bR71yxXjp5vpZHK2IXEwJ3mzUpHJJ/At5M3/HMU+HIiIFwemjMPUR+LQTnD4G/b++ug3ProUxziZufsXhx/shJck5npoCi9+HD5vDnvlw06swdB5UaJqz8YmIiIiIyHmnE1N46aeN3PbJEhKTXXwxJILRdzXluwdaYrHcNnoJi3cez1SfSSkuhn29mpRUy0d3NsmSUg8icnmqwZuNCvl40bJGKeZvP4a1FqOPHotIdkhNdkodzHsNks9Cq8eg/bNQONAz8QSEQM/3nZW6816Dej2cTdQOb4DaXaDbm5nbhE1ERERERLLcn9uP8cKPGzgYc5Z7Wlblmc518C/spInqly/GlIdbM+iz5dzz2XLeui00w2UW3pixlTX7TvHhHU2oVso/O29BRNyU4M1m7WuXYtaWI+w5fobqIVcuKC4ikim7/4TfnoVjW6FGJ6fcQalcsLFjna7Q+G5Y+I7zCCgDt38B9Xqqzq6IiIiIiAedik/ilV+28MPqKGqE+DP5gZaEVw36W7vyJYow+cFWPDBxJY9PWsvBUwk82L76ZRev/b7pMJ8u3MPAllXo3qhcdt6GiKShBG82a1c7BID5248pwSsiWefUfvh9hLNZWYkqTjmGOt1yV/K0y2twMhJK14PrX3TKNoiIiIiISLpSXRYvQ7Z++ve3DYf450+bOBmfxLCONRl2fc3LllAoXsSXCUMieHryev47YysHT51lZM/r8Pb6e4z7o+N5evI6GlYozoju9bLtHkTk75TgzWZVgv2pElyU+TuOM6h1NU+HIyJ5XXICLB4FC/7nPO84Alo9Cr5FPBtXegoHwqBfPB2FiIiIiEiuF5uQzO0fLyE+KZVHOtagb5OK+Hpn3bZJR2MT+NdPm5ix6TDXlS/GhCHNuK58xhZgFPbx5r1+YZQv4ccnf+7mcGwCo/o3pkihvxLDiSmpDPt6NRb46M4mFPZR3V2RnKQEbw5oVyuE71dFkZiSqn/kROTqbfsNfnsOTu2F+r3gpn+rlq2IiIiISB6Xkupi2Ndr2Hn0NLXLBPLcDxsYNXsnj3Ssya1NK1LI5+oTvduPxPHtiv18t3I/iSkunutSl6Ftq+GTyeSxl5fh+a71KF+8CCN/3sSAsUsZd084wQGFAXht+lbWRcXw8V1NqRRU9KrjFZGrowRvDmhXO4SJS/eyKvIkrWqW8nQ4IpIX7ZztbFoWUhcG/gTVO3g6IhERERERyQL//nUL87cf47W+DenfrBLzth/jvVk7eGHKBj6Ys4OHOtTg9maVMrxg7ExiCr+sP8i3K/azet8pfL0NN9Uvyz9uqn3NpSPvaVWVMsX8eHzSGm4ZvZgJQyLYdDCWzxdHMqR1Nbo0KHtN/YvI1
THWWk/HkG3Cw8PtypUrPR0GpxNTCPu/37m3bTWe76o6NCKSSakp8HFrSEmEh5eCr5+nIxIRuZxcVAxcsktumWeLiOR1Xy7dy4tTN3Jvm2r8s0f988ettSzYcZz3Zu9g1d6TlC3mx4Ptq9M/onK6NXOttazZf4rvVuzn53UHOZOUSs3SAfRvVok+jSucX2mbVVbtPcl9E1bgZQxJKS5qlA7guwdaXtNqYxHJkHTn2lrBmwMCCvvQtEpJ5m8/zvNdPR2NiOQ5K8fDsa3Q7ysld0VERERE8onFO4/z0rRNdKwTwgvdLlwMZoyhXe0Q2tYqxZJdJ3h39g5G/ryZD+ft4oF21bmzeRWKFPIm+kwSU9Yc4NsV+9h+5DRFfL25ObQc/ZpVpknlEtm2YVvTKiX54aFW3PPZclKt5YM7Giu5K+JBSvDmkHa1Q3hz5jaOxiVQOlAJGhHJoPhomPsqVGsPdbt7OhoREREREckCu4+d5sEvV1EjxJ9RAxrj7ZV+ItYYQ6uapWhVsxRLd5/gvVk7+PevW/j4z12EVSrJ/O3HSEp1EVapBK/1bUiPRuUI9PPNkXuoHhLAjMfbEZ+USkhg1q4QFpHMUYI3h7R3J3gXbD/OLU0rejocEckr5r0GibHQ5TXIpnffRUREREQk58TEJ3PvhJX4eHsx7p5mGU7ItqgeTIv7g1m+J5r35+xgXdQp7mxRmX7NKlG3bLFsjjp9/oV98C+s1JKIp+lvYQ6pX64Ywf6FmL/jmBK8IpIxR7fAinHQdDCUuc7T0YiIiIiIyDVKTnXx8NerOHDyLF8NbU6loKKZ7iOiWhAT722eDdGJSF6lAik5xMvL0LZWKRbsOI7LlX83thORLGItzHwBCgdAxxGejkZERERERK6RtZaR0zaxaOcJ/tO3Ic2qBnk6JBHJJ3I0wWuM6WKM2WaM2WmMGZ7O+Q7GmBhjzFr3418ZvTYvaFc7hOgzSWw6GOvpUEQkt9s+E3bNgfbDwT/Y09GIiIiIiMg1mrA4kq+W7ePB9jW4VZ/sFZEslGMlGowx3sCHwI1AFLDCGDPNWrv5oqYLrLU9rvLaXK1trRAA5u84RsOKxT0cjYjkWilJzurd4FoQMdTT0YiIiIiIyDWat+0oL/+ymRvrl+HZznU8HY6I5DM5uYI3Athprd1trU0CJgG9cuDaXCMksDD1yxXjz+3HPB2KiORmy8dA9C7o/B/wzpkdcEVEREREJHvsOBLHo1+voU7ZYrzbLwwvL22eLCJZKycTvBWA/WmeR7mPXaylMWadMeY3Y8y5XYUyei3GmPuNMSuNMSuPHct9idR2tUNYvfckcQnJng5FRHKjM8fhzzeg5g1Q+yZPRyMiIiIiItcg+kwS905YSWFfbz69Jxz/wtrrXkSyXk4meNN7i+ri3cZWA1WstaHA+8DUTFzrHLR2jLU23FobHhIScrWxZpuOdUJIcVle/22rNlsTyY8ST0Ni3NVfP+ffkHTaWb0rIiIiIiJ5UkJyKnO3HeW+CSs4HJvAmIFNqVCiiKfDEpF8KiffOooCKqV5XhE4mLaBtTY2zffTjTEfGWNKZeTavCKiWhAPtKvOJ/N3k5Ds4r+3NMTHO0f3uhOR7BK5EL4fAq5U6PEO1O+ZuesPb4TVEyDifghRXS4RERERkbxkz/EzzNt2lHnbjrF09wkSU1wU8fXmf7eH0qRySU+HJyL5WE4meFcAtYwx1YADQH/gjrQNjDFlgSPWWmuMicBZYXwCOHWla/MKYwzDu9bFv7AP//tjO/FJKbzXvzGFfJTkFcmzXC5Y9C7MeQWCqkMhf/jubmhwK3R7E4oGXbkPa2HGcPArAR2GZ3fEIiIiIiJyjRKSU1my+wR/bjvG3G1H2XsiHoDqpfy5o3llOtQpTfNqQfj5ens4UhHJ73IswWutTTHGDANmAt7AeGvtJmPMg+7zHwO3Ag8ZY1KAs0B/a60F0r02p2K/oj3zYf9yCCwLAWWdr4FloUgQeP09cWuM4
bFOtfAv7MMrv2wm/ouVfHxXU4oU0j/6InlOfDRMfQi2z4Dr+kDP98HHDxa+C3/+FyIXQI93oW63y/ez9Renbbe3oIje3RcRERERyY0OxyQwY+Mh5qZZpevn60XL6sHc26YaHWqXpnJwUU+HKSIFjHHyp/lTeHi4XblyZfYPNOdVmP/G3497+UJAGQgsc2HiN6i6kwjy8mbS8n08P2UDzaoGMe6ecAL9fLM/XhHJGgdWwXeDIO6QUzM3YiiYNCXDD2+AKQ/BkQ0QOgC6vJZ+8jYlET6MAN+i8MAC8NbGCyKSp2lr8AIgx+bZIiK5wJnEFGZsPMyUNQdYtOs41kK1Uv50qBOiVboiktPSnWsri5AVrh8BbZ+C00cg7rDzOH3ESfrEHYHTh+HkHti3BM5GO9ec2gdtn6J/RGWKFvbhqW/Xcteny5gwJIISRQt59n5E5PKsheVjYeYLzps2Q2ZCxaZ/b1e2IQydA/PfhAVvw+55zgrfWjde2G7pR3AyEu6equSuiIiIiEg6thyK5enJ6+hUtzR3taxC6UC/bB0v1WVZuPM4U1ZHMXPTEc4mp1IpqAiPdqxJr8YVqBESkK3ji4hkhlbw5rSURPhuIEQugsfXgn8pAGZtPsLDX6+mWrA/E++LyPZfViJylRLjYNpjsOlHqNUZ+nycsRq7B9c4q3mPbYHGd0PnV8GvuPMm0PtNoFo7GPBN9scvIpL9tII3GxljugDv4ZQt+9Ra+/pF54sDXwKVcRZzvGWt/cx9LhKIA1KBFGttuPt4EPAtUBWIBG631p68XBy5cp4tIvmWtZY7P13Gyr0nSUpxUcjbi55h5RnSuhr1yxfL0rE2H4zlx9VR/LTuIMfiEinm50OP0PL0aVyB8ColMUa/5kTEo9L9R0gJXk84th0+agHhg6H72+cPL9p5nPsmrKRscT++vK85FUoU8WCQkmVO7oXAcuCjldked/aUUyt38zQn4RpSByo0gQpNnUdg2ctff2Sz8wZN9C64/p/Q+ol062xfUkoizHvd2ZAtsDz0eh82/gDrvoVHlkFwjWu4ORGRXEP/880mxhhvYDtwIxCFs4nxAGvt5jRtXgCKW2ufM8aEANuAstbaJHeCN9xae/yift8Aoq21rxtjhgMlrbXPXS6WXDvPFpF8ae7Wowz+fAUjb65Pu9ohfLYoku9XRXE2OZVWNZzatx3rlMbLK/O/glwuS+SJM/yx+QhT1hxg6+E4fL0NHeuUpm+TCnSsW5rCPiq/ICK5hhK8ucqvT8PK8fDwEifJ5LZqbzSDPltBMT9fvryvOdVK+XswSLlm+5bB592gTlfo96WnoymYzhyHrb/Clmmw+09wJTvJ1Sot4fgOOLIJbKrTtlgFKN/4r4Rv+TBnlS3A2q/hl6egcCDcOh6qtb36mKJWwpQH4cQO53mrR+Gmf1/TbYqI5CJK8GYTY0xLYKS1trP7+fMA1trX0rR5HqgEPIKzIvcPoLa11nWZBO82oIO19pAxphwwz1pbh8vI1fNsEclXUlJddBu1gKQUF78/2Z5CPs4Ci1PxSXyzfD8TFkdyODaB6qX8Gdy6Krc0rUjRQumXPbPWcigmgfVRp1gXFcP6qFOsj4ohLiEFgCaVS9CnSUV6NCxHSX8t0BGRXEkJ3lzlzHEY1RiqtII7vr3g1MYDMQwcvxwvY/jyvgjqls3aj5xIDok7AmPaw9mTkJIAt02A63p7OqqCIfYQbP0FNv8EexeBdUGJKlC/J9Tr5SRvz628TT4Lh9bDwdXOpmkHVkH0bndHBkrVclb27pkPVdvCLeOcjROvVfJZmPsq7FsKd/3wVyJZRCTvU4I3mxhjbgW6WGvvcz+/G2hurR2Wpk0gMA2oCwQC/ay1v7rP7QFOAhb4xFo7xn38lLW2RJo+Tlpr/7YrqDHmfuB+gMqVKzfdu3dvttyniEhak5bvY/iPG/joziZ0a1jub+eTU11M33CI8Qv3sC4qhuJFfBkQUZl7WlXBz8ebde4k7
rmk7rG4RAB8vAx1ywXSqGIJQisWp0X1YKoEa4GViOR6SvDmOgvfgVkjYeA0qN7+glM7j8Zx56fLSExxMWFwBKGVSngkRLlKqcnwRS84sBqGzICfH4PYg/DI8ozVa82v4o7A5qnuVbLhmStvcCUn98KWn52VuvuXAxZK1YZ6PZ3EbtlGkNF6WfHR7oTvGifhe2wLNLgVOjyvTdBERK5MCd5sYoy5Deh8UYI3wlr7aJo2twKtgaeAGjgreEOttbHGmPLW2oPGmNLu449aa+dnNMGbVq6fZ4tIvhCflEKHN+dRsWQRfnio1WXr31prWbX3JOMW7mHmpsMAuNzpDmOgRkgAjSoWJ7RiCRpVLE69csXw81XpBRHJc9L9h1CZCk9q/hCsGA+/j4D751+Q7KpZOpDJD7TiznFLufPTZXx6Tzgtqgd7MFjJlFkjnZWjfcc6H/Pv9SGM6QAzX3A25SpokuJh6Yew8F1IOu0cK1YR6vdyVjVfTbLXWqe8wtZfnMfhDc7xMg2h4wtOYrd03auLt2gQ1LzBeYiIiOQeUTjlF86pCBy8qM1g4HXrrOLY6V61WxdYbq09CGCtPWqMmQJEAPOBI8aYcmlKNBzN7hsREcmITxfs4WhcIqPvanLFzc2MMYRXDSK8ahD7o+OZvCqKooW8aVSxOA0rFCfQzzeHohYRyXlK8HqSrx/c8BL8cC+snwRhd1xwunJwUSY/0Iq7xi3jnvHLGX1XE66vmwUfDZfstfEHWPIBRDwAjW53jpVt6GzIteAtaHAL1LrRoyHmGJcLNnwHs1+G2ANQtwe0fxaOboVNU2DFWCfxm9FkryvVKWmw9VcnqXtqL2Cgcgunhm2dbtqoTERE8rMVQC1jTDXgANAfuOOiNvuATsACY0wZoA6w2xjjD3hZa+Pc398EvOy+ZhpwD/C6++tP2X4nIiJXcCwukU/+3EWX68rStErmPgVZKagoT91YO5siExHJfVSiwdOshU87OTVDH10FhYr+rUn0mSTuGb+cLYdi+V+/MHqGlvdAoJIhR7fA2E5QtgHc8wv4pCnMn5IIH7eFpDPwyFJns678LHKRs2L50FooFwad/wNVW1/YJiEGtv0Gm6bCrtmQmpQm2dsHKoY79Yt3z3MSutt+g/gT4F0IqneEut2dDewCSuf8/YmIyKWoREM2MsZ0A94FvIHx1tpXjTEPAlhrPzbGlAc+B8rh/Cxet9Z+aYypDkxxd+MDfG2tfdXdZzDwHVAZJ0F8m7U2+nJx5Il5tojkaSOmbODbFfv5/cl2VA8J8HQ4IiK5hWrw5lp7l8BnXaDjCGd1YzriEpK5d8JKVkRG82rvhtzRvHIOBylXlBADY6+HhFh4YD4U+/sGAOxfDuNugmb3Qve3r228yEWwcryTBK3dGYKqX1t/5ySedlbJYqFcaOaTpyd2wR//chKyxSpAp5eg4W1XLsGQXrI3sLxzPPkMFC7m3Gfd7k7phPyeIBcRybuU4C0A8sw8W0TypJ1HT9P53fnc2bwyL/dq4OlwRERyE9XgzbWqtIR6Nzv1SZvcA4F/L8MQ6OfLF0MieOjLVbwwZQNxCck80F4fRc81rIWpD0P0Hrjn5/STuwCVIqDFQ7D0I7iu799XtGbUjj/g27vAeMPG72HGcChVB+p0gdpdnXG8MrhhQGoKHFzjrJLdPddJQruS/zofWM5J9JZt5HwtFwrFK/59w7L4aPjzDafsgo8fXP8itHgk3VXp6fIrDqH9nce5ZO/WX8G/lFPaoWrbC1dEi4iIiIhIvvT6b1sp4uvN451qeToUEZE8QSt4c4sTu+DD5k4d3p6jLtksKcXFU9+t5Zf1hxjWsSb/uKn2FYvNSw5Y8D+Y/X9OGYKWj1y+bdIZ+KglePnAQ4vAt0jmxto8Db4fAqXrwd1TITEGts90EqJ7F4ErBYoEQa2bnBWvNTs5ydNzrIXo3bBrjpPU3bPA6QMD5Ro5pQ+qd3DKIBxa99fj+DawLqePIkHuZK876
Rt7COa/CYmx0PhuZzV6Om9UiIhIgaCJSQGQp+bZIpKnLNt9gn5jlvJM5zo80rGmp8MREcltVKIh15vxPCz7GB5cBGXqX7JZqssyYsoGJq3Yzz0tq/DSzdfh5aX/S3nMrrnwZV+o3xtuHf/3la3p2T0PvugFrR+HG1++YvPz1k+GKQ9AhaZw52QoUuLC8wkxsHM2bJ8BO36HsyedRHKV1lCjo/NGwu55ELPfaV+8MtTo4CR1q7UH/+BLj50UD0c2OTV1D693kr5HNv+12rfG9c5GZ2Wuy/j9iIhIfqRJSQGQ5+bZIpInWGvp/dFijsQkMPfpDhQplMFPJYqIFBwq0ZDrtXsG1n4Fv78Id/94yWbeXobX+jYk0M+HsQv2EJeQwhu3NsIn9SwkxkFg2RwMuoA7tR9+uNcpj9Dz/Ywld8FZIdtkICx+30kMV2hy5WtWTYCfH4eqbWDAJCiczkYDfsWhQV/nkZoCUcudZO+2GTBrpHO+Wjto86QTQ1D1jMdcqChUauY8zklJgmNbnLEqNs1YPyIiIiIiIun4Zf0h1u0/xZu3NlJyV0QkE5TgzU2KBkG7Z+H3EbBzlrOR1CUYY3ihWz2KF/Fl1O+baH70W26Pn4Q5Gw2VmjubWl3Xx6lfKtkjOQG+G+gkOft9mX7C9XJufMWppfvTMLh/3uXryy4d7dTZrXmDM1ZGyjp4+0CVVs7jxpfh9FEoGpzx2rwZ4VPIKdEgIiIiIiJyDRJTUnlj5lbqlg2kb5OKng5HRCRPucK29pLjIoZCyarw+7/AlXrZpsa6GBa8itUlhtPvxEdsclUhqe3zkBAL05+Gt2rDl7fCum8h8XTOxF+QzHgODq6GPh9DqauoDVWkBHT/HxzdBIvevXS7BW87yd26PaD/15mv2XtOQOmsTe6KiIiIiEi+N2vzEa5/ex7v/LGdhOTL/x/1Wkxcspf90Wd5vls9vFWCUEQkU5TgzW18CsMN/+ck/dZ8mX4ba52Vn5+0gykPEFAihAUtxtIz9hl6rG/JlFbfk3z/Qmj9GBzbClPuhzdrOhtzbfvNWXEq12b1RFj1ObR5Cur1uPp+6naDBrfAn2/A0S0XnrMWZr8Cs1+GBrfCbZ87fz5ERERERESyWXKqi1d/3cx9X6zkTGIK783eQbf3FrB41/EsHysmPpn35+ykba1StK8dkuX9i4jkd9pkLTeyFsZ3hpOR8OjqCz/6H7US/ngJ9i50Vvpe/0+4ri94eTFn6xFe/XULu46dISSwMANbVOGOiIoER6+FDZNh0xQ4Gw1FSjp1Xxv1g8otMl6DNS9JjINNU8G6oFKEUyPX6xrfzzi5FyIXQuQC2PgjVGkJd/147atizxyHD5o59XDv/d3pz1qY+QIs/Qga3w03v6fVtyIiklfkw4mFXCzPzrNFJEMOnDrLsK9Xs2bfKQa2rMKI7vVYujuaf07dyL7oePo2qcCIbvUIDsiaBSivTd/CmAW7+fXRttQvXyxL+hQRyafSnWsrwZtb7V8B425wavJePwKO73BWcm6ZBkVLQfvnoOmgv9Vtdbks83ccY/yiSOZvP0ZhHy/6NK7A4NbVqBPiB7vmwobvYOuvkBwP1TtC51ehzHWeuc+sdnwHLB8La7+GpLi/jhcuBhWaOsneihFQMdwpkXA5MVGwZ4E7qTsfTu1zjhcNdl63rm+Af3DWxL1+Mvx4H3T+DzR/CH59ClZ9BhEPQJfXrz05LSIiknOU4C0A8vQ8W0Qua87WIzz13TpSUi2v39KQHo3Knz+XkJzK+3N28Mmfuwnw8+GFrvW4Lbwi5hoWDUWdjOf6t//k5kbleft27e8hInIFSvDmOZMHOyUVGt4Ca79xaq+2ehRaPgKFA694+Y4jcXy2OJIfV0eRkOyiTc1SDGlTlQ61S+OVEg+rv4B5r0NirLNKtOMICCyTAzeWxVypsH0mLB8Du+eCl6+zwVzEUCgSBFHLYf9yiFoBRzc7q3rBWdVbqZmT8K0U4SSB9y6CP
fOdpO7JPU67IiWhSmuo1g6qtoGQelmfcLUWvukPu/+Emp1g6y/Q5kno9FL+XGEtIiL5mX5xFQB5fp4tIn+TnOrird+38cmfu6lfrhgf3tmEaqX80227/UgcI6ZsYEXkSSKqBfGfPg2oWfrK/0dNzxOT1vDbxsPMe6YD5Ypf5X4jIiIFhxK8ec7JSOej+9ZC+BBo9wwEZL4e0ckzSXyzYh9fLN7L4dgEqpfyZ3DrqvRtUhH/1FiY/xYs/wR8/JykYstHrn4jr5wUH+0kqVeMg5h9EFgemg2BJvc4G4qlJzEODqxyVkhHuZO+Z09e2KZwcajaGqq2hWptofR1ObOCNuYAfNTCSbh3fBHaPa3kroiI5EX65VUA5Pl5tohc4FDMWR79eg0r957kjuaV+VeP+vj5Xr5EnMtlmbxqP/+ZvpX4pBQebF+DRzrWvOJ1AIkpqRw4eZYNB2J4fNJaHu5Qg2e71M2q2xERyc+U4M2TDq51SgmUrHrNXSWnupi+4RDjF+5hXVQMJYr68mrvhnRvVA5O7II//uWsHC1WEW4YCQ1vzZ0JxoNrnDIMG76H1EQnERsxFOp0B2+fzPVlrXPvUcshIQYqt4SyDT1X73bPAjhz1Nl4TUREJG/KhZMHyWr5Yp4tIgDM3XaUp75dS1KKi//0bUivsAqZuv746UT+8+sWflxzgKrBRfl374a0qhHMkbgE9kefZX90PPui49l/Mp6o6LPsPxnP4dgEzqUiyhbz4/en2lHMzzcb7k5EJN9Rglcc1lpW7zvFK79sZu3+U/RvVomXbr6OIoW8nQTjzBfg8HqoEO7UhK3c/PIdnj0FR7c45Q+ObnEehQOdMgl1u2WonMQVxR2GzT/B+u/gwErw9YfQ/k5it3S9a+9fREREsooSvAWA5tkieV9Kqov//bGdj+btom7ZQD68swk1QgKufOElLNp5nBenbmTP8TMU8vYiKdV1/pwxTiK3UsmiVAoqSqWgIue/r1suUMldEZGMU4JXLpSc6uKdP7Yz+s9d1AgJ4P0BjalXrhi4XLDuG2dTt9OHnUTtDSMhoCwc3wZHNqdJ5m6G2AN/dVq4GITUhdiDEBvllH2o3Rka3Aq1bgJfv4wHePoYbPkJNk11auJinXIJTQZC2ADwK57Fr4iIiIhkASV4CwDNs0XytmNxiTzy1WqWR0bTv1klRva8LkOlFa4kITmVL5ZEcuJ0kjuRW5RKJYtQoWQRCvt46FOSIiL5ixK8kr5FO4/zxLdriTmbzIvd63F3iyrOLqhJZ2DRKFj0HriSnc3Jzm1Q5l3I2aSsTH1nBW3p+s6jeEXn7VmXC/Yvg40/wKYpEH8cCgVCvR5Osrd6e/BO513a+GjYMs25Zs98Z7xSteG6vtCgL4TUydkXR0RERDJLCd4CQPNskbzL5bLcNW4Zq/ed5LW+DenTuKKnQxIRkYxTglcu7cTpRJ6evI65245xY/0yvHFLI0r6F3JOxh6EpaOdjdfOJXODamS83m1qCkTOhw0/wJafITEGigZD/V5OrdnS9WHbb7DpR9g9D1wpEFT9r6Ru6fq5sxawiIiIpEe/tAsAzbNF8q5PF+zm379u4fW+DekfUdnT4YiISOYowSuX53JZPlscyeu/baFUQGHe6RdGi+rBWTtISiLsnOVskLbtN0g5+9e5EpWdchDX9YVyoUrqioiI5E36BV4AaJ4tkjdtPRxLz/cX0a52CGMHNnU+uSkiInlJuv9wZ3AJphQEXl6Ge9tUo3m1IB79Zg13jF3KsOtr8dj1NfHx9sqaQXwKQ93uziPxNGyfAce3Q63OUKGJkroiIiIiIiLZIDEllScmraVYER9ev6WhkrsiIvmIErzyNw0qFOfnR9vw0k+bGDV7B0t2Hefd/o2pUKJI1g5UOAAa3pq1fYqIiIiIiMjfvP37drYejmP8oHBKBRT2dDgiIpKFsmhZpuQ3AYV9ePv2UN7tF8bmg7F0fXc+78/ewb4T8Z4OTURERERERDJh8
a7jjF2wmzubV+b6umU8HY6IiGQxreCVy+rduAJhlUowYuoG3v5jO2//sZ0mlUvQu3EFujcsR7De+RUREREREcm1Ys4m8/R366ga7M+I7vU8HY6IiGSDHF3Ba4zpYozZZozZaYwZfpl2zYwxqcaYW9McizTGbDDGrDXGaEeHHFS1lD9f3deChc915NkudTiTmMq/ftpExH9mM/iz5Uxdc4D4pBRPhykiIiIiIiIX+ddPGzkSl8g7/cIoWkhrvERE8qMc+9fdGOMNfAjcCEQBK4wx06y1m9Np919gZjrddLTWHs/2YCVdFUsW5eEONXm4Q022HIrlp7UHmbb2AE98u5Yivt7cdF0ZeodVoE2tUvhm1aZsIiIiIvmAMaY38LO1NtXTsYiIZ1lrc2yDs5/WHuCntQd56sbahFUqkSNjiohIzsvJt+8igJ3W2t0AxphJQC9g80XtHgV+AJrlYGySSfXKFaNeuWI827kOKyKjmbr2INM3HOKntQcJ8i/EbeEVebh9TYoX9fV0qCIiIiK5wVdAnDFmAjDeWrvN0wGJSM77Zvk+/u/nTXRrUI4hbarRoELxbBvr4KmzvDh1I40rl+DhDjWybRwREfG8nFxmWQHYn+Z5lPvYecaYCkAf4ON0rrfA78aYVcaY+y81iDHmfmPMSmPMymPHjmVB2HI5Xl6G5tWDea1vQ1aMuIGxA8NpUT2IMfN30+7NuYydv5uEZC1UERERkQKvLPAS0B7YbIxZaIwZbIzx93BcIpJD5m49yogpG6gS5M+MTYfp8f5C+n2yhN83HSbVZbN0LJfL8o/v1pHqsrzbLwwffcJSRCRfy8l/5dP7DMrFv8XeBZ67xEfXWltrmwBdgUeMMe3SG8RaO8ZaG26tDQ8JCbmmgCVzCvl4cWP9Mnx0Z1OmP9aWsEoleHX6Fjq9/SdT1xzAlcWTFhEREZG8wlobZ639xFrbAmgILANeAw4ZY8YaY1p4NkIRyU4bomJ45OvV1C9fjB8fbsWS5zvxQre6RJ08y/0TV3H92/P4fNEeziRmzd4m4xftYcnuE/yrR32qBOt9JBGR/M5YmzNJN2NMS2Cktbaz+/nzANba19K02cNfieBSQDxwv7V26kV9jQROW2vfutyY4eHhduVK7cfmSQt3HOe137aw6WAs15Uvxgvd6tG6ZilPhyUiIiLZJ2cKS+ZxxpiKwP3As0ASUARYDQy11q73ZGwZoXm2SMbtj46n7+jFFPL2YsrDrShdzO/8uZRUFzM2HWbcwj2s2XeKQD8fBkRU5p5WValQoshVjbf1cCw9319E+zohjLm7aY7V+xURkRyR7j/qOZng9QG2A52AA8AK4A5r7aZLtP8c+MVa+737o2te1to49/d/AC9ba2dcbkxNPHMHl8sybd1B3py5jQOnztK+dgjDu9alXrling5NREREsp4yCZdgjPHFKUc2BGdOvAz4FPgWKImz0XC4tbaex4LMIM2zRTImJj6ZWz5ezNHYBH54qBW1ygResu3qfScZt3APMzYeBqBLg7Lc26YajSuVyHCSNiE5ld4fLuL46SRmPtGW4IDCWXIfIiKSa6T7CyHHNlmz1qYYY4YBMwFvnM0lNhljHnSfT6/u7jllgCnuX2o+wNdXSu5K7uHlZejduAJdGpRl4pK9vD9nB91GLeCWJhX5x021KVf86t6ZFhEREckrjDHvAwNwSpRNBJ6y1qbdbPisMWYEEOmB8EQkGySmpHL/xJXsOxHPF/dGXDa5C9Ckckma3FGSA6fOMmFxJN8s38ev6w8R6OdD9ZAAaoT4UyMkgBohAdQs7U/lIH8K+VxYdfHt37ex9XAc4weFK7krIlKA5NgKXk/QyoLc6VR8Eh/N28XniyIxBu5tU41Hr69FkULeng5NRERErp1W8KbDGDMbGAv8aK1NukQbH5x9J/68TD9dgPdwFkx8aq19/aLzxYEvgco4CyPestZ+ZoypBHyBs9mbCxhjrX3Pfc1IYChwbofiF6y10
y93P5pni1yey2V54tu1TFt3kPf6h9ErrMKVL7rI6cQUfl1/kE0HY9l17DS7jp7hcGzC+fPeXoYqQUXPJ3+LFfHlrd+3cWfzyvy7d8OsvB0REck9PFuiwRM08czd9kfH8/bv25i69iBVg4vy31sa0bx6sKfDEhERkWujBG82McZ445Q8uxGIwil5NiDtSmBjzAtAcWvtc8aYEGAbTlI3GChnrV1tjAkEVgG9rbWbM7q/RVqaZ4tc3hsztvLRvF0826UOD3eomWX9nk5MYc+xM07C99zj6Bn2HD9DUqqL6iH+/PJoG4oWyrEP64qISM7ybIkGkYtVCirKu/0bc3uzSgz/YQP9xixlYMsqPNelLv6F9UdTRERE8g9jzKvA/ovLkrnLlVWw1v4zA91EADuttbvd104CegFpSz1YINA4tc0CgGggxVp7CDgE4N7XYgtQ4aJrRSQLfLVsLx/N28UdzSvzUPsaWdp3QGEfGlYsTsOKxS84nuqyRJ2Mp0SRQkruiogUQF5XbiKSvVrVKMWMJ9oypHU1Ji7dy03vzGfhjuOeDktEREQkK90NrEnn+CpgYAb7qADsT/M8yn0srQ+AesBBYAPwuLXWlbaBMaYq0Bhnk7dzhhlj1htjxhtjSqY3uDHmfmPMSmPMymPHjqXXRKTAm7v1KP+cupGOdUJ4ued1Gd4c7Vp5exmqBPtTvKhvjownIiK5ixK8kisULeTDv26uz/cPtqSwrxd3jVvG8B/WE5uQ7OnQRERERLJCaf6qcZvWCZwNhTMivUzRxfXWOgNrgfJAGPCBMabY+Q6MCQB+AJ6w1sa6D48GarjbHwLeTm9wa+0Ya224tTY8JCQkgyGLFBwbomJ45OvV1C9fjA/uaIKPt/67LSIiOUO/cSRXaVoliOmPteWhDjX4buV+bvrffGZvOeLpsERERESu1T6gbTrH2+GsxM2IKKBSmucVcVbqpjUYZyM3a63dCewB6gIYY3xxkrtfWWt/PHeBtfaItTbVvdJ3LE4pCBHJhP3R8QyZsIKSRQsxflAzlZwTEZEcpQSv5Dp+vt4816UuUx9pTYmivtw7YSVPfruWk2fS3XBaREREJC/4BHjHGDPUGFPD/bgfZ7XsmAz2sQKoZYypZowpBPQHpl3UZh/QCcAYUwaoA+x21+QdB2yx1v4v7QXGmHJpnvYBNmby3kQKtJj4ZAZ/voLE5FQ+H9yM0oF+ng5JREQKGL2tKLlWo4olmDasDR/O3cmHc3eyYMcx/q9nA7o2KIuXlzboFhERkbzDWvu2MaYUMAoo5D6cBLxnrX0jg32kGGOGATMBb2C8tXaTe6M23Bu4vQJ8bozZgFPS4Tlr7XFjTBucOsAbjDFr3V2+YK2dDrxhjAnDKfcQCTxwzTcsUgAcP53IV0v3MXHpXmLPJvPFvRHUKhPo6bBERKQAMtZeXLYr/wgPD7crV670dBiSBTYfjOXZH9ax8UAs/oW8ua58ca6rUIyGFYrToEJxaoQE4K2kr4iISG6gX8iXYYzxB+rjvE6brbWnPRzSVdE8WwqybYfjGL9wD1PWHiApxUXHOiE80rEm4VWDPB2aiIjkf+nOtbWCV/KE+uWLMfXh1vyy/hCr951k44EYvlm+j8+SnU2hi/h6U69c4PmEb4MKxalVOkAbG4iIiEiuYq09g1NqQUTyEJfL8ueOY4xfuIcFO47j5+vFbU0rMrh1NWqWDvB0eCIiUsApwSt5ho+3F70bV6B34woApKS62H38DBsPxLDhQAybDsTy/aooJizZC0BhHy+ur1uaYdfX5LryxT0ZuoiIiAjGmI7AAKAyf5VpAMBae71HghKRyzqblMqPa6IYv3APu46doXRgYZ7pXIc7IipT0r/QlTsQERHJAdec4DXG+Fprk7MiGJHM8PH2onaZQGqXCaRvk4qA8876nhNO0nfNvlP8sDqK3zYe5oZ6ZXisU00aVSzh2aBFRESkQDLGDAI+BqYAHYCfgNpANeBLjwUmIuk6G
pvAhCWRfLVsH6fik2lQoRjv9Aule8PyFPLRpwRFRCR3yVQNXmPMY8ABa+0P7ufjgHuAXUBPa+22bInyKqk2mMScTeaLxZF8unAPMWeT6VAnhEevr0XTKiU9HZqIiEh+pRq86TDGbATetdZ+aoyJA0KttbuNMR8Ap621wz0cYqZoni352b4T8fT6cCGnziZzY70y3NumGhHVgjBG/7yJiIjHpfvLKLMJ3p3AEGvtfGNMO+BX4F7gFsDfWtsjKyLNKpp4yjmnE1OYuGQvYxfsJvpMEm1rleLR62sRUU0bIYiIiGQxZUDSYYyJB+pbayONMceB6621640xdYF51tqyHg4xUzTPlvwqPimFvh8t5uCps0x+sBV1ygZ6OiQREZG00p1rZ/azJRWASPf3NwOTrbXfASOBFlcbmUh2Cyjsw0MdarDwuY6M6FaPLYfiuP2TJfQfs4TFu46TmTc6RERERK7CCeBcpugA0MD9fTBQxCMRicgFrLU88/16th+J4/07mii5KyIieUZmE7yxQIj7+xuB2e7vkwG/rApKJLsULeTD0HbVWfBsR/7Voz57jp/hjrHLuO3jJSzccdzT4YmIiEj+tQC4yf39d8AoY8xnwDfAHx6LSkTO+2T+bn5df4hnOtelfe2QK18gIiKSS2R2k7XfgbHGmDVATeA39/HrgD1ZGZhIdipSyJshbapxR/PKTF65n9HzdnHXuGXcUK80L3avT9VS/p4OUURERPKXYfy1IOI1IAVojZPs/benghIRx5/bj/HfGVvp3qgcD7av7ulwREREMiWzK3gfARYBpYBbrbXR7uNNcFYfiOQpfr7e3N2yKnOf6cDzXeuyZNcJbnpnPq//tpXTiSmeDk9ERETyAWOMD9D/3HNrrcta+19rbU9r7dPW2lOei05E9p44w6Nfr6ZOmUDevLWRNlMTEZE8J1ObrOU12vxBMutobAL/nbGNH1ZHUTqwMMO71qV3WAW8vDTJExERySD90kyHMeYMziZrez0dS1bQPFvyizOJzqZqh2MT+HlYGyoHF/V0SCIiIpdz7ZusGWPqG2PqpHl+ozHmS2PM88YY72uNUMTTShfz4+3bQ5nycCvKlSjCU9+t45aPF7Nu/ylPhyYiIiJ521KgqaeDEJG/OJuqrWPH0Tg+uKOxkrsiIpJnZbZEwzigMYAxpiLwExCEU7pBtcMk32hcuSRTHmrFW7eFEnXyLL0+XMQzk9dxNC7B06GJiIhI3jQWeMsY84Qxpq0xpknah6eDE8kNrLV8s3wfr/22hT82H+HkmaRsHe+jebuYvuEww7vWpW0tbaomIiJ5V6ZKNBhjTgER1trtxpgngZ7W2o7GmI7AZ9baqtkT5tXRR8ckK8QlJPPB3J2MX7iHwj7ePNapJoNaVaOQT2bfHxERESkQVKIhHcYY12VOW2ttnvo0nObZktWstfxn+hbGLtiDlwGX+7+pNUsH0KxqScKrBNGsahCVgopkSY3cuduOMuTzFdzcqDzv9Q9T3V0REckr0v2F5ZPJTryBc2+jdgKmu7/fBZS5urhEcrdAP1+e71qPfuGV+PevW/jP9K18vWwffZtU5Mb6ZahbNlATQhEREbmSap4OQCS3crks/5q2kS+X7mNQq6o816UuGw7EsCIympWR0fyy/hDfLN8PQOnAwjSrGkTTKiVpVjWIeuUC8fHO3MKLPcfP8Ng3a6hXthj/vUWbqomISN6X2RW8S4D5wC/A7zireTcYY1oC31lrK2VPmFdHKwskO8zddpT3Z+9gzf5TWAsVShThxvpluLF+GSKqBeGbyQmmiIhIPqNMSQGgebZklVSX5bkf1vP9qigeaF+d4V3q/i3h6nJZth+NY0XkSVZFRrMi8iQHTp0FIKCwDxHVgmhVI5hWNUpRt2zgZTdIPp2YQp8PF3H8dCLThrWhUpDq7oqISJ6S7i+5zCZ42wFTgeLABGvtEPfx14Da1tpbrj3OrKOJp2Sno3EJzNlylFlbjrBgx3ESU
1wE+vnQsU5pbqhfhva1QyhexNfTYYqIiOQ0JXjTYYzpe7nz1tofcyqWrKB5tmSF5FQXT367ll/WH+KJG2rxeKdaGV5Ne/DUWVbuPcmy3SdYsusEu4+fASDIvxAtqwfTskYwrWuWompw0fN9ulyWh75axR+bjzDx3ua0rlkq2+5NREQkm1x7ghfAGOMNFLPWnkxzrCoQb609ei0RZjVNPCWnxCelsHDHcWZtOcLsLUc5cSYJHy9Di+rB3FCvNF0alKNscT9PhykiIpITlOBNx2Vq8FoA1eCVgiYxJZVhX6/hj81HeL5rXR5oX+Oa+jt46ixLdp1g0a7jLN55gsOxzubI5Yv70bJGKVrVCGbXsdN8NG8XL3avx31tq2fFbYiIiOS0rEnwAhhj/ICaOBPSXdbahGuLLXto4imekOqyrN1/kj82H+WPzYfZdewMxkDzakH0DK1At4ZlKVG0kKfDFBERyS5K8GaAMcYHaAy8CYyw1i7ycEiZonm2XIuzSak88OUq5m8/xv/1vI57WlXN0v6ttew5fobFu06weNdxluw6wcn4ZAB6h5XnnX7aVE1ERPKsLCnR4AO8BgwDCrk7TQTex5mYJl97nFlHE0/JDXYdO80v6w7x07oD7D52Bl9vQ7taIfQMK88N9crgXzizex2KiIjkasqaZIIxphUw2lob6ulYMkPzbLlapxNTuG/CCpbtiea/fRtxe7Ps38bF5bJsPRzHlkOxdG9UDj/fPLVgXkREJK1059qZzSy9AQwAHgQWuo+1xUn6egFPX210IvlVjZAAHr+hFo91qsmmg7FMW3eQn9cdZPbWoxTx9eaG+mXoFVqedrVDKOSjDdpEREQKmFPAtX02XSSPiDmbzODPlrMuKoZ3+4XRK6xCjozr5WWoX74Y9csXy5HxREREclpmV/AeBoZYa6dfdLw78Km1tlwWx3dNtLJAciuXy7IiMppp6w4yfcMhTsYnU7yIL90alqVfs8qEVSrh6RBFRESullbwpsMY0+TiQ0A54DkAa23bHA/qGmieLZl18kwSd49fxrbDcbw/oDFdGuSq/zqKiIjkFVmygrc4sCud47uAEpnsS6TA8vIyNK8eTPPqwYzseR0Ldxxn2rqD/LT2IN8s30/vsPIM71pPG7OJiIjkHytx9q+4eFK+FBic8+GI5JyjcQnc/elyIk+cYczd4XSsW9rTIYmIiOQrmU3wrgMeAx656Pjj7nMikkm+3l50rFuajnVLcyYxhdHzdjFmwW5+33yERzrW5N421VQnTEREJO+rdtFzF3Ast25WLJJVFuw4xj+nbuRIbCKfDWpGq5qlPB2SiIhIvpPZgp/PAvcYY7YbYyYYYz43xmwD7iID9XeNMV2MMduMMTuNMcMv066ZMSbVGHNrZq8Vycv8C/vwdOc6zHqyPW1qluLNmdu46Z35/LH5CJkppyIiIiK5i7V270WP/UruSn626WAMd49bxt3jlpPisky8N0LJXRERkWySqQSvtXY+UBuYDAQAxdzfd8ZZ2XtJxhhv4EOgK1AfGGCMqX+Jdv8FZmb2WpH8onJwUcYMDGfivREU8vFi6BcrGTh+OTuPxnk6NBEREbkKxphXjTEPpnP8QWPMK56ISSQ7RJ2M58lv19Lj/YVsOBDDi93rMfsf7QmvGuTp0ERERPKtzJZowFp7EBiR9pgxJhS45QqXRgA7rbW73ddMAnoBmy9q9yjwA9DsKq4VyVfa1grht8fb/n979x0fVZX+cfxz0hsESAKEXkKvQkCKIAhItTcs6NpQ176ua9nmrj9X17auay+ICthFEcFGFxXpPaFIDykQIAXSz++PM0jAAAkkmUzyfb9eeU3mzL13nskNw8kzz30O7/64jf98t4ERzy3gun4tuHtoG2qHBHo7PBERESm9ccBlJYwvBR4C/lq54YiUrwMH83lx7iYmLtyKMXDLwNbcNqg1kaGas4qIiFS0Mid4T0NjYEex+zuBM4tvYIxpDFwEnMPRCd6T7
lvsGOOB8QDNmjU77aBFvC3Q348bzmrJ+d0b8cw3iUxYuIXPV+zi/uHtuKxnU/z8tFi5iIiID6gPpJUwvhdoUMmxiJSbnPxC3vlxKy/M3kRmbgGX9GjCH4a1pVGdUG+HJiIiUmNUZoK3pCzUsU1FnwMesNYWGnPU5qXZ1w1a+xrwGkB8fLyalkq1ER0RzOMXd+Wq3s155Iu1PPDJat7+YRtXntmMMV1iqRse5O0QRURE5Pi2AwOAX44ZH4grXhDxKUVFls9W7OKZbzawa/8hBrWL4YER7ekQW9vboYmIiNQ4lZng3Qk0LXa/CZB0zDbxwPue5G40MMoYU1DKfUVqhC5NIvn41r58viKJF+ds4q+freEf09ZydtsYLjijMcM6NCA0yN/bYYqIiMjRXgX+Y4wJAmZ7xoYAj+PWnxCpUnILCtl/MJ/07Dz2ZeeRftBzm53PvoN5LNqSzvrdGXRuXJsnL+1Kfy2gJiIi4jWlSvAaY6adZJPSfEy7GGhjjGkJ7ALGAlcV38Ba27LYc04EpltrPzPGBJxsX5GaxBjDhWc05oLujVi3O4PPVyQxbUUSsxJSCQvyZ3inhlzQvRFnxUUT4F+mtRRFRESkAlhrnzHGRAPPA4cvu8kD/mutfbK0xzHGjAD+C/gDb1hrnzjm8UhgEtAMN9d/2lr71on2NcbUAz4AWgBbgcuttftO7ZWKr5rwvWsD5hK5+WTlFhx329ohATSuG8Z/x3bnvK6N1DJMRETEy0pbwbu3FI9vOdEG1toCY8wdwNe4SeUEa+3aw6sJW2tfKeu+pYxdpNoyxtCpUSSdGkXy4Ij2LNqSzucrdjFj9W6mLt9FdEQQY7o24oLujejetA7HtD4RERGRSmStfcgY839AR1wLsnXW2qzS7m+M8QdeBIbhrnBbbIyZZq0tvvDw7Z7jnmeMiQESjTGTgcIT7PsgMMta+4Qx5kHP/QdO+wWLz3hl3maemJlAtyaRxDevR92wIOqFB1I3PIh6YUHuNjyIumFB1AkLJFAFBCIiIlWKsbb6tqmNj4+3S5Ys8XYYIpUut6CQuYlpfL5iF9+tTyWvoIjmUWHcMrA1l8c3UVWviIhUJH2aWAJjTEMgwFq785jxJkC+tTalFMfoCzxirR3uuf8QgLX28WLbPIRrbXY7riL3W6AtboHiEvc1xiQCg6y1u40xscBca227E8WieXb1MXHhFh75Yh3ndWvEc1d0x1/VuCIiIlVZif9RK8sjUg0FB7g2DS9d3ZMlfxnKk5d2pV54EA9PXc2o5xcwNzHV2yGKiIjUNO8CI0sYH+55rDQaAzuK3d/pGSvuBaADbr2K1cDd1tqik+zbwFq7G8BzW7+kJzfGjDfGLDHGLElLSytlyFKVvffzdh75Yh3DOzXg2cu7KbkrIiLio5TgFanmaocEcnl8Uz69rR8vX92D3IIifvfWYsa9uYiE5AxvhyciIlJT9ALmlzC+ALfQcGmUlH079nK84cAKoBHQHXjBGFO7lPuekLX2NWttvLU2PiYmpiy7ShU0dflOHp66mkHtYnj+yjPUdkFERMSH6X9xkRrCGMPILrF8c+9A/jK6Ayt37GfUfxfw0KerSM3M8XZ4IiIi1V0AEFzCeMhxxkuyE9d+4bAmuErd4q4HPrXOJtw6Ge1Psm+KpzUDnltd6lPNzVi9m/s+XEnfVlG8ck1PggP8vR2SiIiInAYleEVqmOAAf24a0Ip59w/mun4t+GjJTgY/NZf/zdrIobxCb4cnIiJSXS0Cbith/HZgcSmPsRhoY4xpaYwJAsYC047ZZjswBMAY0wBoB/xykn2nAdd5vr8O+LyU8YgPmrU+hbveW06PZnV5/dp4QgKV3BUREfF1WmRNpIbbsiebJ2au5+u1KcRGhnD/8HZc2L0xfurBJiIip0b/gZTAGNMHmI1rnzDLM3wO0AMYYq39oZTHGQU8B/gDE6y1jxljbgWw1r5ijGkETARicefiCWvtpOPt6xmPAj4EmuESx
JdZa9NPFIfm2b5pwcY0bpy4hPaxtZh005nUDgn0dkgiIiJSNiXOtZXgFREAFv2yl//7cj2rdx2gS+NIxg9sRb/WUURFlPaqUREREUAJ3uMyxnQD/oTrjWuApcAzQLS19jsvhlZmmmf7nkW/7OW6t36mRVQ474/vQ52wIG+HJCIiImWnBK+InFhRkeXzlbt46qtEkg64vrztG9aiT6so+raOok/LKCLDVOkhIiInpARvKRhjmuD65d4ANLPW+tR18ppn+5bl2/dxzRuLiK0Tyvvj+xCtD/BF5EQSZ0J0W4hq7e1IROS3SpxrB1R2FCJSdfn5GS46owljujZi9a4D/Lh5Lz9u3sv7i7cz8YetGAOdGtWmryfh26tFPWrp0j4REZFSMcb4A+cDNwHnAquAl4GPvBmXVG9rdh3gugk/E10rmMk3nankroicWNoGeP8qiBsGV3/o7WhEpJSU4BWR3wj096NHs7r0aFaX2wfHkVtQyModLuH7w+Y9vP3DNl5fsAV/P0OXxpGcFRfNuZ0a0KVxJMaocEtERKQ4Y0w7XFL3WiAbmAIMB8ZZa9d5Mzap3jakZDLuzUXUCglk8k1n0qB2iLdDEpGqbt6/wRbB5lmQvRfCo7wdkYiUglo0iEiZ5eQXsmzbPn78ZS8/bN7Lih37KSyyNK4TyrmdGjCiU0PiW9TDXwu1iYjURHrzL8YYswDoDHwMTLLWzvOM5wPdfDXBq3l21bZm1wE+WLyDqct3ERbkz4e39KVFdLi3wxKRqi51PbzUF9qOgA0zYdTT0Ptmb0clIkdTiwYRKR8hgf70i4umX1w09wH7svOYlZDKV2uSmbxoO28t3EpUeBDndmrA8E4N6dc6mqAAP2+HLSIi4g19gReB1621a7wdjFRfBw7lM23FLt5fvIO1SRkEB/gxqkssdw9po+SuSHW0dCIsnwTjpkJwrfI55twnICgCLnwJJo6G1R8pwSviI5TgFZHTVjc8iEt7NuHSnk3Izi1gbmIaX61NZtqKJN77eQe1ggMY0qE+Izo3ZGDbGMKC9NYjIiI1RjxwM7DAGLMVeAd4z6sRSbVhrWXRlnQ+WLyDGat3k1tQRMfY2jx6QSfO796YyFCtlSBSLR1Mh2/+BrkHYN6TcO6jp3/M5DWw7jMYeD+E1YMul8Gsf0D6FqjX8vSPLzXPnk3wzV+g103QZqi3o6n2lGURkXIVHhzA6K6xjO4aS05+IT9s3sNXa5L5dl0Kn61IIiTQj7G9mnHr2a1pGKk+cCIiUr1Za1cAtxtj7gMuA24EngT8gNHGmN3W2n1eDFF8UGpmDp8s3cWHS3awZU82tUICuCy+CWN7NaNz40hvhyciFe37ZyE3A1oNgp9egu5XQ/32p3fMuY9DcG3oe7u73+VSl+Bd/TGcff9phywl2PYjbPgK2gyDpn3Avxql6DbPho9+BzkHYNO3cPFr0PkSb0dVrakHr4hUioLCIhZv3ccny3by2fJd+BnDZfFNuG1Qa5rUDfN2eCIiUn7Ug/ckjDFxHFl0LQqYba0d6d2oykbzbO9Yvn0fL83dzOyEVAqLLL1b1mNsr6aM7BxLaJC/t8MTkcpwYCc838Mly859FP7XExp2geu+gFNd8Hr3Snh1IAx6CAY9eGT8rVGQnQa3/3zqx5aS5WbBC70gM8ndD63neh+3Hw2tz4EgH/0b2Vr4+XX46kGIaecSuzMfgG0/wJj/QPz13o6wOlAPXhHxngB/P/q2jqJv6yjuHtKGl+dt5sMlO/hg8Q4u6dGE3w9uTfMo9YcTEZHqz1q7CXjQGPNnYAxwg5dDkipuXVIGz36byHfrU6kXHsRNA1pyRXxTWsVEeDs0Ealscx8HLAx+CMKjYcjf4Ms/wJpPXNXtKR3zCQiJhD63HT3e5TKYfg/sXgGNzjjNwOUo8590yd1xUyE3ExK+hMQvYeUUCAiF1oNdsrftCHeefUFhPsz8EyyZAG1HwiWvu/7Q13wCH17nfpdy9sNZ93o70
mpJFbwi4jVJ+w/x6rzNvLd4B4VFlgu6N+L2wXG01h8rIiK+TCU+NYDm2ZVjc1oW//l2A9NX7aZWSAC3DGzF9f1bEh6sOh2RclWQ524Dgrwbx8mkJsDLfeHM22DEv9xYUSG8fg5kJsOdS8q+4NquZfD6YBj8l9+2YjiYDk+3hd7jjzyfnL60RHi5H3QbCxe8eGS8MN9VuiZ86b4ydoLxc+0b2o+GDmOgbguvhX1CB9Phw2th6wLof4/74MGv2JUlhfkw9VZY87FL8A75u6rCT12JPzgleEXE61Izcnht/i9MWrSN3IIixnRtxJ3nxNG2QTmtBisiIpVJs/UaQPPsirUj/SDPz9rIJ8t2EhLoz/X9WzB+QGsiw7Romki5ObQfNn0HCdNh43dQtzncPBsCgr0d2fG9fzX8Mg/uXgnhUUfGdy6FN4a4/rnDHyvbMSdfBjsXw92rIKR2yc+5czH8Yf3RCTs5NdbCO+e7thh3Ljt+da61bpuELyFxBqSsccney991id6qJC0RplwBGbvgvOeh+5Ulb1dUCDP+6Cp842+AUU/rd+rUqEWDiFRN9WuH8JcxHbl1UGveWLCFd3/cyhcrkxjRqSF3DomjUyMtFiIiIiLVX2pGDi/M2cR7P2/HGMPv+rXk94NbEx1RhRNOUv2lbYCPr4d+d7qKQ192YJdLliV86SoNiwogPMZdDr9+Gnz/n6N70FYlO352yejBfz46uQvQpCf0uBZ+etktuNagY+mOuXMJbPzGVVOWlNwF16YhYTpsmed6w8rpWfspbJkPo585cesFY6BRd/d1zp8hfQt8fANMvQWiZp3+onrlZeN37v0hIBiumw7Nzjz+tn7+MPpZCKnjFgrMOQAXvQr+VeTDy6Ii1+pk4XPQvB8M/1fVia0UVMErIlXOvuw83lq4hbd+2EpmTgFD2tfnjnPiOKNZXW+HJiIiJ6cK3hpA8+zytS87j1fmbebtH7dSUGi5LL4pd54TR6M6od4OTWq6/EPw+hBIXevuj3wSzrzFuzGVhbWQluASlAkzIGmZG4+Kc5e8txsNTeJd4umTm2DtZ3DrAqjfwath/4a1MHE07NkIdy2H4BJa2mXvhRd6Qv1O8Lvppbv8/d2LXX/du1eVfEyA/Bx4ug20HwMXvXxaL6PGy810C6tF1Ieb55S9evXALnjtbAiu7arNQ+tUSJilYq37QOGbP7vfuSunQJ1mpd//++fgu79Dm3Phsre9u6icte5DnzmPQeo69zr2b4cWA+DydyCsnvdiK1mJ/7j9KjsKEZGTqRsexB/Obcf3D5zDfcPasnT7Pi566QeueWMRP/2y19vhiYiIiJy2oiLL4q3pPDJtLQOenMNrC35hZOdYZt13No9f3EXJXakavn7YJXfHTnEJvpl/grn/dgmRqqww3y0c9r8e8FIfmP1/7vL2IX+H2xfDnUth2D9dteHhJNuIJ1z/2ml3ukvJq5KN38K2hXD2n46fiA2Pcq9v2/ew+uOTH3P7T7B5FvS/+/jHBAgMgY7nw/ovIO/gqcUvzrx/u17Jo589tdYEkY1di4b92+DTm733e1qQB1/cBV8/BO1GwQ1flS25C3DWPTDmOfe7PekSV81b2ax1bVpeHwwfXA2FeXDpBLhrJVz4CuxY5PpbpyVWfmynQBW8IlLlZecWMHnRNl6bv4U9Wbn0blGPO86JY0CbaIwas4uIVDV6Y64BNM8+NdZalu/Yz/SVu5mxejfJGTkEB/gxrGMD7hrSRusPSNWy5lN36XX/e2DYP6CwwCU/V06BPr+Hcx8DvypYM1Z8safW50CH86DtSKgde/J9V77vLoEf+RScOb7iYy2NoiJ4dQDkZbnk9IkWgisqhDeGul6odyyGkBO0unv7fFetePdKCAo/cQy/zHN9Yy+dAJ0vObXXUdOlrodXznItNM5//vSOtfgN+PI+GPBHGPLX8omvNHIOQNJymPek+8BhwB9dy5DTeR9Y8wl8Oh4adIJrPj1x24ryt
O0HmPUobP8BIpvBoAeg61jwL9bJdvsil/gtyIVL34I2QysntpPTImsi4tty8gv5YPEOXpm3md0HcujWJJI7zmnD0A71legVEak69IZcA2ieXXrWWlbtPMD0VUnMWJ3Mrv2HCPL34+x2MYzpGsuQDg2ICNbSKFLFpP8Cr54NMe3h+hlH+lAWFbmqvUWvuETVec8fnRDxttQEeO8KyNgNF7wAXS8v2/7WumrCHYvg9z9BnaYVE2dZrPwApo6HS96ELpeefPtdy1zVYZ/bYMTjJW+zdSFMHOV6jPa9/eTHLCqE/3SC2O5w1ftlCl/wtNgY46rh71j62x7Kp3K8L+6CZe+49gadLiyXMI+SfwiSV7vfp6RlsGsp7N3kHgsIgfNfgK6Xlc9zbfgGPhznqoDHTYXIJuVz3JLsWuYq+jfPgogGMPB+17/6eIsr7t8O713lzt3wf8GZt5au/UnFUoJXRKqHvIIiPl22k5fmbmZ7+kHaN6zFHefEMbJzLP5+Xn+zFRGp6fRGXANonn1i1lrWJmUwfdVuvlydxI70QwT6Gwa2iWF011iGdmxA7RDfWbhFapiCPJhwrkvy3vr9by+9tta1P5j3hKuOveTN4ydHKtOGb9wiVIGhrqVE016ndpx92+ClvtCiP1z1oXeTOQW58EK8W5Rq/LzSV0pOvxeWvg23zIeGnX/7+MQxsGeDq94NLGU7mG/+4nqu3rfh9BOU3nY4D1ZZ53bVR/DpTa4lQfz15XPMglzXlzllHdz0XekX1itJUaGr5i6ezE1d7xYhBIhoCI17QKMe0PgMaNwTQst5fZxtP8CUK1w1+dgp7vnKU+p6l9hNmO5iP+te6HVz6Xr/5ma5yv6E6S4ZPOqZE1fSVzwleEWkeikoLGLayiRenLOJzWnZ1AoJoH6tYKIigomOCKJeeBBR4e77qIhgosKDiIpwY5GhgfgpGSwiUhH05loDaJ59fMkHcrh10lJW7NhPgJ+hf1w0Y7rGcm7HhkSGKakrPuCrh+GnF+GKydBhzPG3++ll+OpBaDUYrph04j6uFcla+PFF+Pav0KAzXPne6VcAHn5tpa2arSg/vQJfPQDXfAJxZbg8/GA6/K8nxLSD62cencjcMh/ePg9G/Bv63Fr6Y+5e5VpFjH4Get1U+v2qkrxs15948euwd7N7HWfdW7GLaOVkuCR97cYuEXsqvXePJ2O3W3QtMNQt2nYqr2Pr9+4DgT0b3P2QSGjkSeI26uESrbUblV/MJ5KyFqaMhexUuPCl8mkHUljgeh8veBoCw6HfHa7FTEjtsh2nqMgtwrbgaWh+llt8zXsfdCjBKyLVU2GR5eu1yfyweQ/p2Xnsycpjb1Yu6dl57DuYX+I+YUH+nNe1Edf0aU6XJifoTSUiImWlBG8NoHl2ydYmHeDGiUvIzMnnwVEdGNMllrrhXq3yESmbhBnw/pXQ+xYY9eTJt18+Gabd4ZJBV39U/lV9J1OQB1/eC8snQYfz4aJXTt5PtjSKCuHNYbBvq+t7641ETm4m/Lc71O8A131R9mrTZe+4nskXvQrdxroxa+Gtke513bXCLaBWWta6BetC6sCNX5ctFm/bsxEWvwkrpkDuAajfCaLjYN00CIpwbSr63l72pF9pfPUw/PQS3DzL/Tspbzt+hrdGQcsBcPXHpU8gZ+91H4qsmAx1msPZD0CzPlCvlXer1rPS4INrYMdPLqazHzz1Hr/7t8MnN7mWK92vhnP/7/ST+as+gs9vdz29r3zf/fusfErwikjNU1BYRPrBPNKz89iblceerFz2ZuWRkJzBFyt3cyi/kG5NIrm6T3PO69qI0KBy/ERVRKRmUoK3BtA8+7dmJ6Rwx5TlRIYG8uZ1vejYqAISBVKzHNrnLotvdAaccW3FXxJ8YKdbBKpOM7jx29K3XVg3DT65EaLauP6ZtRpUbJyHZe+BD8a5RZIG/gkGPVS+i76lrIVXB0LnS+HiV8vvuKU153HXBuOm2dDkFBKDRUUuS
b1/G9yxBELrwObZ8O5FMOpp6H1z2Y85/2mY/ahr7VC3Rdn3r0yFBbBhpluQ7Je54BcIHS9wVbvN+rgkZup6V5W5/gv34UT/e6D3+NJdtl8aKWvhlQHusv7zniufY5Zk6UT44u4jCyKeiLWw8j34+s+QmwH97nJ9aMvrNZeHglyY/gdYMcmdswtfKXt8a6fCtLvBFrmffXlW4u9cAu9fBXkH4dI3oe3w8jt26SjBKyJS3IFD+Xy6bCeTF21nU2oWkaGBXNqzCVef2YxWMV66xExExPcpwVsDaJ59tLd/2Mo/vlhLx0a1efO6XjSoXYaqOJGSZKW6RFzKGne/TjNXydb1iopZ1KywwNPPc43r2xrVumz7b54D718NEfXh2s+hbvPyj7G4lLXw3lj3cyqvS7lLMvsxmP8kXP0JtClDi4TTlZUGz3eH1ufAFe+e+nGSVsBrg+DMW2DEE/DmuZCRBHctO7W+yfu2wX+7wjl/cUnBqigzBZa9DUvegswkqN0E4n8HPa5zv58lSVruzvWmb93CWwP+CD2vO73e0ta6ytq0BLhzacW2gQDXZmHJBLh0wvH/PaRtgC//AFsXQNMzXU/g0+ndW5GshR9fgG/+CrFdYex7ENn45PvlZbv2KsvecRXTl7wJ9VqWf3wHdrmrHXavgnMfhb53VGblsxK8IiIlsdby0y/pTFq0ja/XJFNQZDkrLppr+jRjaIcGBPiXYyWAiEj1pwRvDaB5tlNYZHl0+jom/rCVYR0b8N+x3QkLqoDkmy+zFn6Z45IJ5XHpfE2wfzu8cwFkJsPYya4Sc/ajsHuFq5Id/DB0vLB8q1Vn/RMWPHN6PWd3LIbJl0BgGFzzacUljhJnusuugyLgyikVc9n7YQW5rqo5Pwd+/2Pl9Rme8SdXeXr7Iohuc3rH+vI+l/gb8jf47hEY8x+Iv+HUjzdhhOvxe/si717KX5y1bpGuxW/A+mlucbDW57hq3TbDS/+hyLYf3UJc276HyKZw9p+g21Wn9qHKyg9g6ng473mXLK5oBXmut3LyKrjxG2jY5chj+Tnw/bPw/X9cv95h/3RXBZTne0hFObx4YlCYW3ytSfzxt01e7bbdsxHOugcG/xn8K7D3fV42fHabe8++4evKXGxSCV4RkZNJzczhw8U7mLJoO0kHcmhQO5ixvZpxff8W1AlTDz0RkVKoIn/tSUXSPBuycwu4673lzEpI5cazWvLwqA74awHXo+Ufgs/vgDUfQ9wwuOqD8l1gqDpKS4R3LoT8bNdPs2lvN26tu4x8zmOuIrBBF1dF2Xb46SfZNs+Gdy+GHuPg/P+d3rFS1rrK49xMGPlvOGNc+SUBrYWF/3VJythubjG1ylj8aftPMGG4W5hpxOMV/3zpW+CFXtD9Kjj/+dM/3qF9bsG1g3shspmrJj2ddh+L33RVoLfMd+ehLKyFjF1Qq1H5JBdzM2HVBy6m1HVugbDu17gEdnTcqR3z8IdSsx6FpGVQr7Vr/9FuZOkT/DkH4H/xUKcp3Phd5SVSM1Pcomv+gTB+nqsa/mWua3eQvhm6XA7DHzt+JXNVlboeplzhPvS64EXoetnRj1sLP7/mqn1D67qWKq0GVU5sRUWu1UVoncp5PkcJXhGR0iosssxOSGXST9uYvzGN+rWCeeay7pzVJtrboYmIVHXKcNUANX2enXwghxvfXsz63Rn844LOjOtTwZej+6KM3a5HYdIyaDcaEr+EAfe5KkIpWdIKmHQxGH/Xy7Zh599uU1QIqz+Guf9yC2U16QXn/BVanX1qz5mZAq/0h7BouHl2+fThzNgNn97sLgPvdLGrGD3d5MeeTW4xtS3zodNFcMFLldsz9Mv7XBLxpu9OXEFYHj652VWh3rW8/BLYyyfD57+H819wifzTcTAdnm7r2j4Mf6z0++Vlw9Rb3AcVEQ2h/Sj33tByQNkrH1PXu/Ox8n3Iy4SGXV1P4c6Xlt/vhbWQOMO1bkhdC8YPottB4x6uL3bjntCgc
8nJ8pkPwqJXYPwct21l2rnELaTXrI9LpK963y2cNvoZV9Xsq7L3wofXuurqAffB4L+4xHn2Xrfo2YaZ0OZcuPBlCK/2f7MrwSsicipW7zzA3R8s55e0bG48qyX3D29HSKCqT0REjkMJ3hqgJs+z1yYd4MaJS8jMyeeFq3swuJ2PVUJVhqTl8N5Vrortkteh/WiYdpfri3nZ29DpQm9HWPVsXeh6yobUgWs/O3kP3MJ8WD4J5j/lKiJbDnSJ3sMVv6VRVAjvXuhaK4yfU76rwRcVwsLnXHIssrFr/VCW2A4ryIXvn4MFT0NAKAz9u6vOrOzWADkZ8FIfVyE6fl7FLXiXvNotytX/LncZfXnas8n9XpXHz+69K2HXMvjDutJV5e/f4fZJXQt9b3f3N30HeVkQVAvaDHPvE22GuZ9xSQrzIWG6S+xuXQD+Qe4DhN43u2RrRf1OFBW5it4dP7sPrHYtg4N73GP+QS7J27gHNOrh4ijMdX2Pe14PY56tmJhOZvkkl/T0C3StCgbc51oz+LqCPJjxR/d/SfsxbvG6L+521enD/gln3lp12oZULO8neI0xI4D/Av7AG9baJ455/ALgUaAIKADusdZ+73lsK5AJFAIF1tqTfmxWkyeeIlK+DuUV8q8Z63n3p220b1iL58Z2p31DrY4tIlKCGjGzrulq6jx7dkIKd0xZTmRoIG9e14uOjTQX+I21U2Hqba6C6sr3jvSBLMiFiWPcJfw3fQsNOnk3zqpkwzfw4Ti3kNq4z0q3kNBh+Tmuv+qCZ1zSKaaD279WrPuqHVvs+0auUvfw5eLznoI5/1c+VZ3Hs2MxfHKDW5Bo8MNw1r2lb9OxZYFbOGrvRrdo1PDHoVaDiomzNBK/gveucH09z/5T+R77cPJy7r/dwmB3r3SXmldVaz6Fj693C+qd7FL4HT+7BfgKctwCYG2GufH8HFeRnTDdVcpmp7mEZMsBLtnbbpT7nc1IgqVvw9KJkJXs/p3E3whnXOOdSk1r4cAOl+jdtdR9oJW0wlUSHxYWBXcsqfiF1U5k/Reu4jimrfdiqAjWwqJX4euHwBa5vuSXvln2diG+zbsJXmOMP7ABGAbsBBYDV1pr1xXbJgLIttZaY0xX4ENrbXvPY1uBeGvtntI+Z02deIpIxZmTkMr9H68kI6eAB0a05/p+LfBTvz0RkeL0plgD1LR5trWWt3/Yyj+nr6Njo9q8eV0vGtQO8XZYVYu1MO/fMPdxaNoHrpgEETFHb5OZDK+eDYEhcPMc7yY/qorVH7vL1ht0hms+OfWEVW6WW2BqxyKXEMvcDVmpwDF/7/sFuMvjazV01YidL4GLX6/YqrecA/DFPbD2U2gxAC5+7cStB7L3wjd/gZVToE5zVwUZN7Ti4iuLj653Cclbv4eYdqd/vGOTl5HNYMS/oMN5p3/sipR/CJ5qAx3PhwtfOv52K9+HaXdC7cauB/fxfmZFha61QOKXsH666xcL7gOLPRtcIq/NMLdoWtzQqtfLu6jIfRCxaxnsXulijRvi7aiqt1/muisfzrqnJi7g6fUEb1/gEWvtcM/9hwCstSV2KfdsP8Fa28FzfytK8IpIFbAnK5cHP1nFd+tTGdAmmqcv66Y/8kREjlCCtwaoSfPsjJx8/jJ1DdNWJjGsYwP+O7Y7YUGnsKJ6dZZ30PX3XDvVrTh/3nPH76m5YzFMHAUtznKLiFW1RA24RanWfOIut67I/plLJrjFj5r3gyvfh5ByrggvLICsFJdYz0xyvXEzPV8ZSa4v7gUvQnCt8n3ekljrLhuf+ScICHF9MtuN+O02Kya7hZJyM6DfXTDw/srttXsyWWnwYi+o2xLOfsBdml/WBausdS0GFr/hkpm20CUte93sEoNV8d9EST77PaybBvdv/O3l/0VFMOsfrk1HiwFw+Tul/0DHWpfUTZjuknix3SH+etdHVkSgCiR4LwVGWGtv8twfB5xprb3jmO0uAh4H6gOjrbU/esa3APtwH
0G+aq197TjPMx4YD9CsWbOe27Ztq6BXJCI1mbWWKT9v59Hp6wgJ9Ofxi7owskust8MSEakKlOCtQKVoeXY/cLXnbgDQAYjxfH1QbNNWwN+stc8ZYx4BbgbSPI89bK2dcaI4akqCd/n2fdz1/nKS9udw79A23DYoDn9duXO0A7vg/Sth9yrXA7HfnSevBl36NnxxF/S/u/z7jJ6OpOUu6bb6Eyg4BP7BcP7/oNsV5f9cC551CbA2w+Hyt6tHf8zSSNsAH98AKauh9y3u/AeGuPHp97oFlJr2cR8SlGdP4PK05hP4dDwUFbj7tZtA4zOO9GBt1L3kPrI5B2DlB+53bE+ia8FwxjWup7AvJi83z3E9nC+b6Ba+Oyw30/18Eme4PrSjngL/QG9FKVIdeT3Bexkw/JgEb29r7Z3H2X4gbtI51HO/kbU2yRhTH/gWuNNaO/9Ez1lTJp4i4j2b07K494MVrNp5gMt6NuHv53ciIlhVPSJSoyn7VUFK0/LsmO3PA+611p5zzLg/sAtXbLHNk+DNstY+XdpYqvs8u6jI8sr8zTz7zQYa1A7h+Su707O52gn8xs6l8P5VbqGkS978bUXmiUz/Ayx50/Xk7HxJxcV4Mvk5rvJ48euun2ZgGHS9HLpcDnP+5RKOZ90L5/ztSO/a02EtfPeIq2zsfClc9ErNS37l57ifwaKXXWuK1ufATy+7y6yH/RPOGFc+P+uKlJsFyavcJfmHF97at+XI41Ftjiy8FR3nKnVXfQj52W6s103Q+WLfTuwXFcKzHd3rvPI9N7Zvm1tMLS0BRjzhFkCrGYteiVSmEv9RVWYWYifQtNj9JkDS8Ta21s43xrQ2xkRba/dYa5M846nGmKlAb+CECV4RkYrWOiaCT27rx3+/28hLczexaEs6D49qT59WUdQJq6DVdUVEpKbqDWyy1v4CYIx5H7gAKDHBC1wJvFfC+BBgs7VWl7qVIDUjh3s/XMHCTXsZ3SWWf13chcjQGpaAK43VH7tV2iPqw7hvoUHHsu0/4glIXQef3Q7RbY8sxlZZ9m11LRKWvQuH0l0MI5+EbmOPVF+Om+pWbP/+P67C9OLXIDji1J9zzyb48l63sFT8DTDqad+5HL88BYbAyCfc4lyf3QY/PA9dr4BzH/tt3+aqKjjCtdZo3u/I2MF0T7J3ubv9ZR6s8lw4ERDiPsjodaOr8q0O/Pyhy6VuwauD6ZCWCB9c4xaMu+Zjl7gXkUpTmRW8AbiKgyG4ioHFwFXW2rXFtonDTTatMaYH8AUuERwG+FlrM40x4bgK3n9aa7860XNW98oCEalaft6Szh8+XMHOfYcAiKsfQXzzusS3qEd887o0jwrD6BNsEan+9EZXQUrb8szzWBiuwCLOWpt+zGMTgGXW2hc89x8BfgdkAEuA+6y1+04US3WdZ89JSOW+j1ZyMK+Af5zficvjm/rO/93WuqTl4WrCpOWQvBrqNod2o92q9A27nF413Z5NbhGkhC/dYl7N+sEV7576wmCZKfDaIPAPgPHzKn7RtaJC2DTLVetu/BaMn/u59LoJWg4s+WdTfMX2+h1dpWKdZmV73oJc+P45WPCMS/QN/btL8PrK71ZFykqDjJ0V2+vYmzKS3AcZjXpUz0UFk1bAa2e795iN37h/G1d9ANFtvB2ZSHXm3RYNAMaYUcBzuJ5hE6y1jxljbgWw1r5ijHkAuBbIBw4B91trvzfGtAKmeg4TAEyx1j52suerrhNPEam6cvILWbFjP0u37WPJ1nSWbttHRo7rzxUdEUzP5nXo1aIePZvXpVOjSIICqvjlZyIiZaeMRQUpS8szY8wVwDXW2vOOGQ/CXUXXyVqb4hlrAOzBrXXxKBBrrb2hhGNW27UucgsK+ffMRCYs3EL7hrV44aoziKtfCQtPnY7MlGLJXM/tIU8u3z/IJXMbdoHUBJeMxUJkM5fQbD8amvV1idUTKSpyieKE6S6puyfRjTfsCp0uhL53QsBpXrG0cym8N
RKa9YFrPj15TCeMtxCyUt2CYpnJLrmWmXxkYbG0RMjYBRENoOfv3FftRqU79qZZ8NH1rp3C2Mku3tLYssD1lt270VVwDn8cajU41VcoUrVYCy/2douitRrk+vGG1vV2VCLVnfcTvJVNCV4R8baiIsvG1CyWbEtn6dZ9LN6Wzo50V+EbHOBHrxb1uKJXU4Z3aqhkr4hUF0rwVhBjTF/gEWvtcM/9hwCstY+XsO1U4CNr7ZRjxi8AbrfWnnuc52gBTLfWdj5RLNVpnr05LYs7pyxn3e4MftevBQ+ObE9IYBW8bD4rDTZ85arkdi1zVY/gqlBjOhRb5KkH1O90dOI1K9Xtm/ClWxipMNclYdqOcMne1ue4/qcABXmwdb7bNnGmS4waf2jRH9qPgXYjy17BejLLJ7l2D33vgOEnreOBjN2uX27SMpfAPpzAzUoBW3T0tsbfJXRrx0Ltxq7vafsxp9b3Nm0DvHcFHNgJ5/0Xul91/G2z98I3f4GVU6BOcxjzLMQNLftzilR1m2bB7hXQ766a109axDuU4BURqQpSM3JYsm0fi7em8936FHakHyI6Ipgrezflyt7NaFTHhxdbEBFRgrfClKblmWe7SGAL0NRam33MY+8DX1tr3yo2Fmut3e35/l5c24exJ4qlOsyzrbV8tHQnf/98LSGBfjx1aTeGdqxilZV7N7uV6BO+hO0/ARZqN4FmZ3qSuT0htuuR5Gxp5GbB5tnumBu+gpz9rm1Aq8FuwadN30FuBgSGQ9wQlwxtM6ziLy+fcT/8/Bpc/AZ0vezI+MF0V0VcvLdp5m73mPF3l4LXbgy1Yl0St1ZDqNXI3dZuBOEx5dvn9mA6fHSd66Pb7y4Y+sjRx7cWVkyGb/7qfo797oKB90NQWPnFICIiNZkSvCIiVU1RkWXexjQm/biN2YmpGGBohwZc27cF/VpH4eenPImI+By9cVWgk7U882zzO1yv3rHH7BsG7ABaWWsPFBt/F+iOa9GwFbjlcML3eKrDPHvywg28M30WdVt257krzqBhZIi3Q3LJwaRlkOBJ6qatd+MNu3iqZ0edfh/d4grzYfuP7rkSZkBBDrQb4Z6r5dluMazKUpgP71zgKnMH3AdpCa5Sed+WI9tExR2pUm7c0/0sAr3wwXhhPsx8AJa86aqgL3kDgmu5Ct/p98K276FpHzjvOajfofLjExGR6kwJXhGRqmxH+kGm/LydDxbvID07j1bR4VzdpzmX9mhCZJgudxIRn6EEbw3g6/PsnPxCJj5+O+OL3sfeuhD/hp28F0xBHmxd4KnUneH6xxp/aN7PtU9oN8otlFYTZKW5RdcydrpK5eJtJ2K7Q2gdLwd4jJ9fd4nemHauyvnHl1w19bB/whnjwE/tt0REpNwpwSsi4gtyCwqZuTqZd37cyrLt+wkJ9OOCbo0Z17c5nRtHejs8EZGTUYK3BvD1efa7P26l+8wL6OK3Fbpc5iowK1NOhmuFkPCl66mbmwGBYa4Xbvsx0HZ4xbdEqKoO7XNJb19ZiGzzHNeyIecAdL0Czn0MImK8HZWIiFRfSvCKiPiaNbsOMHnRNj5bnsSh/ELObFmP3w+OY2CbaEx5XZ4pIlK+9OZUA/jyPDuvoIhLn/yYaXk3Y8PrYw7ugTuWQFTrin3izOQj/XS3zIfCPAiLcouWtRsNrQd7p92AnL79O9wCb03ivR2JiIhUfyXOtQMqOwoRESm9zo0jefzirjw4sgMfLdnBGwu2cN2En+kYW5vbBrVmVJdY/NWnV0REpNQ+XbaTztk/QSCYS16HyZfD9/+BC14o/ydL2wAJ011Sd5cnIV63BfQe79ovND2zfBcAE++o09R9iYiIeIkSvCIiPiAyNJCbBrRiXN/mfL48iVfmbebO95bz9DeJ3DKwNRf3aExIoP5AFBEROZGCwiJemruZZ8JWYcObY1qeDT2uhaUTYdCDENmkfJ5o8Zvw08uwd6O73+gMGPwXl9St36H8FkkTERERAdT1X
UTEhwQH+HN5r6Z8+4ezefnqHtQOCeThqasZ8OQcXpm3mcycfG+HKCIiUmVNW5lEavo+ehSuwrQb6RKt/e8GLCx8vnyeZMdi+PIPEBIJo56Ge9fC+Llw9v3QoKOSuyIiIlLulOAVEfFB/n6GkV1imXZHfybfdCZtG0TwxMwE+j0xm6e+TmBPVq63QxQREalSCossL8zZxNiozfgX5bret+Aure86Fpa9DVmpp/ckRYUw4z6oFQvXfga9by6/qmARERGR41CLBhERH2aMoX9cNP3jolm5Yz+vzNvMS3M388aCLXRvWoeoiCDqhgVRL7zYbXgQUZ7bemFBhAaptYOIiFR/M1bv5pe0bCa2T4CC2tCs35EHz7oXVk6BH1+AYf889SdZ+hbsXgmXToDgWqcftIiIiEgpKMErIlJNdGtah5ev6cnmtCwmfL+FjSlZbEjJYl92HvsO5lFkS94vJNCPuPoR3HRWK8Z0jSXAXxd3iIhI9VJUZHlh9ibiokNpmjYf4oZAQNCRDaLjoNNFrndu/3sgrF7ZnyR7D8z6J7QcCJ0uLrfYRURERE5GCV4RkWqmdUwEj13U5aixoiLLgUP5pB/MY192HumepG96dj7p2bnM25DGPR+s4D/fbeDWs92ibcEBquwVEZHq4dv1KSSmZDLxXIOZnwptR/52owH3wZpP4OfX3IJrZfXd3yEv2/XdVZ9dERERqURK8IqI1AB+foa6nrYMxPz28YdGWr5dn8KLczbx0Ker+e93G7l5YCuu7N2UsCD9VyEiIr7LWsv/Zm+keVQYA4u+B+MHbYb9dsMGnaDdKPjpZeh7e9laLOz4GZZPcgu2xbQrv+BFRERESkHX4YqICH5+huGdGvL57f1598beNI8K49Hp6zjr33N4YfZGDhzK93aIIiIip2RuYhprdmVw+6A4/DZ+DU37HL8Fw4A/Qs5+16qhtIoK4cs/QK1GMPBP5RKziIiISFkowSsiIr8yxjCgTQwf3NKXj2/tS9cmkTz9zQbOemI2T32dwN6sXG+HKCIiUmrWWp6fvZHGdUK5sFURpKyGdiOOv0OTntBqsFtsLf9Q6Z5kyQRIXg0j/gXBEeUTuIiIiEgZKMErIiIlim9Rj4nX92b6nWcxoG00L83dTP9/z+aRaWtZs+sA1h5n1TYREZEq4ofNe1m+fT+3DmpN0OZv3GBJ/XeLG/hHyE6DZe+c/Amy0mDWo9BqEHS88HTDFRERETklaqwoIiIn1LlxJC9d3ZNNqZm8PPcXJv20jYk/bKVJ3VBGdYllROeGdG9SBz8/LSgjIiJVy/OzNtKgdjCX9WwCH3wF9VpBdJsT79S8PzTrCwv/Cz2vh4Cg42/77d8g/yCMfEoLq4mIiIjXqIJXRERKJa5+LZ65vBuL/zyUJy/pSpv6Eby1cAsXv/QD/f89m398sZaft6RTWKTKXhER8b6ft6SzaEs6twxsTUjRIdgy31XvniwRa4zrxZuxC1a9f/zttv0IK6dAvzsgpm35Bi8iIiJSBqrgFRGRMqkbHsTlvZpyea+mHDiUz6z1Kcxck8zkRdt5a+FWYmoFM7xTA0Z1jqV3y3oE+OuzRBERqXz/m72R6IggruzdDDbPgMK8E/ffLS5uCMR2h+//A92uAv9j/mwqLIAZf4TaTWDg/eUeu4iIiEhZKMErIiKnLDI0kIt7NOHiHk3Iyi1gdkIqX63ZzcdLdzLpp+3UCw/iwu6NGde3OS2jw70droiI1BArduxnwcY9PDiyPaFB/pD4FQRHutYLpWEMDLgPPhwHa6dC18uOfnzxG5CyBi5/F4L0/5uIiIh4lxK8IiJSLiKCAzi/WyPO79aIg3kFzEtMY/qq3bzz41YmLNzC2W1juLZvcwa1q4+/+vWKiEgF+t+sjdQJC+SaPs2hqAg2fg1thoJ/YOkP0n4MxLSHBc9A50vAz3NFSmYKzHkMWg+BDudVzAsQERERKQMleEVEpNyFBQUwskssI7vEkpqRw3s/72Dyo
m3c+PYSmtYLZVyf5lwe35Q6YSdYuEZEROQUrNl1gFkJqdw3rC0RwQGwYzFkp7n+u2Xh5+eqeD+9GRJnQIcxbvzbv0FBDozSwmoiIiJSNagxooiIVKj6tUO4e2gbFj54Di9e1YPYyFD+NSOBM/81iwc+XsWaXQe8HaKIiFQjL87ZRK2QAK7r38INbJgJxt9V8JZVp4uhbgtY8DRYC1sXuoXX+t0FUa3LM2wRERGRU6YKXhERqRSB/n6M7hrL6K6xJCRn8M6P25i6bBcfLNlBz+Z1ubZvc0Z2jiUoQJ89iojIqdmQksnMNcncdU4ctUM87RgSv3K9d0Prlv2A/gFw1r3wxd2w8Rv47hGIbOoqe0VERESqCP0VLSIila59w9r866Iu/PTwEP46piN7s3K5+/0V9Py/b7lt0lI+WLyd3QcOeTtMERHxMa/O+4XwIH+u79/SDezfDqlrod2IUz9otyuhdmP4+EZIXQcjnoCgsPIJWERERKQcqIJXRES8JjI0kBvPasn1/Vowf2MaX61JZm5iGjPXJAPQvmEtzm4bw9ntYohvXk/VvSIickKLt6YzqF196oZ7erwnfuVuy9p/t7iAYNeS4asHIG4YtB99+oGKiIiIlCMleEVExOv8/AyD2tVnULv6WGvZkJLF3MRU5m1IY8LCLbw631Vk9YuLdgnftjE0rafqKREROeJgXgHb0w9yWc8mRwY3zISoOIiOO72D97wOcg7AGddoYTURERGpcpTgFRGRKsUYQ7uGtWjXsBa3nN2arNwCfty8l7mJqcxNTOPbdSkANKsXRrN6YTSMDCE2MuTIbe1QGtUJITI0EKM/wkVEfFtREcx+FLpffdIk7caULADaNqzlBnIyYMsCOPOW048jMBQGPXD6xxERERGpAErwiohIlRYRHMCwjg0Y1rEB1lo2p2Uzb0MaS7els/tADt9v3ENqZg5F9uj9QgL9iI0MpWHtEGLrhNCrRT0GtYshNjLUOy9ERETKLjMJlr0Nqz6A62dC3ebH3TQxOROAdg08Cd7Ns6EoH9qdRnsGERERER+gBK+IiPgMYwxx9SOIqx/BjWe1/HW8oLCItKxcdh/IIflAjuf2ELs93y/YuIdPl+0CXF/fwe3rM7hdfXo0q0OAv/r6iohUWZFNYNxn8PYYePs8l+SNbFzipokpmYQE+tHscAufDV9BSB1o2qfSwhURERHxBiV4RUTE5wX4u2rd41XnWmvZmJrFnIRU5iSm8vr8X3h57mZqhwQwoG0M57Srz9ntYoiOCK7kyEVE5KRiu8I1U+GdC+Cd812SN6L+bzZLTM6kbYNa+PkZKCqEjd9Am2Hgrz95REREpHqr1NmOMWYE8F/AH3jDWvvEMY9fADwKFAEFwD3W2u9Ls6+IiMjxGGNo26AWbRu4vr4ZOfks3LiHOYmpzElM48tVuwHo1iSSQe3qc0H3RrSKifBy1CIi8qsmPeHqj2DSxS7Re910CI86apPElEwGtY1xd3YuhoN7oe0ILwQrIiIiUrkq7bpUY4w/8CIwEugIXGmM6XjMZrOAbtba7sANwBtl2FdERKRUaocEMrJLLE9e2o1FDw1h+p1ncd+wtvj7GZ6fvZFh/5nP3z5fw96sXG+HKiJyFGPMCGNMojFmkzHmwRIev98Ys8LztcYYU2iMqed5bKsxZrXnsSXF9qlnjPnWGLPRc1u3Ml9TqTXvC1e+B3s3w6SL4ND+Xx9Kz84jLTOXdocXWEucCX4BEDfUO7GKiIiIVKLKbDzYG9hkrf3FWpsHvA9cUHwDa22WtfbwMjnhgC3tviIiIqfCz8/QuXEkdw5pw6e/78+ih4dwVe9mTF60nUFPzeWVeZvJyS/0dpgiIqUqerDWPmWt7e4pmHgImGetTS+2yWDP4/HFxh4EZllr2+AKLn6TOK4yWg2CKyZByjqYfCnkuoXVDi+w1vbwAmsbvoJmfSG0jnfiFBEREalElZngbQzsKHZ/p2fsKMaYi4wxCcCXuCreU
u/r2X+8MWaJMWZJWlpauQQuIiI1R/1aITx6YWe+vmcAvVvW44mZCQx5Zh7TViZx5DNIERGvKGvRw5XAe6U47gXA257v3wYuPJ0gK1zbc+HSCbBrGUwZC3kH2ZDiErztG9aC9C2QlgDtRno5UBEREZHKUZkJXlPC2G/+UrbWTrXWtsdNLB8ty76e/V+z1sZba+NjYmJONVYREanh4urX4s3f9WLyTWcSGRrIXe8t58KXfmDJ1vST7ywiUjHKUvQQBowAPik2bIFvjDFLjTHji403sNbuBvDc/nYFs6qm4/lw0auwbSF8cDWbkvZQJyyQmFrBrnoX1H9XREREaozKTPDuBJoWu98ESDrextba+UBrY0x0WfcVEREpL/3jovnizrN46tKuJB84xKWv/Mhtk5aybW+2t0MTkZqn1EUPwHnAwmPaM/S31vbAtXi43RgzsExPXtWulOt6GZz/P9g8m/M3PEzH+qEYY1z/3ei2ENXa2xGKiIiIVIrKTPAuBtoYY1oaY4KAscC04hsYY+KMMcbzfQ8gCNhbmn1FREQqir+f4bL4psz54yDuHdqWeRvSGPrsPB6dvo79B/O8HZ6I1BxlKXoYyzHtGay1SZ7bVGAqruUDQIoxJhbAc5ta0gGr5JVyPcZhRz5Fr7xFPJzzLBxMd1W9qt4VERGRGiSgsp7IWltgjLkD+BrwByZYa9caY271PP4KcAlwrTEmHzgEXOFZdK3EfSsrdhEREYCwoADuHtqGK3s35dlvN/DWwi28tXALjeuG0rxeOM2jwjxfnu/rhRMa5O/tsEWk+vi16AHYhUviXnXsRsaYSOBs4JpiY+GAn7U20/P9ucA/PQ9PA64DnvDcfl6RL6K8JbUbx1vTVvCX/ZPhrVFQVKD+uyIiIlKjmOq8YEx8fLxdsmSJt8MQEZFqKiE5gxmrdrN170G2pR9k295s9h/MP2qb+rWCaREVTrOoMFrHRHBZfBOiI4K9FLFIpSipjYCUE2PMKOA5jhQ9PHZMwQTGmN8BI6y1Y4vt1wpXtQuuyGOKtfYxz2NRwIdAM2A7cNkxrR1+oyrNs+ckpHL9xMUs7LuExsufhdC68MdN4F9ptSwiIiIilaXEubZmPSIiIqeofcPatG9Y+6ixAwfz2Zaezba9LuHrbg+yYGMaHy/dyUtzNnHHOXH8rn8LggNU3SsiZWOtnQHMOGbslWPuTwQmHjP2C9DtOMfcCwwpzzgrU0JyJgARwx6C2EYQFK7kroiIiNQomvmIiIiUo8iwQLqG1aFrkzq/eWxzWhb/+nI9j89MYPKi7Tw8qgPDOzXA035eREROwYaUTBpFhhAZFgS9b/Z2OCIiIiKVrjIXWRMREanRWsdE8ObvevHODb0JCfTj1klLGfvaT6zZdcDboYmI+KzE5EzaNqzl7TBEREREvEYJXhERkUo2sG0MM+4awP9d2JmNqVmc98L3PPDxKlIzc7wdmoiITykoLGJTWhbtGijBKyIiIjWXErwiIiJeEODvxzV9mjPnj4O46ayWfLp8J4OfmstLczeRk1/o7fBERHzC1r0HySsoop0qeEVERKQGU4JXRETEiyJDA/nz6I58c+/Z9I+L5smvEhn67Dy+XLUba623wxMRqdISPQustVUFr4iIiNRgSvCKiIhUAS2jw3nt2nim3HQmEcEB3D5lGcOfm8+b328hPTvP2+GJiFRJiSmZ+BmIqx/h7VBEREREvEYJXhERkSqkX1w0X941gKcv60ZYUACPTl9Hn3/N4vYpy5i/IY2iIlX1iogctiE5kxbR4YQE+ns7FBERERGvCfB2ACIiInI0fz/DpT2bcGnPJiQkZ/DB4h1MXb6LL1ftpnGdUC6Pb8pl8U1oVCfU26GKiHhVYkom7dV/V0RERGo4VfCKiIhUYe0b1ubv53Vi0cND+N+VZ9AqJpz/fLeB/v+ezXUTfmbG6t3kFRR5O0wRkUqXk1/I1r3ZWmBNREREajxV8IqIiPiA4AB/zuvWiPO6N
WJH+kE+WrKDD5fs5PeTlxEVHsQF3RszplssZzStgzHG2+GKiFS4jSlZWAvttMCaiIiI1HBK8IqIiPiYpvXC+MO57bh7aFvmb0zjg593MOmnbUxYuIXGdUIZ0zWW0V1j6dI4UsleEam2ElMyAVTBKyIiIjWeErwiIiI+yt/PMLhdfQa3q8+BQ/l8uy6FL1cl8eb3W3h1/i80jwpjdBeX7O0YW/ukyd78wiJ+Sctm3e4DrEvKYG1SBtv2HuSqM5vx+0GtlSwWkSplQ0omQQF+NI8K93YoIiIiIl6lBK+IiEg1EBka+OvCbPsP5vHN2hS+WJXEq/N/4aW5m2kVHe6p7G1Eu4a1yMotIGF3But2Z7B2l7tNTMn8tZ9vcIAf7RvWonHdUJ76OpGd+w7y6AWdCfBX+34RqRoSkjNpUz8Cfz99+CQiIiI1mxK8IiIi1UydsCAu79WUy3s1JT07j6/WJPPl6iRemLOJ52dvIjoiiD1Zeb9uXzcskE6NIvldvxZ0jK1Nx0a1aRUdToC/H9Zanvo6kZfmbiYlI5cXrjqDsCBNH0TE+zYkZ9IvLsrbYYiIiIh4nf5CExERqcbqhQdx1ZnNuOrMZqRl5vLV2mSWb9tHq5hwOjaqTcfYSBrUDj5u+wVjDH8a0Z7YOqH8/fM1XPnaT7z5u15ERwRX8isRETniwMF8kjNytMCaiIiICErwioiI1BgxtYIZ16c54/o0L/O+4/o0p0GtYO56fzmXvPwDb1/fmxbR6nspIt6hBdZEREREjlAjPRERESmVczs1ZMrNfcg4lM/FL//A8u37vB2SiNRQickZgBK8IiIiIqAEr4iIiJRBj2Z1+eS2foQH+3Pl6z/x3boUb4ckIjVQYkomtUICaFg7xNuhiIiIiHidErwiIiJSJq1iIvj0tv60bVCL8e8uYfKibWU+hrWWg3kFWGsrIEIRqe42JGfRvmGt4/YPFxEREalJ1INXREREyiymVjDv3dyHO6Ys489T15B8IIc/DGtbYrIlr6CITalZrNudwbqkDNbtPsC6pAwycgoICvCjXlgQ9cKDiIpwt/XCg4gKD6JeePCv4w1rh9C0XpgXXqmIVDXWWhKSMzivWyNvhyIiIiJSJSjBKyIiIqckPDiA16+N589T1/C/2ZtI2p/Dn0d3YGNK5q/J3LVJGWxMzSS/0FXqhgT60b5hbcZ0a0TjOqFkHMpnb3Ye+7Lz2Judx7a9B0nPziMrt+A3z3dt3+b8/bxO+PupYk+kJkvJyCUjp4D26r8rIiIiAijBKyIiIqchwN+PJy7pQmydEJ77biOfLNv562NR4UF0bFSbAW1b0jG2Np0a1aZldESpErQ5+YXsO5jH3qw80rPzmJ2QysQftrL7QA7Pjz2D0CD/inxZIlKFJXgWWGvbQAleEREREVCCV0RERE6TMYZ7hralU6NINqRk/prMjakVfMr9MUMC/YmNDCU2MhSAgW1jaBEVxj+mr+OqN37izet6US88qDxfhoj4iA0pmQC0UwWviIiICKAEr4iIiJSTYR0bMKxjgwo7/u/6t6RhZAh3v7+CS17+gbev702zKPXlFalpEpOzaFA7mDph+pBHREREBMDP2wGIiIiIlNaIzrFMvulM9h3M4+KXF7Jyx35vhyQilSwxJUPtGURERESKUYJXREREfEp8i3p8cls/QgL9GfvaT8xOSPF2SCJSSQqLLBtTsrTAmoiIiEgxSvCKiIiIz2kdE8Gnv+9HXP0Ibnp7CVMWbfd2SCJSCbbtzSa3oEgVvCIiIiLFKMErIiIiPql+rRDeH9+HgW1jeHjqap75JhFrrbfDEpEKpAXWRERERH5LCV4RERHxWeHBAbx+bTxXxDflf7M38cePVpFfWOTtsESkgiQmZ2EMtKmvBK+IiIjIYZWa4DXGjDDGJBpjNhljHizh8auNMas8Xz8YY7oVe2yrMWa1MWaFMWZJZcYtIiIiVVegvx9PXNKFe4e25ZNlO7lh4mIycvK9HZZIh
SjFfPp+z3x5hTFmjTGm0BhTzxjT1Bgzxxiz3hiz1hhzd7F9HjHG7Cq236jKfVWll5iSQfN6YYQG+Xs7FBEREZEqI6CynsgY4w+8CAwDdgKLjTHTrLXrim22BTjbWrvPGDMSeA04s9jjg621eyorZhEREfENxhjuHtqG2DohPPTpas58bBaD28cwqkssg9vVJzy40qY8IhWmNPNpa+1TwFOe7c8D7rXWphtjgoH7rLXLjDG1gKXGmG+L7fsfa+3TlfqCTkFicqbaM4iIiIgcozL/2ukNbLLW/gJgjHkfuAAoPiH9odj2PwFNKjE+ERER8XGXxzelfcNafLRkJzPXJDNjdTIhgX4MalufUV1jOad9fSKU7BXfddL59DGuBN4DsNbuBnZ7vs80xqwHGp9g3yonJ7+QrXsPMrpLrLdDEREREalSKvMvnMbAjmL3d3J0de6xbgRmFrtvgW+MMRZ41Vr7WvmHKCIiIr6ua5M6dG1Sh0fO78TirenMXL2bmWuS+WptMsEBfpzdNobRnmRvrZBAb4crUhalnk8bY8KAEcAdJTzWAjgDWFRs+A5jzLXAElyl774S9hsPjAdo1qzZqb2C07A5LYvCIktbVfCKiIiIHKUyE7ymhLESl7o2xgzGJXjPKjbc31qbZIypD3xrjEmw1s4vYV+vTjxFRESkavD3M/RpFUWfVlH8/bxOLNm2jxmrdzNzzW6+WZdCUIAfA9vEcE77+rRrGEFcTC0iw5TwlSqt1PNp4DxgobU2/agDGBMBfALcY63N8Ay/DDzqOdajwDPADb95Ildg8RpAfHz88Z63wmxIyQSgvRK8IiIiIkepzATvTqBpsftNgKRjNzLGdAXeAEZaa/ceHrfWJnluU40xU3GXqP0mwevtiaeIiIhUPX5+ht4t69G7ZT3+NqYjy7bv48vVu5m5Opnv1qf8ul10RDBx9cNpU78WcfUjfv2qXysYY0rKrYlUqlLNpz3G4mnPcJgxJhCX3J1srf308Li1NqXYNq8D08sr4PKUkJxJkL8fzaPCvR2KiIiISJVSmQnexUAbY0xLYBdu0nlV8Q2MMc2AT4Fx1toNxcbDAT9Pv7Bw4Fzgn5UWuYiIiFQbfn6G+Bb1iG9Rj7+O7siOfQfZlJp15Csti89W7CIzp+DXfWqFBBBXP4L2DWszrk9zOjaq7cVXIDXYSefTAMaYSOBs4JpiYwZ4E1hvrX32mO1jPT16AS4C1lRM+KdnQ3ImretHEOjv5+1QRERERKqUSkvwWmsLjDF3AF8D/sAEa+1aY8ytnsdfAf4GRAEveapkCqy18UADYKpnLACYYq39qrJiFxERkerJz8/QPCqc5lHhDOnQ4Ndxay1pmblsLJ74Tc3ii5VJvPfzdkZ0ashdQ9oo0SuVqpTzaXBJ2m+stdnFdu8PjANWG2NWeMYettbOAJ40xnTHtWjYCtxS0a/lVCQmZ9K7ZT1vhyEiIiJS5Rhrq28Xg/j4eLtkyRJvhyEiIiLVxIFD+by1cAtvfr+FzJwCJXpLpl4WNUBlz7MzcvLp+sg3/GlEO34/KK7SnldERESkiilxrl2ZLRpEREREfFpkaCD3DG3L9f1bMuH7LUxYuIWv1iYzolND7h7ahg6xSvSKVISNWmBNRERE5LiU4BUREREpo8jQQO4d1pYbzvIker93id6RnV1FrxK9IuUrIdkleNs2UIJXRERE5FhK8IqIiIicol8Tvf1bMmGhS/TOXKNEr0h525CcSURwAI3rhHo7FBEREZEqRwleERERkdMUGXYk0fvmwi285Un0dm9ah4FtohnQNobuTesQ6O/n7VBFfFJCciZtG0TgWXRZRERERIpRgldERESknESGBfKHYW25sX9LJi3axqz1KbwwZxPPz95ERHAAfVtHuYRvmxiaR4UpWSVSCtZaNqRkMqJzQ2+HIiIiIlIlKcErIiIiUs4iwwK5fXActw+O48ChfH7cvIf5G/cwf0Ma365LAaBpvVAGtIlhYJto+
raOJjI00MtRi1RNaVm57DuYTzv13xUREREpkRK8IiIiIhUoMjSQEZ1jGdE5Fmst2/YeZMHGNOZv3MO0FUlMWbQdPwOdG0fSo1ldzmhWhx7N6tKkbqgqfEWAxMMLrDVUgldERESkJErwioiIiFQSYwwtosNpER3OuL4tyC8sYsWO/czfkMbPW9L5YPEOJv6wFYCYWsH08CR7z2hWl65NIgkJ9PfuCxDxgsMJXlXwioiIiJRMCV4RERERLwn096NXi3r0alEPgILCIhKSM1m+fR/Ltu9n2fZ9fL3WtXQI8DN0bFSbHs3q0q1pJC2jI2gRFUadsCBvvgSRCpeYnEl0RDBREcHeDkVERESkSlKCV0RERKSKCPD3o3PjSDo3jmRcXze2JyuX5dv3e5K++46q8gXXAqJFVBjNo8KP3Ea726jwILV5EJ+3ISWTdg0jvB2GiIiISJWlBK+IiIhIFRYdEcywjg0Y1rEB4Kp8t+zJZuveg2zbm83Wvdls23uQ5Tv2MX1VEkX2yL4RwQE0jwrjnqFtf91fxJcUFVk2pGRxZe9m3g5FREREpMpSgldERETEhwT4+9GmQS3alNCPNK+giJ37DrJt78FfE79b92YTqt694qMycwvoHxdFfIu63g5FREREpMpSgldERESkmggK8KNVTAStYnQ5u1QPkaGBvHFdL2+HISIiIlKl+Xk7ABERERERERERERE5NUrwioiIiIiIiIiIiPgoJXhFREREREREREREfJQSvCIiIiIiIiIiIiI+SgleERERERERERERER+lBK+IiIiIiIiIiIiIj1KCV0RERERERERERMRHKcErIiIiIiIiIiIi4qOU4BURERERERERERHxUUrwioiIiIiIiIiIiPgoJXhFREREREREREREfJQSvCIiIiIiIiIiIiI+SgleERERERERERERER9lrLXejqHCGGPSgG2V9HTRwJ5Kei6pGDqHvk/n0PfpHPo+nUPfd7rncI+1dkR5BSNVUyXPs0HvLdWBzqHv0zn0fTqHvk3nz/eVxzksca5drRO8lckYs8RaG+/tOOTU6Rz6Pp1D36dz6Pt0Dn2fzqFURfq99H06h75P59D36Rz6Np0/31eR51AtGkRERERERERERER8lBK8IiIiIiIiIiIiIj5KCd7y85q3A5DTpnPo+3QOfZ/Ooe/TOfR9OodSFen30vfpHPo+nUPfp3Po23T+fF+FnUP14BURERERERERERHxUargFREREREREREREfFRSvCKiIiIiIiIiIiI+CgleMuBMWaEMSbRGLPJGPOgt+ORkzPGTDDGpBpj1hQbq2eM+dYYs9FzW9ebMcrxGWOaGmPmGGPWG2PWGmPu9ozrHPoIY0yIMeZnY8xKzzn8h2dc59DHGGP8jTHLjTHTPfd1Dn2IMWarMWa1MWaFMWaJZ0znUKoMzbN9j+bZvk9zbd+nuXb1obm2b6vMubYSvKfJGOMPvAiMBDoCVxpjOno3KimFicCIY8YeBGZZa9sAszz3pWoqAO6z1nYA+gC3e/7d6Rz6jlzgHGttN6A7MMIY0wedQ190N7C+2H2dQ98z2Frb3Vob77mvcyhVgubZPmsimmf7Os21fZ/m2tWH5tq+r1Lm2krwnr7ewCZr7S/W2jzgfeACL8ckJ2GtnQ+kHzN8AfC25/u3gQsrMyYpPWvtbmvtMs/3mbj/8Bqjc+gzrJPluRvo+bLoHPoUY0wTYDTwRrFhnUPfp3MoVYXm2T5I82zfp7m279Ncu3rQXLvaqpBzqATv6WsM7Ch2f6dnTHxPA2vtbnCTGqC+l+ORUjDGtADOABahc+hTPJcbrQBSgW+ttTqHvuc54E9AUbExnUPfYoFvjDFLjTHjPWM6h1JVaJ5dfeh9xUdpru27NNeuFp5Dc21fV2lz7YDyOEgNZ0oYs5UehUgNZIyJAD4B7rHWZhhT0j9HqaqstYVAd2NMHWCqMaazl0OSMjDGjAFSrbVLjTGDvByOnLr+1tokY
0x94FtjTIK3AxIpRvNsES/SXNu3aa7t2zTXrjYqba6tCt7TtxNoWux+EyDJS7HI6UkxxsQCeG5TvRyPnIAxJhA34Zxsrf3UM6xz6IOstfuBubh+fTqHvqM/cL4xZivusulzjDGT0Dn0KdbaJM9tKjAVd0m8zqFUFZpnVx96X/ExmmtXH5pr+yzNtauBypxrK8F7+hYDbYwxLY0xQcBYYJqXY5JTMw24zvP9dcDnXoxFTsC48oE3gfXW2meLPaRz6COMMTGeagKMMaHAUCABnUOfYa19yFrbxFrbAvd/32xr7TXoHPoMY0y4MabW4e+Bc4E16BxK1aF5dvWh9xUform279Nc2/dpru37KnuubazVVU6nyxgzCtcbxR+YYK19zLsRyckYY94DBgHRQArwd+Az4EOgGbAduMxae+wCEVIFGGPOAhYAqznSj+hhXG8wnUMfYIzpimso74/7sPFDa+0/jTFR6Bz6HM9lY3+01o7ROfQdxphWuEoCcG27plhrH9M5lKpE82zfo3m279Nc2/dprl29aK7tmyp7rq0Er4iIiIiIiIiIiIiPUosGERERERERERERER+lBK+IiIiIiIiIiIiIj1KCV0RERERERERERMRHKcErIiIiIiIiIiIi4qOU4BURERERERERERHxUUrwiohUE8YYa4y51NtxiIiIiIhUJ5pni0hVpwSviEg5MMZM9Ez8jv36yduxiYiIiIj4Ks2zRUROLsDbAYiIVCPfAeOOGcvzRiAiIiIiItWI5tkiIiegCl4RkfKTa61NPuYrHX69rOsOY8yXxpiDxphtxphriu9sjOlijPnOGHPIGJPuqVaIPGab64wxq40xucaYFGPMxGNiqGeM+cgYk22M+eXY5xARERER8UGaZ4uInIASvCIilecfwDSgO/Aa8I4xJh7AGBMGfAVkAb2Bi4B+wITDOxtjbgFeBd4CugKjgLXHPMffgM+BbsAHwARjTPMKe0UiIiIiIt6nebaI1GjGWuvtGEREfJ7nE/5rgJxjHnrRWvuAMcYCb1hrby62z3dAsrX2GmPMzcDTQBNrbabn8UHAHKCNtXaTMWYnMMla++BxYrDAE9bahzz3A4AMYLy1dlL5vVoRERERkcqhebaIyMmpB6+ISPmZD4w/Zmx/se9/POaxH4HRnu87AKsOTzo9fgCKgI7GmAygMTDrJDGsOvyNtbbAGJMG1C9V9CIiIiIiVZPm2SIiJ6AEr4hI+Tlord10ivsa4HiXVFjP46WRX8K+ascjIiIiIr5M82wRkRPQm5GISOXpU8L99Z7v1wHdjDG1ij3eD/c+vd5amwLsAoZUeJQiIiIiIr5F82wRqdFUwSsiUn6CjTENjxkrtNameb6/2BizGJgLXIqbRJ7peWwybnGId4wxfwPq4hZ6+LRYtcJjwH+MMSnAl0AYMMRa+0xFvSARERERkSpA82wRkRNQgldEpPwMBXYfM7YLaOL5/hHgEuB5IA243lq7GMBae9AYMxx4DvgZt4jE58Ddhw9krX3ZGJMH3Af8G0gHZlTQaxERERERqSo0zxYROQFj7fFa0YiISHnxrLx7mbX2Y2/HIiIiIiJSXWieLSKiHrwiIiIiIiIiIiIiPksJXhEREREREREREREfpRYNIiIiIiIiIiIiIj5KFbwiIiIiIiIiIiIiPkoJXhEREREREREREREfpQSviIiIiIiIiIiIiI9SgldERERERERERETERynBKyIiIiIiIiIiIuKj/h+gRDaAdbtXwgAAAABJRU5ErkJggg==\n", 602 | "text/plain": [ 603 | "
" 604 | ] 605 | }, 606 | "metadata": { 607 | "needs_background": "light" 608 | }, 609 | "output_type": "display_data" 610 | } 611 | ], 612 | "source": [ 613 | "model = build_and_train(\n", 614 | " callbacks=[MyCallback()],\n", 615 | " num_epochs=50\n", 616 | ")" 617 | ] 618 | } 619 | ], 620 | "metadata": { 621 | "kernelspec": { 622 | "display_name": "tf", 623 | "language": "python", 624 | "name": "env_tensorflow" 625 | }, 626 | "language_info": { 627 | "codemirror_mode": { 628 | "name": "ipython", 629 | "version": 3 630 | }, 631 | "file_extension": ".py", 632 | "mimetype": "text/x-python", 633 | "name": "python", 634 | "nbconvert_exporter": "python", 635 | "pygments_lexer": "ipython3", 636 | "version": "3.9.7" 637 | } 638 | }, 639 | "nbformat": 4, 640 | "nbformat_minor": 5 641 | } 642 | --------------------------------------------------------------------------------