├── Notebooks ├── wp_CNN_test_preprocess.ipynb ├── wp_EDA_preprocessing.ipynb ├── wp_confusion_matrix.ipynb ├── wp_prediction_and_log-loss.ipynb └── wp_vgg_resnet_test_preprocess.ipynb ├── README.md ├── figures_and_plots ├── Accuracy_curve_CNN.jpg ├── Accuracy_curve_CNN_augmentation.jpg ├── Accuracy_curve_drop_batch.jpg ├── Architecture-of-the-Residual-Network.png ├── Architecture-of-the-VGG-Convolutional-Neural-Network.png ├── Log_loss_graph.png ├── Loss_curve_CNN.jpg ├── Loss_curve_CNN_augmentation.jpg ├── Loss_curve_drop_batch.jpg ├── cm_norm.png ├── cm_wo_norm.png ├── fol.jpg ├── fol_train_val.jpg ├── kde_cloudy.png ├── kde_foggy.png ├── kde_rainy.png ├── kde_shine.png ├── kde_sunrise.png ├── nimg_bar.png ├── nimg_dist_training.png ├── nimg_dist_validation.png ├── randm_data_aug1.png ├── randm_data_aug2.png ├── randm_data_aug3.png ├── randm_data_aug4.png └── randm_data_aug5.png ├── preprocessed_test_images ├── test_preproc_CNN.npy ├── test_preproc_resnet.npy ├── test_preproc_vgg16.npy └── test_preproc_vgg19.npy ├── scripts ├── wp_CNN_test_preprocess.py ├── wp_EDA_preprocessing.py ├── wp_confusion_matrix.py ├── wp_prediction_and_log_loss.py ├── wp_training_CNN.py ├── wp_training_CNN_aug.py ├── wp_training_resnet101.py ├── wp_training_resnet152.py ├── wp_training_resnet50.py ├── wp_training_vgg16.py ├── wp_training_vgg19.py └── wp_vgg_resnet_test_preprocess.py └── wp_project_report.pdf /Notebooks/wp_CNN_test_preprocess.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import pandas as pd" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 2, 16 | "metadata": {}, 17 | "outputs": [ 18 | { 19 | "name": "stderr", 20 | "output_type": "stream", 21 | "text": [ 22 | "Using TensorFlow backend.\n" 23 | ] 24 | } 25 | ], 26 | "source": [ 27 | "from keras.preprocessing.image import load_img, img_to_array" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 3, 33 | "metadata": {}, 34 | "outputs": [], 35 | "source": [ 36 | "img_width, img_height = 256, 256" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": 4, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "def preprocess_image(path):\n", 46 | " img = load_img(path, target_size = (img_height, img_width))\n", 47 | " a = img_to_array(img)\n", 48 | " a = np.expand_dims(a, axis = 0)\n", 49 | " a /= 255.\n", 50 | " return a" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": 5, 56 | "metadata": {}, 57 | "outputs": [], 58 | "source": [ 59 | "test_images_dir = '../dataset/alien_test/'" 60 | ] 61 | }, 62 | { 63 | "cell_type": "code", 64 | "execution_count": 6, 65 | "metadata": {}, 66 | "outputs": [], 67 | "source": [ 68 | "test_df = pd.read_csv('../dataset/test.csv')\n", 69 | "\n", 70 | "test_dfToList = test_df['Image_id'].tolist()\n", 71 | "test_ids = [str(item) for item in test_dfToList]" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": 7, 77 | "metadata": {}, 78 | "outputs": [], 79 | "source": [ 80 | "test_images = [test_images_dir+item for item in test_ids]\n", 81 | "test_preprocessed_images = np.vstack([preprocess_image(fn) for fn in test_images])" 82 | ] 83 | }, 84 | { 85 | "cell_type": "code", 86 | "execution_count": 8, 87 | "metadata": {}, 88 | "outputs": [], 89 | "source": [ 90 | "np.save('../test_preproc_CNN.npy', 
test_preprocessed_images)" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": null, 96 | "metadata": {}, 97 | "outputs": [], 98 | "source": [] 99 | } 100 | ], 101 | "metadata": { 102 | "kernelspec": { 103 | "display_name": "TF_GPU", 104 | "language": "python", 105 | "name": "tf_gpu" 106 | }, 107 | "language_info": { 108 | "codemirror_mode": { 109 | "name": "ipython", 110 | "version": 3 111 | }, 112 | "file_extension": ".py", 113 | "mimetype": "text/x-python", 114 | "name": "python", 115 | "nbconvert_exporter": "python", 116 | "pygments_lexer": "ipython3", 117 | "version": "3.7.5" 118 | } 119 | }, 120 | "nbformat": 4, 121 | "nbformat_minor": 2 122 | } 123 | -------------------------------------------------------------------------------- /Notebooks/wp_confusion_matrix.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import os\n", 10 | "import numpy as np\n", 11 | "import pandas as pd\n", 12 | "import matplotlib.pyplot as plt\n", 13 | "\n", 14 | "import warnings\n", 15 | "warnings.filterwarnings(\"ignore\")" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": 2, 21 | "metadata": {}, 22 | "outputs": [], 23 | "source": [ 24 | "from tensorflow.keras.models import load_model" 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": 3, 30 | "metadata": {}, 31 | "outputs": [], 32 | "source": [ 33 | "test_preprocessed_images = np.load('../test_preproc_resnet.npy')" 34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": 4, 39 | "metadata": {}, 40 | "outputs": [], 41 | "source": [ 42 | "model_path = '../resnet101_drop_batch_best_weights_256.h5'" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 5, 48 | "metadata": {}, 49 | "outputs": [ 50 | { 51 | "name": "stdout", 52 | "output_type": "stream", 53 | "text": [ 54 | "WARNING:tensorflow:From C:\\ProgramData\\Anaconda3\\envs\\tf_gpu\\lib\\site-packages\\tensorflow_core\\python\\ops\\init_ops.py:97: calling GlorotUniform.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n", 55 | "Instructions for updating:\n", 56 | "Call initializer instance with the dtype argument instead of passing it to the constructor\n", 57 | "WARNING:tensorflow:From C:\\ProgramData\\Anaconda3\\envs\\tf_gpu\\lib\\site-packages\\tensorflow_core\\python\\ops\\init_ops.py:97: calling Zeros.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n", 58 | "Instructions for updating:\n", 59 | "Call initializer instance with the dtype argument instead of passing it to the constructor\n", 60 | "WARNING:tensorflow:From C:\\ProgramData\\Anaconda3\\envs\\tf_gpu\\lib\\site-packages\\tensorflow_core\\python\\ops\\init_ops.py:97: calling Ones.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n", 61 | "Instructions for updating:\n", 62 | "Call initializer instance with the dtype argument instead of passing it to the constructor\n", 63 | "WARNING:tensorflow:From C:\\ProgramData\\Anaconda3\\envs\\tf_gpu\\lib\\site-packages\\tensorflow_core\\python\\ops\\resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n", 64 | 
"Instructions for updating:\n", 65 | "If using Keras pass *_constraint arguments to layers.\n" 66 | ] 67 | } 68 | ], 69 | "source": [ 70 | "model = load_model(model_path)" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 6, 76 | "metadata": {}, 77 | "outputs": [ 78 | { 79 | "name": "stdout", 80 | "output_type": "stream", 81 | "text": [ 82 | "30/30 [==============================] - 12s 415ms/sample\n" 83 | ] 84 | } 85 | ], 86 | "source": [ 87 | "#Prediction Function\n", 88 | "array = model.predict(test_preprocessed_images, batch_size=1, verbose=1)\n", 89 | "y_pred = np.argmax(array, axis=1)" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": 7, 95 | "metadata": {}, 96 | "outputs": [], 97 | "source": [ 98 | "test_df = pd.read_csv('../dataset/test.csv')\n", 99 | "y_true = test_df['labels']" 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": 8, 105 | "metadata": {}, 106 | "outputs": [], 107 | "source": [ 108 | "from sklearn.metrics import confusion_matrix\n", 109 | "conf_mat = confusion_matrix(y_true, y_pred)" 110 | ] 111 | }, 112 | { 113 | "cell_type": "code", 114 | "execution_count": 9, 115 | "metadata": {}, 116 | "outputs": [], 117 | "source": [ 118 | "train_dir = '../Data/training/'\n", 119 | "classes = os.listdir(train_dir)" 120 | ] 121 | }, 122 | { 123 | "cell_type": "code", 124 | "execution_count": 10, 125 | "metadata": {}, 126 | "outputs": [], 127 | "source": [ 128 | "import itertools\n", 129 | "def plot_confusion_matrix(cm, classes,\n", 130 | " normalize=False,\n", 131 | " title='Confusion matrix',\n", 132 | " cmap=plt.cm.Reds):\n", 133 | " \"\"\"\n", 134 | " This function prints and plots the confusion matrix.\n", 135 | " Normalization can be applied by setting `normalize=True`.\n", 136 | " \"\"\"\n", 137 | " plt.imshow(cm, interpolation='nearest', cmap=cmap)\n", 138 | " plt.title(title)\n", 139 | " plt.colorbar()\n", 140 | " tick_marks = np.arange(len(classes))\n", 141 | " plt.xticks(tick_marks, classes, rotation=45)\n", 142 | " plt.yticks(tick_marks, classes)\n", 143 | "\n", 144 | " if normalize:\n", 145 | " cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]\n", 146 | " cm = cm.round(2)\n", 147 | " #print(\"Normalized confusion matrix\")\n", 148 | " else:\n", 149 | " cm=cm\n", 150 | " #print('Confusion matrix, without normalization')\n", 151 | "\n", 152 | " #print(cm)\n", 153 | "\n", 154 | " thresh = cm.max() / 2.\n", 155 | " for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):\n", 156 | " plt.text(j, i, cm[i, j],\n", 157 | " horizontalalignment=\"center\",\n", 158 | " color=\"white\" if cm[i, j] > thresh else \"black\")\n", 159 | "\n", 160 | " plt.tight_layout()\n", 161 | " plt.ylabel('True label')\n", 162 | " plt.xlabel('Predicted label')" 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": 11, 168 | "metadata": {}, 169 | "outputs": [ 170 | { 171 | "data": { 172 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAeIAAAGzCAYAAADzOxTxAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAgAElEQVR4nO3dd7wcZb3H8c83JJESqtQUCKFJlRSQ3g1VIhcUkA7CBb2IIteLoIIKihLBgqIoTekEEKQXpSkgSegkUgOkUALSS5LD7/7xzInLYU/fs7Mz+32/Xvs6uzOzM799ds/+9inzjCICMzMzy0e/vAMwMzNrZk7EZmZmOXIiNjMzy5ETsZmZWY6ciM3MzHLkRGxmZpYjJ2LrMUkLSfqLpDckXd6L/ewj6eZaxpYXSZtL+lejHE/ScEkhqX+9YioKSdMkbZfdP07SH/rgGL+V9N1a79fKRT6PuPwkfQk4GvgU8BbwIHByRNzdy/3uBxwJbBIR83odaIOTFMBqEfFU3rG0R9I04MsRcWv2eDjwLDCg1u+RpPOA6RHxnVrut17allUN9ndgtr/NarE/ax6uEZecpKOBnwM/ApYDVgR+A4yrwe5XAp5ohiTcFa519h2XrZVaRPhW0huwOPA28IUOtvkEKVHPzG4/Bz6RrdsKmA58E3gZmAUclK37PjAHmJsd4xDgROCCin0PBwLonz0+EHiGVCt/FtinYvndFc/bBLgfeCP7u0nFutuBHwJ/z/ZzM7B0O6+tNf5vVcT/eWAn4AngNeC4iu03BO4BXs+2PQMYmK27M3st72Svd8+K/f8f8CLwp9Zl2XNWyY4xKns8GJgNbNWF9+584JvZ/SHZsb+SPV4126/aHO9PwIfAe1mM36p4Dw4Ans+Of3wX3/+PvC/ZssiOf1j23s/JjvWXdl5HAIcDTwL/Bn7Nf1ri+gHfAZ7L3p8/Aou3+ewcksV9Z8Wyg4AXsv0dDmwAPJy9b2dUHHsV4K/Aq9nrvhBYomL9NGC77P6JZJ/d7H1/u+I2DzgxW3cs8DTps/c4sFu2fE3gfaAle87r2fLzgJMqjnko8FT2/l0DDO5KWflW7lvuAfjWh28u7JB9ifTvYJsfAPcCywLLAP8Afpit2yp7/g+AAaQE9i6wZLZ+/pdXO49bvzj7A4sAbwJrZOtWANbO7h9I9oUPLJV9Ce2XPW/v7PEns/W3Z1+EqwMLZY9Paee1tcb/vSz+Q4FXgIuARYG1sy/PEdn2o4GNsuMOB6YAX6/YXwCrVtn/T0gJbSEqEmO2zaHZfhYGbgLGd/G9O5gsuQFfyl7zpRXrrq6IofJ408iSS5v34PdZfJ8GPgDW7ML7P/99qVYGtEky7byOAK4FliC1xrwC7FDxOp4CRgCDgCuBP7WJ+4+kz85CFct+CywIjM3evz9n8Q8hJfQts32sCnw2e2+WISXzn1crK9p8diu2WT+LeWT2+AukH1T9SD/G3gFW6KC85pcRsA3pB8GoLKZfAXd2pax8a8wbcE72mXu0YtlSwC2kH1S3kH1fdnRz03S5fRKYHR03He8D/CAiXo6IV0g13f0q1s/N1s+NiOtJv/bX6GE8HwLrSFooImZFxGNVttkZeDIi/hQR8yLiYmAq8LmKbc6NiCci4j3gMtKXZXvmkvrD5wKXAEsDv4iIt7LjPwasBxARkyLi3uy404DfAVt24TWdEBEfZPF8RET8nvQPeR/px8fxneyv1R3A5pL6AVsAPwU2zdZtma3vju9HxHsR8RDwECkhQ+fvfy2cEhGvR8TzwN/4z/u1D3BaRDwTEW8D3wb2atMMfWJEvNOmbH8YEe9HxM2kRHhxFv8M4C5gJEBEPBURt2TvzSvAaXT+fs4naRlSkj8yIh7I9nl5RMyMiA8j4lLSe7thF3e5D3BOREyOiA+y17tx1o/fqr2yssZ0HqnCU+lY4LaIWA24LXvcISficnsVWLqT/rXBpKbBVs9ly+bvo00if5dUe+mWiHiHVIM4HJgl6TpJn+pCPK0xDal4/GI34nk1Ilqy+61f5i9VrH+v9fmSVpd0raQXJb1J6ldfuoN9A7wSEe93ss3vgXWAX2VfwJ2KiKdJP3rWBzYn1ZRmSlqDniXi9sqss/e/Frpz7P6ksQytXqiyv7bvX3vv57KSLpE0I3s/L6Dz95PsuQOACcBFEXFJxfL9JT0o6XVJr5Pe1y7tkzavN/vx8So9/2xbziLiTlI3Q6VxpK4lsr+f72w/TsTldg+p6a6jD8JM0qCrVitmy3riHVITbKvlK1dGxE0R8VlSzXAqKUF1Fk9rTDN6GFN3nEmKa7WIWAw4jtQP25EOTzuQNIjU73o2cKKkpboRzx3AHqR+6hnZ4/2BJUkj37sdTxUdvf8feT8lfeT97MGxunLseXw0sfbmGD/Onr9e9n7uS+fvZ6tfkfqB548Il7QS6TP7P6SukiWARyv22VmsH3m9khYhtVrV47PdVIapfyyjBXp9k/SopIkVt8O6cPjlImIWQPZ32c6e4JGIJRYRb0j6HvBrSfNIA5vmAtsBW0fEt4CLge9Iup/0RfI9Us2hJx4E/k/SiqSBVt9uXSFpOeAzpKaa1sFELVX2cT3wq+yUq8uA3YG1SDXCvrYoqR/77ay2fgSpn67VS6T+zO6cvvQLYFJEfFnSWaT+zS8CSDqRNHBrq3aeewcwHmg9R/t20vt1V0Utv63WGLuqo/f/IWBtSeuTfqCc2MtjVTv2/0m6gVTOPyL1g8+TupovO7Qo6XP4uqQhwP925UmS/pvU6vCZiPiwYtUipDJ6JdvuIFKNuNVLwFBJAyNiTpVdXwRcIuki0riBHwH3Zd0gVkPvE+zOIr3ez+946/2IGFODkDrkGnHJRcRppHOIv0P6AnmB9Iv+z9kmJwETSaNOHwEmZ8t6cqxbgEuzfU3io8mzH2n09UxSU86WwFeq7ONVYJds21dJI393iYjZPYmpm44hDYx6i1TzubTN+hOB87NmyS92tjNJ40j9R4dni44GRknaJ3s8jDT6uz13kJLJndnju0k11DvbfUaqBX4ni/GYzmKkg/c/Ip4gDea6ldQX2va887OBtbJj/ZnuO4c00vtO0ij690nnpdfK90kDo94AriMNBuuKvUk/MGZKeju7HRcRjwM/I7U0vQSsy0ffv7+Sxhy8KOljn9eIuA34LnAFaVT+KsBePXlh1jGRvnB6e+uhlyStAJD9fbnTeLNRXmZWZ5IeBLbNfnyYWY0sqwVijxrUiM/krUmd1YizwXbXRsQ62eNTSWNTTpF0LLBU1vrYLjdNm+UkIjwi1qyP9KtF90Yn9VRJF5NOIVxa0nTgBOAU4DJJrefAf6GzwzgRm5lZqbQ2Tfe1iNi7nVXbdmc/7iM2MzPLkWvEZmZWOv1qMfC+TkOonIjNzKx0itTc60QMLL3IQrHSkovmHUbD0bJDOt/IzKyHpj3/PLNnv1qTk8YrCdVmsFadOBEDKy25KPcd1enAtqbT/6s/yjsEMyuxMZttlXcIDcGJ2MzMSsdN02ZmZjkRNRqsVSdF+t
FgZmZWOq4Rm5lZ6RSplulEbGZm5SKo0RW86qJIPxrMzMxKxzViMzMrlXrNNV0rTsRmZlY6RRo17URsZmalU6QacZFiNTMzKx3XiM3MrFTShB7FaZt2IjYzs9IpUnNvkWI1MzMrHdeIzcysVIo217QTsZmZlU6RmnuLFKuZmVnpuEZsZmal04/itE07EZuZWakUrY/YTdNmZmY5co3YzMxKp0i1TCdiMzMrFalYTdNOxGZmVjpFGqxVpNq7mZlZ6bhGbGZmpeOmaTMzs5yIYjX3FinW0mv58EPG/Pwyxp1zXd6hNIwbb76VNdYfw6rrjuSU8afnHU5DcdlU53KpzuXSuJyIG8gv736YNZddMu8wGkZLSwtfPfoYbrhqAo9Puo+LL5/A41Om5h1WQ3DZVOdyqa4Zy6Wfen+rW6z1O5R1ZPrrb3PD1Oc4eMM18w6lYfxz4iRWHTGCESsPZ+DAgey1x+5cfe31eYfVEFw21blcqmu2chGiXw1u9eJE3CC++Ze7+fFOG9NPBRph0MdmzJzFsKFD5j8eOmQwM2bNyjGixuGyqc7lUp3LpbHlloglnSjpmBrt63ZJY2qxrzxc9/g0lhm0EKOHLpt3KA0lIj62zL9TEpdNdS6X6pqxXIrUNO1R0w3gH8/N4trHp3Hj1Od5f+483vxgLvtffAt/3PuzeYeWq6FDBvPC9BnzH0+fMZPBy6+QY0SNw2VTnculumYslyL9zqhbjVjS/pIelvSQpD+1Wbe+pHuz9VdJWjJbPr+mK2lpSdOy+wtJuiTb/lJgoWz5IZJOr9jvoZJOq9dr7KmTd9yYaccfwFPf3o8L9xnL1qsMafokDLDB6FE8+fTTPDttGnPmzOGSCVew68475h1WQ3DZVOdyqa7ZyqX16kuuEVeQtDZwPLBpRMyWtBTwtYpN/ggcGRF3SPoBcALw9Q52eQTwbkSsJ2k9YHK2/BLgYUnfioi5wEHAf9f69Vh99O/fnzN+dirbj9udlpYWDt5/X9Zey4PZwGXTHpdLdS6XxlavpultgAkRMRsgIl5T1kEhaXFgiYi4I9v2fODyTva3BfDLbF8PS3o4u/+OpL8Cu0iaAgyIiEeq7UDSYcBhACsuMag3r62mtlxlCFuuMqTzDZvETjuMZacdxuYdRkNy2VTncqmu2cqlSHNN1ysRC/j4aIHOzeM/zecLtlnX3v7+ABwHTAXObW/HEXEWcBbA6KHL9iQ2MzNrQEW7+lK9+ohvA74o6ZMAWdM0ABHxBvBvSZtni/YDWmvH04DR2f09KvZ3J7BPtq91gPUq9ncfMAz4EnBxrV+ImZlZLdWlRhwRj0k6GbhDUgvwACnJtjoA+K2khYFnSH27AOOByyTtB/y1YvszgXOzJukHgX+2OeRlwPoR8e+avxgzM2t4RZoko26nL0XE+aT+32rrHgQ2qrJ8KhW1XeA72fL3gL06ONxmgCdTNTNrUgVqmS7Uj4ZOSVpC0hPAexFxW97xmJmZdaZUE3pExOvA6nnHYWZm+UnnERenTlyqRGxmZgZumjYzM7Muco3YzMxKp0g1YidiMzMrHSdiMzOzHKlAg7XcR2xmZpYj14jNzKxUhJumzczMclWk5t4ixWpmZlY6rhGbmVnpFGislhOxmZmVjwrUS+ymaTMzsxy5RmxmZqXiUdNmZmY5cyI2MzPLUb8CZWL3EZuZmeXINWIzMysZFWrUtBOxmZmVStEGa7lp2szMLEdOxGZmVi5KM2v19talQ0nfkPSYpEclXSxpwe6G60RsZmaloxrcOj2GNAT4GjAmItYBFgD26m6sTsRmZmY91x9YSFJ/YGFgZk92YGZmVir96jBcKyJmSBoPPA+8B9wcETd3dz+uEZuZWanUolk6S+NLS5pYcTvsI8eRlgTGASsDg4FFJO3b3XhdIzYzM6tudkSM6WD9dsCzEfEKgKQrgU2AC7pzECdiMzMrnTpdj/h5YCNJC5OaprcFJnZ3J07EZmZWOvXIwxFxn6QJwGRgHvAAcFZ39+NEDGjZIfT/6o/yDqPhHL7I0LxDaFi/fWd63iGYWQfqNcVlRJwAnNCbfXiwlpmZWY5cIzYzs1IRxboMohOxmZmVToHysJumzczM8uQasZmZlU6RasROxGZmVjr1GjVdC26aNjMzy5FrxGZmVjp1mlmrJpyIzcysVESxmnuLFKuZmVnpuEZsZmalU6CWaSdiMzMrHxWok9iJ2MzMSqc4adh9xGZmZrlyjdjMzEpFFKtG7ERsZmblIhWqj9hN02ZmZjlyjdjMzErH1yM2MzPLkQqUid00bWZmliPXiM3MrFSEL/pgZmaWHzkRm5mZ5cqnL5mZmVmXuEZsZmalU6AKsROxmZmVj5umzczMrEtcIzYzs1Ip2ulLrhE3iBtvvpU11h/DquuO5JTxp+cdTq72O/s3/PSlZ/juI/fNX7bwkkty1M1X84MnHuCom69m4SWWyDHCxuDPTHUul+qaqlwE/aRe3+rFibgBtLS08NWjj+GGqybw+KT7uPjyCTw+ZWreYeXmnvMu5Fc77PaRZTscezRTb7uD760+kqm33cH2xx6dU3SNwZ+Z6lwu1blcGpsTcQP458RJrDpiBCNWHs7AgQPZa4/dufra6/MOKzdP3fV33n3t3x9Ztt64nbnn/AsBuOf8C/n053fJI7SG4c9MdS6X6pqxXKTe3+rFibgBzJg5i2FDh8x/PHTIYGbMmpVjRI1nseWW4c0XXwLgzRdfYtFll845onz5M1Ody6W65iuXdD3i3t7qJddELOlrkqZIujDPOPIWER9bVqSBBlZ//sxU53KpzuXS2PIeNf0VYMeIeDbnOHI1dMhgXpg+Y/7j6TNmMnj5FXKMqPG8+dIrLLb8crz54ksstvxyvPXy7LxDypU/M9W5XKprtnIRoAK19+YWqqTfAiOAayR9U9KfJT0s6V5J62XbLCPpFkmTJf1O0nOSls7WfVfS1Gz9xZKOkbSKpMkVx1hN0qR8XmHXbTB6FE8+/TTPTpvGnDlzuGTCFey68455h9VQHr7mejY+YB8ANj5gHx6++rqcI8qXPzPVuVyqa7pyEYVqms6tRhwRh0vaAdgaOAF4ICI+L2kb4I/A+tnyv0bEj7NtDwOQNAbYHRhJeg2TgUkR8bSkNyStHxEPAgcB51U7vqTDWve34rBhffhKO9e/f3/O+NmpbD9ud1paWjh4/31Ze601c40pT4dcdA6rb7U5g5b+JD9+YSp/OeFH3HTKaRx62flsesh+vPb8dM76wv55h5krf2aqc7lU14zlUqSmd1XrO6jbwaVpwBjgFmD3iHgmW/4CsA5wB7Bba9O1pNeA1YF9gSUj4oRs+WnAzIgYL2kfYEPgaOAJYMOIeLWjOMaMGhkT77699i+w4A5fZGjeITSs374zPe8QzApvzGZbMXHyAzVPmesutGD8eeXeV7BWnfLUpIgYU4OQOpR3H3Gram9EtLO8ve1bXUFWkybVkjtMwmZmVj6ea7r77gT2AZC0FTA7It4E7ga+mC0fCyyZbX838DlJC0oaBOzcuqOIeB+4CTgTOLdeL8DMz
BqHzyPuvhOBMZIeBk4BDsiWfx8Ymw3A2hGYBbwVEfcD1wAPAVcCE4E3KvZ3IalGfXNdojczM+uhXJumI2J4xcNxVTZ5A9g+IuZJ2hjYOiI+yNaNj4gTJS1MqlH/rOJ5mwHnRERLX8RtZmaNS1DXuaJ7q1H6iNuzInCZpH7AHODQinVnSVoLWBA4PyImA0i6ClgF2KbewZqZWQOoc9NybzV0Io6IJ0mnKFVb96V2lu9WbbmZmVkjauhEbGZm1hNFGjXtRGxmZqVToDzsRGxmZuUiipWIG+X0JTMzs6bkGrGZmZWLhPoVp0rsRGxmZqXjpmkzMzPrEteIzcysdDyzlpmZWU48atrMzMy6zDViMzMrHc+sZWZmlpeCXfTBTdNmZmY5co3YzMxKx03TZmZmOSpQHnYiNjOzckmnLxUnE7uP2MzMLEeuEZuZWbkIVKBqphOxmZmVjNw0bWZmZl3jGrGZmZWPr0dsZmaWIzdNm5mZlZ+kJSRNkDRV0hRJG3d3H64Rm5lZuaiu5xH/ArgxIvaQNBBYuLs7cCI2M7PyqUMfsaTFgC2AAwEiYg4wp7v7cdO0mZmVTHb5pd7eOjcCeAU4V9IDkv4gaZHuRutEbGZmVt3SkiZW3A5rs74/MAo4MyJGAu8Ax3b3IG6atnadOX1i3iE0rLnH7J13CA1pwPiL8w7BLFVoa9M0PTsixnSwfjowPSLuyx5PoAeJ2DViMzMrnzo0TUfEi8ALktbIFm0LPN7dUF0jNjMz67kjgQuzEdPPAAd1dwdOxGZmVjo1apruVEQ8CHTUfN0pJ2IzMysfz6xlZmZmXeEasZmZlYvkiz6YmZnlydcjNjMzsy5xjdjMzMrHTdNmZmY5EYUaNe1EbGZmpaMCdby2m4izyzu1KyLerH04ZmZmzaWjGvFjQJAq+a1aHwewYh/GZWZm1nNlaJqOiGH1DMTMzKwmpLpNcVkLXWpFl7SXpOOy+0Mlje7bsMzMzJpDp4lY0hnA1sB+2aJ3gd/2ZVBmZma9UofLINZKV0ZNbxIRoyQ9ABARr2WXezIzM2tMJWuaniupH2mAFpI+CXzYp1GZmZk1ia7UiH8NXAEsI+n7wBeB7/dpVGZmZj2UWpaLUyPuNBFHxB8lTQK2yxZ9ISIe7duwzMzMeqFATdNdnVlrAWAuqXm6QPOVmJmZNbaujJo+HrgYGAwMBS6S9O2+DszMzKxnajBiusFGTe8LjI6IdwEknQxMAn7cl4GZmZn1VKn6iIHn2mzXH3imb8IxMzPrJVGOPmJJp5P6hN8FHpN0U/Z4LHB3fcIzMzMrt45qxK0jox8DrqtYfm/fhWNmZtZ7pWiajoiz6xmImZlZzRSoaboro6ZXkXSJpIclPdF6q0dwzeTGm29ljfXHsOq6Izll/Ol5h9MwDv7mcSz36U1Yd9vP5R1Kw1ntgtsZeeldjLnsbjaa8Pe8w2kY/l+qzuXSuLpyTvB5wLmk7u8dgcuAS/owpqbT0tLCV48+hhuumsDjk+7j4ssn8PiUqXmH1RAO/MJu3HDB7/MOo2HdsutnmPjFzbh3j03zDqUh+H+puqYrl1qculTHpu2uJOKFI+ImgIh4OiK+Q7oak9XIPydOYtURIxix8nAGDhzIXnvsztXXXp93WA1hi402YKklFs87DCsI/y9V14zlon7q9a1eupKIP1Dq9X5a0uGSPgcs28dxNZUZM2cxbOiQ+Y+HDhnMjFmzcozIikDATtfez2cu/zt/ePz5vMNpCP5fqs7l0ti6ch7xN4BBwNeAk4HFgYP7Mqj2SPoDcFpEPJ7H8ftKRHxsWYEG/FlObt9tIwYvsiAvv/sBO157P2ssMYjNBy+Vd1i58v9SdU1ZLgV6gV256MN92d23gP36NhzIat+KiI9dajEivtzXx8/D0CGDeWH6jPmPp8+YyeDlV8gxIiuCwYssCMCyC3+CcSsvx/0vv970idj/S9U1XbkUbEKPdpumJV0l6cr2brUMQtJwSVMk/QaYDJwtaaKkx7JLL7Zud7ukMdn9tyWdLOkhSfdKWk7SopKelTQg22YxSdNaHzeqDUaP4smnn+bZadOYM2cOl0y4gl133jHvsKyBvTN3Hm/NmTf//q0vzGbtpRbNOar8+X+pumYsF0m9vtVLRzXiM+oWRbIGcFBEfEXSUhHxmqQFgNskrRcRD7fZfhHg3og4XtJPgUMj4iRJtwM7A38G9gKuiIi5bQ8m6TDgMIAVhw3rw5fVuf79+3PGz05l+3G709LSwsH778vaa62Za0yN4ktfPZrb77mf2a/9m2FjtuTEbx7JIXvvkXdYuXvpvTl84cbJAMz7MNhrtRXYfsVlco4qf/5fqs7l0thUre+g7kFIw4G/RcTK2ePDSUmyP7ACcGREXJIl2WMiYqKkD4AFIyIk7Ql8NiK+LGlT4FsRMU7SPaQE3eH1k8eMGhkT7769r15eYcW/X8w7hIY17+Rv5B1CQxow/uK8Q7ACGbPZVkyc/EDNq56jl10i7vviZr3ez4BfXzcpIsbUIKQOdfV6xPXwDoCklYFjgA0i4t+SzgMWrLL93PjPr4gWstcSEX/Pmrq3BBboLAmbmVkJFWiwVldOX6q3xUhJ+Q1Jy5EmEemuP5KuoXxuLQMzMzOrtS4nYkmf6MtAWkXEQ8ADpItNnAP0ZO6+C4ElScnYzMyaiSjUzFqdNk1L2hA4m3T+8IqSPg18OSKOrFUQETENWKfi8YHtbLdVxf1BFfcnABMqNt0MmBARr9cqRjMzK5ACNU13pY/4l8AupFHIRMRDkhp2iktJvyI1Z++UdyxmZmad6Uoi7hcRz7U5p6qlj+LptVrW1M3MrIgE/RpxCFR1XUnEL2TN05Gd13sk4MsgmplZ4ypQ03RXfjIcARwNrAi8BGyULTMzM7Ne6spc0y+TZqgyMzNrfK2jpguiK6Omfw98bPqtiDisTyIyMzPrrTIlYuDWivsLArsBL/RNOGZmZr1VssFaEXFp5WNJfwJu6bOIzMzMmkhP5ppeGVip1oGYmZnVTJmapiX9m//0EfcDXgOO7cugzMzMeqxMg7WUZvH4NDAjW/RhNMJ1E83MzEqiw97sLOleFREt2c1J2MzMGl+ZLvoA/FPSqIiY3OfRmJmZ9VpJRk1L6h8R80hXMjpU0tOk6wSLVFkeVacYzczMSqujGvE/gVHA5+sUi5mZWW2UZLCWACLi6TrFYmZm1nslGjW9jKSj21sZEaf1QTxmZma9V5JEvAAwiKxmbGZmZrXXUSKeFRE/qFskZmZmNSCEyjBqGteEzcysqArUNN3RT4Zt6xaFmZlZk2q3RhwRr9UzEDMzs5oo0ahpMzOzYipQIi5Ob7aZmVkJuUZsZmYlU5K5ps3MzArLTdNmZmbWFa4Rm5lZuXjUtJmZWc7qmIglLQBMBGZExC7dfb4TsbVLSy6fdwgNa8D4i/MOoSG1XH9O3iE0pAV2OjjvEJpM3QdrHQVMARbryZPdR2xmZtZDkoYCOwN/6Ok+XCM2M7PyqU3T9NKSJlY8Pisizmqzzc+BbwGL9vQgTsRmZlYutRusNTsixrR7
GGkX4OWImCRpq54exE3TZmZmPbMpsKukacAlwDaSLujuTpyIzcysZLLBWr29dSIivh0RQyNiOLAX8NeI2Le70bpp2szMysfnEZuZmTWPiLgduL0nz3UiNjOz8nGN2MzMLCee4tLMzCxPxboMYnEiNTMzKyHXiM3MrHzcNG1mZpajAiViN02bmZnlyDViMzMrFwEqTj3TidjMzEpG0M9N02ZmZtYFrhGbmVn5uGnazMwsRx41bWZmZl3hGrGZmZWLijXFpROxmZmVT4Gapp2IzcysfAo0WKs4kZqZmZWQa8RmZlY+bpo2MzPLScEGaxUnUjMzsxJyIm4QN958K2usP4ZV1x3JKeNPzzuchuFyaZ/L5uPenzOXjY49nVHHnMp63/gJJ156Y94hNYym+7xIvb/ViZumG0BLSwtfPfoYbvnLnxk6ZDAbbL41u+68I2ut+am8Q6TkmDQAABrYSURBVMuVy6V9LpvqPjGgP7ee8BUGLfQJ5s5rYYvv/oodRn6KjVYfnndouWrKz4tHTVt3/HPiJFYdMYIRKw9n4MCB7LXH7lx97fV5h5U7l0v7XDbVSWLQQp8AYG5LC/NaWlCBBu30FX9eGpsTcQOYMXMWw4YOmf946JDBzJg1K8eIGoPLpX0um/a1tHzI6GPGs8Ih32Pb9VbnM6utlHdIuWu6z4uyyyD29lYnDZeIJU2TtHSV5btKOjaPmPpaRHxsmX/Eu1w64rJp3wIL9GPS+GN47ncncP9Tz/Po8yVOOF3UlJ8X9ev9rU4aLhG3JyKuiYhT8o6jLwwdMpgXps+Y/3j6jJkMXn6FHCNqDC6X9rlsOrfEIgux5dqrctODU/MOJXf+vDS2XBOxpEUkXSfpIUmPStozW3WkpMmSHpH0qWzbAyWdkd0/T9IvJf1D0jOS9qjY5/9Kul/Sw5K+n8PL6rYNRo/iyaef5tlp05gzZw6XTLiCXXfeMe+wcudyaZ/LprpX3nib1995D4D3PpjDbQ8/wRpDls05qvw15efFo6a7bAdgZkTsDCBpceAnwOyIGCXpK8AxwJerPHcFYDPgU8A1wARJY4HVgA0BAddI2iIi7uz7l9Jz/fv354yfncr243anpaWFg/ffl7XXWjPvsHLncmmfy6a6Wa+/ycFnXEzLhx/yYQR7bPxpdhm9dt5h5a75Pi8q1KjpvBPxI8B4ST8Bro2Iu7IRjldm6ycB/9XOc/8cER8Cj0taLls2Nrs9kD0eRErMH0vEkg4DDgNYcdiwGryU3tlph7HstMPYvMNoOC6X9rlsPm69lQYz8dRv5h1GQ2qqz4uo62Cr3so1EUfEE5JGAzsBP5Z0c7bqg+xvC+3H+EHFfVX8/XFE/K4Lxz4LOAtgzKiRHx/JYGZmVgd59xEPBt6NiAuA8cCoXu7yJuBgSYOy/Q+R5A4iM7Nm4z7iLlsXOFXSh8Bc4AhgQk93FhE3S1oTuCdr4n4b2Bd4uQaxmplZUbiPuGsi4iZSLbbS8Ir1E4GtsvvnAedl9w9ss59BFfd/Afyi9tGamZnVXt41YjMzs9pqnVmrIJyIzcysfArUNF2cSM3MzErINWIzMyufAk2m7URsZmYl45m1zMzM8lOwmbWK85PBzMyshFwjNjOz8nHTtJmZWY4KNFirOD8ZzMzMSsg1YjMzKxlBv+LUM52IzcysXISbps3MzKxrXCM2M7Py8ahpMzOzvMhN02ZmZtY1rhGbmVn5eNS0mZlZTgo2atqJ2MzMSqZYV18qTqRmZmYl5BqxmZmVj5umzczMcuSmaTMzM+sK14jNzKxcJOjnpmkzM7P8uGnazMzMusI1YjMzKx+PmjYzM8tLsSb0cCI2M7PSkWvEZtaMFtjp4LxDaEj/Wn9U3iE0pPefn553CA3BidjMzMpFFKppujiRmpmZdUnWR9zbW2dHkYZJ+pukKZIek3RUT6J1jdjMzKxn5gHfjIjJkhYFJkm6JSIe785OnIjNzKx86jCzVkTMAmZl99+SNAUYAjgRm5lZk6tzH7Gk4cBI4L7uPteJ2MzMrLqlJU2seHxWRJzVdiNJg4ArgK9HxJvdPYgTsZmZlYuo1cxasyNiTIeHkgaQkvCFEXFlTw7iRGxmZiVTn5m1lGYNORuYEhGn9XQ/Pn3JzMysZzYF9gO2kfRgdtupuztxjdjMzMqnDlNcRsTdpIbwXnEiNjOz8inQzFpOxGZmVi5SXc4jrpXi/GQwMzMrIdeIzcysfNw0bWZmlqMCXY+4OD8ZzMzMSsg1YjMzK5n6TOhRK07EZmZWPm6aNjMzs65wjdjMzMpFuGnazMwsP4J+TsRmZma5kfuIzczMrCtcIzYzs/JxH7GZmVlOhE9fMjMzs65xjdjMzEqmWDNrFSfSkrvx5ltZY/0xrLruSE4Zf3re4TQMl0v7XDbVuVw+bsDKK7PS1X+Zf1t18oMsecCBeYfVt6Te3+rEibgBtLS08NWjj+GGqybw+KT7uPjyCTw+ZWreYeXO5dI+l011Lpfq5j77LM+N+1y67TaOeO993rrl5rzDsowTcQP458RJrDpiBCNWHs7AgQPZa4/dufra6/MOK3cul/a5bKpzuXRu4Y03Ye7zzzNv5sy8Q+lb/fr1/lavUOt2JGvXjJmzGDZ0yPzHQ4cMZsasWTlG1BhcLu1z2VTncuncYjvvwpvX/SXvMPpWLZql3TTdPknXS1oi7zhqKSI+tqxAI+/7jMulfS6b6lwunRgwgEW23Za3bnArQSMpzKhppfnKFBE75R1LrQ0dMpgXps+Y/3j6jJkMXn6FHCNqDC6X9rlsqnO5dGzQFlvywWOP0fLqq3mH0vc8ahokLSLpOkkPSXpU0p6SpklaOls/RtLt2f0TJZ0j6XZJz0j6WrZ8uKQpkn4DTAaGte6j2v6z54yWdIekSZJuktTw/4UbjB7Fk08/zbPTpjFnzhwumXAFu+68Y95h5c7l0j6XTXUul44tusvnePPakjdLtypQ03Rf1oh3AGZGxM4AkhYHftLB9p8CtgYWBf4l6cxs+RrAQRHxlWw/7e5f0gDgV8C4iHglS84nAwfX9JXVWP/+/TnjZ6ey/bjdaWlp4eD992XttdbMO6zcuVza57KpzuXSPi24IItssikvfff4vEOpk+L0Sahan0pNdiytDtwEXAZcGxF3SZoGjImI2ZLGAOMjYitJJwJzI+Lk7LlTgM+Sfij8LSJWrtjvNGAMsFSV/a8D/AN4Jtt8AWBWRIytEt9hwGEAKw4bNvq5qY/UugjMzAD41/qj8g6hIe3+/HQeff+DmmfMMeutHfdfe1mv99NvpXUmRcSYGoTUoT6rEUfEE5JGAzsBP5Z0MzCP/zSHL9jmKR9U3G+piO2dbuz/KuCxiNi4C/GdBZwFMGbUyL75NWJmZjmob9Nyb/VlH/Fg4N2IuAAYD4wCpgGjs01274P9/wtYRtLG2TYDJK3dm+OYmVkBuY8YgHWBUyV9CMwFjgAWAs6WdBxwX633HxFzJO0B/DLrk+4P/Bx4rJfHMjMz6xN92TR9E6kPt63Vq2x7YpvH61Q8XKfNuuHZ3ar7j4gHgS26F62
ZmZVLcZqmC3MesZmZWZf4esRmZmbWVa4Rm5lZ+RSnQuxEbGZmZVScTOymaTMzsxy5RmxmZiVTrAk9nIjNzKx8nIjNzMzyVJxE7D5iMzOzHLlGbGZm5eOmaTMzszwVJxG7adrMzCxHrhGbmVm51Pkyhr3lRGxmZuVToETspmkzM7McuUZsZmYlVJwasROxmZmVjgrUNO1EbGZm5VOgROw+YjMzsxy5RmxmZiUj3EdsZmaWJzdNm5mZWVe4RmxmZuUiClUjdiI2M7MSKk4idtO0mZlZjlwjNjOz8nHTtJmZWY6Kk4fdNG1mZpYn14jNzKxkPKGHmZlZvtxHXCyTHnhwthZZ4rm848gsDczOO4gG5HKpzuVSnculfY1UNiv1yV59HnHxRMQyecfQStLEiBiTdxyNxuVSnculOpdL+1w2jceJ2MzMSsg1YjMzs/wUqGnapy81nrPyDqBBuVyqc7lU53Jpn8umwTgRN5iI8D9JFS6X6lwu1blc2tccZaNUI+7trStHknaQ9C9JT0k6tifROhGbmVkJqQa3To4gLQD8GtgRWAvYW9Ja3Y3UidjMzKxnNgSeiohnImIOcAkwrrs7cSK2UpDkz3IFqUAjVcz6Qn2apocAL1Q8np4t6xaPmrZCk7RuRDwSER/mHUuDWQV4Ku8gGoUkRUS099jKZdIDD96kRZZYuga7WlDSxIrHZ7XpY6+Wrbv9uXIiLhBJawObRMTv846lEWS14JMlXRMRf8g7nkaQ1YQHAtdIOikiLso7prxVJl1JKwEvAvOAllwDazDt/TiR1K9oP3QjYoc6HWo6MKzi8VBgZnd34kRcANmXq4CRwGo5h5Oryi+LiPhQ0lXAUjmH1TCysvlA0o+AT0Ixv0hrqSIJHw1sCbwO3CPpkoh4PdfgGkhFOX0ZWIb0nfOHiHg518Aa2/3AapJWBmYAewFf6u5O3K9WDAOyL9J/ADtI+nzeAeUlIkLShpKGZ4vuBA6XtE1+UTUGSWMq+sofAw6QNKKZk3ArSTsDu0TEOFINZmREvO6xBR8l6ShSMrkP2AM4JN+IGltEzAP+B7gJmAJcFhGPdXc//hA2IElDJI3J7q8KnCRpvYh4BvgOsJ6kAc34JSJpYWALUtPr/5Bqfd8AWstrgRzDy0XFwKyjgWslHQf8GzgH+G9J/Tx4i8WBiyUdAcwBjsyWr5xfSI0l+z4ZERHbAaOAWcBPJS0s6RP5Rte4IuL6iFg9IlaJiJN7sg83TTem3Unnox0FLAy8Bpwr6SLS1UpWABaPiNnNMOik9TVKWgPYGfgF8DdgbdI5fC3A4pLOiIh3cwy1rire+6HACxHxJUnrA5sAfyHViufXiJvhswIfbYqvuP8M8CvgnYjYKlv3DWBdSYdnp540lTZ954tHxBuSBku6ldR8v1tEtEj6Eqlf/do84y0zNcH/ZWG0+cf4KSnp/iIi/iFpJOkL92BgO+APwDER0RQDTrKm5yOB9YBfAhdmP0SWBMYC+5ESz7FNkmxaf5zsCPwGuB14AvhVRLwtaV1gdeAkYEJEfDe/aPMh6UBgOeCZiLhc0q9JCWYKMAD4GrBfRDyaX5T5aPNdcziwcEScJumzwJnA8RFxqaQDgGOBnbMWOesDTsQNSNK2wGHAiqQvkn0j4h/ZugVJNeYtgG80Qw1Q0gbAH0nJdhvSD5SngYsjYla2zWeA3SPiW7kFWmeSRpH6824kJZax2aofRMQb2TZrAQc2U7kASNoTOIFUCz4Z+F/gQmBvYFvgLeDXzZiEK0n6b1I/8B4R8bykhYCtSD927yHNFnVAT/o9reuaro+x0UlaETgV+ElEbEzq5/uhpI0lLRAR70fEhcA6pCbIZvAp4J8RMTEifgrcRhpIsqekZbNtVgd2kbR4WftDJa0kaRtJC0halFQOn46IvwJ/Ba4APiSd0rVY9rTPANtLWiSfqOtD0iqS+mf3twO2Bw6JiDNJP1pPJSWUcyNiX+CoZkzCkrZsnYIx+wyNA74FvCnpMOCnpNPfxgD/B+zkJNz3nIgbQJvEMRuY1vogIk7KHl8ObJgN0hpK1uRWxzDrTtLg7O4kYHlJWwNExJXAk6QEvW62zXRSn9YbJW6aXpXUtDooIt4izW+7iaRDImIucC9wDekc2aHZc14BvhAR7+QRcD1IWobUSrRotmhlUk1uM0lLZgl3S+BMSUcCNGOfcGYF4O2sXN4itaacDpxL+n96HtiB1Jc+y6cu1YcHazWArK9vc2DdiPiNpFnARpJeiIhXSLXiVYG3sy/c6ZI2jIjX8oy7L1T0fY4ErpN0akScLul2YKzShAwPkRLNY6Rz9m6LiL/lF3V9RMRtkhYnjYw+KyL+JGkscIMkIuJsSX8HHmk9PzYimmGAzWukJujVJR0RET+S9B6pG2NTSXdGxCNKE+I0xZiKtloHrUXEJZJGAP/KxhecSfp/eiwbc7EP8FlSbpiXY8hNxYm4cQTwPUkvAz8CfgusJelt0hfKUdmXiSIpXRKG+T9KdgT2IY3SPEnSG6TBadsD+2brvgEsC+wr6RMR8UFeMddTNrL1N8CXJc3JBtSMJU1QsUCk6feaYpKKiv+FFknvA8OBVSR9LSJ+mY2n2A0YKOmWiJiSa8A5ycqpdRT54aSWk58A5wMHR8QdWXfHl4GjgL0j4v38Im4+TsQ5y/ru3ouIuyXtThqUdAwp2Ywl9QUfGxH3wH9mvymjrIl+CeB7wA8j4npJvwOuAxaKiF9LuhRYENgcOIU0kK20SbiihWADUj/43RFxcfYD7Zhs9WWSNuM/TbNNoWLU70rAixFxdfajbW9JX4+In2fJeFvg5jxjzVNFOe0CbAZcFxE/k/QB8KesFvwwacDfnhHxeH7RNqmI8C2nG6lPZgKpn7NftmxzUt/vPnnHl2O5/A7YqKJM9gHmtpYJ6Qfk94C18o61TuWxC+mUm1OAf5J+fADsBEwk1WBat1Xe8dahPDYjDVKDNInJFOA84JvZsq1Jp3R9O3u8RN4x530j9Q0/C1ySPW793zoCeBn4dDN8dhr15sFadVY5MCsipgLPAd8FPiWpf0TcBdwNjJc0TCWfKar19SlNJLBMtngG6ZzhhbLHU4BLgVMkbRAR8yLiB9EEv9wlbUo6bWssaRKTFYCdJR0QEdcDJ5K+YIFyt5hU2AT4i6T/AtYEvgCcTRqcdXyk8QJXA8tKWiqacD7pyu+ZrH94Fum86a0kHRhZU3WkUeXHk8afNMNnpyH5POI6qmhm3Jh08YaZEXGr0pSEI4HTSDNp7UY6x7G0fVpZc6IiYpqknUi1vSdJyXdv4OekaQlnkCYw2RU4CLg8Ih7IJ+r6qPicbEgaTPM/pDEEvyYl5AOBrwMnRMQ5uQVaZ/rojFknkn6gXB0RRytNwbgW6UftlIg4XtLC0QTn2bfV+vnJ7u8FjAD+GhH3Zqd2jQdOj4jz84zT/sM14jrKvlx3In25jgYOkXQFKQndQzqx/tfATSVPwsNJp00MlbQ66Rf54RGxOynxXkSa0OSXpAtdfJ50se1dSXMol1pFEv4eaZDePaRR849ExKvA30
l9evfkGGZdtRlwdASpFeBsYDela1J/QCqTHwMrS/pkMyZh+Eif8H7AcaQxFVdLGhcRt5Ka83+gNHWlNQAP1upjkj4JLBMRU7Pmoj2B4yINRBpIOodvfParfmFg0Yh4qfJXbZlkZTCGlIifA74IvABMBYiIQ5UubXhsRPwwe846wBmkvtBpecSdgyVI5wm3dlXcAxwv6VzSj7ivl/nHWlsVyaV1JqhxETEj+zydL2nfiHhc0mTgoCjxAL6uyAbv7Ub6n3lM0sOkiYEiIq6RtD/p/84agGvEfSgbsXkkcLCkdbIvk4VI/XyQzmk8l2y0a0S8GxEvZfdLl4Rh/uu6lTQ14yOk/t+BpNmPWl0DVNZmngS2johH6hVn3iLiZuC/SJ+dPSPiadJsYk8BX4s0m1ZTUZp+cUdSC8rcLCkPIHVh/CX7H2tp9iScWZt0rv1u2el9E0jjCX4jaeeIuCM8d3TDcI24D0XE+5L+TJr1Z5yk2aSa3fmSZkTEjUrTzK0haWng1bIm4DbeBB4gnZoVpDL5hdLUe28Dh5LmBgYg+2J9JYc4cxXpdJx5pGbEARFxAWkSk6YUEe9Jup7U/PwC6SIXz5GaqOcC7+UYXkPIur42jIgTJb0DbAjsLunyiLgy+zw1TUtKUXiwVh9pHViiNDH/l0hfFHNIp+asS6oJTyBNUvGNbARsU8kGbN0GfJtU09uG1Bd8Q0TcUtbm+e6SNI6UfLYDXoomueJWNVkr07rA0xHxWtYPeiCwYzThtJVt/0eUrmN+Emlw1k8lHUw6Nekh4I+RLmRvDcaJuA9J2pXUHPQV0nRxewLvkAYhLQYsSWqtfTCvGPOmNJXlRaTL9/0m73galaRlIk13asy/iP1BpNHje0cTXsChUnaa1mvZ6YDrkb53/p4l46+Srlh2UkS8mWecVp0TcR+RNAj4E3Bq/OcShp8h9fu1ABc1+5dHq6zVYAJpBqRprgVbZ7KBjXsC9zbToLVWbU5R2prUwvb5iHhQ6SpUo0mnAF4ZEadKWjyyS2Na4/Fgrb4TwNLAIjC/qfo+0gClebg/a76ImEyaKelZJ2HriuzUpPOchPUVUnfOJaTpKteLNOHNfaQzEbbKastOwg3Mg7X6SES8I+ky0tVfpkfElGwij11J8yg/nXOIjebtvAOwYmnWH21tTuU6mHQq1wWS3gTOlnQ0adT0QODAKOkFYsrEibhvXQkcDvxO6fJ0ewL/00yn4XRVs36pmvVEO6dyDSTNzLcHaaa+r3pcQTG4j7iPKV1daQNgOVL/5305h2RmJSDpMNIP/cpTuYaSLqM6NyLc/VUQTsRmZgVU5VSufYAvAzs36/SeReVEbGZWYD6Vq/jcR2xmVmwLAh8CX2zGUeRl4BqxmVnBeRa6YnMiNjMzy5En9DAzM8uRE7GZmVmOnIjNzMxy5ERs1g2SWiQ9KOlRSZdnFx/o6b62knRtdn9XScd2sO0S2bzC3T3GiZKO6eryNtucJ2mPbhxruCSfOmPWTU7EZt3zXkSsHxHrkK4vfXjlSiXd/r+KiGsi4pQONlmCdDlNMysZJ2KznrsLWDWrCU6R9BtgMjBM0lhJ90ianNWcBwFI2kHSVEl3ky6JSbb8QElnZPeXk3SVpIey2ybAKcAqWW381Gy7/5V0v6SHJX2/Yl/HS/qXpFuBNTp7EZIOzfbzkKQr2tTyt5N0l6QnJO2Sbb+ApFMrjv3fvS1Is2bmRGzWA9k1X3ckXdYSUsL7Y0SMBN4BvgNsFxGjgInA0dmUhL8HPgdsDizfzu5/CdwREZ8GRgGPAceSpjJcPyL+V9JYYDVgQ2B9YLSkLSSNBvYiTfr/X6R5zjtzZURskB1vCnBIxbrhwJbAzsBvs9dwCPBGRGyQ7f9QSSt34ThmVoVn1jLrnoUkPZjdvws4GxgMPBcR92bLNwLWAv4uCdJVce4BPgU8GxFPAki6ADisyjG2AfYHiIgW4A1JS7bZZmx2eyB7PIiUmBcFrmqda1jSNV14TetIOonU/D0IuKli3WUR8SHwpKRnstcwFlivov948ezYT3ThWGbWhhOxWfe8FxHrVy7Iku07lYuAWyJi7zbbrQ/UagYdAT+OiN+1OcbXe3CM84DPR8RDkg4EtqpY13ZfkR37yIioTNhIGt7N45oZbpo26wv3AptKWhVA0sKSVgemAitLWiXbbu92nn8bcET23AUkLQa8RarttroJOLii73mIpGWBO4HdJC0kaVFSM3hnFgVmSRoA7NNm3Rck9ctiHgH8Kzv2Edn2SFo9u9ynmfWAa8RmNRYRr2Q1y4slfSJb/J2IeCK7hux1kmYDdwPrVNnFUcBZkg4BWoAjIuIeSX/PTg+6IesnXhO4J6uRvw3sGxGTJV0KPEi6Pu1dXQj5u8B92faP8NGE/y/gDtL1tA+PiPcl/YHUdzxZ6eCvAJ/vWumYWVuea9rMzCxHbpo2MzPLkROxmZlZjpyIzczMcuREbGZmliMnYjMzsxw5EZuZmeXIidjMzCxHTsRmZmY5+n9L+tPbH4A9PAAAAABJRU5ErkJggg==\n", 173 | "text/plain": [ 174 | "
" 175 | ] 176 | }, 177 | "metadata": { 178 | "needs_background": "light" 179 | }, 180 | "output_type": "display_data" 181 | } 182 | ], 183 | "source": [ 184 | "np.set_printoptions(precision=2)\n", 185 | "\n", 186 | "fig1 = plt.figure(figsize=(7,6))\n", 187 | "plot_confusion_matrix(conf_mat, classes=classes, title='Confusion matrix, without normalization')\n", 188 | "#fig1.savefig('../cm_wo_norm.jpg')\n", 189 | "plt.show()" 190 | ] 191 | }, 192 | { 193 | "cell_type": "code", 194 | "execution_count": 12, 195 | "metadata": {}, 196 | "outputs": [ 197 | { 198 | "data": { 199 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAeIAAAGzCAYAAADzOxTxAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAgAElEQVR4nOzdeZyVdfn/8dd7GEgUFXBDFkVELRUVGDXL1NBQxC9kUmqK+5ZLpfkry1RKTUuzMpeyXMgMVIwwJPfQNMVY3BBzBWXRBBVcQYbr98d9DxzGMzDruefc834+HufBuZe5z3U+nHOu+7Pcn1sRgZmZmWWjIusAzMzM2jInYjMzsww5EZuZmWXIidjMzCxDTsRmZmYZciI2MzPLkBOxtXmSRkn6c/p8C0nvS2rXzK8xW9J+zXnMBrz2RZIWSnqjCcdokXLJSvpe+mQdhxk4EVsJpEnoTUnrFaw7QdLkDMMqKiJei4hOEVFdyteVtJukSZLelfS2pCckHdsMx+0FfA/YPiK6NfY4WZVLQ0maLOmEte2XvpdXShGT2do4EVupVALfaepBlMjV51bSHsCDwENAX2Aj4FvAkGY4/JbAooj4XzMcq+xJqsw6BrPacvWDZq3aZcDZkjoX2yjpC5L+I2lx+u8XCrZNlnSxpEeBD4E+6bqLJP07bWb8u6SNJN0iaUl6jN4Fx/iNpNfTbdMkfamOOHpLCkmVkvZIj13z+FjS7HS/CknnSHpZ0iJJt0nqWnCckZLmpNvOrUfZjI6In0fEwkhMi4hvFBzvREkvpbXlOyV1L9gWkk6R9KKkdyRdnZ6w7AfcB3RP479J0j6S5tZ6zyubzdOa+dS0nN6UdEXtckmXu6dxvJ3GdWLB8Ual5fEnSe9Jmimpqq43nx731DT+9yRdKGlrSY+lcdwmqUO6bxdJEyW9lb7XiZJ6ptsuBr4EXJW+36sKjn+apBeBFwvW9ZXUQdKTks5I17eT9Kik89fyf2bWfCLCDz9a9AHMBvYD/gpclK47AZicPu8KvAOMJKk5H54ub5Runwy8BuyQbm+frnsJ2BrYEHgOeCF9nUrgT8CNBTEcSVLTrCRpqn0DWCfdNgr4c/q8NxBAZa33UPOal6TL3wUeB3oCnwF+D4xJt20PvA/slW67AlgO7FekbNYFqoEvr6H8BgELgQHp8X4LPFywPYCJQGdgC+At4IB02z7A3IJ9V1su/P9Jnz8GjEyfdwI+X6xcSGrv1wDrALukr7lvQXl+DBwItAMuAR5fw/sL4E5gg/T/eCnwANCn4P/26HTfjYBD0nJbH7gd+FvBsSYDJxQ5/n0kn7OOBev6ps93JPm8fQ44N/1/bZf198aP1v8AbgD+BzxbsK5r+nl7Mf23y9qO4xqxldL5wBmSNqm1fijwYkTcHBHLI2IM8DzwfwX73BQRM9Ptn6TrboyIlyNiMfAP4OWIuD8ilpP8QPev+eOI+HNELEr//pckCW27BsR+JfAByQ81wMnAuRExNyKWkiSfEWmNcQQwMSIeTredB6yo47hdSFqmFqzhtY8AboiI6enxfgjsUVjjBy6NiHcj4jXgnyTJsTE+AfpK2jgi3o+Ix2vvkPY77wn8ICI+jogngT+SnEjVeCQiJkXSp3wzsPNaXvfnEbEkImYCzwL3RsQrBf+3/QHS/8M7IuLDiHgPuBjYux7v65KIeDsiPqq9ISKeBS4CxgNnk5yItOq+cGs1bgIOqLXuHOCBiNiG5ITynLUdxInYSib9wZvIpz+Y3YE5tdbNAXoULL9e5JBvFjz/qMhyp5oFSd+TNCtt+n6XpKa1cX3ilnQySU3ymxFRk1C3BMYrGVz1LjCLpGa7Wfp+VsYbER8Ai+o4/DskSXrzNYSwWvlExPvp8QrLp3BE9IcUvPcGOh7YFng+bd4/qI543k4TYY3a/1+141lHa+6frdf/paR1Jf0+bfZfAjwMdNbaR3MX+/wUGk1S658UES+uZV8zACLiYeDtWquHk3yeSP/96tqO44ELVmoXANOBXxasm0+S2AptAdxdsNzo24Sl/cE/APYFZkbECknvAKrn314I7JnWzmq8DhwXEY8W+ZsFJM2cNcvrkjSpfkpEfCjpMZLm1n/WEcZq5aNk9PlGwLy1xV/EByTNujXHagesbKFIk9DhSgbEfQ0YJ6l27POBrpLWL0jGWzQynob6HklLxu4R8YakXYAZrPq/rOtzsrbPzzUkJ4n7S9ozIh5plmgtE71UGR83/idjpYWsmEnSzVLjuoi4bi1/tllELACIiAWSNl3b6zgRW0lFxEuSbgW+DTyTrp4E/FbSN4HbSJLS9iQ/jM1hfZI+2reASknnkPRHrlHaBHsrcFREvFBr8++AiyUdHRFz0ub2L0TEBGAcMEXSnsATwE9Zc+vT94F7Jc0haYJeJGln4IcRcRjwF2CspL+Q1Lx/BkyJiNn1LYACL5DUTocC9wI/Immmr3nPRwL3RMRbaU0fkpr+ShHxuqR/A5dIOpukBn08ST98S1ufpIb8bjo47oJa298k6VuuN0kjgYEkzefDgNGSdk5bHqwMfUxwCOutfce1+D3vfRwRdQ40bC5umrYs/BRWfUsiYhFwEEltZxFJYjooIhY20+vdQ9LP+AJJE+rHrL2pEpIadDeSWmHNyOmZ6bbfkAwwulfSeyQDfHZP389M4DSSBLqApPl5bu2D14iIf5MMyBoEvCLpbeA6khMUIuIBkn7mO9LjbQ0cVt83X+u1FgOnkvTpziOpIRfGdgAwU9L76Xs8LCI+/tSBkgF1vUlqx+OBCyLivsbE1EC/BjqSDF57nNVbTSCJeUQ6ovrKtR1M0hbpMY9K+8T/AkwFftW8YVspiSS5NfXRSG9K2hwg/Xetlw4qHeVlZmaWC5uqXYxohhrxtbw3bW014nTQ5MSI2DFdvozk2v1L09a3rhHx/TUdw03TZmaWOx
Va6xCQtVtLPVXSGJKBnBun1+dfAFwK3CbpeJLLLr++tpdxIjYzs1ypaZpuaRFxeB2b9m3IcdxHbGZmliHXiM3MLHcqmqFluhmugKoXJ2IzM8udcmrudSIGNl6vY2zZZf2sw2h1tGmPte9kZtZIs197jYULFzVH3XU1Qs0zWKtEnIiBLbusz5TvrHVgW5tTedrPsg7BzHKsas99sg6hVXAiNjOz3HHTtJmZWUZEMw3WKpFyOmkwMzPLHdeIzcwsd8qplulEbGZm+SJQGY2aLqeTBjMzs9xxjdjMzHKlVHNNNxcnYjMzy51yGjXtRGxmZrlTTjXicorVzMwsd1wjNjOzXEkm9CiftmknYjMzy51yau4tp1jNzMxyxzViMzPLlXKba9qJ2MzMcqecmnvLKVYzM7PccY3YzMxyp4LyaZt2IjYzs1wptz5iN02bmZllyDViMzPLnXKqZToRm5lZrkjl1TTtRGxmZrlTToO1yqn2bmZmljuuEZuZWe64adrMzCwjoryae8sp1tyoOOBI2p16Ke2OObfufQZ9nXYnjKLdMT+CTXuVKLLs3X3v/Wy3SxV9+/Xn0st/9antS5cu5dCjjqVvv/7svve+zJ4zJ4MoS8/lUpzLpTiXS3lxIs7Aimcfp3rc1XVu11Y7QJdNqP7jKKrv+QvtvnJYCaPLTnV1NaeddTb/GD+O56ZNYczt43hu1vOr7XP96Jvp0rkzLz0zgzNPP5UfnDcqm2BLyOVSnMulOJdLokJNf5Qs1tK9lK009yX4+IM6N2ubnYiZU5KFBbNhnY6w3gYlCS1LT0ydRt8+feizVW86dOjAYSMOYcLESavtM2HiJI4+4nAARhw8nAcmP0REZBBt6bhcinO5FOdyASEqmuFRKk7ErVGnDYn33l25GO+9C506ZxhQacybv4BePXusXO7ZozvzFiyoc5/Kyko23GADFi16u6RxlprLpTiXS3Eul/KTWSKWNErS2c10rMmSqprjWK1DsTOx/Jyt1qXYGblqFUUUKQfV3ilnXC7FuVyKc7kk3DRtTfP+u2j9VTVgrd8Z3l+cYUCl0bNHd16fO2/l8tx58+nebfPV9+m+ap/ly5ezeMkSunbtUtI4S83lUpzLpTiXS0LN8CiVkiViSUdJelrSU5JurrVtF0mPp9vHS+qSrl9Z05W0saTZ6fOOksam+98KdEzXHy/pVwXHPVHSFaV6j80lXnoa7bB7srB5b1j6EXywJNOYSmHXgQN48eWXeXX2bJYtW8bYcXcwbOiQ1fYZNnQIo28ZA8C48RMYtPdeuTuTr83lUpzLpTiXy6q7L5VLjbgk1xFL2gE4F/hiRCyU1BX4dsEufwLOiIiHJP0UuAD47hoO+S3gw4jYSdJOwPR0/VjgaUnfj4hPgGOBk5v7/TRVxUHHol7bQMdOtDvlIlY8ehdUtAMgnnqEeGUm6rMD7U4cBZ8so/off8424BKprKzkql9exv7DD6G6uprjjjqSHbb/HOdfeDFVA/ozbOiBHH/0SEaecDJ9+/Wna5cujB19Q9ZhtziXS3Eul+JcLuVHpRgpJ+kMoFtEnFuwbhTwPvAH4JmI2CJdvzVwe0QMkDQZODsipkraGJgaEb0l/Q24MiIeTP9mOnBSut8fgEnALODmiNi1jphOAk4C2KJzp4Ev/+ioFnnv5azytJ9lHYKZ5VjVnvswdfqMZq97btWuffykU9Ob2o9e8ta0iGjx8UelmllLNG600XJWNZ+vU2tbXcf7I/Aj4HngxroOHBHXAdcBDOy5af5HQpmZtRHldvelUvURPwB8Q9JGAGnTNAARsRh4R9KX0lUjgYfS57OBgenzEQXHexg4Ij3WjsBOBcebAvQCvgmMae43YmZm1pxKUiOOiJmSLgYeklQNzCBJsjWOBn4naV3gFZK+XYDLgdskjQQeLNj/WuBGSU8DTwJP1HrJ24BdIuKdZn8zZmbW6pXTJUElu+lDRIwGRtex7Ung80XWP09BbRf4cbr+I2BN8z7uCXx6glUzM2sTyqhluqxOGtZKUmdJLwAfRcQDWcdjZma2Nrm6DWJEvAtsm3UcZmaWneQ64vKpE+cqEZuZmYGbps3MzKyeXCM2M7PcKacasROxmZnljhOxmZlZhsrpJhbuIzYzM8uQa8RmZpYrpb6fcFM5EZuZWe6UU3NvOcVqZmaWO64Rm5lZ7pTRWC0nYjMzyx+VUS+xm6bNzMwy5BqxmZnlikdNm5mZZcyJ2MzMLEMVZZSJ3UdsZmaWIdeIzcwsZ1RWo6adiM3MLFfKbbCWm6bNzMwy5ERsZmb5omRmraY+6vVS0pmSZkp6VtIYSes0NFwnYjMzyx01w2OtryH1AL4NVEXEjkA74LCGxupEbGZm1niVQEdJlcC6wPzGHMDMzCxXKkowXCsi5km6HHgN+Ai4NyLubehxXCM2M7NcaY5m6TSNbyxpasHjpNVeR+oCDAe2AroD60k6sqHxukZsZmZW3MKIqFrD9v2AVyPiLQBJfwW+APy5IS/iRGxmZrlTovsRvwZ8XtK6JE3T+wJTG3oQJ2IzM8udUuThiJgiaRwwHVgOzACua+hxnIgBbdqDytN+lnUYrc4p6/XMOoRW63cfzM06BDNbg1JNcRkRFwAXNOUYHqxlZmaWIdeIzcwsV0R53QbRidjMzHKnjPKwm6bNzMyy5BqxmZnlTjnViJ2Izcwsd0o1aro5uGnazMwsQ64Rm5lZ7pRoZq1m4URsZma5IsqrubecYjUzM8sd14jNzCx3yqhl2onYzMzyR2XUSexEbGZmuVM+adh9xGZmZplyjdjMzHJFlFeN2InYzMzyRSqrPmI3TZuZmWXINWIzM8sd34/YzMwsQyqjTOymaTMzswy5RmxmZrkifNMHMzOz7MiJ2MzMLFO+fMnMzMzqxTViMzPLnTKqEDsRm5lZ/rhp2szMzOrFNWIzM8uVcrt8yTXiDNx97/1st0sVffv159LLf/Wp7UuXLuXQo46lb7/+7L73vsyeMyeDKEtv5PXX8Is3X+G8Z6bUuc83fvMLfvrik/z4qcfo1X/nEkaXLX9minO5FNfmy0VQITX5USpOxCVWXV3NaWedzT/Gj+O5aVMYc/s4npv1/Gr7XD/6Zrp07sxLz8zgzNNP5Qfnjcom2BJ77KZb+O0BB9e5fcchg9l0m605f5tduOWkb/PNaz/9A5NH/swU53IpzuVSfpyIS+yJqdPo26cPfbbqTYcOHThsxCFMmDhptX0mTJzE0UccDsCIg4fzwOSHiIgMoi2tl/71KB++/U6d23caPpTH/zQGgFen/IeOnTuzQbfNShVeZvyZKc7lUpzLJSE1/VEqTsQlNm/+Anr17LFyuWeP7sxbsKDOfSorK9lwgw1YtOjtksbZGnXu0Z13Xp+3cvndufPo3KN7hhGVhj8zxblcinO5ACT3I27qo1QyTcSSvi1plqRbsoyjlIqdddb+/w6K7VNGIw9aSNEyyNlZfDH+zBTncinO5VJ+sq4RnwocGBFHZBxHyfTs0Z3X566q1c2dN5/u3TZffZ/uq/ZZvnw5i5csoWvXLiWNs
zV6Z+48uvRadabfuWcP3p2/YA1/kQ/+zBTncinO5ZKOmq5o+qNUMkvEkn4H9AHulPQ9SX+T9LSkxyXtlO6ziaT7JE2X9HtJcyRtnG47T9Lz6fYxks6WtLWk6QWvsY2kadm8w+J2HTiAF19+mVdnz2bZsmWMHXcHw4YOWW2fYUOHMPqWpC903PgJDNp7L5+tAk/fOYnPH5X0a221+658vHgxS954M+OoWp4/M8W5XIpzuZDe9KF8mqYzu444Ik6RdADwZeACYEZEfFXSIOBPwC7p+gcj4pJ035MAJFUBhwD9Sd7DdGBaRLwsabGkXSLiSeBY4KZiry/ppJrjbdGrVwu+09VVVlZy1S8vY//hh1BdXc1xRx3JDtt/jvMvvJiqAf0ZNvRAjj96JCNPOJm+/frTtUsXxo6+oWTxZen4v9zAtvt8iU4bb8Qlrz/P3y/4Ge3aJx/Rf/3+Bp6ddA87HjiYC196imUffsToY7+VccSl4c9McS6X4lwuiXI6r1CWI+UkzQaqgPuAQyLilXT968COwEPAwRHxarr+bWBb4EigS0RckK6/ApgfEZdLOgLYDTgLeAHYLSIWrSmOqgH9Y+ojk5v/DZa5U9brmXUIrdbvPpibdQhmZa9qz32YOn1Gs6fMfh3Xib9t1fQKVt9ZL02LiKpmCGmNWsvMWsX+I6KO9XXtX+MO0po0SS15jUnYzMzyp5ya2rMerFXjYeAIAEn7AAsjYgnwCPCNdP1goGY0wSPA/0laR1InYGjNgSLiY+Ae4FrgxlK9ATMzaz18HXHDjQKqJD0NXAocna7/CTA4HYA1BFgAvBcR/wHuBJ4C/gpMBRYXHO8Wkhr1vSWJ3szMrJEybZqOiN4Fi8OL7LIY2D8ilkvaA/hyRCxNt10eEaMkrUtSo/5lwd/tCdwQEdUtEbeZmbVegpLOFd1UraWPuC5bALdJqgCWAScWbLtO0vbAOsDoiJgOIGk8sDUwqNTBmplZK1DipuWmatWJOCJeJLlEqdi2b9axvu67BpiZmbUyrToRm5mZNUY5jZp2IjYzs9wpozzsRGxmZvkiyisRt5bLl8zMzNok14jNzCxfJFRRPlViJ2IzM8sdN02bmZlZvbhGbGZmueOZtczMzDLiUdNmZmZWb64Rm5lZ7nhmLTMzs6yU2U0f3DRtZmaWIdeIzcwsd9w0bWZmlqEyysNOxGZmli/J5Uvlk4ndR2xmZpYh14jNzCxfBCqjaqYTsZmZ5YzcNG1mZmb14xqxmZnlj+9HbGZmliE3TZuZmeWfpM6Sxkl6XtIsSXs09BiuEZuZWb6opNcR/wa4OyJGSOoArNvQAzgRm5lZ/pSgj1jSBsBewDEAEbEMWNbQ47hp2szMcia9/VJTH2vXB3gLuFHSDEl/lLReQ6N1IjYzMytuY0lTCx4n1dpeCQwAro2I/sAHwDkNfRE3TVudrp07NesQWq1Pzj486xBapfaXj8k6BLOkQts8TdMLI6JqDdvnAnMjYkq6PI5GJGLXiM3MLH9K0DQdEW8Ar0vaLl21L/BcQ0N1jdjMzKzxzgBuSUdMvwIc29ADOBGbmVnuNFPT9FpFxJPAmpqv18qJ2MzM8scza5mZmVl9uEZsZmb5IvmmD2ZmZlny/YjNzMysXlwjNjOz/HHTtJmZWUZEWY2adiI2M7PcURl1vNaZiNPbO9UpIpY0fzhmZmZty5pqxDOBIKnk16hZDmCLFozLzMys8fLQNB0RvUoZiJmZWbOQSjbFZXOoVyu6pMMk/Sh93lPSwJYNy8zMrG1YayKWdBXwZWBkuupD4HctGZSZmVmTlOA2iM2lPqOmvxARAyTNAIiIt9PbPZmZmbVOOWua/kRSBckALSRtBKxo0ajMzMzaiPrUiK8G7gA2kfQT4BvAT1o0KjMzs0ZKWpbLp0a81kQcEX+SNA3YL1319Yh4tmXDMjMza4Iyapqu78xa7YBPSJqny2i+EjMzs9atPqOmzwXGAN2BnsBfJP2wpQMzMzNrnGYYMd3KRk0fCQyMiA8BJF0MTAMuacnAzMzMGitXfcTAnFr7VQKvtEw4ZmZmTSTy0Ucs6VckfcIfAjMl3ZMuDwYeKU14ZmZm+bamGnHNyOiZwF0F6x9vuXDMzMyaLhdN0xFxfSkDMTMzazZl1DRdn1HTW0saK+lpSS/UPEoRXF7dfe/9bLdLFX379efSy3/1qe1Lly7l0KOOpW+//uy+977MnjMngyhL4+5//ovP7nUA23xxMJdedd2ntj/8+H8YeMDXaL/lDoybePfK9f989HH6D/7qykfHrXfib3ffX8rQW5y2H0jlBddROeqPVAz++qd36LIJ7b57CZU//C2V516NdqhK/m7LbZN1P/wtlT+6Cu28R2kDLyF/l4pzuZSX+lwTfBNwI0n39xDgNmBsC8aUa9XV1Zx21tn8Y/w4nps2hTG3j+O5Wc+vts/1o2+mS+fOvPTMDM48/VR+cN6obIJtYdXV1Zz+458y6eY/MPOfExk74S6ee+Gl1fbZosfm3HjFJXzzqwettv7LX/w8M+79GzPu/RsP3HoT667TkcF7f7GU4bcsVdDu0FNZftX5LL/wFCqq9oZuq9+ZtN2Qw4hp/2L5JWew/PpLaXfYaQDE/Dks//l3kvVXnUe7b54BFfm7/N/fpeJcLjTPpUslbNquz7dz3Yi4ByAiXo6IH5Pcjcka4Ymp0+jbpw99tupNhw4dOGzEIUyYOGm1fSZMnMTRRxwOwIiDh/PA5IeIiAyibVlPPPk0fXtvQZ8te9GhQwcOHX4gE+59YLV9evfqyU7bb0fFGpqZxt11D0O+/CXW7dixpUMuGfXelnhrPix6A6qXs2Law1TUqtlGBKyzbrJ/x/WIxYuSDZ8shRXpdPDtO0AOPzvg71JdXC4JVajJj1KpTyJeqqTX+2VJp0j6P2DTFo4rt+bNX0Cvnj1WLvfs0Z15CxbUuU9lZSUbbrABixa9XdI4S2HegjfpufnmK5d7duvGvAVvNvg4t945icO+OrQ5Q8te543gnYUrF+OdhbDhRqvtsuKuW6jYbRCVF/+Jdqf9hOpbV92dVL23o/LH11J57jVUj7lqVWLOEX+XinO5lJ/6JOIzgU7At4EvAicCx7VkUHWR9EdJ22fx2s2l2Fln7RaQoNg+5TPwoL6KnX839H0uePN/PPP8C+y/957NE1SrUawcVi+xiqp9WPH4fSw/9yiqr76AymPOXvlhitn/ZflF32L5L75Lxf7fgMr2JYi5tPxdKs7lkspT03RETImI9yLitYgYGRHDIuLRlgpIiaJxRcQJEfFcS712KfTs0Z3X585buTx33ny6d9t89X26r9pn+fLlLF6yhK5du5Q0zlLouflmzC04U5/7xht079awxpbb/n43Xz1gP9q3z1mieXchdNl45aK6bAyLV6+xVHxhMCum/wuAePV5aN8e1ttg9eO88Tos+xh1793SEZecv0vFuVxYNaFHUx8lUmciljRe0l/rejRnEJJ6S5ol6RpgOnC9pKmSZqa3XqzZb7KkqvT5+5IulvSUpMclbSZpfUmvSmqf7rOBpNk1
y63BrgMH8OLLL/Pq7NksW7aMsePuYNjQIavtM2zoEEbfMgaAceMnMGjvvfJ3tgrsunM/Xnx1Dq++Npdly5Zx64RJDPvKoAYdY+yEuzh8eM6apYGY8wLatDtstBm0q6Ri4F6seHr1S/jjnbfQdrskC916QWUHeH9x8jc1g7O6boo27UksaniTf2vn71JxLpeEpCY/SmVNE3pcVbIoEtsBx0bEqZK6RsTbktoBD0jaKSKerrX/esDjEXGupF8AJ0bERZImA0OBvwGHAXdExCe1X0zSScBJAFv06lV7c4uprKzkql9exv7DD6G6uprjjjqSHbb/HOdfeDFVA/ozbOiBHH/0SEaecDJ9+/Wna5cujB19Q8niK6XKykp+e+F5HHDE8VSvWMGxhx7CDtttw/mXXUnVzjsybPAg/vPkM3zthNN5Z/ES/n7fPxl1xVU8++BEAGa/PpfX5y9g7z12y/idtIAVK6i+9VoqT78IKipY8di9sOA1Kg46kpjzIvHMFKrv+APtjvgOGvRViKD65isA0NY70G7w16F6ebL+1mvggyUZv6Hm5+9ScS6X8qPWMFJOUm/gnxGxVbp8CkmSrAQ2B86IiLFpkj07IqZKWgqsExEh6VDgKxFxgqQvAt+PiOGSHiNJ0Gu8f3LVgP4x9ZHJLfX2yla880bWIbRayy8+M+sQWqX2l4/JOgQrI1V77sPU6TOaveo5cNPOMeUbTR830v7qu6ZFRFUzhLRG9b0fcSl8ACBpK+BsYNeIeEfSTcA6Rfb/JFadRVSTvpeIeDRt6t4baLe2JGxmZjlURk3trfEq/w1IkvJiSZuRTCLSUH8iuYfyjc0ZmJmZWXOrdyKW9JmWDKRGRDwFzCC52cQNQGNGaN8CdCFJxmZm1paIsrp8aa1N05J2A64HNgS2kLQzcEJEnNFcQUTEbGDHguVj6thvn4LnnQqejwPGFey6JzAuIt5trhjNzKyMlFHTdH36iK8EDiIZhUxEPCWp1U5xKem3JM3ZB2Ydi5mZ2drUJxFXRMScWtdUVbdQPE3WnDV1M4EbUG8AACAASURBVDMrRyqrG53UJxG/njZPR3pd7xmAb4NoZmatVxk1TdfnlOFbwFnAFsCbwOfTdWZmZtZEa60RR8T/SGaoMjMza/1qRk2XifqMmv4DRW6UExEntUhEZmZmTZWnRAzcX/B8HeBg4PWWCcfMzKypcjZYKyJuLVyWdDNwX4tFZGZm1oY0Zq7prYAtmzsQMzOzZpOnpmlJ77Cqj7gCeBs4pyWDMjMza7Q8DdZSMovHzsC8dNWKaA33TTQzM8uJNfZmp0l3fERUpw8nYTMza/3ydNMH4AlJAyJieotHY2Zm1mQ5GTUtqTIilpPcyehESS+T3CdYJJXlASWK0czMLLfWVCN+AhgAfLVEsZiZmTWPnAzWEkBEvFyiWMzMzJouR6OmN5F0Vl0bI+KKFojHzMys6XKSiNsBnUhrxmZmZtb81pSIF0TET0sWiZmZWTMQQnkYNY1rwmZmVq7KqGl6TacM+5YsCjMzszaqzhpxRLxdykDMzMyaRY5GTZuZmZWnMkrE5dObbWZmlkOuEZuZWc7kZK5pMzOzsuWmaTMzM6sP14jNzCxfPGrazMwsYyVMxJLaAVOBeRFxUEP/3onY6qQu3bIOodVqf/mYrENolaon3ZB1CK1SuwOPyzqENqbkg7W+A8wCNmjMH7uP2MzMrJEk9QSGAn9s7DFcIzYzs/xpnqbpjSVNLVi+LiKuq7XPr4HvA+s39kWciM3MLF+ab7DWwoioqvNlpIOA/0XENEn7NPZF3DRtZmbWOF8EhkmaDYwFBkn6c0MP4kRsZmY5kw7WaupjLSLihxHRMyJ6A4cBD0bEkQ2N1k3TZmaWP76O2MzMrO2IiMnA5Mb8rROxmZnlj2vEZmZmGfEUl2ZmZlkqr9sglk+kZmZmOeQasZmZ5Y+bps3MzDJURonYTdNmZmYZco3YzMzyRYDKp57pRGxmZjkjqHDTtJmZmdWDa8RmZpY/bpo2MzPLkEdNm5mZWX24RmxmZvmi8pri0onYzMzyp4yapp2Izcwsf8posFb5RGpmZpZDrhGbmVn+uGnazMwsI2U2WKt8IjUzM8shJ+IM3H3v/Wy3SxV9+/Xn0st/9antS5cu5dCjjqVvv/7svve+zJ4zJ4Mos+GyKc7lUpwGfoWKg06m4isj695n532oOOBYKvY7EjpvWsLosuPPC0mtuKmPEnEiLrHq6mpOO+ts/jF+HM9Nm8KY28fx3KznV9vn+tE306VzZ156ZgZnnn4qPzhvVDbBlpjLpjiXS91iznOseGR83Tt0643W78yKu29kxfT7qRgwqHTBZcSfl5Qqmv4oESfiEnti6jT69ulDn61606FDBw4bcQgTJk5abZ8JEydx9BGHAzDi4OE8MPkhIiKDaEvLZVOcy2UNFs6DZR/XuVndtybmzEoW3n4D2n8G1lmvRMFlw5+X8uNEXGLz5i+gV88eK5d79ujOvAUL6tynsrKSDTfYgEWL3i5pnFlw2RTncmk8dexEfPjeqhUfvQ8dO2UXUAn480I6WKsZHiXS6hKxpNmSNi6yfpikc7KIqTkVO+us3RURFNunfIbiN5bLpjiXSzPLec3Pn5eUm6abX0TcGRGXZh1HU/Xs0Z3X585buTx33ny6d9t89X26r9pn+fLlLF6yhK5du5Q0ziy4bIpzuTRefPQ+Wnf9VSs6doKPP8guoBLw56X8ZJqIJa0n6S5JT0l6VtKh6aYzJE2X9Iykz6b7HiPpqvT5TZKulPRvSa9IGlFwzP8n6T+Snpb0kwze1hrtOnAAL778Mq/Ons2yZcsYO+4Ohg0dsto+w4YOYfQtYwAYN34Cg/beK39nq0W4bIpzuTRezH8Fbfm5ZKFrN/hkWe4TsT8vqTIaNZ31hB4HAPMjYiiApA2BnwMLI2KApFOBs4ETivzt5sCewGeBO4FxkgYD2wC7AQLulLRXRDzc8m+lfiorK7nql5ex//BDqK6u5rijjmSH7T/H+RdeTNWA/gwbeiDHHz2SkSecTN9+/enapQtjR9+Qddgl4bIpzuVSN+02BG3SCz6zDhUHnkA89xhUtAMgXnka3niV6NabigOOherlrJh6b8YRtzx/XgBUVnNNK8uRcpK2Be4BbgMmRsS/JM0GvhgR8yTtDlwcEftJOgaoiojTJd0E3BcRt6THeS8i1pd0OTACeDd9iU7AJRFxfZHXPgk4CWCLXr0Gznn+mRZ9r2ZtQfWkvP2gN492Bx6XdQitUtWe+zB1+oxmr3pWbd0rplzy3SYfp/LQs6dFRFUzhLTm12npF1iTiHhB0kDgQOASSTWnq0vTf6upO8alBc9V8O8lEfH7erz2dcB1AFUD+ud79IaZmbVaWfcRdwc+jIg/A5cDA5p4yHuA4yR1So/fQ1LbmErHzMxWcR9xvfUDLpO0AvgE+BYwrrEHi4h7JX0OeCwdePA+cCTwv2aI1czMykUZ9RFn3TR9D0kttlDvgu1TgX3S5zcBN6XPj6l1nE4Fz38D/Kb5ozUzM2t+WdeIzczMmlf
NzFplwonYzMzyp4yapssnUjMzsxxyjdjMzPKnjGYKcyI2M7OcKa+ZtZyIzcwsX0RZDdYqn1MGMzOzHHKN2MzM8sdN02ZmZhkqo8Fa5XPKYGZmlkOuEZuZWc4IKsqnnulEbGZm+SLcNG1mZmb14xqxmZnlj0dNm5mZZUVumjYzM7P6cY3YzMzyx6OmzczMMlJmo6adiM3MLGfK6+5L5ROpmZlZDrlGbGZm+eOmaTMzswy5adrMzMzqwzViMzPLFwkq3DRtZmaWHTdNm5mZWX24RmxmZvnjUdNmZmZZKa8JPZyIzcwsd+QasZm1Re0OPC7rEFql/+4yIOsQWqWPX5ubdQitghOxmZnliyirpunyidTMzKxe0j7ipj7W9ipSL0n/lDRL0kxJ32lMtK4Rm5mZNc5y4HsRMV3S+sA0SfdFxHMNOYgTsZmZ5U8JZtaKiAXAgvT5e5JmAT0AJ2IzM2vjStxHLKk30B+Y0tC/dSI2MzMrbmNJUwuWr4uI62rvJKkTcAfw3YhY0tAXcSI2M7N8Ec01s9bCiKha40tJ7UmS8C0R8dfGvIgTsZmZ5UxpZtZSMmvI9cCsiLiiscfx5UtmZmaN80VgJDBI0pPp48CGHsQ1YjMzy58STHEZEY+QNIQ3iROxmZnlTxnNrOVEbGZm+SKV5Dri5lI+pwxmZmY55BqxmZnlj5umzczMMlRG9yMun1MGMzOzHHKN2MzMcqY0E3o0FydiMzPLHzdNm5mZWX24RmxmZvki3DRtZmaWHUGFE7GZmVlm5D5iMzMzqw/XiM3MLH/cR2xmZpYR4cuXzMzMrH5cIzYzs5wpr5m1yifSHLn73vvZbpcq+vbrz6WX/+pT25cuXcqhRx1L33792X3vfZk9Z04GUWbDZVOcy6U4l0tx3X52KVs/9gS9J/6jzn02/fH5bHXfg/S+8y4+s/0OJYyuRKSmP0rEibjEqqurOe2ss/nH+HE8N20KY24fx3Oznl9tn+tH30yXzp156ZkZnHn6qfzgvFHZBFtiLpviXC7FuVzqtvivdzD3+GPr3L7e3vvQvndvXv3KIN4471w2+8lPSxid1eZEXGJPTJ1G3z596LNVbzp06MBhIw5hwsRJq+0zYeIkjj7icABGHDycByY/RERkEG1puWyKc7kU53Kp20dT/0P14nfr3N5p3/1YMn48AB8/9STt1t+AdptsUqrwSqOioumPUoVaslcyAObNX0Cvnj1WLvfs0Z15CxbUuU9lZSUbbrABixa9XdI4s+CyKc7lUpzLpfEqN9uM5W/MX7n8yZtvULlZtwwjambN0Sztpum6SZokqXPWcTRWsbPx2v/fQbF9ymcofmO5bIpzuRTncmmCYmXQBloKWquyScRKVETEgRFRd5tLK9ezR3denztv5fLcefPp3m3z1ffpvmqf5cuXs3jJErp27VLSOLPgsinO5VKcy6Xxlr/xBpXduq9cbr9ZN5b/780MI2oBqmj6o0Ra7JUkrSfpLklPSXpW0qGSZkvaON1eJWly+nyUpBskTZb0iqRvp+t7S5ol6RpgOtCr5hjFjp/+zUBJD0maJukeSZvXEWImdh04gBdffplXZ89m2bJljB13B8OGDlltn2FDhzD6ljEAjBs/gUF779UmzuJdNsW5XIpzuTTe+w/ezwYHHwzAOjvvQvX771H91lsZR9XMyqhpuiWvIz4AmB8RQwEkbQj8fA37fxb4MrA+8F9J16brtwOOjYhT0+PUeXxJ7YHfAsMj4q00OV8MHNes76wJKisrueqXl7H/8EOorq7muKOOZIftP8f5F15M1YD+DBt6IMcfPZKRJ5xM33796dqlC2NH35B12CXhsinO5VKcy6Vum1/xa9bdbXfadelCn4cfYdGVv4HK5Od+8dgxfDB5MuvtvQ9b3f8g8dHHLPjhDzKOuCWUzwmXWmoEoaRtgXuA24CJEfEvSbOBqohYKKkKuDwi9pE0CvgkIi5O/3YW8BWSE4V/RsRWBcedDVQBXYscf0fg38Ar6e7tgAURMbhIfCcBJwFs0avXwDnPP9PcRWBmBsB/dxmQdQit0iGvzeXZj5c2e8as2mmH+M/E25p8nIotd5wWEVXNENIatViNOCJekDQQOBC4RNK9wHJWNYevU+tPlhY8ry6I7YMGHH88MDMi9qhHfNcB1wFUDejvUQpmZrlR2qblpmrJPuLuwIcR8WfgcmAAMBsYmO5ySAsc/7/AJpL2SPdpLymHU8aYmdkauY8YgH7AZZJWAJ8A3wI6AtdL+hEwpbmPHxHLJI0Arkz7pCuBXwMzm/haZmZmLaIlm6bvIenDrW3bIvuOqrW8Y8HijrW29U6fFj1+RDwJ7NWwaM3MLF/Kp2nad18yM7N88f2IzczMrL5cIzYzs/wpnwqxE7GZmeVR+WRiN02bmZllyDViMzPLmfKa0MOJ2MzM8seJ2MzMLEvlk4jdR2xmZpYh14jNzCx/3DRtZmaWpfJJxG6aNjMzy5BrxGZmli8lvo1hUzkRm5lZ/pRRInbTtJmZWYZcIzYzsxwqnxqxE7GZmeWOyqhp2onYzMzyp4wSsfuIzczMMuQasZmZ5YxwH7GZmVmW3DRtZmZm9eEasZmZ5YsoqxqxE7GZmeVQ+SRiN02bmZllyDViMzPLHzdNm5mZZah88rCbps3MzLLkGrGZmeWMJ/QwMzPLlvuIy8u0GU8u1Hqd52QdR2pjYGHWQbRCLpfiXC7FuVzq1prKZssWOaqvIy4/EbFJ1jHUkDQ1IqqyjqO1cbkU53IpzuVSN5dN6+NEbGZmOeQasZmZWXbKqGnaly+1PtdlHUAr5XIpzuVSnMulbi6bVsaJuJWJCH9JinC5FOdyKc7lUre2UTZKasRNfdTnlaQDJP1X0kuSzmlMtE7EZmaWQ2qGx1peQWoHXA0MAbYHDpe0fUMjdSI2MzNrnN2AlyLilYhYBowFhjf0IE7ElguS/FkuIJXRSBWzllCapukewOsFy3PTdQ3iUdNW1iT1i4hnImJF1rG0MlsDL2UdRGshSRERdS1bvkyb8eQ9Wq/zxs1wqHUkTS1Yvq5WH3uxbN3gz5UTcRmRtAPwhYj4Q9axtAZpLfhiSXdGxB+zjqc1SGvCHYA7JV0UEX/JOqasFSZdSVsCbwDLgepMA2tl6jo5kVRRbie6EXFAiV5qLtCrYLknML+hB3EiLgPpj6uA/sA2GYeTqcIfi4hYIWk80DXjsFqNtGyWSvoZsBGU5w9pcypIwmcBewPvAo9JGhsR72YaXCtSUE4nAJuQ/Ob8MSL+l2lgrdt/gG0kbQXMAw4DvtnQg7hfrTy0T39I/w0cIOmrWQeUlYgISbtJ6p2uehg4RdKg7KJqHSRVFfSVzwSOltSnLSfhGpKGAgdFxHCSGkz/iHjXYwtWJ+k7JMlkCjACOD7biFq3iFgOnA7cA8wCbouImQ09jj+ErZCkHpKq0ud9gYsk7RQRrwA/BnaS1L4t/ohIWhfYi6Tp9XSSWt+ZQE15tcswvEwUDMw6C5go6UfAO8ANwMmSKjx4iw2BMZK+BSwDzkjXb5VdSK1L+nvSJyL2Aw
YAC4BfSFpX0meyja71iohJEbFtRGwdERc35hhumm6dDiG5Hu07wLrA28CNkv5CcreSzYENI2JhWxh0UvMeJW0HDAV+A/wT2IHkGr5qYENJV0XEhxmGWlIF//c9gdcj4puSdgG+APydpFa8skbcFj4rsHpTfMHzV4DfAh9ExD7ptjOBfpJOSS89aVNq9Z1vGBGLJXWXdD9J8/3BEVEt6Zsk/eoTs4w3z9QGvpdlo9YX4xckSfc3EfFvSf1JfnCPA/YD/gicHRFtYsBJ2vR8BrATcCVwS3oi0gUYDIwkSTzntJFkU3NyMgS4BpgMvAD8NiLel9QP2Ba4CBgXEedlF202JB0DbAa8EhG3S7qaJMHMAtoD3wZGRsSz2UWZjVq/NacA60bEFZK+AlwLnBsRt0o6GjgHGJq2yFkLcCJuhSTtC5wEbEHyQ3JkRPw73bYOSY15L+DMtlADlLQr8CeSZDuI5ATlZWBMRCxI99kdOCQivp9ZoCUmaQBJf97dJIllcLrppxGxON1ne+CYtlQuAJIOBS4gqQVfDPw/4BbgcGBf4D3g6raYhAtJOpmkH3hERLwmqSOwD8nJ7mMks0Ud3Zh+T6u/NtfH2NpJ2gK4DPh5ROxB0s93oaQ9JLWLiI8j4hZgR5ImyLbgs8ATETE1In4BPEAykORQSZum+2wLHCRpw7z2h0raUtIgSe0krU9SDjtHxIPAg8AdwAqSS7o2SP9sd2B/SetlE3VpSNpaUmX6fD9gf+D4iLiW5KT1MpKEcmNEHAl8py0mYUl710zBmH6GhgPfB5ZIOgn4Bcnlb1XAD4ADnYRbnhNxK1ArcSwEZtcsRMRF6fLtwG7pIK2epE1uJQyz5CR1T59OA7pJ+jJARPwVeJEkQfdL95lL0qe1OMdN031JmlY7RcR7JPPbfkHS8RHxCfA4cCfJNbI90795C/h6RHyQRcClIGkTklai9dNVW5HU5PaU1CVNuHsD10o6A6At9gmnNgfeT8vlPZLWlF8BN5J8n14DDiDpS1/gS5dKw4O1WoG0r+9LQL+IuEbSAuDzkl6PiLdIasV9gffTH9y5knaLiLezjLslFPR99gfuknRZRPxK0mRgsJIJGZ4iSTQzSa7ZeyAi/pld1KUREQ9I2pBkZPR1EXGzpMHAPyQREddLehR4pub62IhoCwNs3iZpgt5W0rci4meSPiLpxviipIcj4hklE+K0iTEVtdUMWouIsZL6AP9NxxdcS/J9mpmOuTgC+ApJblieYchtihNx6xHA+ZL+B/wM+B2wvaT3SX5QvpP+mCgSuUvCsPKkZAhwBMkozYskLSYZnLY/cGS67UxgU+BISZ+JiKVZxVxK6cjWa4ATJC1LB9QMJpmgol0k0++1iUkqCr4L1ZI+BnoDW0v6dkRcmY6nOBjoIOm+iJiVacAZScupZhT5KSQtJz8HRgPHRcRDaXfHCcB3gMMj4uPsIm57nIgzlvbdfRQRj0g6hGRQ0tkkyWYwSV/wORHxGKya/SaP0ib6zsD5wIURMUnS74G7gI4RcbWkW4F1gC8Bl5IMZMttEi5oIdiVpB/8kYgYk56gnZ1uvk3Snqxqmm0TCkb9bgm8ERET0pO2wyV9NyJ+nSbjfYF7s4w1SwXldBCwJ3BXRPxS0lLg5rQW/DTJgL9DI+K57KJtoyLCj4weJH0y40j6OSvSdV8i6fs9Iuv4MiyX3wOfLyiTI4BPasqE5ATyfGD7rGMtUXkcRHLJzaXAEyQnHwAHAlNJajA1+yrreEtQHnuSDFKDZBKTWcBNwPfSdV8muaTrh+ly56xjzvpB0jf8KjA2Xa75bn0L+B+wc1v47LTWhwdrlVjhwKyIeB6YA5wHfFZSZUT8C3gEuFxSL+V8pqia96dkIoFN0tXzSK4Z7pguzwJuBS6VtGtELI+In0YbOHOX9EWSy7YGk0xisjkwVNLRETEJGEXyAwvku8WkwBeAv0v6GvA54OvA9SSDs86NZLzABGBTSV2jDc4nXfg7k/YPLyC5bnofScdE2lQdyajyc0nGn7SFz06r5OuIS6igmXEPkps3zI+I+5VMSdgfuIJkJq2DSa5xzG2fVtqcqIiYLelAktreiyTJ93Dg1yTTEs4jmcBkGHAscHtEzMgm6tIo+JzsRjKY5nSSMQRXkyTkY4DvAhdExA2ZBVpiWn3GrFEkJygTIuIsJVMwbk9yUjsrIs6VtG60gevsa6v5/KTPDwP6AA9GxOPppV2XA7+KiNFZxmmruEZcQumP64EkP64DgeMl3UGShB4jubD+auCenCfh3iSXTfSUtC3JGfkpEXEISeL9C8mEJleS3OjiqyQ32x5GModyrhUk4fNJBuk9RjJq/pmIWAQ8StKn91iGYZZUrQFH3yJpBbgeOFjJPamXkpTJJcBWkjZqi0kYVusTHgn8iGRMxQRJwyPifpLm/J8qmbrSWgEP1mphkjYCNomI59PmokOBH0UyEKkDyTV8l6dn9esC60fEm4VntXmSlkEVSSKeA3wDeB14HiAiTlRya8NzIuLC9G92BK4i6QudnUXcGehMcp1wTVfFY8C5km4kOYn7bp5P1morSC41M0ENj4h56edptKQjI+I5SdOBYyPHA/jqIx28dzDJd2ampKdJJgaKiLhT0lEk3ztrBVwjbkHpiM0zgOMk7Zj+mHQk6eeD5JrGG0lHu0bEhxHxZvo8d0kYVr6v+0mmZnyGpP+3A8nsRzXuBAprMy8CX46IZ0oVZ9Yi4l7gaySfnUMj4mWS2cReAr4dyWxabYqS6ReHkLSgfJIm5fYkXRh/T79j1W09Cad2ILnW/uD08r5xJOMJrpE0NCIeCs8d3Wq4RtyCIuJjSX8jmfVnuKSFJDW70ZLmRcTdSqaZ207SxsCivCbgWpYAM0guzQqSMvmNkqn33gdOJJkbGID0h/WtDOLMVCSX4ywnaUZsHxF/JpnEpE2KiI8kTSJpfn6d5CYXc0iaqD8BPsowvFYh7fraLSJGSfoA2A04RNLtEfHX9PPUZlpSyoUHa7WQmoElSibm/ybJD8Uykktz+pHUhMeRTFJxZjoCtk1JB2w9APyQpKY3iKQv+B8RcV9em+cbStJwkuSzH/BmtJE7bhWTtjL1A16OiLfTftBjgCHRBqetrP0dUXIf84tIBmf9QtJxJJcmPQX8KZIb2Vsr40TcgiQNI2kOOpVkurhDgQ9IBiFtAHQhaa19MqsYs6ZkKsu/kNy+75qs42mtJG0SyXSnxsqb2B9LMnr88GiDN3AolF6m9XZ6OeBOJL87j6bJ+DSSO5ZdFBFLsozTinMibiGSOgE3A5fFqlsY7k7S71cN/KWt/3jUSFsNxpHMgDTbtWBbm3Rg46HA421p0FqNWpcofZmkhe2rEfGkkrtQDSS5BPCvEXGZpA0jvTWmtT4erNVyAtgYWA9WNlVPIRmgtBz3Z60UEdNJZkp61UnY6iO9NOkmJ2GdStKdM5ZkusqdIpnwZgrJlQj7pLVlJ+FWzIO1WkhEfCDpNpK7v8yNiFnpRB7DSOZRfjnjEFub97MOwMpLWz1pq3Up13Ekl3L9WdIS4HpJZ
5GMmu4AHBM5vUFMnjgRt6y/AqcAv1dye7pDgdPb0mU49dVWf1TNGqOOS7k6kMzMN4Jkpr7TPK6gPLiPuIUpubvSrsBmJP2fUzIOycxyQNJJJCf6hZdy9SS5jeonEeHurzLhRGxmVoaKXMp1BHACMLStTu9ZrpyIzczKmC/lKn/uIzYzK2/rACuAb7TFUeR54BqxmVmZ8yx05c2J2MzMLEOe0MPMzCxDTsRmZmYZciI2MzPLkBOxWQNIqpb0pKRnJd2e3nygscfaR9LE9PkwSeesYd/O6bzCDX2NUZLOru/6WvvcJGlEA16rtyRfOmPWQE7EZg3zUUTsEhE7ktxf+pTCjUo0+HsVEXdGxKVr2KUzye00zSxnnIjNGu9fQN+0JjhL0jXAdKCXpMGSHpM0Pa05dwKQdICk5yU9QnJLTNL1x0i6Kn2+maTxkp5KH18ALgW2Tmvjl6X7/T9J/5H0tKSfFBzrXEn/lXQ/sN3a3oSkE9PjPCXpjlq1/P0k/UvSC5IOSvdvJ+mygtc+uakFadaWORGbNUJ6z9chJLe1hCTh/Ski+gMfAD8G9ouIAcBU4Kx0SsI/AP8HfAnoVsfhrwQeioidgQHATOAckqkMd4mI/ydpMLANsBuwCzBQ0l6SBgKHkUz6/zWSec7X5q8RsWv6erOA4wu29Qb2BoYCv0vfw/HA4ojYNT3+iZK2qsfrmFkRnlnLrGE6Snoyff4v4HqgOzAnIh5P138e2B54VBIkd8V5DPgs8GpEvAgg6c/ASUVeYxBwFEBEVAOLJXWptc/g9DEjXe5EkpjXB8bXzDUs6c56vKcdJV1E0vzdCbinYNttEbECeFHSK+l7GAzsVNB/vGH62i/U47XMrBYnYrOG+SgidilckSbbD/5/e3evmkUQhmH4fhQRhaS0sdEYAoKFvY2HEAuLoEWIjV8RzAFo7TEYLNKJByAiYuEf0SYYbIwnkCJ2WtjIm2JH+FwUY4hMc1/l7sy8yzYPM8PuTF8CnlfV0qjdZeCo/qAT4H5VPRjVWDtEjQ1gsaq2kywDV6fujceqVnu1qqYDmyTn/rGuJFyalv6Hd8CVJPMASU4nWQA+AeeTXGjtlv7Q/wUwaX2PJ5kFvjLMdn96BqxM7T2fTXIGeAVcS3IqyQzDMvjfzAC7SU4AN0b3ric51p55DthptSetPUkW2nGfkg7BGbF0xKpqr80sHyU52S7frarP7QzZJ0m+AG+AS78Z4g6wnuQW8AOYVNVmkrft86CnbZ/4IrDZZuTfgJtVtZXkMfCB4Xza1wd45HvA+9b+I78G/g7wkuE87dtV9T3JQ4a9S/96DwAAAEJJREFU460MxfeAxYO9HUlj/mtakqSOXJqWJKkjg1iSpI4MYkmSOjKIJUnqyCCWJKkjg1iSpI4MYkmSOjKIJUnqaB9Q45UOUis2KQAAAABJRU5ErkJggg==\n", 200 | "text/plain": [ 201 | "
" 202 | ] 203 | }, 204 | "metadata": { 205 | "needs_background": "light" 206 | }, 207 | "output_type": "display_data" 208 | } 209 | ], 210 | "source": [ 211 | "np.set_printoptions(precision=2)\n", 212 | "\n", 213 | "fig2 = plt.figure(figsize=(7,6))\n", 214 | "plot_confusion_matrix(conf_mat, classes=classes, normalize = True, title='Normalized Confusion matrix')\n", 215 | "fig2.savefig('../cm_norm.jpg')\n", 216 | "plt.show()" 217 | ] 218 | }, 219 | { 220 | "cell_type": "code", 221 | "execution_count": null, 222 | "metadata": {}, 223 | "outputs": [], 224 | "source": [] 225 | } 226 | ], 227 | "metadata": { 228 | "kernelspec": { 229 | "display_name": "TF_GPU", 230 | "language": "python", 231 | "name": "tf_gpu" 232 | }, 233 | "language_info": { 234 | "codemirror_mode": { 235 | "name": "ipython", 236 | "version": 3 237 | }, 238 | "file_extension": ".py", 239 | "mimetype": "text/x-python", 240 | "name": "python", 241 | "nbconvert_exporter": "python", 242 | "pygments_lexer": "ipython3", 243 | "version": "3.7.5" 244 | } 245 | }, 246 | "nbformat": 4, 247 | "nbformat_minor": 2 248 | } 249 | -------------------------------------------------------------------------------- /Notebooks/wp_prediction_and_log-loss.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import pandas as pd" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 2, 16 | "metadata": {}, 17 | "outputs": [], 18 | "source": [ 19 | "from tensorflow.keras.models import load_model" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 3, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "img_width, img_height = 256, 256" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 4, 34 | "metadata": {}, 35 | "outputs": [], 36 | "source": [ 37 | "test_preprocessed_images = np.load('../test_preproc_resnet.npy')" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 5, 43 | "metadata": {}, 44 | "outputs": [], 45 | "source": [ 46 | "#Define Path\n", 47 | "#model_path = '../CNN_best_weights_256.h5'\n", 48 | "#model_path = '../CNN_augmentation_best_weights_256.h5'\n", 49 | "#model_path = '../vgg16_best_weights_256.h5'\n", 50 | "#model_path = '../vgg16_aug_best_weights_256.h5'\n", 51 | "#model_path = '../vgg16_drop_batch_best_weights_256.h5'\n", 52 | "#model_path = '../vgg19_drop_batch_best_weights_256.h5'\n", 53 | "model_path = '../resnet101_drop_batch_best_weights_256.h5'" 54 | ] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "execution_count": 6, 59 | "metadata": {}, 60 | "outputs": [ 61 | { 62 | "name": "stdout", 63 | "output_type": "stream", 64 | "text": [ 65 | "WARNING:tensorflow:From C:\\ProgramData\\Anaconda3\\envs\\tf_gpu\\lib\\site-packages\\tensorflow_core\\python\\ops\\init_ops.py:97: calling GlorotUniform.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n", 66 | "Instructions for updating:\n", 67 | "Call initializer instance with the dtype argument instead of passing it to the constructor\n", 68 | "WARNING:tensorflow:From C:\\ProgramData\\Anaconda3\\envs\\tf_gpu\\lib\\site-packages\\tensorflow_core\\python\\ops\\init_ops.py:97: calling Zeros.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n", 69 | "Instructions for updating:\n", 
70 | "Call initializer instance with the dtype argument instead of passing it to the constructor\n", 71 | "WARNING:tensorflow:From C:\\ProgramData\\Anaconda3\\envs\\tf_gpu\\lib\\site-packages\\tensorflow_core\\python\\ops\\init_ops.py:97: calling Ones.__init__ (from tensorflow.python.ops.init_ops) with dtype is deprecated and will be removed in a future version.\n", 72 | "Instructions for updating:\n", 73 | "Call initializer instance with the dtype argument instead of passing it to the constructor\n", 74 | "WARNING:tensorflow:From C:\\ProgramData\\Anaconda3\\envs\\tf_gpu\\lib\\site-packages\\tensorflow_core\\python\\ops\\resource_variable_ops.py:1630: calling BaseResourceVariable.__init__ (from tensorflow.python.ops.resource_variable_ops) with constraint is deprecated and will be removed in a future version.\n", 75 | "Instructions for updating:\n", 76 | "If using Keras pass *_constraint arguments to layers.\n" 77 | ] 78 | } 79 | ], 80 | "source": [ 81 | "#Load the pre-trained models\n", 82 | "model = load_model(model_path)" 83 | ] 84 | }, 85 | { 86 | "cell_type": "code", 87 | "execution_count": 7, 88 | "metadata": {}, 89 | "outputs": [ 90 | { 91 | "name": "stdout", 92 | "output_type": "stream", 93 | "text": [ 94 | "30/30 [==============================] - 13s 437ms/sample\n" 95 | ] 96 | } 97 | ], 98 | "source": [ 99 | "#Prediction Function\n", 100 | "y_pred = model.predict(test_preprocessed_images, batch_size=1, verbose=1)\n", 101 | "#y_pred = np.argmax(array, axis=1)" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": 8, 107 | "metadata": {}, 108 | "outputs": [], 109 | "source": [ 110 | "test_df = pd.read_csv('../dataset/test.csv')\n", 111 | "y_true = test_df['labels']" 112 | ] 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": 9, 117 | "metadata": {}, 118 | "outputs": [ 119 | { 120 | "name": "stdout", 121 | "output_type": "stream", 122 | "text": [ 123 | "0.08073835350865105\n" 124 | ] 125 | } 126 | ], 127 | "source": [ 128 | "from sklearn.metrics import log_loss\n", 129 | "loss = log_loss(y_true, y_pred, eps=1e-15, normalize=True, sample_weight=None, labels=None)\n", 130 | "print(loss)" 131 | ] 132 | } 133 | ], 134 | "metadata": { 135 | "kernelspec": { 136 | "display_name": "TF_GPU", 137 | "language": "python", 138 | "name": "tf_gpu" 139 | }, 140 | "language_info": { 141 | "codemirror_mode": { 142 | "name": "ipython", 143 | "version": 3 144 | }, 145 | "file_extension": ".py", 146 | "mimetype": "text/x-python", 147 | "name": "python", 148 | "nbconvert_exporter": "python", 149 | "pygments_lexer": "ipython3", 150 | "version": "3.7.5" 151 | } 152 | }, 153 | "nbformat": 4, 154 | "nbformat_minor": 2 155 | } 156 | -------------------------------------------------------------------------------- /Notebooks/wp_vgg_resnet_test_preprocess.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import pandas as pd" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 2, 16 | "metadata": {}, 17 | "outputs": [ 18 | { 19 | "name": "stderr", 20 | "output_type": "stream", 21 | "text": [ 22 | "Using TensorFlow backend.\n" 23 | ] 24 | } 25 | ], 26 | "source": [ 27 | "from keras.preprocessing.image import load_img, img_to_array" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 3, 33 | "metadata": {}, 34 | "outputs": [], 35 | 
"source": [ 36 | "#from tensorflow.keras.applications.vgg16 import preprocess_input\n", 37 | "from tensorflow.keras.applications.resnet import preprocess_input" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 4, 43 | "metadata": {}, 44 | "outputs": [], 45 | "source": [ 46 | "img_width, img_height = 256, 256" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": 5, 52 | "metadata": {}, 53 | "outputs": [], 54 | "source": [ 55 | "def preprocess_image(path):\n", 56 | " img = load_img(path, target_size = (img_height, img_width))\n", 57 | " a = img_to_array(img)\n", 58 | " a = np.expand_dims(a, axis = 0)\n", 59 | " a = preprocess_input(a)\n", 60 | " return a" 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": 6, 66 | "metadata": {}, 67 | "outputs": [], 68 | "source": [ 69 | "test_images_dir = '../dataset/alien_test/'" 70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": 7, 75 | "metadata": {}, 76 | "outputs": [], 77 | "source": [ 78 | "test_df = pd.read_csv('../dataset/test.csv')\n", 79 | "\n", 80 | "test_dfToList = test_df['Image_id'].tolist()\n", 81 | "test_ids = [str(item) for item in test_dfToList]" 82 | ] 83 | }, 84 | { 85 | "cell_type": "code", 86 | "execution_count": 8, 87 | "metadata": {}, 88 | "outputs": [], 89 | "source": [ 90 | "test_images = [test_images_dir+item for item in test_ids]\n", 91 | "test_preprocessed_images = np.vstack([preprocess_image(fn) for fn in test_images])" 92 | ] 93 | }, 94 | { 95 | "cell_type": "code", 96 | "execution_count": 9, 97 | "metadata": {}, 98 | "outputs": [], 99 | "source": [ 100 | "np.save('../test_preproc_resnet.npy', test_preprocessed_images)" 101 | ] 102 | } 103 | ], 104 | "metadata": { 105 | "kernelspec": { 106 | "display_name": "TF_GPU", 107 | "language": "python", 108 | "name": "tf_gpu" 109 | }, 110 | "language_info": { 111 | "codemirror_mode": { 112 | "name": "ipython", 113 | "version": 3 114 | }, 115 | "file_extension": ".py", 116 | "mimetype": "text/x-python", 117 | "name": "python", 118 | "nbconvert_exporter": "python", 119 | "pygments_lexer": "ipython3", 120 | "version": "3.7.5" 121 | } 122 | }, 123 | "nbformat": 4, 124 | "nbformat_minor": 2 125 | } 126 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Multi class Weather Classification 2 |

Time and again, unfortunate accidents due to inclement weather conditions across the globe have surfaced. Ship collisions, train derailments, plane crashes and car accidents are some of the tragic incidents that have made headlines in recent times. This grave problem of safety and security in adverse conditions has drawn the attention of society, and numerous studies have been carried out in the past to expose how vulnerable transportation services are to weather conditions. 3 | In the past, weather-controlled driving speeds and behaviours have been proposed. With the advancement of technology and the emergence of intelligent transportation as a new field, automated determination of the weather condition has become more relevant. Present systems rely either on a series of expensive sensors or on human assistance to identify the weather conditions. In recent years, researchers have therefore looked at more economical solutions and have channelled their work towards computer vision techniques that classify the weather condition from a single image. Assessing the weather condition from a single image is a straightforward task for humans. Nevertheless, it is considerably harder for an autonomous system, and designing a decent weather classifier that receives a single image as input would represent an important achievement. 4 | 5 | 6 |

The aim of this capstone project is to build a convolutional neural network that classifies different weather conditions while working reasonably well under computational constraints. The work described in this report makes two contributions to the field of weather classification. The first is exploring data augmentation techniques and different Convolutional Neural Network (CNN) architectures for feature extraction when classifying outdoor scenes in a multi-class setting using general-purpose images. The second contribution is the creation of a new, open-source dataset consisting of images collected online that depict scenes of five weather conditions. 7 | 8 |
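As a rough illustration of the first contribution, the sketch below shows the kind of Keras `ImageDataGenerator` augmentation used in this repository's training scripts (random rotations, zooms and horizontal flips); the parameter values mirror `scripts/wp_training_CNN_aug.py`, while the directory path and image size are assumptions based on the rest of the repo.

```python
from tensorflow.keras.preprocessing.image import ImageDataGenerator

# Augmentation settings mirroring scripts/wp_training_CNN_aug.py:
# pixel rescaling plus random rotation, zoom and horizontal flip.
train_datagen = ImageDataGenerator(rescale=1 / 255.0,
                                   rotation_range=30,
                                   zoom_range=0.4,
                                   horizontal_flip=True)

# Assumed layout: one sub-folder per weather class, as produced by the
# wp_EDA_preprocessing split described later in this README.
train_generator = train_datagen.flow_from_directory('../weather_pred/Data/training/',
                                                    target_size=(256, 256),
                                                    batch_size=16,
                                                    class_mode='categorical')
```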

Transfer learning with the ResNet and VGG neural network architectures is applied to this multi-class weather classification problem, using a dataset of Creative Commons-licensed images collected online from Flickr, Unsplash and Pexels. The final model (ResNet101) yields a log-loss of 0.080738 on the test data, with a validation accuracy of 96.67%. 9 | 10 |
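For reference, here is a minimal sketch of how a log-loss figure like the one above is obtained, following the prediction scripts in `scripts/`; it assumes the preprocessed test images, the `test.csv` labels and the saved ResNet101 weights from this repository are available at the paths used by those scripts.

```python
import numpy as np
import pandas as pd
from sklearn.metrics import log_loss
from tensorflow.keras.models import load_model

# Load the preprocessed test images and the trained ResNet101 model
# (paths follow scripts/wp_prediction_and_log_loss.py).
test_images = np.load('../test_preproc_resnet.npy')
model = load_model('../resnet101_drop_batch_best_weights_256.h5')

# Predicted class probabilities (softmax outputs) for each test image.
y_prob = model.predict(test_images, batch_size=1, verbose=1)

# Ground-truth labels from the test CSV.
y_true = pd.read_csv('../dataset/test.csv')['labels']

# Multi-class log-loss, as reported above.
print('log-loss:', log_loss(y_true, y_prob))
```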

11 | 12 |
13 | 14 | The final report with model visualizations and validation plots is available [here](https://github.com/vijayg15/Multi-class-Weather-Classification/blob/master/wp_project_report.pdf). 15 | 16 | 17 | # Dependencies : 18 | - Python 3.5+ (the notebooks in this repo were run with Python 3.7.5) 19 | - keras `conda install keras` 20 | - tensorflow `conda install tensorflow` 21 | 22 | Note that keras and tensorflow have their own dependencies. I recommend using [Anaconda](https://www.anaconda.com/) for handling the packages. 23 | 24 | # Dataset : 25 | The Weather dataset can be downloaded from [Kaggle](https://www.kaggle.com/vijaygiitk/multiclass-weather-dataset), where I have uploaded it. 26 |
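After downloading and extracting the dataset, a quick sanity check is to count the images per class. The five class folder names below are the ones used throughout this repository's scripts; the local `../dataset/` path is an assumption and should match wherever you extracted the archive.

```python
import os

# Class folders used by the scripts in this repository.
classes = ['cloudy', 'foggy', 'rainy', 'shine', 'sunrise']

dataset_dir = '../dataset/'  # assumed local path to the extracted dataset
for c in classes:
    n_images = len(os.listdir(os.path.join(dataset_dir, c)))
    print(c, ':', n_images, 'images')
```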

27 | 28 |

29 | 30 | # Usage : 31 | - Use the wp_EDA_preprocessing notebook to split the data into separate training and validation folders for each class (a minimal sketch of this split is shown below). 32 |
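A minimal sketch of that split, mirroring the `split_data` helper in `scripts/wp_EDA_preprocessing.py` (a random 85% of each class folder is copied into `training/` and the remainder into `validation/`). The paths are assumptions and the destination folders are assumed to already exist (the notebook creates them with `os.mkdir`).

```python
import os
import random
from shutil import copyfile

def split_data(source, training, validation, split_size=0.85):
    """Copy a random split_size fraction of source into training, the rest into validation."""
    # Skip zero-length files, as the original notebook does.
    files = [f for f in os.listdir(source) if os.path.getsize(os.path.join(source, f)) > 0]
    random.shuffle(files)
    n_train = int(len(files) * split_size)
    for name in files[:n_train]:
        copyfile(os.path.join(source, name), os.path.join(training, name))
    for name in files[n_train:]:
        copyfile(os.path.join(source, name), os.path.join(validation, name))

# Example for one class; repeat for foggy, rainy, shine and sunrise.
split_data('../dataset/cloudy/',
           '../weather_pred/Data/training/cloudy/',
           '../weather_pred/Data/validation/cloudy/')
```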

33 | 34 |

35 | 36 | - Read the [wp_project_report](https://github.com/vijayg15/Multi-class-Weather-Classification/blob/master/wp_project_report.pdf) report for understanding the approaches taken in solving this problem 37 | - Notebooks are under the [Notebooks folder](https://github.com/vijayg15/Multi-class-Weather-Classification/tree/master/Notebooks) and scripts are under the [scripts folder](https://github.com/vijayg15/Multi-class-Weather-Classification/tree/master/scripts). 38 | - All the model diagrams and performance charts are provided. 39 | - The [model weights](https://github.com/vijayg15/Multi-class-Weather-Classification/releases) can be downloaded [here](https://github.com/vijayg15/Multi-class-Weather-Classification/releases) to predict the weather condition. 40 | -------------------------------------------------------------------------------- /figures_and_plots/Accuracy_curve_CNN.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/Accuracy_curve_CNN.jpg -------------------------------------------------------------------------------- /figures_and_plots/Accuracy_curve_CNN_augmentation.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/Accuracy_curve_CNN_augmentation.jpg -------------------------------------------------------------------------------- /figures_and_plots/Accuracy_curve_drop_batch.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/Accuracy_curve_drop_batch.jpg -------------------------------------------------------------------------------- /figures_and_plots/Architecture-of-the-Residual-Network.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/Architecture-of-the-Residual-Network.png -------------------------------------------------------------------------------- /figures_and_plots/Architecture-of-the-VGG-Convolutional-Neural-Network.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/Architecture-of-the-VGG-Convolutional-Neural-Network.png -------------------------------------------------------------------------------- /figures_and_plots/Log_loss_graph.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/Log_loss_graph.png -------------------------------------------------------------------------------- /figures_and_plots/Loss_curve_CNN.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/Loss_curve_CNN.jpg 
-------------------------------------------------------------------------------- /figures_and_plots/Loss_curve_CNN_augmentation.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/Loss_curve_CNN_augmentation.jpg -------------------------------------------------------------------------------- /figures_and_plots/Loss_curve_drop_batch.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/Loss_curve_drop_batch.jpg -------------------------------------------------------------------------------- /figures_and_plots/cm_norm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/cm_norm.png -------------------------------------------------------------------------------- /figures_and_plots/cm_wo_norm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/cm_wo_norm.png -------------------------------------------------------------------------------- /figures_and_plots/fol.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/fol.jpg -------------------------------------------------------------------------------- /figures_and_plots/fol_train_val.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/fol_train_val.jpg -------------------------------------------------------------------------------- /figures_and_plots/kde_cloudy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/kde_cloudy.png -------------------------------------------------------------------------------- /figures_and_plots/kde_foggy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/kde_foggy.png -------------------------------------------------------------------------------- /figures_and_plots/kde_rainy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/kde_rainy.png -------------------------------------------------------------------------------- /figures_and_plots/kde_shine.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/kde_shine.png -------------------------------------------------------------------------------- /figures_and_plots/kde_sunrise.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/kde_sunrise.png -------------------------------------------------------------------------------- /figures_and_plots/nimg_bar.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/nimg_bar.png -------------------------------------------------------------------------------- /figures_and_plots/nimg_dist_training.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/nimg_dist_training.png -------------------------------------------------------------------------------- /figures_and_plots/nimg_dist_validation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/nimg_dist_validation.png -------------------------------------------------------------------------------- /figures_and_plots/randm_data_aug1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/randm_data_aug1.png -------------------------------------------------------------------------------- /figures_and_plots/randm_data_aug2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/randm_data_aug2.png -------------------------------------------------------------------------------- /figures_and_plots/randm_data_aug3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/randm_data_aug3.png -------------------------------------------------------------------------------- /figures_and_plots/randm_data_aug4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/randm_data_aug4.png -------------------------------------------------------------------------------- /figures_and_plots/randm_data_aug5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/figures_and_plots/randm_data_aug5.png -------------------------------------------------------------------------------- /preprocessed_test_images/test_preproc_CNN.npy: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/preprocessed_test_images/test_preproc_CNN.npy -------------------------------------------------------------------------------- /preprocessed_test_images/test_preproc_resnet.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/preprocessed_test_images/test_preproc_resnet.npy -------------------------------------------------------------------------------- /preprocessed_test_images/test_preproc_vgg16.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/preprocessed_test_images/test_preproc_vgg16.npy -------------------------------------------------------------------------------- /preprocessed_test_images/test_preproc_vgg19.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/preprocessed_test_images/test_preproc_vgg19.npy -------------------------------------------------------------------------------- /scripts/wp_CNN_test_preprocess.py: -------------------------------------------------------------------------------- 1 | # In[1]: 2 | import numpy as np 3 | import pandas as pd 4 | 5 | # In[2]: 6 | from keras.preprocessing.image import load_img, img_to_array 7 | 8 | # In[3]: 9 | 10 | img_width, img_height = 256, 256 11 | 12 | # In[4]: 13 | def preprocess_image(path): 14 | img = load_img(path, target_size = (img_height, img_width)) 15 | a = img_to_array(img) 16 | a = np.expand_dims(a, axis = 0) 17 | a /= 255. 
18 | return a 19 | 20 | # In[5]: 21 | test_images_dir = '../dataset/alien_test/' 22 | 23 | # In[6]: 24 | test_df = pd.read_csv('../dataset/test.csv') 25 | 26 | test_dfToList = test_df['Image_id'].tolist() 27 | test_ids = [str(item) for item in test_dfToList] 28 | 29 | # In[7]: 30 | 31 | test_images = [test_images_dir+item for item in test_ids] 32 | test_preprocessed_images = np.vstack([preprocess_image(fn) for fn in test_images]) 33 | 34 | # In[8]: 35 | np.save('../test_preproc_CNN.npy', test_preprocessed_images) 36 | 37 | 38 | -------------------------------------------------------------------------------- /scripts/wp_EDA_preprocessing.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | # In[1]: 5 | 6 | 7 | import os 8 | import random 9 | from shutil import copyfile 10 | 11 | 12 | # In[2]: 13 | 14 | 15 | import numpy as np 16 | import pandas as pd 17 | 18 | 19 | # In[3]: 20 | 21 | 22 | import matplotlib.pyplot as plt 23 | import seaborn as sns 24 | from matplotlib.image import imread 25 | import pathlib 26 | 27 | 28 | # In[4]: 29 | 30 | 31 | image_folder = ['cloudy', 'foggy', 'rainy', 'shine', 'sunrise'] 32 | nimgs = {} 33 | for i in image_folder: 34 | nimages = len(os.listdir('../dataset/'+i+'/')) 35 | nimgs[i]=nimages 36 | plt.figure(figsize=(10, 8)) 37 | plt.bar(range(len(nimgs)), list(nimgs.values()), align='center') 38 | plt.xticks(range(len(nimgs)), list(nimgs.keys())) 39 | plt.title('Distribution of different classes of Dataset') 40 | plt.show() 41 | 42 | 43 | # In[5]: 44 | 45 | 46 | image_folder = ['cloudy', 'foggy', 'rainy', 'shine', 'sunrise'] 47 | 48 | for i in image_folder: 49 | sample_images = list(pathlib.Path('../dataset/'+i+'/').rglob('*/')) 50 | np.random.seed(42) 51 | rand_imgs = np.random.choice(sample_images, size=10*10) 52 | 53 | shapes = [] 54 | for img in rand_imgs: 55 | shapes.append(imread(str(img)).shape) 56 | 57 | shapes = pd.DataFrame().assign(X=pd.Series(shapes).map(lambda s: s[0]), Y=pd.Series(shapes).map(lambda s: s[1])) 58 | 59 | plt.figure(figsize=(12, 8)) 60 | sns.set_context("notebook", font_scale=1.5) 61 | sns.kdeplot(shapes['X'], bw=75) 62 | sns.kdeplot(shapes['Y'], bw=75) 63 | plt.title('Distribution of {}_image Sizes'.format(i)) 64 | ax = plt.gca() 65 | ax.set_xlim(0, ax.get_xlim()[1]) 66 | 67 | 68 | # In[ ]: 69 | 70 | 71 | try: 72 | os.mkdir('../weather_pred/Data') 73 | os.mkdir('../weather_pred/Data/training') 74 | os.mkdir('..weather_pred/Data/validation') 75 | os.mkdir('../weather_pred/Data/training/cloudy') 76 | os.mkdir('../weather_pred/Data/training/foggy') 77 | os.mkdir('../weather_pred/Data/training/rainy') 78 | os.mkdir('../weather_pred/Data/training/shine') 79 | os.mkdir('../weather_pred/Data/training/sunrise') 80 | os.mkdir('../weather_pred/Data/validation/cloudy') 81 | os.mkdir('../weather_pred/Data/validation/foggy') 82 | os.mkdir('../weather_pred/Data/validation/rainy') 83 | os.mkdir('../weather_pred/Data/validation/shine') 84 | os.mkdir('../weather_pred/Data/validation/sunrise') 85 | except OSError: 86 | pass 87 | 88 | 89 | # In[ ]: 90 | 91 | 92 | def split_data(SOURCE, TRAINING, VALIDATION, SPLIT_SIZE): 93 | files = [] 94 | for filename in os.listdir(SOURCE): 95 | file = SOURCE + filename 96 | if os.path.getsize(file) > 0: 97 | files.append(filename) 98 | else: 99 | print(filename + " is zero length, so ignoring.") 100 | 101 | training_length = int(len(files) * SPLIT_SIZE) 102 | valid_length = int(len(files) - training_length) 103 | shuffled_set = 
random.sample(files, len(files)) 104 | training_set = shuffled_set[0:training_length] 105 | valid_set = shuffled_set[training_length:] 106 | 107 | for filename in training_set: 108 | this_file = SOURCE + filename 109 | destination = TRAINING + filename 110 | copyfile(this_file, destination) 111 | 112 | for filename in valid_set: 113 | this_file = SOURCE + filename 114 | destination = VALIDATION + filename 115 | copyfile(this_file, destination) 116 | 117 | 118 | # In[ ]: 119 | 120 | 121 | CLOUDY_SOURCE_DIR = '.../dataset/cloudy/' 122 | TRAINING_CLOUDY_DIR = '.../weather_pred/Data/training/cloudy/' 123 | VALID_CLOUDY_DIR = '.../weather_pred/Data/validation/cloudy/' 124 | 125 | FOGGY_SOURCE_DIR = '.../dataset/foggy/' 126 | TRAINING_FOGGY_DIR = '../weather_pred/Data/training/foggy/' 127 | VALID_FOGGY_DIR = '.../weather_pred/Data/validation/foggy/' 128 | 129 | RAINY_SOURCE_DIR = '.../dataset/rainy/' 130 | TRAINING_RAINY_DIR = '.../weather_pred/Data/training/rainy/' 131 | VALID_RAINY_DIR = '.../weather_pred/Data/validation/rainy/' 132 | 133 | SHINE_SOURCE_DIR = '.../dataset/shine/' 134 | TRAINING_SHINE_DIR = '.../weather_pred/Data/training/shine/' 135 | VALID_SHINE_DIR = '.../weather_pred/Data/validation/shine/' 136 | 137 | SUNRISE_SOURCE_DIR = '.../dataset/sunrise/' 138 | TRAINING_SUNRISE_DIR = '.../weather_pred/Data/training/sunrise/' 139 | VALID_SUNRISE_DIR = '.../weather_pred/Data/validation/sunrise/' 140 | 141 | 142 | # In[ ]: 143 | 144 | 145 | split_size = .85 146 | 147 | 148 | # In[ ]: 149 | 150 | 151 | split_data(CLOUDY_SOURCE_DIR, TRAINING_CLOUDY_DIR, VALID_CLOUDY_DIR, split_size) 152 | split_data(FOGGY_SOURCE_DIR, TRAINING_FOGGY_DIR, VALID_FOGGY_DIR, split_size) 153 | split_data(RAINY_SOURCE_DIR, TRAINING_RAINY_DIR, VALID_RAINY_DIR, split_size) 154 | split_data(SHINE_SOURCE_DIR, TRAINING_SHINE_DIR, VALID_SHINE_DIR, split_size) 155 | split_data(SUNRISE_SOURCE_DIR, TRAINING_SUNRISE_DIR, VALID_SUNRISE_DIR, split_size) 156 | 157 | 158 | # In[6]: 159 | 160 | 161 | image_folder = ['cloudy', 'foggy', 'rainy', 'shine', 'sunrise'] 162 | nimgs = {} 163 | for i in image_folder: 164 | nimages = len(os.listdir('../weather_pred/Data/training/'+i+'/')) 165 | nimgs[i]=nimages 166 | plt.figure(figsize=(9, 6)) 167 | plt.bar(range(len(nimgs)), list(nimgs.values()), align='center') 168 | plt.xticks(range(len(nimgs)), list(nimgs.keys())) 169 | plt.title('Distribution of different classes in Training Dataset') 170 | plt.show() 171 | 172 | 173 | # In[7]: 174 | 175 | 176 | for i in ['cloudy', 'foggy', 'rainy', 'shine', 'sunrise']: 177 | print('Training {} images are: '.format(i)+str(len(os.listdir('../weather_pred/Data/training/'+i+'/')))) 178 | 179 | 180 | # In[8]: 181 | 182 | 183 | image_folder = ['cloudy', 'foggy', 'rainy', 'shine', 'sunrise'] 184 | nimgs = {} 185 | for i in image_folder: 186 | nimages = len(os.listdir('../weather_pred/Data/validation/'+i+'/')) 187 | nimgs[i]=nimages 188 | plt.figure(figsize=(9, 6)) 189 | plt.bar(range(len(nimgs)), list(nimgs.values()), align='center') 190 | plt.xticks(range(len(nimgs)), list(nimgs.keys())) 191 | plt.title('Distribution of different classes in Validation Dataset') 192 | plt.show() 193 | 194 | 195 | # In[9]: 196 | 197 | 198 | for i in ['cloudy', 'foggy', 'rainy', 'shine', 'sunrise']: 199 | print('Valid {} images are: '.format(i)+str(len(os.listdir('../weather_pred/Data/validation/'+i+'/')))) 200 | 201 | 202 | # In[ ]: 203 | 204 | 205 | 206 | 207 | -------------------------------------------------------------------------------- 
/scripts/wp_confusion_matrix.py: -------------------------------------------------------------------------------- 1 | # In[1]: 2 | 3 | import os 4 | import numpy as np 5 | import pandas as pd 6 | import matplotlib.pyplot as plt 7 | 8 | # In[2]: 9 | 10 | from tensorflow.keras.models import load_model 11 | 12 | # In[3]: 13 | 14 | test_preprocessed_images = np.load('D../test_preproc_resnet.npy') 15 | 16 | # In[4]: 17 | model_path = '../resnet101_drop_batch_best_weights_256.h5' 18 | 19 | #Load the pre-trained models 20 | model = load_model(model_path) 21 | 22 | # In[5]: 23 | #Prediction Function 24 | array = model.predict(test_preprocessed_images, batch_size=1, verbose=1) 25 | y_pred = np.argmax(array, axis=1) 26 | 27 | # In[6]: 28 | test_df = pd.read_csv('../dataset/test.csv') 29 | y_true = test_df['labels'] 30 | 31 | # In[7]: 32 | from sklearn.metrics import confusion_matrix 33 | conf_mat = confusion_matrix(y_true, y_pred) 34 | 35 | # In[8]: 36 | train_dir = '../weather_pred/Data/training/' 37 | classes = os.listdir(train_dir) 38 | 39 | # In[9]: 40 | 41 | import itertools 42 | def plot_confusion_matrix(cm, classes, 43 | normalize=False, 44 | title='Confusion matrix', 45 | cmap=plt.cm.Reds): 46 | """ 47 | This function prints and plots the confusion matrix. 48 | Normalization can be applied by setting `normalize=True`. 49 | """ 50 | plt.imshow(cm, interpolation='nearest', cmap=cmap) 51 | plt.title(title) 52 | plt.colorbar() 53 | tick_marks = np.arange(len(classes)) 54 | plt.xticks(tick_marks, classes, rotation=45) 55 | plt.yticks(tick_marks, classes) 56 | 57 | if normalize: 58 | cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis] 59 | cm = cm.round(2) 60 | print("Normalized confusion matrix") 61 | else: 62 | print('Confusion matrix, without normalization') 63 | 64 | print(cm) 65 | 66 | thresh = cm.max() / 2. 
67 | for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])): 68 | plt.text(j, i, cm[i, j], 69 | horizontalalignment="center", 70 | color="white" if cm[i, j] > thresh else "black") 71 | 72 | plt.tight_layout() 73 | plt.ylabel('True label') 74 | plt.xlabel('Predicted label') 75 | 76 | # In[10]: 77 | np.set_printoptions(precision=2) 78 | 79 | fig1 = plt.figure(figsize=(7,6)) 80 | plot_confusion_matrix(conf_mat, classes=classes, title='Confusion matrix, without normalization') 81 | fig1.savefig('../cm_wo_norm.jpg') 82 | plt.show() 83 | 84 | # In[11]: 85 | np.set_printoptions(precision=2) 86 | 87 | fig2 = plt.figure(figsize=(7,6)) 88 | plot_confusion_matrix(conf_mat, classes=classes, normalize = True, title='Normalized Confusion matrix') 89 | fig2.savefig('../cm_norm.jpg') 90 | plt.show() 91 | 92 | 93 | -------------------------------------------------------------------------------- /scripts/wp_prediction_and_log_loss.py: -------------------------------------------------------------------------------- 1 | # In[1]: 2 | import numpy as np 3 | import pandas as pd 4 | 5 | # In[2]: 6 | 7 | from tensorflow.keras.models import load_model 8 | 9 | # In[3]: 10 | img_width, img_height = 256, 256 11 | 12 | # In[4]: 13 | test_preprocessed_images = np.load('../test_preproc_CNN.npy') 14 | 15 | # In[5]: 16 | #Define Path 17 | model_path = '../CNN_best_weights_256.h5' 18 | #model_path = '../CNN_augmentation_best_weights_256.h5' 19 | #model_path = '../vgg16_best_weights_256.h5' 20 | #model_path = '../vgg16_drop_batch_best_weights_256.h5' 21 | #model_path = '../vgg19_drop_batch_best_weights_256.h5' 22 | #model_path = '../resnet101_drop_batch_best_weights_256.h5' 23 | 24 | #Load the pre-trained models 25 | model = load_model(model_path) 26 | 27 | 28 | # In[6]: 29 | #Prediction Function 30 | array = model.predict(test_preprocessed_images, batch_size=1, verbose=1) 31 | answer = np.argmax(array, axis=1) 32 | 33 | # In[7]: 34 | test_df = pd.read_csv('../dataset/test.csv') 35 | y_true = test_df['labels'] 36 | y_pred = array 37 | 38 | # In[8]: 39 | from sklearn.metrics import log_loss 40 | loss = log_loss(y_true, y_pred, eps=1e-15, normalize=True, sample_weight=None, labels=None) 41 | 42 | 43 | -------------------------------------------------------------------------------- /scripts/wp_training_CNN.py: -------------------------------------------------------------------------------- 1 | # In[1]: 2 | import os 3 | import matplotlib.pyplot as plt 4 | 5 | # In[2]: 6 | import tensorflow as tf 7 | from tensorflow.keras.optimizers import Adam 8 | from tensorflow.keras.preprocessing.image import ImageDataGenerator 9 | 10 | from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense 11 | 12 | from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint 13 | 14 | # In[3]: 15 | 16 | 17 | # In[4]: 18 | 19 | img_width=256; img_height=256 20 | batch_size=16 21 | 22 | # In[5]: 23 | TRAINING_DIR = '../weather_pred/Data/training/' 24 | 25 | train_datagen = ImageDataGenerator(rescale = 1/255.0) 26 | 27 | # In[6]: 28 | 29 | train_generator = train_datagen.flow_from_directory(TRAINING_DIR, 30 | batch_size=batch_size, 31 | class_mode='categorical', 32 | target_size=(img_height, img_width) 33 | ) 34 | 35 | # In[7]: 36 | 37 | VALIDATION_DIR = '../weather_pred/Data/validation/' 38 | 39 | validation_datagen = ImageDataGenerator(rescale = 1/255.0) 40 | 41 | # In[8]: 42 | 43 | validation_generator = validation_datagen.flow_from_directory(VALIDATION_DIR, 44 | batch_size=batch_size, 45 | 
class_mode='categorical', 46 | target_size=(img_height, img_width) 47 | ) 48 | 49 | # In[9]: 50 | callbacks = EarlyStopping(monitor='val_loss', patience=5, verbose=1, mode='auto') 51 | best_model_file = '../CNN_best_weights_256.h5' 52 | best_model = ModelCheckpoint(best_model_file, monitor='val_acc', verbose = 1, save_best_only = True) 53 | 54 | # In[10]: 55 | model = tf.keras.models.Sequential([ 56 | Conv2D(16, (3, 3), activation='relu', input_shape=(img_height, img_width, 3)), 57 | MaxPooling2D(2, 2), 58 | 59 | Conv2D(32, (3, 3), activation='relu'), 60 | MaxPooling2D(2, 2), 61 | 62 | Conv2D(64, (3, 3), activation='relu'), 63 | Conv2D(64, (3, 3), activation='relu'), 64 | MaxPooling2D(2, 2), 65 | 66 | Conv2D(128, (3, 3), activation='relu'), 67 | Conv2D(128, (3, 3), activation='relu'), 68 | MaxPooling2D(2, 2), 69 | 70 | Conv2D(256, (3, 3), activation='relu'), 71 | Conv2D(256, (3, 3), activation='relu'), 72 | Conv2D(256, (3, 3), activation='relu'), 73 | MaxPooling2D(2, 2), 74 | 75 | Flatten(), 76 | Dense(512, activation='relu'), 77 | Dense(512, activation='relu'), 78 | Dense(5, activation='softmax') 79 | ]) 80 | # In[11]: 81 | model.summary() 82 | # In[12]: 83 | 84 | model.compile(optimizer='Adam', loss='categorical_crossentropy', metrics =['accuracy']) 85 | 86 | # In[13]: 87 | history = model.fit_generator(train_generator, 88 | epochs=30, 89 | verbose=1, 90 | validation_data=validation_generator, 91 | callbacks = [best_model, callbacks] 92 | ) 93 | # In[14]: 94 | target_dir = '../weather_pred/' 95 | if not os.path.exists(target_dir): 96 | os.mkdir(target_dir) 97 | model.save(target_dir + 'CNN_model_256.h5') 98 | model.save_weights(target_dir + 'CNN_weights_256.h5') 99 | 100 | # In[15]: 101 | 102 | acc=history.history['acc'] 103 | val_acc=history.history['val_acc'] 104 | loss=history.history['loss'] 105 | val_loss=history.history['val_loss'] 106 | 107 | epochs=range(len(acc)) 108 | 109 | # In[16]: 110 | 111 | fig = plt.figure(figsize=(20,10)) 112 | plt.plot(epochs, acc, 'r', label="Training Accuracy") 113 | plt.plot(epochs, val_acc, 'b', label="Validation Accuracy") 114 | plt.xlabel('Epoch') 115 | plt.ylabel('Accuracy') 116 | plt.title('Training and validation accuracy') 117 | plt.legend(loc='lower right') 118 | plt.show() 119 | fig.savefig('../Accuracy_curve_CNN_256.jpg') 120 | 121 | # In[17]: 122 | 123 | fig2 = plt.figure(figsize=(20,10)) 124 | plt.plot(epochs, loss, 'r', label="Training Loss") 125 | plt.plot(epochs, val_loss, 'b', label="Validation Loss") 126 | plt.legend(loc='upper right') 127 | plt.xlabel('Epoch') 128 | plt.ylabel('Loss') 129 | plt.title('Training and validation loss') 130 | fig2.savefig('../Loss_curve_CNN_256.jpg') 131 | 132 | -------------------------------------------------------------------------------- /scripts/wp_training_CNN_aug.py: -------------------------------------------------------------------------------- 1 | # In[1]: 2 | import os 3 | import matplotlib.pyplot as plt 4 | 5 | # In[2]: 6 | 7 | from tensorflow.keras.optimizers import Adam 8 | from tensorflow.keras.preprocessing.image import ImageDataGenerator 9 | 10 | from tensorflow.keras.layers import Conv2D, MaxPooling2D, Flatten, Dense, Dropout 11 | from tensorflow.keras.models import Sequential 12 | 13 | from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint 14 | 15 | # In[3]: 16 | 17 | # In[4]: 18 | img_width=256; img_height=256 19 | batch_size=16 20 | 21 | # In[5]: 22 | TRAINING_DIR = '../weather_pred/Data/training/' 23 | 24 | train_datagen = ImageDataGenerator(rescale = 1/255.0, 25 | 
rotation_range=30, 26 | zoom_range=0.4, 27 | horizontal_flip=True 28 | ) 29 | 30 | # In[6]: 31 | 32 | train_generator = train_datagen.flow_from_directory(TRAINING_DIR, 33 | batch_size=batch_size, 34 | class_mode='categorical', 35 | target_size=(img_height, img_width)) 36 | 37 | # In[7]: 38 | 39 | VALIDATION_DIR = '../weather_pred/Data/validation/' 40 | 41 | validation_datagen = ImageDataGenerator(rescale = 1/255.0) 42 | 43 | # In[8]: 44 | 45 | validation_generator = validation_datagen.flow_from_directory(VALIDATION_DIR, 46 | batch_size=batch_size, 47 | class_mode='categorical', 48 | target_size=(img_height, img_width) 49 | ) 50 | 51 | 52 | # In[9]: 53 | callbacks = EarlyStopping(monitor='val_loss', patience=5, verbose=1, mode='auto') 54 | # autosave best Model 55 | best_model_file = '../CNN_aug_drop25_best_weights_256.h5' 56 | best_model = ModelCheckpoint(best_model_file, monitor='val_acc', verbose = 1, save_best_only = True) 57 | 58 | # In[10]: 59 | model = Sequential([ 60 | Conv2D(16, (3, 3), activation='relu', input_shape=(img_height, img_width, 3)), 61 | MaxPooling2D(2, 2), 62 | 63 | Conv2D(32, (3, 3), activation='relu'), 64 | MaxPooling2D(2, 2), 65 | 66 | Conv2D(64, (3, 3), activation='relu'), 67 | Conv2D(64, (3, 3), activation='relu'), 68 | MaxPooling2D(2, 2), 69 | 70 | Conv2D(128, (3, 3), activation='relu'), 71 | Conv2D(128, (3, 3), activation='relu'), 72 | MaxPooling2D(2, 2), 73 | 74 | Conv2D(256, (3, 3), activation='relu'), 75 | Conv2D(256, (3, 3), activation='relu'), 76 | Conv2D(256, (3, 3), activation='relu'), 77 | MaxPooling2D(2, 2), 78 | 79 | Flatten(), 80 | Dense(512, activation='relu'), 81 | Dense(512, activation='relu'), 82 | Dense(5, activation='softmax') 83 | ]) 84 | # In[11]: 85 | 86 | model.compile(optimizer='Adam', loss='categorical_crossentropy', metrics =['accuracy']) 87 | 88 | # In[12]: 89 | history = model.fit_generator(train_generator, 90 | epochs=30, 91 | verbose=1, 92 | validation_data=validation_generator, 93 | callbacks = [best_model] 94 | ) 95 | # In[13]: 96 | 97 | acc=history.history['acc'] 98 | val_acc=history.history['val_acc'] 99 | loss=history.history['loss'] 100 | val_loss=history.history['val_loss'] 101 | 102 | epochs=range(len(acc)) 103 | 104 | # In[14]: 105 | 106 | fig = plt.figure(figsize=(20,10)) 107 | plt.plot(epochs, acc, 'r', label="Training Accuracy") 108 | plt.plot(epochs, val_acc, 'b', label="Validation Accuracy") 109 | plt.xlabel('Epoch') 110 | plt.ylabel('Accuracy') 111 | plt.title('Training and validation accuracy') 112 | plt.legend(loc='lower right') 113 | plt.show() 114 | fig.savefig('../Accuracy_curve_CNN_aug_256.jpg') 115 | 116 | # In[15]: 117 | 118 | fig2 = plt.figure(figsize=(20,10)) 119 | plt.plot(epochs, loss, 'r', label="Training Loss") 120 | plt.plot(epochs, val_loss, 'b', label="Validation Loss") 121 | plt.legend(loc='upper right') 122 | plt.xlabel('Epoch') 123 | plt.ylabel('Loss') 124 | plt.title('Training and validation loss') 125 | fig2.savefig('../Loss_curve_CNN_aug_256.jpg') 126 | 127 | -------------------------------------------------------------------------------- /scripts/wp_training_resnet101.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | import os 5 | import matplotlib.pyplot as plt 6 | 7 | # In[1]: 8 | 9 | from tensorflow.keras.optimizers import Adam 10 | from tensorflow.keras.preprocessing.image import ImageDataGenerator 11 | 12 | from tensorflow.keras.layers import Dropout, Flatten, Dense, BatchNormalization 13 | 14 | from 
tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint 15 | 16 | # In[2]: 17 | 18 | from tensorflow.keras.models import Model 19 | from tensorflow.keras.applications.resnet import ResNet101, preprocess_input 20 | 21 | # In[3]: 22 | 23 | img_width=256; img_height=256 24 | batch_size=8 25 | 26 | # In[4]: 27 | 28 | TRAINING_DIR = '.../weather_pred/Data/training/' 29 | 30 | train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input, 31 | rotation_range=30, 32 | zoom_range=0.4, 33 | horizontal_flip=True 34 | ) 35 | 36 | # In[5]: 37 | 38 | train_generator = train_datagen.flow_from_directory(TRAINING_DIR, 39 | batch_size=batch_size, 40 | class_mode='categorical', 41 | target_size=(img_height, img_width)) 42 | 43 | # In[6]: 44 | 45 | VALIDATION_DIR = '.../weather_pred/Data/validation/' 46 | 47 | validation_datagen = ImageDataGenerator(preprocessing_function=preprocess_input) 48 | 49 | # In[7]: 50 | 51 | validation_generator = validation_datagen.flow_from_directory(VALIDATION_DIR, 52 | batch_size=batch_size, 53 | class_mode='categorical', 54 | target_size=(img_height, img_width) 55 | ) 56 | 57 | # In[8]: 58 | 59 | callbacks = EarlyStopping(monitor='val_loss', patience=5, verbose=1, mode='auto') 60 | # autosave best Model 61 | best_model_file = '.../resnet101_drop_batch_best_weights_256.h5' 62 | best_model = ModelCheckpoint(best_model_file, monitor='val_acc', verbose = 1, save_best_only = True) 63 | 64 | # In[9]: 65 | 66 | wp = '.../resnet101_weights_tf_dim_ordering_tf_kernels_notop.h5' 67 | resnet101_base = ResNet101(include_top=False, weights=wp, 68 | input_tensor=None, input_shape=(img_height, img_width,3)) 69 | 70 | # In[10]: 71 | 72 | print('Adding new layers...') 73 | output = resnet101_base.get_layer(index = -1).output 74 | output = Flatten()(output) 75 | # let's add a fully-connected layer 76 | output = Dense(512,activation = "relu")(output) 77 | output = BatchNormalization()(output) 78 | output = Dropout(0.2)(output) 79 | output = Dense(512,activation = "relu")(output) 80 | output = BatchNormalization()(output) 81 | output = Dropout(0.2)(output) 82 | # and a logistic layer -- let's say we have 4 classes 83 | output = Dense(5, activation='softmax')(output) 84 | print('New layers added!') 85 | 86 | # In[11]: 87 | 88 | resnet101_model = Model(resnet101_base.input, output) 89 | for layer in resnet101_model.layers[:-7]: 90 | layer.trainable = False 91 | 92 | resnet101_model.summary() 93 | 94 | # In[12]: 95 | 96 | resnet101_model.compile(optimizer='Adam', loss='categorical_crossentropy', metrics =['accuracy']) 97 | 98 | # In[13]: 99 | 100 | history = resnet101_model.fit_generator(train_generator, 101 | epochs=30, 102 | verbose=1, 103 | validation_data=validation_generator, 104 | callbacks = [callbacks, best_model] 105 | ) 106 | 107 | # In[14]: 108 | 109 | target_dir = '.../weather_pred/' 110 | if not os.path.exists(target_dir): 111 | os.mkdir(target_dir) 112 | resnet101_model.save(target_dir + 'resnet101_model.h5') 113 | resnet101_model.save_weights(target_dir + 'resnet101_weights.h5') 114 | 115 | # In[15]: 116 | 117 | acc=history.history['acc'] 118 | val_acc=history.history['val_acc'] 119 | loss=history.history['loss'] 120 | val_loss=history.history['val_loss'] 121 | 122 | epochs=range(len(acc)) 123 | 124 | # In[16]: 125 | 126 | fig = plt.figure(figsize=(20,10)) 127 | plt.plot(epochs, acc, 'r', label="Training Accuracy") 128 | plt.plot(epochs, val_acc, 'b', label="Validation Accuracy") 129 | plt.xlabel('Epoch') 130 | plt.ylabel('Accuracy') 131 | plt.title('Training and 
validation accuracy of ResNet101') 132 | #plt.ylim([0.7, 1]) 133 | plt.legend(loc='lower right') 134 | #plt.show() 135 | fig.savefig('.../Accuracy_curve_resnet101_drop_batch_256.jpg') 136 | 137 | # In[17]: 138 | 139 | fig2 = plt.figure(figsize=(20,10)) 140 | plt.plot(epochs, loss, 'r', label="Training Loss") 141 | plt.plot(epochs, val_loss, 'b', label="Validation Loss") 142 | plt.legend(loc='upper right') 143 | plt.xlabel('Epoch') 144 | plt.ylabel('Loss') 145 | plt.title('Training and validation loss of ResNet101') 146 | fig2.savefig('.../Loss_curve_resnet101_drop_batch_256.jpg') 147 | 148 | # In[ ]: 149 | 150 | -------------------------------------------------------------------------------- /scripts/wp_training_resnet152.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | import os 5 | import matplotlib.pyplot as plt 6 | 7 | # In[1]: 8 | 9 | from tensorflow.keras.optimizers import Adam 10 | from tensorflow.keras.preprocessing.image import ImageDataGenerator 11 | 12 | from tensorflow.keras.layers import Dropout, Flatten, Dense, BatchNormalization 13 | 14 | #from tensorflow.keras.optimizers import SGD 15 | from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint 16 | 17 | # In[2]: 18 | 19 | from tensorflow.keras.models import Model 20 | from tensorflow.keras.applications.resnet import ResNet152, preprocess_input 21 | 22 | # In[3]: 23 | 24 | img_width=256; img_height=256 25 | batch_size=8 26 | 27 | # In[4]: 28 | 29 | TRAINING_DIR = '.../weather_pred/Data/training/' 30 | 31 | train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input, 32 | rotation_range=30, 33 | zoom_range=0.4, 34 | horizontal_flip=True 35 | ) 36 | 37 | # In[5]: 38 | 39 | train_generator = train_datagen.flow_from_directory(TRAINING_DIR, 40 | batch_size=batch_size, 41 | class_mode='categorical', 42 | target_size=(img_height, img_width)) 43 | 44 | # In[6]: 45 | 46 | VALIDATION_DIR = '.../weather_pred/Data/validation/' 47 | 48 | validation_datagen = ImageDataGenerator(preprocessing_function=preprocess_input) 49 | 50 | # In[7]: 51 | 52 | validation_generator = validation_datagen.flow_from_directory(VALIDATION_DIR, 53 | batch_size=batch_size, 54 | class_mode='categorical', 55 | target_size=(img_height, img_width) 56 | ) 57 | 58 | # In[8]: 59 | 60 | callbacks = EarlyStopping(monitor='val_loss', patience=4, verbose=1, mode='auto') 61 | best_model_file = '.../resnet152_drop_batch_best_weights_256.h5' 62 | best_model = ModelCheckpoint(best_model_file, monitor='val_acc', verbose = 1, save_best_only = True) 63 | 64 | # In[9]: 65 | 66 | wp = '.../resnet152_weights_tf_dim_ordering_tf_kernels_notop.h5' 67 | resnet152_base = ResNet152(include_top=False, weights=wp, 68 | input_tensor=None, input_shape=(img_width, img_height,3)) 69 | 70 | # In[10]: 71 | 72 | print('Adding new layers...') 73 | output = resnet152_base.get_layer(index = -1).output 74 | output = Flatten()(output) 75 | # let's add a fully-connected layer 76 | output = Dense(1024,activation = "relu")(output) 77 | output = BatchNormalization()(output) 78 | output = Dropout(0.2)(output) 79 | output = Dense(1024,activation = "relu")(output) 80 | output = BatchNormalization()(output) 81 | output = Dropout(0.2)(output) 82 | output = Dense(5, activation='softmax')(output) 83 | print('New layers added!') 84 | 85 | # In[11]: 86 | 87 | resnet152_model = Model(resnet152_base.input, output) 88 | for layer in resnet152_model.layers[:-7]: 89 | layer.trainable = False 90 | 91 | 
resnet152_model.summary() 92 | 93 | # In[12]: 94 | 95 | resnet152_model.compile(optimizer='Adam', loss='categorical_crossentropy', metrics =['accuracy']) 96 | 97 | # In[13]: 98 | 99 | history = resnet152_model.fit_generator(train_generator, 100 | epochs=30, 101 | verbose=1, 102 | validation_data=validation_generator, 103 | callbacks = [callbacks, best_model] 104 | ) 105 | 106 | # In[14]: 107 | 108 | target_dir = '.../weather_pred/' 109 | if not os.path.exists(target_dir): 110 | os.mkdir(target_dir) 111 | resnet152_model.save(target_dir + 'resnet152_model.h5') 112 | resnet152_model.save_weights(target_dir + 'resnet152_weights.h5') 113 | 114 | # In[15]: 115 | 116 | acc=history.history['acc'] 117 | val_acc=history.history['val_acc'] 118 | loss=history.history['loss'] 119 | val_loss=history.history['val_loss'] 120 | 121 | epochs=range(len(acc)) 122 | 123 | # In[16]: 124 | 125 | fig = plt.figure(figsize=(20,10)) 126 | plt.plot(epochs, acc, 'r', label="Training Accuracy") 127 | plt.plot(epochs, val_acc, 'b', label="Validation Accuracy") 128 | plt.xlabel('Epoch') 129 | plt.ylabel('Accuracy') 130 | plt.title('Training and validation accuracy of ResNet152') 131 | #plt.ylim([0.7, 1]) 132 | plt.legend(loc='lower right') 133 | #plt.show() 134 | fig.savefig('.../Accuracy_curve_resnet152_drop_batch_256.jpg') 135 | 136 | # In[17]: 137 | 138 | fig2 = plt.figure(figsize=(20,10)) 139 | plt.plot(epochs, loss, 'r', label="Training Loss") 140 | plt.plot(epochs, val_loss, 'b', label="Validation Loss") 141 | plt.legend(loc='upper right') 142 | plt.xlabel('Epoch') 143 | plt.ylabel('Loss') 144 | plt.title('Training and validation loss of ResNet152') 145 | fig2.savefig('.../Loss_curve_resnet152_drop_batch_256.jpg') 146 | 147 | # In[ ]: 148 | 149 | 150 | -------------------------------------------------------------------------------- /scripts/wp_training_resnet50.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | import os 5 | import matplotlib.pyplot as plt 6 | 7 | # In[1]: 8 | 9 | from tensorflow.keras.optimizers import Adam 10 | from tensorflow.keras.preprocessing.image import ImageDataGenerator 11 | 12 | from tensorflow.keras.layers import Dropout, Flatten, Dense, BatchNormalization 13 | 14 | from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint 15 | 16 | # In[2]: 17 | 18 | from tensorflow.keras.models import Model 19 | from tensorflow.keras.applications.resnet50 import ResNet50, preprocess_input 20 | 21 | # In[3]: 22 | 23 | img_width=256; img_height=256 24 | batch_size=8 25 | 26 | # In[4]: 27 | 28 | TRAINING_DIR = '.../weather_pred/Data/training/' 29 | 30 | train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input, 31 | rotation_range=30, 32 | zoom_range=0.4, 33 | horizontal_flip=True 34 | ) 35 | 36 | # In[5]: 37 | 38 | train_generator = train_datagen.flow_from_directory(TRAINING_DIR, 39 | batch_size=batch_size, 40 | class_mode='categorical', 41 | target_size=(img_height, img_width) 42 | ) 43 | 44 | # In[6]: 45 | 46 | VALIDATION_DIR = '.../weather_pred/Data/validation/' 47 | 48 | validation_datagen = ImageDataGenerator(preprocessing_function=preprocess_input) 49 | 50 | # In[7]: 51 | 52 | validation_generator = validation_datagen.flow_from_directory(VALIDATION_DIR, 53 | batch_size=batch_size, 54 | class_mode='categorical', 55 | target_size=(img_height, img_width) 56 | ) 57 | 58 | # In[8]: 59 | 60 | callbacks = EarlyStopping(monitor='val_loss', patience=4, verbose=1, mode='auto') 61 | 
best_model_file = '.../resnet50_drop_batch_best_weights_256.h5' 62 | best_model = ModelCheckpoint(best_model_file, monitor='val_acc', verbose = 1, save_best_only = True) 63 | 64 | # In[9]: 65 | 66 | wp = '.../weather_pred/resnet50_weights_tf_dim_ordering_tf_kernels_notop.h5' 67 | resnet50_base = ResNet50(include_top=False, weights=wp, 68 | input_tensor=None, input_shape=(img_height, img_width,3)) 69 | 70 | # In[10]: 71 | 72 | print('Adding new layers...') 73 | output = resnet50_base.get_layer(index = -1).output 74 | output = Flatten()(output) 75 | output = Dense(512,activation = "relu")(output) 76 | output = BatchNormalization()(output) 77 | output = Dropout(0.2)(output) 78 | output = Dense(512,activation = "relu")(output) 79 | output = BatchNormalization()(output) 80 | output = Dropout(0.2)(output) 81 | output = Dense(5, activation='softmax')(output) 82 | print('New layers added!') 83 | 84 | # In[11]: 85 | 86 | resnet50_model = Model(resnet50_base.input, output) 87 | for layer in resnet50_model.layers[:-7]: 88 | layer.trainable = False 89 | 90 | resnet50_model.summary() 91 | 92 | # In[12]: 93 | 94 | resnet50_model.compile(optimizer='Adam', loss='categorical_crossentropy', metrics =['accuracy']) 95 | 96 | # In[13]: 97 | 98 | history = resnet50_model.fit_generator(train_generator, 99 | epochs=30, 100 | verbose=1, 101 | validation_data=validation_generator, 102 | callbacks = [callbacks, best_model] 103 | ) 104 | 105 | # In[14]: 106 | 107 | target_dir = '.../weather_pred/' 108 | if not os.path.exists(target_dir): 109 | os.mkdir(target_dir) 110 | resnet50_model.save(target_dir + 'resnet50_model.h5') 111 | resnet50_model.save_weights(target_dir + 'resnet50_weights.h5') 112 | 113 | # In[15]: 114 | 115 | acc=history.history['acc'] 116 | val_acc=history.history['val_acc'] 117 | loss=history.history['loss'] 118 | val_loss=history.history['val_loss'] 119 | 120 | epochs=range(len(acc)) 121 | 122 | # In[16]: 123 | 124 | fig = plt.figure(figsize=(20,10)) 125 | plt.plot(epochs, acc, 'r', label="Training Accuracy") 126 | plt.plot(epochs, val_acc, 'b', label="Validation Accuracy") 127 | plt.xlabel('Epoch') 128 | plt.ylabel('Accuracy') 129 | plt.title('Training and validation accuracy of ResNet50') 130 | plt.legend(loc='lower right') 131 | plt.show() 132 | fig.savefig('.../Accuracy_curve_resnet50_drop_batch_256.jpg') 133 | 134 | # In[17]: 135 | 136 | fig2 = plt.figure(figsize=(20,10)) 137 | plt.plot(epochs, loss, 'r', label="Training Loss") 138 | plt.plot(epochs, val_loss, 'b', label="Validation Loss") 139 | plt.legend(loc='upper right') 140 | plt.xlabel('Epoch') 141 | plt.ylabel('Loss') 142 | plt.title('Training and validation loss of ResNet50') 143 | fig2.savefig('.../Loss_curve_resnet50_drop_batch_256.jpg') 144 | 145 | -------------------------------------------------------------------------------- /scripts/wp_training_vgg16.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | import os 5 | import matplotlib.pyplot as plt 6 | 7 | # In[1]: 8 | 9 | from tensorflow.keras.optimizers import Adam 10 | from tensorflow.keras.preprocessing.image import ImageDataGenerator 11 | 12 | from tensorflow.keras.layers import Dropout, Flatten, Dense, BatchNormalization 13 | 14 | from tensorflow.keras.callbacks import EarlyStopping, ReduceLROnPlateau 15 | from tensorflow.keras.callbacks import ModelCheckpoint 16 | 17 | # In[2]: 18 | 19 | from tensorflow.keras.models import Model 20 | from tensorflow.keras.applications.vgg16 import VGG16, 
preprocess_input 21 | 22 | # In[3]: 23 | 24 | img_width=256; img_height=256 25 | batch_size=8 26 | 27 | # In[4]: 28 | 29 | TRAINING_DIR = '../weather_pred/Data/training/' 30 | 31 | train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input, 32 | rotation_range=30, 33 | zoom_range=0.4, 34 | horizontal_flip=True 35 | ) 36 | 37 | 38 | # In[5]: 39 | 40 | train_generator = train_datagen.flow_from_directory(TRAINING_DIR, 41 | batch_size=batch_size, 42 | class_mode='categorical', 43 | target_size=(img_height, img_width)) 44 | 45 | # In[6]: 46 | 47 | VALIDATION_DIR = '../weather_pred/Data/validation/' 48 | 49 | validation_datagen = ImageDataGenerator(preprocessing_function=preprocess_input) 50 | 51 | # In[7]: 52 | 53 | validation_generator = validation_datagen.flow_from_directory(VALIDATION_DIR, 54 | batch_size=batch_size, 55 | class_mode='categorical', 56 | target_size=(img_height, img_width) 57 | ) 58 | 59 | # In[8]: 60 | 61 | callbacks = EarlyStopping(monitor='val_loss', patience=4, verbose=1, mode='auto') 62 | best_model_file = '../weather_pred/vgg16_drop_batch_best_weights_256.h5' 63 | best_model = ModelCheckpoint(best_model_file, monitor='val_acc', verbose = 1, save_best_only = True) 64 | #reduce_lr = ReduceLROnPlateau(patience=5, monitor='val_acc', factor=0.1, min_lr=0.0000001, mode='auto', verbose=1) 65 | 66 | 67 | # In[9]: 68 | 69 | wp = '.../vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5' 70 | vgg16_base = VGG16(include_top=False, weights=wp, 71 | input_tensor=None, input_shape=(img_height, img_width, 3)) 72 | 73 | # In[10]: 74 | 75 | print('Adding new layers...') 76 | output = vgg16_base.get_layer(index = -1).output 77 | output = Flatten()(output) 78 | # let's add a fully-connected layer 79 | output = Dense(1024,activation = "relu")(output) 80 | output = BatchNormalization()(output) 81 | output = Dropout(0.2)(output) 82 | output = Dense(1024,activation = "relu")(output) 83 | output = BatchNormalization()(output) 84 | output = Dropout(0.2)(output) 85 | output = Dense(5, activation='softmax')(output) 86 | print('New layers added!') 87 | 88 | # In[11]: 89 | 90 | vgg16_model = Model(vgg16_base.input, output) 91 | for layer in vgg16_model.layers[:-7]: 92 | layer.trainable = False 93 | vgg16_model.summary() 94 | 95 | # In[12]: 96 | 97 | vgg16_model.compile(optimizer='Adam', loss='categorical_crossentropy',metrics =['accuracy']) 98 | 99 | # In[13]: 100 | 101 | history = vgg16_model.fit_generator(train_generator, 102 | epochs=30, 103 | verbose=1, 104 | validation_data=validation_generator, 105 | callbacks = [callbacks, best_model] 106 | ) 107 | 108 | # In[14]: 109 | 110 | target_dir = '.../weather_pred/' 111 | if not os.path.exists(target_dir): 112 | os.mkdir(target_dir) 113 | vgg16_model.save(target_dir + 'vgg16_model_256.h5') 114 | vgg16_model.save_weights(target_dir + 'vgg16_weights_256.h5') 115 | 116 | # In[15]: 117 | 118 | acc=history.history['acc'] 119 | val_acc=history.history['val_acc'] 120 | loss=history.history['loss'] 121 | val_loss=history.history['val_loss'] 122 | 123 | epochs=range(len(acc)) 124 | 125 | # In[16]: 126 | 127 | fig = plt.figure(figsize=(20,10)) 128 | plt.plot(epochs, acc, 'r', label="Training Accuracy") 129 | plt.plot(epochs, val_acc, 'b', label="Validation Accuracy") 130 | plt.xlabel('Epoch') 131 | plt.ylabel('Accuracy') 132 | plt.title('Training and validation accuracy') 133 | plt.legend(loc='lower right') 134 | plt.show() 135 | fig.savefig('.../Accuracy_curve_vgg16_drop_batch_256.jpg') 136 | 137 | # In[17]: 138 | 139 | fig2 = 
plt.figure(figsize=(20,10)) 140 | plt.plot(epochs, loss, 'r', label="Training Loss") 141 | plt.plot(epochs, val_loss, 'b', label="Validation Loss") 142 | plt.legend(loc='upper right') 143 | plt.xlabel('Epoch') 144 | plt.ylabel('Loss') 145 | plt.title('Training and validation loss') 146 | fig2.savefig('.../Loss_curve_vgg16_drop_batch_256.jpg') 147 | 148 | 149 | 150 | -------------------------------------------------------------------------------- /scripts/wp_training_vgg19.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | 4 | import os 5 | import matplotlib.pyplot as plt 6 | 7 | # In[1]: 8 | 9 | from tensorflow.keras.optimizers import Adam 10 | from tensorflow.keras.preprocessing.image import ImageDataGenerator 11 | 12 | from tensorflow.keras.layers import Dropout, Flatten, Dense, BatchNormalization 13 | 14 | from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint 15 | 16 | # In[2]: 17 | 18 | from tensorflow.keras.models import Model 19 | from tensorflow.keras.applications.vgg19 import VGG19, preprocess_input 20 | 21 | # In[3]: 22 | 23 | img_width=256; img_height=256 24 | batch_size=8 25 | 26 | # In[4]: 27 | 28 | TRAINING_DIR = '.../weather_pred/Data/training/' 29 | 30 | train_datagen = ImageDataGenerator(preprocessing_function=preprocess_input, 31 | rotation_range=30, 32 | zoom_range=0.4, 33 | horizontal_flip=True 34 | ) 35 | 36 | # In[5]: 37 | 38 | train_generator = train_datagen.flow_from_directory(TRAINING_DIR, 39 | batch_size=batch_size, 40 | class_mode='categorical', 41 | target_size=(img_height, img_width)) 42 | 43 | # In[6]: 44 | 45 | VALIDATION_DIR = '.../weather_pred/Data/validation/' 46 | 47 | validation_datagen = ImageDataGenerator(preprocessing_function=preprocess_input) 48 | 49 | # In[7]: 50 | 51 | validation_generator = validation_datagen.flow_from_directory(VALIDATION_DIR, 52 | batch_size=batch_size, 53 | class_mode='categorical', 54 | target_size=(img_height, img_width) 55 | ) 56 | 57 | # In[8]: 58 | 59 | callbacks = EarlyStopping(monitor='val_loss', patience=5, verbose=1, mode='auto') 60 | # autosave best Model 61 | best_model_file = '.../weather_pred/vgg19_drop_batch_best_weights_256.h5' 62 | best_model = ModelCheckpoint(best_model_file, monitor='val_acc', verbose = 1, save_best_only = True) 63 | 64 | 65 | # In[9]: 66 | 67 | wp = '.../vgg19_weights_tf_dim_ordering_tf_kernels_notop.h5' 68 | vgg19_base = VGG19(include_top=False, weights=wp, 69 | input_tensor=None, input_shape=(img_height, img_width, 3)) 70 | 71 | # In[10]: 72 | 73 | print('Adding new layers...') 74 | output = vgg19_base.get_layer(index = -1).output 75 | output = Flatten()(output) 76 | # let's add a fully-connected layer 77 | output = Dense(512,activation = "relu")(output) 78 | output = BatchNormalization()(output) 79 | output = Dropout(0.2)(output) 80 | output = Dense(512,activation = "relu")(output) 81 | output = BatchNormalization()(output) 82 | output = Dropout(0.2)(output) 83 | # and a logistic layer -- let's say we have 4 classes 84 | output = Dense(5, activation='softmax')(output) 85 | print('New layers added!') 86 | 87 | # In[11]: 88 | 89 | vgg19_model = Model(vgg19_base.input, output) 90 | for layer in vgg19_model.layers[:-7]: 91 | layer.trainable = False 92 | 93 | vgg19_model.summary() 94 | 95 | # In[12]: 96 | 97 | vgg19_model.compile(optimizer='Adam', loss='categorical_crossentropy', metrics =['accuracy']) 98 | 99 | # In[13]: 100 | 101 | history = vgg19_model.fit_generator(train_generator, 102 | 
epochs=30, 103 | verbose=1, 104 | validation_data=validation_generator, 105 | callbacks = [best_model] 106 | ) 107 | 108 | # In[14]: 109 | 110 | target_dir = '.../weather_pred/' 111 | if not os.path.exists(target_dir): 112 | os.mkdir(target_dir) 113 | vgg19_model.save(target_dir + 'vgg19_model.h5') 114 | vgg19_model.save_weights(target_dir + 'vgg19_weights.h5') 115 | 116 | # In[15]: 117 | 118 | acc=history.history['acc'] 119 | val_acc=history.history['val_acc'] 120 | loss=history.history['loss'] 121 | val_loss=history.history['val_loss'] 122 | 123 | epochs=range(len(acc)) 124 | 125 | # In[16]: 126 | 127 | fig = plt.figure(figsize=(20,10)) 128 | plt.plot(epochs, acc, 'r', label="Training Accuracy") 129 | plt.plot(epochs, val_acc, 'b', label="Validation Accuracy") 130 | plt.xlabel('Epoch') 131 | plt.ylabel('Accuracy') 132 | plt.title('Training and validation accuracy of VGG19') 133 | plt.legend(loc='lower right') 134 | plt.show() 135 | fig.savefig('.../Accuracy_curve_vgg19_drop_batch_256.jpg') 136 | 137 | # In[17]: 138 | 139 | fig2 = plt.figure(figsize=(20,10)) 140 | plt.plot(epochs, loss, 'r', label="Training Loss") 141 | plt.plot(epochs, val_loss, 'b', label="Validation Loss") 142 | plt.legend(loc='upper right') 143 | plt.xlabel('Epoch') 144 | plt.ylabel('Loss') 145 | plt.title('Training and validation loss of VGG19') 146 | fig2.savefig('.../Loss_curve_vgg19_drop_batch_256.jpg') 147 | 148 | -------------------------------------------------------------------------------- /scripts/wp_vgg_resnet_test_preprocess.py: -------------------------------------------------------------------------------- 1 | # In[1]: 2 | import numpy as np 3 | import pandas as pd 4 | 5 | # In[2]: 6 | from keras.preprocessing.image import load_img, img_to_array 7 | 8 | from tensorflow.keras.applications.vgg16 import preprocess_input 9 | #from tensorflow.keras.applications.resnet import preprocess_input 10 | 11 | 12 | # In[3]: 13 | 14 | img_width, img_height = 256, 256 15 | 16 | # In[4]: 17 | def preprocess_image(path): 18 | img = load_img(path, target_size = (img_height, img_width)) 19 | a = img_to_array(img) 20 | a = np.expand_dims(a, axis = 0) 21 | a = preprocess_input(a) 22 | return a 23 | 24 | # In[5]: 25 | test_images_dir = '../dataset/alien_test/' 26 | 27 | # In[6]: 28 | test_df = pd.read_csv('../dataset/test.csv') 29 | 30 | test_dfToList = test_df['Image_id'].tolist() 31 | test_ids = [str(item) for item in test_dfToList] 32 | 33 | # In[7]: 34 | 35 | test_images = [test_images_dir+item for item in test_ids] 36 | test_preprocessed_images = np.vstack([preprocess_image(fn) for fn in test_images]) 37 | 38 | # In[8]: 39 | np.save('../test_preproc_vgg.npy', test_preprocessed_images) 40 | 41 | 42 | -------------------------------------------------------------------------------- /wp_project_report.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/vijayg15/Keras-MultiClass-Image-Classification/b31bbb79d77a95a632f27c20b1a00fb1df22725f/wp_project_report.pdf --------------------------------------------------------------------------------