├── Code.ipynb
├── README.md
├── d1-dataset-mri-and-d2-dataset-ct-scan.ipynb
├── drifa-net.ipynb
└── uncertainty-quantification-of-drifa-net.ipynb
/Code.ipynb:
--------------------------------------------------------------------------------
1 | {"metadata":{"kernelspec":{"language":"python","display_name":"Python 3","name":"python3"},"language_info":{"name":"python","version":"3.10.14","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"},"kaggle":{"accelerator":"gpu","dataSources":[{"sourceId":1079953,"sourceType":"datasetVersion","datasetId":601280},{"sourceId":7957702,"sourceType":"datasetVersion","datasetId":4680825},{"sourceId":8031909,"sourceType":"datasetVersion","datasetId":4734294}],"dockerImageVersionId":30787,"isInternetEnabled":true,"language":"python","sourceType":"notebook","isGpuEnabled":true}},"nbformat_minor":4,"nbformat":4,"cells":[{"cell_type":"code","source":"import cv2\nimport seaborn as sns\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nimport plotly.express as px\nsns.set_style('whitegrid')\nfrom sklearn.metrics import confusion_matrix , classification_report\nimport tensorflow as tf\nfrom tensorflow import keras\nfrom tensorflow.keras.models import Sequential\nfrom tensorflow.keras.layers import Dense , Flatten , Conv2D , MaxPooling2D , Dropout , Activation , BatchNormalization\nfrom tensorflow.keras.preprocessing.image import ImageDataGenerator\nfrom tensorflow.keras.optimizers import Adam , Adamax\nfrom tensorflow.keras import regularizers\n\n#Warnings\nimport warnings\nwarnings.filterwarnings('ignore')","metadata":{"_uuid":"8f2839f25d086af736a60e9eeb907d3b93b6e0e5","_cell_guid":"b1076dfc-b9ad-4769-8c92-a6c4dae69d19","trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"import tensorflow as tf\ntf.keras.mixed_precision.set_global_policy('mixed_float16')\n\nimport tensorflow as tf\nimport numpy as np\nimport numpy as np\nimport tensorflow as tf\nfrom tensorflow.keras.layers import Input, Conv2D, MaxPooling2D, Flatten, Dense, Dropout, GlobalAveragePooling2D, BatchNormalization, ReLU, Add\nfrom tensorflow.keras.models import Model\nfrom tensorflow.keras.losses import KLDivergence\nfrom tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping\nfrom tensorflow.keras.utils import to_categorical\nfrom tensorflow.keras.datasets import cifar10\nfrom tensorflow.keras.applications import EfficientNetB0\nfrom tensorflow.keras.applications import DenseNet121, ResNet50V2\nfrom tensorflow.keras.layers import GlobalAveragePooling2D\nimport copy\nimport numpy as np\nimport tensorflow as tf\nfrom tensorflow.keras.layers import Input, Conv2D, MaxPooling2D, Flatten, Dense, Dropout, GlobalAveragePooling2D, BatchNormalization, ReLU, Add\nfrom tensorflow.keras.models import Model\nfrom tensorflow.keras.losses import KLDivergence\nfrom tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping\nfrom tensorflow.keras.utils import to_categorical\nfrom tensorflow.keras.datasets import cifar10\nfrom tensorflow.keras.applications import EfficientNetB0\nfrom tensorflow.keras.applications import DenseNet169, MobileNetV2, ResNet50, EfficientNetB0\nfrom tensorflow.keras.layers import GlobalAveragePooling2D\nimport copy\n\nimport tensorflow as tf\nfrom tensorflow.keras import layers\nimport tensorflow as tf\nfrom tensorflow.keras import layers\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"X_train_h = np.load('X_train_HAM10000_ISIC_2018.npy')\ny_train_h = np.load('y_train_HAM10000_ISIC_2018.npy')\nX_test_h = 
np.load('X_test_HAM10000_ISIC_2018.npy')\ny_test_h = np.load('y_test_HAM10000_ISIC_2018.npy')\n\n\nX_train_h.shape, y_train_h.shape, X_test_h.shape, y_test_h.shape\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"random_indices = np.random.choice(2003, 810, replace=False)\n\nX_test_h1 = X_test_h[random_indices]\ny_test_h1 = y_test_h[random_indices]\n\nX_test_h1.shape, y_test_h1.shape, X_test_h.shape, y_test_h.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"#X_train_s.shape,X_test_s.shape, y_train_s.shape,y_test_s.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"X_train_h.shape, y_train_h.shape, X_test_h.shape, y_test_h.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"X_train_s = np.load('/kaggle/input/spikamed-ds/data_cervical_cancer_sipkamed.npy')\ny_train_s = np.load('/kaggle/input/spikamed-ds/labels_cervical_cancer_sipkamed.npy')\n\nX_train_s.shape, y_train_s.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"from sklearn.model_selection import train_test_split\n\nX_train_s, X_test_s, y_train_s, y_test_s = train_test_split(X_train_s, y_train_s, test_size=0.2, random_state=42)\n\nX_train_s.shape,X_test_s.shape, y_train_s.shape,y_test_s.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"import numpy as np\nimport cv2\n\ndef rotate_image(image, angle):\n \"\"\"\n Rotate the image by the specified angle.\n \"\"\"\n center = tuple(np.array(image.shape[1::-1]) / 2)\n rotation_matrix = cv2.getRotationMatrix2D(center, angle, 1.0)\n rotated_image = cv2.warpAffine(image, rotation_matrix, image.shape[1::-1], flags=cv2.INTER_LINEAR)\n return rotated_image\n\ndef translate_image(image, tx, ty):\n \"\"\"\n Translate the image by the specified translation parameters.\n \"\"\"\n translation_matrix = np.float32([[1, 0, tx], [0, 1, ty]])\n translated_image = cv2.warpAffine(image, translation_matrix, image.shape[1::-1])\n return translated_image\n\n# Example data\n#X_train = np.random.rand(100, 28, 28) # Assuming 100 images of size 28x28\n#y_train = np.random.randint(0, 10, 100) # Assuming 100 labels\n\n# Augmentation parameters\nrotation_angles = [20]\ntranslations = [(5, 5)]\n\naugmented_X_train = []\naugmented_y_train = []\n\nfor image, label in zip(X_train_s, y_train_s):\n # Original image\n #augmented_X_train.append(image)\n #augmented_y_train.append(label)\n\n # Augment with rotations\n for angle in rotation_angles:\n rotated_image = rotate_image(image, angle)\n augmented_X_train.append(rotated_image)\n augmented_y_train.append(label)\n\n # Augment with translations\n for tx, ty in translations:\n translated_image = translate_image(image, tx, ty)\n augmented_X_train.append(translated_image)\n augmented_y_train.append(label)\n\n# Convert lists to numpy arrays\naugmented_X_train = np.array(augmented_X_train)\naugmented_y_train = np.array(augmented_y_train)\n\n# Shuffle the data\nshuffle_indices = np.random.permutation(len(augmented_X_train))\naugmented_X_train = augmented_X_train[shuffle_indices]\naugmented_y_train = augmented_y_train[shuffle_indices]\naugmented_X_train.shape, augmented_y_train.shape\n# Now, augmented_X_train and augmented_y_train contain the augmented 
dataset.","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"random_indices = np.random.choice(6478, 4773, replace=False)\n\naugmented_X_train = augmented_X_train[random_indices]\naugmented_y_train = augmented_y_train[random_indices]\n\naugmented_X_train.shape, augmented_y_train.shape\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"X_train_s = np.concatenate((X_train_s, augmented_X_train), axis=0)\ny_train_s = np.concatenate((y_train_s, augmented_y_train), axis=0)\nX_train_s.shape, y_train_s.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"'''X_train_s = np.concatenate((X_train_s, X_train_s, X_train_s), axis=0)\ny_train_s = np.concatenate((y_train_s, y_train_s, y_train_s), axis=0)\nX_train_s.shape, y_train_s.shape'''","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"X_test_s1 = np.concatenate((X_test_s, X_test_s, X_test_s), axis=0)\ny_test_s1 = np.concatenate((y_test_s, y_test_s, y_test_s), axis=0)\nX_test_s1.shape, y_test_s1.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"X_train_s.shape, y_train_s.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"augmented_X_train.shape, augmented_y_train.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"random_indices = np.random.choice(2430, 2003, replace=False)\n\nX_test_s1 = X_test_s1[random_indices]\ny_test_s1 = y_test_s1[random_indices]\n\nX_test_s1.shape, y_test_s1.shape, X_test_s.shape, y_test_s.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"#X_train.shape, y_train.shape, X_test.shape, y_test.shape, \nX_train_s.shape,X_test_s.shape, y_train_s.shape,y_test_s.shape, X_test_s1.shape, y_test_s1.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"print(X_train_h.shape, y_train_h.shape, X_test_h.shape, y_test_h.shape,\n#X_train.shape, y_train.shape, X_test.shape, y_test.shape,\nX_train_s.shape,X_test_s.shape, X_test_s1.shape, y_train_s.shape,y_test_s.shape, y_test_s1.shape)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"markdown","source":"**Multi-branch fusion attention (MFA) module**","metadata":{}},{"cell_type":"code","source":"#### Multi-branch fusion attention (MFA) module #####\n\nclass DeeperGlobalLocalAttentionLayer1(layers.Layer):\n def __init__(self, units, activation='sigmoid', dropout_rate=0.2, use_scale=True, axis=-1, **kwargs):\n super(DeeperGlobalLocalAttentionLayer1, self).__init__(**kwargs)\n self.units = units\n self.activation = activation\n self.dropout_rate = dropout_rate\n self.use_scale = use_scale\n self.axis = axis\n\n def build(self, input_shape):\n _, _, _, channels = input_shape\n self.global_conv1 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n self.global_avg_pooling1 = layers.GlobalAveragePooling2D()\n \n self.global_conv2 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n self.global_avg_pooling2 = layers.GlobalMaxPooling2D()\n \n self.global_conv3 = 
layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n self.global_avg_pooling3 = layers.GlobalAveragePooling2D()\n \n self.global_conv4 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n self.global_avg_pooling4 = layers.GlobalMaxPooling2D()\n \n self.concat1 = layers.Add()\n self.concat2 = layers.Add()\n self.concat3 = layers.Add()\n self.concat4 = layers.Add()\n self.concat5 = layers.Concatenate(axis=-1)\n \n self.global_attention = layers.Dense(units=self.units, activation=self.activation)\n \n self.local_conv1 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n self.local_conv2 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n self.concat6 = layers.Add()\n \n if self.use_scale:\n self.global_scale = self.add_weight(shape=(1, 1, 1, 1), initializer='ones', trainable=True, name='global_scale')\n self.local_scale = self.add_weight(shape=(1, 1, 1, self.units), initializer='ones', trainable=True, name='local_scale')\n \n super(DeeperGlobalLocalAttentionLayer1, self).build(input_shape)\n\n def call(self, inputs, training=None):\n ##### Hierarchical Information Fusion Attention(HIFA) ######\n \n global_attention1 = self.global_conv1(inputs)\n global_avg1 = self.global_avg_pooling1(global_attention1)\n \n global_attention2 = self.global_conv2(global_attention1)\n global_avg2 = self.global_avg_pooling2(global_attention2)\n \n global_concat1 = self.concat1([global_avg1, global_avg2])\n global_attention_concat1 = self.concat2([global_attention1, global_attention2])\n \n global_attention3 = self.global_conv3(global_attention_concat1)\n global_avg3 = self.global_avg_pooling3(global_attention3)\n \n global_attention4 = self.global_conv4(global_attention3)\n global_avg4 = self.global_avg_pooling4(global_attention4)\n \n global_concat2 = self.concat3([global_avg3, global_avg4])\n global_attention_concat2 = self.concat4([global_attention3, global_attention4])\n \n global_avg_concat = self.concat5([global_concat1, global_concat2])\n \n global_attention = self.global_attention(global_avg_concat)\n global_attention = tf.expand_dims(tf.expand_dims(global_attention, 1), 1)\n\n ##### Channel-wise Local Information Attention (CLIA) ######\n \n local_attention1 = self.local_conv1(inputs)\n local_attention1 = tf.reduce_mean(local_attention1, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n local_attention2 = self.local_conv2(local_attention1)\n local_attention2 = tf.reduce_mean(local_attention2, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n \n local_attention = self.concat6([local_attention1, local_attention2])\n \n # Scale Global and Local Attention\n if self.use_scale:\n global_attention *= self.global_scale\n local_attention *= self.local_scale\n\n # Combine Global and Local Attention\n attention = tf.sigmoid(global_attention + local_attention)\n return attention\n\n def get_config(self):\n config = super(DeeperGlobalLocalAttentionLayer1, self).get_config()\n config.update({'units': self.units, 'activation': self.activation, 'dropout_rate': self.dropout_rate,\n 'use_scale': self.use_scale})\n return config\n\nclass DeeperAttentionLayer1(layers.Layer):\n def __init__(self, units=64, use_scale=True, **kwargs):\n super(DeeperAttentionLayer1, self).__init__(**kwargs)\n self.units = units\n self.use_scale = use_scale\n\n def build(self, input_shape):\n _, H, W, C = input_shape\n self.alpha = self.add_weight(shape=(1, 1, 1, C), initializer='ones', 
trainable=True, name='alpha')\n self.deeper_global_local_attention = DeeperGlobalLocalAttentionLayer1(units=self.units, activation='sigmoid', \n dropout_rate=0.2, # You can adjust the dropout rate\n use_scale=self.use_scale)\n super(DeeperAttentionLayer1, self).build(input_shape)\n\n def call(self, inputs, training=None):\n attention = self.deeper_global_local_attention(inputs, training=training)\n attention_feature = inputs * attention * self.alpha\n return attention_feature\n\n def get_config(self):\n config = super(DeeperAttentionLayer1, self).get_config()\n config.update({'units': self.units, 'use_scale': self.use_scale})\n return config\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"markdown","source":"**Multimodal information fusion attention (MIFA)**","metadata":{}},{"cell_type":"code","source":"########## Multimodal information fusion attention (MIFA) ###############\n\n\n\nclass GlobalMinPooling2D(layers.Layer):\n def __init__(self, **kwargs):\n super(GlobalMinPooling2D, self).__init__(**kwargs)\n\n def call(self, inputs):\n return tf.reduce_min(inputs, axis=[1, 2])\n\n def compute_output_shape(self, input_shape):\n return (input_shape[0], input_shape[-1])\n\n def get_config(self):\n config = super(GlobalMinPooling2D, self).get_config()\n return config\n\n\nclass DeeperGlobalLocalAttentionLayer(layers.Layer):\n def __init__(self, units, activation='sigmoid', dropout_rate=0.2, use_scale=True, axis=-1, **kwargs):\n super(DeeperGlobalLocalAttentionLayer, self).__init__(**kwargs)\n self.units = units\n self.activation = activation\n self.dropout_rate = dropout_rate\n self.use_scale = use_scale\n self.axis = axis\n\n def build(self, input_shapes):\n input_shape1, input_shape2 = input_shapes\n _, _, _, channels1 = input_shape1\n _, _, _, channels2 = input_shape2\n \n self.global_min_pooling1 = GlobalMinPooling2D()\n self.global_avg_pooling1 = layers.GlobalAveragePooling2D()\n self.global_max_pooling1 = layers.GlobalMaxPooling2D()\n \n self.global_attention = layers.Dense(units=self.units, activation=self.activation)\n \n self.global_min_pooling2 = GlobalMinPooling2D()\n self.global_avg_pooling2 = layers.GlobalAveragePooling2D()\n self.global_max_pooling2 = layers.GlobalMaxPooling2D()\n \n #self.global_attention2 = layers.Dense(units=self.units, activation=self.activation)\n \n \n self.concat = layers.Add()\n #self.global_attention3 = layers.Dense(units=self.units, activation=self.activation)\n \n self.local_conv1 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n self.local_conv2 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n \n \n \n self.concat2 = layers.Add()\n #self.local_conv5 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n \n if self.use_scale:\n self.global_scale = self.add_weight(shape=(1, 1, 1, 1), initializer='ones', trainable=True, name='global_scale')\n self.local_scale = self.add_weight(shape=(1, 1, 1, self.units), initializer='ones', trainable=True, name='local_scale')\n \n super(DeeperGlobalLocalAttentionLayer, self).build(input_shapes)\n\n def call(self, inputs, training=None):\n inputs1, inputs2 = inputs\n\n ######### Multimodal Global Information Fusion Attention (MGIFA) #########\n global_min1 = self.global_min_pooling1(inputs1)\n global_avg1 = self.global_avg_pooling1(inputs1)\n global_max1 = self.global_max_pooling1(inputs1)\n\n global_min2 = self.global_min_pooling2(inputs2)\n global_avg2 = 
self.global_avg_pooling2(inputs2)\n global_max2 = self.global_max_pooling2(inputs2)\n\n concat_min = self.concat([global_min1, global_min2])\n concat_avg = self.concat([global_avg1, global_avg2])\n concat_max = self.concat([global_max1, global_max2])\n \n concat_min = self.global_attention(concat_min)\n concat_avg = self.global_attention(concat_avg)\n concat_max = self.global_attention(concat_max)\n \n concat_global_attention = self.concat([concat_min, concat_avg, concat_max])\n \n #global_attention = self.global_attention3(concat_global_attention)\n \n global_attention = tf.expand_dims(tf.expand_dims(concat_global_attention, 1), 1)\n\n ######### Multimodal Local Information Fusion Attention (MLIFA) #########\n \n local_conv1 = self.local_conv1(inputs1)\n local_min1 = tf.reduce_min(local_conv1, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n local_avg1 = tf.reduce_mean(local_conv1, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n local_max1 = tf.reduce_max(local_conv1, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n \n local_conv2 = self.local_conv2(inputs2)\n local_min2 = tf.reduce_min(local_conv2, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n local_avg2 = tf.reduce_mean(local_conv2, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n local_max2 = tf.reduce_max(local_conv2, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n \n local_concat_min = self.concat2([local_min1, local_min2])\n local_concat_avg = self.concat2([local_avg1, local_avg2])\n local_concat_max = self.concat2([local_max1, local_max2])\n\n local_attention = self.concat2([local_concat_min, local_concat_avg, local_concat_max])\n \n \n # Scale Global and Local Attention\n if self.use_scale:\n global_attention *= self.global_scale\n local_attention *= self.local_scale\n\n # Combine Global and Local Attention\n attention = tf.sigmoid(global_attention + local_attention)\n return attention\n\n def get_config(self):\n config = super(DeeperGlobalLocalAttentionLayer, self).get_config()\n config.update({'units': self.units, 'activation': self.activation, 'dropout_rate': self.dropout_rate,\n 'use_scale': self.use_scale})\n return config\n\nclass DeeperAttentionLayer(layers.Layer):\n def __init__(self, units=64, use_scale=True,axis=-1, **kwargs):\n super(DeeperAttentionLayer, self).__init__(**kwargs)\n self.units = units\n self.use_scale = use_scale\n self.axis = axis \n\n def build(self, input_shapes):\n input_shape1, input_shape2 = input_shapes\n _, H, W, C1 = input_shape1\n _, H, W, C2 = input_shape2\n \n self.alpha1 = self.add_weight(shape=(1, 1, 1, C1), initializer='ones', trainable=True, name='alpha1')\n self.alpha2 = self.add_weight(shape=(1, 1, 1, C2), initializer='ones', trainable=True, name='alpha2')\n \n self.deeper_global_local_attention = DeeperGlobalLocalAttentionLayer(units=self.units, activation='sigmoid', \n dropout_rate=0.2, # You can adjust the dropout rate\n use_scale=self.use_scale)\n #self.concat3 = layers.Add()\n #self.concat4 = layers.Add()\n \n super(DeeperAttentionLayer, self).build(input_shapes)\n\n def call(self, inputs, training=None):\n inputs1, inputs2 = inputs\n attention = self.deeper_global_local_attention([inputs1, inputs2], training=training)\n \n #inputs_concat = self.concat3([inputs1, inputs2])\n #alpha_concat = self.concat4([self.alpha1, self.alpha2])\n \n attention_feature1 = inputs1 * attention * self.alpha1\n attention_feature2 = inputs2 * attention * self.alpha2\n \n return attention_feature1, attention_feature2\n\n def get_config(self):\n 
config = super(DeeperAttentionLayer, self).get_config()\n config.update({'units': self.units, 'use_scale': self.use_scale})\n return config\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"### RRA block ########\n\ndef RGSA(x, filters, strides=(1, 1), use_projection=False):\n shortcut = x\n\n # Define the first convolutional layer of the block\n \n x = Conv2D(filters=filters, kernel_size=(3, 3), strides=strides, padding='same', \n #activation = 'relu'\n\n )(x)\n x = DeeperAttentionLayer1(units=filters, use_scale=True)(x)\n x = BatchNormalization()(x)\n x = tf.keras.layers.Activation('relu')(x)\n\n # Define the second convolutional layer of the block\n \n x = Conv2D(filters=filters, kernel_size=(3, 3), padding='same')(x)\n x = DeeperAttentionLayer1(units=filters, use_scale=True)(x)\n \n x = BatchNormalization()(x)\n\n # If the stride is not (1, 1), the dimensions need to be adjusted\n if strides != (1, 1) or use_projection:\n \n shortcut = Conv2D(filters=filters, kernel_size=(1, 1), strides=strides, padding='same')(shortcut)\n shortcut = BatchNormalization()(shortcut)\n\n # Add the shortcut (identity connection)\n \n x = tf.keras.layers.add([x, shortcut])\n \n x = tf.keras.layers.Activation('relu')(x)\n return x\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"def residual_GLC_branch1(inputs1, inputs2):\n \n x1 = Conv2D(filters=64, kernel_size=(7, 7), strides=(2, 2), padding='same')(inputs1)\n x1 = DeeperAttentionLayer1(units=64, use_scale=True)(x1) ## MFA ####\n x1 = BatchNormalization()(x1)\n x1 = tf.keras.layers.Activation('relu')(x1)\n x1 = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same')(x1)\n \n x2 = Conv2D(filters=64, kernel_size=(7, 7), strides=(2, 2), padding='same')(inputs2)\n x2 = DeeperAttentionLayer1(units=64, use_scale=True)(x2) ## MFA ####\n x2 = BatchNormalization()(x2)\n x2 = tf.keras.layers.Activation('relu')(x2)\n x2 = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same')(x2)\n \n\n x1 = RGSA(x1, filters=64)\n x1 = tf.keras.layers.Dropout(0.25)(x1, training = True) ## MCD ####\n x1 = DeeperAttentionLayer1(units=64, use_scale=True)(x1) ## MFA ####\n\n x2 = RGSA(x2, filters=64)\n x2 = tf.keras.layers.Dropout(0.25)(x2, training = True) ## MCD ####\n x2 = DeeperAttentionLayer1(units=64, use_scale=True)(x2)\n \n x1, x2 = DeeperAttentionLayer(units=64, use_scale=True)([x1, x2]) ## MIFA ####\n \n x1 = RGSA(x1, filters=64)\n x1 = tf.keras.layers.Dropout(0.25)(x1, training = True) ## MCD ####\n x1 = DeeperAttentionLayer1(units=64, use_scale=True)(x1) ## MFA ####\n \n x2 = RGSA(x2, filters=64)\n x2 = tf.keras.layers.Dropout(0.25)(x2, training = True) ## MCD ####\n x2 = DeeperAttentionLayer1(units=64, use_scale=True)(x2)\n\n x1, x2 = DeeperAttentionLayer(units=64, use_scale=True)([x1, x2]) ## MIFA ####\n \n x1 = RGSA(x1, filters=128, strides=(2, 2), use_projection=True)\n x1 = tf.keras.layers.Dropout(0.25)(x1, training = True) ## MCD ####\n x1 = DeeperAttentionLayer1(units=128, use_scale=True)(x1) ## MFA ####\n\n x2 = RGSA(x2, filters=128, strides=(2, 2), use_projection=True)\n x2 = tf.keras.layers.Dropout(0.25)(x2, training = True) ## MCD ####\n x2 = DeeperAttentionLayer1(units=128, use_scale=True)(x2)\n\n x1, x2 = DeeperAttentionLayer(units=128, use_scale=True)([x1, x2]) ## MIFA ####\n \n x1 = RGSA(x1, filters=128)\n x1 = tf.keras.layers.Dropout(0.25)(x1, training = True) ## MCD ####\n x1 = DeeperAttentionLayer1(units=128, use_scale=True)(x1)\n \n x2 = RGSA(x2, 
filters=128)\n x2 = tf.keras.layers.Dropout(0.25)(x2, training = True) ## MCD ####\n x2 = DeeperAttentionLayer1(units=128, use_scale=True)(x2)\n\n x1, x2 = DeeperAttentionLayer(units=128, use_scale=True)([x1, x2]) ## MIFA ####\n \n x1 = RGSA(x1, filters=256, strides=(2, 2), use_projection=True)\n x1 = tf.keras.layers.Dropout(0.25)(x1, training = True) ## MCD ####\n x1 = DeeperAttentionLayer1(units=256, use_scale=True)(x1)\n \n x2 = RGSA(x2, filters=256, strides=(2, 2), use_projection=True)\n x2 = tf.keras.layers.Dropout(0.25)(x2, training = True) ## MCD ####\n x2 = DeeperAttentionLayer1(units=256, use_scale=True)(x2)\n\n x1, x2 = DeeperAttentionLayer(units=256, use_scale=True)([x1, x2]) ## MIFA ####\n \n \n x1 = RGSA(x1, filters=256)\n x1 = tf.keras.layers.Dropout(0.25)(x1, training = True) ## MCD ####\n x1 = DeeperAttentionLayer1(units=256, use_scale=True)(x1)\n \n x2 = RGSA(x2, filters=256)\n x2 = tf.keras.layers.Dropout(0.25)(x2, training = True) ## MCD ####\n x2 = DeeperAttentionLayer1(units=256, use_scale=True)(x2)\n \n x1, x2 = DeeperAttentionLayer(units=256, use_scale=True)([x1, x2]) ## MIFA ####\n\n x1 = RGSA(x1, filters=512, strides=(2, 2), use_projection=True)\n x1 = DeeperAttentionLayer1(units=512, use_scale=True)(x1)\n \n x2 = RGSA(x2, filters=512, strides=(2, 2), use_projection=True)\n x2 = DeeperAttentionLayer1(units=512, use_scale=True)(x2)\n\n x1, x2 = DeeperAttentionLayer(units=512, use_scale=True)([x1, x2]) ## MIFA ####\n \n x1 = RGSA(x1, filters=512)\n x2 = RGSA(x2, filters=512)\n x1, x2 = DeeperAttentionLayer(units=512, use_scale=True)([x1, x2])\n \n return x1, x2","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"#def build_resnet18(input_shape=(128, 128, 3), num_classes=2):\ninput_shape=(128, 128, 3)\ninputs1 = Input(shape=input_shape)\ninputs2 = Input(shape=input_shape)\n\n\n\n#input_data = Input(shape=input_shape, name='input_data')\n# Initial convolutional layer\n\nx1, x2 = residual_GLC_branch1(inputs1, inputs2)\n#print('x:',x.shape)\n\ncon = tf.keras.layers.Concatenate(axis=-1)([x1, x2])\n\ncon = tf.keras.layers.Dropout(0.25)(con, training = True) ## MCD ####\n\nx = GlobalAveragePooling2D()(con)\nprint('GlobalAveragePooling2D x:',x.shape)\n\noutputs1 = Dense(5, activation='softmax')(x)\noutputs2 = Dense(7, activation='softmax')(x)\n\n# Create the model\nmodel = Model([inputs1, inputs2], [outputs1, outputs2])\n#return model\nprint(model.summary())","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"from tensorflow.keras.optimizers import Adam\nfrom tensorflow.keras.optimizers.schedules import ExponentialDecay\n\ninitial_gamma = 0.5\n\noptimizer = Adam(learning_rate=0.001)\n# Compile the model with the custom optimizer\nmodel.compile(optimizer=optimizer,\n loss=['categorical_crossentropy', 'categorical_crossentropy'],\n loss_weights=[initial_gamma, (1 - initial_gamma)],\n metrics=['accuracy', 'accuracy'])\n\n\nfrom tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping\ndef checkpoint_callback():\n\n checkpoint_filepath = 'best1_model_cer_skin_lung.keras'\n\n model_checkpoint_callback= ModelCheckpoint(filepath=checkpoint_filepath,\n save_weights_only=False,\n #frequency='epoch',\n monitor='val_loss',\n save_best_only=True,\n mode='min',\n verbose=0)\n\n return model_checkpoint_callback\n\ndef early_stopping(patience):\n es_callback = 
tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=patience, verbose=1)\n return es_callback\n\n\n\nfrom tensorflow.keras.callbacks import ReduceLROnPlateau\n\nreduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.2,\n patience=5, min_lr=0.00001)\n\ncheckpoint_callback = checkpoint_callback()\n\nearly_stopping = early_stopping(patience=100)\ncallbacks = [checkpoint_callback, early_stopping, reduce_lr]\n \n\n# Fit the model with callbacks\nhistory = model.fit([X_train_s, X_train_h], [y_train_s, y_train_h],\n epochs=200,\n validation_split=0.2, verbose=1,\n shuffle=True,\n callbacks=callbacks) # UpdateGammaCallback\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"model.evaluate([X_test_s, X_test_h1], [y_test_s, y_test_h1])","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"\nmodel.evaluate([X_test_s1, X_test_h], [y_test_s1, y_test_h])","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"y_pred = model.predict([X_test_s1, X_test_h]) \n\ny_pred_binary1 = y_pred[0] >= 0.5\ny_pred_binary_pgd_test1 = np.array(y_pred_binary1, dtype='int32')\n\nprint('y_pred_binary_pgd_test1:', y_pred_binary_pgd_test1.shape)\n\ny_pred_binary2 = y_pred[1] >= 0.5\ny_pred_binary_pgd_test2 = np.array(y_pred_binary2, dtype='int32')\n\nprint('y_pred_binary_pgd_test2:', y_pred_binary_pgd_test2.shape)\n\n#y_test_s, y_test_h\n# Calculate evaluation metrics for the current epsilon\ny_test_categorical1 = y_test_s1\ny_test_categorical2 = y_test_h\n\n## Task 1:\nprint('skin cancer classification:')\naccuracy = accuracy_score(y_pred_binary_pgd_test1, y_test_categorical1) * 100\nprecision = precision_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\n#auc = roc_auc_score(y_pred, y_train_categorical, multi_class='ovr') * 100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)\n\n## Task 2:\nprint('Cervical cancer classification:')\naccuracy = accuracy_score(y_pred_binary_pgd_test2, y_test_categorical2) * 100\nprecision = precision_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"history = model.fit([X_train_s, X_train_h], [y_train_s, y_train_h],\n epochs=100,\n validation_split=0.2, verbose=1,\n shuffle=True,\n callbacks=callbacks)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"model.evaluate([X_test_s, X_test_h1], [y_test_s, y_test_h1])","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"from tensorflow.keras.models import load_model\n\nmodel1 = load_model('/kaggle/working/best1_model_cer_skin_lung.keras', custom_objects={'DeeperAttentionLayer1': DeeperAttentionLayer1,\n 'DeeperAttentionLayer': DeeperAttentionLayer\n })\nmodel1.evaluate([X_test_s, 
X_test_h1], [y_test_s, y_test_h1])","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"model.evaluate([X_test_s1, X_test_h], [y_test_s1, y_test_h])","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"y_pred = model.predict([X_test_s1, X_test_h]) \n\ny_pred_binary1 = y_pred[0] >= 0.5\ny_pred_binary_pgd_test1 = np.array(y_pred_binary1, dtype='int32')\n\nprint('y_pred_binary_pgd_test1:', y_pred_binary_pgd_test1.shape)\n\ny_pred_binary2 = y_pred[1] >= 0.5\ny_pred_binary_pgd_test2 = np.array(y_pred_binary2, dtype='int32')\n\nprint('y_pred_binary_pgd_test2:', y_pred_binary_pgd_test2.shape)\n\n#y_test_s, y_test_h\n# Calculate evaluation metrics for the current epsilon\ny_test_categorical1 = y_test_s1\ny_test_categorical2 = y_test_h\n\n## Task 1:\nprint('skin cancer classification:')\naccuracy = accuracy_score(y_pred_binary_pgd_test1, y_test_categorical1) * 100\nprecision = precision_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\n#auc = roc_auc_score(y_pred, y_train_categorical, multi_class='ovr') * 100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)\n\n## Task 2:\nprint('Cervical cancer classification:')\naccuracy = accuracy_score(y_pred_binary_pgd_test2, y_test_categorical2) * 100\nprecision = precision_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"y_pred = model.predict([X_test_s, X_test_h1]) \n\ny_pred_binary1 = y_pred[0] >= 0.5\ny_pred_binary_pgd_test1 = np.array(y_pred_binary1, dtype='int32')\n\nprint('y_pred_binary_pgd_test1:', y_pred_binary_pgd_test1.shape)\n\ny_pred_binary2 = y_pred[1] >= 0.5\ny_pred_binary_pgd_test2 = np.array(y_pred_binary2, dtype='int32')\n\nprint('y_pred_binary_pgd_test2:', y_pred_binary_pgd_test2.shape)\n\n#y_test_s, y_test_h\n# Calculate evaluation metrics for the current epsilon\ny_test_categorical1 = y_test_s\ny_test_categorical2 = y_test_h1\n\n## Task 1:\nprint('skin cancer classification:')\naccuracy = accuracy_score(y_pred_binary_pgd_test1, y_test_categorical1) * 100\nprecision = precision_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\n#auc = roc_auc_score(y_pred, y_train_categorical, multi_class='ovr') * 100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)\n\n## Task 2:\nprint('Cervical cancer classification:')\naccuracy = accuracy_score(y_pred_binary_pgd_test2, y_test_categorical2) * 100\nprecision = precision_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 
100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"model.save('best_model_ever.keras')","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null}]}
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Welcome to My GitHub Profile 👋
2 |
3 | ## Multimodal Fusion Learning with Dual Attention for Medical Imaging
4 |
5 | Multimodal fusion learning has shown significant promise in classifying various diseases such as skin cancer and brain tumors. However, existing methods face three key limitations:
6 |
7 | 1. **Lack of Generalizability**: Existing methods often fail to generalize across diagnosis tasks due to their focus on a specific disease.
8 | 2. **Limited Use of Diverse Modalities**: They do not fully leverage multiple health records from diverse modalities to learn robust complementary information.
9 | 3. **Single Attention Mechanism**: Relying on a single attention mechanism misses the benefits of combining multiple attention strategies within and across various modalities.
10 |
11 | ### Our Proposed Approach: **DRIFA**
12 |
13 | To address these challenges, we propose:
14 | **A Dual Robust Information Fusion Attention Mechanism** (**DRIFA**)
15 |
16 | ### Key Features of DRIFA:
17 |
18 | - **Multi-Branch Fusion Attention Module**: Enhances representations for each modality, such as dermoscopy, pap smear, MRI, and CT scans.
19 | - **Multimodal Information Fusion Attention Module**: Learns refined multimodal shared representations, improving the network's generalization across multiple tasks.
20 |
21 | DRIFA can be integrated with any deep neural network, forming a multimodal fusion learning framework known as **DRIFA-Net**.
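
The snippet below is a condensed sketch of how these modules are wired together in `Code.ipynb`. It assumes the `DeeperAttentionLayer1` (MFA), `DeeperAttentionLayer` (MIFA), and `RGSA` (RRA block) definitions from that notebook are in scope; the filter counts, depth, and head sizes shown here are illustrative rather than the full DRIFA-Net configuration.

```python
# Condensed wiring sketch based on Code.ipynb. Assumes DeeperAttentionLayer1 (MFA),
# DeeperAttentionLayer (MIFA), and RGSA (the RRA block) from that notebook are in
# scope; filter counts, depth, and head sizes are illustrative.
import tensorflow as tf
from tensorflow.keras import layers, Model

def drifa_net_sketch(input_shape=(128, 128, 3), n_classes_a=5, n_classes_b=7):
    in_a = layers.Input(shape=input_shape)   # modality 1 (e.g. pap smear)
    in_b = layers.Input(shape=input_shape)   # modality 2 (e.g. dermoscopy)

    def stem(x):
        x = layers.Conv2D(64, 7, strides=2, padding='same')(x)
        x = DeeperAttentionLayer1(units=64)(x)            # MFA: per-modality refinement
        x = layers.BatchNormalization()(x)
        x = layers.Activation('relu')(x)
        return layers.MaxPooling2D(3, strides=2, padding='same')(x)

    xa, xb = stem(in_a), stem(in_b)
    xa, xb = RGSA(xa, filters=64), RGSA(xb, filters=64)   # RRA residual blocks
    xa = layers.Dropout(0.25)(xa, training=True)          # Monte Carlo dropout stays active
    xb = layers.Dropout(0.25)(xb, training=True)
    xa, xb = DeeperAttentionLayer(units=64)([xa, xb])     # MIFA: cross-modal fusion

    x = layers.GlobalAveragePooling2D()(layers.Concatenate()([xa, xb]))
    out_a = layers.Dense(n_classes_a, activation='softmax')(x)   # task 1 head
    out_b = layers.Dense(n_classes_b, activation='softmax')(x)   # task 2 head
    return Model([in_a, in_b], [out_a, out_b])

model = drifa_net_sketch()
model.compile(optimizer='adam',
              loss=['categorical_crossentropy', 'categorical_crossentropy'],
              loss_weights=[0.5, 0.5])
```

`Code.ipynb` stacks several RGSA/MFA stages per branch and applies MIFA after each stage before the shared two-head classifier; the sketch keeps a single stage for brevity.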
22 |
23 | ### Performance Highlights:
24 |
25 | - **Uncertainty Estimation**: Using an ensemble Monte Carlo dropout strategy, DRIFA-Net provides reliable predictions with uncertainty estimates (a minimal inference sketch follows this list).
26 | - **State-of-the-Art Results**: Extensive experiments on five publicly available datasets demonstrate consistent performance improvements over existing methods.
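
Because the dropout layers in `Code.ipynb` are built with `training=True`, every forward pass of DRIFA-Net samples a fresh dropout mask. The sketch below shows a minimal Monte Carlo dropout inference loop; the number of passes `T` and the entropy-based uncertainty are illustrative choices and not necessarily the exact procedure used in `uncertainty-quantification-of-drifa-net.ipynb`.

```python
# Minimal Monte Carlo dropout inference sketch. Because the Dropout layers in
# Code.ipynb are called with training=True, model.predict is already stochastic,
# so repeated passes sample different dropout masks. T and the entropy-based
# uncertainty measure below are illustrative choices.
import numpy as np

def mc_dropout_predict(model, inputs, T=30):
    runs = [model.predict(inputs, verbose=0) for _ in range(T)]   # T stochastic passes
    per_task = []
    for head in range(len(runs[0])):                              # one entry per task head
        probs = np.stack([run[head] for run in runs], axis=0)     # (T, N, n_classes)
        mean_probs = probs.mean(axis=0)                           # averaged prediction
        entropy = -np.sum(mean_probs * np.log(mean_probs + 1e-12), axis=1)
        per_task.append((mean_probs, entropy))                    # per-sample uncertainty
    return per_task

# Example with the test arrays from Code.ipynb:
# (probs_s, unc_s), (probs_h, unc_h) = mc_dropout_predict(model, [X_test_s, X_test_h1])
```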
27 |
28 | ### Technologies and Applications:
29 | - **Applications**: Disease classification (e.g., skin cancer, brain tumors).
30 | - **Modalities**: Dermoscopy, pap smear, MRI, and CT scans.
31 |
32 |
33 | 
34 | Figure 1. Detailed architecture of DRIFA-Net. Key components include: (A) the target-specific multimodal fusion learning (TMFL)
35 | phase, followed by (B) an uncertainty quantification (UQ) phase. The TMFL phase comprises a robust residual attention (RRA) block, shown
36 | in (C), and utilizes multi-branch fusion attention (MFA), an additional MFA module for further refinement of local representations, a
37 | multimodal information fusion attention (MIFA) module for improved multimodal representation learning, and multitask learning (MTL)
38 | for handling multiple classification tasks. During the UQ phase, the reliability of DRIFA-Net predictions is assessed.
39 |
40 |
41 | 
42 |
43 | Figure 2. (a) Multi-branch fusion attention (MFA) module. Key components include hierarchical information fusion attention (HIFA) for diverse
44 | local information enhancement and channel-wise local information attention (CLIA) for improved channel-specific representation learning.
45 |
46 |
47 | 
48 |
49 | Figure 3. (a) Multimodal information fusion attention (MIFA) module. This module includes multimodal global information fusion attention (MGIFA) (shown in b) and multimodal local information fusion attention (MLIFA) (shown in c).
50 |
51 |
52 | 
53 |
54 | Figure 4. Visual representation of the important regions highlighted by our proposed DRIFA-Net and four SOTA methods using the
55 | Grad-CAM technique on two benchmark datasets D1 and D3. (a) and (g) display the original images, while (b) and (h) present results for
56 | Gloria, (c) and (i) for MTF with MA, (d) and (j) for CAF, (e) and (k) for MTTU-Net, and (f) and (l) for our proposed DRIFA-Net.
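
For reference, the snippet below is a generic Grad-CAM sketch using `tf.GradientTape`; it is not the exact script behind Figure 4. The layer name argument and the single-input, single-output signature are placeholders, and the multimodal DRIFA-Net case would pass both modality inputs and select one task head for the score.

```python
# Generic Grad-CAM sketch (not the exact script used for Figure 4). The layer name
# and the single-input, single-output signature are placeholders; DRIFA-Net would
# receive both modality inputs and one head would be selected for the score.
import numpy as np
import tensorflow as tf

def grad_cam(model, image, class_index, last_conv_name):
    conv_layer = model.get_layer(last_conv_name)
    grad_model = tf.keras.Model(model.inputs, [conv_layer.output, model.output])
    with tf.GradientTape() as tape:
        conv_out, preds = grad_model(image[np.newaxis, ...])
        score = preds[:, class_index]                     # class score to explain
    grads = tape.gradient(score, conv_out)                # d(score)/d(feature map)
    weights = tf.reduce_mean(grads, axis=(1, 2))          # channel importance
    cam = tf.reduce_sum(conv_out * weights[:, None, None, :], axis=-1)
    cam = tf.nn.relu(cam)[0]                              # keep positive evidence only
    return (cam / (tf.reduce_max(cam) + 1e-8)).numpy()    # heatmap normalized to [0, 1]
```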
57 |
58 |
59 | 
60 |
61 | Figure 5. t-SNE visualization of different models applied to the dermoscopy images of the D1 dataset, where (a) shows the t-SNE embedding for Gloria, (b) for MTTU-Net, and (c) for our proposed DRIFA-Net.
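
The following is a generic sketch of how such an embedding can be produced with scikit-learn; the choice of feature layer and the t-SNE perplexity are illustrative and not the exact settings behind Figure 5.

```python
# Generic t-SNE sketch for visualizing learned features; the feature layer and the
# perplexity value are illustrative, not the exact settings behind Figure 5.
import matplotlib.pyplot as plt
import tensorflow as tf
from sklearn.manifold import TSNE

def plot_tsne(model, inputs, class_ids, feature_layer_index=-3):
    # Use an intermediate layer (e.g. the pooled features before the task heads).
    extractor = tf.keras.Model(model.inputs, model.layers[feature_layer_index].output)
    feats = extractor.predict(inputs, verbose=0)                    # (N, feature_dim)
    emb = TSNE(n_components=2, perplexity=30, random_state=0).fit_transform(feats)
    plt.scatter(emb[:, 0], emb[:, 1], c=class_ids, s=5, cmap='tab10')
    plt.title('t-SNE of learned features')
    plt.show()

# class_ids: integer labels, e.g. y_test.argmax(axis=1) for one-hot encoded labels.
```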
62 |
63 |
64 | ### Citation:
65 |
66 | If you find this work useful, please cite:
67 | ```bibtex
68 | @inproceedings{dhar2025multimodal,
69 | title={Multimodal Fusion Learning with Dual Attention for Medical Imaging},
70 | author={Dhar, Joy and Zaidi, N. and Haghighat, M. and Goyal, P. and Roy, S. and Alavi, A. and Kumar, V.},
71 | booktitle={IEEE/CVF Winter Conference on Applications of Computer Vision (WACV)},
72 | year={2025},
73 | url={https://arxiv.org/abs/2412.01248}
74 | }
75 | ```
76 |
77 |
78 |
79 |
80 |
81 |
82 |
--------------------------------------------------------------------------------
/d1-dataset-mri-and-d2-dataset-ct-scan.ipynb:
--------------------------------------------------------------------------------
1 | {"metadata":{"kernelspec":{"name":"python3","display_name":"Python 3","language":"python"},"language_info":{"name":"python","version":"3.10.14","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"},"kaggle":{"accelerator":"gpu","dataSources":[{"sourceId":1183191,"sourceType":"datasetVersion","datasetId":672399},{"sourceId":2645886,"sourceType":"datasetVersion","datasetId":1608934}],"dockerImageVersionId":30787,"isInternetEnabled":true,"language":"python","sourceType":"notebook","isGpuEnabled":true}},"nbformat_minor":4,"nbformat":4,"cells":[{"cell_type":"markdown","source":"# 1. Import needed libraries","metadata":{}},{"cell_type":"code","source":"import os\nfrom PIL import Image\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nfrom glob import glob\n#---------------------------------------\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.metrics import classification_report, confusion_matrix\n#---------------------------------------\nimport tensorflow as tf\nfrom tensorflow.keras.models import Sequential\nfrom tensorflow.keras.layers import Dense, Dropout, Flatten\nfrom tensorflow.keras.optimizers import Adamax\nfrom tensorflow.keras.metrics import Precision, Recall\nfrom tensorflow.keras.preprocessing.image import ImageDataGenerator\n#---------------------------------------\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nimport os\nfrom PIL import Image\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nfrom glob import glob\n#---------------------------------------\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.metrics import classification_report, confusion_matrix\n#---------------------------------------\nimport tensorflow as tf\nfrom tensorflow.keras.models import Sequential\nfrom tensorflow.keras.layers import Dense, Dropout, Flatten\nfrom tensorflow.keras.optimizers import Adamax\nfrom tensorflow.keras.metrics import Precision, Recall\nfrom tensorflow.keras.preprocessing.image import ImageDataGenerator\n#---------------------------------------\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\nimg_rows, img_cols = 128, 128\ninput_shape = (img_rows, img_cols, 3)\n\n#n_classes = df['category'].nunique()\nn_classes = 4\nprint('Total number of unique categories:', n_classes)\n\nfrom os import listdir, makedirs\nfrom os.path import isfile, join, basename, splitext, isfile, exists\n\nimport numpy as np\nimport pandas as pd\n\nfrom tqdm import tqdm_notebook\n\nimport tensorflow as tf\nimport keras.backend as K\n\nimport keras\nfrom keras.models import Sequential, Model\nfrom keras.layers import Dropout, Dense, Flatten, BatchNormalization\nfrom keras.layers import DepthwiseConv2D, SeparableConvolution2D, Convolution2D, Conv2D,GRU, LSTM, AlphaDropout, Embedding, ZeroPadding2D,AveragePooling2D, MaxPooling2D, GlobalAveragePooling2D, GlobalMaxPooling2D, Dropout\nfrom keras.layers import Concatenate, Average, Maximum, Bidirectional, TimeDistributed\nfrom keras.callbacks import Callback, EarlyStopping, ModelCheckpoint\n#from keras.engine.input_layer import Input\nfrom keras.models import load_model\n#from keras.initializers import LecunNormal\n\nimport matplotlib.pyplot as plt\nimport seaborn as sns\n\n#pd.set_option('precision', 30)\nnp.set_printoptions(precision = 30)\n\n\n#tf.set_random_seed(1090)\n\nimport pandas as pd\nimport numpy as 
np\n\nimport seaborn as sns\nimport matplotlib.pyplot as plt\nimport matplotlib.image as img\n\nimport cv2\nimport itertools\nimport pathlib\nimport warnings\nfrom PIL import Image\nfrom random import randint\nwarnings.filterwarnings('ignore')\n\nfrom imblearn.over_sampling import SMOTE\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.metrics import matthews_corrcoef as MCC\nfrom sklearn.metrics import balanced_accuracy_score as BAS\nfrom sklearn.metrics import classification_report, confusion_matrix\n\n\nfrom tensorflow import keras\nfrom keras import layers\nimport tensorflow as tf\n#import tensorflow_addons as tfa\nfrom tensorflow.keras.preprocessing import image_dataset_from_directory\n##from keras.utils.vis_utils import plot_model\nfrom tensorflow.keras import Sequential, Input\nfrom tensorflow.keras.layers import Dense, Dropout\nfrom tensorflow.keras.layers import Conv2D, Flatten\nfrom tensorflow.keras.callbacks import ReduceLROnPlateau\nfrom tensorflow.keras.applications.inception_v3 import InceptionV3\nfrom tensorflow.keras.preprocessing.image import ImageDataGenerator as IDG\nfrom tensorflow.keras.layers import SeparableConv2D, BatchNormalization, GlobalAveragePooling2D\n\nfrom distutils.dir_util import copy_tree, remove_tree\n\nimport os\n#print(os.listdir(\"../input/alzheimer-mri-dataset/Dataset\"))\nimport tensorflow as tf\nfrom keras.datasets import mnist\nimport cv2\nimport os\nimport pathlib\nfrom keras.layers import Conv2D, Conv2DTranspose,Concatenate, Dropout, Dense, Reshape, LayerNormalization, LeakyReLU\nfrom keras import layers, models\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom sklearn.metrics import accuracy_score, classification_report\nfrom sklearn.metrics import f1_score, recall_score, precision_score\nprint(\"TensorFlow Version:\", tf.__version__)","metadata":{"_cell_guid":"b1076dfc-b9ad-4769-8c92-a6c4dae69d19","_uuid":"8f2839f25d086af736a60e9eeb907d3b93b6e0e5","execution":{"iopub.status.busy":"2024-07-28T19:52:44.874456Z","iopub.execute_input":"2024-07-28T19:52:44.875406Z","iopub.status.idle":"2024-07-28T19:52:56.032164Z","shell.execute_reply.started":"2024-07-28T19:52:44.875365Z","shell.execute_reply":"2024-07-28T19:52:56.031161Z"},"trusted":true},"outputs":[{"name":"stderr","text":"2024-07-28 19:52:48.776203: E external/local_xla/xla/stream_executor/cuda/cuda_dnn.cc:9261] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n2024-07-28 19:52:48.776341: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:607] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n2024-07-28 19:52:48.909666: E external/local_xla/xla/stream_executor/cuda/cuda_blas.cc:1515] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n","output_type":"stream"},{"name":"stdout","text":"Total number of unique categories: 4\nTensorFlow Version: 2.15.0\n","output_type":"stream"}],"execution_count":1},{"cell_type":"code","source":"","metadata":{},"outputs":[],"execution_count":null},{"cell_type":"markdown","source":"# 2. 
Preprocessing","metadata":{}},{"cell_type":"markdown","source":"## 2.1 Load data","metadata":{}},{"cell_type":"code","source":"def train_df(tr_path):\n classes, class_paths = zip(*[(label, os.path.join(tr_path, label, image))\n for label in os.listdir(tr_path) if os.path.isdir(os.path.join(tr_path, label))\n for image in os.listdir(os.path.join(tr_path, label))])\n\n tr_df = pd.DataFrame({'Class Path': class_paths, 'Class': classes})\n return tr_df","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:44:24.370880Z","iopub.execute_input":"2024-07-28T12:44:24.371245Z","iopub.status.idle":"2024-07-28T12:44:24.377523Z","shell.execute_reply.started":"2024-07-28T12:44:24.371214Z","shell.execute_reply":"2024-07-28T12:44:24.376355Z"},"trusted":true},"outputs":[],"execution_count":3},{"cell_type":"code","source":"def test_df(ts_path):\n classes, class_paths = zip(*[(label, os.path.join(ts_path, label, image))\n for label in os.listdir(ts_path) if os.path.isdir(os.path.join(ts_path, label))\n for image in os.listdir(os.path.join(ts_path, label))])\n\n ts_df = pd.DataFrame({'Class Path': class_paths, 'Class': classes})\n return ts_df","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:44:24.718610Z","iopub.execute_input":"2024-07-28T12:44:24.719460Z","iopub.status.idle":"2024-07-28T12:44:24.725054Z","shell.execute_reply.started":"2024-07-28T12:44:24.719421Z","shell.execute_reply":"2024-07-28T12:44:24.724083Z"},"trusted":true},"outputs":[],"execution_count":4},{"cell_type":"code","source":"tr_df = train_df('/kaggle/input/brain-tumor-mri-dataset/Training')","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:44:24.973845Z","iopub.execute_input":"2024-07-28T12:44:24.974205Z","iopub.status.idle":"2024-07-28T12:44:25.800440Z","shell.execute_reply.started":"2024-07-28T12:44:24.974175Z","shell.execute_reply":"2024-07-28T12:44:25.799605Z"},"trusted":true},"outputs":[],"execution_count":5},{"cell_type":"code","source":"tr_df","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:44:25.802198Z","iopub.execute_input":"2024-07-28T12:44:25.802878Z","iopub.status.idle":"2024-07-28T12:44:25.823415Z","shell.execute_reply.started":"2024-07-28T12:44:25.802840Z","shell.execute_reply":"2024-07-28T12:44:25.822519Z"},"trusted":true},"outputs":[{"execution_count":6,"output_type":"execute_result","data":{"text/plain":" Class Path Class\n0 /kaggle/input/brain-tumor-mri-dataset/Training... pituitary\n1 /kaggle/input/brain-tumor-mri-dataset/Training... pituitary\n2 /kaggle/input/brain-tumor-mri-dataset/Training... pituitary\n3 /kaggle/input/brain-tumor-mri-dataset/Training... pituitary\n4 /kaggle/input/brain-tumor-mri-dataset/Training... pituitary\n... ... ...\n5707 /kaggle/input/brain-tumor-mri-dataset/Training... glioma\n5708 /kaggle/input/brain-tumor-mri-dataset/Training... glioma\n5709 /kaggle/input/brain-tumor-mri-dataset/Training... glioma\n5710 /kaggle/input/brain-tumor-mri-dataset/Training... glioma\n5711 /kaggle/input/brain-tumor-mri-dataset/Training... glioma\n\n[5712 rows x 2 columns]","text/html":"
\n\n
\n \n \n \n Class Path \n Class \n \n \n \n \n 0 \n /kaggle/input/brain-tumor-mri-dataset/Training... \n pituitary \n \n \n 1 \n /kaggle/input/brain-tumor-mri-dataset/Training... \n pituitary \n \n \n 2 \n /kaggle/input/brain-tumor-mri-dataset/Training... \n pituitary \n \n \n 3 \n /kaggle/input/brain-tumor-mri-dataset/Training... \n pituitary \n \n \n 4 \n /kaggle/input/brain-tumor-mri-dataset/Training... \n pituitary \n \n \n ... \n ... \n ... \n \n \n 5707 \n /kaggle/input/brain-tumor-mri-dataset/Training... \n glioma \n \n \n 5708 \n /kaggle/input/brain-tumor-mri-dataset/Training... \n glioma \n \n \n 5709 \n /kaggle/input/brain-tumor-mri-dataset/Training... \n glioma \n \n \n 5710 \n /kaggle/input/brain-tumor-mri-dataset/Training... \n glioma \n \n \n 5711 \n /kaggle/input/brain-tumor-mri-dataset/Training... \n glioma \n \n \n
\n
5712 rows × 2 columns
\n
"},"metadata":{}}],"execution_count":6},{"cell_type":"code","source":"ts_df = test_df('/kaggle/input/brain-tumor-mri-dataset/Testing')","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:44:25.824615Z","iopub.execute_input":"2024-07-28T12:44:25.824969Z","iopub.status.idle":"2024-07-28T12:44:26.048128Z","shell.execute_reply.started":"2024-07-28T12:44:25.824936Z","shell.execute_reply":"2024-07-28T12:44:26.047317Z"},"trusted":true},"outputs":[],"execution_count":7},{"cell_type":"code","source":"ts_df","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:44:26.050134Z","iopub.execute_input":"2024-07-28T12:44:26.050473Z","iopub.status.idle":"2024-07-28T12:44:26.061200Z","shell.execute_reply.started":"2024-07-28T12:44:26.050444Z","shell.execute_reply":"2024-07-28T12:44:26.060152Z"},"trusted":true},"outputs":[{"execution_count":8,"output_type":"execute_result","data":{"text/plain":" Class Path Class\n0 /kaggle/input/brain-tumor-mri-dataset/Testing/... pituitary\n1 /kaggle/input/brain-tumor-mri-dataset/Testing/... pituitary\n2 /kaggle/input/brain-tumor-mri-dataset/Testing/... pituitary\n3 /kaggle/input/brain-tumor-mri-dataset/Testing/... pituitary\n4 /kaggle/input/brain-tumor-mri-dataset/Testing/... pituitary\n... ... ...\n1306 /kaggle/input/brain-tumor-mri-dataset/Testing/... glioma\n1307 /kaggle/input/brain-tumor-mri-dataset/Testing/... glioma\n1308 /kaggle/input/brain-tumor-mri-dataset/Testing/... glioma\n1309 /kaggle/input/brain-tumor-mri-dataset/Testing/... glioma\n1310 /kaggle/input/brain-tumor-mri-dataset/Testing/... glioma\n\n[1311 rows x 2 columns]","text/html":"\n\n
\n \n \n \n Class Path \n Class \n \n \n \n \n 0 \n /kaggle/input/brain-tumor-mri-dataset/Testing/... \n pituitary \n \n \n 1 \n /kaggle/input/brain-tumor-mri-dataset/Testing/... \n pituitary \n \n \n 2 \n /kaggle/input/brain-tumor-mri-dataset/Testing/... \n pituitary \n \n \n 3 \n /kaggle/input/brain-tumor-mri-dataset/Testing/... \n pituitary \n \n \n 4 \n /kaggle/input/brain-tumor-mri-dataset/Testing/... \n pituitary \n \n \n ... \n ... \n ... \n \n \n 1306 \n /kaggle/input/brain-tumor-mri-dataset/Testing/... \n glioma \n \n \n 1307 \n /kaggle/input/brain-tumor-mri-dataset/Testing/... \n glioma \n \n \n 1308 \n /kaggle/input/brain-tumor-mri-dataset/Testing/... \n glioma \n \n \n 1309 \n /kaggle/input/brain-tumor-mri-dataset/Testing/... \n glioma \n \n \n 1310 \n /kaggle/input/brain-tumor-mri-dataset/Testing/... \n glioma \n \n \n
\n
1311 rows × 2 columns
\n
"},"metadata":{}}],"execution_count":8},{"cell_type":"code","source":"# Count of images in each class in train data\nplt.figure(figsize=(15,7))\nax = sns.countplot(data=tr_df , y=tr_df['Class'])\n\nplt.xlabel('')\nplt.ylabel('')\nplt.title('Count of images in each class', fontsize=20)\nax.bar_label(ax.containers[0])\nplt.show()","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:44:26.062338Z","iopub.execute_input":"2024-07-28T12:44:26.062615Z","iopub.status.idle":"2024-07-28T12:44:26.386783Z","shell.execute_reply.started":"2024-07-28T12:44:26.062592Z","shell.execute_reply":"2024-07-28T12:44:26.385815Z"},"trusted":true},"outputs":[{"output_type":"display_data","data":{"text/plain":"","image/png":"iVBORw0KGgoAAAANSUhEUgAABP4AAAJlCAYAAABKcTx6AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABepklEQVR4nO3debRd4/0/8PfNdDPPMiIDMQdBYiqCkKDm0ppKqamGxlwtklI/ilJTKdWgX6W0+JbWEFMEMQQpMURiniMhIlGR5J7fH1bON1dyb0KGm2yv11pnrXPOfvazP3uf59zc+86z964olUqlAAAAAACFUq+uCwAAAAAAFj/BHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwDANzR58uSceOKJWXPNNdOkSZNUVFSkoqIiv//97xe6j4MOOigVFRXp3r37EquTxa9///6pqKhI//7967qU5d61115b/u688cYbdV1OjYYOHVquEwCWNw3qugAA4P98+eWX+cc//pG77rorTz75ZD766KNMnTo1rVq1Srdu3dKvX7/sueee2WabbVKvnv+/qwuffvppNt1004wfP76uSwEAgFoJ/gBgGXHrrbfmhBNOmO/Ml8mTJ2fy5Ml55plncuWVV2a11VbLhRdemJ122mnpF7oUde/ePW+++WYOPPDAXHvttXVdTpLk8ssvL4d+J598cnbeeee0bt06SdK5c+c6rAwAAKoT/AHAMuCss87KGWecUX693XbbZZdddslaa62V1q1b5+OPP864ceNyxx13ZPjw4XnllVfyq1/9qvDB37LovvvuS5JstNFG+e1vf/ut+7n22muXmTCThffQQw/VdQkAAAtN8AcAdWzYsGHl0K9Dhw65+eabs9VWW83TbsCAATnqqKMyduzYHHfccfnoo4+Wdqkkeffdd5Mkq622Wh1XAgAAtRP8AUAdevfdd3P00UcnSZo1a5YRI0ZkjTXWqHWdddZZJ/fcc0/++te/Lo0S+ZoZM2YkSRo2bFjHlQAAQO1cFRwA6tBFF12Uzz//PEly5plnLjD0m6NevXrZf//9a1z+yCOP5IADDkj37t3TuHHjtG7dOn369Mlpp51W60zBhb3L5htvvFFuN7/TVb9+x9opU6bkjDPOyNprr51mzZqldevW2XLLLXPDDTfMt/85d0598803kyTXXXddeXtzHotyV9U77rgjP/jBD7LiiiumsrIy7dq1y6abbppzzz0306ZNm6f9Qw89VN5uTTV903oWdFffOf0OHTo0SfLggw9mt912S5cuXdKkSZOsueaaOeusszJ9+vRq6/373//OjjvuWG631lpr5ZxzzsmXX35ZYy1ffvll7rjjjhx99NHp27dv2rRpk4YNG6Zdu3bZeOONM3To0EyaNGmh9uuRRx7JnnvumU6dOqVx48bp2bNnjjjiiEyYMCHJwt8Vd8KECTnuuOPSu3fvtGrVKk2aNEnPnj1z0EEHZfTo0bWu+8UXX+SSSy5J//79s8IKK6Rhw4Zp27ZtVl999eywww658MILv/VdZGurf37fi+HDh2fnnXdOp06dUllZmR49euTII4/MO++88622Pz+333579tprr6y88srl7/tGG22UX//61/nkk09qXffxxx/Paaedlv79+6dTp05p1KhRWrZsmbXWWitHHnlkXnzxxYWu49///nf233//9OzZM82aNUvjxo3To0eP7Lnnnrn22mvLP+tqUlVVlauuuiqbbbZZ2rRpk2bNmmXdddfN2WefvcB1F9aMGTNy1VVXZaeddkrXrl1TWVmZZs2aZe21185Pf/rT3HPPPSmVSt+oz8X1/XnggQeyzz77pEePHmnSpEmaNm2abt26ZZNNNsmJJ56YBx54YL7rTZkyJWeffXY23XTT8rZXWGGFrLXWWtl9991zxRVX5MMPP/xG+wRAgZQAgDpRVVVVat++fSlJqVmzZqWpU6cucp+zZ88uHXXUUaUkNT5atWpVuvfee+e7/rBhw8rtXn/99Rq38/rrr5fbDRs2bJ7lBx54YClJqVu3bqWXX3651L179xrrOeqoo+ZZf6uttqp1H5KUttpqq298fP773/+Wdt9991r77dKlS+nZZ5+ttt6DDz642OuZ+xjNz5x+hwwZUjrnnHNKFRUV893uZpttVpo2bVqpqqqqdOyxx9ZY36BBg0qzZs2qtZbaHu3atSs98sgjte7TueeeW2OdLVq0KN1zzz3lz7a243X++eeXGjZsWGMtFRUVpdNPP32+67733nultdZaa4H7c8IJJ9S6LzWprf6vfy9+8Ytf1Lj9FVZYofTiiy9+qxrm+Pjjj0vbbLNNrfvZoUOH0qhRo+a7/tzf95oe9evXL11++eW11jFp0qTStttuu8C+vv6zYu7tv/DCC7X20a9fv9K0adMW6Xg9++yzpR49eiywzq//7BsyZEh52fwsju/P4MGDF6qPr3vxxRdLXbp0WeC6l1566SIdOwCWX071BYA68sILL5RngWyxxRZp0aLFIvf5i1/8IpdffnmSpEePHjnllFOywQYbZPr06fnnP/+Zyy67LJ9++mm+//3v58knn8x66623yNuszeeff56dd945kydPzmmnnZYBAwakefPmefbZZ/PrX/8677zzTi6//PLsvPPOGThwYHm9YcOGZfr06Rk4cGDee++97LrrrvnNb35Tre9mzZp943oOPPDA3HbbbUmS9dZbLyeccELWXHPNfPzxx7npppty7bXX5r333su2226b5557Ll27dk2S9O3bN88//3yS1FjTt6lnYdx1
11158skns+mmm+aYY47JaqutlkmTJuXiiy/OXXfdlcceeyznnHNO2rZtm0suuSQ77LBDfvrTn6Z79+555513cs455+Txxx/P3XffnauvvjpHHHHEPNuYNWtWevbsmd133z39+vXLyiuvnAYNGuTNN9/Mfffdlz//+c+ZPHlydt9994wdOzYdOnSYp4+bb745v/jFL5Ikbdu2zSmnnJItttgiSTJy5Mice+65+dGPfpQVVlih1v09//zzc/LJJydJ1l133Rx55JHp1atXWrdunXHjxuWyyy7LqFGjctZZZ6V9+/Y59thjq61/zDHHlGep7b///tljjz3SpUuX1K9fP++//35Gjx6d//3f//3mH8Q3dPXVV+exxx7LVlttlcMPPzyrrbZapkyZkuuvvz7XX399Pvrooxx88MEZNWrUt+p/xowZGTBgQJ555pnUr18/++67b3bcccf06NEjM2fOzMMPP5wLL7wwEydOzI477phnn3023bp1q9bHrFmz0qZNm+y6667Zcsst06tXrzRr1izvvfdennnmmVxyySWZNGlSjj766KyxxhrZZptt5qnj888/z9Zbb13+fmy44YY57LDDss4666SysjJvv/12Hn744fztb3+rdX8OPfTQPP744znwwAOz9957p1OnTnnrrbdy3nnnZdSoUXnyySfzm9/8Juecc863Ol4vvfRStthii/KM3t133z0/+tGP0rNnz8yePTuvvPJK7r333vLPh29iUb8/d955Z37/+98n+b8xv+aaa6ZVq1aZMmVKXnjhhdx333158skn59n2AQcckPfeey8NGzbMoYcemh122CGdOnVKVVVV3nnnnTz++OPfap8AKJC6Th4B4Lvqf/7nf8qzMX71q18tcn/PPfdcqV69eqUkpXXWWaf0ySefzNPmrrvuKrfp16/fPMsX94y/5KsZhmPHjp2nzfjx40uNGzcuJSntsssu891Ot27dSklKBx54YI21LKw777yzXNO2225bmjFjxjxtrrrqqnKbvffee4nWtLAz/pKU9txzz3lm682aNau0ySablGfTNW7cuDR48OB5+pk+fXq55nXXXXe+25owYUKpqqqqxlqfe+65UvPmzUtJSqeddto8y7/44otSx44dS0lK7du3L40fP36eNuPGjSu1bdu2vE/zmzH3wgsvlGf6DRkyZL41zZ49u7T//vuXkpSaN29e+vjjj8vL/vvf/5bXX9CMvsmTJ9e6vCYLO+MvSenQQw+d7z789Kc/Lbd55plnvlUdv/zlL0tJSq1bty6NHj16vm3eeOONUufOnUtJSvvuu+88y995553S9OnTa9zGlClTSuuuu24pSel73/vefNscd9xx5X056qijahxHM2bMKH3wwQfV3vv6jMO//OUv86z3xRdflNZZZ53yjLeZM2fWWG9tNthgg1KSUr169Uo33nhjje0mTZpU+vzzz6u9t6AZf4v6/TnggAPKPws+++yzGvv5+ph99dVXF2pGX1VVVbXvCQDfLa7xBwB1ZPLkyeXn85tB9U1dccUVqaqqSpL86U9/SuvWredpM2jQoBx88MFJkieffDJPPfXUIm93Qc4666ysvfba87y/6qqrZrfddkvy1XXhlrQ5MyEbNmyYYcOGpVGjRvO0OfTQQzNgwIAkya233pr3339/ide1IE2bNs1VV12V+vXrV3u/fv36Oeyww5Ikn332WVZYYYWcd955813/wAMPTJI899xz+fTTT+dps8oqq6SioqLGGnr37p2f/vSnSb66ntzX3X777eVriA0dOjSrrrrqPG1WW221DBkypMZtJMnvfve7zJw5MxtttFGGDBky35rq1auXSy+9NJWVlZk2bVr+/ve/l5d9/PHHmTlzZpJkyy23rHVbbdu2rXX5ourcuXMuvfTS+e7DiSeeWH4+cuTIb9z3tGnTyuP5rLPOyoYbbjjfdt26dcvpp5+eJLnlllvmuR5k165d07Rp0xq306pVq5x55plJvvqOzv0zK/nq2nJ//OMfk3w10+/iiy+ucRw1atQoHTt2rHFbe+yxx3yvW1pZWVm+AdLkyZO/0TUH57j33nvzzDPPJEmOPfbY/OhHP6qxbbt27dKkSZNv1P+ifn8++OCDJMkGG2yQ5s2b19jP18fsnPWS2sd7RUVF2rRpU+NyAIpN8AcAdeSzzz4rP18cp4ned999SZK11147G2+8cY3tDj300HnWWVIqKiqy77771rh8TmDx8ccfZ8qUKUusjlmzZmXEiBFJku233z4rrbRSjW3nHJ9Zs2bloYceWmI1LaztttuuxpBq7lO199hjjxrvNDx3u9dff32B2/zkk0/y6quv5oUXXsjYsWMzduzYcpD84osvlsO1OeaMo3r16mW//farsd/999+/1oDkjjvuSJLsueeetbZr3bp1evfunSTVTpVt165dOdD9y1/+klmzZtWyl0vWD37wg1RWVs532eqrr14OeF577bVv3PeIESPKAe4PfvCDWtvOCYRmzpyZp59+uta206dPzxtvvFHtc597TP3nP/+p1v6BBx4o33Tj2GOPnSec/iZqGzdzB5vf5njdeeed5eeDBw/+xut/U9/0+9O5c+ckycMPP5xXX311obczZ70k873JEgAkiWv8AUAdmfuafl+fifNNzZgxI+PHj0+SWkO/JOnTp08aNmyYmTNnZuzYsYu03QVp37592rVrV+PyuQOtzz77bL6zFBeH1157rRxQLOj4zL18SR+fhbHaaqvVuGzu47Ww7eYOnOf2/PPP56KLLspdd91VbSbR11VVVeWTTz6pNkt1znHq2bNnrZ9h27Zt07Nnz/mGG2+++Wb5jtOnnnpqTj311Br7mdvctVZWVuaHP/xh/vKXv+Tvf/97nnrqqey9997p379/NttssyU2vuZnQXfobtOmTaZNm1bj51Gbue9qPHf4syDz+1wnTZqUCy+8MP/4xz8yfvz4Wu9o+/U70z777LPl53Ou5/ht1Xa8vv5z4puaU+fKK688z3UOF5dF+f78+Mc/zvXXX5/JkydnnXXWya677pqBAwdmiy22mO/s2Tl69OiRLbbYIiNHjsxFF12Ue+65J3vuuWf69++fTTbZpNbZnAB8d5jxBwB1ZO5AbM5pkt/WJ598Un6+oNOGGzZsWN72xx9/vEjbXZAF/eFZr97//Soye/bsJVbH3Pu5oOPTqVOn+a5XV2o7hnMfv4VtN7/jfM0112SDDTbIsGHDag0t5vjvf/9b7fWc8begG3fU1mbixIkLXHd+5gS6c1x22WXZeeedk3wVJp5//vnZaaed0q5du/Tt2zfnn3/+fE93XtwWdux/m3G/uI7V008/nTXWWCPnnHNOXnnllVpDv2Tez33uIPCbBJDzsyjjd0Hm1LmoNdZkUb8/2267bS677LI0adIkX3zxRf72t7/l4IMPTq9evbLiiivmiCOOmGe25Rw33nhjNt100yRfzSY866yzsu2226Z169bZcsstc+WVV+aLL75Y9J0EYLllxh8A1JG5T7+cc/2pxaG2UyRxfL7u5ZdfzhFHHJFZs2alQ4c
OOemkk7LNNtuke/fuadGiRflUzz//+c855JBDkmSBAdG3MXegc8YZZ2SvvfZaqPW+fpp8y5Yt889//jNPPvlkbr755jz00EMZM2ZMZs+endGjR2f06NG54IILcvvtt5cDk+XN3MfqmWeeqfEU769bccUVy8+//PLL7L333pk8eXIaNmyYY445JrvuumtWW221tGnTpnya8muvvZZVVlklyZL53Jd3i+v7c9RRR2WvvfbKX//61wwfPjyPPvpoPv3007z77rv54x//mKuuuiq//OUv57m7edeuXfPYY4/l/vvvz6233poRI0aUTyceOXJkRo4cmQsuuCD//ve/a50VDEBxCf4AoI6svfbaad++fSZNmpSRI0dm6tSpadmy5bfqa+4Lty9o9uCsWbPKF+n/+rXj5p5ZM+dGIfOzqKcmL21z7+eCjs/cM3aW9A0glgXXXnttZs2alfr162fEiBE1nnJZ2+zHOeNvzqm6tampzdwzYBs2bJh11llngX3Vpl+/funXr1+Sr04Pfeihh3Lttdfm1ltvzcSJE7Pnnnvm1Vdf/cY3clgWzH2sVlhhhWqB3sJ64IEHytfL+8Mf/lC++cTX1fa5t2/fvvz8/fffT48ePb5xHUvDnDqXxM16Fsf3Z44OHTpk8ODBGTx4cKqqqjJmzJjcdtttueyyyzJlypScffbZ6du3b3bdddd51t12222z7bbbJvnqJij33XdfrrrqqjzwwAN59dVX88Mf/rDaqdkAfHc41RcA6khFRUX5bqvTp0/Pn/70p2/dV2VlZXr16pUkeeKJJ2pt++yzz5YvLv/1cGXu6w7Offrw173yyivfttRvZHHNzuvZs2f5VMIFHZ8nn3yy/HxRw6flwQsvvJDkqxmotV1nbe7ryn3dnLs2v/baa7WOm48//rjGmzP07NkzrVq1SpI8+uijC6z7m2jRokV23nnn/OMf/8ixxx6b5KsQaGncTXpJ6NOnT/n5tz1Wcz73JPnhD39YY7vaPvcNNtig/Pzhhx/+VnUsDXPqfOutt/Lmm28u1r4Xx/dnfurVq5cNNtggZ511Vu6///7y+zfffPMC123Xrl1++MMf5v77788uu+ySJBkzZkz5OrAAfLcI/gCgDh133HHlQOqMM87Iyy+/vFDrVVVV5YYbbqj23oABA5J89Yfo3OHV180dMM5ZZ465Z+zU9ofqjTfeuFB1LqrGjRsn+ermJYuiQYMG2WqrrZIkw4cPzzvvvFNj2znHp0GDBunfv/8ibXd5MOfOt7XN4nz//ffzz3/+s8blc2YaVVVV5a9//WuN7f7nf/6nxtNF69evnx133DFJcu+99+all15aYO3fxpxak3lvVrG8GDBgQPnnxiWXXPKtTsGd+47HNX32VVVVufrqq2vsY+utty6fan3ppZcu0et0Loo513xMkosuumix9r04vj8LssEGG5Rn1X7TMVuE8Q7AohH8AUAd6tq1ay677LIkX/3huNVWW2XEiBG1rvPiiy9m0KBBOf/886u9f+SRR5ZP1T3ssMMyderUeda99957c8011yT56lTIvn37Vlu+zjrrlE9vveyyy+YbuN1888255ZZbFnIPF82ci/HP7y6w39RRRx2V5Ktrmx1yyCHlWY9z+/Of/5x77703SbLHHnsssZsBLEvmzBQdP358HnvssXmWf/7559l3333nuSHB3HbffffyTVOGDh06389r/Pjx+fWvf11rLaeeemrq16+fqqqq/OAHP6g1oJ09e3ZuuOGGam1ee+21BX5/5ny+SZbZU1MXpHXr1jn66KOTJI899liOO+64Wk/N//DDD+eZUTznc0++Ol11fk499dRarz/aunXrHH744Um+ulHI4MGDawwhZ86c+a1vSrKoBgwYkA033DDJVwHlTTfdVGPbyZMn1zrWv25xfH/+9re/1bp89OjR5Zm0c4/ZMWPGZMyYMTWuVyqVct999yX5avZ09+7da9sVAArKNf4AoI795Cc/yTvvvJMzzjgjEydOTP/+/bP99ttn1113zZprrpnWrVvn448/ziuvvJJ//etfufvuuzN79uxqNwdJkt69e+eEE07I+eefn//85z/ZYIMNcsopp6RPnz6ZPn167rjjjlxyySWZPXt2GjVqlD/+8Y/z1NKgQYMcfvjhOeecczJ27Nhss802Ofnkk7Pyyivnww8/zC233JJrr702m2222Xz/yF3cNttsszz44IN56qmncu6552aHHXYozzBq0qRJunbtutB97bTTTtlrr71yyy235N57780mm2yS448/PmussUY++eST3HTTTfnzn/+c5Ktr+1144YVLZJ+WNQcccEAuvfTSVFVVZaeddspJJ52U733ve2ncuHGefvrpXHTRRRk/fnw233zzGk8rbdy4cX7/+99n3333zaRJk7LxxhvnlFNOyRZbbJHkq9NAf/vb36aqqiq9evXK+PHj53sad+/evXPBBRfkuOOOy4svvph11lknhx12WLbZZpt07NgxX3zxRd54442MGjUqf//73/P+++/n+eefL1/j7q233srWW2+dtdZaK7vvvns22mij8hh5++2387e//a18quT666+fjTfeeEkc0qXizDPPzIgRI/LEE0/k4osvzkMPPZRDDz0066+/fpo1a5ZPPvkkL7zwQu67777cdddd6d27d7Xr+A0cODAdOnTIxIkTc9ppp+WNN97I7rvvnvbt22fChAm5+uqrc//999f6uSfJWWedleHDh+f555/PZZddllGjRuXwww9P796906hRo7zzzjsZOXJkbrzxxvzmN7/JQQcdtBSOzrz+8pe/pF+/fpk2bVr22Wef3HLLLfnRj36Unj17Zvbs2ZkwYULuvffe/P3vf8/YsWMXOiRbHN+fU045JUcccUR23XXXbLnllllttdXSrFmzTJ48OY888kguvfTSJF/Nip37MxwzZkx+8pOfpG/fvtl5552zwQYbpFOnTpk5c2Zef/31DBs2LMOHD0+S7LLLLt+J/8gAYD5KAMAy4R//+Eepe/fupSQLfKy99tqle+65Z54+Zs+eXfrZz35W67qtWrWa77pzTJ8+vbTJJpvUuH7//v1LY8eOLb8eNmzYPH0ceOCBpSSlbt261brPw4YNK/fz+uuvz7P8nXfeKbVt23a+dWy11VYLOKLz+u9//1vafffdaz0+Xbp0KT377LM19tGtW7dSktKBBx74jbc/twUdozn1DBkypMY+Xn/99Vo/hzkefPDBcrsHH3xwnuW//vWvaz0mJ5xwwgI/q1KpVPrNb35TqqiomG8fTZs2Lf3rX/8qbbHFFqUkpUGDBtVY71VXXVVq2rTpAr8HjRo1Ko0fP36++1nbY4011ii99tprNW6/NltttVWN429hP49SafGMo6lTp5b22GOPhdrnrbfeep7177777lLjxo0X6bteKpVKH330UWnLLbdcYA1fX39hxlSp9M2Oa21Gjx5dWmmllRZY59drGTJkSHnZ/Czq92fOWKjtUVlZWevxq+2x2WablSZNmvStjxsAyzen+gLAMmKPPfbIuHHjcsMNN2T//f
fP6quvnjZt2qRBgwZp27ZtNthgg/zsZz/LAw88kOeffz7bb7/9PH3Uq1cvl19+eR5++OHst99+WXnllVNZWZmWLVtm/fXXzy9/+cuMHz9+vuvO0bRp0zzwwAM5++yz07t37zRp0iQtW7ZM3759c9lll+W+++4rz7pb0rp27Zonn3wyhxxySFZdddXyNf++rcaNG+fWW2/NP//5z+yxxx7p0qVLGjVqlDZt2mTjjTfOOeeck3HjxmX99ddfPDuwnDjjjDPyr3/9K9tvv33atGmTRo0aZcUVV8wee+yRe++9NxdccMFC9fOrX/0qI0aMyG677ZYOHTqksrIy3bp1y8EHH5zRo0dnxx13LJ+CPudGHvNz6KGH5rXXXsuvf/3rbL755mnfvn0aNGiQZs2aZbXVVsuee+6ZK6+8Mu+++25WXXXV8npbbLFFHnrooZx66qnZeuuts+qqq6ZFixZp2LBhOnbsmO233z5XXnllxowZs9ye5ju3Fi1a5B//+EdGjhyZn/70p1l99dXTokWL8s+Mvn375qijjsq///3v8syvuQ0cODCjR4/O/vvvny5duqRhw4ZZYYUVstVWW+Wqq67K/fffv1Df9fbt22fEiBG59dZb84Mf/CArrrhiKisr07hx4/Ts2TN77bVXbrjhhuyzzz5L4jAstA033DDjxo3LJZdckm222SYdOnRIgwYN0rx58/Tu3TuHHXZY7r///m98Suyifn8efPDBXHzxxdlzzz3Tu3fvrLDCCmnQoEFatmyZPn365MQTT8yLL744z2zJffbZJ//+979z3HHH5Xvf+1569OiRpk2blre/yy675IYbbsjIkSOr3QkagO+WilLpW1wNGAAAvqGZM2emVatW+e9//5vTTjstZ511Vl2XBABQaGb8AQCwVNx+++3lmxhssskmdVwNAEDxmfEHAMBiMWHChGqn3s7tjTfeyBZbbJF33nknHTt2zDvvvJMGDdxnDgBgSfLbFgAAi8Uaa6yRHXfcMd///vez9tprp1mzZpk4cWIefPDBXHnllZkyZUqS5IILLhD6AQAsBWb8AQCwWFRUVNS6vF69evnNb36TU089dSlVBADw3ea/WgEAWCzuuOOO3HXXXXnsscfy4YcfZvLkyamsrEzXrl3Tv3//HHXUUVlnnXXqukwAgO8MM/4AAAAAoIDM+FsOVFVV5b333kuLFi0WeAoNAAAAAMVWKpXy2WefpUuXLqlXr16N7QR/y4H33nsvK620Ul2XAQAAAMAy5O23386KK65Y43LB33KgRYsWSb76MFu2bFnH1QAAAABQl6ZOnZqVVlqpnBnVRPC3HJhzem/Lli0FfwAAAAAkyQIvCVfzScAAAAAAwHJL8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACalDXBbDwtjztxtSvbFLXZQAAAAAL6enzf1zXJfAdZsYfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUEDLdfDXvXv3/P73v1/kfvr375/Bgwcvcj8AAAAAX/fwww9n5513TpcuXVJRUZHbb7+9xrZHHHFEKioq5sk7unfvnoqKimqPc889t7x86NCh8yyvqKhIs2bNltBesTxoUNcFLIqnnnqq2gCuqKjIbbfdlt122+0b9XPrrbemYcOG5dfdu3fP4MGDhYEAAADAIps+fXrWW2+9HHzwwdljjz1qbHfbbbfl8ccfT5cuXea7/Mwzz8yhhx5aft2iRYvy8xNPPDFHHHFEtfbbbrtt+vbtu4jVszxbroO/FVZYYbH007Zt28XSz9d9+eWXadSo0RLpGwAAAFg+7LDDDtlhhx1qbfPuu+/mmGOOyT333JOddtppvm1atGiRTp06zXdZ8+bN07x58/Lr//znP3nxxRdz5ZVXfvvCWe4t06f69u/fP0cffXSOPvrotGrVKu3bt8/pp5+eUqmUpPqpvt27d0+S7L777qmoqCi/Puigg+aZATh48OD079+/2nbmzO7r379/3nzzzRx33HHlabFJMnny5Oyzzz7p2rVrmjZtmt69e+fGG2+cb72DBw9O+/btM3DgwBx88MH5/ve/X63dzJkz06FDh1xzzTWLfpAAAACA5VpVVVUOOOCAnHTSSVl77bVrbHfuueemXbt26dOnT84///zMmjWrxrZ/+tOfstpqq2WLLbZYEiWznFjmZ/xdd911OeSQQ/Lkk09m9OjROeyww7LyyitXm9qafHXab4cOHTJs2LAMGjQo9evX/1bbu/XWW7PeeuvlsMMOq7aNL774IhtuuGFOOeWUtGzZMv/6179ywAEHZJVVVkm/fv2q1XvkkUfm0UcfTfJVYLjlllvm/fffT+fOnZMkd955Zz7//PP88Ic//FY1AgAAAMXx29/+Ng0aNMixxx5bY5tjjz02G2ywQdq2bZvHHnssp556at5///1ceOGF87T94osvcsMNN+QXv/jFkiyb5cAyH/yttNJKueiii1JRUZHVV189zz//fC666KJ5gr85p/22bt26xmmvC6Nt27apX7/+PNNnu3btmhNPPLH8es7025tvvrla8NerV6+cd9551fpcffXV85e//CUnn3xykmTYsGHZa6+9qk3BnduMGTMyY8aM8uupU6d+6/0BAAAAll1PP/10Lr744jzzzDPlsw7n5/jjjy8/X3fdddOoUaMcfvjhOeecc1JZWVmt7W233ZbPPvssBx544BKrm+XDMn2qb5Jssskm1Qb+pptumvHjx2f27NlLtY7Zs2fnrLPOSu/evdO2bds0b94899xzT956661q7TbccMN51v3pT3+aYcOGJUk+/PDD3HXXXTn44INr3NY555yTVq1alR8rrbTS4t0ZAAAAYJkwcuTITJw4MSuvvHIaNGiQBg0a5M0338wJJ5xQvozZ/Gy88caZNWtW3njjjXmW/elPf8r3v//9dOzYcckVznJhmQ/+FlW9evXK1wScY+bMmd+4n/PPPz8XX3xxTjnllDz44IMZM2ZMBg4cmC+//LJau/ndJvvHP/5xXnvttYwaNSr/8z//k
x49etR6jv2pp56aTz/9tPx4++23v3G9AAAAwLLvgAMOyHPPPZcxY8aUH126dMlJJ52Ue+65p8b1xowZk3r16qVDhw7V3n/99dfz4IMP5pBDDlnSpbMcWOZP9X3iiSeqvX788cfTq1ev+V7Dr2HDhvPMBFxhhRUyduzYau+NGTMmDRs2rHGbjRo1mqefRx99NLvuumv233//JF9dePOVV17JWmuttcB9aNeuXXbbbbcMGzYso0aNyk9+8pNa21dWVs4zTRcAAABYPk2bNi0TJkwov3799dczZsyYtG3bNiuvvHLatWtXrX3Dhg3TqVOnrL766kmSUaNG5YknnsjWW2+dFi1aZNSoUTnuuOOy//77p02bNtXW/fOf/5zOnTsv8C7CfDcs8zP+3nrrrRx//PEZN25cbrzxxlx66aX5+c9/Pt+23bt3z/33358PPvggn3zySZJkm222yejRo3P99ddn/PjxGTJkyDxB4Pz6efjhh/Puu+9m0qRJSb66dt/w4cPz2GOP5aWXXsrhhx+eDz/8cKH346c//Wmuu+66vPTSS86xBwAAgO+Q0aNHp0+fPunTp0+Sr67X16dPn5xxxhkLtX5lZWVuuummbLXVVll77bVz9tln57jjjstVV11VrV1VVVWuvfbaHHTQQd/6pqcUyzI/4+/HP/5x/vvf/6Zfv36pX79+fv7zn+ewww6bb9vf/e53Of7443P11Vena9eueeONNzJw4MCcfvrpOfnkk/PFF1/k4IMPzo9//OM8//zzNW7zzDPPzOGHH55VVlklM2bMSKlUymmnnZbXXnstAwcOTNOmTXPYYYdlt912y6effrpQ+zFgwIB07tw5a6+9drp06fKtjgUAAACw/Onfv/88lyGrzdev27fBBhvk8ccfX+B69erVc7kwqqkofZORt5T1798/66+/fn7/+9/XdSmLbNq0aenatWuGDRuWPfbY4xutO3Xq1LRq1SrrHXNl6lc2WUIVAgAAAIvb0+f/uK5LoIDmZEWffvppWrZsWWO7ZX7G3/KuqqoqkyZNyu9+97u0bt06u+yyS12XBAAAAMB3gOBvCXvrrbfSo0ePrLjiirn22mvToIFDDgAAAMCSt0ynUA899FBdl7DIunfv/o3O4wcAAACAxWGZv6svAAAAAPDNCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFFCDui6Ahffwb/ZJy5Yt67oMAAAAAJYDZvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggBrUdQEsvLfP3SQtGtev6zIAAAAAlgsrn/F8XZdQp8z4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAgMJ6+OGHs/POO6dLly6pqKjI7bffXm35QQcdlIqKimqPQYMGVWvzzDPPZLvttkvr1q3Trl27HHbYYZk2bVq1Nl/vo6KiIjfddNOS3r1aCf4AAAAAKKzp06dnvfXWy+WXX15jm0GDBuX9998vP2688cbysvfeey8DBgzIqquumieeeCJ33313XnjhhRx00EHz9DNs2LBq/ey2225LYI8WXoM63foSNnTo0Nx+++0ZM2ZMXZcCAAAAQB3YYYcdssMOO9TaprKyMp06dZrvsjvvvDMNGzbM5Zdfnnr1vppDd+WVV2bdddfNhAkTsuqqq5bbtm7dusZ+6oIZf3WoVCpl1qxZdV0GAAAAwHfaQw89lA4dOmT11VfPkUcemcmTJ5eXzZgxI40aNSqHfknSpEmTJMkjjzxSrZ+jjjoq7du3T79+/fLnP/85pVJp6exADZbp4K9///459thjc/LJJ6dt27bp1KlThg4dWl7+1ltvZdddd03z5s3TsmXL7L333vnwww+TJNdee21+/etf5z//+U/5vOprr702b7zxRioqKqrNApwyZUoqKiry0EMPJfnqw66oqMg999yTPn36pEmTJtlmm20yceLE3HXXXVlzzTXTsmXL7Lvvvvn888/L/cyYMSPHHntsOnTokMaNG+d73/tennrqqfLyOf3edddd2XDDDVNZWTnPAAEAAABg6Rk0aFCuv/763H///fntb3+bESNGZIcddsjs2bOTJNtss00++OCDnH/++fnyyy/zySef5Be/+EWS5P333y/3c+aZZ+bmm2/O8OHDs+eee+ZnP/tZLr300jrZpzmW+VN9r7vuuhx//PF54oknMmrUqBx00EHZfPPNs+2225ZDvxEjRmTWrFk56qij8sMf/jAPPfRQfvjDH2bs2LG5++67c9999yVJWrVqVQ4GF8bQoUNz2WWXpWnTptl7772z9957p7KyMn/9618zbdq07L777rn00ktzyimnJElOPvnk/OMf/8h1112Xbt265bzzzsvAgQMzYcKEtG3bttzvL37xi1xwwQXp2bNn2rRpM892Z8yYkRkzZpRfT5069dsePgAAAABq8aMf/aj8vHfv
3ll33XWzyiqr5KGHHsq2226btddeu5xPnXrqqalfv36OPfbYdOzYsdoswNNPP738vE+fPpk+fXrOP//8HHvssUt1f+a2TM/4S5J11103Q4YMSa9evfLjH/84G220Ue6///7cf//9ef755/PXv/41G264YTbeeONcf/31GTFiRJ566qk0adIkzZs3T4MGDdKpU6d06tSpPA1zYf3mN7/J5ptvnj59+uSQQw7JiBEjcsUVV6RPnz7ZYost8oMf/CAPPvhgkq8uFHnFFVfk/PPPzw477JC11lorV199dZo0aZJrrrmmWr9nnnlmtttuu6yyyirVAsE5zjnnnLRq1ar8WGmllb79AQQAAABgofXs2TPt27fPhAkTyu/tu++++eCDD/Luu+9m8uTJGTp0aD766KP07Nmzxn423njjvPPOO9Umdy1ty0XwN7fOnTtn4sSJeemll7LSSitVC8XWWmuttG7dOi+99NJi33bHjh3TtGnTah9ox44dM3HixCTJq6++mpkzZ2bzzTcvL2/YsGH69es3Tz0bbbRRrds99dRT8+mnn5Yfb7/99uLYHQAAAAAW4J133snkyZPTuXPneZZ17NgxzZs3z9/+9rc0btw42223XY39jBkzJm3atEllZeWSLLdWy/ypvg0bNqz2uqKiIlVVVd+6vzlTMOe+uOLMmTMXuO2KiorFVkuzZs1qXV5ZWVmngwIAAACgKKZNm1Zt9t7rr7+eMWPGpG3btmnbtm1+/etfZ88990ynTp3y6quv5uSTT86qq66agQMHlte57LLLstlmm6V58+YZPnx4TjrppJx77rlp3bp1kuSOO+7Ihx9+mE022SSNGzfO8OHD8//+3//LiSeeuLR3t5plfsZfTdZcc828/fbb1WbDvfjii5kyZUrWWmutJEmjRo3KF2KcY4UVVkhS/eKLc9/o49taZZVV0qhRozz66KPl92bOnJmnnnqqXA8AAAAAS9fo0aPTp0+f9OnTJ0ly/PHHp0+fPjnjjDNSv379PPfcc9lll12y2mqr5ZBDDsmGG26YkSNHVpuU9eSTT2a77bZL7969c9VVV+WPf/xjtWv3NWzYMJdffnk23XTTrL/++vnjH/+YCy+8MEOGDFnq+zu3ZX7GX00GDBiQ3r17Z7/99svvf//7zJo1Kz/72c+y1VZblU+l7d69eznFXXHFFdOiRYs0adIkm2yySc4999z06NEjEydOzGmnnbbI9TRr1ixHHnlkTjrppLRt2zYrr7xyzjvvvHz++ec55JBDFrl/AAAAAL65/v37Vzvz8+vuueeeBfZx/fXX17p80KBBGTRo0DeubUlbbmf8VVRU5H//93/Tpk2bbLnllhkwYEB69uyZv/3tb+U2e+65ZwYNGpStt946K6ywQm688cYkyZ///OfMmjUrG264YQYPHpzf/OY3i6Wmc889N3vuuWcOOOCAbLDBBpkwYULuueee+d65FwAAAACWpIpSbZEny4SpU6emVatWGXvqmmnRuH5dlwMAAACwXFj5jOfruoQlYk5W9Omnn6Zly5Y1tltuZ/wBAAAAADUT/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACqhBXRfAwlvpF4+nZcuWdV0GAAAAAMsBM/4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQA3qugAW3nZXbpcGTXxkAAAALHmPHvNoXZcALCIz/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAA8/Xwww9n5513TpcuXVJRUZHbb7+92vKhQ4dmjTXWSLNmzdKmTZsMGDAgTzzxRLU2u+yyS1ZeeeU0btw4nTt3zgEHHJD33nuvWpubb74566+/fpo2bZpu3brl/PPPX9K7Bt8Jy0XwN78fLotq6NChWX/99RdrnwAAAFAk06dPz3rrrZfLL798vstXW221XHbZZXn++efzyCOPpHv37tl+++3z0UcfldtsvfXWufnmmzNu3Lj84x//yKuvvpof/OAH5eV33XVX9ttvvxxxxBEZO3Zs/vCHP+Siiy7KZZddtsT3D4quolQqleq6iAX54IMP0qZNm1RWVi62PqdNm5YZM2akXbt2i63PJWXq1Klp1apV+v22Xxo0aVDX5QAAAPAd8Ogxj1Z7XVFRkdtuuy277bZbjevM+fv1vvvuy7bbbjvfNv/85z+z2267ZcaMGWnYsGH23XffzJw5M7fccku5zaWXXprzzjsvb731VioqKhbL/kCRzPmuffrpp2nZsmWN7ZaLGX+dOnVarKFfkjRv3ny5CP0AAABgefDll1/mqquuSqtWrbLeeuvNt83HH3+cG264IZtttlkaNmyYJJkxY0YaN25crV2TJk3yzjvv5M0331zidUORfaPgr3///jnmmGMyePDgtGnTJh07dszVV1+d6dOn5yc
/+UlatGiRVVddNXfddVd5nbFjx2aHHXZI8+bN07FjxxxwwAGZNGlStT6PPfbYnHzyyWnbtm06deqUoUOHVtvu3Kf6vvHGG6moqMitt96arbfeOk2bNs16662XUaNGVVvn6quvzkorrZSmTZtm9913z4UXXpjWrVuXl3/9VN+qqqqceeaZWXHFFVNZWZn1118/d999d3n5nO3efPPN2WKLLdKkSZP07ds3r7zySp566qlstNFGad68eXbYYYdqU5qfeuqpbLfddmnfvn1atWqVrbbaKs8888w3OewAAACwzLrzzjvTvHnzNG7cOBdddFGGDx+e9u3bV2tzyimnpFmzZmnXrl3eeuut/O///m952cCBA3Prrbfm/vvvT1VVVV555ZX87ne/S5K8//77S3VfoGi+8Yy/6667Lu3bt8+TTz6ZY445JkceeWT22muvbLbZZnnmmWey/fbb54ADDsjnn3+eKVOmZJtttkmfPn0yevTo3H333fnwww+z9957z9Nns2bN8sQTT+S8887LmWeemeHDh9dax69+9auceOKJGTNmTFZbbbXss88+mTVrVpLk0UcfzRFHHJGf//znGTNmTLbbbrucffbZtfZ38cUX53e/+10uuOCCPPfccxk4cGB22WWXjB8/vlq7IUOG5LTTTsszzzyTBg0aZN99983JJ5+ciy++OCNHjsyECRNyxhlnlNt/9tlnOfDAA/PII4/k8ccfT69evbLjjjvms88+q7GWGTNmZOrUqdUeAAAAsCzaeuutM2bMmDz22GMZNGhQ9t5770ycOLFam5NOOinPPvts7r333tSvXz8//vGPM+fKY4ceemiOPvrofP/730+jRo2yySab5Ec/+lGSpF695eJERVhmfaNr/PXv3z+zZ8/OyJEjkySzZ89Oq1atsscee+T6669P8tX1+Dp37pxRo0blvvvuy8iRI3PPPfeU+3jnnXey0korZdy4cVlttdXm6TNJ+vXrl2222SbnnnvuV0XOdR2BN954Iz169Mif/vSnHHLIIUmSF198MWuvvXZeeumlrLHGGvnRj36UadOm5c477yz3uf/+++fOO+/MlClTknw14+/222/PmDFjkiRdu3bNUUcdlV/+8pfV6ujbt28uv/zy+W73pptuyj777JP7778/22yzTZLk3HPPzbXXXpuXX355vsewqqoqrVu3zl//+td8//vfn2+boUOH5te//vU877vGHwAAAEvLt7nGX5L06tUrBx98cE499dT5Lp+TCzz22GPZdNNNy+/Pnj07H3zwQVZYYYXcf//92XHHHTNx4sSssMIKi7wvUDRL7Bp/6667bvl5/fr1065du/Tu3bv8XseOHZMkEydOzH/+8588+OCDad68efmxxhprJEleffXV+faZJJ07d57nfwdqq6Nz587lbSbJuHHj0q9fv2rtv/56blOnTs17772XzTffvNr7m2++eV566aUatztnX7++/3PX/uGHH+bQQw9Nr1690qpVq7Rs2TLTpk3LW2+9VWM9p556aj799NPy4+23366xLQAAACxLqqqqMmPGjFqXJ5mnTf369dO1a9c0atQoN954YzbddFOhHyyibzx9bM7FN+eoqKio9t6cu+1UVVVl2rRp2XnnnfPb3/52nn7mhHU19TnnB8HC1DH3Npe0+W336+/NXceBBx6YyZMn5+KLL063bt1SWVmZTTfdNF9++WWN26isrFzsNzMBAACAb2ratGmZMGFC+fXrr7+eMWPGpG3btmnXrl3OPvvs7LLLLuncuXMmTZqUyy+/PO+++2722muvJMkTTzyRp556Kt/73vfSpk2bvPrqqzn99NOzyiqrlGf7TZo0KX//+9/Tv3//fPHFFxk2bFhuueWWjBgxok72GYpkiZ43usEGG+Qf//hHunfvngYNlt4pqquvvnqeeuqpau99/fXcWrZsmS5duuTRRx/NVlttVX7/0UcfrXWm4MJ49NFH84c//CE77rhjkuTtt9+udnMTAAAAWFaNHj06W2+9dfn18ccfn+SrSS5XXnllXn755Vx33XWZNGlS2rVrl759+2bkyJFZe+21kyRNmzbNrbfemiFDhmT69Onp3LlzBg0alNNOO63ahJfrrrsuJ554YkqlUjbddNM89NBDi/z3OLCEg7+jjjoqV199dfbZZ5/yXXsnTJiQm266KX/6059Sv379JbLdY445JltuuWUuvPDC7LzzznnggQdy1113lWfozc9JJ52UIUOGZJVVVsn666+fYcOGZcyYMbnhhhsWqZZevXrlL3/5SzbaaKNMnTo1J510Upo0abJIfQIAAMDS0L9//9R2a4Bbb7211vV79+6dBx54oNY27du3z6hRo75VfUDtlujtcebMops9e3a233779O7dO4MHD07r1q2X6J15Nt9881x55ZW58MILs9566+Xuu+/Occcdl8aNG9e4zrHHHpvjjz8+J5xwQnr37p277747//znP9OrV69FquWaa67JJ598kg022CAHHHBAjj322HTo0GGR+gQAAACABflGd/Vdnh166KF5+eWXq909eHkx504t7uoLAADA0vL1u/oCy46FvatvYVOkCy64INttt12aNWuWu+66K9ddd13+8Ic/1HVZAAAAALBUFDb4e/LJJ3Peeefls88+S8+ePXPJJZfkpz/9aV2XBQAAAABLRWGDv5tvvrmuSwAAAACAOrNEb+4BAAAAANQNwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAXUoK4LYOENP2J4WrZsWddlAAAAALAcMOMPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABdSgrgtg4T0yaIc0a+AjAwAA4JvZ6uERdV0CUAfM+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AA
AAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAK6DsV/HXv3j2///3vy68rKipy++2311k9AAAAsDQ9/PDD2XnnndOlS5f5/k08dOjQrLHGGmnWrFnatGmTAQMG5Iknnigvf+ONN3LIIYekR48eadKkSVZZZZUMGTIkX375ZbnNF198kYMOOii9e/dOgwYNsttuuy2lvQO+7jsV/H3d+++/nx122KGuywAAAIClYvr06VlvvfVy+eWXz3f5aqutlssuuyzPP/98HnnkkXTv3j3bb799PvrooyTJyy+/nKqqqvzxj3/MCy+8kIsuuihXXnllfvnLX5b7mD17dpo0aZJjjz02AwYMWCr7Bcxfg7ouoC516tSprksAAACApWaHHXaodQLMvvvuW+31hRdemGuuuSbPPfdctt122wwaNCiDBg0qL+/Zs2fGjRuXK664IhdccEGSpFmzZrniiiuSJI8++mimTJmy+HcEWCiFmvH32WefZb/99kuzZs3SuXPnXHTRRenfv38GDx483/Zfn9b8/PPPZ5tttkmTJk3Srl27HHbYYZk2bVp5+UEHHZTddtst/+///b907NgxrVu3zplnnplZs2blpJNOStu2bbPiiitm2LBh1bZzyimnZLXVVkvTpk3Ts2fPnH766Zk5c+aSOAQAAACwWHz55Ze56qqr0qpVq6y33no1tvv000/Ttm3bpVgZsLAKFfwdf/zxefTRR/PPf/4zw4cPz8iRI/PMM88s1LrTp0/PwIED06ZNmzz11FO55ZZbct999+Xoo4+u1u6BBx7Ie++9l4cffjgXXnhhhgwZku9///tp06ZNnnjiiRxxxBE5/PDD884775TXadGiRa699tq8+OKLufjii3P11VfnoosuWqz7DgAAAIvDnXfemebNm6dx48a56KKLMnz48LRv336+bSdMmJBLL700hx9++FKuElgYhQn+Pvvss1x33XW54IILsu2222adddbJsGHDMnv27IVa/69//Wu++OKLXH/99VlnnXWyzTbb5LLLLstf/vKXfPjhh+V2bdu2zSWXXJLVV189Bx98cFZfffV8/vnn+eUvf5levXrl1FNPTaNGjfLII4+U1znttNOy2WabpXv37tl5551z4okn5uabb66xlhkzZmTq1KnVHgAAALA0bL311hkzZkwee+yxDBo0KHvvvXcmTpw4T7t33303gwYNyl577ZVDDz20DioFFqQwwd9rr72WmTNnpl+/fuX3WrVqldVXX32h1n/ppZey3nrrpVmzZuX3Nt9881RVVWXcuHHl99Zee+3Uq/d/h61jx47p3bt3+XX9+vXTrl27aj8U//a3v2XzzTdPp06d0rx585x22ml56623aqzlnHPOSatWrcqPlVZaaaH2AQAAABZVs2bNsuqqq2aTTTbJNddckwYNGuSaa66p1ua9997L1ltvnc022yxXXXVVHVUKLEhhgr+lpWHDhtVeV1RUzPe9qqqqJMmoUaOy3377Zccdd8ydd96ZZ599Nr/61a+q3er860499dR8+umn5cfbb7+9+HcEAAAAFkJVVVVmzJhRfv3uu++mf//+2XDDDTNs2LBqk2OAZUth7urbs2fPNGzYME899VRWXnnlJF9dYPSVV17JlltuucD111xzzVx77bWZPn16edbfo48+mnr16i30rMH5eeyxx9KtW7f86le/Kr/35ptv1rpOZWVlKisrv/U2AQAAYH6mTZuWCRMmlF+//vrrGTNmTNq2bZt27drl7LPPzi677JLOnTtn0qRJufzyy/Puu+9mr732SvJ/oV+3bt1ywQUX5KOPPir31alTp/LzF198MV9++WU+/vjjfPbZZxkzZkySZP31118q+wl8pTDBX4sWLXLggQeW767boUOHDBkyJPXq1UtFRcUC199vv/0yZMiQHHjggRk6dGg++uijHHPMMTnggAPSsWPHb11Xr1698tZbb+Wmm25K3759869//Su33Xbbt+4PAAAAvq3Ro0dn6623Lr8+/vjjkyQHHnhgrrzyyrz88su57rrrMmnSpLRr1y59+/bNyJEjs/baaydJhg8fngkTJmTChAlZccUVq/VdKpXKz3fcccdqk1769OkzTxtgyStM8JckF154YY444oh8//vfT8uWLXPyySfn7bffTuPGjRe4btOmTXPPPffk5z//efr27ZumTZtmzz33zIUXXrhINe2yyy457rjjcvTRR2fGjBnZaaedcvrpp2fo0KGL1C8AAAB8U/379681fLv11ltrXf+ggw7KQQcdtMDtvPHGG9+wMmBJqCgVOG6fPn16unbtmt/97nc55JBD6rqcb23q1Klp1apV/rXpZmnWoFBZLQAAAEvBVg+PqOsSgMVoTlb06aefpmXLljW2K1SK9Oyzz+bll19Ov3798umnn+bMM89Mkuy66651XBkAAAAALF2FCv6S5IILLsi4cePSqFGjbLjhhhk5cmTat29f12UBAAAAwFJVqOCvT58+efrpp+u6DAAAAACoc/XqugAAAAAAYPET/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAooAZ1XQAL73t335WWLVvWdRkAAAAALAfM+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAa1HUBLFipVEqST
J06tY4rAQAAAKCuzcmI5mRGNRH8LQcmT56cJFlppZXquBIAAAAAlhWfffZZWrVqVeNywd9yoG3btkmSt956q9YPE6ZOnZqVVlopb7/9dlq2bFnX5bAMM1ZYWMYKC8tYYWEZKywM44SFZaywsIo2VkqlUj777LN06dKl1naCv+VAvXpfXYqxVatWhRicLHktW7Y0VlgoxgoLy1hhYRkrLCxjhYVhnLCwjBUWVpHGysJMDnNzDwAAAAAoIMEfAAAAABSQ4G85UFlZmSFDhqSysrKuS2EZZ6ywsIwVFpaxwsIyVlhYxgoLwzhhYRkrLKzv6lipKC3ovr8AAAAAwHLHjD8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4Ww5cfvnl6d69exo3bpyNN944Tz75ZF2XxFJ0zjnnpG/fvmnRokU6dOiQ3XbbLePGjavW5osvvshRRx2Vdu3apXnz5tlzzz3z4YcfVmvz1ltvZaeddkrTpk3ToUOHnHTSSZk1a9bS3BWWsnPPPTcVFRUZPHhw+T1jhTnefffd7L///mnXrl2aNGmS3r17Z/To0eXlpVIpZ5xxRjp37pwmTZpkwIABGT9+fLU+Pv744+y3335p2bJlWrdunUMOOSTTpk1b2rvCEjR79uycfvrp6dGjR5o0aZJVVlklZ511Vua+N5yx8t308MMPZ+edd06XLl1SUVGR22+/vdryxTUunnvuuWyxxRZp3LhxVlpppZx33nlLetdYjGobJzNnzswpp5yS3r17p1mzZunSpUt+/OMf57333qvWh3Hy3bCgnylzO+KII1JRUZHf//731d43Vr4bFmasvPTSS9lll13SqlWrNGvWLH379s1bb71VXv5d+5tI8LeM+9vf/pbjjz8+Q4YMyTPPPJP11lsvAwcOzMSJE+u6NJaSESNG5Kijjsrjjz+e4cOHZ+bMmdl+++0zffr0cpvjjjsud9xxR2655ZaMGDEi7733XvbYY4/y8tmzZ2ennXbKl19+mcceeyzXXXddrr322pxxxhl1sUssBU899VT++Mc/Zt111632vrFCknzyySfZfPPN07Bhw9x111158cUX87vf/S5t2rQptznvvPNyySWX5Morr8wTTzyRZs2aZeDAgfniiy/Kbfbbb7+88MILGT58eO688848/PDDOeyww+pil1hCfvvb3+aKK67IZZddlpdeeim//e1vc9555+XSSy8ttzFWvpumT5+e9dZbL5dffvl8ly+OcTF16tRsv/326datW55++umcf/75GTp0aK666qolvn8sHrWNk88//zzPPPNMTj/99DzzzDO59dZbM27cuOyyyy7V2hkn3w0L+pkyx2233ZbHH388Xbp0mWeZsfLdsKCx8uqrr+Z73/te1lhjjTz00EN57rnncvrpp6dx48blNt+5v4lKLNP69etXOuqoo8qvZ8+eXerSpUvpnHPOqcOqqEsTJ04sJSmNGDGiVCqVSlOmTCk1bNiwdMstt5TbvPTSS6UkpVGjRpVKpVLp3//+d6levXqlDz74oNzmiiuuKLVs2bI0Y8aMpbsDLHGfffZZqVevXqXhw4eXttpqq9LPf/7zUqlkrPB/TjnllNL3vve9GpdXVVWVOnXqVDr//PPL702ZMqVUWVlZuvHGG0ulUqn04osvlpKUnnrqqXKbu+66q1RRUVF69913l1zxLFU77bRT6eCDD6723h577FHab7/9SqWSscJXkpRuu+228uvFNS7+8Ic/lNq0aVPt359TTjmltPrqqy/hPWJJ+Po4mZ8nn3yylKT05ptvlkol4+S7qqax8s4775S6du1aGjt2bKlbt26liy66qLzMWPlumt9Y+eEPf1jaf//9a1znu/g3kRl/y7Avv/wyTz/9dAYMGFB+r169ehkwYEBGjRpVh5VRlz799NMkSdu2bZMkTz/9dGbOnFltnKyxxhpZeeWVy+Nk1KhR6d27dzp27FhuM3DgwEydOjUvvPDCUqyepeGoo47KTjvtVG1MJMYK/+ef//xnNtpoo+y1117p0KFD+vTpk6uvvrq8/PXXX88HH3xQbay0atUqG2+8cbWx0rp162y00UblNgMGDEi9evXyxBNPLL2dYYnabLPNcv/99+eVV15JkvznP//JI488kh122CGJscL8La5xMWrUqGy55ZZp1KhRuc3AgQMzbty4fPLJJ0tpb1iaPv3001RUVKR169ZJjBP+T1VVVQ444ICcdNJJWXvttedZbqyQfDVO/vWvf2W11VbLwIED06FDh2y88cbVTgf+Lv5NJPhbhk2aNCmzZ8+uNtiSpGPHjvnggw/qqCrqUlVVVQYPHpzNN98866yzTpLkgw8+SKNGjcq/IM0x9zj54IMP5juO5iyjOG666aY888wzOeecc+ZZZqwwx2uvvZYrrrgivXr1yj333JMjjzwyxx57bK677rok//dZ1/bvzwcffJAOHTpUW96gQYO0bdvWWCmQX/ziF/nRj36UNdZYIw0bNkyfPn0yePDg7LfffkmMFeZvcY0L/yZ9t3zxxRc55ZRTss8++6Rly5ZJjBP+z29/+9s0aNAgxx577HyXGyskycSJEzNt2rSce+65GTRoUO69997svvvu2WOPPTJixIgk382/iRrUdQHAwjvqqKMyduzYPPLII3VdCsugt99+Oz//+c8zfPjwatewgK+rqqrKRhttlP/3//5fkqRPnz4ZO3Zsrrzyyhx44IF1XB3Lkptvvjk33HBD/vrXv2bttdfOmDFjMnjw4HTp0sVYARabmTNnZu+9906pVMoVV1xR1+WwjHn66adz8cUX55lnnklFRUVdl8MyrKqqKkmy66675rjjjkuSrL/++nnsscdy5ZVXZquttqrL8uqMGX/LsPbt26d+/frz3F3mww8/TKdOneqoKurK0UcfnTvvvDMPPvhgVlxxxfL7nTp1ypdffpkpU6ZUaz/3OOnUqdN8x9GcZRTD008/nYkTJ2aDDTZIgwYN0qBBg4wYMSKXXHJJGjRokI4dOxorJEk6d+6ctdZaq9p7a665ZvluZ3M+69r+/enUqdM8N5qaNWtWPv74Y2OlQE466aTyrL/evXvngAMOyHHHHVeeVWysMD+La1z4N+m7YU7o9+abb2b48OHl2X6JccJXRo4cmYkTJ2bllVcu/4775ptv5oQTTkj37t2TGCt8pX379mnQoMECf8/9rv1NJPhbhjVq1Cgbbrhh7r///vJ7VVVVuf/++7PpppvWYWUsTaVSKUcffXRuu+22PPDAA+nRo0e15RtuuGEaNmxYbZyMGzcub731VnmcbLrppnn++eer/WM45xerr/9QZPm17bbb5vnnn8+YMWPKj4022ij77bdf+bmxQpJsvvnmGTduXLX3XnnllXTr1i1J0qNHj3Tq1KnaWJk6dWqeeOKJamNlypQpefrpp8ttHnjggVRVVWXjjTdeCnvB0vD555+nXr3qvy7Wr1+//D/qxgrzs7jGxaabbpqHH344M2fOLLcZPnx4Vl999Wp3IWf5NSf0Gz9+fO677760a9eu2nLjhCQ54IAD
8txzz1X7HbdLly456aSTcs899yQxVvhKo0aN0rdv31p/z/1O/v1c13cXoXY33XRTqbKysnTttdeWXnzxxdJhhx1Wat26dbW7y1BsRx55ZKlVq1alhx56qPT++++XH59//nm5zRFHHFFaeeWVSw888EBp9OjRpU033bS06aablpfPmjWrtM4665S233770pgxY0p33313aYUVViideuqpdbFLLEVz39W3VDJW+MqTTz5ZatCgQenss88ujR8/vnTDDTeUmjZtWvqf//mfcptzzz231Lp169L//u//lp577rnSrrvuWurRo0fpv//9b7nNoEGDSn369Ck98cQTpUceeaTUq1ev0j777FMXu8QScuCBB5a6du1auvPOO0uvv/566dZbby21b9++dPLJJ5fbGCvfTZ999lnp2WefLT377LOlJKULL7yw9Oyzz5bvxro4xsWUKVNKHTt2LB1wwAGlsWPHlm666aZS06ZNS3/84x+X+v7y7dQ2Tr788svSLrvsUlpxxRVLY8aMqfZ77tx3zTROvhsW9DPl675+V99SyVj5rljQWLn11ltLDRs2LF111VWl8ePHly699NJS/fr1SyNHjiz38V37m0jwtxy49NJLSyuvvHKpUaNGpX79+pUef/zxui6JpSjJfB/Dhg0rt/nvf/9b+tnPflZq06ZNqWnTpqXdd9+99P7771fr54033ijtsMMOpSZNmpTat29fOuGEE0ozZ85cynvD0vb14M9YYY477rijtM4665QqKytLa6yxRumqq66qtryqqqp0+umnlzp27FiqrKwsbbvttqVx48ZVazN58uTSPvvsU2revHmpZcuWpZ/85Celzz77bGnuBkvY1KlTSz//+c9LK6+8cqlx48alnj17ln71q19V+6PcWPluevDBB+f7+8mBBx5YKpUW37j4z3/+U/re975XqqysLHXt2rV07rnnLq1dZDGobZy8/vrrNf6e++CDD5b7ME6+Gxb0M+Xr5hf8GSvfDQszVq655prSqquuWmrcuHFpvfXWK91+++3V+viu/U1UUSqVSkt2TiEAAAAAsLS5xh8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAvr/zGH+CVpT8ZcAAAAASUVORK5CYII="},"metadata":{}}],"execution_count":9},{"cell_type":"code","source":"#Count each class in test data\nplt.figure(figsize=(15, 7))\nax = sns.countplot(y=ts_df['Class'], palette='viridis')\n\nax.set(xlabel='', ylabel='', title='Count of images in each class')\nax.bar_label(ax.containers[0])\n\nplt.show()","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:44:26.388404Z","iopub.execute_input":"2024-07-28T12:44:26.388715Z","iopub.status.idle":"2024-07-28T12:44:26.662331Z","shell.execute_reply.started":"2024-07-28T12:44:26.388689Z","shell.execute_reply":"2024-07-28T12:44:26.661435Z"},"trusted":true},"outputs":[{"output_type":"display_data","data":{"text/plain":"","image/png":"iVBORw0KGgoAAAANSUhEUgAABP4AAAJdCAYAAACiWLm6AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuNSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/xnp5ZAAAACXBIWXMAAA9hAAAPYQGoP6dpAABQbElEQVR4nO3debwWZf0//tdhO2we9kVlURFRVNQw9eRXMUCR1NSozPwI7qm44B6lglphuJIZlRlQn8zUtNJCwgVKREWNIjUTUzFlSYvVZDv3749+3J+OgOLGgfH5fDzux4OZueaa99zncg68vGamolQqlQIAAAAAFEq9ui4AAAAAAPjgCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAApi1apVufDCC9O5c+fUq1cvRxxxxHrbHnDAATnggAM2Wm2bqoqKiowcObKuy3hfpkyZkoqKitxxxx0f+jGmTJnyoR0DAPjgCf4AgEJ5/vnn86UvfSnbbbddGjdunKqqquy7774ZM2ZM/v3vf9d1eUmS73znOxk/fvwH3u8Pf/jDXHXVVfnsZz+bCRMm5JxzzvnAjwEAwOajQV0XAADwQfn1r3+dz33uc6msrMzgwYOzyy67ZMWKFXnooYdywQUX5Kmnnsr3v//9ui4z3/nOd9K2bdscd9xxH2i/DzzwQLbeeutcd91179j2t7/97Qd67M3Vv//97zRo4K/EAEAx+VsOAFAIL7zwQr7whS+ka9eueeCBB7LllluWtw0dOjSzZ8/Or3/96zqs8MO3YMGCtGzZcoPaNmrU6MMtZjPRuHHjui4BAOBD41ZfAKAQRo8enaVLl+bmm2+uFfqtsf322+fss88uL69atSpXXHFFunXrlsrKymyzzTb5yle+kuXLl9fab33PgNtmm21qzdgbP358KioqMm3atJx77rlp165dmjVrliOPPDL/+Mc/au331FNPZerUqamoqEhFRcU7Pmtv2bJlOe+889K5c+dUVlamR48eufrqq1MqlZIkL774YioqKvLggw/mqaeeKvf7ds9je+sz/tY8w+22227LZZddlq233jpbbLFFPvvZz2bRokVZvnx5hg0blvbt26d58+Y5/vjj1/quxo0bl759+6Z9+/aprKxMz549M3bs2LWOXVNTk5EjR2arrbZK06ZN88lPfjJPP/30Wt9pkixcuDDDhg0rn/v222+fb37zm6mpqanV7tZbb03v3r2zxRZbpKqqKrvuumvGjBnztt9rsvbPd+TIkamoqMjs2bNz3HHHpWXLlmnRokWOP/74vPHGG+/YX5I8+uijOfjgg9OiRYs0bdo0ffr0ybRp02q1eemll3L66aenR48eadKkSdq0aZPPfe5zefHFF9fqb+HChTnnnHOyzTbbpLKyMp06dcrgwYPz2muv1WpXU1OTr3/96+nUqVMaN26cfv36Zfbs2RtU8yuvvJITTzwxW221VSorK7PtttvmtNNOy4oVK9a7z+9///t87nOfS5cuXVJZWZnOnTvnnHPOWeuW+nnz5uX4449Pp06dUllZmS233DKHH354rXN9/PHHM2DAgLRt2zZNmjTJtttumxNOOG
GDagcA1s+MPwCgEO6+++5st912+cQnPrFB7U866aRMmDAhn/3sZ3Peeefl0UcfzahRo/LMM8/krrvues91nHnmmWnVqlVGjBiRF198Mddff33OOOOM/OxnP0uSXH/99TnzzDPTvHnzfPWrX02SdOjQYb39lUqlfPrTn86DDz6YE088MbvvvnsmTZqUCy64IK+88kquu+66tGvXLj/+8Y/z9a9/PUuXLs2oUaOSJDvttNO7rn/UqFFp0qRJvvzlL2f27Nm54YYb0rBhw9SrVy//+te/MnLkyDzyyCMZP358tt1221x66aXlfceOHZudd945n/70p9OgQYPcfffdOf3001NTU5OhQ4eW2w0fPjyjR4/OYYcdlgEDBuSPf/xjBgwYkDfffLNWLW+88Ub69OmTV155JV/60pfSpUuXPPzwwxk+fHjmzp2b66+/PkkyefLkHH300enXr1+++c1vJkmeeeaZTJs2rVbY+258/vOfz7bbbptRo0blySefzA9+8IO0b9++3P/6PPDAAxk4cGB69+6dESNGpF69euVA9Pe//3322muvJMmMGTPy8MMP5wtf+EI6deqUF198MWPHjs0BBxyQp59+Ok2bNk2SLF26NPvtt1+eeeaZnHDCCfnYxz6W1157Lb/61a/y97//PW3bti0f+8orr0y9evVy/vnnZ9GiRRk9enSOOeaYPProo29b86uvvpq99torCxcuzCmnnJIdd9wxr7zySu6444688cYb650devvtt+eNN97IaaedljZt2uSxxx7LDTfckL///e+5/fbby+0GDRqUp556KmeeeWa22WabLFiwIJMnT86cOXPKywcddFDatWuXL3/5y2nZsmVefPHF3HnnnRv0swIA3kYJAGAzt2jRolKS0uGHH75B7WfOnFlKUjrppJNqrT///PNLSUoPPPBAeV2S0ogRI9bqo2vXrqUhQ4aUl8eNG1dKUurfv3+ppqamvP6cc84p1a9fv7Rw4cLyup133rnUp0+fDar1F7/4RSlJ6Wtf+1qt9Z/97GdLFRUVpdmzZ5fX9enTp7TzzjtvUL99+vSpVcODDz5YSlLaZZddSitWrCivP/roo0sVFRWlgQMH1tq/urq61LVr11rr3njjjbWOM2DAgNJ2221XXp43b16pQYMGpSOOOKJWu5EjR5aS1PpOr7jiilKzZs1Kf/3rX2u1/fKXv1yqX79+ac6cOaVSqVQ6++yzS1VVVaVVq1Zt0Ln/t7f+fEeMGFFKUjrhhBNqtTvyyCNLbdq0edu+ampqSt27dy8NGDCg1hh44403Sttuu23pwAMPrLXuraZPn15KUvrRj35UXnfppZeWkpTuvPPOdR6vVPq/n91OO+1UWr58eXn7mDFjSklKs2bNetu6Bw8eXKpXr15pxowZ73iMBx988G3PYdSoUaWKiorSSy+9VCqVSqV//etfpSSlq666ar3Hv+uuu0pJ1nl8AOD9casvALDZW7x4cZJkiy222KD2v/nNb5Ik5557bq315513XpK8r2cBnnLKKamoqCgv77ffflm9enVeeuml99Tfb37zm9SvXz9nnXXWWrWWSqVMnDjxPde6LoMHD07Dhg3Ly3vvvXdKpdJat13uvffeefnll7Nq1aryuiZNmpT/vGjRorz22mvp06dP/va3v2XRokVJkvvvvz+rVq3K6aefXqu/M888c61abr/99uy3335p1apVXnvttfKnf//+Wb16dX73u98lSVq2bJlly5Zl8uTJ7/8L+P+deuqptZb322+/vP766+Wxti4zZ87Mc889ly9+8Yt5/fXXy/UuW7Ys/fr1y+9+97vyLcr//V2tXLkyr7/+erbffvu0bNkyTz75ZHnbz3/+8+y222458sgj1zref4+zJDn++ONrzc7bb7/9kiR/+9vf1ltzTU1NfvGLX+Swww7Lnnvu+Y7H+G//fQ7Lli3La6+9lk984hMplUr5wx/+UG7TqFGjTJkyJf/617/W2c+a51Lec889Wbly5XqPBwC8e4I/AGCzV1VVlSRZsmTJBrV/6aWXUq9evWy//fa11nfs2DEtW7Z8zyFdknTp0qXWcqtWrZJkvaHHO3nppZey1VZbrRVqrrmN9/3Uui5vrb9FixZJks6dO6+1vqamphzoJcm0adPSv3//NGvWLC1btky7du3yla98JUnK7dbU+9bvvnXr1uXvao3nnnsu9957b9q1a1fr079//yT/eZlJkpx++unZYYcdMnDgwHTq1CknnHBC7r333g/0e9iQn+Nzzz2XJBkyZMhaNf/gBz/I8uXLy9/Dv//971x66aXlZxe2bds27dq1y8KFC2t9p88//3x22WWXD63mf/zjH1m8ePEGH+O/zZkzJ8cdd1xat26d5s2bp127dunTp0+S//t5V1ZW5pvf/GYmTpyYDh06ZP/998/o0aMzb968cj99+vTJoEGDctlll6Vt27Y5/PDDM27cuLWeIQkAvHue8QcAbPaqqqqy1VZb5c9//vO72u/tZjO9k9WrV69zff369de5vvT/v4hjU7e++t/pvJ5//vn069cvO+64Y6699tp07tw5jRo1ym9+85tcd911a72MY0PU1NTkwAMPzIUXXrjO7TvssEOSpH379pk5c2YmTZqUiRMnZuLEiRk3blwGDx6cCRMmvOvjJu/t57jmHK+66qrsvvvu62zTvHnzJP+Z4Thu3LgMGzYs1dXVadGiRSoqKvKFL3zhPX1X77Xm92r16tU58MAD889//jMXXXRRdtxxxzRr1iyvvPJKjjvuuFrnMGzYsBx22GH5xS9+kUmTJuWSSy7JqFGj8sADD2SPPfZIRUVF7rjjjjzyyCO5++67M2nSpJxwwgm55ppr8sgjj5S/MwDg3RP8AQCFcOihh+b73/9+pk+fnurq6rdt27Vr19TU1OS5556r9QKM+fPnZ+HChenatWt5XatWrbJw4cJa+69YsSJz5859z7W+m8Cxa9euue+++7JkyZJas/7+8pe/lLdvCu6+++4sX748v/rVr2rNPHvwwQdrtVtT7+zZs7PtttuW17/++utrzUzr1q1bli5dWp7h93YaNWqUww47LIcddlhqampy+umn53vf+14uueSStWYXfli6deuW5D9B9DvVfMcdd2TIkCG55ppryuvefPPNtcZat27d3nWg/W60a9cuVVVV7/oYs2bNyl//+tdMmDAhgwcPLq9f3+3W3bp1y3nnnZfzzjsvzz33XHbfffdcc801+d///d9ym3322Sf77LNPvv71r+eWW27JMccck1tvvTUnnXTSezs5AMCtvgBAMVx44YVp1qxZTjrppMyfP3+t7c8//3zGjBmTJPnUpz6VJOW3wq5x7bXXJkkOOeSQ8rpu3bqVnyW3xve///31zvjbEM2aNVsr4FmfT33qU1m9enW+/e1v11p/3XXXpaKiIgMHDnzPdXyQ1sw2++/ZZYsWLcq4ceNqtevXr18aNGiQsWPH1lr/1vNL/vNm3enTp2fSpElrbVu4cGH5+YKvv/56rW316tVLr169kmSj3i7au3fvdOvWLVdffXWWL
l261vZ//OMf5T/Xr19/rZl4N9xww1rjatCgQfnjH/+4zjdNfxAz+erVq5cjjjgid999dx5//PENPsa6ft6lUqn839gab7zxxlpva+7WrVu22GKL8s/mX//611rHWTNj0u2+APD+mPEHABRCt27dcsstt+Soo47KTjvtlMGDB2eXXXbJihUr8vDDD+f222/PcccdlyTZbbfdMmTIkHz/+9/PwoUL06dPnzz22GOZMGFCjjjiiHzyk58s93vSSSfl1FNPzaBBg3LggQfmj3/8YyZNmpS2bdu+51p79+6dsWPH5mtf+1q23377tG/fPn379l1n28MOOyyf/OQn89WvfjUvvvhidtttt/z2t7/NL3/5ywwbNqw8y6yuHXTQQeVZd1/60peydOnS3HTTTWnfvn2t2ZEdOnTI2WefnWuuuSaf/vSnc/DBB+ePf/xjJk6cmLZt29aaDXnBBRfkV7/6VQ499NAcd9xx6d27d5YtW5ZZs2bljjvuyIsvvpi2bdvmpJNOyj//+c/07ds3nTp1yksvvZQbbrghu+++e60ZnR+2evXq5Qc/+EEGDhyYnXfeOccff3y23nrrvPLKK3nwwQdTVVWVu+++O8l/Zqj++Mc/TosWLdKzZ89Mnz499913X9q0aVOrzwsuuCB33HFHPve5z+WEE05I7969889//jO/+tWv8t3vfje77bbb+677G9/4Rn7729+mT58+OeWUU7LTTjtl7ty5uf322/PQQw+VX77x33bcccd069Yt559/fl555ZVUVVXl5z//+VqzNv/617+mX79++fznP5+ePXumQYMGueuuuzJ//vx84QtfSJJMmDAh3/nOd3LkkUemW7duWbJkSW666aZUVVWVQ3oA4L0R/AEAhfHpT386f/rTn3LVVVfll7/8ZcaOHZvKysr06tUr11xzTU4++eRy2x/84AfZbrvtMn78+Nx1113p2LFjhg8fnhEjRtTq8+STT84LL7yQm2++Offee2/222+/TJ48Of369XvPdV566aV56aWXMnr06CxZsiR9+vRZb/BXr169/OpXv8qll16an/3sZxk3bly22WabXHXVVeW3EG8KevTokTvuuCMXX3xxzj///HTs2DGnnXZa2rVrt9Ybgb/5zW+madOmuemmm3Lfffeluro6v/3tb/P//t//S+PGjcvtmjZtmqlTp+Yb3/hGbr/99vzoRz9KVVVVdthhh1x22WXlF4/8z//8T77//e/nO9/5ThYuXJiOHTvmqKOOysiRI1Ov3sa9weWAAw7I9OnTc8UVV+Tb3/52li5dmo4dO2bvvffOl770pXK7MWPGpH79+vnJT36SN998M/vuu2/uu+++DBgwoFZ/zZs3z+9///uMGDEid911VyZMmJD27dunX79+6dSp0wdS89Zbb51HH300l1xySX7yk59k8eLF2XrrrTNw4MA0bdp0nfs0bNgwd999d84666yMGjUqjRs3zpFHHpkzzjijVhjZuXPnHH300bn//vvz4x//OA0aNMiOO+6Y2267LYMGDUqScvB+6623Zv78+WnRokX22muv/OQnP6l1OzgA8O5VlDaXJ00DAFBYCxcuTKtWrfK1r30tX/3qV+u6HACAQvCMPwAANqp///vfa61b87zFAw44YOMWAwBQYG71BQBgo/rZz36W8ePH51Of+lSaN2+ehx56KD/96U9z0EEHZd99963r8gAACkPwBwDARtWrV680aNAgo0ePzuLFi8sv/Pja175W16UBABSKZ/wBAAAAQAF5xh8AAAAAFJDgDwAAAAAKyDP+NgM1NTV59dVXs8UWW6SioqKuywEAAACgDpVKpSxZsiRbbbVV6tVb/7w+wd9m4NVXX03nzp3rugwAAAAANiEvv/xyOnXqtN7tgr/NwBZbbJHkPz/MqqqqOq4GAAAAgLq0ePHidO7cuZwZrY/gbzOw5vbeqqoqwR8AAAAASfKOj4Tzcg8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAAqoQV0XwIb77Oe/kYYNK+u6DAAAgHft13dfVtclAHzkmPEHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFNBmHfxts802uf766993PwcccECGDRv2vvsBAABg/caOHZtevXqlqqoqVVVVqa6uzsSJE8vb33zzzQwdOjRt2rRJ8+bNM2jQoMyfP79WH3PmzMkhhxySpk2bpn379rnggguyatWqjX0qAJuFzTr4mzFjRk455ZTyckVFRX7xi1+8637uvPPOXHHFFeXlDypQBAAA4P906tQpV155ZZ544ok8/vjj6du3bw4//PA89dRTSZJzzjknd999d26//fZMnTo1r776aj7zmc+U91+9enUOOeSQrFixIg8//HAmTJiQ8ePH59JLL62rUwLYpDWo6wLej3bt2n0g/bRu3foD6eetVqxYkUaNGn0ofQMAAGxuDjvssFrLX//61zN27Ng88sgj6dSpU26++ebccsst6du3b5Jk3Lhx2WmnnfLII49kn332yW9/+9s8/fTTue+++9KhQ4fsvvvuueKKK3LRRRdl5MiR/v0F8Bab9Iy/Aw44IGeccUbOOOOMtGjRIm3bts0ll1ySUqmUpPbMvG222SZJcuSRR6aioqK8fNxxx+WII46o1e+wYcNywAEH1DrOmlt9DzjggLz00ks555xzUlFRkYqKiiTJ66+/nqOPPjpbb711mjZtml133TU//elP11nvsGHD0rZt2wwYMCAnnHBCDj300FrtVq5cmfbt2+fmm29+/18SAADAZmj16tW59dZbs2zZslRXV+eJJ57IypUr079//3KbHXfcMV26dMn06dOTJNOnT8+uu+6aDh06lNsMGDAgixcvLs8aBOD/bNLBX5JMmDAhDRo0yGOPPZYxY8bk2muvzQ9+8IO12s2YMSPJf/6P0Ny5c8vL79add96ZTp065fLLL8/cuXMzd+7cJP951kTv3r3z61//On/+859zyimn5Nhjj81jjz22Vr2NGjXKtGnT8t3vfjcnnXRS
7r333nI/SXLPPffkjTfeyFFHHfWeagQAANhczZo1K82bN09lZWVOPfXU3HXXXenZs2fmzZuXRo0apWXLlrXad+jQIfPmzUuSzJs3r1bot2b7mm0A1LbJ3+rbuXPnXHfddamoqEiPHj0ya9asXHfddTn55JNrtVtz22/Lli3TsWPH93y81q1bp379+tliiy1q9bP11lvn/PPPLy+feeaZmTRpUm677bbstdde5fXdu3fP6NGja/XZo0eP/PjHP86FF16Y5D/h5Oc+97k0b958nTUsX748y5cvLy8vXrz4PZ8PAADApqRHjx6ZOXNmFi1alDvuuCNDhgzJ1KlT67osgELa5Gf87bPPPuXbbZOkuro6zz33XFavXr1R61i9enWuuOKK7LrrrmndunWaN2+eSZMmZc6cObXa9e7de619TzrppIwbNy5JMn/+/EycODEnnHDCeo81atSotGjRovzp3LnzB3syAAAAdaRRo0bZfvvt07t374waNSq77bZbxowZk44dO2bFihVZuHBhrfbz588vT8ro2LHjWm/5XbP8fiaAABTVJh/8vV/16tUrPxNwjZUrV77rfq666qqMGTMmF110UR588MHMnDkzAwYMyIoVK2q1a9as2Vr7Dh48OH/7298yffr0/O///m+23Xbb7Lfffus91vDhw7No0aLy5+WXX37X9QIAAGwOampqsnz58vTu3TsNGzbM/fffX9727LPPZs6cOamurk7yn4kgs2bNyoIFC8ptJk+enKqqqvTs2XOj1w6wqdvkb/V99NFHay0/8sgj6d69e+rXr79W24YNG641E7Bdu3b585//XGvdzJkz07Bhw/Ues1GjRmv1M23atBx++OH5n//5nyT/+eX017/+dYN+ubRp0yZHHHFExo0bl+nTp+f4449/2/aVlZWprKx8x34BAAA2J8OHD8/AgQPTpUuXLFmyJLfcckumTJmSSZMmpUWLFjnxxBNz7rnnpnXr1qmqqsqZZ56Z6urq7LPPPkmSgw46KD179syxxx6b0aNHZ968ebn44oszdOhQ/4YCWIdNfsbfnDlzcu655+bZZ5/NT3/609xwww05++yz19l2m222yf3335958+blX//6V5Kkb9++efzxx/OjH/0ozz33XEaMGLFWELiufn73u9/llVdeyWuvvZbkP8/umzx5ch5++OE888wz+dKXvrTWFPO3c9JJJ2XChAl55plnMmTIkA3eDwAAoCgWLFiQwYMHp0ePHunXr19mzJiRSZMm5cADD0ySXHfddTn00EMzaNCg7L///unYsWPuvPPO8v7169fPPffck/r166e6ujr/8z//k8GDB+fyyy+vq1MC2KRt8jP+Bg8enH//+9/Za6+9Ur9+/Zx99tk55ZRT1tn2mmuuybnnnpubbropW2+9dV588cUMGDAgl1xySS688MK8+eabOeGEEzJ48ODMmjVrvce8/PLL86UvfSndunXL8uXLUyqVcvHFF+dvf/tbBgwYkKZNm+aUU07JEUcckUWLFm3QefTv3z9bbrlldt5552y11Vbv6bsAAADYnN18881vu71x48a58cYbc+ONN663TdeuXfOb3/zmgy4NoJAqSm99AN4m5IADDsjuu++e66+/vq5Led+WLl2arbfeOuPGjctnPvOZd7Xv4sWL06JFixw44KI0bGj6OgAAsPn59d2X1XUJAIWxJitatGhRqqqq1ttuk5/xt7mrqanJa6+9lmuuuSYtW7bMpz/96bouCQAAAICPAMHfh2zOnDnZdttt06lTp4wfPz4NGvjKAQAAAPjwbdIp1JQpU+q6hPdtm222ySZ8NzUAAAAABbXJv9UXAAAAAHj3BH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACqhBXRfAhrvjtq+kqqqqrssAAAAAYDNgxh8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKSPAHAAAAAAUk+AMAAACAAhL8AQAAAEABCf4AAAAAoIAEfwAAAABQQII/AAAAACggwR8AAAAAFJDgDwAAAAAKqEFdF8CG6zv8m6lf2biuywAAAADYLDx67SV1XUKdMuMPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAPhKuvPLKVFRUZNiwYeV1b775ZoYOHZo2bdqkefPmGTRoUObPn19rv4qKirU+t95660au/t0T/AEAAABQeDNmzMj3vve99OrVq9b6c845J3fffXduv/32TJ06Na+++mo+85nPrLX/uHHjMnfu3PLniCOO2EiVv3eFDv5GjhyZ3Xffva7LAAAAAKAOLV26NMccc0xuuummtGrVqrx+0aJFufnmm3Pttdemb9++6d27d8aNG5eHH344jzzySK0+WrZsmY4dO5Y/jRs33tin8a4VOvjb1JVKpaxataquywAAAAAotKFDh+aQQw5J//79a61/4oknsnLlylrrd9xxx3Tp0iXTp09fq4+2bdtmr732yg9/+MOUSqWNUvv7sUkHfwcccEDOOuusXHjhhWndunU6duyYkSN
HlrfPmTMnhx9+eJo3b56qqqp8/vOfL9+DPX78+Fx22WX54x//WL73evz48XnxxRdTUVGRmTNnlvtZuHBhKioqMmXKlCTJlClTUlFRkUmTJmWPPfZIkyZN0rdv3yxYsCATJ07MTjvtlKqqqnzxi1/MG2+8Ue5n+fLlOeuss9K+ffs0btw4/+///b/MmDGjvH1NvxMnTkzv3r1TWVmZhx566EP9DgEAAAA+ym699dY8+eSTGTVq1Frb5s2bl0aNGqVly5a11nfo0CHz5s0rL19++eW57bbbMnny5AwaNCinn356brjhhg+79PetQV0X8E4mTJiQc889N48++mimT5+e4447Lvvuu2/69etXDv2mTp2aVatWZejQoTnqqKMyZcqUHHXUUfnzn/+ce++9N/fdd1+SpEWLFms9nPHtjBw5Mt/+9rfTtGnTfP7zn8/nP//5VFZW5pZbbsnSpUtz5JFH5oYbbshFF12UJLnwwgvz85//PBMmTEjXrl0zevToDBgwILNnz07r1q3L/X75y1/O1Vdfne22267W9NI1li9fnuXLl5eXFy9e/F6/PgAAAICPrJdffjlnn312Jk+e/L5uzb3kkkvKf95jjz2ybNmyXHXVVTnrrLM+iDI/NJv0jL8k6dWrV0aMGJHu3btn8ODB2XPPPXP//ffn/vvvz6xZs3LLLbekd+/e2XvvvfOjH/0oU6dOzYwZM9KkSZM0b948DRo0KN973aRJk3d17K997WvZd999s8cee+TEE0/M1KlTM3bs2Oyxxx7Zb7/98tnPfjYPPvhgkmTZsmUZO3ZsrrrqqgwcODA9e/bMTTfdlCZNmuTmm2+u1e/ll1+eAw88MN26dasVCK4xatSotGjRovzp3Lnze/8CAQAAAD6innjiiSxYsCAf+9jH0qBBgzRo0CBTp07Nt771rTRo0CAdOnTIihUrsnDhwlr7zZ8/Px07dlxvv3vvvXf+/ve/15q4tSnaLIK//7bllltmwYIFeeaZZ9K5c+daoVjPnj3TsmXLPPPMMx/4sTt06JCmTZtmu+22q7VuwYIFSZLnn38+K1euzL777lve3rBhw+y1115r1bPnnnu+7XGHDx+eRYsWlT8vv/zyB3E6AAAAAB8p/fr1y6xZszJz5szyZ88998wxxxxT/nPDhg1z//33l/d59tlnM2fOnFRXV6+335kzZ6ZVq1aprKzcGKfxnm3yt/o2bNiw1nJFRUVqamrec3/16v0n6/zvBzCuXLnyHY9dUVHxgdXSrFmzt91eWVm5yQ8cAAAAgE3dFltskV122aXWumbNmqVNmzbl9SeeeGLOPffctG7dOlVVVTnzzDNTXV2dffbZJ0ly9913Z/78+dlnn33SuHHjTJ48Od/4xjdy/vnnb/Tzebc2+Rl/67PTTjvl5ZdfrjUb7umnn87ChQvTs2fPJEmjRo2yevXqWvu1a9cuSTJ37tzyuv9+0cd71a1btzRq1CjTpk0rr1u5cmVmzJhRrgcAAACATct1112XQw89NIMGDcr++++fjh075s477yxvb9iwYW688cZUV1dn9913z/e+971ce+21GTFiRB1WvWE2+Rl/69O/f//suuuuOeaYY3L99ddn1apVOf3009OnT5/yrbTbbLNNXnjhhcycOTOdOnXKFltskSZNmmSfffbJlVdemW233TYLFizIxRdf/L7radasWU477bRccMEFad26dbp06ZLRo0fnjTfeyIknnvi++wcAAADg/ZsyZUqt5caNG+fGG2/MjTfeuM72Bx98cA4++OCNUNkHb7Od8VdRUZFf/vKXadWqVfbff//0798/2223XX72s5+V2wwaNCgHH3xwPvnJT6Zdu3b56U9/miT54Q9/mFWrVqV3794ZNmxYvva1r30gNV155ZUZNGhQjj322HzsYx/L7NmzM2nSpHW+uRcAAAAAPkwVpf9+2B2bpMWLF6dFixbpffpXUr/yvb96GgAAAOCj5NFrL6nrEj4Ua7KiRYsWpaqqar3tNtsZfwAAAADA+gn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAF1KCuC2DDPTDqolRVVdV1GQAAAABsBsz4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAE1qOsC2HB9bv5a6jeprOsyAAAANrrHT72irksA2OyY8QcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAACAzcLYsWPTq1evVFVVpaqqKtXV1Zk4cWJ5+5tvvpmhQ4emTZs2ad68eQYNGpT58+ev1c/48ePTq1evNG7cOO3bt8/QoUM35mkAbDSbRfBXUVGRX/ziFx9onyNHjszuu+/+gfYJAADAh6dTp0658sor88QTT+Txxx9P3759c/jhh+epp55Kkpxzzjm5++67c/vtt2fq1Kl59dVX85
nPfKZWH9dee22++tWv5stf/nKeeuqp3HfffRkwYEBdnA7Ah66iVCqV6rqIdzJv3ry0atUqlZWVH1ifS5cuzfLly9OmTZsPrM8Py+LFi9OiRYvsfu0Fqd/kg/sOAAAANhePn3rFOte3bt06V111VT772c+mXbt2ueWWW/LZz342SfKXv/wlO+20U6ZPn5599tkn//rXv7L11lvn7rvvTr9+/TZm+QAfqDVZ0aJFi1JVVbXedpvFjL+OHTt+oKFfkjRv3nyzCP0AAABY2+rVq3Prrbdm2bJlqa6uzhNPPJGVK1emf//+5TY77rhjunTpkunTpydJJk+enJqamrzyyivZaaed0qlTp3z+85/Pyy+/XFenAfChelfB3wEHHJAzzzwzw4YNS6tWrdKhQ4fcdNNNWbZsWY4//vhsscUW2X777Ws9Y+HPf/5zBg4cmObNm6dDhw459thj89prr9Xq86yzzsqFF16Y1q1bp2PHjhk5cmSt4/73rb4vvvhiKioqcuedd+aTn/xkmjZtmt122618IV/jpptuSufOndO0adMceeSRufbaa9OyZcvy9rfe6ltTU5PLL788nTp1SmVlZXbffffce++95e1rjnvbbbdlv/32S5MmTfLxj388f/3rXzNjxozsueeead68eQYOHJh//OMf5f1mzJiRAw88MG3btk2LFi3Sp0+fPPnkk+/mawcAAOD/N2vWrDRv3jyVlZU59dRTc9ddd6Vnz56ZN29eGjVqVOvffUnSoUOHzJs3L0nyt7/9LTU1NfnGN76R66+/PnfccUf++c9/5sADD8yKFSvq4GwAPlzvesbfhAkT0rZt2zz22GM588wzc9ppp+Vzn/tcPvGJT+TJJ5/MQQcdlGOPPTZvvPFGFi5cmL59+2aPPfbI448/nnvvvTfz58/P5z//+bX6bNasWR599NGMHj06l19+eSZPnvy2dXz1q1/N+eefn5kzZ2aHHXbI0UcfnVWrViVJpk2bllNPPTVnn312Zs6cmQMPPDBf//rX37a/MWPG5JprrsnVV1+dP/3pTxkwYEA+/elP57nnnqvVbsSIEbn44ovz5JNPpkGDBvniF7+YCy+8MGPGjMnvf//7zJ49O5deemm5/ZIlSzJkyJA89NBDeeSRR9K9e/d86lOfypIlS9Zby/Lly7N48eJaHwAAAJIePXpk5syZefTRR3PaaadlyJAhefrppzdo35qamqxcuTLf+ta3MmDAgOyzzz756U9/mueeey4PPvjgh1w5wMbX4N3usNtuu+Xiiy9OkgwfPjxXXnll2rZtm5NPPjlJcumll2bs2LH505/+lPvuuy977LFHvvGNb5T3/+EPf5jOnTvnr3/9a3bYYYckSa9evTJixIgkSffu3fPtb387999/fw488MD11nH++efnkEMOSZJcdtll2XnnnTN79uzsuOOOueGGGzJw4MCcf/75SZIddtghDz/8cO6555719nf11Vfnoosuyhe+8IUkyTe/+c08+OCDuf7663PjjTfWOu6aB7+effbZOfroo3P//fdn3333TZKceOKJGT9+fLl93759ax3n+9//flq2bJmpU6fm0EMPXWcto0aNymWXXbbeWgEAAD6qGjVqlO233z5J0rt378yYMSNjxozJUUcdlRUrVmThwoW1Zv3Nnz8/HTt2TJJsueWWSZKePXuWt7dr1y5t27bNnDlzNt5JAGwk73rGX69evcp/rl+/ftq0aZNdd921vK5Dhw5JkgULFuSPf/xjHnzwwTRv3rz82XHHHZMkzz///Dr7TP5zMV6wYMEG17Hm4r1mn2effTZ77bVXrfZvXf5vixcvzquvvloO79bYd99988wzz6z3uGvO9a3n/9+1z58/PyeffHK6d++eFi1apKqqKkuXLn3bXyrDhw/PokWLyh/PmwAAAFi3mpqaLF++PL17907Dhg1z//33l7c9++yzmTNnTqqrq5Ok/G++Z599ttzmn//8Z1577bV07dp14xYOsBG86xl/DRs2rLVcUVFRa11FRUWS/1x8ly5dmsMOOyzf/OY31+pnTVi3vj5ramo2uI7/PuaHbV3Hfeu6/65jyJAhef311zNmzJh07do1lZWVqa6uftvnR1RWVn7gLzMBAADY3A0fPjwDBw5Mly5dsmTJktxyyy2ZMmVKJk2alBYtWuTEE0/Mueeem9atW6eqqipnnnlmqqurs88++yT5z91ghx9+eM4+++x8//vfT1VVVYYPH54dd9wxn/zkJ+v47AA+eO86+Hs3Pvaxj+XnP/95ttlmmzRo8KEeqpYePXpkxowZtda9dfm/VVVVZauttsq0adPSp0+f8vpp06a97UzBDTFt2rR85zvfyac+9akkycsvv1zr5SYAAABsmAULFmTw4MGZO3duWrRokV69emXSpEnlx0Rdd911qVevXgYNGpTly5dnwIAB+c53vlOrjx/96Ec555xzcsghh6RevXrp06dP7r333rUmpAAUwYeaxg0dOjQ33XRTjj766PJbe2fPnp1bb701P/jBD1K/fv0P5bhnnnlm9t9//1x77bU57LDD8sADD2TixInlGXrrcsEFF2TEiBHp1q1bdt9994wbNy4zZ87MT37yk/dVS/fu3fPjH/84e+65ZxYvXpwLLrggTZo0eV99AgAAfBTdfPPNb7u9cePGufHGG2s9p/2tqqqqcvPNN79jXwBF8K6f8fdurJlFt3r16hx00EHZddddM2zYsLRs2TL16n14h953333z3e9+N9dee21222233HvvvTnnnHPSuHHj9e5z1lln5dxzz815552XXXfdNffee29+9atfpXv37u+rlptvvjn/+te/8rGPfSzHHntszjrrrLRv3/599QkAAAAA76SiVCqV6rqIjeHkk0/OX/7yl/z+97+v61LetcWLF6dFixbZ/doLUr+JZ/8BAAAfPY+fekVdlwCwyViTFS1atChVVVXrbbfxHry3kV199dU58MAD06xZs0ycODETJkxY69kOAAAAAFBUhQ3+HnvssYwePTpLlizJdtttl29961s56aST6rosAAAAANgoChv83XbbbXVdAgAAAADUmQ/15R4AAAAAQN0Q/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUEAN6roANtzUE
y9OVVVVXZcBAAAAwGbAjD8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUUIO6LoANN2ra6WncrFFdlwEAAPCujdj/h3VdAsBHjhl/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAEJ/gAAAACggAR/AAAAAFBAgj8AAAAAKCDBHwAAAAAUkOAPAAAAAApI8AcAAAAABST4AwAAAIACEvwBAAAAQAF9pIK/bbbZJtdff315uaKiIr/4xS/qrB4AAICPkrFjx6ZXr16pqqpKVVVVqqurM3HixPL2N998M0OHDk2bNm3SvHnzDBo0KPPnz6/Vx5w5c3LIIYekadOmad++fS644IKsWrVqY58KwGahQV0XUJfmzp2bVq1a1XUZAAAAHwmdOnXKlVdeme7du6dUKmXChAk5/PDD84c//CE777xzzjnnnPz617/O7bffnhYtWuSMM87IZz7zmUybNi1Jsnr16hxyyCHp2LFjHn744cydOzeDBw9Ow4YN841vfKOOzw5g0/ORDv46duxY1yUAAAB8ZBx22GG1lr/+9a9n7NixeeSRR9KpU6fcfPPNueWWW9K3b98kybhx47LTTjvlkUceyT777JPf/va3efrpp3PfffelQ4cO2X333XPFFVfkoosuysiRI9OoUaO6OC2ATVahbvVdsmRJjjnmmDRr1ixbbrllrrvuuhxwwAEZNmzYOtu/9VbfWbNmpW/fvmnSpEnatGmTU045JUuXLi1vP+6443LEEUfkG9/4Rjp06JCWLVvm8ssvz6pVq3LBBRekdevW6dSpU8aNG1frOBdddFF22GGHNG3aNNttt10uueSSrFy58sP4CgAAADYLq1evzq233pply5aluro6TzzxRFauXJn+/fuX2+y4447p0qVLpk+fniSZPn16dt1113To0KHcZsCAAVm8eHGeeuqpjX4OAJu6QgV/5557bqZNm5Zf/epXmTx5cn7/+9/nySef3KB9ly1blgEDBqRVq1aZMWNGbr/99tx3330544wzarV74IEH8uqrr+Z3v/tdrr322owYMSKHHnpoWrVqlUcffTSnnnpqvvSlL+Xvf/97eZ8tttgi48ePz9NPP50xY8bkpptuynXXXfeBnjsAAMDmYNasWWnevHkqKytz6qmn5q677krPnj0zb968NGrUKC1btqzVvkOHDpk3b16SZN68ebVCvzXb12wDoLbCBH9LlizJhAkTcvXVV6dfv37ZZZddMm7cuKxevXqD9r/lllvy5ptv5kc/+lF22WWX9O3bN9/+9rfz4x//uNbDZFu3bp1vfetb6dGjR0444YT06NEjb7zxRr7yla+ke/fuGT58eBo1apSHHnqovM/FF1+cT3ziE9lmm21y2GGH5fzzz89tt9223lqWL1+exYsX1/oAAAAUQY8ePTJz5sw8+uijOe200zJkyJA8/fTTdV0WQCEV5hl/f/vb37Jy5crstdde5XUtWrRIjx49Nmj/Z555JrvttluaNWtWXrfvvvumpqYmzz77bPn/Iu28886pV+//8tIOHTpkl112KS/Xr18/bdq0yYIFC8rrfvazn+Vb3/pWnn/++SxdujSrVq1KVVXVemsZNWpULrvssg2qGwAAYHPSqFGjbL/99kmS3r17Z8aMGRkzZkyOOuqorFixIgsXLqw162/+/Pnl57N37Ngxjz32WK3+1kzU8Ax3gLUVZsbfxtKwYcNayxUVFetcV1NTk+Q/z6A45phj8qlPfSr33HNP/vCHP+SrX/1qVqxYsd5jDB8+PIsWLSp/Xn755Q/+RAAAADYBNTU1Wb58eXr37p2GDRvm/vvvL2979tlnM2fOnFRXVydJqqurM2vWrFoTLSZPnpyqqqr07Nlzo9cOsKkrzIy/7bbbLg0bNsyMGTPSpUuXJMmiRYvy17/+Nfvvv/877r/TTjtl/PjxWbZsWXnW37Rp01KvXr0NnjW4Lg8//HC6du2ar371q+V1L7300tvuU1lZmcrKyvd8TAAAgE3R8OHDM3DgwHTp0iVLlizJLbfckilTpmTSpElp0aJFTjzxxJx77rlp3bp1qqqqcuaZZ6a6ujr77LNPkuSggw5Kz549c+yxx2b06NGZN29eLr744gwdOtS/oQDWoTDB3xZbbJEhQ4aU367bvn37jBgxIvXq1UtFRcU77n/MMcdkxIgRGTJkSEaOHJl//OMfOfPMM3Pssceu9fDYd6N79+6ZM2dObr311nz84x/Pr3/969x1113vuT8AAIDN1YIFCzJ48ODMnTs3LVq0SK9evTJp0qQceOCBSZLrrrsu9erVy6BBg7J8+fIMGDAg3/nOd8r7169fP/fcc09OO+20VFdXp1mzZhkyZEguv/zyujolgE1aYYK/JLn22mtz6qmn5tBDD01VVVUuvPDCvPzyy2ncuPE77tu0adNMmjQpZ599dj7+8Y+nadOmGTRoUK699tr3VdOnP/3pnHPOOTnjjDOyfPnyHHLIIbnkkksycuTI99UvAADA5ubmm29+2+2NGzfOjTfemBtvvHG9bbp27Zrf/OY3H3RpAIVUUSqVSnVdxIdl2bJl2XrrrXPNNdfkxBNPrOty3rPFixenRYsW+fJvjknjZo3quhwAAIB3bcT+P6zrEgAKY01WtGjRord9gWyhZvz94Q9/yF/+8pfstddeWbRoUXm69+GHH17HlQEAAADAxlWo4C9Jrr766jz77LNp1KhRevfund///vdp27ZtXZcFAAAAABtVoYK/PfbYI0888URdlwEAAAAAda5eXRcAAAAAAHzwBH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCABH8AAAAAUECCPwAAAAAoIMEfAAAAABSQ4A8AAAAACkjwBwAAAAAFJPgDAAAAgAIS/AEAAABAAQn+AAAAAKCA
[base64-encoded PNG image output omitted]"},"metadata":{}}],"execution_count":10},{"cell_type":"markdown","source":"## 2.2 Split data into train, test, valid","metadata":{}},{"cell_type":"code","source":"valid_df, ts_df = train_test_split(ts_df, train_size=0.5, random_state=20, stratify=ts_df['Class'])","metadata":{"trusted":true},"outputs":[],"execution_count":11},{"cell_type":"code","source":"valid_df","metadata":{"trusted":true},"outputs":[{"execution_count":12,"output_type":"execute_result","data":{"text/plain":" Class Path Class\n1297 /kaggle/input/brain-tumor-mri-dataset/Testing/... glioma\n723 /kaggle/input/brain-tumor-mri-dataset/Testing/... meningioma\n61 /kaggle/input/brain-tumor-mri-dataset/Testing/... pituitary\n778 /kaggle/input/brain-tumor-mri-dataset/Testing/... meningioma\n1120 /kaggle/input/brain-tumor-mri-dataset/Testing/... glioma\n... ... ...\n1070 /kaggle/input/brain-tumor-mri-dataset/Testing/... glioma\n554 /kaggle/input/brain-tumor-mri-dataset/Testing/... notumor\n1284 /kaggle/input/brain-tumor-mri-dataset/Testing/... glioma\n899 /kaggle/input/brain-tumor-mri-dataset/Testing/... meningioma\n626 /kaggle/input/brain-tumor-mri-dataset/Testing/... notumor\n\n[655 rows x 2 columns]","text/html":"[HTML rendering of the same table omitted]
"},"metadata":{}}],"execution_count":12},{"cell_type":"markdown","source":"## 2.3 Data preprocessing","metadata":{}},{"cell_type":"code","source":"batch_size = 32\nimg_size = (128, 128)\n\n_gen = ImageDataGenerator(rescale=1/255,\n brightness_range=(0.8, 1.2))\n\nts_gen = ImageDataGenerator(rescale=1/255)\n\n\ntr_gen = _gen.flow_from_dataframe(tr_df, x_col='Class Path',\n y_col='Class', batch_size=5712,\n target_size=img_size)\n\nvalid_gen = _gen.flow_from_dataframe(valid_df, x_col='Class Path',\n y_col='Class', batch_size=655,\n target_size=img_size)\n\nts_gen = ts_gen.flow_from_dataframe(ts_df, x_col='Class Path',\n y_col='Class', batch_size=656,\n target_size=img_size, shuffle=False)","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:44:27.150148Z","iopub.execute_input":"2024-07-28T12:44:27.150994Z","iopub.status.idle":"2024-07-28T12:44:31.743371Z","shell.execute_reply.started":"2024-07-28T12:44:27.150964Z","shell.execute_reply":"2024-07-28T12:44:31.742454Z"},"trusted":true},"outputs":[{"name":"stdout","text":"Found 5712 validated image filenames belonging to 4 classes.\nFound 655 validated image filenames belonging to 4 classes.\nFound 656 validated image filenames belonging to 4 classes.\n","output_type":"stream"}],"execution_count":13},{"cell_type":"markdown","source":"## 2.4 Getting samples from data","metadata":{}},{"cell_type":"code","source":"X_train, y_train = next(tr_gen)\nX_train.shape, y_train.shape","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:44:31.745113Z","iopub.execute_input":"2024-07-28T12:44:31.745810Z","iopub.status.idle":"2024-07-28T12:45:01.160185Z","shell.execute_reply.started":"2024-07-28T12:44:31.745773Z","shell.execute_reply":"2024-07-28T12:45:01.159309Z"},"trusted":true},"outputs":[{"execution_count":14,"output_type":"execute_result","data":{"text/plain":"((5712, 128, 128, 3), (5712, 4))"},"metadata":{}}],"execution_count":14},{"cell_type":"code","source":"class ReadDataset:\n def __init__(self, datasetpath, labels, image_shape):\n self.datasetpath = datasetpath\n self.labels = labels\n self.image_shape = image_shape\n def returListImages(self,):\n self.images = []\n for label in self.labels:\n self.images.append(list(pathlib.Path(os.path.join(self.datasetpath,\n label)).glob('*.*')))\n def readImages(self,):\n self.returListImages()\n self.finalImages = []\n labels = []\n for label in range(len(self.labels)):\n for img in self.images[label]:\n img = cv2.imread(str(img))\n img = cv2.resize(img , self.image_shape)\n img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n img = img/255\n self.finalImages.append(img)\n labels.append(label)\n images = np.array(self.finalImages)\n labels = np.array(labels)\n return images, labels\nimport os\nimport pathlib\nreadDatasetObject = ReadDataset('/kaggle/input/the-iqothnccd-lung-cancer-dataset/The IQ-OTHNCCD lung cancer dataset',\n ['Bengin cases', 'Malignant cases', 'Normal cases'],\n (128,128))\nimages_train_covid_cxr, labels_train_covid_cxr = readDatasetObject.readImages()\nprint(images_train_covid_cxr.shape, labels_train_covid_cxr.shape)\n\nfrom sklearn.model_selection import train_test_split\n\nX_train_covid_cxr, X_test_covid_cxr, y_train_covid_cxr, y_test_covid_cxr = train_test_split(\n images_train_covid_cxr, labels_train_covid_cxr, test_size=0.3, random_state=42)\n\nprint(X_train_covid_cxr.shape, X_test_covid_cxr.shape, y_train_covid_cxr.shape, y_test_covid_cxr.shape)\n\nfrom sklearn.model_selection import train_test_split\n\nX_test_covid_cxr, X_val_covid_cxr, y_test_covid_cxr, y_val_covid_cxr = 
train_test_split(\n X_test_covid_cxr, y_test_covid_cxr, test_size=0.5, random_state=42)\n\nX_test_covid_cxr.shape, X_val_covid_cxr.shape, y_test_covid_cxr.shape, y_val_covid_cxr.shape","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:01.161627Z","iopub.execute_input":"2024-07-28T12:45:01.161922Z","iopub.status.idle":"2024-07-28T12:45:14.557137Z","shell.execute_reply.started":"2024-07-28T12:45:01.161897Z","shell.execute_reply":"2024-07-28T12:45:14.556237Z"},"trusted":true},"outputs":[{"name":"stdout","text":"(1097, 128, 128, 3) (1097,)\n(767, 128, 128, 3) (330, 128, 128, 3) (767,) (330,)\n","output_type":"stream"},{"execution_count":15,"output_type":"execute_result","data":{"text/plain":"((165, 128, 128, 3), (165, 128, 128, 3), (165,), (165,))"},"metadata":{}}],"execution_count":15},{"cell_type":"code","source":"","metadata":{},"outputs":[],"execution_count":null},{"cell_type":"code","source":"'''y_train_class_indices = y_train_covid_cxr\n\n# Map class indices\ny_train_mapped_indices = y_train_class_indices + 4\n\n# Convert back to one-hot encoding for training (if needed)\ny_train_mapped_one_hot = tf.keras.utils.to_categorical(y_train_mapped_indices)\n\n# Print the first 10 one-hot encoded labels to verify\nprint(\"One-hot encoded mapped labels (first 10):\", y_train_mapped_one_hot.shape)\ny_train_mapped_indices = np.array(y_train_mapped_indices)\ny_train_mapped_series = pd.Series(y_train_mapped_indices)\n\n# Use value_counts to count unique values\nvalue_counts = y_train_mapped_series.value_counts()\n\n# Print the value counts\nprint(value_counts)\n\ny_train1 = np.argmax(y_train, axis=1)\nprint(y_train1.shape)\n\nconcatenated_labels = np.concatenate((y_train1, y_train_mapped_indices))\nprint(concatenated_labels.shape)\n\nconcatenated_images = np.concatenate((X_train, X_train_covid_cxr))\nconcatenated_images.shape'''","metadata":{"execution":{"iopub.status.busy":"2024-12-01T13:59:56.653026Z","iopub.execute_input":"2024-12-01T13:59:56.653538Z","iopub.status.idle":"2024-12-01T13:59:56.663263Z","shell.execute_reply.started":"2024-12-01T13:59:56.653493Z","shell.execute_reply":"2024-12-01T13:59:56.662570Z"},"trusted":true},"outputs":[{"execution_count":1,"output_type":"execute_result","data":{"text/plain":"'y_train_class_indices = y_train_covid_cxr\\n\\n# Map class indices\\ny_train_mapped_indices = y_train_class_indices + 4\\n\\n# Convert back to one-hot encoding for training (if needed)\\ny_train_mapped_one_hot = tf.keras.utils.to_categorical(y_train_mapped_indices)\\n\\n# Print the first 10 one-hot encoded labels to verify\\nprint(\"One-hot encoded mapped labels (first 10):\", y_train_mapped_one_hot.shape)\\ny_train_mapped_indices = np.array(y_train_mapped_indices)\\ny_train_mapped_series = pd.Series(y_train_mapped_indices)\\n\\n# Use value_counts to count unique values\\nvalue_counts = y_train_mapped_series.value_counts()\\n\\n# Print the value counts\\nprint(value_counts)\\n\\ny_train1 = np.argmax(y_train, axis=1)\\nprint(y_train1.shape)\\n\\nconcatenated_labels = np.concatenate((y_train1, y_train_mapped_indices))\\nprint(concatenated_labels.shape)\\n\\nconcatenated_images = np.concatenate((X_train, X_train_covid_cxr))\\nconcatenated_images.shape'"},"metadata":{}}],"execution_count":1},{"cell_type":"code","source":"'''random_indices = np.random.choice(6479, 6478, replace=False)\nconcatenated_images = concatenated_images[random_indices]\nconcatenated_labels = concatenated_labels[random_indices]\nconcatenated_images.shape, 
concatenated_labels.shape'''","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:15.273605Z","iopub.execute_input":"2024-07-28T12:45:15.273975Z","iopub.status.idle":"2024-07-28T12:45:16.025240Z","shell.execute_reply.started":"2024-07-28T12:45:15.273943Z","shell.execute_reply":"2024-07-28T12:45:16.024292Z"},"trusted":true},"outputs":[{"execution_count":17,"output_type":"execute_result","data":{"text/plain":"((6478, 128, 128, 3), (6478,))"},"metadata":{}}],"execution_count":17},{"cell_type":"code","source":"'''import numpy as np\nimport cv2\n\ndef rotate_image(image, angle):\n \"\"\"\n Rotate the image by the specified angle.\n \"\"\"\n center = tuple(np.array(image.shape[1::-1]) / 2)\n rotation_matrix = cv2.getRotationMatrix2D(center, angle, 1.0)\n rotated_image = cv2.warpAffine(image, rotation_matrix, image.shape[1::-1], flags=cv2.INTER_LINEAR)\n return rotated_image\n\ndef translate_image(image, tx, ty):\n \"\"\"\n Translate the image by the specified translation parameters.\n \"\"\"\n translation_matrix = np.float32([[1, 0, tx], [0, 1, ty]])\n translated_image = cv2.warpAffine(image, translation_matrix, image.shape[1::-1])\n return translated_image\n\n# Example data\n#X_train = np.random.rand(100, 28, 28) # Assuming 100 images of size 28x28\n#y_train = np.random.randint(0, 10, 100) # Assuming 100 labels\n\n# Augmentation parameters\nrotation_angles = [5]\ntranslations = [(5, 5)]\n\naugmented_X_train = []\naugmented_y_train = []\n\nfor image, label in zip(X_train, y_train):\n # Original image\n augmented_X_train.append(image)\n augmented_y_train.append(label)\n\n # Augment with rotations\n for angle in rotation_angles:\n rotated_image = rotate_image(image, angle)\n augmented_X_train.append(rotated_image)\n augmented_y_train.append(label)\n\n # Augment with translations\n for tx, ty in translations:\n translated_image = translate_image(image, tx, ty)\n augmented_X_train.append(translated_image)\n augmented_y_train.append(label)\n\n# Convert lists to numpy arrays\naugmented_X_train = np.array(augmented_X_train)\naugmented_y_train = np.array(augmented_y_train)\n\n# Shuffle the data\nshuffle_indices = np.random.permutation(len(augmented_X_train))\naugmented_X_train = augmented_X_train[shuffle_indices]\naugmented_y_train = augmented_y_train[shuffle_indices]\naugmented_X_train.shape, augmented_y_train.shape\n# Now, augmented_X_train and augmented_y_train contain the augmented dataset.'''","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:16.026248Z","iopub.execute_input":"2024-07-28T12:45:16.026553Z","iopub.status.idle":"2024-07-28T12:45:16.034622Z","shell.execute_reply.started":"2024-07-28T12:45:16.026528Z","shell.execute_reply":"2024-07-28T12:45:16.033754Z"},"trusted":true},"outputs":[{"execution_count":18,"output_type":"execute_result","data":{"text/plain":"'import numpy as np\\nimport cv2\\n\\ndef rotate_image(image, angle):\\n \"\"\"\\n Rotate the image by the specified angle.\\n \"\"\"\\n center = tuple(np.array(image.shape[1::-1]) / 2)\\n rotation_matrix = cv2.getRotationMatrix2D(center, angle, 1.0)\\n rotated_image = cv2.warpAffine(image, rotation_matrix, image.shape[1::-1], flags=cv2.INTER_LINEAR)\\n return rotated_image\\n\\ndef translate_image(image, tx, ty):\\n \"\"\"\\n Translate the image by the specified translation parameters.\\n \"\"\"\\n translation_matrix = np.float32([[1, 0, tx], [0, 1, ty]])\\n translated_image = cv2.warpAffine(image, translation_matrix, image.shape[1::-1])\\n return translated_image\\n\\n# Example data\\n#X_train = 
np.random.rand(100, 28, 28) # Assuming 100 images of size 28x28\\n#y_train = np.random.randint(0, 10, 100) # Assuming 100 labels\\n\\n# Augmentation parameters\\nrotation_angles = [5]\\ntranslations = [(5, 5)]\\n\\naugmented_X_train = []\\naugmented_y_train = []\\n\\nfor image, label in zip(X_train, y_train):\\n # Original image\\n augmented_X_train.append(image)\\n augmented_y_train.append(label)\\n\\n # Augment with rotations\\n for angle in rotation_angles:\\n rotated_image = rotate_image(image, angle)\\n augmented_X_train.append(rotated_image)\\n augmented_y_train.append(label)\\n\\n # Augment with translations\\n for tx, ty in translations:\\n translated_image = translate_image(image, tx, ty)\\n augmented_X_train.append(translated_image)\\n augmented_y_train.append(label)\\n\\n# Convert lists to numpy arrays\\naugmented_X_train = np.array(augmented_X_train)\\naugmented_y_train = np.array(augmented_y_train)\\n\\n# Shuffle the data\\nshuffle_indices = np.random.permutation(len(augmented_X_train))\\naugmented_X_train = augmented_X_train[shuffle_indices]\\naugmented_y_train = augmented_y_train[shuffle_indices]\\naugmented_X_train.shape, augmented_y_train.shape\\n# Now, augmented_X_train and augmented_y_train contain the augmented dataset.'"},"metadata":{}}],"execution_count":18},{"cell_type":"code","source":"random_indices = np.random.choice(5712, 766, replace=False)\naugmented_X_train = X_train[random_indices]\naugmented_y_train = y_train[random_indices]\naugmented_X_train.shape, augmented_y_train.shape","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:16.035757Z","iopub.execute_input":"2024-07-28T12:45:16.036054Z","iopub.status.idle":"2024-07-28T12:45:16.094678Z","shell.execute_reply.started":"2024-07-28T12:45:16.036031Z","shell.execute_reply":"2024-07-28T12:45:16.093736Z"},"trusted":true},"outputs":[{"execution_count":19,"output_type":"execute_result","data":{"text/plain":"((766, 128, 128, 3), (766, 4))"},"metadata":{}}],"execution_count":19},{"cell_type":"code","source":"#X_train_covid_cxr.shape, X_test_covid_cxr.shape, y_train_covid_cxr.shape, y_test_covid_cxr.shape\n\ny_train = np.concatenate((y_train,augmented_y_train), axis=0)\nX_train = np.concatenate((X_train, augmented_X_train), axis=0)\nX_train.shape, y_train.shape","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:16.096011Z","iopub.execute_input":"2024-07-28T12:45:16.096659Z","iopub.status.idle":"2024-07-28T12:45:16.614766Z","shell.execute_reply.started":"2024-07-28T12:45:16.096622Z","shell.execute_reply":"2024-07-28T12:45:16.613870Z"},"trusted":true},"outputs":[{"execution_count":20,"output_type":"execute_result","data":{"text/plain":"((6478, 128, 128, 3), (6478, 4))"},"metadata":{}}],"execution_count":20},{"cell_type":"code","source":"import numpy as np\nimport cv2\n\ndef rotate_image(image, angle):\n \"\"\"\n Rotate the image by the specified angle.\n \"\"\"\n center = tuple(np.array(image.shape[1::-1]) / 2)\n rotation_matrix = cv2.getRotationMatrix2D(center, angle, 1.0)\n rotated_image = cv2.warpAffine(image, rotation_matrix, image.shape[1::-1], flags=cv2.INTER_LINEAR)\n return rotated_image\n\ndef translate_image(image, tx, ty):\n \"\"\"\n Translate the image by the specified translation parameters.\n \"\"\"\n translation_matrix = np.float32([[1, 0, tx], [0, 1, ty]])\n translated_image = cv2.warpAffine(image, translation_matrix, image.shape[1::-1])\n return translated_image\n\n# Example data\n#X_train = np.random.rand(100, 28, 28) # Assuming 100 images of size 28x28\n#y_train = 
np.random.randint(0, 10, 100) # Assuming 100 labels\n\n# Augmentation parameters\nrotation_angles = [5]\ntranslations = [(5, 5)]\n\naugmented_X_train = []\naugmented_y_train = []\n\nfor image, label in zip(X_train_covid_cxr, y_train_covid_cxr):\n # Original image\n augmented_X_train.append(image)\n augmented_y_train.append(label)\n\n # Augment with rotations\n for angle in rotation_angles:\n rotated_image = rotate_image(image, angle)\n augmented_X_train.append(rotated_image)\n augmented_y_train.append(label)\n\n # Augment with translations\n for tx, ty in translations:\n translated_image = translate_image(image, tx, ty)\n augmented_X_train.append(translated_image)\n augmented_y_train.append(label)\n\n# Convert lists to numpy arrays\naugmented_X_train = np.array(augmented_X_train)\naugmented_y_train = np.array(augmented_y_train)\n\n# Shuffle the data\nshuffle_indices = np.random.permutation(len(augmented_X_train))\naugmented_X_train = augmented_X_train[shuffle_indices]\naugmented_y_train = augmented_y_train[shuffle_indices]\naugmented_X_train.shape, augmented_y_train.shape\n# Now, augmented_X_train and augmented_y_train contain the augmented dataset.","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:16.616058Z","iopub.execute_input":"2024-07-28T12:45:16.616438Z","iopub.status.idle":"2024-07-28T12:45:17.888485Z","shell.execute_reply.started":"2024-07-28T12:45:16.616404Z","shell.execute_reply":"2024-07-28T12:45:17.887493Z"},"trusted":true},"outputs":[{"execution_count":21,"output_type":"execute_result","data":{"text/plain":"((2301, 128, 128, 3), (2301,))"},"metadata":{}}],"execution_count":21},{"cell_type":"code","source":"#X_train_covid_cxr.shape, X_test_covid_cxr.shape, y_train_covid_cxr.shape, y_test_covid_cxr.shape\n\nlabels_train_covid_cxr = np.concatenate((augmented_y_train,augmented_y_train, augmented_y_train), axis=0)\nimages_train_covid_cxr = np.concatenate((augmented_X_train, augmented_X_train, augmented_X_train), axis=0)\nimages_train_covid_cxr.shape, labels_train_covid_cxr.shape\n\n","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:17.892639Z","iopub.execute_input":"2024-07-28T12:45:17.892948Z","iopub.status.idle":"2024-07-28T12:45:18.870631Z","shell.execute_reply.started":"2024-07-28T12:45:17.892921Z","shell.execute_reply":"2024-07-28T12:45:18.869600Z"},"trusted":true},"outputs":[{"execution_count":22,"output_type":"execute_result","data":{"text/plain":"((6903, 128, 128, 3), (6903,))"},"metadata":{}}],"execution_count":22},{"cell_type":"code","source":"random_indices = np.random.choice(6903, 6478, replace=False)\nimages_train_covid_cxr = images_train_covid_cxr[random_indices]\nlabels_train_covid_cxr = labels_train_covid_cxr[random_indices]\nimages_train_covid_cxr.shape, labels_train_covid_cxr.shape","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:18.871833Z","iopub.execute_input":"2024-07-28T12:45:18.872130Z","iopub.status.idle":"2024-07-28T12:45:19.616245Z","shell.execute_reply.started":"2024-07-28T12:45:18.872105Z","shell.execute_reply":"2024-07-28T12:45:19.615456Z"},"trusted":true},"outputs":[{"execution_count":23,"output_type":"execute_result","data":{"text/plain":"((6478, 128, 128, 3), (6478,))"},"metadata":{}}],"execution_count":23},{"cell_type":"code","source":"X_val, y_val = next(valid_gen)\nX_val.shape, 
y_val.shape","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:19.617579Z","iopub.execute_input":"2024-07-28T12:45:19.617952Z","iopub.status.idle":"2024-07-28T12:45:22.853226Z","shell.execute_reply.started":"2024-07-28T12:45:19.617918Z","shell.execute_reply":"2024-07-28T12:45:22.852312Z"},"trusted":true},"outputs":[{"execution_count":24,"output_type":"execute_result","data":{"text/plain":"((655, 128, 128, 3), (655, 4))"},"metadata":{}}],"execution_count":24},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"random_indices = np.random.choice(820, 800, replace=False)\nconcatenated_val_images1 = concatenated_val_images[random_indices]\nconcatenated_val_labels1 = concatenated_val_labels[random_indices]\nconcatenated_val_images1.shape, concatenated_val_labels1.shape","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:22.917173Z","iopub.execute_input":"2024-07-28T12:45:22.917550Z","iopub.status.idle":"2024-07-28T12:45:23.014879Z","shell.execute_reply.started":"2024-07-28T12:45:22.917524Z","shell.execute_reply":"2024-07-28T12:45:23.013962Z"},"trusted":true},"outputs":[{"execution_count":26,"output_type":"execute_result","data":{"text/plain":"((800, 128, 128, 3), (800, 7))"},"metadata":{}}],"execution_count":26},{"cell_type":"code","source":"#X_train_covid_cxr.shape, X_test_covid_cxr.shape, y_train_covid_cxr.shape, y_test_covid_cxr.shape\n\nconcatenated_val_labels = np.concatenate((concatenated_val_labels, concatenated_val_labels1), axis=0)\nconcatenated_val_images = np.concatenate((concatenated_val_images, concatenated_val_images1), axis=0)\nconcatenated_val_images.shape, concatenated_val_labels.shape","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:23.016111Z","iopub.execute_input":"2024-07-28T12:45:23.016496Z","iopub.status.idle":"2024-07-28T12:45:23.262223Z","shell.execute_reply.started":"2024-07-28T12:45:23.016463Z","shell.execute_reply":"2024-07-28T12:45:23.261337Z"},"trusted":true},"outputs":[{"execution_count":27,"output_type":"execute_result","data":{"text/plain":"((1620, 128, 128, 3), (1620, 7))"},"metadata":{}}],"execution_count":27},{"cell_type":"code","source":"#X_train_covid_cxr.shape, X_test_covid_cxr.shape, y_train_covid_cxr.shape, y_test_covid_cxr.shape\n\nX_val = np.concatenate((X_val, X_val, X_val), axis=0)\ny_val = np.concatenate((y_val, y_val, y_val), axis=0)\nX_val.shape, y_val.shape","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:23.263535Z","iopub.execute_input":"2024-07-28T12:45:23.263835Z","iopub.status.idle":"2024-07-28T12:45:23.352803Z","shell.execute_reply.started":"2024-07-28T12:45:23.263811Z","shell.execute_reply":"2024-07-28T12:45:23.351862Z"},"trusted":true},"outputs":[{"execution_count":28,"output_type":"execute_result","data":{"text/plain":"((1965, 128, 128, 3), (1965, 4))"},"metadata":{}}],"execution_count":28},{"cell_type":"code","source":"random_indices = np.random.choice(1965, 1620, replace=False)\nX_val = X_val[random_indices]\ny_val = y_val[random_indices]\nX_val.shape, y_val.shape","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:23.353895Z","iopub.execute_input":"2024-07-28T12:45:23.354193Z","iopub.status.idle":"2024-07-28T12:45:23.453976Z","shell.execute_reply.started":"2024-07-28T12:45:23.354169Z","shell.execute_reply":"2024-07-28T12:45:23.453057Z"},"trusted":true},"outputs":[{"execution_count":29,"output_type":"execute_result","data":{"text/plain":"((1620, 128, 128, 3), (1620, 
4))"},"metadata":{}}],"execution_count":29},{"cell_type":"code","source":"#X_val_covid_cxr\n\n#X_train_covid_cxr.shape, X_test_covid_cxr.shape, y_train_covid_cxr.shape, y_test_covid_cxr.shape\n\nX_val_covid_cxr = np.concatenate((X_val_covid_cxr, X_val_covid_cxr, X_val_covid_cxr, X_val_covid_cxr, X_val_covid_cxr), axis=0)\ny_val_covid_cxr = np.concatenate((y_val_covid_cxr, y_val_covid_cxr, y_val_covid_cxr, y_val_covid_cxr, y_val_covid_cxr), axis=0)\nX_val_covid_cxr.shape, y_val_covid_cxr.shape","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:23.455229Z","iopub.execute_input":"2024-07-28T12:45:23.455947Z","iopub.status.idle":"2024-07-28T12:45:23.529774Z","shell.execute_reply.started":"2024-07-28T12:45:23.455911Z","shell.execute_reply":"2024-07-28T12:45:23.528862Z"},"trusted":true},"outputs":[{"execution_count":30,"output_type":"execute_result","data":{"text/plain":"((825, 128, 128, 3), (825,))"},"metadata":{}}],"execution_count":30},{"cell_type":"code","source":"#X_val_covid_cxr\n\n#X_train_covid_cxr.shape, X_test_covid_cxr.shape, y_train_covid_cxr.shape, y_test_covid_cxr.shape\n\nX_val_covid_cxr = np.concatenate((X_val_covid_cxr, X_val_covid_cxr), axis=0)\ny_val_covid_cxr = np.concatenate((y_val_covid_cxr, y_val_covid_cxr), axis=0)\nX_val_covid_cxr.shape, y_val_covid_cxr.shape","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:23.531096Z","iopub.execute_input":"2024-07-28T12:45:23.531745Z","iopub.status.idle":"2024-07-28T12:45:23.789693Z","shell.execute_reply.started":"2024-07-28T12:45:23.531709Z","shell.execute_reply":"2024-07-28T12:45:23.788678Z"},"trusted":true},"outputs":[{"execution_count":31,"output_type":"execute_result","data":{"text/plain":"((1650, 128, 128, 3), (1650,))"},"metadata":{}}],"execution_count":31},{"cell_type":"code","source":"random_indices = np.random.choice(1650, 1620, replace=False)\nX_val_covid_cxr = X_val_covid_cxr[random_indices]\ny_val_covid_cxr = y_val_covid_cxr[random_indices]\nX_val_covid_cxr.shape, y_val_covid_cxr.shape","metadata":{"execution":{"iopub.status.busy":"2024-07-28T12:45:23.790715Z","iopub.execute_input":"2024-07-28T12:45:23.790975Z","iopub.status.idle":"2024-07-28T12:45:23.981638Z","shell.execute_reply.started":"2024-07-28T12:45:23.790954Z","shell.execute_reply":"2024-07-28T12:45:23.980697Z"},"trusted":true},"outputs":[{"execution_count":32,"output_type":"execute_result","data":{"text/plain":"((1620, 128, 128, 3), (1620,))"},"metadata":{}}],"execution_count":32},{"cell_type":"code","source":"","metadata":{},"outputs":[],"execution_count":null}]}
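Note: the notebook above reads the IQ-OTH/NCCD lung CT images with a custom ReadDataset class and then enlarges the training split by appending slightly rotated and translated copies of each image. The following is a condensed, self-contained sketch of that load-and-augment pattern; the dataset path, folder names, and the rotation/translation helpers follow the cells above, while the consolidated script layout and variable names are illustrative and not part of the repository.

```python
import os
import pathlib

import cv2
import numpy as np
from sklearn.model_selection import train_test_split

IMG_SIZE = (128, 128)
DATA_ROOT = '/kaggle/input/the-iqothnccd-lung-cancer-dataset/The IQ-OTHNCCD lung cancer dataset'
CLASS_FOLDERS = ['Bengin cases', 'Malignant cases', 'Normal cases']  # folder names as spelled in the dataset

def load_folder_dataset(root, labels, size=IMG_SIZE):
    """Read one folder per class, resize, convert BGR->RGB and scale to [0, 1]."""
    images, targets = [], []
    for idx, label in enumerate(labels):
        for path in pathlib.Path(os.path.join(root, label)).glob('*.*'):
            img = cv2.cvtColor(cv2.imread(str(path)), cv2.COLOR_BGR2RGB)
            images.append((cv2.resize(img, size) / 255.0).astype(np.float32))  # keep float32 for Keras
            targets.append(idx)
    return np.array(images), np.array(targets)

def rotate_image(image, angle):
    """Rotate around the image centre by `angle` degrees."""
    center = tuple(np.array(image.shape[1::-1]) / 2)
    matrix = cv2.getRotationMatrix2D(center, angle, 1.0)
    return cv2.warpAffine(image, matrix, image.shape[1::-1], flags=cv2.INTER_LINEAR)

def translate_image(image, tx, ty):
    """Shift the image by (tx, ty) pixels."""
    matrix = np.float32([[1, 0, tx], [0, 1, ty]])
    return cv2.warpAffine(image, matrix, image.shape[1::-1])

X, y = load_folder_dataset(DATA_ROOT, CLASS_FOLDERS)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)

# Enlarge the training split: one rotated and one translated copy per image, as in the notebook.
augmented = [aug(img) for img in X_train for aug in (lambda i: rotate_image(i, 5),
                                                     lambda i: translate_image(i, 5, 5))]
X_train = np.concatenate([X_train, np.array(augmented, dtype=np.float32)])
y_train = np.concatenate([y_train, np.repeat(y_train, 2)])
```

The oversampling step mirrors how the notebook brings the lung-CT split up to the size of the MRI split before the two branches are trained together; only the degree of rotation and shift (5 degrees, 5 px) is taken directly from the cells above.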
--------------------------------------------------------------------------------
/drifa-net.ipynb:
--------------------------------------------------------------------------------
1 | {"metadata":{"kernelspec":{"language":"python","display_name":"Python 3","name":"python3"},"language_info":{"name":"python","version":"3.10.14","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"},"kaggle":{"accelerator":"gpu","dataSources":[],"dockerImageVersionId":30787,"isInternetEnabled":true,"language":"python","sourceType":"notebook","isGpuEnabled":true}},"nbformat_minor":4,"nbformat":4,"cells":[{"cell_type":"code","source":"import cv2\nimport seaborn as sns\nimport matplotlib.pyplot as plt\nimport numpy as np\nimport pandas as pd\nimport plotly.express as px\nsns.set_style('whitegrid')\nfrom sklearn.metrics import confusion_matrix , classification_report\nimport tensorflow as tf\nfrom tensorflow import keras\nfrom tensorflow.keras.models import Sequential\nfrom tensorflow.keras.layers import Dense , Flatten , Conv2D , MaxPooling2D , Dropout , Activation , BatchNormalization\nfrom tensorflow.keras.preprocessing.image import ImageDataGenerator\nfrom tensorflow.keras.optimizers import Adam , Adamax\nfrom tensorflow.keras import regularizers\n\n#Warnings\nimport warnings\nwarnings.filterwarnings('ignore')","metadata":{"_uuid":"8f2839f25d086af736a60e9eeb907d3b93b6e0e5","_cell_guid":"b1076dfc-b9ad-4769-8c92-a6c4dae69d19","trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"import tensorflow as tf\ntf.keras.mixed_precision.set_global_policy('mixed_float16')\n\nimport tensorflow as tf\nimport numpy as np\nimport numpy as np\nimport tensorflow as tf\nfrom tensorflow.keras.layers import Input, Conv2D, MaxPooling2D, Flatten, Dense, Dropout, GlobalAveragePooling2D, BatchNormalization, ReLU, Add\nfrom tensorflow.keras.models import Model\nfrom tensorflow.keras.losses import KLDivergence\nfrom tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping\nfrom tensorflow.keras.utils import to_categorical\nfrom tensorflow.keras.datasets import cifar10\nfrom tensorflow.keras.applications import EfficientNetB0\nfrom tensorflow.keras.applications import DenseNet121, ResNet50V2\nfrom tensorflow.keras.layers import GlobalAveragePooling2D\nimport copy\nimport numpy as np\nimport tensorflow as tf\nfrom tensorflow.keras.layers import Input, Conv2D, MaxPooling2D, Flatten, Dense, Dropout, GlobalAveragePooling2D, BatchNormalization, ReLU, Add\nfrom tensorflow.keras.models import Model\nfrom tensorflow.keras.losses import KLDivergence\nfrom tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping\nfrom tensorflow.keras.utils import to_categorical\nfrom tensorflow.keras.datasets import cifar10\nfrom tensorflow.keras.applications import EfficientNetB0\nfrom tensorflow.keras.applications import DenseNet169, MobileNetV2, ResNet50, EfficientNetB0\nfrom tensorflow.keras.layers import GlobalAveragePooling2D\nimport copy\n\nimport tensorflow as tf\nfrom tensorflow.keras import layers\nimport tensorflow as tf\nfrom tensorflow.keras import layers\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"X_train_h = np.load('X_train_HAM10000_ISIC_2018.npy')\ny_train_h = np.load('y_train_HAM10000_ISIC_2018.npy')\nX_test_h = np.load('X_test_HAM10000_ISIC_2018.npy')\ny_test_h = np.load('y_test_HAM10000_ISIC_2018.npy')\n\n\nX_train_h.shape, y_train_h.shape, X_test_h.shape, 
y_test_h.shape\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"random_indices = np.random.choice(2003, 810, replace=False)\n\nX_test_h1 = X_test_h[random_indices]\ny_test_h1 = y_test_h[random_indices]\n\nX_test_h1.shape, y_test_h1.shape, X_test_h.shape, y_test_h.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"#X_train_s.shape,X_test_s.shape, y_train_s.shape,y_test_s.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"X_train_h.shape, y_train_h.shape, X_test_h.shape, y_test_h.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"X_train_s = np.load('data_cervical_cancer_sipkamed.npy')\ny_train_s = np.load('labels_cervical_cancer_sipkamed.npy')\n\nX_train_s.shape, y_train_s.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"from sklearn.model_selection import train_test_split\n\nX_train_s, X_test_s, y_train_s, y_test_s = train_test_split(X_train_s, y_train_s, test_size=0.2, random_state=42)\n\nX_train_s.shape,X_test_s.shape, y_train_s.shape,y_test_s.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"import numpy as np\nimport cv2\n\ndef rotate_image(image, angle):\n \"\"\"\n Rotate the image by the specified angle.\n \"\"\"\n center = tuple(np.array(image.shape[1::-1]) / 2)\n rotation_matrix = cv2.getRotationMatrix2D(center, angle, 1.0)\n rotated_image = cv2.warpAffine(image, rotation_matrix, image.shape[1::-1], flags=cv2.INTER_LINEAR)\n return rotated_image\n\ndef translate_image(image, tx, ty):\n \"\"\"\n Translate the image by the specified translation parameters.\n \"\"\"\n translation_matrix = np.float32([[1, 0, tx], [0, 1, ty]])\n translated_image = cv2.warpAffine(image, translation_matrix, image.shape[1::-1])\n return translated_image\n\n# Example data\n#X_train = np.random.rand(100, 28, 28) # Assuming 100 images of size 28x28\n#y_train = np.random.randint(0, 10, 100) # Assuming 100 labels\n\n# Augmentation parameters\nrotation_angles = [20]\ntranslations = [(5, 5)]\n\naugmented_X_train = []\naugmented_y_train = []\n\nfor image, label in zip(X_train_s, y_train_s):\n # Original image\n #augmented_X_train.append(image)\n #augmented_y_train.append(label)\n\n # Augment with rotations\n for angle in rotation_angles:\n rotated_image = rotate_image(image, angle)\n augmented_X_train.append(rotated_image)\n augmented_y_train.append(label)\n\n # Augment with translations\n for tx, ty in translations:\n translated_image = translate_image(image, tx, ty)\n augmented_X_train.append(translated_image)\n augmented_y_train.append(label)\n\n# Convert lists to numpy arrays\naugmented_X_train = np.array(augmented_X_train)\naugmented_y_train = np.array(augmented_y_train)\n\n# Shuffle the data\nshuffle_indices = np.random.permutation(len(augmented_X_train))\naugmented_X_train = augmented_X_train[shuffle_indices]\naugmented_y_train = augmented_y_train[shuffle_indices]\naugmented_X_train.shape, augmented_y_train.shape\n# Now, augmented_X_train and augmented_y_train contain the augmented dataset.","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"random_indices = np.random.choice(6478, 4773, replace=False)\n\naugmented_X_train = 
augmented_X_train[random_indices]\naugmented_y_train = augmented_y_train[random_indices]\n\naugmented_X_train.shape, augmented_y_train.shape\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"X_train_s = np.concatenate((X_train_s, augmented_X_train), axis=0)\ny_train_s = np.concatenate((y_train_s, augmented_y_train), axis=0)\nX_train_s.shape, y_train_s.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"'''X_train_s = np.concatenate((X_train_s, X_train_s, X_train_s), axis=0)\ny_train_s = np.concatenate((y_train_s, y_train_s, y_train_s), axis=0)\nX_train_s.shape, y_train_s.shape'''","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"X_test_s1 = np.concatenate((X_test_s, X_test_s, X_test_s), axis=0)\ny_test_s1 = np.concatenate((y_test_s, y_test_s, y_test_s), axis=0)\nX_test_s1.shape, y_test_s1.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"X_train_s.shape, y_train_s.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"augmented_X_train.shape, augmented_y_train.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"random_indices = np.random.choice(2430, 2003, replace=False)\n\nX_test_s1 = X_test_s1[random_indices]\ny_test_s1 = y_test_s1[random_indices]\n\nX_test_s1.shape, y_test_s1.shape, X_test_s.shape, y_test_s.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"#X_train.shape, y_train.shape, X_test.shape, y_test.shape, \nX_train_s.shape,X_test_s.shape, y_train_s.shape,y_test_s.shape, X_test_s1.shape, y_test_s1.shape","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"print(X_train_h.shape, y_train_h.shape, X_test_h.shape, y_test_h.shape,\n#X_train.shape, y_train.shape, X_test.shape, y_test.shape,\nX_train_s.shape,X_test_s.shape, X_test_s1.shape, y_train_s.shape,y_test_s.shape, y_test_s1.shape)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"markdown","source":"**Multi-branch fusion attention (MFA) module**","metadata":{}},{"cell_type":"code","source":"#### Multi-branch fusion attention (MFA) module #####\n\nclass DeeperGlobalLocalAttentionLayer1(layers.Layer):\n def __init__(self, units, activation='sigmoid', dropout_rate=0.2, use_scale=True, axis=-1, **kwargs):\n super(DeeperGlobalLocalAttentionLayer1, self).__init__(**kwargs)\n self.units = units\n self.activation = activation\n self.dropout_rate = dropout_rate\n self.use_scale = use_scale\n self.axis = axis\n\n def build(self, input_shape):\n _, _, _, channels = input_shape\n self.global_conv1 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n self.global_avg_pooling1 = layers.GlobalAveragePooling2D()\n \n self.global_conv2 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n self.global_avg_pooling2 = layers.GlobalMaxPooling2D()\n \n self.global_conv3 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n self.global_avg_pooling3 = layers.GlobalAveragePooling2D()\n \n self.global_conv4 = layers.Conv2D(filters=self.units, kernel_size=(1, 
1), activation=self.activation)\n self.global_avg_pooling4 = layers.GlobalMaxPooling2D()\n \n self.concat1 = layers.Add()\n self.concat2 = layers.Add()\n self.concat3 = layers.Add()\n self.concat4 = layers.Add()\n self.concat5 = layers.Concatenate(axis=-1)\n \n self.global_attention = layers.Dense(units=self.units, activation=self.activation)\n \n self.local_conv1 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n self.local_conv2 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n self.concat6 = layers.Add()\n \n if self.use_scale:\n self.global_scale = self.add_weight(shape=(1, 1, 1, 1), initializer='ones', trainable=True, name='global_scale')\n self.local_scale = self.add_weight(shape=(1, 1, 1, self.units), initializer='ones', trainable=True, name='local_scale')\n \n super(DeeperGlobalLocalAttentionLayer1, self).build(input_shape)\n\n def call(self, inputs, training=None):\n ##### Hierarchical Information Fusion Attention(HIFA) ######\n \n global_attention1 = self.global_conv1(inputs)\n global_avg1 = self.global_avg_pooling1(global_attention1)\n \n global_attention2 = self.global_conv2(global_attention1)\n global_avg2 = self.global_avg_pooling2(global_attention2)\n \n global_concat1 = self.concat1([global_avg1, global_avg2])\n global_attention_concat1 = self.concat2([global_attention1, global_attention2])\n \n global_attention3 = self.global_conv3(global_attention_concat1)\n global_avg3 = self.global_avg_pooling3(global_attention3)\n \n global_attention4 = self.global_conv4(global_attention3)\n global_avg4 = self.global_avg_pooling4(global_attention4)\n \n global_concat2 = self.concat3([global_avg3, global_avg4])\n global_attention_concat2 = self.concat4([global_attention3, global_attention4])\n \n global_avg_concat = self.concat5([global_concat1, global_concat2])\n \n global_attention = self.global_attention(global_avg_concat)\n global_attention = tf.expand_dims(tf.expand_dims(global_attention, 1), 1)\n\n ##### Channel-wise Local Information Attention (CLIA) ######\n \n local_attention1 = self.local_conv1(inputs)\n local_attention1 = tf.reduce_mean(local_attention1, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n local_attention2 = self.local_conv2(local_attention1)\n local_attention2 = tf.reduce_mean(local_attention2, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n \n local_attention = self.concat6([local_attention1, local_attention2])\n \n # Scale Global and Local Attention\n if self.use_scale:\n global_attention *= self.global_scale\n local_attention *= self.local_scale\n\n # Combine Global and Local Attention\n attention = tf.sigmoid(global_attention + local_attention)\n return attention\n\n def get_config(self):\n config = super(DeeperGlobalLocalAttentionLayer1, self).get_config()\n config.update({'units': self.units, 'activation': self.activation, 'dropout_rate': self.dropout_rate,\n 'use_scale': self.use_scale})\n return config\n\nclass DeeperAttentionLayer1(layers.Layer):\n def __init__(self, units=64, use_scale=True, **kwargs):\n super(DeeperAttentionLayer1, self).__init__(**kwargs)\n self.units = units\n self.use_scale = use_scale\n\n def build(self, input_shape):\n _, H, W, C = input_shape\n self.alpha = self.add_weight(shape=(1, 1, 1, C), initializer='ones', trainable=True, name='alpha')\n self.deeper_global_local_attention = DeeperGlobalLocalAttentionLayer1(units=self.units, activation='sigmoid', \n dropout_rate=0.2, # You can adjust the dropout rate\n use_scale=self.use_scale)\n 
super(DeeperAttentionLayer1, self).build(input_shape)\n\n def call(self, inputs, training=None):\n attention = self.deeper_global_local_attention(inputs, training=training)\n attention_feature = inputs * attention * self.alpha\n return attention_feature\n\n def get_config(self):\n config = super(DeeperAttentionLayer1, self).get_config()\n config.update({'units': self.units, 'use_scale': self.use_scale})\n return config\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"markdown","source":"**Multimodal information fusion attention (MIFA)**","metadata":{}},{"cell_type":"code","source":"########## Multimodal information fusion attention (MIFA) ###############\n\n\n\nclass GlobalMinPooling2D(layers.Layer):\n def __init__(self, **kwargs):\n super(GlobalMinPooling2D, self).__init__(**kwargs)\n\n def call(self, inputs):\n return tf.reduce_min(inputs, axis=[1, 2])\n\n def compute_output_shape(self, input_shape):\n return (input_shape[0], input_shape[-1])\n\n def get_config(self):\n config = super(GlobalMinPooling2D, self).get_config()\n return config\n\n\nclass DeeperGlobalLocalAttentionLayer(layers.Layer):\n def __init__(self, units, activation='sigmoid', dropout_rate=0.2, use_scale=True, axis=-1, **kwargs):\n super(DeeperGlobalLocalAttentionLayer, self).__init__(**kwargs)\n self.units = units\n self.activation = activation\n self.dropout_rate = dropout_rate\n self.use_scale = use_scale\n self.axis = axis\n\n def build(self, input_shapes):\n input_shape1, input_shape2 = input_shapes\n _, _, _, channels1 = input_shape1\n _, _, _, channels2 = input_shape2\n \n self.global_min_pooling1 = GlobalMinPooling2D()\n self.global_avg_pooling1 = layers.GlobalAveragePooling2D()\n self.global_max_pooling1 = layers.GlobalMaxPooling2D()\n \n self.global_attention = layers.Dense(units=self.units, activation=self.activation)\n \n self.global_min_pooling2 = GlobalMinPooling2D()\n self.global_avg_pooling2 = layers.GlobalAveragePooling2D()\n self.global_max_pooling2 = layers.GlobalMaxPooling2D()\n \n #self.global_attention2 = layers.Dense(units=self.units, activation=self.activation)\n \n \n self.concat = layers.Add()\n #self.global_attention3 = layers.Dense(units=self.units, activation=self.activation)\n \n self.local_conv1 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n self.local_conv2 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n \n \n \n self.concat2 = layers.Add()\n #self.local_conv5 = layers.Conv2D(filters=self.units, kernel_size=(1, 1), activation=self.activation)\n \n if self.use_scale:\n self.global_scale = self.add_weight(shape=(1, 1, 1, 1), initializer='ones', trainable=True, name='global_scale')\n self.local_scale = self.add_weight(shape=(1, 1, 1, self.units), initializer='ones', trainable=True, name='local_scale')\n \n super(DeeperGlobalLocalAttentionLayer, self).build(input_shapes)\n\n def call(self, inputs, training=None):\n inputs1, inputs2 = inputs\n\n ######### Multimodal Global Information Fusion Attention (MGIFA) #########\n global_min1 = self.global_min_pooling1(inputs1)\n global_avg1 = self.global_avg_pooling1(inputs1)\n global_max1 = self.global_max_pooling1(inputs1)\n\n global_min2 = self.global_min_pooling2(inputs2)\n global_avg2 = self.global_avg_pooling2(inputs2)\n global_max2 = self.global_max_pooling2(inputs2)\n\n concat_min = self.concat([global_min1, global_min2])\n concat_avg = self.concat([global_avg1, global_avg2])\n concat_max = self.concat([global_max1, global_max2])\n \n 
concat_min = self.global_attention(concat_min)\n concat_avg = self.global_attention(concat_avg)\n concat_max = self.global_attention(concat_max)\n \n concat_global_attention = self.concat([concat_min, concat_avg, concat_max])\n \n #global_attention = self.global_attention3(concat_global_attention)\n \n global_attention = tf.expand_dims(tf.expand_dims(concat_global_attention, 1), 1)\n\n ######### Multimodal Local Information Fusion Attention (MLIFA) #########\n \n local_conv1 = self.local_conv1(inputs1)\n local_min1 = tf.reduce_min(local_conv1, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n local_avg1 = tf.reduce_mean(local_conv1, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n local_max1 = tf.reduce_max(local_conv1, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n \n local_conv2 = self.local_conv2(inputs2)\n local_min2 = tf.reduce_min(local_conv2, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n local_avg2 = tf.reduce_mean(local_conv2, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n local_max2 = tf.reduce_max(local_conv2, axis=[1, 2], keepdims=True) # Reduce spatial dimensions\n \n local_concat_min = self.concat2([local_min1, local_min2])\n local_concat_avg = self.concat2([local_avg1, local_avg2])\n local_concat_max = self.concat2([local_max1, local_max2])\n\n local_attention = self.concat2([local_concat_min, local_concat_avg, local_concat_max])\n \n \n # Scale Global and Local Attention\n if self.use_scale:\n global_attention *= self.global_scale\n local_attention *= self.local_scale\n\n # Combine Global and Local Attention\n attention = tf.sigmoid(global_attention + local_attention)\n return attention\n\n def get_config(self):\n config = super(DeeperGlobalLocalAttentionLayer, self).get_config()\n config.update({'units': self.units, 'activation': self.activation, 'dropout_rate': self.dropout_rate,\n 'use_scale': self.use_scale})\n return config\n\nclass DeeperAttentionLayer(layers.Layer):\n def __init__(self, units=64, use_scale=True,axis=-1, **kwargs):\n super(DeeperAttentionLayer, self).__init__(**kwargs)\n self.units = units\n self.use_scale = use_scale\n self.axis = axis \n\n def build(self, input_shapes):\n input_shape1, input_shape2 = input_shapes\n _, H, W, C1 = input_shape1\n _, H, W, C2 = input_shape2\n \n self.alpha1 = self.add_weight(shape=(1, 1, 1, C1), initializer='ones', trainable=True, name='alpha1')\n self.alpha2 = self.add_weight(shape=(1, 1, 1, C2), initializer='ones', trainable=True, name='alpha2')\n \n self.deeper_global_local_attention = DeeperGlobalLocalAttentionLayer(units=self.units, activation='sigmoid', \n dropout_rate=0.2, # You can adjust the dropout rate\n use_scale=self.use_scale)\n #self.concat3 = layers.Add()\n #self.concat4 = layers.Add()\n \n super(DeeperAttentionLayer, self).build(input_shapes)\n\n def call(self, inputs, training=None):\n inputs1, inputs2 = inputs\n attention = self.deeper_global_local_attention([inputs1, inputs2], training=training)\n \n #inputs_concat = self.concat3([inputs1, inputs2])\n #alpha_concat = self.concat4([self.alpha1, self.alpha2])\n \n attention_feature1 = inputs1 * attention * self.alpha1\n attention_feature2 = inputs2 * attention * self.alpha2\n \n return attention_feature1, attention_feature2\n\n def get_config(self):\n config = super(DeeperAttentionLayer, self).get_config()\n config.update({'units': self.units, 'use_scale': self.use_scale})\n return config\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"### RRA block 
########\n\ndef RGSA(x, filters, strides=(1, 1), use_projection=False):\n shortcut = x\n\n # Define the first convolutional layer of the block\n \n x = Conv2D(filters=filters, kernel_size=(3, 3), strides=strides, padding='same', \n #activation = 'relu'\n\n )(x)\n x = DeeperAttentionLayer1(units=filters, use_scale=True)(x)\n x = BatchNormalization()(x)\n x = tf.keras.layers.Activation('relu')(x)\n\n # Define the second convolutional layer of the block\n \n x = Conv2D(filters=filters, kernel_size=(3, 3), padding='same')(x)\n x = DeeperAttentionLayer1(units=filters, use_scale=True)(x)\n \n x = BatchNormalization()(x)\n\n # If the stride is not (1, 1), the dimensions need to be adjusted\n if strides != (1, 1) or use_projection:\n \n shortcut = Conv2D(filters=filters, kernel_size=(1, 1), strides=strides, padding='same')(shortcut)\n shortcut = BatchNormalization()(shortcut)\n\n # Add the shortcut (identity connection)\n \n x = tf.keras.layers.add([x, shortcut])\n \n x = tf.keras.layers.Activation('relu')(x)\n return x\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"markdown","source":"**DRIFA-Net**","metadata":{}},{"cell_type":"code","source":"def residual_GLC_branch1(inputs1, inputs2):\n \n x1 = Conv2D(filters=64, kernel_size=(7, 7), strides=(2, 2), padding='same')(inputs1)\n x1 = DeeperAttentionLayer1(units=64, use_scale=True)(x1) ## MFA ####\n x1 = BatchNormalization()(x1)\n x1 = tf.keras.layers.Activation('relu')(x1)\n x1 = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same')(x1)\n \n x2 = Conv2D(filters=64, kernel_size=(7, 7), strides=(2, 2), padding='same')(inputs2)\n x2 = DeeperAttentionLayer1(units=64, use_scale=True)(x2) ## MFA ####\n x2 = BatchNormalization()(x2)\n x2 = tf.keras.layers.Activation('relu')(x2)\n x2 = MaxPooling2D(pool_size=(3, 3), strides=(2, 2), padding='same')(x2)\n \n\n x1 = RGSA(x1, filters=64)\n x1 = tf.keras.layers.Dropout(0.25)(x1, training = True) ## MCD ####\n x1 = DeeperAttentionLayer1(units=64, use_scale=True)(x1) ## MFA ####\n\n x2 = RGSA(x2, filters=64)\n x2 = tf.keras.layers.Dropout(0.25)(x2, training = True) ## MCD ####\n x2 = DeeperAttentionLayer1(units=64, use_scale=True)(x2)\n \n x1, x2 = DeeperAttentionLayer(units=64, use_scale=True)([x1, x2]) ## MIFA ####\n \n x1 = RGSA(x1, filters=64)\n x1 = tf.keras.layers.Dropout(0.25)(x1, training = True) ## MCD ####\n x1 = DeeperAttentionLayer1(units=64, use_scale=True)(x1) ## MFA ####\n \n x2 = RGSA(x2, filters=64)\n x2 = tf.keras.layers.Dropout(0.25)(x2, training = True) ## MCD ####\n x2 = DeeperAttentionLayer1(units=64, use_scale=True)(x2)\n\n x1, x2 = DeeperAttentionLayer(units=64, use_scale=True)([x1, x2]) ## MIFA ####\n \n x1 = RGSA(x1, filters=128, strides=(2, 2), use_projection=True)\n x1 = tf.keras.layers.Dropout(0.25)(x1, training = True) ## MCD ####\n x1 = DeeperAttentionLayer1(units=128, use_scale=True)(x1) ## MFA ####\n\n x2 = RGSA(x2, filters=128, strides=(2, 2), use_projection=True)\n x2 = tf.keras.layers.Dropout(0.25)(x2, training = True) ## MCD ####\n x2 = DeeperAttentionLayer1(units=128, use_scale=True)(x2)\n\n x1, x2 = DeeperAttentionLayer(units=128, use_scale=True)([x1, x2]) ## MIFA ####\n \n x1 = RGSA(x1, filters=128)\n x1 = tf.keras.layers.Dropout(0.25)(x1, training = True) ## MCD ####\n x1 = DeeperAttentionLayer1(units=128, use_scale=True)(x1)\n \n x2 = RGSA(x2, filters=128)\n x2 = tf.keras.layers.Dropout(0.25)(x2, training = True) ## MCD ####\n x2 = 
DeeperAttentionLayer1(units=128, use_scale=True)(x2)\n\n x1, x2 = DeeperAttentionLayer(units=128, use_scale=True)([x1, x2]) ## MIFA ####\n \n x1 = RGSA(x1, filters=256, strides=(2, 2), use_projection=True)\n x1 = tf.keras.layers.Dropout(0.25)(x1, training = True) ## MCD ####\n x1 = DeeperAttentionLayer1(units=256, use_scale=True)(x1)\n \n x2 = RGSA(x2, filters=256, strides=(2, 2), use_projection=True)\n x2 = tf.keras.layers.Dropout(0.25)(x2, training = True) ## MCD ####\n x2 = DeeperAttentionLayer1(units=256, use_scale=True)(x2)\n\n x1, x2 = DeeperAttentionLayer(units=256, use_scale=True)([x1, x2]) ## MIFA ####\n \n \n x1 = RGSA(x1, filters=256)\n x1 = tf.keras.layers.Dropout(0.25)(x1, training = True) ## MCD ####\n x1 = DeeperAttentionLayer1(units=256, use_scale=True)(x1)\n \n x2 = RGSA(x2, filters=256)\n x2 = tf.keras.layers.Dropout(0.25)(x2, training = True) ## MCD ####\n x2 = DeeperAttentionLayer1(units=256, use_scale=True)(x2)\n \n x1, x2 = DeeperAttentionLayer(units=256, use_scale=True)([x1, x2]) ## MIFA ####\n\n x1 = RGSA(x1, filters=512, strides=(2, 2), use_projection=True)\n x1 = DeeperAttentionLayer1(units=512, use_scale=True)(x1)\n \n x2 = RGSA(x2, filters=512, strides=(2, 2), use_projection=True)\n x2 = DeeperAttentionLayer1(units=512, use_scale=True)(x2)\n\n x1, x2 = DeeperAttentionLayer(units=512, use_scale=True)([x1, x2]) ## MIFA ####\n \n x1 = RGSA(x1, filters=512)\n x2 = RGSA(x2, filters=512)\n x1, x2 = DeeperAttentionLayer(units=512, use_scale=True)([x1, x2])\n \n return x1, x2","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"#def build_resnet18(input_shape=(128, 128, 3), num_classes=2):\ninput_shape=(128, 128, 3)\ninputs1 = Input(shape=input_shape)\ninputs2 = Input(shape=input_shape)\n\n\n\n#input_data = Input(shape=input_shape, name='input_data')\n# Initial convolutional layer\n\nx1, x2 = residual_GLC_branch1(inputs1, inputs2)\n#print('x:',x.shape)\n\ncon = tf.keras.layers.Concatenate(axis=-1)([x1, x2])\n\ncon = tf.keras.layers.Dropout(0.25)(con, training = True) ## MCD ####\n\nx = GlobalAveragePooling2D()(con)\nprint('GlobalAveragePooling2D x:',x.shape)\n\noutputs1 = Dense(5, activation='softmax')(x)\noutputs2 = Dense(7, activation='softmax')(x)\n\n# Create the model\nmodel = Model([inputs1, inputs2], [outputs1, outputs2])\n#return model\nprint(model.summary())","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"from tensorflow.keras.optimizers import Adam\nfrom tensorflow.keras.optimizers.schedules import ExponentialDecay\n\ninitial_gamma = 0.5\n\noptimizer = Adam(learning_rate=0.001)\n# Compile the model with the custom optimizer\nmodel.compile(optimizer=optimizer,\n loss=['categorical_crossentropy', 'categorical_crossentropy'],\n loss_weights=[initial_gamma, (1 - initial_gamma)],\n metrics=['accuracy', 'accuracy'])\n\n\nfrom tensorflow.keras.callbacks import ModelCheckpoint, EarlyStopping\ndef checkpoint_callback():\n\n checkpoint_filepath = 'best1_model_cer_skin_lung.keras'\n\n model_checkpoint_callback= ModelCheckpoint(filepath=checkpoint_filepath,\n save_weights_only=False,\n #frequency='epoch',\n monitor='val_loss',\n save_best_only=True,\n mode='min',\n verbose=0)\n\n return model_checkpoint_callback\n\ndef early_stopping(patience):\n es_callback = tf.keras.callbacks.EarlyStopping(monitor='val_loss', patience=patience, verbose=1)\n return es_callback\n\n\n\nfrom 
tensorflow.keras.callbacks import ReduceLROnPlateau\n\nreduce_lr = ReduceLROnPlateau(monitor='val_loss', factor=0.2,\n patience=5, min_lr=0.00001)\n\ncheckpoint_callback = checkpoint_callback()\n\nearly_stopping = early_stopping(patience=100)\ncallbacks = [checkpoint_callback, early_stopping, reduce_lr]\n \n\n# Fit the model with callbacks\nhistory = model.fit([X_train_s, X_train_h], [y_train_s, y_train_h],\n epochs=200,\n validation_split=0.2, verbose=1,\n shuffle=True,\n callbacks=callbacks) # UpdateGammaCallback\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"model.evaluate([X_test_s, X_test_h1], [y_test_s, y_test_h1])","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"\nmodel.evaluate([X_test_s1, X_test_h], [y_test_s1, y_test_h])","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"y_pred = model.predict([X_test_s1, X_test_h]) \n\ny_pred_binary1 = y_pred[0] >= 0.5\ny_pred_binary_pgd_test1 = np.array(y_pred_binary1, dtype='int32')\n\nprint('y_pred_binary_pgd_test1:', y_pred_binary_pgd_test1.shape)\n\ny_pred_binary2 = y_pred[1] >= 0.5\ny_pred_binary_pgd_test2 = np.array(y_pred_binary2, dtype='int32')\n\nprint('y_pred_binary_pgd_test2:', y_pred_binary_pgd_test2.shape)\n\n#y_test_s, y_test_h\n# Calculate evaluation metrics for the current epsilon\ny_test_categorical1 = y_test_s1\ny_test_categorical2 = y_test_h\n\n## Task 1:\nprint('skin cancer classification:')\naccuracy = accuracy_score(y_pred_binary_pgd_test1, y_test_categorical1) * 100\nprecision = precision_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\n#auc = roc_auc_score(y_pred, y_train_categorical, multi_class='ovr') * 100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)\n\n## Task 2:\nprint('Cervical cancer classification:')\naccuracy = accuracy_score(y_pred_binary_pgd_test2, y_test_categorical2) * 100\nprecision = precision_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"history = model.fit([X_train_s, X_train_h], [y_train_s, y_train_h],\n epochs=100,\n validation_split=0.2, verbose=1,\n shuffle=True,\n callbacks=callbacks)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"model.evaluate([X_test_s, X_test_h1], [y_test_s, y_test_h1])","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"from tensorflow.keras.models import load_model\n\nmodel1 = load_model('/kaggle/working/best1_model_cer_skin_lung.keras', custom_objects={'DeeperAttentionLayer1': DeeperAttentionLayer1,\n 'DeeperAttentionLayer': DeeperAttentionLayer\n })\nmodel1.evaluate([X_test_s, X_test_h1], [y_test_s, 
y_test_h1])","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"model.evaluate([X_test_s1, X_test_h], [y_test_s1, y_test_h])","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"y_pred = model.predict([X_test_s1, X_test_h]) \n\ny_pred_binary1 = y_pred[0] >= 0.5\ny_pred_binary_pgd_test1 = np.array(y_pred_binary1, dtype='int32')\n\nprint('y_pred_binary_pgd_test1:', y_pred_binary_pgd_test1.shape)\n\ny_pred_binary2 = y_pred[1] >= 0.5\ny_pred_binary_pgd_test2 = np.array(y_pred_binary2, dtype='int32')\n\nprint('y_pred_binary_pgd_test2:', y_pred_binary_pgd_test2.shape)\n\n#y_test_s, y_test_h\n# Calculate evaluation metrics for the current epsilon\ny_test_categorical1 = y_test_s1\ny_test_categorical2 = y_test_h\n\n## Task 1:\nprint('skin cancer classification:')\naccuracy = accuracy_score(y_pred_binary_pgd_test1, y_test_categorical1) * 100\nprecision = precision_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\n#auc = roc_auc_score(y_pred, y_train_categorical, multi_class='ovr') * 100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)\n\n## Task 2:\nprint('Cervical cancer classification:')\naccuracy = accuracy_score(y_pred_binary_pgd_test2, y_test_categorical2) * 100\nprecision = precision_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"y_pred = model.predict([X_test_s, X_test_h1]) \n\ny_pred_binary1 = y_pred[0] >= 0.5\ny_pred_binary_pgd_test1 = np.array(y_pred_binary1, dtype='int32')\n\nprint('y_pred_binary_pgd_test1:', y_pred_binary_pgd_test1.shape)\n\ny_pred_binary2 = y_pred[1] >= 0.5\ny_pred_binary_pgd_test2 = np.array(y_pred_binary2, dtype='int32')\n\nprint('y_pred_binary_pgd_test2:', y_pred_binary_pgd_test2.shape)\n\n#y_test_s, y_test_h\n# Calculate evaluation metrics for the current epsilon\ny_test_categorical1 = y_test_s\ny_test_categorical2 = y_test_h1\n\n## Task 1:\nprint('skin cancer classification:')\naccuracy = accuracy_score(y_pred_binary_pgd_test1, y_test_categorical1) * 100\nprecision = precision_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\n#auc = roc_auc_score(y_pred, y_train_categorical, multi_class='ovr') * 100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)\n\n## Task 2:\nprint('Cervical cancer classification:')\naccuracy = accuracy_score(y_pred_binary_pgd_test2, y_test_categorical2) * 100\nprecision = precision_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nprint('accuracy:', 
accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"model.save('best_model_ever.keras')","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null}]}
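Note on the evaluation cells above: they binarize the softmax outputs at 0.5 and pass the predictions as the first argument to the scikit-learn metrics, whereas scikit-learn's signature is (y_true, y_pred) and a single-label softmax head is usually decided by argmax. A minimal reusable sketch under those conventions (the helper name evaluate_multitask and the commented task labels are illustrative, not part of the original notebook):

import numpy as np
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score

def evaluate_multitask(y_true_onehot_list, y_prob_list, task_names):
    """Print macro-averaged metrics for each head of a multi-output softmax model."""
    for name, y_true_onehot, y_prob in zip(task_names, y_true_onehot_list, y_prob_list):
        y_true = np.argmax(y_true_onehot, axis=1)   # one-hot labels -> class indices
        y_pred = np.argmax(y_prob, axis=1)          # softmax probabilities -> predicted class
        print(name)
        # scikit-learn expects ground truth first, predictions second
        print('accuracy :', accuracy_score(y_true, y_pred) * 100)
        print('precision:', precision_score(y_true, y_pred, average='macro') * 100)
        print('recall   :', recall_score(y_true, y_pred, average='macro') * 100)
        print('f1       :', f1_score(y_true, y_pred, average='macro') * 100)

# Usage sketch with the arrays used above:
# y_pred = model.predict([X_test_s1, X_test_h])
# evaluate_multitask([y_test_s1, y_test_h], y_pred,
#                    ['Task 1 (5-class head):', 'Task 2 (7-class head):'])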
--------------------------------------------------------------------------------
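The fit call in Code.ipynb carries a trailing "# UpdateGammaCallback" comment, but no such callback is defined in this dump, and the loss weights [initial_gamma, 1 - initial_gamma] are fixed at compile time. One plausible shape for that callback, sketched here purely as an assumption (the variable gamma, the wrapper weighted_cce, and the constant schedule are all illustrative, not the original implementation): keep gamma in a tf.Variable that the losses read at run time, so a callback can reassign it between epochs without recompiling.

import tensorflow as tf

# gamma lives in a tf.Variable so that assignments made by a callback are seen
# inside the compiled loss functions without re-compiling the model.
gamma = tf.Variable(0.5, trainable=False, dtype=tf.float32)

def weighted_cce(weight_var, flip=False):
    """Categorical cross-entropy scaled by gamma (or by 1 - gamma when flip=True)."""
    cce = tf.keras.losses.CategoricalCrossentropy()
    def loss(y_true, y_pred):
        base = cce(y_true, y_pred)
        w = (1.0 - weight_var) if flip else weight_var
        # cast the weight to the loss dtype so this also works under mixed precision
        return tf.cast(w, base.dtype) * base
    return loss

class UpdateGammaCallback(tf.keras.callbacks.Callback):
    """Hypothetical sketch: reassign gamma at the start of each epoch."""
    def __init__(self, gamma_var, schedule):
        super().__init__()
        self.gamma_var = gamma_var
        self.schedule = schedule          # function: epoch -> new gamma value
    def on_epoch_begin(self, epoch, logs=None):
        self.gamma_var.assign(self.schedule(epoch))

# Usage sketch (a constant schedule simply reproduces the fixed 0.5/0.5 weighting):
# model.compile(optimizer=Adam(learning_rate=0.001),
#               loss=[weighted_cce(gamma), weighted_cce(gamma, flip=True)],
#               metrics=['accuracy', 'accuracy'])
# callbacks.append(UpdateGammaCallback(gamma, lambda epoch: 0.5))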
/uncertainty-quantification-of-drifa-net.ipynb:
--------------------------------------------------------------------------------
1 | {"metadata":{"kernelspec":{"name":"python3","display_name":"Python 3","language":"python"},"language_info":{"name":"python","version":"3.10.12","mimetype":"text/x-python","codemirror_mode":{"name":"ipython","version":3},"pygments_lexer":"ipython3","nbconvert_exporter":"python","file_extension":".py"},"kaggle":{"accelerator":"gpu","dataSources":[],"dockerImageVersionId":30823,"isInternetEnabled":true,"language":"python","sourceType":"notebook","isGpuEnabled":true}},"nbformat_minor":4,"nbformat":4,"cells":[{"cell_type":"markdown","source":"# 1. Import needed libraries","metadata":{}},{"cell_type":"code","source":"import os\nfrom PIL import Image\nimport numpy as np\nimport pandas as pd\nimport matplotlib.pyplot as plt\nimport seaborn as sns\nfrom glob import glob\n#---------------------------------------\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.metrics import classification_report, confusion_matrix\n#---------------------------------------\nimport tensorflow as tf\nfrom tensorflow.keras.models import Sequential\nfrom tensorflow.keras.layers import Dense, Dropout, Flatten\nfrom tensorflow.keras.optimizers import Adamax\nfrom tensorflow.keras.metrics import Precision, Recall\nfrom tensorflow.keras.preprocessing.image import ImageDataGenerator\n#---------------------------------------\nimport warnings\nwarnings.filterwarnings(\"ignore\")","metadata":{"_cell_guid":"b1076dfc-b9ad-4769-8c92-a6c4dae69d19","_uuid":"8f2839f25d086af736a60e9eeb907d3b93b6e0e5","trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"img_rows, img_cols = 128, 128\ninput_shape = (img_rows, img_cols, 3)\n\n#n_classes = df['category'].nunique()\nn_classes = 4\nprint('Total number of unique categories:', n_classes)\n\nfrom os import listdir, makedirs\nfrom os.path import isfile, join, basename, splitext, isfile, exists\n\nimport numpy as np\nimport pandas as pd\n\nfrom tqdm import tqdm_notebook\n\nimport tensorflow as tf\nimport keras.backend as K\n\nimport keras\nfrom keras.models import Sequential, Model\nfrom keras.layers import Dropout, Dense, Flatten, BatchNormalization\nfrom keras.layers import DepthwiseConv2D, SeparableConvolution2D, Convolution2D, Conv2D,GRU, LSTM, AlphaDropout, Embedding, ZeroPadding2D,AveragePooling2D, MaxPooling2D, GlobalAveragePooling2D, GlobalMaxPooling2D, Dropout\nfrom keras.layers import Concatenate, Average, Maximum, Bidirectional, TimeDistributed\nfrom keras.callbacks import Callback, EarlyStopping, ModelCheckpoint\n#from keras.engine.input_layer import Input\nfrom keras.models import load_model\n#from keras.initializers import LecunNormal\n\nimport matplotlib.pyplot as plt\nimport seaborn as sns\n\n#pd.set_option('precision', 30)\nnp.set_printoptions(precision = 30)\n\n\n#tf.set_random_seed(1090)\n\nimport pandas as pd\nimport numpy as np\n\nimport seaborn as sns\nimport matplotlib.pyplot as plt\nimport matplotlib.image as img\n\nimport cv2\nimport itertools\nimport pathlib\nimport warnings\nfrom PIL import Image\nfrom random import randint\nwarnings.filterwarnings('ignore')\n\nfrom imblearn.over_sampling import SMOTE\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.metrics import matthews_corrcoef as MCC\nfrom sklearn.metrics import balanced_accuracy_score as BAS\nfrom sklearn.metrics import classification_report, confusion_matrix\n\n\nfrom tensorflow import keras\nfrom keras import layers\nimport tensorflow as tf\n#import tensorflow_addons as tfa\nfrom tensorflow.keras.preprocessing import 
image_dataset_from_directory\n##from keras.utils.vis_utils import plot_model\nfrom tensorflow.keras import Sequential, Input\nfrom tensorflow.keras.layers import Dense, Dropout\nfrom tensorflow.keras.layers import Conv2D, Flatten\nfrom tensorflow.keras.callbacks import ReduceLROnPlateau\nfrom tensorflow.keras.applications.inception_v3 import InceptionV3\nfrom tensorflow.keras.preprocessing.image import ImageDataGenerator as IDG\nfrom tensorflow.keras.layers import SeparableConv2D, BatchNormalization, GlobalAveragePooling2D\n\nfrom distutils.dir_util import copy_tree, remove_tree\n\nimport os\n#print(os.listdir(\"../input/alzheimer-mri-dataset/Dataset\"))\nimport tensorflow as tf\nfrom keras.datasets import mnist\nimport cv2\nimport os\nimport pathlib\nfrom keras.layers import Conv2D, Conv2DTranspose,Concatenate, Dropout, Dense, Reshape, LayerNormalization, LeakyReLU\nfrom keras import layers, models\nimport matplotlib.pyplot as plt\nimport numpy as np\nfrom sklearn.metrics import accuracy_score, classification_report\nfrom sklearn.metrics import f1_score, recall_score, precision_score\nprint(\"TensorFlow Version:\", tf.__version__)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"markdown","source":"### Uncertainty Quantification of DRIFA-Net","metadata":{}},{"cell_type":"code","source":"import numpy as np\nfrom tensorflow.keras.callbacks import ModelCheckpoint\nfrom tensorflow.keras.losses import KLDivergence\n\n# Set a random seed for reproducibility\nnp.random.seed(42)\n\ndef create_ensemble(num_models, input_shape=(128, 128, 3)):\n ensemble_models = []\n \n for _ in range(num_models):\n model = DRIFA_Net(input_shape) # Assuming ResNet18 is defined elsewhere\n ensemble_models.append(model)\n \n return ensemble_models\n\n# Function to perform Monte Carlo Dropout inference\n\n# Example usage\ninput_shape = (128, 128, 3)\nnum_models = 5\ndropout_rate = 0.25\n\nensemble_models = create_ensemble(num_models, input_shape)\n\n# Train each model in the ensemble\nfor i, model in enumerate(ensemble_models):\n print(\"Training Model\", i)\n #model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])\n model.compile(optimizer='adam', loss=[adaptive_knowledge_distillation_loss,\n adaptive_knowledge_distillation_loss,\n adaptive_knowledge_distillation_loss,\n adaptive_knowledge_distillation_loss], \n metrics=['accuracy', 'accuracy', 'accuracy', 'accuracy'])\n\n \n # Define checkpoint callback for each model\n checkpoint = ModelCheckpoint(f\"best_student_models1_covid_brain_{i}.keras\", monitor='val_loss', \n verbose=1, save_best_only=True, mode='min')\n \n model.fit(x = [images_train_brain_mri, images_train_brain_ct, images_train_covid_cxr, images_train_covid_ct], \n y=([soft_prob_brain_mri, soft_prob_brain_ct, soft_prob_covid_cxr, soft_prob_covid_ct]),\n epochs=200,\n #validation_data=([X_val, X_val_c], [y_val, y_val_c]), \n callbacks = [checkpoint],#batch_size=16,\n #validation_split = 0.2\n validation_data = ([X_val_brain_mri, X_val_brain_ct, X_val_covid_cxr, X_val_covid_ct],\n [y_val_brain_mri, y_val_brain_ct, y_val_covid_cxr, y_val_covid_ct]), verbose=0)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"for i, model in enumerate(ensemble_models):\n model.evaluate([X_test_brain_mri, X_test_brain_ct,X_test_covid_cxr, X_test_covid_ct], \n[y_test_brain_mri,y_test_brain_ct, y_test_covid_cxr, 
y_test_covid_ct])","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"'''def monte_carlo_dropout_inference(model, x1, x2, x3, x4, num_samples=3):\n predictions = np.zeros((num_samples,) + model.predict([x1, x2, x3, x4],\n verbose = 0).shape)\n print(len(predictions))\n \n for i in range(num_samples):\n print(i)\n predictions[i, :] = model.predict([x1, x2, x3, x4], verbose = 0)\n\n return predictions\n'''\n\ndef monte_carlo_dropout_inference(model, x1, x2, x3, x4, num_samples=30):\n predictions_shape = model.predict([x1, x2, x3, x4], verbose=0)[0].shape\n predictions = np.zeros((num_samples,) + predictions_shape)\n \n for i in range(num_samples):\n predictions[i, :] = model.predict([x1, x2, x3, x4], verbose=0)[0]\n\n return predictions\n# Perform Monte Carlo Dropout inference for each model in the ensemble\nensemble_predictions = []\nfor model in ensemble_models:\n predictions = monte_carlo_dropout_inference(model, X_test_brain_mri, \n X_test_brain_ct,X_test_covid_cxr, X_test_covid_ct)\n ensemble_predictions.append(predictions)\n\n#print('ensemble_predictions:', ensemble_predictions.shape)\n# Take the mean or other aggregation method across the ensemble predictions\n\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"final_prediction_model1 = np.mean(ensemble_predictions[0], axis=0)\n\n# Use the final_prediction for further analysis or decision making\nprint(\"final_prediction_model1:\", final_prediction_model1.shape)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"'''def monte_carlo_dropout_inference(model, x1, x2, x3, x4, num_samples=3):\n predictions = np.zeros((num_samples,) + model.predict([x1, x2, x3, x4],\n verbose = 0).shape)\n print(len(predictions))\n \n for i in range(num_samples):\n print(i)\n predictions[i, :] = model.predict([x1, x2, x3, x4], verbose = 0)\n\n return predictions\n'''\n\ndef monte_carlo_dropout_inference(model, x1, x2, x3, x4, num_samples=30):\n predictions_shape = model.predict([x1, x2, x3, x4], verbose=0)[1].shape\n predictions = np.zeros((num_samples,) + predictions_shape)\n \n for i in range(num_samples):\n predictions[i, :] = model.predict([x1, x2, x3, x4], verbose=0)[1]\n\n return predictions\n# Perform Monte Carlo Dropout inference for each model in the ensemble\nensemble_predictions = []\nfor model in ensemble_models:\n predictions = monte_carlo_dropout_inference(model, X_test_brain_mri, \n X_test_brain_ct,X_test_covid_cxr, X_test_covid_ct)\n ensemble_predictions.append(predictions)\n\n#print('ensemble_predictions:', ensemble_predictions.shape)\n# Take the mean or other aggregation method across the ensemble predictions\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"final_prediction_model2 = np.mean(ensemble_predictions[0], axis=0)\n\n# Use the final_prediction for further analysis or decision making\nprint(\"final_prediction_model2:\", final_prediction_model2.shape)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"'''def monte_carlo_dropout_inference(model, x1, x2, x3, x4, num_samples=3):\n predictions = np.zeros((num_samples,) + model.predict([x1, x2, x3, x4],\n verbose = 0).shape)\n print(len(predictions))\n \n for i in range(num_samples):\n print(i)\n predictions[i, :] = model.predict([x1, x2, x3, x4], verbose = 0)\n\n return predictions\n'''\n\ndef monte_carlo_dropout_inference(model, x1, x2, x3, x4, 
num_samples=30):\n predictions_shape = model.predict([x1, x2, x3, x4], verbose=0)[2].shape\n predictions = np.zeros((num_samples,) + predictions_shape)\n \n for i in range(num_samples):\n predictions[i, :] = model.predict([x1, x2, x3, x4], verbose=0)[2]\n\n return predictions\n# Perform Monte Carlo Dropout inference for each model in the ensemble\nensemble_predictions = []\nfor model in ensemble_models:\n predictions = monte_carlo_dropout_inference(model, X_test_brain_mri, \n X_test_brain_ct,X_test_covid_cxr, X_test_covid_ct)\n ensemble_predictions.append(predictions)\n\n#print('ensemble_predictions:', ensemble_predictions.shape)\n# Take the mean or other aggregation method across the ensemble predictions\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"final_prediction_model3 = np.mean(ensemble_predictions[0], axis=0)\n\n# Use the final_prediction for further analysis or decision making\nprint(\"final_prediction_model3:\", final_prediction_model3.shape)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"'''def monte_carlo_dropout_inference(model, x1, x2, x3, x4, num_samples=3):\n predictions = np.zeros((num_samples,) + model.predict([x1, x2, x3, x4],\n verbose = 0).shape)\n print(len(predictions))\n \n for i in range(num_samples):\n print(i)\n predictions[i, :] = model.predict([x1, x2, x3, x4], verbose = 0)\n\n return predictions\n'''\n\ndef monte_carlo_dropout_inference(model, x1, x2, x3, x4, num_samples=30):\n predictions_shape = model.predict([x1, x2, x3, x4], verbose=0)[3].shape\n predictions = np.zeros((num_samples,) + predictions_shape)\n \n for i in range(num_samples):\n predictions[i, :] = model.predict([x1, x2, x3, x4], verbose=0)[3]\n\n return predictions\n# Perform Monte Carlo Dropout inference for each model in the ensemble\nensemble_predictions = []\nfor model in ensemble_models:\n predictions = monte_carlo_dropout_inference(model, X_test_brain_mri, \n X_test_brain_ct,X_test_covid_cxr, X_test_covid_ct)\n ensemble_predictions.append(predictions)\n\n#print('ensemble_predictions:', ensemble_predictions.shape)\n# Take the mean or other aggregation method across the ensemble predictions\n","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"final_prediction_model4 = np.mean(ensemble_predictions[0], axis=0)\n\n# Use the final_prediction for further analysis or decision making\nprint(\"final_prediction_model4:\", final_prediction_model4.shape)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"y_pred_binary_pgd_test1 = np.argmax(final_prediction_model1, axis=1)\ny_pred_binary_pgd_test2 = np.argmax(final_prediction_model2, axis=1)\ny_pred_binary_pgd_test3 = np.argmax(final_prediction_model3, axis=1)\ny_pred_binary_pgd_test4 = np.argmax(final_prediction_model4, axis=1)\n\ny_test_categorical1 = y_test_brain_mri\ny_test_categorical2 = y_test_brain_ct\ny_test_categorical4 = y_test_covid_ct\ny_test_categorical3 = y_test_covid_cxr\n\ny_test_categorical1 = np.argmax(y_test_categorical1, axis=1)\ny_test_categorical2 = np.argmax(y_test_categorical2, axis=1)\ny_test_categorical3 = np.argmax(y_test_categorical3, axis=1)\ny_test_categorical4 = np.argmax(y_test_categorical4, axis=1)\n\n## Task 1:\nprint('Task 1:')\nprint('Brain Tumours classification in MRI images:')\naccuracy = accuracy_score(y_pred_binary_pgd_test1, y_test_categorical1) * 100\nprecision = precision_score(y_pred_binary_pgd_test1, y_test_categorical1, 
average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test1, y_test_categorical1, average='macro') * 100\n#auc = roc_auc_score(y_pred, y_train_categorical, multi_class='ovr') * 100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)\n\n## Task 2:\nprint('Task 2:')\nprint('Brain Stroke classification in CT scan images:')\naccuracy = accuracy_score(y_pred_binary_pgd_test2, y_test_categorical2) * 100\nprecision = precision_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test2, y_test_categorical2, average='macro') * 100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)\n\n## Task 3:\nprint('Task 3:')\nprint('COVID19 classification in CXR images:')\naccuracy = accuracy_score(y_pred_binary_pgd_test3, y_test_categorical3) * 100\nprecision = precision_score(y_pred_binary_pgd_test3, y_test_categorical3, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test3, y_test_categorical3, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test3, y_test_categorical3, average='macro') * 100\n#auc = roc_auc_score(y_pred, y_train_categorical, multi_class='ovr') * 100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)\n\n## Task 4:\nprint('Task 4:')\nprint('COVID19 classification in CT scan images:')\naccuracy = accuracy_score(y_pred_binary_pgd_test4, y_test_categorical4) * 100\nprecision = precision_score(y_pred_binary_pgd_test4, y_test_categorical4, average='macro') * 100\nrecall = recall_score(y_pred_binary_pgd_test4, y_test_categorical4, average='macro') * 100\nf1 = f1_score(y_pred_binary_pgd_test4, y_test_categorical4, average='macro') * 100\nprint('accuracy:', accuracy)\nprint('precision:', precision)\nprint('recall:', recall)\nprint('f1:', f1)","metadata":{"trusted":true},"outputs":[],"execution_count":null},{"cell_type":"code","source":"","metadata":{"trusted":true},"outputs":[],"execution_count":null}]}
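In uncertainty-quantification-of-drifa-net.ipynb, monte_carlo_dropout_inference is redefined four times, differing only in which task head (output index 0-3) it collects, and each version calls model.predict once per sample per head. Because the Dropout layers were built with training=True, every predict call already draws a fresh dropout mask, so a single helper can gather all four heads in one pass per sample. A sketch (the name mc_dropout_inference and the stacking layout below are not from the original code):

import numpy as np

def mc_dropout_inference(model, inputs, num_samples=30):
    """Run num_samples stochastic forward passes (dropout stays active through
    the training=True layers) and return one array per model output with shape
    (num_samples, n_examples, n_classes)."""
    per_output = None
    for s in range(num_samples):
        outputs = model.predict(inputs, verbose=0)   # list: one array per task head
        if per_output is None:
            per_output = [np.zeros((num_samples,) + o.shape) for o in outputs]
        for k, o in enumerate(outputs):
            per_output[k][s] = o
    return per_output

# Usage sketch for one ensemble member:
# samples = mc_dropout_inference(ensemble_models[0],
#                                [X_test_brain_mri, X_test_brain_ct,
#                                 X_test_covid_cxr, X_test_covid_ct])
# final_prediction_model1 = samples[0].mean(axis=0)   # brain-MRI head

This keeps the 30 stochastic passes per model but cuts the number of predict calls by a factor of four compared with collecting each head separately.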
--------------------------------------------------------------------------------
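The notebook reports the mean of the Monte Carlo samples as the final prediction; the uncertainty itself is commonly summarized by the predictive entropy of that mean distribution and the per-class variance across samples, pooling the ensemble members and MC passes. A minimal sketch under that assumption (predictive_uncertainty is an illustrative name, not defined in the original notebook):

import numpy as np

def predictive_uncertainty(mc_samples, eps=1e-12):
    """mc_samples: array of shape (num_samples, n_examples, n_classes) holding
    softmax outputs from repeated stochastic forward passes.
    Returns (mean_probs, predictive_entropy, per_class_variance)."""
    mean_probs = mc_samples.mean(axis=0)                              # (n_examples, n_classes)
    entropy = -np.sum(mean_probs * np.log(mean_probs + eps), axis=1)  # (n_examples,)
    variance = mc_samples.var(axis=0)                                 # (n_examples, n_classes)
    return mean_probs, entropy, variance

# Usage sketch, pooling the five ensemble members with 30 MC passes each:
# all_samples = np.concatenate(ensemble_predictions, axis=0)  # (5*30, n_examples, n_classes)
# mean_probs, entropy, variance = predictive_uncertainty(all_samples)
# y_pred = mean_probs.argmax(axis=1)   # final class decision; entropy flags uncertain cases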