├── FF.py
├── Forward_Forward (Update).ipynb
├── LICENSE
├── README.md
└── train.py

/FF.py:
--------------------------------------------------------------------------------
import tensorflow as tf
from tensorflow import keras
import numpy as np
from tensorflow.compiler.tf2xla.python import xla


class FFDense(keras.layers.Layer):
    """
    A custom Forward-Forward-enabled Dense layer. It implements the
    Forward-Forward weight-update rule internally.
    This layer must be used in conjunction with the `FFNetwork` model.
    """

    def __init__(
        self,
        units,
        optimizer,
        loss_metric,
        num_epochs=50,
        use_bias=True,
        kernel_initializer="glorot_uniform",
        bias_initializer="zeros",
        kernel_regularizer=None,
        bias_regularizer=None,
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.dense = keras.layers.Dense(
            units=units,
            use_bias=use_bias,
            kernel_initializer=kernel_initializer,
            bias_initializer=bias_initializer,
            kernel_regularizer=kernel_regularizer,
            bias_regularizer=bias_regularizer,
        )
        self.relu = keras.layers.ReLU()
        self.optimizer = optimizer
        self.loss_metric = loss_metric
        self.threshold = 1.5
        self.num_epochs = num_epochs

    # We perform a normalization step before we run the input through the
    # Dense layer, so that only the direction of the activity vector is
    # passed on.

    def call(self, x):
        x_norm = tf.norm(x, ord=2, axis=1, keepdims=True)
        x_norm = x_norm + 1e-4
        x_dir = x / x_norm
        res = self.dense(x_dir)
        return self.relu(res)

    # The Forward-Forward algorithm is below. We first perform the Dense-layer
    # operation and then compute the mean squared activation (the "goodness")
    # for the positive and negative samples respectively.
    # The loss measures how far each goodness value lies on the wrong side of
    # the threshold we set (a hyperparameter) that decides whether a sample is
    # treated as positive or negative. We then average the loss over the
    # combined batch and perform one gradient calculation and optimization
    # step. This does not technically qualify as backpropagation, since no
    # gradient is sent to any previous layer; the update is completely local.

    def forward_forward(self, x_pos, x_neg):
        for i in range(self.num_epochs):
            with tf.GradientTape() as tape:
                g_pos = tf.math.reduce_mean(tf.math.pow(self.call(x_pos), 2), 1)
                g_neg = tf.math.reduce_mean(tf.math.pow(self.call(x_neg), 2), 1)

                loss = tf.math.log(
                    1
                    + tf.math.exp(
                        tf.concat([-g_pos + self.threshold, g_neg - self.threshold], 0)
                    )
                )
                mean_loss = tf.cast(tf.math.reduce_mean(loss), tf.float32)
                self.loss_metric.update_state([mean_loss])
            gradients = tape.gradient(mean_loss, self.dense.trainable_weights)
            self.optimizer.apply_gradients(zip(gradients, self.dense.trainable_weights))
        return (
            tf.stop_gradient(self.call(x_pos)),
            tf.stop_gradient(self.call(x_neg)),
            self.loss_metric.result(),
        )
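
# A quick numeric illustration of the objective optimized above. This demo
# helper is an editorial sketch rather than part of the original module: the
# toy activations are made up, and `threshold` mirrors the FFDense default.
def _goodness_loss_demo():
    h_pos = tf.constant([[1.2, 1.6], [2.0, 0.5]])  # activations for "positive" samples
    h_neg = tf.constant([[0.3, 0.1], [0.6, 0.4]])  # activations for "negative" samples
    threshold = 1.5
    # Goodness = mean squared activation per sample: here g_pos = [2.0, 2.125]
    # and g_neg = [0.05, 0.26], so positives sit above the threshold and
    # negatives below it, giving a small loss.
    g_pos = tf.reduce_mean(tf.pow(h_pos, 2), 1)
    g_neg = tf.reduce_mean(tf.pow(h_neg, 2), 1)
    # log(1 + exp(z)) is softplus(z): it penalizes positive goodness that
    # falls below the threshold and negative goodness that rises above it.
    loss = tf.math.log(
        1 + tf.math.exp(tf.concat([-g_pos + threshold, g_neg - threshold], 0))
    )
    return tf.reduce_mean(loss)
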
class FFNetwork(keras.Model):
    """
    A `keras.Model` that supports an `FFDense` network creation. This model
    can work for any kind of classification task. It has an internal
    implementation with some details specific to the MNIST dataset which can
    be changed as per the use-case.
    """

    # Since each layer runs its gradient calculation and optimization locally,
    # each layer has its own optimizer that we pass. As a standard choice, we
    # pass the `Adam` optimizer with a default learning rate of 0.03, which
    # was found to be the best rate after experimentation.
    # Loss is tracked using the `loss_var` and `loss_count` variables.
    # A legacy optimizer is used for the layer optimizer to work around
    # https://github.com/keras-team/keras-io/issues/1241

    def __init__(
        self,
        dims,
        layer_optimizer=keras.optimizers.legacy.Adam(learning_rate=0.03),
        **kwargs,
    ):
        super().__init__(**kwargs)
        self.layer_optimizer = layer_optimizer
        self.loss_var = tf.Variable(0.0, trainable=False, dtype=tf.float32)
        self.loss_count = tf.Variable(0.0, trainable=False, dtype=tf.float32)
        self.layer_list = [keras.Input(shape=(dims[0],))]
        for d in range(len(dims) - 1):
            self.layer_list += [
                FFDense(
                    dims[d + 1],
                    optimizer=self.layer_optimizer,
                    loss_metric=keras.metrics.Mean(),
                )
            ]

    # This function dynamically modifies the image by overlaying the label on
    # top of it (for this example, since MNIST has 10 unique labels, we use
    # the first 10 pixels of the top-left corner). It returns the original
    # data tensor with its first 10 pixels replaced by a pixel-based one-hot
    # representation of the label.

    @tf.function(reduce_retracing=True)
    def overlay_y_on_x(self, data):
        X_sample, y_sample = data
        max_sample = tf.reduce_max(X_sample, axis=0, keepdims=True)
        max_sample = tf.cast(max_sample, dtype=tf.float64)
        X_zeros = tf.zeros([10], dtype=tf.float64)
        X_update = xla.dynamic_update_slice(X_zeros, max_sample, [y_sample])
        X_sample = xla.dynamic_update_slice(X_sample, X_update, [0])
        return X_sample, y_sample

    # The custom `predict_one_sample` makes a prediction by passing an image
    # through the network once per candidate label, measuring the "goodness"
    # produced by each layer (i.e. how high the squared output values are
    # relative to the set threshold), and then picking the label with the
    # highest total goodness. In other words, the image is tested for its
    # 'goodness' under every label.

    @tf.function(reduce_retracing=True)
    def predict_one_sample(self, x):
        goodness_per_label = []
        x = tf.reshape(x, [tf.shape(x)[0] * tf.shape(x)[1]])
        for label in range(10):
            h, label = self.overlay_y_on_x(data=(x, label))
            h = tf.reshape(h, [-1, tf.shape(h)[0]])
            goodness = []
            for layer_idx in range(1, len(self.layer_list)):
                layer = self.layer_list[layer_idx]
                h = layer(h)
                goodness += [tf.math.reduce_mean(tf.math.pow(h, 2), 1)]
            goodness_per_label += [
                tf.expand_dims(tf.reduce_sum(goodness, keepdims=True), 1)
            ]
        goodness_per_label = tf.concat(goodness_per_label, 1)
        return tf.cast(tf.argmax(goodness_per_label, 1), tf.float64)

    def predict(self, data):
        x = data
        preds = tf.map_fn(fn=self.predict_one_sample, elems=x)
        return np.asarray(preds, dtype=int)
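
    # To make `overlay_y_on_x` concrete, an editorial sketch (not called
    # anywhere in this repo); the toy image and label below are made up:
    #
    #     x = tf.random.uniform([784], dtype=tf.float64)  # fake flattened image
    #     max_sample = tf.reduce_max(x, axis=0, keepdims=True)
    #     one_hot = xla.dynamic_update_slice(
    #         tf.zeros([10], dtype=tf.float64), max_sample, [tf.constant(3)]
    #     )
    #     x = xla.dynamic_update_slice(x, one_hot, [0])
    #
    # Afterwards x[0:10] is zero everywhere except index 3, which holds
    # max(x): a pixel-based one-hot encoding of label 3.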
    # This custom `train_step` function overrides the internal `train_step`
    # implementation. We take all the input image tensors, flatten them, and
    # then produce positive and negative samples from the images.
    # A positive sample is an image with the correct label encoded on it via
    # the `overlay_y_on_x` function; a negative sample is an image with an
    # erroneous label present on it.
    # With the samples ready, we pass them through each `FFDense` layer and
    # perform the Forward-Forward computation on them. The returned loss is
    # the final loss value averaged over all the layers.

    @tf.function(jit_compile=True)
    def train_step(self, data):
        x, y = data

        # Flatten op
        x = tf.reshape(x, [-1, tf.shape(x)[1] * tf.shape(x)[2]])

        x_pos, y = tf.map_fn(fn=self.overlay_y_on_x, elems=(x, y))

        random_y = tf.random.shuffle(y)
        x_neg, y = tf.map_fn(fn=self.overlay_y_on_x, elems=(x, random_y))

        h_pos, h_neg = x_pos, x_neg

        for idx, layer in enumerate(self.layers):
            if isinstance(layer, FFDense):
                print(f"Training layer {idx+1} now : ")
                h_pos, h_neg, loss = layer.forward_forward(h_pos, h_neg)
                self.loss_var.assign_add(loss)
                self.loss_count.assign_add(1.0)
            else:
                print(f"Passing layer {idx+1} now : ")
                x = layer(x)
        mean_res = tf.math.divide(self.loss_var, self.loss_count)
        return {"FinalLoss": mean_res}
--------------------------------------------------------------------------------
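
A minimal end-to-end sketch of how the two classes above are driven, assembled from the notebook that follows (the MNIST loading, `dims=[784, 500, 500]`, the compile arguments, and the epoch count are all taken from its cells; the final `predict` call is an assumption based on `FFNetwork.predict` as defined above):

    import tensorflow as tf
    from tensorflow import keras

    (x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()
    x_train = x_train.astype(float) / 255
    x_test = x_test.astype(float) / 255
    y_train = y_train.astype(int)

    # One full-batch dataset: train_step builds the positive/negative
    # samples itself, so images and labels are all it needs.
    train_dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train)).batch(60000)

    model = FFNetwork(dims=[784, 500, 500])
    model.compile(
        optimizer=keras.optimizers.Adam(learning_rate=0.03),
        loss="mse",
        jit_compile=True,
        metrics=[keras.metrics.Mean()],
    )
    model.fit(train_dataset, epochs=250)

    preds = model.predict(tf.convert_to_tensor(x_test))  # argmax over per-label goodness
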
" 83 | ], 84 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAzQAAANECAYAAABmUGq2AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABVQElEQVR4nO3de3xV5Z0v/m+4BVQSigghchHES0eBtiqRqmgrR6CtFbXe6jlix9HRolOlakt/o9h2prRaL2PHauc1HajToqLjpXU69FgEPLaAA2o9tJUCB4coEJSWBFCCwvr90TE15WJWyGbnyX6/X6/n9SJrP0+e78qO+frJ2nulLMuyLAAAABLUqdgFAAAAtJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADf+aVV16JsrKy+Pa3v91mn3P+/PlRVlYW8+fPb7PPCUBp0Jdg7wQaOoSZM2dGWVlZLFmypNil7FcrVqyICy+8MAYMGBAHHHBAHH300fG1r30t3nzzzWKXBlDSSrUvRUQ8//zz8elPfzp69+4dBxxwQBx77LFx9913F7ssOrAuxS4AaJ3a2toYNWpUVFZWxtVXXx29e/eOhQsXxrRp02Lp0qXxxBNPFLtEAErM//7f/zvOPPPM+PCHPxw33XRTHHTQQbFq1ap49dVXi10aHZhAA4n613/919i0aVM8++yzccwxx0RExBVXXBE7d+6M+++/P/7whz/EBz7wgSJXCUCpaGhoiEsuuSQ++clPxiOPPBKdOnkhEPuH7zRKxvbt2+Pmm2+O4447LiorK+PAAw+MU045JebNm7fHNXfeeWcMHjw4evToEaeeemosW7Zslzkvv/xyfOYzn4nevXtH9+7d4/jjj48f//jHra7z5ZdfjjVr1rzvvIaGhoiI6NevX7Pj/fv3j06dOkW3bt1aXQMAhdfR+tKsWbOirq4u/v7v/z46deoUW7dujZ07d7Z6X2gpgYaS0dDQEP/8z/8cp512WnzrW9+KW265JV5//fUYN25cvPjii7vMv//+++Puu++OyZMnx9SpU2PZsmXx8Y9/POrq6prm/PrXv44TTzwxfvvb38aXv/zluP322+PAAw+MiRMnxmOPPdaqOj/4wQ/GJZdc8r7zTjvttIiIuOyyy+LFF1+M2traeOihh+Lee++Nv/mbv4kDDzywVfsDsH90tL7085//PCoqKuK1116Lo446Kg466KCoqKiIq666KrZt29aqvaFFMugAZsyYkUVE9p//+Z97nPPOO+9kjY2NzY794Q9/yPr165f95V/+ZdOx1atXZxGR9ejRI3v11Vebji9evDiLiOy6665rOnb66adnw4cPz7Zt29Z0bOfOndlHP/rR7Igjjmg6Nm/evCwisnnz5r3vuUREduqpp77vvCzLsq9//etZjx49sohoGv/f//f/tWgtAIVTin1pxIgR2QEHHJAdcMAB2TXXXJP927/9W3bNNddkEZFdeOGF77seWssVGkpG586dm16GtXPnzvj9738f77zzThx//PHx/PPP7zJ/4sSJceihhzZ9PGrUqKipqYmf/vSnERHx+9//Pp5++uk4//zzY/PmzfHGG2/EG2+8ERs3boxx48bFihUr4rXXXstdZ5ZlLb6N5mGHHRZjxoyJf/qnf4p/+7d/i7/8y7+Mb3zjG/GP//iPufcFYP/qaH1py5Yt8eabb8Yll1wSd999d5xzzjlx9913x1//9V/Hgw8+GCtWrMi9N7SEmwJQUn7wgx/E7bffHi+//HK8/fbbTceHDBmyy9wjjjhil2NHHnlkzJ49OyIiVq5cGVmWxU033RQ33XTTbvfbsGFDs+bTlh588MG44oor4ne/+10MGDAgIiLOOeec2LlzZ3zpS1+Kiy66KA4++OCC7A1A2+hIfalHjx4REXHRRRc1O/7Zz342vve978XChQt3ew6wrwQaSsYPf/jDuPTSS2PixIlxww03RN++faNz584xffr0WLVqVe7P9+4bHa+//voYN27cbucMGzZsn2rem+9+97vx4Q9/uCnMvOvTn/50zJw5M1544YUYO3ZswfYHYN90tL5UXV0dv/71r3e5WU3fvn0jIuIPf/hDwfamtAk0lIxHHnkkhg4dGo8++miUlZU1HZ82bdpu5+/u0vjvfve7OOywwyIiYujQoRER0bVr16IEh7q6ut3elvnd3/C98847+7skAHLoaH3puOOOi6eeeqrppgDvWrt2bUREHHLIIfu9JkqD99BQMjp37hwRf3wt8LsWL14cCxcu3O38xx9/vNlrjZ977rlYvHhxTJgwISL++Bun0047Lb73ve/FunXrdln/+uuvt6rOlt4e88gjj4wXXnghfve73zU7/sADD0SnTp1ixIgRrdofgP2jo/Wl888/PyIivv/97zc7/s///M/RpUuXprtzQltzhYYO5V/+5V9izpw5uxz/whe+EJ/61Kfi0UcfjbPPPjs++clPxurVq+O+++6Lv/iLv4gtW7bssmbYsGFx8sknx1VXXRWNjY1x1113xcEHHxw33nhj05x77rknTj755Bg+fHhcfvnlMXTo0Kirq4uFCxfGq6++Gr/61a9yn8MHP/jBOPXUU9/3DZg33HBD/Md//EeccsopcfXVV8fBBx8cTz75ZPzHf/xH/NVf/VVUV1fn3huAtlVKfenDH/5w/OVf/mX8y7/8S7zzzjtNax5++OGYOnWqvkThFPMWa9BW3r095p5GbW1ttnPnzuwb3/hGNnjw4Ky8vDz78Ic/nD355JPZpEmTssGDBzd9rndvj3nbbbdlt99+ezZw4MCsvLw8O+WUU7Jf/epXu+y9atWq7JJLLsmqqqqyrl27Zoceemj2qU99KnvkkUea5hTqts2LFy/OJkyY0LT3kUcemf393/999vbbb7doPQCFUap9afv27dktt9ySDR48OOvatWs2bNiw7M4772zRWmitsix7z3VOAACAhHgPDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZLW7P6y5c+fOWLt2bfTs2TPKysqKXQ5Ah5dlWWzevDmqq6ujUye/59odvQlg/8rTm9pdoFm7dm0MHDiw2GUAlJza2toYMGBAsctol/QmgOJoSW9qd7+K69mzZ7FLAChJfv7uma8NQHG05OdvwQLNPffcE4cddlh07949ampq4rnnnmvROpfyAYqjo//8bW1fiuj4XxuA9qolP38LEmgeeuihmDJlSkybNi2ef/75GDlyZIwbNy42bNhQiO0AYK/0JYAOLCuAUaNGZZMnT276eMeOHVl1dXU2ffr0911bX1+fRYRhGIaxn0d9fX0hWkK7sC99Kcv0JsMwjGKNlvSmNr9Cs3379li6dGmMHTu26VinTp1i7NixsXDhwrbeDgD2Sl8C6Nja/C5nb7zxRuzYsSP69evX7Hi/fv3i5Zdf3mV+Y2NjNDY2Nn3c0NDQ1iUBUMLy9qUIvQkgJUW/y9n06dOjsrKyabgtJgDFpj
cBpKPNA02fPn2ic+fOUVdX1+x4XV1dVFVV7TJ/6tSpUV9f3zRqa2vbuiQASljevhShNwGkpM0DTbdu3eK4446LuXPnNh3buXNnzJ07N0aPHr3L/PLy8qioqGg2AKCt5O1LEXoTQEra/D00ERFTpkyJSZMmxfHHHx+jRo2Ku+66K7Zu3Rqf+9znCrEdAOyVvgTQcRUk0FxwwQXx+uuvx8033xzr16+PD33oQzFnzpxd3pAJAPuDvgTQcZVlWZYVu4j3amhoiMrKymKXAVBy6uvrvbRqD/QmgOJoSW8q+l3OAAAAWkugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEiWQAMAACRLoAEAAJIl0AAAAMkSaAAAgGQJNAAAQLIEGgAAIFkCDQAAkCyBBgAASFaXYhcAAFBo3bp1y73mhz/8Ya75s2bNyr3H448/nnsN0JwrNAAAQLIEGgAAIFkCDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIVpdiFwAAUGgf//jHc6/5zGc+k2t+XV1d7j0ef/zx3GuA5lyhAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkdSl2AbC/XHfddQWdHxExcODA3GsK7Y477si95q677so1v7a2NvceAPvioIMOyjX/f/2v/1WgSv5k/fr1Bd8D2JUrNAAAQLIEGgAAIFltHmhuueWWKCsrazaOPvrott4GAFpMbwLouAryHppjjjkmfv7zn/9pky7eqgNAcelNAB1TQX6ad+nSJaqqqgrxqQGgVfQmgI6pIO+hWbFiRVRXV8fQoUPj4osvjjVr1uxxbmNjYzQ0NDQbANDW9CaAjqnNA01NTU3MnDkz5syZE/fee2+sXr06TjnllNi8efNu50+fPj0qKyubRnu87S0AadObADquNg80EyZMiPPOOy9GjBgR48aNi5/+9KexadOmmD179m7nT506Nerr65uGv2cBQFvTmwA6roK/I7JXr15x5JFHxsqVK3f7eHl5eZSXlxe6DABoojcBdBwF/zs0W7ZsiVWrVkX//v0LvRUAtIjeBNBxtHmguf7662PBggXxyiuvxC9/+cs4++yzo3PnznHRRRe19VYA0CJ6E0DH1eYvOXv11Vfjoosuio0bN8YhhxwSJ598cixatCgOOeSQtt4KAFpEbwLouMqyLMuKXcR7NTQ0RGVlZbHLoJ07//zzc6956KGHClBJx5T3DdCDBg0qUCXsT/X19VFRUVHsMtolvan9GTVqVK75ixYtKlAlfzJ48ODca9xwAvauJb2p4O+hAQAAKBSBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkq0uxC4DWeOihhwq+x8KFC3Ovueuuu9q+kH107bXX5l4zevToti8EIDH/8R//kWt+XV1dgSrpeIYOHZp7Ta9evXLNf/7553PvQZpcoQEAAJIl0AAAAMkSaAAAgGQJNAAAQLIEGgAAIFkCDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsroUuwBorz760Y8Wu4Q2MXv27Nxrzj///AJUApCWTZs25Zq/ffv2whSSgHPPPTfX/OnTp+feo6qqKtf8DRs25N5j4sSJueYvW7Ys9x60PVdoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZXYpdAOwvCxcuLHYJyZg9e3axS9jF+eefX/A92uN5A7t33XXXFbuEDuv666/Pveab3/xmrvmdOhX+d+oHHXRQ7jVLlizJNf/SSy/NvceDDz6Yew175woNAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEiWQAMAACRLoAEAAJLVpdgFwP4yevToYpeQjIEDB+aaf+211+beY8qUKbnX5HXBBRcUfA+gOLp27VrsEpJx8cUX55p/1VVX5d6jU6fC/4783//933PNf/bZZ3PvMX369Fzz77777tx7vPjii7nmv/zyy7n3KDWu0AAAAMnKHWieeeaZOPPMM6O6ujrKysri8ccfb/Z4lmVx8803R//+/aNHjx4xduzYWLFiRVvVCwDN6EsApS13oNm6dWuMHDky7rnnnt0+fuutt8bdd98d9913XyxevDgOPPDAGDduXGzbtm2fiwWAP6cvAZS23O+hmTBhQkyYMGG3j2VZFnfddVf87d/+bZx11lkREXH//fdHv3794vHHH48LL7xw36oFgD+jLwGUtjZ9D83q1atj/fr1MXbs2KZjlZWVUVNTEwsXLmzLrQDgfelLAB1fm97lbP369RER0a9fv2bH+/Xr1/TYn2tsbIzGxsamjxsaGtqyJABKWGv6UoTeBJCSot/lbPr06VFZWdk08t4uFgDamt4EkI42DTRVVVUREVFXV9fseF1dXdNjf27q1KlRX1/fNGpra9uyJABKWGv6UoTeBJCSNg00Q4YMiaqqqpg7d27TsYaGhli8ePEe/6hheXl5VFRUNBsA0BZa05ci9CaAlOR+D82WLVti5cqVTR+vXr06Xnzxxejdu3cMGjQorr322vi7v/u7OOKII2LIkCFx0003RXV1dUycOLEt6waAiNCXAEpd7kCzZMmS+NjHPtb08ZQpUyIiYtKkSTFz5sy48cYbY+vWrXHFFVfEpk2b4uSTT445c+ZE9+7d265qAPhv+hJAaSvLsiwrdhHv1dDQEJWVlcUug3ZuzZo1udfkfVPvBRdckHuP2bNn515TaLfffnvuNe/+D2Eh3XHHHbnmf/GLXyxQJbyrvr7eS6v2QG9qfx555JFc888555zcezzwwAO55l988cW598irS5f8N6jN+7X69Kc/nXuPvOrr63OvGTZsWK75rbk74b/+67/mmn/++efn3mNPfzdrT372s5/l3qMjaUlvKvpdzgAAAFpLoAEAAJIl0AAAAMkSaAAAgGQJNAAAQLIEGgAAIFkCDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZHUpdgHQGtdff33uNQ899FCu+d/+9rdz7zF79uxc80888cSC7zFw4MDce9TW1uaa35rnI+95ABDxoQ99KPeaT3/607nml5WV5d4jy7Jc8y+55JLce2zcuDH3mrw+97nP5Zrfv3//3HsMGDAg9xr2zhUaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEhWl2IXAK0xe/bs3Gu+/e1v55o/cODA3HtkWZZ7TaHV1tbmXnPSSScVf
A+A9u5//I//kWt+v379cu9RV1eXa/5nP/vZ3Hvk1Zpe9otf/CLX/Llz5+beY3946623cs1/7bXXcu+R9zn8/ve/n3uPUuMKDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACS1aXYBcD+cuedd+aaf8cddxSokn3z8MMP55rfmvOora3NvQZgf1q2bFmu+eecc07uPfr06ZNr/lVXXZV7j69//eu55v/FX/xF7j3yKisry73mtttuyzX/zTffzL1HRzFq1Khc84899tjce+T97yN1rtAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEiWQAMAACRLoAEAAJIl0AAAAMkSaAAAgGQJNAAAQLK6FLsAaI0TTzwx95o77rijAJXsm4ULF+Zec/755xegEoC0/PSnP801/+abby5QJX9y8MEH517TuXPnXPPPOOOM3Hvk9eqrr+Ze89JLLxWgko7pwAMPzDW/e/fuBaqk43CFBgAASJZAAwAAJCt3oHnmmWfizDPPjOrq6igrK4vHH3+82eOXXnpplJWVNRvjx49vq3oBoBl9CaC05Q40W7dujZEjR8Y999yzxznjx4+PdevWNY0HHnhgn4oEgD3RlwBKW+6bAkyYMCEmTJiw1znl5eVRVVXV6qIAoKX0JYDSVpD30MyfPz/69u0bRx11VFx11VWxcePGPc5tbGyMhoaGZgMA2lKevhShNwGkpM0Dzfjx4+P++++PuXPnxre+9a1YsGBBTJgwIXbs2LHb+dOnT4/KysqmMXDgwLYuCYASlrcvRehNAClp879Dc+GFFzb9e/jw4TFixIg4/PDDY/78+XH66afvMn/q1KkxZcqUpo8bGho0DgDaTN6+FKE3AaSk4LdtHjp0aPTp0ydWrly528fLy8ujoqKi2QCAQnm/vhShNwGkpOCB5tVXX42NGzdG//79C70VALwvfQmgY8n9krMtW7Y0+63W6tWr48UXX4zevXtH796946tf/Wqce+65UVVVFatWrYobb7wxhg0bFuPGjWvTwgEgQl8CKHW5A82SJUviYx/7WNPH777GeNKkSXHvvffGSy+9FD/4wQ9i06ZNUV1dHWeccUZ8/etfj/Ly8rarGgD+m74EUNpyB5rTTjstsizb4+M/+9nP9qkgSlPeN9suXLiwQJXsX6NHjy52CZA8fak07dy5M9f8vd3Vbk86d+6ca35NTU3uPT7wgQ/kXlNor7zyyn5Z0x716tUr1/zhw4fn3mP16tW55q9duzb3HqWm4O+hAQAAKBSBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkq0uxC4CIiF/84hcF36O2tjbX/PPPPz/3HlOmTMk1/7zzzsu9x3XXXZdr/p133pl7D4D2bsmSJbnm33jjjbn3+OY3v5lr/vHHH597j3Xr1uVeQ+F85jOfyTX/mGOOyb3HE088kWv+2rVrc+9RalyhAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECyuhS7ADqe22+/PfeagQMHFqCS5k466aRc82tra3Pvcccdd+Saf9555+XeY/To0bnm33nnnbn3AOhoWvOzcMKECbnmjx07Nvce7dGGDRuKXUKb6Ny5c+41eZ/Dbdu25d7jtttuy72GvXOFBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACS1aXYBdDxTJkypeB73HHHHbnX1NbWFqCS5hYtWlTwPc4777yC7wFAxKRJk3LN//GPf5x7j+OOOy73mkJ7+umni13Cbg0aNCjX/L/6q7/Kvcf555+fa/6vfvWr3Hv88pe/zL2GvXOFBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJKsuyLCt2Ee/V0NAQlZWVxS6DfbA/vqXKysoKvsf+4GtFe1JfXx8VFRXFLqNd0ptoiQkTJuRec+utt+aaf8wxx+TeI68tW7bkXvPyyy8XoJLm+vXrl2v+wIEDc+/x4osv5pr/jW98I/cejzzySO41pawlvckVGgAAIFm5As306dPjhBNOiJ49e0bfvn1j4sSJsXz58mZztm3bFpMnT46DDz44DjrooDj33HOjrq6uTYsGgHfpTQClLVegWbBgQUyePDkWLVoUTz31VLz99ttxxhlnxNatW5vmXHfddfGTn/wkHn744ViwYEGsXbs2zjnnnDYvHAAi9CaAUtclz+Q5c+Y0+3jmzJnRt2/fWLp0aYwZMybq6+vj+9//fsyaNSs+/vGPR0TEjBkz4oMf/GAsWrQoTjzxxLarHABCbwIodfv0Hpr6+vqIiOjdu3dERCxdujTefvvtGDt2bNOco48+OgYNGhQLFy7cl60AoEX0JoDSkusKzXvt3Lkzrr322jjppJPi2GOPjYiI9evXR7du3aJXr17N5vbr1y/Wr1+/28/T2NgYjY2NTR83NDS0tiQASpzeBFB6Wn2FZvLkybFs2bJ48MEH96mA6dOnR2VlZdNozS32ACBCbwIoRa0KNFdffXU8+eSTMW/evBgwYEDT8aqqqti+fXts2rSp2fy6urqoqqra7eeaOnVq1NfXN43a2trWlARAidObAEpTrkCTZVlcffXV8dhjj8XTTz8dQ4YMafb4cccdF127do25c+c2HVu+fHmsWbMmRo8evdvPWV5eHhUVFc0GALSU3gRQ2nK9h2by5Mkxa9aseOKJJ6Jnz55Nrz2urKyMHj16RGVlZVx22WUxZcqU6N27d1RUVMQ111wTo0ePdhcZAApCbwIobbkCzb333hsREaeddlqz4zNmzIhLL700IiLuvPPO6NSpU5x77rnR2NgY48aNi+9+97ttUiwA/Dm9CaC0lWVZlhW7iPdqaGiIysrKYpfBe+T9Deb+uA1qWVlZwfdojdmzZ+eaf9555+XeI+/X96Mf/WjuPShN9fX1Xlq1B3oThdKvX79c89etW1egSjqe9/5x3ZZ67/vvWuLd28RTOC3pTfv0d2gAAACKSaABAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEiWQAMAACRLoAEAAJIl0AAAAMkSaAAAgGR1KXYBtH+LFi0qdgm7mD17dsH3OPHEE3OvGThwYAEqae6CCy4o+B4A7B8bN27MNX/JkiW59zj++ONzzf+3f/u33HusXLky1/xf//rXuff493//91zzsyzLvUd9fX3uNRSfKzQAAECyBBoAACBZAg0AAJAsgQYAAEiWQAMAACRLoAEAAJIl0AAAAMkSaAAAgGQJNAAAQLIEGgAAIFkCDQAAkKwuxS6Ajqe2tjb3moEDB+aaf955
5+Xeoz264447cq9pzdcXgPbpnXfeyTV/1KhRBaoE0uUKDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACS1aXYBdDxDBo0KPea22+/Pdf80aNH594j75qHH3449x6PPPJIrvmzZ8/OvQcAAH/iCg0AAJAsgQYAAEiWQAMAACRLoAEAAJIl0AAAAMkSaAAAgGQJNAAAQLIEGgAAIFkCDQAAkCyBBgAASJZAAwAAJKtLsQuAiIgvfvGLxS4BAIAEuUIDAAAkS6ABAACSlSvQTJ8+PU444YTo2bNn9O3bNyZOnBjLly9vNue0006LsrKyZuPKK69s06IB4F16E0BpyxVoFixYEJMnT45FixbFU089FW+//XacccYZsXXr1mbzLr/88li3bl3TuPXWW9u0aAB4l94EUNpy3RRgzpw5zT6eOXNm9O3bN5YuXRpjxoxpOn7AAQdEVVVV21QIAHuhNwGUtn16D019fX1ERPTu3bvZ8R/96EfRp0+fOPbYY2Pq1Knx5ptv7vFzNDY2RkNDQ7MBAK2lNwGUmKyVduzYkX3yk5/MTjrppGbHv/e972Vz5szJXnrppeyHP/xhduihh2Znn332Hj/PtGnTsogwDMMwijzq6+tb2xLaDb3JMAyjY42W9KZWB5orr7wyGzx4cFZbW7vXeXPnzs0iIlu5cuVuH9+2bVtWX1/fNGpra4v+hTMMwyjF0RECjd5kGIbRsUZLelOr/rDm1VdfHU8++WQ888wzMWDAgL3OrampiYiIlStXxuGHH77L4+Xl5VFeXt6aMgCgid4EUJpyBZosy+Kaa66Jxx57LObPnx9Dhgx53zUvvvhiRET079+/VQUCwN7oTQClLVegmTx5csyaNSueeOKJ6NmzZ6xfvz4iIiorK6NHjx6xatWqmDVrVnziE5+Igw8+OF566aW47rrrYsyYMTFixIiCnAAApU1vAihxeV6bHHt4bduMGTOyLMuyNWvWZGPGjMl69+6dlZeXZ8OGDctuuOGGXK/Lrq+vL/pr9QzDMEpxpPoemj2dj95kGIaR/mjJz+qy/24G7UZDQ0NUVlYWuwyAklNfXx8VFRXFLqNd0psAiqMlvWmf/g4NAABAMQk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEiWQAMAACRLoAEAAJIl0AAAAMkSaAAAgGQJNAAAQLIEGgAAIFkCDQAAkCyBBgAASJZAAwAAJEugAQAAktXuAk2WZcUuAaAk+fm7Z742AMXRkp+/7S7QbN68udglAJQkP3/3zNcGoDha8vO3LGtnv3bauXNnrF27Nnr27BllZWXNHmtoaIiBAwdGbW1tVFRUFKnC/a9UzzuidM+9VM87onTPvZjnnWVZbN68Oaqrq6NTp3b3e652YU+9qVS/XyNK99xL9bwjSvfcS/W8I9LpTV32U00t1qlTpxgwYMBe51RUVJTcN1RE6Z53ROmee6med0TpnnuxzruysnK/75mS9+tNpfr9GlG6516q5x1Ruudequcd0f57k1/FAQAAyRJoAACAZCUVaMrLy2PatGlRXl5e7FL2q1I974jSPfdSPe+I0j33Uj3v1JXy81aq516q5x1Ruudequcdkc65t7ubAgAAALRUUldoAAAA3kugAQAAkiXQAAAAyRJoAACAZCUTaO6555447LDDonv37lFTUxPPPfdcsUsquFtuuSXKysqajaOPPrrYZbW5Z555Js4888yorq6OsrKyePzxx5s9nmVZ3HzzzdG/f//o0aNHjB07NlasWFGcYtvY+537pZdeusv3wPjx44tTbBuaPn16nHDCCdGzZ8/o27dvTJw4MZYvX95szrZt22Ly5Mlx8MEHx0EHHRTnnntu1NXVFanittGS8z7ttNN2ec6vvPLKIlXM+9Gb9Ca9SW/Sm4oviUDz0EMPxZQpU2LatGnx/PPPx8iRI2PcuHGxYcOGYpdWcMccc0ysW7euaTz77LPFLqnNbd26NUaOHBn33HPPbh+/9dZb4+6774777rsvFi9eHAceeGCMGzcutm3btp8rbXvvd+4REePHj2/2PfDAAw/sxwoLY8GCBTF58uRYtGhRPPXUU/H222/HGWecEVu3bm2ac91118VPfvKTePjhh2PBggWxdu3aOOecc4pY9b5ryXlHRFx++eXNnvNbb721SBWzN3qT3qQ36U16UzuRJWDUqFHZ5MmTmz7esWNHVl1dnU2fPr2IVRXetGnTspEjRxa7jP0qIrLHHnus6eOdO3dmVVVV2W233dZ0bNOmTVl5eXn2wAMPFKHCwvnzc8+yLJs0aVJ21llnFaWe/WnDhg1ZRGQLFizIsuyPz3HXrl2zhx9+uGnOb3/72ywisoULFxarzDb35+edZVl26qmnZl/4wheKVxQtpjeVDr3psWbH9Ca9qb1p91dotm/fHkuXLo2xY8c2HevUqVOMHTs2Fi5cWMTK9o8VK1ZEdXV1DB06NC6++OJYs2ZNsUvar1avXh3r169v9vxXVlZGTU1NSTz/ERHz58+Pvn37xlFHHRVXXXVVbNy4sdgltbn6+vqIiOjdu3dERCxdujTefvvtZs/70UcfHYMGDepQz/ufn/e7fvSjH0WfPn3i2GOPjalTp8abb75ZjPLYC71Jb9Kb9KYIvam96FLsAt7PG2+8ETt27Ih+/fo1O96vX794+eWXi1TV/lFTUxMzZ86Mo446KtatWxdf/epX45RTTolly5ZFz549i13efrF+/fqIiN0+/+8+1pGNHz8+zjnnnBgyZEisWrUqvvKVr8SECRNi4cKF0blz52KX1yZ27twZ1157bZx00klx7LHHRsQfn/du3bpFr169ms3tSM/77s47IuKzn/1sDB48OKqrq+Oll16KL33pS7F8+fJ49NFHi1gtf05v0psi9Ca96Y860vOeam9q94GmlE2YMKHp3yNGjIiampoYPHhwzJ49Oy677LIiVsb+cuGFFzb9e/jw4TFixIg4/PDDY/78+XH66acXsbK2M3ny5Fi2bFmHfA3+3uzpvK+44oqmfw8fPjz69+8fp59+eqxatSoOP/zw/V0m7EJvQm/quFLtTe3+JWd9+vSJzp0773IHibq6uqiqqipSVcXRq1evOPLII2PlypXFLmW/efc59vz/0dChQ6NPnz4d5nvg6quvjieffDLmzZsXAwYMaDpeVVUV27dvj02bNjWb31Ge9z2d9+7U1NRERHSY57yj0Jv+RG/6k1J8/iP0po7yvKfcm9p9oOnWrVscd9xxMXfu3KZjO3fujLlz58bo0aOLWNn+t2XLlli1alX079+/2KXsN0OGDImqqqpmz39DQ0MsXry45J7/iIhXX301Nm7cmPz3QJZlcfXVV8djjz0WTz/9dAwZMqTZ48cdd1x07dq12fO+fPnyWLNmTdLP+/ud9+68+OKLERHJP+cdjd70J3rTH+lNelOqOkRvKu4
9CVrmwQcfzMrLy7OZM2dmv/nNb7Irrrgi69WrV7Z+/fpil1ZQX/ziF7P58+dnq1evzn7xi19kY8eOzfr06ZNt2LCh2KW1qc2bN2cvvPBC9sILL2QRkd1xxx3ZCy+8kP3Xf/1XlmVZ9s1vfjPr1atX9sQTT2QvvfRSdtZZZ2VDhgzJ3nrrrSJXvu/2du6bN2/Orr/++mzhwoXZ6tWrs5///OfZRz7ykeyII47Itm3bVuzS98lVV12VVVZWZvPnz8/WrVvXNN58882mOVdeeWU2aNCg7Omnn86WLFmSjR49Ohs9enQRq95373feK1euzL72ta9lS5YsyVavXp098cQT2dChQ7MxY8YUuXJ2R2/Sm/QmvUlvah+SCDRZlmXf+c53skGDBmXdunXLRo0alS1atKjYJRXcBRdckPXv3z/r1q1bduihh2YXXHBBtnLlymKX1ebmzZuXRcQuY9KkSVmW/fH2mDfddFPWr1+/rLy8PDv99NOz5cuXF7foNrK3c3/zzTezM844IzvkkEOyrl27ZoMHD84uv/zyDvE/S7s754jIZsyY0TTnrbfeyj7/+c9nH/jAB7IDDjggO/vss7N169YVr+g28H7nvWbNmmzMmDFZ7969s/Ly8mzYsGHZDTfckNXX1xe3cPZIb9Kb9Ca9SW8qvrIsy7K2v+4DAABQeO3+PTQAAAB7ItAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEiWQAMAACRLoAEAAJIl0AAAAMkSaAAAgGQJNAAAQLIEGgAAIFkCDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmjgz7zyyitRVlYW3/72t9vsc86fPz/Kyspi/vz5bfY5ASgN+hLsnUBDhzBz5swoKyuLJUuWFLuU/WbLli0xbdq0GD9+fPTu3TvKyspi5syZxS4LgCjNvhQR0djYGF/60peiuro6evToETU1NfHUU08Vuyw6OIEGEvXGG2/E1772tfjtb38bI0eOLHY5ABCXXnpp3HHHHXHxxRfHP/zDP0Tnzp3jE5/4RDz77LPFLo0OrEuxCwBap3///rFu3bqoqqqKJUuWxAknnFDskgAoYc8991w8+OCDcdttt8X1118fERGXXHJJHHvssXHjjTfGL3/5yyJXSEflCg0lY/v27XHzzTfHcccdF5WVlXHggQfGKaecEvPmzdvjmjvvvDMGDx4cPXr0iFNPPTWWLVu2y5yXX345PvOZz0Tv3r2je/fucfzxx8ePf/zjVtf58ssvx5o1a953Xnl5eVRVVbV6HwCKq6P1pUceeSQ6d+4cV1xxRdOx7t27x2WXXRYLFy6M2traVtcAeyPQUDIaGhrin//5n+O0006Lb33rW3HLLbfE66+/HuPGjYsXX3xxl/n3339/3H333TF58uSYOnVqLFu2LD7+8Y9HXV1d05xf//rXceKJJ8Zvf/vb+PKXvxy33357HHjggTFx4sR47LHHWlXnBz/4wbjkkktae5oAJKKj9aUXXnghjjzyyKioqGh2fNSoURERuz0naAteckbJ+MAHPhCvvPJKdOvWrenY5ZdfHkcffXR85zvfie9///vN5q9cuTJWrFgRhx56aEREjB8/PmpqauJb3/pW3HHHHRER8YUvfCEGDRoU//mf/xnl5eUREfH5z38+Tj755PjSl74UZ5999n46OwBS09H60rp166J///67HH/32Nq1awu2N6XNFRpKRufOnZuaxs6dO+P3v/99vPPOO3H88cfH888/v8v8iRMnNjWNiD/+hqmmpiZ++tOfRkTE73//+3j66afj/PPPj82bN8cbb7wRb7zxRmzcuDHGjRsXK1asiNdeey13nVmWuY0mQAnoaH3prbfeagpR79W9e/emx6EQBBpKyg9+8IMYMWJEdO/ePQ4++OA45JBD4t///d+jvr5+l7lHHHHELseOPPLIeOWVVyLij78py7IsbrrppjjkkEOajWnTpkVExIYNGwp6PgCkrSP1pR49ekRjY+Mux7dt29b0OBSCl5xRMn74wx/GpZdeGhMnTowbbrgh+vbtG507d47p06fHqlWrcn++nTt3RkTE9ddfH+PGjdvtnGHDhu1TzQB0XB2tL/Xv33+3V4DWrVsXERHV1dUF25vSJtBQMh555JEYOnRoPProo1FWVtZ0/N3fWv25FStW7HLsd7/7XRx22GERETF06NCIiOjatWuMHTu27QsGoEPraH3pQx/6UMybNy8aGhqa3Rhg8eLFTY9DIXjJGSWjc+fOEfHH1wK/a/HixbFw4cLdzn/88ceb/abpueeei8WLF8eECRMiIqJv375x2mmnxfe+972m3z691+uvv96qOlt6e0wA0tbR+tJnPvOZ2LFjR/zTP/1T07HGxsaYMWNG1NTUxMCBA1u1P7wfV2joUP7lX/4l5syZs8vxL3zhC/GpT30qHn300Tj77LPjk5/8ZKxevTruu++++Iu/+IvYsmXLLmuGDRsWJ598clx11VXR2NgYd911Vxx88MFx4403Ns2555574uSTT47hw4fH5ZdfHkOHDo26urpYuHBhvPrqq/GrX/0q9zl88IMfjFNPPbVFb8D8x3/8x9i0aVPTnWN+8pOfxKuvvhoREddcc01UVlbm3h+AtlNKfammpibOO++8mDp1amzYsCGGDRsWP/jBD+KVV17Z5Y5t0KYy6ABmzJiRRcQeR21tbbZz587sG9/4RjZ48OCsvLw8+/CHP5w9+eST2aRJk7LBgwc3fa7Vq1dnEZHddttt2e23354NHDgwKy8vz0455ZTsV7/61S57r1q1KrvkkkuyqqqqrGvXrtmhhx6afepTn8oeeeSRpjnz5s3LIiKbN2/e+55LRGSnnnpqi8578ODBezzn1atXt+hzAND2SrUvvfXWW9n111+fVVVVZeXl5dkJJ5yQzZkzp0VrobXKsuw91zkBAAAS4j00AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACS1e7+sObOnTtj7dq10bNnzygrKyt2OQAdXpZlsXnz5qiuro5Onfyea3f0JoD9K09vaneBZu3atTFw4MBilwFQcmpra2PAgAHFLqNd0psAiqMlvand/SquZ8+exS4BoCT5+btnvjYAxdGSn78FCzT33HNPHHbYYdG9e/eoqamJ5557rkXrXMoHKI6O/vO3tX0pouN/bQDaq5b8/C1IoHnooYdiypQpMW3atHj++edj5MiRMW7cuNiwYUMhtgOAvdKXADqwrABGjRqVTZ48uenjHTt2ZNXV1dn06dPfd219fX0WEYZhGMZ+HvX19YVoCe3CvvSlLNObDMMwijVa0pva/ArN9u3bY+nSpTF27NimY506dYqxY8fGwoUL23o7ANgrfQmgY2vzu5y98cYbsWPHjujXr1+z4/369YuXX355l/mNjY3R2NjY9HFDQ0NblwRACcvblyL0JoCUFP0uZ9OnT4/Kysqm4baYABSb3gSQjjYPNH
369InOnTtHXV1ds+N1dXVRVVW1y/ypU6dGfX1906itrW3rkgAoYXn7UoTeBJCSNg803bp1i+OOOy7mzp3bdGznzp0xd+7cGD169C7zy8vLo6KiotkAgLaSty9F6E0AKWnz99BEREyZMiUmTZoUxx9/fIwaNSruuuuu2Lp1a3zuc58rxHYAsFf6EkDHVZBAc8EFF8Trr78eN998c6xfvz4+9KEPxZw5c3Z5QyYA7A/6EkDHVZZlWVbsIt6roaEhKisri10GQMmpr6/30qo90JsAiqMlvanodzkDAABoLYEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEiWQAMAACRLoAEAAJIl0AAAAMkSaAAAgGQJNAAAQLIEGgAAIFkCDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEiWQAMAACRLoAEAAJIl0AAAAMnqUuwCgMIbO3Zs7jX/83/+z1zzL7300tx7AADsK1doAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZXYpdAJDf4Ycfnmv+E088kXuPRx99NPcagPaqvLw895oJEybkmn/cccfl3iOvAw88MPeac889N9f8+++/P/ceec2ZMyf3mldeeSXX/Ndeey33HqTJFRoAACBZAg0AAJCsNg80t9xyS5SVlTUbRx99dFtvAwAtpjcBdFwFeQ/NMcccEz//+c//tEkXb9UBoLj0JoCOqSA/zbt06RJVVVWF+NQA0Cp6E0DHVJD30KxYsSKqq6tj6NChcfHFF8eaNWv2OLexsTEaGhqaDQBoa3oTQMfU5oGmpqYmZs6cGXPmzIl77703Vq9eHaecckps3rx5t/OnT58elZWVTWPgwIFtXRIAJU5vAui42jzQTJgwIc4777wYMWJEjBs3Ln7605/Gpk2bYvbs2budP3Xq1Kivr28atbW1bV0SACVObwLouAr+jshevXrFkUceGStXrtzt4+Xl5a36Y1cA0Fp6E0DHUfC/Q7Nly5ZYtWpV9O/fv9BbAUCL6E0AHUebB5rrr78+FixYEK+88kr88pe/jLPPPjs6d+4cF110UVtvBQAtojcBdFxt/pKzV199NS666KLYuHFjHHLIIXHyySfHokWL4pBDDmnrrQCgRfQmgI6rLMuyrNhFvFdDQ0NUVlYWuwxo16677rpc87/yla/k3uOoo47KNf/3v/997j1oX+rr66OioqLYZbRLelP7c/jhh+eaP2vWrNx7HH/88bnXdARlZWW51+yP/538wx/+kGv+P/7jP+be4xvf+Eau+du3b8+9B/m0pDcV/D00AAAAhSLQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBklWVZlhW7iPdqaGiIysrKYpcB+83QoUNzr/m///f/5pq/bNmy3HvU1NTkXkPa6uvro6KiothltEt6U2F17tw595p58+blmn/SSSfl3qNUvfbaa7nXHHrooQWoZP/L218vu+yy3HssXbo095pS1pLe5AoNAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEiWQAMAACRLoAEAAJLVpdgFQKnr06dP7jUHHHBArvnPPPNM7j0AWqu8vDzX/Pvuuy/3HieddFLuNYX2//7f/8u95qmnnso1/9Zbb829R0NDQ67527dvz71HTU1NrvnDhw/PvcenPvWpXPM/9rGP5d4jb13PPvts7j0uuOCCXPN//OMf596j1LhCAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJKsuyLCt2Ee/V0NAQlZWVxS4D9ptZs2blXnPRRRflml9TU5N7j+eeey73GtJWX18fFRUVxS6jXSrl3tS1a9fca773ve/lmj9p0qTce+T1zjvv5F7z/e9/P9f8L3/5y7n3aGhoyL2mVOX9+fTXf/3Xufe46aabcs0/8MADc+/xyiuv5Jp//PHH597jD3/4Q+417VVLepMrNAAAQLIEGgAAIFkCDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIVlmWZVmxi3ivhoaGqKysLHYZ0CqHHXZY7jW/+c1vcq+pra3NNX/kyJG599i2bVvuNaStvr4+Kioqil1Gu1TKvenrX/967jVf+cpXClDJvrnjjjtyr7nhhhsKUAnt2dFHH51r/lNPPZV7j+rq6lzzv/Wtb+Xeoz3+N9haLelNrtAAAADJyh1onnnmmTjzzDOjuro6ysrK4vHHH2/2eJZlcfPNN0f//v2jR48eMXbs2FixYkVb1QsAzehLAKUtd6DZunVrjBw5Mu65557dPn7rrbfG3XffHffdd18sXrw4DjzwwBg3bpyXrwBQEPoSQGnrknfBhAkTYsKECbt9LMuyuOuuu+Jv//Zv46yzzoqIiPvvvz/69esXjz/+eFx44YX7Vi0A/Bl9CaC0tel7aFavXh3r16+PsWPHNh2rrKyMmpqaWLhwYVtuBQDvS18C6PhyX6HZm/Xr10dERL9+/Zod79evX9Njf66xsTEaGxubPm5oaGjLkgAoYa3pSxF6E0BKin6Xs+nTp0dlZWXTGDhwYLFLAqDE6U0A6WjTQFNVVRUREXV1dc2O19XVNT3256ZOnRr19fVNI+/f1wCAPWlNX4rQmwBS0qaBZsiQIVFVVRVz585tOtbQ0BCLFy+O0aNH73ZNeXl5VFRUNBsA0BZa05ci9CaAlOR+D82WLVti5cqVTR+vXr06Xnzxxejdu3cMGjQorr322vi7v/u7OOKII2LIkCFx0003RXV1dUycOLEt6waAiNCXAEpd7kCzZMmS+NjHPtb08ZQpUyIiYtKkSTFz5sy48cYbY+vWrXHFFVfEpk2b4uSTT445c+ZE9+7d265qAPhv+hJAaSvLsiwrdhHv1dDQEJWVlcUuA1rl+OOPz73mP//zP3OvefDBB3PNv+iii3LvQempr6/30qo9KOXe9Nvf/jb3miOPPLIAleybESNG5F7z61//ugCV0JG05nv9pZdeyjX/vXdcbKlRo0blmr98+fLce+wvLelNRb/LGQAAQGsJNAAAQLIEGgAAIFkCDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQrC7FLgA6kilTpuyXfWbPnr1f9gE46qijcq/JsqwAleybb37zm7nXXHPNNbnmv/LKK7n3IG2/+93vcq95++23c80/6KCDcu8xfvz4XPOXL1+ee
4/2xBUaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEhWl2IXAO3ZYYcdlmv+2WefnXuPFStW5F7z05/+NPcagPZq+fLlude89tprueYfcsghufcYMGBArvnvvPNO7j1effXV3GtI21NPPZVr/llnnZV7j3POOSfX/H/4h3/IvUd74goNAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEiWQAMAACRLoAEAAJLVpdgFQHvWrVu3XPO7d++ee4+33nor95rGxsbcawBao6ysrOB7zJ8/P/eaz3/+821fCOwHdXV1uea35r/BU045JfealLlCAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJ6lLsAqA9Gz9+fMH3+NnPflbwPQBaq76+Pveanj175pp/wgkn5N7jox/9aK75v/zlL3PvAYVw7rnn5pqfZVnuPV544YXca1LmCg0AAJAsgQYAAEhW7kDzzDPPxJlnnhnV1dVRVlYWjz/+eLPHL7300igrK2s29sfLdgAoTfoSQGnLHWi2bt0aI0eOjHvuuWePc8aPHx/r1q1rGg888MA+FQkAe6IvAZS23DcFmDBhQkyYMGGvc8rLy6OqqqrVRQFAS+lLAKWtIO+hmT9/fvTt2zeOOuqouOqqq2Ljxo17nNvY2BgNDQ3NBgC0pTx9KUJvAkhJmwea8ePHx/333x9z586Nb33rW7FgwYKYMGFC7NixY7fzp0+fHpWVlU1j4MCBbV0SACUsb1+K0JsAUtLmf4fmwgsvbPr38OHDY8SIEXH44YfH/Pnz4/TTT99l/tSpU2PKlClNHzc0NGgcALSZvH0pQm8CSEnBb9s8dOjQ6NOnT6xcuXK3j5eXl0dFRUWzAQCF8n59KUJvAkhJwQPNq6++Ghs3boz+/fsXeisAeF/6EkDHkvslZ1u2bGn2W63Vq1fHiy++GL17947evXvHV7/61Tj33HOjqqoqVq1aFTfeeGMMGzYsxo0b16aFA0CEvgRQ6nIHmiVLlsTHPvaxpo/ffY3xpEmT4t57742XXnopfvCDH8SmTZuiuro6zjjjjPj6178e5eXlbVc1APw3fQmgtOUONKeddlpkWbbHx3/2s5/tU0HQnnz0ox8t+B6//OUvC74HdGT6UmE98sgjudd87nOfyzX/Ix/5SO49vvvd7+aaf9JJJ+XeY+vWrbnXwPvp0aNHwfd44YUXCr5He1Lw99AAAAAUikADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEiWQAMAACRLoAEAAJLVpdgFQHt25JFH5ppfV1eXe4//83/+T+41APvLV77yldxrPvKRj+SaP3LkyNx7DB8+PNf8J554Ivcef/M3f5Nr/m9+85vce5C28847L/ea8vLyAlTS3KxZswq+R3viCg0AAJAsgQYAAEiWQAMAACRLoAEAAJIl0AAAAMkSaAAAgGQJNAAAQLIEGgAAIFkCDQAAkCyBBgAASJZAAwAAJEugAQAAktWl2AXA/nL88cfnXjN8+PBc81esWJF7j40bN+ZeA7C/bNiwIfeaT3ziE7nmP/nkk7n3+PCHP5xr/sc+9rHcezz77LO55s+YMSP3HrNmzco1f9myZbn3aGxszL2mI+jRo0fuNSeccEKu+ffff3/uPTp37px7DXvnCg0AAJAsgQYAAEiWQAMAACRLoAEAAJIl0AAAAMkSaAAAgGQJNAAAQLIEGgAAIFkCDQAAkCyBBgAASJZAAwAAJKssy7Ks2EW8V0NDQ1RWVha7DBLQvXv3XPMXL16ce48RI0bkmj9lypTce9x5552510Ah1NfXR0VFRbHLaJf0psLq169f7jU/+9nPcs0fPnx47j3ao5dffjn3mieffDLX/PXr1+feY38YM2ZMrvlHH3107j2OPPLI3GsKbc6cObnXXHDBBbnmb9myJfce+0tLepMrNAAAQLIEGgAAIFkCDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIVlmWZVmxi3ivhoaGqKysLHYZJGDQoEG55v/Xf/1X7j1WrVqVa/5RRx2Ve48dO3bkXgOFUF9fHxUVFcUuo13Sm9qffv365Zr/pS99KfceX/jCF3Kv6QjKyspyr2ln/zvZrm3cuDHX/JEjR+beY926dbnXtFct6U2u0AAAAMnKFWimT58eJ5xwQvTs2TP69u0bEydOjOXLlzebs23btpg8eXIcfPDBcdBBB8W5554bdXV1bVo0ALxLbwIobbkCzYIFC2Ly5MmxaNGieOqpp+Ltt9+OM844I7Zu3do057rrrouf/OQn8fDDD8eCBQti7dq1cc4557R54QAQoTcBlLoueSbPmTOn2cczZ86Mvn37xtKlS2PMmDFRX18f3//+92PWrFnx8Y9/PCIiZsyYER/84Adj0aJFceKJJ7Zd5QAQehNAqdun99DU19dHRETv3r0jImLp0qXx9ttvx9ixY5vmHH300TFo0KBYuHDhvmwFAC2iNwGUllxXaN5r586dce2118ZJJ50Uxx57bERErF+/Prp16xa9evVqNrdfv36xfv363X6exsbGaGxsbPq4oaGhtSUBUOL0JoDS0+orNJMnT45ly5bFgw8+uE8FTJ8+PSorK5vGwIED9+nzAVC69CaA0tOqQHP11VfHk08+GfPmzYsBAwY0Ha+qqort27fHpk2bms2vq6uLqqqq3X6uqVOnRn19fdOora1tTUkAlDi9CaA05Qo0WZbF1VdfHY899lg8/fTTMWTIkGaPH3fccdG1a9eYO3du07Hly5fHmjVrYvTo0bv9nOXl5VFRUdFsAEBL6U0ApS3Xe2gmT54cs2bNiieeeCJ69uzZ9NrjysrK6NGjR1RWVsZll10WU6ZMid69e0dFRUVcc801MXr0aHeRAaAg9CaA0pYr0Nx7770REXHaaac1Oz5jxoy49NJLIyLizjvvjE6dOsW5554bjY2NMW7cuPjud7/bJsUCwJ/TmwBKW1mWZVmxi3ivhoaGqKysLHYZJGD69Om55n/5y1/Ovcdtt92Wa/6NN96Yew9oL+rr6720ag/0pvR16pT/bcN5/3toTQ/45Cc/mWv+u3fvK6SysrLca9rZ/0622tq1a3PNnzFjRu49vvnNb+aa/+abb+beoyNpSW/ap79DAwAAUEwCDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkq0uxC4DW6tGjR8H3mD17dsH3AKDwdu7cmXvNpk2bcs3/yle+knuPr3/967nmH3XUUbn3OOecc3LNLysry71HlmW51+T12muv5Zo/d+7c3HvU19fnmv/666/n3oO25woNAACQ
LIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEiWQAMAACSrLMuyrNhFvFdDQ0NUVlYWuwwScMghh+Sa//zzz+fe48Ybb8w1/4EHHsi9B7QX9fX1UVFRUewy2iW9CaA4WtKbXKEBAACSJdAAAADJEmgAAIBkCTQAAECyBBoAACBZAg0AAJAsgQYAAEiWQAMAACRLoAEAAJIl0AAAAMkSaAAAgGQJNAAAQLK6FLsAaK3XX3891/yBAwcWqBIAAIrFFRoAACBZAg0AAJAsgQYAAEiWQAMAACRLoAEAAJIl0AAAAMkSaAAAgGQJNAAAQLIEGgAAIFkCDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAycoVaKZPnx4nnHBC9OzZM/r27RsTJ06M5cuXN5tz2mmnRVlZWbNx5ZVXtmnRAPAuvQmgtOUKNAsWLIjJkyfHokWL4qmnnoq33347zjjjjNi6dWuzeZdffnmsW7euadx6661tWjQAvEtvAihtXfJMnjNnTrOPZ86cGX379o2lS5fGmDFjmo4fcMABUVVV1TYVAsBe6E0ApW2f3kNTX18fERG9e/dudvxHP/pR9OnTJ4499tiYOnVqvPnmm3v8HI2NjdHQ0NBsAEBr6U0AJSZrpR07dmSf/OQns5NOOqnZ8e9973vZnDlzspdeein74Q9/mB166KHZ2WefvcfPM23atCwiDMMwjCKP+vr61raEdkNvMgzD6FijJb2p1YHmyiuvzAYPHpzV1tbudd7cuXOziMhWrly528e3bduW1dfXN43a2tqif+EMwzBKcXSEQKM3GYZhdKzRkt6U6z0077r66qvjySefjGeeeSYGDBiw17k1NTUREbFy5co4/PDDd3m8vLw8ysvLW1MGADTRmwBKU65Ak2VZXHPNNfHYY4/F/PnzY8iQIe+75sUXX4yIiP79+7eqQADYG70JoLTlCjSTJ0+OWbNmxRNPPBE9e/aM9evXR0REZWVl9OjRI1atWhWzZs2KT3ziE3HwwQfHSy+9FNddd12MGTMmRowYUZATAKC06U0AJS7Pa5NjD69tmzFjRpZlWbZmzZpszJgxWe/evbPy8vJs2LBh2Q033JDrddn19fVFf62eYRhGKY5U30Ozp/PRmwzDMNIfLflZXfbfzaDdaGhoiMrKymKXAVBy6uvro6KiothltEt6E0BxtKQ37dPfoQEAACgmgQYAAEiWQAMAACRLoAEAAJIl0AAAAMkSaAAAgGQJNAAAQLIEGgAAIFkCDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABIlkADAAAkS6ABAACSJdAAAADJEmgAAIBkCTQAAECy2l2gybKs2CUAlCQ/f/fM1wagOFry87fdBZrNmzcXuwSAkuTn75752gAUR0t+/pZl7ezXTjt37oy1a9dGz549o6ysrNljDQ0NMXDgwKitrY2KiooiVbj/lep5R5TuuZfqeUeU7rkX87yzLIvNmzdHdXV1dOrU7n7P1S7sqTeV6vdrROmee6med0TpnnupnndEOr2py36qqcU6deoUAwYM2OucioqKkvuGiijd844o3XMv1fOOKN1zL9Z5V1ZW7vc9U/J+valUv18jSvfcS/W8I0r33Ev1vCPaf2/yqzgAACBZAg0AAJCspAJNeXl5TJs2LcrLy4tdyn5VqucdUbrnXqrnHVG6516q5526Un7eSvXcS/W8I0r33Ev1vCPSOfd2d1MAAACAlkrqCg0AAMB7CTQAAECyBBoAACBZAg0AAJCsZALNPffcE4cddlh07949ampq4rnnnit2SQV3yy23RFlZWbNx9NFHF7usNvfMM8/EmWeeGdXV1VFWVhaPP/54s8ezLIubb745+vfvHz169IixY8fGihUrilNsG3u/c7/00kt3+R4YP358cYptQ9OnT48TTjghevbsGX379o2JEyfG8uXLm83Ztm1bTJ48OQ4++OA46KCD4txzz426uroiVdw2WnLep5122i7P+ZVXXlmkink/epPepDfpTXpT8SURaB566KGYMmVKTJs2LZ5//vkYOXJkjBs3LjZs2FDs0grumGOOiXXr1jWNZ599ttgltbmtW7fGyJEj45577tnt47feemvcfffdcd9998XixYvjwAMPjHHjxsW2bdv2c6Vt7/3OPSJi/Pjxzb4HHnjggf1YYWEsWLAgJk+eHIsWLYqnnnoq3n777TjjjDNi69atTXOuu+66+MlPfhIPP/xwLFiwINauXRvnnHNOEavedy0574iIyy+/vNlzfuuttxapYvZGb9Kb9Ca9SW9qJ7IEjBo1Kps8eXLTxzt27Miqq6uz6dOnF7Gqwps2bVo2cuTIYpexX0VE9thjjzV9vHPnzqyqqiq77bbbmo5t2rQpKy8vzx544IEiVFg4f37uWZZlkyZNys4666yi1LM/bdiwIYuIbMGCBVmW/fE57tq1a/bwww83zfntb3+bRUS2cOHCYpXZ5v78vLMsy0499dTsC1/4QvGKosX0ptKhNz3W7JjepDe1N+3+Cs327dtj6dKlMXbs2KZjnTp1irFjx8bChQuLWNn+sWLFiqiuro6hQ4fGxRdfHGvWrCl2SfvV6tWrY/369c2e/8rKyqipqSmJ5z8iYv78+dG3b9846qij4qqrroqNGzcWu6Q2V19fHxERvXv3joiIpUuXxttvv93seT/66KNj0KBBHep5//PzftePfvSj6NOnTxx77LExderUePPNN4tRHnuhN+lNepPeFKE3tRddil3A+3njjTdix44d0a9fv2bH+/XrFy+//HKRqto/ampqYubMmXHUUUfFunXr4qtf/WqccsopsWzZsujZs2exy9sv1q9fHxGx2+f/3cc6svHjx8c555wTQ4YMiVWrVsVXvvKVmDBhQixcuDA6d+5c7PLaxM6dO+Paa6+Nk046KY499tiI+OPz3q1bt+jVq1ezuR3ped/deUdEfPazn43BgwdHdXV1vPTSS/GlL30pli9fHo8++mgRq+XP6U16U4TepDf9UUd63lPtTe0+0JSyCRMmNP17xIgRUVNTE4MHD47Zs2fHZZddVsTK2F8uvPDCpn8PHz48RowYEYcffnjMnz8/Tj/99CJW1nYmT54cy5Yt65Cvwd+bPZ33FVdc0fTv4cOHR//+/eP000+PVatWxeGHH76/y4Rd6E3oTR1Xqr2p3b/krE+fPtG5c+dd7iBRV1cXVVVVRaqqOHr16hVHHnlkrFy5stil7DfvPsee/z8aOnRo9OnTp8N8D1x99dXx5JNPxrx582LAgAFNx6uqqmL79u2xadOmZvM7yvO+p/PenZqamoiIDvOcdxR605/oTX9Sis9/hN7UUZ73lHtTuw803bp1i+OOOy7mzp3bdGznzp0xd+7cGD16dBEr2/+2bNkSq1ativ79+xe7lP1myJAhUVVV1ez5b2hoiMWLF5fc8x8R8eqrr8bGjRuT/x7IsiyuvvrqeOyxx+Lpp5+OIUO
GNHv8uOOOi65duzZ73pcvXx5r1qxJ+nl/v/PenRdffDEiIvnnvKPRm/5Eb/ojvUlvSlWH6E3FvSdByzz44INZeXl5NnPmzOw3v/lNdsUVV2S9evXK1q9fX+zSCuqLX/xiNn/+/Gz16tXZL37xi2zs2LFZnz59sg0bNhS7tDa1efPm7IUXXsheeOGFLCKyO+64I3vhhRey//qv/8qyLMu++c1vZr169cqeeOKJ7KWXXsrOOuusbMiQIdlbb71V5Mr33d7OffPmzdn111+fLVy4MFu9enX285//PPvIRz6SHXHEEdm2bduKXfo+ueqqq7LKysps/vz52bp165rGm2++2TTnyiuvzAYNGpQ9/fTT2ZIlS7LRo0dno0ePLmLV++79znvlypXZ1772tWzJkiXZ6tWrsyeeeCIbOnRoNmbMmCJXzu7oTXqT3qQ36U3tQxKBJsuy7Dvf+U42aNCgrFu3btmoUaOyRYsWFbukgrvggguy/v37Z926dcsOPfTQ7IILLshWrlxZ7LLa3Lx587KI2GVMmjQpy7I/3h7zpptuyvr165eVl5dnp59+erZ8+fLiFt1G9nbub775ZnbGGWdkhxxySNa1a9ds8ODB2eWXX94h/mdpd+ccEdmMGTOa5rz11lvZ5z//+ewDH/hAdsABB2Rnn312tm7duuIV3Qbe77zXrFmTjRkzJuvdu3dWXl6eDRs2LLvhhhuy+vr64hbOHulNepPepDfpTcVXlmVZ1vbXfQAAAAqv3b+HBgAAYE8EGgAAIFkCDQAAkCyBBgAASJZAAwAAJEugAQAAkiXQAAAAyRJoAACAZAk0AABAsgQaAAAgWQINAACQLIEGAABI1v8Ppk4lRe/kpYsAAAAASUVORK5CYII=\n" 85 | }, 86 | "metadata": {} 87 | } 88 | ] 89 | }, 90 | { 91 | "cell_type": "code", 92 | "source": [ 93 | "class FFDense(keras.layers.Layer):\n", 94 | " \"\"\"\n", 95 | " A custom ForwardForward-enabled Dense layer. It has an implementation of the\n", 96 | " Forward-Forward network internally for use.\n", 97 | " This layer must be used in conjunction with the `FFNetwork` model.\n", 98 | " \"\"\"\n", 99 | "\n", 100 | " def __init__(\n", 101 | " self,\n", 102 | " units,\n", 103 | " optimizer,\n", 104 | " loss_metric,\n", 105 | " num_epochs=50,\n", 106 | " use_bias=True,\n", 107 | " kernel_initializer=\"glorot_uniform\",\n", 108 | " bias_initializer=\"zeros\",\n", 109 | " kernel_regularizer=None,\n", 110 | " bias_regularizer=None,\n", 111 | " **kwargs,\n", 112 | " ):\n", 113 | " super().__init__(**kwargs)\n", 114 | " self.dense = keras.layers.Dense(\n", 115 | " units=units,\n", 116 | " use_bias=use_bias,\n", 117 | " kernel_initializer=kernel_initializer,\n", 118 | " bias_initializer=bias_initializer,\n", 119 | " kernel_regularizer=kernel_regularizer,\n", 120 | " bias_regularizer=bias_regularizer,\n", 121 | " )\n", 122 | " self.relu = keras.layers.ReLU()\n", 123 | " self.optimizer = optimizer\n", 124 | " self.loss_metric = loss_metric\n", 125 | " self.threshold = 1.5\n", 126 | " self.num_epochs = num_epochs\n", 127 | "\n", 128 | " # We perform a normalization step before we run the input through the Dense\n", 129 | " # layer.\n", 130 | "\n", 131 | " def call(self, x):\n", 132 | " x_norm = tf.norm(x, ord=2, axis=1, keepdims=True)\n", 133 | " x_norm = x_norm + 1e-4\n", 134 | " x_dir = x / x_norm\n", 135 | " res = self.dense(x_dir)\n", 136 | " return self.relu(res)\n", 137 | "\n", 138 | " # The Forward-Forward algorithm is below. We first perform the Dense-layer\n", 139 | " # operation and then get a Mean Square value for all positive and negative\n", 140 | " # samples respectively.\n", 141 | " # The custom loss function finds the distance between the Mean-squared\n", 142 | " # result and the threshold value we set (a hyperparameter) that will define\n", 143 | " # whether the prediction is positive or negative in nature. Once the loss is\n", 144 | " # calculated, we get a mean across the entire batch combined and perform a\n", 145 | " # gradient calculation and optimization step. 
This does not technically\n", 146 | " # qualify as backpropagation since there is no gradient being\n", 147 | " # sent to any previous layer and is completely local in nature.\n", 148 | "\n", 149 | " def forward_forward(self, x_pos, x_neg):\n", 150 | " for i in range(self.num_epochs):\n", 151 | " with tf.GradientTape() as tape:\n", 152 | " g_pos = tf.math.reduce_mean(tf.math.pow(self.call(x_pos), 2), 1)\n", 153 | " g_neg = tf.math.reduce_mean(tf.math.pow(self.call(x_neg), 2), 1)\n", 154 | "\n", 155 | " loss = tf.math.log(\n", 156 | " 1\n", 157 | " + tf.math.exp(\n", 158 | " tf.concat([-g_pos + self.threshold, g_neg - self.threshold], 0)\n", 159 | " )\n", 160 | " )\n", 161 | " mean_loss = tf.cast(tf.math.reduce_mean(loss), tf.float32)\n", 162 | " self.loss_metric.update_state([mean_loss])\n", 163 | " gradients = tape.gradient(mean_loss, self.dense.trainable_weights)\n", 164 | " self.optimizer.apply_gradients(zip(gradients, self.dense.trainable_weights))\n", 165 | " return (\n", 166 | " tf.stop_gradient(self.call(x_pos)),\n", 167 | " tf.stop_gradient(self.call(x_neg)),\n", 168 | " self.loss_metric.result(),\n", 169 | " )" 170 | ], 171 | "metadata": { 172 | "id": "7-ooQdIWBvbd" 173 | }, 174 | "execution_count": 9, 175 | "outputs": [] 176 | }, 177 | { 178 | "cell_type": "code", 179 | "source": [ 180 | " " 181 | ], 182 | "metadata": { 183 | "id": "51AtIFVyB0bl" 184 | }, 185 | "execution_count": 10, 186 | "outputs": [] 187 | }, 188 | { 189 | "cell_type": "code", 190 | "source": [ 191 | "x_train = x_train.astype(float) / 255\n", 192 | "x_test = x_test.astype(float) / 255\n", 193 | "y_train = y_train.astype(int)\n", 194 | "y_test = y_test.astype(int)\n", 195 | "\n", 196 | "train_dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train))\n", 197 | "test_dataset = tf.data.Dataset.from_tensor_slices((x_test, y_test))\n", 198 | "\n", 199 | "train_dataset = train_dataset.batch(60000)\n", 200 | "test_dataset = test_dataset.batch(10000)" 201 | ], 202 | "metadata": { 203 | "id": "TDTTAdZcB3h_" 204 | }, 205 | "execution_count": 11, 206 | "outputs": [] 207 | }, 208 | { 209 | "cell_type": "code", 210 | "source": [ 211 | "model = FFNetwork(dims=[784, 500, 500])\n", 212 | "\n", 213 | "model.compile(\n", 214 | " optimizer=keras.optimizers.Adam(learning_rate=0.03),\n", 215 | " loss=\"mse\",\n", 216 | " jit_compile=True,\n", 217 | " metrics=[keras.metrics.Mean()],\n", 218 | ")\n", 219 | "\n", 220 | "epochs = 250\n", 221 | "history = model.fit(train_dataset, epochs=epochs)" 222 | ], 223 | "metadata": { 224 | "colab": { 225 | "base_uri": "https://localhost:8080/" 226 | }, 227 | "id": "ZLPH01lSB7vA", 228 | "outputId": "6dce8917-6cb2-44fa-9eb5-5950201d6f61" 229 | }, 230 | "execution_count": 12, 231 | "outputs": [ 232 | { 233 | "metadata": { 234 | "tags": null 235 | }, 236 | "name": "stdout", 237 | "output_type": "stream", 238 | "text": [ 239 | "Epoch 1/250\n", 240 | "Training layer 1 now : \n", 241 | "Training layer 2 now : \n", 242 | "Training layer 1 now : \n", 243 | "Training layer 2 now : \n", 244 | "1/1 [==============================] - 94s 94s/step - FinalLoss: 0.7271\n", 245 | "Epoch 2/250\n", 246 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.7086\n", 247 | "Epoch 3/250\n", 248 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.6972\n", 249 | "Epoch 4/250\n", 250 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.6755\n", 251 | "Epoch 5/250\n", 252 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.6525\n", 253 | "Epoch 6/250\n", 
254 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.6311\n", 255 | "Epoch 7/250\n", 256 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.6121\n", 257 | "Epoch 8/250\n", 258 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.5955\n", 259 | "Epoch 9/250\n", 260 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.5806\n", 261 | "Epoch 10/250\n", 262 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.5674\n", 263 | "Epoch 11/250\n", 264 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.5556\n", 265 | "Epoch 12/250\n", 266 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.5449\n", 267 | "Epoch 13/250\n", 268 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.5350\n", 269 | "Epoch 14/250\n", 270 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.5260\n", 271 | "Epoch 15/250\n", 272 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.5178\n", 273 | "Epoch 16/250\n", 274 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.5102\n", 275 | "Epoch 17/250\n", 276 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.5032\n", 277 | "Epoch 18/250\n", 278 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4967\n", 279 | "Epoch 19/250\n", 280 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4907\n", 281 | "Epoch 20/250\n", 282 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4851\n", 283 | "Epoch 21/250\n", 284 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4799\n", 285 | "Epoch 22/250\n", 286 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4750\n", 287 | "Epoch 23/250\n", 288 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4704\n", 289 | "Epoch 24/250\n", 290 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4660\n", 291 | "Epoch 25/250\n", 292 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4619\n", 293 | "Epoch 26/250\n", 294 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4581\n", 295 | "Epoch 27/250\n", 296 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4543\n", 297 | "Epoch 28/250\n", 298 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4508\n", 299 | "Epoch 29/250\n", 300 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4475\n", 301 | "Epoch 30/250\n", 302 | "1/1 [==============================] - 7s 7s/step - FinalLoss: 0.4443\n", 303 | "Epoch 31/250\n", 304 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4412\n", 305 | "Epoch 32/250\n", 306 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4383\n", 307 | "Epoch 33/250\n", 308 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4356\n", 309 | "Epoch 34/250\n", 310 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4330\n", 311 | "Epoch 35/250\n", 312 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4305\n", 313 | "Epoch 36/250\n", 314 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4281\n", 315 | "Epoch 37/250\n", 316 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4258\n", 317 | "Epoch 38/250\n", 318 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4237\n", 319 | "Epoch 39/250\n", 320 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4215\n", 321 | 
"Epoch 40/250\n", 322 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4194\n", 323 | "Epoch 41/250\n", 324 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4174\n", 325 | "Epoch 42/250\n", 326 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4155\n", 327 | "Epoch 43/250\n", 328 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4136\n", 329 | "Epoch 44/250\n", 330 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4118\n", 331 | "Epoch 45/250\n", 332 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4100\n", 333 | "Epoch 46/250\n", 334 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4083\n", 335 | "Epoch 47/250\n", 336 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4067\n", 337 | "Epoch 48/250\n", 338 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4051\n", 339 | "Epoch 49/250\n", 340 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4036\n", 341 | "Epoch 50/250\n", 342 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4020\n", 343 | "Epoch 51/250\n", 344 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.4005\n", 345 | "Epoch 52/250\n", 346 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3991\n", 347 | "Epoch 53/250\n", 348 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3977\n", 349 | "Epoch 54/250\n", 350 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3963\n", 351 | "Epoch 55/250\n", 352 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3950\n", 353 | "Epoch 56/250\n", 354 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3937\n", 355 | "Epoch 57/250\n", 356 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3924\n", 357 | "Epoch 58/250\n", 358 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3912\n", 359 | "Epoch 59/250\n", 360 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3900\n", 361 | "Epoch 60/250\n", 362 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3889\n", 363 | "Epoch 61/250\n", 364 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3877\n", 365 | "Epoch 62/250\n", 366 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3866\n", 367 | "Epoch 63/250\n", 368 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3855\n", 369 | "Epoch 64/250\n", 370 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3844\n", 371 | "Epoch 65/250\n", 372 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3834\n", 373 | "Epoch 66/250\n", 374 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3824\n", 375 | "Epoch 67/250\n", 376 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3813\n", 377 | "Epoch 68/250\n", 378 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3803\n", 379 | "Epoch 69/250\n", 380 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3794\n", 381 | "Epoch 70/250\n", 382 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3784\n", 383 | "Epoch 71/250\n", 384 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3775\n", 385 | "Epoch 72/250\n", 386 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3765\n", 387 | "Epoch 73/250\n", 388 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 
0.3756\n", 389 | "Epoch 74/250\n", 390 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3747\n", 391 | "Epoch 75/250\n", 392 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3739\n", 393 | "Epoch 76/250\n", 394 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3730\n", 395 | "Epoch 77/250\n", 396 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3722\n", 397 | "Epoch 78/250\n", 398 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3713\n", 399 | "Epoch 79/250\n", 400 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3705\n", 401 | "Epoch 80/250\n", 402 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3698\n", 403 | "Epoch 81/250\n", 404 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3689\n", 405 | "Epoch 82/250\n", 406 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3681\n", 407 | "Epoch 83/250\n", 408 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3674\n", 409 | "Epoch 84/250\n", 410 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3667\n", 411 | "Epoch 85/250\n", 412 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3659\n", 413 | "Epoch 86/250\n", 414 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3652\n", 415 | "Epoch 87/250\n", 416 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3645\n", 417 | "Epoch 88/250\n", 418 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3638\n", 419 | "Epoch 89/250\n", 420 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3631\n", 421 | "Epoch 90/250\n", 422 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3624\n", 423 | "Epoch 91/250\n", 424 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3617\n", 425 | "Epoch 92/250\n", 426 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3611\n", 427 | "Epoch 93/250\n", 428 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3604\n", 429 | "Epoch 94/250\n", 430 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3598\n", 431 | "Epoch 95/250\n", 432 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3592\n", 433 | "Epoch 96/250\n", 434 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3585\n", 435 | "Epoch 97/250\n", 436 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3579\n", 437 | "Epoch 98/250\n", 438 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3573\n", 439 | "Epoch 99/250\n", 440 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3567\n", 441 | "Epoch 100/250\n", 442 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3561\n", 443 | "Epoch 101/250\n", 444 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3555\n", 445 | "Epoch 102/250\n", 446 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3549\n", 447 | "Epoch 103/250\n", 448 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3544\n", 449 | "Epoch 104/250\n", 450 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3538\n", 451 | "Epoch 105/250\n", 452 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3532\n", 453 | "Epoch 106/250\n", 454 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3527\n", 455 | "Epoch 107/250\n", 456 | "1/1 [==============================] - 
6s 6s/step - FinalLoss: 0.3521\n", 457 | "Epoch 108/250\n", 458 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3516\n", 459 | "Epoch 109/250\n", 460 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3511\n", 461 | "Epoch 110/250\n", 462 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3506\n", 463 | "Epoch 111/250\n", 464 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3500\n", 465 | "Epoch 112/250\n", 466 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3495\n", 467 | "Epoch 113/250\n", 468 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3490\n", 469 | "Epoch 114/250\n", 470 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3485\n", 471 | "Epoch 115/250\n", 472 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3480\n", 473 | "Epoch 116/250\n", 474 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3475\n", 475 | "Epoch 117/250\n", 476 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3471\n", 477 | "Epoch 118/250\n", 478 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3466\n", 479 | "Epoch 119/250\n", 480 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3462\n", 481 | "Epoch 120/250\n", 482 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3457\n", 483 | "Epoch 121/250\n", 484 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3452\n", 485 | "Epoch 122/250\n", 486 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3448\n", 487 | "Epoch 123/250\n", 488 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3443\n", 489 | "Epoch 124/250\n", 490 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3439\n", 491 | "Epoch 125/250\n", 492 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3435\n", 493 | "Epoch 126/250\n", 494 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3430\n", 495 | "Epoch 127/250\n", 496 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3426\n", 497 | "Epoch 128/250\n", 498 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3422\n", 499 | "Epoch 129/250\n", 500 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3418\n", 501 | "Epoch 130/250\n", 502 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3414\n", 503 | "Epoch 131/250\n", 504 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3410\n", 505 | "Epoch 132/250\n", 506 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3406\n", 507 | "Epoch 133/250\n", 508 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3402\n", 509 | "Epoch 134/250\n", 510 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3398\n", 511 | "Epoch 135/250\n", 512 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3394\n", 513 | "Epoch 136/250\n", 514 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3391\n", 515 | "Epoch 137/250\n", 516 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3387\n", 517 | "Epoch 138/250\n", 518 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3383\n", 519 | "Epoch 139/250\n", 520 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3379\n", 521 | "Epoch 140/250\n", 522 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3376\n", 523 | "Epoch 
141/250\n", 524 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3372\n", 525 | "Epoch 142/250\n", 526 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3368\n", 527 | "Epoch 143/250\n", 528 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3365\n", 529 | "Epoch 144/250\n", 530 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3361\n", 531 | "Epoch 145/250\n", 532 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3358\n", 533 | "Epoch 146/250\n", 534 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3354\n", 535 | "Epoch 147/250\n", 536 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3351\n", 537 | "Epoch 148/250\n", 538 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3347\n", 539 | "Epoch 149/250\n", 540 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3344\n", 541 | "Epoch 150/250\n", 542 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3341\n", 543 | "Epoch 151/250\n", 544 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3338\n", 545 | "Epoch 152/250\n", 546 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3335\n", 547 | "Epoch 153/250\n", 548 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3331\n", 549 | "Epoch 154/250\n", 550 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3328\n", 551 | "Epoch 155/250\n", 552 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3325\n", 553 | "Epoch 156/250\n", 554 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3322\n", 555 | "Epoch 157/250\n", 556 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3319\n", 557 | "Epoch 158/250\n", 558 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3316\n", 559 | "Epoch 159/250\n", 560 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3313\n", 561 | "Epoch 160/250\n", 562 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3310\n", 563 | "Epoch 161/250\n", 564 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3307\n", 565 | "Epoch 162/250\n", 566 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3304\n", 567 | "Epoch 163/250\n", 568 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3301\n", 569 | "Epoch 164/250\n", 570 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3298\n", 571 | "Epoch 165/250\n", 572 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3296\n", 573 | "Epoch 166/250\n", 574 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3293\n", 575 | "Epoch 167/250\n", 576 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3290\n", 577 | "Epoch 168/250\n", 578 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3287\n", 579 | "Epoch 169/250\n", 580 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3285\n", 581 | "Epoch 170/250\n", 582 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3282\n", 583 | "Epoch 171/250\n", 584 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3279\n", 585 | "Epoch 172/250\n", 586 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3276\n", 587 | "Epoch 173/250\n", 588 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3274\n", 589 | "Epoch 174/250\n", 590 | "1/1 [==============================] 
- 6s 6s/step - FinalLoss: 0.3271\n", 591 | "Epoch 175/250\n", 592 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3269\n", 593 | "Epoch 176/250\n", 594 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3266\n", 595 | "Epoch 177/250\n", 596 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3263\n", 597 | "Epoch 178/250\n", 598 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3261\n", 599 | "Epoch 179/250\n", 600 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3258\n", 601 | "Epoch 180/250\n", 602 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3256\n", 603 | "Epoch 181/250\n", 604 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3253\n", 605 | "Epoch 182/250\n", 606 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3251\n", 607 | "Epoch 183/250\n", 608 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3249\n", 609 | "Epoch 184/250\n", 610 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3246\n", 611 | "Epoch 185/250\n", 612 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3244\n", 613 | "Epoch 186/250\n", 614 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3242\n", 615 | "Epoch 187/250\n", 616 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3239\n", 617 | "Epoch 188/250\n", 618 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3237\n", 619 | "Epoch 189/250\n", 620 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3235\n", 621 | "Epoch 190/250\n", 622 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3232\n", 623 | "Epoch 191/250\n", 624 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3230\n", 625 | "Epoch 192/250\n", 626 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3228\n", 627 | "Epoch 193/250\n", 628 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3226\n", 629 | "Epoch 194/250\n", 630 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3223\n", 631 | "Epoch 195/250\n", 632 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3221\n", 633 | "Epoch 196/250\n", 634 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3219\n", 635 | "Epoch 197/250\n", 636 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3217\n", 637 | "Epoch 198/250\n", 638 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3215\n", 639 | "Epoch 199/250\n", 640 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3213\n", 641 | "Epoch 200/250\n", 642 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3211\n", 643 | "Epoch 201/250\n", 644 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3209\n", 645 | "Epoch 202/250\n", 646 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3207\n", 647 | "Epoch 203/250\n", 648 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3205\n", 649 | "Epoch 204/250\n", 650 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3203\n", 651 | "Epoch 205/250\n", 652 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3201\n", 653 | "Epoch 206/250\n", 654 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3199\n", 655 | "Epoch 207/250\n", 656 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3197\n", 657 | "Epoch 
208/250\n", 658 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3195\n", 659 | "Epoch 209/250\n", 660 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3193\n", 661 | "Epoch 210/250\n", 662 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3191\n", 663 | "Epoch 211/250\n", 664 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3189\n", 665 | "Epoch 212/250\n", 666 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3187\n", 667 | "Epoch 213/250\n", 668 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3185\n", 669 | "Epoch 214/250\n", 670 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3184\n", 671 | "Epoch 215/250\n", 672 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3182\n", 673 | "Epoch 216/250\n", 674 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3180\n", 675 | "Epoch 217/250\n", 676 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3178\n", 677 | "Epoch 218/250\n", 678 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3176\n", 679 | "Epoch 219/250\n", 680 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3174\n", 681 | "Epoch 220/250\n", 682 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3172\n", 683 | "Epoch 221/250\n", 684 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3170\n", 685 | "Epoch 222/250\n", 686 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3169\n", 687 | "Epoch 223/250\n", 688 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3167\n", 689 | "Epoch 224/250\n", 690 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3165\n", 691 | "Epoch 225/250\n", 692 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3164\n", 693 | "Epoch 226/250\n", 694 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3162\n", 695 | "Epoch 227/250\n", 696 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3160\n", 697 | "Epoch 228/250\n", 698 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3158\n", 699 | "Epoch 229/250\n", 700 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3157\n", 701 | "Epoch 230/250\n", 702 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3155\n", 703 | "Epoch 231/250\n", 704 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3153\n", 705 | "Epoch 232/250\n", 706 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3152\n", 707 | "Epoch 233/250\n", 708 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3150\n", 709 | "Epoch 234/250\n", 710 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3149\n", 711 | "Epoch 235/250\n", 712 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3147\n", 713 | "Epoch 236/250\n", 714 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3145\n", 715 | "Epoch 237/250\n", 716 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3144\n", 717 | "Epoch 238/250\n", 718 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3142\n", 719 | "Epoch 239/250\n", 720 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3141\n", 721 | "Epoch 240/250\n", 722 | "1/1 [==============================] - 6s 6s/step - FinalLoss: 0.3139\n", 723 | "Epoch 241/250\n", 724 | "1/1 [==============================] 
743 | ]
744 | }
745 | ]
746 | },
747 | {
748 | "cell_type": "code",
749 | "source": [],
750 | "metadata": {
751 | "id": "7vDTRzOqB8m0"
752 | },
753 | "execution_count": null,
754 | "outputs": []
755 | }
756 | ]
757 | }
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2022 TAWSIF AHMED
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Forward-Forward Algorithm in TensorFlow

Forward-Forward is an algorithm that Geoffrey Hinton unveiled at NeurIPS '22. **Hinton's lecture:** [https://www.cs.toronto.edu/~hinton/FFA13.pdf]

Most implementations of this algorithm I came across were in PyTorch; I personally prefer TensorFlow, so I recreated the algorithm in TensorFlow, drawing inspiration from the [https://github.com/madcato/forward-forward-pytorch] repo.

**Status:**
This is an ongoing project. The current implementation uses dense layers; I aim to deliver further updates that extend the algorithm to convolutional networks and to other datasets.
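**How it works (sketch):**
Each layer trains itself locally, with no gradients flowing between layers. A layer's "goodness" on a sample is the mean of its squared activations; training pushes goodness above a fixed threshold for positive samples (images with the correct label overlaid) and below it for negative samples (images with a wrong label overlaid). The snippet below is a minimal illustrative sketch of that layer-local objective, mirroring the loss in `FF.py` (`tf.math.softplus(x)` is the numerically stable form of the `log(1 + exp(x))` used there):

```python
import tensorflow as tf

def layer_loss(h_pos, h_neg, threshold=1.5):
    # "Goodness" = per-sample mean of squared activations.
    g_pos = tf.reduce_mean(tf.square(h_pos), axis=1)
    g_neg = tf.reduce_mean(tf.square(h_neg), axis=1)
    # Penalize positive goodness below the threshold and
    # negative goodness above it.
    logits = tf.concat([threshold - g_pos, g_neg - threshold], axis=0)
    return tf.reduce_mean(tf.math.softplus(logits))
```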
**Contribution:**
Feel free to contribute to the project so that we can make Forward-Forward a first-class algorithm in TensorFlow :)

--------------------------------------------------------------------------------
/train.py:
--------------------------------------------------------------------------------
import tensorflow as tf
from tensorflow import keras
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import accuracy_score
import random
from tensorflow.compiler.tf2xla.python import xla

# FFNetwork is defined in FF.py; this import was missing in the original script.
from FF import FFNetwork

(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()

# Show four random training samples with their labels.
print("4 Random Training samples and labels")
idx1, idx2, idx3, idx4 = random.sample(range(0, x_train.shape[0]), 4)

img1 = (x_train[idx1], y_train[idx1])
img2 = (x_train[idx2], y_train[idx2])
img3 = (x_train[idx3], y_train[idx3])
img4 = (x_train[idx4], y_train[idx4])

imgs = [img1, img2, img3, img4]

plt.figure(figsize=(10, 10))

for idx, item in enumerate(imgs):
    image, label = item[0], item[1]
    plt.subplot(2, 2, idx + 1)
    plt.imshow(image, cmap="gray")
    plt.title(f"Label : {label}")
plt.show()

# Scale pixels to [0, 1] and keep labels as integer class indices.
x_train = x_train.astype(float) / 255
x_test = x_test.astype(float) / 255
y_train = y_train.astype(int)
y_test = y_test.astype(int)

train_dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train))
test_dataset = tf.data.Dataset.from_tensor_slices((x_test, y_test))

# One full-size batch per dataset: each epoch is a single step, which is why
# the training log above shows "1/1" per epoch.
train_dataset = train_dataset.batch(60000)
test_dataset = test_dataset.batch(10000)

# 784 flattened input pixels followed by two FFDense layers of 500 units each.
model = FFNetwork(dims=[784, 500, 500])

# The compiled loss is effectively a placeholder: each FFDense layer optimizes
# its own local loss with its own per-layer optimizer.
model.compile(
    optimizer=keras.optimizers.Adam(learning_rate=0.03),
    loss="mse",
    jit_compile=True,
    metrics=[keras.metrics.Mean()],
)

epochs = 250
history = model.fit(train_dataset, epochs=epochs)
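
# --- Evaluation (an illustrative sketch, not part of the original script) ---
# Assumptions: FFNetwork.predict accepts a tensor of raw 28x28 test images and
# returns one integer prediction per image (as the implementation in FF.py
# suggests); this also puts the `accuracy_score` import above to use.
preds = model.predict(tf.convert_to_tensor(x_test))
preds = preds.reshape(-1)  # collapse the per-sample singleton dimension
print(f"Test accuracy: {accuracy_score(y_test, preds) * 100:.2f}%")
--------------------------------------------------------------------------------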