├── .gitignore ├── 01-deep-neural-networks ├── 01-dnn │ ├── 01_dnn.ipynb │ ├── backprop.pdf │ └── clean_backprop.py ├── 02-cnn │ ├── cnn.ipynb │ └── cnn_jax.py ├── 03-alex-net │ └── alexnet.ipynb ├── 04-u-net │ ├── unet.ipynb │ └── unet.py ├── 05-vis-cnn │ └── visulisation_cnn.ipynb └── 06-yolo │ └── yolo.ipynb ├── 02-optimization-and-regularization ├── 01-weight-decay │ └── weightdecay.ipynb ├── 02-relu │ └── relu.ipynb ├── 03-residuals │ └── resnet.ipynb ├── 04-dropout │ └── dropout.ipynb ├── 05-batch-norm │ └── Batch-norm.ipynb ├── 06-layer-norm │ └── layernorm.ipynb ├── 07-gelu │ ├── gelu.ipynb │ └── probability.ipynb ├── 08-adam │ ├── adam.ipynb │ └── loss.ipynb └── 09-early-stopping │ └── early-stopping.ipynb ├── 03-sequence-modeling ├── 01-rnn │ ├── rnn-data │ │ └── Nietzsche_Articles.txt │ ├── rnn.ipynb │ └── simplernn.py ├── 02-lstm │ └── lstm.ipynb ├── 03-learning-to-forget │ └── learning.ipynb ├── 04-word2vec │ └── word2vec.ipynb ├── 05-seq2seq │ └── seq2seq.ipynb ├── 06-attention │ └── attention.ipynb └── 07-mixture-of-experts │ └── mixture-of-experts.ipynb ├── 04-transformers ├── 01-transformer │ └── transformer.ipynb ├── 02-bert │ └── bert.ipynb ├── 03-t5 │ └── t5.ipynb ├── 04-gpt │ └── gpt.ipynb ├── 05-lora │ └── lora.ipynb ├── 06-rlhf │ └── rlhf.ipynb └── 07-vision-transformer │ └── vit.ipynb ├── 05-image-generation ├── 01-gan │ └── gan.ipynb ├── 02-vae │ └── vae.ipynb ├── 03-diffusion │ └── sd.ipynb ├── 04-clip │ └── clip.ipynb └── 05-dall-e │ └── dalle.ipynb ├── README.md └── images ├── .gitkeep ├── 3-11.png ├── 3-12-2.png ├── 3-7.png ├── Cowboy-Bebop-Quotes1.jpeg ├── GRU.png ├── GRulCXpaUAAm5Up.jpeg ├── LKE.png ├── RNN-vs-FNN-660.png ├── T5.jpg ├── T5_1.jpg ├── add-1.drawio.png ├── alexnet-arc.png ├── batcnorm.jpeg ├── bert.jpg ├── bot-res.png ├── bottleneck.png ├── cnn.jpg ├── convolution-2.gif ├── decoder.png ├── dropout.png ├── dropoutex.jpg ├── dropoutt.png ├── earlystopping.jpg ├── encoder.png ├── f_pdf.jpg ├── for_revered_guest.png ├── imagent.png ├── imagnet-win.png ├── last-lstm.png ├── lstm-2.png ├── lstm-add.png ├── lstm-core.png ├── lstm-input.png ├── lstm.png ├── maxpool.gif ├── mul.drawio.png ├── newunet.png ├── nor-res.png ├── overlapping.png ├── pos-cal.png ├── pos-emb.png ├── probs.jpg ├── relu.png ├── reluu.png ├── res-arc.png ├── resnet.jpg ├── rnn_arc.png ├── self-feedback-loop.jpg ├── skip.png ├── transformer.jpeg ├── transistor.png ├── trig.png ├── typesofrnn.png ├── unet.png ├── unetimg.png ├── unetsd.png ├── vis-4.png ├── vis-cnn.png ├── vis_0.png ├── vis_1.png ├── vis_2.png ├── word2vec-embed.png └── word2vec.png /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | .ipynb_checkpoints 3 | datasets 4 | .DS_Store 5 | .vscode 6 | data -------------------------------------------------------------------------------- /01-deep-neural-networks/01-dnn/backprop.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/01-deep-neural-networks/01-dnn/backprop.pdf -------------------------------------------------------------------------------- /01-deep-neural-networks/01-dnn/clean_backprop.py: -------------------------------------------------------------------------------- 1 | #rewrite the backprop from ipynb 2 | 3 | class Tensor: 4 | 5 | def __init__(self,data,children=()): 6 | 7 | self.data = data 8 | self._prev = set(children) 9 | self.grad = 0.0 10 | self._backward = lambda : None 11 | 12 | 13 | 14 | pass -------------------------------------------------------------------------------- /01-deep-neural-networks/02-cnn/cnn_jax.py: -------------------------------------------------------------------------------- 1 | #import 2 | 3 | import numpy as np 4 | import matplotlib.pyplot as plt 5 | import seaborn as sns 6 | import pandas as pd 7 | from sklearn.metrics import ConfusionMatrixDisplay 8 | 9 | import torch 10 | import torchvision 11 | import torchvision.transforms as transforms 12 | from torch.utils import data 13 | 14 | import jax 15 | import jax.numpy as jnp 16 | import flax 17 | import flax.linen as nn 18 | import optax 19 | from jax.tree_util import tree_map 20 | from flax.training import train_state 21 | 22 | 23 | # data 24 | 25 | 26 | IMAGE_SIZE = 32 27 | BATCH_SIZE = 128 28 | DATA_MEANS = np.array([0.49139968, 0.48215841, 0.44653091]) 29 | DATA_STD = np.array([0.24703223, 0.24348513, 0.26158784]) 30 | CROP_SCALES = (0.8, 1.0) 31 | CROP_RATIO = (0.9, 1.1) 32 | SEED = 42 33 | 34 | 35 | 36 | # CNN 37 | 38 | class CNN(nn.Module): 39 | 40 | 41 | @nn.compact 42 | def __call__(self, x): 43 | x = nn.Conv(features=32, kernel_size=(3, 3))(x) 44 | x = nn.relu(x) 45 | x = nn.avg_pool(x, window_shape=(2, 2), strides=(2, 2)) 46 | x = nn.Conv(features=64, kernel_size=(3, 3))(x) 47 | x = nn.relu(x) 48 | x = nn.avg_pool(x, window_shape=(2, 2), strides=(2, 2)) 49 | x = x.reshape((x.shape[0], -1)) # flatten 50 | x = nn.Dense(features=256)(x) 51 | x = nn.relu(x) 52 | x = nn.Dense(features=10)(x) 53 | return x 54 | 55 | 56 | 57 | def image_to_numpy(img): 58 | img = np.array(img, dtype=np.float32) 59 | img = (img / 255. - DATA_MEANS) / DATA_STD 60 | return img 61 | 62 | 63 | def numpy_collate(batch): 64 | if isinstance(batch[0], np.ndarray): 65 | return np.stack(batch) 66 | elif isinstance(batch[0], (tuple, list)): 67 | transposed = zip(*batch) 68 | return [numpy_collate(samples) for samples in transposed] 69 | else: 70 | return np.array(batch) 71 | 72 | classes = ('plane', 'car', 'bird', 'cat', 'deer', 'dog', 'frog', 'horse', 'ship', 'truck') 73 | test_transform = image_to_numpy 74 | train_transform = transforms.Compose([ 75 | transforms.RandomHorizontalFlip(), 76 | transforms.RandomResizedCrop((IMAGE_SIZE, IMAGE_SIZE), scale=CROP_SCALES, ratio=CROP_RATIO), 77 | image_to_numpy 78 | ]) 79 | 80 | # Validation set should not use train_transform. 81 | train_dataset = torchvision.datasets.CIFAR10('data', train=True, transform=train_transform, download=True) 82 | val_dataset = torchvision.datasets.CIFAR10('data', train=True, transform=test_transform, download=True) 83 | train_set, _ = torch.utils.data.random_split(train_dataset, [45000, 5000], generator=torch.Generator().manual_seed(SEED)) 84 | _, val_set = torch.utils.data.random_split(val_dataset, [45000, 5000], generator=torch.Generator().manual_seed(SEED)) 85 | test_set = torchvision.datasets.CIFAR10('data', train=False, transform=test_transform, download=True) 86 | 87 | train_data_loader = torch.utils.data.DataLoader( 88 | train_set, batch_size=BATCH_SIZE, shuffle=True, drop_last=True, num_workers=2, persistent_workers=True, collate_fn=numpy_collate, 89 | ) 90 | val_data_loader = torch.utils.data.DataLoader( 91 | val_set, batch_size=BATCH_SIZE, shuffle=False, drop_last=False, num_workers=2, persistent_workers=True, collate_fn=numpy_collate, 92 | ) 93 | test_data_loader = torch.utils.data.DataLoader( 94 | test_set, batch_size=BATCH_SIZE, shuffle=False, drop_last=False, num_workers=2, persistent_workers=True, collate_fn=numpy_collate, 95 | ) 96 | 97 | 98 | #model init 99 | 100 | model = CNN() 101 | 102 | optimizer = optax.adam(learning_rate=1e-4) 103 | 104 | rng, inp_rng, init_rng = jax.random.split(jax.random.PRNGKey(SEED), 3) 105 | params = model.init(jax.random.PRNGKey(SEED), 106 | jax.random.normal(inp_rng, (BATCH_SIZE, 32, 32, 3))) 107 | 108 | model_state = train_state.TrainState.create(apply_fn=model.apply, 109 | params=params, 110 | tx=optimizer) 111 | 112 | 113 | # training 114 | 115 | 116 | @jax.jit 117 | def apply_model(state, images, labels): 118 | 119 | 120 | def loss_fn(params): 121 | logits = state.apply_fn(params, images) 122 | one_hot = jax.nn.one_hot(labels, logits.shape[1]) 123 | loss = jnp.mean(optax.softmax_cross_entropy(logits=logits, labels=one_hot)) 124 | return loss, logits 125 | 126 | grad_fn = jax.value_and_grad(loss_fn, has_aux=True) 127 | (loss, logits), grads = grad_fn(state.params) 128 | accuracy = jnp.mean(jnp.argmax(logits, -1) == labels) 129 | return grads, loss, accuracy 130 | 131 | 132 | @jax.jit 133 | def update_model(state, grads): 134 | return state.apply_gradients(grads=grads) 135 | 136 | 137 | def train_epoch(state, data_loader): 138 | 139 | 140 | epoch_loss = [] 141 | epoch_accuracy = [] 142 | 143 | for batch in data_loader: 144 | batch_images, batch_labels = batch 145 | grads, loss, accuracy = apply_model(state, batch_images, batch_labels) 146 | state = update_model(state, grads) 147 | epoch_loss.append(loss) 148 | epoch_accuracy.append(accuracy) 149 | train_loss = np.mean(epoch_loss) 150 | train_accuracy = np.mean(epoch_accuracy) 151 | return state, train_loss, train_accuracy 152 | 153 | 154 | def train_model(state, train_data_loader, num_epochs): 155 | # Training loop 156 | for epoch in range(num_epochs): 157 | state, train_loss, train_accuracy = train_epoch(state, train_data_loader) 158 | print(f'epoch: {epoch:03d}, train loss: {train_loss:.4f}, train accuracy: {train_accuracy:.4f}') 159 | return state 160 | 161 | 162 | trained_model_state = train_model(model_state, train_data_loader, num_epochs=100) 163 | 164 | #testing 165 | 166 | test_loss = [] 167 | test_accuracy = [] 168 | 169 | for batch in test_data_loader: 170 | batch_images, batch_labels = batch 171 | _, loss, accuracy = apply_model(trained_model_state, batch_images, batch_labels) 172 | test_loss.append(loss) 173 | test_accuracy.append(accuracy) 174 | 175 | print(f'loss: {np.mean(test_loss):.4f}, accuracy: {np.mean(test_accuracy):.4f}') -------------------------------------------------------------------------------- /01-deep-neural-networks/04-u-net/unet.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | 4 | class DoubleConv(nn.Module): 5 | def __init__(self, in_channels, out_channels): 6 | super(DoubleConv, self).__init__() 7 | self.conv = nn.Sequential( 8 | nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1), 9 | nn.ReLU(inplace=True), 10 | nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1), 11 | nn.ReLU(inplace=True), 12 | ) 13 | 14 | def forward(self, x): 15 | return self.conv(x) 16 | 17 | class DownSample(nn.Module): 18 | def __init__(self, in_channels, out_channels): 19 | super(DownSample, self).__init__() 20 | self.conv = DoubleConv(in_channels, out_channels) 21 | self.pool = nn.MaxPool2d(kernel_size=2, stride=2) 22 | 23 | def forward(self, x): 24 | down = self.conv(x) 25 | pooled = self.pool(down) 26 | return down, pooled 27 | 28 | class UpSample(nn.Module): 29 | def __init__(self, in_channels, out_channels): 30 | super(UpSample, self).__init__() 31 | self.up_conv = nn.ConvTranspose2d(in_channels, out_channels, kernel_size=2, stride=2) 32 | self.conv = DoubleConv(in_channels, out_channels) 33 | 34 | def forward(self, x1, x2): 35 | x1 = self.up_conv(x1) 36 | diffY = x2.size()[2] - x1.size()[2] 37 | diffX = x2.size()[3] - x1.size()[3] 38 | x1 = nn.functional.pad(x1, [diffX // 2, diffX - diffX // 2, diffY // 2, diffY - diffY // 2]) 39 | x = torch.cat([x2, x1], dim=1) 40 | return self.conv(x) 41 | 42 | class UNet(nn.Module): 43 | def __init__(self, in_channels, out_channels): 44 | super(UNet, self).__init__() 45 | self.down_conv_1 = DownSample(in_channels, 64) 46 | self.down_conv_2 = DownSample(64, 128) 47 | self.down_conv_3 = DownSample(128, 256) 48 | self.down_conv_4 = DownSample(256, 512) 49 | 50 | self.bottle_neck = DoubleConv(512, 1024) 51 | 52 | self.up_conv_1 = UpSample(1024, 512) 53 | self.up_conv_2 = UpSample(512, 256) 54 | self.up_conv_3 = UpSample(256, 128) 55 | self.up_conv_4 = UpSample(128, 64) 56 | 57 | self.out = nn.Conv2d(64, out_channels, kernel_size=1) 58 | 59 | def forward(self, x): 60 | down_1, p1 = self.down_conv_1(x) 61 | down_2, p2 = self.down_conv_2(p1) 62 | down_3, p3 = self.down_conv_3(p2) 63 | down_4, p4 = self.down_conv_4(p3) 64 | 65 | b = self.bottle_neck(p4) 66 | 67 | up_1 = self.up_conv_1(b, down_4) 68 | up_2 = self.up_conv_2(up_1, down_3) 69 | up_3 = self.up_conv_3(up_2, down_2) 70 | up_4 = self.up_conv_4(up_3, down_1) 71 | 72 | out = self.out(up_4) 73 | return out 74 | 75 | -------------------------------------------------------------------------------- /01-deep-neural-networks/06-yolo/yolo.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### YOLO " 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [] 16 | } 17 | ], 18 | "metadata": { 19 | "language_info": { 20 | "name": "python" 21 | } 22 | }, 23 | "nbformat": 4, 24 | "nbformat_minor": 2 25 | } 26 | -------------------------------------------------------------------------------- /02-optimization-and-regularization/06-layer-norm/layernorm.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Layer-Norm !!! (will come back to this at the time of transformer for now sayonara :) " 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": null, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [] 16 | } 17 | ], 18 | "metadata": { 19 | "language_info": { 20 | "name": "python" 21 | } 22 | }, 23 | "nbformat": 4, 24 | "nbformat_minor": 2 25 | } 26 | -------------------------------------------------------------------------------- /02-optimization-and-regularization/07-gelu/gelu.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "> Before you get into gelu you should know what is CDF ?\n", 8 | "\n", 9 | "- after learning **CDF** i think you can skip it if you want. \n", 10 | "- in short cdf helps gelu to skip negative values.\n", 11 | "\n", 12 | "- -> [text](probability.ipynb) (Link to CDF)" 13 | ] 14 | }, 15 | { 16 | "cell_type": "markdown", 17 | "metadata": {}, 18 | "source": [ 19 | "> Lest understand the gelu\n", 20 | "\n", 21 | "- gelu (gaussian error linear unit), an activation function used in neural networks.\n", 22 | "\n", 23 | "- gelu makes a hard decision by outputting zero for all negative inputs and passing through positive inputs as they are.\n", 24 | "\n", 25 | "- gelu, on the other hand, provides a probabilistic gating mechanism by using the Gaussian CDF to weight the inputs. This gives a smoother transition for input values, particularly around zero.\n", 26 | "\n", 27 | "- relu and leaky relu, which have a sharp transition at x = 0 , GELU transitions smoothly, which can lead to better performance.\n", 28 | "\n", 29 | "- gelu considers the input’s probability of being positive, which integrates both the input value and its likelihood of activation.\n", 30 | "\n", 31 | "> Why Use the Gaussian CDF?\n", 32 | "\n", 33 | "- inputs often follow a normal distribution.\n", 34 | "\n", 35 | "- gelu used this distribution of input \n", 36 | "\n", 37 | "- this can result in better performance across various tasks, as the gelu function smoothly blends in.\n", 38 | "\n", 39 | "> computation \n", 40 | "\n", 41 | "- ReLU is very simple and computationally efficient.\n", 42 | "\n", 43 | " - GELU involves more complex calculations due to the Gaussian CDF, but the approximation helps mitigate this complexity.\n", 44 | "\n", 45 | "> math (approximation )\n", 46 | "\n", 47 | "- for computational efficiency, the GELU function can be approximated as:\n", 48 | "\n", 49 | "$$ \\text{GELU}(x) \\approx 0.5x \\left(1 + \\tanh\\left(\\sqrt{\\frac{2}{\\pi}} \\left(x + 0.044715x^3\\right)\\right)\\right) $$\n", 50 | "\n", 51 | "> uses \n", 52 | "\n", 53 | "- used in bert and gpts " 54 | ] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "execution_count": 24, 59 | "metadata": {}, 60 | "outputs": [ 61 | { 62 | "data": { 63 | "image/png": "iVBORw0KGgoAAAANSUhEUgAABKUAAAJOCAYAAABm7rQwAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8fJSN1AAAACXBIWXMAAA9hAAAPYQGoP6dpAACZ5UlEQVR4nOzdd3hUVf7H8c/MpEMILRBKSIEk9GBFRQSUYl+suOsq9lWxoiioqKCCqCAqih0brBXLuipFRXSxYAGkBQKhhk4KpE5m7u+PkfyMCZCESc7NzPv1PHlyMnNz5/OdADl858y5DsuyLAEAAAAAAAD1yGk6AAAAAAAAAIIPTSkAAAAAAADUO5pSAAAAAAAAqHc0pQAAAAAAAFDvaEoBAAAAAACg3tGUAgAAAAAAQL2jKQUAAAAAAIB6R1MKAAAAAAAA9Y6mFAAAAAAAAOodTSkAtnPFFVcoMTHRyGM/+OCDcjgcRh67Ierfv7/69+9vOgYAAPiDybnMa6+9JofDoQ0bNhh5/IbG5JwXsAuaUkA9OvCL+sBHSEiI2rVrpyuuuEJbt26t1TkXLFggh8Oh999//6DHOBwO3XTTTVXe9/7778vhcGjBggXVfsznnntODodDvXv3rmncctnZ2XrwwQe1ZMmSWp+jtgoLC/Xggw/WqOb68Oc/G3/+iIuLM5pr5cqVevDBB5lgAgCMysrK0k033aTU1FRFRUUpKipKXbt21YgRI7Rs2bIKxx5ozBzsY/v27ZKkDRs2yOFw6Iknnjjo4yYmJurss8+u8r6ff/5ZDodDr732WrXr+Oyzz+RwONS2bVt5vd5qf9+fmZ7LTJgwQR999JGRxz6YxMTEg/68i4uLjeUyOecFGoIQ0wGAYDR+/HglJSWpuLhYP/zwg1577TV99913Wr58uSIiIkzHO6yZM2cqMTFRP/30kzIzM9WpU6canyM7O1vjxo1TYmKievXqVeG+l156qdaTtOooLCzUuHHjJKnSKp/77rtPo0ePrrPHPpxBgwbp8ssvr3BbZGSkoTQ+K1eu1Lhx49S/f/9Kr+bNnTvXTCgAQFD59NNPNWzYMIWEhOjSSy9Venq6nE6nVq9erdmzZ2v69OnKyspSQkJChe+bPn26GjduXOl8TZs2rafklR2YR23YsEFfffWVBg4cWONzmJ7LTJgwQRdeeKGGDh1a4fbLLrtMl1xyicLDw+v08Q+mV69euuOOOyrdHhYWZiCNj8k5L9AQ0JQCDDjjjDN07LHHSpKuueYatWzZUpMmTdInn3yiiy++2HC6Q8vKytKiRYs0e/Zs/etf/9LMmTP1wAMP+PUxQkND/Xq+mggJCVFIiLl/GlNTU/XPf/7T2OPXlMlJHgAgOKxbt06XXHKJEhIS9OWXX6pNmzYV7p80aZKee+45OZ2V3wRy4YUXqmXLlvUV9bAKCgr08ccfa+LEiZoxY4ZmzpxZq6bUoZicy7hcLrlcLiOPLUnt2rVrUPMok3NewC54+x5gA3379pXkm3T92erVq3XhhReqefPmioiI0LHHHqtPPvnERMRyM2fOVLNmzXTWWWfpwgsv1MyZM6s8Ljc3V7fffrsSExMVHh6u9u3b6/LLL9fu3bu1YMECHXfccZKkK6+8snxp9YGl739+f73b7Vbz5s115ZVXVnqM/Px8RURE6M4775QklZaW6v7779cxxxyjmJgYNWrUSH379tXXX39d/j0bNmxQbGysJGncuHHlj/3ggw9KqnofhrKyMj300EPq2LGjwsPDlZiYqHvuuUclJSUVjjuwvP+7777T8ccfr4iICCUnJ+uNN96o2ZN8EAfbd6CqzAfesvnRRx+pe/fuCg8PV7du3fTFF19U+v6tW7fq6quvVtu2bRUeHq6kpCTdcMMNKi0t1WuvvaaLLrpIkjRgwIDy5+vA2wWq2lNq586duvrqq9W6dWtFREQoPT1dr7/+eoVj/vx2iRdffLH8uT3uuOO0ePHi2j9JAICA89hjj6mgoEAzZsyo1JCSfE2YW265RfHx8QbS1cyHH36ooqIiXXTRRbrkkks0e/bsKt9aVlxcrAcffFCpqamKiIhQmzZtdP7552vdunU1nst0795dAwYMqPQYXq9X7dq104UXXlh+2xNPPKGTTjpJLVq0UGRkpI455phKW0Q4HA4VFBTo9ddfL3/sK664QtLB95R67rnn1K1bN4WHh6tt27YaMWKEcnNzKxzTv39/de/eXStXrtSAAQMUFRWldu3a6bHHHqvu03tIB9trq6rMNZnT+WvOe0BBQYHuuOMOxcfHKzw8XGlpaXriiSdkWVaF42oy1wPsjKYUYAMHfgk2a9as/LYVK1bohBNO0KpVqzR69GhNnjxZjRo10tChQ/Xhhx8aSuprSp1//vkKCwvT3//+d61du7ZSE2H//v3q27evnnnmGQ0ePFhPPfWUrr/+eq1evVpbtmxRly5dNH78eEnSddddpzfffFNvvvmmTjnllEqPFxoaqvPOO08fffSRSktLK9z30UcfqaSkRJdccokkX5Pq5ZdfVv/+/TVp0iQ9+OCD2rVrl4YMGVL+Pv7Y2FhNnz5dknTeeeeVP/b5559/0JqvueYa3X///Tr66KP15JNPql+/fpo4cWL54/5ZZmamLrzwQg0aNEiTJ09Ws2bNdMUVV2jFihXVen6Li4u1e/fuCh9/bX5V13fffacbb7xRl1xyiR577DEVFxfrggsu0J49e8qPyc7O1vHHH6+3335bw4YN09NPP63LLrtM33zzjQoLC3XKKafolltukSTdc8895c9Xly5dqnzMoqIi9e/fX2+++aYuvfRSPf7444qJidEVV1yhp556qtLxs2bN0uOPP65//etfevjhh7Vhwwadf/75crvdtaoZABB4Pv30U3Xq1KlWe1nu3bu30u/VvzZD6tPMmTM1YMAAxcXF6ZJLLtG+ffv0n//8p8IxHo9HZ599tsaNG6djjjlGkydP1q233qq8vDwtX768xnOZYcOGaeHCheX7aB3w3XffKTs7u8J85qmnntJRRx2l8ePHa8KECQoJCdFFF12k//73v+XHvPnmmwoPD1ffvn3LH/tf//rXQWt+8MEHNWLECLVt21aTJ0/WBRdcoBdeeEGDBw+u9Ps+JydHp59+utLT0zV58mR17txZd999tz7//PNqPb9ut7vSz7uwsLBa3/tX1ZnT+XPOK0mWZencc8/Vk08+qdNPP11TpkxRWlqaRo0apZEjR1Y6vjpzPcD2LAD1ZsaMGZYka/78+dauXbuszZs3W++//74VGxtrhYeHW5s3by4/9rTTTrN69OhhFRcXl9/m9Xqtk046yUpJSSm/7euvv7YkWe+9995BH1eSNWLEiCrve++99yxJ1tdff33Y/D///LMlyZo3b155nvbt21u33nprhePuv/9+S5I1e/bsSufwer2WZVnW4sWLLUnWjBkzKh0zfPhwKyEhofzrOXPmWJKs//znPxWOO/PMM63k5OTyr8vKyqySkpIKx+Tk5FitW7e2rrrqqvLbdu3aZUmyHnjggUqP/cADD1h//qdxyZIlliTrmmuuqXDcnXfeaUmyvvrqq/LbEhISLEnWwoULy2/buXOnFR4ebt1xxx2VHuuvJFX5ceA5+uvzcrDMB84VFhZmZWZmlt+2dOlSS5L1zDPPlN92+eWXW06n01q8eHGl8x74WR3qz0i/fv2sfv36lX89depUS5L11ltvld9WWlpqnXjiiVbjxo2t/Px8y7IsKysry5JktWjRwtq7d2/5sR9//HGVP2sAQHDKy8uzJFlDhw6tdF9OTo61a9eu8o/CwsLy+w78bqzqIy0trfy4A7+PHn/88YNmSEhIsM4666wq7zvUfOavduzYYYWEhFgvvfRS+W0nnXSS9be//a3Cca+++qolyZoyZUqlcxz43VyTuUxGRkal3/+WZVk33nij1bhx4wrP25/HluX7Hd69e3fr1FNPrXB7o0aNrOHDh1d67ANz3aysLMuyfPOgsLAwa/DgwZbH4yk/btq0aZYk69VXXy2/rV+/fpYk64033ii/raSkxIqLi7MuuOCCSo/1VwfmYX/9OPAcVTVfqirzn891uDmdv+e8H330kSXJevjhhyscd+GFF1oOh6PCvK66cz3A7lgpBRgwcOBAxcbGKj4+XhdeeKEaNWqkTz75RO3bt5fke1Xvq6++0sUXX6x9+/aVv9KzZ88eDRkyRGvXrq311fqOxMyZM9W6devyJeAOh0PDhg3T22+/LY/HU37cBx98oPT0dJ133nmVzlGbSxSfeuqpatmypd55553y23JycjRv3jwNGzas/DaXy1W+x5HX69XevXtVVlamY489Vr/++muNH1fyXSFHUqVXpw5sovnnVw4lqWvXruVvx5R8K7PS0tK0fv36aj3e3/72N82bN6/Cx5AhQ2qVfeDAgerYsWP51z179lSTJk3Ks3i9Xn300Uc655xzyvc4+7Pa/Kw+++wzxcXF6e9//3v5baGhobrlllu0f/9+ffPNNxWOHzZsWIUVggeeu+o+XwCAwJafny9JVW5W3r9/f8XGxpZ/PPvss5WO+eCDDyr9Xp0xY0ad567K22+/LafTqQsuuKD8tr///e/6/PPPlZOTU37bBx98oJYtW+rmm2+udI7a/G5OTU1Vr169KsyjPB6P3n//fZ1zzjkVLqjy53FOTo7y8vLUt2/fWs+j5s+fr9LSUt12220V9vy69tpr1aRJk0rzqMaNG1fYEyosLEzHH398tecFvXv3rvTz/usFZKqrOnM6f895P/vsM7lcrvJV6gfccccdsiyr0oqxw831gIaAjc4BA5599lmlpqYqLy9Pr776qhYuXFjhKiWZmZmyLEtjx47V2LFjqzzHzp071a5dO79lOtwvTo/Ho7ffflsDBgxQVlZW+e29e/fW5MmT9eWXX2rw4MGSfHtj/XnCdaRCQkJ0wQUXaNasWSopKVF4eLhmz54tt9tdoSklSa+//romT56s1atXV1gSnpSUVKvH3rhxo5xOZ6UrDMbFxalp06bauHFjhds7dOhQ6RzNmjWrMNk8lPbt2/ttw9PDZdm1a5fy8/PVvXt3vzye5Hu+UlJSKm02e+Dtfod7vg40qKr7fAEAAlt0dLQk39uk/uqFF17Qvn37tGPHjoNubn3KKafUy0bn1WlAvPXWWzr++OO1Z8+e8rdXHXXUUSotLdV7772n6667TpJvHpWWlubXzcqHDRume+65R1u3blW7du20YMEC7dy5s9I86tNPP9XDDz+sJUuWVNg+oDYNFun/f++npaVVuD0sLEzJycmV5gXt27ev9FjNmjXTsmXLqvV4LVu2rLd5lOT/Oe/GjRvVtm3b8j/3B1R3HlVVRsDuaEoBBhx//PHlK1OGDh2qk08+Wf/4xz+UkZGhxo0bl18a9s477zzoKpm/NkkOJTw8XEVFRVXed+B99hEREYc8x1dffaVt27bp7bff1ttvv13p/pkzZ5Y3perCJZdcohdeeEGff/65hg4dqnfffVedO3dWenp6+TFvvfWWrrjiCg0dOlSjRo1Sq1at5HK5NHHixEqbyNdUdSdjB7vijPWXzSn9meHPq9TqK4u/NISMAABzYmJi1KZNGy1fvrzSfQf2mPrrptr+FhERccTzqD/vwZmSklLp/pkzZ5Y3perCsGHDNGbMGL333nu67bbb9O677yomJkann356+THffvutzj33XJ1yyil67rnn1KZNG4WGhmrGjBmaNWtWnWX7M+ZRNdMQMgKHQ1MKMOxA02TAgAGaNm2aRo8ereTkZEm+tz3549WehIQEZWRkVHnfgdsTEhIOeY6ZM2eqVatWVS6Nnz17tj788EM9//zzioyMVMeOHaucPP5ZTV9xO+WUU9SmTRu98847Ovnkk/XVV1/p3nvvrXDM+++/r+TkZM2ePbvC+R944IFaP3ZCQoK8Xq/Wrl1bYXPvHTt2KDc397DPmz81a9asys1Z//qqWXXFxsaqSZMmfv1ZJSQkaNmyZfJ6vRVWS61evbr8fgAAauKss87Syy+/rJ9++knHH398vT9+QkKCVq5cWeV9NZlHhYaG6s0336zUSPjuu+/09NNPa9OmTerQoYM6duyoH3/8UW63W6GhoVWer6bzqKSkJB1//PF65513dNNNN2n27NkaOnRohZX6H3zwgSIiIjRnzpwKt1f1dsfqPv6B5yUjI6N8fiv5rpiclZXlt1VN1XFgNXZubq6aNm1afntt51GS/D7nTUhI0Pz587Vv374Kq6WYRyGQsacUYAP9+/fX8ccfr6lTp6q4uFitWrVS//799cILL2jbtm2Vjt+1a1eNzn/mmWfqhx9+0C+//FLh9tzcXM2cOVO9evVSXFzcQb+/qKhIs2fP1tlnn60LL7yw0sdNN92kffv26ZNPPpEkXXDBBVq6dGmVVwk88MpNo0aNyjNUh9Pp1IUXXqj//Oc/evPNN1VWVlZpyfmBSd6fXx368ccf9f3331c4LioqqtqPfeaZZ0qSpk6dWuH2KVOmSPJNlOtLx44dlZeXV2EJ+7Zt22p9NUan06mhQ4fqP//5j37++edK99fmZ3XmmWdq+/btFfatKCsr0zPPPKPGjRurX79+tcoKAAhed911l6KionTVVVdpx44dle6v61UhZ555prZs2aKPPvqowu0lJSV6+eWX1apVKx199NGHPMfMmTPVt29fDRs2rNI8atSoUZKkf//735J886jdu3dr2rRplc5zoNaazGUOGDZsmH744Qe9+uqr2r17d5XzKIfDUWHl0IYNGyrVLfnmBtV57IEDByosLExPP/10hZ/TK6+8ory8vHqfR0nSwoULy28rKCjQ66+/Xutz+nvOe+aZZ8rj8VT62T/55JNyOBw644wzap0VsCtWSgE2MWrUKF100UV67bXXdP311+vZZ5/VySefrB49eujaa69VcnKyduzYoe+//15btmzR0qVLK3z/Bx98UP4qyp8NHz5co0eP1nvvvadTTjlF//rXv9S5c2dlZ2frtdde07Zt2w674ecnn3yiffv26dxzz63y/hNOOEGxsbGaOXOmhg0bplGjRun999/XRRddpKuuukrHHHOM9u7dq08++UTPP/+80tPT1bFjRzVt2lTPP/+8oqOj1ahRI/Xu3fuQez8NGzZMzzzzjB544AH16NGjwsolSTr77LM1e/ZsnXfeeTrrrLOUlZWl559/Xl27dq2wF0VkZKS6du2qd955R6mpqWrevLm6d+9e5d5K6enpGj58uF588UXl5uaqX79++umnn/T6669r6NCh5Zu+14dLLrlEd999t8477zzdcsstKiws1PTp05WamlrrDUgnTJiguXPnql+/frruuuvUpUsXbdu2Te+9956+++47NW3aVL169ZLL5dKkSZOUl5en8PBwnXrqqWrVqlWl81133XV64YUXdMUVV+iXX35RYmKi3n//ff3vf//T1KlTK+2RAADA4aSkpGjWrFn6+9//rrS0NF166aVKT0+XZVnKysrSrFmz5HQ6yy8Y82fvv/9+lZukDxo0SK1bty7/+ssvv1RxcXGl44YOHarrrrtOr776avm85qijjtKePXv0zjvvaPny5XrjjTfKL7RSlR9//FGZmZm66aabqry/Xbt2OvroozVz5kzdfffduvzyy/XGG29o5MiR+umnn9S3b18VFBRo/vz5uvHGG/W3v/2tRnOZAy6++GLdeeeduvPOO9W8efNKq5TOOussTZkyRaeffrr+8Y9/aOfOnXr22WfVqVOnSns6HXPMMZo/f76mTJmitm3bKikpqfztlH8WGxurMWPGaNy4cTr99NN17rnnKiMjQ88995yOO+64g+4FVhcGDx6sDh066Oqrr9aoUaPkcrn06quvKjY2Vps2barVOf095z3nnHM0YMAA3XvvvdqwYYPS09M1d+5cffzxx7rtttsqbGoOBIz6v+AfELwOXHJ28eLFle7zeDxWx44drY4dO1plZWWWZVnWunXrrMsvv9yKi4uzQkNDrXbt2llnn3229f7775d/39dff33QSx5Lsr799lvLsixry5Yt1jXXXGO1a9fOCgkJsZo3b26dffbZ1g8//HDY3Oecc44VERFhFRQUHPSYK664wgoNDbV2795tWZZl7dmzx7rpppusdu3aWWFhYVb79u2t4cOHl99vWZb18ccfW127drVCQkIqXCr3r5fHPcDr9Vrx8fFVXir3wP0TJkywEhISrPDwcOuoo46yPv300yrPt2jRIuuYY46xwsLCDnu5YLfbbY0bN85KSkqyQkNDrfj4eGvMmDFWcXFxheMOdsnofv36Wf369Tvoc3eAJGvEiBGHPGbu3LlW9+7drbCwMCstLc166623qsx8sHMlJCRUuoTzxo0brcsvv9yKjY21wsPDreTkZGvEiBFWSUlJ+TEvvfSSlZycbLlcLkuS9fXXXx+0th07dlhXXnml1bJlSyssLMzq0aNHpcsgH+oS3H/+eQAAcEBmZqZ1ww03WJ06dbIiIiKsyMhIq3Pnztb1119vLVmypMKxB343HuzjwO+xA7+PDvbx5ptvWpZlWTk5Odbtt99ePhdo0qSJNWDAAOvzzz8/bO6bb77ZkmStW7fuoMc8+OCDliRr6dKllmVZVmFhoXXvvfeWP15cXJx14YUXVjhHTeYyB/Tp08eSZF1zzTVV3v/KK69YKSkpVnh4uNW5c2drxowZVZ5v9erV1imnnGJFRkZaksrnFgfmullZWRWOnzZtmtW5c2crNDTUat26tXXDDTdYOTk5FY7p16+f1a1bt0qZDjYv/KuDzcP+7JdffrF69+5thYWFWR06dLCmTJlSZeaazOn8Pefdt2+fdfvtt1tt27a1QkNDrZSUFOvxxx+3vF5vheNqMtcD7MxhWeyCBgAAAAAAgPrFnlIAAAAAAACodzSlAAAAAAAAUO9oSgEAAAAAAKDe0ZQCAAAAAABAvaMpBQAAAAAAgHpHUwoAAAAAAAD1LsR0gLrm9XqVnZ2t6OhoORwO03EAAEADY1mW9u3bp7Zt28rpDI7X85g/AQCAI1Hd+VPAN6Wys7MVHx9vOgYAAGjgNm/erPbt25uOUS+YPwEAAH843Pwp4JtS0dHRknxPRJMmTfx+frfbrblz52rw4MEKDQ31+/ntJtjqlYKvZuoNbMFWrxR8NVOv/+Xn5ys+Pr58ThEM6nr+JPFnNdBRb+ALtpqpN/AFW811XW91508B35Q6sOS8SZMmddaUioqKUpMmTYLmD24w1SsFX83UG9iCrV4p+Gqm3roTTG9jq+v5k8Sf1UBHvYEv2Gqm3sAXbDXXV72Hmz8Fx8YIAAAAAAAAsBWaUgAAAAAAAKh3NKUAAAAAAABQ7wJ+T6nq8ng8crvdNf4+t9utkJAQFRcXy+Px1EEyezlUvWFhYUFzqWwAAFD7+ZPEHOqvQkND5XK5DCQDAMCcoG9KWZal7du3Kzc3t9bfHxcXp82bNwfFBqiHqtfpdCopKUlhYWGG0gEAgPpwpPOnA+dgDlVR06ZNFRcXFxTPBwAAEk2p8glVq1atFBUVVeNJgNfr1f79+9W4ceOgWCV0sHq9Xq+ys7O1bds2dejQgckUAAAB7EjnTxJzqD+zLEuFhYXauXOnJKlNmzYmIgIAUO+Cuinl8XjKJ1QtWrSo1Tm8Xq9KS0sVERERNBOqg9UbGxur7OxslZWVBcUlNAEACEb+mD9JzKH+KjIyUpK0c+dOtWrVirfyAQCCQuDPAA7hwB4IUVFRhpMEhgNv2wuGfSEAAAhWzJ/qzoHntLb7dAEA0NAEdVPqAN5q5h88jwAABA9+7/sfzykAINjQlAIAAAAAAEC9oykFAAAAAACAekdTqoG64oor5HA45HA4FBoaqqSkJN11110qLi6u1vdv2LBBDodDS5YsqXTfggUL5HA4qrzMc3JysqZPn36E6QEAAMzZvn27br31VnXq1EkRERFq3bq1+vTpo+nTp6uwsFCSlJiYWD7X+vPHo48+KunI5lJTp06tw+oAAGg4gvrqew3d6aefrhkzZsjtduuXX37R8OHD5XA4NGnSJNPRAAAAbGn9+vXq06ePmjZtqgkTJqhHjx4KDw/X77//rhdffFHt2rXTueeeK0kaP368rr322grfHx0dbSI2AAABiaZUAxYeHq64uDhJUnx8vAYOHKh58+Zp0qRJ8nq9mjRpkl588UVt375dqampGjt2rC688ELDqQEAAMy58cYbFRISop9//lmNGjUqvz05OVl/+9vfZFlW+W3R0dHlcy0AAOB/NKX+xLIsFbk9Nfoer9erolKPQkrL5HTW/t2QkaGuI7riyvLly7Vo0SIlJCRIkiZOnKi33npLzz//vFJSUrRw4UL985//VGxsrPr161frxwEAAKjEsiRPYc2+x+uVygqkMpd0BHMouaKkas6h9uzZo7lz52rChAkVGlJ/xhXwAACoPzSl/qTI7VHX++cYeeyV44coKqxmP45PP/1UjRs3VllZmUpKSuR0OjVt2jSVlJRowoQJmj9/vk488URJvlf/vvvuO73wwgs0pQAAgH95CqV3G9foW5ySmvrjsS/eL4VU3WD6q8zMTFmWpbS0tAq3t2zZsnxfzhEjRpRvhXD33Xfrvvvuq3Ds559/rr59+/ohOAAAMLrR+cKFC3XOOeeobdu2cjgc+uijjyrcb1mW7r//frVp00aRkZEaOHCg1q5dayasDQ0YMEBLlizRjz/+qOHDh+vKK6/UBRdcoMzMTBUWFmrQoEFq3Lhx+ccbb7yhdevWmY4NAACOEHMo//rpp5+0ZMkSdevWTSUlJeW3jxo1SkuWLKnwceyxxxpMCgBAYDG6UqqgoEDp6em66qqrdP7551e6/7HHHtPTTz+t119/XUlJSRo7dqyGDBmilStXKiIiwu95IkNdWjl+SI2+x+v1al/+PkU3iT7it+/VVKNGjdSpUydJ0quvvqr09HS98sor6t69uyTpv//9r9q1a1fhe8LDww973iZNmkiS8vLy1LRp0wr35ebmlt8PAADMsNscSq4o34qlGvB6vcrPz1eTJk2OaA4lV1S1D+3UqZMcDocyMjIq3J6cnCxJioyMrHB7y5Yty+daNXG4uVRMTEyNzwkAQCAy2pQ644wzdMYZZ1R5n2VZmjp1qu677z797W9/kyS98cYbat26tT766CNdcsklfs/jcDhq/BY6r9ersjCXosJCjmxCdYScTqfuuecejRw5UmvWrFF4eLg2bdpUq7fqpaSkyOl06pdffinfo0ryXa0mLy9PHTt29Gd0AABqrczjlcc6/HGBxm5zKDkc1X4LXTmvVwrx+L6vnuZQLVq00KBBgzRt2jTdfPPNB91X6kgdbC61YcMG5eXlKTU1tU4eFwCAavOUHP6YemDbPaWysrK0fft2DRw4sPy2mJgY9e7dW99///1BJ1QlJSUVll3n5+dLktxut9xud4Vj3W63LMuS1+uV1+utVc4DV2g5cJ76YllWpce84IILNGrUKD3//PO64447dPvtt6usrEwnn3yy8vLytGjRIkVHR2v48OHl37dq1apKubt166arr75ad9xxh5xOp3r06KHNmzdrzJgx6t27t3r37l1lvV6vV5Zlye12y+Wq+covuzrw5+avf34CFfUGtmCrVwq+moOt3ncWb9ZzS1yKSNymQd3a1MljNLTnsjZzqPqeP0nm5lDTpk1T3759deyxx+r+++9Xz5495XQ6tXjxYq1evVpHH310eZ78/HxlZ2dX+P6oqCg1adKkxnOpTZs26e6771bv3r11wgknVFlzIM2lgu3fomCrVwq+mqk38AVVzZ4Sueb0Us+STnIXHCs1auX3h6ju82jbptT27dslSa1bt65we+vWrcvvq8rEiRM1bty4SrfPnTtXUVEVl3eHhIQoLi5O+/fvV2lp6RHl3bdv3xF9f0253W6VlZWVTxoPuPrqq/XYY49pyZIlio6O1sSJE7VhwwbFxMQoPT1dt99+u/Lz87V/v2+J/T/+8Y9K516+fLnGjx+v5s2b6+6779bmzZvVqlUr9e/fX2PHjpXD4aiy3tLSUhUVFWnhwoUqKyurm8INmjdvnukI9Yp6A1uw1SsFX83BUG+pR3ryN5fy3A7N+2GJ3Bt/q5PHKSys4VXlDKvNHMrU/Emq/zlUbGysFixYoClTpmjMmDHKzs5WeHi40tLSNGLECF199dXKz8+X1+vVAw88oAceeKDC919xxRV68sknj2gudbCaA3EuFQz/Fv1ZsNUrBV/N1Bv4gqHmZPd/1KN0ndo49mr+gv/J4zj8Nj81Vd35k8M68DKVYQ6HQx9++KGGDh0qSVq0aJH69Omj7OxstWnz/698XnzxxXI4HHrnnXeqPE9Vr/TFx8dr9+7dlfZCKi4u1ubNm5WYmFjr/RUsy9K+ffsUHR0dFJcQPlS9xcXF2rBhg+Lj4+tmvwpD3G635s2bp0GDBik0NNR0nDpHvYEt2OqVgq/mYKr3lf9t0KNfrFGzMEtf39lfjSL9P6GSfHOJli1bKi8vz5b7KvpjDlXf8yeJOVRVAmkuFUz/FknBV68UfDVTb+ALmprd+Qr5rLMcpbu1JOwGpZ75RJ3UW935k21XSsXFxUmSduzYUWFCtWPHDvXq1eug3xceHl7lZt6hoaGVnmiPxyOHwyGn01nr/aAOLL0+cJ5Ad6h6nU6nHA5Hlc91IAjUug6GegNbsNUrBV/NgV7vvmK3XliYJUk6Pd6rRpHhdVZvQ3seazOHqu/5k8QcqiqBOJcKpFqqI9jqlYKvZuoNfAFf86qnpdLdshp30ibvaepWR/VW95y2nQEkJSUpLi5OX375Zflt+fn5+vHHH3XiiScaTAYAAEx7+dss5RS6ldyykY6LtcWib9tgDgUAAKpUvFNaPVmS5Ok+XpbD/Dolown279+vzMzM8q+zsrK0ZMkSNW/eXB06dNBtt92mhx9+WCkpKeWXM27btm358nQAABB89uwv0cvfrpck3XZaR1mbfjWcqP4xhwIAADW2/GGprEBqfqys9hdIv39uOpHZptTPP/+sAQMGlH89cuRISdLw4cP12muv6a677lJBQYGuu+465ebm6uSTT9YXX3zR4N9jDwAAam/6gnUqKPWoR7sYnd6ttT7fZDpR/WMOBQAAamT/einzed+416OSTfZzNNqU6t+/vw61z7rD4dD48eM1fvz4ekwFAADsKju3SG/8sFGSdOeQtKDYILsqzKEAAECNLLtf8rqluEFS3GmS2206kSQb7ylVnw5sPIkjY5MLOQIAAtjTX65VaZlXvZOa65SUlqbjBDXmT/7HcwoAqBM5S6UNs3zjXo+azfIX5ne1MigsLExOp1PZ2dmKjY1VWFhYjV9x9Xq9Ki0tVXFxcdBcOaaqei3L0q5du8qvGAMAgL+t37Vf7/2yRZJ01+nBu0rKNH/MnyTmUH9mWZZKS0u1a9cuOZ1OhYWFGUoJAAhIS8ZIsqQOw6TmR5tOU0FQN6WcTqeSkpK0bds2ZWdn1+oclmWpqKhIkZGRQTE5PlS9DodD7du3l8vlMpQOABDIpsxbI4/X0mmdW+mYhOam4wQtf8yfJOZQVYmKilKHDh2CokkHAKgnO76Rtn0uOUKk9IdNp6kkqJtSku/Vvg4dOqisrEwej6fG3+92u7Vw4UKdcsopQbFC6FD1hoaG0pACANSJ5Vvz9OmybXI4fHtJwawjnT9JzKH+yuVyKSQkJCgadACAemJZ0pK7feNO10rRnczmqULQN6Uklb/lrDYTIpfLpbKyMkVERATFhCrY6gUA2MMTczMkSef0bKsubZoYTgPpyOZPUvDNKYKtXgCADWz5SNrzo+SKkrrfbzpNlVgbDAAAbO2nrL1akLFLIU6HRg5KNR0HAADA/rxl0tJ7fOPOt0uRcWbzHARNKQAAYFuWZenxOaslSRcfF6/Elo0MJwIAAGgAsl6X8ldL4S2kLqNMpzkomlIAAMC2FmTs0uINOQoPceqWU1NMxwEAALC/siLp9wd94673SGExRuMcCk0pAABgS16vpcfn+PaSGn5SouJiIgwnAgAAaADWPisVbpGi4qXUG02nOSSaUgAAwJb++/s2rdyWr8bhIbqhX0fTcQAAAOyvNFdaMcE37jlectn7RT2aUgAAwHbcHq+mzFsjSbq2b7KaNQoznAgAAKABWPmYVJojxXSTEi8zneawaEoBAADbef+XLcraXaAWjcJ0dd8k03EAAADsrzBbypjqG6dPkJwuo3Gqg6YUAACwlWK3R0/NXytJuqF/RzUODzGcCAAAoAFYPl7yFEmxfaR255hOUy00pQAAgK289cNGbc8vVtuYCP3zhATTcQAAAOwvf4207mXfOP1RyeEwm6eaaEoBAADb2Ffs1rNfZ0qSbh2YoohQ+y87BwAAMG7ZfZLlkdqeLbU62XSaaqMpBQAAbOOV77KUU+hWcstGuuDo9qbjAAAA2N+en6VN70lySL0mmE5TIzSlAACALewtKNXL32ZJkkYOTlWIi2kKAADAYS0Z7fuc+E+paQ+zWWqI2R4AALCF6Qsytb+kTN3aNtGZ3duYjgMAAGB/2+ZJO76UnGFSz/Gm09QYTSkAAGDctrwivf79RknSqCFpcjobxuacAAAAxlje/18llXKD1DjRaJzaoCkFAACMe/rLTJWWeXV8YnP1S401HQcAAMD+Nr0n5fwqhURL3e41naZWaEoBAACjsnYX6N2fN0uSRp2eJkcDuYQxAACAMV63tPQ+37jLnVJEw3xRj6YUAAAwasq8NfJ4LQ1Ii9Vxic1NxwEAALC/dS9L+zOliFZS59tNp6k1mlIAAMCYldn5+s/SbEnSnUPSDKcBAABoAMoKpN//2NS821gpNNpsniNAUwoAABjzxNwMSdI56W3VrW2M4TQAAAANwOqpUvF2qVGS1Ok602mOCE0pAABgxM8b9uqr1Tvlcjo0clCq6TgAAAD2V7JHWvWYb9zzIckVZjbPEaIpBQAA6p1lWXrsC98qqYuPba+klo0MJwIAAGgAVkyQ3PlS03Qp8e+m0xwxmlIAAKDefbNml37asFdhIU7dclqK6TgAAAD2V7BJWjPNN+41UXI0/JZOw68AAAA0KF6vpcfn+FZJXX5CgtrERBpOBAAA0AD8/oDkLZVa9ZPanG46jV/QlAIAAPXqs+XbtCI7X43DQ3TjgE6m4wAAANhf7gop6w3fuNejksNhNo+f0JQCAAD1pszj1ZS5ayRJ1/RNUvNGDXtzTgAAgHqx9B7J8krtz5NanmA6jd/QlAIAAPXmg1+3aP3uAjWLCtXVJyeZjgMAAGB/uxZJWz/x7SGV/ojpNH5FUwoAANSLYrdHU+evlSSNGNBJ0RGhhhMBAADYnGVJS0b7xslXSjFdzObxM5pSAACgXsz8cZO25RWrTUyE/nlCguk4AAAA9pf9mbTrW8kVIfV40HQav6MpBQAA6tz+kjI9+3WmJOmW01IUEeoynAgAAMDmvB5p6RjfOPVmKaq92Tx1gKYUAACoc698m6W9BaVKatlIFx0TeBMqAAAAv9s4S8r9XQptKnUdbTpNnaApBQAA6lROQale+na9JGnkoFSFuJh+AAAAHJKnRFo21jfuercU3txsnjrCrBAAANSp6d+s0/6SMnVt00Rn9WhjOg4AAID9rX1eKtgoRbaV0m4xnabO0JQCAAB1ZntesV5ftEGSNGpImpxOh9lAAAAAdufOl1Y87Bv3eEAKiTKbpw7RlAIAAHXm6a/WqqTMq+MSm6l/WqzpOAAAAPa3arJUsluKTpWSrzKdpk7RlAIAAHViw+4Cvbt4syRp1JDOcjhYJQUAAHBIRTuk1ZN94/RHJGeI2Tx1jKYUAACoE0/OX6Myr6X+abE6PikwN+cEAADwqxWPSGUFUvPjpPgLTKepczSlAACA363alq9PlmZLku4cnGY4DQAAQAOwf72U+bxv3OtRKQhWmdOUAgAAfjd5boYsSzqrZxt1bxdjOg4AAID9Lbtf8rqluMFS3Kmm09QLmlIAAMCvftm4V/NX7ZTL6dAdg1JNxwEAALC/nKXShlm+ca+JZrPUI5pSAADAbyzL0mNfZEiSLjqmvZJjGxtOBAAA0AAsGSPJkjoMk5ofbTpNvaEpBQAA/Obbtbv1Y9ZehYU4dctpKabjAAAA2N+Ob6Rtn0uOECn9YdNp6hVNKQAA4BeWZenxOb5VUpedkKC2TSMNJwIAALA5y5KW3O0bd7pWiu5kNk89oykFAAD84vPl2/X71jw1CnPpxv4dTccBAACwvy0fSXt+lFxRUvf7TaepdzSlAADAESvzeDV5rm+V1NV9k9WicbjhRAAAADbnLZOW3uMbd75diowzm8cAmlIAAOCIzf5tq9btKlDTqFBd0zfJdBwAAAD7y3pdyl8thbeQuowyncYImlIAAOCIlJR59NT8tZKkG/t3VJOIUMOJAAAAbK6sSFr2gG/c9R4pLMZsHkNoSgEAgCMy68dN2ppbpLgmEbr8xETTcQAAAOxvzTSpaKsUFS+l3mg6jTE0pQAAQK0VlJRp2leZkqRbTktRRKjLcCIAAACbK82VVk70jXuMk1wRRuOYRFMKAADU2oz/ZWlPQakSW0TpomPbm44DAABgfysnSaU5UkxXKely02mMoikFAABqJbewVC8sXC9Jun1QqkJdTCsAAAAOqTBbynjKN06fIDmDe5U5s0cAAFAr079Zp33FZeocF61zerY1HQcAAMD+lo+TPEVSy5OkdueaTmMcTSkAAFBjO/KL9fqiDZKkUUPS5HQ6zAYCAACwu/w10rpXfONej0oO5k80pQAAQI0989VaFbu9OiahmU7t3Mp0HAAAAPtbeq9keaS2Z0mt+ppOYws0pQAAQI1s3FOgt3/aLMm3SsrBq3wAAACHtmextPl9SQ6p10TTaWyDphQAAKiRqfPXqsxr6ZTUWJ2Q3MJ0HAAAAPtbMsb3OfGfUtMeZrPYCE0pAABQbau35+ujJVslSaMGpxlOAwAA0ABsmyft+FJyhkk9x5tOYys0pQAAQLU9MWeNLEs6s0ecerSPMR0HAADA3iyvtGS0b5xyg9Q40Wgcu6EpBQAAquWXjTmav2qHnA5p5CBWSQEAABzWpveknF+lkGip272m09gOTSkAAHBYlmXp8TmrJUkXHtNenVo1NpwIAADA5rxuael9vnGXO6WIWLN5bIimFAAAOKzvMnfrh/V7FeZy6taBqabjAAAA2N+6l6X9mVJEK6nzSNNpbImmFAAAOCTfKqkMSdKlJ3RQu6aRhhMBAADYXFmB9Psfm5p3GyuFssq8KjSlAADAIc1ZsV3LtuQpKsylEQM6mY4DAABgf6unSsXbpcbJUqfrTKexLZpSAADgoDxeS0/MXSNJuvrkJLVsHG44EQAAgM2V7JFWPeYb93xIcoWZzWNjNKUAAMBBzf51izJ37lfTqFBde0qy6TgAAAD2t2Ki5M6XmvWSEi4xncbWaEoBAIAqlZR5NHX+WknSDf06qklEqOFEAAAANlewSVozzTdOnyg5aLscCs8OAACo0r9/3KStuUVq3SRcw09KNB0HAADA/n5/UPKWSK36S22GmE5jezSlAABAJYWlZZr2daYk6eZTUxQR6jKcCAAAwObyVkpZr/vGvR6VHA6zeRoAmlIAAKCSGf/boN37S5XQIkrDjos3HQcAAMD+lt4jWV4p/nypZW/TaRoEmlIAAKCC3MJSPf/NOknSyEGpCnUxXQAAADikXYukLR/79pDq+YjpNA0Gs0wAAFDBCwvXa19xmTrHReucnm1NxwEAALA3y5KWjvGNk6+SYjqbzdOA0JQCAADlduYXa8b/siRJdw5Ok9PJXggAAACHlP2ZtHOh5IqQejxgOk2DQlMKAACUe+arTBW7vTq6Q1Od1qWV6TgAAAD25vX8/yqp1JulqPZm8zQwNKUAAIAkadOeQv37p02SpFFDOsvBFWMAAAAObeMsKfd3KbSp1HW06TQNDk0pAAAgSZo6f43KvJb6prTUiR1bmI4DAABgb54SadlY37jr3VJ4c7N5GiCaUgAAQGt27NOHS7ZKkkYNSTOcBkfK4/Fo7NixSkpKUmRkpDp27KiHHnpIlmWZjgYAQOBY+7xUsFGKbCul3WI6TYNk66YUEyoAAOrHE3MyZFnSGd3j1LN9U9NxcIQmTZqk6dOna9q0aVq1apUmTZqkxx57TM8884zpaAAABAZ3vrTiYd+4xwNSSJTZPA1UiOkAh3JgQvX666+rW7du+vnnn3XllVcqJiZGt9xCFxIAAH/4bVOO5q7cIadDumNwquk48INFixbpb3/7m8466yxJUmJiov7973/rp59+MpwMAIAAsWqyVLJbik6Vkq8ynabBsnVTigkVAAB174m5GZKk849ur06tog2ngT+cdNJJevHFF7VmzRqlpqZq6dKl+u677zRlypQqjy8pKVFJSUn51/n5+ZIkt9stt9tdJxkPnLeuzm831BvYgq1eKfhqpt7AV6Oai3coZNVkOSSVdR8ny2NJnob1XNX1z7i657V1U6qmEyoAAFAz/8vcrf9l7lGoy6HbBqaYjgM/GT16tPLz89W5c2e5XC55PB498sgjuvTSS6s8fuLEiRo3blyl2+fOnauoqLp9O8K8efPq9Px2Q72BLdjqlYKvZuoNfNWpuUfJi0r2FCjHmaKFSyOkZZ/VQ7K6UVc/48LCwmodZ+umVE0nVFL9v9IXbB3kYKtXCr6aqTewBVu9UvDVXJN6LcvSpC9WSZIuOS5erRuHNrjnqT5+vg3tOZGkd999VzNnztSsWbPUrVs3LVmyRLfddpvatm2r4cOHVzp+zJgxGjlyZPnX+fn5io+P1+DBg9WkSZM6yeh2uzVv3jwNGjRIoaGhdfIYdkK9gS3Y6pWCr2bqDXzVrnn/eoV84WvkRPedpjNbDainhP5V1z/jA72Yw7F1U6qmEyrJ3Ct9wdZBDrZ6peCrmXoDW7DVKwVfzdWpd9leh5ZtcSnMaSnVvV6ffba+HpLVjbr8+Vb3lT47GTVqlEaPHq1LLrlEktSjRw9t3LhREydOrHIOFR4ervDw8Eq3h4aG1vl/RurjMeyEegNbsNUrBV/N1Bv4DlvzyvGS5ZbiBimk3eD6C1ZH6upnXN1z2ropVdMJlVT/r/QFWwc52OqVgq9m6g1swVavFHw1V7dej9fSM9MWSSrQ1Scn65JBDfOte/Xx863uK312UlhYKKez4kWWXS6XvF6voUQAAASAnCXSxlm+ca9HjUYJFLZuStVmQmXqlb5g6yAHW71S8NVMvYEt2OqVgq/mw9X7n1+3KHNXgWIiQ3X9gJQG/9zU5c+3IT4355xzjh555BF16NBB3bp102+//aYpU6boqqu4OhAAALW2ZIzvc4dhUvOjzWYJELZuSjGhAgDA/0rLvHpy/hpJ0vX9OiomsuE1XXBozzzzjMaOHasbb7xRO3fuVNu2bfWvf/1L999/v+loAAA0TDsWSNu+kBwhUvrDptMEDFs3pZhQAQDgf28v3qTNe4sUGx2uK05KNB0HdSA6OlpTp07V1KlTTUcBAKDhsyxpyWjfuNO1UnQns3kCiK2bUkyoAADwr8LSMj39ZaYk6ZZTOykyzGU4EQAAgM1t+Uja86PkipK6s0jGn5yHPwQAAASK1xZt0O79JYpvHqlhx3UwHQcAAMDevGXS0nt84863S5FxZvMEGJpSAAAEibxCt55fsE6SNHJQqsJCmAYAAAAcUtbrUv5qKbyF1GWU6TQBh9koAABB4oWF65RfXKa01tE6N72d6TgAAAD2VlYkLXvAN+56jxQWYzZPAKIpBQBAENi5r1gz/rdBknTH4FS5nA6zgQAAAOxuzTSpaKsUFS+l3mg6TUCiKQUAQBB49qtMFbk96hXfVIO6tjYdBwAAwN5Kc6WVE33jnuMlV4TROIGKphQAAAFu895CzfppkyTprtPT5HCwSgoAAOCQVk6SSnOkmG5S4mWm0wQsmlIAAAS4J+evkdtj6eROLXVSx5am4wAAANhbYbaU8ZRvnD5BcrrM5glgNKUAAAhga3bs04e/bZUkjRqSZjgNAABAA7B8vOQpkmL7SO3OMZ0moNGUAgAggE2emyHLkk7vFqf0+Kam4wAAANjbvjXSupd94/RHJbY9qFM0pQAACFBLNudqzoodcjp8V9wDAADAobmWPyBZHqnt2VKrk03HCXg0pQAACFBPzMmQJJ13VHultI42nAYAAMDemnoy5dzygSSH1GuC6ThBgaYUAAAB6Pv1e/Rd5m6Fuhy6bWCK6TgAAAC217X0Dd8g6TKpaQ+zYYIETSkAAAKMZUmT52VKkv5xfAfFN48ynAgAAMDeHDvmK9a7TJYzTOoxznScoEFTCgCAALM8x6GlW/IUGerSiFM7mY4DAABgb5ZXrmX3SpK8Ha+XGieazRNEaEoBABBAPF5Ln27y/Xq/sk+iWkVHGE4EAABgc5vekyP3N7kVKW+X0abTBBWaUgAABJBPl23T9iKHmkSE6F+ndDQdBwAAwN68bmnpfZKkzNChUnhLs3mCDE0pAAACRGmZV1O/WidJuvbkRMVEhRpOBAAAYHPrXpb2Z8oKb6V1oeeaThN0aEoBABAg3vl5s7bkFCk61NLlJ3YwHQcAAMDeygqk38dLkrxd75HHEWk4UPChKQUAQAAoKvXomS/XSpKGtPcqKizEcCIAAACbWz1VKt4uNU6WN/ka02mCEk0pAAACwGuLNmjnvhK1bxqhE1tZpuMAAADYW8keadVjvnHPhyRnmNk8QYqmFAAADVxekVvPf+PbS+qWUzsphN/uAAAAh7ZiguTOl5r1khIuMZ0maDFtBQCggXtp4XrlFbmV0qqxzk1vYzoOAACAvRVsktZM843TJ0oOWiOm8MwDANCA7dpXolf/lyVJumNwmlxOh+FEAAAANvf7g5K3VGrVX2ozxHSaoEZTCgCABuzZrzNVWOpRevsYDenW2nQcAAAAe8tbKWW97hv3elRy8IKeSTSlAABooLbkFGrWj5skSXed3lkOJlUAAACHtvQeyfJK8edLLXubThP0aEoBANBATZ2/VqUer07q2EJ9OrU0HQcAAMDedi2Stnzs20Oq5yOm00A0pQAAaJAyd+7T7F+3SJJGDUkznAYAAMDmLEtaMto3Tr5KiulsNg8k0ZQCAKBBmjx3jbyWNLhrax3VoZnpOAAAAPaW/Zm061vJFSH1eMB0GvyBphQAAA3Msi25+nz5djkcvivuAQAA4BC8HmnpGN849WYpqr3ZPChHUwoAgAbm8TkZkqTzerVTWly04TQAAAA2t3GWlPu7FNpU6jradBr8CU0pAAAakEXrduvbtbsV6nLo9kGppuMAAADYm6dEWjbWN+56txTe3GweVEBTCgCABsKyrPJVUpcc10HxzaMMJwIAALC5tc9LBRulyLZS2i2m0+AvaEoBANBAzF+1U79tylVEqFM3n9rJdBwAAAB7c+dLKx72jXs8IIXwgp7d0JQCAKAB8HotPfHHKqkr+ySpVZMIw4kAAABsbtVkqWS3FJ0qJV9lOg2qQFMKAIAG4JOl2crYsU/RESG6/pSOpuMAAADYW9EOafVk3zj9EckZYjYPqkRTCgAAm3N7vJoyb40k6fp+HRUTFWo4EQAAgM2teFgqK5CaHyvFX2A6DQ6CphQAADb3zuLN2rS3UC0bh+nKPomm4wAAANjb/vVS5gu+ca9HJYfDbB4cFE0pAABsrKjUo6e/XCtJumlAJ0WFsfQcAADgkJaOlbxuKW6QFHea6TQ4BJpSAADY2Bvfb9DOfSVq1zRSf+/dwXQcAAAAe8tZIm2c5Rv3etRoFBweTSkAAGwqv9it6d+skyTdPihV4SEuw4kAAABsbskY3+cOw6TmR5vNgsOiKQUAgE29vHC9cgvd6tSqsc47qp3pOAAAAPa2Y4G07QvJESKlP2w6DaqBphQAADa0e3+JXv4uS5J05+BUuZxs0AkAAHBQliUtGe0bd7pWiu5kNg+qhaYUAAA29OzXmSos9ahn+xgN6RZnOg4AAIC9bflI2vOj5IqSut9vOg2qiaYUAAA2szW3SDN/2CRJGjUkTQ4uYwwAAHBw3jJp6T2+cefbpUhe0GsoaEoBAGAzT81fo1KPVycmt9DJnVqajgMAAGBvWa9L+aul8BZSl1Gm06AGaEoBAGAjmTv36/1ftkiSRp3OKikAAIBDKiuSlj3gG3e9RwqLMZsHNUJTCgAAG3ly3hp5LWlgl9Y6ukMz03EAAADsbc00qWirFBUvpd5oOg1qiKYUAAA2sXxrnv77+zY5HNKdQ1JNxwEAALC30lxp5UTfuOd4yRVhNA5qjqYUAAA28ficDEnS39LbqnNcE8NpAAAAbG7lJKk0R4rpJiVeZjoNaoGmFAAANvDj+j36Zs0uhTgdun0Qq6QAAAAOqTBbynjKN06fIDldZvOgVmhKAQBgmGVZeuyPVVLDjotXQotGhhMBAADY3PJxkqdIiu0jtTvHdBrUEk0pAAAM+zpjp37ZmKOIUKduOS3FdBwAAAB7y8+Q1r3iG6c/KnG14gaLphQAAAZ5vZYen7NGkjT8pES1bsIGnQAAAIe09D7J8khtz5ZanWw6DY4ATSkAAAz6z7JsrdqWr+jwEN3Qr6PpOAAAAPa2Z7G0+X1JDqnXBNNpcIRoSgEAYIjb49WUeb5VUtedkqymUWGGEwEAANjckjG+z0mXSU17mM2CI0ZTCgAAQ977eYs27ilUy8ZhuurkJNNxAAAA7G3bPGnHl5IzTOoxznQa+AFNKQAADCh2e/TUl75VUiMGdFKj8BDDiQAAAGzM8kpLRvvGKTdIjRONxoF/0JQCAMCAN77foB35JWrXNFL/6N3BdBwAAAB72/SelPOrFBItdbvXdBr4CU0pAADq2b5it55bsE6SdOvAFIWHuAwnAgAAsDGv23fFPUnqcqcUEWs2D/yGphQAAPXspW+zlFvoVsfYRjr/qHam4wAAANjbupel/ZlSRCup80jTaeBHNKUAAKhHe/aX6JVv10uS7hicphAXv4oBAAAOqqxA+n28b9xtrBTa2Gwe+BUzYQAA6tFzC9apoNSjHu1idEb3ONNxAAAA7G31VKl4u9Q4Wep0nek08DOaUgAA1JPs3CK9+cNGSdKoIWlyOByGEwEAANhYyR5p1WO+cc+HJFeY2TzwO5pSAADUk6fmr1VpmVe9k5qrb0pL03EAAADsbcUEyZ0vNeslJVxiOg3qAE0pAADqwbpd+/X+r1skSXed3plVUgAAAIdSsEla86xvnD5RctC+CET8VAEAqAdT5q2Rx2tpYJdWOiahmek4AAAA9vb7g5K3RGrVX2ozxHQa1BGaUgAA1LHlW/P032Xb5HD4rrgHAACAQ8hbKWW97hv3elRihXnAoikFAEAde2JuhiTp3PS26tKmieE0AAAANrf0HsnySvHnSy17m06DOkRTCgCAOvRT1l4tyNilEKdDtw9MNR0HAADA3nYtkrZ87NtDqucjptOgjtGUAgCgjliWpcfnrJYkXXxcvBJbNjKcCAAAwMYsS1oy2jdOvkqK6Ww2D+ocTSkAAOrIgoxdWrwhR+EhTt1yaorpOAAAAPaW/V9p17eSK0Lq8aDpNKgHNKUAAKgDXq+lx+b49pIaflKi4mIiDCcCAACwMa9HWjLGN069RYpqZzYP6gVNKQAA6sB/f9+mVdvyFR0eohv6dTQdBwAAwN42zJTylkuhTaVuo02nQT2hKQUAgJ+5PV5NmbdGknTtKclq1ijMcCIAAAAb85RIv9/vG3cbLYU1M5sH9YamFAAAfvb+L1uUtbtALRqF6aqTk0zHAQAAsLe106WCjVJkWyn1ZtNpUI9oSgEA4EfFbo+emr9WknTjgE5qHB5iOBEAAICNufOlFY/4xj0elEKijMZB/aIpBQCAH731w0Ztzy9W25gIXdq7g+k4AAAA9rZqslSyW4pOlZKvNJ0G9YymFAAAfrK/pEzPfp0pSbp1YIoiQl2GEwEAANhY0Q5p9WTfOP0RyckK82BDUwoAAD95+dv1yil0K7llI11wdHvTcRDktm7dqn/+859q0aKFIiMj1aNHD/3888+mYwEA8P9WPCyVFUjNj5PiLzCdBgbQhgQAwA/2FpTq5W+zJEkjB6cqxMXrPjAnJydHffr00YABA/T5558rNjZWa9euVbNmXM0IAGAT+9dLmS/4xr0elRwOs3lgBE0pAAD84LmvM7W/pEzd2jbRmd3bmI6DIDdp0iTFx8drxowZ5bclJXElSACAjSwdK3ndUtxgKe5U02lgiO1fxmXpOQDA7rblFemNHzZKkkYNSZPTySt9MOuTTz7Rscceq4suukitWrXSUUcdpZdeesl0LAAAfHKWSBtn+ca9HjUaBWbZeqUUS88BAA3B01+uVWmZV8cnNle/1FjTcQCtX79e06dP18iRI3XPPfdo8eLFuuWWWxQWFqbhw4dXOr6kpEQlJSXlX+fn50uS3G633G53nWQ8cN66Or/dUG9gC7Z6peCrmXr9y/XbGDkleeMvlie6u2SD55Wfcd2c/3Bs3ZRi6TkAwO7W79qvd3/eIkm66/Q0OdgPATbg9Xp17LHHasKECZKko446SsuXL9fzzz9fZVNq4sSJGjduXKXb586dq6ioqDrNOm/evDo9v91Qb2ALtnql4KuZeo9cC89ynVz8hbxy6avdA1Tw2Wd+f4wjwc/YPwoLC6t1nK2bUp988omGDBmiiy66SN98843atWunG2+8Uddee63paAAASJKenL9WHq+lUzu30rGJzU3HASRJbdq0UdeuXSvc1qVLF33wwQdVHj9mzBiNHDmy/Ov8/HzFx8dr8ODBatKkSZ1kdLvdmjdvngYNGqTQ0NA6eQw7od7AFmz1SsFXM/X6iWXJ9dUEqViyOl6rfkdf7b9zHyF+xv51YNX14di6KVXTpedS/S8/Z4lf4Au2mqk3sAVbvVLd1rxyW77+szRbknTbqR1t8bwG28+4PuptiM9lnz59lJGRUeG2NWvWKCEhocrjw8PDFR4eXun20NDQOp+Y18dj2An1BrZgq1cKvpqp9wht/lDa+5PkipKr5wNy2fC55Gfsv/NWh62bUjVdei6ZW37OEr/AF2w1U29gC7Z6pbqp+YVVTklOHd3Cq6zfvlXWb35/iFoLtp9xXdZb3eXndnL77bfrpJNO0oQJE3TxxRfrp59+0osvvqgXX3zRdDQAQLDylklL7/GNO4+UIuPM5oEt2LopVdOl51L9Lz9niV/gC7aaqTewBVu9Ut3V/PPGHK38frFcTocmXdZXiS0a+e3cRyLYfsb1UW91l5/byXHHHacPP/xQY8aM0fjx45WUlKSpU6fq0ksvNR0NABCssl6X8ldL4S2kLneaTgObsHVTqqZLzyVzy89Z4hf4gq1m6g1swVav5N+aLcvSk/PXSZIuPjZeKXFN/XJefwq2n3Fd1ttQn8ezzz5bZ599tukYAABIZUXSsgd84673SGExZvPANpymAxzK7bffrh9++EETJkxQZmamZs2apRdffFEjRowwHQ0AEMQWrNmlnzbsVViIU7eelmI6DgAAgL2tmSYVbZWi4qXUG02ngY3Yuil1YOn5v//9b3Xv3l0PPfQQS88BAEZ5vZaemONbxTv8xATFxUQYTgQAAGBjpbnSyom+cc/xkou5E/6frd++J7H0HABgL58t36YV2flqHB6iG/p3Mh0HAADA3lZOkkpzpJhuUuJlptPAZmy9UgoAADsp83g1Ze4aSdI1fZPUvFGY4UQAAAA2VpgtZTzlG6dPkJwus3lgOzSlAACopg9+3aL1uwvUvFGYrumbbDoOAACAvS0fJ3mKpNg+UrtzTKeBDdGUAgCgGordHk2dv1aSdGP/jmocbvt3wAMAAJiTnyGte8U3Tp8oORxm88CWaEoBAFANM3/cpG15xWoTE6F/npBgOg4AAIC9Lb1PsjxS27OlVn1Np4FN0ZQCAOAw9peU6dmvMyVJt56WoohQ9kMAAAA4qD2Lpc3vS3JIvSaYTgMboykFAMBhvPJtlvYWlCqpZSNdeEx703EAAADsy7KkJaN946TLpKY9zOaBrdGUAgDgEHIKSvXSt+slSSMHpSrExa9OAACAg9o+T9rxleQMk3qON50GNsfMGgCAQ5j+zTrtLylT1zZNdFaPNqbjAAAA2Jfl/f9VUik3So3YhxOHRlMKAICD2J5XrNcXbZAkjRqSJqeTq8YAAAAc1MZ3pZzfpJBoqds9ptOgAaApBQDAQTz91VqVlHl1XGIz9U+LNR0HAADAvrxuadl9vnGXO6UI5k44PJpSAABUYcPuAr27eLMkadSQznI4WCUFAABwUJkvSfvXSRGtpM4jTadBA0FTCgCAKjw5f43KvJb6p8Xq+KTmpuMAAADYl3u/tPyPTc27jZVCG5vNgwaDphQAAH+xalu+PlmaLUm6c3Ca4TQAAAA2l/GUVLxDapwsdbrOdBo0IDSlAAD4i8lzM2RZ0lk926h7uxjTcQAAAOyrZI+06jHfuOdDkivMbB40KDSlAAD4k1827tX8VTvlcjo0clCq6TgAAAD2tmKC5M6XmvWSEi4xnQYNDE0pAAD+YFmWHvsiQ5J00THt1TGW/RAAAAAOqmCTtGaab5w+UXLQYkDN8CcGAIA/LFy7Wz9m7VVYiFO3nJZiOg4AAIC9/f6A5C2VWvWX2gwxnQYNEE0pAADkWyX1+JzVkqTLTkhQ26aRhhMBAADYWO4KKesN37jXo5LDYTYPGqRaNaWSk5O1Z8+eSrfn5uYqOTn5iEMBAFDfPl++Xcu35qtRmEs39u9oOg4CEPMnAEBAWXqPZHml9udJLXubToMGqlZNqQ0bNsjj8VS6vaSkRFu3bj3iUAAA1Kcyj1dPzPXtJXVN32S1aBxuOBECEfMnAEDA2LVI2vqJbw+p9EdMp0EDFlKTgz/55JPy8Zw5cxQT8/+XyfZ4PPryyy+VmJjot3AAANSH2b9u1fpdBWoWFapr+iaZjoMAw/wJABBQLEtacrdvnHylFNPFbB40aDVqSg0dOlSS5HA4NHz48Ar3hYaGKjExUZMnT/ZbOAAA6lpJmUdT56+RJN3Yv5OiI0INJ0KgYf4EAAgo2f+Vdn0nuSKkHg+aToMGrkZNKa/XK0lKSkrS4sWL1bJlyzoJBQBAfZn5wyZl5xUrrkmELjsxwXQcBCDmTwCAgOH1SEvG+MapN0tR7c3mQYNXo6bUAVlZWf7OAQBAvdtfUqZnv86UJN1yWooiQl2GEyGQMX8CADR4G2ZKecul0KZS19Gm0yAA1KopNX78+EPef//999cqDAAA9WnGd1naU1CqxBZRuuhYXulD3WL+BABo0Dwl0u9//K7qercU3txsHgSEWjWlPvzwwwpfu91uZWVlKSQkRB07dmRSBQCwvZyCUr24cL0kaeTgNIW6anVBWqDamD8BABq0tc9LBRulyLZS2i2m0yBA1Kop9dtvv1W6LT8/X1dccYXOO++8Iw4FAEBde/6bddpXUqYubZro7B5tTMdBEGD+BABosNz50oqHfeMeD0ghUWbzIGD47WXhJk2aaNy4cRo7dqy/TgkAQJ3YkV+s1xZtkCSNGpIqp9NhNhCCFvMnAECDsGqyVLJbik6Vkq8ynQYBxK/vVcjLy1NeXp4/TwkAgN89/eValZR5dWxCMw1Ia2U6DoIc8ycAgK0V7ZBWT/aN0x+RnLV6wxVQpVr9aXr66acrfG1ZlrZt26Y333xTZ5xxhl+CAQBQFzbuKdA7izdLkkYNSZPDwSop1A/mTwCABmnFw1JZgdT8OCn+AtNpEGBq1ZR68sknK3ztdDoVGxur4cOHa8yYMX4JBgBAXXhy3hqVeS31S41V7+QWpuMgiDB/AgA0OPvXS5kv+Ma9HpV4MQ9+VqumVFZWlr9zAABQ51Zvz9fHS7Ml+VZJAfWJ+RMAoKFxrXhQ8rqluMFS3Kmm4yAAHfGeUps3b9bmzZv9kQUAgDr1xJw1sizprB5t1L1djOk4CGLMnwAAdtfEs17OTW/7vuj1qNkwCFi1akqVlZVp7NixiomJUWJiohITExUTE6P77rtPbrfb3xkBADhiv23K1fxVO+R0SCMHp5qOgyDE/AkA0JB0db/lGyRcIjU/ymwYBKxavX3v5ptv1uzZs/XYY4/pxBNPlCR9//33evDBB7Vnzx5Nnz7dryEBADgSliVNnr9WknThMe3VMbax4UQIRsyfAAANhWPXQrX2/CrLESJHz4dMx0EAq1VTatasWXr77bcrXCmmZ8+eio+P19///ncmVQAAW8nIc+jHrByFuZy6dSCrpGAG8ycAQINgWXIuu0eS5E2+Rq7oToYDIZDV6u174eHhSkxMrHR7UlKSwsLCjjQTAAB+Y1mWPt3k+3V36Qkd1K5ppOFECFbMnwAADcKWj+Tc+5PKFC5v13tMp0GAq1VT6qabbtJDDz2kkpKS8ttKSkr0yCOP6KabbvJbOAAAjtSclTu1ucChqDCXRgzglT6Yw/wJAGB73jJpqa8RtS70XCkiznAgBLpavX3vt99+05dffqn27dsrPT1dkrR06VKVlpbqtNNO0/nnn19+7OzZs/2TFACAGvJ4LT05P1OSdOVJCWrZONxwIgQz5k8AANvLel3KXy0rrIUyQ4Yq2XQeBLxaNaWaNm2qCy64oMJt8fHxfgkEAIC/zP51i9bvLlBUiKWr+ySYjoMgx/wJAGBrZUXSsgckSd4uo1WW2chwIASDWjWlZsyY4e8cAAD4VUmZR1P/uOLewLZeRUeEGk6EYMf8CQBga2umSUVbpagO8nb8l5T5lelECAK12lPq1FNPVW5ubqXb8/Pzdeqppx5pJgAAjti/f9ykrblFah0drr5xluk4APMnAIB9leZKKyf6xj3HSa4Io3EQPGrVlFqwYIFKS0sr3V5cXKxvv/32iEMBAHAkCkrKNO1r315SN/ZPVpjLcCBAzJ8AADa2cpJUmiPFdJMSLzOdBkGkRm/fW7ZsWfl45cqV2r59e/nXHo9HX3zxhdq1a+e/dAAA1MJrizZo9/5SJbSI0kXHtNO8Ob+bjoQgxvwJAGBrhdlSxlO+cfoEyemSPF6zmRA0atSU6tWrlxwOhxwOR5XLzCMjI/XMM8/4LRwAADWVW1iq579ZJ0kaOShVoa5aLQoG/Ib5EwDA1paPkzxFUmwfqd05ptMgyNSoKZWVlSXLspScnKyffvpJsbGx5feFhYWpVatWcrl4jwQAwJwXFq7XvuIydY6L1jk928rjKTMdCUGO+RMAwLbyM6R1r/jG6Y9KDofZPAg6NWpKJST4Lqft9bKUDwBgPzvzizXjf1mSpDsHp8npdMjjMRwKQY/5EwDAtpbeJ1keqe3ZUquTTadBEKpRU+qAN95445D3X3755bUKAwDAkXjmq0wVu706ukNTndallek4QAXMnwAAtrJnsbT5fUkOqdcE02kQpGrVlLr11lsrfO12u1VYWKiwsDBFRUUxqQIA1LtNewr17582SZJGDeksB8vPYTPMnwAAtmFZ0pLRvnHSZVLTHmbzIGjVavfXnJycCh/79+9XRkaGTj75ZP373//2d0YAAA5r6vw1KvNa6pvSUid2bGE6DlAJ8ycAgG1snyft+Epyhkk9x5tOgyDmt0sSpaSk6NFHH630KiAAAHVtzY59+nDJVknSqCFphtMA1cf8CQBQ7yzv/6+SSrlRapRgNg+Cml+vkx0SEqLs7Gx/nhIAgMN6Yk6GLEs6vVucerZvajoOUCPMnwAA9Wrju1LOb1JItNTtXtNpEORqtafUJ598UuFry7K0bds2TZs2TX369PFLMAAAquO3TTmau3KHnA7pziGppuMAB8X8CQBgnNctLbvPN+4ySopoaTYPgl6tmlJDhw6t8LXD4VBsbKxOPfVUTZ482R+5AAColsfnZEiSzj+6vTq1ijacBjg45k8AAOPWvSztXydFtJY63246DVC7ppTX65Uk7dq1S5IUGxvrv0QAAFTT/zJ3a9G6PQp1OXTbwBTTcYBDYv4EADCqrED6/Y9NzbuPlUIbm80DqBZ7SuXm5mrEiBFq2bKl4uLiFBcXp5YtW+qmm25Sbm5uHUQEAKAyy7L02B+rpC7tnaD2zaIMJwIOjvkTAMC41VOl4u1S42Sp47Wm0wCSarhSau/evTrxxBO1detWXXrpperSpYskaeXKlXrttdf05ZdfatGiRWrWrFmdhAUA4IA5K3Zo6eZcRYW5NGJAJ9NxgINi/gQAMK5kj7TqMd+458OSK8xsHuAPNWpKjR8/XmFhYVq3bp1at25d6b7Bgwdr/PjxevLJJ/0aEgCAP/N4LU2e61sldVWfJMVGhxtOBBwc8ycAgHErJkjufKnZUVLCMNNpgHI1evveRx99pCeeeKLShEqS4uLi9Nhjj+nDDz/0WzgAAKry0W9btXbnfsVEhuraU5JNxwEOifkTAMCogk3Smmm+cfpEyVHjXXyAOlOjP43btm1Tt27dDnp/9+7dtX379iMOBQDAwZSWefXk/DWSpOv7dVRMZKjhRMChMX8CABj1+wOSt1RqPUBqM9h0GqCCGjWlWrZsqQ0bNhz0/qysLDVv3vxIMwEAcFBvL96kLTlFahUdritOSjQdBzgs5k8AAGNyV0hZb/jG6RMlh8NsHuAvatSUGjJkiO69916VlpZWuq+kpERjx47V6aef7rdwAAD8WWFpmZ7+MlOSdPNpKYoMcxlOBBwe8ycAgDHL7pUsrxR/vtSyt+k0QCU13uj82GOPVUpKikaMGKHOnTvLsiytWrVKzz33nEpKSvTmm2/WVVYAQJB7bdEG7d5fovjmkRp2bLzpOEC1MH8CABixa5G05WPfHlI9HzGdBqhSjZpS7du31/fff68bb7xRY8aMkWVZkiSHw6FBgwZp2rRpio/nPwkAAP/LK3Tr+QXrJEkjB6UqLIRNOtEwMH8CANQ7y5KWjPaNk6+SYjqbzQMcRI2aUpKUlJSkzz//XDk5OVq7dq0kqVOnTuyFAACoUy8sXKf84jKltm6sc9PbmY4D1AjzJwBAvcr+TNr1reSKkHo8aDoNcFA1bkod0KxZMx1//PH+zAIAQJV27ivWjP9tkCTdOThNLiebdKJhYv4EAKhzXo+0dIxvnHqLFMWLebAv3vsAALC9Z7/KVJHbo6M6NNWgrq1NxwEAALCvjbOk3N+l0KZSt9Gm0wCHRFMKAGBrm/cWatZPmyRJo4akycGljAEAAKrmKZGWjfWNu42WwpqZzQMcBk0pAICtPTl/jdweSyd3aqmTOrY0HQcAAMC+1j4vFWyUIttKqTebTgMcFk0pAIBtrdmxTx/+tlWSb5UUAAAADsKdL6142Dfu8YAUEmU2D1ANNKUAALY1eW6GLEs6vVuc0uObmo4DAABgX6smSyW7pehUKfkq02mAaqEpBQCwpSWbczVnxQ45HdIdg1NNxwEAALCvoh3S6sm+cfojkjPEbB6gmmhKAQBs6Yk5GZKk845qr5TW0YbTAAAA2NiKh6WyAqn5cVL8BabTANVGUwoAYDuLMnfru8zdCnU5dNvAFNNxAAAA7Gv/einzBd+416MSVypGA0JTCgBgK5ZladIfq6T+cXwHxTdnk04AAICDWjpW8rqluEFS3Kmm0wA1QlMKAGAr81bu0NLNuYoMdemmU1klBQAAcFA5S6SNs3zjXo8ajQLUBk0pAIBteLyWnpjrWyV11cmJio0ON5wICAyPPvqoHA6HbrvtNtNRAAD+tGSM73OHYVLzo81mAWqBphQAwDY+WbpVa3bsV5OIEF3Xt6PpOEBAWLx4sV544QX17NnTdBQAgD/tWCBt+0JyhEjpD5tOA9QKTSkAgC2Ulnk1Zd4aSdL1/TsqJirUcCKg4du/f78uvfRSvfTSS2rWrJnpOAAAf7Esaclo37jTtVJ0J7N5gFqiKQUAsIV3Fm/S5r1Fio0O1xUnJZqOAwSEESNG6KyzztLAgQNNRwEA+NOWj6Q9P0quKKn7/abTALUWYjoAAABFpR49/VWmJOnmUzspKoxfT8CRevvtt/Xrr79q8eLFhz22pKREJSUl5V/n5+dLktxut9xud53kO3Deujq/3VBvYAu2eqXgq9lW9XrLFLJkjBySPKm3yBvSQvJzLlvVW0+Crea6rre6521Qs/5HH31UY8aM0a233qqpU6eajgMA8JPXFm3Qrn0lim8eqUuO62A6DtDgbd68WbfeeqvmzZuniIiIwx4/ceJEjRs3rtLtc+fOVVRUVF1ELDdv3rw6Pb/dUG9gC7Z6peCr2Q71dnDP01GlGSpRtOZv7KGyTZ/V2WPZod76Fmw111W9hYWF1TquwTSl2KQTAAJTXpFbz3+zTpJ0+8BUhYXwznLgSP3yyy/auXOnjj76/6/E5PF4tHDhQk2bNk0lJSVyuVzl940ZM0YjR44s/zo/P1/x8fEaPHiwmjRpUicZ3W635s2bp0GDBik0NPD3kKPewBZs9UrBV7Nt6vUUKeTzEZKkkPSxGpx6UZ08jG3qrUfBVnNd13tg1fXhNIim1J836Xz4Ya4qAACB5KWF65VX5FZq68b6W692puMAAeG0007T77//XuG2K6+8Up07d9bdd99doSElSeHh4QoPD690ntDQ0DqfmNfHY9gJ9Qa2YKtXCr6ajdeb+ZRUtFWKiper881yufg32t+Crea6qre652wQTak/b9J5uKZUfe+JwPtOA1+w1Uy9gc1u9e7eX6JX/5clSbrt1E7yesrk9fj3MexWc12j3rp7jIYkOjpa3bt3r3Bbo0aN1KJFi0q3AwAaiNJcacUE37jneMl1+LdnA3Zn+6ZUTTbplMzticD7TgNfsNVMvYHNLvV+kOVUYalTCY0tlWb9rM821N1j2aXm+kK9/lPdPREAAKhTKx+TSnOkmK5S4mWm0wB+YeumVE036ZTqf08E3nca+IKtZuoNbHaqd2tuke786TtJlsZfeKxO6tiiTh7HTjXXB+r1v+ruiWB3CxYsMB0BAFBbhdlSxlTfOH2C5HQd8nCgobB1U6qmm3RK5vZE4H2ngS/YaqbewGaHeqctWCm3x1KfTi3Ur3NcnT+eHWquT9Tr33MDAGDU8ockT5HU8iSp3bmm0wB+Y+umVE036QQANAxrd+zT7F+3SJJGDelsOA0AAICN5a+V1r3kG/d6VHI4zOYB/MjWTSk26QSAwDRl3hp5LWlw19bqFd/UdBwAAAD7WnafZHmktmdJrfqaTgP4ldN0AABAcFm2JVefL98uh0O6c0ia6TgAAAD2tfcXadO7khy+vaSAAGPrlVJVYZNOAGjYHp+TIUk676h2Sm0dbTgNAACAjS0Z7fuceKnUrKfZLEAdYKUUAKDeLFq3W9+u3a1Ql0O3D0w1HQcAAMC+tn8pbZ8vOUOlnuNNpwHqBE0pAEC9sCyrfJXU34/voPjmUYYTAQAA2JRl/f8qqU7XS42TzOYB6ghNKQBAvZi/aqd+25SriFCnbhrQyXQcAAAA+9r8vrT3ZymksdT9PtNpgDpDUwoAUOe8XktP/LFK6so+SWrVJMJwIgAAAJvyuqWl9/rGne+QIlqZzQPUIZpSAIA698nSbGXs2KfoiBBdf0pH03EAAADsa/0Mad9aKbyl1OUO02mAOkVTCgBQp0rLvJoyb40k6fp+HRUTFWo4EQAAgE2VFUq/P+gbd7tPCuVKxQhsNKUAAHXq3Z83a9PeQrVsHKYr+ySajgMAAGBfGU9LRdukRolSyvWm0wB1jqYUAKDOFJV69PSXayVJNw3opKiwEMOJAAAAbKo0R1o5yTfuOV5yhZvNA9QDmlIAgDrz+vcbtHNfido1jdTfe3cwHQcAAMC+VjwquXOlmO5Swj9MpwHqBU0pAECdyC92a/qCdZKk2welKjzEZTgRAACATRVuldY87Rv3mig5mTchONCUAgDUiZcWrldekVudWjXWeUe1Mx0HAADAvn4fJ3mKpdg+UtuzTKcB6g1NKQCA3+3eX6JXvsuSJN05OFUup8NwIgAAAJvKz5DWv+ob95okOZg3IXjQlAIA+N2zX2eqsNSj9PYxGtItznQcAAAA+1p6n2R5pHbn+FZKAUGEphQAwK+25BRq5g+bJEmjhnSWg1f7AAAAqrZnsbT5fUkOKf0R02mAekdTCgDgV0/NX6tSj1cnJrdQn04tTMcBAACwryVjfJ8T/yk17WE2C2AATSkAgN9k7tyvD37dIkkadXoaq6QAAAAOZvt8aceXkjNU6jnedBrACJpSAAC/mTIvQ15LGtS1tY7u0Mx0HAAAAHuyLGnJaN+40w1S40SjcQBTaEoBAPzi9y15+uz37XI4pDsHp5mOAwAAYF+b35f2/iKFNJa632s6DWAMTSkAgF88PjdDkjS0VzulxUUbTgMAAGBT3jLfFfckqfMdUkQrs3kAg2hKAQCO2A/r92jhml0KcTp0+8BU03EAAADsa/0Mad8aKbyl1OUO02kAo2hKAQCOiGVZenyOb5XUJcfHq0OLKMOJAAAAbKqsSPr9Qd+4231SKKvLEdxoSgEAjshXq3fql405igh16pZTU0zHAQAAsK81z0hF2VJUBynletNpAONoSgEAas3r/f9VUsNPSlSrJhGGEwEAANhUaY60YqJv3HO85Ao3mwewAZpSAIBa+8+ybK3evk/RESG6oV9H03EAAADsa+XjkjtXiukqJf7TdBrAFmhKAQBqxe3xasq8NZKkf52SrKZRYYYTAQAA2FTRNiljqm+cPkFyuozGAeyCphQAoFbe+3mLNu4pVMvGYbqyT5LpOAAAAPa1/CHJUyS1OEFqd67pNIBt0JQCANRYsdujp770rZIaMaCTGoWHGE4EAABgU/sypcyXfONej0oOh9k8gI3QlAIA1Nib32/UjvwStWsaqX/07mA6DgAAgH0tu1+yyqQ2p0ut+5lOA9gKTSkAQI3sK3bruQWZkqRbB6YoPIQ9EQAAAKqUs0Ta+G/fOH2C0SiAHdGUAgDUyEvfZimn0K2OsY10/lHtTMcBAACwr6X3+j4nXCI1P8psFsCGaEoBAKptz/4SvfLteknSHYPTFOLi1wgAAECVdn4rZX8mOVxSj/Gm0wC2xP8mAADV9tyCdSoo9ahHuxid0T3OdBwAAAB7sixp6RjfuOPVUpMUs3kAm6IpBQColuzcIr35w0ZJ0qghaXJw5RgAAICqZX8m7fqf5IqQut9vOg1gWzSlAADV8vSXa1Va5lXvpObqm9LSdBwAAAB7srzS0nt849SbpSj24AQOhqYUAOCw1u/ar/d+2SJJuuv0zqySAgAAOJiNb0u5y6TQGKnraNNpAFujKQUAOKzJ89bI47U0sEsrHZPQzHQcAAAAe/KUSsvG+sZdRknhzc3mAWyOphQA4JCWb83Tf5dtk8Phu+IeAAAADmL9K9L+9VJEayntVtNpANujKQUAOKQn5mZIks5Nb6subZoYTgMAAGBTZYXS8od84273SaGNzeYBGgCaUgCAg/opa68WZOxSiNOh2wemmo4DAABgX2umSUXbpEaJUqfrTKcBGgSaUgCAKlmWpcfnrJYkXXxcvBJbNjKcCAAAwKZK86SVj/rGPR6UXGFG4wANBU0pAECVFmTs0uINOQoPceqWU1NMxwEAALCvVU9IpTlSky5S4j9NpwEaDJpSAIBKvF5Lj83x7SV1xUmJiouJMJwIAADApop3ShlP+sbpD0tOl9k8QANCUwoAUMl/f9+mVdvyFR0eouv7dTQdBwAAwL5WTJDKCqTmx0rtzzOdBmhQaEoBACpwe7yaMm+NJOnaU5LVrBF7IgAAAFSpYJO0drpvnD5BcjjM5gEaGJpSAIAKPvhli7J2F6hFozBddXKS6TgAAAD29fs4yVsqtR4gxQ00nQZocGhKAQDKFbs9eurLtZKkGwd0UuPwEMOJAAAAbCo/Q8p6zTdmlRRQKzSlAADl3vpho7blFattTIQu7d3BdBwAAAD7Wna/ZHmldudKLU8wnQZokGhKAQAkSfuK3Xr260xJ0m0DUxURypVjAAAAqrT3N2nTu5IcUs+HTKcBGiyaUgAASdIr32Upp9Ct5NhGOv/odqbjAAAA2Neysb7PCcOkZj3NZgEaMJpSAADtLSjVy99mSZLuGJSmEBe/HgAAAKq063sp+7+SwyX1GGc6DdCg8b8OAICe+zpT+0vK1L1dE53RPc50HAAAAPtadq/vc9JwqUmq2SxAA0dTCgCCXHZukd74YaMk6c7BaXI6uXIMAABAVRw7vpJ2fC05w6Qe95uOAzR4NKUAIMg989ValZZ5dXxSc/VLjTUdBwAAwJ4sS87lfzSiOv1LapRgNg8QAGhKAUAQW79rv979eYsk6a4haXI4WCUFAABQldaexXLu/UlyRUrd7jEdBwgINKUAIIg9OX+tPF5Lp3ZupWMTm5uOAwAAYE+WV13cs3zjtFukSPbgBPyBphQABKkV2Xn6z9JsSb69pAAAAFA1x5b3FePdICukidTlLtNxgIBBUwoAgtQTczIkSeemt1XXtk0MpwEAALApr0euFQ/5hqm3SuGsLgf8haYUAAShnzfm6OuMXXI5Hbp9EJcyBgAAOKiNs+TYl6FSRfuaUgD8hqYUAAQZy5Imz1srSbr42HgltWxkOBEAAIBNed3S7+MkSZmhQ6VQVpcD/kRTCgCCzKpch37emKuwEKduPS3FdBwAAAD7Wv+6tH+drPBWWh96luk0QMChKQUAQcTrtfTfzb5/+oefmKC4mAjDiQAAAGzKUyIt/2Mvqc6j5HEwbwL8jaYUAASRL1bs0JYChxqFu3RD/06m4wAAANjXulekwk1SZFt5O15nOg0QkGhKAUCQKPN4NfXLTEnS1SclqnmjMMOJAAAAbKqsSFrxiG/c7R7JFWk2DxCgaEoBQJD44NctytpTqEYhlq7sk2A6DgAAgH1lviAVZUtRHaSO15hOAwQsmlIAEASK3R5Nne+74t6gdl41Dg8xnAgAAMCmygqklRN94+5jJVe42TxAAKMpBQBBYOaPm7Qtr1hxTcJ1cpxlOg4AAIB9rXlOKt4pNU6WkoebTgMENJpSABDg9peU6dmvfXtJ3Tygo0L5lx8AAKBq7v3Sqsd84+73S85Qs3mAAMd/TQAgwL3ybZb2FpQqqWUjnX9UW9NxAAAA7Gvts1LJbik6RUq81HQaIODRlAKAAJZTUKqXvl0vSRo5KFUhLv7ZBwAAqJJ7n7Tqcd+4+1jJyR6cQF3jfycAEMCmf7NO+0vK1LVNE53Vo43pOAAAAPa1ZppUskeKTpUS/m46DRAUaEoBQIDanles1xdtkCSNGpImp9NhNhAAAIBdufOlVU/4xt3vZ5UUUE9oSgFAgHr6q7UqKfPquMRm6p8WazoOAACAfWU8I5XulZqkSQmXmE4DBA2aUgAQgDbsLtC7izdLku46vbMcDlZJAQAAVKk0T1o92Tfufr/kdJnNAwQRmlIAEICmzFujMq+lAWmxOi6xuek4AAAA9pXxtFSaIzXpInUYZjoNEFRoSgFAgFmZna9PlmZLku4YnGY4DQATJk6cqOOOO07R0dFq1aqVhg4dqoyMDNOxAMB+SvOk1VN8Y1ZJAfWOphQABJjJc33/8Ty7Zxt1bxdjOA0AE7755huNGDFCP/zwg+bNmye3263BgweroKDAdDQAsJc1z0juXCmmq9ThItNpgKDDJQUAIID8vGGvvly9Uy6nQyMHpZqOA8CQL774osLXr732mlq1aqVffvlFp5xyiqFUAGAz7vz/XyXVbSyrpAADWCkFAAHCsiw9Nse3SuqiY9orObax4UQA7CIvL0+S1Lw5e8wBQLk1z/6xl1RnVkkBhrBSCgACxMK1u/VT1l6FhTh168AU03EA2ITX69Vtt92mPn36qHv37lUeU1JSopKSkvKv8/PzJUlut1tut7tOch04b12d326oN7AFW71SANRctl8hqybLIams82hZHq/k8R708AZfbw0FW71S8NVc1/VW97w0pQAgAHi9lh6fs1qSdPkJCWoTE2k4EQC7GDFihJYvX67vvvvuoMdMnDhR48aNq3T73LlzFRUVVZfxNG/evDo9v91Qb2ALtnqlhltzp9LZ6ubeo/2OtvpqebSsFZ9V6/saar21FWz1SsFXc13VW1hYWK3jaEoBQAD4YsV2Ld+ar0ZhLt3Qv6PpOABs4qabbtKnn36qhQsXqn379gc9bsyYMRo5cmT51/n5+YqPj9fgwYPVpEmTOsnmdrs1b948DRo0SKGhoXXyGHZCvYEt2OqVGnjNZQUK+exaSVLEseN1RuI5h/2WBl1vLQRbvVLw1VzX9R5YdX04tm5KTZw4UbNnz9bq1asVGRmpk046SZMmTVJaGpc4B4ADyjxePfHHFfeu6ZusFo3DDScCYJplWbr55pv14YcfasGCBUpKSjrk8eHh4QoPr/xvR2hoaJ1PzOvjMeyEegNbsNUrNdCaM1+RSnZJjZMV0nG45Kz+f4sbZL1HINjqlYKv5rqqt7rntPVG51zOGAAOb/avW7V+V4GaRYXqmr6H/o8ngOAwYsQIvfXWW5o1a5aio6O1fft2bd++XUVFRaajAYBZZYXSqsd942731qghBcD/bP03kMsZA8ChlZR5NHX+GknSjf07KToieF7VAXBw06dPlyT179+/wu0zZszQFVdcUf+BAMAuMl+SindIjRKlpMtMpwGCnq2bUn9VncsZ1/fVY9ihP/AFW83U27C88f1GZecVq3WTcF1ybNvD1tHQ662NYKuZeuvuMRoSy7JMRwAA+/EUS6sm+cbd7pGcvJgHmNZgmlLVuZyxZO7qMezQH/iCrWbqtb9ij/TUry5JDvVvWaiv5s2p9vc2xHqPVLDVTL3+U92rxwAAbG7dq1LRNikqXkoabjoNADWgplR1Lmcs1f/VY9ihP/AFW83U23A8u2C99pdlKqF5lB64/CSFug6/TWBDrre2gq1m6vW/6l49BgBgY55SaeUfq6S63i25wszmASCpgTSlqns5Y8nc1WPYoT/wBVvN1GtvOQWleuW7DZKkO4akKSqiZlfca2j1+kOw1Uy9/j03AKCB2/CWVLhJioiTkq8ynQbAH2zdlKrp5YwBIFg8/8067SspU5c2TXR2jzam4wAAANiXt0xaMdE37nKnFBJpNg+AcrZuSo0YMUKzZs3Sxx9/XH45Y0mKiYlRZCT/kAAITjvyi/Xaog2SpFFDUuV0OswGAgAAsLNN70r7M6XwFlLK9abTAPiTw29AYtD06dOVl5en/v37q02bNuUf77zzjuloAGDM01+uVUmZV8cmNNOAtFam4wAAANiX5ZVWPOIbdx4phTQymwdABbZeKcXljAGgoo17CvTO4s2SpFFD0uRwsEoKAADgoDZ/KOWtlEJjpJQRptMA+Atbr5QCAFT05Lw1KvNa6pcaq97JLUzHAQAAsC/LklY87Bun3SKFxZjNA6ASmlIA0ECs3p6vj5dmS/KtkgIAAMAhZH8m5SzxvWUv7VbTaQBUgaYUADQQT8xZI8uSzurRRt3b8UofAADAQVmWtPyPVVIpN/o2OQdgOzSlAKAB+GVjjuav2iGX06GRg1NNxwEAALC3nQukPT9IznDfBucAbImmFADYnGVZenzOaknShUe3V8fYxoYTAQAA2NyKCb7PHa+RIuPMZgFwUDSlAMDmvsvcrR/W71WYy6lbBqaYjgMAAGBvu3+Sts+XHCFS11Gm0wA4BJpSAGBjvlVSGZKkf56QoHZNIw0nAgAAsLmVE32fEy+VGiWYzQLgkGhKAYCNfbF8u5ZtyVOjMJdGDOhoOg4AAIC95a6QtnwkySF1HW06DYDDoCkFADbl8Vp6Yq5vldTVJyepReNww4kAAABsbuWjvs/xF0gxnc1mAXBYNKUAwKZm/7pF63YVqGlUqK45Jdl0HAAAAHvbv17a+G/fuNsYs1kAVAtNKQCwoZIyj6bOXytJurF/RzWJCDWcCAAAwOZWPi5ZHqnN6VLzo02nAVANNKUAwIb+/eMmbc0tUusm4br8xETTcQAAAOytaJu0/lXfuNs9ZrMAqDaaUgBgMwUlZZr2daYk6ZbTUhQR6jKcCAAAwOZWT5G8pVLsyVKrvqbTAKgmmlIAYDMz/pel3ftLldAiShcfG286DgAAgL2V7JXWTveNu7KXFNCQ0JQCABvJLSzVCwvXS5JGDkpVqIt/pgEAAA5pzTNSWYHUrJfU9gzTaQDUAP/bAQAbef6b9dpXXKbOcdE6p2db03EAAADszb1fynjaN+46RnI4zOYBUCM0pQDAJnbmF+u1RVmSpFFD0uR0MqkCAAA4pMwXpdK9UnSKFH+B6TQAaoimFADYxDNfZarY7dXRHZrq1M6tTMcBAACwN0+JtHqyb9zlLsnJxWGAhoamFADYwKY9hfr3T5skSXed3lkOlp4DAAAcWtYbUlG2FNlOSrrMdBoAtUBTCgBs4Mn5a1TmtdQ3paVOSG5hOg4AAIC9ecuklZN84y53SK5ws3kA1ApNKQAwLGP7Pn20ZKsk6a4hnQ2nAQAAaAA2vS/tXyeFNZc6Xms6DYBaoikFAIY9MTdDliWd0T1OPdrHmI4DAABgb5YlrZzoG6fdKoU2NpsHQK3RlAIAg37dlKN5K3fI6ZDuGJxqOg4AAID9ZX8m5S6TQhpLqTeZTgPgCNCUAgCDnpiTIUm64Oj26tQq2nAaAAAAm/vzKqlO/5LCm5vNA+CI0JQCAEO+W7tbi9btUZjLqVsHppiOAwAAYH87F0q7/ic5w6TOI02nAXCEaEoBgAGWZenxOaslSf/o3UHtm0UZTgQAANAArJjg+5x8lRTV1mwWAEeMphQAGDBnxQ4t3ZKnqDCXRgzoZDoOAACA/e1ZLG2fKzlcUte7TKcB4Ac0pQCgnnm8libP9e0ldVWfJMVGhxtOBAAA0ACs+GMvqYR/SI2TzGYB4Bc0pQCgnn3021at3blfMZGhuvaUZNNxAAAA7C9vpbTlQ9+422izWQD4DU0pAKhHpWVePTl/jSTphv4dFRMZajgRAABAA3BglVT8+VJMV7NZAPgNTSkAqEdvL96kLTlFahUdruEnJpqOAwAAYH/710sb/+0bd7vHbBYAfkVTCgDqSWFpmZ7+MlOSdPNpKYoMcxlOBAAA0ACsfEyyPFKbIVLzY0ynAeBHNKUAoJ7M+N8G7d5fog7NozTs2HjTcQAAAOyvMFtaP8M3ZpUUEHBoSgFAPcgrdOuFb9ZJkkYOSlVYCP/8AgAAHNbqyZK3VIo9WWp1iuk0APyM/xUBQD14YeE65ReXKa11tM5Jb2s6DgAAgP0V75TWTveNu91rNguAOkFTCgDq2M59xZrxvw2SpDuHpMnldJgNBAAA0BCsekLyFEktjvftJwUg4NCUAoA69uxXmSpye3RUh6Ya2KWV6TgAAAD2V7xLWvOsb9z9fsnBi3pAIKIpBQB1aPPeQs36aZMkadSQNDmYUAEAABze6iclT6HvanttzzSdBkAdoSkFAHXoyflr5PZY6pvSUid1bGk6DgAAgP2V7JXWPOMbs0oKCGg0pQCgjqzZsU8f/rZVkm+VFAAAAKph9ZNS2X6pabrU7hzTaQDUIZpSAFBHJs/NkGVJp3eLU8/2TU3HAQAAsL/SHGnN075xD1ZJAYGOphQA1IElm3M1Z8UOOR3SnUNSTccBAABoGDKeltz5UtMeUvuhptMAqGM0pQCgDjwxJ0OSdP7R7dWpVbThNAAAAA1AaZ60eqpv3H2s5OC/q0Cg4285APjZoszd+i5zt0JdDt16WorpOAAAAA1DxlOSO1eK6SrFX2A6DYB6QFMKAPzIsixN+mOV1KW9ExTfPMpwIgAAgAagZI+06gnfuPsDrJICggR/0wHAj+at3KGlm3MVGerSiAGdTMcBAABoGFZOksr2Sc2OkjpcaDoNgHpCUwoA/MTjtfTEXN8qqatOTlRsdLjhRAAAAA1A4VZpzTO+cfojrJICggh/2wHATz5eslVrduxXTGSorjulo+k4AAAADcPyhyVPsRR7stTmdNNpANQjmlIA4AelZV49OX+NJOlf/ZIVExlqOBEAAEADsG+dtO5l3zh9guRwmM0DoF7RlAIAP3hn8SZt3luk2OhwXXlSkuk4AAAADcPvD0pWmW+FVKu+ptMAqGc0pQDgCBWWlunprzIlSbec2kmRYS7DiQAAABqA3OXShpm+cfrDZrMAMIKmFAAcodcXbdSufSWKbx6pYcd1MB0HAACgYVg2VpIlxV8oNT/GdBoABtCUAoAjkFfk1vPfrJMk3XZaqsJC+GcVAADgsHb/IG35yHelvZ7jTacBYAj/ewKAI/DiwnXKK3IrtXVjDT2qnek4AAAA9md5pV9u9Y2ThksxXczmAWAMTSkAqKVd+0r06ncbJEl3DE6Ty8nVYgAAAA4r6y1pz09SSGMp/RHTaQAYRFMKAGrp2a8zVeT2KD2+qQZ3bW06DgAAgP2590tLR/vG3e+TItuYzQPAKJpSAFALW3IKNfPHjZKku4akyeFglRQAAMBhrZwoFW2TGidLabeZTgPAMJpSAFALU+evldtjqU+nFurTqaXpOAAAAPa3P0taNdk3Pmqy5Ao3mweAcTSlAPxfe3ceXVV97n/8s09GIiQhEBKCYYaAShBKySVtHSoS0bbS+qNIuQ4UByzUAbRK708i2BYFClQuS/AuAXvtQsVf1bYOGKloEQhTQBmCQBkMZBBoBggkJznf3x8HTokZIHDm/X6ttVdy9vnufZ4n37Dz8Jx99kYr7S2t0p+3FkmSnsjpF+BoAAAAQkTBE5KrRkr5vnTl7YGOBkAQoCkFAK00L+9LuYw04qoUXZueGOhwAAAAgl/pGumr/ydZDulbCyQufQBANKUAoFU+LyrX+ztKZFnS4zkZgQ4HAAAg+LnqpC2Pur/vPVFKHBDQcAAED5pSANAKc1btkST9eFAX9U1pF+BoAAAAQkDhPKl8uxSVKA2YEehoAAQRmlIAcJHW7T+mf+w9pqgIS48N7xvocAAAAIJfxW7p8+nu7wfPk2K5QQyAf6MpBQAXwRjjOUtq7NCuSk+KC3BEAAAAQc5VL20Y7764eeeRUs97Ax0RgCBDUwoALsJHu8tUcLhcsVEOTb6xd6DDAQAACH575kvH86WoeCnrJS5uDqARmlIAcAEul9Hcs2dJjf9OD3WKjw1wRAAAAEGuolDa/n/d3w+eL8VdGdh4AAQlmlIAcAF/2X5Ue0qr1C42UhOv6xXocAAAAIJbg4/t5Ug9xwc6IgBBiqYUALTAWe/SvLwvJUkTr++lhLioAEcEAAAQ5PYskI5vkCLbSUP/h4/tAWgWTSkAaMHrm77S4RPV6tg2WuO/0z3Q4QAAAAS3Yxul7f/l/n7wPOmK9MDGAyCo0ZQCgGacrq3XC6v3SpIm39hbcdGRAY4IAAAgiJ0ulv7xY/fH9rr8SOo1IdARAQhyNKUAoBl/XH9QZVU16pLYRmOzugY6HAAAgOBVXyN9+hPp9FEp4Sop+3/52B6AC6IpBQBNqDzj1Iuf7JckPTq8j2IiIwIcEQAAQJAyRtr0kPs6UlGJ0nXvSFHxgY4KQAigKQUATfifT/+p8mqnendqq58M5hbGAAAAzfryv6V/LpMsh/Td16V2vQMdEYAQQVMKAL7h2Mkavbz2gCTp8RF9FeHg1HMAoWnRokXq3r27YmNjlZWVpY0bNwY6JABhxir7WNr6mPvBtXOkziMCGxCAkEJTCgC+YdHH+1RdW6/MKxOUc3VqoMMBgEvy+uuva8qUKcrNzdXWrVs1cOBA5eTkqKysLNChAQgTHep3KuKz/yOZeqn7XVK/xwIdEoAQQ1MKAM5T9K9q/WnDYUnSEzkZsrhAJ4AQNW/ePN1///0aP368rrrqKi1evFhxcXFaunRpoEMDEAas4vc07MwMWXVVUqcbpKFLuLA5gFbj/uYAcJ4XVu9Vbb1Lw3p20Hd7dwx0OABwSWpra7VlyxZNmzbNs87hcGj48OFav359o/E1NTWqqanxPK6srJQkOZ1OOZ1Or8fnKJyjiL2LNKKmRhF/jZGxwX9kI4whX69oal/nrfO8lvXv9ZYlyXH2q+W+7pHlOLsuQrIiZM5+lSNSsqIkx/lLjBTRRiYiVopoI0XESpFtpch4mah2UmQ71Vtxiq8/oLrKw1LbVMkR7cWcg491+DVFbPy5LNWpLvVWmewVkomUfHC8CBbnjoW+OCYGI7vlK9kvZ1/ne7H7pSkFAGftKzupN7cUSZKeuIWzpACErmPHjqm+vl4pKSkN1qekpKiwsLDR+FmzZmnGjBmN1n/44YeKi4vzenz9awvU13lUbSTpjNd3H5QsiXyD2OX+xY+UdKMkrXJ/fM2pONVYiTptJem0I1mnrY5nl2SddKSp2kp2N8FCUHfnB8qsXSJLRl9FXK+Cyp/LrPo40GH5TV5eXqBD8Cu75SvZL2df5VtdXX1R42hKAcBZ8/O+lMtIN1+VosFd2wc6HADwm2nTpmnKlCmex5WVlUpPT9eIESMUH++D27qfvlanTz6m/A35yvqPLEVGhn9JWldXR74+YTzfWcact85IxjT+3rgkudyPTX0TS53kckrGKbmcslxOyVUj1Z8+bzkjq+6U5KyU6qrcX52Vqq0qVoyqZMmlKFUrylSrrTkquZqI2hEjte0tE58h0y5DJvFamfaDpLhuwfsRuPoaOXbNVEThYkmSs8eD2lqao5tH5CgqKirAwfme0+lUXl6ebr75ZvINU3bL2df5njvr+kLC/y8iAFyEDf88oXe/KJZlSY+PyAh0OABwWTp27KiIiAiVlpY2WF9aWqrU1MY3cIiJiVFMTEyj9VFRUb4pzKO6SW3SVBHxtSKTv22L4l9OJ/mGMafTqVXvvadbR96iKHNSOlPmXk4fkU4dlqq/cn89dVCq2ivLVSNV7pRVubPhjqLbS+0HS0nfkjoOcy9tUpp8Tb8qWyttvF+qPHum5dX/JfWfLr3/vu+OE0GKfMOf3XL2Vb4Xu8+QaEotWrRIc+bMUUlJiQYOHKiFCxdq6NChgQ4LQJgoLLe07NWtkqQ7Bl+pjNR2AY4IAC5PdHS0vvWtb2n16tUaNWqUJMnlcmn16tWaPHlyYIMDwpnlkKI7SDEdpIT+TY9x1UvVh90Nnso9UsUO6USBVPGFVPsvqXS1ezmnbS+pY7aUnC0lf8+9X8tP96uqrZC2PSXtc58dpdgUachCqevosL5+FAD/Cfqm1LnbGS9evFhZWVlasGCBcnJytGfPHnXq1CnQ4QEIcXm7yvRSoUP1xqUbM5L1m1HXBDokAPCKKVOm6J577tGQIUM0dOhQLViwQKdOndL48eMDHRpgb44IqW0P95I28t/r62ulip3Sv7ZKxzdKx9ZL5Tukk/vdy8H/dY+LTpKSvyt1+p77a/tBUkTjMx0vy5mvpUMrpF3PS6ePutf1miANmuM+mwsAvCTom1Ln385YkhYvXqx3331XS5cu1VNPPRXg6ACEsrcLjmjqyu2qN5ZuuTpFL4wdrOhIP73zCAA+NmbMGH399deaPn26SkpKdO211+qDDz5odPFzAEEiIlpKGuReek1wr6utkI7nS19/5l6OrZdqT0hH/uJeJPfdAZMGn/2433+4P/53RXd386s16s9IR/4q/fOPUvEH7utrSVK7PtLQl6SUG7yVKQB4BHVTqrW3M/Y3l8so/8AJ7auQ8g+csM1FK+2Ur2S/nO2S7xdFFfrd+7tljDQ02aX5owfQkAIQdiZPnszH9YBQFp0gdR7hXiT3RdhPbJW+/odU9ql0bJ1Uc9zdrDp23v+PHDFSfF8pvr8U30+K7SRFxEmRV0iRce7nTx+RTv5Tqtrv/lqx033h9nOShkg97pZ63SdFtvFv3gBsI6j/x9na2xlLUk1NjWpqajyPz13x3el0yunlzz076136z6WbJUVq4a7NXt13cLNbvpL9crZPvmOHdNHQyEMyrnqvHyOC0bkc7ZDrOXbLmXx99xoAEHCOKKljlnvp/7j7LoIn959tSm1wf63Y5b5jYPkX7qU14tKl7v8p9bir+WtiAYAXBXVT6lLMmjVLM2bMaLT+ww8/VFxcnFdfq94lpbRp5WmxAIKCJWlIsktZkYdkWVJeXl6gQ/Iru+Ur2S9n8vWe6upqn+0bAC6LZUnteruXHne517nqpepDUkWhVLnbfTH12n9J9dVS3SmprlqqPy21SXVfRL1tT/fXdr2lxAH+u4g6ACjIm1KtvZ2xJE2bNk1TpkzxPK6srFR6erpGjBih+Ph4r8d4S45TeXl5uvnmm21x20in0175SvbLmXzDm93yleyXM/l637mzrgEgJDgizjaaekpdbg10NADQoqBuSl3K7YxjYmIUE9P47hNRUVE+Lc59vf9gY7d8JfvlTL7hzW75SvbLmXy9u28AAAB4X1A3pSRuZwwAAAAAABCOgr4pxe2MAQAAAAAAwk/QN6UkbmcMAAAAAAAQbri1AgAAAAAAAPyOphQAAAAAAAD8jqYUAAAAAAAA/I6mFAAAAAAAAPyOphQAAAAAAAD8jqYUAAAAAAAA/I6mFAAAAAAAAPyOphQAAAAAAAD8jqYUAAAAAAAA/I6mFAAAAAAAAPyOphQAAAAAAAD8jqYUAAAAAAAA/I6mFAAAAAAAAPyOphQAAAAAAAD8jqYUAAAAAAAA/I6mFAAAAAAAAPwuMtAB+JoxRpJUWVnpk/07nU5VV1ersrJSUVFRPnmNYGK3fCX75Uy+4c1u+Ur2y5l8ve9cDXGuprADX9dPEr+r4Y58w5/dcibf8Ge3nH2d78XWT2HflKqqqpIkpaenBzgSAAAQyqqqqpSQkBDoMPyC+gkAAHjDheony4T5234ul0tHjx5Vu3btZFmW1/dfWVmp9PR0ffXVV4qPj/f6/oON3fKV7Jcz+YY3u+Ur2S9n8vU+Y4yqqqqUlpYmh8MeVz7wdf0k8bsa7sg3/NktZ/INf3bL2df5Xmz9FPZnSjkcDl155ZU+f534+Hhb/OKeY7d8JfvlTL7hzW75SvbLmXy9yy5nSJ3jr/pJ4nc13JFv+LNbzuQb/uyWsy/zvZj6yR5v9wEAAAAAACCo0JQCAAAAAACA39GUukwxMTHKzc1VTExMoEPxC7vlK9kvZ/INb3bLV7JfzuSLUGG3uSPf8Ga3fCX75Uy+4c9uOQdLvmF/oXMAAAAAAAAEH86UAgAAAAAAgN/RlAIAAAAAAIDf0ZQCAAAAAACA39GUuoDf/va3ys7OVlxcnBITE5scc/jwYd12222Ki4tTp06d9MQTT6iurq7F/Z44cULjxo1TfHy8EhMTNWHCBJ08edIHGVyeNWvWyLKsJpdNmzY1u90NN9zQaPzEiRP9GPml6969e6PYn3vuuRa3OXPmjCZNmqQOHTqobdu2uuOOO1RaWuqniC/PwYMHNWHCBPXo0UNt2rRRr169lJubq9ra2ha3C6U5XrRokbp3767Y2FhlZWVp48aNLY5fuXKl+vXrp9jYWA0YMEDvvfeenyK9fLNmzdK3v/1ttWvXTp06ddKoUaO0Z8+eFrdZvnx5o7mMjY31U8SX55lnnmkUe79+/VrcJpTnt6njk2VZmjRpUpPjQ3FuP/30U/3whz9UWlqaLMvS22+/3eB5Y4ymT5+uzp07q02bNho+fLj27t17wf229jiAy0cNRQ0VzjWUHeonyT41FPVTeNdPUvjXUKFcP9GUuoDa2lqNHj1aDz30UJPP19fX67bbblNtba3WrVunV155RcuXL9f06dNb3O+4ceO0c+dO5eXl6W9/+5s+/fRTPfDAA75I4bJkZ2eruLi4wXLfffepR48eGjJkSIvb3n///Q22mz17tp+ivnwzZ85sEPsvf/nLFsc/9thj+utf/6qVK1fqk08+0dGjR/WTn/zET9FensLCQrlcLi1ZskQ7d+7U/PnztXjxYv3617++4LahMMevv/66pkyZotzcXG3dulUDBw5UTk6OysrKmhy/bt06jR07VhMmTFBBQYFGjRqlUaNGaceOHX6O/NJ88sknmjRpkjZs2KC8vDw5nU6NGDFCp06danG7+Pj4BnN56NAhP0V8+a6++uoGsa9du7bZsaE+v5s2bWqQa15eniRp9OjRzW4TanN76tQpDRw4UIsWLWry+dmzZ+uFF17Q4sWLlZ+fryuuuEI5OTk6c+ZMs/ts7XEA3kENRQ0VzjVUuNdPkr1qKOqn8K6fpPCvoUK6fjK4KMuWLTMJCQmN1r/33nvG4XCYkpISz7oXX3zRxMfHm5qamib3tWvXLiPJbNq0ybPu/fffN5ZlmSNHjng9dm+qra01ycnJZubMmS2Ou/76680jjzzin6C8rFu3bmb+/PkXPb68vNxERUWZlStXetbt3r3bSDLr16/3QYS+N3v2bNOjR48Wx4TKHA8dOtRMmjTJ87i+vt6kpaWZWbNmNTn+pz/9qbntttsarMvKyjIPPvigT+P0lbKyMiPJfPLJJ82Oae74Fgpyc3PNwIEDL3p8uM3vI488Ynr16mVcLleTz4fy3BpjjCTz1ltveR67XC6Tmppq5syZ41lXXl5uYmJizIoVK5rdT2uPA/Auaig3aqjGwq2GCqf6yRh711DUTw2F09yeE841VKjVT5wpdZnWr1+vAQMGKCUlxbMuJydHlZWV2rlzZ7PbJCYmNniXbPjw4XI4HMrPz/d5zJfjL3/5i44fP67x48dfcOyf/vQndezYUddcc42mTZum6upqP0ToHc8995w6dOigQYMGac6cOS1+lGDLli1yOp0aPny4Z12/fv3UtWtXrV+/3h/hel1FRYWSkpIuOC7Y57i2tlZbtmxpMDcOh0PDhw9vdm7Wr1/fYLzk/jcdynMp6YLzefLkSXXr1k3p6em6/fbbmz1+BaO9e/cqLS1NPXv21Lhx43T48OFmx4bT/NbW1urVV1/Vz3/+c1mW1ey4UJ7bbzpw4IBKSkoazGFCQoKysrKancNLOQ7AP6ihmhfsf19bYucaKlzqJ4kaivqpoXCaW8l+NVSw10+RXt2bDZWUlDQopiR5HpeUlDS7TadOnRqsi4yMVFJSUrPbBIuXX35ZOTk5uvLKK1sc97Of/UzdunVTWlqaPv/8cz355JPas2eP/vznP/sp0kv38MMPa/DgwUpKStK6des0bdo0FRcXa968eU2OLykpUXR0dKPrZaSkpAT9fDZl3759WrhwoebOndviuFCY42PHjqm+vr7Jf6OFhYVNbtPcv+lQnEuXy6VHH31U3/nOd3TNNdc0Oy4jI0NLly5VZmamKioqNHfuXGVnZ2vnzp0X/LceaFlZWVq+fLkyMjJUXFysGTNm6Hvf+5527Nihdu3aNRofTvP79ttvq7y8XPfee2+zY0J5bptybp5aM4eXchyAf1BDNS0U/r42x841VDjVT5K9ayjqp/CunyT71VDBXj/Zsin11FNP6fnnn29xzO7duy94sbdQdik/g6KiIq1atUpvvPHGBfd//rUdBgwYoM6dO+umm27S/v371atXr0sP/BK1Jt8pU6Z41mVmZio6OloPPvigZs2apZiYGF+H6jWXMsdHjhzRLbfcotGjR+v+++9vcdtgm2M0NmnSJO3YsaPFawRI0rBhwzRs2DDP4+zsbPXv319LlizRs88+6+swL8vIkSM932dmZiorK0vdunXTG2+8oQkTJgQwMt97+eWXNXLkSKWlpTU7JpTnFsGJGooaqinhVENRP4H6KbzrJ4kaKtjYsik1derUFruiktSzZ8+L2ldqamqjK9Cfu2NIampqs9t88+JgdXV1OnHiRLPbeNul/AyWLVumDh066Ec/+lGrXy8rK0uS+12kQPzBvZw5z8rKUl1dnQ4ePKiMjIxGz6empqq2tlbl5eUN3ukrLS3123w2pbU5Hz16VDfeeKOys7P10ksvtfr1Aj3HTenYsaMiIiIa3cWnpblJTU1t1fhgNXnyZM8FgFv7bk5UVJQGDRqkffv2+Sg630lMTFTfvn2bjT1c5vfQoUP66KOPWv3OeijPrfTvv6ulpaXq3LmzZ31paamuvfbaJre5lOMAmkcNRQ3VlHCqoaif3OxaQ1E/hXf9JNmzhgr6+smrV6gKYxe6SGdpaaln3ZIlS0x8fLw5c+ZMk/s6d5HOzZs3e9atWrUqqC/S6XK5TI8ePczUqVMvafu1a9caSWb79u1ejsz3Xn31VeNwOMyJEyeafP7cRTrffPNNz7rCwsKQukhnUVGR6dOnj7nzzjtNXV3dJe0jWOd46NChZvLkyZ7H9fX1pkuXLi1epPMHP/hBg3XDhg0LmQs5ulwuM2nSJJOWlma+/PLLS9pHXV2dycjIMI899piXo/O9qqoq0759e/OHP/yhyedDfX7Pyc3NNampqcbpdLZqu1CbWzVzoc65c+d61lVUVFzUhTpbcxyAd1FDUUOFaw0VzvWTMfaqoaif7FE/GWOPGirU6ieaUhdw6NAhU1BQYGbMmGHatm1rCgoKTEFBgamqqjLGuH85r7nmGjNixAizbds288EHH5jk5GQzbdo0zz7y8/NNRkaGKSoq8qy75ZZbzKBBg0x+fr5Zu3at6dOnjxk7dqzf87tYH330kZFkdu/e3ei5oqIik5GRYfLz840xxuzbt8/MnDnTbN682Rw4cMC88847pmfPnua6667zd9ittm7dOjN//nyzbds2s3//fvPqq6+a5ORkc/fdd3vGfDNfY4yZOHGi6dq1q/n73/9uNm/ebIYNG2aGDRsWiBRaraioyPTu3dvcdNNNpqioyBQXF3uW88eE6hy/9tprJiYmxixfvtzs2rXLPPDAAyYxMdFzt6e77rrLPPXUU57xn332mYmMjDRz5841u3fvNrm5uSYqKsp88cUXgUqhVR566CGTkJBg1qxZ02Auq6urPWO+mfOMGTPMqlWrzP79+82WLVvMnXfeaWJjY83OnTsDkUKrTJ061axZs8YcOHDAfPbZZ2b48OGmY8eOpqyszBgTfvNrjLsg6Nq1q3nyyScbPRcOc1tVVeX5WyvJzJs3zxQUFJhDhw4ZY4x57rnnTGJionnnnXfM559/bm6//XbTo0cPc/r0ac8+vv/975uFCxd6Hl/oOADfoIZyo4YKzxoq3OsnY+xVQ1E/hX/9ZEx411ChXD/RlLqAe+65x0hqtHz88ceeMQcPHjQjR440bdq0MR07djRTp05t0Hn9+OOPjSRz4MABz7rjx4+bsWPHmrZt25r4+Hgzfvx4T5EWjMaOHWuys7ObfO7AgQMNfiaHDx821113nUlKSjIxMTGmd+/e5oknnjAVFRV+jPjSbNmyxWRlZZmEhAQTGxtr+vfvb373u981eMf2m/kaY8zp06fNL37xC9O+fXsTFxdnfvzjHzcoSoLZsmXLmvwdP/9EylCf44ULF5quXbua6OhoM3ToULNhwwbPc9dff7255557Gox/4403TN++fU10dLS5+uqrzbvvvuvniC9dc3O5bNkyz5hv5vzoo496fj4pKSnm1ltvNVu3bvV/8JdgzJgxpnPnziY6Otp06dLFjBkzxuzbt8/zfLjNrzHus0IkmT179jR6Lhzm9tzfzG8u5/JyuVzm6aefNikpKSYmJsbcdNNNjX4W3bp1M7m5uQ3WtXQcgG9QQ7lRQ4VnDWWH+skY+9RQ1E/hXz8ZE941VCjXT5YxxlzeBwABAAAAAACA1nEEOgAAAAAAAADYD00pAAAAAAAA+B1NKQAAAAAAAPgdTSkAAAAAAAD4HU0pAAAAAAAA+B1NKQAAAAAAAPgdTSkAAAAAAAD4HU0pAAAAAAAA+B1NKQAAAAAAAPgdTSkAIe/ee+/VqFGj/Pqay5cvV2Jiol9fEwAAwFuonwAEA5pSAAAAAAAA8DuaUgDCyg033KCHH35Yv/rVr5SUlKTU1FQ988wzDcZYlqUXX3xRI0eOVJs2bdSzZ0+9+eabnufXrFkjy7JUXl7uWbdt2zZZlqWDBw9qzZo1Gj9+vCoqKmRZlizLavQaAAAAoYL6CUCg0JQCEHZeeeUVXXHFFcrPz9fs2bM1c+ZM5eXlNRjz9NNP64477tD27ds1btw43Xnnndq9e/dF7T87O1sLFixQfHy8iouLVVxcrMcff9wXqQAAAPgF9ROAQKApBSDsZGZmKjc3V3369NHdd9+tIUOGaPXq1Q3GjB49Wvfdd5/69u2rZ599VkOGDNHChQsvav/R0dFKSEiQZVlKTU1Vamqq2rZt64tUAAAA/IL6CUAg0JQCEHYyMzMbPO7cubPKysoarBs2bFijxxf7Th8AAEC4oX4CEAg0pQCEnaioqAaPLcuSy+W66O0dDveh0RjjWed0Or0THAAAQBCifgIQCDSlANjShg0bGj3u37+/JCk5OVmSVFxc7Hl+27ZtDcZHR0ervr7et0ECAAAEEeonAN5GUwqALa1cuVJLly7Vl19+qdzcXG3cuFGTJ0+WJPXu3Vvp6el65plntHfvXr377rv6/e9/32D77t276+TJk1q9erWOHTum6urqQKQBAADgN9RPALyNphQAW5oxY4Zee+01ZWZm6o9//KNWrFihq666SpL79PUVK1aosLBQmZmZev755/Wb3/ymwfbZ2dmaOHGixowZo+TkZM2ePTsQaQAAAPgN9RMAb7PM+R/6BQAbsCxLb731lkaNGhXoUAAAAEIC9RMAX+BMKQAAAAAAAPgdTSkAAAAAAAD4HR/fAwAAAAAAgN9xphQAAAAAAAD8jqYUAAAAAAAA/I6mFAAAAAAAAPyOphQAAAAAAAD8jqYUAAAAAAAA/I6mFAAAAAAAAPyOphQAAAAAAAD8jqYUAAAAAAAA/I6mFAAAAAAAAPzu/wMJSRvH7jUkwAAAAABJRU5ErkJggg==", 64 | "text/plain": [ 65 | "
" 66 | ] 67 | }, 68 | "metadata": {}, 69 | "output_type": "display_data" 70 | } 71 | ], 72 | "source": [ 73 | "import torch\n", 74 | "import torch.nn.functional as F\n", 75 | "import matplotlib.pyplot as plt\n", 76 | "import numpy as np\n", 77 | "\n", 78 | "\n", 79 | "x = torch.linspace(-10, 10, 100)\n", 80 | "\n", 81 | "\n", 82 | "relu_output = F.relu(x) # relu\n", 83 | "gelu_output = F.gelu(x) # gelu\n", 84 | "\n", 85 | "\n", 86 | "x_np = x.numpy()\n", 87 | "relu_output_np = relu_output.numpy()\n", 88 | "gelu_output_np = gelu_output.numpy()\n", 89 | "\n", 90 | "# Plot the results\n", 91 | "plt.figure(figsize=(12, 6))\n", 92 | "\n", 93 | "# Plot ReLU\n", 94 | "plt.subplot(1, 2, 1)\n", 95 | "plt.plot(x_np, relu_output_np, label='ReLU')\n", 96 | "plt.title('ReLU Activation Function')\n", 97 | "plt.xlabel('Input')\n", 98 | "plt.ylabel('Output')\n", 99 | "plt.grid(True)\n", 100 | "plt.legend()\n", 101 | "\n", 102 | "# Plot GELU\n", 103 | "plt.subplot(1, 2, 2)\n", 104 | "plt.plot(x_np, gelu_output_np, label='GELU', color='orange')\n", 105 | "plt.title('GELU Activation Function')\n", 106 | "plt.xlabel('Input')\n", 107 | "plt.ylabel('Output')\n", 108 | "plt.grid(True)\n", 109 | "plt.legend()\n", 110 | "\n", 111 | "plt.tight_layout()\n", 112 | "plt.show()" 113 | ] 114 | }, 115 | { 116 | "cell_type": "markdown", 117 | "metadata": {}, 118 | "source": [ 119 | "#### as you can see the smooth curve around zero in gelu plot that helps gelu in better performance !" 120 | ] 121 | } 122 | ], 123 | "metadata": { 124 | "kernelspec": { 125 | "display_name": "Python 3", 126 | "language": "python", 127 | "name": "python3" 128 | }, 129 | "language_info": { 130 | "name": "python", 131 | "version": "3.9.6" 132 | } 133 | }, 134 | "nbformat": 4, 135 | "nbformat_minor": 2 136 | } 137 | -------------------------------------------------------------------------------- /02-optimization-and-regularization/08-adam/loss.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### Loss Function (will be talking in terms of probability) (working on this)\n", 8 | "\n", 9 | "> **What is a loss fucntion ?**\n", 10 | "\n", 11 | "- while training the model the loss fucntion is used to calculate the mismatch between actual vlaues and predicted values.\n", 12 | "\n", 13 | "- it requires to parameters to calulate it Y_hat(predicted) and y(actual value).\n", 14 | "\n", 15 | "- the goal is to find paramaters values that minimize the loss.\n", 16 | "\n", 17 | "> **Maximum Likelihood**\n", 18 | "\n", 19 | "- here we compute the distribution over a outputs.(y_hat)\n", 20 | "\n", 21 | "- instead of guessing single outcomes the model predicts a range of possible outcomes and how likely each one is.\n", 22 | "\n", 23 | "- ex. if we guess someone's height it 6ft but we are guessing so it might be less or more. So we represent this using bell curve.\n", 24 | "\n", 25 | " - to plot this we need mean and variance.\n", 26 | "\n", 27 | "> **Maximum Likelihood Criterion**\n", 28 | "\n", 29 | "- Maximum Likeihood Estimation : we want to get the right of values for our model's parameter in order to reduce the gap between our prediceted output and actual outputs.\n", 30 | "\n", 31 | "$$\n", 32 | "\\hat{\\phi} = \\argmax_{\\phi} \\left[ \\prod_{i=1}^{n} \\Pr(y_i \\mid x_i, \\phi) \\right]\n", 33 | "$$\n", 34 | "\n", 35 | "**Break Down of above equaton**\n", 36 | "\n", 37 | "1. **Model Prediction (Pr(y_i | x_i, φ)):**\n", 38 | " - Pr(y_i | x_i, φ) represents the probability that our model predicts the actual outcome (y_i) given the input data (x_i) and the model parameters (φ).\n", 39 | "\n", 40 | " - Think of this as the model saying, \"Based on my current parameters, there's an X% chance that this specific input (x_i) will result in this specific output (y_i).\"\n", 41 | "\n", 42 | "2. **Product of Probabilities:**\n", 43 | " - ∏(i=1 to n) means we multiply these probabilities together for all the data points (from 1 to n).\n", 44 | "\n", 45 | " - This gives us a combined probability that shows how likely it is that all our predictions match the actual data.\n", 46 | "\n", 47 | "3. **Maximizing the Combined Probability:**\n", 48 | " - argmax_φ means we want to find the specific parameters (φ) that make this combined probability as large as possible.\n", 49 | "\n", 50 | " - inshort, we're adjusting our model's paramters to [maximize] the chances that it predicts the actual outcomes correctly.\n", 51 | "\n", 52 | "> **Maximum log Likelihood**\n", 53 | "\n", 54 | "- MLC uses the product of the ouputs prob and it can genrate the very small values.\n", 55 | "\n", 56 | "- and aslo it is not a numericaly stable.\n", 57 | "\n", 58 | "**Example**\n", 59 | "\n", 60 | "If you flip a coin 10 times and each flip has a probability of 0.5 for heads, the product of probabilities for all heads is:\n", 61 | "\n", 62 | "$$\n", 63 | "0.5 \\times 0.5 \\times 0.5 \\times \\ldots = (0.5)^{10} = 0.0009765625\n", 64 | "$$\n", 65 | "\n", 66 | "This is a very small number, and if you had more flips, it would get even smaller.\n", 67 | "\n", 68 | "Taking the log of each probability and summing them:\n", 69 | "\n", 70 | "$$\n", 71 | "\\log(0.5) + \\log(0.5) + \\log(0.5) + \\ldots = 10 \\times \\log(0.5) \\approx 10 \\times (-0.693) = -6.93\n", 72 | "$$\n", 73 | "\n", 74 | "This sum is a manageable number and avoids the numerical issues of multiplying many small numbers.\n", 75 | "\n", 76 | "- This is why using log-liklihood is more practical where we take the sum and not product.\n", 77 | "\n", 78 | "> **Negative log Likelihood**\n", 79 | "\n", 80 | "- this reframed the problem as minimizes prolem.\n", 81 | "\n", 82 | "- most of optimizestion algorithms are the build to solve the minimizestion problem.\n", 83 | "\n", 84 | "- this changes our goal from finding the maximum value of the log-likelihood to finding the minimum value of the negative log-likelihood.\n", 85 | "\n", 86 | "\n", 87 | "- The negative log-likelihood is calculated as:\n", 88 | "\n", 89 | "$$\n", 90 | "-\\sum_{i=1}^{n} \\log \\Pr(y_i \\mid x_i, \\phi)\n", 91 | "$$\n", 92 | "\n", 93 | "- To find the parameter values (\\(\\phi\\)) that minimize the negative log-likelihood, we use:\n", 94 | "\n", 95 | "$$\n", 96 | "\\hat{\\phi} = \\argmin_{\\phi} \\left[ -\\sum_{i=1}^{n} \\log \\Pr(y_i \\mid x_i, \\phi) \\right]\n", 97 | "$$\n", 98 | "\n", 99 | "- comapare to **Maximum log Likelihood** its now minizing problem with neg.\n", 100 | "\n", 101 | "- this is the final formula of **LOSS.**\n", 102 | "\n", 103 | "> **Inference**\n", 104 | "\n", 105 | "- instead of the network directly predicting a specific value (y), it now predicts a range of possible values with different likelihoods. This is called a probability distribution.\n", 106 | "\n", 107 | "- During inference, we need a single \"best guess\" answer. We choose the single value from the distribution that has the highest probability.\n", 108 | "\n", 109 | "- **finding the Best Guess:**\n", 110 | "- We use the argmax operation to find this best guess:\n", 111 | "$$\\hat{y} = \\arg\\max_y \\text{Pr}(y|f[x,\\hat{\\phi}])$$\n", 112 | "- This means \"find the value of $y$ that gives the highest probability, given our model's output.\"\n", 113 | "\n", 114 | "> **IN-short**\n", 115 | "\n", 116 | "- Log-Likelihood: A measure of model fit that we want to maximize.\n", 117 | "- Negative Log-Likelihood: The negative of the log-likelihood, which we want to minimize.\n", 118 | " \n", 119 | "> **Recipe for constructing loss function**\n", 120 | "\n", 121 | "- This recipe outlines the process of creating loss functions for training probabilistic \n", 122 | "neural networks using the maximum likelihood approach.\n", 123 | "\n", 124 | "\n", 125 | "**1. Choose a Suitable Probability Distribution**\n", 126 | "\n", 127 | "$$\\text{Pr}(y|\\theta)$$\n", 128 | "\n", 129 | "- Choose a probability distribution that's appropriate for your prediction task.\n", 130 | "- This distribution is defined over the domain of the predictions $y$.\n", 131 | "$\\theta$ represents the parameters of this distribution.\n", 132 | "\n", 133 | "- Examples:\n", 134 | "\n", 135 | "- For regression tasks, you might choose a Normal (Gaussian) distribution.\n", 136 | "\n", 137 | "- For binary classification, you might choose a Bernoulli distribution.\n", 138 | "- For multi-class classification, you might choose a Categorical distribution.\n", 139 | "\n", 140 | "**2. Set the Machine Learning Model to Predict Distribution Parameters**\n", 141 | "\n", 142 | "$$\\theta = f[x, \\phi]$$\n", 143 | "$$\\text{Pr}(y|\\theta) = \\text{Pr}(y|f[x, \\phi])$$\n", 144 | "\n", 145 | "- Your neural network $f[x, \\phi]$ is set to predict the parameters $\\theta$ of the chosen distribution.\n", 146 | "$x$ is the input to the network.\n", 147 | "$\\phi$ represents the parameters of the neural network itself.\n", 148 | "\n", 149 | "- Example:\n", 150 | "\n", 151 | "- For a Normal distribution, the network might output the mean $\\mu$ and standard deviation $\\sigma$.\n", 152 | "\n", 153 | "**3. Train the Model by Minimizing Negative Log-Likelihood**\n", 154 | "$$\\hat{\\phi} = \\arg\\min_{\\phi} L[\\phi] = \\arg\\min_{\\phi} -\\sum_{i=1}^N \\log \\text{Pr}(y_i|f[x_i, \\phi])$$\n", 155 | "\n", 156 | "- We want to find the network parameters $\\hat{\\phi}$ that minimize the negative log-likelihood loss function.\n", 157 | "\n", 158 | "This is done over the entire training dataset of pairs ${x_i, y_i}$.\n", 159 | "$N$ is the number of training examples.\n", 160 | "\n", 161 | "- **Why negative log-likelihood?**\n", 162 | "\n", 163 | "- Using log transforms the product of probabilities into a sum, which is computationally easier to handle.\n", 164 | "\n", 165 | "- The negative sign turns the maximization problem into a minimization problem, \n", 166 | "which is conventionally used in optimization algorithms.\n", 167 | "\n", 168 | "**4. Perform Inference**\n", 169 | "\n", 170 | "- For a new test example $x$, you have two options:\n", 171 | "\n", 172 | "- Return the full distribution: $\\text{Pr}(y|f[x,\\hat{\\phi}])$\n", 173 | "\n", 174 | "- This gives you the complete probability distribution over possible outputs.\n", 175 | "\n", 176 | "\n", 177 | "- Return the maximum of this distribution:\n", 178 | "$$\\hat{y} = \\arg\\max_y \\text{Pr}(y|f[x,\\hat{\\phi}])$$\n", 179 | "\n", 180 | "- This gives you a single point estimate, which is often more practical for decision-making.\n", 181 | "\n", 182 | "\n", 183 | "> **IN-Short**\n", 184 | "\n", 185 | "- This approach allows the model to learn to predict not just a single value, but a full probability distribution over possible outputs.\n", 186 | "\n", 187 | "- By minimizing the negative log-likelihood, we're effectively maximizing the probability of the observed data given our model.\n", 188 | "- This method naturally handles uncertainty: the predicted distribution will be wider (more uncertain) for inputs the model is less confident about.\n", 189 | "- The choice of distribution in step 1 is crucial and should reflect the nature of your data and task.\n", 190 | "- This recipe forms the basis for many modern machine learning approaches.\n", 191 | "\n", 192 | "\n", 193 | "> **Ex. 1: Univariate regression**\n", 194 | "\n", 195 | "\n", 196 | "\n", 197 | "\n", 198 | "\n", 199 | "> **Ex. 2: Binary Classification**\n", 200 | "\n", 201 | "> **Ex. 3: Multiclass classification**\n", 202 | "\n", 203 | "> **Cross Entropy Loss**" 204 | ] 205 | }, 206 | { 207 | "cell_type": "code", 208 | "execution_count": 28, 209 | "metadata": {}, 210 | "outputs": [ 211 | { 212 | "name": "stdout", 213 | "output_type": "stream", 214 | "text": [ 215 | "[1, 2, 2]\n" 216 | ] 217 | }, 218 | { 219 | "data": { 220 | "text/plain": [ 221 | "2" 222 | ] 223 | }, 224 | "execution_count": 28, 225 | "metadata": {}, 226 | "output_type": "execute_result" 227 | } 228 | ], 229 | "source": [] 230 | }, 231 | { 232 | "cell_type": "code", 233 | "execution_count": null, 234 | "metadata": {}, 235 | "outputs": [], 236 | "source": [] 237 | } 238 | ], 239 | "metadata": { 240 | "kernelspec": { 241 | "display_name": "Python 3", 242 | "language": "python", 243 | "name": "python3" 244 | }, 245 | "language_info": { 246 | "codemirror_mode": { 247 | "name": "ipython", 248 | "version": 3 249 | }, 250 | "file_extension": ".py", 251 | "mimetype": "text/x-python", 252 | "name": "python", 253 | "nbconvert_exporter": "python", 254 | "pygments_lexer": "ipython3", 255 | "version": "3.9.6" 256 | } 257 | }, 258 | "nbformat": 4, 259 | "nbformat_minor": 2 260 | } 261 | -------------------------------------------------------------------------------- /03-sequence-modeling/01-rnn/simplernn.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch import nn 3 | from torch.nn import functional as F 4 | 5 | 6 | 7 | class RNNModel(nn.Module): 8 | 9 | 10 | def __init__(self, rnn_layer, **kwargs): 11 | super(RNNModel, self).__init__(**kwargs) 12 | self.rnn = rnn_layer 13 | if self.rnn.bidirectional: 14 | self.num_directions = 2 15 | else: 16 | self.num_directions = 1 17 | self.linear = nn.Linear( 18 | self.num_directions * self.rnn.hidden_size, self.rnn.input_size) 19 | 20 | # forward 21 | 22 | def forward(self, inputs, state): 23 | X = F.one_hot(inputs.T.long(), self.rnn.input_size) 24 | X = X.to(torch.float32) 25 | Y, state = self.rnn(X, state) 26 | output = self.linear(Y.reshape((-1, Y.shape[-1]))) 27 | return output, state # Change begin_state to state 28 | 29 | 30 | #begin state 31 | 32 | def begin_state(self, device, batch_size=1): 33 | tensor = torch.zeros((self.num_directions * self.rnn.num_layers, 34 | batch_size, self.rnn.hidden_size), 35 | device=device) 36 | if isinstance(self.rnn, nn.LSTM): 37 | return (tensor, tensor) 38 | else: 39 | return tensor -------------------------------------------------------------------------------- /03-sequence-modeling/03-learning-to-forget/learning.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# backgroung \n", 8 | "\n", 9 | "rnn suffers from the vanishing or exploding gradients problem and some them porblem solved by std lstm to some extend.\n", 10 | "lstms creats the bridge which help in solving the context problems through the cells.\n", 11 | "\n", 12 | "but lstms may suffers in input seq get too big. there is chance the magnitude of thst bridge carries may lead overflow and eventually failed. the proposed solution is an adaptive \"forget gate\" that allows an lstm cell to learn to reset itself at appropriate times, releasing internal resources.\n", 13 | "\n", 14 | "## standard lstm\n", 15 | "\n", 16 | "the basic unit in the hidden layer of an lstm network is the memory block, which contains:\n", 17 | "- one or more memory cells\n", 18 | "- a pair of adaptive, multiplicative gating units (input and output gates)\n", 19 | "\n", 20 | "each memory cell has a recurrently self-connected linear unit called the \"constant error carousel\" (cec), which helps prevent the vanishing gradient problem. the cell state, denoted as $s_c$, is updated as follows:\n", 21 | "\n", 22 | "$$\n", 23 | "s_{c}(t) = s_{c}(t-1) + y_{in}(t) \\cdot g(\\text{net}_{c}(t))\n", 24 | "$$\n", 25 | "\n", 26 | "where:\n", 27 | "- $y_{in}(t)$ is the input gate activation\n", 28 | "- $g(\\cdot)$ is a centered logistic sigmoid function with range $[-2, 2]$\n", 29 | "\n", 30 | "the cell output $y_c$ is calculated as:\n", 31 | "\n", 32 | "$$\n", 33 | "y_{c}(t) = y_{out}(t) \\cdot h(s_{c}(t))\n", 34 | "$$\n", 35 | "\n", 36 | "where:\n", 37 | "- $y_{out}(t)$ is the output gate activation\n", 38 | "- $h(\\cdot)$ is a centered sigmoid function with range $[-1, 1]$\n", 39 | "\n", 40 | "## solution: forget gates\n", 41 | "to address the issue of indefinite growth of cell states, the \"forget gate\" is introduced. the forget gate activation $y'_j$ is calculated similarly to other gates and is squashed using a logistic sigmoid function:\n", 42 | "\n", 43 | "$$\n", 44 | "y'_j (t) = f'_j \\left( \\sum_m w'_{jm} y_m(t-1) \\right)\n", 45 | "$$\n", 46 | "\n", 47 | "the revised update equation for the cell state $s_c$ in the extended lstm is:\n", 48 | "\n", 49 | "$$\n", 50 | "s_{cvj}(t) = y'_{j}(t) \\cdot s_{cvj}(t-1) + y_{in}(t) \\cdot g(\\text{net}_{cvj}(t))\n", 51 | "$$\n", 52 | "\n", 53 | "forget gates learn to reset the memory block when its contents are no longer useful, thereby preventing unbounded growth of internal states.\n", 54 | "\n", 55 | "## experiments\n", 56 | "\n", 57 | "to test the effectiveness of forget gates, the authors extended the embedded reber grammar (erg) problem to create a continual version, where the network must handle concatenated sequences without explicit resets. the results show that extended lstm with forget gates can solve the task more efficiently than standard lstm, especially when combined with learning rate decay.\n" 58 | ] 59 | }, 60 | { 61 | "cell_type": "markdown", 62 | "metadata": {}, 63 | "source": [] 64 | } 65 | ], 66 | "metadata": { 67 | "language_info": { 68 | "name": "python" 69 | } 70 | }, 71 | "nbformat": 4, 72 | "nbformat_minor": 2 73 | } 74 | -------------------------------------------------------------------------------- /03-sequence-modeling/04-word2vec/word2vec.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "attachments": { 5 | "word2vec.png": { 6 | "image/png": "iVBORw0KGgoAAAANSUhEUgAABJgAAAIKCAYAAABvKvXCAAAYKGlDQ1BJQ0MgUHJvZmlsZQAAWIWVeQdUFE2zds/OBliWJeeck+QMknPOGYEl57BkMBFEgoogoAiooIKggoEkYgBBQBFBBQyIBANJBQUUAblD0Pe77/3Pf8/tc2bm2erq6qe7qrundgDgYCVFRoaiaAEIC48h2xrp8jq7uPLiJgEMqAAl4AXUJJ/oSB1ra3OAlD/P/16WhwC0+XwuuWnrf9b/fwudr1+0DwCQNYK9faN9whBcDwCa3SeSHAMAph+RC8THRG7iRQQzkhGCAGApNnHANubcxN7bWGZLx95WD8H6AFAQSCRyAADETfu8cT4BiB1iJFJHH+4bFI6opiNY0yeQ5AsAeweisyssLGITzyNY1Ps/7AT8N5vef22SSAF/8fZYtgqFflB0ZCgp8f84Hf97CQuN/dMHP3IRAsnGtptjRubtUkiE2SYmILg13NvSCsH0CO4O8t3S38SvA2ONHXb053yi9ZA5A8wAoIAvSd8MwchcophjQxx0drAcibzVFtFHWQbFmNjvYG9yhO2OfVRceKil+Y6dzEA/kz/4jF+0gd0fHf8gQxMEI5GGqk8KtHfa5onqiAtytEQwEcH90SF2ZjttR5MC9Sz/6JBjbTc5CyJ40Z9saLutA7OGRf8ZFyzlQ9rqixXB2jGB9sbbbWFnv2hn8z8cfP30DbY5wL5+4Q473GAkunRtd9pmRIZa7+jDZ/xCjWy35xm+Fh1n96ftsxgkwLbnAZ4IJplab/OHlyNjrO23uaHRwBzoAX1kBcUilzeIAMEgqG+uaQ75tV1jCEiADAKAH5Dckfxp4bRVE47c7UAS+IwgPxD9t53uVq0fiEPk63+l23dJ4L9VG7fVIgR8RHAYmh2tiVZHmyN3beSSQ6ugVf+046X50yvWAKuPNcYaYsX+8vBBWIciFxkE/T9kZsjTDxndJpfwP2P4xx7mI2YAM4EZxIxhXgFH8H7Lyo6WZ1Aq+V/MeYEFGEOsGe6MzhuxOf1HBy2MsFZE66I1EP4IdzQzmh1IohWQkeigtZCxKSLS/2QY+5fbP3P57/42Wf/neHbkRHGi4g4L77+e0fur9W8rev8xR77I0+zfmnAmfBPugtvgHrgVbgK88D24Ge6F72ziv5HwfisS/vRmu8UtBLET9EdH5rLMtMza/+idtMOAvOVvEOOXELO5IPQiIhPJQQGBMbw6yI7sx2sS7iO1i1dORlYVgM39fXv7+G67tW9DzE//kflNAbAbiXHK/n9kwScAqOkEgCX7H5mwGwBsuwC4/swnlhy3LUNv3jAAD2iQlcEGuIEAEEXGJAeUgDrQBgbAFFgBe+ACPJBZDwRhCOt4sBekgAyQA46DQnAanAXnwSVwFdwATaAVtIGH4DHoB4PgDRIbH8AsmAfLYBWCIBxEDTFAbBAPJARJQHKQCqQJGUDmkC3kAnlBAVA4FAvthdKgHCgfOg2VQ9XQdegW1Ab1QAPQK2gcmoa+Qb9QMIqAYkRxoYRR0igVlA7KDGWP2oMKQEWhklDpqGOoU6gK1BVUI6oN9Rg1iBpDzaKWYABTwcwwHywJq8B6sBXsCvvDZHg/nA0XwRVwLdyC+Po5PAbPwStoLJoBzYuWROLTGO2A9kFHofejj6BPoy+hG9Ed6OfocfQ8+jeGGsOJkcCoYUwwzpgATDwmA1OEqcQ0YDqRtfMBs4zFYpmxIlhlZG26YIOxydgj2DJsHfY+dgA7iV3C4XBsOAmcBs4KR8LF4DJwxbgruHu4Z7gPuJ8UVBQ8FHIUhhSuFOEUqRRFFDUUdymeUXyiWKWkpRSiVKO0ovSlTKTMpbxA2UL5lPID5SqeDi+C18Db44PxKfhT+Fp8J34E/52KioqfSpXKhiqI6iDVKaprVN1U41QrBHqCOEGP4E6IJRwjVBHuE14RvlNTUwtTa1O7UsdQH6Oupn5APUr9k8hAlCKaEH2JB4glxEbiM+IXGkoaIRodGg+aJJoimps0T2nmaClphWn1aEm0+2lLaG/RDtMu0THQydJZ0YXRHaGroeuhm6LH0QvTG9D70qfTn6d/QD/JADMIMOgx+DCkMVxg6GT4wIhlFGE0YQxmzGG8ytjHOM9Ez6TA5MiUwFTCdIdpjBlmFmY2YQ5lzmW+wTzE/IuFi0WHxY8li6WW5RnLD1YOVm1WP9Zs1jrWQdZfbLxsBmwhbHlsTWxv2dHs4uw27PHsZ9g72ec4GDnUOXw4sjlucLzmRHGKc9pyJnOe5+zlXOLi5jLiiuQq5nrANcfNzK3NHcxdwH2Xe5qHgUeTJ4ingOcezwwvE68ObyjvKd4O3nk+Tj5jvli+cr4+vlV+EX4H/lT+Ov63AngBFQF/gQKBdoF5QR5BC8G9gpcFXwtRCqkIBQqdFOoS+iEsIuwkfFi4SXhKhFXERCRJ5LLIiCi1qJZolGiF6AsxrJiKWIhYmVi/OEpcUTxQvET8qQRKQkkiSKJMYmAXZpfqrvBdFbuGJQmSOpJxkpclx6WYpcylUqWapL5IC0q7SudJd0n/llGUCZW5IPNGll7WVDZVtkX2m5y4nI9cidwLeWp5Q/kD8s3yCwoSCn4KZxReKjIoWigeVmxXXFdSViIr1SpNKwsqeymXKg+rMKpYqxxR6VbFqOqqHlBtVV1RU1KLUbuh9lVdUj1EvUZ9arfIbr/dF3ZPavBrkDTKNcY0eTW9NM9pjmnxaZG0KrQmtAW0fbUrtT/piOkE61zR+aIro0vWbdD9oaemt0/vvj6sb6Sfrd9nQG/gYHDaYNSQ3zDA8LLhvJGiUbLRfWOMsZlxnvGwCZeJj0m1ybypsuk+0w4zgpmd2WmzCXNxc7J5iwXKwtTihMWIpZBluGWTFbAysTph9dZaxDrK+rYN1sbapsTmo62s7V7bLjsGO0+7Grtle137XPs3DqIOsQ7tjjSO7o7Vjj+c9J3yncacpZ33OT92YXcJcml2xbk6ula6LrkZuBW6fXBXdM9wH9ojsidhT48Hu0eoxx1PGk+S500vjJeTV43XGsmKVEFa8jbxLvWe99HzOekz66vtW+A77afhl+/3yV/DP99/KkAj4ETAdKBWYFHgXJBe0OmghWDj4LPBP0KsQqpCNkKdQuvCKMK8wm6F04eHhHdEcEckRAxESkRmRI5FqUUVRs2TzciV0VD0nujmGEbkVac3VjT2UOx4nGZcSdzPeMf4mwl0CeEJvYniiVmJn5IMky4mo5N9ktv38u1N2Tu+T2df+X5ov/f+9gMCB9IPfDhodPBSCj4lJOVJqkxqfupimlNaSzpX+sH0yUNGhy5nEDPIGcOH1Q+fzURnBmX2ZclnFWf9zvbNfpQjk1OUs3bE58ijo7JHTx3dOOZ/rC9XKffMcezx8ONDeVp5l/Lp8pPyJ09YnGgs4C3ILlgs9CzsKVIoOnsSfzL25Ngp81PNxYLFx4vXTgeeHizRLakr5SzNKv1R5lv27Iz2mdqzXGdzzv46F3TuZblReWOFcEXReez5uPMfLzhe6LqocrG6kr0yp3K9Krxq7JLtpY5q5erqGs6a3Muoy7GXp6+4X+m/qn+1uVaytryOuS7nGrgWe23mutf1oRtmN9pvqtysrReqL21gaMhuhBoTG+ebApvGml2aB26Z3mpvUW9puC11u6qVr7XkDtOd3Lv4u+l3N+4l3Vu6H3l/ri2gbbLds/3NA+cHLzpsOvo6zTq7Hxo+fNCl03WvW6O7tUet59YjlUdNj5UeN/Yq9jY8UXzS0KfU1/hU+Wlzv2p/y8DugbvPtJ61Pdd//vCFyYvHg5aDA0MOQy+H3YfHXvq+nHoV+mrhddzr1TcHRzAj2W9p3xaNco5WvBN7VzemNHZnXH+8d8Ju4s2kz+Ts++j3ax/SP1J/LPrE86l6Sm6qddpwun/GbebDbOTs6lzGZ7rPpV9Ev9R/1f7aO+88/2GBvLDx7ch3tu9ViwqL7UvWS6PLYcurP7J/sv28tKKy0vXL6den1fg13NqpdbH1lt9mv0c2wjY2Iklk0tarAIxcKH9/AL5VAUDtAgADksfhidv5106Boc20AwBHyAClA6ugWTF4LAVOhsKFMg1/j4ClJhGbaPF0ofSPGBWZSlkAawhbH4cS53GuWR5t3ly+AQG8oKqQi3CISJiou5iuOJf4gsTDXcWSIVIa0tTS72TqZA/K2cjzyX9WuKV4SMlGmVP5g0qtaoKajjpe/fnuUg1fzV2a37SatPfq6OoSdN/p3dWvMSgzzDPab0wy0TJlNV0w6zWvtSizLLdqtZ60xdix2bM70DrCjmtOqy7AldKN6E69B71nyWPCs9/rPummd6VPsW+2X6J/QIB9oG6QQrB4CF8oWxhNOBy+GDER2R91m3wh+ljMgdiMuIYEdKJf0v29YJ/wfrUDJgfdUmJTj6UVpicfUjg0mZF72DpTKIsqG+SgjtAdFT2mmWt53CnPNd/1hHOBY6F9kc1Jy1NmxUandUs0S1XL5M9InhU/J1NuVpF2fuyiSeWVqtlquhqhy7JX1K/q11rUOV3zvB54I/JmfP3+htTGQ02ZzTm3clsKb5e2Vt6pv9t5b/j+WNtQe90D/w7Wju7OoofxXf7de3qcHtk8Nus1emLcZ/80qv/cwKvnVC+kB/WGTIYNXqq8EnpNfL3yZmrk5du20fPv0sYCxh0mLCct3lt9sPpo+kl1imVqbDp7RmFmbPbSXNJn4y8UX6q/Gn2dnD+/kPDN47vVosVS8HL7z8O/mtb1NzZ2/C8Lo+Fp9BhmEjtPAVMq4QOpSgljRHGaeNqH9GwMiYwvmOVYUlnfsityZHD2c7PzOPPm8bXyjwgsCS4LzQg/ETkvShbTFKcQfyFxdlewpKLkb6mH0sdknGR5ZD/J1crHKWgoQoqdStnKVioMKkOqxWpu6lzqI0gUuGuyaQ5rndR20xHWWdUd1Luuf8TAz3C3EZ3RR+NWk0LTODM/c2+LQMsIqzBrbxsrW3U7cXsOB6IjynHZ6ZPzkMsD11q3EvfsPUkeQZ7OXvokaW9WH8hnxnfQr8O/IaAysCgoPTgixCVUO0wknBqJhPHI0ajFaL4Yz9jiuLb4lwmTiXNJK3up9nHvFz3AexB78F1KQ2puGjnd45BDhvPhoMy0rLLsqzkNRxqP1h+7nnv1eHXexfxzJ0oKCgtzi7JOpp5KLI44HVASVHqw7N5ZsXOXKkTO5194fnGliniJvVqgRhyJA+WrmrX6dRbXXK6H3si4eb7+bsNA42jTVPP3Fvg2S6vEHfW72veU7/O1odom2rseNHRUdZY8PN51qDuph/wo5nFWb2sf89N9/W+fsT/XemE/6D90cPjiy6evFt/Qj0i+NR+NfHdy7Pb4s4nRyYn3sx8xiPdTpgdm6eZkPit+Ef5K8/Xn/MeF4W+Pvt9aLF86sOz4Q+TH8s/WlaRf6quENf316R3/S0GzqDLYAy2GwWEWsNO4GYoJygUqPEGIWofoSpNCe4VugH6DUYjJgDmY5RDrWbZ69k6Obs6HXLe5y3kSeHV5f/Fd4DfjnxXIFBQRbBfyEFoRLhCREXkkGiCGE6sSNxb/JJGxS3RXp6SPFJAqk94t/VImFnm7qZMzl5uST1PgVmhWtFWcUzqkzKPchLy1TKkeUGNWu6yuo/5st8/uLxrJmjjNEi0FrSHtJB1unWZdK91XeoF6G/oVBtaGlIYPjPYaKxjPmFSYupuxmg2ZF1rYWdJY9lilWatbL9rU2YbYidi9ty932OPI5vjCKdfZ2HnDpcE11E3Q7a170R7LPcseBZ5CnvVeOl6vSQne/N4vkX0k0M/IXzlANdAkiBQcFkIK1QqjDRsJvxgRFqkYuRb1gJwdbR3DFPMm9mycb7xw/MeEM4kGiSNJocmMyc/33t53d3/HgQcHb6VUpxalpaVHHHLLMDgsnonJfJFVnO2aI5izemTs6JNjt3LPHd+f55avdoL9xErBUOGNopMnj57KLy4/fbPkYenLspkzq+eoy3kr5M8bX3C/GFG5vyrr0pHqgzWky8pXiFe+Xf1cu3KNcJ37htxN6/rkhvrGn82qtyJbim9fa22+c/tuz72lNqP2Wx12nUtdRT3yj170Hu3z6jd5pvNCdyj0FXFkdqJvZmlxZdP/2//DbRasEgAnUpAMNQMABy0A8jqQPHMQyTvxAFhTA2CvClDC/gBF6AWQ2vjf8wNCThssoAJ0gBXwABEgA9SQ3NgKuAJ/JCdOAbngDKgFd8FTMA4WkcyRE5KFjCBPKB7Kg65A3dBHFBYlijJHRaPKkDxvA8nr4uBb8G+0EfoEegIjj8nEvMOqYYuxq0iG9YhCmaKKkoMyD0+Fz6LCUx0nsBOqqBWoW4kaxBYaFZrbtMa0b+hi6GnprzLoMwww2jMOMFkxPWP2ZP7JUsyqwTrKto+dg72Fw4OTkrOVK45bgfs7zw1eMp8i3xp/l0CRYKDQbmGi8JjITdFMMW9xHQnhXcRdq5JfpN5LD8o0yCbLycqNymcqKCp8VWxWyldOVPFVNVeTUWfZTdSQ0izRltA5qtuj99WAwpDJiM2Y00TQVMHM0jzK4pRlh9U3GwFbJ7tj9l2OaCd95wyXXjdmd+89NR7vvbAkOm+s95LPB98Rv5kAmkCzoMLgT6G7wwrCv0SaRtVEE2KiYl/HGyY0J0kmV+7j3V9ykDklLw2fnnJo6XBw5mx2zpGwYw15dCfYCz4XVZ/yPM1c0l929KzRuaWK3AuMFzMrly+FVH+7fPyqQR3dtYUbH+unGmebP7VMti7cY2nTe+DR6dVl16P1WPqJ2FOlgfDnP4fRrylHzr5jGL/7gTi1d1bnc93X1W9Ki4bL+B9Hfz5amfr1YfXVWv368d/eGzJb+8em/3GAAOgBG+AD4kAeaABjYA+8QBhIBlmgGFSDW+AxeAvmIQzEDslseT8RKoCuQX3QZxQNSh7likpD3UB9gHlgT/gCPIdWQqejBzFimBTMCOL7EhzABeIGKQwomimlKWvwYvgrVApU9wjWhEnqBCIlsZCGj+Yakr++oYunZ6ZvYnBk+My4jwnPdIpZkvkRSwQrC+t9tiB2Rvb7HBGcgpwjXMXczjysPK94y/h8+WUEgMALwctC6cLuIgpILjcj1it+EznFciXTpPZKx8j4yGrLEeT65LMVzBRZFBeUXil3qTSqVqgdUU/aHaeRpdms9UNHXtdXL0e/0qDR8LbRbeM7Jj2m4+YoC3FLR6tD1k02c3aC9p4OZY6jzvwuwa6N7rg9Th6nPTu9Bkjt3tU+mb5Bfrb+xgEugalB90OoQ73DWiPYI5Oi3kbrxlTH0cRHJjxO4kuO29u/X/HAhRSO1IJ0/KHkjLlMUtZETtJRmVzU8bf51wviihROfiu+XhJbpnbm17nKCrnzZRc+VYpUBV66VsNyufSqRu3na8U3VG/2NZAaV5srWmxawZ3qe+b3F9rPdng/VOvme4R+/ORJ3FNsf/YzwvOKQc9hi1ehb6refhrjmbB+n/Lx7jTL7PEvwvNPvhcsH1kxWZVbO7P+/vfCjv/RgBLQIqufD0gAJaAHrIEH4vt9yMovB/WgG4wi654ACUPa0B4oGSqB7kDjKErE6yRUIaofZoL94DtoTvRB9AzGBfMEq4e9g9PAtVGYU7yljMbT4K9RORJgQhN1FFGW+JOmk7aYLpbehcGE0ZTJhtmURZlVjE2R3ZMjkTOGy5vbnseS14LPgt9cwELQVshTOFrkqGiNWLf49C5qSWUpf+nTMkNy7PK+CnWKq8rWKk/Usna7aGK0jmuv6ZrppSEebDJsNbpr3GeyamZm3mgpZXXFRsq20V7PYcgpzAXvesXd0YPOi8rb09fN732AemBO0McQ29DecIuIZ1Fu5KmY5Dju+NHEh8n395UdcDj4K7U83TGD5/B81p2cI0f9c43y2PIfF/gXLp9MK6Y7XVGqVPbkrH85VFF6QeXiYFVsNUdN95UDtUbXpG8Y1h9orGjObXFpZbkzfK+kzeUBruPiQ4Wu2z0Gj4Z7E/qk++GB+edTgwPDea9EXpe9+f3WYDT73eNxmgmHyXPvpz/KfgqZOjfdPTMzh/nM+UXmq/680wLpm+9360X+xaWlo8ucyzU/VH+c/rHy0+ln4wrzCnmlcWX1l/av9F89q8RVu9WTq/1rFGvaawlr19em1/nWXdbz1x+tr/+W/e37++Tvx79/b8hu+G2c2ujd9H+0v7zc1vEBEXQBwIxubHwXBgCXD8B63sbGasXGxvp5JNkYAeB+6Pa3na2zhhaA0s1vPODxLXDu399Y/gu/NccKnAq5TgAAAZ5pVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IlhNUCBDb3JlIDUuNC4wIj4KICAgPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4KICAgICAgPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIKICAgICAgICAgICAgeG1sbnM6ZXhpZj0iaHR0cDovL25zLmFkb2JlLmNvbS9leGlmLzEuMC8iPgogICAgICAgICA8ZXhpZjpQaXhlbFhEaW1lbnNpb24+MTE3NjwvZXhpZjpQaXhlbFhEaW1lbnNpb24+CiAgICAgICAgIDxleGlmOlBpeGVsWURpbWVuc2lvbj41MjI8L2V4aWY6UGl4ZWxZRGltZW5zaW9uPgogICAgICA8L3JkZjpEZXNjcmlwdGlvbj4KICAgPC9yZGY6UkRGPgo8L3g6eG1wbWV0YT4KpFUVygAAQABJREFUeAHs3QecXFd9MOz/FmnVu2x1yZZlyx33gm1sMITiYEyHQBJCSCGNJORNSL6El0BCEkoILwkJSSCEBEwJBgPGgDHGvfcqucjqsnrXaou+eybMaGa2a2d3dmeew2+ZW8495TljWfv3Oec2HMpSSAQIECBAgAABAgQIECBAgAABAgSOUKDxCJ/zGAECBAgQIECAAAECBAgQIECAAIGcgACTLwIBAgQIECBAgAABAgQIECBAgMCgBASYBsXnYQIECBAgQIAAAQIECBAgQIAAAQEm3wECBAgQIECAAAECBAgQIECAAIFBCQgwDYrPwwQIECBAgAABAgQIECBAgAABAgJMvgMECBAgQIAAAQIECBAgQIAAAQKDEhBgGhSfhwkQIECAAAECBAgQIECAAAECBASYfAcIECBAgAABAgQIECBAgAABAgQGJSDANCg+DxMgQIAAAQIECBAgQIAAAQIECAgw+Q4QIECAAAECBAgQIECAAAECBAgMSkCAaVB8HiZAgAABAgQIECBAgAABAgQIEBBg8h0gQIAAAQIECBAgQIAAAQIECBAYlIAA06D4PEyAAAECBAgQIECAAAECBAgQICDA5DtAgAABAgQIECBAgAABAgQIECAwKAEBpkHxeZgAAQIECBAgQIAAAQIECBAgQECAyXeAAAECBAgQIECAAAECBAgQIEBgUAICTIPi8zABAgQIECBAgAABAgQIECBAgIAAk+8AAQIECBAgQIAAAQIECBAgQIDAoAQEmAbF52ECBAgQIECAAAECBAgQIECAAAEBJt8BAgQIECBAgAABAgQIECBAgACBQQkIMA2Kz8MECBAgQIAAAQIECBAgQIAAAQICTL4DBAgQIECAAAECBAgQIECAAAECgxIQYBoUn4cJECBAgAABAgQIECBAgAABAgQEmHwHCBAgQIAAAQIECBAgQIAAAQIEBiUgwDQoPg8TIECAAAECBAgQIECAAAECBAgIMPkOECBAgAABAgQIECBAgAABAgQIDEpAgGlQfB4mQIAAAQIECBAgQIAAAQIECBAQYPIdIECAAAECBAgQIECAAAECBAgQGJSAANOg+DxMgAABAgQIECBAgAABAgQIECAgwOQ7QIAAAQIECBAgQIAAAQIECBAgMCgBAaZB8XmYAAECBAgQIECAAAECBAgQIEBAgMl3gAABAgQIECBAgAABAgQIECBAYFACzYN62sMECBAgQIBATQmsWbsu2ts7Cn2aP29ujB07pnDugAABAgQIECBAgEB3Ag2HstTdDdcIECBAgACB+hP4jd95f2zesrXQ8b//u4/EooXzC+f9Oejs7Ixt27fHC5u3xJYt22LSpIkxb+6cmD17VjQ1mjzdH0N5CBAgQIAAAQKjTcAMptE2YtpLgACBGhP48w99NPYfOFDo1Vve+Lo456wzCudHevDkipXxb1/4r6LHG+KjH/7/Ykyzf/UVoVTscP2GjfHTW26PO+66NzZueiE6Og7PgspX0tTUFEcfNTvOP/fseP2Vr47x48fnb/kkQIAAAQIECBAY5QL+lj3KB1DzCRAgMNoF9h9ojedWrS504/nVaysSYFq7dn1JuTOmTxNcKihX5iBNgv7Bj26Mm7LA0sqnn+2z0BR0SoGob377u/Hjm26Ot7/59fHSSy+ORrOa+rSTgQABAgQIECAw0gXMUx/pI6R9BAgQqHGBeXOPLunhlqLlWSU3BnhSvMwrPZqWaEmVFWhvb49/zWaJ9Se4VF7zzp274rP/+h/x13/3qbBav1zHOQECBAgQIEBg9AmYwTT6xkyLCRAgUFMC5YGf8sDQkXZ2y9ZtJY/OFWAq8RjKkylTJmf7Ni2IxdnPwgXzo7W1Ndau3xCPP/FUrMs+i9MDDz2Sm9H0htf9fPFlxwQIECBAgAABAqNMQIBplA2Y5hIgQKDWBObNK51ZVLEAU9lMqPKZUrXmWO3+NGd7W132koviyiteGXPnlM5Ky7etI9v8+/s/uCG+8tVvxoEs6JRPV3/tmjhp+Qlx4vLj85d8EiBAgAABAgQIjDIBAaZRNmCaS4AAgVoTmDenNMC0ZevhN5gNpq/lgarymVKDKduzhwXGjRsXr3z5ZXHFq38upk+bevhGN0fpDXJXvOoVkWY4/cNnPlfI0Znt5fTlr30zPvwXf1K45oAAAQIECBAgQGB0CQgwja7x0loCBAjUnMDcsj2YWlsPxu49e2LypElH3Ne0p8/WsiVyAkxHzNnjg2nW0uc+84mYOHFCj3m6u3HJiy/IvXHuwYceLdx+9rnnc3sxNTQ0FK45IECAAAECBAgQGD0CNvkePWOlpQQIEKhJgYkTJsTUqVNK+rZlS+n+SSU3+3GyY8fOaM/eWJZPTU1NcfRRs/On/fpMG1hv3bY9nl+9Jl7YvCW3j1C/HqxwphQs2759R6xdt37QbejMlqhtz2xWr1mb+xzs5topGDTQ4FKe55yzzsgf5j4PHDgQGzZuKrk2kk+S5c5duzLLdbFr1+5haWqqc/fuPdl3YUPubXxt2XdUIkCAAAECBAiMFAEzmEbKSGgHAQIE6lggLZNLbxXLp81btsQxSxblTwf8Wb487qjZsyIFmfpKKWDwwxtuivseeCiefua5Lm83m3P0UXHWmafHyy69OBYvWthXcb3e/6+vfL0kMPHG1782UjtT6siCY7ffeU987/ofxarnV0db2+FAwrQsGHf6aafEVa99dW4D7V4r+dnNhx55LG648adx7/0PxcGDBwuPtLSMjQvOOyde88rL49hjlhSuD8fBwvnzulTz7Krnu33bXwo8XfPt7xXyjxk7Jn71l98RA53tlAJq//7F/46D2Sy5fHpd5tjf2W179+2Ln/z01rgvc0wblhcHMWdMn5Z9ZxfnvrdpL6r0XalESt/ln95ye6QxXLHi6ZI6U/9nzpgeCxbMy+1/df45Z0WaVSYRIECAAAECBKoh4G8h1VBXJwECBAiUCKSNvp94akXhWk8zmNIr7Xfs3FnIN2H8+Pi//9//KZznD8rfINefAML3vv+juPrr18S+/fvzxXT53LjphUj5vn/9DXH5y14Sv/yOt0ZLS0uXfP25kAJIm17YXMj6yle8NBdgWr9hY/zV3/59pLq6SzuyQFwKOKSfX37nW+Pns72Pekr7s7587vNfiptvvaPbLGk54k0335a7/4u/8OZey+q2gEFcTMGtLulQlyu5C7NnzYy77r0/9uzZW8jw4gvOzW0MXrjQj4OnVj6dbTL+40LONPvqPe/+xcJ5TwcpMPX9H/44vvqNb5W0oTj/tmyWWfpJwckUGPzN97wrUhuPNKVloun7eMOPf1oSVCouL7UrfdfTT1puOC3bA+uVL39pvCkLVkoECBAgQIAAgeEWEGAabnH1ESBAgEAXgfIAUPkMpPRA+oU7/fJenlLeFIAoTuXPl5dfnDcdf/4/v5wLHJVf7+k8bUqdZjqtWbM+/uyP3xfjs0BXJVKarfSXH/1EyWyu3sotD6QV5z14sC0++rF/iMeymTZ9pbT06j++dHXF+tFXfen+ipXPdsk2a9aMLtfShTQr58Lzz8mZ5zPcevvdAw4w3Xr7XfnHc58XnHd2jOljxk9ahvaZz/5blD9bUlDZyf79B+KTn/5sPPr4E/Gud749xmYzrgaSUpAxBVMHumQwLQ295trvCTANBFteAgQIECBAoGICAkwVo1QQAQIECBypwLyyjb67e5Pc008/123xK59+tkuAqfz58vKLC7rhJzd3CS6NGdOcW4Z2/HFL49hs2VOaTbTi6Wdyy5SKl/KlWVf/8u//Ge/77V8vLvKIjjs6OuNTn/mXkuDS2DFjYuHC+bFo4YLccrB12d4769ZviD17D8/k6amyL/731V2CS2k51Yuy5XXLlx8fae+rp1asjEceeyLSBtspfT5bPtaYveltOFKaTVScxmUzwY5Z3POyyLQxeArq5dMd2Qywd//yL0R6M11/UkcWREuzxopTKrOv9Ol//FyX58aPHxdpOdqSrL1peVoK7Dy/em08+PCjuf2t8mWm9ibP97zrnflLfX6m/b4+8Bcf6TJTatLEibH8hGVZnQtjSbY8My0TTHWuyfaASksL0/dCIkCAAAECBAhUU0CAqZr66iZAgACBnED5DKPyGUgp08pnus54yV3PAj9pdktx2pLNaipO8+bNLT4tHKeNvL/xzWsL5+kgLbv7kz/6vTj5xBNKrr/q516W22w7zTBKm2TnU5rZ8sarXhsL5ndfRz5fX58/+vFNWZBgYy7bzBkz4rd/891xyknLuw343HX3ffGlbA+nnlLayPvHWeCsOKU9rf7iA++PKVMmFy6fd86ZuX2m0r5EaelYWjI3HCntMZX2MCpOadPv3pYbpuBKmqmW/27s2r07Hnn08VzArLicno4fzQJpxcHBZHxS2RiXP/twVn55UOq0U0+O3/r1d8WsmaWz5tKzv9D+hvjK166Jb3/3+4X9u374o5/Ey1/6klwwqrz88vP0ffzEP/xTl+DSaaecFL/3W7+WWwJX/MzZZ76ocHrzbXfEF774lWg92Fq45oAAAQIECBAgMJwC/fvPfsPZInURIECAQN0JHJ1tiNxY9Hr67pZ+FQeY0gySfFrZzcymzWVvoZs35+h89pLPtP9QPmCRbqR9gT78wT/pElzKPzQ928j5Ix/8QMkG32kfnK9/89v5LEf8eWO2eXRKKdDyib/9UKSgQk+zic4796z41Mc+kttsvLsKv3vdD0o2Bp+fBdj+75/9n5LgUv65tFF02jA77QE1XOn6H96Y26+ouL4rXv3y4tMux6mdF7/4/JLrt952Z8l5byflS9wuuvDcXjcJTzOePv/FL5cU+ZKLLsiCdH/YbXApZUxL+d759jfF2958VeG5tJzy377w34Xz3g7SPk9pc/ni9Marfj7+PKsz7a/UW0qzsT79ib+Ol1x8YW/Z3CNAgAABAgQIDJmAANOQ0SqYAAECBPorkPbBmf2zN6ilZ9KSo/JXsBcvkXv5Sy9N2XLpmedW5d66lj9Pn8VL5NLSqxnZ0rDylJu9dM13Si5fdMF5fc40SRtDv/7K15Q8d9sdd2evjl9fcm2gJylQld4i9/73vTcmT5rU5+MpmJGWzpWnvXv3xfXZrJnidMWrXxGTJk0svtTl+M2vv7Jfb9rr8uAAL2zbtj2u/sY1JU9desmL47ilx5Zc6+7k4rIlbXfdc38WSGvrLmvJtfRduvPue0uuXZIFi3pLaebRmrXrClmOPmp2vOdX3tlrUCqf+corXpXNaJuXP81tYH/PfQ8Uzrs7SLO6vnvdD0tupeVwb33TVT0GGksyZyeTJ0+KX3/3L5Vfdk6AAAECBAgQGBYBAaZhYVYJAQIECPQlULxMLgVbtmZvxsqn9La1tCQqn17zqsujuakpd3rw4MFsydrhQEBra2vs3r0nnzXmlu3vlL/x+JMrSmYvpesvf9ml+du9fqalZemX+XxK7U1vdRtsevMbrhz0a+bvuOueOHDgQKEpaR+n/rzNbOrUKXHWmacXnhuKgxRE+eSn/zn27Tv8pr5U7zvf9qZ+Vbco249qcbb/UD6lN/7d/+Aj+dMePx/M8hTXuXDB/F4DiWk8v/o/3yop7+1veUO/N0FPwb8rr3hlyfPlSwJLbmYn9z/4cO5tcMXXU3ApzdySCBAgQIAAAQKjQUCAaTSMkjYSIECgDgSKA0ypu8XL5IqXx83Nlrul/W+OOWZxQWVltg9TPnVdHjcnf6vkM22mXJzSMrJlx/U9iyY9MyYL2lx04XnFj0d5eSU3+3GS+nVJBZY3pQ3Ji9OZWdAobejdn3TpRUO7vOrLX/1mbjZPvi1pWeQf/u5v9rn8K58/fZbPPLqt7M1wxXnzx7fcXrqUrnypXT5f/jPNsioOUqbracniQNIpJ59Ykv3ZVatLzstPHnu8dE+qtGdWWi4pESBAgAABAgRGi4BNvkfLSGknAQIEalyg/E1vxRt1Fy+PO+H443ISy49fFukNcimtzPatecXll+WOi5fHpQvz5nUfYNpcFmAqD3DlCuvl/8rzDzbAdFL2Zrf+vhGtl2bFlrL9p+Zm+1v1N/U026u/z/eWL+139a3vXFeS5R3ZfkUnZxuZDyRdnAX2vvTlrxUeuef+B7MZW60xblxL4VrxQbp3z30PFl/qspdTyc3sZN2GjSWXJkwYHz+44SeFjbvTDKfuUv56/jO9jbCtrT2X9bnsTW+9pfRGwuLUnyWDxfkdEyBAgAABAgSqLSDAVO0RUD8BAgQI5ATmzi0NBBVvvl08gykFllJafsJx8Z1sM+uUimcwFQem0r3yQFC6llJ5gGn69N43Uf7fpw7///SppfmL23s4V/+Pjsr2+KlE2ryldGbWtLJ29lbHQPL2Vk75vQcffjT+6XNfKLmc3qyW9ioaaJo5c0ZuE/bHfvYWurREMu1v1NOspHuzAFTKk0/pbXRpr6ve0vqyAFNaXnf116/p7ZE+76W9sdJ3JL0Jr7uUloEWp6OP6r2NxXkdEyBAgAABAgRGgoAlciNhFLSBAAECBLoEgvIBm7Rvz7PPHZ79kZ/BdMLPAk2Jbu26DbE/248npfxzuZPs/8pnRuWvl+cbaHBlWvZGueK0ffuOLhuTF9/v67ivoEdfz+fvl/cr7XHU35T2lerpzXX9LaM839PPPBsf++RnSjZiP/vMF8V73v2L5Vn7fV6+TK78DXHFBZUvj0tvW+srlQeY+srf3/vFe2MVP5NmPO3ds7f4Usmm9yU3nBAgQIAAAQIERqiAGUwjdGA0iwABAvUmMCubmZI2pD74s7eC5WcipQ288zNQ0lKlhQvm5WimZ69tT2/2SjM/0i/o6fXup2b75BTv3ZQyls+MyruW5xtogGnqlMn5ogqfae+e1KYjST3NbBlIWckhBbqK05Ru2ll8v/g4bSid8qe3+FUipcDfh//mk3Eg23g9n9IMoj/4vd8c1HLA8889O/7181+K9iz4mNKDDz0Se7IATfmb8tK1tMF3PjVlG8NfeP45+dMePzduLN3HKn03U7sHmyb0sBdWCjx1li27m53tMyYRIECAAAECBEaTgADTaBotbSVAgEANC6TgRtoD6PnVa3O9zO+lVLw87vhlS0veqrU8248pv7Qo7cOUCzBly5DyKQVLetrgOgWritOB1sNvXiu+3tNx2tunPJWXWX6/t/OWlrG93e7XvWQ4cdLEkg2q01v1BpK669dAns/nTeP3l3/98VzgJ39t8aIF8ad/9L5oGTu4vqZAUtq8/O577s8VnQJNd95zX1x+2SX5qnKf6Vo+CJUunHH6qSVv/yvJXHRSPotrQRbU/P3f+Y2iHJU9TJvGl6e9+/aVX3JOgAABAgQIEBjRApbIjejh0TgCBAjUl0DxbKP82+Ce/tlG3kkiv/9SXqV4mVx+H6biJWI97b+Uni9fkrZ9+8Bm7ezYWZp/XEtLTJ40Kd+0qn2mN+wVpx07dxWf9nqcglE9LePq9cGymzt37YoP/dXHY+u2bYU7c7LNxv/iA++PiRP790a7woM9HFxy4fkld7p7m1z5tYsvKn2mpICik7RUsDjtHIBh8XP9PW5ubo70/SlOL5TtyVR8zzEBAgQIECBAYCQKCDCNxFHRJgIECNSpwLw5hzf6Tsvi0qviV5QEmI4rkUkbfedTypeWiG3dejioMb9s4/B83vQ5e1bpJsrbd5QuLSvO291x+VK02bNLAzvdPTMc12bPmlFSzUCCI9srsDQuzbz58Ec/EcX7GM2cMT0++Gd/FNOyZY2VSmdl+zhNGH94Ftqjjz1RsrQv9SVdy6dx48bFOWedkT/t9bN8+WMlXHqtMLtZvlfWYN9K2Fd97hMgQIAAAQIEKi0gwFRpUeURIECAwBELlG/IvXrtumwD7/W58hqz5V/HHXdsSdkLF8wvBBnSvkFpH6biJVHFM6JKHsxOygNC5QGj8vzl59vK9joqD1iV5x+u81llbykbSHBkIHm7609rFhT86N/9Qzy3anXh9pTJk+Mv/vSPuswYK2Q4woOxY8fEeeeeVXg67WF0+133FM7vuPOekn2NzjvnzH4vzVu2tPR7tmvX7lzwslD4EBwct/SYklI3bCrdB6rkphMCBAgQIECAwAgUEGAagYOiSQQIEKhXgfnz5pZ0/a5sD500KymlxYsXxvhsFkpxSnvlpH2Z8un2LKhQnHpbIle+qfZTK58p2buouJzuju974KGSy+XlldwcxpPyJXIPPfxov2u//4GH+523PGN7e3vubXFPPLWicCvNMPrzD/xhLJhfOq6FDIM86PI2udvuKpTY5e1xF11QuNfXwfLlpRt6d3Z2xqOPP9nXY4O6f+IJx5c8n8aieLlnyU0nBAgQIECAAIERKCDANAIHRZMIECBQrwJz5xxd0vU777qvcF6+/1L+xgnZRt/5dEfRDJZ0rXxGVD5f+lx67DFRvJlzCpDcdMttxVl6PH5+9ZpYWbR0L2U8vqgdPT44DDdOP/Xkklo2ZjNhnnjycNCn5GbRSUcWRLnp5luLrvT/MD3795/5l3gge5tbPqVNy//sT34/jj1mcf5SxT9POWl5TJ8+rVDuUyufzgVl0vKyFVnAMJ/S8rO0AXx/09QpU+Kk5aUBny9/9X/6+/gR5Tvn7Bd1+T5+45prj6gsDxEgQIAAAQIEqiEgwFQNdXUSIECAQLcCaXPl4lfNF28SXRxIKn64+PXxxTM+0pK6tLF0TynNOLr0kheX3P7Rj2+KFCzpK13/oxtLshx91Oy4+MLzSq5V62RJNtMrvS2tON3wk5uLT7s9vj+bkVW+7K/bjGUX0wyzz37uC3HnXfcW7qRNq//4D3+3y6bshQwVOkgBwosuKHW/9fa74rY7Ds9kSlWlPE1Z3oGkK3/+VSXZU8DqnnsfKLnW35O0xC7NguotpZlnL77g3JIsN950a6zJlokOJK1bv2Eg2eUlQIAAAQIECFRMYGB/26pYtQoiQIAAAQLdC/S0rK2nANOybF+mFEwqT7Nnz4ruXv9enO8Nr7uiZNbIuvUb4+N//5k4eLCtOFvJ8be+8/344Q03lVx741U/H01NTSXXqnly1WtfXVL9TTffFt+57gcl14pP0t5Vn/nsvxdf6vfx5//zy/GTnx6e+ZSCPu//vfdG+Uyqfhc4wIyXlL0ZLgWXbilaKpeKu/jF/Xt7XHHVZ51xehYgOzw7Lt379Gf/NX7cj2BdvpwUlPr0P34u3vNbf9Drdyqf/41XvTbGjhmTP80Fpf70L/4q7igK3hVulh20ZTPwrv76NfGBv/hI2R2nBAgQIECAAIHhEWgenmrUQoAAAQIE+ieQAkzFy5vSUzOyZVBHZQGj7lLalyntz1S8sXTKN3du6XK77p5NM5zSLKYbb7qlcPvubJbKR/7mE/HG1782lmUbL4/P9hFKy+dWPb8mfpItIbv+h11nL73k4gsLz4+Eg5OzpWMp8Fa8jO8/vnR19pa1XfGSiy+ItDl6QxaU27ptezzy6OPx7//x37Fv//4BNz09e931N5Q8N2nixPje9T/K/ZTc6MfJ6aedHFe99jX9yHk4y7HHLIn58+ZECg6m1OV7kC27TBYDTcnn93/3N+IP/+SDsWfP3tzj+/btj3/KZmulWVKvveKVmeO8KN7zKm2Svj6bQfT86rVxYxZ0e27V8wOqNu1V9e53vSM3Iyz/YBqXj3/qH+PlL31JXHD+OZFmqKUlfCm1trZmM5zWx+o1a+Pa712fO05LEyUCBAgQIECAQDUEBJiqoa5OAgQIEOhRoLsZTCccX7rpcvnDaX+m8sBCd+WUP5fO3/rG18XDjzwWW7ZuK9x+7Imn4rG/+lguCJOCUFuzewfbus5qas5mLf1qFhAYSbOX8p341V9+R3zww38bB7IgRD596zvXRfqZMGF8TJwwocsm0ueefUbOsXipYf7Z7j67M9m1e3c88tgT3WXv81oKJB5JuvjFF+Rm73T37JHMXsqXk4JHH/ij34u/+finSzaAfzgLrKWflMZlAc7Zs2bkvj/79x/IP3rEn5dfdkls2bI1vv7N0v2XfnTjTyP9pJT2nUoznTa9sPmI6/EgAQIECBAgQKDSApbIVVpUeQQIECAwKIHuAkPlS5XKK+hu+dy8OXPKs3V7PnPmjPjwBz+QbQjeNX/aX2jDxk3dBpdSYOFP3v97ceaLTuu23GpfTK+9/z9/+Du5AEh5W9JMnPIg0uJFC+K9v/4r5VlHxXlvQaTe7vWncyl4+Tcf/vPczKHu8h84cCA3c6i34NKUKZNLlmJ2V07xtbe+6ar43fe+J5s9V/rWxHye7dt39BhcmjJ5cj6bTwIECBAgQIDAsAqYwTSs3CojQIAAgb4EunvzW3cBpOJyijf6zl/vrpz8vfLPtPzuk3/7l/Hd7/8orv3u9ZFm4fSU0syRi7NX3r/9za+PadOm9pRtRFxP+yB96mMfiX/7wn/Ffdkm3ilgVp7SLKzLLr043vWLb4uWsaNzeVWaZXb8sqVdllamNwV2FzgsN+jrPJX/8Y9+KG678+741rXXdZkt193zaRP58849K84758zcZufFbyzsLn/5tbTs8swzTotvZ3t+/Thbwpk2Cu8tpWWAr3rFy+LCso3Ce3vGPQIECBAgQIBAJQUasr9sdv3bZiVrUBYBAgQIEBhFAultX2nT6yeeWhk7d+6M3dn+O2kmSdr3ZsmihXHKKSeOykDMtmy/pXuzINML2bKqXbv3xJTsjX0pcHLu2WdGmmEj9V9gR7bXUloGmJaopWDkgWxpXAo2zspmw6UZcUcfdVSk/ZQqldJf1Z5fvSaeeHJl7Mi+k6nOFLBKby/M1zV/XuXqq1S7lUOAAAECBAjUl4AAU32Nt94SIECAAAECBAgQIECAAAECBCouYA+mipMqkAABAgQIECBAgAABAgQIECBQXwICTPU13npLgAABAgQIECBAgAABAgQIEKi4gABTxUkVSIAAAQIECBAgQIAAAQIECBCoLwEBpvoab70lQIAAAQIECBAgQIAAAQIECFRcQICp4qQKJECAAAECBAgQIECAAAECBAjUl4AAU32Nt94SIECAAAECBAgQIECAAAECBCouIMBUcVIFEiBAgAABAgQIECBAgAABAgTqS0CAqb7GW28JECBAgAABAgQIECBAgAABAhUXEGCqOKkCCRAgQIAAAQIECBAgQIAAAQL1JSDAVF/jrbcECBAgQIAAAQIECBAgQIAAgYoLCDBVnFSBBAgQIECAAAECBAgQIECAAIH6EhBgqq/x1lsCBAgQIECAAAECBAgQIECAQMUFBJgqTqpAAgQIECBAgAABAgQIECBAgEB9CQgw1dd46y0BAgQIECBAgAABAgQIECBAoOICAkwVJ1UgAQIECBAgQIAAAQIECBAgQKC+BASY6mu89ZYAAQIECBAgQIAAAQIECBAgUHEBAaaKkyqQAAECBAgQIECAAAECBAgQIFBfAgJM9TXeekuAAAECBAgQIECAAAECBAgQqLiAAFPFSRVIgAABAgQIECBAgAABAgQIEKgvAQGm+hpvvSVAgAABAgQIECBAgAABAgQIVFxAgKnipAokQIAAAQIECBAgQIAAAQIECNSXgABTfY233hIgQIAAAQIECBAgQIAAAQIEKi4gwFRxUgUSIECAAAECBAgQIECAAAECBOpLQICpvsZbbwkQIECAAAECBAgQIECAAAECFRcQYKo4qQIJECBAgAABAgQIECBAgAABAvUlIMBUX+OttwQIECBAgAABAgQIECBAgACBigsIMFWcVIEECBAgQIAAAQIECBAgQIAAgfoSEGCqr/HWWwIECBAgQIAAAQIECBAgQIBAxQWaK16iAgkQIECAAIGaFTh06FDs3LUrtm3fFrt3745de3bHnj17oq2tLdra26O9rT3X9+YxzTGmOfsZMyYmTZoUUyZNjsmTJ8eM6TNi6pQp0dDQULNGOkaAAAECBAgQqEeBhuwviofqseP6TIAAAQIECPRP4GDbwVizdm1s3LQpNm1+IVpbW/v3YA+5Wlpa4ujZR8Wco4+OhQsWxNgxY3vI6TIBAgQIECBAgMBoERBgGi0jpZ0ECBAgQGAYBdJ/f9qwcWM89/yqWLt+XXR2dg5J7Y2NjbFg3vw4ZvGSmDtnjplNQ6KsUAIECBAgQIDA0AsIMA29sRoIECBAgMCoEUiBpdVr18RjTzyRLYXbOaztnjplapx84omxaMFCgaZhlVcZAQIECBAgQGDwAgJMgzdUAgECBAgQqAmBNGPpvgcfiN3Zvkq9pbS30lGzZ8e0qVNicra/0tQpkyMte2tuSnsu/e/2jm3ZXkztHe255XQ7d+3OytwTO3buihc2b87t1dRb+ZOz/ZrOetEZuRlNveVzjwABAgQIECBAYOQICDCNnLHQEgIECBAgUBWBffv2xX0PPRBr163rsf5JEyfGkkULY162jG3G9GlHPMMozZDatn1HrM+CWatWr4k9e/f2WOfC+QvizNNfFBMmTOgxjxsECBAgQIAAAQIjQ0CAaWSMg1YQIECAAIGqCKTNu++6955sVlFbl/rTm94WLZgfy5YeG7NnzuxyvxIXNm/dGiufeTZblrcuunvvyJjmMXHe2efkNgOvRH3KIECAAAECBAgQGBoBAaahcVUqAQIECBAY0QIdHR3xwMMPZcGdp7u0MwWWjlm8KE5efkKkmUvDkdJMpseefCrbVHx1t4GmZUuPizNOOz2ampqGoznqIECAAAECBAgQGKCAANMAwWQnQIAAAQKjXeDgwYNx8+23xuYtW7p0ZeaM6XH2GS+KGdOmdbk3HBe27dgR9z7wYGzdtr1LdbNnzYpLLrwoxo4d2+WeCwQIECBAgAABAtUVEGCqrr/aCRAgQIDAsArs378/fnLLzV3eENfY2JjNEDo1js+Ww42EtCJbNvfAw49EZ2dnSXPSm+Yuu/iSGD9+fMl1JwQIECBAgAABAtUVEGCqrr/aCRAgQIDAsAmk4NKPfnJj7N1XurF2WgZ30fnnxvQqzVrqCWB7Npvp1rvujj17Sts7ccLEePllLxVk6gnOdQIECBAgQIBAFQQEmKqArkoCBAgQIDDcAmlZ3A03/aTLzKWj0rKzF18QY5qbh7tJ/aqvrb09br7tjnihbDlfmsl0+aWXWS7XL0WZCBAgQIAAAQJDL9A49FWogQABAgQIEKimQNrQO+25tHPXzpJmLJg3Ny696MIRG1xKjU2Br9TG1NbilPqS+pT6JhEgQIAAAQIECFRfQICp+mOgBQQIECBAYEgF0tviyjf0TgGbi84/b1S8lS29OS61tTzIlPqU+iYRIECAAAECBAhUX0CAqfpjoAUECBAgQGDIBNasXRsrn3m6pPy0LO7Cc8+JhoaGkusj+SS1NbU5tb04pb6lPkoECBAgQIAAAQLVFRBgqq6/2gkQIECAwJAJ7Nu3L+66956S8idNmpjbcynNChptKbX5kgvPj7QpeXFKfUx9lQgQIECAAAECBKonIMBUPXs1EyBAgACBIRW476EHoq29rVBHY2NjXHTeuSN6z6VCY3s4GDNmTO6Nd6kv+ZT6mPoqESBAgAABAgQIVE/g8N/OqtcGNRMgQIAAAQIVFtiwcWOsXbeupNQzTjs1pk+bVnJtNJ6kPqS+FKfU19RniQABAgQIECBAoDoCAkzVcVcrAQIECBAYMoFDhw7FfQ+WzuiZOWN6HL/02CGrc7gLTn1JfSpOqc+p7xIBAgQIECBAgMDwCwgwDb+5GgkQIECAwJAKrF67Jnbv2V2oI22QffYZLyqc18pB6lPxRuWpz6nvEgECBAgQIECAwPALCDANv7kaCRAgQIDAkAmkGTyPPfFESfnHLF4UM2pgaVxJp7KT1KfUt+KU+m4WU7GIYwIECBAgQIDA8AgIMA2Ps1oIECBAgMCwCKR9iHbu2lmoK83wOXn5CYXzWjtIfSuexZT6bi+mWhtl/SFAgAABAgRGg4AA02gYJW0kQIAAAQL9FHju+VUlORctmB+TJk4suVZLJ6lvqY/Fqdyg+J5jAgQIECBAgACBoREQYBoaV6USIECAAIFhFzjYdjDWri99c9yyGtrYuyfQ8j4mg2QhESBAgAABAgQIDJ+AANPwWauJAAECBAgMqcCatWujs7OzUEea3TN75szCea0epD4Wz9JKBslCIkCAAAECBAgQGD4BAabhs1YTAQIECBAYUoGNmzaVlL9k0cKS81o+Ke9ruUUt913fCBAgQIAAAQIjQUCAaSSMgjYQIECAAIFBCqQ3p23a/EJJKfPmzCk5r+WT8r4mC2+Tq+UR1zcCBAgQIEBgpAkIMI20EdEeAgQIECBwBAI7d+2K1tbWwpNjmptjxvRphfNaP0h9TX3Op2SRTCQCBAgQIECAAIHhERBgGh5ntRAgQIAAgSEV2LZ9W0n5R82eHQ0NDSXXavkk9TX1uTiVmxTfc0yAAAECBAgQIFBZAQGmynoqjQABAgQIVEVg9+7dJfVOmzql5LweTsr7XG5SDwb6SIAAAQIECBColoAAU7Xk1UuAAAECBCoosGtPaYBpyuTJFSx9dBQ1edKkkoaWm5TcdEKAAAECBAgQIFBRAQGminIqjAABAgQIVEdgz549JRVPmVwabCm5WaMnU6eUBtXKTWq027pFgAABAgQIEBgRAgJMI2IYNIIAAQIECAxOoK2traSAlpaWkvN6OCnvc7lJPRjoIwECBAgQIECgWgICTNWSVy8BAgQIEKigQFt7e0lpzU2H36hWcqOGT8r7XG5Sw13XNQIECBAgQIBA1QUEmKo+BBpAgAABAgQGL9DeVhpgGjOm/gJM5X0uNxm8shIIECBAgAABAgR6EhBg6knGdQIECBAgMIoEOg91lrS2sbH+/hVf3udykxIgJwQIECBAgAABAhUVqL+/fVaUT2EECBAgQIAAAQIECBAgQIAAAQICTL4DBAgQIECAAAECBAgQIECAAAECgxIQYBoUn4cJECBAgAABAgQIECBAgAABAgQEmHwHCBAgQIAAAQIECBAgQIAAAQIEBiUgwDQoPg8TIECAAAECBAgQIECAAAECBAgIMPkOECBAgAABAgQIECBAgAABAgQIDEpAgGlQfB4mQIAAAQIECBAgQIAAAQIECBAQYPIdIECAAAECBAgQIECAAAECBAgQGJSAANOg+DxMgAABAgQIECBAgAABAgQIECAgwOQ7QIAAAQIECBAgQIAAAQIECBAgMCgBAaZB8XmYAAECBAgQIECAAAECBAgQIEBAgMl3gAABAgQIECBAgAABAgQIECBAYFACAkyD4vMwAQIECBAgQIAAAQIECBAgQICAAJPvAAECBAgQIECAAAECBAgQIECAwKAEmgf1tIcJECBAgAABAgQIECAwRAIdnR3xxJNPxdSpU4aohtFfbEdHR+zZs5dRH0PJqQ+gn93m1LdTMkppyaLFfWeusxwCTHU24LpLgAABAgQIECBAYLQIdHR0xjOrno3jj1s2Wpo87O3ct29frF2zJpbvHz/sdY+mCjeNb47nDu2NDTN9l3obt1md+2LKjnUxeT6nnpw6DuyLxtbdAkzdAAkwdYPiEgECBAgQIECAAAEC1RdobGiIlrEtsezYpdVvzAhtwbbt22Lz+o1xwo/WjdAWjoxmbTljYayZMzb+fO+xI6NBI7QV75y4M17esCW2jhNg6mmImg5ti6Pan+3pdl1ftwdTXQ+/zhMgQIAAAQIECBAgQIAAAQIEBi8gwDR4QyUQIECAAAECBAgQIECAAAECBOpaQICprodf5wkQIECAAAECBAgQIECAAAECgxcQYBq8oRIIECBAgAABAgQIECBAgAABAnUtIMBU18Ov8wQIECBAgAABAgQIECBAgACBwQsIMA3eUAkECBAgQIAAAQIECBAgQIAAgboWEGCq6+HXeQIECBAgQIAAAQIECBAgQIDA4AUEmAZvqAQCBAgQIECAAAECBAgQIECAQF0LCDDV9fDrPAECBAgQIECAAAECBAgQIEBg8AICTIM3VAIBAgQIECBAgAABAgQIECBAoK4FBJjqevh1ngABAgQIECBAgAABAgQIECAweAEBpsEbKoEAAQIECBAgQIAAAQIECBAgUNcCAkx1Pfw6T4AAAQIECBAgQIAAAQIECBAYvEDz4ItQAoGhEdi8f120dR4sFD69ZXaMb55UOB/Og9aO/dHe2VaosqVpfDQ3jimcOyBAgAABAgQIECBAgAABAvUsIMBUz6M/wvt+9YqPx8Z9qwqtfNNxvxcvmn1p4Xw4D775zGfi0a23F6p8/dLfibOOemnh3AEBAgQIECBAgAABAgQIEKhnAUvk6nn09Z0AAQIECBAgQIAAAQIECBAgUAEBAaYKICqCAAECBAgQIECAAAECBAgQIFDPAgJM9Tz6+k6AAAECBAgQIECAAAECBAgQqICAAFMFEBVBgAABAgQIECBAgAABAgQIEKhnAZt81/Po63u/Bd6y7A/izdlPPjWG2GzewicBAgQIECBAgAABAgQIEBBg8h0g0A+BxoamfuSShQABAgQIECBAgAABAgQI1KeAAFN9jrteEyBAgACBigrs2r0ntu/YGQcO7M+VO27c+Jg+bWpMmTypovUojAABAgQIECBAYGQKCDCNzHHRqjoXaO3YHztbt8SkMdNiwpjJFdXoPNQRe9p2xv723TFxzNSY2Dw1GhoaKlqHwggQqA+Bjo6OWPHMs/H0c8/Fnj17u+30pEkT47hjjonjlx4bTU1mg3aL5CIBAgQIECBAoAYEBJhqYBB1oVRg7Z6VcfemH5RcXDr1tDh91iUl17o7uWHNVyIFYPpKJ8+4IOZPWtpXti73H916R6zYcV/h+vLp58RJM87LnW89sCHu2nh9PLTlp7kAUD7T1LEzY+7EY+PCuVdE6seRppU7Hoh7Nv0wntp+X7QfaisUM6axJU6deWFckJU/L6snpVvWfysLQO0p5El1p2CXRIAAgbzAC5u3xO333BP79x/IX+r2MwWeHnzk0Xjq6afjwnPOiaNmz+o2n4sECBAgQIAAAQKjW0CAaXSPn9aXCaza9Xj855MfiTQDKJ9OmHZWFsQ5P3/a6+fN674ZHYfae82Tbs4cN++IAkxr96yI+174caH8KVnwKAWYHtx8U1zzzD+VBH7ymXYe3BrpJwWGXrHoHXHJ/Kvyt/r1mSyuffZf4sEscNVdautsjfs3/yS7f3NcseRX47w5r4w7N14XO1o3F7Kn4JwAU4HDAYG6F3h21fNx9/0PxKFDh/ptkQJRN95ya5x75hlx7JLF/X5ORgIEhk/g2VXPxaIFC6O52a8Iw6euJgIECNSOgH971M5Y1n1Pntn5cHzpyb+OFDDJpzTTKL0Brqlx5H7V06yibz372XyTe/w8FJ3xg9X/mQtCvXTBm3vMV3yjvbMt/uvJj8azux4pvtztcZq5de1z/5IFkqZ2e99FAgQIJIENGzfF3fdlwaXsfwNNKSCVnh0/blzMnXP0QB+XnwCBIRZ45PHH4sGHH44TTzghli09TqBpiL0VT4AAgVoTGLm/ddeatP4MqUCa3fPlp/62ZAbQGbMvjdcv/e0YyBvgzph9WbdL5FKApnhGT6U6k5bF3br+27nixjSOzZbAnR7zJy6NGeOOjs3718eq3Y/Hql2PlVR3+4bvxEVzr4yxTS0l17s7SQGp8uDShObJsWzambF48gkxreWoSEsKn931aKGeFOw6kl8cu6vfNQIEakvgQOuBuO3uewb1Z0T68yWVccXPXR7jWsbVFpDeEKgBgdaDrdmy1ofjiaeeEmiqgfHUBQIECAyngADTcGqra0gEHtt6Z3x15SdKlrade/TPxWuP+fUBb1591dL3dtvGr6z42JAEmB7eckuuvulZoOcdJ/xpzJnYddlICkBd//wXC7/Qpb2R7t98Y5w/51XdtjV/cU/bjmwvqh/mT3OfaWnfu076YKT68umE6WfFy7KTm9Z+I3605r9jX7b5t0SAAIHuBB5/ckW0tR3ew627PP25lspIZZ15+pHvK9efeuQhQODIBQSajtzOkwQIEKhXgcZ67bh+14bAQ9m+QVdnwZ/ifZNePPe1ceWxvzHg4FK1RCY0T4n3nvqxboNLqU0XzbsyTpxxbknzivdxKrlRdHLb+u9Ee+fBwpXpLUfHr53yVyXBpcLN7ODSBW+Mly54S/ElxwQIECgItLe3x8pnnyucD/YglZXKlAgQGNkC+UDTtdd9L5vV9KR/bkf2cGkdAQIEqiogwFRVfpUPRuDeF26Ir6/8VLYzUWehmMvmvyleveRdhfPRcJDe0DZhzJRem3rhnCtK7m9v3VRyXn6yv31v3LXp+yWX+/MmuBTMSm+VkwgQIFAusHHT5ujsPPznbfn9gZ6nslKZEgECo0OgOND0uEDT6Bg0rSRAgMAwC1giN8zgqquMwJ0bvx/fee5zJYW9YtE74yXzX19ybaSfpH2X+lrqlvqwMNsvqTilZXJtHa0xpod9mB7denvJm/Sas3rS/lJ9pZam8XHKzAvjgeytciMxfeUbXxuJzdImAiNS4Cv/c82IbFdxo265887i0yE59ufGkLAqtI4FUqDpoWyPpiezPZqWZ5uBH28z8Dr+Nug6AQIESgXMYCr1cDYKBNKeROXBpSuW/OqoCy4l6lnj5sf45kl9qjc3jom0OXdx2tW2rfi05HjbgY0l58unn53VM7HkWk8np826uKdbrhMgUMcC6Q1wlU5DUWal26g8AgS6F8gHmtLSOTOaujdylQABAvUmIMBUbyM+yvt749qvxfef/4+SXrx84S/EBXNfU3JttJxMa5nd76ZOHjujJO+ugz0HmLa3vlCSd0bLnJLz3k5mFG0A3ls+9wgQqC+BhmioeIeHosyKN1KBBAj0KpB78+wQBKB7rdRNAgQIEBiRApbIjchh0ajuBG5df21s2Nd1g9nHt90ZF897XTQ1jr6v89SWWd11tdtrzQ1jSq53dPb8JqcdZQGmyWOnlzzb28mkMdN6u+0eAQL1KlD5+FIMQcyqXkdHvwkMu8DYsWPjxOOzJXLHLYvm5tH3d7BhB1MhAQIE6kDAvw3qYJBrpYvdBZdS39btfSZ+sPpLo25z79T2/iyPS/kGmna0lm6cO3kAQaNx2VK6FMxqP9RzAGug7alU/re98c2VKko5BGpOoHyvobe94aqK9vHJFSvjgUcerWiZZ5x6Siw/fllFyyzfe8qfGxXlVViNC3z7uu/Gvn37eu2lwFKvPG4SIECgrgUskavr4R+dnR/XNKHLhtW3bbg2Vmy/f3R2qMKtTnua7GnbUVJqX2+pK8mcnQxV4Ku8HucECIwegXlz+7/Utr+9Gooy+1u3fAQIDEygZWxLnH7qaXHlq6+Ik5afaNbSwPjkJkCAQF0ImMFUF8NcO52c2Dw13nXSB+PoCYtj+4FNsWr344XOfePpT8fvnP73MZDlYIWHa+igoaEh0jK33W3bC73a17arcNyfg/SWOokAAQLFAlMmT44Z06fFtu2lAeziPAM5TmWlMiUCBEa2QAoseVvcyB4jrSNAgMBIETCDaaSMhHb0KTB17Mz4tVP+KuZOPCYaGxrjTcveF2k2Uz7tbd8ZX3/6U+GtRBHTyjbqLg425b16+jzQvndELo/rqb2uEyAwfAKnnXRSxSqrZFkVa5SCCBAoCORnLL321a+Jk05YbsZSQcYBAQIECPQkIMDUk4zrI07g8uxtcbPGzy+0K72B7XXH/mbhPB08s/PhuHn9N0uu1eNJ+dvpdh88PJupL4/dZcvr+srvPgEC9SMwd87RsWTRwkF3OJWRypIIEBh5AgJLI29MtIgAAQKjRcASudEyUtqZzVrq+gqjU2ddFCt2PBD3b76xIHTD6i/HMVNOiUWTTyhcq7eD6WUzmLYe2NhvgrT0UCJAgEBPAueeeUbs3rMntm7rf+C6uKyZM6ZHKkMiQGDkCZx60smxaMFCs5VG3tBoEQECBEaFgBlMo2KYNLI3gSuO+dWYMe7w5rOd0RlfW/nJSEu96jXNHDe3pOtP7bgv+ruv0sNbby151gkBAgSKBZqamuKlF18UC+aV/jlTnKen4wXz5+aeTWVIBAiMPIFjlxwjuDTyhkWLCBAgMGoEBJhGzVBpaE8CLU3j4y3L/iCb4XT4F5btrS/Et579bE+P1Pz1U2ZeGC1F+1O1dx6MBzb/pM9+t3bsj0e33t5nPhkIEKhvgebm5rjo/PNyM5EmjB/XJ0bKk2YtXXTeeX557VNLBgIECBAgQIDA6BSwRG50jptWlwksmLQsLl/4tvjh6v8q3Hlk622xdNPpcc7RLy9cq5eDcc0T4oI5r46b1n2j0OXbN3wvTp91SUwcM7VwrfzgtvXXRltna/ll5wQIEOgikN5YufSYJbk9mdZv3BTrN26M7Tt2xIEDB3J5x40bF9OnTYt5c+ZkP0eHWUtdCF0gQIAAAQIECNSUgABTTQ1nfXfm4nlXxcpsP6bndj1WgPjeqn+LxZOXx1ETBr8pbaHQUXJw4dyfj9s2pIDRwVyLt7duis89+qfxrpM+lL1lblaXXty6/tvx47VXd7nuAgECBHoTSIGjhfPn5X56y+ceAQIECBAgQIBAbQsIMNX2+NZV7xobGuNNx70vPv3Q++JAx//uv5SCK1ev/ET85ql/F2Max/bokd6ydrDjf/+re3eZyu/tyd60tnX/hu6y5q6lWUJpFlE108QxU+K8o18Vt274dqEZWw6sj88+8v5YPv2cWJhtgj6jZU6s3bMyC8o9mm2Wfn8u3+zxC3L7V+1uO7INfAuVOSBAgAABAgQIECBAgACBuhEQYKqboa6Pjk7NZuZctfS34isr/q7Q4U37no/vr/pCvPbYXy9cKz/49rP/HE9sv7v8co/nP1z9pWw53pd6vP+6Y38zW5r3ih7vD9eNVyx6R2zavzo3sytf5562nXHvCzfkfvLX8p8N0RCp7V9/+lP5Sz4JECBAgAABAgQIECBAgECfAjb57pNIhtEmcMrMC+Lsoy4vafZdm66Px7fdVXKtHk6aGpvj7cf/cZx11Mv67G6a4fX2E/44lkw5qUvepgax6C4oLhAgQIAAAQIECBAgQIBAQcBvjQUKB7Uk8Jol786WfT0eW7MlYfn0zWc+E/MmLu12/6F8nlr8HNvUEq9f+ttxxqxL4+5NP4wnt98TBzsPLwec0Dw5t/n3+dmm4LPGz8sRpLfJFadpLbOLTx0TIECAAAECBAgQIECAAIESAQGmEg4nI0ngd07/+yNuztimcfEHZ/xjv59/x/IP9DvvYDK+cvEvRfo5kvTe0z52JI8Vnjlm6imRflLa17Yr9rXvicljp0dL0/hCnnRwsKM19mf38mnK2JnR3Dgmf+qTAAECBAgQIECAAAECBAh0ERBg6kLiAoHaF5iQbQCefrpLuw5uKbk8o+XoknMnBAgQIECAAAECBAgQIECgXMAeTOUizgnUucD6vc+VCMwYN6fk3AkBAgQIECBAgAABAgQIECgXEGAqF3FOoM4Fbll/TYnAkiknl5w7IUCAAAECBAgQIECAAAEC5QICTOUizgnUkMC2Axuzzc4fi0OHDvWrVw9vuSXW7322kPeo8YvijNkvKZw7IECAAAECBAgQIECAAAEC3QnYg6k7FdcI1IjA1gMb4j+e+MtIy9zOmv2yLFh0aUxtmdWld22dB+MHz38p7tj43ZJ7r8o2JG9saCq55oQAAQIECBAgQIAAAQIECJQLCDCVizgnUIMCaSbTj9b8d+5nQvOUmDV+XswaNy8OdrbG5v1rY+v+9dF+qK2k58dPOzOOn35myTUnBAgQIECAAAECBAgQIECgOwEBpu5UXCNQwwL72nfF6t3p58lue9kYjfGS+W+ISxe8qdv7LhIgQIAAAQIECBAgQIAAgXIBAaZyEdt7CxsAAEAASURBVOcEakhg5ri5cdrMi7N9mB6N3W3b++zZ3AlL4vXH/U7Mm3hsn3llIECAAAECBAgQIECAAAECeQEBpryETwI1KJD2XnrL8X+Q69mWbBnc2j0rYtfB7bGnbUekmUzjmibFUeMXxOwJ2c/4+TFpzLQaVNAlAgQIECBAgAABAgQIEBhqAQGmoRZWPoERIpDbdynbe0kiQIAAAQIECBAgQIAAAQKVFmisdIHKI0CAAAECBAgQIECAAAECBAgQqC8BAab6Gm+9JUCAAAECBAgQIECAAAECBAhUXECAqeKkCiRAgAABAgQIECBAgAABAgQI1JeAAFN9jbfeEiBAgAABAgQIECBAgAABAgQqLiDAVHFSBRIgQIAAAQIECBAgQIAAAQIE6ktAgKm+xltvCRAgQIAAAQIECBAgQIAAAQIVF2iueIkKJECAAAECBAgQIECAQAUEOjs7Y9++ffHgww9VoLTaLOJA64HYd/BA3Ld8TG12sEK92j55X8zs7Ih3Hni4QiXWZjEndhyMxrZ9MX7jg7XZwQr0qqH9QHREawVKqr0iBJhqb0z1iAABAgQIECBAgEBNCIwdOzbOO/ucmDhxQk30Zyg60dHRGcfMXxQTTzw0FMXXTJlzmhpiakNjnDned6m3QR17qDPGtS+OcZx6ZOrM/pkb2+Sft+6ABJi6U3GNAAECBAgQIECAAIERITB/3rwR0Y4R3YgZM0Z080ZC46ZljZg/EhqiDQRqWMAeTDU8uLpGgAABAgQIECBAgAABAgQIEBgOAQGm4VBWBwECBAgQIECAAAECBAgQIECghgUEmGp4cHWNAAECBAgQIECAAAECBAgQIDAcAgJMw6GsDgIECBAgQIAAAQIECBAgQIBADQsIMNXw4OoaAQIECBAgQIAAAQIECBAgQGA4BASYhkNZHQQIECBAgAABAgQIECBAgACBGhYQYKrhwdU1AgQIECBAgAABAgQIECBAgMBwCAgwDYeyOggQIECAAAECBAgQIECAAAECNSwgwFTDg6trBAgQIECAAAECBAgQIECAAIHhEBBgGg5ldRAgQIAAAQIECBAgQIAAAQIEalhAgKmGB1fXCBAgQIAAAQIECBAgQIAAAQLDISDANBzK6iBAgAABAgQIECBAgAABAgQI1LCAAFMND66uESBAgAABAgQIECBAgAABAgSGQ0CAaTiU1UGAAAECBAgQIECAAAECBAgQqGEBAaYaHlxdI0CAAAECBAgQIECAAAECBAgMh4AA03Aoq4MAAQIECBAgQIAAAQIECBAgUMMCAkw1PLi6RoAAAQIECBAgQIAAAQIECBAYDgEBpuFQVgcBAgQIECBAgAABAgQIECBAoIYFBJhqeHB1jQABAgQIECBAgAABAgQIECAwHAICTMOhrA4CBAgQIECAAAECBAgQIECAQA0LCDDV8ODqGgECBAgQIECAAAECBAgQIEBgOAQEmIZDWR0ECBAgQIAAAQIECBAgQIAAgRoWEGCq4cHVNQIECBAgQIAAAQIECBAgQIDAcAgIMA2HsjoIECBAgAABAgQIECBAgAABAjUsIMBUw4OrawQIECBAgAABAgQIECBAgACB4RAQYBoOZXUQIECAAAECBAgQIECAAAECBGpYQICphgdX1wgQIECAAAECBAgQIECAAAECwyEgwDQcyuogQIAAAQIECBAgQIAAAQIECNSwgABTDQ+urhEgQIAAAQIECBAgQIAAAQIEhkNAgGk4lNVBgAABAgQIECBAgAABAgQIEKhhAQGmGh5cXSNAgAABAgQIECBAgAABAgQIDIeAANNwKKuDAAECBAgQIECAAAECBAgQIFDDAgJMNTy4ukaAAAECBAgQIECAAAECBAgQGA4BAabhUFYHAQIECBAgQIAAAQIECBAgQKCGBQSYanhwdY0AAQIECBAgQIAAAQIECBAgMBwCAkzDoawOAgQIECBAgAABAgQIECBAgEANCwgw1fDg6hoBAgQIECBAgAABAgQIECBAYDgEBJiGQ1kdBAgQIECAAAECBAgQIECAAIEaFhBgquHB1TUCBAgQIECAAAECBAgQIECAwHAICDANh7I6CBAgQIAAAQIECBAgQIAAAQI1LCDAVMODq2sECBAgQIAAAQIECBAgQIAAgeEQaB6OStRBgAABAgQIECBAgACBgQq0trbGNd+9Npqamgb6aF3l7+joYNSPEefUD6QsC6e+nSZOmBCvfsUr+85YZzkEmOpswHWXAAECBAgQIECAwGgRaGpoiOkt4+LyJcePliYPezu37d8f921eF5e97PJhr3s0VXj3hh1xy/0PxV9PuGQ0NXvY2/rmiTvjpdsfjp3HXjrsdY+WCpv2b48xe54bLc0d1nYKMA0rt8oIECBAgAABAgQIEOi/QEMua1P872f/n6ufnGnPk4YsEGeWV+9jfqixMTqzLHvDbLjepNozn0NZhk5OPTI1RPZPXfbPnNRVwB5MXU1cIUCAAAECBAgQIECAAAECBAgQGICAANMAsGQlQIAAAQIECBAgQIAAAQIECBDoKiDA1NXEFQIECBAgQIAAAQIECBAgQIAAgQEICDANAEtWAgQIECBAgAABAgQIECBAgACBrgICTF1NXCFAgAABAgQIECBAgAABAgQIEBiAgADTALBkJUCAAAECBAgQIECAAAECBAgQ6CogwNTVxBUCBAgQIECAAAECBAgQIECAAIEBCAgwDQBLVgIECBAgQIAAAQIECBAgQIAAga4CAkxdTVwhQIAAAQIECBAgQIAAAQIECBAYgIAA0wCwZCVAgAABAgQIECBAgAABAgQIEOgqIMDU1cQVAgQIECBAgAABAgQIECBAgACBAQgIMA0AS1YCBAgQIECAAAECBAgQIECAAIGuAgJMXU1cIUCAAAECBAgQIECAAAECBAgQGICAANMAsGQlQIAAAQIECBAgQIAAAQIECBDoKiDA1NXEFQIECBAgQIAAAQIECBAgQIAAgQEICDANAEtWAgQIECBAgAABAgQIECBAgACBrgICTF1NXCFAgAABAgQIECBAgAABAgQIEBiAgADTALBkJUCAAAECBAgQIECAAAECBAgQ6CogwNTVxBUCBAgQIECAAAECBAgQIECAAIEBCAgwDQBLVgIECBAgQIAAAQIECBAgQIAAga4CAkxdTVwhQIAAAQIECBAgQIAAAQIECBAYgEDzAPLKSoAAAQIECBDoVmD//v2xfefOOHCgNXd/3LiWmD51aowfP77b/C4SIECAAAECBAjUloAAU22Np94cgUBHZ3vsOrgtpo87qsenDx06FNtaN8a0sbOjqXHw/9i0dR7M6twa7Z1tMbF5SowfMzmaGpp6rL9SN1o79sfO1i0xacy0mJDVKREgQGAwAp2dnfHsqufj6edWxfYdO7otavq0aXHcMUvi2CWLo7HRxOlukVwkQIAAAQIECNSAwOB/U64BBF2ofYH7X/hJrNr9WKGjVyx5T2w7sCFuWPOVeHbXI5ECL1PHzoxl086IS+e/qRBsWr37ybhj43Xx9I6HYl/7rmhuHBsLJh4Xr1j8zlg8eXmhvN4OUjDpmZ0Px1Pb743Vu5+KnVlgaX/77i6PzJmwJI7P6j911sUxb+IxXe73duHRrXfEih33FbIsn35OnDTjvNz51qyfd228Ph7a8tPY07azkCf1d+7EY+PCuVfE0qmnFa47IECAQH8Etm7bHrffc0/s2bO31+wp8HTPAw/GEytXxoXnnBMzZ0zvNb+bBAgQIECAAAECo1NAgGl0jptWD1Dg+d1PxH0v/Ljw1PlzXh1ffOLDWcDl8H9xT4Gfe1+4ITbsfS5+7ZSPZsGgJ3N52g+1FZ5rz4JFq3Y/nl3/y/iVk/4yFkw6rnCv/CDNivr2s/+cBZceihRk6itt3Lcq0s/N66+JM2e/NF65+Bdj4pipfT2Wu792z4qS/k3JgkcpwPTg5pvimmf+KYr7kC8w9Tf9PLX9vnjFonfEJfOvyt/ySYAAgV4Fnl+zLu66977o6OzoNV/xzRSI+vFPb4nzzj4rFi+cX3zLMQECI0Tg2VXPxaIFC6O52a8II2RINIMAAQKjSsBc9VE1XBpbKYEvPfnXJcGl4nLX7X0mvrfq8/HNZ/6x28BMyptmPF294uORls71lNJsoSe339Ov4FJ5GfdvvjE+9+ifxZ6DhwNg5Xn6Or9n0w/j60//Q499yD9/KDrjB6v/M25c+7X8JZ8ECBDoUeCFzVvijmzm0kCCS/nC0jPp2VSGRIDAyBN45PHH4trrvhdPPPVktLe3j7wGahEBAgQIjGgB/3liRA+Pxg2VQNr/6JSZF+ZmCnUcao8UjFmx4/5CdXdvuj53PL55cly+8K2Rlq+lmU0/WP2lLGD0vxvYbm/dlJvltHjKiYXnejsY2zgum/G0LGaNnxczx82NGePmxJhsyd2ObE+kHa2b4+Ett+T2ecqXseXAuviPJz4Uv3XaJ6OhoSF/uV+faVncreu/ncub6lg69fSYP3FpVufRsXn/+twsrFW7Di8ZTBlv3/CduGjulTG2qaVfdchEgED9CRw8eDBuvfPuXoPrfamkwHwq44qfuzzGjh3bV3b3CRAYZoHWg63x4CMPZ0Gmp+LEE06IZUuPM6NpmMdAdQQIEBitAgJMo3XktHtQAouy/ZPesuwPovFnG2sfP+3M+MeH3x8v7F9dUu47T/hA5ANIS6aclAVfxmczm/5fIc/qPU8V7hculh3MHr8g0pK8M2ZfGi3Z8z2lly58Sxbo+kF857l/LWTZkC2Ze2rHvZH2VBpISsGqlKa3HBXvOOFPY87ExV0eTwGo65//YhzK/pfS/vY9kWZOnT/nVV3yukCAAIEk8PiKlZF++RxsSmWksl50ysmDLcrzBAgMkYBA0xDBKpYAAQI1LGCJXA0Prq71LHDazIsKwaWUq7lxTBbEObvkgRktc7oEj/IbZ+czbty7Kn/Y5XNy9qa2XznpQ/G+F/2/XNCmt+BSeji9RS4Fos456hUlZd2+4bsl5/09mZC9ne69p36s2+BSKuOieVfGiTPOLSmueJ+qkhtOCBCoe4H2jvZY8fQzFXNIZaUyJQIERrZAPtBk6dzIHietI0CAwEgQEGAaCaOgDcMukGYVlaeZ4+eWXDpqQtc845snxvjmSYV8aSPvntLksdOP6O1sL1/09pIi0xvo0p5PA03p7XATxkzp9bEL51xRcj8t+5MIECDQncCmTZujo6P/m3p3V0bxtVRWKlMiQGB0CBQHmh63R9PoGDStJECAwDALWCI3zOCqGxkCk7LZReVpYnPpG9u6y5OeSTOD0nKylIrfQpe7UIH/S2+OS23Z276zUNq2Axtj7sRjCud9HaR9l/qz1G3h5BNKikr9autojTEjdB+mr3zDRuQlA+aEQC8CX/mfa3q5ewS30ksNBrgfXF+13Hz7HRUvs7xOf26UizgnMDiBFGh6KNuj6clsj6bl2R5Nx9ujaXCgniZAgEANCZjBVEODqSv9FxiXzUQqT+PK9kcqnqlUnLc435HMLCouq6fjtBF4cdrdNrC3yc0aN79kplVxWcXHaWnghGwj8+K0q63nWVnF+RwTIFBnAhUOLuX0hqLMOhsW3SVQLYF8oCktnTOjqVqjoF4CBAiMLAEzmEbWeGjNMAk0N3b96jeVXWtqGNNtaxobDj/bcait2zw9XTyYzQ7aeXBL7MzeHJf7zN5m19ZxoEv28qV37Z0Hu+Tp7cK0ltm93S65N3nsjNjXvrtwLdWd3nInESBAoFggvRCgIftfJdNQlFnJ9imLAIG+BXIvC0kzHCUCBAgQqHuBw78p1z0FgHoSaIymLt3Nv1Euf6OxofsJfsXX2zv7t0Htmt0r4qfr/iee2H53vvgBfbZ3DiyQNbVlVr/Lby4LpHUMsK5+VyQjAQKjWyD9/ljZ+FLkXmJZ6TJHt7LWExg1AmPHjo0Tj8+WyB23LJqb/UoxagZOQwkQIDCEAv5tMIS4ih65Ag3dBI8aojSg1F2e1KPi67n/atdLNzftWx3fee5z8dyux3rJ1fetQ4c6+85UlKOn5X1FWUbl4dve+OZR2W6NJjAcAuV7Db3tDVdVtNonnloRDz46uD/Lyht0xqmnxIknHF9+eVDn5XtP+XNjUJwerjOBb1/33di3b1+vvRZY6pXHTQIECNS1gABTXQ+/zg+lwL623fGfT34kdrR2fUvS5DHTY0q2NC29aa67YFB6c1z5MrmhbKuyCRAg0JfA3DlzKh5gSmVKBAiMDoGWsS029R4dQ6WVBAgQqJqAAFPV6FVcywKd2Yyjr678ZElwKS1FO+Ooy+L8o18dcyYu7rX7X3j8QwJMvQq5SYDAcAtMmzol0s+OnbsqUnW+vIoUphACBIZMQGBpyGgVTIAAgZoTEGCquSHVoZEg8OjW2+PpnQ8WmpI2xn3r8e+PE2ecW7jW20F3s556y+8eAQIEhkPg1JNOilvuuLMiVaWyJAIERq6AwNLIHRstI0CAwEgVEGAaqSOjXaNaYN2ep0vaf9G8K/sdXDqUvYlle+umkuedECBAYCQILJg3N9LP2vUbBtWcfDmDKsTDBAgMiYDA0pCwKpQAAQJ1ISDAVBfDrJPDLbB+77MlVS6delrJeW8nuw5ujY5D/Xs7XW/luEeAAIGhEDj/nLPihptuPuKlcmlpXCpDIkBg5AmcetLJsWjBQm+FG3lDo0UECBAYFQKlr80aFU3WSAIjX2DDvlUljTx6Qu97LhVnfnrnQ8WnjgkQIDCiBMY0j4mXXXJxzDn6qAG3Kz2Tnk1lSAQIjDyBY5ccI7g08oZFiwgQIDBqBASYRs1QaehoEmhpHFfS3DQrqT+po7M9blz71f5klYcAAQJVE0ivKX/Jiy+IM047NdJymr5SypPypmfSsxIBAgQIECBAgEDtCVgiV3tjqkcjQCDNWNpxcHOhJat3PxULJi0rnPd0cM8LPyx581xP+VwnQIBAtQUaGxpj+bLjYukxi2PN2vWxfuPG2L5zZxzY35pr2rjxLTF92tSYd/ScWLhgnllL1R4w9RMgQIAAAQIEhlhAgGmIgRVfnwJzJi6Op3bcW+j8j9dcHafMvDCmjJ1RuFZ+cNfG6+O6VV8ov+ycAAECI1ogLXc7dsni3M+IbqjGESBAgAABAgQIDKmAJXJDyqvwehU4Y/Zl0dx4eBnIgY698c+P/HE8suXW6DzUWWBp6zwYa3aviK+t/Pu49rl/yW3u3dTQHNNbji7kcUCAAAECBAgQIECAAAECBEa6gBlMI32EtG9UCswePz9esfAdcd3zny+0f+fBLXH1yk9E09P/EDPGzYmGaIwt+9dGZ/a/4vSaJe+OJ7ffE9tbNxVfdkyAAAECBAgQIECAAAECBEasgBlMI3ZoNGy0C1w494o4fdYlXbrRcag9NmeBpRf2ry4JLjVkIaeXLXhrnDfnlV2ecYEAAQIECBAgQIAAAQIECIxkATOYRvLoaNuoFmhoaIg3L/v9OGP2pfGd5/41th7Y0GN/0gbgP7fonXHs1FN7zOMGAQIECBAgQIAAAQIECBAYqQICTCN1ZLSrogJXLX1vpJ/e0vxJS+OvLrimtyy5e+85+SN95inOsGzaGfG7p/9DbNz3fG5J3Ob962JP286YPGZaTM42/T5u6ukxc/zc4kfil07885Lzvk5eufiXIv0cSXrvaR87ksc8Q4AAAQIECBAgQIAAAQIECgICTAUKBwSGTqC5cUwsmHRc7mfoalEyAQIECBAgQIAAAQIECBCojoA9mKrjrlYCBAgQIECAAAECBAgQIECAQM0ICDDVzFDqCAECBAgQIECAAAECBAgQIECgOgICTNVxVysBAgQIECBAgAABAgQIECBAoGYEBJhqZih1hAABAgQIECBAgAABAgQIECBQHQEBpuq4q5UAAQIECBAgQIAAAQIECBAgUDMCAkw1M5Q6QoAAAQIECBAgQIAAAQIECBCojoAAU3Xc1UqAAAECBAgQIECAAAECBAgQqBkBAaaaGUodIUCAAAECBAgQIECAAAECBAhUR0CAqTruaiVAgAABAgQIECBAgAABAgQI1IyAAFPNDKWOECBAgAABAgQIECBAgAABAgSqIyDAVB13tRIgQIAAAQIECBAgQIAAAQIEakZAgKlmhlJHCBAgQIAAAQIECBAgQIAAAQLVERBgqo67WgkQIECAAAECBAgQIECAAAECNSMgwFQzQ6kjBAgQIECAAAECBAgQIECAAIHqCAgwVcddrQQIECBAgAABAgQIECBAgACBmhForpme6AgBAgQIECBAgAABAjUl0N7REdsO7I+vPvVQTfWrkp05dCgrrSHif679ViWLrbmyOjOoY7OfL+76ds31rZIdatp1KPs6HYqpT3yzksXWVlnZ96ht/KTa6lOFeiPAVCFIxRAgQIAAAQIECBAgUFmBceNa4m1vfHNlC1Va3QqkWJzUt0AWr5QIHJGAANMRsXmIAAECBAgQIECAAIEhF2jwq+6QG9dRBb5NdTTYuloVAXswVYVdpQQIECBAgAABAgQIECBAgACB2hEQYKqdsdQTAgQIECBAgAABAgQIECBAgEBVBASYqsKuUgIECBAgQIAAAQIECBAgQIBA7QgIMNXOWOoJAQIECBAgQIAAAQIECBAgQKAqAgJMVWFXKQECBAgQIECAAAECBAgQIECgdgQEmGpnLPWEAAECBAgQIECAAAECBAgQIFAVAQGmqrCrlAABAgQIECBAgAABAgQIECBQOwICTLUzlnpCgAABAgQIECBAgAABAgQIEKiKgABTVdhVSoAAAQIECBAgQIAAAQIECBCoHQEBptoZSz0hQIAAAQIECBAgQIAAAQIECFRFQICpKuwqJUCAAAECBAgQIECAAAECBAjUjoAAU+2MpZ4QIECAAAECBAgQIECAAAECBKoiIMBUFXaVEiBAgAABAgQIECBAgAABAgRqR0CAqXbGUk8IECBAgAABAgQIECBAgAABAlUREGCqCrtKCRAgQIAAAQIECBAgQIAAAQK1IyDAVDtjqScECBAgQIAAAQIECBAgQIAAgaoICDBVhV2lBAgQIECAAAECBAgQIECAAIHaERBgqp2x1BMCBAgQIECAAAECBAgQIECAQFUEBJiqwq5SAgQIECBAgAABAgQIECBAgEDtCAgw1c5Y6gkBAgQIECBAgAABAgQIECBAoCoCAkxVYVcpAQIECBAgQIAAAQIECBAgQKB2BASYamcs9YQAAQIECBAgQIAAAQIECBAgUBUBAaaqsKuUAAECBAgQIECAAAECBAgQIFA7AgJMtTOWekKAAAECBAgQIECAAAECBAgQqIqAAFNV2FVKgAABAgQIECBAgAABAgQIEKgdAQGm2hlLPSFAgAABAgQIECBAgAABAgQIVEVAgKkq7ColQIAAAQIECBAgQIAAAQIECNSOgABT7YylnhAgQIAAAQIECBAgQIAAAQIEqiIgwFQVdpUSIECAAAECBAgQIECAAAECBGpHQICpdsZSTwgQIECAAAECBAgQIECAAAECVREQYKoKu0oJECBAgAABAgQIECBAgAABArUjIMBUO2OpJwQIECBAgAABAgQIECBAgACBqggIMFWFXaUECBAgQIAAAQIECBAgQIAAgdoREGCqnbHUEwIECBAgQIAAAQIECBAgQIBAVQQEmKrCrlICBAgQIECAAAECBAgQIECAQO0ICDDVzljqCQECBAgQIECAAAECBAgQIECgKgICTFVhVykBAgQIECBAgAABAgQIECBAoHYEBJhqZyz1hAABAgQIECBAgAABAgQIECBQFQEBpqqwq5QAAQIECBAgQIAAAQIECBAgUDsCAky1M5Z6QoAAAQIECBAgQIAAAQIECBCoioAAU1XYVUqAAAECBAgQIECAAAECBAgQqB0BAabaGUs9IUCAAAECBAgQIECAAAECBAhURaC5KrWqlAABAgQIECBAgAABAn0IHGxtje9df11MGjuuj5z1e7vjUGccaG+LiZMn1y9CP3q+t60jtu49EDubJ/Yjd/1mmRFtMfXgnpjUaC5KT9+CjuxGS0tLXPbq1/SUpW6vCzDV7dDrOAECBAgQIECAAIGRLdDY0Bgt2S+6F85ZOLIbWsXWbT+wPx7etinOPeucKrZi5Fd936ad8eTDj8Unxpw98htbxRa+q2l9XLr/4biwtb2KrRjZVW9viFgztmVkN7JKrRNgqhK8agkQIECAAAECBAgQ6FugKQsyTRwztu+MdZrjQHt7NGVBuIkTJtSpQP+63dzSGu3Zd2lLI6fexFobW6Ipy2CeV89KB7JbjQ1ZlEnqImDeWxcSFwgQIECAAAECBAgQIECAAAECBAYiIMA0EC15CRAgQIAAAQIECBAgQIAAAQIEuggIMHUhcYEAAQIECBAgQIAAAQIECPz/7d0JcF3XWcDxT09P+y5blixLtrzbSeOliZ0EktA0GySlQ7qkTaGdYUhhBigzncIwMCzDOgPD0MIwUArttGUgQ9pOm7RNk5C2qbM7TrzF+67Vsqxd1vaeJM53nft073279Va9/8koume5557zuyWJP845FwEEEEhGgABTMlq0RQABBBBAAAEEEEAAAQQQQAABBBAIEyDAFEZCAQIIIIAAAggggAACCCCAAAIIIIBAMgIEmJLRoi0CCCCAAAIIIIAAAggggAACCCCAQJgAAaYwEgoQQAABBBBAAAEEEEAAAQQQQAABBJIRIMCUjBZtEUAAAQQQQAABBBBAAAEEEEAAAQTCBAgwhZFQgAACCCCAAAIIIIAAAggggAACCCCQjAABpmS0aIsAAggggAACCCCAAAIIIIAAAgggECZAgCmMhAIEEEAAAQQQQAABBBBAAAEEEEAAgWQECDAlo0VbBBBAAAEEEEAAAQQQQAABBBBAAIEwAQJMYSQUIIAAAggggAACCCCAAAIIIIAAAggkI0CAKRkt2iKAAAIIIIAAAggggAACCCCAAAIIhAkQYAojoQABBBBAAAEEEEAAAQQQQAABBBBAIBkBAkzJaNEWAQQQQAABBBBAAAEEEEAAAQQQQCBMgABTGAkFCCCAAAIIIIAAAggggAACCCCAAALJCBBgSkaLtggggAACCCCAAAIIIIAAAggggAACYQIEmMJIKEAAAQQQQAABBBBAAAEEEEAAAQQQSEaAAFMyWrRFAAEEEEAAAQQQQAABBBBAAAEEEAgTIMAURkIBAggggAACCCCAAAIIIIAAAggggEAyAv5kGtMWgVwUmJsPykRgVKaCE1JWXCFVJXVSWlyW0qEuLCzI8Ey/1JetEl9R9Ljs8MwVqfKn5vk6r/HAsEwHJ6XCX23mVSt+X0lK5xWps8D8rIzOXJUSX5nUla2I1IQyBBBAICQQCAakq7tXevsvy/DIqExPzVh15RVl0lBfJ63NLdLe1iol/vT/8ys0KC4QQAABBBBAAAEEMi5AgCnj5Pn7wB9c+KrMzk9ZEyiSInmk44mUBXKePv9lmVsIvte3Tz684bekuKg4KtY1E1Da3/+8nBw+ID0TZ2XB/OVMjeUtsq3hNrm16X5pqVrnrIp6fW70iBy+ui9Uf0/ro1JeXCU/uvR1OTt6yApiVfhrZFPdDvn51R+W9potVtvB6T55ued7cmb0oIzMDIjP/NVStV7uav2w7Fx5T6i/WBfzC3PSOX7KzOctuTB2zOpH5+idV0NZs2yp3y03Nd4hm+p3xuoyrK5r/LS8deWFUPnqyvVy5+pHrPy1wJgcuPJ/5udFGZq+HGpT6a+V1WYuO1feLbeuui9UzgUCCCAwvzAvp8+el+MnT8vM7PWgklNlYsIE/yeuWcGnQ0ePyU3btsiWTRtiBumd93ONAAIIIIAAAgggkF8CBJjy631ldbT9k5fk/NjR0Bh2N90rHbU3hfI3enF1qtcKFtn3N1W0xQwuvdb3A3mx60mZmZu0bwn7rUESbfd637Oyp/kB+aV1vx43GHZlskvevvLjUF8axPmxeU7vtfOhsqnguBwdfNUEgd6Vz+34kgm4TctX3v0jK/hkN5qXeXPPOfnWmX8yYTif7Fh5l10V8fdTZ74kp0feMSuwxiPWOwt1FdWb/c9ZP5vqdsmH1j8hTRVrnE2iXg/NXHbNb1vDHivAdGnshPzP6b9zzcHuZDI4JudGD1s/neMn5ZfX/2ZGVlHZz+c3AgjkpsDs7Ky8uv8tudx/JaEBagDq4JGj0tffLz+/d4+UlpYmdB+NEEAAAQQQQAABBPJHIPpen/yZAyPNkMC62u2uJ3VNnHblbzTT7emnoyZ60OqHF78q+hMruOQcx4IJ9uhKp6+f+Atzz/XVV876WNe6qsoZXHK21S15T539ojx9/t8jBma0rT7722f/SXR1UKx0+OrPEgouefvQVVVfPvqHZowXvFUJ5zW49NXjfxZ1Ds6OdHXT/575R2cR1wggUIACuiXux/teTji45CTSgJTeq32QEEAg9wTOX7wgweD1FeW5NzpGhAACCCCQ6wKsYMr1N5RD4+uo8QSYzJarVKSuiTOubryBLLvyQP+L1qokO6+//UUlZqvYLmmv3iKtVRtEV+l0mnGde29Lm9320vgJEwz6sjy2+fN2UdzfY7ODsrZmm9kO98tmq1yltXJJgyx20i11mnxmK98H2x6TdSYwplvkdNXTyOyAVafb/t41K55ub/klKx/vb9qXzmVF+errPxWrpdJsyxudGbT6PD38tjgDe9Nz1+Rrx/9cPr/rX6wzmuL176yfnZs2AbB/trYm6pbHDXW3yJqqTdJc2W7mcVV6rp2V40NvOm8x+TdEV3qtMm1ICCBQmAJvvPW2jIzGDpzHktF7tY+777wjVjPqEEAgCwJHjx+TQ0eOyPatW2Xzxk3i9/NHhSy8Bh6JAAII5K0A/9bI21eX+YG312y1tnzpyhxN3pVHNzqiLnP2kDOt8wSytE4PvP5pz1POZuZA70r59LY/lvW1N7vK72h5WMZmh6xVS/2TnaG6I1dflnvbPm62lLWFymJd1JQ0yGe2/Yk5YLvKaqaBLD1I/JgJsjjThzp+wxVAaq5cK/969PdDTfRspXgBJn3WnuYHZW/zQ1JT2hC613uhgawTQ2/Jt8zqKXtFlm6t22+2zd1r6pJJ9nZHddTAm55Z5U0nhvbLU2e+aG0FtOte7fu+PLrxt+0svxFAoIAEunv7RH+Wmux+2lpXL7Ur7kcAgRQL6JbWQ0ePyIlTpwg0pdiW7hBAAIHlLsAWueX+hlM4P/1C2+qqjlCPo2aFjwZylpL0i2V9kxdDXWigpbG8OZS3L94Z+Km1OsjO6xfOPnvzX4cFl+z62tJGU/+30lLZYRdZB2b/pMsdpApVRrjYagIudnDJrr55xZ32pfVbV/7savqAq2xN9UapK138+tplc3ZVrPSJzV+QP3j/V+S+9k/GDC7ZfWxv3CMPrf20nbV+v3n5OSsI5ypMIKPjf8I4Rgou6e3bG/fK3a2/4urp0MBLZtXTnKuMDAIIFIbA0ePHUzbRVPaVskHREQIIhATsQNMzz/7QBJtOsnUuJMMFAggggEA0AQJM0WQojyigW8acqduzvc1Zl8h1nzlAW7+gZqd1EQ4N19VLL/V8y25i/daDs/XrZrGSBod+Yc1HXE2ODr5ibfFyFUbJRDo8e0V5q6t1XelKs5KqwlWmGf2KnZ3iBeF0LsW+5BYT7m3+Ral1BLHGA8OurXP2s+P91oPMW+M47m15yBy6vji+4EJAxmeH43VNPQIILDMB3dq2lK1xXo5U9+ftnzwCCKRGgEBTahzpBQEEECgEAQJMhfCWUzjHRM9hGjVn+Lze98PQz9B0f8RRdI27z1/y9q83XRw/7lq9pGV7Vj2ov+ImDaDoGUZ2WjDrmA6ZQ7UTSdUl9WHNqkpqXWXVpeFttEGlf7Gdfokt1St+ioqKZFWF+xwk/XJessm7OinS/eqgZ0I5k55PRUIAgcIS6Luc/D9j4gmlo894z6QeAQRuTMAZaDrOiqYbQ+QuBBBAYJkLLC5LWOYTZXqpEfCuMOqacJ+fZD/lsDnv6PnOb9pZ8yW1Ubl/7adCefvCeWC1lkU64Ht4+ord3PqtZyi112xxlUXL+H0lsmPl3fLG5WdDTbz9hSo8F+Xvnb3kLNbDvp2povj6+UzOMr32tguYA7WLI/TnvS+Z/MqKVtEvydlpPDBiXyb02yc+WVO9KaG2ei7UlamuUNt4q7JCDVN88eS3E9/imOJH0x0CeSfw5He+m9IxLywsiAa3U5kOHn1XDr17LJVdhvXFPzfCSChAYEkCGmg6bM5oOmnOaNpmDgPfwmHgS/LkZgQQQGA5CRBgWk5vMwNz0bONGspWyfDM9aBPz8Q5s8Vt3nxJzb0Y7szIQddozphAyP0SHmByHhRe6is3Zyatc92nGftZdsVKzzY1uzzab297b3/R7tMv1HmTc6uY1hWbAFak5PMVu4p1W1miKTgfsM62Gp29ar4eZ37M7+ngtbDbeybOusqC5jyrZJJusfO+t2j363t3JlYwOTW4RqBABFIbW7qOlo4+C+R1ME0Esi1AoCnbb4DnI4AAArknQIAp995Jzo9Iv/JmB2lm56etM41aqhYDQ7NzM3Jp/IRrHhoMmQyMSaVji9mEWXFj96ON9St1viJ3YEbLR94LZum1plhfWbvewv13b3tvf+7Wi7lIY/GWefP23T5xz0PPkYqXBqf65OXe78k7Az8xW+rit/f2p4GpZFJd2cqEmxd7gm3JPivhB9EQAQRyVkA/CpDqlI4+Uz1G+kMAgdgCevyAmBWOJAQQQAABBAgw8b+BpAV0m5zzHCPd5uYMMF0YezcUIKnwV8tUcML6gpuuYtq58p7Q87wHhEc6f0kbj8wMhO7Ri0hnI7kaeDL6ZTpn0gOxNUCi2+dipUhbQbx/GPLm7f6891r/8WVXen7r9sHvX/gPeXfwdeM076lNPJvsvfpuSAgggEDCAvoHyBRvkbP+UJrqPhOeEA0RQGApAqWlpbJ9i9kit2mz+P38kWIpltyLAAIILBcB/m2wXN5kBufhDQTpNrc9zQ+ERnBmZPFcoPvaPik/uPifVp1um3MGmLrGT4fu0YtI5y9puW4Rc6ZkA0xVJXXO261rPUOosbw5rDzTBfoFvSdP/4NoUM6bKszh5HVmG5uuwKoyh4Z7g1a95gt8/ZOd3tuWdf7xjz22rOfH5BBYioD3rKHHP/roUroLu7ent0/2vf5GWPlSCu75uTtlTav7IwJL6U/v9Z49xT83lirK/YUk8PSzP5DJycmYUyawFJOHSgQQQKCgBQgwFfTrv7HJ6yHb9sok7cF7ULd9/lKZORB7b/NDZtvXd02QaFA08OQ8JNZ5nx443V4d+eBu74HZui0vmRSpvbfPZPpLZdvnL/1XWHDpZvPluztaHpYNdbfEfNRzl75RcAGmmCBUIoBAWgWam5ukuLhY5ubmUvIc7Uv7JCGAQH4IlJWWcah3frwqRokAAghkTcB9MnPWhsGD80lAV9Ksrd4WGvKVyS6ZmZuy8rqd7ep0j3W9uX6XOQTbL1sbbrPyeuZS3+RF61oDTd2OQ6pXV22Q0uJyq877t3pzqLgzjc8OO7Nxr73t9TDxypKauPelu8HAVI+80ve06zH3t39KPrX1D+MGl/SmEXMAOAkBBBDIlIC/2G+2wmxM2eO0L+2ThAACuS2ggaWdt+yQDz/8iNy0dRvb4XL7dTE6BBBAIKsCBJiyyp+/D3duZ9PzhfRrcpqc2+O21N9qldm/r9e/Y5UNTHWboNTiEmxnf1YDx9+WHGAyZy45U31Zbvx/zL1fgdtUt0vubfu4c6gxr4emL8espxIBBBBItcBNWzaL/mFzqUn70L5ICCCQuwIElnL33TAyBBBAIFcFCDDl6pvJ8XF5z2HqmjhljdjeHqeZrfXvt8o21u0Q/3tfITttzmHS1DVxxvpt/83bn12uvxs8AaHxwJCzOu71uDlvyZlyJcCkZyg5kzolk4Zm+pNpTlsEEEBgyQJ69spdd+wNOxMumY51Faz2oX2REEAg9wQILOXeO2FECCCAQL4IEGDKlzeVY+NcU71JiosWtzbogd16YPW50cPWSFurNkp1ab11XVpcJuvr3mddd46ftLbT6cHgzrSuZrsz67r2BoQ6x0/JZGDM1SZW5tTw265q74ooV2UGM33XLrie1ly51pWPldHg1FRwPFYT6hBAAIG0CKxqWil37tljtkAXJ92/3qP3ah8kBBDIPYFbbrqZrXC591oYEQIIIJA3AgSY8uZV5dZA/b4Saate3N7QbVYk6c/0e9ve7NVL9qi3vrdd7noQ6og4vyDXWN4SCkbZ7Z2/NZilh4DbaW4hKAcHXrKzMX9fvnYp7BDytTVbY96Tqcqy4grXo8bMQeiJpv/r/O9Em9IOAQQQSLnAuvY1ct8v3C3V1VUJ961t9R69l4QAArkpsKFjPWcs5earYVQIIIBAXggs/qk9L4bLIHNJwLnqaNycc3Sg/8XQ8OyDve2CrQ3Xz2PS/PGhN8zXzy7ZVRJre5w20hVMu5vuDbXXi/39L1grplyFETJv9v/IVdpY1iI7Vt7tKstWprlynevRujIrkXRp7IScHrl+llUi7WmDAAIIpENgRWODPPLA/bJn9y5pqK+L+git0zbaVu8hIYAAAggggAACCCxPgcU9TstzfswqjQLWwdy9iw84ePUlK1PprxFddeRMukppZXmr+cJcrxwe2Cfz5i87rau5yb6M+vsDbR8zq5Z+GrpPv1T3P6f+Xj6x5QtS4ot8jse+nu+aQNTzrj61n+Ki5Ld1uDpJUabFsyVO53fbqvsl1oHnesbVU2e+mKIR0A0CCCCwNAGfzyebNqy3fianpmVkdESmp2esTsvLy6Shrk4qKtyrNZf2RO5GAAEEEEAAAQQQyFUBVjDl6pvJg3Gtq9nmGqVuf9O02Rzu7SsK/5+WvarJGVzS9rECKlqvSQNU3lVMJ4b3yzdO/KU59+mIda6TtpubD5qtemflmfNfkec7v6lFoaSrl3Y1fSCUz/bFFrOqq7508Yt2+jW+b5z8a3ml92kJzgdCw5szrrriS7fFfePEX8nke2cvNVW0hdpwgQACCGRboLKiXFpbWmRDxzrrR68JLmX7rfB8BBBAAAEEEEAgcwKsYMqc9bJ7UoW/WlZVtMuVqS7X3LY2XP96nKvQZHSb3Kt9z7iKK/210lSR2Hkc97V/Us6aQ8RHZ6+G+rgwdkwuHP9zKTJ/NZavtuqC87OhevtCDyT/0Poncmb1ko5Lz2D6yKbfla+Z8dtpxpxh9aNLX5fnLn3TfD1vlZT7K+XKZJcEFxYDTtp2T/ODoivFftbTbd/KbwQQQAABBBBAAAEEEEAAAQSyJhC+zCRrQ+HB+SjgXX1UZA7j3ly3O+JUOsxWuFJfuavOuwrKVenJ1JWtlM++72+srXaeKrP2Z0EGzfa7SMElfeavbf0jK8DlvS/b+Y11O+SB9l91HWKuY1owmwGHZi6Lfi3OG1zaufIe+VDHE9keOs9HAAEEEEAAAQQQQAABBBBAICRAgClEwcWNCHgP6G43X5arLKmJ2FWxzy+b6ne56jpq45+/5LxBV/V8bueX5KG1nzEreGqdVWHXfnM2k55p9IXd/yZboqyqCrspCwV6LtTv7PhHcR6aHmkYK8wZVh/d+Dl5bPPnRb/iR0IAAQQQQAABBBBAAAEEEEAgVwSKFkzKlcEwDgSSEZhfmDfnLZ2RzvGTMhEYkcnAuJT5K6TKXyctVR2ysfYWKSkuS6bLrLbV/1O8MtVpfrplYKpHRmcGrCBaTWmDrK3ZKm0meEdCAAEEogk8+e2nXFWPf/RRV75QMk9+57uuqT7+scdceTIIIJBfAsHZgPz4hefkoY6t+TXwDI52cGpSDlztlfseeDCDT82/R73WOyz7DhyUP612f506/2aS3hH/XnGXPDz4ljwUJEwQTXqwSORsXa3c/vDD0ZoUbDlnMBXsq8//ietB4hp40Z/lkIqKiqS5cp31sxzmwxwQQAABBBBAAAEEEEAAAQQKR4AtcoXzrpkpAggggAACCCCAAAIIIIAAAgggkBYBAkxpYaVTBBBAAAEEEEAAAQQQQAABBBBAoHAECDAVzrtmpggggAACCCCAAAIIIIAAAggggEBaBAgwpYWVThFAAAEEEEAAAQQQQAABBBBAAIHCESDAVDjvmpkigAACCCCAAAIIIIAAAggggAACaREgwJQWVjpFAAEEEEAAAQQQQAABBBBAAAEECkeAAFPhvGtmigACCCCAAAIIIIAAAggggAACCKRFgABTWljpFAEEEEAAAQQQQAABBBBAAAEEECgcAQJMhfOumSkCCCCAAAIIIIAAAggggAACCCCQFgECTGlhpVMEEEAAAQQQQAABBBBAAAEEEECgcAQIMBXOu2amCCCAAAIIIIAAAggggAACCCCAQFoECDClhZVOEUAAAQQQQAABBBBAAAEEEEAAgcIRIMBUOO+amSKAAAIIIIAAAggggAACCCCAAAJpESDAlBZWOkUAAQQQQAABBBBAAAEEEEAAAQQKR4AAU+G8a2aKAAIIIIAAAggggAACCCCAAAIIpEWAAFNaWOkUAQQQQAABBBBAAAEEEEAAAQQQKBwBf+FMlZkigAACCCCAAAIIIIBAPgnMLyzIyOyMPNN5Op+GndGxzs3Ny2xgVp594fmMPjffHjY3PyebZFa+E3gh34ae0fEuzMzJiL9Ynqkqy+hz8+lhc/PzUuYryqchZ2ysBJgyRs2DEEAAAQQQQAABBBBAIBmB0rJS+fijH0nmFtoigAACaRcoEgJMkZAJMEVSoQwBBBBAAAEEEEAAAQRyQsBXxKkeOfEiGAQCCCAQR4B/WscBohoBBBBAAAEEEEAAAQQQQAABBBBAILYAAabYPtQigAACCCCAAAIIIIAAAggggAACCMQRIMAUB4hqBBBAAAEEEEAAAQQQQAABBBBAAIHYAgSYYvtQiwACCCCAAAIIIIAAAggggAACCCAQR4AAUxwgqhFAAAEEEEAAAQQQQAABBBBAAAEEYgsQYIrtQy0CCCCAAAIIIIAAAggggAACCCCAQBwBAkxxgKhGAAEEEEAAAQQQQAABBBBAAAEEEIgtQIAptg+1CCCAAAIIIIAAAggggAACCCCAAAJxBAgwxQGiGgEEEEAAAQQQQAABBBBAAAEEEEAgtgABptg+1CKAAAIIIIAAAggggAACCCCAAAIIxBEgwBQHiGoEEEAAAQQQQAABBBBAAAEEEEAAgdgCBJhi+1CLAAIIIIAAAggggAACCCCAAAIIIBBHgABTHCCqEUAAAQQQQAABBBBAAAEEEEAAAQRiCxBgiu1DLQIIIIAAAggggAACCCCAAAIIIIBAHAECTHGAqEYAAQQQQAABBBBAAAEEEEAAAQQQiC1AgCm2D7UIIIAAAggggAACCCCAAAIIIIAAAnEECDDFAaIaAQQQQAABBBBAAAEEEEAAAQQQQCC2AAGm2D7UIoAAAggggAACCCCAAAIIIIAAAgjEESDAFAeIagQQQAABBBBAAAEEEEAAAQQQQACB2AIEmGL7UIsAAggggAACCCCAAAIIIIAAAgggEEeAAFMcIKoRQAABBBBAAAEEEEAAAQQQQAABBGILEGCK7UMtAggggAACeSHgK3L/K31+fj4vxp3KQXrn7DVJ5bPoCwEEEEAAAQQQQMAt4P6vUXcdOQQQQAABBBDIEwF/id810kAg6MoXQsY7Z69JIRgwRwQQQAABBBBAIFsCBJiyJc9zEUAAAQQQSKFAid8dYArOFV6AyTtnr0kKuekKAQQQQAABBBBAwCNAgMkDQhYBBBBAAIF8FCgpKXENe2ZmxpUvhIx3zl6TQjBgjggggAACCCCAQLYECDBlS57nIoAAAgggkEKB6upqV29j4xOufCFkRsfGXdP0mrgqySCAAAIIIIAAAgikVIAAU0o56QwBBBBAAIHsCNRW17gePDbuDra4KpdpZnzCHVTzmizTaTMtBBBAAAEEEEAgJwQIMOXEa2AQCCCAAAIILE2gpsYdYBoZHVtah3l4t3fOXpM8nBJDRgABBBBAAAEE8kaAAFPevCoGigACCCCAQHSBxoZGV+WVgQFZWFhwlS3njM5V5+xMXhNnHdcIIIAAAggggAACqRUgwJRaT3pDAAEEEEAgKwJ1tbVSVlYWenYgGJSh4ZFQfrlf6Fx1znZSCzUhIYAAAggggAACCGRGgABTZpx5CgIIIIAAAmkVKCoqkuamVa5n9F6+7Mov50xff79remqhJiQEEEAAAQQQQACBzAgQYMqMM09BAAEEEEAg7QItzc2uZ1zs7HLll3PmwqVO1/S8Fq5KMggggAACCCCAAAIpFyDAlHJSOkQAAQQQQCA7Au1tbeLzLf6rfeLaNRkYHMzOYDL4VJ2jztVOaqAWJAQQQAABBBBAAIHMCSz+V2jmnsmTEEAAAQQQQCANAqUlpdLWusbV85lz51355ZjxzlEN1IKEAAIIIIAAAgggkDkBAkyZs+ZJCCCAAAIIpF1g/boO1zM6u3tcq3tclcsgMzFxTXSOzuQ1cNZxjQACCCCAAAIIIJAeAQJM6XGlVwQQQAABBLIisLqlxXw9rS707IWFBTl28lQov9wujp06JTpHO+nc1YCEAAIIIIAAAgggkFkBAkyZ9eZpCCCAAAIIpFVAv5x28/btrmfoAdhDIyOusuWQ0Tl5D/fWufP1uOXwdpkDAggggAACCOSbAAGmfHtjjBcBBBBAAIE4Amvb2qWmuibUSlf4HDh4OJRfLhcHDh5yrV7SOevcSQgggAACCCCAAAKZFyDAlHlznogAAggggEBaBXQFz627drueMTg0JKeX0YHfOpfBoWHXHHXOrF5ykZBBAAEEEEAAAQQyJkCAKWPUPAgBBBBAAIHMCeg5RG1r3F+UO3jkqAwvg61yOgedizO1r2nj7CUnCNcIIIAAAggggECGBQgwZRicxyGAAAIIIJApgVt37pYSf0nocfPz8/LKm/slEAyGyvLtIhAIyCtv7Bedi510ju/fucvO8hsBBBBAAAEEEEAgCwIEmLKAziMRQAABBBDIhEBlZaXsve0216MmJq7Jvldfl7m5OVd5PmR0zPtee0Mmrl1zDff22/aIzpWEAAIIIIAAAgggkD0BAkzZs+fJCCCAAAIIpF1AD73evHGT6zlXrl6V1/a/5Tog29UgBzN6ULmOWcfuTDq39rY2ZxHXCCCAAAIIIIAAAlkQIMCUBXQeiQACCCCAQCYFdu/YKU0rV7oe2d3bZ7aavZkXK5l05ZKOVcfsTDonnRsJAQQQQAABBBBAIPsCBJiy/w4YAQIIIIAAAmkVKC4ulnt+7i6pq61zPUcDNi+98lpOn8mkZy7pGL3BJZ2LzknnRkIAAQQQQAABBBDIvkCRWXK+kP1hMAIEEEAAAQQQSLfA5NSUvPjTn8i1SfcZRtXVVXLX7Xulob4+3UNIqn/9Wpwe6O09c6mqqkoe+MAHpaKiIqn+aIwAAggggAACCCCQPgECTOmzpWcEEEAAAQRyTkCDTC+9vE9Gx0ZdY/P5fGa72S2yZeMGV3m2MqfPnZeDR466vhanY9GVS/fefQ/BpWy9GJ6LAAIIIIAAAghEESDAFAWGYgQQQAABBJarwOzsrPka2ysy4DkwW+e7orFRbtu9UxqztJppyKxaOnDwkAwODYfx65lLui2utLQ0rI4CBBBAAAEEEEAAgewKEGDKrj9PRwABBBBAICsCenD2wSOH5cy5s2HPLyoqkvXr1srN27ZKtdmOlomk2+COnTwlFy51Rvy6nX4tTg/05sylTLwNnoEAAggggAACCCQvQIApeTPuQAABBBBAYNkIdHV3y5sH3jIHfQfC5qSBprVta2Sz2TbXtGJFWH0qCgYGB02zIjleAAAEc0lEQVSQ67x0dvdEDCyV+Evk9tv2SHtbWyoeRx8IIIAAAggggAACaRIgwJQmWLpFAAEEEEAgXwQmJyfl7cMHpbunJ+qQdSVTx9p2aW1pkcaGetHg040k/bbI0PCI9F6+LBc7u8IO8Hb22b6mTd6/c5dUVlY6i7lGAAEEEEAAAQQQyEEBAkw5+FIYEgIIIIAAAtkQ6DNBn7cPHZTxifGYjy/x+2VVU5PU19VKbU2N+amWsrIy8Rf7paTEb90bCAQlOBeUmZkZGRufMD/jMjI6JlcGBsxqqWDM/muqa+TWXbtltQlmkRBAAAEEEEAAAQTyQ4AAU368J0aJAAIIIIBARgR0hVFnd5ccO3Ei7Etz6R6AfiHu5u3bzba89hteIZXuMdI/AggggAACCCCAQGQBAkyRXShFAAEEEECgoAU00KQrmi5cuijdvT0yPz+fFg+fzydtrWvMoeId1oqlG916l5bB0SkCCCCAAAIIIIBAwgIEmBKmoiECCCCAAAKFKTAbmBU9DPxyf7/0D1yxtr0tRUK30zU3rZKW5mbr8O7SktKldMe9CCCAAAIIIIAAAjkgQIApB14CQ0AAAQQQQCBfBHRl0+jYmDmoe0jGzblKY+a8pomJCQkEAtbZSkFz9pImvzmLSc9qKikpkerqaqk15yrVmPOaGhsapa62li1w+fLCGScCCCCAAAIIIJCgAAGmBKFohgACCCCAAAIIIIAAAggggAACCCAQWcAXuZhSBBBAAAEEEEAAAQQQQAABBBBAAAEEEhMgwJSYE60QQAABBBBAAAEEEEAAAQQQQAABBKIIEGCKAkMxAggggAACCCCAAAIIIIAAAggggEBiAgSYEnOiFQIIIIAAAggggAACCCCAAAIIIIBAFAECTFFgKEYAAQQQQAABBBBAAAEEEEAAAQQQSEyAAFNiTrRCAAEEEEAAAQQQQAABBBBAAAEEEIgiQIApCgzFCCCAAAIIIIAAAggggAACCCCAAAKJCRBgSsyJVggggAACCCCAAAIIIIAAAggggAACUQQIMEWBoRgBBBBAAAEEEEAAAQQQQAABBBBAIDEBAkyJOdEKAQQQQAABBBBAAAEEEEAAAQQQQCCKAAGmKDAUI4AAAggggAACCCCAAAIIIIAAAggkJkCAKTEnWiGAAAIIIIAAAggggAACCCCAAAIIRBEgwBQFhmIEEEAAAQQQQAABBBBAAAEEEEAAgcQECDAl5kQrBBBAAAEEEEAAAQQQQAABBBBAAIEoAgSYosBQjAACCCCAAAIIIIAAAggggAACCCCQmAABpsScaIUAAggggAACCCCAAAIIIIAAAgggEEWAAFMUGIoRQAABBBBAAAEEEEAAAQQQQAABBBITIMCUmBOtEEAAAQQQQAABBBBAAAEEEEAAAQSiCBBgigJDMQIIIIAAAggggAACCCCAAAIIIIBAYgIEmBJzohUCCCCAAAIIIIAAAggggAACCCCAQBQBAkxRYChGAAEEEEAAAQQQQAABBBBAAAEEEEhM4P8B7nuG+YIfyPcAAAAASUVORK5CYII=" 7 | } 8 | }, 9 | "cell_type": "markdown", 10 | "metadata": {}, 11 | "source": [ 12 | "# word representations in vector space\n", 13 | "\n", 14 | "\n", 15 | "to work with nlp we need embedding(converting txt into meaningful vec representation). word2vec was one the intial idea that guided the innovation of embedding.\n", 16 | "\n", 17 | "![word2vec.png](attachment:word2vec.png)\n", 18 | "\n", 19 | "as we can see here words is getting converted into vectors(list of numbers). ther are new methods do this in current time.\n", 20 | "\n", 21 | "highy recommended this [blog-post](https://jalammar.github.io/illustrated-word2vec/) by jalmmar to read more about Word2vec.\n", 22 | "\n", 23 | "\n", 24 | "## model architectures\n", 25 | "\n", 26 | "many models have been proposed for estimating continuous word representations, including latent semantic analysis (lsa) and latent dirichlet allocation (lda). but the distributed representations learned by neural networks have shown better performance. the computational complexity of these models is defined as:\n", 27 | "\n", 28 | "$$\n", 29 | "o = e \\times t \\times q\n", 30 | "$$\n", 31 | "\n", 32 | "where:\n", 33 | "- $e$ is the number of training epochs\n", 34 | "- $t$ is the number of words in the training set\n", 35 | "- $q$ is defined for each model architecture\n", 36 | "\n", 37 | "### feedforward neural net language model \n", 38 | "the feedforward neural net language model consists of input, projection, hidden, and output layers. at the input layer, $n$ previous words are encoded using a 1-of-v coding, where $v$ is the vocabulary size. the projection layer has dimensionality $n \\times d$. the computational complexity per training example is:\n", 39 | "\n", 40 | "$$\n", 41 | "q = n \\times d + n \\times d \\times h + h \\times v\n", 42 | "$$\n", 43 | "\n", 44 | "where:\n", 45 | "- $d$ is the dimensionality of the word vectors\n", 46 | "- $h$ is the size of the hidden layer\n", 47 | "- $v$ is the size of the vocabulary\n", 48 | "\n", 49 | "## new log-linear models\n", 50 | "the paper introduces two new model architectures to learn distributed representations of words with reduced computational complexity:\n", 51 | "\n", 52 | "1. **continuous bag-of-words model (cbow)**: the cbow model predicts the current word based on its context by averaging the word vectors in the context. the training complexity is:\n", 53 | "\n", 54 | "$$\n", 55 | "q = n \\times d + d \\times \\log_2(v)\n", 56 | "$$\n", 57 | "\n", 58 | "2. **continuous skip-gram model**: the skip-gram model maximizes the classification of a word based on another word in the same sentence. the training complexity is:\n", 59 | "\n", 60 | "$$\n", 61 | "q = c \\times (d + d \\times \\log_2(v))\n", 62 | "$$\n", 63 | "\n", 64 | "where $c$ is the maximum distance between words.\n", 65 | "\n", 66 | "## results\n", 67 | "the paper compares the quality of different word vector models by measuring their performance on semantic-syntactic word relationships. it shows that the new architectures (cbow and skip-gram) outperform the previous neural network models in accuracy while requiring significantly less computational cost.\n", 68 | "\n", 69 | "## conclusion\n", 70 | "the study shows that it is possible to train high-quality word vectors using simpler model architectures, such as cbow and skip-gram, which are computationally efficient and capable of learning from much larger datasets. this advancement could lead to new applications in nlp tasks like machine translation, information retrieval, and question-answering systems.\n" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": null, 76 | "metadata": {}, 77 | "outputs": [], 78 | "source": [] 79 | } 80 | ], 81 | "metadata": { 82 | "language_info": { 83 | "name": "python" 84 | } 85 | }, 86 | "nbformat": 4, 87 | "nbformat_minor": 2 88 | } 89 | -------------------------------------------------------------------------------- /03-sequence-modeling/05-seq2seq/seq2seq.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# seq2seq \n", 8 | "\n", 9 | "## background\n", 10 | "\n", 11 | "### the problem with traditional neural networks\n", 12 | "\n", 13 | "traditional neural networks like anns (artificial neural networks) and cnns (convolutional neural networks) weren't cutting it for text data.\n", 14 | "\n", 15 | "Two main significant reasons : \n", 16 | "\n", 17 | "- **fixed input size**: these models typically expect a fixed input size, which doesn't work well for variable-length sequences like sentences.\n", 18 | "- **lack of temporal understanding**: they don't naturally capture the order and context of words in a sequence.\n", 19 | "\n", 20 | "### rnns (recurrent neural networks)\n", 21 | "\n", 22 | "rnns were introduced to handle sequential data better. they process input sequentially, maintaining a hidden state that can capture some context. however, they had their own issues:\n", 23 | "\n", 24 | "- **vanishing gradient problem**: as the sequence gets longer, rnns struggle to carry information from earlier time steps.\n", 25 | "- **limited context**: they have trouble capturing long-term dependencies in the data.\n", 26 | "\n", 27 | "### lstm (kind of solve)\n", 28 | "\n", 29 | "long short-term memory (lstm) networks were design to solve the RNN problem \n", 30 | "\n", 31 | "- **gating mechanisms**: lstms use gates to control the flow of information, helping to mitigate the vanishing gradient problem.\n", 32 | "- **better at long-term dependencies**: they can carry relevant information across longer sequences.\n", 33 | "\n", 34 | "but even lstms (and their variants like grus - gated recurrent units) struggle with very long sequences.\n", 35 | "\n", 36 | "## seq2seq: \n", 37 | "\n", 38 | "seq2seq (sequence-to-sequence) models were designed to handle tasks where both input and output are sequences, like machine translation.\n", 39 | "\n", 40 | "### core idea\n", 41 | "\n", 42 | "the seq2seq model consists of two main parts:\n", 43 | "\n", 44 | "1. **encoder**: processes the input sequence\n", 45 | "2. **decoder**: generates the output sequence\n", 46 | "\n", 47 | "this architecture allows the model to map sequences of different lengths, which is crucial for tasks like translation where input and output lengths may vary.\n", 48 | "\n", 49 | "### how seq2seq works\n", 50 | "\n", 51 | "let's break down the process:\n", 52 | "\n", 53 | "1. **input processing**:\n", 54 | " - text input is tokenized (split into words or subwords)\n", 55 | " - tokens are converted to numerical representations via an embedding layer\n", 56 | "\n", 57 | "2. **encoding**:\n", 58 | " - the embedded input sequence is fed into the encoder (usually lstm-based)\n", 59 | " - encoder processes the sequence, updating its hidden state at each step\n", 60 | " - final hidden state of the encoder captures the essence of the input sequence\n", 61 | "\n", 62 | "3. **context vector**:\n", 63 | " - the final hidden state of the encoder becomes the \"context vector\"\n", 64 | " - this vector is meant to encapsulate the meaning of the entire input sequence\n", 65 | "\n", 66 | "4. **decoding**:\n", 67 | " - decoder initializes its hidden state with the context vector\n", 68 | " - at each step, the decoder:\n", 69 | " - takes the previous output and its current hidden state as input\n", 70 | " - produces a probability distribution over the output vocabulary\n", 71 | " - selects the most likely token as the output for that step\n", 72 | "\n", 73 | "5. **output generation**:\n", 74 | " - the process continues until the decoder generates an end-of-sequence token or reaches a maximum length\n", 75 | "\n", 76 | "### IMP finding in seq2seq\n", 77 | "\n", 78 | "1. **separate encoder and decoder**:\n", 79 | " - allows handling different languages or domains for input and output\n", 80 | " - enables more parameters without excessive computational cost\n", 81 | " - can be trained separately, adding flexibility\n", 82 | "\n", 83 | "2. **deep lstms**:\n", 84 | " - stacking multiple lstm layers (typically 4) in both encoder and decoder\n", 85 | " - increases model capacity to capture complex patterns\n", 86 | " - helps maintain long-term dependencies\n", 87 | "\n", 88 | "3. **input reversal**:\n", 89 | " - reversing the order of input tokens (but not output tokens)\n", 90 | " - creates shorter dependencies between source and target\n", 91 | " - makes optimization easier for gradient-based methods like sgd\n", 92 | "\n", 93 | "4. **attention mechanism** (a later addition):\n", 94 | " - allows decoder to focus on different parts of input for each output token\n", 95 | " - significantly improves performance, especially for long sequences\n", 96 | " - paved the way for transformer models\n", 97 | "\n", 98 | "\n", 99 | "### beam search decoding\n", 100 | "\n", 101 | "instead of greedily selecting the most probable token at each step, beam search maintains multiple candidate sequences:\n", 102 | "\n", 103 | "- keeps top-k most likely sequences at each step\n", 104 | "- improves output quality by exploring more possibilities\n", 105 | "\n", 106 | "### handling unknown words\n", 107 | "\n", 108 | "seq2seq models struggle with words not in their vocabulary. solutions include:\n", 109 | "\n", 110 | "- subword tokenization (e.g., byte-pair encoding)\n", 111 | "- pointer-generator networks for copying unknown words from input\n", 112 | "\n", 113 | "### bidirectional encoders\n", 114 | "\n", 115 | "using bidirectional lstms in the encoder to capture context from both directions of the input sequence.\n", 116 | "\n", 117 | "\n", 118 | "## limitations \n", 119 | "\n", 120 | "\n", 121 | "- still struggle with very long sequences\n", 122 | "- computationally intensive, especially during training\n", 123 | "- require large amounts of parallel data for training\n" 124 | ] 125 | }, 126 | { 127 | "cell_type": "markdown", 128 | "metadata": {}, 129 | "source": [] 130 | } 131 | ], 132 | "metadata": { 133 | "language_info": { 134 | "name": "python" 135 | } 136 | }, 137 | "nbformat": 4, 138 | "nbformat_minor": 2 139 | } 140 | -------------------------------------------------------------------------------- /04-transformers/04-gpt/gpt.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "\n", 8 | "### understanding generative pre-trained transformer (gpt) \n", 9 | "\n", 10 | "\n", 11 | "\n", 12 | "the generative pre-trained transformer (gpt) family of models, introduced by openai, represents a significant advancement in language modeling and natural language processing. these models employ a decoder-only transformer architecture and utilize an autoregressive approach to text generation. the evolution from gpt-1 through subsequent versions has demonstrated remarkable scaling properties, with each iteration showing significant improvements in performance and capabilities.\n", 13 | "\n", 14 | "\n", 15 | "\n", 16 | "gpt models utilize a decoder-only transformer architecture, which differs from encoder-decoder models like t5. the architecture consists of multiple transformer blocks stacked upon each other, with each block containing self-attention mechanisms and feed-forward neural networks. the model processes text as a sequence of tokens and predicts the next token based on all previous tokens in the sequence.\n", 17 | "\n", 18 | "\n", 19 | "> attention mechanism\n", 20 | "\n", 21 | "gpt implements a masked self-attention mechanism where each token can only attend to its previous tokens and itself. this causality constraint is crucial for the autoregressive nature of the model. the attention mechanism computes queries, keys, and values for each token and uses scaled dot-product attention to weight the importance of different tokens in the sequence.\n", 22 | "\n", 23 | "\n", 24 | "\n", 25 | "\n", 26 | "\n", 27 | "the pre-training of gpt models follows an autoregressive language modeling objective. the model learns to predict the next token in a sequence given all previous tokens. this unsupervised learning approach allows the model to learn from vast amounts of text data without requiring labeled examples.\n", 28 | "\n", 29 | "\n", 30 | "\n", 31 | "> tokenization\n", 32 | "\n", 33 | "gpt models use byte-pair encoding (bpe) tokenization, which breaks down text into subword units. this approach provides a balance between character-level and word-level tokenization, allowing the model to handle both common and rare words effectively.\n", 34 | "\n", 35 | "\n", 36 | "the model uses learned position embeddings to maintain awareness of token positions in the sequence. these embeddings are added to the token embeddings before being processed by the transformer layers.\n", 37 | "\n", 38 | "\n", 39 | "gpt models demonstrate impressive scaling properties, with performance improving predictably with model size. key scaling factors include:\n", 40 | "\n", 41 | "\n", 42 | "during text generation, the model employs various decoding strategies such as greedy decoding, beam search, or sampling with temperature control. these strategies help balance between output quality and diversity.\n", 43 | "\n", 44 | "the success of gpt models has significantly influenced the direction of nlp research and applications, demonstrating the potential of large-scale language models trained on vast amounts of text data. their ability to generate coherent and contextually appropriate text has opened new possibilities in various domains, from creative writing to technical documentation.\n", 45 | "\n", 46 | "\n", 47 | "> model size\n", 48 | "\n", 49 | "increasing the number of parameters by expanding model depth and width helps in performance. gpt-2 ranges from 117 million to 1.5 billion parameters across its variants.\n", 50 | "\n", 51 | "> dataset size\n", 52 | "\n", 53 | " gpt-2 was trained on a diverse dataset of 8 million web pages selected for quality.\n", 54 | "\n", 55 | "\n", 56 | "> context window\n", 57 | "\n", 58 | "gpt-2 processes sequences of up to 1024 tokens, allowing it to maintain longer-range dependencies than previous models. this expanded context window enables more coherent generation of long passages.\n", 59 | "\n" 60 | ] 61 | }, 62 | { 63 | "cell_type": "markdown", 64 | "metadata": {}, 65 | "source": [] 66 | } 67 | ], 68 | "metadata": { 69 | "kernelspec": { 70 | "display_name": "Python 3", 71 | "language": "python", 72 | "name": "python3" 73 | }, 74 | "language_info": { 75 | "codemirror_mode": { 76 | "name": "ipython", 77 | "version": 3 78 | }, 79 | "file_extension": ".py", 80 | "mimetype": "text/x-python", 81 | "name": "python", 82 | "nbconvert_exporter": "python", 83 | "pygments_lexer": "ipython3", 84 | "version": "3.9.6" 85 | } 86 | }, 87 | "nbformat": 4, 88 | "nbformat_minor": 2 89 | } 90 | -------------------------------------------------------------------------------- /04-transformers/05-lora/lora.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### let's understand lora (low-rank adaptation)\n", 8 | "\n", 9 | "> **why do we need lora?**\n", 10 | "- when we fine-tune large language models, it's incredibly expensive to update all parameters.\n", 11 | "- lora provides a memory-efficient alternative that achieves similar results while only training a small fraction of parameters.\n", 12 | "- traditional fine-tuning requires storing and updating the entire model, which is impractical for most users without expensive hardware.\n", 13 | "- lora introduces a clever \"bypass\" solution that keeps the original pre-trained weights frozen and only trains small adapter modules.\n", 14 | "\n", 15 | "> **what is lora?**\n", 16 | "- lora stands for low-rank adaptation, a technique that makes fine-tuning large models more accessible.\n", 17 | "- instead of modifying all weights directly, lora decomposes weight updates into smaller matrices through low-rank decomposition.\n", 18 | "- this dramatically reduces the number of trainable parameters (often by 10,000x or more) while maintaining performance.\n", 19 | "- example: instead of training billions of parameters in a large model, lora might only train a few million parameters.\n", 20 | "\n", 21 | "> **how lora works?**\n", 22 | "- lora freezes the pre-trained model weights completely.\n", 23 | "- for each weight matrix we want to adapt, lora adds a parallel \"bypass\" connection.\n", 24 | "- this bypass consists of two smaller matrices: a down-projection and an up-projection.\n", 25 | "- the original path: input → original frozen weight → output\n", 26 | "- the lora path: input → down-projection → up-projection → output\n", 27 | "- the final output combines both paths.\n", 28 | "\n", 29 | "> **three key steps**\n", 30 | "- 1. decompose each weight matrix update into two smaller matrices (down-projection and up-projection)\n", 31 | "- 2. initialize these matrices so their product is zero (ensuring no change to behavior initially)\n", 32 | "- 3. train only these small matrices while keeping the original weights frozen\n", 33 | "\n", 34 | "> **why is this efficient?**\n", 35 | "- the rank of these matrices (r) is tiny compared to the original dimensions.\n", 36 | "- this makes the number of trainable parameters much smaller than the original model.\n", 37 | "- storage requirements are reduced significantly, often enabling fine-tuning on consumer hardware.\n", 38 | "- during inference, lora matrices can be merged with the original weights with no performance penalty.\n", 39 | "\n", 40 | "> **benefits of lora**\n", 41 | "- dramatically reduced memory requirements for fine-tuning\n", 42 | "- faster training times\n", 43 | "- lower computational costs\n", 44 | "- ability to switch between different adaptations quickly\n", 45 | "- preserves the general knowledge of the base model while adding specialized capabilities" 46 | ] 47 | } 48 | ], 49 | "metadata": { 50 | "language_info": { 51 | "name": "python" 52 | } 53 | }, 54 | "nbformat": 4, 55 | "nbformat_minor": 2 56 | } 57 | -------------------------------------------------------------------------------- /04-transformers/06-rlhf/rlhf.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### let's understand rlhf (reinforcement learning from human feedback)\n", 8 | "\n", 9 | "> **why do we need rlhf?**\n", 10 | "- large language models trained only on text prediction don't naturally align with human values and preferences.\n", 11 | "- models may generate harmful, misleading, or unhelpful content if optimized solely to predict the next token.\n", 12 | "- we need a way to teach models to produce outputs that humans actually prefer and find helpful.\n", 13 | "- supervised fine-tuning alone can't capture nuanced human preferences about quality, safety, and helpfulness.\n", 14 | "\n", 15 | "> **what is rlhf?**\n", 16 | "- rlhf stands for reinforcement learning from human feedback, a technique for aligning ai systems with human preferences.\n", 17 | "- it teaches models to generate content humans prefer by using human judgments as rewards.\n", 18 | "- human evaluators compare different model outputs, ranking them from most to least preferred.\n", 19 | "- these preferences are used to train a reward model that scores outputs, which then guides the language model's learning.\n", 20 | "\n", 21 | "> **how rlhf works?**\n", 22 | "- rlhf typically follows a three-stage process:\n", 23 | "- 1. supervised fine-tuning: first train the model on high-quality examples to get a decent starting point.\n", 24 | "- 2. reward model training: collect human preferences between outputs and train a model to predict which responses humans prefer.\n", 25 | "- 3. reinforcement learning: optimize the language model using the reward model's scores as feedback.\n", 26 | "\n", 27 | "> **the key components**\n", 28 | "- the policy model: the language model being trained to generate preferred outputs\n", 29 | "- the reward model: evaluates outputs based on human preferences\n", 30 | "- ppo (proximal policy optimization): the reinforcement learning algorithm typically used\n", 31 | "- kl penalty: ensures the model doesn't deviate too far from its original capabilities\n", 32 | "\n", 33 | "> **practical implementation**\n", 34 | "- human evaluators compare pairs of model responses and select which one better satisfies criteria like helpfulness.\n", 35 | "- these preferences create a dataset for training the reward model.\n", 36 | "- during reinforcement learning, the model generates many variations of responses to prompts.\n", 37 | "- these responses are scored by the reward model, and the policy is updated to maximize these scores.\n", 38 | "- a kl divergence penalty prevents the model from changing too drastically and forgetting its capabilities.\n", 39 | "\n", 40 | "> **benefits of rlhf**\n", 41 | "- models that produce more helpful, harmless, and honest outputs\n", 42 | "- reduced likelihood of generating harmful content\n", 43 | "- better alignment with complex human values that are difficult to specify explicitly\n", 44 | "- improved ability to follow instructions and understand user intent\n", 45 | "- more natural, helpful interactions that better meet human expectations" 46 | ] 47 | } 48 | ], 49 | "metadata": { 50 | "language_info": { 51 | "name": "python" 52 | } 53 | }, 54 | "nbformat": 4, 55 | "nbformat_minor": 2 56 | } 57 | -------------------------------------------------------------------------------- /04-transformers/07-vision-transformer/vit.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### let's understand vision transformer (vit)\n", 8 | "\n", 9 | "> **why do we need vision transformer?**\n", 10 | "- traditionally, convolutional neural networks (cnns) dominated computer vision tasks.\n", 11 | "- cnns have built-in inductive biases for images, but these can sometimes limit what the model learns.\n", 12 | "- transformer architecture had revolutionized nlp, suggesting it might also benefit vision.\n", 13 | "- vision transformer brings the power of self-attention to image processing, enabling better global understanding of images.\n", 14 | "- vit can capture long-range dependencies between image patches that might be missed by cnns.\n", 15 | "\n", 16 | "> **what is a vision transformer?**\n", 17 | "- vision transformer (vit) adapts the transformer architecture from nlp to computer vision.\n", 18 | "- instead of processing text tokens, it processes image patches as tokens.\n", 19 | "- the key insight: split an image into fixed-size patches and treat each patch like a word token.\n", 20 | "- this approach removes the convolution operations entirely in the pure vit design.\n", 21 | "- it applies the same self-attention mechanism that made transformers successful in language tasks.\n", 22 | "\n", 23 | "> **how vision transformer works?**\n", 24 | "- 1. split the image into fixed-size patches (like 16×16 pixels)\n", 25 | "- 2. flatten each patch into a 1d vector\n", 26 | "- 3. project these vectors to the model dimension\n", 27 | "- 4. add position embeddings to retain spatial information\n", 28 | "- 5. process through standard transformer encoder blocks\n", 29 | "- 6. use the output of the special [class] token for classification\n", 30 | "\n", 31 | "\n", 32 | "\n", 33 | "> **transformer encoder block:**\n", 34 | "- each block contains:\n", 35 | " - multi-head self-attention (msa)\n", 36 | " - layer normalization (ln)\n", 37 | " - multilayer perceptron (mlp)\n", 38 | " - residual connections\n", 39 | "\n", 40 | "\n", 41 | "\n", 42 | "> **key advantages of vit**\n", 43 | "- global receptive field from the start (unlike cnns which build this gradually)\n", 44 | "- flexible attention to relevant parts of the image regardless of distance\n", 45 | "- fewer inductive biases, allowing the model to learn more complex patterns\n", 46 | "- excellent scaling properties - performance improves predictably with more data and compute\n", 47 | "- ability to visualize attention maps to see what the model focuses on\n", 48 | "- transfer learning capabilities across different vision tasks\n", 49 | "\n", 50 | "> **challenges and solutions**\n", 51 | "- requires more data than cnns to reach similar performance\n", 52 | "- computationally intensive for high-resolution images\n", 53 | "- positional information must be explicitly added\n", 54 | "- hybrid approaches combining cnn features with transformers often work best in practice\n", 55 | "- data augmentation and regularization are crucial for good performance\n" 56 | ] 57 | } 58 | ], 59 | "metadata": { 60 | "language_info": { 61 | "name": "python" 62 | } 63 | }, 64 | "nbformat": 4, 65 | "nbformat_minor": 2 66 | } 67 | -------------------------------------------------------------------------------- /05-image-generation/01-gan/gan.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### let's understand gan (generative adversarial networks)\n", 8 | "\n", 9 | "> **why do we need gan?**\n", 10 | "\n", 11 | "traditional generative models like variational autoencoders often produce blurry or unrealistic outputs. we needed a way to generate sharper, more realistic images, audio, and other media that truly captures the complexity of real-world data. supervised learning requires labeled data, but generative tasks often need to learn the underlying distribution of unlabeled data. gans introduced a revolutionary approach: instead of explicitly modeling probability distributions with mathematical formulas, they learn to generate data through an adversarial game between two neural networks.\n", 12 | "\n", 13 | "> **what is gan?**\n", 14 | "\n", 15 | "generative adversarial networks consist of two neural networks; a generator and a discriminator that compete against each other in a minimax game. the generator creates fake samples trying to fool the discriminator, while the discriminator works to distinguish between real and fake samples. this adversarial process forces both networks to improve: the generator creates increasingly realistic data, and the discriminator becomes better at spotting subtle flaws. after training, the generator can create new, never-before-seen samples that closely resemble the training data distribution.\n", 16 | "\n", 17 | "> **how gan works?**\n", 18 | "\n", 19 | "the generator takes random noise (typically from a normal or uniform distribution) as input and transforms it into synthetic data samples. the discriminator receives both real samples from the training dataset and fake samples from the generator, outputting a probability indicating whether each sample is real or fake. the generator aims to maximize the discriminator's error rate, while the discriminator aims to minimize its own error rate. mathematically, this forms a two-player minimax game where the generator minimizes and the discriminator maximizes the same objective function. training alternates between updating the discriminator and the generator, gradually improving both networks.\n", 20 | "\n", 21 | "\n", 22 | "> **training algorithm**\n", 23 | "\n", 24 | "the training process alternates between:\n", 25 | "\n", 26 | "1. training the discriminator:\n", 27 | " $$\\max_D V(D, G) = \\mathbb{E}_{x \\sim p_{data}(x)}[\\log D(x)] + \\mathbb{E}_{z \\sim p_z(z)}[\\log(1 - D(G(z)))]$$\n", 28 | "\n", 29 | "2. training the generator:\n", 30 | " $$\\min_G V(D, G) = \\mathbb{E}_{z \\sim p_z(z)}[\\log(1 - D(G(z)))]$$\n", 31 | " or equivalently:\n", 32 | " $$\\max_G \\mathbb{E}_{z \\sim p_z(z)}[\\log D(G(z))]$$\n", 33 | "\n", 34 | "when the system reaches equilibrium, the generator produces samples indistinguishable from real data, and the discriminator outputs 0.5 for all inputs, indicating it can no longer differentiate between real and fake samples.\n", 35 | "\n", 36 | "> **challenges in gan training**\n", 37 | "\n", 38 | "training gans is notoriously difficult due to several issues. mode collapse occurs when the generator produces limited varieties of outputs, failing to capture the full data distribution. vanishing gradients can happen when the discriminator becomes too effective too quickly, providing minimal feedback to the generator. training instability manifests as oscillating losses rather than convergence. these issues have led to numerous gan variants like wasserstein gan (wgan), which uses wasserstein distance instead of jensen shannon divergence, and spectral normalization gan (sn-gan), which stabilizes discriminator training through weight normalization.\n", 39 | "\n", 40 | "> **applications of gan**\n", 41 | "\n", 42 | "gans have revolutionized multiple fields with their ability to generate realistic data. in computer vision, they create photorealistic images, perform image-to-image translation (like converting satellite images to maps), and enhance low resolution photos. in medicine, gans generate synthetic medical images for training algorithms and data augmentation when real samples are scarce. they've also been applied to audio synthesis for creating realistic speech and music, text generation for creating coherent passages, and even drug discovery by generating molecular structures with specific properties. perhaps most famously, deepfakes highly realistic fake videos and images—are created using gan-based approaches.\n", 43 | "\n", 44 | "> **recent advances in gan**\n", 45 | "\n", 46 | "stylegan represents a significant advancement with its ability to generate incredibly realistic faces and control different aspects of image generation separately. biggan scaled up gan training to produce high-resolution, diverse images. cyclegan enabled unpaired image-to-image translation, allowing transformation between domains without paired examples. diffusion models, while technically different from gans, have recently outperformed them in image generation quality by gradually denoising random noise. gans continue to evolve, with research focusing on improving training stability, increasing output diversity, and extending their capabilities to new domains and applications.\n" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": null, 52 | "metadata": {}, 53 | "outputs": [], 54 | "source": [] 55 | } 56 | ], 57 | "metadata": { 58 | "kernelspec": { 59 | "display_name": "Python 3", 60 | "language": "python", 61 | "name": "python3" 62 | }, 63 | "language_info": { 64 | "name": "python", 65 | "version": "3.9.6" 66 | } 67 | }, 68 | "nbformat": 4, 69 | "nbformat_minor": 2 70 | } 71 | -------------------------------------------------------------------------------- /05-image-generation/02-vae/vae.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### let's understand vae (variational autoencoders)\n", 8 | "\n", 9 | "> **why do we need vae?**\n", 10 | "\n", 11 | "traditional autoencoders compress data into a lower-dimensional latent space but lack the ability to generate new samples effectively. we needed a generative model that could not only reconstruct inputs but also produce new, realistic samples by sampling from a continuous latent space. vae solves this by introducing probabilistic encoding that forces the latent space to be well-structured and meaningful. unlike gans which require adversarial training, vaes offer a more stable training process based on a clear mathematical foundation of variational inference. vaes also provide explicit probability distributions, allowing us to reason about the underlying data structure and uncertainty in a principled way.\n", 12 | "\n", 13 | "> **what is vae?**\n", 14 | "\n", 15 | "variational autoencoder is a generative model that combines deep learning with bayesian inference. it consists of an encoder network that maps input data to a probability distribution in latent space, and a decoder network that reconstructs the input from samples of this distribution. the key innovation is representing each point in latent space not as a single value but as a distribution (typically gaussian) defined by mean and variance parameters. this probabilistic approach enables smooth interpolation between data points and generation of new samples by sampling from the latent space. vae training optimizes a balance between reconstruction quality and ensuring the latent space follows a predefined prior distribution, usually a standard normal distribution.\n", 16 | "\n", 17 | "> **how vae works?**\n", 18 | "\n", 19 | "the encoder in a vae takes input data and outputs parameters of a probability distribution (mean μ and variance σ²) rather than a fixed encoding. the model then uses the reparameterization trick to sample from this distribution in a way that allows gradient flow during backpropagation: z = μ + σ * ε, where ε is random noise from a standard normal distribution. the decoder takes this sampled point z and reconstructs the input. during training, the vae optimizes two components: the reconstruction loss (how well the decoder reconstructs the input) and the kullback-leibler divergence between the encoder's distribution and a prior distribution (usually standard normal). this second term acts as a regularizer, ensuring the latent space is well-structured and continuous.\n", 20 | "\n", 21 | "\n", 22 | "> **detailed architecture**\n", 23 | "\n", 24 | "the encoder network typically consists of several layers that process the input and output two vectors: one for the means (μ) and one for the log-variances (log σ²) of the latent dimensions. we use log-variance instead of variance directly for numerical stability. these parameters define a multivariate gaussian distribution for each input. during training, we sample from this distribution using the reparameterization trick. the decoder network takes this sample and attempts to reconstruct the original input. the loss function combines reconstruction error (often mean squared error for continuous data or binary cross-entropy for binary data) with the kl divergence term that regularizes the latent space distributions to be close to the prior.\n", 25 | "\n", 26 | "> **vae vs traditional autoencoders**\n", 27 | "\n", 28 | "unlike traditional autoencoders that encode inputs as single points in latent space, vaes encode inputs as probability distributions. this probabilistic approach creates a continuous, structured latent space where similar inputs cluster together and interpolation between points produces meaningful outputs. standard autoencoders may have gaps or discontinuities in their latent space, making generation of new samples difficult. vaes solve this by enforcing a smooth, continuous latent space through the kl divergence regularization. this structure allows for semantic operations in latent space, such as attribute manipulation through vector arithmetic, and enables generation of diverse samples by sampling different points from the prior distribution and decoding them.\n", 29 | "\n", 30 | "> **applications of vae**\n", 31 | "\n", 32 | "vaes excel in various generative applications across different domains. in computer vision, they generate images, perform image inpainting to fill missing regions, and enable controlled image generation and editing. in natural language processing, text vaes can generate coherent paragraphs and perform sentence interpolation. for anomaly detection, vaes learn the normal data distribution, allowing them to identify outliers as samples with high reconstruction error. in drug discovery, vaes generate novel molecular structures with desired properties by learning the distribution of valid chemical compounds. vaes also excel at learning disentangled representations, where different dimensions in latent space correspond to interpretable features of the data, enabling controlled generation and attribute manipulation.\n", 33 | "\n", 34 | "> **limitations and extensions**\n", 35 | "\n", 36 | "despite their elegant mathematical foundation, vaes often produce blurrier outputs than gans, especially for images. this is partially due to the pixel-wise reconstruction loss, which doesn't capture perceptual quality effectively. to address these limitations, numerous vae variants have been developed. β-vae introduces a hyperparameter to control the trade-off between reconstruction quality and latent space regularity. vq-vae (vector quantized vae) uses discrete latent variables instead of continuous ones, producing sharper outputs. conditional vaes incorporate additional information like class labels to control the generation process. flow-based models and diffusion models extend vae concepts with more expressive transformation functions. hybrid approaches like vae-gans combine the stable training of vaes with the perceptual quality of gans." 37 | ] 38 | }, 39 | { 40 | "cell_type": "markdown", 41 | "metadata": {}, 42 | "source": [] 43 | } 44 | ], 45 | "metadata": { 46 | "language_info": { 47 | "name": "python" 48 | } 49 | }, 50 | "nbformat": 4, 51 | "nbformat_minor": 2 52 | } 53 | -------------------------------------------------------------------------------- /05-image-generation/03-diffusion/sd.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### let's understand stable diffusion\n", 8 | "\n", 9 | "> **why do we need stable diffusion?**\n", 10 | "\n", 11 | "previous generative models like gans and vaes struggled with generating high quality, diverse images at high resolutions. despite advances, these models required enormous computational resources and often produced artifacts or unrealistic features. we needed a fundamentally different approach that could create photorealistic images with precise control, while being more computationally efficient. text-to-image generation remained particularly challenging, with models failing to accurately interpret complex prompts and generate corresponding images. stable diffusion addresses these limitations by leveraging diffusion probabilistic models combined with latent space compression, resulting in remarkable image generation capabilities with reasonable computational requirements.\n", 12 | "\n", 13 | "> **what is stable diffusion?**\n", 14 | "\n", 15 | "stable diffusion is a latent diffusion model that generates images from text prompts or other images. unlike earlier diffusion models that operated in pixel space, stable diffusion works in a compressed latent space, making it much more computationally efficient. it consists of three key components: a text encoder (usually clip) that transforms text prompts into embeddings, a unet model that performs the diffusion process in latent space, and a variational autoencoder that decodes the latent representations into images. the model works by gradually denoising random noise into coherent images guided by text embeddings. by operating in a compressed latent space rather than full pixel space, stable diffusion can generate high-resolution images with significantly lower computational requirements than previous state-of-the-art models.\n", 16 | "\n", 17 | "> **how diffusion models work?**\n", 18 | "\n", 19 | "diffusion models are based on the concept of gradually adding noise to data and then learning to reverse this process. the forward diffusion process systematically destroys structure in data by adding gaussian noise over multiple steps until the data becomes pure noise. the model then learns the reverse diffusion process starting from random noise and gradually denoising it into meaningful data. mathematically, each step of the forward process can be seen as adding a small amount of gaussian noise to the previous state. the neural network is trained to predict the noise component at each step of the reverse process, allowing it to gradually denoise random samples into data that matches the training distribution. this approach creates a smooth path between random noise and structured data.\n", 20 | "\n", 21 | "> **the math behind diffusion models**\n", 22 | "\n", 23 | "the forward diffusion process adds noise to the data in t steps according to:\n", 24 | "\n", 25 | "$$q(x_t|x_{t-1}) = \\mathcal{N}(x_t; \\sqrt{1-\\beta_t}x_{t-1}, \\beta_t\\mathbf{I})$$\n", 26 | "\n", 27 | "where $$\\beta_t$$ is the noise schedule parameter at step t.\n", 28 | "\n", 29 | "this leads to:\n", 30 | "\n", 31 | "$$q(x_t|x_0) = \\mathcal{N}(x_t; \\sqrt{\\bar{\\alpha}_t}x_0, (1-\\bar{\\alpha}_t)\\mathbf{I})$$\n", 32 | "\n", 33 | "where $$\\alpha_t = 1 - \\beta_t$$ and $$\\bar{\\alpha}_t = \\prod_{s=1}^{t}\\alpha_s$$\n", 34 | "\n", 35 | "the model learns to reverse this process by predicting the noise $$\\epsilon$$ added at each step:\n", 36 | "\n", 37 | "$$\\epsilon_\\theta(x_t, t) \\approx \\epsilon$$\n", 38 | "\n", 39 | "the loss function is:\n", 40 | "\n", 41 | "$$L = \\mathbb{E}_{x_0,\\epsilon,t}[||\\epsilon - \\epsilon_\\theta(x_t, t)||^2]$$\n", 42 | "\n", 43 | "where $$x_t = \\sqrt{\\bar{\\alpha}_t}x_0 + \\sqrt{1-\\bar{\\alpha}_t}\\epsilon$$ and $$\\epsilon \\sim \\mathcal{N}(0, \\mathbf{I})$$\n", 44 | "\n", 45 | "for sampling, we use:\n", 46 | "\n", 47 | "$$x_{t-1} = \\frac{1}{\\sqrt{\\alpha_t}}(x_t - \\frac{1-\\alpha_t}{\\sqrt{1-\\bar{\\alpha}_t}}\\epsilon_\\theta(x_t, t)) + \\sigma_t\\mathbf{z}$$\n", 48 | "\n", 49 | "where $$\\mathbf{z} \\sim \\mathcal{N}(0, \\mathbf{I})$$ and $$\\sigma_t$$ controls the sampling stochasticity.\n", 50 | "\n", 51 | "> **stable diffusion architecture**\n", 52 | "\n", 53 | "stable diffusion's key innovation is performing the diffusion process in a compressed latent space instead of pixel space. the architecture consists of three main components working together. first, a text encoder (commonly clip) processes text prompts into embeddings that guide the image generation. next, a u-net with cross-attention layers performs the actual diffusion process in latent space, conditioned on the text embeddings. the u-net predicts noise to be removed at each denoising step. finally, a variational autoencoder decodes the final latent representation into a high-resolution image. this latent-space approach dramatically reduces computation requirements—working with 64×64 latent representations versus 512×512 or larger pixel images—while maintaining generation quality.\n", 54 | "\n", 55 | "> **conditioning and guidance**\n", 56 | "\n", 57 | "one of stable diffusion's powerful features is its ability to be conditioned on various inputs. text conditioning is the most common, where the diffusion process is guided by text embeddings to generate images matching textual descriptions. classifier free guidance improves this by interpolating between conditional and unconditional generation, controlled by a guidance scale parameter that determines how strongly the generation follows the conditioning signal. higher guidance values produce images that more closely match the prompt but may sacrifice some natural variation. stable diffusion can also be conditioned on images for tasks like inpainting (filling in missing parts), outpainting (extending images beyond their boundaries), and image-to-image translation where an input image is transformed according to a text prompt.\n", 58 | "\n", 59 | "> **applications and extensions**\n", 60 | "\n", 61 | "stable diffusion has found applications across numerous domains due to its versatility and accessibility. beyond basic text-to-image generation, it powers creative tools that help artists, designers, and content creators generate and edit visual content. in entertainment and media, it's used for concept art, storyboarding, and asset creation. researchers have extended stable diffusion for video generation by adding temporal layers, 3d model generation by incorporating additional 3d constraints, and personalized image generation by fine-tuning on specific concepts or styles. techniques like dreambooth, textual inversion, and lora allow users to teach the model new concepts or styles with just a few reference images. its open-source nature has led to a flourishing ecosystem of innovations built upon the base model.\n", 62 | "\n", 63 | "> **limitations and ethical considerations**\n", 64 | "\n", 65 | "despite its capabilities, stable diffusion faces several limitations. it sometimes struggles with complex compositions, accurate text rendering, precise counting, and consistent object rendering across images. the model can also reproduce biases present in its training data, potentially reinforcing stereotypes. since it's trained on internet data, it may generate inappropriate content without proper safeguards. there are also concerns about copyright infringement, as the model may reproduce styles of specific artists or copyrighted characters. to address these issues, researchers have implemented various safety mechanisms, including prompt filtering, output checking, and image watermarking. ongoing research focuses on making these models more controllable, accurate, and aligned with human values while preserving their creative capabilities.\n", 66 | "\n" 67 | ] 68 | }, 69 | { 70 | "cell_type": "markdown", 71 | "metadata": {}, 72 | "source": [] 73 | } 74 | ], 75 | "metadata": { 76 | "kernelspec": { 77 | "display_name": "Python 3", 78 | "language": "python", 79 | "name": "python3" 80 | }, 81 | "language_info": { 82 | "codemirror_mode": { 83 | "name": "ipython", 84 | "version": 3 85 | }, 86 | "file_extension": ".py", 87 | "mimetype": "text/x-python", 88 | "name": "python", 89 | "nbconvert_exporter": "python", 90 | "pygments_lexer": "ipython3", 91 | "version": "3.9.6" 92 | } 93 | }, 94 | "nbformat": 4, 95 | "nbformat_minor": 2 96 | } 97 | -------------------------------------------------------------------------------- /05-image-generation/04-clip/clip.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# clip (contrastive language-image pre-training)\n", 8 | "\n", 9 | "clip is a neural network model developed to connect text and images. in the context of stable diffusion, clip serves as the text encoder that transforms text prompts into embeddings that guide the image generation process.\n", 10 | "\n", 11 | "the core principle behind clip is **contrastive learning** between text and image pairs. clip learns to align text and images in a shared embedding space, where related text and images are positioned closer together and unrelated ones are farther apart.\n", 12 | "\n", 13 | "## how clip works\n", 14 | "\n", 15 | "clip consists of two encoders:\n", 16 | "1. a text encoder (typically a transformer)\n", 17 | "2. an image encoder (typically a vision transformer or cnn)\n", 18 | "\n", 19 | "these encoders map text and images into a shared, high-dimensional embedding space. during training, clip maximizes the cosine similarity between correct text-image pairs while minimizing similarity between incorrect pairs.\n", 20 | "\n", 21 | "## the main formula\n", 22 | "\n", 23 | "the contrastive loss function that clip optimizes can be represented as:\n", 24 | "\n", 25 | "$$l = -\\log \\frac{\\exp(sim(t_i, i_i)/\\tau)}{\\sum_{j=1}^{n} \\exp(sim(t_i, i_j)/\\tau)}$$\n", 26 | "\n", 27 | "where:\n", 28 | "- $t_i$ is the text embedding\n", 29 | "- $i_i$ is the corresponding image embedding\n", 30 | "- $sim$ is the cosine similarity function\n", 31 | "- $\\tau$ is a temperature parameter\n", 32 | "- $n$ is the batch size\n" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [] 39 | } 40 | ], 41 | "metadata": { 42 | "language_info": { 43 | "name": "python" 44 | } 45 | }, 46 | "nbformat": 4, 47 | "nbformat_minor": 2 48 | } 49 | -------------------------------------------------------------------------------- /05-image-generation/05-dall-e/dalle.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# dall-e (a neural network that creates images from text)\n", 8 | "\n", 9 | "dall-e is a generative ai system developed by openai that creates images from text descriptions. it represents a breakthrough in the field of text-to-image synthesis, allowing users to generate novel and creative visual content simply by describing what they want to see.\n", 10 | "\n", 11 | "the core principle behind dall-e is **autoregressive generation** based on transformer architecture. dall-e understands the relationships between images and their textual descriptions to produce images that match the given text prompts with remarkable accuracy.\n", 12 | "\n", 13 | "## how dall-e works\n", 14 | "\n", 15 | "dall-e follows a two-stage approach:\n", 16 | "1. a text encoder processes the input prompt\n", 17 | "2. a generative model produces images based on the encoded text\n", 18 | "\n", 19 | "the original dall-e used a discrete vae (variational autoencoder) to compress images into tokens, treating image generation similar to language modeling. dall-e 2 and dall-e 3 use diffusion models for higher quality generation.\n", 20 | "\n", 21 | "## the technical approach\n", 22 | "\n", 23 | "dall-e 3 specifically uses:\n", 24 | "\n", 25 | "1. a text encoder to understand the prompt\n", 26 | "2. a diffusion model that gradually removes noise from random pixels\n", 27 | "3. a refinement process that ensures adherence to the text prompt\n", 28 | "\n", 29 | "the simplified process can be represented as:\n", 30 | "\n", 31 | "$$x_t = \\sqrt{\\alpha_t}x_0 + \\sqrt{1-\\alpha_t}\\epsilon$$\n", 32 | "\n", 33 | "where:\n", 34 | "- $x_t$ is the noisy image at timestep $t$\n", 35 | "- $x_0$ is the original image\n", 36 | "- $\\alpha_t$ is a noise schedule parameter\n", 37 | "- $\\epsilon$ is gaussian noise\n", 38 | "\n", 39 | "## dall-e's capabilities\n", 40 | "\n", 41 | "dall-e excels at:\n", 42 | "- creating photorealistic images from detailed descriptions\n", 43 | "- generating artistic compositions in various styles\n", 44 | "- understanding complex spatial relationships\n", 45 | "- maintaining coherence across complex prompts\n", 46 | "- rendering text within images (especially in dall-e 3)\n", 47 | "\n", 48 | "when you provide a text prompt to dall-e, it interprets your description and generates an image that visually represents the concepts, styles, and relationships you've described, demonstrating an impressive understanding of both language and visual content." 49 | ] 50 | } 51 | ], 52 | "metadata": { 53 | "language_info": { 54 | "name": "python" 55 | } 56 | }, 57 | "nbformat": 4, 58 | "nbformat_minor": 2 59 | } 60 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Deep Learning Research Paper Collection 2 | 3 | ## Overview 4 | 5 | This repository is a collection of IMPORTANT deep learning research papers, organized by research area and implementation. The goal is to provide a structured approach to understanding the evolution and core concepts of deep learning. 6 | 7 | ## Disclaimer 8 | 9 | > [!IMPORTANT] 10 | > This is a personal learning project. The implementations and notes may contain errors or simplifications. Use with caution and always refer to the original papers. 11 | 12 | ## Inspiration and Credits 13 | 14 | Inspired by [adam-maj](https://github.com/adam-maj) and expanded with additional research papers and implementations. 15 | 16 | ## Project Goals 17 | 18 | - Implement approximately 60 important deep learning papers 19 | - Provide scratch implementations for learning and understanding 20 | - Create a comprehensive resource for deep learning research 21 | 22 | ## Contents 23 | 24 | ### 1. Foundational Deep Neural Networks 25 | 26 | #### Papers 27 | - **DNN** (1987): Learning Internal Representations by Error Propagation [pdf](https://www.iro.umontreal.ca/~vincentp/ift3395/lectures/backprop_old.pdf) 28 | - **CNN** (1989): Backpropagation Applied to Handwritten Zip Code Recognition [pdf](http://yann.lecun.com/exdb/publis/pdf/lecun-89e.pdf) 29 | - **LeNet** (1998): Gradient-Based Learning Applied to Document Recognition [pdf](http://vision.stanford.edu/cs598_spring07/papers/Lecun98.pdf) 30 | - **AlexNet** (2012): ImageNet Classification with Deep Convolutional Networks [pdf](https://papers.nips.cc/paper/2012/file/c399862d3b9d6b76c8436e924a68c45b-Paper.pdf) 31 | - **U-Net** (2015): Convolutional Networks for Biomedical Image Segmentation [pdf](https://arxiv.org/pdf/1505.04597.pdf) 32 | 33 | ### 2. Optimization and Regularization Techniques 34 | 35 | #### Papers 36 | - **Weight Decay** (1991): A Simple Weight Decay Can Improve Generalization [pdf](https://www.cs.toronto.edu/~hinton/absps/nips93.pdf) 37 | - **ReLU** (2011): Deep Sparse Rectified Neural Networks [pdf](https://www.cs.toronto.edu/~hinton/absps/reluICML.pdf) 38 | - **Residuals** (2015): Deep Residual Learning for Image Recognition [pdf](https://arxiv.org/pdf/1512.03385.pdf) 39 | - **Dropout** (2014): Preventing Neural Networks from Overfitting [pdf](https://www.cs.toronto.edu/~hinton/absps/JMLRdropout.pdf) 40 | - **BatchNorm** (2015): Accelerating Deep Network Training [pdf](https://arxiv.org/pdf/1502.03167.pdf) 41 | - **LayerNorm** (2016): Layer Normalization [pdf](https://arxiv.org/pdf/1607.06450.pdf) 42 | - **GELU** (2016): Gaussian Error Linear Units [pdf](https://arxiv.org/pdf/1606.08415.pdf) 43 | - **Adam** (2014): Stochastic Optimization Method [pdf](https://arxiv.org/pdf/1412.6980.pdf) 44 | 45 | ### 3. Sequence Modeling 46 | 47 | #### Papers 48 | - **RNN** (1989): Continually Running Fully Recurrent Neural Networks [pdf](https://www.bioinf.jku.at/publications/older/2604.pdf) 49 | - **LSTM** (1997): Long-Short Term Memory [pdf](https://www.bioinf.jku.at/publications/older/2308.pdf) 50 | - **Learning to Forget** (2000): Continual Prediction with LSTM [pdf](https://www.researchgate.net/publication/221601044_Learning_to_Forget_Continual_Prediction_with_LSTM) 51 | - **Word2Vec** (2013): Word Representations in Vector Space [pdf](https://arxiv.org/pdf/1301.3781.pdf) 52 | - **Phrase2Vec** (2013): Distributed Representations of Words and Phrases [pdf](https://arxiv.org/pdf/1310.4546.pdf) 53 | - **Encoder-Decoder** (2014): RNN Encoder-Decoder for Machine Translation [pdf](https://arxiv.org/pdf/1406.1078.pdf) 54 | - **Seq2Seq** (2014): Sequence to Sequence Learning [pdf](https://arxiv.org/pdf/1409.3215.pdf) 55 | - **Attention** (2014): Neural Machine Translation with Alignment [pdf](https://arxiv.org/pdf/1409.0473.pdf) 56 | - **Mixture of Experts** (2017): Sparsely-Gated Neural Networks [pdf](https://arxiv.org/pdf/1701.06538.pdf) 57 | 58 | ### 4. Language Modeling 59 | 60 | #### Papers 61 | - **Transformer** (2017): Attention Is All You Need [pdf](https://arxiv.org/pdf/1706.03762.pdf) 62 | - **BERT** (2018): Bidirectional Transformers for Language Understanding [pdf](https://arxiv.org/pdf/1810.04805.pdf) 63 | - **RoBERTa** (2019): Robustly Optimized BERT Pretraining [pdf](https://arxiv.org/pdf/1907.11692.pdf) 64 | - **T5** (2019): Unified Text-to-Text Transformer [pdf](https://arxiv.org/pdf/1910.10683.pdf) 65 | - **GPT Series**: 66 | - GPT (2018): Generative Pre-Training [pdf](https://arxiv.org/pdf/1810.04805.pdf) 67 | - GPT-2 (2018): Unsupervised Multitask Learning [pdf](https://arxiv.org/pdf/1902.01082.pdf) 68 | - GPT-3 (2020): Few-Shot Learning [pdf](https://arxiv.org/pdf/2005.14165.pdf) 69 | - GPT-4 (2023): Advanced Language Model [pdf](https://arxiv.org/pdf/2303.08774.pdf) 70 | - **LoRA** (2021): Low-Rank Adaptation of Large Language Models [pdf](https://arxiv.org/pdf/2106.09685.pdf) 71 | - **RLHF** (2019): Fine-Tuning from Human Preferences [pdf](https://arxiv.org/pdf/1909.08593.pdf) 72 | - **InstructGPT** (2022): Following Instructions with Human Feedback [pdf](https://arxiv.org/pdf/2203.02155.pdf) 73 | - **Vision Transformer** (2020): Image Recognition with Transformers [pdf](https://arxiv.org/pdf/2010.11929.pdf) 74 | - **ELECTRA** (2020): Discriminative Pre-training [pdf](https://arxiv.org/pdf/2003.10555.pdf) 75 | 76 | ### 5. Image Generative Modeling 77 | 78 | #### Papers 79 | - **GAN** (2014): Generative Adversarial Networks [pdf](https://arxiv.org/pdf/1406.2661.pdf) 80 | - **VAE** (2013): Auto-Encoding Variational Bayes [pdf](https://arxiv.org/pdf/1312.6114.pdf) 81 | - **VQ VAE** (2017): Neural Discrete Representation Learning [pdf](https://arxiv.org/pdf/1711.00937.pdf) 82 | - **Diffusion Models**: 83 | - Initial Diffusion (2015): Nonequilibrium Thermodynamics [pdf](https://arxiv.org/pdf/1503.03585.pdf) 84 | - Denoising Diffusion (2020): Probabilistic Models [pdf](https://arxiv.org/pdf/2006.11239.pdf) 85 | - Improved Denoising Diffusion (2021) [pdf](https://arxiv.org/pdf/2102.09672.pdf) 86 | - **CLIP** (2021): Visual Models from Natural Language Supervision [pdf](https://arxiv.org/pdf/2103.00020.pdf) 87 | - **DALL-E** (2021-2022): Text-to-Image Generation [pdf](https://arxiv.org/pdf/2102.12092.pdf) 88 | - **SimCLR** (2020): Contrastive Learning of Visual Representations [pdf](https://arxiv.org/pdf/2002.05709.pdf) 89 | 90 | ### 6. Deep Reinforcement Learning 91 | 92 | #### Papers 93 | - **Deep Reinforcement Learning** (2017): Mastering Chess and Shogi [pdf](https://arxiv.org/pdf/1712.01815.pdf) 94 | - **Deep Q-Learning** (2013): Playing Atari Games [pdf](https://www.cs.toronto.edu/~vmnih/docs/dqn.pdf) 95 | - **AlphaGo** (2016): Mastering the Game of Go [pdf](https://www.nature.com/articles/nature16961.pdf) 96 | - **AlphaFold** (2021): Protein Structure Prediction [pdf](https://www.nature.com/articles/s41586-021-03819-2.pdf) 97 | 98 | ### 7. Additional Influential Papers 99 | 100 | - **Deep Learning Survey** (2015): By LeCun, Bengio, and Hinton [pdf](https://www.cs.toronto.edu/~hinton/absps/NatureDeepReview.pdf) 101 | - **BigGAN** (2018): Large Scale GAN Training [pdf](https://arxiv.org/pdf/1809.11096.pdf) 102 | - **WaveNet** (2016): Generative Model for Raw Audio [pdf](https://arxiv.org/pdf/1609.03499.pdf) 103 | - **BERTology** (2020): Survey of BERT Use Cases [pdf](https://arxiv.org/pdf/2002.10063.pdf) 104 | 105 | #### Scaling and Model Optimization 106 | - **Scaling Laws for Neural Language Models** (2020): Predicting Model Performance [pdf](https://arxiv.org/pdf/2001.08361.pdf) 107 | - **Chinchilla** (2022): Training Compute-Optimal Large Language Models [pdf](https://arxiv.org/pdf/2203.15556.pdf) 108 | - **Gopher** (2022): Scaling Language Models with Massive Compute [pdf](https://arxiv.org/pdf/2112.11446.pdf) 109 | 110 | #### Fine-tuning and Adaptation 111 | - **P-Tuning** (2021): Prompt Tuning with Soft Prompts [pdf](https://arxiv.org/pdf/2103.10385.pdf) 112 | - **Prefix-Tuning** (2021): Optimizing Continuous Prompts [pdf](https://arxiv.org/pdf/2101.00190.pdf) 113 | - **AdaLoRA** (2023): Adaptive Low-Rank Adaptation [pdf](https://arxiv.org/pdf/2303.10512.pdf) 114 | - **QLoRA** (2023): Efficient Fine-Tuning of Quantized Models [pdf](https://arxiv.org/pdf/2305.14314.pdf) 115 | 116 | #### Inference and Optimization Techniques 117 | - **FlashAttention** (2022): Fast and Memory-Efficient Attention [pdf](https://arxiv.org/pdf/2205.14135.pdf) 118 | - **FlashAttention-2** (2023): Faster Attention Mechanism [pdf](https://arxiv.org/pdf/2307.08691.pdf) 119 | - **Direct Preference Optimization (DPO)** (2023): Aligning Language Models with Human Preferences [pdf](https://arxiv.org/pdf/2305.18046.pdf) 120 | - **LoRA** (2021): Low-Rank Adaptation of Large Language Models [pdf](https://arxiv.org/pdf/2106.09685.pdf) 121 | 122 | #### Pre-training and Model Architecture 123 | - **Mixture of Experts (MoE)** (2022): Scaling Language Models with Sparse Experts [pdf](https://arxiv.org/pdf/2201.05596.pdf) 124 | - **GLaM** (2021): Efficient Scaling with Mixture of Experts [pdf](https://arxiv.org/pdf/2112.06905.pdf) 125 | - **Switch Transformers** (2022): Scaling to Trillion Parameter Models [pdf](https://arxiv.org/pdf/2101.03961.pdf) 126 | 127 | #### Reasoning and Capabilities 128 | - **Chain of Thought Prompting** (2022): Reasoning with Language Models [pdf](https://arxiv.org/pdf/2201.11903.pdf) 129 | - **Self-Consistency** (2022): Improving Language Model Reasoning [pdf](https://arxiv.org/pdf/2203.11171.pdf) 130 | - **Tree of Thoughts** (2023): Deliberate Problem Solving [pdf](https://arxiv.org/pdf/2305.10601.pdf) 131 | 132 | #### Efficiency and Compression 133 | - **DistilBERT** (2019): Distilled Version of BERT [pdf](https://arxiv.org/pdf/1910.01108.pdf) 134 | - **Knowledge Distillation** (2022): Comprehensive Survey [pdf](https://arxiv.org/pdf/2006.05525.pdf) 135 | - **Pruning and Quantization Techniques** (2022): Model Compression Survey [pdf](https://arxiv.org/pdf/2102.06322.pdf) -------------------------------------------------------------------------------- /images/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/.gitkeep -------------------------------------------------------------------------------- /images/3-11.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/3-11.png -------------------------------------------------------------------------------- /images/3-12-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/3-12-2.png -------------------------------------------------------------------------------- /images/3-7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/3-7.png -------------------------------------------------------------------------------- /images/Cowboy-Bebop-Quotes1.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/Cowboy-Bebop-Quotes1.jpeg -------------------------------------------------------------------------------- /images/GRU.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/GRU.png -------------------------------------------------------------------------------- /images/GRulCXpaUAAm5Up.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/GRulCXpaUAAm5Up.jpeg -------------------------------------------------------------------------------- /images/LKE.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/LKE.png -------------------------------------------------------------------------------- /images/RNN-vs-FNN-660.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/RNN-vs-FNN-660.png -------------------------------------------------------------------------------- /images/T5.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/T5.jpg -------------------------------------------------------------------------------- /images/T5_1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/T5_1.jpg -------------------------------------------------------------------------------- /images/add-1.drawio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/add-1.drawio.png -------------------------------------------------------------------------------- /images/alexnet-arc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/alexnet-arc.png -------------------------------------------------------------------------------- /images/batcnorm.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/batcnorm.jpeg -------------------------------------------------------------------------------- /images/bert.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/bert.jpg -------------------------------------------------------------------------------- /images/bot-res.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/bot-res.png -------------------------------------------------------------------------------- /images/bottleneck.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/bottleneck.png -------------------------------------------------------------------------------- /images/cnn.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/cnn.jpg -------------------------------------------------------------------------------- /images/convolution-2.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/convolution-2.gif -------------------------------------------------------------------------------- /images/decoder.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/decoder.png -------------------------------------------------------------------------------- /images/dropout.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/dropout.png -------------------------------------------------------------------------------- /images/dropoutex.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/dropoutex.jpg -------------------------------------------------------------------------------- /images/dropoutt.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/dropoutt.png -------------------------------------------------------------------------------- /images/earlystopping.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/earlystopping.jpg -------------------------------------------------------------------------------- /images/encoder.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/encoder.png -------------------------------------------------------------------------------- /images/f_pdf.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/f_pdf.jpg -------------------------------------------------------------------------------- /images/for_revered_guest.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/for_revered_guest.png -------------------------------------------------------------------------------- /images/imagent.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/imagent.png -------------------------------------------------------------------------------- /images/imagnet-win.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/imagnet-win.png -------------------------------------------------------------------------------- /images/last-lstm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/last-lstm.png -------------------------------------------------------------------------------- /images/lstm-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/lstm-2.png -------------------------------------------------------------------------------- /images/lstm-add.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/lstm-add.png -------------------------------------------------------------------------------- /images/lstm-core.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/lstm-core.png -------------------------------------------------------------------------------- /images/lstm-input.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/lstm-input.png -------------------------------------------------------------------------------- /images/lstm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/lstm.png -------------------------------------------------------------------------------- /images/maxpool.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/maxpool.gif -------------------------------------------------------------------------------- /images/mul.drawio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/mul.drawio.png -------------------------------------------------------------------------------- /images/newunet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/newunet.png -------------------------------------------------------------------------------- /images/nor-res.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/nor-res.png -------------------------------------------------------------------------------- /images/overlapping.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/overlapping.png -------------------------------------------------------------------------------- /images/pos-cal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/pos-cal.png -------------------------------------------------------------------------------- /images/pos-emb.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/pos-emb.png -------------------------------------------------------------------------------- /images/probs.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/probs.jpg -------------------------------------------------------------------------------- /images/relu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/relu.png -------------------------------------------------------------------------------- /images/reluu.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/reluu.png -------------------------------------------------------------------------------- /images/res-arc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/res-arc.png -------------------------------------------------------------------------------- /images/resnet.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/resnet.jpg -------------------------------------------------------------------------------- /images/rnn_arc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/rnn_arc.png -------------------------------------------------------------------------------- /images/self-feedback-loop.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/self-feedback-loop.jpg -------------------------------------------------------------------------------- /images/skip.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/skip.png -------------------------------------------------------------------------------- /images/transformer.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/transformer.jpeg -------------------------------------------------------------------------------- /images/transistor.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/transistor.png -------------------------------------------------------------------------------- /images/trig.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/trig.png -------------------------------------------------------------------------------- /images/typesofrnn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/typesofrnn.png -------------------------------------------------------------------------------- /images/unet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/unet.png -------------------------------------------------------------------------------- /images/unetimg.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/unetimg.png -------------------------------------------------------------------------------- /images/unetsd.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/unetsd.png -------------------------------------------------------------------------------- /images/vis-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/vis-4.png -------------------------------------------------------------------------------- /images/vis-cnn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/vis-cnn.png -------------------------------------------------------------------------------- /images/vis_0.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/vis_0.png -------------------------------------------------------------------------------- /images/vis_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/vis_1.png -------------------------------------------------------------------------------- /images/vis_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/vis_2.png -------------------------------------------------------------------------------- /images/word2vec-embed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/word2vec-embed.png -------------------------------------------------------------------------------- /images/word2vec.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/saurabhaloneai/History-of-Deep-Learning/3bf46e64963175f6607ab97672c59ae8ec25832f/images/word2vec.png --------------------------------------------------------------------------------