├── img └── plot2.png ├── torch_data_utils.pyc ├── mlp_torch.py ├── torch_data_utils.py ├── README.md ├── plot_tool.py ├── utils.py ├── cnn.py ├── mi_tool.py ├── Extension.ipynb ├── .ipynb_checkpoints ├── Extension-checkpoint.ipynb ├── Untitled-checkpoint.ipynb ├── Untitled-Copy3-checkpoint.ipynb ├── Untitled-Copy2-checkpoint.ipynb ├── Untitled-Copy1-checkpoint.ipynb ├── CNN-checkpoint.ipynb └── MLP-checkpoint.ipynb ├── MLP.ipynb ├── mlp_vec.py ├── mlp.py └── CNN.ipynb /img/plot2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mutual-ai/Information-Bottleneck-for-Deep-Learning/HEAD/img/plot2.png -------------------------------------------------------------------------------- /torch_data_utils.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mutual-ai/Information-Bottleneck-for-Deep-Learning/HEAD/torch_data_utils.pyc -------------------------------------------------------------------------------- /mlp_torch.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.autograd import Variable 3 | import torch.nn as nn 4 | import torch.nn.functional as F 5 | import torchvision 6 | from torchvision import datasets, transforms 7 | import torch.optim as optim 8 | import torchvision.datasets as dset 9 | 10 | class Model(nn.Module): 11 | def __init__(self, input_size, h1, h2, h3, num_classes): 12 | super(Model, self).__init__() 13 | 14 | self.fc1 = nn.Linear(input_size, h1) 15 | self.fc2 = nn.Linear(h1, h2) 16 | self.fc3 = nn.Linear(h2, h3) 17 | self.fc4 = nn.Linear(h3, num_classes) 18 | 19 | def forward(self, x): 20 | x = x.view(-1, 28*28) 21 | x = F.relu(self.fc1(x)) 22 | x = F.relu(self.fc2(x)) 23 | x = F.relu(self.fc3(x)) 24 | x = F.relu(self.fc4(x)) 25 | return x -------------------------------------------------------------------------------- /torch_data_utils.py: -------------------------------------------------------------------------------- 1 | import torchvision.datasets as dset 2 | from torchvision import datasets, transforms 3 | import torch 4 | 5 | class FashionMNIST(dset.MNIST): 6 | urls = [ 7 | 'http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-images-idx3-ubyte.gz', 8 | 'http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-labels-idx1-ubyte.gz', 9 | 'http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-images-idx3-ubyte.gz', 10 | 'http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-labels-idx1-ubyte.gz', 11 | ] 12 | 13 | def load_fashion_mnist(shuffle=True, batch_size = 64, num_workers=1, download = True, root_dir='./'): 14 | data_transform = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.5,), (1.0,))]) 15 | train_set = FashionMNIST(root=root_dir, train=True, transform=data_transform, download=True) 16 | test_set = FashionMNIST(root=root_dir, train=False, transform=data_transform, download=True) 17 | 18 | train_loader = torch.utils.data.DataLoader(dataset=train_set, batch_size=batch_size, shuffle=shuffle, num_workers=num_workers) 19 | test_loader = torch.utils.data.DataLoader(dataset=test_set, batch_size=10000, shuffle=False, num_workers=num_workers) 20 | 21 | return train_loader, test_loader -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Information Bottleneck for Deep Learning 2 | 3 | 
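A minimal usage sketch of the PyTorch pieces in this repo (`torch_data_utils.load_fashion_mnist` and `mlp_torch.Model`), mirroring the setup used in `MLP.ipynb`; the batch size and hyperparameters below are simply the notebook defaults:

```python
import torch
import torch.nn as nn

from torch_data_utils import load_fashion_mnist  # downloads Fashion-MNIST on first use
import mlp_torch

# Data loaders (defaults used in the notebooks: batch_size=64, root_dir='./')
train_loader, test_loader = load_fashion_mnist(batch_size=64, root_dir='./')

# 4-layer MLP: 784 -> 500 -> 256 -> 64 -> 10, as instantiated in MLP.ipynb
model = mlp_torch.Model(784, 500, 256, 64, 10)
optimizer = torch.optim.Adam(model.parameters(), lr=0.001)
criterion = nn.CrossEntropyLoss()
```
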
![](https://raw.githubusercontent.com/LargePanda/Information-Bottleneck-for-Deep-Learning/master/img/plot2.png) 4 | 5 | 6 | **Dataset**: fashion mnist 7 | 8 | **Model 1**: MLP with Batch Normalization 9 | 10 | Refactored and reused my previous code for this implementation. 11 | 12 | URL: [link](https://github.com/LargePanda/Information-Bottleneck-for-Deep-Learning/blob/master/Fashion%20MNIST%20experiments.ipynb) 13 | 14 | **Model 2**: CNN 15 | 16 | Implemented in PyTorch 17 | 18 | URL: [link](https://github.com/LargePanda/Information-Bottleneck-for-Deep-Learning/blob/master/CNN.ipynb) 19 | 20 | 21 | **Model extension**: 22 | 1. MLP with more than 3 layers (computationally expensive, in progress) 23 | 2. MLP with weights intialization via denoised autoencoder (in progress) 24 | 25 | **Other parameters**: 26 | 27 | number of bins for MI: 10 28 | 29 | **Papers** 30 | 31 | 0. [Opening the black box of Deep Neural Networks via Information](https://arxiv.org/pdf/1703.00810.pdf) 32 | 1. [Deep Learning and the Information Bottleneck Principle](https://arxiv.org/pdf/1503.02406.pdf) 33 | 2. [Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift](https://arxiv.org/pdf/1502.03167.pdf) 34 | 3. [Regularization of Neural Networks using DropConnect](https://cs.nyu.edu/~wanli/dropc/dropc.pdf) 35 | -------------------------------------------------------------------------------- /plot_tool.py: -------------------------------------------------------------------------------- 1 | import matplotlib 2 | import matplotlib.pyplot as plt 3 | import numpy as np 4 | matplotlib.rcParams.update({'font.size': 14}) 5 | figsize = (8, 5) 6 | 7 | 8 | def plot(train_logs, test_logs, size = figsize): 9 | 10 | plt.figure(1, figsize=size) 11 | 12 | lists = sorted(train_logs.items()) 13 | x, y = zip(*lists) 14 | plt.plot(x, y, label = 'Training') 15 | 16 | lists = sorted(test_logs.items()) 17 | x, y = zip(*lists) 18 | plt.plot(x, y, label = 'Testing') 19 | 20 | plt.ylabel('Accuracy ') 21 | plt.xlabel('Number of Epoches') 22 | plt.legend() 23 | plt.title('Accuracy VS. 
Number of Epoches') 24 | 25 | def mi_plot(MI_client): 26 | en_mis = np.array(MI_client.en_mi_collector) 27 | de_mis = np.array(MI_client.de_mi_collector) 28 | 29 | fig, ax = plt.subplots(figsize=(8, 8)) 30 | ax.set_ylabel('MI_T,Y') 31 | ax.set_xlabel('MI_X,T') 32 | title = ax.set_title('Information plane') 33 | plt.close(fig) 34 | 35 | cmap = plt.cm.get_cmap('cool') 36 | 37 | def plot_point(i): 38 | ax.plot(en_mis[i,:], de_mis[i,:], 'k-', alpha=0.2) 39 | if i > 0: 40 | for j in range(len(en_mis[0])): 41 | ax.plot(en_mis[(i-1):(i+1),j],de_mis[(i-1):(i+1),j],'.-', c = cmap(i*.008), ms = 8) 42 | 43 | for i in range(len(en_mis)): 44 | plot_point(i) 45 | 46 | return fig -------------------------------------------------------------------------------- /utils.py: -------------------------------------------------------------------------------- 1 | import os 2 | import gzip 3 | import numpy as np 4 | import pdb 5 | 6 | def load_mnist(path, kind='train'): 7 | 8 | """Load MNIST data from `path`""" 9 | labels_path = os.path.join(path, 10 | '%s-labels-idx1-ubyte.gz' 11 | % kind) 12 | images_path = os.path.join(path, 13 | '%s-images-idx3-ubyte.gz' 14 | % kind) 15 | 16 | with gzip.open(labels_path, 'rb') as lbpath: 17 | labels = np.frombuffer(lbpath.read(), dtype=np.uint8, 18 | offset=8) 19 | 20 | with gzip.open(images_path, 'rb') as imgpath: 21 | images = np.frombuffer(imgpath.read(), dtype=np.uint8, 22 | offset=16).reshape(len(labels), 784) 23 | 24 | return images, labels 25 | 26 | 27 | def d_sigmoid(x): 28 | return x*(1 - x) 29 | 30 | def sigmoid(x): 31 | return 1 / (1 + np.exp(-x)) 32 | 33 | def unison_shuffled_copies(a, b): 34 | assert len(a) == len(b) 35 | p = np.random.permutation(len(a)) 36 | return a[p], b[p] 37 | 38 | def randomize_copies(a, b): 39 | assert len(a) == len(b) 40 | p1 = np.random.permutation(len(a)) 41 | p2 = np.random.permutation(len(a)) 42 | return a[p1], b[p2] 43 | 44 | def d_RELU(x): 45 | return np.array(x>0).astype('double') 46 | 47 | def RELU(x): 48 | return np.maximum(x, 0) 49 | 50 | def totuple(a): 51 | try: 52 | return tuple(totuple(i) for i in a) 53 | except TypeError: 54 | return a -------------------------------------------------------------------------------- /cnn.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.autograd import Variable 3 | import torch.nn as nn 4 | import torch.nn.functional as F 5 | import torchvision 6 | from torchvision import datasets, transforms 7 | import torch.optim as optim 8 | import torchvision.datasets as dset 9 | 10 | class Model(nn.Module): 11 | def __init__(self): 12 | super(Model, self).__init__() 13 | 14 | # Convolution + average pooling 15 | self.cnn1 = nn.Conv2d(in_channels=1, out_channels=16, kernel_size=3, stride=1, padding=0) 16 | self.relu1 = nn.ReLU() 17 | self.avgpool1 = nn.AvgPool2d(kernel_size=2) 18 | 19 | # Convolution + max pooling 20 | self.cnn2 = nn.Conv2d(in_channels=16, out_channels=64, kernel_size=3, stride=1, padding=0) 21 | self.relu2 = nn.ReLU() 22 | self.maxpool2 = nn.MaxPool2d(kernel_size=2) 23 | 24 | self.dropout = nn.Dropout(p=0.5) 25 | 26 | self.fc1 = nn.Linear(1600, 100) 27 | self.fc2 = nn.Linear(100, 10) 28 | 29 | 30 | def forward(self, x): 31 | # Convolution + average pooling 32 | out = self.cnn1(x) 33 | out = self.relu1(out) 34 | out = self.avgpool1(out) 35 | 36 | # Convolution + max pooling 37 | out = self.cnn2(out) 38 | out = self.relu2(out) 39 | out = self.maxpool2(out) 40 | 41 | # resize 42 | out = out.view(out.size(0), -1) 43 | out = 
self.dropout(out) 44 | 45 | # full connect layers 46 | out = self.fc1(out) 47 | out = self.fc2(out) 48 | 49 | return out -------------------------------------------------------------------------------- /mi_tool.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import utils 3 | from collections import Counter 4 | import math 5 | 6 | class MI: 7 | def __init__(self, X, y, bin_size): 8 | self.X = X 9 | self.y = y 10 | self.bins = np.linspace(0, 1, bin_size+1) 11 | self.n_samples = self.X.shape[0] 12 | self.unit = 1./self.n_samples 13 | 14 | self.pdf_x = Counter() 15 | self.pdf_y = Counter() 16 | 17 | self.en_mi_collector = [] 18 | self.de_mi_collector = [] 19 | self.epochs = [] 20 | 21 | 22 | def discretize(self): 23 | self.X_d = np.digitize(self.X, self.bins).tolist() 24 | self.y_d = self.y.tolist() 25 | 26 | def pre_compute(self): 27 | for i in range(self.n_samples): 28 | self.pdf_x[utils.totuple(self.X_d[i])] += self.unit 29 | self.pdf_y[utils.totuple(self.y_d[i])] += self.unit 30 | 31 | def combine(self, a, b): 32 | ret = ( utils.totuple(a) , tuple(b)) 33 | return ret 34 | 35 | def joint_compute(self, hidden): 36 | 37 | self.h = np.digitize(hidden, self.bins).tolist() 38 | self.pdf_t = Counter() 39 | self.pdf_xt = Counter() 40 | self.pdf_yt = Counter() 41 | 42 | for i in range(self.n_samples): 43 | xt = self.combine(self.X_d[i], self.h[i]) 44 | yt = self.combine(self.y_d[i], self.h[i]) 45 | self.pdf_xt[xt] += self.unit 46 | self.pdf_yt[yt] += self.unit 47 | self.pdf_t[tuple(self.h[i])] += self.unit 48 | 49 | def encoder_mi(self): 50 | return sum([ self.pdf_xt[xt] * (math.log(self.pdf_xt[xt]) - math.log(self.pdf_x[xt[0]]) - math.log(self.pdf_t[xt[1]])) for xt in self.pdf_xt ]) 51 | 52 | def decoder_mi(self): 53 | return sum([ self.pdf_yt[yt] * (math.log(self.pdf_yt[yt]) - math.log(self.pdf_y[yt[0]]) - math.log(self.pdf_t[yt[1]])) for yt in self.pdf_yt ]) 54 | 55 | def mi_single_epoch(self, hiddens, epoch): 56 | ens = [] 57 | des = [] 58 | for hidden in hiddens: 59 | self.joint_compute(hidden) 60 | ens.append(self.encoder_mi()) 61 | des.append(self.decoder_mi()) 62 | self.en_mi_collector.append(ens) 63 | self.de_mi_collector.append(des) 64 | 65 | simple_ens = [round(a, 4) for a in ens] 66 | simple_des = [round(b, 4) for b in des] 67 | points = [(simple_ens[i], simple_des[i]) for i in range(len(simple_des))] 68 | # print "MI points", points 69 | 70 | self.epochs.append(epoch) -------------------------------------------------------------------------------- /Extension.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### Extension: Multilayer (n>=3) perceptron w/ Batch Normalization & binarized inputs" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": { 14 | "collapsed": true 15 | }, 16 | "outputs": [], 17 | "source": [ 18 | "from utils import load_mnist\n", 19 | "import utils\n", 20 | "import numpy as np\n", 21 | "from mlp import Layer, LayerArgs, Model, ModelArgs\n", 22 | "from collections import Counter\n", 23 | "import math\n", 24 | "from mi_tool import MI\n", 25 | "import plot_tool" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 2, 31 | "metadata": { 32 | "collapsed": true 33 | }, 34 | "outputs": [], 35 | "source": [ 36 | "X_train, y_train = load_mnist('../fashion_mnist/', kind='train')\n", 37 | "X_test, y_test = 
load_mnist('../fashion_mnist/', kind='t10k')\n", 38 | "\n", 39 | "# normalize inputs\n", 40 | "X_train, X_test = np.multiply(X_train, 1.0 / 255.0), np.multiply(X_test, 1.0 / 255.0)\n", 41 | "X_train, y_train = utils.unison_shuffled_copies(X_train, y_train)\n", 42 | "X_train_subset, y_train_subset = X_train[:10000], y_train[:10000]" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 3, 48 | "metadata": { 49 | "collapsed": false 50 | }, 51 | "outputs": [], 52 | "source": [ 53 | "# define the network structure with 2 hidden layers of dimension 100 and 20. \n", 54 | "# other parameters were set to default, activation function is set to RELU by default. \n", 55 | "\n", 56 | "layer_args = [LayerArgs(784, 784, layer_type = \"INPUT\"), \\\n", 57 | " LayerArgs(784, 100), \\\n", 58 | " LayerArgs(100, 50), \\\n", 59 | " LayerArgs(50, 25), \\\n", 60 | " LayerArgs(25, 10, layer_type = \"OUTPUT\", activate = np.exp)]\n", 61 | "\n", 62 | "# our model is using mini-batch gradient descent\n", 63 | "# set max #(epoch) as 80 and max #(iteration) as 100000;\n", 64 | "# For everay 1000 iterations, we compute (and plot) MI;\n", 65 | "model_args = ModelArgs(num_passes = 80, max_iter=100000, report_interval=500)\n", 66 | "\n", 67 | "# intialize the model\n", 68 | "# TODO: model weights intialization by denoised autoencoder\n", 69 | "\n", 70 | "model = Model(layer_args, model_args)\n", 71 | "model.feed_data(X_train, y_train, X_test, y_test)\n", 72 | "model.trial_data(X_train_subset, y_train_subset)\n", 73 | "model.intialize_model()" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": null, 79 | "metadata": { 80 | "collapsed": true 81 | }, 82 | "outputs": [], 83 | "source": [ 84 | "MI_client = MI(X_train_subset, y_train_subset, 10)\n", 85 | "MI_client.discretize()\n", 86 | "MI_client.pre_compute()" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": null, 92 | "metadata": { 93 | "collapsed": false, 94 | "scrolled": true 95 | }, 96 | "outputs": [ 97 | { 98 | "name": "stdout", 99 | "output_type": "stream", 100 | "text": [ 101 | "Epoch: 1, Train Acc: 0.85145, Test Acc: 0.837\n", 102 | "Epoch: 2, Train Acc: 0.868566666667, Test Acc: 0.8483\n", 103 | "Epoch: 3, Train Acc: 0.877083333333, Test Acc: 0.8572\n", 104 | "Epoch: 4, Train Acc: 0.883066666667, Test Acc: 0.8624\n", 105 | "Epoch: 5, Train Acc: 0.887183333333, Test Acc: 0.8665\n", 106 | "Epoch: 6, Train Acc: 0.892083333333, Test Acc: 0.8699\n", 107 | "Epoch: 7, Train Acc: 0.8946, Test Acc: 0.8748\n", 108 | "Epoch: 8, Train Acc: 0.89725, Test Acc: 0.8746\n", 109 | "Epoch: 9, Train Acc: 0.90145, Test Acc: 0.8773\n" 110 | ] 111 | } 112 | ], 113 | "source": [ 114 | "for epoch, hidden_layers in model.run_model():\n", 115 | " MI_client.mi_single_epoch(hidden_layers, epoch)" 116 | ] 117 | }, 118 | { 119 | "cell_type": "markdown", 120 | "metadata": {}, 121 | "source": [ 122 | "### Analysis: Information Bottleneck" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": null, 128 | "metadata": { 129 | "collapsed": true 130 | }, 131 | "outputs": [], 132 | "source": [ 133 | "plot_tool.mi_plot(MI_client)" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": null, 139 | "metadata": { 140 | "collapsed": true 141 | }, 142 | "outputs": [], 143 | "source": [ 144 | "# binarization\n", 145 | "\n", 146 | "X_train.setflags(write=1)\n", 147 | "X_test.setflags(write=1)\n", 148 | "\n", 149 | "X_train = np.where(X_train>127, 1, 0)\n", 150 | "X_test = np.where(X_test>127, 1, 0)" 151 | ] 
152 | } 153 | ], 154 | "metadata": { 155 | "kernelspec": { 156 | "display_name": "Python 2", 157 | "language": "python", 158 | "name": "python2" 159 | }, 160 | "language_info": { 161 | "codemirror_mode": { 162 | "name": "ipython", 163 | "version": 2 164 | }, 165 | "file_extension": ".py", 166 | "mimetype": "text/x-python", 167 | "name": "python", 168 | "nbconvert_exporter": "python", 169 | "pygments_lexer": "ipython2", 170 | "version": "2.7.13" 171 | } 172 | }, 173 | "nbformat": 4, 174 | "nbformat_minor": 2 175 | } 176 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Extension-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### Extension: Multilayer (n>=3) perceptron w/ Batch Normalization & binarized inputs" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 1, 13 | "metadata": { 14 | "collapsed": true 15 | }, 16 | "outputs": [], 17 | "source": [ 18 | "from utils import load_mnist\n", 19 | "import utils\n", 20 | "import numpy as np\n", 21 | "from mlp import Layer, LayerArgs, Model, ModelArgs\n", 22 | "from collections import Counter\n", 23 | "import math\n", 24 | "from mi_tool import MI\n", 25 | "import plot_tool" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 2, 31 | "metadata": { 32 | "collapsed": true 33 | }, 34 | "outputs": [], 35 | "source": [ 36 | "X_train, y_train = load_mnist('../fashion_mnist/', kind='train')\n", 37 | "X_test, y_test = load_mnist('../fashion_mnist/', kind='t10k')\n", 38 | "\n", 39 | "# normalize inputs\n", 40 | "X_train, X_test = np.multiply(X_train, 1.0 / 255.0), np.multiply(X_test, 1.0 / 255.0)\n", 41 | "X_train, y_train = utils.unison_shuffled_copies(X_train, y_train)\n", 42 | "X_train_subset, y_train_subset = X_train[:10000], y_train[:10000]" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 3, 48 | "metadata": { 49 | "collapsed": false 50 | }, 51 | "outputs": [], 52 | "source": [ 53 | "# define the network structure with 2 hidden layers of dimension 100 and 20. \n", 54 | "# other parameters were set to default, activation function is set to RELU by default. 
\n", 55 | "\n", 56 | "layer_args = [LayerArgs(784, 784, layer_type = \"INPUT\"), \\\n", 57 | " LayerArgs(784, 100), \\\n", 58 | " LayerArgs(100, 50), \\\n", 59 | " LayerArgs(50, 25), \\\n", 60 | " LayerArgs(25, 10, layer_type = \"OUTPUT\", activate = np.exp)]\n", 61 | "\n", 62 | "# our model is using mini-batch gradient descent\n", 63 | "# set max #(epoch) as 80 and max #(iteration) as 100000;\n", 64 | "# For everay 1000 iterations, we compute (and plot) MI;\n", 65 | "model_args = ModelArgs(num_passes = 80, max_iter=100000, report_interval=500)\n", 66 | "\n", 67 | "# intialize the model\n", 68 | "# TODO: model weights intialization by denoised autoencoder\n", 69 | "\n", 70 | "model = Model(layer_args, model_args)\n", 71 | "model.feed_data(X_train, y_train, X_test, y_test)\n", 72 | "model.trial_data(X_train_subset, y_train_subset)\n", 73 | "model.intialize_model()" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": null, 79 | "metadata": { 80 | "collapsed": true 81 | }, 82 | "outputs": [], 83 | "source": [ 84 | "MI_client = MI(X_train_subset, y_train_subset, 10)\n", 85 | "MI_client.discretize()\n", 86 | "MI_client.pre_compute()" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": null, 92 | "metadata": { 93 | "collapsed": false, 94 | "scrolled": true 95 | }, 96 | "outputs": [ 97 | { 98 | "name": "stdout", 99 | "output_type": "stream", 100 | "text": [ 101 | "Epoch: 1, Train Acc: 0.85145, Test Acc: 0.837\n", 102 | "Epoch: 2, Train Acc: 0.868566666667, Test Acc: 0.8483\n", 103 | "Epoch: 3, Train Acc: 0.877083333333, Test Acc: 0.8572\n", 104 | "Epoch: 4, Train Acc: 0.883066666667, Test Acc: 0.8624\n", 105 | "Epoch: 5, Train Acc: 0.887183333333, Test Acc: 0.8665\n", 106 | "Epoch: 6, Train Acc: 0.892083333333, Test Acc: 0.8699\n", 107 | "Epoch: 7, Train Acc: 0.8946, Test Acc: 0.8748\n", 108 | "Epoch: 8, Train Acc: 0.89725, Test Acc: 0.8746\n", 109 | "Epoch: 9, Train Acc: 0.90145, Test Acc: 0.8773\n" 110 | ] 111 | } 112 | ], 113 | "source": [ 114 | "for epoch, hidden_layers in model.run_model():\n", 115 | " MI_client.mi_single_epoch(hidden_layers, epoch)" 116 | ] 117 | }, 118 | { 119 | "cell_type": "markdown", 120 | "metadata": {}, 121 | "source": [ 122 | "### Analysis: Information Bottleneck" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": null, 128 | "metadata": { 129 | "collapsed": true 130 | }, 131 | "outputs": [], 132 | "source": [ 133 | "plot_tool.mi_plot(MI_client)" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": null, 139 | "metadata": { 140 | "collapsed": true 141 | }, 142 | "outputs": [], 143 | "source": [ 144 | "# binarization\n", 145 | "\n", 146 | "X_train.setflags(write=1)\n", 147 | "X_test.setflags(write=1)\n", 148 | "\n", 149 | "X_train = np.where(X_train>127, 1, 0)\n", 150 | "X_test = np.where(X_test>127, 1, 0)" 151 | ] 152 | } 153 | ], 154 | "metadata": { 155 | "kernelspec": { 156 | "display_name": "Python 2", 157 | "language": "python", 158 | "name": "python2" 159 | }, 160 | "language_info": { 161 | "codemirror_mode": { 162 | "name": "ipython", 163 | "version": 2 164 | }, 165 | "file_extension": ".py", 166 | "mimetype": "text/x-python", 167 | "name": "python", 168 | "nbconvert_exporter": "python", 169 | "pygments_lexer": "ipython2", 170 | "version": "2.7.13" 171 | } 172 | }, 173 | "nbformat": 4, 174 | "nbformat_minor": 2 175 | } 176 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Untitled-checkpoint.ipynb: 
-------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": false 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "from utils import load_mnist\n", 12 | "import utils\n", 13 | "import numpy as np\n", 14 | "from mlp import Layer, LayerArgs, Model, ModelArgs\n", 15 | "from collections import Counter\n", 16 | "import math\n", 17 | "from mi_tool import MI" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": null, 23 | "metadata": { 24 | "collapsed": true 25 | }, 26 | "outputs": [], 27 | "source": [ 28 | "X_train, y_train = load_mnist('../fashion_mnist/', kind='train')\n", 29 | "X_test, y_test = load_mnist('../fashion_mnist/', kind='t10k')\n", 30 | "# X_train, X_test = np.multiply(X_train, 1.0 / 255.0), np.multiply(X_test, 1.0 / 255.0)\n", 31 | "\n", 32 | "thresh = 127\n", 33 | "super_threshold_indices = X_train > thresh\n", 34 | "X_train [super_threshold_indices] = 1\n", 35 | "\n", 36 | "super_threshold_indices = X_test > thresh\n", 37 | "X_test [super_threshold_indices] = 1" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": null, 43 | "metadata": { 44 | "collapsed": false 45 | }, 46 | "outputs": [], 47 | "source": [ 48 | "X_train, y_train = utils.unison_shuffled_copies(X_train, y_train)\n", 49 | "X_train_subset, y_train_subset = X_train[:1000], y_train[:1000]" 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": null, 55 | "metadata": { 56 | "collapsed": false 57 | }, 58 | "outputs": [], 59 | "source": [ 60 | "layer_args = [LayerArgs(784, 784, layer_type = \"INPUT\"), LayerArgs(784, 20), LayerArgs(20, 20), LayerArgs(20, 10, layer_type = \"OUTPUT\", activate = np.exp)]\n", 61 | "model_args = ModelArgs(num_passes = 80, max_iter=100000, report_interval=1000)\n", 62 | "\n", 63 | "model = Model(layer_args, model_args)\n", 64 | "model.feed_data(X_train, y_train, X_test, y_test)\n", 65 | "model.trial_data(X_train_subset, y_train_subset)\n", 66 | "\n", 67 | "model.intialize_model()" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": null, 73 | "metadata": { 74 | "collapsed": false 75 | }, 76 | "outputs": [], 77 | "source": [ 78 | "MI_client = MI(X_train_subset, y_train_subset, 10)\n", 79 | "MI_client.discretize()\n", 80 | "MI_client.pre_compute()" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": null, 86 | "metadata": { 87 | "collapsed": false, 88 | "scrolled": true 89 | }, 90 | "outputs": [], 91 | "source": [ 92 | "for epoch, hidden_layers in model.run_model():\n", 93 | " MI_client.mi_single_epoch(hidden_layers, epoch)" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": null, 99 | "metadata": { 100 | "collapsed": false 101 | }, 102 | "outputs": [], 103 | "source": [ 104 | "en_mis, de_mis, epochs = np.array(MI_client.en_mi_collector), np.array(MI_client.de_mi_collector), np.array(MI_client.epochs)" 105 | ] 106 | }, 107 | { 108 | "cell_type": "code", 109 | "execution_count": null, 110 | "metadata": { 111 | "collapsed": true 112 | }, 113 | "outputs": [], 114 | "source": [ 115 | "%matplotlib inline\n", 116 | "import matplotlib.pyplot as plt\n", 117 | "from matplotlib import animation\n", 118 | "from IPython.display import HTML" 119 | ] 120 | }, 121 | { 122 | "cell_type": "code", 123 | "execution_count": null, 124 | "metadata": { 125 | "collapsed": true 126 | }, 127 | "outputs": [], 128 | "source": [ 129 | "fig, ax = 
plt.subplots(figsize=(8,8))\n", 130 | "ax.set_xlabel('I(X;T)')\n", 131 | "ax.set_ylabel('I(T;Y)')\n", 132 | "title = ax.set_title('')\n", 133 | "plt.close(fig)" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": null, 139 | "metadata": { 140 | "collapsed": false 141 | }, 142 | "outputs": [], 143 | "source": [ 144 | "cmap = plt.cm.get_cmap('cool')\n", 145 | "\n", 146 | "def animate(i):\n", 147 | " title.set_text('Epoch %s' % str(epochs[i]).zfill(4))\n", 148 | " ax.plot(en_mis[i,:], de_mis[i,:], 'k-',alpha=0.2)\n", 149 | " if i > 0:\n", 150 | " for j in range(len(en_mis[0])):\n", 151 | " ax.plot(en_mis[(i-1):(i+1),j],de_mis[(i-1):(i+1),j],'.-',c=cmap(j*.2),ms=10)\n", 152 | " \n", 153 | "for i in range(len(en_mis)):\n", 154 | " animate(i)" 155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": null, 160 | "metadata": { 161 | "collapsed": false 162 | }, 163 | "outputs": [], 164 | "source": [ 165 | "len(en_mis)" 166 | ] 167 | }, 168 | { 169 | "cell_type": "code", 170 | "execution_count": null, 171 | "metadata": { 172 | "collapsed": false, 173 | "scrolled": false 174 | }, 175 | "outputs": [], 176 | "source": [ 177 | "en_mis" 178 | ] 179 | }, 180 | { 181 | "cell_type": "code", 182 | "execution_count": null, 183 | "metadata": { 184 | "collapsed": false 185 | }, 186 | "outputs": [], 187 | "source": [ 188 | "ax.set_title('Epoch 0000 - 2999')\n", 189 | "fig" 190 | ] 191 | }, 192 | { 193 | "cell_type": "code", 194 | "execution_count": null, 195 | "metadata": { 196 | "collapsed": true 197 | }, 198 | "outputs": [], 199 | "source": [] 200 | } 201 | ], 202 | "metadata": { 203 | "kernelspec": { 204 | "display_name": "Python 2", 205 | "language": "python", 206 | "name": "python2" 207 | }, 208 | "language_info": { 209 | "codemirror_mode": { 210 | "name": "ipython", 211 | "version": 2 212 | }, 213 | "file_extension": ".py", 214 | "mimetype": "text/x-python", 215 | "name": "python", 216 | "nbconvert_exporter": "python", 217 | "pygments_lexer": "ipython2", 218 | "version": "2.7.13" 219 | } 220 | }, 221 | "nbformat": 4, 222 | "nbformat_minor": 2 223 | } 224 | -------------------------------------------------------------------------------- /MLP.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### Convolution Neural Network" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "Design reasoning\n", 15 | "1. Fashion data is more complex than digits, so we use more filters\n", 16 | "2. Choose Avg pooling on first conv layer to preserve more information\n", 17 | "3. Use Dropout to avoid overfitting. Although current start-of-art is Dropconnect, but it is not supported by PyTorch\n", 18 | "4. 
Use two layers of FC to reduce dimension" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 1, 24 | "metadata": { 25 | "collapsed": true 26 | }, 27 | "outputs": [], 28 | "source": [ 29 | "import torch\n", 30 | "from torch.autograd import Variable\n", 31 | "import torch.nn as nn\n", 32 | "import torch.nn.functional as F\n", 33 | "import torchvision\n", 34 | "from torchvision import datasets, transforms\n", 35 | "import torch.optim as optim\n", 36 | "import torchvision.datasets as dset\n", 37 | "import numpy as np" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 12, 43 | "metadata": { 44 | "collapsed": false 45 | }, 46 | "outputs": [ 47 | { 48 | "data": { 49 | "text/plain": [ 50 | "" 51 | ] 52 | }, 53 | "execution_count": 12, 54 | "metadata": {}, 55 | "output_type": "execute_result" 56 | } 57 | ], 58 | "source": [ 59 | "from torch_data_utils import load_fashion_mnist\n", 60 | "import mlp_torch\n", 61 | "reload(mlp_torch)" 62 | ] 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": 4, 67 | "metadata": { 68 | "collapsed": false 69 | }, 70 | "outputs": [ 71 | { 72 | "name": "stdout", 73 | "output_type": "stream", 74 | "text": [ 75 | "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-images-idx3-ubyte.gz\n", 76 | "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/train-labels-idx1-ubyte.gz\n", 77 | "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-images-idx3-ubyte.gz\n", 78 | "Downloading http://fashion-mnist.s3-website.eu-central-1.amazonaws.com/t10k-labels-idx1-ubyte.gz\n", 79 | "Processing...\n", 80 | "Done!\n" 81 | ] 82 | } 83 | ], 84 | "source": [ 85 | "train_loader, test_loader = load_fashion_mnist()" 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": 13, 91 | "metadata": { 92 | "collapsed": false 93 | }, 94 | "outputs": [], 95 | "source": [ 96 | "model = mlp_torch.Model(784, 500, 256, 64, 10)" 97 | ] 98 | }, 99 | { 100 | "cell_type": "code", 101 | "execution_count": 14, 102 | "metadata": { 103 | "collapsed": false, 104 | "scrolled": true 105 | }, 106 | "outputs": [ 107 | { 108 | "name": "stdout", 109 | "output_type": "stream", 110 | "text": [ 111 | "Iter: 1000. Accuracy: 0.5443\n", 112 | "Iter: 2000. Accuracy: 0.5616\n", 113 | "Iter: 3000. Accuracy: 0.567\n", 114 | "Iter: 4000. Accuracy: 0.563\n", 115 | "Iter: 5000. Accuracy: 0.5626\n", 116 | "Iter: 6000. Accuracy: 0.5648\n", 117 | "Iter: 7000. Accuracy: 0.5634\n", 118 | "Iter: 8000. Accuracy: 0.5632\n", 119 | "Iter: 9000. Accuracy: 0.5682\n", 120 | "Iter: 10000. Accuracy: 0.5572\n", 121 | "Iter: 11000. Accuracy: 0.5668\n", 122 | "Iter: 12000. Accuracy: 0.5636\n", 123 | "Iter: 13000. Accuracy: 0.564\n", 124 | "Iter: 14000. Accuracy: 0.5682\n", 125 | "Iter: 15000. Accuracy: 0.5675\n", 126 | "Iter: 16000. Accuracy: 0.5637\n", 127 | "Iter: 17000. Accuracy: 0.5694\n", 128 | "Iter: 18000. Accuracy: 0.5638\n", 129 | "Iter: 19000. Accuracy: 0.5664\n", 130 | "Iter: 20000. Accuracy: 0.561\n", 131 | "Iter: 21000. Accuracy: 0.5662\n", 132 | "Iter: 22000. Accuracy: 0.5592\n", 133 | "Iter: 23000. Accuracy: 0.5661\n", 134 | "Iter: 24000. Accuracy: 0.6536\n", 135 | "Iter: 25000. Accuracy: 0.657\n", 136 | "Iter: 26000. Accuracy: 0.6572\n", 137 | "Iter: 27000. Accuracy: 0.66\n", 138 | "Iter: 28000. 
Accuracy: 0.6572\n" 139 | ] 140 | } 141 | ], 142 | "source": [ 143 | "max_epochs = 30\n", 144 | "report_size = 1000\n", 145 | "iter = 0\n", 146 | "optimizer = torch.optim.Adam(model.parameters(), lr=0.001)\n", 147 | "criterion = nn.CrossEntropyLoss()\n", 148 | "log = {}\n", 149 | "\n", 150 | "for epoch in range(max_epochs):\n", 151 | " for i, (features, labels) in enumerate(train_loader):\n", 152 | " \n", 153 | " features = Variable(features)\n", 154 | " labels = Variable(labels)\n", 155 | " \n", 156 | " optimizer.zero_grad()\n", 157 | " outputs = model(features)\n", 158 | " \n", 159 | " loss = criterion(outputs, labels)\n", 160 | " loss.backward()\n", 161 | " \n", 162 | " optimizer.step()\n", 163 | " \n", 164 | " iter += 1\n", 165 | " \n", 166 | " if iter % report_size == 0:\n", 167 | " correct = 0.\n", 168 | " total = 0.\n", 169 | " \n", 170 | " for features, labels in test_loader:\n", 171 | " features = Variable(features)\n", 172 | " result = model(features)\n", 173 | " \n", 174 | " _, predicted = torch.max(result.data, 1)\n", 175 | " \n", 176 | " total += labels.size(0)\n", 177 | " correct += (predicted == labels).sum()\n", 178 | " \n", 179 | " accuracy = correct / total\n", 180 | " log[iter] = accuracy\n", 181 | " print('Iter: {}. Accuracy: {}'.format(iter, accuracy))" 182 | ] 183 | }, 184 | { 185 | "cell_type": "code", 186 | "execution_count": null, 187 | "metadata": { 188 | "collapsed": true 189 | }, 190 | "outputs": [], 191 | "source": [ 192 | "%matplotlib inline \n", 193 | "import matplotlib\n", 194 | "import matplotlib.pyplot as plt\n", 195 | "import numpy as np\n", 196 | "matplotlib.rcParams.update({'font.size': 14})\n", 197 | "figsize = (8, 5)\n", 198 | " \n", 199 | " \n", 200 | "def plot(test_logs, size = figsize):\n", 201 | " \n", 202 | " plt.figure(1, figsize=size)\n", 203 | " \n", 204 | " lists = sorted(test_logs.items()) \n", 205 | " x, y = zip(*lists) \n", 206 | " plt.plot(x, y, label = 'Testing')\n", 207 | " \n", 208 | " plt.ylabel('Accuracy ')\n", 209 | " plt.xlabel('Number of Iterations')\n", 210 | " plt.title('Test Accuracy VS. 
Number of Iterations')" 211 | ] 212 | }, 213 | { 214 | "cell_type": "code", 215 | "execution_count": null, 216 | "metadata": { 217 | "collapsed": false 218 | }, 219 | "outputs": [], 220 | "source": [ 221 | "plot(log)" 222 | ] 223 | } 224 | ], 225 | "metadata": { 226 | "kernelspec": { 227 | "display_name": "Python 2", 228 | "language": "python", 229 | "name": "python2" 230 | }, 231 | "language_info": { 232 | "codemirror_mode": { 233 | "name": "ipython", 234 | "version": 2 235 | }, 236 | "file_extension": ".py", 237 | "mimetype": "text/x-python", 238 | "name": "python", 239 | "nbconvert_exporter": "python", 240 | "pygments_lexer": "ipython2", 241 | "version": "2.7.13" 242 | } 243 | }, 244 | "nbformat": 4, 245 | "nbformat_minor": 2 246 | } 247 | -------------------------------------------------------------------------------- /mlp_vec.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import math 3 | import random 4 | import utils 5 | 6 | 7 | class BNLayer: 8 | def __init__(self, parent): 9 | self.parent_layer = parent 10 | self.next_layer = parent.next_layer 11 | self.out_dim = self.parent_layer.out_dim 12 | 13 | self.beta = np.zeros(self.out_dim) 14 | self.gamma = np.ones(self.out_dim) 15 | self.dbeta = np.zeros(self.out_dim) 16 | self.dgamma = np.zeros(self.out_dim) 17 | 18 | 19 | self.lrate = self.parent_layer.learning_rate 20 | self.reg = self.parent_layer.reg 21 | self.momentum = self.parent_layer.momentum 22 | 23 | def transform(self, X): 24 | self.X = X 25 | self.mu = np.mean(X, axis=0) 26 | self.var = np.var(X, axis=0) 27 | self.normalized_X = (X - self.mu) / np.sqrt(self.var + 1e-10) 28 | self.vals = self.gamma * self.normalized_X + self.beta 29 | 30 | def compute_delta(self): 31 | self.delta = self.next_layer.delta.dot(self.next_layer.W.T) * self.parent_layer.derivative(self.parent_layer.vals) 32 | 33 | def compute_para_delta(self): 34 | 35 | N, D = self.X.shape 36 | self.Xmu = self.X - self.mu 37 | self.XmuN = self.Xmu/N 38 | self.std_inv = 1. / np.sqrt(self.var + 1e-10) 39 | 40 | self.d_normalized_X = self.delta * self.gamma 41 | 42 | inv = self.std_inv * self.std_inv * self.std_inv 43 | self.dvar = - np.sum(self.d_normalized_X * self.Xmu, axis = 0) * (1/2.) 
* inv 44 | self.dmuN = 1./N * (np.sum(self.d_normalized_X * -self.std_inv, axis = 0) - self.dvar * (2) * np.mean(self.Xmu, axis = 0)) 45 | 46 | self.dX = (self.d_normalized_X * self.std_inv) + 2 * (self.dvar * self.XmuN) + self.dmuN 47 | self.dgamma = np.sum(self.delta * self.normalized_X, axis=0) + self.momentum*self.dgamma 48 | self.dbeta = np.sum(self.delta, axis=0) + self.momentum * self.dbeta 49 | 50 | def weight_update(self): 51 | self.bn.para_update() 52 | self.gamma -= self.lrate*self.dgamma 53 | self.beta -= self.lrate*self.dbeta 54 | 55 | 56 | class LayerArgs: 57 | def __init__(self, in_dim, out_dim, derivative = utils.d_sigmoid, activate = utils.sigmoid, layer_type = "HIDDEN", learning_rate = 0.05, momentum = 0., regularization = 0.000): 58 | self.in_dim = in_dim 59 | self.out_dim = out_dim 60 | self.derivative = derivative 61 | self.activate = activate 62 | self.type = layer_type 63 | self.learning_rate = learning_rate 64 | self.momentum = momentum 65 | self.regularization = regularization 66 | 67 | class Layer: 68 | def __init__(self, args): 69 | 70 | self.in_dim = args.in_dim 71 | self.out_dim = args.out_dim 72 | 73 | self.vals = None 74 | 75 | self.type = args.type 76 | self.learning_rate = args.learning_rate 77 | self.reg = args.regularization 78 | self.momentum = args.momentum 79 | self.derivative = args.derivative 80 | self.activate = args.activate 81 | 82 | self.next_layer = None 83 | self.prev_layer = None 84 | 85 | self.initialize_weights() 86 | 87 | def initialize_weights(self): 88 | unib = math.sqrt(6)/math.sqrt(self.in_dim + self.out_dim) 89 | self.W = np.random.uniform(-unib, unib, (self.in_dim, self.out_dim)) 90 | self.b = np.zeros((1, self.out_dim)) 91 | 92 | self.dW = np.zeros((self.in_dim, self.out_dim)) 93 | self.db = np.zeros((1, self.out_dim)) 94 | 95 | def epoch_size(self): 96 | return self.vals.shape[0] 97 | 98 | def connect_layer(self, next_layer): 99 | self.next_layer = next_layer 100 | next_layer.prev_layer = self 101 | 102 | def layer_forward(self): 103 | self.wx = self.prev_layer.vals.dot(self.W) + self.b 104 | 105 | if self.type == "INPUT": 106 | self.vals = self.activate(self.wx) 107 | else: 108 | self.bn.transform(self.wx) 109 | self.vals = self.activate(self.bn.vals) 110 | 111 | if self.type == "OUTPUT": 112 | self.prob = self.vals / np.sum(self.vals, axis=1, keepdims=True) 113 | 114 | def epoch_size(self): 115 | return self.vals.shape[0] 116 | 117 | def layer_backward(self, y = None): 118 | 119 | if self.type == "OUTPUT": 120 | self.bn.delta = np.copy(self.prob) 121 | self.bn.delta -= y 122 | self.bn.compute_para_delta() 123 | 124 | elif self.type == "HIDDEN": 125 | self.bn.compute_delta() 126 | self.bn.compute_para_delta() 127 | 128 | self.delta = self.bn.dX 129 | 130 | self.dW = (self.prev_layer.vals.T).dot(self.delta) + self.momentum * self.dW 131 | self.db = np.sum(self.delta, axis=0, keepdims=True) + self.momentum * self.db 132 | 133 | self.W -= self.learning_rate * self.dW /self.epoch_size() 134 | self.b -= self.learning_rate * self.db /self.epoch_size() 135 | 136 | def loss(self, gold): 137 | logprobs = -np.multiply(gold, np.log(self.prob)) 138 | data_loss = np.sum(logprobs) 139 | return 1./self.epoch_size() * data_loss 140 | 141 | 142 | 143 | def add_bn(self): 144 | self.bn = BNLayer(self) 145 | 146 | class ModelArgs: 147 | def __init__(self, num_passes = 100, max_iter = 500, batch_size = 20, report_interval = 10): 148 | self.num_passes = num_passes 149 | self.max_iter = max_iter 150 | self.batch_size = batch_size 151 | 
self.report_interval = report_interval 152 | 153 | class Model: 154 | def __init__(self, layer_args, model_arg): 155 | 156 | self.layer_args = layer_args 157 | self.max_iter = model_arg.max_iter 158 | self.num_passes = model_arg.num_passes 159 | self.batch_size = model_arg.batch_size 160 | self.report_interval = model_arg.report_interval 161 | 162 | def feed_data(self, X_train, y_train, X_test, y_test): 163 | 164 | self.X_train = X_train 165 | self.y_train = y_train 166 | self.X_test = X_test 167 | self.y_test = y_test 168 | 169 | self.input_dim = self.X_train.shape[1] 170 | self.output_dim = len(self.y_train) 171 | self.train_log_loss = {} 172 | self.test_log_loss = {} 173 | self.train_log_acc = {} 174 | self.test_log_acc = {} 175 | 176 | def trial_data(self, X_train_sub, y_train_sub): 177 | 178 | self.X_train_sub = X_train_sub 179 | self.y_train_sub = y_train_sub 180 | 181 | def make_layer(self, args): 182 | return Layer(args) 183 | 184 | def yield_batches(self, features, classes, batchsize): 185 | sets = np.arange(features.shape[0]) 186 | np.random.shuffle(sets) 187 | for i in range(0, features.shape[0] - batchsize + 1, batchsize): 188 | e = sets[i:i + batchsize] 189 | yield features[e], classes[e] 190 | 191 | def intialize_model(self): 192 | self.input_layer = self.make_layer(self.layer_args[0]) 193 | self.output_layer = self.make_layer(self.layer_args[-1]) 194 | self.hidden_layers = [self.make_layer(self.layer_args[i]) for i in range(1, len(self.layer_args)-1)] 195 | 196 | layers = [self.input_layer] + self.hidden_layers + [self.output_layer] 197 | 198 | for i in range(len(layers)-1): 199 | layers[i].connect_layer(layers[i+1]) 200 | 201 | for i in range(1, len(layers)): 202 | layers[i].add_bn() 203 | 204 | def forward(self, x): 205 | self.input_layer.vals = x 206 | for layer in self.hidden_layers: 207 | layer.layer_forward() 208 | self.output_layer.layer_forward() 209 | 210 | def loss(self, y): 211 | return self.output_layer.loss(y) 212 | 213 | def backward(self, y): 214 | self.output_layer.layer_backward(y) 215 | 216 | for layer in self.hidden_layers[::-1]: 217 | layer.layer_backward() 218 | 219 | def run_model(self): 220 | n_iter = 0 221 | for i in range(1, self.num_passes+1): 222 | for x, y in self.yield_batches(self.X_train, self.y_train, self.batch_size): 223 | n_iter += 1 224 | self.forward(x) 225 | self.backward(y) 226 | if n_iter%self.report_interval == 0: 227 | self.forward(self.X_train_sub) 228 | yield n_iter, [layer.vals for layer in self.hidden_layers] 229 | 230 | self.forward(self.X_test) 231 | self.test_log_loss[i] = self.loss(self.y_test) 232 | 233 | self.forward(self.X_train) 234 | self.train_log_loss[i] = self.loss(self.y_train) 235 | print "Epoch: {}, Train Acc: {}, Test Acc: {}".format(i, self.train_log_loss[i], self.test_log_loss[i]) 236 | 237 | if n_iter > self.max_iter: 238 | break 239 | -------------------------------------------------------------------------------- /mlp.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import math 3 | import random 4 | import utils 5 | 6 | 7 | class BNLayer: 8 | def __init__(self, parent): 9 | self.parent_layer = parent 10 | self.next_layer = parent.next_layer 11 | self.out_dim = self.parent_layer.out_dim 12 | 13 | self.beta = np.zeros(self.out_dim) 14 | self.gamma = np.ones(self.out_dim) 15 | self.dbeta = np.zeros(self.out_dim) 16 | self.dgamma = np.zeros(self.out_dim) 17 | 18 | 19 | self.lrate = self.parent_layer.learning_rate 20 | self.reg = self.parent_layer.reg 
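        # BNLayer attaches batch normalization to its parent Layer:
        #   transform(X)          -- normalize the pre-activation wx over the mini-batch
        #                            (zero mean, unit variance), then apply the learnable
        #                            scale gamma and shift beta
        #   compute_delta()       -- pull the error signal back through the next layer's
        #                            weights and the parent activation's derivative
        #   compute_para_delta()  -- BN backward pass: gradients w.r.t. the input (dX),
        #                            gamma (dgamma) and beta (dbeta)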
21 | self.momentum = self.parent_layer.momentum 22 | 23 | def transform(self, X): 24 | self.X = X 25 | self.mu = np.mean(X, axis=0) 26 | self.var = np.var(X, axis=0) 27 | self.normalized_X = (X - self.mu) / np.sqrt(self.var + 1e-10) 28 | self.vals = self.gamma * self.normalized_X + self.beta 29 | 30 | def compute_delta(self): 31 | self.delta = self.next_layer.delta.dot(self.next_layer.W.T) * self.parent_layer.derivative(self.parent_layer.vals) 32 | 33 | def compute_para_delta(self): 34 | 35 | N, D = self.X.shape 36 | self.Xmu = self.X - self.mu 37 | self.XmuN = self.Xmu/N 38 | self.std_inv = 1. / np.sqrt(self.var + 1e-10) 39 | 40 | self.d_normalized_X = self.delta * self.gamma 41 | 42 | inv = self.std_inv * self.std_inv * self.std_inv 43 | self.dvar = - np.sum(self.d_normalized_X * self.Xmu, axis = 0) * (1/2.) * inv 44 | self.dmuN = 1./N * (np.sum(self.d_normalized_X * -self.std_inv, axis = 0) - self.dvar * (2) * np.mean(self.Xmu, axis = 0)) 45 | 46 | self.dX = (self.d_normalized_X * self.std_inv) + 2 * (self.dvar * self.XmuN) + self.dmuN 47 | self.dgamma = np.sum(self.delta * self.normalized_X, axis=0) + self.momentum*self.dgamma 48 | self.dbeta = np.sum(self.delta, axis=0) + self.momentum * self.dbeta 49 | 50 | def weight_update(self): 51 | self.bn.para_update() 52 | self.gamma -= self.lrate*self.dgamma 53 | self.beta -= self.lrate*self.dbeta 54 | 55 | 56 | class LayerArgs: 57 | def __init__(self, in_dim, out_dim, derivative = utils.d_RELU, activate = utils.RELU, layer_type = "HIDDEN", learning_rate = 0.01, momentum = 0., regularization = 0.000): 58 | self.in_dim = in_dim 59 | self.out_dim = out_dim 60 | self.derivative = derivative 61 | self.activate = activate 62 | self.type = layer_type 63 | self.learning_rate = learning_rate 64 | self.momentum = momentum 65 | self.regularization = regularization 66 | 67 | class Layer: 68 | def __init__(self, args): 69 | 70 | self.in_dim = args.in_dim 71 | self.out_dim = args.out_dim 72 | 73 | self.vals = None 74 | 75 | self.type = args.type 76 | self.learning_rate = args.learning_rate 77 | self.reg = args.regularization 78 | self.momentum = args.momentum 79 | self.derivative = args.derivative 80 | self.activate = args.activate 81 | 82 | self.next_layer = None 83 | self.prev_layer = None 84 | 85 | self.initialize_weights() 86 | 87 | def initialize_weights(self): 88 | unib = math.sqrt(6)/math.sqrt(self.in_dim + self.out_dim) 89 | self.W = np.random.uniform(-unib, unib, (self.in_dim, self.out_dim)) 90 | self.b = np.zeros((1, self.out_dim)) 91 | 92 | self.dW = np.zeros((self.in_dim, self.out_dim)) 93 | self.db = np.zeros((1, self.out_dim)) 94 | 95 | def epoch_size(self): 96 | return self.vals.shape[0] 97 | 98 | def connect_layer(self, next_layer): 99 | self.next_layer = next_layer 100 | next_layer.prev_layer = self 101 | 102 | def layer_forward(self): 103 | self.wx = self.prev_layer.vals.dot(self.W) + self.b 104 | 105 | if self.type == "INPUT": 106 | self.vals = self.activate(self.wx) 107 | else: 108 | self.bn.transform(self.wx) 109 | self.vals = self.activate(self.bn.vals) 110 | 111 | if self.type == "OUTPUT": 112 | self.prob = self.vals / np.sum(self.vals, axis=1, keepdims=True) 113 | 114 | def epoch_size(self): 115 | return self.vals.shape[0] 116 | 117 | def layer_backward(self, y = None): 118 | 119 | if self.type == "OUTPUT": 120 | self.bn.delta = np.copy(self.prob) 121 | self.bn.delta[range(self.epoch_size()), y] -= 1 122 | self.bn.compute_para_delta() 123 | 124 | elif self.type == "HIDDEN": 125 | self.bn.compute_delta() 126 | 
self.bn.compute_para_delta() 127 | 128 | self.delta = self.bn.dX 129 | 130 | self.dW = (self.prev_layer.vals.T).dot(self.delta) + self.momentum * self.dW 131 | self.db = np.sum(self.delta, axis=0, keepdims=True) + self.momentum * self.db 132 | 133 | self.W -= self.learning_rate * self.dW /self.epoch_size() 134 | self.b -= self.learning_rate * self.db /self.epoch_size() 135 | 136 | def loss(self, gold): 137 | logprobs = -np.log(self.prob[range(self.epoch_size()), gold]) 138 | data_loss = np.sum(logprobs) 139 | return 1./self.epoch_size() * data_loss 140 | 141 | def accuracy(self, gold): 142 | preds = np.argmax(self.prob, axis=1) 143 | err = 0. 144 | for i in xrange(len(preds)): 145 | if preds[i] != gold[i]: 146 | err+=1 147 | return 1-err/len(gold) 148 | 149 | def add_bn(self): 150 | self.bn = BNLayer(self) 151 | 152 | class ModelArgs: 153 | def __init__(self, num_passes = 100, max_iter = 500, batch_size = 20, report_interval = 10): 154 | self.num_passes = num_passes 155 | self.max_iter = max_iter 156 | self.batch_size = batch_size 157 | self.report_interval = report_interval 158 | 159 | class Model: 160 | def __init__(self, layer_args, model_arg): 161 | 162 | self.layer_args = layer_args 163 | self.max_iter = model_arg.max_iter 164 | self.num_passes = model_arg.num_passes 165 | self.batch_size = model_arg.batch_size 166 | self.report_interval = model_arg.report_interval 167 | 168 | def feed_data(self, X_train, y_train, X_test, y_test): 169 | 170 | self.X_train = X_train 171 | self.y_train = y_train 172 | self.X_test = X_test 173 | self.y_test = y_test 174 | 175 | self.input_dim = self.X_train.shape[1] 176 | self.output_dim = len(self.y_train) 177 | self.train_log_loss = {} 178 | self.test_log_loss = {} 179 | self.train_log_acc = {} 180 | self.test_log_acc = {} 181 | 182 | def trial_data(self, X_train_sub, y_train_sub): 183 | 184 | self.X_train_sub = X_train_sub 185 | self.y_train_sub = y_train_sub 186 | 187 | def make_layer(self, args): 188 | return Layer(args) 189 | 190 | def yield_batches(self, features, classes, batchsize): 191 | sets = np.arange(features.shape[0]) 192 | np.random.shuffle(sets) 193 | for i in range(0, features.shape[0] - batchsize + 1, batchsize): 194 | e = sets[i:i + batchsize] 195 | yield features[e], classes[e] 196 | 197 | def intialize_model(self): 198 | self.input_layer = self.make_layer(self.layer_args[0]) 199 | self.output_layer = self.make_layer(self.layer_args[-1]) 200 | self.hidden_layers = [self.make_layer(self.layer_args[i]) for i in range(1, len(self.layer_args)-1)] 201 | 202 | layers = [self.input_layer] + self.hidden_layers + [self.output_layer] 203 | 204 | for i in range(len(layers)-1): 205 | layers[i].connect_layer(layers[i+1]) 206 | 207 | for i in range(1, len(layers)): 208 | layers[i].add_bn() 209 | 210 | def forward(self, x): 211 | self.input_layer.vals = x 212 | for layer in self.hidden_layers: 213 | layer.layer_forward() 214 | self.output_layer.layer_forward() 215 | 216 | def loss(self, y): 217 | return self.output_layer.loss(y) 218 | 219 | def accuracy(self, y): 220 | return self.output_layer.accuracy(y) 221 | 222 | def backward(self, y): 223 | self.output_layer.layer_backward(y) 224 | 225 | for layer in self.hidden_layers[::-1]: 226 | layer.layer_backward() 227 | 228 | def run_model(self): 229 | n_iter = 0 230 | for i in range(1, self.num_passes+1): 231 | for x, y in self.yield_batches(self.X_train, self.y_train, self.batch_size): 232 | n_iter += 1 233 | self.forward(x) 234 | self.backward(y) 235 | if n_iter%self.report_interval == 0: 236 | # 
print "MI reported at iteration:", n_iter 237 | self.forward(self.X_train_sub) 238 | yield n_iter, [layer.vals for layer in self.hidden_layers] 239 | 240 | self.forward(self.X_test) 241 | self.test_log_loss[i] = self.loss(self.y_test) 242 | self.test_log_acc[i] = self.accuracy(self.y_test) 243 | 244 | self.forward(self.X_train) 245 | self.train_log_loss[i] = self.loss(self.y_train) 246 | self.train_log_acc[i] = self.accuracy(self.y_train) 247 | print "Epoch: {}, Train Acc: {}, Test Acc: {}".format(i, self.train_log_acc[i], self.test_log_acc[i]) 248 | 249 | if n_iter > self.max_iter: 250 | break 251 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Untitled-Copy3-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": false 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "from utils import load_mnist\n", 12 | "import utils\n", 13 | "import numpy as np\n", 14 | "from mlp import Layer, LayerArgs, Model, ModelArgs\n", 15 | "from collections import Counter\n", 16 | "import math\n", 17 | "from mi_tool import MI" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": 13, 23 | "metadata": { 24 | "collapsed": false 25 | }, 26 | "outputs": [], 27 | "source": [ 28 | "X_train, y_train = load_mnist('../fashion_mnist/', kind='train')\n", 29 | "X_test, y_test = load_mnist('../fashion_mnist/', kind='t10k')\n", 30 | "# X_train, X_test = np.multiply(X_train, 1.0 / 255.0), np.multiply(X_test, 1.0 / 255.0)\n", 31 | "\n", 32 | "X_train.setflags(write=1)\n", 33 | "X_test.setflags(write=1)\n", 34 | "\n", 35 | "X_train = np.where(X_train>127, 1, 0)\n", 36 | "X_test = np.where(X_test>127, 1, 0)" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": 14, 42 | "metadata": { 43 | "collapsed": false 44 | }, 45 | "outputs": [], 46 | "source": [ 47 | "X_train, y_train = utils.unison_shuffled_copies(X_train, y_train)\n", 48 | "X_train_subset, y_train_subset = X_train, y_train" 49 | ] 50 | }, 51 | { 52 | "cell_type": "code", 53 | "execution_count": 15, 54 | "metadata": { 55 | "collapsed": false 56 | }, 57 | "outputs": [], 58 | "source": [ 59 | "layer_args = [LayerArgs(784, 784, layer_type = \"INPUT\"), LayerArgs(784, 20), LayerArgs(20, 20), LayerArgs(20, 10, layer_type = \"OUTPUT\", activate = np.exp)]\n", 60 | "model_args = ModelArgs(num_passes = 80, max_iter=100000, report_interval=1000)\n", 61 | "\n", 62 | "model = Model(layer_args, model_args)\n", 63 | "model.feed_data(X_train, y_train, X_test, y_test)\n", 64 | "model.trial_data(X_train_subset, y_train_subset)\n", 65 | "\n", 66 | "model.intialize_model()" 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": 16, 72 | "metadata": { 73 | "collapsed": false 74 | }, 75 | "outputs": [], 76 | "source": [ 77 | "MI_client = MI(X_train_subset, y_train_subset, 10)\n", 78 | "MI_client.discretize()\n", 79 | "MI_client.pre_compute()" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": 17, 85 | "metadata": { 86 | "collapsed": false, 87 | "scrolled": true 88 | }, 89 | "outputs": [ 90 | { 91 | "name": "stdout", 92 | "output_type": "stream", 93 | "text": [ 94 | "MI reported at iteration: 1000\n", 95 | "[10.851518684477504, 10.820112162097875] [2.2932294877208914, 2.289054937423289]\n", 96 | "MI reported at iteration: 2000\n", 97 | "[10.84493358911079, 10.776510916541673] [2.293322556357157, 
2.28512581291363]\n", 98 | "MI reported at iteration: 3000\n", 99 | "[10.847337706682275, 10.772486036942007] [2.295519228273611, 2.287298524983312]\n" 100 | ] 101 | }, 102 | { 103 | "ename": "IndexError", 104 | "evalue": "shape mismatch: indexing arrays could not be broadcast together with shapes (60000,) (10000,) ", 105 | "output_type": "error", 106 | "traceback": [ 107 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", 108 | "\u001b[0;31mIndexError\u001b[0m Traceback (most recent call last)", 109 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0;31m \u001b[0;32mfor\u001b[0m \u001b[0mepoch\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhidden_layers\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mmodel\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrun_model\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 2\u001b[0m \u001b[0mMI_client\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmi_single_epoch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhidden_layers\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mepoch\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 110 | "\u001b[0;32m/Users/erichsu/Documents/research/Information-Bottleneck-for-Deep-Learning/mlp.pyc\u001b[0m in \u001b[0;36mrun_model\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 239\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 240\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mforward\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mX_test\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 241\u001b[0;31m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtest_log_loss\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mloss\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0my_test\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 242\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtest_log_acc\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0maccuracy\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0my_test\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 243\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n", 111 | "\u001b[0;32m/Users/erichsu/Documents/research/Information-Bottleneck-for-Deep-Learning/mlp.pyc\u001b[0m in \u001b[0;36mloss\u001b[0;34m(self, y)\u001b[0m\n\u001b[1;32m 215\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 216\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mloss\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0my\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 217\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0moutput_layer\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mloss\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0my\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 218\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 219\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0maccuracy\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m 
\u001b[0my\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 112 | "\u001b[0;32m/Users/erichsu/Documents/research/Information-Bottleneck-for-Deep-Learning/mlp.pyc\u001b[0m in \u001b[0;36mloss\u001b[0;34m(self, gold)\u001b[0m\n\u001b[1;32m 135\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 136\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mloss\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgold\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 137\u001b[0;31m \u001b[0mlogprobs\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m-\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlog\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprob\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mepoch_size\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mgold\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 138\u001b[0m \u001b[0mdata_loss\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msum\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlogprobs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 139\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0;36m1.\u001b[0m\u001b[0;34m/\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mepoch_size\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0mdata_loss\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 113 | "\u001b[0;31mIndexError\u001b[0m: shape mismatch: indexing arrays could not be broadcast together with shapes (60000,) (10000,) " 114 | ] 115 | } 116 | ], 117 | "source": [ 118 | "for epoch, hidden_layers in model.run_model():\n", 119 | " MI_client.mi_single_epoch(hidden_layers, epoch)" 120 | ] 121 | }, 122 | { 123 | "cell_type": "code", 124 | "execution_count": null, 125 | "metadata": { 126 | "collapsed": false 127 | }, 128 | "outputs": [], 129 | "source": [ 130 | "en_mis, de_mis, epochs = np.array(MI_client.en_mi_collector), np.array(MI_client.de_mi_collector), np.array(MI_client.epochs)" 131 | ] 132 | }, 133 | { 134 | "cell_type": "code", 135 | "execution_count": null, 136 | "metadata": { 137 | "collapsed": true 138 | }, 139 | "outputs": [], 140 | "source": [ 141 | "%matplotlib inline\n", 142 | "import matplotlib.pyplot as plt" 143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "execution_count": null, 148 | "metadata": { 149 | "collapsed": true 150 | }, 151 | "outputs": [], 152 | "source": [ 153 | "fig, ax = plt.subplots(figsize=(8,8))\n", 154 | "ax.set_ylabel('MI_T,Y')\n", 155 | "ax.set_xlabel('MI_X,T')\n", 156 | "title = ax.set_title('Information plane')\n", 157 | "# plt.close(fig)" 158 | ] 159 | }, 160 | { 161 | "cell_type": "code", 162 | "execution_count": null, 163 | "metadata": { 164 | "collapsed": false 165 | }, 166 | "outputs": [], 167 | "source": [ 168 | "def plot(i):\n", 169 | " ax.plot(en_mis[i,:], de_mis[i,:], 'k-', alpha=0.2)\n", 170 | " if i > 0:\n", 171 | " for j in range(len(en_mis[0])):\n", 172 | " ax.plot(en_mis[(i-1):(i+1),j],de_mis[(i-1):(i+1),j],'.-',c=cmap(j*.2),ms=10)\n", 173 | " \n", 174 | "for i in range(len(en_mis)):\n", 175 | " animate(i)" 176 | ] 177 | }, 178 | { 179 | "cell_type": "code", 180 | "execution_count": null, 181 | "metadata": { 182 | "collapsed": false 183 | }, 184 | 
"outputs": [], 185 | "source": [ 186 | "len(en_mis)" 187 | ] 188 | }, 189 | { 190 | "cell_type": "code", 191 | "execution_count": null, 192 | "metadata": { 193 | "collapsed": false, 194 | "scrolled": false 195 | }, 196 | "outputs": [], 197 | "source": [ 198 | "en_mis" 199 | ] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "execution_count": null, 204 | "metadata": { 205 | "collapsed": false 206 | }, 207 | "outputs": [], 208 | "source": [ 209 | "ax.set_title('Epoch 0000 - 2999')\n", 210 | "fig" 211 | ] 212 | }, 213 | { 214 | "cell_type": "code", 215 | "execution_count": null, 216 | "metadata": { 217 | "collapsed": true 218 | }, 219 | "outputs": [], 220 | "source": [] 221 | } 222 | ], 223 | "metadata": { 224 | "kernelspec": { 225 | "display_name": "Python 2", 226 | "language": "python", 227 | "name": "python2" 228 | }, 229 | "language_info": { 230 | "codemirror_mode": { 231 | "name": "ipython", 232 | "version": 2 233 | }, 234 | "file_extension": ".py", 235 | "mimetype": "text/x-python", 236 | "name": "python", 237 | "nbconvert_exporter": "python", 238 | "pygments_lexer": "ipython2", 239 | "version": "2.7.13" 240 | } 241 | }, 242 | "nbformat": 4, 243 | "nbformat_minor": 2 244 | } 245 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Untitled-Copy2-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 26, 6 | "metadata": { 7 | "collapsed": false 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "from utils import load_mnist\n", 12 | "import utils\n", 13 | "import numpy as np\n", 14 | "from mlp import Layer, LayerArgs, Model, ModelArgs\n", 15 | "from collections import Counter\n", 16 | "import math" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": 2, 22 | "metadata": { 23 | "collapsed": true 24 | }, 25 | "outputs": [], 26 | "source": [ 27 | "X_train, y_train = load_mnist('../fashion_mnist/', kind='train')\n", 28 | "X_test, y_test = load_mnist('../fashion_mnist/', kind='t10k')" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 3, 34 | "metadata": { 35 | "collapsed": false 36 | }, 37 | "outputs": [], 38 | "source": [ 39 | "X_train, y_train = utils.unison_shuffled_copies(X_train, y_train)\n", 40 | "X_train, y_train= X_train, y_train\n", 41 | "\n", 42 | "X_test, y_test = utils.unison_shuffled_copies(X_test, y_test)\n", 43 | "X_test, y_test = X_test, y_test" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 4, 49 | "metadata": { 50 | "collapsed": true 51 | }, 52 | "outputs": [], 53 | "source": [ 54 | "X_train, X_test = np.multiply(X_train, 1.0 / 255.0), np.multiply(X_test, 1.0 / 255.0)" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": 5, 60 | "metadata": { 61 | "collapsed": false 62 | }, 63 | "outputs": [ 64 | { 65 | "name": "stdout", 66 | "output_type": "stream", 67 | "text": [ 68 | "data imported\n" 69 | ] 70 | } 71 | ], 72 | "source": [ 73 | "layer_args = [LayerArgs(784, 784, layer_type = \"INPUT\"), LayerArgs(784, 10), LayerArgs(10, 20), LayerArgs(20, 10, layer_type = \"OUTPUT\", activate = np.exp)]\n", 74 | "model_args = ModelArgs(num_passes = 400)\n", 75 | "\n", 76 | "model = Model(layer_args, model_args)\n", 77 | "model.feed_data(X_train, y_train, X_test, y_test)\n", 78 | "\n", 79 | "print \"data imported\"\n", 80 | "model.intialize_model()" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": 33, 86 | "metadata": 
{ 87 | "collapsed": false 88 | }, 89 | "outputs": [], 90 | "source": [ 91 | "class MI:\n", 92 | " def __init__(self, bin_size):\n", 93 | " self.X = X\n", 94 | " self.y = y\n", 95 | " self.bins = np.linspace(0, 1, bin_size+1)\n", 96 | " self.n_samples = self.X.shape[0]\n", 97 | " self.unit = 1./n_samples\n", 98 | " \n", 99 | " self.pdf_x = Counter()\n", 100 | " self.pdf_y = Counter()\n", 101 | " \n", 102 | " self.en_mi_collector = []\n", 103 | " self.de_mi_collector = []\n", 104 | " self.epochs = []\n", 105 | "\n", 106 | "\n", 107 | " def discretize(self):\n", 108 | " self.X_d = np.digitize(self.X, bins).tolist()\n", 109 | " self.y_d = self.X.tolist()\n", 110 | " \n", 111 | " def pre_compute(self):\n", 112 | " for i in range(self.n_samples):\n", 113 | " self.pdf_x[utils.totuple(self.X_d[i])] += unit\n", 114 | " self.pdf_y[self.y_d[i]] += unit\n", 115 | " \n", 116 | " def combine(self, a, b):\n", 117 | " return (totuple(a,)) + tuple(b)\n", 118 | "\n", 119 | " def joint_compute(self, hidden):\n", 120 | " \n", 121 | " self.h = np.digitize(hidden, bins).tolist()\n", 122 | " self.pdf_t = Counter()\n", 123 | " self.pdf_xt = Counter()\n", 124 | " self.pdf_yt = Counter()\n", 125 | " \n", 126 | " for i in range(self.n_samples):\n", 127 | " xt = combine(self.X_d[i], indices[i,:])\n", 128 | " yt = combine(self.y_d[i], indices[i,:])\n", 129 | " self.pdf_xt[xt] += unit\n", 130 | " self.pdf_yt[yt] += unit\n", 131 | " self.pdf_t[tuple(self.h[i])] += unit\n", 132 | " \n", 133 | " def encoder_mi(self):\n", 134 | " return sum([ self.pdf_xt[xt] * (math.log(self.pdf_xt[xt]) - math.log(self.pdf_x[xt[0]]) - math.log(self.pdf_t[xt[1]])) for xt in self.pdf_xt ])\n", 135 | " \n", 136 | " def decoder_mi(self):\n", 137 | " return sum([ self.pdf_yt[yt] * (math.log(self.pdf_yt[yt]) - math.log(self.pdf_y[yt[0]]) - math.log(self.pdf_t[yt[1]])) for yt in self.pdf_yt ])\n", 138 | " \n", 139 | " def mi_single_epoch(self, hiddens, epoch):\n", 140 | " ens = []\n", 141 | " des = []\n", 142 | " for hidden in hiddens:\n", 143 | " self.joint_compute(hidden)\n", 144 | " ens.append(self.encoder_mi())\n", 145 | " des.append(self.decoder_mi())\n", 146 | " self.en_mi_collector.append(ens)\n", 147 | " self.de_mi_collector.append(des)\n", 148 | " self.epochs.append(epoch)" 149 | ] 150 | }, 151 | { 152 | "cell_type": "code", 153 | "execution_count": null, 154 | "metadata": { 155 | "collapsed": false 156 | }, 157 | "outputs": [], 158 | "source": [ 159 | "def calc_mutual_information(hidden):\n", 160 | " n_neurons = hidden.shape[1]\n", 161 | " \n", 162 | " n_bins = 30\n", 163 | " bins = np.linspace(0, 1, n_bins+1)\n", 164 | " indices = np.digitize(hidden, bins)\n", 165 | " \n", 166 | " # initialize pdfs\n", 167 | " pdf_t = Counter(); pdf_xt = Counter(); pdf_yt = Counter()\n", 168 | " \n", 169 | " for i in range(n_train_samples): \n", 170 | " pdf_xt[(totuple(X_train[i]),)+tuple(indices[i,:])] += 1/float(n_train_samples)\n", 171 | " pdf_yt[(y_train[i],)+tuple(indices[i,:])] += 1/float(n_train_samples)\n", 172 | " pdf_t[tuple(indices[i,:])] += 1/float(n_train_samples)\n", 173 | " \n", 174 | " # calcuate encoder mutual information I(X;T)\n", 175 | " mi_xt = 0\n", 176 | " for i in pdf_xt:\n", 177 | " p_xt = pdf_xt[i]; p_x = pdf_x[i[0]]; p_t = pdf_t[i[1:]]\n", 178 | " mi_xt += p_xt * np.log(p_xt / p_x / p_t)\n", 179 | " \n", 180 | " # calculate decoder mutual information I(T;Y)\n", 181 | " mi_ty = 0\n", 182 | " for i in pdf_yt:\n", 183 | " # P(t,y), P(t) and P(y)\n", 184 | " p_yt = pdf_yt[i]; p_t = pdf_t[i[1:]]; p_y = pdf_y[i[0]]\n", 185 | " # 
I(X;T)\n", 186 | " mi_ty += p_yt * np.log(p_yt / p_t / p_y)\n", 187 | " \n", 188 | " return mi_xt, mi_ty\n", 189 | "\n", 190 | "# get mutual information for all hidden layers\n", 191 | "def get_mutual_information(hiddens):\n", 192 | " mi_xt_list = []; mi_ty_list = []\n", 193 | " for hidden in hiddens:\n", 194 | " mi_xt, mi_ty = calc_mutual_information(hidden)\n", 195 | " mi_xt_list.append(mi_xt)\n", 196 | " mi_ty_list.append(mi_ty)\n", 197 | " return mi_xt_list, mi_ty_list" 198 | ] 199 | }, 200 | { 201 | "cell_type": "code", 202 | "execution_count": null, 203 | "metadata": { 204 | "collapsed": false, 205 | "scrolled": true 206 | }, 207 | "outputs": [], 208 | "source": [ 209 | "for epoch, hidden_layers in model.run_model():\n", 210 | " pass" 211 | ] 212 | }, 213 | { 214 | "cell_type": "code", 215 | "execution_count": null, 216 | "metadata": { 217 | "collapsed": true 218 | }, 219 | "outputs": [], 220 | "source": [ 221 | "n_bins = 30\n", 222 | "bins = np.linspace(0, 1, n_bins+1)\n", 223 | "indices = np.digitize(model.hidden_layers[0].vals, bins)" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": null, 229 | "metadata": { 230 | "collapsed": false 231 | }, 232 | "outputs": [], 233 | "source": [ 234 | "indices.shape" 235 | ] 236 | }, 237 | { 238 | "cell_type": "code", 239 | "execution_count": null, 240 | "metadata": { 241 | "collapsed": false, 242 | "scrolled": true 243 | }, 244 | "outputs": [], 245 | "source": [ 246 | "mi_xt_all = []\n", 247 | "mi_ty_all = []\n", 248 | "epochs = []\n", 249 | "\n", 250 | "for epoch, hidden_layers in model.run_model():\n", 251 | " mi_xt, mi_ty = get_mutual_information(hidden_layers)\n", 252 | " mi_xt_all.append(mi_xt)\n", 253 | " mi_ty_all.append(mi_ty)\n", 254 | " epochs.append(epoch)" 255 | ] 256 | }, 257 | { 258 | "cell_type": "code", 259 | "execution_count": null, 260 | "metadata": { 261 | "collapsed": false 262 | }, 263 | "outputs": [], 264 | "source": [ 265 | "mi_xt_all, mi_ty_all, epochs = np.array(mi_xt_all), np.array(mi_ty_all), np.array(epochs)" 266 | ] 267 | }, 268 | { 269 | "cell_type": "code", 270 | "execution_count": null, 271 | "metadata": { 272 | "collapsed": true 273 | }, 274 | "outputs": [], 275 | "source": [ 276 | "%matplotlib inline\n", 277 | "import matplotlib.pyplot as plt\n", 278 | "from matplotlib import animation\n", 279 | "from IPython.display import HTML" 280 | ] 281 | }, 282 | { 283 | "cell_type": "code", 284 | "execution_count": null, 285 | "metadata": { 286 | "collapsed": true 287 | }, 288 | "outputs": [], 289 | "source": [ 290 | "fig, ax = plt.subplots(figsize=(8,8))\n", 291 | "#ax.set_xlim((5, 7.0))\n", 292 | "#ax.set_ylim((1.75,2.3))\n", 293 | "ax.set_xlabel('I(X;T)')\n", 294 | "ax.set_ylabel('I(T;Y)')\n", 295 | "title = ax.set_title('')\n", 296 | "plt.close(fig)" 297 | ] 298 | }, 299 | { 300 | "cell_type": "code", 301 | "execution_count": null, 302 | "metadata": { 303 | "collapsed": false 304 | }, 305 | "outputs": [], 306 | "source": [ 307 | "len(mi_xt_all)" 308 | ] 309 | }, 310 | { 311 | "cell_type": "code", 312 | "execution_count": null, 313 | "metadata": { 314 | "collapsed": false 315 | }, 316 | "outputs": [], 317 | "source": [ 318 | "cmap = plt.cm.get_cmap('cool')\n", 319 | "\n", 320 | "def animate(i):\n", 321 | " title.set_text('Epoch %s' % str(epochs[i]).zfill(4))\n", 322 | " ax.plot(mi_xt_all[i,:], mi_ty_all[i,:], 'k-',alpha=0.2)\n", 323 | " if i > 0:\n", 324 | " for j in range(2):\n", 325 | " ax.plot(mi_xt_all[(i-1):(i+1),j],mi_ty_all[(i-1):(i+1),j],'.-',c=cmap(j*.2),ms=10)\n", 326 | " \n", 327 
| "for i in range(len(mi_ty_all)):\n", 328 | " animate(i)" 329 | ] 330 | }, 331 | { 332 | "cell_type": "code", 333 | "execution_count": null, 334 | "metadata": { 335 | "collapsed": false 336 | }, 337 | "outputs": [], 338 | "source": [ 339 | "ax.set_title('Epoch 0000 - 2999')\n", 340 | "fig" 341 | ] 342 | }, 343 | { 344 | "cell_type": "code", 345 | "execution_count": null, 346 | "metadata": { 347 | "collapsed": false 348 | }, 349 | "outputs": [], 350 | "source": [ 351 | "mi_ty_all" 352 | ] 353 | }, 354 | { 355 | "cell_type": "code", 356 | "execution_count": null, 357 | "metadata": { 358 | "collapsed": false 359 | }, 360 | "outputs": [], 361 | "source": [ 362 | "[12,3,4][::-1]" 363 | ] 364 | }, 365 | { 366 | "cell_type": "code", 367 | "execution_count": null, 368 | "metadata": { 369 | "collapsed": true 370 | }, 371 | "outputs": [], 372 | "source": [] 373 | } 374 | ], 375 | "metadata": { 376 | "kernelspec": { 377 | "display_name": "Python 2", 378 | "language": "python", 379 | "name": "python2" 380 | }, 381 | "language_info": { 382 | "codemirror_mode": { 383 | "name": "ipython", 384 | "version": 2 385 | }, 386 | "file_extension": ".py", 387 | "mimetype": "text/x-python", 388 | "name": "python", 389 | "nbconvert_exporter": "python", 390 | "pygments_lexer": "ipython2", 391 | "version": "2.7.13" 392 | } 393 | }, 394 | "nbformat": 4, 395 | "nbformat_minor": 2 396 | } 397 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/Untitled-Copy1-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": false 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "from utils import load_mnist\n", 12 | "import utils\n", 13 | "import numpy as np\n", 14 | "from mlp import Layer, LayerArgs, Model, ModelArgs" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": { 21 | "collapsed": true 22 | }, 23 | "outputs": [], 24 | "source": [ 25 | "X_train, y_train = load_mnist('../fashion_mnist/', kind='train')\n", 26 | "X_test, y_test = load_mnist('../fashion_mnist/', kind='t10k')" 27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "execution_count": null, 32 | "metadata": { 33 | "collapsed": true 34 | }, 35 | "outputs": [], 36 | "source": [ 37 | "X_train.shape" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 3, 43 | "metadata": { 44 | "collapsed": false 45 | }, 46 | "outputs": [], 47 | "source": [ 48 | "X_train, y_train = utils.unison_shuffled_copies(X_train, y_train)\n", 49 | "X_train, y_train= X_train[:1000], y_train[:1000]\n", 50 | "\n", 51 | "X_test, y_test = utils.unison_shuffled_copies(X_test, y_test)\n", 52 | "X_test, y_test = X_test[:1000], y_test[:1000]" 53 | ] 54 | }, 55 | { 56 | "cell_type": "code", 57 | "execution_count": 4, 58 | "metadata": { 59 | "collapsed": true 60 | }, 61 | "outputs": [], 62 | "source": [ 63 | "X_train, X_test = np.multiply(X_train, 1.0 / 255.0), np.multiply(X_test, 1.0 / 255.0)" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": 5, 69 | "metadata": { 70 | "collapsed": false 71 | }, 72 | "outputs": [ 73 | { 74 | "name": "stdout", 75 | "output_type": "stream", 76 | "text": [ 77 | "data imported\n" 78 | ] 79 | } 80 | ], 81 | "source": [ 82 | "layer_args = [LayerArgs(784, 784, layer_type = \"INPUT\"), LayerArgs(784, 12), LayerArgs(12, 12), LayerArgs(12, 12), LayerArgs(12, 10, layer_type = \"OUTPUT\", activate 
= np.exp)]\n", 83 | "model_args = ModelArgs(num_passes = 200)\n", 84 | "\n", 85 | "model = Model(layer_args, model_args)\n", 86 | "model.feed_data(X_train, y_train, X_test, y_test)\n", 87 | "\n", 88 | "print \"data imported\"\n", 89 | "model.intialize_model()" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": 6, 95 | "metadata": { 96 | "collapsed": false 97 | }, 98 | "outputs": [], 99 | "source": [ 100 | "n_train_samples = X_train.shape[0]\n", 101 | "\n", 102 | "def totuple(a):\n", 103 | " try:\n", 104 | " return tuple(totuple(i) for i in a)\n", 105 | " except TypeError:\n", 106 | " return a\n", 107 | " \n", 108 | "from collections import Counter\n", 109 | "\n", 110 | "def calc_mutual_information(hidden):\n", 111 | " n_neurons = hidden.shape[1]\n", 112 | " \n", 113 | " # discretization \n", 114 | " n_bins = 30\n", 115 | " bins = np.linspace(0, 1, n_bins+1)\n", 116 | " indices = np.digitize(hidden, bins)\n", 117 | " \n", 118 | " # initialize pdfs\n", 119 | " pdf_x = Counter(); pdf_y = Counter(); pdf_t = Counter(); pdf_xt = Counter(); pdf_yt = Counter()\n", 120 | "\n", 121 | " for i in range(n_train_samples):\n", 122 | " pdf_x[totuple(X_train[i])] += 1/float(n_train_samples)\n", 123 | " pdf_y[y_train[i]] += 1/float(n_train_samples) \n", 124 | " pdf_xt[(totuple(X_train[i]),)+tuple(indices[i,:])] += 1/float(n_train_samples)\n", 125 | " pdf_yt[(y_train[i],)+tuple(indices[i,:])] += 1/float(n_train_samples)\n", 126 | " pdf_t[tuple(indices[i,:])] += 1/float(n_train_samples)\n", 127 | " \n", 128 | " # calcuate encoder mutual information I(X;T)\n", 129 | " mi_xt = 0\n", 130 | " for i in pdf_xt:\n", 131 | " # P(x,t), P(x) and P(t)\n", 132 | " p_xt = pdf_xt[i]; p_x = pdf_x[i[0]]; p_t = pdf_t[i[1:]]\n", 133 | " # I(X;T)\n", 134 | " mi_xt += p_xt * np.log(p_xt / p_x / p_t)\n", 135 | " \n", 136 | " # calculate decoder mutual information I(T;Y)\n", 137 | " mi_ty = 0\n", 138 | " for i in pdf_yt:\n", 139 | " # P(t,y), P(t) and P(y)\n", 140 | " p_yt = pdf_yt[i]; p_t = pdf_t[i[1:]]; p_y = pdf_y[i[0]]\n", 141 | " # I(X;T)\n", 142 | " mi_ty += p_yt * np.log(p_yt / p_t / p_y)\n", 143 | " \n", 144 | " return mi_xt, mi_ty\n", 145 | "\n", 146 | "# get mutual information for all hidden layers\n", 147 | "def get_mutual_information(hiddens):\n", 148 | " mi_xt_list = []; mi_ty_list = []\n", 149 | " for hidden in hiddens:\n", 150 | " mi_xt, mi_ty = calc_mutual_information(hidden)\n", 151 | " mi_xt_list.append(mi_xt)\n", 152 | " mi_ty_list.append(mi_ty)\n", 153 | " return mi_xt_list, mi_ty_list" 154 | ] 155 | }, 156 | { 157 | "cell_type": "code", 158 | "execution_count": null, 159 | "metadata": { 160 | "collapsed": false, 161 | "scrolled": true 162 | }, 163 | "outputs": [ 164 | { 165 | "name": "stdout", 166 | "output_type": "stream", 167 | "text": [ 168 | "1\n", 169 | "2\n", 170 | "3\n", 171 | "4\n", 172 | "5\n", 173 | "6\n", 174 | "7\n", 175 | "8\n", 176 | "9\n", 177 | "10\n", 178 | "11\n", 179 | "12\n", 180 | "13\n", 181 | "14\n", 182 | "15\n", 183 | "16\n", 184 | "17\n", 185 | "18\n", 186 | "19\n", 187 | "20\n", 188 | "21\n", 189 | "22\n", 190 | "23\n", 191 | "24\n", 192 | "25\n", 193 | "26\n", 194 | "27\n", 195 | "28\n", 196 | "29\n", 197 | "30\n", 198 | "31\n", 199 | "32\n", 200 | "33\n", 201 | "34\n", 202 | "35\n", 203 | "36\n", 204 | "37\n", 205 | "38\n", 206 | "39\n", 207 | "40\n", 208 | "41\n", 209 | "42\n", 210 | "43\n", 211 | "44\n", 212 | "45\n", 213 | "46\n", 214 | "47\n", 215 | "48\n", 216 | "49\n", 217 | "50\n", 218 | "51\n", 219 | "52\n", 220 | "53\n", 221 | "54\n", 222 | "55\n", 223 
| "56\n", 224 | "57\n", 225 | "58\n", 226 | "59\n", 227 | "60\n", 228 | "61\n", 229 | "62\n", 230 | "63\n", 231 | "64\n", 232 | "65\n", 233 | "66\n", 234 | "67\n", 235 | "68\n", 236 | "69\n", 237 | "70\n", 238 | "71\n", 239 | "72\n", 240 | "73\n", 241 | "74\n", 242 | "75\n", 243 | "76\n", 244 | "77\n", 245 | "78\n", 246 | "79\n", 247 | "80\n", 248 | "81\n", 249 | "82\n", 250 | "83\n", 251 | "84\n", 252 | "85\n", 253 | "86\n", 254 | "87\n", 255 | "88\n", 256 | "89\n", 257 | "90\n", 258 | "91\n", 259 | "92\n", 260 | "93\n", 261 | "94\n", 262 | "95\n", 263 | "96\n", 264 | "97\n", 265 | "98\n", 266 | "99\n", 267 | "100\n", 268 | "101\n", 269 | "102\n", 270 | "103\n", 271 | "104\n", 272 | "105\n", 273 | "106\n", 274 | "107\n", 275 | "108\n", 276 | "109\n", 277 | "110\n", 278 | "111\n", 279 | "112\n", 280 | "113\n", 281 | "114\n", 282 | "115\n", 283 | "116\n", 284 | "117\n", 285 | "118\n", 286 | "119\n", 287 | "120\n", 288 | "121\n", 289 | "122\n", 290 | "123\n", 291 | "124\n", 292 | "125\n", 293 | "126\n", 294 | "127\n", 295 | "128\n", 296 | "129\n", 297 | "130\n", 298 | "131\n", 299 | "132\n", 300 | "133\n", 301 | "134\n", 302 | "135\n", 303 | "136\n", 304 | "137\n", 305 | "138\n", 306 | "139\n", 307 | "140\n", 308 | "141\n", 309 | "142\n", 310 | "143\n", 311 | "144\n", 312 | "145\n", 313 | "146\n", 314 | "147\n", 315 | "148\n", 316 | "149\n", 317 | "150\n", 318 | "151\n", 319 | "152\n", 320 | "153\n", 321 | "154\n", 322 | "155\n", 323 | "156\n", 324 | "157\n", 325 | "158\n", 326 | "159\n", 327 | "160\n", 328 | "161\n", 329 | "162\n", 330 | "163\n", 331 | "164\n", 332 | "165\n", 333 | "166\n", 334 | "167\n", 335 | "168\n", 336 | "169\n", 337 | "170\n", 338 | "171\n", 339 | "172\n", 340 | "173\n", 341 | "174\n", 342 | "175\n", 343 | "176\n", 344 | "177\n", 345 | "178\n", 346 | "179\n", 347 | "180\n", 348 | "181\n", 349 | "182\n", 350 | "183\n", 351 | "184\n", 352 | "185\n", 353 | "186\n", 354 | "187\n", 355 | "188\n", 356 | "189\n", 357 | "190\n" 358 | ] 359 | } 360 | ], 361 | "source": [ 362 | "mi_xt_all = []\n", 363 | "mi_ty_all = []\n", 364 | "epochs = []\n", 365 | "\n", 366 | "for epoch, hidden_layers in model.run_model():\n", 367 | " mi_xt, mi_ty = get_mutual_information(hidden_layers)\n", 368 | " mi_xt_all.append(mi_xt)\n", 369 | " mi_ty_all.append(mi_ty)\n", 370 | " epochs.append(epoch)" 371 | ] 372 | }, 373 | { 374 | "cell_type": "code", 375 | "execution_count": null, 376 | "metadata": { 377 | "collapsed": false 378 | }, 379 | "outputs": [], 380 | "source": [ 381 | "mi_xt_all, mi_ty_all, epochs = np.array(mi_xt_all), np.array(mi_ty_all), np.array(epochs)" 382 | ] 383 | }, 384 | { 385 | "cell_type": "code", 386 | "execution_count": null, 387 | "metadata": { 388 | "collapsed": false 389 | }, 390 | "outputs": [], 391 | "source": [ 392 | "mi_xt_all\n" 393 | ] 394 | }, 395 | { 396 | "cell_type": "code", 397 | "execution_count": null, 398 | "metadata": { 399 | "collapsed": false 400 | }, 401 | "outputs": [], 402 | "source": [ 403 | "model.hidden_layers[0].vals" 404 | ] 405 | }, 406 | { 407 | "cell_type": "code", 408 | "execution_count": null, 409 | "metadata": { 410 | "collapsed": true 411 | }, 412 | "outputs": [], 413 | "source": [] 414 | }, 415 | { 416 | "cell_type": "code", 417 | "execution_count": null, 418 | "metadata": { 419 | "collapsed": true 420 | }, 421 | "outputs": [], 422 | "source": [ 423 | "%matplotlib inline\n", 424 | "import matplotlib.pyplot as plt\n", 425 | "from matplotlib import animation\n", 426 | "from IPython.display import HTML" 427 | ] 428 | }, 429 | { 430 | 
"cell_type": "code", 431 | "execution_count": null, 432 | "metadata": { 433 | "collapsed": true 434 | }, 435 | "outputs": [], 436 | "source": [ 437 | "fig, ax = plt.subplots(figsize=(8,8))\n", 438 | "#ax.set_xlim((9.0, 9.24))\n", 439 | "#ax.set_ylim((2.30,2.31))\n", 440 | "ax.set_xlabel('I(X;T)')\n", 441 | "ax.set_ylabel('I(T;Y)')\n", 442 | "title = ax.set_title('')\n", 443 | "plt.close(fig)" 444 | ] 445 | }, 446 | { 447 | "cell_type": "code", 448 | "execution_count": null, 449 | "metadata": { 450 | "collapsed": false 451 | }, 452 | "outputs": [], 453 | "source": [ 454 | "len(mi_xt_all)" 455 | ] 456 | }, 457 | { 458 | "cell_type": "code", 459 | "execution_count": null, 460 | "metadata": { 461 | "collapsed": false 462 | }, 463 | "outputs": [], 464 | "source": [ 465 | "cmap = plt.cm.get_cmap('cool')\n", 466 | "\n", 467 | "def animate(i):\n", 468 | " title.set_text('Epoch %s' % str(epochs[i]).zfill(4))\n", 469 | " ax.plot(mi_xt_all[i,:], mi_ty_all[i,:], 'k-',alpha=0.2)\n", 470 | " if i > 0:\n", 471 | " for j in range(2):\n", 472 | " ax.plot(mi_xt_all[(i-1):(i+1),j],mi_ty_all[(i-1):(i+1),j],'.-',c=cmap(j*.2),ms=10)\n", 473 | " \n", 474 | "for i in range(len(mi_ty_all)):\n", 475 | " animate(i)" 476 | ] 477 | }, 478 | { 479 | "cell_type": "code", 480 | "execution_count": null, 481 | "metadata": { 482 | "collapsed": false 483 | }, 484 | "outputs": [], 485 | "source": [ 486 | "ax.set_title('Epoch 0000 - 2999')\n", 487 | "fig" 488 | ] 489 | }, 490 | { 491 | "cell_type": "code", 492 | "execution_count": null, 493 | "metadata": { 494 | "collapsed": false 495 | }, 496 | "outputs": [], 497 | "source": [ 498 | "mi_ty_all" 499 | ] 500 | }, 501 | { 502 | "cell_type": "code", 503 | "execution_count": null, 504 | "metadata": { 505 | "collapsed": false 506 | }, 507 | "outputs": [], 508 | "source": [ 509 | "[12,3,4][::-1]" 510 | ] 511 | }, 512 | { 513 | "cell_type": "code", 514 | "execution_count": null, 515 | "metadata": { 516 | "collapsed": true 517 | }, 518 | "outputs": [], 519 | "source": [] 520 | } 521 | ], 522 | "metadata": { 523 | "kernelspec": { 524 | "display_name": "Python 2", 525 | "language": "python", 526 | "name": "python2" 527 | }, 528 | "language_info": { 529 | "codemirror_mode": { 530 | "name": "ipython", 531 | "version": 2 532 | }, 533 | "file_extension": ".py", 534 | "mimetype": "text/x-python", 535 | "name": "python", 536 | "nbconvert_exporter": "python", 537 | "pygments_lexer": "ipython2", 538 | "version": "2.7.13" 539 | } 540 | }, 541 | "nbformat": 4, 542 | "nbformat_minor": 2 543 | } 544 | -------------------------------------------------------------------------------- /CNN.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### Convolution Neural Network" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | " - Conv Layer 1 (16 filters, KERNEL = 3X3, ReLU) \n", 15 | " - Pooling Layer 1 (Average, KERNEL = 2X2) \n", 16 | " - Conv Layer 2 (64 filters, KERNEL = 3X3, ReLU)\n", 17 | " - Pooling Layer 2 (Max, KERNEL = 2X2, ReLU) \n", 18 | " - FC 1 (1600, by 64x5x5)\n", 19 | " - FC2 (100)\n", 20 | " - Output (10)" 21 | ] 22 | }, 23 | { 24 | "cell_type": "markdown", 25 | "metadata": {}, 26 | "source": [ 27 | "Design reasoning\n", 28 | "1. Fashion data is more complex than digits, so we use more filters\n", 29 | "2. Choose Avg pooling on first conv layer to preserve more information\n", 30 | "3. 
Use Dropout to avoid overfitting. Although current start-of-art is Dropconnect, but it is not supported by PyTorch\n", 31 | "4. Use two layers of FC to reduce dimension" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": 6, 37 | "metadata": { 38 | "collapsed": true 39 | }, 40 | "outputs": [], 41 | "source": [ 42 | "import torch\n", 43 | "from torch.autograd import Variable\n", 44 | "import torch.nn as nn\n", 45 | "import torch.nn.functional as F\n", 46 | "import torchvision\n", 47 | "from torchvision import datasets, transforms\n", 48 | "import torch.optim as optim\n", 49 | "import torchvision.datasets as dset\n", 50 | "import numpy as np" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": 7, 56 | "metadata": { 57 | "collapsed": true 58 | }, 59 | "outputs": [], 60 | "source": [ 61 | "from torch_data_utils import load_fashion_mnist\n", 62 | "import cnn" 63 | ] 64 | }, 65 | { 66 | "cell_type": "code", 67 | "execution_count": 8, 68 | "metadata": { 69 | "collapsed": true 70 | }, 71 | "outputs": [], 72 | "source": [ 73 | "train_loader, test_loader = load_fashion_mnist()" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": 9, 79 | "metadata": { 80 | "collapsed": true 81 | }, 82 | "outputs": [], 83 | "source": [ 84 | "model = cnn.Model()" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": 10, 90 | "metadata": { 91 | "collapsed": false, 92 | "scrolled": true 93 | }, 94 | "outputs": [ 95 | { 96 | "name": "stdout", 97 | "output_type": "stream", 98 | "text": [ 99 | "Iter: 1000. Accuracy: 0.8312\n", 100 | "Iter: 2000. Accuracy: 0.8559\n", 101 | "Iter: 3000. Accuracy: 0.8614\n", 102 | "Iter: 4000. Accuracy: 0.8663\n", 103 | "Iter: 5000. Accuracy: 0.8711\n", 104 | "Iter: 6000. Accuracy: 0.8741\n", 105 | "Iter: 7000. Accuracy: 0.8749\n", 106 | "Iter: 8000. Accuracy: 0.8807\n", 107 | "Iter: 9000. Accuracy: 0.8804\n", 108 | "Iter: 10000. Accuracy: 0.8801\n", 109 | "Iter: 11000. Accuracy: 0.8823\n", 110 | "Iter: 12000. Accuracy: 0.8801\n", 111 | "Iter: 13000. Accuracy: 0.8895\n", 112 | "Iter: 14000. Accuracy: 0.8865\n", 113 | "Iter: 15000. Accuracy: 0.8883\n", 114 | "Iter: 16000. Accuracy: 0.8874\n", 115 | "Iter: 17000. Accuracy: 0.8922\n", 116 | "Iter: 18000. Accuracy: 0.8858\n", 117 | "Iter: 19000. Accuracy: 0.8874\n", 118 | "Iter: 20000. Accuracy: 0.8887\n", 119 | "Iter: 21000. Accuracy: 0.888\n", 120 | "Iter: 22000. Accuracy: 0.8926\n", 121 | "Iter: 23000. Accuracy: 0.8929\n", 122 | "Iter: 24000. Accuracy: 0.8916\n", 123 | "Iter: 25000. Accuracy: 0.8895\n", 124 | "Iter: 26000. Accuracy: 0.8906\n", 125 | "Iter: 27000. Accuracy: 0.8924\n", 126 | "Iter: 28000. 
Accuracy: 0.8908\n" 127 | ] 128 | } 129 | ], 130 | "source": [ 131 | "max_epochs = 30\n", 132 | "report_size = 1000\n", 133 | "iter = 0\n", 134 | "optimizer = torch.optim.Adam(model.parameters(), lr=0.001)\n", 135 | "criterion = nn.CrossEntropyLoss()\n", 136 | "log = {}\n", 137 | "\n", 138 | "for epoch in range(max_epochs):\n", 139 | " for i, (features, labels) in enumerate(train_loader):\n", 140 | " \n", 141 | " features = Variable(features)\n", 142 | " labels = Variable(labels)\n", 143 | " \n", 144 | " optimizer.zero_grad()\n", 145 | " outputs = model(features)\n", 146 | " \n", 147 | " loss = criterion(outputs, labels)\n", 148 | " loss.backward()\n", 149 | " \n", 150 | " optimizer.step()\n", 151 | " \n", 152 | " iter += 1\n", 153 | " \n", 154 | " if iter % report_size == 0:\n", 155 | " correct = 0.\n", 156 | " total = 0.\n", 157 | " \n", 158 | " for features, labels in test_loader:\n", 159 | " features = Variable(features)\n", 160 | " result = model(features)\n", 161 | " \n", 162 | " _, predicted = torch.max(result.data, 1)\n", 163 | " \n", 164 | " total += labels.size(0)\n", 165 | " correct += (predicted == labels).sum()\n", 166 | " \n", 167 | " accuracy = correct / total\n", 168 | " log[iter] = accuracy\n", 169 | " print('Iter: {}. Accuracy: {}'.format(iter, accuracy))" 170 | ] 171 | }, 172 | { 173 | "cell_type": "code", 174 | "execution_count": 12, 175 | "metadata": { 176 | "collapsed": true 177 | }, 178 | "outputs": [], 179 | "source": [ 180 | "%matplotlib inline \n", 181 | "import matplotlib\n", 182 | "import matplotlib.pyplot as plt\n", 183 | "import numpy as np\n", 184 | "matplotlib.rcParams.update({'font.size': 14})\n", 185 | "figsize = (8, 5)\n", 186 | " \n", 187 | " \n", 188 | "def plot(test_logs, size = figsize):\n", 189 | " \n", 190 | " plt.figure(1, figsize=size)\n", 191 | " \n", 192 | " lists = sorted(test_logs.items()) \n", 193 | " x, y = zip(*lists) \n", 194 | " plt.plot(x, y, label = 'Testing')\n", 195 | " \n", 196 | " plt.ylabel('Accuracy ')\n", 197 | " plt.xlabel('Number of Iterations')\n", 198 | " plt.title('Test Accuracy VS. 
Number of Iterations')" 199 | ] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "execution_count": 13, 204 | "metadata": { 205 | "collapsed": false 206 | }, 207 | "outputs": [ 208 | { 209 | "data": { 210 | "image/png": "[base64-encoded PNG omitted: plot of test accuracy vs. number of iterations]"
eiNiNXApcKmkd0v6\nM0kHSzpL0nsGs5GIeAT4HqmD41GSDgKuB55lkB+YVfwOOCzfUTEpd0K8ktTH4SZJh0vaV9Jxkq6R\nNGErdT1GSorOzuucSOVZnUdJ3/RPlDRZ0k5V9reXdHngs5JOkDQjP9+T1JmzLpIuknSypJfnOk4B\nupwcWLs5QTBrrYuAjcUZ+dvv24B9gV+S7lQ4t4HbvAv4Namj5HeAhcDHC8s/Serc+LFc7g5SL/7l\nQ9jWu0jX/r+f/44Djo+IdUNse79LSd/Wf0M6yzItIlaQvr1vAn6Y234l6cO/5odrRHQD7wROzvVd\nAHykVObJPP9fSZcrav1I01zSnSTXAQ+Q75aIiKcGsW/P5e38ktSfYQLpko9ZWymiGZcGzczMrJP5\nDIKZmZlVcIJgZmZmFZwgmJmZWQUnCGZmZlbBCYKZmZlVcIJgZmZmFZwgmJmZWQUnCGZmZlbBCYKZ\nmZlV+H+eyjTX6qdbIgAAAABJRU5ErkJggg==\n", 211 | "text/plain": [ 212 | "" 213 | ] 214 | }, 215 | "metadata": {}, 216 | "output_type": "display_data" 217 | } 218 | ], 219 | "source": [ 220 | "plot(log)" 221 | ] 222 | } 223 | ], 224 | "metadata": { 225 | "kernelspec": { 226 | "display_name": "Python 2", 227 | "language": "python", 228 | "name": "python2" 229 | }, 230 | "language_info": { 231 | "codemirror_mode": { 232 | "name": "ipython", 233 | "version": 2 234 | }, 235 | "file_extension": ".py", 236 | "mimetype": "text/x-python", 237 | "name": "python", 238 | "nbconvert_exporter": "python", 239 | "pygments_lexer": "ipython2", 240 | "version": "2.7.13" 241 | } 242 | }, 243 | "nbformat": 4, 244 | "nbformat_minor": 2 245 | } 246 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/CNN-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### Convolution Neural Network" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | " - Conv Layer 1 (16 filters, KERNEL = 3X3, ReLU) \n", 15 | " - Pooling Layer 1 (Average, KERNEL = 2X2) \n", 16 | " - Conv Layer 2 (64 filters, KERNEL = 3X3, ReLU)\n", 17 | " - Pooling Layer 2 (Max, KERNEL = 2X2, ReLU) \n", 18 | " - FC 1 (1600, by 64x5x5)\n", 19 | " - FC2 (100)\n", 20 | " - Output (10)" 21 | ] 22 | }, 23 | { 24 | "cell_type": "markdown", 25 | "metadata": {}, 26 | "source": [ 27 | "Design reasoning\n", 28 | "1. Fashion data is more complex than digits, so we use more filters\n", 29 | "2. Choose Avg pooling on first conv layer to preserve more information\n", 30 | "3. Use Dropout to avoid overfitting. Although current start-of-art is Dropconnect, but it is not supported by PyTorch\n", 31 | "4. 
Use two layers of FC to reduce dimension" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": 6, 37 | "metadata": { 38 | "collapsed": true 39 | }, 40 | "outputs": [], 41 | "source": [ 42 | "import torch\n", 43 | "from torch.autograd import Variable\n", 44 | "import torch.nn as nn\n", 45 | "import torch.nn.functional as F\n", 46 | "import torchvision\n", 47 | "from torchvision import datasets, transforms\n", 48 | "import torch.optim as optim\n", 49 | "import torchvision.datasets as dset\n", 50 | "import numpy as np" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": 7, 56 | "metadata": { 57 | "collapsed": true 58 | }, 59 | "outputs": [], 60 | "source": [ 61 | "from torch_data_utils import load_fashion_mnist\n", 62 | "import cnn" 63 | ] 64 | }, 65 | { 66 | "cell_type": "code", 67 | "execution_count": 8, 68 | "metadata": { 69 | "collapsed": true 70 | }, 71 | "outputs": [], 72 | "source": [ 73 | "train_loader, test_loader = load_fashion_mnist()" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": 9, 79 | "metadata": { 80 | "collapsed": true 81 | }, 82 | "outputs": [], 83 | "source": [ 84 | "model = cnn.Model()" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": 10, 90 | "metadata": { 91 | "collapsed": false, 92 | "scrolled": true 93 | }, 94 | "outputs": [ 95 | { 96 | "name": "stdout", 97 | "output_type": "stream", 98 | "text": [ 99 | "Iter: 1000. Accuracy: 0.8312\n", 100 | "Iter: 2000. Accuracy: 0.8559\n", 101 | "Iter: 3000. Accuracy: 0.8614\n", 102 | "Iter: 4000. Accuracy: 0.8663\n", 103 | "Iter: 5000. Accuracy: 0.8711\n", 104 | "Iter: 6000. Accuracy: 0.8741\n", 105 | "Iter: 7000. Accuracy: 0.8749\n", 106 | "Iter: 8000. Accuracy: 0.8807\n", 107 | "Iter: 9000. Accuracy: 0.8804\n", 108 | "Iter: 10000. Accuracy: 0.8801\n", 109 | "Iter: 11000. Accuracy: 0.8823\n", 110 | "Iter: 12000. Accuracy: 0.8801\n", 111 | "Iter: 13000. Accuracy: 0.8895\n", 112 | "Iter: 14000. Accuracy: 0.8865\n", 113 | "Iter: 15000. Accuracy: 0.8883\n", 114 | "Iter: 16000. Accuracy: 0.8874\n", 115 | "Iter: 17000. Accuracy: 0.8922\n", 116 | "Iter: 18000. Accuracy: 0.8858\n", 117 | "Iter: 19000. Accuracy: 0.8874\n", 118 | "Iter: 20000. Accuracy: 0.8887\n", 119 | "Iter: 21000. Accuracy: 0.888\n", 120 | "Iter: 22000. Accuracy: 0.8926\n", 121 | "Iter: 23000. Accuracy: 0.8929\n", 122 | "Iter: 24000. Accuracy: 0.8916\n", 123 | "Iter: 25000. Accuracy: 0.8895\n", 124 | "Iter: 26000. Accuracy: 0.8906\n", 125 | "Iter: 27000. Accuracy: 0.8924\n", 126 | "Iter: 28000. 
Accuracy: 0.8908\n" 127 | ] 128 | } 129 | ], 130 | "source": [ 131 | "max_epochs = 30\n", 132 | "report_size = 1000\n", 133 | "iter = 0\n", 134 | "optimizer = torch.optim.Adam(model.parameters(), lr=0.001)\n", 135 | "criterion = nn.CrossEntropyLoss()\n", 136 | "log = {}\n", 137 | "\n", 138 | "for epoch in range(max_epochs):\n", 139 | " for i, (features, labels) in enumerate(train_loader):\n", 140 | " \n", 141 | " features = Variable(features)\n", 142 | " labels = Variable(labels)\n", 143 | " \n", 144 | " optimizer.zero_grad()\n", 145 | " outputs = model(features)\n", 146 | " \n", 147 | " loss = criterion(outputs, labels)\n", 148 | " loss.backward()\n", 149 | " \n", 150 | " optimizer.step()\n", 151 | " \n", 152 | " iter += 1\n", 153 | " \n", 154 | " if iter % report_size == 0:\n", 155 | " correct = 0.\n", 156 | " total = 0.\n", 157 | " \n", 158 | " for features, labels in test_loader:\n", 159 | " features = Variable(features)\n", 160 | " result = model(features)\n", 161 | " \n", 162 | " _, predicted = torch.max(result.data, 1)\n", 163 | " \n", 164 | " total += labels.size(0)\n", 165 | " correct += (predicted == labels).sum()\n", 166 | " \n", 167 | " accuracy = correct / total\n", 168 | " log[iter] = accuracy\n", 169 | " print('Iter: {}. Accuracy: {}'.format(iter, accuracy))" 170 | ] 171 | }, 172 | { 173 | "cell_type": "code", 174 | "execution_count": 12, 175 | "metadata": { 176 | "collapsed": true 177 | }, 178 | "outputs": [], 179 | "source": [ 180 | "%matplotlib inline \n", 181 | "import matplotlib\n", 182 | "import matplotlib.pyplot as plt\n", 183 | "import numpy as np\n", 184 | "matplotlib.rcParams.update({'font.size': 14})\n", 185 | "figsize = (8, 5)\n", 186 | " \n", 187 | " \n", 188 | "def plot(test_logs, size = figsize):\n", 189 | " \n", 190 | " plt.figure(1, figsize=size)\n", 191 | " \n", 192 | " lists = sorted(test_logs.items()) \n", 193 | " x, y = zip(*lists) \n", 194 | " plt.plot(x, y, label = 'Testing')\n", 195 | " \n", 196 | " plt.ylabel('Accuracy ')\n", 197 | " plt.xlabel('Number of Iterations')\n", 198 | " plt.title('Test Accuracy VS. 
Number of Iterations')" 199 | ] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "execution_count": 13, 204 | "metadata": { 205 | "collapsed": false 206 | }, 207 | "outputs": [ 208 | { 209 | "data": { 210 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAggAAAFZCAYAAAD9xtesAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzs3Xd4VGXax/HvTUgIAUJvoRcRgwgI\nWAC7qOtKsYFiXxX7Wl677rqrrq5l7brYUexdwYJ9RZTeq0DoNaEnIaQ97x/nBMdhEiYhmZkkv891\nzZXMOc85c5+p93naMeccIiIiIoFqRDsAERERiT1KEERERGQvShBERERkL0oQREREZC9KEERERGQv\nShBERERkL0oQRKTaMbNTzMyZ2dBoxxIuM7vFzJabWYGZTYp2POXNzDaY2ahoxyG/U4JQTfhfhuHc\nLi7nx002s3+Y2YAybNvAzHL8uHqVZ1xVmZnd6T9nx5VQZoRf5sKAZYPN7Ecz22hmu8xshZl9bGZn\nlzGOK/3H2GVmLUOs/9bMFpVl39WNmZ0IPAz8DFwC3FNC2X/7z3uDgGUXmtm1FR9pyfz32N3RjkPC\nUzPaAUjEXBB0fyRwBPCXoOW/lPPjJuN9meXgfbmVxjDAARuA84GZ5RtalfUmcD8wAvihmDLnAdnA\nR+AlFcC/gP8BDwGZQCfgGLz3yPv7EU8icBtww37so7o7Fu+zcLlzLqcM218ItAaeKc+gymAw3mf5\n/hDr2gEFkQ1HSqIEoZpwzr0ReN8/IzkseHmMOR/4AlgBnGtmtzjnCqMbUmhmluScy452HADOuZVm\n9jNwppld45zLDVxvZk2Bk4D3nXOZZpYI3AV85Zz7U/D+zKz5foY0C7jCzB5yzq3fz31VKmZWxzmX\nVQ67agbkljE5qDDl+b53zu0uj/1I+VETg4RkZjXM7CYzm+9X8280sxfMrGFQucPMbLyZZfhVySvN\n7DUzizezrsBqv+iDAc0Y+2xnNLN2wADgHeBtoCVwfDFlDzKz98ws3Y9hsZk9HFSmtZm9bGbrzGy3\n35Y7ysxq++v/bWZ7ffkGVJO3CFi2wcw+8duxp/nb/dVfd4aZfW5ma80s16+mf9DMEkoTt5kN8h/3\n1BDbDdtXEwLwBtAQ2OsHH69mpqZfBqAFkEQxNTzOuY0lPE44/gXEA7eXVMjMuvrHdU7Q8kR/+e0B\ny4qq0Tub2TtmtsPMNpnZP/z17cxsrJntNLP1ZnZ9MQ9b08we8F/TbP+16xAitlQz+9DMtvifh2lm\nNiSoTNF7ZYCZPWdmm4CMfRxzvJndY2Zp/vtyhR9PrcBjBy4HagV8hs4pab9BjzEJOAE4MGD7nID1\n4X7W9+t9b2bvAJcGHceez5aF6INgZs3N7BX/tc0xs9lmdlFQmaL3zQ1mdo3/XOaY2a9m1jOobIqZ\nvWpma/zne4OZjTOzLuE+n9WJahCkOK/gVVG/CjwFtAeuA3qb2ZHOuVwzSwG+BtYBDwLb8aoJhwC1\n/OV/9bd/Fxjn7/u3MB7/PCALGOec22VmS/FqFL4NLGRe34T/AbnAC3i1DR2BocCtfpk2wGSgvl9m\nIdAKONNftivsZ+V3qXiJyyh/n2n+8svwquefAHYA/f04WgIXlyLuL4FN/F6LEuh8YBXwYwnxvY/3\nvJ8HfBq07jwgHe+1A+91ygMGm9lzzrmtJR556S0FxgAjzezf5VyL8AEwD68JYwhwj5ltAa4FvvGX\nXwg8YWZTnXPBTWh/9/8+BDQBrgd+MLNDnHM7AMzsEGACXrL7b7z35dnAx2Y23DkX3PzyPN7zey9e\nE1tJXsV7Pd4D/gMcBtwBHAScjvf+uADvh7U/vzcJ/rqP/Qa6x993I/zPBH+syt/nZz2g7P68758B\nmgcdB8C2UEGbWR28z0gHf9sVwFnAaDNr6Jx7ImiTi/Cas54D4vzH/9DMujjnio73U7yms2eAlXg1\nM8f7y8L5XqpenHO6VcMbMBrIKWbd8XjtncOClp/gL7/Qvz/cv39wCY/T2i9zeynjmw+8EXD/Xrwv\nntpB5SbhfcG0DlpuAf+/A+QDvUI8jvl//x3q+QCu9ONvEbBsg7/spBDlk0IsuxfvC7l5KeN+Au/H\nqG7AssZ4PxoPhPEcfoTXz6BewLKOfuxPB5V90F+eiZec3A30DYynDO+xoueuJ94XcB7wZMD6b4FF\nAfe7+uXPCdpPYvB7yH+9HPBswLJ4YCNQCPxfwPImwG7gpYBlp/jbrw56fk71l98dsOwnvP4vCYGv\nk788LcTxTgDiwnh+DvPL/zdoedGxnRiw7KVQ789i9lu0fYPinuuA5WF91svxfV/scfj7HxVw/1b/\n8YYHLKvpP7+ZQP2g980GIDmgbFEfphP9+839+9eW9T1d3W5qYpBQhgFbgO/NrEnRDZiNV0tQVLW9\n3f87yMzKrTbKzA7l9zOVIm8D9fA6ORWVaw0cjvfFvyZwH87/RjCzeH+bT5xze3VyLCpXBkudc18H\nL3R+e6xfbdvAf94m4DXn9Qw3bt9reFX/ZwYsG473QzgmjBjfAGrjnYkWGRGwLtCdeGd18/H6J9wH\nTAHmm1nvMB6rRM65Zf5jjvRrnsrLSwGPkQfMwPvxfjlgeQbemW7HENuPds7tDCj7BbAMOA3Ar/4+\nCq8GLDngs9AY+AroYF5zWKDn3e9nrCX5s//3saDljwatr0jhftaLlPl9XwZ/Btbi1a4UPU4+8CRQ\nB6/jZqB3nV/r45vg/y163bPwEpbjLWCEhxRPCYKE0gWvOjI9xK0+XrUceFXUnwEPAJvN7FMzu9T8\ndv39cD7ehznNvDbmznhnn8v8dUU6+X/nlbCvFLwfyZLKlEVaqIVm1sPMxuPFv5U/VuXX9/+GEzd+\nQjOXPx7z+cB059zCMGL8HK+W4ryAZefhfclPDnos55x71Tl3ONAA78zyZbyzs8+D26PL6H68M8AS\n+yKU0qqg+9uBTOdccLX1drw+GcGWhFj2G141O3ifBfBqWII/C//y1zXjj5btM2pPO7yarT+U9xOa\njIAYKlK4n/Ui+/O+L612wG8hkvii9377oOXB74WiprKGAM65TLzOuEOATWb2k5ndVs4Ja5WiPggS\nSg1gPV7bbSgZAM4bUTDEzI7Ey/ZPwjuju83MjnDObSntA5tZHHAO3hnCghBF2ppZE/9LtDwVV5MQ\nV8zyvfotmFljvH4Bm/F+BNP8cu2BFylbQv468JD/JZYIHEmYwwWdc7vN7
H3gL2bWDK+5pyvwz31s\ntxNveOQPfnv+LcBAAs7kysI5t8zMxgCXm9m/QxUpZtPiXgMIPSyuuLN3Kym+YhS9Zg8R1P8lwOKg\n+2Xp0xItYX3WA0TqfV8W+3zdnXMPmdkHeEnCSXifhbvM7E/OuYkRiLFSUYIgoSwD+gETXBhDj5xz\nv+J1mrrbzE7Ha/u+BK9jVGmr8E/A69h0F3t3GqqL15FqGF5HpKIzr4NL2N86vC+rksqAd7ZRy8wS\n3R+HkgVXH5dkIN7Z9ymBZ+hmNiioXDhxF3kTr015BF5zQz5/bHrZlzfwesAP5/czrtIMbZ3i/y2v\ns6z78Trd3RFiXdEZX3D1b2leg9I6oJhlK/z/i16rXOdccQlCWa3E+w7uREBNhv+D2yQghvJQ3Oew\nVJ/1YoT7vi8pjlBWAgeYmQXVInT1/64obaCwp7nrMeAxM2uPNwz3diBUvNWamhgklHfx2rnvCl5h\nZjWLqpvNrFGIbWf4f4u+5IvGgIdbRX0+sBN41Dn3QdBtNF7b6PkAfvv9ZOAyv10/ME7zy+ThNYMM\ntRCzMRaV4/cfgqMD1iXzx+r9fSk6g9lzxuLXiNwYWCicuAPKrserqj0fr3lgvHNuUylimoD3RXsB\nXs3MFOfc0qDHTDazw4vZvmiY5aKA8p0txFDAcDjn0vBqRS5n76QjHe+1Pzpo+dVleawwXWxm9Yru\nmDestDNe8wzOudV4ye9VFmI+CPPmlCirolE9wTVCNwWtLw9ZhP4MhvVZ34ew3vcBcdQys6Qw9jsO\nr9brrKD9/tXfz49h7GMPM6tj3pwfgVbi9cFQn4QQVIMge3HOfW1mLwN/8zsMfovXB+AAvA/rzXgj\nA0aa2SXAJ3g/sHXxOrrl4c/Q55zbZmbLgPPNrOjDuNQ5Ny34cf0vjdPxJuzJDV7v+8yPq6P/Y3MN\n3lComWb2IrAc70z5dLyOjuD1hj4W+NnMnsf7sWvhH8tJeL2fx/l/Xzezok5il+HVQOw1TXAxfsJr\n637LzJ7B600/DK8PRLBw4i7yOr/XGvydUnDOOTN7i9/P2ENV7ScDk8wbLz8ery03GTgZr7f/BH5v\nTwZvvoRt/H4mV1r341VpH0RA9bwf68vA9Wa2A+/M7nhCdy4sL1vx3hej+X2Y42q84X5FrsB7beeZ\n2Ut4VejN8Zp72gPdyvLAzrmpZvYmcLVfa/AT3siGi/A61X5Xlv0WYxpec+B/gOlAnnPu/VJ81ktS\nmvd90ef+OTP7Bi+5+MSFngDqObzhnWP8BHaFH9PRwI3Oue0htilJd2Cc3+y2AH9oL94wyv+Ucl/V\nQ7SHUegWnRslDHP01xveF+N0vKFyO4A5eG2xrfwyffF+uFbiTaW8EW/Mfr+gfQ0ApvplHAFDmYLK\njSBoaFWIMr39Mn8PWNYd+Bjvy34XXgLw76Dt2uH90G7y40jD+wKqHVDmcD/O3Xg/2NdS/DDHT4qJ\nrz/eGWeWX+4p4FBCD9/bZ9x+uUS8H+TtBA3zDPO1TvUfPw9oGmJ9At7U25/4x73Lj3828DeChrD5\nx7XXkLkQ+90zzDHEupf8dYuCltfx35s7/ON9E6+mobhhjg2Ctn8H2Bbi8SYBswLuFw1zPAuvA+IG\n/33+JdApxPad/PfPBrxhpmvwksqzwzneEp6jeOAf/vOei5ecPQDUCvF87c8wx3r+c7nFX5cTsG6f\nn/Xyet/jnZQ+w+/DUfd8tgga5ugva47XrJiO97mcA1wcVKZomOMNIT43e943eB0un8Xr5Jjpv7+m\nUML3TXW/FY0BF5EY5Q/VLPpyvjTa8YhI9aA+CCKx70y8oWivRTsQEak+VIMgEqP84aPd8fodrHfO\n9Y1ySCJSjagGQSR2XY/XT6KkceoiIhVCNQgiIiKyF9UgiIiIyF6q9TwITZo0ce3bt492GCIiIhEz\nffr0DOfcPif5qtYJQvv27Zk2ba/5ekRERKosf9K6fVITg4iIiOxFCYKIiIjsRQmCiIiI7EUJgoiI\niOxFCYKIiIjsRQmCiIiI7EUJgoiIiOxFCYKIiIjsRQmCiIiI7KVaz6QoIiIVKze/kG3ZuWzOymVr\nlve3hhlHd2lCvcT4aIcnJVCCICIipVJQ6Ji7djsZO3ezJTuXLQE//nv++st35uSH3EdCzRocf2Az\nBvdM4fiuzUiMj4vwUci+KEEQEZGwOef469sz+Xzu+j8sT6hZg8Z1EmiYlEDjugm0bZREozoJIW87\nduUxbs56xs1Zz1fzN1AnIY6TurVgcI8UBhzQhPi4qtP6nZG5mxkrt9KvcxPq1qpcP7mVK1oRkQoy\nOW0zG3fuZnCPlGiHEtOe/n4pn89dzzXHdeKk1BZ7fvSTEuIws7D306d9I/52WiqT0jYzdvY6vpy3\ngY9nrqVBUjx/Orglg3ukcFiHRsTVCH+fsWTdtl288FMa70xdRU5eIXVr1eSs3q258Mh2dGxaN9rh\nhcWcc9GOIWr69OnjdDVHEflt406GPjuRnLwCxl13FKkpydEOKSaNn7+BK8ZM5/RerXhsWI9SJQT7\nkptfyE+/pTN2zjq+WbCR7NwCmifX4s/dUxjcM4UereuX6+NVlBUZWfz3x2V8NHMNzsHQXq04tXsL\nPpu1js/nrievwHFMl6Zc3K89x3RpSo0oJEBmNt0512ef5ZQgKEEQqc525OQx9JmJ7NydT35BIQc0\nr8e7I4+oFD9GkbR4w07OeG4inZvV5d0rjqzQPgPZufl8t3ATY2ev48fF6eQWFNK2URKDerTk9F6t\n6NysXoU9dlkt3rCTZ39Yyrg566gZV4Nz+rZh5NEdad0waU+ZTTtzeHvyat6cvJJNO3fTvnESFxzZ\nnrP7tCY5gh02lSCEQQmCSPXmnOOKMdP5btEm3r78CJZuyuTOj+fy9Lm9GKSmhj22ZuUy+Nmf2Z1X\nyGfXDqBF/cSIPfb2XXmMn7+BsbPXMXFpBoUOTu/ViltOPpCUBrUjFkdxZq3exrM/LOWbBRupkxDH\n+Ue049KjOtCsXvHPUW5+IV/OW89rv6xgxqptJCXEccahrbjoyPYc0Lzikx8lCGFQgiBSvf33x2U8\n9NUi/nZaKpcO6EBBoWPIsz+zOTOX7/7vGJIS1E0rr6CQi16ZwrQVW3nniiM4tG3DqMWSkbmbl39e\nzss/L8eAy4/qyJXHdop45z/nHJPStvDsD0v5eWkG9WvHc3G/9lzSvz0NkhJKta+5a7Yz+pcVjJ29\njtyCQgZ0bsJF/dpzfNdmFdb/QglCGJQgiFRfE5dmcMHLkzm1e0uePrfXniaFaSu2cNaoX7n2uM7c\nfPKBUY4y+v7x2XxG/7KCR8/uwVm9W0c7HADWbM3mkfGL+XTWOprUrcVNA7swrE9ralbw6AfnHD8u\nTueZH5YyfeVWmtStxeVHdeC8I9rtd5KyOXM370xdzZhfV7JhRw5tGtXmgiPaMbxPW+onlW/zQ8wm\nCGZ2NXAL0BKYD9zgnJtQQvkR
wK1AF2AH8C1ws3Nug78+HrgDuAhoBSwGbnPOfbWvWJQgiFRPa7ft\nYtDTP9OkbgIfX92fOkFf7je+O4vP56znm5uOpl3jOlGKMvrem7qaWz+cw6UDOvC301KjHc5eZq3e\nxr8+X8DUFVvp0rwud/05lWO6NC33x8nNL+SbBRt59oelLFi/g1YNanPFMR0Z1qdNuffFyCso5Ov5\nG3ntlxVMWbGFxPgavHV5+dbcxGSCYGbDgTeAq4Gf/b+XAKnOuVUhyvcHfgJuBj4BmgPPAVudcyf4\nZR4CLgQuAxYCJwOPAf2cczNLikcJgkj1szu/gGGjfmVZehafXds/5JCzjTtyOO7RH+nfuQkvXrjP\n79EqafrKLZzzwiSO6NiYVy/uW+Fn52XlnOOreRv491eLWLk5m6O7NOWuUw/iwBb715afvnM3Pyze\nxA+LNjFhSQaZu/Pp2KQOVx7bidN7tYrIXA3z123n/WlruOPUrtSqWX6JSKwmCJOBOc65ywOWLQE+\ncM7dEaL8zcB1zrl2AcsuAZ52ztX1768DHnLOPRlQ5kNgl3Pu/JLiUYIgUv3c+fFc3pq8ilHn9+aU\ng1sUW66of8JrfzmsQs5KY9m6bbsY/MxE6tSK49Nr+pe6XT0acvMLef3XFTz13RIyd+czvG8bbhzY\npcTOgoEK/dkhv1+0iR8Wb2LOmu0ANE+uxfFdm3HiQc059sCK6xcQSeEmCBHr2WFmCUBv4NGgVV8D\n/YrZbCLwgJkNAsYBjYFzgC8CytQCcoK22wUM2N+YRaRqeW/aat6avIqrju1UYnIA8JcB7Xl36ir+\nOXY+X11/NAk1Y/MMurzl5BVwxZjp5OQV8Pblh1eK5AC8mRwvO6ojZx7amqe+X8KYX1fy2ax1XHVs\nJy47qmPIpoCdOXn8vCTDTwrSycjcjRn0atOAm0/qwnFdm5HaMrnaDnmNWA2CmaUAa4FjnHM/BSz/\nO3Cecy5kbyAzOwMYDdTGS2i+AYY453b5698CegFDgSXACcCnQJxzrlaI/Y0ERgK0bdu298qVK8vr\nEEUkhs1bu50z/vsLfds35LVLDguryvz7RRv5y+hp3P3ng7jsqI4RiDK6nHNc/84sxs5Zx4sX9OHE\n1ObRDqnMlmdk8e8vFzJ+/kZa1k/k1lMOZEiPVizfnMUPizbx/aJNTF2xhbwCR3JiTY7u0pTjuzbj\nmC5NaVx3r5+OKiXmmhjKkiCYWSpeQvAEMB6vY+MjwCzn3IV+mabAi8AgwAHL8Doy/sU5V+IgWTUx\niBTvpQlpbM3O5YYTu1T6ufG3Zedy2tM/U1DoGHfdgFL9AFzy6hSmrtjK9zcfE3Z1dWVV1Kxyy8kH\ncs1xnaMdTrmYnLaZ+z9fyNy120lOrMkO/+JRXZrX5biuzTj+wGb0btcwZvtYVISYa2IAMoACvI6G\ngZoDG4rZ5g5ginPuEf/+HDPLAiaY2Z3OuTXOuXRgqJkl4jVBrAP+DaSV+xGIVBNbs3J5+KvF5BYU\nMnX5Vp4971Ca1qucZ1UFhd5Z8aYdu3nvyiNLfXb4t9NSOfmJn3j4q8U8enaPCooy+r5ftJGHxy/i\ntENacvWxnaIdTrk5vGNjPr2mP5/OXsuPi9Pp064hx3Vt9ocZDiW0iKVMzrlcYDowMGjVQOCXYjZL\nwksqAhXd/0Pszrkc59xavKTnTLxmBhEpgw9nrCG3oJAbTjyAOWu3Mejpn5m1elu0wyqTJ79bwv9+\nS+cfg7vRs02DUm/fsWldLh3QkQ+mr2Hmqq0VEGH0Ld2UyfVvzyK1ZTKPnFW+11iIBTVqGKf3as2T\n5/TigiPbKzkIU6TrVB4DLjazy8zsIDN7EkgBRgGY2etm9npA+bHAEDO7ysw6+sMenwJmFA2LNLPD\nzewMf/1RwFd4x/VwJA9MpKpwzvHWlFUc2rYBN5zYhQ+v6kdcDWPY87/y3rTV0Q6vVL5buJGnvlvC\n2b1bc+5hbcq8n2uP70yzerX4x2fzKSyMjcnltmfn8d7U1TwyfhGfzlrL0k07KShDbNt35THy9Wkk\n1KzBCxf2oXZCxV1jQSqXiM5P6Zx718waA3fj9SeYB5zqnCvqKdg2qPxoM6sHXAv8B9gOfA/cFlAs\nEbgf6Ahk4o1wuMA5VzlPd0SibMryLaSlZ/HIWYcA0C2lPmOvG8B1b8/g1g/mMHfNdv52WmrM9+pf\nuTmLG9+dRbeUZO4bevB+nRXXrVWTO07tyo3vzuaDGWsY1qfsycb+yM7N59uFm/hs1jr+99sm8goc\nZlDUlSwxvgYHtkgmtWU9Ulsmk5qSTNcWyXtNBFWkoNDx17dnsnprNm9edgStYuDaBhI7NNWyOimK\n/MEN78zku0WbmHLniX84m8wvKOTh8Yt54ac0+rZvyHPn9S7XfgmFhY6vF2zgv/9LI2Pnbnq3a8hh\nHRpxWIdGdG5at1SXxd2VW8AZ//2Fddt2Me66AbRptP9Vys45zvzvL6zaks33Nx8bsavv7c4v4Kff\nMvhs9jq+XbCRXXneZZBPOySFwT1SOKhlMsvSM1mwbgcL1u9gwbodLNywg23ZeQCYQbtGSaSmJO9J\nGlJb1qd5ci0e/HIRL/yUxgOnd2fE4W33EYlUFTE3iiEWKUEQ+aOtWbkc/uB3nNO3DfcOOThkmU9n\nreW2D+fQoHYCoy7oXaZ2/UD5BYWMnbOO535YxpJNmbRvnES3lPpMXbGFTTt3A9AgKZ6+7RtxWPtG\n9O3QiG4pycWOrHDO8X/vzebjWWt55eK+HHdgs/2KL9DcNdsZ/OzPXNq/A3dX4NTD+QWFTErbwmez\n1/LVvA3syMmnYVI8f+reksE9UujbvlGJE/Y451i/PcdLFtb7icP6HazcnL2nTIOkeLZl53HBEe24\nb2jo11qqplgcxSAiMe7DGWvIzS/k3MOKP5sc0rMVnZvV5Yox0xk26lfuH3oww/qWvsp9d34BH0xf\nw6j/LWP1ll10bVGPp87txZ+7tySuhuGcY9WWbKYs38KU5VuYumIL3yzYCEBSQhyHtvVqGPq2b0Sv\ntg32TITzxqSVfDRzLTee2KVckwOA7q3rc07fNoz+ZQXnHNaGzs3K79K8hYWOmau38tmsdXw+dz0Z\nmbnUrVWTk1KbM6hnCgM6Nwl7uKmZkdKgNikNav9hLoOdOXks3rBzT01DvcSa3HpK13I7BqlaVIOg\nGgQRwDvrPPGx/5FcO56Pr+6/z/Jbs3K57u2Z/Lw0gwuOaBd2v4Ts3HzemryKFyeksXHHbnq0acC1\nx3XmhK7N9tmMsGlHDlNWbGHq8i1MXr6FxRt34hzExxmHtG5A91b1eXPySo46oCkvXdinVM0S4dqc\nuZtjH/2RHq0bMObSw/a7x//STTt5f/oaxs1ez9ptu0ioWYMTujZjcI8UjuvarNwvBiSiGgQRKZWp\nK7ayLD2Lh/3OifvSsE4Coy/pyyPjF/P8T2ks2rCjxH4J23fl8fovK3hl4nK2ZudxZMfGP
DasJ/06\nNQ77R7ZZciKnHZLCaYekePvMzmP6Ki9ZmLp8C29MWknrhrV5fFjPCkkOABr7lxf+59gFfL1gIyd3\nK3nK5lAKCh3fL9rEa7+s4OelGdSsYQw4oAn/d1IXBqY2p16E+jeIlEQ1CKpBEAG8Sxx/u2Ajk+86\ngaSE0p07fDZ7Hbd+MJsGtRP47/mH0ivg0rQZmbt5+efljPl1JZm78zm+azOuOa4zvduV3+Vri+zK\n9aZJqeihevkFhZz61ASycwv49qZjwj7L356dx7vTVjFm0kpWb9lFy/qJnH9EO4b3bUOTKj69r8QO\n1SCISNi2Zefy+dz1DO/TptTJAcDgHil0blqXK96YxvDnJ3H/0IM5qksTnv9fGu9MXcXu/EJOPbgl\nVx/XiW4p9SvgCDyRGsNfM64G/xjUjREvTebFn9K47oQDSiy/aMMOXvtlJR/PXENOXiGHdWjEHX86\niJNSm1erKX6lclGCICJ8OGPtPjsn7ktqSjKfXTOAv74zk1s/nENcDcOAob1acdWxnejUtG75BRwD\n+nVuwqndW/Dsj0s5s3drUoLmEMgvKOTbhRsZ/csKJqVtoVbNGgzt2YqL+rUnNSU5SlGLhE8Jgkg1\n55zj7Smr6NmmwX7/cDWsk8CrF/flmR+Wsi07j8uO6lClp7W989SD+G7hJh74YiHPjDgU8DpvvjN1\nNW9MWsnabbto1aA2t/+pK8P7tKFhncpx6WQRUIIgUu1NW7mVpZsyefjM8Don7kvNuBrccGKXctlX\nrGvdMImrju3EE98uoX/nVcxctZVPZ61jd34h/To15u+DUjnxoOYlzlkgEquUIIhUc29PXkXdWjU5\nrUfLaIdSKV15TCfen7aGOz6aS+34OM7q3ZqL+rWnS/PymyNBJBqUIIhUY9uycxk3dz3D+rQuU+dE\ngcT4OEad35tZa7Yx+JAU6ic0qTO4AAAgAElEQVRpiKJUDfpGEKnGPvI7J444rF20Q6nUureuT/fW\nFTc6QyQaNL5GpJoq6pzYoxw6J4pI1aMEQaSamr5yK0s2ZTLisOhculhEYpsSBJFq6q0pfudEf9pi\nEZFAShBEqqHt2Xl8Pmc9Q3qmUKeWuiKJyN6UIIhUQx/NXMPu/EJGHF72mRNFpGpTgiBSzezpnNi6\nfoVeF0FEKjclCCLVzIxVW/ltY+Z+XXdBRKo+JQgi1cxbk1dTJyGOQT3UOVFEiqcEQaQa2Z6dx7g5\n6xjSq5U6J4pIiZQgiFQjHxd1TlTzgojsgxIEkWrC65y4mkNa1+fgVuqcKCIlU4IgEsMmp23m7FG/\n8JfRU1m6aed+7WvGqm0s3rhTnRNFJCxqhBSJQeu27eLBLxcxdvY6WtZPJHP3Tk55YgIX9WvP9Sce\nQHJi6a8Y+PaUVeqcKCJhU4IgEkNy8gp4aUIaz/6wjALn+OsJB3DVMZ3Izs3n0a8X88rE5Xw6ay23\nntyVs3q3pkYNC2u/23d5nRPPOLQ1ddU5UUTCoG8KkRjgnOPbhZu4b9wCVm3J5pRuLbjrzwfRplES\nALUT4njwjEMYcVg77vlsHrd+OIc3J6/kH4O70attw33u/5OZa8nJU+dEEQmfEgSRKFu6KZN7xy3g\np9/S6dysLm9cejgDDmgSsmz31vX58Kp+fDJrLQ9+sYjTn/uFs3q35tZTDqRZvcSQ2xTNnNi9lTon\nikj4lCCIRMnOnDye+m4Jr05cQe2EOP5+WioXHNmO+LiS+w6bGaf3as3A1BY88/1SXv45ja/mbeD6\nEw7gon7tSaj5x+1nrt7Gog07eeD07hV5OCJSxUQ8QTCzq4FbgJbAfOAG59yEEsqPAG4FugA7gG+B\nm51zGwLKXA9cBbQDNgOfArc55zIr6jikcvpy7nqWbMqkYZ0EGtdJoGFSAo3ren8bJsVTcx8/zuWh\nsNDx4Yw1PPTVYjZn7WZ4nzbcfPKBNKlbq1T7qVurJrf/qSvD+7bhvnEL+NcXC3l76iruGdSNY7o0\n3VPu7cle58TBPdU5UUTCF9EEwcyGA08CVwM/+3+/NLNU59yqEOX7A2OAm4FPgObAc8CbwAl+mRHA\nw8BlwASgI/AykAhcWsGHJJXIxKUZXPXmjBLL1K8dT6M6CTQqSh7qJNCwTgKN6sTTqE6t3/8mJdCo\nbgJ1EuIwC6+jIMCs1du457P5zF69jV5tG/DKxX04pHWD/TquDk3q8MrFffl+0UbuHbuAi16ZwokH\nNefvp6VSPymesXPWcXovdU4UkdIx51zkHsxsMjDHOXd5wLIlwAfOuTtClL8ZuM451y5g2SXA0865\nuv79Z4DuzrljAsr8EzjTOXdwSfH06dPHTZs2bX8PSyqBXbkFnPzET9Qw+Oy6AeTkFrAlO5ctmbne\n36w/3rZm57I50/u7JSuXvILQn5OEmjVolJTwe41EQM1EUTLRsE489WrFM2bSCt6btoam9Wpx+yld\nOb1Xq7BHIYRrd34Br05cwdPfLSGv0NG7bUN+TdvM2GsH0L21+h+ICJjZdOdcn32Vi9gphZklAL2B\nR4NWfQ30K2azicADZjYIGAc0Bs4Bvggo8zNwgZkd4ZybZGZtgcFBZaSae+ybxazaks07I48gOTGe\n5MR4miWH7tQXzDlH5u58tmTlsjkrl62ByYSfZGzN9tat2ZrN5qxcdubk77Wf+DjjiqM7cu3xnalX\nhnkMwlGrZhxXHtOJ03u14qEvF/HRzLUc3CpZyYGIlFok6xybAHHAxqDlG4ETQ23gnPvVzM7Ba1Ko\njRfvN8BFAWXeMbPGwE/m1fXWxGuWuC3UPs1sJDASoG1bDfmqDmat3sbLPy9nxOFtOaJj41Jvb2bU\nS4ynXmI87RrXCWubvIJCL5EIqKXo3qp+2Nvvr+bJiTw2vCeXHdWR+kkVk4yISNUW042SZpYKPA3c\nB4zH69j4CPA8cKFf5hjgb3j9GSYDnfH6OfwT+HvwPp1zLwAvgNfEUOEHIVGVm1/IbR/MoVm9RG7/\nU9eIPW58XA2aJSeGXUtRUVJTkqP6+CJSeUUyQcgACvA6GgZqDmzYuzgAdwBTnHOP+PfnmFkWMMHM\n7nTOrQHuB952zr3kl5lrZnWAl8zsXufc3nW9Um2M+t8yFm/cyUsX9inT9MQiItVVxC7W5JzLBaYD\nA4NWDQR+KWazJLykIlDR/Rr7KFO+vb+k0lmycSdPf7+EQT1SODE1OC8VEZGSRLqJ4TFgjJlNweuA\neCWQAowCMLPXAZxzF/rlxwIvmtlV/N7E8AQwI2BY5FjgJjObxu9NDPcB41R7UH0VFDpu/XAOdWvV\n5J5BqdEOR0Sk0ologuCce9fvUHg33o/9POBU59xKv0jboPKjzawecC3wH2A78D1/7IB4P+DwkoLW\neE0ZY4G7KvBQJMa9/usKZq7axhPDe5Z6AiIREYnwPAixRvMgVE2rt2Rz0uM/cXjHRrx6cd9STWQk\nIlLVhTsPQsT6IIhEgnOOOz+eSw2Df53eXcmB
iEgZKUGQKuXDGWuZsCSD2//UlVYNakc7HBGRSksJ\nglQZm3bmcN+4BfRt35DzDm+37w1ERKRYShCkyvjHZ/PZlVfAv888pNyvcSAiUt0oQZAq4at5G/hi\n7gauP+EAOjWtG+1wREQqPSUIUultz87jb5/OI7VlMiOP7hjtcEREqoSYvhaDSDge+GIhW7JyefXi\nvsTHKecVESkP+jaVSm3i0gzenbaay4/qyMGtdEljEZHyogRBKq3s3Hzu+GguHZrU4YYTD4h2OCIi\nVYqaGKTSeuzr31i1JZt3Rh5BYnxctMMREalSVIMgldKs1dt4ZeJyzju8LUd0bBztcEREqhwlCFLp\n5OYXctsHc2hWL5Hb/9Q12uGIiFRJamKQSmX7rjye+2Epizfu5KUL+1AvMT7aIYmIVElKECRm5eYX\nsmjDDmat3rbnlpaeBcDQnimcmNo8yhGKiFRdShAkJjjnWLUl+w/JwPx1O8jNLwSgSd0EerZpwBm9\nWtGjTQOOVL8DEZEKpQRBomL7rjxmrtq6JxmYvXobW7PzAEiMr0H3VvW56Mh29GzTkB5t6tOqQW1d\nullEJIKUIEjEjZ+/gf97bzaZu/Mxgy7N6jEwtfmeZODA5vWoqRkRRUSiSgmCRExhoeOJb3/jqe+X\n0qN1fW47pSuHtGlA3Vp6G4qIxBp9M0tEbN+Vx03vzuK7RZs4u3dr7ht6sCY3EhGJYUoQpMIt2biT\nkWOms3pLNvcN6cb5R7RTfwIRkRinBEEq1Pj5G7jp3VnUTqjJ2yOPoG/7RtEOSUREwqAEQSpEYaHj\n8W9/4+nvl9KjTQNGnX8oLevXjnZYIiISJiUIUu6278rjhndm8sPidIb1ac29Q9TfQESkslGCIOXq\nD/0Nhh7M+Ye3VX8DEZFKSAmClJuv5q3n/96brf4GIiJVgBIE2W8FhY7Hv/mNZ35YSs82DRh1fm9a\n1E+MdlgiIrIflCDIfgnsbzC8TxvuHdqNWjXV30BEpLJTgiBl9tvGnYx8fRprt+3i/qEHc576G4iI\nVBlKEKRMlqVncuZzv1ArPo63Lz+CPupvICJSpUT8ijhmdrWZLTezHDObbmZH7aP8CDObZWbZZrbB\nzN4wsxYB6380MxfiNr/ij6Z6ytqdzxVjphNfswafXNNPyYGISBUU0QTBzIYDTwIPAL2AX4Avzaxt\nMeX7A2OA14BuwFAgFXgzoNgZQMuAW3tgJ/BehRxENeec49YP55CWnsnT5/aidcOkaIckIiIVINI1\nCDcBo51zLzrnFjrnrgPWA1cVU/5IYI1z7nHn3HLn3CTgaeDwogLOuS3OuQ1FN2AAkAS8UrGHUj29\n/PNyPp+znltO7kr/zk2iHY6IiFSQiCUIZpYA9Aa+Dlr1NdCvmM0mAi3NbJB5mgDnAF+U8FCXA185\n51bvb8zyR5PTNvPgl4s4uVtzrjymY7TDERGRChTJGoQmQBywMWj5RqDF3sXBOfcrXkLwJpALpAMG\nXBSqvJl1AY4BXiwuCDMbaWbTzGxaenp6aY+h2tq4I4dr3ppJu0ZJPHp2D41WEBGp4iLeSbE0zCwV\nr0nhPrzah1Pwkonni9nkcrwmi8+L26dz7gXnXB/nXJ+mTZuWc8RVU25+IVe/OYPs3HxGXdCbeonx\n0Q5JREQqWCSHOWYABUDzoOXNgQ3FbHMHMMU594h/f46ZZQETzOxO59yaooJ+E8ZFwIvOufzyDb16\ne+CLhUxfuZWnz+1Fl+b1oh2OiIhEQFg1CGb2hJkdvD8P5JzLBaYDA4NWDcQbzRBKEl5SEajofnDs\nQ/GaMV7ejzAlyCcz1zL6lxVcOqADg3qkRDscERGJkHCbGPoCs81sit+GX9bTyMeAi83sMjM7yMye\nBFKAUQBm9rqZvR5QfiwwxMyuMrOO/rDHp4AZzrlVQfseCXznnEsrY2wSZOH6Hdz+0RwOa9+I2//U\nNdrhiIhIBIWVIDjn+uPNP/ADcA+w3v8xP6Y0D+acexe4AbgbmIU3JPFU59xKv0hb/1ZUfjTe0Mhr\ngXnAB8BvwJDA/ZpZR+B4SuicKKWzfVceV70xneTEeJ45rxfxcTHdXUVERMqZOedKt4FZDeBU4C/A\nn4FVeNX6LzjntpR7hBWoT58+btq0adEOI+YUFjpGjpnGj4vTddlmEZEqxsymO+f67KtcWU4L44Fk\noD7esMVVwAXAKjMbUYb9SYz57/+W8e3CTdz154OUHIiIVFNhJwhm1sfMnsMbRvgwMAk4wDl3gnOu\nG3AL8HjFhCmR8tNv6Tz69WKG9Ezh4n7tox2OiIhESVjDHM1sLnAgMB64GPjcORc8uuB94NlyjU4i\nas3WbK5/ZyZdmtXjwTO6azIkEZFqLNx5EN4DXnHOrS2ugHMugxifeEmKl5NXwFVvzCC/wDHqgt4k\nJehK4CIi1Vm4vwIPEeLH38wSgUJ/jgOpxP7x2Xzmrt3OCxf0pkOTOtEOR0REoizcM/73gatDLL8S\nXVa50ntnyirembqaa47rxEndQl4WQ0REqplwE4T+7H0VRoBvKP5KjFIJzFmzjb9/Np8BnZtw08AD\nox2OiIjEiHAThCQg1PUNCgFNzl9Jbc3K5ao3ZtC0bi2eOrcXcTXUKVFERDzhJghzgHNDLB+BN8Oh\nVDKFhY4b35tF+s7dPHfeoTSqkxDtkEREJIaE20nxXuBTM+sMfO8vOwE4Gzi9IgKTivXcj0v5cXE6\n9w09mB5tGkQ7HBERiTHhXovhC2AQ0A7vYklP4V0zYbBzblzFhScVYeLSDB775jeG9Ezh/MPb7nsD\nERGpdsIe7O6c+wr4qgJjkQjYsD2H69+ZScemdXngdE2GJCIioWk2nGokr6CQ696eQXZuAe+MPJQ6\ntfTyi4hIaGE1MZhZgpn908x+M7McMysIvFV0kFI+Hhm/mKkrtvLgGd3p3EyDT0REpHjhjmK4D7gI\n+A/e0MZb8K67sJnQEyhJjPlq3gZe+CmNC45ox5CeraIdjoiIxLhwE4RhwJXOueeBAuBT59xfgXuA\ngRUVnJSPlZuzuOX92fRoXZ+7Tzso2uGIiEglEG6C0BxY4P+fCRSNi/sKOKm8g5Lyk5NXwJVvzKBG\nDePZ8w6lVs24aIckIiKVQLgJwiogxf9/KXCy//+RwK7yDkrKzz2fzmfh+h08MbwnrRsmRTscERGp\nJMJNED7GmxgJ4Engn2a2HBgNvFQBcUk5eG/aat6dtpprj+vMcV2bRTscERGpRMIa5+acuyPg/w/M\nbDXeBZx+00RJsWnBuh387ZN59OvUmBsHdol2OCIiUsnsM0Ews3jgDeBO59wyAOfcZGByBccmZbQj\nJ4+r35xO/drxPHmOLsIkIiKlt88mBudcHl5HRFfx4cj+cs5x2wdzWL11F8+MOJSm9WpFOyQREamE\nwu2D8BFwRkUGIuXjlYkr+HLeBm475UAO69Ao2uGIiEglFe5cu6uAu83sKGAakBW40jn3WHkHJqU3\nfeU
WHvxiISelNufyozpGOxwREanEwk0QLga2Aof4t0AOUIIQZZszd3PNmzNp1bA2j5zdQxdhEhGR\n/RLuKIYOFR2IlF1BoeOGd2exJTuXj6/uR/3a8dEOSUREKrlw+yBIDHvquyVMWJLBfUO60S2lfrTD\nERGRKiCsGgQze6qk9f51GSQKJqdt5qnvl3Dmoa0Z1qdNtMMREZEqItw+CN2D7scDXYE4YGa5RiRh\ny9ydz80fzKZtoyTuHdJN/Q5ERKTchNXE4Jw7Lug2AGgNfAG8V5oHNLOrzWy5meWY2XR/ZERJ5UeY\n2SwzyzazDWb2hpm1CCqTbGZPmdk6M9ttZkvNbFhp4qqM/vX5QtZs3cWjZ/egTq1wcz0REZF9K3Mf\nBOdcDvAAcFe425jZcLxrOTwA9AJ+Ab40s7bFlO8PjAFeA7oBQ4FU4M2AMvHAN8ABeJelPhBv1MXy\n0h5TZfLj4k28PWUVI4/qSN/2mu9ARETK1/6edjYB6pai/E3AaOfci/7968zsFOAq4I4Q5Y8E1jjn\nHvfvLzezp4GnA8pcAjQFjnLO5frLVpQipkpne3Yet304hy7N6+o6CyIiUiHC7aR4U/AioCVwHl4z\nQzj7SAB6A48Grfoa6FfMZhOBB8xsEDAOaAycE/SYQ/1yT5vZEGALXrPHv/xpoqucv382j82Zubx8\nUV8S4+OiHY6IiFRB4dYgXBd0vxBIB14FHgxzH03wOjVuDFq+ETgx1AbOuV/N7By8JoXafrzfABcF\nFOsIHA+8BfwZaA88i1ezcXPwPs1sJDASoG3bkC0bMe2Luev5dNY6bjyxCwe30pBGERGpGDE9UZKZ\npeI1J9wHjMertXgEeB640C9WA9gEXO6cKwCmm1lj4HEzu8U594eLTDnnXgBeAOjTp0+lugBV+s7d\n3PXxXA5pXZ+rj+sU7XBERKQKC7eJIQGo4XdMDFyeCBQGtP2XJAMoAJoHLW8ObChmmzuAKc65R/z7\nc8wsC5hgZnc659YA64E8PzkoshBIwqu1SA8jtpjnnOOOj+aSlVvAf87uQXyc5rgSEZGKE+6vzPvA\n1SGWX0mYwxz9JGI6MDBo1UC80QyhJOElFYGK7hfFPhHobGaBx9IFyMZLSqqED6av4duFG7n15AM5\noHm9aIcjIiJVXLgJQn+8zoTBvqH4DoahPAZcbGaXmdlBZvYkkAKMAjCz183s9YDyY4EhZnaVmXX0\nhz0+Bcxwzq3yy/wXaAQ8aWYHmtnJwD+B54KbFyqrtdt2ce/YBRzWoRF/6a/LYoiISMULt5NiEpAf\nYnkhEPbprHPuXb9/wN14/QnmAac651b6RdoGlR9tZvWAa4H/ANuB74HbAsqsNrOT8JKPWXjNFa8A\n94cbVywrLHTc+sFsCpzj0bN6UKOGZksUEZGKF26CMAc4F7gnaPkIvB/5sDnnngOeK2bdsSGWBc97\nEGq7SZSuJqPSGDNpJROXbuaB07vTtnFStMMREZFqItwE4V7gUzPrjHcGD3ACcDZwekUEJrA8I4sH\nv1zIMV2acu5huhCTiIhETrjXYvgCGAS0w+sD8BRec8Bg59y4iguv+ioodPzfe7NIiKvBQ2ceogsx\niYhIRIU91bJz7ivgqwqMRQI8/9MyZqzaxpPn9KRF/cRohyMiItVMWDUIZnaMmR1TzPKjyz+s6m3h\n+h08/s1vnNq9BYN7pEQ7HBERqYbCHeb4ONAwxPJkf52Uk9z8Qm56bzb1a8dz35CD1bQgIiJREW4T\nw4HA7BDL5/nrpJw89d0SFq7fwYsX9qFx3VrRDkdERKqpcGsQduHNWxCsFRDONMsShpmrtvLcj0s5\nq3drBqYGz0gtIiISOeEmCOOBh8xsTzODmTXCu5Lj+IoIrLrZlVvA/703mxbJifx9UGq0wxERkWou\n3CaGm4GfgBVmNsdfdgjeVRSHV0Rg1c3D4xeRlpHFm5cdTnJifLTDERGRai7cyz2vN7MewHlAT3/x\na8Bbzrnsigquuti0M4dXJ67g/CPa0r9zk2iHIyIiUqp5ELKBF4OXm9mJzrlvyzWqambJxkwATj04\nVDcPERGRyAs7QQhkZq2AS4C/4M2uGFeeQVU3aelegtCxad0oRyIiIuIJt5MiZhZnZmeY2RfACrxr\nMIwCOldQbNXGsvQskhLiaJ6sYY0iIhIb9lmDYGYHApcBFwJZwFvAQOAC59yCig2vekjLyKJDkzqa\nFElERGJGiTUIZjYBmIQ3i+Iw51xH59zdEYmsGklLz1TzgoiIxJR9NTEcCbwOPO6c+18E4ql2cvIK\nWLttFx2b1Il2KCIiInvsK0Hoi9cM8bOZzTSzG82sRQTiqjZWbs7GOejYVAmCiIjEjhITBOfcTOfc\nNXjTLD8GDAZW+9v9OXBmRSmbohEMndTEICIiMSSsUQzOuRzn3Bjn3HHAQcAjwI3ABjP7siIDrOrS\nMrIA6KAmBhERiSFhD3Ms4pxb6py7HWgDDEMXa9ovy9IzaZGcSJ1aZZqSQkREpEKU+VfJOVcAfOrf\npIzS0rPU/0BERGJOqWsQpPw45/whjkoQREQktihBiKLNWbnsyMmnYxN1UBQRkdiiBCGK0tL9Doqq\nQRARkRijBCGK9gxxVA2CiIjEGCUIUZSWkUVCzRq0alg72qGIiIj8gRKEKEpLz6R94yTiaugiTSIi\nEluUIERRWkaWOiiKiEhMUoIQJXkFhazanK0hjiIiEpMiniCY2dVmttzMcsxsupkdtY/yI8xslpll\nm9kGM3sj8IJRZnaxmbkQt8SKP5qyW70lm/xCp8s8i4hITIpogmBmw4EngQeAXsAvwJdm1raY8v2B\nMcBrQDdgKJAKvBlUNBvvglJ7bs65nIo4hvJSNMRRNQgiIhKLIl2DcBMw2jn3onNuoXPuOmA9cFUx\n5Y8E1jjnHnfOLXfOTQKeBg4PKueccxsCbxV3COUjLUNDHEVEJHZFLEEwswSgN/B10KqvgX7FbDYR\naGlmg8zTBDgH+CKoXG0zW2lma8xsnJn1KtfgK0BaehaN6yRQPyk+2qGIiIjsJZI1CE2AOGBj0PKN\nQIu9i4Nz7le8hOBNvKtGpgMGXBRQbDHwF2AIcC6QA0w0swNC7dPMRprZNDOblp6eXvaj2U9p6Vm6\nxLOIiMSsmB7FYGapeE0K9+HVPpyCl0w8X1TGOferc+4159ws59wEYDiwDLgu1D6dcy845/o45/o0\nbdq0wo+hOGkZukiTiIjErjJf7rkMMoACoHnQ8uZAcX0G7gCmOOce8e/PMbMsYIKZ3emcWxO8gXOu\nwMymASFrEGLB9l15ZGTmagSDiIjErIjVIDjncoHpwMCgVQPxRjOEkoSXVAQquh8ydjMz4BC8zo8x\nqegaDB3VxCAiIjEqkjUIAI8BY8xsCl4HxCuBFGAUgJm9DuCcu9AvPxZ40cyuAsbjDWF8ApjhnFvl\nb3MPMAlYAiQDf8VLEIobGRF1yzOKhjiqBkFERGJTRBME59y7
ZtYYuBvvx34ecKpzbqVfpG1Q+dFm\nVg+4FvgPsB34HrgtoFgD4AW8vgnbgZnA0c65KRV5LPsjLT2LuBpG20ZJ0Q5FREQkpEjXIOCcew54\nrph1x4ZY9jReR8Xi9ncjcGN5xRcJaRmZtG2URELNmO4jKiIi1Zh+oaIgLT1L/Q9ERCSmKUGIsMJC\nx/KMLA1xFBGRmKYEIcLWbtvF7vxCdVAUEZGYpgQhwtL8EQyaRVFERGKZEoQI2zMHgpoYREQkhilB\niLC09Czq1apJ07q1oh2KiIhIsZQgRFjRNRi8CR9FRERikxKECFuenqUOiiIiEvOUIERQdm4+67bn\naA4EERGJeUoQIkjXYBARkcpCCUIEpaUXJQiqQRARkdimBCGC0tKzMNMcCCIiEvuUIERQWkYmKfVr\nkxgfF+1QRERESqQEIYLS0nUNBhERqRyUIESIc4609EyNYBARkUpBCUKEbNq5m6zcAo1gEBGRSkEJ\nQoQs0zUYRESkElGCECG/D3FUDYKIiMQ+JQgRsjwji8T4GrRMTox2KCIiIvukBCFC0tIz6dCkLjVq\n6CJNIiIS+5QgREhahoY4iohI5aEEIQJ25xeweks2nTTEUUREKgklCBGwanM2hU4dFEVEpPJQghAB\ny/wRDLoGg4iIVBZKECIgLUNzIIiISOWiBCEC0tKzaFqvFvUS46MdioiISFiUIESArsEgIiKVjRKE\nCPCGOKqDooiIVB5KECrY1qxctmXn0Un9D0REpBKJeIJgZleb2XIzyzGz6WZ21D7KjzCzWWaWbWYb\nzOwNM2tRTNlzzcyZ2biKib701EFRREQqo4gmCGY2HHgSeADoBfwCfGlmbYsp3x8YA7wGdAOGAqnA\nmyHKdgQeASZUSPBlVDTEsWMTNTGIiEjlEekahJuA0c65F51zC51z1wHrgauKKX8ksMY597hzbrlz\nbhLwNHB4YCEziwfeBu4C0iou/NJLS88iPs5o3bB2tEMREREJW8QSBDNLAHoDXwet+hroV8xmE4GW\nZjbIPE2Ac4Avgsr9C1jhnHutPGMuD2npmbRrXIeaceruISIilUckf7WaAHHAxqDlG4GQfQqcc7/i\nJQRvArlAOmDARUVlzOwkYBhwRThBmNlIM5tmZtPS09NLewyllpaRpRkURUSk0onp01ozS8VrUrgP\nr/bhFLxk4nl/fVNgNHCRc25bOPt0zr3gnOvjnOvTtGnTCom7SH5BISs36yqOIiJS+dSM4GNlAAVA\n86DlzYENxWxzBzDFOfeIf3+OmWUBE8zsTqAz0BL4zsyKtqkBYGb5QDfn3OLyO4TSWbN1F3kFjk7q\noCgiIpVMxGoQnHO5wHRgYNCqgXijGUJJwksqAhXdrwFMBboDPQNun+GNZOgJLN/vwPeDhjiKiEhl\nFckaBIDHgDFmNgWvA+KVQAowCsDMXgdwzl3olx8LvGhmVwHj8WoLngBmOOdW+WXmBT6AmW0Dav5/\ne3cfLVdV3nH8+yMhgaQ1h9MAABFOSURBVIQQXhJIFBNeaiUgipCCkZdQIYogSwpt1RYRLVVUFJda\nUwGFIpVaUUEEKlSJDRbQWl8AMaARZEkAExcaFFYxN/IWCPf6EpKbXJKQp3/sfcnJmZncuZd5uXPz\n+6x11p05Z5999nnmzJ1nztlndkRsMb8duvpvcfSvKJqZWYdpaYIQETdJ2h04n/Rh/yBwQkQ8motM\nK5WfJ2kCcDbweWAVsBCY27pWD92y7l52Gbc9u40f0+6mmJmZDUqrzyAQEVcBV9VYdkyVeVeQOirW\nW/8ZQ21boy3v8SBNZmbWmYb1XQydrqvbgzSZmVlncoLQJKv7NvDM6ufcQdHMzDqSE4QmWd7jMRjM\nzKxzOUFoks13MPgMgpmZdR4nCE3S1b2G7QTTdx/X7qaYmZkNmhOEJlnW08teu45j7OhR7W6KmZnZ\noDlBaJJ0B4MvL5iZWWdygtAEmzZF/g0Ed1A0M7PO5AShCZ56to++DZt8BsHMzDqWE4QmWO47GMzM\nrMM5QWiC/lEc9/OvKJqZWYdygtAEXd29jB8zij0mjG13U8zMzIbECUITLOtew76Td0JSu5tiZmY2\nJE4QmqCru5d9PIqjmZl1MCcIDda34XlWrFrnDopmZtbRnCA02PKeXiLwMM9mZtbRnCA02AuDNPkS\ng5mZdTAnCA3W1Z1ucfQlBjMz62ROEBqsq6eXqRN3YNyY0e1uipmZ2ZA5QWiwrh4P0mRmZp3PCUID\nRQRd3R6kyczMOp8ThAbqWbOe1X0bfQbBzMw6nhOEBtrcQdFnEMzMrLM5QWigrh7f4mhmZiODE4QG\n6upew5jR2/GSXXZsd1PMzMxeFCcIDdTV3cs+u49n1HYepMnMzDqbE4QG8i2OZmY2UjhBaJD1Gzfx\n2B/WOkEwM7MRwQlCgzz2h7U8vyn8GwhmZjYitDxBkPR+Scsl9UlaIumoAcr/naQHJK2V9LSk6yVN\nKSz/G0mLJf1JUm8u+87m78mWpu02jls/dCR/uf8erd60mZlZw7U0QZD0VuBy4DPAa4B7gNskTatR\n/ghgPvB14EDgZOAA4BuFYr8HLgZeC7wKuA74qqQTmrQbVY0ZvR0HvmQiu40f08rNmpmZNUWrzyB8\nBJgXEddGxEMR8UHgKeB9NcrPAp6IiC9GxPKIuBe4Aji8v0BELIyI70bEwxGxLCIuB34FbPXMhJmZ\nmdXWsgRB0hjgUOD20qLbgdfVWO1nwFRJJymZBLwN+EGNbUjSscArgJ82puVmZmbbnlaeQZgEjAJW\nluavBKZUFoeIWERKCL4BrAe6AQFb9DGQNFHSmlzmVuBDEXFbtTolvSf3WVjc3d39InbHzMxs5BrW\ndzFIOoB0SeHTpLMPx5OSia+Uiq4GDgb+AjgP+EI+k1AhIq6JiJkRMXPy5MlNa7uZmVknG93CbfUA\nzwN7lubvCTxdY51PAPdHxOfy819J6gXulnRuRDwBEBGbgN/mMg9ImgGcC/y4kTtgZma2rWjZGYSI\nWA8sAeaUFs0h3c1QzThSUlHU/3xrbd8OGDvYNpqZmVnSyjMIAF8A5ku6n9QB8SzgJcB/AEj6L4CI\nOD2Xvxm4VtL7gAXAVOAy4BcR8Vhe5zzgPqCLlBScALwD+GCL9snMzGzEaWmCEBE3SdodOJ/0Yf8g\ncEJEPJqLTCuVnydpAnA28HlgFbAQmFsothNwNbAXsA54GDg9Im5o5r6YmZmNZIqIdrehbWbOnBmL\nFy9udzPMzMxaRtKSiJg5ULlhfReDmZmZtcc2fQZBUjfwaJVFk0h3XdjAHKv6OVb1c6zq51jVz7FK\npkfEgPf5b9MJQi2SFtdz+sUcq8FwrOrnWNXPsaqfYzU4vsRgZmZmFZwgmJmZWQUnCNVd0+4GdBDH\nqn6OVf0cq/o5VvVzrAbBfRDMzMysgs8gmJmZWQUnCGZmZlbBCUKBpPdLWi6pT9ISSUe1u03NJOlC\nSVGani4sVy6zQtI
6SXdKOrBUx66S5ktalaf5knYplTlI0l25jiclfUqSWrWfQyXpaEnfz20OSWeU\nlrcsPpJOlfQbSc/lv3/VtB0fgjpiNa/KsXZvqcxYSVdI6pHUm+vbq1RmmqSb8/IeSV+SNKZUZnZ+\n//ZJ6pJ0VtN2fJAkfULSzyU9K6k778srS2V8XFF3rHxcNVNEeEr9MN4KbAD+EZgBXAGsAaa1u21N\n3OcLSWNXTClMkwvL5wKrgVOBVwLfBFYAEwplbgN+DczK06+BmwvLdyYN5/3NXMdf5zo/2u79ryM+\nJwCfyW1eC5xRWt6S+OT1NgLn5WPzvPz88HbHaBCxmgfcUTrWdiuVuTrHbw5wCHAn8AAwKi8fBSzN\n8w/J5VYAVxTq2Afoze/fGfn9vAE4td0xyu1bALwrv9YHAd/Jr/9uhTI+ruqPlY+rZr4G7W7AcJlI\nI0JeW5r3CHBJu9vWxH2+EHiwxjIBTwHnFebtmP/JvDc/nwEEcEShzJF53ivy8/cBzwI7FsqcDzxJ\n7iTbCRMpWTyjHfEBbgLuKLXnR8AN7Y5LPbHK8+YBt2xlnYnAeuDvC/NeBmwC3pifvyk/f1mhzGlA\nH7Bzfv5Z4JFS3f8JLGp3XGrs906kIexP8nE1uFj5uGr+5EsMQD6VdChwe2nR7cDrWt+ilto3n8pc\nLulGSfvm+fuQsvEXYhIR64Cfsjkms0gfBvcU6vsZKdMulrk7r9tvAWmY770bvC+t1Mr4zKLy2FxA\n5x2bR0p6RtL/SbpW0h6FZYcC27NlPB8HHmLLWD2U5/dbQBrm/dBCmWqxmilp+8btSsNMIF3q/WN+\n7uOqtnKs+vm4ahInCMkk0mmmlaX5K0lv1pHqPuAM4HjSKbMpwD1KQ3L37/fWYjIF6I6cTgPkx8+U\nylSrAzo7tq2MT60ynRS/HwKnA8cCHwUOAxZKGpuXTyF9Oyz/Tn45nuU49OT1BorVaNL7fLi5nHS6\ne1F+7uOqtnKswMdVU41udwOsfSLituLz3LmnC3gncG/VlcyGICJuLDxdKmkJaaC0E4H/bU+r2kvS\nF0iXBo6MiOfb3Z7hrFasfFw1l88gJP3Z4p6l+XuSOsVsEyJiDamz08vZvN9bi8nTwORiz+j8eI9S\nmWp1QGfHtpXxqVWmY+MXESuAJ0jHGqR9GUXlt7FyPMtx6D/7N1CsNjKMRvGT9EXg7cDrI6KrsMjH\nVclWYlVhWz+uGs0JAhAR64ElpN6rRXPY8jrfiCZpB2B/Uiep5aQ3xZzS8qPYHJNFpI5DswrVzALG\nl8ocldft199L+HcN34nWaWV8FjHCjk1Jk4CXko41SO+/DWwZz71IHfKKsZpRukVtDvBcXr+/TLVY\nLY6IDY3ch6GSdDmbP/AeLi32cVUwQKyqld9mj6umaHcvyeEykW5zXA+cSTp4Lid1BJre7rY1cZ8v\nBWaTOkYdDtxC6vk8PS+fC6wCTiHdanQj1W+3Wsrm262WsuXtVhNJ//BuzHWckrfRCbc57gQcnKe1\nwKfy42mtjA+pM9VG4J9JCdwnSP/0hsXtaAPFKi+7NO//3sAxpH+4T5RidXWedxzwGuAnVL8dbWFe\nfhypV36129Euy+/jM/P7eljcjgZcmV/f17PlrXk7Fcr4uKojVj6uWvAatLsBw2kC3k/Krvszx6Pb\n3aYm72//P571+Q3xbeCAwnKRboV8inTLz13AK0t17Apcn9/Iz+bHu5TKHETqhd2X67qADrjFMf/D\niSrTvFbHh3Qf+8P5tXoIOKXd8ak3VqTb9BaQOtGtJ10jnkfhtrJcx1jSfea/JyUZN1cpM42UyK7N\n5b4EjC2VmQ38Ir+PlwNntTs+hbZVi1EAFxbK+LiqI1Y+rpo/ebAmMzMzq+A+CGZmZlbBCYKZmZlV\ncIJgZmZmFZwgmJmZWQUnCGZmZlbBCYKZmZlVcIJgNoJJmifplna3o0jSWyQ9ImmjpHntbs9gDMd4\nmjWLEwSzJskfJiHpk6X5x+T5I3YUuAF8lfSjXNOBc6oVkHSnpC/Xet5sW3mNzgFOa1U7zNrJCYJZ\nc/UB/yRpcrsb0kiSth/iersAuwMLIuLJiFjV2JYNuP0xL2b9iFgVEX9qVHvMhjMnCGbN9RPSz3d/\nslaBat9WJe2d580slXmTpCWS1km6W9JekmZL+qWkNZJukbR7lW2cL2llLnOdpB0LyyTp45KW5XqX\nSjqtSlveLmmhpHXAe2vsy66Svi7pj7muH0k6sH8fgD/mogtznccMFMB8GWI28IG8TkjaOy87QNKt\nklZLekbSDZKmFNfNMZkr6QnSb/Ij6TRJPy+s9y1JL+3fX9LrBtCdtzevWF+h/rGSLsux7ZN0r6Qj\nC8v7X7djJd0naa2kxZIOKZSZKGl+bkefpC5JHx4oLmbN5gTBrLk2kQbDOUvSfg2o71+AD5MG19oV\nuIk0MNJ7SOMhHEj6Hf+i2cCrgWOBU4E3AJ8tLL8Y+AfgA8ABwCXAVySdWKrnEuCqXOa7Ndo3L7ft\nLcBhpN+2/2FOSO7J7SO3Yyr1jRx4DmkQnuvyOlOBxyVNJY018GDe1nGkAXy+J6n4v2028Crg+BwD\ngDGksQleDbyZNLzvDXnZ47l95PZOpcalEODfSQO9vZs00M/SvL9TS+UuIR0Hh5B+6/8b0gvDNV9M\nGjfhzcArcl1PDhATs+Zr92AQnjyN1In0YXlLfvwT4Mb8+BjSoDOTqj3P8/bO82aWyryxUObsPO+Q\nwrwLgQdLbfgTW44WeBppwJnxeVoHHFVq+2XAD0pt2eoInMDLc7mjC/MmkkYmPDM/n5TLHDNAXXcC\nX671PM+7CPhxad6uuf7DCvvfTWngnSrb2z+vt1et16TKazqeNEjQ6YXlo4BlwMVbed2OKG3r+8DX\n2n28evJUnkZjZq0wF1gk6XMvsp5fFR6vzH+XlubtUV4nItYUni8ifYPejzTS3Q6kb73Fkdu2J10a\nKVo8QNtmkM6YLOqfERGrJC0lnXVotEOBoyWtqbJsP+D+/PjBiHiuuDCf4r+ANCT1bqQRFCGN6vdE\nndvfjxSnn/XPiIjnJS2icn+Lr9uK/HePvK2rgf+RdChwB2nY5rvqbINZ0zhBMGuBiLhf0rdJp6Q/\nXVq8Kf9VYV6tToAbitXmusvzBnPpsL/sScBjW9kWQO8g6i1rxrCx2wG3Ah+rsmxl4fEW7ZY0njRM\n8I+Ad5CGC54E3E1KnBqhvL8Vrxs59hFxm6TpwJtIl0BulfStiHhXg9piNiROEMxa51zgN6Rr4UXd\n+e/UwuODG7jdgySNj4j+D8rXkk6NLyN9SD0HTI+IhS9yOw/l+maR+gYgaWfS9fXrXmTd60mn74t+\nAfwt8GgpSRrI/qSE4NyIWJ7beUqV7VFlm0XLcrkj8mMkjSLt/38Poj1ERA8wH5gv6TbgBklnlc98\nmLWSOymatUhE/Ba4hsoOb78ldYy7UNKfS3oDcH4DNz0a+JqkAyXNAf4NuDYi
eiNiNXApcKmkd0v6\nM0kHSzpL0nsGs5GIeAT4HqmD41GSDgKuB55lkB+YVfwOOCzfUTEpd0K8ktTH4SZJh0vaV9Jxkq6R\nNGErdT1GSorOzuucSOVZnUdJ3/RPlDRZ0k5V9reXdHngs5JOkDQjP9+T1JmzLpIuknSypJfnOk4B\nupwcWLs5QTBrrYuAjcUZ+dvv24B9gV+S7lQ4t4HbvAv4Namj5HeAhcDHC8s/Serc+LFc7g5SL/7l\nQ9jWu0jX/r+f/44Djo+IdUNse79LSd/Wf0M6yzItIlaQvr1vAn6Y234l6cO/5odrRHQD7wROzvVd\nAHykVObJPP9fSZcrav1I01zSnSTXAQ+Q75aIiKcGsW/P5e38ktSfYQLpko9ZWymiGZcGzczMrJP5\nDIKZmZlVcIJgZmZmFZwgmJmZWQUnCGZmZlbBCYKZmZlVcIJgZmZmFZwgmJmZWQUnCGZmZlbBCYKZ\nmZlV+H+eyjTX6qdbIgAAAABJRU5ErkJggg==\n", 211 | "text/plain": [ 212 | "" 213 | ] 214 | }, 215 | "metadata": {}, 216 | "output_type": "display_data" 217 | } 218 | ], 219 | "source": [ 220 | "plot(log)" 221 | ] 222 | } 223 | ], 224 | "metadata": { 225 | "kernelspec": { 226 | "display_name": "Python 2", 227 | "language": "python", 228 | "name": "python2" 229 | }, 230 | "language_info": { 231 | "codemirror_mode": { 232 | "name": "ipython", 233 | "version": 2 234 | }, 235 | "file_extension": ".py", 236 | "mimetype": "text/x-python", 237 | "name": "python", 238 | "nbconvert_exporter": "python", 239 | "pygments_lexer": "ipython2", 240 | "version": "2.7.13" 241 | } 242 | }, 243 | "nbformat": 4, 244 | "nbformat_minor": 2 245 | } 246 | -------------------------------------------------------------------------------- /.ipynb_checkpoints/MLP-checkpoint.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### Convolution Neural Network" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | " - Conv Layer 1 (16 filters, KERNEL = 3X3, ReLU) \n", 15 | " - Pooling Layer 1 (Average, KERNEL = 2X2) \n", 16 | " - Conv Layer 2 (64 filters, KERNEL = 3X3, ReLU)\n", 17 | " - Pooling Layer 2 (Max, KERNEL = 2X2, ReLU) \n", 18 | " - FC 1 (1600, by 64x5x5)\n", 19 | " - FC2 (100)\n", 20 | " - Output (10)" 21 | ] 22 | }, 23 | { 24 | "cell_type": "markdown", 25 | "metadata": {}, 26 | "source": [ 27 | "Design reasoning\n", 28 | "1. Fashion data is more complex than digits, so we use more filters\n", 29 | "2. Choose Avg pooling on first conv layer to preserve more information\n", 30 | "3. Use Dropout to avoid overfitting. Although current start-of-art is Dropconnect, but it is not supported by PyTorch\n", 31 | "4. 
Use two layers of FC to reduce dimension" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": 6, 37 | "metadata": { 38 | "collapsed": true 39 | }, 40 | "outputs": [], 41 | "source": [ 42 | "import torch\n", 43 | "from torch.autograd import Variable\n", 44 | "import torch.nn as nn\n", 45 | "import torch.nn.functional as F\n", 46 | "import torchvision\n", 47 | "from torchvision import datasets, transforms\n", 48 | "import torch.optim as optim\n", 49 | "import torchvision.datasets as dset\n", 50 | "import numpy as np" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": 7, 56 | "metadata": { 57 | "collapsed": true 58 | }, 59 | "outputs": [], 60 | "source": [ 61 | "from torch_data_utils import load_fashion_mnist\n", 62 | "import cnn" 63 | ] 64 | }, 65 | { 66 | "cell_type": "code", 67 | "execution_count": 8, 68 | "metadata": { 69 | "collapsed": true 70 | }, 71 | "outputs": [], 72 | "source": [ 73 | "train_loader, test_loader = load_fashion_mnist()" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": 9, 79 | "metadata": { 80 | "collapsed": true 81 | }, 82 | "outputs": [], 83 | "source": [ 84 | "model = cnn.Model()" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": 10, 90 | "metadata": { 91 | "collapsed": false, 92 | "scrolled": true 93 | }, 94 | "outputs": [ 95 | { 96 | "name": "stdout", 97 | "output_type": "stream", 98 | "text": [ 99 | "Iter: 1000. Accuracy: 0.8312\n", 100 | "Iter: 2000. Accuracy: 0.8559\n", 101 | "Iter: 3000. Accuracy: 0.8614\n", 102 | "Iter: 4000. Accuracy: 0.8663\n", 103 | "Iter: 5000. Accuracy: 0.8711\n", 104 | "Iter: 6000. Accuracy: 0.8741\n", 105 | "Iter: 7000. Accuracy: 0.8749\n", 106 | "Iter: 8000. Accuracy: 0.8807\n", 107 | "Iter: 9000. Accuracy: 0.8804\n", 108 | "Iter: 10000. Accuracy: 0.8801\n", 109 | "Iter: 11000. Accuracy: 0.8823\n", 110 | "Iter: 12000. Accuracy: 0.8801\n", 111 | "Iter: 13000. Accuracy: 0.8895\n", 112 | "Iter: 14000. Accuracy: 0.8865\n", 113 | "Iter: 15000. Accuracy: 0.8883\n", 114 | "Iter: 16000. Accuracy: 0.8874\n", 115 | "Iter: 17000. Accuracy: 0.8922\n", 116 | "Iter: 18000. Accuracy: 0.8858\n", 117 | "Iter: 19000. Accuracy: 0.8874\n", 118 | "Iter: 20000. Accuracy: 0.8887\n", 119 | "Iter: 21000. Accuracy: 0.888\n", 120 | "Iter: 22000. Accuracy: 0.8926\n", 121 | "Iter: 23000. Accuracy: 0.8929\n", 122 | "Iter: 24000. Accuracy: 0.8916\n", 123 | "Iter: 25000. Accuracy: 0.8895\n", 124 | "Iter: 26000. Accuracy: 0.8906\n", 125 | "Iter: 27000. Accuracy: 0.8924\n", 126 | "Iter: 28000. 
Accuracy: 0.8908\n" 127 | ] 128 | } 129 | ], 130 | "source": [ 131 | "max_epochs = 30\n", 132 | "report_size = 1000\n", 133 | "iter = 0\n", 134 | "optimizer = torch.optim.Adam(model.parameters(), lr=0.001)\n", 135 | "criterion = nn.CrossEntropyLoss()\n", 136 | "log = {}\n", 137 | "\n", 138 | "for epoch in range(max_epochs):\n", 139 | " for i, (features, labels) in enumerate(train_loader):\n", 140 | " \n", 141 | " features = Variable(features)\n", 142 | " labels = Variable(labels)\n", 143 | " \n", 144 | " optimizer.zero_grad()\n", 145 | " outputs = model(features)\n", 146 | " \n", 147 | " loss = criterion(outputs, labels)\n", 148 | " loss.backward()\n", 149 | " \n", 150 | " optimizer.step()\n", 151 | " \n", 152 | " iter += 1\n", 153 | " \n", 154 | " if iter % report_size == 0:\n", 155 | " correct = 0.\n", 156 | " total = 0.\n", 157 | " \n", 158 | " for features, labels in test_loader:\n", 159 | " features = Variable(features)\n", 160 | " result = model(features)\n", 161 | " \n", 162 | " _, predicted = torch.max(result.data, 1)\n", 163 | " \n", 164 | " total += labels.size(0)\n", 165 | " correct += (predicted == labels).sum()\n", 166 | " \n", 167 | " accuracy = correct / total\n", 168 | " log[iter] = accuracy\n", 169 | " print('Iter: {}. Accuracy: {}'.format(iter, accuracy))" 170 | ] 171 | }, 172 | { 173 | "cell_type": "code", 174 | "execution_count": 12, 175 | "metadata": { 176 | "collapsed": true 177 | }, 178 | "outputs": [], 179 | "source": [ 180 | "%matplotlib inline \n", 181 | "import matplotlib\n", 182 | "import matplotlib.pyplot as plt\n", 183 | "import numpy as np\n", 184 | "matplotlib.rcParams.update({'font.size': 14})\n", 185 | "figsize = (8, 5)\n", 186 | " \n", 187 | " \n", 188 | "def plot(test_logs, size = figsize):\n", 189 | " \n", 190 | " plt.figure(1, figsize=size)\n", 191 | " \n", 192 | " lists = sorted(test_logs.items()) \n", 193 | " x, y = zip(*lists) \n", 194 | " plt.plot(x, y, label = 'Testing')\n", 195 | " \n", 196 | " plt.ylabel('Accuracy ')\n", 197 | " plt.xlabel('Number of Iterations')\n", 198 | " plt.title('Test Accuracy VS. 
Number of Iterations')" 199 | ] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "execution_count": 13, 204 | "metadata": { 205 | "collapsed": false 206 | }, 207 | "outputs": [ 208 | { 209 | "data": { 210 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAggAAAFZCAYAAAD9xtesAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzs3Xd4VGXax/HvTUgIAUJvoRcRgwgI\nWAC7qOtKsYFiXxX7Wl677rqrrq5l7brYUexdwYJ9RZTeq0DoNaEnIaQ97x/nBMdhEiYhmZkkv891\nzZXMOc85c5+p93naMeccIiIiIoFqRDsAERERiT1KEERERGQvShBERERkL0oQREREZC9KEERERGQv\nShBERERkL0oQRKTaMbNTzMyZ2dBoxxIuM7vFzJabWYGZTYp2POXNzDaY2ahoxyG/U4JQTfhfhuHc\nLi7nx002s3+Y2YAybNvAzHL8uHqVZ1xVmZnd6T9nx5VQZoRf5sKAZYPN7Ecz22hmu8xshZl9bGZn\nlzGOK/3H2GVmLUOs/9bMFpVl39WNmZ0IPAz8DFwC3FNC2X/7z3uDgGUXmtm1FR9pyfz32N3RjkPC\nUzPaAUjEXBB0fyRwBPCXoOW/lPPjJuN9meXgfbmVxjDAARuA84GZ5RtalfUmcD8wAvihmDLnAdnA\nR+AlFcC/gP8BDwGZQCfgGLz3yPv7EU8icBtww37so7o7Fu+zcLlzLqcM218ItAaeKc+gymAw3mf5\n/hDr2gEFkQ1HSqIEoZpwzr0ReN8/IzkseHmMOR/4AlgBnGtmtzjnCqMbUmhmluScy452HADOuZVm\n9jNwppld45zLDVxvZk2Bk4D3nXOZZpYI3AV85Zz7U/D+zKz5foY0C7jCzB5yzq3fz31VKmZWxzmX\nVQ67agbkljE5qDDl+b53zu0uj/1I+VETg4RkZjXM7CYzm+9X8280sxfMrGFQucPMbLyZZfhVySvN\n7DUzizezrsBqv+iDAc0Y+2xnNLN2wADgHeBtoCVwfDFlDzKz98ws3Y9hsZk9HFSmtZm9bGbrzGy3\n35Y7ysxq++v/bWZ7ffkGVJO3CFi2wcw+8duxp/nb/dVfd4aZfW5ma80s16+mf9DMEkoTt5kN8h/3\n1BDbDdtXEwLwBtAQ2OsHH69mpqZfBqAFkEQxNTzOuY0lPE44/gXEA7eXVMjMuvrHdU7Q8kR/+e0B\ny4qq0Tub2TtmtsPMNpnZP/z17cxsrJntNLP1ZnZ9MQ9b08we8F/TbP+16xAitlQz+9DMtvifh2lm\nNiSoTNF7ZYCZPWdmm4CMfRxzvJndY2Zp/vtyhR9PrcBjBy4HagV8hs4pab9BjzEJOAE4MGD7nID1\n4X7W9+t9b2bvAJcGHceez5aF6INgZs3N7BX/tc0xs9lmdlFQmaL3zQ1mdo3/XOaY2a9m1jOobIqZ\nvWpma/zne4OZjTOzLuE+n9WJahCkOK/gVVG/CjwFtAeuA3qb2ZHOuVwzSwG+BtYBDwLb8aoJhwC1\n/OV/9bd/Fxjn7/u3MB7/PCALGOec22VmS/FqFL4NLGRe34T/AbnAC3i1DR2BocCtfpk2wGSgvl9m\nIdAKONNftivsZ+V3qXiJyyh/n2n+8svwquefAHYA/f04WgIXlyLuL4FN/F6LEuh8YBXwYwnxvY/3\nvJ8HfBq07jwgHe+1A+91ygMGm9lzzrmtJR556S0FxgAjzezf5VyL8AEwD68JYwhwj5ltAa4FvvGX\nXwg8YWZTnXPBTWh/9/8+BDQBrgd+MLNDnHM7AMzsEGACXrL7b7z35dnAx2Y23DkX3PzyPN7zey9e\nE1tJXsV7Pd4D/gMcBtwBHAScjvf+uADvh7U/vzcJ/rqP/Qa6x993I/zPBH+syt/nZz2g7P68758B\nmgcdB8C2UEGbWR28z0gHf9sVwFnAaDNr6Jx7ImiTi/Cas54D4vzH/9DMujjnio73U7yms2eAlXg1\nM8f7y8L5XqpenHO6VcMbMBrIKWbd8XjtncOClp/gL7/Qvz/cv39wCY/T2i9zeynjmw+8EXD/Xrwv\nntpB5SbhfcG0DlpuAf+/A+QDvUI8jvl//x3q+QCu9ONvEbBsg7/spBDlk0IsuxfvC7l5KeN+Au/H\nqG7AssZ4PxoPhPEcfoTXz6BewLKOfuxPB5V90F+eiZec3A30DYynDO+xoueuJ94XcB7wZMD6b4FF\nAfe7+uXPCdpPYvB7yH+9HPBswLJ4YCNQCPxfwPImwG7gpYBlp/jbrw56fk71l98dsOwnvP4vCYGv\nk788LcTxTgDiwnh+DvPL/zdoedGxnRiw7KVQ789i9lu0fYPinuuA5WF91svxfV/scfj7HxVw/1b/\n8YYHLKvpP7+ZQP2g980GIDmgbFEfphP9+839+9eW9T1d3W5qYpBQhgFbgO/NrEnRDZiNV0tQVLW9\n3f87yMzKrTbKzA7l9zOVIm8D9fA6ORWVaw0cjvfFvyZwH87/RjCzeH+bT5xze3VyLCpXBkudc18H\nL3R+e6xfbdvAf94m4DXn9Qw3bt9reFX/ZwYsG473QzgmjBjfAGrjnYkWGRGwLtCdeGd18/H6J9wH\nTAHmm1nvMB6rRM65Zf5jjvRrnsrLSwGPkQfMwPvxfjlgeQbemW7HENuPds7tDCj7BbAMOA3Ar/4+\nCq8GLDngs9AY+AroYF5zWKDn3e9nrCX5s//3saDljwatr0jhftaLlPl9XwZ/Btbi1a4UPU4+8CRQ\nB6/jZqB3nV/r45vg/y163bPwEpbjLWCEhxRPCYKE0gWvOjI9xK0+XrUceFXUnwEPAJvN7FMzu9T8\ndv39cD7ehznNvDbmznhnn8v8dUU6+X/nlbCvFLwfyZLKlEVaqIVm1sPMxuPFv5U/VuXX9/+GEzd+\nQjOXPx7z+cB059zCMGL8HK+W4ryAZefhfclPDnos55x71Tl3ONAA78zyZbyzs8+D26PL6H68M8AS\n+yKU0qqg+9uBTOdccLX1drw+GcGWhFj2G141O3ifBfBqWII/C//y1zXjj5btM2pPO7yarT+U9xOa\njIAYKlK4n/Ui+/O+L612wG8hkvii9377oOXB74WiprKGAM65TLzOuEOATWb2k5ndVs4Ja5WiPggS\nSg1gPV7bbSgZAM4bUTDEzI7Ey/ZPwjuju83MjnDObSntA5tZHHAO3hnCghBF2ppZE/9LtDwVV5MQ\nV8zyvfotmFljvH4Bm/F+BNP8cu2BFylbQv468JD/JZYIHEmYwwWdc7vN7
H3gL2bWDK+5pyvwz31s\ntxNveOQPfnv+LcBAAs7kysI5t8zMxgCXm9m/QxUpZtPiXgMIPSyuuLN3Kym+YhS9Zg8R1P8lwOKg\n+2Xp0xItYX3WA0TqfV8W+3zdnXMPmdkHeEnCSXifhbvM7E/OuYkRiLFSUYIgoSwD+gETXBhDj5xz\nv+J1mrrbzE7Ha/u+BK9jVGmr8E/A69h0F3t3GqqL15FqGF5HpKIzr4NL2N86vC+rksqAd7ZRy8wS\n3R+HkgVXH5dkIN7Z9ymBZ+hmNiioXDhxF3kTr015BF5zQz5/bHrZlzfwesAP5/czrtIMbZ3i/y2v\ns6z78Trd3RFiXdEZX3D1b2leg9I6oJhlK/z/i16rXOdccQlCWa3E+w7uREBNhv+D2yQghvJQ3Oew\nVJ/1YoT7vi8pjlBWAgeYmQXVInT1/64obaCwp7nrMeAxM2uPNwz3diBUvNWamhgklHfx2rnvCl5h\nZjWLqpvNrFGIbWf4f4u+5IvGgIdbRX0+sBN41Dn3QdBtNF7b6PkAfvv9ZOAyv10/ME7zy+ThNYMM\ntRCzMRaV4/cfgqMD1iXzx+r9fSk6g9lzxuLXiNwYWCicuAPKrserqj0fr3lgvHNuUylimoD3RXsB\nXs3MFOfc0qDHTDazw4vZvmiY5aKA8p0txFDAcDjn0vBqRS5n76QjHe+1Pzpo+dVleawwXWxm9Yru\nmDestDNe8wzOudV4ye9VFmI+CPPmlCirolE9wTVCNwWtLw9ZhP4MhvVZ34ew3vcBcdQys6Qw9jsO\nr9brrKD9/tXfz49h7GMPM6tj3pwfgVbi9cFQn4QQVIMge3HOfW1mLwN/8zsMfovXB+AAvA/rzXgj\nA0aa2SXAJ3g/sHXxOrrl4c/Q55zbZmbLgPPNrOjDuNQ5Ny34cf0vjdPxJuzJDV7v+8yPq6P/Y3MN\n3lComWb2IrAc70z5dLyOjuD1hj4W+NnMnsf7sWvhH8tJeL2fx/l/Xzezok5il+HVQOw1TXAxfsJr\n637LzJ7B600/DK8PRLBw4i7yOr/XGvydUnDOOTN7i9/P2ENV7ScDk8wbLz8ery03GTgZr7f/BH5v\nTwZvvoRt/H4mV1r341VpH0RA9bwf68vA9Wa2A+/M7nhCdy4sL1vx3hej+X2Y42q84X5FrsB7beeZ\n2Ut4VejN8Zp72gPdyvLAzrmpZvYmcLVfa/AT3siGi/A61X5Xlv0WYxpec+B/gOlAnnPu/VJ81ktS\nmvd90ef+OTP7Bi+5+MSFngDqObzhnWP8BHaFH9PRwI3Oue0htilJd2Cc3+y2AH9oL94wyv+Ucl/V\nQ7SHUegWnRslDHP01xveF+N0vKFyO4A5eG2xrfwyffF+uFbiTaW8EW/Mfr+gfQ0ApvplHAFDmYLK\njSBoaFWIMr39Mn8PWNYd+Bjvy34XXgLw76Dt2uH90G7y40jD+wKqHVDmcD/O3Xg/2NdS/DDHT4qJ\nrz/eGWeWX+4p4FBCD9/bZ9x+uUS8H+TtBA3zDPO1TvUfPw9oGmJ9At7U25/4x73Lj3828DeChrD5\nx7XXkLkQ+90zzDHEupf8dYuCltfx35s7/ON9E6+mobhhjg2Ctn8H2Bbi8SYBswLuFw1zPAuvA+IG\n/33+JdApxPad/PfPBrxhpmvwksqzwzneEp6jeOAf/vOei5ecPQDUCvF87c8wx3r+c7nFX5cTsG6f\nn/Xyet/jnZQ+w+/DUfd8tgga5ugva47XrJiO97mcA1wcVKZomOMNIT43e943eB0un8Xr5Jjpv7+m\nUML3TXW/FY0BF5EY5Q/VLPpyvjTa8YhI9aA+CCKx70y8oWivRTsQEak+VIMgEqP84aPd8fodrHfO\n9Y1ySCJSjagGQSR2XY/XT6KkceoiIhVCNQgiIiKyF9UgiIiIyF6q9TwITZo0ce3bt492GCIiIhEz\nffr0DOfcPif5qtYJQvv27Zk2ba/5ekRERKosf9K6fVITg4iIiOxFCYKIiIjsRQmCiIiI7EUJgoiI\niOxFCYKIiIjsRQmCiIiI7EUJgoiIiOxFCYKIiIjsRQmCiIiI7KVaz6QoIiIVKze/kG3ZuWzOymVr\nlve3hhlHd2lCvcT4aIcnJVCCICIipVJQ6Ji7djsZO3ezJTuXLQE//nv++st35uSH3EdCzRocf2Az\nBvdM4fiuzUiMj4vwUci+KEEQEZGwOef469sz+Xzu+j8sT6hZg8Z1EmiYlEDjugm0bZREozoJIW87\nduUxbs56xs1Zz1fzN1AnIY6TurVgcI8UBhzQhPi4qtP6nZG5mxkrt9KvcxPq1qpcP7mVK1oRkQoy\nOW0zG3fuZnCPlGiHEtOe/n4pn89dzzXHdeKk1BZ7fvSTEuIws7D306d9I/52WiqT0jYzdvY6vpy3\ngY9nrqVBUjx/Orglg3ukcFiHRsTVCH+fsWTdtl288FMa70xdRU5eIXVr1eSs3q258Mh2dGxaN9rh\nhcWcc9GOIWr69OnjdDVHEflt406GPjuRnLwCxl13FKkpydEOKSaNn7+BK8ZM5/RerXhsWI9SJQT7\nkptfyE+/pTN2zjq+WbCR7NwCmifX4s/dUxjcM4UereuX6+NVlBUZWfz3x2V8NHMNzsHQXq04tXsL\nPpu1js/nrievwHFMl6Zc3K89x3RpSo0oJEBmNt0512ef5ZQgKEEQqc525OQx9JmJ7NydT35BIQc0\nr8e7I4+oFD9GkbR4w07OeG4inZvV5d0rjqzQPgPZufl8t3ATY2ev48fF6eQWFNK2URKDerTk9F6t\n6NysXoU9dlkt3rCTZ39Yyrg566gZV4Nz+rZh5NEdad0waU+ZTTtzeHvyat6cvJJNO3fTvnESFxzZ\nnrP7tCY5gh02lSCEQQmCSPXmnOOKMdP5btEm3r78CJZuyuTOj+fy9Lm9GKSmhj22ZuUy+Nmf2Z1X\nyGfXDqBF/cSIPfb2XXmMn7+BsbPXMXFpBoUOTu/ViltOPpCUBrUjFkdxZq3exrM/LOWbBRupkxDH\n+Ue049KjOtCsXvHPUW5+IV/OW89rv6xgxqptJCXEccahrbjoyPYc0Lzikx8lCGFQgiBSvf33x2U8\n9NUi/nZaKpcO6EBBoWPIsz+zOTOX7/7vGJIS1E0rr6CQi16ZwrQVW3nniiM4tG3DqMWSkbmbl39e\nzss/L8eAy4/qyJXHdop45z/nHJPStvDsD0v5eWkG9WvHc3G/9lzSvz0NkhJKta+5a7Yz+pcVjJ29\njtyCQgZ0bsJF/dpzfNdmFdb/QglCGJQgiFRfE5dmcMHLkzm1e0uePrfXniaFaSu2cNaoX7n2uM7c\nfPKBUY4y+v7x2XxG/7KCR8/uwVm9W0c7HADWbM3mkfGL+XTWOprUrcVNA7swrE9ralbw6AfnHD8u\nTueZH5YyfeVWmtStxeVHdeC8I9rtd5KyOXM370xdzZhfV7JhRw5tGtXmgiPaMbxPW+onlW/zQ8wm\nCGZ2NXAL0BKYD9zgnJtQQvkR
wK1AF2AH8C1ws3Nug78+HrgDuAhoBSwGbnPOfbWvWJQgiFRPa7ft\nYtDTP9OkbgIfX92fOkFf7je+O4vP56znm5uOpl3jOlGKMvrem7qaWz+cw6UDOvC301KjHc5eZq3e\nxr8+X8DUFVvp0rwud/05lWO6NC33x8nNL+SbBRt59oelLFi/g1YNanPFMR0Z1qdNuffFyCso5Ov5\nG3ntlxVMWbGFxPgavHV5+dbcxGSCYGbDgTeAq4Gf/b+XAKnOuVUhyvcHfgJuBj4BmgPPAVudcyf4\nZR4CLgQuAxYCJwOPAf2cczNLikcJgkj1szu/gGGjfmVZehafXds/5JCzjTtyOO7RH+nfuQkvXrjP\n79EqafrKLZzzwiSO6NiYVy/uW+Fn52XlnOOreRv491eLWLk5m6O7NOWuUw/iwBb715afvnM3Pyze\nxA+LNjFhSQaZu/Pp2KQOVx7bidN7tYrIXA3z123n/WlruOPUrtSqWX6JSKwmCJOBOc65ywOWLQE+\ncM7dEaL8zcB1zrl2AcsuAZ52ztX1768DHnLOPRlQ5kNgl3Pu/JLiUYIgUv3c+fFc3pq8ilHn9+aU\ng1sUW66of8JrfzmsQs5KY9m6bbsY/MxE6tSK49Nr+pe6XT0acvMLef3XFTz13RIyd+czvG8bbhzY\npcTOgoEK/dkhv1+0iR8Wb2LOmu0ANE+uxfFdm3HiQc059sCK6xcQSeEmCBHr2WFmCUBv4NGgVV8D\n/YrZbCLwgJkNAsYBjYFzgC8CytQCcoK22wUM2N+YRaRqeW/aat6avIqrju1UYnIA8JcB7Xl36ir+\nOXY+X11/NAk1Y/MMurzl5BVwxZjp5OQV8Pblh1eK5AC8mRwvO6ojZx7amqe+X8KYX1fy2ax1XHVs\nJy47qmPIpoCdOXn8vCTDTwrSycjcjRn0atOAm0/qwnFdm5HaMrnaDnmNWA2CmaUAa4FjnHM/BSz/\nO3Cecy5kbyAzOwMYDdTGS2i+AYY453b5698CegFDgSXACcCnQJxzrlaI/Y0ERgK0bdu298qVK8vr\nEEUkhs1bu50z/vsLfds35LVLDguryvz7RRv5y+hp3P3ng7jsqI4RiDK6nHNc/84sxs5Zx4sX9OHE\n1ObRDqnMlmdk8e8vFzJ+/kZa1k/k1lMOZEiPVizfnMUPizbx/aJNTF2xhbwCR3JiTY7u0pTjuzbj\nmC5NaVx3r5+OKiXmmhjKkiCYWSpeQvAEMB6vY+MjwCzn3IV+mabAi8AgwAHL8Doy/sU5V+IgWTUx\niBTvpQlpbM3O5YYTu1T6ufG3Zedy2tM/U1DoGHfdgFL9AFzy6hSmrtjK9zcfE3Z1dWVV1Kxyy8kH\ncs1xnaMdTrmYnLaZ+z9fyNy120lOrMkO/+JRXZrX5biuzTj+wGb0btcwZvtYVISYa2IAMoACvI6G\ngZoDG4rZ5g5ginPuEf/+HDPLAiaY2Z3OuTXOuXRgqJkl4jVBrAP+DaSV+xGIVBNbs3J5+KvF5BYU\nMnX5Vp4971Ca1qucZ1UFhd5Z8aYdu3nvyiNLfXb4t9NSOfmJn3j4q8U8enaPCooy+r5ftJGHxy/i\ntENacvWxnaIdTrk5vGNjPr2mP5/OXsuPi9Pp064hx3Vt9ocZDiW0iKVMzrlcYDowMGjVQOCXYjZL\nwksqAhXd/0Pszrkc59xavKTnTLxmBhEpgw9nrCG3oJAbTjyAOWu3Mejpn5m1elu0wyqTJ79bwv9+\nS+cfg7vRs02DUm/fsWldLh3QkQ+mr2Hmqq0VEGH0Ld2UyfVvzyK1ZTKPnFW+11iIBTVqGKf3as2T\n5/TigiPbKzkIU6TrVB4DLjazy8zsIDN7EkgBRgGY2etm9npA+bHAEDO7ysw6+sMenwJmFA2LNLPD\nzewMf/1RwFd4x/VwJA9MpKpwzvHWlFUc2rYBN5zYhQ+v6kdcDWPY87/y3rTV0Q6vVL5buJGnvlvC\n2b1bc+5hbcq8n2uP70yzerX4x2fzKSyMjcnltmfn8d7U1TwyfhGfzlrL0k07KShDbNt35THy9Wkk\n1KzBCxf2oXZCxV1jQSqXiM5P6Zx718waA3fj9SeYB5zqnCvqKdg2qPxoM6sHXAv8B9gOfA/cFlAs\nEbgf6Ahk4o1wuMA5VzlPd0SibMryLaSlZ/HIWYcA0C2lPmOvG8B1b8/g1g/mMHfNdv52WmrM9+pf\nuTmLG9+dRbeUZO4bevB+nRXXrVWTO07tyo3vzuaDGWsY1qfsycb+yM7N59uFm/hs1jr+99sm8goc\nZlDUlSwxvgYHtkgmtWU9Ulsmk5qSTNcWyXtNBFWkoNDx17dnsnprNm9edgStYuDaBhI7NNWyOimK\n/MEN78zku0WbmHLniX84m8wvKOTh8Yt54ac0+rZvyHPn9S7XfgmFhY6vF2zgv/9LI2Pnbnq3a8hh\nHRpxWIdGdG5at1SXxd2VW8AZ//2Fddt2Me66AbRptP9Vys45zvzvL6zaks33Nx8bsavv7c4v4Kff\nMvhs9jq+XbCRXXneZZBPOySFwT1SOKhlMsvSM1mwbgcL1u9gwbodLNywg23ZeQCYQbtGSaSmJO9J\nGlJb1qd5ci0e/HIRL/yUxgOnd2fE4W33EYlUFTE3iiEWKUEQ+aOtWbkc/uB3nNO3DfcOOThkmU9n\nreW2D+fQoHYCoy7oXaZ2/UD5BYWMnbOO535YxpJNmbRvnES3lPpMXbGFTTt3A9AgKZ6+7RtxWPtG\n9O3QiG4pycWOrHDO8X/vzebjWWt55eK+HHdgs/2KL9DcNdsZ/OzPXNq/A3dX4NTD+QWFTErbwmez\n1/LVvA3syMmnYVI8f+reksE9UujbvlGJE/Y451i/PcdLFtb7icP6HazcnL2nTIOkeLZl53HBEe24\nb2jo11qqplgcxSAiMe7DGWvIzS/k3MOKP5sc0rMVnZvV5Yox0xk26lfuH3oww/qWvsp9d34BH0xf\nw6j/LWP1ll10bVGPp87txZ+7tySuhuGcY9WWbKYs38KU5VuYumIL3yzYCEBSQhyHtvVqGPq2b0Sv\ntg32TITzxqSVfDRzLTee2KVckwOA7q3rc07fNoz+ZQXnHNaGzs3K79K8hYWOmau38tmsdXw+dz0Z\nmbnUrVWTk1KbM6hnCgM6Nwl7uKmZkdKgNikNav9hLoOdOXks3rBzT01DvcSa3HpK13I7BqlaVIOg\nGgQRwDvrPPGx/5FcO56Pr+6/z/Jbs3K57u2Z/Lw0gwuOaBd2v4Ts3HzemryKFyeksXHHbnq0acC1\nx3XmhK7N9tmMsGlHDlNWbGHq8i1MXr6FxRt34hzExxmHtG5A91b1eXPySo46oCkvXdinVM0S4dqc\nuZtjH/2RHq0bMObSw/a7x//STTt5f/oaxs1ez9ptu0ioWYMTujZjcI8UjuvarNwvBiSiGgQRKZWp\nK7ayLD2Lh/3OifvSsE4Coy/pyyPjF/P8T2ks2rCjxH4J23fl8fovK3hl4nK2ZudxZMfGP
(remaining base64-encoded PNG data of the plot output omitted)",
211 |       "text/plain": [
212 |        ""
213 |       ]
214 |      },
215 |      "metadata": {},
216 |      "output_type": "display_data"
217 |     }
218 |    ],
219 |    "source": [
220 |     "plot(log)"
221 |    ]
222 |   }
223 |  ],
224 |  "metadata": {
225 |   "kernelspec": {
226 |    "display_name": "Python 2",
227 |    "language": "python",
228 |    "name": "python2"
229 |   },
230 |   "language_info": {
231 |    "codemirror_mode": {
232 |     "name": "ipython",
233 |     "version": 2
234 |    },
235 |    "file_extension": ".py",
236 |    "mimetype": "text/x-python",
237 |    "name": "python",
238 |    "nbconvert_exporter": "python",
239 |    "pygments_lexer": "ipython2",
240 |    "version": "2.7.13"
241 |   }
242 |  },
243 |  "nbformat": 4,
244 |  "nbformat_minor": 2
245 | }
246 | 
--------------------------------------------------------------------------------