├── utils.py ├── dataloader.py ├── svd.py ├── kernel.py ├── README.md ├── dl_environment.yml ├── eigenpro.py ├── NTKAnalysis.ipynb ├── InterpolationWithNoise.ipynb └── DoubleDescentTutorialPart2.ipynb /utils.py: -------------------------------------------------------------------------------- 1 | '''Helper functions.''' 2 | import numpy as np 3 | 4 | 5 | def float_x(data): 6 | '''Set data array precision.''' 7 | return np.float32(data) 8 | -------------------------------------------------------------------------------- /dataloader.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def make_dataset(n=100, d=1, eps=1): 5 | X = np.random.randn(d, n) 6 | w = np.random.randn(1, d) 7 | y = w @ X 8 | y += np.random.randn(1, n) * eps 9 | 10 | X_test = np.linspace(X.min(), X.max(), num=1000).reshape(d, -1) 11 | y_test = w @ X_test 12 | 13 | return X, y, X_test, y_test, w 14 | -------------------------------------------------------------------------------- /svd.py: -------------------------------------------------------------------------------- 1 | '''Utility functions for performing fast SVD.''' 2 | import scipy.linalg as linalg 3 | import numpy as np 4 | 5 | import utils 6 | 7 | 8 | def nystrom_kernel_svd(samples, kernel_fn, top_q): 9 | """Compute top eigensystem of kernel matrix using Nystrom method. 10 | 11 | Arguments: 12 | samples: data matrix of shape (n_sample, n_feature). 13 | kernel_fn: tensor function k(X, Y) that returns kernel matrix. 14 | top_q: top-q eigensystem. 15 | 16 | Returns: 17 | eigvals: top eigenvalues of shape (top_q). 18 | eigvecs: (rescaled) top eigenvectors of shape (n_sample, top_q). 19 | """ 20 | 21 | n_sample, _ = samples.shape 22 | kmat = kernel_fn(samples, samples).cpu().data.numpy() 23 | scaled_kmat = kmat / n_sample 24 | vals, vecs = linalg.eigh(scaled_kmat, 25 | eigvals=(n_sample - top_q, n_sample - 1)) 26 | eigvals = vals[::-1][:top_q] 27 | eigvecs = vecs[:, ::-1][:, :top_q] / np.sqrt(n_sample) 28 | 29 | return utils.float_x(eigvals), utils.float_x(eigvecs) 30 | -------------------------------------------------------------------------------- /kernel.py: -------------------------------------------------------------------------------- 1 | '''Implementation of kernel functions.''' 2 | 3 | import torch 4 | 5 | 6 | def euclidean_distances(samples, centers, squared=True): 7 | '''Calculate the pointwise distance. 8 | 9 | Args: 10 | samples: of shape (n_sample, n_feature). 11 | centers: of shape (n_center, n_feature). 12 | squared: boolean. 13 | 14 | Returns: 15 | pointwise distances (n_sample, n_center). 16 | ''' 17 | samples_norm = torch.sum(samples**2, dim=1, keepdim=True) 18 | if samples is centers: 19 | centers_norm = samples_norm 20 | else: 21 | centers_norm = torch.sum(centers**2, dim=1, keepdim=True) 22 | centers_norm = torch.reshape(centers_norm, (1, -1)) 23 | 24 | distances = samples.mm(torch.t(centers)) 25 | distances.mul_(-2) 26 | distances.add_(samples_norm) 27 | distances.add_(centers_norm) 28 | if not squared: 29 | distances.clamp_(min=0) 30 | distances.sqrt_() 31 | 32 | return distances 33 | 34 | 35 | def gaussian(samples, centers, bandwidth): 36 | '''Gaussian kernel. 37 | 38 | Args: 39 | samples: of shape (n_sample, n_feature). 40 | centers: of shape (n_center, n_feature). 41 | bandwidth: kernel bandwidth. 42 | 43 | Returns: 44 | kernel matrix of shape (n_sample, n_center). 
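        Entries are k(x, z) = exp(-||x - z||_2^2 / (2 * bandwidth^2)), since
        euclidean_distances is called with squared=True (its default).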
45 |     ''' 46 |     assert bandwidth > 0 47 |     kernel_mat = euclidean_distances(samples, centers) 48 |     kernel_mat.clamp_(min=0) 49 |     gamma = 1. / (2 * bandwidth ** 2) 50 |     kernel_mat.mul_(-gamma) 51 |     kernel_mat.exp_() 52 |     return kernel_mat 53 | 54 | 55 | def laplacian(samples, centers, bandwidth): 56 |     '''Laplacian kernel. 57 | 58 |     Args: 59 |         samples: of shape (n_sample, n_feature). 60 |         centers: of shape (n_center, n_feature). 61 |         bandwidth: kernel bandwidth. 62 | 63 |     Returns: 64 |         kernel matrix of shape (n_sample, n_center). 65 |     ''' 66 |     assert bandwidth > 0 67 |     kernel_mat = euclidean_distances(samples, centers, squared=False) 68 |     kernel_mat.clamp_(min=0) 69 |     gamma = 1. / bandwidth 70 |     kernel_mat.mul_(-gamma) 71 |     kernel_mat.exp_() 72 |     return kernel_mat 73 | 74 | 75 | def dispersal(samples, centers, bandwidth, gamma): 76 |     '''Dispersal kernel. 77 | 78 |     Args: 79 |         samples: of shape (n_sample, n_feature). 80 |         centers: of shape (n_center, n_feature). 81 |         bandwidth: kernel bandwidth. 82 |         gamma: dispersal factor. 83 | 84 |     Returns: 85 |         kernel matrix of shape (n_sample, n_center). 86 |     ''' 87 |     assert bandwidth > 0 88 |     kernel_mat = euclidean_distances(samples, centers) 89 |     kernel_mat.pow_(gamma / 2.) 90 |     kernel_mat.mul_(-1. / bandwidth) 91 |     kernel_mat.exp_() 92 |     return kernel_mat 93 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Tutorial for Deep Learning Theory Summer School at Princeton 2 | 3 | This tutorial will cover the following aspects of over-parameterization discussed in https://arxiv.org/abs/2105.14368: 4 | 5 | 1. Interpolation is not at odds with generalization even in the presence of label noise (e.g. using larger and larger models is often beneficial for generalization). 6 | 2. Under certain conditions, as width goes to infinity, training neural networks with gradient descent is equivalent to solving kernel regression with the neural tangent kernel (NTK). 7 | 8 | These results are reviewed in Jupyter notebooks, which we suggest reading and working through in the following order: 9 | 10 | 1. **DoubleDescentTutorial.ipynb** - A review of the double descent phenomenon for neural networks where only the last layer is trained. In this notebook, we begin by reviewing the connection between Random Fourier Features and the Gaussian kernel. We then review the connection between random nonlinear networks and the corresponding neural network Gaussian process (NNGP) kernel. 11 | 2. **DoubleDescentTutorialPart2.ipynb** - A review of the double descent phenomenon for neural networks where all layers are trained. In this notebook, we demonstrate that the performance of neural networks improves as width increases (for classification of a subset of MNIST digits). We then compare this with the performance of the infinite width limit of neural networks given by the NTK. We demonstrate that the NTK is very easy to compute and use and gives better performance than training neural networks in this setting. 12 | 3. **NTKAnalysis.ipynb** - A review of the NTK and how it relates to training infinitely wide neural networks. In this notebook, we present the origin of the NTK (i.e. that it arises by considering the linearization of a neural network around the initial weights). We then go through the calculation for the closed form of the NTK for a 1 hidden layer network. 
We lastly compare training a large width fully connected network to solving kernel regression with the NTK. In particular, we show that the two methods produce nearly identical results on a noiseless regression dataset. 13 | 4. **InterpolationWithNoise.ipynb** - A review of kernel regression in the presence of label noise. In this notebook, we use EigenPro (https://github.com/EigenPro/EigenPro-pytorch) to train Laplace kernels to interpolate noisy data (a subset of MNIST digits). The trained models perform similarly to the Bayes optimal predictor. 14 | 15 | A link to videos for the three tutorials and the associated worksheet is available here: https://www.dropbox.com/sh/skciefkor7s6wy7/AAAqV3NhY1Es_MTq1QBu1t03a?dl=0 16 | 17 | ## A note on library dependencies 18 | Most of this tutorial is meant to be run without a GPU (the GPU would be useful for training large neural networks). I've attached a dl_environment.yml file that lists all libraries I've installed, along with their versions. I also installed auto_tqdm (via pip install auto_tqdm) to keep track of progress bars when training neural networks or computing gradients for the empirical NTK. 19 | -------------------------------------------------------------------------------- /dl_environment.yml: -------------------------------------------------------------------------------- 1 | name: dl_tutorial 2 | channels: 3 |   - nvidia 4 |   - anaconda 5 |   - pytorch 6 |   - defaults 7 | dependencies: 8 |   - backcall=0.2.0=py_0 9 |   - decorator=4.4.2=py_0 10 |   - ipykernel=5.3.4=py37h5ca1d4c_0 11 |   - ipython=7.18.1=py37h5ca1d4c_0 12 |   - ipython_genutils=0.2.0=py37_0 13 |   - jedi=0.17.2=py37_0 14 |   - jupyter_client=6.1.7=py_0 15 |   - jupyter_core=4.6.3=py37_0 16 |   - libsodium=1.0.18=h7b6447c_0 17 |   - parso=0.7.0=py_0 18 |   - pexpect=4.8.0=py37_1 19 |   - pickleshare=0.7.5=py37_1001 20 |   - prompt-toolkit=3.0.8=py_0 21 |   - ptyprocess=0.6.0=py37_0 22 |   - pygments=2.7.1=py_0 23 |   - python-dateutil=2.8.1=py_0 24 |   - pyzmq=19.0.2=py37he6710b0_1 25 |   - six=1.15.0=py_0 26 |   - tornado=6.0.4=py37h7b6447c_1 27 |   - traitlets=5.0.5=py_0 28 |   - wcwidth=0.2.5=py_0 29 |   - zeromq=4.3.3=he6710b0_3 30 |   - _libgcc_mutex=0.1=main 31 |   - async_generator=1.10=pyhd3eb1b0_0 32 |   - attrs=21.2.0=pyhd3eb1b0_0 33 |   - blas=1.0=mkl 34 |   - bleach=3.3.1=pyhd3eb1b0_0 35 |   - bzip2=1.0.8=h7b6447c_0 36 |   - ca-certificates=2021.7.5=h06a4308_1 37 |   - certifi=2021.5.30=py37h06a4308_0 38 |   - cycler=0.10.0=py37_0 39 |   - dbus=1.13.18=hb2f20db_0 40 |   - defusedxml=0.7.1=pyhd3eb1b0_0 41 |   - entrypoints=0.3=py37_0 42 |   - expat=2.4.1=h2531618_2 43 |   - fontconfig=2.13.1=h6c09931_0 44 |   - freetype=2.10.4=h5ab3b9f_0 45 |   - giflib=5.2.1=h7b6447c_0 46 |   - glib=2.69.0=h5202010_0 47 |   - gmp=6.2.1=h2531618_2 48 |   - gnutls=3.6.15=he1e5248_0 49 |   - gst-plugins-base=1.14.0=h8213a91_2 50 |   - gstreamer=1.14.0=h28cd5cc_2 51 |   - icu=58.2=he6710b0_3 52 |   - importlib-metadata=3.10.0=py37h06a4308_0 53 |   - importlib_metadata=3.10.0=hd3eb1b0_0 54 |   - intel-openmp=2021.3.0=h06a4308_3350 55 |   - jinja2=3.0.1=pyhd3eb1b0_0 56 |   - jpeg=9b=h024ee3a_2 57 |   - jsonschema=3.2.0=py_2 58 |   - jupyterlab_pygments=0.1.2=py_0 59 |   - kiwisolver=1.3.1=py37h2531618_0 60 |   - lame=3.100=h7b6447c_0 61 |   - lcms2=2.12=h3be6417_0 62 |   - ld_impl_linux-64=2.35.1=h7274673_9 63 |   - libffi=3.3=he6710b0_2 64 |   - libgcc-ng=9.1.0=hdf63c60_0 65 |   - libiconv=1.15=h63c8f33_5 66 |   - libidn2=2.3.2=h7f8727e_0 67 |   - libpng=1.6.37=hbc83047_0 68 |   - libstdcxx-ng=9.1.0=hdf63c60_0 69 |   - libtasn1=4.16.0=h27cfd23_0 70 |   - libtiff=4.2.0=h85742a9_0 71 |   - 
libunistring=0.9.10=h27cfd23_0 72 |   - libuuid=1.0.3=h1bed415_2 73 |   - libuv=1.40.0=h7b6447c_0 74 |   - libwebp=1.2.0=h89dd481_0 75 |   - libwebp-base=1.2.0=h27cfd23_0 76 |   - libxcb=1.14=h7b6447c_0 77 |   - libxml2=2.9.10=hb55368b_3 78 |   - lz4-c=1.9.3=h2531618_0 79 |   - markupsafe=2.0.1=py37h27cfd23_0 80 |   - matplotlib=3.3.4=py37h06a4308_0 81 |   - matplotlib-base=3.3.4=py37h62a2d02_0 82 |   - mistune=0.8.4=py37h14c3975_1001 83 |   - mkl=2021.3.0=h06a4308_520 84 |   - mkl-service=2.4.0=py37h7f8727e_0 85 |   - mkl_fft=1.3.0=py37h42c9631_2 86 |   - mkl_random=1.2.2=py37h51133e4_0 87 |   - nbclient=0.5.3=pyhd3eb1b0_0 88 |   - nbconvert=6.1.0=py37h06a4308_0 89 |   - nbformat=5.1.3=pyhd3eb1b0_0 90 |   - ncurses=6.2=he6710b0_1 91 |   - nest-asyncio=1.5.1=pyhd3eb1b0_0 92 |   - nettle=3.7.3=hbbd107a_1 93 |   - ninja=1.10.2=hff7bd54_1 94 |   - numpy=1.20.3=py37hf144106_0 95 |   - numpy-base=1.20.3=py37h74d4b33_0 96 |   - olefile=0.46=py_0 97 |   - openh264=2.1.0=hd408876_0 98 |   - openssl=1.1.1k=h27cfd23_0 99 |   - packaging=21.0=pyhd3eb1b0_0 100 |   - pandocfilters=1.4.3=py37h06a4308_1 101 |   - pcre=8.45=h295c915_0 102 |   - pillow=8.3.1=py37h5aabda8_0 103 |   - pip=21.1.3=py37h06a4308_0 104 |   - pyparsing=2.4.7=pyhd3eb1b0_0 105 |   - pyqt=5.9.2=py37h05f1152_2 106 |   - pyrsistent=0.17.3=py37h7b6447c_0 107 |   - python=3.7.10=h12debd9_4 108 |   - qt=5.9.7=h5867ecd_1 109 |   - readline=8.1=h27cfd23_0 110 |   - setuptools=52.0.0=py37h06a4308_0 111 |   - sip=4.19.8=py37hf484d3e_0 112 |   - sqlite=3.36.0=hc218d9a_0 113 |   - testpath=0.5.0=pyhd3eb1b0_0 114 |   - tk=8.6.10=hbc83047_0 115 |   - typing_extensions=3.10.0.0=pyh06a4308_0 116 |   - webencodings=0.5.1=py37_1 117 |   - wheel=0.36.2=pyhd3eb1b0_0 118 |   - xz=5.2.5=h7b6447c_0 119 |   - zipp=3.5.0=pyhd3eb1b0_0 120 |   - zlib=1.2.11=h7b6447c_3 121 |   - zstd=1.4.9=haebb681_0 122 |   - cudatoolkit=11.1.74=h6bb024c_0 123 |   - ffmpeg=4.3=hf484d3e_0 124 |   - pytorch=1.9.0=py3.7_cuda11.1_cudnn8.0.5_0 125 |   - torchaudio=0.9.0=py37 126 |   - torchvision=0.10.0=py37_cu111 127 | 128 | -------------------------------------------------------------------------------- /eigenpro.py: -------------------------------------------------------------------------------- 1 | '''Construct kernel model with EigenPro optimizer.''' 2 | import collections 3 | import time 4 | import torch 5 | 6 | import torch.nn as nn 7 | import numpy as np 8 | 9 | import svd 10 | import utils 11 | 12 | 13 | def asm_eigenpro_fn(samples, map_fn, top_q, bs_gpu, alpha, min_q=5, seed=1): 14 |     """Prepare gradient map for EigenPro and calculate 15 |     scale factor for learning rate such that the update rule, 16 |         p <- p - eta * g 17 |     becomes, 18 |         p <- p - scale * eta * (g - eigenpro_fn(g)) 19 | 20 |     Arguments: 21 |         samples: matrix of shape (n_sample, n_feature). 22 |         map_fn: kernel k(samples, centers) where centers are specified. 23 |         top_q: top-q eigensystem for constructing eigenpro iteration/kernel. 24 |         bs_gpu: maximum batch size corresponding to GPU memory. 25 |         alpha: exponential factor (<= 1) for eigenvalue rescaling due to approximation. 26 |         min_q: minimum value of q when q (if None) is calculated automatically. 27 |         seed: seed for random number generation. 28 | 29 |     Returns: 30 |         eigenpro_fn: tensor function. 31 |         scale: factor that rescales learning rate. 32 |         top_eigval: largest eigenvalue. 33 |         beta: largest k(x, x) for the EigenPro kernel. 
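    Note: composed with the kernel matrix, eigenpro_fn(g) approximates the
    projection of g onto the top (q - 1) eigendirections, each rescaled by
    (1 - (lambda_q / lambda_i)^alpha). Subtracting it from g damps the
    dominant directions, so the effective top eigenvalue drops to roughly
    top_eigval / scale and a much larger learning rate remains stable.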
34 |     """ 35 | 36 |     np.random.seed(seed)  # set random seed for subsamples 37 |     start = time.time() 38 |     n_sample, _ = samples.shape 39 | 40 |     if top_q is None: 41 |         svd_q = min(n_sample - 1, 1000) 42 |     else: 43 |         svd_q = top_q 44 | 45 |     eigvals, eigvecs = svd.nystrom_kernel_svd(samples, map_fn, svd_q) 46 | 47 |     # Choose k such that the batch size is bounded by 48 |     # the subsample size and the memory size. 49 |     # Keep the original k if it is pre-specified. 50 |     if top_q is None: 51 |         max_bs = min(max(n_sample / 5, bs_gpu), n_sample) 52 |         top_q = np.sum(np.power(1 / eigvals, alpha) < max_bs) - 1 53 |         top_q = max(top_q, min_q) 54 | 55 |     eigvals, tail_eigval = eigvals[:top_q - 1], eigvals[top_q - 1] 56 |     eigvecs = eigvecs[:, :top_q - 1] 57 | 58 |     device = samples.device 59 |     eigvals_t = torch.tensor(eigvals.copy()).to(device) 60 |     eigvecs_t = torch.tensor(eigvecs).to(device) 61 |     tail_eigval_t = torch.tensor(tail_eigval, dtype=torch.float).to(device) 62 | 63 |     scale = utils.float_x(np.power(eigvals[0] / tail_eigval, alpha)) 64 |     diag_t = (1 - torch.pow(tail_eigval_t / eigvals_t, alpha)) / eigvals_t 65 | 66 |     def eigenpro_fn(grad, kmat): 67 |         '''Function to apply EigenPro preconditioner.''' 68 |         return torch.mm(eigvecs_t * diag_t, 69 |                         torch.t(torch.mm(torch.mm(torch.t(grad), 70 |                                                   kmat), 71 |                                          eigvecs_t))) 72 | 73 |     print("SVD time: %.2f, top_q: %d, top_eigval: %.2f, new top_eigval: %.2e" % 74 |           (time.time() - start, top_q, eigvals[0], eigvals[0] / scale)) 75 | 76 |     knorms = 1 - np.sum(eigvecs ** 2, axis=1) * n_sample 77 |     beta = np.max(knorms) 78 | 79 |     return eigenpro_fn, scale, eigvals[0], utils.float_x(beta) 80 | 81 | 82 | class FKR_EigenPro(nn.Module): 83 |     '''Fast Kernel Regression using EigenPro iteration.''' 84 |     def __init__(self, kernel_fn, centers, y_dim, device="cuda"): 85 |         super(FKR_EigenPro, self).__init__() 86 |         self.kernel_fn = kernel_fn 87 |         self.n_centers, self.x_dim = centers.shape 88 |         self.device = device 89 |         self.pinned_list = [] 90 | 91 |         self.centers = self.tensor(centers, release=True) 92 |         self.weight = self.tensor(torch.zeros( 93 |             self.n_centers, y_dim), release=True) 94 | 95 |     def __del__(self): 96 |         for pinned in self.pinned_list: 97 |             _ = pinned.to("cpu") 98 |         torch.cuda.empty_cache() 99 | 100 |     def tensor(self, data, dtype=None, release=False): 101 |         tensor = torch.tensor(data, dtype=dtype, 102 |                               requires_grad=False).to(self.device) 103 |         if release: 104 |             self.pinned_list.append(tensor) 105 |         return tensor 106 | 107 |     def kernel_matrix(self, samples): 108 |         return self.kernel_fn(samples, self.centers) 109 | 110 |     def forward(self, samples, weight=None): 111 |         if weight is None: 112 |             weight = self.weight 113 |         kmat = self.kernel_matrix(samples) 114 |         pred = kmat.mm(weight) 115 |         return pred 116 | 117 |     def primal_gradient(self, samples, labels, weight): 118 |         pred = self.forward(samples, weight) 119 |         grad = pred - labels 120 |         return grad 121 | 122 |     @staticmethod 123 |     def _compute_opt_params(bs, bs_gpu, beta, top_eigval): 124 |         if bs is None: 125 |             bs = min(np.int32(beta / top_eigval + 1), bs_gpu) 126 | 127 |         if bs < beta / top_eigval + 1: 128 |             eta = bs / beta 129 |         else: 130 |             eta = 0.99 * 2 * bs / (beta + (bs - 1) * top_eigval) 131 |         return bs, utils.float_x(eta) 132 | 133 |     def eigenpro_iterate(self, samples, x_batch, y_batch, eigenpro_fn, 134 |                          eta, sample_ids, batch_ids): 135 |         # update random coordinate block (for mini-batch) 136 |         grad = self.primal_gradient(x_batch, y_batch, self.weight) 137 |         self.weight.index_add_(0, batch_ids, -eta * grad) 138 | 
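        # The update above is plain SGD on the sampled batch; the correction
        # below (applied on the fixed subsample block) implements the
        # preconditioned rule p <- p - eta * (g - eigenpro_fn(g)).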
139 | # update fixed coordinate block (for EigenPro) 140 | kmat = self.kernel_fn(x_batch, samples) 141 | correction = eigenpro_fn(grad, kmat) 142 | self.weight.index_add_(0, sample_ids, eta * correction) 143 | return 144 | 145 | def evaluate(self, x_eval, y_eval, bs, 146 | metrics=('mse', 'multiclass-acc')): 147 | p_list = [] 148 | n_sample, _ = x_eval.shape 149 | n_batch = n_sample / min(n_sample, bs) 150 | for batch_ids in np.array_split(range(n_sample), n_batch): 151 | x_batch = self.tensor(x_eval[batch_ids]) 152 | p_batch = self.forward(x_batch).cpu().data.numpy() 153 | p_list.append(p_batch) 154 | p_eval = np.vstack(p_list) 155 | 156 | eval_metrics = collections.OrderedDict() 157 | if 'mse' in metrics: 158 | eval_metrics['mse'] = np.mean(np.square(p_eval - y_eval)) 159 | if 'multiclass-acc' in metrics: 160 | y_class = np.argmax(y_eval, axis=-1) 161 | p_class = np.argmax(p_eval, axis=-1) 162 | eval_metrics['multiclass-acc'] = np.mean(y_class == p_class) 163 | 164 | return eval_metrics 165 | 166 | def fit(self, x_train, y_train, x_val, y_val, epochs, mem_gb, 167 | n_subsamples=None, top_q=None, bs=None, eta=None, 168 | n_train_eval=5000, run_epoch_eval=True, scale=1, seed=1): 169 | 170 | n_samples, n_labels = y_train.shape 171 | if n_subsamples is None: 172 | if n_samples < 100000: 173 | n_subsamples = min(n_samples, 2000) 174 | else: 175 | n_subsamples = 12000 176 | 177 | mem_bytes = (mem_gb - 1) * 1024**3 # preserve 1GB 178 | bsizes = np.arange(n_subsamples) 179 | mem_usages = ((self.x_dim + 3 * n_labels + bsizes + 1) 180 | * self.n_centers + n_subsamples * 1000) * 4 181 | bs_gpu = np.sum(mem_usages < mem_bytes) # device-dependent batch size 182 | 183 | # Calculate batch size / learning rate for improved EigenPro iteration. 184 | np.random.seed(seed) 185 | sample_ids = np.random.choice(n_samples, n_subsamples, replace=False) 186 | sample_ids = self.tensor(sample_ids) 187 | samples = self.centers[sample_ids] 188 | eigenpro_f, gap, top_eigval, beta = asm_eigenpro_fn( 189 | samples, self.kernel_fn, top_q, bs_gpu, alpha=.95, seed=seed) 190 | new_top_eigval = top_eigval / gap 191 | 192 | if eta is None: 193 | bs, eta = self._compute_opt_params( 194 | bs, bs_gpu, beta, new_top_eigval) 195 | else: 196 | bs, _ = self._compute_opt_params(bs, bs_gpu, beta, new_top_eigval) 197 | 198 | print("n_subsamples=%d, bs_gpu=%d, eta=%.2f, bs=%d, top_eigval=%.2e, beta=%.2f" % 199 | (n_subsamples, bs_gpu, eta, bs, top_eigval, beta)) 200 | eta = self.tensor(scale * eta / bs, dtype=torch.float) 201 | 202 | # Subsample training data for fast estimation of training loss. 
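        # (evaluating on all of x_train after every epoch would dominate run time)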
203 |         ids = np.random.choice(n_samples, 204 |                                min(n_samples, n_train_eval), 205 |                                replace=False) 206 |         x_train_eval, y_train_eval = x_train[ids], y_train[ids] 207 | 208 |         res = dict() 209 |         initial_epoch = 0 210 |         train_sec = 0  # training time in seconds 211 | 212 |         for epoch in epochs: 213 |             start = time.time() 214 |             for _ in range(epoch - initial_epoch): 215 |                 epoch_ids = np.random.choice( 216 |                     n_samples, n_samples // bs * bs, replace=False) 217 |                 for batch_ids in np.array_split(epoch_ids, n_samples / bs): 218 |                     x_batch = self.tensor(x_train[batch_ids]) 219 |                     y_batch = self.tensor(y_train[batch_ids]) 220 |                     batch_ids = self.tensor(batch_ids) 221 |                     self.eigenpro_iterate(samples, x_batch, y_batch, eigenpro_f, 222 |                                           eta, sample_ids, batch_ids) 223 |                     del x_batch, y_batch, batch_ids 224 | 225 |             if run_epoch_eval: 226 |                 train_sec += time.time() - start 227 |                 tr_score = self.evaluate(x_train_eval, y_train_eval, bs) 228 |                 tv_score = self.evaluate(x_val, y_val, bs) 229 |                 print("train error: %.2f%%\tval error: %.2f%% (%d epochs, %.2f seconds)\t" 230 |                       "train l2: %.2e\tval l2: %.2e" % 231 |                       ((1 - tr_score['multiclass-acc']) * 100, 232 |                        (1 - tv_score['multiclass-acc']) * 100, 233 |                        epoch, train_sec, tr_score['mse'], tv_score['mse'])) 234 |                 res[epoch] = (tr_score, tv_score, train_sec) 235 | 236 |             initial_epoch = epoch 237 | 238 |         return res 239 | -------------------------------------------------------------------------------- /NTKAnalysis.ipynb: -------------------------------------------------------------------------------- 1 | { 2 |  "cells": [ 3 |   { 4 |    "cell_type": "markdown", 5 |    "id": "5992bf0f", 6 |    "metadata": {}, 7 |    "source": [ 8 |     "# NTK Derivation and Analysis\n", 9 |     "\n", 10 |     "In this notebook, we will derive a closed form for the NTK for 1 hidden layer ReLU networks. We will then present experiments to show that the NTK can be used to describe the behavior of large width neural networks. \n", 11 |     "\n", 12 |     "We begin with a derivation of the NTK below (this is basically the solution to Section 2 Problem 2 of the worksheet shared in the GitHub repo). " 13 |    ] 14 |   }, 15 |   { 16 |    "cell_type": "markdown", 17 |    "id": "a8dbae44", 18 |    "metadata": {}, 19 |    "source": [ 20 |     "## Derivation of the NTK \n", 21 |     "\n", 22 |     "Suppose we are given a dataset $\\{(x^{(i)}, y^{(i)})\\}_{i=1}^{n} \\subset \\mathbb{R}^{d} \\times \\mathbb{R}$ (also written as $X \\in \\mathbb{R}^{d \\times n}, y \\in \\mathbb{R}^{1 \\times n}$). Let $f$ denote a 1 hidden layer neural network with parameters $\\mathbf{W}$. To train the neural network $f$ to fit the data $(X, y)$, we typically use gradient descent to minimize the following loss: \n", 23 |     "\n", 24 |     "\\begin{align*}\n", 25 |     "\\mathcal{L}(\\mathbf{W}) = \\sum_{i=1}^{n} (y^{(i)} - f(\\mathbf{W} ; x^{(i)}))^2 \n", 26 |     "\\end{align*}\n", 27 |     "\n", 28 |     "**Important:** Note that the network $f$ is written as a function of parameters $\\mathbf{W}$ and data $x^{(i)}$, as opposed to just data. For the neural tangent kernel derivation, we consider the cross section of $f$ given by fixing the data component and writing the neural network as a function of parameters, i.e. consider $f_x(\\mathbf{W}): \\mathbb{R}^{dk + k} \\to \\mathbb{R}$. \n", 29 |     "\n", 30 |     "### Linearization around Initialization\n", 31 |     "Before training the network as usual, let us consider the following alternative. 
Viewing the neural network as only a function of parameters, we train the linear approximation for $f_x(\\mathbf{W})$, which is given as follows: \n", 32 |     "\n", 33 |     "\\begin{align*}\n", 34 |     "\\tilde{f_x}(\\mathbf{W}) = f_x(\\mathbf{W}^{(0)}) + \\nabla f_x(\\mathbf{W}^{(0)})^T (\\mathbf{W} - \\mathbf{W}^{(0)}) ~;\n", 35 |     "\\end{align*}\n", 36 |     "where $\\mathbf{W}^{(0)} \\in \\mathbb{R}^{dk + k}$ denotes the parameters at initialization and $\\nabla f_x(\\mathbf{W}^{(0)})^T \\in \\mathbb{R}^{1 \\times (dk + k)}$ denotes the gradient of $f_x(\\mathbf{W})$. Instead of minimizing the loss for the model $f(\\mathbf{W} ; x^{(i)})$ given above, we instead minimize the following loss: \n", 37 |     "\n", 38 |     "\\begin{align*}\n", 39 |     "    \\tilde{\\mathcal{L}}(\\mathbf{W}) = \\sum_{i=1}^{n} (y^{(i)} - \\tilde{f}_{x^{(i)}}(\\mathbf{W}))^2 = \\sum_{i=1}^{n} (y^{(i)} - f_{x^{(i)}}(\\mathbf{W}^{(0)}) - \\nabla f_{x^{(i)}}(\\mathbf{W}^{(0)})^T (\\mathbf{W} - \\mathbf{W}^{(0)}))^2\n", 40 |     "\\end{align*}\n", 41 |     "\n", 42 |     "Minimizing this loss naively can be computationally expensive since the vector $\\mathbf{W} \\in \\mathbb{R}^{kd + k}$ depends on $k$, which can be arbitrarily large. To remedy this, we let $\\mathbf{W} = \\mathbf{W}^{(0)} + \\sum_{i=1}^{n} \\nabla f_{x^{(i)}}(\\mathbf{W}^{(0)})\\alpha_i$. \n", 43 |     "\n", 44 |     "\n", 45 |     "**Remark:** At this point, you should be asking why this is a reasonable step to take. The rationale for this step is that we can use this to find the minimum norm minimizer, which lies in the span of the gradient features $\\nabla f_{x^{(i)}}(\\mathbf{W}^{(0)})$. If you haven't seen this trick before, I encourage you to review the Representer theorem. \n", 46 |     "\n", 47 |     "Using the new form for $\\mathbf{W}$, we can simplify our loss $\\tilde{\\mathcal{L}}(\\mathbf{W})$ as follows: \n", 48 |     "\\begin{align*}\n", 49 |     "\\tilde{\\mathcal{L}}(\\mathbf{W}) = \\sum_{i=1}^{n} (y^{(i)} - f_{x^{(i)}}(\\mathbf{W}^{(0)}) - \\alpha k(x^{(i)}) )^2 ~;\n", 50 |     "\\end{align*}\n", 51 |     "where $\\alpha \\in \\mathbb{R}^{1 \\times n}$ and $$k(x) = \\begin{bmatrix} \\langle \\nabla f_{x}(\\mathbf{W}^{(0)}), \\nabla f_{x^{(1)}}(\\mathbf{W}^{(0)}) \\rangle \\\\ \\langle \\nabla f_{x}(\\mathbf{W}^{(0)}), \\nabla f_{x^{(2)}}(\\mathbf{W}^{(0)}) \\rangle \\\\ \\vdots \\\\ \\langle \\nabla f_{x}(\\mathbf{W}^{(0)}), \\nabla f_{x^{(n)}}(\\mathbf{W}^{(0)}) \\rangle \\end{bmatrix} \\in \\mathbb{R}^{n}$$\n", 52 |     "\n", 53 |     "We can now recognize minimizing the loss $\\tilde{\\mathcal{L}}(\\mathbf{W})$ as solving the following system of equations: \n", 54 |     "\\begin{align*}\n", 55 |     "    \\alpha K = y - f_X(\\mathbf{W}^{(0)}) ~;\n", 56 |     "\\end{align*}\n", 57 |     "where $K \\in \\mathbb{R}^{n \\times n}$ with $K_{i,j} = \\langle \\nabla f_{x^{(i)}}(\\mathbf{W}^{(0)}), \\nabla f_{x^{(j)}}(\\mathbf{W}^{(0)}) \\rangle$ and $f_X(\\mathbf{W}^{(0)}) \\in \\mathbb{R}^{1 \\times n}$ with $f_X(\\mathbf{W}^{(0)})_i = f_{x^{(i)}}(\\mathbf{W}^{(0)})$. \n", 58 |     "\n", 59 |     "**Definition [NTK]:** The function $K_{i,j}$ above is written generally as the following Neural Tangent Kernel:\n", 60 |     "$$ K(x, x') = \\langle \\nabla f_{x}(\\mathbf{W}^{(0)}), \\nabla f_{x'}(\\mathbf{W}^{(0)}) \\rangle $$\n", 61 |     "\n", 62 |     "**Remarks:** This kernel can of course be evaluated using any auto-differentiation software (e.g. PyTorch, TensorFlow, JAX, etc.). This is generally memory (and runtime) expensive since neural networks can have millions or billions of parameters. 
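As a minimal illustration (a sketch for small models only — `empirical_ntk` is our name, not a function from this repo), the empirical NTK of a scalar-output PyTorch module can be assembled from per-example gradients:

```python
import torch

def empirical_ntk(model, X):
    """K[i, j] = <grad_W f(W; x_i), grad_W f(W; x_j)> at the current weights.

    Stores one flattened gradient per example, so memory scales as
    n_samples * n_params -- fine for toy models, prohibitive for large ones.
    """
    grads = []
    for x in X:
        model.zero_grad()
        model(x.unsqueeze(0)).squeeze().backward()  # one backward pass per example
        grads.append(torch.cat([p.grad.flatten() for p in model.parameters()]))
    G = torch.stack(grads)  # shape (n_samples, n_params)
    return G @ G.T          # Gram matrix of the parameter gradients
```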
On the other hand, we can actually analytically compute the kernel $K$ when the width of neural networks approaches infinity. We do this below. \n" 63 |    ] 64 |   }, 65 |   { 66 |    "cell_type": "markdown", 67 |    "id": "3f964c28", 68 |    "metadata": {}, 69 |    "source": [ 70 |     "### Analytical Evaluation of the NTK (1 Hidden Layer)\n", 71 |     "Thus far, we have defined the NTK without explicitly computing it for a given architecture. We now write a closed form for the NTK given a specific architecture. In particular, let $f$ denote a 1 hidden layer network defined as follows: \n", 72 |     "\\begin{align*}\n", 73 |     "    f(\\mathbf{W} ; x) = a \\frac{\\sqrt{c}}{\\sqrt{k}} \\phi(Bx) ~;\n", 74 |     "\\end{align*}\n", 75 |     "where $a \\in \\mathbb{R}^{1 \\times k}, B \\in \\mathbb{R}^{k \\times d}$ are the trainable parameters ($\\mathbf{W} = [a_1, a_2, \\ldots a_k, B_{1,1}, B_{1,2}, \\ldots B_{k, d}]^T \\in \\mathbb{R}^{k + dk}$ denotes the vector containing all trainable parameters), $c \\in \\mathbb{R}$ is an absolute constant, and $\\phi: \\mathbb{R} \\to \\mathbb{R}$ is an elementwise nonlinearity. \n", 76 |     "\n", 77 |     "Let us now compute the NTK $K(x, x') = \\langle \\nabla f_{x}(\\mathbf{W}^{(0)}), \\nabla f_{x'}(\\mathbf{W}^{(0)}) \\rangle$ as $k \\to \\infty$ assuming that $\\mathbf{W}_j^{(0)} \\overset{i.i.d.}{\\sim} \\mathcal{N}(0, 1)$. Letting $\\mathbf{W} = [a_1, a_2, \\ldots a_k, B_{1,1}, B_{1,2}, \\ldots B_{k, d}] $, we compute $\\nabla f_{x}(\\mathbf{W}^{(0)})$ as follows: \n", 78 |     "\n", 79 |     "\\begin{align*}\n", 80 |     "    \\nabla f_{x}(\\mathbf{W}) = \\begin{bmatrix}\\frac{\\partial f_{x}}{\\partial a_1} \\\\ \\frac{\\partial f_{x}}{\\partial a_2} \\\\ \\vdots \\\\ \\frac{\\partial f_{x}}{\\partial a_k} \\\\ \\frac{\\partial f_{x}}{\\partial B_{1,1}} \\\\ \\vdots \\\\ \\frac{\\partial f_{x}}{\\partial B_{k, d}}\n", 81 |     "    \\end{bmatrix}\n", 82 |     "\\end{align*}\n", 83 |     "\n", 84 |     "We thus first calculate $\\frac{\\partial f_{x}}{\\partial a_j}$ and $\\frac{\\partial f_{x}}{\\partial B_{j, \\ell}}$: \n", 85 |     "\\begin{align*}\n", 86 |     "    \\frac{\\partial f_{x}}{\\partial a_j} = \\frac{\\sqrt{c}}{\\sqrt{k}} \\phi(B_{j, :} x) \\\\\n", 87 |     "    \\frac{\\partial f_{x}}{\\partial B_{j, \\ell}} = a_j \\frac{\\sqrt{c}}{\\sqrt{k}} \\phi'(B_{j,:}x) x_{\\ell} \n", 88 |     "\\end{align*}\n", 89 |     "\n", 90 |     "Now that we have all the relevant terms to compute $\\nabla f_x(\\mathbf{W}^{(0)})$, we can compute $K(x, x')$ as follows: \n", 91 |     "\\begin{align*}\n", 92 |     "    K(x, x') &= \\langle \\nabla f_{x}(\\mathbf{W}^{(0)}), \\nabla f_{x'}(\\mathbf{W}^{(0)}) \\rangle \\\\\n", 93 |     "    &= \\sum_{j=1}^{k} \\frac{\\partial f_x(\\mathbf{W}^{(0)})}{\\partial a_j} \\frac{\\partial f_{x'}(\\mathbf{W}^{(0)})}{\\partial a_j} + \\sum_{j=1}^{k} \\sum_{\\ell = 1}^{d} \\frac{\\partial f_x(\\mathbf{W}^{(0)})}{\\partial B_{j, \\ell}} \\frac{\\partial f_{x'}(\\mathbf{W}^{(0)})}{\\partial B_{j, \\ell}} \\\\\n", 94 |     "    &= \\color{red}{\\text{$\\frac{c}{k} \\sum_{j=1}^{k} \\phi(B_{j, :} x) \\phi(B_{j, :} x')$}} ~ + ~ \\color{blue}{\\text{$\\frac{c}{k} \\sum_{j=1}^{k} \\sum_{\\ell=1}^{d} a_j^2 \\phi'(B_{j, :} x) \\phi'(B_{j, :} x') x_{\\ell} x'_{\\ell}$}} \\\\\n", 95 |     "    &= \\color{red}{\\text{$\\frac{c}{k} \\sum_{j=1}^{k} \\phi(B_{j, :} x) \\phi(B_{j, :} x')$}} ~ + ~ \\color{blue}{\\text{$\\frac{c}{k} \\sum_{j=1}^{k} a_j^2 \\phi'(B_{j, :} x) \\phi'(B_{j, :} x') \\sum_{\\ell=1}^{d} x_{\\ell} x'_{\\ell}$}} \\\\\n", 96 |     "    &= \\color{red}{\\text{$\\frac{c}{k} \\sum_{j=1}^{k} \\phi(B_{j, :} x) \\phi(B_{j, :} x')$}} ~ + ~ \\langle x, x' \\rangle \\color{blue}{\\text{$\\frac{c}{k} \\sum_{j=1}^{k} a_j^2 \\phi'(B_{j, :} x) \\phi'(B_{j, :} x') $}} \n", 97 |     "\\end{align*}\n", 98 |     "\n", 99 |     "**Remark:** Do the red and blue terms look familiar? If you worked through the notebook *DoubleDescentTutorial*, they should. Indeed, as $k \\to \\infty$, the terms in red and blue correspond to the NNGP kernel for a network with activation $\\phi$ and $\\phi'$ respectively. We know how to evaluate these using dual activations. Namely, we have: \n", 100 |     "\\begin{align*}\n", 101 |     "    \\color{red}{\\text{$\\frac{c}{k} \\sum_{j=1}^{k} \\phi(B_{j, :} x) \\phi(B_{j, :} x')$}} &\\to c \\mathbb{E}_{(u, v) \\sim \\mathcal{N}(\\mathbf{0}, \\Lambda)} [\\phi(u) \\phi(v) ] \\\\\n", 102 |     "    \\color{blue}{\\text{$\\frac{c}{k} \\sum_{j=1}^{k} a_j^2 \\phi'(B_{j, :} x) \\phi'(B_{j, :} x') $}} &\\to c \\mathbb{E}_{(u, v) \\sim \\mathcal{N}(\\mathbf{0}, \\Lambda)} [\\phi'(u) \\phi'(v)] \\\\\n", 103 |     "    \\Lambda &= \\begin{bmatrix} \\|x\\|_2^2 & x^T x' \\\\ x^T x' & \\|x'\\|_2^2 \\end{bmatrix}\n", 104 |     "\\end{align*}\n", 105 |     "\n", 106 |     "Let $\\xi = x^T x'$ and $\\check{\\phi}$ denote the dual of $\\phi$. Assuming $\\phi$ is homogeneous of degree 1 and that $\\|x\\|_2 = \\|x'\\|_2 = 1$, we conclude: \n", 107 |     "\\begin{align*}\n", 108 |     "    K(x, x') = \\check{\\phi}(\\xi) + \\xi \\check{\\phi'}(\\xi)\n", 109 |     "\\end{align*}\n", 110 |     "\n", 111 |     "Recalling that the dual activation is computed in closed form for a number of nonlinearities including ReLU, we now have a closed form for the NTK. Next, let's try training some simple neural networks to verify that the NTK does describe the training dynamics of large neural networks. " 112 |    ] 113 |   }, 
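Before moving on, here is a small sketch (our own helper, not a cell from the original notebook) that evaluates this closed form for $\phi = $ ReLU with $c = 2$, using the standard arc-cosine duals $\check{\phi}(\xi) = (\sqrt{1 - \xi^2} + (\pi - \arccos\xi)\,\xi)/\pi$ and $\check{\phi'}(\xi) = (\pi - \arccos\xi)/\pi$ for unit-norm inputs:

```python
import numpy as np

def relu_ntk(X1, X2):
    """Closed-form infinite-width NTK of a 1-hidden-layer ReLU network (c = 2).

    Assumes the rows of X1 and X2 have unit norm, matching the derivation above.
    """
    xi = np.clip(X1 @ X2.T, -1.0, 1.0)  # inner products <x, x'>
    theta = np.arccos(xi)
    dual_phi = (np.sqrt(1 - xi ** 2) + (np.pi - theta) * xi) / np.pi  # dual of relu
    dual_dphi = (np.pi - theta) / np.pi                               # dual of relu'
    return dual_phi + xi * dual_dphi

# Kernel regression with this NTK (X, y, X_test as in the cells below):
# alpha = np.linalg.solve(relu_ntk(X, X), y)
# preds = relu_ntk(X_test, X) @ alpha
```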
114 |   { 115 |    "cell_type": "markdown", 116 |    "id": "30982ecb", 117 |    "metadata": {}, 118 |    "source": [ 119 |     "## Training Neural Nets vs. 
Using the NTK" 120 | ] 121 | }, 122 | { 123 | "cell_type": "code", 124 | "execution_count": 170, 125 | "id": "fb4a3f93", 126 | "metadata": {}, 127 | "outputs": [ 128 | { 129 | "name": "stdout", 130 | "output_type": "stream", 131 | "text": [ 132 | "(32, 100) (32, 1) (100, 100) (100, 1)\n" 133 | ] 134 | } 135 | ], 136 | "source": [ 137 | "# Loading high dimensional linear data\n", 138 | "import dataloader as dl\n", 139 | "import numpy as np\n", 140 | "from numpy.linalg import norm\n", 141 | "import matplotlib.pyplot as plt\n", 142 | "%matplotlib inline\n", 143 | "\n", 144 | "SEED = 2134\n", 145 | "\n", 146 | "np.random.seed(SEED)\n", 147 | "d = 100\n", 148 | "n = 32\n", 149 | "n_test = 100\n", 150 | "\n", 151 | "X = np.random.randn(n, d)\n", 152 | "X = X / norm(X, axis=-1).reshape(-1, 1)\n", 153 | "X_test = np.random.randn(n_test, d)\n", 154 | "X_test = X_test / norm(X_test, axis=-1).reshape(-1, 1)\n", 155 | "w = np.random.randn(1, d)\n", 156 | "y = (w @ X.T).T\n", 157 | "y_test = (w @ X_test.T).T\n", 158 | "print(X.shape, y.shape, X_test.shape, y_test.shape)" 159 | ] 160 | }, 161 | { 162 | "cell_type": "code", 163 | "execution_count": 171, 164 | "id": "c1ae559d", 165 | "metadata": {}, 166 | "outputs": [], 167 | "source": [ 168 | "## We now need to define and train a neural network to map x^{(i)} to y^{(i)}\n", 169 | "import torch\n", 170 | "import torch.nn as nn\n", 171 | "import torch.nn.functional as F\n", 172 | "\n", 173 | "# Abstraction for nonlinearity \n", 174 | "class Nonlinearity(torch.nn.Module):\n", 175 | " \n", 176 | " def __init__(self):\n", 177 | " super(Nonlinearity, self).__init__()\n", 178 | "\n", 179 | " def forward(self, x):\n", 180 | " # return F.leaky_relu(x)\n", 181 | " return F.relu(x)\n", 182 | " \n", 183 | "class Net(nn.Module):\n", 184 | "\n", 185 | " def __init__(self, width, f_in):\n", 186 | " super(Net, self).__init__()\n", 187 | "\n", 188 | " self.k = width\n", 189 | " self.first = nn.Sequential(nn.Linear(f_in, self.k, bias=True), \n", 190 | " Nonlinearity())\n", 191 | " self.sec = nn.Linear(self.k, 1, bias=False)\n", 192 | "\n", 193 | " def forward(self, x):\n", 194 | " #C = np.sqrt(2/(.01**2 + 1)) * 1/np.sqrt(self.k)\n", 195 | " C = np.sqrt(2/self.k)\n", 196 | " o = self.first(x) * C\n", 197 | " return self.sec(o)" 198 | ] 199 | }, 200 | { 201 | "cell_type": "code", 202 | "execution_count": 185, 203 | "id": "3e5cc8cf", 204 | "metadata": {}, 205 | "outputs": [ 206 | { 207 | "name": "stdout", 208 | "output_type": "stream", 209 | "text": [ 210 | "torch.Size([16000, 100])\n", 211 | "torch.Size([16000])\n", 212 | "torch.Size([1, 16000])\n" 213 | ] 214 | }, 215 | { 216 | "data": { 217 | "application/vnd.jupyter.widget-view+json": { 218 | "model_id": "cc04b360b5454c97ab466ddb2d35d864", 219 | "version_major": 2, 220 | "version_minor": 0 221 | }, 222 | "text/plain": [ 223 | " 0%| | 0/100000 [00:00= max_labels:\n", 53 | " early_exit = True\n", 54 | " for label in classes: \n", 55 | " early_exit &= len(classes[label]) >= max_per_class\n", 56 | " if early_exit: \n", 57 | " break\n", 58 | "\n", 59 | "all_train_examples = []\n", 60 | "all_train_labels = []\n", 61 | "for label in classes:\n", 62 | " label_vec = torch.zeros(max_labels)\n", 63 | " label_vec[label] = 1.\n", 64 | " all_train_examples.extend(classes[label])\n", 65 | " all_train_labels.extend([label_vec]*len(classes[label]))\n", 66 | " \n", 67 | "all_test_labels = [] \n", 68 | "for label in test_labels: \n", 69 | " label = label.data.numpy().item()\n", 70 | " label_vec = torch.zeros(max_labels)\n", 71 | " 
label_vec[label] = 1.\n", 72 | " all_test_labels.append(label_vec)\n", 73 | " \n", 74 | " \n", 75 | "train_set = torch.stack(all_train_examples, dim=0).view(max_labels * max_per_class, -1)\n", 76 | "train_set = train_set / torch.norm(train_set, p=2, dim=1).view(-1, 1)\n", 77 | "train_labels = torch.stack(all_train_labels, dim=0)\n", 78 | "\n", 79 | "test_set = test_imgs.view(-1, 28*28)\n", 80 | "test_set = test_set / torch.norm(test_set, p=2, dim=1).view(-1, 1) \n", 81 | "test_labels = torch.stack(all_test_labels, dim=0)\n", 82 | "\n", 83 | "print(\"Train Set: \", train_set.shape)\n", 84 | "print(\"Train Labels: \", train_labels.shape)\n", 85 | "print(\"Test Set: \", test_set.shape)\n", 86 | "print(\"Test Labels: \", test_labels.shape)" 87 | ] 88 | }, 89 | { 90 | "cell_type": "markdown", 91 | "metadata": {}, 92 | "source": [ 93 | "### Interpolation with EigenPro\n", 94 | "\n", 95 | "Below we use the Laplace kernel to classify MNIST digits from pixels. We make use of the EigenPro library (https://github.com/EigenPro/EigenPro-pytorch) below for solving kernel regression. " 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": 15, 101 | "metadata": {}, 102 | "outputs": [ 103 | { 104 | "name": "stdout", 105 | "output_type": "stream", 106 | "text": [ 107 | "probability: 0.00 & Number of labels corrupted: 0\n" 108 | ] 109 | }, 110 | { 111 | "name": "stderr", 112 | "output_type": "stream", 113 | "text": [ 114 | "/home/aradha/princeton_dl_tutorial/eigenpro.py:102: UserWarning: To copy construct from a tensor, it is recommended to use sourceTensor.clone().detach() or sourceTensor.clone().detach().requires_grad_(True), rather than torch.tensor(sourceTensor).\n", 115 | " requires_grad=False).to(self.device)\n" 116 | ] 117 | }, 118 | { 119 | "name": "stdout", 120 | "output_type": "stream", 121 | "text": [ 122 | "SVD time: 0.69, top_q: 36, top_eigval: 0.90, new top_eigval: 5.20e-04\n", 123 | "n_subsamples=2000, bs_gpu=2000, eta=1923.82, bs=1905, top_eigval=8.97e-01, beta=0.99\n", 124 | "train error: 0.00%\tval error: 4.58% (150 epochs, 1.47 seconds)\ttrain l2: 1.57e-07\tval l2: 1.44e-02\n", 125 | "probability: 0.10 & Number of labels corrupted: 371\n", 126 | "SVD time: 0.69, top_q: 36, top_eigval: 0.90, new top_eigval: 5.20e-04\n", 127 | "n_subsamples=2000, bs_gpu=2000, eta=1923.82, bs=1905, top_eigval=8.97e-01, beta=0.99\n", 128 | "train error: 0.00%\tval error: 4.89% (150 epochs, 1.44 seconds)\ttrain l2: 2.14e-06\tval l2: 1.77e-02\n", 129 | "probability: 0.20 & Number of labels corrupted: 805\n", 130 | "SVD time: 0.70, top_q: 36, top_eigval: 0.90, new top_eigval: 5.20e-04\n", 131 | "n_subsamples=2000, bs_gpu=2000, eta=1923.82, bs=1905, top_eigval=8.97e-01, beta=0.99\n", 132 | "train error: 0.00%\tval error: 5.56% (150 epochs, 1.44 seconds)\ttrain l2: 5.00e-06\tval l2: 2.27e-02\n", 133 | "probability: 0.30 & Number of labels corrupted: 1204\n", 134 | "SVD time: 0.69, top_q: 36, top_eigval: 0.90, new top_eigval: 5.20e-04\n", 135 | "n_subsamples=2000, bs_gpu=2000, eta=1923.82, bs=1905, top_eigval=8.97e-01, beta=0.99\n", 136 | "train error: 0.00%\tval error: 7.54% (150 epochs, 1.44 seconds)\ttrain l2: 7.06e-06\tval l2: 2.89e-02\n", 137 | "probability: 0.40 & Number of labels corrupted: 1610\n", 138 | "SVD time: 0.69, top_q: 36, top_eigval: 0.90, new top_eigval: 5.20e-04\n", 139 | "n_subsamples=2000, bs_gpu=2000, eta=1923.82, bs=1905, top_eigval=8.97e-01, beta=0.99\n", 140 | "train error: 0.00%\tval error: 10.51% (150 epochs, 1.44 seconds)\ttrain l2: 8.20e-06\tval l2: 3.57e-02\n", 141 | 
"probability: 0.50 & Number of labels corrupted: 2016\n", 142 | "SVD time: 0.69, top_q: 36, top_eigval: 0.90, new top_eigval: 5.20e-04\n", 143 | "n_subsamples=2000, bs_gpu=2000, eta=1923.82, bs=1905, top_eigval=8.97e-01, beta=0.99\n", 144 | "train error: 0.00%\tval error: 15.34% (150 epochs, 1.44 seconds)\ttrain l2: 9.80e-06\tval l2: 4.36e-02\n", 145 | "probability: 0.60 & Number of labels corrupted: 2429\n", 146 | "SVD time: 0.70, top_q: 36, top_eigval: 0.90, new top_eigval: 5.20e-04\n", 147 | "n_subsamples=2000, bs_gpu=2000, eta=1923.82, bs=1905, top_eigval=8.97e-01, beta=0.99\n", 148 | "train error: 0.00%\tval error: 23.33% (150 epochs, 1.45 seconds)\ttrain l2: 1.12e-05\tval l2: 5.31e-02\n", 149 | "probability: 0.70 & Number of labels corrupted: 2809\n", 150 | "SVD time: 0.70, top_q: 36, top_eigval: 0.90, new top_eigval: 5.20e-04\n", 151 | "n_subsamples=2000, bs_gpu=2000, eta=1923.82, bs=1905, top_eigval=8.97e-01, beta=0.99\n", 152 | "train error: 0.00%\tval error: 35.97% (150 epochs, 1.44 seconds)\ttrain l2: 1.26e-05\tval l2: 6.29e-02\n", 153 | "probability: 0.80 & Number of labels corrupted: 3189\n", 154 | "SVD time: 0.69, top_q: 36, top_eigval: 0.90, new top_eigval: 5.20e-04\n", 155 | "n_subsamples=2000, bs_gpu=2000, eta=1923.82, bs=1905, top_eigval=8.97e-01, beta=0.99\n", 156 | "train error: 0.00%\tval error: 53.70% (150 epochs, 1.44 seconds)\ttrain l2: 1.31e-05\tval l2: 7.45e-02\n", 157 | "probability: 0.90 & Number of labels corrupted: 3583\n", 158 | "SVD time: 0.70, top_q: 36, top_eigval: 0.90, new top_eigval: 5.20e-04\n", 159 | "n_subsamples=2000, bs_gpu=2000, eta=1923.82, bs=1905, top_eigval=8.97e-01, beta=0.99\n", 160 | "train error: 0.00%\tval error: 73.56% (150 epochs, 1.44 seconds)\ttrain l2: 1.33e-05\tval l2: 8.73e-02\n" 161 | ] 162 | }, 163 | { 164 | "data": { 165 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAYIAAAEWCAYAAABrDZDcAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAA7QklEQVR4nO3de7xVc/7H8de705VSdHEpKUS55KBCQgehaDC6uoaRNO633G8ZTMMMfiS5TG6jGjHTkMtkCsmlolIoSXRGQ0WJJJ0+vz++69Q+u33O3qfOPvucvT/Px2M/zl73z157n/VZ6/td6/uVmeGccy531ch0AM455zLLE4FzzuU4TwTOOZfjPBE451yO80TgnHM5zhOBc87lOE8EOUrSZEm/28xlW0r6UVJeGuL6UdKuFbSuAZKmVMS6XCDpZEmLo+9p/zSsf66krhW9Xlc2TwQVSNIiSUenOO9mH4grW/znMrOvzKy+mRVV9Lai9S4s73KSWkkySTUrOqZo/bdI+jU6AK6QNFXSIenYVhV3N3Bh9D19GD8x+g4+klQjZtztkkalsnIz29vMJldUsDG/ix+j1zeSXpTUrRzryPoTCk8E1VQ6zsZdUmPMrD7QFJgCPC9J8TNl+XezCzA3yTw7Af0qIZbyaBR9d/sB/wZekDQgsyFVHZ4I0qT4LELS3ZK+l/SFpO7RtD8AhwEPRGcpD0Tj20r6t6TvJM2T1CdmfaMkPSRpgqSfgIJo3IhomVWS3pC0S8wynSVNk7Qy+tu5lFh3k/QfScslLZP0jKRG0bSngJbAv6JYr44/+5a0k6TxUdwLJJ0Xs+5bJI2V9GQU41xJHcrYbyZp95jP/KCkl6Jl35O0WymLvhn9XRHFueFsPdF3EI1vKOkxSUsk/Tc6c016EDezX4EngB2AxqV8NztJGidpabTdi2O220nSdEk/RGeof46ZdnB0tbFC0izFFJMoXEUOlfR2tD9ek9QkZnqXmGUXFx/oJNWJ9sFX0fZGSKpXyv6vIekGSV9K+jb63hpG6/gRyANmSfq8jF00DLhVpVydSfpN9DtYEX2mdjHTNlx9lrafot/DRXHrnC3ppDJiAsDM/mdm9wG3AH9UdOUi6RpJn0f79WNJJ0fj2wEjgEOi39WKaPzxkj6MYlss6ZZk267SzMxfFfQCFgFHR+8HAL8C5xH+eS4AvgYUTZ8M/C5m2a2BxcDZQE3gAGAZsHc0fRSwEjiUkMDrRuNWAYcDdYD7gCnR/NsB3wNnROvrHw03jt8+sDvQLVpHU8JB9d5EnysabgUYUDMafgMYHsWUDywFjoqm3QKsAXpE++FO4N0y9qEBu8d85u+ATtFneAYYXcpyJWJK8Tv4B/BwtO+bAe8D55ey/luAp6P3dYA/AYtL+W62AmYANwG1gV2BhcCx0fzvAGdE7+sDB0fvmwPLo31VI/pOlgNNY76zz4E9gHrR8F3RtJbRb6E/UAtoDORH0+4FxhN+Ew2AfwF3lvI5zwEWRDHXB54Hnkr0/ZTx/bWJPn/x7+t2YFT0fg/gp+iz1QKujrZXO8H/UGn7qQ/wXsw294v2U+1UfhfR+F2j8e2i4d6EK5kaQN8oxh1jfkdT4pbvCuwbzd8e+AY4KdPHoM0+dmU6gGx6sWkiWBAzbavoh7dDNDyZkomgL/BW3PoeBm6O3o8CnoybPoqYA2P0z1IE7ExIAO/Hzf8OMCDR9uPmOwn4MNHnioY3/HNF2yoCGsRMvzPmH/8WYGLMtL2An8vYh/GJ4NGYaT2AT0tZbpN/+LK+A2B74BegXsz0/sCkUtZ/C7AWWAF8C/wHODDRdwMcBHwVt/y1wF+j928CtwJN4uYZQsxBNxr3KnBWzHd2Q8y0wcArMet/IUHcIhzUdosZdwjwRSmf83VgcMzwnoRkWpz0U0kEu0ff1VeEpBmbCG4ExsbMXwP4L9A1wf9QafupDuEEoU00fDcwPNXfRTS+bjT+0FKWmwmcGPM7mlLaZ47muRf4S1nzVOWXFw2l1/+K35jZ6uht/VLm3QU4KLpcXhFdgp5GOGgVW5xguQ3jzOxHwj/ITtHry7h5vyScdZYgqZmk0VHxyA/A00CT+PlKsRPwnZmtKmM7/4t5vxqoW1qxQQLxy5a2/5IuH/cd7EI4I10Ss78fJlwZlGasmTUys2ZmdqSZzYiZFvvd7ALsFPddXkdIPgDnEs6MP1UosjshZrnecct1AXZM9HkouT92JlwtxGtKdIUSs85XovGJxP9uviQk/O0Tz56YmU0gJIKBZa3fzNYT9t0mv0tK2U9m9gswFjg9KtrpDzxVnvhitvcdgKQzJc2M2Uf7UMb/gKSDJE2Kiv5WAoPKmr+qS8sdFi4l8c2+LgbeMLOy7mZI1FTszsVvJNUnXP5/Hb12iZu3JeEgEO/OaN3tzWx5VNb6QJLtFvsa2E5Sg5hk0JJwlleZytuM7mLCFUETM1tXwdtfTDjjbpNwRrPPgP7RQey3wHOSGkfLPWVm5yVaLonFhCK0eMuAnwlFjKl8J/G/m5bAOkLRR3ndAIwG/ha3/n2LBySJ8BveJLbS9pOZ/USoo3mKUGm/2szeKWdsJxOu7OYp1Ks9AhwFvGNmRZJmEq6mIPFv62+E/5HuZrZG0r1U40TgVwSZ8w2hnLLYi8Aeks6QVCt6dYytSCtFj6iSsDYwlFB2uhiYEK3vVEk1JfUlFMu8mGAdDYAfCRWtzYGrksS6QbStqcCdkupKak84k3smSdwVbSmwnlLijGdmS4DXgHskbRNVku4m6YgKiOV94AdJQyTVk5QnaR9JHQEknS6paXQ2vCJapohwJdZT0rHRMnUldZXUIoVtPgMcLalP9H03lpQfbeMR4C+SmkXbby7p2FLW8yxwmaTW0YnFHYS7pcqdLC3cBvoRcFbM6LHA8ZKOklQLuIKQkKfGL1/GfiI68K8H7qEcVwOStpd0IXAzcG207q0JB/ul0TxnE64Iin0DtIj+x4o1IFwJr5HUCTg11RiqIk8EmXMf0Evhbpb7o7PpYwi33X1NKAL4I6E8tCx/I/yovwMOJBQnYWbLgRMI/2jLCZVyJ5jZsgTruJVQOb0SeIlQQRjrTuCG6LL5ygTL9yeUxX4NvECo1/h3krgrVFTs8wfg7SjOg1NY7ExCZe7HhIr05yhZDLO5sRQBPQkV518QzsofBRpGsxwHzFW4C+c+oJ+ZrYmS6omEYqSlhLP8q0jh/9TMviKUy19B+C3MJFSiQqh7WAC8GxX9TSSU/SfyOOHA+mYU+xrgolLmTcUNhKvU4jjnAacD/0fYLz2Bnma2NsGyCfdTzPQnCVcXT6cQxwqFO7o+Iuyn3mb2eBTTx4SE8g7hoL8v8HbMsv8h3DL7P0nF/z+DgdskrSLcFDA2hRiqrOK7J1w1pPCQTqGZ3ZDpWJyrbJLOBAaaWZdMx1Ld+RWBc67akbQV4ax8ZKZjyQaeCJxz1UpUv7GUUIzztySzuxR40ZBzzuU4vyJwzrkcV+2eI2jSpIm1atUq02E451y1MmPGjGVmlvBBwmqXCFq1asX06dMzHYZzzlUrkuJbGt
jAi4accy7HeSJwzrkc54nAOedyXLWrI0jk119/pbCwkDVr1iSf2VWaunXr0qJFC2rVqpXpUJxzZciKRFBYWEiDBg1o1aoV2rTnQJcBZsby5cspLCykdevWmQ7HOVeGrCgaWrNmDY0bN/YkUIVIonHjxn6V5tyWGjYMJk0qOW7SpDC+gmRFIgA8CVRB/p04VwE6doQ+fTYmg0mTwnDHjhW2iawoGnLOuaxVUABjx4aD/wUXwEMPheGCggrbRNZcEWTS8uXLyc/PJz8/nx122IHmzZtvGF67NlEz6yVNnjyZqVM36ZcDgFGjRtG0adMN68vPz+fjjz+u6I/gnKvKWreGoiIYOjQkgwpMApCLVwTDhoVLqtgdOWkSTJsGV1+9Wats3LgxM2fOBOCWW26hfv36XHllov5bEps8eTL169enc+fOCaf37duXBx54IOE0gKKiIvLy8kodLs26deuoWTP3fgLOVTtffBH+XnttuCIoKPArgi1SCeVtADNmzOCII47gwAMP5Nhjj2XJkiUA3H///ey11160b9+efv36sWjRIkaMGMFf/vIX8vPzeeutt1Ja/+TJkykoKODUU09l33333WR4zZo1nH322ey7777sv//+TIo+76hRo+jduzc9e/bkmGOOqdDP7JxLgyeeCMeocePgjjs2FhPFVyBvgew8HezaddNxffrA4MFw0EGw005w7LGw446wZAm0awdfRs1wLFsGvXqVXHby5HJt3sy46KKL+Oc//0nTpk0ZM2YM119/PY8//jh33XUXX3zxBXXq1GHFihU0atSIQYMGlXkVMWbMGKZMmbJh+J13Qj/d77//PnPmzKF169ZMnjy5xPA999wDwEcffcSnn37KMcccw/z58zcsP3v2bLbbbrtNN+acqzo+/BDOPhsuuWTjFUBxncG0aRV2VZCdiSCZbbcNSeCrr6BlyzBcgX755RfmzJlDt27dgFBUs+OOoSvc9u3bc9ppp3HSSSdx0kknpbS+0oqGOnXqVOIe/djhKVOmcNFFoavZtm3bsssuu2xIBN26dfMk4FxVV1QE550HzZrBTTeVnFbBRUPZmQjKOoPfaiu4+eZwhXDjjaG87eabN+7UJk3KfQUQz8zYe++9N5y5x3rppZd48803GT9+PEOHDmXu3LmbvZ2tt9661OGyOhyKX845VwX93//BjBkwenSFn6zGy706guI6gbFj4bbb0lLeVqdOHZYuXbohEfz666/MnTuX9evXs3jxYgoKChg2bBgrVqzgxx9/pEGDBqxatarCtg9w+OGH88wzzwAwf/58vvrqK/bcc88K3YZzLk2+/BJuuAGOPz4cn9IsrYlA0nGS5klaIOmaBNOvkjQzes2RVCQpvWUW06aVvAc3trytgtSoUYPnnnuOIUOGsN9++5Gfn8/UqVMpKiri9NNP31CBe9lll9GoUSN69uzJCy+8UGpl8ZgxY0rcPlraraaxBg8eTFFREfvuuy99+/Zl1KhR1KlTp8I+o3MujT77DBo3hgcfhEp4MDNtfRZLygPmA92AQmAa0N/MEt4EL6kncJmZHVnWejt06GDxHdN88skntGvXrkLidhXLvxvnNtO6dVCBt3dLmmFmHRJNS+cVQSdggZktNLO1wGjgxDLm7w88m8Z4nHOuavv+exgxIlQUV+IzPulMBM2BxTHDhdG4TUjaCjgOGFfK9IGSpkuavnTp0goP1DnnqoSrr4YLL4R58yp1s+lMBIkKtkorh+oJvG1m3yWaaGYjzayDmXVo2jRh38vOOVe9vfEGPPooXHEF7LVXpW46nYmgENg5ZrgF8HUp8/bDi4Wcc7lqzRoYODC0KXTzzZW++XQWQk0D2khqDfyXcLA/NX4mSQ2BI4DT0xiLc85VXXfcAfPnw2uvhWedKlnaEoGZrZN0IfAqkAc8bmZzJQ2Kpo+IZj0ZeM3MfkpXLM45V6UVFMD69RC1RlDZ0vocgZlNMLM9zGw3M/tDNG5ETBLAzEaZWb90xpFuW9IM9fTp07n44ouTbqO0lknLa/LkyTRs2LDEcwkTJ06skHU75zZTQQHcfnvGNp+dTUxUsmTNUJfV3HOHDh3o0CHhrb0lpPIQWaoOO+wwXnzxxVKnmxlmRo0aNRIOlybV5q+dc5FHHgl3CN1xB9SunbEwcq+JiUoyYMAALr/8cgoKChgyZAjvv/8+nTt3Zv/996dz587Mi24Pmzx5MieccAIQksg555xD165d2XXXXbn//vs3rK9+/fob5u/atSu9evWibdu2nHbaaRvaFZowYQJt27alS5cuXHzxxRvWm4pFixbRrl07Bg8ezAEHHMBbb71VYnjx4sVcddVV7LPPPuy7776MGTNmQzyxzV8751L03//ClVfCrFlQq1ZGQ8nKK4KuCZqh7tOnD4MHD2b16tX06NFjk+kDBgxgwIABLFu2jF5xzVBP3sxG6ObPn8/EiRPJy8vjhx9+4M0336RmzZpMnDiR6667jnHjNn1s4tNPP2XSpEmsWrWKPffckwsuuIBacT+SDz/8kLlz57LTTjtx6KGH8vbbb9OhQwfOP/983nzzTVq3bk3//v1Ljeutt94iPz9/w/C4cePIy8tj3rx5/PWvf2X48OEsWrSoxPC4ceOYOXMms2bNYtmyZXTs2JHDDz8cKNkctnMuRRdfDGvXhgfIMty/d1Ymgqqid+/eG4pKVq5cyVlnncVnn32GJH799deEyxx//PHUqVOHOnXq0KxZM7755htatGhRYp5OnTptGJefn8+iRYuoX78+u+6664aDcf/+/Rk5cmTCbSQqGlq0aBG77LILBx988IZxscNTpkyhf//+5OXlsf3223PEEUcwbdo0ttlmm02aw3bOJfGPf8Dzz8Ndd8Fuu2U6muxMBGWdwW+11VZlTm/SpMlmXwHEi23u+cYbb6SgoIAXXniBRYsWJbxqAUo0DJeXl8e6detSmqci2ozyZq2dqwRFReGhsf32g8svz3Q0gNcRVJqVK1fSvHloYWPUqFEVvv62bduycOFCFi1aBLChDL+iHH744YwZM4aioiKWLl3Km2++SadOnSp0G87lhLw8eOklePLJjNcNFPNEUEmuvvpqrr32Wg499FCKiooqfP316tVj+PDhHHfccXTp0oXtt9+ehg0bJpy3uI6g+PXcc88lXf/JJ59M+/bt2W+//TjyyCMZNmwYO+ywQ0V/DOey28qV4W/bttC+fWZjiZG2ZqjTxZuhLt2PP/5I/fr1MTN+//vf06ZNGy677LKMxuTfjXORtWvhwAPDMwMxdwRWlkw1Q+0q2SOPPEJ+fj577703K1eu5Pzzz890SM65YnffDXPmZOzp4bJkZWVxrrrssssyfgXgnEtg/vzQNW7v3tCzZ6aj2UTWXBFUtyKuXODfiXOAGZx/PtStC/fdl+loEsqKRFC3bl2WL1/uB54qxMxYvnw5devWzXQozmXWwoXw0Ufwpz/BjjtmOpqEsqJoqEWLFhQWFuK9l1UtdevW3eRhOOdyzm67hfaEtt0205GUKisSQa1atfzJVudc1fPyy3DMMdC4caYjKVPKR
UOStpW0t6RdJWVFkZJzzqXNyy9Djx6h+8kqrswrgqj3sN8D/YHawFKgLrC9pHeB4WY2Ke1ROudcdfLTT3DBBdCuHQwYkOlokkpWNPQc8CRwmJmtiJ0g6UDgDEm7mtljaYrPOeeqn5tugi+/hLfegpi2waqqMhOBmZX65IOZzQBmVHhEzjlXnc2YAffeG24Z7dIl09GkJK1l/ZKOkzRP0gJJ15QyT1dJMyXNlfRGOuNxzrm0W7cODjssNDFdTWz2XUOSPjCzA8qYngc8CHQDCoFpksab2ccx8zQChgPHmdlXkpptbjzOOVclHHQQVFBT9pVls68IykoCkU7AAjNbaGZrgdHAiXHznAo8b2ZfRev8dnPjcc65jPriCxgyJFQUVzMpJQJJrSXVjRmuJ6lVksWaA4tjhgujcbH2ALaVNFnSDElnlrL9gZKmS5ruD40556ocs3CX0PDh8N13mY6m3FK9Ivg7sD5muCgaV5ZEnXDGtwFREzgQOB44FrhR0h6bLGQ20sw6mFmHpk2bphiyc85VkmefhVdfhTvugJ13znQ05ZZqHUHNqHgHADNbK6l2kmUKgdg90gL4OsE8y8zsJ+AnSW8C+wHzU4zLOecya/lyuPRS6NQJBg/OdDSbJdUrgqWSflM8IOlEYFmSZaYBbaJipdpAP2B83Dz/BA6TVFPSVsBBwCcpxuScc5l3zTXw/ffwyCOhG8pqKNUrgkHAM5IeiIYLgYTl+cXMbJ2kC4FXgTzgcTObK2lQNH2EmX0i6RVgNqHo6VEzm7M5H8Q55zLiyivhkEOqVNeT5VWuriol1Y+WWZW+kMqWqKtK55yrdOvXQ43q0+zaFndVKekOSY3M7EczWxU1QHd7xYbpnHPVyI03Qq9e4QGyai7VdNY9tq0hM/se6JGWiJxzrqr76CMYNgzq14ea1b81/1QTQZ6kDS0nSaoHVP2WlJxzrqIVFcF550GjRnDPPZmOpkKkmsqeBl6X9FfCswDnEFoldc653DJiBLz3Hjz9dJXvcCZVKSUCMxsmaTZwNOFBsaFm9mpaI3POuapm3bpwFXDMMXDqqZmOpsKkXLhlZq8Ar0jaGjhZ0ktmdnz6QnPOuSqmZk14/31YswaUqPGE6inVu4ZqSzpJ0lhgCXAUMCKtkTnnXFWyYEGoH2jSBFq0yHQ0FarMRCCpm6THgS+AXsBTwHdmdraZ/asyAnTOuYxbuRIOPxwGDsx0JGmR7IrgVWA3oIuZnR4d/NcnWcY556q/YcNgUtQl+zXXwDffQMeOYXyWSZYIDgTeBSZK+rekcwnNRTjnXHbr2BH69IH77w93Cv32t+Ehso4dMx1ZhUu5iQlJhwL9gVOAmcALZjYyfaEl5k1MOOcqzcsvQ8+esPXWUKsW/P3vUFCQ6ag2yxY3MQFgZm+b2YWEzmXuBQ6pmPCcc66KatMmPDj2ww+hielqmgSSKXeLSWa23sxeNbOz0xGQc85VGYsXh9tEb7wRHnpoY51Blqk+Tec551xl+d//oG/fUEcwdizcdlv426dPViYDTwTOORfLDM4+G55/Hu6+e2NxUEFBSAbTpmU2vjQod7N5kgZmopLYOecqxQMPwCuvhI7ozzqr5LSCgqysJ9icK4JBFR6Fc85VBXPmwFVXwQknwKDcOdRtTiJIuYENScdJmidpgaRrEkzvKmmlpJnR66bNiMc557acWXhyuGFDeOyxrGpLKJnN6VGhZyozScoDHgS6Efo4niZpvJl9HDfrW2Z2wmbE4ZxzFUeCJ56AJUugWbNMR1OpNuf20cIUZ+0ELDCzhWa2FhgNnFje7TnnXNp9/XW4ImjTJrQplGPSeddQc2BxzHBhNC7eIZJmSXpZ0t5pjMc55za1bBkceGBoTyhHpbOzzUQFbPHtWXwA7GJmP0rqAfwDaLPJiqSBwECAli1bVnCYzrmcZQa/+x18911WdTRTXpt9RSCpW5JZCoGdY4ZbAF/HzmBmP5jZj9H7CUAtSU3iV2RmI82sg5l1aNq06eaG7JxzJT3yCPzzn3DnnbDffpmOJmO2pGjosSTTpwFtJLWWVBvoB4yPnUHSDlKompfUKYpn+RbE5JxzqZk3Dy67DI4+Gi69NNPRZFSZRUOSxpc2CSiz12YzWyfpQkKfBnnA42Y2V9KgaPoIQmc3F0haB/wM9LNUm0N1zrktsWQJtGwZ7hSqkduNLJTZDLWk74HTgR/jJwFjzGz7NMaWkDdD7ZyrMOvX50wSKKsZ6mSVxe8Cq83sjQQrnVcRwTnnXKV64w2YOhWuvhryvJ8tSJIIzKx7GdNy72Zb51z19v33cMYZULcuXHxx6HDGJa0jULIy+1Tmcc65jDML7QctWRKuCDwJbJCscGySpIsklbh5X1JtSUdKegI4q5RlnXOu6njqqY19C2Rhv8NbIlkdwXHAOcCzkloDK4C6hLuAXgP+YmYz0xmgc85tsVWrwi2ihx8e6gZcCcnqCNYAw4HhkmoBTYCfzWxFJcTmnHMVo0EDeOkl2GknryBOIOUmJszsV2BJGmNxzrmK98UX0Lo1HHJIpiOpsnLjBlrnXG565x3YYw945plMR1KleSJwzmWnH36A006DFi1Cj2OuVEkTgaQ8SRMrIxjnnKswF18MX34JTz8deh1zpUqaCMysCFgtyfekc656GDs2tCF0/fVw6KGZjqbKS7WyeA3wkaR/Az8VjzSzi9MSlXPObYnVq6GgAG68MdORVAupJoKXopdzzlV9AwbAWWflVAf0WyKlRGBmT0R9CuwRjZoX3U7qnHNVx//9HzRqBKef7kmgHFK6a0hSV+Az4EHCA2bzJXmjc865qmPGDLjiitDjmCuXVIuG7gGOMbN5AJL2AJ4FDkxXYM45l7Kffgq3ijZrBiNH+tVAOaWaCGoVJwEAM5sfNTnhnHOZd8UVMH8+TJwI222X6WiqnVQTwQxJjwFPRcOnATPSE5JzzpXDzJnw8MNw1VVw5JGZjqZaSvXJ4kHAXOBi4BLg42hcmSQdJ2mepAWSriljvo6SiiT1SjEe55wL8vPh5Zdh6NBMR1JtJb0ikFQDmGFm+wB/TnXFkvIIlcvdgEJgmqTxZvZxgvn+SOjk3jnnUrN+PSxYENoSOu64TEdTraXyZPF6YFZ85zQp6AQsMLOFZrYWGA2cmGC+i4BxwLflXL9zLpc98ADssw/MmpXpSKq9VOsIdgTmSnqfkk8W/6aMZZoDi2OGC4GDYmeQ1Bw4GTgSKLXLIEkDgYEALVuWNx8557LORx+FDmaOPRbat890NNVeqong1s1Yd6L7t+L7Nr4XGGJmRSrjdi8zGwmMBOjQoYP3j+xcLluzBk49NTw49thjfqtoBUi1juDBqI6gPAqBnWOGWwBfx83TARgdJYEmQA9J68zsH+XclnMuV1xzDcyZEyqImzXLdDRZIWkiMLP1kmZJamlmX5Vj3dOANlFfx/8F+gGnxq27dfF7SaOAFz0JOOdKZRa6nbz0Uq8grkBpqyMws3WSLiTcDZQHPG5mcyUNiqaP2PywnXM5SQq3iZqXEFekdNYRYGYTgAlx4xImADMbsDnb
cM7lADMYPBh69YKjjvJ6gQpWZiKQ1NbMPjWzNyTVMbNfYqYdnP7wnHMOeOQRGDEiPDNw1FGZjibrJHuO4G8x79+Jmza8gmNxzrlNffppqBPo1g0uuSTT0WSlZIlApbxPNOycc1tu2DCYNCm8X7s2tCpaqxZ06gQ1Um0Vx5VHsjoCK+V9omHnnNtyHTtCnz6h3+GFC+GDD2CbbbxIKI2SJYIWku4nnP0Xvycabp7WyJxzuamgICSBPn1g0CBo2BBeeCGMd2mRLBFcFfN+ety0+GHnnKsYW28NvXvD7beHDug9CaRVmYnAzJ6orECccw6A994L/QqsXQs33AAPPRQSgSeDtPGaF+dc1fH++yEJrFkDzzwTHh4rLiYqrkB2Fc4TgXOuanj//XCLaN268Le/hYM/bKwzmDYts/FlsVSfLHbOufS64w5o0gQmT4addy45zYuG0iqlRCCpKXAe0Cp2GTM7Jz1hOedyzjPPwIoV0NxvSKxsqRYN/RNoCEwEXop5Oefc5psxA3r2hFWrwp1CngQyItWioa3MbEhaI3HO5ZYPPoCjjw4dzHz/fWhe2mVEqlcEL0rqkdZInHO5ozgJNGwY7gbyLmgzKtVEcAkhGayRtCp6/ZDOwJxzWerDD0MS2GabUDHcqlWmI8p5KRUNmZlfsznnKkb9+tCuXagc9iRQJaR8+6ik3wCHR4OTzezF9ITknMtKixdDixbQpg1MmeKdy1QhKRUNSbqLUDz0cfS6JBrnnHPJzZoF+flwa9TZoSeBKiXVOoIeQDcze9zMHgeOi8aVSdJxkuZJWiDpmgTTT5Q0W9JMSdMldSlf+M65Km/WrNCE9FZbwRlnZDoal0B5mphoFPO+YbKZJeUBDwLdgb2A/pL2ipvtdWA/M8sHzgEeLUc8zrmqbvbskATq1QsVw7vtlumIXAKp1hHcCXwoaRKhL4LDgWuTLNMJWGBmCwEkjQZOJBQtAWBmP8bMvzXe2Y1z2ePnn6F799B20KRJngSqsFTvGnpW0mSgIyERDDGz/yVZrDmwOGa4EDgofiZJJxMSTTPg+EQrkjQQGAjQ0u83dq56qFcvdDq/xx6w++6ZjsaVocyiIUlto78HADsSDuaLgZ2icWUunmDcJmf8ZvaCmbUFTgKGJlqRmY00sw5m1qFp06ZJNuucy6g5c+Dvfw/ve/TwJFANJLsiuJxwJn5PgmkGHFnGsoVAbBOCLYCvS5vZzN6UtJukJma2LElczrmqaM6c0J9A3bpwwgnhqsBVecl6KBsYve1uZmtip0mqm2Td04A2kloD/wX6AafGrWN34HMzs+gKozawvBzxO+eqirlzQxKoWRMmTvQkUI2kWlk8FYgvCko0bgMzWyfpQuBVIA943MzmShoUTR8BnAKcKelX4Gegr5l5hbFz1c3HH29MApMnh3oBV22UmQgk7UCo9K0naX82lvtvA2yVbOVmNgGYEDduRMz7PwJ/LGfMzrmqZvx4yMsLdwd5Eqh2kl0RHAsMIJTv/zlm/CrgujTF5JyrLtavhxo1YMgQOPdc8Js5qqVkdQRPAE9IOsXMxlVSTM656uDTT6FvX3j6adh3X08C1ViqzxGMk3Q8sDdQN2b8bekKzDlXhX36aehD2CzUC7hqLdU+i0cQ6gQKCM1A9ALeT2Nczrmqat68kATWrw91Au3aZToit4VSbWuos5mdCXxvZrcCh1DyGQHnXC744ouSSWCv+ObDXHWUaiL4Ofq7WtJOwK9A6/SE5JyrsnbYISSC//zHk0AWSbVw70VJjYA/AR8Qnir2lkKdyxULF8J224WO5p95JtPRuAqWamVxcRtA4yS9CNQ1s5XpC8s5V2UsWABdu4aOZV70jgmzUao9lP0+uiLAzH4BakganM7AnHNVQHES+OUXuPPOTEfj0iTVOoLzzGxF8YCZfQ+cl5aInHOZMWxYqAAu9vnncMgh8P338Prr4VkBl5VSTQQ1pI2djEa9j9VOT0jOuYzo2BH69AnJwAxOPBGWL4f77oP27TMdnUujVCuLXwXGRs8TGDAIeCVtUTnnKl9BAYwdG5LBBRfAkiUwciT87neZjsylWaqJYAhwPnABoeG51/C7hpzLLmvWhKuBHXaAoUPhxhs9CeSIVO8aWg88FL2cc9nmzTdh4MDw1HCdOnDddfDQQ+EqoaAg09G5NEvWVeXY6O9HkmbHvyonROdc2qxYAeedB0ccAStXQsOG8PLL8Ic/bCwmiq1Adlkp2RXBpdHfE9Ich3MuE9auDX0JXHUVbLMNHHroxiuA4jqDadP8qiDLJUsELxJ6IbvdzM6ohHicc+m2eDE88ADccQc0axaeFWjQIPG8XjSUE5IlgtqSzgI6S/pt/EQzez49YTnnKlxRETz4IFx/fWg0rn//8LRwaUnA5YxkzxEMAg4GGgE9415Ji4skHSdpnqQFkq5JMP20mDqHqZL2K/cncM4lN3s2dO4Ml1wCXbqEjubz8zMdlasikvVQNgWYImm6mT1WnhVHD509CHQDCoFpksab2ccxs30BHGFm30vqDowEDirXJ3DOla347H/pUvjb36BfP9j4fKhzSTuvP9LM/gN8vxlFQ52ABWa2MFrXaOBEYEMiMLOpMfO/S+gb2TlXEd54Azp0gK23htGjYaedoHHjTEflqqBkRUNHRH/ji4VSKRpqDiyOGS6MxpXmXODlRBMkDZQ0XdL0pUuXJtmsczlu+XI455zQWNy994Zx++7rScCVKlnR0M3R37M3Y92Jrj0t4YxSASERdCkljpGEYiM6dOiQcB3O5TwzePZZuPTS0FDctdfC5ZdnOipXDaTaDPUlkrZR8KikDyQdk2SxQkp2Z9kC+DrButsTmqs40cyWpxq4cy7O9dfDaadB69YwY0a4PbRevUxH5aqBVFsfPcfMfgCOAZoBZwN3JVlmGtBGUmtJtYF+wPjYGSS1BJ4HzjCz+eWK3DkH69aFJ4IBzjwztBQ6daq3FurKJdVG54qLeXoAfzWzWbHNUidiZuskXUhouTQPeNzM5koaFE0fAdwENAaGR6tbZ2YdNuNzOJd7PvwwNA+x667hCeC2bcPLuXJKNRHMkPQaocP6ayU1ANYnW8jMJgAT4saNiHn/O8CbN3SuPFavhptvhr/8BZo0gauvznRErppLNRGcC+QDC81staTtCMVDzrnK9OGHcMop8MUX4Wrgj3+EbbfNdFSumks1ERwCzDSznySdTmh/6L70heWcS6hFi/A8wF//GloMda4CpFpZ/BCwOmoC4mrgS+DJtEXlnAvM4Mkn4YQTQltBTZvClCmeBFyFSjURrDMzIzwZfJ+Z3Qd4S1XOpdPnn8Mxx8BZZ8F334WXc2mQaiJYJela4HTgpagdoVrpC8u5HDNs2MYOYH79NQzvtVfoOWz48HAV0LRpZmN0WSvVRNAX+AU418z+R2gq4k9pi8q5XNOx48bewIqK4P77w/inngodyddI9V/VufJLtc/i/wF/jhn+Cq8jcK5irFkD334Lu+8OvXvD4MFh3CuveKcwrlKk2sTEwZKmSfpR0lpJRZJWpjs457Lahx/CRRe
[... base64-encoded PNG data truncated: the cell's output figure, 'Interpolation in the Presence of Noisy Data', plotting training and test classification error against the fraction of added label noise ...]\n", 166 | "text/plain": [ 167 | "
" 168 | ] 169 | }, 170 | "metadata": { 171 | "needs_background": "light" 172 | }, 173 | "output_type": "display_data" 174 | } 175 | ], 176 | "source": [ 177 | "from numpy.linalg import pinv, solve\n", 178 | "import numpy as np\n", 179 | "import time \n", 180 | "import random\n", 181 | "import matplotlib.pyplot as plt\n", 182 | "%matplotlib inline\n", 183 | "\n", 184 | "import kernel\n", 185 | "import eigenpro\n", 186 | "import torch\n", 187 | "\n", 188 | "SEED = 2134\n", 189 | "np.random.seed(SEED)\n", 190 | "random.seed(SEED)\n", 191 | "\n", 192 | "def mse(preds, labels): \n", 193 | " return np.mean(np.abs(np.power(preds - labels, 2)))\n", 194 | "\n", 195 | "def numpy_acc(preds, labels):\n", 196 | " preds_max = np.argmax(preds, axis=0)\n", 197 | " labels_max = np.argmax(labels, axis=0)\n", 198 | " return np.mean(preds_max == labels_max)\n", 199 | "\n", 200 | "\n", 201 | "X = train_set.cpu().data.numpy().astype(\"float32\")\n", 202 | "y = train_labels.cpu().data.numpy().astype(\"float32\")\n", 203 | "X_test = test_set.cpu().data.numpy().astype(\"float32\")\n", 204 | "y_test = test_labels.cpu().data.numpy().astype(\"float32\")\n", 205 | "\n", 206 | "possible_labels = np.eye(10)\n", 207 | "random_idxs = np.random.randint(low=0, high=10, size=len(y))\n", 208 | "random_labels = possible_labels[random_idxs, :]\n", 209 | "\n", 210 | "random_test_idxs = np.random.randint(low=0, high=10, size=len(y_test))\n", 211 | "random_test_labels = possible_labels[random_test_idxs, :]\n", 212 | "\n", 213 | "noise_probs = np.linspace(0, .9, 10)\n", 214 | "train_errors = []\n", 215 | "test_errors = []\n", 216 | "for p in noise_probs:\n", 217 | " choice = np.random.uniform(size=y.shape[0])\n", 218 | " choice = np.where(choice < p, 1, 0)\n", 219 | " y[choice==1] = random_labels[choice==1]\n", 220 | "\n", 221 | " # Uncomment if you want to corrupt the labels for test data as well\n", 222 | " # choice_test = np.random.uniform(size=y_test.shape[0])\n", 223 | " # choice_test = np.where(choice_test < p, 1, 0)\n", 224 | " # y_test[choice_test==1] = random_test_labels[choice_test==1]\n", 225 | " \n", 226 | " print(\"probability: %.2f & Number of labels corrupted: %d\"%(p, np.sum(choice))) \n", 227 | "\n", 228 | " use_cuda = torch.cuda.is_available()\n", 229 | " device = torch.device(\"cuda\" if use_cuda else \"cpu\")\n", 230 | " n_class = 10\n", 231 | " num_epochs=150\n", 232 | " kernel_fn = lambda x,y: kernel.laplacian(x, y, bandwidth=10)\n", 233 | " model = eigenpro.FKR_EigenPro(kernel_fn, X, n_class, device=device)\n", 234 | " res = model.fit(X, y, X_test, y_test, epochs=[num_epochs], mem_gb=12)\n", 235 | " train_errors.append(1 - res[num_epochs][0]['multiclass-acc'])\n", 236 | " test_errors.append(1 - res[num_epochs][1]['multiclass-acc'])\n", 237 | "\n", 238 | "plt.title(\"Interpolation in the Presence of Noisy Data\")\n", 239 | "plt.xlabel(\"Added Label Noise %\")\n", 240 | "plt.ylabel(\"Classification Error (1 - Acc.)\")\n", 241 | "plt.plot(noise_probs, test_errors, 'rx--', label='Test Error')\n", 242 | "plt.plot(noise_probs, train_errors, 'k--', label='Training Error')\n", 243 | "plt.legend()\n", 244 | "plt.show()" 245 | ] 246 | } 247 | ], 248 | "metadata": { 249 | "kernelspec": { 250 | "display_name": "dl_tutorial", 251 | "language": "python", 252 | "name": "dl_tutorial" 253 | }, 254 | "language_info": { 255 | "codemirror_mode": { 256 | "name": "ipython", 257 | "version": 3 258 | }, 259 | "file_extension": ".py", 260 | "mimetype": "text/x-python", 261 | "name": "python", 262 | "nbconvert_exporter": "python", 263 
| "pygments_lexer": "ipython3", 264 | "version": "3.7.10" 265 | } 266 | }, 267 | "nbformat": 4, 268 | "nbformat_minor": 2 269 | } 270 | -------------------------------------------------------------------------------- /DoubleDescentTutorialPart2.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "b234b088", 6 | "metadata": {}, 7 | "source": [ 8 | "# Double Descent with 1 Hidden Layer Neural Networks \n", 9 | "\n", 10 | "In this notebook, we identify the double descent phenomenon when training 1 hidden layer neural networks. Again, given a dataset $\\{(x^{(i)}, y^{(i)})\\}_{i=1}^{n} \\subset \\mathbb{R}^{d} \\times \\mathbb{R}$, we wish to learn a map from $x^{(i)} \\to y^{(i)}$. To learn such a map, we use the following 1 hidden layer nonlinear network: \n", 11 | "\\begin{align*}\n", 12 | " f(\\mathbf{W} ; x) = a \\frac{\\sqrt{c}}{\\sqrt{k}} \\phi(B x) ~~;\n", 13 | "\\end{align*}\n", 14 | "where $a \\in \\mathbb{R}^{1 \\times k}$, $B \\in \\mathbb{R}^{k \\times d}$, $x \\in \\mathbb{R}^{d}$, $c \\in \\mathbb{R}$ is a fixed constant, $\\phi$ is an elementwise nonlinearity, and $\\mathbf{W}$ is a vectorized version of all entries of $a, B$ (e.g. $\\mathbf{W} \\in \\mathbb{R}^{k + dk}$). We will also assume that $\\phi$ is a real valued function (as is the case in many models in practice). \n", 15 | "\n", 16 | "\n", 17 | "We will assume that the parameters $\\mathbf{W}_i \\overset{i.i.d}{\\sim} \\mathcal{N}(0, 1)$. We then use gradient descent to minimize the following loss: \n", 18 | "\\begin{align}\n", 19 | " \\mathcal{L}(w) = \\sum_{i=1}^{n} ( y^{(i)} - f(x^{(i)}))^2 ~~;\n", 20 | "\\end{align}\n", 21 | "\n", 22 | "We will now show that double descent occurs when the number of hidden units $k$ increases. \n", 23 | "\n", 24 | "**Note:** The following code will make use of the GPU (it can still run without the GPU, but will take a bit longer). " 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": 1, 30 | "id": "78ca0cc1", 31 | "metadata": {}, 32 | "outputs": [ 33 | { 34 | "name": "stderr", 35 | "output_type": "stream", 36 | "text": [ 37 | "/home/aradha/anaconda3/envs/dl_tutorial/lib/python3.7/site-packages/torchvision/datasets/mnist.py:498: UserWarning: The given NumPy array is not writeable, and PyTorch does not support non-writeable tensors. This means you can write to the underlying (supposedly non-writeable) NumPy array using the tensor. You may want to copy the array to protect its data or make it writeable before converting it to a tensor. This type of warning will be suppressed for the rest of this program. 
(Triggered internally at /opt/conda/conda-bld/pytorch_1623448265233/work/torch/csrc/utils/tensor_numpy.cpp:180.)\n", 38 | "  return torch.from_numpy(parsed.astype(m[2], copy=False)).view(*s)\n" 39 | ] 40 | }, 41 | { 42 | "name": "stdout", 43 | "output_type": "stream", 44 | "text": [ 45 | "Train Set: torch.Size([4000, 784])\n", 46 | "Train Labels: torch.Size([4000, 10])\n", 47 | "Test Set: torch.Size([10000, 784])\n", 48 | "Test Labels: torch.Size([10000, 10])\n" 49 | ] 50 | } 51 | ], 52 | "source": [ 53 | "# We will use a subset of MNIST for demonstrating double descent \n", 54 | "import torch\n", 55 | "from torchvision import datasets, transforms\n", 56 | "\n", 57 | "\n", 58 | "train_set = datasets.MNIST('./data', train=True, download=True)\n", 59 | "test_set = datasets.MNIST('./data', train=False, download=True)\n", 60 | "\n", 61 | "# Loading/Normalizing training & test images\n", 62 | "train_imgs, train_labels = train_set.data / 256, train_set.targets\n", 63 | "test_imgs, test_labels = test_set.data / 256, test_set.targets\n", 64 | "\n", 65 | "classes = {}\n", 66 | "max_per_class = 400\n", 67 | "max_labels = 10\n", 68 | "\n", 69 | "for idx, label in enumerate(train_labels): \n", 70 | "    label = label.data.numpy().item()\n", 71 | "    if label in classes and len(classes[label]) < max_per_class: \n", 72 | "        classes[label].append(train_imgs[idx])\n", 73 | "    elif label not in classes: \n", 74 | "        classes[label] = [train_imgs[idx]]\n", 75 | "    \n", 76 | "    if len(classes) >= max_labels:\n", 77 | "        early_exit = True\n", 78 | "        for label in classes: \n", 79 | "            early_exit &= len(classes[label]) >= max_per_class\n", 80 | "        if early_exit: \n", 81 | "            break\n", 82 | "\n", 83 | "all_train_examples = []\n", 84 | "all_train_labels = []\n", 85 | "for label in classes:\n", 86 | "    label_vec = torch.zeros(max_labels)\n", 87 | "    label_vec[label] = 1.\n", 88 | "    all_train_examples.extend(classes[label])\n", 89 | "    all_train_labels.extend([label_vec]*len(classes[label]))\n", 90 | "    \n", 91 | "all_test_labels = [] \n", 92 | "for label in test_labels: \n", 93 | "    label = label.data.numpy().item()\n", 94 | "    label_vec = torch.zeros(max_labels)\n", 95 | "    label_vec[label] = 1.\n", 96 | "    all_test_labels.append(label_vec)\n", 97 | "    \n", 98 | "    \n", 99 | "train_set = torch.stack(all_train_examples, dim=0).view(max_labels * max_per_class, -1)\n", 100 | "train_set = train_set / torch.norm(train_set, p=2, dim=1).view(-1, 1)\n", 101 | "train_labels = torch.stack(all_train_labels, dim=0)\n", 102 | "\n", 103 | "test_set = test_imgs.view(-1, 28*28)\n", 104 | "test_set = test_set / torch.norm(test_set, p=2, dim=1).view(-1, 1) \n", 105 | "test_labels = torch.stack(all_test_labels, dim=0)\n", 106 | "\n", 107 | "print(\"Train Set: \", train_set.shape)\n", 108 | "print(\"Train Labels: \", train_labels.shape)\n", 109 | "print(\"Test Set: \", test_set.shape)\n", 110 | "print(\"Test Labels: \", test_labels.shape)" 111 | ] 112 | }, 113 | { 114 | "cell_type": "markdown", 115 | "id": "63062efd", 116 | "metadata": {}, 117 | "source": [ 118 | "## Neural Network for MNIST Classification\n", 119 | "\n", 120 | "Below we provide code for constructing a 1 hidden layer network of width $k$ in PyTorch. We will consider networks with a bias term in the hidden layer just as in the previous notebook. 
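**Aside on the scaling constant (added note, not from the original notebook):** the forward pass in the next cell multiplies the hidden layer by `C = np.sqrt(2/self.k)`, i.e. it instantiates the prefactor of the model above with $c = 2$:

\begin{align*}
    C = \frac{\sqrt{c}}{\sqrt{k}} = \sqrt{\frac{2}{k}}, \qquad c = 2 ~~;
\end{align*}

$c = 2$ is the standard choice for the ReLU nonlinearity, since $\mathbb{E}[\phi(z)^2] = \frac{1}{2}\mathbb{E}[z^2]$ for $z \sim \mathcal{N}(0, 1)$, so the factor of $2$ keeps the scale of the network output roughly constant as the width $k$ grows.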
" 121 | ] 122 | }, 123 | { 124 | "cell_type": "code", 125 | "execution_count": 3, 126 | "id": "714d74e6", 127 | "metadata": { 128 | "collapsed": true 129 | }, 130 | "outputs": [], 131 | "source": [ 132 | "## We now need to define and train a neural network to map x^{(i)} to y^{(i)}\n", 133 | "import torch\n", 134 | "import torch.nn as nn\n", 135 | "import torch.nn.functional as F\n", 136 | "\n", 137 | "# Abstraction for nonlinearity \n", 138 | "class Nonlinearity(torch.nn.Module):\n", 139 | " \n", 140 | " def __init__(self):\n", 141 | " super(Nonlinearity, self).__init__()\n", 142 | "\n", 143 | " def forward(self, x):\n", 144 | " #return F.leaky_relu(x)\n", 145 | " return F.relu(x)\n", 146 | " \n", 147 | "class Net(nn.Module):\n", 148 | "\n", 149 | " def __init__(self, width):\n", 150 | " super(Net, self).__init__()\n", 151 | "\n", 152 | " self.k = width\n", 153 | " self.first = nn.Sequential(nn.Linear(784, self.k, bias=True), \n", 154 | " Nonlinearity())\n", 155 | " self.sec = nn.Linear(self.k, 10, bias=False)\n", 156 | "\n", 157 | " def forward(self, x):\n", 158 | " #C = np.sqrt(2/(.01**2 + 1)) * 1/np.sqrt(self.k)\n", 159 | " C = np.sqrt(2/self.k)\n", 160 | " o = self.first(x) * C\n", 161 | " return self.sec(o)" 162 | ] 163 | }, 164 | { 165 | "cell_type": "markdown", 166 | "id": "09112886", 167 | "metadata": {}, 168 | "source": [ 169 | "### Training a neural network with gradient descent\n", 170 | "\n", 171 | "Below, we provide code to train neural networks of varying width to classify $4000$ MNIST digits using gradient descent. We chose to run gradient descent for $10^5$ epochs to minimize the training loss and accuracy as much as possible. In practice, we would just early stop the code when the validation accuracy stops improving. The code below takes too much time to run in the tutorial, but you are encouraged to try it out offline. 
" 172 | ] 173 | }, 174 | { 175 | "cell_type": "code", 176 | "execution_count": 40, 177 | "id": "d1135c7e", 178 | "metadata": {}, 179 | "outputs": [ 180 | { 181 | "name": "stdout", 182 | "output_type": "stream", 183 | "text": [ 184 | "Number of Parameters: 12720\n" 185 | ] 186 | }, 187 | { 188 | "data": { 189 | "application/vnd.jupyter.widget-view+json": { 190 | "model_id": "", 191 | "version_major": 2, 192 | "version_minor": 0 193 | }, 194 | "text/plain": [ 195 | " 0%| | 0/100000 [00:00" 662 | ] 663 | }, 664 | "execution_count": 46, 665 | "metadata": {}, 666 | "output_type": "execute_result" 667 | }, 668 | { 669 | "data": { 670 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYgAAAEaCAYAAAAL7cBuAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAA33klEQVR4nO3dd5xU5b3H8c+XBSnSFFCjVJUitpXsohQRY4WoaCSWoAZLgNgu15JrS2KiWGKJN0ajWIJRLERjx4gaEQWVoouICqKCrOilKAqCCvK7fzxnZHZ3Zmd22dkzu/t7v17nNTOn/mZ2dn7neZ5znkdmhnPOOVdeo7gDcM45l588QTjnnEvJE4RzzrmUPEE455xLyROEc865lDxBOOecS8kTRB0iaaqkM9Is6yrJJDWu7bjqMkmXSLqzkuUjJb1SmzE1JP69zW+eIGqQpMWS1ktaI2m1pBmSxkjK689Z0uWSNkRxr5G0UNJfJf0o7tjSiX5Udt3S/ZjZVWZ2RrTPLf6xSvoOrJX0maQJklpmuW3KE4BonweXm5c2cUkaHL2PW8rNf0XSyCxjqZHPN19F33mTdG65+WOj+ZdHrzN+luX/FpIGRv/7X0r6XNJ0ScXRycjaaPpG0vdJr+fn/l1XXV7/cNVRR5pZK6ALcA3wP8Bd8YaUlYeiuLcFjgF2AObkc5LIY0eaWUugENgHuDiGGL4GTpHUNYZjZyUPSg0LgV+Wm3dKND9Z1p+lpNbAU8DNhP+lnYA/AN9GJyMto+/GGODVxGsz233L3kpueILIETP70syeAI4HfilpDwBJbST9Q9IKSUskXZYoYURnNfcl9pHmjHYXSTOjs5PHJW2b6vjRce6S9KmkTyRdKakgi7g3mNn8KO4VwPlJ+zxCUklS6WivpGX/Ex1njaQFkg6K5hdEZ04fRMvmSOoULesl6bnoLGuBpOOS9jdB0i2Sno62e13SLtGyadFqc6Ozr+NTvP8lkn4cPT8p+hx7R6/PkPRYis88sd/V0X77Je3veklfSPpI0pBMn2P0WX4GPEtIFIn97Bd9dqslzZU0OJt9VcNqYALw+3QrSDpN0rvR+3pWUpdofoXPV9JLko6Nlg+MPs+h0euDJZVEzxtF3+klkpZH3/U20bLE9/l0SR8D/0kR07EKJaY90sT8K0mLou/ME5J2TFpmCiX296P3dIskVfIZzQJaSNo92n53oHk0v0qfZZIeAGb2gJl9b2brzWyKmb2VxbZ5xxNEjpnZTKAU2D+adTPQBtgZOIBwxnJqFXZ5CnAasCOwEfhLmvXuiZbvSjiLPRRI2X6RJu7vgccTcUvqA9wNjAbaAbcDT0hqKqkncDZQHJVCDgMWR7s6DzgRGAq0jmJfJ2lr4DngfmC7aJ1bE/+skRMJZ1/bAIuAcVFsg6Lle0dnXw+leAsvAYOj54OADwmfd+L1Sym2Sey3bbTfV6PX+wILgPbAn4C7MvzwACCpIzAkih1JOwFPA1cSzi4vAB6R1CHTvqppHHBs9PcpH9vRwCXAz4AOwMvAA5D288328xwZTQcSvuMtgb+WO/wBwG6E70lyTKcC1wIHm9nbKWL+CXA1cBzwI2AJ8GC51Y4AioG9o/UOo3L3Ev6nIJQm/pFmvbSfZTkLge8l3SNpiKRtMqyf1zxB1I5lwLbRGfzxwMVmtsbMFgM3ACdXYV/3mtnbZvY18FvguPIlA0nbE36YxprZ12a2HPgzcEJ14o6e/wq43cxej86M7gG+BfYDvgeaAr0lNTGzxWb2QbTdGcBlZrbAgrlmtorwj7zYzP5uZhvN7A3gEWB40vH/ZWYzzWwjMJGkM/EsvMTmH7D9CT8sidcHkDpBpLPEzO6IkuY9hB+n7StZ/zFJa4ClwHI2n3meBEw2s8lmtsnMngNmE5JnjYtKMLcBf0yxeDRwtZm9G32+VwGFiVJECsmf5yDSf54jgBvN7EMzW0uoXjtBZUvBl0ffy/VJ88YCFwKDzWxRmhhGAHeb2Rtm9m20734qW/VzjZmtNrOPgRfJ/J25DzhRUhPC/8d9qVbK8Fkmr/cVMBAw4A5gRVTSqez7krc8QdSOnYDPCWegWxHOfBKWRMuztbTctk2i/SbrEs3/NKrKWE0449+uamH/EHdin+cn9hftsxOwY/QPPRa4HFgu6cGkon8n4AMq6gLsW25/IwhtHwmfJT1fRzgbzdZLwP6SdgAKgIeAAdGPSRugpAr7+iEOM1sXPa0slqOjktRgoBeb/z5dgJ+Xe88DCQmnMhsJf89kTYANWcR+LXCYpL3Lze8C/G9SHJ8DIv138VWgR/RDV0g40+4kqT3Ql83VcztS8fvdmLIJNfk7nHAhcIuZlVbyXsrsO0pAq8rFXKXvTJRIFhES5Ptmliq2hHSfZfl9vmtmI82sI7BHFPdNlW2TrzxB5JikYsIX+BVgJeGfOvksrTPwSfT8a6BF0rLkH8uETuW23RDtN9lSwtl9ezNrG02tq9IQptAuciSh6iGxz3FJ+2trZi3MLFEtcb+ZDYzemxH+mRLb7ZLiEEuBl8rtr6WZ/TrbGCsTJa11wLnANDNbQ/jxGAW8YmabUm1WE8dOiuElQt319dGspYQSYPJ73trMrsmwq4+BruXmdaPsD3G6GFYRfpyuKLdoKTC6XCzNzWxGmv2sA+YA/wW8bWbfATMIVYgfmFniO7iMit/vjcD/Je8uxSEOBS5LtHOkUWbfUTVlOzb//1TXPwhtbemql4BKP8vKtnmP8B1I2aaS7zxB5Iik1pKOINSR3mdm86IqiknAOEmtouL8eWwu1pYAgyR1jhr2Ul39cpKk3pJaEIq7D0f7/YGZfQpMAW6I4mgkaRdJB6TYX/m4m0jajVAfvQNwY7ToDmCMpH0VbC3pp9H76CnpJ5KaAt8A6wnVTgB
3AldI6h5tt5ekdoQrPXpIOjk6ZhOFSwF3y+LjhfCDs3OGdV4itI0kqj+mlntd3gpgUxb7rYqbgEMkFRL+zkdKOkyh8b6ZwmWUHZPWbxzNT0xNCKWfsQqN+pJURGjLKV//ns6NQH9CvX/CbcDFSQ20bST9PGl5qs83m8/zAeC/JXVTuLz3KsIVchszxDgfOBy4RdJRada5HzhVUmH0XbsKeD2qqt0SDxES1KQs1k31Wf4g+hudn/ibKlyQcSLw2hbGGAtPEDXvyaT650sJX6jkRuhzCCWFDwmlivsJjb9EddIPAW8RztaeSrH/ewlnJJ8BzQhnyKmcQqjOegf4AniYyqsyjpe0lnDFxhOEovuPzWxZFNtsQjvEX6P9LSI0RkJof7iGUJL5jFCVdUm07EbCP94U4CvCJb/NozP6Qwn1vsui7a6N9pWNy4F7oiqS49Ks8xLQis3VH+VflxGdJY8Dpkf73S/LWNIysxWEM9PfRtUXwwifzQrCd+RCyv4f/o2QYBPT3wnJ+e/Ak8CX0f4uNbN/ZxnDV4TG9W2T5j1K+LwflPQV8Dah3Srhcip+vtl8nncTvqPTgI8IJwznZBnnXELb1B1KcaWYmb1AaHd7BPiUUDKtartaquOuN7Pny7WJpFu3wmdZzhrCRQ2vS/qakBjeJulqwLpE5gMGOeecS8FLEM4551LyBOGccy4lTxDOOedS8gThnHMuJU8QzjnnUoq7N8Ua1b59e+vatWvcYbgttWBBeOyZqdsb59yWmjNnzkozS9kfWL1KEF27dmX27Nlxh+G21ODB4XHq1DijcK5BkJT2jvx6lSBcPXHZZXFH4JzDE4TLRwcfnHkd51zOeSO1yz8lJWFyzsXKSxAu/4wdGx69DaJO2rBhA6WlpXzzzTdxh+KSNGvWjI4dO9KkSfme49Nr8Ali4kS49FL4+GPo3BnGjYMRI+KOyrm6q7S0lFatWtG1a1eUeeA9VwvMjFWrVlFaWkq3bt2y3q5BVzFNnAijRsGSJWAWHkeNCvOdc9XzzTff0K5dO08OeUQS7dq1q3KprkEniEsvhXXrys5bty7Md85VnyeH/FOdv0mDThAff1y1+c65uqFly8yj07788svsvvvuFBYW8sknnzB8+PCM2wwdOpTVq1ezevVqbr311irF9L//+7+MTbSvAaNHj+bgpCv2br75Zs4991xmz57NueemHuala9eurFy5ssLxp06dyhFHHFGleLLRoBNE585Vm+9qyVVXhcm5HJo4cSIXXHABJSUl7LTTTjz88MMZt5k8eTJt27atVoLo378/M2ZsHtG1pKSEL7/8ku+/D4MvzpgxgwEDBlBUVMRf/vKXSvdVneNXR4NOEOPGQYsWFecXF4c2CReT/v3D5NwWmjp1KoMHD2b48OH06tWLESNGYGbceeedTJo0iT/+8Y+MGDGCxYsXs8ceYdjoCRMm8LOf/YzDDz+c7t2785vf/OaH/SXO4C+66CI++OADCgsLufDCCwG47rrrKC4uZq+99uL3v/99hVj22WcfFi5cyPr16/nyyy9p0aIFhYWFzJs3DwgJon///mVKA6tWreLQQw9ln332YfTo0SQGeEt1/LVr11Z4n1uqQV/FlLhaKXEVU8eOofTw8MNw/PHw97/D1lvHG2ODlDjL8iRRLwxOdJ2S5LjjjuPMM89k3bp1DB06tMLykSNHMnLkSFauXFmh6mdqFS9/fvPNN5k/fz477rgjAwYMYPr06Zxxxhm88sorHHHEEQwfPpzFixeX2aakpIQ333yTpk2b0rNnT8455xw6der0w/JrrrmGt99+m5Lofp0pU6bw/vvvM3PmTMyMo446imnTpjFo0KAftmncuDGFhYXMmjWL9evXs++++9K9e3dmzJjBdttth5nRqVMnPvjggx+2+cMf/sDAgQP53e9+x9NPP8348eNTHn/q1Kkp3+fAgQOr9FmV16BLEBCSxOLFsGlTSBIvvwzXXReSxP77e3tELC65JEzO1YC+ffvSsWNHGjVqRGFhYYVkkMpBBx1EmzZtaNasGb1792bJkrTdFQEhQUyZMoV99tmHPn368N577/H+++9XWG/AgAHMmDGDGTNm0K9fP/r168eMGTOYPn06/VOcEE2bNo2TTjoJgJ/+9Kdss802Nfo+M2nQJYhUJLjgAujdG048MVQ3/etfMGBA3JE5VzdVdsbfokWLSpe3b9++yiWG8po2bfrD84KCAjZu3Fjj25gZF198MaNHj650vf79+3P77bfzzTffcNZZZ9GhQwfeeecdOnTowIA0PzLZXn1UnfeZSYMvQaQzdCi8/jq0aQMHHgh33x13RM65fNGqVSvWrFnzw+vDDjuMu+++m7Vr1wLwySefsHz58grb9e/fn9dee40VK1aw3XbbIYkOHTrw+OOPpyxBDBo0iInRjVnPPPMMX3zxRcrj54oniEr06hWSxODBcPrp8F//BTWQlJ1zdVy7du0YMGAAe+yxBxdeeCGHHnoov/jFL+jXrx977rknw4cPT/kDvs0229ChQwd23333H+b169eP5cuXs/fee1dY//e//z3Tpk2jT58+TJkyhc7RJZblj58rqomW7nxRVFRkuRgPYuNG+M1v4M9/Dh2NPvQQbLttjR/GJfh4EHXau+++y2677RZ3GC6FVH8bSXPMrCjV+jktQUg6XNICSYskXZRi+TBJb0kqkTRb0sCkZYslzUssy2WcmTRuDDfeGKqZpk2Dvn3hnXfijKieu+mmMDnnYpWzBCGpALgFGAL0Bk6U1Lvcai8Ae5tZIXAacGe55QeaWWG67FbbTj0VXnwR1q6F/faDp56KO6J6qrAwTM65WOWyBNEXWGRmH5rZd8CDwLDkFcxsrW2u49oayPv6rv79YdYs6N4djjoKrr3Wb6qrcc8/HybnXKxymSB2ApYmvS6N5pUh6RhJ7wFPE0oRCQZMkTRH0qgcxlllnTqF+yWOOw4uughOOgnWr487qnrkyivD5Oqs+tS2WV9U52+SywSR6uLdChGa2aNm1gs4GrgiadEAM+tDqKI6S9Kg8tsCSBoVtV/MXrFiRQ2EnZ0WLeCBB0J3HfffD4MGwSef1NrhnctbzZo1Y9WqVZ4k8khiPIhmzZpVabtc3ihXCnRKet0RWJZuZTObJmkXSe3NbKWZLYvmL5f0KKHKalqK7cYD4yFcxVSTbyATKdzwu/vuoRRRXAyPPgr77lubUTiXXzp27EhpaSm1ecLmMkuMKFcVuUwQs4DukroBnwAnAL9IXkHSrsAHZmaS+gBbAaskbQ00MrM10fNDgT/mMNYtMmwYvPpqaJM44AC44w44+eS4o3IuHk2aNKnSqGUuf+UsQZjZRklnA88CBcDdZjZf0pho+W3AscApkjYA64Hjo2SxPfBodIt5Y+B+M/t3rmKtCXvsERqvf/5zOOUUmDcPrr4aCgrijsw556rHb5SrYRs2wH//N9xyCwwZEtop2rSJNaS6Z8GC8NizZ7xxONcAxHajXEPUpAn89a9w223w3HOhPWLhwrijqmN69vTk4Fwe8ASRI6NHwwsvwKpVIU
lMmRJ3RHXIk0+GyTkXK08QOTRoUGiX6Nw5VDf9+c9+U11WbrghTM65WHmCyLGuXWH69HCl03nnwWmnwbffxh2Vc85l5gmiFrRsGUao+/3vYcKEML7EZ5/FHZVzzlXOE0QtadQILr8c/vlPmDs33FQ3Z07cUTnnXHqeIGrZ8OGhyqlRIxg4EB58MO6InHMuNR+TOgaFhaHx+thjw7jX8+bBFVeEpOGAe++NOwLnHF6CiM1224XLYM84A666Co45BmphiNm6oVOnMDnnYuUJIkZbbQXjx8PNN8PTT0O/fvDhh3FHlQceeihMzrlYeYKImQRnnw3PPgvLloXG6xdfjDuqmP3tb2FyzsXKE0SeOOig0C6xww5wyCFw661+U51zLl6eIPLILruEbsOHDIGzzoJf/xq++y7uqJxzDZUniDzTujU89hhcfDHcfnsoTfi4K865OHiCyEMFBeHKpvvvh5kzQ7vE3LlxR+Wca2g8QeSxE0+El1+GjRuhf3945JG4I6olDz8cJudcrDxB5LmiotB4veee4S7sP/wBNm2KO6oca98+TM65WHmCqAN+9COYOjUMZXr55XDccfD113FHlUMTJoTJORcrTxB1RLNm4Tfzhhvg0UdhwABYsiTuqHLEE4RzecETRB0ihTElnn4aFi8Ojdcvvxx3VM65+soTRB10+OHw+uuwzTbhBrs77og7IudcfeQJoo7q2TMkiZ/8BEaNgnPOgQ0b4o7KOVef5DRBSDpc0gJJiyRdlGL5MElvSSqRNFvSwGy3ddC2bahuOv98+OtfQ8li1aq4o3LO1ReyHHX4I6kAWAgcApQCs4ATzeydpHVaAl+bmUnaC5hkZr2y2TaVoqIimz17dk7eT767555QkujUCR5/HHbfPe6ItsC6deGxRYt443CuAZA0x8yKUi3LZQmiL7DIzD40s++AB4FhySuY2VrbnKG2BizbbV1Zv/wlvPRSuPy1Xz948sm4I9oCLVp4cnAuD+QyQewELE16XRrNK0PSMZLeA54GTqvKttH2o6LqqdkrGninRfvtF26q69EDhg2Da66poz3C3nprmJxzscplglCKeRV+rszsUTPrBRwNXFGVbaPtx5tZkZkVdejQobqx1hsdO4ZLX084IXT4N2IErF8fd1RVNGlSmJxzscplgigFkseN7AgsS7eymU0DdpHUvqrburKaN4eJE+Hqq+HBB2H//aG0NO6onHN1TS4TxCygu6RukrYCTgCeSF5B0q6SFD3vA2wFrMpmW1c5CS66KDRYL1gQ+nR69dW4o3LO1SU5SxBmthE4G3gWeJdwhdJ8SWMkjYlWOxZ4W1IJcAtwvAUpt81VrPXZkUfCa6/B1lvD4MHhaqeJE6FrV2jUKDxOnBhzkM65vJSzy1zj0JAvc81k1arQyd9//gONG4cuxBNatIDx40N7RV4YPDg8Tp0aZxTONQiVXebqCaIB2bAB2rWDNWsqLuvSJfTv5JxrWOK6D8LlmSZNYO3a1Ms+/rh2Y3HO5T9PEA1M585Vmx+L668Pk3MuVp4gGphx4yrepNyiRZifN556KkzOuVhlnSAkbSNpd0k7S/LEUkeNGBEapNu2Da87dsyzBmrnXN5oXNlCSW2As4ATCfcorACaAdtLeg241cxezHmUrkaNGBESw+DBITkMGRJ3RM65fJSpJPAwoU+k/c2sp5kNjLq16ARcAwyTdHrOo3Q1rk+fcDPdzJlxR+Kcy1eVliDM7JBKls0B5tR4RK5WtGoFu+0WOvfLO82bxx2Bc44MCcLVb8XF8MwzocdXpeoeMS7PPBN3BM45tuAqJklv1GQgrvYVF8Py5bB0aeZ1nXMNT7UThJn1qclAXO3r2zc85l07xBVXhMk5F6usEkTUq2qzpNfNJXXNWVSuVuy1V7i7Ou/aIV54IUzOuVhlW4L4J7Ap6fX30TxXhzVtCnvvnYcJwjmXF7JNEI2jsaEBiJ5vlZuQXG0qLoY5c2DTpszrOucalmwTxApJRyVeSBoGrMxNSK42FRfDV1/BwoVxR+KcyzfZXuY6Bpgo6a/R61LglNyE5GpTckN1r17xxvKDdu3ijsA5R5YJwsw+APaT1JIwhkSKEQVcXdSrVxhtbtYsOCVfUv4jj8QdgXOO7K9iukpSWzNba2Zroo77rsx1cC73Cgrgxz/2hmrnXEXZtkEMMbPViRdm9gUwNCcRuVpXXAwlJfDddxlXrR0XXxwm51yssm2DKJDU1My+hXAfBNA0d2G52lRcDN9+C2+/HTrxi92rr8YdgXOO7EsQ9wEvSDpd0mnAc8A/cheWq015e0e1cy5WWSUIM/sTcCWwG7A7cIWZXZtpO0mHS1ogaZGki1IsHyHprWiaIWnvpGWLJc2TVCJpdvZvyVVV167hwiFvh3DOJcu6N1cz+zfwb0lbA8dIetrMfppufUkFwC3AIYTLYmdJesLM3kla7SPgADP7QtIQYDywb9LyA83M77fIMSlUM3mCcM4ly/Yqpq0kHS1pEvApcBBwW4bN+gKLzOzD6M7rB4FhySuY2YyowRvgNaBjlaJ3Naa4GObPh6+/jjsSwnB3Hf2r4FzcMg05eghhuNHDgBeBe4G+ZnZqFvveiTAaXUIpZUsH5Z0OJA8EYMAUSQbcbmbj08Q4ChgF0Llz5yzCcqkUF4fuNt58EwYOjDmY++6LOQDnHGQuQTwL7AIMNLOTzOxJynbaV5lUQ9BYyhWlAwkJ4n+SZg+IuhQfApwlaVCqbc1sfDQMalGHDh2yDM2VV1wcHr2h2jmXkClB/JhQ9fO8pOei8acLstx3KdAp6XVHYFn5lSTtBdwJDDOzVYn5ZrYselwOPEqosnI5ssMO0KlTnrRDjB0bJudcrCpNEGb2ppn9j5ntAlwO7ANsJemZqGqnMrOA7tFYElsBJwBPJK8gqTPwL+BkM1uYNH9rSa0Sz4FDgber9tZcVeVNQ3VJSZicc7HKekQ5M5tuZmcT2hZuAvplWH8jcDahmupdYJKZzZc0RtKYaLXfAe2AW8tdzro98IqkucBM4OnoKiqXQ8XF8MEH8PnncUfinMsHWV/mmmBmmwg/+s9mse5kYHK5ebclPT8DOCPFdh8Ce5ef73Ir0Q4xezYcemi8sTjn4lftMald/VNUFB69odo5B9UoQbj6q00b6NkzD9ohevSIOQDnHFQjQUgale6eBFf3FRfDCy/EHMR4/3o5lw+qU8U0JvMqrq4qLoZPP4VPPok7Eudc3KqTIFLdAOfqiby4YW7UqDA552JVnQRxZI1H4fJGYSE0bhxzO8TChWFyzsWqygnCzEpzEYjLD82bw5575kFDtXMudn6Zq6uguDjcC2Epe85yzjUUniBcBcXFsHo1LFoUdyTOuThVO0FEXYG7eij2hurCwjA552K1JTfK3QX4AAz10O67h7aIWbNgxIgYArjpphgO6pwrL9OAQU+kW0ToZM/VQ40bQ58+3lDtXEOXqQSxP3ASsLbcfOHjM9RrxcVw++2wcWNIGLXqpJPCo48s51ysMv3rvwasM7OXyi+QtCA3Ibl8UFwcanrmz4e9a7tf3VK/k
tq5fJBpwKAhZvZimmUphwB19UPsDdXOudhVmiAkZexWI5t1XN2z667Qtq23QzjXkGW6zPVFSedEQ4P+QNJWkn4i6R7gl7kLz8VFyqMhSJ1zscjUBnE4cBrwgKRuwGqgGVAATAH+bGYluQzQxae4GK69FtavD5e91pp+lY5m65yrJZUmCDP7BriVMGZ0E6A9sN7MVtdCbC5mxcXw/fdQUlLLv9lXX12LB3POpZP1ndRmtsHMPvXk0HB4Q7VzDZv3xeTS2mkn2HHHGNohjj02TM65WOU0QUg6XNICSYskXZRi+QhJb0XTDEl7Z7utqx2xNFSvWhUm51ysMiYISQWSnq/qjiUVALcAQ4DewImSepdb7SPgADPbC7gCGF+FbV0tKC4OY/esXh13JM652pYxQZjZ98A6SW2quO++wCIz+9DMvgMeBIaV2/cMM/sievka0DHbbV3tSLRDzJkTbxzOudqXbS873wDzJD0HfJ2YaWbnVrLNTsDSpNelwL6VrH868Ew1t3U5UlQUHmfOhIMOijcW51ztyjZBPB1NVZHqDuuUY5RJOpCQIAZWY9tRwCiAzp299/Gatu224a7qWm2H8EzkXF7IKkGY2T2StgJ6RLMWmNmGDJuVAp2SXncElpVfSdJewJ3AEDNbVZVto9jGE7VdFBUV+SCZOVBcDC+/XIsH/O1va/Fgzrl0srqKSdJg4H1Cw/GtwEJJmTrrmwV0l9QtSi4nAGXGl4i68PgXcLKZLazKtq72FBeHDlY/+yzuSJxztSnbKqYbgEPNbAGApB7AA8CP021gZhslnQ08S+ia424zmy9pTLT8NuB3hIGHbo36/NtoZkXptq3WO3RbLNFQPWsWHHlkLRxwyJDw+Mwzla/nnMupbBNEk0RyADCzhVHXG5Uys8nA5HLzbkt6fgZwRrbbunjssw8UFISG6lpJEOvX18JBnHOZZJsg5ki6C7g3ej0C8AsfG4ittw7jVHvPrs41LNneST0GmA+cC/wX8E40zzUQiTuqzS8DcK7ByFiCkNQImGNmewA35j4kl4+Ki+Guu+Cjj2DnneOOxjlXGzImCDPbJGmupM5m9nFtBOXyT3LPrjlPEEcckeMDOOeykW0bxI+A+ZJmUvZO6qNyEpXLO3vuCU2bhmqmE07I8cEuuCDHB3DOZSPbBPGHnEbh8l6TJuFqJm+odq7hyLYN4paoDcI1YMXFcPfdYZS5goIcHmjw4PA4dWoOD+KcyySb3lw3AXOju55dA1ZcDF9/De++G3ckzrna4G0QLmvJDdV7eHnSuXrP2yBc1nr0gNatQzvEaafFHY1zLtcqTRCSepnZe2b2kqSmZvZt0rL9ch+eyyeNGoXxIbyh2rmGIVMbxP1Jz18tt+zWGo7F1QHFxfDWW/Dtt5nXrbbjjguTcy5WmaqYlOZ5qteuASguhg0bYO5c6Ns3Rwc588wc7dg5VxWZShCW5nmq164BSG6ozpl168LknItVphJER0l/IZQWEs+JXu+U08hcXurUCbbfPsftEEOHhke/D8K5WGVKEBcmPZ9dbln5164BkDb37Oqcq98qTRBmdk9tBeLqjuJiePppWLMGWrWKOxrnXK5kOx6Ecz8oLg7jQszxIaOcq9c8Qbgqq5WGaudc7LK6k1rSADObnmmeaxjat4du3XLYDjFyZI527Jyrimy72rgZ6JPFPNdAFBfD66/naOeeIJzLC5m62ugH9Ac6SDovaVFrIJcdPrs8V1wMkybBihXQoUMN73zlyvDYvn0N79g5VxWZ2iC2AloSEkmrpOkrYHimnUs6XNICSYskXZRieS9Jr0r6VtIF5ZYtljRPUokkv6Q2zyTaIXJSzTR8eJicc7HKdJnrS8BLkiaY2RL4YQChlmb2VWXbSioAbgEOAUqBWZKeMLN3klb7HDgXODrNbg40s5VZvRNXq/r0CfdEzJy5+b4251z9ku1VTFdLai1pa+AdYIGkCzNs0xdYZGYfmtl3wIPAsOQVzGy5mc0CNlQ1cBevVq2gd2+/Yc65+izbBNE7KjEcDUwGOgMnZ9hmJ2Bp0utSqtY9hwFTJM2RNCrdSpJGSZotafaKFSuqsHu3pRJ3VJv3yuVcvZRtgmgiqQkhQTxuZhvI3Flfqt5eq/JTMsDM+gBDgLMkDUq1kpmNN7MiMyvqUOOtpa4yxcWhkfrjj+OOxDmXC9le5no7sBiYC0yT1IXQUF2ZUqBT0uuOwLJsAzOzZdHjckmPEqqspmW7vcu95BvmunSpwR3/+tc1uDPnXHVlVYIws7+Y2U5mNtSCJcCBGTabBXSX1E3SVsAJwBPZHE/S1pJaJZ4DhwJvZ7Otqz177QVNmuSgHeL448PknItVtndSbw9cBexoZkMk9Qb6AXel28bMNko6G3iWcM/E3WY2X9KYaPltknYg9ArbGtgkaSzQG2gPPCopEeP9Zvbvar5HlyNNm0JhYQ4SxNKo6apTp8rXc87lVLZtEBMIP/Q7Rq8XAmMzbWRmk82sh5ntYmbjonm3mdlt0fPPzKyjmbU2s7bR86+iK5/2jqbdE9u6/FNcHDrt27SpBnd68slhSmHiROjaNYyP3bVreF2V5c657FWaICQlShjtzWwSsAlC6QD4PsexuTqguDh0+71gQe6PNXEijBoFS5aEK6eWLAmvE0kg03LnXNVkKkEk+uv8WlI7oquQJO0HfJnLwFzdUJs9u156acWRSNetC23aZ5wRkkGq5ZdemvvYnKuPMiWIxKWq5xEamHeRNB34B3BOLgNzdcMbb4Q7qkeOzH2VTrrLadesgWeeST+M9ZIlIYHcdx+UluYuPufqm0yN1Mmd9D1KuElOwLfAwcBbOYzN5bmJE2HMmM03yiWqdABGjKjZY5lB27bwxRcVl3XpAosXhwS1ZEnF5c2bwyOPwF3RJRW77gqDB4fpgAOgY8eajdW5+iJTCaKA0FlfK2BrQkIpAFpE81wDlq7KZ4urdM4/P0yR9evhlFNCcigo14dwixYwLrqEYdy48Lr88jvuCB3EvvEG3Hhj6CLk4YfhpJPChVLdu8OvfhUSnpcwnNtMVkk/CZLeiO5mrhOKiops9mzv+LW2NGqUvpuNd96B3Xbb8mOUlsIxx8Ds2fDHP4aBii67LFQ3de4ckkJyaWXixJCg0i1P+P57eOstmDo1TC+9BF9GrWrJJYzBg2GnqnQQ41wdI2mOmRWlXJYhQbxpZvvkLLIa5gmidqWr0lHUcnXMMXDxxVCU8quX2sSJcNdvFrBsGazevifr14ckdN99cNRRNRJ2Sp4wXEO1JQliWzP7PGeR1TBPELUrcVlpcjVTixZwww3wySdw883hR/bgg+GSS2DZssrP7hP7e3rdYAAOZCoSXHMN/OY3tfvePGG4hqLaCaKu8QRR+yqr0vnqK7jttlDv/3//F6qkkm+o22qr0Lawxx5h3euvD48vMhgICQI2N0LH6fvvYe7czQlj2jRPGK5+8AThYvXNN+FH8/Msy6LlE4RUw3dq14DKEkb37mWvkvKE4fJZZQki295cnau2Zs1SX54K4cd/5cowAFH37qnbNDp3zm18
1VFQEEbV69MHzjuvYsKYNClcPQWeMFzdlW1fTM5tkXQ/8p07w7bbhl5h012mmriMNZ8lEsZ558ETT8CqVaGPqhtugF69QsIYMSLcc9GjR2hruf/+0FbjXL7yKiZXK9I1aI8fX7GhevJ5z7N8Obzf5eC0l6nWNdlWSQ0eDDvumHY3ztU4b4NweSHbexQaAk8YLl94gnB1S0lJeCwsjDOKWlVZwujRo2wbhicMV5M8Qbi6ZfDg8Dh1apxRxMoThqstlSUIb6R2Lg9V1ujdsyc89BD84hfhiqiePWH0aHjggXAzIvjASa5meAnC5R8vQWRUWQljhx3CpcMbN25eP9UFAc6BlyCcq3dSlTBmzw53o3/5ZdnkAOHqsXPPhbffzr+bDl3+8gThXD1QUAA//nHoJf2bb1Kv8/nnsOee0K4d/PSncNVVoeSxfn3txurqDr+T2uWfq66KO4I6rXPn1Hek77hj+GinT4dXXoHJk8P8Jk1CaWTAABg4MDxut13txuzyU05LEJIOl7RA0iJJF6VY3kvSq5K+lXRBVbZ19Vj//mFy1ZLujvQ//Ql++cvQFvHOO6Gd4sknQzVVkyZwyy3ws5/B9tuHezFOPRXuvBPefTf9uB+ufstZI7WkAmAhcAhQCswCTjSzd5LW2Q7oAhwNfGFm12e7bSreSF1PzJgRHj1JVFt1bkr89ttwpdT06ZunlSvDsm23DSWLxFRUFPrYcnVfLPdBSOoHXG5mh0WvLwYws6tTrHs5sDYpQWS9bTJPEPWEX8WUF8xg4cLNyeKVV8JrCF21FxVtrpbq3x/at483Xlc9cfXmuhOwNOl1KbBvTW8raRQwCqBzPnb76VwdJYV7LHr2hNNOC/NWrAgFvFdeCUnjppvguuvCsp49y7ZjdO++eXRBVzflMkGk+mpkW1zJelszGw+Mh1CCyHL/zrlq6NABhg0LE4QroGbP3lzKeOwxuPvuzesmV0v16QNNm8YWuquGXCaIUqBT0uuOwLJa2NY5V0uaN4f99w8ThHss3nuvbLXUY4+FZU2bQt++mxNG//6hbcPlr1wmiFlAd0ndgE+AE4Bf1MK2zrmYNGoEvXuH6Ve/CvM++6xstdT114dxxiGsl1wttfPOXi2VT3La1YakocBNQAFwt5mNkzQGwMxuk7QDMBtoDWwC1gK9zeyrVNtmOp43UtcTDbA314Zk3TqYOXNzKWPGjM3dhGy/fdmEsc8+4RJclzvem6tzLm9t2gTz52+ukpo+HRYvDsuaN4d9991cLdWvH7RtG2e09Y8nCFe3PP98eDz44HjjcLH55JOy92OUlIQOCiXYY4/NCWPgQOjSJcz3AamqxxOEq1v8PghXztq18PrrmxPGq6/CmjVh2Y47hrG+33wTNmzYvI33YJuduO6DcM65GtGyJRx0UJgglCbmzdtcLfXPf4Z5ydatgzFjoLQUunXbPLVr5w3h2fIShMs/XoJwVdSoUfb9RbVsGQZRSk4aialrV2jdOpeR5h8vQTjn6rV0Pdh26QJvvRUavT/6qOL0n//A11+X3WbbbVMnjsRjQ+qDyhOEc67OGzcORo0K1UoJLVqE+a1bw157hak8szDYUvnEsXhxSCxPPAHffVd2mx/9qGLiSEydOkHjevSrWo/eiqs3br897ghcHZNoiK7qVUxS6GSwfXsoLq64fNMm+PTTsokj8fyVV8I44Mkj9BUUhCSRrgprhx1CdVhd4W0QzjlXTRs2wNKl6auwPvus7PpNm4Zqr3RVWHE0oHsbhKtbnnwyPB55ZLxxOJdBkyahe5Cdd069fP360DaSqgpr1qwwDGyyli3TJ49u3aBVq7Lr5/reD08QLv/ccEN49ATh6rjmzaFXrzCl8tVXFRPHRx/Bhx/CCy9UbEBv125zsli3Dp57bnMbyZIloR0Gai5JeIJwzrmYtG4Ne+8dpvLMwoh+5ds+Pvoo3Fn+/vsVt1m3LpQoPEE451w9JoUxNTp0CN2kl5fu3o+PP665GOpQe7pzzrmEdANo1uTAmp4gnHOuDho3LtzrkSxx70dN8Soml3/uvTfuCJzLe9W996MqPEG4/NOpU+Z1nHOMGJHb3mq9isnln4ceCpNzLlZegnD5529/C4/HHx9vHM41cF6CcM45l5InCOeccyl5gnDOOZdSThOEpMMlLZC0SNJFKZZL0l+i5W9J6pO0bLGkeZJKJHkXrc45V8ty1kgtqQC4BTgEKAVmSXrCzN5JWm0I0D2a9gX+Fj0mHGhmK3MVo8tTDz8cdwTOOXJbgugLLDKzD83sO+BBYFi5dYYB/7DgNaCtpB/lMCZXFyRGcHHOxSqXCWInYGnS69JoXrbrGDBF0hxJo9IdRNIoSbMlzV6xYkUNhO1iN2FCmJxzscplgkg1LlL5vgcrW2eAmfUhVEOdJWlQqoOY2XgzKzKzog4dOlQ/Wpc/PEE4lxdymSBKgeQ+EzoCy7Jdx8wSj8uBRwlVVs4552pJLu+kngV0l9QN+AQ4AfhFuXWeAM6W9CChcfpLM/tU0tZAIzNbEz0/FPhjDmNl8ODBFeYdd9xxnHnmmaxbt46hQ4dWWD5y5EhGjhzJypUrGT58eIXlv/71rzn++ONZunQpJ598coXl559/PkceeSQLFixg9OjRFZZfdtllHHzwwZSUlDB27NgKy6+66ir69+/PjBkzuOSSSyosv+mmmygsLOT555/nyiuvrLD89ttvp2fPnjz55JPckBjFLcm9995Lp06deOihh/hb4u7mJA8//DDt27dnwoQJTEhxxj958mRatGjBrbfeyqRJkyosnzp1KgDXX389Tz311Oa4S0po1KgRe0Wvr7jiCl544YUy27Zr145HHnkEgIsvvphXX321zPKOHTty3333ATB27FhKSkrKLO/Rowfjx48HYNSoUSxcuLDM8sLCQm666SYATjrpJEpLS8ss79evH1dffTUAxx57LKtWrSqz/KCDDuK3v/0tAEOGDGH9+vVllh9xxBFccMEFgH/38um7B9C8eXOeeeYZoO589xLvp6blLEGY2UZJZwPPAgXA3WY2X9KYaPltwGRgKLAIWAecGm2+PfCowujdjYH7zezfuYrVOedcRbJUQxLVUUVFRTZ7tt8yUeclzqhzdFbknNtM0hwzK0q1zDvrc/ln8uS4I3DO4QnC5aPyw2Q552LhfTG5/HPrrWFyzsXKE4TLP5Mmhck5FytPEM4551LyBOGccy4lTxDOOedS8gThnHMupXp1o5ykFcCSuOPIkTbAl3EHEamNWGr6GFu6v+puX5Xtanrd9kBDG0+lof2f1MRxuphZ6p5OzcynOjAB4+OOoTZjqeljbOn+qrt9Vbar6XWB2XF8P+KcGtr/Sa6P41VMdceTcQeQpDZiqeljbOn+qrt9VbbL1boNST59LrUVS86OU6+qmJxzm0mabWn62HEuG16CcK7+Gh93AK5u8xKEc865lLwE4ZxzLiVPEM4551LyBOGccy4lTxDONRCSBkt6WdJtkgbHHY/Lf54gnKvDJN0tabmkt8vNP1zSAkmLJF0UzTZgLdAMKK3tWF3d41cxOVe
HSRpE+NH/h5ntEc0rABYChxASwSzgROA9M9skaXvgRjMbEVPYro7wEoRzdZiZTQM+Lze7L7DIzD40s++AB4FhZrYpWv4F0LQWw3R1lI9J7Vz9sxOwNOl1KbCvpJ8BhwFtgb/GEJerYzxBOFf/KMU8M7N/Af+q7WBc3eVVTM7VP6VAp6TXHYFlMcXi6jBPEM7VP7OA7pK6SdoKOAF4IuaYXB3kCcK5OkzSA8CrQE9JpZJON7ONwNnAs8C7wCQzmx9nnK5u8stcnXPOpeQlCOeccyl5gnDOOZeSJwjnnHMpeYJwzjmXkicI55xzKXmCcM45l5InCFejJJmkG5JeXyDp8hra9wRJw2tiXxmO83NJ70p6sdz8rpLWSyqR9E40rkLs/0OSjpbUO0f7bi7pJUkF0ft/O/NWGfd5hKQ/1ER8Lrdi/3K7eudb4GeS2scdSLKoC+xsnQ6caWYHplj2gZkVAnsBvYGjszx+Lvs9OzqKJWtViOc04F9m9n1Vg6rE08BRklrU4D5dDniCcDVtIzAe+O/yC8qXACStjR4HR2epkyQtlHSNpBGSZkqaJ2mXpN0cHI2KtlDSEdH2BZKukzRL0luSRift90VJ9wPzUsRzYrT/tyVdG837HTAQuE3SdeneZHS38gxgV0m/io49V9IjiR++6P3eGJVErpXUV9IMSW9Gjz2j9UZKekzSk5I+knS2pPOi9V6TtG203i6S/i1pTvQZ9JLUHzgKuC4q2eySar008RwQbVMSHatVirc6Ang83ecQ7fegaPt50QBGTaP5QyW9J+kVSX+R9FT02RkwFTiisv26PGBmPvlUYxNh8JrWwGKgDXABcHm0bAIwPHnd6HEwsBr4EWGcgk+AP0TL/gu4KWn7fxNObLoTOqVrBowCLovWaQrMBrpF+/0a6JYizh2Bj4EOhF6N/wMcHS2bChSl2KYr8Hb0vAWhz6MhQLukda4EzkmK9ymgIHrdGmgcPT8YeCR6PhJYBLSK4vkSGBMt+zMwNnr+AtA9er4v8J80n2tl6yXH8yQwIHreMhFb0n62Aj5L9f6T5jUjdC3eI3r9D2Bs0vxu0fwHgKeSthsB3Bz399Wnyifv7tvVODP7StI/gHOB9VluNsvMPgWQ9AEwJZo/D0iu6plkYeCb9yV9CPQCDgX2SiqdtCEkkO+AmWb2UYrjFQNTzWxFdMyJwCDgsQxx7iKphDB85+Nm9kx0Jn4lYZyFloQ+kBL+aZurZ9oA90jqHm3fJGm9F81sDbBG0peEH+/E+99LUkugP/BP6YfevCsM+pPFesnxTAdujN77v8ys/DCk7QmJuzI9gY/MbGH0+h7gLEKS/TDps3+AkMgTlhOStMtjniBcrtwEvAH8PWneRqJqTYVfr62Sln2b9HxT0utNlP2elu88zAjjH5xjZsk/zEgaTChBpJJqzIRsJNogkk0glD7mShpJKLkkJB//CkIiOEZSV8KPaEKm998IWJ3i2OVlWu+HeMzsGklPA0OB1yQdbGbvJa27nlASqEy6zzHT59uM7E8eXEy8DcLlhJl9DkwiNPgmLAZ+HD0fRtkz6Gz9XFKjqF1iZ2AB4Yz915KaAEjqIWnrDPt5HThAUvuoAftE4KVqxAOhaujT6PiVjfPchlB9BqFaKWtm9hXwkaSfQ0iwkvaOFq+JYsi0XhmSdjGzeWZ2LaFarle5Y34BFEiqLEm8B3SVtGv0+mTC5/gesHOUCAGOL7ddD2CLr4hyueUJwuXSDYRqioQ7CD/KMwl14+nO7iuzgPAD9Ayhnv4b4E7gHeANhcswbydD6TiqzroYeBGYC7xhZpU2xlbit4SE8xzhhzGdPwFXS5oOVOWqqoQRwOmS5gLzCUkWwpjTF0YNxbtUsl55Y6MG+rmEs/lnUqwzhdBon5DoVrxUUilwJHAqoUprHqHEc5uZrQfOBP4t6RXg/whtKwkHEq5mcnnMu/t2zqUlaR/gPDM7uRrbtjSztVF14i3A+2b2Z0nbA/eb2UE1Ha+rWV6CcM6lZWZvAi+qaveRJPwqatCfT6heuz2a3xk4v2YidLnkJQjnnHMpeQnCOedcSp4gnHPOpeQJwjnnXEqeIJxzzqXkCcI551xKniCcc86l9P+miH0H/EMqkwAAAABJRU5ErkJggg==\n", 671 | "text/plain": [ 672 | "
" 673 | ] 674 | }, 675 | "metadata": { 676 | "needs_background": "light" 677 | }, 678 | "output_type": "display_data" 679 | } 680 | ], 681 | "source": [ 682 | "import matplotlib.pyplot as plt\n", 683 | "%matplotlib inline\n", 684 | "\n", 685 | "plt.plot(num_params, [1 - acc for acc in test_accs], 'bo-')\n", 686 | "plt.plot(num_params, [1-inf_test_acc]*len(widths), 'k--', label='Infinite Width')\n", 687 | "plt.axvline(x=40000, color='r', linestyle='--')\n", 688 | "plt.xscale(\"log\")\n", 689 | "plt.xlabel(\"Number of Parameters (Log)\")\n", 690 | "plt.ylabel(\"Test Error (1 - Acc.)\")\n", 691 | "plt.title(\"Double Descent with ReLU Network on MNIST\")\n", 692 | "plt.legend()" 693 | ] 694 | } 695 | ], 696 | "metadata": { 697 | "kernelspec": { 698 | "display_name": "dl_tutorial", 699 | "language": "python", 700 | "name": "dl_tutorial" 701 | }, 702 | "language_info": { 703 | "codemirror_mode": { 704 | "name": "ipython", 705 | "version": 3 706 | }, 707 | "file_extension": ".py", 708 | "mimetype": "text/x-python", 709 | "name": "python", 710 | "nbconvert_exporter": "python", 711 | "pygments_lexer": "ipython3", 712 | "version": "3.7.10" 713 | } 714 | }, 715 | "nbformat": 4, 716 | "nbformat_minor": 5 717 | } 718 | --------------------------------------------------------------------------------