├── LICENSE ├── README.md ├── algorithms ├── AccGradientDescent.ipynb ├── ConjugateGrad_BBGrad.ipynb ├── GradientDescent (Basic).ipynb ├── GradientDescent (LineSearch).ipynb ├── GradientDescent (Non Differentiable).ipynb ├── ProxGradientDescent.ipynb └── README.txt └── functions ├── HingeLossTest.ipynb ├── LeastSquaresTest.ipynb ├── LogisticLossTest.ipynb └── README.txt /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Rishabh Iyer 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # A repository containing all the demos for my "Optimization in Machine Learning Courses" 2 | 3 | Here are the repositories for my Optimization in Machine Learning courses: 4 | - Spring 2020: https://github.com/rishabhk108/OptimizationML 5 | - Spring 2021: https://github.com/rishabhk108/AdvancedOptML 6 | 7 | # Prerequisites 8 | - Numpy 9 | - Scipy 10 | 11 | # Setup instructions 12 | Other than the prerequisites listed above, this repository should be self constained. If you would like to try this out, feel free to clone this repo, open jupyter notebook and run this locally. Please reach out to rishabh.iyer@utdallas.edu if you have any difficulties running this. 13 | 14 | # Acknowledgments 15 | I would like to acnowledge Mark Schmidt (https://www.cs.ubc.ca/~schmidtm/) from UBC for this. I converted his Matlab based tutorial to python. In particular, I used 16 | his summer school and tutorial slides as a reference! 17 | 18 | 19 | 20 | -------------------------------------------------------------------------------- /algorithms/AccGradientDescent.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### Dataset\n", 8 | "Lets Load the dataset. 
We shall use the following datasets:\n", 9 | "Features are in: \"sido0_train.mat\"\n", 10 | "Labels are in: \"sido0_train.targets\"" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 2, 16 | "metadata": {}, 17 | "outputs": [ 18 | { 19 | "name": "stdout", 20 | "output_type": "stream", 21 | "text": [ 22 | "(12678, 4932)\n" 23 | ] 24 | } 25 | ], 26 | "source": [ 27 | "from scipy.io import loadmat\n", 28 | "import numpy as np\n", 29 | "\n", 30 | "X = loadmat(r\"sido0_matlab/sido0_train.mat\")\n", 31 | "y = np.loadtxt(r\"sido0_matlab/sido0_train.targets\")\n", 32 | "\n", 33 | "# Statistics of the Dense Format of X\n", 34 | "X = X['X'].todense()\n", 35 | "print(X.shape)" 36 | ] 37 | }, 38 | { 39 | "cell_type": "markdown", 40 | "metadata": {}, 41 | "source": [ 42 | "### Logistic Regression Definition\n", 43 | "Lets use the Logistic Regression definition we previously used\n" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 2, 49 | "metadata": {}, 50 | "outputs": [], 51 | "source": [ 52 | "def LogisticLoss(w, X, y, lam):\n", 53 | " # Computes the cost function for all the training samples\n", 54 | " m = X.shape[0]\n", 55 | " Xw = np.dot(X,w)\n", 56 | " yT = y.reshape(-1,1)\n", 57 | " yXw = np.multiply(yT,Xw)\n", 58 | " f = np.sum(np.logaddexp(0,-yXw)) + 0.5*lam*np.sum(np.multiply(w,w))\n", 59 | " gMul = 1/(1 + np.exp(yXw))\n", 60 | " ymul = -1*np.multiply(yT, gMul)\n", 61 | " g = np.dot(ymul.reshape(1,-1),X) + lam*w.reshape(1,-1)\n", 62 | " g = g.reshape(-1,1)\n", 63 | " return [f, g] " 64 | ] 65 | }, 66 | { 67 | "cell_type": "markdown", 68 | "metadata": {}, 69 | "source": [ 70 | "### Reinvoking Gradient Descent Armiojo V4\n", 71 | "Lets invoke the final version of Armijo Line Search GD" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": 3, 77 | "metadata": {}, 78 | "outputs": [], 79 | "source": [ 80 | "from numpy import linalg as LA\n", 81 | "\n", 82 | "def gdArmijo(funObj,w,maxEvals,alpha,gamma,X,y,lam, 
verbosity, freq):\n", 83 | " [f,g] = funObj(w,X,y,lam)\n", 84 | " funEvals = 1\n", 85 | " funVals = []\n", 86 | " f_old = f\n", 87 | " g_old = g\n", 88 | " funVals.append(f)\n", 89 | " alpha = 1/LA.norm(g)\n", 90 | " numBackTrack = 0\n", 91 | " while(1):\n", 92 | " wp = w - alpha*g\n", 93 | " [fp,gp] = funObj(wp,X,y,lam)\n", 94 | " funVals.append(f)\n", 95 | " funEvals = funEvals+1\n", 96 | " backtrack = 0\n", 97 | " while fp > f - gamma*alpha*np.dot(g.T, g):\n", 98 | " alpha = alpha*alpha*np.dot(g.T, g)[0,0]/(2*(fp + np.dot(g.T, g)[0,0]*alpha - f))\n", 99 | " wp = w - alpha*g\n", 100 | " [fp,gp] = funObj(wp,X,y,lam)\n", 101 | " funVals.append(f)\n", 102 | " funEvals = funEvals+1\n", 103 | " numBackTrack = numBackTrack + 1\n", 104 | " if funEvals > 2:\n", 105 | " alpha = min(1,2*(f_old - f)/np.dot(g.T, g)[0,0])\n", 106 | " f_old = f\n", 107 | " g_old = g\n", 108 | " w = wp\n", 109 | " f = fp\n", 110 | " g = gp\n", 111 | " optCond = LA.norm(g, np.inf)\n", 112 | " if ((verbosity > 0) and (funEvals % freq == 0)):\n", 113 | " print(funEvals,alpha,f,optCond)\n", 114 | " if (optCond < 1e-2):\n", 115 | " break\n", 116 | " if (funEvals >= maxEvals):\n", 117 | " break\n", 118 | " return (funVals,numBackTrack)" 119 | ] 120 | }, 121 | { 122 | "cell_type": "markdown", 123 | "metadata": {}, 124 | "source": [ 125 | "### Gradient Descent with Line Search\n", 126 | "#### Version 1: Armijo Backtracking Line Search\n", 127 | "Lets now define the most basic version of Gradient Descent and tune the learning rate!" 
128 | ] 129 | }, 130 | { 131 | "cell_type": "code", 132 | "execution_count": 4, 133 | "metadata": {}, 134 | "outputs": [], 135 | "source": [ 136 | "from numpy import linalg as LA\n", 137 | "\n", 138 | "def gdAccelerated(funObj,w,maxEvals,alpha,gamma,X,y,lam, verbosity, freq):\n", 139 | " [f,g] = funObj(w,X,y,lam)\n", 140 | " funEvals = 1\n", 141 | " funVals = []\n", 142 | " funVals.append(f)\n", 143 | " numBackTrack = 0\n", 144 | " t = 1; \n", 145 | " x = w;\n", 146 | " while(1):\n", 147 | " if (funEvals > 1):\n", 148 | " tp = (1 + np.sqrt(1+4*t*t))/2\n", 149 | " x = w + ((t-1)/tp)*(w-w_old)\n", 150 | " t = tp\n", 151 | " [f,g] = funObj(x,X,y,lam)\n", 152 | " funEvals = funEvals+1\n", 153 | " w_old = w\n", 154 | " wp = x - alpha*g; \n", 155 | " [fp,gp] = funObj(wp,X,y,lam)\n", 156 | " funEvals = funEvals+1;\n", 157 | " backtrack = 0\n", 158 | " while fp > f - gamma*alpha*np.dot(g.T, g):\n", 159 | " alpha = alpha*alpha*np.dot(g.T, g)[0,0]/(2*(fp + np.dot(g.T, g)[0,0]*alpha - f))\n", 160 | " wp = x - alpha*g; \n", 161 | " [fp,gp] = funObj(wp,X,y,lam)\n", 162 | " funEvals = funEvals+1; \n", 163 | " funVals.append(f)\n", 164 | " numBackTrack = numBackTrack + 1\n", 165 | " backtrack = 1 \n", 166 | " w = wp\n", 167 | " f = fp\n", 168 | " g = gp\n", 169 | " if (backtrack == 0):\n", 170 | " funVals.append(f)\n", 171 | " optCond = LA.norm(g, np.inf)\n", 172 | " if ((verbosity > 0) and (funEvals % freq == 0)):\n", 173 | " print(funEvals,alpha,f,optCond)\n", 174 | " if (optCond < 1e-2):\n", 175 | " break\n", 176 | " if (funEvals >= maxEvals):\n", 177 | " break\n", 178 | " return (funVals,numBackTrack)" 179 | ] 180 | }, 181 | { 182 | "cell_type": "code", 183 | "execution_count": 5, 184 | "metadata": {}, 185 | "outputs": [ 186 | { 187 | "name": "stdout", 188 | "output_type": "stream", 189 | "text": [ 190 | "20 4.1796056076066775e-05 1012.4951823907003 95.53384664247373\n", 191 | "30 3.997469461451199e-05 953.5753177548778 35.655121235018115\n", 192 | "40 0.00012388804702371698 
885.1699025757175 31.335650768554615\n", 193 | "50 1.3144239424878223e-05 868.4387633860271 22.72480197211717\n", 194 | "60 3.19951286344067e-05 846.6514541812807 38.97806992194706\n", 195 | "70 6.404017174599795e-05 830.1929117099367 42.25847013698607\n", 196 | "80 0.00011081246239613855 812.8209357043669 24.121207738723843\n", 197 | "90 3.612908378647803e-05 797.2790559279035 24.85602397608963\n", 198 | "100 6.343884114915358e-05 785.5404258802879 19.27475140750184\n", 199 | "110 0.00013841402942537257 769.2091021378449 22.493117400607588\n", 200 | "120 9.712124304379424e-05 748.6620774703173 17.151002599529306\n", 201 | "130 0.00023936031789675434 729.0076217026603 17.03859779736474\n", 202 | "140 1.9406980779655936e-05 692.6012054315212 17.644861089297766\n", 203 | "150 6.422924249528637e-05 679.7141080649994 15.576008890767799\n", 204 | "160 2.41322291549794e-05 669.1484547018877 28.44097383399261\n", 205 | "170 4.8713805178756046e-05 660.6072256831944 14.413102172561631\n", 206 | "180 1.2529301830671562e-05 653.9827669332112 28.252653890481643\n", 207 | "190 3.819751677923672e-05 647.1789725869651 13.116702915237225\n", 208 | "210 0.00026940911215927874 623.2877509167935 12.716496769920353\n", 209 | "220 5.7479250462785626e-05 610.9093464901916 11.902977748188208\n", 210 | "230 3.513706007390374e-05 600.7644208674036 12.009462306254827\n", 211 | "240 1.002861071551496e-05 592.4282743265723 14.492333059911735\n", 212 | "250 4.002863754206335e-05 585.1233740802434 21.94867365796802\n", 213 | "250\n", 214 | "Number of Backtrackings = 35\n" 215 | ] 216 | } 217 | ], 218 | "source": [ 219 | "[nSamples,nVars] = X.shape\n", 220 | "w = np.zeros((nVars,1))\n", 221 | "(funV1,numBackTrack) = gdArmijo(LogisticLoss,w,250,1,1e-4,X,y,1,1,10)\n", 222 | "print(len(funV1))\n", 223 | "print(\"Number of Backtrackings = \" + str(numBackTrack))" 224 | ] 225 | }, 226 | { 227 | "cell_type": "code", 228 | "execution_count": 6, 229 | "metadata": {}, 230 | "outputs": [ 231 | { 232 | 
"name": "stderr", 233 | "output_type": "stream", 234 | "text": [ 235 | "/Library/Python/3.7/site-packages/ipykernel_launcher.py:8: RuntimeWarning: overflow encountered in exp\n", 236 | " \n" 237 | ] 238 | }, 239 | { 240 | "name": "stdout", 241 | "output_type": "stream", 242 | "text": [ 243 | "20 1.2270551792159088e-05 3421.4772580811746 447.0919884294048\n", 244 | "30 1.2270551792159088e-05 1139.9065594799376 80.13019693661147\n", 245 | "40 1.2270551792159088e-05 1047.8904744907043 54.85223246465415\n", 246 | "50 1.2270551792159088e-05 985.291868887159 35.03866383246867\n", 247 | "60 1.2270551792159088e-05 938.086984407997 27.327323327228356\n", 248 | "70 1.2270551792159088e-05 897.1309162835348 23.599128324252504\n", 249 | "80 1.2270551792159088e-05 858.3135600342138 21.437055141153905\n", 250 | "90 1.2270551792159088e-05 820.5533876518972 19.893967924792296\n", 251 | "100 1.2270551792159088e-05 783.6170359264083 18.610368547231452\n", 252 | "110 1.2270551792159088e-05 747.8221226783364 17.440211008769058\n", 253 | "120 1.2270551792159088e-05 713.306991704953 16.288372031217055\n", 254 | "130 1.2270551792159088e-05 679.9772013181196 15.032861232889823\n", 255 | "140 1.2270551792159088e-05 647.823562879202 13.553023797030034\n", 256 | "150 1.2270551792159088e-05 616.9347270841175 11.939031480531353\n", 257 | "160 1.2270551792159088e-05 587.3827686140373 11.061990453498689\n", 258 | "170 1.2270551792159088e-05 559.161359353416 10.280160464064606\n", 259 | "180 1.2270551792159088e-05 532.2136615719661 9.59112748859084\n", 260 | "190 1.2270551792159088e-05 506.4926368373115 8.9720849813705\n", 261 | "200 1.2270551792159088e-05 481.9761100536828 8.411442672938987\n", 262 | "210 1.2270551792159088e-05 458.6449002739151 7.912043009677313\n", 263 | "220 1.2270551792159088e-05 436.4684377305792 7.476681133777766\n", 264 | "230 1.2270551792159088e-05 415.40720705430977 7.098858815334528\n", 265 | "240 1.2270551792159088e-05 395.42003424408847 6.765661390083761\n", 266 | 
"250 1.2270551792159088e-05 376.46858369911143 6.464900701773227\n", 267 | "132\n", 268 | "Number of Backtrackings = 14\n" 269 | ] 270 | } 271 | ], 272 | "source": [ 273 | "[nSamples,nVars] = X.shape\n", 274 | "w = np.zeros((nVars,1))\n", 275 | "(funV2,numBackTrack) = gdAccelerated(LogisticLoss,w,250,1,1e-4,X,y,1,1,10)\n", 276 | "print(len(funV2))\n", 277 | "print(\"Number of Backtrackings = \" + str(numBackTrack))" 278 | ] 279 | } 280 | ], 281 | "metadata": { 282 | "kernelspec": { 283 | "display_name": "Python 2", 284 | "language": "python", 285 | "name": "python2" 286 | }, 287 | "language_info": { 288 | "codemirror_mode": { 289 | "name": "ipython", 290 | "version": 3 291 | }, 292 | "file_extension": ".py", 293 | "mimetype": "text/x-python", 294 | "name": "python", 295 | "nbconvert_exporter": "python", 296 | "pygments_lexer": "ipython3", 297 | "version": "3.7.3" 298 | } 299 | }, 300 | "nbformat": 4, 301 | "nbformat_minor": 2 302 | } 303 | -------------------------------------------------------------------------------- /algorithms/ConjugateGrad_BBGrad.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### Dataset\n", 8 | "Lets Load the dataset. 
We shall use the following datasets:\n", 9 | "Features are in: \"sido0_train.mat\"\n", 10 | "Labels are in: \"sido0_train.targets\"" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 10, 16 | "metadata": {}, 17 | "outputs": [ 18 | { 19 | "name": "stdout", 20 | "output_type": "stream", 21 | "text": [ 22 | "(12678, 4932)\n" 23 | ] 24 | } 25 | ], 26 | "source": [ 27 | "from scipy.io import loadmat\n", 28 | "import numpy as np\n", 29 | "\n", 30 | "X = loadmat(r\"/Users/rkiyer/Desktop/teaching/CS6301/jupyter/data/sido0_matlab/sido0_train.mat\")\n", 31 | "y = np.loadtxt(r\"/Users/rkiyer/Desktop/teaching/CS6301/jupyter/data/sido0_matlab/sido0_train.targets\")\n", 32 | "\n", 33 | "# Statistics of the Dense Format of X\n", 34 | "X = X['X'].todense()\n", 35 | "print(X.shape)" 36 | ] 37 | }, 38 | { 39 | "cell_type": "markdown", 40 | "metadata": {}, 41 | "source": [ 42 | "### Logistic Regression Definition\n", 43 | "Lets use the Logistic Regression definition we previously used\n" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 11, 49 | "metadata": {}, 50 | "outputs": [], 51 | "source": [ 52 | "def LogisticLoss(w, X, y, lam):\n", 53 | " # Computes the cost function for all the training samples\n", 54 | " m = X.shape[0]\n", 55 | " Xw = np.dot(X,w)\n", 56 | " yT = y.reshape(-1,1)\n", 57 | " yXw = np.multiply(yT,Xw)\n", 58 | " f = np.sum(np.logaddexp(0,-yXw)) + 0.5*lam*np.sum(np.multiply(w,w))\n", 59 | " gMul = 1/(1 + np.exp(yXw))\n", 60 | " ymul = -1*np.multiply(yT, gMul)\n", 61 | " g = np.dot(ymul.reshape(1,-1),X) + lam*w.reshape(1,-1)\n", 62 | " g = g.reshape(-1,1)\n", 63 | " return [f, g] " 64 | ] 65 | }, 66 | { 67 | "cell_type": "markdown", 68 | "metadata": {}, 69 | "source": [ 70 | "### Barzelia Borwein step length\n", 71 | "Lets invoke BB Step Length Gradient Descent" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": 24, 77 | "metadata": {}, 78 | "outputs": [], 79 | "source": [ 80 | "from numpy import 
linalg as LA\n", 81 | "\n", 82 | "def gdBB(funObj,w,maxEvals,alpha,gamma,X,y,lam, verbosity, freq):\n", 83 | " [f,g] = funObj(w,X,y,lam)\n", 84 | " funEvals = 1\n", 85 | " funVals = []\n", 86 | " f_old = f\n", 87 | " g_old = g\n", 88 | " funVals.append(f)\n", 89 | " numBackTrack = 0\n", 90 | " while(1):\n", 91 | " wp = w - alpha*g\n", 92 | " [fp,gp] = funObj(wp,X,y,lam)\n", 93 | " funVals.append(f)\n", 94 | " funEvals = funEvals+1\n", 95 | " backtrack = 0\n", 96 | " if funEvals > 2:\n", 97 | " g_diff = g - g_old\n", 98 | " alpha = -alpha*np.dot(g_old.T, g_diff)[0,0]/np.dot(g_diff.T, g_diff)[0,0]\n", 99 | " while fp > f - gamma*alpha*np.dot(g.T, g):\n", 100 | " alpha = alpha*alpha*np.dot(g.T, g)[0,0]/(2*(fp + np.dot(g.T, g)[0,0]*alpha - f))\n", 101 | " wp = w - alpha*g\n", 102 | " [fp,gp] = funObj(wp,X,y,lam)\n", 103 | " funVals.append(f)\n", 104 | " funEvals = funEvals+1\n", 105 | " numBackTrack = numBackTrack + 1\n", 106 | " f_old = f\n", 107 | " g_old = g\n", 108 | " w = wp\n", 109 | " f = fp\n", 110 | " g = gp\n", 111 | " optCond = LA.norm(g, np.inf)\n", 112 | " if ((verbosity > 0) and (funEvals % freq == 0)):\n", 113 | " print(funEvals,alpha,f,optCond)\n", 114 | " if (optCond < 1e-2):\n", 115 | " break\n", 116 | " if (funEvals >= maxEvals):\n", 117 | " break\n", 118 | " return (funVals,numBackTrack)" 119 | ] 120 | }, 121 | { 122 | "cell_type": "code", 123 | "execution_count": 25, 124 | "metadata": {}, 125 | "outputs": [ 126 | { 127 | "name": "stderr", 128 | "output_type": "stream", 129 | "text": [ 130 | "/usr/local/lib/python3.7/site-packages/ipykernel_launcher.py:8: RuntimeWarning: overflow encountered in exp\n", 131 | " \n" 132 | ] 133 | }, 134 | { 135 | "name": "stdout", 136 | "output_type": "stream", 137 | "text": [ 138 | "30 2.5788899258035455e-05 1135.7630614160032 76.76587360949645\n", 139 | "40 3.4479636532951836e-05 932.6568131903588 87.65958408181322\n", 140 | "50 4.021247435512306e-06 849.9100523680455 43.4455639307481\n", 141 | "70 
1.7793550062790673e-05 776.6339625092082 56.759560073959456\n", 142 | "80 0.00013351557593887492 744.1113239997849 39.28022856286154\n", 143 | "100 8.059370484177815e-09 687.6850460523219 74.14968315884308\n", 144 | "110 0.0003390191457756867 661.652761006767 21.602772936085945\n", 145 | "120 0.004059006282489887 518.5101896471618 43.03303982013437\n", 146 | "130 3.537647992869348e-05 513.0734220349092 8.84234859224609\n", 147 | "150 0.00254417659060005 448.2235645802006 80.78196339937944\n", 148 | "160 7.613967174596072e-05 424.1394663061126 16.374169898800403\n", 149 | "170 2.5668319295286767e-08 416.9102811833201 49.00940438219084\n", 150 | "190 1.1284701287159109e-05 379.81228121212956 21.89514315379142\n", 151 | "220 9.464450390497666e-07 237.86073095644207 9.047437842072526\n", 152 | "230 0.004223306733352749 227.94326633703025 50.92814591412953\n", 153 | "240 1.60654955766724e-05 216.1835035208207 3.451857941580089\n", 154 | "250 4.134166791547012e-05 153.586334083372 7.265833688540766\n", 155 | "250\n", 156 | "Number of Backtrackings = 75\n" 157 | ] 158 | } 159 | ], 160 | "source": [ 161 | "[nSamples,nVars] = X.shape\n", 162 | "w = np.zeros((nVars,1))\n", 163 | "(funV1,numBackTrack) = gdBB(LogisticLoss,w,250,1,1e-4,X,y,1,1,10)\n", 164 | "print(len(funV1))\n", 165 | "print(\"Number of Backtrackings = \" + str(numBackTrack))" 166 | ] 167 | }, 168 | { 169 | "cell_type": "markdown", 170 | "metadata": {}, 171 | "source": [ 172 | "### Conjugate Gradient Descent\n", 173 | "Nonlinear Conjugate Gradient Descent" 174 | ] 175 | }, 176 | { 177 | "cell_type": "code", 178 | "execution_count": 37, 179 | "metadata": {}, 180 | "outputs": [], 181 | "source": [ 182 | "from numpy import linalg as LA\n", 183 | "\n", 184 | "def gdCG(funObj,w,maxEvals,alpha,gamma,X,y,lam, verbosity, freq):\n", 185 | " [f,g] = funObj(w,X,y,lam)\n", 186 | " funEvals = 1\n", 187 | " funVals = []\n", 188 | " f_old = f\n", 189 | " g_old = g\n", 190 | " funVals.append(f)\n", 191 | " numBackTrack = 
0\n", 192 | " d = g\n", 193 | " while(1):\n", 194 | " wp = w - alpha*d\n", 195 | " [fp,gp] = funObj(wp,X,y,lam)\n", 196 | " funVals.append(f)\n", 197 | " funEvals = funEvals+1\n", 198 | " backtrack = 0\n", 199 | " if funEvals > 2:\n", 200 | " alpha = min(1,2*(f_old - f)/np.dot(g.T, g)[0,0])\n", 201 | " beta = np.dot(g.T, g)[0,0]/np.dot(g_old.T, g_old)[0,0]\n", 202 | " d = g + beta*d\n", 203 | " else:\n", 204 | " d = g\n", 205 | " while fp > f - gamma*alpha*np.dot(g.T, d)[0,0]:\n", 206 | " alpha = alpha*alpha*np.dot(g.T, d)[0,0]/(2*(fp + np.dot(g.T, d)[0,0]*alpha - f))\n", 207 | " wp = w - alpha*d\n", 208 | " [fp,gp] = funObj(wp,X,y,lam)\n", 209 | " funVals.append(f)\n", 210 | " funEvals = funEvals+1\n", 211 | " numBackTrack = numBackTrack + 1\n", 212 | " f_old = f\n", 213 | " g_old = g\n", 214 | " w = wp\n", 215 | " f = fp\n", 216 | " g = gp\n", 217 | " optCond = LA.norm(g, np.inf)\n", 218 | " if ((verbosity > 0) and (funEvals % freq == 0)):\n", 219 | " print(funEvals,alpha,f,optCond)\n", 220 | " if (optCond < 1e-2):\n", 221 | " break\n", 222 | " if (funEvals >= maxEvals):\n", 223 | " break\n", 224 | " return (funVals,numBackTrack)" 225 | ] 226 | }, 227 | { 228 | "cell_type": "code", 229 | "execution_count": 38, 230 | "metadata": {}, 231 | "outputs": [ 232 | { 233 | "name": "stderr", 234 | "output_type": "stream", 235 | "text": [ 236 | "/usr/local/lib/python3.7/site-packages/ipykernel_launcher.py:8: RuntimeWarning: overflow encountered in exp\n", 237 | " \n" 238 | ] 239 | }, 240 | { 241 | "name": "stdout", 242 | "output_type": "stream", 243 | "text": [ 244 | "30 4.8248838390056086e-05 1100.5825812169905 71.17864014090668\n", 245 | "40 5.761205021221726e-06 1020.2047566018817 60.08817550984056\n", 246 | "50 7.00499199239626e-07 1002.4726260840971 153.62023184767298\n", 247 | "60 7.545618878621844e-06 925.5611967492454 85.61509239991781\n", 248 | "70 1.1115290013781674e-07 835.8507464241959 91.58198734211311\n", 249 | "90 4.441863657411808e-05 751.702281580756 
101.83346471201462\n", 250 | "100 4.1046760588252236e-06 732.1216358152724 86.17994084529874\n", 251 | "110 2.3993096858746483e-05 644.2756391402414 27.928830911694643\n", 252 | "120 1.0056307504898691e-06 619.8412799763845 57.64301891477717\n", 253 | "130 3.1198498348610805e-06 611.8077214404658 49.09555617390839\n", 254 | "140 1.1188637267886875e-06 543.3696061152098 85.6780959606216\n", 255 | "150 1.9476528884453286e-06 521.0911809959176 57.99381049868182\n", 256 | "160 3.809031251617791e-06 436.93965622540577 60.16531562869829\n", 257 | "170 8.993239281748671e-06 432.4501776394318 36.25034076350421\n", 258 | "190 9.097237262686172e-06 390.3268532288466 40.463044973828325\n", 259 | "200 2.959477446275187e-07 388.0645119908878 34.67348963196586\n", 260 | "210 4.0064325422324243e-08 323.9043367568432 104.47445243511075\n", 261 | "230 4.2860917660356265e-06 302.5701932483058 29.175853310706646\n", 262 | "240 3.2014786140553776e-06 300.7658397250583 38.647272965098615\n", 263 | "250 9.915357890415802e-06 287.6046370596453 55.160188266289865\n", 264 | "250\n", 265 | "Number of Backtrackings = 73\n" 266 | ] 267 | } 268 | ], 269 | "source": [ 270 | "[nSamples,nVars] = X.shape\n", 271 | "w = np.zeros((nVars,1))\n", 272 | "(funV1,numBackTrack) = gdCG(LogisticLoss,w,250,1,1e-4,X,y,1,1,10)\n", 273 | "print(len(funV1))\n", 274 | "print(\"Number of Backtrackings = \" + str(numBackTrack))" 275 | ] 276 | } 277 | ], 278 | "metadata": { 279 | "kernelspec": { 280 | "display_name": "Python 2", 281 | "language": "python", 282 | "name": "python2" 283 | }, 284 | "language_info": { 285 | "codemirror_mode": { 286 | "name": "ipython", 287 | "version": 3 288 | }, 289 | "file_extension": ".py", 290 | "mimetype": "text/x-python", 291 | "name": "python", 292 | "nbconvert_exporter": "python", 293 | "pygments_lexer": "ipython3", 294 | "version": "3.7.3" 295 | } 296 | }, 297 | "nbformat": 4, 298 | "nbformat_minor": 2 299 | } 300 | 
-------------------------------------------------------------------------------- /algorithms/GradientDescent (Non Differentiable).ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### Dataset\n", 8 | "Lets Load the dataset. We shall use the following datasets:\n", 9 | "\n", 10 | "Features are in: \"sido0_train.mat\"\n", 11 | "\n", 12 | "Labels are in: \"sido0_train.targets\"" 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": 1, 18 | "metadata": {}, 19 | "outputs": [ 20 | { 21 | "name": "stdout", 22 | "output_type": "stream", 23 | "text": [ 24 | "(12678, 4932)\n" 25 | ] 26 | } 27 | ], 28 | "source": [ 29 | "from scipy.io import loadmat\n", 30 | "import numpy as np\n", 31 | "\n", 32 | "X = loadmat(r\"sido0_matlab/sido0_train.mat\")\n", 33 | "y = np.loadtxt(r\"sido0_matlab/sido0_train.targets\")\n", 34 | "\n", 35 | "# Statistics of the Dense Format of X\n", 36 | "X = X['X'].todense()\n", 37 | "print(X.shape)" 38 | ] 39 | }, 40 | { 41 | "cell_type": "markdown", 42 | "metadata": {}, 43 | "source": [ 44 | "### Logistic Regression Definition\n", 45 | "Lets use the Logistic Regression definition we previously used\n" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": 18, 51 | "metadata": {}, 52 | "outputs": [], 53 | "source": [ 54 | "def LogisticLoss(w, X, y, lam):\n", 55 | " # Computes the cost function for all the training samples\n", 56 | " m = X.shape[0]\n", 57 | " Xw = np.dot(X,w)\n", 58 | " yT = y.reshape(-1,1)\n", 59 | " yXw = np.multiply(yT,Xw)\n", 60 | " f = np.sum(np.logaddexp(0,-yXw)) + 0.5*lam*np.sum(np.multiply(w,w))\n", 61 | " gMul = 1/(1 + np.exp(yXw))\n", 62 | " ymul = -1*np.multiply(yT, gMul)\n", 63 | " g = np.dot(ymul.reshape(1,-1),X) + lam*w.reshape(1,-1)\n", 64 | " g = g.reshape(-1,1)\n", 65 | " return [f, g]\n", 66 | "\n", 67 | "def HingeLoss(w, X, y, lam):\n", 68 | " # 
Computes the cost function for all the training samples\n", 69 | " Xw = np.matmul(X,w)\n", 70 | " yT = y.reshape(-1,1)\n", 71 | " yXw = np.multiply(yT,Xw)\n", 72 | " f = np.sum(np.maximum(0, 1 - yXw.T)) + 0.5*np.sum(np.multiply(w,w))\n", 73 | " ymul = -1*np.multiply(yT,np.double(1 > yXw)) \n", 74 | " g = np.matmul(ymul.reshape(1,-1),X).reshape(-1,1) + 1*w.reshape(-1,1)\n", 75 | " return [f, g]" 76 | ] 77 | }, 78 | { 79 | "cell_type": "markdown", 80 | "metadata": {}, 81 | "source": [ 82 | "### Gradient Descent\n", 83 | "Lets now define the most basic version of Gradient Descent and tune the learning rate!" 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": 38, 89 | "metadata": {}, 90 | "outputs": [], 91 | "source": [ 92 | "from numpy import linalg as LA\n", 93 | "\n", 94 | "def gd(funObj,w,maxEvals,alpha,X,y,lam, verbosity, freq):\n", 95 | " [f,g] = funObj(w,X,y,lam)\n", 96 | " funEvals = 1\n", 97 | " funVals = []\n", 98 | " while(1):\n", 99 | " [f,g] = funObj(w,X,y,lam)\n", 100 | " optCond = LA.norm(g, np.inf)\n", 101 | " if ((verbosity > 0) and (funEvals % freq == 0)):\n", 102 | " print(funEvals,alpha,f,optCond)\n", 103 | " w = w - alpha*g\n", 104 | " funEvals = funEvals+1\n", 105 | " if (optCond < 1e-2):\n", 106 | " break\n", 107 | " if (funEvals > maxEvals):\n", 108 | " break\n", 109 | " funVals.append(f)\n", 110 | " return funVals\n", 111 | "\n", 112 | "def gdLinRed(funObj,w,maxEvals,alpha,X,y,lam, verbosity, freq):\n", 113 | " [f,g] = funObj(w,X,y,lam)\n", 114 | " funEvals = 1\n", 115 | " funVals = []\n", 116 | " while(1):\n", 117 | " [f,g] = funObj(w,X,y,lam)\n", 118 | " optCond = LA.norm(g, np.inf)\n", 119 | " if ((verbosity > 0) and (funEvals % freq == 0)):\n", 120 | " print(funEvals,alphaCurr,f,optCond)\n", 121 | " alphaCurr = alpha/(1 + funEvals)\n", 122 | " w = w - alphaCurr*g\n", 123 | " funEvals = funEvals+1\n", 124 | " if (optCond < 1e-2):\n", 125 | " break\n", 126 | " if (funEvals > maxEvals):\n", 127 | " break\n", 128 | " 
funVals.append(f)\n", 129 | " return funVals\n", 130 | "\n", 131 | "[nSamples,nVars] = X.shape\n", 132 | "w = np.zeros((nVars,1))" 133 | ] 134 | }, 135 | { 136 | "cell_type": "markdown", 137 | "metadata": {}, 138 | "source": [ 139 | "### Lets run it!\n", 140 | "Now let us run it for alpha = 0.1" 141 | ] 142 | }, 143 | { 144 | "cell_type": "code", 145 | "execution_count": 20, 146 | "metadata": {}, 147 | "outputs": [ 148 | { 149 | "name": "stdout", 150 | "output_type": "stream", 151 | "text": [ 152 | "10 0.1 11081430.216650046 449.7090942869999\n", 153 | "20 0.1 15151729.399525635 630.0306803579006\n", 154 | "30 0.1 7555920.214349102 553.0018080323345\n", 155 | "40 0.1 38552020.67535276 876.9416237421942\n", 156 | "50 0.1 2052387.1431931367 153.85400290151262\n", 157 | "60 0.1 43949742.89940409 859.7325051168436\n", 158 | "70 0.1 2529174.923226729 1149.8106795278043\n", 159 | "80 0.1 15654176.864465766 601.8215019788339\n", 160 | "90 0.1 1704567.6486949527 419.2249936786189\n", 161 | "100 0.1 12041483.006587211 523.7640551099435\n", 162 | "110 0.1 13371906.217746304 621.6613697515515\n", 163 | "120 0.1 8176441.420134161 1682.8503981140384\n", 164 | "130 0.1 8208039.339663002 552.8612996781367\n", 165 | "140 0.1 7773375.523865128 591.1089961144314\n", 166 | "150 0.1 16433135.472827531 595.5870421462569\n", 167 | "160 0.1 32541631.60821715 882.8960377593298\n", 168 | "170 0.1 4653605.142299113 557.2451278063938\n", 169 | "180 0.1 582181.9214030121 214.54349372148812\n", 170 | "190 0.1 38121321.79078288 792.6282587212445\n", 171 | "200 0.1 7398339.852883474 541.5915398301028\n" 172 | ] 173 | } 174 | ], 175 | "source": [ 176 | "funV = gd(HingeLoss,w,200,1e-1,X,y,1,1,10)" 177 | ] 178 | }, 179 | { 180 | "cell_type": "markdown", 181 | "metadata": {}, 182 | "source": [ 183 | "### Learning Rate is too large!\n", 184 | "Lets try alpha = 1e-3" 185 | ] 186 | }, 187 | { 188 | "cell_type": "code", 189 | "execution_count": 25, 190 | "metadata": {}, 191 | "outputs": [ 192 | { 193 | 
"name": "stdout", 194 | "output_type": "stream", 195 | "text": [ 196 | "10 0.001 355587.39778875076 460.07577570515014\n", 197 | "20 0.001 69326.64777147173 95.53320209721444\n", 198 | "30 0.001 56156.572903245295 274.74392008198674\n", 199 | "40 0.001 47313.147978875175 331.36462437921085\n", 200 | "50 0.001 39365.074324166075 323.0080742392985\n", 201 | "60 0.001 33229.86120211198 295.68492612865964\n", 202 | "70 0.001 33034.67104974243 563.4317434249481\n", 203 | "80 0.001 53489.13529767374 414.5880276348508\n", 204 | "90 0.001 23313.72025036299 379.87314616687405\n", 205 | "100 0.001 19346.753964849846 186.6547260854177\n", 206 | "110 0.001 17551.75115706438 197.36615930706017\n", 207 | "120 0.001 34150.924795420906 369.30583976790024\n", 208 | "130 0.001 12011.749357864688 168.40865352892888\n", 209 | "140 0.001 47427.947761316056 443.90985484087815\n", 210 | "150 0.001 16529.582718681522 709.0744782568646\n", 211 | "160 0.001 14049.20490350971 624.8891680339799\n", 212 | "170 0.001 11325.316701736836 468.7272475723256\n", 213 | "180 0.001 8626.853501177908 308.5498123963688\n", 214 | "190 0.001 8770.070635536722 363.4048329177277\n", 215 | "200 0.001 13264.590717277057 286.9621743218164\n" 216 | ] 217 | } 218 | ], 219 | "source": [ 220 | "funV = gd(HingeLoss,w,200,1e-3,X,y,1,1,10)" 221 | ] 222 | }, 223 | { 224 | "cell_type": "markdown", 225 | "metadata": {}, 226 | "source": [ 227 | "#### Still diverging a little. Lets reduce it further!" 
228 | ] 229 | }, 230 | { 231 | "cell_type": "code", 232 | "execution_count": 26, 233 | "metadata": {}, 234 | "outputs": [ 235 | { 236 | "name": "stdout", 237 | "output_type": "stream", 238 | "text": [ 239 | "10 1e-05 4001.097768276672 452.0815618475043\n", 240 | "20 1e-05 1366.297854913873 114.93464308419857\n", 241 | "30 1e-05 1181.5456698355072 176.03109274912703\n", 242 | "40 1e-05 1083.9936919317488 311.026449950983\n", 243 | "50 1e-05 1002.4064085771486 326.0220176452961\n", 244 | "60 1e-05 937.1806934183722 317.0254136425789\n", 245 | "70 1e-05 887.9812809677371 300.00811139794996\n", 246 | "80 1e-05 845.6996075872528 279.0055708410381\n", 247 | "90 1e-05 822.8618296850332 359.00338052327083\n", 248 | "100 1e-05 775.2344154871388 238.01638654566491\n", 249 | "110 1e-05 731.8448887247665 109.02160432505585\n", 250 | "120 1e-05 735.8579872061902 222.04410237532318\n", 251 | "130 1e-05 694.6207955221581 150.00684558474606\n", 252 | "140 1e-05 694.6162010401406 281.0052551118116\n", 253 | "150 1e-05 660.4361263265093 107.01660026053987\n", 254 | "160 1e-05 691.4332943193691 435.001904696942\n", 255 | "170 1e-05 636.2241861049883 84.99936479327039\n", 256 | "180 1e-05 737.2332022197469 652.9614528188956\n", 257 | "190 1e-05 620.2969886459373 167.9962154617624\n", 258 | "200 1e-05 602.3522522695534 45.04248070250282\n" 259 | ] 260 | } 261 | ], 262 | "source": [ 263 | "funV = gd(HingeLoss,w,200,1e-5,X,y,1,1,10)" 264 | ] 265 | }, 266 | { 267 | "cell_type": "markdown", 268 | "metadata": {}, 269 | "source": [ 270 | "#### Lets reduce it even lower!" 
271 | ] 272 | }, 273 | { 274 | "cell_type": "code", 275 | "execution_count": 28, 276 | "metadata": {}, 277 | "outputs": [ 278 | { 279 | "name": "stdout", 280 | "output_type": "stream", 281 | "text": [ 282 | "10 1e-07 1098.9154640030602 1500.993098203401\n", 283 | "20 1e-07 1031.469451599212 532.9922230107763\n", 284 | "30 1e-07 1008.4179692028557 378.9917665187708\n", 285 | "40 1e-07 992.7039200850411 298.99142592716424\n", 286 | "50 1e-07 981.6551811553716 247.99216523212672\n", 287 | "60 1e-07 973.8391206542256 225.99192194007227\n", 288 | "70 1e-07 967.7594396308126 227.99169624825203\n", 289 | "80 1e-07 962.5723210222791 224.99147405665585\n", 290 | "90 1e-07 958.1058131336175 207.9912584652803\n", 291 | "100 1e-07 954.1142489005043 193.99105597411426\n", 292 | "110 1e-07 950.3798915280574 190.9908644831449\n", 293 | "120 1e-07 946.867327840501 190.99067399236603\n", 294 | "130 1e-07 943.5981598384309 185.99048520177746\n", 295 | "140 1e-07 940.6106774712937 173.99030491137435\n", 296 | "150 1e-07 937.8432148314799 170.99013222114743\n", 297 | "160 1e-07 935.2103228885861 164.98996563109083\n", 298 | "170 1e-07 932.7186689629197 163.98980154119948\n", 299 | "180 1e-07 930.3777314483937 157.98964195147036\n", 300 | "190 1e-07 928.2319097791055 148.9894895618976\n", 301 | "200 1e-07 926.2213786041191 148.98934107247493\n" 302 | ] 303 | } 304 | ], 305 | "source": [ 306 | "funV = gd(HingeLoss,w,200,1e-7,X,y,1,1,10)" 307 | ] 308 | }, 309 | { 310 | "cell_type": "markdown", 311 | "metadata": {}, 312 | "source": [ 313 | "#### Lets now run GD with reducing learning rate!" 
314 | ] 315 | }, 316 | { 317 | "cell_type": "code", 318 | "execution_count": 39, 319 | "metadata": {}, 320 | "outputs": [ 321 | { 322 | "name": "stdout", 323 | "output_type": "stream", 324 | "text": [ 325 | "10 0.2 32245377.447160497 761.4444444444445\n", 326 | "20 0.1 21221626.639986154 641.2421052631579\n", 327 | "30 0.06666666666666667 13461987.527152862 621.2137931034482\n", 328 | "40 0.05 1131515.504746055 312.0051282051282\n", 329 | "50 0.04 3599777.3094393965 507.51673469387754\n", 330 | "60 0.03333333333333333 677433.8785471943 780.9644067796611\n", 331 | "70 0.02857142857142857 594692.1298892801 792.9192546583851\n", 332 | "80 0.025 1911751.828772532 483.44398734177213\n", 333 | "90 0.022222222222222223 2073997.9076215914 483.16629213483145\n", 334 | "100 0.02 2037300.6913947961 482.91959595959594\n", 335 | "110 0.01818181818181818 317219.1214046166 898.8824020016681\n", 336 | "120 0.016666666666666666 810213.3268168628 1716.6711484593839\n", 337 | "130 0.015384615384615385 2259476.333305492 481.053786523554\n", 338 | "140 0.014285714285714285 167913.96511001026 207.5596094552929\n", 339 | "150 0.013333333333333334 642321.0162092161 446.33691275167786\n", 340 | "160 0.0125 190996.22375153864 713.2134433962264\n", 341 | "170 0.011764705882352941 183859.27781249591 562.8630699617125\n", 342 | "180 0.011111111111111112 574771.7290551577 452.12607076350093\n", 343 | "190 0.010526315789473684 511538.19752120675 439.1294903926483\n", 344 | "200 0.01 444068.6387938665 419.6343216080402\n" 345 | ] 346 | } 347 | ], 348 | "source": [ 349 | "funV = gdLinRed(HingeLoss,w,200,2,X,y,1,1,10)" 350 | ] 351 | }, 352 | { 353 | "cell_type": "code", 354 | "execution_count": 35, 355 | "metadata": {}, 356 | "outputs": [ 357 | { 358 | "name": "stdout", 359 | "output_type": "stream", 360 | "text": [ 361 | "10 0.0011111111111111111 7851405.047802182 559.3893977832765\n", 362 | "20 0.0005263157894736842 7320625.151370776 555.381584722673\n", 363 | "30 0.0003448275862068966 
7018463.744998654 553.0870521359082\n", 364 | "40 0.0002564102564102564 6806894.259712851 551.474767748735\n", 365 | "50 0.00020408163265306123 6644177.951273229 550.2315594929117\n", 366 | "60 0.00016949152542372882 6512049.132153587 549.2199797864713\n", 367 | "70 0.00014492753623188405 6400874.176136425 548.3673774533308\n", 368 | "80 0.00012658227848101267 6304949.446919412 547.6306625579989\n", 369 | "90 0.00011235955056179776 6220619.084772194 546.9821743628672\n", 370 | "100 0.00010101010101010101 6145399.5476547405 546.4030976729175\n", 371 | "110 9.174311926605505e-05 6077526.909238482 545.88005377047\n", 372 | "120 8.403361344537815e-05 6015703.447607201 545.4031898355789\n", 373 | "130 7.751937984496124e-05 5958946.748186374 544.9650400642948\n", 374 | "140 7.194244604316547e-05 5906495.277518248 544.5598123172863\n", 375 | "150 6.711409395973155e-05 5857746.833941903 544.1829227372882\n", 376 | "160 6.289308176100629e-05 5812217.0340104345 543.8306816098549\n", 377 | "170 5.9171597633136094e-05 5769510.487439029 543.5000750797226\n", 378 | "180 5.58659217877095e-05 5729300.275570593 543.1886096410287\n", 379 | "190 5.291005291005291e-05 5691313.019940729 542.894198916341\n", 380 | "200 5.0251256281407036e-05 5655317.80847423 542.6150796366619\n" 381 | ] 382 | } 383 | ], 384 | "source": [ 385 | "funV = gdLinRed(HingeLoss,w,200,1e-2,X,y,1,1,10)" 386 | ] 387 | }, 388 | { 389 | "cell_type": "code", 390 | "execution_count": 40, 391 | "metadata": {}, 392 | "outputs": [ 393 | { 394 | "name": "stdout", 395 | "output_type": "stream", 396 | "text": [ 397 | "10 1e-05 33290.15355275317 452.5239804610305\n", 398 | "20 5e-06 29363.80677521616 452.4937178582692\n", 399 | "30 3.3333333333333333e-06 27031.82487985571 452.4757429804942\n", 400 | "40 2.5e-06 25367.366616204876 452.4629129270702\n", 401 | "50 2e-06 24072.156249112057 452.45292887549033\n", 402 | "60 1.6666666666666667e-06 23011.75834632785 452.44475469771027\n", 403 | "70 1.4285714285714286e-06 
22113.96580269146 452.4378338633098\n", 404 | "80 1.25e-06 21335.480485941935 452.43183264962295\n", 405 | "90 1.1111111111111112e-06 20648.283107835887 452.4265350933068\n", 406 | "100 1e-06 20033.195396364645 452.4217933727368\n", 407 | "110 9.090909090909091e-07 19476.512119358453 452.4175018500018\n", 408 | "120 8.333333333333333e-07 18968.098320613382 452.41358240556053\n", 409 | "130 7.692307692307693e-07 18500.247720010913 452.4099756393929\n", 410 | "140 7.142857142857143e-07 18066.963809745142 452.40663533001805\n", 411 | "150 6.666666666666667e-07 17663.488716913147 452.4035248031515\n", 412 | "160 6.25e-07 17285.983938483063 452.40061447091495\n", 413 | "170 5.882352941176471e-07 16931.307742734087 452.3978801161077\n", 414 | "180 5.555555555555556e-07 16596.85611171584 452.39530166622603\n", 415 | "190 5.263157894736843e-07 16280.446629966675 452.3928622984979\n", 416 | "200 5e-07 15980.232114809263 452.3905477741558\n" 417 | ] 418 | } 419 | ], 420 | "source": [ 421 | "funV = gdLinRed(HingeLoss,w,200,1e-4,X,y,1,1,10)" 422 | ] 423 | }, 424 | { 425 | "cell_type": "markdown", 426 | "metadata": {}, 427 | "source": [ 428 | "#### Lets plot the results for different alpha" 429 | ] 430 | }, 431 | { 432 | "cell_type": "code", 433 | "execution_count": 28, 434 | "metadata": {}, 435 | "outputs": [], 436 | "source": [ 437 | "[nSamples,nVars] = X.shape\n", 438 | "w = np.zeros((nVars,1))\n", 439 | "funV1 = gd(LogisticLoss,w,200,1e-4,X,y,1,0,0)\n", 440 | "funV2 = gd(LogisticLoss,w,200,1e-5,X,y,1,0,0)\n", 441 | "funV3 = gd(LogisticLoss,w,200,1e-6,X,y,1,0,0)\n", 442 | "funV4 = gd(LogisticLoss,w,200,1e-7,X,y,1,0,0)" 443 | ] 444 | }, 445 | { 446 | "cell_type": "code", 447 | "execution_count": 29, 448 | "metadata": {}, 449 | "outputs": [ 450 | { 451 | "data": { 452 | "text/plain": [ 453 | "Text(0, 0.5, 'Function Value')" 454 | ] 455 | }, 456 | "execution_count": 29, 457 | "metadata": {}, 458 | "output_type": "execute_result" 459 | }, 460 | { 461 | "data": { 462 | 
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAZEAAAEGCAYAAACkQqisAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAgAElEQVR4nOydd3hcxdX/P2dXqy5ZsixX2diAjQvdphgINWCTZiAkgZDEeQOBBPIjjdBSIAGTEEgIpoUaIAk4vJQXh94JEBsXMLZxk7tlq/e6dX5/3HtXK2l3tSprC+/5PM8+2p07d+7sane+98w5c0aMMSiKoihKf3Dt6w4oiqIon11URBRFUZR+oyKiKIqi9BsVEUVRFKXfqIgoiqIo/SZtX3dgbzNixAgzceLEfd0NRVGUzxQrV66sMcYUdy9PORGZOHEiK1as2NfdUBRF+UwhIjuilet0lqIoitJvVEQURVGUfqMioiiKovSblPOJKIoytPD7/ZSVldHR0bGvu6IAmZmZlJSU4PF4EqqvIqIoyj6lrKyMvLw8Jk6ciIjs6+6kNMYYamtrKSsrY9KkSQmdo9NZiqLsUzo6OigqKlIBGQKICEVFRX2yClVEFEXZ56iADB36+r9QEekDvkCIp1bsIhTS9PmKoiigItIn/rulhqufXs2a3Y37uiuKouwFJk6cSE1NzYDrDBb/+7//y4wZM3C5XANaNP3MM88gIoOy8FpFpA/4AiEAGtv9+7gniqKkIoceeijPPvssJ598cr/baG5u5s477+S4444blD6piPQBZxar1RvYtx1RFGVQOeecc5g5cyYzZszggQce6HF8+/btTJ06lYsuuohp06Zx/vnn09bWFj5+1113cfTRR3PYYYexYcMGAJYtW8bs2bM56qijOOGEE9i4ceOA+zlt2jQOOeSQHuXBYJBf/OIXHHPMMRx++OHcf//9Mdv49a9/zTXXXENmZuaA+wN7IcRXRNzACmC3MeZLIjIJWAQUASuBbxtjfCKSATwOzARqgW8YY7bbbVwHXAwEgSuNMa/a5XOBOwE38JAx5g/JfC8heyvhZhURRUkKv/33p6zb0zSobU4fm88NX54Rt84jjzzC8OHDaW9v55hjjuGrX/0qRUVFXeps3LiRhx9+mBNPPJHvfe973HvvvVx11VUAjBgxgo8++oh7772X22+/nYceeoipU6fy3nvvkZaWxhtvvMH111/PM88806XN5uZmPve5z0Xt0xNPPMH06dMTeo8PP/www4YNY/ny5Xi9Xk488UTOOuusHmG6H330Ebt27eKLX/wit912W0Jt98beWCfyY2A9kG+/vhW4wxizSET+iiUO99l/640xB4vIBXa9b4jIdOACYAYwFnhDRKbYbd0DnAmUActFZLExZl2y3ogjIi0dKiKKsj+xcOFCnnvuOQB27dpFaWlpDxEZP348J554IgDf+ta3WLhwYVhEzjvvPABmzpzJs88+C0BjYyPz58+ntLQUEcHv7zkNnpeXx6pVqwbc/9dee43Vq1fz9NNPh69dWlraRURCoRA/+9nPePTRRwd8vUiSKiIiUgJ8EVgA/Eys2LHTgW/aVR4DbsQSkXn2c4Cngbvt+vOARcYYL7BNRDYDx9r1NhtjttrXWmTXTZqIBO35rBa1RBQlKfRmMSSDd955hzfeeIMlS5aQnZ3NqaeeGnWdRPfQ18jXGRkZALjdbgIBa3z49a9/zWmnncZzzz3H9u3bOfXUU3u0OViWiDGGu+66izlz5nQp/+Uvf8mLL74IwLvvvsvatWvD/aioqOArX/kKixcvZtasWQldJxrJtkT+AlwN5Nmvi4AGY4wzCpcB4+zn44BdAMaYgIg02vXHAUsj2ow8Z1e38qieIhG5FLgUYMKECf1+M7YhoiKiKPsRjY2NFBYWkp2dzYYNG1i6dGnUejt37mTJkiXMnj2bJ554gpNOOqnXdseNs4aqWH
f/g2WJzJkzh/vuu4/TTz8dj8fDpk2bGDduHAsWLGDBggXhepFRZKeeeiq33377gAQEkuhYF5EvAVXGmJXJukaiGGMeMMbMMsbMKi7usadKwqgloij7H3PnziUQCDBt2jSuvfZajj/++Kj1DjnkEO655x6mTZtGfX09P/zhD+O2e/XVV3Pddddx1FFHha2TgfLcc89RUlLCkiVL+OIXvxi2PC655BKmT5/O0UcfzaGHHspll102aNfsjWRaIicCXxGRLwCZWD6RO4ECEUmzrZESYLddfzcwHigTkTRgGJaD3Sl3iDwnVnlSUJ+Ioux/ZGRk8PLLL0c9tn37dgBaWlpIS0vjH//4R8w6ALNmzeKdd94BYPbs2WzatCl87Oabbx5wX88991zOPffcHuUul4tbbrmFW265JeG2nH4OlKRZIsaY64wxJcaYiViO8beMMRcBbwPn29XmA8/bzxfbr7GPv2WMMXb5BSKSYUd2TQaWAcuBySIySUTS7WssTtb7gQgRUUtEURQF2DdZfK8BFonIzcDHwMN2+cPA323HeR2WKGCM+VREnsJymAeAK4wxQQAR+RHwKlaI7yPGmE+T2XFnnYhaIoqSWkycOJG1a9fu624MSfaKiBhj3gHesZ9vpTO6KrJOB/C1GOcvwIrw6l7+EvDSIHY1Lo5PRNeJKIqiWOiK9T5gwtNZmvZEURQFVET6hGOJtHqD+7gniqIoQwMVkT6gPhFFUZSuqIj0ASc6yxcM4Q2oNaIo+zv7Wyr4Rx99lOLiYo488kiOPPJIHnrooQH3SfdY7wOOiIBljWTkuvdhbxRFSTWcVPCXXXZZv9v4xje+wd133z1ofVJLpA8EQ53Pda2Iouw/pFIq+MFGLZE+0MUSURFRlMHn5WuhYs3gtjn6MDg7/i4RqZIKHqxdDf/zn/8wZcoU7rjjDsaPHx+lxcRREekDkXurq3NdUfYfUiEVPMCXv/xlLrzwQjIyMrj//vuZP38+b7311oCurSLSByI0RC0RRUkGvVgMySBVUsGvWrWqizBecsklXH311Qm1Hw8VkT4Q1OksRdnvSKVU8OXl5YwZMwaAxYsXM23atAFfWx3rfcBEiEizTmcpyn5BKqWCX7hwITNmzOCII45g4cKFg7LLoUQOjKnArFmzTH/iqwH++MoG7n1nCwDXnj2VH5xy0GB2TVFSkvXr1w/KHXEy2b59O1/60pdSJgljtP+JiKw0xvTYwUotkT4QMpDuduESaNXpLEVRFPWJ9IWQMbhckOtJ0+ksRUkhNBV8bNQS6QOhkMEtQl6mRx3riqIoJHeP9UwRWSYin4jIpyLyW7v8URHZJiKr7MeRdrmIyEIR2Swiq0Xk6Ii25otIqf2YH1E+U0TW2OcslO4xeINM0BhcIuRmpOk6EUVRFJI7neUFTjfGtIiIB3hfRJyNjH9hjHm6W/2zsba+nQwcB9wHHCciw4EbgFmAAVaKyGJjTL1d5/vAh1ibU80Fom+WPAgYAy6XkJPhVktEURSF5O6xbowxLfZLj/2IFwo2D3jcPm8pUCAiY4A5wOvGmDpbOF4H5trH8o0xS+292B8HzknW+wFrPxGXQK5OZymKogBJ9omIiFtEVgFVWELwoX1ogT1ldYeIZNhl44BdEaeX2WXxysuilCeNkDG4XUJeRpqKiKKkAPtbKniAp556iunTpzNjxgy++c1vDrhPSY3OMsYEgSNFpAB4TkQOBa4DKoB04AHgGuB3yeyHiFwKXAowYcKEfrcTMgZRn4iiKPuIgaaCLy0t5fe//z0ffPABhYWFVFVVDbhPeyU6yxjTALwNzDXGlNtTVl7gb8CxdrXdQGQ6yRK7LF55SZTyaNd/wBgzyxgzq7i4uN/vIxQCtwi5mWqJKMr+RKqkgn/wwQe54oorKCwsBGDkyJED7lPSLBERKQb8xpgGEckCzgRuFZExxphyO5LqHMAJvl4M/EhEFmE51hvteq8Ct4hIoV3vLOA6Y0ydiDSJyPFYjv
XvAHcl6/2AE50FOfZ0VihkcLmSGhCmKCnFrctuZUPdhkFtc+rwqVxz7DVx66RKKvhNmzYBcOKJJxIMBrnxxhuZO3duQteIRTKns8YAj4mIG8viecoY84KIvGULjACrgB/Y9V8CvgBsBtqA/wGwxeImYLld73fGmDr7+eXAo0AWVlRW0iKzwFlsaPlEAFp9AfIyPcm8pKIoe4FUSQUfCAQoLS3lnXfeoaysjJNPPpk1a9ZQUFDQ72snTUSMMauBo6KUnx6jvgGuiHHsEeCRKOUrgEMH1tPECYXsdSKZtoh4gyoiijKI9GYxJINUSgVfUlLCcccdh8fjYdKkSUyZMoXS0lKOOeaYhK4TDV2x3gdCBtwuy7EO0OLteWehKMpni76mggcGPRV8tEeiAgKdqeAda2fTpk20trayYMGCcHtg+X7eeecdAGpqati0aRMHHnhgwteJhopIHwgagwhhS0TzZynKZ59USgU/Z84cioqKmD59Oqeddhq33XZbj2m7vqKp4PvA5f9cSWllC78/7zDO/+sS/n7xsXxucv+jvRRF0VTwQxFNBZ8kgrZPJMeZzlJLRFGUFEdFpA+E7NxZnT4RFRFFSQU0FXxsVET6QMjOnZWXqSKiKIoCKiJ9wsmdpdNZiqIoFioifSBorNhwj9tFpselloiiKCmPikgfMMbgttcX5Wak0awioihKiqMi0gec6CywRKRVRURR9mv2t1TwP/3pTznyyCM58sgjmTJlyoDSnTgkNRX8/oaTOwusBYfqE1EUZW8y0FTwd9xxR/j5XXfdxccffzzgPqkl0gdCIXDpdJai7HekSir4SJ588kkuvPDCAfdJLZE+EDIGj8vS3dwMD3sa2vdxjxRl/6Lillvwrh/cVPAZ06Yy+vrr49ZJlVTwDjt27GDbtm2cfnrUfLh9QkWkD1j7iTg+EbdGZynKfkKqpIJ3WLRoEeeffz5ut3vA11YR6QMhQ6eI6O6GijLo9GYxJINUSgXvsGjRIu65556E2u4NFZE+4KxYB2s6S0VEUT779DUV/OzZswc9FfxAcVLBn3766Xg8HjZt2sS4ceNYsGABCxYs6FJ3w4YN1NfXM3v27AFfF5LoWBeRTBFZJiKfiMinIvJbu3ySiHwoIptF5F8ikm6XZ9ivN9vHJ0a0dZ1dvlFE5kSUz7XLNovItcl6Lw7OinWwUp/4AiG8gWCyL6soShJJpVTwYFkhF1xwQQ/Lqt8YY5LywNr+Ntd+7sHaB/144CngArv8r8AP7eeXA3+1n18A/Mt+Ph34BMgAJgFbALf92AIcCKTbdab31q+ZM2ea/jLnjnfNJY8tN8YY8+gH28wB17xgalu8/W5PURRj1q1bt6+70Cvbtm0zM2bM2Nfd2GtE+58AK0yUMTVploh93Rb7pcd+GOB04Gm7/DHgHPv5PPs19vEzxJLKecAiY4zXGLMNaw/2Y+3HZmPMVmOMD1hk100axoA7YrEhaP4sRVFSm6SuExERt4isAqqA17EshwZjjDPylgHj7OfjgF0A9vFGoCiyvNs5scqj9eNSEVkhIiuqq6v7/X6CxmBH+IaTMDbrFrmKst+jqeBjk1QRMcYEjTFHAiVYlsPUZF4vTj8eMMbMMsbMKi7u/06EoYgQXycdfKtXfSKKMlBMiu2wOpTp6/9ir6xYN8Y0AG8Ds4ECEXGiwkqA3fbz3cB4APv4MKA2srzbObHKk0aoW+4sgBa1RBRlQGRmZlJbW6tCMgQwxlBbW0tmZmbC5yQtxFdEigG/MaZBRLKAM4FbscTkfCwfxnzgefuUxfbrJfbxt4wxRkQWA0+IyJ+BscBkYBmW436yiEzCEo8LgG8m6/2AtU7EHZE7C6BZfSKKMiBKSkooKytjIFPNyuCRmZlJSUlJwvWTuU5kDPCYiLixLJ6njDEviMg6YJGI3Ax8DDxs138Y+LuIbAbqsEQBY8ynIvIUsA4IAFcYY4IAIvIj4FWsSK1HjDGfJvH9EAwZnKi4PN0iV1
EGBY/HE3NltTL0SZqIGGNWA0dFKd+K5R/pXt4BfC1GWwuABVHKXwJeGnBnE8QYE47O0t0NFUVRNItvn4jMnZWd7kZELRFFUVIbFZE+EDKE9xMREXIzNH+WoiipTcIiIiLZyezIZ4HI3Flg+UV0OktRlFSmVxERkRNsZ/gG+/URInJv0ns2BInMnQWayVdRFCURS+QOYA7Wmg2MMZ8AJyezU0OVyD3WAZ3OUhQl5UloOssYs6tbUUou0zYR+4mAFaGl60QURUllEhGRXSJyAmBExCMiVwHrk9yvIYkVndX5Oi8zjVa1RBRFSWESEZEfAFdgJTfcDRxpv045evhEdDpLUZQUp9fFhsaYGuCivdCXIU8o1HVLzNwMj0ZnKYqS0vQqIiLyN6x9QLpgjPleUno0hLEskc7XuZlptPgCVuiva5B2CVMURfkMkUjakxcinmcC5wJ7ktOdoU3kinWA3Aw3xkCbPxjO6qsoipJKJDKd9UzkaxF5Eng/aT0aolhbQdJNRDyAlT9LRURRlFSkP2lPJgMjB7sjQ52QPaHXRUQyNZOvoiipTSI+kWYsn4jYfyuAa5LcryFHyN4wJ9InoungFUVJdRKZzsrbGx0Z6gRtU0SiWSIaoaUoSooSczpLRI6O9+itYREZLyJvi8g6EflURH5sl98oIrtFZJX9+ELEOdeJyGYR2SgicyLK59plm0Xk2ojySSLyoV3+LxFJ7/9HER9n587u60RAt8hVFCV1iWeJ/CnOMQOc3kvbAeDnxpiPRCQPWCkir9vH7jDG3B5ZWUSmY+1mOANrG9w3RGSKffgerO11y4DlIrLYGLMOa7vdO4wxi0Tkr8DFwH299KtfBG0ViYzkdUREU58oipKqxBQRY8xpA2nYGFMOlNvPm0VkPdaq91jMAxYZY7zANnubXGcHxM32joiIyCJgnt3e6XTuq/4YcCNJEpFQWER6WiKa+kRRlFQlobhUETkUmI61TgQAY8zjiV5ERCZibZX7IXAi8CMR+Q6wAstaqccSmKURp5XRKTq7upUfBxQBDcaYQJT63a9/KXApwIQJExLtdhdCoZ4ikqOOdUVRUpxE9hO5AbjLfpwG/BH4SqIXEJFc4BngJ8aYJixL4SCsHFzlxJ82GxSMMQ8YY2YZY2YVFxf3q41QFJ9IepqLjDQXzSoiiqKkKImsEzkfOAOoMMb8D3AEMCyRxkXEgyUg/zTGPAtgjKk0xgSNMSHgQTqnrHYD4yNOL7HLYpXXAgUiktatPCkEQz19ImBl8tXoLEVRUpVERKTdHvADIpIPVNF1UI+KWLGwDwPrjTF/jigfE1HtXGCt/XwxcIGIZIjIJKxFjcuA5cBkOxIrHcv5vtgYY4C3sUQOYD7wfALvp18YxyfSTUVyNJOvoigpTCI+kRUiUoBlNawEWoAlCZx3IvBtYI2IrLLLrgcuFJEjsSK8tgOXARhjPhWRp4B1WJFdVxhjggAi8iPgVcANPGKM+dRu7xpgkYjcDHyMJVpJIRjFsQ52Oni1RBRFSVESWWx4uf30ryLyCpBvjFmdwHnvY61y785Lcc5ZACyIUv5StPPsiK1ju5cng7BPJJqIqCWiKEqKEm+x4ToR+ZWIHOSUGWO2JyIg+yOh8Ir1ruV5mSoiiqKkLvF8IhcCOcBrIrJMRH4qImP3Ur+GHJ25s9QSURRFcYgpIsaYT4wx1xljDgKuBCYAS+1UJt/faz0cIgSjrBMBe2Mq9YkoipKiJJQK3hiz1BjzU+A7QAFwd1J7NQQJp4KPEp2l60QURUlVEkkFfwzW1NZXgW3A/cD/JrlfQ45QlNxZYKWD9wVC+AIh0tP6sz2LoijKZ5eYIiIitwDfAOqARcCJxpiyvdWxoUbYJxIlOgus/FnpaUlLIqwoijIkiWeJdABzjTGle6szQ5lo+4kA5GbaW+R6AxTmqIgoipJaxMvi+7u92ZGhTrT9REDTwSuKktroJH6CxMqdlauZfBVFSWFURBIkFCN3VniLXN
3dUFGUFCTR/UTGAQdE1jfG/CdZnRqKRNuUCiItkeBe75OiKMq+JpEQ31uxorTWAc5IaYAUExHrb/forDzHElGfiKIoKUgilsg5wCH2trUpS+8+EZ3OUhQl9UjEJ7IV8CS7I0OdWD6R7HQ3ImqJKIqSmiRiibQBq0TkTSBsjRhjrkxar4YgoZD1t7tPRETITdfUJ4qipCaJWCKLgZuA/2JtSuU84iIi4+1kjetE5FMR+bFdPlxEXheRUvtvoV0uIrJQRDaLyGoROTqirfl2/VIRmR9RPlNE1tjnLJTuKwEHkc4svj2P5Wam0aoioihKCpLIplSP2dvSTrGLNhpjEnEABICfG2M+EpE8YKWIvA58F3jTGPMHEbkWuBZrh8KzsbbEnQwcB9wHHCciw4EbgFlYDv2VIrLYGFNv1/k+8CHWplVzgZcTe+t9w9nZMJpOaTp4RVFSlV4tERE5FSgF7gHuBTaJyMm9nWeMKTfGfGQ/bwbWA+OAecBjdrXHsBz32OWPG4ulQIG9H/sc4HVjTJ0tHK8Dc+1j+XaGYQM8HtHWoGNi5M4CyxLRFeuKoqQiifhE/gScZYzZCCAiU4AngZmJXkREJgJHYVkMo4wx5fahCmCU/XwcsCvitDK7LF55WZTyaNe/FLgUYMKECYl2uwvBGD4RUEtEUZTUJRGfiMcREABjzCb6EK0lIrnAM8BPjDFNkcdsC8Ik2lZ/McY8YIyZZYyZVVxc3K82OqOzeh7LzdCNqRRFSU0SEZEVIvKQiJxqPx4EViTSuIh4sATkn8aYZ+3iSnsqCvtvlV2+GxgfcXqJXRavvCRKeVIIxdjZENQSURQldUlERH6ItVr9Svuxzi6Lix0p9TCw3hjz54hDiwEnwmo+8HxE+XfsKK3jgUZ72utV4CwRKbQjuc4CXrWPNYnI8fa1vhPR1qATipHFF+wtclVEFEVJQRKJzvICf7YffeFE4NvAGhFZZZddD/wBeEpELgZ2AF+3j70EfAHYjLU25X/s69eJyE3Acrve74wxdfbzy4FHgSysqKykRGZBZ3RWFA0hz7ZEjDFRo7cURVH2V+LtbPiUMebrIrKGKH4LY8zh8Ro2xrwPxBpRz4hS3wBXxGjrEeCRKOUrgEPj9WOwMDESMIJliRgDbb4gORkJ5bRUFEXZL4g34v3Y/vulvdGRoU4wjk8kJ2JPERURRVFSiZg+kYgw3MuNMTsiH1jTSClFXJ+I7m6oKEqKkohj/cwoZWcPdkeGOqHwHus9jznp4DX1iaIoqUY8n8gPsSyOg0RkdcShPKw8WilFZ+6saJaItWxGI7QURUk14k3gP4EV7fR7rPxWDs0R0VEpQzCeY12nsxRFSVHi+UQajTHbgTuBugh/SEBEjttbHRwqOD6RaCIS3t1QLRFFUVKMRHwi9wEtEa9b7LKUIhRjZ0OIiM7q0N0NFUVJLRIRETHOIgnAGBMiscSN+xXxfCI5GW5ALRFFUVKPhLbHFZErRcRjP36MtWVuShEMxd5PJCPNTXqaixZvcG93S1EUZZ+SiIj8ADgBK7lhGdaGUZcms1NDERNnnQg4qU90OktRlNQikdxZVcAFe6EvQ5p4ubPATsKo0VmKoqQYvYqIiBRjbUE7MbK+MeZ7yevW0CMUJ8QXICddM/kqipJ6JOIgfx54D3gDSNlJ/3j7iYBukasoSmqSiIhkG2OuSXpPhjjxcmeB5ROpbO7Yiz1SFEXZ9yTiWH9BRL6Q9J4McYJx1omA+kQURUlNEhGRH2MJSbuINIlIs4g09XrWfoa14VT0EF/QLXIVRUlNehURY0yeMcZljMkyxuTbr/N7O09EHhGRKhFZG1F2o4jsFpFV9uMLEceuE5HNIrJRROZElM+1yzaLyLUR5ZNE5EO7/F8ikt63t943gsbE9IeA+kQURUlNehURETk52iOBth8F5kYpv8MYc6T9eMm+xnSsMOIZ9jn3iohbRNzAPVip56cDF9p1AW612zoYqAcuTqBP/SZkwB
1PRNLT8AZC+AKhZHZDURRlSJGIY/0XEc8zgWOBlcDp8U4yxvxHRCYm2I95wCJ7P/dtIrLZvg7AZmPMVgARWQTME5H19vW/add5DLiRJOb0CoVM1L1EHHIj9hRJT0uqUaQoijJkSGQ668sRjzOx9jSvH8A1fyQiq+3prkK7bBywK6JOmV0Wq7wIaDDGBLqVR0VELhWRFSKyorq6ul+dDhkTMzILOtPBq19EUZRUIhHHenfKgGn9vN59wEHAkUA58Kd+ttMnjDEPGGNmGWNmFRcX96uNYCj2GhHQdPCKoqQmiaxYvwtwsvi6sATgo/5czBhTGdHug8AL9svdwPiIqiV2GTHKa4ECEUmzrZHI+kkhZEzM8F7Q3Q0VRUlNEvGJrIh4HgCeNMZ80J+LicgYY0y5/fJcwIncWgw8ISJ/BsYCk4FlgACTRWQSlkhcAHzTGGNE5G3gfGARMB9rZX3SCBmDK46KhNPBa4SWoigpRLw91icYY3YaYx7rT8Mi8iRwKjBCRMqAG4BTReRILMtmO3AZgDHmUxF5CliHJVRXGGOCdjs/Al4F3MAjxphP7UtcAywSkZuBj4GH+9PPRAkZEzc6y5nOalZLRFGUFCKeJfJ/wNEAIvKMMearfWnYGHNhlOKYA70xZgGwIEr5S8BLUcq30hnBlXSCodgLDaFzOqtVRURRlBQinmM9csQ8MNkdGeoYY3DH+bScEF+dzlIUJZWIJyImxvOUJBiKv2I92+NGRKezFEVJLeJNZx1h58gSICsiX5YAJpHUJ/sTIRM/xNflEnLTNQmjoiipRUwRMca492ZHhjpWdFb8Ojm6Ra6iKClGfxYbpiS9RWeBnQ5ep7MURUkhVEQSpDefCDjp4FN280dFUVIQFZEEMYa4iw3BWivS0qHTWYqipA4qIgliWSLx6+jGVIqipBoqIgkS6mVTKrAd6xqdpShKCqEikiCJiEhuRpquE1EUJaVQEUmQkCHufiJg+URavQGMSfm1mYqipAgqIgmSqE8kZKDdrxFaiqKkBioiCdJbKnjQ/FmKoqQeKiIJkqhPBDR/lqIoqYOKSIKEQvS+Yj1DLRFFUVKLpImIiDwiIlUisjaibLiIvC4ipfbfQrtcRGShiGwWkdUiclLLiRsAACAASURBVHTEOfPt+qUiMj+ifKaIrLHPWSjxNvsYBILG0NsVwiKiloiiKClCMi2RR4G53cquBd40xkwG3rRfA5yNtSXuZOBS4D6wRAdrR8TjsDagusERHrvO9yPO636tQcXaTyRBn4iKiKIoKULSRMQY8x+grlvxPMDZbvcx4JyI8seNxVKgQETGAHOA140xdcaYeuB1YK59LN8Ys9RY8bSPR7SVFBLJnZVn726o01mKoqQKe9snMsoYU24/rwBG2c/HAbsi6pXZZfHKy6KUR0VELhWRFSKyorq6ul8dDyWQO0stEUVRUo195li3LYi9sirPGPOAMWaWMWZWcXFxv9qworPi18nJsLZgURFRFCVV2NsiUmlPRWH/rbLLdwPjI+qV2GXxykuilCeNRPYTyUhzk+520azTWYqipAh7W0QWA06E1Xzg+Yjy79hRWscDjfa016vAWSJSaDvUzwJetY81icjxdlTWdyLaSgrBECQSAJZrpz5RFEVJBeLtsT4gRORJ4FRghIiUYUVZ/QF4SkQuBnYAX7ervwR8AdgMtAH/A2CMqRORm4Dldr3fGWMcZ/3lWBFgWcDL9iNpmFCINHpPZ6Lp4BVFSSWSJiLGmAtjHDojSl0DXBGjnUeAR6KUrwAOHUgfE8X4/Vz+2hU0F2XB/OVx6+ZmpOl0lqIoKYOuWE8A8XhoyDMcubKF9jVr49a1LBHd3VBRlNRARSRBPjojh5YcKP/VrzA+X8x6uZk6naUoSuqgIpIgOTnZLJrjwrtxI9V33R2zXq7ubqgoSgqhIpIghZ4c3pssDPva16h96CFal34YtZ5lieh+IoqipAYqIglSmJ6PV4T8n19J+sSJ7L7qKv
zl5T3q5Q2iT6TDH6SxTf0riqIMXVREEmR4ppX3sSFYR8ldCzHt7ZRd8SNC7e1d6uVmpNHhD+EPhgZ8zVtf2cCFDy4dcDuKoijJQkUkQQozRwBQ37STjIMPZuyfbqdj/Xr2XH99lz3Vc+x08IOx4HBrdSu76toG3I6iKEqyUBFJkMKckQDUt+wBIO/UUxn585/R/PIr1Nx7b7iek4RxMNaK1LR4afYGCAyCVaNY/PPDHdS0ePd1NxRlv0FFJEGG544BoK6lorPs4osZNm8eNXfdTe3fHgUsnwhAq88Skfve2cKGiqYe7X3roQ+59ZUNca9Z22KFEjdptNegUNnUwS+fW8vzq/bs664oyn5D0las728U5lmZ5uvbOlPJiwhjFtxMyOul6tZbEZeQe8IXAGtPkbpWH7e+soHaFi+/+tL08HnGGFbsqIu7U6IxhtpW6465sd3P8Jz0uP371/KdTBiew+yDivr7Fvd7GtutIIWmdg1WUJTBQi2RBMnJHYvHGOo7aruUS1oa4277I3lnnUXl7//AsJeeAaDZG2BLdQsAFU0dXc6pbfXR4Q9R1xp70WJjux9/0ISf98Ztr27k70u39+UtpRzO57i30tKs2F7Hefd+QIdfQ76V/RcVkQSR7EIKg0HqvI09j3k8jPvT7eSdeSae++7k0jXP09LmZXOVJSKV3URkd70V0eVMV0WjJuJYbyISCIaobfVR36p32PFwLJCmjr3zOS3fXs9HOxuoato/fDChkOHut0qpj3Pzk4psrW5J6czdKiKJkp7L8GCIel9P/wbYQvKXO8i44Jucu+U9iv/wK3bstLZL6W6JlDki0urtEtkVSaTztzcRqWv1YQzUt+mPOx6dlkj/ROTtjVW8sDpxf0rjXhatZLOpqpnbX9vEG+sr93VXhgzGGObd8wEPvbdtX3dln6EikigiFOKmPhg75Fbcbsb96nruO+p88tas4MQ7rmFiYzmVTV3FoqzeasMfNDTHuIPpi4hUNVt1G1JoYeKqXQ09LLzeCFsi7f27a7z/3S3c/dbmhOs3tvu6XPezjvP90kCPTlp9QZo7AlQ29+27uD+hItIHCl0e6oLxvywZaW7qP/8l7j37SjxtzSx89y98YcM71EWIgmOJQOwprcjyxl4sjGpbRAZqicy75wOeXLZzQG3sLS55bDn3vJ34gA7QaItHpGWwZEstlz6+glCo952aG9r8CfmnIut3v95nmfD7SZIort3diDfw2fIfNbTtXzcK/WGfiIiIbBeRNSKySkRW2GXDReR1ESm1/xba5SIiC0Vks4isFpGjI9qZb9cvFZH5sa43WAx3ZVBvev+yfG7yCF70lHDZqT9nw/hDuWztYqp+cBm+HTuATksEoK41+nx5TYsXl0B6mqvXgcsREW8gRLuvfz/CVm+AT3Y18PHO+j6dZ4zhofe27tV5cn8wRE2Lr8/rPZzBPNKx/l5pNa+tq6QhgUGgrtXXJxFpjGL53P1WKf9YuiPhNvYGr35awabK5l7rOZZV5GfQEhFAMhBqW7zMu+cDnv/4sxV+rdbZvrVETjPGHGmMmWW/vhZ40xgzGXjTfg1wNjDZflwK3AeW6GDtlngccCxwgyM8yaIwLYc2DN5g/MHrc5OLAWjMyGXDFb9m4RFfJbRuLVu/9GWq7ryTyupGxhVkAV0d6JHUtPgYnpNOYbYn/KMNhQwXP7qcdzdVd6lbHTGY9tcacabE4kWMRWNLdQs3v7ieF9f0zCOWLBzB6uv0XTQfhWPxNfTyuRljaGjz0+YLJpzSJpol8vTKMl5cvfc+q0S4+unVPPTe1l7rRXs/D7y7hXPv+SCmby9Rqpq9BEOmh/9wb9FfC0jDxofWdNY84DH7+WPAORHljxuLpUCBiIwB5gCvG2PqjDH1wOvA3GR2sDA9D4D6jvh361NH5zEiNwOAk6aM4OVJs1l/60PknT2X2vv+yrWLbuDCmo9xh4IxB+2aFi8jcjMYltUpInsa23lzQxXvbK
zqUtexRKD/IlLRaP14a/soIhWN1rXjRZoNNrX9FJGmiBBfZ9ALt9XLINDmC+KzxcP5fwSCIa59ZnXMO/FoA0xtqy8hq2dv4Q+GaGz3U5/AZ9kQxbIqa2inqSNA+wDDmJ0bg75YeoPFki21HH7ja11+R4myv01Z9od9JSIGeE1EVorIpXbZKGOMc4tWAYyyn48DdkWcW2aXxSrvgYhcKiIrRGRFdXV1tCoJMTx9GADVbfHbEBFOnjwCt0s4dpK1+K/MlcO4P/6RwgcfoS49lzNeeIgH3vwjnjdewvh7fgFrWrwU5aZ3EZGt1a1AZ4iwQ+SXv7/O9SrbMdhXS8RxbsealksGdf0ccJz6wZChzZ72q4tY0JnINSPr7qxrY9HyXby9oSrqOeH5cnuqwxuwnLC9WT19YVNl84Dyq/Vl8I42YHZacr2fX9XcQW2MKcjafSgimyqb8QZCXaaZI3n10wr+9kH06KuGcPBEp7Burmrhe48ujzm1/NHOet7aEDvC7a43S1m7u+dSgqHKvhKRk4wxR2NNVV0hIidHHrT3XB+Yfdy1vQeMMbOMMbOKi4v73c703PEArK5a1Wvdn545hXsvOprcjDRG5KaHB9uqSdP4ySlXUn3dLXR4spjx+J1sPuPz1Pz1fgL1nRZObYvPtkTSww7hrfYd757GniJSmO0B+m+JhMWgjxaFE5XSVwsmHhsqmgjGcXR3WiKd1wyGDB9sronbbuS8teMXCQtSL4Ng5CDpWBbVcaYA/cEQrfYg4tSv66cFFY8fL1rFzS+u6/f5tQm+f4gebdaX9/TjJ1dx7bNroh5zvreJiEhTh5973t4c9zvSFxxhi2UhPrlsZ8wQ3mjC+sHmGt7aUMW2mtao5yx8s5SbXlgf9Vi7L8ifXt/E86t2Rz1ujIkpdvuKfSIixpjd9t8q4Dksn0alPU2F/de5vdsNjI84vcQui1WeNMbkjWOcP8CK8ugbUkUyfng2c2aMBmBUfiYVjR3UtHh5ZmUZiDDizDO47bzrePGbvyBj8mSq//IXNp9yKrt/fhUt739AXVN7eDrL+dFutb+Uexq6zhtXt3iZPMqeakvgx7xsW12PVdTOtFSzN9Cn+WFnId1gTWftqmvj7Dvf45W1FTHrOJFurb4gvoA1xfT2hioueujDuHdwTe1+Mj3WV9750UcTpGhEirMz0Dm+qGifeeRgGL6W/Rm1+4O9rmIPBEN89b7/9pi67M6ehvawP6s/hEWgvff/nzNgRgYmdA7AvZ+/s66thxXd2U7iYvb6p5Xc9upG1u2JvmZrY0Uz75fGv6Hocu1ehLSmxRvz5sz5P/sCofD/tLdoyepmb9xpbOvc6H1ZsrWWk259O3xD2Z0Of3CvT63tdRERkRwRyXOeA2cBa4HFgBNhNR943n6+GPiOHaV1PNBoT3u9CpwlIoW2Q/0suyx5ZBYws6ODldUf98mROCo/k02VLcy54z88tmQHx04czoHFOQzPzeDjsdOZ8PBDHPjvxRSc/1Wa3nuPXZdcwr0v/o7jXnqMg8o30dRiiYZzZ1PX6utiKlc3e5kyKheAhogvZ7SBalddG1+/fwnPftRVbyPj3PsypdU5nTU4IrK9thVjrAEnFrUxppYAyhtjO2ab2v3hgIbmDn94egl694lEFRFnsIjy3rtaLoEe/e7NEVvR1MHKHfUs21YXs06HP2j5MxL47IMhw9VPf9JDZPsyjdQ9xNfK75bY4G+Mobo59mDcF0uk0/qNLp53vrmJa55Z3Ws7Dr0FV9Q0+2jzRRf+yHOcwbtTCGKLSFOHP2p2bufGJFZftthT2rtiiPFtr27kG/fv3T2I9oUlMgp4X0Q+AZYBLxpjXgH+AJwpIqXA5+3XAC8BW4HNwIPA5QDGmDrgJmC5/fidXZY8coqZ1eGl3tfE1sbeo1kcRuVnsruhnXZ/kOevOJGnfjCbTI+botyM8Bc4Y/JkRv/mN9zwrVtZcMy32TJsHOPee5nT7vsNDyz+Dbt/+S
vyVi6hwFj1nSmtNl+AFm+AcQXZ5KS7w3cwa3c3cugNr/a4W1tfbr3u7gyuioiKiWZVVDV38P3HV/QYsJxomr5OZ22oaGLmTa+zu6Hrj2GP/boqzuKtriJiPS+3P49YYb/BkLWws6QwG7AG9sg0MZGD/m2vbuC5j8u6nF8fRQDC01lRfvDOYJjpcUVYIhG+q14GS0ec46fGSTyirryxnadWlPVYbe5YdR3+UK/WUXjFvzdAMGRo9QXxBroGG8Sioc2PLxiysyv0vAHri5/LsX5jDdLlttWf6I1epzXW89qhUGci1GhTdtFuFjotkZ71gyFLeI2J/l7jTZECVPfig9xU2cy2mpYBR8v1hb2exdcYsxU4Ikp5LXBGlHIDXBGjrUeARwa7jzE58BRmpVuO8hXlyzio4KCEThtXkAnAb78ygyPGF4TLR+Sms2pXQ/j1rro2lu1p5Ytz57Ip60t84YRxfLjoRcpeeIWCV17l/7W2cIW42FRQQoPnE1q/cBrV4ycDUJyXQUF2evgO5oPNNQRChmXbapk+Nj98DWc9wI7arvO1FU0djCvIYndDe9Qv8Lsbq3l9XSXLZtaFp+mg6w86FDK4XHFSE0ewYns9ta0+1u1pClsHALvtqbp4UzSRfhvnR+xYILEibJxUJyWF1rWaOvxd7mQjf9CPL9nBzAMKOfeoknBZ5ICQiCXiiNv4wuyw6NRG6XcswlFvcQIWnOs3dQTwB0N43LHvCR1R6i6y3QMGMj3umG3Ut/lIcwmBkKGlI9DlM+tNFJ07bG8gRLs/SHZ616En2rRaizfAlqqWLr8Z6HzfsQS2qsmLN2D5pHIzeh/iauKIRGQi1Po2H6OHZXY5Hvm+u09zNkT5XtS3+cK+nPo2P0V2FGf39xbr+1HZ5IhMrOMddPhDtPmC4Q3yks1QCvEd+qRlUHLaDYwMBPhw4zMJn3bBsRO476KjOX9mSZfy4Tnp1LX6wqulX15rBaddM3cqvz/vcEaNLiJ4yun8cdZFbHvwaa4+8QfsOvtrBMVF1jP/ZOf/fI+2s07l7rf+zIGPL+SsHR/i2rEVEwjwSZklTuvKm9hV18ZhN77K8u11bKy0LJAdtZ3TRcYYKpu8TBtj+VWiiUipnUxyZ8R5oZChqrmD7HQ3wZDpU2TN9rB/J7olEi/csq7VR066NdjVdxORWJaI07ewJWKn6ndoiJhOae4IhEOeHerbfAzL8pDlcUfxicSezpowPDvs0I+0oHoLgHAsq+o4lkhVH0K7Y4lsXZRpug5/kMf+u72L49obCNLmC4ZFuLHdHx58oXdRjExCGe375ZR1+ENhn9xj/93O+X/9b48oJ8dKjfaeQyHT5yCReNNZNb2swWps8zMyzxKC8HRWHAu1ayRl7OOx/p+dUZTRv+fO93awppcTQUWkj8ihX+UsyeOthg1srUksKmZEbgZnHzYG6baBSFFOBsGQCX/5XlhdzuElw5hQlB2uU5Bl7SPycXkra4oPZuzPfsLVp/yI1//wT8Y/cD+N515IY0Yume+/zdfffpyLH/klG486mnl3X8NVK56g+MWn+Ojpl0mvr+Wl1XsotS2RnXVtYfFqbPfjC4SYPsayWKJNTYUtmLpOC6a+zYc/aDhkdF7M84Ih00MoALbbYtQfEalt9XJgse0Dsn9sFb1YIs5UgzMINnf4wz+0cQVZ4TtKJ/Kle16u+jY/hdmeLiHXkdMW3dOmOHXGD8+mxd6dstbOQgC9+xA6p7N6t0SAXjM4V4RFtuv/qKuQWm28vq6SGxZ/yortnbPDke8HrAGzrkum6V5S87R0fp7R+lrf5gvvrxMZ0u4P9lyAGG9hbF2bj4D9v6hJIOzcWScD0a2pyIW8Uaez2n1McD6Tdj/GmPBnHK1+5P8sWv+rIyLFokWfxbNE2n3B8A3L3ty9U0Wkr4hwyed+R4Yx3PX2LwbUVFGuJRA3vbCeX//fWlaXNfLFw8Z0qZOfZYXufrDZ2sfk4JG5jMrPZGeHkH
HiSdwz6fPcePJljH33fZ644jb+dvJ8Mi/4JpWeXI6q3cKXljzLlD//in+8ehPzrr+IK5+8kRtX/p1vfvJvdv3jSVqXLaN841bcoSAHj8rD7ZIudznOVE1pFAvG+UI74hPtR/HEhzs47fZ3etx1bben02L5RHqzRA4szgGsASdypXNvlkhxXgbpbhdN7YHwj/3A4pzwoO7kNatv83fxEdS3+ijITic/K62HiARDpsceJc4A4ohWizdAbWvngONM2+xpaOfwG1/lk4hpTYCKBKLeehuQAF5aU067LxghIl0/n9oWH9m2Vef8j5wghcj/TWOEZQXWgOlMtWWnu7sMmE98uLPH++liiXT7LhhjqGv1hac1O/8XVj8irUJjTLitaO+5shffXnfqo4hoJJGiG8viDH8mHQGa2gPhRanR6tf0IkqOFWNM9OCLTgHt+T2PFNu9aYnozob9oGjyXL774e3c27aTdzb9H6dOOaf3k6JwwkEjOGv6KF5ZW07IwBlTR/L1WeO71Blmi8i68ibOnD6K7PQ0xhZksbuhjeufXcP7m2u47fzDKcjNQCZM5NW6dOacdwS/aT6Mbx9/AM/9Zz0HNVcwqbmCUU3VjGmt4ZD2ao4uW0vbLW/jpFtcjMAHRfxFcknbNIrKj6aymRzu39DCz75+PJTtJDsjl50RvhQnSsbxuTh3za3eAGX17RwyOo+l2+rwBkKs29PECQePAKxBd2cUSyQUMuxp6MDjFlq8Adp8gR5z58GQoaHdzwHDs3G7hIY2PzUt3vBdW6w0Mo61NyzLQ15mGs0dftwucLuE8cOz+dQOQIhMjlnV5A1bhfVtPkblZ+JxS1i4au2Bb3dDuzXdZa/VAUu08jPTKMi2bhSa2gPUtngZPzyb3Q3t4QHk450NNHUEWL69rsvcf6U9cLb7g7R6A1Hnt3tLd1Na2czl//yIm+bNoLwpxnRWq49JI3L4dE9TWBydxYuR4bjOXfqECEvEsTwPLM4Jvx9/MMQNi9fyxcPG8JcLjur8LJt73pg4tHgD+IOGSSNyKKtvD/fD+V9ECkOzt3N1fG8iksgCWOc9xMpRVx2n3x1+K7BgfISwdrG4erNEok1ndfufFkbsaOoPhsLCHc2a66uADhYqIv1k/uf/wruLv8rPl9zAnTkjOGncSX1uozgvgwe+Mysc6pcWxTFanJeBxy2cPnUkCy+0fpRjC7J4YfUelm6t48ozJvM1W3gKstNp6vDz0c56XAJfnzWevy/dwSdFB3LUl0/n/v9uB+Dei47mor8v50+njObMAj9LlnzKu++tZf7BWQRXlTJ8z07qN3/C8I4OrgP48HEesvvjc6VR+nIxaUVFZLuz+Uk9HJa5mq+W1sHLu2mqmMST6xt4elMT/7rqLNZvrQRjWFfeKSLlje34giE8bumy5qWm1YsvGOLwkmGsLmukutnLAUVdv6L1bVZkS5G9hqah3RcWognDs2NaMM4AMSzLQ36Wh6aOACFjKMxOZ7gdkBAKdV3IVdHUERaRhjY/h4zOwyWW899xkE4ZlWsFI7T5mEhOl+sNy/aQn2n13xl0DyzOZVhWenhQdjYu674wraKpA7dLLLFq8UUVkaomL0U56dS2+qJOJTp+rA0VzWFRavMFu4hzXauP2QcVdRERxxKJXNTa0MMSCYStmNH5WWGrZVddG/6gYVtt1xDt6mZveCqwe18dMZg0Iof3SmtsZ3Yo7BeKvMN2rBCXxBKRzv9/rBuKSJzBdlJRTtSIwJoWL2kuweN29RAF5zMZlZ9JepoVheeI5ci8jKgBF9XNXrI8lg8xmvBXN3f+T7sftyLO7H5HEcguIqKWyNAne/Sh3D/2bL5X/jKXv3E5F027iB8c8QOGZQzrc1vRxMNhWJaHD645nRG5GeHIp7EFmRgD5x09jp9+fnK47vBsD8ZYvpUpo/KYPjafLI+bdn+Qi0+axL8/2UNdm4+TpxTjSktjc1o+xx96APevFlbNKOFXv53Db/62HF
8wxJPfP45Tfv08mQ21FHqbGeZt4YslGaz7dDsXHpyDu6URtu9mZk0tnqc/4pJAAD61VnuebD8q37idewC/y43/zVy2jBqBKy+PJlc611d0kFOQx06vi4o71uDJy6XC5+K0XWUcnz8eqpuoWVnA6IPH4srJsR6ZGeGBY3hOOgVZHhra/OHpjsNKhvHiamv6Jiu9a5SRMzWQH2GJeP1BRuSmU5DtIWSgxWdZUOlpLnyBUI/pgULbqlhf3hwWqymj83h7Y3WPAaOhzUdBVnp4OrKp3U9ti4+iHOt6ztSRE2q9raYVYwzPfLSbM6eNoqKpg4OLc9lY2UxNq7eLn8yhusXLlFF5LNlaG3XAchakbapspryxIxxZVdPsY0JRGiF7IDugKBuXdA6KjohEWmVOf7v4RFqtJKEF2R7W7Wm0348lht2j/6qaOzh4ZC6rdjX06GukiIAlwOUNHTgugcjpLGegnzQiJ64lkpHmSmhKxxmMDxqZQ2lVc48Iw5pmK4ed2yU9BnVnSrIg20N+pqfLFOmUUXnh4JZIqlu8jMzPoN0X7PE5OGtpjhxfQO22uh7WhiOgo/Mzo1o5zufkdklcX9pgoyIyAIad9msev+sZ7hw7kn+s/wfPlD7D3IlzOaXkFI4fezw5npzeG0mAkfldwwrPPWocgvCzM6d0cdYPt8MFq5u9/P68w3C7hGlj8qhqtqZRzpw+irV7GsnNSKOkMIsPNtfyxIc78QZC/P68w8j0uBmem866PU2s2F5PucngtGOP4O2N1WSkufj+d4/hqoc+5LSLj+Vzk4u579nVvPZpJSt+9XmO+9Vivjo5nwumDuPKB/9Dnq+NkrQAwcZGivAy0ng5osDN5i17GJfRwgHNTRS37eaQllbqH/wAQiE8wNUAKy0R4gNrgVAkJi2Np0kj691sfhd0E0pPJys3hz82BxhXWshRdV7KrnqT3GG5SGYmrsxMJDODoh3NzNvRhP//Wjlhyy7ajYtgmodR6emM3drA9Nqd1K38hODmUk7PyeCj8lbqdu4heEA2XnHj9VoDZshYUWiOiBwyKnpEW0O7Pzy4gHU33e4PUpSbERY/6LREtla38umeJq7630/4zuwD8AVCzBibb4lIDOuqptnLcQcOJy8zLeqA6QzoGyqa6fAHmTIqj3XlTVS3WBZWY7ufkLECPPJtK8EfDIUtuy4+kfZOH4+I4xOxRTHLE7asHFFsaPNbQmoLb1Wzl6mj8yjM9vSYxnEG54m2iDS0+btYhJF32M7nPnVMPi+tKScYMrgjBv3KJi8jctPJSncnNJA6lsiBI3IJGWu6bFiWh2ueXk2rL0CrN8CIvHSM6enDcF4XZHnIz0qjqaPzezF5VC7vb64hEAx1uUmsbvZSnJtBizfQQwisbBEhDhmdx4fb6nqIlvM5TB2Tx7ubqnu894qmDnLS3RTaUZ97CxWRgZBTRM7J13D9a7/k/C/fzt87dvLajtd4bvNzpEkahxUfxtThU5lSOIWDCw5mbO5YRmSNwCUDi2eYOjqfa8/O71F+5rRR3Pjl6XzhsDFh4fndvEPDjr7fzptBwI55n1CUw382VVOUk86//99JHGRHO43ISae2xcubG6pId7u4+dzDOOWPb3PwyNzwj3xHbRtprlqe+Wg3nzt4BCJCTuEwdqXn84GM4JPiyUwdncf7Fc24RsN3Zk/klqU7mDWxkKWj6hCBdLeL+789k+/+bTlPXXo8s0Zn8fe31nH/K2t57IJD+dmj/+Xio0Zy2oRcQq2thNraMN4ONu2o5t3VZZwzo4iKrVUEOzoY6TFISxO5vjbGttbjW1NHS8BPqKODUEcH+P3MAGYAFZ88x7won+mfgPb37qFLqMRrsMl++iJg/u0ilJbG2bjJfCWTR32Gkcvz+GuTj6IV2WwryEHS0iDNzfw9LWRmZZD232H8ams9BTuGcXV1G0fUjmB4k5/mgKG88k0+t6yM2Qg+3OysX8LXt9Th3+bhnCCcnj2O9u3VmJ
f30LhllNW2Ow3xpIHLxbjNq5mWNYbmpgYy1tfRNt6HuN3gdiNuN60bNlLS3EaoRcgRF8dPTds+lAAAHXhJREFUyqSyvZHanRUE8oWaunay/R2MSAsyIh0aWzvYXd9GyEBhtoc9De0YYxCxfE8ugfxM+667w/LxjMrPpCDbQ5sviDcQZEtV5yLWbTWtvLx2S3ia8eTJxRRmp/e4A3cG8olFnZaIYwVNGJ4ddTpr6qg8XlxdTkObr8tai8qmDkbmZeJJcyU0pVPb6sXtkrCl19hm+bLe3FBJQ5ufksIsJo7IiTr95IjIsLAlYvnnPG4Jv5eGdn84ozdYInJQcS5pbukRbFITFiAnhVHX485U2bQx+byzsZrGdj/DI3wmlU0djMrPJC8zjRoVkc8Qx/0APlnElHf+zE0/WsZvjv8Nq6pX8d7u9/io8iOe3/w8bYHOu6o0Vxqjs0eHBaUws5DCjEIKMwspyCgIvy7ILCA/PZ90d3qci3clK93Nd0+c1KXs0HGd02sZaW6cqfUpI3NZurWWB74zMywgAMNzMmjqCLBo2U5mH1TEuIIsLjvlQEblZzLanvt9aU05a8oamTA8m9u/doR9nnX38+6mKsYOy+R7J03i6qdXM2VUHjMPKOTR/25n6da6sCP6gKLscOTSnsYOXAcWsd1k0Th8NAedMJN1L1Wx6aADOWfu1C7v54UPtnE/67j4l2fwwUsbWLGjjiNKCli7u5G7Ljyay+9+nwe+PZOzIhZENjS1c/Ztr3N4cRZ3f+1Q/rB4NctKKzE+P2cdXMiJE4ex4P8+4UcnTeCRtzfxtcNH8eGmSsbnpfGNI0ZRUdPM39/bzFemj6CttYPlpZUcPjKLzbvrmTejmLI1u0kvyGDU8EwIBPD7AhifjxxXEFetj7GtDWTurOWQDh953j0c1OEj5PPTsGMlc9p9eAjhCgZxbTJ0+e+thSsBVkG0rZp+C/A+ON64HX/revyn3U94Hb4G8CqU2kXPALwId9mv22+CFxBwuQggrH/hl7QF4BSXi5NE2PL+rdzb5sfzsodTg4aMdA8ZGR7ubfGz47wHOavBy8kh8IYgsO5vTK5rB5eb6wwc8EkOR7cHMOJi1/vDLbFzCaNr27i2shXXzW9y/foqxu/MIS3Nzc8qWygZnkNlq5/yqtcRl4vROxu5vKKZmf6RXLKxhpo/fUIwNxNcLsTt4ohlu5id6cG43DR5g9SEViMuF7isayEucLusMnEx/JM9zCtvoWRpFWfs3EHjCy2EstKZvmEdIRFCZS5mHjQCr4Edde20/CcIYl0rsKmGw6q3kbepgGm122jxBaExh2PaGxldkcXBDWXUfbyG3OJc67oC6bt3cFDuKOo7/GytacO3c6fVdxFqdjUwor2BA2ljhK+ZtspqAnWFIIK4XNRV1JAd6OCQYW4yAj5q6xop8AyzZiNcLqoa2xmVl0FmuruLgz7ZqIgMFHcafOkOePhMeOGneM57kGNGH8Mxo48BIGRC7GnZw9bGrZS3lLOndQ/lreWUt5Szuno1Dd4GWvyxd4bzuDzkenLJTc8l15NLjieny/O89DxyPDlkpWWR6c60/qZZf52H89r563F5+MmZU/jW8QeErQuH6WPzyUhzcdrUkVx11iEA/GJO50A+vjCL/26p5eCRufz94mPD0SPDc9L5eGcD7b4AXzlyLKdOsbIlH1FS0GXF/MPfncV3H1nOIaPzGTPMEpHdDe00d/h5d2M1E4pycLmEEbnpPZzkgWCIJ5btZMLwbEbk2I71Nj/ljR2MGZbFiDyrL91/QHe+u5WKgJuHzj8Wz5hh/P/2zjy8juo64L8z8xY9Sc+SLFmyjG15xxDbMY7B2AFilgABwhJoPrYCCVnIByFpk7ahbhK+QJeUNG1Jk7pQ1qQBshBKUiesCWYxNsbY4H1fJcva9fT0tpk5/WNG8pOQbCxsSYb70zff3Lkzd+a8M6M5c7dzYhPaeWeHAyG4/JSTGDFtFKteS7Oiej
KvnBDj+k/NYXvJTja5ylduXMBTr+zgsYb1fPaWBdQ1Jln8izWcO72S17c3cfv3LuS//v55zp1eyTlXzmJPcydffGgFu8d3cv8Nc5k0bRSfXLSEiRVFbG9I8tStH+cPa2p5bMVu/uPaOXzu4Te46/IZfPuptVjqMaOyiC21Ldjq8fvbFnDd4le45ORKvr5wIuo4qOuiOYc9De187eer+ItzJvPq5nraOtLcfenJqOuC59GaSHHHr9ZwyUcqeeadWmz1+OrCySx+cTPnTqvgnGkVbNjbzK/f2M3NC2p4eWM9mWyOmdXF/Gn9fj4xZSSvbm5g8sgCdjd2YKlHadTm0plVvPZOLUUhobEtxeSKGKGITd3uZiZWlHGgo4nKwhCJthT1nS6egrgOEVUKchni2SzZnEPuQA48Bc8j0pZiUmcGZ2Mb05qTxNoFUY85OZfCVqEqnSPRtBFcl7HpHGM8j9g+4eJsDnf3MppVUc8Dz+P8Xu4+Gg7jK3VBsPA6fBNgFaTAH1DSxYqDyT3PHkxPBv4ZSL4KN+QdfinAEt8wu3+CfP+/PwC/Whuw7RcH03HgpwDPBOslsOXbB/d/Mlj4HTwFOL+DTXnnvitYqwgewob/sroNDJYFIkxb9hpWtOcs+feLMSJHg3GnwjmL4MW7YcwpMP+glxZLLMbGxzI2Prbf4lk3S2umlZZ0Cy2ZFn+dbiGZS5LIJUhmk3TkOvwl20FdR12PbVePLCBQSEI9jErEjviLFSFqRzlnYZiQHeUn6yKE7TBRO9q9v3pCgoJRHpfMHM8fa/cRqffLugX1tNLC6PJi5k6Psie1jr+4JMJJo7NkZC+xwkZOrCohXpzggS9MpSgUxSFJSZHHhv1NfPWxRnY3d/Lo508DoDJe8C7XJ0+s3MPm+g7+87o5WJZQWhgmkXZYtbuFL545ifIi/5+jMXGwKr+5PsFPl+3i6lPH85Exfq3sloWTWTC5nBGxMCdVj+juXH076AgdW1bI6BEFvLGzhQOJNP/63GbOnFrB7HGl3W3Nr21r6jbAIwsPtkHf/X/rOdCe4WdfmMfpk3wXOfFoiO0NScaPLGRaVTGlhWGSWZcN+/1hxeedVMm3nwJPLC6fN5GfvW6xoynJ6Jpq7MoqdkdGEJkwoYcuGrc2sqG8mfhpp9Jh72XFtkaKzzyze//abU28sgy+fOVprHPXcCCR4R+uOZfl9S8zcsZorrpiJjuX7+LJ1rX81RfOYf2SjazZ20pqRjWP2zu46ivzefA/XvX71U6Nc8UpY7EEqj8+kT/c97rvkbc1xaKLTmJ6dZy7HljB4uvnsOhnq/jOJSfzwCs72NeaImJbTBpVxMb9CR7+3Kk8u66e59b7/Whd3PXQCjbuT7DsjnO5/d6XqRpRQHsqh20JF82s5rtPr+ONRecxKh7lmvteJ+d6fO+yGVxx78ssvn4OF87w51blXI9pi5Zw+9mTyeZcHn5lO2u/+0nE83xfUp7nG9nutMeXHllB1Ba+cd5UbvjvZXzn4pPYeaCDJ1bsZP6EMpZta+SWMyfSkcryizd28eSX52OpsnxbAz9+cQulBTb/dtVMHnp1J6t2NlFWGGZkLMzlH63mrt+u49aFk5g1ZgTf+vUaOlI5BOXaU8fRnEjz4vr9/ODPPkou53D/S9vI5nxPCd++eDoPLN1GRWGYa04bx8odTfzfmlpiIaEsFubTM0fz4Cvb+czsaqZVxkE9PNfjR89vYs74MvA83t7TwlfOmugH1FAPDQy22P27tRkoxogcLc74Bux7C575W6hbA3NvhvLJEI5BqACs/m9exI5QWVhJZWHlEV9W1Q/Xm3bSpJwUKTdFykmRdtIH85yDeSknRdpNd293Op1k3ay/eP66I9dBxs2Q83Jk3ayfdnNkvWx3aOCf9OEkNTYO2oA738jL3OavQjV+8sLe3mLGw0tZwILYicJty0KElofIxoVdarHwiRghyybnCM0dLhUnhnlo5wge3WXT1JEjVpOmKB
Jmi1XGrS/axCe08Nv6GFtfKEXEYsWOFgrHeqRKR3PHy2EssbDF7l7/7x4LwSJauZs3ExYlY0I8W7eVeruVpnAbn/vNSzjxFB+dMYlH129kX0uGcNkuVCzOO20Kv958AOLb2ZGB+1Zt44U9mzl/zmiacPj9Dv9LsLxyExVicdvZU3l9/1IavEbs4j08t6OOsvJOdnSUUVW5m8aOLLERMc6cmaBwVxvvNK6mML6HPUmLdxp82RGwsHhmyx6s6H6y1j7saD3NuTp2tu1ERLCwWFVbi4RaiBcnmTg6R3M6AVaCsniGukQDLakWnnxrC+Vxj4JIlqJYjpZUgp1NUU4osxgVF5AcrsJZ08q4Yf5YEHA8h3iBxb7WJGVFUS6eVd3dp/HmLj8ezuTKYiZU+PNh5tSUcu28Gm5/7C0mlBcxsigcDNP2+1qeemsff9zUwDfPnwb4I53aUjn2taQ4Y2oFVUHfXn17mnhBiB2NSebUlHb3BeT3ezQkMihCVWkRyYxDCpukhBhRFKYvVJXNXiGnjC2lbHINdUVbaCypZFktxKZOZd5Zk3iscTWxk08ilcyyeYuSnXYyiXSOm39Zy6wZs7nr+jkUl8RItpeztHU7IUv47MfGUbZwMstXwmdmzCRUVsgfRuaYVlXM5voOPn/BqaQOdPBCYgPW+eezdEM9979ZgG0J0SqLMdddwLpO3xPvrdfN5yc/fpV106aQc5WzTxzFjZ+Zxa/qX2DO+TNZMG884A///eme55n86ZPJuh4PLdnIN2694D35Dnu/yGB6exwOzJ07V1euXHlsTu7mYOkPYOk90Lt2YIUDgxKFUAzCBWBHwAqBHfb326FgHclL994X7lnGCgVVVdtvd7VsP23ZIJKX7lpbPY/rUSbYd4gyioWDklWHjOeQ9Vxy6pBRh6znkPVy5NTDURcHD8dzcVVxcMmpi+O5OF371eGd2lb2t6eYWlVIaXEo2O/w2vYGNta3gbgoCuJSVmQz84Q44bDiqkt7Ksu2hgSTKwsJ2+Cqy/raVhCltNAmlXNoS2eoGhGhMGLhqounXve6O+15tGeyCB4hGxTviGt3H3ZUBQEUIWRJV0sVIcvqHp1kiYXjKlnHoyjqv9g7My6WZVEcDSEiJDMuOVdRhaJIiEjI90xdGouQdXzHgiOLokRDNnVtaUYUhCkJXAO1djok0g7VJTEc1x8uO25kEZGQhQR/QPeIxmTGZW9LijElMUoLI6yvS1AVL6CxI0tJLMIJpbHuptJE2mFnYycfOaGExkSW+vYMs8eVdZ+7tjXN7uZOYpEQM8aMwLYsXt/ezITyQtI5j4ZEhnkTy2lPO4wsjFCfyLBpf4J5E8vZuD9B1vE4ZXwZjqfEo2He3ttKZ9blo+PKeHVrI9Mq44gIxdEQ5cVRnl1Xz7SqOFMq43iqrN7dyv72DPMnl5PKeKze08o5J1VSHAkT/GwE4e6P303Y7tuoHg4ReVNV5/bONzWRo4kdhrPvgI/dBHWroXU3OGnIpcFJgZOBXCrIS/lGx8sFawecLLgdQZ7Tc1/3sXn5Q/CiEyAcLEdnAHNA34HeetIM7Ak6RyVYI1B7cDvjeGRcvxbvYREJ2RS2hv0XR9fxvcsLdOY8QrZNxPb3deYsatvSlBZFiBdGcIN2ZlcEh6DdWfyX5v5Ehta0gweUFEaoiEf9Ttng3B6CJxLI5F9rR3MnhQVhqktjiAjtGYeMq5QWRVAEFf/YfW0ZWtM5asqLEEtIOx77ExnSjsekqjgh26KpM8O+1jRTRscJ2Tbt6Ry7WlIURGxqyou6z6XA7tYUiYyLZQkeyqRKfyRQYzLL/vYMij8XpLggxJaGJDnX48TRIyCQX4Gc55HzlIKQjQo4nrK+LgEChZEQNRVFNCYz1LVlmDiqiMJIqLtsi5Nlb3uKSZXF7G3xJ51OqiwmbFsosC+boqUzRzRiUxMvxFVlc7KDWNamMwcjiwuoDEVRYGM6R4llUxUN05Fx2N2aY3phhEqx6f
Ac9mY8xmVdCjQ/TKp234ddLZ1MUaXGDkPGIemmiXXmiOQcKgoc4uksZVHQdIZw1iGraSKJLKFEmikRm4LObPfvKvc8wgX+sG7p6MBFqbI6sVMhyDiMjdqk2xNEgI4sSM6lPJShuamZUC5LeSxMR3sTqpBKQFSzeLjsra9lZDiH5fo1lVQK9qSUkQVpkp022+v9OUepnMvYkjBN7XVkHJcRBTm2H9jTbcAVxVVQz/HfU0cRUxM5nvE835B4LmjvtBek3YNr9Q5d5l15hyjTFcFYu5bgNaXeYbaP5fH47b/qkc05RGxB6K+89txGe6wVSOccYiHrXfuCC3WnE+ks+9tSxKMhKuNR38liH+fsKqeqJDM5iiL2Qfm66FWuri3F3pZOBBDfvBCyhHFlMcpi/jdgWyrLrqYkEVtAFdfziIUtxo8sJGRJj3M2JNK0dmaxLKgeUUBh2P99yUyOA+1pKoojxKMhwHcMKqrd2+/+7V26UpqDoa3xghCiiqdKJudQELaQvHI51yOVdbp1FAtb2CLd53I9D9fzvRn4h/hlBLBEseTgPfUA6+hF0f5Q0PC13YwqO/IJ0fABromIyIXAvwM28N+q+k+HKfLBwbIA66h/WRzvCPB+x58IEDvsUT7xYDmScxcf9iifUa5H7d426tpSpHMexdEQC08c1SPuh51xWPLiVg60p4mELCZWFHHtvPGECt79XHQ2JVm64QBXfWwshbGD+2OeUpBIEy85+KvfPROp/99T0mtSnUXf+ksks3zt8bcojoa4cMZoLpt9Qo/9drDkn/veZzZROSLKn59e02Ny7RcffoMXNh7AErhuXg1/ed5Uf7SgKg0dGeb9w/O+wYHudZfxsgSunzeev/3UdH+Guio3P/IGOxuT3HPVTL+DustABcb33B++hCVw+ewx3Pnpk3sa/+C4/DK3P76KrfUdXH3qWK4/vSbv4wIaO9Jc/uNXKS8Kc9OCCVxxygk9jPTv3q7ln36/kbk1pdx+zhQmVRT1OPdtP1/F2to2BLh67ji+dNbErlYr6hMprr1/OeVFYSyUpmSOgpBw7bzxXFl8VNsPfN0ezzUREbHx54N9EtiLH+HwGlXt10f7B6omYjB8iHFcj/a0Q2HE7jOY1lu7W9jbkiKZ8aN/JjMujucb4rOnVzKtqqfpT2VdbEuIhPqeDLyhrp3qkoLuWfiHQ9Xv2+kvUFtz0o9RY/ex3wtCRPR3rXW1bayvbWdKZTGzx5X2MK6qyvf/sIl9rSmiIYsplcVcMqu6O5bOQOmvJnK8G5H5wJ2qekGwfQeAqv5jf2WMETEYDIYjpz8jcrzHEzkB2JO3vTfIMxgMBsMgcLwbkfeEiHxJRFaKyMqGhoahFsdgMBg+MBzvRmQfkB/FaWyQ1wNVvU9V56rq3FGjRg2acAaDwfBB53g3Im8AU0VkoohEgKuBp4dYJoPBYPjQcFwP8VVVR0RuA57BHxn4oKquG2KxDAaD4UPDcW1EAFR1CbBkqOUwGAyGDyPHe3OWwWAwGIYQY0QMBoPBMGCO68mGA0FEGoBdR1isAmg8BuIcDYarbMNVLhi+sg1XuWD4ymbkOnIGKluNqr5reOuHzogMBBFZ2ddMzeHAcJVtuMoFw1e24SoXDF/ZjFxHztGWzTRnGQwGg2HAGCNiMBgMhgFjjMh7476hFuAQDFfZhqtcMHxlG65ywfCVzch15BxV2UyfiMFgMBgGjKmJGAwGg2HAGCNiMBgMhgFjjMhhEJELRWSTiGwVkW8NoRzjROSPIrJeRNaJyNeC/DtFZJ+IrA6Wi4ZIvp0i8k4gw8ogb6SIPCciW4J12SDLdGKeXlaLSLuIfH2odCYiD4rIARFZm5fXp47E597guXtbROYMslz3iMjG4Nq/EZHSIH+CiKTydLf4WMl1CNn6vX8ickegs00icsEgy/VEnkw7RWR1kD9oOjvEe+LYPWd+CEez9LXgO3XcBkwCIsAa4OQhkqUamBOk4/hhgU8G7gS+OQ
x0tROo6JX3z8C3gvS3gO8P8b3cD9QMlc6As4A5wNrD6Qi4CPg9fpjx04HlgyzX+UAoSH8/T64J+ccNkc76vH/B/8MaIApMDP537cGSq9f+fwG+M9g6O8R74pg9Z6YmcmhOA7aq6nZVzQKPA5cNhSCqWqeqq4J0AtjA8I/ieBnwSJB+BLh8CGU5F9imqkfqreCooapLgeZe2f3p6DLgUfV5HSgVkerBkktVn1VVJ9h8HT9Wz6DTj8764zLgcVXNqOoOYCv+//CgyiUiAnwWeOxYXPtQHOI9ccyeM2NEDs2wDL8rIhOAU4DlQdZtQVX0wcFuMspDgWdF5E0R+VKQV6WqdUF6P1A1NKIBfqyZ/H/q4aAz6F9Hw+nZ+zz+12oXE0XkLRF5SUTOHCKZ+rp/w0VnZwL1qrolL2/QddbrPXHMnjNjRI4zRKQY+DXwdVVtB/4TmAzMBurwq9FDwRmqOgf4FHCriJyVv1P9uvOQjCcXP2DZpcAvg6zhorMeDKWO+kNEFgEO8D9BVh0wXlVPAf4S+LmIjBhksYbl/cvjGnp+sAy6zvp4T3RztJ8zY0QOzXsKvztYiEgY/8H4H1V9EkBV61XVVVUPuJ9jVH0/HKq6L1gfAH4TyFHfVTUO1geGQjZ8w7ZKVesDGYeFzgL609GQP3sichNwCXBd8OIhaCpqCtJv4vc7TBtMuQ5x/4aDzkLAZ4AnuvIGW2d9vSc4hs+ZMSKHZtiE3w3aWR8ANqjqD/Py89svrwDW9i47CLIViUi8K43fKbsWX1c3BofdCPzvYMsW0OPLcDjoLI/+dPQ0cEMweuZ0oC2vOeKYIyIXAn8NXKqqnXn5o0TEDtKTgKnA9sGSK7huf/fvaeBqEYmKyMRAthWDKRtwHrBRVfd2ZQymzvp7T3Asn7PBGDFwPC/4oxc24389LBpCOc7Ar4K+DawOlouAnwLvBPlPA9VDINsk/FExa4B1XXoCyoEXgC3A88DIIZCtCGgCSvLyhkRn+IasDsjhtz3f3J+O8EfL/Dh47t4B5g6yXFvx28q7nrXFwbFXBvd4NbAK+PQQ6Kzf+wcsCnS2CfjUYMoV5D8M3NLr2EHT2SHeE8fsOTNuTwwGg8EwYExzlsFgMBgGjDEiBoPBYBgwxogYDAaDYcAYI2IwGAyGAWOMiMFgMBgGjDEiBsNhEJF/FJGzReRyEbmjn2PuFJFvBumbRGTMUbz+QhFZkLd9i4jccLTObzC8H4wRMRgOzzx8J4SfAJa+h+NvAo7IiAQznftjIdBtRFR1sao+eiTnNxiOFWaeiMHQDyJyD3ABB92KTwZ2AL9S1e/1OvZOoAPfJf7D+K4jUsB8fFfcPwSKgUbgJlWtE5E/4U8GOwN/8tpm4O/www40AdcBMXwD5gINwFfxPRJ3qOoPRGQ2sBgoDGT8vKq2BOdeDpwNlOJPhntZRD4CPBRcwwKu1J6OAg2GI8LURAyGflDVv8KfIf0wcCrwtqrO6m1AepX5FbAS39/UbHznhT8CrlLVjwEPAn+fVySiqnNV9V+AV4DT1XfU9zjw16q6E99I/KuqzlbVl3td8lHgb1R1Fv6M4+/m7Qup6mnA1/PybwH+PZBtLv5sa4NhwByqCm0wGPzAQ2uA6fixGY6UE4EZwHO+WyNsfHcZXTyRlx4LPBH4horg13r6RURKgFJVfSnIeoSDnooBupzvvYkfGAlgGbBIRMYCT5paiOH9YoyIwdAHQTPRw/gv9kb85iIJQp7OV9XUez0VsE5V5/ezP5mX/hHwQ1V9WkQW4kfwez9kgrVL8L+uqj8XkeXAxcASEfmyqr74Pq9j+BBjmrMMhj5Q1dVBk09XeNEXgQuCJqXDGZAEfmhS8B0BjhKR+eC76Q76JfqihINuuG/My88/X76MbUBLXpCjPwde6n1cPoEX2e2qei++J9dZh/ktBsMhMUbEYOgHERkFtKgft2K6qq5/j0UfBhYHtRYbuAr4voiswe9IX9BPuTuBX4rIm/i1ny
5+C1whIqv7iIp3I3CPiLyNH6Sp3/6agM8CawPZZuD3qRgMA8aMzjIYDAbDgDE1EYPBYDAMGGNEDAaDwTBgjBExGAwGw4AxRsRgMBgMA8YYEYPBYDAMGGNEDAaDwTBgjBExGAwGw4D5fw7CneWZPyX/AAAAAElFTkSuQmCC\n", 463 | "text/plain": [ 464 | "
" 465 | ] 466 | }, 467 | "metadata": { 468 | "needs_background": "light" 469 | }, 470 | "output_type": "display_data" 471 | } 472 | ], 473 | "source": [ 474 | "from matplotlib import pyplot as plt\n", 475 | "plt.plot(range(1,len(funV1)+1), funV1, label='alpha = 1e-4')\n", 476 | "plt.plot(range(1,len(funV2)+1), funV2, label='alpha = 1e-5')\n", 477 | "plt.plot(range(1,len(funV3)+1), funV3, label='alpha = 1e-6')\n", 478 | "plt.plot(range(1,len(funV4)+1), funV4, label='alpha = 1e-7')\n", 479 | "plt.legend()\n", 480 | "plt.xlabel(\"# Iterations\")\n", 481 | "plt.ylabel(\"Function Value\")" 482 | ] 483 | }, 484 | { 485 | "cell_type": "code", 486 | "execution_count": 30, 487 | "metadata": {}, 488 | "outputs": [ 489 | { 490 | "data": { 491 | "text/plain": [ 492 | "Text(0, 0.5, 'Function Value')" 493 | ] 494 | }, 495 | "execution_count": 30, 496 | "metadata": {}, 497 | "output_type": "execute_result" 498 | }, 499 | { 500 | "data": { 501 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYsAAAEGCAYAAACUzrmNAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAgAElEQVR4nOzdeZgV1Zn48e97l973hQa6QRYBWRRE3JeoxAWjotEkGqMYdcziOJlkZlyyjI4ZkzhZ/KlxI+qIM4m7RGI07kw0cQNEFASaTegWaGgaGnq/976/P6puc/vSy+3uuzTd7+d56qmqU+dWvVyg3z51Tp0SVcUYY4zpjifVARhjjBn4LFkYY4zpkSULY4wxPbJkYYwxpkeWLIwxxvTIl+oAEqGkpETHjBmT6jCMMeagsnTp0p2qWtrZsUGZLMaMGcOSJUtSHYYxxhxUROSzro4l9DaUiHxPRD4RkZUi8s9uWZGIvCoile660C0XEblbRNaJyAoRmRlxnnlu/UoRmZfImI0xxhwoYclCRKYB/wAcA0wHzhWRQ4GbgNdVdQLwursPMAeY4C7XAve75ykCbgGOdc91SzjBGGOMSY5EtiwmA++paqOqBoD/A74MzAUWuHUWABe423OBx9TxLlAgIiOAs4BXVXWXqtYBrwJnJzBuY4wxURLZZ/EJcLuIFANNwDnAEqBMVbe6dbYBZe52ObAl4vNVbllX5R2IyLU4LRJGjx4dvz+FMabf2traqKqqorm5OdWhGCAjI4OKigr8fn/Mn0lYslDVT0XkDuAVoAFYDgSj6qiIxGVyKlWdD8wHmDVrlk14ZcwAUlVVRW5uLmPGjEFEUh3OkKaq1NbWUlVVxdixY2P+XEI7uFX1YVU9SlVPAeqAtcB29/YS7rrGrV4NjIr4eIVb1lW5MeYg0dzcTHFxsSWKAUBEKC4u7nUrL9GjoYa569E4/RV/ABYB4RFN84Dn3e1FwBXuqKjjgD3u7aqXgTNFpNDt2D7TLTPGHEQsUQwcffm7SPQT3M
+KyCrgT8B1qrob+AVwhohUAl909wFeBDYA64DfAd8FUNVdwE+BD9zlNrcs7lase5ebHzmf9z9+JRGnN8aYg1aib0OdrKpTVHW6qr7ultWq6mxVnaCqXwz/4HdHQV2nquNV9XBVXRJxnkdU9VB3+e9ExbutZgMveDeyfNPfEnUJY8wAM2bMGHbu3NnvOvHy9NNPM3XqVDweT58eLn700UcpLS1lxowZzJgxg4ceeigucQ3KJ7j7qixjGAA1bQlpuBhjTI+mTZvGc889x7e+9a0+n+NrX/sav/3tb+MYlU0k2EFueh65wRA7A3WpDsUYE2cXXHABRx11FFOnTmX+/PkHHN+0aROHHXYYl112GZMnT+biiy+msbGx/fg999zDzJkzOfzww1m9ejUA77//PscffzxHHnkkJ5xwAmvWrOl3nJMnT2bSpEkHlAeDQf7t3/6No48+miOOOIIHH3yw39fqDWtZRPB4/VQEAuwM7El1KMYMWv/xp5Ws+rw+ruecMjKPW86b2m2dRx55hKKiIpqamjj66KO56KKLKC4u7lBnzZo1PPzww5x44olcddVV3Hffffzrv/4rACUlJSxbtoz77ruPX/3qVzz00EMcdthhvPXWW/h8Pl577TV++MMf8uyzz3Y45969ezn55JM7jekPf/gDU6ZMienP+PDDD5Ofn88HH3xAS0sLJ554ImeeeWanw1+fffZZ/vrXvzJx4kTuvPNORo0a1ckZe8eSRQTx+hgZCPBp0JKFMYPN3XffzcKFCwHYsmULlZWVBySLUaNGceKJJwLwjW98g7vvvrs9WXz5y18G4KijjuK5554DYM+ePcybN4/KykpEhLa2tgOum5uby/Lly/sd/yuvvMKKFSt45pln2q9dWVl5QLI477zzuPTSS0lPT+fBBx9k3rx5vPHGG/2+viWLCF6vn/JAgLdC9aiqDfUzJgF6agEkwuLFi3nttdd45513yMrK4tRTT+30OYPo//OR++np6QB4vV4CgQAAP/nJTzjttNNYuHAhmzZt4tRTTz3gnPFqWagq99xzD2eddVaH8h/96Ef8+c9/BmD58uUdEuA111zDDTfcENP5e2LJIoLHbVm0EqS2uZaSzJJUh2SMiYM9e/ZQWFhIVlYWq1ev5t133+203ubNm3nnnXc4/vjj+cMf/sBJJ53U43nLy53Zhx599NFO68SrZXHWWWdx//33c/rpp+P3+1m7di3l5eXcfvvt3H777e31tm7dyogRIwBYtGgRkydP7ve1wTq4O/D4/FS0Ob8xfL7v8xRHY4yJl7PPPptAIMDkyZO56aabOO644zqtN2nSJO69914mT55MXV0d3/nOd7o97w033MDNN9/MkUce2d7a6K+FCxdSUVHBO++8w5e+9KX2lsQ111zDlClTmDlzJtOmTeNb3/pWp9e8++67mTp1KtOnT+fuu+/uMon1lqgOvmmUZs2apX0Zn7xjezW7H5rOlytG8F+n/Bdzxs5JQHTGDD2ffvpp3H7DTZRNmzZx7rnn8sknn6Q6lKTo7O9ERJaq6qzO6lvLIoLH7bMAqN5n008ZY0yYJYsIHq+PLFVySLPbUMYMMWPGjBkyrYq+sGQRwetPA6CELGtZGGNMBEsWEbxe50UgJWRStbcqxdEYY8zAYckigtfrBWCYZvD5vs8JhOIzusEYYw52liwieL0e2tRLWSiDgAbY1rAt1SEZY8yAYMkigleEIB6GhZwnNTfv3ZziiIwxiTbYpigHeOqpp5gyZQpTp07l61//elzisie4I3g8QgAvw4J+8GP9FsaYpOvvFOWVlZX8/Oc/529/+xuFhYXU1NT0/KEYJPq1qt8XkZUi8omIPC4iGSIyVkTeE5F1IvKkiKS5ddPd/XXu8TER57nZLV8jImd1db14COKlQD2ke9PZXG8tC2MGi6EyRfnvfvc7rrvuOgoLCwEYNmxYv2OCBLYsRKQc+Cdgiqo2ichTwCXAOcCdqvqEiDwAXA3c767rVP
VQEbkEuAP4mohMcT83FRgJvCYiE1U1mIi4g3jwhkJU5FTYbShjEuGlm2Dbx/E95/DDYc4vuq0yVKYoX7t2LQAnnngiwWCQW2+9lbPPPjuma3Qn0behfECmiLQBWcBW4HQgfBNtAXArTrKY624DPAP8VpwpH+cCT6hqC7BRRNYBxwDvJCLgIF5EA4zKG8WWvVsScQljTAoMlSnKA4EAlZWVLF68mKqqKk455RQ+/vhjCgoK+nX9hCULVa0WkV8Bm4Em4BVgKbBbVcNjUquAcne7HNjifjYgInuAYrc8corIyM+0E5FrgWsBRo8e3ee4g3iRUIDRuaN59/N3bapyY+KthxZAIgylKcorKio49thj8fv9jB07lokTJ1JZWcnRRx8d03W6krA+CxEpxGkVjMW5fZQN9L8t1AVVna+qs1R1VmlpaZ/PExQPEgoyKncUzcFmdjTtiGOUxphU6O0U5UDcpyjvbIk1UcD+KcrDrZe1a9fS0NDA7bff3n4+cPpmFi9eDMDOnTtZu3Yt48aNi/k6XUlkB/cXgY2qukNV24DngBOBAhEJt2gqgPC8GtXAKAD3eD5QG1neyWfiLoAPUadlAVgntzGDwFCaovyss86iuLiYKVOmcNppp/HLX/7ygNttfaKqCVmAY4GVOH0VgtM/cT3wNHCJW+cB4Lvu9nXAA+72JcBT7vZU4CMgHaeVsgHwdnfto446Svtq/S1T9KPfzNXN9Zt12qPT9Nm1z/b5XMYYx6pVq1IdQo82btyoU6dOTXUYSdPZ3wmwRLv4uZrIPov3ROQZYBkQAD4E5gN/Bp4Qkf90yx52P/Iw8D9uB/YuN2GgqivdkVSr3PNcpwkaCQUQEqeDe2T2SPweP5v2bErUpYwx5qCR0NFQqnoLcEtU8Qac0UzRdZuBr3RxntuB2zs7Fm9OB3cQr8fLIXmHsLF+YzIua4xJMZuivHs23UeUkHjxuA2XMXljrGVhjDFYsjhACA8SThb5Y6jaW0Vb6MCx08YYM5RYsogSFC8e9zGQMXljCGiA6r32IiRjzNBmySJKSHz7b0PljwFg4x7rtzDGDG2WLKKE6NiyANhUvyl1ARljEmqwTVH+/e9/nxkzZjBjxgwmTpzY72k+wmyK8iiRHdz56fkUZRRZsjDGJE1/pyi/884727fvuecePvzww7jEZS2LKJHJAmxElDGDxVCZojzS448/zqWXXtrvmMBaFgcIiQ9PaH+yGJs/ljc2v5HCiIwZXO54/w5W71od13MeVnQYNx5zY7d1hsoU5WGfffYZGzdu5PTTT4/p/D2xZBElJF487E8W4wvG82zls9Q21VKcGYf5VYwxKTFUpigPe+KJJ7j44ovxer39vjZYsjhA9G2o8QXjAVi/e70lC2PioKcWQCIMpSnKw5544gnuvffemM4dC0sWUTQqWRxacCgA63av45gRB8xSYow5CPR2ivLjjz8+7lOU91d4ivLTTz8dv9/P2rVrKS8v5/bbb+f22zvOhrR69Wrq6uo4/vjj+33dMOvgjhISH96I21ClmaXkpuWyfvf6FEZljOmPoTRFOTitiksuuSSuL24TZ1bawWXWrFnal/HJAIt/9XWOaPg7Rbdsai+74qUrEIQFcxbEKUJjhpZPP/2UyZMnpzqMbm3atIlzzz13yEwm2NnfiYgsVdVZndW3lkWUkHg7tCzA6bdYv2c9gzGxGmNMLCxZRPP4OvRZgNNvsadlD7XNtSkKyhiTaDZFefcsWURR8eLrpGUBTie3MaZvrGU+cPTl7yJhyUJEJonI8oilXkT+WUSKRORVEal014VufRGRu0VknYisEJGZEeea59avFJF5iYoZQD2+A25DtY+IqrNkYUxfZGRkUFtbawljAFBVamtrycjI6NXnEvla1TXADAAR8QLVwELgJuB1Vf2FiNzk7t8IzAEmuMuxwP3AsSJShPO2vVmAAktFZJGq1iUk7qiH8gCKM4opyihibd3aRFzSmEGvoqKCqqoqdu
zYkepQDE7yrqio6NVnkvWcxWxgvap+JiJzgVPd8gXAYpxkMRd4zH1p+LsiUiAiI9y6r6rqLgAReRU4G3g8EYGGxIePEKiCO+xMRJhYODHuUxQYM1T4/f4unzQ2B4dk9Vlcwv4f7mWqutXd3gaUudvlwJaIz1S5ZV2VdyAi14rIEhFZ0q/fXjxu/gx1bF0cVnQY63evJxCKz1hqY4w5mCQ8WYhIGnA+8HT0MbcVEZebmKo6X1Vnqeqs0tLSvp/H486jEpUUJhZOpDXUajPQGmOGpGS0LOYAy1R1u7u/3b29hLuuccurgVERn6twy7oqT4iuksWkImfK4DV1/Z+C2BhjDjbJSBaX0rF/YREQHtE0D3g+ovwKd1TUccAe93bVy8CZIlLojpw60y1LCPH4nY2oZDE2fyx+j581uyxZGGOGnoR2cItINnAGEPnKp18AT4nI1cBnwFfd8heBc4B1QCPwTQBV3SUiPwU+cOvdFu7sTgSVcMuiY5+F3+Pn0IJDrWVhjBmSEposVLUBKI4qq8UZHRVdV4HrujjPI8AjiYjxAN5wB/eBHdkTCyfydvXbSQnDGGMGEnuCO4pK18licvFkaptrqWmsOeCYMcYMZpYsorV3cB/4xqupxVMBWLlzZTIjMsaYlLNkEUW6eM4CnBFRHvGwstaShTFmaLFkEc1NFqHAgS2LTF8m4wvGW7Iwxgw5liyieZ2hs8HggckCYErRFFbVrrIJ0YwxQ4oli2jhlkWw82k9ppZMZVfzLrY3bu/0uDHGDEaWLKK5HdzBTm5DgXVyG2OGJksWUcS9DdVVy2JS0SR84uPjnR8nMyxjjEkpSxZRxH2CW7vos0j3pjOxaKIlC2PMkGLJIpr7BHcw0PVU5NNLp/Pxzo9tunJjzJBhySJK+DaUdpMIppdOpynQZO/kNsYMGZYsoojbwd3ZcxZhR5QeAcBHNR8lJSZjjEm1mJOFiGQlMpCBYn8Hd9fJoiKngqKMIlbsXJGssIwxJqV6TBYicoKIrAJWu/vTReS+hEeWIuINd3B3fRtKRJheOp2PdljLwhgzNMTSsrgTOAuoBVDVj4BTEhlUKkn7Q3ldtyzA6bf4rP4zdjUn7NUaxhgzYMR0G0pVt0QVHTjL3iDh8aYB3bcsAI4qOwqAD7d/mPCYjDEm1WJJFltE5ARARcQvIv8KfBrLyUWkQESeEZHVIvKpiBwvIkUi8qqIVLrrQreuiMjdIrJORFaIyMyI88xz61eKyLyur9h/4g6d7W40FDhPcqd701myfUkiwzHGmAEhlmTxbZw32JUD1cAMunijXSfuAv6iqocB03GSzE3A66o6AXjd3QeYA0xwl2uB+wFEpAi4BTgWOAa4JZxgEiGcLLp6gjvM7/UzvXQ6S7cvTVQoxhgzYPSYLFR1p6pepqplqjpMVb/hvhq1WyKSj9O38bB7nlZV3Q3MBRa41RYAF7jbc4HH1PEuUCAiI3D6S15V1V2qWge8Cpzdyz9nzDw+t2XRQ7IA51bUmro17G3dm6hwjDFmQOjxHdwi8t/AAfNxq+pVPXx0LLAD+G8RmQ4sBb4HlKnqVrfONqDM3S4HIvtGqtyyrsqj47wWp0XC6NGjewita+Lp+aG8sKPKjiKkIZbXLOfkipP7fE1jjBnoYrkN9QLwZ3d5HcgD9sXwOR8wE7hfVY8EGth/ywkAdV4KEZcXQ6jqfFWdpaqzSktL+3web/gJ7h5GQ4HzcJ5PfNZvYYwZ9HpsWajqs5H7IvI48HYM564CqlT1PXf/GZxksV1ERqjqVvc2U417vBoYFfH5CresGjg1qnxxDNfvk/bnLGJoWWT6Mjm89HDe3/p+osIxxpgBoS/TfUwAhvVUSVW34YykmuQWzQZWAYuA8IimecDz7vYi4Ap3VNRxwB73dtXLwJkiUuh2bJ/pliWEx21ZEEOfBcBxI45j1a5V7GnZk6iQjDEm5WJ5gnuviNSH18CfgBtjPP/1wO9FZAXOKKqfAb8AzhCRSu
CL7j7Ai8AGYB3wO+C7AKq6C/gp8IG73OaWJYS3Fx3c4CSLkIZYss1uRRljBq9YbkPl9vXkqrocmNXJodmd1FW6GJKrqo8Aj/Q1jt4Izw1FjNOPH15yOJm+TN7Z+g6zDzngj2WMMYNCl8ki8qG4zqjqsviHk3o+r4+gSswtC7/Xz6yyWby39b2eKxtjzEGqu5bFr7s5psDpcY5lQPB4IIAXNPYXGx034jjeqn6Lrfu2MiJnRAKjM8aY1Oiyz0JVT+tmGZSJAsDrEYJ4Y+7gBjix/EQA3v48lkFixhhz8OmxzwJARKYBU4CMcJmqPpaooFLJ5xECeGLuswAYlz+OkdkjeavqLb4y8SsJjM4YY1Ijlie4b8F5zmEKzoilOTjPWQzKZOERt2XRi2QhIpxUfhIvbHiB1mArae7MtcYYM1jE8pzFxTijl7ap6jdxJgTMT2hUKeRtb1n0bhb2kytOpjHQyLKaQdnvb4wZ4mJJFk2qGgICIpKH88T1qB4+c9Bq77PoRcsC4Jjhx+D3+Hmr6q0ERWaMMakTS7JYIiIFOA/KLQWWAe8kNKoUcloWvU8WWf4sjhl+DIu3LMZ5ZMQYYwaPWKYo/66q7lbVB4AzgHnu7ahByesRgupBepksAE4ffTqb925m/e71CYjMGGNSp8tkISKrROTHIjI+XKaqm1R1RXJCSw2vSK+fswg7ddSpALyx5Y04R2WMManVXcviUiAbeEVE3heR74vIyCTFlTJOn4UH6WUHN8CwrGEcUXIEb2y2ZGGMGVy6eyjvI1W9WVXHA/8EjAbeFZE3ReQfkhZhkjl9Fr5e91mEnTb6NFbWrmRbw7Y4R2aMMakT0xTlqvquqn4fuAIoAH6b0KhSKDx0VvpwGwrgjEPOAOCVTa/EMyxjjEmpWKYoP1pEfiMinwG3Ag8Cg/Z21P6hs72/DQVwSN4hTC6azMubEvbKDWOMSbruOrh/JiLrgftw3lZ3oqqeqqoPqGpt0iJMMqeD24NH+5YsAM4acxYrdq6gel91HCMzxpjU6a5l0QycrapHq+qvVbUqWUGlUrhl0Zehs2FnjTkLwFoXxphBo7sO7ttUtbI/JxeRTSLysYgsF5ElblmRiLwqIpXuutAtFxG5W0TWiciKyPdpiMg8t36liMzr6nrxIOKOhupHy6Iit4IjSo7ghQ0v2AN6xphBoS/v4O6t01R1hqqG35h3E/C6qk4AXnf3wZmgcIK7XAvcD05yAW4BjgWOAW4JJ5hECeLtcwd32Pnjz6eyrpJVu1bFKSpjjEmdZCSLaHOBBe72AuCCiPLH1PEuUCAiI4CzgFdVdZeq1gGvAmcnMkAnWfS9ZQEwZ9wc0r3pLKxcGKeojDEmdWJKFiJSLiIniMgp4SXG8yvOQ31LReRat6xMVbe629uAMne7HNgS8dkqt6yr8ugYrxWRJSKyZMeOHTGG17mQePH0o88CIC8tj9mjZ/PixhdpDjT361zGGJNqsbzP4g7ga8AqIPzrtgJ/jeH8J6lqtYgMA14VkdWRB1VVRSQuN/VVdT4wH2DWrFn9OmdQfHi0rd8xXTjhQl7c+CJvbH6Dc8ad0+/zGWNMqsTyprwLgEmq2tLbk6tqtbuuEZGFOH0O20VkhKpudW8z1bjVq+k49XmFW1aN8/KlyPLFvY2lN5pJxx/sf2vgmOHHMDJ7JAvXLbRkYYw5qMVyG2oD4O/tiUUkW0Ryw9vAmcAnwCIgPKJpHvC8u70IuMIdFXUcsMe9XfUycKaIFLod22e6ZQnTLJmkhZr6fR6PeLjg0At4b+t79syFMeagFkuyaASWi8iD7tDWu0Xk7hg+Vwa8LSIfAe8Df1bVvwC/AM4QkUrgi+4+OK9s3QCsw3l3xncBVHUX8FPgA3e5zS1LmMY4JQuAuYfOBWDRukVxOZ8xxqRCLLehFrlLr6jqBpxXsEaX1+K8pjW6XIHrujjXI8AjvY2hr1okE1+oDQKt4Ovf+7RH5ozk2BHH8mzls1xzxDX4Pb
1upBljTMrF8vKjBcDjOG/JWwr8wS0btJo9Gc5G6764nO+yyZexvXE7r332WlzOZ4wxyRbLRIKnApXAvTjzRK3txdDZg1KzZDobrQ1xOd8pFacwJm8MC1YusCe6jTEHpVj6LH4NnKmqX1DVU3AekrszsWGlVosnvsnCIx4un3I5K2tX8mHNh3E5pzHGJFMsycKvqmvCO6q6lj6MjjqYtEiWsxGnZAFw3vjzyE/PZ8HKQX0HzxgzSMWSLJaIyEMicqq7/A5YkujAUqnFG25Z7I3bOTN9mXx14ld5c8ubbK7fHLfzGmNMMsSSLL6D8/T2P7nLKrds0GqN822osK9P/jo+j4///fR/43peY4xJtFhGQ7Wo6m9U9cvucmdfnuY+mLR6438bCqAks4QvjfsSCysXsqOxf/NXGWNMMnX3pryn3PXH7vslOizJCzH59rcs4jN0NtK1h19LIBTgdx//Lu7nNsaYROnuobzvuetzkxHIQNLqSUzLAmBU3igumHABT699miunXsnInEH7OnNjzCDS3ZvywtOIf1dVP4tccKfiGKwCXvehvJb4tywAvnXEtxCEB1c8mJDzG2NMvMXSwX1GJ2Vz4h3IQOLx+miW9ITchgIYnj2cr036Gs+ve57P6j9LyDWMMSaeuuuz+I6IfAwcFtVfsRH4OHkhJp/XIzSTmZDbUGFXH341ad407lt+X8KuYYwx8dJdy+IPwHk4U4ifF7EcpaqXJSG2lPF6hCbJSGiyKMks4bLJl/HixhdZsWNQjxcwxgwC3fVZ7FHVTcBdwK6I/oqAiBybrABTwStCkyS2ZQFwzeHXMCxzGLe/dzvBUP/e+W2MMYkUS5/F/UDkzft9btmg5fEITWQmrM8iLNufzb/M+hdW1a7iuXXPJfRaxhjTH7EkC9GIqVJVNURs78E4aPm9QgOJ6+CONGfsHGaVzeKuZXexu3l3wq9njDF9EdNrVUXkn0TE7y7fw3mjXUxExCsiH4rIC+7+WBF5T0TWiciTIpLmlqe7++vc42MiznGzW75GRM7q3R+x90py0tkTTEv4bSgAEeHmY29mX+s+7vnwnoRfzxhj+iKWZPFt4ASgGqgCjgWu7cU1vgd8GrF/B3Cnqh4K1AFXu+VXA3Vu+Z1uPURkCnAJMBU4G7hPRLy9uH6vDc/PYE8wnVCCnrOINrFwIpcedilPr32aZduXJeWaxhjTG7HMDVWjqpeo6jBVLVPVr6tqTSwnF5EK4EvAQ+6+AKcDz7hVFgAXuNtz3X3c47Pd+nOBJ9w5qjbivKP7mNj+eH0zPC+DfZqBtiS+ZRF2/ZHXMzJnJD96+0c0tjUm7brGGBOLWN6UVyoiPxSR+SLySHiJ8fz/D7gBCLn7xcBuVQ24+1VAubtdDmwBcI/vceu3l3fymcg4rxWRJSKyZMeO/k3SNzw/g0YykLbkJYssfxa3n3Q71fuq+c3S3yTtusYYE4tYbkM9D+QDrwF/jli6JSLnAjWqurRfEcZIVeer6ixVnVVaWtqvcw3Py6BBM/CEWiHQGqcIe3ZU2VFcMeUKnlzzJH+v/nvSrmuMMT2JZVRTlqre2IdznwicLyLnABlAHs4zGwUi4nNbDxU4fSG461FAlYj4cBJUbUR5WORnEsJpWaQ7O637wFeUyMt1cP3M63mr+i1+8vef8Ox5z1KQUZC0axtjTFdiaVm84P7A7xVVvVlVK1R1DE4H9Rvuk99vAhe71ebhtFwAFrn7uMffcIfsLgIucUdLjQUmAO/3Np7eyErzof5sZycJI6IipXvT+fnJP6euuY6b3rrJHtYzxgwIsSSL7+EkjCYRqReRvSJS349r3gj8QETW4fRJPOyWPwwUu+U/AG4CUNWVwFM4b+j7C3Cdqib8J6g/M8/ZSHKyAJhSPIWbjrmJv33+Nx5Y8UDSr2+MMdF6vA2lqrn9vYiqLgYWu9sb6GQ0k6o2A1/p4vO3A7f3N47eyMjJgyZSkiwAvjLxK3y04yMe+OgBDi85nF
MqTklJHMYYA7GNhjqlsyUZwaVSTm6+s9G6NyXXFxF+fNyPmVQ4iZvfutmmMjfGpFQst6H+LWL5CSEHbjUAACAASURBVPAn4NYExjQg5OQ6HcuB5uQ8mNeZTF8md556J17x8p3XvsOu5l0pi8UYM7TF8lDeeRHLGcA0nCevB7WCfCdZ1O9J7R91VN4o7pl9DzWNNVz/+vU0BZpSGo8xZmiKpWURrQqYHO9ABpr8Uufd2A27Pk9xJDC9dDp3nHwHH+/8mBv/eqONkDLGJF0sfRb3iMjd7vJb4C1g0E9gVFJSSqOm01aX0Ec6Yjb7kNnceMyNvLnlTf797/9uCcMYk1SxPJS3JGI7ADyuqn9LUDwDxoj8LLZrAVq/NdWhtLts8mXsbd3LvcvvRRBuO/E2PNKXxqExxvROl8lCREar6mZVXdBVncEsL9PHGilmeOP2VIfSwbenfxtV5b6P7sPr8XLL8bdYwjDGJFx3P2X+GN4QkWeTEMuAIiLU+0vIaolpgt2k+s6M7/CtI77Fc5XP8cO3f0hbsC3VIRljBrnubkNJxPa4RAcyEDWll5HX8A6ogkjPH0ii62ZcR4Yvg7uW3UVdcx13nnonWf6sVIdljBmkumtZaBfbQ0Ywp4w02qBx4D3fICJcc/g13HbCbby39T2uevkqdjbtTHVYxphBqrtkMT08FxRwhLsdj7mhDh55zvDZUH3qh8925cIJF3LXaXexfvd6LnnhElbWrkx1SMaYQajLZKGqXlXNU9VcVfW52+H9vGQGmSrphc47lvbu2NJDzdT6wqgv8Nicx/CIh3kvzeOFDS+kOiRjzCBjw2i6kVXivEZjb83mFEfSs8nFk3ni3Cc4vORwbn7rZn723s9oCbakOixjzCBhyaIbRWVOsmjeVZXiSGJTlFHE/DPnc/mUy3l89eNc9ufL2LBnQ6rDMsYMApYsulFWlMdOzUMHcJ9FNL/Hzw1H38C9s++lprGGS164hKfWPEVIQz1/2BhjumDJohsl2enUaCGefdtSHUqvnVJxCs+c/wzTS6fz03d/yj+88g9s2Tuw+16MMQNXwpKFiGSIyPsi8pGIrBSR/3DLx4rIeyKyTkSeFJE0tzzd3V/nHh8Tca6b3fI1InJWomKO5vEIdb4S0psG1lPcsRqWNYz5Z8znluNvYWXtSi5adBELVi6wh/iMMb2WyJZFC3C6qk4HZgBni8hxwB3Anap6KM5U51e79a8G6tzyO916iMgUnHd4TwXOBu4TEW8C4+5gX3oZha3bnAfzDkIiwsUTL+aPc//I0cOP5ldLfsVFf7qIv1UP+um9jDFxlLBkoY7wm4P87qLA6cAzbvkC4AJ3e667j3t8toiIW/6Eqrao6kZgHZ28ljVRduccSrbug/qBMftsXw3PHs69s+/l3tn3EgwF+fZr3+b6169nc/3AH+lljEm9hPZZiIhXRJYDNcCrwHpgt6oG3CpVQLm7XQ5sAXCP7wGKI8s7+Uzkta4VkSUismTHjh1x+zM0Fk8FQLd+FLdzptIpFaewcO5Cvn/U93l/2/tc8PwF/Oy9n1HTOPDmwDLGDBwJTRaqGlTVGUAFTmvgsARea76qzlLVWaWlpXE7r6dsKiEVWqoGR7IASPOmcdW0q3jhwhc4f/z5PL3mac557hzueP8OmzLEGNOppIyGUtXdwJvA8UCBiIQnMKwAwvd3qoFRAO7xfKA2sryTzyRcUVExm7SMturBkyzCSrNKufWEW1l04SLmjJ3D46sfZ86zc/jlB79k676B8x4PY0zqJXI0VKmIFLjbmcAZwKc4SeNit9o84Hl3e5G7j3v8DVVVt/wSd7TUWGAC8H6i4o42Ij+DVToGX80nybpk0o3KHcVPT/wpz1/wPGcccga///T3zHluDjf83w18snPw/rmNMbFLZMtiBPCmiKwAPgBeVdUXgBuBH4jIOpw+iYfd+g8DxW75D4CbAFR1JfAUsAr4C3CdqibtnaLD8zNYFTqEzIYqaNqdrM
umxCF5h/Czk3/GS19+iW9M/gZvVb/FpX++lHkvzeMvG/9Ca7A11SEaY1JE9CAdEtqdWbNm6ZIlS3quGIPWQIhr//3nPJr2X3Dln2HMSXE578FgX+s+Fq5byO8//T3V+6opSC/gvPHncfGEixlXMCRfcWLMoCYiS1V1VmfH7AnuHqT5PGzNnOjsVC9NbTBJlpOWw+VTLufFL7/Ig198kKOHH83jnz7O3OfncvmLl/Pk6iepa65LdZjGmCSwlkUMzrvnbe6vv46K8lFw5dCe/ru2qZZF6xfx/LrnWb9nPT7xcdzI4zhn7DmcPvp0sv3ZqQ7RGNNH3bUsunutqnGV5WXwt32z+NrmPzr9FpkFqQ4pZYozi/nmtG9y5dQrWVu3lhc3vshLG1/ih2//EL/HzzEjjuG0itM4ddSplGWXpTpcY0ycWMsiBj/54yd8tvxNHuPHcNHDcPjFPX9oCAlpiI92fMTrn73Om1veZPNe56nwKcVTOG3UaXyh4gtMKpqER+yupzEDWXctC0sWMbj3zXX8+uVPWV/4PeTQ0+Gih+J27sFGVdm4ZyNvbHmDxVsWs2LHChSlML2QY0ccy/Ejj+e4EccxMmdkqkM1xkSx21D9NDwvgxAe9h0ym9zKl6GtCfyZqQ5rQBIRxhWMY1zBOK45/Bp2Nu3knc/f4d2t7/LO5+/wl01/AZxhuscMP4aZZTOZOWwmI7JH4EwFZowZiCxZxGBEfgYAn406n2mrn4KPn4aZV6Q4qoNDSWYJ540/j/PGn4eqsmHPhvbE8eLGF3l67dMAlGWVMXPYTI4sO5Ijhx3J+ILx+D3+FEdvjAmzZBGDMjdZrM2YwbSyw+Hd++HIy8F+E+4VEWF8wXjGF4znssmXEQwFWbd7HctqlvHh9g9ZWrOUlza9BEC6N51JRZOYWjy1fRmbPxavJ2mz0xtjIliyiEG4ZbG1vgWO+w48/13YsBjGn5bawA5yXo+XSUWTmFQ0iUsPuxRVZWvDVpbXLGdl7UpW1q7k+XXP8/jqxwHI9GUyuWgyk4snM6FgAhMKJ3BowaFk+bNS/CcxZvCzZBGDrDQfhVl+qnc3wUkXwRs/hdduhbGngP2mGzciwsickYzMGck5484BIBgK8ln9Z+3J45Odn/Bc5XM0BZqczyBU5Fa0J48JhRMYlz+O0XmjSfemp/KPY8ygYskiRuWFmVTXNYE/A878T3j2alj6KBx9dY+fNX3n9XjbO8zPG38e4AzVrd5bzdrda6msq3SW3ZUsrlpMSEOAk0RG5oxkTP4YxuaNZUzeGMbkj+GQvEMoyyqzznRjesmSRYzKCzJZv6PB2Zl2kZMoXv8PmHAGFIxOaWxDjUc8jMobxai8UcwePbu9vDnQzIY9G9i4ZyOb6jexac8mNtVvYtn2Ze0tEXBuZ43OHU15TjnlueVU5FRQkVtBeU45I3NGkumzkW7GRLNkEaORBZn8de1OVNX5rfS8u2D+qfDk5XDVy06Lw6RUhi+DKcVTmFI8pUN5SEPUNNZ0SCCb6zezqX4Tf//87zQHmzvUL84obk8e5TnljMgZQVlWGWVZZQzPHk5eWp61TMyQY8kiRuUFmTS1Bdnd2EZhdhoUj4cLH4QnLoU/fhu+/BB47esciDziYXj2cIZnD+e4Ecd1OKaq1DbXUrW3iup91VTvq27f/mjHR7y86WWCUTPiZ/oy25NHWXbZAdvDsoZRkF5gI7fMoGI/3WJUUejcmqje3eQkC4DDznH6L175MYgXLnwAvPZswMFERCjJLKEks4QZw2YccDwQCrCzaSfbG7ezrWEb2xu2s73RXRq288G2D6hprDkgoXjEQ1FGESWZJRRnFFOcWdx+neKMiO3MYmupmINCwpKFiIwCHgPKAAXmq+pdIlIEPAmMATYBX1XVOnH+t9wFnAM0Aleq6jL3XPOAH7un/k9VXZCouLtSXuAMz6yqa2Jaef7+AydcD6GAMzpq33b46mOQVZTs8EyC+Dy+9lbJ9NLpnd
YJhoLsat7VnlB2NO1gZ9NOaptq29fr96xnZ9NOAqHAAZ/3e/wUZxZTlFFEYXohBRkFzjq9gMKM/evwsfz0fHtg0SRdIlsWAeBfVHWZiOQCS0XkVeBK4HVV/YWI3ITzRrwbgTk4r0ydABwL3A8c6yaXW4BZOElnqYgsUtWkvkihPKJlcYCTvg+5I2DR9fDAyXDh/c6wWjMkeD1eSrNKKc0qZVrJtC7rqSr1rfXtSaR9aXYSSl1zHXXNdWyq38Tult00tDV0ea7ctNwOiSU/PZ+8tDzy0vOcdVre/rKI8jRvWiK+AjMEJCxZqOpWYKu7vVdEPgXKgbnAqW61BcBinGQxF3jMfe/2uyJSICIj3LqvquouADfhnA08nqjYO1OY5SfD7+HzzpIFwPRLoGQCPPsPsOA8mP51mP3vkDcimWGaAUxEyE/PJz89P6Y3DbYGW9ndspu65jpn3VLH7uaO67rmOrY3bmdN3RrqW+ppDDR2e84Mb8YBSSV6O8efQ05ajrN2t7P92eT4c0j3ptstsyEqKX0WIjIGOBJ4DyhzEwnANpzbVOAkki0RH6tyy7oqTyoRobzAfdaiK+VHwbffgv+7w5kSZNXzcPL34djvQHpO8oI1g0KaN41hWcMYljUs5s+0hdrY27qX+pZ66lvdpavt1nq2Nmx1Ek1rfbctmTCfx0eOf3/yCCeVbH82uWm5XZZn+bPI9mWT5c8iy5dFpi/TBgAcZBKeLEQkB3gW+GdVrY/8rURVVUTiMke6iFwLXAswenRinnsoL8zq/DZUpLRsOOM2OOpKeOUn8MZ/wt9/6zy8d+y3ISf2//jG9Jbf46coo4iijN73mwVCAfa27mVf2z4a2hrY27qXhrYGZ7+1gb1t7n7rPva17WuvV9NY0+EzbaG2mK6X4c0gy+8kjnASyfJlke3P7rS8fTtyHVXm89iYnURJ6DcrIn6cRPF7VX3OLd4uIiNUdat7m6nGLa8GRkV8vMItq2b/batw+eLoa6nqfGA+OO+ziOMfo115QSafVO+JrXLROLjk97DlA/j7XfDWb5ykMeV8ZxLCMSeDx14GZAYOn8fndKRnFPbrPK3B1k4TTGOg0VnanHVTW1OH/fB6Z/NOZ9vdbwm2xHxtv8dPpi+TDF8Gmb5MZ9ubQYbPWTK9mWT6O5Zl+bI61unscxFlQ7VFlMjRUAI8DHyqqr+JOLQImAf8wl0/H1H+jyLyBE4H9x43obwM/ExEwv+CzwRuTlTc3RldlMWuhlb2NreRmxHjaJRRR8PX/hdq18N7D8CKJ50pzgvHwLSLYeoFUDbNZrA1g0aaN40ib99aN50JhAI0BZr2JxU3sYTLGtoaOiSb5kAzzcFmmgJNNAWanP1AM7tbdh9Q1hpq7XU8fo+/06QSTlIZ3gzSvGlkeDNI96U7a2+6s0Tu+9Lby7uqm+5NHzBvmExky+JE4HLgYxFZ7pb9ECdJPCUiVwOfAV91j72IM2x2Hc7Q2W8CqOouEfkp8IFb77ZwZ3eyjS3JBmDDjgamj+rle7iLx8M5v3RuUX36J/jwf+Dt38Bbv4Ki8TBlrtPqGD7dWhzGRPB5fOSm5ZKblhv3cwdCAVqCLe0JpD2RdJJsmoPN+5NRuE5bE03B/XXqmutoCbbQHGymNdhKc6CZlmDLAc/h9EaaJ63LxBKZlMLHppVM48IJF8bxW3IkcjTU20BXvy7Pji5wR0Fd18W5HgEeiV90fTO+1E0WO/f1PlmE+TPhiK86y74dsPpPTkf43+5ykkd2KYyfDYfOhvGnQ3ZJHP8ExphIPo8Pn8dHtj87oddpC7XREohIIsFmWgItnSaWyGOd7bcEW9rrNgYa2xNUeAlo4OBKFoPR6OIsPOK0LOIipxRmXeUsDbVQ+TKsex0qX4EVTwACI6Y7ieOQE2HUMZAe/9+ujDGJ5ff48af5yeHgHRVpyaIX0n1eRhVlxS9ZRMouhhlfd5ZQELYuh3VvwLrX4O3/B2/9GsQDw4+AQ06A0c
c7S05p/GMxxpgolix6aVxJNut37EvsRTxe55mN8qPgC/8GLXthy/uw+R347B1Y8gi8e59Tt+AQKJ8JI4+EkTNh5AxrfRhj4s6SRS+NK83hnQ21hEKKx5OkEUzpuc6tqEPdrp5AC3y+3Ekeny+D6qWwcqFbWaBkops8joSyKTBsqtNyMcaYPrJk0UvjSrNpbgvx+Z4mKgpT9O5nXzqMPtZZwhp2wucfOkv1Mtjwptvv4copg7KpMGzK/nXpJKfD3RhjemDJopfGlTgdVBt2NKQuWXQmu8R5a9+EM5x9VWcW3O0roWYVbF8FNSvhg4cgEPGyn/xRzrDe4kOheIK7Hu+8/W+IPnxkjDmQJYteah8+u2Mfp0wcwJ3LIpA73FkOjRipHAzArg1O4thZCbXrnGXF09AS8XS6N815Cr1onNMvUjC645LZx6HDxpiDkiWLXirNTackJ50Pt+zmylQH0xdeH5ROdJZIqs6trHDyCC+7NsDGv0JrVKd+en5E8hjlTNGeN9JNUCOd2XbTEjt23RiTPJYseklEOGViCW+sriEYUrzJ6uRONBFnGG5OKRxyfMdjqtBUB7s3H7jUbXKTyd4Dz5me5ySR3OFuInG3s0sgq8R5ADG71HlZlN3yMmZAs2TRB6dOGsZzy6r5qGo3M0f3b9K1g4KI8wM9q8gZmtuZlr2wdxvUf+6s934O9Vthr7tsettZd/KmOHDPn13qJpFwIimBrGLILHSXAmedUQAZ+ZZgjEkiSxZ9cMqEEjwCi1fXDI1kEYv0XGcpmdB1nVAImnY5t7sadjhLY+3+7YadzrJ9JTTudFozXRLIyHMSR3QiySx0kklGHqTl7o+tfclz1j57a5wxsbJk0QcFWWkcObqQxWt38IMzJ6U6nIOHx+O2GkqAw3quH2xzkknTbmje7SSPJnfd2f6eqv37sUzc5k3vJJFELGk5Tr+LP8sZYhze7q7Ml24zCJtByZJFH82ePIz/+ssaHv3bRq48cWyqwxmcvP79I7p6Q9XpkG/Z59wea9nr9Km0RC717npfx/L6z936bnnkMONYiMdNJFmQlgX+bHed6Wz70sGXAf4MZ92+pDt1wsfDiz/iuC8zol7G/rp2O84kgSWLPrrqxLEs37ybW/+0ipc+2caRows5oiKfw4bnMqooC7/XphlPGZH9rQP6+Q70UAjaGp2ltcFdN0JbA7Q1HVjW2uiUt29HfK6pznn6PtAMbc37t3vxcp9Oefz7E4s33Umy3jTnNps3LaosvN1ZWUTdzsoO+Hxax7oev7v2OUtkmbW2DnrizAw+uMyaNUuXLFmS8OsEgiF+++Y6Xv+0htXb6mkLOt+l1yOMLspibEl2+3JIcRYj8jMZkZ9BdrrlaBMhFHISRlvT/gTSvkSWN0Ulm+aO5W1Nzq27YAsEWyHQ6qw7LG1O3U7L+pm0uiPeiAQSmUjcxBKZaGI55vEemKC6SlQe99rhGDze/WXt5V2V+aI+H2vZwfnLoogsVdVZnR6zZBEfLYEgq7fupbJmHxt37mPjzgY27GhgU20DzW2hDnXzMnxO4ijIYER+BiPyMynNTacoO42SnDSKstMpzkkjN92H2G9kJllUnRmPY042UYkp1OaUhYIR223Og6ChNmckXHi7w7FAx3pBt24ocOA5ejrGQPl5Jh0TiMcbkawiElaHMk9UUosu83ZMbB3KPPv3R86EmZf3LepukkUiX6v6CHAuUKOq09yyIuBJYAywCfiqqta5r2C9C+dNeY3Alaq6zP3MPODH7mn/U1UXJCrm/kj3eZk+quCAlyKFQsrW+maqdjWyrb6Zz3c3s3VPE1v3OOtPqvewc1/nr3ZM83ooyk6jKDuN4pw0CrPSyMv0kZfhJz/TT16mn7wM/wFluRk+uw1mek/E+W3e6wMO0gcqQ6HOk5EG3SQTdJcuytrLe1MWiDpndFlo/2cij2mwm7KA82cJtII2RRwPHXitcFnktfuYLLqTyPshjwK/BR6LKLsJeF
1VfyEiN7n7NwJzgAnucixwP3Csm1xuAWbh/MqwVEQWqWp3YyoHFI9HKC/IpLyg6wn7WgJBdjW0UruvlZ37Wtq3axtaqXX3dza0smVXI/XNAfY0tREMdf8bVFaal9wMH9npPrLTfGSleZ3tdB/ZaV6y0nxkp+9fZ0ftZ6X5yPR7yfB7yfB7yPB7Sfd5rKVjBjaPBzzpTj+KiatEvlb1ryIyJqp4LnCqu70AWIyTLOYCj7mvVn1XRApEZIRb99XwO7dF5FXgbODxRMWdCuk+r9ufEdsMsKpKY2uQ+uY26psC7rqtfX9P0/79xtYgja1B9rUEqNnbTOPOIA2tARpanHVv70KGE0eGb38SiUwoGT4vmWnOfrrPOZbpHk/zuYvXWaf7PPi9HcvC5Wle7/76Pg9+r5DmtWRlTKoku6e1TFW3utvbgDJ3uxzYElGvyi3rqnxIE5H2VsKI/L6fR1VpbgvR0BqgscVJKI2tARpagzS0BGhuC9LcFqK5LUhTW5CWtiDNgZBbHn0sxK6G1k7LW4OhnoOJUWRS6Wnb5xH8Pg9+j+DzOonJ7xV8Hmft93rwuevI8u7q+jwe0nzO2ucmMJ/XvZY38vPuZzxiCc4MCikblqOqKiJx640SkWuBawFGjx4dr9MOaiJCZprTEkjkq4GDIaUlEKQ1EKI1EKIl4CSQ8H7kduSxtqg6LYHI/eABnw8fb2gNsLspRCCotAaddSAYoi2ktLn7bUHn/D3czYsLn0ecpOQmGK/HSSJet9zrcY5F7rcf94ST1P79Dse9UWXtdT3t143c7+yzHWPZv++PjjUyhoiYvCJ4Io57JLzGEuUgkuxksV1ERqjqVvc2U41bXg2MiqhX4ZZVs/+2Vbh8cWcnVtX5wHxwRkPFN2zTH16PkJXmI2sAzq4RCiltocgEogRCIdoC0eUhAqGIOpF128s61g0EQ7S6dQMhpTUQIqRKIKQEg846EAp12A+G90PO+RoDAWc7XNa+dpNge93QAfUGgnAy8Xr2JxCf1+MmFJxj3o4JJ5xsfB6nLPLz4WTU4Zg3qk6nyQu8Hs/+a7rb3V0z+hzdXjP6HFHHnPMRkUg7lndMsgMz0SY7WSwC5gG/cNfPR5T/o4g8gdPBvcdNKC8DPxOR8ARMZwI3JzlmM4h5PEK6x8tge/RFNTq57E8y4UQUmZwCwah6wVDUZ/fvtwX374ciPhNOhKGQEgxBUJ16wRDOsaBTJ3zOUEjdOu6i+893wDE32UbWCV+z4+dx/4zhazqtx8g6PQ0OGSg8QieJZX/SPSDxuOWnHzaMH31pStzjSeTQ2cdxWgUlIlKFM6rpF8BTInI18BnwVbf6izjDZtfhDJ39JoCq7hKRnwIfuPVuC3d2G2O6JuLeZrKZQDrVnpS6S15RSSwYlfCiE1B7wmyvE5Uww9cLny+kBJUO19WIz4VC0XXVPQedlofPNTzGgTK9ZQ/lGWOMAbp/KM+e3DLGGNMjSxbGGGN6ZMnCGGNMjyxZGGOM6ZElC2OMMT2yZGGMMaZHliyMMcb0yJKFMcaYHg3Kh/JEZAfOE+K9VQLsjHM48TBQ44KBG9tAjQsGbmwDNS4YuLENtrgOUdXSzg4MymTRVyKypKunF1NpoMYFAze2gRoXDNzYBmpcMHBjG0px2W0oY4wxPbJkYYwxpkeWLDqan+oAujBQ44KBG9tAjQsGbmwDNS4YuLENmbisz8IYY0yPrGVhjDGmR5YsjDHG9MiShUtEzhaRNSKyTkRuSmEco0TkTRFZJSIrReR7bvmtIlItIsvd5ZwUxLZJRD52r7/ELSsSkVdFpNJdF/Z0ngTENSnie1kuIvUi8s+p+M5E5BERqRGRTyLKOv2OxHG3+29uhYjMTEFsvxSR1e71F4pIgVs+RkSaIr67B5IcV5d/dyJys/udrRGRsxIVVzexPRkR1yYRWe6WJ/M76+rnROL+ranqkF8AL7AeGAekAR8BU1IUywhgprudC6wFpgC3Av+a4u9pE1
ASVfZfwE3u9k3AHQPg73IbcEgqvjPgFGAm8ElP3xHOq4RfAgQ4DngvBbGdCfjc7TsiYhsTWS8FcXX6d+f+X/gISAfGuv9vvcmMLer4r4F/T8F31tXPiYT9W7OWheMYYJ2qblDVVuAJYG4qAlHVraq6zN3eC3wKlKcilhjNBRa42wuAC1IYC8BsYL2q9uUJ/n5T1b8C0e+J7+o7mgs8po53gQIRGZHM2FT1FVUNuLvvAhWJun5v4urGXOAJVW1R1Y3AOpz/v0mPTUQE+CrweKKu35Vufk4k7N+aJQtHObAlYr+KAfADWkTGAEcC77lF/+g2IR9Jxe0eQIFXRGSpiFzrlpWp6lZ3extQloK4Il1Cx/+8qf7OoOvvaKD9u7sK57fPsLEi8qGI/J+InJyCeDr7uxtI39nJwHZVrYwoS/p3FvVzImH/1ixZDFAikgM8C/yzqtYD9wPjgRnAVpzmb7KdpKozgTnAdSJySuRBddq7KRuLLSJpwPnA027RQPjOOkj1d9QVEfkREAB+7xZtBUar6pHAD4A/iEheEkMacH93nbiUjr+YJP076+TnRLt4/1uzZOGoBkZF7Fe4ZSkhIn6cfwC/V9XnAFR1u6oGVTUE/I4ENr27oqrV7roGWOjGsD3cnHXXNcmOK8IcYJmqboeB8Z25uvqOBsS/OxG5EjgXuMz9AYN7m6fW3V6K0zcwMVkxdfN3N1C+Mx/wZeDJcFmyv7POfk6QwH9rliwcHwATRGSs+9vpJcCiVATi3gd9GPhUVX8TUR55f/FC4JPozyY4rmwRyQ1v43SMfoLzPc1zq80Dnk9mXFE6/KaX6u8sQlff0SLgCnekynHAnohbCEkhImcDNwDnq2pjRHmpiHjd7XHABGBDEuPq6u9uEXCJiKSLyFg3rveTFVeELwKrVbUqXJDM76yrnxMk8t9aMnruD4YFZ7TAWpzfBn6UwjhOwmk6rgCWu8s5wP8AH7vli4ARSY5rHM4olI+AleHvCCgGXgcqgdeAohR95zgyVgAAA29JREFUb9lALZAfUZb07wwnWW0F2nDuC1/d1XeEMzLlXvff3MfArBTEtg7nXnb439oDbt2L3L/n5cAy4Lwkx9Xl3x3wI/c7WwPMSfZ35pY/Cnw7qm4yv7Oufk4k7N+aTfdhjDGmR3YbyhhjTI8sWRhjjOmRJQtjjDE9smRhjDGmR5YsjDHG9MiShTGAiPxcRE4TkQtE5OYu6twqIv/qbl8pIiPjeP1TReSEiP1vi8gV8Tq/Mf1lycIYx7E4E+l9AfhrDPWvBHqVLNynfrtyKtCeLFT1AVV9rDfnNyaR7DkLM6SJyC+Bs9g/3fV4YCPwjKreFlX3VmAfzlTtj+JMl9AEHI8zPfRvgBxgJ3Clqm4VkcU4D0ydhPOA11rgxzhT4dcClwGZOIkqCOwArseZPXefqv5KRGYADwBZboxXqWqde+73gNOAApwHxt4SkanAf7vX8AAXacfJ7sz/b++OXaOIgjiOf4dI2tikS2+KcARNgmdlZ2ElBDtJqaWNWliYxkJERdNYyWkhBsVCwUYIBAURDMRgLMU/QAgiIhbys5h3ZDlyvoSz29+nuWX33u5ecTfMe7czdmDOLKzVJF0inxjuAfPAlqTOYKAYGPMM+EDWUpolC/CtAIuSjgEPgOuNIeOS5iTdAt4Cx5XF5p4AlyV9JYPBHUmzkt4MXPIRcEVSh3z69lrj2CFJC8DFxv4LwN1yb3Pkk8dmI/lXWmzWFkfJMibTZF+AgzoCzACvs2QPY2SJiL7VxvYUsFpqH42TWcxQETEBHJa0XnY9ZLeqLkC/gNwG2XwH4B1wNSKmgOfOKux/cLCw1irTOz3yB/wbOc0TpU1mV9Kv/Z4K2JbUHXL8Z2N7Bbgt6UVEnCQ7wo3id3n9Q/k+S3ocEe+B08CriDgvaW3E61jLeRrKWkvSZpmq6bekXANOlamgWqD4QbazhCxoNxkRXcjS0WXdYC8T7JaGXmrsb56veY/fgZ
1GI51zwPrg+5pKxdMvku6RVUc7lc9iVuVgYa0WEZPAjrJvwrSkz/sc2gPulyxkDFgEbkTER3JB+8SQccvA04jYILOZvpfAmYjY3KPD2hJwMyK2yGZAQ9dTirPAp3JvM+Sah9lI/G8oMzOrcmZhZmZVDhZmZlblYGFmZlUOFmZmVuVgYWZmVQ4WZmZW5WBhZmZVfwHZr0XVA00ITQAAAABJRU5ErkJggg==\n", 502 | "text/plain": [ 503 | "
" 504 | ] 505 | }, 506 | "metadata": { 507 | "needs_background": "light" 508 | }, 509 | "output_type": "display_data" 510 | } 511 | ], 512 | "source": [ 513 | "from matplotlib import pyplot as plt\n", 514 | "plt.plot(range(1,len(funV2)+1), funV2, label='alpha = 1e-5')\n", 515 | "plt.plot(range(1,len(funV3)+1), funV3, label='alpha = 1e-6')\n", 516 | "plt.plot(range(1,len(funV4)+1), funV4, label='alpha = 1e-7')\n", 517 | "plt.legend()\n", 518 | "plt.xlabel(\"# Iterations\")\n", 519 | "plt.ylabel(\"Function Value\")" 520 | ] 521 | } 522 | ], 523 | "metadata": { 524 | "kernelspec": { 525 | "display_name": "Python 2", 526 | "language": "python", 527 | "name": "python2" 528 | }, 529 | "language_info": { 530 | "codemirror_mode": { 531 | "name": "ipython", 532 | "version": 3 533 | }, 534 | "file_extension": ".py", 535 | "mimetype": "text/x-python", 536 | "name": "python", 537 | "nbconvert_exporter": "python", 538 | "pygments_lexer": "ipython3", 539 | "version": "3.7.3" 540 | } 541 | }, 542 | "nbformat": 4, 543 | "nbformat_minor": 2 544 | } 545 | -------------------------------------------------------------------------------- /algorithms/ProxGradientDescent.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "### Dataset\n", 8 | "Lets Load the dataset. 
We shall use the following datasets:\n", 9 | "Features are in: \"sido0_train.mat\"\n", 10 | "Labels are in: \"sido0_train.targets\"" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 191, 16 | "metadata": {}, 17 | "outputs": [ 18 | { 19 | "name": "stdout", 20 | "output_type": "stream", 21 | "text": [ 22 | "(12678, 4932)\n" 23 | ] 24 | } 25 | ], 26 | "source": [ 27 | "from scipy.io import loadmat\n", 28 | "import numpy as np\n", 29 | "\n", 30 | "X = loadmat(r\"sido0_matlab/sido0_train.mat\")\n", 31 | "y = np.loadtxt(r\"sido0_matlab/sido0_train.targets\")\n", 32 | "\n", 33 | "# Statistics of the Dense Format of X\n", 34 | "X = X['X'].todense()\n", 35 | "print(X.shape)" 36 | ] 37 | }, 38 | { 39 | "cell_type": "markdown", 40 | "metadata": {}, 41 | "source": [ 42 | "### Logistic Regression Definition\n", 43 | "Lets define now L1 regularized Logistic Loss\n" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 192, 49 | "metadata": {}, 50 | "outputs": [], 51 | "source": [ 52 | "def LogisticLoss(w, X, y, lam):\n", 53 | " # Computes the cost function for all the training samples\n", 54 | " m = X.shape[0]\n", 55 | " n = X.shape[1]\n", 56 | " Xw = np.dot(X,w)\n", 57 | " yT = y.reshape(-1,1)\n", 58 | " yXw = np.multiply(yT,Xw)\n", 59 | " f = np.sum(np.logaddexp(0,-yXw)) + lam*np.sum(np.abs(w))\n", 60 | " gMul = 1/(1 + np.exp(yXw))\n", 61 | " ymul = -1*np.multiply(yT, gMul)\n", 62 | " g = np.dot(ymul.reshape(1,-1),X) + lam*np.sign(w).reshape(1,-1)\n", 63 | " g = g.reshape(-1,1)\n", 64 | " return [f, g] " 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": 193, 70 | "metadata": {}, 71 | "outputs": [ 72 | { 73 | "name": "stdout", 74 | "output_type": "stream", 75 | "text": [ 76 | "8787.719955138986\n", 77 | "[[503.5]\n", 78 | " [292. ]\n", 79 | " [ 21.5]\n", 80 | " ...\n", 81 | " [556.5]\n", 82 | " [118. 
]\n", 83 | " [194.5]]\n" 84 | ] 85 | } 86 | ], 87 | "source": [ 88 | "[nSamples,nVars] = X.shape\n", 89 | "w = np.zeros((nVars,1))\n", 90 | "[f,g] = LogisticLoss(w, X, y, 1)\n", 91 | "print(f)\n", 92 | "print(g)" 93 | ] 94 | }, 95 | { 96 | "cell_type": "markdown", 97 | "metadata": {}, 98 | "source": [ 99 | "### Reinvoking Gradient Descent Armiojo\n", 100 | "Lets invoke the final version of Armijo Line Search GD" 101 | ] 102 | }, 103 | { 104 | "cell_type": "code", 105 | "execution_count": 194, 106 | "metadata": {}, 107 | "outputs": [], 108 | "source": [ 109 | "from numpy import linalg as LA\n", 110 | "\n", 111 | "def gd(funObj,w,maxEvals,alpha,X,y,lam, verbosity, freq):\n", 112 | " [f,g] = funObj(w,X,y,lam)\n", 113 | " funEvals = 1\n", 114 | " funVals = []\n", 115 | " while(1):\n", 116 | " [f,g] = funObj(w,X,y,lam)\n", 117 | " optCond = LA.norm(g, np.inf)\n", 118 | " if ((verbosity > 0) and (funEvals % freq == 0)):\n", 119 | " print(funEvals,alpha,f,optCond)\n", 120 | " w = w - alpha*g\n", 121 | " funEvals = funEvals+1\n", 122 | " if (optCond < 1e-2):\n", 123 | " break\n", 124 | " if (funEvals > maxEvals):\n", 125 | " break\n", 126 | " funVals.append(f)\n", 127 | " return funVals\n", 128 | "\n", 129 | "def gdArmijo(funObj,w,maxEvals,alpha,gamma,X,y,lam, verbosity, freq):\n", 130 | " [f,g] = funObj(w,X,y,lam)\n", 131 | " funEvals = 1\n", 132 | " funVals = []\n", 133 | " f_old = f\n", 134 | " g_old = g\n", 135 | " funVals.append(f)\n", 136 | " alpha = 1\n", 137 | " numBackTrack = 0\n", 138 | " while(1):\n", 139 | " wp = w - alpha*g\n", 140 | " [fp,gp] = funObj(wp,X,y,lam)\n", 141 | " funVals.append(f)\n", 142 | " funEvals = funEvals+1\n", 143 | " backtrack = 0\n", 144 | " while fp > f - gamma*alpha*np.dot(g.T, g):\n", 145 | " alpha = alpha*alpha*np.dot(g.T, g)[0,0]/(2*(fp + np.dot(g.T, g)[0,0]*alpha - f))\n", 146 | " wp = w - alpha*g\n", 147 | " [fp,gp] = funObj(wp,X,y,lam)\n", 148 | " funVals.append(f)\n", 149 | " funEvals = funEvals+1\n", 150 | " numBackTrack = 
numBackTrack + 1\n", 151 | " if funEvals > 2:\n", 152 | " alpha = min(1,2*(f_old - f)/np.dot(g.T, g)[0,0])\n", 153 | " f_old = f\n", 154 | " g_old = g\n", 155 | " w = wp\n", 156 | " f = fp\n", 157 | " g = gp\n", 158 | " optCond = LA.norm(g, np.inf)\n", 159 | " if ((verbosity > 0) and (funEvals % freq == 0)):\n", 160 | " print(funEvals,alpha,f,optCond)\n", 161 | " if (optCond < 1e-2):\n", 162 | " break\n", 163 | " if (funEvals >= maxEvals):\n", 164 | " break\n", 165 | " return (funVals,numBackTrack)" 166 | ] 167 | }, 168 | { 169 | "cell_type": "markdown", 170 | "metadata": {}, 171 | "source": [ 172 | "### Proximal Gradient Descent\n", 173 | "Now lets implement Proximal" 174 | ] 175 | }, 176 | { 177 | "cell_type": "code", 178 | "execution_count": 181, 179 | "metadata": {}, 180 | "outputs": [], 181 | "source": [ 182 | "from numpy import linalg as LA\n", 183 | "\n", 184 | "mu = 1\n", 185 | "def L1Prox(x, lam):\n", 186 | " maxx = np.maximum(np.abs(x) - lam*mu, 0)\n", 187 | " return np.multiply(maxx, np.sign(x))\n", 188 | "\n", 189 | "def gdProx(funObj,w,maxEvals,alpha,X,y,lam, verbosity, freq):\n", 190 | " [f,g] = funObj(w,X,y,lam)\n", 191 | " funEvals = 1\n", 192 | " funVals = []\n", 193 | " while(1):\n", 194 | " [f,g] = funObj(w,X,y,0)\n", 195 | " optCond = LA.norm(g, np.inf)\n", 196 | " if ((verbosity > 0) and (funEvals % freq == 0)):\n", 197 | " print(funEvals,alpha,f,optCond)\n", 198 | " w = L1Prox(w - alpha*g,lam*alpha)\n", 199 | " funEvals = funEvals+1\n", 200 | " if (optCond < 1e-2):\n", 201 | " break\n", 202 | " if (funEvals > maxEvals):\n", 203 | " break\n", 204 | " funVals.append(f)\n", 205 | " return funVals\n", 206 | "\n", 207 | "def gdProxArmijo(funObj,w,maxEvals,alpha,gamma,X,y,lam, verbosity, freq):\n", 208 | " [f,g] = funObj(w,X,y,0)\n", 209 | " funEvals = 1\n", 210 | " funVals = []\n", 211 | " f_old = f\n", 212 | " g_old = g\n", 213 | " funVals.append(f)\n", 214 | " alpha = 1\n", 215 | " numBackTrack = 0\n", 216 | " while(1):\n", 217 | " wp = L1Prox(w 
- alpha*g,alpha*lam)\n", 218 | " [fp,gp] = funObj(wp,X,y,0)\n", 219 | " funVals.append(f)\n", 220 | " funEvals = funEvals+1\n", 221 | " backtrack = 0\n", 222 | " while fp > f - gamma*alpha*np.dot(g.T, g):\n", 223 | " alpha = alpha*alpha*np.dot(g.T, g)[0,0]/(2*(fp + np.dot(g.T, g)[0,0]*alpha - f))\n", 224 | " wp = L1Prox(w - alpha*g,lam*alpha)\n", 225 | " [fp,gp] = funObj(wp,X,y,0)\n", 226 | " funVals.append(f)\n", 227 | " funEvals = funEvals+1\n", 228 | " numBackTrack = numBackTrack + 1\n", 229 | " if funEvals > 2:\n", 230 | " alpha = min(1,2*(f_old - f)/np.dot(g.T, g)[0,0])\n", 231 | " f_old = f\n", 232 | " g_old = g\n", 233 | " w = wp\n", 234 | " f = fp\n", 235 | " g = gp\n", 236 | " optCond = LA.norm(g, np.inf)\n", 237 | " if ((verbosity > 0) and (funEvals % freq == 0)):\n", 238 | " print(funEvals,alpha,f,optCond)\n", 239 | " if (optCond < 1e-2):\n", 240 | " break\n", 241 | " if (funEvals >= maxEvals):\n", 242 | " break\n", 243 | " return (funVals,numBackTrack)" 244 | ] 245 | }, 246 | { 247 | "cell_type": "code", 248 | "execution_count": 185, 249 | "metadata": {}, 250 | "outputs": [ 251 | { 252 | "name": "stdout", 253 | "output_type": "stream", 254 | "text": [ 255 | "Gradient Descent with Fixed Step Size\n", 256 | "10 1e-05 25483.458111208987 1318.165161847604\n", 257 | "20 1e-05 28174.671349879456 1271.76399300516\n", 258 | "30 1e-05 34043.01227034802 1404.3636619139188\n", 259 | "40 1e-05 30346.176427273946 1351.1312121214416\n", 260 | "50 1e-05 35524.90509051614 1410.4548179904532\n", 261 | "60 1e-05 32199.107914998018 1387.2329308605804\n", 262 | "70 1e-05 31392.224061468136 1380.106049412477\n", 263 | "80 1e-05 32667.859332787804 1398.7549782211106\n", 264 | "90 1e-05 30788.29140257608 1368.8791051250455\n", 265 | "100 1e-05 32545.655986155758 1395.7281700648925\n", 266 | "110 1e-05 29787.699474075103 1343.9294118117182\n", 267 | "120 1e-05 29336.54378554532 1311.895386403687\n", 268 | "130 1e-05 31920.301586768965 1387.4926194929058\n", 269 | "140 1e-05 
33292.894791505896 1403.806952771307\n", 270 | "150 1e-05 29601.687563686126 1344.4449250864093\n", 271 | "160 1e-05 32629.940009878974 1399.419798719683\n", 272 | "170 1e-05 30101.55044672699 1347.9862901168767\n", 273 | "180 1e-05 33724.27042445242 1405.6284714989056\n", 274 | "190 1e-05 29760.52240155365 1345.8794988936215\n", 275 | "200 1e-05 32007.02668790054 1389.405963235599\n", 276 | "199\n", 277 | "Gradient Descent with Backtracking Line Search\n" 278 | ] 279 | }, 280 | { 281 | "name": "stderr", 282 | "output_type": "stream", 283 | "text": [ 284 | "/usr/local/lib/python3.7/site-packages/ipykernel_launcher.py:9: RuntimeWarning: overflow encountered in exp\n", 285 | " if __name__ == '__main__':\n" 286 | ] 287 | }, 288 | { 289 | "name": "stdout", 290 | "output_type": "stream", 291 | "text": [ 292 | "20 1.4088759485544872e-07 7077.719863352558 1860.6086697728492\n", 293 | "30 8.313546216299688e-09 6629.862512634738 1845.1737521321706\n", 294 | "40 3.3455937772699538e-09 6556.705269974167 1843.1842969400523\n", 295 | "50 1.2557022591437148e-10 6548.129271462571 1842.6636660832119\n", 296 | "60 3.355302803325746e-10 6544.37748462888 1842.4801207285454\n", 297 | "70 4.9051151424008995e-11 6543.443175067647 1842.4497824196078\n", 298 | "80 1.0325039536393731e-11 6543.274002088672 1842.4435246401163\n", 299 | "90 2.13335581249387e-12 6543.237870068007 1842.4423131138674\n", 300 | "100 3.5325304179109113e-13 6543.2336105628365 1842.4421202593758\n", 301 | "110 7.950098487337587e-14 6543.232188062712 1842.4420818232647\n", 302 | "120 1.7868879958084857e-14 6543.231896107758 1842.442071783773\n", 303 | "130 0.0 6543.2318614986125 1842.442071064147\n", 304 | "140 0.0 6543.23185715939 1842.4420709112064\n", 305 | "150 0.0 6543.231855210681 1842.442070843923\n", 306 | "160 0.0 6543.231854791848 1842.442070830396\n", 307 | "170 0.0 6543.231854743397 1842.4420708281405\n", 308 | "180 0.0 6543.231854741502 1842.442070828085\n", 309 | "190 0.0 6543.231854731518 
1842.442070827714\n", 310 | "200 0.0 6543.231854729264 1842.4420708276539\n", 311 | "200\n", 312 | "Number of Backtrackings = 14\n" 313 | ] 314 | } 315 | ], 316 | "source": [ 317 | "[nSamples,nVars] = X.shape\n", 318 | "w = np.zeros((nVars,1))\n", 319 | "print(\"Gradient Descent with Fixed Step Size\")\n", 320 | "funV1 = gd(LogisticLoss,w,200,1e-5,X,y,1000,1,10)\n", 321 | "print(len(funV1))\n", 322 | "print(\"Gradient Descent with Backtracking Line Search\")\n", 323 | "(funV2,numBackTrack) = gdArmijo(LogisticLoss,w,200,1,1e-4,X,y,1000,1,10)\n", 324 | "print(len(funV2))\n", 325 | "print(\"Number of Backtrackings = \" + str(numBackTrack))" 326 | ] 327 | }, 328 | { 329 | "cell_type": "code", 330 | "execution_count": 186, 331 | "metadata": {}, 332 | "outputs": [ 333 | { 334 | "name": "stdout", 335 | "output_type": "stream", 336 | "text": [ 337 | "Proximal Gradient Descent with Fixed Step Size\n", 338 | "10 1e-05 2636.788990359461 1355.3541907312065\n", 339 | "20 1e-05 2522.4094780052596 1241.1177191366498\n", 340 | "30 1e-05 2476.492028975414 1198.6590906761421\n", 341 | "40 1e-05 2445.8466936273685 1170.270448987063\n", 342 | "50 1e-05 2418.0767937525343 1141.3717745909807\n", 343 | "60 1e-05 2397.552084345081 1119.5245640500075\n", 344 | "70 1e-05 2380.2692054497343 1100.2119196972003\n", 345 | "80 1e-05 2360.213428295232 1078.5976530015273\n", 346 | "90 1e-05 2351.6236064501545 1069.13244346509\n", 347 | "100 1e-05 2349.1573147351933 1065.8799951955393\n", 348 | "110 1e-05 2345.168520074747 1060.811929376459\n", 349 | "120 1e-05 2341.0091688481543 1055.3118032811078\n", 350 | "130 1e-05 2338.326860674283 1051.611196823867\n", 351 | "140 1e-05 2337.748939954794 1049.9987710632922\n", 352 | "150 1e-05 2332.8626368581536 1043.762821189623\n", 353 | "160 1e-05 2326.6239347212163 1036.565160878359\n", 354 | "170 1e-05 2325.0562761959272 1034.1551077883082\n", 355 | "180 1e-05 2325.022897948621 1033.2962336178455\n", 356 | "190 1e-05 2325.3024594330054 
1032.750999556637\n", 357 | "200 1e-05 2322.6750791997874 1029.1253458192427\n", 358 | "199\n" 359 | ] 360 | } 361 | ], 362 | "source": [ 363 | "[nSamples,nVars] = X.shape\n", 364 | "w = np.zeros((nVars,1))\n", 365 | "print(\"Proximal Gradient Descent with Fixed Step Size\")\n", 366 | "(funV3) = gdProx(LogisticLoss,w,200,1e-5,X,y,1000,1,10)\n", 367 | "print(len(funV3))\n", 368 | "#print(\"Proximal Gradient Descent with Backtracking Line Search\")\n", 369 | "#(funV1,numBackTrack) = gdProxArmijo(LogisticLoss,w,200,1,1e-4,X,y,1000,1,10)\n", 370 | "#print(len(funV1))\n", 371 | "#print(\"Number of Backtrackings = \" + str(numBackTrack))" 372 | ] 373 | }, 374 | { 375 | "cell_type": "code", 376 | "execution_count": 190, 377 | "metadata": {}, 378 | "outputs": [ 379 | { 380 | "data": { 381 | "text/plain": [ 382 | "Text(0, 0.5, 'Function Value')" 383 | ] 384 | }, 385 | "execution_count": 190, 386 | "metadata": {}, 387 | "output_type": "execute_result" 388 | }, 389 | { 390 | "data": { 391 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAZEAAAEGCAYAAACkQqisAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjIsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8li6FKAAAgAElEQVR4nOydd5xU1d3/32f6zPa+CwssVaS4NBE1gCWixppYorEQHzXGkuQxyfPDlCca9XkSTfHRxGiKPbYkRiXGroiCgoKgFEHqwgLb22yZ2Snn98ctc2dmZ5mFXVjc83699rWzZ+/cOffOvedzvuV8r5BSolAoFArFgWA73B1QKBQKxZGLEhGFQqFQHDBKRBQKhUJxwCgRUSgUCsUBo0REoVAoFAeM43B34FBTWFgoKyoqDnc3FAqF4ohh9erVDVLKop7+N+REpKKiglWrVh3ubigUCsURgxCiKtX/lDtLoVAoFAeMEhGFQqFQHDBKRBQKhUJxwAy5mIhCoRg8hEIhqqurCQQCh7srCsDj8VBeXo7T6Uz7PUpEFArFYaO6upqsrCwqKioQQhzu7gxppJQ0NjZSXV3N6NGj036fcmcpFIrDRiAQoKCgQAnIIEAIQUFBQZ+tQiUiCoXisKIEZPBwIN+FEpGDZMX2Rj6v9R/ubigUCsVhQYnIQfLfL6zn7lc3He5uKBSKg6C2tpZvfOMbjBkzhpkzZ3L88cfz/PPP884775CTk8P06dM56qijmDdvHi+99JL5vttuu43hw4czbdo0pk2bxi233ALANddcw8aNGw+6X48++ig33XTTQe9nIFGB9YOkOxKlqrHzcHdDoVAcIFJKzj//fBYuXMhTTz0FQFVVFYsXLyYvL4+5c+eawrF27VrOP/98vF4vp556KgA333wzP/zhD+P2+Ze//OXQHsRhRFkiB0kkKqlu7kI9IVKhODJ5++23cblcfPvb3zbbRo0axXe+852kbadNm8bPfvYzfv/73/e6z5NOOolVq1ZRVVXF+PHjaWhoIBqNMnfuXF5//XUA/vrXvzJ79mymTZvGddddRyQSAeCRRx5hwoQJzJ49m+XLl/fjkQ4MyhI5SCJRSVcoQkN7N0VZ7sPdHYXiiOXn/9rAxr1t/brPScOyufWcyb1us2HDBmbMmJH2PmfMmMGvfvUr8+977rmHv/71rwDcddddnH766eb/Ro0axaJFi7j++uuZPXs2kyZNYsGCBXz22Wc8++yzLF++HKfTyQ033MCTTz7Jaaedxq233srq1avJycnh5JNPZvr06X086kOLEpGDJBLVLJDdzZ1KRBSKLwA33ngjy5Ytw+VyxYmFQaLXoSd3lpVrrrmGv//97zz44IOsXbsWgLfeeovVq1dz7LHHAtDV1UVxcTErV67kpJNOoqhIK5j79a9/nc8//7y/Dm1AUCJykET1C2p3UyczRuYd5t4oFEcu+7MYBorJkyfz3HPPmX/ff//9NDQ0MGvWrB63X7NmDUcffXTa++/s7KS6uhqA9vZ2srKykFKycOFCfvGLX8Rt+8ILLxzAERxeVEzkIDEskermrsPcE4VCcSCccsopBAIBHnjgAbOts7PnZJlPP/2UO+64gxtvvDHt/S9atIjLLruM22+/nWuvvRaAU089lX/84x/U1dUB0NTURFVVFccddxxLly6lsbGRUCjE3//+94M4skODskQOknA0ZokoFIojDyEEL7zwAjfffDN33303RUVFZGRkcNdddwHw3nvvMX36dDo7OykuLua+++4zM7P2x9KlS/noo49Yvnw5drud5557jkceeYSrrrqKO++8kwULFhCNRnE6ndx///3MmTOH2267jeOPP57c3FymTZs2kIfeL4ihllU0a9Ys2Z8PpZr8s1fp6I5w4rgCnrxmTr/tV6EYCnz22Wd9cg0pBp6evhMhxGopZY/+PeXOOkgiZkxEubMUCsXQQ4nIQRKNar/3tHQRjkQPb2cUCoX
iEKNE5CAJR6MUZrqIRCX7WtUzERQKxdBCichBIKUkKqEsxwtAQ3vwMPdIoVAoDi1KRA6Aix/8gAfe2YaemEWuT3sKWHswfBh7pVAoFIceleJ7AGyu9VNR6DPXiGR7dREJKBFRKBRDC2WJHADBcIRINLbQMEcXEb+yRBSKI5L+KAU/ceJErr/+eqLRvifY7Ny5kylTpiS1r127lpdffvmgjs2KURiyP1Ei0keklARCUSLRqJnem6ssEYXiiMUoBT9v3jy2b9/O6tWreeaZZ8xSJXPnzmXNmjVs3ryZ++67j5tuuom33nrLfP/NN9/M2rVr2bhxI+vWrWPp0qX91rfeRCQcHhzjjRKRPtKtp/GGozLJElExEYXiyKO/SsF3d3cTCATIy9Nq6P35z3/m2GOPpbKykgsuuMAspVJbW8tXv/pVKisrqays5P3334/bz/bt25k+fTorV67kZz/7Gc8++yzTpk3j2Wef5bbbbuOKK67gxBNP5IorrmDnzp3MnTuXGTNmMGPGjLh93XXXXUydOpXKykrzYVkG0WiUb37zm/z0pz898BOnM+AxESGEHVgF7JFSni2EGA08AxQAq4ErpJTdQgg38DgwE2gEvi6l3Knv40fA1UAE+K6U8jW9/QzgXsAO/EVK+cuBPp5ASBORqIyJiMthw+O04Q+EBvrjFYovLq/cAjXr+nefpVPhzN6Hhf4qBV9VVcWZZ55plir52te+ZtbK+ulPf8pDDz3Ed77zHb773e8yf/58nn/+eSKRCO3t7TQ3NwOwefNmLrnkEh599FEqKyu5/fbbWbVqlSlat912Gxs3bmTZsmV4vV46Ozt544038Hg8bNmyhUsvvZRVq1bxyiuv8OKLL7Jy5Up8Ph9NTU1mf8PhMJdddhlTpkzhJz/5SdrHnYpDYYl8D/jM8vddwD1SynFAM5o4oP9u1tvv0bdDCDEJuASYDJwB/EEIYdfF6X7gTGAScKm+7YASDGsPjglHYiJitwmyPE5liSgUXwBuvPFGKisrzTLtifRUCn7t2rXU1dXR0dHBM888A8D69euZO3cuU6dO5cknn2TDhg2AZvlcf/31ANjtdnJycgCor6/nvPPO48knn6SysjJl/84991y8Xm1ZQSgU4tprr2Xq1KlcdNFF5iN533zzTa666ip8Ph8A+fn55vuvu+66fhMQGGBLRAhRDpwF/A/wfSGEAE4BvqFv8hhwG/AAcJ7+GuAfwO/17c8DnpFSBoEdQoitwGx9u61Syu36Zz2jb3vwDzbuhaBuiUSi0iwDb7cJstwO/ComolAcOPuxGAaK/ioF73Q6OeOMM3j33Xe55JJL+OY3v8kLL7xAZWUljz76KO+8806v/cjJyWHkyJEsW7aMSZNSz4czMjLM1/fccw8lJSV88sknRKNRPB7Pfo4WTjjhBJYsWcIPfvCDtLbfHwNtifwf8P8AI12hAGiRUhqjbTUwXH89HNgNoP+/Vd/ebE94T6r2AcWwRCIWd5ZdCDI9DmWJKBRHIP1VCl5KyfLlyxk7diwAfr+fsrIyQqEQTz75pLndqaeean5WJBKhtbUVAJfLxfPPP8/jjz9uPus9KysLv9+fsu+tra2UlZVhs9l44oknzEfsnnbaaTzyyCPmcVjdWVdffTVf+cpXuPjii/slOD9gIiKEOBuok1KuHqjP6ENfviWEWCWEWFVfX39Q+wpYLBFDRGw2QabbobKzFIojEKMU/NKlSxk9ejSzZ89m4cKFSaXgjzrqKG688cakUvD33HMP06ZNY8qUKUQiEW644QYA7rjjDo477jhOPPFEJk6caG5/7733smTJEqZOncrMmTNNFxRoVsZLL73EPffcw+LFizn55JPZuHGjGVhP5IYbbuCxxx6jsrKSTZs2mVbKGWecwbnnnsusWbOYNm0av/71r+Pe9/3vf5/p06dzxRVXHFBKctz5G6hS8EKIXwBXAGHAA2QDzwOnA6VSyrAQ4njgNinl6UKI1/TXHwghHEA
NUATcAiCl/IW+39eIub1uk1Kerrf/yLpdKg62FPzqqiYueOADjh9TwC++NpWTfv0Ov7moktc21LCrqZNX/3PeAe9boRhqqFLwg49BUwpeSvkjKWW5lLICLTD+tpTyMmAJcKG+2ULgRf31Yv1v9P+/LTWFWwxcIoRw65ld44EPgY+A8UKI0UIIl/4ZiwfqeAzMmIiU5joRh11zZ6mYiEKhGGocjrIni4BnhBB3AmuAh/T2h4An9MB5E5ooIKXcIIT4G1rAPAzcKKWMAAghbgJeQ0vxfVhKuWGgOx8wYiJRSdRwZwktsK5iIgqFYqhxSERESvkO8I7+ejux7CrrNgHgohTv/x+0DK/E9peB/qsJkAaGJRKOSvPRuNYUXyklWlKZQqFQfPFRK9b7SDCsLzaMxq8TyfQ4iEQlXaHI4eyeQqFQHFKUiPSRgC4SYes6EaFlZ4Gqn6VQKIYWSkT6iGGJRKLRhBXrmoioSr4KhWIooUSkjxiWSE/rREBZIgrFkYbdbjfXeVx00UUpFxr2lVTl5YH9lpg/klAi0kdilkhMRBxWEVGWiEJxROH1elm7di3r16/H5XLx4IMPxv1fStnnBXn7Ky8P+y8xf6SgRKSPWGMixjoRm172BFBrRRSKI5i5c+eydetWdu7cyVFHHcWVV17JlClT2L17N08//TRTp05lypQpLFq0CICqqirGjx9PQ0MD0WiUuXPn8vrrr/epvDz0XmJ+sKMej9tHrJaIMTmx2wTZHv3phqocvEJxQNz14V1satrUr/ucmD+RRbMXpbVtOBzmlVde4YwzzgBgy5YtPPbYY8yZM4e9e/eyaNEiVq9eTV5eHgsWLOCFF17g/PPPZ9GiRVx//fXMnj2bSZMmsWDBAu67774+lZeH5BLzRwrKEukjQctiw7CuInYbyp2lUByhdHV1MW3aNGbNmsXIkSO5+mrt6RSjRo1izpw5AHz00UecdNJJFBUV4XA4uOyyy3j33XcBuOaaa2hra+PBBx9MqlFlsL/y8pBcYv5IQVkifSTQYyl4GxkqsK5QHBTpWgz9jRETScRacr03Ojs7zVhHe3s7WVlZfS4vD6lLzA92lCXSRwx3lvZ4XK3NLgQuhw23w6YsEYXiC8js2bNZunQpDQ0NRCIRnn76aebPnw/AokWLuOyyy7j99tvNJxn2pbw89F5ifrCjLJE+YgTWtRXrmorYdCnO8jjUOhGF4gtIWVkZv/zlLzn55JORUnLWWWdx3nnnsXTpUj766COWL1+O3W7nueee45FHHuGqq67ihRde4Oabb+buu++mqKiIjIwMs7w8xErMd3Z2UlxcnFRi/khBiUgf6dESsWm1stQzRRSKI4/29vaktoqKCtavXx/Xdumll3LppZfGtc2fP58VK1aYf//zn/80X5eVlZmPyk3kpJNOMh9GdaSj3Fl9JG6xoYytEwH0cvAqO0uhUAwdlIj0ETPFV8aXggfwOOzm/xUKhWIooESkjwRD1hTfWO0sAJfDRrcSEWrbAuo8KNLmSE1t/SJyIN+FEpE+YrU0QnpQxLBEXA4b3ZGhPXiGIlG+/JulPP3hrsPdFcURgMfjobGxUQnJIEBKSWNjIx6Pp0/vU4H1PhK0PC/EmG077JqIOO3KEvEHwviDYfa1Bg53VxRHAOXl5VRXV1NfX3+4u6JAE/Xy8vI+vUeJSB8JWETCEAz7fiyROn+ADXvaGJbrZUJJ5hf6yYdtXVpiQWe3ylJT7B+n08no0aMPdzcUB4ESkT4SDEUQAqTEFAybHhNxp7BEfvnyJv65Zg8AD14+kzOmlB66Dh9i2vTstI6gesKjQjEUUDGRPhIIR/E57UAsPuLYT2C9pSvE8FwvAFWNHYeop4eHti7NAlGWiEIxNFAi0gfCEe1phj69TpYhGDZb7+6sQChCWY4Hr9NOvT/Yr32qbQuwp6WrX/d5MJiWSLeyRA41UkpeXV9jJnwoFIcCJSJ9wLA8MhNExIiJpAqsB0IRPE47xdl
u6vpZRH78z3V8/9nk4nGtnaE+WT3hSJRPdrccdH9ajZiIKv9yyNm4r41v/3U1SzbV9bqdPxAyH6iWisWf7OWDbY392b0jllfW7eOGJ1cf7m4MWpSI9AFjtbpXd2d1R7S/retEepoFBkJRPE4bRZnufrdEdjd3UtuWnAn12zc2c8VDH6a9n5fX13De/cvZm2DVRKOSV9btMxdW7g8jsK4skUOPMUFp7uxOuU04EmX+r97Zbwr23a9u4qFlO/q1fwdDa1eI2f/zJiu3H3phe29rAy+vq9mv8B4M6/e0muPLkYYSkT5gWCIZbl1EDEvEEBG7jVBEW8n+o39+yo1Pfay/L4Lbaacoy019+4GJyLuf17N+T3KtnTp/0Jz9W6lu7qKmB3Fp6ujmvS3J6ZQ1rZp4NCT0b+WOJq5/8mNW7mhKq5+GO0vFRAaW1s7k77y5QxOP3p6u2dTZTVNH936t1OaObvO7HAzsauykzh/ks31tSf/bVt9O1wBOWozzerAVuv2BEFc98iG7m+Kr+bZ2hjjv/uX8fXV1inemT70/SM0hTq9XItIHjJlCRgp3lsuhnc7uSJTt9R3sqO/Q3xfF47BTnOWmroeBPR1+/Pw67n1rS1xbMByhpTNEa1coyVJo6OimOxxNmt38dUUV33zko6T2pg5twDAC4waG6DV1xM9uW7tCzL37bVZXxYuL8X6VnTVwrNrZxIw732BXY/xg1JSOiOjbJH7PVgKhCB3dEdOqHAw0dGjXYUtCn7rDUc6+bxmPf7BzwD7bsOwOti7exr1tLNlcz6qEe6bOHyASlQc8Nlj56Qvr+O4zaw56P31BiUgfMC0Rly4iiSm+FhEJhqN06QN1MBzR3FlZbtoC4T6brdGopKY1QGOCldDQrl3cUQntCTP/Bt21kWil1LZpF2xie4t+oyS2G7Owlq54EdnZ0MHupi4+rY63jpQlAp/sbmHj3uQZc3+xpa6dSFSyu/kARES/ZnqzMmKD5sB9h9vr2/nW46vSvhdSXc8N7UG6QpEBXdzanGKCtWJ7I+f+fln6x6Cf+5YEK7Kxo+d20ASmL+PFrqYuZYkMZowv0+eKubMMVxZYLBHdAjBMbC0mormzINlltD8aO7oJR2WSNWCNr1jdG1JKGjt6vukaU1zIxsCROLgYn5m4n3rT/x7fHltsGEk7jnKk8sq6fT2KxU9fWM+d/944YJ9rDKipvsPeZszGgNWbiBjXSE9u0v5i6ef1vL6xlu316SV/GP1OdOMZ91JvcaB02dvSxeYaf1J7U4rzumpnE59Wt1LXlt79XO/XBveU92QP5/uc3y3jgXe2JbW/un4fH+1MdjE3tAfNCeGhQolIH4jFRAxLRMaJiNMeE5FuiyUSCEVwO2wUZ2k1afqaoWXMLBoTRMRq/loHlM7uiPkY38QLtsmc9cTvKzbb6nlgShIR4+ZN6FObZfba1csM6uZn1/K7BPfcoaDeH+Tyv6ykzn/ws7VFz33Kn9/bntS+rzXQ54lCXzD2neq7PVh3lvGdtwfDhAcoXdiI16U7+KeyRFJNZg6EX7yyKSkLS0qZMtZk3I+JxxCJSpZ+Xp9UDyyVJdJkuOoS9tMRDFPbFmRXU/ITEe946bMkcYnqE822QHhAkwASUSLSB5ID6xEzHgJaYB20IoSBUISuUIRwJEo4KuMskb5maBk3nD8QJhiODczWIL315jJmNtDDzC2Fb3l/lkiiuBg3dVPChW/drqMXl9Z7W+r5sIeZVF95df0+zv39srRvmtVVzSzb2sDaXfHpzB3BMBc9+D4b9qb3oCB/IERbIJz0XYYjURo7gnHfQTr72lqXPANOhTkYdfU8EfAHD84SsVq8BxtMbmwP9phyXKtPjBKta6BH4Wrcj0Xc2ofZ999W7ebFtXuS2nc3dSa5gvzBsFmtO9W9kSgiSzbVsfDhD9mQYKXWp3TJ9XxsxmQzcUISjUpq25Ld282d3eZ9cCifa6REpA/E3FmaJRLsxZ0V1K2RTv0
9RkwEDkBEWmNpt8ZAAcSZ0dYBJZW4gMVVkcqdlTBD3Z8lkjh7aguEzPPQmRBcf29LPTsaOvSBttu8eZ5fU83L6/ZxIKzYrrkUDPddT3SHo7ywZg9SStMCScyS21Tj56OdzT2ujVi7uyVpADF88Ik3eGNHN1Jq5y3RnXfnSxu5/C8rk/b/h3e28dU/vJ92JVuj74nfYVMasQxj1ttb0Nw6sCdeD9Go7JOb8rEPqviPxz5KEqN9KURkS62fST97LcmtZJznxMnPgVgiDy7dxqPv70xqr2kN0NEdicv0slraiee1KUUsw1j8mxin2Z8Fmbgfw9OQeI4M93ZDwkTF+ndP8ZWBQolIH4gF1jVLJBSJYtEQc/AMWrKiWvRB3+O0U5DhQoi+u7OsF6N1sKxvD5qfH2+J9CwioUjU/NsqOlJK8yZMDqzr26fwRTd1xLe3dYUpy9HcdomWyHefXsPv3tpCU6c20BoD2gPvbDPXJLz06V7+vmp3D2ehZ4w1Mr35pV/bUMN/PruWNbtbzJlmgz/+BqzWg9SJ301HMMyFD7zPI8t2xrUb62kSJwRGP6IyecBbs7uFT6uTF3Ruq2vHHwj36mKy0i/urEA4pWhZB63Ez1j03Kd8+6/pL7zb1diBlCTN8GtTDJCfVLfSHYmyqSZ+Fp9qtp5qMpOKaFRS3dRlWkIG4UjU3Jd1YtAUJyI9T8gSLRHjmki8NhpS9LUxhTvLuBYTz1HMvR2M+w6t/e4pvjJQKBHpA6YlYknxddhjp9CVkJ0FsQvM47DjsNsoyHAdsDsL4l1VdW1BKgoygPhBvjHFINCcot3qQ02ccae0RIwZoGWfRhyoNFsTkU7LjC4QitDcGWJva5f53qaObt06iOW2P7RsB398NznOUNsWMNfJNLYHWauvrjcEtrdzurNBC97ubuqkVh/k69vjB5Hq5i7zc+Le29hBOCrZ0RD/HG5zJt3ZHed+scZamhKsoz3NXbQFwklrGozPTuxTKnoKrEei0hyEeg2s69dPJCrjvh8rcZZIwr4+qW4xM/Kqmzt7XLdhpafzKqU0z1/iAGz4/xNFp9EinNaB0/je2wLpxW9q/QG6I1Hq/ME4i6qhPeYKslqp1nPclqYlYlwDiVaq0dfEAd4QSH8wPpZhbN/Y3h13zPt0z0QgFI37DuNE5BAG15WI9IGkFN9w1HwgFcRiIoHuiOlHNW4St1P7X+EBrFqvaQ2Q63MC8Td4fXuQ8nwfboctzj1hDDIepy1u8E9l7lovOOt+pJT7zc7SrIp4P6xpiVhcGMYgUtMaMN8bimgmeUtniDp/ACm1VGZjlvj3Vbv5zeubAfjt65+z8GFtBf79S7Zx6Z9WENF9w0BSoDwUibK6qhmIDUzVzV3m9qkskUQRqdLXYuxujl/Jb1giUsbHhayWjFXwQ5EotXofEz/DcH8kWlNLP6/n1N+8Eyc6wXDEHMys1mRbV4io1L7zxMHOSm8CYd3GuKwTr4c9zV3U+QOEIlH+9+XPuH4/VokhIlZrurUrZN5LibNsYyGedeIUjUoaO7px6WWFjKQRiJ889Hbcsf1r/Qnr+zSwfl6Df/+WiPXeSGVBWPsmZcz9lOSG1PcjZfxnGPvpjkTj3IG1KSaVcdmayhIZnIT0C9/jjMU+LIaIaYlYL2ZjsPbopVKKsz1mql+61LQFmDwsG4i3MurbAhRlusn1OZMskSyPg4IMd9wgYL0hWnpo9zrtSdlVxs3eUzBQCOKy0Iz3lukVi62zJMMC2NcaiBtoP6/VfN+hiKS+PUidP4g/GMYfCPH8mj08uVIrz7GzsYPGjm5aOru1FcqhCNXNnT3esAAvrt3LBQ+8z7b69h5FJDEmEpsxx7fv0K2Y6oQ1GXtbrINOvHVoYD3fNa0BjMmkdcBqC4TMc1uvp2e+v60BgPe3NbCtvoNt9e18Wt3C2b97j50NsX5Yv3NDyEbm+/SYXGorw5iQpHKfNXV0m1WnrULT0hmioztCVGoD2fb6DnY
1ddIdjvL4Bzu55rFVcfsJhiOmcFrjelZBSbREjJX0NQmiE4lKRhX4zL8N6tuDZhXtxH1d+MD73PtmfAagNdPJOhjXxLmMY/sx9mms8TJoD4bNdWKJ8RjjGrBaBm0BbXtjYme1LBrbg6aL3Pqdxlu1sT5Zz1+Dxdq1ThKViAxSDFPTY9bOiuKwWdxZuqJYZxOmO0t/T1/rZxmz8wklWThswnSRSKkNusXZbnK8zrhZaUN7kKJMrT0uVqK/N8frjJsNGRfuqAJfj6IzLMeD3+Ly6uwO0x4MMyrfF7ed8d6eLBFj4AyGo2yti7mGrO6QDXvbzM+obQuwu7mTpo5uOoJhc7a+s7HTHGhWVzWb29f5g7y/tYHz9MVfm/T9rt/TahGRTrMfia4GYwZc2xaIu8ENV1htWzBu0dfeli6c+hMtrfuq8wfMOJV1MKq2WDLWwWuPpb2uLcgjy3dy+V9W0toVMlekb2/o4J3N9azf08brG2oAyPU5474rw604Ml9zb/YUF4lGJc2d3Ywu1LZJZYk0d3abblLr9WOtFr23JcDupk6iUmt/87M63vyslvZgmOaObj6tbmFfS0w4rQOf8R0UZLiSYmq7dEshzoWrX7djipL7VO8PmscTZ1EHQqyqamb51oaE/acSkdixNfiD1PuDbNzbRlNHNw6bYFiuN+WELFG8eprYGK/HFmUSjkqztlw4EqWlK8SYokztGBKOzexTe/yExDw37fH3fWGmWz8XSkQGJYaLyhpAt/VgiVhvYGOW4tH/l+dz9ino5Q+G6eyOMCzHS16GK26xYCgiNUvE60pK8S3IdCWJiHEhji3KiBMd4yYYVeBLiKForyuMQUf/nzHznlCSFbed8d6eYiLWQOY6yyr3z/bFsnCsVYR3N3exT5/tVzXGUi+31rWbA/IKSzG+urYgSzbX8Ul1K5/ta2NrvSZUa3a1mAPSVj2AbRPxN2g0KtnT0oXbYaOzOxLnOthpqTFlLU65r7WLiaWadWjdV51lULMONNYBOJWI1LcH2d7QQVRqK7oNV9qO+g626MK7ZLOWLjuuKDPuOjIEq0KfrSeKyFuf1VLd3EVUwuiC+O/T4OFlO1i2pYGmjm5G5Hux20SctQMxQr0AACAASURBVGIVwnV7Ws2BsKqxg216/zbX+Pm/Nz/nwgc/MK3MxGM2roWjy7LjYmodwbApyNbrpd5vXLf6QKtfrx36vTG+xGiPHc8m/braXOtHSsk/VlfzzuY6djd1mp4Eq1DVtAVx2W1keRw0tAf51Wub+PqfPqDeHyQvw0W2xxF3To3z7bAJWjpDfFrdwhn/9y5NHd2m6FknF8brccXxx9DcGUJK7Z60tmvHHTQnZE0JrrdhentjwmeU5XjIcNkHpyUihPANZEeOBKL6tMplWVQYt07EdGclxxvcuiWS43XS2R1J+1nsxuBZkuOhIMNlXryGO6Y42022N96d1dAepCCjB0tEN/1H5vviXSEdhohkxGWWxcQlfgZofLYpInp+epsZE9FcIdbsLOsgsn5Pq7nq35qFYxWRT3a3mKK9elez+fr9bQ3m6xXbtXUm2R4Hdf4A2/TVzxv3tZnWzhsba5FSc0cYs+FxxZl0dkdMS6nOHyQUkVSW5yb1dUdDpykKu5u7eHtTLbubOtnbGmBqeY55voPhCKFIlLq2AMNyvWR5HHE3uCEWboeNmtYgDy/bwZUPf2i6yTJc2rNmdumitbUu5obb0dDOFn1ANhIKxhZpx2C4rZo7YhMBiLeG97V2cfVjq7hDX0XfkyUSiUp++eomfvvGZpo7Q+TrA6d1G6sQWgV8c43f/N+mmjY+3NlMdzjK4k/2AjChJNM89+FIlH2tAYSAo0qz4mJqRhmXEfle6vxBGtuDfP2PH5juPUNEzOvQbwzMxnVoERH9umrtClHTFuDnizdw+782UtXYwdThOdiEJlSvbajhnx9XU9PaRXG2m6IsNw3t3Xy2z48/EGb51gbyfS6yvc64c2qUjxl
V4KOlq5vlWxvZVOPnTf168zhtPVoi4xKOwbj3xiS0g3ZdTizVjs16LdW0Bpg0TLv2rNauZom4yPW5BpclIoQ4QQixEdik/10phPjDgPdsEBKOaBe7ESTvjkTNullgdWf1YIno7zH80enOFIybrzTbQ0Gmy7zojEVx5Xm+JNdGY0fPlkhTRzd5GS7yMlxJ7iy7TZh+cKP/hoiMKUwQEf2GmKBf4LuaOpl15xs8uFRbQVuY5cJuE3HrRGraAqZw+INhxusCtKU25tr6xGKhGEFxiB+w3v1cq0BsEzHXxDHludT5g6ZwfFzVYg5qxu8TxhaY+5gyLDb4Q2zwmlmRB8TiIv5AiIb2IHPHFwKa+F37+Gque2I13eEo44sz8ThtNLQHufRPK7jluXXU+YMUZ3kozHTH3eB7WjopynIzPNdLbVuAV9bv493P61n6eT0ep40JpVnU+QNU6cf00c4m0yLaUtdulgcxknfGFsd/J2ZMpCDZnWW4DN/WF/3FLMvYNlWNHXSHo3y8q4VIVJKnD5zW62pPcxdep51cn5MPLVWd39kcqwq9emczm/UB/PWNtdhtgmkjcqlpDbD4k71Mv+MNPtrZRGGmm+Ist7aWSrdoDPfd7IoCwlHJvz7Zy8odTfxx6Xb9mBNExJzMxM/urccM8NIn+/AHw2xv6GDdnlZGF2ZQmOmmti3Ir1/bzK0vbqCqqZOyHA+FGW7q/AHzWtqrJ7Vogho7X8Z9OLYok5aO2LN7lurX58TSbDr0icqSzXXmxMSwRIz7zxCHmJWltXeHozR1dHNUaXws1MhsG1XgI8vtiLd2/N0UZmqTytaEhagDSTqWyD3A6UAjgJTyE2De/t4khPAIIT4UQnwihNgghPi53j5aCLFSCLFVCPGsEMKlt7v1v7fq/6+w7OtHevtmIcTplvYz9LatQohb+nLgB0IkajwOVzttUsYejQtWd1ayJeJxaANottcQkfS+5H36IFiW4yE/w21edE+v3E1FgY9jhueQ6425yMKRKM2d3RRkusnxJbuzCjJc5Hpd+INh89knTZ3d5HqdsYBrIBSXfWIMOi16gDPx5l2yqY7mzhDr92g3bo7Xic9lj7NE6tqCTB6WbS7OHJ7rIcvjoDsSpSDDRX6GJpBuh438DBcfW0RkpW5xFOuzRIDKEbn6dyGYWJpFXVvQFIPXN9YgJRyriwLAiWMLzdeTh8dEREppWgOzRmnb72sN8PgHO3n3c20GPHt0Pk674KmVu4hEJRv1AWpYrpfCTDdb6tr5eFcL//pkL/V+LU5lHI/BnpYuhud6Kcn2sLe1y1zNvGRzPcNzvRRnudlW12EOIkv0gXl4rpcNe9vojmiiBZrVUqpbe8Yg39zRjddpp0j3iVuvQcNlaMSPRhcmu7OsrieAgkwX2R5nQkykk+F5XobleM32UQU+s4ZTfoaLl9fvIyoxM6nKcjyU5/lo7Ohm8do9+ANh3t/WSGm2h7wMFxAbkI1JwezR2vfw2oZaQJ+siZirLnEyM7owA5uId2dt3Oc3r8+n9GenCKElcIzM91Ga42FzrZ8tde34g2HW7GqhJNtDYZaLDXvb4kr25Ge4yPLEWyLGoD62OBN/MGyKjvGYBSMR5p8fV3PVIx/xwDvbsNuEadUb92ujaYnEp+ob4jAy30eGyx63vqdLf1JqQWbMvW3UyyvMcmvjwWCyRACklImrv9IpKxkETpFSVgLTgDOEEHOAu4B7pJTjgGbgan37q4Fmvf0efTuEEJOAS4DJwBnAH4QQdiGEHbgfOBOYBFyqbztgRKTEYRNxwtFTim+8JRIfWM/1aTdOa1eIFdsbufvVTb1+5u7mTuw2oV00ujtrS62fD3c2censkdhsIs5FZizkK9ItEat7qrEjSEGmyxSLF9fu5bTfLmVbXbvm99UF7p3N9Uy59TU+2NaITWgXMmgz8em3v85j7+9ECO3mFQKWb2tACG2Qd9gEXqedDJcjyRIpy9EGS9BSnY0gYFGW22w
vy/FQluMx/e0j833mDWVYEz6XnWMr8gHtM0tzPHRHokip/W2c/3Mqh5nbHzMix+zLFP0GX13VzKw73+QPSzQLasZIbfB6df0+fvbiBv7zWa2k9pjCTIbnetnT0kWhfl4BhuV4Kcpym8Hbbr3ETUlWDyLS3MXwPC8l2W7WVbfS2R0x02jL83wUZblNH731Mcrzjyoy9/HVGcO1c5flNvtgDBaNHd36YKeln1tnzZtq/Ob2xjnzuexxrqrNNe0IoQ2YgG6JxM++DSEcplusJdluxhdnEY5KbAJOO7rETL+9YOZw/di8lOr++3c215vXXkm2dj1DvIhkeRzm7PvDnU2MKczApU8ssj1ObCJZREqyPZoLpyu2BmZzTRsnjiukMNPFjoYOSrLdfGmcNpEYke+jOMtjugYNynI0C9KwjAxXkhETCYSiphu6qSOI12k3Yxbrdc+Acb4m6dfYy+tqzO+nMNNlnl/jGIxJYXGWmyy3wzwGIzhfnOUmP9Nlbme6t7M9FGS64wqthiKSQj1bs7UrxBMrqvjvF9Yz0KQjIruFECcAUgjhFEL8EPhsf2+SGoavwqn/SOAU4B96+2PA+frr8/S/0f9/qhBC6O3PSCmDUsodwFZgtv6zVUq5XUrZDTyjbztghKMSm03ElTrpqexJnIh0xLuzrDf/i2v38od3tvX6TOyqxk6G53rNhYr+QJi/vLcDp11wwcxyIN5FZrgZJpRkmaLQZvG/FmS4ze2fWlnFlrp2Vu5oIs/nJNtjiMseOrojvL6xllyfizx9+8Vr99IW0GZd+T4XboedXK+TQEibJT/8zWP5+XmTEULgc8csESm19RylOR5zQCnKdJs3VHG2hxI9GF+a4zED8yXZbjPgmJ/h4ugy7cYcVZBhZg+V5njMcjIAZx1TBmjurjOnaK9H5vtMV12Gy27OxB97v0oT5bp2irLc5OmD8Fub6nDZbQh9lK8o9DFCF9IvH13CVSdW4LQLRuRrlkgoInE5bOZMuTg7Jvgvr9vHX97bzt6WAOW5XkpyPGZM5+xjNJErz/OaxTkBThwXs5rmjY+JyPnTdBHJ1GaboF1H7cEwn+xuoTDLbX6H1mtw0742jq3IN623PH1AtrqzPq/zMzLfx6kTiwHMmFqiO6s8z8uwXI95Xo0YzIh8n7n/sUUZLJhcqh+bz/w+w1HJT8+axHGj8zl+bEHMEtHjIpv2+RlV4DMH5khUMm9CEd+eN4Z544uw2URc/G97fTt2myDPp02MDNfxzsYOAqEoR5dlM16Pl8walc+5+qRibFEmpTnaNeOwCbO9JNtjTmwArjh+FAD5Ps0SAXhhzR4u/uMH7GsNkJ/hMieFgVDUdNcC5rW6ckcj44ozKcpyU5rt6VH8bUKbXGZbsiYNgdSs2phrdK/FM1GgJ9r86J/ruOjBD/RrQzsXLV0hnlxRxRMrqthWH79Qtr9xpLHNt4F7geHAHuB14MZ0dq5bC6uBcWhWwzagRUppXL3V+n7Rf+8GkFKGhRCtQIHevsKyW+t7die0H5eiH98CvgUwcuTIdLreI5GIZonsT0TaeknxzfXGBnxjhl3nD9LVHeHtTbVcO3eMOXiBlnpq3Kj5mdoF++yq3Vw6e4R5wVtdZP/+dB9FWW5mVeRTayn41hWK0OAPkp8Rm0mv2d2CEJpbLtfnIserXQ7WZ4Tk+Zzm/jfX+inN9jBleA5h3bWXl+GiuTPEjJF5TBmewxTdVZThcpgzOmNxWXGWm7IcD2vQrA9jJlqU6TbX25TleM2bcUSej/I87djL87ymK6CiwGcO2KU5HnMAFgLOmlrGI8t3MiJfm92PKcxgbHEmWR7NXZfv02aDQmgz68ryHC6fMwojqbck24O/rp35RxVx8awRfLijEZ/LYfbj1KNLOGViMV+bXk6uz2V+B9PKc5k3oZBfv/65NnvULZEfP7/OHDCG53nNuJrPZee6eWP41yd7GZHvi7MUTplYzJuf1VKa7WGSPhhpg7eX0YU
ZDM/1mhOB+vYg1zz2ETsbO/nzlTPN4qCG6yUQirC9oYPTJ5dyTkkZL6/bh9Nu060Mizurxs+EkiwunFnOih2NjCzwaUKjb9MRDNPcGWJ4nte0vkfk+0wrdWxRJkeXaQP2jJF5zBqVh9dpZ3xxpikKAPMnFHGhPvkx0qeb2rv55aub+HBnEz84bQKFmW7sNkEkKpk2Ipfzpw8332/E+T6v9fPUh7s455gy7Dahu3C6CUW0OmkAk8qy2VjaxgfbG5lVkccFM8oZme9jyvAcSjdrfZo8LJuLZ41gsf49GO6h0mwPp00q4bbFGxie5zWfFfTEiirW7WnFZbdxVGmWeT8DnDyxmH9/uo88n5NhursxKmHu+EIunzOKSFTicdpwOWy0dHWzfk8rL67dS2m2RzsGi/vZWCNSnOWhMMNFTVuAFdsbWfTcp3iddsYUZVKQ6WbZ1gY+r/Wbj+weXZjBxn1aanKD6frexU/PHjgnzX5FRErZAFx2IDuXUkaAaUKIXOB5YOKB7OdgkVL+CfgTwKxZsw64RnJEaqXfHSlExGETCBE/CzQGUuMiNG7+ls6YiNToWSJ/enc7F84cwZ7mLv703nZ+e3Elu5o6OXOqNqM2LsyLZpZz+3lTzM8wZkP7WgMs2VzHxbNGYNfdXAA//9dGlukul4oCn7m9lPDt+WN54oOdlOV4zFksaAPZ0s/ryc9w4XHa8ThtBEJRZo/O595Lppnb5flcQAfTR+bGnSufy25mPxluGs3K0I6hKMtNQaZhibhx6uexVE9RBG3gHJGvbT8810tFoTZgVRRmMEq3JkqyPRRnu83tpwzPwWETZhbMw988Fp8+sI4qyCDb48Bht5Hv0yyFBZNLuWjWCLPfpdketta1c9bUMk6bVMJpk0oAmDYihzc2ai4Ru00wUhexIv0Yjh2dx5UnVCCEFkheu7tFL0US4uuzRvDSp3uZNiLXTJOdMiyHycOyufeSacwdX2TGgAoz3RyjZ32NLPAxPM+Ly24z4yGP6MfjtmvH9OTKKtbvaePXF1VyysQS89wb1+BW/QFWE8uyOPuYYZynWzNWgQiGI+xo6GDB5BKOG1PAe//vFG0bb8xaMeJNw3O95iRnZL7PPA9jizKYWJrNuOJMzphSSpbHyes3z6Moy21a2hNLs+KsRsMSeWJFFWt3t3D5nJHcdMo4hBAUZWruvWkj4q+rXK+T5s5ubnnuUzLdDv5bHxzzfC6qmjo553fL2FTjZ3ZFPkeVZpnCdmxFPjab4LgxBfo1p4nIjFF5nDiugCevOY7jRuebyQfjSzIpzvLw1vdPojTHYyZ0rNNL73RHouRnuPTrX+PMKaX8+9N9FGd5zGsbNCvICJwbx7CuupVHlu8k3+cy7yfDggiGI/x9VbW2YFh3gX24o4mrHvmIslwPj141m/wMF4WZLnN8WfydL+F12hmW62X51kYz/jUsx8M/Pq7mh6cfZU5k+5v9iogQ4hEgaeCVUv5Huh8ipWwRQiwBjgdyhRAO3RopR7Nu0H+PAKqFEA4gBy2Yb7QbWN+Tqn1AiEQ1EYmzRCxWgxACp92WlH/vdsRcI4ZZHGeJtAXMFNCdjR28ubGWf32yl4tnldPcGTIX9c2bUMTzN5zAtBG5cdaKMaP/35c3EQhF+YouOoaILNvawIJJJdx0yjimDs8x1x8AnH1MGZccO4I8n8vMOgM4t3IYsyryyPW6zH0FQkGOHZ0f99nGTTR9ZCyIDdozV2rbAnzjzytMP3JptseclWqWiDagFGe5zWexlOV4zCrJ5Xk+RugWwHB9Fj57dD7zJxRRlu1h3oQi5o0vMuMpY4sy8TjtfHv+WDP91kgKAPjNRceYSRFG9tTpk0vi+l2a48Flt3Hq0cVx7V8/diQXzCiPq5VmHAdog1S2x8mNJ4/TvhN9EBlTlMEvL5jKLy+YihCCkG6JTC3PQQhhDuqGEI4q8JlB1pH5Puw2wbXzRjN1eG7c8USjEiFg/Z42yvO8XDAjNlv
P8jhMS2STXg3XWNNikO11Uu8Psn5PKzWtAcJRaaZsm9t4HHSFItT7g/y/f3yK0y44pjzXXPA6Mt/HuKJMhND273XZefP78833Gy5Aj9POsBwPCyaVJO3fYROs3d3C5GHZ3H7uFPPaKsnxEAhHTCvc2u9lWxuQEv7v69Mo0C3BHJ+TrZs0t81vL67kq9OHI4Tg/OnDKc3xmhaygTEhmzVKu54NF6KxP8MNZoikEWsC3cLZ16YlqfhiE68vjSsky+3QJkV2G3m6i23mqPh7I8fr5P1tjbjsNp6/8QQzJT7X62JjSxu3Ld7I2t0tPHDZDJx2G/mZWiKMz2XniauPM12zxn0/fWRuvEjpfbIJuO3cyXzridW8ur4mzqLrT9JxZ71kee0Bvgrs3d+bhBBFQEgXEC9wGlqwfAlwIVoMYyHwov6WxfrfH+j/f1tKKYUQi4GnhBC/BYYB44EPAQGMF0KMRhOPS4BvpHE8B0w4muzOsqb4ArjtNvz6DNxwFVlnAHabINvj0EREX0RV0xagWvd1VjV2mOmcL6zRTrPhMrDbRNJgDZpJft28MTy0bAcl2W4z6GyIiNth4+fnTY5drPpFlul2cHRZdpJLrjscZcbIPM4viHcj1LYFma3v26BEXzE/znIRA3hddj7b12ampGrbejh2dD7jizOpKMwwYyJFWW7cevZaabbHfOhXeZ7XdCNpLgU7f7vueHN/j//HbECLuRRluTlGHyh+ePpRSecIYusJjP2Fo9G4mw/gxpPHcW7lMFPsrSQKCMD8CcVcOLOVOWMK4toNN9dlx42KE93RhRn4XHa+NL4wbntDjEbl+/C5HPzgtAkcrycS/NfpyQa8zSbM7KlzKofFfUaWRxu87ntrC8+v2RMXrzHI9jj4aGcTX/3DclPYjipNEBH9+vn6nz6guqmLBy+fyejCDEbm+/jpWUdz+uRSMtwO/nXTl8wgdCpe//58c8GtgRCCPL0g6U++cnTcvXT+tGG0dYXjjgu061BKLWnivGnDzHZjMvPlo0v42oxys93tsDN/QhGJzBmTz90XHsOChEnEiDxtkWXliHjRsV4Pd5w/meue+JjRhRmmNZXnc5Lrc/GteWPMxIOiLDc+l8OMAxoY99/XZgw370nQhHBHQwc7Gjq4bv4Y0wNRqE+2frjgKFNAICZ4F1iOF2Iu86PLsvny0SXc/OUJSRZdf5KOO+s5699CiKeBZWnsuwx4TI+L2IC/SSlf0tecPCOEuBNYAzykb/8Q8IQQYivQhCYKSCk3CCH+BmwEwsCNupsMIcRNwGuAHXhYSrkhjX4dMNH9WCKgDcKGiOToQUCPM/7myfE52dvSZaYR1rQF2KO7C3Y2dLJdrxj76nrtGRvGjC4VQgh+9JWjuXzOKMLR2NMWCzNduOw2rv7S6LiLNcvjRAiYVZEXdyyA7tKSphvJ7LOeAmy4VQy+9+XxXD5nVJKYZrjsRKXmWrn7wmNYs6uF4bleRuT7eEOfrZrurCwPE0oyObdyGLN1S+eEsQWcMLaQslwPC48fZQZqUx3/y9+dS7Y3nTmRxv9+dSqhSDRpkBpdmGEG3tNhZIGPX19UmdR+3OgC/vvsSXxjdnwMLj/Dxae3LkgSpMJMN1keh5nV851Tx+/3sw0funUwBW3W/PamOt7YWEvliFy+eUJF0udpi+fCZLkdXDevgr2tXUkTAWMSUt3cxR+vmMnJetDdbhNcM3eMuV3iLL8nMt09fzdjizKYMTKXE8bFi+pVJ47ucfujy7LZuLeNO8+fEvfdled5cTls/OSso/fbF9AmBBfPGpHUXpzt4a3vzzcnbgbGtZXldjBtRB5Lfjgfr9NuureNeJ31e7t41gjTwraS49VictfOGxPXPm1ELsu2NPDD04/iHD1BBODsyjKiUrLwhIq47eeOL+TqL43mqwkWRo4uUrNG5WGzCb735f1fSwdD+nddjPFA8f42klJ+CkzvoX07WmZVYnsAuCjFvv4H+J8e2l8GXt5
/l/uHcFRiFyKuXpbDniwiBnn6ylFjlm2Q63XFZUzsauw01z9sb+hgp+5uMtNcC3oXEYNEscnyOHnz+/Mpz4sXBLtN8LXp5aa/38qwXA8j831Jg+uFM8u1kiEJYlGc5YnLLDIwXFKnTy7l7GOGmZlIVk6aUMy1c0dTOSIHt8POfZfGLpenrp1jvv65Jf6TCquvPR0SZ4f9jcuhiXdP9GTROO023v7BSXHukf1RmOnG47AnuaqyPE4iUcl504Zx7yVJtyCAGf/63pfHxwmClfHFWZTlePjF16Zy0lH7veUPiCeu7jEXJiU3njyO6+ePTboOL58ziq9MLTOz/A6Gih4mEYYlMmOUNvGyWibFWe4kixZIeV4vnFnOjFG5Se+5eNaIHoWtLMfLdfPHJrXn+lxmTMiK4apLFOaBIp2YiB8tJiL03zXAogHu16AkEpXY7SKuXpatB0vEwHAHJFkiXmfcitqPd8UW1n2wrYHucJSJpVlsqvHHpd4eCKkE6DcXJ8+eAf6ycFaS6IEWE+gLRpbQudOSxcMgx+fkJ2cN6NKeI4q+CuFdF0xNuv5AW8hZluPh9nNTi+/JE4upbQtw5fEVKbeZNCybD350ap/61Fd6mqnvj0QBMfbTHwKSiiy3g6IsN18+OllM/7xwVlxq8P44Y0pqq7o/qCjMYMkPT0pyYQ4U6bizend2DiEiUYnDZouzRBLdQcZN4XbY8OmxkMSsiByf01wrUFHgMy2P8cWZZqG9C2eWc+e/P0syqweanqyKA+HYinzmTWgzF3gp+h9rjMfKredM5pYzo3Fpw4nMHJWXFPBVpMZmE7x/yylxmZkGk4ft3513qOmLS/ZgSSkiQogZvb1RSvlx/3dncBPRV+Zar6MkS0QXEY/TjldPVfUkzOytN/fkYTmmiJw4rtAUkbOPGcbdr202ayEdaZx0VPGAuUAUvaOlZA9MOudQ5kCspqFAb5bIb3r5n7HyfEhhWCJCCHMxVOLMxHBnuR02cwGQO8GdZV2gNGlYNv9etw+7TXDc6HwefX8nuT4nJdlufnNRpZnuqVAoFIORlCIipTz5UHbkSMCa+WSISKI7yxAR62wwyZ2li0iuz2kGvUuzPWaV0jGFGQghzNpPCoVCMVhJKztLCDEFrcih6TCXUj4+UJ0arESiUVM0HDZBNz2sE7FaIq6YoFgxMnAKM91mMHB4nteMf4zpIdNDoVAoBiPpZGfdCpyEJiIvo1XNXQYMPRGRsUC6sT4kyZ1lBNadMXdW4iIrwxIpzHSZIlKe68XjtPNfpx9lLjJTKBSKwU46lsiFQCWwRkp5lRCiBPjrwHZrcBKJRk3RsOvrQxID60bwzeOwx0QkyZ2lLbIrzNQqe1oXKxllMxQKheJIIB0R6ZJSRoUQYSFENlBHfM2qIUM4Ik33lSkmCQkbZmDdacOjZ2e5U1oibrwuO89eNydluqZCoVAMZtIRkVV6Fd4/o5V1b0erbzXkiEppWhqGBZIysO6wp1wnYsREjMVlM0fF16NSKBSKI4V0FhveoL98UAjxKpCtlzQZcoSjEo8z0RJJkeLrtMXWiSSk+JZke7hoZjmnTFTrKBQKxZFNb4sNNwJPAU9LKbcBSCl3HqJ+DUqsKb1GTCSpAKO5Yj11iq/dJvhVD0X7FAqF4kijtyWYlwIZwOtCiA+FEDcLIYb0wgXr4kJDPBJTfGPrRKyLDdXqYYVC8cUkpYhIKT+RUv5ISjkW+C4wElghhFgihLj2kPVwEBFJWGwIvVsisbInqlyCQqH4YpLW6CalXCGlvBm4EsgFfj+gvRqkWFesG0UY7SlKwVvLnqg6RgqF4otKOosNj0VzbV0A7AD+CPx9gPs1KNEeSqVnZ6WyRMzAup2yXC8Om9jvQ6UUCoXiSKW3wPr/Al9He8rgM8CJUsrqQ9WxwUjYEhNJmZ1lKQU/PNfLuttON91aCoVC8UWjN0skAJwhpdxyqDoz2NFKwceLR6qHUhkuLCUgCoXii0xvVXx
vP5QdORKI9GCJpKydpYLpCoViCKBGuj4Q1h+PC7GYSKoUXyUiCoViKKBGbqDpBwAAFvtJREFUuj4QiUaTqvf29jwRhUKh+KKT7vNEhgOjrNtLKd8dqE4NVnpaJ6LcWQqFYiiTTorvXWhZWhuBiN4sASUipA6sq1XqCoViKJCOJXI+cJSUMjjQnRnspJPiW1GQQUGGi9EF6tnoCoXii086IrIdcAJDXkSisgdLJEFERhb4WP3fpx3yvikUCsXhIB0R6QTWCiHewiIkUsrvDlivBinhNGIiCoVCMZRIR0QW6z9DmmhUIq3PWDdqZwklIgqFYuiSzkOpHhNCuIAJetNmKWVoYLs1+IhICSQvMkx0ZykUCsVQIp3srJOAx4CdgABGCCEWDrUU30hUExFbQlZW4jPWFQqFYiiRjjvrN8ACKeVmACHEBOBpYOZAdmywEY72bIkYbi2FQqEYiqQzAjoNAQGQUn6Olq01pDAsEXvCc0RUTEShUAxl0rFEVgkh/gL8Vf/7MmDVwHVpcGKKiK4ZduXOUigUirRE5HrgRrRH5AK8B/xhwHo0SAlHowDYddVItWJdoVAohhLpZGcFgd/qP0MWXUOSS8HblYgoFIqhS29PNvyblPJiIcQ6tFpZcUgpjxnQng0yTEtkPw+lUigUiqFEb5bI9/TfZx+Kjgx2DEskccV6Yu0shUKhGEqkDAtLKffpL2+QUlZZf4AbDk33Bg+GJWK4r8wUX2WJKBSKIUw6uUU9VRM8s787MtgxFxuK+JXqyhJRKBRDmZQiIoS4Xo+HTBRCfGr52QGs29+OhRAjhBBLhBAbhRAbhBDf09vzhRBvCCG26L/z9HYhhLhPCLFV/5wZln0t1LffIoRYaGmfKYRYp7/nPiEGzixIVfZEiYhCoRjK9GaJPAWcA7yo/zZ+ZkopL0tj32HgB1LKScAc4EYhxCTgFuAtKeV44C39b9Csm/H6z7eAB0ATHeBW4DhgNnCrITz6Ntda3ndGGv06IMIRY7Fh/Ep1VTtLoVAMZXqLibRKKXcC9wJNlnhIWAhx3P52LKXcJ6X8WH/tBz4DhgPnodXiQv99vv76POBxqbECyBVClAGnA29IKZuklM3AG8AZ+v+ypZQrpJQSeNyyr34ntmI9fpGhKgWvUCiGMunERB4A2i1/t+ttaSOEqACmAyuBEkvQvgYo0V8PB3Zb3latt/XWXt1De0+f/y0hxCohxKr6+vq+dN3EcGclWSIqsK5QKIYw6YiI0Gf6AEgpo6S30l17sxCZwHPAf0op26z/0/ebtAalv5FS/klKOUtKOauoqOiA9hExCzDa9N8qJqJQKBTpiMh2IcR3hRBO/ed7aI/M3S9CCCeagDwppfyn3lyru6LQf9fp7XuAEZa3l+ttvbWX99A+IBgxEaNor8rOUigUivRE5NvACWgDdDVagPtb+3uTnin1EPCZlNJaMmUxYGRYLUQL3BvtV+pZWnOAVt3t9RqwQAiRpwfUFwCv6f9rE0LM0T/rSsu++p2oVJaIQqFQJJJO7aw64JID2PeJwBXAOiHEWr3tx8Avgb8JIa4GqoCL9f+9DHwF2Ir2XPer9M9vEkLcAXykb3e7lLJJf30D8CjgBV7RfwaEcFJgXS02VCgUinSebFiElkZbYd1eSvkfvb1PSrkM7UmIPXFqD9tLtGrBPe3rYeDhHtpXAVN660d/ETFqZ+niMaksm2PKcyjOdh+Kj1coFIpBSToB8hfRyr+/CUQGtjuDl0hCFd8pw3NYfNOXDmOPFAqF4vCTjoj4pJSLBrwng5xES0ShUCgU6QXWXxJCfGXAezLISYyJKBQKhSI9EfkempB0CSHahBB+IUTbft/1BSNxxbpCoVAo0svOyjoUHRnsxBYbKhFRKBQKg3Sys+b11C6lfLf/uzN4CSeUglcoFApFeoH1/7K89qBV0l0NnDIgPRqkRA1LRD1TXaFQKEzScWedY/1bCDEC+L8B69EgxQysK0tEoVAoTNIJrCd
SDRzd3x0Z7KjAukKhUCSTTkzkd8Qq7dqAacDHA9mpwUhiFV+FQqFQpBcTWWV5HQaellIuH6D+DFrMZ6wrDVEoFAqTlCIihBgppdwlpXws1TZDibCyRBQKhSKJ3kbEF4wXQojnDkFfBjVRqWIiCoVCkUhvImIdLccMdEcGO8ZDqZSIKBQKRYzeRESmeD0kMQowKg1RKBSKGL0F1iv1GlkC8FrqZQm0x39kD3jvBhERKXHYBEKtE1EoFAqTlCIipbQfyo4MdsJRaT5XXaFQKBQaKtUoTaJRqYovKhQKRQJKRNIkHJUqqK5QKBQJKBFJk4gSEYVCoUhCiUiaRJQ7S6FQKJJQIpImyhJRKBSKZJSIpEk4KlUZeIVCoUhAiUiaRKMSu3oglUKhUMShRCRNwlGpii8qFApFAmpUTJNIVKqSJwqFQpGAEpE0iShLRKFQKJJQo2KaqMWGCoVCkYwSkTSJRKNKRBQKhSIBJSJpEpHqWSIKhUKRiBKRNIlEo2rFukKhUCSgRCRNwhFVCl6hUCgSUSKSJlGpamcpFApFIkpE0kRlZykUCkUySkTSRBVgVCgUimSUiKSJKgWvUCgUySgRSROt7IkSEYVCobCiRCRNwlGJQ1XxVSgUijgGTESEEA8LIeqEEOstbflCiDeEEFv033l6uxBC3CeE2CqE+FQIMcPynoX69luEEAst7TOFEOv099wnxMCaCdGoxK5qZykUCkUcAzkqPgqckdB2C/CWlHI88Jb+N8CZwHj951vAA6CJDnArcBwwG7jVEB59m2st70v8rH5FeyjVQH6CQqFQHHkMmIhIKd8FmhKazwMe018/BpxvaX9caqwAcoUQZcDpwBtSyiYpZTPwBnCG/r9sKeUKKaUEHrfsa0CIKEtEoVAokjjUo2KJlHKf/roGKNFfDwd2W7ar1tt6a6/uob1HhBDfEkKsEkKsqq+vP6COq+wshUKhSOawTa11C0Ieos/6k5RylpRyVlFR0QHtIxxVZU8UCoUikUMtIrW6Kwr9d53evgcYYdmuXG/rrb28h/YBQxVgVCgUimQOtYgsBowMq4XAi5b2K/UsrTlAq+72eg1YIITI0wPqC4DX9P+1CSHm6FlZV1r2NSCoFesKhUKRjGOgdiyEeBo4CSgUQlSjZVn9EvibEOJqoAq4WN/8ZeArwFagE7gKQErZJIS4A/hI3+52KaURrL8BLQPMC7yi/wwYlRmPU9xYBqun9s8ObQ44+hzwZPfP/hQKheIwMGAiIqW8NMW/Tu1hWwncmGI/DwMP99C+CphyMH3sC5sL13KMfxn868/9t9OWKjj5x/23P4VCoTjEDJiIfNHwegpoGXkmzLq5f3b4j/+Az15SIqJQKI5olIikSZYnm5DTBjkpM4n7xqRz4bUfQ9N2yB/TP/tUKBSKQ4xaPZcmPoePzlBn/+1w4lna703/7r99KhQKxSFGWSJp4nP66Ah19N8O8yqgZCqsfhS6mmNt068AVS1YoVAcISgRSZMMZwb1nQe22j0lMxfCqz+C5feClCAj0NkEX/rP/v0chUKhGCCUiKRJhiODqnBV/+509rXaD2gi8tzV8OZtsGsFZJfBKf8Nvvz+/UyFQqHoR5SIpEm/u7MSEQLOu18Tk/rNsO0t2LYEjr8RvHkw8Wxwegbu8xUKheIAUCKSJj5nPwfWe8LphYse0V7vWgnPXg4v/1D7O3s4jPsyOH0w+asw4v+3d+8xcpVlHMe/z8zsdrtb2C22IgiUUrEEDKEVkSIqjUYuKiA0BkMEoomQKBGNFwxGG4wxgGLUGBuNtWBEiApaQYzgpWqQysUCBSzUFqJNBYpLse1u6cw8/vG+sz07e2Y7c3bnzNT+PpvJnvOe2zPvnD3Pnsu87ym6dyIiHack0qSBngF2lXdR9SoFy+GhtqPeDJ9YDyMvwfNPwJrr4alfw+jLsPY7UOyFQk94PPig14RvwM9dGMaLPWFaoRiG+wahb2hvebEUli/2hrJiL1gRrBB
fpgQlIk1REmnSQGkAgJHyCAM9A/lstDQDDjo0vBYsDWW7d8D6n4Xvl1RegW1Pw65tUN4NG++F6p7p274VAKtLLoW68unbXHt0cYBdnai7OTZUd1kMzIWP3j/tq1USaVJ/Tz8AO/fszC+JpJkxKzzVlWbPKOx8AaplqFZCQqm8AqPbw6uyJ0yrxPJqOfwu7w5Phjng1fDC9w6715Unx7uU59LLwBR0cXyqu+y6ue5mzGrLapVEmlRLHG29uT5VPX0wdOS+5xMRmSb6xnqTakmk7TfXRUT2I0oiTeovhctZu8pKIiIiNUoiTdovLmeJiORMSaRJyRvrIiISKIk0SWciIiITKYk0STfWRUQmUhJp0szSTEA31kVEkpREmlSwAjNLM3U5S0QkQUmkBQM9A0oiIiIJSiItGOgZ0D0REZEEJZEW9Jf62VnWmYiISI2SSAva3jGViMh+RkmkBcnLWbsru7lvy314N7faKSLSZkoiLRgoDYw94nvLk7dw+b2Xc/fmuzsclYhI56gp+BYkL2fdtekuAK5/4HoWDC1gy44tzB+cz7yD5+XT86GISBdQEmlB7RHfjcMb2TC8gQuOvYCfb/w5y365bGyeghUY7B1kdt9sZvfNZmjGEP2lfvpKfeFV7GNmaWb6eBwuFUqUCiWKVqRYKFKyEsVCkaIVU8uVtESkU5REWtDf089IeYQ7N91J0YpcuehKlhy2hG0j2zhhzgls3r6ZLTu28NLoSwzvHmZ4dJhnX36WkfIIo+VRRiujjJRHqE5zj4CGhWRCgYLtfZlZGKbBcHJebPxy1A0XCuPL6taVXCZtvZNuv65sXCxY6Im39hO7RbXYBWly3Mz2ltctV5s3uY7kcmPDDaanbW/cOuuXq4slOZ4WS+r7qZ/e6P3U1psyPW1fmVDWZFezzS6bOl9al7GpRc3H1+x2cnl/Tc6Xut02vL/6+Q6fdTiz+2Y3FU+rlERaUOtnfeX6lZx2+GnMmTmHs+afNTZ90asX7XMd7s6e6p5xiWW0HJJLbbhcLVP2MpVqhYpXqFQr48bL1fKE8nK1TJUq7k7Vq1S9ipMYdh83veKVsbKx6Q2WGZs3UVZbR9WrE7abVpa23mq1On77DWJxd2o/oRIZN16bLiLpvvSWL3H+685vy7qVRFqw9KilbNq+ibn9czl3wbmZ1mFm9BZ76S32MjhjcJojFNibVJLJxUPmGTc+brqPLx8bTown1522jvptj223fv31yzVIihOmp2x7bLtp73dCxaQVTSxMe+Iwdb4ml00zle22Om/WZacUY7N1ndP7W3jIwqaWzUJJpAXzDp7HtW+5ttNhyD6MXTpq7mqCiEyB7siKiEhmSiIiIpKZkoiIiGSmJCIiIpkpiYiISGZKIiIikpmSiIiIZKYkIiIimdmB1h+Gmb0APNviYnOAbW0IZzp0a2yKqzWKq3XdGtv/Y1zz3H1u2oQDLolkYWYPuvvJnY4jTbfGprhao7ha162xHWhx6XKWiIhkpiQiIiKZKYk057udDmAS3Rqb4mqN4mpdt8Z2QMWleyIiIpKZzkRERCQzJREREclMSWQfzOwsM9tgZhvN7OoOxnGkmf3ezJ4ws8fN7OOxfLmZbTGzdfF1Tgdie8bMHovbfzCWHWJm95jZ0/F3ezp4bhzTwkSdrDOzl83sqk7Vl5mtNLPnzWx9oiy1jiz4ZtznHjWzxTnHdYOZ/T1u+w4zG4rlR5vZSKLuVuQcV8PPzsw+F+trg5mdmXNctyViesbM1sXyPOur0fGh/fuYu+vV4AUUgX8AxwC9wCPA8R2K5TBgcRw+CHgKOB5YDnyqw/X0DDCnrux64Oo4fDVwXYc/x38D8zpVX8DbgMXA+n3VEXAOcDehb8ZTgbU5x/UuoBSHr0vEdXRyvg7UV+pnF/8OHgFmAPPj32wxr7jqpn8N+EIH6qvR8aHt+5jORCZ3CrDR3Te5+yvArcB5nQjE3be6+8Nx+L/Ak8BrOxFLk84DborDNwH
ndzCWdwD/cPdWWyqYNu7+R+A/dcWN6ug84GYP7geGzOywvOJy99+4ezmO3g8c0Y5ttxrXJM4DbnX33e6+GdhI+NvNNS4zM+D9wI/bse3JTHJ8aPs+piQyudcC/0yM/4suOHCb2dHAImBtLPpYPCVdmfdlo8iB35jZQ2b2kVh2qLtvjcP/Bg7tQFw1FzH+D7vT9VXTqI66ab/7EOE/1pr5ZvY3M1tjZm/tQDxpn1231Ndbgefc/elEWe71VXd8aPs+piSynzGzWcDPgKvc/WXgO8AC4CRgK+F0Om+nu/ti4Gzgo2b2tuRED+fPHXmW3Mx6gXOBn8SibqivCTpZR42Y2TVAGfhRLNoKHOXui4BPAreY2cE5htSVn13CBxj/z0ru9ZVyfBjTrn1MSWRyW4AjE+NHxLKOMLMewg7yI3e/HcDdn3P3irtXge/RptP4ybj7lvj7eeCOGMNztdPj+Pv5vOOKzgYedvfnYowdr6+ERnXU8f3OzC4D3gNcHA8+xMtFL8bhhwj3Hl6fV0yTfHbdUF8l4ALgtlpZ3vWVdnwgh31MSWRyDwDHmtn8+B/tRcDqTgQSr7d+H3jS3W9MlCevY74PWF+/bJvjGjCzg2rDhJuy6wn1dGmc7VLgF3nGlTDuv8NO11edRnW0GrgkPkFzKrA9cUmi7czsLOAzwLnuvitRPtfMinH4GOBYYFOOcTX67FYDF5nZDDObH+P6a15xRe8E/u7u/6oV5FlfjY4P5LGP5fHkwP78IjzF8BThv4hrOhjH6YRT0UeBdfF1DvBD4LFYvho4LOe4jiE8GfMI8HitjoBXAb8FngbuBQ7pQJ0NAC8Cg4myjtQXIZFtBfYQrj9/uFEdEZ6Y+Xbc5x4DTs45ro2E6+W1/WxFnPfC+BmvAx4G3ptzXA0/O+CaWF8bgLPzjCuWrwKuqJs3z/pqdHxo+z6mZk9ERCQzXc4SEZHMlERERCQzJREREclMSURERDJTEhERkcyUREQmYWZfMbOlZna+mX2uwTzLzexTcfgyMzt8Grd/hpmdlhi/wswuma71i0yVkojI5N5MaITw7cAfm5j/MqClJBK/7dzIGcBYEnH3Fe5+cyvrF2knfU9EJIWZ3QCcyd6mxRcAm4Gfuvu1dfMuB3YQmsRfRWg+YgRYQmiO+0ZgFrANuMzdt5rZHwhfCDud8AW2p4DPE7oceBG4GJhJSGAV4AXgSkKLxDvc/atmdhKwAuiPMX7I3YfjutcCS4Ehwhfi/mRmJwA/iNsoABf6+MYCRVqmMxGRFO7+acK3pFcBbwIedfcT6xNI3TI/BR4ktDd1EqHxwm8By9z9jcBK4MuJRXrd/WR3/xrwZ+BUD4313Qp8xt2fISSJr7v7Se7+p7pN3gx81t1PJHzr+IuJaSV3PwW4KlF+BfCNGNvJhG9ci0zJZKfRIge6xYTmXI4j9M/QqoXAG4B7QtNGFAlNZtTclhg+Argttg/VSzjracjMBoEhd18Ti25ib0vFALUG+B4idI4E8BfgGjM7ArhdZyEyHZREROrEy0SrCAf2bYTLRRa7PV3i7iPNrgp43N2XNJi+MzH8LeBGd19tZmcQevGbit3xd4X4d+7ut5jZWuDdwK/M7HJ3/90UtyMHOF3OEqnj7uviJZ9aF6O/A86Ml5T2lUD+S+ieFEJjgHPNbAmEprrjfYk0g+xtivvSRHlyfckYtwPDiY6OPgisqZ8vKbYku8ndv0lozfXEfbwXkX1SEhFJYWZzgWEPfVcc5+5PNLnoKmBFPGspAsuA68zsEcKN9NMaLLcc+ImZPUQ4+6n5JfA+M1uX0jPepcANZvYooaOmhvdrovcD62NsbyDcUxGZEj2dJSIimelMREREMlMSERGRzJREREQkMyURERHJTElEREQyUxIREZHMlERERCSz/wFW0oULX5+V3wAAAABJRU5ErkJggg==\n", 392 | "text/plain": [ 393 | "
" 394 | ] 395 | }, 396 | "metadata": { 397 | "needs_background": "light" 398 | }, 399 | "output_type": "display_data" 400 | } 401 | ], 402 | "source": [ 403 | "from matplotlib import pyplot as plt\n", 404 | "plt.plot(range(1,len(funV1)+1), funV1, label='GDFixed')\n", 405 | "plt.plot(range(1,len(funV2)+1), funV2, label='GDBacktrack')\n", 406 | "plt.plot(range(1,len(funV3)+1), funV3, label='ProxGD')\n", 407 | "plt.legend()\n", 408 | "plt.xlabel(\"# Iterations\")\n", 409 | "plt.ylabel(\"Function Value\")" 410 | ] 411 | } 412 | ], 413 | "metadata": { 414 | "kernelspec": { 415 | "display_name": "Python 2", 416 | "language": "python", 417 | "name": "python2" 418 | }, 419 | "language_info": { 420 | "codemirror_mode": { 421 | "name": "ipython", 422 | "version": 3 423 | }, 424 | "file_extension": ".py", 425 | "mimetype": "text/x-python", 426 | "name": "python", 427 | "nbconvert_exporter": "python", 428 | "pygments_lexer": "ipython3", 429 | "version": "3.7.3" 430 | } 431 | }, 432 | "nbformat": 4, 433 | "nbformat_minor": 2 434 | } 435 | -------------------------------------------------------------------------------- /algorithms/README.txt: -------------------------------------------------------------------------------- 1 | - Simple Gradient Descent 2 | - Gradient Descent with Line Search 3 | - Nesterov's Accelerated Gradient 4 | - Proximal Gradient Descent 5 | 6 | -------------------------------------------------------------------------------- /functions/HingeLossTest.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import time" 11 | ] 12 | }, 13 | { 14 | "cell_type": "markdown", 15 | "metadata": {}, 16 | "source": [ 17 | "## Generate the Data\n", 18 | "\n", 19 | "We first generate a random dataset with number of features (m = 10) and number of instances (n = 100)\n", 20 
| "We also generate a random label vector y \\in {-1,1}" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": null, 26 | "metadata": {}, 27 | "outputs": [], 28 | "source": [ 29 | "n = 100\n", 30 | "m = 10\n", 31 | "\n", 32 | "X = np.random.rand(n,m)\n", 33 | "y = np.random.rand(n)\n", 34 | "y = np.random.rand(n)\n", 35 | "ybin = [(int(yi >= 0.5) - int(yi < 0.5)) for yi in y]\n", 36 | "y = np.array(ybin)\n", 37 | "w = np.random.rand(m, 1)" 38 | ] 39 | }, 40 | { 41 | "cell_type": "markdown", 42 | "metadata": {}, 43 | "source": [ 44 | "## A Simple naive Implementation of the Least Squares\n", 45 | "\n", 46 | "Below is a simple naive implementation of Least Square Loss. We directly plug in the formula with a simple for loop!" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": null, 52 | "metadata": {}, 53 | "outputs": [], 54 | "source": [ 55 | "def HingeLossNaive(w, X, y, lam):\n", 56 | " # Computes the cost function for all the training samples\n", 57 | " f = 0\n", 58 | " g = 0\n", 59 | " for i in range(len(X)):\n", 60 | " featureweightProd = np.dot(X[i],w)\n", 61 | " f = f + np.max([0, 1 - y[i]*featureweightProd])\n", 62 | " g = g - y[i]*X[i]*np.double(1 > y[i]*featureweightProd) \n", 63 | " f = f + 0.5*lam*np.sum(w*w)\n", 64 | " g = g + lam*w.reshape(1,-1)\n", 65 | " return [f, g] " 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "execution_count": null, 71 | "metadata": {}, 72 | "outputs": [], 73 | "source": [ 74 | "start = time.time()\n", 75 | "[f,g] = HingeLossNaive(w,X,y,1)\n", 76 | "end = time.time()\n", 77 | "print(\"Time Taken = \" + str(end - start))\n", 78 | "print(\"Function value = \" + str(f))\n", 79 | "print(\"Printing Gradient:\")\n", 80 | "print(g)" 81 | ] 82 | }, 83 | { 84 | "cell_type": "markdown", 85 | "metadata": {}, 86 | "source": [ 87 | "## For Loop in Python == Slow Code\n", 88 | "\n", 89 | "Great, we have a working code now. But while this code might be correct, is this going to be fast? 
We have a For loop in python which is clearly an issue!\n", 90 | "\n", 91 | "First let us see how slow the code is! Let us increase n to 10000000 and m to 1000, which are somewhat more realistic (though still far from real world)." 92 | ] 93 | }, 94 | { 95 | "cell_type": "code", 96 | "execution_count": null, 97 | "metadata": {}, 98 | "outputs": [], 99 | "source": [ 100 | "n = 1000000\n", 101 | "m = 100\n", 102 | "\n", 103 | "X = np.random.rand(n,m)\n", 104 | "y = np.random.rand(n)\n", 105 | "y = np.random.rand(n)\n", 106 | "ybin = [(int(yi >= 0.5) - int(yi < 0.5)) for yi in y]\n", 107 | "y = np.array(ybin)\n", 108 | "w = np.random.rand(m, 1)\n", 109 | "\n", 110 | "start = time.time()\n", 111 | "[f,g] = HingeLossNaive(w,X,y,1)\n", 112 | "end = time.time()\n", 113 | "print(\"Time Taken = \" + str(end - start))\n", 114 | "print(\"Function value = \" + str(f))\n", 115 | "print(g)" 116 | ] 117 | }, 118 | { 119 | "cell_type": "markdown", 120 | "metadata": {}, 121 | "source": [ 122 | "## Speeding up the code!\n", 123 | "\n", 124 | "With n = 10000000, it takes around 4.5 minutes to run a single function evaluation!\n", 125 | "\n", 126 | "Lets now vectorize the code below." 
127 | ] 128 | }, 129 | { 130 | "cell_type": "code", 131 | "execution_count": null, 132 | "metadata": {}, 133 | "outputs": [], 134 | "source": [ 135 | "Xw = np.matmul(X,w)\n", 136 | "yT = y.reshape(-1,1)\n", 137 | "yXw = np.multiply(yT,Xw)\n", 138 | "np.shape(yXw)\n", 139 | "#f = np.sum(np.max(0, 1 - yXw.T)) + 0.5*np.sum(w*w)\n", 140 | "#print(f)\n", 141 | "#ymul = -1*yT*np.double(1 > yXw) \n", 142 | "#print(np.shape(ymul.reshape(1,-1)))\n", 143 | "#print(np.shape(X))\n", 144 | "#g = np.matmul(ymul.reshape(1,-1),X).reshape(-1,1) + 1*w.reshape(-1,1)\n", 145 | "#print(g)" 146 | ] 147 | }, 148 | { 149 | "cell_type": "code", 150 | "execution_count": null, 151 | "metadata": {}, 152 | "outputs": [], 153 | "source": [ 154 | "def HingeLoss(w, X, y, lam):\n", 155 | " # Computes the cost function for all the training samples\n", 156 | " Xw = np.matmul(X,w)\n", 157 | " yT = y.reshape(-1,1)\n", 158 | " yXw = np.multiply(yT,Xw)\n", 159 | " f = np.sum(np.maximum(0, 1 - yXw.T)) + 0.5*np.sum(w*w)\n", 160 | " ymul = -1*yT*np.double(1 > yXw) \n", 161 | " g = np.matmul(ymul.reshape(1,-1),X).reshape(-1,1) + 1*w.reshape(-1,1)\n", 162 | " return [f, g]" 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": null, 168 | "metadata": {}, 169 | "outputs": [], 170 | "source": [ 171 | "start = time.time()\n", 172 | "[f,g] = HingeLoss(w,X,y,1)\n", 173 | "end = time.time()\n", 174 | "print(\"Time Taken = \" + str(end - start))\n", 175 | "print(\"Function value = \" + str(f))\n", 176 | "print(g)" 177 | ] 178 | }, 179 | { 180 | "cell_type": "markdown", 181 | "metadata": {}, 182 | "source": [ 183 | "## Checking gradient implementations!\n", 184 | "\n", 185 | "So far so good! But how do we verify if our gradient implementation is correct?\n", 186 | "We can test out our loss function analytically, but what if we make a mistake in computing the gradient? We can numerically compute the gradient to ensure it is correct." 
187 | ] 188 | }, 189 | { 190 | "cell_type": "code", 191 | "execution_count": null, 192 | "metadata": {}, 193 | "outputs": [], 194 | "source": [ 195 | "def LeastSquaresFun(w, X, y, lam):\n", 196 | " # Computes the cost function for all the training samples\n", 197 | " m = X.shape[0]\n", 198 | " Xw = np.matmul(X,w)\n", 199 | " Xwy = (Xw - y).reshape(-1,1)\n", 200 | " f = np.dot(Xwy.T,Xwy) + 0.5*lam*np.sum(w*w)\n", 201 | " return f\n", 202 | "\n", 203 | "def numericalGrad(funObj, w,epsilon):\n", 204 | " m = len(w)\n", 205 | " grad = np.zeros(m)\n", 206 | " for i in range(m):\n", 207 | " wp = np.copy(w)\n", 208 | " wn = np.copy(w)\n", 209 | " wp[i] = w[i] + epsilon\n", 210 | " wn[i] = w[i] - epsilon\n", 211 | " grad[i] = (funObj(wp) - funObj(wn))/(2*epsilon)\n", 212 | " return grad" 213 | ] 214 | }, 215 | { 216 | "cell_type": "code", 217 | "execution_count": null, 218 | "metadata": {}, 219 | "outputs": [], 220 | "source": [ 221 | "n = 100\n", 222 | "m = 10\n", 223 | "\n", 224 | "X = np.random.rand(n,m)\n", 225 | "wgen = np.random.rand(m)\n", 226 | "y = np.dot(X,wgen) + np.random.normal(0, 0.1, n)\n", 227 | "w = np.random.rand(m)\n", 228 | "\n", 229 | "funObj = lambda w: LeastSquaresFun(w,X,y,1)\n", 230 | "[f,g] = LeastSquares(w,X,y,1)\n", 231 | "gn = numericalGrad(funObj, w, 1e-10)\n", 232 | "fn = funObj(w)\n", 233 | "print(f)\n", 234 | "print(fn)\n", 235 | "print(gn)\n", 236 | "print(g)" 237 | ] 238 | } 239 | ], 240 | "metadata": { 241 | "kernelspec": { 242 | "display_name": "Python 3", 243 | "language": "python", 244 | "name": "python3" 245 | }, 246 | "language_info": { 247 | "codemirror_mode": { 248 | "name": "ipython", 249 | "version": 3 250 | }, 251 | "file_extension": ".py", 252 | "mimetype": "text/x-python", 253 | "name": "python", 254 | "nbconvert_exporter": "python", 255 | "pygments_lexer": "ipython3", 256 | "version": "3.7.3" 257 | } 258 | }, 259 | "nbformat": 4, 260 | "nbformat_minor": 2 261 | } 262 | 
-------------------------------------------------------------------------------- /functions/LeastSquaresTest.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import time" 11 | ] 12 | }, 13 | { 14 | "cell_type": "markdown", 15 | "metadata": {}, 16 | "source": [ 17 | "## Generate the Data\n", 18 | "\n", 19 | "We first generate a random dataset with number of features (m = 10) and number of instances (n = 100)\n", 20 | "We also generate a random label vector y \\in {-1,1}" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": 2, 26 | "metadata": {}, 27 | "outputs": [], 28 | "source": [ 29 | "n = 100\n", 30 | "m = 10\n", 31 | "\n", 32 | "X = np.random.rand(n,m)\n", 33 | "wgen = np.random.rand(m)\n", 34 | "y = np.dot(X,wgen) + np.random.normal(0, 0.1, n)\n", 35 | "w = np.random.rand(m)" 36 | ] 37 | }, 38 | { 39 | "cell_type": "markdown", 40 | "metadata": {}, 41 | "source": [ 42 | "## A Simple naive Implementation of the Least Squares\n", 43 | "\n", 44 | "Below is a simple naive implementation of Least Square Loss. We directly plug in the formula with a simple for loop!" 
45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 3, 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [ 53 | "def LeastSquaresNaive(w, X, y, lam):\n", 54 | " # Computes the cost function for all the training samples\n", 55 | " f = 0\n", 56 | " g = 0\n", 57 | " for i in range(len(X)):\n", 58 | " featureweightProd = np.dot(X[i],w)\n", 59 | " f = f + (featureweightProd - y[i])*(featureweightProd - y[i])\n", 60 | " g = g + 2*(featureweightProd - y[i])*X[i]\n", 61 | " f = f + 0.5*lam*np.sum(w*w)\n", 62 | " g = g + lam*w.reshape(1,-1)\n", 63 | " return [f, g] " 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": 4, 69 | "metadata": {}, 70 | "outputs": [ 71 | { 72 | "name": "stdout", 73 | "output_type": "stream", 74 | "text": [ 75 | "Time Taken = 0.0012369155883789062\n", 76 | "Function value = 18.11430323106446\n", 77 | "Printing Gradient:\n", 78 | "[[-26.40587054 -10.08174756 -20.06086287 -26.54846283 -24.77238833\n", 79 | " -19.75775787 -28.39635462 -27.87900945 -32.93241229 -30.65521977]]\n" 80 | ] 81 | } 82 | ], 83 | "source": [ 84 | "start = time.time()\n", 85 | "[f,g] = LeastSquaresNaive(w,X,y,1)\n", 86 | "end = time.time()\n", 87 | "print(\"Time Taken = \" + str(end - start))\n", 88 | "print(\"Function value = \" + str(f))\n", 89 | "print(\"Printing Gradient:\")\n", 90 | "print(g)" 91 | ] 92 | }, 93 | { 94 | "cell_type": "markdown", 95 | "metadata": {}, 96 | "source": [ 97 | "## For Loop in Python == Slow Code\n", 98 | "\n", 99 | "Great, we have a working code now. But while this code might be correct, is this going to be fast? We have a For loop in python which is clearly an issue!\n", 100 | "\n", 101 | "First let us see how slow the code is! Let us increase n to 10000000 and m to 1000, which are somewhat more realistic (though still far from real world)." 
102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": 5, 107 | "metadata": {}, 108 | "outputs": [ 109 | { 110 | "name": "stdout", 111 | "output_type": "stream", 112 | "text": [ 113 | "Time Taken = 3.6550180912017822\n", 114 | "Function value = 2979067.713990548\n" 115 | ] 116 | } 117 | ], 118 | "source": [ 119 | "n = 1000000\n", 120 | "m = 100\n", 121 | "\n", 122 | "X = np.random.rand(n,m)\n", 123 | "wgen = np.random.rand(m)\n", 124 | "y = np.dot(X,wgen) + np.random.normal(0, 0.1, n)\n", 125 | "w = np.random.rand(m)\n", 126 | "\n", 127 | "start = time.time()\n", 128 | "[f,g] = LeastSquaresNaive(w,X,y,1)\n", 129 | "end = time.time()\n", 130 | "print(\"Time Taken = \" + str(end - start))\n", 131 | "print(\"Function value = \" + str(f))" 132 | ] 133 | }, 134 | { 135 | "cell_type": "markdown", 136 | "metadata": {}, 137 | "source": [ 138 | "## Speeding up the code!\n", 139 | "\n", 140 | "With n = 10000000, it takes around 2 minutes to run a single function evaluation!\n", 141 | "\n", 142 | "Lets now vectorize the code below." 
143 | ] 144 | }, 145 | { 146 | "cell_type": "code", 147 | "execution_count": 6, 148 | "metadata": {}, 149 | "outputs": [], 150 | "source": [ 151 | "def LeastSquares(w, X, y, lam):\n", 152 | " # Computes the cost function for all the training samples\n", 153 | " m = X.shape[0]\n", 154 | " Xw = np.matmul(X,w)\n", 155 | " Xwy = (Xw - y).reshape(-1,1)\n", 156 | " f = np.dot(Xwy.T,Xwy) + 0.5*lam*np.sum(w*w)\n", 157 | " g = 2*np.dot(X.T,Xwy) + lam*w.reshape(-1,1)\n", 158 | " return [f, g]" 159 | ] 160 | }, 161 | { 162 | "cell_type": "code", 163 | "execution_count": 7, 164 | "metadata": {}, 165 | "outputs": [ 166 | { 167 | "name": "stdout", 168 | "output_type": "stream", 169 | "text": [ 170 | "Time Taken = 0.1029510498046875\n", 171 | "Function value = [[2979067.71399057]]\n", 172 | "(100, 1)\n" 173 | ] 174 | } 175 | ], 176 | "source": [ 177 | "start = time.time()\n", 178 | "[f,g] = LeastSquares(w,X,y,1)\n", 179 | "end = time.time()\n", 180 | "print(\"Time Taken = \" + str(end - start))\n", 181 | "print(\"Function value = \" + str(f))\n", 182 | "print(np.shape(g))" 183 | ] 184 | }, 185 | { 186 | "cell_type": "markdown", 187 | "metadata": {}, 188 | "source": [ 189 | "## Checking gradient implementations!\n", 190 | "\n", 191 | "So far so good! But how do we verify if our gradient implementation is correct?\n", 192 | "We can test out our loss function analytically, but what if we make a mistake in computing the gradient? We can numerically compute the gradient to ensure it is correct." 
193 | ] 194 | }, 195 | { 196 | "cell_type": "code", 197 | "execution_count": 8, 198 | "metadata": {}, 199 | "outputs": [], 200 | "source": [ 201 | "def LeastSquaresFun(w, X, y, lam):\n", 202 | " # Computes the cost function for all the training samples\n", 203 | " m = X.shape[0]\n", 204 | " Xw = np.matmul(X,w)\n", 205 | " Xwy = (Xw - y).reshape(-1,1)\n", 206 | " f = np.dot(Xwy.T,Xwy) + 0.5*lam*np.sum(w*w)\n", 207 | " return f\n", 208 | "\n", 209 | "def numericalGrad(funObj, w,epsilon):\n", 210 | " m = len(w)\n", 211 | " grad = np.zeros(m)\n", 212 | " for i in range(m):\n", 213 | " wp = np.copy(w)\n", 214 | " wn = np.copy(w)\n", 215 | " wp[i] = w[i] + epsilon\n", 216 | " wn[i] = w[i] - epsilon\n", 217 | " grad[i] = (funObj(wp) - funObj(wn))/(2*epsilon)\n", 218 | " return grad" 219 | ] 220 | }, 221 | { 222 | "cell_type": "code", 223 | "execution_count": 9, 224 | "metadata": {}, 225 | "outputs": [ 226 | { 227 | "name": "stdout", 228 | "output_type": "stream", 229 | "text": [ 230 | "[[29.21435641]]\n", 231 | "[[29.21435641]]\n", 232 | "[32.52422331 29.83098213 40.37449486 45.67565881 30.93648004 52.74085169\n", 233 | " 38.37445917 41.6134327 38.7920096 46.16868665]\n", 234 | "[[32.52420457]\n", 235 | " [29.8309785 ]\n", 236 | " [40.37451531]\n", 237 | " [45.67562758]\n", 238 | " [30.93648274]\n", 239 | " [52.74079572]\n", 240 | " [38.3744738 ]\n", 241 | " [41.61338888]\n", 242 | " [38.79201958]\n", 243 | " [46.16868019]]\n" 244 | ] 245 | } 246 | ], 247 | "source": [ 248 | "n = 100\n", 249 | "m = 10\n", 250 | "\n", 251 | "X = np.random.rand(n,m)\n", 252 | "wgen = np.random.rand(m)\n", 253 | "y = np.dot(X,wgen) + np.random.normal(0, 0.1, n)\n", 254 | "w = np.random.rand(m)\n", 255 | "\n", 256 | "funObj = lambda w: LeastSquaresFun(w,X,y,1)\n", 257 | "[f,g] = LeastSquares(w,X,y,1)\n", 258 | "gn = numericalGrad(funObj, w, 1e-10)\n", 259 | "fn = funObj(w)\n", 260 | "print(f)\n", 261 | "print(fn)\n", 262 | "print(gn)\n", 263 | "print(g)" 264 | ] 265 | } 266 | ], 267 | 
"metadata": { 268 | "kernelspec": { 269 | "display_name": "Python 3", 270 | "language": "python", 271 | "name": "python3" 272 | }, 273 | "language_info": { 274 | "codemirror_mode": { 275 | "name": "ipython", 276 | "version": 3 277 | }, 278 | "file_extension": ".py", 279 | "mimetype": "text/x-python", 280 | "name": "python", 281 | "nbconvert_exporter": "python", 282 | "pygments_lexer": "ipython3", 283 | "version": "3.7.3" 284 | } 285 | }, 286 | "nbformat": 4, 287 | "nbformat_minor": 2 288 | } 289 | -------------------------------------------------------------------------------- /functions/LogisticLossTest.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 2, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np\n", 10 | "import time" 11 | ] 12 | }, 13 | { 14 | "cell_type": "markdown", 15 | "metadata": {}, 16 | "source": [ 17 | "## Generate the Data\n", 18 | "\n", 19 | "We first generate a random dataset with number of features (m = 10) and number of instances (n = 100)\n", 20 | "We also generate a random label vector y \\in {-1,1}" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": 5, 26 | "metadata": {}, 27 | "outputs": [ 28 | { 29 | "name": "stdout", 30 | "output_type": "stream", 31 | "text": [ 32 | "[ 1 1 -1 -1 -1 1 -1 1 1 1 -1 1 -1 1 -1 1 1 1 -1 -1 1 1 -1 -1\n", 33 | " -1 -1 -1 -1 1 -1 -1 1 -1 -1 1 1 1 -1 1 -1 -1 -1 1 1 1 -1 1 1\n", 34 | " 1 -1 1 -1 1 -1 -1 -1 -1 1 -1 1 1 -1 1 1 -1 1 1 -1 -1 1 1 -1\n", 35 | " 1 1 1 -1 1 1 1 -1 -1 1 1 -1 -1 1 1 -1 -1 -1 -1 1 -1 -1 -1 1\n", 36 | " 1 -1 -1 -1]\n", 37 | "[[0.15081579 0.02060775 0.76881967 0.72560174 0.46650267 0.98982589\n", 38 | " 0.91384915 0.81710788 0.29143393 0.55795121]\n", 39 | " [0.86394994 0.90318432 0.78147468 0.81914016 0.1059121 0.44348636\n", 40 | " 0.74783135 0.17751439 0.95526117 0.60228485]\n", 41 | " [0.68263416 0.28028993 0.73856586 0.96458103 
0.48799289 0.31165456\n", 42 | " 0.08107802 0.89068509 0.36414014 0.01643942]\n", 43 | " [0.49802953 0.38370109 0.82619215 0.98252769 0.35166422 0.46767241\n", 44 | " 0.34375141 0.11204718 0.57158609 0.63917095]\n", 45 | " [0.18790744 0.54965513 0.99397204 0.20613624 0.57725534 0.73939673\n", 46 | " 0.16331439 0.65325905 0.34976847 0.4567399 ]\n", 47 | " [0.61553026 0.61245476 0.40400442 0.98580638 0.68637067 0.80192352\n", 48 | " 0.7057647 0.96929697 0.45785726 0.05656022]\n", 49 | " [0.15378803 0.88738392 0.56885074 0.37457509 0.63189775 0.91899163\n", 50 | " 0.94779187 0.0703395 0.2099531 0.50982056]\n", 51 | " [0.49422764 0.95925808 0.86412938 0.36245742 0.14789465 0.69404695\n", 52 | " 0.74122011 0.54189698 0.18722263 0.50705846]\n", 53 | " [0.49042015 0.40316866 0.76260622 0.34200398 0.48631196 0.4694385\n", 54 | " 0.2602999 0.88573128 0.10700275 0.18256179]\n", 55 | " [0.90415737 0.81666418 0.8540644 0.95876546 0.33759363 0.53350651\n", 56 | " 0.60018472 0.02201842 0.21450841 0.20511974]\n", 57 | " [0.87543599 0.09669685 0.45376799 0.75592491 0.25840655 0.53541738\n", 58 | " 0.11306012 0.27945036 0.56671048 0.68377008]\n", 59 | " [0.9180633 0.66882064 0.3406679 0.50124086 0.47235231 0.61494773\n", 60 | " 0.35199386 0.98686249 0.18485418 0.73515661]\n", 61 | " [0.15062198 0.92140241 0.80701602 0.81049142 0.00115152 0.72176245\n", 62 | " 0.8411878 0.22283606 0.9609365 0.83310077]\n", 63 | " [0.74772602 0.85420895 0.90721657 0.53653877 0.90049061 0.31412872\n", 64 | " 0.30477273 0.79008855 0.87818515 0.74609516]\n", 65 | " [0.4672885 0.68839039 0.83136809 0.25340632 0.75576629 0.98211634\n", 66 | " 0.57240517 0.33442485 0.36690828 0.09833282]\n", 67 | " [0.15809041 0.71236439 0.38040214 0.71037227 0.21614151 0.863222\n", 68 | " 0.01161398 0.56182743 0.61788184 0.73949381]\n", 69 | " [0.07535999 0.3291597 0.02665491 0.7350559 0.8118688 0.48510404\n", 70 | " 0.66934082 0.79521452 0.86534658 0.62536516]\n", 71 | " [0.5696147 0.34409937 0.70515236 0.69744395 
0.98414841 0.06508207\n", 72 | " 0.97233159 0.14439528 0.98135178 0.50612845]\n", 73 | " [0.03389694 0.42003621 0.6006504 0.82574441 0.87668729 0.09146244\n", 74 | " 0.82510403 0.31747265 0.87242905 0.86736503]\n", 75 | " [0.12842019 0.20837674 0.7774347 0.9148231 0.36521668 0.18638835\n", 76 | " 0.83628288 0.74894916 0.89361158 0.69087121]\n", 77 | " [0.53366886 0.35109523 0.11203665 0.28684488 0.17329334 0.73815186\n", 78 | " 0.21001316 0.91701312 0.34136749 0.53893868]\n", 79 | " [0.23815314 0.14926395 0.94751564 0.43055888 0.06652178 0.07255165\n", 80 | " 0.15448903 0.58142142 0.065054 0.79929069]\n", 81 | " [0.14856593 0.10072318 0.95227488 0.25417441 0.1448282 0.85369206\n", 82 | " 0.49852392 0.90281562 0.30525798 0.15824039]\n", 83 | " [0.27422092 0.95445014 0.96920162 0.39296414 0.59668096 0.70311756\n", 84 | " 0.12110895 0.17886279 0.97332931 0.79850841]\n", 85 | " [0.9038036 0.93717946 0.20839064 0.88387182 0.94141587 0.60518385\n", 86 | " 0.49947413 0.05809995 0.61564694 0.66572321]\n", 87 | " [0.48885243 0.33612467 0.88892477 0.51824355 0.34232841 0.9185741\n", 88 | " 0.26140817 0.697149 0.00671581 0.74222663]\n", 89 | " [0.22929496 0.41802644 0.10973218 0.61560231 0.42246689 0.23694141\n", 90 | " 0.27376503 0.86636963 0.12088821 0.14420129]\n", 91 | " [0.44091856 0.43595142 0.51386613 0.61078379 0.12243992 0.62097763\n", 92 | " 0.72674161 0.79790423 0.60872336 0.98029798]\n", 93 | " [0.29175415 0.79761027 0.72897983 0.27684871 0.06402785 0.98239855\n", 94 | " 0.42964625 0.53499877 0.7346432 0.81249562]\n", 95 | " [0.99762639 0.54703507 0.26469741 0.6648704 0.66448966 0.33835094\n", 96 | " 0.42589995 0.86022828 0.09003161 0.45149924]\n", 97 | " [0.68775466 0.36544723 0.99066554 0.612593 0.82442568 0.42981277\n", 98 | " 0.82997508 0.43772405 0.41684529 0.50404119]\n", 99 | " [0.09694504 0.55957835 0.79325614 0.59066279 0.4589773 0.35376927\n", 100 | " 0.75515759 0.35761892 0.28148166 0.60876288]\n", 101 | " [0.76620517 0.50641326 0.63212151 0.49472254 
0.82812568 0.60584475\n", 102 | " 0.38841267 0.56282148 0.97855796 0.42203269]\n", 103 | " [0.9565439 0.97806428 0.51013511 0.47953926 0.77274803 0.95516173\n", 104 | " 0.96550984 0.30571265 0.7534045 0.89085851]\n", 105 | " [0.8564249 0.98140248 0.58972607 0.42305435 0.40893588 0.99584674\n", 106 | " 0.52505101 0.36031057 0.17462077 0.97383493]\n", 107 | " [0.31532905 0.80575245 0.52754843 0.97916433 0.6566001 0.91767103\n", 108 | " 0.80142807 0.63801812 0.13654839 0.49656669]\n", 109 | " [0.05241012 0.43063631 0.85208686 0.61750084 0.8883643 0.6082281\n", 110 | " 0.63228284 0.99971805 0.31528238 0.11248778]\n", 111 | " [0.3516571 0.86414141 0.88732963 0.12788129 0.89660523 0.31340897\n", 112 | " 0.9413197 0.34026879 0.12840475 0.06304223]\n", 113 | " [0.58766523 0.07334131 0.97527575 0.34464359 0.49039575 0.48225383\n", 114 | " 0.1849598 0.75227376 0.36126531 0.62224039]\n", 115 | " [0.03266532 0.00911 0.08600772 0.25240737 0.63340264 0.25988349\n", 116 | " 0.22705018 0.09880909 0.94849126 0.7236524 ]\n", 117 | " [0.38596105 0.82192061 0.65875965 0.61824955 0.93582635 0.56216887\n", 118 | " 0.12679865 0.68972849 0.18114718 0.48202791]\n", 119 | " [0.53664303 0.75775355 0.3079981 0.42542869 0.84247211 0.66357127\n", 120 | " 0.52595421 0.65648777 0.08626743 0.15465527]\n", 121 | " [0.39620231 0.92506723 0.73392596 0.68747504 0.89188813 0.83926433\n", 122 | " 0.14180986 0.72137587 0.16302076 0.65328961]\n", 123 | " [0.10979038 0.26026927 0.99593821 0.54758466 0.44429088 0.6481904\n", 124 | " 0.31818729 0.34156364 0.17740772 0.80008603]\n", 125 | " [0.98614614 0.28259321 0.27757007 0.36475997 0.90972793 0.39054735\n", 126 | " 0.26900204 0.39676712 0.57265151 0.21115958]\n", 127 | " [0.72184968 0.97381811 0.88600505 0.23073937 0.11833531 0.09477845\n", 128 | " 0.84434557 0.28882503 0.74845584 0.21635722]\n", 129 | " [0.28984432 0.76384092 0.33881152 0.73067983 0.40677298 0.65022436\n", 130 | " 0.66625292 0.00682215 0.16694174 0.52961786]\n", 131 | " [0.99493372 
0.34787581 0.15484845 0.53343817 0.25643502 0.39519103\n", 132 | " 0.86128651 0.74738043 0.19554773 0.36384356]\n", 133 | " [0.24256352 0.63456489 0.59673422 0.63727451 0.28752104 0.41077445\n", 134 | " 0.18182157 0.16556496 0.54487561 0.46790051]\n", 135 | " [0.18383869 0.32203149 0.57841781 0.9574756 0.75478043 0.81608851\n", 136 | " 0.1566852 0.29107981 0.08866971 0.60854146]\n", 137 | " [0.81080965 0.10942493 0.56086568 0.58000453 0.65429026 0.75882658\n", 138 | " 0.68794718 0.39055171 0.99837691 0.42474002]\n", 139 | " [0.96057278 0.02631284 0.53792768 0.20636392 0.54649082 0.23407777\n", 140 | " 0.5714685 0.32063855 0.27114919 0.31033868]\n", 141 | " [0.33229826 0.12076475 0.01815149 0.12172731 0.49226861 0.14637234\n", 142 | " 0.47111647 0.08516909 0.7980051 0.22890693]\n", 143 | " [0.94532076 0.74540643 0.24788903 0.20687326 0.54168595 0.33782576\n", 144 | " 0.18245814 0.13947442 0.73821552 0.73481445]\n", 145 | " [0.38867268 0.66887301 0.30347465 0.41258746 0.4694149 0.03854983\n", 146 | " 0.34630716 0.08505476 0.42185288 0.75459772]\n", 147 | " [0.49075835 0.01964409 0.79279822 0.96051514 0.28435455 0.10329458\n", 148 | " 0.65072794 0.3814806 0.57992734 0.60055211]\n", 149 | " [0.1851037 0.69305733 0.13320545 0.8449315 0.46676122 0.29323117\n", 150 | " 0.60368791 0.48259803 0.89970219 0.7681392 ]\n", 151 | " [0.37372667 0.88618242 0.73580166 0.37697396 0.11711645 0.48556035\n", 152 | " 0.63945846 0.68795012 0.46154467 0.77908143]\n", 153 | " [0.76750642 0.00930551 0.89823227 0.95398014 0.9107683 0.27980534\n", 154 | " 0.17135234 0.72854585 0.27200817 0.65420378]\n", 155 | " [0.28904174 0.9031137 0.39589914 0.84730449 0.93511883 0.8839768\n", 156 | " 0.97481342 0.81821533 0.8825361 0.93910619]\n", 157 | " [0.59735696 0.44153898 0.7190902 0.7008601 0.88561751 0.50659644\n", 158 | " 0.50525217 0.38816282 0.87174965 0.32638283]\n", 159 | " [0.93044485 0.46037723 0.49402849 0.31524041 0.03100103 0.21258426\n", 160 | " 0.15143389 0.010853 0.2902692 
0.64278252]\n", 161 | " [0.29923784 0.0448027 0.51333748 0.89929923 0.88408837 0.91606295\n", 162 | " 0.35729563 0.39626187 0.2379128 0.64818096]\n", 163 | " [0.06207415 0.11019885 0.98887958 0.87624589 0.321871 0.56445675\n", 164 | " 0.03181735 0.48102748 0.0546243 0.65131603]\n", 165 | " [0.7381504 0.33533281 0.19630374 0.50274241 0.80636755 0.59245469\n", 166 | " 0.04274858 0.90741287 0.57248406 0.53527741]\n", 167 | " [0.82799448 0.78070914 0.02821645 0.28230751 0.4474667 0.10874\n", 168 | " 0.87756892 0.72785564 0.37689711 0.66837769]\n", 169 | " [0.53783132 0.22086741 0.31933338 0.42460388 0.99333961 0.67425711\n", 170 | " 0.91120082 0.4531589 0.82659041 0.15887741]\n", 171 | " [0.02256037 0.97911003 0.25935157 0.02226363 0.88662738 0.37051827\n", 172 | " 0.22187647 0.01725751 0.96992986 0.71551153]\n", 173 | " [0.35942887 0.06095493 0.33485194 0.72391839 0.33296222 0.46839733\n", 174 | " 0.29581757 0.33338534 0.4481541 0.14454273]\n", 175 | " [0.57402352 0.88587164 0.17247552 0.43013008 0.68498632 0.70756068\n", 176 | " 0.77421288 0.02710013 0.26096935 0.19799809]\n", 177 | " [0.32110049 0.82177609 0.72167422 0.54103508 0.99910456 0.13642862\n", 178 | " 0.15602738 0.09673836 0.95033286 0.41033086]\n", 179 | " [0.46861752 0.73434432 0.14466662 0.07144511 0.71992345 0.25887271\n", 180 | " 0.28929082 0.70623927 0.71071436 0.00323322]\n", 181 | " [0.13327557 0.9081652 0.30079265 0.9185565 0.67922266 0.90396095\n", 182 | " 0.44227169 0.76385528 0.32704301 0.19985448]\n", 183 | " [0.39399036 0.91897027 0.10552592 0.73677225 0.95234161 0.84559137\n", 184 | " 0.22243543 0.63171405 0.9545032 0.2470448 ]\n", 185 | " [0.74559273 0.13285235 0.09087807 0.01441144 0.45178314 0.46161244\n", 186 | " 0.28154838 0.1578316 0.60282596 0.34179827]\n", 187 | " [0.83757455 0.28511733 0.41423408 0.85564533 0.87562223 0.66890484\n", 188 | " 0.54135365 0.84476683 0.73811782 0.25935492]\n", 189 | " [0.89013008 0.1077483 0.27736242 0.35453589 0.33665939 0.29722096\n", 190 | " 
0.12509471 0.83656328 0.12469217 0.35835712]\n", 191 | " [0.19500348 0.59787844 0.51824233 0.87959082 0.93557225 0.71772766\n", 192 | " 0.10654139 0.16682067 0.35441476 0.73874453]\n", 193 | " [0.24800677 0.08928022 0.27929397 0.37469936 0.67961741 0.46510035\n", 194 | " 0.18719554 0.55721621 0.89745099 0.30844033]\n", 195 | " [0.44801088 0.28863763 0.55476367 0.92340172 0.06432689 0.16546185\n", 196 | " 0.77906672 0.9945446 0.21871902 0.14143689]\n", 197 | " [0.67978591 0.20732342 0.75136882 0.04417352 0.46554266 0.5837054\n", 198 | " 0.64001526 0.14999122 0.01318914 0.71283091]\n", 199 | " [0.29847867 0.83040856 0.7803665 0.8348507 0.32972064 0.6406533\n", 200 | " 0.30607461 0.9964972 0.5044127 0.97817459]\n", 201 | " [0.35680942 0.84404825 0.51509013 0.10370779 0.83765133 0.91008325\n", 202 | " 0.03542499 0.64370778 0.28976983 0.76411447]\n", 203 | " [0.94865501 0.54694666 0.01836578 0.90248535 0.96646505 0.33310367\n", 204 | " 0.13414138 0.27208923 0.80180008 0.54991031]\n", 205 | " [0.30429841 0.76333426 0.01485642 0.12887294 0.98431904 0.53296599\n", 206 | " 0.17368955 0.95112522 0.68062443 0.31217883]\n", 207 | " [0.27497356 0.90416082 0.10232113 0.55252881 0.91237104 0.40258708\n", 208 | " 0.73097085 0.24531319 0.11626053 0.46886287]\n", 209 | " [0.31085124 0.20101009 0.61765216 0.69831077 0.19009771 0.17496637\n", 210 | " 0.73029228 0.74905952 0.63611587 0.54213331]\n", 211 | " [0.97853099 0.09315488 0.19628694 0.83877845 0.89591182 0.17932265\n", 212 | " 0.73363528 0.01204228 0.99266682 0.09446083]\n", 213 | " [0.53401515 0.07717432 0.22220255 0.8550433 0.75125644 0.45540716\n", 214 | " 0.1528508 0.59100156 0.1386998 0.43933924]\n", 215 | " [0.67563467 0.92440724 0.58916281 0.06602604 0.38125242 0.20692042\n", 216 | " 0.28491161 0.90540683 0.90127137 0.36089936]\n", 217 | " [0.65975041 0.1551392 0.43828457 0.32021911 0.86548361 0.58647614\n", 218 | " 0.50944361 0.743662 0.75048125 0.38924531]\n", 219 | " [0.42311061 0.11841456 0.00392209 0.4854539 
0.39777286 0.6857571\n", 220 | " 0.58300033 0.18205163 0.41969084 0.7415865 ]\n", 221 | " [0.49009937 0.0837165 0.09114982 0.68634383 0.78962074 0.01117273\n", 222 | " 0.24463515 0.71897868 0.95139674 0.22583883]\n", 223 | " [0.04329357 0.88331014 0.33355444 0.7597401 0.69667437 0.86553551\n", 224 | " 0.07565831 0.66924024 0.16302389 0.72846505]\n", 225 | " [0.04136035 0.13705953 0.73973672 0.90780428 0.94694561 0.65048365\n", 226 | " 0.23432905 0.41357856 0.060315 0.00539698]\n", 227 | " [0.0810388 0.64566237 0.30763682 0.14034469 0.85647749 0.51785877\n", 228 | " 0.4791941 0.38914424 0.74298705 0.61855507]\n", 229 | " [0.05600949 0.85828769 0.57800569 0.75586877 0.23074984 0.43463464\n", 230 | " 0.59481337 0.30910942 0.43623723 0.65721967]\n", 231 | " [0.51546502 0.80471988 0.44852733 0.5531339 0.90974801 0.87400139\n", 232 | " 0.98851505 0.54747228 0.95295806 0.91964384]\n", 233 | " [0.30357634 0.57161643 0.46857144 0.98923952 0.96909309 0.42412481\n", 234 | " 0.50985028 0.21782108 0.07341004 0.3426428 ]\n", 235 | " [0.94980361 0.39952113 0.70864687 0.79477718 0.47542038 0.23614131\n", 236 | " 0.08668472 0.73100656 0.61389363 0.36876559]]\n" 237 | ] 238 | } 239 | ], 240 | "source": [ 241 | "n = 100 # Number of instances\n", 242 | "m = 10 # Number of Features \n", 243 | "\n", 244 | "X = np.random.rand(n,m)\n", 245 | "y = np.random.rand(n)\n", 246 | "y = np.random.rand(n)\n", 247 | "ybin = [(int(yi >= 0.5) - int(yi < 0.5)) for yi in y]\n", 248 | "y = np.array(ybin)\n", 249 | "w = np.random.rand(m, 1)\n", 250 | "print(y)\n", 251 | "print(X)" 252 | ] 253 | }, 254 | { 255 | "cell_type": "markdown", 256 | "metadata": {}, 257 | "source": [ 258 | "## A Simple naive Implementation of the Logistic Loss \n", 259 | "\n", 260 | "Below is a simple naive implementation of Logistic Loss. We directly plug in the formula with a simple for loop!" 
In practice we have a much larger number of features and instances.
The main reason is that the naive implementation is not numerically stable in its evaluation of log-sum-exp.
Once -y*x*w becomes large and positive, exp(-y*x*w) overflows to Inf, and log(1 + Inf) is Inf!
However, every time we write new code, we should ensure it is correct, and the best way to do that is to check against a previous working version!
0.5)) for yi in y]\n", 475 | "y = np.array(ybin)\n", 476 | "w = np.random.rand(m, 1)\n", 477 | "\n", 478 | "start = time.time()\n", 479 | "[f1,g1] = LogisticLossNaive(w,X,y,1)\n", 480 | "end = time.time()\n", 481 | "print(\"Time Taken = \" + str(end - start))\n", 482 | "print(\"Function value Naive = \" + str(f1))\n", 483 | "print(\"Printing Gradient Naive:\")\n", 484 | "print(g1)\n", 485 | "\n", 486 | "start = time.time()\n", 487 | "[f2,g2] = LogisticLossFor(w,X,y,1)\n", 488 | "end = time.time()\n", 489 | "print(\"Time Taken = \" + str(end - start))\n", 490 | "print(\"Function value For = \" + str(f2))\n", 491 | "print(\"Printing Gradient For:\")\n", 492 | "print(g2)" 493 | ] 494 | }, 495 | { 496 | "cell_type": "markdown", 497 | "metadata": {}, 498 | "source": [ 499 | "## For Loop in Python == Slow Code\n", 500 | "\n", 501 | "Great, we have fixed the Inf issue now! But while this code might be correct, is this going to be fast? We have a For loop in python which is clearly an issue!\n", 502 | "\n", 503 | "First let us see how slow the code is! Let us increase n to 1000000 and m to 1000, which are somewhat more realistic (though still far from real world)." 
With each 10x increase in m or n, the time taken grows by roughly 10x as well: the cost is linear in both m and n (O(n*m) overall), which quickly becomes prohibitive!
548 | ] 549 | }, 550 | { 551 | "cell_type": "code", 552 | "execution_count": 9, 553 | "metadata": {}, 554 | "outputs": [], 555 | "source": [ 556 | "def LogisticLoss(w, X, y, lam):\n", 557 | " # Computes the cost function for all the training samples\n", 558 | " m = X.shape[0]\n", 559 | " Xw = np.matmul(X,w)\n", 560 | " yT = y.reshape(-1,1)\n", 561 | " yXw = np.multiply(yT,Xw)\n", 562 | " f = np.sum(np.logaddexp(0,-yXw)) + 0.5*lam*np.sum(w*w)\n", 563 | " gMul = np.exp(-yXw)/(1 + np.exp(-yXw))\n", 564 | " ymul = -1*yT*gMul\n", 565 | " g = np.matmul(ymul.reshape(1,-1),X) + lam*w.reshape(1,-1)\n", 566 | " #g = np.dot(X.T,ymul) + lam*w.reshape(1,-1)\n", 567 | " g = g.reshape(-1,1)\n", 568 | " return [f, g]" 569 | ] 570 | }, 571 | { 572 | "cell_type": "code", 573 | "execution_count": 12, 574 | "metadata": {}, 575 | "outputs": [ 576 | { 577 | "name": "stdout", 578 | "output_type": "stream", 579 | "text": [ 580 | "Function value = 133.24380504169818\n", 581 | "Function value = [133.24380504]\n" 582 | ] 583 | } 584 | ], 585 | "source": [ 586 | "n = 100 # Number of instances\n", 587 | "m = 10 # Number of Features \n", 588 | "\n", 589 | "X = np.random.rand(n,m)\n", 590 | "y = np.random.rand(n)\n", 591 | "y = np.random.rand(n)\n", 592 | "ybin = [(int(yi >= 0.5) - int(yi < 0.5)) for yi in y]\n", 593 | "y = np.array(ybin)\n", 594 | "w = np.random.rand(m, 1)\n", 595 | "\n", 596 | "[f1,g1] = LogisticLoss(w,X,y,1)\n", 597 | "print(\"Function value = \" + str(f1))\n", 598 | "\n", 599 | "[f2,g2] = LogisticLossFor(w,X,y,1)\n", 600 | "print(\"Function value = \" + str(f2))\n" 601 | ] 602 | }, 603 | { 604 | "cell_type": "markdown", 605 | "metadata": {}, 606 | "source": [ 607 | "## Checking gradient implementations!\n", 608 | "\n", 609 | "So far so good! But how do we verify if our gradient implementation is correct?\n", 610 | "We can test out our loss function analytically, but what if we make a mistake in computing the gradient? 
We can numerically compute the gradient to ensure it is correct." 611 | ] 612 | }, 613 | { 614 | "cell_type": "code", 615 | "execution_count": 13, 616 | "metadata": {}, 617 | "outputs": [], 618 | "source": [ 619 | "def LogisticLossFun(w, X, y, lam):\n", 620 | " # Computes the cost function for all the training samples\n", 621 | " m = X.shape[0]\n", 622 | " Xw = X.dot(w)\n", 623 | " yT = y.reshape(-1,1)\n", 624 | " yXw = np.multiply(yT,Xw) \n", 625 | " f = np.sum(np.logaddexp(0,-yXw)) + 0.5*lam*np.sum(w*w)\n", 626 | " return f\n", 627 | "\n", 628 | "\n", 629 | "def numericalGrad(funObj, w,epsilon):\n", 630 | " m = len(w)\n", 631 | " grad = np.zeros(m)\n", 632 | " for i in range(m):\n", 633 | " wp = np.copy(w)\n", 634 | " wn = np.copy(w)\n", 635 | " wp[i] = w[i] + epsilon\n", 636 | " wn[i] = w[i] - epsilon\n", 637 | " grad[i] = (funObj(wp) - funObj(wn))/(2*epsilon)\n", 638 | " return grad" 639 | ] 640 | }, 641 | { 642 | "cell_type": "code", 643 | "execution_count": 14, 644 | "metadata": {}, 645 | "outputs": [ 646 | { 647 | "name": "stdout", 648 | "output_type": "stream", 649 | "text": [ 650 | "123.24355436930067\n", 651 | "123.24355436930067\n", 652 | "[20.91184115 21.99328719 21.94660453 18.89297607 19.07018543 21.4787832\n", 653 | " 18.11095274 20.62350291 20.89137752 19.96859567]\n", 654 | "[[20.91181858]\n", 655 | " [21.99332411]\n", 656 | " [21.94641886]\n", 657 | " [18.89291572]\n", 658 | " [19.07008602]\n", 659 | " [21.47886715]\n", 660 | " [18.1108865 ]\n", 661 | " [20.62333919]\n", 662 | " [20.89130566]\n", 663 | " [19.96848919]]\n" 664 | ] 665 | } 666 | ], 667 | "source": [ 668 | "n = 100\n", 669 | "m = 10\n", 670 | "\n", 671 | "X = np.random.rand(n,m)\n", 672 | "y = np.random.rand(n)\n", 673 | "y = np.random.rand(n)\n", 674 | "ybin = [(int(yi >= 0.5) - int(yi < 0.5)) for yi in y]\n", 675 | "y = np.array(ybin)\n", 676 | "w = np.random.rand(m, 1)\n", 677 | "\n", 678 | "funObj = lambda w: LogisticLossFun(w,X,y,1)\n", 679 | "[f,g] = LogisticLoss(w,X,y,1)\n", 
680 | "gn = numericalGrad(funObj, w, 1e-10)\n", 681 | "fn = funObj(w)\n", 682 | "print(f)\n", 683 | "print(fn)\n", 684 | "print(gn)\n", 685 | "print(g)" 686 | ] 687 | } 688 | ], 689 | "metadata": { 690 | "kernelspec": { 691 | "display_name": "Python 3", 692 | "language": "python", 693 | "name": "python3" 694 | }, 695 | "language_info": { 696 | "codemirror_mode": { 697 | "name": "ipython", 698 | "version": 3 699 | }, 700 | "file_extension": ".py", 701 | "mimetype": "text/x-python", 702 | "name": "python", 703 | "nbconvert_exporter": "python", 704 | "pygments_lexer": "ipython3", 705 | "version": "3.7.0" 706 | } 707 | }, 708 | "nbformat": 4, 709 | "nbformat_minor": 2 710 | } 711 | -------------------------------------------------------------------------------- /functions/README.txt: -------------------------------------------------------------------------------- 1 | Jupyter Notebook to test Functions on Random (Synthetic) Data 2 | --------------------------------------------------------------------------------