├── .gitignore ├── Bank_failure_m1_ex4_v3.ipynb ├── Bank_failure_rand_forests_m2_ex1.ipynb ├── DJI_tSNE_m2_ex4_corrected.ipynb ├── Euclidian_Distance_m1_ex1_v3.ipynb ├── README.md ├── Tobit_regression_m1_ex3_v4.ipynb ├── absorp_ratio_m2_ex5.ipynb ├── discrete_black_scholes_m3_ex1_v3.ipynb ├── dp_qlbs_oneset_m3_ex2_v3.ipynb ├── dp_qlbs_oneset_m3_ex3_v4.ipynb ├── linear_regress_m1_ex2_v3.ipynb └── pca_eigen_portfolios_m2_ex3.ipynb /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | .pytest_cache/ 49 | 50 | # Translations 51 | *.mo 52 | *.pot 53 | 54 | # Django stuff: 55 | *.log 56 | local_settings.py 57 | db.sqlite3 58 | 59 | # Flask stuff: 60 | instance/ 61 | .webassets-cache 62 | 63 | # Scrapy stuff: 64 | .scrapy 65 | 66 | # Sphinx documentation 67 | docs/_build/ 68 | 69 | # PyBuilder 70 | target/ 71 | 72 | # Jupyter Notebook 73 | .ipynb_checkpoints 74 | 75 | # pyenv 76 | .python-version 77 | 78 | # celery beat schedule file 79 | celerybeat-schedule 80 | 81 | # SageMath parsed files 82 | *.sage.py 83 | 84 | # Environments 85 | .env 86 | .venv 87 | env/ 88 | venv/ 89 | ENV/ 90 | env.bak/ 91 | venv.bak/ 92 | 93 | # Spyder project settings 94 | .spyderproject 95 | .spyproject 96 | 97 | # Rope project settings 98 | .ropeproject 99 | 100 | # mkdocs documentation 101 | /site 102 | 103 | # mypy 104 | .mypy_cache/ 105 | 106 | # General 107 | .DS_Store 108 | .AppleDouble 109 | .LSOverride 110 | 111 | # Icon must end with two \r 112 | Icon 113 | 114 | 115 | # Thumbnails 116 | ._* 117 | 118 | # Files that might appear in the root of a volume 119 | .DocumentRevisions-V100 120 | .fseventsd 121 | .Spotlight-V100 122 | .TemporaryItems 123 | .Trashes 124 | .VolumeIcon.icns 125 | .com.apple.timemachine.donotpresent 126 | 127 | # Directories potentially created on remote AFP share 128 | .AppleDB 129 | .AppleDesktop 130 | Network Trash Folder 131 | Temporary Items 132 | .apdisk 133 | -------------------------------------------------------------------------------- /Euclidian_Distance_m1_ex1_v3.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "collapsed": true 7 
| }, 8 | "source": [ 9 | "# Euclidean Distance \n", 10 | "\n", 11 | "Welcome to your 1-st assignment. By working through this exercise you will learn how to\n", 12 | "\n", 13 | "- do this\n", 14 | "- understand this\n", 15 | "- learn that\n", 16 | "\n", 17 | "**Instructions:**\n", 18 | "- You will be using Python 3.\n", 19 | "- Avoid using for-loops and while-loops, unless you are explicitly told to do so.\n", 20 | "- Do not modify the (# GRADED FUNCTION [function name]) comment in some cells. Your work would not be graded if you change this. Each cell containing that comment should only contain one function.\n", 21 | "- After coding your function, run the cell right below it to check if your result is correct.\n", 22 | "\n", 23 | "**After this assignment you will:**\n", 24 | "- know how to do this\n", 25 | "- understand so and so\n", 26 | "\n", 27 | "Let's get started!" 28 | ] 29 | }, 30 | { 31 | "cell_type": "markdown", 32 | "metadata": { 33 | "collapsed": true 34 | }, 35 | "source": [ 36 | "## Dataset\n", 37 | "Suppose we have a $n$ dimensional space $\\mathbb{R}^{n}$, we want to generate $1000000$ pairs of uniformly distributed random\n", 38 | "numbers $X\\sim\\mathscr{U}\\left(-1,\\:1\\right)$. \n", 39 | "\n", 40 | "For instance, if $n=1$, we generate $p_{1}=\\left(x_{1},\\:y_{1}\\right)$, $p_{2}=\\left(x_{2},\\:y_{2}\\right)$, $\\cdots$, $p_{1000000}=\\left(x_{1000000},\\:y_{1000000}\\right)$, where $x_{1}$, $x_{2}$, $\\cdots$, $x_{1000000}$ are uniformly distributed, $y_{1}$, $y_{2}$, $\\cdots$, $y_{1000000}$ are uniformly distributed too. 
\n", 41 | "\n", 42 | "If $n=2$, we generate $\\mathbf{p}_{1}=\\left(\\mathbf{x}_{1},\\:\\mathbf{y}_{1}\\right)$, where $\\mathbf{x}_{1}=\\left(x_{1}^{\\left(1\\right)},\\:x_{1}^{\\left(2\\right)}\\right)$ and $\\mathbf{y}_{1}=\\left(y_{1}^{\\left(1\\right)},\\:y_{1}^{\\left(2\\right)}\\right)$, $\\mathbf{p}_{2}=\\left(\\mathbf{x}_{2},\\:\\mathbf{y}_{2}\\right)$, where $\\mathbf{x}_{2}=\\left(x_{2}^{\\left(1\\right)},\\:x_{2}^{\\left(2\\right)}\\right)$ and $\\mathbf{y}_{2}=\\left(y_{2}^{\\left(1\\right)},\\:y_{2}^{\\left(2\\right)}\\right)$, $\\cdots$, $\\mathbf{p}_{1000000}=\\left(\\mathbf{x}_{1000000},\\:\\mathbf{y}_{1000000}\\right)$, where $\\mathbf{x}_{1000000}=\\left(x_{1000000}^{\\left(1\\right)},\\:x_{1000000}^{\\left(2\\right)}\\right)$ and $\\mathbf{y}_{1000000}=\\left(y_{1000000}^{\\left(1\\right)},\\:y_{1000000}^{\\left(2\\right)}\\right)$, and $x_{1}^{\\left(1\\right)}$, $x_{2}^{\\left(1\\right)}$, $\\cdots$, $x_{1000000}^{\\left(1\\right)}$ are uniformly distributed, $x_{1}^{\\left(2\\right)}$, $x_{2}^{\\left(2\\right)}$, $\\cdots$, $x_{1000000}^{\\left(2\\right)}$ are uniformly distributed, $y_{1}^{\\left(1\\right)}$, $y_{2}^{\\left(1\\right)}$, $\\cdots$, $y_{1000000}^{\\left(1\\right)}$ are uniformly distributed, and $y_{1}^{\\left(2\\right)}$, $y_{2}^{\\left(2\\right)}$, $\\cdots$, $y_{1000000}^{\\left(2\\right)}$ are uniformly distributed too. 
" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 1, 48 | "metadata": { 49 | "collapsed": true 50 | }, 51 | "outputs": [], 52 | "source": [ 53 | "# imports \n", 54 | "import numpy as np\n", 55 | "# import matplotlib.pyplot as plt \n", 56 | "# %matplotlib inline\n", 57 | "\n", 58 | "from sklearn.metrics.pairwise import euclidean_distances\n", 59 | "\n", 60 | "import sys\n", 61 | "sys.path.append(\"..\")\n", 62 | "import grading\n", 63 | "\n", 64 | "import timeit\n", 65 | "import matplotlib.mlab\n", 66 | "import scipy.stats\n", 67 | "from scipy.stats import norm" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 2, 73 | "metadata": { 74 | "collapsed": true 75 | }, 76 | "outputs": [], 77 | "source": [ 78 | "### ONLY FOR GRADING. DO NOT EDIT ###\n", 79 | "submissions=dict()\n", 80 | "assignment_key=\"2RRok_GPEeeQZgq5AVms2g\" \n", 81 | "all_parts=[\"pmqxU\", \"VrXL6\", \"XsLp1\",\"jD7SY\",\"Ad4J0\",\"1nPFm\"]\n", 82 | "### ONLY FOR GRADING. DO NOT EDIT ###" 83 | ] 84 | }, 85 | { 86 | "cell_type": "code", 87 | "execution_count": 3, 88 | "metadata": { 89 | "collapsed": true 90 | }, 91 | "outputs": [], 92 | "source": [ 93 | "COURSERA_TOKEN = \" \"# the key provided to the Student under his/her email on submission page\n", 94 | "COURSERA_EMAIL = \" \"# the email" 95 | ] 96 | }, 97 | { 98 | "cell_type": "code", 99 | "execution_count": 4, 100 | "metadata": { 101 | "collapsed": true 102 | }, 103 | "outputs": [], 104 | "source": [ 105 | "def euclidean_distances_stats(euclidean_distances_vector):\n", 106 | " \"\"\"\n", 107 | " Calculate Euclidean distances statistics\n", 108 | " \n", 109 | " Arguments:\n", 110 | " euclidean_distances_vector - 1-D vector of Euclidean distances\n", 111 | " \n", 112 | " Return:\n", 113 | " np.array() of length 4\n", 114 | " the first element of array is the mean\n", 115 | " the second element is variance\n", 116 | " the third element is skew of the distribution\n", 117 | " the forth element is kurtusis 
of the distribution\n", 118 | " \"\"\"\n", 119 | " if len(euclidean_distances_vector) > 0:\n", 120 | " this_mean = np.mean( euclidean_distances_vector )\n", 121 | " this_variance = np.var( euclidean_distances_vector )\n", 122 | " this_skewness = scipy.stats.skew( euclidean_distances_vector ) \n", 123 | " this_kurtosis = scipy.stats.kurtosis( euclidean_distances_vector )\n", 124 | " result = np.array([this_mean, this_variance, this_skewness, this_kurtosis])\n", 125 | " else:\n", 126 | " result = np.array([0.] * 4)\n", 127 | " return result\n", 128 | "\n", 129 | "\n", 130 | "def print_stats(euclidean_stats):\n", 131 | " \"\"\"\n", 132 | " Print Euclidean distances statistics\n", 133 | " \n", 134 | " Arguments: \n", 135 | " euclidean_stats - np.array() of length 4\n", 136 | " the first element of array is the mean\n", 137 | " the second element is variance\n", 138 | " the third element is skew of the distribution\n", 139 | " the forth element is kurtusis of the distribution\n", 140 | " \"\"\"\n", 141 | " this_mean = euclidean_stats[0]\n", 142 | " this_variance = euclidean_stats[1]\n", 143 | " this_skewness = euclidean_stats[2]\n", 144 | " this_kurtosis = euclidean_stats[3]\n", 145 | " print( 'Expectation of Euclidean distances: ', this_mean, '\\n' )\n", 146 | " print( 'Variance of Euclidean distances: ', this_variance, '\\n' )\n", 147 | " print( 'Skewness of Euclidean distances: ', this_skewness, '\\n' )\n", 148 | " print( 'Kurtosis of Euclidean distances: ',this_kurtosis, '\\n' )\n", 149 | "\n", 150 | "\n", 151 | "def plot_distribution(euclidean_distances_vector, euclidean_stats, dim_space, bins_number=30):\n", 152 | " \"\"\"\n", 153 | " Plot histogram of Euclidean distances against normal distribution PDF\n", 154 | " \n", 155 | " Arguments: \n", 156 | " \n", 157 | " euclidean_distances_vector - 1-D vector of Euclidean distances\n", 158 | " \n", 159 | " euclidean_stats - np.array() of length 4\n", 160 | " the first element of array is the mean\n", 161 | " the second 
element is variance\n", 162 | " the third element is skew of the distribution\n", 163 | " the forth element is kurtusis of the distribution\n", 164 | " \n", 165 | " dim_space - dimension of the space\n", 166 | " bins_number - number of bins in the histogram\n", 167 | " \"\"\"\n", 168 | " # verbose, but this is for clarity\n", 169 | " this_mean = euclidean_stats[0]\n", 170 | " this_variance = euclidean_stats[1]\n", 171 | " this_skewness = euclidean_stats[2]\n", 172 | " this_kurtosis = euclidean_stats[3]\n", 173 | " \n", 174 | " sample_size = len(euclidean_distances_vector)\n", 175 | " try:\n", 176 | " fig_l, ax_l = plt.subplots()\n", 177 | " n_bins_l, bins_l, patches_l = ax_l.hist( euclidean_distances_vector, bins_number, normed=1 ) \n", 178 | " y_l = matplotlib.mlab.normpdf( bins_l, this_mean, np.sqrt( this_variance ) )\n", 179 | " ax_l.plot( bins_l, y_l, 'r--' )\n", 180 | " plt.title( 'Histogram for dimension = %d and sample size = %d \\n $\\mu$ = %.3f, $\\sigma^2$ = %.3f, Skewness = %.3f, Kurtosis = %.3f' \\\n", 181 | " % (dim_space, sample_size, this_mean, this_variance, this_skewness, this_kurtosis ) )\n", 182 | " fig_l.tight_layout()\n", 183 | " plt.grid( True, which='both')\n", 184 | " plt.minorticks_on()\n", 185 | " return fig_l\n", 186 | " except:\n", 187 | " return None" 188 | ] 189 | }, 190 | { 191 | "cell_type": "code", 192 | "execution_count": 5, 193 | "metadata": { 194 | "scrolled": true 195 | }, 196 | "outputs": [ 197 | { 198 | "name": "stdout", 199 | "output_type": "stream", 200 | "text": [ 201 | "X: [[ 0.09220363 0.85065196 0.90075012 0.59361319 0.84875299]\n", 202 | " [ 0.13300259 0.50209599 0.76796562 0.92047036 0.47544869]\n", 203 | " [ 0.72927521 0.8054414 0.4002669 0.01355402 0.31719426]\n", 204 | " ..., \n", 205 | " [ 0.82071112 0.46084335 0.92036074 0.31746465 0.03535725]\n", 206 | " [ 0.21581585 0.12317179 0.42738517 0.35466096 0.93360429]\n", 207 | " [ 0.84577044 0.67545711 0.22706133 0.58893715 0.98216918]]\n", 208 | "Y: [[ 0.32900813 
0.34963352 0.52804383 0.38208285 0.03237214]\n", 209 | " [ 0.11760546 0.46402303 0.12260294 0.18876132 0.99071561]\n", 210 | " [ 0.49587495 0.18125864 0.61421199 0.29089588 0.71308158]\n", 211 | " ..., \n", 212 | " [ 0.14440936 0.38925149 0.50634999 0.29421895 0.96282509]\n", 213 | " [ 0.15239208 0.4741476 0.84900715 0.70515312 0.22175127]\n", 214 | " [ 0.46490389 0.50546926 0.04574762 0.75900819 0.25636212]]\n" 215 | ] 216 | } 217 | ], 218 | "source": [ 219 | "lower_boundary = 0\n", 220 | "upper_boundary = 1\n", 221 | "n = 5 # dimension\n", 222 | "sample_size = 10000\n", 223 | "\n", 224 | "np.random.seed(9001) # set the seed to yield reproducible results\n", 225 | "\n", 226 | "X = np.random.uniform( low=lower_boundary, high=upper_boundary, size=(sample_size, n) )\n", 227 | "Y = np.random.uniform( low=lower_boundary, high=upper_boundary, size=(sample_size, n) )\n", 228 | "\n", 229 | "print( 'X: ', X )\n", 230 | "print( 'Y: ', Y )" 231 | ] 232 | }, 233 | { 234 | "cell_type": "markdown", 235 | "metadata": {}, 236 | "source": [ 237 | "## Part 1\n", 238 | "Calculate the Euclidean distance between the two points of each pair. Do this in a loop. Hint: use sklearn to do the computation.\n", 239 | "\n", 240 | "Plot the histogram of the Euclidean distance. 
In a $n$ dimensional space $\\mathbb{R}^{n}$, the Euclidean distance between $\\mathbf{x}=\\left(x_{1},\\:x_{2},\\:\\cdots,\\:x_{n}\\right)$ and $\\mathbf{y}=\\left(y_{1},\\:y_{2},\\:\\cdots,\\:y_{n}\\right)$ is given\n", 241 | "by \n", 242 | "\\begin{equation}\n", 243 | "\\begin{aligned}d_{E}\\left(\\mathbf{p},\\:\\mathbf{q}\\right) & =\\sqrt{\\left(x_{1}-y_{1}\\right)^{2}+\\left(x_{2}-y_{2}\\right)^{2}+\\cdots+\\left(x_{n}-y_{n}\\right)^{2}}\\\\\n", 244 | " & =\\sqrt{\\sum_{i=1}^{n}\\left(x_{i}-y_{i}\\right)^{2}}\\\\\n", 245 | " & =\\left\\Vert \\mathbf{x}-\\mathbf{y}\\right\\Vert _{2}\n", 246 | "\\end{aligned}\n", 247 | "\\end{equation}" 248 | ] 249 | }, 250 | { 251 | "cell_type": "code", 252 | "execution_count": 6, 253 | "metadata": {}, 254 | "outputs": [ 255 | { 256 | "name": "stdout", 257 | "output_type": "stream", 258 | "text": [ 259 | "Running time: 2.663071423768997\n" 260 | ] 261 | } 262 | ], 263 | "source": [ 264 | "start = timeit.default_timer()\n", 265 | "### START CODE HERE ### (≈ 4 lines of code)\n", 266 | "# implement a loop which computes Euclidean distances between each element in X and Y\n", 267 | "# store results in euclidean_distances_vector_l list\n", 268 | "euclidean_distances_vector_l = []\n", 269 | "# implement a loop which computes Euclidean distances between each element in X and Y\n", 270 | "# store results in euclidean_distances_vector_l list\n", 271 | "for index, x in enumerate(X):\n", 272 | " euclidean_distances_vector_l.append(euclidean_distances(x.reshape(1, -1), Y[index].reshape(1, -1)))\n", 273 | "### END CODE HERE ###\n", 274 | "stop = timeit.default_timer()\n", 275 | "print( 'Running time: ', stop-start )" 276 | ] 277 | }, 278 | { 279 | "cell_type": "code", 280 | "execution_count": 7, 281 | "metadata": {}, 282 | "outputs": [ 283 | { 284 | "name": "stdout", 285 | "output_type": "stream", 286 | "text": [ 287 | "Submission successful, please check on the coursera grader page for the status\n" 288 | ] 289 | }, 290 | { 291 | "data": 
{ 292 | "text/plain": [ 293 | "array([ 0.87662633, 0.06098537, -0.03504537, -0.26237711])" 294 | ] 295 | }, 296 | "execution_count": 7, 297 | "metadata": {}, 298 | "output_type": "execute_result" 299 | } 300 | ], 301 | "source": [ 302 | "# Filename: SklearnDistance, PART: pmqxU\n", 303 | "### GRADED PART (DO NOT EDIT) ###\n", 304 | "result = euclidean_distances_stats(euclidean_distances_vector_l)\n", 305 | "part_1 = list(result.squeeze())\n", 306 | "try:\n", 307 | " part1 = \" \".join(map(repr, part_1))\n", 308 | "except TypeError:\n", 309 | " part1 = repr(part_1)\n", 310 | "submissions[all_parts[0]]=part1\n", 311 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key,all_parts[:1],all_parts,submissions)\n", 312 | "result\n", 313 | "### GRADED PART (DO NOT EDIT) ###" 314 | ] 315 | }, 316 | { 317 | "cell_type": "code", 318 | "execution_count": 8, 319 | "metadata": {}, 320 | "outputs": [ 321 | { 322 | "name": "stdout", 323 | "output_type": "stream", 324 | "text": [ 325 | "Expectation of Euclidean distances: 0.876626326649 \n", 326 | "\n", 327 | "Variance of Euclidean distances: 0.0609853651691 \n", 328 | "\n", 329 | "Skewness of Euclidean distances: -0.0350453681886 \n", 330 | "\n", 331 | "Kurtosis of Euclidean distances: -0.262377106269 \n", 332 | "\n" 333 | ] 334 | } 335 | ], 336 | "source": [ 337 | "print_stats(result)\n", 338 | "plot_distribution(euclidean_distances_vector_l, result, n)\n", 339 | "try:\n", 340 | " plt.show()\n", 341 | "except: pass" 342 | ] 343 | }, 344 | { 345 | "cell_type": "markdown", 346 | "metadata": {}, 347 | "source": [ 348 | "## Part 2\n", 349 | "Calculate the Euclidean distance between the two points of each pair using vectorized operations and inner product." 
350 | ] 351 | }, 352 | { 353 | "cell_type": "code", 354 | "execution_count": 9, 355 | "metadata": {}, 356 | "outputs": [ 357 | { 358 | "name": "stdout", 359 | "output_type": "stream", 360 | "text": [ 361 | "Running time: 0.0016185259446501732\n" 362 | ] 363 | } 364 | ], 365 | "source": [ 366 | "# using vectorization by calculating inner product\n", 367 | "start = timeit.default_timer()\n", 368 | "# variables needed for grading\n", 369 | "\n", 370 | "### START CODE HERE ### (≈ 3 lines of code)\n", 371 | "# compute Euclidean distances between each element in X and Y using (vectorized implementation)\n", 372 | "# store results in euclidean_distances_vector_v \n", 373 | "euclidean_distances_vector_l_vectorized = np.sqrt(np.sum((X - Y) * (X - Y), axis=1))\n", 374 | "### END CODE HERE ###\n", 375 | "stop = timeit.default_timer()\n", 376 | "print( 'Running time: ', stop-start )" 377 | ] 378 | }, 379 | { 380 | "cell_type": "code", 381 | "execution_count": 10, 382 | "metadata": {}, 383 | "outputs": [ 384 | { 385 | "name": "stdout", 386 | "output_type": "stream", 387 | "text": [ 388 | "Submission successful, please check on the coursera grader page for the status\n" 389 | ] 390 | }, 391 | { 392 | "data": { 393 | "text/plain": [ 394 | "array([ 0.87662633, 0.06098537, -0.03504537, -0.26237711])" 395 | ] 396 | }, 397 | "execution_count": 10, 398 | "metadata": {}, 399 | "output_type": "execute_result" 400 | } 401 | ], 402 | "source": [ 403 | "# Filename: VectorizedDistance, PART: VrXL6\n", 404 | "### GRADED PART (DO NOT EDIT) ### \n", 405 | "result = euclidean_distances_stats(euclidean_distances_vector_l_vectorized)\n", 406 | "part_2 = result.squeeze()\n", 407 | "try:\n", 408 | " part2 = \" \".join(map(repr, part_2))\n", 409 | "except TypeError:\n", 410 | " part2 = repr(part_2)\n", 411 | "submissions[all_parts[1]]=part2\n", 412 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key,all_parts[:2],all_parts,submissions)\n", 413 | "result\n", 414 | "### GRADED PART (DO 
NOT EDIT) ###" 415 | ] 416 | }, 417 | { 418 | "cell_type": "code", 419 | "execution_count": 11, 420 | "metadata": {}, 421 | "outputs": [ 422 | { 423 | "name": "stdout", 424 | "output_type": "stream", 425 | "text": [ 426 | "Expectation of Euclidean distances: 0.876626326649 \n", 427 | "\n", 428 | "Variance of Euclidean distances: 0.0609853651691 \n", 429 | "\n", 430 | "Skewness of Euclidean distances: -0.0350453681886 \n", 431 | "\n", 432 | "Kurtosis of Euclidean distances: -0.262377106269 \n", 433 | "\n" 434 | ] 435 | } 436 | ], 437 | "source": [ 438 | "print_stats(result)\n", 439 | "fig = plot_distribution(euclidean_distances_vector_l_vectorized, result, n)\n", 440 | "try:\n", 441 | " plt.plot()\n", 442 | "except: pass" 443 | ] 444 | }, 445 | { 446 | "cell_type": "markdown", 447 | "metadata": {}, 448 | "source": [ 449 | "## Question 3 \n", 450 | "We repeat question 1 and question 2 for $n=1$, $n=5$, $n=10$, $n=100$, $n=1000$, $n=5000$, and $n=10000$. Then plot the expectation and variance as a function of $n$.\n", 451 | "You need to generate two sets of n-dimensional samples, compute " 452 | ] 453 | }, 454 | { 455 | "cell_type": "code", 456 | "execution_count": 12, 457 | "metadata": { 458 | "collapsed": true 459 | }, 460 | "outputs": [], 461 | "source": [ 462 | "def VectorizationMethod(dim_space, sample_size, lower_boundary, upper_boundary, bins_number=30):\n", 463 | " \"\"\"\n", 464 | " Generate sample_size elements from dim_space-dimensional space. 
The coordinates of each element in the space\n", 465 | " are sampled from uniform distribution between lower_boundary and upper_boundary\n", 466 | " \n", 467 | " Arguments: \n", 468 | " \n", 469 | " dim_space - dimension of the space, a positive integer\n", 470 | " sample_size - number of samples in the dim_space-dimensional space\n", 471 | " lower_boundary - lower boundary of coordinates sampled from U(lower_boundary, upper_boundary)\n", 472 | " upper_boundary - lower boundary of coordinates sampled from U(lower_boundary, upper_boundary)\n", 473 | " bins_number - number of bins to plot a histogram\n", 474 | " \n", 475 | " stats_result - np.array() of length 4\n", 476 | " the first element of array is the mean\n", 477 | " the second element is variance\n", 478 | " the third element is skew of the distribution\n", 479 | " the forth element is kurtusis of the distribution\n", 480 | " \"\"\"\n", 481 | " np.random.seed(42)\n", 482 | " # variables needed for grading\n", 483 | "# euclidean_distances_vector_v = []\n", 484 | " ### START CODE HERE ### (≈ 7-10 lines of code)\n", 485 | " # store results in euclidean_distances_vector_v\n", 486 | " euclidean_distances_vector_v = np.array([])\n", 487 | " X = np.random.uniform(low=lower_boundary, high=upper_boundary, size=(sample_size, dim_space))\n", 488 | " Y = np.random.uniform(low=lower_boundary, high=upper_boundary, size=(sample_size, dim_space))\n", 489 | " euclidean_distances_vector_v = np.sqrt(np.sum((X - Y) * (X - Y), axis=1))\n", 490 | " \n", 491 | " stats_result = euclidean_distances_stats(euclidean_distances_vector_v)\n", 492 | " fig = plot_distribution(euclidean_distances_vector_v, stats_result, dim_space)\n", 493 | " ### END CODE HERE ###\n", 494 | " stats_result = euclidean_distances_stats(euclidean_distances_vector_v)\n", 495 | " return tuple(stats_result.tolist())" 496 | ] 497 | }, 498 | { 499 | "cell_type": "code", 500 | "execution_count": 13, 501 | "metadata": {}, 502 | "outputs": [ 503 | { 504 | "name": 
"stdout", 505 | "output_type": "stream", 506 | "text": [ 507 | "Calculating finished for sample size = 10000, dimension = 2\n", 508 | "\n", 509 | "Calculating finished for sample size = 10000, dimension = 5\n", 510 | "\n", 511 | "Calculating finished for sample size = 10000, dimension = 10\n", 512 | "\n", 513 | "Calculating finished for sample size = 10000, dimension = 20\n", 514 | "\n", 515 | "Calculating finished for sample size = 10000, dimension = 40\n", 516 | "\n", 517 | "Calculating finished for sample size = 10000, dimension = 60\n", 518 | "\n", 519 | "Calculating finished for sample size = 10000, dimension = 80\n", 520 | "\n", 521 | "Calculating finished for sample size = 10000, dimension = 100\n", 522 | "\n", 523 | "Calculating finished for sample size = 10000, dimension = 200\n", 524 | "\n", 525 | "Calculating finished for sample size = 10000, dimension = 400\n", 526 | "\n", 527 | "Calculating finished for sample size = 10000, dimension = 600\n", 528 | "\n", 529 | "Calculating finished for sample size = 10000, dimension = 800\n", 530 | "\n", 531 | "Calculating finished for sample size = 10000, dimension = 1000\n", 532 | "\n", 533 | "Running time: 3.0821857806295156\n" 534 | ] 535 | } 536 | ], 537 | "source": [ 538 | "start = timeit.default_timer()\n", 539 | "\n", 540 | "sample_size = 10000\n", 541 | "lower_boundary = 0\n", 542 | "upper_boundary = 1\n", 543 | "dimension_vector = [2, 5, 10, 20, 40, 60, 80, 100, 200, 400, 600, 800, 1000] \n", 544 | "n_dims = len(dimension_vector)\n", 545 | "\n", 546 | "euclidean_distances_mean_vector = [np.nan] * n_dims\n", 547 | "euclidean_distances_variance_vector = [np.nan] * n_dims\n", 548 | "euclidean_distances_skewness_vector = [np.nan] * n_dims\n", 549 | "euclidean_distances_kurtosis_vector = [np.nan] * n_dims\n", 550 | "\n", 551 | "for idx, space_dims in enumerate(dimension_vector):\n", 552 | " \n", 553 | " # using vectorization\n", 554 | " euclidean_distances_mean, euclidean_distances_variance, 
euclidean_distances_skewness, euclidean_distances_kurtosis = \\\n", 555 | " VectorizationMethod( space_dims, sample_size, lower_boundary, upper_boundary )\n", 556 | " \n", 557 | " euclidean_distances_mean_vector[idx] = euclidean_distances_mean\n", 558 | " euclidean_distances_variance_vector[idx] = euclidean_distances_variance\n", 559 | " euclidean_distances_skewness_vector[idx] = euclidean_distances_skewness\n", 560 | " euclidean_distances_kurtosis_vector[idx] = euclidean_distances_kurtosis\n", 561 | " \n", 562 | " print( 'Calculating finished for sample size = %d, dimension = %d\\n' %( sample_size, space_dims) )\n", 563 | "\n", 564 | "stop = timeit.default_timer()\n", 565 | "print( 'Running time: ', stop-start )" 566 | ] 567 | }, 568 | { 569 | "cell_type": "code", 570 | "execution_count": 14, 571 | "metadata": {}, 572 | "outputs": [ 573 | { 574 | "name": "stdout", 575 | "output_type": "stream", 576 | "text": [ 577 | "Submission successful, please check on the coursera grader page for the status\n" 578 | ] 579 | }, 580 | { 581 | "data": { 582 | "text/plain": [ 583 | "[0.5244117684024786,\n", 584 | " 0.8822841161864812,\n", 585 | " 1.2676717606162842,\n", 586 | " 1.8110504380007288,\n", 587 | " 2.5684460728327534,\n", 588 | " 3.1487610877583165,\n", 589 | " 3.64396853019095,\n", 590 | " 4.073344650824303,\n", 591 | " 5.768449828048197,\n", 592 | " 8.160150803731382,\n", 593 | " 9.997217189326257,\n", 594 | " 11.543203181243685,\n", 595 | " 12.906928018524363]" 596 | ] 597 | }, 598 | "execution_count": 14, 599 | "metadata": {}, 600 | "output_type": "execute_result" 601 | } 602 | ], 603 | "source": [ 604 | "# Filename : DistancesMean, PART: XsLp1\n", 605 | "### GRADED PART (DO NOT EDIT) ###\n", 606 | "part_3 = list(euclidean_distances_mean_vector)\n", 607 | "try:\n", 608 | " part3 = \" \".join(map(repr, part_3))\n", 609 | "except TypeError:\n", 610 | " part3 = repr(part_3)\n", 611 | "submissions[all_parts[2]]=part3\n", 612 | "grading.submit(COURSERA_EMAIL, 
COURSERA_TOKEN, assignment_key,all_parts[:3],all_parts,submissions)\n", 613 | "euclidean_distances_mean_vector\n", 614 | "### GRADED PART (DO NOT EDIT) ###" 615 | ] 616 | }, 617 | { 618 | "cell_type": "code", 619 | "execution_count": 15, 620 | "metadata": {}, 621 | "outputs": [ 622 | { 623 | "name": "stdout", 624 | "output_type": "stream", 625 | "text": [ 626 | "Submission successful, please check on the coursera grader page for the status\n" 627 | ] 628 | }, 629 | { 630 | "data": { 631 | "text/plain": [ 632 | "[0.06230677292748971,\n", 633 | " 0.061198079555789694,\n", 634 | " 0.0608126495018327,\n", 635 | " 0.059183678488410246,\n", 636 | " 0.05949007814616248,\n", 637 | " 0.05725268125796696,\n", 638 | " 0.05935452158486421,\n", 639 | " 0.05831142832530561,\n", 640 | " 0.05928563431624706,\n", 641 | " 0.059076129472239725,\n", 642 | " 0.05762985490169308,\n", 643 | " 0.059174927565307574,\n", 644 | " 0.0581599059610326]" 645 | ] 646 | }, 647 | "execution_count": 15, 648 | "metadata": {}, 649 | "output_type": "execute_result" 650 | } 651 | ], 652 | "source": [ 653 | "# Filename: DistancesVariance, PART jD7SY\n", 654 | "### GRADED PART (DO NOT EDIT) ###\n", 655 | "part_4 = list(euclidean_distances_variance_vector)\n", 656 | "try:\n", 657 | " part4 = \" \".join(map(repr, part_4))\n", 658 | "except TypeError:\n", 659 | " part4 = repr(part_4)\n", 660 | "submissions[all_parts[3]]=part4\n", 661 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key,all_parts[:4],all_parts,submissions)\n", 662 | "euclidean_distances_variance_vector\n", 663 | "### GRADED PART (DO NOT EDIT) ###" 664 | ] 665 | }, 666 | { 667 | "cell_type": "code", 668 | "execution_count": 16, 669 | "metadata": {}, 670 | "outputs": [ 671 | { 672 | "name": "stdout", 673 | "output_type": "stream", 674 | "text": [ 675 | "Submission successful, please check on the coursera grader page for the status\n" 676 | ] 677 | }, 678 | { 679 | "data": { 680 | "text/plain": [ 681 | "[0.1988768646152347,\n", 682 | 
" -0.021074633737255224,\n", 683 | " -0.05749817620192312,\n", 684 | " -0.0718962153911559,\n", 685 | " -0.006116609407693513,\n", 686 | " -0.023983251393225696,\n", 687 | " -0.05204557015527253,\n", 688 | " -0.018424595473803283,\n", 689 | " -0.0040378906739251515,\n", 690 | " -0.020853349346522568,\n", 691 | " -0.014025628984910854,\n", 692 | " 0.029458241353260126,\n", 693 | " -0.04396638054084743]" 694 | ] 695 | }, 696 | "execution_count": 16, 697 | "metadata": {}, 698 | "output_type": "execute_result" 699 | } 700 | ], 701 | "source": [ 702 | "# Filename: DistancesSkewness, PART: Ad4J0\n", 703 | "### GRADED PART (DO NOT EDIT) ###\n", 704 | "part_5 = list(euclidean_distances_skewness_vector)\n", 705 | "try:\n", 706 | " part5 = \" \".join(map(repr, part_5))\n", 707 | "except TypeError:\n", 708 | " part5 = repr(part_5)\n", 709 | "submissions[all_parts[4]]=part5\n", 710 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key,all_parts[:5],all_parts,submissions)\n", 711 | "euclidean_distances_skewness_vector\n", 712 | "### GRADED PART (DO NOT EDIT) ###" 713 | ] 714 | }, 715 | { 716 | "cell_type": "code", 717 | "execution_count": 17, 718 | "metadata": {}, 719 | "outputs": [ 720 | { 721 | "name": "stdout", 722 | "output_type": "stream", 723 | "text": [ 724 | "Submission successful, please check on the coursera grader page for the status\n" 725 | ] 726 | }, 727 | { 728 | "data": { 729 | "text/plain": [ 730 | "[-0.6384013133225133,\n", 731 | " -0.2758439734602782,\n", 732 | " -0.15223233078033216,\n", 733 | " -0.07988375526844216,\n", 734 | " -0.01044769148587088,\n", 735 | " -0.08064860279897701,\n", 736 | " -0.02331335574782667,\n", 737 | " -0.020166667252636383,\n", 738 | " 0.10669665209383927,\n", 739 | " -0.0536906631006282,\n", 740 | " 0.024930971487188813,\n", 741 | " 0.003075352050577962,\n", 742 | " 0.06775391815498777]" 743 | ] 744 | }, 745 | "execution_count": 17, 746 | "metadata": {}, 747 | "output_type": "execute_result" 748 | } 749 | ], 750 | 
"source": [ 751 | "# Filename: DistancesKurtosis, PART: 1nPFm\n", 752 | "### GRADED PART (DO NOT EDIT) ###\n", 753 | "part_6 = list(euclidean_distances_kurtosis_vector)\n", 754 | "try:\n", 755 | " part6 = \" \".join(map(repr, part_6))\n", 756 | "except TypeError:\n", 757 | " part6 = repr(part_6)\n", 758 | "submissions[all_parts[5]]=part6\n", 759 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key,all_parts[:6],all_parts,submissions)\n", 760 | "euclidean_distances_kurtosis_vector\n", 761 | "### GRADED PART (DO NOT EDIT) ###" 762 | ] 763 | }, 764 | { 765 | "cell_type": "code", 766 | "execution_count": 18, 767 | "metadata": { 768 | "collapsed": true 769 | }, 770 | "outputs": [], 771 | "source": [ 772 | "# here we plot the stats for different sample sizes\n", 773 | "try:\n", 774 | " plt.figure()\n", 775 | " plt.plot( dimension_vector, euclidean_distances_mean_vector, 'r-', marker='o' )\n", 776 | " plt.grid( True, which='both')\n", 777 | " plt.minorticks_on()\n", 778 | " plt.title( 'Mean of Euclidean Distances Distribution' )\n", 779 | " plt.xlabel( 'Dimension' )\n", 780 | " plt.ylabel( 'Mean of Euclidean Distances' )\n", 781 | "\n", 782 | " plt.figure()\n", 783 | " plt.plot( dimension_vector, euclidean_distances_variance_vector, 'r-', marker='o' )\n", 784 | " plt.grid( True, which='both')\n", 785 | " plt.minorticks_on()\n", 786 | " plt.title( 'Variance of Euclidean Distances Distribution' )\n", 787 | " plt.xlabel( 'Dimension' )\n", 788 | " plt.ylabel( 'Variance of Euclidean Distances' )\n", 789 | "\n", 790 | " plt.figure()\n", 791 | " plt.plot( dimension_vector, euclidean_distances_skewness_vector, 'r-', marker='o' )\n", 792 | " plt.grid( True, which='both')\n", 793 | " plt.minorticks_on()\n", 794 | " plt.title( 'Skewness of Euclidean Distances Distribution' )\n", 795 | " plt.xlabel( 'Dimension' )\n", 796 | " plt.ylabel( 'Skewness of Euclidean Distances' )\n", 797 | "\n", 798 | " plt.figure()\n", 799 | " plt.plot( dimension_vector, 
euclidean_distances_kurtosis_vector, 'r-', marker='o' )\n", 800 | " plt.grid( True, which='both')\n", 801 | " plt.minorticks_on()\n", 802 | " plt.title( 'Kurtosis of Euclidean Distances Distribution' )\n", 803 | " plt.xlabel( 'Dimension' )\n", 804 | " plt.ylabel( 'Kurtosis of Euclidean Distances' )\n", 805 | "\n", 806 | " matplotlib.pyplot.show()\n", 807 | "except: pass" 808 | ] 809 | }, 810 | { 811 | "cell_type": "code", 812 | "execution_count": null, 813 | "metadata": { 814 | "collapsed": true 815 | }, 816 | "outputs": [], 817 | "source": [] 818 | } 819 | ], 820 | "metadata": { 821 | "coursera": { 822 | "course_slug": "guided-tour-machine-learning-finance", 823 | "graded_item_id": "qoIPX", 824 | "launcher_item_id": "rsGVU" 825 | }, 826 | "kernelspec": { 827 | "display_name": "Python 3", 828 | "language": "python", 829 | "name": "python3" 830 | }, 831 | "language_info": { 832 | "codemirror_mode": { 833 | "name": "ipython", 834 | "version": 3 835 | }, 836 | "file_extension": ".py", 837 | "mimetype": "text/x-python", 838 | "name": "python", 839 | "nbconvert_exporter": "python", 840 | "pygments_lexer": "ipython3", 841 | "version": "3.6.0" 842 | } 843 | }, 844 | "nbformat": 4, 845 | "nbformat_minor": 2 846 | } 847 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Machine-Learning-and-Reinforcement-Learning-in-Finance 2 | 3 | ## Guided Tour of Machine Learning in Finance 4 | 1. [Euclidean Distance Calculation](Euclidian_Distance_m1_ex1_v3.ipynb) 5 | 2. [Linear Regression](linear_regress_m1_ex2_v3.ipynb) 6 | 3. [Tobit Regression](Tobit_regression_m1_ex3_v4.ipynb) 7 | 4. [Bank defaults prediction using FDIC dataset](Bank_failure_m1_ex4_v3.ipynb) 8 | 9 | ## Fundamentals of Machine Learning in Finance 10 | 1. [Random Forests And Decision Trees](Bank_failure_rand_forests_m2_ex1.ipynb) 11 | 2. 
[Eigen Portfolio construction via PCA](pca_eigen_portfolios_m2_ex3.ipynb) 12 | 3. [Data Visualization with t-SNE](DJI_tSNE_m2_ex4_corrected.ipynb) 13 | 4. [Absorption Ratio via PCA](absorp_ratio_m2_ex5.ipynb) 14 | 15 | ## Reinforcement Learning in Finance 16 | 1. [Discrete-time Black Scholes model](discrete_black_scholes_m3_ex1_v3.ipynb) 17 | 2. [QLBS Model Implementation](dp_qlbs_oneset_m3_ex2_v3.ipynb) 18 | 3. [Fitted Q-Iteration](dp_qlbs_oneset_m3_ex3_v4.ipynb) 19 | 4. IRL Market Model Calibration 20 | 21 | ## Overview of Advanced Methods of Reinforcement Learning in Finance 22 | -------------------------------------------------------------------------------- /Tobit_regression_m1_ex3_v4.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Tobit regression with TensorFlow\n", 8 | "\n", 9 | "Tobit regression fits the following model for non-negative data $ y $: \n", 10 | "\n", 11 | "$ y({\\bf X}) = \\max (0, w_0 + \\sum_{i=1}^{N} w_i X_i + w_{N+1} \\cdot \\varepsilon) $ \n", 12 | "\n", 13 | "Here $ X_i $ are predictors, $ \\varepsilon \\sim N(0,1) $ is a standard Gaussian noise, and $ w_{N+1} $ is the noise\n", 14 | "volatility (standard deviation).\n", 15 | "\n", 16 | "Our problem is to fit parameters $ N+2 $ parameters $ w_{i} $ for $ i = 0, \\ldots, N+1 $ to the observed set of pairs $ \\left({\\bf X}_i, y_i \\right) $ \n", 17 | "\n", 18 | "We use synthetic data with known parameters to learn how to implement Tobit Regression in TensorFlow. " 19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "## About iPython Notebooks ##\n", 26 | "\n", 27 | "iPython Notebooks are interactive coding environments embedded in a webpage. You will be using iPython notebooks in this class. You only need to write code between the ### START CODE HERE ### and ### END CODE HERE ### comments. 
After writing your code, you can run the cell by either pressing \"SHIFT\"+\"ENTER\" or by clicking on \"Run Cell\" (denoted by a play symbol) in the upper bar of the notebook. \n", 28 | "\n", 29 | "We will often specify \"(≈ X lines of code)\" in the comments to tell you about how much code you need to write. It is just a rough estimate, so don't feel bad if your code is longer or shorter." 30 | ] 31 | }, 32 | { 33 | "cell_type": "code", 34 | "execution_count": 1, 35 | "metadata": { 36 | "collapsed": true 37 | }, 38 | "outputs": [], 39 | "source": [ 40 | "import numpy as np\n", 41 | "import tensorflow as tf\n", 42 | "\n", 43 | "import sys\n", 44 | "sys.path.append(\"..\")\n", 45 | "import grading\n", 46 | "\n", 47 | "try:\n", 48 | " import matplotlib.pyplot as plt\n", 49 | " %matplotlib inline\n", 50 | "except:\n", 51 | " pass\n", 52 | "\n", 53 | "try:\n", 54 | " from mpl_toolkits.mplot3d import Axes3D\n", 55 | "except:\n", 56 | " pass" 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": 2, 62 | "metadata": { 63 | "collapsed": true 64 | }, 65 | "outputs": [], 66 | "source": [ 67 | "### ONLY FOR GRADING. DO NOT EDIT ###\n", 68 | "submissions=dict()\n", 69 | "assignment_key=\"w3Hc-vZdEeehlBIKDnZryg\" \n", 70 | "all_parts=[\"pLnY5\", \"RKR6p\", \"IU1pw\", \"ISVtY\", \"Cutr3\"]\n", 71 | "### ONLY FOR GRADING. 
DO NOT EDIT ###" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": 3, 77 | "metadata": { 78 | "collapsed": true 79 | }, 80 | "outputs": [], 81 | "source": [ 82 | "COURSERA_TOKEN = \" \"# the key provided to the Student under his/her email on submission page\n", 83 | "COURSERA_EMAIL = \" \"# the email" 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": 4, 89 | "metadata": { 90 | "collapsed": true 91 | }, 92 | "outputs": [], 93 | "source": [ 94 | "# utility function to reset the TF graph to the same state each time\n", 95 | "def reset_graph(seed=42):\n", 96 | " # to make results reproducible across runs\n", 97 | " tf.reset_default_graph()\n", 98 | " tf.set_random_seed(seed)\n", 99 | " np.random.seed(seed)\n", 100 | " " 101 | ] 102 | }, 103 | { 104 | "cell_type": "markdown", 105 | "metadata": {}, 106 | "source": [ 107 | "## Tobit Regression class\n", 108 | "\n", 109 | "**Instructions**:\n", 110 | "Complete the code for the calculation of loss function (the negative log-likelihood)." 
111 | ] 112 | }, 113 | { 114 | "cell_type": "code", 115 | "execution_count": 5, 116 | "metadata": { 117 | "collapsed": true 118 | }, 119 | "outputs": [], 120 | "source": [ 121 | "class Tobit_Regression:\n", 122 | " \n", 123 | " def __init__(self, n_features, learning_rate=0.005, L=0):\n", 124 | " \n", 125 | " self.input = tf.placeholder(tf.float32, [None, n_features], name=\"Input\")\n", 126 | " self.target = tf.placeholder(tf.float32, [None, 1], name=\"Target\")\n", 127 | " \n", 128 | " # the first weight is for the intercept, the last one is for a square root of the noise std \n", 129 | " self.weights = tf.Variable(tf.random_normal([n_features + 2, 1]))\n", 130 | " \n", 131 | " # Augmented data matrix is obtained by adding a column of ones to the data matrix\n", 132 | " self.data_plus_bias = tf.concat([tf.ones([tf.shape(self.input)[0], 1]), self.input], axis=1)\n", 133 | "\n", 134 | " #######################################################################\n", 135 | " # MLE for Tobit regression \n", 136 | " \n", 137 | " # noise volatility is obtained as a square of the last weight to ensure positivity \n", 138 | " self.sigma = 0.0001 + tf.square(self.weights[-1])\n", 139 | " \n", 140 | " # term1 and term2 are just placeholders initialized such that the code runs\n", 141 | " # students need to initialize them appropriately to solve this assignment\n", 142 | " term1 = tf.Variable(np.zeros(shape=(n_features + 2, 1)))\n", 143 | " term2 = tf.Variable(np.zeros(shape=(n_features + 2, 1)))\n", 144 | " # THIS IS THE PART THAT STUDENTS ARE SUPPOSED TO WRITE THEMSELVES TO COMPLETE THE IMPLEMENTATION \n", 145 | " # OF THE TOBIT REGRESSION MODEL\n", 146 | " \n", 147 | " # FOR THE ASSIGNMENT: complete the code for the calculation of loss function \n", 148 | " # (the negative log-likelihood)\n", 149 | " ### START CODE HERE ### (≈ 6-7 lines of code)\n", 150 | " self.y_out = tf.matmul(self.data_plus_bias[:, :], self.weights[:-1])\n", 151 | " norm = tf.distributions.Normal(loc=0.0, 
scale=1.0)\n", 152 | " term1 = tf.to_float(self.target > L) * tf.log(0.00001 + 1/self.sigma * norm.prob((self.target - self.y_out)/self.sigma))\n", 153 | " term2 = (1-tf.to_float(self.target > L)) * tf.log(0.00001 + 1 - norm.cdf((self.y_out - L)/self.sigma))\n", 154 | " ### END CODE HERE ###\n", 155 | " self.loss = - tf.reduce_mean(term1 + term2)\n", 156 | " \n", 157 | " #####################################################################\n", 158 | "\n", 159 | " # Use Adam optimization for training\n", 160 | " self.train_step = (tf.train.AdamOptimizer(learning_rate).minimize(self.loss), -self.loss)\n", 161 | " \n", 162 | " # prediction made from the model: Use a ReLU neuron!\n", 163 | " self.output = tf.nn.relu(tf.matmul(self.data_plus_bias[:, :], self.weights[:-1]))\n", 164 | " \n", 165 | " # Check the output L1-norm error \n", 166 | " self.output_L1_error = tf.reduce_mean(tf.abs(self.target - self.output))\n", 167 | "\n", 168 | " def generate_data(n_points,\n", 169 | " n_features,\n", 170 | " weights,\n", 171 | " noise_std):\n", 172 | "\n", 173 | " # Bounds of [-1,1] in space of n_points x n_features\n", 174 | " np.random.seed(42)\n", 175 | " bias = np.ones(n_points).reshape((-1,1))\n", 176 | " low = - np.ones((n_points,n_features),'float')\n", 177 | " high = np.ones((n_points,n_features),'float')\n", 178 | "\n", 179 | " # simulated features are uniformally distributed on [-1,1].\n", 180 | " # The size n_points x n_features of array X is inferred by broadcasting of 'low' and 'high'\n", 181 | " X = np.random.uniform(low=low, high=high)\n", 182 | " \n", 183 | " # simulated noise\n", 184 | " noise = np.random.normal(size=(n_points, 1))\n", 185 | " \n", 186 | " # outputs \n", 187 | " Y = weights[0] * bias + np.dot(X, weights[1:]).reshape((-1,1)) + noise_std * noise\n", 188 | "\n", 189 | " # truncate negative values of Y \n", 190 | " np.clip(Y, a_min=0, a_max=None, out=Y)\n", 191 | "\n", 192 | " return X, Y " 193 | ] 194 | }, 195 | { 196 | "cell_type": "code", 197 | 
"execution_count": 6, 198 | "metadata": { 199 | "collapsed": true 200 | }, 201 | "outputs": [], 202 | "source": [ 203 | "def gen_tobit_dataset(n_points, n_features, train_test_split=4):\n", 204 | " \"\"\"\n", 205 | " Generate dataset for Tobit regression model and split it into training and test portions\n", 206 | " \n", 207 | " \"\"\"\n", 208 | " # n_features + 1 weights (one for a constant feature)\n", 209 | " data_weights = np.array([-0.25, 0.5, 0.2, .1]) \n", 210 | " noise_std = 0.1\n", 211 | " \n", 212 | " # Generate dataset\n", 213 | " X, Y = Tobit_Regression.generate_data(n_points=n_points,\n", 214 | " n_features=n_features,\n", 215 | " weights=data_weights,\n", 216 | " noise_std=noise_std)\n", 217 | " \n", 218 | " # split to the train and test set\n", 219 | " # 1/4 of the data is used for a test\n", 220 | " \n", 221 | " n_test = int(n_points / train_test_split)\n", 222 | " n_train = n_points - n_test\n", 223 | " \n", 224 | " X_train = X[:n_train,:]\n", 225 | " Y_train = Y[:n_train].reshape((-1,1))\n", 226 | "\n", 227 | " X_test = X[n_train:,:]\n", 228 | " Y_test = Y[n_train:].reshape((-1,1))\n", 229 | " return X_train, Y_train, X_test, Y_test\n", 230 | "\n", 231 | "def train_model(n_features, learning_rate, n_steps=1000):\n", 232 | " \"\"\"\n", 233 | " Train Tobit Regression model\n", 234 | " \n", 235 | " Return:\n", 236 | " a tuple of:\n", 237 | " - Model fitted weights, np.array\n", 238 | " - loss, double \n", 239 | " - fitted noise std error, double\n", 240 | " - L1 error, double\n", 241 | " \"\"\"\n", 242 | " # create an instance of the Tobit Regression class \n", 243 | " model = Tobit_Regression(n_features=n_features, learning_rate=learning_rate)\n", 244 | "\n", 245 | " # train the model\n", 246 | " with tf.Session() as sess:\n", 247 | " sess.run(tf.global_variables_initializer())\n", 248 | " \n", 249 | " for _ in range(0, n_steps):\n", 250 | " (_, loss), weights = sess.run((model.train_step, model.weights), feed_dict={\n", 251 | " model.input: 
X_train,\n", 252 | " model.target: Y_train\n", 253 | " })\n", 254 | " \n", 255 | " # predictions for the test set\n", 256 | " # std_model = weights[-1]**2 \n", 257 | " output, std_model = sess.run([model.output,model.sigma], \n", 258 | " feed_dict={model.input: X_test})\n", 259 | " \n", 260 | " output_L1_error = sess.run(model.output_L1_error,\n", 261 | " feed_dict={model.input: X_test,\n", 262 | " model.target: Y_test})\n", 263 | " sess.close()\n", 264 | " return weights[:-1], loss, std_model[0], output_L1_error, output\n", 265 | "\n", 266 | "def plot_results(): \n", 267 | " # Plot a projection of test prediction on the first two predictors\n", 268 | " fig = plt.figure()\n", 269 | " ax = fig.add_subplot(111, projection='3d')\n", 270 | " ax.scatter(X_test[:,1], X_test[:,2], Y_test, s=1, c=\"#000000\")\n", 271 | " ax.scatter(X_test[:,1], X_test[:,2], output.reshape([-1,1]), s=1, c=\"#FF0000\")\n", 272 | " plt.xlabel('X_1')\n", 273 | " plt.ylabel('X_2')\n", 274 | " plt.show()" 275 | ] 276 | }, 277 | { 278 | "cell_type": "code", 279 | "execution_count": 7, 280 | "metadata": {}, 281 | "outputs": [ 282 | { 283 | "name": "stdout", 284 | "output_type": "stream", 285 | "text": [ 286 | "Submission successful, please check on the coursera grader page for the status\n" 287 | ] 288 | }, 289 | { 290 | "data": { 291 | "text/plain": [ 292 | "array([-0.24247025, 0.49173024, 0.19827977, 0.0930066 ], dtype=float32)" 293 | ] 294 | }, 295 | "execution_count": 7, 296 | "metadata": {}, 297 | "output_type": "execute_result" 298 | } 299 | ], 300 | "source": [ 301 | "### GRADED PART (DO NOT EDIT) ###\n", 302 | "n_points = 5000\n", 303 | "n_features = 3\n", 304 | "learning_rate = 0.05\n", 305 | "n_steps = 1000\n", 306 | "\n", 307 | "X_train, Y_train, X_test, Y_test = gen_tobit_dataset(n_points, n_features)\n", 308 | "reset_graph()\n", 309 | "weights, loss, std_model, error_L1, output = train_model(n_features, learning_rate, n_steps)\n", 310 | "\n", 311 | "part_1=list(weights.squeeze())\n", 
312 | "try:\n", 313 | " part1 = \" \".join(map(repr, part_1))\n", 314 | "except TypeError:\n", 315 | " part1 = repr(part_1)\n", 316 | "submissions[all_parts[0]]=part1\n", 317 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key, all_parts[0],all_parts,submissions)\n", 318 | "weights.squeeze()\n", 319 | "### GRADED PART (DO NOT EDIT) ###" 320 | ] 321 | }, 322 | { 323 | "cell_type": "code", 324 | "execution_count": 8, 325 | "metadata": {}, 326 | "outputs": [ 327 | { 328 | "name": "stdout", 329 | "output_type": "stream", 330 | "text": [ 331 | "Submission successful, please check on the coursera grader page for the status\n" 332 | ] 333 | }, 334 | { 335 | "data": { 336 | "text/plain": [ 337 | "[0.13414386, 0.098345175, 0.019833891]" 338 | ] 339 | }, 340 | "execution_count": 8, 341 | "metadata": {}, 342 | "output_type": "execute_result" 343 | } 344 | ], 345 | "source": [ 346 | "### GRADED PART (DO NOT EDIT) ###\n", 347 | "part_2=[loss, std_model, error_L1]\n", 348 | "try:\n", 349 | " part2 = \" \".join(map(repr, part_2))\n", 350 | "except TypeError:\n", 351 | " part2 = repr(part_2) \n", 352 | " \n", 353 | "submissions[all_parts[1]]=part2\n", 354 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key, all_parts[:2],all_parts,submissions)\n", 355 | "[loss, std_model, error_L1]\n", 356 | "### GRADED PART (DO NOT EDIT) ###" 357 | ] 358 | }, 359 | { 360 | "cell_type": "code", 361 | "execution_count": 9, 362 | "metadata": {}, 363 | "outputs": [ 364 | { 365 | "data": { 366 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAWQAAADuCAYAAAAOR30qAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsfXl8G/WZ/jO6LNvyfceOb8exnYNcJAHCWcodCPRXoLQs\npWxZICxpgVLKkkJZllJgKYUWlqOlFMIVmuVYmpYz0CbExklMbh+SLd+SZVv3NcfvD+c7jKQZaUZW\nEsed5/PhA8ij74ykmWfeed73fV6K4zioUKFChYrjD83xPgAVKlSoUDEFlZBVqFChYoZAJWQVKlSo\nmCFQCVmFChUqZghUQlahQoWKGQKVkFWoUKFihkAlZBUqVKiYIVAJWYUKFSpmCFRCVqFChYoZAp3C\n7dW2PhUqVKhQDkrORmqErEKFChUzBCohq1ChQsUMgUrIKlSoUDFDoBKyChUqVMwQqISsQoUKFTME\nKiGrUKFCxQyBSsgqVKhQMUOgErIKFSpUzBCohKxChQoVMwQqIatQoULFDIFKyCpUqFAxQ6ASsgoV\nKlTMECg1F1KhIi44jgPDMAAArVYLipLlqaJChQqohKwiRWBZFgzDgKZpBINB/nWKoqDVavl/NBoN\nNBoNKIpSyVqFiiiohKxiWmBZFjRN81ExRVE84XLclFsrIWohQqEQWJZFdnY2dDqdStQqVEAlZBVJ\ngOM4cBwHl8uFtLQ0AOCJlJAweU34byE8Hg9cLheMRiNCoVDEezQaDbRarUrUKv7poBKyCtngOI6P\niFmWxa5du3DKKackRZTkPVqtVnQfDMNEEDXZNlr+UIlaxWyCSsgqEiKaiAkJipGh3+/H8PAw0tPT\nkZmZCaPRKEqYUiQqRbBCouY4jo/GR0ZGUFFRwRN0tE6tQsWJBJWQVUiCVEzQNM2ToBRher1emM1m\neL1eFBUVwel0YmhoCIFAABqNBhkZGcjIyEBmZiYyMzP59eVCbL8sy2JkZARz5syJIGoCQtBiCUUV\nKmYiVEJWEQMxItZoxEvWXS4XzGYzQqEQamtrkZ+fj3A4HEF6LMvC5/PB6/XC7XZjZGQEHo8HDMMg\nGAzyJE0iaql9SUFse6Jzh8NhhEIhlahVnBBQCVkFD47j4PV6wTAMDAZDXCKemJiAz+dDV1cXamtr\nkZeXx68RTWwajQYmkwkmkyni/Xa7HXPmzIHP54PH48Ho6Cj8fj8A8JIH+Sc9PV0RUceTPgCApmmE\nw2EAwPDwMHJzc5GZmakStYrjCpWQVYDjONA0DZqmMTIyAoZhUF1dLbqdw+GA2WyGwWBAWloali1b\nltQ+CclFEzUwFVH7/X54vV54vV7YbDZRok5PT1ckewj3KyRZj8eDrKwsAJFELXxPdNWH2vSi4mhA\nJeR/YgibOYCviSe6ZpjjOIyOjqK3txeZmZloaWlBZmYmtm/fnvS+o0vkhNBoNBFas/B4A4FABFF7\nvV60tbXBaDRGRNQZGRlJRdTRJBtdSy1MKMaTPlSyVpEMVEL+J4RYMwchEI1GA5Zl+e2Gh4fR19eH\nvLw8LF68GOnp6QnXF5MtohGPkKUgTA4WFRWB4zi0t7dj2bJlERG1w+GAz+cDy7KiRC1WahfvOIX/\njn6P8IZ2+PBhNDY28lKPTqdTiVqFIqiE/E8CYZKLEK4YQWg0GtA0DavViv7+fhQVFWHZsmV8A0iq\njykVa1AUFUHUwr8JI+rx8fEYos7IyIj4TuRCjKi9Xi8flbMsG9FCTrZVm15UxINKyLMcpH53cnIS\nGRkZAKQTXjRNY3R0FHa7HVVVVTj55JOh1+sTri+VPItHMseCgCiKQnp6OtLT01FYWBhxbMFgkCdq\nr9eLw4cPg6IopKWlxUTUOl3iy4TcXOJ1J6pNLyoSQSXkWYroZo6vvvoKq1evFr3AQ6EQ+vr6YLPZ\nkJeXh9LSUtTV1SXcB5EdhGuKtVDHe+/xAEVRMBqNMBqNKCgog
Mfjwdy5c2EymSKIenBwED6fDwzD\nIC0tLaKOOjMzM4Ko5co0iYja4/HAbrejpqYGAEQ1arXyY/ZCJeRZBqlmDiA2agsEAujt7cX4+Dgq\nKyuxevVqOJ1ODA8Py9rXdEl1uoScSlIi35OQqAk4jkMoFOKJenh4OKI8kFR8MAyDcDic8KlCat8E\nDMNAq9WKdicSqLXUsxMqIc8SKGnm8Pl8sFgscLlcqK6u5hNRwNSFLpcoSQIwOkkmBycScRApIy0t\nDfn5+fzrRJP3er1wuVwIh8PYu3cvaJqGXq+PiKYzMzNlETXLsvzvFi+iVpteZidUQj7BQWqIhVGU\nFBF7PB6YzWb4/X7U1NSgubk55kKlKEp2gms6EfLxlCyiIUduEANFUTAYDDAYDDCZTHA4HFi6dCkA\nRETUNpsNHo8ngqiF8ofBYFB0LHKaXsbGxjAxMcHXk6tEfWJAJeQTFBzHwefzRdTPShGx0+mEz+fD\nwYMH+fZmqQtRWPaWCLOFkFMF4XdKiJp0MBKQiNrr9cJut6O3txfhcBg6nY6vu6ZpGqFQCHq9XhFh\nCqUp8t0S6YOsG930QohZrERPxbGHSsgnGITNHLt378aiRYtES9I4jsP4+DjMZjO0Wi3S0tKwYsWK\nhOsnI1kQhEIh9Pb2YmRkBBqNJmEN8GwiZLlRtl6vR25uLnJzcyNeD4fD8Pl8GBkZgd/vx4EDBxAK\nhaDVamOkD9LWHg/R0ofw38JjBtSml5kElZBPEIg1c2i12pholuM42O12WCwWpKenY/78+cjKysKO\nHTsiLlIpJCNZBINB9Pb2wuFwoLKyEieffDIARFQsCGuA09PTkZaWhmAwCI/Ho7irLtVIVrJI5Rp6\nvR45OTkIBoNIS0vjpQaapiMaXvr7+xEMBqHVapGRkQGTycTLH2lpafwxyP2thf8WfhZgiqiJi19T\nUxO/rdr0cvSgEvIMRqJmDmGESryBe3t7kZ2djYULF/J1x8JtE12kSiQLlmXR3d0Nj8eD6upqNDQ0\nQKPRIBQKgeM4yRpgv9+PyclJjI+Po6+vDz6fD8D0DYWOJ1JB6kAskep0OuTk5CAnJydiO5qmeQe9\niYkJDAwMIBgM8m3nDMNAp9MhEAhEELUcREsf5OZPiFptejl6UAl5BiK6hhiI31U3MDAAq9WK/Px8\nLFmyBEajMWZNuUQrR7Lw+/0wm82YnJxEbW0tFi5cGFOLLLUG6arT6XSw2WxoaWkBEOlT4fF4YgyF\nTCZTBFHPtIs8VYQsdx2dTofs7GxkZ2dHvM4wDHw+H/r7+xEIBNDZ2RnhSR1tdZoK6YMct9r0Mn2o\nhDyDEG8yRzTIhdfR0YGysjIsX748IlsfDbmEHE+y8Pl8MJvN8Hg8qKmpAcMwKCgoSPrCEpJ2tE8F\ngdD5jXgpBwIBAEBGRgaCwSDGxsZkE4zUcRxvyYJAzlNMPGi1WmRlZSErKws6nQ5lZWUAvj5fvF5v\nwuEBwhseqYlOBDlNL3v27MHixYv57dWml1iohDwDIKwh3rt3L+rr6yWjwHA4DKvVipGREWi1WjQ1\nNUUQmBSURMjR2wnL5Wpra9HS0gKKomC326dVZSEHQue34uJi/nVieu90OuFyuTA8PAy/3x8TCZpM\nJsWP7MkglRFyKmSaaGIXErUQDMPE3PD8fj//JEOewnw+X1LDA4RETWQUQHwkF8E/c4meSsjHEWLN\nHOREjT75SAWD3W7H3LlzsWrVKvT09Mjel5IImZCs2+1GT08PQqEQ6urqYsrllOjN8faTDIjpvV6v\nR21tLf96vEhQ+LhuMplkVSvIRSojZDneGXLWkUOeWq1W0pNaWPVBbshAarR+telFHCohHwfEa+bQ\narV8JQUwpdf29vZiYmICVVVVqK+v57dVQohKCJlhGOzevRsMw/B1y1LbHs/WaTHEiwRJtYIwCabV\nahEIBDAyMoKcnBzZZWXRS
NVnmSnSh3DKi8FgQGVlJb+unOEBRAZJJVED4Ico+Hw+zJ07lz/W2ULU\nKiEfQwgncwCIIGICQsherxcWiwVutxs1NTWYP39+zAkWTd7xIIeQJycn0dPTg2AwiIULF8bUykZj\nuo0hxxJarVY0CUbTNDo6OqDRaOBwOGC1WhEKhfhGjej633iYCUR6tNeROzzAbrfz1TNGoxEZGRmg\naTrpMkdhQpHo2nKaXp544gn89Kc/VewvcrygEvIxgNhkDqmLl6ZpdHV1gaIo1NTU8HqtGFIVIQsb\nSOrr67F///6EZKx0/9GYKZ16Op0Oer0eJSUlEdUppFGDuK8JO+qEFR/Eo+JYV1kkQqoIWaj7xoNU\nUpaUORJ92mq1KhoeIAaapvntEtVSv/XWW7jnnntkf97jDZWQjyJIxcThw4dRX18fl4gnJiZgNpvh\n9XpRXl4uy/5yOoRMOvl6enpgMBjQ2NgY85ifCLOldVqMBEmjRnT9L2l9JkNZvV4vaJrmq1MGBwd5\nwk5GCz5aSb3prJOMeRQBSQ4Srbq5uRmA/OEBpPFFeAw0TSccmCA8v04k6UIl5BRDrJnD4XDwhBy9\nrXBoaENDAxwOh+zpHFqtNqZAXwqEkDmOw9jYGMxmM9LT09Hc3ByT0JELKVI9kS4ApZBqfbbb7Rge\nHuYbdAhRE3tOkkhMZHgvltBNBnIJORwO49lnn8WyZcuwatWqmL8zDJOySFtIqhQlPTwgEAjwidmB\ngQF4vV6wLMsPD/B4PNDr9aBpWtZN70Q6H1VCThHiNXMQrZec2BzHwWazwWKxIDMzM4IUJycnU6oL\nE1AUxUfhJpMpppMvGSjxvRA7npkSIacCOp0O6enpqKio4F+L9lEeGhrifZSjJ5NkZmbymuixjJDt\ndjs2b96MkZERSUKeToSsdB0hUUd7UpNWfNLlOTo6Gnd4gFDaOFGgEvI0IaeZgxCyVqvl25tzc3NF\nh4ZqtdqY5IQU5CT1OG5qYvTAwAAyMzNlDyqVA7EmEtI8wrIs/+hOytNmKo5WYwhFSfsoR08mIVEg\nSfr6/X5Fumo05BLynDlz8Oyzz0bUeCezTiJMl9gp6uvhAcPDw6ipqUFmZqbk8ICtW7fio48+gsfj\nwfPPP4/m5mYsXLhQtiy3detW3HbbbWAYBjfccAN++tOfRvz9kUcewSuvvAJgSkI5ePAg7Ha7ZEWS\nXKiEnCSkJnOIXdgajQYDAwMYHR1FYWFh3KGhpAxLDuJFyBzHYXh4GL29vcjLy8PcuXORlpaWMjIG\nIqNcYRdfVVUVNBoNfD5fREJMr9dHkPRsghJSF5JLdBS4b98+ZGVlIRgMxhgyRSfA4hGlEiJtaGiQ\n/NuxjpDlrkWkCqmb3pIlS3DOOefgwQcfRCAQwMsvv4wLLrgAl1xyiaz1b7nlFnzwwQeoqKjAihUr\nsHbtWl7/BoA777wTd955JwDg3XffxeOPPz5tMgZUQlYMEt243W6YTCbR0jUCmqbR39+P8fFxGAwG\nWUNDxRzcpCBGyCzLYmhoCFarFQUFBTz5DwwMKHZxS0QwGo0GgUAA+/btg8fj4bv4yI0q2gs4FArB\n4/Hwj+8+nw+tra0R2Xaisx5LU6GZ0jpNzqX8/PyIsjKiq5LvbmxsLKKkLNrnQ66RlBzMlAhZCDna\nsUajgcFgQE1NDdavX69o/dbWVtTX1/MNR1dddRXefvvtCEIW4tVXX8XVV1+taB9SUAlZJoTNHF6v\nFz09PViyZInotqFQCFarFaOjoygvL0dpaSnKyspkPbZrNBrZGrJQsiAZfqvViqKiohhvC9ICKwdy\nRjP5fD4MDQ0hGAyiubk5bnkegcFgQH5+Ph9JuN1uLF++nLfhJBaT0e5vhHBmoqkQwdEsexPqqmIl\nZWKGTH6/H1arddrfXaqIVG4CTg7kJhqdTmdMlYwcDA4O8k0nAFBRUYGdO3eKbuvz+bB161Y
89dRT\nivcjBpWQE0CsmcNgMIiSG/EFHhsb44eGajQadHZ2JkWyiUDIu6+vDwMDAygpKZGMwpUmAKUIWShN\n5OTkxJCEUggf34XZdjFTIaFXBSEaUjlyvIk6VQlKJREpKSkTM2RqbW3lKxKiDZmETyOJDJlSKVmk\nipABeZUTTqdTVj39dPDuu+/i1FNPTYlcAaiELIl4zRzRpOnz+dDb2wun04mqqireF5hACcnK3Zam\naQwNDWFsbAzZ2dlYuXJl3BNeSeQtVj0hJOK6ujq0tLRgeHhYdtmdGOKRmJSpkNCrYmJiAoFAAG1t\nbXydq5Bs5CYSU0XoM6UxhLQSRyfqiD+F1E1OzJozlZKF3HLOVCFZQi4vL0d/fz///wMDAygvLxfd\n9rXXXkuZXAGohBwDsckc0RcIiTaJC5rP50NtbS2amppELyalhBwvkqVpGlarFcPDwyguLkZOTk6E\nuY4Ukm0iESNiYXfUsa5DjvaqmJiYwIoVK/jJGqSzzmKx8ENFhYlEUl6Wahzr+uFkIPSnECKeIZPP\n5+MrdKKnkihBqiJtJTesycnJCOlBLlasWIGuri5YLBaUl5fjtddew6ZNm2K2czqd2LZtG15++WXF\n+5CCSshIPJkjGi6XS/bQUGCqRnW6EXI4HEZfXx9GR0dRUVGBVatWgeM4tLe3y1pXqWTh9XrR1dUF\nr9cbQ8TC7abzqJ5K4paarBGdSCR1wCSRaDKZwDBM0i3gQsyUCFkp4hkytbW1Qa/XxxgyKZ3zdzyq\nNVwuV1IRsk6nw1NPPYXzzjsPDMPg+uuvR0tLC5555hkAwL/9278BALZs2YJvfvObMb4e08E/NSEL\na4gPHjyIqqqquMkP4vlAMrhyhoYCUyd89BSFeNsKCTnadpPo0sBUNJVqtzefzweXy4XDhw+joaEB\nhYWFKfHSOF6ITiQCsW27gUAAHR0dAJKfTjLT/JBTAWLgQ0zuCYg/ssfjkW3IlKomDSXJQZfLlVRS\nDwAuvPBCXHjhhRGvESImuO6663Ddddcltb4U/ikJmdQQk8iIoiiEw2EwDCPa3kxajY1GI+/5sH37\ndtn7SyZRFz04VEjEwm2TnRAdDZ/Ph56eHni9XmRkZKCxsTHGGS0aJ2q3XXTb7tjYGE466SRQFCUr\nkSjmpZxKU6BUEfvRgtT4qGjZSGjIFAgEkJ6eDoZheEOmZKAkQj4WSb1U45+KkOM1c5BWS+G2o6Oj\nsFgsyMrKwoIFCyIeTZQkPJQQcigUgt/vR3t7O6oFg0OnCylCJkRMdPDCwkLs379f1gV9ohKyFBIl\nEj0eT8yjOyFpv9+fsqTVTKiJTgZSslE4HMaePXug0Whgs9ng8Xh4fV+YhJVjyKQkQnY6nTG18DMd\n/xSELEbE0SRHCFnYWJGXlyc5NJRsn8gjF5BHyGQqg8vlglarxerVq1N6UUUTshgRk/2lYv6eHMwU\nMk9EYFIaqzAidLvdcDgcGBkZiTEUOlqJRCmksikkFeegXq+HRqNBRUVFxPcgbHmWMmQSelMAKiGf\n0JBjCE+g0WgwMjKCzs5OFBcXJxwaqiTqjZfUix4c2tzcjB07dqQ8wiEk6/V6+cqQuro60SGlcqWQ\n6ZgLzQYII0Iyeqm0tBThcJhPJA4ODsLn88UkEuW0PyeLmdalJ7WWwWCAwWCIIE0pbwph2ZxGo4HL\n5Up4o/N6vSlNuB0LzEpC5jgObrebt+xLZAhvtVrR39+PnJychPW8BNESRzyIkbfU4NCjhUAgAJfL\nhX379kkSMYHcyFdMsiD2k6S13GQySSbGjnczRyohlMASJRI9Hk9E+7MwkZiKRpeZ2O4MyPu94xky\nhUIh9PX1IRgMRhgyiRndC8ecnUiYVYQsbOYYHR0FAH4WWDTIj2uz2VBRUYGGhgY+ASEHSmuLCXkn\nGhyaDOJdwKTN2+fzQa/X4+STT5blUSF3QjUhZELEFou
F92Lwer0YHR3lE2PC6HAmmQulyssiHqT8\nf6M7EoPBINra2qY1lHWmmNMLMd0nKULURqMR2dnZKC0t5deNNrrfsWMHnnrqKQSDQdx7771YsGAB\nVq5ciZqaGln7SuT0BgCffvopNmzYgHA4jMLCQmzbtm1an49gVhCyWDOHXq/n+/qFCAQCsFgsmJiY\niKheEPoAyIHSCDkYDMoaHEqOX+5FJWUERIjY7/ejrq4OeXl5aG1tlXVBy5UiKGpqvtnIyAjMZjPy\n8vKwbNky6PV6hEKhiOMnHiCkVKqvrw9erxe7d+8+Jo0bRxvJlqtFJxIdDgdWrFghmUgUlpaR7yu6\nYiGVEfJMizBpmo7w8Ra70TU3N+N73/sezjrrLKxcuRL79++HXq+XRchynN4mJydx8803Y+vWrais\nrITNZkvZ5zuhCZk8xog1c0QTZqKhoUoIVsn2ZHBoIBDAggULZJXhkG49ORdDtLNXNBELpQm5UYoc\nyYKMgBobG4Ner8fSpUv55KfYe8WGjLa1taG5uVnUF1hoLBRP9pgpSLWWLieRKKxYIIkw4kCYqhK8\nY91dlwhy65m9Xi/y8vJwySWXyLLcJJDj9LZp0yZcfvnl/NO3lJd0MjihCVnYwhv9g+v1eoTDYbjd\nbpjNZgQCgbharVJCTiRZJDs4VLi2HPmEbBsMBiWJWCkS+SyPjY2hp6cHRqMRubm5kraEcqDX60X1\nVqGLmVD2IPXAYi3A08FM8bJIBLHSMtJlShKJ4+PjcLvdvLWpsNFFSSLxaI1vmg7kVllMTk4mVYMs\nx+mts7MT4XAYZ555JtxuN2677TZce+21ivclhhOakAFp8vD7/bwMkUgiAFITIadicCigzAiIZVkc\nOHCA16SnQ8TC/Ysl6xwOB3p6epCRkYFFixaBoigcPHgw6f3ESyqKuZgJH+OjZQ9hNH08ZI/j6TgX\nnUg0mUwYGxtDfX29rESilOvbTDenj4ej2RRC0zTa29vx0Ucfwe/3Y/Xq1Vi1ahXmzZs37bVPeEIW\nghAiaW9OT0/H8uXLZb03mQiZtEOncnAoWTsRIRNpwuPxYN68eSgvL08ZIURLFuPj4+ju7obRaIxo\nkAkEAtP2slDyfrHH+La2NrS0tPCkE519F5I0MW8/GpgpJvfA1xqy3ESisCNRqE8Hg8GUHM/xiJCT\n9UKW4/RWUVGBgoIC/rs6/fTT0dHRoRIyAcdxsNvtMJvNyMjIQFNTE9LT09HW1iZ7jWQi5HA4zHfz\nyRkcKveCi0fIQo24vr4eAJCdnZ3yJpJwOIyJiQl0d3dDr9eL3mTiSRtyjycV2qtUPSuZskH0Vp/P\nJ9oGnYpjSAWZHivHODkdiePj43A4HPx5kCiRGA+pJmQ5ayVrLCTH6e3SSy/F+vXrQdM0QqEQdu7c\niR/96EeK9yWGE56Qx8bGcPjwYeTk5GDRokURhKiki0xJkwPHcXA6nRgcHARN07IGh8qZwkGQqG65\nvr6eL5cjk3dTiUAggKGhIUxMTGD+/PmSsstMcnsTW1tsykZ09UJ/fz98Ph927doVEU3LaeMVYiZF\nyMlqv9FPIEajERqNBkVFRXziVSqRKJycHY1UTguRW82SbIQsx+mtqakJ559/PhYtWgSNRoMbbrgB\nCxYsULwv0f2nZJXjCIPBgKVLlx4T82vh4NDMzEzk5+ejpaVF1nuFk6flbEtuJoSIA4GAaN1yKh3X\nXC4Xurq6EAwGZX22VBDyse70k5I9Fi5cyEfTwu4wYVKMVHuIEUKqIuRUSCqpcowj3Yd6vR65ubkR\nEWd0R52YVES+s1Q5vSnB5OQkSkpKknqvHKc34ZDTVOKEJ+Tc3FxFUkMyEBscyjAMDh8+LHsNJd4X\nGo2G9+8NBoNxG0iU+hyLEYfb7UZ3dzcYhkFDQwOCwSCcTqes44xXjSHXtnImQK/XIy8vT1L28Hq9\nsNvtfK16dJNLKuS
GVGvIyeKzzz6Dy+VCS0uLJJHG66iLTiQ6nU6+ckbJ+KjpwOVypUTTPdY44Qk5\nHpIZQSO8KOINDg0Gg4qkArmdfaTMi2EYNDc3J+zkU2rtKZRNPB4Puru7EQ6HUV9fz5PR2NhY0q3T\n5HW5bbIzGVKyB/EEEcoebreb1y2VuJcJkcoOu+lIBC+88ALGxsbw2GOPJbRgjYZYInFoaAgMwyA/\nP583YpJKJMYzu1dy0zsRjYWAWUDIiSZ1yI1KhdtrNBoMDAzEHRya6rplj8eDnp4eXi5IT09HQUFB\nwnWTGc0UCAT4ZhWiRwshV0qYLqGeqPadGo0mRvY4ePAgSkpKQFFUjCmO0FToaMsewPTn1z3wwAMI\nBoPgOC6lA04J8Ub/Tdj2HG12L9SnAShyejvRvJCBWUDI8aDX6xURslarhcVigd1uR1lZWVyjIaXa\nrRQhC4mYSBNKhocqiZA5jsP+/fv5fUnVLE9HlyYVL3a7PWE79IlKyFLQ6/XIysoSlT2Exu1E9hA2\nuWRmZqasEWO68gnpQDt48GDKpnxI3SDEOjiBKQ9lQtSjo6Pwer0IhUIIh8Po7u6O8PkQO0Y1Qp6B\nIKVpiUAc30h5jxzHN6UnfDRxRhOxMBpONOhUCDlNJH6/Hz09PXC73WhsbExYs5wsIY+Pj6OrqwsZ\nGRnIz89HIBCISPZEExAwczTk6UIquo1XCxztVUFsOg8fPhzxPSmVH2aa/abcZg4hxBKJLpcLVquV\nlz7EEol6vR52u31a45uOJ054Qo5HLCRClkL04NDS0lKUlJSkrERHCCJxxCNiAiWdeqRmWAyBQABm\nsxlOpxN1dXWgaRq5ubmy3N6UEKXT6URXVxd0Oh1aWlqQmZmJUCgUM+KITDZ2uVx8Wd2+ffuQnZ09\nLQKaCVAqN4hNgHa5XBgcHERJSQk8Hg9GRkbg8Xh4CUKYRIzXAp2qKouZ1qlHvgcpa1OPxwOr1Yr/\n+q//wqFDh3DuueeisbERF154Ib773e/K2kcip7dPP/0Ul156KW9UdPnll2Pjxo3T/mwEJ96ZrwBS\nEbLU4NDOzs6jVrFB0zRGRkag1WoliZhAqbVn9LbBYBBmsxkTExOora1FU1MTX7OcykkgbrcbPp8P\n3d3dmDdvHv/YKZXoi25G2LdvHyorK8GybAwBCbvsThRzoVSUvWm1WtESs2AwyJflORwOvgU6uskl\nLS0tZaZAM81XWWqd6OTrW2+9hTVr1mDHjh3o7u6W7eIox+kNANasWYP33ntv2p9HDCc8IctJ6hEk\nGhyqNFFZg2PFAAAgAElEQVQnp4qDRMQul0t23XIylRPA1I3GYrHA4XCIOtop8TmOt53X60V3dzdC\noRAMBgOWLVsm61ijQVEUryFGE5Cwy07MUznV5kLTRaoaQ8TOJYqiYDQaYTQaJWUP0qgUDAYRDAYR\nDoeRm5vLf0/JPHXMtAhZboMJCQhIh6lcyHF6O9o44Qk5HvR6PYLBYIQHcnWcwaHJVk6IrSUsKaur\nq+OL6JWsK3fbUCiEzs5O2O32uJ8vGeN5IYgW7fV6UV9fj4KCAkXTt8UgFU1LddkJk2MWiwVerxcd\nHR2yH+ePFo5H63S07MEwDMxmM9xuN4qKivhBDT09PYplD3I8MylClkvI5HtU+nvIcXoDgO3bt2PR\nokUoLy/Ho48+Krs5TA5mBSFLZesZhsHw8DBGRkZEI8Zo6HQ62dUNZHsyPZcgmoiJ1mW32xWRrBzi\nDIfDGBwchM1mQ2NjY0zEH41kh5cSCWRychJ1dXUoKio6LnaVYhn51tZWzJ8/X/RxPjqallttkwym\nS8hOpxN/+tOfsHTpUtTV1SW1RmtrKx588EGsW7eO93MRHh+RPbxeLxwOB/wuF7K6usC0tCCjoIAn\na2HDRip+51R16kWb00vB4/Ek5bIoB0uXLoXVaoXJZML777+Pyy67DF1dXSlbf1YQc
jTI4NDJyUmk\np6dj6dKlsk4snU4nO4ol20ePZoomYuG209GFhaBpGn19fRgZGUFxcTEKCwtRUVGRcF2lkkU4HIbF\nYsHY2JisG5pSpKLsTdgxJtTlhc0bwvpWvV4fQdJyLnA5mC4hDw8P4/PPP0d2djZOP/30pNZoaGjA\npZdeiurq6hgCFJM9tH//Owx/+ANcN9+M8YqKCNlDq9UiEAhgYGAgKUMhgr///e944IEH8Nvf/pY3\nw0oWcond6XQqbmgB5Dm9Cde98MILcfPNN2NsbCxCSpoOZgUhkws7enBoZWUlent7ZV8oyUgWiYhY\nuK3ctaWqLEh53tDQEJ+M9Pv9su/QcgmZaJOtra2oqqrCqlWrjooEcDTrkMWaN4ApnZ1E08RYyOv1\nYt++fRFETYbjysV0CbmxsRH33HNPUkRCoNfrUVVVhbS0NFm/F7NwIcI//CEMZ5yBkuLiCO8Hmqbx\n5Zdf8slgr9fL1xMLnzykZI+XX34Z7e3tWLt2bdKfJ+Z4FXghJ1PyJsfpbWRkhG8Aam1tBcuyshq4\n5GJWELLb7UZXV1fM4FC/339UxjKRfY6Pj2NychJNTU0JDfCnUznBMAz6+/sxODiI8vJyrF69mo8U\nlNYsx9uW7GdgYAAAEkogBMfTnF0pxCZCt7a2oqamJiY5ptPpIjRXk8kkGaGlItIvKyubVpXPjh07\n8NBDD+Gaa67BKaeckvgNOTmgr75a9E86nQ5arTYiQiSGQmLyEDG8J9/Xnj170N7ejo0bN+I///M/\npx0dA0d/Wogcp7fNmzfj6aefhk6nQ3p6Ol577bWUnvuzgpBdLhcqKytjSJGMcZILOYQsjIhzc3NR\nXFyckIwBZYQs9NIgBCnVOai0ZlmMkIWeHWQ/ZPKxnGNNlpBnSqeesCRPGCWSbrFoBzgh+RCTHLLO\ndDDdG9upp56KjRs3yp5OnehYohFPHvL7/fwNbWhoCOvWrcMll1yC0dFRBINBTExM8I0byUKJOX2y\nbdOJnN7Wr1+P9evXJ7W2HMwKQq6oqBAlJSUkCMQnZDFpore3V3ZEo0RDZlkWoVAIO3bsQGlpacpa\nuKO3JXaiFosFxcXFop4diTAdUk0FIff19aGiogJlZWXTWkcMUraTYtM2iHSUlZWVdKnZdKsasrKy\ncM4556Ctre2YWoEKDYKiZY/wW2+h4K9/xdiGDbDQNG9lED1pXM6+lGjIJ6KPBTBLCFkKSk9KMUIW\nErHQEQ1QpgvLuTkQm8++vj6wLCuLIJXWLIfDYXAch9HRUZjNZuTn52PFihVJVSCwLIvR0VGEw+Gk\nsujTJWSv14t3330XNpsNN998c9LrKIHUvL+2tjaUlpbC5/PxpWZWqxUOhwNnnnkmT9TxxkgdU+mH\n4wCaBiTOr2RvDsLPoNPpkGaxQG82o3jOHHCFhRGyh9fr5XV8YWu9lD2n3IYXp9OZUl33WGJWEHKq\nTmJh/W08IibQ6XSyu4DikQ/HcRgaGkJvby+KioqwYsUKfPnll7KiVSWfnaIouN1u7Ny5E1lZWVi6\ndCn/uJ0MDh8+jE2bNqGoqIjXLDmOw+DgIAYGBpCenp4wYpwOIWdmZuLiiy+Wp5fKwMTExLQMaXJz\ncyPev3nzZuzYsQOrVq2CTqfjjYWIRCIkH4PBkLKWZznwPPEEJv/yF8zdtAmcCHklUzv8xRdfYOPG\njXjkkUewePFiAIBv/Xp0r1mDliNVCHJkD7fbjeHhYQQCAWi1Wj76DofDMWWmYnC73Xxzx4mGWUHI\niaAk8mAYBrt37wZN05JETKC0KkPsuEZGRmCxWFBQUBATqaYyYiLDXymKwpIlS1JS7lVeXo5TTz0V\nNTU1vMtbd3c38vPz0dzcjFAoBLfbjaGhIXz44YcoKipCS0sLT9A0TU9bsqiuro4pTUoG7e3teO65\n57B+/fqkx/FE/1Y33ngjLrjgArS0tET8TWx6d
jgcBsMwyMzMBMdxR73B5am2NnxhNuN/HA6UpIiQ\nKYqKuenSFAXIkA+EskfE+2mad31jGAZ79+5FOBwWbXIhx6tKFscZctqn5dxVu7u7EQgEsGDBAlmR\nklKNmoBIBhaLBbm5uVi2bFmMPSGJ1qdLyMT4R6vVorKyEoFAIGW1t9nZ2Tj11FP5EqDMzEx+nFY4\nHIbRaER2djZycnLgcDhQVFSEhoYGPkvvdDoxMTHBX1zH02CouLgYLS0tKasnBaZuFtXV1TGvi42R\n4jgOXV1d0Ov1CIVCsFqtfE280CWPRNPTPS++efvtmLNvHwoEkaTf78fLL7+MNWvWoLy8XPHNYOWK\nFXj/5ZcBQZJ7ul16Op0OOTk5yM7OxsDAAJYuXSopezAMg8ceewyBQACVlZWora1FTU2N7M+RyFiI\noK2tDatXr8Zrr72Gb33rW0l/NjHMCkKOh0SETIiYRMR+v1/23VVphMxxHGw2G3p6epCTk4MlS5ZI\nSgbx2rLlgHwulmXR0NDAkyIpU0oFXC4XP+ppwYIFEZaawpuJyWTChg0bkJ6eDqPRyLdEsyyLnJwc\n5Obm8kmyRNUMR0tjnTt3Lm699dajsrYcCH09oh/lo206hSV52RyHLL8fxoULodVqZT9xLF26FEuX\nLo14rb+/H5s2bYLX68V1112nmEj1zz0H/R//iMDvfw92/nwAqWubFurH8WSPe+65Bz//+c9htVpx\n5513QqPRYPPmzQnXl2ssxDAM7rrrLnzzm9+c9mcSw6wg5GQsOIVz5Orq6viIWG5EDciPkDmOw9jY\nGHw+H2w2G0466SRZU6oZhlFc9UDc1wKBABoaGmKSkKkYiOr1etHV1QWappGVlYX58+fHPGpGQ+yJ\nQ5j8ycnJiSjmJ9UM0ZoiISLyz0wom0sVxJ6IiF+FwWDAE088gXnz5uH73/8+wuEwPB4P0h57DMYd\nO7DvjjsQKChAeno6QqEQ7Ha74ptYQ0MDnnzySVRWVibV7szOmwe2uRmcwYC0W28Ffd55YFat4tex\n2WzQarVJJdzklLxpNBosXrwYDMPg5z//uaKnHbnGQk8++SSuuOIKtLW1Kf4McjArCDkeoi04pYhY\nuL1cQk4UIXMch/HxcXR3d/OPnfPnz5f1OK5UDvH7/byxDDH+Ebu4lRByNEGQ0U9utxsNDQ0oKChA\nR0fHUSl7E1YzELtOADwREYNy0lEofKzPyspKyWP9sUa8ygaO48CyLH9OkKGsmv/3/6Cpr8eib3wD\nnE4Ht9uNAwcOxCTG5EhCFEXx+rndbpccM/X4448jPT0dN910U8TfmLPOAnPWWaCsVmhbW8EVFoJe\nvpyP3G+//Xakp6fj2WefVfzdKLlBuN1uxZ16coyFBgcHsWXLFnzyyScqIScLQpoul4t3vRIj4ujt\nlawtBkLERqORf5xvb2+XXdwul5BDoRCCwSB2796Nuro6NDc3SxJRshOqw+EwzGYzHA5HzD6mM3k6\nmbK36OnQHo8Hy5Yti2hMIJ120b4VcutdjxfifV9paWn45S9/GfN3dvFisEcqGiiA96sQVhmQxJgS\nz+l4UoPZbI77RMRVVsL/7rvgcnPB2O3Q6XSgKArr1q1L2uBJydSR6Q55lcKGDRvw8MMPH9VzaFYQ\ncryLnmEYdHd3w2AwxCViAiWELEZGExMT6O7u5r1YhZ690zWeFyIcDqO3txc2mw06nS5iIna841Va\nszw4OIjh4WFUVVWJ2nqKkSqxPpRDtqmQHKQaE6J9K0iSbM6uXcgeH0fo+uuRdbznrjmd0L3/Ppgz\nz0xY+5tw0svOndD19UHT2BjxupQkJPScttlsYHp7Uf6Xv8B5xRXwFxfDYDAgHA5HPC1SHIenamrA\nzpsX91i4I081DMPw5+Xll18e9z3xoMQLOZlzSo6x0JdffomrrroKwNRk9vfffx86nQ6XXXaZ4v1J\nYVYQMhBLD
CQi9nq9KCgoQFNTk6x1lBCy8AJxOp3o7u6GRqPB/PnzRe3/lBKyWOQpdHojJvu7d+9O\nifE8AekUbGtrQ0VFBVYJdECxNacjWRxNiPlWsCwL9sABMB4PbOPjsA4Nwev1Ys+ePTEucMcimtZY\nLNj3xz/CMz6OjLPOkv5O3G4YHnwQzJo1YC66SHQT/auvQtfTA9299ybcr5jntHZkBGm7dmHynHPQ\nWVwMr9eLvXv3gqZppDMMytrboSsvR/kjj4Btbob/kksS7kcYaVMjI+BycoAE+RMxKDWnV3puyTEW\nslgs/H9fd911uPjii1NKxsAsImQCoTRRX1+PYDAIt9st+/1KKycYhsGuXbvAcRzq6+vjaldKu+qE\n27IsC6vVioGBgRiSTNUkEFIXbTabwbIsli9fnjBZJ3fck9R7j3VSTqPRQHPttdCxLCpZlncHbGpq\nijHNiZ66kZWVNS0vhhhwHHRbtuAJhwOf/+53OM9sxgMPPCC+LU1DMzYG9khVixhCd98N38iIZPdd\nIjBnn41AZSUMdXUw9ffz+j3HcWD/8Q8Y33gD9ssug+P00zG8ahXG2toSek4TQqZGR2H84Q/BnHYa\nQj/5ieJjk0vIPp8v4TkrBjnGQscCs4aQieMbIWJSuuZwOI6K45uwbnnhwoWySuWUkD0hb6HxT2lp\nKd/1JcR0CZlUgXR3dyM3NxfLly/H/v37FZkLJYPjZi5EUYBWi70dHfjd736HM844A6fl5SFr82aE\nb7gBXEsLBgcH8cQTT+D8889HU1MTHA4H710irJvOGR2FaedO6OrrMTk5iV//+tc4//zzsWrVqsTH\nwbLQDA3hzsWLYZ+cRDAYnJJ7urqgOXQIzIUXAuTJJC8PgWeegX18HHkS5LTzwAHYfvtbLLngAiCZ\nsUMaDS9FCOUTiqKgXbUKzCOPILelBfjxj1EDoEpQkiflOU06E7ncXNBnnw12+XLlxwX5GvLk5GTS\n06YTGQsJ8eKLLya1j0SYNYRM5shFE2OiydPRSDQ1RDgRhNQty/WwVRohOxwOWCwWFBYWxvW1kLuu\nlObd1dUFo9GIxYsX800jcqWI6ZLqMSFkjoN22zawFRXgBDaQ9eXluGbpUmhLSgCfD5TDAYRCAKaS\nYyUlJSgtLY0wLooeOBrctQuBTz9FKCcH7e3taG1tRUlJCZYtW5Y4mtZqEXj6adRSFN6gKOzduxca\njQb6zZuh3b4dgaVLwQl0TLPViu9///tYt24dNmzYELPcn7dswUBHB06Kqi9OBjFJPZ0O7MqVEduI\nTc4GENG04fF40NnZCY1Gg/RvfGNq+7ExxZ7TNE3LavM/kbv0gFlEyDU1NaLRn9TkaSlITQ3xer3o\n6emB3+/ny8rI9nIbOOQQJ2ke6evrQ3p6umgXXzSUjGYSenUQY/umpqYYzVuuFCG173A4DI1Gw2vh\nwppqu92ON998E4sXL+bHqR9VOJ3Qvfkm2JYWhAWEnNvXhwu++AIdJhPYb34TwcWLgSNRWEFBAW6/\n/faYpWImb1x7LbBuHajDh7Fi/nw8+uijoI6QKyGRuNOzj+xPgyNVFgDC118P+oILwM2ZE7Hv/Px8\nLF26lPeJiMZdhYWYOPNMBK64YnrfF2JL8IibXUtLC+gjrm1S0oBQu5+YmEBDQwPS0tJiLDqFdeWJ\nPKeVWG8mGyHPBMwaQpa60yrVhKO39/l86Onpgc/nQ11dXUx9b6oaSTiOg8PhQHd3N0wmE6qrq8Gy\nbEIyTrSuEIRkv/rqK9HGESGSIXlgqla5u7ub7+CjKAq7du2Cw+HAD37wA354AEVRklF4X18f9u7d\ni3PPPTfh50+z2QC7HRA4r8UgNxehO+6AjaZx9w034Morr8S5554LdsEChG69FR7yRJRMqZRGA+Tk\ngKIoZGdnRzQSSE3PFtYFa7VajPT2YvkTT6BWo0FGZSV0DgcCTz8NUBT6+/t
x6NAhnHXWWcjNzcXj\njz8ueSglHIdcioIlhZOidb//PXTbtuGdiy7Csy+9hPvvvx+///3vMTIygksvvRTt7e345S9/Kdno\nRKQGqUoYOZ7TxFhIJeRZgGQJWdhoUVdXh8LCQlHSV1o5ISaHkFI5g8GAhQsXIjMzEzabDS6XS9a6\ncsiTDCr1+/1obGyU/DzCNeXICcQgSDh/r66uDg0NDQCmIi232w2DwQCHw4H+/n4wDIPTTjuN/1sg\nEIh4fN2zZw8+/vhjLFmyBOXl5fB6vWhvb8fChQsjbyChEMrffhu6vj7QCew3uZoauM1m9PT0oLe3\nd+rFjAywK1aAbWuD2WxGRkYGSktLE35muZCank3TNLxOJzxOJz74/HO8/8ADuNfjwfySEvxvMAgu\nGMQ3/H4YMzOxZcsW/PWvf0V9fX3Cp4nQxo2wj45CEwhM+9hJhKzduxfajg6svPNOBK69FvPmzcOC\nBQtQVlaG/v5+9PT0IBgMxiXkeA0diTynSe30xMQEPB4PsrOzI4g6OhBSJYsZgnjNEEp0SoZhMD4+\njj179qC2tjZuowWgvJFEKIe4XC50dXWJlsqlYhIIMHXxWywW2O121NTUYHx8PIIcpCBHsrDb7Xjm\nmWf4ScmVlZX8/L1wOMz7D5x11lkR7yMXnNVqRSAQQGdnJ//4mpWVhcWLF6OxsZHXbvv6+vDaa6/B\nYDBEJssMBtjOPBMFUdqmFGpra/HOO++Iap7PPfccysrK8O///u+y1hJicnIy5hygenqg37QJ4euv\nj9CBganzoOg3v0FJdzcyHnkEVX/7G+pra/HVypV4/fXX4QyFMLe7G6FQCM3NzSgrK4PBYIDL5UJm\nZqY0wVEUWCAl5XqESIO/+hWCXi9KcnPx7SNdfKRDj2EYhEKhuDYAyfgqi3lO79mzB42Njbw+bbPZ\n4PF4IpKsBw8ehNlsljXwNxqJjIXefvtt3HvvvdBoNNDpdPj1r3+N0047TfF+EmHWEPJ0QSLI8fFx\naLVarFq1SlbCIRmTepIYpGmaN/6R2lbJukII5/DNnTuXJ0phLWU8yCmRczgcfBQYr1Y5Gk6nE3q9\nHjk5OcjMzORbVsPhMNxuNzweD1iWRXt7O4CpLrUrr7wS1dXVMY0KnoYGcLW1kFt1Gq2VcxwHg8GA\na6655uu/+f3QfvABuNJSUDYbmG98A5BIKI2NjWHVqlVoaWnB22+/zb+uGRmB5sABUGNjMYQMAGxl\nJTiGQd/wMOY99hgMpaUIt7XhV7/6FTZt2oR33nkH99xzD1iWjWgV93q94DguxngpLS0N7N69ONDV\nhaqTTpL5bUiDj2x1Okn7TK1Wm9CTJVUgxJuenh7T4EKSrAcPHsRHH32EsbExvPTSS2hpacFzzz2X\n8LyUYyx0zjnnYO3ataAoCl999RW+/e1v49ChQyn/nLOGkBORp1RbaigUgsVigcPhQHV1NebNm4fW\n1lZFk6rlEmc4HIbNZoPT6URDQ0PcWXxKKzIIeQqnjpSVlSkiSqk1ozE2Noauri7k5uZi/fr1SEtL\nk72PcDiMJ598EsXFxbj00kt5uWNwcJCfixjdyOH1elFYWAiXy4WhoaGIZBlN0/D5fMjIyJhWo4nQ\nA5lyOKD7/HOwhYXQjI+DXbJElFSBKWvMhQsXxthsMqecAqa5GZDQ6Okf/ABWqxXXrV2L5cuX4/nn\nnwcAlJWVQaPRYGJiAhzHwfjFF0jfvx95N9zAa9zRM+wGBwfB2Gzw/+IX+G+HA9/6yU9QXFw8rVZx\nycg2HIb+6afBnnQSmNNPT7hOqpp/pI5HmGS98847MTk5iYsuughnnHEGOjs7ZZ2XcoyFhE9VXq/3\nqDU1zRpCjgeS6Rf+OETztNvtqK6ujmgLViJxyJEsAoEAzGYzJiYmYDQasWLFioQ/qNIIORQK8aOD\nCgoKkpqPJ4RYOZvT6URnZycMBgNfItf
f3y+vMWRyEhTDQF9QgNWrV/M6H8dxaGtrw5/+9Cfceuut\nMe5aGo1G1DuYJMuGhobQ09PDSx45HIc5f/0rcO65SFu2LCEhkRs1NToKzmgEcnLAVVQgdPvt4LKy\nQLnd4CR0ZZvNhhdffBH//d//jfHx8egvUJKMWZbFl19+ySe4orXhDRs28NNDtB9/DO2ePaC//W3e\nZ1g0QcaymPR4sG7fPsybNy+iVVzMeEkUHg/S7rsP9GmngZPwQ6YmJqB/+20wbW3QvfYaQv/xHzHV\nIMcTLpcLubm5MBqNWLRokaz3yDEWAoAtW7bg7rvvhs1mw//93/+l7JiF+KcgZFL6ptVqEQ6H0dfX\nh9HRUVRVVckedS+FeMQZCoV4GaS2thZVVVXo7OyULYXI7YDzer0YGhpCUVGRrLFMcozvhRGyz+dD\nV1cXwuEwGhsbI+qu5ZbH6V97DZTLhdDtt+O8yUmApjE0dy44jkNDQwMuvPBC2dqfMFnW19eHBQsW\n8Lq1v7sbur4+jB44gJEjN5WSri5k+Xzg1q2DKSsLOp0Ouj17QFmtYC++GJpgEPrf/AZcZSXCN944\n9R2VlqKjowM7duyIlDME2Lt3L9544w3e50MuxsfH8corr2D58uXYsmVLzO9FqlAAIPTjH4NyuyNM\n30Wh0SB33Tqcc2QaDHHIi/ZT7u/vRygU4geNRrSKh8OgRkbgGRgAI3ET4oqL4X/+eWi+/BKGN94A\n4tTssyx7zB33XC7XUauyWLduHdatW4fPPvsM9957Lz788MOU72PWEHKixFsgEMDg4CBGRkYwd+7c\naROxcO3ouXokkWaz2VBdXY3GxkZQFIVQKKTIuChRhEwiVpZlUVxcHBNdSq0rZ1gkIbiDBw9icnIS\nDQ0Nov6ychpDqP5+cCYTmOXLAYqCxmIBcnNBHWlgKCoqwiUyfBESQafTYRBAwc9+horKSlQcuVmw\nf/4ztmzfjrScHJgKCvC7Rx/FtzIycLnRiIkFC0DrdKDPPRcQlGMB4DvQmPZ2aMrKwEaZ9px55pl4\n/vnn0dDQgM7OzrjHtn37duzYsQO33norCgsLccstt6CwsDDx9JasLHBHbgaUzQbt1q2g166V1HWj\nH+3jNW8QvV44uIC76SY8+sQTqOrrw7Jly/Dqq6+itbUVDz/88NcDCGpq8DezGR/W1uKn+fmQor9U\nmdMrmZzjdDoVz0WUYywkxOmnnw6z2YyxsbGUTpgBZhEhS4FhGPj9fnR0dKC6ulq2pir3JBBGyAzD\noK+vD8PDw6Kknyq3N5IUZBiGzzw7HA5Z68ohZJqm4XA44PF40NjYiPnz50+rikXT2QnK4QDb0ABo\nNAjfdhug0YA6opMmAjUyAv3//A/oyy8Hu3Ch5HZutxtvvvkm5s+fj29XVfHH57npJuz2+bAgHMap\neXmo6u1F4dlnw3vzzfBqNAgEg/giIwNanw+mw4enHu0zMnBKaSlWrl+PrO99D1xhIQIvvBCxP7vd\njkWLFoGm6YTnyhtvvIFt27bhiiuu4HMV0bDb7fjwww9x9tln8+dNX18fGIZBbW0ttB9/DMOjj4Ir\nKQFz3nkAvvYJ3rdvH3bu3IlTTz1VMtCYnJzE6OgoGhsbYTAYUFBQEDNxY3x8HM3NzcjNzcXevXux\nf/9+9PX1obu7G6WlpXw0bR8cxMjWrRgvKsLwxRfzQYcQqSJkJV7IyZS9yTEW6u7uRl1dHV9XHwwG\nj8pk61lDyGInQ39/PwYGBmAwGDB//vyIgvR4IIk6OYXoRA7p6+tDf38/ysvLJUlfiR+x2Lak6cLr\n9UYkBcfHx2WvG08KYVkWAwMD6O/vR2ZmJmpqaiLahkXXczoBnw+I0kEjhnquWQNm6VKAPEpGDXKV\nBY1mSpcVAVkjOzsb373mGuREXZDZlZX4yf33IyMjA4ZQCL+58cYpu8vmZmQzDCYmJrBs2TLQNM1X\nNIx
/9hmoTZswePHFSL/6aqTl50PvcPAa7P79+/HYY4/h+uuvF88J0DT0zz0Hdv58MGedhXvvvRc/\n+MEPYmfs+XxT7mcUhVdeeQV79uzBW2+9hflHRiDdf//98Pl8ePXVV0FfdBG4wkIwR8qtxsbGcM89\n9+DCCy/Ezp078be//Q1VVVWSv9nzzz+Pbdu24ZlnnhGNADUaDQoLC3Hfffehra0NS5cuxUknnYRg\nMMjXBJPJ2fMqK3FZdjb+8Pbb+OjPf8avXnwRufn5yDSZkHZEgiGEzHEcXnjhBRgMBlx77bVxf2Yx\nyO3SI9sq9VyWYyz01ltv4aWXXoJer0d6ejpef/31oyLHzBpCBqZIgBBxf38/5syZg1WrVslPPB0B\nSdQlOglYloXD4eD9gsWMf6KPT8lnIRBWgtTX16OoqCji78lWZBCQoatmsxlFRUVYuXIlRkZGEn9n\nHAfTa69NnUTx/BMMhggSBgAEg9BOTn5tnhNvN6WlCElYSkZ8p4EAGt9/H2xT01SpmgB81GQwIHzb\nbVSWw6YAACAASURBVBgeHobz0KEI7Ven033dpJCdDa3RCNPy5fCmpfEarNVqRTgcRiAQQGNjI0wm\nk/icQo8Huq1bwQwNgTnrrJhoFJiSctJ++lOEv/UtMOvW4bLLLsP555+Puro6fpvrr78e4XB46nNm\nZfGfS/uPf8DjdILjOOTk5OBHP/oRLq+sROObbwISTxHnn38+5syZEzGBRQxC7Vej0fB6fWFhIaiB\nAaQ98ggC110H95tv4oyHHkLJ3/+OkS++wB333ovmujpc//zzvERCovW2tjYYjcakCFlucDQdX5RE\nxkJ33XUX7rrrrqTXl4tZRcj9/f3o6+uLcUVLxs+CTE0WA7GptFgsyM7ORl5eHuoFHgmpAsdx6Onp\nwcjICP+YK0bqSieBCLcdHx9HV1cXTCZTREJQo9Ek1rspCsEzz4Tb60WClFMMtB9/jKzWVjgvvVTh\nO+MfD2cygZPRbv7qq6/CbDbjwQcfFL9RZmeDOe88UABMiCx7IlOPm5ub4fF4YLVa4XQ60d7eHpEo\nY596Cpo4j89cRgbY6mpwRyLa6upqrFix4usNwmG++WDPnj144okncPfdd2NeTQ30Tz+NZ7u7sYdl\ncffdd6OoqAgVvb1gP/sMrtFRQCQpt2DBgojyPinEbebwemHv60Ouw4Gs0lI033cfFoyOIlhaitOf\nfRbzFixAVlYWr7/7fD7s2rULN954I0wmE5xOp+Kp4koiZODo+2wfTcwqQtbpdKLlXmKJt0TriJER\nx3Gw2+381Ohly5YBmMq2pxIsy/JlSzqdLmECMhnje7fbzbtwtbS0xCR95FZPMAsWIBClX5ORTwzD\n8CVr0aY6bFMTQuEwGBkOXrKRlgbfNdfgyy+/xLwECZfLL78ck5OTkgY58fRP4dTjwsJC3u+kqbwc\n9GefwdHcjGG3G10eD9jhYb7sLCsri/cMpigKKChA6MEHAcRGd1RfH4w33AB67VrQV13F+3qHQiEw\nGg2eaGxEaX09vl9SwssPwf/4D3SvWYM5Cio+lH72baOjeCgtDRvnzMFqAMjLA5uXBz2ADX//O79d\nUVERTCYTPB4P5s6dKzpCSu5UcbkaciAQOGaNKkcLs4qQ58yZI0oier1eti8EIE7IxPgnIyMjYmo0\nTdOKhpEC0glDjuMwPDwMi8WCkpISZGZmoupIcioelETILMuis7MTDMNg3rx5kgmQZOw3iYn+4OAg\nKioqoNPp4Bsfx+RXX8GZlQWtXj9FSkYj8i0WhGprkzP0EYH2gw8AisJwQwPeeOMNXHDBBTjvSOJL\nDETLFbvxDg8P46WXXsIll1wiXrkSDAIcF9G9R1EU0lpbkf2b3yDz/vvBrFmDQ9u2YfvmzVh77bVg\niooky86ysrJirCi1HR3QDAxAv2kTNAcPYuXvfoc33ngDGo0GgUAAe
4aGUFdXh38XDho1meCbO1dW\n9RDHcejv70dZWVlMACMVIe/67DP0PvoolhQXo0xG7TFvUKTTITccRk5xMXDk5iF3qnhmZqbsCHly\nclK2Fe5MxawiZCkkYzBESHZychJdXV3Q6/X8sFIhlE6HFqtyEEbeeXl5WLFiBQwGA2w2m6xqDzk1\ny8JBpdXV1aipqUloLiTXfpNhGAwPD8NsNvNyEcdxoGkaJVYrtF99Bfo730G4qAhutxu+4WF4P/kE\nY2VlsC1YgFAoxEePWUfqhGP209EBLicHdpMJu3fvximnnPJ1GRbLgmtrAzQazD37bNxyyy2YI7NZ\nIeL7DYcRHBiA3mRCTk6OpGSV9rOfTdVU33QTDM88g8CPfjSVvzj1VAQ3bgRz5Mmp729/w+4tW/CN\nQ4dQuWVLRFJZ6Kk8NjYGn88Hv9+PQ4cOwWQyofTQIRgyMhD+9rfBHmlwICRpNBrxyCOPiCav5HpH\nHD58GHfccQe+853v4Dvf+U7E36Qi5NdffRV9u3fjj+vXIy06OSkCflrI8DCMt9wC+qyzEL71VgDS\nU8WFiVXSKh4MBmEwGMBxXESrePT5e6IbCwGzjJClCEav1yvWkD0eD3bt2gUAkjPy4u1TCoTAyQk/\nMTGBzs7OmMhbuG2i6CBezTLDMLBarRgaGkJVVRXmzJmD7OzshMctV7Jwu90YHR0FRVFYvnw5b5dJ\nboBsfT1gNIIrLIROp/t6YvRtt0FL08DkJObMmQO3242enh58+umnWLx4MUpLS78maZ0O2S++CHbu\nXBxeuRLvv/8+311JURTSnnwS/7N1K8KXXoqb/H40VFXFJhHjgaZBWSzY8+mn2Pryy/iXX/4yZlLE\ne++9h48++ggPPvggdPPmgfL7obFY8I8dO9BZVoYF3/rWVNLtzDP595yxYQNe3bEDv8/Px31RN3Ii\neZBEXzgcxt69e7/+Li66CKFFi+DKysLY2BhajkxzIWQULTERyCXksrIynHPOOViyZInoGmKE/KOf\n/xzuf/kXpB254SQCGXDKZWeDOflksIJ9sXY7+p98EuXf+Q50RypKgKjE6hH09vZCo9EgLS0tYqo4\nMaMymUwIBoOw2WwntPUmMMsIWQpKImSv14vBwUGEw2EsWrQo5XdcQrKJNFxgeqOZOI7D0NAQent7\nIzwturq6kjeedzqheeUVcGecAU91NTo7OxEKhZCXlyfdlJKTA1bsIikoAHXEM5nozH19fdi+fTtW\nrlyJuro6uN1uOJ1ODLjd0K5ZA212NvLz8/Hd734XZWVlvFTCZWSgqLAQTGkp9L/9LbjKStBHpgPL\nQWZfH/RvvIHcFSswZ/lyGKM6Bj/++GP84Q9/QFdXFzZs2ICMI9184Dh02u3oGRuD7tAhfPDBB7ju\nuuv4csS0wkJ884YbpkhXqJ+zLLZu3Yqqqiq0tLTwr2m1WmRnZ089dpeXAy0t+Pzzz/HHP/4RP/zh\nD9HQ0MCTkV6vR0lnJ7LCYVCXXcYPZZVLyKQyQwjtO+8AJhOYJUtE1ygtLVVkT8oHHhkZCB1xTwuF\nQjh06BCCX32Fh//4R9yo0+HcjRsTrpOVlYWCgoIYL2USTb/yyiv485//DLfbjWuuuQaLFy/GDTfc\nENcvhiCR09srr7yChx9+GBzHISsrC08//bTkkIDpQiXkI/D7/eju7obP50NxcTFYlj1qjz8HDhwA\ny7JoaGiIuw8lxvMEwvl4QvmDQAnJx2jIoRDY0VFY9u7FqNuNefPmQa/XizrICY9porcXr951F864\n6iq0rFsnuc/ly5fjgQce4NfNyMj4+gI8Ug9LNEeLxQK/3w+/348DZ56JFWvXwpSRgYObNyO7oADx\nC7u+BsdxCM6ZA+bSS1G1eDG+t3YtDA89BLa5GfSVV8Lr9eK+++5DKBTC22+/zWv6VF8fQh0daD14\nEGXl5fw5JvzOKIrC9yoqoH3vP
YRWrODbnycnJ/HSSy9h4cKFPCFzHIcMqxW6r76a8q048qTR3NyM\n733ve1i1ahVyc3J4Yg+FQsi4+25Qo6PoaGnBvq4uUBSF0tJSDA4O8pGj7OoEmob+9dfB5eWBWbTo\nqDV0tLW14dFHH8W/3XgjLrj5ZjRdeWXCdaQkFL1ezz9x3XPPPWhoaMDQ0BDWrVuHjo4OWZ9djtNb\nTU0Ntm3bhry8PPzlL3/BD3/4Q1Gvi1RgVhGy1GN4PI01GAyip6cHTqcTdXV1KDqSfBkdHVW070SR\nCdnP5OQk72uRSoMh4OtW6rS0tBj5g0DJJBDhdj6fDxsffhiFeXn4lzPOwMrSUlAUBY/HE0PcHMeB\nYRjQNI1AIIDtW7di5549KK+tRcu6deA4DlarFZmZmRHv1ev1PEGJIfoxHwDa29tRVFQEn88Hc28v\nntu9GyVDQ/hWeTlPSiaTKSJxFQwGp5zUSANDRgYYYlkZCEwl7I4c1+233w6r1Yqnn34aNTk5QDgM\n6PXQfvIJ0j/6CPOam1FQXY3m5masE7nZUL290B4+DMrjAWcwQHPwIPIXLsQvfvELFH36KXRbtoBe\ntw4sy8Ly+ut4u7UVtyxciPQjx1M0NISrdu4EY7NB9/nnCD7+OLiiIhgMBrCPPw54PGhesgT/un49\nXC4Xrr76atTV1cFms/HT14XVDCSB+L//+79477338Ktf/Wrq+9TpEHzkEUCvlzyXWZZFb28vqqur\nobVYoDl8eKpjUIK8xeS25uZmfP/738fJK1fivPPPl/ythVAyvqmgoEB2eR8gz+ntlFNO4f971apV\nGBgYkLV2MphVhKwEwmaLmpoaNDU18QSpxFJTuL3YSSycpFFbWwuKopCVlaW4LTsevF4vfD4furu7\n4+rdgHIZhOM4DA4OorOzE06nkzdMJwgEAvjLX/4Ck8nEn9QMw4DjOGi1Wuz/4gvsslpx0yOPYNEZ\nZ0wZLnV349nnn8f89HRoR0dR++tfwyCUbBwOaL/8Eszq1UB2NliWxYsvvojCwkKsXbs24jjJ6KSC\nggLMnTsXd9xxB58scrvdsNvtfAkeIaatW7eCYRjeaD3itzAaEfrFL/j/7ejogM/nw8qaGqRffTWY\nc85B6Cc/Ab1uHTSrV+OmefMw6XTC++GH0L/zDsL/+q9Abi6CwSD8fj9yv/td0JddNtVo8o9/QP/s\nswj9+MeYt3AhMq+4AlxmJuh16wCXC507d+IzlsWV+fkg3mOU0wlqdBSYO3eK+KJKBwl+9rOf4YEH\nHsA777yDu+++m/9MUtUM3d3dcDgcGBkZgcFgmLLqPJIIZYaHRSPSTz75BA899BA2btyIb3z4IXSf\nfAJ/UxM4iUkmYpFtXl5ezG+YCHIJ2eVyKZ74ItfpjeCFF17ABRdcoGgfSjCrCFkOydE0jd7eXt7t\nTWi7SaC0kYQQpzAKEybThJM03G634pphKZCo2+VywWAw8HXR8aCEkJ1OJ7Zv346CggKsWbMGZ5xx\nRsx35XA48Pnnn6OyshLVR+YAUhTFlzt1bd6MbJ8PJ//nf+L/s3ee4VEV3Lf/Tc2k9x7SC4QQAqEl\n9N5BkA4iIE3AFyuIiiggKBYQBFSKAqIgRYqCNBGQFiCBJEB6b6SXmWRmMuV+SGbeCTX4vt77v9y7\nnocvZDicyZxZZ5+9116rIi2NK7t3k56URKCvL/YyGcVyOSqlEqFMhqiqCvHhwyjt7BBfuIDQ1xed\njQ16vZ6KigoE9fWcWb2a0P79cW+Mk7//Mze1srzfstNATC4uLlRWVnLjxg2j8dS9e/eMPg2mx3Rx\ncaG0tBQLZ2e07dqhbay8BMXFiA4eRD99OnozM/RCIUgkRsLctWsXiYmJrFixAutGKZa2bVv0CxY0\nmBQJhaR++SVOTk5IAeRyXistZZqFBVYmBKHt1g1tx45gZoZm/nzj36elpbFz507mzJmDp6cnQ4c
O\nJdzamppVqxAlJho9P4xqBpkMj+PH0QcEoO3WjUhzc14vKeGeUtnEqtPS0hKtVotMJnsgDCA4OJie\nPXvi4eFB/ezZaPv1Q/8YtcXj9MyJiYmsX7+eN99886G+HqZ4mgr5n1RZnD17lm3btvGXid76v41n\nipCfhIyMDAoKCp7o9vafBKPqdDry8/PJyckxrm6bXpRPu+b8sNcabirFxcX4+/vTqlUrLl++3Oxj\nPulmU1NTw19//cXGjRuZOHEiXbt2bfiBXo8gKQmtszN6e3sKCws5e/Ys48aNo1+/fty7c4fyzExa\nDh6MoPF36+zujlVNDRKJhIrMTNLi4zEzN8fN2Zl6jQaxUIi6ogJzOzv0aWloDh5kl1qNb8eOuKek\nICssxKdHD1577TUKEhPZt2gR7nFxuKxa9UiLyIfBVGb1vEkqc01NDUlJSdTV1VFSUkJtbS0ikQgz\nMzM+/PBD2rRpw8GDBxs8pz/++N/Hq6xEWFAAtbXopVKU4eHUjx5t/HlYWBgymcxo/OTo6EilRkMq\n0FYopDA7mzlr1jB48GAWhoejcXHhr2XLMHdwoA1w/tw5UlNTmTFjBsKHbB4mJydz/vx5Bg4caFwM\n8bGxQalUNth1PvihIjlyBG1oKNpu3TC7fh3J5ctIJk3Co/Hx3BAGkJ2djUKhYOPGjezbt4+10dH4\ntmmDdOBAUlJS+Oqrr1i3bh1aLy/QaBDv2YMuLAzdfUkljyNkbXExqowMtHL5Ez+75poU/R1Cbq7T\nW3x8PDNnzuT48eP/iKmQAc8UIT+sQjYY5hjSbKOiop744f4dQq6vr6eoqIiMjAycnJweaRD/nzi+\nmZr/mMYyGfC0PsdNUFmJ5tw5UpydkYvFBAcH4+Pj09QMp7QU4Xff8U5mJnkWFkybNo2rV68ybtw4\ncnJyiP3mGwpSUxluY4O1rS3F164hr6hAWVeHTqPBq1s3ukuliBwcqKmpYc+6dcjv3qV/bi5OgYHo\nIiJQL1+O26FDWLRowY6PPiIkNJQpjTeEq6mpKIKD8XV15fjGjZRVVxM6a9ZD32e9QoFAJEIsk1Fa\nWopSqXyo37JUKsXMzKzJ+zS43VVWVlJUVERcXBx6vd4YVW9tbY1VRERDFSqTwf3m9DT0Grt06cKX\nX35JZWUl7733HnFxcezZs4c33ngDT09PevfuTVRUFNBQLIz5/HOUSiWHy8vZf/48d6qqGDNmzENJ\nZvDgwbRp06bJ47YuIoL4VavoYNLzNMLWFuWXXxqtPEXx8QikUnTe3saXGMIADO8zICAAGysr9u3f\nT7uLF+nSrRt+fn5IJBJiY2OxsrLCVqHAe/t2BFFRRkJOS0vj5s2beHh4PPK7FllRwV6BALVOx5Oe\n15rrvPhPOb3l5OQwevRodu3a9cRq/j/FM0XIpjCVfbm4uODo6PjYC8QUTxOMavA1SExMxNHRkcjI\nyMdG1z9NO8RAyKbmPy4uLnTu3PmBR7in8Tm+n5A1Gg3Jx46RsH49HRctotWoUdTX1zN9+nQ6NLYG\nAHByQjt1KvYHD6KsqqJz584EBwcb+3YBS5dSlp2NTUgIl7/5hp+3bWP41KkEde/O3vffJys2ltGr\nV1MTH0/yuXMEOzvzR0wMZzZvJuXzzym2sMA+KIj0tDRaeXhgaWlJp2HDjEsB4eHhODg4IIyKwm3P\nHmT37qGjoU8vFAoRCAQN70+j4ceFC7F1cOC5NWvYsmULxcXFrFmzpmFrMz+fisxMvKOjH/icRceO\nYf7NN0g2bODEiRMAxMbG4tk4JDQ4nmVmZqLRaDA3Nzd+TvenZwP07NkThUKBSCSiY8eOODk54e/v\nj1ihYPEbbxg3Fe3s7AgPD0ev19OyoICPQkMpmT69CcGIzp9HUFmJZsQIxDExBH//Paplyxq8MNRq\n9GIxrn/8gcXbb1P3008PxE7pTW5I9XPmoCksfDDVpL4eFAq
ENjYMj45mQH4+Y378EaGVFdN27mT5\nO+9AYztDLpcjNzcn+bXXqLKwQH3tGhYWFpw6dYorV64wefLkRz6FagYMQOflhe6/KB/7O17IzXF6\nW758OWVlZcxrTDYXi8Vcv379v3beTc7nHznq/0GYkpejo6NR9pWYmPhUVW9zYNjiU6vV+Pj44G1S\nbTwKIpEIZTNj2kUiEdXV1eTk5GBjY/NYsr9/4eRRuD9/z1Bx1zo4cD4wkCAvr38TmwlxG4hL27Il\nCxuHRgKBADdAvGQJul69sBk4EBtPT/JjYqjOy6OFmxu+bdpQL5Nh7+JCrbs7iro6UpOSqCopofdb\nb2GpVJKblISPuTluvr54tG/PzdxcrqemMnXNGvw6dEAgFHL27FksLS3p168fer0e7yFD0GVl4erq\n2qA40GrJu3oV+4AALBwd8QgKQmpmRtzu3fTu0gWdmZnxiSVm505uXbzI9PXrsfDwoLa2lqSkpAZP\n3/JyBCUlCJRKhEIhRUVFrFm1iqjOnXlt8WKsra2NQ01DX7qgoICqqiqSk5ONGmGDwiMoKKihL11c\njJVEQps2bajLzES0cCHSwYOpbxws2tjY8OWXXzaYVDW2HGzuG87KXnkFQVkZ8gEDEGZkIExKQlBe\njt7MDIuxY6nv0weL/HwEWVkIKisfmQMIoGvZEkwWMgyQbNqER0wMys8+Q5SWht2+ffw0bx7mFRUI\n7twBnY6SkhKOHj3K4MGDcXV15axUSkRYmFHtMmLECMLDwxEKhcTExBgXWaytrXE6ehSzggLqlyxB\n18y08ObCEN/0tHiS09vWrVuNmYf/NJ4pQtbr9cTExGBtbf1AlNHTbus9DnK53Lhg0bJlS0pLS5ut\n92xuy6KmpsbY24qIiHikCY4BT7vqbMjfM9htCoVCglu1Mj7Wm8re9Hp9Q/JG48DO8Of2oUMcW7YM\nx5ISrG/dYuzAgdz+5Re+/uAD7KytaT9sGHaNAaASk8fo9tHRaOrrUdTWkiCV4uDpicPrr6OXSBBa\nWjIpKwttZiYtAgIQNPa8X3vtNdR1dbzUvTutIiLw7dOHyMhIo8b6XmIiez76iOhBg/AdNYpDOTkM\niYjg/NatjJg5k8AhQ1Cr1QiVSjq4uOA1ZQr2vr4oVSou/fYbsVeusGLTJvymTEH13HPUazTIABdH\nRxZlZ+OtUsF99ouGvrStrS0ikcg4UDSkcdTU1DS0Syor6bhgARpPT+59+y1bt26lqrSUpa6uGOq5\nJlKzRiK+cuUKaWlpjBs3DqlUivLrr6GyEmQyNBMnohk0CBwcKMvOpsbREVtHR7IHDcLxk0/gb3o6\n6IKDUZSVIZHJ0EZFoRkyBMeTJ1F/8gkqBwcQiShITeXSpUuEhYVRUlLCp59+yuTJk5k+fbpRXufv\n78+1a9fo0KFDE/24MDMTcVYWGdevY2lvbyRqS0vLhxYTj1IvPQxqtfqJ8WX/0/FMEbJQKCQyMvKh\n5Pi0fWF4sHdlujwSHBxsfDyqrKxs9rGfRMh1dXWkpqaiUqmMldiTyNhw3OYQcm1tLUVFRWi12gdu\nWqZGRgaC12q1TY57/fp1o91oVmwsF+7dQ6TX45Wfz1igTqnExdkZeycnUs6epfvIkQ/tpYslEnTl\n5aTeuUOrNm3o3L17Q2JFfj6a/v1R5edzKyODux99RElGBq/On8/JHTu4duIEmuxs+r34YpOFF72D\nAwfz86krKKDXlSuc2r8fL7GYmeHhSP74gy1796KztKR3WBjRR49i9/rr1Gu11NfX43vvHik5Oejz\n8yEoiN0ffkhxSgrztm3DwtGRrgMHorOz41G38/uvkwfSOHQ6RGPGoHZwQK/XI7Wy4mxdHeIrVxjq\n6Iifn98Dv2dokGRlZmZSX1+PVCptkAEaIBAYF02mzpuHWq3m1+nTESUl/W0yBtAOHkyhtze+UimI\nxeg6dkRQV4deJqO4rIw
bN27Qu3dvVq5caVygWrZs2SM3NU1ToZ2cnGDFCtDrCdfpHvCs0Ov1DwSy\nAv+4F/L/JDxThAwNlfDDPhyJRPK3DIbEYjFqtdq41GFYHjH9Aj6NveejCNkQiFpRUUFgYCBOTk6U\nlJRQ1bhe/CQ8KYOvtraWlJQUlEoldnZ2jxXOGypig7+CtbU1NjY2WH73HafOnMFz3DjstFqs7Oz4\n+JtvyEtIQCiVkpSUhD4wkFf276c2I4PkP/7AzmTodD9KU1IwNzOjU6Mnct7Vq2xbvJjnZs+m3ZQp\n+AB1ly9TkZODT4sWLFq9GsXdu1QLhSSmplKXnIywvp62Y8Ygs7DA29+fFgEBdHF1ZY6NDYMDA7G3\ntUWen49FXR3fnThBZkICndauhY4dKSsrIzs7m+Bp06gPCMC6VSsKCgr4/fZt3KRShGZm6PR6VKtW\nNTnvyuxsytLS8O/dG0HjvMHqyhUk+/dTP28emOTkXbhwgezsbCa98w4SoRAv4K233iIyMpKMjAw2\nb97M66+/jlQqpba2lvLycmQyGdbW1nTt2pW+ffsac/fKysoQiUQPPJaPHDmS+vr6p8qeexyMrS+5\nHJ23N9r33gPgxKFDfPfdd7i5uRk9MIRCId27d3/gGI8MOBUIQCBALBQ+4FlxfyBrTk4OKpUKjUZD\nenp6k0DWR73P/5u9kOEZJORHhW6KxWJUj0nIfdjrlUolRUVF3Lt3Dz8/v0dmyz2NcuL+St00h8/X\nJBD1aY/7qJaFKdEHBwcjlUofuuoM/yZivV6PXq8nKiqK2tpaampqKC4uRpafT0VFBdrcXK4WFnLl\n11+ZPHYsmfHx2AQFEWZnR0hICFU5ObiGheHRvj3yoiIklpZGGZwpAvv3x2vNGlI//JCQ7t2J/+03\nknNyjOdQXFyMpFMnRowYgZubG1KpFGH37g3rzioV33/6KaXFxUhDQqjKz6d1RQU2KSnUjBzJ+D17\nuJWUxJ4tW6iqqmLKvHl06dgRFw8PKjt3JjktDWtrazp27IhEIsHJ3x+tXo+bmxtz334bb29vRDIZ\nWq22yWcgFAq58sUX3Dh2jGGvvUZufj5tX3oJm9JShFlZoFY3IeSzZ88SHx/Pjz/+yNSpU5kwYQJi\nsZgBAwZw9+5d9Ho9rVq1QqFQoKmsxEsmo87V1RhAeu/ePVQqFUKhkBdffBF7e3tOnDjRhJRmT58O\nQiHX4+I4duwYQUFBxi1NlUrFvn37iIyMpJXJIsnjYGifSL7/npozZyhbuhTP9u0ZOnQovr6+tHlM\nrqEBfydP72GBrFVVVeTm5mJra2scqBqkiZaWlsYqWiqVPnV00/9EPHOE/CiIxWKj+P1J0Ol0KJVK\nYmNj8fX1faJB/NO0Q0yVE/n5+WRnZz9Ur2z62qc5rul7yM7OpqCgoAnRKxSKhxK3Yd3ZUGWVpaSQ\nffkyERMnYu3h0eA1/dlnDDhzBqlAQMW5c3Tr3BnbX36hrLoaSy8vFAoFSefPs/+DD+g1ejTeERH8\n8MEHjJw/nzATja4pJGo19RUVVKSkEDF8ONZOTvj060dsbCzm5uZEhIfz54YNLL10iSETJjBx4kTj\nY/DYFSuoVyhwa9uW+tpa1Ldv4xoejrq+nl9jYjh17Bjh1tZ4WVigt7bGPyQE67ffJjcvj1bz5hm/\n+Hq9njF9+2IulfJHfDy9e/emIjOTnLNnUdfWcvvcOcyLivjg/Hm+/uUXoioqCKmu5tT+/fxyjkCM\nsgAAIABJREFU9y7vSiS0eOEFrF96CWEjESorK9k1fz6OLi5MnTqVD95/n5Rr18DE9KhVq1ZGkqyp\nqcFr8WKsrl1DcPUqF+7e5ezZsyxZsgQbGxtUKhX9+/dHJpORmZlpJCVrmYzQuXPR+/pyuVMnLl26\nRElJiXHAfO/ePQ4dOoRcLm82IRvIdL9Wy4b0dOxXrWLb9u04ODg8tBp+GJ425eNx52IIA
jANHNBo\nNCgUCmpqavjjjz/YuHEj9+7dY8KECbRt25YJEyY0WRJ6HJ5kLpSUlMT06dOJjY3lo48+4s033/yP\n39ej8P8MITdnqGcqlRMKhYSGhjYr5vtpK9m6ujouX76Mo6PjI/XKf+e4hsqyMD+f2q1bsQoMpMvE\niQ8sptyvnjAM7ADjwC7hyBHOHjmCR9u2eDTm5ek1GtTnzqESCrnw22906dsXl5UrWdSyJfVWVlRX\nV1NiYYG7nx81Zmak5+VRp1JRmJVFQF0dMpkMZUUFe199lXY6He3mzaPHzp0see01Dr74Iu18fWk/\nfjzZhYUEBwdTcu0aeQUFXDt1Cr1KhbOzMwApx49TW1FBhImPr8TCgkEmuXsThgwh9bvv8G/blpGf\nfUZOTg53MjIIDAqi2tOTmqwsbGxssLa2Rl9RwYiKCjxNKrNL27Zx9dw52nXuTE5qKsHV1YjUamrz\n86nPyCBbr+d6fj4DzMyI3LwZfUAAuUFB1Ny7R8iQISjlci7HxZGm0TDIwYH3Bg7k6rlzHHnnHQYO\nHoxlUhJqd3eEffqAhQV6vR5F587IpFL0jo7k5uaSnp5OXV0dNjY2mJmZsW7duoaTq6pCcvAgqm7d\nqHF0pN7JCeHdu3Tt3RuPWbOoqakhJyfHWDF+8sknzQ74hX9XyBW2ttiHhDB06NBH2n0+Ck0q5Orq\nhkHl32gnPMp+ViwWY2tri62tLVOnTqVLly6sWrWKZcuWcevWrWZ/b5pjLuTg4MD69es5dOjQU5//\n0+KZI+RH9ZAeV8UaHo/T09ONUrnMzMxmDwqaWyFXVlYapVHR0dFPjJtp7qDO8NrKykrS0tKwNTcn\ntKICQWkp6Tk5mJubG7XCpsT9MCIGSD9zhpgzZxgxYwbuJttXl7dt48tvv8XfwYHZH35IcM+eaK2s\nkFpZIQZq8/IIDAwkbPt29Ho9V7dv50ZaGuXbtmHboQNaiQRdVRU5WVlY6PX4lpYi9PbG3NKSnLw8\njufl4eLtzeQpU9DU1fHTRx/h5OLCy5s2YeHggIWTE8V37rB8/nxc7OzwiIhAUVKCX8+eACjy8pBd\nvUqORMKBlSvJz8igrb8/KSkpuLi40GbkSESjR+PYOFAyeDmfW7OGUkdH+s6dy5fPP4+zuzt9Zs0i\n/ehRHMvKGLR7N+ZWVozUaPhXVBRlVVUEBgZSlJnJ4oAA7GUyNN278+PSpSSkpzPVxYXQ0FDa+frS\nMTubkiNHSLK2Jjg4mKvnztHW3R3rP/9k24ULDJkxg9Yff9wQVnryJEK1GmQypk+fzuTwcCwKC9G5\nuiIoK0N4/Tra3r0RVFcjSEpCHByMbVAQokWLkM2eTXh5OVbduhk9lXfu3MmZM2dYsGABCoXCOCh7\nWEKJQcJp6A0LBAJebBycPtY4S6lE+uGH6Nq0QWNygzQQsvDOHaRLl1I/Zw7aAQOadS2b4ml8LOzs\n7Jo8dTQHzTEXcnFxwcXFhd9+++2pz/9p8cwR8qPwqArZEPJpaWnZRHUgkUieql3wOEI2lcmFhoaS\nkJDQrOyvJw3qTI9fWFiIWCwmPDy8wUVt5UpUej0fvPIK7u7ufNy49isQCFCr1ca4HFMiNkCn0aAH\nPNu2paawkKMrVuDbowcpCQmIhUKEYjFWNjbs+te/yMzKYvbXX/PTwYOUnj5N9y5dGLNuHQKBgLCh\nQ1lYUICTjw9+CgWW5eUIn38eBzMzvnvrLWqPHSPA3p7JU6eSmJDA/uxsLKys0Ov1SCwsmLR0KWY2\nNjiZbEeVJCeTW12Ns50dpzdsIOnKFd4JDka8YgVfvfACLTIy8OjVi5L6ekZ27Ejo7Nm0johAr9Nx\netcuOmVk4Ni3LzY9e5K0dy8pN24wZO5cOvXti9/AgVy/fBl1fT1pZWXsLyjgilKJ5bVrlCck0GXG\nDLw8PRGbmxP9xhtMDwrCNyCAektLyjIyyM7M5FRBA
Qnvvsu5c+dYsHEjkhdfZL9AgL5jRwa98w7F\nd+7g3q4d1ZGR6NLTuZ6XR0Cjv4miVy8QiUi4fRt3d3fcz53j5O3bZHTtyhylEum2bag2b0bbpQvq\nd98FKyuEV66gB+oOHGiIyaqtNfZhDS2OXr16IRaLjVK8goICo17aQNJnz57l4MGDLDcxVhIIBGRl\nZaFWq/99TcbGUrl7NzZLloCTE2g0CPPy0N9XgRsIWW9tjS4w0Bjk+rRobp5eZWXl39IgP6250D+N\nZ46Qm1shV1dXk5KSglgsfmg009P0hR/lDqdUKklPT0culxMUFNQss2xTPKlloVKpSEtLQy6X4+jo\n2KCEMLwPa2skej0vTp2K/fXrcPo02t69gYatsJs3b6LT6bC0tDQ+utvY2CAWiwkaOJAlAwei1+u5\n9fvvxMbE4ODrS1iHDmTeusXzCxYQNHAgVw4cIL2wkMTjx6lWKmnZoQNtTKogKzc3Bi1dSm1pKZ9H\nRxMITBwwgMDOnek5YgRmPj7IZDIU5eVcqKwkS6Ph4unTeMTENKzxuroisbamMCUFWycnLBwc+CM3\nl3J7eyYuW4aXlxehEgmp585hlpREeL9+OBYXYz95MkOqqnC2sODq7t34+vqSExfHl0uWMMrWFsfY\nWIZ36EBxdjZ5mZk4+vvTolMnAF7+8UejzGzz6dOYmZtze8cOzv72GzpHR4JmzuSP9euJ/ewz1CIR\nG1auxFEqJfH4cXx9fVnYpw+BjRV7QWEh38vlTHjhBYLffJPU33/H3s+P7IwMJr/6KnbV1ShPnKDd\nmTPUt2iB+bx5lNbX893nnxMVFYVdRQUrY2JQJCQw7scfsXdzQxsRQVFyMkVyOcGtWuHwzTcgFKLc\ntg19aSkKhYLq6mpsbGxo165dk0QQc3PzJnFJarXa+KTg7e1N//79jRLO/Px8rKyseP3115tUx/F7\n9vDVjh28kplJ6z17wMoK5bZtD2QjGgnZwwP1J5881XVvCo1G89jNVwOehfgmeAYJ+VEwkJtCoSA1\nNRWNRkNQUNAjI18McrenObYBGo2GzMxMSkpK8Pf3JzQ09G/JcR5FyKaOdQEBAYSGhpKXl/dAe0Mg\nENCnRw9Ee/agk8vR9ezZcB7p6STu3cvoFSsQ2dsbVRQG/1wLCwvEYjEVFRXYt2hBaFgYutpaOk6b\nRki/ftg2Dowmb9pElzNn8OrYkWF2dvy+bBm5t24Rct/Wk8zOjm6TJ2NmYUGNTEZuVhZuQ4cSHBzM\nDz/8wOFffsHNy4tpEREMadUKL7Ua1+ho5HI5uXFxfD5rFq2Cguj1wQeUlpVhZmuLVWAgNi4unI6M\nZNmRI7TZupVPP/2U8vJyzFu0oIenJwk//0xWejryoiLCunXjw1GjOBYby45Ll/A7fJghH3zAAKWy\nifXnli1bOHDgAN+vXUv7V19FO3gw8shIWhcV4d6uHS3c3Vk5axb5SiV1AgHvvfgiwQ4O6HQ6Jrz3\nHn7du2NtbY1Op0NibY2dvz9mXbpQnp7O9kazojCRiMKcHIpFInqHh1Pn7EyQnx8ODg5otVqmT5+O\nq6srhw8dop+3N8NXrcLM1xdVQABFcXGsnDOHUhsblnz+OZEmaRuGoZSNjQ2nT59+4vUllUpxcHDA\nwcEBHx8funbtilar5dq1a8bAXYPftYWFRUPe4ciRtL16taEnbWjpPUTd8HdUFg9DcyLM4O8TcnPN\nhf534Zkj5EcRn8GfNiEhgaCgoCc6Nj2NKsPwf5qmLj/M/Odpcb+nhqkyw9PTE0dHR2PL4X4Xt6Ki\nIlQqFV5eXtSvWAESifG1ZZmZZGdmoqysxM3bG2tra2MoqMH9rKa0lJLz58moriY1ORl5XR2ZWVn8\n/PPPWFtbM3/+fERSKcGN3rB6nY6UW7ewtLKi3/3vQywmbMoUPh41itikJHqMGYN/q1ZYW1vTr18/\nZDIZ6enp+AsEH
N+yhXv79jFs8mTCZs5k3dq15MnlDAkKokOHDrRs2ZJZs2Zx9Ztv2Prrr4g6d0ar\n0+Hm5kZdXV2DMb9cjuTtt4kYPpyAAwdIPHSIuMOHGb5uHQ4XLuC9axch/fsjFIub+jADTk5OeHh4\nkBcbS3JJCW1razn13XeUlZQw0d2dewkJ2AoESCUSSuvr0arVTH3jDSpEIkrNzNCdPInY1ZWE3Fz8\n/Pzo99FH5F27xvFFi+g7fDitBwzAKz6e89HRrDxxgufHj6dLt27G60QkEhHR2LfvNn48UnNzfMPC\nqC4sxNrDAws3N7p16YJdu3YENLZLBAIBgsZ5QJcuXXCwt6c4IQGnVq0QPoTM1HL5A+9br9dz8+bN\nBrmfSNTEiEmn06GKj0d96RKqdu3ovHYtGWo1BcnJ/zZbsrJCJpMZvwvNbTU8CU9jvWnoAz8NmmMu\n9L8Tzxwh3w/TtGWxWEznzp2bbQ7f3JaFXq+nvr6ey5cv4+bm9lDzn4f9m+ZWzaaxTA4ODnTq1Im6\nujrmz59PSEgI7733XhOPDL1ez9atW6moqGDlypWcuHIFgOHDhwMQ9fLL1NXUkHXpEm6NicbG5Zey\nMlqFhZH3xx98s3s39RoNkydOpOfixdQoFKSlpQFw+fLlhq0qS0uktbU45eUxf906BPdZYhoSte/u\n349bfj7VcjkbLl2iW1QUL27fTlBQEG5WVtSp1dja2JBvb8/uQ4dIvnkTn5oa6m1smPfOOwyfOROR\nSGScrCuioihNT8enb1/MnJwYOXIkMpmMQ8uWIc/OxufOHSJCQhCEhHD70iXysrKISkmhtqKCF7du\nfShRAYwePZpRo0YxNzSUm4WF7IqPZ65USuXYsdjt3Il0xgxWfPABV37/Ha1Gw+CZM2kzZgxff/01\nCWfOILp5E+/QUPYkJTF27Fg6depE1a1bCEUiaNGCrPp60oKCSPvpJ5yTkghsvJneD61azc9ffIG1\njQ1d09M5umsXC9avx7NTJ6Zs2mRM9T5w4ABubm5ERUVRU1PDm2++SfW1a3zSmMqhFwh4fs0aJI3a\n6JyLF9n69tuMW7iwIZi1ETk5OaxevZr+/fvTvlFVY4BQKMT2998RxcVh168f+pAQ9Ho9SqXyAeN7\nQ/CoSqUyPin8J0XJP+2F3BxzoaKiIjp06EB1dTVCoZB169Zx586dhuzD/zKeWUI2XbgwGNFfaSSm\n5qC5Q72ysjJSU1ON1p7NEac3N00aGt7H9evXH4hlEovFTJ8+3VjZGgaAhhXc5557jrq6OsySkojZ\nvx+NhwfDhg4l7eRJ3Nq25crp04jFYjrOmkVmaiqllZUU/forcefOIfrgA4K7deOtOXPQVlfjN2sW\n1ra2WNvaGi9UkUhERWEhW2fOJCYuDk+NhiB/fzp89BHOfn6IlUr+XLuWK5cuERwSQmptLecEAhzU\namQSCYsaJ/IapZIvJk7EycmJ+fv34//66yyeNQu9ToeZrS1btm17oIeo0+mQtW5N23/9C39/fwaO\nGYNAIEBVXc0PFy+Sk5tLcmUliZ99RsG5c9RbW/PcG29wbMMGrl64wDwLC0L69n1ob1Iul5OcnMyw\n6dNpeeECZSkptBEIsOvXD2FcHFKJhK4LFtB1wYIm/25Shw4o3Nwo6NEDx8BA3HfvJjo6GltbW6Im\nTqTz+PHk5eWRn5+Pj48P+ogILp0+zfq33mJMXR1OwcHGXr5BBTFj5Uok5uao5HJahoZi5eZG6okT\nFN29S7d//QtVTQ3bP/mEoOBgbG1tMTc3x10qxX7PHrp6e1NRU0NudjbysjKsxGIKY2MRmZnh6uKC\n5X1yTk9PT15++WX8/PyoqKj49w80Gqivp/6ll9CUlKBv/HcCgQBzc3PMzc2NckSAmPPn+faTTxgy\ncSJqb2+Ki4sRCARN1qGtrKyaXT03t/Xxd42F4MnmQm5ubv9obJMpnjlCNuS15eb
m4unp2WThwjCo\ne5Tu1xRPGuoZhoISiYTw8HBu3rzZrOOaHvtxhGzwtFAqlYSHhz/Q6xYIBPTp0wf4d7VdXlaGlZUV\ngupqzIqKaNW7N+K332ZpfT3qN9+k4Pp1NixeTN8hQ5i3ZQtl5eWc2rGD0xs3Mn7BAtx9fLhhbs47\nK1cyYfJk5tbVISgtpd5kEGR6zol793LoyhWEQGCLFlwsKsLu55/5MzubpPR03B0dKaioIOHPP3m+\nZ09kTk6Io6OpEonw69WL/Px8Dh48SEiHDrQw8SOWSKUUHjmCy7BhmN2nRjE8Kbi4uNCxY8cmX1ap\nlRVjFizAztWV6oMH+fHSJewlEtQKBa18fQl55RX8IyKQ+vhw+/Zt7uzejVouJ2rBAuycnCgvL0eh\nUODg4MARhQIhcEgmo/VPPyHz8iKudWvCRCIsaXjyKomPJ+PSJbrMmoXD2bM4FhTgvmoVwvh4NKdP\nszsri9k7diCVSrl8+TJ5eXlMmDABa2trvGfPxtXLi+Tz5+k+ZAj1NLSLKioqyM7ObjDKsbDApaAA\nOzs7xm3ciEwmY9+SJaSlpeHdsSMIhQTp9QTq9QQEBGBnZ4cgIwO73Fwmjh9P7bx5KKuqMHdyIvnX\nX/n6/feZMHcu8ydMQFBYiNpgWwqI09PpHR2NUq+nurra+DsV7doFt2+jXb4cvQnxPgrKu3epunkT\nq86d8evZE3t7e7RardG3wtCXNgyUTUn6YcXM01TIj5oH/d+EZ46QDR4MD2sbGKRv/wkh19bWGi03\ngxsrE9PXN+fYj1NPmLZYgoKCqKmpeeyFptfrKYyPJ+6XXzhz5AgDp07l9p9/cuf2bSavXo33iBHY\niERYWFriEhbG+NmzcY2IIK20FCsrK8IjI0l0c8PS0ZHQESNoM3kyb0+YQNzXX3Nx4kRs2rWj1SOm\n3K0GDeJf6ekEtGtHx3v3uJicTL61NYnJyTjb2zNv3TqUNTXsXrmSwZ07E6hUkj5gAKUtWhAfH09c\nXBw///wzy5YtI7pvX+NxM1esYN1XXzHuhRfotmEDJXfvsnXhQoqKiugxciRD3323iSnShS+/5NKx\nYwzy9+frkycJb9eOqRs3skwspuTuXT55+WVu7N3L0JUr8Wj08NDrdGyaOpWjWVk8n53NuLffNlbM\nN2/e5PLly4wZPBiUSj7YvBnNjRscv3uXDz/+mN82beJOWRmz+/ThzvXrBPr6UnriBNXR0bSrq8M8\nMpKMGTNIjInh2rVrBFVWYrZ6NbFWVvTu3BnrxjZRyJAhxgGoWKdj9+7dBPr4EGhlhbdIhNmkSejV\natQuLtzcsoW6ujpaTppEy8pKvnzlFexsbZm/aROB4eHIGh+f9f7+KE+cAFtbpCIR0salk3onJ3r0\n709gjx5IDx1Cr1KhaZxRCOLjMV+4EOWUKSjHj2/i9PdDWhrZmZm8pdfzxGe/qioGnjxJz/79Se7d\n23izNG01GWDqW2HwFKmvr0cmkzUhaY1G06yWx9/xQv6fiGeOkCUSCQEBAQ/92dNK2UyHZKYGQ0FB\nQQ9s8D0sV+9ReBgh63Q6cnNzyc3NbZL1l5aW9tA+nGGxIy8mhn0rV3Lz7l1c7e3x9PEh5I03aH/t\nGpH9+yOvraWipobsu3epra1FExZGkV5PCxcXHO3ssLSx4c1GwbuquprEffu4dP489Tod6i1bsJDJ\neCcqCom5OQ6Nv1dBWhpVGzbg1KsX2XfucDs2lvYeHtip1eyNjaV3z55M27IFkVSKqqoKsVhMYlER\nLb//nhBXV4L1epLWrmVAQADe77+Pg4MDV65cQSqVYmNjg7RTJ3qHhhIwYgQajYZtr77K4ZgYQszN\nsU9JecBisaKighPJyXRJSsJTr+evGzeIOnWKdlOm4NWlC6+sXo1nY97gzs2buXn9Oqs2bmTIpElc\nWrcO95Yt6dq1KxKJhJyLF7mycycrFizAqXV
rTp8+TfHVqxTcvk0LiYQTZ85wNiMDb0dH+sybR/+y\nMtzCw1mRn8+5DRvofeAAEa1bY+nhwfTZs4lo3x73hARC5HJC3NyabAOaory8nG3btuEqFuNZWcnr\n8+fTxt4e3aBBCLt2NQ76ampqSLp7F682bTCzt0cuFvNXTAzu7u5cWLkStVLJ/H37qC0uZvvLLyMU\nCskoKKDY05NJM2bgGhbGhrNnsbK0ZGpjRaoLCEA7bBhVYWGkpKTg7e1tXKO37dIFxxYt0DVes3v3\n7qWoqAg7OzsmTZpk/Cwubd7M7bNnsb98mdbm5gimTn1sq8HUt8KwtPSwvnRdXR1xcXFGgra2tsbC\nwuKB78P/J+T/wXicwVBzCdmgcNBqtWRlZVFUVPRYg6G/42cBTQ31XVxc6NKlS5PK3rBZZ7gA79+w\n2/Xee+QWFPCvpUvx7tiRqrw8WkRH06LRf1iRm0vC9u34DB2K2Ny8Ia1CLCbjwgU+e/99uj33HEGj\nR2NjY8O19es59fvvdG7RouFmJBAw9fXXObxqFfEJCaz6/fcGA/rffmPNrl1Idu+m3saG9kOHEjdg\nAD++/z4SiQSvwECW9+vH0GnTKMvJoayiAszMiC8upo21NdnffcfK1asJt7PD0tKSF779FpGfH19O\nmUKb6GjCpk3Db9Mm0mpquHvuHG6urjzXuTP/iopCUVzM3cOHadXoEAfgPHAgWTt3stffn9yUFMYO\nHUrr557jXmIiTsHBtBw2DGi4qZ7bvJnikhKSLl8m/vffaW9lxSuvvmq8kVbk5ZGVk4PD6dOc2bOH\n6bNmMXP6dGK2b8exdWvOJydDY/++SqvFukUL8rVaBrzwAmXbtuFTV0fGuXMU2tjgduUKNa+8wpBR\nozC/eZOkTz+l4uZNWkgkHFy/HrfoaAaPGgU0qDu2b9+OvqSEzBMniLl5k+T+/RnWuKih1+uNPeiA\ngABW19SQdfMm4aNG8eabbzJ79GikN25Q4eTEtWvXqMvPJzEpieTiYqylUsa1bk37xgWIWrUacSOR\n5vz1FwV37mA1YgRarZZ2ISHIZDIqs7OJ2bWLHlOmYD5wICUlJbi4uJCTk0NcXBwqlYqePXtSUVHB\ngQMHCCgtJSk9HYVAQI1QSCe1+qm9LB7Wl46JiaFNmzZGs6Xs7Gxqa2sRCARGD+W8vDzUanWzlq3u\nx5N8LPR6PQsXLuTYsWNYWFjw/fffPzD0/G/imSTkR+FpTOp1Oh1qtZorV67g6en5RIOhv2MEVFFR\nQUpKCpaWlo9MAzFNAjEQcV1ZGRaOjgiEQsa//TaK8nLajBnD50OHcujKFT5ctIg+ixej0+m4cfQo\nB/fvZ0ZAAD3nzKEyM5PEo0dp0b49CAToy8sJDQ2lpqYGiYUFcqWS0a++irmlJSKNBq+BAzn07bdk\nl5VRX1sLwM3SUtKlUmoEAoJsbBi5YgUSjYaAgAA69O+Po78/vx88yNcff0xBVRWdgoLI9/Tk4xkz\n2D5wIKF79qBydeWGTkeL3Fzq4uKw8PJCqVQiEQpxdHSkrKwMFxcXLK5fR37jBsNbtqTM1pYd586R\ndfIkcy0s8GjdGhsbGzqHhfHz1KkkajQcjY2lj5MT+dev8+mCBYyePJl+S5YADVUonp709fWl7NYt\nZJaW6PR6asvLsWvsYbcdP54XVSo+/uADiqytG0x8pFKiGoc8wd27M3PmTAByLl9G1DhM9Rs2jLnR\n0dT/9BMu9vakJiTw89WrbJgzh9GbNrHpl1/47fhx/JOT6VRezsfbt2Nx5AjZBw7QZcgQIiZNonXr\n1gCEduvGir59sSoooGtmJp9NmkTLjh2xdXamZefOiKqrGdS2Lev/+ou5L76Ik4UFbTUa+up01M+f\njzYqCq1WS8t27YjZsQMXCwvaXLhAwfnzlAoEPP/889jY2KBWq9n38cckpaSw5Oef8TdZk087e5af\ntm/H9fx
5yp57jr23bvHWW2/x1ltvoVAoKCkpwcfHh/T0dPLz85m8cCHDHR1Zt3IlFh07orKwaPbg\nTqNUgl6P+D5CNcxGJBKJUS9tgGGnID09nR07dpCXl0eHDh0ICQlh2rRpDBw48In/b3N8LI4fP05q\naiqpqalcvXqVl19++R/d5Pt/ipCbU8Wa+lrodLrHmv887bEN0Ol0pKWlIZFIaN269WONW0QiEWq1\nuoGY1WrOf/klB3fuZMaiRbSfOhX/xsEeQO8JE4hNSkKr1VJaWkp6ejq+Q4bwfmQk/o1LITcPHmTH\nN9/w1ief4OjgwI2YGMZLpcicnQkIC8PTyYmw1q1x79KF6upqqqur6TB0KG7x8aSXllKm11NdV0cL\nZ2fUERGMfeEFbO3suLBuHVqNhvAxYzB3dOTza9c4tXo1UnNz2gwfzj2NBrVajdTFhcX19YweO5bw\nCRMIUKkw79ABRCLePXWK6zducOfOHW7ExNBOp0P/xRd8XVlJkkLB4kGDmLZ6NTvfe4+D779PZM+e\nWLi742NtTfsDB1hSU0O1SkXtrVvYv/QSUVFRBDUGpJYVFZGZnMysadPwyMhg3tq13FUqWRYVhXtO\nDphUPaEjRvBqTQ0hgwbhpFIh+OUXdMOHN9lGK01O5pPZs+kQGcmAjz5CoVDgIhCw4a+/aB8RwcgF\nCwhxdyf/5ElsVSpi//yTXiNG0HbUKM5cvozawoIFU6eStH8/Dg4OTY2SNm/mvchIlEuWcP3yZQ4k\nJzMoLQ2dvT13LlzgXkkJzh4eKMViVAoFK+rq6OniguroUfSNa+YikQj3gABGLl8OOh3FPXuiMTcn\nQCTCbPFicocMId7Hh5bPP09obS3Y2FBSUmJUeLQdN4435HLCzp0j18mJbt264eLiYrS5LI2J4dSO\nHQx8912EcXHsmDuX+Rs3Yu7tTZ2ZGSqVyjjP0ev1iEQi45Pl/YXN+rFjUSmVvH3iRBN7a8ixAAAg\nAElEQVSb1scpLEQikXEb8fvvv6dHjx7ExMQYB+3NQXN8LA4fPszUqVMRCAR06dKFyspKCgsLjeER\n/208k4T8qJbFkypk04q1ffv2xMbGPlU005MI2dCHLi4uxs3NjZCQkMe+Xq/XI5FISE5Oxs7OjoJT\np1i3bh0OFhbYPSRBuf0LL/DVwIFk5uVRWFhIeJs2/PzKK4hEIgIbh2adp03D0dubkCFDmGhu3jBl\nFwqpzsujzZgxrBk9GomFBXqdjuS9ezmycydDxo9nwMaNfP7887j4+xM5bx4hkyejUCgwNzcnNTWV\nrKQksnNyUMnlmDs6IhSLGWjivpa4aRPF585Rv3Ah1r6+hLRsSfGJE3iMGYOuooL8e/dITE7ml19+\nYcywYaTv3ImVjw/TnZ3pqlDwh1yOX3w8Y2fNonX79lw4c4ZPN2+mtZsbWxMTISiI4YcPk7tlCwK5\nnKT8fFTA90uX0vfddzm1ciVajYZVY8Yg/v13xowcyc64OKI1GqRnzlD/3HPGc5XZ2RE9fz7l6enU\nHj2KbWoq3+blkZqTwwerViGRSLDz8aHPwIGYtWiBXC6nY8eOCHQ6JsyYgV+XLjhHR+MyeDAXNBp0\nOh1fjBhBWlYWN+7c4fzFi0jMzAhwdGRUnz5IZ85sMmwWKJXoFArik5Ox9/XFyd0d74ICRk+ZgmzE\nCA6vXs3FxEQ8vb2ZPWcOY62s0PTu3eAtYQJBQQF6KyuwsWHd0aPcvn2b7z78EHF9PSqVivbt22PT\nqxd1dXUocnPRnDnDXX9/lFotmrIy9n79NVHdujFq2DCmmJs3adVd2ruXq1ev0mnSJJw9PHB2dMTK\nzo7nn38ewOi9behFG1ptgPHvhEIhQqEQT19fNCrVA63A5iosDK8Ti8WPTC55GJrjY/Gw1+Tn5/9/\nQv5v4FHJHnK5nJSUFIAmFevT6IUf5WcBTTXRfn5+T4xkMg7s8vK4efMmf
fr0QafTYR4YiFylQqjT\nIXd0JHHbNhLPnuVPCwtemjEDXWUlIicn40q4Xqsl7sYNZJaW3Llzh9DQUCycnAgfNw6tWs03776L\ng60tL/v7s3jkSHp068bUb78F4F5CAl+vXUtFbS09qqq4GRtLWUUFATIZkY0DMmjYgKyuribq9ddp\nWVzM3YICpKWl2NjYGP/IZDKOHzzI0Vu36F9bS0SnThz49luyi4oYX1PDqYMH8fDwYPbevZSUlJB1\n+DCOCgUTR42iorIS0dWrtIiPp7S4GICRa9YgXrqUv776itq6OqrWrSPjxg1e0miY6+6OYNs29I6O\nLD9yhDKViv61tXj6+VFTXs61sDCcPD3pFx3NKBsbrJVK6qVSKC5GfOAAmokTwc4OtVzO8jFj8HJ2\n5q3t20lbvJg///yT3GHDcAgK4sjKlbTo2ZMOQ4YYTdIB2s+YwerXX+f4yy/jplQyLiqK6Tt20G/S\nJHI3bGDL2bN4mZtz8bffcJ81C8nNm+Q6O3OrXz+UR46Q9O23hC5YgNmsWbRp2RIrMzMuvPsugpYt\njVV89JgxxMbH8/6LLyJPSuLj+Hg6ZGbS8/XX/73wUl2N5M030YWGonzzTSzu3CHSzo7kujrc164l\nyNvbWKnW1tZy64cf6HvzJk7vvcfub77Bq3VrWnh6YuXmRkpyMskHD2Lp4EBQ4/sd9N579C4qwqV1\na5xatsS1Xz8ycnMJDAxsoks2rYZNw3UN17hWq2X0F18AUK/RNMlsbO62X1VV1T+ypPF/As8kIT/O\nYMi0QlYqlaSlpaFQKJpk5BlgiH1qLiHfXyEb/AAyMzObmNAb3Lbux/0Du7/++outW7cSHBxMZGQk\n7u7utLC3R6/X4x8QwJ6XXuJQairFLVogSEwkMzWVJR9/jC4gAI1Gw6mVKzmdno6riwu3585lQqtW\nPP/JJ8js7BBJJAwYORJLW1tqBQJSNBrcTc7fpXVr5i9ahM7RETNfX4KCgvgsJgbRfY+DZmZmJGzb\nxs2LF+k9YQI+ERHYBgYa2x2GSfnwRYvolJtLaGQkO7/6itO3bzPJz48APz+SFArs5HKKL14kVKkk\nQyqlwsUFUcuWKJYvJ0uvZ+wnn9Cv37+Xsge//z6h/fsjvHWL0k2b+Dwnhxfc3BgZFMQ9gYDkxETm\nzJuHi4sLUc89R3RjBWzQxFZXV5OXl4dcLgcg4PBhvDZtolavRzRnDhILCyzMzKi9fRvhgQNMGjsW\nh+pq6qVSzu7axYnjx3G/d49bhYW8/PLL3L59m9DQUGJ/+IGf9+1DY2ZGhUrFiXPnmE7DNVlw7x6j\noqMZv2AB1q6u6GfMQBMbi9OcOTjY2nJi82aOKRQElpXhbGtLcnIyZomJRCxdSvXAgcg/+ggLqRSh\nSMTyfftwbd2a9aNG8UdcHHfS0ggdMgRXQzSXpSXaIUPQe3lRk55OfnIyfr6+tG3b1hgJZcDJkyf5\n+swZ7GbMINDRkRuxsQAsPnGi4cY0YABX0tJo4+mJvqKCkPHjUel0KIRC4jds4NtVq2jh4MCyU6ce\na6BlIOf7WxamBG1aSRs24zQmRP2wOc4/6WPxv9vr4pkk5EfBQLD19fVkZmZSWlpqvKP/N5QTpiRb\nVlZGSkoKdnZ2dOzYsYno/f4B4KO8iYcNG0ZgYKBR8gTw4bZtQEPwaffly3E5cQK/GTNI2LSJuDt3\nSDxxguRz5/Dp1YuThw7R2s6OLlFR5Jw+zck7d4ieMAG/3r1BIGDQsmXUlpby8ejRRHp5MXrKFKBR\ngpefD+HhBPj54erq+tg174TLlzlw8SInYmJwt7Hhw23b8OnRo0mlZKikq6qqaP/ii5RmZdGhshK3\nwkIK9HryysooWruWu5mZbLl0CUs3NwSA1xdf8Km9PVYG347UVBCJEPr749ejB0RFUd+uHf/atQu/\nyZO5bm+PprSUVkFBnP/wQ1QBARiiQ
Ytv38bS2RlbF5cHNLFyZ2dK7OwoiIyk6saNBmWLRoOyshL9\nzp14/vEHrYuKuLhtG8+99x75MTF8dfEiovh42vv789GaNbz9/vtYBgTwWsuWjH75ZbJUKjwbNcfe\nXbogE4tpKRBw4v33+UMi4YPz59HOmEFFRQVrFizgp4MHEVhYUNO5Mz7u7oj9/BC0bYtKJkMZFER1\ndTVXDhxg5/r1jBwzhi4LFvD8F18w/N49lKWluISGIr93j+MffUT05Ml4TJ6MeskShHfu8MZ339Gi\ndWvM7iNjgEGDBuHm5kZ4t25IpVJWHDmCZeNnJxAKsbayYnh0NDKplJ9/+IEPe/YksHdvcnJyyCws\nRK5WIxGLSUhI4Pi//kW3Xr0IbLT8/PLTT2kbEMDcd9555PVj6uMBDTfN1NRU5HK5MenGtN1h+u8E\nAsHfXgppjo/FiBEj+Oqrr5gwYQJXr17F1tb2H2tXwDNKyI8iD6FQSFVVFTExMfj4+DzR/OdpdcuG\nSJmUlBREItFDqxHDaw19NJ1Ox6mVK6lXqRi8fHmTc7eysqJDhw4ApJ8+zV8//sjI998HOzuuX7+O\ntb8/gz79FKlUSsu1a4kYPJjzP/3EtZgYQtq3p0KhYP6iRdw4fZoSYPyQIRQ1Vn0Gu02BWo1SLqdb\nXR2O+/aR5+tLXnExLi4udOrUifKUFNbOnMmIhQsJ7N8fQWIiwrg4tOPGQaMqZO6uXfQ9eZLMGzco\nys7G/mE6cJWKpMOHEQQEENarFwMnTEB77RpVHh6M8fGhVqEg4dgxapRKrh07Rqthw8jMzGTFihUs\nX76cDk5OoNcjnTsXzM1R/foruZcu8dm8efQdOJCcsjKUVVWEtmqFs7Mzp48f59c7dxjf+ChbnZfH\n0jFjaNOqFQsOHnzgurDx94dFizB09XU6Hf6//EJtXBzXzMxQnjxJyg8/EJOeTsvqav6MicFWrWaq\nSETdzJnUC4XUxcayJzMT2z59cH/pJUy/tvZ+frw4fTo+kZHcPHbMOKw1LBlpdTpUWi02KhU3tm7l\nxKpVdO3alYmbNyMaPx4HwAFwmzYNSW0tkW3a4D52LEkWFqyxs2PYnDnIr1/n8Kuv8mdSEgKJBOfc\nXFwdHWk5YgS2nTrBI651BwcH49YnYFScQEMSy+KTJ6G6mro5c+gxaBCO7dtz/fp1HBwcGLd8OSMW\nLsTS1ZW9W7Zw+I8/0BUVERoWBh4e3Dl7ltu//kpQUBDOISHG687a2vqhm3nl5eWkpKTg5eXVJF/S\n8Jncn/uo0+k4duwY+fn5D31vj0NzfCyGDPlf7L1nXJTn1vb9n2GGNsDQBZWONFEERBERxd5r1GiK\nokaNURNjicaCvRNrYjd2jRqNNYoVu2JBEEG6NEFEYOhT3w/AbIy6497387z7vr2f4xO/GWbmmmvO\na13rXOtYx9GTs2fP4urqiqGhIb/++uu//Dn/0jH9X333/yaoKx2kpaWhUqkIDg7+oNrUvyIwpFKp\nyM/P5/Xr17i7u//TLVTd+9YF5YsnT1Itl9NjwYK3bG6qS0q4tXUr2YmJ/Hb2LKcjIxkzezbBX375\nRi3692nTqK6sRFZczMBPPyVk8mSa9uyJhasrDZydcfXyokd4OEJdXcrKyki5do3T+/fjNXw4A9at\nQ3HoENOPHaNLdTXDN27U3khSr13j6oMH2J06hWO7dujduIFOVBTqzp3R2NpSmJzM4iFD6DV4MH2W\nLePM3Ln8PHIkUw4fRr+2BFRSUsLZJUvYs3cvHm3a4OblxfiFC3lqaEiT336jvZsbqi++wDsgAOdD\nh3Br357ivDwe/vEHxcXFJCQkIJVKMRGLsSsqQlNvlDs1L4+XBw/WjJKPGMGFOXNw9vVl57VrvJJI\n6FHrGC2xtiY0NBQTV1euXLmCQ0UFdzZvpv+GDRjWC0B1EAgEaHR1eWFlhZ2dHU6LFhH45AmDVCoa\nx
8Sg/P57qlNT2XXzJhsUCiZ064Z727ZMq65GPzi45nfetw/x3LmkLV+OwtiYrrNng1CIW0gIr+7f\n58GDB1oX8xabNjF78WKuRUTQuGlTIvftw7pW5vSNdVZdjaFUirGFBVtzc/lTpSJGIqE30Lx5c44q\nFLSwtcXQ15ed06djb2vLZzt3YvzihVYvO+vGDcoKCxHp6uLeq9cHrW/EYow8PDBv1IiklBQ8a9X6\nACS14vR9hg9Hv6yMe4cPs3r8eCIuX2buV1+xcu1aCqKiCOzZE5lMRmFhoVb4Xl9fHxMTEwwNDXn1\n6hUKhYIWLVq8NfwDb2fSL1++ZOrUqQiFQtatW/dh3+Mv+DsdC4FAwM8///xvvfe/g486IGs0Gq34\nT13p4MGDBx/Mj/yQDLlO+zgvLw99fX1atmz5T7f3dcyJV69eUVVVhYmJCeO3bEFiaPhOZ+akyEg2\nrl9Pry5dGPrJJzy9cwdvPz9ESiVHv/sO/379cAoNJebBA14XFyNXqXDx8EAgFGqdNtx79dJeeIqK\nClJPnOCnefO4+PIlAx4/ZvLvv/OyRw86Fhfj1qkTcXFxABgbG3Pv3DkKKivZevAgUisrgj7/nAzA\nw8yMlxs2YJic/A9dXKAwN5fc/HwUVVUIqqt59vQpcoUCv4ED2Xr2LAcjIxFfuIB9kyZs2LePIbm5\n+Nva0iQ4mIb+/gyobRj+OX8+Vw8eZOX8+SRUVSEQCCgrKWF2djYmCgVt791DodHgZmODQq3mh23b\nMLa1Ze306cQmJ5NaXs6gQYNwqqU06ejq0m3aNBbMmEH07t2UZWaiUCqxPHoUabt2tGrVCoFAgCw7\nGzmQ+fIlenp6+Pv717AFxo7FMCUFp/Jy5D//TOdly9CJjOTFl1+i06wZvefPRyc+Hse9e8nPyOCx\npSXOcXE0ePmStXPmUFJYyJb16ynt1w9R377YPXyIxZkz5KtU3Lp1izZt2iC1tKTXkiWsW7eOHDc3\nrkRF4TlihFZASqNW8+vEiZy8fRsTXV3SKytpbm7O0U6daNa6NWXV1fSPiMDMzAxXd3fK792jobs7\nDRo0QFZczPOMDB5GRrJ5xQoM1GocTE1Z1bIlppaWf3tNFFVV8axbNxo2bEhLO7t3rnFDiQR3V1d2\nZWYS4ueHxNqawLAw+t6/j1e7dkgkEiQSiXbLXzeZl5OTU1Mvr91xxcfHvyG0VN9hu+51v//+O6tW\nrWLBggUMGDDg39Ib/++IjzIg19WVkpKSajiV7ykd/B3qas7vglqtJicnh8zMTBo3bkyLFi1ITU19\n78Ko217JKyoQqNUEBQVRVVVFSUkJMoWCVzIZqbdvax08TExMakZLW7Tgk+HDadG/P3oVFQT27Emj\n1q15fu0ax06coLq8HKfQUKZs386MAQNwd3Cg/6pV7zoAEAh4sGcP38yciUyhQA8oKi1FIhYT0qsX\n7Wsn2uAfza+gkSM5feMGKrWaKn19DoSHc/7yZfo9eMCxo0dpqafH3CtXiM7MZMeOHYzYvJmh1dW8\nePWKmHXrOPfrrzRxdmbKqVOMnTqV7XPnklldTYVazZdhYRglJDB/xw4mbt9Op3pTUr4DB/JZeTlP\nSkuZt2QJK6ZNY4CXF5sqKhCkp3OgoAAHT09Gb9tGWWkpCSkp5B85QuCYMbRs1w73EyfoqFSCXK4V\nUL+7Zw8Pr1zBzt6eGwoFPcRilh08SMq6dawZOZJ2vXvz4+efY2NhwfRz597Y5aj9/am+dOkf57NW\ngGdEWBhf1rFOnJworqhA5OKCs6MjZd99x6uOHQn+809uHjpEn2+/ZYJKRYvycowBHQMDfv/9dyIj\nI9mwYQMODg5at/C8vDyKioreaELLsrO5FxeHk7k5Go0Gc4mEHzt2xPvMGdIDA8nx86O5vz+PHz9m\nWOvWeACfDBiAwMiIBtOno3F0JN7JCY1UShtHRzr06UPeq1ekZGQ
AaEeT69ZeXRO8TuTKx8cHfT09\n0i9fxrZFCwwsLFArlSgqKqh49Yojs2fTrF07PO3s6Dl+fI3Vl40NE3///Z3XhFKp1JoitGnTRhuQ\nq6urtXZT+fn5VFZWoqOjw40bN1AoFFy/fp0GDRpw5cqVDzIh/p+EjzIgy2QykpOTcXd3/y/RYd7l\nGlKn75uSkoKlpaVWxKiqquqdtLe/NuxW9e5NUUkJK+/exdDQEENDQ2xtbUm9cIFKmQzHbt0oKSkh\nMzOT169fI1AquXbiBJePH+fxy5coNRrOXL2KfXAwi7ZuxcbHBwBLd3d6du+Oi58fOn+pzQkvX0a0\nezeKxYuxcHYGtRozkYiOHh7kFBSQfPQolqNHY2hlhbKyEpGBAQl//IFSLqflsGFsEIk4t20bx7du\nZdzs2RibmWHj64tZdDSn5HJcYmKIi4sjJyeHLl26kJubi0Sp5OjmzbyurKS1pSVCoZBhYWEM/fJL\nFnTtyvnly9lw+zaqTp3Q19XFd/DgN47Zpnlz+q9aRWFhIdXAkIsXkR46xBB3d+6mpSEtLMTZ2Rm1\noyP3798n7dw5dp8/T46uLosePybswQOE5eXEtmqFrpcXJiYmePXuTfGaNSQkJdGrbVsm+/uzLDUV\nHZGIvWvWwK5d+AYG4uLhgVlmJhqJBGpZJQUJCRiam2u35/LduxGmp6OpRwFEIGDu0qW8Li1FVyTi\nuwUL2DBrFk1sbHhtYsKDnBz2hYezS6GgS0gITeVy3N3dsbKyqnHkrqpCX1+fVatWvbU9B5Da2zNv\n/XpM7e1p0KwZAqGQgowMHvn4YNK9O/61mevpkyd5/OIFjkolut99h+LePTRmZmiMjBgweDAZR49y\n6+5dOoSG4uXpiaB2PL++hkRpaSlyubyGH11UxB+LFzN4/HgsnZyYOXo0vbt0YcSOHWz/4gsexMQw\neto0Lt++jYOnJ0s/QOa2bvjK6R1NYz09PfT09N4ItnK5nKioKM6fP4++vj5JSUn06dOHqKioD5K8\n/Z+CjzIgS6VSbTPsXfhQcXiRSERF7bgw8EbW7evr+8bs/F/LG/WbDhlRUTVeardu8TA5mQBPT/Lj\n4nh6/jytR4zAyNaWX374gficHMaPHIn90KFUZmTQUFcXN4GAmxYWnEtIwEwsxs7UlPmffsqACRNo\nOnAgSj091Go1Orq6dF2wgJXffou8QQOC6tcGRSIO5uTwfNs2Qvr2ZVhICLEJCXg3b0786dOMWLSI\nz48d47Pp01k1cyZTly5ly6JFlFdVEW5tjVuPHhRlZVFVVUXTbt0IHDkSgKBhw0iMj6eRvT3NmjXj\n2a1bHJ08GQs7O0zMzBg8fDj2Pj741A4LAIjOnMFZoeCFQIBaqeTur7+iL5Fg6uDAszNnaOTnR+yN\nG+y7coXx48eTnp7O9pUruaJWs3LsWIa6uVE4dy6VJSUA/LF1KzNXrSJs2DBmTZ3Ko8pKLCIjiVcq\nabZnD261wu0ymYwSHR36tW7NhdhYPhk2DMvu3VkrEJARHU3+woUEtG5N1eDBOCoU6M6YgeLHH1GH\nhJAXE0PXkBBcTUw4VquLqztlCsIHD6i6fh3qifJ37dmTrNRUniYkUPDiBXJra6qsrFi2YQM2332H\njkCAlbU1jYqKaHnnDhdycjCuqKDU35+cnByqq6vR19fXZqp1CUVxcTE2NjZ49OmDRq2mMDOTnJIS\nhEIh7qNHa4NSWX4+MysqGOvggPPixahEItDTQ1G7a9IFhk+fzvOxY9m6eTPevXph4+NT09is/TxL\nS0sSExPR19fHzs6O9GvXyC8q4tmTJ1Tb24NazdnISJoeOkRGejp2trY0HzSI1U2bYlOPEfQuyOVy\nEhMTEQgE2nLQ3yEvL48pU6ZgYmLC6dOntW4/VVVVH1Uwho80IP8z/CvDHjo6OigUCiorK0lKSkJe\nm9G8K+v+q2BQXcPuVWIis0e
OpGXTpni1aoWhri5DZ83ij2XLWH32LM4//cSw7t2xb9yYE8+esX3X\nLvbOmEHEpEk8z8pip50dk0JDGTB1KoXPnyO1sSFizhwy797Fzs8Pmbm5lktbFRfHpVOnyLx+nYBn\nzxDr6/Ng926EYjFRXl7kxMfz5eTJBHbtSkxGBhsvX0ZeOyhjKpViIJViKZVilJXFtMGDOR8Xx/SR\nI1m4bh2B48cTWK/ZAXB+7Fhk169jtmoVFXZ2SEtKuHrvHnoPH2IgEjH//HnKq6q4d+8eOjo6GBsb\n02T3boQZGbzWaMiLjeXgr7+ip6uLmZ0d4VOm0NTBgTtpaVQ1bEj+gAEYGhqiU1nJ9epqzsTG8vul\nS7gGB9N20iRUcjm/r1qFjVzOgM8+4/K8eVgBB1+8wMLIiHXt26MLWFhYaC/iFpGRzJXLtbsQ2/Xr\naXXjBvfmzmXu0qXE7NvHujlzCP78c9RNm6JRq9k5dSqFKhXN6l38Be3asfn+fXpcukSzAQMQ79mD\nqmNHei9bhlqt5vT8+WxYtYrvZ86k688/o/nxRxbdvAnAnatX+ax/f75SqUhJSsJIqaRneDi4uqLR\naLQUwdLSUnJycli+fDl3795l165deHh4cD0igsMHDvBDRAQtP/1Ue0yFKSn0Dg5GIxBwrXNn1MHB\n8A7BHbugIKatW0fq7dtYeXpqH9doNOTm5pKZmfnGgIdZv37s7NwZsUSCRqMhb+BAMlNSuHXkCPHP\nn/PVrFkkZmRgbGVFUWkpxtRkuX+t++bn55Oeno6Li8sbZqvvg1qt5vDhw6xZs4YlS5bQp0+fN97z\nXY2//+n4XxeQP0Qcvj4KCwspLi5+awLpr6hbKHWOHXWPWbi4MKBfP9yDgqgoKsLGzAzjhg3pP2sW\n9x8/5lFeHvvPnMFOKsVOX5/RI0ZQnZ2Nh5cXbdq350xaGod272aESsWWPXvoHRqKvljM/vPnUarV\nTDh0SPu5pc7OtF66lKSSEm5HRXFn3Tp2XLuGm0RCxPXrNGrUCIlEQkFGBrlFRQhsbNiwfj0WajXu\nvXohEAr5qWtXxJMmIcjPR2fCBPQlEuxatUIll7Pp009x8vSk15IlaGplLl8XF9NZIiEgIABV8+Y0\n9vBATyJBrVJhY2pKwpkz+A8ZgkBXF5lMRu6PP+KTloY0J4dCY2NGL1uGsYkJ1k5ODOrVC+cWLWh0\n9SqBEyZQIRZjZ2fHzrVrWTF3Lv6hoWQ9fox/LUVLR1eX0WPHYmhigru7O1tSUojJzsZaImHk4sUU\nFha+4Z24OywMkVhM92XLSE1NxcbGhsYdOxL/7Bmr58yhXKmkoYEBaldXHlVX4zxmDBndunHjyROC\nLSzYcuuW9r1ee3nxuLIS8ebNTJo4kR9UKgY0bUr+sWM8y8zEulkz2vv74xYSgvL1azSNGiFasQKN\noyOunTrRa/hwOo8YwcjcXHRevAArK87Mncvho0ep8vBg5rx5WsfoDu3bc/3iRfYvX86Xs2cjMDXF\n3sqKUo2G5ORkbTatb2KCrYUFVRIJyi1b0P0n6mfOnTrhXE+DuqKigoSEBCS1v+VbWuK1jB6BQMCQ\n9esBKMvLo8u9e7j36kW1XE5paSklJSVkZ2dTVVWFnp6edlIzPz8fPT09WrZs+UFaEy9evOC7777D\n3NycqKiof9mx/X8qBO/SfPgn+Jf++T+Jd03CAcTGxuLk5PTGuOtfUWdWmpWVhY6ODm3atPlb5oRa\nrebhw4eoVCqMjY1raFq1dJ6615744Qd+O3yY/r170+Hrr5EbGvLg6FHMLCxIOH+ek1FRzJ47l2vH\nj3M0OpqGhob0aNeOlEePaKBUYhUQgJWLCys3bSJNqURfJOLxs2c8OXaMx1FRjNy2jXWDB3MzNpal\ne/ZweP589j96RAtTU+b9+Sfl5eU1maqREQZCIUaWlpibm7+ZyajVPFi/nuP79+MfEsLAiAiqi
oq4\nv38/I3/4AQtdXSIzMkhKSiL6xg1u3bnD2p9/fmdz5Vx4OOu2bGH+woW0Hjv2recVCoW2nCCTySh4\n+pT8Bw9o2L07RlIpthIJG0aOpLm/P9euXkWuUFAml/NLZCSW79ABkZeV8fv06dyMi+Nsbi5ClYrE\n5GREurpo1GomNW+OWq1m3MGDuNfKTEKNz9ym77/ns+nT8ejdG5G+PoLHjxGFh3wBu0IAACAASURB\nVFM4YQLX799H39YWQw8PdHR0uHXrFi08PWn19ddEA1+9fMmnhoYsMTYmdv16nEJD0dPT0w4uJCYm\nEj5nDnPT0vBr0QL5zp0A5Ofnk5ubi7Vczt1Dh1AqFOw/fZrnenr4GxuzZOdObJo3Jy89nW4BAbiZ\nmbHz4cOatVtSgjI/n2Jzc202XVFRgVgsfqPc8VeGQh2ybt0i5uRJusyaRV5REfn5+W/RNUtzc0m+\ndAmfoUPf6kt8CKqqqrTu6AYGBlq98DoGxbuOT61Wc/DgQTZs2MDSpUvp1avXx8Kg+KAv8dFmyP+O\nJnLdtqoue2rZsiXx8fF/y5yoy4h9fX1RKpXaAPPy5UsqKiq0wuutv/sOKxcXFs6dy4ucHGxdXHAP\nCCDz4UNOXr3KV59/TuvRo0l/9AjNvXsY6+oS0LUrNGjA0ZMn+XnKFPJv3EAsEtVYz2s06Ojpcevw\nYe4mJTEwJ4chixbhfvEiCnNz3L29CffyosPIkTRu1gyoCYJ1x5ednU1SUpL2+ExMTHh19y7zli+n\nVKGgQaNGVMtkXFq9mo07djCmbVvsPDxITU3Fy8uL+9HRPIuP53lMDJb1xprr0HL4cL5RKPCqNVdV\nlJfzOj1dO95bX1ZRqVSycd48/rx5k7lNm2Lh7Mzzp095/uIFJrXDExqNBksjI6Tv4OdCjYXTsE2b\nuDpuHPJnz9BXKIj97TdafPYZmZmZDF61ClcXFxrV0gHrYN+2Lcv+Iiqj8fFBcfQoJiIRvbp2pTw/\nn+SUFArKyti1axfNmzfHZcoUrMVitu/Zg15CAhmzZ+PQvj0VFRX4+/nRrXt31q5dy9mzZ4m6cYPe\ns2fj/eWX2s+YMmUKt2/f5vvgYM5fuMDyFSv4Y/16Tsydy687d5L/5AniRo3IffqUfRs3YtemDUcn\nTWLXqVPs7tsX91evqFq2DMu6cWlqarR1N7mCgoIa+dBaZTRjY2NycnK4dPEiRo8fcz4qCmHDhnj0\n6kVAQMBbQ1Jnlyxh9++/s1xPj+ZDhrzznL8PlZWVJCQkYGBgQNu2bbUZ9/uO78qVK2g0Gq5cuYKz\nszNRUVEfheD8v4qPNiC/D+8LyHVKb3XTcXq1zbJ3/W9dw+7hnj08jIwkNjYWZ0dHVEolwxYvxtbX\nF4taSpBAKEReGwQLCwupcnBg8MCB2Hp7s33pUpwuXeLriAj6PX9O+wkT0NHTY6C7OzdFIho0bIi5\noyNtTEywsbPDxccHR3d3lpuakvH6NZMXL2bx+PEs0NMjrnFjfhoxgvZjxhAyfjyGOjqsOHeOhubm\nfL5li/bYxWJxTU3V0BBhWhrqoCDkOjrIZDKyHj3i3MaNhPr7492zJw08PBjVvDleLi7079wZ98GD\naRoUVNP0OXUK45gYrF694uqaNfjXBmSNWs3GQYOQmpnx5c6d9K4dzADYO348Z69eZe3hwzRu00Z7\nLl/k5vL43Dk6jBuHVCqlWceOSO3tcXJyIiAxEbVQSFl5OQs6dOB1aSn37t1D38hIexMpLy9n5cqV\nDB06lNDQUJYsWcKonj15duoUFi1aEB0djZWVFe26dfsgOyDhpUtgZIS6dWvtYzO7dOHI8+cYWliw\nc+dOnJ2dMbew4GViIlfXruWhSsV8iYSitDQqo6Iwy8tDJyODhBs3aOroSLCPDxWxsVQLBNSpXo8a\nNQp3d3c+HzWKzk+e0PzXX1F8+SWFz58jFomokErJzs6m9
YoVCEUiqj/9lCePH/O0qopdjx/T19eX\nJd27M2vRIiJrXZ8XLFjwRs0c3tyJPNyyhetnzjD0q6/4zM0N9+7d31vPNW/UiOb29sScO4dj27aY\nfICGQ30hfTc3t7dKDbq6um8dn1wu58KFC1y8eBFDQ0Pi4uLo3bs3UVFR/7LI/f90fLTf9kMlOMvL\ny0lKSkKj0bylTVzn1lEf9Rt2F/bt42ZcHCb6+ohzckjIyaHNnTvY+vqiUauZ3qoVBvr6hEdFUVRU\nhEwmo0VAAFcXLeLWpUt8N28eYktLSkxMaDZ5MpmlpZRkZHAxKYnrSiUhZWXMGT2agoqKGl0Hc3MC\nRo8mYMwYhGfOYCOR4OzgwMInT1CrVNwtKKBXVZV227n0t9+ITU7mVVYWxlIpevWakTrnzyOeORPF\njBnojhqFpaUlyUlJXIuNZerkyQSPHk1BZiYNzc0xsbTE46uvMDYyYn2/fpibm1NUXMzD1FR8nZzo\n9Nln2vdVK5XEJiRg/g6N5xadOlFUWIhp7bBGaWkpz5494/by5az680/C/PyITk5GJBbzxfbtNb+X\noSGClBRMpkxh+fLlKJo1Q2pvj1wu12b6ycnJpCQkcHnZMlrPm4dOeDgeHTpg4OREkUpF8+bNMTAw\nICsri7y8PAICAqC4GKTStyYjUSjQCwtDY2ZG1ePHAFQVF/P81SuaGRoydMYM2rVrR9TWrSQaGGAq\nFDJoyRJax8fTfNAghCIRApWK+97e5A4dytfDhuEoEtFFJuP4/fu4t2mDS2golenphLRrh42NDca2\ntlh99hkalQoNYGhqio2xMbYODjh6e6MePpz8/HwEaWnMO3mSnHbtyNTTY0l2No2trDBr3Jj4ixcp\nLy8nPzaWyPXr6TVzJhZNmmjXfN1O5NPu3Ql9/hyDnj2RuLlRWlrK8+fPKSsrQygUaocxTExMOHP4\nMPczM3mQnk7TkBD8a9k170NdHdrIyOgtA9r3IScnh8mTJ9OoUSPO1eN+V1ZW/q8LxvARB+T3oS5D\nlsvlpKSkIJPJ3nkn/yveJQA0bts2ej94gL6JCXaBgeQ9fkzjwEDta6QmJlSUl3Pul1/wHzyYJk2a\nkHjqFPEZGVgaG+MRHIzYwKBGO0AgoKKiAplMhigoCO+XL3GTSDAyNMRAVxdldTWu9coC/u3aMczU\nlAenT5NTWEhiaSkqwKheLffigQP8tGcPdnp6NGvUiNX376NRq7m9eTON7Oxo4uqKeOVK1KGhPM/J\nYe2KFXzaowcdvv+eapWKV+XljNizB1dXV/T09KgoLaWgqIjq6mq6T52KyYEDnLp1C5+HD7Hr1g0T\nExPEurqsmTiRl/v2QVISuLkhy87mxpYttOjfn6kjR6JQKHgSE0P84cM4deuGtZcXdpcvExASgoe3\nN0H1xNpl2dlsGDyYXpmZ+PfoQWl1NXrffINgzRosXV2xtLTE2dmZqkuXWLdtG08MDLBKSCBdKEQs\nFiORSMjJycHExISlS5cSFxfHseXLMfnhB05LpbSPiMDK05ObN29ibm6Op6cn8o0ba3SEa6GqrsZC\nICCgspJhEgnXTpxg1fz5GOnqUiKXM+HLL98YxtH4+iK/cQNzpZJup0/TWCCgXVYWrfr1w75vX86F\nh7PvyBH8unXj2P37TJkyhd7DhyMWi3n+4AEGVlbYNWqE7e+/I8zIQD5qFJPc3ZEePoyBnh4BXl6I\nunThz/PnmXvhApaWlsyQyZA2bkzsiRPsOXmSJ7Gx+LduzdCNGwG0Ax7VLi54nDyppWzWX/dFmZmU\nFhdreyjdJ06kXW4uBoaGWIaEUFJSgpGR0VuBts7pPS8v729lA+qgVqvZu3cvmzZtYuXKlXTr1u2N\n0uC/Y8f0MeB/XUDW0dEhJyeHrKwsnJ2d8fT0/NumgUqlIuv2bY4tW8bQxYuxcHXl2PTpGBgbE7F9\nO9UqFYunT6dzraKVR
qPhVWEhgaNGcWL5cnauXEnop58iEAhIuHqV4upqXlZWsmb0aJ5mZtKvaVNG\nBwQgmTsXia0tI0eORHTrFnP378fDyIjNN29SLpeTkJ7Oox9+oEmHDojt7dmZnY2uUMjl27dZ8cUX\nnHz2jPjLl/GppUIlRkZiIpfT2s4Oh1p606tnz1iyaBEtnJxYPn06nDiBxswMYX4+Bnp62Ht5kfr8\nOakXLuDi7U2Ttm0pzsxE38WFrKgonO3teZiYSPyRI3y9cyctdu7EuWdPCgsLSU9PR5aVxS+rVpFd\nUcFPz57RytmZH7p25fTz54y/eZORe/eSmZlJ4tatzDl8GOn27Xh36MCNwsJ3/g7bx41jS1ISIn9/\n8vPy2BweToRUiqtM9kaHOfirr1BrNCgDA1H5+NDeyQkdHR2qqqq02/XQ0FAcHBzIKy/niVLJxtu3\n0du7l/YzZzJp0iTMzMy4cOEC9O37xjEYVlXxua8vYoWCTDs7Alu1orGpKQkFBQwOCSHgPfVVoUhE\ncuPGbN6/H7lMxvE5c7CwtCSwf39yU1Pp1qYNhtHRBFtY8OLFC3Lu3qX42TOy4+O58+QJ7V++xDM3\nF+MePejTvTsGRkZcOn8edXExMw4cYHL37gg0Gm6sX8+8RYto6ejIokuXsHJyYvW8echv3GAoNUMY\nx6ZO5fqNG/x06tR7g1147968Litjd2Ii9vb2eHt7v1eu1Ki2ZCQSicjKysLc3Pyddeh3ITs7m0mT\nJuHo6Mj169f/LaW2jxUfbUD+68Vdx7FMSUnBwMDgbz3y6jLi19HRrAwPx9bJiX1RUVzv1YvZK1aw\n+/ff8XN2xs3CAqmREW4hIUDNNjw5OZmK1FQ2zpyJq40NYb17k3zhAi6hoXT74Qeade/OhS1b8OvS\nhfyICLbfuYNNSgq9Jk1CU3uxdJsyhdRnz7Bt1AiJSsXeb76hRdu2nLx0Cf/0dIZt3cqQTp2wKC0l\nLjaWT+bNI33iRIRisXbwZeq+fYx8+BDv0aNBICAvNpb7Bw8ybcoUHFq25IpIxEEDAxYplTyNjOTT\nMWPQDw5GV61m27JlNDIzo01QEHtOnGDRkiVM/+EHsqqqaGlhgbFUisTams5/MYVMyM3lYWEhYh0d\nBGZmPHr0CL/AQERCIc0HDiTtxg2eHD2KX7duBF26REDHjrQOC3vvTTGgSxcG5+YyautWsqOjcWrc\nGP0NG1DXs1xKu3aNpJgYHIYNw93d/Q3RJX19ffT19bGyssLFxUXL87U4dIiZO3bQLDubtNOnkSoU\nxD9+zJkzZ+jatSt6enpk3brF/JEjCTQwYGVKClIdHc5YWmJgaIhthw6k37zJmF27kPyTjPDu3btk\nlpRgotGgqi2Vufj4MHPIEDTm5lgCT6OicPX3J/LYMa7GxbF+zx66ajTMWroUV6WSEdHRuIwahbGx\nMS2//RbzjAyYNg39EydQtGjBTytWgEbD8O+/R8/EBN8vvmCZnx+rNmxg165d+Pv706BBA+16jz92\njC3z5zN140YcatctQHBICLLi4n+I3FOTwEil0jeCpkqlorS0lIyMDEpKSt7SZqkre/y15KBWq9m9\nezdbt25l9erVdO7c+WNhUPwfw0dLe6tTUwN49eoVycnJmJmZYWVlxYsXL/Cu15muj/oTdknnzjFn\n3DhelpezdOVKbhw5wo1Hj/h6wQKoqMCuZUsaeXujfv2afd9/T/O+fZH6+tKkSRPESiULe/akXdeu\nOLRowbfjxtG/XTsu3b1LpVJJlUrF0NBQDIyMKMjJYdCYMTi/J9PaO2YM8w4dYkyrVjh5erL76FHG\nffYZA1avZrizMyKhkHX371P44gW3f/4ZC29vGrRqpW16SaVS9PX12T92LNuPH+fHb79l5/btmLi5\ncb+wkHWrVrFy+HCsjY3ZkpiIjlDImTlzMG/YEEVVFV8vWEBnR0ecHR0RiURMWLIE/YU
LUX73HerA\nQO05EwqFUF7OcmdnKg0NCU9ORqlWk5ycTHl5OZaWltyMiGD7gQOM/vxz1FVVuHfogFfv3u/cCv8d\nNBoNmRkZLO7WjRcyGYdiYjCqnZqLO3KEQz/9xKQtW7Cp1SQuzsjg9JIldJowAVtfX4T37iGePp30\n7t3xW7yYKmDv4sU0btMGYWYmOQUFbFq4kIGdO5MXF4e5mxtjDhxAR1eXhg0bUlpaypTgYEb/+CP2\nwcFcunQJW1tbrVkp1PQoqsrLMRAIMKzlsYu2bkU8bx7p06fz4y+/8KqykiMJCRQkJJB+5w7Bkyej\n1mjYvWsXHp6eBAUFvUURrCwpwSwxEVq25MXduxgZG+M7bJjWLf3mzZtMnDiRIUOG8ONftIivrl5N\nREQEiyMi8KlXHvpQlJaWkpCQgKWlJY6OjtpeS93odR0NT61WI5FIuHTpEtbW1uzfvx93d3dWrVr1\nT2mnH4pRo0Zx+vRprK2tefLkyVvP/zPH6L9zm/6/gP9He6trGolEIq3AUHl5+T+lvdU17AQCATvD\nw7lWWEh/OzuCwsIICgvju9JSdE1M2DVyJDkPHxI8ezZxhw5x4sYNCl+9osf48WhcXSnOzGRvXBz7\n4uK4cf48YQMGENCvH1ejo0ktK2OglxcHL10ivbqadePGvTcYFxYWUq1S4WZgwKBp0zCzt+dxdDRO\nvr4IhEKGDBiAQCTC3NycsqdP2XP0KJ2Skpg6Zoz24s3Ly6OyshLHvn35xtSU5NhYFCoVPXx9Gdaj\nBwb6+sycPx8DIyMi58+n8w8/cP7UKdQaDRtiYpgWG4uBRELy06eMXrkSnepqcpKSuLJkCXGvXpFv\na8vT/HzOnD2LVEeHeZ6eyPz8yHnxguzsbJydnfHy8qLk+XPOnD5NZx8fggcOZNyQIXjdvYv3+fN4\njxmDUE/vDQ6tkZGRNqsrSEhAYmWFYW2NvKSkhOiTJ9k4YwZtmzdncHAwSZGRxFy8yBebN5MWHc3D\n9HQKEhO1AfnpmTNsPXYMI6mUQKGQRaNGMfyTT2jz/fcE//ILyTIZXt7eiHbs4Mpvv+G3YgVrr12j\nvLqaJ0+eYFpayqbPPiN0xgzWrVvH3T/+4Pr58zQ7fBgTb28WLVqEp6enVk/3/MKFnDpyhPlHj/Lw\n3DmcAgNp2KoVuSEhlE+YQL6REXa2tvg3aICusTGNAwOxs7NDeOoUt16/5sicOSxcvBiCgrSNOZ3y\nck7Pn09BQQHfbN+OyMqK6+vXIzIwQFGrQy2XyxGr1TQpKoJLl6AuIJeWIl6yhNCuXWn56JH25vWh\nUKvVpKWlUVRUhJeX11sN8Lrfrc5Roy5Ip6WlaYXfi4uLmTNnzr8tl1kfI0eOZOLEiXxZj0pYH+9z\njP4Qt+n/FD7agFzHJ3Z3d39ju/U+q6V3OXZ8v307duHh9Jo8WSuNqSeVolGruXDtGjqAd3ExrcPC\naNqiBdPHjydt3jxmt2zJ61evsBAKqdZoKFer6ffTT0gkErZevcq9vXtpP3kyj48e5di2bUi8vFi6\ndCljx47VDljUsT90dHT47Jdf6Dp6NJe2baN/eDgr7t3THnv/iAhk2dkUZ2TwMimJheHhOLVpg66u\nLqZGRjzatAnPLl1o3KYN6TdvciU6mj9jYujTujW2np7oyeVcWrWKyw8eEOrry6noaBp5edGwQYMa\n1wyRiFF79nBp2TK2Hz1Kp3v3aPjNN0wsKODRs2foAGWxsZiamtYETyMjfhsxgmWLFzPT15d+w4fX\nZL5lZeju24exSISzmxtO7duzdPlyTu7Ywe7Tp9n9xRc4tm2rzbAyMzO1nX9ReTmzP/0Uc0ND1l28\nSIFcTkVFBa7u7pgZGdE0IIAmwcHMGDGCkooKuick0K9/f4J9fbEYMgSNRsMff/yBc5s2rF2zBs9e\nvXgRE0PW69fkl5Yi1NPjaFaWlof+47JlnNZo+M3
amubu7vz000/s3LmTQY6OXL19m4ZPn+L/ySc4\nTp5M22bNsAsOpri4mK/69uXMpk3c//VXWoaF8TIzk+zCQp7fusWsRYsoFosZMXUqPXv2xM3Hh81j\nxvBHZSUN9PToe+cOdkFB6OzZg+jAAXY0aEB8ZSVVtWuyMDkZeWkpCZGRbIyMRKKjw+C4OFRyOeGH\nD+Oop8e+GTPIy8vD0dGRqtJSLA0M0ADR0dEYGxtjVlKC7MIF7I2MMKonSP8hKCkpITExkQYNGuDv\n7/9BteKsrCwmTpyIh4cHd+7cwcjICJVKRW5u7r/02e9DSEgIGbVKde/C+xyjMzIy/tZt+j+FjzYg\nW1tbvzWFBm/S3t4XiOvQoHlzJh4//tZ7F5eUMDYiAhOplGZ1o6COjszOzkbf2Ljmh/XyIvrlS8oq\nKiitzRIqKipIO3yYn7ZvZ3pxMX2XLcN32DC+69OHB4WFdO/eHalUSlpaGiUlJbi5uWk71o9OnGDH\nqVM08fMj+Ntvtcdy9NtvWbp7N0UqFVWAvVhMoJ0dax48IPXSJRatXUu/27eZ0acPVw4cYOfDh/Ry\ndKTr+PGsnzSJ5o6OeHTpQsHLlzT/5BNsPDxQu7oyfMcOTExMtGPmodOn496hA41at6asoIDUsjLU\nwCtAAWiKiylKTOS5gQEGjx7hUVyMT1UVf86di0hXlx4DBmB++jS/fvcdykmTAAgYPZqMmBjOP31K\nfmIiLl26YGpq+kaXXqlU8jo/H4mhIfdfvuTIggU0HzcOCwsL9KRSNsbEYGhoyFceHlwtLOTHDh1o\n6O+PvqsrjSsrqRw6lKzsbCZPnoyvry8nT55Eo1ZzaOlSOgcEMKCWHVFRUaEV1JmzfTsdb92ibe1A\nS5cuXSgtLWX0kCH479/P3PXr6XrxIqsWLMAoP59bV65wLzubbi4uFJWU8PjWLcR+frSdNYuOP/6I\nsbk5X967x55aPrSXlxeHDh7krlDIN92707BhQ2xrt9KqESPQeHszeP9+mjo40GboUAB+7NuXFyUl\n7L1zh00CAaWvXxN14ACdwsLo4+BAYGgoKpWKVq1aaYPlpsRE7TmUyWQ8zM5mjkZD++JiBt2798bE\nnEQieWeQValUpKamIpPJ8Pb2/luDXqjJjHfs2MGvv/7KmjVr6NChg/a60tHRecPF+f8m3ucY/SFu\n0/8pfLQBuW5s9V2P15Um3heI34eKigqSk5PRaDQEdOv21uJsWxto6qBrYIC5gQESkYgdw4fTolUr\n0mNjyVWpeJmdzdVt2yh99YqEuDgCnZzQ19cnOjoaR0dH3NzcSD53jmJDQxzbt6fbjz9i16wZjkFB\nJJ09i1uty4G9tzdigQANNT9mskKBbWEhAK5durBw1izcQ0PRGTeO1uXl2IjFKFQqsu7cQeTigtjX\nl97h4fQODwdqblLl5eXIZDLy8/NJTk5GrVbXXLiNGyMrLSUlOxvs7PCorOR+bbYjFQh4XlKCh6Mj\n3osX06tbN5Tt2zPN3R09kYju4eFUb9qExtX1jXPk0b49HmfPIrG2rmF2dOnyxvMikYiY+HjuKZWE\ntm7NkIULMWnUqGbSKzubeSEh2FhZ0X3oUF5u3YpKraaivBzh6tUI5HIQCrGzsyMiIoImzs5QXg4n\nTvDk2TOsLCzY/vnnJCUmMnDdOjybNdPeDOqyJ4BmzZphb2/Po0eP6DhzJvG6urTv1AmNWs20ceN4\nUF5OuUaDy9Ch/HH2LHotW2opjI8ePWLatGn07t2bjTt2IJVKa1xlBg4kRCBg2KRJmEoklGRmcnzB\nArp+/TX2ffrQIy2NnqamKGrHu/sPGkTBixeYNG7My+fPmbVnDxqNBkMzMzw8POgxbRrWTk7vXLei\n2pJWYGAgn4WF0blzZ5o0aaLdjWRkZFBeXq4tO9QFaYVCQVJSEo0aNaJJkyYfdI2kp6czadIkmjZt\nys2bNz8ogP8
//AMfbUB+F+qaTyqViqSkJG33+O9Uo+pMUetEhv5VoZPKoiIepKYiEomY9OuvhP75\nJ549ejA0MJAm1tYsjYigwsiIsrKyGi7q8+fk5+Yya8QIpPr67E9LQ9/MDL8vv2RxaCiX4+LYsGUL\npa9eERAWxvVPP0VRWUn69euc2bKFsVu2aLUH2n77LWlpadwPD8epSRP25uQw9Ysv2LN/P9uuXKGq\nqAiNWs2LR4+wra1LGxkZYWRkRGV8PDfXrSM+KYnF586hFAjIzs4m9c8/6dKuHR379aP81i2Epqa4\ntGmDR8uW/zCq7NULAbD+yBGEOjpsHjKE+48fs/7WLQzrNXSOrlvHxbw8dObOJeP1a7YdOIBr167A\nPwTMAXr26sXYsWOxcHSkMDkZWW4ucz//nHSZjJ4NGtB3zhyijh/n3P37BEVFIWjQoEbcJiWFghs3\n2DZtGr0tLAi0sUFQUsLe0FAKFi9mWe/eZBUW0sLbm4vLlvHg1i1mnT79jyGa2iB09OhRfv75ZzZs\n2MCsefO0x99/yBAqzp8nNjubLQcP8nl5OQ3378fw8WOyDQwQCATY29sTEBCAi4uLdmRYqVTSuU0b\nMhISWPrppyhVKl4rFBgYGvJZcDDKersggB6LF2v/vnbtGnrAUD8/yoqKOHz1KsFRUW8FZI1ajUou\n59i0aUitrOgWHs6kekmDmZnZG+PJSqWS0tJSiouLiY2Npbq6GkNDQ8rKysjNzf3bTHr79u3s2bOH\ntWvXEhIS8h9nULzPMVqhUPz/6iT9r+CjDcjvor3VNez8/f21Da86DVpDQ0MtI8HY2BixWIxardaO\ngTo4OHxwlvBXmDo6cvD+ffRNTdE1MiLom2/QqNWM+uQTRBYWGPn54dukiZYfqlapeLBvH8P69kVi\nacnDhw+1WWpg376YmplxeetWDt68ydZz5/A+fhw9qZRmgwfTrFbovb6UooODA269e5MfF8eEYcMI\ncHFhfEQEs/v1I72wkG9GjGD1jh0sX7CAoIkTgZqa5bjhw9FoNFgYGiIxMMDY1hZRYSFrIyKw0Nen\nzyefYNStG/p6erx+/ZqHDx9qGzt1wjF2QUFAzeRVWXU16r/U7y3MzbETixk4aBDJsbHY+vjUSDzm\n5ZGRkYG9nR1d7e3pun496TdvMsrNjfxaDrK9mRkqtZro5GTOzJrFnAMHKMnOxq1HDwAtB/new4fE\nV1aik5/PSH9/ykeNIqO0lPXBwQzo359+ixahb2rKozt3uP3sGeO//ppWubl826AB8oMH2bt/P/Hx\n8XwSFIRLrUB9HYwqK3mRnY2+RkMFcPjlS0YfPYrFhAnYh4XhtnIlbTw9kdrba292AM/OnOHHsDC8\nbGyIkclob21N2MiRWLZqRVxcHGbV1UicnDCWStHR0dGuO4VCQdi6dQwuBb0uiQAAIABJREFUKKB1\nv34oCguxXrqUaVOnMuTMGbqNG4dz7QDRsi5deJSURElVFTYmJnSr3QW9DyKRCLVara1DN2zYUEtx\nk8lkb030QY0ZhKmpKd9//z0+Pj7cuHHjv01W/D7HaCsrq791m/5P4aMNyHV4V51YT08PKysrrZym\nRqOhsrKSkpISCgoKSE1Npbq6GoVCgVQqxcPDA6lU+l+645s0bqz9W6FQkJaWhvPIkTRp0uQtEZXs\n27eZ+v33+NvbIwC8g4Jw692bsrIybty7R+yTJwxr04ZvAee0NHJzcpCammqVs4qLi0lKSsLU1BQ/\nHx8UMhkCgQAja2vszMzIKShA0qAB/QYNIjslBff27Qm8fBm7etxeUwcHPu3WDfeWLWk7cSJCkYjz\n8+ez+uefMRWLiZXJyDtzhoGrV7M7LIz9Z86w5fffSbl8mdKSEhCJkCsUNPnkE0xMTBi2bRvjpVL0\n6w0lyLKzCdu8mXESCXomJmjUapZ37YpSpeKTX36p0RS5exe9YcNQfP01pc7OZ
BYVEejqirOrK2G7\nd/PzJ5+wMjKSSVu2cPvzz7XBGP7BQR62di0BvXohLy/nZGQkxgYGGAoEFFdVkZOZSWJGBlKplDG7\ndvHZ69cMGj2ahy9f8ig/nxUZGezYsYPkxETsq6sxTE7mu3PnyHnwgD+WLqUkJwc3sZhkhQKxQIBd\n27ZkW1tjOmAAkmHDeHjgAFO+/ZYpo0fTd8UK7bEZ29jgaG5OSKdOuDVpQugXX+DZty8aQH75Msbj\nxpEbFsafPj4sX76cAQMG0KNHD4qKinDy8KBhx44oKyp4mZaGUCjkaVUVv164wKHLlwmfMQM9AwOM\njY0xNzJi2fbtSP5Gf1g7yVddja+vr3bXKBKJ3ptJx8bGsnjxYhITE2nYsCEKhYJHjx4RXGvy+l/F\n31HTWrRoQXx8PEqlErFYjEqlYtWqVUgkEpYvX46RkZHW69LNzU3rGP0+t+n/DvhoA3KdNqupqam2\nRvy+gCoQCLR2Ska1pQNTU1NsbGyoqqoiNzeXZ8+eaWts75LW/BDU+fBlZ2fXZK1ubu98va2fH2P6\n9kUoErHl2DE637uHR9++mJiYUFxYSLZMRovZs5GOGYPMzo7n16+jEAgQ2dmhUCjQ0dHB3t4ea2tr\nfh40iHPR0eyKjMTSy4v2QUFsPXGCjJs36bFokfYzV/7FfVhHV5cx+/a98ZhSIMBIJKJH585knzhB\nfm2H28rMjEZ6ekhMTVmzZg3lCgUGIhFCgYDj8+eT8/QpcWfPIjQ1BV1dzD08yL90iYgVK2hoYsLu\n1FSUSiVpqanci4/H2MAAz9rJQrWLC6r27VGHhNA8OJgTHTtiaGmpZb1MPHaMax078jg1FZP31FAF\nQiE2QUGs6d2bP2JiEN+8See+fTmdlYVQLKaqVhBeJpMh02hYuXIl+/bt49affxLYrBkRCxci+eor\nZk2YgMGdOzwcPZqJx4/zQi7HSEeHRWPHotHV5dmDB7QJC6OhkxOqDh0AsKiqwsPamsZ/ueCtPD1x\nbtwYfQMDdHV1GRUWxk+LFxP0zTcYuLoi8PGhQfv2mKanI0tNpTo/n/LyckxNTbWTpneXL+dwVBRT\nx4xh/ahRNGrShFtnznBk926e5Ofze1QUk97Dt6+POksyR0dHbGxs/nZNi0QiCgsLWb58Oa1bt+bC\nhQuoVCpiYmL+S5Zp9fEh1LSYmBjt36dOnWLNmjVMnToVgOXLl3P16tX3eu69y236vwM+2sGQe/fu\nMXXqVEpKSvDw8MDf35+AgAB8fHzeOTpaXV1NSkoKVVVVNGnS5J0Lq760ZklJiVZasy5AS6VSrVHj\nX1FYWEhKSgoWFhY41g5Y/B00ajV5jx9j4+OjDUBqpRJ5aSn6tRmLorycHo6OWBgYMOXwYRwcHBCL\nxTXmqTIZd1av5lF0NOrAQDJyczmwaxfrBw+mWCZjTXQ04nrmr2V5eVxdt452X3+N1N6e0txcYg4f\nptnw4aTn5CAWi2t0LXR1iTt8GOfQUIwaNEA8ezaqAwcYqVYTXVREmUrF6kmTaOLnR/MhQ1jepQt/\nREejIxDgYmbGups36d+0KWUqFUPatCFo9mwUCgUJmzcjNTOj74IFmHwgR1bw+DF88QVlQ4cimT37\nrecrios5uWABNsHBONjZ8SwykltVVbQKDKR3PVPXN867RkNpcTHt7O3JUquZ1qYN/sOHs2raNEZI\nJFh07szCP/6gVKGgf7Nm9Fm9Ggtra5ydnT94uKUoLY1+vr646OkRL5cjFAg4evasVgVPUVHB8Rkz\nSE9K4vi9eyyZM4dO06YB8DI+nh/698ff25sXL16QlptLdmkpi/fswdLZmeLoaIpSU+k+bx6ifyIG\nL5fLSUpKQqVS4eHh8d61Wx8qlYpNmzZx6NAh1q9f/38sG/4rbt++zfz58zl//jwAy5YtA2DWrFnv\n/P/hw4cTGhrKV199BYBjrdfifyMT1P/dg
yGtWrXi+vXrKBQK4uPjuXPnDvv372f69OkIhUJ8fX3x\n8/PDy8uL48eP06VLF7y8vLCysnpvhlDXra7f1Kuz2ykpKSErKwu5XI6hoaE2SItEItJqt5V1qmMf\nCoFQiG2ta0QdhCKRNhhrNBpel5bSt317rO3s3qA81S3EFvv2UVVVRUREBGVyOalZWchKS3leWEhG\naiqWjRtjZGjIFH9/SioqiH31ioVKJf1WreKPefP46fBhvs/IoHd4+Bvb1ub1rIOUX37J8hMniMnM\nxFJXl+zKSn46fZoby5ahUavp8/XX2p2KjYMD5+bPZ3BICE7NmlEtFGIqlWJrZcWPkZFY6+vjPmYM\nqowMjIyMkCck8OfEiYwYOJDGmza9cS5ep6YyY9AgBlhZ0adzZ+rr8mk0Ggry83kwfjwrL19mbHY2\nHY4cwalVK/zi4zkwaxY5VlY0qiexqT3vAgHGUin9/r/2zjw8prP945+TTVaJWCPIvguRRVv7vler\nr6LVSnmLFqVaW+miXqWttpTWWlpUKX5vq4oqaicZW0Qim0QQIiGJyZ5JZp7fH8mcd0YWg2gt87mu\nua45J+ecOXMyc89zvs99f+/AQM6mppKfl0eXIUMI69+fQpWKvPx8Pu7YkR8++oimbm4c+/RTEIJJ\n27YZ/L+t5+5OiYkJRwsKeKNzZ57q21cOxgDnd+3ii/Xr6ebtzbqff8a1SxfS09NxcnKiKCuLxKws\nTM+fZ/GRI5z//XeuxcfToW9f8gsKqFu3Lg5hYZw6fRpAzprQljSbmJiQkZFRLpu5u9P4Nl28OhIT\nE5k4cSJt27bl6NGjD9QA6G5S0woLC/njjz/4psJICcr/hz169MDU1JSxY8cypooGCQ8jj21A1mJu\nbk5QUBBBQUG88cYbCCHIz8/nxIkTLF++nPfeew8/Pz+ioqIIDQ0lJCSEtm3bVuqEWx1V6dGFhYVk\nZ2eTmJhIYWEhderUwcHBgaysrEoVaPdKfn6+3HB17IYN1Y5uJEnCysqK999/X15nPWsW15OTuXn2\nLC916MDgZ54hIyeHetbWzBgxgpDXXuP69es0at+e0Xl5DJg8uUazcOHjw0mlEnd7e36Ki+O9ceNw\n9Pbm6OLFvD9nDosWL6bXuHHMCA/HOy2N786epVO9eizZu5dS4PegIJQZGaiEwLtpU8LCwtBoNBQU\nFLBn+XJ+yc/H79AhrkZG6lXy5V+/zqnMTM5nZxNiZYV2TF1UVER8fDy2N2/SNzYWlasrYTr6Y/S2\nbaw7eJAWnp7c+Oorki5c4NOjRzHTybaRTEyYc+wY77ZpQ0RKCiW3buHYrBmOlN/tKP39+ezo0XLt\nOSCAfJWK4O3bkUxMaOjpWWW14e28MHo03y1fzuWrV3mnIvtBo9Hwf1u3MmvKFEa/8AIj3nmHxoGB\nTBg8mF1HjrBz3z58OnVi0FNPse7IEeJ27CB05Ej5mFX5TuiaA+Xm5srNQZs1a4aVlVV5AVANn8ey\nsjKWLl3Kli1bWLJkCe0qJmofFrZv30779u31BkpHjhzB2dmZzMxMevbsia+vL510fDseVh77gHw7\nkiRhZ2eHu7s7zZs3JyUlBQcHB9LT01EoFERERLBq1SoyMzPx9PQkJCSE0NBQ2rRpg62trUFBOicn\nh7S0NFq0aEHTpk3Lb4Fvq0DTdnHQSh1WFSlSd0I7Iai1Db0Xp6z5M2aQlpfH26++ysWyMmITE/kp\nOZlvBg/m1o0bpGRloblxAwd/f7o98wyxe/ZQVlBAEw8PfCtu8xN37aI4L0/uJPHDyZNAedeOL9et\nA+D40qVYmZkhgOeee47LajXW5uY0MzPjX0OHUvjTTzSxs8O9SxdMzMx4o29fnn7hBYRGw7SKFLp5\nR4/i07EjPgMGIFlakp+fj1Kp5L9TprB6yxZ6enlxPCWFy3Fx2Li5cePGDW7cuIGXlxeOQUGYrFjB\n8+7uC
FdXAP6aP5/927Yxbfhwuk6YwJxBg4i5do0rERG4Vei+unzy11/kpadj17QpKpWKhIQENBoN\nQUFB8sTXqiNHUJeWMrJTJ0xNTPg+NrZStaH2h8Te3h5ra2vSExPp16cPpufO0bJilJ6bm0t8fDxX\nDx8mKyuLzNRUGld0erl+9iwlBQWIGzc4sGAB55KSQJJq7JsH/zMHqlu3LtevXyc3Nxd/f3/q1KlT\n7Tlq50dMTU2Jj49n4sSJtG/fnqNHj/5tjUWrS1mrik2bNvHSSy9V2h/KC8QGDRqEQqF4JALyY6sh\n3y9qtZqEhAQiIyOJjIzkzJkzlJaW0qpVKzlI+/v76zVs1OrEjo6OuLm51agTa81itFqvdiStq0fr\ntjgXQsiTOS4uLjg5Od1z1se5LVvITU+n/cSJnN24Ee/evcHMjL6urjSwtGR1XBz29vaUlZWx/MUX\n+fLPPxFAE3Nz5nz1FWYqFV/Nno1SpWL/tWuYWVoiNBrKiov1NGktZcXFdGvalBKNhmU7d+ITEIBN\nDT8kQqPhVQ8PzExN+eHChSq32TN3Lou++Yavvv8eG1dX8ihvF29iYoK1tTVKhYJ5H37Ie++8Q4+Z\nM+Vr9XHHjmyLjkYALz/zDJO2bmXBwIHsOHOGH7du1Wv8CXBVoWB4374M7tyZc7GxmJqa8m1MjKzp\n67Lo2WcxMTFhYoV0kZ6ejlKpxNPTU88YqKCggC+HDOFqXh4/Hj5MgxYtSE9PJy8vD/dmzZjXrx/Z\neXnMWLxYzhq5fvYsNxISCBwyhBEeHpy9cYMwJyc+2bKFxhVeHdVRXFxMXFwcderUKTe+qkJX1k1v\ny83N5cMPPyQlJYVbt24xduxYhgwZQkBAwH3f2Wm5UwbFvn376NmzJz4+Ppibm5OZmcm+ffsICAjQ\n2/eVV15h8eLFXLlyRU63KygokNNECwoK6NmzJx9++CF9+vSplXO/Rwz6shoD8l1QWFjImTNnUCgU\nKBQKzp8/j52dHR4eHsTFxfHiiy/y2muv3XMeZnFxsaxH5+bmolKpsLGxwcLCguzsbOrXr4+Hh0et\ndlLQzfm1zc+nqaur3GkCYFxAAJFpabzcrh1W1tZohGDNvn282rkz9Zo2xSc8HCsrK7ZPmsTaU6dY\n8u679P34Y3l/jUZDamoq6Wlp+Pr5Ub+Gzt26qFUqAL3mmp/16oW5uTlvb9tGaWEhderWRaVSkVTR\nc8/Hxwdra2tUKhWnNmxg+rvv0tjPjw5DhtCxY0fq1q1LHbUaZUIC2xYtok+FPHNgwQJ+37SJD375\nBStHR77+178I69GDLlOnkqpQMHHgQAZ0707kiROoNRq+T0ysMiDfTp8+fUhISODMmTOVTNv3vPMO\naYmJtJ0/n8ybNzE1NS0fleblMfFf/yK4WTMWnT2LJEmoVSpebN6cRqamLL90iatRUSivXsX/+efv\n+L/V/oh7e3vrtU2qibi4ON566y3atWtHnz59OHfuHKdPn2b16tV6g4R7Ra1W4+3trZdBsXHjRr0M\nigMHDjBt2jRu3bolp6bNmjWLpUuX8tFHHxEZGUmzZs3w8PAgMDCQnTt3yvumpKQwaNAgoFxuefnl\nl5lVxYTv38yTPan3ILC2tqZ9+/a0b98eKP9gTZ48mT///JOOHTvy119/sXbtWrkyKyQkhJCQEHlC\n605o82a1Pc60WmhOTg52dnYolUpOnTqld/tbXeWUIWjd8LR9BKsaOS08doyS/Hy5n1rsf/9L9s2b\nhC9ciGOFv3BxcTE7TE1RCsEHCxdS6uCAW48emJmZkZ2djZOTE8906HBX53l7l2Oh0XDw7Fkszc3J\neOYZTly8SPO6dfH18GDUjz/SqFEj+Rpf2rcPn44d+Tk2lueff57Lly/j7+8vj/5UzZrR7ZNPMLWy\nIiUlBZ9XXuFy48YogcLERLZGRnI+OZmGHTtSYGLC1jlzsDIzY3SFFFN
dMBYaDZOCgrC3sWHsV19h\nEh9P36eekjN2TH/+GSkpibKZM+ny2WckJiaiUql45plnsLS0RKVS8WX//rRs3JiXFy0iIiKivAGt\nrS1OGg31S0ogMxPnsDCcw8JqvH5FRUWcP38eGxsbwsLCDPoRLysr4+uvv+a3335j6dKl5a2ugB5V\nNLC9HxQKhUHmPo0aNUKhY6QFyJPx2n3HjRtX6fju7u6crWi/9ahhDMj3gampKc8//zwLFy6U0520\nFoWRkZHs3buXTz/9lIKCAvz9/QkNDSU0NJRWrVrVmGKkVqu5dOmSrGPrpu5oJ2mUSqVcOaWrE2pL\nwWv6ASgtLSU5OZn8/Hx8fHxq9KatY29PHR15IeCFFwh44QV5WTtpOOuvv1CGhLA+Pp6iixdRqVQU\nVMz437x5kxs3bvxPo7Szw9LMDIu7uJOQTEzYdOoUkokJq8eMocn168RkZRFTVMS/btyg7MoVnEND\nuR4dzcsvvURbZ2e+jY1ly5YtXN2zh6xz53AOC5N/7LTFQLm5uezfv5/p06fTu3dv3nnnHd565RU+\n++EHfn73XWYfPoz18OFIRUWoR47U68GXdvw4lo6OLP7iCwoKChjx7LP8nJxMS2trekVEkJ6Tw2sN\nG8o/RKZbt2KSmMi1YcNIvnEDNzc3vcljCwsLEi9eJC03l+Cnn8bc2prioiLy8vOZumMHJZcucfzy\nZSwzM/UqInU/S0IIrly5wrVr1/Dx8TG4c/P58+d566236NatG0eOHDEoBe5eMTSD4tixY7Rq1Qpn\nZ2e++OILAgICHmpjoNrAGJDvk2632RiamJjg6emJp6cnwyuaf6pUKqKjo4mMjGT16tWcO3cOCwsL\n2rRpIwdpzwrTnejoaIqLi2nSpIleGpuWqjo4lFZ0tVYqlbL3saWlpV4Ri4WFhV45taurKz4+PrXq\nN1DPwYFGJiZYu7vj4uJS6YdEq1EuHzqUXyIi+Pirr3Bp315vYrM4OxsLOzu+HzmS4woFhaWlLD90\niGWjRuHXpg3+Y8bQ/qOPGOXjw/atW/lw9mw+HjaM+CtX+HnLFlw7duSl9u0J6tgRAKuCAia88w7+\n9eszfto0Ql97DbOKHyxtMVDfzp35PwcHejVujBAC5169GHjiBK169EChUFD3gw84vGED7XfuxL9v\nX0RZGUXZ2Qzu25d8tRoroK6ZGW+MGEGIrS1vjx9Ph0mT+LVNG1aOH8+NAQMY//vv5C1aREp0NOri\nYkJCQqq8/V9+6hRqlQpza2vO/vQTb4wfz9xp0+j+3nvQtq3c8SQ3N5dbt25x+fJlVCoVVlZWWFlZ\nkZ2dTb169QxuMlpaWsqiRYvYsWMHSyuqIx8GgoODuXz5Mra2tuzcuZPnn3+epKSkf/q0HjjGgPw3\nYGFhIQfe8ePHI4QgNzeXEydOEBkZyUcffURsbCylpaX4+voyevRo3N3dDQ6W5ubmeq3VtV9apVJJ\nTk4Oqampcim4ra2tbOtZm8E4OzubTu+/T8+8PIIHDKjyh0RrrdnMxYWGZ88SEBSEZYMG5ObmkpmZ\nyfXTp5k5aRK9/PzYn5BAdlkZZpLEnkWLWHX8OE8lJjKzf3/mDR/OoN696dytG5ZZWVzJyaGeqSmO\nrq6YWVnxzo4d5KSkoPjuO0LDw5n4wgsknTrFqKlT+ebWLbrdNoGkTE7m4rVrRPz+O90mT6ZVq1Z0\nreirp9FoOBQRwaJjx7heVESegwNzBw3C0cqKvq1asfHMGYLr1+frhQux7NOHnRkZ8nGbmphwODWV\nFunpPJeWRlpaGl5t29ao5erejUhmZliYmOhdS0mSKklbarWalJQUMjIysLe3Jz8/H4VCgY2NjTyK\n1vqz6BITE8PEiRPp1asXhw8ffqCjYl0MyaDQLczq168f48aN4+bNm3eVffEoYpzUewjYtGkTS5cu\nZerUqRQXF8uThtnZ2Xh7e8vBPCg
o6K7LtbXdtQsLC2nRogWlpaUolUry8vIA9FLvbGxs7jpIl5SU\nyNVePj4+91UscCMujjahoTibm7Nw2TIOff89q44epam1NWlFRSyfNQu/rl0Z0rcvQ555hslDh7Js\n0iTmlZZSBkTv2kWjitSmjzt2ZEtUFD+tXk3r0FCutWvHRgsLXtq2jdxr1/Du2xfJxETW6XPPnqVV\n9+7Uq8KrNzM2luXjxzNy/nycw8IY6++PnbU1k7Zt41pMDGVmZlg6OlYyVpKKisiZOpVd6enU8fbm\n+U8+wdTUlIzoaEzMzWlYUR4OcOK775gyfTqfz5/PU3dRxJCfn8/58+epX78+bm5ucvDW5sPLJeG5\nuajVapKSkkhOTiYnJ4czZ86wcuVKua1RbXGnDIp169bx+uuv4+bmRr169cjJyeG///0vAQEBuLq6\nYmdnh0ajwdLSklOnTqFQKBg8eDCXLl2SJwT37duHs7MzYWFh/PTTTw+NF0UNGLMsHhUKCgqwsrKq\nNKosKysjLi6OiIgITpw4wZkzZxBC0Lp1azlI+/j4VDlhI4QgrWJU5u7urjfppUUrI2izOgoKCjAz\nM6tUCl5VkNZ1wvP09JQLY+4HodHwYfv22NrZMfWPP0g8c4a/vv2Wrw8epKGNDd/9/DN5+fmU5OVR\nFB9PxM8/M+jFF+kyfTp2wMWCAvlY5zZvZt+6dTR1ccElIIAOV6+i7tyZJcuXs2zPHr5bsIAmffrI\nretr0lpXDx/O/G3b+Gr8eDA3p9uECZXaH2llI+2jsLAQtVqNWq1m6sCBqITgxzVr8Bs4kI6NG+Ng\nYaE3mj72zTdMfP99vp4zh/YTJ97xWmmzV27evImfn59BPeo0Gg27d+9m8eLFlJSUIEkSpaWlzJ8/\nn563+VDfK4ZkUBw7doy0tDTef/998vLy5M7Vy5cvZ9asWSQkJLBp0yaWLVuGmZkZVlZWfPXVV3JB\nys6dO3n77bf1si8eAYwB+XFDax6vHTUoFAoSEhKoV6+enBsdFhZGbEVhQlBQEG5ubnfVPFSlUuml\n3hUXF2NlZaWnR2vbS2nzre+2OemdUCqVJCQkyL4fhw4domHDhnJjWo1Gw6IBA1hx6BBzx48nQaHA\n2ceHpydNQpOZSXFaGo3c3Uk6fJjJc+fibmfHL1evAhD144+sXbCAZ2fOxCU0VG9UWR0ZMTHs//Zb\nIlNTWX3oEFN69uTDX3+t/vwzMri5YQOie3fqOjlxcs0apnzyCQ3MzFi0dy+7pkzB3tGRMRs3cn7r\nVjYvWcL7v/2GZb16eh2fq0NbQNKwYUNcXFwMyl5RqVR88cUX7N27l+XLlxMUFCSvLysrw7qK/PF7\n4W49KHJycmjZsiVXK/4/D6EHRW1hTHt73JAkCVtbWzp37kznzp2B8iCdmZlJZGQk+/btY8qUKVhZ\nWdGyZUsuXrxIWFgYbdq0oW7dugbJERYWFjRo0ED+QmjT2pRKJZmZmcTExKBWq3F0dMTCwoL8/HzZ\nH+F+KS0tleWVgIAAOZ+7a9euetuZmJgwZs0anvnvf3lqzBhMzMxQq9VkZ2TwXLt2ZKpUtKpbl4T8\nfKYPH453hw4UFRVhZmaGZWgor6xZg6+v7x3zxbOSklg4YgRDJ09m2LJl2G3cyIHz5+k5alSV22vb\nHVlv2EDLxYsptbSkbNw4+s2YgTIzk6V793IhOZnXN20iNzeXc+fOMWPiRCKKimgyejSvrFlTpdar\ne3xtscbtTUZr4uzZs0yaNIkBAwZw6NAhvclECwuLWskt1nK3WRCrV6+mr45l6qPqQVFbGAPyI44k\nSTRu3JiBAweyadMmVqxYQf/+/UlKSiIiIoIdO3bwn//8h+LiYlq2bCm73gUEBBj0RdROImVlZXHr\n1i18fX1p0KABhYWFKJVKrl69Sl5eHpIk6enRd6N16xanuLq64uvre8d9bZs04RmdHFRTU1OKUlK4\
nXlpKEwsL3n3vPeIjI+n78cdcPHqUg+vWYe7piZWVFY0aNaKwsBAzM7MaJ7ISdu9my7lz5M6dy/nx\n4/li4ULOXLok/z1682YSDx/mhYULyVEqSUpKwtnZmWZjxlCmUqHWsTQNGj2am7t2cenSJRo3biwb\n+syZO5f1S5bQZ9Ys2YtbrVZja2urNyGXl5dHfHw8Tk5OhIaGGnRtS0pKWLBgAfv37+e7776j1R0q\n+v5u9u/fz+rVqzly5Ii87lH1oKgtjJLFE0JJSQlRUVGyHh0TE4O1tTXBwcGyHu3q6lpppKuVDxwc\nHHB3d6+2wECtVutJHQUFBeVFDTpSR1U+CAUFBcTHx2NtbY2np2e1o0NDOb1uHc7BwTSukDeKi4vp\n3qQJxRoNxzIzEaBnoXq7O5/WoQ/KNe3oTZt4Y9w44kpL2Tx9Or102jeNcHdHceMGK1esoK6/P76+\nvtV7PZSWkrt5M7Y9emByB3c1rbGSUqlEqVRy8+ZN1Go1DRo0wLFi8vBOBlVRUVFMmjSJ559/nmnT\npt33dTUUQyWL6OhoBg0axK5du/D29q7yWLNnz8bW1pYpFbajjzhPloa8ZcsWZs+eTVxcHAqFotp8\nyupmgLOzsxk6dKg8Stu8ebPBSfWPIkIIcnJyOHHihBykU1NTadboKQ0zAAAZfElEQVSsGaGhofj6\n+rJt2zaGDx9Ou3btDL491kWlUskBWqlU6rXKsrW15datW+Tk5ODj43NPJkl3en/aScdzX38NwOiN\nG6vcruTUKWxHjeLKmDGkPfUUucnJFF26ROuXX6Zu3bqsGjqU9IwMvjh1Ck1ZGZkxMTRp04aTP//M\nub/+ou/s2eScOMH/LVzIhDVrOJOayrvvvsuKTz+l7dNPg709Jnv3UmfECMpef53SOXMMeg9ax0Bn\nZ2ecnJzkIJ2bmysbAulmddja2qJWq/nss884fPgwy5cvJ7DCnKg2uFP2hBCCt956ixUrVuDh4cHa\ntWsZM2YMP/30E1euXJH3HTx4MFu3bmXdunV6znEPqQdFbfFkBeS4uDhMTEwYO3YsX3zxRZUBuaYZ\n4GnTpuHo6MiMGTP49NNPycnJ4TOdljtPAhqNhosXL/LFF1+wdetW/P39ycnJwc/PT5Y67tbTWRdt\ndZy2FbuZmRlmZmbY2trKxS61YU2qvb3XjurvNOlocvo0FoMHUzp7NuoRIxjt48PBq1fZ8MMPWPv5\nkZ+fL7sEbvz3v1l69izvPP00zy1ejLe3N+bm5nzeqxfLjx1j7Zw5KL29mTRpEj+VlNDJ05OSAwcg\nPx+z1atR9+9fqfP27ZSVlZGUlERRURF+fn7VXm/dhgkKhYL//Oc/FBUV4enpyRtvvEGnTp1optM6\n7H4wJHti586dLFmyhAkTJvDmm2+SmZnJBx98wIwZM2jSpAmTJk1i2rRpNGnSBLVajVtFhxczMzNO\nnjz5sHpQ1BZP1qSen05OZ3XUVEO/bds2Dhw4AEB4eDhdunR54gKyiYkJTZo0oUGDBly4cAF7e3tK\nS0uJiYkhIiKCdevWER0djampqewpEBYWhpeXl0GZFtqcaCEETz/9NJaWlmg0GrkU/MqVK+Tl5d1z\nqyzdSS9fX1+DUsEANMHBFCcmQoVU8cqYMbj/8Qet+/TBouLOQCvJOFYUY1xLT6eoqIiLFy9ib29P\ndGIilpJE0LBh2DVtSt/evbF6+23U2qIFW9tKnaSr4ubNmyQlJeHi4nJHLV3bMMHa2prY2FicnJzk\nUv0TJ04ghJCrRe8XQ/wntm3bxogRI+jfvz+XL1/Gx8eHUaNGoVAoCA4Olj25p06dClSWMR5lD4ra\n4rEJyIZQ0wxwRkYGTk5OADRp0oQMnRzRJwkbGxv+o9Nrz9zcnDZt2tCmTRvefPNN2dv51KlTRERE\nMHfuXJKSkmjYsKFe6p2uR4OufODh4aGXs6x7261Fd+SnzbqoU
6eOXpC+fTIuKytLnlQzdNJLDx1t\nvPO779K5ojebluLiYpKTk+n3+ecMycujoZ8fwsxMPs/6dnbYZGej2LsX1/btsbe3R71wocEZDKWl\npSQmJlJaWkpwcLDBVXMnT55k8uTJDB06lAMHDsj6d+/evQ1844ZhSPZEVdto74YeZ/+J2uSRCsg9\nevTg+vXrldZ/8sknPPfcc/d9fF0duaCggJycnEo6ckJCAkOHDpWXU1JSmDNnDm+//TazZ89m1apV\ncsCZN2/eQ9lI8X7QZlN07dpVTkfTemRoDf5XrFghm8Q3a9aMgwcPsmjRIkJDQw1yHauuVZZ2kkvr\n36DVTW/duoWJiYmeaXxtoVuAUZXWrU0R7Ni1K1tXryZ5505a9u5NTk4Oly5dki1UtT8mdnZ2la5B\nZmYmycnJlcyGaqK4uJh58+YRGRnJjz/+aNAdopGHn0cqIO/du/e+9q+pDr5x48a8//77dO/enfDw\ncFq1asWnn35aSbbw8fGRu92q1WqcnZ1l3Qtg8uTJj8ussMFIkiRfB+21yMvL480332TPnj089dRT\nTJ8+HbVaXcng31Bv5zp16tCoUSPZv0EbKNPS0rCzs6O0tJSoqCjs7OxkPfp+rEmhPMMkPj6eRo0a\nERoaWuOx+syaxdeSRPdJk6ink9amLWFWKpVkZGSQlJQkT1zZ2NiQlZWFmZlZtWZDVaFt4PvSSy+x\nf//+WvXHrg5DPCSq26a0tPSx9p+oTR6pgHy/hIWFkZSUxMWLF3F2dmbTpk389NNPAAwcOJAffviB\n6Oho1q5dy4svvsivv/5ao468b98+PDw8cHFx+bvewiNDnTp1GDhwIOvXr5dHfIWFhZw+fRqFQsGi\nRYuIi4ujbt26elKHs7PzHYOoNlXO1taWdu3ayQFJo9HIjnK61qS6UochrbLUajUXLlwgLy+Pli1b\nGtRwwLZxYwZXZHPoIkkSNjY22NjY0LRpU/n4ly9f5vLly1hbW1NUVERUVNQd87iLioqYO3cup0+f\nZsOGDfj6+t7xvGqLmr47WgYOHMg333zDsGHDiIyMxN7eHicnJxo2bHjHfY2U89hkWfzyyy+89dZb\n3LhxAwcHB4KCgti9ezfXrl3j9ddflzsKVFcHn5WVRZMmTXB1dcXFxYWff/4ZDw8Pbt26Ve1rjho1\niuDgYCZMmACU501+//332NjYoFQqsbS0xN3dvdoUOq2RiqmpqTzTDE9OCp4Qgps3b6JQKIiMjESh\nUJCWloaLi4ucGx0SEoK9vT2SJFFcXExaWhrZ2dkGp8rpekwolUqKiopqbJWl1aKbNWuGs7NzrTri\nQbn0EhcXh7m5uZyhAeW6+e2+IhYWFhQVFZGcnIy9vT0LFizg1VdfZdKkSbVerq6lps+e9rtTUlKC\nqakplpaW5OTk0LFjRzZv3owQgrZt23LmzBnMzc1p3rw5ixYtol+/fo+q/0Rt8mSlvRlKTTp0eHi4\nXgDWOlFVhUqlomnTpsTGxsq3pxkZGTRo0IAZM2Zw8uRJ3Nzc8Pb2rjaFrrq6/Sc5BU+j0ZCcnCwH\n6JMnT1JYWEjjxo25cOECM2fOZNCgQfelFVfVKsvKyori4mJMTU3x9/e/5zZc1SGEID09nUuXLuHl\n5WWQV4PWR3vevHnExsZiY2ODt7c34eHh/Otf/6rV89NiyGcvPT2d9PR0goODycvLIyQkhF9//RV/\nf//HrZijNjEG5LvFx8eHAwcO4OTkRHp6Ol26dCEhIaHKbbdt28a3337Ln3/+WeVx1q9fz6hRo9iz\nZ0+1x6kuIN/NeTzuaDQaRo8eTUJCAr169SIpKYmYmBjq1KmjZ/Dv4eFxT3qxtmw7JSVFnkTMy8tD\nCCHr0dpClnsdLRcVFREXF4eVlRVeXl4Ga77Hjh1j6tSphIeH89Zbb2FiYsKlS5coKSnBx8fnns7l\nTtzLZ++5555jwoQJ9OzZs
8aAfOXKFTp16sSpU6dwdHQkJyeH4OBg9u/fj2tFV3AtUVFRvPnmm+Tm\n5mJqasqsWbP0JtMfQYwB+W6ZOnUq9evXl0cH2dnZfP7551VuO2zYMHr37s3IkSPldenp6Tg5OeHg\n4CA3Yty4cSP16tWrUvpwc3PD3t6+kpGKg4ODvL0Qotr9nxROnDihl8omhECpVMoG/wqFgpSUFJo2\nbSrnRoeGhtKgQYMag6i2G7OFhYWefAD6HU6USqVsTXp7KXhNx9dN9/P29tbLGqmJgoIC5syZQ0xM\nDCtXrsRLp+nsg+ZuP3upqal06tSJmJgY6tatK8t29vb2hIaG8uWXX+rJbZ9//jkXLlxg5cqVjB07\nFldX1yqd4BITE5EkCS8vL65du0ZISAhxcXGVmsU+QhgD8t2SlZXFkCFDuHz5Mi4uLmzevBlHR8dK\nOnRBQQEODg54eXnpjcrS0tKwt7cnLS2N/v37s2LFCpycnKqVPq5evapnpLJkyRI6deqEg4MDKSkp\nspaXmppKZmZmJR35ypUrjBgxgoyMDCRJYsyYMUyqKD54ElLwdBFCcPnyZTlAnzhxgpycnEoG/1ZW\nVmg0GuLi4sjPz8fLy8vgbsy6rbKUSiXFxcVYWlrq6dHaoF5YWEhcXBy2trZ4enoapPkKITh69CjT\np09n1KhRjBs37oFoxbUl2+Xn59O5c2dmzZrFCxV9FrWynSRJfPDBB6Snp7NmzRp5n9LSUkJCQhg1\nahSrVq0iKirKIJ+N1q1bs3Xr1r/1x6mWMQbkf4p7ue3TvdXz8fGhe/futGjRQk7BGzVqlFHLu0vK\nysqIjY0lMjJSNvgvLi6muLiYrl27Mnr0aPz8/O456GmtSXX16NLSUiRJQqVS4e7ujpOTk0HHLygo\nYPbs2cTHx7Ny5Uo8PDzu6ZzuF0M/u6WlpQwYMEBuDFsVqampDBgwgJiYGL31u3fvpk+fPvz5558G\nGeMrFArCw8OJjY2tFZvXfwiDAvIj++4eZgYOHMjatWsBWLt2bZVFKwUFBXIbpYKCAv7880/ZgH3g\nwIFs2bKF8PBwvRS823FycpLb79jZ2eHn5ycbfRspLzBp3bo1Y8aMYdWqVbz55ptYWVkxdepUvLy8\nWLBgAe3bt6dfv3588MEHbNu2jWvXrmHoIEXbcbtx48Z4e3vj6+uLqakpdnZ2uLm5kZuby8mTJ4mM\njCQuLo6rV6+Sn5+vd3whBIcOHaJnz574+/uzZ8+efywYg2GfXSEE//73v/Hz86sUjNPT0+Xnv/zy\ni/yZ1mXXrl04OTlVCtRVkZ6ezquvvsr333//KAdjgzGOkB8AhkgfNRmp3EsK3t1qeU8iV65coUmT\nJnq3yEIIMjIyiIyMlEfS169fx93dXTZUatOmDXZ2dtXqxRqNhkuXLpGZmYmfn59eGTjot8pSKpUU\nFhYSERFBfHw8OTk55OTksG7dOtkn4kFgaCpl8+bNycnJobS0lDp16pCamoqjoyOxsbF07tyZevXq\n4eDgwMmTJwkMDJSDpFYSe/XVV4mKikKSJFxdXWXZTktUVBTDhw9n165ddOjQgcjISL2/65Kbm0uX\nLl2YOXMmgwcPfjAX5u/DKFk87DxILW/jxo3Mnj0btVpN8+bNcXNz09PyhBBMmjSJnTt3Ym1tzQ8/\n/CCPtu9ks/i4o9FoSExMJCIiAoVCwenTp1GpVJUM/s3NzYmLiyMvL69Sk9GaEELw+++/s2TJEurV\nq4darSYtLY0PP/zwgQUeQ1MpH2QqphCCdu3aMWfOHHnOJCIigg0bNlTaVqVS0bdvX5599lnefvvt\nu3uzDyeGd2u4i4eRvwlvb29x7do1IYQQ165dE97e3lVup1KpRK9evcSXX34prysrKxPu7u4iOTlZ\nlJSUCF9fX+Hh4aG3344dO0SfPn2ERqMRx48fF23btq1y31atWonY2NgH9C4fHYqKisTx48f
FwoUL\nxcsvvywCAwOFp6en8PLyEqtWrRIxMTEiLy9PFBQU1Pi4fv26GD16tOjZs6e4ePGifHyNRiNKSkoe\n2Pkb+nlycXERN27cuOf9a2LFihViyJAh8nJZWZlo06aNOHDgQKVt169fL8zMzETr1q3lx5kzZ+76\nNR8iDIqxxhHyQ4ohKXhCCMLDw3F0dGTRokXy+uPHj/Pee+/JdqL9+/fnypUrREdHy9uMHTuWLl26\n8NJLLwH/m8xJTU29qyaVTyL5+fl069aN/v37ExISwsmTJzlx4gSXLl2iefPmelWG9erVQ5IkhBAc\nOHCAmTNnMn78eF5//fW/VRM1NJ3NmIr5wHiy/JAfN2bMmMGQIUNYvXq1rEMDejr00aNHWb9+PYGB\ngXIX4Xnz5lFYWEhaWhqBgYFIkoSpqSkhISF6xzdaJd47tra2bN++Xa7QHDBgAPA/w6OIiAj279/P\nggULyMvLw9vbm8zMTKysrNi+fTstWrR4IOdVkwSmiyRJ1erhhvS0q2l/I/eHMSA/pNSvX599+/ZV\nWt+0aVM5H7pDhw5VZgRs3bqVLl268N133wGwfv36ew6q0dHRbNiwgT/++KNKPXnDhg189tlncmXb\nsmXLaN26NVC9V8fjQOMq+uKZmJjg7u6Ou7s7L7/8MlCeHhYdHc327dv58MMPH+iouCY3xMaNG8uF\nS+np6bJr3u1oXdgaNWrEoEGDUCgUdOrUyeD975Zz587x6quv6q2rU6fOEzsIePzzSJ5A7scqUXe9\nWq1mzZo1vP7665w/f56NGzdy/vx5veO4ublx8OBBzp07xwcffFCpbfv+/fuJiop6rILx3WBubk5I\nSAizZ89+YME4Ozubnj174uXlRc+ePauc/O3QoQPBwcEEBQURHBzMxYsXZZlr9uzZODs7ExgYSGBg\nIDt37qwyFfNO6XD3QmBgIFFRUXqPJzUYA8ZJvceR0tJS4ebmJlJSUuSJuZiYGL1tfv/9d71JvbCw\nsEr7Hjx4UNja2sr7zps3T8ybN6/a183OzhZNmzaVl6ubIDJSu0ydOlXMnz9fCCHE/PnzxbRp0ypt\nc/PmTdGtWzfh6ekpunXrJho2bChSU1PF1atXhaenp1iwYIFITk4WrVq1Eq1atRL+/v5i7ty5Ve7f\nvXt3kZWV9be9v8cEg2KsMSA/puzYsUN4eXkJd3d3+Yu1bNkysWzZMiFE+az+uHHjhLu7u2jZsqU4\nceJEpX0bNWokQkJC5PXr1q0T48ePr/Y1FyxYIP7973/Ly66urqJ169YiODhYrFixorbfopEK7jYD\nYvfu3aJdu3by8kcffSQWLFjwQM/RiDEgG7lPtmzZohdgawrIf/31l/D19RU3b96U16WlpQkhhMjI\nyBBubm6iefPmwsPDQx7N6bJ//35Rt25dOcXp448/lv+2a9cu4e3tXe2+Tzr29vbyc41Go7dcFSNH\njhRLliyRlz/66CPRokULERgYKEaOHCmys7Mf2Lk+wRgDspH749ixY6JXr17ycnWSxdmzZ4W7u7tI\nSEio8jhlZWWiXr16YsaMGdXmNu/fv1/079+/yn2NedFCdO/eXQQEBFR6/Prrr5UCsIODQ7XHKSkp\nEfXr1xfXr1+X112/fl2UlZUJtVotZs6cKUaOHPnA3scTjEEx1phlYaRaDGnbc/nyZV544QXWr1+P\nt7e3vL6goEDuHXfw4EE0Gg2dO3fGwsKiyhby1WFI+/kngdrIoIByH4ng4GC9LBHd56NHj5bT+Iz8\n/RizLB5Brly5gpubG9nZ2QDk5OTg5uZGampqldv36dMHBweHu/6imZmZ8c0339C7d2/8/PwYMmQI\nAQEBLF++nOXLlwMwZ84csrKyGDduHEFBQYSGhgLlNowdOnSgdevWhIeH4+npSZ8+fYD/5TzfzrFj\nx2jVqhV9+/YlNjYWqD5f2sj/uJsMiI0bN8rFQFoMMQQ
y8jdh6FBaGCWLh4rPPvtMjB49WgghxJgx\nY2rMfti7d6/47bffqpQE/g4M0aKVSqXIy8sTQpRPKnp6ela575QpU4SDg0O1evLnn38u69ABAQHC\nxMREzghwcXERLVu2FK1bt9abrHwY2bx5s/D39xeSJOlNuN7Orl27hIeHh7CyshL169eXMyCysrJE\nx44dhbW1tejRo4fIzs4W+fn5wtHRUdy6dUvvGK+88opo2bKlCAwMFM8++6w8QWikVjFqyI8zKpVK\nBAYGioULFwp/f3+hUqlq3L46jfbvwFAtWhdtypzuvmVlZcLR0VFMmTLFID35t99+E127dq10zEeB\n8+fPi/j4eNG5c+dqA3JN+rohqXBG/lYMirFGyeIRxdzcnAULFjB58mQWLVpkUNeFfwpdLVqlUrFp\n0yYGDhyot83169flqkOFQoFGo6F+/fp6+x49ehSVSsVrr72mp0VXR1W3548Kfn5+d+ybp6uv3349\ntm3bRnh4OADh4eFV+mkbefgwBuRHmLsx+v4nMUSL3rp1Ky1btqR169ZMnDiRTZs2IUmS3r4vvvgi\nPj4+BAQEADXryYWFhfzxxx963ZklSaJHjx6EhISwcuXKB//GHzA16esZGRmyz3CTJk3IyMj4R87R\nyN1hzLJ4RImKimLPnj1ERETQoUMHhg0bVq3R98NAv379KvX0e+ONN+TnEyZMYMKECTXuu3XrVv74\n4w+DXm/79u20b99er7GornGOr68v06dPx9nZucofNCEevF90TWZAtVWaDEYzoEcKQ7UN4+PheVBu\n5Xcc6Fmx/Baw4Q77dAF+/6fP/T7f9zPAbp3l94D3qtn2F+DlGo71PbAQiKnm7/2AXRXX+mkgsmK9\nKZAMuAMWwFnA/wG+5wNA6N1eDyABcKp47gQk/NP/P+Pjzg+jZPFoMhq4LITYU7G8FPCTJKlzVRtL\nknQY2AJ0lyQpTZKk3n/TedY2JwAvSZLcJEmyAIYBv92+kSRJ9kBnYJvOOhtJkuy0zwEfIKqG13oO\nWCfKiQAcJElyAtoCF4QQKUIIFbCpYtt/gpqux29AeMXzcHSuhZGHF2NAfgQRQqwUQgzVWVYLIYKF\nEAer2b6jEKKhEMJKCNFMCLH77zvb2kMIUQZMAHYDccBmIUSsJElvSJL0hs6mg4A/hRAFOusaA0ck\nSToLKIAdQJXXqwJn4IrOclrFuurW1yqSJA2SJCmN8lHwDkmSdlesbypJ0k6o/npUHOJToKckSUlA\nj4plIw85Rg3ZyCOFEGInsPO2dctvW/4B+OG2dSlAa911kiS5PoBTrBWEEL9QLrvcvv4a5XKKdrnS\n9ahYnwV0f5DnaKT2MQbkxwRJkgKB9betLhFCPPVPnM9jwFWguc5ys4p15tWsN2LkvjEG5McEIcQ5\nIOifPo/HiN+ACZIkbQKeApRCiHRJkm5QodtSHoiHAS//g+dp5DHCGJCNPJFIkrSR8syTBhVa7UeU\nj361EshOyqWBC0AhMLLib2WSJGl1W1NgjY5ua8TIfXG3XaeNGDFixMgDwphlYcSIESMPCcaAbMSI\nESMPCcaAbMSIESMPCf8P7Xo/8FcXObEAAAAASUVORK5CYII=\n", 367 | "text/plain": [ 368 | "" 369 | ] 370 | }, 371 | "metadata": {}, 372 | "output_type": "display_data" 373 | } 374 | ], 375 | "source": [ 376 | "plot_results()" 377 | ] 378 | }, 379 | { 380 | "cell_type": "markdown", 381 | "metadata": {}, 382 | "source": [ 383 | "### Fitting Linear regression and Neural Network to Non-linear data" 384 | ] 385 | 
}, 386 | { 387 | "cell_type": "code", 388 | "execution_count": 10, 389 | "metadata": {}, 390 | "outputs": [ 391 | { 392 | "data": { 393 | "text/plain": [ 394 | "((7500, 3), (7500, 1))" 395 | ] 396 | }, 397 | "execution_count": 10, 398 | "metadata": {}, 399 | "output_type": "execute_result" 400 | } 401 | ], 402 | "source": [ 403 | "def generate_data(n_points=10000, n_features=3, use_nonlinear=True, \n", 404 | " noise_std=0.1, train_test_split = 4):\n", 405 | " \"\"\"\n", 406 | " Arguments:\n", 407 | " n_points - number of data points to generate\n", 408 | " n_features - a positive integer - number of features\n", 409 | " use_nonlinear - if True, generate non-linear data\n", 410 | " train_test_split - an integer - what portion of data to use for testing\n", 411 | " \n", 412 | " Return:\n", 413 | " X_train, Y_train, X_test, Y_test, n_train, n_features\n", 414 | " \"\"\"\n", 415 | " # Linear data or non-linear data?\n", 416 | " if use_nonlinear:\n", 417 | " weights = np.array([[1.0, 0.5, 0.2],[0.5, 0.3, 0.15]], dtype=np.float32)\n", 418 | " else:\n", 419 | " weights = np.array([1.0, 0.5, 0.2], dtype=np.float32)\n", 420 | " \n", 421 | " np.random.seed(42)\n", 422 | " bias = np.ones(n_points).reshape((-1,1))\n", 423 | " low = - np.ones((n_points,n_features), dtype=np.float32)\n", 424 | " high = np.ones((n_points,n_features), dtype=np.float32)\n", 425 | " \n", 426 | " X = np.random.uniform(low=low, high=high)\n", 427 | " noise = np.random.normal(size=(n_points, 1))\n", 428 | " noise_std = 0.1\n", 429 | " \n", 430 | " if use_nonlinear:\n", 431 | " Y = (weights[0,0] * bias + np.dot(X, weights[0, :]).reshape((-1,1)) + \n", 432 | " np.dot(X*X, weights[1, :]).reshape([-1,1]) +\n", 433 | " noise_std * noise)\n", 434 | " else:\n", 435 | " Y = (weights[0] * bias + np.dot(X, weights[:]).reshape((-1,1)) + \n", 436 | " noise_std * noise)\n", 437 | " \n", 438 | " n_test = int(n_points/train_test_split)\n", 439 | " n_train = n_points - n_test\n", 440 | " \n", 441 | " X_train = 
X[:n_train,:]\n", 442 | " Y_train = Y[:n_train].reshape((-1,1))\n", 443 | "\n", 444 | " X_test = X[n_train:,:]\n", 445 | " Y_test = Y[n_train:].reshape((-1,1))\n", 446 | " \n", 447 | " return X_train, Y_train, X_test, Y_test, n_train, n_features\n", 448 | "\n", 449 | "X_train, Y_train, X_test, Y_test, n_train, n_features = generate_data(use_nonlinear=False)\n", 450 | "X_train.shape, Y_train.shape" 451 | ] 452 | }, 453 | { 454 | "cell_type": "code", 455 | "execution_count": 11, 456 | "metadata": {}, 457 | "outputs": [ 458 | { 459 | "data": { 460 | "text/plain": [ 461 | "((7500, 3), (7500, 1))" 462 | ] 463 | }, 464 | "execution_count": 11, 465 | "metadata": {}, 466 | "output_type": "execute_result" 467 | } 468 | ], 469 | "source": [ 470 | "np.random.seed(42)\n", 471 | "X_train, Y_train, X_test, Y_test, n_train, n_features = generate_data(use_nonlinear=True)\n", 472 | "X_train.shape, Y_train.shape" 473 | ] 474 | }, 475 | { 476 | "cell_type": "markdown", 477 | "metadata": {}, 478 | "source": [ 479 | "**Instructions**\n", 480 | "Implement sklearn_lin_regress() function which returns a tuple of\n", 481 | "\n", 482 | "- coefficients of linear regression\n", 483 | "- an instance of LinearRegression class trained to X_train, Y_train\n" 484 | ] 485 | }, 486 | { 487 | "cell_type": "code", 488 | "execution_count": 12, 489 | "metadata": { 490 | "collapsed": true 491 | }, 492 | "outputs": [], 493 | "source": [ 494 | "# GRADED FUNCTION: sklearn_lin_regress\n", 495 | "def sklearn_lin_regress(X_train, Y_train):\n", 496 | " \"\"\"\n", 497 | " Arguments:\n", 498 | " X_train - np.array of size (n by k) where n is number of observations \n", 499 | " of independent variables and k is number of variables\n", 500 | " Y_train - np.array of size (n by 1) where n is the number of observations of dependend variable\n", 501 | " \n", 502 | " Return: a tuple of \n", 503 | " - np.array of size (k+1 by 1) of regression coefficients\n", 504 | " - an instance of LinearRegression\n", 505 | " 
\"\"\"\n", 506 | " from sklearn.linear_model import LinearRegression\n", 507 | " lr_model = None\n", 508 | " theta_sklearn = np.array([], dtype=np.float32)\n", 509 | " ### START CODE HERE ### (≈ 2-3 lines of code)\n", 510 | " X_train = np.hstack((np.ones((X_train.shape[0], 1)), X_train))\n", 511 | " lr_model = LinearRegression(fit_intercept=False)\n", 512 | " lr_model.fit(X_train, Y_train)\n", 513 | " theta_sklearn = lr_model.coef_\n", 514 | " ### END CODE HERE ###\n", 515 | " return theta_sklearn, lr_model\n", 516 | "\n", 517 | "X_test = np.hstack((np.ones((X_test.shape[0], 1)), X_test))" 518 | ] 519 | }, 520 | { 521 | "cell_type": "code", 522 | "execution_count": 13, 523 | "metadata": {}, 524 | "outputs": [ 525 | { 526 | "name": "stdout", 527 | "output_type": "stream", 528 | "text": [ 529 | "Submission successful, please check on the coursera grader page for the status\n" 530 | ] 531 | }, 532 | { 533 | "data": { 534 | "text/plain": [ 535 | "array([ 1.31552735, 1.00221739, 0.50122384, 0.19928303])" 536 | ] 537 | }, 538 | "execution_count": 13, 539 | "metadata": {}, 540 | "output_type": "execute_result" 541 | } 542 | ], 543 | "source": [ 544 | "# you can make submission with answers so far to check yourself at this stage\n", 545 | "### GRADED PART (DO NOT EDIT) ###\n", 546 | "theta_sklearn, lr_model = sklearn_lin_regress(X_train, Y_train)\n", 547 | "\n", 548 | "part_3 = list(theta_sklearn.squeeze())\n", 549 | "try:\n", 550 | " part3 = \" \".join(map(repr, part_3))\n", 551 | "except TypeError:\n", 552 | " part3 = repr(part_3)\n", 553 | " \n", 554 | "submissions[all_parts[2]]=part3\n", 555 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key, all_parts[:3],all_parts,submissions)\n", 556 | "\n", 557 | "theta_sklearn.squeeze()\n", 558 | "### GRADED PART (DO NOT EDIT) ###" 559 | ] 560 | }, 561 | { 562 | "cell_type": "markdown", 563 | "metadata": {}, 564 | "source": [ 565 | "LinearRegression.score() computes $R^2$ coefficient. 
The coefficient $R^2$ is defined as $(1 - \\frac{u}{v})$, where u is the residual sum of squares $\\sum (y\\_true - y\\_pred)^2$ and v is the total sum of squares $\\sum (y\\_true - \\bar{y\\_true})^2$" 566 | ] 567 | }, 568 | { 569 | "cell_type": "code", 570 | "execution_count": 14, 571 | "metadata": {}, 572 | "outputs": [ 573 | { 574 | "name": "stdout", 575 | "output_type": "stream", 576 | "text": [ 577 | "Submission successful, please check on the coursera grader page for the status\n" 578 | ] 579 | }, 580 | { 581 | "data": { 582 | "text/plain": [ 583 | "0.9065452090081394" 584 | ] 585 | }, 586 | "execution_count": 14, 587 | "metadata": {}, 588 | "output_type": "execute_result" 589 | } 590 | ], 591 | "source": [ 592 | "# you can make submission with answers so far to check yourself at this stage\n", 593 | "### GRADED PART (DO NOT EDIT) ###\n", 594 | "# calculate Linear Regression score\n", 595 | "model_score = 0.\n", 596 | "if lr_model is not None:\n", 597 | " model_score = lr_model.score(X_test, Y_test)\n", 598 | "part4=str(model_score)\n", 599 | "submissions[all_parts[3]]=part4\n", 600 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key, all_parts[:4],all_parts,submissions)\n", 601 | "model_score\n", 602 | "### GRADED PART (DO NOT EDIT) ###" 603 | ] 604 | }, 605 | { 606 | "cell_type": "markdown", 607 | "metadata": {}, 608 | "source": [ 609 | "### Neural Network with Tensorflow \n", 610 | "\n", 611 | "**Instructions**\n", 612 | "\n", 613 | "Construct two-layer Neural Network utilizing neuron_layer() function. The number of nodes in two hidden layers are defined by n_hidden1 and n_hidden2, respectively. Use Gradient Descent Optimizer.\n", 614 | "\n", 615 | "The train the network using X_train / y_train and compute accuracy of the prediction using X_test data set." 
616 | ] 617 | }, 618 | { 619 | "cell_type": "code", 620 | "execution_count": 15, 621 | "metadata": { 622 | "collapsed": true 623 | }, 624 | "outputs": [], 625 | "source": [ 626 | "def random_batch(X_train, y_train, batch_size):\n", 627 | " np.random.seed(42)\n", 628 | " rnd_indices = np.random.randint(0, len(X_train), batch_size)\n", 629 | " X_batch = X_train[rnd_indices]\n", 630 | " y_batch = y_train[rnd_indices]\n", 631 | " return X_batch, y_batch\n", 632 | " \n", 633 | "def neuron_layer(X, n_neurons, name, activation_fn=None):\n", 634 | " with tf.name_scope(name):\n", 635 | " n_inputs = int(X.get_shape()[1])\n", 636 | " stddev = 2 / np.sqrt(n_inputs)\n", 637 | " init = tf.truncated_normal((n_inputs, n_neurons), stddev=stddev)\n", 638 | " W = tf.Variable(init, name=\"kernel\")\n", 639 | " b = tf.Variable(tf.zeros([n_neurons]), name=\"bias\")\n", 640 | " Z = tf.matmul(X, W) + b\n", 641 | " if activation_fn is not None:\n", 642 | " return activation_fn(Z)\n", 643 | " else:\n", 644 | " return Z" 645 | ] 646 | }, 647 | { 648 | "cell_type": "code", 649 | "execution_count": 16, 650 | "metadata": { 651 | "collapsed": true 652 | }, 653 | "outputs": [], 654 | "source": [ 655 | "np.random.seed(42)\n", 656 | "X_train, Y_train, X_test, Y_test, n_train, n_features = generate_data(use_nonlinear=True)\n", 657 | "\n", 658 | "n_hidden1 = 100\n", 659 | "n_hidden2 = 120\n", 660 | "n_outputs = 1 # single value prediction\n", 661 | "n_inputs = X_test.shape[1]\n", 662 | "\n", 663 | "reset_graph()\n", 664 | "X = tf.placeholder(tf.float32, shape=(None, n_inputs), name=\"X\")\n", 665 | "y = tf.placeholder(tf.float32, shape=(None), name=\"y\")\n", 666 | "\n", 667 | "### START CODE HERE ### (≈ 10-15 lines of code)\n", 668 | "layer_1 = neuron_layer(X, n_hidden1, \"layer_1\", tf.nn.relu)\n", 669 | "layer_2 = neuron_layer(layer_1, n_hidden2, \"layer_2\", tf.nn.relu)\n", 670 | "output = neuron_layer(layer_2, n_outputs, \"layer_output\")\n", 671 | "loss = tf.reduce_mean(tf.square(output - 
y))\n", 672 | "### END CODE HERE ###\n", 673 | "\n", 674 | "init = tf.global_variables_initializer()" 675 | ] 676 | }, 677 | { 678 | "cell_type": "code", 679 | "execution_count": 17, 680 | "metadata": {}, 681 | "outputs": [], 682 | "source": [ 683 | "learning_rate = 0.01\n", 684 | "n_epochs = 200\n", 685 | "batch_size = 60\n", 686 | "num_rec = X_train.shape[0]\n", 687 | "n_batches = int(np.ceil(num_rec / batch_size))\n", 688 | "acc_test = 0. # assign the result of accuracy testing to this variable\n", 689 | "\n", 690 | "### START CODE HERE ### (≈ 9-10 lines of code)\n", 691 | "train = tf.train.GradientDescentOptimizer(learning_rate).minimize(loss)\n", 692 | "with tf.Session() as sess:\n", 693 | " sess.run(init)\n", 694 | " for i in range(n_epochs):\n", 695 | " X_batch, y_batch = random_batch(X_train, Y_train, batch_size)\n", 696 | " _, c = sess.run([loss, train], feed_dict={X: X_batch, y: y_batch})\n", 697 | " acc_test = sess.run(loss, feed_dict={X: X_test, y: Y_test})\n", 698 | "### END CODE HERE ###" 699 | ] 700 | }, 701 | { 702 | "cell_type": "code", 703 | "execution_count": 18, 704 | "metadata": {}, 705 | "outputs": [ 706 | { 707 | "name": "stdout", 708 | "output_type": "stream", 709 | "text": [ 710 | "Submission successful, please check on the coursera grader page for the status\n" 711 | ] 712 | }, 713 | { 714 | "data": { 715 | "text/plain": [ 716 | "0.041695457" 717 | ] 718 | }, 719 | "execution_count": 18, 720 | "metadata": {}, 721 | "output_type": "execute_result" 722 | } 723 | ], 724 | "source": [ 725 | "### GRADED PART (DO NOT EDIT) ###\n", 726 | "part5=str(acc_test)\n", 727 | "submissions[all_parts[4]]=part5\n", 728 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key, all_parts[:5],all_parts,submissions)\n", 729 | "acc_test\n", 730 | "### GRADED PART (DO NOT EDIT) ###" 731 | ] 732 | }, 733 | { 734 | "cell_type": "code", 735 | "execution_count": null, 736 | "metadata": { 737 | "collapsed": true 738 | }, 739 | "outputs": [], 740 | "source": [] 
741 | } 742 | ], 743 | "metadata": { 744 | "coursera": { 745 | "course_slug": "guided-tour-machine-learning-finance" 746 | }, 747 | "kernelspec": { 748 | "display_name": "Python 3", 749 | "language": "python", 750 | "name": "python3" 751 | }, 752 | "language_info": { 753 | "codemirror_mode": { 754 | "name": "ipython", 755 | "version": 3 756 | }, 757 | "file_extension": ".py", 758 | "mimetype": "text/x-python", 759 | "name": "python", 760 | "nbconvert_exporter": "python", 761 | "pygments_lexer": "ipython3", 762 | "version": "3.6.0" 763 | } 764 | }, 765 | "nbformat": 4, 766 | "nbformat_minor": 2 767 | } 768 | -------------------------------------------------------------------------------- /discrete_black_scholes_m3_ex1_v3.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Discrete-Time Black Scholes\n", 8 | "Welcome to your 1st assignment in Reinforcement Learning in Finance. This exercise will introduce Black-Scholes model as viewed through the lens of pricing an option as discrete-time replicating portfolio of stock and bond.\n", 9 | "\n", 10 | "**Instructions:**\n", 11 | "- You will be using Python 3.\n", 12 | "- Avoid using for-loops and while-loops, unless you are explicitly told to do so.\n", 13 | "- Do not modify the (# GRADED FUNCTION [function name]) comment in some cells. Your work would not be graded if you change this. Each cell containing that comment should only contain one function.\n", 14 | "- After coding your function, run the cell right below it to check if your result is correct.\n", 15 | "\n", 16 | "\n", 17 | "Let's get started!" 18 | ] 19 | }, 20 | { 21 | "cell_type": "markdown", 22 | "metadata": {}, 23 | "source": [ 24 | "## About iPython Notebooks ##\n", 25 | "\n", 26 | "iPython Notebooks are interactive coding environments embedded in a webpage. You will be using iPython notebooks in this class. 
You only need to write code between the ### START CODE HERE ### and ### END CODE HERE ### comments. After writing your code, you can run the cell by either pressing \"SHIFT\"+\"ENTER\" or by clicking on \"Run Cell\" (denoted by a play symbol) in the upper bar of the notebook. \n", 27 | "\n", 28 | "We will often specify \"(≈ X lines of code)\" in the comments to tell you about how much code you need to write. It is just a rough estimate, so don't feel bad if your code is longer or shorter." 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 1, 34 | "metadata": { 35 | "collapsed": true 36 | }, 37 | "outputs": [], 38 | "source": [ 39 | "import numpy as np\n", 40 | "import pandas as pd\n", 41 | "import matplotlib.pyplot as plt\n", 42 | "%matplotlib inline\n", 43 | "\n", 44 | "from numpy.random import standard_normal, seed\n", 45 | "\n", 46 | "import scipy.stats as stats\n", 47 | "from scipy.stats import norm\n", 48 | "\n", 49 | "import sys\n", 50 | "\n", 51 | "sys.path.append(\"..\")\n", 52 | "import grading\n", 53 | "\n", 54 | "import datetime \n", 55 | "import time\n", 56 | "import bspline\n", 57 | "import bspline.splinelab as splinelab" 58 | ] 59 | }, 60 | { 61 | "cell_type": "code", 62 | "execution_count": 2, 63 | "metadata": { 64 | "collapsed": true 65 | }, 66 | "outputs": [], 67 | "source": [ 68 | "### ONLY FOR GRADING. DO NOT EDIT ###\n", 69 | "submissions=dict()\n", 70 | "assignment_key=\"J_L65CoiEeiwfQ53m1Mlug\" \n", 71 | "all_parts=[\"9jLRK\",\"YoMns\",\"Wc3NN\",\"fcl3r\"]\n", 72 | "### ONLY FOR GRADING. 
DO NOT EDIT ###" 73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": 3, 78 | "metadata": { 79 | "collapsed": true 80 | }, 81 | "outputs": [], 82 | "source": [ 83 | "COURSERA_TOKEN = '' # the key provided to the Student under his/her email on submission page\n", 84 | "COURSERA_EMAIL = '' # the email" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": 4, 90 | "metadata": { 91 | "collapsed": true 92 | }, 93 | "outputs": [], 94 | "source": [ 95 | "# The Black-Scholes prices\n", 96 | "def bs_put(t, S0, K, r, sigma, T):\n", 97 | " d1 = (np.log(S0/K) + (r + 1/2 * sigma**2) * (T-t)) / sigma / np.sqrt(T-t)\n", 98 | " d2 = (np.log(S0/K) + (r - 1/2 * sigma**2) * (T-t)) / sigma / np.sqrt(T-t)\n", 99 | " price = K * np.exp(-r * (T-t)) * norm.cdf(-d2) - S0 * norm.cdf(-d1)\n", 100 | " return price\n", 101 | "\n", 102 | "def bs_call(t, S0, K, r, sigma, T):\n", 103 | " d1 = (np.log(S0/K) + (r + 1/2 * sigma**2) * (T-t)) / sigma / np.sqrt(T-t)\n", 104 | " d2 = (np.log(S0/K) + (r - 1/2 * sigma**2) * (T-t)) / sigma / np.sqrt(T-t)\n", 105 | " price = S0 * norm.cdf(d1) - K * np.exp(-r * (T-t)) * norm.cdf(d2)\n", 106 | " return price\n", 107 | "\n", 108 | "def d1(S0, K, r, sigma, T):\n", 109 | " return (np.log(S0/K) + (r + sigma**2 / 2) * T)/(sigma * np.sqrt(T))\n", 110 | " \n", 111 | "def d2(S0, K, r, sigma, T):\n", 112 | " return (np.log(S0 / K) + (r - sigma**2 / 2) * T) / (sigma * np.sqrt(T))\n", 113 | " " 114 | ] 115 | }, 116 | { 117 | "cell_type": "markdown", 118 | "metadata": {}, 119 | "source": [ 120 | "Simulate $N_{MC}$ stock price sample paths with $T$ steps by the classical Black-Sholes formula.\n", 121 | "\n", 122 | "$$dS_t=\\mu S_tdt+\\sigma S_tdW_t\\quad\\quad S_{t+1}=S_te^{\\left(\\mu-\\frac{1}{2}\\sigma^2\\right)\\Delta t+\\sigma\\sqrt{\\Delta t}Z}$$\n", 123 | "\n", 124 | "where $Z$ is a standard normal random variable.\n", 125 | "\n", 126 | "MC paths are simulated by GeneratePaths() method of DiscreteBlackScholes class." 
127 | ] 128 | }, 129 | { 130 | "cell_type": "markdown", 131 | "metadata": {}, 132 | "source": [ 133 | "### Part 1\n", 134 | "\n", 135 | "\n", 136 | "Class DiscreteBlackScholes implements the above calculations with class variables to math symbols mapping of:\n", 137 | "\n", 138 | "$$\\Delta S_t=S_{t+1} - e^{-r\\Delta t} S_t\\space \\quad t=T-1,...,0$$\n", 139 | " \n", 140 | "**Instructions:**\n", 141 | "Some portions of code in DiscreteBlackScholes have bee taken out. You are to implement the missing portions of code in DiscreteBlackScholes class.\n", 142 | "\n", 143 | "$$\\Pi_t=e^{-r\\Delta t}\\left[\\Pi_{t+1}-u_t \\Delta S_t\\right]\\quad t=T-1,...,0$$\n", 144 | "\n", 145 | "- implement DiscreteBlackScholes.function_A_vec() method\n", 146 | "$$A_{nm}^{\\left(t\\right)}=\\sum_{k=1}^{N_{MC}}{\\Phi_n\\left(X_t^k\\right)\\Phi_m\\left(X_t^k\\right)\\left(\\Delta\\hat{S}_t^k\\right)^2}\\quad\\quad$$ \n", 147 | "\n", 148 | "- implement DiscreteBlackScholes.function_B_vec() method\n", 149 | "$$B_n^{\\left(t\\right)}=\\sum_{k=1}^{N_{MC}}{\\Phi_n\\left(X_t^k\\right)\\left[\\hat\\Pi_{t+1}^k\\Delta\\hat{S}_t^k+\\frac{1}{2\\gamma\\lambda}\\Delta S_t^k\\right]}$$\n", 150 | "- implement DiscreteBlackScholes.gen_paths() method using the following relation:\n", 151 | "$$S_{t+1}=S_te^{\\left(\\mu-\\frac{1}{2}\\sigma^2\\right)\\Delta t+\\sigma\\sqrt{\\Delta t}Z}$$\n", 152 | "where $Z \\sim N(0,1)$\n", 153 | "- implement parts of DiscreteBlackScholes.roll_backward()\n", 154 | " - DiscreteBlackScholes.bVals corresponds to $B_t$ and is computed as $$B_t = e^{-r\\Delta t}\\left[B_{t+1} + (u_{t+1} - u_t)S_{t+1}\\right]\\quad t=T-1,...,0$$\n", 155 | " \n", 156 | "DiscreteBlackScholes.opt_hedge corresponds to $\\phi_t$ and is computed as \n", 157 | " $$\\phi_t=\\mathbf A_t^{-1}\\mathbf B_t$$" 158 | ] 159 | }, 160 | { 161 | "cell_type": "code", 162 | "execution_count": 14, 163 | "metadata": { 164 | "collapsed": true 165 | }, 166 | "outputs": [], 167 | "source": [ 168 | "class 
DiscreteBlackScholes:\n", 169 | " \"\"\"\n", 170 | " Class implementing discrete Black Scholes\n", 171 | " DiscreteBlackScholes is class for pricing and hedging under\n", 172 | " the real-world measure for a one-dimensional Black-Scholes setting\n", 173 | " \"\"\"\n", 174 | "\n", 175 | " def __init__(self,\n", 176 | " s0,\n", 177 | " strike,\n", 178 | " vol,\n", 179 | " T,\n", 180 | " r,\n", 181 | " mu,\n", 182 | " numSteps,\n", 183 | " numPaths):\n", 184 | " \"\"\"\n", 185 | " :param s0: initial price of the underlying\n", 186 | " :param strike: option strike\n", 187 | " :param vol: volatility\n", 188 | " :param T: time to maturity, in years\n", 189 | " :param r: risk-free rate,\n", 190 | " :param mu: real drift, asset drift\n", 191 | " :param numSteps: number of time steps\n", 192 | " :param numPaths: number of Monte Carlo paths\n", 193 | " \"\"\"\n", 194 | " self.s0 = s0\n", 195 | " self.strike = strike\n", 196 | " self.vol = vol\n", 197 | " self.T = T\n", 198 | " self.r = r\n", 199 | " self.mu = mu\n", 200 | " self.numSteps = numSteps\n", 201 | " self.numPaths = numPaths\n", 202 | "\n", 203 | " self.dt = self.T / self.numSteps # time step\n", 204 | " self.gamma = np.exp(-r * self.dt) # discount factor for one time step, i.e. 
gamma in the QLBS paper\n", 205 | "\n", 206 | " self.sVals = np.zeros((self.numPaths, self.numSteps + 1), 'float') # matrix of stock values\n", 207 | "\n", 208 | " # initialize half of the paths with stock price values ranging from 0.5 to 1.5 of s0\n", 209 | " # the other half of the paths start with s0\n", 210 | " half_paths = int(numPaths / 2)\n", 211 | "\n", 212 | " if False:\n", 213 | " # Grau (2010) \"Applications of Least-Squares Regressions to Pricing and Hedging of Financial Derivatives\"\n", 214 | " self.sVals[:, 0] = (np.hstack((np.linspace(0.5 * s0, 1.5 * s0, half_paths),\n", 215 | " s0 * np.ones(half_paths, 'float')))).T\n", 216 | "\n", 217 | " self.sVals[:, 0] = s0 * np.ones(numPaths, 'float')\n", 218 | " self.optionVals = np.zeros((self.numPaths, self.numSteps + 1), 'float') # matrix of option values\n", 219 | " self.intrinsicVals = np.zeros((self.numPaths, self.numSteps + 1), 'float')\n", 220 | "\n", 221 | " self.bVals = np.zeros((self.numPaths, self.numSteps + 1), 'float') # matrix of cash position values\n", 222 | " self.opt_hedge = np.zeros((self.numPaths, self.numSteps + 1),\n", 223 | " 'float') # matrix of optimal hedges calculated from cross-sectional information F_t\n", 224 | " self.X = None\n", 225 | " self.data = None # matrix of features, i.e. 
self.X as sum of basis functions\n", 226 | " self.delta_S_hat = None\n", 227 | "\n", 228 | " # coef = 1.0/(2 * gamma * risk_lambda)\n", 229 | " # override it by zero to have pure risk hedge\n", 230 | " self.coef = 0.\n", 231 | "\n", 232 | " def gen_paths(self):\n", 233 | " \"\"\"\n", 234 | " A simplest path generator\n", 235 | " \"\"\"\n", 236 | " np.random.seed(42)\n", 237 | " # Spline basis of order p on knots k\n", 238 | "\n", 239 | " ### START CODE HERE ### (≈ 3-4 lines of code)\n", 240 | " # self.sVals = your code goes here ...\n", 241 | " # for-loop or while loop is allowed heres\n", 242 | " \n", 243 | " Z = np.random.normal(0, 1, size=(self.numSteps + 1, self.numPaths)).T\n", 244 | " for t in range(0, self.numSteps):\n", 245 | " self.sVals[:, t + 1] = self.sVals[:, t] * np.exp((self.mu - 0.5 * self.vol**2) * self.dt + (self.vol * np.sqrt(self.dt) * Z[:, t + 1]))\n", 246 | " \n", 247 | " print(self.sVals)\n", 248 | " ### END CODE HERE ###\n", 249 | "\n", 250 | " # like in QLBS\n", 251 | " delta_S = self.sVals[:, 1:] - np.exp(self.r * self.dt) * self.sVals[:, :self.numSteps]\n", 252 | " self.delta_S_hat = np.apply_along_axis(lambda x: x - np.mean(x), axis=0, arr=delta_S)\n", 253 | "\n", 254 | " # state variable\n", 255 | " # delta_t here is due to their conventions\n", 256 | " self.X = - (self.mu - 0.5 * self.vol ** 2) * np.arange(self.numSteps + 1) * self.dt + np.log(self.sVals)\n", 257 | "\n", 258 | " X_min = np.min(np.min(self.X))\n", 259 | " X_max = np.max(np.max(self.X))\n", 260 | "\n", 261 | " print('X.shape = ', self.X.shape)\n", 262 | " print('X_min, X_max = ', X_min, X_max)\n", 263 | "\n", 264 | " p = 4 # order of spline (as-is; 3 = cubic, 4: B-spline?)\n", 265 | " ncolloc = 12\n", 266 | " tau = np.linspace(X_min, X_max, ncolloc) # These are the sites to which we would like to interpolate\n", 267 | "\n", 268 | " # k is a knot vector that adds endpoints repeats as appropriate for a spline of order p\n", 269 | " # To get meaningful results, one should 
have ncolloc >= p+1\n", 270 | " k = splinelab.aptknt(tau, p)\n", 271 | " basis = bspline.Bspline(k, p)\n", 272 | "\n", 273 | " num_basis = ncolloc # len(k) #\n", 274 | " self.data = np.zeros((self.numSteps + 1, self.numPaths, num_basis))\n", 275 | "\n", 276 | " print('num_basis = ', num_basis)\n", 277 | " print('dim self.data = ', self.data.shape)\n", 278 | "\n", 279 | " # fill it, expand function in finite dimensional space\n", 280 | " # in neural network the basis is the neural network itself\n", 281 | " t_0 = time.time()\n", 282 | " for ix in np.arange(self.numSteps + 1):\n", 283 | " x = self.X[:, ix]\n", 284 | " self.data[ix, :, :] = np.array([basis(el) for el in x])\n", 285 | " t_end = time.time()\n", 286 | " print('\\nTime Cost of basis expansion:', t_end - t_0, 'seconds')\n", 287 | "\n", 288 | " def function_A_vec(self, t, reg_param=1e-3):\n", 289 | " \"\"\"\n", 290 | " function_A_vec - compute the matrix A_{nm} from Eq. (52) (with a regularization!)\n", 291 | " Eq. (52) in QLBS Q-Learner in the Black-Scholes-Merton article\n", 292 | "\n", 293 | " Arguments:\n", 294 | " t - time index, a scalar, an index into time axis of data_mat\n", 295 | " reg_param - a scalar, regularization parameter\n", 296 | "\n", 297 | " Return:\n", 298 | " - np.array, i.e. matrix A_{nm} of dimension num_basis x num_basis\n", 299 | " \"\"\"\n", 300 | " X_mat = self.data[t, :, :]\n", 301 | " num_basis_funcs = X_mat.shape[1]\n", 302 | " this_dS = self.delta_S_hat[:, t]\n", 303 | " hat_dS2 = (this_dS ** 2).reshape(-1, 1)\n", 304 | " A_mat = np.dot(X_mat.T, X_mat * hat_dS2) + reg_param * np.eye(num_basis_funcs)\n", 305 | " return A_mat\n", 306 | "\n", 307 | " def function_B_vec(self, t, Pi_hat):\n", 308 | " \"\"\"\n", 309 | " function_B_vec - compute vector B_{n} from Eq. 
(52) QLBS Q-Learner in the Black-Scholes-Merton article\n", 310 | "\n", 311 | " Arguments:\n", 312 | " t - time index, a scalar, an index into time axis of delta_S_hat\n", 313 | " Pi_hat - pandas.DataFrame of dimension N_MC x T of portfolio values\n", 314 | " Return:\n", 315 | " B_vec - np.array() of dimension num_basis x 1\n", 316 | " \"\"\"\n", 317 | " tmp = Pi_hat * self.delta_S_hat[:, t] + self.coef * (np.exp((self.mu - self.r) * self.dt)) * self.sVals[:, t]\n", 318 | " X_mat = self.data[t, :, :] # matrix of dimension N_MC x num_basis\n", 319 | "\n", 320 | " B_vec = np.dot(X_mat.T, tmp)\n", 321 | " return B_vec\n", 322 | "\n", 323 | " def seed_intrinsic(self, strike=None, cp='P'):\n", 324 | " \"\"\"\n", 325 | " initilaize option value and intrinsic value for each node\n", 326 | " \"\"\"\n", 327 | " if strike is not None:\n", 328 | " self.strike = strike\n", 329 | "\n", 330 | " if cp == 'P':\n", 331 | " # payoff function at maturity T: max(K - S(T),0) for all paths\n", 332 | " self.optionVals = np.maximum(self.strike - self.sVals[:, -1], 0).copy()\n", 333 | " # payoff function for all paths, at all time slices\n", 334 | " self.intrinsicVals = np.maximum(self.strike - self.sVals, 0).copy()\n", 335 | " elif cp == 'C':\n", 336 | " # payoff function at maturity T: max(S(T) -K,0) for all paths\n", 337 | " self.optionVals = np.maximum(self.sVals[:, -1] - self.strike, 0).copy()\n", 338 | " # payoff function for all paths, at all time slices\n", 339 | " self.intrinsicVals = np.maximum(self.sVals - self.strike, 0).copy()\n", 340 | " else:\n", 341 | " raise Exception('Invalid parameter: %s'% cp)\n", 342 | "\n", 343 | " self.bVals[:, -1] = self.intrinsicVals[:, -1]\n", 344 | "\n", 345 | " def roll_backward(self):\n", 346 | " \"\"\"\n", 347 | " Roll the price and optimal hedge back in time starting from maturity\n", 348 | " \"\"\"\n", 349 | "\n", 350 | " for t in range(self.numSteps - 1, -1, -1):\n", 351 | "\n", 352 | " # determine the expected portfolio value at the next 
time node\n", 353 | " piNext = self.bVals[:, t+1] + self.opt_hedge[:, t+1] * self.sVals[:, t+1]\n", 354 | " pi_hat = piNext - np.mean(piNext)\n", 355 | "\n", 356 | " A_mat = self.function_A_vec(t)\n", 357 | " B_vec = self.function_B_vec(t, pi_hat)\n", 358 | " phi = np.dot(np.linalg.inv(A_mat), B_vec)\n", 359 | " self.opt_hedge[:, t] = np.dot(self.data[t, :, :], phi)\n", 360 | "\n", 361 | " ### START CODE HERE ### (≈ 1-2 lines of code)\n", 362 | " # implement code to update self.bVals\n", 363 | " # self.bVals[:,t] = your code goes here ....\n", 364 | " self.bVals[:,t] = np.exp(-self.r * self.dt) * (self.bVals[:, t+1] + (self.opt_hedge[:, t+1] - self.opt_hedge[:, t]) * self.sVals[:, t+1])\n", 365 | " \n", 366 | "\n", 367 | " ### END CODE HERE ###\n", 368 | "\n", 369 | " # calculate the initial portfolio value\n", 370 | " initPortfolioVal = self.bVals[:, 0] + self.opt_hedge[:, 0] * self.sVals[:, 0]\n", 371 | "\n", 372 | " # use only the second half of the paths generated with paths starting from S0\n", 373 | " optionVal = np.mean(initPortfolioVal)\n", 374 | " optionValVar = np.std(initPortfolioVal)\n", 375 | " delta = np.mean(self.opt_hedge[:, 0])\n", 376 | "\n", 377 | " return optionVal, delta, optionValVar" 378 | ] 379 | }, 380 | { 381 | "cell_type": "code", 382 | "execution_count": 15, 383 | "metadata": {}, 384 | "outputs": [ 385 | { 386 | "name": "stdout", 387 | "output_type": "stream", 388 | "text": [ 389 | "[[ 100. 98.23650359 98.6842395 ..., 111.52820537\n", 390 | " 111.93345414 111.50088104]\n", 391 | " [ 100. 99.47538589 100.18466561 ..., 69.58859259\n", 392 | " 69.36721589 68.46903615]\n", 393 | " [ 100. 99.57310236 100.94511135 ..., 110.66761375\n", 394 | " 110.53260244 110.37282496]\n", 395 | " ..., \n", 396 | " [ 100. 100.19783913 100.59050962 ..., 151.7887043 151.63565543\n", 397 | " 152.14692905]\n", 398 | " [ 100. 100.07733423 101.11151453 ..., 103.08321744\n", 399 | " 101.41095506 101.46651123]\n", 400 | " [ 100. 
98.57422289 99.36322314 ..., 91.31429149\n", 401 | " 91.06798685 92.50219743]]\n", 402 | "X.shape = (100, 253)\n", 403 | "X_min, X_max = 4.10743882917 5.16553756345\n", 404 | "num_basis = 12\n", 405 | "dim self.data = (253, 100, 12)\n", 406 | "\n", 407 | "Time Cost of basis expansion: 7.709317445755005 seconds\n" 408 | ] 409 | }, 410 | { 411 | "data": { 412 | "text/plain": [ 413 | "" 414 | ] 415 | }, 416 | "execution_count": 15, 417 | "metadata": {}, 418 | "output_type": "execute_result" 419 | }, 420 | { 421 | "data": { 422 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAWkAAAEcCAYAAAAFlEU8AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAGrtJREFUeJzt3X+U3HV97/HnK5tN3KCycAlqFmK8/ginihIaPbT0B7+u\nsf4iordSwZZrKae9tS2WEyTiVejRIzQ9V+y1p23Aej38kCDEFbytASv01p6CJmxiDJArys8BZCms\ntWSBzeZ9/5jvhMlkZndmdma+n+/s63HOHnbmOzP7/gby2g/v7+fz+SoiMDOzNC3IuwAzM2vMIW1m\nljCHtJlZwhzSZmYJc0ibmSXMIW1mljCHtJlZwhzSZmYJc0ib1ZD0vyV9Ju86zMAhbR0g6UFJk5L+\no+rriznXc1oXP7v2XJ+T9N1u/Lx2SDpL0jZJE5KekXSHpJG867L2OKStU94TES+t+vpo3gV10QHn\nCvx+3gVVSDoHuBQ4DzgMeD1wDfBMjmXZHDikrWskvVbS05KOzx4vkzQu6aTs8YOS1ku6JxvxfVnS\nS6rev0zSTdl7HpD0x1XHjpa0OTv2b5WRu6SrgeXALdko98KZPid7zypJd0v6uaRNwEvoMkkfl3Rj\nzXNfkPSXVcdLWU27JZ3a5Ef/LvC3EbEtyp6KiKsiYk+nz8F6wyFtXRMRPwY+DlwjaQnwZeArEXFH\n1cvOAtYArwXeAHwSQNIC4BZgBzACnAqcL2mNpAHgm8BDwIrs+PXZz/ww8DDZaBf4i0afk/2cRcAo\ncDVwOPA14P2d/9M4yPXAOyW9LKtjAPhN4DpJK4GPAm+NiJdR/vN5sMnPnQQ+Iuk3JR3R+bKt1xzS\n1imjWQ+08vV7ABFxJXA/cBfwKuDimvd9MSIeiYingc8Cv5U9/1ZgaUT8WUS8EBE/Aa4EzgTeBiwD\n1kXEsxHxXEQ06gnP9DkAJwCDwBURMRURNwLfn8sfhKTLJf2zpKslDdZ7TUQ8BNwNvC976hRgT0Tc\nCUwDi4FfkDQYEQ9mv/Ca8dvAFsq/nH4q6RZJR1bVdrKk5W2emuXAIW2dsjYihqu+rqw6diXwJuB/\nRcTzNe97pOr7hyiHL8CrgWXVwQ98AngFcDTwUETsbaKumT6H7OeV4sA9ex9q4nPrkvQWYCQifhW4\nD/jADC+/jhd/KX0oe0xE3A+cD1wCPCnpeknL6n5CjYh4IiLOj4jllH+ZvZny/81UfATw/sQF4pC2\nrpL0UuAK4EvAJZIOr3nJ0VXfLwcey75/BHigJvhfFhHvzI4tl7SwwY+tDqGZPgfgcWBEkmrqaNcv\nA7dm338LOHGG134NOEnSUZRH1NftP4GI6yLiVyj/kgng8lYLiYhtwE7gEABJ7wXeDVwt6cOtfp7l\nwyFt3fYFYGtEnAv8H+Bvao7/oaSjsvC+GNiUPf894OfZBbQhSQ
OS3iTprdmxx4HLJB0i6SWSqsPw\np8B/buJzAP4V2Av8saRBSWdQHoG26zDg37Pvf0a5z11XRIwDd1Du1T8QEfcCSFop6RRJi4HnKPeZ\n9832gyVdJOlESYuzr3OAk7LPh3Iff1tEnBQRV7dzctZ7DmnrlMpsisrX1yWdDrwD+IPsNX8KHC/p\nrKr3XUd55PkT4MfAZwAiYpryqO844AHgKeAq4NDs2HuA11G+SPgo8MGqz/wc8MmstfGxRp+T/ZwX\ngDOAc4Cns8/ZPIc/hwng5dn3h2afOZPrgNOoGkVT7kdfltX6BHAksB5A0j9I+kSDz3o55UD+N8p/\nLh8ETo2Iu7LjrwN+1MrJWP7k22dZXiQ9CJwbEd/Ou5ZOkXQc8KcR8dtZmD4QEV/Nuy4ASe8DXh0R\nV+RdizXPI2mzDoqI7ZRnVfwz8EbgppxLqrYbOFeSQ7pAPJK23PTjSNqs0xzSZmYJc7vDzCxhDmkz\ns4Q1WgxQCEcccUSsWLEi7zLMzFq2bdu2pyJi6WyvK3RIr1ixgq1bt+ZdhplZyyQ1tf2A2x1mZglz\nSJuZJcwhbWaWMIe0mVnCHNJmZgkr9OwOM7O8jI6V2LBlN49NTLJseIh1a1aydlXnb8rukDYza9Ho\nWIn1m3cyOTUNQGlikvWbdwJ0PKjd7jAza9GGLbv3B3TF5NQ0G7bs7vjPckibmbXosYnJlp6fC4e0\nmVmLlg0PtfT8XDikzcxatG7NSoYGBw54bmhwgHVrVnb8Z/nCoZlZiyoXBz27w8wsUWtXjXQllGu5\n3WFmljCHtJlZwhzSZmYJc0ibmSXMIW1mljCHtJlZwhzSZmYJc0ibmSUsqZCWNCzpRkn3SbpX0i/l\nXZOZWZ5SW3H4BeBbEfEBSYuAJXkXZGaWp2RCWtKhwK8B5wBExAvAC3nWZGaWt5TaHa8BxoEvSxqT\ndJWkQ/IuyswsTymF9ELgeOCvI2IV8CxwUe2LJJ0naaukrePj472u0cysp1IK6UeBRyPiruzxjZRD\n+wARsTEiVkfE6qVLl/a0QDOzXksmpCPiCeARSZVds08F7smxJDOz3CVz4TDzR8C12cyOnwD/Led6\nzMxylVRIR8R2YHXedZiZpSKZdoeZmR3MIW1mljCHtJlZwhzSZmYJc0ibmSXMIW1mljCHtJlZwhzS\nZmYJc0ibmSXMIW1mljCHtJlZwpLau8PM+t/oWIkNW3bz2MQky4aHWLdmJWtXjeRdVrIc0mbWM6Nj\nJdZv3snk1DQApYlJ1m/eCeCgbsDtDjPrmQ1bdu8P6IrJqWk2bNmdU0Xpc0ibWc88NjHZ0vPmkDaz\nHlo2PNTS8+aQNrMeWrdmJUODAwc8NzQ4wLo1Kxu8w3zh0Mx6pnJx0LM7mueQNrOeWrtqxKHcAoe0\nmc2J5z13l0PazNrmec/d5wuHZtY2z3vuPoe0mbXN8567zyFtZm3zvOfuc0ibWds877n7fOHQzNrm\nec/d55A2sznxvOfuSq7dIWlA0pikb+Zdi5lZ3pILaeBPgHvzLsLMLAVJhbSko4B3AVflXYuZWQqS\nCmngCuBCYF+jF0g6T9JWSVvHx8d7V5mZWQ6SuXAo6d3AkxGxTdJJjV4XERuBjQCrV6+OHpVn1pe8\n70b6UhpJnwi8V9KDwPXAKZKuybcks/5V2XejNDFJUN534/xN21n1Z7cyOlbKuzzLJBPSEbE+Io6K\niBXAmcB3IuLsnMsy60ujYyUuuGHHQftuADyzZ4r1m3c6qBORTEibWW9URtDT0bhb6E2S0pFMT7pa\nRNwB3JFzGWZ9qd7OdfV4k6Q0eCRtNs80G77eJCkNDmmzeaaZ8PUmSelwSJvNM/V2rhtcIA5bMoiA\nkeEhPnfGsZ6Kl4gke9Jm1j2V8L30ll08s2cKgEMWL+TT73mjgzlBHkmbzVPPTb24sHdi0tPuUuWQ\nNpuHfG/C4nC7w6zg2lna7X
sTFodH0mYFVm9pdzNtC9+bsDgc0mYF1m7bwvcmLA63O8wKrN22he9N\nWBwOabMCWzY8RKlOIDfTtvC9CYvB7Q6zAnPbov95JG1WYG5b9D+HtFnBuW3R3xzSZjnwbausWQ5p\nsx6rzG2uTJ2rzG0GHNR2EF84NOsxL8m2VjikzXrMS7KtFQ5psx7zkmxrhUParMc8t9la4QuHZj3m\nuc3WCoe0WQ48t9ma5ZA26wHPi7Z2OaTNuszzom0uHNJmXVIZPdfbpa4yL9ohbbNxSJt1SHVL49Ch\nQZ59YS9T09Hw9Z4Xbc1IZgqepKMl3S7pHkm7JP1J3jWZNav2NlYTk1MzBjR4XrQ1J6WR9F7ggoi4\nW9LLgG2SbouIe/IuzGw29ZZ6z8Tzoq1ZyYykI+LxiLg7+/7nwL2AG3ZWCK20LgS8/xc9Bc+ak0xI\nV5O0AlgF3FXn2HmStkraOj4+3uvSzA4yOlZigdT06wO4/T7/t2vNSandAYCklwI3AedHxL/XHo+I\njcBGgNWrV8/c9DPrgupZG6Icuq3yRUNrVlIhLWmQckBfGxGb867HDGaetdHuKMEXDa1ZyYS0JAFf\nAu6NiP+Zdz1mo2MlLr1lF8/smdr/3MTk1AzvaI4vGlorUupJnwh8GDhF0vbs6515F2XzU2VKXXVA\nt2NkeIizT1jOyPAQyh5/7oxjfdHQmpbMSDoivkv5wrdZ7lqdUlePgH+56JTOFGTzVkojabNkdOLC\nnvvO1gkOabM65hqw7jtbp7Qc0pI+JOl6SddKuk7Sb3WjMLM8nXzM0pbfM5DNlXbf2TqpnZ70r0fE\nmZUHkv4K+GrnSjLLX6PFJgtUDuOpfS9OvhsaHHAoW9e0E9KLJb0LeAQ4GnDjzfpOo570voCBBTA8\nNMjPJqe8gb91XTsh/d+BM4A3AY8CH+1oRWY5qyzzno76S1WmpoNDFi9k+6ff3uPKbD6aNaQl/WWj\nQ8BIRFzb2ZLM8lOZH90ooCu8rNt6pZmR9OnAp7pdiFkvzHavwWbnR3t6nfVKMyH9dER8peuVmHVZ\nvXsNnr9pO5fesot3vflV3H7feN1bXdXy9DrrpWZC2jvNWV9oNEp+Zs8U19z58IzvHZDYF+ELhdZz\nySwLN+u2ZkbJ9XiKneWpmZB+i6SD9nWmfOEwIuLlHa7JrC31+s1Awzt2N2PEI2fL2awhHREDvSjE\nbC7q9ZvX3biD6X3BvjYbdiPDQ94gyXLndof1hUtv2XVQv3m2u3XPxBcHLRXeYMkKb3SsNOd9n73n\ns6XKI2krvEtv2TWn9x+yaIDPrD22Q9WYdZZH0lZocx1FDywQn32fA9rS5ZG0FUrtDI5nn9/b9md5\n5oYVgUPaCqPeDI5WHbZkkE+/540OZisMh7Qlq3bUvOeFvW3fd/DBy97V4erMesMhbUnqxKi5YsSb\nIVmB+cKhJakTd+sGGByQ5ztboXkkbUnqxH7N7j9bP3BIW5KWDQ/Nab8NL+e2fuF2hyVp3ZqVDA22\nvm2Ml3Nbv/FI2pJUaVFcesuuGRerHLZkkCWLFja804pZ0SUV0pLeAXwBGACuiojLci7Jumi2W1kB\nPDe1r+H7hwYH3HO2vpdMSEsaAP4K+C+U70L+fUk3R8Q9+VZm3VBvit36zTuBF0fRM83w8GpBmy+S\nCWngbcD9EfETAEnXU74JrkO6D9UL4Mmpac7ftJ3zN21nZIYLhwJfGLR5I6ULhyPAI1WPH82esz4y\nOlbixMu+M+vMjdLEJGpwzHfqtvkkpZF0UySdB5wHsHz58pyrsVbUtjhmE2T3aKt6zrM3bL5JaSRd\nAo6uenxU9twBImJjRKyOiNVLly7tWXE2N6NjJS64YUfLqwgDvBm/zWspjaS/D7xe0msoh/OZwIfy\nLck6oTKCno7Wb2flhSk23yUT0hGxV9JHgS2Up+D9XUTM7ZYblpvq6XULpLYCWuDWhs17yYQ0
QET8\nPfD3eddhc1Pbe243oM86YblbGzbvJRXSVlxzGTlXLg4OZO/zHGizFzmkbc7mMnJ2IJvNzCFtc3bJ\nzbva2vvZFwXNZpfSFDwrmNGxEsddeisTk63frdvznc2a45C2tlRaHM0G9NknLPd8Z7M2uN1hbWnl\n9lYjw0N8Zu2xXa7IrD95JG1tafb2Vm5rmM2NQ9ra0swmR25rmM2d2x3WlNoN+k8+Zik3bSvVbXkM\nLhAb/utbHM5mHeCRtM2qcpGwNDFJUN5G9KZtJd7/iyOMZCPqAZU3Fh0ZHnJAm3WQR9I2q0Yb9F9z\n58OMDA9xxQePcyibdYlD2vYbHSsdcOPX2r2c66l32ysz6xy3OwwoB/S6G3cccGfuZhd3T05Ns2HL\n7u4UZjbPOaQNKLc0pqZb362uotkpeWbWGoe0AXMPWd930Kw7HNIGzC1kvWDFrHsc0gbAycc0f7/I\nBYLhoUHvw2HWA57dMQ/UztoYHhrkkve+cX+wjo6V2PT9R5r6rKHBAYeyWQ85pPtcZdZG9UXBickp\n1n1tB1CeNjfTRcPDlgyyZNHC/SsNvUG/WW85pPtcowCe2hds2LKbtatGZrxoOLFnirFPvb2bJZrZ\nDNyT7nOlGQK4Es4zXTT0rA2zfDmk+1xlT416KgG8bs1KBgcOft3gAnnWhlnO3O7oczPdFLYSwJUe\n80wXF80sHw7pPjcg1Q1q6cC9NtauGnEgmyXI7Y4+12gkPcMA28wS4pF0gdXbiP/2+8YPmC43MjxU\n9+LhiC8ImhWCR9IF9cnRnXxs0/YDNuK/5s6HD3i87ms7OPmYpQwNDhzwXi/jNiuOJEJa0gZJ90n6\ngaSvSxrOu6aUjY6VuPbOh2fdSnRqX/DNHY/zuTOOZWR4yMu4zQoolXbHbcD6iNgr6XJgPfDxnGtK\n1oYtu5ve63licsoXBc0KLImRdETcGhF7s4d3AkflWU/qWt1WdHSs1KVKzKzbUhlJV/sIsKnRQUnn\nAecBLF++vFc15a76IuGCBtPqGqks/zaz4ulZSEv6NvDKOocujohvZK+5GNgLXNvocyJiI7ARYPXq\n1fNiIlntJkmtBDT4rilmRdazkI6I02Y6Lukc4N3AqRGexVs9cpZg3xz+RLz/hllxJdHukPQO4ELg\n1yNiT9715G10rMT6zTuZnJoG5rbwxNPtzIotiZAGvggsBm5TeUOgOyPi9/MtKT8btuzeH9CtGh4a\nRCpvMer9n82KL4mQjojX5V1DStrtIY8MD/EvF53S4WrMLE9JhPR8Vru0e92alSxrsJR7Jm5rmPWn\nJOZJz1eV3nP1Uu71m3fWXco9OKADbv569gnLvYrQbB7wSDono2MlLrhhx0HT6Sanpvcv5a4dYTuE\nzeYfh3SPjY6VuOTmXUxMTjV8TeWY+8tm5nZHD1XaGzMFdMWGLbt7UJGZpc4h3UOtTK3zKkEzA4d0\nT7USvF4laGbgkO6pZoPX0+nMrMIh3UPr1qw8aGpdLU+nM7Nqnt3RQ5Xg3bBlN6WJyf138h7xFDsz\na8Ah3WO+S4qZtcLtDjOzhHkkPQe1+26cfMxSbr9v3KsEzaxjHNJtqt3zuTQxyTV3Prz/eGUfDsBB\nbWZtc7ujTZfcvGvWhSmTU9NeOWhmc+KRdBPqtTWaWdoNXjloZnPjkJ5FvbbGtVVtjdl45aCZzYXb\nHbOot99Gs7cc9MpBM5srj6TrqG5vtHIP2EMWDTC8ZJFnd5hZxzika9S2N5o1NDjAZ9/n5dxm1llu\nd9Ro907d3m/DzLrBIV2jndkYI8NDDmgz6wqHdI1WZ2P44qCZdZNDukYz24lWeFtRM+s2XzisUb2d\n6EyzO4RvFGtm3eeQrqN6O9ETL/sOpTp9ai9SMbNeSKrdIekCSSHpiLxrqajX/nAf2sx6JZmRtKSj\ngbcDza+57oHa9ocXqZhZLyUT0sDngQuBb+RdSC3fTcXM
8pJEu0PS6UApInY08drzJG2VtHV8fLwH\n1ZmZ5adnI2lJ3wZeWefQxcAnKLc6ZhURG4GNAKtXr25la42Dthx128LMUtezkI6I0+o9L+lY4DXA\nDkkARwF3S3pbRDzRqZ9fb8tR3znFzFKXe7sjInZGxJERsSIiVgCPAsd3MqCh/p4cvnOKmaUu95Du\nlUZ7cvjOKWaWsuRCOhtRP9Xpz220+MSLUswsZcmFdLd4UYqZFVFK86S7yotSzKyI5k1IgxelmFnx\nzJt2h5lZETmkzcwS5pA2M0uYQ9rMLGEOaTOzhCmipT2KkiJpHHioweEjgI4vismJzyU9/XIe4HPJ\ny6sjYulsLyp0SM9E0taIWJ13HZ3gc0lPv5wH+FxS53aHmVnCHNJmZgnr55DemHcBHeRzSU+/nAf4\nXJLWtz1pM7N+0M8jaTOzwnNIm5klrG9CWtKwpBsl3SfpXkm/JOlwSbdJ+lH2z8PyrnM2kj4maZek\nH0r6qqSXFOU8JP2dpCcl/bDquYa1S1ov6X5JuyWtyafq+hqcy4bsv68fSPq6pOGqY4U6l6pjF0gK\nSUdUPVe4c5H0R9m/m12S/rzq+WTPpWkR0RdfwFeAc7PvFwHDwJ8DF2XPXQRcnneds5zDCPAAMJQ9\nvgE4pyjnAfwacDzww6rn6tYO/AKwA1hM+UbEPwYG8j6HWc7l7cDC7PvLi3wu2fNHA1soLwg7oqjn\nApwMfBtYnD0+sgjn0uxXX4ykJR1K+V/elwAi4oWImABOpxzeZP9cm0+FLVkIDElaCCwBHqMg5xER\n/xd4uubpRrWfDlwfEc9HxAPA/cDbelJoE+qdS0TcGhF7s4d3Ur6zPRTwXDKfBy4EqmcPFPFc/gC4\nLCKez17zZPZ80ufSrL4Iacq/JceBL0sak3SVpEOAV0TE49lrngBekVuFTYiIEvAXwMPA48DPIuJW\nCnYeNRrVPgI8UvW6R7PniuIjwD9k3xfuXCSdDpQiYkfNocKdC/AG4Fcl3SXpnyS9NXu+iOdykH4J\n6YWU/xforyNiFfAs5f+13i/K//+T9HzDrF97OuVfOsuAQySdXf2aIpxHI0WuvZqki4G9wLV519IO\nSUuATwCfyruWDlkIHA6cAKwDbpCkfEvqnH4J6UeBRyPiruzxjZRD+6eSXgWQ/fPJBu9PxWnAAxEx\nHhFTwGbglyneeVRrVHuJck+04qjsuaRJOgd4N3BW9ksHincur6U8ENgh6UHK9d4t6ZUU71yg/Pd/\nc5R9D9hHeaOlIp7LQfoipCPiCeARSZVbf58K3APcDPxO9tzvAN/IobxWPAycIGlJNhI4FbiX4p1H\ntUa13wycKWmxpNcArwe+l0N9TZP0Dso93PdGxJ6qQ4U6l4jYGRFHRsSKiFhBOeSOz/4eFepcMqOU\nLx4i6Q2UJw48RTHP5WB5X7ns1BdwHLAV+AHlf2mHAf8J+EfgR5Sv/h6ed51NnMelwH3AD4GrKV+Z\nLsR5AF+l3EufovwX/3dnqh24mPIV993Ab+RdfxPncj/lHuf27OtvinouNccfJJvdUcRzoRzK12R/\nZ+4GTinCuTT75WXhZmYJ64t2h5lZv3JIm5klzCFtZpYwh7SZWcIc0mZmCXNIm5klzCFtlpH0KknX\nS9oq6f9Juj3vmswW5l2AWUKuBq6MiE0Ako7NuR4zL2YxA5A0ADwPHBXl5dFmSXC7wwyIiGnKy9Z3\nSPpbSSdWjqV6JxybHxzSZi/6DeD9wM+Ab0mq3KDg8/mVZPOde9JmmSj3/r4LfDcbPb9Z0nPAMZLW\nRcSGfCu0+cgjaTNA0hpJi7LvjwR+BbiN8paX1zigLS8OabOyDwD3StoBfBP4HxHxr8CbKd/M1CwX\nbneYARHxew0OPQWcK+mpiLi3lzWZgafgmZklze0OM7OEOaTNzBLmkDYzS5hD2swsYQ5pM7OEOaTN\nzBLmkDYzS5hD2sws
YQ5pM7OE/X/tRcYsgGPH/wAAAABJRU5ErkJggg==\n", 423 | "text/plain": [ 424 | "" 425 | ] 426 | }, 427 | "metadata": {}, 428 | "output_type": "display_data" 429 | } 430 | ], 431 | "source": [ 432 | "np.random.seed(42)\n", 433 | "strike_k = 95\n", 434 | "test_vol = 0.2\n", 435 | "test_mu = 0.03\n", 436 | "dt = 0.01\n", 437 | "rfr = 0.05\n", 438 | "num_paths = 100\n", 439 | "num_periods = 252\n", 440 | "\n", 441 | "hMC = DiscreteBlackScholes(100, strike_k, test_vol, 1., rfr, test_mu, num_periods, num_paths)\n", 442 | "hMC.gen_paths()\n", 443 | "\n", 444 | "t = hMC.numSteps - 1\n", 445 | "piNext = hMC.bVals[:, t+1] + 0.1 * hMC.sVals[:, t+1]\n", 446 | "pi_hat = piNext - np.mean(piNext)\n", 447 | "\n", 448 | "A_mat = hMC.function_A_vec(t)\n", 449 | "B_vec = hMC.function_B_vec(t, pi_hat)\n", 450 | "phi = np.dot(np.linalg.inv(A_mat), B_vec)\n", 451 | "opt_hedge = np.dot(hMC.data[t, :, :], phi)\n", 452 | "\n", 453 | "# plot the results\n", 454 | "fig = plt.figure(figsize=(12,4))\n", 455 | "ax1 = fig.add_subplot(121)\n", 456 | "\n", 457 | "ax1.scatter(hMC.sVals[:,t], pi_hat)\n", 458 | "ax1.set_title(r'Expected $\\Pi_0$ vs. 
$S_t$')\n", 459 | "ax1.set_xlabel(r'$S_t$')\n", 460 | "ax1.set_ylabel(r'$\\Pi_0$')" 461 | ] 462 | }, 463 | { 464 | "cell_type": "code", 465 | "execution_count": 16, 466 | "metadata": {}, 467 | "outputs": [ 468 | { 469 | "name": "stdout", 470 | "output_type": "stream", 471 | "text": [ 472 | "Submission successful, please check on the coursera grader page for the status\n" 473 | ] 474 | }, 475 | { 476 | "data": { 477 | "text/plain": [ 478 | "array([ 0.81274895, -3.49043554, 0.69994334, 1.61239986, -0.25153316,\n", 479 | " -3.19082265, 0.8848621 , -2.0380868 , 0.45033564, 3.74872863,\n", 480 | " -0.6568227 , 1.74148929, 0.94314331, -4.19716113, 1.72135256,\n", 481 | " -0.66188482, 6.95675041, -2.20512677, -0.14942482, 0.30067272,\n", 482 | " 3.33419402, 0.68536713, 1.65097153, 2.69898611, 1.22528159,\n", 483 | " 1.47188744, -2.48129898, -0.37360224, 0.81064666, -1.05269459,\n", 484 | " 0.02476551, -1.88267258, 0.11748169, -0.9038195 , 0.69753811,\n", 485 | " -0.54805029, 1.97594593, -0.44331403, 0.62134931, -1.86191032,\n", 486 | " -3.21226413, 2.24508097, -2.23451292, -0.13488281, 3.64364848,\n", 487 | " -0.11270281, -1.15582237, -3.30169455, 1.74454841, -1.10425448,\n", 488 | " 2.10192819, 1.80570507, -1.68587001, -1.42113397, -2.70292006,\n", 489 | " 0.79454199, -2.05396827, 3.13973887, -1.08786662, 0.42347686,\n", 490 | " 1.32787012, 0.55924965, -3.54140814, -3.70258632, 2.14853641,\n", 491 | " 1.11495458, 3.69639676, 0.62864736, -2.62282995, -0.05315552,\n", 492 | " 1.05789698, 1.8023196 , -3.35217374, -2.30436466, -2.68609519,\n", 493 | " 0.95284884, -1.35963013, -0.56273408, -0.08311276, 0.79044269,\n", 494 | " 0.46247485, -1.04921463, -2.18122285, 1.82920128, 1.05635272,\n", 495 | " 0.90161346, -1.93870347, -0.37549305, -1.96383274, 1.9772888 ,\n", 496 | " -1.37386984, 0.95230068, 0.88842589, -1.42214528, -2.60256696,\n", 497 | " -1.53509699, 4.47491253, 4.87735375, -0.19068803, -1.08711941])" 498 | ] 499 | }, 500 | "execution_count": 16, 501 | "metadata": {}, 
502 | "output_type": "execute_result" 503 | } 504 | ], 505 | "source": [ 506 | "### GRADED PART (DO NOT EDIT) ###\n", 507 | "\n", 508 | "part_1 = list(pi_hat)\n", 509 | "try:\n", 510 | " part1 = \" \".join(map(repr, part_1))\n", 511 | "except TypeError:\n", 512 | " part1 = repr(part_1)\n", 513 | "submissions[all_parts[0]]=part1\n", 514 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key,all_parts[:1],all_parts,submissions)\n", 515 | "pi_hat\n", 516 | "### GRADED PART (DO NOT EDIT) ###" 517 | ] 518 | }, 519 | { 520 | "cell_type": "code", 521 | "execution_count": 17, 522 | "metadata": {}, 523 | "outputs": [ 524 | { 525 | "name": "stdout", 526 | "output_type": "stream", 527 | "text": [ 528 | "[[ 100. 101.44740793 119.84140463 ..., 192.78653975 210.7076386\n", 529 | " 167.37134738]\n", 530 | " [ 100. 98.79378416 81.67103247 ..., 78.75163254\n", 531 | " 104.69106128 114.29766651]\n", 532 | " [ 100. 116.62110943 127.89787986 ..., 85.9631909 79.72061217\n", 533 | " 78.03372489]\n", 534 | " ..., \n", 535 | " [ 100. 106.73222875 103.49782882 ..., 108.30352919\n", 536 | " 96.76512324 114.08668191]\n", 537 | " [ 100. 96.45073828 98.70345177 ..., 89.5899346 75.07626471\n", 538 | " 91.91332688]\n", 539 | " [ 100. 
101.81014094 115.21893111 ..., 68.72837469\n", 540 | " 64.71929858 65.04500528]]\n", 541 | "X.shape = (50000, 7)\n", 542 | "X_min, X_max = 2.96880459823 6.37164911461\n", 543 | "num_basis = 12\n", 544 | "dim self.data = (7, 50000, 12)\n", 545 | "\n", 546 | "Time Cost of basis expansion: 96.63819098472595 seconds\n", 547 | "Option value = 13.1083499076\n", 548 | "Option value variance = 5.17079676287\n", 549 | "Option delta = -0.356133722933\n", 550 | "BS value 13.1458939003\n" 551 | ] 552 | } 553 | ], 554 | "source": [ 555 | "# input parameters\n", 556 | "s0 = 100.0\n", 557 | "strike = 100.0\n", 558 | "r = 0.05\n", 559 | "mu = 0.07 # 0.05\n", 560 | "vol = 0.4\n", 561 | "T = 1.0\n", 562 | "\n", 563 | "# Simulation Parameters\n", 564 | "numPaths = 50000 # number of Monte Carlo trials\n", 565 | "numSteps = 6\n", 566 | "\n", 567 | "# create the class object\n", 568 | "hMC = DiscreteBlackScholes(s0, strike, vol, T, r, mu, numSteps, numPaths)\n", 569 | "\n", 570 | "# calculation\n", 571 | "hMC.gen_paths()\n", 572 | "hMC.seed_intrinsic()\n", 573 | "option_val, delta, option_val_variance = hMC.roll_backward()\n", 574 | "bs_call_value = bs_put(0, s0, K=strike, r=r, sigma=vol, T=T)\n", 575 | "print('Option value = ', option_val)\n", 576 | "print('Option value variance = ', option_val_variance)\n", 577 | "print('Option delta = ', delta) \n", 578 | "print('BS value', bs_call_value)" 579 | ] 580 | }, 581 | { 582 | "cell_type": "code", 583 | "execution_count": 18, 584 | "metadata": {}, 585 | "outputs": [ 586 | { 587 | "name": "stdout", 588 | "output_type": "stream", 589 | "text": [ 590 | "Submission successful, please check on the coursera grader page for the status\n" 591 | ] 592 | }, 593 | { 594 | "data": { 595 | "text/plain": [ 596 | "13.10834990762385" 597 | ] 598 | }, 599 | "execution_count": 18, 600 | "metadata": {}, 601 | "output_type": "execute_result" 602 | } 603 | ], 604 | "source": [ 605 | "### GRADED PART (DO NOT EDIT) ###\n", 606 | "part2 = str(option_val)\n", 607 | 
"submissions[all_parts[1]]=part2\n", 608 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key,all_parts[:2],all_parts,submissions)\n", 609 | "option_val\n", 610 | "### GRADED PART (DO NOT EDIT) ###" 611 | ] 612 | }, 613 | { 614 | "cell_type": "code", 615 | "execution_count": 19, 616 | "metadata": {}, 617 | "outputs": [ 618 | { 619 | "name": "stdout", 620 | "output_type": "stream", 621 | "text": [ 622 | "[[ 100. 101.44740793 119.84140463 ..., 192.78653975 210.7076386\n", 623 | " 167.37134738]\n", 624 | " [ 100. 98.79378416 81.67103247 ..., 78.75163254\n", 625 | " 104.69106128 114.29766651]\n", 626 | " [ 100. 116.62110943 127.89787986 ..., 85.9631909 79.72061217\n", 627 | " 78.03372489]\n", 628 | " ..., \n", 629 | " [ 100. 106.73222875 103.49782882 ..., 108.30352919\n", 630 | " 96.76512324 114.08668191]\n", 631 | " [ 100. 96.45073828 98.70345177 ..., 89.5899346 75.07626471\n", 632 | " 91.91332688]\n", 633 | " [ 100. 101.81014094 115.21893111 ..., 68.72837469\n", 634 | " 64.71929858 65.04500528]]\n", 635 | "X.shape = (50000, 7)\n", 636 | "X_min, X_max = 2.96880459823 6.37164911461\n", 637 | "num_basis = 12\n", 638 | "dim self.data = (7, 50000, 12)\n", 639 | "\n", 640 | "Time Cost of basis expansion: 95.9269437789917 seconds\n" 641 | ] 642 | }, 643 | { 644 | "data": { 645 | "text/plain": [ 646 | "array([ 6.70326307, 8.59543726, 10.74614496, 13.1458939 ,\n", 647 | " 15.78197485, 18.63949388])" 648 | ] 649 | }, 650 | "execution_count": 19, 651 | "metadata": {}, 652 | "output_type": "execute_result" 653 | } 654 | ], 655 | "source": [ 656 | "strikes = np.linspace(85, 110, 6)\n", 657 | "results = [None] * len(strikes)\n", 658 | "bs_prices = np.zeros(len(strikes))\n", 659 | "bs_deltas = np.zeros(len(strikes))\n", 660 | "numPaths = 50000\n", 661 | "hMC = DiscreteBlackScholes(s0, strike, vol, T, r, mu, numSteps, numPaths)\n", 662 | "hMC.gen_paths()\n", 663 | "for ix, k_strike in enumerate(strikes):\n", 664 | " hMC.seed_intrinsic(k_strike)\n", 665 | " results[ix] = 
hMC.roll_backward()\n", 666 | " bs_prices[ix] = bs_put(0, s0, K=k_strike, r=r, sigma=vol, T=T)\n", 667 | " bs_deltas[ix] = norm.cdf(d1(s0, K=k_strike, r=r, sigma=vol, T=T)) - 1\n", 668 | "bs_prices" 669 | ] 670 | }, 671 | { 672 | "cell_type": "code", 673 | "execution_count": 20, 674 | "metadata": { 675 | "collapsed": true 676 | }, 677 | "outputs": [], 678 | "source": [ 679 | "mc_prices = np.array([x[0] for x in results])\n", 680 | "mc_deltas = np.array([x[1] for x in results])\n", 681 | "price_variances = np.array([x[-1] for x in results])\n", 682 | "prices_diff = mc_prices - bs_prices\n", 683 | "deltas_diff = mc_deltas - bs_deltas\n", 684 | "# price_variances" 685 | ] 686 | }, 687 | { 688 | "cell_type": "code", 689 | "execution_count": 21, 690 | "metadata": {}, 691 | "outputs": [ 692 | { 693 | "name": "stdout", 694 | "output_type": "stream", 695 | "text": [ 696 | "Submission successful, please check on the coursera grader page for the status\n" 697 | ] 698 | }, 699 | { 700 | "data": { 701 | "text/plain": [ 702 | "array([-0.03641511, -0.04034139, -0.03996597, -0.03754399, -0.03240009,\n", 703 | " -0.02997062])" 704 | ] 705 | }, 706 | "execution_count": 21, 707 | "metadata": {}, 708 | "output_type": "execute_result" 709 | } 710 | ], 711 | "source": [ 712 | "### GRADED PART (DO NOT EDIT) ###\n", 713 | "\n", 714 | "part_3 = list(prices_diff)\n", 715 | "try:\n", 716 | " part3 = \" \".join(map(repr, part_3))\n", 717 | "except TypeError:\n", 718 | " part3 = repr(part_3)\n", 719 | "submissions[all_parts[2]]=part3\n", 720 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key,all_parts[:3],all_parts,submissions)\n", 721 | "prices_diff\n", 722 | "### GRADED PART (DO NOT EDIT) ###" 723 | ] 724 | }, 725 | { 726 | "cell_type": "code", 727 | "execution_count": 22, 728 | "metadata": {}, 729 | "outputs": [ 730 | { 731 | "name": "stdout", 732 | "output_type": "stream", 733 | "text": [ 734 | "Submission successful, please check on the coursera grader page for the 
status\n" 735 | ] 736 | }, 737 | { 738 | "data": { 739 | "text/plain": [ 740 | "array([ 0.01279798, 0.01416019, 0.01532701, 0.01645681, 0.01715345,\n", 741 | " 0.01780652])" 742 | ] 743 | }, 744 | "execution_count": 22, 745 | "metadata": {}, 746 | "output_type": "execute_result" 747 | } 748 | ], 749 | "source": [ 750 | "### GRADED PART (DO NOT EDIT) ###\n", 751 | "part_4 = list(deltas_diff)\n", 752 | "try:\n", 753 | " part4 = \" \".join(map(repr, part_4))\n", 754 | "except TypeError:\n", 755 | " part4= repr(part_4)\n", 756 | "submissions[all_parts[3]]=part4\n", 757 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key,all_parts[:4],all_parts,submissions)\n", 758 | "deltas_diff\n", 759 | "### GRADED PART (DO NOT EDIT) ###" 760 | ] 761 | }, 762 | { 763 | "cell_type": "code", 764 | "execution_count": null, 765 | "metadata": { 766 | "collapsed": true 767 | }, 768 | "outputs": [], 769 | "source": [] 770 | } 771 | ], 772 | "metadata": { 773 | "coursera": { 774 | "course_slug": "reinforcement-learning-in-finance" 775 | }, 776 | "kernelspec": { 777 | "display_name": "Python 3", 778 | "language": "python", 779 | "name": "python3" 780 | }, 781 | "language_info": { 782 | "codemirror_mode": { 783 | "name": "ipython", 784 | "version": 3 785 | }, 786 | "file_extension": ".py", 787 | "mimetype": "text/x-python", 788 | "name": "python", 789 | "nbconvert_exporter": "python", 790 | "pygments_lexer": "ipython3", 791 | "version": "3.6.0" 792 | } 793 | }, 794 | "nbformat": 4, 795 | "nbformat_minor": 2 796 | } 797 | -------------------------------------------------------------------------------- /linear_regress_m1_ex2_v3.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Linear Regression\n", 8 | "\n", 9 | "Welcome to your first assignment. This exercise gives you a brief introduction to linear regression. 
The exercise is to be implemented in Python. Even if you've used Python before, this will help familiarize you with functions we'll need. \n", 10 | "\n", 11 | "**Instructions:**\n", 12 | "- You will be using Python 3.\n", 13 | "- Avoid using for-loops and while-loops, unless you are explicitly told to do so.\n", 14 | "- Do not modify the (# GRADED FUNCTION [function name]) comment in some cells. Your work would not be graded if you change this. Each cell containing that comment should only contain one function.\n", 15 | "- After coding your function, run the cell right below it to check if your result is correct.\n", 16 | "\n", 17 | "**After this assignment you will:**\n", 18 | "- Be able to implement linear regression model using statsmodels, scikit-learn, and tensorflow\n", 19 | "- Work with simulated non-linear dataset\n", 20 | "- Compare model performance (quality of fit) of both models\n", 21 | "\n", 22 | "Let's get started!" 23 | ] 24 | }, 25 | { 26 | "cell_type": "markdown", 27 | "metadata": {}, 28 | "source": [ 29 | "## About iPython Notebooks ##\n", 30 | "\n", 31 | "iPython Notebooks are interactive coding environments embedded in a webpage. You will be using iPython notebooks in this class. You only need to write code between the ### START CODE HERE ### and ### END CODE HERE ### comments. After writing your code, you can run the cell by either pressing \"SHIFT\"+\"ENTER\" or by clicking on \"Run Cell\" (denoted by a play symbol) in the upper bar of the notebook. \n", 32 | "\n", 33 | "We will often specify \"(≈ X lines of code)\" in the comments to tell you about how much code you need to write. It is just a rough estimate, so don't feel bad if your code is longer or shorter." 
34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": 1, 39 | "metadata": { 40 | "collapsed": true 41 | }, 42 | "outputs": [], 43 | "source": [ 44 | "import os\n", 45 | "import numpy as np\n", 46 | "\n", 47 | "import sys\n", 48 | "sys.path.append(\"..\")\n", 49 | "import grading\n", 50 | "\n", 51 | "try:\n", 52 | " import matplotlib.pyplot as plt\n", 53 | " %matplotlib inline\n", 54 | "except: pass\n", 55 | "\n", 56 | "import pandas as pd\n", 57 | "\n", 58 | "import tensorflow as tf\n", 59 | "from tensorflow.python.layers import core as core_layers\n", 60 | "try:\n", 61 | " from mpl_toolkits.mplot3d import Axes3D\n", 62 | "except: pass" 63 | ] 64 | }, 65 | { 66 | "cell_type": "code", 67 | "execution_count": 2, 68 | "metadata": { 69 | "collapsed": true 70 | }, 71 | "outputs": [], 72 | "source": [ 73 | "### ONLY FOR GRADING. DO NOT EDIT ###\n", 74 | "submissions=dict()\n", 75 | "assignment_key=\"QNZTAPW2Eeeg_w5MCivhhg\" \n", 76 | "all_parts=[\"dtA5d\", \"2inmf\", \"FCpek\",\"78aDd\",\"qlQVj\"]\n", 77 | "### ONLY FOR GRADING. 
DO NOT EDIT ###" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": 3, 83 | "metadata": { 84 | "collapsed": true 85 | }, 86 | "outputs": [], 87 | "source": [ 88 | "COURSERA_TOKEN = \" \" # the key provided to the Student under his/her email on submission page\n", 89 | "COURSERA_EMAIL = \" \" # the email" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": 4, 95 | "metadata": { 96 | "collapsed": true 97 | }, 98 | "outputs": [], 99 | "source": [ 100 | "def reset_graph(seed=42):\n", 101 | " \"\"\"\n", 102 | " Utility function to reset current tensorflow computation graph\n", 103 | " and set the random seed \n", 104 | " \"\"\"\n", 105 | " # to make results reproducible across runs\n", 106 | " tf.reset_default_graph()\n", 107 | " tf.set_random_seed(seed)\n", 108 | " np.random.seed(seed)" 109 | ] 110 | }, 111 | { 112 | "cell_type": "markdown", 113 | "metadata": { 114 | "collapsed": true 115 | }, 116 | "source": [ 117 | "## We use artificial data for the following two specifications of regression:\n", 118 | "\n", 119 | "### Linear Regression\n", 120 | "\n", 121 | "$ y(x) = a + b_1 \\cdot X_1 + b_2 \\cdot X_2 + b_3 \\cdot X_3 + \\sigma \\cdot \\varepsilon $ \n", 122 | "\n", 123 | "where $ \\varepsilon \\sim N(0, 1) $ is a Gaussian noise, and $ \\sigma $ is its volatility, \n", 124 | "with the following choice of parameters:\n", 125 | "\n", 126 | "$ a = 1.0 $\n", 127 | "\n", 128 | "$ b_1, b_2, b_3 = (0.5, 0.2, 0.1) $\n", 129 | "\n", 130 | "$ \\sigma = 0.1 $\n", 131 | "\n", 132 | "$ X_1, X_2, X_3 $ will be uniformly distributed in $ [-1,1] $\n", 133 | "\n", 134 | "### Non-Linear Regression\n", 135 | "\n", 136 | "$ y(x) = a + w_{00} \\cdot X_1 + w_{01} \\cdot X_2 + w_{02} \\cdot X_3 + w_{10} \\cdot X_1^2 \n", 137 | "+ w_{11} \\cdot X_2^2 + w_{12} \\cdot X_3^2 + \\sigma \\cdot \\varepsilon $ \n", 138 | "\n", 139 | "where\n", 140 | "\n", 141 | "$ w = [[1.0, 0.5, 0.2],[0.5, 0.3, 0.15]] $\n", 142 | "\n", 143 | "and the rest of
parameters is as above, with the same values of $ X_i $" 144 | ] 145 | }, 146 | { 147 | "cell_type": "markdown", 148 | "metadata": { 149 | "collapsed": true 150 | }, 151 | "source": [ 152 | "### Generate Data" 153 | ] 154 | }, 155 | { 156 | "cell_type": "code", 157 | "execution_count": 5, 158 | "metadata": {}, 159 | "outputs": [ 160 | { 161 | "data": { 162 | "text/plain": [ 163 | "((7500, 3), (7500, 1))" 164 | ] 165 | }, 166 | "execution_count": 5, 167 | "metadata": {}, 168 | "output_type": "execute_result" 169 | } 170 | ], 171 | "source": [ 172 | "def generate_data(n_points=10000, n_features=3, use_nonlinear=True, \n", 173 | " noise_std=0.1, train_test_split = 4):\n", 174 | " \"\"\"\n", 175 | " Arguments:\n", 176 | " n_points - number of data points to generate\n", 177 | " n_features - a positive integer - number of features\n", 178 | " use_nonlinear - if True, generate non-linear data\n", 179 | " train_test_split - an integer - what portion of data to use for testing\n", 180 | " \n", 181 | " Return:\n", 182 | " X_train, Y_train, X_test, Y_test, n_train, n_features\n", 183 | " \"\"\"\n", 184 | " \n", 185 | " # Linear data or non-linear data?\n", 186 | " if use_nonlinear:\n", 187 | " weights = np.array([[1.0, 0.5, 0.2],[0.5, 0.3, 0.15]])\n", 188 | " else:\n", 189 | " weights = np.array([1.0, 0.5, 0.2])\n", 190 | " \n", 191 | "\n", 192 | " \n", 193 | " bias = np.ones(n_points).reshape((-1,1))\n", 194 | " low = - np.ones((n_points,n_features),'float')\n", 195 | " high = np.ones((n_points,n_features),'float')\n", 196 | " \n", 197 | " np.random.seed(42)\n", 198 | " X = np.random.uniform(low=low, high=high)\n", 199 | " \n", 200 | " np.random.seed(42)\n", 201 | " noise = np.random.normal(size=(n_points, 1))\n", 202 | " noise_std = 0.1\n", 203 | " \n", 204 | " if use_nonlinear:\n", 205 | " Y = (weights[0,0] * bias + np.dot(X, weights[0, :]).reshape((-1,1)) + \n", 206 | " np.dot(X*X, weights[1, :]).reshape([-1,1]) +\n", 207 | " noise_std * noise)\n", 208 | " else:\n", 209 
| " Y = (weights[0] * bias + np.dot(X, weights[:]).reshape((-1,1)) + \n", 210 | " noise_std * noise)\n", 211 | " \n", 212 | " n_test = int(n_points/train_test_split)\n", 213 | " n_train = n_points - n_test\n", 214 | " \n", 215 | " X_train = X[:n_train,:]\n", 216 | " Y_train = Y[:n_train].reshape((-1,1))\n", 217 | "\n", 218 | " X_test = X[n_train:,:]\n", 219 | " Y_test = Y[n_train:].reshape((-1,1))\n", 220 | " \n", 221 | " return X_train, Y_train, X_test, Y_test, n_train, n_features\n", 222 | "\n", 223 | "X_train, Y_train, X_test, Y_test, n_train, n_features = generate_data(use_nonlinear=False)\n", 224 | "X_train.shape, Y_train.shape" 225 | ] 226 | }, 227 | { 228 | "cell_type": "markdown", 229 | "metadata": {}, 230 | "source": [ 231 | "### Linear Regression with Numpy" 232 | ] 233 | }, 234 | { 235 | "cell_type": "code", 236 | "execution_count": 7, 237 | "metadata": { 238 | "collapsed": true 239 | }, 240 | "outputs": [], 241 | "source": [ 242 | "# GRADED FUNCTION: numpy_lin_regress\n", 243 | "def numpy_lin_regress(X_train, Y_train):\n", 244 | " \"\"\"\n", 245 | " numpy_lin_regress - Implements linear regression model using numpy module\n", 246 | " Arguments:\n", 247 | " X_train - np.array of size (n by k) where n is number of observations \n", 248 | " of independent variables and k is number of variables\n", 249 | " Y_train - np.array of size (n by 1) where n is the number of observations of dependent variable\n", 250 | " \n", 251 | " Return:\n", 252 | " np.array of size (k+1 by 1) of regression coefficients\n", 253 | " \"\"\"\n", 254 | " ### START CODE HERE ### (≈ 3 lines of code)\n", 255 | " \n", 256 | " # number of features\n", 257 | " ndim = X_train.shape[1] \n", 258 | " \n", 259 | " # add the column of ones\n", 260 | " X_train = np.hstack((np.ones((X_train.shape[0], 1)), X_train))\n", 261 | " theta_numpy = np.linalg.inv(X_train.T.dot(X_train)).dot(X_train.T).dot(Y_train)\n", 262 | "\n", 263 | " \n", 264 | " # default answer, replace this\n", 265 | " # 
theta_numpy = np.array([0.] * (ndim + 1)) \n", 266 | " ### END CODE HERE ###\n", 267 | " return theta_numpy" 268 | ] 269 | }, 270 | { 271 | "cell_type": "code", 272 | "execution_count": 8, 273 | "metadata": {}, 274 | "outputs": [ 275 | { 276 | "name": "stdout", 277 | "output_type": "stream", 278 | "text": [ 279 | "Submission successful, please check on the coursera grader page for the status\n" 280 | ] 281 | }, 282 | { 283 | "data": { 284 | "text/plain": [ 285 | "array([ 0.99946227, 0.99579039, 0.499198 , 0.20019798])" 286 | ] 287 | }, 288 | "execution_count": 8, 289 | "metadata": {}, 290 | "output_type": "execute_result" 291 | } 292 | ], 293 | "source": [ 294 | "### GRADED PART (DO NOT EDIT) ###\n", 295 | "theta_numpy = numpy_lin_regress(X_train, Y_train)\n", 296 | "part_1 = list(theta_numpy.squeeze())\n", 297 | "try:\n", 298 | " part1 = \" \".join(map(repr, part_1))\n", 299 | "except TypeError:\n", 300 | " part1 = repr(part_1)\n", 301 | "submissions[all_parts[0]]=part1\n", 302 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key,all_parts[:1],all_parts,submissions)\n", 303 | "theta_numpy.squeeze()\n", 304 | "### GRADED PART (DO NOT EDIT) ###" 305 | ] 306 | }, 307 | { 308 | "cell_type": "markdown", 309 | "metadata": {}, 310 | "source": [ 311 | "### Linear Regression with Sklearn" 312 | ] 313 | }, 314 | { 315 | "cell_type": "code", 316 | "execution_count": 9, 317 | "metadata": { 318 | "collapsed": true 319 | }, 320 | "outputs": [], 321 | "source": [ 322 | "# GRADED FUNCTION: sklearn_lin_regress\n", 323 | "def sklearn_lin_regress(X_train, Y_train):\n", 324 | " \"\"\"\n", 325 | " Arguments:\n", 326 | " X_train - np.array of size (n by k) where n is number of observations \n", 327 | " of independent variables and k is number of variables\n", 328 | " Y_train - np.array of size (n by 1) where n is the number of observations of dependend variable\n", 329 | " \n", 330 | " Return:\n", 331 | " np.array of size (k+1 by 1) of regression coefficients\n", 332 | " 
\"\"\" \n", 333 | " ### START CODE HERE ### (≈ 3 lines of code)\n", 334 | " # use lin_reg to fit training data\n", 335 | " ndim = X_train.shape[1] \n", 336 | " ### START CODE HERE ### (≈ 3 lines of code)\n", 337 | " try:\n", 338 | " from sklearn.linear_model import LinearRegression\n", 339 | " except ImportError:\n", 340 | " raise(\"ImportError: No module named sklearn.linear_model found\")\n", 341 | " X_train = np.hstack((np.ones((X_train.shape[0], 1)), X_train))\n", 342 | " reg = LinearRegression(fit_intercept=False)\n", 343 | " reg.fit(X_train, Y_train)\n", 344 | " theta_sklearn = reg.coef_\n", 345 | "\n", 346 | " ### END CODE HERE ###\n", 347 | " return theta_sklearn" 348 | ] 349 | }, 350 | { 351 | "cell_type": "code", 352 | "execution_count": 10, 353 | "metadata": {}, 354 | "outputs": [ 355 | { 356 | "name": "stdout", 357 | "output_type": "stream", 358 | "text": [ 359 | "Submission successful, please check on the coursera grader page for the status\n" 360 | ] 361 | }, 362 | { 363 | "data": { 364 | "text/plain": [ 365 | "array([ 0.99946227, 0.99579039, 0.499198 , 0.20019798])" 366 | ] 367 | }, 368 | "execution_count": 10, 369 | "metadata": {}, 370 | "output_type": "execute_result" 371 | } 372 | ], 373 | "source": [ 374 | "### GRADED PART (DO NOT EDIT) ###\n", 375 | "theta_sklearn = sklearn_lin_regress(X_train, Y_train)\n", 376 | "part_2 = list(theta_sklearn.squeeze())\n", 377 | "try:\n", 378 | " part2 = \" \".join(map(repr, part_2))\n", 379 | "except TypeError:\n", 380 | " part2 = repr(part_2)\n", 381 | "submissions[all_parts[1]]=part2\n", 382 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key,all_parts[:2],all_parts,submissions)\n", 383 | "theta_sklearn.squeeze()\n", 384 | "### GRADED PART (DO NOT EDIT) ###" 385 | ] 386 | }, 387 | { 388 | "cell_type": "markdown", 389 | "metadata": {}, 390 | "source": [ 391 | "### Linear Regression with Tensorflow" 392 | ] 393 | }, 394 | { 395 | "cell_type": "code", 396 | "execution_count": 11, 397 | "metadata": { 
398 | "collapsed": true 399 | }, 400 | "outputs": [], 401 | "source": [ 402 | "# GRADED FUNCTION: tf_lin_regress\n", 403 | "def tf_lin_regress(X_train, Y_train):\n", 404 | " \"\"\"\n", 405 | " Arguments:\n", 406 | " X_train - np.array of size (n by k) where n is number of observations \n", 407 | " of independent variables and k is number of variables\n", 408 | " Y_train - np.array of size (n by 1) where n is the number of observations of dependend variable\n", 409 | " \n", 410 | " Return:\n", 411 | " np.array of size (k+1 by 1) of regression coefficients\n", 412 | " \"\"\"\n", 413 | " ### START CODE HERE ### (≈ 7-8 lines of code)\n", 414 | " # add the column of ones\n", 415 | " # define theta for later evaluation\n", 416 | " ndim = X_train.shape[1] \n", 417 | " ### START CODE HERE ### (≈ 7-8 lines of code)\n", 418 | " X_train = np.hstack((np.ones((X_train.shape[0], 1)), X_train))\n", 419 | " X = tf.constant(X_train, dtype=tf.float32, name=\"X\")\n", 420 | " Y = tf.constant(Y_train, dtype=tf.float32, name=\"Y\")\n", 421 | " XT = tf.transpose(X)\n", 422 | " theta = tf.matmul(tf.matmul(tf.matrix_inverse(tf.matmul(XT, X)), XT), Y)\n", 423 | " ### END CODE HERE ###\n", 424 | " with tf.Session() as sess:\n", 425 | " theta_value = theta.eval()\n", 426 | " return theta_value" 427 | ] 428 | }, 429 | { 430 | "cell_type": "code", 431 | "execution_count": 12, 432 | "metadata": {}, 433 | "outputs": [ 434 | { 435 | "name": "stdout", 436 | "output_type": "stream", 437 | "text": [ 438 | "Submission successful, please check on the coursera grader page for the status\n" 439 | ] 440 | }, 441 | { 442 | "data": { 443 | "text/plain": [ 444 | "array([ 0.99946201, 0.99579054, 0.49919799, 0.20019798], dtype=float32)" 445 | ] 446 | }, 447 | "execution_count": 12, 448 | "metadata": {}, 449 | "output_type": "execute_result" 450 | } 451 | ], 452 | "source": [ 453 | "### GRADED PART (DO NOT EDIT) ###\n", 454 | "theta_tf = tf_lin_regress(X_train, Y_train)\n", 455 | "part_3 = 
list(theta_tf.squeeze())\n", 456 | "try:\n", 457 | " part3 = \" \".join(map(repr, part_3))\n", 458 | "except TypeError:\n", 459 | " part3 = repr(part_3)\n", 460 | "submissions[all_parts[2]]=part3\n", 461 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key,all_parts[:3],all_parts,submissions)\n", 462 | "theta_tf.squeeze()\n", 463 | "### GRADED PART (DO NOT EDIT) ###" 464 | ] 465 | }, 466 | { 467 | "cell_type": "code", 468 | "execution_count": 13, 469 | "metadata": { 470 | "collapsed": true 471 | }, 472 | "outputs": [], 473 | "source": [ 474 | "class LinRegressNormalEq:\n", 475 | " \"\"\"\n", 476 | " class LinRegressNormalEq - implements normal equation, maximum likelihood estimator (MLE) solution\n", 477 | " \"\"\"\n", 478 | " def __init__(self, n_features, learning_rate=0.05, L=0):\n", 479 | " import math as m\n", 480 | " # input placeholders\n", 481 | " self.X = tf.placeholder(tf.float32, [None, n_features], name=\"X\") \n", 482 | " self.Y = tf.placeholder(tf.float32, [None, 1], name=\"Y\")\n", 483 | " \n", 484 | " # regression parameters for the analytical solution using the Normal equation\n", 485 | " self.theta_in = tf.placeholder(tf.float32, [n_features+1,None])\n", 486 | "\n", 487 | " # Augmented data matrix is obtained by adding a column of ones to the data matrix\n", 488 | " data_plus_bias = tf.concat([tf.ones([tf.shape(self.X)[0], 1]), self.X], axis=1)\n", 489 | " \n", 490 | " XT = tf.transpose(data_plus_bias)\n", 491 | " \n", 492 | " #############################################\n", 493 | " # The normal equation for Linear Regression\n", 494 | " \n", 495 | " self.theta = tf.matmul(tf.matmul(\n", 496 | " tf.matrix_inverse(tf.matmul(XT, data_plus_bias)), XT), self.Y)\n", 497 | " \n", 498 | " # mean square error in terms of theta = theta_in\n", 499 | " self.lr_mse = tf.reduce_mean(tf.square(\n", 500 | " tf.matmul(data_plus_bias, self.theta_in) - self.Y))\n", 501 | " \n", 502 | " #############################################\n", 503 | " # 
Estimate the model using the Maximum Likelihood Estimation (MLE)\n", 504 | " \n", 505 | " # regression parameters for the Maximum Likelihood method\n", 506 | " # Note that there are n_features+2 parameters, as one is added for the intercept, \n", 507 | " # and another one for the std of noise \n", 508 | " self.weights = tf.Variable(tf.random_normal([n_features+2, 1]))\n", 509 | " \n", 510 | " # prediction from the model\n", 511 | " self.output = tf.matmul(data_plus_bias, self.weights[:-1, :])\n", 512 | "\n", 513 | " gauss = tf.distributions.Normal(loc=0.0, scale=1.0)\n", 514 | "\n", 515 | " # Standard deviation of the Gaussian noise is modelled as a square of the \n", 516 | " # last model weight\n", 517 | " sigma = 0.0001 + tf.square(self.weights[-1]) \n", 518 | " \n", 519 | " # though a constant sqrt(2*pi) is not needed to find the best parameters, here we keep it\n", 520 | " # to get the value of the log-LL right \n", 521 | " pi = tf.constant(m.pi)\n", 522 | " \n", 523 | " log_LL = tf.log(0.00001 + (1/( tf.sqrt(2*pi)*sigma)) * gauss.prob((self.Y - self.output) / sigma )) \n", 524 | " self.loss = - tf.reduce_mean(log_LL)\n", 525 | " \n", 526 | " self.train_step = (tf.train.AdamOptimizer(learning_rate).minimize(self.loss), -self.loss)" 527 | ] 528 | }, 529 | { 530 | "cell_type": "code", 531 | "execution_count": 14, 532 | "metadata": { 533 | "collapsed": true 534 | }, 535 | "outputs": [], 536 | "source": [ 537 | "# GRADED FUNCTION: run_normal_eq\n", 538 | "def run_normal_eq(X_train, Y_train, X_test, Y_test, learning_rate=0.05):\n", 539 | " \"\"\"\n", 540 | " Implements normal equation using tensorflow, trains the model using training data set\n", 541 | " Tests the model quality by computing mean square error (MSE) of the test data set\n", 542 | " \n", 543 | " Arguments:\n", 544 | " X_train - np.array of size (n by k) where n is number of observations \n", 545 | " of independent variables and k is number of variables\n", 546 | " Y_train - np.array of size (n by 1) 
where n is the number of observations of dependend variable\n", 547 | " \n", 548 | " X_test - np.array of size (n by k) where n is number of observations \n", 549 | " of independent variables and k is number of variables\n", 550 | " Y_test - np.array of size (n by 1) where n is the number of observations of dependend variable\n", 551 | " \n", 552 | " \n", 553 | " Return a tuple of:\n", 554 | " - np.array of size (k+1 by 1) of regression coefficients\n", 555 | " - mean square error (MSE) of the test data set\n", 556 | " - mean square error (MSE) of the training data set\n", 557 | " \"\"\"\n", 558 | " # create an instance of the Linear Regression model class \n", 559 | " ndim = X_train.shape[1]\n", 560 | " lr_mse_train = 0.\n", 561 | " lr_mse_test = 0.\n", 562 | " ### START CODE HERE ### (≈ 20 lines of code)\n", 563 | " X = tf.placeholder(tf.float32, [None, ndim], name=\"X\")\n", 564 | " Y = tf.placeholder(tf.float32, [None, 1], name=\"Y\")\n", 565 | " theta_in = tf.placeholder(tf.float32, [ndim + 1, None])\n", 566 | " data_plus_bias = tf.concat([tf.ones([tf.shape(X)[0], 1]), X], axis=1)\n", 567 | " XT = tf.transpose(data_plus_bias)\n", 568 | " \n", 569 | " theta = tf.matmul(tf.matmul(tf.matrix_inverse(tf.matmul(XT, data_plus_bias)), XT), Y)\n", 570 | " lr_mse = tf.reduce_mean(tf.square(tf.matmul(data_plus_bias, theta_in) - Y))\n", 571 | " \n", 572 | " with tf.Session() as sess:\n", 573 | " sess.run(tf.global_variables_initializer())\n", 574 | " \n", 575 | " theta_value = sess.run(theta, feed_dict={X: X_train, Y: Y_train})\n", 576 | " lr_mse_train = sess.run(lr_mse, feed_dict={X: X_train, Y: Y_train, theta_in: theta_value})\n", 577 | " lr_mse_test = sess.run(lr_mse, feed_dict={X: X_train, Y: Y_train, theta_in: theta_value})\n", 578 | " ### END CODE HERE ###\n", 579 | " return theta_value, lr_mse_train, lr_mse_test\n", 580 | "\n", 581 | "### (DO NOT EDIT) ###\n", 582 | "theta_value, lr_mse_train, lr_mse_test = run_normal_eq(X_train, Y_train, X_test, Y_test)\n", 583 | 
"### (DO NOT EDIT) ###" 584 | ] 585 | }, 586 | { 587 | "cell_type": "code", 588 | "execution_count": 15, 589 | "metadata": {}, 590 | "outputs": [ 591 | { 592 | "name": "stdout", 593 | "output_type": "stream", 594 | "text": [ 595 | "Submission successful, please check on the coursera grader page for the status\n" 596 | ] 597 | }, 598 | { 599 | "data": { 600 | "text/plain": [ 601 | "array([ 0.99946201, 0.99579054, 0.49919799, 0.20019798], dtype=float32)" 602 | ] 603 | }, 604 | "execution_count": 15, 605 | "metadata": {}, 606 | "output_type": "execute_result" 607 | } 608 | ], 609 | "source": [ 610 | "### GRADED PART (DO NOT EDIT) ###\n", 611 | "part_4 = list(theta_value.squeeze())\n", 612 | "try:\n", 613 | " part4 = \" \".join(map(repr, part_4))\n", 614 | "except TypeError:\n", 615 | " part4 = repr(part_4)\n", 616 | "submissions[all_parts[3]]=part4\n", 617 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key,all_parts[:4],all_parts,submissions)\n", 618 | "theta_value.squeeze()\n", 619 | "### GRADED PART (DO NOT EDIT) ###" 620 | ] 621 | }, 622 | { 623 | "cell_type": "code", 624 | "execution_count": 16, 625 | "metadata": { 626 | "collapsed": true 627 | }, 628 | "outputs": [], 629 | "source": [ 630 | "# GRADED FUNCTION: run_mle# GRADED \n", 631 | "def run_mle(X_train, Y_train, X_test, Y_test, learning_rate=0.05, num_iter=5000):\n", 632 | " \"\"\"\n", 633 | " Maximum likelihood Estimate (MLE)\n", 634 | " Tests the model quality by computing mean square error (MSE) of the test data set\n", 635 | " \n", 636 | " Arguments:\n", 637 | " X_train - np.array of size (n by k) where n is number of observations \n", 638 | " of independent variables and k is number of variables\n", 639 | " Y_train - np.array of size (n by 1) where n is the number of observations of dependend variable\n", 640 | " \n", 641 | " X_test - np.array of size (n by k) where n is number of observations \n", 642 | " of independent variables and k is number of variables\n", 643 | " Y_test - np.array 
of size (n by 1) where n is the number of observations of dependend variable\n", 644 | " \n", 645 | " \n", 646 | " Return a tuple of:\n", 647 | " - np.array of size (k+1 by 1) of regression coefficients\n", 648 | " - mean square error (MSE) of the test data set\n", 649 | " - mean square error (MSE) of the training data set\n", 650 | " \"\"\"\n", 651 | " # create an instance of the Linear Regression model class \n", 652 | " n_features = X_train.shape[1]\n", 653 | " model = LinRegressNormalEq(n_features=n_features, learning_rate=learning_rate)\n", 654 | " \n", 655 | " # train the model\n", 656 | " with tf.Session() as sess:\n", 657 | " sess.run(tf.global_variables_initializer())\n", 658 | "\n", 659 | " # Now train the MLE parameters \n", 660 | " for _ in range(num_iter):\n", 661 | " (_ , loss), weights = sess.run((model.train_step, model.weights), feed_dict={\n", 662 | " model.X: X_train,\n", 663 | " model.Y: Y_train\n", 664 | " })\n", 665 | "\n", 666 | " # make test_prediction\n", 667 | " Y_test_predicted = sess.run(model.output, feed_dict={model.X: X_test})\n", 668 | "\n", 669 | " # output std sigma is a square of the last weight\n", 670 | " std_model = weights[-1]**2 \n", 671 | " sess.close()\n", 672 | " return weights[0:-1].squeeze(), loss, std_model\n", 673 | "\n", 674 | "weights, loss, std_model = run_mle(X_train, Y_train, X_test, Y_test)" 675 | ] 676 | }, 677 | { 678 | "cell_type": "code", 679 | "execution_count": 17, 680 | "metadata": {}, 681 | "outputs": [ 682 | { 683 | "name": "stdout", 684 | "output_type": "stream", 685 | "text": [ 686 | "Submission successful, please check on the coursera grader page for the status\n" 687 | ] 688 | }, 689 | { 690 | "data": { 691 | "text/plain": [ 692 | "array([ 0.99974787, 0.99632716, 0.49939191, 0.20030148], dtype=float32)" 693 | ] 694 | }, 695 | "execution_count": 17, 696 | "metadata": {}, 697 | "output_type": "execute_result" 698 | } 699 | ], 700 | "source": [ 701 | "### GRADED PART (DO NOT EDIT) ###\n", 702 | "part_5 
= list(weights.squeeze())\n", 703 | "try:\n", 704 | " part5 = \" \".join(map(repr, part_5))\n", 705 | "except TypeError:\n", 706 | " part5 = repr(part_5)\n", 707 | "submissions[all_parts[4]]=part5\n", 708 | "grading.submit(COURSERA_EMAIL, COURSERA_TOKEN, assignment_key,all_parts[:5],all_parts,submissions)\n", 709 | "weights.squeeze()\n", 710 | "### GRADED PART (DO NOT EDIT) ###" 711 | ] 712 | } 713 | ], 714 | "metadata": { 715 | "anaconda-cloud": {}, 716 | "celltoolbar": "Edit Metadata", 717 | "coursera": { 718 | "course_slug": "guided-tour-machine-learning-finance", 719 | "graded_item_id": "dX8oQ", 720 | "launcher_item_id": "7Z9sN" 721 | }, 722 | "kernelspec": { 723 | "display_name": "Python 3", 724 | "language": "python", 725 | "name": "python3" 726 | }, 727 | "language_info": { 728 | "codemirror_mode": { 729 | "name": "ipython", 730 | "version": 3 731 | }, 732 | "file_extension": ".py", 733 | "mimetype": "text/x-python", 734 | "name": "python", 735 | "nbconvert_exporter": "python", 736 | "pygments_lexer": "ipython3", 737 | "version": "3.6.0" 738 | } 739 | }, 740 | "nbformat": 4, 741 | "nbformat_minor": 1 742 | } 743 | --------------------------------------------------------------------------------