├── environment.yml └── pytorch_vs_tf_simple_model.ipynb /environment.yml: -------------------------------------------------------------------------------- 1 | name: dl 2 | channels: 3 | - soumith 4 | - defaults 5 | dependencies: 6 | - appnope=0.1.0=py36_0 7 | - bleach=1.5.0=py36_0 8 | - cffi=1.10.0=py36_0 9 | - cycler=0.10.0=py36_0 10 | - decorator=4.0.11=py36_0 11 | - entrypoints=0.2.2=py36_1 12 | - freetype=2.5.5=2 13 | - html5lib=0.999=py36_0 14 | - icu=54.1=0 15 | - ipykernel=4.6.1=py36_0 16 | - ipython=6.1.0=py36_0 17 | - ipython_genutils=0.2.0=py36_0 18 | - ipywidgets=6.0.0=py36_0 19 | - jbig=2.1=0 20 | - jedi=0.10.2=py36_2 21 | - jinja2=2.9.6=py36_0 22 | - jpeg=9b=0 23 | - jsonschema=2.6.0=py36_0 24 | - jupyter=1.0.0=py36_3 25 | - jupyter_client=5.0.1=py36_0 26 | - jupyter_console=5.1.0=py36_0 27 | - jupyter_core=4.3.0=py36_0 28 | - libpng=1.6.27=0 29 | - libtiff=4.0.6=3 30 | - markupsafe=0.23=py36_2 31 | - matplotlib=2.0.2=np113py36_0 32 | - mistune=0.7.4=py36_0 33 | - mkl=2017.0.1=0 34 | - nbconvert=5.2.1=py36_0 35 | - nbformat=4.3.0=py36_0 36 | - notebook=5.0.0=py36_0 37 | - numpy=1.13.0=py36_0 38 | - olefile=0.44=py36_0 39 | - openssl=1.0.2l=0 40 | - pandocfilters=1.4.1=py36_0 41 | - path.py=10.3.1=py36_0 42 | - pexpect=4.2.1=py36_0 43 | - pickleshare=0.7.4=py36_0 44 | - pillow=4.1.1=py36_0 45 | - pip=9.0.1=py36_1 46 | - prompt_toolkit=1.0.14=py36_0 47 | - ptyprocess=0.5.1=py36_0 48 | - pycparser=2.17=py36_0 49 | - pygments=2.2.0=py36_0 50 | - pyparsing=2.1.4=py36_0 51 | - pyqt=5.6.0=py36_2 52 | - python=3.6.1=2 53 | - python-dateutil=2.6.0=py36_0 54 | - pytz=2017.2=py36_0 55 | - pyzmq=16.0.2=py36_0 56 | - qt=5.6.2=2 57 | - qtconsole=4.3.0=py36_0 58 | - readline=6.2=2 59 | - setuptools=27.2.0=py36_0 60 | - simplegeneric=0.8.1=py36_1 61 | - sip=4.18=py36_0 62 | - six=1.10.0=py36_0 63 | - sqlite=3.13.0=0 64 | - terminado=0.6=py36_0 65 | - testpath=0.3.1=py36_0 66 | - tk=8.5.18=0 67 | - tornado=4.5.1=py36_0 68 | - traitlets=4.3.2=py36_0 69 | - wcwidth=0.1.7=py36_0 70 | - wheel=0.29.0=py36_0 71 | - widgetsnbextension=2.0.0=py36_0 72 | - xz=5.2.2=1 73 | - zlib=1.2.8=3 74 | - pytorch=0.1.12=py36_2cu75 75 | - torchvision=0.1.8=py36_2 76 | - pip: 77 | - awscli==1.11.99 78 | - botocore==1.5.62 79 | - colorama==0.3.7 80 | - docutils==0.13.1 81 | - ipython-genutils==0.2.0 82 | - jmespath==0.9.3 83 | - jupyter-client==5.0.1 84 | - jupyter-console==5.1.0 85 | - jupyter-core==4.3.0 86 | - prompt-toolkit==1.0.14 87 | - protobuf==3.3.0 88 | - rsa==3.4.2 89 | - s3transfer==0.1.10 90 | - tensorflow==1.1.0 91 | - torch==0.1.12.post2 92 | - werkzeug==0.12.2 93 | prefix: /Users/kdubovikov/miniconda3/envs/dl 94 | 95 | -------------------------------------------------------------------------------- /pytorch_vs_tf_simple_model.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "This is a companion notebook for my post here. The notebook outlines similarities and differences between Pytorch and Tensorflow. \n", 8 | "\n", 9 | "First, let's create a simple SGD approximator for $f(x) = x^{exp}$, where $exp$ is a model parameter." 
10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "# Plain Pytorch" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": 41, 22 | "metadata": {}, 23 | "outputs": [ 24 | { 25 | "name": "stdout", 26 | "output_type": "stream", 27 | "text": [ 28 | "Iteration 0\n", 29 | "loss = 31486.984375\n", 30 | "exp = 4.0\n", 31 | "Iteration 1\n", 32 | "loss = 14398.49609375\n", 33 | "exp = 3.64985728263855\n", 34 | "Iteration 2\n", 35 | "loss = 9984.84765625\n", 36 | "exp = 3.4879090785980225\n", 37 | "Iteration 3\n", 38 | "loss = 7705.31103515625\n", 39 | "exp = 3.3744404315948486\n", 40 | "Iteration 4\n", 41 | "loss = 6282.35546875\n", 42 | "exp = 3.2859721183776855\n", 43 | "Iteration 5\n", 44 | "loss = 5300.4287109375\n", 45 | "exp = 3.2130908966064453\n", 46 | "Iteration 6\n", 47 | "loss = 4578.4267578125\n", 48 | "exp = 3.1509552001953125\n", 49 | "Iteration 7\n", 50 | "loss = 4023.53076171875\n", 51 | "exp = 3.0967135429382324\n", 52 | "Iteration 8\n", 53 | "loss = 3582.866455078125\n", 54 | "exp = 3.0485341548919678\n", 55 | "Iteration 9\n", 56 | "loss = 3223.950927734375\n", 57 | "exp = 3.0051655769348145\n", 58 | "Iteration 10\n", 59 | "loss = 2925.66259765625\n", 60 | "exp = 2.965712785720825\n", 61 | "Iteration 11\n", 62 | "loss = 2673.640625\n", 63 | "exp = 2.9295122623443604\n", 64 | "Iteration 12\n", 65 | "loss = 2457.76611328125\n", 66 | "exp = 2.8960580825805664\n", 67 | "Iteration 13\n", 68 | "loss = 2270.693359375\n", 69 | "exp = 2.864955186843872\n", 70 | "Iteration 14\n", 71 | "loss = 2106.953857421875\n", 72 | "exp = 2.835888624191284\n", 73 | "Iteration 15\n", 74 | "loss = 1962.39306640625\n", 75 | "exp = 2.8086037635803223\n", 76 | "Iteration 16\n", 77 | "loss = 1833.79248046875\n", 78 | "exp = 2.782891035079956\n", 79 | "Iteration 17\n", 80 | "loss = 1718.62158203125\n", 81 | "exp = 2.7585763931274414\n", 82 | "Iteration 18\n", 83 | "loss = 1614.8603515625\n", 84 | "exp = 2.735513210296631\n", 85 | "Iteration 19\n", 86 | "loss = 1520.87744140625\n", 87 | "exp = 2.7135770320892334\n", 88 | "Iteration 20\n", 89 | "loss = 1435.34033203125\n", 90 | "exp = 2.6926612854003906\n", 91 | "Iteration 21\n", 92 | "loss = 1357.1495361328125\n", 93 | "exp = 2.6726739406585693\n", 94 | "Iteration 22\n", 95 | "loss = 1285.3892822265625\n", 96 | "exp = 2.6535348892211914\n", 97 | "Iteration 23\n", 98 | "loss = 1219.291259765625\n", 99 | "exp = 2.63517427444458\n", 100 | "Iteration 24\n", 101 | "loss = 1158.205078125\n", 102 | "exp = 2.617530345916748\n", 103 | "Iteration 25\n", 104 | "loss = 1101.576904296875\n", 105 | "exp = 2.600548505783081\n", 106 | "Iteration 26\n", 107 | "loss = 1048.931396484375\n", 108 | "exp = 2.5841801166534424\n", 109 | "Iteration 27\n", 110 | "loss = 999.8600463867188\n", 111 | "exp = 2.5683820247650146\n", 112 | "Iteration 28\n", 113 | "loss = 954.0079956054688\n", 114 | "exp = 2.5531153678894043\n", 115 | "Iteration 29\n", 116 | "loss = 911.0660400390625\n", 117 | "exp = 2.5383450984954834\n", 118 | "Iteration 30\n", 119 | "loss = 870.763427734375\n", 120 | "exp = 2.5240395069122314\n", 121 | "Iteration 31\n", 122 | "loss = 832.8624877929688\n", 123 | "exp = 2.5101699829101562\n", 124 | "Iteration 32\n", 125 | "loss = 797.1533813476562\n", 126 | "exp = 2.4967105388641357\n", 127 | "Iteration 33\n", 128 | "loss = 763.4500732421875\n", 129 | "exp = 2.4836373329162598\n", 130 | "Iteration 34\n", 131 | "loss = 731.5864868164062\n", 132 | "exp = 2.470928430557251\n", 133 | "Iteration 35\n", 134 | "loss = 
701.4149169921875\n", 135 | "exp = 2.458563804626465\n", 136 | "Iteration 36\n", 137 | "loss = 672.8035888671875\n", 138 | "exp = 2.4465253353118896\n", 139 | "Iteration 37\n", 140 | "loss = 645.6333618164062\n", 141 | "exp = 2.43479585647583\n", 142 | "Iteration 38\n", 143 | "loss = 619.7974243164062\n", 144 | "exp = 2.4233598709106445\n", 145 | "Iteration 39\n", 146 | "loss = 595.19921875\n", 147 | "exp = 2.412202835083008\n", 148 | "Iteration 40\n", 149 | "loss = 571.7513427734375\n", 150 | "exp = 2.4013113975524902\n", 151 | "Iteration 41\n", 152 | "loss = 549.3745727539062\n", 153 | "exp = 2.3906731605529785\n", 154 | "Iteration 42\n", 155 | "loss = 527.9961547851562\n", 156 | "exp = 2.3802762031555176\n", 157 | "Iteration 43\n", 158 | "loss = 507.5509033203125\n", 159 | "exp = 2.370110034942627\n", 160 | "Iteration 44\n", 161 | "loss = 487.9783630371094\n", 162 | "exp = 2.3601644039154053\n", 163 | "Iteration 45\n", 164 | "loss = 469.2232971191406\n", 165 | "exp = 2.3504297733306885\n", 166 | "Iteration 46\n", 167 | "loss = 451.2353820800781\n", 168 | "exp = 2.34089732170105\n", 169 | "Iteration 47\n", 170 | "loss = 433.9684753417969\n", 171 | "exp = 2.3315589427948\n", 172 | "Iteration 48\n", 173 | "loss = 417.37933349609375\n", 174 | "exp = 2.322406530380249\n", 175 | "Iteration 49\n", 176 | "loss = 401.4288635253906\n", 177 | "exp = 2.3134329319000244\n", 178 | "Iteration 50\n", 179 | "loss = 386.0806884765625\n", 180 | "exp = 2.304631233215332\n", 181 | "Iteration 51\n", 182 | "loss = 371.30084228515625\n", 183 | "exp = 2.295994758605957\n", 184 | "Iteration 52\n", 185 | "loss = 357.0584716796875\n", 186 | "exp = 2.287517547607422\n", 187 | "Iteration 53\n", 188 | "loss = 343.3243713378906\n", 189 | "exp = 2.279193639755249\n", 190 | "Iteration 54\n", 191 | "loss = 330.0716552734375\n", 192 | "exp = 2.271017551422119\n", 193 | "Iteration 55\n", 194 | "loss = 317.2752380371094\n", 195 | "exp = 2.262984037399292\n", 196 | "Iteration 56\n", 197 | "loss = 304.9120788574219\n", 198 | "exp = 2.2550883293151855\n", 199 | "Iteration 57\n", 200 | "loss = 292.9603271484375\n", 201 | "exp = 2.2473256587982178\n", 202 | "Iteration 58\n", 203 | "loss = 281.39947509765625\n", 204 | "exp = 2.2396914958953857\n", 205 | "Iteration 59\n", 206 | "loss = 270.2108459472656\n", 207 | "exp = 2.2321817874908447\n", 208 | "Iteration 60\n", 209 | "loss = 259.376220703125\n", 210 | "exp = 2.224792242050171\n", 211 | "Iteration 61\n", 212 | "loss = 248.87936401367188\n", 213 | "exp = 2.2175192832946777\n", 214 | "Iteration 62\n", 215 | "loss = 238.70437622070312\n", 216 | "exp = 2.2103590965270996\n", 217 | "Iteration 63\n", 218 | "loss = 228.83676147460938\n", 219 | "exp = 2.203308343887329\n", 220 | "Iteration 64\n", 221 | "loss = 219.2627410888672\n", 222 | "exp = 2.196363687515259\n", 223 | "Iteration 65\n", 224 | "loss = 209.96939086914062\n", 225 | "exp = 2.1895220279693604\n", 226 | "Iteration 66\n", 227 | "loss = 200.944091796875\n", 228 | "exp = 2.1827800273895264\n", 229 | "Iteration 67\n", 230 | "loss = 192.1757049560547\n", 231 | "exp = 2.1761350631713867\n", 232 | "Iteration 68\n", 233 | "loss = 183.6532440185547\n", 234 | "exp = 2.169584274291992\n", 235 | "Iteration 69\n", 236 | "loss = 175.36647033691406\n", 237 | "exp = 2.1631250381469727\n", 238 | "Iteration 70\n", 239 | "loss = 167.3055877685547\n", 240 | "exp = 2.156754732131958\n", 241 | "Iteration 71\n", 242 | "loss = 159.4615478515625\n", 243 | "exp = 2.1504709720611572\n", 244 | "Iteration 72\n", 245 | "loss = 151.82554626464844\n", 
246 | "exp = 2.1442713737487793\n", 247 | "Iteration 73\n", 248 | "loss = 144.38954162597656\n", 249 | "exp = 2.1381537914276123\n", 250 | "Iteration 74\n", 251 | "loss = 137.1457061767578\n", 252 | "exp = 2.1321160793304443\n", 253 | "Iteration 75\n", 254 | "loss = 130.0865936279297\n", 255 | "exp = 2.1261560916900635\n", 256 | "Iteration 76\n", 257 | "loss = 123.20499420166016\n", 258 | "exp = 2.120271682739258\n", 259 | "Iteration 77\n", 260 | "loss = 116.49455261230469\n", 261 | "exp = 2.1144611835479736\n", 262 | "Iteration 78\n", 263 | "loss = 109.94884490966797\n", 264 | "exp = 2.108722686767578\n", 265 | "Iteration 79\n", 266 | "loss = 103.56170654296875\n", 267 | "exp = 2.1030542850494385\n", 268 | "Iteration 80\n", 269 | "loss = 97.32771301269531\n", 270 | "exp = 2.09745454788208\n", 271 | "Iteration 81\n", 272 | "loss = 91.24112701416016\n", 273 | "exp = 2.09192156791687\n", 274 | "Iteration 82\n", 275 | "loss = 85.29692077636719\n", 276 | "exp = 2.086453914642334\n", 277 | "Iteration 83\n", 278 | "loss = 79.48998260498047\n", 279 | "exp = 2.081049919128418\n", 280 | "Iteration 84\n", 281 | "loss = 73.81563568115234\n", 282 | "exp = 2.0757081508636475\n", 283 | "Iteration 85\n", 284 | "loss = 68.26936340332031\n", 285 | "exp = 2.070427179336548\n", 286 | "Iteration 86\n", 287 | "loss = 62.8468017578125\n", 288 | "exp = 2.0652055740356445\n", 289 | "Iteration 87\n", 290 | "loss = 57.54397964477539\n", 291 | "exp = 2.060042142868042\n", 292 | "Iteration 88\n", 293 | "loss = 52.35681915283203\n", 294 | "exp = 2.0549354553222656\n", 295 | "Iteration 89\n", 296 | "loss = 47.28160095214844\n", 297 | "exp = 2.04988431930542\n", 298 | "Iteration 90\n", 299 | "loss = 42.31474304199219\n", 300 | "exp = 2.0448875427246094\n", 301 | "Iteration 91\n", 302 | "loss = 37.452754974365234\n", 303 | "exp = 2.0399439334869385\n", 304 | "Iteration 92\n", 305 | "loss = 32.69252014160156\n", 306 | "exp = 2.035052537918091\n", 307 | "Iteration 93\n", 308 | "loss = 28.0305233001709\n", 309 | "exp = 2.030211925506592\n", 310 | "Iteration 94\n", 311 | "loss = 23.46407699584961\n", 312 | "exp = 2.025421380996704\n", 313 | "Iteration 95\n", 314 | "loss = 18.99009132385254\n", 315 | "exp = 2.0206797122955322\n", 316 | "Iteration 96\n", 317 | "loss = 14.605827331542969\n", 318 | "exp = 2.0159859657287598\n", 319 | "Iteration 97\n", 320 | "loss = 10.308375358581543\n", 321 | "exp = 2.011338949203491\n", 322 | "Iteration 98\n", 323 | "loss = 6.095371246337891\n", 324 | "exp = 2.0067379474639893\n", 325 | "Iteration 99\n", 326 | "loss = 1.9643033742904663\n", 327 | "exp = 2.0021820068359375\n", 328 | "Iteration 100\n", 329 | "loss = 2.0871756076812744\n", 330 | "exp = 1.997670292854309\n", 331 | "Iteration 101\n", 332 | "loss = 1.9251511096954346\n", 333 | "exp = 2.002138614654541\n", 334 | "Iteration 102\n", 335 | "loss = 2.125523090362549\n", 336 | "exp = 1.9976273775100708\n", 337 | "Iteration 103\n", 338 | "loss = 1.8860043287277222\n", 339 | "exp = 2.0020952224731445\n", 340 | "Iteration 104\n", 341 | "loss = 2.1638729572296143\n", 342 | "exp = 1.9975844621658325\n", 343 | "Iteration 105\n", 344 | "loss = 1.8470734357833862\n", 345 | "exp = 2.002052068710327\n", 346 | "Iteration 106\n", 347 | "loss = 2.2021071910858154\n", 348 | "exp = 1.9975416660308838\n", 349 | "Iteration 107\n", 350 | "loss = 1.8079333305358887\n", 351 | "exp = 2.0020086765289307\n", 352 | "Iteration 108\n", 353 | "loss = 2.2404448986053467\n", 354 | "exp = 1.9974987506866455\n", 355 | "Iteration 109\n", 356 | "loss = 
1.7690099477767944\n", 357 | "exp = 2.0019655227661133\n", 358 | "Iteration 110\n", 359 | "loss = 2.2786779403686523\n", 360 | "exp = 1.9974559545516968\n", 361 | "Iteration 111\n", 362 | "loss = 1.7298728227615356\n", 363 | "exp = 2.001922130584717\n", 364 | "Iteration 112\n", 365 | "loss = 2.3170087337493896\n", 366 | "exp = 1.9974130392074585\n", 367 | "Iteration 113\n", 368 | "loss = 1.6909598112106323\n", 369 | "exp = 2.0018789768218994\n", 370 | "Iteration 114\n", 371 | "loss = 2.3552298545837402\n", 372 | "exp = 1.9973702430725098\n", 373 | "Iteration 115\n", 374 | "loss = 1.65183424949646\n", 375 | "exp = 2.001835584640503\n", 376 | "Iteration 116\n", 377 | "loss = 2.393557071685791\n", 378 | "exp = 1.9973273277282715\n", 379 | "Iteration 117\n", 380 | "loss = 1.612924575805664\n", 381 | "exp = 2.0017924308776855\n", 382 | "Iteration 118\n", 383 | "loss = 2.4317753314971924\n", 384 | "exp = 1.9972845315933228\n", 385 | "Iteration 119\n", 386 | "loss = 1.5740151405334473\n", 387 | "exp = 2.001749277114868\n", 388 | "Iteration 120\n", 389 | "loss = 2.4699859619140625\n", 390 | "exp = 1.997241735458374\n", 391 | "Iteration 121\n", 392 | "loss = 1.534903645515442\n", 393 | "exp = 2.0017058849334717\n", 394 | "Iteration 122\n", 395 | "loss = 2.5083014965057373\n", 396 | "exp = 1.9971988201141357\n", 397 | "Iteration 123\n", 398 | "loss = 1.4960078001022339\n", 399 | "exp = 2.0016627311706543\n", 400 | "Iteration 124\n", 401 | "loss = 2.546504020690918\n", 402 | "exp = 1.997156023979187\n", 403 | "Iteration 125\n", 404 | "loss = 1.4568971395492554\n", 405 | "exp = 2.001619338989258\n", 406 | "Iteration 126\n", 407 | "loss = 2.5848121643066406\n", 408 | "exp = 1.9971131086349487\n", 409 | "Iteration 127\n", 410 | "loss = 1.4180049896240234\n", 411 | "exp = 2.0015761852264404\n", 412 | "Iteration 128\n", 413 | "loss = 2.623011827468872\n", 414 | "exp = 1.9970703125\n", 415 | "Iteration 129\n", 416 | "loss = 1.3789045810699463\n", 417 | "exp = 2.001532793045044\n", 418 | "Iteration 130\n", 419 | "loss = 2.6613118648529053\n", 420 | "exp = 1.9970273971557617\n", 421 | "Iteration 131\n", 422 | "loss = 1.3400185108184814\n", 423 | "exp = 2.0014896392822266\n", 424 | "Iteration 132\n", 425 | "loss = 2.6995036602020264\n", 426 | "exp = 1.996984601020813\n", 427 | "Iteration 133\n", 428 | "loss = 1.301140546798706\n", 429 | "exp = 2.001446485519409\n", 430 | "Iteration 134\n", 431 | "loss = 2.7375869750976562\n", 432 | "exp = 1.9969419240951538\n", 433 | "Iteration 135\n", 434 | "loss = 1.2622634172439575\n", 435 | "exp = 2.001403331756592\n", 436 | "Iteration 136\n", 437 | "loss = 2.775768995285034\n", 438 | "exp = 1.996899127960205\n", 439 | "Iteration 137\n", 440 | "loss = 1.2233916521072388\n", 441 | "exp = 2.0013601779937744\n", 442 | "Iteration 138\n", 443 | "loss = 2.8138437271118164\n", 444 | "exp = 1.996856451034546\n", 445 | "Iteration 139\n", 446 | "loss = 1.1845217943191528\n", 447 | "exp = 2.001317024230957\n", 448 | "Iteration 140\n", 449 | "loss = 2.852022886276245\n", 450 | "exp = 1.9968136548995972\n", 451 | "Iteration 141\n", 452 | "loss = 1.1456584930419922\n", 453 | "exp = 2.0012738704681396\n", 454 | "Iteration 142\n", 455 | "loss = 2.8900880813598633\n", 456 | "exp = 1.996770977973938\n", 457 | "Iteration 143\n", 458 | "loss = 1.1067979335784912\n", 459 | "exp = 2.0012307167053223\n", 460 | "Iteration 144\n", 461 | "loss = 2.928256034851074\n", 462 | "exp = 1.9967281818389893\n", 463 | "Iteration 145\n", 464 | "loss = 1.0679404735565186\n", 465 | "exp = 2.001187562942505\n", 
466 | "Iteration 146\n", 467 | "loss = 2.9663162231445312\n", 468 | "exp = 1.99668550491333\n", 469 | "Iteration 147\n", 470 | "loss = 1.0290862321853638\n", 471 | "exp = 2.0011444091796875\n", 472 | "Iteration 148\n", 473 | "loss = 3.0044796466827393\n", 474 | "exp = 1.9966427087783813\n", 475 | "Iteration 149\n", 476 | "loss = 0.9902322292327881\n", 477 | "exp = 2.00110125541687\n", 478 | "Iteration 150\n", 479 | "loss = 3.042534112930298\n", 480 | "exp = 1.9966000318527222\n", 481 | "Iteration 151\n", 482 | "loss = 0.951382040977478\n", 483 | "exp = 2.0010581016540527\n", 484 | "Iteration 152\n", 485 | "loss = 3.0806922912597656\n", 486 | "exp = 1.9965572357177734\n", 487 | "Iteration 153\n", 488 | "loss = 0.9125458002090454\n", 489 | "exp = 2.0010149478912354\n", 490 | "Iteration 154\n", 491 | "loss = 3.1187374591827393\n", 492 | "exp = 1.9965145587921143\n", 493 | "Iteration 155\n", 494 | "loss = 0.8737033605575562\n", 495 | "exp = 2.000971794128418\n", 496 | "Iteration 156\n", 497 | "loss = 3.1568875312805176\n", 498 | "exp = 1.9964717626571655\n", 499 | "Iteration 157\n", 500 | "loss = 0.8348639607429504\n", 501 | "exp = 2.0009286403656006\n", 502 | "Iteration 158\n", 503 | "loss = 3.194924831390381\n", 504 | "exp = 1.9964290857315063\n", 505 | "Iteration 159\n", 506 | "loss = 0.7960334420204163\n", 507 | "exp = 2.000885486602783\n", 508 | "Iteration 160\n", 509 | "loss = 3.2330691814422607\n", 510 | "exp = 1.9963862895965576\n", 511 | "Iteration 161\n", 512 | "loss = 0.7572057247161865\n", 513 | "exp = 2.000842332839966\n", 514 | "Iteration 162\n", 515 | "loss = 3.2711000442504883\n", 516 | "exp = 1.9963436126708984\n", 517 | "Iteration 163\n", 518 | "loss = 0.7183753848075867\n", 519 | "exp = 2.0007991790771484\n", 520 | "Iteration 164\n", 521 | "loss = 3.309231996536255\n", 522 | "exp = 1.9963008165359497\n", 523 | "Iteration 165\n", 524 | "loss = 0.6795542240142822\n", 525 | "exp = 2.000756025314331\n", 526 | "Iteration 166\n", 527 | "loss = 3.3472626209259033\n", 528 | "exp = 1.9962581396102905\n", 529 | "Iteration 167\n", 530 | "loss = 0.6407375931739807\n", 531 | "exp = 2.0007128715515137\n", 532 | "Iteration 168\n", 533 | "loss = 3.385390520095825\n", 534 | "exp = 1.9962153434753418\n", 535 | "Iteration 169\n", 536 | "loss = 0.6019205451011658\n", 537 | "exp = 2.0006697177886963\n", 538 | "Iteration 170\n", 539 | "loss = 3.4234063625335693\n", 540 | "exp = 1.9961726665496826\n", 541 | "Iteration 171\n", 542 | "loss = 0.563326895236969\n", 543 | "exp = 2.000626802444458\n", 544 | "Iteration 172\n", 545 | "loss = 3.461317300796509\n", 546 | "exp = 1.996130108833313\n", 547 | "Iteration 173\n", 548 | "loss = 0.5245198607444763\n", 549 | "exp = 2.0005836486816406\n", 550 | "Iteration 174\n", 551 | "loss = 3.4993295669555664\n", 552 | "exp = 1.9960874319076538\n", 553 | "Iteration 175\n", 554 | "loss = 0.48592501878738403\n", 555 | "exp = 2.0005407333374023\n", 556 | "Iteration 176\n", 557 | "loss = 3.537228584289551\n", 558 | "exp = 1.9960448741912842\n", 559 | "Iteration 177\n", 560 | "loss = 0.44712793827056885\n", 561 | "exp = 2.000497579574585\n", 562 | "Iteration 178\n", 563 | "loss = 3.575230836868286\n", 564 | "exp = 1.996002197265625\n", 565 | "Iteration 179\n", 566 | "loss = 0.4085436463356018\n", 567 | "exp = 2.0004546642303467\n", 568 | "Iteration 180\n", 569 | "loss = 3.6131303310394287\n", 570 | "exp = 1.9959596395492554\n", 571 | "Iteration 181\n", 572 | "loss = 0.3699645400047302\n", 573 | "exp = 2.0004117488861084\n", 574 | "Iteration 182\n", 575 | "loss = 
3.6509177684783936\n", 576 | "exp = 1.9959172010421753\n", 577 | "Iteration 183\n", 578 | "loss = 0.3313896059989929\n", 579 | "exp = 2.00036883354187\n", 580 | "Iteration 184\n", 581 | "loss = 3.6888065338134766\n", 582 | "exp = 1.9958746433258057\n", 583 | "Iteration 185\n", 584 | "loss = 0.2928203046321869\n", 585 | "exp = 2.000325918197632\n", 586 | "Iteration 186\n", 587 | "loss = 3.726686954498291\n", 588 | "exp = 1.995832085609436\n", 589 | "Iteration 187\n", 590 | "loss = 0.25403380393981934\n", 591 | "exp = 2.0002827644348145\n", 592 | "Iteration 188\n", 593 | "loss = 3.7646708488464355\n", 594 | "exp = 1.9957894086837769\n", 595 | "Iteration 189\n", 596 | "loss = 0.21546697616577148\n", 597 | "exp = 2.000239849090576\n", 598 | "Iteration 190\n", 599 | "loss = 3.802554130554199\n", 600 | "exp = 1.9957468509674072\n", 601 | "Iteration 191\n", 602 | "loss = 0.1769118458032608\n", 603 | "exp = 2.000196933746338\n", 604 | "Iteration 192\n", 605 | "loss = 3.8403210639953613\n", 606 | "exp = 1.9957044124603271\n", 607 | "Iteration 193\n", 608 | "loss = 0.13834820687770844\n", 609 | "exp = 2.0001540184020996\n", 610 | "Iteration 194\n", 611 | "loss = 3.8781914710998535\n", 612 | "exp = 1.9956618547439575\n", 613 | "Iteration 195\n", 614 | "loss = 0.09979522228240967\n", 615 | "exp = 2.0001111030578613\n", 616 | "Iteration 196\n", 617 | "loss = 3.91595721244812\n", 618 | "exp = 1.9956194162368774\n", 619 | "Iteration 197\n", 620 | "loss = 0.06124330684542656\n", 621 | "exp = 2.000068187713623\n", 622 | "Iteration 198\n", 623 | "loss = 3.953817129135132\n", 624 | "exp = 1.9955768585205078\n", 625 | "Iteration 199\n", 626 | "loss = 0.022694068029522896\n", 627 | "exp = 2.0000252723693848\n" 628 | ] 629 | } 630 | ], 631 | "source": [ 632 | "import torch\n", 633 | "from torch.autograd import Variable\n", 634 | "import numpy as np\n", 635 | "\n", 636 | "def rmse(y, y_hat):\n", 637 | " \"\"\"Compute root mean squared error\"\"\"\n", 638 | " return torch.sqrt(torch.mean((y - y_hat).pow(2)))\n", 639 | "\n", 640 | "def forward(x, e):\n", 641 | " \"\"\"Forward pass for our function\"\"\"\n", 642 | " return x.pow(e.repeat(x.size(0)))\n", 643 | "\n", 644 | "# Let's define some settings\n", 645 | "n = 100 # number of examples\n", 646 | "learning_rate = 5e-6\n", 647 | "\n", 648 | "# Model definition\n", 649 | "x = Variable(torch.rand(n) * 10, requires_grad=False)\n", 650 | "\n", 651 | "# Model parameter and its true value\n", 652 | "exp = Variable(torch.FloatTensor([2.0]), requires_grad=False)\n", 653 | "exp_hat = Variable(torch.FloatTensor([4]), requires_grad=True)\n", 654 | "y = forward(x, exp)\n", 655 | "\n", 656 | "loss_history = []\n", 657 | "exp_history = []\n", 658 | "\n", 659 | "# Training loop\n", 660 | "for i in range(0, 200):\n", 661 | " print(\"Iteration %d\" % i)\n", 662 | " \n", 663 | " # Compute current estimate\n", 664 | " y_hat = forward(x, exp_hat)\n", 665 | " \n", 666 | " # Calculate loss function\n", 667 | " loss = rmse(y, y_hat)\n", 668 | " \n", 669 | " # Do some recordings for plots\n", 670 | " loss_history.append(loss.data[0])\n", 671 | " exp_history.append(exp_hat.data[0])\n", 672 | " \n", 673 | " # Compute gradients\n", 674 | " loss.backward()\n", 675 | " \n", 676 | " print(\"loss = %s\" % loss.data[0])\n", 677 | " print(\"exp = %s\" % exp_hat.data[0])\n", 678 | " \n", 679 | " # Update model parameters\n", 680 | " exp_hat.data -= learning_rate * exp_hat.grad.data\n", 681 | " exp_hat.grad.data.zero_()" 682 | ] 683 | }, 684 | { 685 | "cell_type": "code", 686 |
"execution_count": 42, 687 | "metadata": {}, 688 | "outputs": [ 689 | { 690 | "data": { 691 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAmYAAAGrCAYAAABqslt9AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzsvXt4XeV14P1bRxfLkizLkowshGooUHNrQm5AmmRKQ9K4\nKVPSadIhX79Ap2maDuQybTJNSDttZvrQIfO1SZuPks7XkgGaNEBzaSgNSVPIPQVKCAmX4GAHC1uR\nZfkiy5Is67a+P9592AchWUfy3to671q/5/Gjo7332Wf9thabdd797rVFVXEcx3Ecx3GKp1R0AI7j\nOI7jOE7ACzPHcRzHcZw1ghdmjuM4juM4awQvzBzHcRzHcdYIXpg5juM4juOsEbwwcxzHcRzHWSN4\nYeY4juM4jrNG8MLMWREisltEjonImIjsE5FbRKS1Yv0tIqIicsW8930kWf7rye+NIvJnIrI32ddu\nEfnzRT6n/O/GRWL6oIh8Iidlx3Ecx8kdL8yck+Hfq2orcCHwIuC6eet/CFxV/kVE6oFfBXZVbHMd\n8FLgImADcCnw8EKfU/HvHZlaOI7jOM4awQsz56RR1X3AlwgFWiX/CLxSRDYlv28Hvg/sq9jmZcDn\nVPXHGtitqrdlHaOInCsiXxWRERF5XER+qWLd60XkCRE5KiIDIvLeZHmXiNydvOeQiHxDRPy/Gcdx\nFkREThWRz4jIsIg8LSLvSpZ/QUT+rGK720Xk48nrXxeRb4nIjSJyRESeFJHLinJwisf/J+OcNCJy\nGvALwM55qyaBzwNXJr9fBcwvuu4HfldErhGRnxYRySG+BkKR+M/AKcA7gU+KyLZkk5uBt6vqBuAC\n4L5k+XuAvcBmoBv4AODPMHMc53kkX9r+Efge0AtcBvwXEXkd8BvAW0Tk1SLya4QrBO+uePvFhCsJ\nXcAfAZ8VkY7VjN9ZO3hh5pwM/yAiR4E9wH7CCWU+twFXiUg78LPAP8xb/z+BDwG/BjwEDIjI1Qt8\nzkjFv7ctM85LgFbgBlWdUtX7gLuBNyfrp4HzRKRNVQ+r6sMVy3uArao6rarfUH+4rOM4C/MyYLOq\n/o/kPPMj4K+BK5OrCv8ZuBX4C+AqVT1a8d79wJ8n55k7gB3AL65y/M4awQsz52R4QzLKdClwDuHb\n3nNQ1W8SRpx+H7hbVY/NWz+rqn+pqq8A2oHrgY+LyLnzPqe94t9fLzPOU4E9qjpXsayf8K0W4FeA\n1wP9IvI1EXl5svz/IYwC/rOI/EhE3r/Mz3Ucxw5bgVMrv0QSRtm7k/X/CNQBO5LzYiUD87709RPO\nW45BvDBzThpV/RpwC/Cni2zyCcJlwRPOHVPVY6r6l8Bh4LwMQ/wx0DdvfthPAAPJ5/6bql5BuMz5\nD8CdyfKjqvoeVf1J4JcIl1x97ofjOAuxB3h63pfIDar6+mT99cAPgB4RefO89/bOm8bxE4TzlmMQ\nL8ycrPhz4LUi8sIF1n0UeC3w9fkrROS/iMilIrJeROqTy5gbgO+uMI6SiDRV/FsHPABMAL8nIg0i\ncinw74Hbk3YdvyYiG1V1GhgF5pLYLheRs5IT5hFgtrzOcRxnHg8CR0Xkfcn5rE5ELhCRl4nIvwP+\nE2Ge7dXA/ysivRXvPQV4V3J+ehNwLvCFVTdw1gRemDmZoKrDhBGxP1xg3SFVvXeR+VkTwJ8R7tQ8\nAFwL/EoyP6PMP87rY/a5E4TyZuBYxb9dqjpFKMR+IfmMmwhzPJ5M3vMWYLeIjAK/TZjvBnA28C/A\nGPCvwE2q+pWljoXjOPZQ1VngcsLd6U8TzjV/Q5inehvwDlUdUNVvEG44+j8Vo2QPEM43Bwgja29U\n1YOrrOCsEcTnMjuO4zhOMSTNtn9TVV9ZdCzO2sBHzBzHcRzHcdYIXpg5juM4juOsEfxSpuM4juM4\nzhrBR8wcx3Ecx3HWCPVFB3ASLGuob2wMWlvzCmXtYMHTgiO45yJk/siugvDz1wK4Z1y45/Oo6vxl\nZsRsYKDoCFYHC54WHME9nRQrx8g948I9V4aZwqzDyONgLXhacAT3dFKsHCP3jAv3XBlmCrOZmaIj\nWB0seFpwBPd0UqwcI/eMC/dcGWYKs9HRoiNYHSx4WnAE93RSrBwj94wL91wZtdwuY1mBT05CU1Ne\noawdLHhacAT3XASTk/89F+LCPeNiGZ4++b+S/v6iI1gdLHhacAT3dFKsHCP3jAv3XBlmCrPGxqIj\nWB0seFpwBPd0UqwcI/eMC/dcGWYKs87OoiNYHSx4WnAE93RSrBwj94wL91wZZgqzwcGiI1gdLHha\ncAT3dFKsHCP3jAv3XBlmCrOurqIjWB0seFpwBPd0UqwcI/eMC/dcGWYKs8nJoiNYHSx4WnAE93RS\nrBwj94wL91wZZgqzsbGiI1gdLHhacAT3dFKsHCP3jAv3XBnexywyLHhacAT3XATvYxYx7hkX7vk8\nvI9ZJd5PJR4sOIJ7OilWjpF7xoV7rgwzhZmFqh1seFpwBPd0UqwcI/eMC/dcGdEXZsdnZnly3yil\npumiQ1kV2tuLjiB/LDiCezqB/oPjjDFRdBirgpVccM+4yNoz+sJs7+FjbP/zb3D3d/YXHcqqsG9f\n0RHkjwVHcE8n8K7bH+GP7nqs6DBWBSu54J5xkbVn9IVZScJcuw0bCg5kldi8uegI8seCI7inEygJ\n1DfU7E1ay8JKLrhnXGTtaaAwCz+PHbdxYhsfLzqC/LHgCO7pBEoizMwUHcXqYCUX3DMusvaMvjCT\n5O7U40Ya3U0YmIpiwRHc0wmUBGZmbXyxtJIL7hkXWXvGX5glI2YdnTZObFu3Fh1B/lhwBPd0AiJC\nQ6Ofv2LCPeMia08zhdmBA8XGsVpY6BtjwRHc0wmUBI4ZGfG3kgvuGRfex2yZlCf/W/nG2dxcdAT5\nY8ER3NMJ1JUEKfn5KybcMy6y9oy+MCuPmDU0FhvHatHSUnQE+WPBEdzTCZREELFRmFnJBfeMi6w9\noy/MyiNmR48WHMgqMTxcdAT5Y8ER3NMJiAhTNvpjm8kF94yLrD2jL8zKI2atG2x849yypegI8seC\nI7inEygJ1NX7+Ssm3DMusvaMvzBL2mWMT9g4sY2MFB1B/lhwBPd0Apb6mFnJBfeMi6w9oy/Myg1m\np41cCpg0cPeWBUdwTydQEpg10sfMSi64Z1xk7Rl9YSbJtcz2TTZObBb6xlhwBPd0AiJCfUPRUawO\nVnLBPePC+5gtk/KI2aFDxcaxWljoG2PBEdzTCZQEJo08Us5KLrhnXKx6HzMRaRKRB0XkeyLyuIj8\n92R5h4h8WUSeSn5uqnjPdSKyU0R2iMjrKpa/REQeTdZ9VJLhLBFZJyJ3JMsfEJHTsxIszzGz0i6j\ntbXoCPLHgiO4pxO
oKwkY6WNmJRfcMy6y9qxmxOw48GpVfSFwIbBdRC4B3g/cq6pnA/cmvyMi5wFX\nAucD24GbRKQu2dfHgLcBZyf/tifL3wocVtWzgI8AH8rADQBJDOuN3NXU1FR0BPljwRHc0wmE769+\n/ooJ94yLrD2XLMw0MJb82pD8U+AK4NZk+a3AG5LXVwC3q+pxVX0a2AlcJCI9QJuq3q+qCtw27z3l\nfX0auKw8mlaJCFr+V7VguY/Z2BIbRoKFR09ZcAT3dAKW7sq0kgvuGRdZe9ZXs1Ey4vUd4CzgL1X1\nARHpVtXBZJN9QHfyuhe4v+Lte5Nl08nr+cvL79kDoKozInIE6AQW1T14MByMnp7wemoqTMDr74e2\nNqivD/PKNm0O2x+fUiYmYM8eaG8Py0ZGoK8PBgagVILu7vC6owNmZmB0NN1nYyN0dsLgIHR1hbsw\nxsbS9U1NYb/79sHmzTA+Hp44X17f3By6Aw8Ph54nIyNhH+X1ra1hH9U49fbC0BDMzYXXlU5jY+Fz\nY3Ka/3fasAF27IjLaaG/06ZNwTMmp4X+TuvWhe2qcdq2rZozVlyUBEp1NkbMenqKjmB1cM+4yNpT\nwuBVlRuLtAOfA94JfFNV2yvWHVbVTSJyI3C/qn4iWX4zcA+wG7hBVV+TLH8V8D5VvVxEHgO2q+re\nZN0u4GJVPVEdWlXgx6ZmOfcPv8hvXXwOH/jlM6t2rVWefhrOOKPoKPLFgiO45yI8byS9Rqn6xPs7\ndzzCA7sO8+0P/Fye8awJPOfjwj2fR1Xnr2XdlamqI8BXCHPDhpLLkyQ/9yebDQB9FW87LVk2kLye\nv/w57xGRemAjcHA5sS1G+YLozIyNb5xTU0VHkD8WHME9nYAIzM75+Ssm3DMusvas5q7MzclIGSKy\nHngt8CRwF3B1stnVwOeT13cBVyZ3Wp5BmOT/YHLZc1RELknmj1017z3lfb0RuE+XM5R3wvjDz43t\nJ94uFiz0jbHgCO7pBEoi1NUtvV0MWMkF94yLIvqY9QBfEZHvA/8GfFlV7wZuAF4rIk8Br0l+R1Uf\nB+4EngC+CFyrqrPJvq4B/oZwQ8AuwiVOgJuBThHZCfwuyR2eWVCe/H/osI1vnBb6xlhwBPd0AiWB\nqWk/f8WEe8ZF1p5LTv5X1e8DL1pg+UHgskXecz1w/QLLHwIuWGD5JPCmKuJdNuULuo1G+pi1tRUd\nQf5YcAT3dAJ1JQGxUZhZyQX3jIusPQ10/g+lmURvGqiv6j7b2saCI7inExARjEwxM5ML7hkXWXtG\nX66U55hNTNg4s1l49JQFR3BPJ1AyNPnfSi64Z1xk7WmgMAuVWeuGggNZJXp7l96m1rHgCO7pBEoi\niJFLmVZywT3jImvP6AszCKNmY2M2TmxDQ0VHkD8WHME9nUBJhNm5oqNYHazkgnvGRdaeJgqzkghz\nRk5sFjwtOIJ7OgERmMume9Cax0ouuGdcZO1ppDCDllYbJzYLQ8cWHME9nUDp+Y8NjhYrueCeceGX\nMleAIBwZLTqK1WHPnqIjyB8LjuCeTqAkMGNk8r+VXHDPuMja00ZhJrBunY0TW7uBJxxYcAT3dAKl\nkoCRS5lWcsE94yJrTzOFmZEvnI7jREZJBCNTdRzHwUhhVhJhctJGZTYyUnQE+WPBEdyzKESkT0S+\nIiJPiMjjIvLuZHmHiHxZRJ5Kfm6qeM91IrJTRHaIyOuyjKckMGfkm+Vay4W8cM+4yNrTTGHW2lp0\nFKtDX1/REeSPBUdwzwKZAd6jqucBlwDXish5hGf43quqZwP3Jr+TrLsSOB/YDtwkIpk9drwkgmKj\nMFuDuZAL7hkXWXuaKMwEGDXSx2xgoOgI8seCI7hnUajqoKo+nLw+CvwA6AWuAG5NNrsVeEPy+grg\ndlU9rqpPAzuBi+bvVwQt/1tOPJYeybTWciEv3DMusvY08SQrQ3ebUzJQaltwBPdcC4jI6cCLgAeA\nblUdTFbtA7qT173A/RVv25ssW5SDB+HAAejpCa+npmDrVujvDw9Erq8Pj3np7YUjh8N7xseVvXvl\n2YnGIyPhm/rAQDiG3d3hdUcHzMzA6Gi6z8ZG6OyEwUHo6oLJSRgbS9c3NYUJzPv2webNMD4OExPp\n+uZmaGmB4WHYsiV89uRkur61NeyjWqehodD7qbc33NFWdtq9OyyLyWmhv9Pu3WF9TE4L/Z127w77\ni8lpob/T/v1hn0s5bdtW5XlHa/dun6oDf+F//2d+8fxT+ZM3XpBnPGuCsTGiv2xrwRHccxFW7WuW\niLQCXwOuV9XPisiIqrZXrD+sqptE5EbgflX9RLL8ZuAeVf30CXZf9fnro/c+xYe//EN2/cnrqSvF\n/S3Tcz4u3PN5VPUf8Br+rpodJYGjY0VHsTpYGDq24AjuWSQi0gB8Bvikqn42WTwkIj3J+h5gf7J8\nAKicZXJasiwTyrWYhQeZr8VcyAP3jIusPU0UZiJCo5E+Zh0dRUeQPxYcwT2LQkQEuBn4gap+uGLV\nXcDVyeurgc9XLL9SRNaJyBnA2cCDWcVTSiozC49lWmu5kBfuGRdZe5qYYxZuNy86itVhZqboCPLH\ngiO4Z4G8AngL8KiIPJIs+wBwA3CniLwV6Ad+FUBVHxeRO4EnCHd0Xquqs1kFU34kk4G6bC3mQi64\nZ1xk7WmiMBMRjh8vOorVYXQ0TICMGQuO4J5FoarfZPG5IJct8p7rgevziKd8KdPCiNlay4W8cM+4\nyNrTxqVM7DzEfOvWoiPIHwuO4J5OoDxiZqEws5IL7hkXWXuaKMxKIhw9WnQUq0N/f9ER5I8FR3BP\nJyDPFmYFB7IKWMkF94yLrD1NFGYiQMnAWY3QXyV2LDiCezqB8qXMGm5tVDVWcsE94yJrTxOFWUmE\ndeuKjmJ16OwsOoL8seAI7ukESoZGzKzkgnvGRdaeJgozCF2zLTA4uPQ2tY4FR3BPJ2Cpj5mVXHDP\nuMja00RhVipBo5ERs66uoiPIHwuO4J5OoNzHzMKlTCu54J5xkbWnjcJMhJnZ+E9qEJ4hFjsWHME9\nnYClS5lWcsE94yJrTxOFmQBT00VHsTqMGXj0lAVHcE8nYKmPmZVccM+4yNrTRGFWEqG5uegoVgcL\nfWMsOIJ7OgHxPmbR4Z5x4X3MVoLA2Fj8JzWw0TfGgiO4pxOw9EgmK7ngnnHhfcxWQEmEUl3RUawO\nTU1FR5A/FhzBPZ2ApUuZVnLBPeMia08jhRnU18d/UgNoby86gvyx4Aju6QQsTf63kgvuGRdZe5oo\nzATh2DEDZzVg376iI8gfC47gnk5ADPUxs5IL7hkXWXvaKMzETh+zzZuLjiB/LDiCezqBOkN9zKzk\ngnvGRdaeRgozYdpIu4zx8aIjyB8LjuCeTsDSpUwrueCecZG1p4nCrCSYaTA7MVF0BPljwRHc0wlY\nmvxvJRfcMy6y9jRRmInAOiN3h1joG2PBEdzTCXgfs/hwz7jwPmYroCR2Jv9b
6BtjwRHc0wl4H7P4\ncM+4yNqzPtvdrU1E5Nk7m2LHwhMOLDiCezoBS5cyreSCe8ZF1p4mRswEkFL8JzWAlpaiI8gfC47g\nnk7A0uR/K7ngnnGRtaeJwqwkMDVVdBSrw/Bw0RHkjwVHcE8nYKmPmZVccM+4yNrTzKXM+sb4T2oA\nW7YUHUH+WHAE93QClvqYWckF94yLrD3NjJhZ6WM2MlJ0BPljwRHc0wlYupRpJRfcMy6y9jRRmImI\nmT5mk5NFR5A/FhzBPZ2AGJr8byUX3DMusva0UZhh55FMFvrGWHAE93QCJe9jFh3uGRfex2wFlETM\nVO4W+sZYcAT3dALexyw+3DMusvY0UZiJQMlIu4zW1qIjyB8LjuCeTsBSHzMrueCecZG1p4nCrGSo\nwWyTgUdPWXAE93QCYmjyv5VccM+4yNpzycJMRPpE5Csi8oSIPC4i706Wf1BEBkTkkeTf6yvec52I\n7BSRHSLyuorlLxGRR5N1H5XkjCMi60TkjmT5AyJyepaSIjA1beCsBhw4UHQE+WPBEdzTCTw7Ymag\nMrOSC+4ZF1l7VjNiNgO8R1XPAy4BrhWR85J1H1HVC5N/XwBI1l0JnA9sB24Skbpk+48BbwPOTv5t\nT5a/FTisqmcBHwE+dPJqKSJCfUOWe1y79PQUHUH+WHAE93QC5T5mFi5lWskF94yLrD2XLMxUdVBV\nH05eHwV+APSe4C1XALer6nFVfRrYCVwkIj1Am6rer6FT4m3AGyrec2vy+tPAZeXRtEpE0PK/Kv2A\nch+z+E9qAAcPFh1B/lhwBPd0Apb6mFnJBfeMi6w9l9X5P7nE+CLgAeAVwDtF5CrgIcKo2mFC0XZ/\nxdv2Jsumk9fzl5P83AOgqjMicgToBBYdIDx4MAwf9vSE11NT4ZbV/n5oa4P6ejh0CHp74dgETEzC\nxATs2QPt7WEfIyPQ1wcDA1AqQXd3eN3RATMzMDqa7rOxETo7YXAQurpC35KxsXR9U1PY7759sHkz\njI+Hzyuvb24Oz9MaHg5dgkdGwj7K61tbwz6qdRoagrm58LrS6cknU49YnOb/nY4cCfuJyWmhv9Px\n47BjR1xOC/2dDh8Ov1fjtG3bcs5YcWCpj5mVR+e5Z1xk7SnVPuZDRFqBrwHXq+pnRaSbUDgp8MdA\nj6r+hojcCNyvqp9I3nczcA+wG7hBVV+TLH8V8D5VvVxEHgO2q+reZN0u4GJVPdGV26rPUm+95d8Y\nPDLJF979qmrfUrNMTsY/4dKCI7jnIsRyG0/V568fDI7yC3/xDf7q/34x2y+I+9qQ53xcuOfzqOr8\nVdVdmSLSAHwG+KSqfhZAVYdUdVZV54C/Bi5KNh8A+ireflqybCB5PX/5c94jIvXARiCzwUGRMPpg\nAQt9Yyw4gns6AUuXMq3kgnvGxar3MUvmet0M/EBVP1yxvPKr2y8DjyWv7wKuTO60PIMwyf9BVR0E\nRkXkkmSfVwGfr3jP1cnrNwL3aYZP7BURSiYag4TLTrFjwRHc0wlY6mNmJRfcMy6y9qxmjtkrgLcA\nj4rII8myDwBvFpELCUPyu4G3A6jq4yJyJ/AE4Y7Oa1V1NnnfNcAtwHrC5c17kuU3A38rIjuBQ4S7\nOjNDAF3e/QI1S/2yZg3WJhYcwT2dgKU+ZlZywT3jImvPJXenqt9k4euiXzjBe64Hrl9g+UPABQss\nnwTetFQsK6UkwvSMgbMaYYL25s1FR5EvFhzBPZ2ApT5mVnLBPeMia08TF/hKJTuVe++JGplEggVH\ncE8nYOkh5lZywT3jImtPE4WZYGfEbGio6Ajyx4IjuKcTSBvMFhzIKmAlF9wzLrL2tFGYCRj4sgmE\n/lKxY8ER3NMJWOpjZiUX3DMusvY0UpgJpbqlt4sBC0PHFhzBPZ1A+VJmhjeqr1ms5IJ7xoVfylwB\nlh7JtGdP0RHkjwVHcE8nYKmPmZVccM+4yNrTSGEmiAnT9PE4MWPBEdzTCVjqY2YlF9wzLrL2NFGu\nCDZOao7jxIelPmaO41gpzETMTEIcGSk6gvyx4Aju6QQs9TGzkgvuGRdZexopzEBK8Z/UAPr6lt6m\n1rHgCO7pBCz1MbOSC+4ZF1l7mijMSgKzs0tvFwMDA0tvU+tYcAT3dAIlQ33MrOSCe8ZF1p4mCjNB\nTHzbBEw8rN2CI7inEyhfyrTQLsNKLrhnXGTtaeKwlUp2EqS7u+gI8seCI7inE7B0KdNKLrhnXGTt\naaJcERFm/FJmNFhwBPd0Apb6mFnJBfeMC7+UuQKSvtkFR7E6dHQUHUH+WHAE93QClh7JZCUX3DMu\nsvY0UZiVRIyUZTAzU3QE+WPBEdzTCaSPZCo4kFXASi64Z1xk7WmiMBOBWQvXAYDR0aIjyB8LjuCe\nTqA8+d/COcxKLrhnXGTtaaIwK4k8e3KLna1bi44gfyw4gns6AUuT/63kgnvGRdaeJgozEZgx8G0T\noL+/6Ajyx4IjuKcTsNTHzEouuGdcZO1pozBDTMzPAGhsLDqC/LHgCO7ppJTERh8zK7ngnnGRtaeJ\nwqwkgMR/UgPo7Cw6gvyx4Aju6aSUxEaTbCu54J5xkbWnicIsTP4vOorVYXCw6Ajyx4IjuKeTEp5e\nUnQU+WMlF9wzLrL2NFGYhcmzBs5qQFdX0RHkjwVHcE8npVSyMfnfSi64Z1xk7WmiMENs9AACmJws\nOoL8seAI7umkWJknayUX3DMusvY0UZiVxMZJDWBsrOgI8seCI7inkyLY6GNmJRfcMy6y9jRSmIEa\nuZRpoW+MBUdwTyelrs7G5H8rueCeceF9zFZAmDgb/0kNbPSNseAI7ulUoDZG/a3kgnvGhfcxWwEl\nQ3PMmpqKjiB/LDiCezopdUYm/1vJBfeMi6w9TRRmGHqIeXt70RHkjwVHcE8npVSyMepvJRfcMy6y\n9jRRmJWfk2mhc/a+fUVHkD8WHME9nQrURh8zK7ngnnGRtaeRwszOs+Y2by46gvyx4Aju6aTUlWx8\nsbSSC+4ZF1l7mijMkgEzE5cCxseLjiB/LDiCezopgjBn4OklVnLBPeMia08ThVkpuZZpoC5jYqLo\nCPLHgiO4p/NcLHyxtJIL7hkXWXuaKMzKWDixWegbY8ER3NNJaagXZv38FQ3uGRfex2wFlOeYWcBC\n3xgLjuCeTorOeR+zmHDPuPA+ZiugXJdZGDFrbi46gvyx4Aju6aRY6WNmJRfcMy6y9jRRmKXtMoqN\nYzVoaSk6gvyx4Aju6aTUlWy0y7CSC+4ZF1l7GinMyu0y4j+zDQ8XHUH+WHAE93RS5ub8/BUT7hkX\nWXuaKMzKWPjGuWVL0RHkjwVHcE8npaFBTPQxs5IL7hkXWXuaKMyenfwf/3mNkZGiI8gfC47gnkUh\nIh8Xkf0i8ljFsg+KyICIPJL8e33FuutEZKeI7BCR1+URk87Z6GO21nIhL9wzLrL2NFGYWZr8PzlZ\ndAT5Y8ER3LNAbgG2L7D8I6p6YfL
vCwAich5wJXB+8p6bRKQu84jUz18x4Z5xkbWnicKsPGIW/2nN\nRt8YC47gnkWhql8HDlW5+RXA7ap6XFWfBnYCFy20oQha/rfcmJrW2XiI+VrLhbxwz7jI2rM+292t\nTSyNmPX3w7ZtRUeRLxYcwT3XIO8UkauAh4D3qOphoBe4v2KbvcmyE3LwIBw4AD094fXUVDi59/dD\nWxvU18OhQ9DbC0NDMD4uTG+AHTugvT3sY2QE+vpgYABKJejuDq87OmBmBkZH0302NkJnJwwOQldX\n+IY/Npaub2oK+923Lzz3b3w8dDMvr29uDneeDQ+H+TQjI2Ef5fWtrWEfy3Gamwuv9+xJnR5+GH72\nZ+NyWujv9I1vwEtfGpfTQn+nr3wFzjknLqeF/k6PPx62X8qp2vOc1PCE0qoD/9v7+/lv//AYD/7+\nZZyyoSnPmApnYCAkXMxYcAT3XIRV6RYtIqcDd6vqBcnv3cABwnnnj4EeVf0NEbkRuF9VP5FsdzNw\nj6p+eomPWNaJ9xc+/E26NzVyy39acDAuGjzn48I9n0dV5y8jlzKTFzVbg1ZPU9x1J2DDEdxzLaGq\nQ6o6q6pzwF+TXq4cAPoqNj0tWZYpdXU2+pjVQi5kgXvGRdaeJgozodzHrOBAVoEDB4qOIH8sOIJ7\nriVEpKfi118Gynds3gVcKSLrROQM4Gzgwaw/f3YGE+0yaiEXssA94yJrTxNzzJ7t/G9gyKynZ+lt\nah0LjuDwfCnSAAAgAElEQVSeRSEinwIuBbpEZC/wR8ClInIhYdx9N/B2AFV9XETuBJ4AZoBrVXU2\n65isTP5fa7mQF+4ZF1l7LjliJiJ9IvIVEXlCRB4XkXcnyztE5Msi8lTyc1PFexbs6yMiLxGRR5N1\nHxUJ0/KTb5t3JMsfSOZ3ZEY6+T/Lva5NDh4sOoL8seAI7lkUqvpmVe1R1QZVPU1Vb1bVt6jqT6vq\nC1T1l1R1sGL761X1TFXdpqr35BHT7IyNPmZrLRfywj3jImvPai5lzhDuQDoPuAS4Nund837gXlU9\nG7g3+X2pvj4fA95GGO4/m7RX0FuBw6p6FvAR4EMZuD2LlNtlGPjGOTVVdAT5Y8ER3NN5LhZGzKzk\ngnvGRdaeSxZmqjqoqg8nr48CPyDcDn4FcGuy2a3AG5LXC/b1SeZotKnq/RoqpNvmvae8r08Dl5VH\n0ypZaR+gZ/uYxX9eM9E3xoIjuKeTsr7JxqVMK7ngnnFRaB+z5BLji4AHgO6K4fx9QHfyerG+PtPJ\n6/nLy+/ZA6CqMyJyBOgk3J6+IMvpAzS8P7xnfELZMex9gGrNaf7f6ciR8LkxOS30dzp+HNati8tp\nob/T4cPhfdU41Ui/s8yZmhJKDUVHkT811NPupHDPuMjas+o+ZiLSCnwNuF5VPysiI6raXrH+sKpu\nWqyvD2HC7A2q+ppk+auA96nq5ckz6bar6t5k3S7gYlU90b0OVX99/Nx39/I7d3yPr773Uk7vaqn2\nbTXJ4GD8Ey4tOIJ7LsKq9DFbBZY1/PUfb3qAKWb43DWvyCueNYHnfFy45/PIro+ZiDQAnwE+qaqf\nTRYPlW8hT34m41KL9vUZSF7PX/6c94hIPbARyGw6XdouI/5LAfUG7rO14Aju6aSUSjb6mFnJBfeM\ni6w9q7krU4CbgR+o6ocrVt0FXJ28vhr4fMXy5/X1SS57jorIJck+r5r3nvK+3gjcpxnO1C/PVjNw\nXuNQtU/4q2EsOIJ7OilW+phZyQX3jIusPaup814BvAV4VEQeSZZ9ALgBuFNE3gr0A78KS/b1uQa4\nBVhPuLxZvrX8ZuBvRWQn4eHBV56k13OwdFemhcdfWHAE93RS1q8XJib8/BUL7hkXWXsuWZip6jdZ\n/LroZYu853rg+gWWPwRcsMDySeBNS8WyUp5tMBv/eY2hoTCZO2YsOIJ7OinTUzb6mFnJBfeMi6w9\nTTySqdwuw8IcDQsnbwuO4J5OimBjjqyVXHDPuMja00RhVh7us3BiszB0bMER3NNJaV5vo4+ZlVxw\nz7jI2tNGYWaoweyePUVHkD8WHME9nZTJSRt3ZVrJBfeMi6w9jRRm4aeFb5zt7UtvU+tYcAT3dFLW\nNfr5KybcMy6y9jRRmJWe/3Qnx3GcmqEkYmLE33EcM4VZ+GnhG+fISNER5I8FR3BPJ2Vm2s9fMeGe\ncZG1p4nCLL2UWWwcq0Ff39Lb1DoWHME9nZTWVhuT/63kgnvGRdaeRgozOw1mBwaW3qbWseAI7umk\nTB4TZmb9/BUL7hkXWXvaKMySnxZGzEoG/qIWHME9nZTGuhLTs/E3hbKSC+4ZF1l7mjhs6eT/+Cuz\n7u6iI8gfC47gnk5K24YSUzPxF2ZWcsE94yJrTxOFmaU5ZhaGji04gns6KVPHSkwZGDGzkgvuGRd+\nKXMFlAw1mO3oKDqC/LHgCO7ppLS1lJg2MMfMSi64Z1xk7WmiMLPUYHZmpugI8seCI7ink1KSErNz\nymzkw/5WcsE94yJrTxuFGeWHmMd9UgMYHS06gvyx4Aju6aTMTYdTdew3AFjJBfeMi6w9TRRmJTtz\n/9m6tegI8seCI7ink3JKVziJHY/8BgArueCecZG1p4nCrNzHLPKrAAD09xcdQf5YcAT3dFLGjoRT\ndex3ZlrJBfeMi6w9TRRm5REzNTBk1thYdAT5Y8ER3NNJaWq0cSnTSi64Z1xk7WmiMLPULqOzs+gI\n8seCI7ink7Jpo40RMyu54J5xkbWnkcLMzuT/wcGiI8gfC47gnk7K2KiNETMrueCecZG1p4nC7NnO\n//HXZXR1FR1B/lhwBPd0Ujo3hVN17JP/reSCe8ZF1p4mCrP0WZnxV2aTk0VHkD8WHME9nRSdtTFi\nZiUX3DMusvY0UZhZ6vw/NlZ0BPljwRHc00mZnbIxx8xKLrhnXGTtaaIws9T530LfGAuO4J5OSt+p\n5RGzuM9hVnLBPePC+5itAENTzEz0jbHgCO7ppBwYTkbMZmcLjiRfrOSCe8aF9zFbAemlzPhLs6am\noiPIHwuO4J5OSuv68qXMuM9hVnLBPeMia08ThZmlPmbt7UVHkD8WHME9nZSO9nASm4p88r+VXHDP\nuMja00RhZmny/759RUeQPxYcwT2dlCOH64D4J/9byQX3jIusPU0UZpbaZWzeXHQE+WPBEdzTSene\nHM5isbfLsJIL7hkXWXvaKMzKI2YFx7EajI8XHUH+WHAE93RSpo/baJdhJRfcMy6y9jRSmIWfFib/\nT0wUHUH+WHAE93RSZo7baDBrJRfcMy6y9jRRmFmaY2ahb4wFR3BPJ+XM0208kslKLrhnXHgfsxVQ\nMtRg1kLfGAuO4J5Oyo8HbIyYWckF94wL72O2AiSZ/m+hXUZzc9ER5I8FR3BPJ6W1RagvSfRzzKzk\ngnvGRdaeNgozQ3PMWlqKjiB/LDiCezopLS3QWF+KfsTMSi64Z1xk7WmsMCs2jtVgeLjoCPLH
giO4\np5MyPAwNdaXoR8ys5IJ7xkXWniYKs2cn/xtomLFlS9ER5I8FR3BPJ2XLljBiNhX5Q8yt5IJ7xkXW\nnqYKMwtzzEZGio4gfyw4gns6KSMj0GhgxMxKLrhnXGTtaaIwE0N3ZU5OFh1B/lhwBPd0UiYnyyNm\ncRdmVnLBPeMia09ThZmBusxE3xgLjuCeTsrWrdBQJ0xHPmJmJRfcMy68j9kKKLfLsHBXpoW+MRYc\nwT2dlP5+GyNmVnLBPePC+5itgHKD2fjLMmhtLTqC/LHgCO7ppLS2hrsyY2+XYSUX3DMusvY0UZiV\nH2I+Z2D2f1NT0RHkjwVHcE8npakpTP6P/ZFMVnLBPeMia08ThZmlEbMDB4qOIH8sOIJ7OikHDtho\nMGslF9wzLrL2NFGYiaF2GT09RUeQPxYcwT2dlJ4eG+0yrOSCe8ZF1p5GCrPw08Lk/4MHi44gfyw4\ngns6KQcP2hgxs5IL7hkXWXuaKMye7fwff13G1FTREeSPBUdwTydlasrGI5ms5IJ7xkXWnksWZiLy\ncRHZLyKPVSz7oIgMiMgjyb/XV6y7TkR2isgOEXldxfKXiMijybqPSnJ9UUTWicgdyfIHROT0bBVJ\nmmXYaDBroW+MBUdwTydl69akXUbkhZmVXHDPuCiij9ktwPYFln9EVS9M/n0BQETOA64Ezk/ec5OI\n1CXbfwx4G3B28q+8z7cCh1X1LOAjwIdW6LIo6bMy48dC3xgLjuCeTkp/fzJiFvmzMq3kgnvGxar3\nMVPVrwOHqtzfFcDtqnpcVZ8GdgIXiUgP0Kaq92uY6HUb8IaK99yavP40cFl5NG0+Imj5X5XxlN8H\n2Bgxa2srOoL8seAI7umktLXBuvoSUzOzRYeSK1ZywT3jImvP+pN47ztF5CrgIeA9qnoY6AXur9hm\nb7JsOnk9fznJzz0AqjojIkeATuCEN6AePBhuUe3pCa+npsJwYn9/OEj19XDoEPT2wp4fh/ccPw47\ndkB7e/h9ZAT6+mBgAEol6O4Orzs6YGYGRkfTfTY2QmcnDA5CV1d4NtbYWLq+qSnsd98+2LwZxsdh\nYiJd39wMLS0wPByeRD8yEvZRXt/aGvZRrdPQEMzNJX57Uqfdu2Hjxric5v+dSqXwd4zJaaG/U3Nz\n8IzJaaG/E4T9VeO0bVuVZ6fIqK9PHskU+YhZ/cn8H6mGcM+4yNpTqrlTMZn3dbeqXpD83k0onBT4\nY6BHVX9DRG4E7lfVTyTb3QzcA+wGblDV1yTLXwW8T1UvT+aubVfVvcm6XcDFqrpUZ5Cqz1DTs3Oc\n/fv38N6f/yne8eqzq31bTbJjR/z/87LgCO65CAuOptcgy6qwduyAu3Y/yV997Ufs+pPXL/2GGsVz\nPi7c83lUdf5a0V2ZqjqkqrOqOgf8NXBRsmoA6KvY9LRk2UDyev7y57xHROqBjUCmN5+mk/+z3Ova\npLd36W1qHQuO4J5OSm9vmGM2O6fMRnwis5IL7hkXWXuuqDBL5oyV+WWgfMfmXcCVyZ2WZxAm+T+o\nqoPAqIhckswfuwr4fMV7rk5evxG4TzNuOGapXcbQUNER5I8FR3BPJ2VoCNbVh/uoYr4z00ouuGdc\nZO255JVREfkUcCnQJSJ7gT8CLhWRCwnD8buBtwOo6uMicifwBDADXKuq5dmq1xDu8FxPuLx5T7L8\nZuBvRWQn4SaDK7MQe65D+Glh8v9cvOfsZ7HgCO7ppMzNwfqG8D362PQs6xvrlnhHbWIlF9wzLrL2\nXLIwU9U3L7D45hNsfz1w/QLLHwIuWGD5JPCmpeI4GcRQuwwLQ8cWHME9nZTeXmgeCafriakZOloa\nC44oH6zkgnvGxZq4lFmLlESYNVC+79lTdAT5Y8ER3NNJ2bOHZ0fJJqbibZlhJRfcMy6y9jRTmDXU\nCTOR32oOaeuCmLHgCO7ppLS3Q8u6+AszK7ngnnGRtaehwqzEVOQPAHYcJ17WN6SXMh3HiRczhVmd\nlJg2UJiNjBQdQf5YcAT3dFJGRqA5uZR5LOIRMyu54J5xkbWnmcKsqaHE9Ez8lzL7+pbeptax4Aju\n6aT09aWF2XjEhZmVXHDPuMja00xhJoiJEbOBgaW3qXUsOIJ7OikDA9C8LlzKPBbxpUwrueCecZG1\np5nCrKFU4riBwqxk4C9qwRHc00kplaC5If7J/1ZywT3jImtPI48YhaZ1JaYj7phdpru76Ajyx4Ij\nuKeT0t0N9QbaZVjJBfeMi6w9jdSzwKyNyf8Who4tOIJ7OikDA7CuvkRJ4p78byUX3DMu/FLmClm/\nrsS0gT5mHR1FR5A/FhzBPZ2Ujo7wBJOWxnrGI55jZiUX3DMusvY0U5jVl8REH7OZeM/Zz2LBEdzT\nSSkfo/WNdVGPmFnJBfeMi6w9zRRmMmfjUuboaNER5I8FR3BPJ6V8jJob66KeY2YlF9wzLrL2NFOY\nbWi1UZht3Vp0BPljwRHcsyhE5OMisl9EHqtY1iEiXxaRp5KfmyrWXSciO0Vkh4i8Lo+YysdofWN9\n1IXZWsuFvHDPuMja00xhNn3cRoPZ/v6iI8gfC47gngVyC7B93rL3A/eq6tnAvcnviMh5wJXA+cl7\nbhKRuqwDKh+jlsa6qB/JtAZzIRfcMy6y9jRTmK2rt/GszMbGoiPIHwuO4J5FoapfBw7NW3wFcGvy\n+lbgDRXLb1fV46r6NLATuGih/Yqg5X/Ljal8jNZHfilzreVCXrhnXGTtaaaPWUuzMHUw/sKss7Po\nCPLHgiO45xqjW1UHk9f7gHLnol7g/ort9ibLTsjBg3DgAPT0hNdTU+FySH8/tLVBfT0cOgS9vTA0\nFOawTEzA3PE6jk4cZ2goPJ+vry/cql8qhV5KAwPhDrGZmfCe8j4bG8NxHhyEri6YnISxsXR9UxO0\nt8O+fbB5M4yPh88rr29uhpYWGB6GLVvCZ09OputbW8M+luM0Nxde79kTPhvC+rJHLE4L/Z0OHQr7\nj8lpob/ToeTrTUxOC/2djh+HHTuWdtq2rbqTjajW7OW9ZQX+2x//Pg8P7ufB339NXvGsCXbsqP6P\nX6tYcAT3XATJMZT0Q0ROB+5W1QuS30dUtb1i/WFV3SQiNwL3q+onkuU3A/eo6qeX+Ihlnb/Kx+h3\n7niEh/oP8Y3fe/Vy3l4zeM7HhXs+j6rOX2YuZba12Jj839VVdAT5Y8ER3HONMSQiPQDJz/3J8gGg\n8hHGpyXLMqV8jJob65g4Hu+lzBrJhZPGPeMia08zhZmojQazk5NFR5A/FhzBPdcYdwFXJ6+vBj5f\nsfxKEVknImcAZwMPZv3h5WMUe7uMGsmFk8Y94yJrTzNzzHTWxuT/sbGiI8gfC47gnkUhIp8CLgW6\nRGQv8EfADcCdIvJWoB/4VQBVfVxE7gSeAGaAa1U188qpfIzWN9ZzbHqWuTmlVFqVq7qrylrLhbxw\nz7jI2tNMYda1SZienUNVEYnvhFbGQt8YC47gnkW
hqm9eZNVli2x/PXB9fhGlx6g5eZD55MwszY3x\nnb7XWi7khXvGhfcxWyHjR0uowuxc3JczLfSNseAI7umkVPYxA6K9nGklF9wzLryP2QppWhdUY7+c\n2dRUdAT5Y8ER3NNJKR+j9ckoWaw3AFjJBfeMi6w9zRRmbS1BNfbu/+3tS29T61hwBPd0UsrHqHwp\nc2I6zu7/VnLBPeMia08zhdmx8TCvLPYRs337io4gfyw4gns6KeVjtKEpjJgdnYyzMLOSC+4ZF1l7\nminMOtqTEbPIC7PNm4uOIH8sOIJ7OinlY9TW1ADA6LHpAqPJDyu54J5xkbWnmcJsdsZGYTY+XnQE\n+WPBEdzTSSkfo43rQ2F2JNLCzEouuGdcZO1ppjCbm7ZRmE1MFB1B/lhwBPd0UsrHqG193CNmVnLB\nPeMia08zhdmpW5K7MiOf/G+hb4wFR3BPJ6V8jNqSOWZHjsU5x8xKLrhnXHgfsxVy+GCY/B/7iJmF\nvjEWHME9nZTyMaqvK9HSWMfoZJwjZlZywT3jwvuYrZDWZht9zJqbi44gfyw4gns6KZXHaOP6hmjn\nmFnJBfeMi6w9zRRmG5rLfcziLsxaWoqOIH8sOIJ7OimVx6gt4sLMSi64Z1xk7WmmMBs7amPEbHi4\n6Ajyx4IjuKeTUnmM2tY3RDv530ouuGdcZO1ppjDrOaV8V2bck/+3bCk6gvyx4Aju6aRUHqO2pnhH\nzKzkgnvGRdaeZgqzY+M22mWMjBQdQf5YcAT3dFIqj9HG9Q3Rdv63kgvuGRdZe5opzOZmbNyVOTlZ\ndAT5Y8ER3NNJqTxGMU/+t5IL7hkXWXuaKcy29pX7mMVdmFnoG2PBEdzTSak8Rm3r6xk7PsNMhF8y\nreSCe8aF9zFbIfv32ZhjZqFvjAVHcE8npfIYlR/LFOPlTCu54J5x4X3MVsjGDeURs9mCI8mX1tai\nI8gfC47gnk5K5TEqP8g8xsuZVnLBPeMia08zhVlrc3mOWdwjZk1NRUeQPxYcwT2dlMpjVB4xi7H7\nv5VccM+4yNrTTGE2ethGH7MDB4qOIH8sOIJ7OimVx6j8IPMYR8ys5IJ7xkXWnmYKs77TbLTL6Okp\nOoL8seAI7umkVB6jjREXZlZywT3jImtPM4XZ4UNCfUmiL8wOHiw6gvyx4Aju6aRUHqNNLaEwOzw+\nVVA0+WElF9wzLrL2NFOYTU1BQ10p+jlmU/Gdq5+HBUdwTyel8hh1NDciAgfG4jtwVnLBPeMia08z\nhdnWrdBQJ97HLAIsOIJ7OimVx6i+rsSm5kYOjB0vLqCcsJIL7hkX3sdshfT3Q2N9KfpLmRb6xlhw\nBPd0UuYfo86WRg5GOGJmJRfcMy5WvY+ZiHxcRPaLyGMVyzpE5Msi8lTyc1PFuutEZKeI7BCR11Us\nf4mIPJqs+6iISLJ8nYjckSx/QEROz1Yx0NYWLmXGPmLW1lZ0BPljwRHc00mZf4y6WtdxcDy+ETMr\nueCecZG1ZzUjZrcA2+ctez9wr6qeDdyb/I6InAdcCZyfvOcmEalL3vMx4G3A2cm/8j7fChxW1bOA\njwAfWqnMiaivL88xi7swq68vOoL8seAI7umkzD9Gna2NUc4xs5IL7hkXWXsuWZip6teBQ/MWXwHc\nmry+FXhDxfLbVfW4qj4N7AQuEpEeoE1V71dVBW6b957yvj4NXFYeTZuPCFr+V51eyqFDyRyzyAuz\nQ/P/UhFiwRHc00mZf4y6WtdFOcfMSi64Z1xk7bnSOq9bVQeT1/uA7uR1L3B/xXZ7k2XTyev5y8vv\n2QOgqjMicgToBE7Ysu3gwdDUracnvJ6aChPw+vvDsGJ9fThYvb0wNATHjsG6+joOHJ5jaCjsY2QE\n+vpgYABKJejuDq87OmBmBkZH0302NkJnJwwOQldXeJr82Fi6vqkJ2tth3z7YvBnGx2FiIl3f3Awt\nLTA8DFu2hM+enEzXt7aGfSzHaW4uvN6zJ3w2pJ8bk9P8v9PGjbBjR1xOC/2dOjuDZ0xOC/2dmprC\ndtU4bdtW7SkqLnp7n/t7V2sjRydnmJyepamhbuE31SDzPWPFPeMia08JA1hLbBTmfd2tqhckv4+o\nanvF+sOquklEbgTuV9VPJMtvBu4BdgM3qOprkuWvAt6nqpcnc9e2q+reZN0u4GJVXaqX7rJGzXbt\nguv+5V8R4I63v3w5b60pdu2CM88sOop8seAI7rkIC46m1yDLPn9VHqNPPfgM1332Ub79/ldzavv6\nrGMrDM/5uHDP51HV+Wuld2UOJZcnSX7uT5YPAH0V252WLBtIXs9f/pz3iEg9sBHIvC3d3By0NNYx\nMRX3Q8zn4r5SC9hwBPd0UuYfo67WdQDR3ZlpJRfcMy6y9lxpYXYXcHXy+mrg8xXLr0zutDyDMMn/\nweSy56iIXJLMH7tq3nvK+3ojcJ9WM4y3THp7oXldPeNTM1nvek1hYejYgiO4p5My/xh1tjYCRDfP\nzEouuGdcZO1ZTbuMTwH/CmwTkb0i8lbgBuC1IvIU8Jrkd1T1ceBO4Angi8C1qloeoroG+BvCDQG7\nCJc4AW4GOkVkJ/C7JHd4Zs2ePWHE7FjkI2Z79hQdQf5YcAT3dFLmH6OuljBiFlthZiUX3DMusvZc\ncvK/qr55kVWXLbL99cD1Cyx/CLhggeWTwJuWiuNkaW+H5sZ6xo/HPWLW3r70NrWOBUdwTydl/jHq\n2hBGzA5G9rxMK7ngnnGRtaeZzv8AzckcsxyulDqO46wazY31tDTWMTQ6WXQojuNkjJnCbGQEWtbV\nMzOnUfcyGxkpOoL8seAI7umkLHSMetrXMzgSV2FmJRfcMy6y9jRTmPX1hREzgInj8c4z6+tbepta\nx4IjuKeTstAx6tnYxOCRY6sfTI5YyQX3jIusPc0UZgMD0NIYptTFfGfmwMDS29Q6FhzBPZ2UhY5R\nKMziGjGzkgvuGRdZe5opzEolaF4XRsxivjOzZOAvasER3NNJWegY9Wxcz/DYcaZm4pmaYSUX3DMu\nsvY0ctjC42HSEbN4C7Pu7qW3qXUsOIJ7OikLHaNT25tQJaobAKzkgnvGRdaeZgqzgYHKOWZ+KbOW\nseAI7umkLHwpMzyKKabLmVZywT3jImvPlT7EvObo6ICp4/GPmHV0FB1B/lhwBPd0UhY6Rj0bmwCi\nugHASi64Z1xk7WlmxGxmJp1jNhHx5P+ZeNWexYIjuKeTstAx6mmPb8TMSi64Z1xk7WmmMBsdrZhj\nFnG7jNHRoiPIHwuO4J5OykLHqHVdPRua6hkciWfEzEouuGdcZO1p5lLm1q0wRfwjZlu3Fh1B/lhw\nBPd0UhY7Rr3t6xmIqDCzkgvuGRdZe5oZMevvh+aGcmEW74hZf3/REeSPBUdwTydlsWPU19FM/8GJ\n1Q0mR6zkgnvGRdaeZgqzxkaoryuxrr4UdYPZxsaiI8gfC47gnk7KYsfo9M5mnjk0wdxcHM//tZIL\n7hkXWX
uaKcw6O8PP5sa6qB/JVPaMGQuO4J5OymLHaGtnC8dn5hg6GscNAFZywT3jImtPM4XZ4GD4\n2dxYH/WIWdkzZiw4gns6KYsdo62dzQDsPhDH5UwrueCecZG1p5nCrKsr/GxZVxf1I5nKnjFjwRHc\n00lZ7Bid3tkCwDOHxlcxmvywkgvuGRdZe5opzCaTkf4wYhZvYTYZxxWNE2LBEdzTSVnsGPVsbKKh\nTtgdyQ0AVnLBPeMia08zhdnYWPjZsq4u6kcylT1jxoIjuKeTstgxqq8rcdqmZvoPxjFiZiUX3DMu\nsvY0U5iV+4w0N9YzFnFhZqFvjAVHcE8n5UTHaGtnczRzzKzkgnvGhfcxWyHlPiNtTQ0cnYy3MLPQ\nN8aCI7ink3KiY3Tm5lZ2DY8xG0HLDCu54J5x4X3MVkhTeN4v7c0NHJ6YKjaYHCl7xowFR3BPJ+VE\nx+inuls5PjPHnkO1P2pmJRfcMy6y9jRTmLW3h5+bmhuYmJrl+EycNwCUPWPGgiO4p5NyomN0dvcG\nAH44dHSVoskPK7ngnnGRtaeZwmzfvvCzvTm06B2ZmC4wmvwoe8aMBUdwTyflRMfo7FNaAXhqf+3P\ntLaSC+4ZF1l7minMNm8OPzclhVmslzPLnjFjwRHc00k50THa0NTAqRubohgxs5IL7hkXWXuaKczG\nk7vJNzU3AHB4PM4Rs/E47po/IRYcwT2dlKWO0dndG/jhUO2PmFnJBfeMi6w9zRRmE8m82PRSZpwj\nZhO1P/93SSw4gns6KUsdo21bNrBreIzp2bnVCSgnrOSCe8ZF1p5mCrNyn5FNLcmIWaRzzCz0jbHg\nCO7ppCx1jM7raWNqZo6dNT7PzEouuGdceB+zFVLuMxL7HDMLfWMsOIJ7OilLHaMLejcC8OjAkVWI\nJj+s5IJ7xoX3MVshzc3hZ1NDHU0NpWgvZZY9Y8aCI7ink7LUMfrJrhZaGut4vMYLMyu54J5xkbWn\nmcKspSV9vam5MdpLmZWesWLBEdzTSVnqGJVKwvmnbqz5ETMrueCecZG1p5nCbHg4fd3e3BjtiFml\nZ6xYcAT3dFKqOUYX9G7kicFRZmr4BgArueCecZG1p5nCbMuW9PWm5oZoR8wqPWPFgiO4p5NSzTF6\nwWkbmZyeq+lGs1ZywT3jImtPM4XZyEj6OlzKjHPErNIzViw4gns6KdUcoxf/xCYAvtN/OOdo8sNK\nLrhnXGTtaaYwm5xMX7c3N0T7SKZKz1ix4Aju6aRUc4z6OtazecO6mi7MrOSCe8ZF1p5mCrPKPiMd\nLfr7z0QAACAASURBVGGO2eycFhdQTljoG2PBEdzTSanmGIkIL926iYf6D+UfUE5YyQX3jAvvY7ZC\nKvuMnNLWxJzCgbHjxQWUExb6xlhwBPd0Uqo9Ri/Zuok9h46xf7Q2hyqs5IJ7xoX3MVshra3p6562\nJgD2HanNk9eJqPSMFQuO4J5OSrXH6GWndwDwwNO1OWpmJRfcMy6y9jRTmDU1pa+3bAy/DEZYmFV6\nxooFR3BPJ6XaY3T+qW1saKrn27sO5BtQTljJBfeMi6w9zRRmByrOU+XCbKhGh/tPxIHaPB8vCwuO\n4J5OSrXHqL6uxMt/spNv7TyYb0A5YSUX3DMusvY0U5j19KSvO5obaaiTKEfMKj1jxYIjuKeTspxj\n9Iqzunjm0ATPHJzIL6CcsJIL7hkXWXuaKcwOVnyBLJWE7ramKEfMDtbmF+VlYcER3NNJWc4xesVZ\nXQB8c2ftDVdYyQX3jIusPc0UZlPz+sluaWti8MixYoLJkfmeMWLBEdxzrSIiu0XkURF5REQeSpZ1\niMiXReSp5OemLD9zOcfozM0t9Lav574n92cZwqpQa7mwUtwzLrL2NFOYze8zsmVjE0Oj8bXLsNA3\nxoIjuOca5+dU9UJVfWny+/uBe1X1bODe5PfMWM4xEhEuO/cUvrlzmMnp2SzDyJ0azYVl455x4X3M\nVsj8PiPlETPVuJrMWugbY8ER3LPGuAK4NXl9K/CG+RuIoOV/y935co/RZed2Mzk9V3N3Z0aSC0vi\nnnGRtWd9trtbu7S1Pff3LRubmJyeY/TYDBubG4oJKgfme8aIBUdwzzWMAv8iIrPA/1bV/w/oVtXB\nZP0+oPtEOzh4MNzJ1dMTXk9NhW/d/f3heNTXw6FD0NsLQ0Nh274+2LMH2tvDPkZGwrKBASiVoLs7\nvO7ogNPXd7C+vo4vPrqfXummsRE6O2FwELq6wiNkxsbSz2xqCvvdtw82b4bxcZiYSNc3N0NLCwwP\nhwc2j4yEfZTXt7aGfSzHaW4uvK502r9/caeZGRgdTfdZK04L/Z3274/PaaG/0/798Tkt9HcaH4cd\nO5Z22ratuhOMnMyIkYjsBo4Cs8CMqr5URDqAO4DTgd3Ar6rq4WT764C3Jtu/S1W/lCx/CXALsB74\nAvBuXTqwZQU+PBwOUJkvPDrINZ98mH961ys5/9SNy9nVmma+Z4xYcAT3XATJMZTqAhDpVdUBETkF\n+DLwTuAuVW2v2Oawqp5ontlJnb+q4R1/9zDf3nWQBz9wGfV1tXFxxHM+LtzzeVR1/sriv9aq5lqI\nyHnAlcD5wHbgJhGpS97zMeBtwNnJv+0ZxPUcDs1rhH16ZwsAuw/U3i3lJ2K+Z4xYcAT3XKuo6kDy\ncz/wOeAiYEhEegCSn5nOvF/JMbr8BadyaHyK+39UOwe41nJhpbhnXGTtmcfXqMXmWlwB3K6qx1X1\naWAncFFyEmtT1fuTUbLbWGB+xsnS2/vc30/vagbg6QNjWX9Uocz3jBELjuCeaxERaRGRDeXXwM8D\njwF3AVcnm10NfD7Lz13JMbp022ZaGuu4+/s/zjKUXKmlXDgZ3DMusvY82cKsPNfiOyLyW8myxeZa\n9AJ7Kt67N1nWm7yev/x5nMzk2aGh5/7e3FhPd9s6no5sxGy+Z4xYcAT3XKN0A98Uke8BDwL/pKpf\nBG4AXisiTwGvSX7PjJUco6aGOl53/hb+6dFBjk3Vxt2ZNZYLK8Y94yJrz5Od/P/KyrkWIvJk5UpV\nVRHJ5bbH5U6e/dGPwraVk/26m1vYtX+Mp56KZ6Lpzp3h/TFPnj16NEy0jMlpob/T9HTwjMlpob/T\nkSPh86txqnbybF6o6o+AFy6w/CBwWV6fOze3svf9x5f18dnvDvBPjw7yxpeclm1QObBSz1rDPeMi\na8+Tmvz/nB2JfBAYI8wVu1RVB5PLlF9V1W3JxH9U9X8m238J+CDhBoGvqOo5yfI3J+9/+xIfuazA\nJybC/6Aque6z3+dLjw/x8H977XJ2taZZyDM2LDiCey5C4ZP/M+Kkz19VfYgql334a3Q0N/Lp//wz\ny9/BKuM5Hxfu+Tzynfy/grkWdwFXisg6ETmDMMn/weSy56iIXCIiAlxFxvMzIHyzn88ZXS0cGp/i\nyMR01h9XGAt5xoYFR3BPJ2Wlx0hEuPJlfTzUf5gfDh3NNqgcsJI
L7hkXWXuezByzZc21UNXHgTuB\nJ4AvAteqanniwzXA3xBuCNgF3HMScS1Ie/vzl5XvzHz64HjWH1cYC3nGhgVHcE8n5WSO0a+8+DQa\n6oTbH1z7/5e0kgvuGRdZe654jtlK5lqo6vXA9Qssfwi4YKWxrJQzT2kF4IdDR7mwz0gGOY5jis7W\ndfz8eVv47Hf38nvbt9HUULf0mxzHKYza6DqYASMjz192emcL6xvqeOLHo6sfUE4s5BkbFhzBPZ2U\nkz1Gb3n5VkYmpvn77+xdeuMCsZIL7hkXWXuaKcz6+p6/rK4knNuzIarCbCHP2LDgCO7ppJzsMbr4\njA5e/BPt/NVXdzE9u3ZvlbOSC+4ZF1l7minMBgYWXn7eqW08MTjK3FwcDzNfzDMmLDiCezopJ3uM\nRIRrf+4sBkaOcdcja7fhrJVccM+4yNrTTGFWWsT0/FM3MnZ8hj2H42g0u5hnTFhwBPd0UrI4Rq8+\n5xTO2bKBm766c81+EbWSC+4ZF1l7GjlsodnlQpx/ahsAj0dyOXMxz5iw4Aju6aRkcYzKo2a7hsf5\nwmODS7+hAKzkgnvGRdaeZgqzxYYaf6p7Aw11wvf2xjFL0cLQsQVHcE8nJatj9Pqf7mFb9wb+1xd3\ncHxm7T2myUouuGdc+KXMFdLRsfDypoY6frp3I//2dMaPhy+IxTxjwoIjuKeTktUxqisJf3D5uTxz\naIJbvrU7m51miJVccM+4yNrTTGE2M7P4uped0cGjA0eYnF573yCXy4k8Y8GCI7ink5LlMXrV2Zt5\n9TmncON9Ozkwdjy7HWeAlVxwz7jI2tNMYTZ6gilkF5/RwfSs8t1nav9y5ok8Y8GCI7ink5L1MfrA\n68/l2PQsf/bPP8x2xyeJlVxwz7jI2tNMYbZ16+LrXrK1AxF4MILLmSfyjAULjuCeTkrWx+isU1r5\n9Z85nU89+Az/uutgtjs/CazkgnvGRdaeZgqz/v7F121c38B5PW18a+eB1QsoJ07kGQsWHME9nZQ8\njtF7fn4bWzubed9nvs/E1Nq45mQlF9wzLrL2NFOYNTaeeP3PbTuFh/oPMTIxtToB5cRSnjFgwRHc\n00nJ4xitb6zjQ7/yAp45NMH/+uKO7D9gBVjJBfeMi6w9zRRmnZ0nXv/qc09hTuFrPxxenYByYinP\nGLDgCO7ppOR1jC75yU6ufvlWbvn2br66Y38+H7IMrOSCe8ZF1p5mCrPBJfopvvC0djpaGrnvyeJP\nTifDUp4xYMER3NNJyfMYvf8XzuWcLRv4nTse4ccjx/L7oCqwkgvuGRdZe5opzLq6Try+riRcds4p\n3PuD/TXdNmMpzxiw4Aju6aTkeYzWN9Zx06+9mKmZOd7xdw8X+pBzK7ngnnGRtaeZwmxycultrriw\nl7HjMzU9alaNZ61jwRHc00nJ+xj95OZWPvTGF/DwMyP84ecfR7WYZ2layQX3jIusPc0UZmNjS2/z\n8jM72bxhHZ9/pHafI1GNZ61jwRHc00lZjWN0+QtO5ZpLz+RTDz7D//76j/L/wAWwkgvuGRdZe5op\nzKrpM1JXEn7phady35P7GT66tjpiV4uFvjEWHME9nZTVOkbv/fltXP6CHm6450n+8Xs/Xp0PrcBK\nLrhnXHgfsxVSbZ+R/+vin2B6Vrnj357JN6CcsNA3xoIjuKeTslrHqFQS/vRNL+Rlp2/id+54hH95\nYmh1PjjBSi64Z1x4H7MV0tRU3XZnbm7llWd18ckHnmGmwEmwK6Vaz1rGgiO4p5OymseoqaGOm3/9\nZZx/ahvXfPLhVW0hZCUX3DMusvY0U5i1t1e/7a//zOkMHpnkrgKG8k+W5XjWKhYcwT2dlNU+Rm1N\nDdz2Gxdz1imtvO22h/jyKo2cWckF94yLrD3NFGb79lW/7WXnnsI5WzZw41d2MjtXzN1JK2U5nrWK\nBUdwTyeliGO0sbmBT/7mxZzX08Zvf+I7/P1De3L/TCu54J5xkbWnmcJs8+bqtxUR3nXZ2fxoeJzP\nfbe27tBcjmetYsER3NNJKeoYbWpp5JO/eTE/c2Yn//XT3+cjX/4hczl+WbWSC+4ZF1l7minMxseX\nt/3287fwwtM28qdf2rFmHvBbDcv1rEUsOIJ7OilFHqOWdfX8zdUv5VdefBp/ce9T/PYnvsPY8XzO\niVZywT3jImtPM4XZxMTyti+VhD+4/Dz2jU5y43078wkqB5brWYtYcAT3dFKKPkbr6uv40ze9gD+8\n/DzufXI//+Gmb7H7QPb/1y3ac7Vwz7jI2lOK6vCcAcsKfHJyZXdOvPfvv8fnvjvAXe94BeefunH5\nO1hlVupZS1hwBPdcBMkxlNVkVc5fefCtnQe49u8eZmZW+R9XnM8vv6gXkWz+LGvJM0/cMy6W4VnV\nfyhmRsxW2mfkD37xXDY1N/KuT303t+H7LLHQN8aCI7ink7KWjtErzuri7ne+kvN62vjdO7/HOz71\nXUYmpjLZ91ryzBP3jAvvY7ZCmptX9r725kY+euWFPH1gnPd/5vuFPUOuWlbqWUtYcAT3dFLW2jE6\nbVMzn/qtS/ivr9vGlx7bx/Y//0YmzWjXmmdeuGdcZO1ppjBraVn5e3/mrC7e+7pt3P39QW759u7M\nYsqDk/GsFSw4gns6KWvxGNWVhGt/7iw+d80raFtfz2/e9hBv/9uH+PHIsRXvcy165oF7xkXWnmYK\ns+GTbF792//uTF5zbjd/fPcT3P39tdt49mQ9awELjuCeTspaPkY/fdpG7n7nq3jf9nP42g+Hec2H\nv8ZffW0Xk9Ozy97XWvbMEveMi6w9zUz+P3IENp7k3P2JqRn+//bONbat8zzAz0fyHF5FSaQk6+Kr\nGseu7bSxtyZp11xaYI3joU0vw9ahHdKh3fZjGdoBu7Trn/xa1wEd2mHtCmwtmg5t0w1tEQfrFiRe\nr2vrJI0d20ri2PJNlmRJJiVRJMX7tx+HEpXEdESKFMlz3gcgeHQOdc77fN/Rx1fvIb/z0Nef4cSV\nBb7ykUO8Z//gxnbYBBrh2e44wRHEswqO/PB/p5wLE/E0jxwd49jLs4z0+Pmr+/fwvrcO43Ktr9s6\nxXOjiKe9qMFTPvy/loWFje8jYHr4+sfexoGRbh7+9gkeP9l+k882wrPdcYIjiKdQoVPaaFskwNc+\n9ja+9Yk76QkYfOq7J3nvP/+cp16cWdfnczvFc6OIp71otKdjErNMpjH76fIZPPpHd3D7th4++dhJ\n/unYubb6QkCjPNsZJziCeAoVOq2NfuuWPp54+J188fdvJ5HJ88fffI4HvvQznnhh6qa3ues0z3oR\nT3vRaE/HXMps9Hwq2UKRz3zvNN8/Mcnh/YN87oO30Rs0G3eAOnHCvDFOcATxrIIjL2V28rlQKJY4\n+sIUX/7RecbnUoz2BfnTe0d58PYRfIb7Va/tZM9aEE97IfOY1Umj5xnxetx84ffeyt8e2cuxl2c4\n/KWf8rNzrf+koxPmjX
GCI4inUKGT28jjdvHBQ1t56i/u5SsfOYTPcPM33zvNXZ87xt/98CWuxCrT\npneyZy2Ip71otKdjKmaTkzAy0pxATl9d5JPfPcGFuRTve+swn/2dN7Ml3Jp/E5rp2S44wRHEswqO\nrJjZ6VzQWnP8Ypxv/vIST47NUNKad+0Z4A/u2M6toX52bLd/vcBO/XkzxPN1rGv8ckxiFotBNNqs\nUCCTL/KVH4/z1Z+MY7gUH797lE/cvYuwz2jeQW9Asz3bASc4gnhWwZGJmV3PhenFZb5z/ArffmaC\n68ksvQGT9x8c5kOHtrJ/ONywWz21G3btz9cinq9DErO1nD0Le/Y0K5QKl2MpPv8/L/PD09fo9hs8\n9I6dfPSu7Qx0bU4FbbM8W4kTHEE8q2CXd+q2HL9aRb5Y4qevzPGNn1zl+MQsuWKJvYNdHLltiCO3\nDXLLQFerQ2wodu/PFcTzdUhitpZEAsLhZoXyes5MLvLFp89x7OUZPC7Fe98yzEPv2MlbtnY39b/A\nzfZsBU5wBPGsgiMTMyedCyVPjidOTfP4iUl+fWUereGWgRBHDgxy+MAQbx7q6vhKmpP6UzxfhSRm\na7l4EXbtalYoNznu9RSP/uIS//ncBKlckTf1B/nAwREevH2EbZHG30isVZ6biRMcQTyr0NnvyBU6\nYvzabF7rOZPI8OTYNf779DWOX4xR0rA9EuC+Pf3ct6eft4/24Tfd1XfYpji1P+1KDZ6SmK2l1SXV\nRCbPf52a5gfPT/LMpTgAB0bCvHvvFt69d4C3jHSve3bsm9Fqz83ACY4gnlVwZGIm5wJcT2Z56sUZ\nnn5xhl+Mx1jOFzE9Lu7cFeG+PQPcs7uPWwZCHVFNk/60F3Ips0LHzgM0EU/zxKkpjr00y4kr85Q0\n9IW83HtrP3eORrhjZ4Qd0UBdA0w7eTYLJziCeFah/d9110fHjl/NZL2emXyRZy/F+fHZOX58dpbx\nuRQAfSGTO3dFuWs0wl2j0bZN1KQ/7UWj5zFzTGLWrpl7PJXjJ6/M8r8vz/Hzc3PMp/MADHR5eduu\nCIe293JgOMy+4TBd6/iGZ7t6NhInOIJ4VqH93mXrwxbjV6Op13MinuaX4zF+dSHGLy/EmF60pmKP\nBk3uHI3wmzsiHNzew77hMF5P6y99Sn/aC6mYVagp8OlpGBpqViiNoVTSjM8lOX4xzrOX4jxzMb46\nwADsjAbYP9LNgeFubt0S4k39Ibb2+vG4K/P+dILnRnGCI4hnFRyZmMm5sH601kzEl/nVRStRO34h\nzuTCMgCm28W+4TAHt/dwcHsvB7f1sLXXv+lVNelPe1GDpyRma5mbg/7+ZoXSPGYTGcamEoxNLXJm\nMsHY9CIT8eXV7YZbsTMaZLQ/yJv6Q/T7QuzfEWCk189g2Ie7AZ9bazc6tS9rRTxviF1OaEeMX7XS\nLM9rixlOTsxz4soCJ64scGpygUy+BEBvwGDfcJj9w93sHw6zbyjMaH+oqWOn9Ke9qMFzXSeVZ0PR\ndBDxeGeeIANhHwNhH+/aO7C6bjGd5/xckgtzScbnUozPJTk3m+TYS7MU1twg2ONSDHb7GOnxs7XX\nSta29vjZ0u1joMtLf5eXSMBsyJcONpNO7ctaEU9hBae0UbM8B7t9HO4e4vABq6yRL5Y4e22JExML\njE0uMjaV4Bv/d4lc0UrWfIaLvYNh9g+H2TsUZvdAiN0DIaIhb0Pikf60F432bJuKmVLqMPAlwA38\nm9b679/gV2oKPJmEUKje6DqDfLHEy1fTxLPLTM4vM7mQ5ur8yvIyM4kMpde0mtul6AuZ9Hd5Gejy\n0R/yMhD2Eg2a9AZNegImPX6D3oBJT9Cgy+tp+YdpndCXIJ5V6Kz/Iqoj49cNaKVnvlhifC7J2GSC\nsakEL05bCdtSprD6mmjQ5JaBELu3hNg90LX63BcyaxoXpT/tRQ2enVMxU0q5gS8Dvw1cBZ5VSh3V\nWr/YqGPMzNj/BDHcLgKFELfdemPRfLHEtcUMM4kMc0tZZpeyzJUfs0sZZpcynJlc5Hoy+7oEbgWP\nS9ETMFYTtp6ASW/AIOw3CHk9dPmsR8hrECovd3k95WWDgOHecIXOCX0J4ilUcEobtdLTcFtVsr2D\nYT70G9Y6rTXXEhlemUlybmaJ87NJXplZ4vGTU69K2Lr9Bjv7guyMBtgZDbKzr/wcDdIbNF93LOlP\ne9Foz7ZIzIA7gPNa6wsASqnHgAeBhiVmpVKj9tTe3MzTcLvYFgm84cS2xZJmcTnPfDrHQjrHfMpa\nXlk3n86vrr86n+bMZJ5ktkAyW7jpfgGUgpBZTt58Hvymh4DhJmC68ZvWc8D04DfdBE23tb283m+4\n8RluJidhslhry9yYZtSLG1WFnpqCiXKTNiNOl1K4FLjL/+kXtaakoaR1zduKJY3WVv+6XQq3Umis\n169sc5XXu1zWscFK9LtLkSbY2QsZv1qDUoqhbj9D3X7uvbVyrUprzexSlnMzVqI2PpfkcizNc5fm\nOfrCFGuHgG6/YSVsfUF2RINs6/UzMVWifw78hpvF5TzRoEkslaM3YJDKFnC7XJgeF8lMnt6gSSyZ\nIxI0WcrkMT0uXEqRzhXpCRjEU9a2xHIer+FGYU0nEvYbzKdyREJeFpfzBAw3Ra3JFUp0+TwspPNE\ngiYLy3lCXjf5ovW3GvR6WEjniAa9xNM5wj4P2XwJjSZgelhctn4vlsrR7TfI5IooBT7DTSKTJxJY\ncTG5MFFgeEFheFyksgV6AwaxVI5IwGQpU7BcXIrlXIFuv0E8lV918ZlutNZkCyXCPoP5tOW5mM4T\n8LopljT5oqbL52E+lSMa8jKfztHl9ZArllZdFsuelotBtmC9efgMN4k1Lr0Bk3SugEspTI+LpUyB\nSNBYbftktoDH5cLjVmUXk+nFDO/a20+p5G/sedcOlzKVUr8LHNZaf6L88x8Cd2qtH3716yrvT7WG\nnU5DoPET7bcdrfQsljSpXIGlTIFkpkAymyexulxgKZMnmSmwlLVes5TJk84VWc4Vred8kXSusLqu\nUK1sJ9iGsM/Dr/76/lrOWUdeypTxq3PIFopMxNNcup7mUizFpViKy7E0F6+nmFpYrno1QuhMvvrR\nQ9wzOrTe87ZzLmXWQywG169bX1GNxSCXgx074PJl655VHo/1gbyREavMeP483H03TExAT4+1j4UF\n2LYNJifB5YItW6zlSAQKBev+Vyv7NE3r7vHT09DXZ00ol0xWtvt81n6vXbM+BJhKWYPMyvZAAIJB\n69sbg4PWsTOZyvZQyNpHLU6lkrW81un55+Hee1vlpMhkDHbsMEjOwGAIdvaVnXbV7hTsKrGcLzIT\nK9DbV+TyZJF8qUgqZcUWDkOxaMU1OGjFaRjQ3W0ds6fHOl46Xdnu9VptHYtBby8sL0M2q1a3+3zW\nY2EBohFYSlr7WNke8FvttrAIfVFYXIR8obI9GAS3G5YSVpvF41DSMNBfLneX78W
cXLL6ZnYOXMrq\nn7m5Vzvl8mAallO4G+Ixyy2Xs+JeOaZpWk7xuOVsOb3a2e+3nCIRq4+zWdgyCFPTGp9PY5iwlFAM\n9CkSCevYAwOa6WsafwBMjyKZVAz0QywOxaImEtXMzUFXl1UJS6Us55lZTQlNJALxmKKnW6FLVlv2\nD2impjUejyYchoV5F6dOWX+H6zn3nDAn0o2YmHCGux08vR43twx03fDG69lCkemFDFMTbkZHYTlf\nLFeKskSCXhbSOYJeD8WSVSlaqQZFgqZVDfIZ5AolCiVN0OtmMW1V1OZTOcJ+g0y+iNbgN61KXG/A\nJF6ubKVzBdwuqxqUWC7QW64G9QasapDhVrhdilS2SG/A4PqaKt3K3G/pfIEev0kslSUa9LKwnCNg\neNBoMnkr3ngqRzRkMp/KMzvlYduOEoWiFe9KlS6esqpXuWKRYgkC5Xgj5epgt99gOW9V4rxr4o2n\ncvT4y9Urt8Jwu1jKWJ4rlbhEJo/X40IpRTpXideqxBXwGdb0Usv5ImGfQTydIxo0mU/nCZpuStrq\npy6fdbxoue1DXg/5oqZQKq1WFQe7rdkPzp9r7HnbLhWztwOPaK3vL//8GQCt9edu8ms1BT4zY70R\n2h0neDrBEcSzCo6smMm5YC/E017U4Lmu8cv1xi/ZFJ4FdiuldimlTODDwNEWxyQIgiAIgrCptEVi\nprUuAA8DTwIvAf+htR5r5DEWFhq5t/bFCZ5OcATxFCo4pY3E016IZ320xaXMOpEPz94AJ3g6wRHE\nswqOvJQp54K9EE97UYNnR13KbDqTk62OYHNwgqcTHEE8hQpOaSPxtBfiWR+OScxcDjF1gqcTHEE8\nhQpOaSPxtBfiWef+Gru79sUJ3wwBZ3g6wRHEU6jglDYST3shnvXhmMRMSqr2wQmOIJ5CBae0kXja\nC/GsD8ckZhGH3PXFCZ5OcATxFCo4pY3E016IZ304JjErvPFtHG2BEzyd4AjiKVRwShuJp70Qz/pw\nTGKWSLQ6gs3BCZ5OcATxFCo4pY3E016IZ304Zh6zTMa6/57dcYKnExxBPKvgyHnM5FywF+JpL2rw\nlHnM1nL5cqsj2Byc4OkERxBPoYJT2kg87YV41odjEjPTbHUEm4MTPJ3gCOIpVHBKG4mnvRDP+nBM\nYhaNtjqCzcEJnk5wBPEUKjiljcTTXohnfTgmMZuebnUEm4MTPJ3gCOIpVHBKG4mnvRDP+vA0dneb\nyro/BKyU9UFbrW3zweEb4gRPJziCeDoAGb9eg3jaC/GsH8dUzARBEARBENodScwEQRAEQRDaBEnM\nBEEQBEEQ2oROnmBWEARBEATBVkjFTBAEQRAEoU2QxEwQBEEQBKFNkMRMEARBEAShTbB9YqaUOqyU\nOquUOq+U+nSr42kkSqlLSqnTSqmTSqnnyusiSqmnlFLnys+9rY6zVpRSX1dKzSqlzqxZV9VLKfWZ\ncv+eVUrd35qoa6eK5yNKqclyn55USh1Zs61TPbcppX6klHpRKTWmlPpkeb3t+rTRyPgl41e7IuNX\nE/tUa23bB+AGxoFRwAReAPa1Oq4G+l0C+l6z7h+AT5eXPw18vtVx1uF1D3AIOPNGXsC+cr96gV3l\n/na32mEDno8Af3mD13ay5xBwqLzcBbxS9rFdnza43WT8kvGrbR8yfjWvT+1eMbsDOK+1vqC1zgGP\nAQ+2OKZm8yDwaHn5UeD9LYylLrTWPwXir1ldzetB4DGtdVZrfRE4j9XvbU8Vz2p0sue01vr58vIS\n8BIwgg37tMHI+CXjV9si41fz+tTuidkIMLHm56vldXZBA08rpX6tlPqT8rotWuuVO3ddA7a0HpEw\nvQAAAfZJREFUJrSGU83Ljn3850qpU+VLBSvlcVt4KqV2AgeB4zirT+vB7u0g45c9+1jGrw262j0x\nszvv1FrfDjwA/JlS6p61G7VVV7XdRHV29SrzL1iXrm4HpoEvtDacxqGUCgHfAz6ltU6s3WbzPhVu\njIxf9kPGrwZg98RsEti25uet5XW2QGs9WX6eBX6AVS6dUUoNAZSfZ1sXYUOp5mWrPtZaz2iti1rr\nEvCvVErgHe2plDKwBrVvaa2/X17tiD7dALZuBxm/AJv1sYxfQANc7Z6YPQvsVkrtUkqZwIeBoy2O\nqSEopYJKqa6VZeA9wBksv4fKL3sIeLw1ETacal5HgQ8rpbxKqV3AbuCZFsTXEFb+0Mt8AKtPoYM9\nlVIK+Brwktb6H9dsckSfbgAZv2T86ihk/FpdvzHXVn/jodkP4AjWtyjGgc+2Op4Geo1iffPjBWBs\nxQ2IAseAc8DTQKTVsdbh9h2sMnge6/r8x2/mBXy23L9ngQdaHf8GPf8dOA2cKv+BD9nA851YZf5T\nwMny44gd+7QJbSfjVxvEW6ObjF8yfm3IVe6VKQiCIAiC0CbY/VKmIAiCIAhCxyCJmSAIgiAIQpsg\niZkgCIIgCEKbIImZIAiCIAhCmyCJmSAIgiAIQpsgiZkgCIIgCEKbIImZIAiCIAhCm/D/VFRueYZk\nwbkAAAAASUVORK5CYII=\n", 692 | "text/plain": [ 693 | "" 694 | ] 695 | }, 696 | "metadata": {}, 697 | "output_type": "display_data" 698 | } 699 | ], 700 | "source": [ 701 | "import matplotlib.pyplot as plt\n", 702 | "%matplotlib inline\n", 703 | "\n", 704 | "def plot_results(loss_history, exp_history):\n", 705 | " fig = plt.figure(figsize=(10, 7))\n", 706 | " fig.add_axes()\n", 707 | "\n", 708 | " ax1 = fig.add_subplot(121)\n", 709 | " ax2 = fig.add_subplot(122)\n", 710 | "\n", 711 | " for ax in [ax1, ax2]:\n", 712 | " ax.spines[\"top\"].set_visible(False)\n", 713 | " ax.spines[\"right\"].set_visible(False)\n", 714 | " ax.spines[\"left\"].set_visible(False)\n", 715 | " ax.spines[\"bottom\"].set_visible(False)\n", 716 | " ax.grid(color='b', linestyle='--', linewidth=0.5, alpha=0.3)\n", 717 | " ax.tick_params(direction='out', color='b', width='2')\n", 718 | " \n", 719 | " ax1.set_title('RMSE Loss')\n", 720 | " ax2.set_title('exp')\n", 721 | " 
ax1.plot(np.arange(len(loss_history)), loss_history)\n", 722 | "    ax2.plot(np.arange(len(exp_history)), exp_history)\n", 723 | "    \n", 724 | "plot_results(loss_history, exp_history)" 725 | ] 726 | }, 727 | { 728 | "cell_type": "markdown", 729 | "metadata": {}, 730 | "source": [ 731 | "# Pytorch with optimizers" 732 | ] 733 | }, 734 | { 735 | "cell_type": "code", 736 | "execution_count": null, 737 | "metadata": { 738 | "collapsed": true 739 | }, 740 | "outputs": [], 741 | "source": [ 742 | "import torch\n", 743 | "from torch.autograd import Variable\n", 744 | "import numpy as np\n", 745 | "\n", 746 | "def rmse(y, y_hat):\n", 747 | "    \"\"\"Compute root mean squared error\"\"\"\n", 748 | "    return torch.sqrt(torch.mean((y - y_hat).pow(2)))\n", 749 | "\n", 750 | "def forward(x, e):\n", 751 | "    \"\"\"Forward pass for our function\"\"\"\n", 752 | "    return x.pow(e.repeat(x.size(0)))\n", 753 | "\n", 754 | "# Let's define some settings\n", 755 | "n = 1000 # number of examples\n", 756 | "learning_rate = 5e-10\n", 757 | "\n", 758 | "# Model parameters\n", 759 | "exp = Variable(torch.FloatTensor([2.0]), requires_grad=False)\n", 760 | "exp_hat = Variable(torch.FloatTensor([4]), requires_grad=True)\n", 761 | "\n", 762 | "# Model definition\n", 763 | "x = Variable(torch.rand(n) * 10, requires_grad=False)\n", 764 | "y = forward(x, exp)\n", 765 | "\n", 766 | "# Optimizer (NEW)\n", 767 | "opt = torch.optim.SGD([exp_hat], lr=learning_rate, momentum=0.9)\n", 768 | "\n", 769 | "loss_history = []\n", 770 | "exp_history = []\n", 771 | "\n", 772 | "# Training loop\n", 773 | "for i in range(0, 10000):\n", 774 | "    opt.zero_grad()\n", 775 | "    print(\"Iteration %d\" % i)\n", 776 | "    \n", 777 | "    # Compute current estimate\n", 778 | "    y_hat = forward(x, exp_hat)\n", 779 | "    \n", 780 | "    # Calculate loss function\n", 781 | "    loss = rmse(y, y_hat)\n", 782 | "    \n", 783 | "    # Do some recordings for plots\n", 784 | "    loss_history.append(loss.data[0])\n", 785 | "    exp_history.append(exp_hat.data[0])\n", 786 | "    \n", 787 | "    # Update model parameters\n", 788 | "    loss.backward()\n", 789 | "    opt.step()\n", 790 | "    \n", 791 | "    print(\"loss = %s\" % loss.data[0])\n", 792 | "    print(\"exp = %s\" % exp_hat.data[0])" 793 | ] 794 | }, 795 | { 796 | "cell_type": "code", 797 | "execution_count": null, 798 | "metadata": { 799 | "collapsed": true 800 | }, 801 | "outputs": [], 802 | "source": [] 803 | }, 804 | { 805 | "cell_type": "code", 806 | "execution_count": null, 807 | "metadata": { 808 | "collapsed": true 809 | }, 810 | "outputs": [], 811 | "source": [] 812 | }, 813 | { 814 | "cell_type": "code", 815 | "execution_count": null, 816 | "metadata": { 817 | "collapsed": true 818 | }, 819 | "outputs": [], 820 | "source": [ 821 | "plot_results(loss_history, exp_history)" 822 | ] 823 | }, 824 | { 825 | "cell_type": "markdown", 826 | "metadata": {}, 827 | "source": [ 828 | "# Tensorflow" 829 | ] 830 | }, 831 | { 832 | "cell_type": "code", 833 | "execution_count": null, 834 | "metadata": { 835 | "collapsed": true 836 | }, 837 | "outputs": [], 838 | "source": [ 839 | "import tensorflow as tf\n", 840 | "\n", 841 | "def rmse(y, y_hat):\n", 842 | "    \"\"\"Compute root mean squared error\"\"\"\n", 843 | "    return tf.sqrt(tf.reduce_mean(tf.square((y - y_hat))))\n", 844 | "\n", 845 | "def forward(x, e):\n", 846 | "    \"\"\"Forward pass for our function\"\"\"\n", 847 | "    # tensorflow has automatic broadcasting \n", 848 | "    # so we do not need to reshape e manually\n", 849 | "    return tf.pow(x, e) \n", 850 | "\n", 851 | "n = 100 # number of examples\n", 852 | "learning_rate = 5e-6\n", 853 | "\n", 854 | "# Placeholders for data\n", 855 | "x = tf.placeholder(tf.float32)\n", 856 | "y = tf.placeholder(tf.float32)\n", 857 | "\n", 858 | "# Model parameters\n", 859 | "exp = tf.constant(2.0)\n", 860 | "exp_hat = tf.Variable(4.0, name='exp_hat')\n", 861 | "\n", 862 | "# Model definition\n", 863 | "y_hat = forward(x, exp_hat)\n", 864 | "\n", 865 | "# Optimizer\n", 866 | "loss = rmse(y, y_hat)\n", 867 | "opt = tf.train.GradientDescentOptimizer(learning_rate)\n", 868 | "\n", 869 | "# We will run this operation to perform a single training step,\n", 870 | "# e.g. opt.step() in Pytorch.\n", 871 | "# Execution of this operation will also update model parameters\n", 872 | "train_op = opt.minimize(loss) \n", 873 | "\n", 874 | "# Let's generate some training data\n", 875 | "x_train = np.random.rand(n) + 10\n", 876 | "y_train = x_train ** 2\n", 877 | "\n", 878 | "loss_history = []\n", 879 | "exp_history = []\n", 880 | "\n", 881 | "# First, we need to create a Tensorflow session object\n", 882 | "with tf.Session() as sess:\n", 883 | "    \n", 884 | "    # Initialize all defined variables\n", 885 | "    tf.global_variables_initializer().run()\n", 886 | "    \n", 887 | "    # Training loop\n", 888 | "    for i in range(0, 500):\n", 889 | "        print(\"Iteration %d\" % i)\n", 890 | "        # Run a single training step\n", 891 | "        curr_loss, curr_exp, _ = sess.run([loss, exp_hat, train_op], feed_dict={x: x_train, y: y_train})\n", 892 | "        \n", 893 | "        print(\"loss = %s\" % curr_loss)\n", 894 | "        print(\"exp = %s\" % curr_exp)\n", 895 | "        \n", 896 | "        # Do some recordings for plots\n", 897 | "        loss_history.append(curr_loss)\n", 898 | "        exp_history.append(curr_exp)" 899 | ] 900 | }, 901 | { 902 | "cell_type": "code", 903 | "execution_count": null, 904 | "metadata": { 905 | "collapsed": true 906 | }, 907 | "outputs": [], 908 | "source": [ 909 | "plot_results(loss_history, exp_history)" 910 | ] 911 | }, 912 | { 913 | "cell_type": "markdown", 914 | "metadata": {}, 915 | "source": [ 916 | "Now let's introduce TensorBoard. This tool is very useful for debugging and comparing different training runs. For example, you can train a model, tune some hyperparameters, and train it again. Both runs can be displayed in TensorBoard simultaneously to highlight the differences. 
TensorBoard can:\n", 917 | "- Display model graph\n", 918 | "- Plot scalar variables\n", 919 | "- Visualize distributions and histograms\n", 920 | "- Visualize images\n", 921 | "- Visualize embeddings\n", 922 | "- Play audio" 923 | ] 924 | }, 925 | { 926 | "cell_type": "code", 927 | "execution_count": 1, 928 | "metadata": {}, 929 | "outputs": [ 930 | { 931 | "name": "stdout", 932 | "output_type": "stream", 933 | "text": [ 934 | "Iteration 0\n", 935 | "loss = 12361.6\n", 936 | "exp = 3.85277\n", 937 | "Iteration 1\n", 938 | "loss = 8699.6\n", 939 | "exp = 3.74877\n", 940 | "Iteration 2\n", 941 | "loss = 6781.7\n", 942 | "exp = 3.66742\n", 943 | "Iteration 3\n", 944 | "loss = 5577.4\n", 945 | "exp = 3.60028\n", 946 | "Iteration 4\n", 947 | "loss = 4743.76\n", 948 | "exp = 3.54299\n", 949 | "Iteration 5\n", 950 | "loss = 4129.67\n", 951 | "exp = 3.49294\n", 952 | "Iteration 6\n", 953 | "loss = 3657.2\n", 954 | "exp = 3.44847\n", 955 | "Iteration 7\n", 956 | "loss = 3281.74\n", 957 | "exp = 3.40843\n", 958 | "Iteration 8\n", 959 | "loss = 2975.81\n", 960 | "exp = 3.37201\n", 961 | "Iteration 9\n", 962 | "loss = 2721.51\n", 963 | "exp = 3.33858\n", 964 | "Iteration 10\n", 965 | "loss = 2506.64\n", 966 | "exp = 3.30769\n", 967 | "Iteration 11\n", 968 | "loss = 2322.6\n", 969 | "exp = 3.27897\n", 970 | "Iteration 12\n", 971 | "loss = 2163.12\n", 972 | "exp = 3.25214\n", 973 | "Iteration 13\n", 974 | "loss = 2023.56\n", 975 | "exp = 3.22695\n", 976 | "Iteration 14\n", 977 | "loss = 1900.36\n", 978 | "exp = 3.20321\n", 979 | "Iteration 15\n", 980 | "loss = 1790.79\n", 981 | "exp = 3.18077\n", 982 | "Iteration 16\n", 983 | "loss = 1692.69\n", 984 | "exp = 3.15949\n", 985 | "Iteration 17\n", 986 | "loss = 1604.33\n", 987 | "exp = 3.13925\n", 988 | "Iteration 18\n", 989 | "loss = 1524.31\n", 990 | "exp = 3.11995\n", 991 | "Iteration 19\n", 992 | "loss = 1451.51\n", 993 | "exp = 3.10151\n", 994 | "Iteration 20\n", 995 | "loss = 1384.98\n", 996 | "exp = 3.08386\n", 997 | "Iteration 21\n", 998 | "loss = 1323.93\n", 999 | "exp = 3.06693\n", 1000 | "Iteration 22\n", 1001 | "loss = 1267.72\n", 1002 | "exp = 3.05066\n", 1003 | "Iteration 23\n", 1004 | "loss = 1215.79\n", 1005 | "exp = 3.035\n", 1006 | "Iteration 24\n", 1007 | "loss = 1167.66\n", 1008 | "exp = 3.01991\n", 1009 | "Iteration 25\n", 1010 | "loss = 1122.93\n", 1011 | "exp = 3.00535\n", 1012 | "Iteration 26\n", 1013 | "loss = 1081.25\n", 1014 | "exp = 2.99129\n", 1015 | "Iteration 27\n", 1016 | "loss = 1042.32\n", 1017 | "exp = 2.97768\n", 1018 | "Iteration 28\n", 1019 | "loss = 1005.86\n", 1020 | "exp = 2.9645\n", 1021 | "Iteration 29\n", 1022 | "loss = 971.66\n", 1023 | "exp = 2.95172\n", 1024 | "Iteration 30\n", 1025 | "loss = 939.505\n", 1026 | "exp = 2.93933\n", 1027 | "Iteration 31\n", 1028 | "loss = 909.217\n", 1029 | "exp = 2.92729\n", 1030 | "Iteration 32\n", 1031 | "loss = 880.639\n", 1032 | "exp = 2.91559\n", 1033 | "Iteration 33\n", 1034 | "loss = 853.628\n", 1035 | "exp = 2.9042\n", 1036 | "Iteration 34\n", 1037 | "loss = 828.06\n", 1038 | "exp = 2.89312\n", 1039 | "Iteration 35\n", 1040 | "loss = 803.819\n", 1041 | "exp = 2.88233\n", 1042 | "Iteration 36\n", 1043 | "loss = 780.807\n", 1044 | "exp = 2.8718\n", 1045 | "Iteration 37\n", 1046 | "loss = 758.93\n", 1047 | "exp = 2.86154\n", 1048 | "Iteration 38\n", 1049 | "loss = 738.107\n", 1050 | "exp = 2.85152\n", 1051 | "Iteration 39\n", 1052 | "loss = 718.264\n", 1053 | "exp = 2.84173\n", 1054 | "Iteration 40\n", 1055 | "loss = 699.331\n", 1056 | "exp = 2.83217\n", 1057 | "Iteration 
41\n", 1058 | "loss = 681.249\n", 1059 | "exp = 2.82282\n", 1060 | "Iteration 42\n", 1061 | "loss = 663.96\n", 1062 | "exp = 2.81368\n", 1063 | "Iteration 43\n", 1064 | "loss = 647.413\n", 1065 | "exp = 2.80473\n", 1066 | "Iteration 44\n", 1067 | "loss = 631.561\n", 1068 | "exp = 2.79596\n", 1069 | "Iteration 45\n", 1070 | "loss = 616.362\n", 1071 | "exp = 2.78738\n", 1072 | "Iteration 46\n", 1073 | "loss = 601.775\n", 1074 | "exp = 2.77897\n", 1075 | "Iteration 47\n", 1076 | "loss = 587.765\n", 1077 | "exp = 2.77072\n", 1078 | "Iteration 48\n", 1079 | "loss = 574.297\n", 1080 | "exp = 2.76264\n", 1081 | "Iteration 49\n", 1082 | "loss = 561.341\n", 1083 | "exp = 2.7547\n", 1084 | "Iteration 50\n", 1085 | "loss = 548.867\n", 1086 | "exp = 2.74691\n", 1087 | "Iteration 51\n", 1088 | "loss = 536.85\n", 1089 | "exp = 2.73927\n", 1090 | "Iteration 52\n", 1091 | "loss = 525.265\n", 1092 | "exp = 2.73176\n", 1093 | "Iteration 53\n", 1094 | "loss = 514.088\n", 1095 | "exp = 2.72438\n", 1096 | "Iteration 54\n", 1097 | "loss = 503.299\n", 1098 | "exp = 2.71713\n", 1099 | "Iteration 55\n", 1100 | "loss = 492.877\n", 1101 | "exp = 2.71001\n", 1102 | "Iteration 56\n", 1103 | "loss = 482.805\n", 1104 | "exp = 2.703\n", 1105 | "Iteration 57\n", 1106 | "loss = 473.064\n", 1107 | "exp = 2.69611\n", 1108 | "Iteration 58\n", 1109 | "loss = 463.639\n", 1110 | "exp = 2.68933\n", 1111 | "Iteration 59\n", 1112 | "loss = 454.514\n", 1113 | "exp = 2.68265\n", 1114 | "Iteration 60\n", 1115 | "loss = 445.676\n", 1116 | "exp = 2.67608\n", 1117 | "Iteration 61\n", 1118 | "loss = 437.11\n", 1119 | "exp = 2.66961\n", 1120 | "Iteration 62\n", 1121 | "loss = 428.805\n", 1122 | "exp = 2.66324\n", 1123 | "Iteration 63\n", 1124 | "loss = 420.749\n", 1125 | "exp = 2.65697\n", 1126 | "Iteration 64\n", 1127 | "loss = 412.93\n", 1128 | "exp = 2.65078\n", 1129 | "Iteration 65\n", 1130 | "loss = 405.339\n", 1131 | "exp = 2.64469\n", 1132 | "Iteration 66\n", 1133 | "loss = 397.965\n", 1134 | "exp = 2.63868\n", 1135 | "Iteration 67\n", 1136 | "loss = 390.8\n", 1137 | "exp = 2.63276\n", 1138 | "Iteration 68\n", 1139 | "loss = 383.834\n", 1140 | "exp = 2.62692\n", 1141 | "Iteration 69\n", 1142 | "loss = 377.059\n", 1143 | "exp = 2.62116\n", 1144 | "Iteration 70\n", 1145 | "loss = 370.468\n", 1146 | "exp = 2.61548\n", 1147 | "Iteration 71\n", 1148 | "loss = 364.053\n", 1149 | "exp = 2.60987\n", 1150 | "Iteration 72\n", 1151 | "loss = 357.807\n", 1152 | "exp = 2.60434\n", 1153 | "Iteration 73\n", 1154 | "loss = 351.724\n", 1155 | "exp = 2.59888\n", 1156 | "Iteration 74\n", 1157 | "loss = 345.797\n", 1158 | "exp = 2.59349\n", 1159 | "Iteration 75\n", 1160 | "loss = 340.021\n", 1161 | "exp = 2.58816\n", 1162 | "Iteration 76\n", 1163 | "loss = 334.389\n", 1164 | "exp = 2.58291\n", 1165 | "Iteration 77\n", 1166 | "loss = 328.896\n", 1167 | "exp = 2.57771\n", 1168 | "Iteration 78\n", 1169 | "loss = 323.538\n", 1170 | "exp = 2.57258\n", 1171 | "Iteration 79\n", 1172 | "loss = 318.308\n", 1173 | "exp = 2.56752\n", 1174 | "Iteration 80\n", 1175 | "loss = 313.204\n", 1176 | "exp = 2.56251\n", 1177 | "Iteration 81\n", 1178 | "loss = 308.219\n", 1179 | "exp = 2.55756\n", 1180 | "Iteration 82\n", 1181 | "loss = 303.351\n", 1182 | "exp = 2.55267\n", 1183 | "Iteration 83\n", 1184 | "loss = 298.595\n", 1185 | "exp = 2.54784\n", 1186 | "Iteration 84\n", 1187 | "loss = 293.946\n", 1188 | "exp = 2.54306\n", 1189 | "Iteration 85\n", 1190 | "loss = 289.403\n", 1191 | "exp = 2.53833\n", 1192 | "Iteration 86\n", 1193 | "loss = 284.96\n", 1194 | "exp = 
2.53366\n", 1195 | "Iteration 87\n", 1196 | "loss = 280.616\n", 1197 | "exp = 2.52903\n", 1198 | "Iteration 88\n", 1199 | "loss = 276.365\n", 1200 | "exp = 2.52446\n", 1201 | "Iteration 89\n", 1202 | "loss = 272.206\n", 1203 | "exp = 2.51994\n", 1204 | "Iteration 90\n", 1205 | "loss = 268.136\n", 1206 | "exp = 2.51546\n", 1207 | "Iteration 91\n", 1208 | "loss = 264.151\n", 1209 | "exp = 2.51104\n", 1210 | "Iteration 92\n", 1211 | "loss = 260.25\n", 1212 | "exp = 2.50665\n", 1213 | "Iteration 93\n", 1214 | "loss = 256.429\n", 1215 | "exp = 2.50232\n", 1216 | "Iteration 94\n", 1217 | "loss = 252.686\n", 1218 | "exp = 2.49802\n", 1219 | "Iteration 95\n", 1220 | "loss = 249.018\n", 1221 | "exp = 2.49377\n", 1222 | "Iteration 96\n", 1223 | "loss = 245.424\n", 1224 | "exp = 2.48957\n", 1225 | "Iteration 97\n", 1226 | "loss = 241.901\n", 1227 | "exp = 2.4854\n", 1228 | "Iteration 98\n", 1229 | "loss = 238.447\n", 1230 | "exp = 2.48127\n", 1231 | "Iteration 99\n", 1232 | "loss = 235.061\n", 1233 | "exp = 2.47719\n", 1234 | "Iteration 100\n", 1235 | "loss = 231.739\n", 1236 | "exp = 2.47314\n", 1237 | "Iteration 101\n", 1238 | "loss = 228.481\n", 1239 | "exp = 2.46914\n", 1240 | "Iteration 102\n", 1241 | "loss = 225.284\n", 1242 | "exp = 2.46517\n", 1243 | "Iteration 103\n", 1244 | "loss = 222.146\n", 1245 | "exp = 2.46123\n", 1246 | "Iteration 104\n", 1247 | "loss = 219.067\n", 1248 | "exp = 2.45734\n", 1249 | "Iteration 105\n", 1250 | "loss = 216.045\n", 1251 | "exp = 2.45348\n", 1252 | "Iteration 106\n", 1253 | "loss = 213.077\n", 1254 | "exp = 2.44965\n", 1255 | "Iteration 107\n", 1256 | "loss = 210.163\n", 1257 | "exp = 2.44586\n", 1258 | "Iteration 108\n", 1259 | "loss = 207.301\n", 1260 | "exp = 2.4421\n", 1261 | "Iteration 109\n", 1262 | "loss = 204.489\n", 1263 | "exp = 2.43838\n", 1264 | "Iteration 110\n", 1265 | "loss = 201.727\n", 1266 | "exp = 2.43468\n", 1267 | "Iteration 111\n", 1268 | "loss = 199.012\n", 1269 | "exp = 2.43102\n", 1270 | "Iteration 112\n", 1271 | "loss = 196.345\n", 1272 | "exp = 2.4274\n", 1273 | "Iteration 113\n", 1274 | "loss = 193.723\n", 1275 | "exp = 2.4238\n", 1276 | "Iteration 114\n", 1277 | "loss = 191.146\n", 1278 | "exp = 2.42023\n", 1279 | "Iteration 115\n", 1280 | "loss = 188.611\n", 1281 | "exp = 2.41669\n", 1282 | "Iteration 116\n", 1283 | "loss = 186.119\n", 1284 | "exp = 2.41319\n", 1285 | "Iteration 117\n", 1286 | "loss = 183.669\n", 1287 | "exp = 2.40971\n", 1288 | "Iteration 118\n", 1289 | "loss = 181.258\n", 1290 | "exp = 2.40626\n", 1291 | "Iteration 119\n", 1292 | "loss = 178.886\n", 1293 | "exp = 2.40283\n", 1294 | "Iteration 120\n", 1295 | "loss = 176.553\n", 1296 | "exp = 2.39944\n", 1297 | "Iteration 121\n", 1298 | "loss = 174.257\n", 1299 | "exp = 2.39607\n", 1300 | "Iteration 122\n", 1301 | "loss = 171.997\n", 1302 | "exp = 2.39273\n", 1303 | "Iteration 123\n", 1304 | "loss = 169.773\n", 1305 | "exp = 2.38942\n", 1306 | "Iteration 124\n", 1307 | "loss = 167.584\n", 1308 | "exp = 2.38613\n", 1309 | "Iteration 125\n", 1310 | "loss = 165.429\n", 1311 | "exp = 2.38286\n", 1312 | "Iteration 126\n", 1313 | "loss = 163.306\n", 1314 | "exp = 2.37962\n", 1315 | "Iteration 127\n", 1316 | "loss = 161.217\n", 1317 | "exp = 2.37641\n", 1318 | "Iteration 128\n", 1319 | "loss = 159.158\n", 1320 | "exp = 2.37322\n", 1321 | "Iteration 129\n", 1322 | "loss = 157.131\n", 1323 | "exp = 2.37005\n", 1324 | "Iteration 130\n", 1325 | "loss = 155.134\n", 1326 | "exp = 2.36691\n", 1327 | "Iteration 131\n", 1328 | "loss = 153.166\n", 1329 | "exp = 2.36379\n", 1330 
| "Iteration 132\n", 1331 | "loss = 151.227\n", 1332 | "exp = 2.3607\n", 1333 | "Iteration 133\n", 1334 | "loss = 149.317\n", 1335 | "exp = 2.35762\n", 1336 | "Iteration 134\n", 1337 | "loss = 147.434\n", 1338 | "exp = 2.35457\n", 1339 | "Iteration 135\n", 1340 | "loss = 145.579\n", 1341 | "exp = 2.35154\n", 1342 | "Iteration 136\n", 1343 | "loss = 143.749\n", 1344 | "exp = 2.34853\n", 1345 | "Iteration 137\n", 1346 | "loss = 141.946\n", 1347 | "exp = 2.34555\n", 1348 | "Iteration 138\n", 1349 | "loss = 140.168\n", 1350 | "exp = 2.34258\n", 1351 | "Iteration 139\n", 1352 | "loss = 138.415\n", 1353 | "exp = 2.33963\n", 1354 | "Iteration 140\n", 1355 | "loss = 136.686\n", 1356 | "exp = 2.33671\n", 1357 | "Iteration 141\n", 1358 | "loss = 134.981\n", 1359 | "exp = 2.33381\n", 1360 | "Iteration 142\n", 1361 | "loss = 133.3\n", 1362 | "exp = 2.33092\n", 1363 | "Iteration 143\n", 1364 | "loss = 131.641\n", 1365 | "exp = 2.32806\n", 1366 | "Iteration 144\n", 1367 | "loss = 130.004\n", 1368 | "exp = 2.32521\n", 1369 | "Iteration 145\n", 1370 | "loss = 128.39\n", 1371 | "exp = 2.32238\n", 1372 | "Iteration 146\n", 1373 | "loss = 126.797\n", 1374 | "exp = 2.31957\n", 1375 | "Iteration 147\n", 1376 | "loss = 125.225\n", 1377 | "exp = 2.31678\n", 1378 | "Iteration 148\n", 1379 | "loss = 123.674\n", 1380 | "exp = 2.31401\n", 1381 | "Iteration 149\n", 1382 | "loss = 122.143\n", 1383 | "exp = 2.31126\n", 1384 | "Iteration 150\n", 1385 | "loss = 120.631\n", 1386 | "exp = 2.30852\n", 1387 | "Iteration 151\n", 1388 | "loss = 119.14\n", 1389 | "exp = 2.30581\n", 1390 | "Iteration 152\n", 1391 | "loss = 117.667\n", 1392 | "exp = 2.30311\n", 1393 | "Iteration 153\n", 1394 | "loss = 116.213\n", 1395 | "exp = 2.30042\n", 1396 | "Iteration 154\n", 1397 | "loss = 114.778\n", 1398 | "exp = 2.29776\n", 1399 | "Iteration 155\n", 1400 | "loss = 113.36\n", 1401 | "exp = 2.29511\n", 1402 | "Iteration 156\n", 1403 | "loss = 111.961\n", 1404 | "exp = 2.29247\n", 1405 | "Iteration 157\n", 1406 | "loss = 110.578\n", 1407 | "exp = 2.28986\n", 1408 | "Iteration 158\n", 1409 | "loss = 109.213\n", 1410 | "exp = 2.28726\n", 1411 | "Iteration 159\n", 1412 | "loss = 107.864\n", 1413 | "exp = 2.28467\n", 1414 | "Iteration 160\n", 1415 | "loss = 106.532\n", 1416 | "exp = 2.2821\n", 1417 | "Iteration 161\n", 1418 | "loss = 105.216\n", 1419 | "exp = 2.27955\n", 1420 | "Iteration 162\n", 1421 | "loss = 103.916\n", 1422 | "exp = 2.27701\n", 1423 | "Iteration 163\n", 1424 | "loss = 102.631\n", 1425 | "exp = 2.27449\n", 1426 | "Iteration 164\n", 1427 | "loss = 101.361\n", 1428 | "exp = 2.27198\n", 1429 | "Iteration 165\n", 1430 | "loss = 100.107\n", 1431 | "exp = 2.26949\n", 1432 | "Iteration 166\n", 1433 | "loss = 98.8672\n", 1434 | "exp = 2.26701\n", 1435 | "Iteration 167\n", 1436 | "loss = 97.6419\n", 1437 | "exp = 2.26454\n", 1438 | "Iteration 168\n", 1439 | "loss = 96.4309\n", 1440 | "exp = 2.26209\n", 1441 | "Iteration 169\n", 1442 | "loss = 95.2339\n", 1443 | "exp = 2.25966\n", 1444 | "Iteration 170\n", 1445 | "loss = 94.0506\n", 1446 | "exp = 2.25723\n", 1447 | "Iteration 171\n", 1448 | "loss = 92.8808\n", 1449 | "exp = 2.25483\n", 1450 | "Iteration 172\n", 1451 | "loss = 91.7243\n", 1452 | "exp = 2.25243\n", 1453 | "Iteration 173\n", 1454 | "loss = 90.5809\n", 1455 | "exp = 2.25005\n", 1456 | "Iteration 174\n", 1457 | "loss = 89.4502\n", 1458 | "exp = 2.24768\n", 1459 | "Iteration 175\n", 1460 | "loss = 88.3321\n", 1461 | "exp = 2.24533\n", 1462 | "Iteration 176\n", 1463 | "loss = 87.2265\n", 1464 | "exp = 2.24299\n", 1465 | 
"Iteration 177\n", 1466 | "loss = 86.1331\n", 1467 | "exp = 2.24066\n", 1468 | "Iteration 178\n", 1469 | "loss = 85.0516\n", 1470 | "exp = 2.23834\n", 1471 | "Iteration 179\n", 1472 | "loss = 83.9819\n", 1473 | "exp = 2.23604\n", 1474 | "Iteration 180\n", 1475 | "loss = 82.9239\n", 1476 | "exp = 2.23375\n", 1477 | "Iteration 181\n", 1478 | "loss = 81.8773\n", 1479 | "exp = 2.23147\n", 1480 | "Iteration 182\n", 1481 | "loss = 80.8419\n", 1482 | "exp = 2.2292\n", 1483 | "Iteration 183\n", 1484 | "loss = 79.8176\n", 1485 | "exp = 2.22695\n", 1486 | "Iteration 184\n", 1487 | "loss = 78.8041\n", 1488 | "exp = 2.22471\n", 1489 | "Iteration 185\n", 1490 | "loss = 77.8014\n", 1491 | "exp = 2.22248\n", 1492 | "Iteration 186\n", 1493 | "loss = 76.8092\n", 1494 | "exp = 2.22026\n", 1495 | "Iteration 187\n", 1496 | "loss = 75.8274\n", 1497 | "exp = 2.21805\n", 1498 | "Iteration 188\n", 1499 | "loss = 74.8558\n", 1500 | "exp = 2.21586\n", 1501 | "Iteration 189\n", 1502 | "loss = 73.8942\n", 1503 | "exp = 2.21367\n", 1504 | "Iteration 190\n", 1505 | "loss = 72.9425\n", 1506 | "exp = 2.2115\n", 1507 | "Iteration 191\n", 1508 | "loss = 72.0006\n", 1509 | "exp = 2.20934\n", 1510 | "Iteration 192\n", 1511 | "loss = 71.0683\n", 1512 | "exp = 2.20719\n", 1513 | "Iteration 193\n", 1514 | "loss = 70.1454\n", 1515 | "exp = 2.20505\n", 1516 | "Iteration 194\n", 1517 | "loss = 69.2318\n", 1518 | "exp = 2.20292\n", 1519 | "Iteration 195\n", 1520 | "loss = 68.3273\n", 1521 | "exp = 2.2008\n", 1522 | "Iteration 196\n", 1523 | "loss = 67.4319\n", 1524 | "exp = 2.19869\n", 1525 | "Iteration 197\n", 1526 | "loss = 66.5454\n", 1527 | "exp = 2.19659\n", 1528 | "Iteration 198\n", 1529 | "loss = 65.6676\n", 1530 | "exp = 2.19451\n", 1531 | "Iteration 199\n", 1532 | "loss = 64.7986\n", 1533 | "exp = 2.19243\n", 1534 | "Iteration 200\n", 1535 | "loss = 63.938\n", 1536 | "exp = 2.19036\n", 1537 | "Iteration 201\n", 1538 | "loss = 63.0857\n", 1539 | "exp = 2.18831\n", 1540 | "Iteration 202\n", 1541 | "loss = 62.2418\n", 1542 | "exp = 2.18626\n", 1543 | "Iteration 203\n", 1544 | "loss = 61.4059\n", 1545 | "exp = 2.18422\n", 1546 | "Iteration 204\n", 1547 | "loss = 60.5782\n", 1548 | "exp = 2.1822\n", 1549 | "Iteration 205\n", 1550 | "loss = 59.7583\n", 1551 | "exp = 2.18018\n", 1552 | "Iteration 206\n", 1553 | "loss = 58.9462\n", 1554 | "exp = 2.17817\n", 1555 | "Iteration 207\n", 1556 | "loss = 58.1417\n", 1557 | "exp = 2.17617\n", 1558 | "Iteration 208\n", 1559 | "loss = 57.3449\n", 1560 | "exp = 2.17418\n", 1561 | "Iteration 209\n", 1562 | "loss = 56.5555\n", 1563 | "exp = 2.1722\n", 1564 | "Iteration 210\n", 1565 | "loss = 55.7735\n", 1566 | "exp = 2.17023\n", 1567 | "Iteration 211\n", 1568 | "loss = 54.9987\n", 1569 | "exp = 2.16827\n", 1570 | "Iteration 212\n" 1571 | ] 1572 | }, 1573 | { 1574 | "name": "stdout", 1575 | "output_type": "stream", 1576 | "text": [ 1577 | "loss = 54.2312\n", 1578 | "exp = 2.16632\n", 1579 | "Iteration 213\n", 1580 | "loss = 53.4707\n", 1581 | "exp = 2.16438\n", 1582 | "Iteration 214\n", 1583 | "loss = 52.7171\n", 1584 | "exp = 2.16244\n", 1585 | "Iteration 215\n", 1586 | "loss = 51.9703\n", 1587 | "exp = 2.16052\n", 1588 | "Iteration 216\n", 1589 | "loss = 51.2304\n", 1590 | "exp = 2.1586\n", 1591 | "Iteration 217\n", 1592 | "loss = 50.4972\n", 1593 | "exp = 2.15669\n", 1594 | "Iteration 218\n", 1595 | "loss = 49.7705\n", 1596 | "exp = 2.15479\n", 1597 | "Iteration 219\n", 1598 | "loss = 49.0503\n", 1599 | "exp = 2.1529\n", 1600 | "Iteration 220\n", 1601 | "loss = 48.3366\n", 1602 | "exp = 
2.15102\n", 1603 | "Iteration 221\n", 1604 | "loss = 47.6292\n", 1605 | "exp = 2.14914\n", 1606 | "Iteration 222\n", 1607 | "loss = 46.9281\n", 1608 | "exp = 2.14728\n", 1609 | "Iteration 223\n", 1610 | "loss = 46.2331\n", 1611 | "exp = 2.14542\n", 1612 | "Iteration 224\n", 1613 | "loss = 45.5442\n", 1614 | "exp = 2.14357\n", 1615 | "Iteration 225\n", 1616 | "loss = 44.8613\n", 1617 | "exp = 2.14173\n", 1618 | "Iteration 226\n", 1619 | "loss = 44.1843\n", 1620 | "exp = 2.13989\n", 1621 | "Iteration 227\n", 1622 | "loss = 43.5132\n", 1623 | "exp = 2.13807\n", 1624 | "Iteration 228\n", 1625 | "loss = 42.8479\n", 1626 | "exp = 2.13625\n", 1627 | "Iteration 229\n", 1628 | "loss = 42.1883\n", 1629 | "exp = 2.13444\n", 1630 | "Iteration 230\n", 1631 | "loss = 41.5342\n", 1632 | "exp = 2.13264\n", 1633 | "Iteration 231\n", 1634 | "loss = 40.8858\n", 1635 | "exp = 2.13084\n", 1636 | "Iteration 232\n", 1637 | "loss = 40.2429\n", 1638 | "exp = 2.12905\n", 1639 | "Iteration 233\n", 1640 | "loss = 39.6053\n", 1641 | "exp = 2.12727\n", 1642 | "Iteration 234\n", 1643 | "loss = 38.9731\n", 1644 | "exp = 2.1255\n", 1645 | "Iteration 235\n", 1646 | "loss = 38.3461\n", 1647 | "exp = 2.12374\n", 1648 | "Iteration 236\n", 1649 | "loss = 37.7244\n", 1650 | "exp = 2.12198\n", 1651 | "Iteration 237\n", 1652 | "loss = 37.1078\n", 1653 | "exp = 2.12023\n", 1654 | "Iteration 238\n", 1655 | "loss = 36.4964\n", 1656 | "exp = 2.11849\n", 1657 | "Iteration 239\n", 1658 | "loss = 35.8899\n", 1659 | "exp = 2.11675\n", 1660 | "Iteration 240\n", 1661 | "loss = 35.2885\n", 1662 | "exp = 2.11502\n", 1663 | "Iteration 241\n", 1664 | "loss = 34.6919\n", 1665 | "exp = 2.1133\n", 1666 | "Iteration 242\n", 1667 | "loss = 34.1001\n", 1668 | "exp = 2.11158\n", 1669 | "Iteration 243\n", 1670 | "loss = 33.5131\n", 1671 | "exp = 2.10988\n", 1672 | "Iteration 244\n", 1673 | "loss = 32.9309\n", 1674 | "exp = 2.10818\n", 1675 | "Iteration 245\n", 1676 | "loss = 32.3533\n", 1677 | "exp = 2.10648\n", 1678 | "Iteration 246\n", 1679 | "loss = 31.7803\n", 1680 | "exp = 2.10479\n", 1681 | "Iteration 247\n", 1682 | "loss = 31.2119\n", 1683 | "exp = 2.10311\n", 1684 | "Iteration 248\n", 1685 | "loss = 30.648\n", 1686 | "exp = 2.10144\n", 1687 | "Iteration 249\n", 1688 | "loss = 30.0885\n", 1689 | "exp = 2.09977\n", 1690 | "Iteration 250\n", 1691 | "loss = 29.5335\n", 1692 | "exp = 2.09811\n", 1693 | "Iteration 251\n", 1694 | "loss = 28.9828\n", 1695 | "exp = 2.09646\n", 1696 | "Iteration 252\n", 1697 | "loss = 28.4363\n", 1698 | "exp = 2.09481\n", 1699 | "Iteration 253\n", 1700 | "loss = 27.8941\n", 1701 | "exp = 2.09317\n", 1702 | "Iteration 254\n", 1703 | "loss = 27.3561\n", 1704 | "exp = 2.09153\n", 1705 | "Iteration 255\n", 1706 | "loss = 26.8223\n", 1707 | "exp = 2.0899\n", 1708 | "Iteration 256\n", 1709 | "loss = 26.2925\n", 1710 | "exp = 2.08828\n", 1711 | "Iteration 257\n", 1712 | "loss = 25.7668\n", 1713 | "exp = 2.08666\n", 1714 | "Iteration 258\n", 1715 | "loss = 25.245\n", 1716 | "exp = 2.08505\n", 1717 | "Iteration 259\n", 1718 | "loss = 24.7273\n", 1719 | "exp = 2.08345\n", 1720 | "Iteration 260\n", 1721 | "loss = 24.2135\n", 1722 | "exp = 2.08185\n", 1723 | "Iteration 261\n", 1724 | "loss = 23.7035\n", 1725 | "exp = 2.08026\n", 1726 | "Iteration 262\n", 1727 | "loss = 23.1975\n", 1728 | "exp = 2.07867\n", 1729 | "Iteration 263\n", 1730 | "loss = 22.6952\n", 1731 | "exp = 2.07709\n", 1732 | "Iteration 264\n", 1733 | "loss = 22.1965\n", 1734 | "exp = 2.07552\n", 1735 | "Iteration 265\n", 1736 | "loss = 21.7016\n", 1737 | "exp = 
2.07395\n", 1738 | "Iteration 266\n", 1739 | "loss = 21.2104\n", 1740 | "exp = 2.07238\n", 1741 | "Iteration 267\n", 1742 | "loss = 20.7228\n", 1743 | "exp = 2.07083\n", 1744 | "Iteration 268\n", 1745 | "loss = 20.2388\n", 1746 | "exp = 2.06928\n", 1747 | "Iteration 269\n", 1748 | "loss = 19.7583\n", 1749 | "exp = 2.06773\n", 1750 | "Iteration 270\n", 1751 | "loss = 19.2813\n", 1752 | "exp = 2.06619\n", 1753 | "Iteration 271\n", 1754 | "loss = 18.8078\n", 1755 | "exp = 2.06466\n", 1756 | "Iteration 272\n", 1757 | "loss = 18.3377\n", 1758 | "exp = 2.06313\n", 1759 | "Iteration 273\n", 1760 | "loss = 17.871\n", 1761 | "exp = 2.0616\n", 1762 | "Iteration 274\n", 1763 | "loss = 17.4076\n", 1764 | "exp = 2.06008\n", 1765 | "Iteration 275\n", 1766 | "loss = 16.9476\n", 1767 | "exp = 2.05857\n", 1768 | "Iteration 276\n", 1769 | "loss = 16.4908\n", 1770 | "exp = 2.05706\n", 1771 | "Iteration 277\n", 1772 | "loss = 16.0373\n", 1773 | "exp = 2.05556\n", 1774 | "Iteration 278\n", 1775 | "loss = 15.587\n", 1776 | "exp = 2.05407\n", 1777 | "Iteration 279\n", 1778 | "loss = 15.1398\n", 1779 | "exp = 2.05258\n", 1780 | "Iteration 280\n", 1781 | "loss = 14.6958\n", 1782 | "exp = 2.05109\n", 1783 | "Iteration 281\n", 1784 | "loss = 14.2549\n", 1785 | "exp = 2.04961\n", 1786 | "Iteration 282\n", 1787 | "loss = 13.8171\n", 1788 | "exp = 2.04813\n", 1789 | "Iteration 283\n", 1790 | "loss = 13.3824\n", 1791 | "exp = 2.04666\n", 1792 | "Iteration 284\n", 1793 | "loss = 12.9506\n", 1794 | "exp = 2.0452\n", 1795 | "Iteration 285\n", 1796 | "loss = 12.5218\n", 1797 | "exp = 2.04374\n", 1798 | "Iteration 286\n", 1799 | "loss = 12.096\n", 1800 | "exp = 2.04228\n", 1801 | "Iteration 287\n", 1802 | "loss = 11.6731\n", 1803 | "exp = 2.04083\n", 1804 | "Iteration 288\n", 1805 | "loss = 11.2531\n", 1806 | "exp = 2.03939\n", 1807 | "Iteration 289\n", 1808 | "loss = 10.836\n", 1809 | "exp = 2.03794\n", 1810 | "Iteration 290\n", 1811 | "loss = 10.4217\n", 1812 | "exp = 2.03651\n", 1813 | "Iteration 291\n", 1814 | "loss = 10.0101\n", 1815 | "exp = 2.03508\n", 1816 | "Iteration 292\n", 1817 | "loss = 9.60137\n", 1818 | "exp = 2.03365\n", 1819 | "Iteration 293\n", 1820 | "loss = 9.19538\n", 1821 | "exp = 2.03223\n", 1822 | "Iteration 294\n", 1823 | "loss = 8.79209\n", 1824 | "exp = 2.03081\n", 1825 | "Iteration 295\n", 1826 | "loss = 8.3915\n", 1827 | "exp = 2.0294\n", 1828 | "Iteration 296\n", 1829 | "loss = 7.9936\n", 1830 | "exp = 2.028\n", 1831 | "Iteration 297\n", 1832 | "loss = 7.59835\n", 1833 | "exp = 2.02659\n", 1834 | "Iteration 298\n", 1835 | "loss = 7.20567\n", 1836 | "exp = 2.0252\n", 1837 | "Iteration 299\n", 1838 | "loss = 6.81562\n", 1839 | "exp = 2.0238\n", 1840 | "Iteration 300\n", 1841 | "loss = 6.42812\n", 1842 | "exp = 2.02241\n", 1843 | "Iteration 301\n", 1844 | "loss = 6.04314\n", 1845 | "exp = 2.02103\n", 1846 | "Iteration 302\n", 1847 | "loss = 5.66066\n", 1848 | "exp = 2.01965\n", 1849 | "Iteration 303\n", 1850 | "loss = 5.28068\n", 1851 | "exp = 2.01828\n", 1852 | "Iteration 304\n", 1853 | "loss = 4.90318\n", 1854 | "exp = 2.01691\n", 1855 | "Iteration 305\n", 1856 | "loss = 4.52806\n", 1857 | "exp = 2.01554\n", 1858 | "Iteration 306\n", 1859 | "loss = 4.1554\n", 1860 | "exp = 2.01418\n", 1861 | "Iteration 307\n", 1862 | "loss = 3.7851\n", 1863 | "exp = 2.01282\n", 1864 | "Iteration 308\n", 1865 | "loss = 3.41721\n", 1866 | "exp = 2.01147\n", 1867 | "Iteration 309\n", 1868 | "loss = 3.05165\n", 1869 | "exp = 2.01012\n", 1870 | "Iteration 310\n", 1871 | "loss = 2.68842\n", 1872 | "exp = 2.00877\n", 
1873 | "Iteration 311\n", 1874 | "loss = 2.32748\n", 1875 | "exp = 2.00743\n", 1876 | "Iteration 312\n", 1877 | "loss = 1.96884\n", 1878 | "exp = 2.0061\n", 1879 | "Iteration 313\n", 1880 | "loss = 1.61247\n", 1881 | "exp = 2.00477\n", 1882 | "Iteration 314\n", 1883 | "loss = 1.2583\n", 1884 | "exp = 2.00344\n", 1885 | "Iteration 315\n", 1886 | "loss = 0.906373\n", 1887 | "exp = 2.00211\n", 1888 | "Iteration 316\n", 1889 | "loss = 0.556617\n", 1890 | "exp = 2.0008\n", 1891 | "Iteration 317\n", 1892 | "loss = 0.209018\n", 1893 | "exp = 1.99948\n", 1894 | "Iteration 318\n", 1895 | "loss = 0.136407\n", 1896 | "exp = 2.00079\n", 1897 | "Iteration 319\n", 1898 | "loss = 0.207952\n", 1899 | "exp = 1.99948\n", 1900 | "Iteration 320\n", 1901 | "loss = 0.137469\n", 1902 | "exp = 2.00079\n", 1903 | "Iteration 321\n", 1904 | "loss = 0.206887\n", 1905 | "exp = 1.99947\n", 1906 | "Iteration 322\n", 1907 | "loss = 0.138533\n", 1908 | "exp = 2.00078\n", 1909 | "Iteration 323\n", 1910 | "loss = 0.20582\n", 1911 | "exp = 1.99947\n", 1912 | "Iteration 324\n", 1913 | "loss = 0.139564\n", 1914 | "exp = 2.00078\n", 1915 | "Iteration 325\n", 1916 | "loss = 0.204754\n", 1917 | "exp = 1.99946\n", 1918 | "Iteration 326\n", 1919 | "loss = 0.140627\n", 1920 | "exp = 2.00078\n", 1921 | "Iteration 327\n", 1922 | "loss = 0.203688\n", 1923 | "exp = 1.99946\n", 1924 | "Iteration 328\n", 1925 | "loss = 0.141689\n", 1926 | "exp = 2.00077\n", 1927 | "Iteration 329\n", 1928 | "loss = 0.202622\n", 1929 | "exp = 1.99946\n", 1930 | "Iteration 330\n", 1931 | "loss = 0.142752\n", 1932 | "exp = 2.00077\n", 1933 | "Iteration 331\n", 1934 | "loss = 0.201556\n", 1935 | "exp = 1.99945\n", 1936 | "Iteration 332\n", 1937 | "loss = 0.143815\n", 1938 | "exp = 2.00076\n", 1939 | "Iteration 333\n", 1940 | "loss = 0.20049\n", 1941 | "exp = 1.99945\n", 1942 | "Iteration 334\n", 1943 | "loss = 0.144878\n", 1944 | "exp = 2.00076\n", 1945 | "Iteration 335\n", 1946 | "loss = 0.199424\n", 1947 | "exp = 1.99944\n", 1948 | "Iteration 336\n", 1949 | "loss = 0.145941\n", 1950 | "exp = 2.00075\n", 1951 | "Iteration 337\n", 1952 | "loss = 0.198358\n", 1953 | "exp = 1.99944\n", 1954 | "Iteration 338\n", 1955 | "loss = 0.147003\n", 1956 | "exp = 2.00075\n", 1957 | "Iteration 339\n", 1958 | "loss = 0.197291\n", 1959 | "exp = 1.99944\n", 1960 | "Iteration 340\n", 1961 | "loss = 0.148066\n", 1962 | "exp = 2.00075\n", 1963 | "Iteration 341\n", 1964 | "loss = 0.196225\n", 1965 | "exp = 1.99943\n", 1966 | "Iteration 342\n", 1967 | "loss = 0.149097\n", 1968 | "exp = 2.00074\n", 1969 | "Iteration 343\n", 1970 | "loss = 0.195159\n", 1971 | "exp = 1.99943\n", 1972 | "Iteration 344\n", 1973 | "loss = 0.15016\n", 1974 | "exp = 2.00074\n", 1975 | "Iteration 345\n", 1976 | "loss = 0.194094\n", 1977 | "exp = 1.99942\n", 1978 | "Iteration 346\n", 1979 | "loss = 0.151223\n", 1980 | "exp = 2.00073\n", 1981 | "Iteration 347\n", 1982 | "loss = 0.193028\n", 1983 | "exp = 1.99942\n", 1984 | "Iteration 348\n", 1985 | "loss = 0.152285\n", 1986 | "exp = 2.00073\n", 1987 | "Iteration 349\n", 1988 | "loss = 0.191962\n", 1989 | "exp = 1.99942\n", 1990 | "Iteration 350\n", 1991 | "loss = 0.153348\n", 1992 | "exp = 2.00073\n", 1993 | "Iteration 351\n", 1994 | "loss = 0.190896\n", 1995 | "exp = 1.99941\n", 1996 | "Iteration 352\n", 1997 | "loss = 0.154411\n", 1998 | "exp = 2.00072\n", 1999 | "Iteration 353\n", 2000 | "loss = 0.18983\n", 2001 | "exp = 1.99941\n", 2002 | "Iteration 354\n", 2003 | "loss = 0.155474\n", 2004 | "exp = 2.00072\n", 2005 | "Iteration 355\n", 2006 | "loss = 
0.188764\n", 2007 | "exp = 1.9994\n", 2008 | "Iteration 356\n", 2009 | "loss = 0.156537\n", 2010 | "exp = 2.00071\n", 2011 | "Iteration 357\n", 2012 | "loss = 0.187698\n", 2013 | "exp = 1.9994\n", 2014 | "Iteration 358\n", 2015 | "loss = 0.157599\n", 2016 | "exp = 2.00071\n", 2017 | "Iteration 359\n", 2018 | "loss = 0.186632\n", 2019 | "exp = 1.9994\n", 2020 | "Iteration 360\n", 2021 | "loss = 0.158662\n", 2022 | "exp = 2.00071\n", 2023 | "Iteration 361\n", 2024 | "loss = 0.185566\n", 2025 | "exp = 1.99939\n", 2026 | "Iteration 362\n", 2027 | "loss = 0.159693\n", 2028 | "exp = 2.0007\n", 2029 | "Iteration 363\n", 2030 | "loss = 0.1845\n", 2031 | "exp = 1.99939\n", 2032 | "Iteration 364\n", 2033 | "loss = 0.160756\n", 2034 | "exp = 2.0007\n", 2035 | "Iteration 365\n", 2036 | "loss = 0.183434\n", 2037 | "exp = 1.99938\n", 2038 | "Iteration 366\n", 2039 | "loss = 0.161818\n", 2040 | "exp = 2.00069\n", 2041 | "Iteration 367\n", 2042 | "loss = 0.182368\n", 2043 | "exp = 1.99938\n", 2044 | "Iteration 368\n", 2045 | "loss = 0.162881\n", 2046 | "exp = 2.00069\n", 2047 | "Iteration 369\n", 2048 | "loss = 0.181302\n", 2049 | "exp = 1.99938\n", 2050 | "Iteration 370\n", 2051 | "loss = 0.163943\n", 2052 | "exp = 2.00069\n", 2053 | "Iteration 371\n", 2054 | "loss = 0.180236\n", 2055 | "exp = 1.99937\n", 2056 | "Iteration 372\n", 2057 | "loss = 0.165006\n", 2058 | "exp = 2.00068\n", 2059 | "Iteration 373\n", 2060 | "loss = 0.179171\n", 2061 | "exp = 1.99937\n", 2062 | "Iteration 374\n", 2063 | "loss = 0.166068\n", 2064 | "exp = 2.00068\n", 2065 | "Iteration 375\n", 2066 | "loss = 0.178105\n", 2067 | "exp = 1.99936\n", 2068 | "Iteration 376\n", 2069 | "loss = 0.167131\n", 2070 | "exp = 2.00067\n", 2071 | "Iteration 377\n", 2072 | "loss = 0.177039\n", 2073 | "exp = 1.99936\n", 2074 | "Iteration 378\n", 2075 | "loss = 0.168193\n", 2076 | "exp = 2.00067\n", 2077 | "Iteration 379\n", 2078 | "loss = 0.175973\n", 2079 | "exp = 1.99935\n", 2080 | "Iteration 380\n", 2081 | "loss = 0.169225\n", 2082 | "exp = 2.00067\n", 2083 | "Iteration 381\n", 2084 | "loss = 0.174907\n", 2085 | "exp = 1.99935\n", 2086 | "Iteration 382\n", 2087 | "loss = 0.170287\n", 2088 | "exp = 2.00066\n", 2089 | "Iteration 383\n", 2090 | "loss = 0.173842\n", 2091 | "exp = 1.99935\n", 2092 | "Iteration 384\n", 2093 | "loss = 0.17135\n", 2094 | "exp = 2.00066\n", 2095 | "Iteration 385\n", 2096 | "loss = 0.172775\n", 2097 | "exp = 1.99934\n", 2098 | "Iteration 386\n", 2099 | "loss = 0.172412\n", 2100 | "exp = 2.00065\n", 2101 | "Iteration 387\n", 2102 | "loss = 0.17171\n", 2103 | "exp = 1.99934\n", 2104 | "Iteration 388\n", 2105 | "loss = 0.173475\n", 2106 | "exp = 2.00065\n", 2107 | "Iteration 389\n", 2108 | "loss = 0.170644\n", 2109 | "exp = 1.99933\n", 2110 | "Iteration 390\n", 2111 | "loss = 0.174537\n", 2112 | "exp = 2.00065\n", 2113 | "Iteration 391\n", 2114 | "loss = 0.169579\n", 2115 | "exp = 1.99933\n", 2116 | "Iteration 392\n", 2117 | "loss = 0.1756\n", 2118 | "exp = 2.00064\n", 2119 | "Iteration 393\n", 2120 | "loss = 0.168513\n", 2121 | "exp = 1.99933\n", 2122 | "Iteration 394\n", 2123 | "loss = 0.176662\n", 2124 | "exp = 2.00064\n", 2125 | "Iteration 395\n", 2126 | "loss = 0.167447\n", 2127 | "exp = 1.99932\n", 2128 | "Iteration 396\n", 2129 | "loss = 0.177725\n", 2130 | "exp = 2.00063\n", 2131 | "Iteration 397\n", 2132 | "loss = 0.166381\n", 2133 | "exp = 1.99932\n", 2134 | "Iteration 398\n", 2135 | "loss = 0.178787\n", 2136 | "exp = 2.00063\n", 2137 | "Iteration 399\n", 2138 | "loss = 0.165316\n", 2139 | "exp = 1.99931\n", 2140 | 
"Iteration 400\n", 2141 | "loss = 0.179819\n", 2142 | "exp = 2.00063\n", 2143 | "Iteration 401\n", 2144 | "loss = 0.16425\n", 2145 | "exp = 1.99931\n", 2146 | "Iteration 402\n", 2147 | "loss = 0.180881\n", 2148 | "exp = 2.00062\n", 2149 | "Iteration 403\n", 2150 | "loss = 0.163184\n", 2151 | "exp = 1.99931\n", 2152 | "Iteration 404\n", 2153 | "loss = 0.181943\n", 2154 | "exp = 2.00062\n", 2155 | "Iteration 405\n", 2156 | "loss = 0.162118\n", 2157 | "exp = 1.9993\n", 2158 | "Iteration 406\n", 2159 | "loss = 0.183006\n", 2160 | "exp = 2.00061\n", 2161 | "Iteration 407\n", 2162 | "loss = 0.161053\n", 2163 | "exp = 1.9993\n", 2164 | "Iteration 408\n", 2165 | "loss = 0.184068\n", 2166 | "exp = 2.00061\n", 2167 | "Iteration 409\n", 2168 | "loss = 0.159987\n", 2169 | "exp = 1.99929\n", 2170 | "Iteration 410\n", 2171 | "loss = 0.185131\n", 2172 | "exp = 2.0006\n", 2173 | "Iteration 411\n", 2174 | "loss = 0.158922\n", 2175 | "exp = 1.99929\n", 2176 | "Iteration 412\n", 2177 | "loss = 0.186193\n", 2178 | "exp = 2.0006\n", 2179 | "Iteration 413\n", 2180 | "loss = 0.157855\n", 2181 | "exp = 1.99929\n", 2182 | "Iteration 414\n", 2183 | "loss = 0.187255\n", 2184 | "exp = 2.0006\n", 2185 | "Iteration 415\n", 2186 | "loss = 0.15679\n", 2187 | "exp = 1.99928\n", 2188 | "Iteration 416\n", 2189 | "loss = 0.188317\n", 2190 | "exp = 2.00059\n", 2191 | "Iteration 417\n", 2192 | "loss = 0.155724\n", 2193 | "exp = 1.99928\n", 2194 | "Iteration 418\n", 2195 | "loss = 0.189348\n", 2196 | "exp = 2.00059\n", 2197 | "Iteration 419\n", 2198 | "loss = 0.154658\n", 2199 | "exp = 1.99927\n", 2200 | "Iteration 420\n", 2201 | "loss = 0.190411\n", 2202 | "exp = 2.00058\n", 2203 | "Iteration 421\n", 2204 | "loss = 0.153593\n", 2205 | "exp = 1.99927\n", 2206 | "Iteration 422\n", 2207 | "loss = 0.191473\n", 2208 | "exp = 2.00058\n", 2209 | "Iteration 423\n", 2210 | "loss = 0.152527\n", 2211 | "exp = 1.99927\n", 2212 | "Iteration 424\n", 2213 | "loss = 0.192535\n", 2214 | "exp = 2.00058\n", 2215 | "Iteration 425\n", 2216 | "loss = 0.151462\n", 2217 | "exp = 1.99926\n", 2218 | "Iteration 426\n", 2219 | "loss = 0.193598\n", 2220 | "exp = 2.00057\n", 2221 | "Iteration 427\n", 2222 | "loss = 0.150396\n", 2223 | "exp = 1.99926\n", 2224 | "Iteration 428\n", 2225 | "loss = 0.19466\n", 2226 | "exp = 2.00057\n", 2227 | "Iteration 429\n", 2228 | "loss = 0.149331\n", 2229 | "exp = 1.99925\n", 2230 | "Iteration 430\n", 2231 | "loss = 0.195722\n", 2232 | "exp = 2.00056\n" 2233 | ] 2234 | }, 2235 | { 2236 | "name": "stdout", 2237 | "output_type": "stream", 2238 | "text": [ 2239 | "Iteration 431\n", 2240 | "loss = 0.148265\n", 2241 | "exp = 1.99925\n", 2242 | "Iteration 432\n", 2243 | "loss = 0.196784\n", 2244 | "exp = 2.00056\n", 2245 | "Iteration 433\n", 2246 | "loss = 0.1472\n", 2247 | "exp = 1.99925\n", 2248 | "Iteration 434\n", 2249 | "loss = 0.197847\n", 2250 | "exp = 2.00056\n", 2251 | "Iteration 435\n", 2252 | "loss = 0.146134\n", 2253 | "exp = 1.99924\n", 2254 | "Iteration 436\n", 2255 | "loss = 0.198909\n", 2256 | "exp = 2.00055\n", 2257 | "Iteration 437\n", 2258 | "loss = 0.145068\n", 2259 | "exp = 1.99924\n", 2260 | "Iteration 438\n", 2261 | "loss = 0.19994\n", 2262 | "exp = 2.00055\n", 2263 | "Iteration 439\n", 2264 | "loss = 0.144003\n", 2265 | "exp = 1.99923\n", 2266 | "Iteration 440\n", 2267 | "loss = 0.201002\n", 2268 | "exp = 2.00054\n", 2269 | "Iteration 441\n", 2270 | "loss = 0.142938\n", 2271 | "exp = 1.99923\n", 2272 | "Iteration 442\n", 2273 | "loss = 0.202064\n", 2274 | "exp = 2.00054\n", 2275 | "Iteration 443\n", 2276 
| "loss = 0.141872\n", 2277 | "exp = 1.99923\n", 2278 | "Iteration 444\n", 2279 | "loss = 0.203126\n", 2280 | "exp = 2.00054\n", 2281 | "Iteration 445\n", 2282 | "loss = 0.140807\n", 2283 | "exp = 1.99922\n", 2284 | "Iteration 446\n", 2285 | "loss = 0.204189\n", 2286 | "exp = 2.00053\n", 2287 | "Iteration 447\n", 2288 | "loss = 0.139741\n", 2289 | "exp = 1.99922\n", 2290 | "Iteration 448\n", 2291 | "loss = 0.205251\n", 2292 | "exp = 2.00053\n", 2293 | "Iteration 449\n", 2294 | "loss = 0.138675\n", 2295 | "exp = 1.99921\n", 2296 | "Iteration 450\n", 2297 | "loss = 0.206313\n", 2298 | "exp = 2.00052\n", 2299 | "Iteration 451\n", 2300 | "loss = 0.13761\n", 2301 | "exp = 1.99921\n", 2302 | "Iteration 452\n", 2303 | "loss = 0.207375\n", 2304 | "exp = 2.00052\n", 2305 | "Iteration 453\n", 2306 | "loss = 0.136544\n", 2307 | "exp = 1.99921\n", 2308 | "Iteration 454\n", 2309 | "loss = 0.208437\n", 2310 | "exp = 2.00052\n", 2311 | "Iteration 455\n", 2312 | "loss = 0.135479\n", 2313 | "exp = 1.9992\n", 2314 | "Iteration 456\n", 2315 | "loss = 0.209468\n", 2316 | "exp = 2.00051\n", 2317 | "Iteration 457\n", 2318 | "loss = 0.134414\n", 2319 | "exp = 1.9992\n", 2320 | "Iteration 458\n", 2321 | "loss = 0.210531\n", 2322 | "exp = 2.00051\n", 2323 | "Iteration 459\n", 2324 | "loss = 0.133349\n", 2325 | "exp = 1.99919\n", 2326 | "Iteration 460\n", 2327 | "loss = 0.211592\n", 2328 | "exp = 2.0005\n", 2329 | "Iteration 461\n", 2330 | "loss = 0.132283\n", 2331 | "exp = 1.99919\n", 2332 | "Iteration 462\n", 2333 | "loss = 0.212654\n", 2334 | "exp = 2.0005\n", 2335 | "Iteration 463\n", 2336 | "loss = 0.131217\n", 2337 | "exp = 1.99919\n", 2338 | "Iteration 464\n", 2339 | "loss = 0.213717\n", 2340 | "exp = 2.0005\n", 2341 | "Iteration 465\n", 2342 | "loss = 0.130152\n", 2343 | "exp = 1.99918\n", 2344 | "Iteration 466\n", 2345 | "loss = 0.214779\n", 2346 | "exp = 2.00049\n", 2347 | "Iteration 467\n", 2348 | "loss = 0.129087\n", 2349 | "exp = 1.99918\n", 2350 | "Iteration 468\n", 2351 | "loss = 0.215841\n", 2352 | "exp = 2.00049\n", 2353 | "Iteration 469\n", 2354 | "loss = 0.128021\n", 2355 | "exp = 1.99917\n", 2356 | "Iteration 470\n", 2357 | "loss = 0.216903\n", 2358 | "exp = 2.00048\n", 2359 | "Iteration 471\n", 2360 | "loss = 0.126956\n", 2361 | "exp = 1.99917\n", 2362 | "Iteration 472\n", 2363 | "loss = 0.217965\n", 2364 | "exp = 2.00048\n", 2365 | "Iteration 473\n", 2366 | "loss = 0.125891\n", 2367 | "exp = 1.99916\n", 2368 | "Iteration 474\n", 2369 | "loss = 0.219027\n", 2370 | "exp = 2.00048\n", 2371 | "Iteration 475\n", 2372 | "loss = 0.124826\n", 2373 | "exp = 1.99916\n", 2374 | "Iteration 476\n", 2375 | "loss = 0.220058\n", 2376 | "exp = 2.00047\n", 2377 | "Iteration 477\n", 2378 | "loss = 0.12376\n", 2379 | "exp = 1.99916\n", 2380 | "Iteration 478\n", 2381 | "loss = 0.22112\n", 2382 | "exp = 2.00047\n", 2383 | "Iteration 479\n", 2384 | "loss = 0.122695\n", 2385 | "exp = 1.99915\n", 2386 | "Iteration 480\n", 2387 | "loss = 0.222182\n", 2388 | "exp = 2.00046\n", 2389 | "Iteration 481\n", 2390 | "loss = 0.121629\n", 2391 | "exp = 1.99915\n", 2392 | "Iteration 482\n", 2393 | "loss = 0.223244\n", 2394 | "exp = 2.00046\n", 2395 | "Iteration 483\n", 2396 | "loss = 0.120564\n", 2397 | "exp = 1.99914\n", 2398 | "Iteration 484\n", 2399 | "loss = 0.224306\n", 2400 | "exp = 2.00045\n", 2401 | "Iteration 485\n", 2402 | "loss = 0.119499\n", 2403 | "exp = 1.99914\n", 2404 | "Iteration 486\n", 2405 | "loss = 0.225368\n", 2406 | "exp = 2.00045\n", 2407 | "Iteration 487\n", 2408 | "loss = 0.118434\n", 2409 | "exp = 
1.99914\n", 2410 | "Iteration 488\n", 2411 | "loss = 0.22643\n", 2412 | "exp = 2.00045\n", 2413 | "Iteration 489\n", 2414 | "loss = 0.117368\n", 2415 | "exp = 1.99913\n", 2416 | "Iteration 490\n", 2417 | "loss = 0.227492\n", 2418 | "exp = 2.00044\n", 2419 | "Iteration 491\n", 2420 | "loss = 0.116303\n", 2421 | "exp = 1.99913\n", 2422 | "Iteration 492\n", 2423 | "loss = 0.228554\n", 2424 | "exp = 2.00044\n", 2425 | "Iteration 493\n", 2426 | "loss = 0.115238\n", 2427 | "exp = 1.99912\n", 2428 | "Iteration 494\n", 2429 | "loss = 0.229585\n", 2430 | "exp = 2.00043\n", 2431 | "Iteration 495\n", 2432 | "loss = 0.114173\n", 2433 | "exp = 1.99912\n", 2434 | "Iteration 496\n", 2435 | "loss = 0.230646\n", 2436 | "exp = 2.00043\n", 2437 | "Iteration 497\n", 2438 | "loss = 0.113107\n", 2439 | "exp = 1.99912\n", 2440 | "Iteration 498\n", 2441 | "loss = 0.231708\n", 2442 | "exp = 2.00043\n", 2443 | "Iteration 499\n", 2444 | "loss = 0.112042\n", 2445 | "exp = 1.99911\n" 2446 | ] 2447 | } 2448 | ], 2449 | "source": [ 2450 | "import tensorflow as tf\n", 2451 | "import numpy as np\n", 2452 | "\n", 2453 | "def rmse(y, y_hat):\n", 2454 | " \"\"\"Compute root mean squared error\"\"\"\n", 2455 | " return tf.sqrt(tf.reduce_mean(tf.square((y - y_hat))))\n", 2456 | "\n", 2457 | "def forward(x, e):\n", 2458 | " \"\"\"Forward pass for our function\"\"\"\n", 2459 | " # tensorflow has automatic broadcasting \n", 2460 | " # so we do not need to reshape e manually\n", 2461 | " return tf.pow(x, e) \n", 2462 | "\n", 2463 | "n = 100 # number of examples\n", 2464 | "learning_rate = 5e-6\n", 2465 | "\n", 2466 | "# Placeholders for data\n", 2467 | "x = tf.placeholder(tf.float32)\n", 2468 | "y = tf.placeholder(tf.float32)\n", 2469 | "\n", 2470 | "# Model parameters\n", 2471 | "exp = tf.constant(2.0)\n", 2472 | "exp_hat = tf.Variable(4.0, name='exp_hat')\n", 2473 | "\n", 2474 | "# Model definition\n", 2475 | "y_hat = forward(x, exp_hat)\n", 2476 | "\n", 2477 | "# Optimizer\n", 2478 | "loss = rmse(y, y_hat)\n", 2479 | "opt = tf.train.GradientDescentOptimizer(learning_rate)\n", 2480 | "\n", 2481 | "# Summaries (NEW)\n", 2482 | "loss_summary = tf.summary.scalar(\"loss\", loss)\n", 2483 | "exp_summary = tf.summary.scalar(\"exp\", exp_hat)\n", 2484 | "all_summaries = tf.summary.merge_all()\n", 2485 | "\n", 2486 | "# We will run this operation to perform a single training step,\n", 2487 | "# e.g. 
opt.step() in Pytorch.\n", 2488 | "# Execution of this operation will also update model parameters\n", 2489 | "train_op = opt.minimize(loss) \n", 2490 | "\n", 2491 | "# Let's generate some training data\n", 2492 | "x_train = np.random.rand(n) + 10\n", 2493 | "y_train = x_train ** 2\n", 2494 | "\n", 2495 | "loss_history = []\n", 2496 | "exp_history = []\n", 2497 | "\n", 2498 | "# First, we need to create a Tensorflow session object\n", 2499 | "with tf.Session() as sess:\n", 2500 | "    \n", 2501 | "    # Initialize all defined variables\n", 2502 | "    tf.global_variables_initializer().run()\n", 2503 | "    \n", 2504 | "    summary_writer = tf.summary.FileWriter('./tensorboard', sess.graph)\n", 2505 | "    \n", 2506 | "    # Training loop\n", 2507 | "    for i in range(0, 500):\n", 2508 | "        print(\"Iteration %d\" % i)\n", 2509 | "        # Run a single training step\n", 2510 | "        summaries, curr_loss, curr_exp, _ = sess.run([all_summaries, loss, exp_hat, train_op], feed_dict={x: x_train, y: y_train})\n", 2511 | "        \n", 2512 | "        print(\"loss = %s\" % curr_loss)\n", 2513 | "        print(\"exp = %s\" % curr_exp)\n", 2514 | "        \n", 2515 | "        # Do some recordings for plots\n", 2516 | "        loss_history.append(curr_loss)\n", 2517 | "        exp_history.append(curr_exp)\n", 2518 | "        \n", 2519 | "        summary_writer.add_summary(summaries, i)" 2520 | ] 2521 | }, 2522 | { 2523 | "cell_type": "markdown", 2524 | "metadata": {}, 2525 | "source": [ 2526 | "# PyTorch modules" 2527 | ] 2528 | }, 2529 | { 2530 | "cell_type": "code", 2531 | "execution_count": 51, 2532 | "metadata": { 2533 | "collapsed": true 2534 | }, 2535 | "outputs": [], 2536 | "source": [ 2537 | "import torch\n", 2538 | "import torch.nn as nn\n", 2539 | "import torch.nn.functional as F\n", 2540 | "import torch.optim as optim\n", 2541 | "from torchvision import datasets, transforms\n", 2542 | "from torch.autograd import Variable\n", 2543 | "\n", 2544 | "# download and transform train dataset\n", 2545 | "train_loader = torch.utils.data.DataLoader(datasets.MNIST('../mnist_data', \n", 2546 | "                                                          download=True, \n", 2547 | "                                                          train=True,\n", 2548 | "                                                          transform=transforms.Compose([\n", 2549 | "                                                              transforms.ToTensor(), # first, convert image to PyTorch tensor\n", 2550 | "                                                              transforms.Normalize((0.1307,), (0.3081,)) # normalize inputs\n", 2551 | "                                                          ])), \n", 2552 | "                                           batch_size=10, \n", 2553 | "                                           shuffle=True)\n", 2554 | "\n", 2555 | "# download and transform test dataset\n", 2556 | "test_loader = torch.utils.data.DataLoader(datasets.MNIST('../mnist_data', \n", 2557 | "                                                         download=True, \n", 2558 | "                                                         train=False,\n", 2559 | "                                                         transform=transforms.Compose([\n", 2560 | "                                                             transforms.ToTensor(), # first, convert image to PyTorch tensor\n", 2561 | "                                                             transforms.Normalize((0.1307,), (0.3081,)) # normalize inputs\n", 2562 | "                                                         ])), \n", 2563 | "                                          batch_size=10, \n", 2564 | "                                          shuffle=True)\n", 2565 | "\n", 2566 | "class CNNClassifier(nn.Module):\n", 2567 | "    def __init__(self):\n", 2568 | "        super(CNNClassifier, self).__init__()\n", 2569 | "        self.conv1 = nn.Conv2d(1, 10, kernel_size=5)\n", 2570 | "        self.conv2 = nn.Conv2d(10, 20, kernel_size=5)\n", 2571 | "        self.dropout = nn.Dropout2d()\n", 2572 | "        self.fc1 = nn.Linear(320, 50)\n", 2573 | "        self.fc2 = nn.Linear(50, 10)\n", 2574 | "        \n", 2575 | "    def forward(self, x):\n", 2576 | "        # input is 28x28x1\n", 2577 | "        # conv1(kernel=5, filters=10) 28x28x1 -> 24x24x10\n", 2578 | "        # max_pool(kernel=2) 24x24x10 -> 12x12x10\n", 2579 | "        x = F.relu(F.max_pool2d(self.conv1(x), 2))\n", 2580 | "        \n", 2581 | "        # conv2(kernel=5, filters=20) 12x12x10 -> 8x8x20\n", 2582 | "        # max_pool(kernel=2) 
8x8x20 -> 4x4x20\n", 2583 | "        x = F.relu(F.max_pool2d(self.dropout(self.conv2(x)), 2))\n", 2584 | "        \n", 2585 | "        # flatten 4x4x20 = 320\n", 2586 | "        x = x.view(-1, 320)\n", 2587 | "        \n", 2588 | "        # 320 -> 50\n", 2589 | "        x = F.relu(self.fc1(x))\n", 2590 | "        x = F.dropout(x, training=self.training)\n", 2591 | "        \n", 2592 | "        # 50 -> 10\n", 2593 | "        x = self.fc2(x)\n", 2594 | "        \n", 2595 | "        # transform to log-probabilities\n", 2596 | "        return F.log_softmax(x)" 2597 | ] 2598 | }, 2599 | { 2600 | "cell_type": "code", 2601 | "execution_count": null, 2602 | "metadata": { 2603 | "collapsed": true 2604 | }, 2605 | "outputs": [], 2606 | "source": [ 2607 | "F.max_pool2d()" 2608 | ] 2609 | }, 2610 | { 2611 | "cell_type": "code", 2612 | "execution_count": null, 2613 | "metadata": { 2614 | "collapsed": true 2615 | }, 2616 | "outputs": [], 2617 | "source": [] 2618 | } 2619 | ], 2620 | "metadata": { 2621 | "kernelspec": { 2622 | "display_name": "Python 3", 2623 | "language": "python", 2624 | "name": "python3" 2625 | }, 2626 | "language_info": { 2627 | "codemirror_mode": { 2628 | "name": "ipython", 2629 | "version": 3 2630 | }, 2631 | "file_extension": ".py", 2632 | "mimetype": "text/x-python", 2633 | "name": "python", 2634 | "nbconvert_exporter": "python", 2635 | "pygments_lexer": "ipython3", 2636 | "version": "3.6.1" 2637 | } 2638 | }, 2639 | "nbformat": 4, 2640 | "nbformat_minor": 2 2641 | } 2642 | --------------------------------------------------------------------------------
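A note on viewing the summaries written by the TensorBoard example above: that cell writes the scalar summaries "loss" and "exp" plus the session graph to the ./tensorboard directory through tf.summary.FileWriter. They can then be inspected by launching TensorBoard from a shell in the notebook's working directory, for example

    tensorboard --logdir ./tensorboard

and opening the reported URL (http://localhost:6006 by default); the two scalars show up under the Scalars tab and the graph under the Graphs tab.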
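The final cell defines CNNClassifier and the MNIST loaders but stops before actually training the network. Below is a minimal training-loop sketch; it assumes the CNNClassifier and train_loader defined in that cell and the Variable-style API of the pinned torch==0.1.12, and the learning rate, momentum, and epoch count are illustrative values rather than settings taken from the notebook:

import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable

model = CNNClassifier()
optimizer = optim.SGD(model.parameters(), lr=0.01, momentum=0.5)  # illustrative hyperparameters

model.train()  # enable the dropout layers during training
for epoch in range(2):
    for batch_idx, (data, target) in enumerate(train_loader):
        # wrap the mini-batch tensors for autograd (0.1.x-era API)
        data, target = Variable(data), Variable(target)

        optimizer.zero_grad()
        output = model(data)               # log-probabilities from forward()
        loss = F.nll_loss(output, target)  # pairs with the log_softmax output
        loss.backward()
        optimizer.step()

        if batch_idx % 100 == 0:
            print("epoch %d, batch %d, loss = %s" % (epoch, batch_idx, loss.data[0]))

Evaluation on test_loader would follow the same pattern with model.eval() set and no backward pass.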