├── Activation Functions └── ReLU.ipynb ├── Attention is all you need └── transformer(attn paper).py ├── BERT └── bert.py ├── BLEU └── bleu.py ├── CNN └── CNN.ipynb ├── DQN └── Deep Q Networks.py ├── Distillation (Hinton) └── distill_mnist.py ├── GAN └── GAN.py ├── GPT2 └── gpt2.py ├── Image Style Transfer └── Style_Transfer.ipynb ├── Llama2 ├── image-1.png ├── image-2.png ├── image-3.png ├── image-4.png ├── image.png ├── model.py └── readme.md ├── Llama4 └── main.py ├── LoRA └── lora.py ├── Mistral-7B ├── README.md ├── lol.py └── model.py ├── NeoBERT ├── model │ ├── BERT.py │ ├── MLM_NSP.py │ ├── RMS_Norm.py │ ├── SwiGLU.py │ ├── attention.py │ ├── embeddings.py │ ├── feedforward.py │ └── residual.py └── train.py ├── README.md ├── RoPE └── rope.py ├── SGD └── SGD.ipynb ├── VAE └── VAE.py └── Word2Vec └── main.py /Activation Functions/ReLU.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "nbformat": 4, 3 | "nbformat_minor": 0, 4 | "metadata": { 5 | "colab": { 6 | "provenance": [] 7 | }, 8 | "kernelspec": { 9 | "name": "python3", 10 | "display_name": "Python 3" 11 | }, 12 | "language_info": { 13 | "name": "python" 14 | } 15 | }, 16 | "cells": [ 17 | { 18 | "cell_type": "markdown", 19 | "source": [ 20 | 
"![image.png](data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAOQAAABNCAYAAAC2RMirAAAAAXNSR0IArs4c6QAAAARnQU1BAACxjwv8YQUAAAAJcEhZcwAADsMAAA7DAcdvqGQAAAglSURBVHhe7d1xTJTnHQfw7+YfZyTDtAukf0AUYmLG+Ud7XTEYo8cfE1IRY2HMDM6kTlaIOLDEeCOblEVvaEsweJQhrElBo15oM2Aab4mBWsKt2bh2FvjDnqRyJDV3SRsuKblLZt49790zBYHJwTGeO76f5PSe3733eon58jzv8zz38gNNABEp4YfybyJSAANJpBAGkkghDCSRQhhIIoUwkEQKYSCJFMJAEimEgSRSCANJpBAGkkghDCSRQhhIIoUwkEQKYSCJFMJAUvRmAgg9ls8pphhIWrKQ34PBy5XYu+NldN6TRYopBpKe7/NWZGbuRWmtDZ19TnhlmWKPgaTne+U4JiY+QU/XeZRslTVaFQwkkUIYSCKFMJBECmEgiRTCQBIphIEkUggDSaQQBpJIIQwkRSEIyD2sQe5lXRXrI5DfudBUWgPHw5AsRGGqDzWlTXB9J9vrUXjrXGZ4+1yNM1JqLdHb4lHVB3+kRDGQ+L/9asaNVkslpk/eRd1ugyxGJzRkw57mzWjrPg7TJlkkWgUJ3kOG4G6vR5fxPKqWGUadYXcVzhu7UNniEmckWj2JHciHvbBfAsoPm5EsS8uTDPPhcqRebkL3uCwRrYIEDmQILkcTBndbkJclSyuRlQfLbjc6brKXpNWTuIEMuTHc44fRbES6LK1Mevhc/rYBuJlIWiXxGcjHIXjHXXC5FnjcD0SOue+Gww+YtiwSxxkP+hqPoqy0DPty98Ha44H3n52oLCpGWf4+VH44Nq8nTN9iEn+6MPYg0iaKOX2WNZ5MOs9pb5oztIyMhR/ZFb3apDjO11ch2tma/V7kfXNNar1vl2rvfTodbgU/PRd5//73tBHfiGb/ZbZoV2u3feGXn7pn17LFcdU3n32BKDbiatnD21+D4urPkXvhfdQdMiIZXjiq98J6y4IPvmiAedbMzdgHB3DgbBouDreh8CVZlAJ/s+Jn/zqIu6dyoM+9Bu7U4+XybuyyfYIrpkEU59fDe6gNf23KQ0rkLRGP+lC5qwZTv+tH/1GjLC7EA0fNO+j7VjaXKO3nf0TjgdgMsCk+xU8gxztRXGBD6GQPek6YwkHS+fsrsbPaidqPJnD8FVkU3JcyUdyct2Ag9bumBTYkI1mexN0sjr2UIs7x2ZxzzCMD6RSfYUJ8hrWgL8YTMDExIZ8lGD2Q6gtqwxf0YWSpduNrWZJG39frBVrHmCxIIy36ELZC6/1GFhY1qnXs1489pw0HZWkx3/RqFfrQtmVEFohiK04mdTxw9/iBlBwYt8hSmBdjLr2eD9MzSxuGpP81pJzl0QOM6GuLh17Ftv92u49DCAQWn0o1Jj1vk4E+ZC1D2ZHoHtZ+3s9t3ZPBVJzsxY73arOnU4Juu1aQka0tNMniu1kter2FJnWCovcsCr92ZmBam3aeFs8ztNJr+lRQxOS1N7WC9lHZmkVO6lT0cVKHVkec9JBG5Lwhrtn+MQrPjCxNOWE724XNp9pw/vU5Uy9hKVt3iHf54fPJZZAnPHBdcQNbcrFrmw+3rzvC1R/JTi/0wIGm/lfRUDa/hw34fOKMKdixdf6/RxQLcTTLGoD78hlYP55C6o+B4CYjDh6tgiVnkXCEXLD9pBSuBWZEvbes+FWLB6lJych6qwGWDd34bZ0Twe3p2PiiGW/XH4PpBXnwLOGZ23Yzrt6tRc7zRq1rQb/FvyEZhg2yTfEn3E8mJDkRZLkRXpdcuUnthiVDy74wLM6slqDvK22gvULbI4bTdrcsUlxK4L2sBuTsL4dpqBvOWGwIH3eie8iM2pLI2qUSYn2L/y9bsbPWye83rqEEDqSQZUHtr33ouD4oBrwrEcDg9Q7gRBUO
zpnlXWOxvsX/vwF/SIGNuuFtjfWwNVtRlm+FrdEKa10lit9N/I39iR1IvZf8TRuOjJ2GfWj5/5WhITtOjx1Bw1tPNyTQ6hm7YoM3rw51J0uQc98BV1Iudj0WI4D7XvjkMYkq8e8YoNNv4VF1A+k20ZNsiTJS+i08Tn+FX9hrkbPARI8a/Oir2omaW5i3Yykq+hD4z+n4zF44d8vgUvnd6LvugOPvU6Lhg89gRt0f6mBOEz/UvnSg/po7ctwitr3RgGM/NSAUEOOZ5GQYHjpQltuNrKtXUZezsm+0xo3wlSTFOZ/We1zfbbTCSR23fd5a71Lpa8Kl2RlaUcuIFtmyr2mj7QVaxu8HnrSjFV4jzj6jDXyrt4La9PfhckJL8CEr/V/oo4iiJgwbG9F8wvTk7gyGpM3AJkOUw3wPuosysfOsA7fvOES3aUSaGJmELxtueeQxiWt9DFkTXnRDVv8dG05+uMDU8/deDE8ZsGt7qizM8mIh3rlYgm2y+VQIrnf3oLTNj5I/fYHGfSKOM36M3bGj/uw0jnx0EYViyLp0fjhrj+IvoTQYtu/Axo8HgDyjCHc+jp3IidGXzdXFQCaEtbyGdKMpsxitIiqm3enYKCppWWYYjUaY94oArZNLv1jhkJVW5tGUGGQKWRY0dF3BFfFotB6D5QDDuBwMJK3MCymRYeTWVMwf6AbgGec2g2gwkAlhDW/xbzDh4KlnNv4LgYeDaC0vRufX4rPRkvEaMp7p13xFTbLxjNcvRr+euOx1yFkb/1P1q0hxHflaCUoOF8K0rAXN9YuBpKdWujGAVoyBpFlCCMwYkMzfX7JmGEgihXBSh0ghDCSRQhhIIoUwkEQKYSCJFMJAEimEgSRSCANJpBAGkkghDCSRQhhIIoUwkEQKYSCJFMJAEimEgSRSCANJpBAGkkgZwH8AhVeX75HIBNYAAAAASUVORK5CYII=)" 21 | ], 22 | "metadata": { 23 | "id": "Z1Ru8l8Cfz8b" 24 | } 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": 4, 29 | "metadata": { 30 | "colab": { 31 | "base_uri": "https://localhost:8080/", 32 | "height": 626 33 | }, 34 | "id": "gHp1zX3Ffkxm", 35 | "outputId": "6a016c07-b4ba-4471-c691-e95d8105fda9" 36 | }, 37 | "outputs": [ 38 | { 39 | "output_type": "stream", 40 | "name": "stdout", 41 | "text": [ 42 | "[ 0. 0. 0. 0. 0. 0.\n", 43 | " 0. 0. 0. 0. 0. 0.\n", 44 | " 0. 0. 0. 0. 0. 0.\n", 45 | " 0. 0. 0. 0. 0. 0.\n", 46 | " 0. 0.20408163 0.6122449 1.02040816 1.42857143 1.83673469\n", 47 | " 2.24489796 2.65306122 3.06122449 3.46938776 3.87755102 4.28571429\n", 48 | " 4.69387755 5.10204082 5.51020408 5.91836735 6.32653061 6.73469388\n", 49 | " 7.14285714 7.55102041 7.95918367 8.36734694 8.7755102 9.18367347\n", 50 | " 9.59183673 10. 
]\n" 51 | ] 52 | }, 53 | { 54 | "output_type": "execute_result", 55 | "data": { 56 | "text/plain": [ 57 | "[]" 58 | ] 59 | }, 60 | "metadata": {}, 61 | "execution_count": 4 62 | }, 63 | { 64 | "output_type": "display_data", 65 | "data": { 66 | "text/plain": [ 67 | "
" 68 | ], 69 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAh8AAAGzCAYAAACPa3XZAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA6tUlEQVR4nO3dd3hUdd7+8Tt1EkIKLQmB0DshAQWygA1lQQQFFSzL7rrqWrEDAq6AgBqaZVUedX12wf1ZAQWRVVkbIFKkGSAQegklCUWSkJA6398fgTxGQkmYmTPl/bquuS5z5sw59+E45GY+Z2b8jDFGAAAALuJvdQAAAOBbKB8AAMClKB8AAMClKB8AAMClKB8AAMClKB8AAMClKB8AAMClKB8AAMClKB8AAMClKB8AAMClKB8ALmj27Nny8/OruAUGBqpRo0b6y1/+ooMHD1Z7e0uWLJGfn5/mzZt3znX8/Pz0yCOPVHnfvHnz5OfnpyVLllR73wCsF2h1AACeY9KkSWrevLkKCwu1atUqzZ49W8uXL9fmzZsVEhJidTwAHoLyAeCi9e/fX127dpUk/fWvf1X9+vU1depULVy4ULfddpvF6QB4CsYuAGrsyiuvlCTt2rWrYll6erqGDBmiunXrKiQkRF27dtXChQutigjADVE+ANTY3r17JUl16tSRJKWlpel3v/udtm7dqjFjxuill15SWFiYBg8erPnz51uYFIA7YewC4KLl5OTo6NGjKiws1OrVqzVx4kTZbDYNHDhQkvT444+rSZMmWrNmjWw2myTp4Ycf1hVXXKHRo0fr5ptvtjI+ADfBKx8ALlqfPn3UoEEDxcfHa8iQIQoLC9PChQvVuHFjHT9+XN99951uu+025eXl6ejRozp69KiOHTumfv36aceOHTV6ZwwA78MrHwAu2syZM9WmTRvl5OToX//6l5YtW1bxCsfOnTtljNG4ceM0bty4Kh+fnZ2tRo0aOSyPn5+fw7YFwHUoHwAuWvfu3Sve7TJ48GBdccUV+sMf/qBt27bJbrdLkkaOHKl+/fpV+fhWrVpd9L5sNptOnTpV5X0FBQWSxNt7AQ9F+QBQIwEBAUpJSVHv3r31xhtv6J577pEkBQUFqU+fPpe8/aZNm2rbtm1V3ndmedOmTS95PwBcj2s+ANTYNddco+7du+vVV19VRESErrnmGr399ts6fPjwWeseOXKkWtu+4YYbtGrVKq1bt67S8hMnTuj9999X586dFRsbe0n5AViDVz4AXJJRo0Zp6NChmj17tmbOnKkrrrhCnTp10n333acWLVooKytLK1eu1IEDB5SamlrpsZ988onS09PP2uZdd92lMWPGaO7cubrqqqv0wAMPqF27djp06JBmz56tw4cPa9asWa46RAAORvkAcEluueUWtWzZUjNmzNB9992ntWvXauLEiZo9e7aOHTum6OhodenSRePHjz/rsR999FGV27zmmmt0xRVXaPXq1Xruuec0Z84cZWVlKSIiQj179tTHH3+s5ORkZx8aACfxM8YYq0MAAADfwTUfAADApSgfAADApSgfAADApSgfAADApSgfAADApSgfAADApdzucz7sdrsOHTqk8PBwvjQKAAAPYYxRXl6e4uLi5O9//tc23K58HDp0SPHx8VbHAAAANZCRkaHGjRufdx23Kx/h4eGSysNHRERYnAYAAFyM3NxcxcfHV/wePx+3Kx9nRi0RERGUDwAAPMzFXDLBBacAAMClKB8AAMClKB8AAMClKB8AAMClKB8AAMClKB8AAMClKB8AAMClKB8AAMClKB8AAMClKB8AAMClql0+li1bphtvvFFxcXHy8/PTggULKt1vjNH48ePVsGFDhYaGqk+fPtqxY4ej8gIAAA9X7fKRn5+vpKQkzZw5s8r7p02bptdee01vvfWWVq9erbCwMPXr10+FhYWXHBYAAHi+an+xXP/+/dW/f/8q7zPG6NV
XX9Wzzz6rQYMGSZL+/e9/KyYmRgsWLNAdd9xx1mOKiopUVFRU8XNubm51IwEAgItQWmbXg++t021d49W3Y6xlORx6zceePXuUmZmpPn36VCyLjIxUcnKyVq5cWeVjUlJSFBkZWXGLj493ZCQAAHDa28t265ut2Ro5N1U5BSWW5XBo+cjMzJQkxcTEVFoeExNTcd9vjR07Vjk5ORW3jIwMR0YCAACStmXm6dVvtkuSJtzYUZG1gizLUu2xi6PZbDbZbDarYwAA4LVKyuwaOTdVJWVGfdpH65bLGlmax6GvfMTGls+PsrKyKi3PysqquA8AALjWW0t2adPBHEWGBunFmzvJz8/P0jwOLR/NmzdXbGysvv3224plubm5Wr16tXr06OHIXQEAgIuw9XCuXvuu/CMvJt7UUdERIRYnqsHY5eTJk9q5c2fFz3v27NHPP/+sunXrqkmTJnriiSf0/PPPq3Xr1mrevLnGjRunuLg4DR482JG5AQDABZSU2TViTvm4pW+HGA3qHGd1JEk1KB9r165V7969K35+6qmnJEl33XWXZs+eraefflr5+fm6//77deLECV1xxRX66quvFBJifdMCAMCXzPx+p7YczlWdWkF6wQ3GLWf4GWOM1SF+LTc3V5GRkcrJyVFERITVcQAA8EibD+Zo8MwfVWo3eu3OLropybmvelTn9zff7QIAgJcpLi1/d0up3ah/QqxuTGxodaRKKB8AAHiZN77bofTMPNUNC9bkwQluM245g/IBAIAX2XQgRzOX7JIkTR6UoPq13e+ztCgfAAB4iaLSMo2Y+7PK7EYDEhtqgJuNW86gfAAA4CVe+3aHtmedVP3awZo8KMHqOOdE+QAAwAukZpzQm6fHLc8P7qS6YcEWJzo3ygcAAB6usKRMI+amym6km5LidH2Ce3+lCeUDAAAP98o327Uz+6Tq17Zp4k0drY5zQZQPAAA82Pr9v+idZbslSS/enKA6bjxuOYPyAQCAhyosKdPI0+OWW7o0Ut+O7j1uOYPyAQCAh3rpv9u0+0i+osNtmnCj+49bzqB8AADggdbuPa7/Xb5HkjTl1k6KrBVkcaKLR/kAAMDDnCouH7cYIw25vLGubRdjdaRqoXwAAOBhpi1O195jBYqNCNG4gR2sjlNtlA8AADzI6t3HNOvHvZJOj1tCPWfccgblAwAAD1FQXKpR8zZKkm7vGq9r2kZbnKhmKB8AAHiIqV+ma//xAsVFhuhvA9tbHafGKB8AAHiAFbuO6t2V+yRJU4ckKiLE88YtZ1A+AABwcyeLSvX06XHLH5Kb6MrWDSxOdGkoHwAAuLmUL7bqwC+n1CgqVM/c4LnjljMoHwAAuLHlO47q/dX7JUnThySqti3Q4kSXjvIBAICbyiss0ehPysctf/pdU/VsVd/iRI5B+QAAwE29+MVWHTxxSvF1QzWmfzur4zgM5QMAADe0dPsRffhThiRp+pAkhXnBuOUMygcAAG4mt7BEY06PW/7Ss5l+16KexYkci/IBAICbeX7RFh3OKVSzerX09PVtrY7jcJQPAADcyPfp2Zqz9oD8/KTpQ5NUK9h7xi1nUD4AAHATOQUlGvNp+bjlnl7N1a1ZXYsTOQflAwAANzFp0RZl5RapRf0wjezrfeOWMygfAAC4gW+2ZOmT9Qfkf3rcEhocYHUkp6F8AABgsRMFxRo7f5Mk6b4rW+jypnUsTuRclA8AACw28fMtOpJXpJYNwvTk79tYHcfpKB8AAFhocVqm5m84KH8/6aXbOiskyHvHLWdQPgAAsMjx/GL9bf5mSdIDV7dU5/goawO5COUDAACLTFiYpqMni9Q6urYev6611XFchvIBAIAFvtx0WJ+nHlKAv59eui3JJ8YtZ1A+AABwsWMni/TsgvJxy0NXt1Ri4yhrA7kY5QMAABcb/1majuUXq11suB69rpXVcVyO8gEAgAst2nhI/9l0WIH+fpoxNEm2QN8Zt5xB+QAAwEWO5BVp3Olxy/D
erZTQKNLiRNagfAAA4ALGGD27YJN+KShR+4YRGt7b98YtZ1A+AABwgYWph7Q4LUuB/n56aWiSggN991ew7x45AAAukp1bqPGfpUmSHruutTrERVicyFqUDwAAnMgYo2fmb1bOqRIlNIrQQ9e0tDqS5SgfAAA40YKfD+qbrVkKCih/d0tQAL96+RMAAMBJsnILNeH0uOWJPm3ULta3xy1nUD4AAHACY4zGfrpJuYWlSmwcqQeuamF1JLdB+QAAwAnmrTug79KzFRzgrxlDkxTIuKUCfxIAADjY4ZxTmvT5FknSk79vozYx4RYnci+UDwAAHMgYozGfbFJeUak6x0fpviubWx3J7VA+AABwoDlrM7R0+xEFBzJuORf+RAAAcJCDJ05p8qKtkqSRfduoVXRtixO5J8oHAAAOUD5u2aiTRaW6rEmU7r2Cd7ecC+UDAAAH+PCnDP2w46hsp8ctAf5+VkdyW5QPAAAuUcbxAr3wn/J3tzx9fTu1aMC45XwoHwAAXAK73Wj0JxuVX1ymbs3q6O6ezayO5PYcXj7Kyso0btw4NW/eXKGhoWrZsqUmT54sY4yjdwUAgOXeX71PK3YdU0iQv6YPSZI/45YLCnT0BqdOnao333xT7777rjp27Ki1a9fq7rvvVmRkpB577DFH7w4AAMvsP1agF79IlySNub6dmtUPsziRZ3B4+VixYoUGDRqkAQMGSJKaNWumDz/8UD/99JOjdwUAgGXsdqNR81J1qqRMyc3r6s89mlkdyWM4fOzSs2dPffvtt9q+fbskKTU1VcuXL1f//v2rXL+oqEi5ubmVbgAAuLt/r9yr1XuOq1ZwAOOWanL4Kx9jxoxRbm6u2rVrp4CAAJWVlemFF17QsGHDqlw/JSVFEydOdHQMAACcZu/RfE35qnzcMrZ/OzWpV8viRJ7F4a98zJkzR++//74++OADrV+/Xu+++65mzJihd999t8r1x44dq5ycnIpbRkaGoyMBAOAwZ8YthSV29WxZT8OSm1odyeM4/JWPUaNGacyYMbrjjjskSZ06ddK+ffuUkpKiu+6666z1bTabbDabo2MAAOAUs1bs1Zq9vygsOEBTb01k3FIDDn/lo6CgQP7+lTcbEBAgu93u6F0BAOBSu4+c1LTT45a/Deig+LqMW2rC4a983HjjjXrhhRfUpEkTdezYURs2bNDLL7+se+65x9G7AgDAZcrsRiPnpqqo1K4rW9fXnd3jrY7ksRxePl5//XWNGzdODz/8sLKzsxUXF6cHHnhA48ePd/SuAABwmX8u3631+0+oti1QU25NlJ8f45aa8jNu9tGjubm5ioyMVE5OjiIiIqyOAwCAdmbn6YbXlqu41K6pt3bS7d2aWB3J7VTn9zff7QIAwHmUltk1cu5GFZfadXWbBrqtK+OWS0X5AADgPN75YY9+zjih8JBATbm1E+MWB6B8AABwDtuz8vTK1+Wf2D1+YAc1jAy1OJF3oHwAAFCFkjK7RsxJVXGZXde2i9aQyxtbHclrUD4AAKjC20t3adPBHEWEBCrlFsYtjkT5AADgN9Izc/X3b3dIkiYO6qiYiBCLE3kXygcAAL9yZtxSUmbUp32MBnduZHUkr0P5AADgV/7n+11KO5SrqFpBevGWBMYtTkD5AADgtLRDOXr9u9Pjlps6KjqccYszUD4AAJBUXFo+bim1G13fMVY3JcVZHclrUT4AAJD0xvc7lZ6Zp7phwXr+ZsYtzkT5AAD4vM0HczTz+52SpEmDOqp+bZvFibwb5QMA4NOKSss0Yk6qyuxGAzo11MBExi3ORvkAAPi0177doW1ZeaoXFqxJgzpaHccnUD4AAD4rNeOE3lq6W5L0/OAE1WPc4hKUDwCATyosKdPIueXjlhuT4tS/U0OrI/kMygcAwCe9+s0O7cg+qfq1bZp0E+MWV6J8AAB8zvr9v+gfy3ZJkl68OUF1woItTuRbKB8AAJ9yZtxiN9LNXRqpb8dYqyP5HMoHAMCnvPz
1du0+kq8G4TZNuLGD1XF8EuUDAOAz1u07rnd+KH93S8rNnRRVi3GLFSgfAACfcKq4TCPnbpQx0q2XNVafDjFWR/JZlA8AgE+Yvnib9hzNV0yETeMZt1iK8gEA8Ho/7TmuWSv2SJKm3JqoyNAgixP5NsoHAMCrFRSXatS8VBkj3da1sXq3jbY6ks+jfAAAvNq0r7Zp37ECNYwM0bMDGbe4A8oHAMBrrdp9TLNX7JUkTb01UREhjFvcAeUDAOCV8ovKxy2SdGf3JrqqTQOLE+EMygcAwCtN+TJdGcdPqVFUqJ65oZ3VcfArlA8AgNdZsfOo/t+qfZLKxy3hjFvcCuUDAOBVThaVatS8jZKkP/6uia5oXd/iRPgtygcAwKu8+MVWHTxxSo3rhGps//ZWx0EVKB8AAK/xw44j+mD1fknS9CFJCrMFWpwIVaF8AAC8Ql5hiUafHrf8uUdT9WhZz+JEOBfKBwDAK7zwn606lFOoJnVrafT1vLvFnVE+AAAeb8m2bH20JkOSNH1IIuMWN0f5AAB4tJxTJRrzySZJ0t29mim5BeMWd0f5AAB4tMmLtigzt1DN6tXS0/0Yt3gCygcAwGN9l56leesOyM9PmjE0SaHBAVZHwkWgfAAAPFJOwf+NW/56RXN1bVbX4kS4WJQPAIBHmvh5mrLzitSiQZhG9G1rdRxUA+UDAOBx/puWqU83HJT/6XFLSBDjFk9C+QAAeJRf8ov1zPzNkqT7rmyhy5rUsTgRqovyAQDwKBMWpunoySK1iq6tJ3/fxuo4qAHKBwDAY3y1+bAWph5i3OLhKB8AAI9w7GSR/nZ63PLg1S3VOT7K2kCoMcoHAMAjjF+YpmP5xWoTU1uP92ltdRxcAsoHAMDtLdp4SP/ZeFgB/n56aWhn2QIZt3gyygcAwK0dPVmk8Z+lSZKGX9NSnRpHWpwIl4ryAQBwW8YYPTt/s47nF6tdbLgeuZZxizegfAAA3NbnGw/rq7RMBfr7acbQJAUH8mvLG3AWAQBuKTuvUOM/K393y/DerZTQiHGLt6B8AADcjjFGf5u/WScKStShYYSG925ldSQ4EOUDAOB2Fvx8UF9vyVJQgJ9euo1xi7fhbAIA3EpWbqEmnH53y+PXtVb7hhEWJ4KjUT4AAG7DGKNnPt2k3MJSdWoUqQevbml1JDiBU8rHwYMH9cc//lH16tVTaGioOnXqpLVr1zpjVwAAL/LJ+oP6Nj1bwQH+mjE0SYEB/BvZGwU6eoO//PKLevXqpd69e+vLL79UgwYNtGPHDtWpw1ceAwDO7XDOKU38vHzc8sTvW6ttbLjFieAsDi8fU6dOVXx8vGbNmlWxrHnz5o7eDQDAixhjNOaTTcorLFVSfJTuv7KF1ZHgRA5/PWvhwoXq2rWrhg4dqujoaHXp0kXvvPPOOdcvKipSbm5upRsAwLfMXXtAS7cfUXCgv14amsi4xcs5/Ozu3r1bb775plq3bq3FixfroYce0mOPPaZ33323yvVTUlIUGRlZcYuPj3d0JACAGzt44pQmL9oiSRrZt41aRTNu8XZ+xhjjyA0GBwera9euWrFiRcWyxx57TGvWrNHKlSvPWr+oqEhFRUUVP+fm5io+Pl45OTmKiODtVQDgzYwx+vO/ftIPO47qsiZRmvtgTwX4+1kdCzWQm5uryMjIi/r97fBXPho2bKgOHTpUWta+fXvt37+/yvVtNpsiIiIq3QAAvuHDnzL0w46jsgX6a/rQJIqHj3B4+ejVq5e2bdtWadn27dvVtGlTR+8KAODBMo4X6IX/lI9bRvVrq5YNalucCK7i8PLx5JNPatWqVXrxxRe1c+dOffDBB/rHP/6h4cOHO3pXAAAPZbcbjf5ko/KLy9S1aR3d3Yt3RfoSh5ePbt26af78+frwww+VkJCgyZMn69VXX9WwYcMcvSsAgId6/6f9WrHrmEKCGLf4Iod/zockDRw4UAMHDnTGpgEAHm7/sQKlfLFVkjT6+nZqXj/M4kR
wNd5IDQBwGbvdaNS8VBUUl6l787q6q0czqyPBApQPAIDL/L9V+7R6z3GFBgVoxpAk+TNu8UmUDwCAS+w9mq8pX6ZLksbe0E5N6tWyOBGsQvkAADjdmXHLqZIy9WxZT39M5uMXfBnlAwDgdLNW7NWavb8oLDhAU29NZNzi4ygfAACn2n3kpKZ9VT5ueWZAe8XXZdzi6ygfAACnKbMbjZq3UUWldl3Rqr7+0L2J1ZHgBigfAACn+dfyPVq37xfVtgVq6pBE+fkxbgHlAwDgJDuzT2r6f8u/6+vZAe3VKCrU4kRwF5QPAIDDldmNRs5NVXGpXVe1aaDbu8VbHQluhPIBAHC4d37YrZ8zTijcFqipt3Zi3IJKKB8AAIfakZWnl/+7XZI07sYOahjJuAWVUT4AAA5TWmbXiLmpKi6z69p20Rp6eWOrI8ENUT4AAA7z9rLd2nggRxEhgUq5hXELqkb5AAA4RHpmrl79pnzc8txNHRUTEWJxIrgrygcA4JKVlNk1cm6qSsqM+rSP1s1dGlkdCW6M8gEAuGRvLtmlzQdzFVUrSC/ezLgF50f5AABcki2HcvX6dzskSRNv6qhoxi24AMoHAKDGikv/b9zSr2OMbkqKszoSPADlAwBQYzO/36kth3NVp1aQnh/MuAUXh/IBAKiRzQdzNPP7nZKkSYMS1CDcZnEieArKBwCg2s6MW0rtRgM6NdSNjFtQDZQPAEC1vf7dDqVn5qleWLAmDepodRx4GMoHAKBaNh44of9ZskuSNHlwgurVZtyC6qF8AAAuWlFpmUbOTVWZ3ejGpDjd0Kmh1ZHggSgfAICL9vdvdmh71knVr23TpJsYt6BmKB8AgIvyc8YJvbW0fNzyws0JqhMWbHEieCrKBwDgggpLyjRizs+yG2lw5zj16xhrdSR4MMoHAOCCXvl6u3YdyVeDcJueY9yCS0T5AACc17p9v+idH3ZLkl68uZOiajFuwaWhfAAAzqmwpEyj5qbKbqRbLmuk33eIsToSvADlAwBwTjMWb9Puo/mKibBpwkDGLXAMygcAoEpr9h7XP3/cI0mackuiImsFWZwI3oLyAQA4y6ni8nGLMdLQyxurd7toqyPBi1A+AABnmbY4XXuPFahhZIieHdjB6jjwMpQPAEAlq3Yf06wf90qSptyaqMhQxi1wLMoHAKBCflGpnp63UZJ0R7d4Xd2mgcWJ4I0oHwCAClO/Stf+4wWKiwzR3wa0tzoOvBTlAwAgSVqx86j+vXKfJGnqkESFhzBugXNQPgAAOllUqqc/KR+3DEtuoitbM26B81A+AABK+WKrDvxySo3rhGrsDYxb4FyUDwDwcT/sOKL3V++XJE27NVG1bYEWJ4K3o3wAgA/LKyzR6NPvbvnT75qqZ6v6FieCL6B8AIAPe/GLrTqUU6gmdWtpTP92VseBj6B8AICPWrr9iD78KUOSNH1IosIYt8BFKB8A4INyTv3fuOXuXs2U3KKexYngSygfAOCDnl+0RZm5hWpWr5ae7se4Ba5F+QAAH/NdepbmrjsgPz9p+tAkhQYHWB0JPobyAQA+JKegRGM/3SRJurdXc3VrVtfiRPBFlA8A8CETF6UpK7dILeqHaWS/tlbHgY+ifACAj/h6S5Y+XX/w9LglUSFBjFtgDcoHAPiAX/KL9cz88nHLfVe20OVNGbfAOpQPAPABz32epiN5RWrZIExP/b6N1XHg4ygfAODlvtqcqc9+PiR/P+ml2zozboHlKB8A4MWO5xfr2QXl45YHr26pzvFR1gYCRPkAAK82/rPNOnqyWG1iauvxPq2tjgNIckH5mDJlivz8/PTEE084e1cAgF/5YtNhLdp4WAH+fpoxNEm2QMYtcA9OLR9r1qzR22+/rcTERGfuBgDwG0dPFunZBZslSQ9f01KJjaOsDQT8itPKx8mTJzVs2DC98847qlOnjrN2AwD4DWOMxi3YrOP5xWoXG65Hr2XcAvfitPIxfPhwDRgwQH369DnvekV
FRcrNza10AwDU3KKNh/Xl5syKcUtwIJf3wb0EOmOjH330kdavX681a9ZccN2UlBRNnDjRGTEAwOdk5xVq3Gfl45bhvVspoVGkxYmAszm8DmdkZOjxxx/X+++/r5CQkAuuP3bsWOXk5FTcMjIyHB0JAHyCMUbPzt+sEwUl6tAwQo/0bmV1JKBKDn/lY926dcrOztZll11WsaysrEzLli3TG2+8oaKiIgUE/N8V1zabTTabzdExAMDnLEw9pP9uyVJQAOMWuDeHl4/rrrtOmzZtqrTs7rvvVrt27TR69OhKxQMA4BjZuYUa/1maJOmxa1urQ1yExYmAc3N4+QgPD1dCQkKlZWFhYapXr95ZywEAl84Yo2fmb1LOqRIlNIrQg9e0tDoScF68JgcAHu7T9Qf1zdZsBQX46aWhnRUUwF/tcG9OebfLby1ZssQVuwEAn5OZU6jnPi8ftzzRp43axoZbnAi4MOoxAHgoY4zGfrpReYWlSmocqQeuamF1JOCiUD4AwEPNXXdA3287ouAAf80YmqRAxi3wEPyfCgAe6NCJU5r8+RZJ0lN926h1DOMWeA7KBwB4GGOMRn+yUXlFperSJEr3Xcm4BZ6F8gEAHuajNRn6YcdR2QLLxy0B/n5WRwKqhfIBAB7kwC8Fen5R+bhlVL+2atmgtsWJgOqjfACAhzgzbskvLlPXpnV0d6/mVkcCaoTyAQAe4v3V+/XjzmMKCfLXtCGJjFvgsSgfAOABMo4X6MUvtkqSnu7XTi0Yt8CDUT4AwM3Z7Uaj5qWqoLhM3ZvV1V96NrM6EnBJKB8A4Ob+36p9WrX7uEKDAjR9aKL8GbfAw1E+AMCN7TuWrylfpkuSRl/fVk3rhVmcCLh0lA8AcFN2u9GouRt1qqRMv2tRV3/u0czqSIBDUD4AwE3NXrFXP+09rlrBAZo+JIlxC7wG5QMA3NCeo/matrh83DL2hvaKr1vL4kSA41A+AMDNlNmNRs1NVWGJXb1a1dOw7k2sjgQ4FOUDANzMrB/3aO2+XxQWHKCpt/LuFngfygcAuJGd2Sc1ffE2SdKzAzuocR3GLfA+lA8AcBNlpz9MrKjUritb19cd3eKtjgQ4BeUDANzE//6wWxv2n1C4LVBTb02Unx/jFngnygcAuIEdWXl66evtkqRxAzsoLirU4kSA81A+AMBipWV2jZybquJSu65p20BDuza2OhLgVJQPALDYP37YrdQDOQoPCdSUWxi3wPtRPgDAQtsy8/Tq1zskSRNu7KjYyBCLEwHOR/kAAIuUnBm3lNl1Xbto3XpZI6sjAS5B+QAAi7y1ZJc2HcxRZGiQXrylE+MW+AzKBwBYYOvhXL32Xfm4ZeJNHRUTwbgFvoPyAQAuVlJm14g5qSopM+rbIUaDOsdZHQlwKcoHALjYzO93asvhXEXVCtLzNycwboHPoXwAgAulHcrRG9/tlCRNGpSg6HDGLfA9lA8AcJHi0vJxS6ndqH9CrG5MbGh1JMASlA8AcJE3vtuh9Mw81Q0L1uTBjFvguygfAOACmw7kaOaSXZKkyYMSVL+2zeJEgHUoHwDgZEWlZRox92eV2Y0GJDbUAMYt8HGUDwBwsr9/s0Pbs06qfu1gTR6UYHUcwHKUDwBwotSME3prafm45fnBnVQ3LNjiRID1KB8A4CSFJWUaMTdVdiMN6hyn6xNirY4EuAXKBwA4ySvfbNfO7JOqX9um527saHUcwG1QPgDACdbt+0XvLNstSXrx5gTVYdwCVKB8AICDFZaUadTpccstXRqpb0fGLcCvUT4AwMFe+u827T6ar+hwmyYwbgHOQvkAAAdau/e4/nf5HknSlFs7KbJWkMWJAPdD+QAABzlVXKaRc1NljDTk8sa6tl2M1ZEAt0T5AAAHmbY4XXuPFSg2IkTjBnawOg7gtigfAOAAq3cf0+wVeyWdHreEMm4BzoXyAQCXqKC4VKPmbZQx0h3d4nVN22irIwFujfIBAJdo6pfp2n+8QHGRIfr
bgPZWxwHcHuUDAC7Bil1H9e7KfZKkqUMSFR7CuAW4EMoHANRQflGpnp63UZL0h+QmurJ1A4sTAZ6B8gEANZTy5VYd+OWUGkWF6pkbGLcAF4vyAQA1sHzHUb23ar8kafqQRNW2BVqcCPAclA8AqKa8whKN/qR83PKn3zVVz1b1LU4EeBbKBwBU04tfbNXBE6cUXzdUY/q3szoO4HEoHwBQDUu3H9GHP2VIkqYPSVIY4xag2igfAHCRcgtLNOb0uOUvPZvpdy3qWZwI8EyUDwC4SM8v2qLDOYVqVq+Wnr6+rdVxAI/l8PKRkpKibt26KTw8XNHR0Ro8eLC2bdvm6N0AgEt9n56tOWsPyM9Pmj40SbWCGbcANeXw8rF06VINHz5cq1at0tdff62SkhL17dtX+fn5jt4VALhETkGJxnxaPm65p1dzdWtW1+JEgGdzeHX/6quvKv08e/ZsRUdHa926dbrqqqscvTsAcLpJi7YoK7dILeqHaWRfxi3ApXL664Y5OTmSpLp1q/6XQlFRkYqKiip+zs3NdXYkALho32zJ0ifrD8j/9LglNDjA6kiAx3PqBad2u11PPPGEevXqpYSEhCrXSUlJUWRkZMUtPj7emZEA4KKdKCjW2PmbJEn3XdlClzetY3EiwDs4tXwMHz5cmzdv1kcffXTOdcaOHaucnJyKW0ZGhjMjAcBFe25hmo7kFallgzA9+fs2VscBvIbTxi6PPPKIFi1apGXLlqlx48bnXM9ms8lmszkrBgDUyOK0TC34+ZD8/aQZQ5MUEsS4BXAUh5cPY4weffRRzZ8/X0uWLFHz5s0dvQsAcKrj+cX62+lxy/1XtVSXJoxbAEdyePkYPny4PvjgA3322WcKDw9XZmamJCkyMlKhoaGO3h0AONyEhWk6erJYraNr64k+ra2OA3gdh1/z8eabbyonJ0fXXHONGjZsWHH7+OOPHb0rAHC4Lzcd1uephxTg76eXbmPcAjiDU8YuAOCJjp0s0rMLNkuSHry6hRIbR1kbCPBSfLcLAJw2/rM0HcsvVrvYcD12HeMWwFkoHwAgadHGQ/rPpsMK9PfTjKFJsgUybgGchfIBwOcdySvSuNPjlod7t1JCo0iLEwHejfIBwKcZY/Tsgk36paBE7RtG6JHerayOBHg9ygcAn7Yw9ZAWp2Up0N9PLw1NUnAgfy0CzsazDIDPys4t1PjP0iRJj17bWh3iIixOBPgGygcAn2SM0TPzNyvnVIk6xkXo4d4trY4E+AzKBwCftODng/pma5aCAso/TCwogL8OAVfh2QbA52TlFmrC6XHLE33aqF0s4xbAlSgfAHyKMUZjP92k3MJSJTaO1ANXtbA6EuBzKB8AfMq8dQf0XXq2ggP8NWNokgIZtwAux7MOgM84nHNKkxZtkSQ9+fs2ahMTbnEiwDdRPgD4BGOMxnyySXmFpUqKj9J9Vza3OhLgsygfAHzCnLUZWrr9iIID/fXS0ETGLYCFePYB8HoHT5zS5EVbJUkj+7ZRq2jGLYCVKB8AvFr5uGWjThaV6rImUbr3Ct7dAliN8gHAq334U4Z+2HFUtsDyd7cE+PtZHQnweZQPAF4r43iBXvhP+btbRvVrqxYNalucCIBE+QDgpex2o9GfbFR+cZm6Nauju3vx7hbAXVA+AHil93/arxW7jikkyF/ThzBuAdwJ5QOA19l/rEApX5S/u2XM9e3UrH6YxYkA/BrlA4BXsduNRs1LVUFxmZKb19WfezSzOhKA36B8APAq/165V6v3HFet4ABNH5Ikf8YtgNuhfADwGnuP5mvKV+mSpLH926lJvVoWJwJQFcoHAK9wZtxSWGJXz5b1NCy5qdWRAJwD5QOAV5i1Yq/W7P1FYcEBmnprIuMWwI1RPgB4vN1HTmra6XHLMwPaK74u4xbAnVE+AHi0MrvRyLmpKiq164pW9fWH7k2sjgTgAigfADzaP5fv1vr9J1TbFqipQxLl58e4BXB3lA8AHmtn9knN+O9
2SdK4ge3VKCrU4kQALgblA4BHKi2za8TcVBWX2nV1mwa6rWu81ZEAXCTKBwCP9M4Pe5SacULhIYGacmsnxi2AB6F8APA427Py9MrX5eOW8QM7qGEk4xbAk1A+AHiUkjK7RsxJVXGZXde2i9aQyxtbHQlANVE+AHiUt5fu0qaDOYoICVTKLYxbAE9E+QDgMdIzc/X3b3dIkp67qaNiIkIsTgSgJigfADzCmXFLSZlRn/bRurlLI6sjAaghygcAj/A/3+9S2qFcRYYG6cWbGbcAnozyAcDtpR3K0evflY9bJg3qqGjGLYBHo3wAcGvFpeXjllK70fUdY3VTUpzVkQBcIsoHALf2xnc7lJ6Zpzq1gjR5cALjFsALUD4AuK3NB3M0c8kuSdLkwQlqEG6zOBEAR6B8AHBLRaVlGjEnVWV2owGdGmpgIuMWwFtQPgC4pde+3aFtWXmqFxasSYM6Wh0HgANRPgC4ndSME3rz9Ljl+cEJqlebcQvgTSgfANxKYUmZRs5Nld1INyXFqX+nhlZHAuBglA8AbuXVb3ZoR/ZJ1a9t08SbGLcA3ojyAcBtrN//i/6xrHzc8uLNCaoTFmxxIgDOQPkA4BZ+PW65uUsj9e0Ya3UkAE5C+QDgFl7+ert2H8lXdLhNE27sYHUcAE5E+QBguXX7juudH3ZLklJu6aSoWoxbAG9G+QBgqVPFZRo5d6OMkYZc3ljXtY+xOhIAJ6N8ALDUtMXp2nM0X7ERIRo3kHEL4AsCrQ4AwDcVlpTpuYVp+mhNhiQp5dZOigwNsjgVAFegfABwuZ3ZeRr+/gZty8qTn580sm9b9W4bbXUsAC5C+QDgUp+sO6BnF2zWqZIy1a9t06u3d9YVretbHQuAC1E+ALhEQXGpxn+WpnnrDkiSerWqp1du76zo8BCLkwFwNaddcDpz5kw1a9ZMISEhSk5O1k8//eSsXQFwc9sy83TTGz9q3roD8veTnvp9G/37nmSKB+CjnFI+Pv74Yz311FOaMGGC1q9fr6SkJPXr10/Z2dnO2B0AN2WM0cdr9mvQzOXamX1SMRE2fXDf7/TYda0V4O9ndTwAFvEzxhhHbzQ5OVndunXTG2+8IUmy2+2Kj4/Xo48+qjFjxlRat6ioSEVFRRU/5+bmKj4+Xjk5OYqIiHBYpqMnizTz+50O2x6AC9t/rEDfppf/o+OqNg30ym1JqlfbZnEqAM6Qm5uryMjIi/r97fBrPoqLi7Vu3TqNHTu2Ypm/v7/69OmjlStXnrV+SkqKJk6c6OgYZ8k9VaJZP+51+n4AVBbg76cRfdvowatayp9XOwDICeXj6NGjKisrU0xM5U8pjImJUXp6+lnrjx07Vk899VTFz2de+XC0qFrBGt67pcO3C+Dc/P389PsOMUpsHGV1FABuxPJ3u9hsNtlszn8Ztm5YsEb1a+f0/QAAgPNz+AWn9evXV0BAgLKysiotz8rKUmwsX5ENAICvc3j5CA4O1uWXX65vv/22Ypndbte3336rHj16OHp3AADAwzhl7PLUU0/prrvuUteuXdW9e3e9+uqrys/P19133+2M3QEAAA/ilPJx++2368iRIxo/frwyMzPVuXNnffXVV2ddhAoAAHyPUz7n41JU533CAADAPVTn97fTPl4dAACgKpQPAADgUpQPAADgUpQPAADgUpQPAADgUpQPAADgUpQPAADgUpQPAADgUpZ/q+1vnfnMs9zcXIuTAACAi3Xm9/bFfHap25WPvLw8SVJ8fLzFSQAAQHXl5eUpMjLyvOu43cer2+12HTp0SOHh4fLz83PotnNzcxUfH6+MjAyv/Oh2bz8+yfuPkePzfN5+jByf53PWMRpjlJeXp7i4OPn7n/+qDrd75cPf31+NGzd26j4iIiK89n8qyfuPT/L+Y+T4PJ+3HyPH5/mccYwXesXjDC44BQAALkX5AAAALuVT5cNms2nChAmy2WxWR3EKbz8+yfuPkePzfN5+jByf53OHY3S7C04BAIB
386lXPgAAgPUoHwAAwKUoHwAAwKUoHwAAwKUoHwAAwKW8qny88MIL6tmzp2rVqqWoqKgq19m/f78GDBigWrVqKTo6WqNGjVJpael5t3v8+HENGzZMERERioqK0r333quTJ0864QiqZ8mSJfLz86vytmbNmnM+7pprrjlr/QcffNCFyS9es2bNzso6ZcqU8z6msLBQw4cPV7169VS7dm3deuutysrKclHi6tm7d6/uvfdeNW/eXKGhoWrZsqUmTJig4uLi8z7Onc/hzJkz1axZM4WEhCg5OVk//fTTedefO3eu2rVrp5CQEHXq1ElffPGFi5JWX0pKirp166bw8HBFR0dr8ODB2rZt23kfM3v27LPOVUhIiIsSV89zzz13VtZ27dqd9zGedP6kqv9O8fPz0/Dhw6tc393P37Jly3TjjTcqLi5Ofn5+WrBgQaX7jTEaP368GjZsqNDQUPXp00c7duy44Har+zyuLq8qH8XFxRo6dKgeeuihKu8vKyvTgAEDVFxcrBUrVujdd9/V7NmzNX78+PNud9iwYUpLS9PXX3+tRYsWadmyZbr//vudcQjV0rNnTx0+fLjS7a9//auaN2+url27nvex9913X6XHTZs2zUWpq2/SpEmVsj766KPnXf/JJ5/U559/rrlz52rp0qU6dOiQbrnlFhelrZ709HTZ7Xa9/fbbSktL0yuvvKK33npLzzzzzAUf647n8OOPP9ZTTz2lCRMmaP369UpKSlK/fv2UnZ1d5forVqzQnXfeqXvvvVcbNmzQ4MGDNXjwYG3evNnFyS/O0qVLNXz4cK1atUpff/21SkpK1LdvX+Xn55/3cREREZXO1b59+1yUuPo6duxYKevy5cvPua6nnT9JWrNmTaXj+/rrryVJQ4cOPedj3Pn85efnKykpSTNnzqzy/mnTpum1117TW2+9pdWrVyssLEz9+vVTYWHhObdZ3edxjRgvNGvWLBMZGXnW8i+++ML4+/ubzMzMimVvvvmmiYiIMEVFRVVua8uWLUaSWbNmTcWyL7/80vj5+ZmDBw86PPulKC4uNg0aNDCTJk0673pXX321efzxx10T6hI1bdrUvPLKKxe9/okTJ0xQUJCZO3duxbKtW7caSWblypVOSOh406ZNM82bNz/vOu56Drt3726GDx9e8XNZWZmJi4szKSkpVa5/2223mQEDBlRalpycbB544AGn5nSU7OxsI8ksXbr0nOuc6+8jdzRhwgSTlJR00et7+vkzxpjHH3/ctGzZ0tjt9irv96TzJ8nMnz+/4me73W5iY2PN9OnTK5adOHHC2Gw28+GHH55zO9V9HteEV73ycSErV65Up06dFBMTU7GsX79+ys3NVVpa2jkfExUVVemVhD59+sjf31+rV692eubqWLhwoY4dO6a77777guu+//77ql+/vhISEjR27FgVFBS4IGHNTJkyRfXq1VOXLl00ffr0847J1q1bp5KSEvXp06diWbt27dSkSROtXLnSFXEvWU5OjurWrXvB9dztHBYXF2vdunWV/uz9/f3Vp0+fc/7Zr1y5stL6Uvlz0pPOlaQLnq+TJ0+qadOmio+P16BBg87594072LFjh+Li4tSiRQsNGzZM+/fvP+e6nn7+iouL9d577+mee+4577eoe9L5+7U9e/YoMzOz0jmKjIxUcnLyOc9RTZ7HNeF232rrTJmZmZWKh6SKnzMzM8/5mOjo6ErLAgMDVbdu3XM+xir//Oc/1a9fvwt+K/Af/vAHNW3aVHFxcdq4caNGjx6tbdu26dNPP3VR0ov32GOP6bLLLlPdunW1YsUKjR07VocPH9bLL79c5fqZmZkKDg4+65qfmJgYtztfVdm5c6def/11zZgx47zrueM5PHr0qMrKyqp8jqWnp1f5mHM9Jz3hXNntdj3xxBPq1auXEhISzrle27Zt9a9//UuJiYnKycnRjBkz1LNnT6WlpTn9G7yrKzk5WbNnz1bbtm11+PBhTZw4UVdeeaU
2b96s8PDws9b35PMnSQsWLNCJEyf0l7/85ZzreNL5+60z56E656gmz+OacPvyMWbMGE2dOvW862zduvWCF0V5kpoc84EDB7R48WLNmTPngtv/9fUqnTp1UsOGDXXddddp165datmyZc2DX6TqHN9TTz1VsSwxMVHBwcF64IEHlJKS4tbfvVCTc3jw4EFdf/31Gjp0qO67777zPtbqcwhp+PDh2rx583mviZCkHj16qEePHhU/9+zZU+3bt9fbb7+tyZMnOztmtfTv37/ivxMTE5WcnKymTZtqzpw5uvfeey1M5hz//Oc/1b9/f8XFxZ1zHU86f57E7cvHiBEjzttKJalFixYXta3Y2Nizrtg98y6I2NjYcz7mtxfZlJaW6vjx4+d8zKWqyTHPmjVL9erV00033VTt/SUnJ0sq/1e3K35xXco5TU5OVmlpqfbu3au2bduedX9sbKyKi4t14sSJSq9+ZGVlOe18VaW6x3jo0CH17t1bPXv21D/+8Y9q78/V57Aq9evXV0BAwFnvLDrfn31sbGy11ncXjzzySMXF59X9129QUJC6dOminTt3Oimd40RFRalNmzbnzOqp50+S9u3bp2+++abarxZ60vk7cx6ysrLUsGHDiuVZWVnq3LlzlY+pyfO4Rhx29YgbudAFp1lZWRXL3n77bRMREWEKCwur3NaZC07Xrl1bsWzx4sVudcGp3W43zZs3NyNGjKjR45cvX24kmdTUVAcnc7z33nvP+Pv7m+PHj1d5/5kLTufNm1exLD093a0vOD1w4IBp3bq1ueOOO0xpaWmNtuEu57B79+7mkUceqfi5rKzMNGrU6LwXnA4cOLDSsh49erjtBYt2u90MHz7cxMXFme3bt9doG6WlpaZt27bmySefdHA6x8vLyzN16tQxf//736u839PO369NmDDBxMbGmpKSkmo9zp3Pn85xwemMGTMqluXk5FzUBafVeR7XKKvDtuQG9u3bZzZs2GAmTpxoateubTZs2GA2bNhg8vLyjDHl/9MkJCSYvn37mp9//tl89dVXpkGDBmbs2LEV21i9erVp27atOXDgQMWy66+/3nTp0sWsXr3aLF++3LRu3drceeedLj++c/nmm2+MJLN169az7jtw4IBp27atWb16tTHGmJ07d5pJkyaZtWvXmj179pjPPvvMtGjRwlx11VWujn1BK1asMK+88or5+eefza5du8x7771nGjRoYP785z9XrPPb4zPGmAcffNA0adLEfPfdd2bt2rWmR48epkePHlYcwgUdOHDAtGrVylx33XXmwIED5vDhwxW3X6/jKefwo48+MjabzcyePdts2bLF3H///SYqKqriHWZ/+tOfzJgxYyrW//HHH01gYKCZMWOG2bp1q5kwYYIJCgoymzZtsuoQzuuhhx4ykZGRZsmSJZXOVUFBQcU6vz3GiRMnmsWLF5tdu3aZdevWmTvuuMOEhISYtLQ0Kw7hvEaMGGGWLFli9uzZY3788UfTp08fU79+fZOdnW2M8fzzd0ZZWZlp0qSJGT169Fn3edr5y8vLq/hdJ8m8/PLLZsOGDWbfvn3GGGOmTJlioqKizGeffWY2btxoBg0aZJo3b25OnTpVsY1rr73WvP766xU/X+h57AheVT7uuusuI+ms2/fff1+xzt69e03//v1NaGioqV+/vhkxYkSl5vv9998bSWbPnj0Vy44dO2buvPNOU7t2bRMREWHuvvvuikLjDu68807Ts2fPKu/bs2dPpT+D/fv3m6uuusrUrVvX2Gw206pVKzNq1CiTk5PjwsQXZ926dSY5OdlERkaakJAQ0759e/Piiy9WepXqt8dnjDGnTp0yDz/8sKlTp46pVauWufnmmyv9Mncns2bNqvL/2V+/KOlp5/D11183TZo0McHBwaZ79+5m1apVFfddffXV5q677qq0/pw5c0ybNm1McHCw6dixo/nPf/7j4sQX71znatasWRXr/PYYn3jiiYo/j5iYGHPDDTe
Y9evXuz78Rbj99ttNw4YNTXBwsGnUqJG5/fbbzc6dOyvu9/Tzd8bixYuNJLNt27az7vO083fmd9Zvb2eOwW63m3HjxpmYmBhjs9nMddddd9ZxN23a1EyYMKHSsvM9jx3BzxhjHDfEAQAAOD+f+pwPAABgPcoHAABwKcoHAABwKcoHAABwKcoHAABwKcoHAABwKcoHAABwKcoHAABwKcoHAABwKcoHAABwKcoHAABwqf8P3al1wo6TB68AAAAASUVORK5CYII=\n" 70 | }, 71 | "metadata": {} 72 | } 73 | ], 74 | "source": [ 75 | "import numpy as np\n", 76 | "import matplotlib.pyplot as plt\n", 77 | "\n", 78 | "def relu(x):\n", 79 | " return np.maximum(x,0)\n", 80 | "\n", 81 | "x=np.linspace(-10,10)\n", 82 | "y=relu(x)\n", 83 | "print(y)\n", 84 | "plt.title(\"ReLU\")\n", 85 | "plt.plot(x,y)" 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "source": [ 91 | "def leakyrelu(x):\n", 92 | " return np.maximum(x,0.1*x)\n", 93 | "\n", 94 | "x=np.linspace(-10,10)\n", 95 | "y=leakyrelu(x)\n", 96 | "print(y)\n", 97 | "plt.title(\"Leaky ReLU\")\n", 98 | "plt.plot(x,y)" 99 | ], 100 | "metadata": { 101 | "colab": { 102 | "base_uri": "https://localhost:8080/", 103 | "height": 626 104 | }, 105 | "id": "ZqJ_0n-ehGqp", 106 | "outputId": "5e6e6c86-45e9-4b22-b6cf-e3f006581a1e" 107 | }, 108 | "execution_count": 5, 109 | "outputs": [ 110 | { 111 | "output_type": "stream", 112 | "name": "stdout", 113 | "text": [ 114 | "[-1. -0.95918367 -0.91836735 -0.87755102 -0.83673469 -0.79591837\n", 115 | " -0.75510204 -0.71428571 -0.67346939 -0.63265306 -0.59183673 -0.55102041\n", 116 | " -0.51020408 -0.46938776 -0.42857143 -0.3877551 -0.34693878 -0.30612245\n", 117 | " -0.26530612 -0.2244898 -0.18367347 -0.14285714 -0.10204082 -0.06122449\n", 118 | " -0.02040816 0.20408163 0.6122449 1.02040816 1.42857143 1.83673469\n", 119 | " 2.24489796 2.65306122 3.06122449 3.46938776 3.87755102 4.28571429\n", 120 | " 4.69387755 5.10204082 5.51020408 5.91836735 6.32653061 6.73469388\n", 121 | " 7.14285714 7.55102041 7.95918367 8.36734694 8.7755102 9.18367347\n", 122 | " 9.59183673 10. 
]\n" 123 | ] 124 | }, 125 | { 126 | "output_type": "execute_result", 127 | "data": { 128 | "text/plain": [ 129 | "[]" 130 | ] 131 | }, 132 | "metadata": {}, 133 | "execution_count": 5 134 | }, 135 | { 136 | "output_type": "display_data", 137 | "data": { 138 | "text/plain": [ 139 | "
" 140 | ], 141 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAh8AAAGzCAYAAACPa3XZAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABD1UlEQVR4nO3deVxU9cI/8M/MADOswyKrAuKCoghCqaml+cgv85pplim2WPlki7hrpc/jVipqPF5Ne8ruLe1er1tp5mNXvenVzFJzARcKlNwQBUSEYR2Gme/vD2MUWQScmTPL5/16zas4c86cz+EA8/Gc75wjE0IIEBEREVmIXOoARERE5FhYPoiIiMiiWD6IiIjIolg+iIiIyKJYPoiIiMiiWD6IiIjIolg+iIiIyKJYPoiIiMiiWD6IiIjIolg+iKjJLl26BJlMhpSUFKmjEJENY/kgslHr1q2DTCbD8ePHpY5iUo8//jhkMpnx4erqipiYGKxYsQIGg6FFr/nKK6/Aw8Ojwefv97186qmn0LZt2xatm4jqcpI6ABHRvdq0aYPk5GQAQEFBATZs2ICpU6fixo0bWLRokcTpiOhBsXwQkdVRq9V48cUXjV+/+eab6Ny5M1atWoX3338fCoVCwnRE9KB42oXIzuXk5OC1115DYGAglEolunbtii+++KLWPFVVVZg7dy4eeughqNVquLu747HHHsP+/fvv+/pCCIwfPx4uLi7Ytm0b+vfvj9jY2Hrn7dSpEwYNGtTsbVCpVOjRowdKSkqQn59f67n169fjoYcegqurK3x9fTF69GhkZ2c3ex1EZDksH0R2LC8vD4888gj27t2LpKQkrFy5Eh06dMC4ceOwYsUK43wajQZ//etf8fjjj2Pp0qWYP38+bty4gUGDBiEtLa3B19fr9XjllVfwt7/9Dd988w1GjBiBl156CadPn8bZs2drzXvs2DGcO3eu1hGN5qgZ7Ort7W2ctmjRIrz88svo2LEjli9fjilTpmDfvn3o168fioqKWrQeIrIAQUQ2ae3atQKAOHbsWIPzjBs3TgQHB4uCgoJa00ePHi3UarUoLy8XQghRXV0ttFptrXlu3bolAgMDxWuvvWacdvHiRQFAfPjhh0Kn04lRo0YJV1dXsWfPHuM8RUVFQqVSiXfffbfW602aNEm4u7uL0tLSRrerf//+onPnzuLGjRvixo0bIiMjQ8ycOVMAEEOGDDHOd+nSJaFQKMSiRYtqLX/mzBnh5ORUa/rYsWOFu7t7g+u83/dyyJAhIjw8vNHcRNR0PPJBZKeEENi6dSuGDh0KIQQKCgqMj0GDBqG4uBgnT54EACgUCri4uAAADAYDCgsLUV1djYcfftg4z92qqqowcuRI7Ny5E//85z/xxBNPGJ9Tq9UYNmwYNm7cCCEEgNtHSDZv3ozhw4fD3d39vtkzMjLg7+8Pf39/dO7cGR9++CGefvpprFu3zjjPtm3bYDAY8Pzzz9fatqCgIHTs2LFJp4yISBoccEpkp27cuIGioiJ89tln+Oyzz+qd5+7xE19++SX+53/+BxkZGdDpdMbpERERdZZLTk5GaWkpdu3ahccff7zO8y+//DI2b96MH3/8Ef369cPevXuRl5eHl156qUnZ27Zti7/85S8wGAz4/fffsWjRIty4cQMqlco4z/nz5yGEQMeOHet9DWdn5yatq6lkMplJX4/IkbF8ENmpmmtivPjiixg7dmy988TExAC4PWjzlVdewfDhwzFz5kwEBARAoVAgOTkZv//+e53lBg0ahN27d2PZsmV4/PHHa5WCmucDAwOxfv169OvXD+vXr0dQUBASEhKalN3d3b3WvH379kV8fDxmz56Njz76yLh9MpkMu3btqvfTL41d1+NeNfkrKirqfb68vLzONhJRy7F8ENkpf39/eHp6Qq/X3/dN/+uvv0a7du2wbdu2Wv/CnzdvXr3zP/LII3jzzTfx1
FNPYeTIkfjmm2/g5HTnz4lCocCYMWOwbt06LF26FNu3b8frr7/e4o/IxsTE4MUXX8SaNWswY8YMhIWFoX379hBCICIiApGRkS163Rrh4eEAgMzMTDz22GN1nj937hyio6MfaB1EdAfHfBDZKYVCgWeffRZbt26t88kT4PZpmbvnBWAcowEAR48exeHDhxt8/YSEBGzatAm7d+/GSy+9VOfqoy+99BJu3bqFN954A6WlpS3+lEuNd955BzqdDsuXLwcAjBgxAgqFAgsWLKiVu2Y7bt682eTXfuihhxAQEIC//vWv0Gq1tZ7bvn07cnJyMHjw4AfKT0R38MgHkY374osvsHv37jrTJ0+ejCVLlmD//v3o1asXXn/9dXTp0gWFhYU4efIk9u7di8LCQgC3Lx++bds2PPPMMxgyZAguXryITz/9FF26dEFpaWmD6x4+fDjWrl2Ll19+GV5eXlizZo3xubi4OERHR+Orr75CVFQU4uPjH2g7u3Tpgj/96U/461//ijlz5qB9+/ZYuHAhZs2ahUuXLmH48OHw9PTExYsX8c0332D8+PGYMWOGcXmdToeFCxfWeV1fX1+8/fbbSElJwdixY9GjRw+MGjUKfn5+SE1NxRdffIGYmBiMHz/+gfIT0V2k+6ANET2Imo+HNvTIzs4WQgiRl5cnJkyYIEJDQ4Wzs7MICgoSAwcOFJ999pnxtQwGg1i8eLEIDw8XSqVSxMXFiZ07d4qxY8fW+ojp3R+1vdv//u//CgBixowZtaYvW7ZMABCLFy9u8nb1799fdO3atd7nDhw4IACIefPmGadt3bpVPProo8Ld3V24u7uLzp07iwkTJojMzEzjPGPHjm3w+9S+fXvjfLt27RIDBgwQXl5ewtnZWURERIhp06aJW7duNTk/Ed2fTIh7jlcSEZnIypUrMXXqVFy6dAlhYWFSxyEiK8HyQURmIYRAbGws/Pz8eM0NIqqFYz6IyKTKysqwY8cO7N+/H2fOnMG3334rdSQisjI88kFEJnXp0iVERETA29sbb7/9NhYtWiR1JCKyMiwfREREZFG8zgcRERFZFMsHERERWZTVDTg1GAy4du0aPD09eSMnIiIiGyGEQElJCUJCQiCXN35sw+rKx7Vr1xAaGip1DCIiImqB7OxstGnTptF5rK58eHp6Argd3svLS+I0RERE1BQajQahoaHG9/HGWF35qDnV4uXlxfJBRERkY5oyZIIDTomIiMiiWD6IiIjIolg+iIiIyKJYPoiIiMiiWD6IiIjIolg+iIiIyKJYPoiIiMiiWD6IiIjIolg+iIiIyKJYPoiIiMiiml0+Dh48iKFDhyIkJAQymQzbt2+v9bwQAnPnzkVwcDBcXV2RkJCA8+fPmyovERER2bhml4+ysjLExsbi448/rvf5ZcuW4aOPPsKnn36Ko0ePwt3dHYMGDUJlZeUDhyUiIiLb1+wbyw0ePBiDBw+u9zkhBFasWIH//u//xrBhwwAAf/vb3xAYGIjt27dj9OjRdZbRarXQarXGrzUaTXMjERERURNU6w146x8n8fzDofh/XQIly2HSMR8XL15Ebm4uEhISjNPUajV69eqFw4cP17tMcnIy1Gq18REaGmrKSERERPSHFXvP4/tf8zBtcxqKyqsky2HS8pGbmwsACAys3aYCAwONz91r1qxZKC4uNj6ys7NNGYmIiIgAHDpfgI8PZAEAFo/oBm83F8myNPu0i6kplUoolUqpYxAREdmtGyVaTN2SBiGAxJ6hGBobImkekx75CAoKAgDk5eXVmp6Xl2d8joiIiCzHYBCYtiUNN0q0iAz0wNynukodybTlIyIiAkFBQdi3b59xmkajwdGjR9G7d29TroqIiIiaYM3BC/jxfAFUznKsHhMPVxeF1JGaf9qltLQUWVlZxq8vXryItLQ0+Pr6IiwsDFOmTMHChQvRsWNHREREYM6cOQgJCcHw4cNNmZuIiIju48TlW0j5VyYAYP7QrogM9JQ40
W3NLh/Hjx/HgAEDjF9PmzYNADB27FisW7cO77zzDsrKyjB+/HgUFRXh0Ucfxe7du6FSqUyXmoiIiBpVXK7DpI2p0BsEhsaGYFQP6/k0qUwIIaQOcTeNRgO1Wo3i4mJ4eXlJHYeIiMjmCCHw5voT2JOeh3A/N+yc+Cg8Vc5mXWdz3r95bxciIiI78/cjl7EnPQ/OChlWJcaZvXg0F8sHERGRHUm/VoyFO38DALw3OAoxbbylDVQPlg8iIiI7UaatxsQNqajSG5AQFYDX+raVOlK9WD6IiIjsxJztZ3GhoAzBahU+fC4WMplM6kj1YvkgIiKyA1+fuIptqTmQy4CVo+Pg4y7d5dPvh+WDiIjIxmXll2LO9rMAgKkJkegZ4StxosaxfBAREdmwSp0eSRtOokKnR5/2fnh7QAepI90XywcREZENW/jdr8jILYGfuwtWjOoOhdw6x3ncjeWDiIjIRu06cx3rj1wBAPzP87EI8LKNq4mzfBAREdmg7MJyvLP1NADgjf7t8HinAIkTNR3LBxERkY3R6Q2YuDEVJZXViAvzxownOkkdqVlYPoiIiGxMyp5MpGUXwUvlhI9Gx8FZYVtv57aVloiIyMHtz8zHmoMXAADLnotBqK+bxImaj+WDiIjIRuRpKjF9yykAwEuPhOPJ6GCJE7UMywcREZEN0BsEpmxKQ2FZFaKCvfBfQ6KkjtRiLB9EREQ2YPW/s3D4wk24uSjw8Zg4qJwVUkdqMZYPIiIiK3f0wk2s3HcOALBweDTa+XtInOjBsHwQERFZscKyKkzalAqDAJ6Nb4MR8W2kjvTAWD6IiIislBACM746hTyNFu383fH+sK5SRzIJlg8iIiIr9fmhi/h3Rj5cnORYnRgPd6WT1JFMguWDiIjICp2+WoSluzMAAHOGRKFLiJfEiUyH5YOIiMjKaCp1SNqQCp1eYHB0EF58JFzqSCbF8kFERGRFhBCYve0MrhSWo7W3K5Y8GwOZTCZ1LJNi+SAiIrIim45lY+fp63CSy7BqTBzUrs5SRzI5lg8iIiIrkZlbgvk70gEAMwZ1QnyYj8SJzIPlg4iIyApUVOmRtOEktNUG9Iv0x/jH2kkdyWxYPoiIiKzA/B3pOJ9figBPJZY/Hwu53L7GedyN5YOIiEhi36blYPPxbMhkwIpR3dHKQyl1JLNi+SAiIpLQpYIyzN52BgAwcUAH9OnQSuJE5sfyQUREJBFttR5JG0+irEqPnm19MWlgR6kjWQTLBxERkUSW7MrA2RwNfNycsTKxO5wUjvG27BhbSUREZGW+/zUPa3+6BABIGRmLYLWrtIEsiOWDiIjIwq4VVWDm16cAAOMejcDAqECJE1kWywcREZEFVesNmLwpFUXlOsS0UePdJztLHcniWD6IiIgsaOW+8zh26RY8lE5YlRgHFyfHeyt2vC0mIiKSyE9ZBVi9PwsAkDyiG8L93CVOJA2WDyIiIgu4UaLFlM1pEAJI7BmKobEhUkeSDMsHERGRmRkMAtO2pOFGiRaRgR6Y+1RXqSNJiuWDiIjIzNYcvIAfzxdA5SzH6jHxcHVRSB1JUiwfREREZnTi8i2k/CsTADB/aFdEBnpKnEh6LB9ERERmUlyuw6SNqdAbBIbGhmBUj1CpI1kFlg8iIiIzEELg3a2nkVNUgXA/Nyx+JhoymUzqWFaB5YOIiMgM1h+5jN3puXBWyLAqMQ6eKmepI1kNlg8iIiITS79WjA+++w0A8N7gKMS08ZY2kJVh+SAiIjKhMm01Jm5IRVW1AQlRAXitb1upI1kdlg8iIiITmvPtWVwoKEOwWoUPn4vlOI96sHwQERGZyNcnrmLbyRzIZcDK0XHwcXeROpJVYvkgIiIygaz8UszZfhYAMDUhEj0jfCVOZL1YPoiIiB5QpU6PpA0nUaHTo097P7w9oIPUkawaywcREdEDWvjdr8jILYGfuwtWjOoOhZzjPBpj8vKh1+sxZ84cREREwNXVFe3bt8cHH3wAIYSpV
0VERCS5XWeuY/2RKwCA5aO6I8BLJXEi6+dk6hdcunQpPvnkE3z55Zfo2rUrjh8/jldffRVqtRqTJk0y9eqIiIgkk11Yjne2ngYAvNm/PfpH+kucyDaYvHz8/PPPGDZsGIYMGQIAaNu2LTZu3IhffvnF1KsiIiKSjE5vwMSNqSiprEZ8mDemPxEpdSSbYfLTLn369MG+fftw7tw5AMCpU6dw6NAhDB48uN75tVotNBpNrQcREZG1S9mTibTsInipnPBRYhycFRxG2VQmP/Lx3nvvQaPRoHPnzlAoFNDr9Vi0aBFeeOGFeudPTk7GggULTB2DiIjIbA5k5mPNwQsAgGXPxaCNj5vEiWyLyWvali1b8I9//AMbNmzAyZMn8eWXXyIlJQVffvllvfPPmjULxcXFxkd2drapIxEREZlMnqYS07acAgC83DscT0YHS5zI9pj8yMfMmTPx3nvvYfTo0QCAbt264fLly0hOTsbYsWPrzK9UKqFUKk0dg4iIyOT0BoEpm9JQWFaFqGAvzP5TlNSRbJLJj3yUl5dDLq/9sgqFAgaDwdSrIiIisqiP92fh8IWbcHNRYPWYOKicFVJHskkmP/IxdOhQLFq0CGFhYejatStSU1OxfPlyvPbaa6ZeFRERkcUcvXATK/be/jDFB8Oi0d7fQ+JEtsvk5WPVqlWYM2cO3n77beTn5yMkJARvvPEG5s6da+pVERERWURhWRUmb0qDQQAj4lvj2YfaSB3JpsmElV16VKPRQK1Wo7i4GF5eXlLHISIiByeEwH9+eRz7MvLRzt8d/5f0KNyVJv+3u81rzvs3P5RMRETUiM8PXcS+jHy4OMmxOjGexcMEWD6IiIgacPpqEZbuzgAAzBkShS4hPCJvCiwfRERE9dBU6pC0IRU6vcDg6CC8+Ei41JHsBssHERHRPYQQmL3tDK4UlqO1tyuWPBsDmUwmdSy7wfJBRER0j03HsrHz9HU4yWVYNSYOaldnqSPZFZYPIiKiu2TmlmD+jnQAwIxBnRAf5iNxIvvD8kFERPSHiio9kjachLbagH6R/hj/WDupI9kllg8iIqI/zN+RjvP5pQjwVGL587GQyznOwxxYPoiIiAB8m5aDzcezIZMBK0Z1RysP3vTUXFg+iIjI4V0qKMPsbWcAABMHdECfDq0kTmTfWD6IiMihaav1SNp4EmVVevRs64tJAztKHcnusXwQEZFDW7IrA2dzNPBxc8bKxO5wUvCt0dz4HSYiIof1/a95WPvTJQBAyshYBKtdpQ3kIFg+iIjIIV0rqsDMr08BAMY9GoGBUYESJ3IcLB9ERORwqvUGTNqYiqJyHWLaqPHuk52ljuRQWD6IiMjhrNx3Hscv34KH0gmrEuPg4sS3Q0vid5uIiBzKT1kFWL0/CwCQPKIbwv3cJU7keFg+iIjIYdwo0WLK5jQIAST2DMXQ2BCpIzkklg8iInIIBoPAtC1puFGiRWSgB+Y+1VXqSA6L5YOIiBzCmoMX8OP5Aqic5Vg9Jh6uLgqpIzkslg8iIrJ7Jy7fQsq/MgEAC57uishAT4kTOTaWDyIismvF5TpM2pgKvUHg6dgQPP9wqNSRHB7LBxER2S0hBN7Zego5RRUI93PDomeiIZPJpI7l8Fg+iIjIbv39yGXsSc+Ds0KG1Ynx8FQ5Sx2JwPJBRER2Kv1aMRbu/A0AMGtwFLq1UUuciGqwfBARkd0p01Zj4oZUVOkNSIgKwKt920odie7C8kFERHZnzvazuFBQhmC1Ch8+F8txHlaG5YOIiOzK1yeuYltqDuQyYOXoOPi4u0gdie7B8kFERHYjK78Uc7afBQBMTYhEzwhfiRNRfVg+iIjILlTq9EjacBIVOj36tPfD2wM6SB2JGsDyQUREdmHhd78iI7cEfu4uWDGqOxRyjvOwViwfRERk83aduY71R64AAJaP6o4AL5XEiagxLB9ERGTTsgvL8c7W0wCAN/q3Q/9If4kT0f2wfBARkc3S6Q2YtCkVJ
ZXViAvzxownOkkdiZqA5YOIiGxWyr8ykXqlCF4qJ3w0Og7OCr6t2QLuJSIiskkHMvOx5ocLAIBlz8Ug1NdN4kTUVCwfRERkc/I0lZi+5RQA4KVHwvFkdLDEiag5WD6IiMim6A0CUzal4WZZFaKCvfBfQ6KkjkTNxPJBREQ25eP9WTh84SbcXBRYPSYOKmeF1JGomVg+iIjIZhy9cBMr9p4DAHwwLBrt/T0kTkQtwfJBREQ2obCsCpM3pcEggBHxrfHsQ22kjkQtxPJBRERWTwiBmV+dQq6mEu383fHBsGipI9EDYPkgIiKr9/mhi9iXkQ8XJzlWJ8bDXekkdSR6ACwfRERk1U5fLcLS3RkAgDlDotAlxEviRPSgWD6IiMhqaSp1SNqQCp1e4MmuQXjxkXCpI5EJsHwQEZFVEkJg9rYzuFJYjtberlj6XAxkMpnUscgEWD6IiMgqbTqWjZ2nr8NJLsOqMXFQuzpLHYlMhOWDiIisTmZuCebvSAcAzBjUCfFhPhInIlNi+SAiIqtSUaVH0oaT0FYb0C/SH+Mfayd1JDIxlg8iIrIq83ek43x+KQI8lVj+fCzkco7zsDdmKR85OTl48cUX4efnB1dXV3Tr1g3Hjx83x6qIiMiOfJuWg83HsyGTAStGdUcrD6XUkcgMTH6Vllu3bqFv374YMGAAdu3aBX9/f5w/fx4+PjxfR0REDbtUUIbZ284AACYO6IA+HVpJnIjMxeTlY+nSpQgNDcXatWuN0yIiIky9GiIisiPaaj2SNp5EWZUePdv6YtLAjlJHIjMy+WmXHTt24OGHH8bIkSMREBCAuLg4/OUvf2lwfq1WC41GU+tBRESOZemuTJzN0cDbzRkrE7vDScEhifbM5Hv3woUL+OSTT9CxY0fs2bMHb731FiZNmoQvv/yy3vmTk5OhVquNj9DQUFNHIiIiK7b31zx88dNFAEDKc7EIVrtKnIjMTSaEEKZ8QRcXFzz88MP4+eefjdMmTZqEY8eO4fDhw3Xm12q10Gq1xq81Gg1CQ0NRXFwMLy9ev5+IyJ5dK6rAnz76EUXlOrzWNwJzh3aROhK1kEajgVqtbtL7t8mPfAQHB6NLl9o/PFFRUbhy5Uq98yuVSnh5edV6EBGR/avWGzB5UyqKynXo1lqNdwd3kjoSWYjJy0ffvn2RmZlZa9q5c+cQHs6bARER0R0r953HsUu34KF0wuoxcVA6KaSORBZi8vIxdepUHDlyBIsXL0ZWVhY2bNiAzz77DBMmTDD1qoiIyEb9lFWA1fuzAACLR3RDuJ+7xInIkkxePnr06IFvvvkGGzduRHR0ND744AOsWLECL7zwgqlXRURENuhGiRZTNqdBCGB0j1A8HRsidSSyMJMPOH1QzRmwQkREtsVgEBi79hf8eL4AkYEe+HbCo3B14ekWeyDpgFMiIqKGrDl4AT+eL4DKWY7VY+JZPBwUywcREVnEicu3kPKv2x9ImD+0KyIDPSVORFJh+SAiIrMrLtdh0sZU6A0CQ2NDMKoHLyjpyFg+iIjIrIQQeGfrKeQUVSDczw2Ln4mGTCaTOhZJiOWDiIjM6u9HLmNPeh6cFTKsSoyDp8pZ6kgkMZYPIiIym/RrxVi48zcAwHuDoxDTxlvaQGQVWD6IiMgsyrTVmLghFVV6AxKiAvBa37ZSRyIrwfJBRERmMWf7WVwoKEOwWoUPn4vlOA8yYvkgIiKT+/rEVWxLzYFcBqwcHQcfdxepI5EVYfkgIiKTysovxZztZwEAUxMi0TPCV+JEZG1YPoiIyGQqdXokbTiJCp0efdr74e0BHaSORFaI5YOIiExm4Xe/IiO3BH7uLlgxqjsUco7zoLpYPoiIyCR2nbmO9UeuAACWj+qOAC+VxInIWrF8EBHRA8suLMc7W08DAN7s3x79I/0lTkTWjOWDiIgeiE5vwMSNqSiprEZ8mDemPxEpdSSyciwfRET0QFL2ZCItuwheKiesHB0HZwXfWqhx/AkhIqIW2
5+ZjzUHLwAAlj0Xg1BfN4kTkS1g+SAiohbJ01Ri+pZTAICXHgnHk9HBEiciW8HyQUREzaY3CEzZlIbCsipEBXvhv4ZESR2JbAjLBxERNdvqf2fh8IWbcHNRYPWYOKicFVJHIhvC8kFERM1y5MJNrNx3DgDwwbBotPf3kDgR2RqWDyIiarLCsipM3pQKgwBGxLfGsw+1kToS2SCWDyIiahIhBGZ8dQp5Gi3a+bvjg2HRUkciG8XyQURETfL5oYv4d0Y+XJzkWJ0YD3elk9SRyEaxfBAR0X2dvlqEpbszAABzhkShS4iXxInIlrF8EBFRozSVOiRtSIVOLzA4OggvPhIudSSycSwfRETUICEEZm87gyuF5Wjt7Yolz8ZAJpNJHYtsHMsHERE1aNOxbOw8fR1OchlWjYmD2tVZ6khkB1g+iIioXpm5JZi/Ix0AMGNQJ8SH+UiciOwFywcREdVRUaVH0oaT0FYb0C/SH+Mfayd1JLIjLB9ERFTHgv9Lx/n8Uvh7KrH8+VjI5RznQabD8kFERLXsOHUNm45lQyYDVozqjlYeSqkjkZ1h+SAiIqNLBWWYve0MACBpQAf07dBK4kRkj1g+iIgIAKCt1mPixlSUaqvRo60PJg/sKHUkslMsH0REBABYuisTZ3KK4e3mjI8S4+Ck4FsEmQd/soiICHt/zcMXP10EAKQ8F4tgtavEiciesXwQETm4a0UVmPH1KQDAa30jkNAlUOJEZO9YPoiIHFi13oDJm1JRVK5Dt9ZqvDu4k9SRyAGwfBARObCV+87j2KVb8FA6YfWYOCidFFJHIgfA8kFE5KB+yirA6v1ZAIDFI7oh3M9d4kTkKFg+iIgc0I0SLaZsToMQwOgeoXg6NkTqSORAWD6IiByMwSAwbUsabpRoERnogXlDu0odiRwMywcRkYNZc/ACfjxfAJWzHKvHxMPVheM8yLJYPoiIHMiJy7eQ8q9MAMD8oV0RGegpcSJyRCwfREQOorhch0kbU6E3CAyNDcGoHqFSRyIHxfJBROQAhBB4Z+sp5BRVINzPDYufiYZMJpM6Fjkolg8iIgfw9yOXsSc9D84KGVYlxsFT5Sx1JHJgLB9ERHYu/VoxFu78DQDw3uAoxLTxljYQOTyWDyIiO1amrcbEDamo0huQEBWA1/q2lToSEcsHEZE9m7P9LC4UlCFYrcKHz8VynAdZBbOXjyVLlkAmk2HKlCnmXhUREd3l6xNXsS01B3IZsHJ0HHzcXaSORATAzOXj2LFjWLNmDWJiYsy5GiIiukdWfinmbD8LAJiaEImeEb4SJyK6w2zlo7S0FC+88AL+8pe/wMfHx1yrISKie1Tq9EjacBIVOj36tPfD2wM6SB2JqBazlY8JEyZgyJAhSEhIaHQ+rVYLjUZT60FERC238LtfkZFbAj93F6wY1R0KOcd5kHVxMseLbtq0CSdPnsSxY8fuO29ycjIWLFhgjhhERA5n15nrWH/kCgBg+ajuCPBSSZyIqC6TH/nIzs7G5MmT8Y9//AMq1f1/6GfNmoXi4mLjIzs729SRiIgcQnZhOd7ZehoA8Gb/9ugf6S9xIqL6mfzIx4kTJ5Cfn4/4+HjjNL1ej4MHD2L16tXQarVQKO7cQVGpVEKpVJo6BhGRQ9HpDZi4MRUlldWID/PG9CcipY5E1CCTl4+BAwfizJkztaa9+uqr6Ny5M959991axYOIiEwjZU8m0rKL4KVywkeJcXBW8DJOZL1MXj48PT0RHR1da5q7uzv8/PzqTCcioge3PzMfaw5eAAAsey4GbXzcJE5E1DhWYyIiG5anqcT0LacAAC/3DseT0cESJyK6P7N82uVeBw4csMRqiIgcit4gMGVTGgrLqtAl2Auz/xQldSSiJuGRDyIiG7X631k4fOEm3FwUWD0mDipnjqkj28DyQURkg45cuImV+84BABYOj0Y7fw+JExE1HcsHEZGNKSyrwuRNqTAI4Nn4NhgR30bqSETNwvJBRGRDhBCY8
dUp5Gm0aOfvjveHdZU6ElGzsXwQEdmQzw9dxL8z8uHiJMfqxHi4Ky3yuQEik2L5ICKyEaevFmHp7gwAwJwhUegS4iVxIqKWYfkgIrIBmkodkjakQqcXGBwdhBcfCZc6ElGLsXwQEVk5IQRmbzuDK4XlaO3tiiXPxkAmk0kdi6jFWD6IiKzcpmPZ2Hn6OpzkMqwaEwe1q7PUkYgeCMsHEZEVy8wtwfwd6QCAGYM6IT7MR+JERA+O5YOIyEpVVOmRtOEktNUG9Iv0x/jH2kkdicgkWD6IiKzU/B3pOJ9figBPJZY/Hwu5nOM8yD6wfBARWaFv03Kw+Xg2ZDJgxajuaOWhlDoSkcmwfBARWZlLBWWYve0MAGDigA7o06GVxImITIvlg4jIimir9UjaeBJlVXr0bOuLSQM7Sh2JyORYPoiIrMjSXZk4m6OBj5szViZ2h5OCf6bJ/vCnmojISuz9NQ9f/HQRAJAyMhbBaleJExGZB8sHEZEVuFZUgRlfnwIAjHs0AgOjAiVORGQ+LB9ERBKr1hsweVMqisp1iGmjxrtPdpY6EpFZsXwQEUls5b7zOHbpFjyUTliVGAcXJ/5pJvvGn3AiIgn9nFWA1fuzAADJI7oh3M9d4kRE5sfyQUQkkRslWkzenAYhgNE9QjE0NkTqSEQWwfJBRCQBg0Fg2pY03CjRIjLQA/OGdpU6EpHFsHwQEUlgzcEL+PF8AVTOcqweEw9XF4XUkYgshuWDiMjCTly+hZR/ZQIA5g/tishAT4kTEVkWywcRkQUVl+swaWMq9AaBobEhGNUjVOpIRBbH8kFEZCFCCLyz9RRyiioQ7ueGxc9EQyaTSR2LyOJYPoiILOTvRy5jT3oenBUyrEqMg6fKWepIRJJg+SAisoD0a8VYuPM3AMB7g6MQ08Zb2kBEEmL5ICIyszJtNSZuSEWV3oCEqAC81ret1JGIJMXyQURkZnO2n8WFgjIEq1X48LlYjvMgh8fyQURkRl+fuIptqTmQy4CVo+Pg4+4idSQiybF8EBGZSVZ+KeZsPwsAmJoQiZ4RvhInIrIOLB9ERGZQqdMjacNJVOj06NPeD28P6CB1JCKrwfJBRGQGC7/7FRm5JfBzd8GKUd2hkHOcB1ENlg8iIhPbdeY61h+5AgBYPqo7ArxUEicisi4sH0REJpRdWI53tp4GALzZvz36R/pLnIjI+rB8EBGZiE5vwMSNqSiprEZ8mDemPxEpdSQiq8TyQURkIil7MpGWXQQvlRM+SoyDs4J/Yonqw98MIiITOJCZjzUHLwAAlj0XgzY+bhInIrJeLB9ERA8oT1OJ6VtOAQBe7h2OJ6ODJU5EZN1YPoiIHoDeIDBlUxpullUhKtgLs/8UJXUkIqvH8kFE9AA+3p+Fwxduws1FgdVj4qByVkgdicjqsXwQEbXQ0Qs3sWLvOQDAB8Oi0d7fQ+JERLaB5YOIqAUKy6oweVMaDAIYEd8azz7URupIRDaD5YOIqJmEEJj51SnkairRzt8dHwyLljoSkU1h+SAiaqbPD13Evox8uDjJsToxHu5KJ6kjEdkUlg8iomY4fbUIS3dnAADmDIlClxAviRMR2R6WDyKiJtJU6pC0IRU6vcDg6CC8+Ei41JGIbBLLBxFREwghMHvbGVwpLEdrb1cseTYGMplM6lhENsnk5SM5ORk9evSAp6cnAgICMHz4cGRmZpp6NUREFrXpWDZ2nr4OJ7kMq8bEQe3qLHUkIptl8vLxww8/YMKECThy5Ai+//576HQ6PPHEEygrKzP1qoiILCIztwTzd6QDAGYM6oT4MB+JExHZNpMP0d69e3etr9etW4eAgACcOHEC/fr1M/XqiIjMqqJKj6QNJ6GtNqBfpD/GP9ZO6khENs/snw8rLi4GAPj6+tb7vFarhVarNX6t0WjMHYmIqMnm70jH+fxSBHgqsfz5WMjlHOdB9KDMOuDUYDBgypQp6Nu3L6Kj678IT3JyMtRqtfERGhpqzkhERE32b
VoONh/PhkwGrBjVHa08lFJHIrILZi0fEyZMwNmzZ7Fp06YG55k1axaKi4uNj+zsbHNGIiJqkksFZZi97QwAYOKADujToZXEiYjsh9lOuyQlJWHnzp04ePAg2rRp+J4HSqUSSiX/NUFE1kNbrUfSxpMoq9KjZ1tfTBrYUepIRHbF5OVDCIGJEyfim2++wYEDBxAREWHqVRARmdWSXRk4m6OBj5szViZ2h5OCl0QiMiWTl48JEyZgw4YN+Pbbb+Hp6Ync3FwAgFqthqurq6lXR0RkUnt/zcPany4BAFJGxiJYzb9bRKZm8jr/ySefoLi4GI8//jiCg4ONj82bN5t6VUREJnWtqAIzvj4FABj3aAQGRgVKnIjIPpnltAsRka2p1hsweVMqisp1iGmjxrtPdpY6EpHd4olMIiIAK/edx7FLt+ChdMKqxDi4OPHPI5G58LeLiBzeT1kFWL0/CwCQPKIbwv3cJU5EZN9YPojIod0o0WLK5jQIAST2DMXQ2BCpIxHZPZYPInJYBoPAtC1puFGiRWSgB+Y+1VXqSEQOgeWDiBzWmoMX8OP5Aqic5Vg9Jh6uLgqpIxE5BJYPInJIJy7fQsq/MgEA84d2RWSgp8SJiBwHywcROZzich0mbUyF3iAwNDYEo3rwhpZElsTyQUQORQiBd7aeQk5RBcL93LD4mWjIZDKpYxE5FJYPInIofz9yGXvS8+CskGFVYhw8Vc5SRyJyOCwfROQw0q8VY+HO3wAA7w2OQkwbb2kDETkolg8icghl2mpM3JCKKr0BAzsH4LW+baWOROSwWD6IyCHM2X4WFwrKEKxWIWVkLMd5EEmI5YOI7N7XJ65iW2oO5DJg5eg4+Li7SB2JyKGxfBCRXcvKL8Wc7WcBAFMTItEzwlfiRETE8kFEdqtSp0fShpOo0OnRp70f3h7QQepIRASWDyKyY4u++w0ZuSXwc3fBilHdoZBznAeRNWD5ICK7tOvMdfz9yGUAwP88H4sAL5XEiYioBssHEdmd7MJyvLP1NADgjf7t8HinAIkTEdHdWD6IyK7o9AZM2pSKkspqxIV5Y8YTnaSORET3YPkgIruS8q9MpF4pgpfKCR+NjoOzgn/miKwNfyuJyG4cyMzHmh8uAACWPReDUF83iRMRUX1YPojILuRpKjF9yykAwEuPhOPJ6GCJExFRQ1g+iMjm6Q0CUzal4WZZFaKCvfBfQ6KkjkREjWD5ICKb9/H+LBy+cBNuLgqsHhMHlbNC6khE1AiWDyKyaUcv3MSKvecAAB8Mi0Z7fw+JExHR/bB8EJHNKiyrwuRNaTAIYER8azz7UBupIxFRE7B8EJFNEkJg5lenkKupRDt/d3wwLFrqSETURCwfRGSTPj90Efsy8uHiJMfqxHi4K52kjkRETcTyQUQ25/TVIizdnQEAmDMkCl1CvCRORETNwfJBRDZFU6lD0oZU6PQCg6OD8OIj4VJHIqJmYvkgIpshhMDsbWdwpbAcrb1dseTZGMhkMqljEVEzsXwQkc3YdCwbO09fh5NchlVj4qB2dZY6EhG1AMsHEdmEzNwSzN+RDgCYMagT4sN8JE5ERC3F8kFEVq+iSo+kDSehrTagX6Q/xj/WTupIRPQAWD6IyOrN35GO8/mlCPBUYvnzsZDLOc6DyJaxfBCRVfs2LQebj2dDJgNWjOqOVh5KqSMR0QNi+SAiq3WpoAz/9c1ZAMDEAR3Qp0MriRMRkSmwfBCRVdJW6zFxYypKtdXo2dYXkwZ2lDoSEZkIywcRWaWluzJxJqcYPm7OWJnYHU4K/rkishf8bSYiq7P31zx88dNFAEDKyFgEq10lTkREpsTyQURW5VpRBWZ8fQoAMO7RCAyMCpQ4ERGZGssHEVmNar0Bkzeloqhch5g2arz7ZGepIxGRGbB8EJHVWLnvPI5dugUPpRNWJcbBxYl/oojsEX+zicgq/JRVgNX7swAAySO6IdzPXeJERGQuLB9EJLkbJVpM2ZwGIYDEnqEYG
hsidSQiMiOWDyKSlMEgMG1LGm6UaBEZ6IG5T3WVOhIRmRnLBxFJas3BC/jxfAFUznJ8PCYeri4KqSMRkZk5SR2AiBxTtd6A5d+fw/8e+B0AsODprugY6ClxKiKyBJYPIrK4a0UVmLQxFccv3wJw+3oezz8cKnEqIrIUlg8isqh/Z+Rh2pZTKCrXwVPphORnu+GpGA4wJXIkZhvz8fHHH6Nt27ZQqVTo1asXfvnlF3OtiohsgE5vwOJ//obX1h1HUbkO3VqrsXPSoyweRA7ILOVj8+bNmDZtGubNm4eTJ08iNjYWgwYNQn5+vjlWR0RWLruwHCM/PYzPDl4AALzSpy2+fqs3r+VB5KBkQghh6hft1asXevTogdWrVwMADAYDQkNDMXHiRLz33nuNLqvRaKBWq1FcXAwvLy9TRyMiC9uTnouZX52CprIaXionfDgyFoO6Bkkdi4hMrDnv3yYf81FVVYUTJ05g1qxZxmlyuRwJCQk4fPhwnfm1Wi20Wq3xa41GY+pIRGRBmkodrtwsx+Wb5TiUdQMbf8kGAHQP9caqxDiE+rpJnJCIpGby8lFQUAC9Xo/AwNp3ogwMDERGRkad+ZOTk7FgwQJTxyAiMzEYBHI1lbhSWH67ZBSW4UphBa7cLMPlwnIUlevqLPP6YxGYOagz79VCRACs4NMus2bNwrRp04xfazQahIbyI3dEUqrU6ZFdePvoxZXC24/LN8twpbAc2bcqUFVtaHT5Vh4uCPV1Q7ivG0bEt0G/SH8LJSciW2Dy8tGqVSsoFArk5eXVmp6Xl4egoLrneZVKJZRKpaljEFEjhBC4Va4zForbRzBu//dKYTlyNZWNLu8klyHE2xXhfm4I83Uz/jfM1x1hfm7wUEr+7xoismIm/wvh4uKChx56CPv27cPw4cMB3B5wum/fPiQlJZl6dUTUgGq9AdeLK41HLy4Xlt05mnGzHCXa6kaX91A63SkWNSXD1x3hfm4IVqvgpOApFCJqGbP882TatGkYO3YsHn74YfTs2RMrVqxAWVkZXn31VXOsjshhlWmr/zglUo4rhWV3/X85cm5VoNrQ+IfZgrxUdxWLu0qGnzt83Jwhk8kstCVE5EjMUj5GjRqFGzduYO7cucjNzUX37t2xe/fuOoNQiahxQgjcKNEaT4nc/m+ZcRxGQWlVo8u7OMkR6uOKcD/3P06L3D6SEe7nhjY+blA58yZuRGR5ZrnOx4PgdT7I0Wir9ci5VVFrzMXdRzIqdY0P7vRxc0aYsVy4IvyPcRfhfm4I9FRBLufRCyIyP0mv80FEdRWX6/74SOqdMRc1Ry+uFVegsX8CyGW4a3Cne+0Bnn5u8FI5W25DiIhMgOWDyAT0f1z74vLNsjtHL+46klFcUffaF3dzdVbc9YmRmkGe7gj3dUOItyuvj0FEdoXlg6iJKqr0yL51+8jF5bvGXVy5WY6rtypQpb/ftS+Ut8db+LrdvgbGH6dGQn3d4O+h5OBOInIYLB9EfxBCoLCs6s7gTuOpkTJcvlmO/BJto8s7K2Ro4+NmvLhWTbGoOaLh5sJfNyIigOWDHEy13oCcoopaH0mt+RRJdmE5Su9z7QtPldMfRy/c7xy9+ONIRoi3KxQc3ElEdF8sH2R3SrXVDY69yCmqgL6Ra1/IZECwl+qu0yLutY5kqF157QsiogfF8kE2x2AQyC/R1rrfyN1HMgrLGr/2hdJJbhzUeadY3C4ZbXxcee0LIiIzY/kgq6St1iO7sOL2tS7uue/IlcJyaO9zYzM/d5c6p0VqLrQV4KnktS+IiCTE8kGSEEKgqFx312mR2pcGz9VUNnrtC4VchhBv1Z0Lavneue5FmK8bPHntCyIiq8XyQWajNwhc+2NwZ333HympbHxwp5uL4p4Larkbx16EeLvCmTc2IyKySSwf9EDKq+7c2Kzmjqk1RzKuNuHGZgGeSuMRi5o7ptacLvFzd+HgTiIiO8TyQY0SQqCgtKr2HVPv+hTJjSZc+yLUx
814aqRm7EW4nxtCfdzg6sLBnUREjoblg1BVbcC1oop6x15cKSxHeZW+0eXVrs73fHLkTskI8lLx2hdERFQLy4eD0FTq6r1j6uWb5bhWVIHGzo7IZECI2rX2x1P/OE0S5usGtRsHdxIRUdOxfNgJg0Egr6SyzmmRmiMZt8obv7GZylle+6qddx3JaOPjxhubERGRybB82JBKnR7Z91xQq+ZCW9m3KlB1n2tftPJw+ePoRe2rdob5usHfkzc2IyIiy2D5sCJCCNwq1925auc9F9fK1VQ2uryTXIbWPq61b8t+1ydIPJTc3UREJD2+G1lYtd6A68WVf3wktazWOIzswnKU3OfGZh5Kp3uufXFn7EWItwpOvPYFERFZOZYPMyjTVtc7sPNKYTlymnDtiyAv1V3F4s5VO8P93OHjxhubERGRbWP5aAEhBG4Yb2x253bsNadLCkobv7GZi5McoT6uxnuN3H0kI9TXjTc2IyIiu8by0QBttR45typqjbmoOTVypbAcFbrGr33h4+Zc65LgxiMZfm4I9FTxxmZEROSwHLp8FBtvbFZW6/LgVwrLca24otEbm8llQIi3652xF38M7KwpGV68sRkREVG9HKZ8XCuqwPojl+86RVKO4orGr31Rc2Oz+m5uFuLtymtfEBERtYDDlI/yqmr874Hf60z391TWOS1ScySjlQdvbEZERGRqDlM+2vi44aVHwo3l4vaFtlzh5uIw3wIiIiKr4DDvvCpnBT4YHi11DCIiIofHQQtERERkUSwfREREZFEsH0RERGRRLB9ERERkUSwfREREZFEsH0RERGRRLB9ERERkUSwfREREZFEsH0RERGRRLB9ERERkUSwfREREZFEsH0RERGRRLB9ERERkUVZ3V1shBABAo9FInISIiIiaquZ9u+Z9vDFWVz5KSkoAAKGhoRInISIiouYqKSmBWq1udB6ZaEpFsSCDwYBr167B09MTMpnMpK+t0WgQGhqK7OxseHl5mfS1rYG9bx9g/9vI7bN99r6N3D7bZ65tFEKgpKQEISEhkMsbH9VhdUc+5HI52rRpY9Z1eHl52e0PFWD/2wfY/zZy+2yfvW8jt8/2mWMb73fEowYHnBIREZFFsXwQERGRRTlU+VAqlZg3bx6USqXUUczC3rcPsP9t5PbZPnvfRm6f7bOGbbS6AadERERk3xzqyAcRERFJj+WDiIiILIrlg4iIiCyK5YOIiIgsiuWDiIiILMquyseiRYvQp08fuLm5wdvbu955rly5giFDhsDNzQ0BAQGYOXMmqqurG33dwsJCvPDCC/Dy8oK3tzfGjRuH0tJSM2xB8xw4cAAymazex7Fjxxpc7vHHH68z/5tvvmnB5E3Xtm3bOlmXLFnS6DKVlZWYMGEC/Pz84OHhgWeffRZ5eXkWStw8ly5dwrhx4xAREQFXV1e0b98e8+bNQ1VVVaPLWfM+/Pjjj9G2bVuoVCr06tULv/zyS6Pzf/XVV+jcuTNUKhW6deuGf/7znxZK2nzJycno0aMHPD09ERAQgOHDhyMzM7PRZdatW1dnX6lUKgslbp758+fXydq5c+dGl7Gl/QfU/zdFJpNhwoQJ9c5v7fvv4MGDGDp0KEJCQiCTybB9+/ZazwshMHfuXAQHB8PV1RUJCQk4f/78fV+3ub/HzWVX5aOqqgojR47EW2+9Ve/zer0eQ4YMQVVVFX7++Wd8+eWXWLduHebOndvo677wwgtIT0/H999/j507d+LgwYMYP368OTahWfr06YPr16/Xevznf/4nIiIi8PDDDze67Ouvv15ruWXLllkodfO9//77tbJOnDix0fmnTp2K//u//8NXX32FH374AdeuXcOIESMslLZ5MjIyYDAYsGbNGqSnp+PPf/4zPv30U8yePfu+y1rjPty8eTOmTZuGefPm4eTJk4iNjcWgQYOQn59f7/w///wzEhMTMW7cOKSmpmL48OEYPnw4zp49a+HkTfPDDz9gwoQJOHLkCL7//
nvodDo88cQTKCsra3Q5Ly+vWvvq8uXLFkrcfF27dq2V9dChQw3Oa2v7DwCOHTtWa/u+//57AMDIkSMbXMaa919ZWRliY2Px8ccf1/v8smXL8NFHH+HTTz/F0aNH4e7ujkGDBqGysrLB12zu73GLCDu0du1aoVar60z/5z//KeRyucjNzTVO++STT4SXl5fQarX1vtavv/4qAIhjx44Zp+3atUvIZDKRk5Nj8uwPoqqqSvj7+4v333+/0fn69+8vJk+ebJlQDyg8PFz8+c9/bvL8RUVFwtnZWXz11VfGab/99psAIA4fPmyGhKa3bNkyERER0eg81roPe/bsKSZMmGD8Wq/Xi5CQEJGcnFzv/M8//7wYMmRIrWm9evUSb7zxhllzmkp+fr4AIH744YcG52no75E1mjdvnoiNjW3y/La+/4QQYvLkyaJ9+/bCYDDU+7wt7T8A4ptvvjF+bTAYRFBQkPjwww+N04qKioRSqRQbN25s8HWa+3vcEnZ15ON+Dh8+jG7duiEwMNA4bdCgQdBoNEhPT29wGW9v71pHEhISEiCXy3H06FGzZ26OHTt24ObNm3j11VfvO+8//vEPtGrVCtHR0Zg1axbKy8stkLBllixZAj8/P8TFxeHDDz9s9DTZiRMnoNPpkJCQYJzWuXNnhIWF4fDhw5aI+8CKi4vh6+t73/msbR9WVVXhxIkTtb73crkcCQkJDX7vDx8+XGt+4PbvpC3tKwD33V+lpaUIDw9HaGgohg0b1uDfG2tw/vx5hISEoF27dnjhhRdw5cqVBue19f1XVVWF9evX47XXXmv0Luq2tP/udvHiReTm5tbaR2q1Gr169WpwH7Xk97glrO6utuaUm5tbq3gAMH6dm5vb4DIBAQG1pjk5OcHX17fBZaTy+eefY9CgQfe9K/CYMWMQHh6OkJAQnD59Gu+++y4yMzOxbds2CyVtukmTJiE+Ph6+vr74+eefMWvWLFy/fh3Lly+vd/7c3Fy4uLjUGfMTGBhodfurPllZWVi1ahVSUlIanc8a92FBQQH0en29v2MZGRn1LtPQ76Qt7CuDwYApU6agb9++iI6ObnC+Tp064YsvvkBMTAyKi4uRkpKCPn36ID093ex38G6uXr16Yd26dejUqROuX7+OBQsW4LHHHsPZs2fh6elZZ35b3n8AsH37dhQVFeGVV15pcB5b2n/3qtkPzdlHLfk9bgmrLx/vvfceli5d2ug8v/32230HRdmSlmzz1atXsWfPHmzZsuW+r3/3eJVu3bohODgYAwcOxO+//4727du3PHgTNWf7pk2bZpwWExMDFxcXvPHGG0hOTrbqey+0ZB/m5OTgySefxMiRI/H66683uqzU+5CACRMm4OzZs42OiQCA3r17o3fv3sav+/Tpg6ioKKxZswYffPCBuWM2y+DBg43/HxMTg169eiE8PBxbtmzBuHHjJExmHp9//jkGDx6MkJCQBuexpf1nS6y+fEyfPr3RVgoA7dq1a9JrBQUF1RmxW/MpiKCgoAaXuXeQTXV1NQoLCxtc5kG1ZJvXrl0LPz8/PP30081eX69evQDc/le3Jd64HmSf9urVC9XV1bh06RI6depU5/mgoCBUVVWhqKio1tGPvLw8s+2v+jR3G69du4YBAwagT58++Oyzz5q9Pkvvw/q0atUKCoWizieLGvveBwUFNWt+a5GUlGQcfN7cf/06OzsjLi4OWVlZZkpnOt7e3oiMjGwwq63uPwC4fPky9u7d2+yjhba0/2r2Q15eHoKDg43T8/Ly0L1793qXacnvcYuYbPSIFbnfgNO8vDzjtDVr1ggvLy9RWVlZ72vVDDg9fvy4cdqePXusasCpwWAQERERYvr06S1a/tChQwKAOHXqlImTmd769euFXC4XhYWF9T5fM+D066+/Nk7LyMiw6gGnV69eFR07dhSjR48W1dXVLXoNa9mHPXv2FElJScav9Xq9aN26daMDTp966qla03r37m21AxYNBoOYM
GGCCAkJEefOnWvRa1RXV4tOnTqJqVOnmjid6ZWUlAgfHx+xcuXKep+3tf13t3nz5omgoCCh0+matZw17z80MOA0JSXFOK24uLhJA06b83vcoqwmeyUrcPnyZZGamioWLFggPDw8RGpqqkhNTRUlJSVCiNs/NNHR0eKJJ54QaWlpYvfu3cLf31/MmjXL+BpHjx4VnTp1ElevXjVOe/LJJ0VcXJw4evSoOHTokOjYsaNITEy0+PY1ZO/evQKA+O233+o8d/XqVdGpUydx9OhRIYQQWVlZ4v333xfHjx8XFy9eFN9++61o166d6Nevn6Vj39fPP/8s/vznP4u0tDTx+++/i/Xr1wt/f3/x8ssvG+e5d/uEEOLNN98UYWFh4t///rc4fvy46N27t+jdu7cUm3BfV69eFR06dBADBw4UV69eFdevXzc+7p7HVvbhpk2bhFKpFOvWrRO//vqrGD9+vPD29jZ+wuyll14S7733nnH+n376STg5OYmUlBTx22+/iXnz5glnZ2dx5swZqTahUW+99ZZQq9XiwIEDtfZVeXm5cZ57t3HBggViz5494vfffxcnTpwQo0ePFiqVSqSnp0uxCY2aPn26OHDggLh48aL46aefREJCgmjVqpXIz88XQtj+/quh1+tFWFiYePfdd+s8Z2v7r6SkxPheB0AsX75cpKamisuXLwshhFiyZInw9vYW3377rTh9+rQYNmyYiIiIEBUVFcbX+I//+A+xatUq49f3+z02BbsqH2PHjhUA6jz2799vnOfSpUti8ODBwtXVVbRq1UpMnz69VvPdv3+/ACAuXrxonHbz5k2RmJgoPDw8hJeXl3j11VeNhcYaJCYmij59+tT73MWLF2t9D65cuSL69esnfH19hVKpFB06dBAzZ84UxcXFFkzcNCdOnBC9evUSarVaqFQqERUVJRYvXlzrKNW92yeEEBUVFeLtt98WPj4+ws3NTTzzzDO13sytydq1a+v9mb37oKSt7cNVq1aJsLAw4eLiInr27CmOHDlifK5///5i7NixtebfsmWLiIyMFC4uLqJr167iu+++s3DipmtoX61du9Y4z73bOGXKFOP3IzAwUPzpT38SJ0+etHz4Jhg1apQIDg4WLi4uonXr1mLUqFEiKyvL+Lyt778ae/bsEQBEZmZmnedsbf/VvGfd+6jZBoPBIObMmSMCAwOFUqkUAwcOrLPd4eHhYt68ebWmNfZ7bAoyIYQw3UkcIiIiosY51HU+iIiISHosH0RERGRRLB9ERERkUSwfREREZFEsH0RERGRRLB9ERERkUSwfREREZFEsH0RERGRRLB9ERERkUSwfREREZFEsH0RERGRR/x8xDovDPHPMcQAAAABJRU5ErkJggg==\n" 142 | }, 143 | "metadata": {} 144 | } 145 | ] 146 | }, 147 | { 148 | "cell_type": "code", 149 | "source": [ 150 | "def prelu(x,alpha):\n", 151 | " return np.maximum(x,alpha*x)\n", 152 | "\n", 153 | "x=np.linspace(-10,10)\n", 154 | "y=prelu(x,0.5)\n", 155 | "print(y)\n", 156 | "plt.title(\"PR ReLU (alpha=.5)\")\n", 157 | "plt.plot(x,y)" 158 | ], 159 | "metadata": { 160 | "colab": { 161 | "base_uri": "https://localhost:8080/", 162 | "height": 626 163 | }, 164 | "id": "Up17j6XahP-0", 165 | "outputId": "c9985bd6-4ce7-44db-ee3d-95481f6d5246" 166 | }, 167 | "execution_count": 9, 168 | "outputs": [ 169 | { 170 | 
"output_type": "stream", 171 | "name": "stdout", 172 | "text": [ 173 | "[-5. -4.79591837 -4.59183673 -4.3877551 -4.18367347 -3.97959184\n", 174 | " -3.7755102 -3.57142857 -3.36734694 -3.16326531 -2.95918367 -2.75510204\n", 175 | " -2.55102041 -2.34693878 -2.14285714 -1.93877551 -1.73469388 -1.53061224\n", 176 | " -1.32653061 -1.12244898 -0.91836735 -0.71428571 -0.51020408 -0.30612245\n", 177 | " -0.10204082 0.20408163 0.6122449 1.02040816 1.42857143 1.83673469\n", 178 | " 2.24489796 2.65306122 3.06122449 3.46938776 3.87755102 4.28571429\n", 179 | " 4.69387755 5.10204082 5.51020408 5.91836735 6.32653061 6.73469388\n", 180 | " 7.14285714 7.55102041 7.95918367 8.36734694 8.7755102 9.18367347\n", 181 | " 9.59183673 10. ]\n" 182 | ] 183 | }, 184 | { 185 | "output_type": "execute_result", 186 | "data": { 187 | "text/plain": [ 188 | "[]" 189 | ] 190 | }, 191 | "metadata": {}, 192 | "execution_count": 9 193 | }, 194 | { 195 | "output_type": "display_data", 196 | "data": { 197 | "text/plain": [ 198 | "
" 199 | ], 200 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAiIAAAGzCAYAAAASZnxRAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAABJYUlEQVR4nO3deVhU9eLH8Q+LLAqMgOz7priviPsGZaallZpbuVvmWt1K7/2ZV1vU8ppWZmoKmltqV9vrupsKiPuSC6uyCCjIDIsMMPP9/TE4SgICMpyZ4fN6nnme5sxZvscjzrtzzgwmQggBIiIiIgmYSj0AIiIiargYIkRERCQZhggRERFJhiFCREREkmGIEBERkWQYIkRERCQZhggRERFJhiFCREREkmGIEBERkWQYIkRUbz755BMEBwdDrVbXeFlfX19MmDChVtv19fXFkCFDarWsPpk3bx5CQ0OlHgZRnWKIED2ByMhImJiYaB9WVlZo3rw5Zs6ciczMTO18hw8fLjefmZkZnJ2dMXz4cFy5cqVW2zI3N4eHhwcmTJiAtLS0Wo3//rh2795d6TwmJiaYOXNmha/t3r0bJiYmOHz48GO3pVAosGzZMrz33nswNeU/PQ/z9fUtd2zvP15//fVy882dOxfnz5/Hjz/+KNFIieqeudQDIDIGixcvhp+fH4qKinDs2DGsWbMGv/76Ky5duoTGjRtr55s9ezZCQkJQUlKCCxcu4Ouvv8bhw4dx6dIluLq61nhb0dHRiIyMxLFjx3Dp0iVYWVnpahef2MaNG1FaWorRo0dLPRS91KFDB7z99tvlpjVv3rzcc1dXVwwdOhTLly/H888/X5/DI9IZhghRHRg0aBC6dOkCAJgyZQocHR2xYsUK/PDDD+XeeHv37o3hw4drn7do0QLTp0/H5s2b8e6779ZqW82aNcOyZcvw448/YuTIkXW4V3UrIiICzz//vF7HkpQ8PDwwbty4x843cuRIjBgxAomJifD396+HkRHpFs+PEunAgAEDAABJSUlVzte7d28AQEJCQq23Vdk6rl69iuHDh8PBwQFWVlbo0qWLZKf0k5KScOHCBYSHhz/y2vLly9GjRw84OjrC2toanTt3rvJS0X33L1UdPXoUr732GhwdHWFnZ4dXX30Vd+/erXCZY8eOoWvXrrCysoK/vz82b95c7vWcnBz84x//QNu2bWFjYwM7OzsMGjQI58+fr92O11BxcTEKCgqqnOf+n+EPP/xQH0Mi0jmGCJEO3I8CR0fHKudLTk4GANjb29d6WxWt4/Lly+jWrRuuXLmCefPm4T//+Q+aNGmCYcOGYc+ePbXeVm2dOHECANCpU6dHXlu1ahU6duyIxYsX4+OPP4a5uTlGjBiBX375pVrrnjlzJq5cuYJ///vfePXVV7F161YMGzYMQohy88XHx2P48OF46qmn8J///Af29vaYMGECLl++rJ0nMTERe/fuxZAhQ7BixQq88847uHjxIvr27Yv09PRy67tz5061Hkqlslr7cfDgQTRu3Bg2Njbw9fXFqlWrKpxPJpMhICAAx48fr9Z6ifQdL80Q1QG5XI47d+6gqKgIx48fx+LFi2Ftbf3IJzXy8vJw584d7T0ic+fOhYmJCV566aVabSsmJgaLFi2CpaVluW3NmTMH3t7eiI2NhaWlJQDgjTfeQK9evfDee+/hhRdeqJsdr6arV68CAPz8/B557fr167C2ttY+nzlzJjp16oQVK1Zg8ODBj123hYUFDhw4gEaNGgEAfHx88O677+Knn34qdx/FtWvXcPToUe0ZpJEjR8LLywsRERFYvnw5AKBt27a4fv16uZtpX3nlFQQHB2PDhg1YsGCBdrqTk1O19j0iIuKxn/Zp164devXqhRYtWiA7OxuRkZGYO3cu0tPTsWzZskfm9/f3x19//VWt7RPpO4YIUR34+yUHHx8fbN26FR4eHuWmT5o0qdxzJ
ycnfPvttwgJCan1tnx9fbFlyxZ4enoC0FxeOHjwIBYvXoy8vDzk5eVp5x04cCAWLlyItLS0R8amS9nZ2TA3N4eNjc0jrz0cIXfv3oVKpULv3r2xffv2aq172rRp2ggBgOnTp+Of//wnfv3113Ih0qpVK22EAJo/+xYtWiAxMVE77X60AYBKpUJubi5sbGzQokULnDlzptx29+3bV63xtW7d+rHz/P2S2cSJEzFo0CCsWLECs2bN0h7b++zt7XH27NlqbZ9I3zFEiOrA6tWr0bx5c5ibm8PFxQUtWrSo8COq77//Pnr37o38/Hzs2bMHO3bsqPFHWe9vSy6XY+PGjTh69Gi5N9D4+HgIIbBgwYJy/wf/sKysrDoNERMTk1ov+/PPP+PDDz/EuXPnyl3GqO46g4KCyj23sbGBm5ub9pLVfd7e3o8sa29vX+5+ErVajVWrVuGrr75CUlISVCqV9rW/X2ar6H6XumJiYoI333wTf/zxBw4fPvzITaxCiCf6MyfSJwwRojrQtWtX7SdZqtK2bVvtG9iwYcNQWFiIqVOnolevXvDy8qrxtoYNG4ZevXphzJgxuHbtGmxsbLRfFvaPf/wDAwcOrHAdgYGB1doWoDlLcO/evQpfKywsBIDHfhLG0dERpaWlyMvLg62trXb6n3/+ieeffx59+vTBV199BTc3NzRq1AgRERHYtm1btcdYHWZmZhVOf/heko8//hgLFizApEmT8MEHH8DBwQGmpqaYO3fuI1/ClpGRUa3tymSycmd9quv+34ecnJxHXrt79y6aNWtW43US6SOGCJGEli5dij179uCjjz7C119/XePlzczMsGTJEvTv3x9ffvkl5s2bp/1IZ6NGjerk/9p9fHxw7dq1Cl+7P93Hx6fKdQQHBwPQfHqmXbt22unff/89rKys8Mcff5Q7qxMREVHt8cXFxaF///7a5/n5+bh16xaeffbZaq/jvt27d6N///7YsGFDuem5ubmPvPG7ublVa53VuUekIvcvGVV0L0pSUhLat29f43US6SN+aoZIQgEBAXjppZcQGRlZ7f/D/rt+/fqha9euWLlyJYqKiuDs7Ix+/fph7dq1uHXr1iPz3759u0brf/bZZxEdHY3Tp0+Xm56bm4utW7eiQ4cOj/0ytu7duwMATp06VW66mZkZTExMyl0CSU5Oxt69e6s9vnXr1qGkpET7fM2aNSgtLcWgQYOqvY6Hx/P3T9vs2rWrwm+u3bdvX7UeD5+VKikpwdWrV8sdl5ycnHL7f3++pUuXwsLColxkAZqblRMSEtCjR48a7x+RPuIZESKJvfPOO9i5cydWrlyJpUuX1nodI0aMQGRkJF5//XWsXr0avXr1Qtu2bTF16lT4+/sjMzMTUVFRSE1NfeR7Mb7//nvtJ1seNn78eMybNw+7du1Cnz598NprryE4OBjp6emIjIzErVu3qnX2wt/fH23atMH+/fvL3bA7ePBgrFixAs888wzGjBmDrKwsrF69GoGBgbhw4UK19r24uBhhYWEYOXIkrl27hq+++gq9evWq1TePDhkyBIsXL8bEiRPRo0cPXLx4EVu3bq3wi8Nqc7YpLS0NLVu2xPjx4xEZGQlAc6Pqhx9+iOHDh8PPzw85OTnYtm0bLl26hI8//viRyNu/fz+EEBg6dGiNt0+klwQR1VpERIQAIGJjY6uc79ChQwKA2LVrV4Wv9+vXT9jZ2Ync3NxabUulUomAgAAREBAgSktLhRBCJCQkiFdffVW4urqKRo0aCQ8PDzFkyBCxe/fuR8ZV2ePPP/8UQgiRmpoqpkyZIjw8PIS5ublwcHAQQ4YMEdHR0Y/9M7pvxYoVwsbGRhQWFpabvmHDBhEUFCQsLS1FcHCwiIiIEAsXLhR//+fJx8dHjB8//pE/jyNHjohp06YJe3t7YWNjI8aOHSuys7MfWXbw4MGPjKlv376ib9++2udFRUXi7bffFm5ubsLa2lr07NlTREVFPTJfbSUlJQkA5
fbj1KlT4rnnnhMeHh7CwsJC2NjYiF69eomdO3dWuI6XX35Z9OrV64nHQqQvTIT423lIIiIdkMvl8Pf3xyeffILJkyc/8foiIyMxceJExMbGVutGYWOQkZEBPz8/7Nixg2dEyGjwHhEiqhcymQzvvvsuPv3000c+gULVs3LlSrRt25YRQkaFZ0SIyCA1xDMiRMaIZ0SIiIhIMjwjQkRERJLhGREiIiKSDEOEiIiIJKPXX2imVquRnp4OW1tb/oInIiIiAyGEQF5eHtzd3R/7iz31OkTS09Or/YvAiIiISL+kpKTA09Ozynn0OkTu/5bOlJQU2NnZSTwaIiIiqg6FQgEvL69yv227MnodIvcvx9jZ2TFEiIiIDEx1bqvgzapEREQkGYYIERERSYYhQkRERJJhiBAREZFkGCJEREQkGYYIERERSYYhQkRERJJhiBAREZFkGCJEREQkGYYIERERSabWIXL06FE899xzcHd3h4mJCfbu3VvudSEE3n//fbi5ucHa2hrh4eGIi4t70vESERGREal1iBQUFKB9+/ZYvXp1ha9/8skn+Pzzz/H1118jJiYGTZo0wcCBA1FUVFTrwRIREZFxqfUvvRs0aBAGDRpU4WtCCKxcuRL/93//h6FDhwIANm/eDBcXF+zduxejRo2qcDmlUgmlUql9rlAoajs8IiIiqkKmoghzdpzFxy+0hb+TjWTj0Mk9IklJScjIyEB4eLh2mkwmQ2hoKKKioipdbsmSJZDJZNqHl5eXLoZHRETUoGXIizBqXTSiE3Pwj13nIYSQbCw6CZGMjAwAgIuLS7npLi4u2tcqMn/+fMjlcu0jJSVFF8MjIiJqsG7J72HUuigk3SmAR1NrrBrVESYmJpKNp9aXZnTB0tISlpaWUg+DiIjIKKXn3sPo9dG4kV0IT3trbJ/aDV4OjSUdk07OiLi6ugIAMjMzy03PzMzUvkZERET1Jz33Hkat00SIl4M1dkyTPkIAHYWIn58fXF1dceDAAe00hUKBmJgYdO/eXRebJCIiokqklUXIzZxCeDs0xo5p3eFpL32EAE9waSY/Px/x8fHa50lJSTh37hwcHBzg7e2NuXPn4sMPP0RQUBD8/PywYMECuLu7Y9iwYXUxbiIiIqqG1LuFGL0+Gik598oipBvcm1pLPSytWofIqVOn0L9/f+3zt956CwAwfvx4REZG4t1330VBQQGmTZuG3Nxc9OrVC7///jusrKyefNRERET0WCk5mghJvXsPPo6aCHGT6U+EAICJkPIzO4+hUCggk8kgl8thZ2cn9XCIiIgMRkpOIUati0Za7j34Omoux7jK6udkQE3ev/XqUzNERET05B6OEL9mTbB9ard6i5CaYogQEREZkZvZmssxabn34N+sCbZP6wYXO/2MEIAhQkREZDRuZhdi1LoopMuL4O/UBDumdoOzHkcIwBAhIiIyCjeyCzBqXTRuyYsQ4KQ5E+Jsq98RAjBEiIiIDF7yHU2EZCiKEOhsg21TQw0iQgCGCBERkUFLulOAUeuikKlQIsjZBtumdoOTreH8uhSGCBERkYFKvJ2P0eujkalQormLDbZOMawIARgiREREBinhdj5Gr4tGVp4SLVxssXVqKJrZGFaEAAwRIiIigxOfpTkTcjtPiWBXW2ydEgpHA4wQgCFCRERkUOKz8jBqXQzu5Bt+hAAMESIiIoMRl5mH0es1EdLSzQ5bp4TCoYmF1MN6IqZSD4CIiIge73pmHkavj8adfCVaudlhmxFECMAzIkRERHrvWkYexqyPRnZBMVq722HL5FDYG0GEAAwRIiIivXY1Q4Gx62OQXVCMNh6aCGna2DgiBGCIEBER6a0rtxQY+00McgqK0dZDhi2TQyFr3EjqYdUphggREZEe+itdgbHfRONuYQnaecrw7STjixCAN6sSERHpncvpcm2EtPeU4VsjPBNyH8+IEBER6ZFLaXKM2xCD3MIStPdqim8nd4WdlXFGCMAQISIi0huX0
uQY+00M5PdK0MGrKTYbeYQAvDRDRESkFy6mPoiQTt7GfybkPp4RISIiktiF1FyM+yYGiqJSdPaxR+TEENg2gAgBGCJERESSOp+Si3EbYpBXFiGbJnWFjWXDeXvmpRkiIiKJnHsoQkJ8G16EADwjQkREJImzN+/i1Q0nkacsRVdfB2ycGNLgIgRgiBAREdW7M2URkq8sRVc/B0RMCEGTBhghAEOEiIioXp2+cRfjN2oiJNTPARETQ9DYouG+HTfcPSciIqpnp2/k4NUNJ1FQrEI3fwdsnNCwIwRgiBAREdWL2OQcTNioiZDu/o7YOCEE1hZmUg9LcgwRIiIiHTuZlIMJESdRWKxCjwBHbBjPCLmPIUJERKRDMYnZmBgZi8JiFXoFNsP6V7swQh7CECEiItKR6MRsTIyIxb0SFXoHaSLEqhEj5GH8QjMiIiIdiEpghFQHz4gQERHVsRPxdzBpUyyKStTo29wJa1/pzAiphM7OiKhUKixYsAB+fn6wtrZGQEAAPvjgAwghdLVJIiIiyR1/KEL6tWCEPI7OzogsW7YMa9aswaZNm9C6dWucOnUKEydOhEwmw+zZs3W1WSIiIskci7uDyZtioSxVo38LJ6wZxwh5HJ2FyIkTJzB06FAMHjwYAODr64vt27fj5MmTutokERGRZP6Mu40pm05BWarGgGBnrBnXCZbmjJDH0dmlmR49euDAgQO4fv06AOD8+fM4duwYBg0aVOkySqUSCoWi3IOIiEjfHbl+G5PLIiS8JSOkJnR2RmTevHlQKBQIDg6GmZkZVCoVPvroI4wdO7bSZZYsWYJFixbpakhERER17sj125i6+RSKS9V4qpULVo/pBAtzfii1unT2J7Vz505s3boV27Ztw5kzZ7Bp0yYsX74cmzZtqnSZ+fPnQy6Xax8pKSm6Gh4REdETO3QtSxshTzNCasVE6OhjLF5eXpg3bx5mzJihnfbhhx9iy5YtuHr1arXWoVAoIJPJIJfLYWdnp4thEhER1cqhq1l47dvTKFapMbC1C74c0wmNzBghQM3ev3X2J1ZYWAhT0/KrNzMzg1qt1tUmiYiI6sXBq5naCBnUxpUR8gR0do/Ic889h48++gje3t5o3bo1zp49ixUrVmDSpEm62iQREZHO7f8rE9O3nkaJSuDZtq5YNaojI+QJ6OzSTF5eHhYsWIA9e/YgKysL7u7uGD16NN5//31YWFhUax28NENERPpk31+ZeKMsQga3dcPKUR0YIRWoyfu3zkKkLjBEiIhIX/zvcgZmbDuDEpXAkHZuWPlyB5gzQipUk/dv/q4ZIiKix/jjcgZmbD2DUrXAc+3d8dnI9oyQOsI/RSIioir8fulBhDzPCKlzPCNCRERUid8u3sKs7WdRqhYY2sEd/xnBCKlrDBEiIqIK/FoWISq1wAsdPbB8RHuYmZpIPSyjw6wjIiL6m18uPIiQFxkhOsUzIkRERA/56Xw65n53ThMhnTzw6XBGiC4xRIiIiMr8eD4dc3echVoAwzt7YtlL7RghOsYQISIiAvDDuTS8+d05qAUwoixCTBkhOsd7RIiIqMHbe/ZBhLzcxYsRUo8YIkRE1KDtOZuKt3ZqImRUiBeWvNiWEVKPeGmGiIgarO9Pp+Ifu89DCGB0V298NKwNI6Se8YwIERE1SLsfipAxoYwQqTBEiIiowdl5KgXvlEXIuG7e+HAoI0QqvDRDREQNys7YFLz33wsQAnilmw8WD20NExNGiFQYIkRE1GDsOHkT8/57EQAwvrsP/v08I0RqvDRDREQNwnZGiF7iGREiIjJ622Ju4p97NBEysacv3h/SihGiJxgiRERk1LZE38D/7b0EAJjU0w8LhrRkhOgRhggRERmtb6OSseCHywCAKb388K/BjBB9wxAhIiKjtDkqGe+XRci0Pv6YPyiYEaKHGCJERGR0Np1IxsIfNRHyWh9/zGOE6C2GCBERGZWI40lY9NNfAIDX+wbgvWdaMEL0GEOEiIiMx
oZjSfjgZ02ETO8XgHcHMkL0HUOEiIiMwjd/JuLDX64AAGb2D8TbTzdnhBgAhggRERm89UcT8dGvmgiZNSAQbz3FCDEUDBEiIjJoa48kYMlvVwEAs8OC8GZ4ECPEgDBEiIjIYH19JAFLyyJkTlgQ3nyqucQjoppiiBARkUH66nA8Pvn9GgBgbngQ5oYzQgwRQ4SIiAzO6kPx+PQPTYS89VRzzA4LknhEVFsMESIiMihfHIjDf/ZdBwC8/VRzzGKEGDSGCBERGYzPD8RhRVmEvDOwBWb0D5R4RPSkGCJERGQQVu6/jpX74wAA7z7TAm/0Y4QYA4YIERHpvc/2XceqA5oIee+ZYEzvFyDxiKiuMESIiEhvCSHw2f44fF4WIfMHBeO1vowQY2Kqy5WnpaVh3LhxcHR0hLW1Ndq2bYtTp07pcpNERGQkhBBYse+6NkL+9WxLRogR0tkZkbt376Jnz57o378/fvvtNzg5OSEuLg729va62iQRERkJIQT+87/r+PJQPADg/wa3xJTe/hKPinRBZyGybNkyeHl5ISIiQjvNz89PV5sjIiIjIYTAp39cw1eHEwAAC4a0wuRefP8wVjq7NPPjjz+iS5cuGDFiBJydndGxY0esX7++ymWUSiUUCkW5BxERNRxCCHzyUIS8zwgxejoLkcTERKxZswZBQUH4448/MH36dMyePRubNm2qdJklS5ZAJpNpH15eXroaHhER6RkhBJb+fhVryiLk38+1wiRGiNEzEUIIXazYwsICXbp0wYkTJ7TTZs+ejdjYWERFRVW4jFKphFKp1D5XKBTw8vKCXC6HnZ2dLoZJRER6QAiBpb9dxdqjiQCARc+3xvgevtIOimpNoVBAJpNV6/1bZ2dE3Nzc0KpVq3LTWrZsiZs3b1a6jKWlJezs7Mo9iIjIuAkh8PGvV7QRsngoI6Qh0dnNqj179sS1a9fKTbt+/Tp8fHx0tUkiIjIwQgh8+MsVbDiWBAD4YFgbvNKN7xMNic7OiLz55puIjo7Gxx9/jPj4eGzbtg3r1q3DjBkzdLVJIiIyIEIIfPDzgwj56AVGSEOksxAJCQnBnj17sH37drRp0wYffPABVq5cibFjx+pqk0REZCCEEFj001/YeFwTIR+/0BZjQxkhDZHOblatCzW52YWIiAyDEAL//vEyNkXdAAAsfbEtRnX1lnhUVJdq8v7N3zVDRET1RgiBhT9exuaoGzAx0UTIyyGMkIaMIUJERPVCrRZ4/8dL2BJ9EyYmwLIX22FkCL8vqqFjiBARkc6p1QILfriErTGaCPnkpXYY0YURQgwRIiLSMbVa4F97L2H7SU2EfDq8PYZ39pR6WKQnGCJERKQzarXAP/dcxI7YFJiYAMuHt8dLjBB6CEOEiIh0Qq0WmP/fi/juVApMTYD/jGyPFzoyQqg8hggREdU5tVrgve8vYNfpVJiaAJ+93AFDO3hIPSzSQzr7QjMiImqYVGqBdxkhVE08I0JERHVGpRZ4d/cFfH8mFWamJlj5cgc8195d6mGRHmOIEBFRnVCpBd7ZdR7/PZsGM1MTrBrVAUPaMUKoagwRIiJ6Yiq1wD92nceesgj5fFRHDG7nJvWwyAAwRIiI6ImUqtR4e9d5/HAuHeamJvhidEcMassIoephiBARUa2VqtR4a+d5/HheEyFfjumIZ9owQqj6GCJERFQrpSo13tx5Hj9pI6QTnmnjKvWwyMAwRIiIqMZKVWrM+e4cfrlwC43MTLB6TCc83ZoRQjXHECEiohopUakxd8c5/HJREyFfje2Mp1q5SD0sMlAMESIiqrYSlRpzdpzFrxczYGFmijXjOiGsJSOEao8hQkRE1VKiUmPWtrP4/bImQr5+pRMGBDNC6MkwRIiI6LGKS9WYtf0M/ricCQszU6x9pTP6BztLPSwyAgwRIiKqUnGpGjO3ncH//sqEhXlZhLRghFDdYIgQEVGlikvVeGPrGey/oomQda90Rj9GCNUhhggREVVIWarCj
K1nsP9KFizNTbH+1S7o09xJ6mGRkWGIEBHRI5SlKryx5QwOXNVEyDfju6B3ECOE6h5DhIiIylGWqjB9yxkcLIuQDeND0CuomdTDIiPFECEiIq2iEhWmbzmNQ9duw6qRJkJ6BjJCSHcYIkREBEATIa99expHrmsiZOOEEPQIYISQbjFEiIgIRSUqTPv2NI5evw3rRmbYOCEE3QMcpR4WNQAMESKiBq6oRIWpm0/hz7g7sG5khoiJIejmzwih+sEQISJqwO4VayLkWPwdNLYwQ8SEEIQyQqgeMUSIiBqoe8UqTNkci+Px2WhiYYbISV0R4usg9bCogWGIEBE1QPeKVZi8KRYnEjQRsmlSV3RhhJAEGCJERA1MYXEpJkeeQlRiNmwszbFpUgg6+zBCSBoMESKiBqSwuBSTImMRnZhTFiFd0dnHXuphUQPGECEiaiAKlKWYGBmLk0k5sLU0x6bJXdHJmxFC0jKtrw0tXboUJiYmmDt3bn1tkoiIyhQoSzEx4kGEbGaEkJ6olzMisbGxWLt2Ldq1a1cfmyMioofkK0sxMeIkYpPvwtbKHN9ODkUHr6ZSD4sIQD2cEcnPz8fYsWOxfv162NuzvomI6lO+shQTNj6IkC2MENIzOg+RGTNmYPDgwQgPD3/svEqlEgqFotyDiIhqJ6+oBOM3nsSpG3dhZ2WOrVNC0Z4RQnpGp5dmduzYgTNnziA2NrZa8y9ZsgSLFi3S5ZCIiBqE+xFy5mYuZNaNsGVyKNp6yqQeFtEjdHZGJCUlBXPmzMHWrVthZWVVrWXmz58PuVyufaSkpOhqeERERktRVIJXH4qQrVMYIaS/TIQQQhcr3rt3L1544QWYmZlpp6lUKpiYmMDU1BRKpbLcaxVRKBSQyWSQy+Wws7PTxTCJiIyKoqgEr244iXMpuWjaWHMmpI0HI4TqV03ev3V2aSYsLAwXL14sN23ixIkIDg7Ge++999gIISKimpHf05wJOc8IIQOisxCxtbVFmzZtyk1r0qQJHB0dH5lORERPRl5Yglc2xuBCqhz2jRth65RuaOXOM8mk//jNqkREBk5eWIJxG2JwMU0OhyYW2DolFC3dGCFkGOo1RA4fPlyfmyMiMnq5hcUYtyEGl9IUcGhigW1TQxHsygghw8EzIkREBiq3sBhjv4nB5XQFHJtYYNvUbmjhaiv1sIhqhCFCRGSA7hZoIuSvWwo0s9FESHMXRggZHoYIEZGBySmLkCtlEbJ9ajcEMULIQDFEiIgMSE5BMcasj8bVjDw0s7HE9qmhjBAyaAwRIiIDkZ2vxNhvYnA1Iw9OtpbYPrUbAp1tpB4W0RNhiBARGYA7+UqMXR+Da5mMEDIuDBEiIj13J1+JMeujcT0zH862ltg+rRsCnBghZBwYIkREeux2niZC4rLy4WKnORPizwghI8IQISLSU1l5RRizPgbxWflwtbPC9mnd4NesidTDIqpTDBEiIj2UpSjC6PXRSLhdADeZFbZP7QZfRggZIYYIEZGeyVIUYdT6aCTeLoC7THMmxMeREULGiSFCRKRHMhVFGL0uGol3CuDR1Brbp3aDt2NjqYdFpDMMESIiPZEh11yOSSqLkB3TusHLgRFCxs1U6gEQEREjhBounhEhIpJYeu49jF4fjRvZhfC011yOYYRQQ8EQISKSUHruPYxaF42bOYXwctBEiKc9I4QaDoYIEZFE0nLvYXRZhHg7NMb2ad3g0dRa6mER1SuGCBGRBFLvFmL0+mik5NyDt0Nj7JjWDe6MEGqAGCJERPUsJUcTIal378HHURMhbjJGCDVMDBEionqUklOIUeuikZZ7D76OjbFjWne4yqykHhaRZBgiRET15OEI8WvWBNundmOEUIPHECEiqgc3swsxal0U0uVF8G/WBNundYOLHSOEiCFCRKRjN7ILMGpdNG7Ji+Dv1AQ7pnaDMyOECABDhIhIp5LvaCIkQ1GEACfN5RhGCNEDDBEiIh1JulOA0WUREuhsg21TQ
+FsywghehhDhIhIBxJv52P0+mhkKpQIcrbBtqnd4GRrKfWwiPQOQ4SIqI4l3M7H6HXRyMpTormLDbZOYYQQVYYhQkRUhx6OkBYuttg6NRTNbBghRJVhiBAR1ZH4LM3lmNt5SgS72mLrlFA4MkKIqsQQISKqA/FZeRi1LgZ38hkhRDXBECEiekJxmXkYvV4TIS3d7LB1SigcmlhIPSwig8AQISJ6ApoIicad/GK0KosQe0YIUbUxRIiIaulaRh7GrI9GdkExWrvbYctkRghRTTFEiIhq4WqGAmPXxyC7oBhtPDQR0rQxI4Sopkx1ufIlS5YgJCQEtra2cHZ2xrBhw3Dt2jVdbpKISOeu3FJgTFmEtPWQMUKInoBOQ+TIkSOYMWMGoqOjsW/fPpSUlODpp59GQUGBLjdLRKQzf6UrMGZ9NHIKitHOkxFC9KRMhBCivjZ2+/ZtODs748iRI+jTp89j51coFJDJZJDL5bCzs6uHERIRVe5yuhzjvonB3cIStPeUYfPkUMisG0k9LCK9U5P373q9R0QulwMAHBwcKnxdqVRCqVRqnysUinoZFxHR41xKk2PchhjkFpagvVdTbJ7UlRFCVAd0emnmYWq1GnPnzkXPnj3Rpk2bCudZsmQJZDKZ9uHl5VVfwyMiqtSlNDnGfqOJkA5eTfHtZEYIUV2pt0sz06dPx2+//YZjx47B09OzwnkqOiPi5eXFSzNEJJmLqZozIfJ7Jejo3RSbJnWFnRUjhKgqendpZubMmfj5559x9OjRSiMEACwtLWFpya9EJiL9cDFVjrHfRENRVIpOZRFiywghqlM6DREhBGbNmoU9e/bg8OHD8PPz0+XmiIjqzPmUXLyyIQaKolJ09rHHpkldYWPJr14iqms6/amaMWMGtm3bhh9++AG2trbIyMgAAMhkMlhbW+ty00REtXauLELyikrRxccekYwQIp3R6T0iJiYmFU6PiIjAhAkTHrs8P75LRPXt7M27eHXDSeQpS9HV1wEbJ4YwQohqSG/uEanHryghInpiZ27exfj7EeLngIgJIWjCCCHSKf6EEREBOH3jLsZvPIl8ZSlC/RwQMTEEjS34TySRrvGnjIgavFPJORi/8SQKilXo5u+AjRMYIUT1hT9pRNSgxSbnYEJZhPQIcMSG8SGwtjCTelhEDQZDhIgarJNJOZgQcRKFxSr0DHTEN68yQojqG0OEiBqkmMRsTIyMRWGxCr0Cm+Gb8V1g1YgRQlTfGCJE1OBEJ2ZjYkQs7pWo0DuoGda/ygghkgpDhIgalKiEbEyK1ERIn+ZOWPdKZ0YIkYQYIkTUYJyIv4NJm2JRVKJG3+ZOWMsIIZKcqdQDICKqD8cfipB+LRghRPqCZ0SIyOgdi7uDyZtioSxVY0CwM9aM6wRLc0YIkT7gGREiMmp/xt3WRkgYI4RI7/CMCBEZraPXb2PK5lMoLlUjvKUzVo9lhBDpG4YIERmlI9dvY2pZhDzVygWrx3SChTlPAhPpG/5UEpHROXQtSxshTzNCiPQaz4gQkVE5dDULr317GsUqNQa2dsEXoxkhRPqMIUJERuPg1Uy8/u0ZFKvUeKa1K74Y0xGNzBghRPqMIUJERmH/X5mYvvU0SlQCz7Z1xapRjBAiQ8CfUiIyePseipDBbd0YIUQGhGdEiMig/e9yBmZsO6OJkHZuWPVyB5gzQogMBkOEiAzW75cyMHPbGZSqBZ5r747PRrZnhBAZGP7EEpFB+v3SLW2EPM8IITJY/KklIoPz28VbmLntLErVAkM7uGMFI4TIYPHSDBEZlF8v3sKs7WehUgu80NEDy0e0h5mpidTDIqJa4v9CEJHB+OXCgwh5kRFCZBR4RoSIDMJP59Mx97tzmgjp5IFPhzNCiIwBQ4SI9N6P59Mxd8dZqAUwvLMnlr3UjhFCZCQYIkSk1344l4Y3vzsHtQBGdvHE0hfbwZQRQmQ0eI8IEemtvWcfRMjLXbwYIURGiCFCRHppz9lUvLVTE
yGjQryw5MW2jBAiI8RLM0Skd74/nYp/7D4PIYDRXb3x0bA2jBAiI8UzIkSkV3Y/FCFjQxkhRMaOIUJEemPnqRS8UxYh47p544OhjBAiY8dLM0QkOZVaYNWBOHxxMA5CAK9088Hioa1hYsIIITJ2DBEiklSmoghzdpxFdGIOAGBSTz8sGNKSEULUQOj80szq1avh6+sLKysrhIaG4uTJk7reJBEZiCPXb+PZVX8iOjEHTSzMsGpUB7z/XCtGCFEDotMQ+e677/DWW29h4cKFOHPmDNq3b4+BAwciKytLl5slIj1XqlLjk9+vYvzGk8guKEZLNzv8NKsXhnbwkHpoRFTPTIQQQlcrDw0NRUhICL788ksAgFqthpeXF2bNmoV58+Y9Mr9SqYRSqdQ+VygU8PLyglwuh52dna6GSUT16Jb8HmZvP4vY5LsANDel/t/gVrBqZCbxyIiorigUCshksmq9f+vsjEhxcTFOnz6N8PDwBxszNUV4eDiioqIqXGbJkiWQyWTah5eXl66GR0QSOHg1E8+u+hOxyXdha2mOL8d0xIfD2jJCiBownYXInTt3oFKp4OLiUm66i4sLMjIyKlxm/vz5kMvl2kdKSoquhkdE9UhZqsLHv17BpMhTuFtYgrYeMvw8uxeGtHOXemhEJDG9+tSMpaUlLC0tpR4GEdURZakKO2NT8NXhBNySFwEAJvTwxfxng2FpzrMgRKTDEGnWrBnMzMyQmZlZbnpmZiZcXV11tVki0gNFJSrsPJWCrw4lIEOhCRAXO0sser4NnmnDn38iekBnIWJhYYHOnTvjwIEDGDZsGADNzaoHDhzAzJkzdbVZIpJQUYkKO07exJojCchUaG48d7WzwvR+AXg5xIv3ghDRI3R6aeatt97C+PHj0aVLF3Tt2hUrV65EQUEBJk6cqMvNElE9KypRYfvJm/j6oQBxk1nhjX4BGNGFAUJEldNpiLz88su4ffs23n//fWRkZKBDhw74/fffH7mBlYgMU1GJCltjNAFyO08TIO4yK7zRPxAjunjyPhAieiydfo/Ik6rJ55CJqP7cK1Zha8wNfH0kEXfyNQHi0dQab/QPwPDODBCihq4m79969akZItJvhcWl2BJ9A+uOJuJOfjEATYDM6B+I4Z09YWHOX+hNRDXDECGixyosLsW3UZoAyS7QBIinvTVm9g/Ei50YIERUewwRIqpUgbIUm6NuYP2ficgpCxBvh8aY2T8QL3TyQCMzBggRPRmGCBE9Il9Zik0nkvHNn4m4W1gCAPBx1ATIsI4MECKqOwwRItLKKyrRBMixJOSWBYivY2PMGhCEoR3cYc4AIaI6xhAhIiiKSrDpuCZA5Pc0AeLfrAlmDgjE8+0ZIESkOwwRogZMfq8EkceTseFYIhRFpQAAf6cmmD0gCM+1d4eZqYnEIyQiY8cQIWqA5PdKEHE8CRuOJSGvLEACnW0wa0AghrRjgBBR/WGIEDUg8sISbDiehIjjDwIkyNkGs8OC8GxbNwYIEdU7hghRA5BbWIwNx5IQeTwZeUpNgDR3KQuQNm4wZYAQkUQYIkRG7G5BWYCcSEZ+WYAEu9pidlgQnmntygAhIskxRIiMUE5BMb75MxGbTiSjoFgFQBMgc8OD8HQrBggR6Q+GCJERySkoxvo/E7H5oQBp5WaH2WFBeLqVCwOEiPQOQ4TICGTnK7Huz0R8G3UDhWUB0trdDnPCgvBUKxeYmDBAiEg/MUSIDNidfCXWHdUEyL0STYC08bDD3LDmCGvpzAAhIr3HECEyQLfzlFh3NAFbom9qA6SdpwxzwoIwIJgBQkSGgyFCZECy8oqw9kgitsbcQFGJGgDQ3lOGueHN0a+FEwOEiAwOQ4TIAGQpirDmSAK2xdyEslQTIB28mmJOeBD6NWeAEJHhYogQ6bFMRRHWHE7A9pMPAqSjd1PMDW+OPkHNGCBEZPAYIkR6KENehDWH47E9NgXFZQHS2ccec8KC0JsBQkRGhCFCpEfSc
+9hzeEEfBebgmKVJkBCfO0xJ6w5egY6MkCIyOgwRIj0QFruPaw5HI+dsanaAOnq54C5YUHoHsAAISLjxRAhklDq3UJ8dTgBu06loEQlAAChfg6YEx6E7v4MECIyfgwRIgmk5GgCZPfpBwHS3d8Rc8KD0M3fUeLRERHVH4YIUT1KySnE6kPx2H06FaVqTYD0DHTEnLDm6OrnIPHoiIjqH0OEqB7czNYEyPdnHgRIr8BmmBMehBBfBggRNVwMESIdupFdgC8PxuO/Z9OgKguQ3kHNMDc8CJ19GCBERAwRIh1IvlOALw7GY++5BwHSt7kTZocFobOPvcSjIyLSHwwRojqUdKcAXxyMw96zaSjrD/RvoQmQjt4MECKiv2OIENWBhNv5+PJgPH449yBABgQ7Y3ZYEDp4NZV0bERE+owhQvQE4rPy8cXBOPx0Pl0bIGHBzpgTHoR2nk0lHRsRkSFgiBDVQnxWHj4/EI+fLqRDlAVIeEsXzAkLQltPmbSDIyIyIAwRohq4npmHzw/E4ZeLt7QB8nQrF8wOC0IbDwYIEVFN6SREkpOT8cEHH+DgwYPIyMiAu7s7xo0bh3/961+wsLDQxSaJdOpahiZAfr30IECeae2KWWGBaO3OACEiqi2dhMjVq1ehVquxdu1aBAYG4tKlS5g6dSoKCgqwfPlyXWySSCeu3FLg8wNx+O1ShnbaoDaumB0WhJZudhKOjIjIOJgIcf//73Tr008/xZo1a5CYmFjtZRQKBWQyGeRyOezs+I8+1Z+/0jUB8vtlTYCYmADPtnHDrLBABLvy7yIRUVVq8v5db/eIyOVyODhU/U2SSqUSSqVS+1yhUOh6WETlXEqT4/MDcfjfX5kANAEyuK0bZocFobmLrcSjIyIyPvUSIvHx8fjiiy8ee1lmyZIlWLRoUX0MiaicS2lyrNwfh/1XHgTIkHbumD0gEEEMECIinanRpZl58+Zh2bJlVc5z5coVBAcHa5+npaWhb9++6NevH7755psql63ojIiXlxcvzZDOXEjNxar9cThwNQsAYGoCPNfeHbMGBCLQmQFCRFQbNbk0U6MQuX37NrKzs6ucx9/fX/vJmPT0dPTr1w/dunVDZGQkTE1Nq7spALxHhHTnfEouVh2Iw8GHAmRoBw/MHBCIACcbiUdHRGTYdHaPiJOTE5ycnKo1b1paGvr374/OnTsjIiKixhFCpAtnb97FqgNxOHztNgBNgAwrCxB/BggRUb3TyT0iaWlp6NevH3x8fLB8+XLcvn1b+5qrq6suNklUpTM372LV/jgcua75u2hmaoKhHdwxa0AQ/Jo1kXh0REQNl05CZN++fYiPj0d8fDw8PT3LvVZPnxYmAgCcvpGDlfvj8GfcHQCaAHmxowdm9A+ELwOEiEhy9fY9IrXBe0Sotk4l52DVgQcBYm5qghc7aQLEx5EBQkSkS3r5PSJE9eFkUg5WHbiO4/Gam6rNTU0wvLMnZvQPhJdDY4lHR0REf8cQIaMQnZiNVfvjEJX4IEBGdPHCG/0CGCBERHqMIUIGLSohG6sOXEd0Yg4AoJGZCUZ28cL0fgHwtGeAEBHpO4YIGRwhBKISs7FyfxxOJmkCxMLMFCNDPDG9XyA8mlpLPEIiIqouhggZDCEETiRoLsGcTH4QIC+HaM6AuDNAiIgMDkOE9J4QAsfi72DV/jicunEXAGBhborRIV54vV8A3GQMECIiQ8UQIb0lhMDRuDtYtf86ztzMBaAJkDFdvfF63wC4yqykHSARET0xhgjpHSEEjly/jZX743AuJRcAYGluijGhmgBxsWOAEBEZC4YI6Q0hBA5fu42VB+JwvixArBqZYmyoD17r4w9nBggRkdFhiJDkhBA4eDULqw7E4UKqHIAmQMaF+mBaX3842zJAiIiMFUOEJCOEwP4rWfj8QBwupmkCxLqRGV7p7oOpvf3hZGsp8QiJiEjXGCJU74QQ2PdXJlYdiMPldAUATYC82t0HU/v4o5kNA4SIq
KFgiFC9UasF/vdXJj4/EIe/bmkCpLGFGV7t7oupvf3gyAAhImpwGCKkc5oAycDK/XG4mpEHAGhiYYbxPXwxpbc/HJpYSDxCIiKSCkOEdEatFvj9cgY+P/AgQGwszTG+hw+m9PKHPQOEiKjBY4hQnVOrBX69dAtfHIjHtUxNgNhammNCT19M7uWHpo0ZIEREpMEQoTqjUgv8evEWPj8Qh7isfACaAJnYyw+Te/pB1riRxCMkIiJ9wxChJ6ZSC/x8IR1fHIxH/P0AsTLHpJ5+mMQAISKiKjBEqNbuB8jnB+KQcLsAAGBnZY7JvfwxoacvZNYMECIiqhpDhGqsVKXGT2VnQBLLAkRm3QhTevlhfE9f2FkxQIiIqHoYIlRtpSo1fjiXji8PxSPpjiZAmjYuC5AevrBlgBARUQ0xROixSlVq7DmbhtWH4pGcXQgAsG/cCFN6+2N8D1/YWPKvERER1Q7fQahSJWUB8uXBeNzM0QSIQxMLTO3tj1e6+zBAiIjoifGdhB5RolLjv2dS8eWheKTk3AMAODaxwNQ+/nilmw+aMECIiKiO8B2FtIpL1fj+TCpWH4pH6l1NgDSzscC0Pv4Y180HjS3414WIiOoW31kIxaVq7Dqdgq8OJSAt936AWOL1vv4YG+oDawsziUdIRETGiiHSgClLVdh5KhVrDsUjXV4EAHCytcRrfRggRERUPxgiDZCyVIWdsSn46nACbpUFiLOtJV7vG4Axod6wasQAISKi+sEQaUCKSlT4LjYFaw4nIEOhCRAXO0tM7xuAUV0ZIEREVP8YIg1AUYkKO07exJojCchUKAEArnZWmN4vAC+HeDFAiIhIMgwRI1ZUosL2kzfx9UMB4iazwhv9AjAyxAuW5gwQIiKSFkPECBWVqLA1RhMgt/M0AeIus8Ib/QMxoosnA4SIiPQGQ8SI3CtWYWvMDaw9mqgNEI+m1pjRPxDDO3vCwtxU4hESERGVxxAxAoXFpdgafRNrjybgTn4xAMDTXhMgL3VigBARkf7SeYgolUqEhobi/PnzOHv2LDp06KDrTTYYhcWl+DbqBtYdTUR2gSZAvBysMbN/IF7s5IlGZgwQIiLSbzoPkXfffRfu7u44f/68rjfVYBQoS7E56gbW/5mInLIA8XZojJkDAvFCRw8GCBERGQydhshvv/2G//3vf/j+++/x22+/6XJTDUK+shSbo5Kx/mgi7haWAAB8HBtjZv9ADGOAEBGRAdJZiGRmZmLq1KnYu3cvGjduXK1llEollEql9rlCodDV8AxKXlGJ9gxIblmA+Do2xqwBQRjawR3mDBAiIjJQOgkRIQQmTJiA119/HV26dEFycnK1lluyZAkWLVqkiyEZJEVRCTYdT8Y3x5Igv6cJEP9mTTBzQCCeb88AISIiw1ejEJk3bx6WLVtW5TxXrlzB//73P+Tl5WH+/Pk1Gsz8+fPx1ltvaZ8rFAp4eXnVaB3GQFFUgohjydhwLBGKolIAgL9TE8weEITn2rvDzNRE4hESERHVDRMhhKjuzLdv30Z2dnaV8/j7+2PkyJH46aefYGLy4A1TpVLBzMwMY8eOxaZNm6q1PYVCAZlMBrlcDjs7u+oO02DJ75Vg47EkbDyehLyyAAlwaoLZYUEY0o4BQkREhqEm7981CpHqunnzZrn7O9LT0zFw4EDs3r0boaGh8PT0rNZ6GkqIyAtLsOF4EiIeCpAgZxvMCgvC4LZuDBAiIjIoNXn/1sk9It7e3uWe29jYAAACAgKqHSENQW5hMTYcS0Lk8WTkKTUB0tzFBrPDgvBsGzeYMkCIiMjI8ZtVJXC3oCxATiQjvyxAgl1tMWtAEAa1cWWAEBFRg1EvIeLr6wsdXAEyODkFxfjmz0RsOpGMgmIVAE2AzA0PwtOtGCBERNTw8IxIPcgpKMb6PxOx+aEAaeVmh9lhQXi6lQsDhIiIGiyGiA5l5yux7s9EfBt1A4VlAdLa3Q5zwoLwVCuXcp8qIiIiaogYIjpwJ
1+JdUc1AXKvRBMgbTzsMDesOcJaOjNAiIiIyjBE6tDtPCXWHU3Aluib2gBp5ynDnLAgDAhmgBAREf0dQ6QOZOUVYe2RRGyNuYGiEjUAoL2nDHPDm6NfCycGCBERUSUYIk8gS1GEr8sCRFmqCZAOXk0xJzwI/ZozQIiIiB6HIVILmYoirDmcgO0nb2oDpKN3U8wNb44+Qc0YIERERNXEEKmBDHkR1hyOx/bYFBSXBUhnH3vMCQtCbwYIERFRjTFEqiE99x7WHE7Ad7EpKFZpAiTE1x5zwpqjZ6AjA4SIiKiWGCJVSM+9h68Ox2NnbKo2QLr6OmBueBC6BzBAiIiInhRDpAKpdwvx1eEE7DqVghKV5qvpQ/0cMCc8CN39GSBERER1hSHykJQcTYDsPv0gQLr5O2BueHN083eUeHRERETGhyECTYCsPhSP3adTUarWBEiPAEfMCQtCKAOEiIhIZxp0iNzMLsSXh+Lw3zNp2gDpFdgMc8KDEOLrIPHoiIiIjF+DDJEb2QX48mA8/ns2DaqyAOkd1Axzw4PQ2YcBQkREVF8aZIjsPp2KXadTAQB9mjthTlgQOvvYSzwqIiKihqdBhsiknn6Iy8zHa3390dGbAUJERCSVBhki9k0s8PUrnaUeBhERUYNnKvUAiIiIqOFiiBAREZFkGCJEREQkGYYIERERSYYhQkRERJJhiBAREZFkGCJEREQkGYYIERERSYYhQkRERJJhiBAREZFkGCJEREQkGYYIERERSYYhQkRERJLR69++K4QAACgUColHQkRERNV1/337/vt4VfQ6RPLy8gAAXl5eEo+EiIiIaiovLw8ymazKeUxEdXJFImq1Gunp6bC1tYWJiUmdrluhUMDLywspKSmws7Or03XrA+6f4TP2feT+GT5j30dj3z9Ad/sohEBeXh7c3d1halr1XSB6fUbE1NQUnp6eOt2GnZ2d0f4FA7h/xsDY95H7Z/iMfR+Nff8A3ezj486E3MebVYmIiEgyDBEiIiKSTIMNEUtLSyxcuBCWlpZSD0UnuH+Gz9j3kftn+Ix9H419/wD92Ee9vlmViIiIjFuDPSNCRERE0mOIEBERkWQYIkRERCQZhggRERFJhiFCREREkjHaEPnoo4/Qo0cPNG7cGE2bNq1wnps3b2Lw4MFo3LgxnJ2d8c4776C0tLTK9ebk5GDs2LGws7ND06ZNMXnyZOTn5+tgD2rm8OHDMDExqfARGxtb6XL9+vV7ZP7XX3+9Hkdefb6+vo+MdenSpVUuU1RUhBkzZsDR0RE2NjZ46aWXkJmZWU8jrr7k5GRMnjwZfn5+sLa2RkBAABYuXIji4uIql9P347d69Wr4+vrCysoKoaGhOHnyZJXz79q1C8HBwbCyskLbtm3x66+/1tNIa27JkiUICQmBra0tnJ2dMWzYMFy7dq3KZSIjIx85XlZWVvU04pr597///chYg4ODq1zGkI5fRf+emJiYYMaMGRXObwjH7ujRo3juuefg7u4OExMT7N27t9zrQgi8//77cHNzg7W1NcLDwxEXF/fY9db057imjDZEiouLMWLECEyfPr3C11UqFQYPHozi4mKcOHECmzZtQmRkJN5///0q1zt27FhcvnwZ+/btw88//4yjR49i2rRputiFGunRowdu3bpV7jFlyhT4+fmhS5cuVS47derUcst98skn9TTqmlu8eHG5sc6aNavK+d9880389NNP2LVrF44cOYL09HS8+OKL9TTa6rt69SrUajXWrl2Ly5cv47PPPsPXX3+Nf/7zn49dVl+P33fffYe33noLCxcuxJkzZ9C+fXsMHDgQWVlZFc5/4sQJjB49GpMnT8bZs2cxbNgwDBs2DJcuXarnkVfPkSNHMGPGDERHR2Pfvn0oKSnB008/jYKCgiqXs7OzK3e8bty4UU8jrrnWrVuXG+uxY8cqndfQjl9sbGy5fdu3bx8AYMSIEZUuo+/HrqCgAO3bt8fq1asrfP2TTz7B559/j
q+//hoxMTFo0qQJBg4ciKKiokrXWdOf41oRRi4iIkLIZLJHpv/666/C1NRUZGRkaKetWbNG2NnZCaVSWeG6/vrrLwFAxMbGaqf99ttvwsTERKSlpdX52J9EcXGxcHJyEosXL65yvr59+4o5c+bUz6CekI+Pj/jss8+qPX9ubq5o1KiR2LVrl3balStXBAARFRWlgxHWrU8++UT4+flVOY8+H7+uXbuKGTNmaJ+rVCrh7u4ulixZUuH8I0eOFIMHDy43LTQ0VLz22ms6HWddycrKEgDEkSNHKp2nsn+P9NHChQtF+/btqz2/oR+/OXPmiICAAKFWqyt83ZCOnRBCABB79uzRPler1cLV1VV8+umn2mm5ubnC0tJSbN++vdL11PTnuDaM9ozI40RFRaFt27ZwcXHRThs4cCAUCgUuX75c6TJNmzYtd4YhPDwcpqamiImJ0fmYa+LHH39EdnY2Jk6c+Nh5t27dimbNmqFNmzaYP38+CgsL62GEtbN06VI4OjqiY8eO+PTTT6u8lHb69GmUlJQgPDxcOy04OBje3t6Iioqqj+E+EblcDgcHh8fOp4/Hr7i4GKdPny73Z29qaorw8PBK/+yjoqLKzQ9ofiYN4VgBmuMF4LHHLD8/Hz4+PvDy8sLQoUMr/fdGH8TFxcHd3R3+/v4YO3Ysbt68Wem8hnz8iouLsWXLFkyaNKnK3/RuSMfu75KSkpCRkVHuGMlkMoSGhlZ6jGrzc1wbev3bd3UpIyOjXIQA0D7PyMiodBlnZ+dy08zNzeHg4FDpMlLZsGEDBg4c+NjfXjxmzBj4+PjA3d0dFy5cwHvvvYdr167hv//9bz2NtPpmz56NTp06wcHBASdOnMD8+fNx69YtrFixosL5MzIyYGFh8cg9Qi4uLnp3vP4uPj4eX3zxBZYvX17lfPp6/O7cuQOVSlXhz9jVq1crXKayn0l9P1YAoFarMXfuXPTs2RNt2rSpdL4WLVpg48aNaNeuHeRyOZYvX44ePXrg8uXLOv9N4zUVGhqKyMhItGjRArdu3cKiRYvQu3dvXLp0Cba2to/Mb8jHb+/evcjNzcWECRMqnceQjl1F7h+Hmhyj2vwc14ZBhci8efOwbNmyKue5cuXKY2+oMiS12efU1FT88ccf2Llz52PX//D9LW3btoWbmxvCwsKQkJCAgICA2g+8mmqyf2+99ZZ2Wrt27WBhYYHXXnsNS5Ys0dvfBVGb45eWloZnnnkGI0aMwNSpU6tcVurjRxozZszApUuXqryHAgC6d++O7t27a5/36NEDLVu2xNq1a/HBBx/oepg1MmjQIO1/t2vXDqGhofDx8cHOnTsxefJkCUdW9zZs2IBBgwbB3d290nkM6dgZGoMKkbfffrvKYgUAf3//aq3L1dX1kTt/73+awtXVtdJl/n6DTmlpKXJycipd5knVZp8jIiLg6OiI559/vsbbCw0NBaD5P/L6eCN7kmMaGhqK0tJSJCcno0WLFo+87urqiuLiYuTm5pY7K5KZmamz4/V3Nd2/9PR09O/fHz169MC6detqvL36Pn6VadasGczMzB75hFJVf/aurq41ml9fzJw5U3vjek3/z7hRo0bo2LEj4uPjdTS6utO0aVM0b9680rEa6vG7ceMG9u/fX+OziIZ07IAH72uZmZlwc3PTTs/MzESHDh0qXKY2P8e1Umd3m+ipx92smpmZqZ22du1aYWdnJ4qKiipc1/2bVU+dOqWd9scff+jVzapqtVr4+fmJt99+u1bLHzt2TAAQ58+fr+OR1b0tW7YIU1NTkZOTU+Hr929W3b17t3ba1atX9fZm1dTUVBEUFCRGjRolSktLa7UOfTp+Xbt2FTNnztQ+V6lUwsPDo8qbVYcMGVJuWvfu3fX2Zke1Wi1mzJgh3N3dxfXr12u1jtLSUtGiRQvx5ptv1vHo6l5eXp6wt7cXq1atqvB1Qzt+9y1cuFC4urqKkpKSGi2n78cOldysunz5cu00uVxerZtVa
/JzXKux1tma9MyNGzfE2bNnxaJFi4SNjY04e/asOHv2rMjLyxNCaP4StWnTRjz99NPi3Llz4vfffxdOTk5i/vz52nXExMSIFi1aiNTUVO20Z555RnTs2FHExMSIY8eOiaCgIDF69Oh637/K7N+/XwAQV65ceeS11NRU0aJFCxETEyOEECI+Pl4sXrxYnDp1SiQlJYkffvhB+Pv7iz59+tT3sB/rxIkT4rPPPhPnzp0TCQkJYsuWLcLJyUm8+uqr2nn+vn9CCPH6668Lb29vcfDgQXHq1CnRvXt30b17dyl2oUqpqakiMDBQhIWFidTUVHHr1i3t4+F5DOn47dixQ1haWorIyEjx119/iWnTpommTZtqP6n2yiuviHnz5mnnP378uDA3NxfLly8XV65cEQsXLhSNGjUSFy9elGoXqjR9+nQhk8nE4cOHyx2vwsJC7Tx/38dFixaJP/74QyQkJIjTp0+LUaNGCSsrK3H58mUpdqFKb7/9tjh8+LBISkoSx48fF+Hh4aJZs2YiKytLCGH4x08IzZuqt7e3eO+99x55zRCPXV5enva9DoBYsWKFOHv2rLhx44YQQoilS5eKpk2bih9++EFcuHBBDB06VPj5+Yl79+5p1zFgwADxxRdfaJ8/7ue4LhhtiIwfP14AeORx6NAh7TzJycli0KBBwtraWjRr1ky8/fbb5ar40KFDAoBISkrSTsvOzhajR48WNjY2ws7OTkycOFEbN/pg9OjRokePHhW+lpSUVO7P4ObNm6JPnz7CwcFBWFpaisDAQPHOO+8IuVxejyOuntOnT4vQ0FAhk8mElZWVaNmypfj444/Lnb36+/4JIcS9e/fEG2+8Iezt7UXjxo3FCy+8UO7NXV9ERERU+Pf14ZOWhnj8vvjiC+Ht7S0sLCxE165dRXR0tPa1vn37ivHjx5ebf+fOnaJ58+bCwsJCtG7dWvzyyy/1POLqq+x4RUREaOf5+z7OnTtX++fh4uIinn32WXHmzJn6H3w1vPzyy8LNzU1YWFgIDw8P8fLLL4v4+Hjt64Z+/ITQnNEGIK5du/bIa4Z47O6/Z/39cX8/1Gq1WLBggXBxcRGWlpYiLCzskX338fERCxcuLDetqp/jumAihBB1d6GHiIiIqPoa7PeIEBERkfQYIkRERCQZhggRERFJhiFCREREkmGIEBERkWQYIkRERCQZhggRERFJhiFCREREkmGIEBERkWQYIkRERCQZhggRERFJ5v8Bo2COomA1R8oAAAAASUVORK5CYII=\n" 201 | }, 202 | "metadata": {} 203 | } 204 | ] 205 | }, 206 | { 207 | "cell_type": "code", 208 | "source": [ 209 | "def elu(x,alpha):\n", 210 | " return np.where(x >= 0,x,alpha*(np.exp(x)-1))\n", 211 | "\n", 212 | "x=np.linspace(-10,10)\n", 213 | "y=elu(x,0.2)\n", 214 | "print(y)\n", 215 | "plt.title(\"eLU\")\n", 216 | "plt.plot(x,y)" 217 | ], 218 | "metadata": { 219 | "colab": { 220 | "base_uri": "https://localhost:8080/", 221 | "height": 626 222 | }, 223 | "id": "1nWl_jYShfWl", 224 | "outputId": "d2b0562a-f5d8-416e-cd86-a53469713e0a" 225 | }, 226 | "execution_count": 13, 227 | "outputs": [ 228 | { 229 | "output_type": "stream", 230 | "name": "stdout", 231 | "text": [ 232 | "[-0.19999092 -0.19998634 -0.19997946 
-0.19996911 -0.19995353 -0.19993011\n", 233 | " -0.19989489 -0.1998419 -0.19976221 -0.19964235 -0.19946208 -0.19919094\n", 234 | " -0.19878314 -0.19816977 -0.19724724 -0.19585971 -0.19377278 -0.19063394\n", 235 | " -0.18591295 -0.17881233 -0.16813263 -0.15206979 -0.12791044 -0.09157351\n", 236 | " -0.03692084 0.20408163 0.6122449 1.02040816 1.42857143 1.83673469\n", 237 | " 2.24489796 2.65306122 3.06122449 3.46938776 3.87755102 4.28571429\n", 238 | " 4.69387755 5.10204082 5.51020408 5.91836735 6.32653061 6.73469388\n", 239 | " 7.14285714 7.55102041 7.95918367 8.36734694 8.7755102 9.18367347\n", 240 | " 9.59183673 10. ]\n" 241 | ] 242 | }, 243 | { 244 | "output_type": "execute_result", 245 | "data": { 246 | "text/plain": [ 247 | "[]" 248 | ] 249 | }, 250 | "metadata": {}, 251 | "execution_count": 13 252 | }, 253 | { 254 | "output_type": "display_data", 255 | "data": { 256 | "text/plain": [ 257 | "
" 258 | ], 259 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAh8AAAGzCAYAAACPa3XZAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjcuMSwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy/bCgiHAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA9aUlEQVR4nO3deXhU5cH+8XtmkkwCZIEEEgIBArIGElxToFIXKiAqKIL66lur1qXigkEEtEDBBUUEW/Vnta1iX60FXBA3KC5IFUTWQNh3EpaELTuZJDPP7w8kNRKWhMmcmcn3c11zkTnnOefcJyeTuZlzMmMzxhgBAAD4iN3qAAAAoGGhfAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifACoV7t27ZLNZtO0adNOOaZdu3a65pprapy3YsUK2Ww2zZw5s54SAvA1ygcAAPApygcAAPApygeAc7J3717deeedio+Pl9PpVEpKit544w2rYwHwYyFWBwAQuHJzc/WLX/xCNptNDzzwgJo3b67PP/9cd911lwoLCzVy5EirIwLwQ5QPAHX2xBNPyO12a926dYqNjZUk3Xfffbrlllv0xz/+Uffee6/FCQH4I067AKgTY4zef/99XXvttTLG6NChQ1W3/v37q6CgQKtWrbI6JgA/xCsfAOrk4MGDys/P1+uvv67XX3+9xjF5eXlq1aqVV7Zns9m8sh4A1qN8AKgTj8cjSbrtttt0++231zgmNTVVpaWlZ1xXeHi4jh07VuO8E8uHh4fXMSkAf0P5AFAnzZs3V2RkpNxut/r163fKcbt27Trjutq2basNGzbUOG/z5s1VYwAEB675AFAnDodDQ4cO1fvvv6+srKyT5h88ePCs13X11VcrJydHc+fOrTbd5XLpb3/7m1q0aKELLrjgXCMD8BM2Y4yxOgSAwJSbm6v09HQdPHhQd999t7p166YjR45o1apV+uKLL3TkyBHt2rVLycnJGjBggPr06XPSOoYMGaIOHTroF7/4hTZs2KA777xT559/vg4fPqxZs2YpKytL//jHP3TbbbdZsIcA6gPlA8A5ycvL0+TJkzVv3jwdOHBAsbGxSklJ0U033aS77767qnycyv/93//ptttuU35+viZPnqy5c+cqJydHERERuvDCC/XYY49pwIABPtwjAPWN8gEAAHyKaz4AAIBPUT4AAIBPUT4AAIBPUT4AAIBPUT4AAIBPUT4AAIBP+d3bq3s8Hu3bt0+RkZF8kBQAAAHCGKOioiIlJibKbj/9axt+Vz727dunpKQkq2MAAIA6yM7OVuvWrU87xu/KR2RkpKTj4aOioixOAwAAzkZhYaGSkpKqnsdPx+/Kx4lTLVFRUZQPAAACzNlcMsEFpwAAwKcoHwAAwKcoHwAAwKcoHwAAwKcoHwAAwKcoHwAAwKcoHwAAwKcoHwAAwKcoHwAAwKcoHwAAwKdqXT4WL16sa6+9VomJibLZbJo7d261+cYYTZgwQS1btlRERIT69eunrVu3eisvAAAIcLUuHyUlJUpLS9Mrr7xS4/ypU6fqz3/+s/7yl79o2bJlaty4sfr376+ysrJzDgsAAAJfrT9YbuDAgRo4cGCN84wxevHFF/WHP/xBgwcPliT94x//UHx8vObOnaubb775pGVcLpdcLlfV/cLCwtpGAgAAZ6HS7dF9b6/S8Ita66qUBMtyePWaj507d+rAgQPq169f1bTo6Gilp6dr6dKlNS4zZcoURUdHV92SkpK8GQkAAPzotcU79MXGXI2ak6n80nLLcni1fBw4cECSFB8fX216fHx81byfGzdunAoKCqpu2dnZ3owEAAAkrd9XoBe/2CJJmnRdimIahVmWpdanXbzN6XTK6XRaHQMAgKDlqnQrY1amKtxG/VPidf35rSzN49VXPhISjp8/ys3NrTY9Nze3ah4AA
PCtGQu3anNukWIbh+mZ63vIZrNZmser5SM5OVkJCQn68ssvq6YVFhZq2bJl6tWrlzc3BQAAzsKKXUf0+uLtkqRnbuih2CbWn22o9WmX4uJibdu2rer+zp07tWbNGjVr1kxt2rTRyJEj9dRTT6ljx45KTk7W+PHjlZiYqCFDhngzNwAAOIMSV6VGzcmUx0g3XNBK/S38C5efqnX5WLFihS6//PKq+xkZGZKk22+/XTNnztRjjz2mkpIS3XPPPcrPz9cvf/lLzZ8/X+Hh4d5LDQAAzmjK5xu1+3CpEqPDNfHaFKvjVLEZY4zVIX6qsLBQ0dHRKigoUFRUlNVxAAAISIu3HNRv3vhBkvTO79LV57y4et1ebZ6/+WwXAACCTEFphR57b60k6fZebeu9eNQW5QMAgCAzcV6WDhSWKTmuscYO7Gp1nJNQPgAACCKfr9uvuWv2yW6TXhiepogwh9WRTkL5AAAgSBwscunxD9dJku77VQdd0KapxYlqRvkAACAIGGM07oO1Olpaoa4tozSyXyerI50S5QMAgCAwZ2WOvtiYp1CHTdOHpyksxH+f4v03GQAAOCs5R0s1+eMNkqRHft1JXVv691tVUD4AAAhgHo/R6DlrVeyq1AVtYnRv3w5WRzojygcAAAHsraW7tHTHYUWEOvTC8J5y2K390LizQfkAACBAbT9YrGc/3yRJevzqLkqOa2xxorND+QAAIABVuj3KmJ0pV6VHl3aM022/aGt1pLNG+QAAIAC9umi7MrPzFRkeoqk3pspm8//TLSdQPgAACDBZewv0py+3SpImXZeiltERFieqHcoHAAABxFXp1qjZmar0GA1ISdD157eyOlKtUT4AAAgg0xdu0ebcIsU1CdPT13cPqNMtJ1A+AAAIECt2HdHri3dIkp65vodimzgtTlQ3lA8AAAJAiatSo+Zkyhhp6AWtdVVKgtWR6ozyAQBAAHjms43afbhUidHhmnhdN6vjnBPKBwAAfu6bLQf1zrI9kqRpw9IUFR5qcaJzQ/kAAMCPFZRW6LH3MiVJv+3dTr3Pi7M40bmjfAAA4McmzstSbqFL7eMaa8yALlbH8QrKBwAAfuqzdfs1d80+2W3StOFpighzWB3JKygfAAD4obyiMj3x4TpJ0v2XnacL2jS1OJH3UD4AAPAzxhg9/sE6HS2tULeWUXroyo5WR/IqygcAAH5mzsocfbExT2EOu6bflKawkOB6ug6uvQEAIMDlHC3V5I83SJIe+XUndUmIsjiR91E+AADwEx6P0eg5a1XsqtSFbZvqnr7trY5ULygfAAD4iZlLdmnpjsOKCHXohWFpctgD70PjzgblAwAAP7Atr1jPzd8kSXp8UFe1i2tscaL6Q/kAAMBilW6PRs3JlKvSo0s7xum29DZWR6pXlA8AACz26qLtyszOV1R4iKbemCqbLThPt5xA+QAAwEJZewv0py+3SpImD+6ultERFieqf5QPAAAsUlbhVsbsNar0GA3snqDBPROtjuQTlA8AACwyY+EWbcktVlyTMD01pHvQn245gfIBAIAFlu86otf/s0OSNOWGVMU2cVqcyHcoHwAA+FiJq1KjZmfKGGnYha31627xVkfyKcoHAAA+9vRnG7XnSKlaxURowrXdrI7jc5QPAAB8aNHmPP1z2R5J0vPDUhUZHmpxIt+jfAAA4CMFpRUa8/5aSdJve7dT7w5xFieyBuUDAAAfmTAvS7mFLrVv3lhjBnSxOo5lKB8AAPjAp2v366M1++Sw2zR9eE9FhDmsjmQZygcAAPUsr6hMf5i7TpJ0/2Ud1DMpxtpAFqN8AABQj4wxGvf+Oh0trVBKYpQevKKj1ZEsR/kAAKAezVmRoy835SnMYdf04T0VFsJTL98BAADqSfaRUk36eL0kadRVndQ5IdLiRP6B8gEAQD3weIxGv5epknK3Lm7XVL+7tL3VkfwG5QMAgHrw5pJd+n7HETUKc2jasDQ57A3jQ+POBuUDAAAv25ZXr
KnzN0mSHr+6q9rGNrY4kX+hfAAA4EUVbo8yZq+Rq9Kjvp2a69b0NlZH8juUDwAAvOj/fb1da3MKFBUeoqlDU2Wzcbrl5ygfAAB4ybqcAr301VZJ0pNDuishOtziRP6J8gEAgBeUVbiVMXuNKj1GV/dI0HVpiVZH8luUDwAAvGD6wi3amlesuCZOPTWkB6dbToPyAQDAOVq247D++p8dkqTnhvZQs8ZhFifyb5QPAADOQbGrUo++lyljpOEXtdaVXeOtjuT3vF4+3G63xo8fr+TkZEVERKhDhw568sknZYzx9qYAALDc059uVPaRY2oVE6Hx13SzOk5ACPH2Cp977jm9+uqreuutt5SSkqIVK1bojjvuUHR0tB566CFvbw4AAMt8vTlP7/6wR5L0/LBURYaHWpwoMHi9fCxZskSDBw/WoEGDJEnt2rXTu+++qx9++KHG8S6XSy6Xq+p+YWGhtyMBAOB1+aXlGvPeWknSnX2S1btDnMWJAofXT7v07t1bX375pbZs2SJJyszM1LfffquBAwfWOH7KlCmKjo6uuiUlJXk7EgAAXjf+o/XKK3KpQ/PGemxAZ6vjBBSvv/IxduxYFRYWqkuXLnI4HHK73Xr66ad166231jh+3LhxysjIqLpfWFhIAQEA+LVP1u7Tx5n75LDbNH14T4WHOqyOFFC8Xj5mz56td955R//85z+VkpKiNWvWaOTIkUpMTNTtt99+0nin0ymn0+ntGAAA1Iu8wjL9YW6WJGnEZR2UlhRjbaAA5PXyMXr0aI0dO1Y333yzJKlHjx7avXu3pkyZUmP5AAAgUBhjNPaDdcovrVBKYpQeuKKj1ZECktev+SgtLZXdXn21DodDHo/H25sCAMCnZi3P1leb8hQWYteMm3oqLIS3y6oLr7/yce211+rpp59WmzZtlJKSotWrV2v69Om68847vb0pAAB8JvtIqZ78ZIMk6dGrOqlTfKTFiQKX18vHSy+9pPHjx+v+++9XXl6eEhMTde+992rChAne3hQAAD7h8Rg9OidTJeVuXdKume76ZXurIwU0m/Gztx4tLCxUdHS0CgoKFBUVZXUcAAD0t//s0FOfblSjMIfmP9xXbWIbWR3J79Tm+ZuTVQAAnMbW3CJNXbBZkvTEoK4UDy+gfAAAcAoVbo8yZmeqvNKjX3Vqrv+5pI3VkYIC5QMAgFN45ettWre3QFHhIXpuaKpsNpvVkYIC5QMAgBqsyynQy19tkyQ9OaS7EqLDLU4UPCgfAAD8TFmFWxmz16jSYzSoR0tdl5ZodaSgQvkAAOBnXvj3Zm3NK1ZcE6eeHNKd0y1eRvkAAOAnlu04rL99u1OS9NzQHmrWOMziRMGH8gEAwI+KXZUaNSdTxkjDL2qtK7vGWx0pKFE+AAD40dOfblDO0WNqFROh8dd0szpO0KJ8AAAg6etNeXr3h2xJ0rRhaYoMD7U4UfCifAAAGrz80nKNeX+tJOnOPsnq1SHW4kTBjfIBAGjwxn+0XnlFLrVv3liPDehsdZygR/kAADRoH2fu08eZ++Sw2zRjeE+FhzqsjhT0KB8AgAYrr7BM4z/KkiSNuKyD0pJirA3UQFA+AAANkjFGY95fq/zSCnVvFaUHruhodaQGg/IBAGiQZi3P1tebDyosxK7pw3sqLISnRF/hOw0AaHCyj5TqyU82SJIevaqTOsVHWpyoYaF8AAAaFI/HaNScTJWUu3VJu2a665ftrY7U4FA+AAANyhvf7dQPO4+oUZhD04alyWHnQ+N8jfIBAGgwtuYWaeqCzZKkPwzqpjaxjSxO1DBRPgAADUKF26OM2Zkqr/Toss7NdcslSVZHarAoHwCABuHlr7Zp3d4CRUeE6rmhqbLZON1iFcoHACDorc3J18tfb5MkPTmku+Kjwi1O1LBRPgAAQa2swq2M2Zlye4wGpbbUdWmJVkdq8CgfAICgNm3BZm3LK1bzSKeeGtzd6jgQ5QMAEMS+33FYf/9upyRp6tBUNW0cZ
nEiSJQPAECQKnZV6tE5mTJGuvniJF3epYXVkfAjygcAICg99ckG5Rw9ptZNI/SHa7pZHQc/QfkAAASdrzbl6l/Ls2WzSdOGpamJM8TqSPgJygcAIKgcLSnXmPfXSZLu6pOsX7SPtTgRfo7yAQAIKuM/ytLBIpfOa9FEj/bvbHUc1IDyAQAIGvMy9+mTtfvlsNs0fXiawkMdVkdCDSgfAICgkFtYpvFzsyRJD1x+nlJbx1gbCKdE+QAABDxjjMa+v1YFxyrUo1W0HrjiPKsj4TQoHwCAgPev5dn6evNBhYXYNX14mkIdPL35M44OACCg7Tlcqqc+2SBJeqx/Z3WMj7Q4Ec6E8gEACFhuj9GjczJVUu7WJcnNdGefZKsj4SxQPgAAAeuNb3fqh11H1DjMoReGpclut1kdCWeB8gEACEhbcov0/L83S5L+cE03JTVrZHEinC3KBwAg4FS4PcqYvUbllR5d1rm5br44yepIqAXKBwAg4Lz81TZl7S1UTKNQTR2aKpuN0y2BhPIBAAgomdn5evnrbZKkJwd3V4uocIsTobYoHwCAgFFW4daoOZlye4yuSW2pa9MSrY6EOqB8AAACxvMLNmtbXrGaRzr15ODuVsdBHVE+AAAB4fsdh/XGdzslSVOHpqpp4zCLE6GuKB8AAL9XVFahR+dkyhjplkuSdHmXFlZHwjmgfAAA/N5Tn2xUztFjSmoWoScGdbM6Ds4R5QMA4Ne+2pSrWSuyZbNJ025MUxNniNWRcI4oHwAAv3W0pFxj3l8nSbqrT7LS28danAjeQPkAAPglY4z+MDdLB4tcOq9FEz3av7PVkeAllA8AgF+al7lPn67brxC7TTOG91R4qMPqSPASygcAwO/kFpZpwkfrJUkPXHGeerSOtjgRvInyAQDwK8YYPfbeWhUcq1CPVtEacfl5VkeCl1E+AAB+5d0fsvXNloMKC7Fr+vA0hTp4qgo29XJE9+7dq9tuu02xsbGKiIhQjx49tGLFivrYFAAgiOw+XKKnPt0gSXqsf2d1jI+0OBHqg9f/WPro0aPq06ePLr/8cn3++edq3ry5tm7dqqZNm3p7UwCAIOL2GD06J1Ol5W6lJzfTnX2SrY6EeuL18vHcc88pKSlJb775ZtW05GR+gAAAp/f3b3do+a6jahzm0LRhabLbbVZHQj3x+mmXefPm6aKLLtKwYcPUokULnX/++frrX/96yvEul0uFhYXVbgCAhmVLbpGmLdgiSRp/TTclNWtkcSLUJ6+Xjx07dujVV19Vx44dtWDBAv3+97/XQw89pLfeeqvG8VOmTFF0dHTVLSkpyduRAAB+rLzSo0dmrVG526MrurTQTRfzPBDsbMYY480VhoWF6aKLLtKSJUuqpj300ENavny5li5detJ4l8sll8tVdb+wsFBJSUkqKChQVFSUN6MBAPzQ9H9v1p+/2qaYRqH698i+ahEVbnUk1EFhYaGio6PP6vnb6698tGzZUt26Vf/Ewa5du2rPnj01jnc6nYqKiqp2AwA0DJnZ+Xpl0XZJ0lNDulM8Ggivl48+ffpo8+bN1aZt2bJFbdu29famAAABrKzCrYzZa+T2GF2blqhrUhOtjgQf8Xr5eOSRR/T999/rmWee0bZt2/TPf/5Tr7/+ukaMGOHtTQEAAtjU+Zu1/WCJWkQ69eTgFKvjwIe8Xj4uvvhiffjhh3r33XfVvXt3Pfnkk3rxxRd16623entTAIAAtWT7Ib3x3U5J0nNDUxXTKMziRPAlr7/PhyRdc801uuaaa+pj1QCAAFdUVqHRc9ZKkm65pI0u79LC4kTwNd4wHwDgU09+skF7848pqVmEnhjU1eo4sADlAwDgM19syNXsFTmy2aRpN6apibNeXoCHn6N8AAB84khJucZ+sE6S9LtfJiu9fazFiWAVygcAoN4ZYzR+bpYOFbvUsUUTjbqqs9WRYCHKBwCg3s3L3KdP1+1XiN2mGTf1VHiow+pIsBDlAwBQrw4UlGn83
CxJ0oNXdFT3VtEWJ4LVKB8AgHpjjNGY99eqsKxSqa2jdf/lHayOBD9A+QAA1Jt//rBH32w5qLAQu6YPT1Oog6cdUD4AAPVkz+FSPf3pRknSY/0767wWkRYngr+gfAAAvM7tMRo1Z41Ky91KT26mO/skWx0JfoTyAQDwur/9Z4eW7zqqJs4QTRuWJrvdZnUk+BHKBwDAqzYfKNIL/94iSRp/TVclNWtkcSL4G8oHAMBryis9ypi9RuVuj67o0kLDL0qyOhL8EOUDAOA1L3+1Vev3Fappo1A9O7SHbDZOt+BklA8AgFesyc7XK4u2S5KeGtJDLSLDLU4Ef0X5AACcs7IKtzJmr5HbY3RdWqIGpba0OhL8GOUDAHDOnpu/STsOlqhFpFOTB6dYHQd+jvIBADgnS7Yf0pvf7ZIkPXdjqmIahVkbCH6P8gEAqLPCsgqNnrNWkvQ/6W10eecWFidCIKB8AADq7MmPN2hv/jG1adZIT1zd1eo4CBCUDwBAnXyxIVdzVubIZpOmDUtTY2eI1ZEQICgfAIBaO1JSrrEfrJMk3X1pe12S3MziRAgklA8AQK0YY/TEh+t0qNilTvFNlPHrTlZHQoChfAAAauWjNfv0edYBhdhtmj68p8JDHVZHQoChfAAAztqBgjJN+ChLkvTQlR3VvVW0xYkQiCgfAICzYozRY++vVWFZpdJaR+v+yzpYHQkBivIBADgr7yzbo8VbDsoZYtcLw3sqxMFTCOqGnxwAwBntOlSipz/dKEkaM6CLzmvRxOJECGSUDwDAabk9Ro/OydSxCrd6tY/Vb3u3szoSAhzlAwBwWn/9zw6t2H1UTZwhen5Yqux2m9WREOAoHwCAU9p8oEjT/71FkjThmm5q3bSRxYkQDCgfAIAalVd69MisNSp3e3RllxYadlFrqyMhSFA+AAA1eumrrdqwv1BNG4VqytAestk43QLvoHwAAE6yes9RvfL1NknS09f3UIvIcIsTIZhQPgAA1Rwrd2vU7Ex5jDS4Z6Ku7tHS6kgIMpQPAEA1z83fpB2HShQf5dTk67pbHQdBiPIBAKiyZNshzVyyS5L03NBURTcKtTYQghLlAwAgSSosq9Do99ZKkm5Nb6PLOrewOBGCFeUDACBJmvzxBu3NP6Y2zRrp8au7Wh0HQYzyAQDQwg25em9ljmw26YXhaWrsDLE6EoIY5QMAGrjDxS6N++D46ZZ7Lm2vi9s1szgRgh3lAwAaMGOMnvgwS4eKy9Upvoke+XUnqyOhAaB8AEADNnfNXs1ff0AhdpumD++p8FCH1ZHQAFA+AKCB2l9wTBM+Wi9JevjKjureKtriRGgoKB8A0AAZY/TYe2tVVFaptKQY/f6yDlZHQgNC+QCABujtZXv0n62H5Ayx64VhaQpx8HQA3+GnDQAamF2HSvTMpxslSWMHdtF5LZpYnAgNDeUDABoQt8do1JxMHatwq1f7WN3eq53VkdAAUT4AoAF5ffEOrdx9VE2cIXp+WKrsdpvVkdAAUT4AoIHYdKBQMxZukSRNuLabWjdtZHEiNFSUDwBoAMorPXpkVqbK3R716xqvYRe2tjoSGjDKBwA0AH/+cqs27i9Us8ZhmnJDD9lsnG6BdSgfABDkVu05qv+3aJsk6ekh3dU80mlxIjR0lA8ACGLHyt16dHamPEYa0jNRA3u0tDoSQPkAgGD23PxN2nGoRAlR4Zp0XXer4wCSfFA+nn32WdlsNo0cObK+NwUA+Invth3SzCW7JElTb0xVdKNQawMBP6rX8rF8+XK99tprSk1Nrc/NAAB+prCsQqPnZEqSbvtFG/Xt1NziRMB/1Vv5KC4u1q233qq//vWvatq0aX1tBgBQg0nzNmhfQZnaxjbS41d3tToOUE29lY8RI0Zo0KBB6tev32nHuVwuFRYWVrsBAOru3+sP6P1VObLZpBeGpalRWIjVkYBq6uUn8l//+pdWrVql5cuXn3HslClTNGnSpPqIAQANzuFilx7/cJ0k6
Z6+7XVRu2YWJwJO5vVXPrKzs/Xwww/rnXfeUXh4+BnHjxs3TgUFBVW37Oxsb0cCgAbBGKMnPszSoeJydY6PVMavO1kdCaiR11/5WLlypfLy8nTBBRdUTXO73Vq8eLFefvlluVwuORyOqnlOp1NOJ294AwDnau6avZq//oBCHTZNvylNzhDHmRcCLOD18nHllVdq3bp11abdcccd6tKli8aMGVOteAAAvGN/wTFN+Gi9JOnhKzsqJTHa4kTAqXm9fERGRqp79+pvZNO4cWPFxsaeNB0AcO6MMXrsvbUqKqtUWlKM7vtVB6sjAafFO5wCQIB7+/vd+s/WQwoPtWv68DSFOPjVDv/mk7+/WrRokS82AwANzq5DJXrms02SpDEDuqhD8yYWJwLOjHoMAAHK7THKmL1Gxyrc6t0hVrf3amd1JOCsUD4AIEC9tni7Vu3JV6QzRM8PS5PdbrM6EnBWKB8AEIA27i/UjIVbJEkTru2mVjERFicCzh7lAwACTHmlRxmzM1XhNurXNV43Xtja6khArVA+ACDA/OnLLdq4v1DNGodpyg09ZLNxugWBhfIBAAFk5e6jenXRdknSM9d3V/NI3iEagYfyAQABorS8Uo/OyZTHSNef30oDure0OhJQJ5QPAAgQz32+STsPlSghKlx/vC7F6jhAnVE+ACAAfLv1kN5auluS9PywVEVHhFqcCKg7ygcA+LmCYxUa/V6mJOl/f9FWl3ZsbnEi4NxQPgDAz036eL32F5SpXWwjjbu6i9VxgHNG+QAAP7Zg/QF9sGqv7DbpheFpahTmk4/kAuoV5QMA/NShYpce/2CdJOmevh10YdtmFicCvIPyAQB+yBijJz5cp8Ml5eqSEKlHft3R6kiA11A+AMAPfbBqrxasz1Wow6bpw3vKGeKwOhLgNZQPAPAz+/KP6Y/z1kuSRvbrpG6JURYnAryL8gEAfsTjMXrsvbUqclXq/DYxurdve6sjAV5H+QAAP/L2st36dtshhYfa9cKwNIU4+DWN4MNPNQD4iR0Hi/XMZxslSeMGdlX75k0sTgTUD8oHAPiBSrdHo+ZkqqzCoz7nxep/f9HW6khAvaF8AIAfeG3xDq3ek69IZ4ievzFNdrvN6khAvaF8AIDFNuwr1ItfbJEkTbwuRYkxERYnAuoX5QMALOSqdCtj9hpVuI2u6havoRe0sjoSUO8oHwBgoRe/2KpNB4oU2zhMz9zQQzYbp1sQ/CgfAGCRlbuP6LVvtkuSnr6+h+KaOC1OBPgG5QMALFBaXqlRszPlMdIN57fSgO4JVkcCfIbyAQAWePbzTdp1uFQto8M18boUq+MAPkX5AAAf+8/Wg/rH0t2SpOdvTFN0RKjFiQDfonwAgA8VHKvQ6DlrJUm/6dVWv+wYZ3EiwPcoHwDgQ5PmrdeBwjK1i22ksQO7WB0HsATlAwB8ZH7Wfn2weq/sNumF4T3VKCzE6kiAJSgfAOADB4tcevzDLEnSfb/qoAvbNrU4EWAdygcA1DNjjB7/cJ2OlJSrS0KkHu7X0epIgKUoHwBQz95ftVcLN+Qq1GHTjJt6yhnisDoSYCnKBwDUo735xzRp3npJ0sh+ndS1ZZTFiQDrUT4AoJ54PEaPvZepIlelLmgTo3v7trc6EuAXKB8AUE/+sXSXvtt2WBGhDr0wvKdCHPzKBSTKBwDUix0Hi/Xs/E2SpHFXd1FyXGOLEwH+g/IBAF5W6fYoY3amyio8+uV5cbotva3VkQC/QvkAAC97bfEOrcnOV2R4iKbemCq73WZ1JMCvUD4AwIvW7yvQi19skSRNui5FiTERFicC/A/lAwC8xFXpVsasTFW4jfqnxOv681tZHQnwS5QPAPCSGQu3anNukWIbh+mZ63vIZuN0C1ATygcAeMHK3Uf0+uLtkqRnbuih2CZOixMB/ovyAQDnqLS8UhmzM+Ux0g0XtFL/lASrIwF+jfIBAOdoymebtPtwqVpGh2vitSlWxwH8HuUDAM7Bf7Ye1
P99v1uS9PyNaYqOCLU4EeD/KB8AUEcFpRUaPWetJOn2Xm31y45xFicCAgPlAwDq6I8fr9eBwjIlxzXW2IFdrY4DBAzKBwDUwfys/fpw9V7ZbdILw9MUEeawOhIQMCgfAFBLB4tcevzDLEnSfb/qoAvaNLU4ERBYKB8AUAvGGD3+4TodKSlXl4RIPdyvo9WRgIBD+QCAWnh/1V4t3JCrUIdNM27qKWcIp1uA2qJ8AMBZ2pt/TJPmrZckPfLrTuraMsriREBgonwAwFnweIxGz8lUkatSF7SJ0b19O1gdCQhYXi8fU6ZM0cUXX6zIyEi1aNFCQ4YM0ebNm729GQDwqX8s3aUl2w8rItShF4b3lMPOh8YBdeX18vHNN99oxIgR+v7777Vw4UJVVFToqquuUklJibc3BQA+seNgsZ6dv0mSNO7qLkqOa2xxIiCwhXh7hfPnz692f+bMmWrRooVWrlypvn37entzAFCvKt0eZczOVFmFR5d2jNNt6W2tjgQEPK+Xj58rKCiQJDVr1qzG+S6XSy6Xq+p+YWFhfUcCgLP2l2+2a012viLDQzT1xlTZOd0CnLN6veDU4/Fo5MiR6tOnj7p3717jmClTpig6OrrqlpSUVJ+RAOCsrd9XoD99uVWSNOm6FLWMjrA4ERAc6rV8jBgxQllZWfrXv/51yjHjxo1TQUFB1S07O7s+IwHAWXFVupUxK1MVbqP+KfG6/vxWVkcCgka9nXZ54IEH9Mknn2jx4sVq3br1Kcc5nU45nc76igEAdTJj4VZtzi1SXJMwPXN9D9lsnG4BvMXr5cMYowcffFAffvihFi1apOTkZG9vAgDq1YpdR/Ta4u2SpGeu76HYJvwHCfAmr5ePESNG6J///Kc++ugjRUZG6sCBA5Kk6OhoRURwvhSAfystr9SoOZkyRhp6QWtdlZJgdSQg6Hj9mo9XX31VBQUFuuyyy9SyZcuq26xZs7y9KQDwumc+26jdh0uVGB2uidd1szoOEJTq5bQLAASib7Yc1Nvf75EkPT8sTVHhoRYnAoITn+0CAJIKSiv02HuZkqTf9m6nPufFWZwICF6UDwCQNHFelnILXWof11hjBnSxOg4Q1CgfABq8z9ft19w1+2S3SdOGpykizGF1JCCoUT4ANGgHi1x6/MN1kqTfX9ZBF7RpanEiIPhRPgA0WMYYjftgrY6WVqhryyg9fGUnqyMBDQLlA0CDNWdljr7YmKcwh10zbkpTWAi/EgFf4JEGoEHKOVqqyR9vkCQ98utO6pIQZXEioOGgfABocDweo9Fz1qrYVakL2zbVPX3bWx0JaFAoHwAanLeW7tLSHYcVEerQC8PS5LDzoXGAL1E+ADQo2/KK9eznmyRJjw/qqnZxjS1OBDQ8lA8ADUal26NRczLlqvTo0o5xui29jdWRgAaJ8gGgwXh10XZlZucrMjxEU29Mlc3G6RbACpQPAA1C1t4C/enLrZKkyYNT1DI6wuJEQMNF+QAQ9Moq3MqYvUaVHqOB3RM0pGcrqyMBDRrlA0DQm7Fwi7bkFiuuSZieGtKd0y2AxSgfAILa8l1H9Pp/dkiSptyQqtgmTosTAaB8AAhaJa5KjZqdKWOkGy9srV93i7c6EgBRPgAEsWc+26g9R0rVKiZCE67tZnUcAD+ifAAISos25+mdZXskSc8PS1VUeKjFiQCcQPkAEHQKSis05v21kqTf9m6n3h3iLE4E4KcoHwCCzoR5WcotdKl9XGONGdDF6jgAfobyASCofLp2vz5as08Ou03Tb+qpiDCH1ZEA/AzlA0DQyCsq0x/mrpMk3X9ZB/VMirE2EIAaUT4ABAVjjMa9v05HSyuUkhilB6/oaHUkAKdA+QAQFOasyNGXm/IU5rBr+vCeCgvh1xvgr3h0Agh42UdKNfmTDZKkjKs6qXNCpMWJAJwO5QNAQPN4jEa/l6liV6UuattUd1/a3upIAM6A8gEgoL25ZJe+33FEjcIcemF4mhx2PjQO8HeUD
wABa1tesabO3yRJevzqrmob29jiRADOBuUDQECqdHs0avYauSo96tupuW5Nb2N1JABnifIBICD9v0XblZlToKjwEE0dmiqbjdMtQKCgfAAIOOtyCvTnL7dKkiYP7q6E6HCLEwGoDcoHgIBSVuFWxuw1qvQYXd0jQYN7JlodCUAtUT4ABJTpC7doa16x4po49dSQHpxuAQIQ5QNAwPhh5xH99T87JEnP3tBDzRqHWZwIQF1QPgAEhGJXpUbNWSNjpGEXtla/bvFWRwJQR5QPAAHh6U83KvvIMbWKidCEa7tZHQfAOaB8APB7X2/O07s/7JEkPT8sVZHhoRYnAnAuKB8A/Fp+abnGvLdWkvTb3u3Uu0OcxYkAnCvKBwC/NuGj9corcql988YaM6CL1XEAeAHlA4Df+mTtPs3L3CeH3abpw3sqIsxhdSQAXkD5AOCX8grL9Ie5WZKk+y/roJ5JMdYGAuA1lA8AfscYo7EfrFN+aYVSEqP04BUdrY4EwIsoHwD8zuwV2fpqU57CHHZNH95TYSH8qgKCCY9oAH4l+0ipJn+8QZI06qpO6pwQaXEiAN5G+QDgNzweo0fnZKqk3K2L2zXV7y5tb3UkAPWA8gHAb7zx3U4t23lEjcIcmjYsTQ47HxoHBCPKBwC/sC2vSFMXbJYkPX51V7WNbWxxIgD1hfIBwHIVbo8yZmeqvNKjvp2a69b0NlZHAlCPKB8ALPf/vt6utTkFigoP0dShqbLZON0CBDPKBwBLrcsp0EtfbZUkPTmkuxKiwy1OBKC+UT4AWOZgkUsjZ61Wpcfo6h4Jui4t0epIAHwgxOoAABqmrblF+u2by7U3/5iaRzr11JAenG4BGgjKBwCf+27bId339koVlVWqXWwjvXnHJWrWOMzqWAB8hPIBwKdmr8jW4x+sU6XH6KK2TfX6by6ieAANTL1d8/HKK6+oXbt2Cg8PV3p6un744Yf62hSAAGCM0Qv/3qzH3lurSo/RdWmJevt36RQPoAGql/Ixa9YsZWRkaOLEiVq1apXS0tLUv39/5eXl1cfmAPg5V6VbI2et0UtfbZMkPXD5eXrxpp4KD3VYnAyAFWzGGOPtlaanp+viiy/Wyy+/LEnyeDxKSkrSgw8+qLFjx5522cLCQkVHR6ugoEBRUVHejgbAx46WlOue/1uh5buOKsRu0zPX99Dwi5OsjgXAy2rz/O31az7Ky8u1cuVKjRs3rmqa3W5Xv379tHTp0pPGu1wuuVyuqvuFhYXejgTAIrsOleiOmcu181CJIp0hevW2C/XLjnFWxwJgMa+Xj0OHDsntdis+Pr7a9Pj4eG3atOmk8VOmTNGkSZO8HQOAjxljlHP0mLL2FihrX4Gy9hZq5e6jKnZVqlVMhN6842J1io+0OiYAP2D5X7uMGzdOGRkZVfcLCwuVlMRLsoA/K6/0KPtoqdbvK9T6n5SNgmMVJ41NS4rRX39zoVpE8s6lAI7zevmIi4uTw+FQbm5utem5ublKSEg4abzT6ZTT6fR2DADnwOMxOljsUvaRUu05UqrsI8eUfbRU2UeO3w4UlslTw9VioQ6bOidEqntitFJaRSslMUpprWPksPPmYQD+y+vlIywsTBdeeKG+/PJLDRkyRNLxC06//PJLPfDAA97eHICz4PEYlZRXqqisUgXHKnS4uFyHS1w6WOTS4ZJyHS526XBxuQ79+HVekUvllZ7TrjMi1KEuLY8Xje6topSSGK1O8ZEKC+FTGwCcXr2cdsnIyNDtt9+uiy66SJdccolefPFFlZSU6I477qiPzQEByRgjjzn+cfIVbo/KKz2qcBuVV3pUXnX/v1+7Kt06Vu7RsQq3jlW4VVbuVtmPXx+rOP51scutorIKFZcdLxpFZRUqclWq2FWp2v5dm90mJcZEKKlpIyU1i1CbZo2U1KyRWjdtpDbNGimuSRhvhw6gTuqlfNx00006ePCgJkyYoAMHDqhnz56aP3/+SReh+tKhYpde+Xqb19bn/
T9Q9h9n89fXNY34+WKmhlE1rdqcNO+/g05Mq/pX5idfnzxPP5lnjKk2pto0c3y8x/PfdXp+nHH86+PFwGPMj8seH3vivscYuY2Rx3Pi3x+neU6eXunxyO0xqnAfn1/p8ajSbVRZ03mLehZityk6IlRxTZyKbRKm2CZOxTYOU9xPvo5t4lTzJk61jAlXqINXMQB4X728z8e5qK/3+dhxsFhXvPCN19YH1IcQu01hIXaFOuwKC7Er7Md/Qx02RYQ65Ax1KOLELcyh8FC7wn+8Hx7qUGNniCKdIYoMD1FkeKgiw0PUJPz4/ajwUDlD7LxaAaBeWPo+H/4qplGYRlzeweoYNbLJt08GZ/Pcc1aJzmJFPx3x8+E/3e+fzjvVMj9/0rTZ/ruOE7NsP5n+82X/O+/4ffuPE2yS7DbbT+admH98zIl59h+nHf/6+BiHzSaH3Sa7/fjXdruqTbPbbAqx2xTqsMthP/51iMOmEPvx+6EO24//Hi8adi7MBNAANJjy0axxmEb372J1DAAAGjxO6AIAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ+ifAAAAJ/yu0+1NcZIkgoLCy1OAgAAztaJ5+0Tz+On43flo6ioSJKUlJRkcRIAAFBbRUVFio6OPu0YmzmbiuJDHo9H+/btU2RkpGw2m1fXXVhYqKSkJGVnZysqKsqr6/YHwb5/UvDvI/sX+IJ9H9m/wFdf+2iMUVFRkRITE2W3n/6qDr975cNut6t169b1uo2oqKig/aGSgn//pODfR/Yv8AX7PrJ/ga8+9vFMr3icwAWnAADApygfAADApxpU+XA6nZo4caKcTqfVUepFsO+fFPz7yP4FvmDfR/Yv8PnDPvrdBacAACC4NahXPgAAgPUoHwAAwKcoHwAAwKcoHwAAwKcoHwAAwKeCqnw8/fTT6t27txo1aqSYmJgax+zZs0eDBg1So0aN1KJFC40ePVqVlZWnXe+RI0d06623KioqSjExMbrrrrtUXFxcD3tQO4sWLZLNZqvxtnz58lMud9lll500/r777vNh8rPXrl27k7I+++yzp12mrKxMI0aMUGxsrJo0aaKhQ4cqNzfXR4lrZ9euXbrrrruUnJysiIgIdejQQRMnTlR5eflpl/PnY/jKK6+oXbt2Cg8PV3p6un744YfTjp8zZ466dOmi8PBw9ejRQ5999pmPktbelClTdPHFFysyMlItWrTQkCFDtHnz5tMuM3PmzJOOVXh4uI8S184f//jHk7J26dLltMsE0vGTav6dYrPZNGLEiBrH+/vxW7x4sa699lolJibKZrNp7ty51eYbYzRhwgS1bNlSERER6tevn7Zu3XrG9db2cVxbQVU+ysvLNWzYMP3+97+vcb7b7dagQYNUXl6uJUuW6K233tLMmTM1YcKE06731ltv1fr167Vw4UJ98sknWrx4se6555762IVa6d27t/bv31/t9rvf/U7Jycm66KKLTrvs3XffXW25qVOn+ih17U2ePLla1gcffPC04x955BF9/PHHmjNnjr755hvt27dPN9xwg4/S1s6mTZvk8Xj02muvaf369ZoxY4b+8pe/6PHHHz/jsv54DGfNmqWMjAxNnDhRq1atUlpamvr376+8vLwaxy9ZskS33HKL7rrrLq1evVpDhgzRkCFDlJWV5ePkZ+ebb77RiBEj9P3332vhwoWqqKjQVVddpZKSktMuFxUVVe1Y7d6920eJay8lJaVa1m+//faUYwPt+EnS8uXLq+3fwoULJUnDhg075TL+fPxKSkqUlpamV155pcb5U6dO1Z///Gf95S9/0bJly9S4cWP1799fZWVlp1xnbR/HdWKC0Jtvvmmio6NPmv7ZZ58Zu91uDhw4UDXt1VdfNVFRUcblctW4rg0bNhhJZvny5VXTPv/8c2Oz2czevXu9nv1clJeXm+bNm5vJk
yefdtyvfvUr8/DDD/sm1Dlq27atmTFjxlmPz8/PN6GhoWbOnDlV0zZu3GgkmaVLl9ZDQu+bOnWqSU5OPu0Yfz2Gl1xyiRkxYkTVfbfbbRITE82UKVNqHD98+HAzaNCgatPS09PNvffeW685vSUvL89IMt98880px5zq95E/mjhxoklLSzvr8YF+/Iwx5uGHHzYdOnQwHo+nxvmBdPwkmQ8//LDqvsfjMQkJCeb555+vmpafn2+cTqd59913T7me2j6O6yKoXvk4k6VLl6pHjx6Kj4+vmta/f38VFhZq/fr1p1wmJiam2isJ/fr1k91u17Jly+o9c23MmzdPhw8f1h133HHGse+8847i4uLUvXt3jRs3TqWlpT5IWDfPPvusYmNjdf755+v5558/7WmylStXqqKiQv369aua1qVLF7Vp00ZLly71RdxzVlBQoGbNmp1xnL8dw/Lycq1cubLa995ut6tfv36n/N4vXbq02njp+GMykI6VpDMer+LiYrVt21ZJSUkaPHjwKX/f+IOtW7cqMTFR7du316233qo9e/accmygH7/y8nK9/fbbuvPOO0/7KeqBdPx+aufOnTpw4EC1YxQdHa309PRTHqO6PI7rwu8+1bY+HThwoFrxkFR1/8CBA6dcpkWLFtWmhYSEqFmzZqdcxip///vf1b9//zN+KvD//M//qG3btkpMTNTatWs1ZswYbd68WR988IGPkp69hx56SBdccIGaNWumJUuWaNy4cdq/f7+mT59e4/gDBw4oLCzspGt+4uPj/e541WTbtm166aWXNG3atNOO88djeOjQIbnd7hofY5s2bapxmVM9JgPhWHk8Ho0cOVJ9+vRR9+7dTzmuc+fOeuONN5SamqqCggJNmzZNvXv31vr16+v9E7xrKz09XTNnzlTnzp21f/9+TZo0SZdeeqmysrIUGRl50vhAPn6SNHfuXOXn5+u3v/3tKccE0vH7uRPHoTbHqC6P47rw+/IxduxYPffcc6cds3HjxjNeFBVI6rLPOTk5WrBggWbPnn3G9f/0epUePXqoZcuWuvLKK7V9+3Z16NCh7sHPUm32LyMjo2paamqqwsLCdO+992rKlCl+/dkLdTmGe/fu1YABAzRs2DDdfffdp13W6mMIacSIEcrKyjrtNRGS1KtXL/Xq1avqfu/evdW1a1e99tprevLJJ+s7Zq0MHDiw6uvU1FSlp6erbdu2mj17tu666y4Lk9WPv//97xo4cKASExNPOSaQjl8g8fvyMWrUqNO2Uklq3779Wa0rISHhpCt2T/wVREJCwimX+flFNpWVlTpy5MgplzlXddnnN998U7Gxsbruuutqvb309HRJx//X7YsnrnM5punp6aqsrNSuXbvUuXPnk+YnJCSovLxc+fn51V79yM3NrbfjVZPa7uO+fft0+eWXq3fv3nr99ddrvT1fH8OaxMXFyeFwnPSXRaf73ickJNRqvL944IEHqi4+r+3/fkNDQ3X++edr27Zt9ZTOe2JiYtSpU6dTZg3U4ydJu3fv1hdffFHrVwsD6fidOA65ublq2bJl1fTc3Fz17NmzxmXq8jiuE69dPeJHznTBaW5ubtW01157zURFRZmysrIa13XigtMVK1ZUTVuwYIFfXXDq8XhMcnKyGTVqVJ2W//bbb40kk5mZ6eVk3vf2228bu91ujhw5UuP8Execvvfee1XTNm3a5NcXnObk5JiOHTuam2++2VRWVtZpHf5yDC+55BLzwAMPVN13u92mVatWp73g9Jprrqk2rVevXn57waLH4zEjRowwiYmJZsuWLXVaR2VlpencubN55JFHvJzO+4qKikzTpk3Nn/70pxrnB9rx+6mJEyeahIQEU1FRUavl/Pn46RQXnE6bNq1qWkFBwVldcFqbx3GdsnptTX5g9+7dZvXq1WbSpEmmSZMmZvXq1Wb16tWmqKjIGHP8h6Z79+7mqquuMmvWrDHz5883zZs3N+PGjatax7Jly0znzp1NT
k5O1bQBAwaY888/3yxbtsx8++23pmPHjuaWW27x+f6dyhdffGEkmY0bN540Lycnx3Tu3NksW7bMGGPMtm3bzOTJk82KFSvMzp07zUcffWTat29v+vbt6+vYZ7RkyRIzY8YMs2bNGrN9+3bz9ttvm+bNm5vf/OY3VWN+vn/GGHPfffeZNm3amK+++sqsWLHC9OrVy/Tq1cuKXTijnJwcc95555krr7zS5OTkmP3791fdfjomUI7hv/71L+N0Os3MmTPNhg0bzD333GNiYmKq/sLsf//3f83YsWOrxn/33XcmJCTETJs2zWzcuNFMnDjRhIaGmnXr1lm1C6f1+9//3kRHR5tFixZVO1alpaVVY36+j5MmTTILFiww27dvNytXrjQ333yzCQ8PN+vXr7diF05r1KhRZtGiRWbnzp3mu+++M/369TNxcXEmLy/PGBP4x+8Et9tt2rRpY8aMGXPSvEA7fkVFRVXPdZLM9OnTzerVq83u3buNMcY8++yzJiYmxnz00Udm7dq1ZvDgwSY5OdkcO3asah1XXHGFeemll6run+lx7A1BVT5uv/12I+mk29dff101ZteuXWbgwIEmIiLCxMXFmVGjRlVrvl9//bWRZHbu3Fk17fDhw+aWW24xTZo0MVFRUeaOO+6oKjT+4JZbbjG9e/eucd7OnTurfQ/27Nlj+vbta5o1a2acTqc577zzzOjRo01BQYEPE5+dlStXmvT0dBMdHW3Cw8NN165dzTPPPFPtVaqf758xxhw7dszcf//9pmnTpqZRo0bm+uuvr/Zk7k/efPPNGn9mf/qiZKAdw5deesm0adPGhIWFmUsuucR8//33VfN+9atfmdtvv73a+NmzZ5tOnTqZsLAwk5KSYj799FMfJz57pzpWb775ZtWYn+/jyJEjq74f8fHx5uqrrzarVq3yffizcNNNN5mWLVuasLAw06pVK3PTTTeZbdu2Vc0P9ON3woIFC4wks3nz5pPmBdrxO/Gc9fPbiX3weDxm/PjxJj4+3jidTnPllVeetN9t27Y1EydOrDbtdI9jb7AZY4z3TuIAAACcXoN6nw8AAGA9ygcAAPApygcAAPApygcAAPApygcAAPApygcAAPApygcAAPApygcAAPApygcAAPApygcAAPApygcAAPCp/w958Wb2lrE45wAAAABJRU5ErkJggg==\n" 260 | }, 261 | "metadata": {} 262 | } 263 | ] 264 | } 265 | ] 266 | } -------------------------------------------------------------------------------- /Attention is all you need/transformer(attn paper).py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | from torch.nn import functional as F 4 | 5 | batch_size = 16 6 | block_size = 32 7 | max_iters = 5000 8 | eval_interval = 100 9 | learning_rate = 1e-3 10 | device = 'cuda' if torch.cuda.is_available() else 'cpu' 11 | eval_iters = 200 12 | n_embd = 64 13 | n_head = 4 14 | n_layer = 4 15 | dropout = 0.0 16 | 17 | torch.manual_seed(1337) 18 | 19 | with open('input.txt', 'r', encoding='utf-8') as f: 20 | text = f.read() 21 | 22 | chars = sorted(list(set(text))) 23 | 
vocab_size = len(chars) 24 | stoi = { ch:i for i,ch in enumerate(chars) } 25 | itos = { i:ch for i,ch in enumerate(chars) } 26 | encode = lambda s: [stoi[c] for c in s] 27 | decode = lambda l: ''.join([itos[i] for i in l]) 28 | 29 | data = torch.tensor(encode(text), dtype=torch.long) 30 | n = int(0.9*len(data)) 31 | train_data = data[:n] 32 | val_data = data[n:] 33 | 34 | def get_batch(split): 35 | data = train_data if split == 'train' else val_data 36 | ix = torch.randint(len(data) - block_size, (batch_size,)) 37 | x = torch.stack([data[i:i+block_size] for i in ix]) 38 | y = torch.stack([data[i+1:i+block_size+1] for i in ix]) 39 | x, y = x.to(device), y.to(device) 40 | return x, y 41 | 42 | @torch.no_grad() 43 | def estimate_loss(): 44 | out = {} 45 | model.eval() 46 | for split in ['train', 'val']: 47 | losses = torch.zeros(eval_iters) 48 | for k in range(eval_iters): 49 | X, Y = get_batch(split) 50 | logits, loss = model(X, Y) 51 | losses[k] = loss.item() 52 | out[split] = losses.mean() 53 | model.train() 54 | return out 55 | 56 | class Head(nn.Module): 57 | def __init__(self, head_size): 58 | super().__init__() 59 | self.key = nn.Linear(n_embd, head_size, bias=False) 60 | self.query = nn.Linear(n_embd, head_size, bias=False) 61 | self.value = nn.Linear(n_embd, head_size, bias=False) 62 | self.register_buffer('tril', torch.tril(torch.ones(block_size, block_size))) 63 | self.dropout = nn.Dropout(dropout) 64 | 65 | def forward(self, x): 66 | B,T,C = x.shape 67 | k = self.key(x) 68 | q = self.query(x) 69 | wei = q @ k.transpose(-2,-1) * C**-0.5 70 | wei = wei.masked_fill(self.tril[:T, :T] == 0, float('-inf')) 71 | wei = F.softmax(wei, dim=-1) 72 | wei = self.dropout(wei) 73 | v = self.value(x) 74 | out = wei @ v 75 | return out 76 | 77 | class MultiHeadAttention(nn.Module): 78 | def __init__(self, num_heads, head_size): 79 | super().__init__() 80 | self.heads = nn.ModuleList([Head(head_size) for _ in range(num_heads)]) 81 | self.proj = nn.Linear(n_embd, n_embd) 82 
| self.dropout = nn.Dropout(dropout) 83 | 84 | def forward(self, x): 85 | out = torch.cat([h(x) for h in self.heads], dim=-1) 86 | out = self.dropout(self.proj(out)) 87 | return out 88 | 89 | class FeedFoward(nn.Module): 90 | def __init__(self, n_embd): 91 | super().__init__() 92 | self.net = nn.Sequential( 93 | nn.Linear(n_embd, 4 * n_embd), 94 | nn.ReLU(), 95 | nn.Linear(4 * n_embd, n_embd), 96 | nn.Dropout(dropout), 97 | ) 98 | 99 | def forward(self, x): 100 | return self.net(x) 101 | 102 | class Block(nn.Module): 103 | def __init__(self, n_embd, n_head): 104 | super().__init__() 105 | head_size = n_embd // n_head 106 | self.sa = MultiHeadAttention(n_head, head_size) 107 | self.ffwd = FeedFoward(n_embd) 108 | self.ln1 = nn.LayerNorm(n_embd) 109 | self.ln2 = nn.LayerNorm(n_embd) 110 | 111 | def forward(self, x): 112 | x = x + self.sa(self.ln1(x)) 113 | x = x + self.ffwd(self.ln2(x)) 114 | return x 115 | 116 | class BigramLanguageModel(nn.Module): 117 | def __init__(self): 118 | super().__init__() 119 | self.token_embedding_table = nn.Embedding(vocab_size, n_embd) 120 | self.position_embedding_table = nn.Embedding(block_size, n_embd) 121 | self.blocks = nn.Sequential(*[Block(n_embd, n_head=n_head) for _ in range(n_layer)]) 122 | self.ln_f = nn.LayerNorm(n_embd) 123 | self.lm_head = nn.Linear(n_embd, vocab_size) 124 | 125 | def forward(self, idx, targets=None): 126 | B, T = idx.shape 127 | tok_emb = self.token_embedding_table(idx) 128 | pos_emb = self.position_embedding_table(torch.arange(T, device=device)) 129 | x = tok_emb + pos_emb 130 | x = self.blocks(x) 131 | x = self.ln_f(x) 132 | logits = self.lm_head(x) 133 | 134 | if targets is None: 135 | loss = None 136 | else: 137 | B, T, C = logits.shape 138 | logits = logits.view(B*T, C) 139 | targets = targets.view(B*T) 140 | loss = F.cross_entropy(logits, targets) 141 | 142 | return logits, loss 143 | 144 | def generate(self, idx, max_new_tokens): 145 | for _ in range(max_new_tokens): 146 | idx_cond = idx[:, 
-block_size:] 147 | logits, loss = self(idx_cond) 148 | logits = logits[:, -1, :] 149 | probs = F.softmax(logits, dim=-1) 150 | idx_next = torch.multinomial(probs, num_samples=1) 151 | idx = torch.cat((idx, idx_next), dim=1) 152 | return idx 153 | 154 | model = BigramLanguageModel() 155 | m = model.to(device) 156 | print(sum(p.numel() for p in m.parameters())/1e6, 'M parameters') 157 | 158 | optimizer = torch.optim.AdamW(model.parameters(), lr=learning_rate) 159 | 160 | for iter in range(max_iters): 161 | if iter % eval_interval == 0 or iter == max_iters - 1: 162 | losses = estimate_loss() 163 | print(f"step {iter}: train loss {losses['train']:.4f}, val loss {losses['val']:.4f}") 164 | 165 | xb, yb = get_batch('train') 166 | logits, loss = model(xb, yb) 167 | optimizer.zero_grad(set_to_none=True) 168 | loss.backward() 169 | optimizer.step() 170 | 171 | context = torch.zeros((1, 1), dtype=torch.long, device=device) 172 | print(decode(m.generate(context, max_new_tokens=2000)[0].tolist())) -------------------------------------------------------------------------------- /BERT/bert.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | import math 5 | 6 | class LayerNorm(nn.Module): 7 | def __init__(self, f_size, eps=1e-6): 8 | super().__init__() 9 | self.gamma = nn.Parameter(torch.ones(f_size)) 10 | self.beta = nn.Parameter(torch.zeros(f_size)) 11 | self.eps = eps 12 | 13 | def forward(self, x): 14 | mean = x.mean(-1, keepdim=True) 15 | std = x.std(-1, keepdim=True) 16 | return self.gamma * (x - mean) / (std + self.eps) + self.beta 17 | 18 | class Residual(nn.Module): 19 | def __init__(self, size, dropout): 20 | super().__init__() 21 | self.norm = LayerNorm(size) 22 | self.dropout = nn.Dropout(dropout) 23 | 24 | def forward(self, x, sublayer): 25 | return x + self.dropout(sublayer(self.norm(x))) 26 | 27 | class GELU(nn.Module): 28 | def forward(self, x): 29 
| return 0.5 * x * (1 + torch.tanh(math.sqrt(2 / math.pi) * (x + 0.044715 * torch.pow(x, 3)))) 30 | 31 | class SingleHeadAttention(nn.Module): 32 | def forward(self, q, k, v, mask=False, dropout=None): 33 | ans = torch.matmul(q, k.transpose(-2, -1)) / torch.sqrt(q.size(-1)) 34 | if mask: 35 | ans = ans.masked_fill(mask == 0, -1e9) 36 | ans = F.softmax(ans, dim=-1) 37 | if dropout: 38 | ans = dropout(ans) 39 | return torch.matmul(ans, v) 40 | 41 | class MultiHeadAttention(nn.Module): 42 | def __init__(self, h, d_model, dropout=0.1): 43 | super().__init__() 44 | assert d_model % h == 0 45 | self.h = h 46 | self.d_k = d_model // h 47 | self.attn = SingleHeadAttention() 48 | self.linear = nn.ModuleList([nn.Linear(d_model, d_model) for _ in range(3)]) 49 | self.out_linear = nn.Linear(d_model, d_model) 50 | self.dropout = nn.Dropout(dropout) 51 | 52 | def forward(self, q, k, v, mask=False): 53 | bs = q.size(0) 54 | q, k, v = [l(x).view(bs, -1, self.h, self.d_k).transpose(1, 2) for l, x in zip(self.linear, (q, k, v))] 55 | ans = self.attn(q, k, v, mask, dropout=self.dropout) 56 | res = ans.transpose(1, 2).contiguous().view(bs, -1, self.h * self.d_k) 57 | return self.out_linear(res) 58 | 59 | class PosEmbedding(nn.Module): 60 | def __init__(self, d_model, max_seq_len=512): 61 | super().__init__() 62 | pe = torch.zeros(max_seq_len, d_model).float() 63 | pe.requires_grad = False 64 | pos = torch.arange(0, max_seq_len).float().unsqueeze(1) 65 | div = torch.exp(torch.arange(0, d_model, 2).float() * -torch.log(torch.tensor(10000.0)) / d_model) 66 | pe[:, 0::2] = torch.sin(pos * div) 67 | pe[:, 1::2] = torch.cos(pos * div) 68 | pe = pe.unsqueeze(0) 69 | self.register_buffer('pe', pe) 70 | 71 | def forward(self, x): 72 | return self.pe[:, :x.size(1)] 73 | 74 | class SegmentEmbedding(nn.Embedding): 75 | def __init__(self, embed_size=512): 76 | super().__init__(3, embed_size, padding_idx=0) 77 | 78 | class TokenEmbedding(nn.Embedding): 79 | def __init__(self, vocab_size, 
embed_size=512): 80 | super().__init__(vocab_size, embed_size, padding_idx=0) 81 | 82 | class BERTEmbedding(nn.Module): 83 | def __init__(self, vocab_size, embed_size, dropout=0): 84 | super().__init__() 85 | self.token = TokenEmbedding(vocab_size, embed_size) 86 | self.segment = SegmentEmbedding(embed_size) 87 | self.pos = PosEmbedding(embed_size) 88 | self.dropout = nn.Dropout(dropout) 89 | 90 | def forward(self, x, seg): 91 | ans = self.token(x) + self.segment(seg) + self.pos(x) 92 | return self.dropout(ans) 93 | 94 | class FeedForward(nn.Module): 95 | def __init__(self, d_model, d_FF, dropout=0.1): 96 | super(FeedForward, self).__init__() 97 | self.linear1 = nn.Linear(d_model, d_FF) 98 | self.linear2 = nn.Linear(d_FF, d_model) 99 | self.dropout = nn.Dropout(dropout) 100 | self.gelu = GELU() 101 | 102 | def forward(self, x): 103 | return self.linear2(self.dropout(self.gelu(self.linear1(x)))) 104 | 105 | class EncoderLayer(nn.Module): 106 | def __init__(self, hidden, attn_heads, feed_forward_hidden, dropout): 107 | super().__init__() 108 | self.attention = MultiHeadAttention(h=attn_heads, d_model=hidden) 109 | self.feed_forward = FeedForward(d_model=hidden, d_FF=feed_forward_hidden) 110 | self.residual1 = Residual(size=hidden, dropout=dropout) 111 | self.residual2 = Residual(size=hidden, dropout=dropout) 112 | self.dropout = nn.Dropout(dropout) 113 | 114 | def forward(self, x, mask): 115 | x = self.residual1(x, lambda x: self.attention(x, x, x, mask)) 116 | x = self.residual2(x, self.feed_forward) 117 | return self.dropout(x) 118 | 119 | class BERT(nn.Module): 120 | def __init__(self, vocab_size, hidden=768, n_layers=12, attn_heads=12, dropout=0.1): 121 | super().__init__() 122 | self.hidden = hidden 123 | self.n_layers = n_layers 124 | self.attn_heads = attn_heads 125 | self.feed_forward_hidden = hidden * 4 126 | self.dropout = dropout 127 | 128 | self.embedding = BERTEmbedding(vocab_size=vocab_size, embed_size=hidden, dropout=dropout) 129 | self.layers = 
nn.ModuleList([EncoderLayer(hidden, attn_heads, hidden * 4, dropout) for _ in range(n_layers)]) 130 | 131 | def forward(self, x, seg): 132 | mask = (x > 0).unsqueeze(1).repeat(1, x.size(1), 1).unsqueeze(1) 133 | x = self.embedding(x, seg) 134 | for layer in self.layers: 135 | x = layer(x, mask) 136 | return x 137 | -------------------------------------------------------------------------------- /BLEU/bleu.py: -------------------------------------------------------------------------------- 1 | from collections import Counter 2 | import numpy as np 3 | # removes punctuation and adds everything to lowercase and converts to list of list of str 4 | def text_prep(sentences): 5 | if not type(sentences)== list: 6 | raise ValueError("Please enter a list") 7 | return [["".join(char.lower() for char in word if char.isalnum()) for word in sentence.split()]for sentence in sentences] 8 | 9 | def ngram(candidate,references,n): 10 | if n<1: 11 | raise ValueError("Condition not met : N>=1") 12 | candidate_ngram = Counter([tuple(candidate[i:i+n]) for i in range(len(candidate)-n+1)]) 13 | max_ref=Counter() 14 | for ref in references: 15 | ref_ngram=Counter([tuple(ref[i:i+n]) for i in range(len(ref)-n+1)]) 16 | for n_gram in ref_ngram: 17 | max_ref[n_gram]=max(max_ref[n_gram],ref_ngram[n_gram]) 18 | clipped_cnt={k:min(count,max_ref[k]) for k,count in candidate_ngram.items()} 19 | return sum(clipped_cnt.values()),sum(candidate_ngram.values()) 20 | 21 | 22 | def bp(candidate,references): 23 | if len(candidate)>min(len(ref) for ref in references): 24 | return 1 25 | else: 26 | return float(np.exp(1-len(candidate)/min(len(ref) for ref in references))) 27 | 28 | 29 | def bleu(candidate_seq,reference_seq,n_max): 30 | candidate_seq: list[str] = text_prep([candidate_seq])[0] 31 | references_seq: list[list[str]] = text_prep(reference_seq) 32 | precision=[] 33 | for n in range(1,n_max+1): 34 | p_n,total=ngram(candidate_seq,reference_seq,n) 35 | precision.append(p_n/total if total>0 else 0) 36 | 
if all(p==0 for p in precision): 37 | return 0 38 | mean=np.exp(np.mean([np.log(p) for p in precision if p>0])) 39 | brev=bp(candidate_seq,reference_seq) 40 | return brev*mean 41 | -------------------------------------------------------------------------------- /DQN/Deep Q Networks.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from tensorflow import keras 3 | import tensorflow as tf 4 | import keras.backend.tensorflow_backend as backend 5 | from keras.models import Sequential 6 | from keras.layers import Dense, Dropout, Conv2d, MaxPooling2D, Activation, Flatten 7 | from keras.optimizers import Adam 8 | from keras.callbacks import TensorBoard 9 | from collections import deque 10 | import time 11 | import random 12 | from tqdm import tqdm 13 | import os 14 | from PIL import Image 15 | import cv2 16 | 17 | MODEL_NAME="256x2" 18 | MIN_REPLAY_MEMORY_SIZE = 1_000 19 | MINIBATCH_SIZE = 64 20 | UPDATE_TARGET_EVERY = 5 21 | MIN_REWARD = -200 22 | MEMORY_FRACTION = 0.20 23 | EPISODES = 20_000 24 | epsilon = 1 25 | EPSILON_DECAY = 0.99975 26 | MIN_EPSILON = 0.001 27 | AGGREGATE_STATS_EVERY = 50 28 | SHOW_PREVIEW = False 29 | 30 | class Blob: 31 | def __init__(self, size): 32 | self.size = size 33 | self.x = np.random.randint(0, size) 34 | self.y = np.random.randint(0, size) 35 | 36 | def __str__(self): 37 | return f"Blob ({self.x}, {self.y})" 38 | 39 | def __sub__(self, other): 40 | return (self.x-other.x, self.y-other.y) 41 | 42 | def __eq__(self, other): 43 | return self.x == self.x and self.y == self.y 44 | 45 | def action(self, choice): 46 | if choice == 0: 47 | self.move(x=1, y=1) 48 | elif choice == 1: 49 | self.move(x=-1, y=-1) 50 | elif choice == 2: 51 | self.move(x=-1, y=1) 52 | elif choice == 3: 53 | self.move(x=1, y=-1) 54 | elif choice == 4: 55 | self.move(x=1, y=0) 56 | elif choice == 5: 57 | self.move(x=-1, y=0) 58 | elif choice == 6: 59 | self.move(x=0, y=1) 60 | elif choice == 7: 61 | 
self.move(x=0, y=-1) 62 | elif choice == 8: 63 | self.move(x=0, y=0) 64 | 65 | def move(self, x=False, y=False): 66 | if not x: 67 | self.x += np.random.randint(-1, 2) 68 | else: 69 | self.x += x 70 | if not y: 71 | self.y += np.random.randint(-1, 2) 72 | else: 73 | self.y += y 74 | if self.x < 0: 75 | self.x = 0 76 | elif self.x > self.size-1: 77 | self.x = self.size-1 78 | if self.y < 0: 79 | self.y = 0 80 | elif self.y > self.size-1: 81 | self.y = self.size-1 82 | 83 | class BlobEnv: 84 | SIZE = 10 85 | RETURN_IMAGES = True 86 | MOVE_PENALTY = 1 87 | ENEMY_PENALTY = 300 88 | FOOD_REWARD = 25 89 | OBSERVATION_SPACE_VALUES = (SIZE, SIZE, 3) 90 | ACTION_SPACE_SIZE = 9 91 | PLAYER_N = 1 92 | FOOD_N = 2 93 | ENEMY_N = 3 94 | d = {1: (255, 175, 0), 2: (0, 255, 0), 3: (0, 0, 255)} 95 | 96 | def reset(self): 97 | self.player = Blob(self.SIZE) 98 | self.food = Blob(self.SIZE) 99 | while self.food == self.player: 100 | self.food = Blob(self.SIZE) 101 | self.enemy = Blob(self.SIZE) 102 | while self.enemy == self.player or self.enemy == self.food: 103 | self.enemy = Blob(self.SIZE) 104 | self.episode_step = 0 105 | if self.RETURN_IMAGES: 106 | observation = np.array(self.get_image()) 107 | else: 108 | observation = (self.player-self.food) + (self.player-self.enemy) 109 | return observation 110 | 111 | def step(self, action): 112 | self.episode_step += 1 113 | self.player.action(action) 114 | if self.RETURN_IMAGES: 115 | new_observation = np.array(self.get_image()) 116 | else: 117 | new_observation = (self.player-self.food) + (self.player-self.enemy) 118 | if self.player == self.enemy: 119 | reward = -self.ENEMY_PENALTY 120 | elif self.player == self.food: 121 | reward = self.FOOD_REWARD 122 | else: 123 | reward = -self.MOVE_PENALTY 124 | done = False 125 | if reward == self.FOOD_REWARD or reward == -self.ENEMY_PENALTY or self.episode_step >= 200: 126 | done = True 127 | return new_observation, reward, done 128 | 129 | def render(self): 130 | img = self.get_image() 131 | img 
= img.resize((300, 300)) 132 | cv2.imshow("image", np.array(img)) 133 | cv2.waitKey(1) 134 | 135 | def get_image(self): 136 | env = np.zeros((self.SIZE, self.SIZE, 3), dtype=np.uint8) 137 | env[self.food.x][self.food.y] = self.d[self.FOOD_N] 138 | env[self.enemy.x][self.enemy.y] = self.d[self.ENEMY_N] 139 | env[self.player.x][self.player.y] = self.d[self.PLAYER_N] 140 | img = Image.fromarray(env, 'RGB') 141 | return img 142 | 143 | env = BlobEnv() 144 | ep_rewards = [-200] 145 | random.seed(1) 146 | np.random.seed(1) 147 | tf.set_random_seed(1) 148 | 149 | if not os.path.isdir('models'): 150 | os.makedirs('models') 151 | 152 | class ModifiedTensorBoard(TensorBoard): 153 | def __init__(self, **kwargs): 154 | super().__init__(**kwargs) 155 | self.step = 1 156 | self.writer = tf.summary.FileWriter(self.log_dir) 157 | 158 | def set_model(self, model): 159 | pass 160 | 161 | def on_epoch_end(self, epoch, logs=None): 162 | self.update_stats(**logs) 163 | 164 | def on_batch_end(self, batch, logs=None): 165 | pass 166 | 167 | def on_train_end(self, _): 168 | pass 169 | 170 | def update_stats(self, **stats): 171 | self._write_logs(stats, self.step) 172 | 173 | class DQNAgent: 174 | def __init__(self): 175 | self.model=self.create_model() 176 | self.target_model=self.create_model() 177 | self.target_model.set_weights(self.model.get_weights()) 178 | self.replay_memory=deque(maxlen=50000) 179 | self.tensorboard=ModifiedTensorBoard(log_dir=f"logs/{MODEL_NAME}-{int(time.time())}") 180 | self.target_update_counter=0 181 | 182 | def create_model(self): 183 | model=Sequential() 184 | model.add(Conv2d(256,(3,3),input_shape=env.OBSERVATION_SPACE_VALUES)) 185 | model.add(Activation("relu")) 186 | model.add(MaxPooling2D(2,2)) 187 | model.add(Dropout(0.2)) 188 | model.add(Conv2d(256,(3,3))) 189 | model.add(Activation("relu")) 190 | model.add(MaxPooling2D(2,2)) 191 | model.add(Dropout(0.2)) 192 | model.add(Flatten()) 193 | model.add(Dense(64)) 194 | model.add(Dense(env.ACTION_SPACE_SIZE, 
activation ="linear")) 195 | model.compile(loss="mse",optimizer=Adam(lr=0.001),metrics=['accuracy']) 196 | return model 197 | 198 | def update_replay_memory(self,transistion): 199 | self.replay_memory.append(transistion) 200 | 201 | def get_qs(self,state): 202 | return self.model.predict(np.array(state).reshape(-1,*state.shape)/255)[0] 203 | 204 | def train(self, terminal_state, step): 205 | if len(self.replay_memory) < MIN_REPLAY_MEMORY_SIZE: 206 | return 207 | minibatch = random.sample(self.replay_memory, MINIBATCH_SIZE) 208 | current_states = np.array([transition[0] for transition in minibatch])/255 209 | current_qs_list = self.model.predict(current_states) 210 | new_current_states = np.array([transition[3] for transition in minibatch])/255 211 | future_qs_list = self.target_model.predict(new_current_states) 212 | X = [] 213 | y = [] 214 | for index, (current_state, action, reward, new_current_state, done) in enumerate(minibatch): 215 | if not done: 216 | max_future_q = np.max(future_qs_list[index]) 217 | new_q = reward + 0.99 * max_future_q 218 | else: 219 | new_q = reward 220 | current_qs = current_qs_list[index] 221 | current_qs[action] = new_q 222 | X.append(current_state) 223 | y.append(current_qs) 224 | self.model.fit(np.array(X)/255, np.array(y), batch_size=MINIBATCH_SIZE, verbose=0, shuffle=False, callbacks=[self.tensorboard] if terminal_state else None) 225 | if terminal_state: 226 | self.target_update_counter += 1 227 | if self.target_update_counter > UPDATE_TARGET_EVERY: 228 | self.target_model.set_weights(self.model.get_weights()) 229 | self.target_update_counter = 0 230 | 231 | agent=DQNAgent() 232 | 233 | for episode in tqdm(range(1,EPISODES+1),ascii=True,unit="episode"): 234 | agent.tensorboard.step=episode 235 | episode_reward=0 236 | step=1 237 | curr_state=env.reset() 238 | done=False 239 | while not done: 240 | if np.random.random()>epsilon: 241 | action=np.argmax(agent.get_qs(curr_state)) 242 | else: 243 | 
action=np.random.randint(0,env.ACTION_SPACE_SIZE) 244 | new_state,reward,done=env.step(action) 245 | episode_reward+=reward 246 | if SHOW_PREVIEW and not episode % AGGREGATE_STATS_EVERY: 247 | env.render() 248 | 249 | agent.update_replay_memory((curr_state,action,reward,new_state,done)) 250 | agent.train(done,step) 251 | curr_state=new_state 252 | step+=1 253 | ep_rewards.append(episode_reward) 254 | if not episode % AGGREGATE_STATS_EVERY or episode == 1: 255 | average_reward = sum(ep_rewards[-AGGREGATE_STATS_EVERY:])/len(ep_rewards[-AGGREGATE_STATS_EVERY:]) 256 | min_reward = min(ep_rewards[-AGGREGATE_STATS_EVERY:]) 257 | max_reward = max(ep_rewards[-AGGREGATE_STATS_EVERY:]) 258 | agent.tensorboard.update_stats(reward_avg=average_reward, reward_min=min_reward, reward_max=max_reward, epsilon=epsilon) 259 | if min_reward >= MIN_REWARD: 260 | agent.model.save(f'models/{MODEL_NAME}__{max_reward:_>7.2f}max_{average_reward:_>7.2f}avg_{min_reward:_>7.2f}min__{int(time.time())}.model') 261 | 262 | -------------------------------------------------------------------------------- /Distillation (Hinton)/distill_mnist.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.optim as optim 4 | import numpy as np 5 | import os 6 | from torch.utils.data import DataLoader, TensorDataset 7 | 8 | class MnistTeacher(nn.Module): 9 | def __init__(self): 10 | super(MnistTeacher, self).__init__() 11 | self.conv1 = nn.Conv2d(1, 32, kernel_size=3, stride=1, padding=1) 12 | self.pool1 = nn.MaxPool2d(kernel_size=2, stride=2) 13 | self.dropout1 = nn.Dropout(0.2) 14 | self.conv2 = nn.Conv2d(32, 64, kernel_size=3, stride=1, padding=1) 15 | self.pool2 = nn.MaxPool2d(kernel_size=2, stride=2) 16 | self.dropout2 = nn.Dropout(0.2) 17 | self.conv3 = nn.Conv2d(64, 128, kernel_size=3, stride=1, padding=1) 18 | self.pool3 = nn.MaxPool2d(kernel_size=2, stride=2) 19 | self.dropout3 = nn.Dropout(0.2) 20 | self.fc1 = 
nn.Linear(128 * 3 * 3, 625) 21 | self.dropout4 = nn.Dropout(0.5) 22 | self.fc2 = nn.Linear(625, 10) 23 | 24 | def forward(self, x): 25 | x = self.pool1(torch.relu(self.conv1(x))) 26 | x = self.dropout1(x) 27 | x = self.pool2(torch.relu(self.conv2(x))) 28 | x = self.dropout2(x) 29 | x = self.pool3(torch.relu(self.conv3(x))) 30 | x = self.dropout3(x) 31 | x = x.view(-1, 128 * 3 * 3) 32 | x = torch.relu(self.fc1(x)) 33 | x = self.dropout4(x) 34 | x = self.fc2(x) 35 | return x 36 | 37 | class MnistStudent(nn.Module): 38 | def __init__(self): 39 | super(MnistStudent, self).__init__() 40 | self.fc1 = nn.Linear(784, 512) 41 | self.fc2 = nn.Linear(512, 256) 42 | self.fc3 = nn.Linear(256, 10) 43 | 44 | def forward(self, x): 45 | x = torch.relu(self.fc1(x)) 46 | x = torch.relu(self.fc2(x)) 47 | x = self.fc3(x) 48 | return x 49 | 50 | def load_mnist_local(data_dir): 51 | def read_idx3_ubyte(file_path): 52 | with open(file_path, 'rb') as f: 53 | magic_number = int.from_bytes(f.read(4), 'big') 54 | num_images = int.from_bytes(f.read(4), 'big') 55 | rows = int.from_bytes(f.read(4), 'big') 56 | cols = int.from_bytes(f.read(4), 'big') 57 | data = np.frombuffer(f.read(), dtype=np.uint8).reshape(num_images, rows, cols) 58 | return data 59 | 60 | def read_idx1_ubyte(file_path): 61 | with open(file_path, 'rb') as f: 62 | magic_number = int.from_bytes(f.read(4), 'big') 63 | num_items = int.from_bytes(f.read(4), 'big') 64 | data = np.frombuffer(f.read(), dtype=np.uint8) 65 | return data 66 | 67 | train_images = read_idx3_ubyte(os.path.join(data_dir, 'train-images.idx3-ubyte')) 68 | train_labels = read_idx1_ubyte(os.path.join(data_dir, 'train-labels.idx1-ubyte')) 69 | test_images = read_idx3_ubyte(os.path.join(data_dir, 't10k-images.idx3-ubyte')) 70 | test_labels = read_idx1_ubyte(os.path.join(data_dir, 't10k-labels.idx1-ubyte')) 71 | 72 | return train_images, train_labels, test_images, test_labels 73 | 74 | data_dir = 'your dataset link' 75 | train_images, train_labels, test_images, 
test_labels = load_mnist_local(data_dir) 76 | 77 | train_images = torch.tensor(train_images, dtype=torch.float32).unsqueeze(1) / 255.0 78 | train_labels = torch.tensor(train_labels, dtype=torch.long) 79 | test_images = torch.tensor(test_images, dtype=torch.float32).unsqueeze(1) / 255.0 80 | test_labels = torch.tensor(test_labels, dtype=torch.long) 81 | 82 | train_dataset = TensorDataset(train_images, train_labels) 83 | test_dataset = TensorDataset(test_images, test_labels) 84 | train_loader = DataLoader(train_dataset, batch_size=128, shuffle=True) 85 | test_loader = DataLoader(test_dataset, batch_size=1000, shuffle=False) 86 | 87 | teacher = MnistTeacher() 88 | student = MnistStudent() 89 | 90 | optimizer_teacher = optim.RMSprop(teacher.parameters(), lr=1e-4) 91 | optimizer_student = optim.Adam(student.parameters(), lr=1e-3) 92 | 93 | def distillation_loss(student_output, teacher_output, temperature): 94 | soft_teacher = torch.softmax(teacher_output / temperature, dim=1) 95 | soft_student = torch.softmax(student_output / temperature, dim=1) 96 | return torch.mean(-torch.sum(soft_teacher * torch.log(soft_student), dim=1)) 97 | 98 | def student_loss(student_output, target): 99 | return torch.nn.functional.cross_entropy(student_output, target) 100 | 101 | def total_loss(student_output, teacher_output, target, temperature, alpha): 102 | loss1 = distillation_loss(student_output, teacher_output, temperature) 103 | loss2 = student_loss(student_output, target) 104 | return alpha * loss1 + (1 - alpha) * loss2 105 | 106 | temperature = 2.1 107 | alpha = 0.5 108 | 109 | def train_teacher(): 110 | for i, (data, target) in enumerate(train_loader): 111 | target = torch.nn.functional.one_hot(target, num_classes=10).float() 112 | optimizer_teacher.zero_grad() 113 | output = teacher(data) 114 | loss_teacher = torch.mean(-torch.sum(target * torch.log(torch.softmax(output, dim=1)), dim=1)) 115 | loss_teacher.backward() 116 | optimizer_teacher.step() 117 | if i % 50 == 0: 118 | _, 
predicted = torch.max(output.data, 1) 119 | correct = (predicted == torch.argmax(target, dim=1)).sum().item() 120 | accuracy = 100 * correct / target.size(0) 121 | print(f"Step {i}, Training Accuracy {accuracy}") 122 | # torch.save(teacher.state_dict(), './models/teacher1.pth') 123 | 124 | def train_student(): 125 | for i, (data, target) in enumerate(train_loader): 126 | data = data.view(data.size(0), -1) 127 | optimizer_student.zero_grad() 128 | student_output = student(data) 129 | with torch.no_grad(): 130 | teacher_output = teacher(data.view(-1, 1, 28, 28)) 131 | loss_student = total_loss(student_output, teacher_output, target, temperature, alpha) 132 | loss_student.backward() 133 | optimizer_student.step() 134 | if i % 50 == 0: 135 | _, predicted = torch.max(student_output.data, 1) 136 | correct = (predicted == target).sum().item() 137 | accuracy = 100 * correct / target.size(0) 138 | print(f"Step {i}, Training Accuracy {accuracy}") 139 | # torch.save(student.state_dict(), './models/student.pth') 140 | 141 | def test_model(model, test_loader, is_teacher=False): 142 | model.eval() 143 | correct = 0 144 | total = 0 145 | with torch.no_grad(): 146 | for data, target in test_loader: 147 | if is_teacher: 148 | output = model(data) 149 | else: 150 | data = data.view(data.size(0), -1) 151 | output = model(data) 152 | _, predicted = torch.max(output.data, 1) 153 | total += target.size(0) 154 | correct += (predicted == target).sum().item() 155 | accuracy = 100 * correct / total 156 | print(f"Test Accuracy of the {'Teacher' if is_teacher else 'Student'} Model is {accuracy}%") 157 | return accuracy 158 | 159 | print("Teacher Training Started...") 160 | train_teacher() 161 | print("Teacher Training Ended...") 162 | print("Student Training Started...") 163 | train_student() 164 | print("Student Training Ended...") 165 | 166 | teacher_accuracy = test_model(teacher, test_loader, is_teacher=True) 167 | student_accuracy = test_model(student, test_loader, is_teacher=False) 
-------------------------------------------------------------------------------- /GAN/GAN.py: -------------------------------------------------------------------------------- 1 | # Import necessary libraries 2 | import torch 3 | import torch.nn as nn 4 | import torch.optim as optim 5 | import torchvision 6 | import torchvision.datasets as datasets 7 | from torch.utils.data import DataLoader 8 | import torchvision.transforms as transforms 9 | from torch.utils.tensorboard import SummaryWriter 10 | 11 | # Define Discriminator class 12 | class Disc(nn.Module): 13 | def __init__(self, in_features) -> None: 14 | super().__init__() 15 | self.discriminator = nn.Sequential( 16 | nn.Linear(in_features, 128), 17 | nn.LeakyReLU(0.1), 18 | nn.Linear(128, 1), 19 | nn.Sigmoid() 20 | ) 21 | 22 | def forward(self, x): 23 | return self.discriminator(x) 24 | 25 | # Define Generator class 26 | class Gen(nn.Module): 27 | def __init__(self, z_dim, img_dim): 28 | super().__init__() 29 | self.generator = nn.Sequential( 30 | nn.Linear(z_dim, 256), 31 | nn.LeakyReLU(0.1), 32 | nn.Linear(256, img_dim), 33 | nn.Tanh() 34 | ) 35 | 36 | def forward(self, x): 37 | return self.generator(x) 38 | 39 | # Set device (GPU if available, otherwise CPU) 40 | if torch.cuda.is_available(): 41 | device = "cuda" 42 | else: 43 | device = "cpu" 44 | 45 | # Set hyperparameters 46 | lr = 3e-4 47 | z_dim = 64 48 | img_dim = 784 49 | batch = 32 50 | epochs = 50 51 | 52 | # Initialize Discriminator and Generator 53 | disc = Disc(img_dim).to(device) 54 | gen = Gen(z_dim, img_dim).to(device) 55 | 56 | # Create fixed noise for visualization 57 | fixed_noise = torch.randn((batch, z_dim)).to(device) 58 | 59 | # Define data transformations 60 | transforms = transforms.Compose([ 61 | transforms.ToTensor(), 62 | transforms.Normalize((0.5,), (0.5,)) 63 | ]) 64 | 65 | # Load MNIST dataset 66 | dataset = datasets.MNIST(root="dataset/", transform=transforms, download=True) 67 | loader = DataLoader(dataset, batch_size=batch, 
shuffle=True) 68 | 69 | # Initialize optimizers 70 | optim_disc = optim.Adam(disc.parameters(), lr=lr) 71 | optim_gen = optim.Adam(gen.parameters(), lr=lr) 72 | 73 | # Define loss function 74 | loss_fn = nn.BCELoss() 75 | 76 | # Initialize TensorBoard writers 77 | writer_r = SummaryWriter(f"runs/GAN/real") 78 | writer_f = SummaryWriter(f"runs/GAN/fake") 79 | step = 0 80 | 81 | # Training loop 82 | for epoch in range(epochs): 83 | for batch_idx, (real, _) in enumerate(loader): 84 | real = real.view(-1, 784).to(device) 85 | batch = real.shape[0] 86 | 87 | # Generate noise and fake images 88 | noise = torch.randn(batch, z_dim).to(device) 89 | fake = gen(noise) 90 | 91 | # Train Discriminator 92 | disc_real = disc(real).view(-1) 93 | lossD_real = loss_fn(disc_real, torch.ones_like(disc_real)) 94 | disc_fake = disc(fake).view(-1) 95 | lossD_fake = loss_fn(disc_fake, torch.zeros_like(disc_fake)) 96 | lossD = (lossD_fake + lossD_real) / 2 97 | disc.zero_grad() 98 | lossD.backward(retain_graph=True) 99 | optim_disc.step() 100 | 101 | # Train Generator 102 | out = disc(fake).view(-1) 103 | lossG = loss_fn(out, torch.ones_like(out)) 104 | gen.zero_grad() 105 | lossG.backward() 106 | optim_gen.step() 107 | 108 | # Print progress and generate images for TensorBoard 109 | if batch_idx == 0: 110 | print(f"Epoch : {epoch}/{epochs} | Loss Disc : {lossD:.4f} | Loss Gen : {lossG:.4f}") 111 | with torch.no_grad(): 112 | fake = gen(fixed_noise).reshape(-1, 1, 28, 28) 113 | data = real.reshape(-1, 1, 28, 28) 114 | img_grid_fake = torchvision.utils.make_grid(fake, normalize=True) 115 | img_grid_real = torchvision.utils.make_grid(data, normalize=True) 116 | writer_f.add_image("Fake Images", img_grid_fake, global_step=step) 117 | writer_r.add_image("Real Images", img_grid_real, global_step=step) 118 | step += 1 119 | -------------------------------------------------------------------------------- /GPT2/gpt2.py: 
# GPT2/gpt2.py — GPT-2 (124M config) implemented from scratch.
import torch
import torch.nn as nn

GPT_CONFIG_124M = {
    "vocab_size": 50257,
    "context_length": 256,
    "emb_dim": 768,
    "n_heads": 12,
    "n_layers": 12,
    "drop_rate": 0.1,
    "qkv_bias": False,
}


class MultiHeadAttention(nn.Module):
    """Causal multi-head self-attention with a precomputed triangular mask."""

    def __init__(self, d_in, d_out,
                 context_length, dropout, num_heads, qkv_bias=False):
        super().__init__()
        assert d_out % num_heads == 0, \
            "d_out must be divisible by num_heads"
        self.d_out = d_out
        self.num_heads = num_heads
        self.head_dim = d_out // num_heads
        self.W_query = nn.Linear(d_in, d_out, bias=qkv_bias)
        self.W_key = nn.Linear(d_in, d_out, bias=qkv_bias)
        self.W_value = nn.Linear(d_in, d_out, bias=qkv_bias)
        self.out_proj = nn.Linear(d_out, d_out)
        self.dropout = nn.Dropout(dropout)
        # Buffer (not a Parameter): follows .to(device) but takes no gradients.
        self.register_buffer(
            "mask",
            torch.triu(torch.ones(context_length, context_length), diagonal=1),
        )

    def forward(self, x):
        b, num_tokens, d_in = x.shape
        keys = self.W_key(x)
        queries = self.W_query(x)
        values = self.W_value(x)
        # (b, T, d_out) -> (b, num_heads, T, head_dim)
        keys = keys.view(b, num_tokens, self.num_heads, self.head_dim).transpose(1, 2)
        queries = queries.view(b, num_tokens, self.num_heads, self.head_dim).transpose(1, 2)
        values = values.view(b, num_tokens, self.num_heads, self.head_dim).transpose(1, 2)
        attn_scores = queries @ keys.transpose(2, 3)
        # Mask out future positions (causal attention).
        mask_bool = self.mask.bool()[:num_tokens, :num_tokens]
        attn_scores.masked_fill_(mask_bool, -torch.inf)
        attn_weights = torch.softmax(
            attn_scores / keys.shape[-1] ** 0.5, dim=-1)
        attn_weights = self.dropout(attn_weights)
        # Merge heads back: (b, num_heads, T, head_dim) -> (b, T, d_out)
        context_vec = (attn_weights @ values).transpose(1, 2)
        context_vec = context_vec.contiguous().view(b, num_tokens, self.d_out)
        return self.out_proj(context_vec)


class LayerNorm(nn.Module):
    """LayerNorm over the last dimension with learnable scale and shift."""

    def __init__(self, emb_dim):
        super().__init__()
        self.eps = 1e-5
        self.scale = nn.Parameter(torch.ones(emb_dim))
        self.shift = nn.Parameter(torch.zeros(emb_dim))

    def forward(self, x):
        mean = x.mean(dim=-1, keepdim=True)
        var = x.var(dim=-1, keepdim=True, unbiased=False)
        norm_x = (x - mean) / torch.sqrt(var + self.eps)
        return self.scale * norm_x + self.shift


class GELU(nn.Module):
    """GELU activation, tanh approximation (Hendrycks & Gimpel)."""

    def __init__(self):
        super().__init__()

    def forward(self, x):
        # BUG FIX: the constant is sqrt(2/pi); the original used 2.9/pi.
        return 0.5 * x * (1 + torch.tanh(
            torch.sqrt(torch.tensor(2.0 / torch.pi))
            * (x + 0.044715 * torch.pow(x, 3))
        ))


class FeedForward(nn.Module):
    """Position-wise MLP with a 4x expansion and GELU nonlinearity."""

    def __init__(self, cfg):
        super().__init__()
        self.layers = nn.Sequential(
            nn.Linear(cfg["emb_dim"], 4 * cfg["emb_dim"]),
            GELU(),
            nn.Linear(4 * cfg["emb_dim"], cfg["emb_dim"]),
        )

    def forward(self, x):
        return self.layers(x)


class TransformerBlock(nn.Module):
    """Pre-norm transformer block: attention + MLP, each with a residual."""

    def __init__(self, cfg):
        super().__init__()
        self.att = MultiHeadAttention(
            d_in=cfg["emb_dim"],
            d_out=cfg["emb_dim"],
            context_length=cfg["context_length"],
            num_heads=cfg["n_heads"],
            dropout=cfg["drop_rate"],
            qkv_bias=cfg["qkv_bias"])
        self.ff = FeedForward(cfg)
        self.norm1 = LayerNorm(cfg["emb_dim"])
        self.norm2 = LayerNorm(cfg["emb_dim"])
        self.drop_shortcut = nn.Dropout(cfg["drop_rate"])

    def forward(self, x):
        # Attention sub-block with residual connection.
        # BUG FIX: the original called self.ff here too, so self.att was
        # never used and the block had no attention at all.
        shortcut = x
        x = self.drop_shortcut(self.att(self.norm1(x)))
        x = x + shortcut
        # Feed-forward sub-block with residual connection.
        shortcut = x
        x = self.drop_shortcut(self.ff(self.norm2(x)))
        x = x + shortcut
        return x


class GPTModel(nn.Module):
    """GPT-2: token + learned positional embeddings, N blocks, tied-free head."""

    def __init__(self, cfg):
        super().__init__()
        self.tok_emb = nn.Embedding(cfg["vocab_size"], cfg["emb_dim"])
        self.pos_emb = nn.Embedding(cfg["context_length"], cfg["emb_dim"])
        self.drop_emb = nn.Dropout(cfg["drop_rate"])
        self.trf_blocks = nn.Sequential(
            *[TransformerBlock(cfg) for _ in range(cfg["n_layers"])])
        self.final_norm = LayerNorm(cfg["emb_dim"])
        self.out_head = nn.Linear(
            cfg["emb_dim"], cfg["vocab_size"], bias=False
        )

    def forward(self, in_idx):
        batch_size, seq_len = in_idx.shape
        tok_embeds = self.tok_emb(in_idx)
        pos_embeds = self.pos_emb(
            torch.arange(seq_len, device=in_idx.device)
        )
        x = tok_embeds + pos_embeds  # broadcast positions over the batch
        x = self.drop_emb(x)
        x = self.trf_blocks(x)
        x = self.final_norm(x)
        logits = self.out_head(x)
        return logits
# Llama2/model.py — Llama-2 decoder with RoPE, GQA and a KV-cache.
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
from dataclasses import dataclass
from typing import Optional


@dataclass
class ModelArgs:
    dim: int = 4096
    n_layers: int = 32
    n_heads: int = 32                        # heads for Q
    n_kv_heads: Optional[int] = None         # heads for K, V (GQA)
    vocab_size: int = -1                     # must be set before building the model
    multiple_of: int = 256
    ffn_dim_multiplier: Optional[float] = None
    norm_eps: float = 1e-5
    max_batch_size: int = 32
    max_seq_len: int = 2048
    device: str = None


class RMSNorm(nn.Module):
    """Root-mean-square normalization (no mean subtraction)."""

    def __init__(self, dim: int, eps: float = 1e-5):
        super().__init__()
        self.eps = eps
        # BUG FIX: the parameter was named `weights` while forward() read
        # `self.weight`, raising AttributeError on every call.
        self.weight = nn.Parameter(torch.ones(dim))

    def _norm(self, x: torch.Tensor):
        return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps)

    def forward(self, x: torch.Tensor):
        # Normalize in fp32 for stability, then cast back to the input dtype.
        return self.weight * self._norm(x.float()).type_as(x)


def precompute_theta_pos_freqs(head_dim: int, seq_len: int, device: str, theta: float = 10000.0):
    """Precompute the complex rotation factors e^{i*m*theta_k} for RoPE."""
    assert head_dim % 2 == 0
    # (head_dim/2,) even indices 0, 2, ..., head_dim-2
    theta_num = torch.arange(0, head_dim, 2).float()
    # (head_dim/2,) per-pair base frequencies
    theta = 1 / (theta ** (theta_num / head_dim)).to(device)
    # (seq_len,) positions
    m = torch.arange(seq_len, device=device)
    # outer product -> (seq_len, head_dim/2) angles
    freqs = torch.outer(m, theta).float()
    # unit-modulus complex numbers with those angles
    freqs_complex = torch.polar(torch.ones_like(freqs), freqs)
    return freqs_complex


def rope(x: torch.Tensor, freqs_complex: torch.Tensor, device: str):
    """Apply rotary position embeddings to x of shape (b, seq_len, h, head_dim)."""
    # (b,seq_len,h,head) -> (b,seq_len,h,head/2) complex pairs
    x_complex = torch.view_as_complex(x.float().reshape(*x.shape[:-1], -1, 2))
    # (seq_len,head/2) -> (1,seq_len,1,head/2) for broadcasting
    freqs_complex = freqs_complex.unsqueeze(0).unsqueeze(2)
    # elementwise complex rotation
    x_rotated = x_complex * freqs_complex
    # back to real pairs, then flatten to the original layout
    x_out = torch.view_as_real(x_rotated)
    x_out = x_out.reshape(*x.shape)
    return x_out.type_as(x).to(device=device)


class EncoderBlock(nn.Module):
    """Pre-norm decoder block: attention + SwiGLU FFN, each with a residual."""

    def __init__(self, args: ModelArgs):
        super().__init__()
        self.n_heads = args.n_heads
        self.dim = args.dim
        self.head_dim = args.dim // args.n_heads
        self.attention = SelfAttention(args)
        self.feed_forward = FeedForward(args)
        self.attention_norm = RMSNorm(args.dim, eps=args.norm_eps)
        self.ffn_norm = RMSNorm(args.dim, eps=args.norm_eps)

    def forward(self, x: torch.Tensor, start_pos: int, freqs_complex: torch.Tensor):
        h = x + self.attention.forward(self.attention_norm(x), start_pos, freqs_complex)
        out = h + self.feed_forward.forward(self.ffn_norm(h))
        return out


def repeat_kv(x: torch.Tensor, n_rep: int) -> torch.Tensor:
    """Repeat each KV head n_rep times so K/V match the number of Q heads (GQA)."""
    batch_size, seq_len, n_kv_heads, head_dim = x.shape
    if n_rep == 1:
        return x
    return (
        x[:, :, :, None, :]
        .expand(batch_size, seq_len, n_kv_heads, n_rep, head_dim)
        .reshape(batch_size, seq_len, n_kv_heads * n_rep, head_dim)
    )


class SelfAttention(nn.Module):
    """Grouped-query attention with a KV-cache for single-token decoding."""

    def __init__(self, args: ModelArgs):
        super().__init__()
        self.n_kv_heads = args.n_heads if args.n_kv_heads is None else args.n_kv_heads
        self.n_heads_q = args.n_heads
        self.n_rep = self.n_heads_q // self.n_kv_heads
        self.head_dim = args.dim // args.n_heads

        self.wq = nn.Linear(args.dim, args.n_heads * self.head_dim, bias=False)
        self.wk = nn.Linear(args.dim, self.n_kv_heads * self.head_dim, bias=False)
        self.wv = nn.Linear(args.dim, self.n_kv_heads * self.head_dim, bias=False)
        self.wo = nn.Linear(args.n_heads * self.head_dim, args.dim, bias=False)

        self.cache_k = torch.zeros((args.max_batch_size, args.max_seq_len, self.n_kv_heads, self.head_dim))
        # BUG FIX: was `args.cache_v = ...`, which stored the value cache on
        # the config object instead of this module.
        self.cache_v = torch.zeros((args.max_batch_size, args.max_seq_len, self.n_kv_heads, self.head_dim))

    def forward(self, x: torch.Tensor, start_pos: int, freqs_complex: torch.Tensor):
        # BUG FIX: `.shape` is a property, not a method (`x.shape()` raised).
        batch_size, seq_len, _ = x.shape  # (B, 1, dim) during decoding
        xq = self.wq(x)
        xk = self.wk(x)
        xv = self.wv(x)
        xq = xq.view(batch_size, seq_len, self.n_heads_q, self.head_dim)
        # BUG FIX: xk and xv were previously reshaped from xq, discarding the
        # key/value projections entirely.
        xk = xk.view(batch_size, seq_len, self.n_kv_heads, self.head_dim)
        xv = xv.view(batch_size, seq_len, self.n_kv_heads, self.head_dim)
        # BUG FIX: rope() requires its device argument.
        xq = rope(xq, freqs_complex, x.device)
        xk = rope(xk, freqs_complex, x.device)
        # Append the new K/V to the cache, then attend over the full prefix.
        self.cache_k[:batch_size, start_pos:start_pos + seq_len] = xk
        self.cache_v[:batch_size, start_pos:start_pos + seq_len] = xv
        keys = self.cache_k[:batch_size, 0:start_pos + seq_len]
        values = self.cache_v[:batch_size, 0:start_pos + seq_len]
        keys = repeat_kv(keys, self.n_rep)
        values = repeat_kv(values, self.n_rep)
        xq = xq.transpose(1, 2)
        keys = keys.transpose(1, 2)
        values = values.transpose(1, 2)
        scores = torch.matmul(xq, keys.transpose(2, 3)) / math.sqrt(self.head_dim)
        scores = F.softmax(scores.float(), dim=-1).type_as(xq)
        out = torch.matmul(scores, values)
        out = out.transpose(1, 2).contiguous().view(batch_size, seq_len, -1)
        return self.wo(out)


class FeedForward(nn.Module):
    """SwiGLU feed-forward: w2(silu(w1(x)) * w3(x)) with Llama's 2/3 sizing."""

    def __init__(self, args: ModelArgs):
        super().__init__()
        hidden_dim = 4 * args.dim
        hidden_dim = int(2 * hidden_dim / 3)
        if args.ffn_dim_multiplier is not None:
            hidden_dim = int(args.ffn_dim_multiplier * hidden_dim)
        # BUG FIX: the original referenced an undefined name `hidden` here
        # (NameError); round hidden_dim up to a multiple of args.multiple_of.
        hidden_dim = args.multiple_of * ((hidden_dim + args.multiple_of - 1) // args.multiple_of)
        self.w1 = nn.Linear(args.dim, hidden_dim, bias=False)
        self.w2 = nn.Linear(hidden_dim, args.dim, bias=False)
        self.w3 = nn.Linear(args.dim, hidden_dim, bias=False)

    def forward(self, x: torch.Tensor):
        swish = F.silu(self.w1(x))
        x_V = self.w3(x)
        x = swish * x_V
        x = self.w2(x)
        return x


class LLama2(nn.Module):
    """Full Llama-2 model; forward() decodes exactly one token per call."""

    def __init__(self, args: ModelArgs) -> None:
        super().__init__()
        assert args.vocab_size != -1, "Set a vocab size please."
        self.args = args
        self.vocab_size = args.vocab_size
        self.n_layers = args.n_layers
        self.tok_embeddings = nn.Embedding(self.vocab_size, args.dim)
        self.layers = nn.ModuleList()
        for _ in range(args.n_layers):
            self.layers.append(EncoderBlock(args))
        self.norm = RMSNorm(args.dim, eps=args.norm_eps)
        self.output = nn.Linear(args.dim, self.vocab_size, bias=False)
        # Precompute rotations for up to 2x the max sequence length.
        self.freqs_complex = precompute_theta_pos_freqs(
            self.args.dim // self.args.n_heads,
            self.args.max_seq_len * 2,
            device=self.args.device,
        )

    def forward(self, tokens: torch.Tensor, start_pos: int):
        batch_size, seq_len = tokens.shape
        assert seq_len == 1, "only process one token per pass"
        h = self.tok_embeddings(tokens)
        freqs_complex = self.freqs_complex[start_pos:start_pos + seq_len]
        for layer in self.layers:
            h = layer(h, start_pos, freqs_complex)
        h = self.norm(h)
        out = self.output(h)
        return out.float()
# Llama4/main.py — Llama-4-style decoder: GQA + QK-norm, optional MoE layers.
import torch
import torch.nn as nn
import torch.nn.functional as F
import math
from dataclasses import dataclass
from typing import Optional, List


@dataclass
class ModelArgs:
    dim: int = 4096
    n_layers: int = 32
    n_heads: int = 32
    n_kv_heads: Optional[int] = None
    vocab_size: int = -1
    multiple_of: int = 256
    ffn_dim_multiplier: Optional[float] = None
    norm_eps: float = 1e-5
    max_batch_size: int = 32
    max_seq_len: int = 2048
    device: str = None
    num_local_experts: int = 16
    num_experts_per_tok: int = 2
    moe_layers: List[int] = None  # indices of layers that use MoE; filled in LLama4


class RMSNorm(nn.Module):
    """Root-mean-square normalization (no mean subtraction)."""

    def __init__(self, dim: int, eps: float = 1e-5):
        super().__init__()
        self.eps = eps
        self.weights = nn.Parameter(torch.ones(dim))

    def _norm(self, x: torch.Tensor):
        return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps)

    def forward(self, x: torch.Tensor):
        # FIX: cast back to the input dtype (consistent with L2Norm below);
        # the original silently upcast every activation to fp32.
        return self.weights * self._norm(x.float()).type_as(x)


class L2Norm(nn.Module):
    """Parameter-free RMS normalization, used for QK-norm."""

    def __init__(self, dim: int = None, eps: float = 1e-6):
        super().__init__()
        self.eps = eps

    def _norm(self, x):
        return x * torch.rsqrt(x.pow(2).mean(-1, keepdim=True) + self.eps)

    def forward(self, x):
        return self._norm(x.float()).type_as(x)


def repeat_kv(x: torch.Tensor, n_rep: int) -> torch.Tensor:
    """Repeat each KV head n_rep times so K/V match the number of Q heads (GQA)."""
    batch_size, seq_len, n_kv_heads, head_dim = x.shape
    if n_rep == 1:
        return x
    return (
        x[:, :, :, None, :]
        .expand(batch_size, seq_len, n_kv_heads, n_rep, head_dim)
        .reshape(batch_size, seq_len, n_kv_heads * n_rep, head_dim)
    )


class TextExperts(nn.Module):
    """Batched expert MLPs: one bmm over all experts' gate/up/down weights."""

    def __init__(self, config: ModelArgs):
        super().__init__()
        self.num_experts = config.num_local_experts
        # Same 2/3-of-4x SwiGLU sizing as the dense FeedForward.
        hidden_dim = 4 * config.dim
        hidden_dim = int(2 * hidden_dim / 3)
        if config.ffn_dim_multiplier is not None:
            hidden_dim = int(config.ffn_dim_multiplier * hidden_dim)
        hidden_dim = config.multiple_of * ((hidden_dim + config.multiple_of - 1) // config.multiple_of)

        self.expert_dim = hidden_dim
        self.hidden_size = config.dim
        # NOTE(review): torch.empty leaves these uninitialized — presumably
        # weights are loaded from a checkpoint; confirm before training.
        self.gate_up_proj = nn.Parameter(torch.empty(self.num_experts, self.hidden_size, 2 * self.expert_dim))
        self.down_proj = nn.Parameter(torch.empty((self.num_experts, self.expert_dim, self.hidden_size)))

    def forward(self, hidden_states: torch.Tensor) -> torch.Tensor:
        # (num_experts * tokens, dim) -> (num_experts, tokens, dim)
        hidden_states = hidden_states.view(self.num_experts, -1, self.hidden_size)
        gate_up = torch.bmm(hidden_states, self.gate_up_proj)
        gate, up = gate_up.chunk(2, dim=-1)
        next_states = torch.bmm((up * F.silu(gate)), self.down_proj)
        next_states = next_states.view(-1, self.hidden_size)
        return next_states


class MoE(nn.Module):
    """Top-k routed experts plus an always-on shared expert."""

    def __init__(self, config: ModelArgs):
        super().__init__()
        self.top_k = config.num_experts_per_tok
        self.hidden_dim = config.dim
        self.num_experts = config.num_local_experts
        self.experts = TextExperts(config)
        self.router = nn.Linear(config.dim, config.num_local_experts, bias=False)
        self.shared_expert = FeedForward(config)  # shared expert applied to every token

    def forward(self, hidden_states):
        batch, seq_len, hidden_dim = hidden_states.shape
        hidden_states_flat = hidden_states.view(-1, self.hidden_dim)
        router_logits = self.router(hidden_states_flat).transpose(0, 1)
        tokens_per_expert = batch * seq_len

        # Keep only each token's top-k logits; everything else becomes -inf
        # so sigmoid maps it to ~0 weight.
        router_top_value, router_indices = torch.topk(router_logits.transpose(0, 1), self.top_k, dim=1)
        router_scores = (
            torch.full_like(router_logits.transpose(0, 1), float("-inf"))
            .scatter_(1, router_indices, router_top_value)
            .transpose(0, 1)
        )

        # Every expert sees every token slot; zeroed-out scores neutralize
        # the tokens it was not routed.
        router_indices = (
            torch.arange(tokens_per_expert, device=hidden_states.device).view(1, -1).expand(router_scores.size(0), -1)
        )
        router_scores = torch.sigmoid(router_scores.float()).to(hidden_states.dtype)

        router_indices = router_indices.reshape(-1, 1).expand(-1, hidden_dim)
        routed_in = torch.gather(
            input=hidden_states_flat,
            dim=0,
            index=router_indices,
        ).to(hidden_states.device)

        routed_in = routed_in * router_scores.reshape(-1, 1)
        routed_out = self.experts(routed_in)

        # Shared expert output, plus the scattered sum of routed outputs.
        out = self.shared_expert(hidden_states)
        out_flat = out.view(-1, hidden_dim)
        out_flat.scatter_add_(dim=0, index=router_indices, src=routed_out.view(-1, hidden_dim))
        return out_flat.view(batch, seq_len, -1), router_scores


class EncoderBlock(nn.Module):
    """Pre-norm decoder block; FFN is MoE on configured layers, dense otherwise."""

    def __init__(self, args: ModelArgs, layer_idx: int = 0):
        super().__init__()
        self.n_heads = args.n_heads
        self.dim = args.dim
        self.head_dim = args.dim // args.n_heads
        self.attention = SelfAttention(args, layer_idx)
        self.is_moe_layer = False
        if args.moe_layers is not None and layer_idx in args.moe_layers:
            self.is_moe_layer = True
            self.feed_forward = MoE(args)
        else:
            self.feed_forward = FeedForward(args)

        self.attention_norm = RMSNorm(args.dim, eps=args.norm_eps)
        self.ffn_norm = RMSNorm(args.dim, eps=args.norm_eps)
        self.layer_idx = layer_idx

    def forward(self, x: torch.Tensor, start_pos: int, freqs_complex: torch.Tensor):
        h = x + self.attention.forward(
            self.attention_norm(x),
            start_pos,
            freqs_complex
        )

        residual = h
        h = self.ffn_norm(h)

        # MoE returns (output, router_scores); the dense FFN returns a tensor.
        if self.is_moe_layer:
            h, _ = self.feed_forward(h)
        else:
            h = self.feed_forward(h)

        out = residual + h
        return out


class SelfAttention(nn.Module):
    """GQA with an optional QK-norm and a KV-cache for one-token decoding."""

    def __init__(self, args: ModelArgs, layer_idx: int = 0):
        super().__init__()
        self.n_kv_heads = args.n_heads if args.n_kv_heads is None else args.n_kv_heads
        self.n_heads_q = args.n_heads
        self.n_rep = self.n_heads_q // self.n_kv_heads
        self.head_dim = args.dim // args.n_heads
        self.layer_idx = layer_idx

        self.wq = nn.Linear(args.dim, args.n_heads * self.head_dim, bias=False)
        self.wk = nn.Linear(args.dim, self.n_kv_heads * self.head_dim, bias=False)
        self.wv = nn.Linear(args.dim, self.n_kv_heads * self.head_dim, bias=False)
        self.wo = nn.Linear(args.n_heads * self.head_dim, args.dim, bias=False)

        self.use_qk_norm = args.dim <= 4096  # small models normalize Q and K
        if self.use_qk_norm:
            self.qk_norm = L2Norm()

        self.cache_k = torch.zeros((args.max_batch_size, args.max_seq_len, self.n_kv_heads, self.head_dim))
        # BUG FIX: was `args.cache_v = ...`, which stored the value cache on
        # the shared config object instead of this module.
        self.cache_v = torch.zeros((args.max_batch_size, args.max_seq_len, self.n_kv_heads, self.head_dim))

    def forward(self, x: torch.Tensor, start_pos: int, freqs_complex: torch.Tensor):
        batch_size, seq_len, _ = x.shape

        xq = self.wq(x)
        xk = self.wk(x)
        xv = self.wv(x)

        xq = xq.view(batch_size, seq_len, self.n_heads_q, self.head_dim)
        xk = xk.view(batch_size, seq_len, self.n_kv_heads, self.head_dim)
        xv = xv.view(batch_size, seq_len, self.n_kv_heads, self.head_dim)

        if self.use_qk_norm:
            xq = self.qk_norm(xq)
            xk = self.qk_norm(xk)

        # Append new K/V to the cache, attend over the full prefix.
        self.cache_k[:batch_size, start_pos:start_pos + seq_len] = xk
        self.cache_v[:batch_size, start_pos:start_pos + seq_len] = xv

        keys = self.cache_k[:batch_size, 0:start_pos + seq_len]
        values = self.cache_v[:batch_size, 0:start_pos + seq_len]

        keys = repeat_kv(keys, self.n_rep)
        values = repeat_kv(values, self.n_rep)

        xq = xq.transpose(1, 2)
        keys = keys.transpose(1, 2)
        values = values.transpose(1, 2)

        scores = torch.matmul(xq, keys.transpose(2, 3)) / math.sqrt(self.head_dim)
        scores = F.softmax(scores.float(), dim=-1).type_as(xq)

        out = torch.matmul(scores, values)
        out = out.transpose(1, 2).contiguous().view(batch_size, seq_len, -1)

        return self.wo(out)


class FeedForward(nn.Module):
    """Dense SwiGLU feed-forward: w2(silu(w1(x)) * w3(x))."""

    def __init__(self, args: ModelArgs):
        super().__init__()
        hidden_dim = 4 * args.dim
        hidden_dim = int(2 * hidden_dim / 3)
        if args.ffn_dim_multiplier is not None:
            hidden_dim = int(args.ffn_dim_multiplier * hidden_dim)
        hidden_dim = args.multiple_of * ((hidden_dim + args.multiple_of - 1) // args.multiple_of)

        self.w1 = nn.Linear(args.dim, hidden_dim, bias=False)
        self.w2 = nn.Linear(hidden_dim, args.dim, bias=False)
        self.w3 = nn.Linear(args.dim, hidden_dim, bias=False)

    def forward(self, x: torch.Tensor):
        swish = F.silu(self.w1(x))
        x_V = self.w3(x)
        x = swish * x_V
        x = self.w2(x)
        return x


class LLama4(nn.Module):
    """Full model; forward() decodes one token per call (no RoPE in this variant)."""

    def __init__(self, args: ModelArgs) -> None:
        super().__init__()
        assert args.vocab_size != -1, "Set a vocab size please."

        # Large models interleave MoE on even layers; small models stay dense.
        if args.moe_layers is None:
            if args.dim > 4096:
                args.moe_layers = [i for i in range(args.n_layers) if i % 2 == 0]
            else:
                args.moe_layers = []

        self.args = args
        self.vocab_size = args.vocab_size
        self.n_layers = args.n_layers
        self.tok_embeddings = nn.Embedding(self.vocab_size, args.dim)

        self.layers = nn.ModuleList()
        for i in range(args.n_layers):
            self.layers.append(EncoderBlock(args, layer_idx=i))

        self.norm = RMSNorm(args.dim, eps=args.norm_eps)
        self.output = nn.Linear(args.dim, self.vocab_size, bias=False)

    def forward(self, tokens: torch.Tensor, start_pos: int):
        batch_size, seq_len = tokens.shape
        assert seq_len == 1, "only process one token per pass"

        h = self.tok_embeddings(tokens)
        # NOTE(review): rotary embeddings are disabled here (None); attention
        # relies on QK-norm only — confirm this is intentional.
        freqs_complex = None

        for layer in self.layers:
            h = layer(h, start_pos, freqs_complex)

        h = self.norm(h)
        out = self.output(h)

        return out.float()
# Mistral-7B/model.py — Mistral decoder with GQA, RoPE and per-layer KV caches.
import torch
import torch.nn as nn
import torch.nn.functional as F
from typing import Optional, Tuple


class ModelArgs:
    def __init__(self, dim: int, n_layers: int, head_dim: int, hidden_dim: int,
                 n_heads: int, n_kv_head: int, norm_eps: int, vocab_size: int,
                 rope_theta: float = 10000):
        self.dim = dim
        self.n_layers = n_layers
        self.head_dim = head_dim
        self.hidden_dim = hidden_dim
        self.n_heads = n_heads
        self.n_kv_head = n_kv_head
        self.norm_eps = norm_eps
        self.vocab_size = vocab_size
        self.rope_theta = rope_theta


class FFN(nn.Module):
    """SwiGLU feed-forward: out = l2(silu(l1(x)) * l3(x))."""

    def __init__(self, args="ModelArgs"):
        super().__init__()
        self.l1 = nn.Linear(args.dim, args.hidden_dim, bias=False)
        self.l2 = nn.Linear(args.hidden_dim, args.dim, bias=False)
        self.l3 = nn.Linear(args.dim, args.hidden_dim, bias=False)

    def forward(self, x) -> torch.Tensor:
        # BUG FIX: SwiGLU gates multiplicatively; the original ADDED l3(x)
        # (its own comment documents the product form).
        return self.l2(F.silu(self.l1(x)) * self.l3(x))


class RMSNorm(nn.Module):
    """Root-mean-square normalization with a learnable gain."""

    def __init__(self, dims: int, eps: float = 1e-5):
        super().__init__()
        self.weight = nn.Parameter(torch.ones(dims))
        self.eps = eps

    def mid(self, x):
        # x / rms(x), computed via the reciprocal square root
        return x * ((x ** 2).mean(-1, keepdim=True) + self.eps).rsqrt()

    def forward(self, x):
        # Normalize in fp32 for stability, cast back to the input dtype.
        return self.weight * self.mid(x.float()).type(x.dtype)


class rope(nn.Module):
    """Rotary position embeddings applied to (b, n_heads, seq, head_dim) tensors."""

    def __init__(self, dim: int, b: float = 10000):
        super().__init__()
        self.dim = dim
        self.b = b
        self.freqs = self.fr(dim)

    def fr(self, dim: int):
        # freq_k = 1 / base^(2k/dim) for k = 0..dim/2-1 — i.e. dim/2 values,
        # one per even/odd channel pair.
        # BUG FIX: the original computed fr(dim//2) with step-2 over the
        # halved size (only dim/4 frequencies, which cannot broadcast against
        # the dim/2 channel pairs) and hard-coded the base instead of self.b.
        return 1.0 / (self.b ** (torch.arange(0, dim, 2).float() / dim))

    def forward(self, x: torch.Tensor, offset: int = 0):
        # position indices, shifted by the cache offset during decoding
        # BUG FIX: use x.device rather than hard-coded "cuda".
        t = torch.arange(x.shape[2], device=x.device) + offset
        freqs = self.freqs.to(x.device)
        # angles t*θ_k -> (seq, dim/2)
        t_sin = torch.sin(t[:, None] * freqs[None, :])
        t_cos = torch.cos(t[:, None] * freqs[None, :])
        # rotate each (even, odd) channel pair by its angle
        ans = torch.stack([x[..., 0::2] * t_cos + x[..., 1::2] * t_sin,    # even
                           -x[..., 0::2] * t_sin + x[..., 1::2] * t_cos],  # odd
                          dim=-1).flatten(-2, -1)
        return ans


class attn(nn.Module):
    """Grouped-query attention with rotary embeddings and an optional KV cache."""

    def __init__(self, args="ModelArgs"):
        super().__init__()
        self.args = args
        self.n_heads = args.n_heads
        self.n_kv_head = args.n_kv_head
        self.repeats = self.n_heads // self.n_kv_head  # GQA expansion factor
        self.scale = self.args.head_dim ** -0.5
        self.wq = nn.Linear(args.dim, args.n_heads * args.head_dim, bias=False)
        # BUG FIX: the config attribute is `n_kv_head` (the original read
        # the nonexistent `args.n_kv_heads`).
        self.wk = nn.Linear(args.dim, args.n_kv_head * args.head_dim, bias=False)
        self.wv = nn.Linear(args.dim, args.n_kv_head * args.head_dim, bias=False)
        self.wo = nn.Linear(args.n_heads * args.head_dim, args.dim, bias=False)
        self.rope = rope(args.head_dim, args.rope_theta)

    def forward(self, x: torch.Tensor, mask: Optional[torch.Tensor] = None,
                cache: Optional[Tuple[torch.Tensor, torch.Tensor]] = None) -> torch.Tensor:
        q = self.wq(x)
        k = self.wk(x)
        v = self.wv(x)
        # reshape to (b, n_heads, seq, head_dim)
        b, l, d = x.shape
        q = q.view(b, l, self.n_heads, -1).transpose(1, 2)
        k = k.view(b, l, self.n_kv_head, -1).transpose(1, 2)
        v = v.view(b, l, self.n_kv_head, -1).transpose(1, 2)

        # repeat KV heads so K/V match the number of Q heads (GQA)
        def repeat(c):
            return torch.cat([c.unsqueeze(2)] * self.repeats, dim=2).view([b, self.n_heads, l, -1])
        k, v = map(repeat, [k, v])

        # rotate new positions (offset past cached tokens), then extend cache
        if cache is not None:
            key_c, val_c = cache
            q = self.rope(q, offset=key_c.shape[2])
            k = self.rope(k, offset=key_c.shape[2])
            k = torch.cat([key_c, k], dim=2)
            v = torch.cat([val_c, v], dim=2)
        else:
            q = self.rope(q)
            k = self.rope(k)

        # QK^T / sqrt(d_k)
        atn = (q * self.scale) @ k.transpose(-1, -2)
        if mask is not None:
            atn += mask  # additive causal mask (-inf above the diagonal)
        atn = F.softmax(atn.float(), dim=-1).type(atn.dtype)
        out = (atn @ v).transpose(1, 2).contiguous().reshape(b, l, -1)
        return self.wo(out), (k, v)


class transformer(nn.Module):
    """Pre-norm decoder block: attention + FFN, each with a residual."""

    def __init__(self, args="ModelArgs"):
        super().__init__()
        self.attn = attn(args)
        self.ff = FFN(args)
        self.attn_norm = RMSNorm(args.dim, args.norm_eps)
        self.ff_norm = RMSNorm(args.dim, args.norm_eps)

    def forward(self, x: torch.Tensor, mask: Optional[torch.Tensor] = None,
                cache: Optional[Tuple[torch.Tensor, torch.Tensor]] = None) -> torch.Tensor:
        r, cache = self.attn(self.attn_norm(x), mask, cache)
        x = x + r
        r = self.ff(self.ff_norm(x))
        ans = x + r
        return ans, cache


class mistral(nn.Module):
    """Full Mistral model; returns (logits, per-layer KV caches)."""

    def __init__(self, args="ModelArgs"):
        super().__init__()
        assert args.vocab_size > 0
        self.tok_embedding = nn.Embedding(args.vocab_size, args.dim)
        self.layer = nn.ModuleList([transformer(args) for _ in range(args.n_layers)])
        self.norm = RMSNorm(args.dim, args.norm_eps)
        self.out = nn.Linear(args.dim, args.vocab_size, bias=False)  # unembedding

    def gen_mask(self, s):
        """Additive causal mask: -inf strictly above the diagonal."""
        return torch.triu(torch.ones(s, s) * float("-inf"), diagonal=1)

    def forward(self, input: torch.Tensor, cache=None):
        # BUG FIX: the embedding attribute is `tok_embedding` (the original
        # called the nonexistent `self.token_embedding`).
        h = self.tok_embedding(input)
        m = None
        if h.shape[1] > 1:
            # BUG FIX: build the mask on the input's device, not "cuda".
            m = self.gen_mask(h.shape[1]).to(h.device)
        if cache is None:
            cache = [None] * len(self.layer)
        for e, layer in enumerate(self.layer):
            h, cache[e] = layer(h, m, cache[e])
        return self.out(self.norm(h)), cache
class NSP(nn.Module):
    """Next-sentence-prediction head: binary log-softmax over the [CLS] token."""
    def __init__(self, hidden):
        super().__init__()
        self.linear = nn.Linear(hidden, 2)

    def forward(self, x):
        # x[:, 0] is the [CLS] position of the encoder output.
        return F.log_softmax(self.linear(x[:, 0]), dim=-1)


class MLM(nn.Module):
    """Masked-language-model head: per-token log-softmax over the vocabulary."""
    def __init__(self, hidden, vocab_size):
        super().__init__()
        self.linear = nn.Linear(hidden, vocab_size)

    def forward(self, x):
        return F.log_softmax(self.linear(x), dim=-1)


class MLM_NSP(nn.Module):
    """Wraps a BERT encoder with joint MLM and NSP pre-training heads."""
    def __init__(self, BERT, vocab_size):
        super().__init__()
        # BUG FIX: the original unconditionally called `BERT()`, which raised
        # TypeError (BERT.__init__ requires vocab_size) and discarded any
        # already-built encoder the caller passed in.  Accept either an
        # instance or the encoder class itself.
        self.bert = BERT(vocab_size) if isinstance(BERT, type) else BERT
        self.mlm = MLM(self.bert.hidden, vocab_size)
        self.nsp = NSP(self.bert.hidden)

    def forward(self, x, seg):
        x = self.bert(x, seg)
        # Returns (mlm_log_probs, nsp_log_probs) — callers must unpack in
        # this order.
        return self.mlm(x), self.nsp(x)
class SingleHeadAttention(nn.Module):
    """Scaled dot-product attention: softmax(QK^T / sqrt(d_k)) V."""

    def forward(self, q, k, v, mask=None, dropout=None):
        # BUG FIX: q.size(-1) is a plain Python int; torch.sqrt() requires a
        # tensor and raised TypeError.  Scale with ** 0.5 instead.
        scores = torch.matmul(q, k.transpose(-2, -1)) / (q.size(-1) ** 0.5)
        # BUG FIX: the default was `mask=False` tested with `if mask:`, which
        # is ambiguous for a multi-element mask tensor.  None means "no mask".
        if mask is not None:
            scores = scores.masked_fill(mask == 0, -1e9)
        attn = F.softmax(scores, dim=-1)
        if dropout is not None:
            attn = dropout(attn)
        return torch.matmul(attn, v)


class MultiHeadAttention(nn.Module):
    """Projects q/k/v, runs h scaled-dot-product heads in parallel, and
    re-projects the concatenated result."""
    def __init__(self, h, d_model, dropout=0.1):
        super().__init__()
        assert d_model % h == 0
        self.h = h
        self.d_k = d_model // h
        self.attn = SingleHeadAttention()
        self.linear = nn.ModuleList([nn.Linear(d_model, d_model) for _ in range(3)])
        self.out_linear = nn.Linear(d_model, d_model)
        self.dropout = nn.Dropout(dropout)

    def forward(self, q, k, v, mask=None):
        bs = q.size(0)
        # (bs, seq, d_model) -> (bs, h, seq, d_k) for each of q, k, v.
        q, k, v = [l(x).view(bs, -1, self.h, self.d_k).transpose(1, 2)
                   for l, x in zip(self.linear, (q, k, v))]
        ans = self.attn(q, k, v, mask, dropout=self.dropout)
        # Merge the heads back: (bs, seq, h * d_k).
        res = ans.transpose(1, 2).contiguous().view(bs, -1, self.h * self.d_k)
        return self.out_linear(res)
class RoPE(nn.Module):
    """Rotary position embedding applied to interleaved even/odd feature pairs."""

    def __init__(self, d_model, max_seq_len=512):
        super().__init__()
        self.d_model = d_model
        self.max_seq_len = max_seq_len
        # theta_i = 10000^(-2i/d) for each even/odd feature pair.
        inv_freq = 10000.0 ** (-torch.arange(0, d_model, 2).float() / d_model)
        angles = torch.arange(0, max_seq_len).float()[:, None] * inv_freq[None, :]
        self.register_buffer('cos_pos', torch.cos(angles))
        self.register_buffer('sin_pos', torch.sin(angles))

    def forward(self, x):
        n = x.size(1)
        cos = self.cos_pos[:n].unsqueeze(0)
        sin = self.sin_pos[:n].unsqueeze(0)
        even = x[..., 0::2]
        odd = x[..., 1::2]
        # Standard 2D rotation of each (even, odd) pair by the position angle.
        out = torch.zeros_like(x)
        out[..., 0::2] = even * cos - odd * sin
        out[..., 1::2] = odd * cos + even * sin
        return out


class SegmentEmbedding(nn.Embedding):
    """Embeds sentence-A/B segment ids (3 entries, index 0 = padding)."""
    def __init__(self, embed_size=512):
        super().__init__(3, embed_size, padding_idx=0)


class TokenEmbedding(nn.Embedding):
    """Embeds vocabulary ids (index 0 = padding)."""
    def __init__(self, vocab_size, embed_size=512):
        super().__init__(vocab_size, embed_size, padding_idx=0)


class BERTEmbedding(nn.Module):
    """Token + segment embeddings with rotary position encoding and dropout."""
    def __init__(self, vocab_size, embed_size, dropout=0):
        super().__init__()
        self.token = TokenEmbedding(vocab_size, embed_size)
        self.segment = SegmentEmbedding(embed_size)
        self.rope = RoPE(embed_size)
        self.dropout = nn.Dropout(dropout)

    def forward(self, x, seg):
        summed = self.token(x) + self.segment(seg)
        return self.dropout(self.rope(summed))
class Optim():
    """Noam / inverse-sqrt warmup schedule wrapped around an optimizer.

    lr(step) = d_model^-0.5 * min(step^-0.5, step * warmup_steps^-1.5)
    """
    def __init__(self, optimizer, d_model, warmup_steps):
        self.optimizer = optimizer
        self._step = 0
        self.warmup_steps = warmup_steps
        self.init_lr = d_model ** (-0.5)

    def zero_grad(self):
        self.optimizer.zero_grad()

    def step(self):
        # BUG FIX: increment before computing the lr; the original updated
        # the lr while _step was still 0 on the first call.
        self._step += 1
        self.update_lr()
        self.optimizer.step()

    def update_lr(self):
        # BUG FIX: the original read `self.step` (the bound method) inside
        # the formula — a TypeError on every call — and multiplied the
        # warmup term by the step count twice.  This is the schedule from
        # "Attention Is All You Need", section 5.3.
        lr = self.init_lr * min(self._step ** (-0.5),
                                self._step * self.warmup_steps ** (-1.5))
        for param_group in self.optimizer.param_groups:
            param_group['lr'] = lr

    def load_state_dict(self, state_dict):
        self.optimizer.load_state_dict(state_dict)

    def state_dict(self):
        return self.optimizer.state_dict()


class Trainer:
    """Pre-trains a BERT encoder with joint MLM + NSP objectives."""
    def __init__(self, bert: "BERT", vocab_size: int, train_data: "torch.utils.data.DataLoader", test_data: "torch.utils.data.DataLoader", lr: float = 2e-4, beta=(0.9, 0.999), weight_decay: float = 0.01, warmup_steps=10000, cuda=True, device=None, log_freq=10):
        self.device = device
        if self.device is None:
            self.device = 'cuda' if cuda and torch.cuda.is_available() else 'cpu'
        # BUG FIX: bind the `bert` *argument*, not the imported BERT class —
        # the original stored the class, so `self.bert.hidden` raised
        # AttributeError and the caller's encoder was silently discarded.
        self.bert = bert
        self.model = MLM_NSP(self.bert, vocab_size).to(self.device)
        self.train_data = train_data
        self.test_data = test_data
        self.optim = Adam(self.model.parameters(), lr=lr, betas=beta, weight_decay=weight_decay)
        self.optim = Optim(self.optim, self.bert.hidden, warmup_steps)
        self.log_freq = log_freq
        # ignore_index=0 skips padding positions in the loss.
        self.lossfn = nn.NLLLoss(ignore_index=0)

    def train(self, epoch):
        self.iteration(epoch, self.train_data)

    def test(self, epoch):
        self.iteration(epoch, self.test_data, train=False)

    def iteration(self, epoch, data_loader, train=True):
        """Run one epoch over `data_loader`; backprop only when train=True."""
        str_code = 'train' if train else 'test'
        data_iter = tqdm.tqdm(enumerate(data_loader), f'{str_code} epoch {epoch}',
                              total=len(data_loader), bar_format='{l_bar}{bar:10}{r_bar}')
        avg_loss = 0.0
        corr, total = 0, 0
        for i, data in data_iter:
            data = {key: value.to(self.device) for key, value in data.items()}
            # BUG FIX: MLM_NSP.forward returns (mlm_logits, nsp_logits); the
            # original unpacked them in the opposite order, so each loss was
            # computed on the other head's output.
            mask_lm_output, next_sentence_output = self.model(data['bert_input'], data['segment_label'])
            next_loss = self.lossfn(next_sentence_output, data['is_next'])
            # NLLLoss over tokens wants (batch, vocab, seq): transpose dims 1/2.
            mask_loss = self.lossfn(mask_lm_output.transpose(1, 2), data['bert_label'])
            loss = next_loss + mask_loss
            if train:
                self.optim.zero_grad()
                loss.backward()
                self.optim.step()
            # BUG FIX: the key is 'is_next' (the original's 'is_nextl' typo
            # raised KeyError on the first batch).
            cor = next_sentence_output.argmax(dim=-1).eq(data['is_next']).sum().item()
            avg_loss += loss.item()
            corr += cor
            total += data['is_next'].nelement()
            # BUG FIX: report cumulative accuracy (corr/total); the original
            # divided the per-batch correct count by the cumulative total.
            data_iter.set_postfix({'epoch': epoch, 'loss': loss.item(), 'acc': corr / total})
        avg_loss /= len(data_loader)
        acc = corr / total
        print(f'{str_code} epoch {epoch} avg_loss={avg_loss} acc={acc}')

    def save(self, epoch, path):
        # TODO: checkpointing not implemented yet.
        pass
# Import necessary libraries
import torch
import torch.nn as nn
import numpy as np
from tqdm import tqdm
# BUG FIX: the function is `save_image` — the original imported the
# misspelled `save_imag`, an ImportError at startup.
from torchvision.utils import save_image, make_grid
from torchvision.datasets import MNIST
import torchvision.transforms as transforms
from torch.utils.data import DataLoader
from torch.optim import Adam

# Dataset location and device selection.
dataset_path = '~/datasets'
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')

# Hyperparameters.
batch = 100
x_dim = 784           # 28x28 MNIST images, flattened
hidden_dim = 400
latent_dim = 200
lr = 10e-3            # NOTE(review): 10e-3 == 1e-2; 1e-3 is the usual VAE lr — confirm intent
epochs = 30

mnist_transform = transforms.Compose([transforms.ToTensor()])

# BUG FIX: the DataLoader keyword is `pin_memory` — the original passed
# `pin_mem`, a TypeError when the loaders were constructed.
kwargs = {'num_workers': 1, 'pin_memory': True}

train_dataset = MNIST(dataset_path, transform=mnist_transform, train=True, download=True)
test_dataset = MNIST(dataset_path, transform=mnist_transform, train=False, download=True)

train_loader = DataLoader(dataset=train_dataset, batch_size=batch, shuffle=True, **kwargs)
test_loader = DataLoader(dataset=test_dataset, batch_size=batch, shuffle=False, **kwargs)


class Encoder(nn.Module):
    """Maps a flattened image to the mean and log-variance of q(z|x)."""
    def __init__(self, input_dim, hidden_dim, latent_dim):
        super(Encoder, self).__init__()
        self.FC_in1 = nn.Linear(input_dim, hidden_dim)
        self.FC_in2 = nn.Linear(hidden_dim, hidden_dim)
        self.FC_mean = nn.Linear(hidden_dim, latent_dim)
        self.FC_var = nn.Linear(hidden_dim, latent_dim)
        self.LeakyReLU = nn.LeakyReLU(0.2)
        # (Removed the original's redundant `self.training = True`:
        # nn.Module already manages the `training` flag via train()/eval().)

    def forward(self, x):
        h = self.LeakyReLU(self.FC_in2(self.LeakyReLU(self.FC_in1(x))))
        return self.FC_mean(h), self.FC_var(h)   # (mean, log-variance)


class Decoder(nn.Module):
    """Maps a latent sample back to pixel space with a sigmoid output."""
    def __init__(self, latent_dim, hidden_dim, output_dim):
        super(Decoder, self).__init__()
        self.FC_h1 = nn.Linear(latent_dim, hidden_dim)
        self.FC_h2 = nn.Linear(hidden_dim, hidden_dim)
        self.FC_out = nn.Linear(hidden_dim, output_dim)
        self.LeakyReLU = nn.LeakyReLU(0.2)

    def forward(self, x):
        h = self.LeakyReLU(self.FC_h2(self.LeakyReLU(self.FC_h1(x))))
        return torch.sigmoid(self.FC_out(h))


class VAE(nn.Module):
    """Variational autoencoder with the reparameterization trick."""
    def __init__(self, Encoder, Decoder):
        super(VAE, self).__init__()
        self.Encoder = Encoder
        self.Decoder = Decoder

    def reparams(self, mean, std):
        # z = mu + sigma * eps,  eps ~ N(0, I)
        e = torch.randn_like(std).to(device)
        return mean + std * e

    def forward(self, x):
        mean, logvar = self.Encoder(x)
        z = self.reparams(mean, torch.exp(0.5 * logvar))
        return self.Decoder(z), mean, logvar


encoder = Encoder(input_dim=x_dim, hidden_dim=hidden_dim, latent_dim=latent_dim)
decoder = Decoder(latent_dim=latent_dim, hidden_dim=hidden_dim, output_dim=x_dim)
model = VAE(Encoder=encoder, Decoder=decoder).to(device)


def loss_fn(x, x_h, mean, logvar):
    """Negative ELBO: reconstruction BCE (summed) + KL(q(z|x) || N(0, I))."""
    rep_loss = nn.functional.binary_cross_entropy(x_h, x, reduction='sum')
    KLD = -0.5 * torch.sum(1 + logvar - mean.pow(2) - logvar.exp())
    return rep_loss + KLD


optim = Adam(model.parameters(), lr=lr)

# Training loop
model.train()
for ep in range(epochs):
    tot_loss = 0
    for batch_idx, (x, _) in enumerate(train_loader):
        # view(-1, ...) also handles a short final batch safely.
        x = x.view(-1, x_dim).to(device)
        optim.zero_grad()
        x_h, mean, logvar = model(x)
        loss = loss_fn(x, x_h, mean, logvar)
        loss.backward()
        tot_loss += loss.item()
        optim.step()
    # BUG FIX: average over the number of training samples; the original
    # divided by batch_idx * batch, which under-counts by one full batch.
    print(f"Epoch {ep+1} | Loss : {tot_loss / len(train_loader.dataset)}")
class SkipgramDataset(Dataset):
    """(center word -> one-hot context word) training pairs for skip-gram."""

    def __init__(self, corpus, window_size, V):
        self.inputs = []
        self.outputs = []
        self.prepare_data(corpus, window_size, V)

    def prepare_data(self, corpus, window_size, V):
        """Emit one (center, context one-hot) pair per in-range context slot."""
        for words in corpus:
            n_words = len(words)
            for center, word in enumerate(words):
                lo = center - window_size
                hi = center + window_size + 1
                for ctx in range(lo, hi):
                    if ctx == center or not (0 <= ctx < n_words):
                        continue
                    self.inputs.append(word)
                    one_hot = torch.zeros(V)
                    one_hot[words[ctx]] = 1
                    self.outputs.append(one_hot)

    def __len__(self):
        return len(self.inputs)

    def __getitem__(self, idx):
        return torch.tensor(self.inputs[idx], dtype=torch.long), self.outputs[idx]


class CBOWDataset(Dataset):
    """(context window -> one-hot center word) training pairs for CBOW.

    Out-of-range context slots are filled with index 0 (the padding id).
    """

    def __init__(self, corpus, window_size, V):
        self.inputs = []
        self.outputs = []
        self.prepare_data(corpus, window_size, V)

    def prepare_data(self, corpus, window_size, V):
        for sentence in corpus:
            n_words = len(sentence)
            for center, word in enumerate(sentence):
                context = [
                    sentence[pos] if 0 <= pos < n_words else 0
                    for pos in range(center - window_size, center + window_size + 1)
                    if pos != center
                ]
                self.inputs.append(context)
                one_hot = torch.zeros(V)
                one_hot[word] = 1
                self.outputs.append(one_hot)

    def __len__(self):
        return len(self.inputs)

    def __getitem__(self, idx):
        return torch.tensor(self.inputs[idx], dtype=torch.long), self.outputs[idx]
class CBOWModel(nn.Module):
    """CBOW word2vec: mean of context embeddings -> linear vocabulary scores."""
    def __init__(self, vocab_size, embedding_dim):
        super(CBOWModel, self).__init__()
        self.embeddings = nn.Embedding(vocab_size, embedding_dim)
        self.linear = nn.Linear(embedding_dim, vocab_size)
        nn.init.xavier_uniform_(self.embeddings.weight)
        nn.init.xavier_uniform_(self.linear.weight)

    def forward(self, x):
        x = self.embeddings(x)       # (batch, context, dim)
        x = torch.mean(x, dim=1)     # average the context window
        x = self.linear(x)
        return x

    def get_weights(self):
        return [self.embeddings.weight.detach().cpu().numpy()]


def embed(word, embedding, tokenizer):
    """Return the embedding-matrix row for `word`.

    PERF FIX: the original built a V-sized one-hot vector and multiplied it
    through the embedding matrix (O(V*d) per lookup); indexing the row
    directly is numerically identical and O(d).
    """
    idx = tokenizer.texts_to_sequences([word])[0][0]
    return embedding[idx]


def find_similar_words(target_word, embedding, tokenizer, top_n=10):
    """Return the `top_n` vocabulary words closest to `target_word` by cosine
    distance, as (word, distance) pairs sorted ascending; or an error string
    when the word is out of vocabulary."""
    if target_word not in tokenizer.word_index:
        return f"Word '{target_word}' not found in vocabulary"

    target_word_vector = embed(target_word, embedding, tokenizer)
    all_distances = {}
    for word, idx in tokenizer.word_index.items():
        if word == target_word:
            continue
        # PERF FIX: direct row lookup per word instead of a one-hot matmul —
        # the original was O(V^2 * d) over the whole vocabulary.
        all_distances[word] = cosine(target_word_vector, embedding[idx])

    return sorted(all_distances.items(), key=lambda kv: kv[1])[:top_n]
corpus = tokenizer.texts_to_sequences(corpus)
V = len(tokenizer.word_index) + 1

# PERF FIX: pick the device once — the original re-evaluated
# `torch.cuda.is_available()` for every tensor transfer inside the loops.
device = 'cuda' if torch.cuda.is_available() else 'cpu'


def _train_and_export(model, dataset, tag, dim):
    """Train `model` on `dataset` for 50 epochs, then save a checkpoint and a
    plain-text word-vector file (deduplicates the two identical loops the
    original script contained; observable behavior is unchanged)."""
    model.to(device)
    dataloader = DataLoader(dataset, batch_size=64, shuffle=True)
    optimizer = optim.Adam(model.parameters())
    criterion = nn.CrossEntropyLoss()
    for epoch in range(50):
        total_loss = 0
        for inputs, targets in dataloader:
            inputs = inputs.to(device)
            targets = targets.to(device)
            optimizer.zero_grad()
            outputs = model(inputs)
            loss = criterion(outputs, targets)
            loss.backward()
            optimizer.step()
            total_loss += loss.item()
        print(f"Epoch {epoch+1}/50, Loss: {total_loss/len(dataloader):.4f}")
    torch.save(model.state_dict(), f"{tag}_model_{dim}.pth")
    embedding = model.get_weights()[0]
    with open(f"vectors_{tag}_{dim}.txt", "w") as f:
        columns = ["word"] + [f"value_{i+1}" for i in range(embedding.shape[1])]
        f.write(" ".join(columns) + "\n")
        for word, i in tokenizer.word_index.items():
            f.write(word + " " + " ".join(map(str, list(embedding[i, :]))) + "\n")


skipgram_models = []
for dim in dims:
    print(f"Training Skipgram model with {dim} dimensions")
    model = SkipgramModel(V, dim)
    _train_and_export(model, SkipgramDataset(corpus, window_size, V), "skipgram", dim)
    skipgram_models.append(model)

cbow_models = []
for dim in dims:
    print(f"Training CBOW model with {dim} dimensions")
    model = CBOWModel(V, dim)
    _train_and_export(model, CBOWDataset(corpus, window_size, V), "cbow", dim)
    cbow_models.append(model)

all_models = skipgram_models + cbow_models
model_names = [f"skipgram_{dim}" for dim in dims] + [f"cbow_{dim}" for dim in dims]
embeddings = [m.get_weights()[0] for m in all_models]

# Interactive nearest-neighbor lookup over every trained embedding.
while True:
    query_word = input("Enter a word to find similar words (or 'exit' to quit): ")
    if query_word.lower() == 'exit':
        break

    for model_name, embedding in zip(model_names, embeddings):
        print(f"\nModel: {model_name}")
        similar = find_similar_words(query_word, embedding, tokenizer)
        if isinstance(similar, str):
            print(similar)
        else:
            print(f"Top 10 words similar to '{query_word}':")
            for word, similarity in similar:
                # `similar` holds cosine *distances*; 1 - distance = similarity.
                print(f"{word}: {1-similarity:.4f}")
        print("-" * 40)