├── README.md ├── .gitignore ├── DL ├── Neural-Network.ipynb ├── ResNet.ipynb ├── Adaptive-Pool.ipynb ├── Kaiming-Initialization.ipynb ├── RNN.ipynb ├── Neural-Network-Input-Vector.ipynb └── Batch-Norm.ipynb ├── ML ├── Cross-Entropy-Loss.ipynb ├── Logistic-Regression.ipynb ├── F1-Score.ipynb ├── Decision-Tree.ipynb └── KNN.ipynb └── Math ├── Bayesian-Stats.ipynb ├── Multivariable-Calculus.ipynb ├── Entropy.ipynb └── Graph-Theory.ipynb /README.md: -------------------------------------------------------------------------------- 1 | # from-scratch 2 | You do not fully understand something if you can't build it from scratch. 3 | 4 | 5 | https://fs.blog/2018/04/first-principles/ 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | target/ 76 | 77 | # Jupyter Notebook 78 | .ipynb_checkpoints 79 | 80 | # IPython 81 | profile_default/ 82 | ipython_config.py 83 | 84 | # pyenv 85 | .python-version 86 | 87 | # pipenv 88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 91 | # install all needed dependencies. 92 | #Pipfile.lock 93 | 94 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 95 | __pypackages__/ 96 | 97 | # Celery stuff 98 | celerybeat-schedule 99 | celerybeat.pid 100 | 101 | # SageMath parsed files 102 | *.sage.py 103 | 104 | # Environments 105 | .env 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /DL/Neural-Network.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Neural-Network\n", 8 | "\n", 9 | "A very simple feed-forward NN from scratch." 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 1, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "import numpy as np\n", 19 | "import scipy.special\n", 20 | "import matplotlib.pyplot\n", 21 | "%matplotlib inline" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": 2, 27 | "metadata": {}, 28 | "outputs": [], 29 | "source": [ 30 | "class NN:\n", 31 | " def __init__(self, i, h, o, lr):\n", 32 | " self.i = i\n", 33 | " self.h = h\n", 34 | " self.h = o\n", 35 | " self.lr = lr\n", 36 | " self.wh = np.random.normal(0.0, pow(self.i, -0.5), (self.h, self.i))\n", 37 | " self.wo = np.random.normal(0.0, pow(self.h, -0.5), (self.o, self.h))\n", 38 | " \n", 39 | " self.sigmoid = lambda x: scipy.special.expit(x)\n", 40 | " \n", 41 | " def train(self, x, y):\n", 42 | " x = np.array(x, ndmin=2).T\n", 43 | " y = np.array(y, ndmin=2).T\n", 44 | " hi = np.dot(self.wh, x)\n", 45 | " ho = self.sigmoid(hi)\n", 46 | " oi = np.dot(self.ho, hi)\n", 47 | " oo = self.aktivierungsfunktion(oi)\n", 48 | " err = y - oo\n", 49 | " h_err = np.dot(self.wo.T, err)\n", 50 | " \n", 51 | " self.wo += self.lr * np.dot((err * oo * (1.0 - oo)), np.transpose(ho))\n", 52 | " self.wh += self.lr * np.dot((h_err * ho * (1.0 - ho)), np.transpose(x))\n", 53 | " \n", 54 | " def predict(self, x):\n", 55 | " x = np.array(x, ndmin=2).T\n", 56 | " hi = np.dot(self.wh, x)\n", 57 | " ho = self.sigmoid(hi)\n", 58 | " oi = np.dot(self.wo, ho)\n", 59 | " oo = self.sigmoid(oi)\n", 60 | " return oo" 61 | ] 62 | } 63 | ], 64 | "metadata": { 65 | "kernelspec": { 66 | "display_name": "Python 3", 67 | "language": "python", 68 | "name": "python3" 69 | }, 70 | "language_info": { 71 | "codemirror_mode": { 72 | "name": "ipython", 73 | "version": 3 74 | }, 75 | "file_extension": ".py", 76 | "mimetype": "text/x-python", 77 | "name": "python", 78 | "nbconvert_exporter": "python", 79 | "pygments_lexer": "ipython3", 80 | "version": "3.6.9" 81 | } 82 | }, 83 | "nbformat": 4, 84 | "nbformat_minor": 2 85 | } 86 | -------------------------------------------------------------------------------- /ML/Cross-Entropy-Loss.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "attachments": {}, 5 | "cell_type": "markdown", 6 | "metadata": {}, 7 | "source": [ 8 | "## Cross Entropy Loss\n", 9 | "\n", 10 | "Cross-entropy loss, or log loss, measures the performance of a classification model whose output is a probability value between 0 and 1. 
Cross-entropy loss increases as the predicted probability diverges from the actual label. So predicting a probability of .012 when the actual observation label is 1 would be bad and result in a high loss value. A perfect model would have a log loss of 0." 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 6, 16 | "metadata": {}, 17 | "outputs": [], 18 | "source": [ 19 | "import numpy as np" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 25, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "prediction = [1e-5, 1e-5, 0.999]" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 26, 34 | "metadata": {}, 35 | "outputs": [], 36 | "source": [ 37 | "target = [0, 0, 1]" 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 27, 43 | "metadata": {}, 44 | "outputs": [], 45 | "source": [ 46 | "def ce(prediction, target):\n", 47 | " l = 0\n", 48 | " for j in range(len(prediction)):\n", 49 | " t = target[j]\n", 50 | " p = prediction[j]\n", 51 | " \n", 52 | " if t == 0: l += np.log(1-p)\n", 53 | " else : l += np.log(p)\n", 54 | " return -1 * l" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": 28, 60 | "metadata": {}, 61 | "outputs": [ 62 | { 63 | "data": { 64 | "text/plain": [ 65 | "0.00102050043358411" 66 | ] 67 | }, 68 | "execution_count": 28, 69 | "metadata": {}, 70 | "output_type": "execute_result" 71 | } 72 | ], 73 | "source": [ 74 | "ce(i, t)" 75 | ] 76 | } 77 | ], 78 | "metadata": { 79 | "kernelspec": { 80 | "display_name": "Python 3", 81 | "language": "python", 82 | "name": "python3" 83 | }, 84 | "language_info": { 85 | "codemirror_mode": { 86 | "name": "ipython", 87 | "version": 3 88 | }, 89 | "file_extension": ".py", 90 | "mimetype": "text/x-python", 91 | "name": "python", 92 | "nbconvert_exporter": "python", 93 | "pygments_lexer": "ipython3", 94 | "version": "3.7.4" 95 | } 96 | }, 97 | "nbformat": 4, 98 | "nbformat_minor": 2 99 | } 100 | -------------------------------------------------------------------------------- /DL/ResNet.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## ResNet\n", 8 | "\n", 9 | "**Paper:** https://arxiv.org/abs/1512.03385 (Kaiming He is everywhere!)\n", 10 | "\n", 11 | "The big idea in ResNet was the residual block. In the res block we do some calculations and at the end we add the identity. This redifines the problem itself. Now we are trying to calculate residual functions with the respect to the input. Not an unreferenced function.\n", 12 | "\n", 13 | "F(x) = F(x) + x\n", 14 | "\n", 15 | "This help with the vanishing gradient problem." 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": 4, 21 | "metadata": {}, 22 | "outputs": [], 23 | "source": [ 24 | "import torch \n", 25 | "import torch.nn as nn" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 79, 31 | "metadata": {}, 32 | "outputs": [ 33 | { 34 | "data": { 35 | "text/plain": [ 36 | "torch.Size([1, 3, 28, 28])" 37 | ] 38 | }, 39 | "execution_count": 79, 40 | "metadata": {}, 41 | "output_type": "execute_result" 42 | } 43 | ], 44 | "source": [ 45 | "x = torch.randn(1,3,28,28)\n", 46 | "x.shape" 47 | ] 48 | }, 49 | { 50 | "cell_type": "markdown", 51 | "metadata": {}, 52 | "source": [ 53 | "The important thing to note here is that the size of x after conv1 and conv2 have to match the identity. 
To ensure this we use padding in the convolution. Otherwise the addition would fail because the shapes don't match up." 54 | ] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "execution_count": 84, 59 | "metadata": {}, 60 | "outputs": [], 61 | "source": [ 62 | "class ResBlock(nn.Module):\n", 63 | " def __init__(self):\n", 64 | " super(ResBlock, self).__init__()\n", 65 | " self.conv1 = nn.Conv2d(3,9,3,padding=1)\n", 66 | " self.conv2 = nn.Conv2d(9,3,3,padding=1)\n", 67 | " \n", 68 | " def forward(self, x):\n", 69 | " identity = x\n", 70 | " x = torch.relu(self.conv1(x))\n", 71 | " x = self.conv2(x)\n", 72 | " x = torch.relu(x + identity)\n", 73 | " return x" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": 85, 79 | "metadata": {}, 80 | "outputs": [], 81 | "source": [ 82 | "rb = ResBlock()" 83 | ] 84 | }, 85 | { 86 | "cell_type": "code", 87 | "execution_count": 86, 88 | "metadata": {}, 89 | "outputs": [], 90 | "source": [ 91 | "y = rb(x)" 92 | ] 93 | }, 94 | { 95 | "cell_type": "code", 96 | "execution_count": 83, 97 | "metadata": {}, 98 | "outputs": [ 99 | { 100 | "data": { 101 | "text/plain": [ 102 | "torch.Size([1, 3, 28, 28])" 103 | ] 104 | }, 105 | "execution_count": 83, 106 | "metadata": {}, 107 | "output_type": "execute_result" 108 | } 109 | ], 110 | "source": [ 111 | "y.shape" 112 | ] 113 | } 114 | ], 115 | "metadata": { 116 | "kernelspec": { 117 | "display_name": "Python 3", 118 | "language": "python", 119 | "name": "python3" 120 | }, 121 | "language_info": { 122 | "codemirror_mode": { 123 | "name": "ipython", 124 | "version": 3 125 | }, 126 | "file_extension": ".py", 127 | "mimetype": "text/x-python", 128 | "name": "python", 129 | "nbconvert_exporter": "python", 130 | "pygments_lexer": "ipython3", 131 | "version": "3.7.4" 132 | } 133 | }, 134 | "nbformat": 4, 135 | "nbformat_minor": 2 136 | } 137 | -------------------------------------------------------------------------------- /DL/Adaptive-Pool.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Adaptive Pooling\n", 8 | "\n", 9 | "Adaptive average/max pooling is great when you have a pre-trained backbone and want to build a custom head. Because the output of the backbone layer could change you want to resize it to specific dimension so you can handle any input to your head." 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 91, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "import torch\n", 19 | "import torch.nn as nn\n", 20 | "import torch.nn.functional as F\n", 21 | "import torchvision.models as models" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": 92, 27 | "metadata": {}, 28 | "outputs": [], 29 | "source": [ 30 | "alexnet = models.mobilenet_v2(pretrained=True)" 31 | ] 32 | }, 33 | { 34 | "cell_type": "markdown", 35 | "metadata": {}, 36 | "source": [ 37 | "Here we remove the classification layer of the pretrained AlexNet. We only use AlexNet as a feature extractor. So we will remove the fully connected layer at the top of the model. Now the final layer of AlexNet is the last conv layer." 
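If the pretrained network is only meant to act as a fixed feature extractor, its weights can also be excluded from gradient updates. A small optional sketch, using the pretrained model loaded above:

```python
# Optional: freeze the pretrained weights so only the new head gets trained.
for p in alexnet.parameters():
    p.requires_grad = False
```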
38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 93, 43 | "metadata": {}, 44 | "outputs": [], 45 | "source": [ 46 | "backbone = nn.Sequential(list(alexnet.children())[0])" 47 | ] 48 | }, 49 | { 50 | "cell_type": "markdown", 51 | "metadata": {}, 52 | "source": [ 53 | "Print out the last layers:" 54 | ] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "execution_count": 111, 59 | "metadata": {}, 60 | "outputs": [ 61 | { 62 | "data": { 63 | "text/plain": [ 64 | "ConvBNReLU(\n", 65 | " (0): Conv2d(320, 1280, kernel_size=(1, 1), stride=(1, 1), bias=False)\n", 66 | " (1): BatchNorm2d(1280, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True)\n", 67 | " (2): ReLU6(inplace=True)\n", 68 | ")" 69 | ] 70 | }, 71 | "execution_count": 111, 72 | "metadata": {}, 73 | "output_type": "execute_result" 74 | } 75 | ], 76 | "source": [ 77 | "backbone[-1][-1]" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": 96, 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [ 86 | "x = torch.randn(3,3,280,280)" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": 101, 92 | "metadata": {}, 93 | "outputs": [ 94 | { 95 | "data": { 96 | "text/plain": [ 97 | "torch.Size([3, 1280, 9, 9])" 98 | ] 99 | }, 100 | "execution_count": 101, 101 | "metadata": {}, 102 | "output_type": "execute_result" 103 | } 104 | ], 105 | "source": [ 106 | "y = backbone(x)\n", 107 | "y.shape" 108 | ] 109 | }, 110 | { 111 | "cell_type": "markdown", 112 | "metadata": {}, 113 | "source": [ 114 | "## Head\n", 115 | "\n", 116 | "The first layer of our head will be the nn.AdaptiveAvgPool3d. Here we can define the output we want. In this case we want the following output:\n", 117 | "\n", 118 | "**(C, 10, 10, 10)**\n", 119 | "\n", 120 | "Remember that a pooling layer has no parameters. So we don't have to worry about initializing weights or anything." 
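As an aside, for a typical classification head the 2-D variant is more common: `nn.AdaptiveAvgPool2d((1, 1))` collapses every feature map to a single value no matter what the input resolution was. A minimal sketch, assuming the backbone output `y` of shape `(3, 1280, 9, 9)` from above:

```python
pool = nn.AdaptiveAvgPool2d((1, 1))   # fix the spatial output size to 1x1
pooled = pool(y)                      # (3, 1280, 1, 1), independent of the input H/W
flat = torch.flatten(pooled, 1)       # (3, 1280), ready for a nn.Linear classifier
```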
121 | ] 122 | }, 123 | { 124 | "cell_type": "code", 125 | "execution_count": 106, 126 | "metadata": {}, 127 | "outputs": [], 128 | "source": [ 129 | "class Head(nn.Module):\n", 130 | " def __init__(self):\n", 131 | " super(Head, self).__init__()\n", 132 | " self.aap = nn.AdaptiveAvgPool3d((10,10,10))\n", 133 | " \n", 134 | " def forward(self, x):\n", 135 | " return self.aap(x)" 136 | ] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "execution_count": 107, 141 | "metadata": {}, 142 | "outputs": [], 143 | "source": [ 144 | "h = Head()" 145 | ] 146 | }, 147 | { 148 | "cell_type": "code", 149 | "execution_count": 108, 150 | "metadata": {}, 151 | "outputs": [ 152 | { 153 | "data": { 154 | "text/plain": [ 155 | "torch.Size([3, 10, 10, 10])" 156 | ] 157 | }, 158 | "execution_count": 108, 159 | "metadata": {}, 160 | "output_type": "execute_result" 161 | } 162 | ], 163 | "source": [ 164 | "h(y).shape" 165 | ] 166 | } 167 | ], 168 | "metadata": { 169 | "kernelspec": { 170 | "display_name": "Python 3", 171 | "language": "python", 172 | "name": "python3" 173 | }, 174 | "language_info": { 175 | "codemirror_mode": { 176 | "name": "ipython", 177 | "version": 3 178 | }, 179 | "file_extension": ".py", 180 | "mimetype": "text/x-python", 181 | "name": "python", 182 | "nbconvert_exporter": "python", 183 | "pygments_lexer": "ipython3", 184 | "version": "3.7.4" 185 | } 186 | }, 187 | "nbformat": 4, 188 | "nbformat_minor": 2 189 | } 190 | -------------------------------------------------------------------------------- /Math/Bayesian-Stats.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import numpy as np" 10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "from http://www.greenteapress.com/thinkbayes/thinkbayes.pdf" 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "metadata": {}, 22 | "source": [ 23 | "## 1. Bayes's Theorem" 24 | ] 25 | }, 26 | { 27 | "cell_type": "markdown", 28 | "metadata": { 29 | "jupyter": { 30 | "source_hidden": true 31 | } 32 | }, 33 | "source": [ 34 | "## 1.1 Conditional Prob\n", 35 | "\n", 36 | "A conditional prob is a prob based on some background info. \n", 37 | "\n", 38 | "$P(A|B)$, Prob A given B.\n", 39 | "\n", 40 | "## 1.2 Cojoint Prob\n", 41 | "\n", 42 | "Fancy way to say the prob that two things are true.\n", 43 | "\n", 44 | "$P(A,B)$\n", 45 | "\n", 46 | "$P(A,B) = P(A)P(B|A)$\n", 47 | "\n", 48 | "If A and B are independent:\n", 49 | "\n", 50 | "$P(A,B) = P(A)*P(B)$\n", 51 | "\n", 52 | "## 1.3 Bayes Theorem\n", 53 | "\n", 54 | "Lets derive it from 1.1 and 1.2. 
We know that:\n", 55 | "\n", 56 | "$P(A,B)=P(B,A)$\n", 57 | "\n", 58 | "$P(A,B) = P(A)P(B|A)$\n", 59 | "\n", 60 | "$P(B,A) = P(B)P(A|B)$\n", 61 | "\n", 62 | "then:\n", 63 | "\n", 64 | "$P(A)P(B|A) = P(B)P(A|B)$ | Lets divide by $P(B|A)\n", 65 | "\n", 66 | "=> $P(B|A) = \\dfrac{P(B)P(A|B)}{P(A)}$" 67 | ] 68 | }, 69 | { 70 | "cell_type": "markdown", 71 | "metadata": { 72 | "jupyter": { 73 | "source_hidden": true 74 | } 75 | }, 76 | "source": [ 77 | "## 1.4 The diachronic interpretation\n", 78 | "\n", 79 | "Another way to think about BT is that it gives us a way to update the prob of a hypothesis H in light of some data D.\n", 80 | "\n", 81 | "Btw \"Diachronic\" means that something is happening over time.\n", 82 | "\n", 83 | "$P(H|D) = \\dfrac{P(H)P(D|H)}{P(D)}$\n", 84 | "\n", 85 | "- **prior** p(H): Prob of Hypothesis before we see the data.\n", 86 | "- **posterior** p(H|D): Prob of Hypothesis after we see the data.\n", 87 | "- **likelihood** p(D|H): Prob of the data under the hypothesis.\n", 88 | "- **normalizing costant** p(D): Prob of the data under any hypothesis." 89 | ] 90 | }, 91 | { 92 | "cell_type": "markdown", 93 | "metadata": {}, 94 | "source": [ 95 | "## 2. Computational Stats" 96 | ] 97 | }, 98 | { 99 | "cell_type": "markdown", 100 | "metadata": { 101 | "jupyter": { 102 | "source_hidden": true 103 | } 104 | }, 105 | "source": [ 106 | "## 2.1 Distributions" 107 | ] 108 | }, 109 | { 110 | "cell_type": "code", 111 | "execution_count": 2, 112 | "metadata": { 113 | "jupyter": { 114 | "source_hidden": true 115 | } 116 | }, 117 | "outputs": [], 118 | "source": [ 119 | "from thinkbayes import Pmf" 120 | ] 121 | }, 122 | { 123 | "cell_type": "code", 124 | "execution_count": 3, 125 | "metadata": { 126 | "jupyter": { 127 | "source_hidden": true 128 | } 129 | }, 130 | "outputs": [], 131 | "source": [ 132 | "pmf = Pmf()\n", 133 | "for x in [1,2,3,4,5,6]:\n", 134 | " pmf.Set(x, 1/6)" 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": 11, 140 | "metadata": { 141 | "jupyter": { 142 | "source_hidden": true 143 | } 144 | }, 145 | "outputs": [], 146 | "source": [ 147 | "pmf = Pmf()\n", 148 | "pmf.Set('Bowl 1', 0.5)\n", 149 | "pmf.Set('Bowl 2', 0.5)" 150 | ] 151 | }, 152 | { 153 | "cell_type": "code", 154 | "execution_count": 12, 155 | "metadata": { 156 | "jupyter": { 157 | "source_hidden": true 158 | } 159 | }, 160 | "outputs": [], 161 | "source": [ 162 | "pmf.Mult('Bowl 1', 0.75)\n", 163 | "pmf.Mult('Bowl 2', 0.5)\n", 164 | "pmf.Normalize();" 165 | ] 166 | }, 167 | { 168 | "cell_type": "code", 169 | "execution_count": 13, 170 | "metadata": { 171 | "collapsed": true, 172 | "jupyter": { 173 | "outputs_hidden": true 174 | } 175 | }, 176 | "outputs": [ 177 | { 178 | "data": { 179 | "text/plain": [ 180 | "0.6000000000000001" 181 | ] 182 | }, 183 | "execution_count": 13, 184 | "metadata": {}, 185 | "output_type": "execute_result" 186 | } 187 | ], 188 | "source": [ 189 | "pmf.Prob('Bowl 1')" 190 | ] 191 | } 192 | ], 193 | "metadata": { 194 | "kernelspec": { 195 | "display_name": "Python 3", 196 | "language": "python", 197 | "name": "python3" 198 | }, 199 | "language_info": { 200 | "codemirror_mode": { 201 | "name": "ipython", 202 | "version": 3 203 | }, 204 | "file_extension": ".py", 205 | "mimetype": "text/x-python", 206 | "name": "python", 207 | "nbconvert_exporter": "python", 208 | "pygments_lexer": "ipython3", 209 | "version": "3.6.9" 210 | } 211 | }, 212 | "nbformat": 4, 213 | "nbformat_minor": 4 214 | } 215 | 
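The same update can also be done without the `thinkbayes` dependency. A minimal sketch of the bowl example above in plain Python, assuming the same prior (0.5 / 0.5) and likelihoods (0.75 / 0.5) passed to `Pmf` above:

```python
# Posterior computation for the two-bowl example, without thinkbayes.
prior = {'Bowl 1': 0.5, 'Bowl 2': 0.5}        # P(H)
likelihood = {'Bowl 1': 0.75, 'Bowl 2': 0.5}  # P(D|H)

unnormalized = {h: prior[h] * likelihood[h] for h in prior}
norm = sum(unnormalized.values())             # P(D), the normalizing constant
posterior = {h: unnormalized[h] / norm for h in unnormalized}

print(posterior['Bowl 1'])   # 0.6, matching pmf.Prob('Bowl 1') above
```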
-------------------------------------------------------------------------------- /Math/Multivariable-Calculus.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Multivariable Calculus\n", 8 | "\n", 9 | "One variable functions are boring!\n", 10 | "\n", 11 | "The 3blue1brown guy:\n", 12 | "https://www.youtube.com/watch?v=TrcCbdWwCBc&list=PLSQl0a2vh4HC5feHa6Rc5c0wbRTx56nF7 " 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": 7, 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "from IPython.display import Image\n", 22 | "from IPython.core.display import HTML" 23 | ] 24 | }, 25 | { 26 | "cell_type": "markdown", 27 | "metadata": {}, 28 | "source": [ 29 | "### Scalar Field\n", 30 | "\n", 31 | "Associates a scalar value to every point in a space.\n", 32 | "\n", 33 | "This is a scalar field:\n", 34 | "\n", 35 | "$f(x,y) = x^2 + y$" 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": 9, 41 | "metadata": {}, 42 | "outputs": [ 43 | { 44 | "data": { 45 | "text/html": [ 46 | "" 47 | ], 48 | "text/plain": [ 49 | "" 50 | ] 51 | }, 52 | "execution_count": 9, 53 | "metadata": {}, 54 | "output_type": "execute_result" 55 | } 56 | ], 57 | "source": [ 58 | "Image(url=\"https://www.researchgate.net/profile/Miguel_Negrao/publication/229038491/figure/fig4/AS:300840888881156@1448737465473/An-example-of-a-scalar-field-used-in-the-Sine-Field-system.png\")" 59 | ] 60 | }, 61 | { 62 | "cell_type": "markdown", 63 | "metadata": {}, 64 | "source": [ 65 | "### Vector Field\n", 66 | "\n", 67 | "Associates a vector to every point in a space.\n", 68 | "\n", 69 | "This is a vector field:\n", 70 | "\n", 71 | "$f(x,y) = [x^2, y]$" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": 10, 77 | "metadata": {}, 78 | "outputs": [ 79 | { 80 | "data": { 81 | "text/html": [ 82 | "" 83 | ], 84 | "text/plain": [ 85 | "" 86 | ] 87 | }, 88 | "execution_count": 10, 89 | "metadata": {}, 90 | "output_type": "execute_result" 91 | } 92 | ], 93 | "source": [ 94 | "Image(url=\"https://upload.wikimedia.org/wikipedia/commons/thumb/b/b9/VectorField.svg/250px-VectorField.svg.png\")" 95 | ] 96 | }, 97 | { 98 | "cell_type": "markdown", 99 | "metadata": {}, 100 | "source": [ 101 | "### Partial Derivative\n", 102 | "\n", 103 | "A partial derivative of a function of several variables is its derivative with respect to one of those variables, with the others held constant.\n", 104 | "\n", 105 | "Example:\n", 106 | "\n", 107 | "$f(x,y) = x^2 + y$\n", 108 | "\n", 109 | "Then the derivative with respect to $x$ or $\\frac{\\partial f}{\\partial x}$ is:\n", 110 | "\n", 111 | "$\\frac{\\partial f}{\\partial x} = 2x$" 112 | ] 113 | }, 114 | { 115 | "cell_type": "markdown", 116 | "metadata": {}, 117 | "source": [ 118 | "### Gradient\n", 119 | "\n", 120 | "The gradient of a scalar-valued multivariable function ∇f packages all its partial derivative information into a vector.\n", 121 | "\n", 122 | "For the example above:\n", 123 | "\n", 124 | "$∇f = [\\frac{\\partial f}{\\partial x}, \\frac{\\partial f}{\\partial y}]$\n", 125 | "\n", 126 | "The gradient is a vector; it points in the direction of steepest ascent." 127 | ] 128 | }, 129 | { 130 | "cell_type": "markdown", 131 | "metadata": {}, 132 | "source": [ 133 | "### Directional derivative\n", 134 | "\n", 135 | "The derivative in the direction of a vector. 
\n", 136 | "If you move in that vector direction what happens to the output?\n", 137 | "\n", 138 | "The directional derivative is a scalar; it is the rate of change when a point moves in that direction. \n", 139 | "\n", 140 | "Definition:\n", 141 | "\n", 142 | "The directional derivative of a scalar function\n", 143 | "\n", 144 | "${\\displaystyle f(\\mathbf {x} )=f(x_{1},x_{2},\\ldots ,x_{n})}$\n", 145 | "\n", 146 | "along a vector \n", 147 | "\n", 148 | "${\\displaystyle \\mathbf {v} =(v_{1},\\ldots ,v_{n})}$\n", 149 | "\n", 150 | "is\n", 151 | "\n", 152 | "${\\displaystyle \\nabla _{\\mathbf {v} }{f}(\\mathbf {x} )=\\lim _{h\\rightarrow 0}{\\frac {f(\\mathbf {x} +h\\mathbf {v} )-f(\\mathbf {x} )}{h}}.}$\n", 153 | "\n", 154 | "or \n", 155 | "\n", 156 | "${\\displaystyle \\nabla _{\\mathbf {v} }{f}(\\mathbf {x} )=\\nabla f(\\mathbf {x} )\\cdot \\mathbf {v} }$ | This is a dot product." 157 | ] 158 | } 159 | ], 160 | "metadata": { 161 | "kernelspec": { 162 | "display_name": "Python 3", 163 | "language": "python", 164 | "name": "python3" 165 | }, 166 | "language_info": { 167 | "codemirror_mode": { 168 | "name": "ipython", 169 | "version": 3 170 | }, 171 | "file_extension": ".py", 172 | "mimetype": "text/x-python", 173 | "name": "python", 174 | "nbconvert_exporter": "python", 175 | "pygments_lexer": "ipython3", 176 | "version": "3.6.9" 177 | } 178 | }, 179 | "nbformat": 4, 180 | "nbformat_minor": 4 181 | } 182 | -------------------------------------------------------------------------------- /Math/Entropy.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Entropy\n", 8 | "\n", 9 | "Information entropy is the average rate at which information is produced by a stochastic source. But what does that mean exactly?\n", 10 | "\n", 11 | "A stochastic source is a random process like a coin flip. Entropy is a measure on how much information we get from this process. But what is information exactly? \n", 12 | "\n", 13 | "Information theory defines information as the amount of surprise we get from something. If something happens that has a very low probability of happening (it's surprising) it contains a lot of information. If on the other hand something very probable happens, it doesn't tell us a lot. With this in mind. What event contains more information?\n", 14 | "\n", 15 | "1) It is sunny --> 90%\n", 16 | "\n", 17 | "2) It is raining --> 10%\n", 18 | "\n", 19 | "Of course event 2 (when it happens) has much more information. 
How can we encode this mathematically?\n", 20 | "\n", 21 | "# Information\n", 22 | "\n", 23 | "The information of something is given by its probability, like this:\n", 24 | "\n", 25 | "$\\mathrm{I}(p) = -\\log(p)$\n", 26 | "\n", 27 | "Lets return to our weather example and calculate the information of each event:" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": 1, 33 | "metadata": {}, 34 | "outputs": [], 35 | "source": [ 36 | "from math import log2" 37 | ] 38 | }, 39 | { 40 | "cell_type": "code", 41 | "execution_count": 2, 42 | "metadata": {}, 43 | "outputs": [], 44 | "source": [ 45 | "p_sunny = 0.9\n", 46 | "p_raining = 0.1" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": 4, 52 | "metadata": {}, 53 | "outputs": [ 54 | { 55 | "data": { 56 | "text/plain": [ 57 | "0.15200309344504997" 58 | ] 59 | }, 60 | "execution_count": 4, 61 | "metadata": {}, 62 | "output_type": "execute_result" 63 | } 64 | ], 65 | "source": [ 66 | "-log2(p_sunny)" 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": 5, 72 | "metadata": {}, 73 | "outputs": [ 74 | { 75 | "data": { 76 | "text/plain": [ 77 | "3.321928094887362" 78 | ] 79 | }, 80 | "execution_count": 5, 81 | "metadata": {}, 82 | "output_type": "execute_result" 83 | } 84 | ], 85 | "source": [ 86 | "-log2(p_raining)" 87 | ] 88 | }, 89 | { 90 | "cell_type": "markdown", 91 | "metadata": {}, 92 | "source": [ 93 | "As we expected the more probable event contains less information.\n", 94 | "\n", 95 | "After we defined the information function we can come back to **entropy**. It is simply defined as the average amount of information of a random process. And its defined like this:\n", 96 | "\n", 97 | "$S = - \\sum_i P_i\\log{P_i} = -\\operatorname{E}_P[\\log P]\n", 98 | "$\n", 99 | "In our weather case entropy is simply this:" 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": 9, 105 | "metadata": {}, 106 | "outputs": [], 107 | "source": [ 108 | "entropy = (p_sunny * log2(p_sunny)) + (p_raining * log2(p_raining))" 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": 10, 114 | "metadata": {}, 115 | "outputs": [ 116 | { 117 | "data": { 118 | "text/plain": [ 119 | "0.4689955935892812" 120 | ] 121 | }, 122 | "execution_count": 10, 123 | "metadata": {}, 124 | "output_type": "execute_result" 125 | } 126 | ], 127 | "source": [ 128 | "-entropy" 129 | ] 130 | }, 131 | { 132 | "cell_type": "markdown", 133 | "metadata": {}, 134 | "source": [ 135 | "0.47 is the average amount of information we get from this random process. We can make this process much more random though. 
Lets say you live in London and these are your probabilities:" 136 | ] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "execution_count": 14, 141 | "metadata": {}, 142 | "outputs": [], 143 | "source": [ 144 | "P_sunny = 0.5\n", 145 | "p_raining = 0.5" 146 | ] 147 | }, 148 | { 149 | "cell_type": "markdown", 150 | "metadata": {}, 151 | "source": [ 152 | "This should have a higher entropy because the surprise it on average bigger:" 153 | ] 154 | }, 155 | { 156 | "cell_type": "code", 157 | "execution_count": 15, 158 | "metadata": {}, 159 | "outputs": [], 160 | "source": [ 161 | "entropy = (p_sunny * log2(p_sunny)) + (p_raining * log2(p_raining))" 162 | ] 163 | }, 164 | { 165 | "cell_type": "code", 166 | "execution_count": 16, 167 | "metadata": {}, 168 | "outputs": [ 169 | { 170 | "data": { 171 | "text/plain": [ 172 | "0.636802784100545" 173 | ] 174 | }, 175 | "execution_count": 16, 176 | "metadata": {}, 177 | "output_type": "execute_result" 178 | } 179 | ], 180 | "source": [ 181 | "-entropy" 182 | ] 183 | }, 184 | { 185 | "cell_type": "markdown", 186 | "metadata": {}, 187 | "source": [ 188 | "And it is. " 189 | ] 190 | } 191 | ], 192 | "metadata": { 193 | "kernelspec": { 194 | "display_name": "Python 3", 195 | "language": "python", 196 | "name": "python3" 197 | }, 198 | "language_info": { 199 | "codemirror_mode": { 200 | "name": "ipython", 201 | "version": 3 202 | }, 203 | "file_extension": ".py", 204 | "mimetype": "text/x-python", 205 | "name": "python", 206 | "nbconvert_exporter": "python", 207 | "pygments_lexer": "ipython3", 208 | "version": "3.6.9" 209 | } 210 | }, 211 | "nbformat": 4, 212 | "nbformat_minor": 2 213 | } 214 | -------------------------------------------------------------------------------- /DL/Kaiming-Initialization.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Kaiming-Initialization\n", 8 | "\n", 9 | "**Paper:** https://arxiv.org/pdf/1502.01852.pdf\n", 10 | "\n", 11 | "We need to initialize layers in a neural network. The Kaiming Init is one way to do that. It was build with the RELU activation function in mind.\n", 12 | "\n", 13 | "We sample weights from the following distribution: \n", 14 | "\n", 15 | "**X ~ N(0, std)**\n", 16 | "\n", 17 | "--> fan = `fan_in` or `fan_out`\n", 18 | "\n", 19 | "--> std = sqrt(2 / (1 + a^2) * fan)\n", 20 | "\n", 21 | "- a: Negative slope of the rectifier used afer the layer (0 for RELU)\n", 22 | "- fan_in: Number of inputs. If we create a Linear layer with following dimensions (784, 50), fan_in would be 784. Preserves the magnitude of the variance of the weights in the forward pass.\n", 23 | "- fan_out: Number of outputs. Preserves the magnitudes in the backwards pass." 
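For comparison, PyTorch ships this scheme as `torch.nn.init.kaiming_normal_`; the from-scratch version below should produce weights with roughly the same statistics. A small reference sketch:

```python
import torch
import torch.nn as nn

# Built-in Kaiming (He) initialization; mode='fan_in' and nonlinearity='relu'
# correspond to a = 0 and fan = fan_in in the formula above.
w_ref = torch.empty(50, 784)
nn.init.kaiming_normal_(w_ref, mode='fan_in', nonlinearity='relu')
print(w_ref.std())   # roughly sqrt(2 / 784) ~ 0.0505
```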
24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": 91, 29 | "metadata": {}, 30 | "outputs": [], 31 | "source": [ 32 | "import torch\n", 33 | "from torch.nn.functional import relu, linear" 34 | ] 35 | }, 36 | { 37 | "cell_type": "markdown", 38 | "metadata": {}, 39 | "source": [ 40 | "### Without Kaiming Init" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": 92, 46 | "metadata": {}, 47 | "outputs": [], 48 | "source": [ 49 | "x = torch.randn(784)" 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": 93, 55 | "metadata": {}, 56 | "outputs": [], 57 | "source": [ 58 | "w = torch.randn(50, 784)\n", 59 | "b = torch.randn(50)" 60 | ] 61 | }, 62 | { 63 | "cell_type": "code", 64 | "execution_count": 94, 65 | "metadata": {}, 66 | "outputs": [], 67 | "source": [ 68 | "y = relu(linear(x, w, b))" 69 | ] 70 | }, 71 | { 72 | "cell_type": "markdown", 73 | "metadata": {}, 74 | "source": [ 75 | "The mean and std are very large. This is bad and will lead to exploding gradients, which will make training deep neural networks really difficult. This is what kaiming init is trying to solve." 76 | ] 77 | }, 78 | { 79 | "cell_type": "code", 80 | "execution_count": 95, 81 | "metadata": {}, 82 | "outputs": [ 83 | { 84 | "data": { 85 | "text/plain": [ 86 | "tensor(11.2751)" 87 | ] 88 | }, 89 | "execution_count": 95, 90 | "metadata": {}, 91 | "output_type": "execute_result" 92 | } 93 | ], 94 | "source": [ 95 | "y.mean()" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": 96, 101 | "metadata": {}, 102 | "outputs": [ 103 | { 104 | "data": { 105 | "text/plain": [ 106 | "tensor(16.0316)" 107 | ] 108 | }, 109 | "execution_count": 96, 110 | "metadata": {}, 111 | "output_type": "execute_result" 112 | } 113 | ], 114 | "source": [ 115 | "y.std()" 116 | ] 117 | }, 118 | { 119 | "cell_type": "markdown", 120 | "metadata": {}, 121 | "source": [ 122 | "### With Kaiming Init" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": 111, 128 | "metadata": {}, 129 | "outputs": [], 130 | "source": [ 131 | "i = 784\n", 132 | "o = 50" 133 | ] 134 | }, 135 | { 136 | "cell_type": "code", 137 | "execution_count": 124, 138 | "metadata": {}, 139 | "outputs": [ 140 | { 141 | "data": { 142 | "text/plain": [ 143 | "tensor([0.0505])" 144 | ] 145 | }, 146 | "execution_count": 124, 147 | "metadata": {}, 148 | "output_type": "execute_result" 149 | } 150 | ], 151 | "source": [ 152 | "std = torch.sqrt(torch.Tensor([2 / i]))\n", 153 | "std" 154 | ] 155 | }, 156 | { 157 | "cell_type": "code", 158 | "execution_count": 125, 159 | "metadata": {}, 160 | "outputs": [], 161 | "source": [ 162 | "w = torch.randn(50, 784) * std\n", 163 | "b = torch.randn(50)" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "execution_count": 126, 169 | "metadata": {}, 170 | "outputs": [ 171 | { 172 | "data": { 173 | "text/plain": [ 174 | "torch.Size([50, 784])" 175 | ] 176 | }, 177 | "execution_count": 126, 178 | "metadata": {}, 179 | "output_type": "execute_result" 180 | } 181 | ], 182 | "source": [ 183 | "w.shape" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": 127, 189 | "metadata": {}, 190 | "outputs": [], 191 | "source": [ 192 | "y = relu(linear(x, w, b)) # - 0.5" 193 | ] 194 | }, 195 | { 196 | "cell_type": "markdown", 197 | "metadata": {}, 198 | "source": [ 199 | "The mean and std are much better than before. One idea would be to substract -0.5 to counteract the effect of relu and to push the mean closer to 0." 
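A quick sketch of that shifted variant (purely illustrative; subtracting a constant moves the mean down but leaves the spread untouched):

```python
# Subtract 0.5 after the ReLU to push the mean of the activations towards 0.
y_shifted = relu(linear(x, w, b)) - 0.5
print(y_shifted.mean(), y_shifted.std())   # mean drops by exactly 0.5; std is unchanged
```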
200 | ] 201 | }, 202 | { 203 | "cell_type": "code", 204 | "execution_count": 128, 205 | "metadata": {}, 206 | "outputs": [ 207 | { 208 | "data": { 209 | "text/plain": [ 210 | "tensor(0.0479)" 211 | ] 212 | }, 213 | "execution_count": 128, 214 | "metadata": {}, 215 | "output_type": "execute_result" 216 | } 217 | ], 218 | "source": [ 219 | "y.mean()" 220 | ] 221 | }, 222 | { 223 | "cell_type": "code", 224 | "execution_count": 129, 225 | "metadata": {}, 226 | "outputs": [ 227 | { 228 | "data": { 229 | "text/plain": [ 230 | "tensor(0.9681)" 231 | ] 232 | }, 233 | "execution_count": 129, 234 | "metadata": {}, 235 | "output_type": "execute_result" 236 | } 237 | ], 238 | "source": [ 239 | "y.std()" 240 | ] 241 | } 242 | ], 243 | "metadata": { 244 | "kernelspec": { 245 | "display_name": "Python 3", 246 | "language": "python", 247 | "name": "python3" 248 | }, 249 | "language_info": { 250 | "codemirror_mode": { 251 | "name": "ipython", 252 | "version": 3 253 | }, 254 | "file_extension": ".py", 255 | "mimetype": "text/x-python", 256 | "name": "python", 257 | "nbconvert_exporter": "python", 258 | "pygments_lexer": "ipython3", 259 | "version": "3.7.4" 260 | } 261 | }, 262 | "nbformat": 4, 263 | "nbformat_minor": 2 264 | } 265 | -------------------------------------------------------------------------------- /ML/Logistic-Regression.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Logistic Regression\n", 8 | "\n", 9 | "Simple classification.\n", 10 | "\n", 11 | "It is basically a one-layer NN with one output neuron with a sigmoid as a activation function. It calculates:\n", 12 | "\n", 13 | "$p(y=1|x)$\n", 14 | "\n", 15 | "if $p >= 0.5$ then y = 1\n", 16 | "\n", 17 | "if $p < 0.5$ then y = 0" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": 350, 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "import torch as T\n", 27 | "import numpy as np\n", 28 | "from sklearn.datasets import load_iris" 29 | ] 30 | }, 31 | { 32 | "cell_type": "markdown", 33 | "metadata": {}, 34 | "source": [ 35 | "### Load data\n", 36 | "\n", 37 | "I will only use the first 2 classes of the iris dataset to make it a simple binary classification." 
38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": 351, 43 | "metadata": {}, 44 | "outputs": [ 45 | { 46 | "name": "stdout", 47 | "output_type": "stream", 48 | "text": [ 49 | "(100, 4)\n", 50 | "(100,)\n" 51 | ] 52 | } 53 | ], 54 | "source": [ 55 | "X, y = load_iris(return_X_y=True)\n", 56 | "X = X[:np.where(y == 2)[0][0]]\n", 57 | "y = y[:np.where(y == 2)[0][0]]\n", 58 | "print(X.shape)\n", 59 | "print(y.shape)" 60 | ] 61 | }, 62 | { 63 | "cell_type": "code", 64 | "execution_count": 352, 65 | "metadata": {}, 66 | "outputs": [], 67 | "source": [ 68 | "X = T.Tensor(X)\n", 69 | "X = (X-X.mean(axis=0))/X.std(axis=0)\n", 70 | "y = T.Tensor(y)" 71 | ] 72 | }, 73 | { 74 | "cell_type": "markdown", 75 | "metadata": {}, 76 | "source": [ 77 | "## Log Loss / Binary Cross Entropy Loss\n", 78 | "\n", 79 | "$H(p,q)\\ =\\ -\\sum_ip_i\\log q_i\\ =\\ -y\\log\\hat{y} - (1-y)\\log(1-\\hat{y})$" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": 353, 85 | "metadata": {}, 86 | "outputs": [], 87 | "source": [ 88 | "def loss(y_hat, y):\n", 89 | " return ((-y*T.log(y_hat))-((1-y)*T.log(1-y_hat))).sum()" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": 354, 95 | "metadata": {}, 96 | "outputs": [], 97 | "source": [ 98 | "w = T.randn((5,1),requires_grad=True)\n", 99 | "b = T.zeros((100,1), requires_grad=True)\n", 100 | "o = T.ones((100,1))\n", 101 | "X = T.cat([T.Tensor(X), o],axis=1)" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": 355, 107 | "metadata": {}, 108 | "outputs": [], 109 | "source": [ 110 | "def sigmoid(x):\n", 111 | " return 1 / (1 + T.exp(-x))" 112 | ] 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": 356, 117 | "metadata": {}, 118 | "outputs": [ 119 | { 120 | "name": "stdout", 121 | "output_type": "stream", 122 | "text": [ 123 | "tensor(10585.5889)\n", 124 | "tensor(10181.9854)\n", 125 | "tensor(9808.4639)\n", 126 | "tensor(9466.1670)\n", 127 | "tensor(9139.4170)\n", 128 | "tensor(8832.9443)\n", 129 | "tensor(8547.8906)\n", 130 | "tensor(8285.1436)\n", 131 | "tensor(8045.8110)\n", 132 | "tensor(7831.1392)\n", 133 | "tensor(7642.2090)\n", 134 | "tensor(7479.8379)\n", 135 | "tensor(7344.1660)\n", 136 | "tensor(7235.6162)\n", 137 | "tensor(7154.0898)\n", 138 | "tensor(7087.7095)\n", 139 | "tensor(7036.6616)\n", 140 | "tensor(7009.1792)\n", 141 | "tensor(6974.3169)\n", 142 | "tensor(6958.0498)\n", 143 | "tensor(6949.5850)\n", 144 | "tensor(6953.1567)\n", 145 | "tensor(6942.5977)\n", 146 | "tensor(6945.7642)\n", 147 | "tensor(6939.3979)\n", 148 | "tensor(6941.7837)\n", 149 | "tensor(6938.1318)\n", 150 | "tensor(6939.7710)\n", 151 | "tensor(6937.7290)\n", 152 | "tensor(6938.7646)\n", 153 | "tensor(6937.6787)\n", 154 | "tensor(6938.2935)\n", 155 | "tensor(6937.7139)\n", 156 | "tensor(6938.0762)\n", 157 | "tensor(6937.7568)\n", 158 | "tensor(6937.9829)\n", 159 | "tensor(6937.8003)\n", 160 | "tensor(6937.9238)\n", 161 | "tensor(6937.8330)\n", 162 | "tensor(6937.9014)\n", 163 | "tensor(6937.8467)\n", 164 | "tensor(6937.8936)\n", 165 | "tensor(6937.8525)\n", 166 | "tensor(6937.8799)\n", 167 | "tensor(6937.8682)\n", 168 | "tensor(6937.8730)\n", 169 | "tensor(6937.8711)\n", 170 | "tensor(6937.8750)\n", 171 | "tensor(6937.8711)\n", 172 | "tensor(6937.8691)\n" 173 | ] 174 | } 175 | ], 176 | "source": [ 177 | "epochs = 50\n", 178 | "lr = 1e-2\n", 179 | "\n", 180 | "for e in range(epochs):\n", 181 | " y_hat = sigmoid(X@w + b)\n", 182 | " l = loss(y_hat, y)\n", 183 | " print(l.data)\n", 184 | " 
l.backward()\n", 185 | " \n", 186 | " with T.no_grad():\n", 187 | " w -= T.clamp(w.grad, -3.5, 3.5) * lr\n", 188 | " b -= T.clamp(b.grad, -3.5, 3.5) * lr\n", 189 | " w.grad.zero_()\n", 190 | " b.grad.zero_()" 191 | ] 192 | }, 193 | { 194 | "cell_type": "markdown", 195 | "metadata": {}, 196 | "source": [ 197 | "## It worked! Obviously it overfits but it definetly shows that our math is right." 198 | ] 199 | }, 200 | { 201 | "cell_type": "code", 202 | "execution_count": 357, 203 | "metadata": {}, 204 | "outputs": [], 205 | "source": [ 206 | "y_hat = T.round(sigmoid(X@w + b))" 207 | ] 208 | }, 209 | { 210 | "cell_type": "code", 211 | "execution_count": 358, 212 | "metadata": {}, 213 | "outputs": [ 214 | { 215 | "data": { 216 | "text/plain": [ 217 | "tensor(0.)" 218 | ] 219 | }, 220 | "execution_count": 358, 221 | "metadata": {}, 222 | "output_type": "execute_result" 223 | } 224 | ], 225 | "source": [ 226 | "(y_hat - y.reshape(100,1)).sum().data" 227 | ] 228 | } 229 | ], 230 | "metadata": { 231 | "kernelspec": { 232 | "display_name": "Python 3", 233 | "language": "python", 234 | "name": "python3" 235 | }, 236 | "language_info": { 237 | "codemirror_mode": { 238 | "name": "ipython", 239 | "version": 3 240 | }, 241 | "file_extension": ".py", 242 | "mimetype": "text/x-python", 243 | "name": "python", 244 | "nbconvert_exporter": "python", 245 | "pygments_lexer": "ipython3", 246 | "version": "3.6.9" 247 | } 248 | }, 249 | "nbformat": 4, 250 | "nbformat_minor": 4 251 | } 252 | -------------------------------------------------------------------------------- /ML/F1-Score.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## F1-Score\n", 8 | "\n", 9 | "If you ask Wikipedia what the F1-Score is, you get the follwing answer:\n", 10 | "\n", 11 | "\n", 12 | "_In statistical analysis of binary classification, the F1 score (also F-score or F-measure) is a measure of a test's accuracy. It considers both the precision p and the recall r of the test to compute the score._\n", 13 | "\n", 14 | "The F1 score is the harmonic mean of the precision and recall, where an F1 score reaches its best value at 1 (perfect precision and recall) and worst at 0. \n", 15 | "\n", 16 | "Lets first define what precision and recall are." 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "metadata": {}, 22 | "source": [ 23 | "## TP FP TN FN\n", 24 | "\n", 25 | "If we want to define what precision and recall is, we need to define these terms first:\n", 26 | "\n", 27 | "- TP (True Positive)\n", 28 | "- FP (False Positive)\n", 29 | "- TN (True Negative)\n", 30 | "- FN (False Negative)\n", 31 | "\n", 32 | "I will explain this with a simple example. Lets say we want to classify an image into two categories (Person or Vehicle). This is a simple binary classification.\n", 33 | "\n", 34 | "- TP: It was a person --> We classified it as a person.\n", 35 | "- FP: It was no person --> We classified it as a person.\n", 36 | "- TN: It was no person --> We did not classify it as a person.\n", 37 | "- FN: It was a person --> We did not classify it as a person." 
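These counts can be read straight off prediction/label pairs. A minimal sketch with hypothetical lists `preds` and `labels` (1 means "person"):

```python
# Count TP, FP, TN, FN for the positive class "person" (encoded as 1).
preds  = [1, 0, 1, 1, 0, 1]   # hypothetical classifier outputs
labels = [1, 0, 0, 1, 1, 1]   # hypothetical ground truth

tp = sum(1 for p, y in zip(preds, labels) if p == 1 and y == 1)  # person, classified as person
fp = sum(1 for p, y in zip(preds, labels) if p == 1 and y == 0)  # no person, classified as person
tn = sum(1 for p, y in zip(preds, labels) if p == 0 and y == 0)  # no person, not classified as person
fn = sum(1 for p, y in zip(preds, labels) if p == 0 and y == 1)  # person, not classified as person

print(tp, fp, tn, fn)   # 3 1 1 1
```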
38 | ] 39 | }, 40 | { 41 | "cell_type": "markdown", 42 | "metadata": {}, 43 | "source": [ 44 | "## Precision\n", 45 | "\n", 46 | "Precision is the number of correct positive results divided by the number of all positive results returned by the classifier:\n", 47 | "\n", 48 | "$\\text{Precision}=\\frac{tp}{tp+fp} \\,$" 49 | ] 50 | }, 51 | { 52 | "cell_type": "markdown", 53 | "metadata": {}, 54 | "source": [ 55 | "## Recall\n", 56 | "\n", 57 | "Recall is the number of correct positive results divided by the number of all relevant samples (all samples that should have been identified as positive):\n", 58 | "\n", 59 | "$\\text{Recall}=\\frac{tp}{tp+fn} \\, $" 60 | ] 61 | }, 62 | { 63 | "cell_type": "markdown", 64 | "metadata": {}, 65 | "source": [ 66 | "## F1-Score\n", 67 | "\n", 68 | "This is simply a combination of Precision and Recall. Remember, we calculate the F1-Score for each class!\n", 69 | "\n", 70 | "$2 \\cdot \\frac{\\mathrm{precision} \\cdot \\mathrm{recall}}{\\mathrm{precision} + \\mathrm{recall}}$" 71 | ] 72 | }, 73 | { 74 | "cell_type": "markdown", 75 | "metadata": {}, 76 | "source": [ 77 | "## Example" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": 2, 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [ 86 | "import numpy as np" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": 44, 92 | "metadata": {}, 93 | "outputs": [], 94 | "source": [ 95 | "# Here we create random labels and classifications.\n", 96 | "data = np.random.choice([1,0], (40,2))" 97 | ] 98 | }, 99 | { 100 | "cell_type": "code", 101 | "execution_count": 23, 102 | "metadata": {}, 103 | "outputs": [], 104 | "source": [ 105 | "def tp_fp_fn(data, c):\n", 106 | " tp = 0\n", 107 | " fp = 0\n", 108 | " fn = 0\n", 109 | " for d in data:\n", 110 | " if d[0] == c:\n", 111 | " if d[1] == 0: fp += 1 \n", 112 | " if d[1] == 1: tp += 1\n", 113 | " if d[0] == 0 and d[1] == 1: fn += 1\n", 114 | " return tp,fp,fn" 115 | ] 116 | }, 117 | { 118 | "cell_type": "code", 119 | "execution_count": 32, 120 | "metadata": {}, 121 | "outputs": [], 122 | "source": [ 123 | "tp0,fp0,fn0 = tp_fp_fn(data, 0)\n", 124 | "tp1,fp1,fn1 = tp_fp_fn(data, 1)" 125 | ] 126 | }, 127 | { 128 | "cell_type": "code", 129 | "execution_count": 33, 130 | "metadata": {}, 131 | "outputs": [ 132 | { 133 | "data": { 134 | "text/plain": [ 135 | "(10, 15, 10)" 136 | ] 137 | }, 138 | "execution_count": 33, 139 | "metadata": {}, 140 | "output_type": "execute_result" 141 | } 142 | ], 143 | "source": [ 144 | "tp0, fp0, fn0" 145 | ] 146 | }, 147 | { 148 | "cell_type": "code", 149 | "execution_count": 34, 150 | "metadata": {}, 151 | "outputs": [ 152 | { 153 | "data": { 154 | "text/plain": [ 155 | "(7, 8, 10)" 156 | ] 157 | }, 158 | "execution_count": 34, 159 | "metadata": {}, 160 | "output_type": "execute_result" 161 | } 162 | ], 163 | "source": [ 164 | "tp1,fp1,fn1" 165 | ] 166 | }, 167 | { 168 | "cell_type": "code", 169 | "execution_count": 28, 170 | "metadata": {}, 171 | "outputs": [], 172 | "source": [ 173 | "def precision(tp,fp):\n", 174 | " return tp/(tp+fp)" 175 | ] 176 | }, 177 | { 178 | "cell_type": "code", 179 | "execution_count": 29, 180 | "metadata": {}, 181 | "outputs": [], 182 | "source": [ 183 | "def recall(tp,fn):\n", 184 | " return tp/(tp+fn)" 185 | ] 186 | }, 187 | { 188 | "cell_type": "code", 189 | "execution_count": 30, 190 | "metadata": {}, 191 | "outputs": [], 192 | "source": [ 193 | "def f1(pre, rec):\n", 194 | " return 2*((pre*rec)/(pre+rec))" 195 | ] 196 | }, 197 | { 198 | "cell_type": "code", 199 | 
"execution_count": 36, 200 | "metadata": {}, 201 | "outputs": [ 202 | { 203 | "data": { 204 | "text/plain": [ 205 | "0.4" 206 | ] 207 | }, 208 | "execution_count": 36, 209 | "metadata": {}, 210 | "output_type": "execute_result" 211 | } 212 | ], 213 | "source": [ 214 | "pre_0 = precision(tp0,fp0)\n", 215 | "pre_0" 216 | ] 217 | }, 218 | { 219 | "cell_type": "code", 220 | "execution_count": 37, 221 | "metadata": {}, 222 | "outputs": [ 223 | { 224 | "data": { 225 | "text/plain": [ 226 | "0.4666666666666667" 227 | ] 228 | }, 229 | "execution_count": 37, 230 | "metadata": {}, 231 | "output_type": "execute_result" 232 | } 233 | ], 234 | "source": [ 235 | "pre_1 = precision(tp1,fp1)\n", 236 | "pre_1" 237 | ] 238 | }, 239 | { 240 | "cell_type": "code", 241 | "execution_count": 39, 242 | "metadata": {}, 243 | "outputs": [ 244 | { 245 | "data": { 246 | "text/plain": [ 247 | "0.5" 248 | ] 249 | }, 250 | "execution_count": 39, 251 | "metadata": {}, 252 | "output_type": "execute_result" 253 | } 254 | ], 255 | "source": [ 256 | "rec_0 = recall(tp0,fn0)\n", 257 | "rec_0" 258 | ] 259 | }, 260 | { 261 | "cell_type": "code", 262 | "execution_count": 40, 263 | "metadata": {}, 264 | "outputs": [ 265 | { 266 | "data": { 267 | "text/plain": [ 268 | "0.4117647058823529" 269 | ] 270 | }, 271 | "execution_count": 40, 272 | "metadata": {}, 273 | "output_type": "execute_result" 274 | } 275 | ], 276 | "source": [ 277 | "rec_1 = recall(tp1,fn1)\n", 278 | "rec_1" 279 | ] 280 | }, 281 | { 282 | "cell_type": "code", 283 | "execution_count": 42, 284 | "metadata": {}, 285 | "outputs": [ 286 | { 287 | "data": { 288 | "text/plain": [ 289 | "0.4444444444444445" 290 | ] 291 | }, 292 | "execution_count": 42, 293 | "metadata": {}, 294 | "output_type": "execute_result" 295 | } 296 | ], 297 | "source": [ 298 | "f1_0 = f1(pre_0,rec_0)\n", 299 | "f1_0" 300 | ] 301 | }, 302 | { 303 | "cell_type": "code", 304 | "execution_count": 43, 305 | "metadata": {}, 306 | "outputs": [ 307 | { 308 | "data": { 309 | "text/plain": [ 310 | "0.43749999999999994" 311 | ] 312 | }, 313 | "execution_count": 43, 314 | "metadata": {}, 315 | "output_type": "execute_result" 316 | } 317 | ], 318 | "source": [ 319 | "f1_1 = f1(pre_1,rec_1)\n", 320 | "f1_1" 321 | ] 322 | } 323 | ], 324 | "metadata": { 325 | "kernelspec": { 326 | "display_name": "Python 3", 327 | "language": "python", 328 | "name": "python3" 329 | }, 330 | "language_info": { 331 | "codemirror_mode": { 332 | "name": "ipython", 333 | "version": 3 334 | }, 335 | "file_extension": ".py", 336 | "mimetype": "text/x-python", 337 | "name": "python", 338 | "nbconvert_exporter": "python", 339 | "pygments_lexer": "ipython3", 340 | "version": "3.6.9" 341 | } 342 | }, 343 | "nbformat": 4, 344 | "nbformat_minor": 4 345 | } 346 | -------------------------------------------------------------------------------- /Math/Graph-Theory.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Graph-Theory" 8 | ] 9 | }, 10 | { 11 | "cell_type": "code", 12 | "execution_count": 2, 13 | "metadata": {}, 14 | "outputs": [], 15 | "source": [ 16 | "from IPython.display import Image\n", 17 | "from IPython.core.display import HTML " 18 | ] 19 | }, 20 | { 21 | "cell_type": "markdown", 22 | "metadata": {}, 23 | "source": [ 24 | "Great ressources:\n", 25 | " \n", 26 | "https://www.youtube.com/watch?v=eQA-m22wjTQ&list=PLDV1Zeh2NRsDGO4--qE8yH72HFL1Km93P" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 
31 | "metadata": {}, 32 | "source": [ 33 | "### Undirected Graph\n", 34 | "\n", 35 | "Is a graph in which edges have no direction. The edge $(u, v)$ is identical to the edge (v, u)." 36 | ] 37 | }, 38 | { 39 | "cell_type": "code", 40 | "execution_count": 7, 41 | "metadata": { 42 | "scrolled": false 43 | }, 44 | "outputs": [ 45 | { 46 | "data": { 47 | "text/html": [ 48 | "" 49 | ], 50 | "text/plain": [ 51 | "" 52 | ] 53 | }, 54 | "execution_count": 7, 55 | "metadata": {}, 56 | "output_type": "execute_result" 57 | } 58 | ], 59 | "source": [ 60 | "Image(url= \"https://i.stack.imgur.com/YA7NX.png\")" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "### Directed Graph\n", 68 | "\n", 69 | "Or digraph is a graph in which edges have orientations. The edge $(u, v)$ is the edge from node u to node v." 70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": 8, 75 | "metadata": {}, 76 | "outputs": [ 77 | { 78 | "data": { 79 | "text/html": [ 80 | "" 81 | ], 82 | "text/plain": [ 83 | "" 84 | ] 85 | }, 86 | "execution_count": 8, 87 | "metadata": {}, 88 | "output_type": "execute_result" 89 | } 90 | ], 91 | "source": [ 92 | "Image(url= \"https://upload.wikimedia.org/wikipedia/commons/thumb/5/51/Directed_graph.svg/1200px-Directed_graph.svg.png\")" 93 | ] 94 | }, 95 | { 96 | "cell_type": "markdown", 97 | "metadata": {}, 98 | "source": [ 99 | "### Weighted Graph\n", 100 | "\n", 101 | "Graphs with edges that contain a certain weight. Usually denoted as a triplet $(u,v,w)$. Can be directed or undirected" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": 9, 107 | "metadata": {}, 108 | "outputs": [ 109 | { 110 | "data": { 111 | "text/html": [ 112 | "" 113 | ], 114 | "text/plain": [ 115 | "" 116 | ] 117 | }, 118 | "execution_count": 9, 119 | "metadata": {}, 120 | "output_type": "execute_result" 121 | } 122 | ], 123 | "source": [ 124 | "Image(url=\"https://ucarecdn.com/a67cb888-aa0c-424b-8c7f-847e38dd5691/\")" 125 | ] 126 | }, 127 | { 128 | "cell_type": "markdown", 129 | "metadata": {}, 130 | "source": [ 131 | "### Tree\n", 132 | "\n", 133 | "A special undirected graph with no cyles. Or a graph with N nodes with N-1 edges.\n", 134 | "\n", 135 | "#### Rooted Tree\n", 136 | "\n", 137 | "Tree with designated root node. Every edge either points away from or towards the root node. When away it is called an arborescence (out-tree) and anti-arborescence (in-tree) otherwise.\n", 138 | "\n", 139 | "A simple binary tree from ever CS 101 course is a arborescence tree." 
140 | ] 141 | }, 142 | { 143 | "cell_type": "markdown", 144 | "metadata": {}, 145 | "source": [ 146 | "### Binary Search Tree (BST)\n", 147 | "\n", 148 | "Binary trees which satisfy the BST invariant which states that for every node x:\n", 149 | "\n", 150 | "$x.left.value < x.value < x.right.value$\n", 151 | "\n", 152 | "Search/Insert/Delete is $O(log(n))$" 153 | ] 154 | }, 155 | { 156 | "cell_type": "code", 157 | "execution_count": 3, 158 | "metadata": {}, 159 | "outputs": [ 160 | { 161 | "data": { 162 | "text/html": [ 163 | "" 164 | ], 165 | "text/plain": [ 166 | "" 167 | ] 168 | }, 169 | "execution_count": 3, 170 | "metadata": {}, 171 | "output_type": "execute_result" 172 | } 173 | ], 174 | "source": [ 175 | "Image(url=\"https://upload.wikimedia.org/wikipedia/commons/thumb/d/da/Binary_search_tree.svg/1200px-Binary_search_tree.svg.png\")" 176 | ] 177 | }, 178 | { 179 | "cell_type": "markdown", 180 | "metadata": {}, 181 | "source": [ 182 | "### Directed Acyclic Graphs (DAGs)\n", 183 | "\n", 184 | "Directed Graphs with no cycles. Every out-tree is a DAG." 185 | ] 186 | }, 187 | { 188 | "cell_type": "markdown", 189 | "metadata": {}, 190 | "source": [ 191 | "### Bipartite Graph\n", 192 | "\n", 193 | "Is one whose vertices can be split into two independent groups U, V such\n", 194 | "that every edge connects between U and V. Think of the weights between two\n", 195 | "layers of a neural network." 196 | ] 197 | }, 198 | { 199 | "cell_type": "code", 200 | "execution_count": 12, 201 | "metadata": {}, 202 | "outputs": [ 203 | { 204 | "data": { 205 | "text/html": [ 206 | "" 207 | ], 208 | "text/plain": [ 209 | "" 210 | ] 211 | }, 212 | "execution_count": 12, 213 | "metadata": {}, 214 | "output_type": "execute_result" 215 | } 216 | ], 217 | "source": [ 218 | "Image(url=\"https://i.imgur.com/Su6Y4UC.png\")" 219 | ] 220 | }, 221 | { 222 | "cell_type": "markdown", 223 | "metadata": {}, 224 | "source": [ 225 | "### Complete Graph\n", 226 | "\n", 227 | "A graph where there is a unique edge between every pair of nodes." 228 | ] 229 | }, 230 | { 231 | "cell_type": "code", 232 | "execution_count": 13, 233 | "metadata": {}, 234 | "outputs": [ 235 | { 236 | "data": { 237 | "text/html": [ 238 | "" 239 | ], 240 | "text/plain": [ 241 | "" 242 | ] 243 | }, 244 | "execution_count": 13, 245 | "metadata": {}, 246 | "output_type": "execute_result" 247 | } 248 | ], 249 | "source": [ 250 | "Image(url=\"https://upload.wikimedia.org/wikipedia/commons/thumb/9/9e/Complete_graph_K7.svg/1200px-Complete_graph_K7.svg.png\")" 251 | ] 252 | }, 253 | { 254 | "cell_type": "markdown", 255 | "metadata": {}, 256 | "source": [ 257 | "## Representing Graphs" 258 | ] 259 | }, 260 | { 261 | "cell_type": "markdown", 262 | "metadata": {}, 263 | "source": [ 264 | "### Adjacency Matrix\n", 265 | "\n", 266 | "One way to represent a graph is with a Matrix. An adjacency matrix $m$ is one where $m[i][j]$ represents the edge weight of going from node i to node j." 
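A minimal sketch of this representation for a small hypothetical directed, weighted graph with 4 nodes:

```python
import numpy as np

# Rows are source nodes, columns are destination nodes, entries are edge weights
# (0 here means "no edge"; another common convention is to use infinity).
m = np.array([
    [0, 5, 0, 2],   # edges 0->1 (weight 5) and 0->3 (weight 2)
    [0, 0, 3, 0],   # edge  1->2 (weight 3)
    [0, 0, 0, 1],   # edge  2->3 (weight 1)
    [0, 0, 0, 0],   # node 3 has no outgoing edges
])

print(m[0][1])   # weight of going from node 0 to node 1 -> 5
```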
267 | ] 268 | }, 269 | { 270 | "cell_type": "code", 271 | "execution_count": 14, 272 | "metadata": {}, 273 | "outputs": [ 274 | { 275 | "data": { 276 | "text/html": [ 277 | "" 278 | ], 279 | "text/plain": [ 280 | "" 281 | ] 282 | }, 283 | "execution_count": 14, 284 | "metadata": {}, 285 | "output_type": "execute_result" 286 | } 287 | ], 288 | "source": [ 289 | "Image(url=\"https://cdn.softwaretestinghelp.com/wp-content/qa/uploads/2019/08/6.weighted-graph-and-its-adjacency-matrix.png\")" 290 | ] 291 | }, 292 | { 293 | "cell_type": "markdown", 294 | "metadata": {}, 295 | "source": [ 296 | "### Adjacency List\n", 297 | "\n", 298 | "A way to represent a graph as a map from each node to the list of its edges (its neighbours)." 299 | ] 300 | }, 301 | { 302 | "cell_type": "code", 303 | "execution_count": 18, 304 | "metadata": {}, 305 | "outputs": [ 306 | { 307 | "data": { 308 | "text/html": [ 309 | "" 310 | ], 311 | "text/plain": [ 312 | "" 313 | ] 314 | }, 315 | "execution_count": 18, 316 | "metadata": {}, 317 | "output_type": "execute_result" 318 | } 319 | ], 320 | "source": [ 321 | "Image(url=\"https://www.kodefork.com/media/uploads/articles/2019/06/23/graph-ajacency-list-cpp.jpg\")" 322 | ] 323 | }, 324 | { 325 | "cell_type": "markdown", 326 | "metadata": {}, 327 | "source": [ 328 | "## Depth First Search (DFS)\n", 329 | "\n", 330 | "Algorithm for traversing a graph or tree: go as deep as possible along one branch before backtracking.\n", 331 | "\n", 332 | "Simplest example I found: https://www.educative.io/edpresso/how-to-implement-depth-first-search-in-python\n", 333 | "\n", 334 | "How it works:\n", 335 | "\n", 336 | "1. Pick any node. If it is unvisited, mark it as visited and recurse on all its adjacent nodes.\n", 337 | "2. Repeat until all nodes are visited or the node to be searched is found." 338 | ] 339 | }, 340 | { 341 | "cell_type": "code", 342 | "execution_count": 61, 343 | "metadata": {}, 344 | "outputs": [ 345 | { 346 | "name": "stdout", 347 | "output_type": "stream", 348 | "text": [ 349 | "A\n", 350 | "B\n", 351 | "D\n", 352 | "G\n", 353 | "H\n", 354 | "E\n", 355 | "F\n", 356 | "C\n" 357 | ] 358 | } 359 | ], 360 | "source": [ 361 | "graph = {\n", 362 | " 'A' : ['B','C'],\n", 363 | " 'B' : ['D', 'E'],\n", 364 | " 'C' : ['F'],\n", 365 | " 'D' : ['G'],\n", 366 | " 'F' : [],\n", 367 | " 'G' : ['H'],\n", 368 | " 'H' : [],\n", 369 | " 'E' : ['F']\n", 370 | "}\n", 371 | "\n", 372 | "visited = set()\n", 373 | "\n", 374 | "def dfs(visited, graph, node):\n", 375 | " if node not in visited:\n", 376 | " print (node)\n", 377 | " visited.add(node)\n", 378 | " for neighbour in graph[node]:\n", 379 | " dfs(visited, graph, neighbour)\n", 380 | "\n", 381 | "dfs(visited, graph, 'A')" 382 | ] 383 | }, 384 | { 385 | "cell_type": "markdown", 386 | "metadata": {}, 387 | "source": [ 388 | "## Breadth First Search (BFS)\n", 389 | "\n", 390 | "Explores the graph level by level: all neighbours of the start node first, then their neighbours, and so on.\n", 391 | "\n", 392 | "How it works:\n", 393 | "\n", 394 | "1. Pick any node, visit an adjacent unvisited vertex, mark it as visited and insert it into the queue.\n", 395 | "2. If no unvisited adjacent vertices remain, remove the first vertex from the queue.\n", 396 | "3. Repeat steps 1 and 2 until the queue is empty or the desired node is found."
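The implementation below uses a plain Python list as the queue, where `pop(0)` is O(n). A `collections.deque` gives O(1) pops from the left; a sketch (not from the notebook, assuming the same `graph` dict defined below):

```python
from collections import deque

def bfs_deque(graph, start):
    visited = {start}
    queue = deque([start])
    order = []
    while queue:
        node = queue.popleft()            # O(1), unlike list.pop(0)
        order.append(node)
        for neighbour in graph[node]:
            if neighbour not in visited:
                visited.add(neighbour)
                queue.append(neighbour)
    return order

# bfs_deque(graph, 'A') should give the same order as the version below:
# ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H']
```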
397 | ] 398 | }, 399 | { 400 | "cell_type": "code", 401 | "execution_count": 65, 402 | "metadata": {}, 403 | "outputs": [ 404 | { 405 | "name": "stdout", 406 | "output_type": "stream", 407 | "text": [ 408 | "A B C D E F G H " 409 | ] 410 | } 411 | ], 412 | "source": [ 413 | "graph = {\n", 414 | " 'A' : ['B','C'],\n", 415 | " 'B' : ['D', 'E'],\n", 416 | " 'C' : ['F'],\n", 417 | " 'D' : ['G'],\n", 418 | " 'F' : [],\n", 419 | " 'G' : ['H'],\n", 420 | " 'H' : [],\n", 421 | " 'E' : ['F']\n", 422 | "}\n", 423 | "\n", 424 | "visited = []\n", 425 | "queue = [] \n", 426 | "\n", 427 | "def bfs(visited, graph, node):\n", 428 | " visited.append(node)\n", 429 | " queue.append(node)\n", 430 | "\n", 431 | " while queue:\n", 432 | " s = queue.pop(0) \n", 433 | " print (s, end = \" \") \n", 434 | "\n", 435 | " for neighbour in graph[s]:\n", 436 | " if neighbour not in visited:\n", 437 | " visited.append(neighbour)\n", 438 | " queue.append(neighbour)\n", 439 | "\n", 440 | "bfs(visited, graph, 'A')" 441 | ] 442 | } 443 | ], 444 | "metadata": { 445 | "kernelspec": { 446 | "display_name": "Python 3", 447 | "language": "python", 448 | "name": "python3" 449 | }, 450 | "language_info": { 451 | "codemirror_mode": { 452 | "name": "ipython", 453 | "version": 3 454 | }, 455 | "file_extension": ".py", 456 | "mimetype": "text/x-python", 457 | "name": "python", 458 | "nbconvert_exporter": "python", 459 | "pygments_lexer": "ipython3", 460 | "version": "3.6.9" 461 | } 462 | }, 463 | "nbformat": 4, 464 | "nbformat_minor": 4 465 | } 466 | -------------------------------------------------------------------------------- /ML/Decision-Tree.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 11, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import pandas as pd\n", 10 | "import numpy as np" 11 | ] 12 | }, 13 | { 14 | "cell_type": "markdown", 15 | "metadata": {}, 16 | "source": [ 17 | "# Decision Tree\n", 18 | "\n", 19 | "Our dataset contains 10 people that were passengers on the Mini-Titanic. We know their age and their sex and we want to predict with a Decision Tree if they survived or not." 
20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 2, 25 | "metadata": {}, 26 | "outputs": [], 27 | "source": [ 28 | "age = [\n", 29 | " 12,\n", 30 | " 10,\n", 31 | " 11,\n", 32 | " 9,\n", 33 | " 8,\n", 34 | " 45,\n", 35 | " 63,\n", 36 | " 72,\n", 37 | " 55,\n", 38 | " 66\n", 39 | "]" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": 3, 45 | "metadata": {}, 46 | "outputs": [], 47 | "source": [ 48 | "# 1 -> Male\n", 49 | "# 0 -> Female\n", 50 | "\n", 51 | "sex = [\n", 52 | " 1,\n", 53 | " 1,\n", 54 | " 1,\n", 55 | " 0,\n", 56 | " 0,\n", 57 | " 1,\n", 58 | " 1,\n", 59 | " 1,\n", 60 | " 0,\n", 61 | " 0\n", 62 | "]" 63 | ] 64 | }, 65 | { 66 | "cell_type": "code", 67 | "execution_count": 4, 68 | "metadata": {}, 69 | "outputs": [], 70 | "source": [ 71 | "# 1 -> Yes\n", 72 | "# 0 -> No\n", 73 | "\n", 74 | "survived = [\n", 75 | " 0,\n", 76 | " 0,\n", 77 | " 0,\n", 78 | " 0,\n", 79 | " 0,\n", 80 | " 0,\n", 81 | " 0,\n", 82 | " 0,\n", 83 | " 1,\n", 84 | " 1\n", 85 | "]" 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": 9, 91 | "metadata": {}, 92 | "outputs": [], 93 | "source": [ 94 | "data = pd.DataFrame({'age':age,'sex':sex,'survived':survived})" 95 | ] 96 | }, 97 | { 98 | "cell_type": "code", 99 | "execution_count": 10, 100 | "metadata": {}, 101 | "outputs": [ 102 | { 103 | "data": { 104 | "text/html": [ 105 | "
\n", 106 | "\n", 119 | "\n", 120 | " \n", 121 | " \n", 122 | " \n", 123 | " \n", 124 | " \n", 125 | " \n", 126 | " \n", 127 | " \n", 128 | " \n", 129 | " \n", 130 | " \n", 131 | " \n", 132 | " \n", 133 | " \n", 134 | " \n", 135 | " \n", 136 | " \n", 137 | " \n", 138 | " \n", 139 | " \n", 140 | " \n", 141 | " \n", 142 | " \n", 143 | " \n", 144 | " \n", 145 | " \n", 146 | " \n", 147 | " \n", 148 | " \n", 149 | " \n", 150 | " \n", 151 | " \n", 152 | " \n", 153 | " \n", 154 | " \n", 155 | " \n", 156 | " \n", 157 | " \n", 158 | " \n", 159 | " \n", 160 | " \n", 161 | " \n", 162 | " \n", 163 | " \n", 164 | " \n", 165 | " \n", 166 | " \n", 167 | " \n", 168 | " \n", 169 | " \n", 170 | " \n", 171 | " \n", 172 | " \n", 173 | " \n", 174 | " \n", 175 | " \n", 176 | " \n", 177 | " \n", 178 | " \n", 179 | " \n", 180 | " \n", 181 | " \n", 182 | " \n", 183 | " \n", 184 | " \n", 185 | " \n", 186 | " \n", 187 | " \n", 188 | " \n", 189 | " \n", 190 | "
agesexsurvived
01210
11010
21110
3900
4800
54510
66310
77210
85501
96601
\n", 191 | "
" 192 | ], 193 | "text/plain": [ 194 | " age sex survived\n", 195 | "0 12 1 0\n", 196 | "1 10 1 0\n", 197 | "2 11 1 0\n", 198 | "3 9 0 0\n", 199 | "4 8 0 0\n", 200 | "5 45 1 0\n", 201 | "6 63 1 0\n", 202 | "7 72 1 0\n", 203 | "8 55 0 1\n", 204 | "9 66 0 1" 205 | ] 206 | }, 207 | "execution_count": 10, 208 | "metadata": {}, 209 | "output_type": "execute_result" 210 | } 211 | ], 212 | "source": [ 213 | "data" 214 | ] 215 | }, 216 | { 217 | "cell_type": "markdown", 218 | "metadata": {}, 219 | "source": [ 220 | "# CART Algorithm\n", 221 | "\n", 222 | "Classification and Regression Trees by Breiman et al (1984). Its a binary dicision tree that is constructed by splitting a node into two child nodes repeatedly, beginning with the root node that contains the whole learning sample.\n", 223 | "\n", 224 | "## Tree Growing Process\n", 225 | "\n", 226 | "Choose a split among all the possible splits at each node so that the resulting child nodes are the purest." 227 | ] 228 | }, 229 | { 230 | "cell_type": "code", 231 | "execution_count": 12, 232 | "metadata": {}, 233 | "outputs": [], 234 | "source": [ 235 | "def mse(y, y_hat): 1/len(y)*np.sum((y-y_hat)**2)" 236 | ] 237 | }, 238 | { 239 | "cell_type": "code", 240 | "execution_count": 148, 241 | "metadata": {}, 242 | "outputs": [], 243 | "source": [ 244 | "X = np.array([data['age'], data['sex']])\n", 245 | "y = np.array(data['survived'])" 246 | ] 247 | }, 248 | { 249 | "cell_type": "code", 250 | "execution_count": 149, 251 | "metadata": {}, 252 | "outputs": [ 253 | { 254 | "data": { 255 | "text/plain": [ 256 | "array([[12, 10, 11, 9, 8, 45, 63, 72, 55, 66],\n", 257 | " [ 1, 1, 1, 0, 0, 1, 1, 1, 0, 0]])" 258 | ] 259 | }, 260 | "execution_count": 149, 261 | "metadata": {}, 262 | "output_type": "execute_result" 263 | } 264 | ], 265 | "source": [ 266 | "X" 267 | ] 268 | }, 269 | { 270 | "cell_type": "code", 271 | "execution_count": 150, 272 | "metadata": {}, 273 | "outputs": [ 274 | { 275 | "data": { 276 | "text/plain": [ 277 | "array([0, 0, 0, 0, 0, 0, 0, 0, 1, 1])" 278 | ] 279 | }, 280 | "execution_count": 150, 281 | "metadata": {}, 282 | "output_type": "execute_result" 283 | } 284 | ], 285 | "source": [ 286 | "y" 287 | ] 288 | }, 289 | { 290 | "cell_type": "code", 291 | "execution_count": 86, 292 | "metadata": {}, 293 | "outputs": [], 294 | "source": [ 295 | "def loss(X,y):\n", 296 | " l = 0\n", 297 | " for i in range(len(y)):\n", 298 | " y_hat = dt(X[0][i], X[1][i])\n", 299 | " l += (y_hat-y[i])**2\n", 300 | " return 1/len(y)*l" 301 | ] 302 | }, 303 | { 304 | "cell_type": "code", 305 | "execution_count": 93, 306 | "metadata": {}, 307 | "outputs": [], 308 | "source": [ 309 | "def dt(age, sex):\n", 310 | " if sex == 0:\n", 311 | " if age > 45: return 1\n", 312 | " else : return 0\n", 313 | " else: return 0" 314 | ] 315 | }, 316 | { 317 | "cell_type": "code", 318 | "execution_count": 94, 319 | "metadata": {}, 320 | "outputs": [ 321 | { 322 | "data": { 323 | "text/plain": [ 324 | "0.0" 325 | ] 326 | }, 327 | "execution_count": 94, 328 | "metadata": {}, 329 | "output_type": "execute_result" 330 | } 331 | ], 332 | "source": [ 333 | "loss(X,y)" 334 | ] 335 | }, 336 | { 337 | "cell_type": "code", 338 | "execution_count": 183, 339 | "metadata": {}, 340 | "outputs": [], 341 | "source": [ 342 | "class Node:\n", 343 | " def __init__(self):\n", 344 | " self.tt,self.tn,self.nt,self.nn=0,0,0,0\n", 345 | " \n", 346 | " def pred(self,x):\n", 347 | " if x == 1: return 1\n", 348 | " else : return 0\n", 349 | " \n", 350 | " @staticmethod\n", 351 | " def 
336 | { 337 | "cell_type": "code", 338 | "execution_count": 183, 339 | "metadata": {}, 340 | "outputs": [], 341 | "source": [ 342 | "class Node:\n", 343 | " def __init__(self):\n", 344 | " self.tt,self.tn,self.nt,self.nn=0,0,0,0\n", 345 | " \n", 346 | " def pred(self,x):\n", 347 | " if x == 1: return 1\n", 348 | " else : return 0\n", 349 | " \n", 350 | " @staticmethod\n", 351 | " def split_categorical(X):\n", 352 | " splits = []\n", 353 | " for i in range(len(X) - 1):\n", 354 | " splits.append(X[i] + 1/2 * (X[i+1]-X[i]))\n", 355 | " return splits\n", 356 | " \n", 357 | " def calc(self,X,y):\n", 358 | " for i,x in enumerate(X):\n", 359 | " p = self.pred(x)\n", 360 | " if y[i] == 1 and p == 1: self.tt += 1\n", 361 | " if y[i] == 1 and p == 0: self.tn += 1\n", 362 | " if y[i] == 0 and p == 1: self.nt += 1\n", 363 | " if y[i] == 0 and p == 0: self.nn += 1" 364 | ] 365 | }, 366 | { 367 | "cell_type": "code", 368 | "execution_count": 178, 369 | "metadata": {}, 370 | "outputs": [], 371 | "source": [ 372 | "def gini_impurity(node):\n", 373 | " n_left = node.tt + node.tn\n", 374 | " n_right = node.nt + node.nn\n", 375 | " gini_left = 1 - (node.tt/n_left)**2 - (node.tn/n_left)**2\n", 376 | " gini_right = 1 - (node.nt/n_right)**2 - (node.nn/n_right)**2\n", 377 | " n = n_left + n_right\n", 378 | " return (n_left/n)*gini_left + (n_right/n)*gini_right" 379 | ] 380 | }, 381 | { 382 | "cell_type": "code", 383 | "execution_count": 166, 384 | "metadata": {}, 385 | "outputs": [], 386 | "source": [ 387 | "node = Node()" 388 | ] 389 | }, 390 | { 391 | "cell_type": "code", 392 | "execution_count": 167, 393 | "metadata": {}, 394 | "outputs": [], 395 | "source": [ 396 | "node.calc(data['age'], data['survived'])" 397 | ] 398 | }, 399 | { 400 | "cell_type": "code", 401 | "execution_count": 172, 402 | "metadata": {}, 403 | "outputs": [ 404 | { 405 | "data": { 406 | "text/plain": [ 407 | "0.30000000000000004" 408 | ] 409 | }, 410 | "execution_count": 172, 411 | "metadata": {}, 412 | "output_type": "execute_result" 413 | } 414 | ], 415 | "source": [ 416 | "age_impurity = gini_impurity(node)" 417 | ] 418 | }, 419 | { 420 | "cell_type": "code", 421 | "execution_count": 179, 422 | "metadata": {}, 423 | "outputs": [], 424 | "source": [ 425 | "node = Node()" 426 | ] 427 | }, 428 | { 429 | "cell_type": "code", 430 | "execution_count": 184, 431 | "metadata": {}, 432 | "outputs": [], 433 | "source": [ 434 | "node.calc(data['sex'], data['survived'])" 435 | ] 436 | }, 437 | { 438 | "cell_type": "code", 439 | "execution_count": 185, 440 | "metadata": {}, 441 | "outputs": [], 442 | "source": [ 443 | "sex_impurity = gini_impurity(node)" 444 | ] 445 | }, 446 | { 447 | "cell_type": "code", 448 | "execution_count": 186, 449 | "metadata": {}, 450 | "outputs": [ 451 | { 452 | "data": { 453 | "text/plain": [ 454 | "0.30000000000000004" 455 | ] 456 | }, 457 | "execution_count": 186, 458 | "metadata": {}, 459 | "output_type": "execute_result" 460 | } 461 | ], 462 | "source": [ 463 | "sex_impurity" 464 | ] 465 | }, 466 | { 467 | "cell_type": "code", 468 | "execution_count": 215, 469 | "metadata": {}, 470 | "outputs": [], 471 | "source": [ 472 | "def split_categorical(X):\n", 473 | " X = np.sort(X)\n", 474 | " splits = []\n", 475 | " for i in range(len(X)):\n", 476 | " split = X[i] + (1/2 * (X[i+1]-X[i]))\n", 477 | " splits.append(split)\n", 478 | " if i == len(X) - 2: break\n", 479 | " return splits" 480 | ] 481 | }, 482 | { 483 | "cell_type": "code", 484 | "execution_count": 216, 485 | "metadata": {}, 486 | "outputs": [], 487 | "source": [ 488 | "X = np.array([1,4,12,52,61,1,25,5])" 489 | ] 490 | }, 491 | { 492 | "cell_type": "code", 493 | "execution_count": 218, 494 | "metadata": {}, 495 | "outputs": [ 496 | { 497 | "data": { 498 | "text/plain": [ 499 | "array([ 1, 1, 4, 5, 12, 25, 52, 61])" 500 | ] 501 | }, 502 | "execution_count": 218, 503 | "metadata": {}, 504 | "output_type": "execute_result" 505 | } 506 | ],
"source": [ 508 | "np.sort(X)" 509 | ] 510 | }, 511 | { 512 | "cell_type": "code", 513 | "execution_count": 217, 514 | "metadata": {}, 515 | "outputs": [ 516 | { 517 | "data": { 518 | "text/plain": [ 519 | "[1.0, 2.5, 4.5, 8.5, 18.5, 38.5, 56.5]" 520 | ] 521 | }, 522 | "execution_count": 217, 523 | "metadata": {}, 524 | "output_type": "execute_result" 525 | } 526 | ], 527 | "source": [ 528 | "split_categorical(X)" 529 | ] 530 | }, 531 | { 532 | "cell_type": "code", 533 | "execution_count": null, 534 | "metadata": {}, 535 | "outputs": [], 536 | "source": [] 537 | } 538 | ], 539 | "metadata": { 540 | "kernelspec": { 541 | "display_name": "Python 3", 542 | "language": "python", 543 | "name": "python3" 544 | }, 545 | "language_info": { 546 | "codemirror_mode": { 547 | "name": "ipython", 548 | "version": 3 549 | }, 550 | "file_extension": ".py", 551 | "mimetype": "text/x-python", 552 | "name": "python", 553 | "nbconvert_exporter": "python", 554 | "pygments_lexer": "ipython3", 555 | "version": "3.6.9" 556 | } 557 | }, 558 | "nbformat": 4, 559 | "nbformat_minor": 4 560 | } 561 | -------------------------------------------------------------------------------- /DL/RNN.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## A vanilla character-level language model RNN\n", 8 | "\n", 9 | "Inspired by https://gist.github.com/karpathy/d4dee566867f8291f086\n", 10 | "\n", 11 | "I will try to generate shakespeare-like text.\n", 12 | "You can download the .txt file from https://raw.githubusercontent.com/bbejeck/hadoop-algorithms/master/src/shakespeare.txt" 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": 1, 18 | "metadata": {}, 19 | "outputs": [], 20 | "source": [ 21 | "import numpy as np" 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "- Each character has a unique one-hot encoded vector\n", 29 | "- This vector is the input for the RNN\n", 30 | "- Output is a vector with the same size for predicting the next char" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 62, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "# read data\n", 40 | "data = open('shakespeare.txt', 'r').read()\n", 41 | "chars = list(set(data))\n", 42 | "ds, vs = len(data), len(chars)\n", 43 | "char_to_ix = {ch:i for i,ch in enumerate(chars)}\n", 44 | "ix_to_char = {i:ch for i,ch in enumerate(chars)}" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 63, 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [ 53 | "# hyperparams\n", 54 | "hidden_size = 100\n", 55 | "seq_len = 25\n", 56 | "lr = 1e-1" 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": 73, 62 | "metadata": {}, 63 | "outputs": [], 64 | "source": [ 65 | "# model params\n", 66 | "Wxh = np.random.randn(hidden_size, vs)*0.01\n", 67 | "Whh = np.random.randn(hidden_size, hidden_size)*0.01\n", 68 | "Why = np.random.randn(vs, hidden_size)*0.01\n", 69 | "bh = np.zeros((hidden_size, 1))\n", 70 | "by = np.zeros((vs, 1))" 71 | ] 72 | }, 73 | { 74 | "cell_type": "code", 75 | "execution_count": 79, 76 | "metadata": {}, 77 | "outputs": [], 78 | "source": [ 79 | "def loss_fn(inputs, targets, h_prev):\n", 80 | " \"Return the loss, gradients and last model state\"\n", 81 | " xs,hs,ys,ps = {},{},{},{}\n", 82 | " hs[-1] = np.copy(h_prev)\n", 83 | " loss = 0\n", 84 | " \n", 85 | " # forward pass\n", 86 | " for t in 
range(len(inputs)):\n", 87 | " # create one-hot vector\n", 88 | " xs[t] = np.zeros((vs,1))\n", 89 | " xs[t][inputs[t]] = 1\n", 90 | " \n", 91 | " hs[t] = np.tanh(np.dot(Wxh, xs[t]) + np.dot(Whh, hs[t-1]) + bh)\n", 92 | " ys[t] = np.dot(Why, hs[t]) + by\n", 93 | " ps[t] = np.exp(ys[t]) / np.sum(np.exp(ys[t]))\n", 94 | " loss += -np.log(ps[t][targets[t],0])\n", 95 | " \n", 96 | " dWxh, dWhh, dWhy = np.zeros_like(Wxh), np.zeros_like(Whh), np.zeros_like(Why)\n", 97 | " dbh, dby = np.zeros_like(bh), np.zeros_like(by)\n", 98 | " dhnext = np.zeros_like(hs[0])\n", 99 | " \n", 100 | " # backward pass\n", 101 | " for t in reversed(range(len(inputs))):\n", 102 | " # gradient accumalation\n", 103 | " dy = np.copy(ps[t])\n", 104 | " dy[targets[t]] -= 1\n", 105 | " dWhy += np.dot(dy, hs[t].T)\n", 106 | " dby += dy\n", 107 | " dh = np.dot(Why.T, dy) + dhnext\n", 108 | " dhraw = (1 - hs[t] * hs[t]) * dh\n", 109 | " dbh += dhraw\n", 110 | " dWxh += np.dot(dhraw, xs[t].T)\n", 111 | " dWhh += np.dot(dhraw, hs[t-1].T)\n", 112 | " dhnext += np.dot(Whh.T, dhraw)\n", 113 | " # gradient clipping\n", 114 | " for dparam in [dWxh, dWhh, dWhy, dbh, dby]:\n", 115 | " np.clip(dparam, -5, 5, out=dparam)\n", 116 | " return loss, dWxh, dWhh, dWhy, dbh, dby, hs[len(inputs)-1]" 117 | ] 118 | }, 119 | { 120 | "cell_type": "code", 121 | "execution_count": 80, 122 | "metadata": {}, 123 | "outputs": [], 124 | "source": [ 125 | "def sample(h, seed_ix, n):\n", 126 | " x = np.zeros((vs, 1))\n", 127 | " x[seed_ix] = 1\n", 128 | " ixes = []\n", 129 | " for t in range(n):\n", 130 | " h = np.tanh(np.dot(Wxh, x) + np.dot(Whh, h) + bh)\n", 131 | " y = np.dot(Why, h) + by\n", 132 | " p = np.exp(y) / np.sum(np.exp(y))\n", 133 | " ix = np.random.choice(range(vs), p=p.ravel())\n", 134 | " x = np.zeros((vs, 1))\n", 135 | " x [ix] = 1\n", 136 | " ixes.append(ix)\n", 137 | " return ixes" 138 | ] 139 | }, 140 | { 141 | "cell_type": "code", 142 | "execution_count": 81, 143 | "metadata": {}, 144 | "outputs": [], 145 | "source": [ 146 | "n,p = 0,0\n", 147 | "mWxh, mWhh, mWhy = np.zeros_like(Wxh), np.zeros_like(Whh), np.zeros_like(Why)\n", 148 | "mbh, mby = np.zeros_like(bh), np.zeros_like(by)\n", 149 | "smooth_loss = -np.log(1.0/vs)*seq_len" 150 | ] 151 | }, 152 | { 153 | "cell_type": "code", 154 | "execution_count": 86, 155 | "metadata": {}, 156 | "outputs": [ 157 | { 158 | "name": "stdout", 159 | "output_type": "stream", 160 | "text": [ 161 | "---- no ams of\n", 162 | " And of Mot.\n", 163 | " MLUCLELORUULERENA. Thath abe yon oups that heas!\n", 164 | " Tal. an mrou,\n", 165 | " cad. Heurs!\n", 166 | " Cive thou., Orand ig imr enoet iwince. arter thicher, of mary -----\n", 167 | "200000, 56.313353006269104\n", 168 | "210000, 55.58635764879004\n", 169 | "220000, 59.75383056823793\n", 170 | "230000, 55.53149624307009\n", 171 | "240000, 53.17445407567616\n", 172 | "250000, 53.05513729383428\n", 173 | "260000, 54.17103746762103\n", 174 | "270000, 54.2036787607943\n", 175 | "280000, 54.66659726264434\n", 176 | "290000, 53.74294854684522\n", 177 | "---- eot plend pitllends and motte fele?\n", 178 | " SAll theily marordin latk\n", 179 | " t's eneps all houl den wach weep mer ding his maal. leir thand ly of thind pord?\n", 180 | " LUS.\n", 181 | " IMIAG WHERCALES. 
Sing, yu -----\n", 182 | "300000, 53.32602972139323\n", 183 | "310000, 52.76751403318295\n", 184 | "320000, 52.5123402487749\n", 185 | "330000, 53.86712508969286\n", 186 | "340000, 53.02882425142762\n", 187 | "350000, 51.91591061747107\n", 188 | "360000, 52.55760828253293\n", 189 | "370000, 50.91464276547788\n", 190 | "380000, 52.35740081134182\n", 191 | "390000, 52.712696773067876\n", 192 | "---- bom unccost.\n", 193 | " ARLINERALO. Bod.\n", 194 | " BONRIO. Ay faspalf of\n", 195 | " Antants\n", 196 | " Whos sor sarerigo.\n", 197 | " CRANG SY I ISNY\n", 198 | " Theeall ckeme. A'tur'stal elein enselys\n", 199 | " whye; Hood the bafr, of End of anquch then m -----\n", 200 | "400000, 52.818967492831035\n", 201 | "410000, 51.66403434822723\n", 202 | "420000, 52.944509841469134\n", 203 | "430000, 52.66135921404078\n", 204 | "440000, 52.24758336922776\n", 205 | "450000, 52.19869955039481\n", 206 | "460000, 53.05917093425105\n", 207 | "470000, 50.76806917615877\n", 208 | "480000, 51.42724106129685\n", 209 | "490000, 51.927806208448466\n", 210 | "---- erious enothing heyed. By tighsert\n", 211 | " heand, that, horfourath of the ho)st chede forpaed in bealk owe bighar, se yosind of f your mey I in plopth\n", 212 | " Shisge\n", 213 | " Whereerh hnooningor to the gantl than fon -----\n", 214 | "500000, 51.95921993623966\n", 215 | "510000, 51.549263253959765\n", 216 | "520000, 50.732148722792\n", 217 | "530000, 51.179931466438155\n", 218 | "540000, 50.50066459384982\n", 219 | "550000, 52.90142523854136\n", 220 | "560000, 51.19757265694255\n", 221 | "570000, 52.2259770140211\n", 222 | "580000, 51.35165875432284\n", 223 | "590000, 48.69616614012869\n", 224 | "---- ialg.\n", 225 | " LED KIUFUSENOG Dine hiss scuspoiny\n", 226 | " Uwen weatme, ?ord'd nof, drast the oot it ot ndads thou inds.\n", 227 | " KERTHAMDR WOene, to, grou. Tice hingly co hey the oud sparded dech-be outh'd out game, to -----\n", 228 | "600000, 50.501049999281854\n", 229 | "610000, 50.673878085101\n", 230 | "620000, 51.649700368144195\n", 231 | "630000, 50.33866666898141\n", 232 | "640000, 51.23307367557291\n", 233 | "650000, 50.744521237450435\n", 234 | "660000, 50.28257297143637\n", 235 | "670000, 51.10565925782941\n", 236 | "680000, 53.31705346916214\n", 237 | "690000, 50.53191002310491\n", 238 | "---- r' Lalry doul hiis\n", 239 | " That beett to onovo suby fovevell,' totwer.\n", 240 | " Qur chis of haugh of gif thist' whuth'd my bipsers in wny, Bar., I badnoo sile, dellser hiy. 
Pode?\n", 241 | " I upant eare and bor love -----\n", 242 | "700000, 51.14371871853593\n" 243 | ] 244 | }, 245 | { 246 | "ename": "KeyboardInterrupt", 247 | "evalue": "", 248 | "output_type": "error", 249 | "traceback": [ 250 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", 251 | "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)", 252 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 14\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 15\u001b[0m \u001b[0;31m# forward pass\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 16\u001b[0;31m \u001b[0mloss\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdWxh\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdWhh\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdWhy\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdbh\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdby\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mdhprev\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mloss_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0minputs\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtargets\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhprev\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 17\u001b[0m \u001b[0msmooth_loss\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0msmooth_loss\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0;36m0.999\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mloss\u001b[0m \u001b[0;34m*\u001b[0m \u001b[0;36m0.001\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 18\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mn\u001b[0m\u001b[0;34m%\u001b[0m\u001b[0;36m10000\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34mf'{n}, {smooth_loss}'\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 253 | "\u001b[0;32m\u001b[0m in \u001b[0;36mloss_fn\u001b[0;34m(inputs, targets, h_prev)\u001b[0m\n\u001b[1;32m 11\u001b[0m \u001b[0mxs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mt\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0minputs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mt\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;36m1\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 12\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 13\u001b[0;31m \u001b[0mhs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mt\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtanh\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mWxh\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mxs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mt\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mWhh\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mt\u001b[0m\u001b[0;34m-\u001b[0m\u001b[0;36m1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mbh\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 14\u001b[0m \u001b[0mys\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mt\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdot\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mWhy\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhs\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mt\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m+\u001b[0m \u001b[0mby\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 15\u001b[0m \u001b[0mps\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mt\u001b[0m\u001b[0;34m]\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexp\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mys\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mt\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;34m/\u001b[0m \u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0msum\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnp\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mexp\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mys\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mt\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n", 254 | "\u001b[0;32m<__array_function__ internals>\u001b[0m in \u001b[0;36mdot\u001b[0;34m(*args, **kwargs)\u001b[0m\n", 255 | "\u001b[0;31mKeyboardInterrupt\u001b[0m: " 256 | ] 257 | } 258 | ], 259 | "source": [ 260 | "while True:\n", 261 | " if p+seq_len+1 >= len(data) or n==0:\n", 262 | " # reset RNN memory\n", 263 | " hprev = np.zeros((hidden_size, 1))\n", 264 | " p = 0\n", 265 | " inputs = [char_to_ix[ch] for ch in data[p:p+seq_len]]\n", 266 | " targets = [char_to_ix[ch] for ch in data[p+1:p+seq_len+1]]\n", 267 | " \n", 268 | " # sample RNN every 100 step\n", 269 | " if n%100000 == 0:\n", 270 | " sample_ix = sample(hprev, inputs[0], 200)\n", 271 | " txt = ''.join(ix_to_char[ix] for ix in sample_ix)\n", 272 | " print(f'---- {txt} -----')\n", 273 | " \n", 274 | " # forward pass\n", 275 | " loss, dWxh, dWhh, dWhy, dbh, dby, dhprev = loss_fn(inputs, targets, hprev)\n", 276 | " smooth_loss = smooth_loss * 0.999 + loss * 0.001\n", 277 | " if n%10000 == 0: print(f'{n}, {smooth_loss}')\n", 278 | " \n", 279 | " \n", 280 | " # param update with Adagrad\n", 281 | " for param, dparam, mem in zip([Wxh, Whh, Why, bh, by], \n", 282 | " [dWxh, dWhh, dWhy, dbh, dby], \n", 283 | " [mWxh, mWhh, mWhy, mbh, mby]):\n", 284 | " mem += dparam * dparam\n", 285 | " param += -lr * dparam / np.sqrt(mem + 1e-8)\n", 286 | " \n", 287 | " p += seq_len\n", 288 | " n += 1" 289 | ] 290 | }, 291 | { 292 | "cell_type": "markdown", 293 | "metadata": {}, 294 | "source": [ 295 | "### Not bad for a super simple vanilla RNN trained in a few minutes!\n", 296 | "\n", 297 | "- It pretty quickly plateus at around 50.\n", 298 | "- In the next step we will do the same with a LSTM and see how much better it is." 
299 | ] 300 | } 301 | ], 302 | "metadata": { 303 | "kernelspec": { 304 | "display_name": "Python 3", 305 | "language": "python", 306 | "name": "python3" 307 | }, 308 | "language_info": { 309 | "codemirror_mode": { 310 | "name": "ipython", 311 | "version": 3 312 | }, 313 | "file_extension": ".py", 314 | "mimetype": "text/x-python", 315 | "name": "python", 316 | "nbconvert_exporter": "python", 317 | "pygments_lexer": "ipython3", 318 | "version": "3.6.9" 319 | } 320 | }, 321 | "nbformat": 4, 322 | "nbformat_minor": 4 323 | } 324 | -------------------------------------------------------------------------------- /ML/KNN.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## KNN\n", 8 | "\n", 9 | "Very simple multi-classification." 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 6, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "import numpy as np\n", 19 | "import matplotlib.pyplot as plt\n", 20 | "from sklearn.datasets import make_classification as mc" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": 55, 26 | "metadata": {}, 27 | "outputs": [], 28 | "source": [ 29 | "n = 50\n", 30 | "nn = 4" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 11, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "X, y = mc(n, 1, n_informative=1, n_redundant=0, n_classes=2, n_clusters_per_class=1)" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": 12, 45 | "metadata": {}, 46 | "outputs": [], 47 | "source": [ 48 | "X0 = X[np.where(y==0)]\n", 49 | "y0 = np.zeros(X0.shape)\n", 50 | "X1 = X[np.where(y==1)]\n", 51 | "y1 = np.ones(X1.shape)" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": 13, 57 | "metadata": {}, 58 | "outputs": [ 59 | { 60 | "data": { 61 | "text/plain": [ 62 | "" 63 | ] 64 | }, 65 | "execution_count": 13, 66 | "metadata": {}, 67 | "output_type": "execute_result" 68 | }, 69 | { 70 | "data": { 71 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAQLklEQVR4nO3df4wc91nH8c/He7lUl5RWujuE8I87I7kIE0UkXqWtKkGQjXACssWPIlsqqFB6ql0jJCpQoqAEpfIfEAkhwKg1IqrUO5IYEGAVVy6gVJUQCV6raRQndXU1SWyDmkso5Y+oBDcPf+xebm69P2bv9m7unnu/pNHdzDz7nWe+9X6yndm9dUQIALD5bau6AQDAcBDoAJAEgQ4ASRDoAJAEgQ4ASYxUdeCJiYmYnp6u6vAAsCldvHjx9YiY7LSvskCfnp5Wo9Go6vAAsCnZfqXbPi65AEASBDoAJEGgA0ASBDoAJEGgA0ASfQPd9uO2X7P9Qpf9tv3HtudtP2/77uG3CWC9zc1J09PStm3Nn3NzvWsnJiT75mVi4ubHttfffntzvXis9pparflzZGT5+CMj0oEDy3s9cGCp3pZuvXVprMXHT09Lx483j108xvHjazNHK6kfWET0XCT9uKS7Jb3QZf/9kr4oyZI+IOnZfmNGhPbt2xcANqbZ2YixsQhpaRkba27vVDs6ury2fRkdXXps2fpt23rXrOVy7Nhw52gl9d1IakSXXHWU+PO5tqclfSEi7uiw77OSvhwRT7TWL0u6NyL+s9eY9Xo9eB86sDFNT0uvdHi389SU9PLL5Wq7PbZsfZVqNenGjd41g8zRSuq7sX0xIuqd9g3jGvp2SVcL69da2zo1MmO7YbuxsLAwhEMDWAuvvlp+e7fabnVl66v0ve/1rxlkjlayfSXW9aZoRJyOiHpE1CcnO35yFcAGsGtX+e3darvVla2vUq3Wv2aQOVrJ9pUYRqBfl7SzsL6jtQ3AJnXypDQ2tnzb2Fhze6fa0dHe442OLj22bP22Ct+DNzPTv2aQOVpJ/Yp0u7heXCRNq/tN0Z/R8pui/1ZmTG6KAhvb7GzE1FSE3fzZ6+bd7GzE+HjnG4zj4zc/tr3+ttua68Vjtdcs3iSt1ZaPX6tF7N+/vNf9+5ffVB0dXRpr8fFTU82bn7fdtvwYZW6IrmSOVlLfiVZzU9T2E5LulTQh6VuSHpF0S+s/Bp+xbUl/KumgpDcl/WpE9L3byU1RABhcr5uiff/aYkQc7bM/JH1yhb0BAIaET4oCQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBKlAt32QduXbc/bfqDD/l22n7b9VdvP275/+K0CAHrpG+i2a5JOSbpP0l5JR23vbSv7XUlnIuIuSUck/dmwGwUA9FbmFfo9kuYj4kpEvCXpSUmH22pC0ve1fn+PpP8YXosAgDLKBPp2SVcL69da24p+T9JHbF+TdE7Sb3QayPaM7YbtxsLCwgraBQB0M6ybokclfS4idki6X9Lnbd80dkScjoh6RNQnJyeHdGgAgFQu0K9L2llY39HaVvQxSWckKSL+VdK7JE0Mo0EAQDllAv2CpD22d9seVfOm59m2mlcl7Zck2z+iZqBzTQUA1lHfQI+IG5JOSDov6SU1381yyfajtg+1yj4l6eO2vybpCUkfjYhYq6YBADcbKVMUEefUvNlZ3PZw4fcXJX1ouK0BAAbBJ0UBIAkCHQCSINABIAkCHQCSINABIAkCHQCSINABIAkCHQCSINABIAkCHQCSINABIAkCHQCSINABIAkCHQCSINABIAkCHQCSINABIAkCHQCSINABIAkCHQCSINABIAkCHQCSINABIAkCHQCSINABIAkCHQCSINABIAkCHQCSINABIAkCHQCSKBXotg/avmx73vYDXWp+yfaLti/Z/svhtgkA6GekX4HtmqRTkn5K0jVJF2yfjYgXCzV7JD0o6UMR8W3b379WDQMAOivzCv0eSfMRcSUi3pL0pKTDbTUfl3QqIr4tSRHx2nDbBAD0UybQt0u6Wli/1tpW9D5J77P9L7afsX2w00C2Z2w3bDcWFhZW1jEAoKNh3RQdkbRH0r2Sjkr6c9vvbS+KiNMRUY+I+uTk5JAODQCQygX6dUk7C+s7WtuKrkk6GxH/FxH/LukbagY8AGCdlAn0C5L22N5te1TSEUln22r+Ts1X57I9oeYlmCtD7BMA0EffQI+IG5JOSDov6SVJZyLiku1HbR9qlZ2X9IbtFyU9Lem3I+KNtWoaAHAzR0QlB67X69FoNCo5NgBsVrYvRkS90z4+KQoASRDoAJAEgQ4ASRDoAJAEgQ4ASRDoAJAEgQ4ASRDoAJAEgQ4ASRDoAJAEgQ4ASRDoAJAEgQ4ASRDoAJAEgQ4ASRDoAJAEgQ4ASRDoAJAEgQ4ASRDoAJAEgQ4ASRDoAJAEgQ4ASRDoAJAEgQ4ASRDoAJAEgQ4ASRDoAJAEgQ4ASRDoAJAEgQ4ASZQKdNsHbV+2PW/7gR51v2A7bNeH1yIAoIy+gW67JumUpPsk7ZV01PbeDnXvlvSbkp4ddpMAgP7KvEK/R9J8RFyJiLckPSnpcIe6T0v6fUnfHWJ/AICSygT6dklXC+vXWtveYftuSTsj4h96DWR7xnbDdmNhYWHgZgEA3a36pqjtbZL+UNKn+tVGxOmIqEdEfXJycrWHBgAUlAn065J2FtZ3tLYterekOyR92fbLkj4g6Sw3RgFgfZUJ9AuS9tjebXtU0hFJZxd3RsR3ImIiIqYjYlrSM5IORURjTToGAHTUN9Aj4oakE5LOS3pJ0pmIuGT7UduH1rpBAEA5I2WKIuKcpHNt2x7uUnvv6tsCAAyKT4oCQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkQaADQBIEOgAkUSrQbR+0fdn2vO0HOuz/Ldsv2n7e9j/bnhp+qwCAXvoGuu2apFOS7pO0V9JR23vbyr4qqR4Rd0r6a0l/MOxGAQC9lXmFfo+k+Yi4EhFvSXpS0uFiQUQ8HRFvtlafkbRjuG0CAPopE+jbJV0trF9rbevmY5K+2GmH7RnbDduNhYWF8l0CAPoa6k1R2x+RVJf0WKf9EXE6IuoRUZ+cnBzmoQFgyxspUXNd0s7C+o7WtmVsH5D0kKSfiIj/HU57AICyyrxCvyBpj+3dtkclHZF0tlhg+y5Jn5V0KCJeG36bAIB++gZ6RNyQdELSeUkvSToTEZdsP2r7UKvsMUm3S/or28/ZPttlOA
DAGilzyUURcU7SubZtDxd+PzDkvgAAA+KTogCQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQRKlAt33Q9mXb87Yf6LD/VttPtfY/a3t62I1KkubmpOlpadu25s+5uTU5TKXKnuNq52JuTpqYkOylZWJCOn68/7i9jt0+7sTEcM6hWDsx0VwWH1fsubivva59/MUxbWlkpPmzWx/Hjy/VjIw011eqzHmXPV5Vz4mt8FzcjCKi5yKpJumbkn5I0qikr0na21ZzXNJnWr8fkfRUv3H37dsXA5mdjRgbi5CWlrGx5vYsyp7jaudidjZidHT547st7eP2Ona3cW+5ZXXn0Kl2JUtx/F5jtvdx7FjnumPHys33oOdd9nhVPSe2wnNxA5PUiG553W3HOwXSByWdL6w/KOnBtprzkj7Y+n1E0uuS3GvcgQN9aqrzP/KpqRVPzIZT9hxXOxfdHt9tKY7b69i9xl3NOQzab5lz6TdmsY9arXNNrVZuvgc977LHq+o5sRWeixtYr0B3c393tn9R0sGI+PXW+i9Len9EnCjUvNCqudZa/2ar5vW2sWYkzUjSrl279r3yyivl/6/Etm3NfzY3Nyi9/Xb5cTaysue42rno9vhuiuP2OrbUfdzVnMOg/fayOH6/MYt9LJ5bJ4P2Vea8yx6vqufEVngubmC2L0ZEvdO+db0pGhGnI6IeEfXJycnBHrxr12DbN6Oy57jauRh0zor1vY7da9zVnMMw/zdeHKvfmMX9tVrnmm7by4672uNV9ZzYCs/FTapMoF+XtLOwvqO1rWON7RFJ75H0xjAafMfJk9LY2PJtY2PN7VmUPcfVzsXJk9LoaLna9nF7HbvbuLfcsrpz6FS7EsXxe43Z3sfMTOe6btt7KXPeZY9X1XNiKzwXN6tu12IWFzWviV+RtFtLN0V/tK3mk1p+U/RMv3EHvoYe0bzpMjUVYTd/ZrwJU/YcVzsXs7MR4+PLr4GOjzdvvPUbt9ex28cdHx/OORRrx8eby+Ljij0X97XXdboxu3g9ePG6dbc+jh1bqqnVVnZDdJDzLnu8qp4TW+G5uEFpNdfQJcn2/ZL+SM13vDweESdtP9oa+Kztd0n6vKS7JP2XpCMRcaXXmPV6PRqNxgr+EwQAW1eva+gjZQaIiHOSzrVte7jw+3clfXg1TQIAVodPigJAEgQ6ACRBoANAEgQ6ACRR6l0ua3Jge0HSAB8V1YSaf1IAzEURc7GEuViSeS6mIqLjJzMrC/RB2W50e6vOVsNcLGEuljAXS7bqXHDJBQCSINABIInNFOinq25gA2EuljAXS5iLJVtyLjbNNXQAQG+b6RU6AKAHAh0Aktg0gW7707aft/2c7S/Z/sGqe6qK7cdsf701H39r+71V91QV2x+2fcn227a33NvUpP5f4r6V2H7c9mutb1HbcjZNoEt6LCLujIgfk/QFSQ/3e0Bi/yjpjoi4U9I31Pye163qBUk/L+krVTdSBds1Sack3Sdpr6SjtvdW21WlPifpYNVNVGXTBHpE/E9h9TZJW/ZubkR8KSJutFafUfNbpLakiHgpIi5X3UeF7pE0HxFXIuItSU9KOlxxT5WJiK+o+Z0MW1Kpv4e+Udg+KelXJH1H0k9W3M5G8WuSnqq6CVRmu6SrhfVrkt5fUS+o2IYKdNv/JOkHOux6KCL+PiIekvSQ7QclnZD0yLo2uI76zUWr5iFJNyTNrWdv663MXADYYIEeEQdKls6p+Q1KaQO931zY/qikn5W0P5J/mGCAfxdbUZkvcccWsWmuodveU1g9LOnrVfVSNdsHJf2OpEMR8WbV/aBSFyTtsb3b9qiaX9J+tuKeUJFN80lR238j6Yclva3mn939RERsyVcitucl3SrpjdamZyLiExW2VBnbPyfpTyRNSvpvSc9FxE9X29X66vQl7hW3VBnbT0i6V80/n/stSY9ExF9U2tQ62jSBDgDobdNccgEA9EagA0ASBDoAJEGgA0ASBDoAJEGgA0ASBDoAJPH/hA89vTc1csgAAAAASUVORK5CYII=\n", 72 | "text/plain": [ 73 | "
" 74 | ] 75 | }, 76 | "metadata": { 77 | "needs_background": "light" 78 | }, 79 | "output_type": "display_data" 80 | } 81 | ], 82 | "source": [ 83 | "plt.scatter(X0, y0, c='red')\n", 84 | "plt.scatter(X1, y1, c='blue')" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": 30, 90 | "metadata": {}, 91 | "outputs": [], 92 | "source": [ 93 | "def mse(x0,y0,x1,y1): return 1/2*((x0-x1)**2+(y0-y1)**2)" 94 | ] 95 | }, 96 | { 97 | "cell_type": "markdown", 98 | "metadata": {}, 99 | "source": [ 100 | "Define a new data point to classify" 101 | ] 102 | }, 103 | { 104 | "cell_type": "code", 105 | "execution_count": 31, 106 | "metadata": {}, 107 | "outputs": [], 108 | "source": [ 109 | "n_p = np.array([1, 0.6])" 110 | ] 111 | }, 112 | { 113 | "cell_type": "code", 114 | "execution_count": 34, 115 | "metadata": {}, 116 | "outputs": [], 117 | "source": [ 118 | "assert mse(1, 2, 1, 2) == 0" 119 | ] 120 | }, 121 | { 122 | "cell_type": "markdown", 123 | "metadata": {}, 124 | "source": [ 125 | "Calculate the distance of the new point to every other point and return the distances with the corresponding class." 126 | ] 127 | }, 128 | { 129 | "cell_type": "code", 130 | "execution_count": 82, 131 | "metadata": {}, 132 | "outputs": [], 133 | "source": [ 134 | "def cmse(X,y,n_p):\n", 135 | " err=[]\n", 136 | " for i,j in zip(X,y):\n", 137 | " err.append([mse(i[0],j,n_p[0],n_p[1]),j])\n", 138 | " return np.array(sorted(err, key=lambda err: err[0]))" 139 | ] 140 | }, 141 | { 142 | "cell_type": "code", 143 | "execution_count": 83, 144 | "metadata": {}, 145 | "outputs": [], 146 | "source": [ 147 | "n_p_mse = cmse(X,y,n_p)" 148 | ] 149 | }, 150 | { 151 | "cell_type": "markdown", 152 | "metadata": {}, 153 | "source": [ 154 | "Return the most occouring class." 155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": 100, 160 | "metadata": {}, 161 | "outputs": [], 162 | "source": [ 163 | "def cnn(cmse):\n", 164 | " cs = cmse[:nn,1]\n", 165 | " (values,counts) = np.unique(cs,return_counts=True)\n", 166 | " ind = np.argmax(counts)\n", 167 | " return values[ind]" 168 | ] 169 | }, 170 | { 171 | "cell_type": "code", 172 | "execution_count": 103, 173 | "metadata": {}, 174 | "outputs": [], 175 | "source": [ 176 | "c = cnn(n_p_mse)" 177 | ] 178 | }, 179 | { 180 | "cell_type": "markdown", 181 | "metadata": {}, 182 | "source": [ 183 | "The new point is classified correctly." 
184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": 107, 189 | "metadata": {}, 190 | "outputs": [], 191 | "source": [ 192 | "if c == 0: color='red'\n", 193 | "if c == 1: color='blue'" 194 | ] 195 | }, 196 | { 197 | "cell_type": "code", 198 | "execution_count": 109, 199 | "metadata": {}, 200 | "outputs": [ 201 | { 202 | "data": { 203 | "text/plain": [ 204 | "" 205 | ] 206 | }, 207 | "execution_count": 109, 208 | "metadata": {}, 209 | "output_type": "execute_result" 210 | }, 211 | { 212 | "data": { 213 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXQAAAD4CAYAAAD8Zh1EAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4yLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy+j8jraAAAQkUlEQVR4nO3dbYxc51nG8eva2WyqTUsr7S5C+GXXSC7CRBGJR2mrShBkA05AtngpsuWCCqWr2jVCogIlMkpQKn+ASAgBRq0RUaXukpeCAKu4cgG1qoRI8FhNozipq62JYxvUbEIpH6wSnNx8mNns7HhezuyO9+ze8/9Jo93znHuec5+nnivbc2Z2HRECAGx+I2U3AAAYDAIdAJIg0AEgCQIdAJIg0AEgidGyDjw5ORkzMzNlHR4ANqXz58+/FhFT7faVFugzMzOq1WplHR4ANiXblzvt45ILACRBoANAEgQ6ACRBoANAEgQ6ACTRM9BtP277VdsvdNhv239ie8H287bvGXybANbb/Lw0MyONjNS/zs93r52clOybH5OTNz+3tf6d76xvNx+rtaZSqX8dHV05/+iotHfvyl737l2ut6Xbb1+ea+n5MzPS0aP1Yzcf4+jRW7NGq6nvW0R0fUj6cUn3SHqhw/4HJH1RkiW9X9KzveaMCO3evTsAbExzcxHj4xHS8mN8vD7ernZsbGVt62NsbPm5RetHRrrX3MrHkSODXaPV1HciqRYdctVR4Nfn2p6R9IWIuLPNvs9I+kpEPNHYvijpvoj4z25zVqvV4H3owMY0MyNdbvNu5+lp6eWXi9V2em7R+jJVKtKNG91r+lmj1dR3Yvt8RFTb7RvENfQtkq40bV9tjLVrZNZ2zXZtcXFxAIcGcCu88krx8U61neqK1pfpzTd71/SzRqsZX411vSkaEaciohoR1amptp9cBbABbN9efLxTbae6ovVlqlR61/SzRqsZX41BBPo1Sduatrc2xgBsUidOSOPjK8fGx+vj7WrHxrrPNza2/Nyi9SMlvgdvdrZ3TT9rtJr6Vel0cb35IWlGnW+K/qxW3hT9tyJzclMU2Njm5iKmpyPs+tduN+/m5iImJtrfYJyYuPm5rfV33FHfbj5Wa83STdJKZeX8lUrEnj0re92zZ+VN1bGx5bmWnj89Xb/5eccdK49R5IboatZoNfXtaC03RW0/Iek+SZOSvi3pEUm3Nf5j8GnblvRnkvZJui7p1yKi591ObooCQP+63RTt+dsWI+JQj/0h6ROr7A0AMCB8UhQAkiDQASAJAh0AkiDQASAJAh0AkiDQASAJAh0AkiDQASAJAh0AkiDQASAJAh0AkiDQASAJAh0AkiDQASAJAh0AkiDQASAJAh0AkiDQASAJAh0AkiDQASAJAh0AkiDQASAJAh0AkiDQASAJAh0AkiDQASAJAh0AkiDQASAJAh0AkiDQASAJAh0AkigU6Lb32b5oe8H2g232b7f9Zdtfs/287QcG3yoAoJuegW67IumkpPsl7ZJ0yPaulrLfk/R0RNwt6aCkPx90owCA7or8hH6vpIWIuBQRb0h6UtKBlpqQ9H2N798t6T8G1yIAoIgigb5F0pWm7auNsWa/L+nDtq9KOiPpN9tNZHvWds12bXFxcRXtAgA6GdRN0UOSPhsRWyU9IOlztm+aOyJORUQ1IqpTU1MDOjQAQCoW6NckbWva3toYa/ZRSU9LUkT8q6R3SJocRIMAgGKKBPo5STtt77A9pvpNz9MtNa9I2iNJtn9E9UDnmgoArKOegR4RNyQdk3RW0kuqv5vlgu1Hbe9vlH1S0sdsf13SE5I+EhFxq5oGANxstEhRRJxR/WZn89jDTd+/KOmDg20NANAPPikKAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQBIEOAEkQ6ACQRKFAt73P9kXbC7Yf7FDzy7ZftH3B9l8Ntk0A6G1+XpqZkUZG6l/n58vuaH2N9iqwXZF0UtJPSboq6Zzt0xHxYlPNTkkPSfpgRHzH9vffqoYBoJ35eWl2Vrp+vb59+XJ9W5IOHy6vr/VU5Cf0eyUtRMSliHhD0pOSDrTUfEzSyYj4jiRFxKuDbRMAujt+fDnMl1y/Xh8fFkUCfYukK03bVxtjzd4r6b22/8X2M7b3tZvI9qztmu3a4uLi6joGgDZeeaW/8YwGdVN0VNJOSfdJOiTpL2y/p7UoIk5FRDUiqlNTUwM6NABI27f3N55RkUC/Jmlb0/bWxlizq5JOR8T/RcS/S/qm6gEPAOvixAlpfHzl2Ph4fXxYFAn0c5J22t5he0zSQUmnW2r+TvWfzmV7UvVLMJcG2CcAdHX4sHTqlDQ9Ldn1r6dODc8NUanAu1wi4obtY5LOSqpIejwiLth+VFItIk439v207RclvSnpdyLi9VvZOAC0Onx4uAK8lSOilANXq9Wo1WqlHBsANivb5yOi2m4fnxQFgCQIdABIgkAHgCQIdABIgkAHgCQIdABIgkAHgCQIdABIgkAHgCQIdABIgkAHgCQIdABIgkAHgCQIdABIgkAHgCQIdABIgkAHgCQIdABIgkAHgCQIdABIgkAHgCQIdABIgkAHgCQIdABIgkAHgCQIdABIgkAHgCQIdABIgkAHgCQIdABIgkAHgCQKBbrtfbYv2l6w/WCXul+0Hbarg2sRAFBEz0C3XZF0UtL9knZJOmR7V5u6d0n6LUnPDrpJAEBvRX5Cv1fSQkRciog3JD0p6UCbuk9J+gNJ3xtgfwCAgooE+hZJV5q2rzbG3mb7HknbIuIfuk1ke9Z2zXZtcXGx72YBAJ2t+aao7RFJfyTpk71qI+JURFQjojo1NbXWQwMAmhQJ9GuStjVtb22M
LXmXpDslfcX2y5LeL+k0N0YBYH0VCfRzknba3mF7TNJBSaeXdkbEdyNiMiJmImJG0jOS9kdE7ZZ0DABoq2egR8QNSccknZX0kqSnI+KC7Udt77/VDQIAihktUhQRZySdaRl7uEPtfWtvCwDQLz4pCgBJEOgAkASBDgBJEOgAkASBDgBJEOgAkASBDgBJEOgAkASBDgBJEOgAkASBDgBJEOgAkASBDgBJEOgAkASBDgBJEOgAkASBDgBJEOgAkASBDgBJEOgAkASBDgBJEOgAkASBDgBJEOgAkASBDgBJEOgAkASBDgBJEOgAkASBDgBJEOgAkEShQLe9z/ZF2wu2H2yz/7dtv2j7edv/bHt68K0CALrpGei2K5JOSrpf0i5Jh2zvain7mqRqRNwl6a8l/eGgGwUAdFfkJ/R7JS1ExKWIeEPSk5IONBdExJcj4npj8xlJWwfbJgCglyKBvkXSlabtq42xTj4q6YvtdtietV2zXVtcXCzeJQCgp4HeFLX9YUlVSY+12x8RpyKiGhHVqampQR4aAIbeaIGaa5K2NW1vbYytYHuvpOOSfiIi/ncw7QEAiiryE/o5STtt77A9JumgpNPNBbbvlvQZSfsj4tXBtwkA6KVnoEfEDUnHJJ2V9JKkpyPigu1Hbe9vlD0m6Z2SPm/7OdunO0wHALhFilxyUUSckXSmZezhpu/3DrgvAECf+KQoACRBoANAEgQ6ACRBoANAEgQ6ACRBoANAEgQ6ACRBoANAEgQ6ACRBoANAEgQ6ACRBoANAEgQ6ACRBoANAEgQ6ACRBoANAEgQ6ACRBoANAEgQ6ACRBoANAEgQ6ACRBoANAEgQ6ACRBoANAEgQ6ACRBoANAEgQ6ACRBoANAEgQ6ACRBoANAEgQ6ACRRKNBt77N90faC7Qfb7L/d9lON/c/anhl0o5Kk+XlpZkYaGal/nZ+/JYcpVdFzXOtazM9Lk5OSvfyYnJSOHu09b7djt847OTmYc2iunZysP5ae19xz877Wutb5l+a0pdHR+tdOfRw9ulwzOlrfXq0i5130eGW9JobhtbgZRUTXh6SKpG9J+iFJY5K+LmlXS81RSZ9ufH9Q0lO95t29e3f0ZW4uYnw8Qlp+jI/Xx7Moeo5rXYu5uYixsZXP7/RonbfbsTvNe9ttazuHdrWreTTP323O1j6OHGlfd+RIsfXu97yLHq+s18QwvBY3MEm16JTXnXa8XSB9QNLZpu2HJD3UUnNW0gca349Kek2Su83bd6BPT7f/Rz49veqF2XCKnuNa16LT8zs9muftduxu867lHPrtt8i59JqzuY9KpX1NpVJsvfs976LHK+s1MQyvxQ2sW6C7vr8z278kaV9E/EZj+1ckvS8ijjXVvNCoudrY/laj5rWWuWYlzUrS9u3bd1++fLn4/5UYGan/s7m5Qemtt4rPs5EVPce1rkWn53fSPG+3Y0ud513LOfTbbzdL8/eas7mPpXNrp9++ipx30eOV9ZoYhtfiBmb7fERU2+1b15uiEXEqIqoRUZ2amurvydu39ze+GRU9x7WuRb9r1lzf7djd5l3LOQzyf+OluXrN2by/Umlf02m86LxrPV5Zr4lheC1uUkUC/ZqkbU3bWxtjbWtsj0p6t6TXB9Hg206ckMbHV46Nj9fHsyh6jmtdixMnpLGxYrWt83Y7dqd5b7ttbefQrnY1mufvNmdrH7Oz7es6jXdT5LyLHq+s18QwvBY3q07XYpYeql8TvyRph5Zviv5oS80ntPKm6NO95u37GnpE/abL9HSEXf+a8SZM0XNc61rMzUVMTKy8BjoxUb/x1mvebsdunXdiYjDn0Fw7MVF/LD2vuefmfa117W7MLl0PXrpu3amPI0eWayqV1d0Q7ee8ix6vrNfEMLwWNyit5Rq6JNl+QNIfq/6Ol8cj4oTtRxsTn7b9Dkmfk3S3pP+SdDAiLnWbs1qtRq1WW8V/ggBgeHW7hj5aZIKIOCPpTMvYw03ff0/Sh9bSJABgbfikKAAkQaADQBIEOgAkQaADQBKF3uVySw5sL0rq46OimlT9VwqAtWjGWixjLZZlXovpiGj7yczSAr1ftmud3qozbFiLZazFMtZi2bCuBZdcACAJAh0AkthMgX6q7AY2ENZiGWuxjLVYNpRrsWmuoQMAuttMP6EDALog0AEgiU0T6LY/Zft528/Z/pLtHyy7p7LYfsz2Nxrr8be231N2T2Wx/SHbF2y/ZXvo3qYm9f4j7sPE9uO2X238FbWhs2kCXdJjEXFXRPyYpC9IerjXExL7R0l3RsRdkr6p+t95HVYvSPoFSV8tu5Ey2K5IOinpfkm7JB2yvavcrkr1WUn7ym6iLJsm0CPif5o275A0tHdzI+JLEXGjsfmM6n9FaihFxEsRcbHsPkp0r6SFiLgUEW9IelLSgZJ7Kk1EfFX1v8kwlAr9PvSNwvYJSb8q6buSfrLkdjaKX5f0VNlNoDRbJF1p2r4q6X0l9YKSbahAt/1Pkn6gza7jEfH3EXFc0nHbD0k6JumRdW1wHfVai0bNcUk3JM2vZ2/rrchaANhggR4RewuWzqv+F5TSBnqvtbD9EUk/J2lPJP8wQR//LoZRkT/ijiGxaa6h297ZtHlA0jfK6qVstvdJ+l1J+yPietn9oFTnJO20vcP2mOp/pP10yT2hJJvmk6K2/0bSD0t6S/Vfu/vxiBjKn0RsL0i6XdLrjaFnIuLjJbZUGts/L+lPJU1J+m9Jz0XEz5Tb1fpq90fcS26pNLafkHSf6r8+99uSHomIvyy1qXW0aQIdANDdprnkAgDojkAHgCQIdABIgkAHgCQIdABIgkAHgCQIdABI4v8BZ+Rcv0qv+zMAAAAASUVORK5CYII=\n", 214 | "text/plain": [ 215 | "
" 216 | ] 217 | }, 218 | "metadata": { 219 | "needs_background": "light" 220 | }, 221 | "output_type": "display_data" 222 | } 223 | ], 224 | "source": [ 225 | "plt.scatter(X0, y0, c='red')\n", 226 | "plt.scatter(X1, y1, c='blue')\n", 227 | "plt.scatter(n_p[0], n_p[1], c=color)" 228 | ] 229 | } 230 | ], 231 | "metadata": { 232 | "kernelspec": { 233 | "display_name": "Python 3", 234 | "language": "python", 235 | "name": "python3" 236 | }, 237 | "language_info": { 238 | "codemirror_mode": { 239 | "name": "ipython", 240 | "version": 3 241 | }, 242 | "file_extension": ".py", 243 | "mimetype": "text/x-python", 244 | "name": "python", 245 | "nbconvert_exporter": "python", 246 | "pygments_lexer": "ipython3", 247 | "version": "3.6.9" 248 | } 249 | }, 250 | "nbformat": 4, 251 | "nbformat_minor": 4 252 | } 253 | -------------------------------------------------------------------------------- /DL/Neural-Network-Input-Vector.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Neural Networks as vector operations\n", 8 | "\n", 9 | "You can think of a NN (Neural Network), as a function on vectors. You have linear operations like a Linear Layer or a Convolution and you have non linear operations like your activation function.\n", 10 | "\n", 11 | "In this notebook I want to visualize how these vector operations look like on a 2d vector input." 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": 27, 17 | "metadata": {}, 18 | "outputs": [], 19 | "source": [ 20 | "import numpy as np\n", 21 | "import matplotlib.pyplot as plt\n", 22 | "import torch\n", 23 | "import torch.nn.functional as F" 24 | ] 25 | }, 26 | { 27 | "cell_type": "markdown", 28 | "metadata": {}, 29 | "source": [ 30 | "Set a seed, we want this to be reproducible." 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 101, 36 | "metadata": {}, 37 | "outputs": [], 38 | "source": [ 39 | "%%capture\n", 40 | "torch.manual_seed(1)" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": 102, 46 | "metadata": {}, 47 | "outputs": [], 48 | "source": [ 49 | "origin = [0], [0]" 50 | ] 51 | }, 52 | { 53 | "cell_type": "markdown", 54 | "metadata": {}, 55 | "source": [ 56 | "X is a 2d array and our input. It looks like this." 
57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": 103, 62 | "metadata": {}, 63 | "outputs": [], 64 | "source": [ 65 | "x = torch.Tensor([[0.85,0.9]])" 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "execution_count": 104, 71 | "metadata": {}, 72 | "outputs": [ 73 | { 74 | "data": { 75 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAD4CAYAAADhNOGaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAARRUlEQVR4nO3de6ycdZ3H8fdHqjReAhSpILUWA3FTNME4QlBXWbmv0aJiUtbExkvQrHiNlyq7KohR3FVcI7o2qGmICl5AG2+1oCSr2UVOxY1WxNaioUK0WoLBC9j1u3/Mgx6Oc+g5nTlnzuH3fiWTeX6/5zsz3x+n4XOe55k5k6pCktSuB427AUnSeBkEktQ4g0CSGmcQSFLjDAJJatyScTewPx75yEfWqlWrxt2GJC0qW7du/XVVHTZ1flEGwapVq5iYmBh3G5K0qCT5+aB5Tw1JUuMMAklqnEEgSY0zCCSpcQaBJDXOIJCkxhkEktQ4g0CSGmcQSFLjDAJJapxBIEmNMwgkqXEGgSQ1ziCQpMYZBJLUOINAkhpnEEhS4wwCSWqcQSBJjTMIJKlxBoEkNc4gkKTGGQSS1DiDQJIaZxBIUuNGEgRJzkhyc5IdSdYP2H9gkiu7/dcnWTVl/8okdyV54yj6kSTN3NBBkOQA4FLgTGA1cE6S1VPKXgbcUVVHA5cAF0/ZfwnwtWF7kSTN3iiOCI4HdlTVzqq6B7gCWDOlZg2wsdv+PHBykgAkOQvYCWwbQS+SpFkaRRAcCdw6abyrmxtYU1V7gTuBQ5M8DHgLcMG+XiTJuUkmkkzs3r17BG1LkmA0QZABczXDmguAS6rqrn29SFVtqKpeVfUOO+yw/WhTkjTIkhE8xy7gMZPGK4DbpqnZlWQJcBCwBzgBODvJ+4CDgT8n+WNVfXgEfUmSZmAUQXADcEySo4BfAGuBf5pSswlYB/w3cDbwzaoq4O/vLUjyTuAuQ0CS5tfQQVBVe5OcB2wGDgA+UVXbklwITFTVJuDjwOVJdtA/Elg77OtKkkYj/V/MF5der1cTExPjbkOSFpUkW6uqN3XeTxZLUuMMAklqnEEgSY0zCCSpcQaBJDXOIJCkxhkEktQ4g0CSGmcQSFLjDAJJapxBIEmNMwgkqXEGgSQ1ziCQpMYZBJLUOINAkhpnEEhS4wwCSWqcQSBJjTMIJKlxBoEkNc4gkKTGGQSS1DiDQJIaZxBIUuMMAklqnEEgSY0zCCSpcQaBJDXOIJCkxo0kCJKckeTmJDuSrB+w/8AkV3b7r0+yqps/NcnWJD/o7p81in4kSTM3dBAkOQC4FDgTWA2ck2T1lLKXAXdU1dHAJcDF3fyvgedU1ROBdcDlw/YjSZqdURwRHA/sqKqdVXUPcAWwZkrNGmBjt/154OQkqaobq+q2bn4bsDTJgSPoSZI0Q6MIgiOBWyeNd3VzA2uqai9wJ3DolJoXADdW1d0j6EmSNENLRvAcGTBXs6lJciz900WnTfsiybnAuQArV66cfZeSpIFGcUSwC3jMpPEK4LbpapIsAQ4C9nTjFcDVwIur6qfTvUhVbaiqXlX1DjvssBG0LUmC0QTBDcAxSY5K8hBgLbBpSs0m+heDAc4GvllVleRg4CvAW6vqOyPoRZI0S0MHQXfO/zxgM3AT8Nmq2pbkwiTP7co+DhyaZAfwBuDet5ieBxwN/GuS73e35cP2JEmauVRNPZ2/8PV6vZqYmBh3G5K0qCTZWlW9qfN+sliSGmcQSFLjDAJJapxBIC1mi/AanxYeg0BajO65Bz7wAbj++nF3ogcAg0BaTKrgy1+GJz4RPvc5OOGEcXekB4BR/IkJSfPhppvg9a+HzZv7469/HTLor7dIs+MRgbTQ7dkDr31t/yjg3hA48UQ4bdo/zSXNikcE0kK1dy987GPw9rf3w2Cyd77TowGNjEEgLUTXXAOvex1s2/a3+576VDj11PnvSQ9YBoG00Hzta/CKV8Cttw7ef8EFHg1opLxGIC00Z54JP/95//TPVE97Gpx88ry3pAc2g0BaiL7xDXj3u/923qMBzQGDQFpovv1teN7z4E9/ggc/GD760f78058Oz3rWeHvTA5JBIC0kW7fCs58Nf/gDPOhB8OlPwytfCc98pkcDmjNeLJYWih/9CE4/HX772/74ssvg7LP725dcAscdN77e9IBmEEgLwc6dcMop8Jvf9Mf/8R/wkpf8df+TnjSevtQETw1J4/aLX/RD4Pbb++OLLoLXvGa8PakpBoE0Trt39z8cdsst/fGb3gRve9t4e1JzDAJpXO68s39N4Kab+uNXvhIuvtgLwpp3BoE0Dr/7Xf/dQTfe2B+/6EVw6aWGgMbCIJDm2913w/OfD9/5Tn+8Zg188pP9t4tKY+C/PGk+7d0L55zT/+Qw9P9cxBVX9D84Jo2JQSDNlz//GV76Urj66v74xBPhi1+EpUvH25eaZxBI86EKXv1quPzy/vi44+CrX4WHP3y8fUkYBNL8OP98+MhH+tuPf3z/m8YOPni8PUkdg0Caa+95T/8G8NjH9r90Zvny8fYkTWIQSHPp0kv/+gGxww/vh8CKFePtSZrCIJDmysaNcN55/e1ly2DLFjj66PH2JA1gEEhz4aqr+u8QAnjEI+DrX4cnPGG8PUnTMAikUdu8Gdau7b9ddOlS+PKX4SlPGXdX0rRGEgRJzkhyc5IdSdYP2H9gkiu7/dcnWTVp31u7+ZuTnD6KfqSxmfrtYlddBc94xri7ku7X0EGQ5ADgUuBMYDVwTpLVU8peBtxRVUcDlwAXd49dDawFjgXOAD7SPZ+0+Ez9drFPfar/RfTSAjeKI4LjgR1VtbOq7gGuANZMqVkDbOy2Pw+cnCTd/BVVdXdV3QLs6J5PWny2bLnvt4u98IXj7UeaoVF8Q9mRwK2TxruAE6arqaq9Se4EDu3m/2fKY48c9CJJzgXOBVi5cuUI2pZGbP36/pHA0qX3/XYxaYEbRRAM+ru5NcOamTy2P1m1AdgA0Ov1BtZIY/fmN4+7A2nWRnFqaBfwmEnjFcBt09UkWQIcBOyZ4WMlSXNoFEFwA3BMkqOSPIT+xd9NU2o2Aeu67bOBb1ZVdfNru3cVHQUcA3x3BD1JkmZo6FND3Tn/84DNwAHAJ6pqW5ILgYmq2gR8HLg8yQ76RwJru8duS/JZ4EfAXuBVVfV/w/YkSZq59H8xX1x6vV5NTEyMuw1JW
lSSbK2q3tR5P1ksSY0zCCSpcQaBJDXOIJCkxhkEktQ4g0CSGmcQSFLjDAJJapxBIEmNMwgkqXEGgSQ1ziCQpMYZBJLUOINAkhpnEEhS4wwCSWqcQSBJjTMIJKlxBoEkNc4gkKTGGQSS1DiDQJIaZxBIUuMMAklqnEEgSY0zCCSpcQaBJDXOIJCkxhkEktQ4g0CSGjdUECRZlmRLku3d/SHT1K3rarYnWdfNPTTJV5L8OMm2JO8dphdJ0v4Z9ohgPXBtVR0DXNuN7yPJMuAdwAnA8cA7JgXGv1fV3wFPAp6W5Mwh+5EkzdKwQbAG2NhtbwTOGlBzOrClqvZU1R3AFuCMqvp9VX0LoKruAb4HrBiyH0nSLA0bBI+qqtsBuvvlA2qOBG6dNN7Vzf1FkoOB59A/qpAkzaMl+ypIcg1w+IBd58/wNTJgriY9/xLgM8CHqmrn/fRxLnAuwMqVK2f40pKkfdlnEFTVKdPtS/LLJEdU1e1JjgB+NaBsF3DSpPEK4LpJ4w3A9qr64D762NDV0uv16v5qJUkzN+ypoU3Aum57HfClATWbgdOSHNJdJD6tmyPJRcBBwOuG7EOStJ+GDYL3Aqcm2Q6c2o1J0ktyGUBV7QHeBdzQ3S6sqj1JVtA/vbQa+F6S7yd5+ZD9SJJmKVWL7yxLr9eriYmJcbchSYtKkq1V1Zs67yeLJalxBoEkNc4gkKTGGQSS1DiDQJIaZxBIUuMMAklqnEEgSY0zCCSpcQaBJDXOIJCkxhkEktQ4g0CSGmcQSFLjDAJJapxBIEmNMwgkqXEGgSQ1ziCQpMYZBJLUOINAkhpnEEhS4wwCSWqcQSBJjTMIJKlxBoEkNc4gkKTGGQSS1DiDQJIaZxBIUuMMAklq3FBBkGRZki1Jtnf3h0xTt66r2Z5k3YD9m5L8cJheJEn7Z9gjgvXAtVV1DHBtN76PJMuAdwAnAMcD75gcGEmeD9w1ZB+SpP00bBCsATZ22xuBswbUnA5sqao9VXUHsAU4AyDJw4E3ABcN2YckaT8NGwSPqqrbAbr75QNqjgRunTTe1c0BvAt4P/D7fb1QknOTTCSZ2L1793BdS5L+Ysm+CpJcAxw+YNf5M3yNDJirJMcBR1fV65Os2teTVNUGYANAr9erGb62JGkf9hkEVXXKdPuS/DLJEVV1e5IjgF8NKNsFnDRpvAK4DjgReHKSn3V9LE9yXVWdhCRp3gx7amgTcO+7gNYBXxpQsxk4Lckh3UXi04DNVfXRqnp0Va0Cng78xBCQpPk3bBC8Fzg1yXbg1G5Mkl6SywCqag/9awE3dLcLuzlJ0gKQqsV3ur3X69XExMS425CkRSXJ1qrqTZ33k8WS1DiDQJIaZxBIUuMMAklqnEEgSY0zCCSpcQaBJDXOIJCkxhkEktQ4g0CSGmcQSFLjDAJJapxBIEmNMwgkqXEGgSQ1ziCQpMYZBJLUOINAkhpnEEhS4wwCSWqcQSBJjTMIJKlxBoEkNc4gkKTGparG3cOsJdkN/HzcfczSI4Ffj7uJeeaa2+CaF4/HVtVhUycXZRAsRkkmqqo37j7mk2tug2te/Dw1JEmNMwgkqXEGwfzZMO4GxsA1t8E1L3JeI5CkxnlEIEmNMwgkqXEGwQglWZZkS5Lt3f0h09St62q2J1k3YP+mJD+c+46HN8yakzw0yVeS/DjJtiTvnd/uZyfJGUluTrIjyfoB+w9McmW3//okqybte2s3f3OS0+ez72Hs75qTnJpka5IfdPfPmu/e98cwP+Nu/8okdyV543z1PBJV5W1EN+B9wPpuez1w8YCaZcDO7v6QbvuQSfufD3wa+OG41zPXawYeCvxDV/MQ4L+AM8e9pmnWeQDwU+BxXa//C6yeUvPPwH9222uBK7vt1V39gcBR3fMcMO41zfGanwQ8utt+AvCLca9nLtc7af8XgM8Bbxz3emZz84hgtNYAG7vtjcBZA2pOB7ZU1Z6qugPYApwBkOThwBuAi+ah11HZ7zVX1e+r6lsAVXUP8D1gxTz0vD+OB3ZU1c6u1yvor32yyf8tPg+cnCTd/BVVdXdV3QLs6J5vodvvNVfVjVV1Wze/DVia5MB56Xr/DfMzJslZ9H/J2TZP/Y6MQTBaj6qq2wG6++UDao4Ebp003tXNAbwLeD/w+7lscsSGXTMASQ4GngNcO0d9Dmufa5hcU1V7gTuBQ2f42IVomDVP9gLgxqq6e476HJX9Xm+ShwFvAS6Yhz5Hbsm4G1hsklwDHD5g1/kzfYoBc5XkOODoqnr91POO4zZXa570/EuAzwAfqqqds+9wXtzvGvZRM5PHLkTDrLm/MzkWuBg4bYR9zZVh1nsBcElV3dUdICwqBsEsVdUp0+1L8sskR1TV7UmOAH41oGwXcNKk8QrgOuBE4MlJfkb/57I8yXVVdRJjNodrvtcGYHtVfXAE7c6VXcBjJo1XALdNU7OrC7eDgD0zfOxCNMyaSbICuBp4cVX9dO7bHdow6z0BODvJ+4CDgT8n+WNVfXju2x6BcV+keCDdgH/jvhdO3zegZhlwC/2LpYd028um1Kxi8VwsHmrN9K+HfAF40LjXso91LqF//vco/noh8dgpNa/ivhcSP9ttH8t9LxbvZHFcLB5mzQd39S8Y9zrmY71Tat7JIrtYPPYGHkg3+udGrwW2d/f3/s+uB1w2qe6l9C8Y7gBeMuB5FlMQ7Pea6f/GVcBNwPe728vHvab7Wes/Aj+h/86S87u5C4HndttL6b9jZAfwXeBxkx57fve4m1mg74wa5ZqBfwF+N+nn+n1g+bjXM5c/40nPseiCwD8xIUmN811DktQ4g0CSGmcQSFLjDAJJapxBIEmNMwgkqXEGgSQ17v8BpfyprdyIsmwAAAAASUVORK5CYII=\n", 76 | "text/plain": [ 77 | "
" 78 | ] 79 | }, 80 | "metadata": { 81 | "needs_background": "light" 82 | }, 83 | "output_type": "display_data" 84 | } 85 | ], 86 | "source": [ 87 | "plt.quiver(*origin, x[:,0], x[:,1], color=['r','b','g'], scale=10)\n", 88 | "plt.show()" 89 | ] 90 | }, 91 | { 92 | "cell_type": "markdown", 93 | "metadata": {}, 94 | "source": [ 95 | "## Linear Layer\n", 96 | "\n", 97 | "After applying the linear layer our output looks like this:" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": 105, 103 | "metadata": {}, 104 | "outputs": [ 105 | { 106 | "data": { 107 | "text/plain": [ 108 | "tensor([[0.8024, 0.6116]])" 109 | ] 110 | }, 111 | "execution_count": 105, 112 | "metadata": {}, 113 | "output_type": "execute_result" 114 | } 115 | ], 116 | "source": [ 117 | "w = torch.randn(2,2) \n", 118 | "x_l = F.linear(x,w)\n", 119 | "x_l" 120 | ] 121 | }, 122 | { 123 | "cell_type": "code", 124 | "execution_count": 106, 125 | "metadata": {}, 126 | "outputs": [ 127 | { 128 | "data": { 129 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAD4CAYAAADhNOGaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAASoklEQVR4nO3de4yddZ3H8feXNpYISculVWjptmtRU1wVOFJwvbBAC2wCRUEpRBkVbKIQXY1KDbuLXEwA10AQ46YiWkmg3DQ2iiHlJqvhNgVWrAIdC6QjDRdbkUu4dPvdP54HO4xnmJmeM3Pm9Pd+JSfn+f2e3znn++s085nn9zznnMhMJEnl2qnTBUiSOssgkKTCGQSSVDiDQJIKZxBIUuEmd7qA7bHnnnvmnDlzOl2GJHWVNWvWPJOZ0wf3d2UQzJkzh97e3k6XIUldJSIeb9bv0pAkFc4gkKTCGQSSVDiDQJIKZxBIUuEMAkkqnEEgSYUzCCSpcAaBJBXOIJCkwhkEklQ4g0CSCmcQSFLhDAJJKpxBIEmFMwgkqXAGgSQVziCQpMIZBJJUOINAkgpnEEhS4QwCSSqcQSBJhTMIJKlwBoEkFa4tQRARR0XEwxHRFxHLmuyfEhHX1Pvvjog5g/bPjojnI+Ir7ahHkjRyLQdBREwCvgscDcwHToqI+YOGnQpszsx5wMXAhYP2Xwz8stVaJEmj144jgoOAvsxcn5mvACuBxYPGLAZW1NvXA4dHRABExHHAemBtG2qRJI1SO4JgJrBhQLu/7ms6JjO3AM8Ce0TELsCZwDnDvUhELI2I3ojoffrpp9tQtiQJ2hME0aQvRzjmHODizHx+uBfJzOWZ2cjMxvTp07ejTElSM5Pb8Bz9wD4D2rOAJ4YY0x8Rk4GpwCZgAXBCRFwETAO2RsRLmXlZG+qSJI1AO4LgXmDfiJgL/AlYApw8aMwqoAe4EzgBuDUzE/jgawMi4hvA84aAJI2vloMgM7dExBnATcAk4IrMXBsR5wK9mbkK+AFwZUT0UR0JLGn1dSVJ7RHVH+bdpdFoZG9vb6fLkKSuEhFrMrMxuN93FktS4QwCSSqcQSBJhTMIJKlwBoEkFc4gkKTCGQSSVDiDQJIKZxBIUuEMAkkqnEEgSYUzCCSpcAaBJBXOIJCkwhkEklQ4g0CSCmcQSFLhDAJJKpxBIEmFMwgkqXAGgSQVziCQpMIZBJJUOINAkgpnEEhS4QwCSSqcQSBJhTMIJKlwBoEkFc4gkKTCtSUIIuKoiHg4IvoiYlmT/VMi4pp6/90RMafuXxgRayLiwfr+sHbUI0kauZaDICImAd8FjgbmAydFxPxBw04FNmfmPOBi4MK6/xngmMz8J6AHuLLVeiRJo9OOI4KDgL7MXJ+ZrwArgcWDxiwGVtTb1wOHR0Rk5v2Z+UTdvxbYOSKmtKEmSa167jn44Q/hJz/pdCUaY5Pb8BwzgQ0D2v3AgqHGZOaWiHgW2IPqiOA1xwP3Z+bLbahJ0vbYuhXuuAN+9CO4/np4+9vhzjs7XZXGWDuCIJr05WjGRMR+VMtFi4Z8kYilwFKA2bNnj75KSUN79FH48Y9hxYpqG2CXXWDlSpjiQfqOrh1B0A/sM6A9C3hiiDH9ETEZmApsAoiIWcBPgVMy849DvUhmLgeWAzQajcFBI2m0XngBbrih+uv/ttv+fv9ll1VHBNrhtSMI7gX2jYi5wJ+AJcDJg8asojoZfCdwAnBrZmZETAN+AXw9M3/ThlokDeepp2DZMrjuOnj++eZjTjoJenrGty51TMsnizNzC3AGcBPwB+DazFwbEedGxLH1sB8Ae0REH/Bl4LVLTM8A5gH/EREP1LcZrdYk6Q3MmAGf+xwcNsTV2nPnwve+B9FsRVc7onYcEZCZNwI3Dur7zwHbLwEfa/K484Hz21GDpFF497th5sy/7580Ca66CqZOHf+a1DFtCQJJXeTRR+FjH4M1a/5+33nnwcEHj39N6ig/YkIqyapVcMAB20Kg0YBLL622DzsMvva1ztWmjjEIpBK8+mr1S37xYvjLX6q+00+HX/8ajjsO9tgDrryyWhpScVwaknZ0TzwBJ55Y/dIH2HVXuPzyqg9g1qzqMtK99+5cjeoog0Dakd1yC5x8cnXJKMC73lW9Y/gd79g2JgI+/OHO1KcJwaUhaUe0dWt14nfhwm0h0NMDd9/9+hCQ8IhA2vE88wx84hNw001Ve+edq3cJf+YzvjdATRkE0o7kzjvh4x+H/v6qPW9etRT0nvd0ti5NaC4NSTuCTLjkEvjQh7aFwPHHQ2+vIaBheUQgdbtnn4VTT62u/AGYPBm+9S344hddCtKIGARSN3vggepdwn19VXvWLLj2WjjkkM7Wpa7i0pDUjTKr9wIcfPC2EDjySLj/fkNAo2YQSN3mhRfgU5+Cz34WXn4ZdtqpulT0xhthzz07XZ26kEtDUjd56CE44QRYu7Zqz5gBV1899EdKSyPgEYHULVauhPe9b1sIfPCD1VKQIaAWGQTSRPf
yy3DGGdW3hr32jWJnngm33urnA6ktXBqSJrLHHqveIHbvvVV72rTqS+aPOaajZWnHYhBIE9XPfw6nnAKbN1ftAw+svmd47tzO1qUdjktD0kSzZUv15fLHHLMtBD7/efjNbwwBjQmPCKSJZONGWLIE7rijau+yC3z/+9X5AWmMGATSRHHbbdUv/CefrNr77Vd9YNw739nZurTDc2lI6rStW+Gb34QjjtgWAp/8ZPXdAYaAxoFHBFIn/fnP1S/9X/6yak+ZAt/5Dpx2mh8Yp3FjEEidctdd1aWhGzZU7be9rboqaP/9O1uXiuPSkDTeMuHSS6vvDngtBD7yEVizxhBQRxgE0nj661/hxBOr7wp49dXquwO+/e3quwSmTu10dSqUS0PSePntb6sPjFu3rmrPnFl9d8D739/ZulQ8jwik8XDFFbBgwbYQWLSo+sA4Q0ATgEEgjaUXX4RPf7r6KsmXXqquBDrnnOq7A6ZP73R1EuDSkDR2HnmkWgp68MGqPX06XHVV9X4BaQLxiEAaC9ddB43GthD4wAeqpSBDQBNQW4IgIo6KiIcjoi8iljXZPyUirqn33x0Rcwbs+3rd/3BEHNmOeqSOeeUV+MIXqvcHPPdc1ffVr1bfHTBzZmdrk4bQ8tJQREwCvgssBPqBeyNiVWb+fsCwU4HNmTkvIpYAFwInRsR8YAmwH7A3cHNEvD0z/6/VuqRx9/jjVQDcc0/VnjYNVqyAY4/tbF3SMNpxRHAQ0JeZ6zPzFWAlsHjQmMXAinr7euDwiIi6f2VmvpyZjwJ99fNJ3eeuu7aFwAEHwH33GQLqCu04WTwT2DCg3Q8sGGpMZm6JiGeBPer+uwY9tunxc0QsBZYCzJ49uw1lS2124onwq19V7xy++GLYeedOVySNSDuCoNknY+UIx4zksVVn5nJgOUCj0Wg6Ruq4yy6DnbwGQ92lHf9j+4F9BrRnAU8MNSYiJgNTgU0jfKzUPQwBdaF2/K+9F9g3IuZGxJuoTv6uGjRmFdBTb58A3JqZWfcvqa8qmgvsC9zThpokSSPU8tJQveZ/BnATMAm4IjPXRsS5QG9mrgJ+AFwZEX1URwJL6seujYhrgd8DW4DTvWJIksZXVH+Yd5dGo5G9vb2dLkOSukpErMnMxuB+FzQlqXAGgSQVziCQpMIZBJJUOINAkgpnEEhS4QwCSSqcQSBJhTMIJKlwBoEkFc4gkKTCGQSSVDiDQJIKZxBIUuEMAkkqnEEgSYUzCCSpcAaBJBXOIJCkwhkEklQ4g0CSCmcQSFLhDAJJKpxBIEmFMwgkqXAGgSQVziCQpMIZBJJUOINAkgpnEEhS4VoKgojYPSJWR8S6+n63Icb11GPWRURP3ffmiPhFRDwUEWsj4oJWapEkbZ9WjwiWAbdk5r7ALXX7dSJid+BsYAFwEHD2gMD4r8x8J7A/8M8RcXSL9UiSRqnVIFgMrKi3VwDHNRlzJLA6Mzdl5mZgNXBUZr6YmbcBZOYrwH3ArBbrkSSNUqtB8JbM3AhQ389oMmYmsGFAu7/u+5uImAYcQ3VUIUkaR5OHGxARNwNvbbLrrBG+RjTpywHPPxm4Grg0M9e/QR1LgaUAs2fPHuFLS5KGM2wQZOYRQ+2LiCcjYq/M3BgRewFPNRnWDxw6oD0LuH1AezmwLjMvGaaO5fVYGo1GvtFYSdLItbo0tAroqbd7gJ81GXMTsCgidqtPEi+q+4iI84GpwL+1WIckaTu1GgQXAAsjYh2wsG4TEY2IuBwgMzcB5wH31rdzM3NTRMyiWl6aD9wXEQ9ExGkt1iNJGqXI7L5Vlkajkb29vZ0uQ5K6SkSsyczG4H7fWSxJhTMIJKlwBoEkFc4gkKTCGQSSVDiDQJIKZxBIUuEMAkkqnEEgSYUzCCSpcAaBJBXOIJCkwhkEklQ4g0CSCmcQSFLhDAJJKpxBIEmFMwgkqXAGgSQVziCQpMIZBJJUOINAkgpnEEhS4QwCSSqcQSBJhTMIJKlwBoEkFc4gkKTCGQSSVDiDQJIKZxBIUuFaCoKI2D0iVkfEuvp+tyHG9dRj1kVET5P9qyLid63UIknaPq0eESwDbsnMfYFb6vbrRMTuwNnAAuAg4OyBgRERHwWeb7EOSdJ2ajUIFgMr6u0VwHFNxhwJrM7MTZm5GVgNHAUQEbsCXwbOb7EOSdJ2ajUI3pKZGwHq+xlNxswENgxo99d9AOcB3wZeHO6FImJpRPRGRO/TTz/dWtWSpL+ZPNyAiLgZeGuTXWeN8DWiSV9GxHuBeZn5pYiYM9yTZOZyYDlAo9HIEb62JGkYwwZBZh4x1L6IeDIi9srMjRGxF/BUk2H9wKED2rOA24FDgAMj4rG6jhkRcXtmHookady0ujS0CnjtKqAe4GdNxtwELIqI3eqTxIuAmzLze5m5d2bOAT4APGIISNL4azUILgAWRsQ6YGHdJiIaEXE5QGZuojoXcG99O7fukyRNAJHZfcvtjUYje3t7O12GJHWViFiTmY3B/b6zWJIKZxBIUuEMAkkqnEEgSYUzCCSpcAaBJBXOIJCkwhkEklQ4g0CSCmcQSFLhDAJJKpxBIEmFMwgkqXAGgSQVziCQpMIZBJJUOINAkgpnEEhS4QwCSSqcQSBJhTMIJKlwBoEkFc4gkKTCGQSSVLjIzE7XMGoR8TTweKfrGKU9gWc6XcQ4c85lcM7d4x8yc/rgzq4Mgm4UEb2Z2eh0HePJOZfBOXc/l4YkqXAGgSQVziAYP8s7XUAHOOcyOOcu5zkCSSqcRwSSVDiDQJIKZxC0UUTsHhGrI2Jdfb/bEON66jHrIqKnyf5VEfG7sa+4da3MOSLeHBG/iIiHImJtRFwwvtWPTkQcFREPR0RfRCxrsn9KRFxT7787IuYM2Pf1uv/hiDhyPOtuxfbOOSIWRsSaiHiwvj9svGvfHq38jOv9syPi+Yj4ynjV3BaZ6a1NN+AiYFm9vQy4sMmY3YH19f1u9fZuA/Z/FLgK+F2n5zPWcwbeDPxLPeZNwP8AR3d6TkPMcxLwR+Af61r/F5g/aMzngf+ut5cA19Tb8+vxU4C59fNM6vScxnjO+wN719vvAv7U6fmM5XwH7L8BuA74SqfnM5qbRwTttRhYUW+vAI5rMuZIYHVmbsrMzcBq4CiAiNgV+DJw/jjU2i7bPefMfDEzbwPIzFeA+4BZ41Dz9jgI6MvM9XWtK6nmPtDAf4vrgcMjIur+lZn5cmY+CvTVzzfRbfecM/P+zHyi7l8L7BwRU8al6u3Xys+YiDiO6o+cteNUb9sYBO31lszcCFDfz2gyZiawYUC7v+4DOA/4NvDiWBbZZq3OGYCImAYcA9wyRnW2atg5DByTmVuAZ4E9RvjYiaiVOQ90PHB/Zr48RnW2y3bPNyJ2Ac4EzhmHOttucqcL6DYRcTPw1ia7zhrpUzTpy4h4LzAvM780eN2x08ZqzgOefzJwNXBpZq4ffYXj4g3nMMyYkTx2ImplztXOiP2AC4FFbaxrrLQy33OAizPz+foAoasYBKOUmUcMtS8inoyIvTJzY0TsBTzVZFg/cOiA9izgdu
AQ4MCIeIzq5zIjIm7PzEPpsDGc82uWA+sy85I2lDtW+oF9BrRnAU8MMaa/DrepwKYRPnYiamXORMQs4KfAKZn5x7Evt2WtzHcBcEJEXARMA7ZGxEuZednYl90GnT5JsSPdgG/x+hOnFzUZszvwKNXJ0t3q7d0HjZlD95wsbmnOVOdDbgB26vRchpnnZKr137lsO5G436Axp/P6E4nX1tv78fqTxevpjpPFrcx5Wj3++E7PYzzmO2jMN+iyk8UdL2BHulGtjd4CrKvvX/tl1wAuHzDuM1QnDPuATzd5nm4Kgu2eM9VfXAn8AXigvp3W6Tm9wVz/FXiE6sqSs+q+c4Fj6+2dqa4Y6QPuAf5xwGPPqh/3MBP0yqh2zhn4d+CFAT/XB4AZnZ7PWP6MBzxH1wWBHzEhSYXzqiFJKpxBIEmFMwgkqXAGgSQVziCQpMIZBJJUOINAkgr3/4oHI7hJoOl5AAAAAElFTkSuQmCC\n", 130 | "text/plain": [ 131 | "
" 132 | ] 133 | }, 134 | "metadata": { 135 | "needs_background": "light" 136 | }, 137 | "output_type": "display_data" 138 | } 139 | ], 140 | "source": [ 141 | "plt.quiver(*origin, x_l[:,0], x_l[:,1], color=['r','b','g'], scale=5)\n", 142 | "plt.show()" 143 | ] 144 | }, 145 | { 146 | "cell_type": "markdown", 147 | "metadata": {}, 148 | "source": [ 149 | "## ReLU Activation Function\n", 150 | "\n", 151 | "Ok, we can predict what will happen. Because ReLU is this max(0,x). Our output will be [x1,x2]. This is because the ouput of the linear layer was: \n", 152 | "\n", 153 | "- x1 > 0 --> x1\n", 154 | "- x2 > 0 --> x2" 155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": 108, 160 | "metadata": {}, 161 | "outputs": [ 162 | { 163 | "data": { 164 | "text/plain": [ 165 | "tensor([[0.8024, 0.6116]])" 166 | ] 167 | }, 168 | "execution_count": 108, 169 | "metadata": {}, 170 | "output_type": "execute_result" 171 | } 172 | ], 173 | "source": [ 174 | "x_relu = F.relu(x_l)\n", 175 | "x_relu" 176 | ] 177 | }, 178 | { 179 | "cell_type": "code", 180 | "execution_count": 109, 181 | "metadata": {}, 182 | "outputs": [ 183 | { 184 | "data": { 185 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAYIAAAD4CAYAAADhNOGaAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADh0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uMy4xLjEsIGh0dHA6Ly9tYXRwbG90bGliLm9yZy8QZhcZAAASoklEQVR4nO3de4yddZ3H8feXNpYISculVWjptmtRU1wVOFJwvbBAC2wCRUEpRBkVbKIQXY1KDbuLXEwA10AQ46YiWkmg3DQ2iiHlJqvhNgVWrAIdC6QjDRdbkUu4dPvdP54HO4xnmJmeM3Pm9Pd+JSfn+f2e3znn++s085nn9zznnMhMJEnl2qnTBUiSOssgkKTCGQSSVDiDQJIKZxBIUuEmd7qA7bHnnnvmnDlzOl2GJHWVNWvWPJOZ0wf3d2UQzJkzh97e3k6XIUldJSIeb9bv0pAkFc4gkKTCGQSSVDiDQJIKZxBIUuEMAkkqnEEgSYUzCCSpcAaBJBXOIJCkwhkEklQ4g0CSCmcQSFLhDAJJKpxBIEmFMwgkqXAGgSQVziCQpMIZBJJUOINAkgpnEEhS4QwCSSqcQSBJhTMIJKlwBoEkFa4tQRARR0XEwxHRFxHLmuyfEhHX1Pvvjog5g/bPjojnI+Ir7ahHkjRyLQdBREwCvgscDcwHToqI+YOGnQpszsx5wMXAhYP2Xwz8stVaJEmj144jgoOAvsxcn5mvACuBxYPGLAZW1NvXA4dHRABExHHAemBtG2qRJI1SO4JgJrBhQLu/7ms6JjO3AM8Ce0TELsCZwDnDvUhELI2I3ojoffrpp9tQtiQJ2hME0aQvRzjmHODizHx+uBfJzOWZ2cjMxvTp07ejTElSM5Pb8Bz9wD4D2rOAJ4YY0x8Rk4GpwCZgAXBCRFwETAO2RsRLmXlZG+qSJI1AO4LgXmDfiJgL/AlYApw8aMwqoAe4EzgBuDUzE/jgawMi4hvA84aAJI2vloMgM7dExBnATcAk4IrMXBsR5wK9mbkK+AFwZUT0UR0JLGn1dSVJ7RHVH+bdpdFoZG9vb6fLkKSuEhFrMrMxuN93FktS4QwCSSqcQSBJhTMIJKlwBoEkFc4gkKTCGQSSVDiDQJIKZxBIUuEMAkkqnEEgSYUzCCSpcAaBJBXOIJCkwhkEklQ4g0CSCmcQSFLhDAJJKpxBIEmFMwgkqXAGgSQVziCQpMIZBJJUOINAkgpnEEhS4QwCSSqcQSBJhTMIJKlwBoEkFc4gkKTCtSUIIuKoiHg4IvoiYlmT/VMi4pp6/90RMafuXxgRayLiwfr+sHbUI0kauZaDICImAd8FjgbmAydFxPxBw04FNmfmPOBi4MK6/xngmMz8J6AHuLLVeiRJo9OOI4KDgL7MXJ+ZrwArgcWDxiwGVtTb1wOHR0Rk5v2Z+UTdvxbYOSKmtKEmSa167jn44Q/hJz/pdCUaY5Pb8BwzgQ0D2v3AgqHGZOaWiHgW2IPqiOA1xwP3Z+bLbahJ0vbYuhXuuAN+9CO4/np4+9vhzjs7XZXGWDuCIJr05WjGRMR+VMtFi4Z8kYilwFKA2bNnj75KSUN79FH48Y9hxYpqG2CXXWDlSpjiQfqOrh1B0A/sM6A9C3hiiDH9ETEZmApsAoiIWcBPgVMy849DvUhmLgeWAzQajcFBI2m0XngBbrih+uv/ttv+fv9ll1VHBNrhtSMI7gX2jYi5wJ+AJcDJg8asojoZfCdwAnBrZmZETAN+AXw9M3/ThlokDeepp2DZMrjuOnj++eZjTjoJenrGty51TMsnizNzC3AGcBPwB+DazFwbEedGxLH1sB8Ae0REH/Bl4LVLTM8A5gH/EREP1LcZrdYk6Q3MmAGf+xwcNsTV2nPnwve+B9FsRVc7onYcEZCZNwI3Dur7zwHbLwEfa/K484Hz21GDpFF497th5sy/7580Ca66CqZOHf+a1DFtCQJJXeTRR+FjH4M1a/5+33nnwcEHj39N6ig/YkIqyapVcMAB20Kg0YBLL622DzsMvva1ztWmjjEIpBK8+mr1S37xYvjLX6q+00+HX/8ajjsO9tgDrryyWhpScVwaknZ0TzwBJ55Y/dIH2HVXuPzyqg9g1qzqMtK99+5cjeoog0Dakd1yC5x8cnXJKMC73lW9Y/gd79g2JgI+/OHO1KcJwaUhaUe0dWt14nfhwm0h0NMDd9/9+hCQ8IhA2vE88wx84hNw001Ve+edq3cJf+YzvjdATRkE0o7kzjvh4x+H/v6qPW9etRT0nvd0ti5NaC4NSTuCTLjkEvjQh7aFwPHHQ2+vIaBheUQgdbtnn4VTT62u/AGYPBm+9S344hddCtKIGARSN3vggepdwn19V
XvWLLj2WjjkkM7Wpa7i0pDUjTKr9wIcfPC2EDjySLj/fkNAo2YQSN3mhRfgU5+Cz34WXn4ZdtqpulT0xhthzz07XZ26kEtDUjd56CE44QRYu7Zqz5gBV1899EdKSyPgEYHULVauhPe9b1sIfPCD1VKQIaAWGQTSRPfyy3DGGdW3hr32jWJnngm33urnA6ktXBqSJrLHHqveIHbvvVV72rTqS+aPOaajZWnHYhBIE9XPfw6nnAKbN1ftAw+svmd47tzO1qUdjktD0kSzZUv15fLHHLMtBD7/efjNbwwBjQmPCKSJZONGWLIE7rijau+yC3z/+9X5AWmMGATSRHHbbdUv/CefrNr77Vd9YNw739nZurTDc2lI6rStW+Gb34QjjtgWAp/8ZPXdAYaAxoFHBFIn/fnP1S/9X/6yak+ZAt/5Dpx2mh8Yp3FjEEidctdd1aWhGzZU7be9rboqaP/9O1uXiuPSkDTeMuHSS6vvDngtBD7yEVizxhBQRxgE0nj661/hxBOr7wp49dXquwO+/e3quwSmTu10dSqUS0PSePntb6sPjFu3rmrPnFl9d8D739/ZulQ8jwik8XDFFbBgwbYQWLSo+sA4Q0ATgEEgjaUXX4RPf7r6KsmXXqquBDrnnOq7A6ZP73R1EuDSkDR2HnmkWgp68MGqPX06XHVV9X4BaQLxiEAaC9ddB43GthD4wAeqpSBDQBNQW4IgIo6KiIcjoi8iljXZPyUirqn33x0Rcwbs+3rd/3BEHNmOeqSOeeUV+MIXqvcHPPdc1ffVr1bfHTBzZmdrk4bQ8tJQREwCvgssBPqBeyNiVWb+fsCwU4HNmTkvIpYAFwInRsR8YAmwH7A3cHNEvD0z/6/VuqRx9/jjVQDcc0/VnjYNVqyAY4/tbF3SMNpxRHAQ0JeZ6zPzFWAlsHjQmMXAinr7euDwiIi6f2VmvpyZjwJ99fNJ3eeuu7aFwAEHwH33GQLqCu04WTwT2DCg3Q8sGGpMZm6JiGeBPer+uwY9tunxc0QsBZYCzJ49uw1lS2124onwq19V7xy++GLYeedOVySNSDuCoNknY+UIx4zksVVn5nJgOUCj0Wg6Ruq4yy6DnbwGQ92lHf9j+4F9BrRnAU8MNSYiJgNTgU0jfKzUPQwBdaF2/K+9F9g3IuZGxJuoTv6uGjRmFdBTb58A3JqZWfcvqa8qmgvsC9zThpokSSPU8tJQveZ/BnATMAm4IjPXRsS5QG9mrgJ+AFwZEX1URwJL6seujYhrgd8DW4DTvWJIksZXVH+Yd5dGo5G9vb2dLkOSukpErMnMxuB+FzQlqXAGgSQVziCQpMIZBJJUOINAkgpnEEhS4QwCSSqcQSBJhTMIJKlwBoEkFc4gkKTCGQSSVDiDQJIKZxBIUuEMAkkqnEEgSYUzCCSpcAaBJBXOIJCkwhkEklQ4g0CSCmcQSFLhDAJJKpxBIEmFMwgkqXAGgSQVziCQpMIZBJJUOINAkgpnEEhS4VoKgojYPSJWR8S6+n63Icb11GPWRURP3ffmiPhFRDwUEWsj4oJWapEkbZ9WjwiWAbdk5r7ALXX7dSJid+BsYAFwEHD2gMD4r8x8J7A/8M8RcXSL9UiSRqnVIFgMrKi3VwDHNRlzJLA6Mzdl5mZgNXBUZr6YmbcBZOYrwH3ArBbrkSSNUqtB8JbM3AhQ389oMmYmsGFAu7/u+5uImAYcQ3VUIUkaR5OHGxARNwNvbbLrrBG+RjTpywHPPxm4Grg0M9e/QR1LgaUAs2fPHuFLS5KGM2wQZOYRQ+2LiCcjYq/M3BgRewFPNRnWDxw6oD0LuH1AezmwLjMvGaaO5fVYGo1GvtFYSdLItbo0tAroqbd7gJ81GXMTsCgidqtPEi+q+4iI84GpwL+1WIckaTu1GgQXAAsjYh2wsG4TEY2IuBwgMzcB5wH31rdzM3NTRMyiWl6aD9wXEQ9ExGkt1iNJGqXI7L5Vlkajkb29vZ0uQ5K6SkSsyczG4H7fWSxJhTMIJKlwBoEkFc4gkKTCGQSSVDiDQJIKZxBIUuEMAkkqnEEgSYUzCCSpcAaBJBXOIJCkwhkEklQ4g0CSCmcQSFLhDAJJKpxBIEmFMwgkqXAGgSQVziCQpMIZBJJUOINAkgpnEEhS4QwCSSqcQSBJhTMIJKlwBoEkFc4gkKTCGQSSVDiDQJIKZxBIUuFaCoKI2D0iVkfEuvp+tyHG9dRj1kVET5P9qyLid63UIknaPq0eESwDbsnMfYFb6vbrRMTuwNnAAuAg4OyBgRERHwWeb7EOSdJ2ajUIFgMr6u0VwHFNxhwJrM7MTZm5GVgNHAUQEbsCXwbOb7EOSdJ2ajUI3pKZGwHq+xlNxswENgxo99d9AOcB3wZeHO6FImJpRPRGRO/TTz/dWtWSpL+ZPNyAiLgZeGuTXWeN8DWiSV9GxHuBeZn5pYiYM9yTZOZyYDlAo9HIEb62JGkYwwZBZh4x1L6IeDIi9srMjRGxF/BUk2H9wKED2rOA24FDgAMj4rG6jhkRcXtmHookady0ujS0CnjtKqAe4GdNxtwELIqI3eqTxIuAmzLze5m5d2bOAT4APGIISNL4azUILgAWRsQ6YGHdJiIaEXE5QGZuojoXcG99O7fukyRNAJHZfcvtjUYje3t7O12GJHWViFiTmY3B/b6zWJIKZxBIUuEMAkkqnEEgSYUzCCSpcAaBJBXOIJCkwhkEklQ4g0CSCmcQSFLhDAJJKpxBIEmFMwgkqXAGgSQVziCQpMIZBJJUOINAkgpnEEhS4QwCSSqcQSBJhTMIJKlwBoEkFc4gkKTCGQSSVLjIzE7XMGoR8TTweKfrGKU9gWc6XcQ4c85lcM7d4x8yc/rgzq4Mgm4UEb2Z2eh0HePJOZfBOXc/l4YkqXAGgSQVziAYP8s7XUAHOOcyOOcu5zkCSSqcRwSSVDiDQJIKZxC0UUTsHhGrI2Jdfb/bEON66jHrIqKnyf5VEfG7sa+4da3MOSLeHBG/iIiHImJtRFwwvtWPTkQcFREPR0RfRCxrsn9KRFxT7787IuYM2Pf1uv/hiDhyPOtuxfbOOSIWRsSaiHiwvj9svGvfHq38jOv9syPi+Yj4ynjV3BaZ6a1NN+AiYFm9vQy4sMmY3YH19f1u9fZuA/Z/FLgK+F2n5zPWcwbeDPxLPeZNwP8AR3d6TkPMcxLwR+Af61r/F5g/aMzngf+ut5cA19Tb8+vxU4C59fNM6vScxnjO+wN719vvAv7U6fmM5XwH7L8BuA74SqfnM5qbRwTttRhYUW+vAI5rMuZIYHVmbsrMzcBq4CiAiNgV+DJw/jjU2i7bPefMfDEzbwPIzFeA+4BZ41Dz9jgI6MvM9XWtK6nmPtDAf4vrgcMjIur+lZn5cmY+CvTVzzfRbfecM/P+zHyi7l8L7BwRU8al6u3Xys+YiDiO6o+cteNUb9sYBO31lszcCFDfz2gyZiawYUC7v+4DOA/4NvDiWBbZZq3OGYCImAYcA9wyRnW2atg5DByTmVuAZ4E9RvjYiaiVOQ90PHB/Zr48RnW2y3bPNyJ2Ac4EzhmH
OttucqcL6DYRcTPw1ia7zhrpUzTpy4h4LzAvM780eN2x08ZqzgOefzJwNXBpZq4ffYXj4g3nMMyYkTx2ImplztXOiP2AC4FFbaxrrLQy33OAizPz+foAoasYBKOUmUcMtS8inoyIvTJzY0TsBTzVZFg/cOiA9izgduAQ4MCIeIzq5zIjIm7PzEPpsDGc82uWA+sy85I2lDtW+oF9BrRnAU8MMaa/DrepwKYRPnYiamXORMQs4KfAKZn5x7Evt2WtzHcBcEJEXARMA7ZGxEuZednYl90GnT5JsSPdgG/x+hOnFzUZszvwKNXJ0t3q7d0HjZlD95wsbmnOVOdDbgB26vRchpnnZKr137lsO5G436Axp/P6E4nX1tv78fqTxevpjpPFrcx5Wj3++E7PYzzmO2jMN+iyk8UdL2BHulGtjd4CrKvvX/tl1wAuHzDuM1QnDPuATzd5nm4Kgu2eM9VfXAn8AXigvp3W6Tm9wVz/FXiE6sqSs+q+c4Fj6+2dqa4Y6QPuAf5xwGPPqh/3MBP0yqh2zhn4d+CFAT/XB4AZnZ7PWP6MBzxH1wWBHzEhSYXzqiFJKpxBIEmFMwgkqXAGgSQVziCQpMIZBJJUOINAkgr3/4oHI7hJoOl5AAAAAElFTkSuQmCC\n", 186 | "text/plain": [ 187 | "
" 188 | ] 189 | }, 190 | "metadata": { 191 | "needs_background": "light" 192 | }, 193 | "output_type": "display_data" 194 | } 195 | ], 196 | "source": [ 197 | "plt.quiver(*origin, x_relu[:,0], x_relu[:,1], color=['r','b','g'], scale=5)\n", 198 | "plt.show()" 199 | ] 200 | }, 201 | { 202 | "cell_type": "markdown", 203 | "metadata": {}, 204 | "source": [ 205 | "ReLU is very interesting, because it does not change the direction of the vector it just squashes it at (0,0).\n", 206 | "\n", 207 | "I think this is why it is very important to use something like BatchNorm when using ReLU. The vector that is \"flowing\" through your network could explode really quickly, so you need a squashing mechanism.\n", 208 | "\n", 209 | "If you use something like Sigmoid on the other hand, your vector is alway" 210 | ] 211 | }, 212 | { 213 | "cell_type": "markdown", 214 | "metadata": {}, 215 | "source": [ 216 | "## Softmax\n", 217 | "\n", 218 | "This is in my opinion the most interesting part. Say we have a binary classification problem and use softmax to get those probabilities. What we actually measure is which of the dimensions is larger. What the NN is trying to do is to maximize the dimension of the correct class. It is basically a battle between dimensions.\n", 219 | "\n", 220 | "In this case it is this:" 221 | ] 222 | }, 223 | { 224 | "cell_type": "code", 225 | "execution_count": 113, 226 | "metadata": {}, 227 | "outputs": [ 228 | { 229 | "data": { 230 | "text/plain": [ 231 | "tensor([[0.5475, 0.4525]])" 232 | ] 233 | }, 234 | "execution_count": 113, 235 | "metadata": {}, 236 | "output_type": "execute_result" 237 | } 238 | ], 239 | "source": [ 240 | "x_soft = F.softmax(x_relu, dim=1)\n", 241 | "x_soft" 242 | ] 243 | }, 244 | { 245 | "cell_type": "markdown", 246 | "metadata": {}, 247 | "source": [ 248 | "We can see that x1 > x2. This means x1 wins this one." 
249 | ] 250 | } 251 | ], 252 | "metadata": { 253 | "kernelspec": { 254 | "display_name": "Python 3", 255 | "language": "python", 256 | "name": "python3" 257 | }, 258 | "language_info": { 259 | "codemirror_mode": { 260 | "name": "ipython", 261 | "version": 3 262 | }, 263 | "file_extension": ".py", 264 | "mimetype": "text/x-python", 265 | "name": "python", 266 | "nbconvert_exporter": "python", 267 | "pygments_lexer": "ipython3", 268 | "version": "3.6.9" 269 | } 270 | }, 271 | "nbformat": 4, 272 | "nbformat_minor": 2 273 | } 274 | -------------------------------------------------------------------------------- /DL/Batch-Norm.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "attachments": { 5 | "batch-norm.png": { 6 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZUAAAFICAIAAAAXmfSdAAB3jklEQVR42uz9ezyU2/v4j9+DnE0xinSwCzERnRUdVLzUJmVs2h3IoZAOarJj126TzmxhV6QQKad0kFMiRTKpnCrKVkkIReN8mGHu3+PR+n3u77xnxhhyGLWef6173Wute601933NWte61rWEUBRFIBAIZBQiALsAAoFA+QWBQCBQfkEgEAiUXxAIBMovCAQCgfILAoFABgkhluuampqUlBS+qiK08GAHh8PBToD8VMjLy//666+sHwKLdAgICHj69OmsWbNgf0EgEP7h9u3bjx496mP81dHRsWzZMltbW9hfEAiEf7hz5w57JNR/QSCQ0QqUXxAIBMovCAQCgfILAoFAeEHoJ28/g8EQEIBCfHSAouigG44MuMzheXOqq6sjIyOFhYV37twpJDSSX+vDhw9zcnLWrl3LV8YJP8KnGxoaKiMjg8PhLCwsioqKQGRzc7OrqysOh9u4cWNTUxNLlqysrJ07d6qpqUVGRg5/hdvb248cOeLu7q6kpBQYGPjDy52oqCgFBYXS0tIBJwsLCzMyMjp06NAg1urmzZskEsnZ2XkAMsXOzm7Hjh0IguTm5lpZWSkrK6urq0dERAxuv1VWVurp6Zmamjo7O4+s8EIQRE9Pj0wm79y5MzExcTTJr9jYWAMDA9w3hISE7OzsMBkx6JSWlpJIpP7msrW19fDwQBBEVVVVS0sLROLx+AMHDkyZMiU8PHzs2LEsWZYtW7Zw4cI+v6ghYvv27XJyckeOHCGTyR0dHT+8/FJUVNTV1ZWWlh5wsnXr1mVnZ9NotEGslb6+fl5eXnt7e38zvnjxIjU1dcqUKQcPHnz27Nn58+cfPnwoLy+/ffv2np6eQazhvXv3lJSUlJWV+eR3FBUVJZFIV65cGU3yy8LCIiYmBoR1dHRCQkIwGTG4tLS0kEik5ubmAeR1cHCYMGHC9evXmc1xHzx4sH37dmFhYY5ZNDU1R6TH29vbY2NjJ0+ejCDIzp07yWTyDy+/dHR0rl+/PmHChAEnk5aWHj9+fL8e2t3d7erqyiUBHo+fOHHiwEYiNTU1165dk5aW3rVrl5SU1OTJk42NjTU0NAQFBQex3+h0+ogPu9hF2AAk/gjPHzER0Jss+H6am5tXr15dUlIysOwiIiI2NjalpaXPnj1jno9s2rSptywjtQWnrq6uu7v7J1Rd0en0wUrWJ11dXTY2NvHx8UPRltevXzMYjI6ODmzu+fnz57S0tEGfP0IGR36xc/78eSEhIRwOd/To0T///HPixIkqKioZGRkIgpw7dw7c+uuvv8zNzceNG6erqwumnP7+/mAeGh0dXVBQoKmpicPhwEgkNDT09evXCILk5eXp6eklJSUhCFJWVjZr1qzFixd//fqVl0kZgiAXL14El1Qqtb6+XklJCUEQCoWybds2Ly8vY2PjsLAw9rwhISECAgK7du1CECQlJWXWrFmysrLgVkNDg6urq6Ojo7a2tqurKzaFuXXr1p49e86cOaOkpBQeHs4+yHJzc/Py8tqxY4eJiUlZWRmIj4yMPHz4MIIgQUFBe/fuTU9PH/Agjr38xMRESUlJAoGQn5+PIEh5ebmWlpabm1tvrSgoKCCTyQYGBjk5OUQi0dTUlPkRfXYahUJxcHDQ1dV9+/atqanp+PHjra2tGQyGr6/vggULxo8fHxUVBX5Ed3d3IpGYlJT04MEDS0tLW1vbpKQkNTU1eXn5hIQEUBpzMi7S7a+//po8eTKRSMSssTnWMykpKTs7u66ubu/evRcvXuzo6PD09PTw8Ni8eTOJRPr8+TNzsUFBQUQiUU5OLi4uDsRkZGQoKCgkJydzrMarV68QBPn777+FhYWLior27t0bFhZ2/fp1IpH4E0oQ7n01HH+MzPzzzz8hISEskS0tLSDxqlWrsEgwi5w/f35ycjLYbzRt2jRwa968eQiCzJ49+/3799HR0QiCEAiE5uZmBoMhLi4ORkYoip4/fx5BkEmTJoFchoaGLI84c+YMeC6YGPaJvr6+hIREU1MTiqIXL178999/URTt7u4mEAgXL15EUTQ5OVlQULChoQFF0YKCAgRBIiIiQF4VFZWdO3eC8JEjRwgEAoqiDAbD1NQUpH/+/DmCIKdPn0ZRtKOjA3tQRkZGUFAQczUYDIahoWFwcDC49PT0HDt2bE1NDbh8+fIlEH/s9e/o6AgKCgoPD7eyssrLy/P393d1dT1z5gxLMi7lHzhwQFRUtLm5GdwyNzen0Wi9taKkpERdXX3ChAmnTp0KDw/38PDAHtFbpzHT3t5uZmYmISERGhra2dmZk5ODIMj69esLCwsZDIaDg4OcnFx3d3dDQ4OPjw9ocktLy5w5c6ZPn37lypX6+npDQ8Pp06eD0piTcfxxlZSUNDU1Y2Nj8/LyVqxYISAg8OrVKy71NDIyUlVVBeGNGzcmJCSAHiYQCNu3bwfx2traqqqqiYmJdDrd3NxcXl6ewWCgKJqZmamqqnrv3j2ONXFxcZGVlaXRaHv37gUzUE1NzQcPHqCDjY+PD4lEGsAbMnSEhYXp6+szx3Dvq0Fk2bJl7JEDl1/a2toIghw+fBhF0bS0NGxcgKKorq4ugiAHDx5EUbS1tRXcAt8bUKUD+XXp0iXu8qu6utrExGTz5s2tra28tPD69esIgly4cAFF0ZUrV9bW1oKv3d3d/d27dyiK5uXlIQhSWFjILr/U1dU
x+XXy5Ekgv1JTU6dPn+78/9DR0QFpqFQqDof7+++/u7u7GQzG+/fvmauRlZUFZsTgsqmpSVxcnEwm9ym/goOD29raUBTdv3///PnzGQyGo6Ojl5cXSzIu5dfU1AgJCYGuBiMaLq1AUdTCwkJBQaG7u5tdRHLsNBYOHjwIOgqIPHFx8f3794NLMJkCUhUMzEGTSSSSoaEhSHPu3DkEQTo7O8ElczKO8mvPnj0gDJq5e/duLvXE5NejR4+UlJSAYAK99+LFC+wdtrW1BWEwiKZSqX2+ZitXrsRygeeqqKiIi4tXVlaypOzp6bGzs9vQO/v27evtKa2trXPnzr1x48YA3pCho6amRklJKS8vDx12OMqv79UOAhMYUVFRcMm8/gIUTBISEhMmTPj8+fObN2/6W7iCgkK/VBgmJiYTJky4ePHi6tWrRURE5OTkQDU8PDzKyspOnjzZ1tYG1KI8FlhQUEAkEv38/Fjix40bd/LkyT///PPOnTvnz5/X0dFhvpuXl4fD4bA+wePxRCIRzOm4s2DBAjA+zc/PNzc3x+FwHK0ruJQvLy+/fv360NBQOzu7q1evWltbc2kF6BxpaWl2rTOPncasQxQUFJSSkmLuIgRBGhsb5eXlmZMxh8GfGZ1OFxERYb6VkJCwceNGLFltba2kpCSCIGPGjAEx8vLyWlpapaWlvNTzyZMnkyZNwgpfunQpxybIyMgAYcr9NwIjzb1792Ixc+fODQ4OXr58eXZ29u+//87ydQQHBw/gs7p586abm9vBgwdZluP7fEMYDEZ8fDyLKmAQkZeXv3v3romJiZmZmYeHx+CuVwyf/qtfgDUUCQmJoX6QsLCwra1tfn6+i4sL8wdw+ht79uxheb36hEqlFhcXM69pgukYgiCurq737t1rampatmwZi3oIh8OhKPrp0ycsRlZWFnyB3AFLojQajUKhGBgYcFl54FK+g4NDTk5OSUlJTU3NL7/8wr0VXBhwp30/K1asKGQCfLEsyMrKgrlbn/VsaGj48OHDYHmRe/LkibCwMMuvAzRfU6ZMGaweIJFI+fn5Pj4+LHol7m9IfHz8uXPndu/ePXQ/TXV19cqVKyMjI48ePTriwms45FdzczP40oDZLhivcTGTAe8ZSFBbW0sikaysrHhfst22bRtQ365fvx7E5Obmurm5kclkCQkJLh8tDofr6urC6gyqoays/OHDh1u3bmF18/T07Onpqa+vz8/P19fXLykpWbdunbu7O8ufJLBXZh5BsIzRuPDs2TNRUVGgXqyqqmpoaGAfpnEpf+XKlUpKSvb29np6eiCmt1ZwqQOPnTZESEpKKjPB0cz906dPenp63OsJlimIROLHjx8xQYCiKNDJDoyUlBQTExNs8At48ODBmjVr2H9iBoOxbdu233uHi/WMpKSkra0t+9IQlzdk3bp1Q+35KjU1VUVFZfbs2aNp/RH7sJkNCEEYDNexQTuzZcDdu3ebmprAApOysrKJiQmCICoqKqAXXrx4Adan6uvr6+rqEAQBs4+SkpL4+HiguY+Kirp161ZERATvLmGVlJQMDAxMTEywuUxNTQ3QqlRXV4MFpsrKypqaGjDdwJo2bdq05OTkoqKiBw8eZGZmUqnUkpISc3NzAoFga2vr6+v78OFDJyen+fPnjxkzprOzEyjvREVFN23ahC1WAnR0dNatW3f+/HnQRa9evWpqasL+FcET2U0x09PTlyxZgqJoVFSUurq6gIAAiqIJCQlgXsN7+QICAvb29sXFxZgE760VoDKgH1jordPYJ1PYuAZFURqNhqk7QTOBlKRSqQiCgF0QHR0dmKABibGXhzkZR7BbFApFQkLC0tKSSz1lZWUrKiqqq6tnzJgxadKkDRs2eHl5xcXF2djYYKZ/LS0tmL0hKBz0BoVC0dTUZP6HwNp7+fJlKpU6Y8aM5OTk9vb2lpaWiIiIjIyMW7dusVvkgPljdO9gK1QcERMTY/51eH9DeKe1tdXPz2/r1q2pqak3btxwcXGJjY2NiIiIjY21trZmn03TaDQxMTHmmJycHC0tLX9/fz5df4yNjV2yZAk2E9y2bVthYWFERAT4AJSUlEpLS//3v/+BBH/++Semv7e1tTUyMpKSkjIwMAADeBRFc3JyZsyYgcfjf//9d19fXy0tLVtbW6Bvzs3NVVZWFhUVtbCw6OrqQlG0tLR05syZ2tra9fX1vOv5rl+/fufOHeYlG0NDQzweb25uXl1dPWfOHHV19evXrwNftEuWLElNTUVRtKioSE1NTUZGxs3NLTQ09Ndff7169SpQ8+vo6EhISGhoaGDK1MrKShwOt23bNl9fX2tr61evXrGvJJLJ5A0bNhw/fnzv3r3Y4uOzZ882b94MlilYFNXp6emWlpb+/v5ZWVlkMjk0NDQwMJBdJcy9fEBdXd3x48eZYzi2Auy7QhDk2LFjVVVVLOWzd1ppaSlLhWfMmAEsZpqbm4F+berUqYmJiWVlZWvWrEEQhEwmx8bGrlq1CiievL29ZWRkxo0bd/v27bKyskWLFiEIcuDAARqNlp6ejiXLzMxkb/LNmzcNDAxsbGzc3d0PHToElnS41PPJkyfy8vIzZ86MiYkpKyvT19eXkpLS1NRMSUnBFPZjxoyRk5NLSEh48+bN8uXLEQSxt7fv7OxMTU2Vk5NjfosAt2/fdnNzy83NXb58uays7OzZs7du3fr06dMh0lgHBgYaGRn16w1paWnBFsR44dGjR83NzRMnTgR9fu3atcWLF4Ovb968eeyvH0uVQJ+Ii4vj8Xj2VSB+WX/sL0B+HTp0CIVAIIMkv3ihv/ILrFMvWrQIhPfs2QNW5L9+/aqsrIwt2nKvEoPBMDc3H5H1xyHRf4EZAS9GpxAIhMv88Tu3fFZVVdXW1nIPZ2RkYPOnBw8e6OvrA4tLU1PTrKwsFvUrnU7HVoExlVFgYODBgwd/EP39lStXgLX9jRs3BrZ4DIFAEAQxNDR8+/ZtYWEhj+nv379/4sSJz58/e3t7A2s4Pz8/7BvsLVxYWGhkZAR0YQQCQV5eHkGQCRMmiIiIVFZWEggEZmVZXFwc88o+0Iht3LhxpDT6rOcP+fj4SEtLw/M7IBB+oL6+PiIiQlxcfNu2bSNrrxAXF/fq1avVq1cDreXws3z58szMTJZIIfiKQCB8i6ys7L59+/ihJr99g9/6B7oehUAgoxUovyAQCJRfED7me3bPDL/9/eDWHwLlF19z/fp1VVVVHA6nqalpbGysp6dnYWEBnDRBBuw5fmSPCMB4/fr1vn37pk2bhln2QyA/lPwyNze3t7dHEMTDwyMxMTE1NXXcuHFz587l4gnv52HAnuNH9ogADFVV1bFjx1ZUVAzPKO/atWuBgYG+vr779u37GY4mgPKLLwCucsBGXxERkX/++YdOp3t7e49srfp0wT4MBQ7AczzGSB0R8H9eUAGBYTvAIiYmZsWKFTt27Ni3b5+CggK0XoTya2TA4/Hjxo1j99wwnAy6C/Yh9enOkZE6ImBEePbsmaKiooKCArh8+fIly0ZlCJRfw0RNTU1jYyPYhs
nuH52j33f2ZDz6d+/NuzyLC3YePdBzcavPUiAXL/scZ0bsnuN7c+3fGxzTc3Q/z9KupUuX9ub2vrdiaTSaq6uru7v7wYMHmRMPncP1/Pz8xYsXg/Djx48RBLGysoICgt8Ziv3bww/wVgw8OtTV1a1Zs2bs2LElJSUc/aOz+33nmIxH/+69eZdndmHMowf6v/76i4tbfRaf7ty97PfpOZ5LtZm9VmAutjmm7839PEu7Dh482Jvb+96qsXnz5nPnzoE0O3fuBL5u+nS4/t9//61evVpKSmrVqlWgG4GfCfZ9yOzbnsPCwsrLyzdu3Ojj47N27dqOjg64gZyvGBL/0XzF8ePHg4KCOjs7582bd/78eXCeyK5du4BjFjk5uZ6ensrKSi0tLXV19ezsbBcXF7Ano7dkqqqqDx8+tLGxQRBk4cKF4uLiSkpKwG/ckiVLgoKCvnz58uLFi6KiIk9PT1AHHR2djx8/slQsLS2NYxoikchck8bGxuPHj/v4+Pz99996enrAe2pvZGdnp6amApf/CII4OzufOnXK29sbnILBgp6enrm5OTgAacqUKUFBQcbGxn1Wu88mgEObWPpNRkaGpV0Igrx586atrc3S0hJBkLVr1+7ataurq0tERIRjsUVFRdHR0dhRUtra2uCoF2xhgaMv8urq6j179mzfvv3o0aP3798/cODAhQsXGhoaJCUl+5wIZ2Zm6unpKSoqgsVWeXn506dPs7ilhPAhP5T8OnToEOa0D9PgcPSPzuL3nUsyrKje/Ltz8S6PwaMHeu5u9Vnol5d9ds/xkyZN6rPavDShN/fz7D2MZWF2e8+x2PPnz8vKymJuo3lUwwGPgKDw+fPnHzt2rLOz89atW7xMA6uqqsAeZoCCgkJgYCCUX1D/NfLw6Md9KHzk9ysNgItbfXbBNzAv+8BzfH+d4veW/jvd5HMstra2Fsz++lXU/v37gfACmJmZ3b17l8dDl1lE5Nu3b/s8LRwC5ddgavE4xvPox/173L1z9y4PtNE8eqDn7lafucDv8bIPPMf31yk+x/QUCuU73eRzLPaXX37p7Ox88uQJLz8xl5K9vb3NzMx4We1hcbEfFRXFMpCHQPk1hIBhCLvBYW/+0Vn8vveWjBf/7ly8y2Mu2NXU1HjxQM/drT5zgYWFhfPnz+fiBZ8Fds/xXKqNwXxEAMf0X7586c39PEsP9+b2nmOx5ubmsrKyu3btqq6uptPpqamp4OAMUJPenNOz0N7ePmfOHGlpaeznI5FI4Ih1FrKyspjPbfDz85s8efKWLVugdBgdI5fRvv6YlJQEDjdauHBhXFxcn37cT5w4weL3nWOysLAwXvy719TUcPQuz+KCnRcP9H261WcusE8v+Nw9x/fmFB8jNzeX5YgA9vS9uZ9nadfdu3d7c3vfWzUKCgoWL16Mx+NXr17t7e29evXq6OhokL435/Ts9U9KSmI+UVxDQwNBEBYv/uAc5fz8/JCQkMuXL584cSIiIqLP9UoIn6w/Qv+FkB+Tf/75x9bWluVsnkOHDu3fv585EsgvsP8Mws9w9F8I/U9Afkw+ffrEIryys7MXLlzIEvn69Ws1NTXYXVD/BYHwCwwGA/hxZ0ZZWXndunUskXl5edra2rDHRinQfzTkR/xbFhA4cOAASyS7REMQBJjUQuD4CwKBQKD8gkAgECi/IBAIlF8/FNXV1d7e3v7+/sxWiz8nVCq1X+lRFG1sbGSJfPjw4YkTJ16+fAk/JwiUX0NLZWWlnp6eqamps7OzkNCwLl8kJCSsXr0ah8PNmDHj999/NzIyWrBgAbMYjY6OVlBQwOFwwOIc8OLFi/Xr1+NwOGtr68rKysGqDIqiR48ebW5u7lcuHA4XFhb29u1b5kg9PT0ymbxz587ExET4RUGGFd7t71+/fm1nZ2dqauro6Lh58+a1a9eGhob29PTwbkH75s0bU1PT77fE/Z5ygoODDQ0NR8qG+OnTpwiCnDhxAlxmZGQICQnt378fSwCcC06cOLG+vp7ZO9XEiRP71dV94ufnd/fu3QFk7Orqsra2bmtrY4n39fU1NzeHZuKQ4bS/53X85e7uPmvWrNevX4eHhwcGBl69evV///ufra2trq5udXU1LyW0tLSQSKT+/uEPejl0On2Yh13MAJfEmLeDFStWLFu27OrVq1gCKSkpbW3tmpoaBwcHbGuEpKSkmpoayx7j76Gqqio7O9vQ0HAAeYWFhTdv3nzq1CmWeFFR0fb2djgggPDd/PHq1auenp7d3d2+vr6YD6ydO3dqaGg8efJky5YtXPwWAJqbm1evXl1SUvKd1R2scvgHzEMWxm+//bZ169YbN24w+4/GnGENCsHBwd+zRWzVqlWPHj3q80eHQEZefrW1tTk5OYGty8BnC6YKsbCwAOrb8PBwf39/3Deio6MLCgo0NTVxONzkyZNB4tDQ0NevXwNzZz09vR07dggJCeFwuL/++svc3HzcuHG6urpFRUUIgvSrnKSkpLKyslmzZi1evPjr16+jrveTk5OzsrLYjxQ6e/assrLynj17ysvLh+K5iYmJenp6A86Ow+HU1dUfPXoEvx8Iv8uvR48etbS0IAiiqKjI4uYNc3CclJS0Z88ebIwwZ84cR0dH5pR79+5duHAhgiDz5s17+PBhYGDg7NmzQUYvL6+goKCcnJxVq1a1tLT0qxwjI6PExMRXr149efIkIyODlwa3t7eLiIgwx8TFxYWHh+/Zswdc0mg0NTW1IT27KCwsbOXKlVOnTjUxMfHx8XF2dmZJICUlFRUV1dHRYWlpOejDnI8fP6Ioyny4Tmdn58WLF69cubJ169b8/Px///3Xzc3N19eXSyGLFi1iOVZDTEwM+LeBQIaNvjVB2JfM7o4K81FZW1uLw+GYvUf16fQSOD7+9ddfp02bBsppaGiIjY21s7PrVzkbNmx4+PChlJQUcGjT51jy2rVrzIdRFxcXd3d3L1myZO/evWDol5ub29rayrLLlwUGg2Fvb8/lRGgFBYUzZ870dtfa2trNza2zszMsLGzXrl3379+/ceMGy3/D/Pnzjx8/7urqevr06YMHDw7iT15RUYGNZwHXrl3bsmWLuLj4ixcvHBwcnj596uTkRCAQuBQyceJEzOkgwNDQ8OjRo/n5+XPnzoXfFYRf5NfMmTMx3RPLLcwUCBwfO7CZCIIgEhISEyZM+Pz5M8dzGbijoKDA45GIN2/edHNzO3jwIIlEwiJ7enosLCy8vb0NDQ1BZe7fv6+vrw/Cz58/DwkJCQwMZB21Cgh8/+GmoqKijo6OJSUlZ8+evXPnDvvWYhcXl7S0NHd394Ep2hEEKS8vz8zMBILJ1dUV6C7r6uqY/SwDV65gzJufn29ubo7D4ZibzLETCARCXV0dc4y8vPzdu3dNTEzMzMw8PDzY9XoQyAjMH9XV1YFX9aqqKpZbWAwvY58+5Oi3NUEJCYmhayqJRMrPz/fx8WE+OlBTU1NAQODKlSuYv00gv0AYnGM0pD8AmA6Dk8rYpeSVK1fGjRu3efPmPg9nZKepqenmzZvW1tYWFhaCg
oLAdAOc5cEyJwXnbNNoNAqFYmBgwFIOx07o6elhGRpXV1evXLkyMjLy6NGjUHhB+GX8JSws7O/vb2dnV1pa+vr1ayKRiN0CM4glS5aAE8bAAj93fQ2wCWBJ09zcDBxAAzeq/Srny5cvTk5OkpKSFy5c6HORTlJS0tbWNjw8HHgWBXz+/Lm0tBR8t62trbm5ubGxsdjwkOPhN985f2QGNHzKlCngsru7m7nhEydOvHz58tq1a8FxR/0iMjISHL3T0tLy8uVLbBIqJyfHbkMPDqAWFRUFp8NVVVWJiYmBKSTHTmhsbGQZdKempqqoqAC1JgTCL/ILQRAbG5v//vvP29t73759t2/fBqqriIiIx48fz58//9q1a2D0pKKi8vTp09TU1FmzZoGzqevr6+vq6sCLDiYvJSUl8fHx//33Hyj57t27f/zxB7DbVlZWNjEx6W85QkJCQIyuW7eOl8MaxMTEmP2yg89bWloaKPWjoqJmzJgxceJEGo0WHBycn5/v6uqqoqIyWPNH4KG/s7MTXFZWVv77778KCgrgcEYEQb58+VJfX8+cxdjYePfu3djoCUVRDw+PkJAQaWnphISEX375JTIy0tzcnMV1PeD9+/eHDx9GEGT79u3YqsWMGTPevXuHpUlPT/fw8Hj06FFUVJS6urqAgACKogkJCY6Ojlw6oaysDFMsAGg0GjxwHzLc8G5//+LFCysrKxKJtGPHjq1btxobGwcHB3d3d2MJcnJyZsyYgcfjf//9d19fXy0tLVtbW+yA6NzcXGVlZVFRUQsLi66uLnC4v62trZGRkZSUlIGBwYcPHwZQTmlp6cyZM7W1tZkN1rkQGBhoZGTEHMNgMJydnX18fCIiIrS1tZ2dnVEUzc7OrqmpWbRoUVFR0WAZEKelpRkbGwPlEYlEWrVqlaqqqqWlJVgQRFH0+vXrRCJx/PjxAQEBLF78t2/fDsKRkZE3btyg0WilpaX79u1ra2uLj49nf1ZjYyM4NxtFUTqdrqury3xXX18fHHmNomh6erqlpaW/v39WVhaZTA4NDQ0MDKysrOTeCVu3bn358iX3joVAhtr+fsTO7wDy69ChQ8PcC+yfGTgaBwQmTZqEfZbFxcXa2tr89ivS6XQsHBkZGR4eDk61YOHOnTsVFRUg/OXLl9mzZzPfDQgIuHDhAi+P49gJ7e3tOjo6LIdcQPkF4d/9Q4MOOD5r+I1OxcTEmHXhjY2NEyZMyMrKQhDk+PHjNjY24JQaYKRub29/+/btgZ1sOFQTfqbNT1OnTi0vL8dmjpWVlVh/PnjwALN3OXv2LMu5YZaWlomJiby0i2MnXLt2zcnJiUUpRqfTOc5hIZCRXH8cCq5cuQKs7W/cuPH9hgj9wtDQ8O3bt4WFhZg427dvX1VV1fHjx1VUVDw9PbGUU6ZMaWxsFBQUHMSNh4PLly9fNmzYgF36+vpi5z9OnTo1LCwsOjr67NmzampqdnZ2LOsYe/bs8fHx6fMR7J1QVVWVm5u7adMm5mStra1xcXEbN26EXxSET/VfPwxfvnw5c+bMhQsXmJV3o5ETJ06wTOKio6NRFK2trb19+3af2W/cuFFYWNivJzIYDG9v75aWFubI69evu7u7UygUOMeBDPP88Wc8v0NWVnbfvn0/QEPExcWZJ3F5eXngKLCcnJwVK1b0mZ3ZjpdHcDici4sLS+Rv34BDAQif2k9A+BOWjZPz5s0DAVNTU9g5EKj/gkAgECi/IBAIBMovCAQCgfILAoFA+QWBQCBQfkEgEAiUXxAIBMovCAQCgfILAoFAoPyCQCAQKL8gEAiUXxAIBALlFwQCgUD5BYFAoPyCQCAQKL8gEAgEyi8IBAKB8gsCgUD5BYFAIMMN9H/fP1pbW4ODgwsKCjZt2tTa2kqhUBYuXNjV1SUiIpKcnHzhwgVRUVHYSxAIHH/xI4WFhXZ2dmlpaWJiYmZmZnPnzvXz89uwYYOFhcWrV6/q6+thF0EgUH7xKUuWLKmrq1NUVFy2bBmCILm5uU5OTsLCwlQqtampadKkSbCLIBAov/iXjIyM//3vfyD84MEDfX19BEGuX79uamqalZXV0NAAuwgCgfJrCMnOzgYDqIFNIY2MjIAujEAgyMvLIwgyYcIEERGRyspKAoEA3yoIZHj46fT3aWlpDx48+PTp0/v37wdWQkBAAAhISko+ePAAhNd/A75PEAgcfw0hBgYGJ06cWL16NfztIRAovyAQCATKLwgEAoHyCwKB/AxA+3tudHR0qKur02g0AQEBBQUFjmlQFO3o6GhsbGQwGCy34uPj582bB7sRAoHyawQQExPbsGHDqVOnEATx8/MjkUi9pWQwGE1NTV+/fi0pKcnMzAwLC2toaDh79mxYWBjsRggEzh9HBk9PT21tbQRB7OzsKioqeu1HAQFpaWklJaW1a9f+888/Hz9+PHPmzK1btz5//gz7EAKB8msw6fkGLynHjBkTFRWFx+MbGxs3bdrU3d3NSy5xcfF9+/ZFR0dfunQJvmSQIQJF0VFRJpRfg0ZBQYG7u7uvr29tba2Tk9OZM2f6zDJt2jQghnJycjw8PHh/1po1a+Tk5EbXCwHhQkVFhYCAQHl5OUu4v0RFRSkoKJSWlg44WVhYmJGR0aFDhwaxdTdv3iSRSM7OzlB+8S8aGhpubm5Pnjzp6enx8/NzdHTkJZeFhcX27dsRBDlx4kRGRgbvj9u2bRsOh4Nf/o9BVFTU4sWLp02bxhLuL4qKirq6utLS0gNOtm7duuzsbBqNNoit09fXz8vLa29vh/KLfxkzZoyYmJiQkJCAgICwsLC4uDiPGf38/IhEIoqiW7ZsgVqtn5PIyMiNGzeyh/uLjo7O9evXJ0yYMOBk0tLS48eP79dDu7u7XV1duSTA4/ETJ04cXb/IqJFf4eHhra2tI1gBcXHxmJgYUVHRmpoaa2trdmsJyI/Ny5cvS0pKzM3NWcIDA0VROp0+WMn6pKury8bGJj4+/gf7UUaH/GIwGLdv35aQkBjZasyaNcvX1xdBkJSUFD8/P/hJ/2yDL319fTk5OZYwBoVCcXBw0NXVffv2ramp6fjx48H/nK+v74IFC8aPHx8VFYUgSFlZmbu7O5FITEpKAi6YLC0tbW1tk5KS1NTU5OXlExIS2JP1Jt3++uuvyZMnE4nEO3fuMNdk27ZtXl5exsbGwIInKSkpOzu7rq5u7969Fy9eBLaNnp6eHh4emzdvJpFIzFOKoKAgIpEoJycXFxfH8kQe24ggSENDg6urq6Ojo7a2tqurK5jqsleMSw/w+j/AzD///BMSEoIOBlQq9Zdffunq6gKXAQEBLi4uIEyj0fz8/AICAjo7O8PDw21sbIyNjcvKynorKjAw8Nq1aygfwGAwgBXYmDFjnj17NoASOjo6goKCwsPDrays8vLy/P39XV1dz5w5g0L4mJ6eHkVFxbCwMJYwM+3t7WZmZhISEqGhoZ2dnTk5OcAxSWFhIYPBcHBwkJOT6+7ubmho8PHxQRDk1q1bKIq2tLTMmTNn+vTpV65cqa+vNzQ0nD59OoqiLMnYUVJS
0tTUjI2NzcvLW7FihYCAwKtXr1AU7e7uJhAIFy9eRFE0OTlZUFCwoaEBRVEjIyNVVVUs+8aNGxMSEsALSSAQtm/fjqKotra2qqpqYmIinU43NzeXl5dnMBgDaCODwTA1NQXPff78OYIgp0+f7q1ivfUAC8uWLWOPHMLx1/379/F4vLCwMLiMjY1VU1MD4a9fv2ZlZdXW1pLJZCMjo0uXLnV0dLALeyBer127VlhYOGBdw+CCw+GCg4OnTp1Kp9N///335ubm/pZw7dq1LVu2WFlZjR8/3sHBYffu3U1NTZhZxvPnz3fs2AEHO/xGTk5ObW2tqakpS5gZMTExVVVVUVFRGxsbERGRhQsXiouLKykpaWlp4XA44Ln3y5cvMjIyc+bMwXJJSkpOmzZNRUXF0tKSQCCsXbv2/fv3XV1dLMk4oqenZ25uPnfu3MjISAEBgaCgIGCKuGvXrlWrViEIIicn19PTU1lZyZIxOzv76dOnwI2dqKjorVu3du/eDW7p6uoaGRkJCQkZGxvX1tY2NTUNoI1paWlFRUWenp579+6NiIjQ0dH5+PFjbxXrrQd4+V2G0P7+3r17BgYGINzR0ZGTkxMaGgou5eTkPn36VFBQkJubSyAQmpqaZGVlLS0t2YWXqalpSkqKkZFRn7qGdevWsZcAhvrAgB4TQFwKiY6OJhKJ3B8kLS0dFRW1bNmyd+/eAVOMfnXLggULwKJBfn6+ubk5DocLDAzE7s6bN2/u3LlQXvDh5HHt2rV4PJ4lzP73hoUFBQWlpKSwy3HjxiEI0tjYKC8vz/ISMl+OHTsWQRA6nS4iIsIcn5CQwPwXXltbCyYB4FJeXl5LSwtYWuBwOA8Pj7KyspMnT7a1tYHSWOr55MmTSZMmYeUvXbqUvTIyMjIIgnR2dg6gjQUFBUQikV3H0lvFeuuBEZNfKIqmpaVhX+bjx48VFRWxxWYURUtKSo4cOUKlUo8ePdrW1hYSEsKu3sLhcGDWaWpqOmBd6aZvDG7rdHR0Dhw4cPbsWRsbm/7m1dTURBCERqNRKBQwQWBpMrS34DfodHpsbCxQGzGHh5MVK1YUFhZil+zr5rKyssAVMJislZWV+fv7l5eXHz9+nL20hoaGDx8+oCg6RC8blUotLi5mLp9GowkJCXl7e3OvGL/o79+9e1deXr5o0SJwmZaWpqenh6Jofn4+giBVVVVdXV12dnYPHz7E4/HJycn29vYc7TwVFBSuXr2amJgIMvIJX79+jY+Pj4mJAcJoADx79kxUVFRLSwv0RkNDA41GCwgI2LZtW1lZGRQZfMW9e/fodPqvv/7KEh5OJCUllZkQEGD9cj99+qSnpwfOlHFzcyOTyRISEiyr5Ji9GJFI/PjxY3JyMjaeiI6OHsTaKisrf/jw4datW1j5np6eFAqlt4rxnfxKS0sTFRUVExNDEKS+vj4mJmb+/PkUCgUIqaKiIgMDAykpKXt7+2PHjmVmZkZGRn78+JFzFQUEfL/BJ28zjUYzMzNzcnIawEucnp6+ZMkSFEWjoqLU1dUFBARQFE1ISJCRkXn27BmJRCouLu7o6IAig98mj2ZmZuBkT+YwO52dndjfMFinwox+gEIH7FqjUqkIgmCqpY6ODux7BunBxIolGTvYLQqFIiEhAfQnNTU1QOtfXV0NdMqVlZU1NTWysrIVFRXV1dWFhYUmJiaTJ0/esGGDl5dXXFycjY0N+CduaWnBVLqgcDDR628bzc3NCQSCra2tr6/vw4cPnZyc5s+f/+XLF44V49IDPE30hmL9cf369YsXLyaTyWHfSEhIcHFxOXfuHFjOcHFxuXz5MvOinqKiIp1O51KgjY1NS0sLP6w/bt261dnZeWDZ09PTLS0t/f39s7KyyGRyaGhoYGBgZWUluFtcXKytrQ0X+/iK1tZWcXHxe/fusYQ5/rgzZsxAEOTcuXPNzc1A+zN16tTExMSysrI1a9YgCEImk2NjY4EOe+nSpZmZmXfv3pWRkRk3btzt27fLysrAlOXAgQN3795lTsb+uJs3bxoYGNjY2Li7ux86dKi1tRVb4DY0NMTj8ebm5tXV1XPmzFFXVy8tLX3y5Im8vPzMmTNjYmJQFC0rK9PX15eSktLU1ExJSUFRNDw8fMyYMXJycgkJCW/evFm+fDmCIPb29kBg9auNNTU1BQUFOjo6EhISGhoaN27c4FKx3nqARqP1uf6IY5m1+fj4SEtL29rafs//FVgoffz4sYaGBvvdnp4eTU3NzZs3m5qaEolEYEsxc+ZMY2Nj/v8rPn78+JMnT27fvi0oKDjohZPJZA0NDRkZGRMTE/YJAgTyM7N8+fLMzMzh0N8/ffoUj8erq6tzvFtQUODi4lJaWmplZaWioqKqqmptba2oqMj/PRgTE3Pjxo2srKyhEF4IgkyZMqWxsXH8+PFQeEEgvDAk8quoqMjCwqK3pY353xh1PUWhUA4ePJiZmSkpKcl7rr///vvIkSM8rvLs27cPvpEQyAjLrx/PAvP9+/ebNm2Ki4ubPHky77mCgoIkJCSgPQQEMprkF7+BouilS5fevXsHbEc/f/5Mo9EyMzMvXboUFRWFw+EeP35sYGDw+++/c8xOpVJNTEz8/Px4d2ZfW1t7+PDhy5cvc3HZCoFAoPzqm6dPn2pqajIYDGdn56SkJGD3cOrUqT/++MPX11dYWHjq1Kl//PEHR/lFp9N/++03fX19LS2tDx8+cCyfRqM1NTU1NjZSqdTS0tLHjx+np6f39PSsWbNm0qRJ8CWDQKD8Gjji4uIaGhpBQUFbtmyZPXs2sGqpqKjYtWsX2J5ZVFSkrKzMMa+Tk1PGN/z9/fv7XGtra/iGQSBQfn0Xs2bNAqYrV69eBTEUCmXatGnYVsd79+6tW7eO48STRCKBba4DYPXq1fANg0Cg/Ppe/vvvPyqVunjxYnCZkZGB7S2vrq7OysqKjIx8/fr1jBkzmG0jcDgcMMmDQCB8yM9iZwQ2YGLOfFJSUjD5lZaWtmzZMgKBcPfu3SEy7IJAIFB+DZzi4mLMgwWdTqdSqfr6+uBy6dKlU6dOjYiIYPfoBIFA4Pxx5GH2sTVmzBjmlUQlJaU+T2l8/vx5SEgIcyFcYDAY8fHxUBpCIFB+8QW8uxWMj4+vqKjw8vKC8gsCgfNHvgCHw/G4J3HdunXfufsdAoFA+TU4QLeCEAicP45WgFvBiIgI4Fawubn51KlTHL1H2tnZqaiowB6DQKD84hd0dXVLSkpQFAUOKvF4/IkTJ2C3QCBQfo0OgoOD7e3tb9++bWJi0traevToUY6u+u3t7YFrSggEAuUXv8DsVhCPx3t7e3NJfP8bnz9/9vb2XrlyJe8uKyAQSH8ZEv/REAgEMrhw9B8N1x8hEMhoBcovCAQC5RcEAoFA+QWBQCBQfkEgECi/IBAIBMovCAQCgfILAoFA+QWBQCCjE7h/qH+0trYGBwcXFBRs2rSptbWVQqEsXLiwq6tLREQkOTn5woULoqKisJcgEDj+4kcKCwvt7OzS0tLExMTMzMzmzp3r5+e3YcMGCwuLV69e1dfXwy6
CQKD84lOWLFlSV1enqKi4bNkyBEFyc3OdnJyEhYWpVGpTUxM8bRsCgfKLr8nIyPjf//4Hwg8ePADnGF2/ft3U1DQrK6uhoQF2EQQC5dcQkp2dDQZQA5tCghO5W1tbCQSCvLw8giATJkwQERGprKwkEAjwrYJAhoefTn+flpb24MGDT58+vX//fmAlBAQEgICkpOSDBw9AeP034PsEgcDx1xBiYGBw4sSJ1atXw98eAoHyCwKBQKD8gkAgECi/IBDIzwC0v+dGR0eHuro6jUYTEBBQUFDgmAZF0Y6OjsbGRvZDIePj4+H5HRDIjya/3r9/n5KS0tjYOHHixK1btwoKCvJn74iJiW3YsOHUqVMIgvj5+ZFIpN5SMhiMpqamr1+/lpSUZGZmhoWFNTQ0nD17Niws7Md7aUbLzwf58UH/L//8809ISAg6lDQ3N3t6ejK+YWdnd+jQIXTYiYqKmjRpEi8paTSatrY2giDjxo378OEDj+W3tbWdOXMGj8fX1dWhPxb88PNBfkKWLVvGHjkC+q93795dv34d9421a9feuXNn+OvQ8w1eUo4ZMyYqKgqPxzc2Nm7atKm7u5uXXOLi4vv27YuOjr506RL7XSqVOor+3hobG/nt5/vBBhCjokz+ZATkl5aWVmpqKghnZWUNs4aooKDA3d3d19e3trbWycnpzJkzfWaZNm0aEEM5OTkeHh68P2vNmjVycnLMLxOKokePHm1ubh4t7wcOhwsLC3v79i2f/HzfT0VFhYCAQHl5OUu4v0RFRSkoKJSWlg44WVhYmJGR0aFDhwaxdTdv3iSRSM7OzkPahzy2/cecP2IUFBQYGBg0NjYO5yiURqO1t7fT6fSenp6urq62tjYeM27fvh18z/fv3x/w0/38/O7evTu6xu1dXV3W1tbsHTUiP9/3c/LkSR0dHfZwf3n8+PFvv/3Wp36AS7KvX7/i8fj9+/cPYuuampqmTp1qZ2c3pH3IY9uHYf44YvLrxYsXDg4Ora2tNBptVLz3bW1tRCIRQZCJEycO7JerrKz87bffRqPqIS0t7fDhw6P658OYNWvW2bNn2cMjgpKSUr/kF51OP3DgAPc02traQy2/fmr9F4IgRUVFqampAQEB4uLiPj4+vGQJDw9vbW0dwYGquLh4TEyMqKhoTU2NtbU1u7VEnwQHB9va2o5GLcOqVasePXqEaQwH8PPxCS9fviwpKTE3N2cJD3juQqfTBytZn3R1ddnY2MTHx/OJ2m5QGjX69F9VVVWrVq3y8PDA4/HCwsK87KNmMBi3b9+WkJAY2c6aNWuWr68vgiApKSl+fn79zZ6YmKinpzca5RcOh1NXV3/06NHAfj7+ITIyUl9fX05OjiWMQaFQHBwcdHV13759a2pqOn78ePBf5evru2DBgvHjx0dFRSEIUlZW5u7uTiQSk5KSgBslS0tLW1vbpKQkNTU1eXn5hIQE9mS9CYK//vpr8uTJRCKReTGEQqFs27bNy8vL2NgYWOEkJSVlZ2fX1dXt3bv34sWLwD7R09PTw8Nj8+bNJBLp8+fPWPagoCAikSgnJxcXF8f+HkpKShIIhPz8fARBysvLtbS03NzcOD4UqIzJZLKBgUFOTg6RSDQwMGBpFHuu3joEwLHaDQ0Nrq6ujo6O2trarq6uNBpthPVfdXV1dnZ2KioqCIIICgoSCIQJEyZkZ2cPoKjAwMBr167xwyCWwWAAK7AxY8Y8e/aM94wVFRVz585ljuno6AgKCgoPD7eyssrLy/P393d1dT1z5swQ1ZxKpf7yyy9dXV3gMiAgwMXFhffsERERZDJ5VE9Aenp6FBUVw8LCWMLMtLe3m5mZSUhIhIaGdnZ25uTkAOcihYWFDAbDwcFBTk6uu7u7oaEBDDxv3bqFomhLS8ucOXOmT59+5cqV+vp6Q0PD6dOnoyjKkozj/FFTUzM2NjYvL2/FihUCAgKvXr1CUbS7u5tAIFy8eBFF0eTkZEFBwYaGBhRFjYyMVFVVsewbN25MSEgA7xKBQNi+fTuYP6qqqiYmJtLpdHNzc3l5eQaDwfLcAwcOiIqKNjc3g0tzc3MajdbbQ0tKStTV1SdMmHDq1Knw8HBXV1fmRnHM1VuH9FZtBoNhamoKHvf8+XMEQU6fPj2S+q+ysrLZs2enpKTQ6fSXL1+OHTu2vr5+YPLi6tWrDg4O7L/BSPH169epU6ciCKKkpNTU1MRjrqysLBMTE+aY4OBgoBTfv3///PnzGQyGo6Ojl5fXEFU7Li5OU1MTu9TT0wsODsbWNPz8/AICAjo7O8PDw21sbIyNjcvKypizp6enk0ikUS2/Hj16JCIiAn4y5jALBw8eJBAIINzd3S0uLo6pqCIiIhAEqampQVE0IyODWTCRSCRDQ0MQPnfuHIIgnZ2d7MnY5deePXtAuKamRkhIaPfu3eC1d3d3f/fuHYqieXl5wOsci/x69OiRkpIS9l1kZWW9ePECyC9bW1sQGR4eDux1WJ4LngVeADBI5PJQFEUtLCwUFBS6u7vBJXOjesvVW4dwrHZqaur06dOd/x86Ojo7d+7kRX4JDdHc2MHBwdXVFbip0dDQmDp1akFBAXBViiAInU4PCAgQFha2tbWNiYl5+PDhly9ffH19lZWVWcoxNTVNSUkxMjLqU0+xbt06S0tL9viMjAzebZRcXV0nTpzIPY20tHRUVNSyZcvevXsHTDF4Kbmurm7s2LHMMQsWLBAXF0cQJD8/39zcHIfDBQYGMid4/vx5SEgISySCIP/++29WVlZvE73Dhw9ramqy37p3756BgQE2gM/JyQkNDQWXX79+zcrK0tDQIJPJnp6emzdvNjQ0jIuLA3MKAIFAqKurG9W2QpGRkWvXrsXj8Sxh9j7EwoKCglJSUtjluHHjEARpbGyUl5dnTsaSC/zQdDpdRESEOT4hIWHjxo3YZW1tLRjIg0t5eXktLS1glIDD4Tw8PMrKyk6ePNnW1gZKY6nnkydPJk2ahJW/dOlS9srIyMgAwcGSV15efv369aGhoXZ2dlevXrW2tub+UBwOJy0tjW20YG5Ub7l66xCO1U5OTiYSiQPQyQyJ/Hrz5s2LFy+w3TYMBuPDhw9qampYAl4+GNAFYJpjamo6YD0rWDTkETExMV6S6ejoHDhw4OzZszY2NjyWPGbMGBaLWSBlaDQahULhqAWfN2/e3Llz2eP3fKO//yhpaWmYKHz8+LGiouK0adPApZyc3KdPnwoKCnJzcwkEQlNTk6ysLMufQU9Pj7Cw8OgVXnQ6PTY2FqiNmMPDyYoVKwoLC5lXhFgSyMrKAne+YAJVVlbm7+9fXl5+/Phx9tIaGhrAhhAWScojDg4OBgYGJSUlNTU1v/zyC48P5Ui/cnGsNpVKLS4uZo6k0WhCQkICAgIjIL+Ki4vnzp2Lve6JiYlqamqTJ0/GEvDywQAUFBSuXr1qY2OjpKTE8WPuk4nfGNwGfv36NT4+PiYmhuNIhyNycnIshuyAZ8+eiYqKamlpAdW4mJgY5oEa2LgPSoXfvXtXXl6+aNEicJmWlqanpwdsuIBWrqSk5M
iRI1Qq9ejRo21tbSEhISyrJY2NjSyq7tHFvXv36HT6r7/+yhIeTiQlJVlmGCx8+vTp999/B+fCuLm5FRcXS0hIsKx0Y4ptIpH48ePH5ORk4M0cRdGYmBiQnRdWrlyppKRkb2+/c+dOEMPloVzoby6O1VZWVv7w4cOtW7fAoAdFUU9PT3d39z7ll8AQrdN1dXWBcH19vbe395UrV1iGAyUlJXv27KFSqXv27CGTySEhIb0d3iMgIOD7DT75Emg0mpmZmZOTU78+gBkzZrx79w67TE9PX7JkCdiJqa6uLiAggKJoQkICGPDTaLSAgIBt27aVlZUNSp3T0tJERUXB6LK+vj4mJmb+/PkUCgXsDaiqqurq6rKzs3v48CEej09OTra3t2fZg1JWVjZz5sxRPXk0MzMDp3Myh9kBahrsRaXRaJjhDnirwTgabAJramrCpuTY1wvSg2kUSzJ2sFsUCkVCQgL8i9fU1AAFU3V1NVhArKysrKmpkZWVraioqK6uLiwsNDExmTx58oYNG7y8vOLi4mxsbMC/aUtLC7bBAxQOpnXsn5W9vX1xcTHm97y3h4KGMxfC3KjecvXWIb/99ht7tc3NzQkEgq2tra+v78OHD52cnObPn4/NrEdg/TE0NNTDwyMsLOzw4cMfP35kufvx40cREZHm5uagoKBDhw7Jy8tv2rSJu4bexsampaWFH9Yft27d6uzsPIC8+vr6YIUFqMMtLS39/f2zsrLIZHJoaGhgYGBlZSW4m52dXVNTs2jRoqKiokGp9vr16xcvXkwmk8O+kZCQ4OLicu7cOdDnCQkJxsbGWGKggmHZrL5169aXL1+OUs19a2uruLj4vXv3WMLspKenz5gxA0GQc+fONTc3A43M1KlTExMTy8rK1qxZgyAImUyOjY1dtWoVUN9kZmbevXtXRkZm3Lhxt2/fLisrA+PcAwcO3L17lzkZ++Nu3rxpYGBgY2Pj7u5+6NCh1tZWbG3a0NAQj8ebm5tXV1fPmTNHXV29tLT0yZMn8vLyM2fOjImJAap3fX19KSkpTU3NlJQUoLAfM2aMnJxcQkLCmzdvli9fjiCIvb09EMrsFgLHjx9nXhDn+NDQ0FDwt3rs2LGqqqr09HTmRnHMFRISwrFDgLUze7XBVEBHR0dCQkJDQ+PGjRt8bX/PywfDnxw7dszY2BhbiOkXAQEBFy5c4DFxcXGxtrb2oNSZTqfj8Xgu0sfFxeXy5cvMMlpRUZFOpzNbFejo6PDPEjDkJ4SP7O8zMzPNzMywSxUVFUVFRf4//DUmJubGjRtRUVED83hlaWmZmJjIo2YhODjY3t7+9u3bAzD0Z+Hp06d4PF5dXZ3j3Z6enuTk5E+fPr1+/RpMXb29vc+dOyck9P/pRq9du+bk5DRYyjgIZLAYAfnFywfDh1AolIMHD965c0dSUpL3XH///TemTJGUlNyzZw+PG26mTJnS2NgoKCjYpwqzT4qKiiwsLHqTPgUFBS4uLs3NzVZWVps2bTp58uSGDRuMjY2xBFVVVbm5uZs2bYJfC4TfGAGRAT6Y0tJSKysrFRUVVVVVa2trRUVFfu6m9+/fb9q0KS4ujnkVtU+CgoIkJCSYBYeBgUFLS0tRURFYcOTCvn37BqvyO3bs4HJ3/je4qEejo6N9fX3h4AsC5VffH8xQgKLopUuX3r17B0xGP3/+TKPRMjMzL126FBUVhcPhHj9+bGBg0NvaM5VKNTEx8fPz493XVW1t7eHDhy9fvlxRUcFyi4sTaj4Eh8O5uLjA7wQC5deI8fTpU01NTQaD4ezsnJSUBOweTp069ccff/j6+goLC0+dOvWPP/7gKL/odPpvv/2mr6+vpaX14cMHjuXTaLSmpqbGxkYqlVpaWvr48eP09PSenp41a9bwv1IPAoHyi68RFxfX0NAICgrasmXL7NmzgUVMRUXFrl27gJFtUVFRb4aFTk5OGd/w9/fv73PBtgwIBALl18CZNWsWMO25evUqiKFQKNOmTcO2Ft27d2/dunUcJ54kEgkYCg8AsP0TAoFA+fVd/Pfff1QqdfHixeAyIyMD28xcXV2dlZUVGRn5+vXrGTNmMNtG4HA4YLIIgUD4kJ/l/G2w4w/bkpmSkoLJr7S0tGXLlhEIhLt378KjDCEQKL/4juLiYsyDBZ1Op1KpmDOfpUuXTp06NSIiwtTUFL4QEAicP/IdzF60xowZw7ySqKSkxPGURggEAsdfPwLPnz/nbgXKDIPBuHXrFuw0CASOv/iC3lwJshMfH19RUeHl5QVnoxAIHH/xBTgcjsd9iOvWrRulh6RBID+C/GJxXPeTM+iuBCEQCJw/DhPPnj0jkUgREREdHR0IgjQ3N586dYqjTxvssDgIBALlF1+gq6tbUlKCoihwzovH40+cOAG7BQKB8mt0gLkSNDExaW1tPXr0KMcptr29PXA9DIFAoPziF4ArwfHjxwsICODxeG9vby6J73/j8+fP3t7eK1eu5N3lDgQC6S84lqGEj4/PuHHj7OzsYNdAIBD+Yfny5ZmZmSyR0H4CAoGMVqD8gkAgUH5BIBAIlF8QCAQC5RcEAoHyCwKBQKD8gkAgECi/IBAIlF8QCAQC5RcEAoEMJz/j/sfOzs4rV66Iiorev3/f2dk5Ozv706dPcnJy+/btG/RnNTY2zpkzp7S0FBx9FBgY+P79e7CDkk6nBwQECAsL29raxsTEPHz48MuXL76+vr2dpAuBQOD4C7l27dqWLVusrKzGjx/v4OCwe/fupqam7u5ucLdfru775P79+3g8Hju3LTY2Vk1NDYS/fv2alZVVW1tLJpONjIwuXbrU0dERFxcHX0oIBI6/emXBggXi4uIIguTn55ubm+NwOObTiXpzdf/vv/9mZWVxLBCHwx0+fBg4CGPh3r172EGTHR0dOTk5oaGh4FJOTu7Tp08FBQW5ubkEAqGpqUlWVtbS0hK+lBAIlF+9AgQNjUajUCg+Pj7swgiHw7Hn2vONfj0IRdG0tDRMOD5+/FhRUXHatGnY3ZKSkiNHjlCp1KNHj7a1tYWEhEhISMCXEgKB88c+ePbsmaioqJaWFoIgVVVVDQ0Ng+7q/t27d+Xl5YsWLQKX4AxwFEXz8/PBQ7u6uuzs7B4+fIjH45OTk+3t7eHhAxAIn8ovFEWfPn1Kp9NHsMHp6elLlixBUTQqKkpdXV1AQABF0YSEBBkZGeDqvri4GLi6/37S0tJERUXFxMQQBKmvr4+JiZk/fz6FQgFCqqioyMDAQEpKyt7e/tixY5mZmZGRkR8/foQvJeTHgOMxEaNVfr18+fLXX391dXXV1tZOTEwcqYEGDoebPn362bNnN2zYoK2tffny5aCgoLVr1+JwOF1d3a9fv2Ku7r+fe/fuzZkz588//wwPD09KSjp37lxZWVlBQQHQr2VmZpqZmWGJVVRUFBUVJ02aBN97CHcqKioEBATKy8s5Xo44WVlZO3fuVFNTi4yMBDFRUVEKCgqlpaWD/qzh0H/V1dWdPn26p6fn3LlzSkpK9fX1gYGB0dHRrq6us2bNGubOXfUNEF66dCnLX
WZX9zwe+Ngb3d3dGRkZjx8/1tDQwCKNjY1BoKenJzk5WVpa+vXr10QikUaj+fn5nTt3TkgIevSG9EFUVNTixYsxRSrL5YizbNmy8vLygIAALEZRUVFXV1daWnr0ya+enp7r168fPHhQVlYWxMjKyh4+fLizszMqKkpaWnry5Mn882Ywu7r/zqKePn2Kx+PV1dU53i0oKHBxcSktLbWyslJRUVFVVbW2tlZUVIQfJ6RPIiMj7e3te7vkB1hmMDrfGIoHDbn8EhQU3LVrF3u8qKiojY0N7+WEh4ebmZlJSkoOaW0H0YS1qKjIwsKC41ImgiDzvwE/RcgA9DAlJSXm5uYcL/kE9tceRdHu7u4xY8YM7oNGx/ojg8G4ffv26LIt2LFjB7txBgTy/YMvfX19OTk5jpeABw8eWFpa2traJiUlqampycvLJyQkgFvt7e1ubm5eXl47duwwMTEBS+0FBQVkMtnAwCAnJ4dIJJqamlIoFAcHB11d3bdv35qamo4fP97a2prBYPj6+i5YsGD8+PFRUVGgQAqFsm3bNi8vL2Nj47CwMPYKl5WVubu7E4nEpKQkBEEaGhqmTZu2f/9+Nzc3KysrHA4HTgtqaGhwdXV1dHTU1tZ2dXWl0Wg8dQf6f/nnn3+Cg4PRwaajoyMoKCg8PNzKyiovL8/f39/V1fXMmTM8Zg8MDLx27RoKgfzc9PT0KCoqhoWFcbzEaGlpmTNnzvTp069cuVJfX29oaDh9+nQURRkMhqGhIfaBe3p6jh07tqampqSkRF1dfcKECadOnQoPD/fw8GhvbzczM5OQkAgNDe3s7MzJyUEQZP369YWFhQwGw8HBQU5OrvsbBALh4sWLKIomJycLCgo2NDSgKFpQUIAgSEREBIqiDQ0N4I/81q1bKIp++vTp8uXLoAK//fabjIzM58+fGQyGqakpyPv8+XMEQU6fPs3SqGXLlrF3yDDJr+Dg4La2NhRF9+/fP3/+fAaD4ejo6OXlBe4+e/bM0dGRY0YGg3H16lUHBwcGgwFfX8hPzqNHj0RERJqamjheMkMikQwNDUH43LlzYNsv2EDS3NwM4puamsTFxclkMoqiFhYWCgoK3d3dWAkHDx4kEAgg3N3dLS4uvn//fnAZERGBIEhNTQ2DwXB3d3/37h2Konl5eQiCFBYWssgvFEUzMjIw+dXd3U2j0VAUTUxMRBAkNDQURdHU1NTp06c7/z90dHR27tzJi/waptWugW3ZQVHU1NQ0JSXFyMiozxn+unXrOG6+ycjIuHPnziidLOjp6a1fvx5OmiDYbHHt2rV4PJ7jZW8aqLFjxwJ/AXl5eTgcTlRUFMTj8XgikQisqXE4nLS0tKCgIMcSBAUFpaSksMtx48YB3wTy8vIeHh5lZWUnT55sa2sDT+FeGcFvtLW17dy5U09Pz9raGkxgiUSin58f3+nvmdcj+rtlB4fDBQQEuLi4mJqaDlhDSSQSR+/7OnHiRPjRQgB0Oj02NvbixYscL3lUq4MZHLbSLSsr+5069dOnT5eVlfn7+5eXlx8/fpzHXJ6enjU1NampqeDDp1KpxcXFKIpicoBGowkJCfVpBjCs1kYsW3bExMSkpKSCg4Pz8/NdXV1VVFTYsygoKFy9etXGxkZJSYnjGI0XEQClAOQH4N69e3Q6/ddff+V4yeM0CEGQhw8fbt26FcTU1tZu2LBhwFXKzc11c3MrLi6WkJDg3dr+xYsXPj4+hw8fVlVVRRCktLR0+vTpHz58uHXrFolEAhMvT09Pd3d3vpBf6enpHh4ejx49Ytmy4+jomJOTQyKRIiIiuGzZERAQ8PX1dXZ2BrPuYVjrZJ+u9rlboDc7ie+ESCQuXrwYfroQMFs0MzPDZn8slyx0dHRgAqW1tRWM13R0dNatW3f+/PmNGzcKCwu/evWqqalp9+7dCIJ0dXWBCSBGZ2cn9tqjKEqj0UA5IDEw7aypqQGKrbFjxwLXT5WVlZMmTQJFgWRgeIUgSFNTE/i+HB0dlZWV3dzcwN3g4OBDhw4dPHjQ1ta2oqJizpw5MTExhoaGvAwMh0N+MW/ZERERuXz5cldXF7Zlp6SkpM8tOzIyMmPGjGltbR1q+y8EQRITE+Xl5UGPjziDtRMTMtppa2u7/Q2OlyykpqY+efKEwWDEx8erq6uHh4cjCHLq1Kljx45FR0cfOnTIyspKU1Pzy5cvFApFUlLy8uXLWVlZX79+PX78uLW19aRJk+7fv5+QkPD169fz589bWVmFhoZSqdSUlJSkpCRVVdWQkBCget+zZ4+hoaGXl1dRUZGfn19iYuLhw4c9PDwuX76MIEhYWNiUKVMEBQXPnz+PIEhISMi0adPevn1LoVDWrFlz9OjRnp6ejx8/5ufne3t7p6en79y58/Dhw9OmTTty5AiPal8cy8jCx8dn3LhxwCJjeCCTyRoaGjIyMt+/Zef7QVF0/fr10dHRYNN1b7x//z4lJaWxsXHixIlbt25l1npCIJChYPny5ZmZmSOp/+LIIG7Z+X7u37+vq6vLXXi1tLRcu3btr7/+QhBk+/bt79+/P3bsGHy9IJDhZ+RFxr59+8hk8tq1a/mhOwIDAx0dHbmneffu3fXr18Ga6dq1a0evcQYEAuXXjwOFQtHQ0OBoTcOMlpZWamoqCGdlZc2bNw92HQQC5dcI4+/vz4uHaBwOBwwyCgsLX758ybvR3enTp2EnQyCDCPQ29f+nqKho8uTJBAKBx/QvX768cOHCrVu3sLOF+uTZs2ewnyEQOP4afM6cOUMmk3kXdqmpqQEBAeLi4tDJBAQC5ddI8t9//0lKSiooKDBHfv78edu2bTNmzMDhcEJCQrKysnJyco8fP66qqlq1apWHhwc42PH9+/eDW5ny8vJt27YdOnToyJEj9fX18NeBQOD8Eenp6Tly5AiRSNy4cSPLLR8fH1dXV+aYt2/fmpubnzx58sKFC2/evFmyZElpaSk2uxw6sZKfn79y5crk5OQh8lcJgUD59f9j79zDatrexT9Wl69FSiq6qOhCReq4LmJTylN2bF1dSleUrSSxZXOIqKhvkm4i6UKFnVBCItpC0kUUWSgqlYqSbmu11vw9vz2eM8886zJbZeXrMj5/9Mw5GnPMMd4x5rvG+4453/H90dvbGxUV9fz585ycHA799fbtWyaTqa6ujqdgGObu7u7r62tmZgYA0NXVVVVVLS0tNTExEfyObDbbzc0N/+QCAPDgwYNVq1bhp0pKSocPH+a4as2aNdbW1kh5IRBIf/0vw4YN27Jly5MnT/T19YuKiuCHrPjka+vWrcTMz58/Ly8vh5+SQk1UU1OD7/sPPyWLjo7+17/+5erqevbs2du3bzc3N4eFhWlqav6vZS4iEhcXRyzWxsYmLS2NpJL19fXPnj2j0WgHDx4EAJiamk6bNg2NUQSCHz+R/4tCoejp6RkaGh49ehRPbGpqamlp4dhlo6KiYvr06fjCYlZWlra2NnGfkQ8fPuTn5zc2Nvr4+Jibm584caK7uxt+v/qFk0T4ndqOf0DKC4FA+uv/4OXlde7cuaamJnh6
5MgRb29vjjxTp07FP51vaWkJCQlJSkoiZpCXl3/37l1ycvLevXtlZWU/f/4sJyfHM3rigFBTU5s+fXpzczMalwgEsh95sGzZMgUFhePHj+/evfvjx490Op1oS0K0tLScnJz27ds3YcKEV69epaSkqKioEDNgGFZZWblv376PHz/u37+/s7Pz5MmTX769CIVCycjIOHbsmJ6enri4OAwQjsYoAoH01/80WEzM09MzLCxsx44dUVFRPPd2AwCQ7+1WV1fX29u7du3a1NRUKSmp8+fP9/T0nD59mjwK2JgxY/qtnqqqamBgIBqXCASyH3mzdu3atra2pKSkoqKihQsXDqKEx48fL168WFJS0s3N7cCBA3fu3ElJSXn79i35VcSQ/wgEAs2/BoOMjIyDg4OHh0d6evrg4qbeuXPH2toaP504ceL48ePHjRuHxhMCgeZfQ86mTZv09PQGFDsch8ViZWdnv3v37tmzZ3CjgZCQkMjISDEx9DEpAoHmX0OPrq7uw4cPB3dtaWnptm3bqqqqHB0dJ06cqKWl5ezsjG/ogkAgkP76dpn5D0gOCASyHxEIBALpLwQCgfQXAoFAIP2FQCAQSH8hEAgE0l8IBALprx8KDMMePnzIZDKRKITFx48fB9oFbW1tSG6Dg81mf7UnZYgyI/01SJ48efLrr7/6+vrSaLSsrKyvIHShk5mZaWZmRqFQJk2atGrVKnNz81mzZoWHh/f19cEMaWlpSkpKFAoF37kSAFBeXm5hYUGhUJydnWtra4U4xPfv3//p06cBXUWhUBISEl6+fPmDja43b96IiIhUV1dzHAuF/Px8Dw8PbW3tlJSUoW7Is2fPtmzZoqamRgwpzI8LFy5YWVlt3ryZ+1+pqalKSkpVVVV4SldX1759+/z8/DQ0NAb8jTD2f/n3v/8dFxeH/Rw0NjZu2bLFy8vr5cuXGIY1Nzf7+/vb29uXl5cP0R3ZbPbp06ejo6MPHz7s7e3d1dUlrJLh5wSBgYHw9NatW2JiYlu3bsUzwH3CFRUVW1pa8MSOjg5FRUUWiyXENh45cuTatWuDuLC3t9fZ2bmzs/NHGmNBQUEGBgbcx8IiISEBAJCcnDzUDWGxWH5+fgCAjo6OfjO3t7erqqquXbuW+18FBQU2NjZNTU14ip2dXUxMDIZhkZGRoaGh/MpcsGABd+LPq7/6+voiIiKam5s50ru7u+Pj42trawUv6uDBgwLmTE1Nra+vh8fBwcFHjx4VVnOePHkCAAgKCsJTFi1aJC8vj5/m5eXRaDQAgLW1NZvNxtONjIyEKNXa2lobG5tBX37jxo3du3f/SMNs6tSpERER3MfCoqSk5OvoLwzDkpOTBdRfGIbRaDSe+ouDzs5OMTGxzMxMeMpkMrdv3y64/vpu7MfExERBJq6CIyoq6unpKScnx5FOpVJdXFyI0aL7RcCNaYuKisaPH4/v0vbkyZPhw4cPncRERUU5UmxsbJycnNLT0xMTE/HEESNGCPGmcXFxrq6ug77c2Nj477//ZrFYP4xrorKy0tbWluNYiAwugMp/3N2M+5qbmppwL0dvb6+Li8ulS5cEL0rse3FPXrx40dHR8bsezSUlJe7u7vC4oKAAAMDdor6+Pjc3t66uLn6FKCgoHDlypN97ZWdn5+fnBwUFcaRHREQUFBR4eXktXLhQTU1N6G3Mysri2IluoE/jlClT/v77b0NDwx9Af6WkpJiYmMjLy3McE8nIyMjLy5swYUJUVNSePXucnJy6u7tDQkLYbDadTu/u7j527NjYsWMBAPfv3z958uSkSZPy8/NtbGycnZ2579ja2hocHNze3l5aWmpoaLh//36O/eF5FpKXlxcfHy8uLm5tbb1169a2trYTJ04sW7YMhlfZvXs3lUplMpmvXr3i2UzuJuD/io2NPXLkyIcPH6KiomxsbOh0+unTp8+ePXvw4EELC4uUlJTs7GyYLTc3V1pa+u7du21tbd7e3pMnT3Zzc/uG/F/nz59PSEjYtGkT7uzQ0tIi+mJIiImJOXPmzODu++LFCzMzM0lJSWNj4/b2dpiYmJjIZrO7u7tjY2MTExMdHR2Li4vDw8N9fX0PHz480FtYW1v3m6ejoyMhIaG6unr16tWhoaHLli3r7u4Wonih/ailpWVkZKSioiIqKhoZGUl0bOXl5YWEhGAYVlRUJCYmNm/evL6+PgzDzM3NhVWHN2/eTJ8+ncMYH6iEk5OTfXx8fgDLkcVijR8/PiEhgeOYQz4SEhJwWN66dSs2NhbDsNWrV0N7qru7W1ZWdv369dDdISsre/z4cQzDsrOzRUVFW1tbMQwrLS3F7Uc2m21paQnTHz16BAA4dOgQh8+EZyEdHR3Tpk1TV1dPSkpqaWkxNTVVV1eHl9jb20dGRsJjDw8PbvuRZxOg/ailpZWVlcVkMm1tbRUUFNhsdmtrK9yvPiMjgzhu8VNzc3MtLa0vsh+HYkZaUVHR19c3f/58KGgAQGFh4efPn2VkZPpVr2fOnCkrK+PedFYQ6uvrvby81q9ff+vWrcWLF2/fvh3+Ro0cOZJCoZw5c2bNmjWOjo5jxoxxd3fftGlTe3s7PpsVLnfu3DE0NJwwYUJKSoqPj8+qVasOHTok9Ls4OzvfunXrxYsXkZGRmzdvhq4ojjwzZ84MCAgoKCgQegXevHnDYXcPQsKKioo1NTU/wOTr3r17jY2NlpaWHMdEenp6urq6QkNDWSyWoaHh4sWL7969+/DhQ3Nzc+jKyMjI2LRpE9yOz9PT09jYGG4fw2KxuJeMb9y48fjxY39/f29v7+TkZAMDA46YwPwKGTlypJqa2sSJEx0cHGRlZZctW/b69eve3t7Hjx+npaXhsdSh/7TfJuD/mjdvnrm5uZiY2NKlSxsbG9vb22VkZIS7q9ZXsh9ZLNaKFStCQkJMTU2hfrx586aJiQk8fvTo0cmTJ7mXTjEMs7S0vHr1qrm5eb+Og+XLl3PvAJT2D6NGjYLP7YEDB3p6ejIyMqDhNmvWLOj9KSkpsbW1pVAoHHXgWbHBbUxbV1cHByWeJyYmBi7ocNiPrq6u5PZjZGQkuSioVOqGDRsqKysjIiIuX768fPlyjgzbtm27ceOGn5+fqanp4Dq0urr6zp07I0aMKC8v9/X1lZSUhL4MKGoccgnzFK+srCy+O9T3bjwuW7ZMSkqK45iItLR0UFDQn3/+efny5aioKAMDg/T09HHjxuFziF9++QWfVezdu5dOpwcFBXV2dsJNSDlKKy0t1dHRIXEvkBRCnLXATmQymXfv3pWTk8M9pDxnNtxN4J4JwWlKT0+P0KdHX0l/6enpAQCSkpLw3/ybN2/izqAZM2ZMnz6dp7ijo6O3bdtmaWk5OMcnx8a01tbW165dAwBApwCsFYPBuH//PpzWcsCzYoPYmJa7216+fAmdGpz9ISbGsVfboJk9ezYc09z6S0REJCkpSU9Pz97eXlVVdaAlt7e3X7hwAcq2oqLi4cOH8CddXFycw/VOLmGe4mWxWBwum+8RJpN57ty548ePcxxz4+vrO2PGDDc3twULFsT
FxbW2ttbU1GAYxv2cHzp0iE6nh4eHV1dXBwQEcBf18ePHiooK4rUMBkNMTExERETwQog0NTVBw5Bc6XA0gadjboj4euuP79+/r6qqgtPLz58/FxYWwkEPn22iiIkoKSmdPn06KysLrhN/IZqamiEhIcTQ9XBZkEql6uvrw1lSa2srUenwq9iAaGho4CgnNTXVwsJiSAX+7t07AAC+81tfXx9RuSgqKp46daqqqmqgL5rC2QScwHZ0dDx58mT+/PkwXV5enuc79PwkzFO8bW1t3E7u746cnBwmkwkDlBOPOWhpaSkpKTExMamsrFy+fLmfn5+Ojs7bt2+hVxuaIPCnsbCwcMeOHT4+PhISEvzettfU1KypqcnIyMCv9ff3J3a6IIUQUVdX7+npefDgAYdVRN6ELxQdg8H4FvVXR0fH6NGjhw0bBp/eSZMmKSoqMhiM6OjodevW0el0vlUUEQn7hy+vQ1dX17Rp00aPHg0AyM3NnT9/Pnwna8qUKSIiIhiGZWZmwrmuIBUTnPz8fKLT58iRI8rKymvWrOGZubu7+zN/4CSc51X4FB0AUFtbe/ToUSUlJXze2tzc3NLSQrxk6dKl0LeCj0s/Pz9lZeWpU6dCD1RKSgq/b6pev369YcOGP/74Y/369bBPAQCTJk0iLlGRSJhEvHQ6ffLkyT+A8WhtbU2lUjmOuZ1HJ06cgCa/nZ2dnJycjY2NsrLyypUrg4OD//rrLxcXFziHbWhogH7u+vp6uNN7bW1tQ0MDNAPhdsu2traysrKurq5hYWG3b9/euHHjzJkzxcXFib+jPAuB4wfXaNA3wmQyrays5OTkPD096+vrmUwm/Hjj6tWr+O7OPJuAP+/4T2N7ezsAAFYVflgGU/Ca4zpLTk7uzZs39fX1ZWVlAiky7vXHkydPDtGr55s3bw4NDU1OTqbRaJs3b8Yw7O7duw0NDXPmzHn8+DH55S4uLgK+OEdCYWHhlStX4HFubq6Dg0N4eHh+fr6Pj098fHxMTAz+2qrgFRNk/fHEiRMlJSUnT548depUYGBgcnIy8Q1SIkwm083NzYk/xFfqia99Ll26FDqPrKysjI2NtbS0HBwc3r59iy/+6ujojBkzJjo6mmPxCC5vYRiWkpKSnp7OYDCqqqq2bNnS2dl56dIl7nu1tbXhb0symcx58+YR/2tiYgKXtMglTCJeJyenJ0+efNcrj58/fx4xYkROTg7HMc/XfSkUyrp168LCwpydnZ8+fYphGJ1ONzExkZSU1NPTu3r1Kt5TpqamUlJStra29fX106ZNmzJlyvnz5+G0bv78+devX4fLkQYGBhISErq6uunp6dzLndyFVFVVXbt2TUZGRlpa+uLFi3Q6fc6cOQCA7du3MxiM0tLSuXPnSklJmZmZhYSEmJmZpaWlMRgM8iYkJiaKi4vLy8tnZmY+f/4c7lLo5uZ29epVaHX98ssvd+7cKSoqsre3h+/9wSXIBw8eKCgoTJ48+ezZs4N8/36I9BdU8PBg3Lhx+BitqKig0WhfZ2CFhITgT1e/CF6xDRs2kCtufFH5W4bJZOLHKSkpiYmJxGFK/A7pzZs38Li5ufm//uu/iP+Njo4+duzYoMXb1dVlYGDAT7kjfmb+k+/ft7W1jR07Nj8/HwAQEBDg4uKiq6sL/xUXF+fm5nbx4sWv8A39u3fv+n1jA0fwipF/dPrs2TNtbe1v3+Qh7v+mqqpaXV2Nmx61tbUfPnyAx3l5ebiNEBERwbGBuYODQ1ZWliBdyVO8Z86c2bhx4/f4TjniP8JX0l/Dhw/fsmVLXV1dQEDAxIkT/f398X+pqKi0tbWJiooKxVNOApvNVlBQEDy/sCpWXFzM88WZb5nm5uaVK1fip2FhYbinTFVVNSEhIS0tLSIiQltbe+3atcQLR44c6eXlxXMxt1/x1tXVFRYW2tnZoccSIShfzX5EfEcEBgZyGHFpaWkwYsfFixf7vTw9Pb2srGyg7tGQkJAvd3Eifir7Ee3/iODBiBEjiEZccXExNIHv3btnZGTU7+VWVlYDvSOFQtm2bRuSPGJgTg8kAgQ3HJHnZsyYAQ+4v4BBIH58/xcCgUAg/YVAIBBIfyEQCKS/EAgEAukvBAKBQPoLgUAg/YVAIBBIfyEQCATSXwgEAoH0FwKBQPoLgUAgkP5CIBAIpL8QCATSXwgEAoH0FwKBQCD9hUAgEEh/IRAIpL8QCAQC6S8EAoFA+guBQCD9hUAgEN8knPsPDR8+vLy8PD4+/tupIoZhqJ84QDtUI342RowYweNB4NAODQ0NKSkpSFgIBOKbQlFRkXtvdgqa3SAQiO8U5P/6/vhOf3LYbDbqu5+q477CQEX6S8hkZmaamZlRKJRJkyatWrXK3Nx81qxZ4eHhfX19X174hQsXrKysODbH5klaWpqSkhKFQrl+/TqeWF5ebmFhQaFQnJ2da2trv45A8vPzPTw8tLW1cb9EamqqkpJSVVUVyVWC5BGQNWvWiIqKzpw509jYmEqlSklJLVq0aNq0aRQKZdu2bUPX8K6urn379vn5+WloaMTExHx3I5mj4wbUI8+ePduyZYuamtrnz5+H/MdccHp6evT19RkMBkd6YmKimJgY3GgeEyqRkZHy8vIHDhzAU54/f25paYlhWEJCAmxCTEwMJmxYLFZOTo6FhcX27dsHeu3Dhw8BAIGBgfD01q1bYmJiW7du/fJatbe3q6qqrl27VpDMly9fhl6DlpYWPLGjo0NRUZHFYmFfEdhTycnJ8LSgoMDGxqapqYnkEkHyCIiNjU1+fj48Hj9+PI1Gg8cZGRmenp5D12o7Ozs4MiMjI0NDQ7HvEGLHDahHWCyWn58fAKCjo2NIazgw/XXmzBn42879r9mzZw+F/jIwMAAAaGlpwdNPnz5NnjzZ2NgYniorKw+F/oqLi/v111+hchyE3nny5AkAICgoCE9ZtGiRvLx8vxcymcx+1SWNRhNQf+Xl5dFoNACAtbU1m83G042MjL7yY1BSUkLUX1+ZnTt34sdE/cVms3ft2jVEN+3s7BQTE8vMzMS+Z76k45KTk0n0lyBDXRAGZj8eO3YMABAREcH9L3Fx8aGYHsbExGzfvj0xMREA8OnTJzMzs8rKSvy/oqKiQ3FTe3v7y5cvU6lUYRUoSD17e3tdXFwuXbokxIbY2Ng4OTmlp6dDAUJ4rkMPKdxve8AR3K9l0G8eQdizZw+/Wu3evXuImtzU1CQUj8F/Fo6OE1aPCHGoD0B/VVRUtLe3AwAKCgpKS0vJM9+5c2fRokWrV682NjamUChUKhXOlXp6enx9fW1sbH7//ffffvuNTqfD/NeuXZORkaFQKDY2NikpKRoaGnPmzImKiqLRaMHBwU5OTgCA+Pj4Z8+eAQCKi4sNDQ2vXLkCr2UwGBs3bpSTk9PV1X306BFMDAoKEhERoVAou3btCgoK0tDQmDBhwokTJ7q6upycnGRkZKZOnQoNPW6oVKqoqOioUaM40ul0+tSpU+fOnfvhwwfB5ZadnZ2fn+/r64un3L9/f926dcHBwUuXLsWt4CtXrty9e7
epqcnb2/v48ePd3d3+/v579+61t7e3srJ6//49sczY2FgdHR15efm//vqL/O4RERGamppeXl7V1dU8fTQ7duwIDg4mdkdpaamPj8/ixYvv3buno6NjaWl5//59d3f3efPmvXz50tLScsyYMc7Ozmw2OywsbNasWWPGjElNTSVpGocM/fz8dHR0rly50traqqamtnXr1h07djg6OlIolLVr13LkycvLc3BwcHV1vXLlira2toKCQmZmJl5aX1/f3r179+/fP336dAqFoqenx+3SGjZsGD/hDBs2jGdjuZtAUo2MjAwvL6/Dhw9raGjA34mUlBSoGWNjY729vXNzc3nKmVvUCxcuFFDO5HA34d69e+vWrdu4ceOxY8cmTpyooqISFRVFkk7SazCltbXV19d3w4YNNBrN19eXwWDAJ9HX19fPz2/nzp3EbuKAY6jzG4dC9n95e3u/evUKPtUuLi4c/503bx5uP9LpdDho2tvbm5qaAAASEhL19fVsNnvJkiUAgKqqKjabPXXqVCkpqdraWliCvb09AEBJScnT01NeXh4A8PDhQ0NDQ6L9aGpqCgDA7cfx48cDAGbOnHnz5k0LCwsAwJw5c/AqTZ48GQCgq6t75syZAwcOwHnQunXrrl27Bs1DfX19kvbCOhDtx8OHD0OhnT9/vl/7UUtLy8jISEVFRVRUNDIyEnc59fX1ycrKHj9+HMOw7OxsUVHR1tZW+C9zc3O8patXr4bWR3d3t6ys7Pr163H7UUtLKysri8lk2traKigoEG1DDvsxJCQEw7CioiIxMbF58+b19fXBu+AGlKmpaVxcHDz19/cfNWpUQ0NDZWXllClTxo4de/DgwcTExL1793Z1dVlbW0tISMTHx/f09Ny7dw8AYGFhUVZWxmaz3d3d5eXl+/6BZ9Pgrx00Q1pbW0NDQ+Fj/+7du1OnTuFeKhkZmffv33Pk6ejomDZtmrq6elJSUktLi6mpqbq6Ot7GsLAwMzMzDMM+fPggLi7erz+LaD9COBq7Z88enk3gV43u7m4JCYn29nbo6IyNjSWOgYyMDBI5c999165dgsiZvI08e6GyslJZWVlJSSktLe3Vq1dwQpCbm8svHRaFdxyxR2CLLC0toWTgjOHQoUPwEY6MjITXenh4kNiPxKFOIh+h+b86OjqgzoKLX8OGDWtubuanv86ePQsAGDVqFKwc1GU5OTnXrl2Dz39PTw+GYcuWLQMAuLm5wRLgb6+amhqGYVVVVefOnWOxWFBhkeuvqKgo3N6WkpLCqzRjxgwAwO+//45hGL7cduXKFQzDTpw48f8nnyIiA9Jf9fX1v/32m729/efPnwX0f3V3d8fExIiKilpaWkJFw2az/fz8Xr16hWFYcXExAKCsrIyjU//++28NDQ1cMeXn55eXl+P6y9XVFV82AQB8/PiRXH9hGHbo0CEAQEBAAFF/5efnQ6scXxwYMWKEj48PhmErVqxQUlIiPio7d+6UlZXFn5ARI0bgkoGSb2ho4Nc0ov6Czzl8Evr6+uBaUFZWFpxf47fD82AYZmVlZWpqiq/n4OMHukc3b96MS2bJkiUD1V8cjSXpHZ7V+PjxI4VC2bNnT19fH5vNfv36Nbf+IpEzt6gFkTN5G/k1YcGCBatWrYJ5urq6Ro8ebWdnR5LO0XHEHrl+/bq6uvrm/8HAwMDDw6OsrExUVLSzsxNem5SUJKD+IpcPOWICTtNSU1NXrFgBANi4cWN4eHhvb29cXNyOHTt4ZjY2NlZWVq6rq2tqaurq6urt7R07dix8jQAAMHr0aKjRlJSUAAAPHjwgXquurg4AmPQPgtrAIiLQ6AMAsFgskv9ynA70xRYlJaWBGu1UKnXDhg2VlZURERGXL19evnw5hULZu3cvnU4PCgrq7OwEAHD7FB48eDBu3Djc+/DLL7/w9ErIyMjAp6jfamzbtu3GjRt+fn7wBwBSXFwMTXt4KiUlpaOjA122FApl9OjRRLcd0RUiKioqKSmJn0pLSwMA2traFBQU+m0asSjRf+js7PTw8DA0NHR2duZ5O+IxnP4zmUw4hKhU6tu3b+G/9PX1YecOwsuDN5akd3hWQ1paOigo6M8//7x8+XJUVBRcbuKARM7cohZQzuTN4dkECoWC32X48OE0Gg26FPilk/jCSktLdXR0jhw5QswQFRUlJyeHe1cF/8SNXD5C8H9B3X/y5MlVq1bt2bNnzJgxAIDo6Gh+HkpZWdmnT58aGRl5eHgEBgb+/vvv9+/fl5aWhsNr+PDhMBt85eJnAC7O4k7DQ//g5eW1atUqnvlbW1tramqE+PqfiIhIUlKStLS0vb09dFXAEYZh2Lt37/BscnJyI0eO/JIb9ds0bvz9/RsaGo4dOzaIjzodHR1zcnLevn2LYdiLFy8EeTNO6E3w9fXNyclpb29fsGABT5ffUMj5y5sgKSkJJxACphP5+PFjRUUFcXwyGIzGxkZoRw/i92PQ8hFIfz18+JBGo50/fz7tH/z9/QEAtbW1Fy5c4Pfmno2NDQDg1KlTx48fj46OhrOqWbNmAQAaGxuh4qurq8Of7UG818s91RpqGhsbraysHB0du7q6BnQh7BsVFRUAQGFh4Y4dO3x8fCQkJLgngFC56OjovH37Njs7G29vWlraQGvb19dHFJGiouKpU6eqqqo+ffoEU2B33L59m9hAnjMIASFvGk/Ky8tDQ0N37typpaUFHaMD6lYnJ6f169cfOHAgPj4+PDxcW1tbkJEjxCa0tLSUlJSYmJhUVlYuX74cvvTEgdDlLJQmVFdXz58/X/B0IpqamjU1NRkZGbhU/f39J0yY0NPTw2FOkQgc/x39EvkIpL8CAwMtLS3xU0tLS/hTuX//flxAvb29eJ3Kyspyc3Pz8vKkpKRERESoVOrcuXMfPXpkamq6aNEiNpsNfzBfv34tKSmJL2/DEtra2oi3hsYR3lQ4na6srLx06dLhw4fhxJj4F7ohiAUS/8t9ym8KyWKxOjo6oC4mGtEZGRnJyclXr14lEVd3dzfRrKutrT169KiSkpKtrS30X0A/Qn19PVw9rK2thYlycnJv3rypr6+fNGnSuHHjVq5cGRwc/Ndff7m4uOjp6cHSOjo6cAUEl4OhjcBNc3NzS0sLMWXp0qWbNm3CTw0MDJYvXx4VFQXF+/Tp0/b2dpiht7eXo1jocsIHJYPBwF+thnJmsVj8mgaLgtngrzesPJvN3rBhg6amJu6IiIuLg5N0PA+UJ96n8KZ49wUGBnZ2dpqamsrKyr558wa3Jfkpr66uLm5zm9hYkt7hWY2enh7oS6VSqXZ2dnJyctyPA4mcuUUtiJyhM87T05NnM0ma8OzZMzjgHz161Nra6u7ujr8uzzOd2HHEHrG1tZWVlXV1dQ0LC7t9+/bGjRtnzpxpa2srJyfn6elZX1/PZDLhhx9Xr17F+50IPtTLyspmzpxJIp8vXX/08fEBANjZ2UEXcktLS2BgIG76ubi4sFisuLg4aEKLiIicOnWKwWBwz+S1tbXhe33e3t62trYeHh7m5uZVVVXwLjk5ObKystCo/
OOPP2BibGwsbhUfO3YMw7DCwkJNTU0qlbpixYqjR4/Cf+np6dHp9Llz5+KTZwzDoqOj4ZOgpKRUXl6Od/bChQtfvXoFX+yELwfxfH8V6hrordu/f/+LFy/gqsLkyZNpNBrxjXYObty4sXTpUmhEW1lZGRsba2lpOTg4QJUNPfqmpqZSUlK2trb19fXTpk2bMmUKlMODBw8UFBQmT5589uxZOp1uYmIiKSmpp6d39epV3GEvLi4uLy+fmZn5/PnzhQsXwgUQ3J+Nc/78eR0dnTFjxkRHRxPTu7u78aVMeOrj47Ny5cqAgABvb2/oG46Pj4eetQMHDtTV1WEYlpubC92RkZGRnz59go4PVVXVrKwsOp0O15R9fHxqamq4m3b+/Hm42jt//vzr16/n5uYaGxtDpx58p2TJkiW7du3asWOHnZ0dHCTEPCEhITIyMtLS0hcvXqTT6XPmzAEAbN++HTr+k5KSNP4Bmhvi4uI3b97k2S/Pnz//888/4RANCAjAHe0cjeXXO/D9Hu5qVFdXUyiUdevWhYWFOTs7P336FC74wsV0Y2Nj6PDmKWfuuwso57q6Ol1dXThd5W4pvyYsXLhw9uzZdnZ23t7erq6u9fX1MD+/9MLCQrzjDh48iPfInTt3oGvfwMBAQkJCV1c3PT0d9/fPnTtXSkrKzMwsJCTEzMwsLS2N+3MdjqFOIh8hv38vIEVFRbKysiUlJd3/8P79+/Xr10tKSvJb6UcgBkFfX5+npyf+W9Le3l5QUID/+P3w7Ny5E3/zRhAWLlxob28vePp3wZB40AsKCuCXenD2RKVSVVRUrK2tUdQ9hBDJyMiora2F03a4bjVjxozHjx//DG2/e/fu7Nmz4dztZ2ZI9Nf69etramrMzc3V1NQUFRVbW1u1tbXht0cIhLBobW3Nzc1NS0tbsmSJiIhIeXl5dnb2zp07f4a2a2pq9utl515V43Auk6d/HyAzBPH92o979uxRUVGRkJCg0WihoaFMJhOJhSdHjx7917/+JSUlFRISgr9iSpL+vYDiryIQiO8VFL8QgUAg/YVAIBBIfyH+swjdpYB8FIjvVX8JK5C5EAOi/5AIKB/ybAkJCebm5rt27RJWrQQP2I9AfIv6a/z48fPmzRs9evQ3Us6PioDyIc+2fPnyu3fv4l9rfTkmJibFxcUD/VwUgRjA3F64CCuytbDKQQwIDQ0NwUP+CzdgPwIxUIQ8/+IZ2XoQYbOFVc7P9lMkiHy+wSjmCMQQ2o/8gppzx2jniGxNDJudlZU1cuRI+F0kDNOhr68PAw9wl09SDrw1z4DZ5LHSBwHPu5A0hGdQcO4I64LIFv+vIAHROeTDTw7cYuSn4P77v/9bWVlZR0cHbsImSB/xHA94mYIH7EcghGk/ksRr5xmjnRgZliNs9vbt26lUKh4o1tbWlsFg8CufpBx+AbPJY6UTP9CPjY1NTEx0dHQsLi4ODw/39fU9fPgwdxxefmG5eTaEX1Bw7nDygsgWj+crSEB0DvnwkwNHNn72o56e3rlz54qLi42MjERERJ4+fSpIH/EbD4IH7EcgBkr/+otfOG1+Mdo5xjQxbHZDQ4OYmBjUCHAuQFI+STkkAbNJYqXjxMXFwU8ltm7dOnPmTBiFKjg4mCMbyV14NoRnUHB4LXc4eXLZcoQZECQgOlE+JHLgyMZTf3l5ecFj2MxNmzYJ0kf8xoPgAfsRCOHHn+AXTps8RjvxcvxYQUHBwsIiPj5+7dq1p0+fhvHOBYkHz1EOeeB2PBtHrHScWbNmwSjdJSUltra2FAqF5/buJHfh2RCeQcHxynOEkyeXLb+GkwRE54jtwU8OxPTMzMzVq1fjp42NjXgULby/9PX1q6qqvjBm/yAC9iMQgiBQ/IlDhw7R6fTw8PDq6uqAgACYiMdoH1BUHHd398WLF1dWVjY0NEyYMIGkfHKVCgNmw/2HYDhHwTfQhbFMGQzG/fv3oT01iLtwNwQPCo4LhMFgiImJkW8qMdC2CxEjI6OysjL8lOe+tnJycnC3iH7rObjxgEAMrf+eXzhtkhjtJC8QLVq0SENDw83NDW7sSB6um185QgkoXlRURKVS9fX1YST+1tbWAd2FuyE8g4KTR3MfRMB4ITJy5EhNAjz17Lt37wwNDQXpI6HE7EcghKy/+IXTtrGxUVZW5o7RToxszWAwiGGzYfReNze3iooKuN0sSfkk5ZAEFCeJlQ7Jzc2dP38+hmGpqalTpkyBW0BmZmZyh4IjD1vO3RCeQcHhfI07nDx524l5BAmIzhGenEQOHNl4gv/3/v37EhISDg4OgvTRb7/9xnM8kATs3717N9GARSCEv/5IEq+dZ4x2YmRrYiBzGDYbw7Cmpia4kSp5+eTl8AyYzS9IOTECd25uroODQ3h4eH5+vo+PT3x8fExMDL4HOHfbScJyczSEX1Bw7nDygsgWr60gAdHPnTtHlA8/OVy7do27Ozi4cOHC4sWLXVxc/Pz8du3aBXfqFaSPeI4H8oD97u7uixYtQk5oBIr/hUAgkP2IQCAQSH8hEAgE0l8IBALBg/8XAAD//8bGc2lsV5nLAAAAAElFTkSuQmCC" 7 | } 8 | }, 9 | "cell_type": "markdown", 10 | "metadata": {}, 11 | "source": [ 12 | "## Batch Normalization\n", 13 | "\n", 14 | "**Paper:** https://arxiv.org/abs/1502.03167 (May 2015):\n", 15 | "\n", 16 | "**Abstract:**\n", 17 | "Training Deep Neural Networks is complicated by the factthat the distribution of each layer’s inputs changes duringtraining, as the parameters of the previous layers change.This slows down the training by requiring lower learningrates and careful parameter initialization, and makes it no-toriously hard to train models with saturating nonlineari-ties. We refer to this phenomenon asinternal covariateshift, and address the problem by normalizing layer in-puts. \n", 18 | "\n", 19 | "Our method draws its strength from making normal-ization a part of the model architecture and performing thenormalizationfor each training mini-batch. 
Batch Normalization allows us to use much higher learning rates and be less careful about initialization. It also acts as a regularizer, in some cases eliminating the need for Dropout.\n", 20 | "\n", 21 | "We want to normalize a batch so it has a mean of 0 and a standard deviation of 1. We still want the model to be able to rescale and reshift the data though. For this we have two **learnable** parameters, gamma and beta.\n", 22 | "\n", 23 | "![batch-norm.png](attachment:batch-norm.png)\n", 24 | "\n", 25 | "### Why?\n", 26 | "\n", 27 | "Usually, in order to train a neural network, we do some preprocessing to the input data. For example, we could normalize all data so that it resembles a normal distribution (that means zero mean and unit variance). Why do we do this preprocessing? Well, there are many reasons for that, some of them being: preventing the early saturation of non-linear activation functions like the sigmoid function, assuring that all input data is in the same range of values, etc.\n", 28 | "\n", 29 | "But the problem appears in the intermediate layers because the distribution of the activations is constantly changing during training. This slows down the training process because each layer must learn to adapt itself to a new distribution in every training step. This problem is known as **internal covariate shift**." 30 | ] 31 | }, 32 | { 33 | "cell_type": "code", 34 | "execution_count": 69, 35 | "metadata": {}, 36 | "outputs": [], 37 | "source": [ 38 | "import torch\n", 39 | "from torch.autograd import Variable\n", 40 | "from torch import Tensor, FloatTensor" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": 70, 46 | "metadata": {}, 47 | "outputs": [], 48 | "source": [ 49 | "batch = torch.randn(3,3,28,28) * 1.5 + 2" 50 | ] 51 | }, 52 | { 53 | "cell_type": "code", 54 | "execution_count": 71, 55 | "metadata": {}, 56 | "outputs": [ 57 | { 58 | "data": { 59 | "text/plain": [ 60 | "tensor(1.9976)" 61 | ] 62 | }, 63 | "execution_count": 71, 64 | "metadata": {}, 65 | "output_type": "execute_result" 66 | } 67 | ], 68 | "source": [ 69 | "torch.mean(batch)" 70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": 72, 75 | "metadata": {}, 76 | "outputs": [ 77 | { 78 | "data": { 79 | "text/plain": [ 80 | "tensor(1.4874)" 81 | ] 82 | }, 83 | "execution_count": 72, 84 | "metadata": {}, 85 | "output_type": "execute_result" 86 | } 87 | ], 88 | "source": [ 89 | "torch.std(batch)" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": 99, 95 | "metadata": {}, 96 | "outputs": [], 97 | "source": [ 98 | "class BatchNorm(torch.nn.Module):\n", 99 | " def __init__(self):\n", 100 | " super(BatchNorm, self).__init__()\n", 101 | " self.gamma = Variable(FloatTensor([1]), requires_grad=True)\n", 102 | " self.beta = Variable(FloatTensor([0]), requires_grad=True)\n", 103 | " \n", 104 | " def forward(self, x):\n", 105 | " b_mean = torch.mean(x)\n", 106 | " b_std = torch.std(x)\n", 107 | " \n", 108 | " new_x = (x - b_mean) / b_std\n", 109 | " \n", 110 | " return self.gamma * new_x + self.beta" 111 | ] 112 | }, 113 | { 114 | "cell_type": "code", 115 | "execution_count": 100, 116 | "metadata": {}, 117 | "outputs": [], 118 | "source": [ 119 | "bn = BatchNorm()" 120 | ] 121 | }, 122 | { 123 | "cell_type": "code", 124 | "execution_count": 101, 125 | "metadata": {}, 126 | "outputs": [], 127 | "source": [ 128 | "normal_b = bn(batch)" 129 | ] 130 | }, 131 | { 132 | "cell_type": "markdown", 133 | "metadata": {}, 134 | "source": [ 135 | "The mean of the batch is nearly 0.\n",
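 "\n", "Written out (a short restatement, not from the original notebook), the transform in the figure above is:\n", "\n", "$$\\mu_B = \\frac{1}{m}\\sum_{i=1}^{m} x_i, \\qquad \\sigma_B^2 = \\frac{1}{m}\\sum_{i=1}^{m} (x_i - \\mu_B)^2$$\n", "\n", "$$\\hat{x}_i = \\frac{x_i - \\mu_B}{\\sqrt{\\sigma_B^2 + \\epsilon}}, \\qquad y_i = \\gamma \\hat{x}_i + \\beta$$\n", "\n", "The small class above implements the same thing, just without the $\\epsilon$ term that guards against division by zero. With the initial values $\\gamma = 1$ and $\\beta = 0$, the output is simply the standardized batch, which is why the mean and standard deviation below come out as roughly 0 and 1."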
136 | ] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "execution_count": 102, 141 | "metadata": {}, 142 | "outputs": [ 143 | { 144 | "data": { 145 | "text/plain": [ 146 | "tensor(-6.0931e-08, grad_fn=)" 147 | ] 148 | }, 149 | "execution_count": 102, 150 | "metadata": {}, 151 | "output_type": "execute_result" 152 | } 153 | ], 154 | "source": [ 155 | "torch.mean(normal_b)" 156 | ] 157 | }, 158 | { 159 | "cell_type": "markdown", 160 | "metadata": {}, 161 | "source": [ 162 | "The std of the batch is 1." 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": 103, 168 | "metadata": {}, 169 | "outputs": [ 170 | { 171 | "data": { 172 | "text/plain": [ 173 | "tensor(1., grad_fn=)" 174 | ] 175 | }, 176 | "execution_count": 103, 177 | "metadata": {}, 178 | "output_type": "execute_result" 179 | } 180 | ], 181 | "source": [ 182 | "torch.std(normal_b)" 183 | ] 184 | }, 185 | { 186 | "cell_type": "markdown", 187 | "metadata": {}, 188 | "source": [ 189 | "## Batch Norm at inference\n", 190 | "\n", 191 | "During test (or inference) time, the mean and the variance are fixed.\n", 192 | "\n", 193 | "Using batch normalization during inference can be a bit tricky, because we might not always have a batch at inference time. For example, consider running an object detector on a video in real time: a single frame is processed at a time, and hence there is no batch.\n", 194 | "\n", 195 | "This is crucial since we need to compute the mean μ\n", 196 | "and variance σ² of a batch to produce the output of the batch norm layer. In that case, we keep a moving average of the mean and variance during training, and then plug these values in for the mean and the variance during inference. This is the approach taken by most deep learning libraries that ship batch norm layers out of the box." 197 | ] 198 | }, 199 | { 200 | "cell_type": "code", 201 | "execution_count": null, 202 | "metadata": {}, 203 | "outputs": [], 204 | "source": [] 205 | }, 206 | ], 207 | "metadata": { 208 | "kernelspec": { 209 | "display_name": "Python 3", 210 | "language": "python", 211 | "name": "python3" 212 | }, 213 | "language_info": { 214 | "codemirror_mode": { 215 | "name": "ipython", 216 | "version": 3 217 | }, 218 | "file_extension": ".py", 219 | "mimetype": "text/x-python", 220 | "name": "python", 221 | "nbconvert_exporter": "python", 222 | "pygments_lexer": "ipython3", 223 | "version": "3.7.4" 224 | } 225 | }, 226 | "nbformat": 4, 227 | "nbformat_minor": 2 228 | } 229 | --------------------------------------------------------------------------------