├── K-Means-Clustering ├── K-Means-Clustering-without-ML-libraries.ipynb ├── K-Means-Clustering.py └── K-Means-Clustering.swift ├── K-Nearest-Neighbors ├── KNN-without-ML-libraries.ipynb ├── euclidean-distance.png ├── knn.py └── knn.swift ├── Neural-Network ├── Single-Layer-Neural-Net-without-ML-Libraries.ipynb └── single-layer-neural-net-without-ml-libraries.py ├── README.md └── Support-Vector-Machine ├── Support_Vector_Machine_from_Scratch.ipynb ├── formula.png ├── gradient.png ├── hingeloss.png ├── objectivefunc.png └── svm.py /K-Means-Clustering/K-Means-Clustering-without-ML-libraries.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## K-Means Clustering Without ML Libraries ##\n", 8 | "K-Means Clustering is a machine learning technique used in unsupervised learning where we don't have labeled data. I wrote this algorithm without using any Machine Learning libraries.\n", 9 | "\n", 10 | "### How Does the K-Means Clustering Algorithm Work? ###\n", 11 | "- First, the algorithm creates k centers randomly.\n", 12 | "- Then, all data points are labeled using the nearest centers.\n", 13 | "- Then, centers are moved to the \"mean\" of their labeled data points.\n", 14 | "- Then steps 2 and 3 are repeated."
15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": {}, 20 | "source": [ 21 | "### Import Libraries ###\n", 22 | "I use only numpy for math, random for random centers and matplotlib for data visualization" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": 217, 28 | "metadata": { 29 | "collapsed": true 30 | }, 31 | "outputs": [], 32 | "source": [ 33 | "import numpy as np\n", 34 | "import matplotlib.pyplot as plt\n", 35 | "import random\n", 36 | "# Use matplotlib in notebook output\n", 37 | "%matplotlib inline" 38 | ] 39 | }, 40 | { 41 | "cell_type": "markdown", 42 | "metadata": {}, 43 | "source": [ 44 | "### Define Data ###\n", 45 | "First I will define a data to test the algorithm on. The data I am using in this notebook is fake data but to understand it, let's think it is player statistics of a soccer game." 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": 218, 51 | "metadata": { 52 | "collapsed": true 53 | }, 54 | "outputs": [], 55 | "source": [ 56 | "# Data - [average passes, average goals (player goals - opponent goals)]\n", 57 | "X = [[100,5], [90,5], [110,5], [97,4], [102,4], [112,4], [92,4], [95,3], [90,3], [100,3],\n", 58 | " [110,5], [100,5], [110,4], [93,3], [107,2], [117,3], [96,2], [105,3], [100,3], [110,3],\n", 59 | " [60,-1], [70,-1],[40,1], [70,-3], [50,-1], [80,0],[50,0],[60,-1],[60,1],[55,0],\n", 60 | " [40,-1], [45,-1],[40,0], [55,-3], [60,-1], [65,0],[70,0],[51,-2],[51,1],[48,0]]" 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "### Plot the data on a 2d graph ###" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": 219, 73 | "metadata": {}, 74 | "outputs": [ 75 | { 76 | "data": { 77 | "text/plain": [ 78 | "[]" 79 | ] 80 | }, 81 | "execution_count": 219, 82 | "metadata": {}, 83 | "output_type": "execute_result" 84 | }, 85 | { 86 | "data": { 87 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXkAAAD8CAYAAACSCdTiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAFMRJREFUeJzt3X9sJGd9x/HPx3YJ3TRtgJgWCPYm\nIkpLTyHkVkCaKoLcCS4oCqIFKcgtSNBavaNtqIoK11OpUGW1FVVLf0DoCihSbwlVKddEpwIJPyJo\nVaA+yI8LSSAU26ShxNACUk9CXPLtH7PmbJ/tvfHM7jx+/H5Jo/E8O/vMd+bWH6/nWd/jiBAAIE9j\nTRcAABgeQh4AMkbIA0DGCHkAyBghDwAZI+QBIGOEPABkjJAHgIwR8gCQsYkmDnrRRRdFu91u4tAA\nsGOdOHHi2xExWeY5jYR8u93W/Px8E4cGgB3L9mLZ53C7BgAyRsgDQMYIeQDIGCEPABkj5AEgY7WE\nvO0F2/fZvts2H5sBRqTXk9ptaWysWPd6TVd0xqFD0sSEZBfrQ4earmit1OurS50foXxJRHy7xv4A\nbKHXk2ZnpVOniu3FxWJbkmZmmqtLKgLzllvObD/++Jntd7+7mZpWS72+OrmO6f9sL0jqnGvIdzqd\n4HPyQDXtdhHs601PSwsLo65mrYmJIjjXGx+XTp8efT3rpV7fZmyfiIhOmefUdU8+JN1h+4Tt2Y12\nsD1re972/PLyck2HBXavpaVy7aO0UYBu1T5qqddXp7pC/pqIuErS9ZLeaPva9TtERDciOhHRmZws\n9Ve5ADYwNVWufZTGx8u1j1rq9dWplpCPiEf768ckHZP0gjr6BbC5uTmp1Vrb1moV7U2b3fD3+c3b\nRy31+upUOeRtn2/7gpWvJb1U0smq/QLY2syM1O0W9+DtYt3tNj/oKhWDlwcPnnlnPD5ebKcyqJl6\nfXWqPPBq+1IV796l4tM6H4yILd9LMPAKAOVtZ+C18kcoI+I/JT2vaj8AgPrxF68AkDFCHgAyRsgD\nQMYIeQDIGCEPABkj5AEgY4Q8AGSMkAeAjBHyAJAxQh4AMkbIA0DGCHkAyBghDwAZI+QBIGOEPABk\nrLaQtz1u+0u2j9fVJwCgmjrfyd8s6YEa+wNQUa8ntdvS2Fix7vXS6GsY/dV9/Kbrq01EVF4kXSzp\nk5Kuk3R80P579+4NAMN19GhEqxUhnVlaraK9yb6G0V/dx2+6vs1Imo+S+Vx5jldJsv1hSX8s6QJJ\nb46IG7banzlegeFrt6XFxbPbp6elhYXm+hpGf3Ufv+n6NrOdOV4r366xfYOkxyLixID9Zm3P255f\nXl6uelgAAywtlWsfVV/D6K/u4zddX53quCd/jaQbbS9I+pCk62wfXb9TRHQjohMRncnJyRoOC2Ar\nU1Pl2kfV1zD6q/v4TddXp8ohHxGHI+LiiGhLuknSpyLiVypXBqCSuTmp1Vrb1moV7U32NYz+6j5+\n0/XVquxN/K0WSS8WA69AMo4ejZiejrCLdZWBwzr7GkZ/dR+/6fo2oqYGXsti4BUAymtk4BUAkC5C\nHgAyRsgDQMYIeQDIGCEPABkj5AEgY4Q8AGSMkAeAjBHyAJAxQh4AMkbIA0DGCHkAyBghDwAZI+QB\nIGOEPABkjJAHgIzVMZH3k21/wfY9tu+3/fY6CgMAVFfHO/kfSLouIp4n6UpJB2y/qIZ+AQzQ60nt\ntjQ2Vqx7vWr7l+1vK4cOSRMTkl2sDx3afl911zaM/lI73o+UnS9wq0VSS9IXJb1wq/2Y4xWo7ujR\niFYrQjqztFqbz0U6aP+y/W3l4MG1/awsBw+O5lxH3d+ojqem5ni1PS7phKTnSHpXRLxlq/2Z4xWo\nrt2WFhfPbp+elhYWyu9ftr+tTExIjz9+dvv4uHT6dLm+pHprG
0Z/ozreduZ4rXUib9sXSjom6bci\n4uS6x2YlzUrS1NTU3sWNzhjAORsbK94TrmdLTzxRfv+y/W3F3vyx7UROnbUNo79RHa/xibwj4ruS\n7pJ0YIPHuhHRiYjO5ORknYcFdqWpqXrbyz5vK+Pj5doHqbO2YfSX2vFWq+PTNZP9d/Cy/eOS9kt6\nsGq/ALY2Nye1WmvbWq2ifTv7l+1vK7Oz5doHqbO2YfSX2vHWKHsTf/0i6QpJX5J0r6STkt426DkM\nvAL1OHo0Yno6wi7WgwbyBu1ftr+tHDwYMT5eDDKOj29/0HUYtQ2jv1EcT00NvJbFwCsAlNf4PXkA\nQFoIeQDIGCEPABkj5AEgY4Q8AGSMkAeAjBHyAJAxQh4AMkbIA0DGCHkAyBghDwAZI+QBIGOEPABk\njJAHgIwR8gCQMUIeADJWx/R/z7b9adsP2L7f9s11FAageb2e1G4XE1G328V2qnZSraM0UUMfpyX9\nbkR80fYFkk7YvjMivlxD3wAa0usVc7KeOlVsLy6emaN1Zqa5ujayk2odtdqn/7N9m6S/iYg7N9uH\n6f+A9LXbRViuNz0tLSyMupqt7aRaq2h8+j/bbUnPl/T5DR6btT1ve355ebnOwwIYgqWlcu1N2km1\njlptIW/7JyT9k6Q3RcT31z8eEd2I6EREZ3Jysq7DAhiSqaly7U3aSbWOWi0hb/vHVAR8LyI+Ukef\nAJo1Nye1WmvbWq2iPTU7qdZRq+PTNZb0PkkPRMSfVy8JQApmZqRut7ivbRfrbjfNgcydVOuoVR54\ntf2Lkj4r6T5JT/Sbfz8i/mWz5zDwCgDlbWfgtfJHKCPiXyW5aj8AgPrxF68AkDFCHgAyRsgDQMYI\neQDIGCEPABkj5AEgY4Q8AGSMkAeAjBHyAJAxQh4AMkbIA0DGCHkAyBghDwAZI+QBIGOEPABkrK7p\n/95v+zHbJ+voDwBQj7reyX9A0oGa+tpQrye129LYWLHu9YZ5tOpSqjelWgCMVuWZoSQpIj5ju11H\nXxvp9aTZWenUqWJ7cbHYltKcwzGlelOqBcDoVZ7j9UcdFSF/PCL2DNq37Byv7XYRTutNT0sLC+fc\nzcikVG9KtQCoZjtzvI5s4NX2rO152/PLy8ulnru0VK69aSnVm1ItAEZvZCEfEd2I6EREZ3JystRz\np6bKtTctpXpTqgXA6O2Ij1DOzUmt1tq2VqtoT1FK9aZUC4DRq+sjlLdK+ndJl9t+xPYb6uh3xcyM\n1O0W95HtYt3tpjtwmFK9KdUCYPRqG3gto+zAKwAg8YFXAMDoEfIAkDFCHgAyRsgDQMYIeQDIGCEP\nABkj5AEgY4Q8AGSMkAeAjBHyAJAxQh4AMkbIA0DGCHkAyBghDwAZI+QBIGOEPABkrK6ZoQ7Yfsj2\nw7bfWkefAIDqKoe87XFJ75J0vaTnSnqN7edW7Xe9Xk9qt6WxsWLd69V9hGrHr7O+ps91kLrrS/l8\nU64NOCcRUWmRdLWkj6/aPizp8FbP2bt3b5Rx9GhEqxUhnVlaraJ9FAYdv876mj7XQequL+XzTbk2\n7E6S5qNkRlee49X2qyQdiIhf62//qqQXRsRvbvacsnO8ttvS4uLZ7dPT0sJCyYK3YdDx66yv6XMd\npO76Uj7flGvD7tTUHK/eoO2snxy2Z23P255fXl4udYClpXLtdRt0/Drra/pcB6m7vpTPN+XagHNV\nR8g/IunZq7YvlvTo+p0iohsRnYjoTE5OljrA1FS59roNOn6d9TV9roPUXV/K55tybcC5qiPk/0PS\nZbYvsf0kSTdJur2Gfn9kbk5qtda2tVpF+ygMOn6d9TV9roPUXV/K55tybcA5K3sTf6NF0sslfUXS\n1yQdGbR/2YHXiGKwa3o6wi7Wox78GnT8Outr+lwHqbu+lM835dqw+6iJgdftKDvwCgBobuAVAJAo\nQh4AMkbIA0DGCHkAyBghD
wAZI+QBIGOEPABkjJAHgIwR8gCQMUIeADJGyANAxgh5AMgYIQ8AGSPk\nASBjhDwAZIyQB4CMVQp526+2fb/tJ2yX+o/sy+r1pHZbGhsr1r1e3v3Vaf9+yT6z7N/fdEVrpXzt\ngB2v7FRSqxdJPyfpckl3Seqc6/PKTv939GhEqxUhnVlare1PxZZ6f3Xat29tXSvLvn1NV1ZI+doB\nqVFT0//ZvkvSmyPinOb0Kzv9X7stLS6e3T49LS0snHM3O6a/OtmbP9bAzI9nSfnaAalJevo/27O2\n523PLy8vl3ru0lK59p3e327CtQOGa2DI2/6E7ZMbLK8oc6CI6EZEJyI6k5OTpYqcmirXvtP72024\ndsBwDQz5iNgfEXs2WG4bRYGSNDcntVpr21qtoj3H/uq0b1+59lFL+doBWSh7E3+jRUMeeI0oBuKm\npyPsYl11YC71/uq0fvA1lUHXFSlfOyAlGvXAq+1XSvprSZOSvivp7oh42aDnlR14BQBsb+B1osoB\nI+KYpGNV+gAADA9/8QoAGSPkASBjhDwAZIyQB4CMEfIAkDFCHgAyRsgDQMYIeQDIGCEPABkj5AEg\nY4Q8AGSMkAeAjBHyAJAxQh4AMkbIA0DGCHkAyFilkLf9DtsP2r7X9jHbF9ZV2E7X60nttjQ2Vqx7\nvaYrArAbVX0nf6ekPRFxhaSvSDpcvaSdr9eTZmelxcViVtXFxWKboAcwapVCPiLuiIjT/c3PSbq4\nekk735Ej0qlTa9tOnSraAWCU6rwn/3pJH93sQduztudtzy8vL9d42PQsLZVrB4BhGRjytj9h++QG\nyytW7XNE0mlJm96QiIhuRHQiojM5OVlP9YmamirXDgDDMjFoh4jYv9Xjtl8n6QZJ+yIi6ipsJ5ub\nK+7Br75l02oV7QAwSlU/XXNA0lsk3RgRpwbtv1vMzEjdrjQ9LdnFutst2gFglFzlzbfthyWdJ+k7\n/abPRcRvDHpep9OJ+fn5bR8XAHYj2yciolPmOQNv12wlIp5T5fkAgOHiL14BIGOEPABkjJAHgIwR\n8gCQMUIeADJGyANAxgh5AMgYIQ8AGSPkASBjhDwAZIyQB4CMEfIAkDFCHgAyRsgDQMYIeQDIWNWZ\nof7I9r2277Z9h+1n1lUYAKC6qu/k3xERV0TElZKOS3pbDTXter2e1G5LY2PFurfp9Oh52G3nC4xS\n1Zmhvr9q83xJTORdUa+3dhLwxcViW8pzjtjddr7AqFWa41WSbM9Jeq2k70l6SUQsD3oOc7xurt0u\ngm696WlpYWHU1QzfbjtfoIrtzPE6MORtf0LSz2zw0JGIuG3VfoclPTki/nCTfmYlzUrS1NTU3sWN\nvrOhsTFpo38SW3riidHXM2y77XyBKrYT8gPvyUfE/ojYs8Fy27pdPyjpl7fopxsRnYjoTE5Olqlx\nV5maKte+0+228wVGreqnay5btXmjpAerlYO5OanVWtvWahXtOdpt5wuMWtVP1/yJ7ZO275X0Ukk3\n11DTrjYzI3W7xT1pu1h3u/kOQu628wVGrfLA63Yw8AoA5Q3lnjwAYOci5AEgY4Q8AGSMkAeAjBHy\nAJCxRj5dY3tZ0nb/5PUiSd+usZy6pVxfyrVJadeXcm1S2vWlXJuUdn3ra5uOiFJ/TdpIyFdhe77s\nR4hGKeX6Uq5NSru+lGuT0q4v5dqktOurozZu1wBAxgh5AMjYTgz5btMFDJByfSnXJqVdX8q1SWnX\nl3JtUtr1Va5tx92TBwCcu534Th4AcI6SD3nb47a/ZPt4f/sS25+3/VXb/2D7SQ3WtmD7vv5E5vP9\ntqfavrNf3522n9JgfRfa/rDtB20/YPvqFOqzfXn/mq0s37f9phRqW1Xj79i+v/+/rN5q+8mpvPZs\n39yv637bb+q3NXbtbL/f9mO2T65q27AeF/7K9sO277V9VQO1vbp/7Z6w3Vm3/+F+bQ/Zftk
wa9ui\nvnf0v2fvtX3M9oVV6ks+5FX898UPrNr+U0l/ERGXSfpfSW9opKozXhIRV676mNNbJX2yX98n+9tN\n+UtJH4uIn5X0PBXXsfH6IuKh/jW7UtJeSackHUuhNkmy/SxJvy2pExF7JI1LukkJvPZs75H065Je\noOLf9Ib+vA5NXrsPSDqwrm2zeq6XdFl/mZV0SwO1nZT0S5I+s7rR9nNV/Dv/fP8577Y93kB9d0ra\nExFXSPqKpMOV6ouIZBdJF6t4gVwn6bgkq/jDgIn+41dL+niD9S1Iumhd20OSntH/+hmSHmqotp+U\n9HX1x11Sq29VPS+V9G8p1SbpWZK+IempKia7Py7pZSm89iS9WtJ7V23/gaTfa/raSWpLOjnodSbp\nbyW9ZqP9RlXbqva7VPwgX9k+LOnwqu2PS7p61Ndu3WOvlNSrUl/q7+TfqeIFvDLb59MkfTciTve3\nH1HxDdmUkHSH7RP9OWwl6acj4puS1F8/vaHaLpW0LOnv+re73mv7/ITqW3GTpFv7XydRW0T8l6Q/\nk7Qk6ZsqJqk/oTReeyclXWv7abZbkl4u6dlK5Nqtslk9Kz9AVzT9PbxairW9XtJH+19vq75kQ972\nDZIei4gTq5s32LXJjwddExFXqfgV9I22r22wlvUmJF0l6ZaIeL6k/1Ozt47O0r+nfaOkf2y6ltX6\n949fIekSSc+UdL6Kf+P1Rv7ai4gHVNw2ulPSxyTdI+n0lk9KS2rfw6slVZvtIyr+bXsrTRvsNrC+\nZENe0jWSbrS9IOlDKm7ZvFPShbYn+vtcLOnRZsqTIuLR/voxFfeUXyDpW7afIUn99WMNlfeIpEci\n4vP97Q+rCP1U6pOK4PxiRHyrv51KbfslfT0iliPih5I+IukXlMhrLyLeFxFXRcS1kv5H0leVzrVb\nsVk9j6j4zWNFo9/D6yRTm+3XSbpB0kz0781om/UlG/IRcTgiLo6Itopf6T8VETOSPi3pVf3dXifp\ntibqs32+7QtWvlZxb/mkpNv7dTVaX0T8t6Rv2L6837RP0peVSH19r9GZWzVSOrUtSXqR7ZZt68y1\nS+W19/T+ekrFAOKtSufardisntslvbb/KZsXSfreym2dBNwu6Sbb59m+RMXg8BdGXYTtA5LeIunG\niDhVub5hDyrUNDDxYknH+19f2j+xh1X8mn9eQzVdquJX5Xsk3S/pSL/9aSoGi7/aXz+1wet2paR5\nSfdK+mdJT0mlPkktSd+R9FOr2pKorV/L2yU9qOIH999LOi+h195nVfzQuUfSvqavnYofMt+U9EMV\n7zbfsFk9Km45vEvS1yTdp1UDnyOs7ZX9r38g6VtaNYAu6Ui/tockXd/QtXtYxb33u/vLe6rUx1+8\nAkDGkr1dAwCojpAHgIwR8gCQMUIeADJGyANAxgh5AMgYIQ8AGSPkASBj/w+8dVGazhuqkgAAAABJ\nRU5ErkJggg==\n", 88 | "text/plain": [ 89 | "" 90 | ] 91 | }, 92 | "metadata": {}, 93 | "output_type": "display_data" 94 | } 95 | ], 96 | "source": [ 97 | "plotx = []\n", 98 | "ploty = []\n", 99 | "for i in range(len(X)):\n", 100 | " plotx.append(X[i][0])\n", 101 | " ploty.append(X[i][1])\n", 102 | "plt.plot(plotx,ploty, 'bo')" 103 | ] 104 | }, 105 | { 106 | "cell_type": "markdown", 107 | "metadata": {}, 108 | "source": [ 109 | "### Helper Functions ###" 110 | ] 111 | }, 112 | { 113 | "cell_type": "code", 
114 | "execution_count": 220, 115 | "metadata": {}, 116 | "outputs": [], 117 | "source": [ 118 | "def random_centers(dim,k):\n", 119 | " centers = []\n", 120 | " for i in range(k):\n", 121 | " center = []\n", 122 | " for d in range(dim):\n", 123 | " rand = random.randint(0,100)\n", 124 | " center.append(rand)\n", 125 | " centers.append(center)\n", 126 | " return centers\n", 127 | "\n", 128 | "def point_clustering(data, centers, dims, first_cluster=False):\n", 129 | " for point in data:\n", 130 | " nearest_center = 0\n", 131 | " nearest_center_dist = None\n", 132 | " for i in range(0, len(centers)):\n", 133 | " euclidean_dist = 0\n", 134 | " for d in range(0, dims):\n", 135 | " dist = abs(point[d] - centers[i][d])\n", 136 | " euclidean_dist += dist\n", 137 | " euclidean_dist = np.sqrt(euclidean_dist)\n", 138 | " if nearest_center_dist == None:\n", 139 | " nearest_center_dist = euclidean_dist\n", 140 | " nearest_center = i\n", 141 | " elif nearest_center_dist > euclidean_dist:\n", 142 | " nearest_center_dist = euclidean_dist\n", 143 | " nearest_center = i\n", 144 | " if first_cluster:\n", 145 | " point.append(nearest_center)\n", 146 | " else:\n", 147 | " point[-1] = nearest_center\n", 148 | " return data\n", 149 | "\n", 150 | "def mean_center(data, centers, dims):\n", 151 | " print('centers:', centers, 'dims:', dims)\n", 152 | " new_centers = []\n", 153 | " for i in range(len(centers)):\n", 154 | " new_center = []\n", 155 | " n_of_points = 0\n", 156 | " total_of_points = []\n", 157 | " for point in data:\n", 158 | " if point[-1] == i:\n", 159 | " n_of_points += 1\n", 160 | " for dim in range(0,dims):\n", 161 | " if dim < len(total_of_points):\n", 162 | " total_of_points[dim] += point[dim]\n", 163 | " else:\n", 164 | " total_of_points.append(point[dim])\n", 165 | " if len(total_of_points) != 0:\n", 166 | " for dim in range(0,dims):\n", 167 | " print(total_of_points, dim)\n", 168 | " new_center.append(total_of_points[dim]/n_of_points)\n", 169 | " 
new_centers.append(new_center)\n", 170 | " else: \n", 171 | " new_centers.append(centers[i])\n", 172 | " \n", 173 | " \n", 174 | " return new_centers" 175 | ] 176 | }, 177 | { 178 | "cell_type": "markdown", 179 | "metadata": {}, 180 | "source": [ 181 | "### K-Means Clustering Algorithm ###" 182 | ] 183 | }, 184 | { 185 | "cell_type": "code", 186 | "execution_count": 221, 187 | "metadata": { 188 | "collapsed": true 189 | }, 190 | "outputs": [], 191 | "source": [ 192 | "# Gets data and k, returns a list of center points.\n", 193 | "def train_k_means_clustering(data, k=2, epochs=5):\n", 194 | " dims = len(data[0])\n", 195 | " print('data[0]:',data[0])\n", 196 | " centers = random_centers(dims,k)\n", 197 | " \n", 198 | " clustered_data = point_clustering(data, centers, dims, first_cluster=True)\n", 199 | "\n", 200 | " for i in range(epochs):\n", 201 | " centers = mean_center(clustered_data, centers, dims)\n", 202 | " clustered_data = point_clustering(data, centers, dims, first_cluster=False)\n", 203 | " \n", 204 | " return centers\n", 205 | "\n", 206 | "def predict_k_means_clustering(point, centers):\n", 207 | " dims = len(point)\n", 208 | " center_dims = len(centers[0])\n", 209 | " \n", 210 | " if dims != center_dims:\n", 211 | " raise ValueError('Point given for prediction have', dims, 'dimensions but centers have', center_dims, 'dimensions')\n", 212 | "\n", 213 | " nearest_center = None\n", 214 | " nearest_dist = None\n", 215 | " \n", 216 | " for i in range(len(centers)):\n", 217 | " euclidean_dist = 0\n", 218 | " for dim in range(1, dims):\n", 219 | " dist = point[dim] - centers[i][dim]\n", 220 | " euclidean_dist += dist**2\n", 221 | " euclidean_dist = np.sqrt(euclidean_dist)\n", 222 | " if nearest_dist == None:\n", 223 | " nearest_dist = euclidean_dist\n", 224 | " nearest_center = i\n", 225 | " elif nearest_dist > euclidean_dist:\n", 226 | " nearest_dist = euclidean_dist\n", 227 | " nearest_center = i\n", 228 | " print('center:',i, 'dist:',euclidean_dist)\n", 229 
| " \n", 230 | " return nearest_center" 231 | ] 232 | }, 233 | { 234 | "cell_type": "code", 235 | "execution_count": 222, 236 | "metadata": {}, 237 | "outputs": [ 238 | { 239 | "name": "stdout", 240 | "output_type": "stream", 241 | "text": [ 242 | "data[0]: [100, 5]\n", 243 | "centers: [[73, 42], [100, 42]] dims: 2\n", 244 | "[1120, -12] 0\n", 245 | "[1120, -12] 1\n", 246 | "[2036, 73] 0\n", 247 | "[2036, 73] 1\n", 248 | "centers: [[56.0, -0.6], [101.8, 3.65]] dims: 2\n", 249 | "[1120, -12] 0\n", 250 | "[1120, -12] 1\n", 251 | "[2036, 73] 0\n", 252 | "[2036, 73] 1\n", 253 | "centers: [[56.0, -0.6], [101.8, 3.65]] dims: 2\n", 254 | "[1120, -12] 0\n", 255 | "[1120, -12] 1\n", 256 | "[2036, 73] 0\n", 257 | "[2036, 73] 1\n", 258 | "centers: [[56.0, -0.6], [101.8, 3.65]] dims: 2\n", 259 | "[1120, -12] 0\n", 260 | "[1120, -12] 1\n", 261 | "[2036, 73] 0\n", 262 | "[2036, 73] 1\n", 263 | "centers: [[56.0, -0.6], [101.8, 3.65]] dims: 2\n", 264 | "[1120, -12] 0\n", 265 | "[1120, -12] 1\n", 266 | "[2036, 73] 0\n", 267 | "[2036, 73] 1\n" 268 | ] 269 | } 270 | ], 271 | "source": [ 272 | "centers = train_k_means_clustering(X, k=2, epochs=5)" 273 | ] 274 | }, 275 | { 276 | "cell_type": "code", 277 | "execution_count": 223, 278 | "metadata": {}, 279 | "outputs": [ 280 | { 281 | "name": "stdout", 282 | "output_type": "stream", 283 | "text": [ 284 | "[[56.0, -0.6], [101.8, 3.65]]\n" 285 | ] 286 | } 287 | ], 288 | "source": [ 289 | "print(centers)" 290 | ] 291 | }, 292 | { 293 | "cell_type": "code", 294 | "execution_count": 228, 295 | "metadata": {}, 296 | "outputs": [ 297 | { 298 | "name": "stdout", 299 | "output_type": "stream", 300 | "text": [ 301 | "center: 0 dist: 3.6\n", 302 | "center: 1 dist: 0.6499999999999999\n", 303 | "1\n" 304 | ] 305 | }, 306 | { 307 | "data": { 308 | "text/plain": [ 309 | "[,\n", 310 | " ,\n", 311 | " ,\n", 312 | " ]" 313 | ] 314 | }, 315 | "execution_count": 228, 316 | "metadata": {}, 317 | "output_type": "execute_result" 318 | }, 319 | { 320 | "data": 
{ 321 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXkAAAD8CAYAAACSCdTiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAFgRJREFUeJzt3X9sJGd9x/HPxz4I3TRNgJgWCN5N\nBEpLA4ScxY+miiB3ggsKQbRBCnILErRWL7QNVVHheioVqqy2omrTH5B2BRSpt4Qql6SBUwMJSRC0\nKlAfP5ILSSAU+0hDiQMFpFpBudy3f8w6Z/ts741nvPP48fsljcbzePaZz6zXX6/n2d3HESEAQJ5G\nmg4AANg8FHkAyBhFHgAyRpEHgIxR5AEgYxR5AMgYRR4AMkaRB4CMUeQBIGM7mjjo2WefHZ1Op4lD\nA8CWdfjw4UcjYqzMbRop8p1ORzMzM00cGgC2LNtzZW/D5RoAyBhFHgAyRpEHgIxR5AEgYxR5AMhY\nLUXe9qzte2x/1TYvmwGGpNeTOh1pZKRY93pNJzrh6qulHTsku1hffXXTiZZLPV9d6nwJ5asj4tEa\n+wOwjl5PmpqSFhaK7bm5YluSJiebyyUVBfO6605sP/HEie0PfrCZTEulnq9OrmP6P9uzkiZOtchP\nTEwEr5MHqul0isK+Urstzc4OO81yO3YUhXOl0VHp2LHh51kp9XxrsX04IibK3Kaua/Ih6Tbbh21P\nrbaD7SnbM7Zn5ufnazossH0dPVqufZhWK6DrtQ9b6vnqVFeRvzgiLpJ0maR32L5k5Q4R0Y2IiYiY\nGBsr9a5cAKsYHy/XPkyjo+Xahy31fHWqpchHxMP99SOSbpb0sjr6BbC26Wmp1Vre1moV7U2bWvX/\n+bXbhy31fHWqXORtn277jMWvJb1G0pGq/QJY3+Sk1O0W1+DtYt3tNj/oKhWDl3v3nnhmPDpabKcy\nqJl6vjpVHni1fZ6KZ+9S8Wqdj0XEus8lGHgFgPI2MvBa+SWUEfFfkl5StR8AQP14xysAZIwiDwAZ\no8gDQMYo8gCQMYo8AGSMIg8AGaPIA0DGKPIAkDGKPABkjCIPABmjyANAxijyAJAxijwAZIwiDwAZ\no8gDQMZqK/K2R21/xfahuvoEAFRT5zP5ayTdV2N/ACrq9aRORxoZKda9Xhp9bUZ/dR+/6Xy1iYjK\ni6RzJN0h6VJJhwbtv3PnzgCwuQ4ciGi1IqQTS6tVtDfZ12b0V/fxm863FkkzUbI+V57jVZJsH5T0\np5LOkPSuiLh8vf2Z4xXYfJ2ONDd3cnu7Lc3ONtfXZvRX9/GbzreWjczxWvlyje3LJT0SEYcH7Ddl\ne8b2zPz8fNXDAhjg6NFy7cPqazP6q/v4TeerUx3X5C+WdIXtWUkfl3Sp7QMrd4qIbkRMRMTE2NhY\nDYcFsJ7x8XLtw+prM/qr+/hN56tT5SIfEfsi4pyI6Ei6StKdEfFrlZMBqGR6Wmq1lre1WkV7k31t\nRn91H7/pfLUqexF/vUXSq8TAK5CMAwci2u0Iu1hXGTiss6/N6K/u4zedbzVqauC1LAZeAaC8RgZe\nAQDposgDGLrePT11ru1o5H0j6lzbUe+erfpOo/TtaDoAgO2ld09PU5+c0sLjC5KkuR/NaeqTU5Kk\nyRdNNhktSzyTBzBU++/Y/2SBX7Tw+IL237G/oUR5o8gDGKqjP1r9HUVrtaMaijyAoRo/c/V3FK3V\njmoo8gCGanrXtFpPWf5Oo9ZTWpretRXfaZQ+ijyAoZp80aS6r++qfWZbltU+s63u67sMum4S3gwF\nAFsEb4YCACxDkQeAjFHkASBjFHkAyBhFHgAyRpEHgIxR5AEgY3VM5P0021+y/TXb99p+Xx3BAADV\n1fFM/ieSLo2Il0i6UNIe26+ooV8A
A/R6UqcjjYwU696Aj2UftH/Z/tZz9dXSjh2SXayvvnrjfdWd\nTZJuuqmngwc7uvPOER082NFNN23uZ9rXnf+UlZ0vcL1FUkvSlyW9fL39mOMVqO7AgYhWK0I6sbRa\na89FOmj/sv2tZ+/e5f0sLnv3DudcB7nxxgNx662tuOsuPbncemsrbrxxcyZyrSu/mprj1faopMOS\nni/pAxHx7vX252MNgOo6HWlu7uT2dluanS2/f9n+1rNjh/TEEye3j45Kx46V60uqN5skHTzY0dln\nn9zho4+2deWVG+hwgLryb+RjDWr97BrbZ0m6WdLvRMSRFd+bkjQlSePj4zvnVjtjAKdsZKR4TriS\nLR0/Xn7/sv2tx177exspOXVmk6Q77xzRyMjJHR4/bl166QY6HKCu/I1/dk1E/FDSZyXtWeV73YiY\niIiJsbGxOg8LbEvja3z8+kbby95uPaOj5doHqTObJP3gB6vfcK32qurOX0Ydr64Z6z+Dl+2fkrRb\n0v1V+wWwvulpqbX8Y9nVahXtG9m/bH/rmZoq1z5IndkkaWRkWo89trzDxx5raWRkcz7Tvu78pZS9\niL9ykfRiSV+RdLekI5LeO+g2DLwC9ThwIKLdjrCL9aCBvEH7l+1vPXv3RoyOFoOMo6MbH3TdjGwR\nxeDrDTe04447HDfc0N60QddFdeRXUwOvZTHwCgDlNX5NHgCQFoo8AGSMIg8AGaPIA0DGKPIAkDGK\nPABkjCIPABmjyANAxijyAJAxijwAZIwiDwAZo8gDQMYo8gCQMYo8AGSMIg8AGaPIA0DG6pj+73m2\n77J9n+17bV9TRzAAzev1pE6nmIi60ym2U7WVsg7Tjhr6OCbp9yPiy7bPkHTY9u0R8fUa+gbQkF6v\nmJN1YaHYnps7MUfr5GRzuVazlbIOW+3T/9m+RdLfRcTta+3D9H9A+jqdoliu1G5Ls7PDTrO+rZS1\nisan/7PdkfRSSV9c5XtTtmdsz8zPz9d5WACb4OjRcu1N2kpZh622Im/7pyXdKOmdEfHjld+PiG5E\nTETExNjYWF2HBbBJxsfLtTdpK2UdtlqKvO2nqCjwvYi4qY4+ATRrelpqtZa3tVpFe2q2UtZhq+PV\nNZb0YUn3RcRfVo8EIAWTk1K3W1zXtot1t5vmQOZWyjpslQdebf+ypM9LukfS8X7zH0bEv651GwZe\nAaC8jQy8Vn4JZUT8myRX7QcAUD/e8QoAGaPIA0DGKPIAkDGKPABkjCIPABmjyANAxijyAJAxijwA\nZIwiDwAZo8gDQMYo8gCQMYo8AGSMIg8AGaPIA0DGKPIAkLG6pv/7iO1HbB+poz8AQD3qeib/UUl7\nauprVb2e1OlIIyPFutfbzKNVl1LelLIAGK7KM0NJUkR8znanjr5W0+tJU1PSwkKxPTdXbEtpzuGY\nUt6UsgAYvspzvD7ZUVHkD0XEBYP2LTvHa6dTFKeV2m1pdvaUuxmalPKmlAVANRuZ43VoA6+2p2zP\n2J6Zn58vddujR8u1Ny2lvCllATB8QyvyEdGNiImImBgbGyt12/Hxcu1NSylvSlkADN+WeAnl9LTU\nai1va7WK9hSllDelLACGr66XUF4v6T8knW/7Idtvr6PfRZOTUrdbXEe2i3W3m+7AYUp5U8oCYPhq\nG3gto+zAKwAg8YFXAMDwUeQBIGMUeQDIGEUeADJGkQeAjFHkASBjFHkAyBhFHgAyRpEHgIxR5AEg\nYxR5AMgYRR4AMkaRB4CMUeQBIGMUeQDIGEUeADJW18xQe2w/YPtB2++po08AQHWVi7ztUUkfkHSZ\npBdKerPtF1btd6VeT+p0pJGRYt3r1X2EasevM1/T5zpI3flSPt+UswGnJCIqLZJeKenTS7b3Sdq3\n3m127twZZRw4ENFqRUgnllaraB+GQcevM1/T5zpI3flSPt+Us2F7kjQTJWt05TlebV8paU9E/EZ/\n
+9clvTwifnut25Sd47XTkebmTm5vt6XZ2ZKBN2DQ8evM1/S5DlJ3vpTPN+Vs2J6amuPVq7Sd9JfD\n9pTtGdsz8/PzpQ5w9Gi59roNOn6d+Zo+10Hqzpfy+aacDThVdRT5hyQ9b8n2OZIeXrlTRHQjYiIi\nJsbGxkodYHy8XHvdBh2/znxNn+sgdedL+XxTzgacqjqK/H9KeoHtc20/VdJVkj5RQ79Pmp6WWq3l\nba1W0T4Mg45fZ76mz3WQuvOlfL4pZwNOWdmL+Kstkl4n6RuSviVp/6D9yw68RhSDXe12hF2shz34\nNej4deZr+lwHqTtfyuebcjZsP2pi4HUjyg68AgCaG3gFACSKIg8AGaPIA0DGKPIAkDGKPABkjCIP\nABmjyANAxijyWwGfdwtgg3Y0HQAD9HrS1JS0sFBsz80V25I0OdlcLgBbAs/kU7d//4kCv2hhoWgH\ngAEo8qnj824BVECRTx2fdwugAop86vi8WwAVUORTNzkpdbvFnHN2se52GXQFcEp4dc1WMDlJUQew\nITyTB4CMVSrytt9k+17bx22X+iD7sup+P1Dq/dVp9+7iSs/isnt304mWS/m+A7a8slNJLV0k/YKk\n8yV9VtLEqd6u7PR/Bw5EtFoR0oml1dr4VGyp91enXbuW51pcdu1qOlkh5fsOSI2amv7P9mclvSsi\nTmlOv7LT/3U6xRs9V2q3pdnZU+5my/RXJ3vt7zUw8+NJUr7vgNQkPf2f7SnbM7Zn5ufnS9227vcD\npd7fdsJ9B2yugUXe9mdsH1lleUOZA0VENyImImJibGysVMi63w+Uen/bCfcdsLkGFvmI2B0RF6yy\n3DKMgFL97wdKvb867dpVrn3YUr7vgCyUvYi/2qJNHniNKAbi2u0Iu1hXHZhLvb86rRx8TWXQdVHK\n9x2QEg174NX2GyX9raQxST+U9NWIeO2g25UdeAUAbGzgtdI7XiPiZkk3V+kDALB5eMcrAGSMIg8A\nGaPIA0DGKPIAkDGKPABkjCIPABmjyANAxijyAJAxijwAZIwiDwAZo8gDQMYo8gCQMYo8AGSMIg8A\nGaPIA0DGKPIAkLFKRd72+23fb/tu2zfbPquuYFtdryd1OtLISLHu9ZpOBGA7qvpM/nZJF0TEiyV9\nQ9K+6pG2vl5PmpqS5uaKWVXn5optCj2AYatU5CPitog41t/8gqRzqkfa+vbvlxYWlrctLBTtADBM\ndV6Tf5ukW9f6pu0p2zO2Z+bn52s8bHqOHi3XDgCbZWCRt/0Z20dWWd6wZJ/9ko5JWvOCRER0I2Ii\nIibGxsbqSZ+o8fFy7QCwWXYM2iEidq/3fdtvlXS5pF0REXUF28qmp4tr8Esv2bRaRTsADFPVV9fs\nkfRuSVdExMKg/beLyUmp25Xabcku1t1u0Q4Aw+QqT75tPyjpNEnf7zd9ISJ+a9DtJiYmYmZmZsPH\nBYDtyPbhiJgoc5uBl2vWExHPr3J7AMDm4h2vAJAxijwAZIwiDwAZo8gDQMYo8gCQMYo8AGSMIg8A\nGaPIA0DGKPIAkDGKPABkjCIPABmjyANAxijyAJAxijwAZIwiDwAZqzoz1J/Yvtv2V23fZvs5dQUD\nAFRX9Zn8+yPixRFxoaRDkt5bQ6Ztr9eTOh1pZKRY99acHj0P2+18gWGqOjPUj5dsni6Jibwr6vWW\nTwI+N1dsS3nOEbvdzhcYtkpzvEqS7WlJb5H0I0mvjoj5Qbdhjte1dTpFoVup3ZZmZ4edZvNtt/MF\nqtjIHK8Di7ztz0j6uVW+tT8iblmy3z5JT4uIP16jnylJU5I0Pj6+c26132xoZERa7UdiS8ePDz/P\nZttu5wtUsZEiP/CafETsjogLVlluWbHrxyT96jr9dCNiIiImxsbGymTcVsbHy7VvddvtfIFhq/rq\nmhcs2bxC0v3V4mB6Wmq1lre1WkV7jrbb+QLDVvXVNX9m+4jtuy
W9RtI1NWTa1iYnpW63uCZtF+tu\nN99ByO12vsCwVR543QgGXgGgvE25Jg8A2Loo8gCQMYo8AGSMIg8AGaPIA0DGGnl1je15SRt9y+vZ\nkh6tMU7dUs6XcjYp7XwpZ5PSzpdyNintfCuztSOi1LtJGynyVdieKfsSomFKOV/K2aS086WcTUo7\nX8rZpLTz1ZGNyzUAkDGKPABkbCsW+W7TAQZIOV/K2aS086WcTUo7X8rZpLTzVc625a7JAwBO3VZ8\nJg8AOEXJF3nbo7a/YvtQf/tc21+0/U3b/2z7qQ1mm7V9T38i85l+2zNs397Pd7vtpzeY7yzbB23f\nb/s+269MIZ/t8/v32eLyY9vvTCHbkoy/Z/ve/qesXm/7aak89mxf0891r+139tsau+9sf8T2I7aP\nLGlbNY8Lf2P7Qdt3276ogWxv6t93x21PrNh/Xz/bA7Zfu5nZ1sn3/v7v7N22b7Z9VpV8yRd5FR9f\nfN+S7T+X9FcR8QJJ/yvp7Y2kOuHVEXHhkpc5vUfSHf18d/S3m/LXkj4VET8v6SUq7sfG80XEA/37\n7EJJOyUtSLo5hWySZPu5kn5X0kREXCBpVNJVSuCxZ/sCSb8p6WUqfqaX9+d1aPK++6ikPSva1spz\nmaQX9JcpSdc1kO2IpF+R9LmljbZfqOLn/Iv923zQ9mgD+W6XdEFEvFjSNyTtq5QvIpJdJJ2j4gFy\nqaRDkqzijQE7+t9/paRPN5hvVtLZK9oekPTs/tfPlvRAQ9l+RtK31R93SS3fkjyvkfTvKWWT9FxJ\n35H0DBWT3R+S9NoUHnuS3iTpQ0u2/0jSHzR930nqSDoy6HEm6R8kvXm1/YaVbUn7Z1X8IV/c3idp\n35LtT0t65bDvuxXfe6OkXpV8qT+Tv1bFA3hxts9nSvphRBzrbz+k4heyKSHpNtuH+3PYStLPRsR3\nJam/flZD2c6TNC/pH/uXuz5k+/SE8i26StL1/a+TyBYR/y3pLyQdlfRdFZPUH1Yaj70jki6x/Uzb\nLUmvk/Q8JXLfLbFWnsU/oIua/h1eKsVsb5N0a//rDeVLtsjbvlzSIxFxeGnzKrs2+fKgiyPiIhX/\ngr7D9iUNZllph6SLJF0XES+V9H9q9tLRSfrXtK+QdEPTWZbqXz9+g6RzJT1H0ukqfsYrDf2xFxH3\nqbhsdLukT0n6mqRj694oLan9Di+VVDbb+1X8bHuLTavsNjBfskVe0sWSrrA9K+njKi7ZXCvpLNs7\n+vucI+nhZuJJEfFwf/2IimvKL5P0PdvPlqT++pGG4j0k6aGI+GJ/+6CKop9KPqkonF+OiO/1t1PJ\ntlvStyNiPiIel3STpF9SIo+9iPhwRFwUEZdI+oGkbyqd+27RWnkeUvGfx6JGf4dXSCab7bdKulzS\nZPSvzWiD+ZIt8hGxLyLOiYiOin/p74yISUl3Sbqyv9tbJd3SRD7bp9s+Y/FrFdeWj0j6RD9Xo/ki\n4n8kfcf2+f2mXZK+rkTy9b1ZJy7VSOlkOyrpFbZbtq0T910qj71n9dfjKgYQr1c6992itfJ8QtJb\n+q+yeYWkHy1e1knAJyRdZfs02+eqGBz+0rBD2N4j6d2SroiIhcr5NntQoaaBiVdJOtT/+rz+iT2o\n4t/80xrKdJ6Kf5W/JuleSfv77c9UMVj8zf76GQ3ebxdKmpF0t6R/kfT0VPJJakn6vqQzl7Qlka2f\n5X2S7lfxh/ufJJ2W0GPv8yr+6HxN0q6m7zsVf2S+K+lxFc82375WHhWXHD4g6VuS7tGSgc8hZntj\n/+ufSPqelgygS9rfz/aApMsauu8eVHHt/av95e+r5OMdrwCQsWQv1wAAqqPIA0DGKPIAkDGKPABk\njCIPABmjyANAxijyAJAxijwAZOz/AZKws77vqTPuAAAAAElFTkSuQmCC\n", 322 | "text/plain": [ 323 | "" 324 | ] 
325 | }, 326 | "metadata": {}, 327 | "output_type": "display_data" 328 | } 329 | ], 330 | "source": [ 331 | "point = [110,3]\n", 332 | "print(predict_k_means_clustering(point, centers))\n", 333 | "\n", 334 | "plt.plot(plotx,ploty, 'bo', centers[0][0], centers[0][1],'ro', centers[1][0], centers[1][1], 'go', point[0], point[1], 'yo')" 335 | ] 336 | }, 337 | { 338 | "cell_type": "code", 339 | "execution_count": null, 340 | "metadata": { 341 | "collapsed": true 342 | }, 343 | "outputs": [], 344 | "source": [] 345 | } 346 | ], 347 | "metadata": { 348 | "kernelspec": { 349 | "display_name": "Python 3", 350 | "language": "python", 351 | "name": "python3" 352 | }, 353 | "language_info": { 354 | "codemirror_mode": { 355 | "name": "ipython", 356 | "version": 3 357 | }, 358 | "file_extension": ".py", 359 | "mimetype": "text/x-python", 360 | "name": "python", 361 | "nbconvert_exporter": "python", 362 | "pygments_lexer": "ipython3", 363 | "version": "3.6.3" 364 | } 365 | }, 366 | "nbformat": 4, 367 | "nbformat_minor": 2 368 | } 369 | -------------------------------------------------------------------------------- /K-Means-Clustering/K-Means-Clustering.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | import random 4 | 5 | def random_centers(dim,k): 6 | centers = [] 7 | for i in range(k): 8 | center = [] 9 | for d in range(dim): 10 | rand = random.randint(0,100) 11 | center.append(rand) 12 | centers.append(center) 13 | return centers 14 | 15 | def point_clustering(data, centers, dims, first_cluster=False): 16 | for point in data: 17 | nearest_center = 0 18 | nearest_center_dist = None 19 | for i in range(0, len(centers)): 20 | euclidean_dist = 0 21 | for d in range(0, dims): 22 | dist = abs(point[d] - centers[i][d]) 23 | euclidean_dist += dist 24 | euclidean_dist = np.sqrt(euclidean_dist) 25 | if nearest_center_dist == None: 26 | nearest_center_dist = euclidean_dist 27 | 
nearest_center = i 28 | elif nearest_center_dist > euclidean_dist: 29 | nearest_center_dist = euclidean_dist 30 | nearest_center = i 31 | if first_cluster: 32 | point.append(nearest_center) 33 | else: 34 | point[-1] = nearest_center 35 | return data 36 | 37 | def mean_center(data, centers, dims): 38 | print('centers:', centers, 'dims:', dims) 39 | new_centers = [] 40 | for i in range(len(centers)): 41 | new_center = [] 42 | n_of_points = 0 43 | total_of_points = [] 44 | for point in data: 45 | if point[-1] == i: 46 | n_of_points += 1 47 | for dim in range(0,dims): 48 | if dim < len(total_of_points): 49 | total_of_points[dim] += point[dim] 50 | else: 51 | total_of_points.append(point[dim]) 52 | if len(total_of_points) != 0: 53 | for dim in range(0,dims): 54 | print(total_of_points, dim) 55 | new_center.append(total_of_points[dim]/n_of_points) 56 | new_centers.append(new_center) 57 | else: 58 | new_centers.append(centers[i]) 59 | return new_centers 60 | 61 | # Gets data and k, returns a list of center points. 
62 | def train_k_means_clustering(data, k=2, epochs=5): 63 | dims = len(data[0]) 64 | print('data[0]:',data[0]) 65 | centers = random_centers(dims,k) 66 | 67 | clustered_data = point_clustering(data, centers, dims, first_cluster=True) 68 | 69 | for i in range(epochs): 70 | centers = mean_center(clustered_data, centers, dims) 71 | clustered_data = point_clustering(data, centers, dims, first_cluster=False) 72 | 73 | return centers 74 | 75 | def predict_k_means_clustering(point, centers): 76 | dims = len(point) 77 | center_dims = len(centers[0]) 78 | 79 | if dims != center_dims: 80 | raise ValueError('Point given for prediction have', dims, 'dimensions but centers have', center_dims, 'dimensions') 81 | 82 | nearest_center = None 83 | nearest_dist = None 84 | 85 | for i in range(len(centers)): 86 | euclidean_dist = 0 87 | for dim in range(1, dims): 88 | dist = point[dim] - centers[i][dim] 89 | euclidean_dist += dist**2 90 | euclidean_dist = np.sqrt(euclidean_dist) 91 | if nearest_dist == None: 92 | nearest_dist = euclidean_dist 93 | nearest_center = i 94 | elif nearest_dist > euclidean_dist: 95 | nearest_dist = euclidean_dist 96 | nearest_center = i 97 | print('center:',i, 'dist:',euclidean_dist) 98 | 99 | return nearest_center 100 | -------------------------------------------------------------------------------- /K-Means-Clustering/K-Means-Clustering.swift: -------------------------------------------------------------------------------- 1 | import UIKit 2 | 3 | 4 | public class dataPoint{ 5 | var xLoc: Float 6 | var yLoc: Float 7 | var dataClass: String 8 | 9 | init(x:Float,y:Float,c:String) 10 | { 11 | self.xLoc = x 12 | self.yLoc = y 13 | self.dataClass = c 14 | } 15 | 16 | public func calcDistance(x:Float,y:Float) -> Float 17 | { 18 | let forX = (pow((self.xLoc - x), 2)) 19 | let forY = (pow((self.yLoc - y), 2)) 20 | var dist = forX + forY 21 | dist = sqrt(dist) 22 | return dist 23 | } 24 | } 25 | 26 | 27 | public class KMeansAlgorithm 28 | { 29 | var k:Int 30 | 
var epoch: Int 31 | var dataPoints: [dataPoint] 32 | var meanCenters: [[Float]] 33 | 34 | init(k: Int, epoch: Int, dp: [dataPoint]) 35 | { 36 | self.k = k - 1 37 | self.epoch = epoch 38 | self.dataPoints = dp 39 | self.meanCenters = [] 40 | } 41 | 42 | public func initRandomCenters() 43 | { 44 | for _ in 0...self.k 45 | { 46 | let center = [Float.random(in: -1 ... 1), Float.random(in: -1 ... 1)] 47 | self.meanCenters.append(center) 48 | } 49 | } 50 | 51 | public func findNearestCenter(point: dataPoint) -> String{ 52 | var nearestCenter = 0 53 | var nearestDistance = Float.greatestFiniteMagnitude 54 | for i in 0...meanCenters.count-1 55 | { 56 | let centerDist = point.calcDistance(x: meanCenters[i][0], y: meanCenters[i][1]) 57 | 58 | if centerDist < nearestDistance 59 | { 60 | nearestCenter = i 61 | nearestDistance = centerDist 62 | } 63 | } 64 | return String(nearestCenter) 65 | } 66 | 67 | public func pointClustering() 68 | { 69 | for point in dataPoints 70 | { 71 | let nearestCenter = findNearestCenter(point: point) 72 | point.dataClass = String(nearestCenter) 73 | } 74 | } 75 | 76 | public func changeCenters(){ 77 | var newCenters = [[Float]]() 78 | for i in 0...meanCenters.count-1 79 | { 80 | var totalX = Float(0) 81 | var totalY = Float(0) 82 | var numPoints = 0 83 | 84 | for point in dataPoints 85 | { 86 | if point.dataClass == String(i){ 87 | numPoints += 1 88 | totalX += point.xLoc 89 | totalY += point.yLoc 90 | } 91 | } 92 | let newCenterX = totalX / Float(numPoints) 93 | let newCenterY = totalY / Float(numPoints) 94 | newCenters.append([newCenterX, newCenterY]) 95 | } 96 | self.meanCenters = newCenters 97 | } 98 | 99 | public func train() 100 | { 101 | initRandomCenters() 102 | 103 | for _ in 0...self.epoch 104 | { 105 | pointClustering() 106 | changeCenters() 107 | } 108 | } 109 | 110 | public func predict(x:Float, y:Float) -> String{ 111 | let point2Predict = dataPoint(x:x, y:y, c: "N") 112 | let nearestCenter = findNearestCenter(point: point2Predict) 
113 | return nearestCenter 114 | } 115 | } 116 | 117 | 118 | // TEST DATA 119 | var randomData = [dataPoint]() 120 | for _ in 0...20 { 121 | let randX = Float.random(in: -1...1) 122 | let randY = Float.random(in: -1...1) 123 | randomData.append(dataPoint(x: randX, y: randY , c: "N")) 124 | } 125 | 126 | // TRAIN 127 | var KMeans = KMeansAlgorithm(k: 3, epoch: 5, dp: randomData) 128 | KMeans.train() 129 | 130 | // PREDICT 131 | print(KMeans.predict(x: 0.3, y: -0.1)) 132 | -------------------------------------------------------------------------------- /K-Nearest-Neighbors/KNN-without-ML-libraries.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## K-Nearest Neighbor Algoritm ###\n", 8 | "KNN is a classification algorithm. It is basic to understand. \n", 9 | "\n", 10 | "K is the number of neighbors you want to look at. Algorithm looks at the classes of nearest k points and classify the point if a class have more points that are nearest to point. " 11 | ] 12 | }, 13 | { 14 | "cell_type": "markdown", 15 | "metadata": {}, 16 | "source": [ 17 | "### Import Libraries ###\n", 18 | "I will only use numpy for math and matplotlib for graphs. I will not use any ML libraries." 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 77, 24 | "metadata": { 25 | "collapsed": true 26 | }, 27 | "outputs": [], 28 | "source": [ 29 | "import numpy as np\n", 30 | "import matplotlib.pyplot as plt\n", 31 | "\n", 32 | "# Use matplotlib in Jupyter Notebook Outputs\n", 33 | "%matplotlib inline" 34 | ] 35 | }, 36 | { 37 | "cell_type": "markdown", 38 | "metadata": {}, 39 | "source": [ 40 | "### Define Data ###\n", 41 | "For this example I will use fake data, but for better understanding, my data is the accaptance of student to a university and their SAT Score and GPA." 
42 | ] 43 | }, 44 | { 45 | "cell_type": "code", 46 | "execution_count": 78, 47 | "metadata": { 48 | "collapsed": true 49 | }, 50 | "outputs": [], 51 | "source": [ 52 | "# Input data - [SAT Score, GPA]\n", 53 | "X = [[1590,2.9], [1540,2.7], [1600,2.6], [1590,2.7], [1520,2.5], [1540,2.4], [1560,2.3], [1490,2.3], [1510,2.4],\n", 54 | " [1350,3.9], [1360,3.7], [1370,3.8], [1380,3.7], [1410,3.6], [1420,3.9], [1430,3.4], [1450,3.7], [1460,3.2],\n", 55 | " [1590,3.9], [1540,3.7], [1600,3.6], [1490,3.7], [1520,3.5], [1540,3.4], [1560,3.3], [1460,3.3], [1510,3.4],\n", 56 | " [1340,2.9], [1360,2.4], [1320,2.5], [1380,2.6], [1400,2.1], [1320,2.5], [1310,2.7], [1410,2.1], [1305,2.5],\n", 57 | " [1460,2.7], [1500,2.9], [1300,3.5], [1320,3.6], [1400,2.7], [1300,3.1], [1350,3.1], [1360,2.9], [1305,3.9], \n", 58 | " [1430,3.0], [1440,2.3], [1440,2.5], [1380,2.1], [1430,2.1], [1400,2.5], [1420,2.3], [1310,2.1], [1350,2.0]]\n", 59 | "\n", 60 | "# Labels - Accepted or Rejected\n", 61 | "Y = ['accepted','accepted','accepted','accepted','accepted','accepted','accepted','accepted','accepted',\n", 62 | " 'accepted','accepted','accepted','accepted','accepted','accepted','accepted','accepted','accepted',\n", 63 | " 'accepted','accepted','accepted','accepted','accepted','accepted','accepted','accepted','accepted',\n", 64 | " 'rejected','rejected','rejected','rejected','rejected','rejected','rejected','rejected','rejected',\n", 65 | " 'rejected','rejected','rejected','rejected','rejected','rejected','rejected','rejected','rejected',\n", 66 | " 'rejected','rejected','rejected','rejected','rejected','rejected','rejected','rejected','rejected']" 67 | ] 68 | }, 69 | { 70 | "cell_type": "markdown", 71 | "metadata": {}, 72 | "source": [ 73 | "### Plot data to a 2d graph ###\n", 74 | "Let's see our data on a graph. I like to see data on graphs. It helps me to understand the problem better when there is an error." 
75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": 79, 80 | "metadata": {}, 81 | "outputs": [ 82 | { 83 | "data": { 84 | "text/plain": [ 85 | "[]" 86 | ] 87 | }, 88 | "execution_count": 79, 89 | "metadata": {}, 90 | "output_type": "execute_result" 91 | }, 92 | { 93 | "data": { 94 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAX4AAAD8CAYAAABw1c+bAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAHH1JREFUeJzt3X+0XWV95/H3pyEGK1WiXIY0Pwhq\n7AIpJHqbxmFaFRmI91awq3Y14g/s0JW1uvBXq+0U6SKCdS1bO1pbxmJaWII1pQygpiQU06p1WJbg\nTQzhR0AjwhBDm4tBfsw46SR854+zL+x7cn7sc+8+d//6vNY6K+c8+znnPM/eJ9+7z7Of73kUEZiZ\nWXP8VNENMDOzueXAb2bWMA78ZmYN48BvZtYwDvxmZg3jwG9m1jAO/GZmDePAb2bWMA78ZmYNc0zR\nDejkhBNOiOXLlxfdDDOzytixY8fjETGSpW4pA//y5cuZmJgouhlmZpUh6ZGsdTMP9UiaJ+k7km7t\nsG2BpL+TtFfSdknLU9suTcoflHRe1vczM7PhGGSM/wPAni7bLgaeiIhXAp8G/hhA0mnAOuDVwFrg\ns5Lmzby5ZmY2W5kCv6QlwDjw112qXABcl9y/CXiTJCXlN0TEoYj4AbAXWD27JpuZ2WxkPeP/M+D3\ngWe7bF8MPAoQEYeBJ4GXpcsT+5IyMzMrSN/AL+lXgAMRsaNXtQ5l0aO80/uslzQhaWJycrJfs8zM\nbIaynPGfBZwv6WHgBuBsSX/TVmcfsBRA0jHAS4CD6fLEEmB/pzeJiI0RMRoRoyMjmWYkmZnZDPQN\n/BFxaUQsiYjltC7Ufi0i3tlWbTNwUXL/bUmdSMrXJbN+TgFWAHfl1nozMxvYjOfxS7oSmIiIzcA1\nwBck7aV1pr8OICLuk3QjcD9wGLgkIo7MvtkzND4OW7dOLxsbgy1bimlPH+Obxtn6ventHVsxxpYL\ny9neueB9YmVVpc+myrjm7ujoaAwlgUudLjkAJdwHALqic3tjQznbOxe8T6ysiv5sStoREaNZ6vq3\neszMGqb+gX98vHWm3+1sH57fPj4+d+3qYnzTOLpCXc8egOe2j28qvr1zwfvEyqqqn836D/X0Cvid\nFLw/en2AOmnCEIf3iZVVmT6bHuoxM7OuHPjNzBrGgd/MrGHqH/jHxoZTd0jGVmRvwyB1q8z7xMqq\nqp/N+l/cTfM8/srzPrGyKvqzOcjF3VKuwGXDUaXMQjMbnvoP9aR1GsopwfBON52+Gs7m62J70O9W\nVmZ57xOzvFTps9msoZ6GK/qrqJkNj4d67DmdhnfaTf1B8LCPWTM0a6ingQYZyqnasI+ZzYwDv5lZ\nwzjwm5k1jAO/mVnDZFls/VhJd0m6W9J9kq7oUOfTknYlt+9K+nFq25HUts15d8B6q2pmoZkNT5ZZ\nPYeAsyPiGUnzgTsk3RYRd05ViIjfmbov6X3AqtTzfxIRK3NrsQ0kPUvH0znNDDIE/mTR9GeSh/OT\nW69I8XZgw+ybZlnlnZHrDN/pyr4/yt6+LOrQhyrJNMYvaZ6kXcABYFtEbO9S72TgFOBrqeJjJU1I\nulPSW3u8x/qk3sTk5OQAXbCsGblZMwvrkOGbp7Lvj7K3
L4s69KFKMiVwRcQRYKWk44EvSTo9Iu7t\nUHUdcFNSf8qyiNgv6eXA1yTdExHf7/AeG4GN0MrcHbgn1pfPnswMZvCTDZI2AP87Iv60w7bvAJdE\nxLe6PPfzwK0RcVOv9/BPNvSXJSN3SpavzHm/XtWVfX+UvX1Z1KEPZZLr0ouSRpIzfSS9EDgHeKBD\nvZ8DFgL/kipbKGlBcv8E4Czg/iwNs97yzsh1hu90Zd8fZW9fFnXoQ1VlGepZBFwnaR6tPxQ3RsSt\nkq4EJiJiaorm24EbYvpXiFOBz0l6NnnuJyLCgd/MrEBZZvXsZvr0zKnyy9sef7RDnW8BPz+L9pmZ\nWc6cuWtm1jAO/BWVd0auM3ynK/v+KHv7sqhDH6rKC7HUQN4Zuc7wna7s+6Ps7cuiDn0omhdiGYbx\ncdjaNrNgbAy2eIpZHpy5aU1Rhs+6h3qyag/63coKkPdan0WsHVrmzM2yr6Va9vZlUYc+ZFWGz7qH\nerJS56+ilHD/VZG/6ltTDOuznmsCV6ONj7cCfregD89vHx+fu3bVxPimcXSFuv5HAJ7bPr7J+9eq\nq2yfdZ/x99Ir4HdSwn1ZZr3+E3Tis3+rqrn4rPuM38zMunLgNzNrGAd+M7OGceDvZWyA6WSD1DXA\nmZvWHGX7rPviblaezjlUns5pTeHpnMOUnorpKZdmuUpPT/SU2+qpb+DPO9O201COh3dy06TMzToo\nQ/ZpVZXhs973t3okHQt8E1iQ1L8pIja01XkP8Engh0nRVRHx18m2i4A/TMr/KCKuy6fpc8y/yTNU\n/k0ea4oyfNaznPEfAs6OiDOBlcBaSWs61Pu7iFiZ3KaC/kuBDcAvAquBDZIW5tT2oznT1mxoypZ9\najPXN/BHyzPJw/nJLetViPOAbRFxMCKeALYBa2fU0iwGGcopyQ+smVWF18itj0xj/JLmSdoFHKAV\nyLd3qPZrknZLuknS0qRsMfBoqs6+pMzMzAqSKfBHxJGIWAksAVZLOr2tyt8DyyPiDOAfgalx/E7f\nCTt+W5C0XtKEpInJyclsrTczs4ENNKsnIn4MfIO24ZqI+FFEHEoe/hXw2uT+PmBpquoSYH+X194Y\nEaMRMToyMjJIs8zMbAB9A7+kEUnHJ/dfCJwDPNBWZ1Hq4fnAnuT+7cC5khYmF3XPTcqGw5m2ZkNT\ntuxTm7ksSy8uAq6TNI/WH4obI+JWSVcCExGxGXi/pPOBw8BB4D0AEXFQ0seAbyevdWVEHMy7E89J\nT7l0pq1ZrtLTEJ1pXW19A39E7AZWdSi/PHX/UuDSLs+/Frh2Fm00G1gZ1jWtoqL2m4/X3Kpv5q4z\nbRvNmaUzk3W/5Z196uM1t7IM9VSTM23NhsZn4tVW38BvjdNpuKDd1Ni0hxGeV9R+8/EqTn2Heqxx\nnFk6M0XtNx+v4jjwm5k1jAO/mVnDOPCbmTWMA7/VhjNLZ6ao/ebjVRyvuWu15MzSmSlqv/l4zd4g\na+56OqdZTpx9Wg4+Dv15qMdqqYh1TeuQfVrUerB5vm8djsOw+YzfaslndzNT1H7z8ZpbDvxms+Ds\n03LwcRiMh3rMZsHZp+Xg4zAYB34zs4Zx4Dcza5gsSy8eK+kuSXdLuk/SFR3q/K6k+yXtlvRPkk5O\nbTsiaVdy25x3B8zMbDBZzvgPAWdHxJnASmCtpDVtdb4DjEbEGcBNwJ+ktv0kIlYmt/NzabVZSTj7\ntBx8HAaTZenFAJ5JHs5PbtFW5+uph3cC78yrgWZl5nVoy8HHYTCZxvglzZO0CzgAbIuI7T2qXwzc\nlnp8rKQJSXdKemuP91if1JuYnJzM1HgzMxtcpsAfEUciYiWwBFgt6fRO9SS9ExgFPpkqXpb8fsSF\nwJ9JekWX99gYEaMRMToyMjJQJ8zMphSVfVwlAyVwRcSPJX0DWAvcm94m6RzgMuD1EXEo9Zz9yb8P\nJc9dBXx/ds02Kwcn
DpWP93F/WWb1jEg6Prn/QuAc4IG2OquAzwHnR8SBVPlCSQuS+ycAZwH359d8\ns2I5cciqKMsZ/yLgOknzaP2huDEibpV0JTAREZtpDe0cB/wPSQD/K5nBcyrwOUnPJs/9REQ48JuZ\nFSjLrJ7dtIZn2ssvT90/p8tzvwX8/GwaaGZm+XLmrplZwzjwm82CE4esivyzzGaz4MQhq6L6nvGP\nj4M0/TY+XnSrBlOHPphZ6dQ38G/tMHWuU1mZ1aEPDeLEIasKD/WY5cSJQ1YV9TrjTw+NdFP2IZM6\n9MHMSk2tH98sl9HR0ZiYmBj8ib2CZScl7Hst+mBmc07SjuR30fqq1xm/mZn15cBvZtYwDvxmZg1T\nr8A/NsDUuUHqzqU69MHMSq1e0zm3pKbTdbtIWvaLoXXog5mVWr3O+M2mOOvZrKv6Bv5OwyBVGxqp\nQx+K4qxns676DvVIOhb4JrAgqX9TRGxoq7MAuB54LfAj4Dci4uFk26W0FmA/Arw/Im7PswNdbalB\nFmUd+mBmpZPljP8QcHZEnAmsBNZKWtNW52LgiYh4JfBp4I8BJJ0GrANeTWud3s8mK3mZ5c9Zz2aZ\n9A380fJM8nB+cmu/ungBcF1y/ybgTWqtwXgBcENEHIqIHwB7gdW5tNys3SBDOR72sQbLNMYvaZ6k\nXcABYFtEbG+rshh4FCAiDgNPAi9Llyf2JWVmZlaQTIE/Io5ExEpgCbBa0ultVTp9t44e5UeRtF7S\nhKSJycnJLM0yM7MZGGhWT0T8GPgGrfH6tH3AUgBJxwAvAQ6myxNLgP1dXntjRIxGxOjIyMggzTIz\nswH0DfySRiQdn9x/IXAO8EBbtc3ARcn9twFfi9bPfm4G1klaIOkUYAVwV16NN5vGWc9mmWTJ3F0E\nXJfMxvkp4MaIuFXSlcBERGwGrgG+IGkvrTP9dQARcZ+kG4H7gcPAJRFxZBgdMXPWs1k2WWb17I6I\nVRFxRkScHhFXJuWXJ0GfiPi/EfHrEfHKiFgdEQ+lnv/xiHhFRPxcRNw2vK5UTJ6Zpc5StSz8ORmq\n8U3j6ApNu41vKuf+rddCLFWS5xmpz26PNj5+9JTNsbFmJ8X5czJUuqLz/o0Nc7N/B1mIpV4/0mY2\npckB3qyP+v5WTxnlmVnqLFXLwp+ToUoP73RTxmEfD/XMpTzX0/XavJaFPydD1SvgdzLMYR+vuWtm\nZl058JuZNYwDv5lZwzjwz6U8M0udpWpZ+HMyVGMrsu+zQeoOmy/uFsXz+G2u+XMyVFWax+8zfjOz\nhnHgL0qe6+l6bV7Lwp+Toeo0lFOm4Z00D/WYmdWAh3rMzKwrB34zs4Zx4DczaxgHfjOzhun7s8yS\nlgLXAycBzwIbI+IzbXV+D3hH6jVPBUYi4qCkh4GngSPA4awXH8zMbDiy/B7/YeBDEbFT0s8AOyRt\ni4j7pypExCeBTwJIegvwOxFxMPUab4yIx/NsuJmZzUyWpRcfi4idyf2ngT3A4h5PeTvwt/k0z8zM\n8jbQGL+k5cAqYHuX7T8NrAVuThUH8FVJOyStn1kzh8jrkA5Xk/Zvk/qasyqtV9tNlfqQOYFL0nHA\nPwMfj4hbutT5DeCdEfGWVNnPRsR+SScC24D3RcQ3Ozx3PbAeYNmyZa995JFHBu7MjPj3S4arSfu3\nSX3NWdG/c5OHovuQewKXpPm0zuK/2C3oJ9bRNswTEfuTfw8AXwJWd3piRGyMiNGIGB0ZGcnSLDMz\nm4G+gV+SgGuAPRHxqR71XgK8HvhKquxFyQVhJL0IOBe4d7aNnjWvQzpcTdq/Teprzqq6Xm1aVfvQ\nd6hH0n8C/idwD63pnAAfAZYBRMTVSb33AGsjYl3quS+ndZYPrRlEmyLi4/0aNfTf6vE6pMPVpP3b\npL7mrEzr1c5UmfowyFBP3+mcEXEH0Ld3EfF54PNtZQ8BZ2ZpiJmZzQ1n7pqZNYwDv5
lZwzQz8Hsd\n0uFq0v5tUl9zVtX1atOq2gcvxOK518PVpP3bpL7mrOg58Hkoug9eiMVmxxmoZkNThgxfB36vQ3q0\nrVuzlWXRpP3bpL7mrErr1XaTtQ9bv3f0/6VOZcPkoR47mocszIZmWENCHuqxwTkD1Wxoypbh68Bv\nLYMM5cx02MesoQYZypmLYR8HfjOzhnHgNzNrGAd+M7OGceC3Fmegmg1N2TJ8mxX4Z5OYdOKJRz/3\nxBOH2965tGVLa7pmrymbU9u3bMn+umVPBit7+0qqDElIVbLlwi3Ehug5ZXNq+5YLB/j/NUPNmsc/\nm/npTZrbnmdfy77fyt6+kir65wmqzPP4rZycgWo2NGXIUs6y9OJSSV+XtEfSfZI+0KHOGyQ9KWlX\ncrs8tW2tpAcl7ZX0B3l3oK/ZJCalh3f6Pbeuwz6zHd7ppshhlbK3r6TKloRUVelhn7kc3knLsvTi\nImBRROxM1s/dAbw1Iu5P1XkD8OGI+JW2584Dvgv8Z2Af8G3g7enndpLrUM9slsbzsnozU/b9Vvb2\nlVSZlhm0o+U61BMRj0XEzuT+08AeYHHGtqwG9kbEQxHx78ANwAUZn2tmZkMw0Bi/pOXAKmB7h82v\nk3S3pNskvTopWww8mqqzjy5/NCStlzQhaWJycnKQZpmZ2QAyB35JxwE3Ax+MiKfaNu8ETo6IM4G/\nAL489bQOL9Xx+19EbIyI0YgYHRkZydosMzMbUKbAL2k+raD/xYi4pX17RDwVEc8k97cC8yWdQOsM\nf2mq6hJg/6xbPYjZJCYN8gfIf6yeV/ZksLK3r6TKloRkM5fl4q6A64CDEfHBLnVOAv4tIkLSauAm\n4GRg6uLum4Af0rq4e2FE3NfrPT2Pv0bKvt/K3r6S8jz+8hnk4u4xGeqcBbwLuEfSrqTsI8AygIi4\nGngb8NuSDgM/AdZF6y/KYUnvBW6n9Ufg2n5B32ZgfPzon0oeGxtsCqYZrSmb7T8LPLZibOjTDYt6\n36bqG/gj4g46j9Wn61wFXNVl21agHD/gPjbWOUBmMTIC7RedyzK8k+dSiXmbzT6fC2Vv3xzLuizg\n2IqxjoF62O9r+chyxl8fszkDPnAgv3Y0Sdm/dZS9fSXlM/Fq8082VJWzTy0nRWXkOhO4OM36kbY6\ncfap5aSojFxnAufLP9JmZmZdOfCbmTWMA7+ZWcM48FeVs08tJ0Vl5DoTuDi+uFsHzj61nBSVketM\n4NnzxV2rr7zXyK3Bmrte/3Y674/+HPjroElLJeadpVzmrOeM8sx6LWpZwDzf11nA/TUrc7eunH1q\nOSkqI9eZwHPLZ/xWfnlnKdcg69lZr9N5fwzGF3et/PLOUq5B1rOzXqfz/vDFXTMz68GB38ysYfoG\nfklLJX1d0h5J90n6QIc675C0O7l9S9KZqW0PS7pH0i5JHr8xMytYljP+w8CHIuJUYA1wiaTT2ur8\nAHh9RJwBfAzY2Lb9jRGxMuv4k9k0eWcp1yDr2Vmv03l/DGbgi7uSvgJcFRHbumxfCNwbEYuTxw8D\noxHxeNb38MVd6yrvLOUaZD0763W6pu6PoV3clbQcWAVs71HtYuC21OMAvipph6T1g7yfmZnlL3Pg\nl3QccDPwwYh4qkudN9IK/P81VXxWRLwGeDOtYaJf7vLc9ZImJE1Mtq9tazYl7yzlGmQ9F5VtW1be\nH/1lGuqRNB+4Fbg9Ij7Vpc4ZwJeAN0fEd7vU+SjwTET8aa/381CPmdlgch3qkSTgGmBPj6C/DLgF\neFc66Et6kaSfmboPnAvcm6VhZmY2HFl+q+cs4F3APZJ2JWUfAZYBRMTVwOXAy4DPtv5OcDj5y/Mf\ngC8lZccAmyLiH3LtgZmZDaRv4I+IO4Ce+dAR8VvAb3Uofwg48+hnmJlZUZy5a2bWMA78ZmYN48Bv\nZtYwDvxmZg3jwF+DNVdzl+c+yfpaXkvXbM54IZ
Ya/FZL7vLcJ1lfy7/BYzYrXojFzMy6ambgr8Ga\nq7nLc58M8lpeS9dszjVzqKcGa67mLs99Muhr5fGeM3nfJhxXawwP9ZiZWVcO/GZmDePAb2bWMM0M\n/DVYczV3ee6TYeyzhqylazYXmnlxN83zvY/mefxmleOLu3Xh7FMzGwIH/jKvubp1a7ayvOW5T7K+\nltfSNZszfYd6JC0FrgdOAp4FNkbEZ9rqCPgMMAb8H+A9EbEz2XYR8IdJ1T+KiOv6Ncpr7iY8XGFm\nGeU91HMY+FBEnAqsAS6RdFpbnTcDK5LbeuAvk4a8FNgA/CKwGtggaWGmXjSVs0/NbMj6Bv6IeGzq\n7D0ingb2AIvbql0AXB8tdwLHS1oEnAdsi4iDEfEEsA1Ym2sP6maQoZy5GPYxs9oZaIxf0nJgFbC9\nbdNi4NHU431JWbdyMzMrSObAL+k44GbggxHxVPvmDk+JHuWdXn+9pAlJE5OTk1mbZWZmA8oU+CXN\npxX0vxgRt3Sosg9Ymnq8BNjfo/woEbExIkYjYnRkZCRLs8zMbAb6Bv5kxs41wJ6I+FSXapuBd6tl\nDfBkRDwG3A6cK2lhclH33KTMunH2qZkN2TEZ6pwFvAu4R9KupOwjwDKAiLga2EprKudeWtM5fzPZ\ndlDSx4BvJ8+7MiIO5tf8Gtqy5fn7ns5pZkPQN/BHxB10HqtP1wngki7brgWunVHrzMwsd87cLTNn\nn5rZEGQZ6rGipId9zMxy4jN+M7OGceA3M2sYB34zs4Zx4DczaxgHfjOzhnHgNzNrGAd+M7OGceA3\nM2uYvksvFkHSJPBIji95AvB4jq9XBPehHOrQB6hHP9yH6U6OiEw/bVzKwJ83SRNZ16IsK/ehHOrQ\nB6hHP9yHmfNQj5lZwzjwm5k1TFMC/8aiG5AD96Ec6tAHqEc/3IcZasQYv5mZPa8pZ/xmZpaobOCX\ndK2kA5LuTZV9TNJuSbskfVXSzyblkvTnkvYm21+Tes5Fkr6X3C4qcR/eIOnJpHyXpMtTz1kr6cGk\nf39QdB9S2z4sKSSdkDyuzHHo0YfKHAdJH5X0w1Rbx1LbLk3a+aCk86rWB0nLJf0kVX516jmvlXRP\n0oc/T9YNL6wPSfn7kv16n6Q/SZUXcxwiopI34JeB1wD3pspenLr/fuDq5P4YcButJSTXANuT8pcC\nDyX/LkzuLyxpH94A3NrhNeYB3wdeDrwAuBs4rcg+JOVLgdtp5WOcULXj0KMPlTkOwEeBD3eoe1rS\nvgXAKUm751WsD8vbj1dq213A65LP2W3AmwvuwxuBfwQWJI9PLPo4VPaMPyK+CRxsK3sq9fBFwNQF\njAuA66PlTuB4SYuA84BtEXEwIp4AtgFrh9/659o7SB+6WQ3sjYiHIuLfgRto9XdOdOpD4tPA7zO9\n/ZU5DolOfeimrMehkwuAGyLiUET8ANhLq/1V6kNHyefpxRHxL9GKrtcDb82jfVl06cNvA5+IiENJ\nnQNJeWHHobKBvxtJH5f0KPAOYOpr+GLg0VS1fUlZt/JCdekDwOsk3S3pNkmvTspK1wdJ5wM/jIi7\n2zZV5jj06ANU5Dgk3psMq10raWFSVpnjkOjUB4BTJH1H0j9L+qWkbDGtdk8pQx9eBfySpO1JW38h\nKS/sONQu8EfEZRGxFPgi8N6kuNMYX/QoL1SXPuyklZJ9JvAXwJeT8lL1QdJPA5cx/Q/Wc5s7lJXu\nOPTpQyWOQ+IvgVcAK4HHgP+WlFfiOCS69eExYFlErAJ+F9gk6cWUsw/H0BrCXAP8HnBjct2hsONQ\nu8Cfsgn4teT+PlrjtVOWAPt7lJfFc32IiKci4pnk/lZgfnLBsWx9eAWt8cq7JT2ctGenpJOoznHo\n2ocKHQci4t8i4khEPAv8Fa0hBKjOcejah2R45EfJ/R20xsRfRasPS1IvUXgfaLXplmSI8y7gWVq/\n0VPYcahV4J
e0IvXwfOCB5P5m4N3JrJI1wJMR8RitC3fnSlqYfIU8NykrTLc+SDppanaCpNW0jt2P\ngG8DKySdIukFwDpa/S1ERNwTESdGxPKIWE7rQ/yaiPhXKnIcevWhKschad+i1MNfBaZmmmwG1kla\nIOkUYAWtC6KV6YOkEUnzkvsvp9WHh5LP09OS1iTH6d3AV+a42e2+DJwNIOlVtC7YPk6Rx2EYV7bn\n4gb8La2ve/+P1n/Mi4GbaX0wdgN/DyxO6gr477TOCu4BRlOv819oXVTZC/xmifvwXuA+Wlf47wT+\nY+p1xoDvJv27rOg+tG1/mOdnxFTmOPToQ2WOA/CFZD/vphU4FqXqX5a080FSs16q0gda34SnjsNO\n4C2p1xlN/g99H7iKJFG1wD68APibpE07gbOLPg7O3DUza5haDfWYmVl/DvxmZg3jwG9m1jAO/GZm\nDePAb2bWMA78ZmYN48BvZtYwDvxmZg3z/wG/mGq+cC46kgAAAABJRU5ErkJggg==\n", 95 | "text/plain": [ 96 | "" 97 | ] 98 | }, 99 | "metadata": {}, 100 | "output_type": "display_data" 101 | } 102 | ], 103 | "source": [ 104 | "for i in range(len(X)):\n", 105 | " if Y[i] == 'accepted':\n", 106 | " plt.scatter(X[i][0], X[i][1], s=120, marker='P', linewidths=2, color='green')\n", 107 | " else:\n", 108 | " plt.scatter(X[i][0], X[i][1], s=120, marker='P', linewidths=2, color='red')\n", 109 | " \n", 110 | "plt.plot()" 111 | ] 112 | }, 113 | { 114 | "cell_type": "markdown", 115 | "metadata": {}, 116 | "source": [ 117 | "### Helper Functions ###" 118 | ] 119 | }, 120 | { 121 | "cell_type": "code", 122 | "execution_count": 80, 123 | "metadata": { 124 | "collapsed": true 125 | }, 126 | "outputs": [], 127 | "source": [ 128 | "# Find which variable is the most in an array of variables\n", 129 | "def most_found(array):\n", 130 | " list_of_words = []\n", 131 | " for i in range(len(array)):\n", 132 | " if array[i] not in list_of_words:\n", 133 | " list_of_words.append(array[i])\n", 134 | " \n", 135 | " most_counted = ''\n", 136 | " n_of_most_counted = None\n", 137 | " \n", 138 | " for i in range(len(list_of_words)):\n", 139 | " counted = array.count(list_of_words[i])\n", 140 | " if n_of_most_counted == None:\n", 141 | " most_counted = list_of_words[i]\n", 142 | " n_of_most_counted = counted\n", 143 | " elif n_of_most_counted < counted:\n", 144 | " most_counted = list_of_words[i]\n", 145 | " n_of_most_counted = counted\n", 146 | " elif 
n_of_most_counted == counted:\n", 147 | " most_counted = None\n", 148 | " \n", 149 | " return most_counted" 150 | ] 151 | }, 152 | { 153 | "cell_type": "markdown", 154 | "metadata": {}, 155 | "source": [ 156 | "### KNN Algorithm ###\n", 157 | "I calculated euclidean distance of every point to the new point and found labels of nearest k points. \n", 158 | "\n", 159 | "#### Euclidean Distance ####\n", 160 | "square root of sum of square of distance between two points in every dimension. \n", 161 | "\n", 162 | "Like pythagorean theorem: a^2 + b^2 = c^2" 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": 81, 168 | "metadata": {}, 169 | "outputs": [], 170 | "source": [ 171 | "def find_neighbors(point, data, labels, k=3):\n", 172 | " # How many dimentions do the space have?\n", 173 | " n_of_dimensions = len(point)\n", 174 | " \n", 175 | " #find nearest neighbors\n", 176 | " neighbors = []\n", 177 | " neighbor_labels = []\n", 178 | " \n", 179 | " for i in range(0, k):\n", 180 | " # To find it in data later, I get its order\n", 181 | " nearest_neighbor_id = None\n", 182 | " smallest_distance = None\n", 183 | " \n", 184 | " for i in range(0, len(data)):\n", 185 | " eucledian_dist = 0\n", 186 | " for d in range(0, n_of_dimensions):\n", 187 | " dist = abs(point[d] - data[i][d])\n", 188 | " eucledian_dist += dist\n", 189 | " \n", 190 | " eucledian_dist = np.sqrt(eucledian_dist)\n", 191 | " \n", 192 | " if smallest_distance == None:\n", 193 | " smallest_distance = eucledian_dist\n", 194 | " nearest_neighbor_id = i\n", 195 | " elif smallest_distance > eucledian_dist:\n", 196 | " smallest_distance = eucledian_dist\n", 197 | " nearest_neighbor_id = i\n", 198 | " \n", 199 | " neighbors.append(data[nearest_neighbor_id])\n", 200 | " neighbor_labels.append(labels[nearest_neighbor_id])\n", 201 | " \n", 202 | " data.remove(data[nearest_neighbor_id])\n", 203 | " labels.remove(labels[nearest_neighbor_id])\n", 204 | " return neighbor_labels\n", 205 | "\n", 206 | 
"def k_nearest_neighbor(point, data, labels, k=3):\n", 207 | " \n", 208 | " # If two different labels are most found, continue to search for 1 more k\n", 209 | " while True:\n", 210 | " neighbor_labels = find_neighbors(point, data, labels, k=k)\n", 211 | " label = most_found(neighbor_labels)\n", 212 | " if label != None:\n", 213 | " break\n", 214 | " k += 1\n", 215 | " if k >= len(data):\n", 216 | " break\n", 217 | " \n", 218 | " return label" 219 | ] 220 | }, 221 | { 222 | "cell_type": "markdown", 223 | "metadata": {}, 224 | "source": [ 225 | "### Predict label using KNN ###" 226 | ] 227 | }, 228 | { 229 | "cell_type": "code", 230 | "execution_count": 82, 231 | "metadata": {}, 232 | "outputs": [ 233 | { 234 | "data": { 235 | "text/plain": [ 236 | "'accepted'" 237 | ] 238 | }, 239 | "execution_count": 82, 240 | "metadata": {}, 241 | "output_type": "execute_result" 242 | } 243 | ], 244 | "source": [ 245 | "point = [1500, 2.3]\n", 246 | "k_nearest_neighbor(point, X, Y, k=5)" 247 | ] 248 | }, 249 | { 250 | "cell_type": "code", 251 | "execution_count": null, 252 | "metadata": { 253 | "collapsed": true 254 | }, 255 | "outputs": [], 256 | "source": [] 257 | } 258 | ], 259 | "metadata": { 260 | "kernelspec": { 261 | "display_name": "Python 3", 262 | "language": "python", 263 | "name": "python3" 264 | }, 265 | "language_info": { 266 | "codemirror_mode": { 267 | "name": "ipython", 268 | "version": 3 269 | }, 270 | "file_extension": ".py", 271 | "mimetype": "text/x-python", 272 | "name": "python", 273 | "nbconvert_exporter": "python", 274 | "pygments_lexer": "ipython3", 275 | "version": "3.6.3" 276 | } 277 | }, 278 | "nbformat": 4, 279 | "nbformat_minor": 2 280 | } 281 | -------------------------------------------------------------------------------- /K-Nearest-Neighbors/euclidean-distance.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/CihanBosnali/Machine-Learning-without-Libraries/5c2f546e0a11f1b0769622edc9c61a6eb7d317db/K-Nearest-Neighbors/euclidean-distance.png -------------------------------------------------------------------------------- /K-Nearest-Neighbors/knn.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | 4 | # Find which variable is the most in an array of variables 5 | def most_found(array): 6 | list_of_words = [] 7 | for i in range(len(array)): 8 | if array[i] not in list_of_words: 9 | list_of_words.append(array[i]) 10 | 11 | most_counted = '' 12 | n_of_most_counted = None 13 | 14 | for i in range(len(list_of_words)): 15 | counted = array.count(list_of_words[i]) 16 | if n_of_most_counted == None: 17 | most_counted = list_of_words[i] 18 | n_of_most_counted = counted 19 | elif n_of_most_counted < counted: 20 | most_counted = list_of_words[i] 21 | n_of_most_counted = counted 22 | elif n_of_most_counted == counted: 23 | most_counted = None 24 | 25 | return most_counted 26 | 27 | def find_neighbors(point, data, labels, k=3): 28 | # How many dimentions do the space have? 
29 | n_of_dimensions = len(point) 30 | 31 | #find nearest neighbors 32 | neighbors = [] 33 | neighbor_labels = [] 34 | 35 | for i in range(0, k): 36 | # To find it in data later, I get its order 37 | nearest_neighbor_id = None 38 | smallest_distance = None 39 | 40 | for i in range(0, len(data)): 41 | eucledian_dist = 0 42 | for d in range(0, n_of_dimensions): 43 | dist = abs(point[d] - data[i][d]) 44 | eucledian_dist += dist 45 | 46 | eucledian_dist = np.sqrt(eucledian_dist) 47 | 48 | if smallest_distance == None: 49 | smallest_distance = eucledian_dist 50 | nearest_neighbor_id = i 51 | elif smallest_distance > eucledian_dist: 52 | smallest_distance = eucledian_dist 53 | nearest_neighbor_id = i 54 | 55 | neighbors.append(data[nearest_neighbor_id]) 56 | neighbor_labels.append(labels[nearest_neighbor_id]) 57 | 58 | data.remove(data[nearest_neighbor_id]) 59 | labels.remove(labels[nearest_neighbor_id]) 60 | return neighbor_labels 61 | 62 | # point - the point to predict label 63 | # data - data of other points 64 | # labels - labels of data points 65 | def k_nearest_neighbor(point, data, labels, k=3): 66 | 67 | # If two different labels are most found, continue to search for 1 more k 68 | while True: 69 | neighbor_labels = find_neighbors(point, data, labels, k=k) 70 | label = most_found(neighbor_labels) 71 | if label != None: 72 | break 73 | k += 1 74 | if k >= len(data): 75 | break 76 | 77 | return label 78 | -------------------------------------------------------------------------------- /K-Nearest-Neighbors/knn.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | 3 | 4 | public class dataPoint{ 5 | var xLoc: Float 6 | var yLoc: Float 7 | var dataClass: String 8 | 9 | init(x:Float,y:Float,c:String) 10 | { 11 | self.xLoc = x 12 | self.yLoc = y 13 | self.dataClass = c 14 | } 15 | 16 | public func calcDistance(x:Float,y:Float) -> Float 17 | { 18 | let forX = (pow((self.xLoc - x), 2)) 19 | let forY = 
(pow((self.yLoc - y), 2)) 20 | var dist = forX + forY 21 | dist = sqrt(dist) 22 | return dist 23 | } 24 | } 25 | 26 | 27 | public class KNNAlgorithm 28 | { 29 | var k:Int 30 | var dataPoints: [dataPoint] 31 | 32 | init(k: Int, dp: [dataPoint]) 33 | { 34 | self.k = k - 1 35 | self.dataPoints = dp 36 | 37 | } 38 | 39 | public func predict(x: Float, y: Float) -> String 40 | { 41 | var classesArray = [String]() 42 | var distArray = [Float]() 43 | var closests = [String]() 44 | 45 | for point in self.dataPoints { 46 | distArray.append(point.calcDistance(x: x, y: y)) 47 | classesArray.append(point.dataClass) 48 | } 49 | 50 | 51 | for _ in 0...k { 52 | let mindist = distArray.min()! 53 | let minindex = distArray.firstIndex(of: mindist)! 54 | let minDistClass = classesArray[minindex] 55 | closests.append(minDistClass) 56 | distArray.remove(at: minindex) 57 | } 58 | var counts: [String: Int] = [:] 59 | closests.forEach { counts[$0, default: 0] += 1 } 60 | 61 | if counts["A"] ?? 0 > counts["B"] ?? 0{ 62 | return "A" 63 | } else if counts["A"] ?? 0 < counts["B"] ?? 
0{ 64 | return "B" 65 | } else { 66 | return "N" 67 | } 68 | 69 | } 70 | } 71 | 72 | 73 | 74 | // TEST DATA 75 | var data = [dataPoint]() 76 | for i in 0...10 { 77 | data.append(dataPoint(x: -0.1 * Float(i), y: -0.1 * Float(10 - i), c: "A")) 78 | } 79 | for t in 0...10 { 80 | data.append(dataPoint(x: 0.1 * Float(t), y: 0.1 * Float(10 - t), c: "B")) 81 | } 82 | 83 | // INIT ALGORITHM 84 | let Knn = KNNAlgorithm(k: 3, dp: data) 85 | 86 | // PREDICTION 87 | Knn.predict(x:-0.1,y:-0.1) 88 | 89 | -------------------------------------------------------------------------------- /Neural-Network/Single-Layer-Neural-Net-without-ML-Libraries.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Neural Network without ML Libraries ##\n", 8 | "Neural networks are a system which is used in deep learning. I wrote a single-layer neural network without using any Machine Learning Libraries. I only used numpy for math and matplotlib for data visualization. " 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "metadata": {}, 14 | "source": [ 15 | "### Import Libraries ###" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": 12, 21 | "metadata": { 22 | "collapsed": true 23 | }, 24 | "outputs": [], 25 | "source": [ 26 | "import numpy as np" 27 | ] 28 | }, 29 | { 30 | "cell_type": "markdown", 31 | "metadata": {}, 32 | "source": [ 33 | "### Define Data ###\n", 34 | "I will use fake data in this example." 
35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": 2, 40 | "metadata": { 41 | "collapsed": true 42 | }, 43 | "outputs": [], 44 | "source": [ 45 | "# Data\n", 46 | "X = [[1,1,0,0,1,1,1,1],[1,1,0,0,0,1,1,1],[1,0,0,0,1,1,1,1],[0,1,1,1,0,0,0,1],[0,0,0,1,0,0,0,0],[0,0,0,1,1,0,0,0]]\n", 47 | "\n", 48 | "# Labels - where players choose to attack mostly\n", 49 | "Y = [1,1,1,0,0,0]\n", 50 | "\n", 51 | "# What are the different labels\n", 52 | "labels = [0, 1]" 53 | ] 54 | }, 55 | { 56 | "cell_type": "markdown", 57 | "metadata": {}, 58 | "source": [ 59 | "### Create A Neural Network ###\n", 60 | "A Neural Network can be simply created as three arrays: weights, biases, activations, because every layer is consist of matrix operations: Weight * a + bias.\n", 61 | "\n", 62 | "Note: NOTE: This function can create multilayer neural networks." 63 | ] 64 | }, 65 | { 66 | "cell_type": "code", 67 | "execution_count": 3, 68 | "metadata": { 69 | "collapsed": true 70 | }, 71 | "outputs": [], 72 | "source": [ 73 | "def create_neural_net(layer_array, input_dims):\n", 74 | " weights = []\n", 75 | " biases = []\n", 76 | " activations = []\n", 77 | " \n", 78 | " for i in range(len(layer_array)):\n", 79 | " node_num = layer_array[i][0]\n", 80 | " weights_of_layer = []\n", 81 | " biases_of_layer = []\n", 82 | " if i == 0:\n", 83 | " last_layer_node_number = input_dims\n", 84 | " else:\n", 85 | " last_layer_node_number = layer_array[i-1][0]\n", 86 | " \n", 87 | " for n in range(0,node_num):\n", 88 | " weights_of_node = []\n", 89 | " for l in range(0, last_layer_node_number):\n", 90 | " weights_of_node.append(1) \n", 91 | " weights_of_layer.append(weights_of_node)\n", 92 | " biases_of_layer.append(0)\n", 93 | " \n", 94 | " weights.append(weights_of_layer)\n", 95 | " biases.append(biases_of_layer)\n", 96 | " activations.append(layer_array[i][1])\n", 97 | " return [weights, biases, activations]" 98 | ] 99 | }, 100 | { 101 | "cell_type": "markdown", 102 | "metadata": {}, 103 | 
"source": [ 104 | "### Test of create_neural_net() function ###" 105 | ] 106 | }, 107 | { 108 | "cell_type": "code", 109 | "execution_count": 4, 110 | "metadata": {}, 111 | "outputs": [ 112 | { 113 | "name": "stdout", 114 | "output_type": "stream", 115 | "text": [ 116 | " weights: [[[1, 1, 1, 1, 1, 1, 1, 1], [1, 1, 1, 1, 1, 1, 1, 1]]] \n", 117 | "\n", 118 | " biases: [[0, 0]] \n", 119 | "\n", 120 | " activations: ['sigmoid']\n" 121 | ] 122 | } 123 | ], 124 | "source": [ 125 | "layer_array = [[len(labels), 'sigmoid']]\n", 126 | "input_dims = 8\n", 127 | "neural_net = create_neural_net(layer_array, input_dims)\n", 128 | "\n", 129 | "print(' weights:',neural_net[0],'\\n\\n biases:',neural_net[1],'\\n\\n activations:', neural_net[2])" 130 | ] 131 | }, 132 | { 133 | "cell_type": "markdown", 134 | "metadata": {}, 135 | "source": [ 136 | "### Activation Functions ###\n", 137 | "Activation functions are functions that prevent values from getting too low or too high. There are many activation functions but I will wrote two of them: sigmoid and relu." 138 | ] 139 | }, 140 | { 141 | "cell_type": "code", 142 | "execution_count": 5, 143 | "metadata": { 144 | "collapsed": true 145 | }, 146 | "outputs": [], 147 | "source": [ 148 | "def sigmoid(x):\n", 149 | " return 1 / (1 + np.exp(-x))\n", 150 | " \n", 151 | "def sigmoid_deriv(x):\n", 152 | " return x * (1 - x)\n", 153 | "\n", 154 | "def relu(x):\n", 155 | " if x < 0:\n", 156 | " return 0\n", 157 | " else:\n", 158 | " return x" 159 | ] 160 | }, 161 | { 162 | "cell_type": "markdown", 163 | "metadata": {}, 164 | "source": [ 165 | "### Predict Using The Neural Network ###\n", 166 | "NOTE: Predict and predict_ratio functions are designed to be used both for single-layer and multilayer neural nets. 
" 167 | ] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "execution_count": 6, 172 | "metadata": { 173 | "collapsed": true 174 | }, 175 | "outputs": [], 176 | "source": [ 177 | "def predict_ratio(data, neural_net):\n", 178 | " weights = neural_net[0]\n", 179 | " biases = neural_net[1]\n", 180 | " activations = neural_net[2]\n", 181 | " \n", 182 | " layer_num = len(weights)\n", 183 | " \n", 184 | " for l in range(0, layer_num):\n", 185 | " data = np.dot(weights[l], data)\n", 186 | " for t in range(len(data)):\n", 187 | " data[t] += biases[l][t]\n", 188 | " if activations[l] == 'sigmoid':\n", 189 | " data = sigmoid(data)\n", 190 | " elif activations[l] == 'relu':\n", 191 | " data = relu(data)\n", 192 | " else:\n", 193 | " # If not identified, do it with sigmoid\n", 194 | " data = sigmoid(data)\n", 195 | " print('activation function', activations[l], 'cannot be found. Sigmoid is used') \n", 196 | " return data\n", 197 | "\n", 198 | "def predict(data, neural_net):\n", 199 | " data = predict_ratio(data, neural_net)\n", 200 | " \n", 201 | " class_num = len(data)\n", 202 | " \n", 203 | " highest_class = None\n", 204 | " highest_class_probability = -1\n", 205 | " \n", 206 | " for i in range(0, class_num):\n", 207 | " if highest_class == None:\n", 208 | " highest_class = i\n", 209 | " highest_class_probability = data[i]\n", 210 | " elif data[i] > highest_class_probability:\n", 211 | " highest_class = i\n", 212 | " highest_class_probability = data[i]\n", 213 | " \n", 214 | " return highest_class, highest_class_probability" 215 | ] 216 | }, 217 | { 218 | "cell_type": "code", 219 | "execution_count": 7, 220 | "metadata": {}, 221 | "outputs": [ 222 | { 223 | "data": { 224 | "text/plain": [ 225 | "(0, 0.9933071490757153)" 226 | ] 227 | }, 228 | "execution_count": 7, 229 | "metadata": {}, 230 | "output_type": "execute_result" 231 | } 232 | ], 233 | "source": [ 234 | "# Of course, this prediction is absulutly wrong because we didn't trained the network yet\n", 235 | 
"predict(X[1], neural_net)" 236 | ] 237 | }, 238 | { 239 | "cell_type": "markdown", 240 | "metadata": {}, 241 | "source": [ 242 | "### Train Network ###\n", 243 | "The most important feature of a neural net is learning.\n", 244 | "`train_network()` function is the function of learning, but how a neural net learns?\n", 245 | "\n", 246 | "#### Backpropagation ####\n", 247 | "Backpropagation is the most used technique for training. It changes weights by the weight's error. For example, if a weight changes the prediction too much in a wrong way, Backpropagation will decrease it. If a weight changes the prediction not enough in a right way, Backpropagation will increase it.\n", 248 | " \n", 249 | "For more info visit: https://en.wikipedia.org/wiki/Backpropagation\n", 250 | "\n", 251 | "#### Gradient Descent ####\n", 252 | "Gradient Descent is a optimization function that we use in deep learning.\n", 253 | "\n", 254 | "For more info watch: https://www.youtube.com/watch?v=IHZwWFHWa-w\n", 255 | "\n", 256 | "NOTE: `train_network()` function can only be used in single-layer networks." 
257 | ] 258 | }, 259 | { 260 | "cell_type": "code", 261 | "execution_count": 8, 262 | "metadata": {}, 263 | "outputs": [], 264 | "source": [ 265 | "def train_network(X, Y, labels, neural_net, epochs=1000):\n", 266 | " for epoch in range(0, epochs):\n", 267 | " for d in range(0, len(X)):\n", 268 | " prediction = predict_ratio(X[d], neural_net)\n", 269 | " \n", 270 | " # Calculate total error per label\n", 271 | " true_prediction = []\n", 272 | " for i in range(0, len(labels)):\n", 273 | " true_prediction.append(0)\n", 274 | " true_prediction[labels.index(Y[d])] = 1\n", 275 | " \n", 276 | " errors = []\n", 277 | " for t in range(len(prediction)):\n", 278 | " errors.append(true_prediction[t] - prediction[t]) \n", 279 | " adjust_deriv = errors * sigmoid_deriv(prediction)\n", 280 | " \n", 281 | " for k in range(0, len(adjust_deriv)):\n", 282 | " adjustment = np.dot(X[d], adjust_deriv[k])\n", 283 | " neural_net[0][0][k] += adjustment\n", 284 | " return neural_net" 285 | ] 286 | }, 287 | { 288 | "cell_type": "code", 289 | "execution_count": 9, 290 | "metadata": {}, 291 | "outputs": [], 292 | "source": [ 293 | "neural_net = train_network(X, Y, labels, neural_net, epochs=1000)" 294 | ] 295 | }, 296 | { 297 | "cell_type": "markdown", 298 | "metadata": {}, 299 | "source": [ 300 | "### Results  ###\n", 301 | "You may see, neural network can classify the data correctly with a good accuracy rate, but there can be problem named 'Overfitting'. Overfitting means a neural network is fit on the data it trained and cannot classify other possible data points. 
For better results, train the net with more data and test with data not used in training" 302 | ] 303 | }, 304 | { 305 | "cell_type": "code", 306 | "execution_count": 10, 307 | "metadata": { 308 | "scrolled": true 309 | }, 310 | "outputs": [ 311 | { 312 | "name": "stdout", 313 | "output_type": "stream", 314 | "text": [ 315 | "(1, 0.9919398375371878)\n", 316 | "(1, 0.9936373425420446)\n", 317 | "(1, 0.9925307416847557)\n", 318 | "(0, 0.9903682943291514)\n", 319 | "(0, 0.9836167677309535)\n", 320 | "(0, 0.9876082070557368)\n" 321 | ] 322 | } 323 | ], 324 | "source": [ 325 | "for i in range(len(X)):\n", 326 | " print(predict(X[i], neural_net))" 327 | ] 328 | }, 329 | { 330 | "cell_type": "markdown", 331 | "metadata": {}, 332 | "source": [ 333 | "You can see the accuracy rate decreses when predicting a data point which is different than training data." 334 | ] 335 | }, 336 | { 337 | "cell_type": "code", 338 | "execution_count": 11, 339 | "metadata": {}, 340 | "outputs": [ 341 | { 342 | "data": { 343 | "text/plain": [ 344 | "(0, 0.589227580166354)" 345 | ] 346 | }, 347 | "execution_count": 11, 348 | "metadata": {}, 349 | "output_type": "execute_result" 350 | } 351 | ], 352 | "source": [ 353 | "predict([1,1,0,1,0,1,0,1], neural_net)" 354 | ] 355 | }, 356 | { 357 | "cell_type": "code", 358 | "execution_count": null, 359 | "metadata": { 360 | "collapsed": true 361 | }, 362 | "outputs": [], 363 | "source": [] 364 | } 365 | ], 366 | "metadata": { 367 | "kernelspec": { 368 | "display_name": "Python 3", 369 | "language": "python", 370 | "name": "python3" 371 | }, 372 | "language_info": { 373 | "codemirror_mode": { 374 | "name": "ipython", 375 | "version": 3 376 | }, 377 | "file_extension": ".py", 378 | "mimetype": "text/x-python", 379 | "name": "python", 380 | "nbconvert_exporter": "python", 381 | "pygments_lexer": "ipython3", 382 | "version": "3.6.3" 383 | } 384 | }, 385 | "nbformat": 4, 386 | "nbformat_minor": 2 387 | } 388 | 
-------------------------------------------------------------------------------- /Neural-Network/single-layer-neural-net-without-ml-libraries.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | # Create a neural net 4 | def create_neural_net(layer_array, input_dims): 5 | weights = [] 6 | biases = [] 7 | activations = [] 8 | 9 | for i in range(len(layer_array)): 10 | node_num = layer_array[i][0] 11 | weights_of_layer = [] 12 | biases_of_layer = [] 13 | if i == 0: 14 | last_layer_node_number = input_dims 15 | else: 16 | last_layer_node_number = layer_array[i-1][0] 17 | 18 | for n in range(0,node_num): 19 | weights_of_node = [] 20 | for l in range(0, last_layer_node_number): 21 | weights_of_node.append(1) 22 | weights_of_layer.append(weights_of_node) 23 | biases_of_layer.append(0) 24 | 25 | weights.append(weights_of_layer) 26 | biases.append(biases_of_layer) 27 | activations.append(layer_array[i][1]) 28 | return [weights, biases, activations] 29 | 30 | # Activations 31 | def sigmoid(x): 32 | return 1 / (1 + np.exp(-x)) 33 | 34 | def sigmoid_deriv(x): 35 | return x * (1 - x) 36 | 37 | def relu(x): 38 | if x < 0: 39 | return 0 40 | else: 41 | return x 42 | 43 | # prediction 44 | def predict_ratio(data, neural_net): 45 | weights = neural_net[0] 46 | biases = neural_net[1] 47 | activations = neural_net[2] 48 | 49 | layer_num = len(weights) 50 | 51 | for l in range(0, layer_num): 52 | data = np.dot(weights[l], data) 53 | for t in range(len(data)): 54 | data[t] += biases[l][t] 55 | if activations[l] == 'sigmoid': 56 | data = sigmoid(data) 57 | elif activations[l] == 'relu': 58 | data = relu(data) 59 | else: 60 | # If not identified, do it with sigmoid 61 | data = sigmoid(data) 62 | print('activation function', activations[l], 'cannot be found. 
Sigmoid is used') 63 | return data 64 | 65 | def predict(data, neural_net): 66 | data = predict_ratio(data, neural_net) 67 | 68 | class_num = len(data) 69 | 70 | highest_class = None 71 | highest_class_probability = -1 72 | 73 | for i in range(0, class_num): 74 | if highest_class == None: 75 | highest_class = i 76 | highest_class_probability = data[i] 77 | elif data[i] > highest_class_probability: 78 | highest_class = i 79 | highest_class_probability = data[i] 80 | 81 | return highest_class, highest_class_probability 82 | 83 | # Training 84 | def train_network(X, Y, labels, neural_net, epochs=1000): 85 | for epoch in range(0, epochs): 86 | for d in range(0, len(X)): 87 | prediction = predict_ratio(X[d], neural_net) 88 | 89 | # Calculate total error per label 90 | true_prediction = [] 91 | for i in range(0, len(labels)): 92 | true_prediction.append(0) 93 | true_prediction[labels.index(Y[d])] = 1 94 | 95 | errors = [] 96 | for t in range(len(prediction)): 97 | errors.append(true_prediction[t] - prediction[t]) 98 | adjust_deriv = errors * sigmoid_deriv(prediction) 99 | 100 | for k in range(0, len(adjust_deriv)): 101 | adjustment = np.dot(X[d], adjust_deriv[k]) 102 | neural_net[0][0][k] += adjustment 103 | return neural_net 104 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Machine Learning Without Libraries 2 | 3 | Nowadays, using simple machine learning algorithms is as easy as `import knn from ...` but it doesn't make sense if you really want to learn how this algorithms work and how to write them. Therefore, I'm trying to write this algorithms using none of ML Libraries. (I only use the mathematics libraries and sometimes plotting libraries.) 
4 | -------------------------------------------------------------------------------- /Support-Vector-Machine/Support_Vector_Machine_from_Scratch.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "## Support Vector Machine from Scratch ##\n", 8 | "I tried to create a support vector machine for linear classification without using any ML libraries. I only used numpy for math and matplotlib for plotting graphs." 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "metadata": {}, 14 | "source": [ 15 | "### Import Libraries ###" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": 22, 21 | "metadata": { 22 | "collapsed": true 23 | }, 24 | "outputs": [], 25 | "source": [ 26 | "import numpy as np\n", 27 | "import matplotlib.pyplot as plt\n", 28 | "\n", 29 | "# Use Matplotlib in jupyter\n", 30 | "%matplotlib inline" 31 | ] 32 | }, 33 | { 34 | "cell_type": "markdown", 35 | "metadata": {}, 36 | "source": [ 37 | "### Define Data ###\n", 38 | "There are the data points to train SVM. X array is the coordinates of points and Y array is the labels of points." 
39 | ] 40 | }, 41 | { 42 | "cell_type": "code", 43 | "execution_count": 23, 44 | "metadata": { 45 | "collapsed": true 46 | }, 47 | "outputs": [], 48 | "source": [ 49 | "# Input data - [X coordinate, Y coordinate]\n", 50 | "X = np.array([[1.6,0.3], [1.8,0.5], [2.0,0.7], [2.2,0.4], [2.4,0.6], [2.3,0.5], [2.1,0.5],\n", 51 | " [1.7,1.7], [2.5,1.0], [1.0,3.0], [2.0,1.5], [1.5,1.5], [1.5,2.0], [1.0,2.5],\n", 52 | " [1.6,1.6], [2.4,0.9], [0.9,2.9], [1.9,1.4], [1.0,1.4], [1.4,1.9], [0.9,2.4],\n", 53 | " [1.5,1.7], [2.3,1.1], [0.4,1.0], [1.0,0.7], [1.2,1.5], [1.2,1.0], [1.0,1.1],\n", 54 | " [1.0,1.7], [1.3,1.1], [0.7,1.0], [0.4,0.7], [0.2,1.5], [0.2,1.0], [0.4,1.1],\n", 55 | " [1.0,0.5], [1.3,0.1], [0.7,0.3], [0.4,0.4], [0.2,0.5], [0.2,0.1], [0.4,0.1],\n", 56 | " [1.0,2.4], [1.3,2.1], [0.7,2.0], [0.4,2.7], [0.2,2.5], [0.2,2.0], [0.4,2.1],\n", 57 | " [3.4,2.0], [3.5,2.1], [3.6,2.3], [3.4,2.4], [3.5,2.5], [3.1,2.6], [3.3,2.7],\n", 58 | " [2.0,3.1], [3.5,1.0], [4.0,1.5], [3.0,3.0], [3.0,2.0], [2.5,2.5], [3.3,1.5],\n", 59 | " [3.9,2.5], [3.9,2.0], [3.8,3.0], [3.8,2.9], [3.9,2.7], [3.9,2.5], [3.9,2.7],\n", 60 | " [2.1,3.1], [3.6,1.1], [3.8,1.7], [3.2,3.1], [2.9,2.1], [2.6,2.4], [3.2,1.4],\n", 61 | " [4.0,0.1], [3.9,0.2], [3.9,0.3], [3.7,0.5], [3.9,0.7], [3.9,0.4], [3.7,0.4]])\n", 62 | "\n", 63 | "# Labels (1 or -1)\n", 64 | "Y = np.array([-1, -1, -1, -1, -1, -1, -1,\n", 65 | " -1, -1, -1, -1, -1, -1, -1,\n", 66 | " -1, -1, -1, -1, -1, -1, -1,\n", 67 | " -1, -1, -1, -1, -1, -1, -1,\n", 68 | " -1, -1, -1, -1, -1, -1, -1,\n", 69 | " -1, -1, -1, -1, -1, -1, -1,\n", 70 | " -1, -1, -1, -1, -1, -1, -1,\n", 71 | " 1, 1, 1, 1, 1, 1, 1,\n", 72 | " 1, 1, 1, 1, 1, 1, 1,\n", 73 | " 1, 1, 1, 1, 1, 1, 1,\n", 74 | " 1, 1, 1, 1, 1, 1, 1,\n", 75 | " 1, 1, 1, 1, 1, 1, 1])" 76 | ] 77 | }, 78 | { 79 | "cell_type": "markdown", 80 | "metadata": {}, 81 | "source": [ 82 | "### Plot data on a 2d graph ###\n", 83 | "Let's plot the data to see the data points using matplotlib." 
84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": 24, 89 | "metadata": {}, 90 | "outputs": [ 91 | { 92 | "data": { 93 | "text/plain": [ 94 | "[]" 95 | ] 96 | }, 97 | "execution_count": 24, 98 | "metadata": {}, 99 | "output_type": "execute_result" 100 | }, 101 | { 102 | "data": { 103 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXcAAAD8CAYAAACMwORRAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzt3XmYVNWZx/HvCyIqBJkoKMpmRkZF\nREVUcIloYgKCIBEFNSAdF3Bf0FFMAnFL1BGN2ygugKBEDAZFBM2iaFAgtCgogwtRWlACrQiKGhE5\n88epSrqL6q7b3VV1qm79Ps/TD923bnf9ngv1cvrUe88x5xwiIhIvjUIHEBGR7FNxFxGJIRV3EZEY\nUnEXEYkhFXcRkRhScRcRiSEVdxGRGFJxFxGJIRV3EZEY2i7UE++6666uY8eOoZ5eRKQovfrqqx87\n51plOi9Yce/YsSPl5eWhnl5EpCiZWUWU8zQtIyISQyruIiIxpOIuIhJDKu4iIjGk4i4iEkMq7iIi\nMaTiLiISQyruIiIxpOIuIhJDKu5SUtaNX8q68UtrfHzw+PkMHj8/j4nir2LoMCqGDgsdo+SouIuI\nxJCKu4hIDKm4i4jEULBVIUXyIXV+ffP7G6sdH7lmHQBN2jQDYOH76wG2mXefNqJnTnPGSer8+peL\nFqU93mHK5LxlKkUauYuIxJA554I8cffu3Z3Wc5d8S47YW4/omvbx5IhdI/XqBr72LgAzDu5U5+9N\njtiLcaQ+7dqrARg89qbASf7NzF51znXPdF7GkbuZ7WBmfzOzJWa2zMyuTXNOUzObZmYrzGyhmXWs\nX2wREcmGKNMyXwPHOecOBA4CeptZj5RzzgI+dc7tDdwO3JzdmCIiUhcZi7vzNiW+bJL4SJ3LGQA8\nnPh8OvADM7OspZTi8vHH8M03oVOIlLRIc+5m1hh4FdgbuMc5d1XK428CvZ1zqxNf/x043Dn3cU0/\nU3PudZOc88ykPnOiWbVpExx0EBx9NEyYAPo/viil/nubv+ELAHq2bFbtePB/b1mWnGNPWv1/bwLQ\ntnOXasdDzsFnbc4dwDn3rXPuIKAtcJiZdUk5Jd0reJv/NczsXDMrN7PyysrKKE8txaZ5c/jpT2HS\nJLjuutBpREpWnbtlzGws8IVz7tYqx54DfuWcm29m2wH/AFq5Wn64Ru4x5hz87Ge+wE+YAGVloRNJ\nAzWkW6aYLbvmSQD2//VJgZP8Wza7ZVqZWcvE5zsCPwTeSjltJnBm4vNBwPO1FXaJOTO4/344/ng4\n91z44x9DJxIpOVGmZdoAL5jZUmAR8Cfn3Cwzu87M+ifOeQjYxcxWAJcDV9fws6RUNGkC06dD584w\naBAsWRI6kUhJybj8gHNuKXBwmuNjqnz+T+CU7EaToteiBcyeDT16wAknwIIF0K5d6FQiJUF3qBaI\n2tYYr6qmOysL2htvwFFH+cI+bx60bBk6kUhaNa1FtP1eO1c7HvJ1mNVuGZEGOeAA+MMf4J134Cc/\ngc2bQycSiT2N3CV/pkyBYcN8q+TkyeqBl4KXaS2iEKKO3LXkr+TP0KHwwQfwi19Ahw5www2hE4nE\nloq75Nc118DKlXDjjdC+vW+VFJGsU3GX/DKDe++F
Dz+E88+Htm19J42IZJWKu+TfdtvB44/DMcfA\nqafCiy/CIYeETiWyjUKaa68rdctIGM2bw6xZsOuu0Levn6oRkaxRcZdw2rTxNzl9/TX06QOffho6\nkQQw7dqrt1mNsVj0nPNXes75a+gYaam4S1idO8OTT8J778FJJ/lCLyINpuIu4R1zDDz8MLz0Epx5\nJmzdGjqRSNHTG6pSGIYM8T3wV13le+Bv1k6NIg1RUsU96rxeIe10XlKuvNK/sXrLLb4H/oILQieS\nHKhpt6PU44X4OkydX39/h++kPT6/z9F5y1STkiruUuDM4M47YfVquPhiv9BY//6Zv09EtqG1ZaTw\nfPEFHHssvPkmzJ0Lhx0WOpHkUHLEXogj9UySI/Z8jtS1KqQUr2bN4OmnYffdoV8/+PvfQyeSWswY\nt5gZ4xaHjiEpVNylMO22G8yZA99+63vgP/44dCKRoqLiLoVrn31g5kzfRTNgAHz1VehEIkVDb6hK\nYTvySHjkEb8GzdChMG0aNG4cOpVkUTHOtScVQldMTVTcpfANGgTjxsHll/t2ydtuC52opKXOr3/0\n7oa0xweO6pa3TLItFXcpDpde6nvgb7/d3+R0ySWhE4kUNBV3KQ5mfsS+ahVcdplfB/7kk0Onyq6J\nff2fZc+EzZFB6og8OWIPMVKvGDoMgA5TJuf9uQFefvRgAI4847Ugz1+bjG+omlk7M3vBzJab2TIz\n22bIZGa9zGyjmb2e+BiTm7hS0ho3hkcfhcMP9/uwvvJK6EQiBStKt8wWYJRzbj+gB3CBmXVOc95f\nnXMHJT6uy2pKkaQdd/QdNG3b+rtX33kndCKRgpSxuDvn1jjnFic+/xxYDuyZ62AiNWrVyvfAm/ke\n+HXrQicSKTh1Wn7AzDoCLwFdnHOfVTneC3gCWA18BFzhnFtW28+q7/ID68YvjXReMW+PlU5ybjGT\nUHOPQSxc6JcpOOAAeOEF2Gmn0InqJjnHnlQxz//Z4ajqxwt8Dj6fUl8HXy5aBMBOhx5a7XiuXgfJ\nOfakf7bxZXCHNS2qHc/lHHzWlx8ws+b4An5p1cKesBjo4Jw7ELgLeLKGn3GumZWbWXllZWXUpxZJ\n7/DDYepUWLQITj/d380qIkDEkbuZNQFmAc855zI2GZvZSqC7c67Ge8a1cJhkzd13w0UX+SWC77rL\nT9cUo1q6ZYp5ca2GKnu2DICJvSdu81gpdstEHblnbIU0MwMeApbXVNjNbHdgrXPOmdlh+N8IPqlj\nZpH6ufBC3wM/bhx07AhXXBE6kUhwUfrcjwSGAm+Y2euJY9cA7QGcc/cBg4DzzGwL8BUwxIVaS1hK\n0y23+B74K6/068APHhw6kUhQGYu7c24eUOvvuc65u4G7sxVKpM4aNfL7sH70EQwbBm3awPe/HzqV\nSDDarKNADB4/v9bHV25/KwCd27So9bx085IlZf16OOIIWLvW3+S0336hE9VLTVvRte3cpdrxOM7B\nJ+fYk8rX+jrRfbfq08yl+m9dm3VIafrud30PfNOmvgf+H/8InUgkCI3cJZ7Ky+GYY2DffeHFF6F5\n89CJGkTdMvUcqRfJej11oZG7lLbu3f3a76+/7t9c3bIldCKRvFJxl/jq1w/+939h9mzfA68GLikh\nWvJX4m3ECN8Df9NNvgd+9OjQiUTyQnPuEn9bt/ot+qZO9Vv2nXFG6ESSKyWwXk/W7lAVKXqNGsGE\nCb4HvqzM98Afd1zoVCI5pZG7lI4NG/yG2x9+CPPmQZcumb9Hipu6ZURKQMuWvgd+p53ghBP8SF4k\nplTcpbS0bw/PPAOffuoL/Gepq1eLxIOKu5Segw+G6dPhzTfhlFPgm29CJxLJOhV3KU0//jHcfz/8\n8Y++XVI98PFU9kys5tvrQt0yUrp+9jOoqIDrrvM98GPGhE4kkjUq7lLafvUrX+DHjvXz8cOHh04k\nkhUq7lLazPz0
zIcfwjnnwB57wI9+FDpV3s0YtxiAgaO6BU5SN8mlsqeN6Bk4SeHRnLvI9tv7N1j3\n2w8GDYIlS0InEmkwFXcRgJ139guMtWjhWyRXrQqdSKRBVNxFktq29Tc5bdrkC/yGDaETidRbSS0/\n8Ori0yOdd0i3qTlOUlgqhg6LdF6HKZNznKRA/OUv0Lu334N1zhw/bRMzyTn2pI/e9f+R7dGpZbXj\nhTYHn7od5cL31wNw+F7frXY8znPwWn5ApL5+8AN46CF4/nk4+2z1wEtRKqmRu0id3HAD/PKX8POf\n+89jLFO3TMiulIGvvQvAjIM7bfNY6G6ZEF1GWRu5m1k7M3vBzJab2TIzuyTNOWZmd5rZCjNbamaF\n9bucSH38/Od+5H7jjfDAA6HTiNRJlD73LcAo59xiM/sO8KqZ/ck5939VzukDdEp8HA7cm/hTpHiZ\n+W36Vq+G887zb7j26RM6lUgkGUfuzrk1zrnFic8/B5YDe6acNgCY7LwFQEsza5P1tCL51qQJPP44\ndO3qFxlbvDjz94gUgDrNuZtZR+AloItz7rMqx2cBNznn5iW+/gtwlXOuxkn1+s65T5w4MdJ5ZWVl\ndf7ZDZXagVCTQutAaKiyZ6Nd64m9o/3dFaQ1a6BHD9i8GebP92vRxFjIrpTkHHvS/A1fANCzZbNq\nx9PNwedaIXQZZb1bxsyaA08Al1Yt7MmH03zLNv9rmNm5ZlZuZuWVlZVRn1okvDZtfFvkV1/5HvhP\nPw2dSKRWkUbuZtYEmAU855y7Lc3j44G5zrnfJb5+G+jlnFtT089Ut4wUpblz/XLBPXr45YKbNg2d\nKC8KtVsmtGLvljHgIWB5usKeMBMYluia6QFsrK2wixStXr1g0iR46SW/guTWrYEDiaQXpVvmSGAo\n8IaZvZ44dg3QHsA5dx8wGzgBWAF8CeR/wlskX047DT74AK6+2i8TfPPNoROJbCNjcU+8SZpuTr3q\nOQ64IFuhRAref/+3Xwf+llugQwc4//zQiUSq0R2qklFq90JNCnFONKe2bIGBA/1qkjNmQP/+oRNJ\nCdDaMiK5tt128Nhj0K0bDBkCf/tb6EQi/6KdmCSjkhuR10WzZjBrFvTsCf36wYIF8L3vhU4lopG7\nSIPttpvvgf/2W788wSefhE4kouIukhX77ANPPeXfZO3f39/sJBKQirtIthx1FEyZ4pcnGDZMPfAS\nlIq7SDadcgrceqvfcPuKK0KnkRKmN1RFsu2yy2DlSrj9dt8Df8k2WyCI5JyKu0i2mfnCvnq1L/Tt\n2sFPfhI6ldRDcsXTYlzRVNMyIrnQuDE88ggcfjiccQa88kroRFJiVNxFcmWnnWDmTL+DU//+8G60\nO31FskHFXSSXWrXyPfBmvgd+3brQiaREaM69SLy6+PRI5x3SbWqOk6QxsW+088qe2fZbC3hnrazZ\ne294+mk49lg48UR44QU/qpeCk7qrWPna8rTHi2EOXiN3kXzo0QOmToVFi+D00/3drCI5pFUhRfLp\nrrvg4ovhwgvhzjv9dI0UrELslom6KqSmZUTy6aKLfA/8bbf5TbZHjQqdSGJKxV0k3/7nf2DVKn8H\na7t2cOqpoRNJDKm4i+Rbo0YweTJ89BEMHQpt2sDRR4dOJTGjOXeRUD75BI480rdHvvIK7Ltv6ERS\nBDTnLgVjxrjFkc4bOKpbjpMUmF128T3wPXr4Hvj582H33UOnkphQK6RISHvtBc8840fv/frBpk2h\nE0lMaOQuOVdyI/K66t4dpk2DAQP8XqxPPun3Zy0RhdhumA3JGw+D3FhIhJG7mU0ws3Vm9mYNj/cy\ns41m9nriY0z2Y4rEXL9+cM89fhR/4YUQ6L0wiY8ow4NJwN3A5FrO+atzrl9WEomUqpEjfQ/8zTf7\ndeBHjw6dSIpYxuLunHvJzDrmPoqI8OtfwwcfwDXXQPv2frlgkXrI1sReTzNbAn
wEXOGcW5aln7uN\n1AV8ahK3+buGmnbt1ZHOGzz2prr/8AYsHJZrg8fPj3TetBE9c5wkokaNYOJE3wNfVgZ77OEXHIuR\nOC3OVVXq4n4bNixMezxfc/DZ6JZZDHRwzh0I3AU8WdOJZnaumZWbWXllZWUWnlokhpo2hRkzoFMn\nGDgQluVsrCQxFukmpsS0zCznXJcI564EujvnPq7tPN3EJJJBRYXvgW/SBBYs8KP4IrRu/FIAWo/o\nmvZxdcvUTdSbmBo8cjez3c380nZmdljiZ37S0J8rUvI6dIDZs2H9eujbFz7/PHQiKSJRWiF/B8wH\n9jGz1WZ2lpmNNLORiVMGAW8m5tzvBIa4UGsaiMTNwQfD9OnwxhtwyinwzTehE0mRiNItc1qGx+/G\nt0qKSC707g3jx8PZZ/t2yQcf1DrwklHp3AZX5Ap6m72ASmKbPoCzzvJz8Ndf76drxhTuvYLJOfak\nze9vTHs8OQcft7n2pNCvRRV3kWJx7bW+wI8d6wv8mWeGTiQFTEv+ihSTzZvhhBPgxRf9m63HHx86\nUUaZumWkbvLWLSMiebT99vDEE7DffnDyybBkSehEUqBU3EWKzc47+1F7ixa+RXL16tCJpACpuIsU\no7ZtfYH/7DO/0cfGjaETSYFRcRcpVl27+imat97yUzSbN4dOlFbrEV013x6AirtIMTv+eN/3/pe/\nwDnnaB14+Re1QooUuzPP9MsEjxnjlwm+/vrQiaQAqLiLxMEvfuF74G+4wffAn3126EQSmIq7SByY\nwb33+s6ZkSNhzz39G61SsjTnLhIXTZrA738PBxzgFxlbvDh0IglIxV0kTr7zHb/J9i67+B74iorQ\niSQQFXeRuNljD98D/9VXfmrm009DJ5IAVNxF4mj//f1WfStW+K36vv46dCLJMxV3kbg69liYNMkv\nMjZ8OGzdGjpR/k3sG30D95hRt4xInJ1+uu+BHz3at0jedFPoRJInKu4icXfVVbByJdx8sy/w550X\nOpHkgYq7SNyZwd13w4cfwoUX+kXHTjwxdCrJsZIq7jPGRev7HTiqW46T1F0ut5Mb+Nq7kc6bcXCn\nOv/sOCt7Ntq1Loht5LbbDh57DI45BoYMgblz4dBDQ6fKvtT59Yp56Y+XPZOfPAHpDVWRUtGsGcya\nBa1bQ79+8N57oRNJDmmbPZFS89ZbcMQR0KoVvPKKv+GpSCV/o63xN9bkiD1GI/WsbbNnZhPMbJ2Z\nvVnD42Zmd5rZCjNbamaFN6chIv+2777w1FP+7tUBA/zNThI7UaZlJgG9a3m8D9Ap8XEucG/DY4lI\nTh19NEyeDC+/DMOGlWYPfMxlLO7OuZeA9bWcMgCY7LwFQEsza5OtgCKSI6eeCrfeCtOnw5VXhk4j\nWZaNbpk9gVVVvl6dOLYmCz97GxVDh0U6r8OUyVn93igGj58f6bxpI3rW6+c3xLrxSyOdp+3Q8ufV\nxadHOu+QblNzF+Lyy30P/G23+R74iy/O3XNlQWrXWEViYbTU4/+ag4/RXHtdZaO4W5pjad+lNbNz\n8VM3tG/fPgtPLSINYga//a1fB/7SS6FdO78WjRS9SN0yZtYRmOWc65LmsfHAXOfc7xJfvw30cs7V\nOnJXt4xIAfnySzjuOFiyBJ5/Hnrm/7fL+sjYLRNDWeuWiWAmMCzRNdMD2JipsItIgdlpJ3j6ab+D\n04knwrvRbmyTwhWlFfJ3wHxgHzNbbWZnmdlIMxuZOGU28B6wAngAOD9naUUkd1q1gjlz/Od9+kBl\nZdg80iAZ59ydc6dleNwBF2QtkYiE06mTH8Efd5wfwT//vB/VS9HRHaoiOTLt2qsjnTd4bAEuwztj\nBpx8sr/Jafp0aNw4dCJJyOecu4jEzcCBvovmySfhsssg0CBQ6q+kVoUUyaeCHJHXxcUX+x7422/3\nPfCjRoVOJHWg4i4iNbv1Vli1Cq64Atq3h1
NOCZ1IIlJxF5GaNWoEU6bAmjUwdCi0aQNHHRU6lUSg\nOXcRqd0OO/hVJDt0gP79/ZLBUvBU3EUks1128T3wTZr4Hvi1a0MnkgxU3EUkmu99z+/ktHat38np\niy9CJ5JaqLiLSHSHHgrTpsHixX4v1i1bQicKa2LfbfdnLRAq7iJSNyeeCHfd5UfxF12kHvgCpW4Z\nEam788/32/Tdcot/o/XqaHfjSv6ouItI/fzmN/DBBzB6tO+BPz3a5iOSH6VV3KPOjRXg7i0zxi2O\ndN7AUdqfvKqyZ6Ot8z2x98TMJ0l1jRrBpEm+B374cNhjD+jVK3CoHEutIRXz0h8vgBqiOXcRqb+m\nTf0iY3vvDSedBMuWhU4kCVoVUkQarqICevTwffALFvhRfClIjtjzOFLXqpAikj8dOsAzz8D69dC3\nL3z+eehEJU/FXUSyo1s3+P3v4Y03/AJj33wTOlFJU3EXkezp0wfuvReeew7OO0898AGVVreMiOTe\nOef4Ofgbb/TTNb/8ZehEuVMAXTE1UXEXyZGSbl+9/nrfAz9mjO+BP/PM0IlKjoq7iGSfGTz4IHz4\nIZx9Nuy5J/zwh6FTlRS1QopI7mzc6Df3qKiAefOga9fQiaoZPH4+ANNG9AycJLqstkKaWW8ze9vM\nVpjZNotImNlwM6s0s9cTH2fXJ7SIxMzOO8Ps2dCiBZxwAqxeHTpRychY3M2sMXAP0AfoDJxmZp3T\nnDrNOXdQ4uPBLOcUkWLVrp3vgf/sM1/gN24MnagkRBm5HwascM6955zbDDwGDMhtLBGJlQMPhCee\ngOXL4eSTYfPm0IliL8obqnsCq6p8vRo4PM15J5vZ94F3gMucc6vSnNNgA197N9J5Mw7ulIunD2ba\ntdGWVB089qYcJ8mzHC/2tm780kjntR6x7Vxxyf6d1Nfxx8MDD0BZmW+XnDTJv/GaR8k59qSF769P\ne7yY5uBrEmXknu7qp74L+zTQ0TnXFfgz8HDaH2R2rpmVm1l5ZWVl3ZKKSPEbPhyuvRYmT4axY0On\nibWM3TJm1hP4lXPux4mvRwM4535Tw/mNgfXOuZ1r+7nqlhEpUc759sgJE/xI/uxw/RdzfngSAH3+\n/GS9vj+5pHQ+l4zOZrfMIqCTme1lZtsDQ4CZKU/WpsqX/YHldQkrIiXEDO67D370Ixg5Ep59NnSi\nWMpY3J1zW4ALgefwRftx59wyM7vOzPonTrvYzJaZ2RLgYmB4rgKLSAw0aeIXGevSxS8y9tproRPF\nTqQ7VJ1zs4HZKcfGVPl8NDA6u9FEJNZatPA98D16+BbJBQv8WjSSFSV1h+qri6Pt8XhIt6k5TiLF\noGLosEjndZgyOcdJYm7ZMjjySL9Ewbx58B//kbOnSv07/XLRIgB2OvTQasdr+jtN3baxfK2vYd13\nqz4Fnss5eG3WISLFYf/9/VZ9774LAwfC11+HThQLJTVyF5EC9uij8NOfwmmnwSOP+A24cyw5kq/v\nb1+HTh8CwKJBj2UtUyZRR+5aFVJECsMZZ/hlgq+5xs+9/yZtt7VEpOIuIoXj6qth5Uq46SZf4EeO\nDJ2oaKm4i0jhMIN77vGrR15wAbRtC/36hU5VlDTnnkUTJ0Z7h7ysrCzzSTGSum5HTeKwnodkyaZN\n0KuXX2hs7lxI6WYJJXVtq/kbvgCgZ8tm1Y7ncm0rdcuISPFq3hxmzYLWrf3I/b33QicqOhq5i0jh\nWr7c98C3bg0vvwy77BI6UTXJkXw+V6HVyF1Eit9++8FTT8H778OAAfDPf4ZOVM2mz5ez6fPCXEpL\nxV1ECtvRR/slgl9+GYYNg61bQycqCuqWEZHCN3gwrFoFV14J7dvDrbeGTlTwVNxFpDiMGuV74MeN\n8z3wF10UOhE3NU/emdq/1vNCUHEXkeJgBnfc4XvgL7nEb7x90kl5jZC6+OCGDQvTHi+ExQc15y4i\nxaNxY5
g61fe9n3aaXyZY0lIrpIgUn3XroGdP+OwzmD8f9t47SIzkiD3dSD15U2O2b1pUK6SIxFfr\n1jBnjt+PtU8fqKwMnajgqLiLSHH6r/+CmTP9HHz//vDll6ETFRQVdxEpXkcc4deBX7jQLxn87beh\nExWMoptzb9AiVBP7RnuSsmfqkKj4rRu/NNJ5rUd0zXESkXq64w649FLfHnnHHb6zJs9SFw6sqKgA\noEPKvrANnYPXZh0iUjouucT3wP/2t9CxI1x+eehEwRXdyF1EJK2tW+HUU+GJJ+Dxx+GUU4LGKYpu\nGTPrbWZvm9kKM7s6zeNNzWxa4vGFZtax7pFFRBqgUSOYMsXPww8dCvPmhU4UVMbibmaNgXuAPkBn\n4DQz65xy2lnAp865vYHbgZuzHVREJKMdd/SrSLZv71eRfPvt0ImCiTJyPwxY4Zx7zzm3GXgMGJBy\nzgDg4cTn04EfmAV4R0NEZNddfQ9848a+B37t2tCJgojyhuqewKoqX68GDq/pHOfcFjPbCOwCfJyN\nkCIidfKf/+l3curVy+/kNHcuNGuW6buyKvR2mlFG7ulG4KnvwkY5BzM718zKzay8UneUiUguHXYY\nPPaY3/Bju9JrDIxS3FcD7ap83Rb4qKZzzGw7YGdgfeoPcs7d75zr7pzr3qpVq/olFhGJqn9/v9FH\n06ahk+RdlOK+COhkZnuZ2fbAEGBmyjkzgTMTnw8CnneheixFRCTznHtiDv1C4DmgMTDBObfMzK4D\nyp1zM4GHgClmtgI/Yh+Sy9AiIlK7SBNRzrnZwOyUY2OqfP5PIOwdAyIi8i9aOExEJIZU3EVEYkjF\nXUQkhlTcRURiSMVdRCSGgi35a2aVQEXiy10p3KUKlK1+lK1+lK3uCjUX5CZbB+dcxrtAgxX3aiHM\nyqOsTxyCstWPstWPstVdoeaCsNk0LSMiEkMq7iIiMVQoxf3+0AFqoWz1o2z1o2x1V6i5IGC2gphz\nFxGR7CqUkbuIiGRRXot7IW+0HSHbcDOrNLPXEx9n5ynXBDNbZ2Zv1vC4mdmdidxLzaxbPnJFzNbL\nzDZWuWZj0p2Xo2ztzOwFM1tuZsvM7JI05+T92kXMFeS6mdkOZvY3M1uSyHZtmnOCvEYjZgvyGq3y\n/I3N7DUzm5XmsfxfN+dcXj7wywX/HfgesD2wBOiccs75wH2Jz4cA0woo23Dg7nxdryrP+32gG/Bm\nDY+fAMzB74bVA1hYQNl6AbPyfc0Sz90G6Jb4/DvAO2n+TvN+7SLmCnLdEteheeLzJsBCoEfKOaFe\no1GyBXmNVnn+y4Gp6f7uQly3fI7cC3mj7SjZgnDOvUSaXa2qGABMdt4CoKWZtSmQbME459Y45xYn\nPv8cWI7f67eqvF+7iLmCSFyHTYkvmyQ+Ut+UC/IajZgtGDNrC/QFHqzhlLxft3wW93Qbbaf+o662\n0TaQ3Gi7ELIBnJz49X26mbVL83gIUbOH0jPxq/QcM9s/RIDEr8AH40d7VQW9drXkgkDXLTG18Dqw\nDviTc67Ga5bn12iUbBDuNfpb4L+BrTU8nvfrls/inrWNtnMgyvM+DXR0znUF/sy//xcOLdQ1i2Ix\n/lbpA4G7gCfzHcDMmgNPAJc65z5LfTjNt+Tl2mXIFey6Oee+dc4dhN8r+TAz65JySrBrFiFbkNeo\nmfUD1jnnXq3ttDTHcnrd8lns6OiyAAABgElEQVTcs7bRdohszrlPnHNfJ758ADgkD7miiHJdg3DO\nfZb8Vdr53byamNmu+Xp+M2uCL6CPOuf+kOaUINcuU67Q1y3xvBuAuUDvlIdCvUYzZgv4Gj0S6G9m\nK/FTuseZ2SMp5+T9uuWzuBfyRtsZs6XMxfbHz5UWgpnAsETnRw9go3NuTehQAGa2e3Je0cwOw/97\n+yRPz234vX2XO+duq+G0vF+7KLlCXTcza2VmLROf7wj8EHgr5bQgr9Eo
2UK9Rp1zo51zbZ1zHfG1\n43nn3E9TTsv7dYu0h2o2uALeaDtitovNrD+wJZFteD6ymdnv8N0Tu5rZamAs/s0knHP34fe2PQFY\nAXwJlOUjV8Rsg4DzzGwL8BUwJE//WYMfTQ0F3kjM0wJcA7Svki/EtYuSK9R1awM8bGaN8f+hPO6c\nm1UIr9GI2YK8RmsS+rrpDlURkRjSHaoiIjGk4i4iEkMq7iIiMaTiLiISQyruIiIxpOIuIhJDKu4i\nIjGk4i4iEkP/D5ybo5xD8goaAAAAAElFTkSuQmCC\n", 104 | "text/plain": [ 105 | "" 106 | ] 107 | }, 108 | "metadata": {}, 109 | "output_type": "display_data" 110 | } 111 | ], 112 | "source": [ 113 | "# For every point mark point with - if label is -1 and mark with + if label is 1\n", 114 | "for i in range(len(X)):\n", 115 | " if Y[i] == -1:\n", 116 | " plt.scatter(X[i][0], X[i][1], s=120, marker='_', linewidths=2)\n", 117 | " else:\n", 118 | " plt.scatter(X[i][0], X[i][1], s=120, marker='+', linewidths=2)\n", 119 | " \n", 120 | "# Red line is how it will look like...\n", 121 | "plt.plot([3.6,1.5],[0.0,3.0],'r')" 122 | ] 123 | }, 124 | { 125 | "cell_type": "markdown", 126 | "metadata": {}, 127 | "source": [ 128 | "### Real Time Plotting While Training ###" 129 | ] 130 | }, 131 | { 132 | "cell_type": "markdown", 133 | "metadata": {}, 134 | "source": [ 135 | "### Loss Function (A lot of Math)###\n", 136 | "I use the Hinge Loss which is used for training classifiers and used for \"maximum-margin\" which is mostly usable for Support Vector Machines.\n", 137 | "\n", 138 | "#### Loss Function ####\n", 139 | "Hinge loss is shown as:\n", 140 | " \n", 141 | "where: \n", 142 | "- c is loss function,\n", 143 | "- x is the vector of coordinates of point,\n", 144 | "- y is the correct label of point and,\n", 145 | "- f(x) is the label the SVM predicts\n", 146 | "\n", 147 | "#### Objective Function ####\n", 148 | "Objective Function conteins two terms: The first term is a regularizer, and the second term the loss. \n", 149 | "\n", 150 | "The part after the + sign is the loss function hinge loss, and the part before the + sign is the regularizer. 
Regularizer balances the margin maximization and loss.\n", 151 | "\n", 152 | "#### Minimize Loss ####\n", 153 | "To minimize loss, I will use gradient descent.\n", 154 | "\n", 155 | "\n", 156 | "#### At the end... ####\n", 157 | "At the end, we have a formula:\n", 158 | "\n", 159 | "where:\n", 160 | "- w is weight of SVM,\n", 161 | "- n is the learning rate\n", 162 | "- lambda is the 1/epoch (change rate gets smaller while it pass epochs)" 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": 25, 168 | "metadata": { 169 | "collapsed": true 170 | }, 171 | "outputs": [], 172 | "source": [ 173 | "def train_svm(X, Y, epochs=10000):\n", 174 | " #Initialize our SVMs weight vector with zeros (3 values)\n", 175 | " w = np.zeros(len(X[0]))\n", 176 | "\n", 177 | " # The learning rate\n", 178 | " learning_rate = 1\n", 179 | " \n", 180 | " # See the change\n", 181 | " w0_per_epoch = []\n", 182 | " w1_per_epoch = []\n", 183 | " \n", 184 | " # Training\n", 185 | " print(\"starts training\")\n", 186 | " for epoch in range(1, epochs):\n", 187 | " error = 0\n", 188 | " for i, x in enumerate(X):\n", 189 | " # It there is an error\n", 190 | " if (Y[i] * np.dot(X[i], w)) < 1:\n", 191 | " w = w + learning_rate * ((X[i] * Y[i]) + (-2 * (1/epochs) * w))\n", 192 | " else:\n", 193 | " w = w + learning_rate * (-2 * (1/epochs) * w)\n", 194 | " \n", 195 | " w0_per_epoch.append(w[0])\n", 196 | " w1_per_epoch.append(w[1])\n", 197 | " \n", 198 | " return w, w0_per_epoch, w1_per_epoch" 199 | ] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "execution_count": 26, 204 | "metadata": {}, 205 | "outputs": [ 206 | { 207 | "name": "stdout", 208 | "output_type": "stream", 209 | "text": [ 210 | "starts training\n" 211 | ] 212 | } 213 | ], 214 | "source": [ 215 | "w, w0array, w1array = train_svm(X, Y, epochs=10000)\n", 216 | "print(w)" 217 | ] 218 | }, 219 | { 220 | "cell_type": "code", 221 | "execution_count": 27, 222 | "metadata": {}, 223 | "outputs": [ 224 | { 225 | "name": 
"stdout", 226 | "output_type": "stream", 227 | "text": [ 228 | "[3.46279308 1.85732796]\n" 229 | ] 230 | } 231 | ], 232 | "source": [ 233 | "# You cannot see anything in the graph of 10000 numbers :)\n", 234 | "epochs = len(w0array)\n", 235 | "\n", 236 | "# It will divide epochs to this number\n", 237 | "number_of_weights_to_graph = 100\n", 238 | "\n", 239 | "num_per_epoch = epochs/number_of_weights_to_graph\n", 240 | "\n", 241 | "w0_to_graph = []\n", 242 | "w1_to_graph = []\n", 243 | "epoch_to_graph = []\n", 244 | "\n", 245 | "for i in range(number_of_weights_to_graph):\n", 246 | " epoch_to_graph.append(int(num_per_epoch*i))\n", 247 | " w0_to_graph.append(w0array[int(num_per_epoch*i)])\n", 248 | " w1_to_graph.append(w1array[int(num_per_epoch*i)])\n", 249 | " \n", 250 | "plt.plot(epoch_to_graph, w0_to_graph, 'r',epoch_to_graph, w1_to_graph,'b')" 251 | ] 252 | }, 253 | { 254 | "cell_type": "code", 255 | "execution_count": 28, 256 | "metadata": {}, 257 | "outputs": [ 258 | { 259 | "data": { 260 | "text/plain": [ 261 | "" 262 | ] 263 | }, 264 | "execution_count": 28, 265 | "metadata": {}, 266 | "output_type": "execute_result" 267 | }, 268 | { 269 | "data": { 270 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAXcAAAD8CAYAAACMwORRAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMS4wLCBo\ndHRwOi8vbWF0cGxvdGxpYi5vcmcvpW3flQAAIABJREFUeJzt3XmYlMW99vHvj01UVAQFNwQXPAaR\nACIBNWo08QWJGhLe4DFKHDGCGpFglGERBFnE4xJCouKGSlyIGgwiahb1uKEygBAVFQQ5oBwZRDGI\nqDD1/lE978w0PTPPzHR3Pd19f65rLmq6a7rvq6F/1FTXU2XOOUREJL80Ch1ARETST8VdRCQPqbiL\niOQhFXcRkTyk4i4ikodU3EVE8pCKu4hIHlJxFxHJQyruIiJ5qEmoJ95vv/1chw4dQj29BPTOO/DV\nV9CoEXTuDE2bhk4kkjsWL168yTm3f239ghX3Dh06UFJSEurpJaBnn4U+faCsDE4+GWbMCJ1IJHeY\n2doo/TQtI1l3xhnwgx/49syZsHp12Dwi+UjFXbLODKZN8+1vv4Vrrw2bRyQfqbhLEMcfDwMG+PZD\nD8HSpWHziOQbFXcJZvJkaNzYt0eNCptFJN+ouEswRx0FF1/s288+C889FzaPSD5RcZegxo2D3Xf3\n7eJi0NkxIumh4i5BHXQQDB/u24sWweOPh80jki9U3CW4a66BVq18e8wYv4JGRBpGxV2Ca9kSRo/2\n7fffh3vvDZtHJB+ouEssXH45tGvn2xMmwLZtmXmejTOXs3Hm8mrvHzhzIQNnLszMkxeotRcMYu0F\ng0LHKDgq7hILzZv7og6wYQNMnx42j0iuU3GX2Bg0CI45xrdvuAE+/TRsHpFcpuIusdG4MUyZ4ttf\nfAFTp4bNI5LLzAVaWNyjRw+nXSElmXPw/e/DK6/Abrv5D1gPPbT+j5c8v/7Nmi0ANDtsHwCGbtgI\nQNMD9wTg9TWbAfjeYa2q/NycIb3rH6LAJM+vb1u0CIA9jj++yu3tZz+QtUz5xMwWO+d61NZPI3eJ\nFTM/JQPw9dcwfnzYPCK5SiN3iaWzz4Ynn/QHeixfXjEX31DlI/k2Q7qkvL98pYxG6lX1X7oSgLnd\nOtb5Z8tH8rk4Up8zoRiAgeNvCJykQtpG7mbW3MzeMLNlZva2mU1I0Wc3M5tjZqvM7HUz61C/2CLe\nlCl+FF9WVrEGXkSiizIt8zVwmnPuu0BXoI+Z9UrqMxj4zDl3JHArMC29MaXQdO4Mv/ylb8+b5+fg\nRSS6Wou787Ymvm2a+EqeyzkHuD/Rfgw43cwsbSmlIE2Y4D9UBRg5UpuKidRFpDl3M2sMLAaOBP7o\nnBuZdP9bQB/n3PrE9x8A33PObaruMTXnXjflc561qc+caJxddRXccotvz5sHZ50VNk+hSP73tvDz\nLwHo3XLPKrfn27+38jn2cuvfeQuAQzp1rnJ7yDn4tK6Wcc7tdM51BQ4BeppZ56QuqUbpu/yvYWaX\nmFmJmZWUlpZGeWopcKNHw957+/aoUbBzZ9g8IrmizqtlzGw88KVz7qZKtz0LXOecW2hmTYD/BfZ3\nNTy4Ru4S1ZQpfrdIgFmz4MILg8YpSA1ZLZPL3h79BADHTPlJ4CQV0rlaZn8za5lo7w78EHg3qds8\nIPHxFwOA52oq7CJ1ceWVcMABvj1uHGzfHjaPSC6IMi1zIPC8mS0HFgF/d87NN7OJZnZ2os89QGsz\nWwWMAIqreSyROttzz4qLmdatg9tuC5tHJBc0qa2Dc2450C3F7eMqtbcD/ze90UQqDB7sP1hdudIf\nrD14MOyzT+hUIvFVa3GX7Khpj/HKqruyMt81beqL+s9/Dps3w403+u8lOwplrj35fbhPWeuUt+fC\n+1B7y0jOGDAAeiQ+Rrr1Vr/vu4ikpr1lJKc89xycfrpvDxkCd
9wRNo/kt9r2IgpBu0JKXjrtNDjj\nDN+++26/JbCI7ErFXXJO+ZbAO3fC2LFhs4jElYq75Jxu3eDcc3370UchcRaEiFSi4i45adIkaJJY\n61VcrE3FJDPaDOkSq/n2ulBxl5x0xBH+A1XwH7L+/e9h84jEjYq75Kxrr/VXr4IfvZeVhc0jEicq\n7pKz2rb1WwIDLF0Kc+aEzSP1M2dC8S5b7eaK3k+/RO+nXwodIyUVd8lpV10F++3n22PHwjffhM0j\nEhcq7pLT9t67Yjnk6tVw111h84jEhYq75LyhQ6FDB9+eOBG2bq2xu0hBKKjtB6LO64U8Qkvq509/\nggsu8O0JE/y+7xJPuXCUXXWS59fXNN8LgMO2/7vK7Qv7fj9jGbT9gBSU886DLonlyP/1X6BTHKXQ\nFdTIXfLbggXQr59vDxsG06eHzVMo5t68BID+V3Wv18+Xj+TjOFKvTflIPpMj9WQauUvB6dsXTjnF\nt2+/HdasCZtHJCQVd8kbZhWbin37rebdpbCpuEte6dUL+vf37QcfhGXLwuYRCUVz7pJ33n0XjjnG\nb0fQt6+fi5f0KZ9jL/fxys8BOKhjyyq313cOXmqmOXcpWEcfDRdd5NtPPw0vvBA0jkgQGrlLXvro\nIzjySNi+HXr2hNde83PysTYrsdSn6KmwOeqooatlGmLtBYMAaD/7gaw/N8ArD3YD4MRfLM3ac6Zt\n5G5m7czseTNbYWZvm9mVKfqcamZbzOzNxJc+ypKgDj4Yrkz8S33jDZg7N2wekWyLMi2zA7jKOfcd\noBdwuZl1StHvJedc18TXxLSmFKmHkSOhZWIaePRo2LEjbB6RbKq1uDvnNjjnliTa/wZWAAdnOphI\nQ+27L4wa5dvvvQf33Rc0jkhW1WnO3cw6AC8CnZ1zX1S6/VTgcWA98DHwW+fc2zU9Vn3n3DfOXB6p\nX64ejVWd8rnF2oSae4yrr76Cjh39HPxBB8HKlbDHHqFTJZTPsZdb+7L/s/1JVW/PsTn4TEp+H2xL\nHKC7x/HHV7k9U++D8jn2ctsP9GWw+Ya9q9yeyTn4tK+WMbMW+AI+vHJhT1gCtHfOfReYATxRzWNc\nYmYlZlZSqs0/JAt2391vJAbw8ccwY0bYPCLZEmnkbmZNgfnAs865WyL0/xDo4ZzbVF0frZaRbNmx\nA4491q9/b9kSPvgAWrUKnSqFGlbL5PL+Kw1V9EwRALP6zNrlPq2WqV6U1TIG3AOsqK6wm9kBiX6Y\nWc/E435at8gimdGkCUyZ4tuff16xRYFIPosyLXMicAFwWqWljmea2VAzG5roMwB4y8yWAb8HznWh\nFtCLpPCTn/itCcBPzaxfHzaPSKY1qa2Dc+5loMbLP5xzfwD+kK5QIulmBtOm+V0jt2+H666Du+8O\nnUokc3SFakwMnLmwxvs/bHYTAJ0O3LvGfqnmJaVCv35+r5lGjeBf/4JOqa7YiIFcPq2oocrn2MuV\nfOLrRI+2VaeZC/XfuvaWEUlh6lQ/ii8rgzFjQqcRyRyN3KXgDBoEs2f79quvQu/eYfNEodUy9Ryp\n5+h+PTXRyF2kGhMnQrNmvj1yJOijf8lHKu5ScDp0gMsu8+2XXtJ+75KfVNylII0ZA3vt5dujRsHO\nnWHziKSb5tylYF1/fcU5q/ff7+fiJccVwH49mnMXqcVvfgNt2/r2uHHw9ddh84ikU60XMYnkqxYt\nfFG//HJYuxZuvx2GDw+dShokeUSeh6tlotLIXQrar34FRxzh25MmwZYtYfOIpIuKuxS0pk19UQf4\n9FO46aaweUTSRcVdCt7Pfw7dE2c733IL/O//hs0jkg4q7lLwGjWq2AZ42za/ikbyRNFTBTnfDiru\nIgD86Edw+um+feedsGpV2DwiDaXiLpJQPnrfsQPGjg2bRaShVNxFEnr08PPvAHPmwOLFYfNk09yb\nlzD35iWhY9TZwJkLa90uu
1CpuItUMmmSP5YPoLi45r4icabiLlJJx45w8cW+/Y9/+C+RXKTiLpJk\n3DjYYw/fLi72B3uI5JqC2jhs8ZLzIvU7rvtDGU4SL2sviLZjVvvZD2Q4SXyMHQuTJ/v2I4/AwIFh\n86Rb8vz6xys/B+Cgji2r3N7/qu5ZyxRF8vz662s2A/C9w1pVuX3OkBw4gaWetHGYSANcfTW0bu3b\nY8fCt9+GzSNSVwU1chepi1tvhREjfPu22+DSS8PmyaTykXx1I/XyEXOIEXH/pSsBmNut4y73hcwF\ntb9umZC2kbuZtTOz581shZm9bWZXpuhjZvZ7M1tlZsvNLF6/y4nUw6WXwqGH+vaECbB1a9g8InUR\nZVpmB3CVc+47QC/gcjPrlNSnL9Ax8XUJcHtaU4oE0Ly5P28V4JNP4He/C5tHpC5qLe7OuQ3OuSWJ\n9r+BFcDBSd3OAR5w3mtASzM7MO1pRbLs/POhc2ffvvFG2LQpbB6RqOo0525mHYAXgc7OuS8q3T4f\nuME593Li+38CI51z1U6q13fOfdasWZH6FRUV1fmxGyrqFX5xW4HQUEXPRHutZ/WJ9ncXN/Pnw1ln\n+fbw4X4uPt+FXJVSPsdebuHnXwLQu+WeVW5PNQefaXFYZZT21TJm1gJ4HBheubCX353iR3b5X8PM\nLjGzEjMrKS0tjfrUIkH16wcnJY7gvO02f2qTSNxFGrmbWVNgPvCsc+6WFPfPBF5wzj2c+P494FTn\n3IbqHlOrZSSXvPoqnHiibw8a5A/ULiRxXS0TWq6vljHgHmBFqsKeMA8YlFg10wvYUlNhF8k1J5wA\n55zj27Nnw7/+FTaPSG2iTMucCFwAnGZmbya+zjSzoWY2NNFnAbAaWAXcBVyWmbgi4UyZ4g/2cA5G\njQqdRqRmTWrrkPiQNNWceuU+Drg8XaFE4qhTJ/jlL2HWLHjqKXjpJfj+90OnEklNV6hKrZJXL1Qn\njnOi6bZund858uuvoXdveOUVsBqHPiLppb1lRDKgXTu44grfXrgQ/vrXsHlEqqORu0gdbd4Mhx8O\nW7bAd74Dy5dXHPAhkmkauYtkSKtWFac0bdnij+QTiRsVd5F6GDYMfvxjKC2FkSPhq69CJxKpSsVd\npB722MNvSfDtt/DRR/CHP4ROJFKVirtIPV10ERx1lG9PnQqffRY2j0hlKu4i9dSkib+wCXxhnzYt\nbB6RylTcRRrgpz+Fnj19e/p0P0UjEgcq7iINYAY33ODb27f7E5skfxQ9UxR5S+u4UXEXaaAf/AD6\n9PHte+6Bd98Nm0cEVNxF0mLqVP9nWRmMGRM2iwiouIukRdeucN55vv2Xv8Drr4fNI6LtB3LE4iXn\nRep3XPeHMpwkhVn9ovUremrXH43xsYl1tXo1HH20X/t+yinw/PPaVCzXJM+vl3zia1SPtlWv9g95\nZKS2HxDJssMPh6GJEw7++7/hmWfC5pHCppG7SBpt3AhHHAFbt8J3vwtLlvgDPiQ3lY/k43S4u0bu\nIgG0aQO//a1vL1sGDz8cNo8ULhV3kTQbMQL239+3x471B3uIZJuKu0ia7bUXXHutb3/4IcycGTSO\nFCjNuUvGzb15SaR+/a/qnuEk2fPNN37lzJo1sN9+8MEHsPfeoVNJPtCcu0hAzZrBpEm+vWkT3Hxz\n2DxSeDRyF8mQsjI47jh4803Yc08/em/bNnSq+InjipR0KL82Jd3XnmjkLhJYo0YVm4p9+WXFSF4k\nG2ot7mZ2r5ltNLO3qrn/VDPbYmZvJr7GpT+mSG464wy/sRjAHXf40btINkQZud8H9Kmlz0vOua6J\nr4kNjyWSHypvCbxjR8UqGpFMa1JbB+fci2bWIfNRoom6t3K+zd811JwJxZH6DRx/Q90fvAF7y2Ta\nwJkLI/WbM6R3xjL07Ak/+xk8/ri/qOnqq6Fbt4w9XexVt39L8u259h5O3v/p889fT3l7tvZ
/Stec\ne28zW2ZmT5vZMdV1MrNLzKzEzEpKS0vT9NQi8Td5MjRu7NujRoXNIoUh0mqZxMh9vnOuc4r79gbK\nnHNbzexMYLpzrmNtj6nVMlJohgyBO+/07X/+E047LWyeuNBqmbrJ2moZ59wXzrmtifYCoKmZ7dfQ\nxxXJN+PHw+67+3ZxMQRahZx1G2cuZ+PM5aFjFJwGF3czO8DM71ptZj0Tj/lpQx9XJN8cdBAMH+7b\nixb5OXiRTImyFPJhYCHwH2a23swGm9lQM0vsXM0A4C0zWwb8HjjXhboySiTmrrkG9t3Xt0eP9gd7\niGSCrlAVybKbbvIrZsCvfR8yJGyeTCufkmkzpEvgJPkh6py7inuOiPUxewHl4jF927fDUUfBunVw\n4IGwcqXfniBfJM+vf7NmCwDNDtunyu0q9vWj7QdEYqp5c5gwwbc3bIDp08PmkfykkbtIADt3Qpcu\n8M47fivg1auhdevQqTJD0zLppZG7SIw1bgxTp/r2F19UtEXSRcVdJJCzzoITT/TtGTPgf/4nbB7J\nLyruIoFU3lTsm2/8RU4i6aLiLhLQSSfBj3/s2w88AG+l3Fg7t7UZ0kXz7QGouIsENnWqH8WXlfkL\nm0TSQcVdJLDOnWHQIN9+8kl4+eWweSQ/qLiLxMCECf5QbSisTcUkc1TcRWKgfXv49a99+5VXYP78\nsHkk96m4i8TE6NH+gibwB3rs3Bk2j+Q2FXeRmGjd2u8aCfD22zB7dtg8kttU3EViZPhwOOAA3x43\nzm8yJlIfKu4iMbLnnhUXM61bB3/8Y9g8OW9Wv+gHuOcZFXeRmBk8GDomTiGeMgW2bAmbR3KTirtI\nzDRtCpMn+/bmzXDjjWHzSG5ScReJoQEDoEdiU9dbb4WPPw6bR3JPk9ABsmnuzUsi9et/VfcMJ6m7\nTJ441H/pykj95nbrWOfHzmdFz0R7rWf1ifZ3V1n5pmI//CF89RVMnOiP5JNaJM+vr3059e1FT2Un\nT0AauYvE1Omnwxln+Pbdd8P774fNI7lFJzGJxNjSpdA98YvkgAHw6KNh88RN+W+01f7GWj5iz6OR\nuk5iEskD3brBuef69mOPwaJFYfNI7qi1uJvZvWa20cxS7jRt3u/NbJWZLTez+E1Yi+Sw66+HJolP\nx0aO1KZiEk2Ukft9QJ8a7u8LdEx8XQLc3vBYIlLuyCNhyBDffv55+NvfwuaR3BBpzt3MOgDznXOd\nU9w3E3jBOfdw4vv3gFOdcxtqesz6zrmvvWBQpH7tZz+Q1p+NYuDMhZH6zRnSu16P3xDlJ9DXRifm\nZM/iJedF6ndc94f45BM44gj48kvo2hUWL4ZGBTipmrxqbO3atQC0b9++yu31WTWWK7I5534wsK7S\n9+sTt6UKdYmZlZhZSWlpaRqeWqQwtG0LI0b49ptvwpw5YfNI/KVj5P4UMNU593Li+38C1zjnFtf0\nmFotI1I3X3zhR++bNsHhh8OKFRUHfBSqWlfL5KFsjtzXA+0qfX8IoOvpRNJs771h7FjfXr0a7rwz\nbB6Jt3QU93nAoMSqmV7Altrm20WkfoYOhQ4dfPv662Hr1qBxJMaiLIV8GFgI/IeZrTezwWY21MyG\nJrosAFYDq4C7gMsyllakwO22my/qABs3wi23hM0j8aUrVEVyTFmZv7hp+XJo0QI++ADatAmdSrIl\n6px7QW0cJpJNcyYUR+o3cPwNdXrcRo1g6lTo189Py0yeDNOn1yeh5LMCXCkrkvv69oVTTvHt22+H\nNWvC5pH40bSMSI567TXonbge7he/gD/9KWweyQ5tHCaS53r1gv79ffuhh2DZsrB5JF5U3EVy2JQp\nfg7eORg1KnQaiRMVd5EcdvTRcNFFvv300/DCC0HjSIyouIvkuOuug+bNfVtbAks5FXeRHHfwwTBs\nmG+/8QbMnRs2j8SDirtIHiguhpYtfXv0aNixI2yegjG
r366Hb8eEirtIHth334oPVN97D5K2PZcC\npOIukieuuMJP0YCfh9+2LWgcCUzFXSRP7L47TJjg2x9/DDNmhM0jYRXWFapR58aKnspsjnqYe/OS\nSP36X6XzySsreibaIQ6z+uTHPMaOHXDssfDuu7DPPn7f91atQqfKI8k1ZO3L/s/2J1W9PYM1RFeo\nihSgJk38hU0AW7bADXXbk0zySGGN3EUKgHNwwgl+75nddoOVK6Fdu9p/TuqhfCSfxd/2NXIXKVBm\nMG2ab3/9tf9wVQqPirtIHjr5ZDjzTN++7z54552gcSQAFXeRPDV1qh/Fl5XBmDGh00i26SQmkQwJ\nvcKpSxc4/3yYPRueeAJefdXPxUsaxXBlXTmN3EXy2MSJ0KyZbxcXa1OxQqKRu0iGxOGagw4d4NJL\n/RmrL70ECxb4s1cl/2nkLpLnxoyBvfby7eJi2LkzbJ44GThzIQNnLgwdIyMiFXcz62Nm75nZKjPb\n5Uh3M7vQzErN7M3E18Xpjyoi9bH//nD11b791lvw4INh80h21Frczawx8EegL9AJ+E8z65Si6xzn\nXNfE191pzikiDfCb30Dbtr49bpxf/y75LcrIvSewyjm32jn3DfAIcE5mY4lIOrVo4Ys6wNq1cPvt\nYfNI5kX5QPVgYF2l79cD30vR72dmdjLwPvAb59y6FH0arP/SlZH6ze3WMRNPH8ycCbvMhqU0cHye\nbSaS4c3eNs5cHqlfmyFddrkt1/5OfvUruOUW+OADmDQJior85mKFJHl+/fU1m1PePmdI76xlypQo\nI3dLcVvygqongQ7OuS7AP4D7Uz6Q2SVmVmJmJaWlpXVLKiIN0rSpL+oAn34KN90UNo9kVq0bh5lZ\nb+A659z/SXw/CsA5N7Wa/o2Bzc65GscE2jhMJPvKyuD442HJEthjDz+KP+CA0KnCefqHPwGg7z+e\nqNfPl28pnc0to9O5cdgioKOZHWZmzYBzgXlJT3ZgpW/PBlbUJayIZEejRhXbAG/b5i9ykvxUa3F3\nzu0Afg08iy/af3bOvW1mE83s7ES3YWb2tpktA4YBF2YqsIg0zI9+BKef7tt33QWrVoXNI5kR6QpV\n59wCYEHSbeMqtUcBo9IbTUQy5YYb/PTMjh0wdiw88kjoRJJuBXVYx+Il50Xqd1z3hzKcRHLB2gsG\nRerXfvYDGU6SGQMHwp//7NslJXDccWHzZEPy3+m2RYsA2OP446vcXt3fafKxjSWf+BrWo23VKfBM\nzsHrsA4RqdGkSf5YPvDbEkh+KaiRu4hUddllFRc0/e1vfj6+kJSP5Ov729fxj50LwKIB2ZvX0shd\nRGo1bpxfEgl+9F5WFjaPpI+Ku0gBO+AAv+8M+LXvjz4aNo+kj4q7SIG7+mpo3dq3x46Fb78Nm0fS\nQ3PuaTRrVrRPyIuKimrvlEei7pedD/t55Kpbb4URI3z7ttv8AR+yq+S9rRZ+/iUAvVvuWeX2TO5t\npTl3EYns0kvh0EN9e8IE2Lo1bB5pOI3cRQSA+++HCy/07euv91M0UrMfveh3Yvn7yWfX0jN9NHIX\nkTo5/3zo3Nm3b7wRNm0Km0caRsVdRABo3BimJvZ6/fe/YfLksHmkYVTcReT/69cPTjrJt2+7zZ/a\nJLkp0sZhIlIYzGDaNDjxRPjmG3+R0/0pj94pTMn7UxXveD1xe9UrVOOwP5VG7iJSxQknwDmJU5Jn\nz4bl0U4ilJjRahkR2cU778Cxx/rtCPr1g/nzQyeKp/KRfDZH6lotIyL11qlTxbLIp56CF18MGicn\nzZo1K/KFjZmg4i4iKV13Hey2m2+PHAmBfsmXelJxF5GU2rWDK67w7ddeg7/+NWweqRsVdxGp1qhR\nsM8+vj16tD+WTyoc1/2hWKyMSSXnlkI2aBOqWf2iPUnRU3VIlPs2zoy2HKLNkC4ZTiJx06qV3+d9\n1ChYscIvixw8OHS
qeEqeX1+buEgg+fZsbRyokbuI1GjYMDjoIN8ePx6++ipsHolGSyFFpFZ33glD\nhvj2jTf6PeClZuUj9nSP1NO6FNLM+pjZe2a2ysx2OUrXzHYzszmJ+183sw51jywicXXRRXDUUb49\ndSp89lnYPFK7Wou7mTUG/gj0BToB/2lmnZK6DQY+c84dCdwKTEt3UBEJp0kTmDLFtz/7zG9RIPEW\nZeTeE1jlnFvtnPsGeAQ4J6nPOUD5DhSPAaebmaUvpoiE9tOfQs+evj19Onz0Udg8UrMoxf1gYF2l\n79cnbkvZxzm3A9gCtE5HQBGJh/JNxQC2b/cnNkn1ioqKgh6pGaW4pxqBJ38KG6UPZnaJmZWYWUlp\naWmUfCISI6eeCn36+PY998C77waNIzWIUtzXA+0qfX8I8HF1fcysCbAPsDn5gZxzdzrnejjneuy/\n//71SywiQZUf6FFWBmPGhM0i1YtS3BcBHc3sMDNrBpwLzEvqMw/4ZaI9AHjOhVpjKSIZ1bUr/OIX\nvv2Xv/itCSR+ai3uiTn0XwPPAiuAPzvn3jaziWZWfirsPUBrM1sFjAB2WS4pIvnj+uuhaVPfLi7W\npmJxpIuYRKRehg2DGTN8e8EC6Ns3bJ5Cof3cRSSjxo6FFi18u7jYz8FLfKi4i0i9tGkDv/2tby9f\nDg/Fc3PEgqXiLiL1NmIElC98u/Za+PrrsHmkgoq7iNTbXnvBuHG+/eGHMHNm0DhSSbAPVM2sFFib\n+HY/YFOQILVTtvpRtvpRtrqLay7ITLb2zrlaLxQKVtyrhDArifLpbwjKVj/KVj/KVndxzQVhs2la\nRkQkD6m4i4jkobgU9ztDB6iBstWPstWPstVdXHNBwGyxmHMXEZH0isvIXURE0iirxT3OZ7FGyHah\nmZWa2ZuJr4uzlOteM9toZm9Vc7+Z2e8TuZebWfds5IqY7VQz21LpNRuXxWztzOx5M1thZm+b2ZUp\n+mT9tYuYK8jrZmbNzewNM1uWyLbLcRyh3qMRswV5j1Z6/sZmttTM5qe4L/uvm3MuK19AY+AD4HCg\nGbAM6JTU5zLgjkT7XGBOjLJdCPwhW69Xpec9GegOvFXN/WcCT+MPTOkFvB6jbKcC87P9miWe+0Cg\ne6K9F/B+ir/TrL92EXMFed0Sr0OLRLsp8DrQK6lPqPdolGxB3qOVnn8E8FCqv7sQr1s2R+5xPos1\nSrYgnHMvkuLgk0rOAR5w3mtASzM7MCbZgnHObXDOLUm0/43frjr5eMisv3YRcwWReB22Jr5tmvhK\n/lAuyHs0YrZgzOwQoB9wdzVdsv66ZbO4x/ks1ijZAH6W+PX9MTNrl+L+EKJmD6V34lfpp83smBAB\nEr8Cd8OP9ioL+trVkAsCvW4hO7cFAAACJklEQVSJqYU3gY3A351z1b5mWX6PRskG4d6jvwOuAarb\nGzPrr1s2i3vazmLNgCjP+yTQwTnXBfgHFf8LhxbqNYtiCf5S6e8CM4Ansh3AzFoAjwPDnXNfJN+d\n4key8trVkivY6+ac2+mc64o/TrOnmXVO6hLsNYuQLch71Mx+DGx0zi2uqVuK2zL6umWzuKftLNYQ\n2Zxznzrnyve8uws4Lgu5oojyugbhnPui/Fdp59wCoKmZ7Zet5zezpvgC+qBz7i8pugR57WrLFfp1\nSzzv58ALQJ+ku0K9R2vNFvA9eiJwtpl9iJ/SPc3M/pTUJ+uvWzaLe5zPYq01W9Jc7Nn4udI4mAcM\nSqz86AVscc5tCB0KwMwOKJ9XNLOe+H9vn2bpuQ1//OMK59wt1XTL+msXJVeo183M9jezlon27sAP\ngXeTugV5j0bJFuo96pwb5Zw7xDnXAV87nnPOnZ/ULeuvW5NMPnhlzrkdZlZ+Fmtj4F6XOIsVKHHO\nzcP/o59t/izWzfgXKi7Zhpk/M3ZHItuF2chmZg/jV0/sZ2brgfH4D5Nwzt0BLMCv+
lgFbAOKspEr\nYrYBwKVmtgP4Cjg3S/9Zgx9NXQD8KzFPCzAaOLRSvhCvXZRcoV63A4H7zawx/j+UPzvn5sfhPRox\nW5D3aHVCv266QlVEJA/pClURkTyk4i4ikodU3EVE8pCKu4hIHlJxFxHJQyruIiJ5SMVdRCQPqbiL\niOSh/wd5f+DmqTMKjgAAAABJRU5ErkJggg==\n", 271 | "text/plain": [ 272 | "" 273 | ] 274 | }, 275 | "metadata": {}, 276 | "output_type": "display_data" 277 | } 278 | ], 279 | "source": [ 280 | "# Input data - [X coordinate, Y coordinate]\n", 281 | "X = np.array([[1.6,0.3], [1.8,0.5], [2.0,0.7], [2.2,0.4], [2.4,0.6], [2.3,0.5], [2.1,0.5],\n", 282 | " [1.7,1.7], [2.5,1.0], [1.0,3.0], [2.0,1.5], [1.5,1.5], [1.5,2.0], [1.0,2.5],\n", 283 | " [1.6,1.6], [2.4,0.9], [0.9,2.9], [1.9,1.4], [1.0,1.4], [1.4,1.9], [0.9,2.4],\n", 284 | " [1.5,1.7], [2.3,1.1], [0.4,1.0], [1.0,0.7], [1.2,1.5], [1.2,1.0], [1.0,1.1],\n", 285 | " [1.0,1.7], [1.3,1.1], [0.7,1.0], [0.4,0.7], [0.2,1.5], [0.2,1.0], [0.4,1.1],\n", 286 | " [1.0,0.5], [1.3,0.1], [0.7,0.3], [0.4,0.4], [0.2,0.5], [0.2,0.1], [0.4,0.1],\n", 287 | " [1.0,2.4], [1.3,2.1], [0.7,2.0], [0.4,2.7], [0.2,2.5], [0.2,2.0], [0.4,2.1],\n", 288 | " [3.4,2.0], [3.5,2.1], [3.6,2.3], [3.4,2.4], [3.5,2.5], [3.1,2.6], [3.3,2.7],\n", 289 | " [2.0,3.1], [3.5,1.0], [4.0,1.5], [3.0,3.0], [3.0,2.0], [2.5,2.5], [3.3,1.5],\n", 290 | " [3.9,2.5], [3.9,2.0], [3.8,3.0], [3.8,2.9], [3.9,2.7], [3.9,2.5], [3.9,2.7],\n", 291 | " [2.1,3.1], [3.6,1.1], [3.8,1.7], [3.2,3.1], [2.9,2.1], [2.6,2.4], [3.2,1.4],\n", 292 | " [4.0,0.1], [3.9,0.2], [3.9,0.3], [3.7,0.5], [3.9,0.7], [3.9,0.4], [3.7,0.4]])\n", 293 | "\n", 294 | "# Labels (1 or -1)\n", 295 | "Y = np.array([-1, -1, -1, -1, -1, -1, -1,\n", 296 | " -1, -1, -1, -1, -1, -1, -1,\n", 297 | " -1, -1, -1, -1, -1, -1, -1,\n", 298 | " -1, -1, -1, -1, -1, -1, -1,\n", 299 | " -1, -1, -1, -1, -1, -1, -1,\n", 300 | " -1, -1, -1, -1, -1, -1, -1,\n", 301 | " -1, -1, -1, -1, -1, -1, -1,\n", 302 | " 1, 1, 1, 1, 1, 1, 1,\n", 303 | " 1, 1, 1, 1, 1, 1, 1,\n", 304 | " 1, 1, 1, 1, 1, 1, 1,\n", 305 | " 1, 1, 1, 1, 1, 1, 1,\n", 306 | " 1, 1, 1, 1, 1, 1, 1])\n", 307 | 
"\n", 308 | "\n", 309 | "# For every point mark point with - if label is -1 and mark with + if label is 1\n", 310 | "for i in range(len(X)):\n", 311 | " if Y[i] == -1:\n", 312 | " plt.scatter(X[i][0], X[i][1], s=120, marker='_', linewidths=2)\n", 313 | " else:\n", 314 | " plt.scatter(X[i][0], X[i][1], s=120, marker='+', linewidths=2)\n", 315 | "\n", 316 | "# Print the hyperplane calculated by svm_sgd()\n", 317 | "x2=[w[0]*0.65,w[1],-w[1],w[0]]\n", 318 | "x3=[w[0]*0.65,w[1],w[1],-w[0]]\n", 319 | "\n", 320 | "x2x3 =np.array([x2,x3])\n", 321 | "X,Y,U,V = zip(*x2x3)\n", 322 | "ax = plt.gca()\n", 323 | "ax.quiver(X,Y,U,V,scale=1, color='blue')" 324 | ] 325 | }, 326 | { 327 | "cell_type": "code", 328 | "execution_count": null, 329 | "metadata": { 330 | "collapsed": true 331 | }, 332 | "outputs": [], 333 | "source": [] 334 | }, 335 | { 336 | "cell_type": "code", 337 | "execution_count": null, 338 | "metadata": { 339 | "collapsed": true 340 | }, 341 | "outputs": [], 342 | "source": [] 343 | } 344 | ], 345 | "metadata": { 346 | "kernelspec": { 347 | "display_name": "Python 3", 348 | "language": "python", 349 | "name": "python3" 350 | }, 351 | "language_info": { 352 | "codemirror_mode": { 353 | "name": "ipython", 354 | "version": 3 355 | }, 356 | "file_extension": ".py", 357 | "mimetype": "text/x-python", 358 | "name": "python", 359 | "nbconvert_exporter": "python", 360 | "pygments_lexer": "ipython3", 361 | "version": "3.6.3" 362 | } 363 | }, 364 | "nbformat": 4, 365 | "nbformat_minor": 2 366 | } 367 | -------------------------------------------------------------------------------- /Support-Vector-Machine/formula.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CihanBosnali/Machine-Learning-without-Libraries/5c2f546e0a11f1b0769622edc9c61a6eb7d317db/Support-Vector-Machine/formula.png -------------------------------------------------------------------------------- /Support-Vector-Machine/gradient.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/CihanBosnali/Machine-Learning-without-Libraries/5c2f546e0a11f1b0769622edc9c61a6eb7d317db/Support-Vector-Machine/gradient.png -------------------------------------------------------------------------------- /Support-Vector-Machine/hingeloss.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CihanBosnali/Machine-Learning-without-Libraries/5c2f546e0a11f1b0769622edc9c61a6eb7d317db/Support-Vector-Machine/hingeloss.png -------------------------------------------------------------------------------- /Support-Vector-Machine/objectivefunc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/CihanBosnali/Machine-Learning-without-Libraries/5c2f546e0a11f1b0769622edc9c61a6eb7d317db/Support-Vector-Machine/objectivefunc.png -------------------------------------------------------------------------------- /Support-Vector-Machine/svm.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | 4 | def weight_graph(w0array, w1array, number_of_weights_to_graph=100): 5 | # You cannot see anything in the graph of 10000 numbers :) 6 | epochs = len(w0array) 7 | 8 | # It will divide epochs to this number 9 | num_per_epoch = epochs/number_of_weights_to_graph 10 | 11 | w0_to_graph = [] 12 | w1_to_graph = [] 13 | epoch_to_graph = [] 14 | 15 | for i in range(number_of_weights_to_graph): 16 | epoch_to_graph.append(int(num_per_epoch*i)) 17 | w0_to_graph.append(w0array[int(num_per_epoch*i)]) 18 | w1_to_graph.append(w1array[int(num_per_epoch*i)]) 19 | 20 | plt.plot(epoch_to_graph, w0_to_graph, 'r',epoch_to_graph, w1_to_graph,'b') 21 | plt.show() 22 | 23 | 24 | def train_svm(X, Y, epochs=10000, learning_rate=1): 25 | #Initialize our SVMs weight vector with zeros (3 
values) 26 | w = np.zeros(len(X[0])) 27 | 28 | # See the change 29 | w0_per_epoch = [] 30 | w1_per_epoch = [] 31 | 32 | # Training 33 | print("starts training") 34 | for epoch in range(1, epochs): 35 | for i, x in enumerate(X): 36 | # It there is an error 37 | if (Y[i] * np.dot(X[i], w)) < 1: 38 | w = w + learning_rate * ((X[i] * Y[i]) + (-2 * (1/epochs) * w)) 39 | else: 40 | w = w + learning_rate * (-2 * (1/epochs) * w) 41 | w0_per_epoch.append(w[0]) 42 | w1_per_epoch.append(w[1]) 43 | 44 | weight_graph(w0_per_epoch, w1_per_epoch) 45 | return w 46 | 47 | def predict(X, w): 48 | Y = np.dot(X, w) 49 | return Y 50 | 51 | def show_svm_graph(X, Y, w): 52 | # For every point mark point with - if label is -1 and mark with + if label is 1 53 | for i in range(len(X)): 54 | if Y[i] == -1: 55 | plt.scatter(X[i][0], X[i][1], s=120, marker='_', linewidths=2) 56 | else: 57 | plt.scatter(X[i][0], X[i][1], s=120, marker='+', linewidths=2) 58 | 59 | # Print the hyperplane calculated by svm_sgd() 60 | x2=[w[0]*0.65,w[1],-w[1],w[0]] 61 | x3=[w[0]*0.65,w[1],w[1],-w[0]] 62 | 63 | x2x3 =np.array([x2,x3]) 64 | X,Y,U,V = zip(*x2x3) 65 | ax = plt.gca() 66 | ax.quiver(X,Y,U,V,scale=1, color='blue') 67 | plt.show() 68 | 69 | # TEST 70 | # Input data - [X coordinate, Y coordinate] 71 | X = np.array([[1.6,0.3], [1.8,0.5], [2.0,0.7], [2.2,0.4], [2.4,0.6], [2.3,0.5], [2.1,0.5], 72 | [1.7,1.7], [2.5,1.0], [1.0,3.0], [2.0,1.5], [1.5,1.5], [1.5,2.0], [1.0,2.5], 73 | [1.6,1.6], [2.4,0.9], [0.9,2.9], [1.9,1.4], [1.0,1.4], [1.4,1.9], [0.9,2.4], 74 | [1.5,1.7], [2.3,1.1], [0.4,1.0], [1.0,0.7], [1.2,1.5], [1.2,1.0], [1.0,1.1], 75 | [1.0,1.7], [1.3,1.1], [0.7,1.0], [0.4,0.7], [0.2,1.5], [0.2,1.0], [0.4,1.1], 76 | [1.0,0.5], [1.3,0.1], [0.7,0.3], [0.4,0.4], [0.2,0.5], [0.2,0.1], [0.4,0.1], 77 | [1.0,2.4], [1.3,2.1], [0.7,2.0], [0.4,2.7], [0.2,2.5], [0.2,2.0], [0.4,2.1], 78 | [3.4,2.0], [3.5,2.1], [3.6,2.3], [3.4,2.4], [3.5,2.5], [3.1,2.6], [3.3,2.7], 79 | [2.0,3.1], [3.5,1.0], [4.0,1.5], [3.0,3.0], 
[3.0,2.0], [2.5,2.5], [3.3,1.5], 80 | [3.9,2.5], [3.9,2.0], [3.8,3.0], [3.8,2.9], [3.9,2.7], [3.9,2.5], [3.9,2.7], 81 | [2.1,3.1], [3.6,1.1], [3.8,1.7], [3.2,3.1], [2.9,2.1], [2.6,2.4], [3.2,1.4], 82 | [4.0,0.1], [3.9,0.2], [3.9,0.3], [3.7,0.5], [3.9,0.7], [3.9,0.4], [3.7,0.4]]) 83 | 84 | # Labels (1 or -1) 85 | Y = np.array([-1, -1, -1, -1, -1, -1, -1, 86 | -1, -1, -1, -1, -1, -1, -1, 87 | -1, -1, -1, -1, -1, -1, -1, 88 | -1, -1, -1, -1, -1, -1, -1, 89 | -1, -1, -1, -1, -1, -1, -1, 90 | -1, -1, -1, -1, -1, -1, -1, 91 | -1, -1, -1, -1, -1, -1, -1, 92 | 1, 1, 1, 1, 1, 1, 1, 93 | 1, 1, 1, 1, 1, 1, 1, 94 | 1, 1, 1, 1, 1, 1, 1, 95 | 1, 1, 1, 1, 1, 1, 1, 96 | 1, 1, 1, 1, 1, 1, 1]) 97 | 98 | w = train_svm(X, Y) 99 | show_svm_graph(X, Y, w) 100 | --------------------------------------------------------------------------------