├── 1 - Theano Basics ├── Exercises.ipynb ├── Theano Basics.ipynb └── spoilers │ ├── fib.py │ ├── life.py │ └── logistic.py ├── 2 - Lasagne Basics ├── Digit Recognizer.ipynb ├── Introduction to Lasagne.ipynb └── spoilers │ ├── hiddenlayer.py │ ├── logreg.py │ └── optimizer.py ├── 3 - Convolutional Networks ├── Art Style Transfer.ipynb ├── Convolutional Digit Recognizer.ipynb └── Finetuning for Image Classification.ipynb ├── 4 - Recurrent Networks ├── COCO Caption Generation.ipynb ├── COCO Preprocessing.ipynb ├── COCO RNN Training.ipynb ├── RNN Character Model - 2 Layer.ipynb ├── claims.txt.gz ├── googlenet.py ├── gru_2layer_trained.pkl └── spoilers │ └── tempsoftmax.py ├── 5 - Extending Lasagne └── Custom Layer Class.ipynb └── README.md /1 - Theano Basics/Exercises.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 32, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import numpy as np\n", 12 | "import theano\n", 13 | "import theano.tensor as T" 14 | ] 15 | }, 16 | { 17 | "cell_type": "markdown", 18 | "metadata": {}, 19 | "source": [ 20 | "Exercises\n", 21 | "=====" 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "1. Logistic function\n", 29 | "----------\n", 30 | "Create an expression for the logistic function $s(x) = \\frac{1}{1+exp(-x)}$. Plot the function and its derivative, and verify that $\\frac{ds}{dx} = s(x)(1-s(x))$." 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 59, 36 | "metadata": { 37 | "collapsed": false 38 | }, 39 | "outputs": [], 40 | "source": [ 41 | "# Uncomment and run this cell for one solution\n", 42 | "#%load spoilers/logistic.py" 43 | ] 44 | }, 45 | { 46 | "cell_type": "markdown", 47 | "metadata": {}, 48 | "source": [ 49 | "2. 
Fibonacci sequence\n", 50 | "--------\n", 51 | "Calculate the 3rd to 10th terms of the sequence, defined by the recurrence relation $F_n = F_{n-2} + F_{n-1}$, with $F_1=1$ and $F_2=1$." 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": 31, 57 | "metadata": { 58 | "collapsed": false 59 | }, 60 | "outputs": [], 61 | "source": [ 62 | "# Uncomment and run this cell for one solution\n", 63 | "#%load spoilers/fib.py" 64 | ] 65 | }, 66 | { 67 | "cell_type": "markdown", 68 | "metadata": {}, 69 | "source": [ 70 | "3. Game of Life\n", 71 | "-------\n", 72 | "Implement [Conway's Game of Life](https://en.wikipedia.org/wiki/Conway%27s_Game_of_Life) with periodic boundary conditions (wrapping borders)." 73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": 34, 78 | "metadata": { 79 | "collapsed": true, 80 | "slideshow": { 81 | "slide_type": "slide" 82 | } 83 | }, 84 | "outputs": [], 85 | "source": [ 86 | "board = theano.shared(np.zeros((100, 100), dtype='uint8'))\n", 87 | "\n", 88 | "initial = np.random.binomial(1, 0.1, size=(100, 100)).astype('uint8')\n", 89 | "board.set_value(initial)" 90 | ] 91 | }, 92 | { 93 | "cell_type": "code", 94 | "execution_count": null, 95 | "metadata": { 96 | "collapsed": true 97 | }, 98 | "outputs": [], 99 | "source": [ 100 | "# Create a function f that updates board with new values and return the current state\n", 101 | "# Uncomment the line below and run for a solution\n", 102 | "#%load spoilers/life.py" 103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": 44, 108 | "metadata": { 109 | "collapsed": false, 110 | "slideshow": { 111 | "slide_type": "slide" 112 | } 113 | }, 114 | "outputs": [ 115 | { 116 | "data": { 117 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAgwAAAINCAYAAABS2ieqAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAHrNJREFUeJzt3X+sdHddJ/D3h+02RdoF2i5mF23oQhCF4vI8JGqi2Epc\nEBo3ZUEF1LiEYLIa3Y1EGwWqsLr1B0vXgJD9w6qb3bpIQYXFEroBhc1C8BbSqg0tuCwWDMjzdLFg\nsVC++8fMA7fTee733Lkzd87MvF7JybTnx8x3zpw59/18z2e+p1prAQA4yEPW3QAAYPwEBgCgS2AA\nALoEBgCgS2AAALoEBgCgS2AAALoEBgCgS2AAALpWFhiq6vyquq6qPlFV91bVB6vq+1f1egDA6pyz\nwud+c5KnJvmZJHckeWGSG6rqIa21G1b4ugDAktUq7iVRVc9K8rYkz2+t/fd989+R5IlJLmmtfXnp\nLwwArMSqehiuSnJPkt+bmX99kv+W5FuS/O/9C6rqoiTPSPKxJF9YUbsAgAc6L8ljkryjtXbqbCut\nKjA8Kcntc3oRbps+PjEzgSGTsPBfV9QeAOBgL8zkH/VzrSowXJTkI3Pmn963fNbHVtSWnbO3t9dd\n5+TJk8fQEgA2yMcOWrjKosfDchliSU6cOLHuJgCweQ78O7yqn1WeyvxehAv3LQcANsSqAsOtSb6x\nqmaf/7Lp45+t6HUBOEBr7UETDLGqwPCWJOcnee7M/B9J8okk71/R6wIAK7CSGobW2k1V9c4kr6+q\nf5Tko0men+RfJHlhE2kBYKOssujxOUl+MckrM6lduD3JD7TW3rjC1wQAVmAlIz0uoqpOJOn/HpCu\nIZ9pVR1DS4CxmXd+cD5g6mRr7ZazLRzTzypZEl9+toE/bAdb5v6xrxnC7a0BgC6BAQDoEhgAgC41\nDMDaDb2GPrveNl5nH1qIvo3vnXHTwwAAdAkMAECXwAAAdAkMAEDXRhY9GsnwYAZhYcwcn6uzzHOj\n4ktm6WEAALoEBgCgS2AAALoEBgCga/RFj4sWSG1jYdUyb0W+jfuHzTBkBMfDbLurfIc5bnoYAIAu\ngQEA6BIYAIAugQEA6Bp90eMQu1z8s8xR2xSesS5jOaYWLSxeZvvHsi+GFqfuwi3HmdDDAAB0CQwA\nQJfAAAB0CQwAQNfoix6HFN7sSpHNUUbIG/Jc8yxzdEnYBkO/h5t+Xhr6no77HKE4e330MAAAXQID\nANAlMAAAXaOvYZjHtamDLbp/1Cuw62a/O0f5TmxjXQO7TQ8DANAlMAAAXQIDANAlMAAAXRtZ9MhX\nrWMwJ9gVR/l+Dfk+bdogRGMo5BzLvthFehgAgC6BAQDoEhgAgC6BAQDoUvQIcAi7UnS3K++T4fQw\nAABdAgMA0CUwAABdAgMA0KXocQspVoI+oxbC4ehhAAC6BAYAoEtgAAC6BAYAoEvRI8DUbCGkokT4\nKj0MAECXwAAAdAkMAECXwAAAdCl6BLbeoqM6Lns0SEWVRzfvM5nHvl0+PQwAQJfAAAB0CQwAQJca\nBmDrzbuePeRa+DLrFY76fIu85jZcx1/0PY3hbqTbRg8DANAlMAAAXQIDANAlMAAAXYoe2QirLlBj\newwd2GeeTT+GZtu/DYV/Q97TkO04Oj0MAECXwAAAdAkMAECXwAAAdCl6BDgmyxyJcdHizk0vhNyk\ntm4bPQwAQJfAAAB0CQwAQJfAAAB0KXpkIyh0YqhlFhIe5bkW3XZoUeKQ59/0AkfGRQ8DANAlMAAA\nXQIDANAlMAAAXYoega0ytNBvDAWBy27DMos2YZYeBgCgS2AAALoEBgCgSw0DsFXGWq8wz9C2Dt12\nDO+J7aWHAQDoGhwYqurpVfXbVXVHVX2+qu6qqt+vqhNz1j1RVTdX1T1VdXdV3VhVly636QDAcTlM\nD8OPJrkkyWuSfE+Sn0zyqCTvq6orzqxUVU9I8u5MLnc8L8mLk
jw+yXuq6uLlNBsAOE51iOtlj2qt\nfXpm3sOSfCTJn7XWvns6741JvjPJY1trn5vOuyTJnUle01q7+izPfyLJ3qJvBOBsxlrDMM9Rahjg\niE621m4528LBPQyzYWE67/NJbk/ydUlSVeckuTLJjWfCwnS9jyd5V5KrhrcbYDmq6kHTWM1r6ya1\nn+11pKLHqnp4khNJ/nw667FJzkty65zVb0vyuKo69yivCQAcv6P+SuJ1SR6a5Ben/3/R9PH0nHVP\nJ6kkjzziawIAx2zhcRiq6lVJXpDkx1trH1xekwCAsVmoh6Gqrknyc0l+trX2G/sWnZo+XjhnswuT\ntCR3L/KaAMD6HLqHYRoWrklyTWvt2pnFH01yb5Inz9n0siR3ttbuO8zrbVJ1M7CbnKfYBYfqYaiq\nl2cSFl7VWnvV7PLW2peSvDXJc6rq/H3bXZLkiiRvPlpzAYB1GNzDUFU/leQXktyU5O1V9a37l7fW\n3jf9z2uSfCDJ26rq2kyKIl+Z5NNJXr2MRgMAx+swlySuzKQG4ZnTab+W5B8kSWvtw1V1eZJfTvKm\nJF9K8j+TvLS1dioAwMYZHBhaa1f01/rKurck+e6FWgQAjI7bW8OOMOTwcgwtcFQIybKt+zvs9tYA\nQJfAAAB0CQwAQJfAAAB0bWTR42zhh0IiYBWOUrg4pBDSuYvDGHq8rOo408MAAHQJDABAl8AAAHSN\nvobBNT5Yjm38Lq16IJujDMhk4Ca2jR4GAKBLYAAAugQGAKBLYAAAukZf9DjPkEInxUWwfRYdkGaZ\nBYgKHFkXd6sEAEZPYAAAugQGAKBLYAAAukYfGFprD5pmVdWDJmD7zH7P550f5k2rPkc4B3Ec5h1n\nix57+78fe3t7g7YZfWAAANZPYAAAugQGAKBLYAAAukY30uPe3l5OnDhx4DoKioDEuQAWtch3Rw8D\nANAlMAAAXQIDANAlMAAAXaMrejx58uS6mwAAzNDDAAB0CQwAQJfAAAB0CQwAQJfAAAB0CQwAQJfA\nAAB0CQwAQNfoBm4CgFVorXXXcQfUs9PDAAB0CQwAQJfAAAB0CQwAQJeiRwC2zrwCxyEFjYtutwv0\nMAAAXQIDANAlMAAAXQIDANA1+qJHI3MBcFjz/i5s0t+TMRZf6mEAALoEBgCgS2AAALoEBgCga3SB\nYW9vL621r0xV1Z32r39mAoD9hvw9GbN1/60bXWAAAMZHYAAAugQGAKBLYAAAukY/0uM8YyhqHNqG\nsRfRAOyy2XP5WM7ZY2nHfnoYAIAugQEA6BIYAICujQwMmz74xjrMG/DDYFfAthpyzpt33tuVc+P+\n97e3tzdom40MDADA8RIYAIAugQEA6BIYAICurR24adWFj2MurJy3f4a0d9HtANZp1QPpbeO5cZH2\n62EAALoEBgCgS2AAALoEBgCga/RFj4sWOG5jkcpQQ/fHkO0AxmTZ5/Z1nBs39XyshwEA6BIYAIAu\ngQEA6BIYAICu0Rc9zjOkGEQh5APtyvsEttuiRd1Hef6jGPJ3Z1Nuoa2HAQDoEhgAgC6BAQDoEhgA\ngK6NLHpcpl0uhATYBmMuch/Stk35m3OkHoaqenFVfbmq7pmz7ERV3VxV91TV3VV1Y1VdepTXAwDW\nY+HAUFWPTvJrST6ZpM0se0KSd2fSg/G8JC9K8vgk76mqixd9TQBgPY7Sw/CGJO9K8s4ks/0pr0xy\nb5IrW2s3tdbekuTZSf5xkpce4TUBgDVYKDBU1Q8m+Y4kP5aZsFBV5yS5MsmNrbXPnZnfWvt4JgHj\nqoOe++TJk6mqr0zztNYeMA1Z52zr7X+tg14TgM0x5nP7WNvVc+jAUFVfm+S6JFe31j45Z5XHJjkv\nya1zlt2W5HFVde5hXxcAWJ9Fehhel+QvWmtvOMvyi6aPp+csO51Jj8QjF3hdAGBNDvWzyqp6biaX\nG755Nc0BAMZocA9DVZ2f5
LVJfj3Jp6rqEVX1iCTnTpc/vKoeluTUdJML5zzNhZn8ouLuoa/7vd/7\nvbnhhhuGrg4ArEANvUtWVT0myV92Vvv9TH5G+bdJfru19m9mnuOmJI9prT1hzvOfSLI3qDEAwLKd\nbK3dcraFh7kk8ddJrsgDx1yoJFcn+c4kz0zymdba/VX11iTPqaqfPvNLiaq6ZLr9qw/5BgCANRsc\nGFprf5/kj2fnV9W/TnJ/a+1P9s2+JskHkrytqq5N8tBMxmb4dAQGANg4y7j5VMvMSI+ttQ8nuTzJ\nF5O8Kcn1Se5I8rTW2qnZJwAAxm1wDcOqqWEAgLU6sIbB7a0BgC6BAQDoEhgAgC6BAQDoEhgAgK5D\n3UuC5Zj3y5RNusUpALtHDwMA0CUwAABdAgMA0CUwAABdih4HGFqkOJZhthd1lPYr2gTYbnoYAIAu\ngQEA6BIYAIAuNQxzzF7L35Xr87vyPgE4PD0MAECXwAAAdAkMAECXwAAAdO180eM67hy56PMPHVhJ\n8SIAy6aHAQDoEhgAgC6BAQDoEhgAgK6dL3rcpAJHxYwArIseBgCgS2AAALoEBgCgS2AAALp2vuhx\nnkVvbz1vvUWLFxU4AjAmehgAgC6BAQDoEhgAgC6BAQDoUvQ4x5CCw1WPxDj0+Y0ICcBx0MMAAHQJ\nDABAl8AAAHQJDABAl6LHBa2jsHDMBY6Ljo4JwIPNO9/Pc5znWj0MAECXwAAAdAkMAECXGoYVW/T6\n0lhqAMZ4HQ1gmxylPu04a9v0MAAAXQIDANAlMAAAXQIDANCl6HEkhhQXrqOwcOgdMg3cBLCYoefZ\noduuih4GAKBLYAAAugQGAKBLYAAAuhQ9rsGiI3ON5W6VChoBVmuM51k9DABAl8AAAHQJDABAl8AA\nAHQpelyDIaN6jbHgBYDdpYcBAOgSGACALoEBAOgSGACALkWPIzFb5DiWUR0BINHDAAAMIDAAAF0C\nAwDQpYZhpNQrADAmehgAgC6BAQDoEhgAgC6BAQDoUvQIcEzmDcg2S8EzY6WHAQDoEhgAgC6BAQDo\nEhgAgC6BAQDoEhgAgC6BAQDoEhgAgC6BAQDoMtIjwDExiiOb7NA9DFX17VX19qo6XVV/V1V3VNXL\nZtY5UVU3V9U9VXV3Vd1YVZcur9kAwHE6VGCoqhckeXeSu5P8UJLvSfLLM+s8YbrOOUmel+RFSR6f\n5D1VdfGRWwwAHLvBlySq6tFJ/nOSN7TWfnzfoj+eWfWVSe5NcmVr7XPTbfeS3JnkpUmuPlKLAYBj\nd5gehhcn+ZrM9CjsV1XnJLkyyY1nwkKStNY+nuRdSa5asJ0AwBodJjA8LcmpJN9UVR+qqi9W1aeq\n6vVVdcF0nccmOS/JrXO2vy3J46rq3KM1GRiitfaA6bhf72wTsJkOExgeneRhSd6Y5IYkT0/yq0l+\nOMnbp+tcNH08PWf700kqySMXaikAsDaH+VnlQzLpPfj51tqvTOf9SVXdl+S6qvquJF9YdgMBgPU7\nTA/DqenjO2bm3zR9fEqSz0z/+8I521+YpGXyCwsAYIMcpofhQ0m+5YDlLclHM/mFxJPnLL8syZ2t\ntfsO8ZrAAPNqA2YHCRqyzlFec+hzLbsdwPE4TA/DjdPHZ83Mf/b08f2ttfuTvDXJc6rq/DMrVNUl\nSa5I8uZFGwoArM/gHobW2s1V9bYkr6iqhyR5f5KnJnlFkre21v7XdNVrknwgyduq6tokD81kbIZP\nJ3n1MhsPAByPww4N/X1Jrkvykkx+GfGjSf5jkueeWaG19uEklyf5YpI3Jbk+yR1JntZaOxUAYOPU\nWH4XXVUnkuytux2widQwAEtwsrV2y9kWulslsJBVBxJgXA59t0oAYPcIDABAl8AAAHQJDABAl6JH\nDm3IL2sUux2veft70V8xrLINx9EOYDX0MAAAXQIDANAlMAAAXQIDANCl6JEDLVq0ptht/ca
wv8fQ\nBmA59DAAAF0CAwDQJTAAAF0CAwDQpeiRAw0dvW/IdjzQJu3HTWorsBp6GACALoEBAOgSGACAro2s\nYTjuu/DxQPb34W3SAFib1Fbg+OhhAAC6BAYAoEtgAAC6BAYAoGt0RY97e3s5ceLEgevMFlINGVRm\n3nZwXDZpAKxNaitwfPQwAABdAgMA0CUwAABdAgMA0DW6oseTJ08eeptFi7RgnTapSHAdo0uuox3A\n2elhAAC6BAYAoEtgAAC6BAYAoGt0gWFvby+tta9MwGL2f4828btUVQ+aNv09wSYbXWAAAMZHYAAA\nugQGAKBLYAAAurZipMd5jAjHLplXADjkNvBj+Z4MHa11LO2FXaSHAQDoEhgAgC6BAQDoEhgAgK7R\nFT0CJNtZ4Ljo6JTbuC84vHUXAuthAAC6BAYAoEtgAAC6RhcYhtytcvaOdfMm2CVD7uw4bx2O17zP\nYMjE5hry92ro37BlHhv7X2tvb2/QNqMLDADA+AgMAECXwAAAdAkMAEDX6AZuWuRulYqClmPdg4Kw\nXD47lm1IQbnjrm/IPlr1+XiR59LDAAB0CQwAQJfAAAB0CQwAQNfoih6HUFRzdEMLahRCHsz+WS/7\nn22wKcexHgYAoEtgAAC6BAYAoEtgAAC6BAYYaN4taIfcVtrt1pdnyH61/1fHrbgPb8g5Yh3c3hoA\nWAmBAQDoEhgAgC6BAQDoGl1g2NvbW3sxyC4YWqynqIl1GXI8zqMQlWWbd/wc5Zg67uLRZR3/owsM\nAMD4CAwAQJfAAAB0jf5ulZtyF69tYL8ezN08x8e+ZtctWo+wyHdHDwMA0CUwAABdAgMA0CUwAABd\noy96VNT0VUOLW8awzzaprUex6e1fh3UcG0MLVlfZhm21C/txzO0f0rZlDVSmhwEA6BIYAIAugQEA\n6BIYAICu0RU9njx5ct1NGIWjjCC4jtEHZ19zzG1lfBb9zI9y/CxaLOb4fCD7Y/yW9Rkdqoehqp5a\nVX9QVZ+sqs9X1e1V9fKqeujMeieq6uaquqeq7q6qG6vq0qW0GAA4doMDQ1VdluS9Sb4+yU8keXaS\n303yiiQ37FvvCUnenUnvxfOSvCjJ45O8p6ouXlbDAYDjc5hLEj+Q5Nwkz22t/eV03rur6p8keUlV\nPby19tkkr0xyb5IrW2ufS5Kq2ktyZ5KXJrl6aa0HAI7FYS5JfGH6+NmZ+Z9Ncn+S+6rqnCRXJrnx\nTFhIktbax5O8K8lVR2grALAmhwkM1yf5mySvr6pLq+qCqroyyUuSvK61dm+SxyY5L8mtc7a/Lcnj\nqurcozZ6F1TVg6bj2HZRs6/XWhs0raOtrMa8z3eeo3zmQ46foe0Y8vxHeZ+wbQZfkmit3VVVlyf5\nwyQf3bfoP7XW/t30vy+aPp6e8xSnk1SSRyb51OGbCgCsy+DAUFXfkOTmTMLCT2fS2/CtSV5WVRe0\n1l68miYCAOt2mKLHX8rkEsYzppcfkuS9VfWZJL9ZVb+Tr/YcXDhn+wuTtCR3L9pYAGA9DlPD8MQk\nf7EvLJzxp/uWfySTX0g8ec72lyW5s7V236FbCQCs1WECw18leVJVPWxm/rdNH+9qrd2f5K1JnlNV\n559ZoaouSXJFkjcfpbEMM6Qoa9WFW/OK0RQ4brfj+HyHFDgus2B4yDqOY3bFYQLDa5JcnOSdVfW8\nqvquqvrZJK9O8udJ/mi63jVJvibJ26rqmVV1VZL/keTT03UBgA0zODC01t6e5PJMxl24LpOehB9K\n8oYkT2utfWm63oen630xyZsy+TnmHdN1Ti2x7QDAMamx/Ia4qk4k2Vt3O7bBkBvmuKkO22DVx7Hv\nCTvmZGvtlrMtHN3dKlmNIcHQyZF1WfTYO8rxOeQ74fiHrzrU3SoBgN0kMAAAXQIDANAlMAAAXYoe\nR2K2AGtesdXQX7QM2XZoMZdCyO1xlONnHZZ57DmO4ej
0MAAAXQIDANAlMAAAXQIDANCl6HHDLVoc\nuWkFcDCUAkdYDT0MAECXwAAAdAkMAECXwAAAdCl6HOA4CgRntz1K4daihZAKw7bbmD/fo4w8ytkd\nZX+N+XhhPfQwAABdAgMA0CUwAABdAgMA0KXocaSWffvpIc9nhDzGbtHjeFf5/rJMehgAgC6BAQDo\nEhgAgC41DAsaOjjSOq4hLnoN1/VOttVYvpuwyfQwAABdAgMA0CUwAABdAgMA0KXocYCj3CVymY5S\nuDW7rYKvzXAcd0rdNoverRU4mB4GAKBLYAAAugQGAKBLYAAAuhQ9jtSyR6ZTFDd+yyxqPcy2u8C+\ngKPTwwAAdAkMAECXwAAAdAkMAECXoseROspodQq8NpPP/IHWcZt2t4aHs9PDAAB0CQwAQJfAAAB0\nCQwAQJeixw2isGr37PJnvo73vsv7G3r0MAAAXQIDANAlMAAAXQIDANCl6BHYem7/DUenhwEA6BIY\nAIAugQEA6FLDwEba5bs4wrYa8r32nV4fPQwAQJfAAAB0CQwAQJfAAAB0KXpkI8wWQw0tfDJgD4zT\not9N3+n10cMAAHQJDABAl8AAAHQJDABAl6JHtoZiKM7GcTA+8z4T3+Fx08MAAHQJDABAl8AAAHQJ\nDABAl6JHNsJs4ZPiKNg+Qwohfc/XRw8DANAlMAAAXQIDANAlMAAAXYoe2UgKn2A3+K6Phx4GAKBL\nYAAAugQGAKBLYAAAugQGAKBLYAAAugQGAKBLYAAAugQGAKBLYAAAugQGAKBLYAAAusYUGM5bdwMA\nYIcd+Hd4TIHhMetuAADssMcctLBaa8fUjoNV1UVJnpHkY0m+sN7WAMDOOC+TsPCO1tqps600msAA\nAIzXmC5JAAAjJTAAAF0CAwDQJTAAAF2jCQxVdX5VXVdVn6iqe6vqg1X1/etu17apqqdX1W9X1R1V\n9fmququqfr+qTsxZ90RV3VxV91TV3VV1Y1Vduo52b7OqenFVfbmq7pmzzGewAlX17VX19qo6XVV/\nN/0+vGxmHft+BarqqVX1B1X1yek56PaqenlVPXRmPft/ZEYTGJK8OckPJ/n5JM9M8oEkN1TV89fZ\nqC30o0kuSfKaJN+T5CeTPCrJ+6rqijMrVdUTkrw7yTlJnpfkRUken+Q9VXXxMbd5a1XVo5P8WpJP\nJmkzy3wGK1BVL8hkv96d5Icy+R788sw69v0KVNVlSd6b5OuT/ESSZyf53SSvSHLDvvXs/zFqra19\nSvKsJF9O8v0z89+R5K4kD1l3G7dlSvKoOfMeluSvk7xz37w3JvlUkvP3zbskyd8nuXbd72NbpiRv\nTfKWJNcnuWdmmc9g+fv70Uk+l+S1nfXs+9Xs/1+cnuv/2cz8N0znP9z+H+80lh6Gq5Lck+T3ZuZf\nn+SfJvmWY2/RlmqtfXrOvM8nuT3J1yVJVZ2T5MokN7bWPrdvvY8neVcmnxdHVFU/mOQ7kvxYkppZ\n5jNYjRcn+ZrM9CjsZ9+v1JlB+T47M/+zSe5Pcp/9P15jCQxPSnJ7a+3LM/Nvmz4+8Zjbs1Oq6uFJ\nTiT58+msx2Yy8tetc1a/LcnjqurcY2reVqqqr01yXZKrW2ufnLOKz2A1npbkVJJvqqoPVdUXq+pT\nVfX6qrpguo59vzrXJ/mbJK+vqkur6oKqujLJS5K8rrV2b+z/0RpLYLgoyek580/vW87qvC7JQzPp\nLky+ur/P9plUkkceQ7u22euS/EVr7Q1nWe4zWI1HZ3IJ7o2ZXDN/epJfzaR+6u3Tdez7FWmt3ZXk\n8iRPSfLRTHoW/jDJb7XW/u10Nft/pM5ZdwNYr6p6VZIXJPnx1toH192eXVBVz82ky/Wb192WHfSQ\nTP71+vOttV+ZzvuTqrovyXVV9V1xL5uVqapvSHJzJmHhpzPpbfjWJC+rqgtaay9eZ/s42FgCw6nM\n70W4cN9ylqyqrkn
yc0l+trX2G/sWndnfFz54q1yYSTX/3Stu3laqqvOTvDbJryf5VFU9Yrro3Ony\nhyf5UnwGq3IqyeMyKaje76bp41MyKURN7PtV+KVMQtszppcfkuS9VfWZJL9ZVb+TSbFjYv+Pzlgu\nSdya5BurarY9l00f/+yY27P1pmHhmiTXtNaunVn80ST3JnnynE0vS3Jna+2+FTdxW12cyc9YX5pJ\n9+qZ6Qcy6Sq/O8l/SfKR+AxW4UOd5S2O/1V6YiaX4u6dmf+n+5Y79kdqLIHhLUnOT/Lcmfk/kuQT\nSd5/3A3aZlX18kzCwqtaa6+aXd5a+1Im/8p6zvRfxGe2uyTJFZmMmcFi/jqTfXj5vumKTP7F+4Xp\n/7+stXZ/fAarcOP08Vkz8589fXy/fb9Sf5XkSVX1sJn53zZ9vMv+H6/R3N66qt6R5KlJfiaThP/8\nTH4C9cLW2g0HbctwVfVTmRR53ZTkFzLzc77W2vum631DJoNn3ZLk2kyKIl+Z5BFJ/nk74J7pHF5V\n/VaSf9Vau2DfPJ/BClTVHyb57iT/PpN/jDw1k4GD3tla+5fTdez7FaiqZ2USBt6fyeBxpzKpYbg6\nyf9N8pTW2pfs/5Fa90AQZ6ZMumOvy2TEuy8k+WCS71t3u7ZtyuR3zPdnMkjK7HT/zLonkrwzk4Fu\n/l8m/zq7dN3vYRunTH5u9rdz5vsMlr+vz0vyHzL5A3Vfkv+TSXj4h/b9sez/70jyR5n0Hp8ZA+ZX\nkjzS/h/3NJoeBgBgvMZSwwAAjJjAAAB0CQwAQJfAAAB0CQwAQJfAAAB0CQwAQJfAAAB0CQwAQJfA\nAAB0CQwAQNf/B4i5Ftl789kaAAAAAElFTkSuQmCC\n", 118 | "text/plain": [ 119 | "" 120 | ] 121 | }, 122 | "metadata": {}, 123 | "output_type": "display_data" 124 | } 125 | ], 126 | "source": [ 127 | "# After creating your f function, run this cell to animate the output\n", 128 | "%matplotlib notebook\n", 129 | "import matplotlib.pyplot as plt\n", 130 | "\n", 131 | "from IPython import display\n", 132 | "import time\n", 133 | "\n", 134 | "for i in range(50):\n", 135 | " plt.gca().cla()\n", 136 | " current = f()\n", 137 | " plt.imshow(current, interpolation='nearest', cmap='gray')\n", 138 | " display.clear_output(wait=True)\n", 139 | " display.display(plt.gcf()) \n", 140 | " time.sleep(0.1) " 141 | ] 142 | } 143 | ], 144 | "metadata": { 145 | "kernelspec": { 146 | "display_name": "Python 2", 147 | "language": "python", 148 | "name": "python2" 149 | }, 150 | "language_info": { 151 | "codemirror_mode": { 152 | "name": "ipython", 153 | "version": 2 154 | }, 155 | "file_extension": ".py", 156 | "mimetype": "text/x-python", 157 | "name": "python", 158 | "nbconvert_exporter": "python", 159 | "pygments_lexer": "ipython2", 160 | "version": "2.7.6" 161 | } 
162 | }, 163 | "nbformat": 4, 164 | "nbformat_minor": 0 165 | } 166 | -------------------------------------------------------------------------------- /1 - Theano Basics/Theano Basics.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "slideshow": { 7 | "slide_type": "slide" 8 | } 9 | }, 10 | "source": [ 11 | "Theano \n", 12 | "===\n", 13 | "An optimizing compiler for symbolic math expressions" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": 17, 19 | "metadata": { 20 | "collapsed": false, 21 | "slideshow": { 22 | "slide_type": "fragment" 23 | } 24 | }, 25 | "outputs": [], 26 | "source": [ 27 | "import theano\n", 28 | "import theano.tensor as T" 29 | ] 30 | }, 31 | { 32 | "cell_type": "markdown", 33 | "metadata": { 34 | "slideshow": { 35 | "slide_type": "slide" 36 | } 37 | }, 38 | "source": [ 39 | "Symbolic variables\n", 40 | "==========" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": 18, 46 | "metadata": { 47 | "collapsed": false, 48 | "slideshow": { 49 | "slide_type": "fragment" 50 | } 51 | }, 52 | "outputs": [], 53 | "source": [ 54 | "x = T.scalar()" 55 | ] 56 | }, 57 | { 58 | "cell_type": "code", 59 | "execution_count": 19, 60 | "metadata": { 61 | "collapsed": false, 62 | "slideshow": { 63 | "slide_type": "fragment" 64 | } 65 | }, 66 | "outputs": [ 67 | { 68 | "data": { 69 | "text/plain": [ 70 | "" 71 | ] 72 | }, 73 | "execution_count": 19, 74 | "metadata": {}, 75 | "output_type": "execute_result" 76 | } 77 | ], 78 | "source": [ 79 | "x" 80 | ] 81 | }, 82 | { 83 | "cell_type": "markdown", 84 | "metadata": { 85 | "slideshow": { 86 | "slide_type": "slide" 87 | } 88 | }, 89 | "source": [ 90 | "Variables can be used in expressions" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 20, 96 | "metadata": { 97 | "collapsed": false, 98 | "slideshow": { 99 | "slide_type": "-" 100 | } 101 | 
}, 102 | "outputs": [], 103 | "source": [ 104 | "y = 3*(x**2) + 1" 105 | ] 106 | }, 107 | { 108 | "cell_type": "markdown", 109 | "metadata": { 110 | "slideshow": { 111 | "slide_type": "fragment" 112 | } 113 | }, 114 | "source": [ 115 | "Result is symbolic as well" 116 | ] 117 | }, 118 | { 119 | "cell_type": "code", 120 | "execution_count": 21, 121 | "metadata": { 122 | "collapsed": false, 123 | "slideshow": { 124 | "slide_type": "-" 125 | } 126 | }, 127 | "outputs": [ 128 | { 129 | "data": { 130 | "text/plain": [ 131 | "theano.tensor.var.TensorVariable" 132 | ] 133 | }, 134 | "execution_count": 21, 135 | "metadata": {}, 136 | "output_type": "execute_result" 137 | } 138 | ], 139 | "source": [ 140 | "type(y)" 141 | ] 142 | }, 143 | { 144 | "cell_type": "markdown", 145 | "metadata": { 146 | "slideshow": { 147 | "slide_type": "slide" 148 | } 149 | }, 150 | "source": [ 151 | "Investigating expressions" 152 | ] 153 | }, 154 | { 155 | "cell_type": "code", 156 | "execution_count": 22, 157 | "metadata": { 158 | "collapsed": false, 159 | "slideshow": { 160 | "slide_type": "fragment" 161 | } 162 | }, 163 | "outputs": [ 164 | { 165 | "name": "stdout", 166 | "output_type": "stream", 167 | "text": [ 168 | "Elemwise{add,no_inplace}.0\n" 169 | ] 170 | } 171 | ], 172 | "source": [ 173 | "print(y)" 174 | ] 175 | }, 176 | { 177 | "cell_type": "code", 178 | "execution_count": 23, 179 | "metadata": { 180 | "collapsed": false, 181 | "slideshow": { 182 | "slide_type": "fragment" 183 | } 184 | }, 185 | "outputs": [ 186 | { 187 | "data": { 188 | "text/plain": [ 189 | "'((TensorConstant{3} * ( ** TensorConstant{2})) + TensorConstant{1})'" 190 | ] 191 | }, 192 | "execution_count": 23, 193 | "metadata": {}, 194 | "output_type": "execute_result" 195 | } 196 | ], 197 | "source": [ 198 | "theano.pprint(y)" 199 | ] 200 | }, 201 | { 202 | "cell_type": "code", 203 | "execution_count": 24, 204 | "metadata": { 205 | "collapsed": false, 206 | "slideshow": { 207 | "slide_type": "fragment" 208 | } 209 | 
}, 210 | "outputs": [ 211 | { 212 | "name": "stdout", 213 | "output_type": "stream", 214 | "text": [ 215 | "Elemwise{add,no_inplace} [@A] '' \n", 216 | " |Elemwise{mul,no_inplace} [@B] '' \n", 217 | " | |TensorConstant{3} [@C]\n", 218 | " | |Elemwise{pow,no_inplace} [@D] '' \n", 219 | " | | [@E]\n", 220 | " | |TensorConstant{2} [@F]\n", 221 | " |TensorConstant{1} [@G]\n" 222 | ] 223 | } 224 | ], 225 | "source": [ 226 | "theano.printing.debugprint(y)" 227 | ] 228 | }, 229 | { 230 | "cell_type": "code", 231 | "execution_count": 25, 232 | "metadata": { 233 | "collapsed": false, 234 | "slideshow": { 235 | "slide_type": "slide" 236 | } 237 | }, 238 | "outputs": [ 239 | { 240 | "data": { 241 | "image/svg+xml": [ 242 | "\n", 243 | "\n", 244 | "G\n", 245 | "\n", 246 | "\n", 247 | "139670232979664\n", 248 | "\n", 249 | "Elemwise{pow,no_inplace}\n", 250 | "\n", 251 | "\n", 252 | "139670232268304\n", 253 | "\n", 254 | "Elemwise{mul,no_inplace}\n", 255 | "\n", 256 | "\n", 257 | "139670232979664->139670232268304\n", 258 | "\n", 259 | "\n", 260 | "1 TensorType(float32, scalar)\n", 261 | "\n", 262 | "\n", 263 | "139670232978704\n", 264 | "\n", 265 | "TensorType(float32, scalar)\n", 266 | "\n", 267 | "\n", 268 | "139670232978704->139670232979664\n", 269 | "\n", 270 | "\n", 271 | "0\n", 272 | "\n", 273 | "\n", 274 | "139670232978832\n", 275 | "\n", 276 | "val=2 TensorType(int8, scalar)\n", 277 | "\n", 278 | "\n", 279 | "139670232978832->139670232979664\n", 280 | "\n", 281 | "\n", 282 | "1\n", 283 | "\n", 284 | "\n", 285 | "139670232268368\n", 286 | "\n", 287 | "Elemwise{add,no_inplace}\n", 288 | "\n", 289 | "\n", 290 | "139670232268304->139670232268368\n", 291 | "\n", 292 | "\n", 293 | "0 TensorType(float32, scalar)\n", 294 | "\n", 295 | "\n", 296 | "139670232978640\n", 297 | "\n", 298 | "val=3 TensorType(int8, scalar)\n", 299 | "\n", 300 | "\n", 301 | "139670232978640->139670232268304\n", 302 | "\n", 303 | "\n", 304 | "0\n", 305 | "\n", 306 | "\n", 307 | "139670231482960\n", 308 | 
"\n", 309 | "TensorType(float32, scalar)\n", 310 | "\n", 311 | "\n", 312 | "139670232268368->139670231482960\n", 313 | "\n", 314 | "\n", 315 | "\n", 316 | "\n", 317 | "139670232978896\n", 318 | "\n", 319 | "val=1 TensorType(int8, scalar)\n", 320 | "\n", 321 | "\n", 322 | "139670232978896->139670232268368\n", 323 | "\n", 324 | "\n", 325 | "1\n", 326 | "\n", 327 | "\n", 328 | "" 329 | ], 330 | "text/plain": [ 331 | "" 332 | ] 333 | }, 334 | "execution_count": 25, 335 | "metadata": {}, 336 | "output_type": "execute_result" 337 | } 338 | ], 339 | "source": [ 340 | "from IPython.display import SVG\n", 341 | "SVG(theano.printing.pydotprint(y, return_image=True, format='svg'))" 342 | ] 343 | }, 344 | { 345 | "cell_type": "markdown", 346 | "metadata": { 347 | "slideshow": { 348 | "slide_type": "slide" 349 | } 350 | }, 351 | "source": [ 352 | "Evaluating expressions\n", 353 | "============\n", 354 | "\n", 355 | "Supply a `dict` mapping variables to values" 356 | ] 357 | }, 358 | { 359 | "cell_type": "code", 360 | "execution_count": 26, 361 | "metadata": { 362 | "collapsed": false 363 | }, 364 | "outputs": [ 365 | { 366 | "data": { 367 | "text/plain": [ 368 | "array(13.0, dtype=float32)" 369 | ] 370 | }, 371 | "execution_count": 26, 372 | "metadata": {}, 373 | "output_type": "execute_result" 374 | } 375 | ], 376 | "source": [ 377 | "y.eval({x: 2})" 378 | ] 379 | }, 380 | { 381 | "cell_type": "markdown", 382 | "metadata": { 383 | "slideshow": { 384 | "slide_type": "slide" 385 | } 386 | }, 387 | "source": [ 388 | "Or compile a function" 389 | ] 390 | }, 391 | { 392 | "cell_type": "code", 393 | "execution_count": 27, 394 | "metadata": { 395 | "collapsed": true 396 | }, 397 | "outputs": [], 398 | "source": [ 399 | "f = theano.function([x], y)" 400 | ] 401 | }, 402 | { 403 | "cell_type": "code", 404 | "execution_count": 28, 405 | "metadata": { 406 | "collapsed": false, 407 | "slideshow": { 408 | "slide_type": "fragment" 409 | } 410 | }, 411 | "outputs": [ 412 | { 413 | "data": { 
414 | "text/plain": [ 415 | "array(13.0, dtype=float32)" 416 | ] 417 | }, 418 | "execution_count": 28, 419 | "metadata": {}, 420 | "output_type": "execute_result" 421 | } 422 | ], 423 | "source": [ 424 | "f(2)" 425 | ] 426 | }, 427 | { 428 | "cell_type": "markdown", 429 | "metadata": { 430 | "slideshow": { 431 | "slide_type": "slide" 432 | } 433 | }, 434 | "source": [ 435 | "Compiled function has been transformed" 436 | ] 437 | }, 438 | { 439 | "cell_type": "code", 440 | "execution_count": 29, 441 | "metadata": { 442 | "collapsed": false, 443 | "slideshow": { 444 | "slide_type": "fragment" 445 | } 446 | }, 447 | "outputs": [ 448 | { 449 | "data": { 450 | "image/svg+xml": [ 451 | "\n", 452 | "\n", 453 | "G\n", 454 | "\n", 455 | "\n", 456 | "139670211310224\n", 457 | "\n", 458 | "GpuFromHost\n", 459 | "\n", 460 | "\n", 461 | "139670211421072\n", 462 | "\n", 463 | "GpuElemwise{Composite{(i0 + (i1 * sqr(i2)))}}[(0, 2)]\n", 464 | "\n", 465 | "\n", 466 | "139670211310224->139670211421072\n", 467 | "\n", 468 | "\n", 469 | "2 CudaNdarrayType(float32, scalar)\n", 470 | "\n", 471 | "\n", 472 | "139670210791760\n", 473 | "\n", 474 | "TensorType(float32, scalar)\n", 475 | "\n", 476 | "\n", 477 | "139670210791760->139670211310224\n", 478 | "\n", 479 | "\n", 480 | "\n", 481 | "\n", 482 | "139670203407696\n", 483 | "\n", 484 | "HostFromGpu\n", 485 | "\n", 486 | "\n", 487 | "139670211421072->139670203407696\n", 488 | "\n", 489 | "\n", 490 | "CudaNdarrayType(float32, scalar)\n", 491 | "\n", 492 | "\n", 493 | "139670210995600\n", 494 | "\n", 495 | "val=1.0 CudaNdarrayType(float32, scalar)\n", 496 | "\n", 497 | "\n", 498 | "139670210995600->139670211421072\n", 499 | "\n", 500 | "\n", 501 | "0\n", 502 | "\n", 503 | "\n", 504 | "139670211312528\n", 505 | "\n", 506 | "val=3.0 CudaNdarrayType(float32, scalar)\n", 507 | "\n", 508 | "\n", 509 | "139670211312528->139670211421072\n", 510 | "\n", 511 | "\n", 512 | "1\n", 513 | "\n", 514 | "\n", 515 | "139670203405776\n", 516 | "\n", 517 | 
"TensorType(float32, scalar)\n", 518 | "\n", 519 | "\n", 520 | "139670203407696->139670203405776\n", 521 | "\n", 522 | "\n", 523 | "\n", 524 | "\n", 525 | "" 526 | ], 527 | "text/plain": [ 528 | "" 529 | ] 530 | }, 531 | "execution_count": 29, 532 | "metadata": {}, 533 | "output_type": "execute_result" 534 | } 535 | ], 536 | "source": [ 537 | "SVG(theano.printing.pydotprint(f, return_image=True, format='svg'))" 538 | ] 539 | }, 540 | { 541 | "cell_type": "markdown", 542 | "metadata": { 543 | "slideshow": { 544 | "slide_type": "slide" 545 | } 546 | }, 547 | "source": [ 548 | "Other tensor types\n", 549 | "==========" 550 | ] 551 | }, 552 | { 553 | "cell_type": "code", 554 | "execution_count": 30, 555 | "metadata": { 556 | "collapsed": true, 557 | "slideshow": { 558 | "slide_type": "-" 559 | } 560 | }, 561 | "outputs": [], 562 | "source": [ 563 | "X = T.vector()\n", 564 | "X = T.matrix()\n", 565 | "X = T.tensor3()\n", 566 | "X = T.tensor4()" 567 | ] 568 | }, 569 | { 570 | "cell_type": "markdown", 571 | "metadata": { 572 | "slideshow": { 573 | "slide_type": "slide" 574 | } 575 | }, 576 | "source": [ 577 | "Numpy style indexing\n", 578 | "===========" 579 | ] 580 | }, 581 | { 582 | "cell_type": "code", 583 | "execution_count": 31, 584 | "metadata": { 585 | "collapsed": true, 586 | "slideshow": { 587 | "slide_type": "-" 588 | } 589 | }, 590 | "outputs": [], 591 | "source": [ 592 | "X = T.vector()" 593 | ] 594 | }, 595 | { 596 | "cell_type": "code", 597 | "execution_count": 32, 598 | "metadata": { 599 | "collapsed": false 600 | }, 601 | "outputs": [ 602 | { 603 | "data": { 604 | "text/plain": [ 605 | "Subtensor{int64:int64:int64}.0" 606 | ] 607 | }, 608 | "execution_count": 32, 609 | "metadata": {}, 610 | "output_type": "execute_result" 611 | } 612 | ], 613 | "source": [ 614 | "X[1:-1:2]" 615 | ] 616 | }, 617 | { 618 | "cell_type": "code", 619 | "execution_count": 33, 620 | "metadata": { 621 | "collapsed": false, 622 | "slideshow": { 623 | "slide_type": "fragment" 624 | 
} 625 | }, 626 | "outputs": [ 627 | { 628 | "data": { 629 | "text/plain": [ 630 | "AdvancedSubtensor1.0" 631 | ] 632 | }, 633 | "execution_count": 33, 634 | "metadata": {}, 635 | "output_type": "execute_result" 636 | } 637 | ], 638 | "source": [ 639 | "X[[1,2,3]]" 640 | ] 641 | }, 642 | { 643 | "cell_type": "markdown", 644 | "metadata": { 645 | "slideshow": { 646 | "slide_type": "slide" 647 | } 648 | }, 649 | "source": [ 650 | "Many functions/operations are available through `theano.tensor` or variable methods" 651 | ] 652 | }, 653 | { 654 | "cell_type": "code", 655 | "execution_count": 34, 656 | "metadata": { 657 | "collapsed": false 658 | }, 659 | "outputs": [], 660 | "source": [ 661 | "y = X.argmax()" 662 | ] 663 | }, 664 | { 665 | "cell_type": "code", 666 | "execution_count": 35, 667 | "metadata": { 668 | "collapsed": true 669 | }, 670 | "outputs": [], 671 | "source": [ 672 | "y = T.cosh(X)" 673 | ] 674 | }, 675 | { 676 | "cell_type": "code", 677 | "execution_count": 36, 678 | "metadata": { 679 | "collapsed": false 680 | }, 681 | "outputs": [], 682 | "source": [ 683 | "y = T.outer(X, X)" 684 | ] 685 | }, 686 | { 687 | "cell_type": "markdown", 688 | "metadata": {}, 689 | "source": [ 690 | "But don't try to use numpy functions on Theano variables. Results may vary!" 691 | ] 692 | }, 693 | { 694 | "cell_type": "markdown", 695 | "metadata": { 696 | "slideshow": { 697 | "slide_type": "slide" 698 | } 699 | }, 700 | "source": [ 701 | "Automatic differention\n", 702 | "============\n", 703 | "- Gradients are free!" 
704 | ] 705 | }, 706 | { 707 | "cell_type": "code", 708 | "execution_count": 37, 709 | "metadata": { 710 | "collapsed": false 711 | }, 712 | "outputs": [], 713 | "source": [ 714 | "x = T.scalar()\n", 715 | "y = T.log(x)" 716 | ] 717 | }, 718 | { 719 | "cell_type": "code", 720 | "execution_count": 38, 721 | "metadata": { 722 | "collapsed": false, 723 | "slideshow": { 724 | "slide_type": "fragment" 725 | } 726 | }, 727 | "outputs": [ 728 | { 729 | "data": { 730 | "text/plain": [ 731 | "array(0.5, dtype=float32)" 732 | ] 733 | }, 734 | "execution_count": 38, 735 | "metadata": {}, 736 | "output_type": "execute_result" 737 | } 738 | ], 739 | "source": [ 740 | "gradient = T.grad(y, x)\n", 741 | "gradient.eval({x: 2})" 742 | ] 743 | }, 744 | { 745 | "cell_type": "markdown", 746 | "metadata": { 747 | "slideshow": { 748 | "slide_type": "slide" 749 | } 750 | }, 751 | "source": [ 752 | "# Shared Variables\n", 753 | "\n", 754 | "- Symbolic + Storage" 755 | ] 756 | }, 757 | { 758 | "cell_type": "code", 759 | "execution_count": 39, 760 | "metadata": { 761 | "collapsed": false 762 | }, 763 | "outputs": [], 764 | "source": [ 765 | "import numpy as np\n", 766 | "x = theano.shared(np.zeros((2, 3), dtype=theano.config.floatX))" 767 | ] 768 | }, 769 | { 770 | "cell_type": "code", 771 | "execution_count": 40, 772 | "metadata": { 773 | "collapsed": false 774 | }, 775 | "outputs": [ 776 | { 777 | "data": { 778 | "text/plain": [ 779 | "" 780 | ] 781 | }, 782 | "execution_count": 40, 783 | "metadata": {}, 784 | "output_type": "execute_result" 785 | } 786 | ], 787 | "source": [ 788 | "x" 789 | ] 790 | }, 791 | { 792 | "cell_type": "markdown", 793 | "metadata": { 794 | "slideshow": { 795 | "slide_type": "slide" 796 | } 797 | }, 798 | "source": [ 799 | "We can get and set the variable's value" 800 | ] 801 | }, 802 | { 803 | "cell_type": "code", 804 | "execution_count": 41, 805 | "metadata": { 806 | "collapsed": false, 807 | "slideshow": { 808 | "slide_type": "-" 809 | } 810 | }, 811 | 
"outputs": [ 812 | { 813 | "name": "stdout", 814 | "output_type": "stream", 815 | "text": [ 816 | "(2, 3)\n", 817 | "[[ 0. 0. 0.]\n", 818 | " [ 0. 0. 0.]]\n" 819 | ] 820 | } 821 | ], 822 | "source": [ 823 | "values = x.get_value()\n", 824 | "print(values.shape)\n", 825 | "print(values)" 826 | ] 827 | }, 828 | { 829 | "cell_type": "code", 830 | "execution_count": 42, 831 | "metadata": { 832 | "collapsed": false 833 | }, 834 | "outputs": [], 835 | "source": [ 836 | "x.set_value(values)" 837 | ] 838 | }, 839 | { 840 | "cell_type": "markdown", 841 | "metadata": { 842 | "slideshow": { 843 | "slide_type": "slide" 844 | } 845 | }, 846 | "source": [ 847 | "Shared variables can be used in expressions as well" 848 | ] 849 | }, 850 | { 851 | "cell_type": "code", 852 | "execution_count": 43, 853 | "metadata": { 854 | "collapsed": false, 855 | "slideshow": { 856 | "slide_type": "-" 857 | } 858 | }, 859 | "outputs": [ 860 | { 861 | "data": { 862 | "text/plain": [ 863 | "Elemwise{pow,no_inplace}.0" 864 | ] 865 | }, 866 | "execution_count": 43, 867 | "metadata": {}, 868 | "output_type": "execute_result" 869 | } 870 | ], 871 | "source": [ 872 | "(x + 2) ** 2" 873 | ] 874 | }, 875 | { 876 | "cell_type": "markdown", 877 | "metadata": { 878 | "slideshow": { 879 | "slide_type": "fragment" 880 | } 881 | }, 882 | "source": [ 883 | "Their value is used as input when evaluating" 884 | ] 885 | }, 886 | { 887 | "cell_type": "code", 888 | "execution_count": 44, 889 | "metadata": { 890 | "collapsed": false 891 | }, 892 | "outputs": [ 893 | { 894 | "data": { 895 | "text/plain": [ 896 | "array([[ 4., 4., 4.],\n", 897 | " [ 4., 4., 4.]], dtype=float32)" 898 | ] 899 | }, 900 | "execution_count": 44, 901 | "metadata": {}, 902 | "output_type": "execute_result" 903 | } 904 | ], 905 | "source": [ 906 | "((x + 2) ** 2).eval()" 907 | ] 908 | }, 909 | { 910 | "cell_type": "code", 911 | "execution_count": 45, 912 | "metadata": { 913 | "collapsed": false 914 | }, 915 | "outputs": [ 916 | { 917 | "data": { 
918 | "text/plain": [ 919 | "array([[ 4., 4., 4.],\n", 920 | " [ 4., 4., 4.]], dtype=float32)" 921 | ] 922 | }, 923 | "execution_count": 45, 924 | "metadata": {}, 925 | "output_type": "execute_result" 926 | } 927 | ], 928 | "source": [ 929 | "theano.function([], (x + 2) ** 2)()" 930 | ] 931 | }, 932 | { 933 | "cell_type": "markdown", 934 | "metadata": { 935 | "slideshow": { 936 | "slide_type": "slide" 937 | } 938 | }, 939 | "source": [ 940 | "# Updates\n", 941 | "\n", 942 | "- Store results of function evalution\n", 943 | "- `dict` mapping shared variables to new values" 944 | ] 945 | }, 946 | { 947 | "cell_type": "code", 948 | "execution_count": 46, 949 | "metadata": { 950 | "collapsed": false, 951 | "slideshow": { 952 | "slide_type": "slide" 953 | } 954 | }, 955 | "outputs": [], 956 | "source": [ 957 | "count = theano.shared(0)\n", 958 | "new_count = count + 1\n", 959 | "updates = {count: new_count}\n", 960 | "\n", 961 | "f = theano.function([], count, updates=updates)" 962 | ] 963 | }, 964 | { 965 | "cell_type": "code", 966 | "execution_count": 47, 967 | "metadata": { 968 | "collapsed": false, 969 | "slideshow": { 970 | "slide_type": "fragment" 971 | } 972 | }, 973 | "outputs": [ 974 | { 975 | "data": { 976 | "text/plain": [ 977 | "array(0)" 978 | ] 979 | }, 980 | "execution_count": 47, 981 | "metadata": {}, 982 | "output_type": "execute_result" 983 | } 984 | ], 985 | "source": [ 986 | "f()" 987 | ] 988 | }, 989 | { 990 | "cell_type": "code", 991 | "execution_count": 48, 992 | "metadata": { 993 | "collapsed": false, 994 | "slideshow": { 995 | "slide_type": "fragment" 996 | } 997 | }, 998 | "outputs": [ 999 | { 1000 | "data": { 1001 | "text/plain": [ 1002 | "array(1)" 1003 | ] 1004 | }, 1005 | "execution_count": 48, 1006 | "metadata": {}, 1007 | "output_type": "execute_result" 1008 | } 1009 | ], 1010 | "source": [ 1011 | "f()" 1012 | ] 1013 | }, 1014 | { 1015 | "cell_type": "code", 1016 | "execution_count": 49, 1017 | "metadata": { 1018 | "collapsed": false, 1019 
| "slideshow": { 1020 | "slide_type": "fragment" 1021 | } 1022 | }, 1023 | "outputs": [ 1024 | { 1025 | "data": { 1026 | "text/plain": [ 1027 | "array(2)" 1028 | ] 1029 | }, 1030 | "execution_count": 49, 1031 | "metadata": {}, 1032 | "output_type": "execute_result" 1033 | } 1034 | ], 1035 | "source": [ 1036 | "f()" 1037 | ] 1038 | } 1039 | ], 1040 | "metadata": { 1041 | "celltoolbar": "Slideshow", 1042 | "kernelspec": { 1043 | "display_name": "Python 2", 1044 | "language": "python", 1045 | "name": "python2" 1046 | }, 1047 | "language_info": { 1048 | "codemirror_mode": { 1049 | "name": "ipython", 1050 | "version": 2 1051 | }, 1052 | "file_extension": ".py", 1053 | "mimetype": "text/x-python", 1054 | "name": "python", 1055 | "nbconvert_exporter": "python", 1056 | "pygments_lexer": "ipython2", 1057 | "version": "2.7.6" 1058 | } 1059 | }, 1060 | "nbformat": 4, 1061 | "nbformat_minor": 0 1062 | } 1063 | -------------------------------------------------------------------------------- /1 - Theano Basics/spoilers/fib.py: -------------------------------------------------------------------------------- 1 | a = theano.shared(1) 2 | b = theano.shared(1) 3 | f = a + b 4 | updates = {a: b, b: f} 5 | next_term = theano.function([], f, updates=updates) 6 | 7 | [next_term() for _ in range(3, 10)] -------------------------------------------------------------------------------- /1 - Theano Basics/spoilers/life.py: -------------------------------------------------------------------------------- 1 | neighbors = [] 2 | neighbors.append(T.roll(board, 1, 0)) 3 | neighbors.append(T.roll(board, 1, 1)) 4 | neighbors.append(T.roll(board, -1, 0)) 5 | neighbors.append(T.roll(board, -1, 1)) 6 | neighbors.append(T.roll(T.roll(board, 1, 1), 1, 0)) 7 | neighbors.append(T.roll(T.roll(board, 1, 1), -1, 0)) 8 | neighbors.append(T.roll(T.roll(board, -1, 1), -1, 0)) 9 | neighbors.append(T.roll(T.roll(board, -1, 1), 1, 0)) 10 | alive_neighbors = sum(neighbors) 11 | 12 | born = T.eq(board, 0) * 
T.eq(alive_neighbors, 3) 13 | survived = T.eq(board, 1) * (T.eq(alive_neighbors, 2) + T.eq(alive_neighbors, 3)) 14 | new_board = T.cast(survived + born, 'uint8') 15 | updates = {board: new_board} 16 | f = theano.function([], board, updates=updates) -------------------------------------------------------------------------------- /1 - Theano Basics/spoilers/logistic.py: -------------------------------------------------------------------------------- 1 | x = T.vector() 2 | s = 1/(1+T.exp(-x)) 3 | ds = T.grad(T.sum(s), x) # Need sum to make s scalar 4 | 5 | import matplotlib.pyplot as plt 6 | %matplotlib inline 7 | 8 | x0 = np.arange(-3, 3, 0.01).astype('float32') 9 | plt.plot(x0, s.eval({x:x0})) 10 | plt.plot(x0, ds.eval({x:x0})) 11 | 12 | np.allclose(ds.eval({x:x0}), s.eval({x:x0}) * (1-s.eval({x:x0}))) -------------------------------------------------------------------------------- /2 - Lasagne Basics/Digit Recognizer.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": false, 8 | "slideshow": { 9 | "slide_type": "slide" 10 | } 11 | }, 12 | "outputs": [ 13 | { 14 | "name": "stderr", 15 | "output_type": "stream", 16 | "text": [ 17 | "Using gpu device 0: GeForce GTX TITAN (CNMeM is disabled)\n" 18 | ] 19 | } 20 | ], 21 | "source": [ 22 | "import numpy as np\n", 23 | "import theano\n", 24 | "import theano.tensor as T\n", 25 | "import lasagne\n", 26 | "\n", 27 | "import matplotlib.pyplot as plt\n", 28 | "%matplotlib inline\n", 29 | "\n", 30 | "import gzip\n", 31 | "import pickle" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": 2, 37 | "metadata": { 38 | "collapsed": true 39 | }, 40 | "outputs": [], 41 | "source": [ 42 | "# Seed for reproducibility\n", 43 | "np.random.seed(42)" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 3, 49 | "metadata": { 50 | "collapsed": false, 51 | 
"slideshow": { 52 | "slide_type": "slide" 53 | } 54 | }, 55 | "outputs": [ 56 | { 57 | "name": "stdout", 58 | "output_type": "stream", 59 | "text": [ 60 | "--2015-11-08 15:37:00-- http://deeplearning.net/data/mnist/mnist.pkl.gz\r\n", 61 | "Resolving deeplearning.net (deeplearning.net)... 132.204.26.28\r\n", 62 | "Connecting to deeplearning.net (deeplearning.net)|132.204.26.28|:80... connected.\r\n", 63 | "HTTP request sent, awaiting response... 200 OK\r\n", 64 | "Length: 16168813 (15M) [application/x-gzip]\r\n", 65 | "Server file no newer than local file ‘mnist.pkl.gz’ -- not retrieving.\r\n", 66 | "\r\n" 67 | ] 68 | } 69 | ], 70 | "source": [ 71 | "# Download the MNIST digits dataset\n", 72 | "!wget -N http://deeplearning.net/data/mnist/mnist.pkl.gz" 73 | ] 74 | }, 75 | { 76 | "cell_type": "code", 77 | "execution_count": 4, 78 | "metadata": { 79 | "collapsed": true, 80 | "slideshow": { 81 | "slide_type": "slide" 82 | } 83 | }, 84 | "outputs": [], 85 | "source": [ 86 | "# Load training and test splits as numpy arrays\n", 87 | "train, val, test = pickle.load(gzip.open('mnist.pkl.gz'))\n", 88 | "\n", 89 | "X_train, y_train = train\n", 90 | "X_val, y_val = val" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 5, 96 | "metadata": { 97 | "collapsed": false, 98 | "slideshow": { 99 | "slide_type": "fragment" 100 | } 101 | }, 102 | "outputs": [ 103 | { 104 | "data": { 105 | "text/plain": [ 106 | "(50000, 784)" 107 | ] 108 | }, 109 | "execution_count": 5, 110 | "metadata": {}, 111 | "output_type": "execute_result" 112 | } 113 | ], 114 | "source": [ 115 | "# The original 28x28 pixel images are flattened into 784 dimensional feature vectors\n", 116 | "X_train.shape" 117 | ] 118 | }, 119 | { 120 | "cell_type": "code", 121 | "execution_count": 6, 122 | "metadata": { 123 | "collapsed": false, 124 | "slideshow": { 125 | "slide_type": "slide" 126 | } 127 | }, 128 | "outputs": [ 129 | { 130 | "data": { 131 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAArsAAABZCAYAAAA+cOwiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAFO9JREFUeJzt3WeQFNXbxuEffxUTBkyIoqJEE0ZQgUIFIyoGFERFzBEU\nc0KMJGMJKCgGFCyBEgTBiKCCoJS5ygCCCmLCgFnBxPvB996zPbvDLrszPdO99/VlYcPsmbMzPT13\nP+c5tVasWIGZmZmZWRr9r9ADMDMzMzPLF5/smpmZmVlq+WTXzMzMzFLLJ7tmZmZmllo+2TUzMzOz\n1PLJrpmZmZmllk92zczMzCy1fLJrZmZmZqnlk10zMzMzSy2f7JqZmZlZaq2erxuuVatWjdiHeMWK\nFbUq+701YU48H1GejyjPR1mVnRPPR5TnI8rzEeX5iKrp8+Fk18zMzMxSyye7ZmZmZpZaPtk1MzMz\ns9Tyya6ZmZmZpZZPds3MzMwstXyya2ZmZmaplbfWY1Zc9thjDwB69uwJwMknnwzAI488AsCQIUMA\neOuttwowOjOz4nPXXXcBcMEFFwDw3nvvAXD44YcDsGjRosIMzKyGmDZtGgC1av3XUax9+/ZVuh0n\nu2ZmZmaWWolOdldbbTUANthgg3K/rhRznXXWAaBZs2YAnH/++QDcdtttAHTr1q3kZ5YtWwbAwIED\nAbjhhhtyPezY7LrrriX/njp1KgDrr78+ACtW/Ndfunv37gB06tQJgI033jjOIRa9Dh06APDoo48C\nsO+++wIwb968go0pTn369AHC8+B///vv/fF+++0HwMsvv1yQcVm81ltvPQDq1KkDwGGHHQbApptu\nCsAdd9wBwPLlywswutxr2LAhACeddBIA//77LwDbb789AM2bNwdqTrLbtGlTANZYYw0A2rVrB8A9\n99wDhPmpyKRJkwA4/vjjSz73559/5myccdN8tG7dGoD+/fsD0KZNm4KNKQ3uvPPOkn9rbnUVuqqc\n7JqZmZlZahV1srv11lsDULt2bSCc4bdt2xaADTfcEIDOnTtX6vY+//xzAAYPHgzA0UcfDcAvv/xS\n8j3vvvsukOzEqlWrVgCMHz++5HNKv5Xo6j7rXbUS3b333hsItbuFfNet9EBje+KJJ2IfQ8uWLQF4\n/fXXY//dhXTKKacAcMUVVwBlkxs9jiydlGzq77/PPvsAsNNOO5X7/fXr1wdCbWvSffvttwDMmDED\nCFe+aoodd9wRCMeB4447DghXdrbYYgsgHBcqezzQPA4fPrzkc7179wbg559/ruao46fX1RdffBGA\nr7/+GoDNN9888n+rHF1RP+ecc0o+99dffwGhdreqnOyamZmZWWr5ZNfMzMzMUqvoyhhKL6qaPn06\nkH0BWmXpUosW2/z6669AWHT01VdflXzvDz/8ACRrAZIW4O2+++4AjB49GgiXFsszf/58AG655RYA\nxowZA8CsWbOAMFcDBgzIw4grR4ugmjRpAsRbxqDLddtuuy0A22yzDRDan6Sd7u9aa61V4JHk1157\n7QWEhUhagKjLuHLppZcC8OWXXwKhlErPtTlz5uR/sHmkBVe6pHziiScCsPbaawPhcb948WIglEFp\nwVaXLl2AsGBp7ty5cQw7b3777Teg5ixAy6TjfseOHfNy+2p9CfDAAw8A4bUnyVS+4DKGqlEZpRb+\nAbzyyisAjBs3rlq37WTXzMzMzFKr6JLdzz77rOTf33//PVD5ZFfpyo8//gjA/vvvD4RFVqNGjcrZ\nOIvJvffeC0RbqFVEKbBaCWlBntLUFi1a5HCEVaN3/6+++mrsv1up+JlnngmEBC/piVVFDjjgAAB6\n9eoV+bzut5rpL1myJN6B5VjXrl2BsGnAJptsAoQE86WXXgJCa61bb7018vP6Pn29dCulJNAxddCg\nQUCYD7UYy6QrQQcffDAQkhc9LjR/+ph0Wvy8yy67FHgkhaFWl
ZnJ7jfffAOENFZXwDIXsGoxua6U\n1BQ15cpfNlpUfs011wDhnGTp0qUr/Tl9nxbAfvzxxyVf01W16nKya2ZmZmapVXTJbul3AJdddhkQ\n0qS3334bCK3D5J133gHgwAMPBEK9leruLrzwwjyOuHC0BbAavGe+qyzdPm3y5MlA2EhDtYeaU9Uq\nayu+YniHqtSgEO6///7I/5VspZVqUB966CGg7NUUJZtJrWFcffX/DnV77rknACNGjABCvbtaTN10\n001AqBNbc801gVAvdtBBB0Vu94033sjnsPNGbRfPOOOMlX6fEhYdW1Wz27hx4zyOrvD0uFD7y0xq\nSahkO6nPi2yGDRsGwMSJEyOfVxuoimpRtXmRtldWqzIpfbtJfQ6VRy3Y0r7WIZv77rsPCOtsdthh\nByAcT7O5+uqrgdBmVFdUIbSDrS4nu2ZmZmaWWkWX7Jamd3/qyqAVwKqjOv3004GQVirRlffffx+A\ns846K/+DjZE6VmTbAviZZ54BojW8qp1SlwUll2qerndPqr1SWqzaXm0yEQfVC9erVy+235kpM9nU\nXKdVjx49gLIJjGpXq7tVY6Gp20JmYq+/q2pWMxvb6/OZia42qHn44YdzP9gYaJOATAsXLgTCJira\nVEKJrqgLQ1rpytfIkSMBuP766yNf1/+1PmTo0KFxDS0Wf//9N1D2715Zqu2uW7duuV/X8wfSs8V0\nabqC9NprrxV4JPH6/fffgcon3DqXUfcfnX/kIxl3smtmZmZmqVXUya5kpi0//fRT5P+q7xg7dixQ\ndmVoWjRt2hQItcxKH7/77jsg9AtW2qR+wgBPPfVU5GNF1F/zkksuAULfzThoBbDGECelyeqvK198\n8UXsY8m30ivnTzvtNCA8d5RY3XzzzfEPLIdUg6uaMCUO6gerKx3ZtirVquJM2hZXV0aSRsdMXfV6\n/vnnAViwYAEQVt1nU8irLnHS4ycz2bXyqSuJHl/ZjuF9+/aNbUz5pARc5yR6TW7UqFHBxlQIep7s\nvPPOAHz44YdA9nrbddddFwhXjlQjryT88ccfz/kYneyamZmZWWolItnNpHfZ6kagelT1CFVKkRZa\nEa7aZCWfqmFWP1qtas1lIpptNXI+NWvWLPJ/1V7HQXOs5Oqjjz4CwlynQcOGDQEYP3581u8ZMmQI\nAC+++GIcQ8qp0qmREl312n7uueeAkCj88ccfkZ9VrZhqdPX4V3cSJd2TJk3Ky9jjoprUqiaW++yz\nTw5HU/yy9ZOt6XTF78orrwRCl47SO2CVps5J6uqQdLoCNnPmTCB0jqopttpqKyAk+Uq6e/bsCWS/\n8nXHHXcAYe2Ajkdt2rTJ21id7JqZmZlZaiUy2VXXBb2bUKcA9c5UGqWk8+677wZCvV7S7LbbbkDZ\n3WyOPPJIINpPN420MjyX1MHikEMOAcJq/cxV96pF0jv4NNB9Lm+XvGnTpgFhZ7Ek0a5X5513Xsnn\n9JxXonvUUUeV+7NKpB599FEgXDUS1ZDdcsstORxx8VJNsmrrMqk2T2bPng0UZrfDOCjRTepryKrS\n1Z/u3bsD4appJvXnzjYvqoVX8vv0008DZa+oWLJop7MnnngCCOs/dEUw2zmJdkM75ZRTIp/v169f\nPoYZ4WTXzMzMzFIrkcmuaHcfvUvQ7k96N6qPSifUK1RdC5JC9S2qG9S7pnwkusVYm7bRRhut9Ovq\nu6z5UQrRoEEDAGrXrg1EO0rofiphmDNnDhB6PmrHrTfffLP6d6BIKNUcOHBgma9phxv1283seJIE\n+juX7jIhSio322wzAE499VQAOnXqBISkok6dOkBIqvRx9OjRQNle3kmnVdDa6ei6664Dyl5FynZc\nUK2d5vOff/7J32At7/Q8ePLJJ4Hqr9lQLat21qoptBNYGui1EMIV0AceeAAoe1xQLf9VV10FhHMX\nvYarRlev1Tonu/fee/N3B
/6fk10zMzMzS61EJ7uiupH58+cD4d1Ehw4dAOjfvz8QdulQfUix907V\nyk7tMqKUSe+68yGzNk2rZ+OktFVjGD58OBBW1mdS7aneLWpFqHZz+eCDDwB48MEHS35G9dxKx5cs\nWQKEnX3U0WLu3LnVvj+FVpnuC5988gkQ5iGJ1HGh9ArgTTfdFIBPP/0UyF5bqIRSNYb169cHQg/r\nyZMn52HE8dMqea0D0GNC91fPPc2HanBV560kWJT6HHPMMUCo9dbfwpJJx1J9zKaiK4F6DTv00EOB\nsLtn2umKURqodzKEHSh1HNXfXf25tXOcPmpd0ZZbbgmE44yO0ervHgcnu2ZmZmaWWqlIduW9994D\noEuXLgAcccQRQKjlPfvsswFo0qQJAAceeGDcQ1wlShdVi6hdjbRTXC6oh29mv83p06cDofYmTlpN\nv2jRIgBat2690u//7LPPAJg4cSIQdm9ZlX3JtZOUkkAlnWmgnrIrq8Mur443adQxo3THhSlTpgCh\nZkx1/uqTO3LkSACWLl0KwJgxY4CQQOj/SabjB4SEdsKECZHvueGGG4DwvJ81axYQ5k2fV02n6Pky\nYMAAoOxzEUIdfJJlSzDbtWsHwNChQ2MfUz7oNXS//fYDQo2mupksW7ZspT9/+umnA9CrV688jbA4\nqQNUmvrsdu3aFQjnTxD6I+tYe8IJJwDwww8/AHD77bcDYe8DJby6QqBEWOsqFi9eDITHm47P+eBk\n18zMzMxSK1XJruhdx6hRo4BQZ6L6Mr0b17uJl156Kd4BVpESklx0k1Ci26dPHwAuu+wyINSs6h3a\nr7/+Wu3fVVWDBg2K7XepvltWVt+aFKr1zuwdLKV3AZs3b14sY4qDOmtASB4romOCEgkleElO+FWf\nq9QWwvNcVEOp/pg6dmre1BdVfXVVi6t+w0p6VZunPsUvvPBCye/Q81jpjxRiPUBVZeuzq1pldbPQ\n+oCk01W1Ve1/qiuENS3Z1RUN0XNP64Q0n0miK+Gl75t2kCyd9pamv7u6K2TbaVFJrxLxfCa64mTX\nzMzMzFIrVcmuVuUfe+yxALRs2RKI9omD8O57xowZMY6u+nLRhUFpnxIe1eUo5evcuXO1f0caqMNH\nkj3//PMA1K1bN/J51TJn7mJTk6k+PjPBS2LN7mqrrQaE3f+0axGEPsHa0Ur3T4muauxUg6quDep0\nc+655wIhkdFOhKqrVy/r0qvRp06dGhmf6vS23XbbKt/HuKkjjNKuTKr57927d2xjKkYHH3xwoYdQ\nEOoAJEoudQU1iXROULq+X8/dbFSLm1nb361bNyDUhIuuJMfBya6ZmZmZpVaik91mzZoB0LNnTyDU\nT22++eblfr9291HNazHtElaezF6HWmV+4YUXrvJtXXTRRQBce+21AGywwQZAqLE7+eSTqzdYKzra\nxSfzcX7PPfcAha3HLjZabZ4GShmV6KrfNIRkUqn/3nvvDYQd0NQPVUn3jTfeCIQavcxkR32Jn332\n2chHJTkQVmyLjkVJkoZ+25lUV1q6pl9dN9RvubL0+FGf5ZpGKageJ82bNwdC0q8OQ0myKn9LnU9o\nhzRd8VEt7rhx43I8ulXnZNfMzMzMUitRya4SW6UGSnS1Q1Q22i1LK0vzuQNZLqluUB91/wcPHgyE\nHcG+//57IKQ03bt3B2CXXXYpua0GDRoAYWWlkiylfPYfpehNmzYFVq1Xb7FQCqfeoJlmz54d53AS\nIU21hn379o38XzW8EGr1tWq+cePG5d6Gvq7+uboqVlmPPfZYuf9OKnWr0GrzRo0aRb6uq236vjhW\nl1dV27ZtAbjmmmuAaL951VFXVJup/ssdO3YEwq6lmTvsKSGuqD9vWuiKiXYMu/jiiws5nNgouVZN\nv/YEaN++fcHGlMnJrpmZmZmllk92zczMzCy1irqMoV69ekBo2K12OCr+zkZN5W+99VYgFI8
X+4K0\niuhypC4ZqE2YFoloG+Ty6NK1WgZlXuq0/6hkJFsJQDFTW7kDDjgACI93bQRw9913A7BkyZICjK64\nbbfddoUeQs58/fXXQNgYonT7o9KlTRA2jVAbRm3zu3DhQmDVyxfS7v333wfKPl6S9Nqi19HM9lAA\nl19+OQC//PLLSm9DpQ+77747UHazDW3UNGzYMCC87tQUmg8de9NKm2acccYZQLjf9913HxBva7GK\nJO8V3czMzMyskoom2VXBu7aZg5BUVZS6KLXUFrdafLWq7VOKzauvvgrA66+/DoRNMkQL1pSAixas\nlW6IX5V2ZTWZtjkcOXJkYQeyCjbccEOgbOu9L774AohuLmBRM2fOBEKin6SkLpO2PlarQqVvEBaO\naHGrtvBNewKVK0qsjjjiiAKPJD+0wGhV6XE1efJkILze1JSFaZnUekvbaKdhk6LyaMMYJbyjR48G\n4LrrrivYmLJxsmtmZmZmqVWwZHevvfYCQiucVq1aAaFlx8qoSbpacPXv3x8IW2GmhepdtFmGGsL3\n6dOn3O9XE2jVSS1YsCDfQ0wdtR6zmkXbWGpbXF1NUoupb7/9tjADqwLVW44aNSry0apPW81/+OGH\nAGy//faFHE6VaJtwtVHr0aNHpX9WLdX0GqwrIkq8M7eDrWm6dOkCwPLly4HwOEkrtbnU1uRaH1WM\nnOyamZmZWWrVylxFmbMbrlVrpTc8cOBAICS75dG76ClTpgDw999/A6E298cff6z+QKtpxYoVlY4C\nK5qTNEjqfCjtUC3jiBEjgJCmV1Wc86Fa3bFjxwKhefynn34KZN9AIE7F/vjQ4+D+++8H4OWXXwZC\nCqZjUi5Vdk6K6fmST56PqHzNh7p06DEPcPPNNwNQt25dIHTnUG2mkjt1/CiEYn58aJ2MEv9OnToB\nsGjRorz9zmKej0LINh9Ods3MzMwstQqW7KZFsSdVcfN8RHk+oop9PrSKety4cUDoWTxhwgQATj31\nVCC36wOczER5PqI8H1GejyjPR5STXTMzMzOrcZzsVlOxJ1Vx83xEeT6ikjIfSnj79esHhP6jLVq0\nAHJbu+tkJsrzEeX5iPJ8RHk+opzsmpmZmVmN42S3mpKSVMXF8xHl+YjyfJTlZCbK8xHl+YjyfER5\nPqKc7JqZmZlZjZO3ZNfMzMzMrNCc7JqZmZlZavlk18zMzMxSyye7ZmZmZpZaPtk1MzMzs9Tyya6Z\nmZmZpZZPds3MzMwstXyya2ZmZmap5ZNdMzMzM0stn+yamZmZWWr5ZNfMzMzMUssnu2ZmZmaWWj7Z\nNTMzM7PU8smumZmZmaWWT3bNzMzMLLV8smtmZmZmqeWTXTMzMzNLLZ/smpmZmVlq+WTXzMzMzFLL\nJ7tmZmZmllo+2TUzMzOz1Po/Fv0oWLHGMx0AAAAASUVORK5CYII=\n", 132 | "text/plain": [ 133 | "" 134 | ] 135 | }, 136 | "metadata": {}, 137 | "output_type": "display_data" 138 | } 139 | ], 140 | "source": [ 141 | "# Plot the first few examples \n", 142 | "plt.figure(figsize=(12,3))\n", 143 | "for i in range(10):\n", 144 | " plt.subplot(1, 10, i+1)\n", 145 | " plt.imshow(X_train[i].reshape((28, 28)), cmap='gray', interpolation='nearest')\n", 146 | " plt.axis('off')" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": 7, 152 | "metadata": { 153 | "collapsed": true, 
154 | "slideshow": { 155 | "slide_type": "slide" 156 | } 157 | }, 158 | "outputs": [], 159 | "source": [ 160 | "# For training, we want to sample examples at random in small batches\n", 161 | "def batch_gen(X, y, N):\n", 162 | " while True:\n", 163 | " idx = np.random.choice(len(y), N)\n", 164 | " yield X[idx].astype('float32'), y[idx].astype('int32')" 165 | ] 166 | }, 167 | { 168 | "cell_type": "code", 169 | "execution_count": 8, 170 | "metadata": { 171 | "collapsed": true, 172 | "slideshow": { 173 | "slide_type": "slide" 174 | } 175 | }, 176 | "outputs": [], 177 | "source": [ 178 | "# A very simple network, a single layer with one neuron per target class.\n", 179 | "# Using the softmax activation function gives us a probability distribution at the output.\n", 180 | "l_in = lasagne.layers.InputLayer((None, 784))\n", 181 | "l_out = lasagne.layers.DenseLayer(\n", 182 | " l_in,\n", 183 | " num_units=10,\n", 184 | " nonlinearity=lasagne.nonlinearities.softmax)" 185 | ] 186 | }, 187 | { 188 | "cell_type": "code", 189 | "execution_count": 9, 190 | "metadata": { 191 | "collapsed": true, 192 | "slideshow": { 193 | "slide_type": "slide" 194 | } 195 | }, 196 | "outputs": [], 197 | "source": [ 198 | "# Symbolic variables for our input features and targets\n", 199 | "X_sym = T.matrix()\n", 200 | "y_sym = T.ivector()" 201 | ] 202 | }, 203 | { 204 | "cell_type": "code", 205 | "execution_count": 10, 206 | "metadata": { 207 | "collapsed": true, 208 | "slideshow": { 209 | "slide_type": "fragment" 210 | } 211 | }, 212 | "outputs": [], 213 | "source": [ 214 | "# Theano expressions for the output distribution and predicted class\n", 215 | "output = lasagne.layers.get_output(l_out, X_sym)\n", 216 | "pred = output.argmax(-1)" 217 | ] 218 | }, 219 | { 220 | "cell_type": "code", 221 | "execution_count": 11, 222 | "metadata": { 223 | "collapsed": false, 224 | "slideshow": { 225 | "slide_type": "fragment" 226 | } 227 | }, 228 | "outputs": [], 229 | "source": [ 230 | "# The loss function is 
cross-entropy averaged over a minibatch, we also compute accuracy as an evaluation metric\n", 231 | "loss = T.mean(lasagne.objectives.categorical_crossentropy(output, y_sym))\n", 232 | "acc = T.mean(T.eq(pred, y_sym))" 233 | ] 234 | }, 235 | { 236 | "cell_type": "code", 237 | "execution_count": 12, 238 | "metadata": { 239 | "collapsed": false, 240 | "slideshow": { 241 | "slide_type": "slide" 242 | } 243 | }, 244 | "outputs": [ 245 | { 246 | "name": "stdout", 247 | "output_type": "stream", 248 | "text": [ 249 | "[W, b]\n" 250 | ] 251 | } 252 | ], 253 | "source": [ 254 | "# We retrieve all the trainable parameters in our network - a single weight matrix and bias vector\n", 255 | "params = lasagne.layers.get_all_params(l_out)\n", 256 | "print(params)" 257 | ] 258 | }, 259 | { 260 | "cell_type": "code", 261 | "execution_count": 13, 262 | "metadata": { 263 | "collapsed": false, 264 | "slideshow": { 265 | "slide_type": "fragment" 266 | } 267 | }, 268 | "outputs": [ 269 | { 270 | "name": "stdout", 271 | "output_type": "stream", 272 | "text": [ 273 | "OrderedDict([(W, Elemwise{sub,no_inplace}.0), (b, Elemwise{sub,no_inplace}.0)])\n" 274 | ] 275 | } 276 | ], 277 | "source": [ 278 | "# Compute the gradient of the loss function with respect to the parameters.\n", 279 | "# The stochastic gradient descent algorithm produces updates for each param\n", 280 | "grad = T.grad(loss, params)\n", 281 | "updates = lasagne.updates.sgd(grad, params, learning_rate=0.05)\n", 282 | "print(updates)" 283 | ] 284 | }, 285 | { 286 | "cell_type": "code", 287 | "execution_count": 14, 288 | "metadata": { 289 | "collapsed": false, 290 | "slideshow": { 291 | "slide_type": "slide" 292 | } 293 | }, 294 | "outputs": [], 295 | "source": [ 296 | "# We define a training function that will compute the loss and accuracy, and take a single optimization step\n", 297 | "f_train = theano.function([X_sym, y_sym], [loss, acc], updates=updates)" 298 | ] 299 | }, 300 | { 301 | "cell_type": "code", 302 | 
"execution_count": 15, 303 | "metadata": { 304 | "collapsed": false, 305 | "slideshow": { 306 | "slide_type": "-" 307 | } 308 | }, 309 | "outputs": [], 310 | "source": [ 311 | "# The validation function is similar, but does not update the parameters\n", 312 | "f_val = theano.function([X_sym, y_sym], [loss, acc])" 313 | ] 314 | }, 315 | { 316 | "cell_type": "code", 317 | "execution_count": 16, 318 | "metadata": { 319 | "collapsed": true, 320 | "slideshow": { 321 | "slide_type": "-" 322 | } 323 | }, 324 | "outputs": [], 325 | "source": [ 326 | "# The prediction function doesn't require targets, and outputs only the predicted class values\n", 327 | "f_predict = theano.function([X_sym], pred)" 328 | ] 329 | }, 330 | { 331 | "cell_type": "code", 332 | "execution_count": 17, 333 | "metadata": { 334 | "collapsed": true 335 | }, 336 | "outputs": [], 337 | "source": [ 338 | "# We'll choose a batch size, and calculate the number of batches in an \"epoch\"\n", 339 | "# (approximately one pass through the data).\n", 340 | "BATCH_SIZE = 64\n", 341 | "N_BATCHES = len(X_train) // BATCH_SIZE\n", 342 | "N_VAL_BATCHES = len(X_val) // BATCH_SIZE" 343 | ] 344 | }, 345 | { 346 | "cell_type": "code", 347 | "execution_count": 18, 348 | "metadata": { 349 | "collapsed": true 350 | }, 351 | "outputs": [], 352 | "source": [ 353 | "# Minibatch generators for the training and validation sets\n", 354 | "train_batches = batch_gen(X_train, y_train, BATCH_SIZE)\n", 355 | "val_batches = batch_gen(X_val, y_val, BATCH_SIZE)" 356 | ] 357 | }, 358 | { 359 | "cell_type": "code", 360 | "execution_count": 19, 361 | "metadata": { 362 | "collapsed": false 363 | }, 364 | "outputs": [ 365 | { 366 | "name": "stdout", 367 | "output_type": "stream", 368 | "text": [ 369 | "5\n" 370 | ] 371 | }, 372 | { 373 | "data": { 374 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAPwAAAD8CAYAAABTq8lnAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAADP9JREFUeJzt3WuMFfUZx/HfA0IUJVpjXUG2Qrw0TUMC0ZAa22AaihDi\nLUbJJtWVeCVKpTFR7IvKG+MlaqRvjI0ISFEoXihtUi+Yxnq/NAhaVxQVgwYXGgTZaILWpy92SNcF\n/rN7ZuacWZ7vJyGcM885Mw8DP+bM/ufM39xdAGIY1uoGADQPgQcCIfBAIAQeCITAA4EQeCCQhgNv\nZjPM7D0z+8DMbi6zKQDVsEbG4c1suKRNkqZJ+kzSG5I63L2rz2sY4AdayN2t/7JGj/BTJG129y3u\n/o2klZLOL9IcgOo1GvgTJW3t8/zTbBmAGms08HxcB4agRgP/maT2Ps/b1XuUB1BjjQb+TUmnmtl4\nMxspabakteW1BaAKhzXyJnf/1syul/S0pOGSFvf9CT2AempoWG5AK2ZYDmipMoflAAxBBB4IhMAD\ngRB4IBACDwRC4IFACDwQCIEHAiHwQCAEHgiEwAOBEHggEAIPBELggUAIPBAIgQcCIfBAIAQeCITA\nA4EQeCAQAg8EQuCBQAg8EAiBBwIh8EAgBB4IhMADgRB4IBACDwRC4IFACDwQyGFF3mxmWyR9Kem/\nkr5x9yllNIXWu/7663Nf4+5N6OTgxo0bl6zPnTu30u0//fTTyfrs2bMr3X4jCgVekks62913ltEM\ngGqV8ZHeSlgHgCYoGniXtM7M3jSzq8poCEB1in6kP8vdt5nZDyU9a2bvufsLZTQGoHyFjvDuvi37\nfYekJyXxQzugxhoOvJmNMrPR2eMjJU2X9HZZjQEoX5GP9G2SnjSzfetZ4e7PlNIVgEo0HHh3/1jS\npBJ7QYlGjBiRrHd0dCTrixYtyt1Gq8fhW23ChAmtbmHQuNIOCITAA4EQeCAQAg8EQuCBQAg8EAiB\nBwIpei09amrYsPT/5Z2dnYW38dFHHyXrGzduLLT+zZs3J+tLliwptP6LL744WZ80KX2ZyYoVKwpt\nvxU4wgOBEHggEAIPBELggUAIPBAIgQcCIfBAIFbVd5rNLPaXpSt29NFHJ+tLly5N1s8999xk/ZNP\nPsnt4ZxzzknW88bRUS133++O0hzhgUAIPBAIgQcCIfBAIAQeCITAA4EQeCAQvg8/ROXdd37y5MmF\n1j99+vTc13z44YeFtoHm4wgPBELggUAIPBAIgQcCIfBAIAQeCITAA4HkjsOb2UOSZkna7u4Ts2XH\nSlol6SRJWyRd4u67KuwznHHjxiXrc+fOTdbHjh2brHd1dSXrjLEfmgZyhF8iaUa/ZQskPevup0l6\nLnsOoOZyA+/uL0j6ot/i8yQtyx4vk3RByX0BqECj5/Bt7t6dPe6W1FZSPwAqVPiHdt57UzzuXwcM\nAY0GvtvMTpAkMxsjaXt5LQGoSqOBXytp3/SjnZLWlNMOgCrlBt7MHpX0sqQfm9lWM5sj6Q5JvzKz\n9yX9MnsOoOa4L31NdXR0JOvLly9P1vfu3Zusz5w5M1l//vnnk3XUH/elB4Ij8EAgBB4IhMADgRB4\nIBACDwRC4IFAuC99TbW3txd6/+rVq5P1MsbZR48enayb7TcMXKqvvvoqWf/2228r3f5QxBEeCITA\nA4EQeCAQAg8EQuCBQAg8EAiBBwJhHL5FrrjiimR9zpw5hdZ//PHHJ+u33XZbsj6QMfT58+cn6yNH\njsxdRxF33313sr5gAXdP748jPBAIgQcCIfBAIAQeCITAA4EQeCAQAg8Ewn3pW+Smm25K1m+//fYm\ndXJgw4blHwu+++67JnTSuB07diTr06ZNS9bfeeedMttpOu5LDwRH4IFACDwQCIEHAiHwQCAEHgiE\nwAOB5I7Dm9lDkmZJ2u7uE7NlCyVdKWnfQOct7v5Uv/cxDp9wx
BFHJOsrV65M1mfNmlVo+6+88kqy\nvn79+tx15P3bufPOO5P1PXv2JOtTp05N1tesWZOs53nkkUeS9csuu6zQ+lut0XH4JZJm9F+XpHvd\nfXL266kDvA9AzeQG3t1fkPTFAUrVTisCoHRFzuHnmdkGM1tsZseU1hGAyjQa+PslTZA0SdI2SfeU\n1hGAyjQUeHff7hlJD0qaUm5bAKrQUODNbEyfpxdKerucdgBUKfc21Wb2qKSpko4zs62SbpV0tplN\nUu9P6z+WdE2lXQIoRW7g3b3jAIsfqqCXUL7++utkPe++9Icffnih7X/55ZfJek9PT6H1lyFvH+XZ\nvXt3sv7AAw8UWv9QxJV2QCAEHgiEwAOBEHggEAIPBELggUAIPBAI88PX1M6dO1vdQuXGjh2brC9c\nuLDQ+tetW5esv/TSS4XWPxRxhAcCIfBAIAQeCITAA4EQeCAQAg8EQuCBQA7ZcfjLL788Wb/22muT\n9fnz5yfrr7766mBbCmfMmDHJ+qpVq5L1M888M1nP+07/okWLkvWIOMIDgRB4IBACDwRC4IFACDwQ\nCIEHAiHwQCCH7Dj86aefnqyfccYZyfrq1auT9a6urmR93rx5yfqmTZuS9aGgra0tWb/mmvT8JEXH\n2Z944olk/eWXX07WI+IIDwRC4IFACDwQCIEHAiHwQCAEHgiEwAOBJMfhzaxd0sOSjpfkkv7o7n8w\ns2MlrZJ0kqQtki5x910V9zoop5xySqH3532XO68+ceLEZN3MkvW9e/cm61u3bk3WTz755GS9DEuX\nLk3W86512LNnT7KeN85+5ZVXJuvYX94R/htJv3X3n0r6maTrzOwnkhZIetbdT5P0XPYcQM0lA+/u\nn7v7W9njHkldkk6UdJ6kZdnLlkm6oMomAZRjwOfwZjZe0mRJr0lqc/furNQtKX2NJYBaGFDgzewo\nSY9LusHdv3fi5e6u3vN7ADWXG3gzG6HesC939zXZ4m4zOyGrj5G0vboWAZQlGXjr/VHyYknvuvt9\nfUprJXVmjzslren/XgD1k/f12LMk/VrSRjNbny27RdIdkv5sZlcoG5arrEMApbHeU/AKVmzW0vP6\nSy+9NFmfMmVKsn711Vcn68OHDx90T4Oxe/fuZD1v7vOLLrqo0PbzrhOQpJ6enmR91670pRk33nhj\nsv7YY4/l9oCDc/f9/hK50g4IhMADgRB4IBACDwRC4IFACDwQCIEHAjlkx+GL6uzsTNZHjRrVpE5a\nY9iw/GPBhg0bkvUXX3yxrHbQAMbhgeAIPBAIgQcCIfBAIAQeCITAA4EQeCAQxuGBQxTj8EBwBB4I\nhMADgRB4IBACDwRC4IFACDwQCIEHAiHwQCAEHgiEwAOBEHggEAIPBELggUCSgTezdjP7h5n928ze\nMbPfZMsXmtmnZrY++zWjOe0CKCL5fXgzO0HSCe7+lpkdJelfki6QdImkPe5+b+K9fB8eaKEDfR/+\nsJw3fC7p8+xxj5l1SToxK++3MgD1NuBzeDMbL2mypFezRfPMbIOZLTazYyroDUDJBhT47OP8Y5Ju\ncPceSfdLmiBpkqRtku6prEMApcm9p52ZjZD0N0l/d/f7DlAfL+mv7j6x33LO4YEWGvQ97czMJC2W\n9G7fsJvZmD4vu1DS22U1CaA6eT+l/7mkf0raKGnfC38nqUO9H+dd0seSrnH37n7v5QgPtNCBjvDc\npho4RHGbaiA4Ag8EQuCBQAg8EAiBBwIh8EAgBB4IhMADgRB4IBACDwRC4IFACDwQCIEHAiHwQCAE\nHgiEwAOBEHggkMrueAOgfjjCA4EQeCCQpgTezGaY2Xtm9oGZ3dyMbQ6GmW0xs43ZxJiv16Cfh8ys\n28ze7rPsWDN71szeN7NnWjnbz0H6q8UEo4kJUGux/1o9QWvl5/BmNlzSJknTJH0m6Q1JHe7eVemG\nB8HMPpZ0urvvbHUvkmRmv
5DUI+nhfRN8mNldkv7j7ndl/2n+wN0X1Ki/W5UzwWiTejvYBKhzVIP9\nV2SC1jI04wg/RdJmd9/i7t9IWinp/CZsd7BqMzmmu78g6Yt+i8+TtCx7vEy9/0ha4iD9STXYh+7+\nubu/lT3ukbRvAtRa7L9Ef1IT9l8zAn+ipK19nn+q//8B68IlrTOzN83sqlY3cxBtfSb76JbU1spm\nDqJWE4z2mQD1NdVw/7VigtZmBH4ojPud5e6TJc2UdF32kbW2vPc8rG77tVYTjGYflx9X7wSoe/rW\n6rD/WjVBazMC/5mk9j7P29V7lK8Nd9+W/b5D0pPqPQ2pm+7s/G/f3H7bW9zP97j7ds9IelAt3IfZ\nBKiPS1ru7muyxbXZf336+9O+/pq1/5oR+DclnWpm481spKTZktY2YbsDYmajzGx09vhISdNVz8kx\n10rqzB53SlqTeG3T1WWC0YNNgKqa7L9WT9DalCvtzGympPskDZe02N1vr3yjA2RmE9R7VJekwySt\naHV/ZvaopKmSjlPv+ebvJf1F0p8l/UjSFkmXuPuumvR3q6SzlTPBaJN6O9AEqLdIel012H9FJmgt\nZftcWgvEwZV2QCAEHgiEwAOBEHggEAIPBELggUAIPBAIgQcC+R8s56mYXXoOYAAAAABJRU5ErkJg\ngg==\n", 375 | "text/plain": [ 376 | "" 377 | ] 378 | }, 379 | "metadata": {}, 380 | "output_type": "display_data" 381 | } 382 | ], 383 | "source": [ 384 | "# Try sampling from the batch generator.\n", 385 | "# Plot an image and corresponding label to verify they match.\n", 386 | "X, y = next(train_batches)\n", 387 | "plt.imshow(X[0].reshape((28, 28)), cmap='gray', interpolation='nearest')\n", 388 | "print(y[0])" 389 | ] 390 | }, 391 | { 392 | "cell_type": "code", 393 | "execution_count": 20, 394 | "metadata": { 395 | "collapsed": false, 396 | "slideshow": { 397 | "slide_type": "slide" 398 | } 399 | }, 400 | "outputs": [ 401 | { 402 | "name": "stdout", 403 | "output_type": "stream", 404 | "text": [ 405 | "Epoch 0, Train (val) loss 0.621 (0.379) ratio 0.610\n", 406 | "Train (val) accuracy 0.844 (0.900)\n", 407 | "Epoch 1, Train (val) loss 0.389 (0.346) ratio 0.890\n", 408 | "Train (val) accuracy 0.895 (0.907)\n", 409 | "Epoch 2, Train (val) loss 0.356 (0.320) ratio 0.901\n", 410 | "Train (val) accuracy 0.900 (0.913)\n", 411 | "Epoch 3, Train (val) loss 0.340 (0.303) ratio 0.893\n", 412 | "Train (val) accuracy 0.904 (0.913)\n", 413 | "Epoch 4, Train (val) loss 0.329 (0.299) ratio 0.909\n", 414 | "Train (val) accuracy 0.909 (0.916)\n", 415 | "Epoch 5, Train (val) loss 0.320 (0.299) ratio 0.935\n", 
416 | "Train (val) accuracy 0.911 (0.919)\n", 417 | "Epoch 6, Train (val) loss 0.308 (0.286) ratio 0.929\n", 418 | "Train (val) accuracy 0.914 (0.919)\n", 419 | "Epoch 7, Train (val) loss 0.307 (0.302) ratio 0.985\n", 420 | "Train (val) accuracy 0.914 (0.918)\n", 421 | "Epoch 8, Train (val) loss 0.304 (0.286) ratio 0.941\n", 422 | "Train (val) accuracy 0.915 (0.921)\n", 423 | "Epoch 9, Train (val) loss 0.299 (0.281) ratio 0.940\n", 424 | "Train (val) accuracy 0.917 (0.922)\n" 425 | ] 426 | } 427 | ], 428 | "source": [ 429 | "# For each epoch, we call the training function N_BATCHES times,\n", 430 | "# accumulating an estimate of the training loss and accuracy.\n", 431 | "# Then we do the same thing for the validation set.\n", 432 | "# Plotting the ratio of val to train loss can help recognize overfitting.\n", 433 | "for epoch in range(10):\n", 434 | " train_loss = 0\n", 435 | " train_acc = 0\n", 436 | " for _ in range(N_BATCHES):\n", 437 | " X, y = next(train_batches)\n", 438 | " loss, acc = f_train(X, y)\n", 439 | " train_loss += loss\n", 440 | " train_acc += acc\n", 441 | " train_loss /= N_BATCHES\n", 442 | " train_acc /= N_BATCHES\n", 443 | "\n", 444 | " val_loss = 0\n", 445 | " val_acc = 0\n", 446 | " for _ in range(N_VAL_BATCHES):\n", 447 | " X, y = next(val_batches)\n", 448 | " loss, acc = f_val(X, y)\n", 449 | " val_loss += loss\n", 450 | " val_acc += acc\n", 451 | " val_loss /= N_VAL_BATCHES\n", 452 | " val_acc /= N_VAL_BATCHES\n", 453 | " \n", 454 | " print('Epoch {}, Train (val) loss {:.03f} ({:.03f}) ratio {:.03f}'.format(\n", 455 | " epoch, train_loss, val_loss, val_loss/train_loss))\n", 456 | " print('Train (val) accuracy {:.03f} ({:.03f})'.format(train_acc, val_acc))" 457 | ] 458 | }, 459 | { 460 | "cell_type": "code", 461 | "execution_count": 21, 462 | "metadata": { 463 | "collapsed": false, 464 | "slideshow": { 465 | "slide_type": "slide" 466 | } 467 | }, 468 | "outputs": [ 469 | { 470 | "name": "stdout", 471 | "output_type": "stream", 472 | "text": 
[ 473 | "(784, 10)\n" 474 | ] 475 | } 476 | ], 477 | "source": [ 478 | "# We can retrieve the value of the trained weight matrix from the output layer.\n", 479 | "# It can be interpreted as a collection of images, one per class\n", 480 | "weights = l_out.W.get_value()\n", 481 | "print(weights.shape)" 482 | ] 483 | }, 484 | { 485 | "cell_type": "code", 486 | "execution_count": 22, 487 | "metadata": { 488 | "collapsed": false 489 | }, 490 | "outputs": [ 491 | { 492 | "data": { 493 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAArsAAABZCAYAAAA+cOwiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3UezvktVNvALxYA5YA6YURDxeBDxBAExlUVhQKTkAzBi\noE6YGD6EDnDiwInlQMuyKMCAHEE4Bw5BVDBnRTEB5nzegfX7d+9r781+7n+9u956n1rXZO8n9d29\neq113+vq1asf99hjj2UwGAwGg8FgMDhHfMj/6w4MBoPBYDAYDAa3hXnYHQwGg8FgMBicLeZhdzAY\nDAaDwWBwtpiH3cFgMBgMBoPB2WIedgeDwWAwGAwGZ4t52B0MBoPBYDAYnC3mYXcwGAwGg8FgcLaY\nh93BYDAYDAaDwdliHnYHg8FgMBgMBmeLedgdDAaDwWAwGJwtHn9bDf/Ij/zIY0nyvve9L0ny2Z/9\n2Rc+f+9735skefKTn5wk+Yu/+IskySd+4ife+c6HfdiHJUn+8A//MEnyX//1X0mSj/mYj0mSfPqn\nf3qS5O/+7u+SJJ/5mZ+ZJPnv//7vJMlf/uVfJkk+/uM/Pkny/ve/P0nyz//8z0mSz/iMz7jQ7n7N\nD//wD0+SfMRHfESS5E//9E8vtPWEJzwhSfI93/M9j7tZGv+L7/u+73ssSR73uP/9yR//8R9f+Pxj\nP/ZjkyQf/dEffWFcxvMJn/AJSZLHP/7xF973vc/5nM9JkrznPe/Jfp0d+v9Xf/VXF9r63M/93CTJ\nn/zJnyRJPu3TPi1J8p//+Z9Jkv/5n/9JsubH/JmD//iP/0iS/OiP/ujJ8vjJn/zJx5Ilczpgflzz\naU972oW+/NZv/VaSJY9P+ZRPSZL89V//dZLkSU96UpI1Z//yL/+SJPmkT/qkO9eml93/Jz7xiUkS\nx2j//u///oXv6SP5kIe5+IM/+IMkyRd90RclSb7zO7/zZHm84hWveCxZsqVjX/iFX3hBHvpGf+j9\nb/7mbyZZc6lP+vjud787ydIB7SVL5z7kQ/43/v37v//7JMs+P/CBDyRZMv/t3/7tC31kJ//+7/+e\nZNkUnSavF73oRSfL4/u///svnGXebdFV4/mHf/iHJFfPs3ll1//0T/90YZx05s///M8vvDZOvkJ7\nH/qhH5pk+SI6nCR/+7d/m2TJ/9/+7d8uXPvzPu/zkix/9EM/9EMnyeQlL3nJY8mao0/91E/N/poO\nf9RHfdQFeZibZM2jvvzrv/5rkiVT4zN/bEcbvse3kjV5/uM//mOSpRc7PvmTPzlJ8pEf+ZEX+qJt\nfXnFK15xkjxe/vKXP5asOeAf9rlIlj8hJ3ObLBswTm0YFx9Klvyl+ScvNmYs5McHsa9k2Sf7NRd8\nDFl6/6UvfelJ8vjxH//xx/a+QvfRHLnen/3Zn935Lp2is2yOT/qsz/qsJEvXfI/O+Z45Jo+P+7iP\nS7LmYp8j/aUPX/IlX3
Lhu8CeXvayl50kjx/8wR98bG/HHLTu932Yr0iSL/iCL0iSPPLII0mWPJ76\n1KcmSR5++OEkyy7YPp0k6/vuuy/Jujf7vt9/2Zd92Z1r8rH8g/74Lf0wVz/wAz9wSB76SC5tw+TT\nPjBZOkSn2I++mCPjI1tjoDd/8zd/k2TpAf/KXj7/8z//zjXZpt/ol3stX0Off/iHf/hKedzaw64L\nmzATRbAG7+ZCwLuCE4ybO0fjZtE3XIZHYH7nWoTGEfl8f9hlnNpiEMbjcwpzBB4wjZHzd8PtGwSH\nwVHokwcuDzkcDQgsPHhc1Sal5LT9paQ+bxn1Qy5HddXN7SZ4SHdNjoSDoNS/8Ru/kWTJ3vybzz/6\noz9KsuTBeRurm4uHvWQFBtpgtOabIXmQ1leypsduoHTTX3p/BPSWs+NY3HB/53d+50Kf6fXv/u7v\nJlnzTk4cLjv68i//8iTrpuLGnizddE3Ol57TVeMiL07KuOkLZ6gvHNURkDHZ93jMIbl46CXHZD2s\nt4yMo/tLn+kcp8tPffEXf3GSZbPk4EaRLBmyU4EPm2L/5vlUsMF+aKQPbJg99A0oWfrKN9BvvyVz\n8nAt/tk8mwvj5g+0t893+y3ycS0+RR9Ohb6Zd32h1/rML1z1cOOhhSw9xPNnxrHbyt7npzzlKUmW\nvvBh5gT48OTy/QzMr3vlUfCb5NAP5v7yG+zEnCXrgZA/MA5+j180HjrGDvhyOk4e7Mr7dCJZ/pk9\nsDVt0hN9OhV8lz4aG7KEHvYDPHtNkne84x1JknvvvTfJmiO65CGWTzW35ICA8VCrD6754IMPXtt/\n/f7VX/3VJJfv90ftxfj0XTtea8+c85G7ftB7v2X3fAyd4h/6mQz4If7VXAku9mBIv+mD+6E2yHrX\nqaswaQyDwWAwGAwGg7PFrTG7ooWO/ERAns49tWOARArJiuR6yVBkhtnxPQyQyFX0IQ0CA6APfrcv\n5/qNiEw0JfIXPfRS0SkQFWKXtCkCbzalGSCRu0gX+yAKwkqQ084ukmsz7GQm4uxoTh+wJD7HdJDh\nvnR8Knp5wjjIRRQp8idzc4DBwC5CR3jktkeoInBtNPtvPD1efaMf5NKpB3fD/LuG+TY3WER903d2\nwcaM01xa+aBPdPsqtsp3/DWvroXRIWvXwNY1S6lPWDR9OwLjbSbfHOgTtvUqm+QrMFFYYDaC3fE9\nbIHvm1fvY4jJBaOBkUiWTHxm/lrH2PupIFNsGQasV1WwJXR0ZxDNL6ZFX62i0HfjNc/GixHWB36c\nzRnbzmxqWxv0zrWMi76fCvKVzsKP86/Gbw5dB6t2Vb/5CPcIfae/5GLlw7Wf/vSnX+ib+xn5+F6y\nZMivuS/QB3a69/MUkKvxsz19dk/u+9DOwtMV8+s3ZKtvdE5b/KA5YV9tk502llxeBXLP1S+y16dT\n8aVf+qVJlo6TB7/pfsMmsLj7iqiVIfcJ83jPPfdcGB9f2ytZ5OkabJPeeTbZ72GddtL3bit0neZx\nE3yf/zQH5pA+0hd2Y273cRiXPtFVv2XbWFfvmwv6wzfSq179SpYvxrj/3u/93oU22KpxXYdhdgeD\nwWAwGAwGZ4tbY3Y7qsTOiCJET83O7oyARHVsSrNuNg91JChi23M0k/Xkj3UTMYlWksusmci8NyDZ\nNHQEIjXX0A+ywDy7tvcxtqKp3tDVeaOi6p3R0f+OfkTuWBBMjr5hOlzL/IlMzdG+yeFUNOPQm2BE\n0c1Imi+RKMYGs4GN6cT1d77znXeuTbZf/dVfnWRFh+Tg2qJaUaVo2Ofa9pfse9PDKfjar/3aJEvG\nvVEPsADGR37YRXKSj0iexkjPyD9ZNkTfbXb7iq/4iiTJ6173uiRrjugFdhGz8TVf8zVJls6y+6P5\ndsnSMbplvHTReI3f93d7JkPzav4wD/JbyYw+0/+WS48bw7ezq5iIzmejM/p9lMmks/ru
Os3GGjO2\nBXuSLP/FD/hu59SxpdZF7HRvKvtgeYT0Vz/J0m8xd3te6ymgz9pnq+RgrjBl7ht0YR+nNtgvn2L8\nvTGZnzT/mD9sIv0wZn4mWWye8ZOHfrqG+96pYOd0mRx6lYpeYO72zeN0hQ3RXfda8mGD5sz4tWVs\nvq8drNy+Eb03kLUuGldvPLwJWEf3Qez7W97ylguf89VWp9xXkqVj7Aaryh/2JlF+76u+6quSJL/2\na7+WZD3nYI/5l/vvvz/JxVXYvsd85Vd+5YVr0dmjq4edj0136SO96dWbnWUlS/OuTd/pDZrGQq/M\noT60vXjG2+eAHNzXtUHm5sjf6zDM7mAwGAwGg8HgbHFrzK6oAdsiIhAhezoX1XeFhf03osOO8LwP\nIhOshWhK1C6aEH2IRneGS4QiihB1Y2ObVT0CUZAoR/QjYhPdyOvBluiv/ouqescvGRrvXkIFRNzY\nMr8ha33zvpwkfcSKdPmSPQfrVBgnBmMvkZRcrkKhT8agryJyDAYdw2CIrnfmyHjpTO8mNV5s0aOP\nPnqhTXqgHXPVZeGOwHyKtDFT9Bmja04wtyJ9f+0cNgY29fznPz/Jyk/d5Y25MJ9WLugcxvPNb35z\nkmVb4Fr0gg669tHKA3ubbNB8Y1vZi++Ry15dhY/o6i9s3ued34+VxX4Zb5fqolO7D/FdbM11TETb\n701on4l56dUacmCT+wpP55gbB5n13gTsc5fW6pxWetLySi7nNZNxr+rt5bmOgG7SOTbMX2CbyYX+\nJMtm6IU+8HNWB8manrgWG8WE9gpZ5zQny4eYF6sLdPGqlZdT0OUk+UN9NlbXM4bdV3XFIv7RvLvv\n6WuvtpFXr7KZA/Lay2L2/bjtp1c2TwWZa9893OqTCglWrzC8e2UV77ET9t8+1qohH9vMpfHTCzIn\n730VWtvui+49fKg84q4QchPYLN/WlSXaV/IVe941G7b3oUvzaaP344DXXXLWX3O/r2roh7bNI1ny\nizf5j2F2B4PBYDAYDAZni1tjdkXvotM+rKEjJWzFnssDXagcwyVKEA0873nPu/A97KU29934ycpD\n2lkIUSSGC1Mlcml28QjknohMuj5kVwi4LgoUkYkWm20g4z2qMvYueN8yJKuuXYclEqkbi3ndC7Wf\nCgxj57mKlvUFeyZa/rqv+7oL19Y3eiKaFn127eBkMVhYHjL1+u1vf3uSlUdn3ruGZde2lNt7Nznd\nPc90EXPlc3lj5ERv2Jy5MXdeY29F41i4JHnWs56VZDF5ZOd1R+hYkT7IhU56be7uhtntii3mQN6f\nlQFMlZy8PV+6q2hgLdhB54+SAxaZ7MlB5QdoJi9Z9idvT51M4yCTfZfzKSBT49b3nldjpMP0JFl6\nbhx8Rq/QdGUQOqktNkuH6GDXI0+WHcsFtOrntT7s+ngK2FpXyOgan/rehwvtn2FusV/uHdqU30lu\nXZOUffDF5sIY9yL573rXuy58pw9icK2jq2XmCkvmmj3GZlL3FQafud+5f/vb/o6M2Rw9oidy/9kX\ne9pz3PlS12QffsPuj9bqNl591HfMvz0SmFP6tDO7bI6/ICt67r7J/vVZW10T27X7uWe/fz700ENJ\nVp43+bBdq2tH6+zKp24f3at0na+/s/AY6H424eu6/njXAncv+vVf//Uka66tQvDtu89iQ67Vtaz5\nu5uY7mF2B4PBYDAYDAZni1tjdj3Rd01BUbgIoBmXvfakSE4k0jV6HSMr+haNi75F8j7H+InSMCv7\nLj4RGWarj8l1jaM7Q5MVoRq7SKWj6D4xBHPVJ56I+ESuGAHf2xkMLKFoSETlGlhuERp2QN/Mj0gd\ni2B+7+bUH9Fs506L3LBp2AbzjUUwTpGpufn5n//5C33viiDJkh25aMs4XFNkbr472r2OTbrpNJer\n4Br+9o5w1+xavuag5WEsbVv6urMJnYuKsWUbXdNV
Li6Wij7QabLu3dhH0LmcXpNHs8f0fWfV9a+Z\nCKsK/I+2u2YjmfXOZPJhTztz3Tl12Kxm7I6eOsif8Qf0wXj7FC/f33P3ya5rjHaeuL9sx+/IWN9b\nR7vCQLJkaEWCvdNbdt71sm9Cn9rnHoM9Y8PmDGvExyXrHqD/fEjn4ptn+tTskjn46Z/+6SRrzjH/\n+2oDXeF7zUVXY7hqz8UHQ9eyNpb2YXweHd/nCsPmfmec7KNXW7qWtHxk9ta53fRj9wfmqavuGD99\nP1pnt/2+uVDdoI/OJj96so8H6Br/rrKDvnlW6d8ZY9cG71NDk7VC1Tmq5qvPMDgV/JFx0wf3EXNm\npYG89hNaXbvPCKBT/Ab9J1vX+PZv//YLr7vesDnbK6aYD75Hm50FsFfhuQrD7A4Gg8FgMBgMzhbz\nsDsYDAaDwWAwOFvcWhpDb6ayTC5J2jJ5l8PYk9DR0pYALJ2g7yU5W75Ca0tBsBygTfS3ZTNLMpZu\nk7Xk5dqWECyJ91G/R4Ca72Us17QEYrnTNYxfvy0D6r+lEH21bLOXrrEM7btdqNk1LGH0cbH6aqmt\nN/sdLaOUrOW6PrLY8qaUEssoXejc39e+9rVJll50InsfVbiP02/e+ta3Jrm8NNJlnszVXt4qWUss\n5GMTzhHoH31smUvjsBxlSYlcfvEXfzHJWlK0nEuO7KSP50zWktnb3va2JGuZ3jKT8bADy3C9ocly\nraViy1OWtY7AuOh7lwU0TnPoe3Q9uVy+zlK/eWZr5pPes1X23kcu01ly2g9V6f6wU/ZtKfPocbBs\nzMYjsqV7vUGJ/uy2ad7YNR1Qxs642zf1Bi22S5foAXlpP1mytmxqg6lNjuRy9EhpcuQ3tCNdzdJq\nl8Hay1v6rJfQvXZ/okdKL+mrubWc7R7zmte8JsmS15ve9KY71+Sf6STfQnb0er8vnQJjYcu9TE1X\n2SJ92Q8d6dKK7EKKCV2iJ70hy/iNse9Zffzwfg3v+dtpWDcdGtAwLvqnD1IpXYeu6vu+CdV8m2d9\n6tRK9yxpfvyg9BaHUEihYKNSjfbjprUtpYSd98bzPd3iFGinfXcf+kKPXI8d7f3tNB/+w72WHtkE\n6FmG7WnTa2PmR/eysuxC/7osKjva+3kVhtkdDAaDwWAwGJwtbo3ZFUWLcESVvbmijzLcN351gW/M\nFLahC/h3MW0RighZ1NAboPaNXCIR7DBGWpSobczoEYhEsESiZFGPCKuLYPeRfNgUMtw3XiWLhdiZ\nIxGZyBQ7JCryufFqQ5/9bUZM5Or3RyDSFrn3fJsXr/vQAH97Mw2WQZRN3vuGLDI2r31oAH3VB3PU\nG526nNRVCfanQsRNL7pAPzapN5xhuOiLa9N/Y+ijYvcNUvpt3OSiT70BRZvYZgd30GGsg4Ltd7OB\nkZ32Jg5zR3cxuRiAfXMgppau0QHMCn3GXP3SL/1SkiWbPmyhmQzt77LUNl+G/dEvLEYfA30T6Jz2\n6D194d/62OSd2TUOumU8mCky1oY+9rGpfVyoMe2+FHymrBt95kOtJhzdgNQrfc1UWS3E5vP3+3Xo\nO1aIfpOT8XSJrT4YhN+wgc3c+N2+gVH5Jv6bv9I//dbWqTDfffwy30yX+zCe/T6hD5i16zaDug92\nabJmUfkPDB6b2Mt70UW/dS3z2ocenArt8UX8Iea/NxPzq7s8jB873odfdWkxr/k7bZGr9tw33aN2\n/9HHjPeRun2fOBWej7rsIn1oeZirfeNoH4/MDqxcdJlBf8mev2T72rMKSVd3G6XPbFUbnuv4/V5t\nbQyzOxgMBoPBYDA4W9was+tJXmTTuVFd0gRrt7Nvnui14bcijz4Gsdk0kY+IQASAAehDHJLFjoga\nRGj+YjZE40fQxZxFgyIrkWWP01/99Lk+dMkyr/cSTP7HBmIyRENKMYEo0jVFm818utbRoxyTNedd\nWkV0KOI3/+Qk
SvZ9uoQxksstSuwc7WTpGZkZl75gcIy7i8x3niSmQ2Tvmkcgl0xOlvk1PvLAvrMP\n0TUd7aOcf+zHfixJcs8991zoq6LlyWJNMDdeY0XkrrE1KyDKvGHT6AX2lQ0ezbfb26Sj7LtZWZ97\nfz+cgH5iJ/ibXiXqfmIcMNdy7dgDJgKzuZfnMfdy3rDe9JiMjuao0lGsIfuhH5gYn3vfWJPLh3+w\nGfNNt8hanl+vImiTHWnXnD3jGc+4c036Ri+VIKMj5LT74VPQJavYrOv1kehXHZPNZnoVhFzYID0x\nt8bbeZN93LbX+/3CNfSTX8J20Y+jhwaYf9c0TjaNPaSzfSR0snSsmX+/YWN8Lz1gg3Qco0d/lOMz\nxn3PS+e20z3j6fKkp4I/Zbtsnw47Ntjck9N+LzNu80lW3/zN35xk2TaGll+Qo2ss9913X5JlXy94\nwQuSXF5BS5Yt9j4K4/Hdo+VP7bvoEmvuWfSNDrBtdpssf+q7fWw6+ZCp8QJ5GZvvt98lx2TprawA\nukTHOt/3OgyzOxgMBoPBYDA4W9was9sFrEUJHSF7whfl7gyJCEQkLKr09C/qEomJVFzb7zs3riPH\nPdLHhugH5s7ro3lDO0TBWCTjErGKmrr4vPGKzLooNBnb0ek6+w54DOUrX/nKJCtaw+Aal6hOpK5v\noqeuCOD3RwtcJ0snzBO5iMCNFxugL3RI9Gdsb3zjG5MsZku7GB99339jvvWl86HpabMifkcnOw/q\nKGuXXD6ukhzogfnEumAo6L2oGVOGbcBsOWbye7/3e5NcZFf8huzI7IEHHkiybMZvHnnkkQt97woY\n+nw3VTqgd7x3IXN/zRF7wEInS4+xF9h+tkbvX/WqVyVZzP1TnvKUJIuRAfaByejjRJPLFR56FUQb\nR48L7goZZN1H+9JRPnVnCDExmEvMEvmwuc5Zdi0+S86ia5CbOXBEcrIYePPSTKPfHj1kgzzYizHx\n1eSLEcLu7/sLzJ825Eeya7LtHFw2SC5dnYf+Yw/3o39dw7h79YHd3nvvvaeI4Q7YrHbZnjkhX7Z8\n1bHy+sm26KpxN8uq7+yFPLCQ5NYHwuzy6pU7/TUOvtk8ngr3QX6RvtDxZvOt/O4VefoQDTJ0D7Yn\ngc65f7g2Hyb/WiUSjHcf7Z0k999//4X3+Df3Wve/m47HbegLX+W5SDs+50f0bc+fdZ9/5zvfeeE7\nfkNOv/zLv5xkrfCwL3PLBr1uee26L8edTmqLzvnNvpJ9FYbZHQwGg8FgMBicLW6N2RU19PGimAWR\nsQjP0/rOnHb02DkaIj85miLbzt3Qtsi5qxfsENWJSEVw/opg9lqep6KPlu3dkT7v3YXG1Wyh7+ur\nyBhzsLOtfTyw8XR+mz5pA0skYhOhq+bQu7mPQD6PNvq4UEwtebz61a9OsiJxfRHp6gO9oRcYjne8\n4x132sbyqgcrn0+kabe9uaKv5EYH5R/qE13d8yRPBcYF82B82AIRLtYJo9vjNFf0HIv93Oc+98Lr\nq3S4mQx96OOQzR3mh4316oW+dl3mU9BH+QImgq5iqDrHL1kMArvlfzB6naMNZISZwFTRMfLA0Ow1\nUc0j+9U/7CmZHWW96TnwB2y0awHr486YYonk+5IPH4Rlazlhq+kH30InyZ5e7Oyp/9mclShMm9+S\n5algc83w+kv+fBUd2Jm7rv/KzrGJ7Nz7rolldG3yYru9V2Xfi8JHdLWMnt+jOcxdH7VXNOmu8RvL\n/j3fwZLRC7bG/2GDselkS469csLXkBd7SpZO8hXa8tfK3r7qewr2Fdvkcu4nf2ju+K69ckbbqDmy\nqua3L37xi5Osew9/h9G1Cumva2And//BHr7hG74hyaqfbo9F28+paEbf773vnsfG3Y/35w59kwdt\nnNrQN88VXveKUR/hzIfx3fsZAV2H3xzQNf27Kcd9mN3BYDAYDAaDwdni1phdT+
aewjGLnsK9L8ro\nqCK5XLOxc9a0ITrAcPUpJ3JORKedIyQ6TS6zJB0tu3azkKcAO+Aa2jYuEaz+i/7svhQVYqOwkfok\nyhJ9YS2T5KGHHkpymQ0hQ/Ml4nINMiMrjBeZ+/xozdBkRbPYA7lqWAV9+Zmf+Zkki+HEQpMflkkf\nRJP6Jq9qr58owpR7qg26guHAAoj66VjXRta2Pu61Xk9FsyiYHXODTTEXncskz4mtYQKwtHIm7Wbd\nmQ+2ol4iFsXfRx99NMmqP80eyBH7jvlhz3TxbupSu7a2uzIEX+Jz+r+vaNAhMjIvmAk69/Vf//VJ\n1jyrCdunLGEpm9naWSc20jlxZIMtPZpzpz0yxdCZO+/zofq6M4TmS//1tXNS+0Q5+i6PkO60b8VU\n7acV8lvGSxesiujTzqidAn3sutTGRA5sko1itZNlK/TZPPNBfAm9IA86+LznPS/JWhnovpDHzq5r\n23zxNWTLJ7tPnIpmPrGMnausr+R31Ultxs3XGI/VMfqCwTPu5zznOUmWL/M+HaQDO2OqDX7Kd9u3\nHM3ZdR8lZ78nB3sYjMX19r5ZkabfGOmuw0z23/iN35hk1Xa2z8I9qysJ8e385d5Pemsvh/t9+8NT\nYZz8CFvvSktWQq6qx97nJrBl9o7BtzJMB/kqutbsMXi9r/Lwb3xOn4jKrvfc86swzO5gMBgMBoPB\n4Gxxa8yuqMqTumhVNNE1REX1opn9MxEHJgCjg5XDmHQdVuystkUEfUrYzkqKIkS2fVKQiHXv56kQ\nQemfaFEk59r66/M+EQczITrSJ/IS6eynVolAfUeEJlIVqXV1ht5l3LspRerYqiMQQYtURdF0xLj0\nBfNBPlg784dF6XyxZkb3/vaJMGRH5qJhfTFe18QIAjliQo9AfzFhfeIWBogtqRQgCtZncmAfxqQd\nDPC+Q9zuWTLFrpATZgezZQ4w3RidriTRtnsEza6zn14VMi5933My2an54hNalvQaq8GXeC3nrOtw\n8xN7vph+sTn5muwVQ3G0Tia/4VoYF2Nhk+zKHO359Fg0et+sc7fdqykYLJ/TWdfy+/00I/cA18KK\n6gM93/XxFJAj28Syy6ek93Tvqlx1tuZ+ZBzmtdlQc/ut3/qtSZYc+Ak+hh/gb/caseaAfzdusiaX\nzuG9CfrWVRa6Yog5ch/ZWXjvdV1d/oyPpsvyXvmgXvExhq4Jvdey1b/OOXcNvudoJSTzrx33Ovr3\n1re+NcnyGz/7sz+b5OI9z5yw4be85S1JVlUF+mJF4Cd+4ieSrCocfDA5WCEwx+7RWOb9WnRO//tU\nQ/p8Kjw/0Ddz0itFXSVltxdtdMUo9u41W2QHPicvekBvXJt8d3tx/V5NtkLS1TSuwzC7g8FgMBgM\nBoOzxa0xuxiRhkhRBOh7orqrWFZP7l0DtneIY2NAhNKRS5/6tV9TpNFVBkQXor7rxvfBgJnBvIh+\nRDld49VfLIP+i6q8L5rqCgl7zrN+i27tEjW+rreJhekTgzB2cpGMpRngUyDKE/Wab9Gj/FFs4TOf\n+cwkq54wZse1sTDkIbKXP7TnfGGJ+6x2uiAfts9RJxd5r13LEsjxCPSBXor+sW4YMuMlH3Misu36\nw9oxZ30q4H5N9ROxC2wNG4XRoFvaFnWLyMnDXNzNCWpdVYAu9rzrqzHsubDY7j5lTK4Z9qvzwswf\nH2L82tMOu9hPb5RjL2/Pb7SBsbkqV/KDodkRTF1Xl+Fb9WnPUeVD2QadMj9ssWt6dn4p+RiTdjFX\ney4edgfMLWn5AAAbeElEQVTj2rnHZHi3TKbf6wOWlo/rurNX1R3GzPrOs5/97CSrXjA95mPoHLm1\njzZXzbonl0+llAfrtXvkUZvpEzmNqX2898lhv//1Xhu62qcZQp+g5lrmxD2p6zbvOZnuc/SB7nVt\n86PVKbrOsvacSEZH7Qkxx7s8+CB+0cmR7k
HsyHitqhkT+zBeNqkPXTM7uXx6J1/rvkhnj+qHnF/t\nuDadp9PXrQgka4WnKyIAmWPj/e0VJWMyBjqv/f3kNbKkO/yZ+0LnHF+HYXYHg8FgMBgMBmeLW2N2\nPYWL5kVVIkERpOihd70n6ylfjo1oQJt2Uj/88MNJLkfMog/Miid/0RoWZ48YRbsiEJGqqBlL0DX8\nToFIqhlNkaS/GCwReNe7E9nIIxQ1yU0zzj0HTmQpisPqmQeMRJ98JVLVVwxFV8q4m5PlzDk2sE8v\nevDBB5MsFtX3yFHEJ/L3V2Tau9H36JnMRLmqDJhX7JAcNXop+sVC0CU6ij3xuyMga6wy1kBfsEds\nCWtCV0Xu+ur75MfWjHmvZdgVHqwyqOhAt+hoR/bNyriGudiZz1NhTjAZdIzu0gd99b2dReMLenxs\nsGs2m18yZINsji513tue19YsaNfm1add/qegT5mkH2TbOak7owt96pz50cfOF2wGBkvbrBt5dm3b\n/T32TaZ8kt8e9SF8sbl1HX6Fn6APXTkgWTKzGqJKB1sxfuNV4YZsMVfGhPH2Ob+w7/HgY3yHbPkz\n1z5am7pPMSQftti6Tg77Soj3WqfI1PjdL/yW7fEH3/Ed33Fh3F3Td/fFPjN/7nfu/+zk6Ooh/8H/\nkTMd770zdNmKTLJ8qzbYnlPZzLc55ZN+4Rd+4cIYAItPZ117Z8z9bx8EX0t/6XOz7DeBfrB1c2Hc\n7M+ql+eNfa+LVSRzxk96zZZf//rXJ1nyUBGJvui7HF3XIt/9/qlfdLLnos9quA7D7A4Gg8FgMBgM\nzha3xuxiBjx19y4+T+edp7mzHaLKZgswu32KkyhaBPeCF7wgyYqI5NKJeOGqeob6L8oRiYls9nzH\nUyESaeZHhCU67FN7Ol9YNNW7l7XTDHayco4wW10nU6QuwhI9Yl9EsBgQYxCZ3k01BswFRqPrCPfJ\nR3QEW0AezTrRGzomMrb7Nlky7PE3GyYP1px0TUjX8v4b3vCGJMfzMZM133RMziE2hf5irMwhpoL8\nzKE5kdurdqwofGe4RMXY4G/5lm9JsmTYJ02RWzO85qRrPe87vk+FNtpeemUEW4B92lkjMjB/7FkV\nCTqDPVddw/z1rmk25lpd6SW5zAZ63ez20bx/9mz+zbM8ud6zwK/t+ZH0lm6xg65oY77ID0PjtWsa\nt2s1m5pcPvmSLLue9Ac72fIq6KM56uor+tKnd1pB2fvi3kF32Bb7Jmu+WJ/5KnNNvtjJzpNM1l4N\nbfAd7mvkdFPd0Ibx9t4Wufrk26svu+7Sg2bTyME4ydD3+CA+xb2ZntBde0b2exN/TofoGLu/29r2\nTuyj/+43bFffMN3k813f9V132jDv9ip4LtAX1Sh+7ud+7sLn5tBKwStf+cok6z7Kd3VFkWT5efrt\nNX0h26NMtzlkj10FCVttDunH7k+7oow5o2v0hU1qi55guvWBPtFJ9rPX9nVP5qu1YX5d+6Z9MsPs\nDgaDwWAwGAzOFrfG7IrsPel7wvcUjmERbXct0eTyqUUiG5Fe13hsVkpE2zUlRSeih/20K3mw+u2z\nzjFudvgUYEdaJtgVUa/xYAX6rHByEDV2TWDRkXyY5PLOTQxV79wWxYkCsQtYGH0Q0bnWXjfxVIju\nsSsYDuMTyTX7L6oUkTt9Rg6Wee1qG3uOk++IGjtS7fnH/HTVELmsvk/n5Isdgeievu+1BpOlc+SC\nETFuv5MXhik2VsyR7+273zEVL3rRi5IsWYuisVGdo9cna9FF36MvR2vK7n1oxrJ3J5tXumqcO8yv\n37JjbWMJsK2+x4boWp8ARf/3PnY+r1xVsqGvR0+VI2v2oA/Gwh9iqJqlTZZO+G3Lluz4iz6Vjr/o\n/QRtR7utkWXn7bFz/e6+3ARy5Sf5D3aELSJvfd9Xb7BmXWcY+gRJDKZrskn3DTrpPsaP7v7Ae736\nyYf0aV
Onoivj6GvLmS3q087skk3nMps7Muab/DVev3cvIz/y1bddHl1f3LXNxd3m7NIzc9Q59OTN\n9vvUuCR58YtfnOSyvyNb+4bcw/ggNsdvshPXZD/2RLzuda+7c82uosP3kH1X0zgVnaOtj33iYLOs\n7CZZ9xzjsopgnsnanGGPPVfwC67d9f/ZLv3Zr4Vl71q99Ma99zoMszsYDAaDwWAwOFvcGrMrWvM0\njjHBFIk2PdljzHaGsPM7RLwiDdGVaEn05NqiDhGRiKXZpj3Sx1BhQEQmGL4+f/4ImpHq6gLgzGtR\nkL+u3WdBY+rIWns7m0sWokUMjChR21hAMjYvXUdVhPfBzle/CfsO9mRFkr3DVfRLH4zXHInIMVp9\nQhDm09iTJQ/j7EoXxul156LSKTKWA+m1z4+A/mILRNyu3XUzrQg0y4KZ8NocddSslmiSfNu3fVuS\nFR1juI2/q3GQaZ/qQ8byylTUYHNHwBd05Yte+TCX7H5nMrF6ZOY3bKiZJ7Lxmg6qt4qx6ComV+0u\n158+4cf8HJWJuXBtPtQ8985ubKOVgGTl7vdOd7bIp9JFttjzbNwYOn7D73d2jG+gf/rTp3EdrU7R\ndWO7ik1XnmDj+x4Ncmh2i75jj41P/mfLtquSdM30PQeRfmLY6DU50bmjp3Sak66o4HWfzEjHd3nw\nwfI3sWq+0/dz4yMXcmsWtutd76tK9NoKhX65RrOGpwIjyj7IV1/4BHrpPmPOkyVDfo/Pdd/wvIDZ\nNbeubbzyhMmlnyvsGUhWPjRdcn9j19q07+BU9JywH3K10kQeVi33/Un6yaf5rr55n7z4OG2zeXNA\nZ+lX54YnawXEe+bTPUY/b6o7PMzuYDAYDAaDweBscWvMbp9MJGJshkxkKPoUESWX80NFQU4v6YgP\nC6Ft0QWmVEQkuhDB7bkvIhVRt2jYOcyYoZ3JORW9c72jGOMR2RuXKBIj3PUUsS2iJRHPXgu36wfb\nZW+euh4kpgdDKQLTZ5G8PtxNNQbzqp8YGOPWN3MiEpdnd8899yRZumWMzWSS386gdz4SFq7bIvOW\nn+/TLdGmOVarUUWQU6AN48Z4YBPoILbyp37qp5Is9oXtOJcd860KBeaCHPY6nuyv89vML9ZNlM92\n6AmbEsH7nDzozxFgNLRFz42zWQFM575SQ0YYmq7oQVfIBAPTuWf0wvt+73e7rflNV+pgI33W/ano\nGqX0ha+iB131ZWd0HnjggSSXV4/oBpvpyjCuTX7mnezpkrnZT6Eybv3rnOq7Zbr5i2bQ92snlxnd\nvYanE7HoNeaqfSs9xjaSm8/JnJ4ZS+cn7/3s0/jMk7aP7oPAbLEL1+Q/sI69D2HPHTff+t86ipGk\nw/rIPvgN4yZr92aMsWsny99h6PTf/ZtO3u0Jexhd4+xVO3pJh3eG0Nzwpcb9mte8Jsli9umP5wVy\nIgfjZQvkq4/78wSZ+3vvvfdeGBf/crSaC12lf1ZCycFcNvu+26VqTmyPbyMXK2NqBGNl6ToZ86PG\naEXxqtMDjdc89eq0fu65xVdhmN3BYDAYDAaDwdni1pjdfnLvWnc+79Mv5NIlK6ITVckHE0WKqvzF\nBImyuiac6MLv9Wk/DU1bohmROiak6+Qdgf40a9A7YHsHcJ8UAlgWzAZWirz2XCzRrGi55S8CxVhh\nEbVFRqJjn/v+0XPL93FjU7AK2sSAk5M8QixkM2Siwa7PaA73POtmE7BNxkVeIlURrD7rk0jUNTG6\nux6fis5/Ihd6agXA3KnGYSwi3u/+7u9OstgHTPCv/MqvJFmMuBqR+3jYjt3X5hsLgE1QpYDMMXq9\nY5b+HN1JvY9T250H2dU8MAA7s4eB7FUULIDvGidbwjS0rnlN1l3VJFnzZ8zs2W+7pvWpMD7t94lH\nfBTZu/6eG2/+umpAV1HAQOljM/T8iGvwqX26YbJ0R9vNfHeVnVNhhadZ
Rq/lvncd9d3fs/uucGKu\n9BEjTn/ovVU0/rNP+ev6o3sb2D9soP6T19FTOpvB1V6fAuoeZqy7HtKPnnffpTd8M9mSB10z7l5t\n4xf3+wV9IHO+Vj44O9/zOE+BdvkgNvrmN785ydJhc8a3f9M3fdOdNuTqdw1zttZ5v+TnfmAl2Pt8\nNjtyj99PjsWG0mO5uRhZK3c35ag2uiZ0M/3ur2zZHKnuk1zes9F1la0Y9ymXzey6Z5ET+cJ+v6AP\nnpu6dm+vvlyHYXYHg8FgMBgMBmeLedgdDAaDwWAwGJwtbi2NoTcloMYth/RyMBp/p7NR2WjsPgbS\n0pNlCktJ6PjesNIlm65aeu8i+uh3Sz9dcugIurBybyiAPv6OLC1rW7aUtoG+tzTQG/WStYRhzJYb\n9ME4LaFps48o7Q1M5tWSwhH0BjV90Tfj0we6Qw6WTiyNWWKzDKTP+njVMocUATph6dBvybIPrLAc\ns29MStayXS/LnAJLPpYCLX3qm7bJje10eoxr6xs7euYzn5lkLee98Y1vvHNtS6rGZQnYaxtNzEkf\nEkMf7r///iRr6c3mCkthL3nJS06WRx//aPyW0iwB8g+WbS1TJqtEVBdUv24Dou9ZliRb7ZAtnWSb\n+/Gn2mBrrmE+zPN+XPMp6CVx7bLJLndnCXUvuacNqQ5siN73xmH+jn1737jNu7FJNbP8nyydoXf8\nHVn2JuZTwa8bi74Zr/Qd8mq/kCzfwafSOZuayNT4eiMan9Ublr3fhfqTdW/xGX1gz506cyo6naNL\nkWm/N/7uZSN9lx7YHGru2A/ZS51zz/U78mo/2EfB7/3RBj8vTcXro6Xp6IE5UyaMTyMPvu6FL3xh\nkov6YW72TY1739iFY9HJz0YufkPfH3nkkSTLTvRl91m+S8f4kd706550FO0/2YD7hNQ9c76XRTN/\ndIsf6E1u/AK76hRUNut3/azmGSC5fL8z/j5kim5eh2F2B4PBYDAYDAZni1tjdnvjh0hfRCCK8Peq\nTVi9acBf0RaWTTRps5Eo2ueiTBHTfhRdcjHKxDJg/DCd2nSNu9mQhT3tUjn634cqiLj0W1K8yMbv\n+oCOTpbf/8dA9KESrqENbYrUyEX0LarsY4SPoFmCLoqvb5Ldjde16RIWhVx6gxC5Y5j28TSz5+jI\nZkUxPF2ySSRvzryvMPgRaFtUa67IViSOLemjF43BphuHRtDl3vCHddjHQQfpEJn7LRmL8G0iwVYB\nGzWmu9mw15tgjJddYxUwU9gXrEtymVXto0i7VJBx0ymy0gcMBXvy+33VoA/BwbDSJW2zpaPo0mLG\n0sfh0vv96F5sUcuQLWJ9yK3t3CpaH+Vs06Ox7UwY22B/9NrfPhzjVHTZN3aCwaXL9Iic9hWvV73q\nVUlWaSi289rXvjZJ8oxnPCPJ5UNUzC07eMMb3pBk6QH56sN+lGmvONExMu5DMk5Fl8nr8lD0AeOJ\nKdyZTDpkHM3KswP3Q7ZHtl2qzrjpiXb3smrusb2xmmx709+pUOasS7r1CrG+YWPpy35Nv9FvK1ie\nUXqzORlb4er7Cxbe5lvMcJK86U1vSrLsvDei382qYXL5yGbArNObLiO339v7vsCve57wmq71cfLG\n0MeOmxvy2VdK+Cb+gc+iL+brpvKnw+wOBoPBYDAYDM4Wt8bsevLHhIiQRAkiIdGVCGgvEyNiFz11\nzk7n7IEoqnOB3va2tyW5fPzqfryoKLPz4ZSkEi3vpWROhajWeDoa3hmYZEUumJGOcETXIjDv69su\nS9GPtsjfeJotFan18X8ifZGp9/vo31OAZRNxkovXomR96GOA+2hK0aIIz0qAPu/lXbo8nd/QBbrR\nh2zQBzmoxoAZva4Y+ymgrx2hKjWDeaCj7OG+++5Lkjz00EMX+tSHq/g+5vj1r3/9nWsoNdYF79lW\nl8NxPDJ2CCOGFWD35Nb5t6dAv5sJ
ZAd01PtYA79LFhPRbKjf0Bm2hpmga5gc9oIBxEaS6Z5fyYbI\nhN718d9HDw1wLfrP/2GZ6WrbLL1KLh+tiYHssj78Qe8vgM5txuBflbvfJdd8l1/ma4+WUjJXxs0f\nGAud6/J5+6pcl2u06qfP/EEfD86+6aK5ZA98r/d3/0jmxu1aXRby6D3GXGifnhhDH3RhNWMv1acN\n/WcfGEh6bu7YYLPS7Iec+Oj2+cmaR/cx1zAnbPdu9oUk697nABHMqP055INR9f7eX/IwV/pE53zP\neJ/1rGclWSsejukmT74c8/3www/fuSbfSy7tL8j+6D2XPZCH1TjywNL3iut+9HcfwsLf80G9L0Zb\n7gfAF/RzhLHt7XQeML/a7PL+HHcVhtkdDAaDwWAwGJwtbo3ZlbMi6vaEj3XAXomURKM7Q/Loo48m\nWVGDiFzbogERYB8qIGIVqfXRsKL1nVHFCvSBBaIGUfe+W/BUYGb0p3dGk42ox2s5lyIsEU4fPtFH\n2u7FwkViWA75mlgfrIdcO211VYrOtb6qiPypME+dB3wdU9O7U8nH942JXEWdnQObXM6xwr7ZmQsd\neXauIzlhPl3LnB0BdoCtmBNVE7CK5mzPuU0WA6yP9Ft7dgJjgvfcLTLso6ox2FgEttj5Y+aS7pkr\nY7ByczfAvmHCtE0e9N/rvUJGH7DQBfbpt/kjh2aLydzRy8ZLXjvrRI+xGVixzsHejyk/BfrGP2gH\nG+c1W9W3vRg/VqdZEW1jrOi973dBfzbm+77HB++rSvph3vitzik+emgA3bvOR5GvFS/v06Nk5V7S\nMf02n2yHjvWx0b5Hz+kJebjn7GPrXF12Tw7uZ5jXU0H2zTazyV516aOzkyUrOmZVTF+Mi66Rg/f5\nZmPhg/o49b0CketjhXvvijaPrg7xc3wPeRi/uSEH98Z9xcuxtw6DMO7eq9AHdLT9dHUqzwK93yJZ\n95DONdcGX3WU6e7nHn6JHPSdHtGbfW+BeTWOXl3tvnmfnmjL5+zCX+3u+eqeXcw/G/PXXNxkL8Ps\nDgaDwWAwGAzOFrfG7IqURf6iCYyCKKvz1kRXyXr6t5tZFNE7xj35Yymwxb1LWTQlQsAA7YyfCMM1\n5dR0buqe13MqjL0rIIhQXLtzN/2V0woiGvUkO0q8Kgeu64OSjWu2rLzfR012/cjejX8EokGsoShQ\nFI1tIR/z632Mngi9mTTMwJ4L27llmNjOzSMPkTt2Civj/a78cHRn+f4bfekcbblKIljzTRdVgNBH\n9iNKfv7zn59k7Yzd65rSbzZlPGTY+dJsDftA9trElKsfebRGZrLmjR50XmAfp0qP9vfJkAzoVteo\n7iN9yZrO8TGYCewQubHF5PKqCbgWvT2ak9msI7ax62b2ysaePy5flr3SBbmm2uh6083cmG8+iS+9\nard953WyQ/6wqwScCnPjXtN+QXts2KrVXgOYP7Qb3jzao9HHiJNpH23bK15dSWHf0W6+yIqMewXS\nKsSpaEZXfdQ+2rXzsXeWtXO3faZt/t8Y+ME+XrqPOvc97ezMf9t5r5bpk9enAqNrTswtvbEyYG7d\nyx588ME7bXgeoTvutX1MOn9nDF21xxw3C0l39SFZvpJv1Qcy5f+O1vo3Pj7MXLEH1/PM4/t7vm3v\nfyEHc6aOOtle99zHfxgrfaIfe8UUKx1qgrfukTHffh2G2R0MBoPBYDAYnC1ujdnt3ZqiBPkoIhoR\nIVZvf6LHaNkl2MwItgW7gHXShijLNfr0D8zLvjsXU6efvtNMnxytI8AKYOREN/vpS8llRk7kIpIh\n0z4pDjvj9d5H1yYjURyGFjMj8ur6ssaN+ek6pM1inYKO+r0W7YnA9WVnA5IlJ5GfaLPzw4xt33Xc\ntWpdo/Mhm3UHESjdpDd3u9M+WdF/y7zZVOOy+uBabMrcYBfo/atf/eoLfd7rVory6RRZmW+6imVx\n
TTpK57ACGIAHHnggyXFWJrnMTJI5ndU3eoHx2HcP05HO9zevXR+arPkM45T/R5f0jdz2HH7Xuu7a\ndE9/T0Wvhul71w3l/3qn+N5//lgfsWCdi8xPaKNPLdJOV7jZWevOz8MOYoPo5547egroAZujeyoq\n0EFy0bedQcbM+U6fkIZN0zeMJ9ukB/SF/9COud/lQY/9lkyxe+RxdLd950m6z7BZfrHzj/d80T45\nkK8wPnLqij+dy9w5zRhyLOG+TwaDrc3Ol+9Tt06FCjP0oU9c7ZUhJ0zu+y2sgNAZ+kIv2J555j+x\nsXw2v2KujRlLue8V8RtycB/na31+tA6z+2OfJrnnsCfLR9DHfYWhT5RjU9qke5hr/gSj3as6/tIH\n8tifycwTZpcfYYP0Yn92vArD7A4Gg8FgMBgMzha3xux6GhcRi6o8nYuUmnXbWU4RhUhUtCjaFLmJ\nMp7+9KcnWVGSiLVPPxExi0p21k60g7HRv44m/m/stvdXBCsSx8yJWLqmX++WJDt/RZf7WdGiYxFp\nn8LSUW/vUO1TaERod1v7MFnzaX4638v8GxemRl9Ecn7fO+07h21nBozDdzBY5EQ+omvjdToXFoIO\n9WkuIt0joFtsB9tGz5vRxABjk9S+xQT4Xus/Oe85Ts1I+S5Ggmxd03i9j9k0R12n+W5OCyN7LCy7\nNy7z3Pnne/64eSJDv+262eZV/Uu2hS3sfOmuW72zyZ3P3DVbe1ynAutBN10bu9Z1NMlnPzmQDzKP\nzdy3nZNP5/D7Pfs33+0f9mtcd+oitu9onV190LfON6ff5M1Wd9/XJ3iau2aJupIDvTam1hO/Z7P7\nvhAyaqbNSotVzqN5/+adXIzJePn4rnKw665xuTb/ry3Mps97Jcsc9CoVubLHPd+UTvZOfvrRfv9U\nkIf7Cfmydf6y2WsrYnsfzCf2V1WWPkHOcwJZ88VdOcqYPYvce++9d66pag5do0P2ZGjj6D3GuOgd\nuzHHxm+ufW7FcB9X/9Y9xb26T6Y0d+bfX76aX+m69cmSPdl6DiIfMjTP12GY3cFgMBgMBoPB2eJx\nN+1gGwwGg8FgMBgM/n/FMLuDwWAwGAwGg7PFPOwOBoPBYDAYDM4W87A7GAwGg8FgMDhbzMPuYDAY\nDAaDweBsMQ+7g8FgMBgMBoOzxTzsDgaDwWAwGAzOFvOwOxgMBoPBYDA4W8zD7mAwGAwGg8HgbDEP\nu4PBYDAYDAaDs8U87A4Gg8FgMBgMzhbzsDsYDAaDwWAwOFvMw+5gMBgMBoPB4GwxD7uDwWAwGAwG\ng7PFPOwOBoPBYDAYDM4W87A7GAwGg8FgMDhbzMPuYDAYDAaDweBsMQ+7g8FgMBgMBoOzxTzsDgaD\nwWAwGAzOFvOwOxgMBoPBYDA4W8zD7mAwGAwGg8HgbPF/ACxYFxmbzJy/AAAAAElFTkSuQmCC\n", 494 | "text/plain": [ 495 | "" 496 | ] 497 | }, 498 | "metadata": {}, 499 | "output_type": "display_data" 500 | } 501 | ], 502 | "source": [ 503 | "# Plotting the weight images, we can recognize similarities to the target images\n", 504 | "plt.figure(figsize=(12,3))\n", 505 | "for i in range(10):\n", 506 | " plt.subplot(1, 10, i+1)\n", 507 | " plt.imshow(weights[:,i].reshape((28, 28)), cmap='gray', interpolation='nearest')\n", 508 | " plt.axis('off')" 509 | ] 510 | }, 511 | { 512 | "cell_type": "markdown", 513 | 
"metadata": { 514 | "collapsed": true 515 | }, 516 | "source": [ 517 | "Exercises\n", 518 | "=====" 519 | ] 520 | }, 521 | { 522 | "cell_type": "markdown", 523 | "metadata": { 524 | "collapsed": true 525 | }, 526 | "source": [ 527 | "1. Logistic regression\n", 528 | "----------------------\n", 529 | "\n", 530 | "The simple network we created is similar to a logistic regression model. Verify that the accuracy is close to that of `sklearn.linear_model.LogisticRegression`." 531 | ] 532 | }, 533 | { 534 | "cell_type": "code", 535 | "execution_count": 23, 536 | "metadata": { 537 | "collapsed": false 538 | }, 539 | "outputs": [], 540 | "source": [ 541 | "# Uncomment and execute this cell for an example solution\n", 542 | "#%load spoilers/logreg.py" 543 | ] 544 | }, 545 | { 546 | "cell_type": "markdown", 547 | "metadata": {}, 548 | "source": [ 549 | "2. Hidden layer\n", 550 | "---------------\n", 551 | "\n", 552 | "Try adding one or more \"hidden\" `DenseLayers` between the input and output. Experiment with different numbers of hidden units." 553 | ] 554 | }, 555 | { 556 | "cell_type": "code", 557 | "execution_count": 24, 558 | "metadata": { 559 | "collapsed": false 560 | }, 561 | "outputs": [], 562 | "source": [ 563 | "# Uncomment and execute this cell for an example solution\n", 564 | "#%load spoilers/hiddenlayer.py" 565 | ] 566 | }, 567 | { 568 | "cell_type": "markdown", 569 | "metadata": {}, 570 | "source": [ 571 | "3. Optimizer\n", 572 | "------------\n", 573 | "\n", 574 | "Try one of the other algorithms available in `lasagne.updates`. You may also want to adjust the learning rate.\n", 575 | "Visualize and compare the trained weights. Different optimization trajectories may lead to very different results, even if the performance is similar. This can be important when training more complicated networks." 
576 | ] 577 | }, 578 | { 579 | "cell_type": "code", 580 | "execution_count": 25, 581 | "metadata": { 582 | "collapsed": false 583 | }, 584 | "outputs": [], 585 | "source": [ 586 | "# Uncomment and execute this cell for an example solution\n", 587 | "# %load spoilers/optimizer.py" 588 | ] 589 | } 590 | ], 591 | "metadata": { 592 | "kernelspec": { 593 | "display_name": "Python 2", 594 | "language": "python", 595 | "name": "python2" 596 | }, 597 | "language_info": { 598 | "codemirror_mode": { 599 | "name": "ipython", 600 | "version": 2 601 | }, 602 | "file_extension": ".py", 603 | "mimetype": "text/x-python", 604 | "name": "python", 605 | "nbconvert_exporter": "python", 606 | "pygments_lexer": "ipython2", 607 | "version": "2.7.6" 608 | } 609 | }, 610 | "nbformat": 4, 611 | "nbformat_minor": 0 612 | } 613 | -------------------------------------------------------------------------------- /2 - Lasagne Basics/Introduction to Lasagne.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 17, 6 | "metadata": { 7 | "collapsed": false 8 | }, 9 | "outputs": [ 10 | { 11 | "data": { 12 | "text/plain": [ 13 | "{u'scroll': True,\n", 14 | " u'start_slideshow_at': 'selected',\n", 15 | " u'theme': 'simple',\n", 16 | " u'transition': 'fade'}" 17 | ] 18 | }, 19 | "execution_count": 17, 20 | "metadata": {}, 21 | "output_type": "execute_result" 22 | } 23 | ], 24 | "source": [ 25 | "from notebook.services.config import ConfigManager\n", 26 | "cm = ConfigManager()\n", 27 | "cm.update('livereveal', {\n", 28 | " 'theme': 'simple',\n", 29 | " 'transition': 'fade',\n", 30 | " 'start_slideshow_at': 'selected',\n", 31 | "})" 32 | ] 33 | }, 34 | { 35 | "cell_type": "markdown", 36 | "metadata": { 37 | "slideshow": { 38 | "slide_type": "slide" 39 | } 40 | }, 41 | "source": [ 42 | "What is Lasagne?\n", 43 | "------------------\n", 44 | "\n", 45 | "Lasagne is a lightweight library to build and 
train neural networks in Theano.\n", 46 | "\n", 47 | "- Bookkeeping device\n", 48 | "- Collection of helper functions" 49 | ] 50 | }, 51 | { 52 | "cell_type": "markdown", 53 | "metadata": { 54 | "slideshow": { 55 | "slide_type": "slide" 56 | } 57 | }, 58 | "source": [ 59 | "Design Goals\n", 60 | "------------\n", 61 | "\n", 62 | "- Simplicity, Transparency, Modularity\n", 63 | "\n", 64 | "- Abstract Theano as little as possible" 65 | ] 66 | }, 67 | { 68 | "cell_type": "markdown", 69 | "metadata": { 70 | "slideshow": { 71 | "slide_type": "slide" 72 | } 73 | }, 74 | "source": [ 75 | "Layers\n", 76 | "-------\n", 77 | "\n", 78 | "`Layer` classes are the bulk of Lasagne\n", 79 | "- Abstraction of a layer of neurons in a network\n", 80 | "- But also used for other functions (pooling, reshaping, etc.)\n", 81 | "- Track connectivity and data shape\n", 82 | "- Create and manage parameters (weights)" 83 | ] 84 | }, 85 | { 86 | "cell_type": "markdown", 87 | "metadata": { 88 | "slideshow": { 89 | "slide_type": "slide" 90 | } 91 | }, 92 | "source": [ 93 | "Other Modules\n", 94 | "---------------\n", 95 | "\n", 96 | "- `init` - weight initialization algorithms\n", 97 | "- `objectives` - loss functions and evaluation metrics\n", 98 | "- `regularization` - penalty functions\n", 99 | "- `updates` - gradient descent variants" 100 | ] 101 | }, 102 | { 103 | "cell_type": "markdown", 104 | "metadata": { 105 | "slideshow": { 106 | "slide_type": "slide" 107 | } 108 | }, 109 | "source": [ 110 | "Typical Usage\n", 111 | "--------------\n", 112 | "- Define layer sequence\n", 113 | "![Imgur](http://i.imgur.com/KsOeduv.png)" 114 | ] 115 | }, 116 | { 117 | "cell_type": "markdown", 118 | "metadata": { 119 | "slideshow": { 120 | "slide_type": "slide" 121 | } 122 | }, 123 | "source": [ 124 | "Typical Usage\n", 125 | "--------------\n", 126 | "- Get expression for output\n", 127 | "![Imgur](http://i.imgur.com/k32Ob5U.png)" 128 | ] 129 | }, 130 | { 131 | "cell_type": "markdown", 132 | "metadata": 
{ 133 | "slideshow": { 134 | "slide_type": "slide" 135 | } 136 | }, 137 | "source": [ 138 | "Typical Usage\n", 139 | "--------------\n", 140 | "- Get expression for loss function \n", 141 | "![Imgur](http://i.imgur.com/2kPwN5L.png)" 142 | ] 143 | }, 144 | { 145 | "cell_type": "markdown", 146 | "metadata": { 147 | "slideshow": { 148 | "slide_type": "slide" 149 | } 150 | }, 151 | "source": [ 152 | "Typical Usage\n", 153 | "--------------\n", 154 | "- Find gradient of loss wrt params and optimize\n", 155 | "![Imgur](http://i.imgur.com/kaVbU6P.png)" 156 | ] 157 | }, 158 | { 159 | "cell_type": "markdown", 160 | "metadata": { 161 | "slideshow": { 162 | "slide_type": "slide" 163 | } 164 | }, 165 | "source": [ 166 | "Typical Usage\n", 167 | "--------------\n", 168 | "- Update parameters\n", 169 | "![Imgur](http://i.imgur.com/PUiPpSX.png)" 170 | ] 171 | }, 172 | { 173 | "cell_type": "markdown", 174 | "metadata": { 175 | "slideshow": { 176 | "slide_type": "slide" 177 | } 178 | }, 179 | "source": [ 180 | "Resources\n", 181 | "----------\n", 182 | "\n", 183 | "- http://lasagne.readthedocs.org/\n", 184 | "- https://github.com/Lasagne/Recipes\n", 185 | "- https://groups.google.com/forum/#!forum/lasagne-users" 186 | ] 187 | }, 188 | { 189 | "cell_type": "code", 190 | "execution_count": null, 191 | "metadata": { 192 | "collapsed": true 193 | }, 194 | "outputs": [], 195 | "source": [] 196 | } 197 | ], 198 | "metadata": { 199 | "celltoolbar": "Slideshow", 200 | "kernelspec": { 201 | "display_name": "Python 2", 202 | "language": "python", 203 | "name": "python2" 204 | }, 205 | "language_info": { 206 | "codemirror_mode": { 207 | "name": "ipython", 208 | "version": 2 209 | }, 210 | "file_extension": ".py", 211 | "mimetype": "text/x-python", 212 | "name": "python", 213 | "nbconvert_exporter": "python", 214 | "pygments_lexer": "ipython2", 215 | "version": "2.7.6" 216 | } 217 | }, 218 | "nbformat": 4, 219 | "nbformat_minor": 0 220 | } 221 | 
-------------------------------------------------------------------------------- /2 - Lasagne Basics/spoilers/hiddenlayer.py: -------------------------------------------------------------------------------- 1 | l_in = lasagne.layers.InputLayer((None, 784)) 2 | l_hid = lasagne.layers.DenseLayer(l_in, 3 | num_units=100, 4 | nonlinearity=lasagne.nonlinearities.rectify) 5 | l_out = lasagne.layers.DenseLayer(l_hid, 6 | num_units=10, 7 | nonlinearity=lasagne.nonlinearities.softmax) 8 | 9 | X_sym = T.matrix() 10 | y_sym = T.ivector() 11 | 12 | output = lasagne.layers.get_output(l_out, X_sym) 13 | pred = output.argmax(-1) 14 | 15 | loss = T.mean(lasagne.objectives.categorical_crossentropy(output, y_sym)) 16 | 17 | acc = T.mean(T.eq(pred, y_sym)) 18 | 19 | params = lasagne.layers.get_all_params(l_out) 20 | grad = T.grad(loss, params) 21 | updates = lasagne.updates.sgd(grad, params, learning_rate=0.05) 22 | 23 | f_train = theano.function([X_sym, y_sym], [loss, acc], updates=updates) 24 | f_val = theano.function([X_sym, y_sym], [loss, acc]) 25 | f_predict = theano.function([X_sym], pred) 26 | 27 | BATCH_SIZE = 64 28 | N_BATCHES = len(X_train) // BATCH_SIZE 29 | N_VAL_BATCHES = len(X_val) // BATCH_SIZE 30 | 31 | for epoch in range(15): 32 | train_loss = 0 33 | train_acc = 0 34 | for _ in range(N_BATCHES): 35 | X, y = next(train_batches) 36 | loss, acc = f_train(X, y) 37 | train_loss += loss 38 | train_acc += acc 39 | train_loss /= N_BATCHES 40 | train_acc /= N_BATCHES 41 | 42 | val_loss = 0 43 | val_acc = 0 44 | for _ in range(N_VAL_BATCHES): 45 | X, y = next(val_batches) 46 | loss, acc = f_val(X, y) 47 | val_loss += loss 48 | val_acc += acc 49 | val_loss /= N_VAL_BATCHES 50 | val_acc /= N_VAL_BATCHES 51 | 52 | print('Epoch {}, Train (val) loss {:.03f} ({:.03f}) ratio {:.03f}'.format( 53 | epoch, train_loss, val_loss, val_loss/train_loss)) 54 | print('Train (val) accuracy {:.03f} ({:.03f})'.format(train_acc, val_acc)) 
-------------------------------------------------------------------------------- /2 - Lasagne Basics/spoilers/logreg.py: -------------------------------------------------------------------------------- 1 | import sklearn.linear_model 2 | 3 | clf = sklearn.linear_model.LogisticRegression() 4 | clf.fit(X_train, y_train) 5 | clf.score(X_val, y_val) -------------------------------------------------------------------------------- /2 - Lasagne Basics/spoilers/optimizer.py: -------------------------------------------------------------------------------- 1 | l_in = lasagne.layers.InputLayer((None, 784)) 2 | l_out = lasagne.layers.DenseLayer(l_in, 3 | num_units=10, 4 | nonlinearity=lasagne.nonlinearities.softmax) 5 | 6 | X_sym = T.matrix() 7 | y_sym = T.ivector() 8 | 9 | output = lasagne.layers.get_output(l_out, X_sym) 10 | pred = output.argmax(-1) 11 | 12 | loss = T.mean(lasagne.objectives.categorical_crossentropy(output, y_sym)) 13 | 14 | acc = T.mean(T.eq(pred, y_sym)) 15 | 16 | params = lasagne.layers.get_all_params(l_out) 17 | grad = T.grad(loss, params) 18 | updates = lasagne.updates.adam(grad, params, learning_rate=0.001) 19 | 20 | f_train = theano.function([X_sym, y_sym], [loss, acc], updates=updates) 21 | f_val = theano.function([X_sym, y_sym], [loss, acc]) 22 | f_predict = theano.function([X_sym], pred) 23 | 24 | BATCH_SIZE = 64 25 | N_BATCHES = len(X_train) // BATCH_SIZE 26 | N_VAL_BATCHES = len(X_val) // BATCH_SIZE 27 | 28 | for epoch in range(10): 29 | train_loss = 0 30 | train_acc = 0 31 | for _ in range(N_BATCHES): 32 | X, y = next(train_batches) 33 | loss, acc = f_train(X, y) 34 | train_loss += loss 35 | train_acc += acc 36 | train_loss /= N_BATCHES 37 | train_acc /= N_BATCHES 38 | 39 | val_loss = 0 40 | val_acc = 0 41 | for _ in range(N_VAL_BATCHES): 42 | X, y = next(val_batches) 43 | loss, acc = f_val(X, y) 44 | val_loss += loss 45 | val_acc += acc 46 | val_loss /= N_VAL_BATCHES 47 | val_acc /= N_VAL_BATCHES 48 | 49 | print('Epoch {}, Train (val) loss 
{:.03f} ({:.03f}) ratio {:.03f}'.format( 50 | epoch, train_loss, val_loss, val_loss/train_loss)) 51 | print('Train (val) accuracy {:.03f} ({:.03f})'.format(train_acc, val_acc)) 52 | 53 | weights = l_out.W.get_value() 54 | 55 | plt.figure(figsize=(12,3)) 56 | for i in range(10): 57 | plt.subplot(1, 10, i+1) 58 | plt.imshow(weights[:,i].reshape((28, 28)), cmap='gray', interpolation='nearest') 59 | plt.axis('off') -------------------------------------------------------------------------------- /3 - Convolutional Networks/Convolutional Digit Recognizer.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": false 8 | }, 9 | "outputs": [ 10 | { 11 | "name": "stderr", 12 | "output_type": "stream", 13 | "text": [ 14 | "Using gpu device 0: GeForce GTX TITAN (CNMeM is disabled)\n" 15 | ] 16 | } 17 | ], 18 | "source": [ 19 | "import numpy as np\n", 20 | "import theano\n", 21 | "import theano.tensor as T\n", 22 | "import lasagne\n", 23 | "\n", 24 | "import matplotlib.pyplot as plt\n", 25 | "%matplotlib inline\n", 26 | "\n", 27 | "import gzip\n", 28 | "import pickle" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 2, 34 | "metadata": { 35 | "collapsed": true 36 | }, 37 | "outputs": [], 38 | "source": [ 39 | "# Seed for reproduciblity\n", 40 | "np.random.seed(42)" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": 3, 46 | "metadata": { 47 | "collapsed": false 48 | }, 49 | "outputs": [ 50 | { 51 | "name": "stdout", 52 | "output_type": "stream", 53 | "text": [ 54 | "--2015-11-08 22:51:14-- http://deeplearning.net/data/mnist/mnist.pkl.gz\r\n", 55 | "Resolving deeplearning.net (deeplearning.net)... 132.204.26.28\r\n", 56 | "Connecting to deeplearning.net (deeplearning.net)|132.204.26.28|:80... connected.\r\n", 57 | "HTTP request sent, awaiting response... 
200 OK\r\n", 58 | "Length: 16168813 (15M) [application/x-gzip]\r\n", 59 | "Server file no newer than local file ‘mnist.pkl.gz’ -- not retrieving.\r\n", 60 | "\r\n" 61 | ] 62 | } 63 | ], 64 | "source": [ 65 | "!wget -N http://deeplearning.net/data/mnist/mnist.pkl.gz" 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "execution_count": 4, 71 | "metadata": { 72 | "collapsed": true 73 | }, 74 | "outputs": [], 75 | "source": [ 76 | "train, val, test = pickle.load(gzip.open('mnist.pkl.gz'))\n", 77 | "\n", 78 | "X_train, y_train = train\n", 79 | "X_val, y_val = val" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": 5, 85 | "metadata": { 86 | "collapsed": true 87 | }, 88 | "outputs": [], 89 | "source": [ 90 | "def batch_gen(X, y, N):\n", 91 | " while True:\n", 92 | " idx = np.random.choice(len(y), N)\n", 93 | " yield X[idx].astype('float32'), y[idx].astype('int32')" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": 6, 99 | "metadata": { 100 | "collapsed": true 101 | }, 102 | "outputs": [], 103 | "source": [ 104 | "# We need to reshape from a 1D feature vector to a 1 channel 2D image.\n", 105 | "# Then we apply 3 convolutional filters with 3x3 kernel size.\n", 106 | "l_in = lasagne.layers.InputLayer((None, 784))\n", 107 | "\n", 108 | "l_shape = lasagne.layers.ReshapeLayer(l_in, (-1, 1, 28, 28))\n", 109 | "\n", 110 | "l_conv = lasagne.layers.Conv2DLayer(l_shape, num_filters=3, filter_size=3, pad=1)\n", 111 | "\n", 112 | "l_out = lasagne.layers.DenseLayer(l_conv,\n", 113 | " num_units=10,\n", 114 | " nonlinearity=lasagne.nonlinearities.softmax)\n" 115 | ] 116 | }, 117 | { 118 | "cell_type": "code", 119 | "execution_count": 7, 120 | "metadata": { 121 | "collapsed": false 122 | }, 123 | "outputs": [ 124 | { 125 | "name": "stdout", 126 | "output_type": "stream", 127 | "text": [ 128 | "Epoch 0, Train (val) loss 0.237 (0.133) ratio 0.559\n", 129 | "Train (val) accuracy 0.933 (0.965)\n", 130 | "Epoch 1, Train (val) loss 0.106 (0.125) 
ratio 1.180\n", 131 | "Train (val) accuracy 0.968 (0.966)\n", 132 | "Epoch 2, Train (val) loss 0.085 (0.105) ratio 1.229\n", 133 | "Train (val) accuracy 0.975 (0.971)\n", 134 | "Epoch 3, Train (val) loss 0.075 (0.123) ratio 1.642\n", 135 | "Train (val) accuracy 0.977 (0.971)\n", 136 | "Epoch 4, Train (val) loss 0.063 (0.105) ratio 1.681\n", 137 | "Train (val) accuracy 0.979 (0.972)\n" 138 | ] 139 | } 140 | ], 141 | "source": [ 142 | "# Compile and train the network.\n", 143 | "# Accuracy is much better than the single layer network, despite the small number of filters.\n", 144 | "X_sym = T.matrix()\n", 145 | "y_sym = T.ivector()\n", 146 | "\n", 147 | "output = lasagne.layers.get_output(l_out, X_sym)\n", 148 | "pred = output.argmax(-1)\n", 149 | "\n", 150 | "loss = T.mean(lasagne.objectives.categorical_crossentropy(output, y_sym))\n", 151 | "\n", 152 | "acc = T.mean(T.eq(pred, y_sym))\n", 153 | "\n", 154 | "params = lasagne.layers.get_all_params(l_out)\n", 155 | "grad = T.grad(loss, params)\n", 156 | "updates = lasagne.updates.adam(grad, params, learning_rate=0.005)\n", 157 | "\n", 158 | "f_train = theano.function([X_sym, y_sym], [loss, acc], updates=updates)\n", 159 | "f_val = theano.function([X_sym, y_sym], [loss, acc])\n", 160 | "f_predict = theano.function([X_sym], pred)\n", 161 | "\n", 162 | "BATCH_SIZE = 64\n", 163 | "N_BATCHES = len(X_train) // BATCH_SIZE\n", 164 | "N_VAL_BATCHES = len(X_val) // BATCH_SIZE\n", 165 | "\n", 166 | "train_batches = batch_gen(X_train, y_train, BATCH_SIZE)\n", 167 | "val_batches = batch_gen(X_val, y_val, BATCH_SIZE)\n", 168 | "\n", 169 | "for epoch in range(5):\n", 170 | " train_loss = 0\n", 171 | " train_acc = 0\n", 172 | " for _ in range(N_BATCHES):\n", 173 | " X, y = next(train_batches)\n", 174 | " loss, acc = f_train(X, y)\n", 175 | " train_loss += loss\n", 176 | " train_acc += acc\n", 177 | " train_loss /= N_BATCHES\n", 178 | " train_acc /= N_BATCHES\n", 179 | "\n", 180 | " val_loss = 0\n", 181 | " val_acc = 0\n", 182 | " for 
_ in range(N_VAL_BATCHES):\n", 183 | " X, y = next(val_batches)\n", 184 | " loss, acc = f_val(X, y)\n", 185 | " val_loss += loss\n", 186 | " val_acc += acc\n", 187 | " val_loss /= N_VAL_BATCHES\n", 188 | " val_acc /= N_VAL_BATCHES\n", 189 | " \n", 190 | " print('Epoch {}, Train (val) loss {:.03f} ({:.03f}) ratio {:.03f}'.format(\n", 191 | " epoch, train_loss, val_loss, val_loss/train_loss))\n", 192 | " print('Train (val) accuracy {:.03f} ({:.03f})'.format(train_acc, val_acc))" 193 | ] 194 | }, 195 | { 196 | "cell_type": "code", 197 | "execution_count": 8, 198 | "metadata": { 199 | "collapsed": true 200 | }, 201 | "outputs": [], 202 | "source": [ 203 | "# We can look at the output after the convolutional layer \n", 204 | "filtered = lasagne.layers.get_output(l_conv, X_sym)\n", 205 | "f_filter = theano.function([X_sym], filtered)" 206 | ] 207 | }, 208 | { 209 | "cell_type": "code", 210 | "execution_count": 9, 211 | "metadata": { 212 | "collapsed": false 213 | }, 214 | "outputs": [ 215 | { 216 | "name": "stdout", 217 | "output_type": "stream", 218 | "text": [ 219 | "(10, 3, 28, 28)\n" 220 | ] 221 | } 222 | ], 223 | "source": [ 224 | "# Filter the first few training examples\n", 225 | "im = f_filter(X_train[:10])\n", 226 | "print(im.shape)" 227 | ] 228 | }, 229 | { 230 | "cell_type": "code", 231 | "execution_count": 10, 232 | "metadata": { 233 | "collapsed": true 234 | }, 235 | "outputs": [], 236 | "source": [ 237 | "# Rearrange dimension so we can plot the result as RGB images\n", 238 | "im = np.rollaxis(np.rollaxis(im, 3, 1), 3, 1)" 239 | ] 240 | }, 241 | { 242 | "cell_type": "code", 243 | "execution_count": 11, 244 | "metadata": { 245 | "collapsed": false 246 | }, 247 | "outputs": [ 248 | { 249 | "data": { 250 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAA5oAAABrCAYAAAAW0/2qAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnXmgFNWVxg+yKouA4oIKD1ARxQ3UhHFBNGhcEtc47kuG\niRqXMTHuxjgmLlnHmBgn6rgh0cSFqKiJGhRxRTETHRYFFVRAZZFdyAPe/FH31vkKmuVBvX79qn+/\nf+7Hqaru5na9W1V9v3tOs7q6OgMAAAAAAADIi40a+wMAAAAAAABAseBBEwAAAAAAAHKFB00AAAAA\nAADIFR40AQAAAAAAIFd40AQAAAAAAIBc4UETAAAAAAAAcoUHTQAAAAAAAMgVHjQBAAAAAAAgV3jQ\nBAAAAAAAgFxp0VAv3KxZ27qGeu2iUle3qNn6HEdf1x/6unzQ1+VhffvZjL6uL5zT5YO+Lh/0dfmg\nr8sD18Xysbq+ZkYTAAAAAAAAcoUHTQAAAAAAAMgVHjQBAAAAAAAgV3jQBAAAAAAAgFzhQRMAAAAA\nAAByhQdNAAAAAAAAyJUGK28CYJstdr2vy20OcT1kXtLOmuaxR19yPeM9eb0lm+T68QAAAHLlEL/u\ntdrfwxd2TdptR3vsp1Ncz3hRXmMF1zoAKAOn+3jVN0w9Hvmub/6pFHipe239xiVmNAEAAAAAACBX\neNAEAAAAAACAXCmgdbbWZTPVoW0pu4qds3Mr11uHeP/HPHbfybJ9heuT+ybt9u977NWn5bgJ8n7v\nV4kdplOYiv+Vh26+xvWJh7oee0nSjtjWY2d/7vrazvK6S3L7hBDZVuzNco5f9GjSPtHLY+//TY5b\nViXn8vqwr/SpnOtnh7FioFjnbvnA9Wsjg1hA30K5kXNWr5G7SrhT0vab5LHXm8u+H8pFtIi3Fmui\njfTf/S6fvN31pjcn7V/f8dgXDfupikk76Ws9/3YXfXzSDNncQz33cl0ndsBStB/n+nP5Pq+fGo4f\nW43nutyAxZvgIz10xoOu798taZe/zLWsIhnsf0NHj/DwjcHqf9WrHlvLn8o6wYwmAAAAAAAA5AoP\nmgAAAAAAAJArTWPOv7VYJfQT7xxasfft2d11x6NcD2gdYu3l8ONl311cL/l+0j7/scfOauf6OrHO\nLj4zaR9d4LH7Fln10UW+o58kzTXf99D8612fdY7rm85L2jvkpR7V150jum14j0UVbFvZWvpBXCNb\nBDvw5/rRZ1eAreQYl0OGu94o2Izef9BgXejr3/vW93n4iSdcd3k2aZ+8zWOvNfDHAliFjWWMGpQ0\n3fb20MBvuv7a11xv1TZp3xnksS73uh7R45/+jw8rbFxuaDrIb/an+A3C4Gc9HK9xv5PD9hGtSWch\n0EHO1XCO1pzioWP7u+4u1thjgqWzlWTPnCjbm6nltgRT57oeJzckB4V2ZG8/1+veXSZHtlnzCzdl\nWsmN751J84eLPLTsCNf3pvcN8v1ZBdzvVDuDku9jwB899JncjzxyVdIOlkMy9+PrCTOaAAAAAAAA\nkCs8aAIAAAAAAECuNKtbW/qt9X3hZm037IU7y5T7j1yevpPrwcEa212sPLWTXbeZ5brd0qSdI9PE\nw85y3VdcAdH8sLV8HHHhmtmBoncwM7Nd7K00ssTGplqS0drarAN1dYuarXGH1bDBfV0v5HvZSsI3\nurz64qQdKf/dfT5xLS5li47jP0jsFdG/F/1syED7uLpTpq+fHaPB+vp875/dv+rhnqcm7fDMzo1l\nJVnu8pClrp9xOT6035ajsjbPdf/sTeO83kBulL+LHi73PNF1dCT+Rg47QPSfU1Xec9qsEftaLffB\nBret2Dj3kb+hPlskbQ/xGD48z/XLovd4KWnf/9Jj06fI+y7esL+9JnFOt5e+3d9lT7EeXn9L0u68\nn8eW/cz18INdL3ghac+Xt3hI9JX63jFzbe2Gj3FNoq+tm+jtU
rWtuXf28tBeJnvqfYVeAxvr2lBx\nfX2ln8MnXJe0J8mY0FE6bUw/10MXJq2sDDFZVbXWTJoHr0ZfG9oLJDY/c2SBr4ubyHgS7ucGSKbf\nZ2WpWVzV9lH2BRrmc62FJnldbCjGJN/hNpL5es9/c31YaGXFj/0l8wLr9wzDjCYAAAAAAADkSuWu\n2O/isuVVrvdt6/rjMMP1D6lbKeUY7e+i4zrw+WeUfjuZ10l/jD3BNpZoT9GrLuGvtUvTyFz7jmyX\nLEFFQGZodj7V9Q8udP1ImHnoN9FjveUltBzmZ6GVCeVMT48U/bwmBqpU5PzcTxLpxIn47IxmI9Fa\nznYZAYbILrGrXy/H52nKxDqkUneq5grX58qucTS5WGIytFUPveWXcZkG+3VIInawDAAdpANfD9Ps\nT0l93u4vu/62DPhLwuz8Uyd5rNh5raRPD0+amm956HrJtraj1HG9O1xnX/ylxzYRfYf59PJ8e8PM\nzE6Qd/1P0Xrt/URKWFcHE0tqnQ7RPHCwjsj1aXiYfZ/1pse+saNr7d9Qsthk13D2JmiKmq6hFSOF\nacnoMaJrQruFxLIzmgVmscxLdU7u2L4hm/cUvWlZPlCB0aSSYUr9eEks9nSN60XpTdpqZht3kte6\nOoTEvbav7Dp6pTYvmNEEAAAAAACAXOFBEwAAAAAAAHKlcq2z7w5IZa39NNXfX+i2lMOCRXW8H2VH\ni9ZaMDEv0FXpcm4zM/G4iXHiCnvbzMyWmmSSsLNFzxSdJAF4TzMWZYwZBamrtFnyf2p5i4fOlVXy\nv//AdfzmxP1sd4v+gehYyvSnVhq1w8Tym3nU9WkwJAHVtBGuB6+6Z+Mhzu4W8lPT5rJLTG2l/b80\nNZVXOVvJ33ewGQ6SJfCdHne9UA4LJWNtD9stjfWQdGEfWhEL8IZ6c1tIrbnbXd4g9s4V4Y/kF1Jo\n8B5NHdYuMb+eaLumoZ19q725meuYsGZU/T9w0+QrLncImRx+fKjHXpJVIEP/2/WBoe0kL3W2iR8x\nk64mufZOk+UgnWWrLnmQ3G/VgXbEDi5bytqDOBboXYXajTWLRt1GYYxZUeW1B6/7ayprw53eWLkD\nGJmmWDIzkwHAYsYgWd+TWaAjNV9DwqZ25tnE1AaqNto4Wu+x5k9dNcjtTubq1W3lHaF+SN3568LN\n8Q7HeeyJ39m6c5PosOxEbeKPiI6W5+ydyIaPQcxoAgAAAAAAQK7woAkAAAAAAAC5UrnWWZsi2g1S\ni8R48rB9Lyi3p3aXgo5qpPifVH0qUa385ebOTVLj4L/LdrVorI2CWAw7iUUwTLnX/NhDE6UWj5Qk\nTW1TV0kK1rbmVuin7bupvsOODUon8/07GmNenOnI0G4ve05uI59xSSPZjDrKZ/ATzcS5lqnJ2uic\n5nKZJFDWb2BcaDUbc2HO6w1F6k71Dynb5osHTkoW2lzR0T79VLDmJ3QQrVmuC8LuwTJ7rYeudOer\nffC567nDkvZh+w95AV2ykOSg1uyxR4rWzNZTQls1Z6xkA58VMguOeM5jfWRXNcbG0fVyGZ/7yGKU\nCTFNoSDDfmZ86LDyjtXEHDELvu4FpqdIzlK1GUY0g69myX92xcp7Vit/Fj3DzMzmqa9Q0n23tF+n\nujat+lwj+2oOYFnXYg+bmclfgFlz0dNFx3O/gm+cy4r2qCyOSG30ajuGerClywdCXe5RYpfVa53e\nTaTovbtUa9g11KBW+75mDvZVFfme4cxoAgAAAAAAQK7woAkAAAAAAAC5UsEOgHmixatmN4veK7Ru\nmVBjrD5FR6PEATI5/Ela1tfM7GupGpbJSFZltJMpd8lOelxI4zhV5tyXy2Efiv5jaDe2Q9LYIslU\neLe5p6u/3RCUVLyX0spqOe0Y2lMkNnmJNT7q35OUuJtKV2pfNRotwgcSj9B+snk30Q+V4/M0JTaS\nL1N8K7ODZXaI7Npe9DjRW
jDcUcNRs5J7NDkGel/VhIx3N13im4ce41rzd4+xQWZm1kJ6qqN5ZelZ\np08wM7N2ctBLT7tW63ccYdQgV2gu9WvkF7VJpt6x9lEaG2VfpPrjzNma2LX72VVpZIJee8XCZZ8l\nTe2Gf9oConcbY0QPkT3uNLOsnVtyKhflrz9n7hfdNbRq5/YbgNqM4XVgaCXW28eljWRV1Ipbk3aS\n3MTUyCuptTCsIFopq3IBlzyURHPQJ/2u9RW092vK8GkKh1w37QiX8XK5neyqy8ccOV5XnMifSzyX\nL5bNutRnRqr0u95wmNEEAAAAAACAXKngGU3lWdFSMCx9TvZCMX+XX2Z/IovG44/gmV8N2/+f6wVe\npMYrdVZLDSv5JcTz9NiA411P/nnSniFJCh6Uyotj5HeRgTbczMymhDbBpx7a29hUj00raJaemnxC\ndK/Qak1Krc/ZaLQTLV2ppdVmWAVwemi9NJntO811V9m19OxbFRKTTUl+mi0kG8rXQ7uph9JUYmbZ\nGTvXBRxX9vMTf7c/efiUUFPwjvke04vOGKlYt1f4K3lTRuBZWtz1oKTZ7kwP6Sy81teNSSqyMw+t\nS370QlB7mPwjGaQnZKYjtVKxFy/dO1wv35ZZuKX6a/Zm4u4JM5odDVZFslqZTN/bfamKyZJula2a\n82eQ6GcMEtQL9PFK7RroE9JUHeShcyWf4zbbuH4uFPbWy7jOUb4melLJN6uWuWgdT5IxYrz0yETZ\nqimcYA20Lp1IsvUFrpeFW2dNR6rpsFKkzuYOcjnY5BbRoZVb+8z31lAwowkAAAAAAAC5woMmAAAA\nAAAA5EoTsc4qC0vE1CDlXsArZfnsk8FucazsOUlW1C5c4VPYL86OL6sGigLbI6QGoJ3pcuuhrrsF\nj8+lYipZZtf4duuf6k9tclBacc3n9RdYKZqXjKqhNhpxK854+HjpsP6KM7n0LjkiFgwtlCdO81ND\nHqy3T/ZYu4ddj5bD3s/zozVlgp9t99s8NEGKB0YH1lQ5RI1dL2derIi/64Xz7k6PbC6+69eDZVZt\n2Q/Jv7aU6oFvRnG673uMWJbfDZk4tLrxi6K1Zul4K0XpMaYY/ED0RaGtkVgX0V55bdz2iUeru2QF\nayHdNEmsy51Cp+r4q1feHdb5sxYdNcf6uBzvTDrJVq1DCCXQOtnhFqOtu+3twBrXHb18qXUNyVDO\nkHubV8X5+YwMxfEuURMPajI8KfsraQwLfD+4WrRqbpJ2c5FEvhCt5ziUINZev9RDu8s6sQMkyV28\nLurDmqYQjEVLe//WQzvJasMt5U/o9tBqMqHsGNQwia2KeOcDAAAAAAAAjQgPmgAAAAAAAJArTdA6\nW4r/Eu25B/8hJs2xwdB2guw5VsqFzRS/27JQKOyV7lI0Ur1xlWfe3DAk06ymtuolltB3Q7u7eZ+M\nlwy/H1k3eZG4z4ZneVSDSqyR2m+DXzVnppcOq1ltXuldnE7ib4j/aSkM2ElSPfYQH2JtsLwd2MFj\ne/+r675SWPaUa5NW7bL6Db1e8oO1Wd0nLi695LsI6dkWiy/zain5Gq1Df5HD9buWEcQK2Zcx3a5k\nmp3ljno7KrSXySE95Q9GL0AHhE48W3xrSyT73sWvJO0Hcoxm7dRFFW7jKthYvVo062mwzmrGXslC\neNwvXZ96UtIulqS0KyRD8O0nutYVFpGrRE8rsR2caFHTa1o1GjDXSkcZf690eWko/nfihR5bfLvr\nOU+6fiMcd+kVHqsRva+8XbR56hg+ws5IdbOMuTBmES7gWL5WZGAINlqtn65LFzSDb3UjWbv7iuE1\nnJ/3yVh812au9XEj3pLrMpFMVup7kmaxXIPbyZITvTIcGto7Mp+x4a+RzGgCAAAAAABArvCgCQAA\nAAAAALlSEOusGtTOFj0jVUeFfEvH2XfS2ADxYKmltmVoN5P56yelIvuKeWLtaMrWrB7h/3G
Ah7YU\nq4qaQ2KBV5mRty8zOcfURBHJN8tjLCM+dY17VQ6SBC9NBDtECkU3u9XPo7byHfTqnLRdpVJ0J0kT\ntuAm1xuHJGHvveuxH85xPVWyj1nQWqxXnIkmiSeFKvktqk3pwslnhj/vmWKXVYtm7OruEhMHlxXe\nYjUvjJbX1KahPrI52nb+IbFasb61FLvQ9GDTPE88WPo3FEfuvhL7nWh9j4bKnldZSE7uLmIiPidp\nTv2ahy6QrMCvtHI9929JO14ycvaQd9hFdMw5qeOvWhDvFz3T5E0gQ91qNAR6uWwvyZSnhnP4t5LU\ndy85bFNJexzvTA6V7W1X83ZxjNJrYXO7N9WPZPaOi00wPa9MrehYg0B7qSrPdbHLtvVTyu4INvD7\n5F5tx7Gu3bjtK6hksx17iusW4fGnudyca+GBx0Sr/bacVMldJAAAAAAAAJSLgsxoKh+JPkR08tv3\nI/K7wAtS2/FLm5LqI0KrC5q3lgwft2tRuNUkgmkSHBjaiz0kE2u2s+hY7U5rJeUzWxNmkgZJ6HmX\nOjsyKbSlk9Y0Jn8VvX+qRslsd+zLWdJliyRRz0xJxmG7J821MkUzVX7Omt9jH9k5/ox1kcQkc5BJ\nNg/7sZllZzTbi87+4tvSqgr5hXBXmXX+MJyAmoDqGNGxtqtMGFk2xVLRf8uLKWK8wtwfxffQy0aY\nWSanh11yi+tREh+Xzo959VGd0YxJO9RHka1TqhR1xkHcOzvKHIGMH4+Fk3G333hsgMwSfzr0JHm9\nmKZpjzQiZUxNR5qYW+gciemwrXUIZ3YISTA0f0gRbzfqQbOVWrOsF6t3GT9LRTPWvTULNtJxJZlL\nH2DfS2MvyDn1uXmBzRVhRl37epGkrtIZn5gY5TSJ6Xx8NqFbkWvx1oekB7+w99KIzlh2LhGrKnon\n97XNxXLzvZ6u73ohacV0kpk5f070taG9VLfLLedlw5JWjACZOptaE3ZIaJ/X03i5NThFvwsCAAAA\nAACAMsODJgAAAAAAAORKwb0sWm0mGlPcIDjbrkv1D8WQeaolq83/U44Wt5cNE7vsovbB+rmgCSYF\nunvVkCY1kbX19ttc31gSrwTvwN4yv/+2eIimSBGgmI9Fk7FUBm+LHpeqEyU9SeuQymTp+1Kt6xz9\n81PjVDQMqklwJ9G67P7q0PqS79axsJKZLe3zY991QtK8I0er0Vwd4VVhne20ar1MM7N35MSPJSHV\nwi15gezNki9cTfaqaCzrJLGhqVoUzLFt7d9k+xTRat6Oo4xb4I6VrdF1rhbNWZnP0gTH4HUmJP45\nxM1o20qilF+IN3lYOCmv9pB9aufLv3Q0f8DMzAZIRBPj6S/RMcnVMIltJ/rrovcOnsXnDvc6clM/\nlJpypXLHLSny9+eonVD/x/1FR5vyrJYyRtVWR/9kr3t6jUx82K/Kkqds9UtdMlJqWc9tqXpPFjs8\nH5ZTzZA99R0mrPnDVilxyYRbZ/W6GG2cutRq5kZyLq8o+LkcBt+D5TT8m1wil4YR93Jdt5ZJbeVJ\nTc+zGjMzm2q/T2Ot5cL3rdCKM9dGi35UdLoqrQx2WYUZTQAAAAAAAMgVHjQBAAAAAAAgVwpunRU6\nhrypW4oF8d3Su0Y73BsSU2uRGjtswQZ/sopCa6jpyeHZHTWjZj1Qm6IkAz4yuAH6Sb3HlmKXvUk8\njb3tYTNb+WurhDqFPxL9HdGek3Fpao09eDWvofk1Y84xN2y3sMdTvSxTMTB+H56/bKlszbxs8ABJ\n6cKMubzq+LnLbR50rRbA2H16zkmyYBuZqiqwGpckphU9QmJvperT9LzVhQj/kqqeYuP8IFpmZeAZ\nLenz4itoguZiIxb5wUmdzH53eei73o12h3jgoyt1YcbY6v2sBsMffJK0r8oQ9oTUkR0mhs6FwW54\nq+2axu6VL0uzlL8RsrR/U4b97X/luvn3XZ8/LQgpC1o
cfFweGcb1zWWrWr918cRhoR2uqySqBh1t\nddFCtH93kZjm13xJdKms0+NL6mgCrdoMqevFq6tE9FSNFRu04rytsOohrC945i9ayFyXlyRzfDvJ\nMpKJJmsh7CupWhDGbr3z/qroOLL/l2R7nyULTPazy0t8wPLWOWZGEwAAAAAAAHKFB00AAAAAAADI\nleJZZ9vJZL3X/bXeoQr1hT/x2F5Sy36gWOeidXamvKxOgB8l+uP1+5QVyyTRl4mOdsvZ9cmoubd/\nF22+6+Hf/Nr1guCsu1GqhM8UC0D7zG8hMRvl/0qs0n4ruX018X+sJl6KbVeJLMv8qx725eddRrPE\n3rL54XV/peLxLZdtbnStZ3jUfysRy1Kt1tmInt/aQ8es8agPMmWow77LhqeRzrI1muQl6Xex6e9m\ntG73hJBkax0ldtkX5bB41NFyddpSth+xr+vh4Ro5aYrHnrQfyt5qQYxZY3+WRs6Qsfgr8m2dZd2S\no1/wo5eLLmXmKia+GGVSMM12FsOsLl3oKzpe6YZb0Ql+6Rr3VTaf4luXd5I7ki+itbAeY+2ucj+o\naZhPdbm4Ku3JG8ppoR2VRsaI5Tnm3r9QjpCVKsXnL2OC0Dsst9HvYleYmdm4kPF4FWSMji7vTyWF\n71DZ/Cc7Lyi9F/ZszJvZmRK/J7TlffSrtLt0AAAAAAAAaOI0q6trmCXQzZq1bfi11S3Cr1Veds3a\nf9v1db9x3TusjR0r+REW3CNaXjbWknxLFu/WSkWg7K+M+dUDqqtbVGoF+1pZ/75evEpE6wJqXcUz\nQztGsiLN1Vo8nh/C+gd96k0e2+dA1w/Im9w1OX4S/83jQFk1/kLm0zXlvm4s/DuOM5r6A+4Zou/J\nHFfgvo4ZSjy/kv1CNrcTHWeCXpFY9pfZOM/W+L/ZrW8/m1XKeb3qeCT5Y9LKdzpWZx0l5avNVpZz\nerT3x4AwsbO5XN/0AwwUvU9ou73vsXclB9m9U1z/IbR1acVYs26SKGKZuf1keppm7FB5N6nTa2IL\nSu0Cj0nM5zEPt11S/VRaqbD091dx40e9uFP0CDMzOzntdU2LlU04uGNodZZzqhoFljfMuV6Wvt5K\n/s7DqXS45EE5Sf7oz5da3/NStZr/ezN53ZDzsfP9HvqOFCpVB8BBodUkY+rD+FPmTQp8XawXcXZu\n/zRygF2S6jhSaB3N90yp/LHabEP6Ot5ZaXVLvcuIPSMzi4Pl/D3Z5a5nJe2/y9Ga9srPT03IN1u0\n9nx8j4a5X1ldXzf+3REAAAAAAAAUCh40AQAAAAAAIFeaRjKgFjKlrFkNwhrYi8R28fWjXb8w2PXr\nwQHkhp2sNfRBqUxzW7pq3CtefcNuzuxdVLQ60h9Eh9l7q5HsHF2l3trB57neMqwEv0ccVifc67qV\n1L6sDYvK+9sdaeyFzCcqn8Wi6ERPQweJVU0KG6nj2va+pF0khf/mSoaZ7i7T2moPrPaF+a0uN84J\n7X97qGoS/5RCEnm9FZytmjpJcqrZn0U/uXHS3tDLY6/J9i8yNU2TTBOb2m1p5KNMwTs1KsfbhSf1\n3UQfKTq++XNppF9q6jUbnbF2FXkUUpNmkiBJDeLqy9M6pPHO458SM12q0pS51uXl1ydt36c81kqy\nMGpavJrgDGy+/aoWezOzZXu5PuiUpO0qp/Jo2fcw0dEofovEpMwrlCT+Xfsg9bpsjdfLeySWtc4W\nnXvXsj2MpW3kXL7Y5VellOz2odXzU0514fmS0SyNc7/CXRIAAAAAAADkCg+aAAAAAAAAkCuVl3U2\nZg473kN9JFvY0Re5HhwS202W7LIjalyrFaVLaB+wPdLYmxkLUR/Z9+9mZrap2GUnZ0ylTTjjW4Y4\nAe8F2Y62Kam+TfaMRijNdvWyWLPekQyHsXdk9t+ezuTqdP9z85D6s2MsFmRmszNH1qNmZD1o2hnf\n6oNbM6JBTcrvZYx
vQzLHFSy73kCxqATP/P7ih5WSmjZfdHRxqU0xS+VYu5t+1tloGPQz9ALZOjK0\n+v0UOuusfSo6SUPYLWQuNTP7hmydK3pYmt31fIn60pCN5Fo2wJI0ti9nKkVXzjltViHjRy5cGVo3\nwV1jn6dabaIxA2pbib2dea0mfA/ymI/FO4ebjJ287J+9Jbs+JHpySPe/XNc/Ce2kA6eF8o5DxSO7\nv+yr2VDvDF7envZJGpsr2YLnZOZj2lheFOO8Lm1jjmdnP4m9tLH848vKH6vNytDXN3j/7STh/a50\nvTC0Wks9W/883iPXo8Z9A0HWWQAAAAAAACgLPGgCAAAAAABArjRe1tkuMuV+oMs9QuXpS8XT10d2\nnTjS9Q+fSNreT3jsOHkL2dVuSm2wXtS0s5g0Otrpqf7AxpiZ2cyMTaKy7ET5EL0mnprtz/ZhqsdL\nSfCbQqu2yxZil9VJ+5vTrL07StRT5u0v+clGBxPA7Ez/NoxdttqJ32YXiVVNT78iujZp1IGlv7iJ\nI1xyZiobl4zChhJHb8+RulhsntEeqonHs9bZojFW9H+YmdlH5p6qWzO5ZkeJjosevptGOqcFxM3m\n2AGpfjm153JOl4/DUzVH8nJuJntUkDcyf47yrP3jw3nXxo5KY3rfNkd0m2OSVnMia77ix0XHXMk7\nSOznmtbTeotO7gObyfIdfd887bKFQ6oQ7CKdtk1oJ+u+X5bh8zQ1fCWfTbzc9VayS6wSsPprXeNb\nZtcGM5oAAAAAAACQK42XDOh8n6Y8/FEPXxBy8tzqE2vW4wPXOgMzNbTNpfLdg3aJ7OFzN83DUvot\n5Zff6ZlfjJX1Xju8QZR/cXj83WSExHzOsoNtl+rllhTNXGT/U3Jf/zbM4rLlg+xnaWRkpvZo488O\nF2Mh/rqg83ZvmJnZMxJRS8NBmeMKlgwoM0+Z/PR6mE1LIyfJ1gtFe5KVxj9n10ZFJz1YJ+Isw/Vp\npFNawddsrn1mZmbXyRG/Ev2FTm/UNuz3VZ5zWlPBhOkck6LRMp3QVdJVTbffB6VTCJpKonGub+tL\nZYwfeRALS+vs8TWiT0zVN0Od0cdXO4/RhJMBZer9LQrtaWmkRsblb8qesQqrGKlslMwDj7S7ZEtM\n76Zpsiakan/zDJKj01ll/V4a/m+kEOd1f0k2KLfTT4dWz241FTWFxG1m5ejrSaJ9rr6TpAZrH85F\nvduu1PsRkgEBAAAAAABAWeBBEwAAAAAAAHKl8ayzHY91PVdtldGuKd7ZzGLs2aJj5RldOntfqvYV\nY9XLdndamofqAAADOElEQVRQ6q+qLAtR41kpND2MFCqVpBxm54bWrSjtJZFBTxuY6nH2CzMzWyY1\nwrL93vgUwrayTvxJdGI831OqE863ialWS1LxrLNaRa1VaN0+tbH9S6oH2fRUP5WqyrSqKJVtEaoP\naq1bkqoZYYz5u2y9V/QfNSfC8mgKb2UNQWWc09VBcfo6pkh5WmJ6je2UqrNtkJmZjbAt0tg0+6fs\n2zB5HMvT11rh/Jeh1X7wxIwd0/s2s7lp3fPPZN/NRXcUnVzreogN/1Mx3X+ZMSI2zBixNopxXs8T\n7ffsx4ZlU49KPdIsWGcTzhXt52QvuS/bNCyheCvzN1KZ9yNYZwEAAAAAAKAs8KAJAAAAAAAAudJ4\n1tlMdthPRMfsYzolr6a+20Q/a2ZmvWyo7HmDbNePUPnP1MWwUjQNqqevtbJszAy8Zxo5XjJbPpz5\n2ymadVbt4Q+EVk2Yns/6Esm2/KuQKXJ5hVpVlMq2CNWHE0QvSlXvYPn+kdSBVDPh2aKXpqopZ+cE\nM/q6nJS/r6NtVfN+vyFa85Y+G464LI10kmvWZ5llIrGSZuXWGCzGeX2zaK/ocLBdYWZmY21AGpsr\no3JDWb9LUZzrYuWDdRYAAAAAAADKAg+aAAAAAAAAkCuNaJ2FlSmGlaJpQF+XD/q6P
BTHItRO9FDR\nSQbab9mMNNLcLk/1g5ni7BGss00d+rp80Nflg74uD8W5LlY+WGcBAAAAAACgLJRvRS4AAMBaWSj6\nNNE/MjOzh2xcGjlc6mx2t86pnpomAwEAAIDGghlNAAAAAAAAyBUeNAEAAAAAACBXGiwZEAAAAAAA\nAFQnzGgCAAAAAABArvCgCQAAAAAAALnCgyYAAAAAAADkCg+aAAAAAAAAkCs8aAIAAAAAAECu8KAJ\nAAAAAAAAucKDJgAAAAAAAOQKD5oAAAAAAACQKzxoAgAAAAAAQK7woAkAAAAAAAC5woMmAAAAAAAA\n5AoPmgAAAAAAAJArPGgCAAAAAABArvCgCQAAAAAAALnCgyYAAAAAAADkCg+aAAAAAAAAkCs8aAIA\nAAAAAECu8KAJAAAAAAAAucKDJgAAAAAAAOQKD5oAAAAAAACQK/8POZrOtmvpc+oAAAAASUVORK5C\nYII=\n", 251 | "text/plain": [ 252 | "" 253 | ] 254 | }, 255 | "metadata": {}, 256 | "output_type": "display_data" 257 | } 258 | ], 259 | "source": [ 260 | "# We can see that each filter seems different features in the images\n", 261 | "# ie horizontal / diagonal / vertical segments\n", 262 | "plt.figure(figsize=(16,8))\n", 263 | "for i in range(10):\n", 264 | " plt.subplot(1, 10, i+1)\n", 265 | " plt.imshow(im[i], interpolation='nearest')\n", 266 | " plt.axis('off')" 267 | ] 268 | }, 269 | { 270 | "cell_type": "code", 271 | "execution_count": null, 272 | "metadata": { 273 | "collapsed": true 274 | }, 275 | "outputs": [], 276 | "source": [] 277 | } 278 | ], 279 | "metadata": { 280 | "kernelspec": { 281 | "display_name": "Python 2", 282 | "language": "python", 283 | "name": "python2" 284 | }, 285 | "language_info": { 286 | "codemirror_mode": { 287 | "name": "ipython", 288 | "version": 2 289 | }, 290 | "file_extension": ".py", 291 | "mimetype": "text/x-python", 292 | "name": "python", 293 | "nbconvert_exporter": "python", 294 | "pygments_lexer": "ipython2", 295 | "version": "2.7.6" 296 | } 297 | }, 298 | "nbformat": 4, 299 | "nbformat_minor": 0 300 | } 301 | -------------------------------------------------------------------------------- /4 - Recurrent Networks/COCO Preprocessing.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Image Captioning with LSTM\n", 8 
| "\n", 9 | "This is a partial implementation of \"Show and Tell: A Neural Image Caption Generator\" (http://arxiv.org/abs/1411.4555), borrowing heavily from Andrej Karpathy's NeuralTalk (https://github.com/karpathy/neuraltalk)\n", 10 | "\n", 11 | "This example consists of three parts:\n", 12 | "1. COCO Preprocessing - prepare the dataset by precomputing image representations using GoogLeNet\n", 13 | "2. COCO RNN Training - train a network to predict image captions\n", 14 | "3. COCO Caption Generation - use the trained network to caption new images" 15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": {}, 20 | "source": [ 21 | "### Output\n", 22 | "This notebook prepares the dataset by extracting a vector representation of each image using the GoogLeNet CNN pretrained on ImageNet. A link to download the final result is given in the next notebook.\n", 23 | "\n", 24 | "\n", 25 | "### Prerequisites\n", 26 | "\n", 27 | "To run this notebook, you'll need to download the MSCOCO [training](http://msvocds.blob.core.windows.net/coco2014/train2014.zip) and [validation](http://msvocds.blob.core.windows.net/coco2014/val2014.zip) datasets, and unzip them into './coco/'.\n", 28 | "\n", 29 | "The [captions](http://cs.stanford.edu/people/karpathy/deepimagesent/caption_datasets.zip) should be downloaded as well and unzipped into './captions/'" 30 | ] 31 | }, 32 | { 33 | "cell_type": "code", 34 | "execution_count": null, 35 | "metadata": { 36 | "collapsed": false 37 | }, 38 | "outputs": [], 39 | "source": [ 40 | "import sklearn\n", 41 | "import numpy as np\n", 42 | "import lasagne\n", 43 | "import skimage.transform\n", 44 | "\n", 45 | "from lasagne.utils import floatX\n", 46 | "\n", 47 | "import theano\n", 48 | "import theano.tensor as T\n", 49 | "\n", 50 | "import matplotlib.pyplot as plt\n", 51 | "%matplotlib inline\n", 52 | "\n", 53 | "import json\n", 54 | "import pickle" 55 | ] 56 | }, 57 | { 58 | "cell_type": "markdown", 59 | "metadata": {}, 60 | "source": [ 61 
| "Functions for building the GoogLeNet model with Lasagne are defined in googlenet.py:" 62 | ] 63 | }, 64 | { 65 | "cell_type": "code", 66 | "execution_count": null, 67 | "metadata": { 68 | "collapsed": false 69 | }, 70 | "outputs": [], 71 | "source": [ 72 | "import googlenet" 73 | ] 74 | }, 75 | { 76 | "cell_type": "markdown", 77 | "metadata": {}, 78 | "source": [ 79 | "We need to download parameter values for the pretrained network" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": null, 85 | "metadata": { 86 | "collapsed": false 87 | }, 88 | "outputs": [], 89 | "source": [ 90 | "!wget -N https://s3.amazonaws.com/lasagne/recipes/pretrained/imagenet/blvc_googlenet.pkl" 91 | ] 92 | }, 93 | { 94 | "cell_type": "markdown", 95 | "metadata": { 96 | "collapsed": true 97 | }, 98 | "source": [ 99 | "Build the model and select layers we need - the features are taken from the final network layer, before the softmax nonlinearity." 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": null, 105 | "metadata": { 106 | "collapsed": false 107 | }, 108 | "outputs": [], 109 | "source": [ 110 | "cnn_layers = googlenet.build_model()\n", 111 | "cnn_input_var = cnn_layers['input'].input_var\n", 112 | "cnn_feature_layer = cnn_layers['loss3/classifier']\n", 113 | "cnn_output_layer = cnn_layers['prob']\n", 114 | "\n", 115 | "get_cnn_features = theano.function([cnn_input_var], lasagne.layers.get_output(cnn_feature_layer))" 116 | ] 117 | }, 118 | { 119 | "cell_type": "markdown", 120 | "metadata": {}, 121 | "source": [ 122 | "Load the pretrained weights into the network" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": null, 128 | "metadata": { 129 | "collapsed": false 130 | }, 131 | "outputs": [], 132 | "source": [ 133 | "model_param_values = pickle.load(open('blvc_googlenet.pkl'))['param values']\n", 134 | "lasagne.layers.set_all_param_values(cnn_output_layer, model_param_values)" 135 | ] 136 | }, 137 | { 138 | 
"cell_type": "markdown", 139 | "metadata": {}, 140 | "source": [ 141 | "The images need some preprocessing before they can be fed to the CNN" 142 | ] 143 | }, 144 | { 145 | "cell_type": "code", 146 | "execution_count": null, 147 | "metadata": { 148 | "collapsed": true 149 | }, 150 | "outputs": [], 151 | "source": [ 152 | "MEAN_VALUES = np.array([104, 117, 123]).reshape((3,1,1))\n", 153 | "\n", 154 | "def prep_image(im):\n", 155 | " if len(im.shape) == 2:\n", 156 | " im = im[:, :, np.newaxis]\n", 157 | " im = np.repeat(im, 3, axis=2)\n", 158 | " # Resize so smallest dim = 224, preserving aspect ratio\n", 159 | " h, w, _ = im.shape\n", 160 | " if h < w:\n", 161 | " im = skimage.transform.resize(im, (224, w*224/h), preserve_range=True)\n", 162 | " else:\n", 163 | " im = skimage.transform.resize(im, (h*224/w, 224), preserve_range=True)\n", 164 | "\n", 165 | " # Central crop to 224x224\n", 166 | " h, w, _ = im.shape\n", 167 | " im = im[h//2-112:h//2+112, w//2-112:w//2+112]\n", 168 | " \n", 169 | " rawim = np.copy(im).astype('uint8')\n", 170 | " \n", 171 | " # Shuffle axes to c01\n", 172 | " im = np.swapaxes(np.swapaxes(im, 1, 2), 0, 1)\n", 173 | " \n", 174 | " # Convert to BGR\n", 175 | " im = im[::-1, :, :]\n", 176 | "\n", 177 | " im = im - MEAN_VALUES\n", 178 | " return rawim, floatX(im[np.newaxis])" 179 | ] 180 | }, 181 | { 182 | "cell_type": "markdown", 183 | "metadata": {}, 184 | "source": [ 185 | "Let's verify that GoogLeNet and our preprocessing are functioning properly" 186 | ] 187 | }, 188 | { 189 | "cell_type": "code", 190 | "execution_count": null, 191 | "metadata": { 192 | "collapsed": false 193 | }, 194 | "outputs": [], 195 | "source": [ 196 | "im = plt.imread('./coco/val2014/COCO_val2014_000000391895.jpg')\n", 197 | "plt.imshow(im)" 198 | ] 199 | }, 200 | { 201 | "cell_type": "code", 202 | "execution_count": null, 203 | "metadata": { 204 | "collapsed": false 205 | }, 206 | "outputs": [], 207 | "source": [ 208 | "rawim, cnn_im = prep_image(im)" 209 | ] 210 
| }, 211 | { 212 | "cell_type": "code", 213 | "execution_count": null, 214 | "metadata": { 215 | "collapsed": false 216 | }, 217 | "outputs": [], 218 | "source": [ 219 | "plt.imshow(rawim)" 220 | ] 221 | }, 222 | { 223 | "cell_type": "code", 224 | "execution_count": null, 225 | "metadata": { 226 | "collapsed": false 227 | }, 228 | "outputs": [], 229 | "source": [ 230 | "p = get_cnn_features(cnn_im)\n", 231 | "CLASSES = pickle.load(open('blvc_googlenet.pkl'))['synset words']\n", 232 | "print(CLASSES[p.argmax()])" 233 | ] 234 | }, 235 | { 236 | "cell_type": "markdown", 237 | "metadata": {}, 238 | "source": [ 239 | "Load the caption data" 240 | ] 241 | }, 242 | { 243 | "cell_type": "code", 244 | "execution_count": null, 245 | "metadata": { 246 | "collapsed": true 247 | }, 248 | "outputs": [], 249 | "source": [ 250 | "dataset = json.load(open('./captions/dataset_coco.json'))['images']" 251 | ] 252 | }, 253 | { 254 | "cell_type": "markdown", 255 | "metadata": {}, 256 | "source": [ 257 | "Iterate over the dataset and add a field 'cnn features' to each item. This will take quite a while." 
258 | ] 259 | }, 260 | { 261 | "cell_type": "code", 262 | "execution_count": null, 263 | "metadata": { 264 | "collapsed": false 265 | }, 266 | "outputs": [], 267 | "source": [ 268 | "def chunks(l, n):\n", 269 | " for i in xrange(0, len(l), n):\n", 270 | " yield l[i:i + n]\n", 271 | "\n", 272 | "for chunk in chunks(dataset, 256):\n", 273 | " cnn_input = floatX(np.zeros((len(chunk), 3, 224, 224)))\n", 274 | " for i, image in enumerate(chunk):\n", 275 | " fn = './coco/{}/{}'.format(image['filepath'], image['filename'])\n", 276 | " try:\n", 277 | " im = plt.imread(fn)\n", 278 | " _, cnn_input[i] = prep_image(im)\n", 279 | " except IOError:\n", 280 | " continue\n", 281 | " features = get_cnn_features(cnn_input)\n", 282 | " for i, image in enumerate(chunk):\n", 283 | " image['cnn features'] = features[i]" 284 | ] 285 | }, 286 | { 287 | "cell_type": "markdown", 288 | "metadata": {}, 289 | "source": [ 290 | "Save the final product" 291 | ] 292 | }, 293 | { 294 | "cell_type": "code", 295 | "execution_count": null, 296 | "metadata": { 297 | "collapsed": true 298 | }, 299 | "outputs": [], 300 | "source": [ 301 | "pickle.dump(dataset, open('coco_with_cnn_features.pkl','w'), protocol=pickle.HIGHEST_PROTOCOL)" 302 | ] 303 | } 304 | ], 305 | "metadata": { 306 | "kernelspec": { 307 | "display_name": "Python 2", 308 | "language": "python", 309 | "name": "python2" 310 | }, 311 | "language_info": { 312 | "codemirror_mode": { 313 | "name": "ipython", 314 | "version": 2 315 | }, 316 | "file_extension": ".py", 317 | "mimetype": "text/x-python", 318 | "name": "python", 319 | "nbconvert_exporter": "python", 320 | "pygments_lexer": "ipython2", 321 | "version": "2.7.6" 322 | } 323 | }, 324 | "nbformat": 4, 325 | "nbformat_minor": 0 326 | } 327 | -------------------------------------------------------------------------------- /4 - Recurrent Networks/COCO RNN Training.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | 
"cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Image Captioning with LSTM\n", 8 | "\n", 9 | "This is a partial implementation of \"Show and Tell: A Neural Image Caption Generator\" (http://arxiv.org/abs/1411.4555), borrowing heavily from Andrej Karpathy's NeuralTalk (https://github.com/karpathy/neuraltalk)\n", 10 | "\n", 11 | "This example consists of three parts:\n", 12 | "1. COCO Preprocessing - prepare the dataset by precomputing image representations using GoogLeNet\n", 13 | "2. COCO RNN Training - train a network to predict image captions\n", 14 | "3. COCO Caption Generation - use the trained network to caption new images" 15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": {}, 20 | "source": [ 21 | "### Output\n", 22 | "This notebook defines and trains an RNN to predict captions starting from a vector image representation. A link to download the final result is given in the next notebook.\n", 23 | "\n", 24 | "\n", 25 | "### Prerequisites\n", 26 | "\n", 27 | "To run this notebook, you'll need the output from the previous notebook, 'coco_with_cnn_features.pkl'. 
It can also be downloaded from https://s3.amazonaws.com/lasagne/recipes/datasets/coco_with_cnn_features.pkl" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": null, 33 | "metadata": { 34 | "collapsed": true 35 | }, 36 | "outputs": [], 37 | "source": [ 38 | "!wget -N https://s3.amazonaws.com/lasagne/recipes/datasets/coco_with_cnn_features.pkl" 39 | ] 40 | }, 41 | { 42 | "cell_type": "code", 43 | "execution_count": null, 44 | "metadata": { 45 | "collapsed": false 46 | }, 47 | "outputs": [], 48 | "source": [ 49 | "import pickle\n", 50 | "import random\n", 51 | "import numpy as np\n", 52 | "\n", 53 | "import theano\n", 54 | "import theano.tensor as T\n", 55 | "import lasagne\n", 56 | "\n", 57 | "from collections import Counter\n", 58 | "from lasagne.utils import floatX" 59 | ] 60 | }, 61 | { 62 | "cell_type": "markdown", 63 | "metadata": {}, 64 | "source": [ 65 | "Load the preprocessed dataset containing features extracted by GoogLeNet" 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "execution_count": null, 71 | "metadata": { 72 | "collapsed": false 73 | }, 74 | "outputs": [], 75 | "source": [ 76 | "dataset = pickle.load(open('coco_with_cnn_features.pkl'))" 77 | ] 78 | }, 79 | { 80 | "cell_type": "markdown", 81 | "metadata": {}, 82 | "source": [ 83 | "Count words occuring at least 5 times and construct mapping int <-> word" 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": null, 89 | "metadata": { 90 | "collapsed": false 91 | }, 92 | "outputs": [], 93 | "source": [ 94 | "allwords = Counter()\n", 95 | "for item in dataset:\n", 96 | " for sentence in item['sentences']:\n", 97 | " allwords.update(sentence['tokens'])\n", 98 | " \n", 99 | "vocab = [k for k, v in allwords.items() if v >= 5]\n", 100 | "vocab.insert(0, '#START#')\n", 101 | "vocab.append('#END#')\n", 102 | "\n", 103 | "word_to_index = {w: i for i, w in enumerate(vocab)}\n", 104 | "index_to_word = {i: w for i, w in enumerate(vocab)}" 105 | ] 106 | }, 107 | { 108 
| "cell_type": "code", 109 | "execution_count": null, 110 | "metadata": { 111 | "collapsed": false 112 | }, 113 | "outputs": [], 114 | "source": [ 115 | "len(vocab)" 116 | ] 117 | }, 118 | { 119 | "cell_type": "code", 120 | "execution_count": null, 121 | "metadata": { 122 | "collapsed": false 123 | }, 124 | "outputs": [], 125 | "source": [ 126 | "SEQUENCE_LENGTH = 32\n", 127 | "MAX_SENTENCE_LENGTH = SEQUENCE_LENGTH - 3 # 1 for image, 1 for start token, 1 for end token\n", 128 | "BATCH_SIZE = 100\n", 129 | "CNN_FEATURE_SIZE = 1000\n", 130 | "EMBEDDING_SIZE = 256" 131 | ] 132 | }, 133 | { 134 | "cell_type": "code", 135 | "execution_count": null, 136 | "metadata": { 137 | "collapsed": true 138 | }, 139 | "outputs": [], 140 | "source": [ 141 | "# Returns a list of tuples (cnn features, list of words, image ID)\n", 142 | "def get_data_batch(dataset, size, split='train'):\n", 143 | " items = []\n", 144 | " \n", 145 | " while len(items) < size:\n", 146 | " item = random.choice(dataset)\n", 147 | " if item['split'] != split:\n", 148 | " continue\n", 149 | " sentence = random.choice(item['sentences'])['tokens']\n", 150 | " if len(sentence) > MAX_SENTENCE_LENGTH:\n", 151 | " continue\n", 152 | " items.append((item['cnn features'], sentence, item['cocoid']))\n", 153 | " \n", 154 | " return items" 155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": null, 160 | "metadata": { 161 | "collapsed": false 162 | }, 163 | "outputs": [], 164 | "source": [ 165 | "# Convert a list of tuples into arrays that can be fed into the network\n", 166 | "def prep_batch_for_network(batch):\n", 167 | " x_cnn = floatX(np.zeros((len(batch), 1000)))\n", 168 | " x_sentence = np.zeros((len(batch), SEQUENCE_LENGTH - 1), dtype='int32')\n", 169 | " y_sentence = np.zeros((len(batch), SEQUENCE_LENGTH), dtype='int32')\n", 170 | " mask = np.zeros((len(batch), SEQUENCE_LENGTH), dtype='bool')\n", 171 | "\n", 172 | " for j, (cnn_features, sentence, _) in enumerate(batch):\n", 173 | " 
x_cnn[j] = cnn_features\n", 174 | " i = 0\n", 175 | " for word in ['#START#'] + sentence + ['#END#']:\n", 176 | " if word in word_to_index:\n", 177 | " mask[j, i] = True\n", 178 | " y_sentence[j, i] = word_to_index[word]\n", 179 | " x_sentence[j, i] = word_to_index[word]\n", 180 | " i += 1\n", 181 | " #mask[j, 0] = False\n", 182 | " \n", 183 | " return x_cnn, x_sentence, y_sentence, mask" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": null, 189 | "metadata": { 190 | "collapsed": false 191 | }, 192 | "outputs": [], 193 | "source": [ 194 | "# sentence embedding maps integer sequence with dim (BATCH_SIZE, SEQUENCE_LENGTH - 1) to \n", 195 | "# (BATCH_SIZE, SEQUENCE_LENGTH-1, EMBEDDING_SIZE)\n", 196 | "l_input_sentence = lasagne.layers.InputLayer((BATCH_SIZE, SEQUENCE_LENGTH - 1))\n", 197 | "l_sentence_embedding = lasagne.layers.EmbeddingLayer(l_input_sentence,\n", 198 | " input_size=len(vocab),\n", 199 | " output_size=EMBEDDING_SIZE,\n", 200 | " )\n", 201 | "\n", 202 | "# cnn embedding changes the dimensionality of the representation from 1000 to EMBEDDING_SIZE, \n", 203 | "# and reshapes to add the time dimension - final dim (BATCH_SIZE, 1, EMBEDDING_SIZE)\n", 204 | "l_input_cnn = lasagne.layers.InputLayer((BATCH_SIZE, CNN_FEATURE_SIZE))\n", 205 | "l_cnn_embedding = lasagne.layers.DenseLayer(l_input_cnn, num_units=EMBEDDING_SIZE,\n", 206 | " nonlinearity=lasagne.nonlinearities.identity)\n", 207 | "\n", 208 | "l_cnn_embedding = lasagne.layers.ReshapeLayer(l_cnn_embedding, ([0], 1, [1]))\n", 209 | "\n", 210 | "# the two are concatenated to form the RNN input with dim (BATCH_SIZE, SEQUENCE_LENGTH, EMBEDDING_SIZE)\n", 211 | "l_rnn_input = lasagne.layers.ConcatLayer([l_cnn_embedding, l_sentence_embedding])\n", 212 | "\n", 213 | "l_dropout_input = lasagne.layers.DropoutLayer(l_rnn_input, p=0.5)\n", 214 | "l_lstm = lasagne.layers.LSTMLayer(l_dropout_input,\n", 215 | " num_units=EMBEDDING_SIZE,\n", 216 | " unroll_scan=True,\n", 217 | " 
grad_clipping=5.)\n", 218 | "l_dropout_output = lasagne.layers.DropoutLayer(l_lstm, p=0.5)\n", 219 | "\n", 220 | "# the RNN output is reshaped to combine the batch and time dimensions\n", 221 | "# dim (BATCH_SIZE * SEQUENCE_LENGTH, EMBEDDING_SIZE)\n", 222 | "l_shp = lasagne.layers.ReshapeLayer(l_dropout_output, (-1, EMBEDDING_SIZE))\n", 223 | "\n", 224 | "# decoder is a fully connected layer with one output unit for each word in the vocabulary\n", 225 | "l_decoder = lasagne.layers.DenseLayer(l_shp, num_units=len(vocab), nonlinearity=lasagne.nonlinearities.softmax)\n", 226 | "\n", 227 | "# finally, the separation between batch and time dimension is restored\n", 228 | "l_out = lasagne.layers.ReshapeLayer(l_decoder, (BATCH_SIZE, SEQUENCE_LENGTH, len(vocab)))" 229 | ] 230 | }, 231 | { 232 | "cell_type": "markdown", 233 | "metadata": {}, 234 | "source": [ 235 | "Define symbolic variables for the various inputs" 236 | ] 237 | }, 238 | { 239 | "cell_type": "code", 240 | "execution_count": null, 241 | "metadata": { 242 | "collapsed": true 243 | }, 244 | "outputs": [], 245 | "source": [ 246 | "# cnn feature vector\n", 247 | "x_cnn_sym = T.matrix()\n", 248 | "\n", 249 | "# sentence encoded as sequence of integer word tokens\n", 250 | "x_sentence_sym = T.imatrix()\n", 251 | "\n", 252 | "# mask defines which elements of the sequence should be predicted\n", 253 | "mask_sym = T.imatrix()\n", 254 | "\n", 255 | "# ground truth for the RNN output\n", 256 | "y_sentence_sym = T.imatrix()" 257 | ] 258 | }, 259 | { 260 | "cell_type": "code", 261 | "execution_count": null, 262 | "metadata": { 263 | "collapsed": false 264 | }, 265 | "outputs": [], 266 | "source": [ 267 | "output = lasagne.layers.get_output(l_out, {\n", 268 | " l_input_sentence: x_sentence_sym,\n", 269 | " l_input_cnn: x_cnn_sym\n", 270 | " })" 271 | ] 272 | }, 273 | { 274 | "cell_type": "code", 275 | "execution_count": null, 276 | "metadata": { 277 | "collapsed": false 278 | }, 279 | "outputs": [], 280 | "source": [ 281 
def calc_cross_ent(net_output, mask, targets):
    """Cross-entropy over the flattened (batch*time) sequence, keeping only
    the positions selected by `mask`."""
    flat_preds = T.reshape(net_output, (-1, len(vocab)))
    flat_targets = T.flatten(targets)
    all_costs = T.nnet.categorical_crossentropy(flat_preds, flat_targets)
    keep = T.flatten(mask).nonzero()
    return all_costs[keep]
# Save the trained network parameters together with the vocabulary mappings
# needed to decode captions later.
param_values = lasagne.layers.get_all_param_values(l_out)
d = {'param values': param_values,
     'vocab': vocab,
     'word_to_index': word_to_index,
     'index_to_word': index_to_word,
     }
# pickle.HIGHEST_PROTOCOL is a binary format, so the file must be opened in
# binary mode ('wb', not 'w'); the original also leaked the file handle.
with open('lstm_coco_trained.pkl', 'wb') as f:
    pickle.dump(d, f, protocol=pickle.HIGHEST_PROTOCOL)
For more background and details:\n", 11 | "- http://karpathy.github.io/2015/05/21/rnn-effectiveness/\n", 12 | "- https://github.com/karpathy/char-rnn\n", 13 | "\n", 14 | "The data used for training is a collection of patent claims obtained from\n", 15 | "http://www.cl.uni-heidelberg.de/statnlpgroup/pattr/" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": 1, 21 | "metadata": { 22 | "collapsed": false 23 | }, 24 | "outputs": [ 25 | { 26 | "name": "stdout", 27 | "output_type": "stream", 28 | "text": [ 29 | "Couldn't import dot_parser, loading of dot files will not be possible.\n" 30 | ] 31 | }, 32 | { 33 | "name": "stderr", 34 | "output_type": "stream", 35 | "text": [ 36 | "Using gpu device 0: Graphics Device (CNMeM is disabled, CuDNN 4007)\n", 37 | "/home/eben/venv/local/lib/python2.7/site-packages/Theano-0.8.0rc1-py2.7.egg/theano/tensor/signal/downsample.py:6: UserWarning: downsample module has been moved to the theano.tensor.signal.pool module.\n", 38 | " \"downsample module has been moved to the theano.tensor.signal.pool module.\")\n" 39 | ] 40 | } 41 | ], 42 | "source": [ 43 | "import numpy as np\n", 44 | "import theano\n", 45 | "import theano.tensor as T\n", 46 | "import lasagne\n", 47 | "from lasagne.utils import floatX\n", 48 | "\n", 49 | "import pickle\n", 50 | "import gzip\n", 51 | "import random\n", 52 | "from collections import Counter" 53 | ] 54 | }, 55 | { 56 | "cell_type": "code", 57 | "execution_count": 2, 58 | "metadata": { 59 | "collapsed": false 60 | }, 61 | "outputs": [], 62 | "source": [ 63 | "# Load the corpus and look at an example\n", 64 | "corpus = gzip.open('claims.txt.gz').read()" 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": 3, 70 | "metadata": { 71 | "collapsed": false 72 | }, 73 | "outputs": [ 74 | { 75 | "data": { 76 | "text/plain": [ 77 | "'A fixed spool fishing reel according to claim 2, characterised in that the cog wheel (236) of greater diameter has in the range from ten teeth to 
sixteen teeth, and the cog wheel (232) of smaller diameter has in the range from five teeth to ten teeth. '" 78 | ] 79 | }, 80 | "execution_count": 3, 81 | "metadata": {}, 82 | "output_type": "execute_result" 83 | } 84 | ], 85 | "source": [ 86 | "corpus.split('\\n')[0]" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": 4, 92 | "metadata": { 93 | "collapsed": false 94 | }, 95 | "outputs": [], 96 | "source": [ 97 | "# Find the set of characters used in the corpus and construct mappings between characters,\n", 98 | "# integer indices, and one hot encodings\n", 99 | "VOCABULARY = set(corpus)\n", 100 | "VOCAB_SIZE = len(VOCABULARY)\n", 101 | "\n", 102 | "CHAR_TO_IX = {c: i for i, c in enumerate(VOCABULARY)}\n", 103 | "IX_TO_CHAR = {i: c for i, c in enumerate(VOCABULARY)}\n", 104 | "CHAR_TO_ONEHOT = {c: np.eye(VOCAB_SIZE)[i] for i, c in enumerate(VOCABULARY)}" 105 | ] 106 | }, 107 | { 108 | "cell_type": "code", 109 | "execution_count": 5, 110 | "metadata": { 111 | "collapsed": true 112 | }, 113 | "outputs": [], 114 | "source": [ 115 | "SEQUENCE_LENGTH = 50\n", 116 | "BATCH_SIZE = 50\n", 117 | "RNN_HIDDEN_SIZE = 200" 118 | ] 119 | }, 120 | { 121 | "cell_type": "code", 122 | "execution_count": 6, 123 | "metadata": { 124 | "collapsed": true 125 | }, 126 | "outputs": [], 127 | "source": [ 128 | "# Reserve 10% of the data for validation\n", 129 | "train_corpus = corpus[:(len(corpus) * 9 // 10)]\n", 130 | "val_corpus = corpus[(len(corpus) * 9 // 10):]" 131 | ] 132 | }, 133 | { 134 | "cell_type": "code", 135 | "execution_count": 7, 136 | "metadata": { 137 | "collapsed": false 138 | }, 139 | "outputs": [], 140 | "source": [ 141 | "# Our batch generator will yield sequential portions of the corpus of size SEQUENCE_LENGTH,\n", 142 | "# starting from random locations and wrapping around the end of the data.\n", 143 | "def data_batch_generator(corpus, size=BATCH_SIZE):\n", 144 | " startidx = np.random.randint(0, len(corpus) - SEQUENCE_LENGTH - 1, size=size)\n", 
def prep_batch_for_network(batch):
    """One-hot encode each character sequence and build integer targets.

    The target at each timestep is the index of the NEXT character, i.e. the
    sequence shifted left by one.
    """
    n = len(batch)
    x_seq = np.zeros((n, SEQUENCE_LENGTH, VOCAB_SIZE), dtype='float32')
    y_seq = np.zeros((n, SEQUENCE_LENGTH), dtype='int32')

    for row, item in enumerate(batch):
        for col in range(SEQUENCE_LENGTH):
            x_seq[row, col] = CHAR_TO_ONEHOT[item[col]]
            y_seq[row, col] = CHAR_TO_IX[item[col + 1]]

    return x_seq, y_seq
In addition to the usual features and target,\n", 208 | "# we need initial values for the RNN layer's hidden states\n", 209 | "x_sym = T.tensor3()\n", 210 | "y_sym = T.imatrix()\n", 211 | "hid_init_sym = T.matrix()\n", 212 | "hid2_init_sym = T.matrix()" 213 | ] 214 | }, 215 | { 216 | "cell_type": "code", 217 | "execution_count": 11, 218 | "metadata": { 219 | "collapsed": false 220 | }, 221 | "outputs": [], 222 | "source": [ 223 | "l_input = lasagne.layers.InputLayer((None, SEQUENCE_LENGTH, VOCAB_SIZE))\n", 224 | "l_input_hid = lasagne.layers.InputLayer((None, RNN_HIDDEN_SIZE))\n", 225 | "l_input_hid2 = lasagne.layers.InputLayer((None, RNN_HIDDEN_SIZE))\n", 226 | "\n", 227 | "# Our network has two stacked GRU layers processing the input sequence.\n", 228 | "l_rnn = lasagne.layers.GRULayer(l_input,\n", 229 | " num_units=RNN_HIDDEN_SIZE,\n", 230 | " grad_clipping=5.,\n", 231 | " hid_init=l_input_hid,\n", 232 | " )\n", 233 | "\n", 234 | "l_rnn2 = lasagne.layers.GRULayer(l_rnn,\n", 235 | " num_units=RNN_HIDDEN_SIZE,\n", 236 | " grad_clipping=5.,\n", 237 | " hid_init=l_input_hid2,\n", 238 | " )\n", 239 | "\n", 240 | "\n", 241 | "l_shp = lasagne.layers.ReshapeLayer(l_rnn2, (-1, RNN_HIDDEN_SIZE))\n", 242 | "\n", 243 | "# Before the decoder layer, we need to reshape the sequence into the batch dimension,\n", 244 | "# so that timesteps are decoded independently.\n", 245 | "l_decoder = lasagne.layers.DenseLayer(l_shp,\n", 246 | " num_units=VOCAB_SIZE,\n", 247 | " nonlinearity=lasagne.nonlinearities.softmax)\n", 248 | "\n", 249 | "l_out = lasagne.layers.ReshapeLayer(l_decoder, (-1, SEQUENCE_LENGTH, VOCAB_SIZE))" 250 | ] 251 | }, 252 | { 253 | "cell_type": "code", 254 | "execution_count": 12, 255 | "metadata": { 256 | "collapsed": false 257 | }, 258 | "outputs": [], 259 | "source": [ 260 | "# We extract the hidden state of each GRU layer as well as the output of the decoder.\n", 261 | "# Only the hidden state at the last timestep is needed\n", 262 | "hid_out, hid2_out, prob_out 
def calc_cross_ent(net_output, targets):
    """Per-timestep cross entropy, with batch and time flattened together
    so every timestep is scored independently."""
    flat_preds = T.reshape(net_output, (-1, VOCAB_SIZE))
    flat_targets = T.flatten(targets)
    return T.nnet.categorical_crossentropy(flat_preds, flat_targets)
# Load pretrained weights into network.
# The pickle was written with a binary protocol, so the file must be opened
# in binary mode ('rb', not 'r'); a context manager closes the handle.
with open('gru_2layer_trained.pkl', 'rb') as f:
    d = pickle.load(f)
lasagne.layers.set_all_param_values(l_out, d['param values'])
| "collapsed": false 386 | }, 387 | "outputs": [ 388 | { 389 | "name": "stdout", 390 | "output_type": "stream", 391 | "text": [ 392 | "0.896837\n" 393 | ] 394 | } 395 | ], 396 | "source": [ 397 | "# Calculate validation loss\n", 398 | "hid = np.zeros((BATCH_SIZE, RNN_HIDDEN_SIZE), dtype='float32')\n", 399 | "hid2 = np.zeros((BATCH_SIZE, RNN_HIDDEN_SIZE), dtype='float32')\n", 400 | "\n", 401 | "val_batch_gen = data_batch_generator(val_corpus)\n", 402 | "\n", 403 | "losses = []\n", 404 | "\n", 405 | "for iteration in range(50):\n", 406 | " x, y = prep_batch_for_network(next(val_batch_gen))\n", 407 | " loss_val, hid, hid2 = f_val(x, y, hid, hid2)\n", 408 | " losses.append(loss_val)\n", 409 | "print(np.mean(losses))" 410 | ] 411 | }, 412 | { 413 | "cell_type": "code", 414 | "execution_count": 19, 415 | "metadata": { 416 | "collapsed": false 417 | }, 418 | "outputs": [], 419 | "source": [ 420 | "# For faster sampling, we rebuild the network with a sequence length of 1\n", 421 | "l_input = lasagne.layers.InputLayer((None, 1, VOCAB_SIZE))\n", 422 | "l_input_hid = lasagne.layers.InputLayer((None, RNN_HIDDEN_SIZE))\n", 423 | "l_input_hid2 = lasagne.layers.InputLayer((None, RNN_HIDDEN_SIZE))\n", 424 | "\n", 425 | "# Our network has two stacked GRU layers processing the input sequence.\n", 426 | "l_rnn = lasagne.layers.GRULayer(l_input,\n", 427 | " num_units=RNN_HIDDEN_SIZE,\n", 428 | " grad_clipping=5.,\n", 429 | " hid_init=l_input_hid,\n", 430 | " )\n", 431 | "\n", 432 | "l_rnn2 = lasagne.layers.GRULayer(l_rnn,\n", 433 | " num_units=RNN_HIDDEN_SIZE,\n", 434 | " grad_clipping=5.,\n", 435 | " hid_init=l_input_hid2,\n", 436 | " )\n", 437 | "\n", 438 | "\n", 439 | "l_shp = lasagne.layers.ReshapeLayer(l_rnn2, (-1, RNN_HIDDEN_SIZE))\n", 440 | "\n", 441 | "l_decoder = lasagne.layers.DenseLayer(l_shp,\n", 442 | " num_units=VOCAB_SIZE,\n", 443 | " nonlinearity=lasagne.nonlinearities.softmax)\n", 444 | "\n", 445 | "l_out = lasagne.layers.ReshapeLayer(l_decoder, (-1, 1, 
VOCAB_SIZE))\n", 446 | "\n", 447 | "hid_out, hid2_out, prob_out = lasagne.layers.get_output([l_rnn, l_rnn2, l_out],\n", 448 | " {l_input: x_sym,\n", 449 | " l_input_hid: hid_init_sym,\n", 450 | " l_input_hid2: hid2_init_sym,\n", 451 | " })\n", 452 | "hid_out = hid_out[:, -1]\n", 453 | "hid2_out = hid2_out[:, -1]\n", 454 | "prob_out = prob_out[0, -1]" 455 | ] 456 | }, 457 | { 458 | "cell_type": "code", 459 | "execution_count": 20, 460 | "metadata": { 461 | "collapsed": false 462 | }, 463 | "outputs": [], 464 | "source": [ 465 | "lasagne.layers.set_all_param_values(l_out, d['param values'])" 466 | ] 467 | }, 468 | { 469 | "cell_type": "code", 470 | "execution_count": 21, 471 | "metadata": { 472 | "collapsed": false 473 | }, 474 | "outputs": [], 475 | "source": [ 476 | "predict_fn = theano.function([x_sym, hid_init_sym, hid2_init_sym], [prob_out, hid_out, hid2_out])" 477 | ] 478 | }, 479 | { 480 | "cell_type": "code", 481 | "execution_count": 22, 482 | "metadata": { 483 | "collapsed": true 484 | }, 485 | "outputs": [], 486 | "source": [ 487 | "# We will use random sentences from the validation corpus to 'prime' the network\n", 488 | "primers = val_corpus.split('\\n')" 489 | ] 490 | }, 491 | { 492 | "cell_type": "code", 493 | "execution_count": 23, 494 | "metadata": { 495 | "collapsed": false 496 | }, 497 | "outputs": [ 498 | { 499 | "name": "stdout", 500 | "output_type": "stream", 501 | "text": [ 502 | "PRIMER: The connector assembly of claim 5, wherein said connector body includes means (56) for terminating two optical fibers and said connector body includes a pair of cantilevered arms (66) on each of two opposed sides thereof with a latching nub (70) attached to the free end (69) of each, and said housing including two openings on each of two opposed sides thereof. \n", 503 | "\n", 504 | "GENERATED: A spring mechanism with speciring machine comprising a first channel (130) to form step (IO, LS) of said second cascade (18) onlocing up to 0.65 and D. 
\n", 505 | "\n" 506 | ] 507 | } 508 | ], 509 | "source": [ 510 | "# We feed character one at a time from the priming sequence into the network.\n", 511 | "# To obtain a sample string, at each timestep we sample from the output probability distribution,\n", 512 | "# and feed the chosen character back into the network. We terminate after the first linebreak.\n", 513 | "sentence = ''\n", 514 | "hid = np.zeros((1, RNN_HIDDEN_SIZE), dtype='float32')\n", 515 | "hid2 = np.zeros((1, RNN_HIDDEN_SIZE), dtype='float32')\n", 516 | "x = np.zeros((1, 1, VOCAB_SIZE), dtype='float32')\n", 517 | "\n", 518 | "primer = np.random.choice(primers) + '\\n'\n", 519 | "\n", 520 | "for c in primer:\n", 521 | " p, hid, hid2 = predict_fn(x, hid, hid2)\n", 522 | " x[0, 0, :] = CHAR_TO_ONEHOT[c]\n", 523 | " \n", 524 | "for _ in range(500):\n", 525 | " p, hid, hid2 = predict_fn(x, hid, hid2)\n", 526 | " p = p/(1 + 1e-6)\n", 527 | " s = np.random.multinomial(1, p)\n", 528 | " sentence += IX_TO_CHAR[s.argmax(-1)]\n", 529 | " x[0, 0, :] = s\n", 530 | " if sentence[-1] == '\\n':\n", 531 | " break\n", 532 | " \n", 533 | "print('PRIMER: ' + primer)\n", 534 | "print('GENERATED: ' + sentence)" 535 | ] 536 | }, 537 | { 538 | "cell_type": "markdown", 539 | "metadata": {}, 540 | "source": [ 541 | "Exercises\n", 542 | "=====\n", 543 | "\n", 544 | "1. Implement sampling using the \"temperature softmax\": $$p(i) = \\frac{e^{\\frac{z_i}{T}}}{\\Sigma_k e^{\\frac{z_k}{T}}}$$\n", 545 | "\n", 546 | "This generalizes the softmax with a parameter $T$ which affects the \"sharpness\" of the distribution. Lowering $T$ will make samples less error-prone but more repetitive. 
def build_inception_module(name, input_layer, nfilters):
    """Assemble one GoogLeNet inception module.

    nfilters: (pool_proj, 1x1, 3x3_reduce, 3x3, 5x5_reduce, 5x5)

    Returns a dict of the module's layers keyed as '<name>/<branch>'.
    """
    branches = {}

    # Pooling branch: 3x3 max pool followed by a 1x1 "projection" convolution.
    branches['pool'] = PoolLayerDNN(input_layer, pool_size=3, stride=1, pad=1)
    branches['pool_proj'] = ConvLayer(branches['pool'], nfilters[0], 1)

    # Plain 1x1 convolution branch.
    branches['1x1'] = ConvLayer(input_layer, nfilters[1], 1)

    # 3x3 branch, preceded by a 1x1 dimensionality-reduction convolution.
    branches['3x3_reduce'] = ConvLayer(input_layer, nfilters[2], 1)
    branches['3x3'] = ConvLayer(branches['3x3_reduce'], nfilters[3], 3, pad=1)

    # 5x5 branch, likewise preceded by a 1x1 reduction.
    branches['5x5_reduce'] = ConvLayer(input_layer, nfilters[4], 1)
    branches['5x5'] = ConvLayer(branches['5x5_reduce'], nfilters[5], 5, pad=2)

    # The four branch outputs are concatenated along the channel axis.
    branches['output'] = ConcatLayer([
        branches['1x1'],
        branches['3x3'],
        branches['5x5'],
        branches['pool_proj'],
    ])

    return {'{}/{}'.format(name, key): layer for key, layer in branches.items()}
def temperature_softmax(p, T):
    """Re-sharpen a probability vector with a temperature parameter.

    Computes softmax(log(p) / T): T=1 returns p unchanged, T<1 sharpens the
    distribution, T>1 flattens it.
    """
    z = np.log(p)
    z /= T
    # Subtract the max logit before exponentiating. Softmax is invariant to
    # this shift, and it keeps the largest term at exp(0)=1, preventing the
    # whole vector underflowing to zero for small T (the original returned
    # NaN from 0/0 in that case).
    z -= z.max()
    e = np.exp(z)
    return e / e.sum()
c in primer: 16 | p, hid, hid2 = predict_fn(x, hid, hid2) 17 | x[0, 0, :] = CHAR_TO_ONEHOT[c] 18 | 19 | for _ in range(500): 20 | p, hid, hid2 = predict_fn(x, hid, hid2) 21 | p = temperature_softmax(p, T) 22 | p = p/(1 + 1e-6) 23 | s = np.random.multinomial(1, p) 24 | sentence += IX_TO_CHAR[s.argmax(-1)] 25 | x[0, 0, :] = s 26 | if sentence[-1] == '\n': 27 | break 28 | 29 | print('GENERATED (Temperature = {}): {}\n'.format(T, sentence)) -------------------------------------------------------------------------------- /5 - Extending Lasagne/Custom Layer Class.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "Custom `Layer` Classes\n", 8 | "============\n", 9 | "\n", 10 | "Lasagne is intended to be simple to extend. If you need to do something that isn't provided by one or a combination of the existing `Layer` classes, it is easy to create your own.\n", 11 | "\n", 12 | "The procedure:\n", 13 | "- Subclass `lasagne.layers.base.Layer`\n", 14 | "- Implement `get_output_for` which take a Theano expression and returns a new expression.\n", 15 | "- Implement `get_output_shape_for` which takes a shape tuple and returns a new tuple (only needed if your operation changes the shape).\n", 16 | "\n", 17 | "More details: https://lasagne.readthedocs.org/en/latest/user/custom_layers.html" 18 | ] 19 | }, 20 | { 21 | "cell_type": "code", 22 | "execution_count": 27, 23 | "metadata": { 24 | "collapsed": false 25 | }, 26 | "outputs": [], 27 | "source": [ 28 | "import numpy as np\n", 29 | "import theano\n", 30 | "import theano.tensor as T\n", 31 | "import lasagne\n", 32 | "from lasagne.layers.base import Layer\n", 33 | "\n", 34 | "_srng = T.shared_randomstreams.RandomStreams()\n", 35 | "\n", 36 | "\n", 37 | "def theano_shuffled(input):\n", 38 | " n = input.shape[0]\n", 39 | "\n", 40 | " shuffled = T.permute_row_elements(input.T, 
class FractionalPool2DLayer(Layer):
    """
    Fractional pooling as described in http://arxiv.org/abs/1412.6071
    Only the random overlapping mode is currently implemented.

    Parameters
    ----------
    incoming : Layer
        Input layer; must produce 4D bc01 output (enforced in __init__).
    ds : tuple
        Pooling ratio per spatial dimension; each entry must lie in [1, 2].
    pool_function : callable
        Reduction applied over each 2x2 pooling window (default: T.max).
    """
    def __init__(self, incoming, ds, pool_function=T.max, **kwargs):
        super(FractionalPool2DLayer, self).__init__(incoming, **kwargs)
        if type(ds) is not tuple:
            raise ValueError("ds must be a tuple")
        if (not 1 <= ds[0] <= 2) or (not 1 <= ds[1] <= 2):
            raise ValueError("ds must be between 1 and 2")
        self.ds = ds  # a tuple
        if len(self.input_shape) != 4:
            raise ValueError("Only bc01 currently supported")
        self.pool_function = pool_function

    def get_output_shape_for(self, input_shape):
        # Output spatial size is ceil(input / ratio); batch and channel
        # dimensions pass through unchanged.
        output_shape = list(input_shape)  # copy / convert to mutable list
        output_shape[2] = int(np.ceil(float(output_shape[2]) / self.ds[0]))
        output_shape[3] = int(np.ceil(float(output_shape[3]) / self.ds[1]))

        return tuple(output_shape)

    def get_output_for(self, input, **kwargs):
        _, _, n_in0, n_in1 = self.input_shape
        _, _, n_out0, n_out1 = self.output_shape

        # Variable stride across the input creates fractional reduction.
        # Each vector holds one stride (2 or 1) per output position; the
        # counts are fixed so that the strides sum exactly to the input size
        # (2*(n_in - n_out) + (2*n_out - n_in) = n_in) — only the ORDER of
        # the strides is randomized below.
        a = theano.shared(
            np.array([2] * (n_in0 - n_out0) + [1] * (2 * n_out0 - n_in0)))
        b = theano.shared(
            np.array([2] * (n_in1 - n_out1) + [1] * (2 * n_out1 - n_in1)))

        # Randomize the input strides (theano_shuffled permutes at each call,
        # so every forward pass draws a fresh pooling grid).
        a = theano_shuffled(a)
        b = theano_shuffled(b)

        # Convert to input positions, starting at 0
        a = T.concatenate(([0], a[:-1]))
        b = T.concatenate(([0], b[:-1]))
        a = T.cumsum(a)
        b = T.cumsum(b)

        # Positions of the other corners, clipped so the last window does not
        # run off the edge of the input.
        c = T.clip(a + 1, 0, n_in0 - 1)
        d = T.clip(b + 1, 0, n_in1 - 1)

        # Index the four positions in the pooling window and stack them on a
        # new leading axis, then reduce that axis with pool_function.
        temp = T.stack(input[:, :, a, :][:, :, :, b],
                       input[:, :, c, :][:, :, :, b],
                       input[:, :, a, :][:, :, :, d],
                       input[:, :, c, :][:, :, :, d])

        return self.pool_function(temp, axis=0)
200 OK\n", 138 | "Length: 5321 (5.2K) [image/png]\n", 139 | "Server file no newer than local file ‘64px-Rubik's_cube_scrambled.svg.png’ -- not retrieving.\n", 140 | "\n" 141 | ] 142 | } 143 | ], 144 | "source": [ 145 | "# Get test image\n", 146 | "!wget -N \"https://upload.wikimedia.org/wikipedia/commons/thumb/a/ae/Rubik's_cube_scrambled.svg/64px-Rubik's_cube_scrambled.svg.png\"\n", 147 | "im = plt.imread(\"64px-Rubik's_cube_scrambled.svg.png\")\n", 148 | "im = im[:, :, :3]\n", 149 | "im = np.rollaxis(im, 2)[np.newaxis]" 150 | ] 151 | }, 152 | { 153 | "cell_type": "code", 154 | "execution_count": 81, 155 | "metadata": { 156 | "collapsed": false 157 | }, 158 | "outputs": [ 159 | { 160 | "data": { 161 | "text/plain": [ 162 | "(1, 3, 64, 64)" 163 | ] 164 | }, 165 | "execution_count": 81, 166 | "metadata": {}, 167 | "output_type": "execute_result" 168 | } 169 | ], 170 | "source": [ 171 | "im.shape" 172 | ] 173 | }, 174 | { 175 | "cell_type": "code", 176 | "execution_count": 82, 177 | "metadata": { 178 | "collapsed": true 179 | }, 180 | "outputs": [], 181 | "source": [ 182 | "l_in = lasagne.layers.InputLayer((1, 3, 64, 64))\n", 183 | "l_fracpool = FractionalPool2DLayer(l_in, ds=(1.5, 1.5))" 184 | ] 185 | }, 186 | { 187 | "cell_type": "code", 188 | "execution_count": 83, 189 | "metadata": { 190 | "collapsed": false 191 | }, 192 | "outputs": [ 193 | { 194 | "data": { 195 | "text/plain": [ 196 | "(1, 3, 43, 43)" 197 | ] 198 | }, 199 | "execution_count": 83, 200 | "metadata": {}, 201 | "output_type": "execute_result" 202 | } 203 | ], 204 | "source": [ 205 | "l_fracpool.output_shape" 206 | ] 207 | }, 208 | { 209 | "cell_type": "code", 210 | "execution_count": 84, 211 | "metadata": { 212 | "collapsed": false 213 | }, 214 | "outputs": [], 215 | "source": [ 216 | "output = lasagne.layers.get_output(l_fracpool)" 217 | ] 218 | }, 219 | { 220 | "cell_type": "code", 221 | "execution_count": 87, 222 | "metadata": { 223 | "collapsed": false 224 | }, 225 | "outputs": [ 226 | { 227 | 
"data": { 228 | "text/plain": [ 229 | "" 230 | ] 231 | }, 232 | "execution_count": 87, 233 | "metadata": {}, 234 | "output_type": "execute_result" 235 | }, 236 | { 237 | "data": { 238 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP0AAAD+CAYAAADxoQNSAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAFIZJREFUeJzt3Xt0VdWdB/DvLy+SFEIEhISXgWpURFFRbNUasahoK51Z\nWpCuUdaMy87yUV0ddaBOnWmZEez4wHFa7ZoZyijWB+MD8U1E0ZkCKhYEeRhBg/LIJfLKC/Ige/64\nl2XIb19yknPPufdkfz9rsVbuN+exD8kvJ9l3n73FGAMickdWuhtAROFi0RM5hkVP5BgWPZFjWPRE\njmHREznGV9GLyGQR2Swin4nIzFQ1ioiCIz19n15EsgF8CmASgB0APgQw3RizKXXNI6JU83OnnwBg\nizGm2hjTCuAZAD9KTbOIKCg5PvYdBuCrDq+3Aziv4wYiwuF+RGlijBFb7qfoWdAZICenr8oGlk9X\nWW7BYJUVDhwHAPh6yyIMOnEqACC/3wnW80hWnqf21FYtVNm+bS+r7OD+zzwdj1LPz6/3OwCM6PB6\nBOJ3eyLKYH6KfjWAk0SkTETyAEwDsCQ1zSKioPS49x4AROQKAA8DyAYw3xgzt9Pnnf0TYEpxscqG\n5uaqrMSSjczt4/k8S83JKtt03lzLlsk17F6NvoPP6dY+3dF+uFll9TUrrNvW1/xJZXs+fz7lbXJB\nEH/TwxjzOoDX/RyD0i/IgqfMwxF5RI5h0RM5hkVP5Bhff9P3ZrNKSlU2OFf/d51dUGjdv9TSQReE\nCyzZPEu2MuiGdFNWjv3/Lb//iSrre4EeO9C67WuVNW9v9N8wB/BOT+QYFj2RY1j0RI5h0RM5hkVP\n5Bhfw3C7PLhlGG5JSYna7tZbb7XuP2HCBJWNHDlSZUVFRT1p3jFVVVWprHT2bL1hbHfKz+3fMJUc\nbB+usuvLLvJ8xLrivSrLu/Zty5ZNno8ZhuYvGqz5rt+sU1nLroNBNydUyYbh8k5P5BgWPZFjWPRE\njmHREzkm8GG4fU7s1Mk2OtvzvmPHjk1xa7wrLy/X4ZNPqqj+0st8nkl3ujX2vVhlnxeNsu79hSV/\n0zK1Va1lX2svTxL9cEhlzXilG0dIjz6j9HRiADBy3nkqq1+hO2Vjj2xMeZvSjXd6Isew6Ikcw6In\ncgyLnsgxgY/IK5o09Kis5Genet7/3/LuVNm4ceN8t6uzpiY9iqyw8CXLlustWXWSo25TSWvrDJXN\nn3+Vyl7aFN2fxS3Fehb01kkPqiwr33uHrs0V0J281wy4RmVlZWXW/bOze37+WCxmzW+66SaVrVq1\nqsfn8Ysj8ogIAIueyDkseiLHsOiJHBN4R17f849eOHHozNN9HfMHLWeo7JqhugNn1KgPLHv/T5Kj\n7vLVJn/0ajazZz+tspV7wmhL99juGFMadUfe6I13q6z/SUnGA1qekn4OdSr7zSN6JRyR7owxTD1b\nLa1YoVfyeeutt1T27LPPqmz//v1+28OOPCJi0RM5h0VP5BgWPZFjAu/Iy+/0aO3IB89N+XmWn7tQ\nZdu2/VFlJ5zwaMrPHQz9OGh1teVRXwDX/u1AlY0+4TiVDfyyRWVzDlf2oG2ZK/+1V1WWG9JKQ37Z\nOu2eeeYZ67b333+/yg4e1PP7sSOPiACw6Imcw6IncgyLnsgxLHoix4S+wk354u9bNvR3HlvvvU1d\nnR7OCQBFRVdY0m9ZMr0a/GuvTbMec8GbugfdvtZK+vz9/k9Vds7OR5Jsbf+/i6onLvqeyi6fPl1l\no0ePVt
l7771nPebpCxaoLG9n6od42yr24qqjv5Y1ra3svSeiOBY9kWNY9ESOYdETOSbwFW46az/Y\nprKswnCakWxJ65tvHqqyurobVbZnYOon5Uynfy0+WWUDi39n3faxjdcF3ZyAFNrjd5pV9Nj+x1T2\nq3V6MtTxvtuUeo2HD3velnd6Isew6Ikc02XRi8gfRCQmIus7ZANEpFJEqkRkqYgUB9tMIkoVL3f6\nBQAmd8pmAag0xpQDWJZ4TUQR0GUPmjHmf0WkrFM8BUBF4uPHASyHx8Jv26M7UPJC6shLZsyYCpU9\nd54e5de6S0+2WbRMd/hFWbL5N6eOeUJl43L0c93fX1mtsoZ+eg6FTwfaJ0jddNwYle3OSdIZl2KF\n6/4zlPP49WfLikz17e2e9+/p3/RDjDFH1vaJARjSw+MQUch8d+SZ+OD94AbwE1FK9bToYyJSAgAi\nUgpgd+qaRERB6mnRLwFwZAnWGQAWp6Y5RBS0LnvQRORpxDvtBonIVwD+EcB9ABaJyA2Ir9U81esJ\n25v0iLyoyC3Vj6I2t39p3bZP1sigmxOqikb9iGhtf73dQ+dGZfJRbUizpRtTL0CUdjta9CSn3eGl\n914/ZBw3ydeZiSgtOCKPyDEseiLHsOiJHMOiJ3JM6ONfm6v19JD5J1u6gUPUr1+/Hu/bfr19IsmG\n+laV9XnmlyrLLTjesrce6zS8rtp6nnP2fqKya5sWqSyIn+4fWYaDbjwngBOFZGzjZh1mYO/9J4f0\nElbdwTs9kWNY9ESOYdETOYZFT+SY0Dvybl3sfaWUhZcPC7Alwcrup9dFb/3hr1Qm8/S65C/kRONn\ncVM3nuGOgiE5+yypfTLVdKpp1Z3E3RGN7y4iShkWPZFjWPREjmHREzkmvTNSJkx9196MQZP0yLR5\n2T7XtU4jKS1Q2R2HduoN+/YNoTX+NbbblvOOriG5uvM1E21jRx4RdQeLnsgxLHoix7DoiRwTekfe\nvm4sqXvJPXpm7UtsG1b2vD0AkJOTvv7M49N4br92tw9OdxN6rLm+WmVj8/PDb8gxbG3Wq0EBwOaD\nfLSWiLqBRU/kGBY9kWNY9ESOCb0Xye9jgTZvfPd8lV2+4k8qE7GP5issDGcp5N6mtiC6ixXn7NPz\n4WUn+f4Ig+0x5bk1NYGci3d6Isew6Ikcw6IncgyLnsgxLHoix4Tee1/tc21tG9tQ1obLLldZ89NP\nWfdvCaBNLogVRHcYbvb+TTr0WA3/dbZ9GOxL15T1vEEWO2brYegAgI/qfR2Xd3oix7DoiRzDoidy\nDIueyDGhd+StaNBLVSdbJ8XrT6S9Hp/R7zP9J9Z8cLblTP9sW0KaOqotzKxhuDn121R2XNtK67a1\nZ32ssqumZ9b1NFd7Xw2qO3inJ3IMi57IMSx6Isew6IkckxGzMtYl6Ygrzs72tH+r0SvhdMcYSzas\nTj/3v6MoGiughKUppPO0tzWq7JbLH1bZlCkfeT7m+c/qjtq87jUrpdr26VGhbXuCGSnKOz2RY1j0\nRI7psuhFZISIvCMiG0TkExG5LZEPEJFKEakSkaUiUhx8c4nILy93+lYAPzfGnAbgOwBuEZFTAcwC\nUGmMKQewLPGaiDJclx15xpgaADWJjxtEZBOAYQCmAKhIbPY4gOXoYeGv/tnX1nzSo95GSA3xuUpM\n4WE9JvD39+31tO9VczJrFFeYfjBJd+Vdd91fqKyoKPP+iswblVnLgTet8fb9lgrd+mqISBmAswC8\nD2CIMSaW+FQMgLvf/UQR4rnoRaQvgOcB3G6MOeopfmOMAeDvfTMiCoWnoheRXMQLfqExZnEijolI\nSeLzpQCSTPNBRJnES++9AJgPYKMxpuOIiCUAZiQ+ngFgced9iSjzeOkBuwDAXwFYJyJrEtkvANwH\nYJGI3ACgGsDUnjaisNC+skjDE3pUXN/r9ai4vh5H7gXhzg/tnZAPnDso
5JZ0X21bm8pWDtaPPgPA\nvHH9VDZ7lN4uEzvtrNK4mo1N255DoZ3LS+/9/yH5bwSTUtscIgpaRH4sE1GqsOiJHMOiJ3IMi57I\nMRnxPH0ypaX6eeKtv7P0uv40hMYkUfGifS6A+8/wNllnawBjmt4WvQLKyrP0l3rV1SfonZP0atsG\nrZ5e8oQlzehvqYzVVhte7z3v9ESOYdETOYZFT+QYFj2RYzKi16W2NtkaN9q3y5tVtmHmHXrDh37v\np0m+vfJrPTz39YEDVPbwsGEqq7nD7wSc4TzlfPzx6Rv+3Ns0rKoN7Vy80xM5hkVP5BgWPZFjWPRE\njsmIjrymJu8deTanXGY55kO+DhmI0y3ZC1eG3oyUyc/PrGfSo6Jtj+6MbjsQzGo2NrzTEzmGRU/k\nGBY9kWNY9ESOyYiOvMZGf4+XZme/a0lPs2QbfJ3Hr6yamMp2jNCj5zjOrfcwLbqTev9rX6WhJd/g\nnZ7IMSx6Isew6Ikcw6InckxGdOTFYt7mk0tOj3CaOubXKlu08a4k+9f4PH/PSRZHtWWyH3+yT2XX\nP+Vv9NwCy4i8+3wdsXt4pydyDIueyDEseiLHsOiJHMOiJ3JMRvTeHzjgt/e+ydNWT5wyx5pfv/lv\nfJ6fMtc/WNMr5ut3d65CgcpGbM1LeYv2Hfb7/e4P7/REjmHREzmGRU/kGBY9kWMyoiPvpZcarfmj\nj+rMvpLyHk/neSXLvnLM4m8/orIXtt7m6ZiUHh98MFNlEyZM8bz/zVsfTmVzuuXter2UeJh4pydy\nDIueyDEseiLHsOiJHJMRHXnJPPjgfpXdeWexZcuDvs6T0+c4ldmm6uST76mkl+0GgI8/vlplI0f+\nSGUTJuivWSY6YBl999mhQ2loyTd4pydyDIueyDHHLHoRyReR90VkrYhsFJG5iXyAiFSKSJWILBUR\n2+/cRJSBjln0xphDACYaY84EcAaAiSJyIYBZACqNMeUAliVeE1EEdNmRZ4w58txqHuKLr+wDMAVA\nRSJ/HMByBFD4Dz2kJyUcMUKv/zJtWqrPDDx19U6V/WTrUJXJ2tSfOyoaG/XqLdu3T1VZnz5/qbKy\nsjLrMceN892soxxO82OsnzfrSTDTrcu/6UUkS0TWAogBeMcYswHAEGPMkTWaYgD02kxElJG83Onb\nAZwpIv0BvCkiEzt93oiIv8XoiCg0nnvvjTEHALwKYDyAmIiUAICIlALYHUzziCjVuuq9H3SkZ15E\nCgBcCmANgCUAZiQ2mwFgcZCNJKLU6erX+1IAj4tIFuI/IBYaY5aJyBoAi0TkBgDVAHTvTUDuuWev\nyoYPT/3Awhtv7G9J9SPATZO/5fmYfezPBUeCOaz/glu9Wq/LUlFRobJ0qq2ttebev2r+fNnqbzWc\nIByzWowx6wGcbcn3ApgUVKOIKDgckUfkGBY9kWNY9ESOYdETOSajn6e3aWjQQz/vvVcP18VJ/s6T\nrUf7WrWPsudZX1iOGYHe+zP32oeNXvxPX6qsYlVm9dTb7Nixw5qXh3T+WEtrSGfyjnd6Isew6Ikc\nw6IncgyLnsgxkevIs1m7Vnc+nXGi7vCLjyZOrf7/YV+dZ+00y3/tLh1lFfr7EpTv0pOCzn3kgMry\n/HYi9ivyt3+atHzwQVrPv6KxIa3nt+GdnsgxLHoix7DoiRzDoidyTK/oyLNZ99x4lY378ZrQzn/m\ns20qW778XJVN/qPe95YNG/2dPAIj/8IyNoCOvP/+2r40+tyYpac2A/FOT+QYFj2RY1j0RI5h0RM5\nptd25Nns377UmhcPvyyU848f/77Kcn9p+RIUFITQGkfs1CsVJWNbvOFfdun9n9yrJ2eNEt7piRzD\noidyDIueyDEseiLHsOiJHONU7/22lTOt+Z7Bz4Vy/n799M/YfA6ZTZmVK1eq7Or169PQkszGOz2R\nY1j0RI5h0RM5hkVP5BinOvKSadj9
ocoOHixTWUEBO92CtmXLFpW98cYbKpszZ04YzemVeKcncgyL\nnsgxLHoix7DoiRzDjrwk7r77a5U98MDxKvO6pLUrWlpaVFZZWamypUvtcxu8+OKLKmtr05OMUs/x\nTk/kGBY9kWNY9ESOYdETOUaMsU0HmKKDiwR38Ayxc+coX/t/9D3dE1ien+/rmEH49927VfbbWp1R\n5jDGWIeQ8k5P5BgWPZFjPBW9iGSLyBoReTnxeoCIVIpIlYgsFZHiYJtJRKni9U5/O4CN+GY9gFkA\nKo0x5QCWJV4TUQR0OSJPRIYDuBLAvQD+LhFPAVCR+PhxAMvhaOGfcsqX1nzz5pEht+QbLZbO2bfq\n6lR25/avVHY4kBZRJvFyp58H4C4A7R2yIcaYWOLjGIAhqW4YEQXjmEUvIj8EsNsYswaAtfvfxN/z\n6/VvzRH1Fl39en8+gCkiciWAfABFIrIQQExESowxNSJSCoBv2BJFxDHv9MaYu40xI4wxowBcC+Bt\nY8x1AJYAmJHYbAaAxcE2k4hSpbvv0x/5Nf4+AJeKSBWASxKviSgCPD9Pb4x5F8C7iY/3ApgUVKOi\npK7O3t+9aZN+rtymsb2Pr/N/3tyssp9uq1bZV62tvs5DvQdH5BE5hkVP5BgWPZFjWPREjuHz9ES9\nFJ+nJyIALHoi57DoiRzDoidyDIueyDGB9t4TUebhnZ7IMSx6IscEXvQiMllENovIZyIyM+jzpZqI\n/EFEYiKyvkMW2dmARWSEiLwjIhtE5BMRuS2RR+6aRCRfRN4XkbUislFE5ibyyF1LR0HPPh1o0YtI\nNoDfApgMYAyA6SJyapDnDMACxNvfUZRnA24F8HNjzGkAvgPglsTXJHLXZIw5BGCiMeZMAGcAmCgi\nFyKC19JJsLNPG2MC+wfguwDe6PB6FoBZQZ4zoOsoA7C+w+vNiE8OCgAlADanu40+rm0x4nMjRPqa\nABQC+BDAaVG+FgDDAbwFYCKAlxNZSq8n6F/vhwHoOM/y9kQWdb1iNmARKQNwFoD3EdFrEpEsEVmL\neJvfMcZsQESvJSHw2aeDLvpe/36gif/4jdx1ikhfAM8DuN0YU9/xc1G6JmNMu4n/ej8cwEUiMrHT\n5yNzLWHNPh100e8AMKLD6xGI3+2jLiYiJQAQxdmARSQX8YJfaIw5MqlppK/JGHMAwKsAxiO613Jk\n9ukvADwN4JKOs08DqbmeoIt+NYCTRKRMRPIATEN8Jt2oi+xswCIiAOYD2GiMebjDpyJ3TSIy6EhP\ntogUALgUwBpE8FqAEGefDqFj4goAnwLYAuAX6e4o6UH7nwawE0AL4v0Tfw1gAOKdLVUAlgIoTnc7\nu3E9FyL+9+JaxAtkDeLvTkTumgCcDuDPiWtZB+CuRB65a7FcWwWAJUFcD4fhEjmGI/KIHMOiJ3IM\ni57IMSx6Isew6Ikcw6IncgyLnsgxLHoix/w/gjkXKHduy7MAAAAASUVORK5CYII=\n", 239 | "text/plain": [ 240 | "" 241 | ] 242 | }, 243 | "metadata": {}, 244 | "output_type": "display_data" 245 | } 246 | ], 247 | "source": [ 248 | "# Evaluate output - each time will be slightly different due to the stochastic pooling\n", 249 | "outim = output.eval({l_in.input_var: im})\n", 250 | "outim = outim[0]\n", 251 | "outim = np.rollaxis(np.rollaxis(outim, 2), 2)\n", 252 | "plt.imshow(outim, interpolation='nearest')" 253 | ] 254 | }, 255 | { 256 | "cell_type": 
"code", 257 | "execution_count": 88, 258 | "metadata": { 259 | "collapsed": false 260 | }, 261 | "outputs": [ 262 | { 263 | "data": { 264 | "text/plain": [ 265 | "" 266 | ] 267 | }, 268 | "execution_count": 88, 269 | "metadata": {}, 270 | "output_type": "execute_result" 271 | }, 272 | { 273 | "data": { 274 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAP0AAAD+CAYAAADxoQNSAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAFBZJREFUeJzt3Xt0VWV6BvDnTQi5ghAuSZQ4cVRUENGiqIMjhkIXomJX\ndbCu1kGrtGOZkaV1lkjHTqdLvPSPwnS0Y7UMReo4S8t9AAcGYZxaYCFCgYSLoGEUzQkIBEIu5PL1\nj3OyjLxfyE7O3vucne/5reVanOfsc/a3wffs5D3f3p8YY0BE7shI9QCIKFwseiLHsOiJHMOiJ3IM\ni57IMSx6IsckVfQiMllE9onIRyLylF+DIqLgSE+/pxeRTAD7AUwEcATANgD3G2P2+jc8IvJbMmf6\nsQAOGmOqjDHNAH4F4G5/hkVEQemTxGsvAvBph8efAbix4wYiwul+RClijBFbnkzRs6DTwMDSSSrL\nHzpWZX1zi1RWUBT/jK6ufA3FI2YAACSjr+d9N536WGWHt85WWcPJjzy/JwUvmR/vjwAo7fC4FPGz\nPRGlsWSK/gMAl4tImYj0BXAfgJX+DIuIgtLj7j0AiMjtAOYDyASwwBjz/DnP96pfAWYXlwAADjc1\n4RvZ2QCAUbm51m2vz8sLZUzHLp6rsr8tuLhb71FX8wEKhl7vy3jaWptUdubYTpU1nf7E+vravIUq\naz58Rr/+M53R1wXxOz2MMWsBrE3mPaKoveB7C78KnqKBM/KIHMOiJ3IMi57IMUk18rp885AaeY88\n8ojKSkpKVDZq1Cjr64uLi1XW1tamsuN/9bDKRnbSyEulhsJHVTa/+GaV7YC1z5OU5itqVNZYqpt2\nfYrsjTzIBypqa2xVWc0r+1V2auMXHkbojs4aeTzTEzmGRU/kGBY9kWNY9ESOYdETOSb07v2zzz6r\ntrv4Yvu0UVtX3bZt//79ezK8bjtw4IDKSmZ+P5R9d0dD240qm9+vTGUfDtJ/l6cGHLe+Z99plm57\nRkW3xxak429XqezYfx0KfyBpgt17IgLAoidyDoueyDEseiLHBN7I6z/xwq9l035wh9puzqinra/P\nyckJZFx+ev/99615fv7LKvvlj29T2aFv3KWyeonGZ3H9/WtUJpnvpmAkcQMt2ZSdt1q3zc/P9/Se\n27dvV9ny5cut2zY0NHh6z7CwkUdEAFj0RM5h0RM5hkVP5JjQG3nFP7hKb9fJ6+f3fVJlo0eP9mNo\nXaqvr1dZXt4Ky5Zvd/IOugnZ3HybyhYs0I28FXuj+1l85t7HVZaRk+n59d+Evj/BFbhUZfcW3quy\nSy/V2wUhFotZ81WrVqlszRrd7NyyZYvvY7JhI4+IALDoiZzDoidyDIueyDGBN/Lyrh30tWzYT671\nfT+3v3u1yh58cJvKioqicuPEwSqpqppl3fLReSN7vJcb25pV9qeHV1u3LavXN6LMlD3edpTh/dxS\n87ReAPOb48erTMT/m3qm0smTJ1U2ZswY67ZeZ/6xkUdEAFj0RM5h0RM5hkVP5JikVq31oq1ON4v8\n9tRTT3ncclyg4/BPoyVbZt1yYMNulQ36w1mVPdeq7+8HHPY+pGT6ZpbVgjozdO5zKquzZDlrdMMx\nKyure+NyFM/0RI5h0RM5hkVP5BgWPZFjWPREjg
m8e99a2xT0LjyrqHjVmo8c+dcB7E1/nhpzt8o+\n/lhPIV648DqV/d9p+1766cvPgVLLdOMqW/c+uhqn6BusNi5dorJ+/fqFMZykvffeeypbPmyYddvp\nVVUqq272/i0Zz/REjmHREzmGRU/kGBY9kWNCX6p6+Io/9n0/m25YnNTrbX8HM2dOU9mpUzNU9uWg\ncG7UGYRBluznlQ+EPg6/NJuxKps72n7t+ShLds8uPaU5lTqrzBv2VqrstGWqM6+nJyIALHoi53RZ\n9CLyCxGJicjuDlmhiKwXkQMisk5EBgQ7TCLyi5cz/UIAk8/JZgNYb4wZDmBD4jERRUCXM/KMMb8X\nkbJz4qkA2u9WuAjAJkS48G03WRwxQt+M8b9v1A3DnMV6Rl38Pb2v6pIqX1qyaSNet2/78VKVzR1+\nSGV1e4aobP8g3TbbO3CEdT81ffKseU/l7XrNmv8j9Ay4dPOhZZUlwN60646e/k5fZIxpX9snBqAo\nqVEQUWiSbuSZ+PddwX3vR0S+6mnRx0SkGABEpARAjX9DIqIg9bToVwKYnvjzdADL/RkOEQWty0ae\niLyJeNNusIh8CuAfALwA4C0ReRhAFQA9fS3iMjyuytL4gP2mnPW7blLZoF16eeUgDGrRs9CuP1Gh\nsgdr3lBZphzzvJ9pLZaZkFd6fnkoRp3UK/MAACLwJXNnjbxkeene39/JUxN9HgsRhYAz8ogcw6In\ncgyLnsgxLHoixwR+Y8yoys/PT+r1eddsUVnzVj21NytXT1u1zXUadqrKup9HD/1SZZdn7+tyfACS\nW6oqIoa2ef82It0cbQlmSTie6Ykcw6IncgyLnsgxLHoix4TeyFs1J6bDMvu27z48VGXzMqPbfWqZ\n8aLKnnxMX5M+rqDA+5tmJzOi3u+a3LpOnrEtDZRetgY0DZdneiLHsOiJHMOiJ3IMi57IMekxI6/K\nHk94Rt+QZ4Jtw/V+DiauT59w/mqGhLQfV12dk5PqIfTYvgb76jzJ4pmeyDEseiLHsOiJHMOiJ3JM\nr+girV69WmV33HFHUu+Zl+fvSiuUGpmW1YtcxzM9kWNY9ESOYdETOYZFT+SYXtHIy3x2rsreOXhQ\nZZNnzQpjOETdVp/k8tPdwTM9kWNY9ESOYdETOYZFT+QYFj2RY3pF9952TfrwX+upuU3f+5719dnZ\nvLskBaO2tVVlvxvdqLKf3TBYv/jJIEbEMz2Rc1j0RI5h0RM5hkVP5Jhe0cirN96mMJ698y5rXpmv\nr51fZPs4fLpfd4ZFaeBvWj+1P/EtvXrS4Zt1ljU0nBtrhnn7Tp7piRzDoidyDIueyDEseiLHhN7I\nO2mZoTQgMzOp9zzdmty1yJed0UsC/3um/jx84pRutxzpn5XUvunrrszYpLLy8vdVNnXqdk/vd9u2\n6z3vO5X/ki0nzoa2L57piRzDoidyTJdFLyKlIrJRRCpEZI+IPJbIC0VkvYgcEJF1IjIg+OESUbK8\nnOmbATxujBkJ4CYAM0XkKgCzAaw3xgwHsCHxmIjSXJeNPGNMNYDqxJ/rRGQvgIsATAUwPrHZIgCb\n4KHw9zbq5Xdvzi/wPGCb/Az/f0vJszQHX3nhuMrueq7I932nI2ur1DITMrf2gMrKr/9fnVmacwBw\n9dUnuju0XqH5C91MDkq3qkVEygBcB2ArgCJjTCzxVAyAG//3E0Wc56IXkQIASwDMMsac7vicMcYA\nMD6PjYgC4KnoRSQL8YJfbIxZnohjIlKceL4EQE0wQyQiP3np3guABQAqjTHzOzy1EsD0xJ+nA1h+\n7muJKP14mZE3DsBfAtglIjsS2dMAXgDwlog8DKAKwDQvOzQ/1g2Lumv0LL04fclrwXf1vKnirNTN\npVo1J2bNo9DgW1l4UmXzRtsvH37ogj9T2dqZ9/o+pmSYCP+C2VCh/y2C4qV7/z/o/CeCif4Oh4iC\nxhl5RI5h0R
M5hkVP5BgWPZFjQr+evr5et1hLSjobhuUa4/U6q7lFVHZhCjv6gL2rP06OqOytE/pz\nd0J/3UHfOrLFup/ffvcylWXkeL0/gf6GobMJ0b86/XuVzfS4l7Ds2xfeNel+q9+jp3gHhWd6Isew\n6Ikcw6IncgyLnsgxoTfyzpzxf65k4d8/oUN92T7w81d833d3/KhwlMqWFurtnv87703IsD61TVNn\nU6XTx9GjljA39GH0yNlPzoS2L57piRzDoidyDIueyDEseiLHpGBGXnKr0dhc+SeW/azwfTdJ0208\nYOmU0IfRI22N6d/Iq67uq8NLwh9HT7TUcoUbIgoIi57IMSx6Isew6Ikc0ytm5GVm/k5lL72qP8++\n7/ueuyejWl9ue6RUX96a3MLdwYhCI+/YsWwdpmEjz5z1v5ndHTzTEzmGRU/kGBY9kWNY9ESOCb2R\nF4sF0RBqUsl7V/xEZfmWpZUB4KG90615GCRD398vHeWcSP/7z+1qtczI60SuZTmcOy2rzEx5T6/I\nNPgz/8vmCt/fsXM80xM5hkVP5BgWPZFjWPREjmHREzkm9O59bW0Q3XvdYbVNZV0r9s+4hzDAkoa3\nXngU5DfYV9hJJ3df9pjKHlvwz9ZtSw957fSHXiKB45meyDEseiLHsOiJHMOiJ3JM6F2KFSv0Sh4Z\nGbalSYCXXhqiMrHOWv1SJbbWnN4qbtqIn6mspemEypYe0o0iVwwOaRquMXrp7W3b7lHZ2LFTVXbL\nv1jukArvU3NdwTM9kWNY9ESOYdETOYZFT+SYtJhutGxZned83rzBKrvvPv3aIZYpeV92YzJgn+yB\nKnvjns9V9hdLLvT+phG2x9LIO3NG/+9z9OhilZWVlXnej61RO3as55dHwo56PYM0TDzTEzmGRU/k\nmPMWvYjkiMhWEdkpIpUi8nwiLxSR9SJyQETWiYjta3EiSkPnLXpjTCOAcmPMtQCuAVAuIrcAmA1g\nvTFmOIANicdEFAFdNvKMMe1dh76IX7F6AsBUAOMT+SIAmxBS4T/zzHGVDRsWTj9yxowLVHam0r6t\n7NVZtn06YVoxrfYViMqqdcMyP3+JJfN9SL3OH5pTe5PRLn+nF5EMEdkJIAZgozGmAkCRMaZ9jaYY\nAL02ExGlJS9n+jYA14rIBQB+IyLl5zxvRMT/BeqIKBCeu/fGmFoAqwGMARATkWIAEJESADXBDI+I\n/NZV935we2deRHIBTAKwA8BKAO0rREwHsDzIQRKRf7r68b4EwCIRyUD8A2KxMWaDiOwA8JaIPAyg\nCsC0YIf5lbo6vUrNd75TrbIRd+plobNyk2s9ZFpm+fX/V32pMACcnJ6nX/+pbuRl5PnfhLz6aKPK\npmzWP4x9e4tlaedO/HT8rUmNib6S6hl55/0/zhizG8AfWfLjACYGNSgiCg5n5BE5hkVP5BgWPZFj\nWPREjkmL6+mDUF35qspKxzwT2v5zX9Vd/apJWb7v5/Elh1U2YXuOZUvvnXqbfM6v9U1VU1NK988z\nPZFjWPREjmHREzmGRU/kmF7byDv+8VKV5fbXq6cAwODL7/d9/9nZesrtyX+6TmWT39CvnVnRyUX6\nVramnf+Ki4tD2U+6iTU3q2xt7Snrtr+u1asi7W7UU6JTjWd6Isew6Ikcw6IncgyLnsgxvbaRZ3Nk\n5zxrHkQjz2bMmK0qy/qR5Z8gNzeE0XRPVGfkdXYft811evWkDad1g25pba3K6lu7sVRSGuKZnsgx\nLHoix7DoiRzDoidyjFONPEDPrgKAvaunqKxvwTDf996vn/6MzYnAqjdA+s3I27x5s8ruqdiTgpFE\nD8/0RI5h0RM5hkVP5BgWPZFjWPREjnGse293tv4LT1lDQ5nKcnOj0X1PVklJia/vd/DgQWs+Y8YM\nle3fv9/XfbuOZ3oix7DoiRzDoidyDIueyDFs5HXDnDnHVDZv3pAUjCR9rV69
WmXr1q1T2bJly6yv\nb2lp8X1M9HU80xM5hkVP5BgWPZFjWPREjhFjOrt1oA9vLhLcm6eJzz+/JKnXb/92psqG54Szao1N\nfVubNf+PCeUqKygoUNmLL77o+5ioZ4wx1umiPNMTOYZFT+QYFj2RY1j0RI7hjLwkvfaaXgEFAGbM\nuCDkkfhjwTE96xAAXn755ZBHQkHhmZ7IMSx6Isd4KnoRyRSRHSKyKvG4UETWi8gBEVknIgOCHSYR\n+cXrmX4WgEp8tQjobADrjTHDAWxIPCaiCOhyRp6IDAPwnwDmAnjCGHOXiOwDMN4YExORYgCbjDFX\nWl7b62fkdebtt/WKMOPG6SWog5iRt8ayvPI7p3T2m1N6aWbqPZKZkTcPwA8BdJyfWWSMiSX+HANQ\nlNzwiCgs5y16EbkTQI0xZgcA66eGif+o4OwZnShquvqe/lsAporIFAA5APqLyGIAMREpNsZUi0gJ\ngJqgB0pE/jjvmd4YM8cYU2qMuQTAnwN41xjzAICVAKYnNpsOYHmwwyQiv3T3e/r2H+NfADBJRA4A\nmJB4TEQRwOvpA3LRRfo3p9df1/3Oo49kq+y6vDzP+/lpjf7N6t+O8rct4vX0RJTAoidyDIueyDEs\neiLHsJFH1EuxkUdEAFj0RM5h0RM5hkVP5BgWPZFjAu3eE1H64ZmeyDEseiLHBF70IjJZRPaJyEci\n8lTQ+/ObiPxCRGIisrtDFtm7AYtIqYhsFJEKEdkjIo8l8sgdk4jkiMhWEdkpIpUi8nwij9yxdBT0\n3acDLXoRyQTwEoDJAEYAuF9ErgpynwFYiPj4O4ry3YCbATxujBkJ4CYAMxP/JpE7JmNMI4ByY8y1\nAK4BUC4ityCCx3KOYO8+bYwJ7D8ANwN4p8Pj2QBmB7nPgI6jDMDuDo/3IX5zUAAoBrAv1WNM4tiW\nA5gY9WMCkAdgG4CRUT4WAMMA/BZAOYBViczX4wn6x/uLAHza4fFniSzqesXdgEWkDMB1ALYiosck\nIhkishPxMW80xlQgoseSEPjdp4Mu+l7/faCJf/xG7jhFpADAEgCzjDGnOz4XpWMyxrSZ+I/3wwDc\nKiLl5zwfmWMJ6+7TQRf9EQClHR6XIn62j7r2RT4QxbsBi0gW4gW/2BjTflPTSB+TMaYWwGoAYxDd\nY2m/+/QnAN4EMKHj3acBf44n6KL/AMDlIlImIn0B3If4nXSjLrJ3AxYRAbAAQKUxZn6HpyJ3TCIy\nuL2TLSK5ACYB2IEIHgsQ4t2nQ2hM3A5gP4CDAJ5OdaOkB+N/E8DnAM4i3p94CEAh4s2WAwDWARiQ\n6nF243huQfz3xZ2IF8gOxL+diNwxARgF4MPEsewC8MNEHrljsRzbeAArgzgeTsMlcgxn5BE5hkVP\n5BgWPZFjWPREjmHREzmGRU/kGBY9kWNY9ESO+X+XzhVmCmkIqQAAAABJRU5ErkJggg==\n", 275 | "text/plain": [ 276 | "" 277 | ] 278 | }, 279 | "metadata": {}, 280 | "output_type": "display_data" 281 | } 282 | ], 283 | "source": [ 284 | "outim = output.eval({l_in.input_var: im})\n", 285 | "outim = outim[0]\n", 286 | "outim = np.rollaxis(np.rollaxis(outim, 2), 2)\n", 287 | "plt.imshow(outim, interpolation='nearest')" 288 | ] 289 | } 290 | ], 291 | "metadata": { 292 | "kernelspec": { 293 | "display_name": "Python 2", 294 | "language": "python", 295 | "name": "python2" 296 | }, 297 | 
"language_info": { 298 | "codemirror_mode": { 299 | "name": "ipython", 300 | "version": 2 301 | }, 302 | "file_extension": ".py", 303 | "mimetype": "text/x-python", 304 | "name": "python", 305 | "nbconvert_exporter": "python", 306 | "pygments_lexer": "ipython2", 307 | "version": "2.7.6" 308 | } 309 | }, 310 | "nbformat": 4, 311 | "nbformat_minor": 0 312 | } 313 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Neural networks with Theano and Lasagne 2 | ================== 3 | 4 | Syllabus 5 | ------- 6 | 7 | 1. Theano basics 8 | - Symbolic variables/tensors, expressions 9 | - Functions, shared variables and updates 10 | 2. Overview of Lasagne 11 | - Layer classes and building a network 12 | - Objectives, optimizers, and training 13 | 3. Convolutional neural networks 14 | - Image classification 15 | - Fine-tuning a pretrained network 16 | - Style transfer (“Neural Art”) 17 | 4. Recurrent neural networks 18 | - Language model (text generation) 19 | - CNN + RNN (image captioning) 20 | 5. Extending Lasagne 21 | - Defining custom Layers 22 | --------------------------------------------------------------------------------