├── .gitignore ├── .travis.yml ├── 00_multiply.ipynb ├── 00_multiply.py ├── 01_linear_regression.ipynb ├── 01_linear_regression.py ├── 02_logistic_regression.ipynb ├── 02_logistic_regression.py ├── 03_net.ipynb ├── 03_net.py ├── 04_modern_net.ipynb ├── 04_modern_net.py ├── 05_convolutional_net.ipynb ├── 05_convolutional_net.py ├── 06_autoencoder.ipynb ├── 06_autoencoder.py ├── 07_lstm.ipynb ├── 07_lstm.py ├── 08_word2vec.ipynb ├── 08_word2vec.py ├── 09_tensorboard.ipynb ├── 09_tensorboard.py ├── 10_save_restore_net.ipynb ├── 10_save_restore_net.py ├── 11_gan.ipynb ├── 11_gan.py ├── README.md └── nb2script /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | 25 | # PyInstaller 26 | # Usually these files are written by a python script from a template 27 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 28 | *.manifest 29 | *.spec 30 | 31 | # Installer logs 32 | pip-log.txt 33 | pip-delete-this-directory.txt 34 | 35 | # Unit test / coverage reports 36 | htmlcov/ 37 | .tox/ 38 | .coverage 39 | .cache 40 | nosetests.xml 41 | coverage.xml 42 | 43 | # Translations 44 | *.mo 45 | *.pot 46 | 47 | # Django stuff: 48 | *.log 49 | 50 | # Sphinx documentation 51 | docs/_build/ 52 | 53 | # PyBuilder 54 | target/ 55 | 56 | # DataDir 57 | MNIST_data/ 58 | 59 | # OSX stuff 60 | .DS_Store 61 | /in_vis.png 62 | /word2vec.png 63 | /pred_vis.png 64 | 65 | # pycharm stuff: 66 | .idea/ 67 | 68 | #tensorboard logs 69 | logs/ 70 | 71 | #ipython notebook 72 | .ipynb_checkpoints/ -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # code below is taken from https://github.com/fchollet/keras/blob/master/.travis.yml 2 | sudo: required 3 | dist: trusty 4 | language: python 5 | python: # Only two versions for now 6 | - "2.7" 7 | - "3.4" 8 | # command to install dependencies 9 | install: 10 | - pip install numpy 11 | - pip install matplotlib 12 | # install TensorFlow from https://storage.googleapis.com/tensorflow/ 13 | - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then 14 | pip install https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-1.0.0a0-cp27-none-linux_x86_64.whl; 15 | elif [[ "$TRAVIS_PYTHON_VERSION" == "3.4" ]]; then 16 | pip install https://storage.googleapis.com/tensorflow/linux/cpu/tensorflow-1.0.0a0-cp34-cp34m-linux_x86_64.whl; 17 | fi 18 | script: 19 | - sed -i -- 's/range(100)/range(1)/g' ??_*.py # change range to 1 for quick testing 20 | - python 03_net.py # run this first to download MNIST file 21 | # run all python files in parallel, http://stackoverflow.com/questions/5015316 22 | - ls ??_*.py|xargs -n 1 -P 3 python 23 | -------------------------------------------------------------------------------- /00_multiply.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import tensorflow as tf" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": 
2, 17 | "metadata": { 18 | "collapsed": true 19 | }, 20 | "outputs": [], 21 | "source": [ 22 | "a = tf.placeholder(\"float\") # Create a symbolic variable 'a'\n", 23 | "b = tf.placeholder(\"float\") # Create a symbolic variable 'b'\n", 24 | "\n", 25 | "y = tf.multiply(a, b) # multiply the symbolic variables" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 3, 31 | "metadata": { 32 | "collapsed": false 33 | }, 34 | "outputs": [ 35 | { 36 | "name": "stdout", 37 | "output_type": "stream", 38 | "text": [ 39 | "2.000000 should equal 2.0\n", 40 | "9.000000 should equal 9.0\n" 41 | ] 42 | } 43 | ], 44 | "source": [ 45 | "with tf.Session() as sess: # create a session to evaluate the symbolic expressions\n", 46 | " print(\"%f should equal 2.0\" % sess.run(y, feed_dict={a: 1, b: 2})) # eval expressions with parameters for a and b\n", 47 | " print(\"%f should equal 9.0\" % sess.run(y, feed_dict={a: 3, b: 3}))" 48 | ] 49 | } 50 | ], 51 | "metadata": { 52 | "kernelspec": { 53 | "display_name": "Python 2", 54 | "language": "python", 55 | "name": "python2" 56 | }, 57 | "language_info": { 58 | "codemirror_mode": { 59 | "name": "ipython", 60 | "version": 2 61 | }, 62 | "file_extension": ".py", 63 | "mimetype": "text/x-python", 64 | "name": "python", 65 | "nbconvert_exporter": "python", 66 | "pygments_lexer": "ipython2", 67 | "version": "2.7.6" 68 | } 69 | }, 70 | "nbformat": 4, 71 | "nbformat_minor": 1 72 | } 73 | -------------------------------------------------------------------------------- /00_multiply.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import tensorflow as tf 4 | 5 | a = tf.placeholder("float") # Create a symbolic variable 'a' 6 | b = tf.placeholder("float") # Create a symbolic variable 'b' 7 | 8 | y = tf.multiply(a, b) # multiply the symbolic variables 9 | 10 | with tf.Session() as sess: # create a session to evaluate the symbolic expressions 11 | print("%f should equal 2.0" % sess.run(y, feed_dict={a: 1, b: 2})) # eval expressions with parameters for a and b 12 | print("%f should equal 9.0" % sess.run(y, feed_dict={a: 3, b: 3})) 13 | -------------------------------------------------------------------------------- /01_linear_regression.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": false 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import tensorflow as tf\n", 12 | "import numpy as np" 13 | ] 14 | }, 15 | { 16 | "cell_type": "code", 17 | "execution_count": 2, 18 | "metadata": { 19 | "collapsed": true 20 | }, 21 | "outputs": [], 22 | "source": [ 23 | "trX = np.linspace(-1, 1, 101)\n", 24 | "trY = 2 * trX + np.random.randn(*trX.shape) * 0.33 # create a y value which is approximately linear but with some random noise" 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": 3, 30 | "metadata": { 31 | "collapsed": true 32 | }, 33 | "outputs": [], 34 | "source": [ 35 | "X = tf.placeholder(\"float\") # create symbolic variables\n", 36 | "Y = tf.placeholder(\"float\")\n", 37 | "\n", 38 | "def model(X, w):\n", 39 | " return tf.multiply(X, w) # lr is just X*w so this model line is pretty simple\n", 40 | "\n", 41 | "w = tf.Variable(0.0, name=\"weights\") # create a shared variable (like theano.shared) for the weight matrix\n", 42 | "y_model = model(X, w)\n", 43 | "\n", 44 | "cost = tf.square(Y - y_model) # use square error for cost 
function\n", 45 | "\n", 46 | "train_op = tf.train.GradientDescentOptimizer(0.01).minimize(cost) # construct an optimizer to minimize cost and fit line to my data" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": 5, 52 | "metadata": { 53 | "collapsed": false 54 | }, 55 | "outputs": [ 56 | { 57 | "name": "stdout", 58 | "output_type": "stream", 59 | "text": [ 60 | "2.00863\n" 61 | ] 62 | } 63 | ], 64 | "source": [ 65 | "# Launch the graph in a session\n", 66 | "with tf.Session() as sess:\n", 67 | " # you need to initialize variables (in this case just variable W)\n", 68 | " tf.global_variables_initializer().run()\n", 69 | "\n", 70 | " for i in range(100):\n", 71 | " for (x, y) in zip(trX, trY):\n", 72 | " sess.run(train_op, feed_dict={X: x, Y: y})\n", 73 | "\n", 74 | " print(sess.run(w)) # It should be something around 2" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": null, 80 | "metadata": { 81 | "collapsed": true 82 | }, 83 | "outputs": [], 84 | "source": [] 85 | } 86 | ], 87 | "metadata": { 88 | "kernelspec": { 89 | "display_name": "Python 2", 90 | "language": "python", 91 | "name": "python2" 92 | }, 93 | "language_info": { 94 | "codemirror_mode": { 95 | "name": "ipython", 96 | "version": 2 97 | }, 98 | "file_extension": ".py", 99 | "mimetype": "text/x-python", 100 | "name": "python", 101 | "nbconvert_exporter": "python", 102 | "pygments_lexer": "ipython2", 103 | "version": "2.7.13" 104 | } 105 | }, 106 | "nbformat": 4, 107 | "nbformat_minor": 0 108 | } 109 | -------------------------------------------------------------------------------- /01_linear_regression.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import tensorflow as tf 4 | import numpy as np 5 | 6 | trX = np.linspace(-1, 1, 101) 7 | trY = 2 * trX + np.random.randn(*trX.shape) * 0.33 # create a y value which is approximately linear but with some random noise 8 | 9 | X = tf.placeholder("float") # create symbolic variables 10 | Y = tf.placeholder("float") 11 | 12 | 13 | def model(X, w): 14 | return tf.multiply(X, w) # lr is just X*w so this model line is pretty simple 15 | 16 | 17 | w = tf.Variable(0.0, name="weights") # create a shared variable (like theano.shared) for the weight matrix 18 | y_model = model(X, w) 19 | 20 | cost = tf.square(Y - y_model) # use square error for cost function 21 | 22 | train_op = tf.train.GradientDescentOptimizer(0.01).minimize(cost) # construct an optimizer to minimize cost and fit line to my data 23 | 24 | # Launch the graph in a session 25 | with tf.Session() as sess: 26 | # you need to initialize variables (in this case just variable W) 27 | tf.global_variables_initializer().run() 28 | 29 | for i in range(100): 30 | for (x, y) in zip(trX, trY): 31 | sess.run(train_op, feed_dict={X: x, Y: y}) 32 | 33 | print(sess.run(w)) # It should be something around 2 34 | -------------------------------------------------------------------------------- /02_logistic_regression.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import tensorflow as tf\n", 12 | "import numpy as np\n", 13 | "from tensorflow.examples.tutorials.mnist import input_data" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": null, 19 | "metadata": { 20 | "collapsed": false 21 | }, 22 | "outputs": [], 
23 | "source": [ 24 | "def init_weights(shape):\n", 25 | " return tf.Variable(tf.random_normal(shape, stddev=0.01))\n", 26 | "\n", 27 | "def model(X, w):\n", 28 | " return tf.matmul(X, w) # notice we use the same model as linear regression, this is because there is a baked in cost function which performs softmax and cross entropy\n", 29 | "\n", 30 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)\n", 31 | "trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels" 32 | ] 33 | }, 34 | { 35 | "cell_type": "code", 36 | "execution_count": 3, 37 | "metadata": { 38 | "collapsed": false 39 | }, 40 | "outputs": [], 41 | "source": [ 42 | "X = tf.placeholder(\"float\", [None, 784]) # create symbolic variables\n", 43 | "Y = tf.placeholder(\"float\", [None, 10])\n", 44 | "\n", 45 | "w = init_weights([784, 10]) # like in linear regression, we need a shared variable weight matrix for logistic regression\n", 46 | "\n", 47 | "py_x = model(X, w)\n", 48 | "\n", 49 | "cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y)) # compute mean cross entropy (softmax is applied internally)\n", 50 | "train_op = tf.train.GradientDescentOptimizer(0.05).minimize(cost) # construct optimizer\n", 51 | "predict_op = tf.argmax(py_x, 1) # at predict time, evaluate the argmax of the logistic regression" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "metadata": { 58 | "collapsed": false 59 | }, 60 | "outputs": [], 61 | "source": [ 62 | "# Launch the graph in a session\n", 63 | "with tf.Session() as sess:\n", 64 | " # you need to initialize all variables\n", 65 | " tf.global_variables_initializer().run()\n", 66 | "\n", 67 | " for i in range(100):\n", 68 | " for start, end in zip(range(0, len(trX), 128), range(128, len(trX)+1, 128)):\n", 69 | " sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end]})\n", 70 | " print(i, np.mean(np.argmax(teY, axis=1) ==\n", 71 | " sess.run(predict_op, feed_dict={X: teX})))" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": null, 77 | "metadata": { 78 | "collapsed": true 79 | }, 80 | "outputs": [], 81 | "source": [] 82 | } 83 | ], 84 | "metadata": { 85 | "kernelspec": { 86 | "display_name": "Python 2", 87 | "language": "python", 88 | "name": "python2" 89 | }, 90 | "language_info": { 91 | "codemirror_mode": { 92 | "name": "ipython", 93 | "version": 2 94 | }, 95 | "file_extension": ".py", 96 | "mimetype": "text/x-python", 97 | "name": "python", 98 | "nbconvert_exporter": "python", 99 | "pygments_lexer": "ipython2", 100 | "version": "2.7.13" 101 | } 102 | }, 103 | "nbformat": 4, 104 | "nbformat_minor": 1 105 | } 106 | -------------------------------------------------------------------------------- /02_logistic_regression.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import tensorflow as tf 4 | import numpy as np 5 | from tensorflow.examples.tutorials.mnist import input_data 6 | 7 | 8 | def init_weights(shape): 9 | return tf.Variable(tf.random_normal(shape, stddev=0.01)) 10 | 11 | 12 | def model(X, w): 13 | return tf.matmul(X, w) # notice we use the same model as linear regression, this is because there is a baked in cost function which performs softmax and cross entropy 14 | 15 | 16 | mnist = input_data.read_data_sets("MNIST_data/", one_hot=True) 17 | trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels 18 | 19 | X = 
tf.placeholder("float", [None, 784]) # create symbolic variables 20 | Y = tf.placeholder("float", [None, 10]) 21 | 22 | w = init_weights([784, 10]) # like in linear regression, we need a shared variable weight matrix for logistic regression 23 | 24 | py_x = model(X, w) 25 | 26 | cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y)) # compute mean cross entropy (softmax is applied internally) 27 | train_op = tf.train.GradientDescentOptimizer(0.05).minimize(cost) # construct optimizer 28 | predict_op = tf.argmax(py_x, 1) # at predict time, evaluate the argmax of the logistic regression 29 | 30 | # Launch the graph in a session 31 | with tf.Session() as sess: 32 | # you need to initialize all variables 33 | tf.global_variables_initializer().run() 34 | 35 | for i in range(100): 36 | for start, end in zip(range(0, len(trX), 128), range(128, len(trX)+1, 128)): 37 | sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end]}) 38 | print(i, np.mean(np.argmax(teY, axis=1) == 39 | sess.run(predict_op, feed_dict={X: teX}))) 40 | -------------------------------------------------------------------------------- /03_net.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import tensorflow as tf\n", 12 | "import numpy as np\n", 13 | "from tensorflow.examples.tutorials.mnist import input_data" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": null, 19 | "metadata": { 20 | "collapsed": false 21 | }, 22 | "outputs": [], 23 | "source": [ 24 | "def init_weights(shape):\n", 25 | " return tf.Variable(tf.random_normal(shape, stddev=0.01))\n", 26 | "\n", 27 | "def model(X, w_h, w_o):\n", 28 | " h = tf.nn.sigmoid(tf.matmul(X, w_h)) # this is a basic mlp, think 2 stacked logistic regressions\n", 29 | " return tf.matmul(h, w_o) # note that we dont take the softmax at the end because our cost fn does that for us\n", 30 | "\n", 31 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)\n", 32 | "trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": 5, 38 | "metadata": { 39 | "collapsed": false 40 | }, 41 | "outputs": [], 42 | "source": [ 43 | "X = tf.placeholder(\"float\", [None, 784])\n", 44 | "Y = tf.placeholder(\"float\", [None, 10])\n", 45 | "\n", 46 | "w_h = init_weights([784, 625]) # create symbolic variables\n", 47 | "w_o = init_weights([625, 10])\n", 48 | "\n", 49 | "py_x = model(X, w_h, w_o)\n", 50 | "\n", 51 | "cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y)) # compute costs\n", 52 | "train_op = tf.train.GradientDescentOptimizer(0.05).minimize(cost) # construct an optimizer\n", 53 | "predict_op = tf.argmax(py_x, 1)" 54 | ] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "execution_count": null, 59 | "metadata": { 60 | "collapsed": false 61 | }, 62 | "outputs": [], 63 | "source": [ 64 | "# Launch the graph in a session\n", 65 | "with tf.Session() as sess:\n", 66 | " # you need to initialize all variables\n", 67 | " tf.global_variables_initializer().run()\n", 68 | "\n", 69 | " for i in range(100):\n", 70 | " for start, end in zip(range(0, len(trX), 128), range(128, len(trX)+1, 128)):\n", 71 | " sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end]})\n", 72 | " print(i, 
np.mean(np.argmax(teY, axis=1) ==\n", 73 | " sess.run(predict_op, feed_dict={X: teX})))" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": null, 79 | "metadata": { 80 | "collapsed": true 81 | }, 82 | "outputs": [], 83 | "source": [] 84 | } 85 | ], 86 | "metadata": { 87 | "kernelspec": { 88 | "display_name": "Python 2", 89 | "language": "python", 90 | "name": "python2" 91 | }, 92 | "language_info": { 93 | "codemirror_mode": { 94 | "name": "ipython", 95 | "version": 2 96 | }, 97 | "file_extension": ".py", 98 | "mimetype": "text/x-python", 99 | "name": "python", 100 | "nbconvert_exporter": "python", 101 | "pygments_lexer": "ipython2", 102 | "version": "2.7.13" 103 | } 104 | }, 105 | "nbformat": 4, 106 | "nbformat_minor": 0 107 | } 108 | -------------------------------------------------------------------------------- /03_net.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import tensorflow as tf 4 | import numpy as np 5 | from tensorflow.examples.tutorials.mnist import input_data 6 | 7 | 8 | def init_weights(shape): 9 | return tf.Variable(tf.random_normal(shape, stddev=0.01)) 10 | 11 | 12 | def model(X, w_h, w_o): 13 | h = tf.nn.sigmoid(tf.matmul(X, w_h)) # this is a basic mlp, think 2 stacked logistic regressions 14 | return tf.matmul(h, w_o) # note that we dont take the softmax at the end because our cost fn does that for us 15 | 16 | 17 | mnist = input_data.read_data_sets("MNIST_data/", one_hot=True) 18 | trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels 19 | 20 | X = tf.placeholder("float", [None, 784]) 21 | Y = tf.placeholder("float", [None, 10]) 22 | 23 | w_h = init_weights([784, 625]) # create symbolic variables 24 | w_o = init_weights([625, 10]) 25 | 26 | py_x = model(X, w_h, w_o) 27 | 28 | cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y)) # compute costs 29 | train_op = tf.train.GradientDescentOptimizer(0.05).minimize(cost) # construct an optimizer 30 | predict_op = tf.argmax(py_x, 1) 31 | 32 | # Launch the graph in a session 33 | with tf.Session() as sess: 34 | # you need to initialize all variables 35 | tf.global_variables_initializer().run() 36 | 37 | for i in range(100): 38 | for start, end in zip(range(0, len(trX), 128), range(128, len(trX)+1, 128)): 39 | sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end]}) 40 | print(i, np.mean(np.argmax(teY, axis=1) == 41 | sess.run(predict_op, feed_dict={X: teX}))) 42 | -------------------------------------------------------------------------------- /04_modern_net.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import tensorflow as tf\n", 12 | "import numpy as np\n", 13 | "from tensorflow.examples.tutorials.mnist import input_data" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": null, 19 | "metadata": { 20 | "collapsed": false 21 | }, 22 | "outputs": [], 23 | "source": [ 24 | "def init_weights(shape):\n", 25 | " return tf.Variable(tf.random_normal(shape, stddev=0.01))\n", 26 | "\n", 27 | "def model(X, w_h, w_h2, w_o, p_keep_input, p_keep_hidden): # this network is the same as the previous one except with an extra hidden layer + dropout\n", 28 | " X = tf.nn.dropout(X, p_keep_input)\n", 29 | " h = tf.nn.relu(tf.matmul(X, 
w_h))\n", 30 | "\n", 31 | " h = tf.nn.dropout(h, p_keep_hidden)\n", 32 | " h2 = tf.nn.relu(tf.matmul(h, w_h2))\n", 33 | "\n", 34 | " h2 = tf.nn.dropout(h2, p_keep_hidden)\n", 35 | "\n", 36 | " return tf.matmul(h2, w_o)\n", 37 | "\n", 38 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)\n", 39 | "trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": null, 45 | "metadata": { 46 | "collapsed": false 47 | }, 48 | "outputs": [], 49 | "source": [ 50 | "X = tf.placeholder(\"float\", [None, 784])\n", 51 | "Y = tf.placeholder(\"float\", [None, 10])\n", 52 | "\n", 53 | "w_h = init_weights([784, 625])\n", 54 | "w_h2 = init_weights([625, 625])\n", 55 | "w_o = init_weights([625, 10])\n", 56 | "\n", 57 | "p_keep_input = tf.placeholder(\"float\")\n", 58 | "p_keep_hidden = tf.placeholder(\"float\")\n", 59 | "py_x = model(X, w_h, w_h2, w_o, p_keep_input, p_keep_hidden)\n", 60 | "\n", 61 | "cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y))\n", 62 | "train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(cost)\n", 63 | "predict_op = tf.argmax(py_x, 1)" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": null, 69 | "metadata": { 70 | "collapsed": false 71 | }, 72 | "outputs": [], 73 | "source": [ 74 | "# Launch the graph in a session\n", 75 | "with tf.Session() as sess:\n", 76 | " # you need to initialize all variables\n", 77 | " tf.global_variables_initializer().run()\n", 78 | "\n", 79 | " for i in range(100):\n", 80 | " for start, end in zip(range(0, len(trX), 128), range(128, len(trX)+1, 128)):\n", 81 | " sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end],\n", 82 | " p_keep_input: 0.8, p_keep_hidden: 0.5})\n", 83 | " print(i, np.mean(np.argmax(teY, axis=1) ==\n", 84 | " sess.run(predict_op, feed_dict={X: teX, Y: teY,\n", 85 | " p_keep_input: 1.0,\n", 86 | " p_keep_hidden: 1.0})))" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": null, 92 | "metadata": { 93 | "collapsed": true 94 | }, 95 | "outputs": [], 96 | "source": [] 97 | } 98 | ], 99 | "metadata": { 100 | "kernelspec": { 101 | "display_name": "Python 2", 102 | "language": "python", 103 | "name": "python2" 104 | }, 105 | "language_info": { 106 | "codemirror_mode": { 107 | "name": "ipython", 108 | "version": 2 109 | }, 110 | "file_extension": ".py", 111 | "mimetype": "text/x-python", 112 | "name": "python", 113 | "nbconvert_exporter": "python", 114 | "pygments_lexer": "ipython2", 115 | "version": "2.7.13" 116 | } 117 | }, 118 | "nbformat": 4, 119 | "nbformat_minor": 0 120 | } 121 | -------------------------------------------------------------------------------- /04_modern_net.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import tensorflow as tf 4 | import numpy as np 5 | from tensorflow.examples.tutorials.mnist import input_data 6 | 7 | 8 | def init_weights(shape): 9 | return tf.Variable(tf.random_normal(shape, stddev=0.01)) 10 | 11 | 12 | def model(X, w_h, w_h2, w_o, p_keep_input, p_keep_hidden): # this network is the same as the previous one except with an extra hidden layer + dropout 13 | X = tf.nn.dropout(X, p_keep_input) 14 | h = tf.nn.relu(tf.matmul(X, w_h)) 15 | 16 | h = tf.nn.dropout(h, p_keep_hidden) 17 | h2 = tf.nn.relu(tf.matmul(h, w_h2)) 18 | 19 | h2 = tf.nn.dropout(h2, p_keep_hidden) 20 | 21 | return tf.matmul(h2, w_o) 22 | 23 | 24 | mnist 
= input_data.read_data_sets("MNIST_data/", one_hot=True) 25 | trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels 26 | 27 | X = tf.placeholder("float", [None, 784]) 28 | Y = tf.placeholder("float", [None, 10]) 29 | 30 | w_h = init_weights([784, 625]) 31 | w_h2 = init_weights([625, 625]) 32 | w_o = init_weights([625, 10]) 33 | 34 | p_keep_input = tf.placeholder("float") 35 | p_keep_hidden = tf.placeholder("float") 36 | py_x = model(X, w_h, w_h2, w_o, p_keep_input, p_keep_hidden) 37 | 38 | cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y)) 39 | train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(cost) 40 | predict_op = tf.argmax(py_x, 1) 41 | 42 | # Launch the graph in a session 43 | with tf.Session() as sess: 44 | # you need to initialize all variables 45 | tf.global_variables_initializer().run() 46 | 47 | for i in range(100): 48 | for start, end in zip(range(0, len(trX), 128), range(128, len(trX)+1, 128)): 49 | sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end], 50 | p_keep_input: 0.8, p_keep_hidden: 0.5}) 51 | print(i, np.mean(np.argmax(teY, axis=1) == 52 | sess.run(predict_op, feed_dict={X: teX, 53 | p_keep_input: 1.0, 54 | p_keep_hidden: 1.0}))) 55 | -------------------------------------------------------------------------------- /05_convolutional_net.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import tensorflow as tf\n", 12 | "import numpy as np\n", 13 | "from tensorflow.examples.tutorials.mnist import input_data" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": null, 19 | "metadata": { 20 | "collapsed": false 21 | }, 22 | "outputs": [], 23 | "source": [ 24 | "batch_size = 128\n", 25 | "test_size = 256\n", 26 | "\n", 27 | "def init_weights(shape):\n", 28 | " return tf.Variable(tf.random_normal(shape, stddev=0.01))\n", 29 | "\n", 30 | "def model(X, w, w2, w3, w4, w_o, p_keep_conv, p_keep_hidden):\n", 31 | " l1a = tf.nn.relu(tf.nn.conv2d(X, w, # l1a shape=(?, 28, 28, 32)\n", 32 | " strides=[1, 1, 1, 1], padding='SAME'))\n", 33 | " l1 = tf.nn.max_pool(l1a, ksize=[1, 2, 2, 1], # l1 shape=(?, 14, 14, 32)\n", 34 | " strides=[1, 2, 2, 1], padding='SAME')\n", 35 | " l1 = tf.nn.dropout(l1, p_keep_conv)\n", 36 | "\n", 37 | " l2a = tf.nn.relu(tf.nn.conv2d(l1, w2, # l2a shape=(?, 14, 14, 64)\n", 38 | " strides=[1, 1, 1, 1], padding='SAME'))\n", 39 | " l2 = tf.nn.max_pool(l2a, ksize=[1, 2, 2, 1], # l2 shape=(?, 7, 7, 64)\n", 40 | " strides=[1, 2, 2, 1], padding='SAME')\n", 41 | " l2 = tf.nn.dropout(l2, p_keep_conv)\n", 42 | "\n", 43 | " l3a = tf.nn.relu(tf.nn.conv2d(l2, w3, # l3a shape=(?, 7, 7, 128)\n", 44 | " strides=[1, 1, 1, 1], padding='SAME'))\n", 45 | " l3 = tf.nn.max_pool(l3a, ksize=[1, 2, 2, 1], # l3 shape=(?, 4, 4, 128)\n", 46 | " strides=[1, 2, 2, 1], padding='SAME')\n", 47 | " l3 = tf.reshape(l3, [-1, w4.get_shape().as_list()[0]]) # reshape to (?, 2048)\n", 48 | " l3 = tf.nn.dropout(l3, p_keep_conv)\n", 49 | "\n", 50 | " l4 = tf.nn.relu(tf.matmul(l3, w4))\n", 51 | " l4 = tf.nn.dropout(l4, p_keep_hidden)\n", 52 | "\n", 53 | " pyx = tf.matmul(l4, w_o)\n", 54 | " return pyx\n", 55 | "\n", 56 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)\n", 57 | "trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, 
mnist.test.labels\n", 58 | "trX = trX.reshape(-1, 28, 28, 1) # 28x28x1 input img\n", 59 | "teX = teX.reshape(-1, 28, 28, 1) # 28x28x1 input img" 60 | ] 61 | }, 62 | { 63 | "cell_type": "code", 64 | "execution_count": 3, 65 | "metadata": { 66 | "collapsed": true 67 | }, 68 | "outputs": [], 69 | "source": [ 70 | "X = tf.placeholder(\"float\", [None, 28, 28, 1])\n", 71 | "Y = tf.placeholder(\"float\", [None, 10])\n", 72 | "\n", 73 | "w = init_weights([3, 3, 1, 32]) # 3x3x1 conv, 32 outputs\n", 74 | "w2 = init_weights([3, 3, 32, 64]) # 3x3x32 conv, 64 outputs\n", 75 | "w3 = init_weights([3, 3, 64, 128]) # 3x3x32 conv, 128 outputs\n", 76 | "w4 = init_weights([128 * 4 * 4, 625]) # FC 128 * 4 * 4 inputs, 625 outputs\n", 77 | "w_o = init_weights([625, 10]) # FC 625 inputs, 10 outputs (labels)\n", 78 | "\n", 79 | "p_keep_conv = tf.placeholder(\"float\")\n", 80 | "p_keep_hidden = tf.placeholder(\"float\")\n", 81 | "py_x = model(X, w, w2, w3, w4, w_o, p_keep_conv, p_keep_hidden)\n", 82 | "\n", 83 | "cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y))\n", 84 | "train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(cost)\n", 85 | "predict_op = tf.argmax(py_x, 1)" 86 | ] 87 | }, 88 | { 89 | "cell_type": "code", 90 | "execution_count": null, 91 | "metadata": { 92 | "collapsed": false 93 | }, 94 | "outputs": [], 95 | "source": [ 96 | "# Launch the graph in a session\n", 97 | "with tf.Session() as sess:\n", 98 | " # you need to initialize all variables\n", 99 | " tf.global_variables_initializer().run()\n", 100 | "\n", 101 | " for i in range(100):\n", 102 | " training_batch = zip(range(0, len(trX), batch_size),\n", 103 | " range(batch_size, len(trX)+1, batch_size))\n", 104 | " for start, end in training_batch:\n", 105 | " sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end],\n", 106 | " p_keep_conv: 0.8, p_keep_hidden: 0.5})\n", 107 | "\n", 108 | " test_indices = np.arange(len(teX)) # Get A Test Batch\n", 109 | " np.random.shuffle(test_indices)\n", 110 | " test_indices = test_indices[0:test_size]\n", 111 | "\n", 112 | " print(i, np.mean(np.argmax(teY[test_indices], axis=1) ==\n", 113 | " sess.run(predict_op, feed_dict={X: teX[test_indices],\n", 114 | " Y: teY[test_indices],\n", 115 | " p_keep_conv: 1.0,\n", 116 | " p_keep_hidden: 1.0})))" 117 | ] 118 | }, 119 | { 120 | "cell_type": "code", 121 | "execution_count": null, 122 | "metadata": { 123 | "collapsed": true 124 | }, 125 | "outputs": [], 126 | "source": [] 127 | } 128 | ], 129 | "metadata": { 130 | "kernelspec": { 131 | "display_name": "Python 2", 132 | "language": "python", 133 | "name": "python2" 134 | }, 135 | "language_info": { 136 | "codemirror_mode": { 137 | "name": "ipython", 138 | "version": 2 139 | }, 140 | "file_extension": ".py", 141 | "mimetype": "text/x-python", 142 | "name": "python", 143 | "nbconvert_exporter": "python", 144 | "pygments_lexer": "ipython2", 145 | "version": "2.7.13" 146 | } 147 | }, 148 | "nbformat": 4, 149 | "nbformat_minor": 1 150 | } 151 | -------------------------------------------------------------------------------- /05_convolutional_net.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import tensorflow as tf 4 | import numpy as np 5 | from tensorflow.examples.tutorials.mnist import input_data 6 | 7 | batch_size = 128 8 | test_size = 256 9 | 10 | def init_weights(shape): 11 | return tf.Variable(tf.random_normal(shape, stddev=0.01)) 12 | 13 | 14 | def model(X, w, w2, w3, w4, w_o, p_keep_conv, 
p_keep_hidden): 15 | l1a = tf.nn.relu(tf.nn.conv2d(X, w, # l1a shape=(?, 28, 28, 32) 16 | strides=[1, 1, 1, 1], padding='SAME')) 17 | l1 = tf.nn.max_pool(l1a, ksize=[1, 2, 2, 1], # l1 shape=(?, 14, 14, 32) 18 | strides=[1, 2, 2, 1], padding='SAME') 19 | l1 = tf.nn.dropout(l1, p_keep_conv) 20 | 21 | l2a = tf.nn.relu(tf.nn.conv2d(l1, w2, # l2a shape=(?, 14, 14, 64) 22 | strides=[1, 1, 1, 1], padding='SAME')) 23 | l2 = tf.nn.max_pool(l2a, ksize=[1, 2, 2, 1], # l2 shape=(?, 7, 7, 64) 24 | strides=[1, 2, 2, 1], padding='SAME') 25 | l2 = tf.nn.dropout(l2, p_keep_conv) 26 | 27 | l3a = tf.nn.relu(tf.nn.conv2d(l2, w3, # l3a shape=(?, 7, 7, 128) 28 | strides=[1, 1, 1, 1], padding='SAME')) 29 | l3 = tf.nn.max_pool(l3a, ksize=[1, 2, 2, 1], # l3 shape=(?, 4, 4, 128) 30 | strides=[1, 2, 2, 1], padding='SAME') 31 | l3 = tf.reshape(l3, [-1, w4.get_shape().as_list()[0]]) # reshape to (?, 2048) 32 | l3 = tf.nn.dropout(l3, p_keep_conv) 33 | 34 | l4 = tf.nn.relu(tf.matmul(l3, w4)) 35 | l4 = tf.nn.dropout(l4, p_keep_hidden) 36 | 37 | pyx = tf.matmul(l4, w_o) 38 | return pyx 39 | 40 | mnist = input_data.read_data_sets("MNIST_data/", one_hot=True) 41 | trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels 42 | trX = trX.reshape(-1, 28, 28, 1) # 28x28x1 input img 43 | teX = teX.reshape(-1, 28, 28, 1) # 28x28x1 input img 44 | 45 | X = tf.placeholder("float", [None, 28, 28, 1]) 46 | Y = tf.placeholder("float", [None, 10]) 47 | 48 | w = init_weights([3, 3, 1, 32]) # 3x3x1 conv, 32 outputs 49 | w2 = init_weights([3, 3, 32, 64]) # 3x3x32 conv, 64 outputs 50 | w3 = init_weights([3, 3, 64, 128]) # 3x3x32 conv, 128 outputs 51 | w4 = init_weights([128 * 4 * 4, 625]) # FC 128 * 4 * 4 inputs, 625 outputs 52 | w_o = init_weights([625, 10]) # FC 625 inputs, 10 outputs (labels) 53 | 54 | p_keep_conv = tf.placeholder("float") 55 | p_keep_hidden = tf.placeholder("float") 56 | py_x = model(X, w, w2, w3, w4, w_o, p_keep_conv, p_keep_hidden) 57 | 58 | cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y)) 59 | train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(cost) 60 | predict_op = tf.argmax(py_x, 1) 61 | 62 | # Launch the graph in a session 63 | with tf.Session() as sess: 64 | # you need to initialize all variables 65 | tf.global_variables_initializer().run() 66 | 67 | for i in range(100): 68 | training_batch = zip(range(0, len(trX), batch_size), 69 | range(batch_size, len(trX)+1, batch_size)) 70 | for start, end in training_batch: 71 | sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end], 72 | p_keep_conv: 0.8, p_keep_hidden: 0.5}) 73 | 74 | test_indices = np.arange(len(teX)) # Get A Test Batch 75 | np.random.shuffle(test_indices) 76 | test_indices = test_indices[0:test_size] 77 | 78 | print(i, np.mean(np.argmax(teY[test_indices], axis=1) == 79 | sess.run(predict_op, feed_dict={X: teX[test_indices], 80 | p_keep_conv: 1.0, 81 | p_keep_hidden: 1.0}))) 82 | -------------------------------------------------------------------------------- /06_autoencoder.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import numpy as np 3 | from tensorflow.examples.tutorials.mnist import input_data 4 | 5 | import matplotlib # to plot images 6 | # Force matplotlib to not use any X-server backend. 
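# ('Agg' is a non-interactive, raster-only backend, so the plt.savefig() calls
# below also work on headless machines such as the Travis CI workers, where no
# X display is available.)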
7 | matplotlib.use('Agg') 8 | import matplotlib.pyplot as plt 9 | import matplotlib.gridspec as gridspec 10 | 11 | ## Visualizing reconstructions 12 | def vis(images, save_name): 13 | dim = images.shape[0] 14 | n_image_rows = int(np.ceil(np.sqrt(dim))) 15 | n_image_cols = int(np.ceil(dim * 1.0/n_image_rows)) 16 | gs = gridspec.GridSpec(n_image_rows,n_image_cols,top=1., bottom=0., right=1., left=0., hspace=0., wspace=0.) 17 | for g,count in zip(gs,range(int(dim))): 18 | ax = plt.subplot(g) 19 | ax.imshow(images[count,:].reshape((28,28))) 20 | ax.set_xticks([]) 21 | ax.set_yticks([]) 22 | plt.savefig(save_name + '_vis.png') 23 | 24 | mnist_width = 28 25 | n_visible = mnist_width * mnist_width 26 | n_hidden = 500 27 | corruption_level = 0.3 28 | 29 | # create node for input data 30 | X = tf.placeholder("float", [None, n_visible], name='X') 31 | 32 | # create node for corruption mask 33 | mask = tf.placeholder("float", [None, n_visible], name='mask') 34 | 35 | # create nodes for hidden variables 36 | W_init_max = 4 * np.sqrt(6. / (n_visible + n_hidden)) 37 | W_init = tf.random_uniform(shape=[n_visible, n_hidden], 38 | minval=-W_init_max, 39 | maxval=W_init_max) 40 | 41 | W = tf.Variable(W_init, name='W') 42 | b = tf.Variable(tf.zeros([n_hidden]), name='b') 43 | 44 | W_prime = tf.transpose(W) # tied weights between encoder and decoder 45 | b_prime = tf.Variable(tf.zeros([n_visible]), name='b_prime') 46 | 47 | 48 | def model(X, mask, W, b, W_prime, b_prime): 49 | tilde_X = mask * X # corrupted X 50 | 51 | Y = tf.nn.sigmoid(tf.matmul(tilde_X, W) + b) # hidden state 52 | Z = tf.nn.sigmoid(tf.matmul(Y, W_prime) + b_prime) # reconstructed input 53 | return Z 54 | 55 | # build model graph 56 | Z = model(X, mask, W, b, W_prime, b_prime) 57 | 58 | # create cost function 59 | cost = tf.reduce_sum(tf.pow(X - Z, 2)) # minimize squared error 60 | train_op = tf.train.GradientDescentOptimizer(0.02).minimize(cost) # construct an optimizer 61 | predict_op = Z 62 | # load MNIST data 63 | mnist = input_data.read_data_sets("MNIST_data/", one_hot=True) 64 | trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels 65 | 66 | # Launch the graph in a session 67 | with tf.Session() as sess: 68 | # you need to initialize all variables 69 | tf.global_variables_initializer().run() 70 | 71 | for i in range(100): 72 | for start, end in zip(range(0, len(trX), 128), range(128, len(trX)+1, 128)): 73 | input_ = trX[start:end] 74 | mask_np = np.random.binomial(1, 1 - corruption_level, input_.shape) 75 | sess.run(train_op, feed_dict={X: input_, mask: mask_np}) 76 | 77 | mask_np = np.random.binomial(1, 1 - corruption_level, teX.shape) 78 | print(i, sess.run(cost, feed_dict={X: teX, mask: mask_np})) 79 | # save the predictions for 100 images 80 | mask_np = np.random.binomial(1, 1 - corruption_level, teX[:100].shape) 81 | predicted_imgs = sess.run(predict_op, feed_dict={X: teX[:100], mask: mask_np}) 82 | input_imgs = teX[:100] 83 | # plot the reconstructed images 84 | vis(predicted_imgs,'pred') 85 | vis(input_imgs,'in') 86 | print('Done') -------------------------------------------------------------------------------- /07_lstm.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "#Inspired by 
https://github.com/aymericdamien/TensorFlow-Examples/blob/master/examples/3%20-%20Neural%20Networks/recurrent_network.py\n", 12 | "import tensorflow as tf\n", 13 | "import numpy as np\n", 14 | "from tensorflow.contrib import rnn\n", 15 | "from tensorflow.examples.tutorials.mnist import input_data" 16 | ] 17 | }, 18 | { 19 | "cell_type": "code", 20 | "execution_count": null, 21 | "metadata": { 22 | "collapsed": false 23 | }, 24 | "outputs": [], 25 | "source": [ 26 | "# configuration\n", 27 | "# O * W + b -> 10 labels for each image, O[? 28], W[28 10], B[10]\n", 28 | "# ^ (O: output 28 vec from 28 vec input)\n", 29 | "# |\n", 30 | "# +-+ +-+ +--+\n", 31 | "# |1|->|2|-> ... |28| time_step_size = 28\n", 32 | "# +-+ +-+ +--+\n", 33 | "# ^ ^ ... ^\n", 34 | "# | | |\n", 35 | "# img1:[28] [28] ... [28]\n", 36 | "# img2:[28] [28] ... [28]\n", 37 | "# img3:[28] [28] ... [28]\n", 38 | "# ...\n", 39 | "# img128 or img256 (batch_size or test_size 256)\n", 40 | "# each input size = input_vec_size=lstm_size=28\n", 41 | "\n", 42 | "# configuration variables\n", 43 | "input_vec_size = lstm_size = 28\n", 44 | "time_step_size = 28\n", 45 | "\n", 46 | "batch_size = 128\n", 47 | "test_size = 256\n", 48 | "\n", 49 | "def init_weights(shape):\n", 50 | " return tf.Variable(tf.random_normal(shape, stddev=0.01))\n", 51 | "\n", 52 | "def model(X, W, B, lstm_size):\n", 53 | " # X, input shape: (batch_size, time_step_size, input_vec_size)\n", 54 | " XT = tf.transpose(X, [1, 0, 2]) # permute time_step_size and batch_size\n", 55 | " # XT shape: (time_step_size, batch_size, input_vec_size)\n", 56 | " XR = tf.reshape(XT, [-1, lstm_size]) # each row has input for each lstm cell (lstm_size=input_vec_size)\n", 57 | " # XR shape: (time_step_size * batch_size, input_vec_size)\n", 58 | " X_split = tf.split(XR, time_step_size, 0) # split them to time_step_size (28 arrays)\n", 59 | " # Each array shape: (batch_size, input_vec_size)\n", 60 | "\n", 61 | " # Make lstm with lstm_size (each input vector size)\n", 62 | " lstm = rnn.BasicLSTMCell(lstm_size, forget_bias=1.0, state_is_tuple=True)\n", 63 | "\n", 64 | " # Get lstm cell output, time_step_size (28) arrays with lstm_size output: (batch_size, lstm_size)\n", 65 | " outputs, _states = rnn.static_rnn(lstm, X_split, dtype=tf.float32)\n", 66 | "\n", 67 | " # Linear activation\n", 68 | " # Get the last output\n", 69 | " return tf.matmul(outputs[-1], W) + B, lstm.state_size # State size to initialize the stat\n", 70 | "\n", 71 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)\n", 72 | "trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels\n", 73 | "trX = trX.reshape(-1, 28, 28)\n", 74 | "teX = teX.reshape(-1, 28, 28)" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": null, 80 | "metadata": { 81 | "collapsed": false 82 | }, 83 | "outputs": [], 84 | "source": [ 85 | "X = tf.placeholder(\"float\", [None, 28, 28])\n", 86 | "Y = tf.placeholder(\"float\", [None, 10])\n", 87 | "\n", 88 | "# get lstm_size and output 10 labels\n", 89 | "W = init_weights([lstm_size, 10])\n", 90 | "B = init_weights([10])\n", 91 | "\n", 92 | "py_x, state_size = model(X, W, B, lstm_size)\n", 93 | "\n", 94 | "cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y))\n", 95 | "train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(cost)\n", 96 | "predict_op = tf.argmax(py_x, 1)" 97 | ] 98 | }, 99 | { 100 | "cell_type": "code", 101 | "execution_count": null, 102 | "metadata": { 103 | "collapsed": false 
104 | }, 105 | "outputs": [], 106 | "source": [ 107 | "# Launch the graph in a session\n", 108 | "with tf.Session() as sess:\n", 109 | " # you need to initialize all variables\n", 110 | " tf.global_variables_initializer().run()\n", 111 | "\n", 112 | " for i in range(100):\n", 113 | " for start, end in zip(range(0, len(trX), batch_size), range(batch_size, len(trX)+1, batch_size)):\n", 114 | " sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end]})\n", 115 | "\n", 116 | " test_indices = np.arange(len(teX)) # Get A Test Batch\n", 117 | " np.random.shuffle(test_indices)\n", 118 | " test_indices = test_indices[0:test_size]\n", 119 | "\n", 120 | " print(i, np.mean(np.argmax(teY[test_indices], axis=1) ==\n", 121 | " sess.run(predict_op, feed_dict={X: teX[test_indices]})))" 122 | ] 123 | }, 124 | { 125 | "cell_type": "code", 126 | "execution_count": null, 127 | "metadata": { 128 | "collapsed": true 129 | }, 130 | "outputs": [], 131 | "source": [] 132 | } 133 | ], 134 | "metadata": { 135 | "kernelspec": { 136 | "display_name": "Python 2", 137 | "language": "python", 138 | "name": "python2" 139 | }, 140 | "language_info": { 141 | "codemirror_mode": { 142 | "name": "ipython", 143 | "version": 2 144 | }, 145 | "file_extension": ".py", 146 | "mimetype": "text/x-python", 147 | "name": "python", 148 | "nbconvert_exporter": "python", 149 | "pygments_lexer": "ipython2", 150 | "version": "2.7.13" 151 | } 152 | }, 153 | "nbformat": 4, 154 | "nbformat_minor": 0 155 | } 156 | -------------------------------------------------------------------------------- /07_lstm.py: -------------------------------------------------------------------------------- 1 | #Inspired by https://github.com/aymericdamien/TensorFlow-Examples/blob/master/examples/3%20-%20Neural%20Networks/recurrent_network.py 2 | import tensorflow as tf 3 | from tensorflow.contrib import rnn 4 | 5 | import numpy as np 6 | from tensorflow.examples.tutorials.mnist import input_data 7 | 8 | # configuration 9 | # O * W + b -> 10 labels for each image, O[? 28], W[28 10], B[10] 10 | # ^ (O: output 28 vec from 28 vec input) 11 | # | 12 | # +-+ +-+ +--+ 13 | # |1|->|2|-> ... |28| time_step_size = 28 14 | # +-+ +-+ +--+ 15 | # ^ ^ ... ^ 16 | # | | | 17 | # img1:[28] [28] ... [28] 18 | # img2:[28] [28] ... [28] 19 | # img3:[28] [28] ... [28] 20 | # ... 
21 | # img128 or img256 (batch_size or test_size 256) 22 | # each input size = input_vec_size=lstm_size=28 23 | 24 | # configuration variables 25 | input_vec_size = lstm_size = 28 26 | time_step_size = 28 27 | 28 | batch_size = 128 29 | test_size = 256 30 | 31 | def init_weights(shape): 32 | return tf.Variable(tf.random_normal(shape, stddev=0.01)) 33 | 34 | 35 | def model(X, W, B, lstm_size): 36 | # X, input shape: (batch_size, time_step_size, input_vec_size) 37 | XT = tf.transpose(X, [1, 0, 2]) # permute time_step_size and batch_size 38 | # XT shape: (time_step_size, batch_size, input_vec_size) 39 | XR = tf.reshape(XT, [-1, lstm_size]) # each row has input for each lstm cell (lstm_size=input_vec_size) 40 | # XR shape: (time_step_size * batch_size, input_vec_size) 41 | X_split = tf.split(XR, time_step_size, 0) # split them to time_step_size (28 arrays) 42 | # Each array shape: (batch_size, input_vec_size) 43 | 44 | # Make lstm with lstm_size (each input vector size) 45 | lstm = rnn.BasicLSTMCell(lstm_size, forget_bias=1.0, state_is_tuple=True) 46 | 47 | # Get lstm cell output, time_step_size (28) arrays with lstm_size output: (batch_size, lstm_size) 48 | outputs, _states = rnn.static_rnn(lstm, X_split, dtype=tf.float32) 49 | 50 | # Linear activation 51 | # Get the last output 52 | return tf.matmul(outputs[-1], W) + B, lstm.state_size # State size to initialize the stat 53 | 54 | mnist = input_data.read_data_sets("MNIST_data/", one_hot=True) 55 | trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels 56 | trX = trX.reshape(-1, 28, 28) 57 | teX = teX.reshape(-1, 28, 28) 58 | 59 | X = tf.placeholder("float", [None, 28, 28]) 60 | Y = tf.placeholder("float", [None, 10]) 61 | 62 | # get lstm_size and output 10 labels 63 | W = init_weights([lstm_size, 10]) 64 | B = init_weights([10]) 65 | 66 | py_x, state_size = model(X, W, B, lstm_size) 67 | 68 | cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y)) 69 | train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(cost) 70 | predict_op = tf.argmax(py_x, 1) 71 | 72 | session_conf = tf.ConfigProto() 73 | session_conf.gpu_options.allow_growth = True 74 | 75 | # Launch the graph in a session 76 | with tf.Session(config=session_conf) as sess: 77 | # you need to initialize all variables 78 | tf.global_variables_initializer().run() 79 | 80 | for i in range(100): 81 | for start, end in zip(range(0, len(trX), batch_size), range(batch_size, len(trX)+1, batch_size)): 82 | sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end]}) 83 | 84 | test_indices = np.arange(len(teX)) # Get A Test Batch 85 | np.random.shuffle(test_indices) 86 | test_indices = test_indices[0:test_size] 87 | 88 | print(i, np.mean(np.argmax(teY[test_indices], axis=1) == 89 | sess.run(predict_op, feed_dict={X: teX[test_indices]}))) 90 | -------------------------------------------------------------------------------- /08_word2vec.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": false 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# Inspired by https://www.tensorflow.org/versions/r0.7/tutorials/word2vec/index.html\n", 12 | "import collections\n", 13 | "import numpy as np\n", 14 | "import tensorflow as tf\n", 15 | "import matplotlib.pyplot as plt\n", 16 | "%matplotlib inline" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": 2, 22 | 
"metadata": { 23 | "collapsed": false 24 | }, 25 | "outputs": [ 26 | { 27 | "name": "stdout", 28 | "output_type": "stream", 29 | "text": [ 30 | "('Word count', [('cats', 10), ('dogs', 6), ('and', 5), ('are', 4), ('love', 3)])\n", 31 | "('Sample data', [8, 33, 24, 20, 17, 12, 8, 25, 30, 26], ['the', 'quick', 'brown', 'fox', 'jumped', 'over', 'the', 'lazy', 'dog', 'I'])\n", 32 | "('Context pairs', [[[8, 24], 33], [[33, 20], 24], [[24, 17], 20], [[20, 12], 17], [[17, 8], 12], [[12, 25], 8], [[8, 30], 25], [[25, 26], 30], [[30, 4], 26], [[26, 0], 4]])\n", 33 | "('skip-gram pairs', [[33, 8], [33, 24], [24, 33], [24, 20], [20, 24]])\n", 34 | "('Batches (x, y)', ([27, 15, 22], [[0], [28], [18]]))\n" 35 | ] 36 | } 37 | ], 38 | "source": [ 39 | "# Configuration\n", 40 | "batch_size = 20\n", 41 | "# Dimension of the embedding vector. Two too small to get\n", 42 | "# any meaningful embeddings, but let's make it 2 for simple visualization\n", 43 | "embedding_size = 2\n", 44 | "num_sampled = 15 # Number of negative examples to sample.\n", 45 | "\n", 46 | "# Sample sentences\n", 47 | "sentences = [\"the quick brown fox jumped over the lazy dog\",\n", 48 | " \"I love cats and dogs\",\n", 49 | " \"we all love cats and dogs\",\n", 50 | " \"cats and dogs are great\",\n", 51 | " \"sung likes cats\",\n", 52 | " \"she loves dogs\",\n", 53 | " \"cats can be very independent\",\n", 54 | " \"cats are great companions when they want to be\",\n", 55 | " \"cats are playful\",\n", 56 | " \"cats are natural hunters\",\n", 57 | " \"It's raining cats and dogs\",\n", 58 | " \"dogs and cats love sung\"]\n", 59 | "\n", 60 | "# sentences to words and count\n", 61 | "words = \" \".join(sentences).split()\n", 62 | "count = collections.Counter(words).most_common()\n", 63 | "print (\"Word count\", count[:5])\n", 64 | "\n", 65 | "# Build dictionaries\n", 66 | "rdic = [i[0] for i in count] #reverse dic, idx -> word\n", 67 | "dic = {w: i for i, w in enumerate(rdic)} #dic, word -> id\n", 68 | "voc_size = len(dic)\n", 69 | "\n", 70 | "# Make indexed word data\n", 71 | "data = [dic[word] for word in words]\n", 72 | "print('Sample data', data[:10], [rdic[t] for t in data[:10]])\n", 73 | "\n", 74 | "# Let's make a training data for window size 1 for simplicity\n", 75 | "# ([the, brown], quick), ([quick, fox], brown), ([brown, jumped], fox), ...\n", 76 | "cbow_pairs = [];\n", 77 | "for i in range(1, len(data)-1) :\n", 78 | " cbow_pairs.append([[data[i-1], data[i+1]], data[i]]);\n", 79 | "print('Context pairs', cbow_pairs[:10])\n", 80 | "\n", 81 | "# Let's make skip-gram pairs\n", 82 | "# (quick, the), (quick, brown), (brown, quick), (brown, fox), ...\n", 83 | "skip_gram_pairs = [];\n", 84 | "for c in cbow_pairs:\n", 85 | " skip_gram_pairs.append([c[1], c[0][0]])\n", 86 | " skip_gram_pairs.append([c[1], c[0][1]])\n", 87 | "print('skip-gram pairs', skip_gram_pairs[:5])\n", 88 | "\n", 89 | "def generate_batch(size):\n", 90 | " assert size < len(skip_gram_pairs)\n", 91 | " x_data=[]\n", 92 | " y_data = []\n", 93 | " r = np.random.choice(range(len(skip_gram_pairs)), size, replace=False)\n", 94 | " for i in r:\n", 95 | " x_data.append(skip_gram_pairs[i][0]) # n dim\n", 96 | " y_data.append([skip_gram_pairs[i][1]]) # n, 1 dim\n", 97 | " return x_data, y_data\n", 98 | "\n", 99 | "# generate_batch test\n", 100 | "print ('Batches (x, y)', generate_batch(3))" 101 | ] 102 | }, 103 | { 104 | "cell_type": "code", 105 | "execution_count": 6, 106 | "metadata": { 107 | "collapsed": false 108 | }, 109 | "outputs": [], 110 | "source": [ 111 | "# Input 
data\n", 112 | "train_inputs = tf.placeholder(tf.int32, shape=[batch_size])\n", 113 | "# need to shape [batch_size, 1] for nn.nce_loss\n", 114 | "train_labels = tf.placeholder(tf.int32, shape=[batch_size, 1])\n", 115 | "# Ops and variables pinned to the CPU because of missing GPU implementation\n", 116 | "with tf.device('/cpu:0'):\n", 117 | " # Look up embeddings for inputs.\n", 118 | " embeddings = tf.Variable(\n", 119 | " tf.random_uniform([voc_size, embedding_size], -1.0, 1.0))\n", 120 | " embed = tf.nn.embedding_lookup(embeddings, train_inputs) # lookup table\n", 121 | "\n", 122 | "# Construct the variables for the NCE loss\n", 123 | "nce_weights = tf.Variable(\n", 124 | " tf.random_uniform([voc_size, embedding_size],-1.0, 1.0))\n", 125 | "nce_biases = tf.Variable(tf.zeros([voc_size]))\n", 126 | "\n", 127 | "# Compute the average NCE loss for the batch.\n", 128 | "# This does the magic:\n", 129 | "# tf.nn.nce_loss(weights, biases, inputs, labels, num_sampled, num_classes ...)\n", 130 | "# It automatically draws negative samples when we evaluate the loss.\n", 131 | "loss = tf.reduce_mean(tf.nn.nce_loss(nce_weights, nce_biases, train_labels, embed, num_sampled, voc_size))\n", 132 | "# Use the adam optimizer\n", 133 | "train_op = tf.train.AdamOptimizer(1e-1).minimize(loss)" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": 7, 139 | "metadata": { 140 | "collapsed": false 141 | }, 142 | "outputs": [ 143 | { 144 | "name": "stdout", 145 | "output_type": "stream", 146 | "text": [ 147 | "('Loss at ', 0, 16.654182)\n", 148 | "('Loss at ', 10, 14.677063)\n", 149 | "('Loss at ', 20, 9.1576614)\n", 150 | "('Loss at ', 30, 3.9546738)\n", 151 | "('Loss at ', 40, 3.8289108)\n", 152 | "('Loss at ', 50, 3.3630223)\n", 153 | "('Loss at ', 60, 3.6222715)\n", 154 | "('Loss at ', 70, 3.1979971)\n", 155 | "('Loss at ', 80, 3.5327618)\n", 156 | "('Loss at ', 90, 3.4316573)\n" 157 | ] 158 | }, 159 | { 160 | "data": { 161 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAgkAAAFkCAYAAACq4KjhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzt3Xlc1XXe///HmyNuKEpi2eKCLC5lKkxO5l4khpltMxOg\nebX4nTYX1Mb6XjPtc+VVqWVzVVZmOtT55a3flKUGDZl0uaAGiVk4BzClaUpLDEdLDXh//zhIgB9N\nkHMOy/N+u52b53y29+vz9gPnyWc11lpEREREagsKdAEiIiLSOCkkiIiIiCOFBBEREXGkkCAiIiKO\nFBJERETEkUKCiIiIOFJIEBEREUcKCSIiIuJIIUFEREQcKSSIiIiII5+GBGPMg8aYilqvz33ZpoiI\niDSMVn5oYwdwBWAqP5f5oU0RERE5Q/4ICWXW2m/90I6IiIg0IH+ckxBtjPnKGFNkjEkzxnT3Q5si\nIiJyhowvHxVtjEkAOgD/AM4FHgLOAy6y1h52mL4LkADsBo74rDAREZHmpy3QC8iw1u5viAX6NCSc\n0JgxnYA9QKq1dqnD+GTgNb8VJCIi0vykWGtfb4gF+eOchCrW2lJjjAeIOskkuwHS0tLo16+f3+pq\nDlJTU1m4cGGgy2hS1Gf1o36rO/VZ/ajf6iY/P59JkyZB5XdpQ/BrSDDGdAAigeUnmeQIQL9+/YiN\njfVbXc1Bp06d1Gd1pD6rH/Vb3anP6kf9Vm8Ndrje1/dJeNIYM9IY09MYcxnwFt5LIN2+bFdERETO\nnK/3JFwAvA50Ab4F1gOXNtQJFSIiIuI7Pg0J1tokXy5fREREfEfPbmgmkpKUx+pKfVY/6re6U5/V\nj/ot8Px6CeQvMcbEAjk5OTk6WUVERKQOcnNziYuLA4iz1uY2xDK1J0FEREQcKSSIiIiII4UEERER\ncaSQICIiIo4UEkRERMSRQoKIiIg4UkgQERERRwoJIiIi4kghQURERBwpJIiIiIgjhQQRERFxpJAg\nIiIijhQSRERExJFCgoiIiDhSSBARERFHCgkiIiLiSCFBREREHCkkiIiIiCOFBBEREXGkkCAiIiKO\nFBJERETEkUKCiIiIOFJIEBEREUcKCSIiIuJIIUFEREQcKSSIiIiII4UEERERcaSQICIiIo4UEkRE\nRMSRQoKIiIg4UkgQERERRwoJIiIi4kghQURERBwpJIiIiIgjhQQRERFxpJAgIiIijhQSRERExJFC\ngoiIiDhSSBARERFHfgsJxpj7jTEVxpgF/mpTROR0LVu2jLCwsECXIdKo+CUkGGMuAaYCef5oT0Sk\nPowxgS5BpFHxeUgwxnQA0oDbge993Z6IiIg0DH/sSfgf4F1r7Vo/tCUiLUBGRgYjRowgLCyM8PBw\nJkyYwK5duwDYs2cPQUFBvPXWW1x++eWEhIQwaNAgsrOzayzj1VdfpWfPnnTo0IEbbriB/fv3B2JV\nRBo1n4YEY8xNwCDgfl+2IyIty+HDh5k9ezY5OTmsXbsWl8vFddddV2OaP/7xj/zhD38gLy+PmJgY\nkpOTqaioAGDz5s3cfvvtTJ8+nW3btjFmzBgee+yxQKyKSKNmrLW+WbAxFwAfA1daaz+tHPYh8Im1\ndtZJ5okFckaOHEmnTp1qjEtKSiIpKckntYpI0/btt99yzjnnsGPHDkJCQoiIiOCVV17hP/7jPwDI\nz8/noosuIj8/n5iYGFJSUjh48CDvvvtu1TKSkpLIyMigpKQkQGshcvrcbjdut7vGsNLSUj766COA\nOGttbkO006ohFnIScUBXIMf8fDaQCxhpjLkHaGNPklAWLlxIbGysD0sTkaassLCQBx54gM2bN/Pd\nd99RUVGBMYbi4mL69esHwIABA6qmP/fcc7HWsm/fPmJiYsjPz+f666+vscyhQ4eSkZHh1/UQqS+n\nP5xzc3OJi4tr0HZ8GRIygQG1hr0K5APzThYQRER+ydVXX01ERAQvv/wy5513HuXl5Vx00UUcO3as\naprg4OCq98f/Tjl+uMFaqysZRE6Dz0KCtfYw8Hn1YcaYw8B+a22+r9oVkeatpKQEj8fDkiVLGDZs\nGADr16+v0zL69+9/womMmzZtarAaRZoLX+5JcKK9ByJyRsLCwujSpQsvvvgi3bp1Y8+ePdx///11\n2jMwffp0hg8fzvz585k4cSLp6ek61CDiwK+3ZbbWXn6ykxZFRE6HMYY33niDnJwcBgwYwOzZs3nq\nqaeqxlX/t/Z8x/3617/mpZdeYtGiRQwaNIjMzEz+9Kc/+WcFRJoQn13dUB/Hr27IycnRiYsiIiJ1\nUO3ExSZxdYOISKPk8XgoKioiKiqK6OjoQJcj0mjpKZAi0mKUlJQwbtx4+vTpQ2JiIjExMYwbN54D\nBw4EujSRRkkhQURajOTkyWRmZuN9nEwxkEZmZjZJSZMCXJlI46TDDSLSIng8HjIy1uANCCmVQ1Mo\nL7dkZEymoKBAhx5EatGeBBFpEYqKiirfjaw1ZhTgvYujiNSkkCAiLUJkZGTlu49qjckCICoqyq/1\niDQFCgki0iLExMSQkJCIyzUd7yGHL4E0XK4ZJCQk6lCDiAOFBBFpMdzuNOLjLwUmAz2AycTHX4rb\nnRbgykQaJ524KCItRlhYGOnpqykoKKCwsFD3SRD5BQoJItLiREdHKxyInAYdbhARERFHCgkiIiLi\nSCFBREREHCkkiIiIiCOFBBEREXGkkCAiIiKOFBJERETEkUKCiIiIOFJIEBEREUcKCSIiIuJIIUFE\nREQcKSSIiIiII4UEERERcaSQICIiIo4UEkRERMSRQoKIiIg4UkgQERERRwoJIiIi4kghQURERBwp\nJIiIiIgjhQQRERFxpJAgIiIijhQSRERExJFCgoiIiDhSSBARERFHCgkiIiLiSCFBREREHCkkiIiI\niCOFBBEREXHk05BgjLnDGJNnjCmtfG00xozzZZsiIiLSMHy9J+FLYC4QV/laC6w0xvTzcbsiIiJy\nhlr5cuHW2tW1Bv3RGHMncCmQ78u2RURE5Mz4NCRUZ4wJAn4LtAc2+atdERERqR+fn7hojLnIGPNv\n4CjwHHCdtXanr9ttasaMGcOsWbMCXYaIiEgVf1zdsBMYCPwaeB5Ybozp64d2RURE5Az4/HCDtbYM\n2FX5MdcYMwSYAdx5snlSU1Pp1KlTjWFJSUkkJSX5rE4REZGmwu1243a7awwrLS1t8Hb8dk5CNUFA\nm1NNsHDhQmJjY/1Ujv/98MMP3HHHHbz11luEhoYye/bsGuO///57pk+fzqpVqzh69CijRo1i0aJF\nREVFVU3z0ksv8eijj1JSUkJCQgLDhw/nkUce4cCBAwBs376dmTNn8vHHH2OMISYmhsWLFzfrfhUR\naSmc/nDOzc0lLi6uQdvx9X0S/myMGW6M6Vl5bsLjwCggzZftNnZz5szhf//3f3n33Xd5//33Wbdu\nHTk5OVXjp0yZQm5uLqtWrSI7OxtrLYmJiZSXlwOwYcMG
[... base64-encoded PNG data for the word2vec embedding scatter plot omitted ...]
162 |        "text/plain": [ 163 |         "" 164 |        ] 165 |       }, 166 |      "metadata": {}, 167 |      "output_type": "display_data" 168 |     } 169 |    ], 170 |    "source": [ 171 |     "# Launch the graph in a session\n", 172 |     "with tf.Session() as sess:\n", 173 |     "    # Initializing all variables\n", 174 |     "    tf.global_variables_initializer().run()\n", 175 |     "\n", 176 |     "    for step in range(100):\n", 177 |     "        batch_inputs, batch_labels = generate_batch(batch_size)\n", 178 |     "        _, loss_val = sess.run([train_op, loss],\n", 179 |     "                feed_dict={train_inputs: batch_inputs, train_labels: batch_labels})\n", 180 |     "        if step % 10 == 0:\n", 181 |     "            print(\"Loss at \", step, loss_val) # Report the loss\n", 182 | 
"\n", 183 | " # Final embeddings are ready for you to use. Need to normalize for practical use\n", 184 | " trained_embeddings = embeddings.eval()\n", 185 | "\n", 186 | "# Show word2vec if dim is 2\n", 187 | "if trained_embeddings.shape[1] == 2:\n", 188 | " labels = rdic[:10] # Show top 10 words\n", 189 | " for i, label in enumerate(labels):\n", 190 | " x, y = trained_embeddings[i,:]\n", 191 | " plt.scatter(x, y)\n", 192 | " plt.annotate(label, xy=(x, y), xytext=(5, 2),\n", 193 | " textcoords='offset points', ha='right', va='bottom')\n", 194 | " plt.savefig(\"word2vec.png\")" 195 | ] 196 | }, 197 | { 198 | "cell_type": "code", 199 | "execution_count": null, 200 | "metadata": { 201 | "collapsed": true 202 | }, 203 | "outputs": [], 204 | "source": [] 205 | } 206 | ], 207 | "metadata": { 208 | "kernelspec": { 209 | "display_name": "Python 2", 210 | "language": "python", 211 | "name": "python2" 212 | }, 213 | "language_info": { 214 | "codemirror_mode": { 215 | "name": "ipython", 216 | "version": 2 217 | }, 218 | "file_extension": ".py", 219 | "mimetype": "text/x-python", 220 | "name": "python", 221 | "nbconvert_exporter": "python", 222 | "pygments_lexer": "ipython2", 223 | "version": "2.7.13" 224 | } 225 | }, 226 | "nbformat": 4, 227 | "nbformat_minor": 1 228 | } 229 | -------------------------------------------------------------------------------- /08_word2vec.py: -------------------------------------------------------------------------------- 1 | # Inspired by https://www.tensorflow.org/versions/r0.7/tutorials/word2vec/index.html 2 | import collections 3 | import numpy as np 4 | import tensorflow as tf 5 | import matplotlib 6 | matplotlib.use('Agg') 7 | import matplotlib.pyplot as plt 8 | 9 | # Configuration 10 | batch_size = 20 11 | # Dimension of the embedding vector. Two too small to get 12 | # any meaningful embeddings, but let's make it 2 for simple visualization 13 | embedding_size = 2 14 | num_sampled = 15 # Number of negative examples to sample. 15 | 16 | # Sample sentences 17 | sentences = ["the quick brown fox jumped over the lazy dog", 18 | "I love cats and dogs", 19 | "we all love cats and dogs", 20 | "cats and dogs are great", 21 | "sung likes cats", 22 | "she loves dogs", 23 | "cats can be very independent", 24 | "cats are great companions when they want to be", 25 | "cats are playful", 26 | "cats are natural hunters", 27 | "It's raining cats and dogs", 28 | "dogs and cats love sung"] 29 | 30 | # sentences to words and count 31 | words = " ".join(sentences).split() 32 | count = collections.Counter(words).most_common() 33 | print ("Word count", count[:5]) 34 | 35 | # Build dictionaries 36 | rdic = [i[0] for i in count] #reverse dic, idx -> word 37 | dic = {w: i for i, w in enumerate(rdic)} #dic, word -> id 38 | voc_size = len(dic) 39 | 40 | # Make indexed word data 41 | data = [dic[word] for word in words] 42 | print('Sample data', data[:10], [rdic[t] for t in data[:10]]) 43 | 44 | # Let's make a training data for window size 1 for simplicity 45 | # ([the, brown], quick), ([quick, fox], brown), ([brown, jumped], fox), ... 46 | cbow_pairs = []; 47 | for i in range(1, len(data)-1) : 48 | cbow_pairs.append([[data[i-1], data[i+1]], data[i]]); 49 | print('Context pairs', cbow_pairs[:10]) 50 | 51 | # Let's make skip-gram pairs 52 | # (quick, the), (quick, brown), (brown, quick), (brown, fox), ... 
53 | skip_gram_pairs = []; 54 | for c in cbow_pairs: 55 | skip_gram_pairs.append([c[1], c[0][0]]) 56 | skip_gram_pairs.append([c[1], c[0][1]]) 57 | print('skip-gram pairs', skip_gram_pairs[:5]) 58 | 59 | def generate_batch(size): 60 | assert size < len(skip_gram_pairs) 61 | x_data=[] 62 | y_data = [] 63 | r = np.random.choice(range(len(skip_gram_pairs)), size, replace=False) 64 | for i in r: 65 | x_data.append(skip_gram_pairs[i][0]) # n dim 66 | y_data.append([skip_gram_pairs[i][1]]) # n, 1 dim 67 | return x_data, y_data 68 | 69 | # generate_batch test 70 | print ('Batches (x, y)', generate_batch(3)) 71 | 72 | # Input data 73 | train_inputs = tf.placeholder(tf.int32, shape=[batch_size]) 74 | # need to shape [batch_size, 1] for nn.nce_loss 75 | train_labels = tf.placeholder(tf.int32, shape=[batch_size, 1]) 76 | # Ops and variables pinned to the CPU because of missing GPU implementation 77 | with tf.device('/cpu:0'): 78 | # Look up embeddings for inputs. 79 | embeddings = tf.Variable( 80 | tf.random_uniform([voc_size, embedding_size], -1.0, 1.0)) 81 | embed = tf.nn.embedding_lookup(embeddings, train_inputs) # lookup table 82 | 83 | # Construct the variables for the NCE loss 84 | nce_weights = tf.Variable( 85 | tf.random_uniform([voc_size, embedding_size],-1.0, 1.0)) 86 | nce_biases = tf.Variable(tf.zeros([voc_size])) 87 | 88 | # Compute the average NCE loss for the batch. 89 | # This does the magic: 90 | # tf.nn.nce_loss(weights, biases, inputs, labels, num_sampled, num_classes ...) 91 | # It automatically draws negative samples when we evaluate the loss. 92 | loss = tf.reduce_mean(tf.nn.nce_loss(nce_weights, nce_biases, train_labels, embed, num_sampled, voc_size)) 93 | 94 | # Use the adam optimizer 95 | train_op = tf.train.AdamOptimizer(1e-1).minimize(loss) 96 | 97 | # Launch the graph in a session 98 | with tf.Session() as sess: 99 | # Initializing all variables 100 | tf.global_variables_initializer().run() 101 | 102 | for step in range(100): 103 | batch_inputs, batch_labels = generate_batch(batch_size) 104 | _, loss_val = sess.run([train_op, loss], 105 | feed_dict={train_inputs: batch_inputs, train_labels: batch_labels}) 106 | if step % 10 == 0: 107 | print("Loss at ", step, loss_val) # Report the loss 108 | 109 | # Final embeddings are ready for you to use. Need to normalize for practical use 110 | trained_embeddings = embeddings.eval() 111 | 112 | # Show word2vec if dim is 2 113 | if trained_embeddings.shape[1] == 2: 114 | labels = rdic[:10] # Show top 10 words 115 | for i, label in enumerate(labels): 116 | x, y = trained_embeddings[i,:] 117 | plt.scatter(x, y) 118 | plt.annotate(label, xy=(x, y), xytext=(5, 2), 119 | textcoords='offset points', ha='right', va='bottom') 120 | plt.savefig("word2vec.png") -------------------------------------------------------------------------------- /09_tensorboard.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# TensorBoard" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "After Training the model, run \n", 15 | "\n", 16 | " tensorboard --logdir=path/to/log-directory\n", 17 | " \n", 18 | "Tensorboard provides a good visualization tool for all the variables you like and works on a browser." 
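Concretely, for the ./logs/nn_logs directory this notebook writes its summaries to further down, that command is

    tensorboard --logdir=./logs/nn_logs

TensorBoard serves the dashboard on port 6006 by default, so it can then be opened at http://localhost:6006 in a browser.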
19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 1, 24 | "metadata": { 25 | "collapsed": true 26 | }, 27 | "outputs": [], 28 | "source": [ 29 | "import tensorflow as tf\n", 30 | "from tensorflow.examples.tutorials.mnist import input_data" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": null, 36 | "metadata": { 37 | "collapsed": false 38 | }, 39 | "outputs": [], 40 | "source": [ 41 | "def init_weights(shape, name):\n", 42 | " return tf.Variable(tf.random_normal(shape, stddev=0.01), name=name)\n", 43 | "\n", 44 | "# This network is the same as the previous one except with an extra hidden layer + dropout\n", 45 | "def model(X, w_h, w_h2, w_o, p_keep_input, p_keep_hidden):\n", 46 | " # Add layer name scopes for better graph visualization\n", 47 | " with tf.name_scope(\"layer1\"):\n", 48 | " X = tf.nn.dropout(X, p_keep_input)\n", 49 | " h = tf.nn.relu(tf.matmul(X, w_h))\n", 50 | " with tf.name_scope(\"layer2\"):\n", 51 | " h = tf.nn.dropout(h, p_keep_hidden)\n", 52 | " h2 = tf.nn.relu(tf.matmul(h, w_h2))\n", 53 | " with tf.name_scope(\"layer3\"):\n", 54 | " h2 = tf.nn.dropout(h2, p_keep_hidden)\n", 55 | " return tf.matmul(h2, w_o)\n", 56 | "\n", 57 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)\n", 58 | "trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": 4, 64 | "metadata": { 65 | "collapsed": false 66 | }, 67 | "outputs": [], 68 | "source": [ 69 | "X = tf.placeholder(\"float\", [None, 784], name=\"X\")\n", 70 | "Y = tf.placeholder(\"float\", [None, 10], name=\"Y\")\n", 71 | "\n", 72 | "w_h = init_weights([784, 625], \"w_h\")\n", 73 | "w_h2 = init_weights([625, 625], \"w_h2\")\n", 74 | "w_o = init_weights([625, 10], \"w_o\")\n", 75 | "\n", 76 | "# Add histogram summaries for weights\n", 77 | "tf.summary.histogram(\"w_h_summ\", w_h)\n", 78 | "tf.summary.histogram(\"w_h2_summ\", w_h2)\n", 79 | "tf.summary.histogram(\"w_o_summ\", w_o)\n", 80 | "\n", 81 | "p_keep_input = tf.placeholder(\"float\", name=\"p_keep_input\")\n", 82 | "p_keep_hidden = tf.placeholder(\"float\", name=\"p_keep_hidden\")\n", 83 | "py_x = model(X, w_h, w_h2, w_o, p_keep_input, p_keep_hidden)\n", 84 | "\n", 85 | "with tf.name_scope(\"cost\"):\n", 86 | " cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y))\n", 87 | " train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(cost)\n", 88 | " # Add scalar summary for cost\n", 89 | " tf.summary.scalar(\"cost\", cost)\n", 90 | "\n", 91 | "with tf.name_scope(\"accuracy\"):\n", 92 | " correct_pred = tf.equal(tf.argmax(Y, 1), tf.argmax(py_x, 1)) # Count correct predictions\n", 93 | " acc_op = tf.reduce_mean(tf.cast(correct_pred, \"float\")) # Cast boolean to float to average\n", 94 | " # Add scalar summary for accuracy\n", 95 | " tf.summary.scalar(\"accuracy\", acc_op)" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": null, 101 | "metadata": { 102 | "collapsed": false 103 | }, 104 | "outputs": [], 105 | "source": [ 106 | "with tf.Session() as sess:\n", 107 | " # create a log writer. 
run 'tensorboard --logdir=./logs/nn_logs'\n", 108 | " writer = tf.summary.FileWriter(\"./logs/nn_logs\", sess.graph) # for 1.0\n", 109 | " merged = tf.summary.merge_all()\n", 110 | "\n", 111 | " # you need to initialize all variables\n", 112 | " tf.global_variables_initializer().run()\n", 113 | "\n", 114 | " for i in range(100):\n", 115 | " for start, end in zip(range(0, len(trX), 128), range(128, len(trX)+1, 128)):\n", 116 | " sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end],\n", 117 | " p_keep_input: 0.8, p_keep_hidden: 0.5})\n", 118 | " summary, acc = sess.run([merged, acc_op], feed_dict={X: teX, Y: teY,\n", 119 | " p_keep_input: 1.0, p_keep_hidden: 1.0})\n", 120 | " writer.add_summary(summary, i) # Write summary\n", 121 | " print(i, acc) # Report the accuracy" 122 | ] 123 | }, 124 | { 125 | "cell_type": "code", 126 | "execution_count": null, 127 | "metadata": { 128 | "collapsed": true 129 | }, 130 | "outputs": [], 131 | "source": [] 132 | } 133 | ], 134 | "metadata": { 135 | "kernelspec": { 136 | "display_name": "Python 2", 137 | "language": "python", 138 | "name": "python2" 139 | }, 140 | "language_info": { 141 | "codemirror_mode": { 142 | "name": "ipython", 143 | "version": 2 144 | }, 145 | "file_extension": ".py", 146 | "mimetype": "text/x-python", 147 | "name": "python", 148 | "nbconvert_exporter": "python", 149 | "pygments_lexer": "ipython2", 150 | "version": "2.7.13" 151 | } 152 | }, 153 | "nbformat": 4, 154 | "nbformat_minor": 1 155 | } 156 | -------------------------------------------------------------------------------- /09_tensorboard.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import tensorflow as tf 4 | from tensorflow.examples.tutorials.mnist import input_data 5 | 6 | def init_weights(shape, name): 7 | return tf.Variable(tf.random_normal(shape, stddev=0.01), name=name) 8 | 9 | # This network is the same as the previous one except with an extra hidden layer + dropout 10 | def model(X, w_h, w_h2, w_o, p_keep_input, p_keep_hidden): 11 | # Add layer name scopes for better graph visualization 12 | with tf.name_scope("layer1"): 13 | X = tf.nn.dropout(X, p_keep_input) 14 | h = tf.nn.relu(tf.matmul(X, w_h)) 15 | with tf.name_scope("layer2"): 16 | h = tf.nn.dropout(h, p_keep_hidden) 17 | h2 = tf.nn.relu(tf.matmul(h, w_h2)) 18 | with tf.name_scope("layer3"): 19 | h2 = tf.nn.dropout(h2, p_keep_hidden) 20 | return tf.matmul(h2, w_o) 21 | 22 | mnist = input_data.read_data_sets("MNIST_data/", one_hot=True) 23 | trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels 24 | 25 | X = tf.placeholder("float", [None, 784], name="X") 26 | Y = tf.placeholder("float", [None, 10], name="Y") 27 | 28 | w_h = init_weights([784, 625], "w_h") 29 | w_h2 = init_weights([625, 625], "w_h2") 30 | w_o = init_weights([625, 10], "w_o") 31 | 32 | # Add histogram summaries for weights 33 | tf.summary.histogram("w_h_summ", w_h) 34 | tf.summary.histogram("w_h2_summ", w_h2) 35 | tf.summary.histogram("w_o_summ", w_o) 36 | 37 | p_keep_input = tf.placeholder("float", name="p_keep_input") 38 | p_keep_hidden = tf.placeholder("float", name="p_keep_hidden") 39 | py_x = model(X, w_h, w_h2, w_o, p_keep_input, p_keep_hidden) 40 | 41 | with tf.name_scope("cost"): 42 | cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y)) 43 | train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(cost) 44 | # Add scalar summary for cost 45 | tf.summary.scalar("cost", cost) 46 
| 47 | with tf.name_scope("accuracy"): 48 | correct_pred = tf.equal(tf.argmax(Y, 1), tf.argmax(py_x, 1)) # Count correct predictions 49 | acc_op = tf.reduce_mean(tf.cast(correct_pred, "float")) # Cast boolean to float to average 50 | # Add scalar summary for accuracy 51 | tf.summary.scalar("accuracy", acc_op) 52 | 53 | with tf.Session() as sess: 54 | # create a log writer. run 'tensorboard --logdir=./logs/nn_logs' 55 | writer = tf.summary.FileWriter("./logs/nn_logs", sess.graph) # for 1.0 56 | merged = tf.summary.merge_all() 57 | 58 | # you need to initialize all variables 59 | tf.global_variables_initializer().run() 60 | 61 | for i in range(100): 62 | for start, end in zip(range(0, len(trX), 128), range(128, len(trX)+1, 128)): 63 | sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end], 64 | p_keep_input: 0.8, p_keep_hidden: 0.5}) 65 | summary, acc = sess.run([merged, acc_op], feed_dict={X: teX, Y: teY, 66 | p_keep_input: 1.0, p_keep_hidden: 1.0}) 67 | writer.add_summary(summary, i) # Write summary 68 | print(i, acc) # Report the accuracy 69 | writer.close() -------------------------------------------------------------------------------- /10_save_restore_net.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import tensorflow as tf\n", 12 | "import numpy as np\n", 13 | "from tensorflow.examples.tutorials.mnist import input_data\n", 14 | "import os" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": null, 20 | "metadata": { 21 | "collapsed": false 22 | }, 23 | "outputs": [], 24 | "source": [ 25 | "# This shows how to save/restore your model (trained variables).\n", 26 | "# To see how it works, please stop this program during training and resart.\n", 27 | "# This network is the same as 3_net.py\n", 28 | "\n", 29 | "def init_weights(shape):\n", 30 | " return tf.Variable(tf.random_normal(shape, stddev=0.01))\n", 31 | "\n", 32 | "def model(X, w_h, w_h2, w_o, p_keep_input, p_keep_hidden): # this network is the same as the previous one except with an extra hidden layer + dropout\n", 33 | " X = tf.nn.dropout(X, p_keep_input)\n", 34 | " h = tf.nn.relu(tf.matmul(X, w_h))\n", 35 | "\n", 36 | " h = tf.nn.dropout(h, p_keep_hidden)\n", 37 | " h2 = tf.nn.relu(tf.matmul(h, w_h2))\n", 38 | "\n", 39 | " h2 = tf.nn.dropout(h2, p_keep_hidden)\n", 40 | "\n", 41 | " return tf.matmul(h2, w_o)\n", 42 | "\n", 43 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)\n", 44 | "trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 3, 50 | "metadata": { 51 | "collapsed": true 52 | }, 53 | "outputs": [], 54 | "source": [ 55 | "X = tf.placeholder(\"float\", [None, 784])\n", 56 | "Y = tf.placeholder(\"float\", [None, 10])\n", 57 | "\n", 58 | "w_h = init_weights([784, 625])\n", 59 | "w_h2 = init_weights([625, 625])\n", 60 | "w_o = init_weights([625, 10])\n", 61 | "\n", 62 | "p_keep_input = tf.placeholder(\"float\")\n", 63 | "p_keep_hidden = tf.placeholder(\"float\")\n", 64 | "py_x = model(X, w_h, w_h2, w_o, p_keep_input, p_keep_hidden)\n", 65 | "\n", 66 | "cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y))\n", 67 | "train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(cost)\n", 68 | "predict_op = tf.argmax(py_x, 1)\n", 
69 | "\n", 70 | "ckpt_dir = \"./ckpt_dir\"\n", 71 | "if not os.path.exists(ckpt_dir):\n", 72 | " os.makedirs(ckpt_dir)\n", 73 | "\n", 74 | "global_step = tf.Variable(0, name='global_step', trainable=False)\n", 75 | "\n", 76 | "# Call this after declaring all tf.Variables.\n", 77 | "saver = tf.train.Saver()\n", 78 | "\n", 79 | "# This variable won't be stored, since it is declared after tf.train.Saver()\n", 80 | "non_storable_variable = tf.Variable(777)" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": null, 86 | "metadata": { 87 | "collapsed": false 88 | }, 89 | "outputs": [], 90 | "source": [ 91 | "# Launch the graph in a session\n", 92 | "with tf.Session() as sess:\n", 93 | " # you need to initialize all variables\n", 94 | " tf.global_variables_initializer().run()\n", 95 | "\n", 96 | " ckpt = tf.train.get_checkpoint_state(ckpt_dir)\n", 97 | " if ckpt and ckpt.model_checkpoint_path:\n", 98 | " print(ckpt.model_checkpoint_path)\n", 99 | " saver.restore(sess, ckpt.model_checkpoint_path) # restore all variables\n", 100 | "\n", 101 | " start = global_step.eval() # get last global_step\n", 102 | " print(\"Start from:\", start)\n", 103 | "\n", 104 | " for i in range(start, 100):\n", 105 | " for start, end in zip(range(0, len(trX), 128), range(128, len(trX)+1, 128)):\n", 106 | " sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end],\n", 107 | " p_keep_input: 0.8, p_keep_hidden: 0.5})\n", 108 | "\n", 109 | " global_step.assign(i).eval() # set and update(eval) global_step with index, i\n", 110 | " saver.save(sess, ckpt_dir + \"/model.ckpt\", global_step=global_step)\n", 111 | " print(i, np.mean(np.argmax(teY, axis=1) ==\n", 112 | " sess.run(predict_op, feed_dict={X: teX, Y: teY,\n", 113 | " p_keep_input: 1.0,\n", 114 | " p_keep_hidden: 1.0})))" 115 | ] 116 | }, 117 | { 118 | "cell_type": "code", 119 | "execution_count": null, 120 | "metadata": { 121 | "collapsed": true 122 | }, 123 | "outputs": [], 124 | "source": [] 125 | } 126 | ], 127 | "metadata": { 128 | "kernelspec": { 129 | "display_name": "Python 2", 130 | "language": "python", 131 | "name": "python2" 132 | }, 133 | "language_info": { 134 | "codemirror_mode": { 135 | "name": "ipython", 136 | "version": 2 137 | }, 138 | "file_extension": ".py", 139 | "mimetype": "text/x-python", 140 | "name": "python", 141 | "nbconvert_exporter": "python", 142 | "pygments_lexer": "ipython2", 143 | "version": "2.7.13" 144 | } 145 | }, 146 | "nbformat": 4, 147 | "nbformat_minor": 1 148 | } 149 | -------------------------------------------------------------------------------- /10_save_restore_net.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import tensorflow as tf 4 | import numpy as np 5 | from tensorflow.examples.tutorials.mnist import input_data 6 | import os 7 | 8 | # This shows how to save/restore your model (trained variables). 9 | # To see how it works, please stop this program during training and resart. 
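# (In short, what makes the restart work: tf.train.Saver() below is created after all
#  tf.Variables, so the checkpoint it writes into ckpt_dir also contains the non-trainable
#  global_step counter; on a fresh run, saver.restore() reloads everything and the
#  training loop resumes from start = global_step.eval() instead of from epoch 0.)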
10 | # This network is the same as 3_net.py 11 | 12 | def init_weights(shape): 13 | return tf.Variable(tf.random_normal(shape, stddev=0.01)) 14 | 15 | 16 | def model(X, w_h, w_h2, w_o, p_keep_input, p_keep_hidden): # this network is the same as the previous one except with an extra hidden layer + dropout 17 | X = tf.nn.dropout(X, p_keep_input) 18 | h = tf.nn.relu(tf.matmul(X, w_h)) 19 | 20 | h = tf.nn.dropout(h, p_keep_hidden) 21 | h2 = tf.nn.relu(tf.matmul(h, w_h2)) 22 | 23 | h2 = tf.nn.dropout(h2, p_keep_hidden) 24 | 25 | return tf.matmul(h2, w_o) 26 | 27 | 28 | mnist = input_data.read_data_sets("MNIST_data/", one_hot=True) 29 | trX, trY, teX, teY = mnist.train.images, mnist.train.labels, mnist.test.images, mnist.test.labels 30 | 31 | X = tf.placeholder("float", [None, 784]) 32 | Y = tf.placeholder("float", [None, 10]) 33 | 34 | w_h = init_weights([784, 625]) 35 | w_h2 = init_weights([625, 625]) 36 | w_o = init_weights([625, 10]) 37 | 38 | p_keep_input = tf.placeholder("float") 39 | p_keep_hidden = tf.placeholder("float") 40 | py_x = model(X, w_h, w_h2, w_o, p_keep_input, p_keep_hidden) 41 | 42 | cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=py_x, labels=Y)) 43 | train_op = tf.train.RMSPropOptimizer(0.001, 0.9).minimize(cost) 44 | predict_op = tf.argmax(py_x, 1) 45 | 46 | 47 | ckpt_dir = "./ckpt_dir" 48 | if not os.path.exists(ckpt_dir): 49 | os.makedirs(ckpt_dir) 50 | 51 | global_step = tf.Variable(0, name='global_step', trainable=False) 52 | 53 | # Call this after declaring all tf.Variables. 54 | saver = tf.train.Saver() 55 | 56 | # This variable won't be stored, since it is declared after tf.train.Saver() 57 | non_storable_variable = tf.Variable(777) 58 | 59 | # Launch the graph in a session 60 | with tf.Session() as sess: 61 | # you need to initialize all variables 62 | tf.global_variables_initializer().run() 63 | 64 | ckpt = tf.train.get_checkpoint_state(ckpt_dir) 65 | if ckpt and ckpt.model_checkpoint_path: 66 | print(ckpt.model_checkpoint_path) 67 | saver.restore(sess, ckpt.model_checkpoint_path) # restore all variables 68 | 69 | start = global_step.eval() # get last global_step 70 | print("Start from:", start) 71 | 72 | for i in range(start, 100): 73 | for start, end in zip(range(0, len(trX), 128), range(128, len(trX)+1, 128)): 74 | sess.run(train_op, feed_dict={X: trX[start:end], Y: trY[start:end], 75 | p_keep_input: 0.8, p_keep_hidden: 0.5}) 76 | 77 | global_step.assign(i).eval() # set and update(eval) global_step with index, i 78 | saver.save(sess, ckpt_dir + "/model.ckpt", global_step=global_step) 79 | print(i, np.mean(np.argmax(teY, axis=1) == 80 | sess.run(predict_op, feed_dict={X: teX, 81 | p_keep_input: 1.0, 82 | p_keep_hidden: 1.0}))) 83 | -------------------------------------------------------------------------------- /11_gan.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Generative Adversarial Networks\n", 8 | "\n", 9 | "This notebook implements a very basic GAN with MLPs for the 2 networks." 
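For reference, the two networks are trained on the standard minimax objective of the original GAN paper, cited as [2] further down:

$$ \min_G \max_D \; \mathbb{E}_{x \sim p_{data}}[\log D(x)] + \mathbb{E}_{z \sim p_z}[\log (1 - D(G(z)))] $$

The Cost section below implements the practical per-batch form of this objective.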
10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": 1, 15 | "metadata": { 16 | "collapsed": true 17 | }, 18 | "outputs": [], 19 | "source": [ 20 | "%matplotlib inline\n", 21 | "\n", 22 | "import tensorflow as tf\n", 23 | "import numpy as np\n", 24 | "import matplotlib.pyplot as plt\n", 25 | "from tensorflow.examples.tutorials.mnist import input_data" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": 2, 31 | "metadata": { 32 | "collapsed": false 33 | }, 34 | "outputs": [ 35 | { 36 | "name": "stdout", 37 | "output_type": "stream", 38 | "text": [ 39 | "Extracting MNIST_data/train-images-idx3-ubyte.gz\n", 40 | "Extracting MNIST_data/train-labels-idx1-ubyte.gz\n", 41 | "Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n", 42 | "Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n" 43 | ] 44 | } 45 | ], 46 | "source": [ 47 | "mnist = input_data.read_data_sets(\"MNIST_data/\")\n", 48 | "images = mnist.train.images" 49 | ] 50 | }, 51 | { 52 | "cell_type": "markdown", 53 | "metadata": {}, 54 | "source": [ 55 | "## Weight initialisation\n", 56 | "\n", 57 | "The weights will be initiliased using the Xavier initialisation method [1]. In this case, this is just a Gaussian distribution with a custom standard deviation: the standard deviation is inversely proportional to the number of neurons feeding into the neuron.\n", 58 | "\n", 59 | "$$ \\mathbf{w}_i \\sim \\mathcal{N}(0, \\frac{1}{n_{i-1}}) $$\n", 60 | "\n", 61 | "where $n_{i-1}$ is the number of inputs that feed into the current neuron.\n", 62 | "\n", 63 | "I also tried with regular Gaussian (i.e. constant $\\sigma$) and with uniform distribution, but I did not manage to get the network learning.\n", 64 | "\n", 65 | "[1] *Glorot, Xavier, and Yoshua Bengio. \"Understanding the difficulty of training deep feedforward neural networks.\" Aistats. Vol. 9. 
2010.*" 66 | ] 67 | }, 68 | { 69 | "cell_type": "code", 70 | "execution_count": 3, 71 | "metadata": { 72 | "collapsed": true 73 | }, 74 | "outputs": [], 75 | "source": [ 76 | "def xavier_initializer(shape):\n", 77 | " return tf.random_normal(shape=shape, stddev=1/shape[0])" 78 | ] 79 | }, 80 | { 81 | "cell_type": "markdown", 82 | "metadata": {}, 83 | "source": [ 84 | "## Architecture" 85 | ] 86 | }, 87 | { 88 | "cell_type": "code", 89 | "execution_count": 4, 90 | "metadata": { 91 | "collapsed": true 92 | }, 93 | "outputs": [], 94 | "source": [ 95 | "# Generator\n", 96 | "z_size = 100 # Latent vector dimension\n", 97 | "g_w1_size = 400 \n", 98 | "g_out_size = 28 * 28\n", 99 | "\n", 100 | "# Discriminator\n", 101 | "x_size = 28 * 28\n", 102 | "d_w1_size = 400\n", 103 | "d_out_size = 1" 104 | ] 105 | }, 106 | { 107 | "cell_type": "code", 108 | "execution_count": 5, 109 | "metadata": { 110 | "collapsed": true 111 | }, 112 | "outputs": [], 113 | "source": [ 114 | "z = tf.placeholder('float', shape=(None, z_size))\n", 115 | "X = tf.placeholder('float', shape=(None, x_size))" 116 | ] 117 | }, 118 | { 119 | "cell_type": "markdown", 120 | "metadata": {}, 121 | "source": [ 122 | "## Weights" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": 6, 128 | "metadata": { 129 | "collapsed": true 130 | }, 131 | "outputs": [], 132 | "source": [ 133 | "g_weights = {\n", 134 | " 'w1': tf.Variable(xavier_initializer(shape=(z_size, g_w1_size))),\n", 135 | " 'b1': tf.Variable(tf.zeros(shape=[g_w1_size])),\n", 136 | " 'out': tf.Variable(xavier_initializer(shape=(g_w1_size, g_out_size))),\n", 137 | " 'b2': tf.Variable(tf.zeros(shape=[g_out_size])),\n", 138 | "}\n", 139 | "\n", 140 | "d_weights ={\n", 141 | " 'w1': tf.Variable(xavier_initializer(shape=(x_size, d_w1_size))),\n", 142 | " 'b1': tf.Variable(tf.zeros(shape=[d_w1_size])),\n", 143 | " 'out': tf.Variable(xavier_initializer(shape=(d_w1_size, d_out_size))),\n", 144 | " 'b2': tf.Variable(tf.zeros(shape=[d_out_size])),\n", 145 | "}" 146 | ] 147 | }, 148 | { 149 | "cell_type": "markdown", 150 | "metadata": {}, 151 | "source": [ 152 | "## Models\n", 153 | "\n", 154 | "The models were chosen to be very simple, so just an MLP with 1 hidden layer and 1 output layer." 155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": 7, 160 | "metadata": { 161 | "collapsed": false 162 | }, 163 | "outputs": [], 164 | "source": [ 165 | "def G(z, w=g_weights):\n", 166 | " h1 = tf.nn.relu(tf.matmul(z, w['w1']) + w['b1'])\n", 167 | " return tf.sigmoid(tf.matmul(h1, w['out']) + w['b2'])\n", 168 | "\n", 169 | "def D(x, w=d_weights):\n", 170 | " h1 = tf.nn.relu(tf.matmul(x, w['w1']) + w['b1'])\n", 171 | " return tf.sigmoid(tf.matmul(h1, w['out']) + w['b2'])" 172 | ] 173 | }, 174 | { 175 | "cell_type": "markdown", 176 | "metadata": {}, 177 | "source": [ 178 | "## Latent distribution\n", 179 | "\n", 180 | "This function generates a prior for G." 
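A minimal usage sketch, assuming an active tf.Session named sess and using the generate_z helper plus the sample = G(z) op defined in the next two cells: a batch of generated digits can be pulled out and viewed as 28x28 images with

    imgs = sess.run(sample, feed_dict={z: generate_z(16)})  # array of shape (16, 784)
    imgs = imgs.reshape(-1, 28, 28)                         # one 28x28 image per row

since the generator's output layer has g_out_size = 28 * 28 sigmoid units.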
181 | ] 182 | }, 183 | { 184 | "cell_type": "code", 185 | "execution_count": 8, 186 | "metadata": { 187 | "collapsed": true 188 | }, 189 | "outputs": [], 190 | "source": [ 191 | "def generate_z(n=1):\n", 192 | " return np.random.normal(size=(n, z_size))" 193 | ] 194 | }, 195 | { 196 | "cell_type": "code", 197 | "execution_count": 9, 198 | "metadata": { 199 | "collapsed": false 200 | }, 201 | "outputs": [], 202 | "source": [ 203 | "sample = G(z) # To be called during session" 204 | ] 205 | }, 206 | { 207 | "cell_type": "markdown", 208 | "metadata": {}, 209 | "source": [ 210 | "## Cost\n", 211 | "\n", 212 | "The cost functions are the ones used in the original GAN paper [2], using the suggestion of switching the loss for G from minimising $\\frac{1}{m}\\sum_{i=1}^m(1-D(G(\\mathbf{z}))$ to maximising $\\frac{1}{m}\\sum_{i=1}^m(D(G(\\mathbf{z}))$.\n", 213 | "\n", 214 | "Note that because both need to be maximised, and TF is designed to minimise, we take the negative values below.\n", 215 | "\n", 216 | "\n", 217 | "[2] *Goodfellow, Ian, et al. \"Generative adversarial nets.\" Advances in neural information processing systems. 2014.*\n" 218 | ] 219 | }, 220 | { 221 | "cell_type": "code", 222 | "execution_count": 10, 223 | "metadata": { 224 | "collapsed": false 225 | }, 226 | "outputs": [], 227 | "source": [ 228 | "G_objective = -tf.reduce_mean(tf.log(D(G(z))))\n", 229 | "D_objective = -tf.reduce_mean(tf.log(D(X)) + tf.log(1 - D(G(z))))" 230 | ] 231 | }, 232 | { 233 | "cell_type": "markdown", 234 | "metadata": {}, 235 | "source": [ 236 | "## Optimisation\n", 237 | "\n", 238 | "Note that each of the optimiser takes a `var_list` argument to only consider the variables provided. This is because we don't want D to train G when D is trained, but rather freeze the weights from G and only concern about D (and the same for G)." 
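A sketch of how these two optimisers are typically alternated once G_opt and D_opt are built in the next cell: one discriminator update and one generator update per batch. n_steps and batch_size are illustrative names; X, z, mnist, generate_z and the weight dictionaries are the ones defined in this notebook.

    n_steps, batch_size = 1000, 100   # illustrative values, not taken from the notebook
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        for step in range(n_steps):
            batch_X, _ = mnist.train.next_batch(batch_size)
            # D update: G's weights stay fixed, since D_opt only touches d_weights
            sess.run(D_opt, feed_dict={X: batch_X, z: generate_z(batch_size)})
            # G update: D's weights stay fixed, since G_opt only touches g_weights
            sess.run(G_opt, feed_dict={z: generate_z(batch_size)})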
239 | ] 240 | }, 241 | { 242 | "cell_type": "code", 243 | "execution_count": 11, 244 | "metadata": { 245 | "collapsed": false 246 | }, 247 | "outputs": [], 248 | "source": [ 249 | "G_opt = tf.train.AdamOptimizer().minimize(\n", 250 | " G_objective, var_list=g_weights.values())\n", 251 | "D_opt = tf.train.AdamOptimizer().minimize(\n", 252 | " D_objective, var_list=d_weights.values())" 253 | ] 254 | }, 255 | { 256 | "cell_type": "markdown", 257 | "metadata": {}, 258 | "source": [ 259 | "## Training" 260 | ] 261 | }, 262 | { 263 | "cell_type": "code", 264 | "execution_count": 13, 265 | "metadata": { 266 | "collapsed": false 267 | }, 268 | "outputs": [ 269 | { 270 | "data": { 271 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAWEAAAFfCAYAAACfj30KAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJztnXuMLNtV3r893fM4Z849XMWOH8FB2BgHLITlXAKxwODI\nSA6OZEAgiEEBgyKF8BBCCliWSGxwEguQLYeHIyLeCiDxCAIk40sg2MSAcTCBYBNsQWwMmHvxA91z\n7pmZ7p6enT96Vt/VX6+1965+nJrH+qRSVe2urq5H169WfXvtXSnnjFAoFAr1o52+NyAUCoWuswLC\noVAo1KMCwqFQKNSjAsKhUCjUowLCoVAo1KMCwqFQKNSjAsKhUCjUowLCoVAo1KMCwqFQKNSjhn1v\nQErpSQBeAuADAE763ZpQKBTaiA4AfCKAh3POHy0tuDUIp5S+AcC/AfA0AH8I4Jtyzv/LWPQlAH5y\nW9sRCoVCPeorAfxUaYGt2BEppS8H8HoArwbwfMwg/HBK6cnG4h/YxjaEQqHQBdAHagtsyxP+FgA/\nmHP+iZzznwD4OgBHAL7WWDYsiFAodFVV5dvGIZxS2gXwEIBfl7I866rt1wC8YNO/FwqFQpdZ24iE\nnwxgAOBRKn8UM384FAqFQue6nylqCUB0XhwKhUJK24DwRwBMATyVyp+C5eg4FAqFrrU2DuGc8wTA\nuwC8WMpSSul8/rc3/XuhUCh0mbWtPOE3APjxlNK7ALwTs2yJmwB+bEu/FwqFQpdSW4FwzvlnznOC\nvxMzW+IPALwk5/zhbfxeKBQKXValvl/0mVL6h5jZF6FQKHTV9FDO+fdLC0QHPqFQKNSjAsKhUCjU\nowLCoVAo1KMCwqFQKNSjAsKhUCjUowLCoVAo1KMCwqFQKNSjAsKhUCjUowLCoVAo1KMCwqFQKNSj\nAsKhUCjUowLCoVAo1KMCwqFQKNSjAsKhUCjUowLCoVAo1KMCwqFQKNSjAsKhUCjUowLCoVAo1KMC\nwqFQKNSjAsKhUCjUowLCoVAo1KMCwqFQKNSjAsKhUCjUowLCoVAo1KMCwqFQKNSjAsKhUCjUowLC\noVAo1KMCwqFQKNSjAsKhUCjUowLCoVAo1KOGfW9AKBTqXyml4uc55/u0JddPAeFQ6BqrBl9ruQDy\nZhUQDoWuqVoBXPpeAHl9BYRDoWsoC8BWWQ2y8p2A8eqKirlQKORGxSml+bDK90N1BYRDoWsmBqae\nL8G0BuMA8WoKCIdC11gWgGvRb8B2swoIh0LXSF7U2xW4AejNKSrmQpderUBoAZCOBlu/s2lxJZee\nL1WAdakcW2c98rlXKZdSioq6DgoIhy6NSrC0yvRjtTXwMjs7O03T/Lst85YsUOWc5wPP81BaF4O7\ndTg7O1v4fs7ZhW0JxgHidgWEQxdOrZVDHkz1vAB0Z2dnYdr6TA+DwcAt49/h7eayFhhp6J2dnTWN\n+Xul8dnZmTlMp9OFeb29fDPg7eV9C/CupoBw6MKoxVaoRbcc2dbgqucHgwGGwyEGg0FxEBDrbbPA\n7O2PByoBbOtgRcze9HQ6bR68qJsha0XJepmAcpsCwqELoZbHeQuyJTtBwKrh6UF1OBw2DfKdLhE5\n4FsPPC+RqQBRR6tcVrMtdPnp6Wl1EGi2WiB6HwK4qysgHOpdXfJWW20GGQScLePd3d2lwSrXEPYA\nrMu1at6tgPb09HQhOuV5jlh1ZGwNp6enGI/HmEwmC4OO6nUkbvnE1nZb8A0gd9PGIZxSejWAV1Px\nn+Scn7vp3wpdXnmRrjdvWRKW3aBBXIpoBbB6fm9vbz6WQc/L9HA47GSLAG0ZDzpiFfDyoMtLAOay\n8Xi8NLC1It9LKS34zzs7O0sw1t8R6AaQV9O2IuF3A3gxALlyTrf0O6FLpFqFW8t0l4o0HcVyRGtF\nuPv7+9jb28P+/r47LTButUUsH5WnGcKTyWRh7JVxRZ03fXZ2hvF4jNFohNFoNI/+LQBPp1Nz+wTG\nsk4GrzcO1bUtCJ/mnD+8pXWHLplK8C0t62UbWL6vNdagFXhyxKsBvL+/j4ODg+pYQ5itEAvCIi9z\nQaZzzkt2QWlg+8Aay/TJyUkVvqenp/OoV6JhAAsAZvslQLu+tgXhT04p/RWAEwC/A+BVOee/2NJv\nhS6wWgDswdYbW/6vVfmmoVsb9vf3cePGDRwcHODg4GA+bZXt7e2ZAPbGQFsqmdgGk8nEHHMZZ0qU\n5iX6r0F4MBgAwJIPrAHM8I0oeD1tA8LvAPAKAO8F8HQArwHwmymlT8s539vC74UuoLpEv953PQBb\ntoSV9cCRcMluEMjKcPPmTXP6xo0b2N/fNysFvekWAAOY2wZiHVg+ri730tas8uFwWISvWB2DwcDN\ngij53eEHr66NQzjn/LCafXdK6Z0A/hzAlwH40U3/XuhiqBW63nJepgFPa8DWMh40ZGuDQFiAe/Pm\nzYVpPd7f3y/C14uEWyE8Go2wv7+/BN3RaIS9vb15WUvjCxnkd/RyAmCxN+TYWpWI0lJQgBvanLae\nopZzfiyl9D4Az972b4Xuj7pmNrSsr1SxpQcvw0FXtklZLfplSLMFoSviJDVNP85b28nybAdvWqd+\nsd0yHA4XoMrQnU6nc++Wy0s3Ls6DFtBKJZ3eRss60suEumvrEE4p3QLwSQB+Ytu/Fdq+alkMq0RJ\nrd4qV7Zx5ZpVAac939K0VQmnU9NKAPb2Wz+md4WwrFMALJaCbrKsK8wEwAJfqVibTqfzm5d+ivDG\nw+EMCVbLuVJlo6UAc5u2kSf8PQB+GTML4uMBfAdmKWo/ve
nfCt1fdalAs75XWq+X88uP+K0VbV0H\nHRHv7e0tRcK6UqsWAeuIsQRanuY8XI6EpYJNPteRrwBYxgJhWa7VwhkOh270LuvT+2kp4NtN24iE\nnwHgpwA8CcCHAbwdwD/OOX90C78V6klepZkua1Vrvw47OzsL6WQ8WDD1GlxwmpoVJWsAa4Bx3xFe\nZZU+Fh6APSjzcfGiZA1fDWBtJ6SUliosS5aE9pDFD+Y0NS0r5S7Urm1UzL180+sMXRy1VJ6tCmHO\n9bXSzrR1oO0DnhcQl/KEa5/paQawt+8shq0HYQuyOhrWFWZWdoiGLw/T6bTJC9Zj4Am7QwYLwKH1\nFX1HhKryLAYLRCUgldbPLd28HszEKrDyeTmnl2FbG6wKPh0Jsx3Bx6cUIVoNM7yBAewdK93AwrNJ\nZJssAHtRsdXoo7UBSqi7AsKhohg21oXoXfit8nJ9LUDoSJjzeK18Xm6izIDV0zVQ6cjcO0Yl1eBr\n5dnq35J5bT9oKJfOQyuAh8PhvFJOsiusSshS5K/HoboCwiFXLRVqpaH1QuQKKOtxWQYrr/fmzZs4\nPDycT8uwv79fTWnTAC51e8ketailcqoFvqUUNZ6XBiDS/WQNwACWbiYegHXlnIa99QRQOwahNgWE\nQ6Y8AJeg25K2JfIqoCwIW7m/GsKHh4dLgwXh0mB1AFRqiKGPh9W4QU93BbCsV2ArvyHzbFu0HPda\nJKynJc1tMBjMQVyqfLT2u1QWWlRAOLSkEoB53gJBy+M5g8wDMVsHnh1xeHiIW7du4datW/Pp/f19\nEzLW9GCw3Eewd6MR1SDUBcY6/5ePL69HxqXtZJUiYcuOEAhbIC7tc6i7AsKhJlkesDfdFcQclVoZ\nDDLoSjm2JLQ1cXh4OI+EGTzWNHuvtemuEa4FXKvMutGVzoHcGLiDHY7ea4P1JNAKeFaAuZsCwqGt\nybuAuczrYtIa37hxY8FykAo4q2EFg8UDiZW9UJKutPL6a9BlFnytvoC96NY7ll3eG6dfjeRtc6mS\n0Cv3znuAuF0B4dBWxNGYNS3jllZwDGEBsETEXk5vS6UV4NfqW3DmDnBKY51p0DL2jpFVZsHUuyF4\nMPZ6YisBmc9zAHc9BYRDa0k/QvPjdKnlmy7zWqtZfTwcHByYqWjyeamizau8srzb0vTZ2dm85zHd\nC5me9t6AUep4XUO41HpQ5q1o2iurAbhml1igZQAHkFdTQDjUWZZ3yeJI2GuAIRkPLT2dCYS93s5K\ndkRLFkGrpzudTs2+fnkQGJf6+2UQe41UrBQ62WYeW2X8pma2S1qiYA+wJfgGmOsKCIe2Ig3hWu4v\nd67e0vevNba6nCxVNGmVfFueljcXyzvbTk5O5tNcxhCuWQC1zAX9uRxnPu7WfIsV4QHYAntocwoI\nh7YmC8BWM+Eu73fzXr4pg2dHtPrBnlWgp+UVQycnJ/Ph+Ph4YSzTo9GoCcKybs6LLjUyKaXRWZV4\nVhRcqjzsUhnHigi4XQHh0EbFNfwaxF4WhNcZj1VuVdZZvZ5x7m+pwYGe1paDB0uB8Gg0wvHxMY6P\nj3F0dISjo6P5tIxPTk6qFWdSnnPu1NdFqUEJDxZ8WzIkrGi4pIBvdwWEQ1uTrkTiHGA98As1uXMe\nPV/qeEfPWxFwrcEB+7UWrCTzQSB8cnKCo6Mj3Lt3b2GQsuPjY3Md1nTO2e1i05r2mlZb07UMiZoP\n7kmgG/BdXQHh0NqyKuo4EmYAW+9383pF09N7e3tFf5kr5VobHHB2AKd36WmOhAW4jz/++MJw9+5d\nHB8fL3yf4avHOWf3DSA168Ua68q+kifsVRauAuVQdwWEQxuXtiTYF9ZNj623HNcGeQzvkkHADR50\nmZYVAVsDe8Iawnfu3MHdu3dx9+5d3LlzB0dHR0sgt+Au0KtVSupjV+qQ6Oxs9jokkfV7NV+4poh+\nN6OAcGhBLc1SreazPK89SqtijkFsWRHWK+jFavA63OG0NJa2HjREdBqX1eBCT4sNob3ge/fuLcD3\nzp07eOyxx3Dv3r0loNcgbHVOr8skB1lbMHpeQ1XU2mBjHUsitJoCwqG5GKb8GbfYKrXqsvpo8PxL\nyybQj8gCwdPTUwCYtxaz4KvLBQ5W3qyMZfrs7Gz+G/r3eH46neLk5GQO3Lt37+Lxxx+f+7+SmjYe\njxcaa1gWBGdiWPutbwKDwQCTyWS+f11AqfOWed9KFkVL3nAAeD0FhEMAyi/xlGmrFt6bZ3/WA7EV\ntVogktZoOdutyiwos89rpaDpbAgNp9IwGo2W/F+pjNMQnkwmVQvAsgI8AHNlowdeC5oMXw1gq5KO\nASzr1ePQZhQQDhUBzH5qKQ2KIViLgK1WbSINJQ0jAEXw6s+sbIdS3woCeoGUNz0ajRYyIGRa0tU0\nhHUk3Bpp8hOAZbOwBVCLhjkK1haLdVysm5cF5ND6CgiHlmRVYFler9evgY6ErWjYgjbn7loAlui2\nBF79mfZ5a4OGbalPCIEw5wMLgC07QoO1NO1lZkhlYK21X6nM6tPCioS9pwfLwpF5PQ51V0A4BMDP\nINDTlgVhdchj9Q9RsyUssFgRIUOYwau3oeTzcjnDVkeyGmCTyWReMWe1mPPsiBZLhG8+kt/LrzHi\n42RFxLxMyQ+utaJr8Zz1b4W6KSAcmsvLctDzVhRqNQ5g8LZExOx1WnYBQ9iLxOW7tahWj3XllZ62\nPuO+IrjfCA3x0qO9LgfsNLnWCNiCr4j316qY0zeM1sFTwLhdAeHQgryGDZ4dUfJ6SxEwf5ftCA2h\nnZ2deTRoQdiLhs/OzuYwtABbGtfKvHldru0I65He81r1fntNrUsAtgBYsyJaU9YsBXDXU0D4mqvW\nkkwv01oRZ7XkqmVI1OwI+dwDrrU90+lyl5NiFXjzHlC5zLIwvChb9kfG3rTsnxUF6+NSqiTzpi1P\n2EpR82wTzxOuKQBdV0A4tCCvma+OykrwbYmELQvBi/QYRFYk7EXB3OVkbfDgzJ8JyLwGHXqsrQbZ\nfj22pq2bDx8XySvWKgHPA7DOkGjJ3ChVxAVwV1NAOLQkD8RerrBlQ3gv1fQ8ZCtF7exs9vLK6XS6\nUNYCYbEwuN9fHnS5Bq4ec5nuI5gBZj3Wa9VAxfvspe0NBoOF9dXW6+U8d8kVrt1AQqspIBwC0C0C\n9jxgDeBSipoFXw/AXCYt7LpCmPv8tfr/1cDVUOZoWRqNeAM/wnc5B3rfBca8boa7lU7I4kq5VbIi\nahWBtfKQrYDwNZF1YXITYytC9QBasx64S0b5XING0s6m0+mSPVGaL90k9PRkMjEjXi8i9jxfryFD\nS8ZAzv6roLi8lIdtDdz5u/UmDhlkWxjkMkhKn46Cd3Z2Fm4KYvMw7GWf5dxyQ5JQWQHhKyyrBZye\nTikVu4Tk3rksMHvz1roEktrrBTCvRGu1GWTba7nN0rCCX0FkTVvvhrMqr1rh2+Xc8Dmxnjz4Zmed\nn9LgwVcPfJw1j
HV0rgcPugHkdgWEr6i8R1S+2CWist54weNahMrzlgessxwkfcsCsGc5eFGlJYGw\n5e1a09woo6VVGT+etwC5dHMsWT9elFvrW1kg7NkanA6oISwAtkAs26xhG9DtroDwFVYtUtzZeaJ5\nMXe0bvVjW4KkNa9/j6MngRlf+Ho91jrl+1xJZJWJJ1wauJ8HK9VM9//Q4pXqbfDOiTXdYke0PLG0\nRMLafpBzoMs0hBnAensDwOsrIHwF5V3cPC8XnkS73uuFZOx5sdY8UM6JlYoluXg9APMYwBIMvXmr\nGbLVOMNK36rl0nr71QqhGoxbINxqRQhc5W0bDGGZ5huiZ0Hw9jKAA8rdFBC+ovIuHg/CEvHqDtRv\n3rw5H27cuGGC3JuvpW1xLT8D14KwtjJKg674KzWosJryejm/ViQMlPN+LZ9Uj7nM2m/PjvDeqlGL\nhAXEMpZKUT32vPgWTzjA210B4WsiD8L6lUMS9d68eROHh4dLg6yH12vNe53myAUqkJTmyK1RtlgY\nGu5W81vuHc3qkrLUmQ2vx6qYA9brRYyh7FkSJTuiNUNC2xES+co6ZZptCYav9bTjAThg3K6A8BVW\nKQLWkZb2hAXCt27dwq1bt/DAAw/Mp0U1/1OsAHncFxtBWwU6Ui1tqxV5edC0yq1WbLVotxa9l/Jl\nW6FsAVgfh66ecA3EAlq2JDj6tQBsRcGyrToFzwNwALmsgPAVlRVdWY/77AnrSPiBBx7A7du3cfv2\nbTzwwAMAljMAvPnxeDx/lAYwB5sso7uQlAvZu9D1oCPoUjeVutlwS4QrjSLY0rCmaxVxJeDUrIgu\ndkSLDaHhzfC1hlKmSunGqPcpgNtNAeErqNJjLl/klh1xeHiIW7du4fbt23jwwQfxcR/3cbh9+3ZT\nZZhMj0ajhQhY94urI+HxeLwQTXkQlmmr/19rzFkN3lDK//XKZZ9Ym6qU21QkbNkR4v2WWjB6mSoM\nY2ufLB88oFxWQPiKygMwX+RexZxYEQLiBx980Kz88sp03wZnZ2dzW0K2S4BgQdjafpm2Oln3+gDW\n0a01tkAr21wa8/Q658c7RzUISxSs37hcgrC2IxjEnJPsVcZ50bB3PALAbQoIXzFZj7FWxCPwPTw8\nnGc/SCrawcHBvIGGbnKsZaUvMaQ42tRerIamrEu23xrL9HQ6dTMdvIYVHnS9vF/Zfj3WWgUsXjTJ\nEafO15Zpr8yzH/TNjs8D++W1SstSn8PesdPHKSBcV0D4kst6LNQeoldbLlHUrVu3FkB8cHAwv8g1\ngCV5X6R9XF2mAVcCsI5e2VPkaV3G8Ci1bPPshJKnLWV6zNOrnCOObr3BaiTjlXm2AgPYq6i0Uvi4\n3wwPxK3Hct1jdx0UEL6k8jw5hrB+XOVhb29vCcJykZcgbAFYZPmqXtQlg+QKl/ZLpvW6rPV6AC75\nu7z9Ja0CFK4I9caDwaDYapEHz7/lSLglAi41Wqk13dbnfZ3jdF0VEL6E8ip2ZJ4hLFDlYX9/fykK\nrkXCHoAt+FqPwRx9jcfjOYQtqPN+SoWel2ZW6+ehFhXr/bGmu6rm8XLHPAxaOR/WfMn3l+0Wq6cE\nYYZxKa2PLR597jd53K6TAsKXTLXHdSvrwYukDg4Omu2IwWCwBFm+6EVcSWdd/OzllgDM6y6lm3UF\nsGgbABZxJKytIbaJ9PmSc8FjmW557JcbnGdHeJGw13il5AeXfPSQr4DwJVIJwHqsL3QNYt0PhAxd\nPGGJhjWAWZYVYYFYe5DajrDWx/NWRZFV1ur/evDYpLfJkbDX8s2LhPVTioxLdgtXQHq+sNeUW3+H\np0s50wHk7uoM4ZTSCwF8K4CHADwdwBflnH+JlvlOAP8SwIMAfgvAv845/+n6mxsCygDmC13SzwTA\nMnj9Q2gISw28hrCVFSGq2REWgC0Ily7cWt4vQ0JvV226tA2bsiS8RhcyWLYDd6wkn/ENSE+ntNx/\nB9s4pUjYyqUuAThsiNW1SiR8COAPAPwIgJ/nD1NKrwTwjQC+GsD7Afx7AA+nlD415zxeY1tDSl4+\nLV/s2v9l6B4eHi6A2YuEh8OhmT9qyQKx50dqO4LX4c3rKK82rgG3FSCbALB1g9QQ1n03l+ArN9P9\n/X0TrCK5scmTQ80PlmE8Hi+1HiwBuHTsAsZt6gzhnPNbALwFAJJ9JX4zgNfmnH/5fJmvAvAogC8C\n8DOrb2oIKDd7lWmdB8zdVEqLOBksi6JkR3g9mwGLUOML1wOwRMKt/mzXgbfLW78HjE15wlwpxwDW\nN0u2I7hrURnYTtDHXvK2azdCKxK2cqxLN7ptHLPrpI16wimlZwJ4GoBfl7Kc852U0u8CeAECwhuR\n1dpKT0vFnLYjOBKWTnm8SiCrYq5mRchYD9bFzxe9RF7WuvRYr9+a9spK09a8V7aqrEiYo2ELwuwH\nM4zH47HbMEPEfnktT1gg7HnN3o0utLo2XTH3NAAZs8hX69Hzz0IrykpFs2Ds2RFyAYsNoSGsAaDH\nXiTs9R9Q84Q9X1jsiHUsA6+8C2C3CZQSgNmGKFkS2jZiW0iOt+6jg2+EHog9CMtxafHRQ6vpfmVH\nJMzgHFpRVjNXq9nrcDhceiMGP8bqgXOH+cWeXjNYvoj5RZnWW4y9VljavyyNeXqTWne9nkfuVZbq\njBVv0DdDfirx/PmSH19rgtySehYA3rw2DeFHMAPuU7EYDT8FwP/e8G9dGwmArc5WuGx3d9esbGMY\nCwCslnQc/VoNAHSKk35hpgzHx8c4Pj5eALH1CvkW8IpWufAlIqyVrSILvFxWAjCnDUqFm2UL6fOy\nSmaKlWJWq8AM3R9tFMI55/enlB4B8GIA/wcAUkq3AXwWgB/Y5G9dN1mPs1aLq729vYWUMwvGGsTc\nG5cVBZciYQ1hiX5PTk5wdHRUjIbFBy5ZEF5ZV7VAt7WxSO27lm3kVcaxxcDNxq1UQX4yAZYrRPU5\nsnJ8I9/3YmmVPOFDAM/GLOIFgGellJ4H4GM5578A8EYA355S+lMAHwDwWgB/CeAXN7LF11CW1+t1\nXSgVcDUAC4S9RgPcI1crhCUCloFfKW81gfUu9E2Cd1PRL/9Gy7TXjFxHwnLj5HQ1vjl6nnxrJNza\nojB0/7RKJPwZAH4DM483A3j9efmPA/janPN3p5RuAvhBzBpr/E8AX5AjR7izSo0wrER/ubg5EvbS\n0A4ODpYiac93BNoAfHR0tDBmO4IbA1gX/TowZgB6310Xyl6TcS6zzptlR2gI85MJ+8G1HO0WANci\nYVmfHoe2o1XyhN8GYKeyzGsAvGa1TQp5XqOX1sR5pp4VoRtjyNjqZ5jLukbC9+7dW7IjRqORGwnL\nelldL37vuMl6NhENl6Drgdnq1c6LhGvdj9YiYW1HMIi1FcHLhv3Qn6LviAsmDyTeY62V0mTZEVYk\nLN0hem9XWBXCEgVzJGxVzsk6RS3pYy0VYpuW5fW2jkuZEboZ+eHhYfGGaH
[... base64-encoded PNG output omitted: the figure produced by this cell is a single generated 28x28 sample rendered with plt.imshow in grayscale ...]",
272 |       "text/plain": [
273 |        ""
274 |       ]
275 |      },
276 |      "metadata": {},
277 |      "output_type": "display_data"
278 |     }
279 |    ],
280 |    "source": [
281 |     "# Hyper-parameters\n",
282 |     "epochs = 50000\n",
283 |     "batch_size = 128\n",
284 |     "\n",
285 |     "# Session\n",
286 |     "with tf.Session() as sess:\n",
287 |     "    sess.run(tf.global_variables_initializer())\n",
288 |     "    \n",
289 |     "    for _ in range(epochs):\n",
290 |     "        sess.run(G_opt, feed_dict={\n",
291 |     "            z: generate_z(batch_size) \n",
292 |     "        })\n",
293 |     "        sess.run(D_opt, feed_dict={\n",
294 |     "            X: images[np.random.choice(range(len(images)), batch_size)].reshape(batch_size, x_size),\n",
295 |     "            z: generate_z(batch_size),\n",
296 |     "        })\n",
297 |     "\n",
298 |     "    # Show a random image\n",
299 |     "    image = sess.run(sample, feed_dict={z:generate_z()})\n",
300 |     "    plt.imshow(image.reshape(28, 28), cmap='gray')"
301 |    ]
302 |   }
303 |  ],
304 |  "metadata": {
305 |   "kernelspec": {
306 |    "display_name": "Python 3",
307 |    "language": "python",
308 |    "name": "python3"
309 |   },
310 |   "language_info": {
311 |    "codemirror_mode": {
312 |     "name": "ipython",
313 |     "version": 3
314 |    },
315 |    "file_extension": ".py",
316 |    "mimetype": 
"text/x-python", 317 | "name": "python", 318 | "nbconvert_exporter": "python", 319 | "pygments_lexer": "ipython3", 320 | "version": "3.5.2" 321 | } 322 | }, 323 | "nbformat": 4, 324 | "nbformat_minor": 2 325 | } 326 | -------------------------------------------------------------------------------- /11_gan.py: -------------------------------------------------------------------------------- 1 | import tensorflow as tf 2 | import numpy as np 3 | from tensorflow.examples.tutorials.mnist import input_data 4 | #from PIL import Image 5 | 6 | mnist = input_data.read_data_sets("MNIST_data/") 7 | images = mnist.train.images 8 | 9 | def xavier_initializer(shape): 10 | return tf.random_normal(shape=shape, stddev=1.0/shape[0]) 11 | 12 | # Generator 13 | z_size = 100 # maybe larger 14 | g_w1_size = 400 15 | g_out_size = 28 * 28 16 | 17 | # Discriminator 18 | x_size = 28 * 28 19 | d_w1_size = 400 20 | d_out_size = 1 21 | 22 | z = tf.placeholder('float', shape=(None, z_size)) 23 | X = tf.placeholder('float', shape=(None, x_size)) 24 | 25 | # use dict to share variables 26 | g_weights = { 27 | 'w1': tf.Variable(xavier_initializer(shape=(z_size, g_w1_size))), 28 | 'b1': tf.Variable(tf.zeros(shape=[g_w1_size])), 29 | 'out': tf.Variable(xavier_initializer(shape=(g_w1_size, g_out_size))), 30 | 'b2': tf.Variable(tf.zeros(shape=[g_out_size])), 31 | } 32 | 33 | d_weights ={ 34 | 'w1': tf.Variable(xavier_initializer(shape=(x_size, d_w1_size))), 35 | 'b1': tf.Variable(tf.zeros(shape=[d_w1_size])), 36 | 'out': tf.Variable(xavier_initializer(shape=(d_w1_size, d_out_size))), 37 | 'b2': tf.Variable(tf.zeros(shape=[d_out_size])), 38 | } 39 | 40 | def G(z, w=g_weights): 41 | # here tanh is better than relu 42 | h1 = tf.tanh(tf.matmul(z, w['w1']) + w['b1']) 43 | # pixel output is in range [0, 255] 44 | return tf.sigmoid(tf.matmul(h1, w['out']) + w['b2']) * 255 45 | 46 | def D(x, w=d_weights): 47 | # here tanh is better than relu 48 | h1 = tf.tanh(tf.matmul(x, w['w1']) + w['b1']) 49 | h2 = tf.matmul(h1, w['out']) + w['b2'] 50 | return h2 # use h2 to calculate logits loss 51 | 52 | def generate_z(n=1): 53 | return np.random.normal(size=(n, z_size)) 54 | 55 | sample = G(z) 56 | 57 | 58 | dout_real = D(X) 59 | dout_fake = D(G(z)) 60 | 61 | G_obj = tf.reduce_mean( 62 | tf.nn.sigmoid_cross_entropy_with_logits(logits=dout_fake, labels=tf.ones_like(dout_fake))) 63 | D_obj_real = tf.reduce_mean( # use single side smoothing 64 | tf.nn.sigmoid_cross_entropy_with_logits(logits=dout_real, labels=(tf.ones_like(dout_real)-0.1))) 65 | D_obj_fake = tf.reduce_mean( 66 | tf.nn.sigmoid_cross_entropy_with_logits(logits=dout_fake, labels=tf.zeros_like(dout_fake))) 67 | D_obj = D_obj_real + D_obj_fake 68 | 69 | G_opt = tf.train.AdamOptimizer().minimize(G_obj, var_list=g_weights.values()) 70 | D_opt = tf.train.AdamOptimizer().minimize(D_obj, var_list=d_weights.values()) 71 | 72 | ## Training 73 | batch_size = 128 74 | 75 | with tf.Session() as sess: 76 | sess.run(tf.global_variables_initializer()) 77 | 78 | for i in range(200): 79 | sess.run(D_opt, feed_dict={ 80 | X: images[np.random.choice(range(len(images)), batch_size)].reshape(batch_size, x_size), 81 | z: generate_z(batch_size), 82 | }) 83 | # run two phases of generator 84 | sess.run(G_opt, feed_dict={ 85 | z: generate_z(batch_size) 86 | }) 87 | sess.run(G_opt, feed_dict={ 88 | z: generate_z(batch_size) 89 | }) 90 | 91 | g_cost = sess.run(G_obj, feed_dict={z: generate_z(batch_size)}) 92 | d_cost = sess.run(D_obj, feed_dict={ 93 | X: images[np.random.choice(range(len(images)), 
batch_size)].reshape(batch_size, x_size),
94 |             z: generate_z(batch_size),
95 |         })
96 |         image = sess.run(G(z), feed_dict={z:generate_z()})
97 |         df = sess.run(tf.sigmoid(dout_fake), feed_dict={z:generate_z()})
98 |         # print i, G cost, D cost, image max pixel, D output of fake
99 |         print(i, g_cost, d_cost, image.max(), df[0][0])
100 | 
101 |     # You may wish to save or plot the generated image
102 |     # to see what it looks like
103 |     image = sess.run(G(z), feed_dict={z:generate_z()})
104 |     image1 = image[0].reshape([28, 28])
105 |     #print image1
106 |     #im = Image.fromarray(image1)
107 |     #im.show()
108 | 
109 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # TensorFlow-Tutorials
2 | [![Build Status](https://travis-ci.org/nlintz/TensorFlow-Tutorials.svg?branch=master)](https://travis-ci.org/nlintz/TensorFlow-Tutorials)
3 | [![Codacy Badge](https://api.codacy.com/project/badge/grade/2d3ed69cdbec4249ab5c2f7e4286bb8f)](https://www.codacy.com/app/hunkim/TensorFlow-Tutorials)
4 | 
5 | Introduction to deep learning based on Google's TensorFlow framework. These tutorials are direct ports of
6 | Newmu's [Theano Tutorials](https://github.com/Newmu/Theano-Tutorials).
7 | 
8 | ***Topics***
9 | * [Simple Multiplication](00_multiply.py)
10 | * [Linear Regression](01_linear_regression.py)
11 | * [Logistic Regression](02_logistic_regression.py)
12 | * [Feedforward Neural Network (Multilayer Perceptron)](03_net.py)
13 | * [Deep Feedforward Neural Network (Multilayer Perceptron with 2 Hidden Layers O.o)](04_modern_net.py)
14 | * [Convolutional Neural Network](05_convolutional_net.py)
15 | * [Denoising Autoencoder](06_autoencoder.py)
16 | * [Recurrent Neural Network (LSTM)](07_lstm.py)
17 | * [Word2vec](08_word2vec.py)
18 | * [TensorBoard](09_tensorboard.py)
19 | * [Save and restore net](10_save_restore_net.py)
20 | * [Generative Adversarial Network](11_gan.py)
21 | 
22 | ***Dependencies***
23 | * TensorFlow 1.0 alpha
24 | * NumPy
25 | * matplotlib
26 | 
--------------------------------------------------------------------------------
/nb2script:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | ## Usage ################################
4 | # ./nb2script <notebook_name>
5 | # Example:
6 | #   nb2script 04_modern_net
7 | #########################################
8 | 
9 | if [ $# -ne "1" ]; then
10 |     echo "Usage: ./nb2script <notebook_name>"
11 | else
12 |     jupyter nbconvert --to script $1.ipynb > $1.py
13 | fi
--------------------------------------------------------------------------------
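The closing comments of 11_gan.py suggest saving or plotting a generated image rather than relying on the commented-out PIL calls. Below is a minimal sketch of one way to do that with matplotlib (already listed as a dependency in README.md). It is illustrative only: it assumes it is pasted, with matching indentation, at the end of the `with tf.Session() as sess:` block in 11_gan.py, where `sess`, `sample`, `z`, and `generate_z` are in scope; the 4x4 grid size and the output filename `gan_samples.png` are arbitrary choices, not part of the repository.

```python
# Illustrative sketch, not part of the repository. Assumes `sess`, `sample`, `z`,
# and `generate_z` from 11_gan.py are in scope (i.e. it runs inside the training session).
import matplotlib
matplotlib.use("Agg")  # render off-screen so no display is required
import matplotlib.pyplot as plt

samples = sess.run(sample, feed_dict={z: generate_z(16)})  # 16 fake digits, shape (16, 784)
fig, axes = plt.subplots(4, 4, figsize=(4, 4))
for ax, img in zip(axes.flat, samples):
    ax.imshow(img.reshape(28, 28), cmap='gray')  # same reshape/cmap as the notebook cell
    ax.axis('off')
fig.savefig("gan_samples.png")  # arbitrary filename
plt.close(fig)
```

Dropping the Agg backend call and replacing `fig.savefig(...)` with `plt.show()` would display the grid interactively instead, mirroring what the notebook's `plt.imshow` cell does.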