├── LICENSE ├── 1_Introduction ├── helloworld.ipynb └── basic_operations.ipynb ├── 0_Prerequisite ├── ml_introduction.ipynb └── mnist_dataset_intro.ipynb ├── 5_MultiGPU └── multigpu_basics.ipynb ├── 2_BasicModels ├── logistic_regression.ipynb ├── nearest_neighbor.ipynb └── linear_regression.ipynb ├── 3_NeuralNetworks ├── multilayer_perceptron.ipynb ├── recurrent_network.ipynb ├── bidirectional_rnn.ipynb └── convolutional_network.ipynb ├── 4_Utils └── save_restore_model.ipynb └── README.md /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 floydhub 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /1_Introduction/helloworld.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": false 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "import tensorflow as tf" 12 | ] 13 | }, 14 | { 15 | "cell_type": "code", 16 | "execution_count": 2, 17 | "metadata": { 18 | "collapsed": true 19 | }, 20 | "outputs": [], 21 | "source": [ 22 | "# Simple hello world using TensorFlow\n", 23 | "\n", 24 | "# Create a Constant op\n", 25 | "# The op is added as a node to the default graph.\n", 26 | "#\n", 27 | "# The value returned by the constructor represents the output\n", 28 | "# of the Constant op.\n", 29 | "\n", 30 | "hello = tf.constant('Hello, TensorFlow!')" 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 3, 36 | "metadata": { 37 | "collapsed": true 38 | }, 39 | "outputs": [], 40 | "source": [ 41 | "# Start tf session\n", 42 | "sess = tf.Session()" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 4, 48 | "metadata": { 49 | "collapsed": false 50 | }, 51 | "outputs": [ 52 | { 53 | "name": "stdout", 54 | "output_type": "stream", 55 | "text": [ 56 | "Hello, TensorFlow!\n" 57 | ] 58 | } 59 | ], 60 | "source": [ 61 | "# Run graph\n", 62 | "print sess.run(hello)" 63 | ] 64 | } 65 | ], 66 | "metadata": { 67 | "kernelspec": { 68 | "display_name": "IPython (Python 2.7)", 69 | "language": "python", 70 | "name": "python2" 71 | }, 72 | "language_info": { 73 | "codemirror_mode": { 74 | "name": "ipython", 75 | "version": 2.0 76 | }, 77 | "file_extension": ".py", 78 | "mimetype": "text/x-python", 79 | "name": "python", 80 | "nbconvert_exporter": "python", 81 | "pygments_lexer": "ipython2", 82 | "version": "2.7.8" 83 | } 84 | }, 85 | "nbformat": 4, 86 | "nbformat_minor": 0 87 | } 
-------------------------------------------------------------------------------- /0_Prerequisite/ml_introduction.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Machine Learning\n", 8 | "\n", 9 | "Prior to start browsing the examples, it may be useful that you get familiar with machine learning, as TensorFlow is mostly used for machine learning tasks (especially Neural Networks). You can find below a list of useful links, that can give you the basic knowledge required for this TensorFlow Tutorial.\n", 10 | "\n", 11 | "## Machine Learning\n", 12 | "\n", 13 | "- [An Introduction to Machine Learning Theory and Its Applications: A Visual Tutorial with Examples](https://www.toptal.com/machine-learning/machine-learning-theory-an-introductory-primer)\n", 14 | "- [A Gentle Guide to Machine Learning](https://blog.monkeylearn.com/a-gentle-guide-to-machine-learning/)\n", 15 | "- [A Visual Introduction to Machine Learning](http://www.r2d3.us/visual-intro-to-machine-learning-part-1/)\n", 16 | "- [Introduction to Machine Learning](http://alex.smola.org/drafts/thebook.pdf)\n", 17 | "\n", 18 | "## Deep Learning & Neural Networks\n", 19 | "\n", 20 | "- [An Introduction to Neural Networks](http://www.cs.stir.ac.uk/~lss/NNIntro/InvSlides.html)\n", 21 | "- [An Introduction to Image Recognition with Deep Learning](https://medium.com/@ageitgey/machine-learning-is-fun-part-3-deep-learning-and-convolutional-neural-networks-f40359318721)\n", 22 | "- [Neural Networks and Deep Learning](http://neuralnetworksanddeeplearning.com/index.html)\n", 23 | "\n" 24 | ] 25 | } 26 | ], 27 | "metadata": { 28 | "kernelspec": { 29 | "display_name": "IPython (Python 2.7)", 30 | "language": "python", 31 | "name": "python2" 32 | }, 33 | "language_info": { 34 | "codemirror_mode": { 35 | "name": "ipython", 36 | "version": 2 37 | }, 38 | "file_extension": ".py", 39 | 
"mimetype": "text/x-python", 40 | "name": "python", 41 | "nbconvert_exporter": "python", 42 | "pygments_lexer": "ipython2", 43 | "version": "2.7.11" 44 | } 45 | }, 46 | "nbformat": 4, 47 | "nbformat_minor": 0 48 | } 49 | -------------------------------------------------------------------------------- /0_Prerequisite/mnist_dataset_intro.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "\n", 8 | "# MNIST Dataset Introduction\n", 9 | "\n", 10 | "Most examples are using MNIST dataset of handwritten digits. It has 60,000 examples for training and 10,000 examples for testing. The digits have been size-normalized and centered in a fixed-size image, so each sample is represented as a matrix of size 28x28 with values from 0 to 1.\n", 11 | "\n", 12 | "## Overview\n", 13 | "\n", 14 | "![MNIST Digits](http://neuralnetworksanddeeplearning.com/images/mnist_100_digits.png)\n", 15 | "\n", 16 | "## Usage\n", 17 | "In our examples, we are using TensorFlow [input_data.py](https://github.com/tensorflow/tensorflow/blob/r0.7/tensorflow/examples/tutorials/mnist/input_data.py) script to load that dataset.\n", 18 | "It is quite useful for managing our data, and handle:\n", 19 | "\n", 20 | "- Dataset downloading\n", 21 | "\n", 22 | "- Loading the entire dataset into numpy array: \n", 23 | "\n", 24 | "\n" 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": null, 30 | "metadata": { 31 | "collapsed": true 32 | }, 33 | "outputs": [], 34 | "source": [ 35 | "# Import MNIST\n", 36 | "from tensorflow.examples.tutorials.mnist import input_data\n", 37 | "mnist = input_data.read_data_sets(\"/tmp/data/\", one_hot=True)\n", 38 | "\n", 39 | "# Load data\n", 40 | "X_train = mnist.train.images\n", 41 | "Y_train = mnist.train.labels\n", 42 | "X_test = mnist.test.images\n", 43 | "Y_test = mnist.test.labels" 44 | ] 45 | }, 46 | { 47 | "cell_type": 
"markdown", 48 | "metadata": {}, 49 | "source": [ 50 | "- A `next_batch` function that can iterate over the whole dataset and return only the desired fraction of the dataset samples (in order to save memory and avoid to load the entire dataset)." 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "metadata": { 57 | "collapsed": true 58 | }, 59 | "outputs": [], 60 | "source": [ 61 | "# Get the next 64 images array and labels\n", 62 | "batch_X, batch_Y = mnist.train.next_batch(64)" 63 | ] 64 | }, 65 | { 66 | "cell_type": "markdown", 67 | "metadata": {}, 68 | "source": [ 69 | "Link: http://yann.lecun.com/exdb/mnist/" 70 | ] 71 | } 72 | ], 73 | "metadata": { 74 | "kernelspec": { 75 | "display_name": "IPython (Python 2.7)", 76 | "language": "python", 77 | "name": "python2" 78 | }, 79 | "language_info": { 80 | "codemirror_mode": { 81 | "name": "ipython", 82 | "version": 2 83 | }, 84 | "file_extension": ".py", 85 | "mimetype": "text/x-python", 86 | "name": "python", 87 | "nbconvert_exporter": "python", 88 | "pygments_lexer": "ipython2", 89 | "version": "2.7.11" 90 | } 91 | }, 92 | "nbformat": 4, 93 | "nbformat_minor": 0 94 | } 95 | -------------------------------------------------------------------------------- /5_MultiGPU/multigpu_basics.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# Basic Multi GPU computation example using TensorFlow library.\n", 12 | "\n", 13 | "# Author: Aymeric Damien\n", 14 | "# Project: https://github.com/aymericdamien/TensorFlow-Examples/\n", 15 | "\n", 16 | "# This tutorial requires your machine to have 2 GPUs\n", 17 | "# \"/cpu:0\": The CPU of your machine.\n", 18 | "# \"/gpu:0\": The first GPU of your machine\n", 19 | "# \"/gpu:1\": The second GPU of your machine\n", 20 | "# For this example, we are using 
2 GTX-980" 21 | ] 22 | }, 23 | { 24 | "cell_type": "code", 25 | "execution_count": 2, 26 | "metadata": { 27 | "collapsed": true 28 | }, 29 | "outputs": [], 30 | "source": [ 31 | "import numpy as np\n", 32 | "import tensorflow as tf\n", 33 | "import datetime" 34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": 3, 39 | "metadata": { 40 | "collapsed": true 41 | }, 42 | "outputs": [], 43 | "source": [ 44 | "#Processing Units logs\n", 45 | "log_device_placement = True\n", 46 | "\n", 47 | "#num of multiplications to perform\n", 48 | "n = 10" 49 | ] 50 | }, 51 | { 52 | "cell_type": "code", 53 | "execution_count": null, 54 | "metadata": { 55 | "collapsed": false 56 | }, 57 | "outputs": [], 58 | "source": [ 59 | "# Example: compute A^n + B^n on 2 GPUs\n", 60 | "\n", 61 | "# Create random large matrix\n", 62 | "A = np.random.rand(1e4, 1e4).astype('float32')\n", 63 | "B = np.random.rand(1e4, 1e4).astype('float32')\n", 64 | "\n", 65 | "# Creates a graph to store results\n", 66 | "c1 = []\n", 67 | "c2 = []\n", 68 | "\n", 69 | "# Define matrix power\n", 70 | "def matpow(M, n):\n", 71 | " if n < 1: #Abstract cases where n < 1\n", 72 | " return M\n", 73 | " else:\n", 74 | " return tf.matmul(M, matpow(M, n-1))" 75 | ] 76 | }, 77 | { 78 | "cell_type": "code", 79 | "execution_count": 6, 80 | "metadata": { 81 | "collapsed": true 82 | }, 83 | "outputs": [], 84 | "source": [ 85 | "# Single GPU computing\n", 86 | "\n", 87 | "with tf.device('/gpu:0'):\n", 88 | " a = tf.constant(A)\n", 89 | " b = tf.constant(B)\n", 90 | " #compute A^n and B^n and store results in c1\n", 91 | " c1.append(matpow(a, n))\n", 92 | " c1.append(matpow(b, n))\n", 93 | "\n", 94 | "with tf.device('/cpu:0'):\n", 95 | " sum = tf.add_n(c1) #Addition of all elements in c1, i.e. 
A^n + B^n\n", 96 | "\n", 97 | "t1_1 = datetime.datetime.now()\n", 98 | "with tf.Session(config=tf.ConfigProto(log_device_placement=log_device_placement)) as sess:\n", 99 | " # Runs the op.\n", 100 | " sess.run(sum)\n", 101 | "t2_1 = datetime.datetime.now()" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": 7, 107 | "metadata": { 108 | "collapsed": true 109 | }, 110 | "outputs": [], 111 | "source": [ 112 | "# Multi GPU computing\n", 113 | "# GPU:0 computes A^n\n", 114 | "with tf.device('/gpu:0'):\n", 115 | " #compute A^n and store result in c2\n", 116 | " a = tf.constant(A)\n", 117 | " c2.append(matpow(a, n))\n", 118 | "\n", 119 | "#GPU:1 computes B^n\n", 120 | "with tf.device('/gpu:1'):\n", 121 | " #compute B^n and store result in c2\n", 122 | " b = tf.constant(B)\n", 123 | " c2.append(matpow(b, n))\n", 124 | "\n", 125 | "with tf.device('/cpu:0'):\n", 126 | " sum = tf.add_n(c2) #Addition of all elements in c2, i.e. A^n + B^n\n", 127 | "\n", 128 | "t1_2 = datetime.datetime.now()\n", 129 | "with tf.Session(config=tf.ConfigProto(log_device_placement=log_device_placement)) as sess:\n", 130 | " # Runs the op.\n", 131 | " sess.run(sum)\n", 132 | "t2_2 = datetime.datetime.now()" 133 | ] 134 | }, 135 | { 136 | "cell_type": "code", 137 | "execution_count": 8, 138 | "metadata": { 139 | "collapsed": false 140 | }, 141 | "outputs": [ 142 | { 143 | "name": "stdout", 144 | "output_type": "stream", 145 | "text": [ 146 | "Single GPU computation time: 0:00:11.833497\n", 147 | "Multi GPU computation time: 0:00:07.085913\n" 148 | ] 149 | } 150 | ], 151 | "source": [ 152 | "print \"Single GPU computation time: \" + str(t2_1-t1_1)\n", 153 | "print \"Multi GPU computation time: \" + str(t2_2-t1_2)" 154 | ] 155 | } 156 | ], 157 | "metadata": { 158 | "kernelspec": { 159 | "display_name": "Python 2", 160 | "language": "python", 161 | "name": "python2" 162 | }, 163 | "language_info": { 164 | "codemirror_mode": { 165 | "name": "ipython", 166 | "version": 2.0 167 | 
}, 168 | "file_extension": ".py", 169 | "mimetype": "text/x-python", 170 | "name": "python", 171 | "nbconvert_exporter": "python", 172 | "pygments_lexer": "ipython2", 173 | "version": "2.7.10" 174 | } 175 | }, 176 | "nbformat": 4, 177 | "nbformat_minor": 0 178 | } 179 | -------------------------------------------------------------------------------- /2_BasicModels/logistic_regression.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# A logistic regression learning algorithm example using TensorFlow library.\n", 12 | "# This example is using the MNIST database of handwritten digits \n", 13 | "# (http://yann.lecun.com/exdb/mnist/)\n", 14 | "\n", 15 | "# Author: Aymeric Damien\n", 16 | "# Project: https://github.com/aymericdamien/TensorFlow-Examples/" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": 1, 22 | "metadata": { 23 | "collapsed": false 24 | }, 25 | "outputs": [ 26 | { 27 | "name": "stdout", 28 | "output_type": "stream", 29 | "text": [ 30 | "Extracting MNIST_data/train-images-idx3-ubyte.gz\n", 31 | "Extracting MNIST_data/train-labels-idx1-ubyte.gz\n", 32 | "Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n", 33 | "Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n" 34 | ] 35 | } 36 | ], 37 | "source": [ 38 | "import tensorflow as tf\n", 39 | "\n", 40 | "# Import MINST data\n", 41 | "from tensorflow.examples.tutorials.mnist import input_data\n", 42 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 3, 48 | "metadata": { 49 | "collapsed": false 50 | }, 51 | "outputs": [], 52 | "source": [ 53 | "# Parameters\n", 54 | "learning_rate = 0.01\n", 55 | "training_epochs = 25\n", 56 | "batch_size = 100\n", 57 | "display_step = 1\n", 58 | "\n", 59 | "# tf 
Graph Input\n", 60 | "x = tf.placeholder(tf.float32, [None, 784]) # mnist data image of shape 28*28=784\n", 61 | "y = tf.placeholder(tf.float32, [None, 10]) # 0-9 digits recognition => 10 classes\n", 62 | "\n", 63 | "# Set model weights\n", 64 | "W = tf.Variable(tf.zeros([784, 10]))\n", 65 | "b = tf.Variable(tf.zeros([10]))\n", 66 | "\n", 67 | "# Construct model\n", 68 | "pred = tf.nn.softmax(tf.matmul(x, W) + b) # Softmax\n", 69 | "\n", 70 | "# Minimize error using cross entropy\n", 71 | "cost = tf.reduce_mean(-tf.reduce_sum(y*tf.log(pred), reduction_indices=1))\n", 72 | "# Gradient Descent\n", 73 | "optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)\n", 74 | "\n", 75 | "# Initializing the variables\n", 76 | "init = tf.global_variables_initializer()" 77 | ] 78 | }, 79 | { 80 | "cell_type": "code", 81 | "execution_count": null, 82 | "metadata": { 83 | "collapsed": false 84 | }, 85 | "outputs": [ 86 | { 87 | "name": "stdout", 88 | "output_type": "stream", 89 | "text": [ 90 | "Epoch: 0001 cost= 1.182138959\n", 91 | "Epoch: 0002 cost= 0.664778162\n", 92 | "Epoch: 0003 cost= 0.552686284\n", 93 | "Epoch: 0004 cost= 0.498628905\n", 94 | "Epoch: 0005 cost= 0.465469866\n", 95 | "Epoch: 0006 cost= 0.442537872\n", 96 | "Epoch: 0007 cost= 0.425462044\n", 97 | "Epoch: 0008 cost= 0.412185303\n", 98 | "Epoch: 0009 cost= 0.401311587\n", 99 | "Epoch: 0010 cost= 0.392326203\n", 100 | "Epoch: 0011 cost= 0.384736038\n", 101 | "Epoch: 0012 cost= 0.378137191\n", 102 | "Epoch: 0013 cost= 0.372363752\n", 103 | "Epoch: 0014 cost= 0.367308579\n", 104 | "Epoch: 0015 cost= 0.362704660\n", 105 | "Epoch: 0016 cost= 0.358588599\n", 106 | "Epoch: 0017 cost= 0.354823110\n" 107 | ] 108 | } 109 | ], 110 | "source": [ 111 | "# Launch the graph\n", 112 | "with tf.Session() as sess:\n", 113 | " sess.run(init)\n", 114 | "\n", 115 | " # Training cycle\n", 116 | " for epoch in range(training_epochs):\n", 117 | " avg_cost = 0.\n", 118 | " total_batch = 
int(mnist.train.num_examples/batch_size)\n", 119 | " # Loop over all batches\n", 120 | " for i in range(total_batch):\n", 121 | " batch_xs, batch_ys = mnist.train.next_batch(batch_size)\n", 122 | " # Fit training using batch data\n", 123 | " _, c = sess.run([optimizer, cost], feed_dict={x: batch_xs,\n", 124 | " y: batch_ys})\n", 125 | " # Compute average loss\n", 126 | " avg_cost += c / total_batch\n", 127 | " # Display logs per epoch step\n", 128 | " if (epoch+1) % display_step == 0:\n", 129 | " print \"Epoch:\", '%04d' % (epoch+1), \"cost=\", \"{:.9f}\".format(avg_cost)\n", 130 | "\n", 131 | " print \"Optimization Finished!\"\n", 132 | "\n", 133 | " # Test model\n", 134 | " correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))\n", 135 | " # Calculate accuracy for 3000 examples\n", 136 | " accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n", 137 | " print \"Accuracy:\", accuracy.eval({x: mnist.test.images[:3000], y: mnist.test.labels[:3000]})" 138 | ] 139 | }, 140 | { 141 | "cell_type": "code", 142 | "execution_count": null, 143 | "metadata": { 144 | "collapsed": true 145 | }, 146 | "outputs": [], 147 | "source": [] 148 | } 149 | ], 150 | "metadata": { 151 | "kernelspec": { 152 | "display_name": "Python 2", 153 | "language": "python", 154 | "name": "python2" 155 | }, 156 | "language_info": { 157 | "codemirror_mode": { 158 | "name": "ipython", 159 | "version": 2 160 | }, 161 | "file_extension": ".py", 162 | "mimetype": "text/x-python", 163 | "name": "python", 164 | "nbconvert_exporter": "python", 165 | "pygments_lexer": "ipython2", 166 | "version": "2.7.13" 167 | } 168 | }, 169 | "nbformat": 4, 170 | "nbformat_minor": 0 171 | } 172 | -------------------------------------------------------------------------------- /1_Introduction/basic_operations.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | 
"collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# Basic Operations example using TensorFlow library.\n", 12 | "# Author: Aymeric Damien\n", 13 | "# Project: https://github.com/aymericdamien/TensorFlow-Examples/" 14 | ] 15 | }, 16 | { 17 | "cell_type": "code", 18 | "execution_count": 2, 19 | "metadata": { 20 | "collapsed": true 21 | }, 22 | "outputs": [], 23 | "source": [ 24 | "import tensorflow as tf" 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": 3, 30 | "metadata": { 31 | "collapsed": true 32 | }, 33 | "outputs": [], 34 | "source": [ 35 | "# Basic constant operations\n", 36 | "# The value returned by the constructor represents the output\n", 37 | "# of the Constant op.\n", 38 | "a = tf.constant(2)\n", 39 | "b = tf.constant(3)" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": 4, 45 | "metadata": { 46 | "collapsed": false 47 | }, 48 | "outputs": [ 49 | { 50 | "name": "stdout", 51 | "output_type": "stream", 52 | "text": [ 53 | "a=2, b=3\n", 54 | "Addition with constants: 5\n", 55 | "Multiplication with constants: 6\n" 56 | ] 57 | } 58 | ], 59 | "source": [ 60 | "# Launch the default graph.\n", 61 | "with tf.Session() as sess:\n", 62 | " print \"a=2, b=3\"\n", 63 | " print \"Addition with constants: %i\" % sess.run(a+b)\n", 64 | " print \"Multiplication with constants: %i\" % sess.run(a*b)" 65 | ] 66 | }, 67 | { 68 | "cell_type": "code", 69 | "execution_count": 5, 70 | "metadata": { 71 | "collapsed": true 72 | }, 73 | "outputs": [], 74 | "source": [ 75 | "# Basic Operations with variable as graph input\n", 76 | "# The value returned by the constructor represents the output\n", 77 | "# of the Variable op. 
(define as input when running session)\n", 78 | "# tf Graph input\n", 79 | "a = tf.placeholder(tf.int16)\n", 80 | "b = tf.placeholder(tf.int16)" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": 6, 86 | "metadata": { 87 | "collapsed": true 88 | }, 89 | "outputs": [], 90 | "source": [ 91 | "# Define some operations\n", 92 | "add = tf.add(a, b)\n", 93 | "mul = tf.mul(a, b)" 94 | ] 95 | }, 96 | { 97 | "cell_type": "code", 98 | "execution_count": 7, 99 | "metadata": { 100 | "collapsed": false 101 | }, 102 | "outputs": [ 103 | { 104 | "name": "stdout", 105 | "output_type": "stream", 106 | "text": [ 107 | "Addition with variables: 5\n", 108 | "Multiplication with variables: 6\n" 109 | ] 110 | } 111 | ], 112 | "source": [ 113 | "# Launch the default graph.\n", 114 | "with tf.Session() as sess:\n", 115 | " # Run every operation with variable input\n", 116 | " print \"Addition with variables: %i\" % sess.run(add, feed_dict={a: 2, b: 3})\n", 117 | " print \"Multiplication with variables: %i\" % sess.run(mul, feed_dict={a: 2, b: 3})" 118 | ] 119 | }, 120 | { 121 | "cell_type": "code", 122 | "execution_count": 8, 123 | "metadata": { 124 | "collapsed": true 125 | }, 126 | "outputs": [], 127 | "source": [ 128 | "# ----------------\n", 129 | "# More in details:\n", 130 | "# Matrix Multiplication from TensorFlow official tutorial\n", 131 | "\n", 132 | "# Create a Constant op that produces a 1x2 matrix. 
The op is\n", 133 | "# added as a node to the default graph.\n", 134 | "#\n", 135 | "# The value returned by the constructor represents the output\n", 136 | "# of the Constant op.\n", 137 | "matrix1 = tf.constant([[3., 3.]])" 138 | ] 139 | }, 140 | { 141 | "cell_type": "code", 142 | "execution_count": 9, 143 | "metadata": { 144 | "collapsed": true 145 | }, 146 | "outputs": [], 147 | "source": [ 148 | "# Create another Constant that produces a 2x1 matrix.\n", 149 | "matrix2 = tf.constant([[2.],[2.]])" 150 | ] 151 | }, 152 | { 153 | "cell_type": "code", 154 | "execution_count": 10, 155 | "metadata": { 156 | "collapsed": true 157 | }, 158 | "outputs": [], 159 | "source": [ 160 | "# Create a Matmul op that takes 'matrix1' and 'matrix2' as inputs.\n", 161 | "# The returned value, 'product', represents the result of the matrix\n", 162 | "# multiplication.\n", 163 | "product = tf.matmul(matrix1, matrix2)" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "execution_count": 11, 169 | "metadata": { 170 | "collapsed": false 171 | }, 172 | "outputs": [ 173 | { 174 | "name": "stdout", 175 | "output_type": "stream", 176 | "text": [ 177 | "[[ 12.]]\n" 178 | ] 179 | } 180 | ], 181 | "source": [ 182 | "# To run the matmul op we call the session 'run()' method, passing 'product'\n", 183 | "# which represents the output of the matmul op. This indicates to the call\n", 184 | "# that we want to get the output of the matmul op back.\n", 185 | "#\n", 186 | "# All inputs needed by the op are run automatically by the session. 
They\n", 187 | "# typically are run in parallel.\n", 188 | "#\n", 189 | "# The call 'run(product)' thus causes the execution of threes ops in the\n", 190 | "# graph: the two constants and matmul.\n", 191 | "#\n", 192 | "# The output of the op is returned in 'result' as a numpy `ndarray` object.\n", 193 | "with tf.Session() as sess:\n", 194 | " result = sess.run(product)\n", 195 | " print result" 196 | ] 197 | } 198 | ], 199 | "metadata": { 200 | "kernelspec": { 201 | "display_name": "IPython (Python 2.7)", 202 | "language": "python", 203 | "name": "python2" 204 | }, 205 | "language_info": { 206 | "codemirror_mode": { 207 | "name": "ipython", 208 | "version": 2.0 209 | }, 210 | "file_extension": ".py", 211 | "mimetype": "text/x-python", 212 | "name": "python", 213 | "nbconvert_exporter": "python", 214 | "pygments_lexer": "ipython2", 215 | "version": "2.7.8" 216 | } 217 | }, 218 | "nbformat": 4, 219 | "nbformat_minor": 0 220 | } -------------------------------------------------------------------------------- /3_NeuralNetworks/multilayer_perceptron.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "'''\n", 12 | "A Multilayer Perceptron implementation example using TensorFlow library.\n", 13 | "This example is using the MNIST database of handwritten digits\n", 14 | "(http://yann.lecun.com/exdb/mnist/)\n", 15 | "\n", 16 | "Author: Aymeric Damien\n", 17 | "Project: https://github.com/aymericdamien/TensorFlow-Examples/\n", 18 | "'''" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 1, 24 | "metadata": { 25 | "collapsed": false 26 | }, 27 | "outputs": [ 28 | { 29 | "name": "stdout", 30 | "output_type": "stream", 31 | "text": [ 32 | "Extracting MNIST_data/train-images-idx3-ubyte.gz\n", 33 | "Extracting MNIST_data/train-labels-idx1-ubyte.gz\n", 34 | 
"Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n", 35 | "Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n" 36 | ] 37 | } 38 | ], 39 | "source": [ 40 | "# Import MINST data\n", 41 | "from tensorflow.examples.tutorials.mnist import input_data\n", 42 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)\n", 43 | "\n", 44 | "import tensorflow as tf" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 2, 50 | "metadata": { 51 | "collapsed": true 52 | }, 53 | "outputs": [], 54 | "source": [ 55 | "# Parameters\n", 56 | "learning_rate = 0.001\n", 57 | "training_epochs = 15\n", 58 | "batch_size = 100\n", 59 | "display_step = 1\n", 60 | "\n", 61 | "# Network Parameters\n", 62 | "n_hidden_1 = 256 # 1st layer number of features\n", 63 | "n_hidden_2 = 256 # 2nd layer number of features\n", 64 | "n_input = 784 # MNIST data input (img shape: 28*28)\n", 65 | "n_classes = 10 # MNIST total classes (0-9 digits)\n", 66 | "\n", 67 | "# tf Graph input\n", 68 | "x = tf.placeholder(\"float\", [None, n_input])\n", 69 | "y = tf.placeholder(\"float\", [None, n_classes])" 70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": 3, 75 | "metadata": { 76 | "collapsed": true 77 | }, 78 | "outputs": [], 79 | "source": [ 80 | "# Create model\n", 81 | "def multilayer_perceptron(x, weights, biases):\n", 82 | " # Hidden layer with RELU activation\n", 83 | " layer_1 = tf.add(tf.matmul(x, weights['h1']), biases['b1'])\n", 84 | " layer_1 = tf.nn.relu(layer_1)\n", 85 | " # Hidden layer with RELU activation\n", 86 | " layer_2 = tf.add(tf.matmul(layer_1, weights['h2']), biases['b2'])\n", 87 | " layer_2 = tf.nn.relu(layer_2)\n", 88 | " # Output layer with linear activation\n", 89 | " out_layer = tf.matmul(layer_2, weights['out']) + biases['out']\n", 90 | " return out_layer" 91 | ] 92 | }, 93 | { 94 | "cell_type": "code", 95 | "execution_count": 5, 96 | "metadata": { 97 | "collapsed": false 98 | }, 99 | "outputs": [], 100 | "source": [ 101 | "# Store layers 
weight & bias\n", 102 | "weights = {\n", 103 | " 'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),\n", 104 | " 'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),\n", 105 | " 'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))\n", 106 | "}\n", 107 | "biases = {\n", 108 | " 'b1': tf.Variable(tf.random_normal([n_hidden_1])),\n", 109 | " 'b2': tf.Variable(tf.random_normal([n_hidden_2])),\n", 110 | " 'out': tf.Variable(tf.random_normal([n_classes]))\n", 111 | "}\n", 112 | "\n", 113 | "# Construct model\n", 114 | "pred = multilayer_perceptron(x, weights, biases)\n", 115 | "\n", 116 | "# Define loss and optimizer\n", 117 | "cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))\n", 118 | "optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)\n", 119 | "\n", 120 | "# Initializing the variables\n", 121 | "init = tf.global_variables_initializer()" 122 | ] 123 | }, 124 | { 125 | "cell_type": "code", 126 | "execution_count": 6, 127 | "metadata": { 128 | "collapsed": false 129 | }, 130 | "outputs": [ 131 | { 132 | "name": "stdout", 133 | "output_type": "stream", 134 | "text": [ 135 | "Epoch: 0001 cost= 173.056566575\n", 136 | "Epoch: 0002 cost= 44.054413928\n", 137 | "Epoch: 0003 cost= 27.455470655\n", 138 | "Epoch: 0004 cost= 19.008652363\n", 139 | "Epoch: 0005 cost= 13.654873594\n", 140 | "Epoch: 0006 cost= 10.059267435\n", 141 | "Epoch: 0007 cost= 7.436018432\n", 142 | "Epoch: 0008 cost= 5.587794416\n", 143 | "Epoch: 0009 cost= 4.209882509\n", 144 | "Epoch: 0010 cost= 3.203879515\n", 145 | "Epoch: 0011 cost= 2.319920681\n", 146 | "Epoch: 0012 cost= 1.676204545\n", 147 | "Epoch: 0013 cost= 1.248805338\n", 148 | "Epoch: 0014 cost= 1.052676844\n", 149 | "Epoch: 0015 cost= 0.890117338\n", 150 | "Optimization Finished!\n", 151 | "Accuracy: 0.9459\n" 152 | ] 153 | } 154 | ], 155 | "source": [ 156 | "# Launch the graph\n", 157 | "with tf.Session() as sess:\n", 158 | " sess.run(init)\n", 159 | 
"\n", 160 | " # Training cycle\n", 161 | " for epoch in range(training_epochs):\n", 162 | " avg_cost = 0.\n", 163 | " total_batch = int(mnist.train.num_examples/batch_size)\n", 164 | " # Loop over all batches\n", 165 | " for i in range(total_batch):\n", 166 | " batch_x, batch_y = mnist.train.next_batch(batch_size)\n", 167 | " # Run optimization op (backprop) and cost op (to get loss value)\n", 168 | " _, c = sess.run([optimizer, cost], feed_dict={x: batch_x,\n", 169 | " y: batch_y})\n", 170 | " # Compute average loss\n", 171 | " avg_cost += c / total_batch\n", 172 | " # Display logs per epoch step\n", 173 | " if epoch % display_step == 0:\n", 174 | " print \"Epoch:\", '%04d' % (epoch+1), \"cost=\", \\\n", 175 | " \"{:.9f}\".format(avg_cost)\n", 176 | " print \"Optimization Finished!\"\n", 177 | "\n", 178 | " # Test model\n", 179 | " correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))\n", 180 | " # Calculate accuracy\n", 181 | " accuracy = tf.reduce_mean(tf.cast(correct_prediction, \"float\"))\n", 182 | " print \"Accuracy:\", accuracy.eval({x: mnist.test.images, y: mnist.test.labels})" 183 | ] 184 | }, 185 | { 186 | "cell_type": "code", 187 | "execution_count": null, 188 | "metadata": { 189 | "collapsed": true 190 | }, 191 | "outputs": [], 192 | "source": [] 193 | } 194 | ], 195 | "metadata": { 196 | "kernelspec": { 197 | "display_name": "Python 2", 198 | "language": "python", 199 | "name": "python2" 200 | }, 201 | "language_info": { 202 | "codemirror_mode": { 203 | "name": "ipython", 204 | "version": 2 205 | }, 206 | "file_extension": ".py", 207 | "mimetype": "text/x-python", 208 | "name": "python", 209 | "nbconvert_exporter": "python", 210 | "pygments_lexer": "ipython2", 211 | "version": "2.7.13" 212 | } 213 | }, 214 | "nbformat": 4, 215 | "nbformat_minor": 0 216 | } 217 | -------------------------------------------------------------------------------- /4_Utils/save_restore_model.ipynb: 
-------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "'''\n", 12 | "Save and Restore a model using TensorFlow.\n", 13 | "This example is using the MNIST database of handwritten digits\n", 14 | "(http://yann.lecun.com/exdb/mnist/)\n", 15 | "\n", 16 | "Author: Aymeric Damien\n", 17 | "Project: https://github.com/aymericdamien/TensorFlow-Examples/\n", 18 | "'''" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": 1, 24 | "metadata": { 25 | "collapsed": false 26 | }, 27 | "outputs": [ 28 | { 29 | "name": "stdout", 30 | "output_type": "stream", 31 | "text": [ 32 | "Extracting MNIST_data/train-images-idx3-ubyte.gz\n", 33 | "Extracting MNIST_data/train-labels-idx1-ubyte.gz\n", 34 | "Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n", 35 | "Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n" 36 | ] 37 | } 38 | ], 39 | "source": [ 40 | "# Import MINST data\n", 41 | "from tensorflow.examples.tutorials.mnist import input_data\n", 42 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)\n", 43 | "\n", 44 | "import tensorflow as tf" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 3, 50 | "metadata": { 51 | "collapsed": false 52 | }, 53 | "outputs": [], 54 | "source": [ 55 | "# Parameters\n", 56 | "learning_rate = 0.001\n", 57 | "batch_size = 100\n", 58 | "display_step = 1\n", 59 | "model_path = \"/tmp/model.ckpt\"\n", 60 | "\n", 61 | "# Network Parameters\n", 62 | "n_hidden_1 = 256 # 1st layer number of features\n", 63 | "n_hidden_2 = 256 # 2nd layer number of features\n", 64 | "n_input = 784 # MNIST data input (img shape: 28*28)\n", 65 | "n_classes = 10 # MNIST total classes (0-9 digits)\n", 66 | "\n", 67 | "# tf Graph input\n", 68 | "x = tf.placeholder(\"float\", [None, n_input])\n", 69 | "y = tf.placeholder(\"float\", [None, 
n_classes])\n", 70 | "\n", 71 | "\n", 72 | "# Create model\n", 73 | "def multilayer_perceptron(x, weights, biases):\n", 74 | " # Hidden layer with RELU activation\n", 75 | " layer_1 = tf.add(tf.matmul(x, weights['h1']), biases['b1'])\n", 76 | " layer_1 = tf.nn.relu(layer_1)\n", 77 | " # Hidden layer with RELU activation\n", 78 | " layer_2 = tf.add(tf.matmul(layer_1, weights['h2']), biases['b2'])\n", 79 | " layer_2 = tf.nn.relu(layer_2)\n", 80 | " # Output layer with linear activation\n", 81 | " out_layer = tf.matmul(layer_2, weights['out']) + biases['out']\n", 82 | " return out_layer\n", 83 | "\n", 84 | "# Store layers weight & bias\n", 85 | "weights = {\n", 86 | " 'h1': tf.Variable(tf.random_normal([n_input, n_hidden_1])),\n", 87 | " 'h2': tf.Variable(tf.random_normal([n_hidden_1, n_hidden_2])),\n", 88 | " 'out': tf.Variable(tf.random_normal([n_hidden_2, n_classes]))\n", 89 | "}\n", 90 | "biases = {\n", 91 | " 'b1': tf.Variable(tf.random_normal([n_hidden_1])),\n", 92 | " 'b2': tf.Variable(tf.random_normal([n_hidden_2])),\n", 93 | " 'out': tf.Variable(tf.random_normal([n_classes]))\n", 94 | "}\n", 95 | "\n", 96 | "# Construct model\n", 97 | "pred = multilayer_perceptron(x, weights, biases)\n", 98 | "\n", 99 | "# Define loss and optimizer\n", 100 | "cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))\n", 101 | "optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)\n", 102 | "\n", 103 | "# Initializing the variables\n", 104 | "init = tf.global_variables_initializer()" 105 | ] 106 | }, 107 | { 108 | "cell_type": "code", 109 | "execution_count": 4, 110 | "metadata": { 111 | "collapsed": true 112 | }, 113 | "outputs": [], 114 | "source": [ 115 | "# 'Saver' op to save and restore all the variables\n", 116 | "saver = tf.train.Saver()" 117 | ] 118 | }, 119 | { 120 | "cell_type": "code", 121 | "execution_count": 5, 122 | "metadata": { 123 | "collapsed": false 124 | }, 125 | "outputs": [ 126 | { 127 | "name": 
"stdout", 128 | "output_type": "stream", 129 | "text": [ 130 | "Starting 1st session...\n", 131 | "Epoch: 0001 cost= 187.778896380\n", 132 | "Epoch: 0002 cost= 42.367902536\n", 133 | "Epoch: 0003 cost= 26.488964058\n", 134 | "First Optimization Finished!\n", 135 | "Accuracy: 0.9075\n", 136 | "Model saved in file: /tmp/model.ckpt\n" 137 | ] 138 | } 139 | ], 140 | "source": [ 141 | "# Running first session\n", 142 | "print \"Starting 1st session...\"\n", 143 | "with tf.Session() as sess:\n", 144 | " # Initialize variables\n", 145 | " sess.run(init)\n", 146 | "\n", 147 | " # Training cycle\n", 148 | " for epoch in range(3):\n", 149 | " avg_cost = 0.\n", 150 | " total_batch = int(mnist.train.num_examples/batch_size)\n", 151 | " # Loop over all batches\n", 152 | " for i in range(total_batch):\n", 153 | " batch_x, batch_y = mnist.train.next_batch(batch_size)\n", 154 | " # Run optimization op (backprop) and cost op (to get loss value)\n", 155 | " _, c = sess.run([optimizer, cost], feed_dict={x: batch_x,\n", 156 | " y: batch_y})\n", 157 | " # Compute average loss\n", 158 | " avg_cost += c / total_batch\n", 159 | " # Display logs per epoch step\n", 160 | " if epoch % display_step == 0:\n", 161 | " print \"Epoch:\", '%04d' % (epoch+1), \"cost=\", \\\n", 162 | " \"{:.9f}\".format(avg_cost)\n", 163 | " print \"First Optimization Finished!\"\n", 164 | "\n", 165 | " # Test model\n", 166 | " correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))\n", 167 | " # Calculate accuracy\n", 168 | " accuracy = tf.reduce_mean(tf.cast(correct_prediction, \"float\"))\n", 169 | " print \"Accuracy:\", accuracy.eval({x: mnist.test.images, y: mnist.test.labels})\n", 170 | "\n", 171 | " # Save model weights to disk\n", 172 | " save_path = saver.save(sess, model_path)\n", 173 | " print \"Model saved in file: %s\" % save_path" 174 | ] 175 | }, 176 | { 177 | "cell_type": "code", 178 | "execution_count": 6, 179 | "metadata": { 180 | "collapsed": false 181 | }, 182 | "outputs": [ 183 | { 
184 | "name": "stdout", 185 | "output_type": "stream", 186 | "text": [ 187 | "Starting 2nd session...\n", 188 | "Model restored from file: /tmp/model.ckpt\n", 189 | "Epoch: 0001 cost= 18.292712951\n", 190 | "Epoch: 0002 cost= 13.404136196\n", 191 | "Epoch: 0003 cost= 9.855191723\n", 192 | "Epoch: 0004 cost= 7.276933088\n", 193 | "Epoch: 0005 cost= 5.564581285\n", 194 | "Epoch: 0006 cost= 4.165259939\n", 195 | "Epoch: 0007 cost= 3.139393926\n", 196 | "Second Optimization Finished!\n", 197 | "Accuracy: 0.9385\n" 198 | ] 199 | } 200 | ], 201 | "source": [ 202 | "# Running a new session\n", 203 | "print \"Starting 2nd session...\"\n", 204 | "with tf.Session() as sess:\n", 205 | " # Initialize variables\n", 206 | " sess.run(init)\n", 207 | "\n", 208 | " # Restore model weights from previously saved model\n", 209 | " load_path = saver.restore(sess, model_path)\n", 210 | " print \"Model restored from file: %s\" % save_path\n", 211 | "\n", 212 | " # Resume training\n", 213 | " for epoch in range(7):\n", 214 | " avg_cost = 0.\n", 215 | " total_batch = int(mnist.train.num_examples / batch_size)\n", 216 | " # Loop over all batches\n", 217 | " for i in range(total_batch):\n", 218 | " batch_x, batch_y = mnist.train.next_batch(batch_size)\n", 219 | " # Run optimization op (backprop) and cost op (to get loss value)\n", 220 | " _, c = sess.run([optimizer, cost], feed_dict={x: batch_x,\n", 221 | " y: batch_y})\n", 222 | " # Compute average loss\n", 223 | " avg_cost += c / total_batch\n", 224 | " # Display logs per epoch step\n", 225 | " if epoch % display_step == 0:\n", 226 | " print \"Epoch:\", '%04d' % (epoch + 1), \"cost=\", \\\n", 227 | " \"{:.9f}\".format(avg_cost)\n", 228 | " print \"Second Optimization Finished!\"\n", 229 | "\n", 230 | " # Test model\n", 231 | " correct_prediction = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))\n", 232 | " # Calculate accuracy\n", 233 | " accuracy = tf.reduce_mean(tf.cast(correct_prediction, \"float\"))\n", 234 | " print \"Accuracy:\", 
accuracy.eval(\n", 235 | " {x: mnist.test.images, y: mnist.test.labels})" 236 | ] 237 | }, 238 | { 239 | "cell_type": "code", 240 | "execution_count": null, 241 | "metadata": { 242 | "collapsed": true 243 | }, 244 | "outputs": [], 245 | "source": [] 246 | } 247 | ], 248 | "metadata": { 249 | "kernelspec": { 250 | "display_name": "Python 2", 251 | "language": "python", 252 | "name": "python2" 253 | }, 254 | "language_info": { 255 | "codemirror_mode": { 256 | "name": "ipython", 257 | "version": 2 258 | }, 259 | "file_extension": ".py", 260 | "mimetype": "text/x-python", 261 | "name": "python", 262 | "nbconvert_exporter": "python", 263 | "pygments_lexer": "ipython2", 264 | "version": "2.7.13" 265 | } 266 | }, 267 | "nbformat": 4, 268 | "nbformat_minor": 0 269 | } 270 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # TensorFlow Examples 2 | 3 | Forked from [https://github.com/aymericdamien/TensorFlow-Examples](https://github.com/aymericdamien/TensorFlow-Examples) 4 | 5 | TensorFlow Tutorial with popular machine learning algorithms implementation. This tutorial was designed for easily diving into TensorFlow, through examples. 6 | 7 | It is suitable for beginners who want to find clear and concise examples about TensorFlow. For readability, the tutorial includes both notebook and code with explanations. 
8 | 9 | Note: If you are using older TensorFlow version (before 0.12), please have a [look here](https://github.com/floydhub/tensorflow-notebooks-examples/tree/0.11) 10 | 11 | ## Tutorial index 12 | 13 | #### 0 - Prerequisite 14 | - Introduction to Machine Learning ([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/0_Prerequisite/ml_introduction.ipynb)) 15 | - Introduction to MNIST Dataset ([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/0_Prerequisite/mnist_dataset_intro.ipynb)) 16 | 17 | #### 1 - Introduction 18 | - Hello World ([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/1_Introduction/helloworld.ipynb)) 19 | - Basic Operations ([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/1_Introduction/basic_operations.ipynb)) 20 | 21 | #### 2 - Basic Models 22 | - Nearest Neighbor ([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/2_BasicModels/nearest_neighbor.ipynb)) 23 | - Linear Regression ([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/2_BasicModels/linear_regression.ipynb)) 24 | - Logistic Regression ([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/2_BasicModels/logistic_regression.ipynb)) 25 | 26 | #### 3 - Neural Networks 27 | - Multilayer Perceptron ([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/3_NeuralNetworks/multilayer_perceptron.ipynb)) 28 | - Convolutional Neural Network ([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/3_NeuralNetworks/convolutional_network.ipynb)) 29 | - Recurrent Neural Network (LSTM) ([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/3_NeuralNetworks/recurrent_network.ipynb)) 30 | - Bidirectional Recurrent Neural Network (LSTM) 
([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/3_NeuralNetworks/bidirectional_rnn.ipynb)) 31 | - Dynamic Recurrent Neural Network (LSTM) ([code](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/examples/3_NeuralNetworks/dynamic_rnn.py)) 32 | - AutoEncoder ([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/3_NeuralNetworks/autoencoder.ipynb)) 33 | 34 | #### 4 - Utilities 35 | - Save and Restore a model ([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/4_Utils/save_restore_model.ipynb)) 36 | - Tensorboard - Graph and loss visualization ([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/4_Utils/tensorboard_basic.ipynb)) 37 | - Tensorboard - Advanced visualization ([code](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/examples/4_Utils/tensorboard_advanced.py)) 38 | 39 | #### 5 - Multi GPU 40 | - Basic Operations on multi-GPU ([notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/5_MultiGPU/multigpu_basics.ipynb)) 41 | 42 | ## Dataset 43 | Some examples require MNIST dataset for training and testing. Don't worry, this dataset will automatically be downloaded when running examples (with input_data.py). 44 | MNIST is a database of handwritten digits, for a quick description of that dataset, you can check [this notebook](https://github.com/floydhub/tensorflow-notebooks-examples/blob/master/0_Prerequisite/mnist_dataset_intro.ipynb). 45 | 46 | Official Website: [http://yann.lecun.com/exdb/mnist/](http://yann.lecun.com/exdb/mnist/) 47 | 48 | ## More Examples 49 | The following examples are coming from [TFLearn](https://github.com/tflearn/tflearn), a library that provides a simplified interface for TensorFlow. 
You can have a look, there are many [examples](https://github.com/tflearn/tflearn/tree/master/examples) and [pre-built operations and layers](http://tflearn.org/doc_index/#api). 50 | 51 | ### Tutorials 52 | - [TFLearn Quickstart](https://github.com/tflearn/tflearn/blob/master/tutorials/intro/quickstart.md). Learn the basics of TFLearn through a concrete machine learning task. Build and train a deep neural network classifier. 53 | 54 | ### Basics 55 | - [Linear Regression](https://github.com/tflearn/tflearn/blob/master/examples/basics/linear_regression.py). Implement a linear regression using TFLearn. 56 | - [Logical Operators](https://github.com/tflearn/tflearn/blob/master/examples/basics/logical.py). Implement logical operators with TFLearn (also includes a usage of 'merge'). 57 | - [Weights Persistence](https://github.com/tflearn/tflearn/blob/master/examples/basics/weights_persistence.py). Save and Restore a model. 58 | - [Fine-Tuning](https://github.com/tflearn/tflearn/blob/master/examples/basics/finetuning.py). Fine-Tune a pre-trained model on a new task. 59 | - [Using HDF5](https://github.com/tflearn/tflearn/blob/master/examples/basics/use_hdf5.py). Use HDF5 to handle large datasets. 60 | - [Using DASK](https://github.com/tflearn/tflearn/blob/master/examples/basics/use_dask.py). Use DASK to handle large datasets. 61 | 62 | ### Computer Vision 63 | - [Multi-layer perceptron](https://github.com/tflearn/tflearn/blob/master/examples/images/dnn.py). A multi-layer perceptron implementation for MNIST classification task. 64 | - [Convolutional Network (MNIST)](https://github.com/tflearn/tflearn/blob/master/examples/images/convnet_mnist.py). A Convolutional neural network implementation for classifying MNIST dataset. 65 | - [Convolutional Network (CIFAR-10)](https://github.com/tflearn/tflearn/blob/master/examples/images/convnet_cifar10.py). A Convolutional neural network implementation for classifying CIFAR-10 dataset. 
66 | - [Network in Network](https://github.com/tflearn/tflearn/blob/master/examples/images/network_in_network.py). 'Network in Network' implementation for classifying CIFAR-10 dataset. 67 | - [Alexnet](https://github.com/tflearn/tflearn/blob/master/examples/images/alexnet.py). Apply Alexnet to Oxford Flowers 17 classification task. 68 | - [VGGNet](https://github.com/tflearn/tflearn/blob/master/examples/images/vgg_network.py). Apply VGG Network to Oxford Flowers 17 classification task. 69 | - [VGGNet Finetuning (Fast Training)](https://github.com/tflearn/tflearn/blob/master/examples/images/vgg_network_finetuning.py). Use a pre-trained VGG Network and retrain it on your own data, for fast training. 70 | - [RNN Pixels](https://github.com/tflearn/tflearn/blob/master/examples/images/rnn_pixels.py). Use RNN (over sequence of pixels) to classify images. 71 | - [Highway Network](https://github.com/tflearn/tflearn/blob/master/examples/images/highway_dnn.py). Highway Network implementation for classifying MNIST dataset. 72 | - [Highway Convolutional Network](https://github.com/tflearn/tflearn/blob/master/examples/images/convnet_highway_mnist.py). Highway Convolutional Network implementation for classifying MNIST dataset. 73 | - [Residual Network (MNIST)](https://github.com/tflearn/tflearn/blob/master/examples/images/residual_network_mnist.py). A bottleneck residual network applied to MNIST classification task. 74 | - [Residual Network (CIFAR-10)](https://github.com/tflearn/tflearn/blob/master/examples/images/residual_network_cifar10.py). A residual network applied to CIFAR-10 classification task. 75 | - [Google Inception (v3)](https://github.com/tflearn/tflearn/blob/master/examples/images/googlenet.py). Google's Inception v3 network applied to Oxford Flowers 17 classification task. 76 | - [Auto Encoder](https://github.com/tflearn/tflearn/blob/master/examples/images/autoencoder.py). An auto encoder applied to MNIST handwritten digits. 
77 | 78 | ### Natural Language Processing 79 | - [Recurrent Neural Network (LSTM)](https://github.com/tflearn/tflearn/blob/master/examples/nlp/lstm.py). Apply an LSTM to IMDB sentiment dataset classification task. 80 | - [Bi-Directional RNN (LSTM)](https://github.com/tflearn/tflearn/blob/master/examples/nlp/bidirectional_lstm.py). Apply a bi-directional LSTM to IMDB sentiment dataset classification task. 81 | - [Dynamic RNN (LSTM)](https://github.com/tflearn/tflearn/blob/master/examples/nlp/dynamic_lstm.py). Apply a dynamic LSTM to classify variable length text from IMDB dataset. 82 | - [City Name Generation](https://github.com/tflearn/tflearn/blob/master/examples/nlp/lstm_generator_cityname.py). Generates new US-cities name, using LSTM network. 83 | - [Shakespeare Scripts Generation](https://github.com/tflearn/tflearn/blob/master/examples/nlp/lstm_generator_shakespeare.py). Generates new Shakespeare scripts, using LSTM network. 84 | - [Seq2seq](https://github.com/tflearn/tflearn/blob/master/examples/nlp/seq2seq_example.py). Pedagogical example of seq2seq reccurent network. See [this repo](https://github.com/ichuang/tflearn_seq2seq) for full instructions. 85 | - [CNN Seq](https://github.com/tflearn/tflearn/blob/master/examples/nlp/cnn_sentence_classification.py). Apply a 1-D convolutional network to classify sequence of words from IMDB sentiment dataset. 86 | 87 | ### Reinforcement Learning 88 | - [Atari Pacman 1-step Q-Learning](https://github.com/tflearn/tflearn/blob/master/examples/reinforcement_learning/atari_1step_qlearning.py). Teach a machine to play Atari games (Pacman by default) using 1-step Q-learning. 89 | 90 | ### Others 91 | - [Recommender - Wide & Deep Network](https://github.com/tflearn/tflearn/blob/master/examples/others/recommender_wide_and_deep.py). Pedagogical example of wide & deep networks for recommender systems. 
92 | 93 | ### Notebooks 94 | - [Spiral Classification Problem](https://github.com/tflearn/tflearn/blob/master/examples/spiral.ipynb). TFLearn implementation of spiral classification problem from Stanford CS231n. 95 | 96 | ### Extending TensorFlow 97 | - [Layers](https://github.com/tflearn/tflearn/blob/master/examples/extending_tensorflow/layers.py). Use TFLearn layers along with TensorFlow. 98 | - [Trainer](https://github.com/tflearn/tflearn/blob/master/examples/extending_tensorflow/trainer.py). Use TFLearn trainer class to train any TensorFlow graph. 99 | - [Built-in Ops](https://github.com/tflearn/tflearn/blob/master/examples/extending_tensorflow/builtin_ops.py). Use TFLearn built-in operations along with TensorFlow. 100 | - [Summaries](https://github.com/tflearn/tflearn/blob/master/examples/extending_tensorflow/summaries.py). Use TFLearn summarizers along with TensorFlow. 101 | - [Variables](https://github.com/tflearn/tflearn/blob/master/examples/extending_tensorflow/variables.py). Use TFLearn variables along with TensorFlow. 
102 | 103 | 104 | ## Dependencies 105 | ``` 106 | tensorflow 1.0alpha 107 | numpy 108 | matplotlib 109 | cuda 110 | tflearn (if using tflearn examples) 111 | ``` 112 | For more details about TensorFlow installation, you can check [TensorFlow Installation Guide](https://github.com/tensorflow/tensorflow/blob/master/tensorflow/g3doc/get_started/os_setup.md) 113 | 114 | ### Note 115 | This repo needs updating for the latest version of Tensorflow 116 | -------------------------------------------------------------------------------- /3_NeuralNetworks/recurrent_network.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "collapsed": true 7 | }, 8 | "source": [ 9 | "'''\n", 10 | "A Reccurent Neural Network (LSTM) implementation example using TensorFlow library.\n", 11 | "This example is using the MNIST database of handwritten digits (http://yann.lecun.com/exdb/mnist/)\n", 12 | "Long Short Term Memory paper: http://deeplearning.cs.cmu.edu/pdfs/Hochreiter97_lstm.pdf\n", 13 | "\n", 14 | "Author: Aymeric Damien\n", 15 | "Project: https://github.com/aymericdamien/TensorFlow-Examples/\n", 16 | "'''" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "metadata": { 23 | "collapsed": false 24 | }, 25 | "outputs": [], 26 | "source": [ 27 | "import tensorflow as tf\n", 28 | "from tensorflow.contrib import rnn\n", 29 | "import numpy as np\n", 30 | "\n", 31 | "# Import MINST data\n", 32 | "from tensorflow.examples.tutorials.mnist import input_data\n", 33 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)" 34 | ] 35 | }, 36 | { 37 | "cell_type": "markdown", 38 | "metadata": { 39 | "collapsed": true 40 | }, 41 | "source": [ 42 | "'''\n", 43 | "To classify images using a reccurent neural network, we consider every image\n", 44 | "row as a sequence of pixels. 
Because MNIST image shape is 28*28px, we will then\n", 45 | "handle 28 sequences of 28 steps for every sample.\n", 46 | "'''" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": 2, 52 | "metadata": { 53 | "collapsed": false 54 | }, 55 | "outputs": [], 56 | "source": [ 57 | "# Parameters\n", 58 | "learning_rate = 0.001\n", 59 | "training_iters = 100000\n", 60 | "batch_size = 128\n", 61 | "display_step = 10\n", 62 | "\n", 63 | "# Network Parameters\n", 64 | "n_input = 28 # MNIST data input (img shape: 28*28)\n", 65 | "n_steps = 28 # timesteps\n", 66 | "n_hidden = 128 # hidden layer num of features\n", 67 | "n_classes = 10 # MNIST total classes (0-9 digits)\n", 68 | "\n", 69 | "# tf Graph input\n", 70 | "x = tf.placeholder(\"float\", [None, n_steps, n_input])\n", 71 | "y = tf.placeholder(\"float\", [None, n_classes])\n", 72 | "\n", 73 | "# Define weights\n", 74 | "weights = {\n", 75 | " 'out': tf.Variable(tf.random_normal([n_hidden, n_classes]))\n", 76 | "}\n", 77 | "biases = {\n", 78 | " 'out': tf.Variable(tf.random_normal([n_classes]))\n", 79 | "}" 80 | ] 81 | }, 82 | { 83 | "cell_type": "code", 84 | "execution_count": 3, 85 | "metadata": { 86 | "collapsed": false 87 | }, 88 | "outputs": [], 89 | "source": [ 90 | "def RNN(x, weights, biases):\n", 91 | "\n", 92 | " # Prepare data shape to match `rnn` function requirements\n", 93 | " # Current data input shape: (batch_size, n_steps, n_input)\n", 94 | " # Required shape: 'n_steps' tensors list of shape (batch_size, n_input)\n", 95 | " \n", 96 | " # Permuting batch_size and n_steps\n", 97 | " x = tf.transpose(x, [1, 0, 2])\n", 98 | " # Reshaping to (n_steps*batch_size, n_input)\n", 99 | " x = tf.reshape(x, [-1, n_input])\n", 100 | " # Split to get a list of 'n_steps' tensors of shape (batch_size, n_input)\n", 101 | " x = tf.split(x, n_steps, 0)\n", 102 | "\n", 103 | " # Define a lstm cell with tensorflow\n", 104 | " lstm_cell = rnn.BasicLSTMCell(n_hidden, forget_bias=1.0)\n", 105 | "\n", 106 | " # 
Get lstm cell output\n", 107 | " outputs, states = rnn.static_rnn(lstm_cell, x, dtype=tf.float32)\n", 108 | "\n", 109 | " # Linear activation, using rnn inner loop last output\n", 110 | " return tf.matmul(outputs[-1], weights['out']) + biases['out']\n", 111 | "\n", 112 | "pred = RNN(x, weights, biases)\n", 113 | "\n", 114 | "# Define loss and optimizer\n", 115 | "cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))\n", 116 | "optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)\n", 117 | "\n", 118 | "# Evaluate model\n", 119 | "correct_pred = tf.equal(tf.argmax(pred,1), tf.argmax(y,1))\n", 120 | "accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))\n", 121 | "\n", 122 | "# Initializing the variables\n", 123 | "init = tf.global_variables_initializer()" 124 | ] 125 | }, 126 | { 127 | "cell_type": "code", 128 | "execution_count": 4, 129 | "metadata": { 130 | "collapsed": false 131 | }, 132 | "outputs": [ 133 | { 134 | "name": "stdout", 135 | "output_type": "stream", 136 | "text": [ 137 | "Iter 1280, Minibatch Loss= 1.576423, Training Accuracy= 0.51562\n", 138 | "Iter 2560, Minibatch Loss= 1.450179, Training Accuracy= 0.53906\n", 139 | "Iter 3840, Minibatch Loss= 1.160066, Training Accuracy= 0.64844\n", 140 | "Iter 5120, Minibatch Loss= 0.898589, Training Accuracy= 0.73438\n", 141 | "Iter 6400, Minibatch Loss= 0.685712, Training Accuracy= 0.75781\n", 142 | "Iter 7680, Minibatch Loss= 1.085666, Training Accuracy= 0.64844\n", 143 | "Iter 8960, Minibatch Loss= 0.681488, Training Accuracy= 0.73438\n", 144 | "Iter 10240, Minibatch Loss= 0.557049, Training Accuracy= 0.82812\n", 145 | "Iter 11520, Minibatch Loss= 0.340857, Training Accuracy= 0.92188\n", 146 | "Iter 12800, Minibatch Loss= 0.596482, Training Accuracy= 0.78906\n", 147 | "Iter 14080, Minibatch Loss= 0.486564, Training Accuracy= 0.84375\n", 148 | "Iter 15360, Minibatch Loss= 0.302493, Training Accuracy= 0.90625\n", 149 | "Iter 16640, Minibatch 
Loss= 0.334277, Training Accuracy= 0.92188\n", 150 | "Iter 17920, Minibatch Loss= 0.222026, Training Accuracy= 0.90625\n", 151 | "Iter 19200, Minibatch Loss= 0.228581, Training Accuracy= 0.92188\n", 152 | "Iter 20480, Minibatch Loss= 0.150356, Training Accuracy= 0.96094\n", 153 | "Iter 21760, Minibatch Loss= 0.415417, Training Accuracy= 0.86719\n", 154 | "Iter 23040, Minibatch Loss= 0.159742, Training Accuracy= 0.94531\n", 155 | "Iter 24320, Minibatch Loss= 0.333764, Training Accuracy= 0.89844\n", 156 | "Iter 25600, Minibatch Loss= 0.379070, Training Accuracy= 0.88281\n", 157 | "Iter 26880, Minibatch Loss= 0.241612, Training Accuracy= 0.91406\n", 158 | "Iter 28160, Minibatch Loss= 0.200397, Training Accuracy= 0.93750\n", 159 | "Iter 29440, Minibatch Loss= 0.197994, Training Accuracy= 0.93750\n", 160 | "Iter 30720, Minibatch Loss= 0.330214, Training Accuracy= 0.89062\n", 161 | "Iter 32000, Minibatch Loss= 0.174626, Training Accuracy= 0.92969\n", 162 | "Iter 33280, Minibatch Loss= 0.202369, Training Accuracy= 0.93750\n", 163 | "Iter 34560, Minibatch Loss= 0.240835, Training Accuracy= 0.94531\n", 164 | "Iter 35840, Minibatch Loss= 0.207867, Training Accuracy= 0.93750\n", 165 | "Iter 37120, Minibatch Loss= 0.313306, Training Accuracy= 0.90625\n", 166 | "Iter 38400, Minibatch Loss= 0.089850, Training Accuracy= 0.96875\n", 167 | "Iter 39680, Minibatch Loss= 0.184803, Training Accuracy= 0.92188\n", 168 | "Iter 40960, Minibatch Loss= 0.236523, Training Accuracy= 0.92969\n", 169 | "Iter 42240, Minibatch Loss= 0.174834, Training Accuracy= 0.94531\n", 170 | "Iter 43520, Minibatch Loss= 0.127905, Training Accuracy= 0.93750\n", 171 | "Iter 44800, Minibatch Loss= 0.120045, Training Accuracy= 0.96875\n", 172 | "Iter 46080, Minibatch Loss= 0.068337, Training Accuracy= 0.98438\n", 173 | "Iter 47360, Minibatch Loss= 0.141118, Training Accuracy= 0.95312\n", 174 | "Iter 48640, Minibatch Loss= 0.182404, Training Accuracy= 0.92188\n", 175 | "Iter 49920, Minibatch Loss= 0.176778, 
Training Accuracy= 0.93750\n", 176 | "Iter 51200, Minibatch Loss= 0.098927, Training Accuracy= 0.97656\n", 177 | "Iter 52480, Minibatch Loss= 0.158776, Training Accuracy= 0.96094\n", 178 | "Iter 53760, Minibatch Loss= 0.031863, Training Accuracy= 0.99219\n", 179 | "Iter 55040, Minibatch Loss= 0.101799, Training Accuracy= 0.96094\n", 180 | "Iter 56320, Minibatch Loss= 0.176387, Training Accuracy= 0.96094\n", 181 | "Iter 57600, Minibatch Loss= 0.096277, Training Accuracy= 0.96875\n", 182 | "Iter 58880, Minibatch Loss= 0.137416, Training Accuracy= 0.94531\n", 183 | "Iter 60160, Minibatch Loss= 0.062801, Training Accuracy= 0.97656\n", 184 | "Iter 61440, Minibatch Loss= 0.036346, Training Accuracy= 0.98438\n", 185 | "Iter 62720, Minibatch Loss= 0.153030, Training Accuracy= 0.92969\n", 186 | "Iter 64000, Minibatch Loss= 0.117716, Training Accuracy= 0.95312\n", 187 | "Iter 65280, Minibatch Loss= 0.048387, Training Accuracy= 0.99219\n", 188 | "Iter 66560, Minibatch Loss= 0.070802, Training Accuracy= 0.97656\n", 189 | "Iter 67840, Minibatch Loss= 0.221085, Training Accuracy= 0.96875\n", 190 | "Iter 69120, Minibatch Loss= 0.184049, Training Accuracy= 0.93750\n", 191 | "Iter 70400, Minibatch Loss= 0.094883, Training Accuracy= 0.95312\n", 192 | "Iter 71680, Minibatch Loss= 0.087278, Training Accuracy= 0.96875\n", 193 | "Iter 72960, Minibatch Loss= 0.153267, Training Accuracy= 0.95312\n", 194 | "Iter 74240, Minibatch Loss= 0.161794, Training Accuracy= 0.94531\n", 195 | "Iter 75520, Minibatch Loss= 0.103779, Training Accuracy= 0.96875\n", 196 | "Iter 76800, Minibatch Loss= 0.165586, Training Accuracy= 0.96094\n", 197 | "Iter 78080, Minibatch Loss= 0.137721, Training Accuracy= 0.95312\n", 198 | "Iter 79360, Minibatch Loss= 0.124014, Training Accuracy= 0.96094\n", 199 | "Iter 80640, Minibatch Loss= 0.051460, Training Accuracy= 0.99219\n", 200 | "Iter 81920, Minibatch Loss= 0.185836, Training Accuracy= 0.96094\n", 201 | "Iter 83200, Minibatch Loss= 0.147694, Training Accuracy= 
0.94531\n", 202 | "Iter 84480, Minibatch Loss= 0.061550, Training Accuracy= 0.98438\n", 203 | "Iter 85760, Minibatch Loss= 0.093457, Training Accuracy= 0.96875\n", 204 | "Iter 87040, Minibatch Loss= 0.094497, Training Accuracy= 0.98438\n", 205 | "Iter 88320, Minibatch Loss= 0.093934, Training Accuracy= 0.96094\n", 206 | "Iter 89600, Minibatch Loss= 0.061550, Training Accuracy= 0.96875\n", 207 | "Iter 90880, Minibatch Loss= 0.082452, Training Accuracy= 0.97656\n", 208 | "Iter 92160, Minibatch Loss= 0.087423, Training Accuracy= 0.97656\n", 209 | "Iter 93440, Minibatch Loss= 0.032694, Training Accuracy= 0.99219\n", 210 | "Iter 94720, Minibatch Loss= 0.069597, Training Accuracy= 0.97656\n", 211 | "Iter 96000, Minibatch Loss= 0.193636, Training Accuracy= 0.96094\n", 212 | "Iter 97280, Minibatch Loss= 0.134405, Training Accuracy= 0.96094\n", 213 | "Iter 98560, Minibatch Loss= 0.072992, Training Accuracy= 0.96875\n", 214 | "Iter 99840, Minibatch Loss= 0.041049, Training Accuracy= 0.99219\n", 215 | "Optimization Finished!\n", 216 | "Testing Accuracy: 0.960938\n" 217 | ] 218 | } 219 | ], 220 | "source": [ 221 | "# Launch the graph\n", 222 | "with tf.Session() as sess:\n", 223 | " sess.run(init)\n", 224 | " step = 1\n", 225 | " # Keep training until reach max iterations\n", 226 | " while step * batch_size < training_iters:\n", 227 | " batch_x, batch_y = mnist.train.next_batch(batch_size)\n", 228 | " # Reshape data to get 28 seq of 28 elements\n", 229 | " batch_x = batch_x.reshape((batch_size, n_steps, n_input))\n", 230 | " # Run optimization op (backprop)\n", 231 | " sess.run(optimizer, feed_dict={x: batch_x, y: batch_y})\n", 232 | " if step % display_step == 0:\n", 233 | " # Calculate batch accuracy\n", 234 | " acc = sess.run(accuracy, feed_dict={x: batch_x, y: batch_y})\n", 235 | " # Calculate batch loss\n", 236 | " loss = sess.run(cost, feed_dict={x: batch_x, y: batch_y})\n", 237 | " print \"Iter \" + str(step*batch_size) + \", Minibatch Loss= \" + \\\n", 238 | " 
\"{:.6f}\".format(loss) + \", Training Accuracy= \" + \\\n", 239 | " \"{:.5f}\".format(acc)\n", 240 | " step += 1\n", 241 | " print \"Optimization Finished!\"\n", 242 | "\n", 243 | " # Calculate accuracy for 128 mnist test images\n", 244 | " test_len = 128\n", 245 | " test_data = mnist.test.images[:test_len].reshape((-1, n_steps, n_input))\n", 246 | " test_label = mnist.test.labels[:test_len]\n", 247 | " print \"Testing Accuracy:\", \\\n", 248 | " sess.run(accuracy, feed_dict={x: test_data, y: test_label})" 249 | ] 250 | }, 251 | { 252 | "cell_type": "code", 253 | "execution_count": null, 254 | "metadata": { 255 | "collapsed": true 256 | }, 257 | "outputs": [], 258 | "source": [] 259 | } 260 | ], 261 | "metadata": { 262 | "kernelspec": { 263 | "display_name": "Python 2", 264 | "language": "python", 265 | "name": "python2" 266 | }, 267 | "language_info": { 268 | "codemirror_mode": { 269 | "name": "ipython", 270 | "version": 2 271 | }, 272 | "file_extension": ".py", 273 | "mimetype": "text/x-python", 274 | "name": "python", 275 | "nbconvert_exporter": "python", 276 | "pygments_lexer": "ipython2", 277 | "version": "2.7.13" 278 | } 279 | }, 280 | "nbformat": 4, 281 | "nbformat_minor": 0 282 | } 283 | -------------------------------------------------------------------------------- /3_NeuralNetworks/bidirectional_rnn.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": { 6 | "collapsed": true 7 | }, 8 | "source": [ 9 | "'''\n", 10 | "A Bidirectional Reccurent Neural Network (LSTM) implementation example using TensorFlow library.\n", 11 | "This example is using the MNIST database of handwritten digits (http://yann.lecun.com/exdb/mnist/)\n", 12 | "Long Short Term Memory paper: http://deeplearning.cs.cmu.edu/pdfs/Hochreiter97_lstm.pdf\n", 13 | "\n", 14 | "Author: Aymeric Damien\n", 15 | "Project: https://github.com/aymericdamien/TensorFlow-Examples/\n", 16 | "'''" 17 | 
] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "metadata": { 23 | "collapsed": false 24 | }, 25 | "outputs": [], 26 | "source": [ 27 | "import tensorflow as tf\n", 28 | "from tensorflow.contrib import rnn\n", 29 | "import numpy as np\n", 30 | "\n", 31 | "# Import MNIST data\n", 32 | "from tensorflow.examples.tutorials.mnist import input_data\n", 33 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)" 34 | ] 35 | }, 36 | { 37 | "cell_type": "markdown", 38 | "metadata": { 39 | "collapsed": true 40 | }, 41 | "source": [ 42 | "'''\n", 43 | "To classify images using a bidirectional recurrent neural network, we consider\n", 44 | "every image row as a sequence of pixels. Because MNIST image shape is 28*28px,\n", 45 | "we will then handle 28 sequences of 28 steps for every sample.\n", 46 | "'''" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": 2, 52 | "metadata": { 53 | "collapsed": true 54 | }, 55 | "outputs": [], 56 | "source": [ 57 | "# Parameters\n", 58 | "learning_rate = 0.001\n", 59 | "training_iters = 100000\n", 60 | "batch_size = 128\n", 61 | "display_step = 10\n", 62 | "\n", 63 | "# Network Parameters\n", 64 | "n_input = 28 # MNIST data input (img shape: 28*28)\n", 65 | "n_steps = 28 # timesteps\n", 66 | "n_hidden = 128 # hidden layer num of features\n", 67 | "n_classes = 10 # MNIST total classes (0-9 digits)\n", 68 | "\n", 69 | "# tf Graph input\n", 70 | "x = tf.placeholder(\"float\", [None, n_steps, n_input])\n", 71 | "y = tf.placeholder(\"float\", [None, n_classes])\n", 72 | "\n", 73 | "# Define weights\n", 74 | "weights = {\n", 75 | " # Hidden layer weights => 2*n_hidden because of forward + backward cells\n", 76 | " 'out': tf.Variable(tf.random_normal([2*n_hidden, n_classes]))\n", 77 | "}\n", 78 | "biases = {\n", 79 | " 'out': tf.Variable(tf.random_normal([n_classes]))\n", 80 | "}" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": 3, 86 | "metadata": { 87 | "collapsed": 
false 88 | }, 89 | "outputs": [], 90 | "source": [ 91 | "def BiRNN(x, weights, biases):\n", 92 | "\n", 93 | " # Prepare data shape to match `bidirectional_rnn` function requirements\n", 94 | " # Current data input shape: (batch_size, n_steps, n_input)\n", 95 | " # Required shape: 'n_steps' tensors list of shape (batch_size, n_input)\n", 96 | " \n", 97 | " # Permuting batch_size and n_steps\n", 98 | " x = tf.transpose(x, [1, 0, 2])\n", 99 | " # Reshape to (n_steps*batch_size, n_input)\n", 100 | " x = tf.reshape(x, [-1, n_input])\n", 101 | " # Split to get a list of 'n_steps' tensors of shape (batch_size, n_input)\n", 102 | " x = tf.split(x, n_steps, 0)\n", 103 | "\n", 104 | " # Define lstm cells with tensorflow\n", 105 | " # Forward direction cell\n", 106 | " lstm_fw_cell = rnn.BasicLSTMCell(n_hidden, forget_bias=1.0)\n", 107 | " # Backward direction cell\n", 108 | " lstm_bw_cell = rnn.BasicLSTMCell(n_hidden, forget_bias=1.0)\n", 109 | "\n", 110 | " # Get lstm cell output\n", 111 | " try:\n", 112 | " outputs, _, _ = rnn.static_bidirectional_rnn(lstm_fw_cell, lstm_bw_cell, x,\n", 113 | " dtype=tf.float32)\n", 114 | " except Exception: # Old TensorFlow version only returns outputs not states\n", 115 | " outputs = rnn.static_bidirectional_rnn(lstm_fw_cell, lstm_bw_cell, x,\n", 116 | " dtype=tf.float32)\n", 117 | "\n", 118 | " # Linear activation, using rnn inner loop last output\n", 119 | " return tf.matmul(outputs[-1], weights['out']) + biases['out']\n", 120 | "\n", 121 | "pred = BiRNN(x, weights, biases)\n", 122 | "\n", 123 | "# Define loss and optimizer\n", 124 | "cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))\n", 125 | "optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)\n", 126 | "\n", 127 | "# Evaluate model\n", 128 | "correct_pred = tf.equal(tf.argmax(pred,1), tf.argmax(y,1))\n", 129 | "accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))\n", 130 | "\n", 131 | "# Initializing the 
variables\n", 132 | "init = tf.global_variables_initializer()" 133 | ] 134 | }, 135 | { 136 | "cell_type": "code", 137 | "execution_count": 4, 138 | "metadata": { 139 | "collapsed": false 140 | }, 141 | "outputs": [ 142 | { 143 | "name": "stdout", 144 | "output_type": "stream", 145 | "text": [ 146 | "Iter 1280, Minibatch Loss= 1.557283, Training Accuracy= 0.49219\n", 147 | "Iter 2560, Minibatch Loss= 1.358445, Training Accuracy= 0.56250\n", 148 | "Iter 3840, Minibatch Loss= 1.043732, Training Accuracy= 0.64062\n", 149 | "Iter 5120, Minibatch Loss= 0.796770, Training Accuracy= 0.72656\n", 150 | "Iter 6400, Minibatch Loss= 0.626206, Training Accuracy= 0.72656\n", 151 | "Iter 7680, Minibatch Loss= 1.025919, Training Accuracy= 0.65625\n", 152 | "Iter 8960, Minibatch Loss= 0.744850, Training Accuracy= 0.76562\n", 153 | "Iter 10240, Minibatch Loss= 0.530111, Training Accuracy= 0.84375\n", 154 | "Iter 11520, Minibatch Loss= 0.383806, Training Accuracy= 0.86719\n", 155 | "Iter 12800, Minibatch Loss= 0.607816, Training Accuracy= 0.82812\n", 156 | "Iter 14080, Minibatch Loss= 0.410879, Training Accuracy= 0.89062\n", 157 | "Iter 15360, Minibatch Loss= 0.335351, Training Accuracy= 0.89844\n", 158 | "Iter 16640, Minibatch Loss= 0.428004, Training Accuracy= 0.91406\n", 159 | "Iter 17920, Minibatch Loss= 0.307468, Training Accuracy= 0.91406\n", 160 | "Iter 19200, Minibatch Loss= 0.249527, Training Accuracy= 0.92188\n", 161 | "Iter 20480, Minibatch Loss= 0.148163, Training Accuracy= 0.96094\n", 162 | "Iter 21760, Minibatch Loss= 0.445275, Training Accuracy= 0.83594\n", 163 | "Iter 23040, Minibatch Loss= 0.173083, Training Accuracy= 0.93750\n", 164 | "Iter 24320, Minibatch Loss= 0.373696, Training Accuracy= 0.87500\n", 165 | "Iter 25600, Minibatch Loss= 0.509869, Training Accuracy= 0.85938\n", 166 | "Iter 26880, Minibatch Loss= 0.198096, Training Accuracy= 0.92969\n", 167 | "Iter 28160, Minibatch Loss= 0.228221, Training Accuracy= 0.92188\n", 168 | "Iter 29440, Minibatch Loss= 
0.280088, Training Accuracy= 0.89844\n", 169 | "Iter 30720, Minibatch Loss= 0.300495, Training Accuracy= 0.91406\n", 170 | "Iter 32000, Minibatch Loss= 0.171746, Training Accuracy= 0.95312\n", 171 | "Iter 33280, Minibatch Loss= 0.263745, Training Accuracy= 0.89844\n", 172 | "Iter 34560, Minibatch Loss= 0.177300, Training Accuracy= 0.93750\n", 173 | "Iter 35840, Minibatch Loss= 0.160621, Training Accuracy= 0.95312\n", 174 | "Iter 37120, Minibatch Loss= 0.321745, Training Accuracy= 0.91406\n", 175 | "Iter 38400, Minibatch Loss= 0.188322, Training Accuracy= 0.93750\n", 176 | "Iter 39680, Minibatch Loss= 0.104025, Training Accuracy= 0.96875\n", 177 | "Iter 40960, Minibatch Loss= 0.291053, Training Accuracy= 0.89062\n", 178 | "Iter 42240, Minibatch Loss= 0.131189, Training Accuracy= 0.95312\n", 179 | "Iter 43520, Minibatch Loss= 0.154949, Training Accuracy= 0.92969\n", 180 | "Iter 44800, Minibatch Loss= 0.150411, Training Accuracy= 0.93750\n", 181 | "Iter 46080, Minibatch Loss= 0.117008, Training Accuracy= 0.96094\n", 182 | "Iter 47360, Minibatch Loss= 0.181344, Training Accuracy= 0.96094\n", 183 | "Iter 48640, Minibatch Loss= 0.209197, Training Accuracy= 0.94531\n", 184 | "Iter 49920, Minibatch Loss= 0.159350, Training Accuracy= 0.96094\n", 185 | "Iter 51200, Minibatch Loss= 0.124001, Training Accuracy= 0.95312\n", 186 | "Iter 52480, Minibatch Loss= 0.165183, Training Accuracy= 0.94531\n", 187 | "Iter 53760, Minibatch Loss= 0.046438, Training Accuracy= 0.97656\n", 188 | "Iter 55040, Minibatch Loss= 0.199995, Training Accuracy= 0.91406\n", 189 | "Iter 56320, Minibatch Loss= 0.057071, Training Accuracy= 0.97656\n", 190 | "Iter 57600, Minibatch Loss= 0.177065, Training Accuracy= 0.92188\n", 191 | "Iter 58880, Minibatch Loss= 0.091666, Training Accuracy= 0.96094\n", 192 | "Iter 60160, Minibatch Loss= 0.069232, Training Accuracy= 0.96875\n", 193 | "Iter 61440, Minibatch Loss= 0.127353, Training Accuracy= 0.94531\n", 194 | "Iter 62720, Minibatch Loss= 0.095795, Training 
Accuracy= 0.96094\n", 195 | "Iter 64000, Minibatch Loss= 0.202651, Training Accuracy= 0.96875\n", 196 | "Iter 65280, Minibatch Loss= 0.118779, Training Accuracy= 0.95312\n", 197 | "Iter 66560, Minibatch Loss= 0.043173, Training Accuracy= 0.98438\n", 198 | "Iter 67840, Minibatch Loss= 0.152280, Training Accuracy= 0.95312\n", 199 | "Iter 69120, Minibatch Loss= 0.085301, Training Accuracy= 0.96875\n", 200 | "Iter 70400, Minibatch Loss= 0.093421, Training Accuracy= 0.96094\n", 201 | "Iter 71680, Minibatch Loss= 0.096358, Training Accuracy= 0.96875\n", 202 | "Iter 72960, Minibatch Loss= 0.053386, Training Accuracy= 0.98438\n", 203 | "Iter 74240, Minibatch Loss= 0.065237, Training Accuracy= 0.97656\n", 204 | "Iter 75520, Minibatch Loss= 0.228090, Training Accuracy= 0.92188\n", 205 | "Iter 76800, Minibatch Loss= 0.106751, Training Accuracy= 0.95312\n", 206 | "Iter 78080, Minibatch Loss= 0.187795, Training Accuracy= 0.94531\n", 207 | "Iter 79360, Minibatch Loss= 0.092611, Training Accuracy= 0.96094\n", 208 | "Iter 80640, Minibatch Loss= 0.137386, Training Accuracy= 0.96875\n", 209 | "Iter 81920, Minibatch Loss= 0.106634, Training Accuracy= 0.98438\n", 210 | "Iter 83200, Minibatch Loss= 0.111749, Training Accuracy= 0.94531\n", 211 | "Iter 84480, Minibatch Loss= 0.191184, Training Accuracy= 0.94531\n", 212 | "Iter 85760, Minibatch Loss= 0.063982, Training Accuracy= 0.96094\n", 213 | "Iter 87040, Minibatch Loss= 0.092380, Training Accuracy= 0.96875\n", 214 | "Iter 88320, Minibatch Loss= 0.089899, Training Accuracy= 0.97656\n", 215 | "Iter 89600, Minibatch Loss= 0.141107, Training Accuracy= 0.94531\n", 216 | "Iter 90880, Minibatch Loss= 0.075549, Training Accuracy= 0.96094\n", 217 | "Iter 92160, Minibatch Loss= 0.186539, Training Accuracy= 0.94531\n", 218 | "Iter 93440, Minibatch Loss= 0.079639, Training Accuracy= 0.97656\n", 219 | "Iter 94720, Minibatch Loss= 0.156895, Training Accuracy= 0.95312\n", 220 | "Iter 96000, Minibatch Loss= 0.088042, Training Accuracy= 0.97656\n", 
221 | "Iter 97280, Minibatch Loss= 0.076670, Training Accuracy= 0.96875\n", 222 | "Iter 98560, Minibatch Loss= 0.051336, Training Accuracy= 0.97656\n", 223 | "Iter 99840, Minibatch Loss= 0.086923, Training Accuracy= 0.98438\n", 224 | "Optimization Finished!\n", 225 | "Testing Accuracy: 0.960938\n" 226 | ] 227 | } 228 | ], 229 | "source": [ 230 | "# Launch the graph\n", 231 | "with tf.Session() as sess:\n", 232 | " sess.run(init)\n", 233 | " step = 1\n", 234 | " # Keep training until reach max iterations\n", 235 | " while step * batch_size < training_iters:\n", 236 | " batch_x, batch_y = mnist.train.next_batch(batch_size)\n", 237 | " # Reshape data to get 28 seq of 28 elements\n", 238 | " batch_x = batch_x.reshape((batch_size, n_steps, n_input))\n", 239 | " # Run optimization op (backprop)\n", 240 | " sess.run(optimizer, feed_dict={x: batch_x, y: batch_y})\n", 241 | " if step % display_step == 0:\n", 242 | " # Calculate batch accuracy\n", 243 | " acc = sess.run(accuracy, feed_dict={x: batch_x, y: batch_y})\n", 244 | " # Calculate batch loss\n", 245 | " loss = sess.run(cost, feed_dict={x: batch_x, y: batch_y})\n", 246 | " print \"Iter \" + str(step*batch_size) + \", Minibatch Loss= \" + \\\n", 247 | " \"{:.6f}\".format(loss) + \", Training Accuracy= \" + \\\n", 248 | " \"{:.5f}\".format(acc)\n", 249 | " step += 1\n", 250 | " print \"Optimization Finished!\"\n", 251 | "\n", 252 | " # Calculate accuracy for 128 mnist test images\n", 253 | " test_len = 128\n", 254 | " test_data = mnist.test.images[:test_len].reshape((-1, n_steps, n_input))\n", 255 | " test_label = mnist.test.labels[:test_len]\n", 256 | " print \"Testing Accuracy:\", \\\n", 257 | " sess.run(accuracy, feed_dict={x: test_data, y: test_label})" 258 | ] 259 | }, 260 | { 261 | "cell_type": "code", 262 | "execution_count": null, 263 | "metadata": { 264 | "collapsed": true 265 | }, 266 | "outputs": [], 267 | "source": [] 268 | } 269 | ], 270 | "metadata": { 271 | "kernelspec": { 272 | "display_name": "Python 
2", 273 | "language": "python", 274 | "name": "python2" 275 | }, 276 | "language_info": { 277 | "codemirror_mode": { 278 | "name": "ipython", 279 | "version": 2 280 | }, 281 | "file_extension": ".py", 282 | "mimetype": "text/x-python", 283 | "name": "python", 284 | "nbconvert_exporter": "python", 285 | "pygments_lexer": "ipython2", 286 | "version": "2.7.13" 287 | } 288 | }, 289 | "nbformat": 4, 290 | "nbformat_minor": 0 291 | } 292 | -------------------------------------------------------------------------------- /2_BasicModels/nearest_neighbor.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# A nearest neighbor learning algorithm example using TensorFlow library.\n", 12 | "# This example is using the MNIST database of handwritten digits\n", 13 | "# (http://yann.lecun.com/exdb/mnist/)\n", 14 | "\n", 15 | "# Author: Aymeric Damien\n", 16 | "# Project: https://github.com/aymericdamien/TensorFlow-Examples/" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": 1, 22 | "metadata": { 23 | "collapsed": false 24 | }, 25 | "outputs": [ 26 | { 27 | "name": "stdout", 28 | "output_type": "stream", 29 | "text": [ 30 | "Extracting MNIST_data/train-images-idx3-ubyte.gz\n", 31 | "Extracting MNIST_data/train-labels-idx1-ubyte.gz\n", 32 | "Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n", 33 | "Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n" 34 | ] 35 | } 36 | ], 37 | "source": [ 38 | "import numpy as np\n", 39 | "import tensorflow as tf\n", 40 | "\n", 41 | "# Import MNIST data\n", 42 | "from tensorflow.examples.tutorials.mnist import input_data\n", 43 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": 2, 49 | "metadata": { 50 | "collapsed": false 51 | }, 52 | 
"outputs": [], 53 | "source": [ 54 | "# In this example, we limit mnist data\n", 55 | "Xtr, Ytr = mnist.train.next_batch(5000) #5000 for training (nn candidates)\n", 56 | "Xte, Yte = mnist.test.next_batch(200) #200 for testing\n", 57 | "\n", 58 | "# tf Graph Input\n", 59 | "xtr = tf.placeholder(\"float\", [None, 784])\n", 60 | "xte = tf.placeholder(\"float\", [784])\n", 61 | "\n", 62 | "# Nearest Neighbor calculation using L1 Distance\n", 63 | "# Calculate L1 Distance\n", 64 | "distance = tf.reduce_sum(tf.abs(tf.add(xtr, tf.negative(xte))), reduction_indices=1)\n", 65 | "# Prediction: Get min distance index (Nearest neighbor)\n", 66 | "pred = tf.arg_min(distance, 0)\n", 67 | "\n", 68 | "accuracy = 0.\n", 69 | "\n", 70 | "# Initializing the variables\n", 71 | "init = tf.global_variables_initializer()" 72 | ] 73 | }, 74 | { 75 | "cell_type": "code", 76 | "execution_count": 3, 77 | "metadata": { 78 | "collapsed": false 79 | }, 80 | "outputs": [ 81 | { 82 | "name": "stdout", 83 | "output_type": "stream", 84 | "text": [ 85 | "Test 0 Prediction: 7 True Class: 7\n", 86 | "Test 1 Prediction: 2 True Class: 2\n", 87 | "Test 2 Prediction: 1 True Class: 1\n", 88 | "Test 3 Prediction: 0 True Class: 0\n", 89 | "Test 4 Prediction: 4 True Class: 4\n", 90 | "Test 5 Prediction: 1 True Class: 1\n", 91 | "Test 6 Prediction: 4 True Class: 4\n", 92 | "Test 7 Prediction: 9 True Class: 9\n", 93 | "Test 8 Prediction: 8 True Class: 5\n", 94 | "Test 9 Prediction: 9 True Class: 9\n", 95 | "Test 10 Prediction: 0 True Class: 0\n", 96 | "Test 11 Prediction: 0 True Class: 6\n", 97 | "Test 12 Prediction: 9 True Class: 9\n", 98 | "Test 13 Prediction: 0 True Class: 0\n", 99 | "Test 14 Prediction: 1 True Class: 1\n", 100 | "Test 15 Prediction: 5 True Class: 5\n", 101 | "Test 16 Prediction: 4 True Class: 9\n", 102 | "Test 17 Prediction: 7 True Class: 7\n", 103 | "Test 18 Prediction: 3 True Class: 3\n", 104 | "Test 19 Prediction: 4 True Class: 4\n", 105 | "Test 20 Prediction: 9 True Class: 9\n", 106 | 
"Test 21 Prediction: 6 True Class: 6\n", 107 | "Test 22 Prediction: 6 True Class: 6\n", 108 | "Test 23 Prediction: 5 True Class: 5\n", 109 | "Test 24 Prediction: 4 True Class: 4\n", 110 | "Test 25 Prediction: 0 True Class: 0\n", 111 | "Test 26 Prediction: 7 True Class: 7\n", 112 | "Test 27 Prediction: 4 True Class: 4\n", 113 | "Test 28 Prediction: 0 True Class: 0\n", 114 | "Test 29 Prediction: 1 True Class: 1\n", 115 | "Test 30 Prediction: 3 True Class: 3\n", 116 | "Test 31 Prediction: 1 True Class: 1\n", 117 | "Test 32 Prediction: 3 True Class: 3\n", 118 | "Test 33 Prediction: 4 True Class: 4\n", 119 | "Test 34 Prediction: 7 True Class: 7\n", 120 | "Test 35 Prediction: 2 True Class: 2\n", 121 | "Test 36 Prediction: 7 True Class: 7\n", 122 | "Test 37 Prediction: 1 True Class: 1\n", 123 | "Test 38 Prediction: 2 True Class: 2\n", 124 | "Test 39 Prediction: 1 True Class: 1\n", 125 | "Test 40 Prediction: 1 True Class: 1\n", 126 | "Test 41 Prediction: 7 True Class: 7\n", 127 | "Test 42 Prediction: 4 True Class: 4\n", 128 | "Test 43 Prediction: 1 True Class: 2\n", 129 | "Test 44 Prediction: 3 True Class: 3\n", 130 | "Test 45 Prediction: 5 True Class: 5\n", 131 | "Test 46 Prediction: 1 True Class: 1\n", 132 | "Test 47 Prediction: 2 True Class: 2\n", 133 | "Test 48 Prediction: 4 True Class: 4\n", 134 | "Test 49 Prediction: 4 True Class: 4\n", 135 | "Test 50 Prediction: 6 True Class: 6\n", 136 | "Test 51 Prediction: 3 True Class: 3\n", 137 | "Test 52 Prediction: 5 True Class: 5\n", 138 | "Test 53 Prediction: 5 True Class: 5\n", 139 | "Test 54 Prediction: 6 True Class: 6\n", 140 | "Test 55 Prediction: 0 True Class: 0\n", 141 | "Test 56 Prediction: 4 True Class: 4\n", 142 | "Test 57 Prediction: 1 True Class: 1\n", 143 | "Test 58 Prediction: 9 True Class: 9\n", 144 | "Test 59 Prediction: 5 True Class: 5\n", 145 | "Test 60 Prediction: 7 True Class: 7\n", 146 | "Test 61 Prediction: 8 True Class: 8\n", 147 | "Test 62 Prediction: 9 True Class: 9\n", 148 | "Test 63 Prediction: 3 
True Class: 3\n", 149 | "Test 64 Prediction: 7 True Class: 7\n", 150 | "Test 65 Prediction: 4 True Class: 4\n", 151 | "Test 66 Prediction: 6 True Class: 6\n", 152 | "Test 67 Prediction: 4 True Class: 4\n", 153 | "Test 68 Prediction: 3 True Class: 3\n", 154 | "Test 69 Prediction: 0 True Class: 0\n", 155 | "Test 70 Prediction: 7 True Class: 7\n", 156 | "Test 71 Prediction: 0 True Class: 0\n", 157 | "Test 72 Prediction: 2 True Class: 2\n", 158 | "Test 73 Prediction: 7 True Class: 9\n", 159 | "Test 74 Prediction: 1 True Class: 1\n", 160 | "Test 75 Prediction: 7 True Class: 7\n", 161 | "Test 76 Prediction: 3 True Class: 3\n", 162 | "Test 77 Prediction: 7 True Class: 2\n", 163 | "Test 78 Prediction: 9 True Class: 9\n", 164 | "Test 79 Prediction: 7 True Class: 7\n", 165 | "Test 80 Prediction: 7 True Class: 7\n", 166 | "Test 81 Prediction: 6 True Class: 6\n", 167 | "Test 82 Prediction: 2 True Class: 2\n", 168 | "Test 83 Prediction: 7 True Class: 7\n", 169 | "Test 84 Prediction: 8 True Class: 8\n", 170 | "Test 85 Prediction: 4 True Class: 4\n", 171 | "Test 86 Prediction: 7 True Class: 7\n", 172 | "Test 87 Prediction: 3 True Class: 3\n", 173 | "Test 88 Prediction: 6 True Class: 6\n", 174 | "Test 89 Prediction: 1 True Class: 1\n", 175 | "Test 90 Prediction: 3 True Class: 3\n", 176 | "Test 91 Prediction: 6 True Class: 6\n", 177 | "Test 92 Prediction: 9 True Class: 9\n", 178 | "Test 93 Prediction: 3 True Class: 3\n", 179 | "Test 94 Prediction: 1 True Class: 1\n", 180 | "Test 95 Prediction: 4 True Class: 4\n", 181 | "Test 96 Prediction: 1 True Class: 1\n", 182 | "Test 97 Prediction: 7 True Class: 7\n", 183 | "Test 98 Prediction: 6 True Class: 6\n", 184 | "Test 99 Prediction: 9 True Class: 9\n", 185 | "Test 100 Prediction: 6 True Class: 6\n", 186 | "Test 101 Prediction: 0 True Class: 0\n", 187 | "Test 102 Prediction: 5 True Class: 5\n", 188 | "Test 103 Prediction: 4 True Class: 4\n", 189 | "Test 104 Prediction: 9 True Class: 9\n", 190 | "Test 105 Prediction: 9 True Class: 9\n", 
191 | "Test 106 Prediction: 2 True Class: 2\n", 192 | "Test 107 Prediction: 1 True Class: 1\n", 193 | "Test 108 Prediction: 9 True Class: 9\n", 194 | "Test 109 Prediction: 4 True Class: 4\n", 195 | "Test 110 Prediction: 8 True Class: 8\n", 196 | "Test 111 Prediction: 7 True Class: 7\n", 197 | "Test 112 Prediction: 3 True Class: 3\n", 198 | "Test 113 Prediction: 9 True Class: 9\n", 199 | "Test 114 Prediction: 7 True Class: 7\n", 200 | "Test 115 Prediction: 9 True Class: 4\n", 201 | "Test 116 Prediction: 9 True Class: 4\n", 202 | "Test 117 Prediction: 4 True Class: 4\n", 203 | "Test 118 Prediction: 9 True Class: 9\n", 204 | "Test 119 Prediction: 7 True Class: 2\n", 205 | "Test 120 Prediction: 5 True Class: 5\n", 206 | "Test 121 Prediction: 4 True Class: 4\n", 207 | "Test 122 Prediction: 7 True Class: 7\n", 208 | "Test 123 Prediction: 6 True Class: 6\n", 209 | "Test 124 Prediction: 7 True Class: 7\n", 210 | "Test 125 Prediction: 9 True Class: 9\n", 211 | "Test 126 Prediction: 0 True Class: 0\n", 212 | "Test 127 Prediction: 5 True Class: 5\n", 213 | "Test 128 Prediction: 8 True Class: 8\n", 214 | "Test 129 Prediction: 5 True Class: 5\n", 215 | "Test 130 Prediction: 6 True Class: 6\n", 216 | "Test 131 Prediction: 6 True Class: 6\n", 217 | "Test 132 Prediction: 5 True Class: 5\n", 218 | "Test 133 Prediction: 7 True Class: 7\n", 219 | "Test 134 Prediction: 8 True Class: 8\n", 220 | "Test 135 Prediction: 1 True Class: 1\n", 221 | "Test 136 Prediction: 0 True Class: 0\n", 222 | "Test 137 Prediction: 1 True Class: 1\n", 223 | "Test 138 Prediction: 6 True Class: 6\n", 224 | "Test 139 Prediction: 4 True Class: 4\n", 225 | "Test 140 Prediction: 6 True Class: 6\n", 226 | "Test 141 Prediction: 7 True Class: 7\n", 227 | "Test 142 Prediction: 2 True Class: 3\n", 228 | "Test 143 Prediction: 1 True Class: 1\n", 229 | "Test 144 Prediction: 7 True Class: 7\n", 230 | "Test 145 Prediction: 1 True Class: 1\n", 231 | "Test 146 Prediction: 8 True Class: 8\n", 232 | "Test 147 Prediction: 2 
True Class: 2\n", 233 | "Test 148 Prediction: 0 True Class: 0\n", 234 | "Test 149 Prediction: 1 True Class: 2\n", 235 | "Test 150 Prediction: 9 True Class: 9\n", 236 | "Test 151 Prediction: 9 True Class: 9\n", 237 | "Test 152 Prediction: 5 True Class: 5\n", 238 | "Test 153 Prediction: 5 True Class: 5\n", 239 | "Test 154 Prediction: 1 True Class: 1\n", 240 | "Test 155 Prediction: 5 True Class: 5\n", 241 | "Test 156 Prediction: 6 True Class: 6\n", 242 | "Test 157 Prediction: 0 True Class: 0\n", 243 | "Test 158 Prediction: 3 True Class: 3\n", 244 | "Test 159 Prediction: 4 True Class: 4\n", 245 | "Test 160 Prediction: 4 True Class: 4\n", 246 | "Test 161 Prediction: 6 True Class: 6\n", 247 | "Test 162 Prediction: 5 True Class: 5\n", 248 | "Test 163 Prediction: 4 True Class: 4\n", 249 | "Test 164 Prediction: 6 True Class: 6\n", 250 | "Test 165 Prediction: 5 True Class: 5\n", 251 | "Test 166 Prediction: 4 True Class: 4\n", 252 | "Test 167 Prediction: 5 True Class: 5\n", 253 | "Test 168 Prediction: 1 True Class: 1\n", 254 | "Test 169 Prediction: 4 True Class: 4\n", 255 | "Test 170 Prediction: 9 True Class: 4\n", 256 | "Test 171 Prediction: 7 True Class: 7\n", 257 | "Test 172 Prediction: 2 True Class: 2\n", 258 | "Test 173 Prediction: 3 True Class: 3\n", 259 | "Test 174 Prediction: 2 True Class: 2\n", 260 | "Test 175 Prediction: 1 True Class: 7\n", 261 | "Test 176 Prediction: 1 True Class: 1\n", 262 | "Test 177 Prediction: 8 True Class: 8\n", 263 | "Test 178 Prediction: 1 True Class: 1\n", 264 | "Test 179 Prediction: 8 True Class: 8\n", 265 | "Test 180 Prediction: 1 True Class: 1\n", 266 | "Test 181 Prediction: 8 True Class: 8\n", 267 | "Test 182 Prediction: 5 True Class: 5\n", 268 | "Test 183 Prediction: 0 True Class: 0\n", 269 | "Test 184 Prediction: 2 True Class: 8\n", 270 | "Test 185 Prediction: 9 True Class: 9\n", 271 | "Test 186 Prediction: 2 True Class: 2\n", 272 | "Test 187 Prediction: 5 True Class: 5\n", 273 | "Test 188 Prediction: 0 True Class: 0\n", 274 | "Test 
189 Prediction: 1 True Class: 1\n", 275 | "Test 190 Prediction: 1 True Class: 1\n", 276 | "Test 191 Prediction: 1 True Class: 1\n", 277 | "Test 192 Prediction: 0 True Class: 0\n", 278 | "Test 193 Prediction: 4 True Class: 9\n", 279 | "Test 194 Prediction: 0 True Class: 0\n", 280 | "Test 195 Prediction: 1 True Class: 3\n", 281 | "Test 196 Prediction: 1 True Class: 1\n", 282 | "Test 197 Prediction: 6 True Class: 6\n", 283 | "Test 198 Prediction: 4 True Class: 4\n", 284 | "Test 199 Prediction: 2 True Class: 2\n", 285 | "Done!\n", 286 | "Accuracy: 0.92\n" 287 | ] 288 | } 289 | ], 290 | "source": [ 291 | "# Launch the graph\n", 292 | "with tf.Session() as sess:\n", 293 | " sess.run(init)\n", 294 | "\n", 295 | " # loop over test data\n", 296 | " for i in range(len(Xte)):\n", 297 | " # Get nearest neighbor\n", 298 | " nn_index = sess.run(pred, feed_dict={xtr: Xtr, xte: Xte[i, :]})\n", 299 | " # Get nearest neighbor class label and compare it to its true label\n", 300 | " print \"Test\", i, \"Prediction:\", np.argmax(Ytr[nn_index]), \\\n", 301 | " \"True Class:\", np.argmax(Yte[i])\n", 302 | " # Calculate accuracy\n", 303 | " if np.argmax(Ytr[nn_index]) == np.argmax(Yte[i]):\n", 304 | " accuracy += 1./len(Xte)\n", 305 | " print \"Done!\"\n", 306 | " print \"Accuracy:\", accuracy" 307 | ] 308 | }, 309 | { 310 | "cell_type": "code", 311 | "execution_count": null, 312 | "metadata": { 313 | "collapsed": true 314 | }, 315 | "outputs": [], 316 | "source": [] 317 | } 318 | ], 319 | "metadata": { 320 | "kernelspec": { 321 | "display_name": "Python 2", 322 | "language": "python", 323 | "name": "python2" 324 | }, 325 | "language_info": { 326 | "codemirror_mode": { 327 | "name": "ipython", 328 | "version": 2 329 | }, 330 | "file_extension": ".py", 331 | "mimetype": "text/x-python", 332 | "name": "python", 333 | "nbconvert_exporter": "python", 334 | "pygments_lexer": "ipython2", 335 | "version": "2.7.13" 336 | } 337 | }, 338 | "nbformat": 4, 339 | "nbformat_minor": 0 340 | } 341 | 
-------------------------------------------------------------------------------- /3_NeuralNetworks/convolutional_network.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": { 7 | "collapsed": true 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "'''\n", 12 | "A Convolutional Network implementation example using TensorFlow library.\n", 13 | "This example is using the MNIST database of handwritten digits\n", 14 | "(http://yann.lecun.com/exdb/mnist/)\n", 15 | "\n", 16 | "Author: Aymeric Damien\n", 17 | "Project: https://github.com/aymericdamien/TensorFlow-Examples/\n", 18 | "'''" 19 | ] 20 | }, 21 | { 22 | "cell_type": "code", 23 | "execution_count": null, 24 | "metadata": { 25 | "collapsed": false 26 | }, 27 | "outputs": [], 28 | "source": [ 29 | "import tensorflow as tf\n", 30 | "\n", 31 | "# Import MNIST data\n", 32 | "from tensorflow.examples.tutorials.mnist import input_data\n", 33 | "mnist = input_data.read_data_sets(\"MNIST_data/\", one_hot=True)" 34 | ] 35 | }, 36 | { 37 | "cell_type": "code", 38 | "execution_count": 2, 39 | "metadata": { 40 | "collapsed": true 41 | }, 42 | "outputs": [], 43 | "source": [ 44 | "# Parameters\n", 45 | "learning_rate = 0.001\n", 46 | "training_iters = 200000\n", 47 | "batch_size = 128\n", 48 | "display_step = 10\n", 49 | "\n", 50 | "# Network Parameters\n", 51 | "n_input = 784 # MNIST data input (img shape: 28*28)\n", 52 | "n_classes = 10 # MNIST total classes (0-9 digits)\n", 53 | "dropout = 0.75 # Dropout, probability to keep units\n", 54 | "\n", 55 | "# tf Graph input\n", 56 | "x = tf.placeholder(tf.float32, [None, n_input])\n", 57 | "y = tf.placeholder(tf.float32, [None, n_classes])\n", 58 | "keep_prob = tf.placeholder(tf.float32) #dropout (keep probability)" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": 3, 64 | "metadata": { 65 | "collapsed": true 66 | }, 67 | 
"outputs": [], 68 | "source": [ 69 | "# Create some wrappers for simplicity\n", 70 | "def conv2d(x, W, b, strides=1):\n", 71 | " # Conv2D wrapper, with bias and relu activation\n", 72 | " x = tf.nn.conv2d(x, W, strides=[1, strides, strides, 1], padding='SAME')\n", 73 | " x = tf.nn.bias_add(x, b)\n", 74 | " return tf.nn.relu(x)\n", 75 | "\n", 76 | "\n", 77 | "def maxpool2d(x, k=2):\n", 78 | " # MaxPool2D wrapper\n", 79 | " return tf.nn.max_pool(x, ksize=[1, k, k, 1], strides=[1, k, k, 1],\n", 80 | " padding='SAME')\n", 81 | "\n", 82 | "\n", 83 | "# Create model\n", 84 | "def conv_net(x, weights, biases, dropout):\n", 85 | " # Reshape input picture\n", 86 | " x = tf.reshape(x, shape=[-1, 28, 28, 1])\n", 87 | "\n", 88 | " # Convolution Layer\n", 89 | " conv1 = conv2d(x, weights['wc1'], biases['bc1'])\n", 90 | " # Max Pooling (down-sampling)\n", 91 | " conv1 = maxpool2d(conv1, k=2)\n", 92 | "\n", 93 | " # Convolution Layer\n", 94 | " conv2 = conv2d(conv1, weights['wc2'], biases['bc2'])\n", 95 | " # Max Pooling (down-sampling)\n", 96 | " conv2 = maxpool2d(conv2, k=2)\n", 97 | "\n", 98 | " # Fully connected layer\n", 99 | " # Reshape conv2 output to fit fully connected layer input\n", 100 | " fc1 = tf.reshape(conv2, [-1, weights['wd1'].get_shape().as_list()[0]])\n", 101 | " fc1 = tf.add(tf.matmul(fc1, weights['wd1']), biases['bd1'])\n", 102 | " fc1 = tf.nn.relu(fc1)\n", 103 | " # Apply Dropout\n", 104 | " fc1 = tf.nn.dropout(fc1, dropout)\n", 105 | "\n", 106 | " # Output, class prediction\n", 107 | " out = tf.add(tf.matmul(fc1, weights['out']), biases['out'])\n", 108 | " return out" 109 | ] 110 | }, 111 | { 112 | "cell_type": "code", 113 | "execution_count": 4, 114 | "metadata": { 115 | "collapsed": true 116 | }, 117 | "outputs": [], 118 | "source": [ 119 | "# Store layers weight & bias\n", 120 | "weights = {\n", 121 | " # 5x5 conv, 1 input, 32 outputs\n", 122 | " 'wc1': tf.Variable(tf.random_normal([5, 5, 1, 32])),\n", 123 | " # 5x5 conv, 32 inputs, 64 outputs\n", 124 | 
" 'wc2': tf.Variable(tf.random_normal([5, 5, 32, 64])),\n", 125 | " # fully connected, 7*7*64 inputs, 1024 outputs\n", 126 | " 'wd1': tf.Variable(tf.random_normal([7*7*64, 1024])),\n", 127 | " # 1024 inputs, 10 outputs (class prediction)\n", 128 | " 'out': tf.Variable(tf.random_normal([1024, n_classes]))\n", 129 | "}\n", 130 | "\n", 131 | "biases = {\n", 132 | " 'bc1': tf.Variable(tf.random_normal([32])),\n", 133 | " 'bc2': tf.Variable(tf.random_normal([64])),\n", 134 | " 'bd1': tf.Variable(tf.random_normal([1024])),\n", 135 | " 'out': tf.Variable(tf.random_normal([n_classes]))\n", 136 | "}\n", 137 | "\n", 138 | "# Construct model\n", 139 | "pred = conv_net(x, weights, biases, keep_prob)\n", 140 | "\n", 141 | "# Define loss and optimizer\n", 142 | "cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))\n", 143 | "optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)\n", 144 | "\n", 145 | "# Evaluate model\n", 146 | "correct_pred = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))\n", 147 | "accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))\n", 148 | "\n", 149 | "# Initializing the variables\n", 150 | "init = tf.global_variables_initializer()" 151 | ] 152 | }, 153 | { 154 | "cell_type": "code", 155 | "execution_count": 5, 156 | "metadata": { 157 | "collapsed": false 158 | }, 159 | "outputs": [ 160 | { 161 | "name": "stdout", 162 | "output_type": "stream", 163 | "text": [ 164 | "Iter 1280, Minibatch Loss= 26574.855469, Training Accuracy= 0.25781\n", 165 | "Iter 2560, Minibatch Loss= 11454.494141, Training Accuracy= 0.49219\n", 166 | "Iter 3840, Minibatch Loss= 10070.515625, Training Accuracy= 0.55469\n", 167 | "Iter 5120, Minibatch Loss= 4008.586426, Training Accuracy= 0.78125\n", 168 | "Iter 6400, Minibatch Loss= 3148.004639, Training Accuracy= 0.80469\n", 169 | "Iter 7680, Minibatch Loss= 6740.440430, Training Accuracy= 0.71875\n", 170 | "Iter 8960, Minibatch Loss= 4103.991699, Training Accuracy= 
0.80469\n", 171 | "Iter 10240, Minibatch Loss= 2631.275391, Training Accuracy= 0.85938\n", 172 | "Iter 11520, Minibatch Loss= 1428.798828, Training Accuracy= 0.91406\n", 173 | "Iter 12800, Minibatch Loss= 3909.772705, Training Accuracy= 0.78906\n", 174 | "Iter 14080, Minibatch Loss= 1423.095947, Training Accuracy= 0.88281\n", 175 | "Iter 15360, Minibatch Loss= 1524.569824, Training Accuracy= 0.89062\n", 176 | "Iter 16640, Minibatch Loss= 2234.539795, Training Accuracy= 0.86719\n", 177 | "Iter 17920, Minibatch Loss= 933.932800, Training Accuracy= 0.90625\n", 178 | "Iter 19200, Minibatch Loss= 2039.046021, Training Accuracy= 0.89062\n", 179 | "Iter 20480, Minibatch Loss= 674.179932, Training Accuracy= 0.95312\n", 180 | "Iter 21760, Minibatch Loss= 3778.958984, Training Accuracy= 0.82812\n", 181 | "Iter 23040, Minibatch Loss= 1038.217773, Training Accuracy= 0.91406\n", 182 | "Iter 24320, Minibatch Loss= 1689.513672, Training Accuracy= 0.89062\n", 183 | "Iter 25600, Minibatch Loss= 1800.954956, Training Accuracy= 0.85938\n", 184 | "Iter 26880, Minibatch Loss= 1086.292847, Training Accuracy= 0.90625\n", 185 | "Iter 28160, Minibatch Loss= 656.042847, Training Accuracy= 0.94531\n", 186 | "Iter 29440, Minibatch Loss= 1210.589844, Training Accuracy= 0.91406\n", 187 | "Iter 30720, Minibatch Loss= 1099.606323, Training Accuracy= 0.90625\n", 188 | "Iter 32000, Minibatch Loss= 1073.128174, Training Accuracy= 0.92969\n", 189 | "Iter 33280, Minibatch Loss= 518.844543, Training Accuracy= 0.95312\n", 190 | "Iter 34560, Minibatch Loss= 540.856689, Training Accuracy= 0.92188\n", 191 | "Iter 35840, Minibatch Loss= 353.990906, Training Accuracy= 0.97656\n", 192 | "Iter 37120, Minibatch Loss= 1488.962891, Training Accuracy= 0.91406\n", 193 | "Iter 38400, Minibatch Loss= 231.191864, Training Accuracy= 0.98438\n", 194 | "Iter 39680, Minibatch Loss= 171.154480, Training Accuracy= 0.98438\n", 195 | "Iter 40960, Minibatch Loss= 2092.023682, Training Accuracy= 0.90625\n", 196 | "Iter 42240, 
Minibatch Loss= 480.594299, Training Accuracy= 0.95312\n", 197 | "Iter 43520, Minibatch Loss= 504.128143, Training Accuracy= 0.96875\n", 198 | "Iter 44800, Minibatch Loss= 143.534485, Training Accuracy= 0.97656\n", 199 | "Iter 46080, Minibatch Loss= 325.875580, Training Accuracy= 0.96094\n", 200 | "Iter 47360, Minibatch Loss= 602.813049, Training Accuracy= 0.91406\n", 201 | "Iter 48640, Minibatch Loss= 794.595093, Training Accuracy= 0.94531\n", 202 | "Iter 49920, Minibatch Loss= 415.539032, Training Accuracy= 0.95312\n", 203 | "Iter 51200, Minibatch Loss= 146.016022, Training Accuracy= 0.96094\n", 204 | "Iter 52480, Minibatch Loss= 294.180786, Training Accuracy= 0.94531\n", 205 | "Iter 53760, Minibatch Loss= 50.955730, Training Accuracy= 0.99219\n", 206 | "Iter 55040, Minibatch Loss= 1026.607056, Training Accuracy= 0.92188\n", 207 | "Iter 56320, Minibatch Loss= 283.756134, Training Accuracy= 0.96875\n", 208 | "Iter 57600, Minibatch Loss= 691.538208, Training Accuracy= 0.95312\n", 209 | "Iter 58880, Minibatch Loss= 491.075073, Training Accuracy= 0.96094\n", 210 | "Iter 60160, Minibatch Loss= 571.951660, Training Accuracy= 0.95312\n", 211 | "Iter 61440, Minibatch Loss= 284.041168, Training Accuracy= 0.97656\n", 212 | "Iter 62720, Minibatch Loss= 1041.941528, Training Accuracy= 0.92969\n", 213 | "Iter 64000, Minibatch Loss= 664.833923, Training Accuracy= 0.93750\n", 214 | "Iter 65280, Minibatch Loss= 1582.112793, Training Accuracy= 0.88281\n", 215 | "Iter 66560, Minibatch Loss= 783.135376, Training Accuracy= 0.94531\n", 216 | "Iter 67840, Minibatch Loss= 245.942398, Training Accuracy= 0.96094\n", 217 | "Iter 69120, Minibatch Loss= 752.858948, Training Accuracy= 0.96875\n", 218 | "Iter 70400, Minibatch Loss= 623.243286, Training Accuracy= 0.94531\n", 219 | "Iter 71680, Minibatch Loss= 846.498230, Training Accuracy= 0.93750\n", 220 | "Iter 72960, Minibatch Loss= 586.516479, Training Accuracy= 0.95312\n", 221 | "Iter 74240, Minibatch Loss= 92.774963, Training Accuracy= 
0.98438\n", 222 | "Iter 75520, Minibatch Loss= 644.039612, Training Accuracy= 0.95312\n", 223 | "Iter 76800, Minibatch Loss= 693.247681, Training Accuracy= 0.96094\n", 224 | "Iter 78080, Minibatch Loss= 466.491882, Training Accuracy= 0.96094\n", 225 | "Iter 79360, Minibatch Loss= 964.212341, Training Accuracy= 0.93750\n", 226 | "Iter 80640, Minibatch Loss= 230.451904, Training Accuracy= 0.97656\n", 227 | "Iter 81920, Minibatch Loss= 280.434570, Training Accuracy= 0.95312\n", 228 | "Iter 83200, Minibatch Loss= 213.208252, Training Accuracy= 0.97656\n", 229 | "Iter 84480, Minibatch Loss= 774.836060, Training Accuracy= 0.94531\n", 230 | "Iter 85760, Minibatch Loss= 164.687729, Training Accuracy= 0.96094\n", 231 | "Iter 87040, Minibatch Loss= 419.967407, Training Accuracy= 0.96875\n", 232 | "Iter 88320, Minibatch Loss= 160.920151, Training Accuracy= 0.96875\n", 233 | "Iter 89600, Minibatch Loss= 586.063599, Training Accuracy= 0.96094\n", 234 | "Iter 90880, Minibatch Loss= 345.598145, Training Accuracy= 0.96875\n", 235 | "Iter 92160, Minibatch Loss= 931.361145, Training Accuracy= 0.92188\n", 236 | "Iter 93440, Minibatch Loss= 170.107117, Training Accuracy= 0.97656\n", 237 | "Iter 94720, Minibatch Loss= 497.162750, Training Accuracy= 0.93750\n", 238 | "Iter 96000, Minibatch Loss= 906.600464, Training Accuracy= 0.94531\n", 239 | "Iter 97280, Minibatch Loss= 303.382202, Training Accuracy= 0.92969\n", 240 | "Iter 98560, Minibatch Loss= 509.161652, Training Accuracy= 0.97656\n", 241 | "Iter 99840, Minibatch Loss= 359.561981, Training Accuracy= 0.97656\n", 242 | "Iter 101120, Minibatch Loss= 136.516541, Training Accuracy= 0.97656\n", 243 | "Iter 102400, Minibatch Loss= 517.199341, Training Accuracy= 0.96875\n", 244 | "Iter 103680, Minibatch Loss= 487.793335, Training Accuracy= 0.95312\n", 245 | "Iter 104960, Minibatch Loss= 407.351929, Training Accuracy= 0.96094\n", 246 | "Iter 106240, Minibatch Loss= 70.495193, Training Accuracy= 0.98438\n", 247 | "Iter 107520, Minibatch 
Loss= 344.783508, Training Accuracy= 0.96094\n", 248 | "Iter 108800, Minibatch Loss= 242.682465, Training Accuracy= 0.95312\n", 249 | "Iter 110080, Minibatch Loss= 169.181458, Training Accuracy= 0.96094\n", 250 | "Iter 111360, Minibatch Loss= 152.638245, Training Accuracy= 0.98438\n", 251 | "Iter 112640, Minibatch Loss= 170.795868, Training Accuracy= 0.96875\n", 252 | "Iter 113920, Minibatch Loss= 133.262726, Training Accuracy= 0.98438\n", 253 | "Iter 115200, Minibatch Loss= 296.063293, Training Accuracy= 0.95312\n", 254 | "Iter 116480, Minibatch Loss= 254.247543, Training Accuracy= 0.96094\n", 255 | "Iter 117760, Minibatch Loss= 506.795715, Training Accuracy= 0.94531\n", 256 | "Iter 119040, Minibatch Loss= 446.006897, Training Accuracy= 0.96094\n", 257 | "Iter 120320, Minibatch Loss= 149.467377, Training Accuracy= 0.97656\n", 258 | "Iter 121600, Minibatch Loss= 52.783600, Training Accuracy= 0.98438\n", 259 | "Iter 122880, Minibatch Loss= 49.041794, Training Accuracy= 0.98438\n", 260 | "Iter 124160, Minibatch Loss= 184.371246, Training Accuracy= 0.97656\n", 261 | "Iter 125440, Minibatch Loss= 129.838501, Training Accuracy= 0.97656\n", 262 | "Iter 126720, Minibatch Loss= 288.006531, Training Accuracy= 0.96875\n", 263 | "Iter 128000, Minibatch Loss= 187.284653, Training Accuracy= 0.97656\n", 264 | "Iter 129280, Minibatch Loss= 197.969955, Training Accuracy= 0.96875\n", 265 | "Iter 130560, Minibatch Loss= 299.969818, Training Accuracy= 0.96875\n", 266 | "Iter 131840, Minibatch Loss= 537.602173, Training Accuracy= 0.96094\n", 267 | "Iter 133120, Minibatch Loss= 4.519302, Training Accuracy= 0.99219\n", 268 | "Iter 134400, Minibatch Loss= 133.264191, Training Accuracy= 0.97656\n", 269 | "Iter 135680, Minibatch Loss= 89.662292, Training Accuracy= 0.97656\n", 270 | "Iter 136960, Minibatch Loss= 107.774078, Training Accuracy= 0.96875\n", 271 | "Iter 138240, Minibatch Loss= 335.904572, Training Accuracy= 0.96094\n", 272 | "Iter 139520, Minibatch Loss= 457.494568, Training 
Accuracy= 0.96094\n", 273 | "Iter 140800, Minibatch Loss= 259.131531, Training Accuracy= 0.95312\n", 274 | "Iter 142080, Minibatch Loss= 152.205383, Training Accuracy= 0.96094\n", 275 | "Iter 143360, Minibatch Loss= 252.535828, Training Accuracy= 0.95312\n", 276 | "Iter 144640, Minibatch Loss= 109.477585, Training Accuracy= 0.96875\n", 277 | "Iter 145920, Minibatch Loss= 24.468613, Training Accuracy= 0.99219\n", 278 | "Iter 147200, Minibatch Loss= 51.722107, Training Accuracy= 0.97656\n", 279 | "Iter 148480, Minibatch Loss= 69.715233, Training Accuracy= 0.97656\n", 280 | "Iter 149760, Minibatch Loss= 405.289246, Training Accuracy= 0.92969\n", 281 | "Iter 151040, Minibatch Loss= 282.976379, Training Accuracy= 0.95312\n", 282 | "Iter 152320, Minibatch Loss= 134.991119, Training Accuracy= 0.97656\n", 283 | "Iter 153600, Minibatch Loss= 491.618103, Training Accuracy= 0.92188\n", 284 | "Iter 154880, Minibatch Loss= 154.299988, Training Accuracy= 0.99219\n", 285 | "Iter 156160, Minibatch Loss= 79.480019, Training Accuracy= 0.96875\n", 286 | "Iter 157440, Minibatch Loss= 68.093750, Training Accuracy= 0.99219\n", 287 | "Iter 158720, Minibatch Loss= 459.739685, Training Accuracy= 0.92188\n", 288 | "Iter 160000, Minibatch Loss= 168.076843, Training Accuracy= 0.94531\n", 289 | "Iter 161280, Minibatch Loss= 256.141846, Training Accuracy= 0.97656\n", 290 | "Iter 162560, Minibatch Loss= 236.400391, Training Accuracy= 0.94531\n", 291 | "Iter 163840, Minibatch Loss= 177.011261, Training Accuracy= 0.96875\n", 292 | "Iter 165120, Minibatch Loss= 48.583298, Training Accuracy= 0.97656\n", 293 | "Iter 166400, Minibatch Loss= 413.800293, Training Accuracy= 0.96094\n", 294 | "Iter 167680, Minibatch Loss= 209.587387, Training Accuracy= 0.96875\n", 295 | "Iter 168960, Minibatch Loss= 239.407318, Training Accuracy= 0.98438\n", 296 | "Iter 170240, Minibatch Loss= 183.567017, Training Accuracy= 0.96875\n", 297 | "Iter 171520, Minibatch Loss= 87.937515, Training Accuracy= 0.96875\n", 298 | 
"Iter 172800, Minibatch Loss= 203.777039, Training Accuracy= 0.98438\n", 299 | "Iter 174080, Minibatch Loss= 566.378052, Training Accuracy= 0.94531\n", 300 | "Iter 175360, Minibatch Loss= 325.170898, Training Accuracy= 0.95312\n", 301 | "Iter 176640, Minibatch Loss= 300.142212, Training Accuracy= 0.97656\n", 302 | "Iter 177920, Minibatch Loss= 205.370193, Training Accuracy= 0.95312\n", 303 | "Iter 179200, Minibatch Loss= 5.594437, Training Accuracy= 0.99219\n", 304 | "Iter 180480, Minibatch Loss= 110.732109, Training Accuracy= 0.98438\n", 305 | "Iter 181760, Minibatch Loss= 33.320297, Training Accuracy= 0.99219\n", 306 | "Iter 183040, Minibatch Loss= 6.885544, Training Accuracy= 0.99219\n", 307 | "Iter 184320, Minibatch Loss= 221.144806, Training Accuracy= 0.96875\n", 308 | "Iter 185600, Minibatch Loss= 365.337372, Training Accuracy= 0.94531\n", 309 | "Iter 186880, Minibatch Loss= 186.558258, Training Accuracy= 0.96094\n", 310 | "Iter 188160, Minibatch Loss= 149.720322, Training Accuracy= 0.98438\n", 311 | "Iter 189440, Minibatch Loss= 105.281998, Training Accuracy= 0.97656\n", 312 | "Iter 190720, Minibatch Loss= 289.980011, Training Accuracy= 0.96094\n", 313 | "Iter 192000, Minibatch Loss= 214.382278, Training Accuracy= 0.96094\n", 314 | "Iter 193280, Minibatch Loss= 461.044312, Training Accuracy= 0.93750\n", 315 | "Iter 194560, Minibatch Loss= 138.653076, Training Accuracy= 0.98438\n", 316 | "Iter 195840, Minibatch Loss= 112.004883, Training Accuracy= 0.98438\n", 317 | "Iter 197120, Minibatch Loss= 212.691467, Training Accuracy= 0.97656\n", 318 | "Iter 198400, Minibatch Loss= 57.642502, Training Accuracy= 0.97656\n", 319 | "Iter 199680, Minibatch Loss= 80.503563, Training Accuracy= 0.96875\n", 320 | "Optimization Finished!\n", 321 | "Testing Accuracy: 0.984375\n" 322 | ] 323 | } 324 | ], 325 | "source": [ 326 | "# Launch the graph\n", 327 | "with tf.Session() as sess:\n", 328 | " sess.run(init)\n", 329 | " step = 1\n", 330 | " # Keep training until reach max 
iterations\n", 331 | " while step * batch_size < training_iters:\n", 332 | " batch_x, batch_y = mnist.train.next_batch(batch_size)\n", 333 | " # Run optimization op (backprop)\n", 334 | " sess.run(optimizer, feed_dict={x: batch_x, y: batch_y,\n", 335 | " keep_prob: dropout})\n", 336 | " if step % display_step == 0:\n", 337 | " # Calculate batch loss and accuracy\n", 338 | " loss, acc = sess.run([cost, accuracy], feed_dict={x: batch_x,\n", 339 | " y: batch_y,\n", 340 | " keep_prob: 1.})\n", 341 | " print \"Iter \" + str(step*batch_size) + \", Minibatch Loss= \" + \\\n", 342 | " \"{:.6f}\".format(loss) + \", Training Accuracy= \" + \\\n", 343 | " \"{:.5f}\".format(acc)\n", 344 | " step += 1\n", 345 | " print \"Optimization Finished!\"\n", 346 | "\n", 347 | " # Calculate accuracy for 256 mnist test images\n", 348 | " print \"Testing Accuracy:\", \\\n", 349 | " sess.run(accuracy, feed_dict={x: mnist.test.images[:256],\n", 350 | " y: mnist.test.labels[:256],\n", 351 | " keep_prob: 1.})" 352 | ] 353 | }, 354 | { 355 | "cell_type": "code", 356 | "execution_count": null, 357 | "metadata": { 358 | "collapsed": true 359 | }, 360 | "outputs": [], 361 | "source": [] 362 | } 363 | ], 364 | "metadata": { 365 | "kernelspec": { 366 | "display_name": "Python 2", 367 | "language": "python", 368 | "name": "python2" 369 | }, 370 | "language_info": { 371 | "codemirror_mode": { 372 | "name": "ipython", 373 | "version": 2 374 | }, 375 | "file_extension": ".py", 376 | "mimetype": "text/x-python", 377 | "name": "python", 378 | "nbconvert_exporter": "python", 379 | "pygments_lexer": "ipython2", 380 | "version": "2.7.13" 381 | } 382 | }, 383 | "nbformat": 4, 384 | "nbformat_minor": 0 385 | } 386 | -------------------------------------------------------------------------------- /2_BasicModels/linear_regression.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "metadata": { 7 
| "collapsed": false 8 | }, 9 | "outputs": [], 10 | "source": [ 11 | "# A linear regression learning algorithm example using TensorFlow library.\n", 12 | "\n", 13 | "# Author: Aymeric Damien\n", 14 | "# Project: https://github.com/aymericdamien/TensorFlow-Examples/" 15 | ] 16 | }, 17 | { 18 | "cell_type": "code", 19 | "execution_count": 1, 20 | "metadata": { 21 | "collapsed": true 22 | }, 23 | "outputs": [], 24 | "source": [ 25 | "import tensorflow as tf\n", 26 | "import numpy\n", 27 | "import matplotlib.pyplot as plt\n", 28 | "rng = numpy.random" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": 2, 34 | "metadata": { 35 | "collapsed": true 36 | }, 37 | "outputs": [], 38 | "source": [ 39 | "# Parameters\n", 40 | "learning_rate = 0.01\n", 41 | "training_epochs = 1000\n", 42 | "display_step = 50" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": 3, 48 | "metadata": { 49 | "collapsed": true 50 | }, 51 | "outputs": [], 52 | "source": [ 53 | "# Training Data\n", 54 | "train_X = numpy.asarray([3.3,4.4,5.5,6.71,6.93,4.168,9.779,6.182,7.59,2.167,\n", 55 | " 7.042,10.791,5.313,7.997,5.654,9.27,3.1])\n", 56 | "train_Y = numpy.asarray([1.7,2.76,2.09,3.19,1.694,1.573,3.366,2.596,2.53,1.221,\n", 57 | " 2.827,3.465,1.65,2.904,2.42,2.94,1.3])\n", 58 | "n_samples = train_X.shape[0]" 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": 4, 64 | "metadata": { 65 | "collapsed": true 66 | }, 67 | "outputs": [], 68 | "source": [ 69 | "# tf Graph Input\n", 70 | "X = tf.placeholder(\"float\")\n", 71 | "Y = tf.placeholder(\"float\")\n", 72 | "\n", 73 | "# Set model weights\n", 74 | "W = tf.Variable(rng.randn(), name=\"weight\")\n", 75 | "b = tf.Variable(rng.randn(), name=\"bias\")" 76 | ] 77 | }, 78 | { 79 | "cell_type": "code", 80 | "execution_count": 5, 81 | "metadata": { 82 | "collapsed": true 83 | }, 84 | "outputs": [], 85 | "source": [ 86 | "# Construct a linear model\n", 87 | "pred = tf.add(tf.mul(X, W), b)" 88 | ] 89 | 
}, 90 | { 91 | "cell_type": "code", 92 | "execution_count": 6, 93 | "metadata": { 94 | "collapsed": true 95 | }, 96 | "outputs": [], 97 | "source": [ 98 | "# Mean squared error\n", 99 | "cost = tf.reduce_sum(tf.pow(pred-Y, 2))/(2*n_samples)\n", 100 | "# Gradient descent\n", 101 | "optimizer = tf.train.GradientDescentOptimizer(learning_rate).minimize(cost)" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": 8, 107 | "metadata": { 108 | "collapsed": false 109 | }, 110 | "outputs": [], 111 | "source": [ 112 | "# Initializing the variables\n", 113 | "init = tf.global_variables_initializer()" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": 9, 119 | "metadata": { 120 | "collapsed": false 121 | }, 122 | "outputs": [ 123 | { 124 | "name": "stdout", 125 | "output_type": "stream", 126 | "text": [ 127 | "Epoch: 0050 cost= 0.195095107 W= 0.441748 b= -0.580876\n", 128 | "Epoch: 0100 cost= 0.181448311 W= 0.430319 b= -0.498661\n", 129 | "Epoch: 0150 cost= 0.169377610 W= 0.419571 b= -0.421336\n", 130 | "Epoch: 0200 cost= 0.158700854 W= 0.409461 b= -0.348611\n", 131 | "Epoch: 0250 cost= 0.149257123 W= 0.399953 b= -0.28021\n", 132 | "Epoch: 0300 cost= 0.140904188 W= 0.391011 b= -0.215878\n", 133 | "Epoch: 0350 cost= 0.133515999 W= 0.3826 b= -0.155372\n", 134 | "Epoch: 0400 cost= 0.126981199 W= 0.374689 b= -0.0984639\n", 135 | "Epoch: 0450 cost= 0.121201262 W= 0.367249 b= -0.0449408\n", 136 | "Epoch: 0500 cost= 0.116088994 W= 0.360252 b= 0.00539905\n", 137 | "Epoch: 0550 cost= 0.111567356 W= 0.35367 b= 0.052745\n", 138 | "Epoch: 0600 cost= 0.107568085 W= 0.34748 b= 0.0972751\n", 139 | "Epoch: 0650 cost= 0.104030922 W= 0.341659 b= 0.139157\n", 140 | "Epoch: 0700 cost= 0.100902475 W= 0.336183 b= 0.178547\n", 141 | "Epoch: 0750 cost= 0.098135538 W= 0.331033 b= 0.215595\n", 142 | "Epoch: 0800 cost= 0.095688373 W= 0.32619 b= 0.25044\n", 143 | "Epoch: 0850 cost= 0.093524046 W= 0.321634 b= 0.283212\n", 144 | "Epoch: 0900 cost= 
0.091609895 W= 0.317349 b= 0.314035\n", 145 | "Epoch: 0950 cost= 0.089917004 W= 0.31332 b= 0.343025\n", 146 | "Epoch: 1000 cost= 0.088419855 W= 0.30953 b= 0.370291\n", 147 | "Optimization Finished!\n", 148 | "Training cost= 0.0884199 W= 0.30953 b= 0.370291 \n", 149 | "\n" 150 | ] 151 | }, 152 | { 153 | "data": { 154 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAgkAAAFkCAYAAACq4KjhAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzt3Xl8lNXZ//HPNRiJgQAqUiyCCSA06lM1sSqyuKFQC0GL\n+DSK+1JrEX4stSpUYk20UgVjRatVK25prViFKvJoqRuktIa6EtwAUdqioMZhUSNzfn/MJGSSCclM\nZuaemXzfr9e85D5zL9ctIXPNOec+lznnEBEREWnK53UAIiIikpqUJIiIiEhEShJEREQkIiUJIiIi\nEpGSBBEREYlISYKIiIhEpCRBREREIlKSICIiIhEpSRAREZGIlCSIiIhIRO1KEszsajMLmNncVvab\nYGY1ZrbDzF4zs++357oiIiKSeDEnCWb2PeAS4LVW9hsCPAL8DjgceAJ4wswOjvXaIiIikngxJQlm\n1hV4CLgY+LyV3acAS5xzc51zbzvnZgOrgEmxXFtERESSI9aehPnAYufcsjbsOwR4rknb0lC7iIiI\npKg9oj3AzH5EcNjgyDYe0hvY1KRtU6i9pWvsC4wC1gNfRhujiIhIB5YN5AFLnXNb2nOiqJIEMzsA\nuBU42TlX147rGuB28/4o4OF2nF9ERKSjO5vgnMCYRduTUATsB1SbmYXaOgEjzGwS0Nk51/TD/7/A\nt5q09aJ570Jj6wEeeughCgoKogwx9UydOpV58+Z5HUbc6H5SVybdC+h+Ulkm3Qtk1v3U1NQwceJE\nCH2Wtke0ScJzwP80absfqAF+FSFBAKgCTgJua9R2cqi9JV8CFBQUUFhYGGWIqad79+4ZcR/1dD+p\nK5PuBXQ/qSyT7gUy735C2j1cH1WS4JzbBqxu3GZm24Atzrma0PYCYKNz7prQLhXAC2Y2DXgKKCHY\nI3FJO2MXERGRBIrHiotNew/60mhSonOuimBicCnwKvBDYJxzbjUiIiKSsqJ+uqEp59yJu9sOtS0E\nFrb3WiIiIpI8qt2QBCUlJV6HEFe6n9SVSfcCup9Ulkn3Apl3P/FikecaesvMCoHq6urqTJxIIiIi\nkjCrVq2iqKgIoMg5t6o952r3cIOISCbasGEDmzdv9joMkWZ69uxJv379knItJQkiIk1s2LCBgoIC\ntm/f7nUoIs3k5ORQU1OTlERBSYKISBObN29m+/btGbOgm2SO+oWSNm/erCRBRMRLmbKgm0is9HSD\niIiIRKQkQURERCJSkiAiIiIRKUkQERGRiJQkiIhIzEpLS/H5Yvsouf/++/H5fGzYsCHOUe3ywQcf\n4PP5eOCBB2I6PhkxpjIlCSIiHdDq1auZOHEiBxxwANnZ2fTp04eJEyeyenV0tffMLOYkwcwws5iO\nTZb2xFhZWUlFRUWcI0ouJQkiIh3M448/TmFhIX/729+48MILufPOO7n44ot5/vnnKSws5Mknn2zz\nuX7xi1/EvOjUueeey44dO5K2emCyPfLII2mfJGidBBGROHDOJexbcTzPvXbtWs4991wGDhzIiy++\nyD777NPw3pQpUxg2bBjnnHMOr7/+Onl5eS2eZ/v27eTk5ODz+dhzzz1jisXMYj5WkkM9CSIiMfL7\n/cyePJmR+fmc1rcvI/PzmT15Mn6/
P2XPPWfOHHbs2MHdd98dliAA7LPPPvz2t79l69atzJkzp6G9\nft5BTU0NZ511Fvvssw/Dhw8Pe6+xL7/8ksmTJ7PffvvRrVs3TjvtNP7973/j8/n45S9/2bBfpPH+\nvLw8iouLWb58OUcffTR77bUXAwYM4MEHHwy7xmeffcaMGTP47ne/S25uLt27d+fUU0/l9ddfj/n/\nzerVqznxxBPJycmhb9++lJeXEwgEmu23aNEixowZQ58+fcjOzmbgwIGUlZWF7XvCCSfw1FNPNcyJ\n8Pl89O/fH4C6ujquvfZajjzySHr06EHXrl0ZMWIEzz//fMyxJ4p6EkREYuD3+xk/ZAjTamooDQQw\nwAFL589n/LJlLKyqIjc3N+XO/Ze//IW8vDyOPfbYiO+PGDGCvLw8/vKXv3DHHXcANPRiTJgwgUGD\nBnHjjTdSX0E40pj9eeedx2OPPca5557L0UcfzQsvvMAPfvCDZvtFOtbMePfdd5kwYQIXXXQR559/\nPvfddx8XXHABRx55ZMMy2WvXrmXRokVMmDCB/Px8Nm3axF133cXxxx/P6tWr6d27d1T/XzZt2sTx\nxx9PIBDgmmuuIScnh7vvvpvs7Oxm+95///3k5uYyffp0unbtyrJly7j22mvx+/3cdNNNAMyaNYva\n2lo2btzIrbfeinOOrl27AvDFF19w3333UVJSwqWXXorf7+fee+9l9OjR/OMf/+C73/1uVLEnlHMu\n5V5AIeCqq6udiEiyVVdXu9Z+B117xRVuic/nHDR7Pe3zudmTJ8d8/USdu7a21pmZO/3003e737hx\n45zP53Nbt251zjlXWlrqzMydffbZzfYtLS11Pp+vYXvVqlXOzNz06dPD9rvgggucz+dz1113XUPb\n/fff73w+n/vggw8a2vLy8pzP53PLly9vaPvkk09cdna2+9nPftbQ9vXXXzeL5YMPPnDZ2dmurKys\noW39+vXOzNyCBQt2e8//7//9P+fz+dwrr7zS0LZ582bXo0ePZjF++eWXzY6/7LLLXNeuXcPiGjNm\njMvPz2+2byAQcHV1dWFttbW1rnfv3u7iiy/ebZxt+dms3wcodO38PNZwg4hIDJYvXsyoCF3RAKMD\nAZYvWpRy564fqmitF6L+/S+++KKhzcy47LLLWr3GM888g5nxk5/8JKz9iiuuaOh9aM3BBx8c1tPR\ns2dPBg8ezNq1axvasrKyGv4cCAT49NNPycnJYfDgwaxatapN12lsyZIlHHPMMRQVFTW07bvvvpx9\n9tnN9u3cuXPDn7du3cqWLVsYNmwY27dvZ82aNa1ey8zYY49gR75zjs8++4yvv/6aI488MqbYE0lJ\ngohIlJxzdKmro6WphAbk1NW1+UMxWeeu//BvbV5DS8lEfn5+q9eoH4Nvuu/AgQPbHGekpx323ntv\nPvvss4Zt5xzz5s1j0KBBdO7cmZ49e9KrVy/eeOMNamtr23ytxnEfdNBBzdoHDx7crG316tWcfvrp\n9OjRg27durHffvtxzjnnALT52gsWLOCwww4jOzubfffdl169evHUU0/FFHsiaU6CiEiUzIxtWVk4\niPhh7oBtWVkxPZGQyHN369aN/fffv9XJfa+//jp9+vRpGEOvt9dee0V9zXrRxNupU6eI7Y0To/Ly\ncq699louuugiysrK2GefffD5fEyZMiXiZMNYY2yajNXW1jJixAh69OhBWVkZ/fv3Jzs7m+rqaq66\n6qo2Xfuhhx7iggsu4Ic//CFXXnklvXr1olOnTtxwww1hvSWpQEmCiEgMho4dy9L58xkd4UPhGZ+P\nYcXFKXnuMWPGcM8997BixYqIkxdfeukl1q9f32y4oK0OPPBAAoEA69atY8CAAQ3t77zzTswxR7Jw\n4UJOPPFEfve734W1f/755+y3335Rn+/AAw+MGOPbb78dtv3888/z2Wef8eSTTzJ06NCG9vfff7/Z\n
sS0lRgsXLmTAgAE89thjYe3XXntt1HEnmoYbRERiMKO8nLkFBSzx+aj/rumAJT4f8woKmF5WlpLn\n/tnPfkZ2djY//vGP+fTTT8Pe+/TTT7nsssvo0qULM2bMiOn8o0aNwjnX8GREvd/85jdxXUeiU6dO\nzb7l/+lPf2Ljxo0xne/UU0/l73//O6+88kpD2yeffEJlZWXE6zbuMfj666+b3S9Aly5dIg4fdOrU\nqdn/i5UrV1JVVRVT7ImkngQRkRjk5uaysKqKW2bNYu6iReTU1bE9K4uhxcUsLCuL+RHFRJ974MCB\nLFiwgIkTJ/I///M/XHTRReTn57Nu3Truu+8+tmzZwh/+8Ic2zT+IpLCwkPHjx3PrrbeyefNmjjnm\nGF544QXeffddILphh90ZM2YM119/PRdeeCHHHnssb7zxBg8//HBY70U0rrzySh588EFGjRrFlClT\nyMnJ4Xe/+x0HHnhg2PDMsccey9577825557L5MmTgeDwQaT7Kioq4tFHH2X69Ol873vfo2vXrowZ\nM4YxY8bw+OOPc9ppp/GDH/yAtWvXctddd3HIIYewdevW2P6HJEp7H49IxAs9AikiHmrLY2ZNBQKB\nhMWTiHO/+eab7uyzz3Z9+vRxnTt3dt/+9rfdxIkT3VtvvdVs3/rHHLds2RLxvU6dOoW17dixw11x\nxRWuZ8+erlu3bm78+PHu3XffdWbm5syZ07BfpEcg8/PzXXFxcbPrHH/88e7EE09s2P7qq6/cz372\nM9enTx/XpUsXN2LECLdy5Up3wgknhO23fv165/P5Wn0Esv7/yQknnOBycnJc37593Q033ODuu+++\nZjFWVVW5Y4891nXp0sUdcMAB7uqrr3bPPvus8/l87oUXXmjYb9u2bW7ixIlun332cT6fL+xxyF/9\n6lcuPz/f7bXXXq6oqMg9/fTT7vzzz3f9+/ffbYzJfgTSXAwzZBPNzAqB6urqagoLC70OR0Q6mFWr\nVlFUVIR+B8XPq6++SmFhIQ8//DAlJSVeh5O22vKzWb8PUOSca9czlZqTICIicfXVV181a7v11lvp\n1KkTI0aM8CAiiZXmJIiISFzNmTOH6upqjj/+ePbYYw+efvppli5dyo9//GP69OnjdXgSBSUJIiIS\nV0OGDOHZZ5+lrKyMrVu30q9fP6677jquueYar0OTKClJEBGRuBo5ciQjR470OgyJA81JEBERkYiU\nJIiIiEhEShJEREQkIiUJIiIiEpGSBBEREYlISYKIiIhEpCRBREREIlKSICIizRxwwAFceumlnsbw\n/vvv4/P5eOSRR3a731//+ld8Ph8rVqxoaJs4cSIHHXRQokPMeEoSREQ6kAULFuDz+SK+Gq+I6PP5\nwsofv/XWW1x33XV89NFHzc45f/58HnzwwaTE35KmpZrNDJ9PH3HtpRUXRUQ6GDPj+uuvJy8vL6z9\n0EMPbfjz+++/T6dOnRq233zzTa677jpOPvlkDjjggLDjbr/9dvr27cs555yT0Lijcf/995OKVY7T\njZIEEZEOaPTo0bstg52VlRW27Zxr9m09lTVOcCR26osREZFmGs9JuPfeeznrrLMAGDZsGD6fj06d\nOrFixQr69u3L22+/zXPPPdcwbHHKKac0nOfzzz9n8uTJ9OvXj+zsbAYNGsTNN9/c7HqfffYZ5557\nLj169GCfffbhoosu4osvvog5/qZzEurnN9x2223cddddDBgwgL322otjjjmGf/3rX82Or6mpYfz4\n8ey7777k5ORw1FFH8fTTT8ccT7qKqifBzC4DfgLkhZreAn7pnHumhf3PA34POKA+Bf3SOZcTU7Qi\nIhIXtbW1bNmyJaxt3333bfhz416DE044gZ/+9KfccccdzJ49u+HDd/Dgwdx+++1cfvnl7Lvvvlx9\n9dU459h///0B2L59O8OHD+fjjz/msssu44ADDuDll1/myiuv5O
OPP2bOnDlAsJdi7NixrFy5kssv\nv5zBgwezcOFCLrjggph7L8ws4rELFixg+/btXH755TjnuOmmm/jhD3/YkEQAvPHGGwwfPpwDDzyQ\nq6++mpycHP74xz9SXFzME088wZgxY2KKKR1FO9zwIfBz4L3Q9vnAk2Z2uHOupoVjaoFB7EoSNEgk\nIuIh5xwnnXRSWJuZsXPnzoj79+/fn2HDhnHHHXdw8sknc+yxxza8N27cOK666ip69+5NSUlJ2HFz\n5sxhw4YNvPbaaw3zHy655BK+9a1vUVFRwbRp0+jduzePP/44K1as4NZbb2Xy5MkAXHbZZYwYMSKO\ndx20ceNG3nvvPbp27QrAgAEDOOOMM3juuecaekCuuOIKBg4cyMqVKxuGLS6//HKOOeYYrrrqKiUJ\nLXHOPdWkaZaZ/QQ4BmgpSXDOuU9iCU5EJB1s3w5r1iT2Gt/5DuTEqQ/WzLjjjjsS/ojgY489xvHH\nH09ubm5Yr8XIkSO5+eabeemll5gwYQJPP/00nTt3Dnvk0ufzMWnSpLDHGuPhrLPOakgQAIYPH45z\njrVr1wKwefNmXnzxRX71q1/x+eefN+znnGPUqFGUlZXxySefsN9++8U1rlQV88RFM/MBZwI5QNVu\ndu1qZusJzn9YBVzjnFsd63VFRFLNmjVQVJTYa1RXw27mGUbte9/73m4nLsbDu+++S01NTcQPVDPj\n448/BmDDhg306dOH7OzssH0GDx4c95j69u0btr333nsDwTkR9TEDXH311Vx11VUtxq0koQVmdijB\npCAb8AOnO+dayqHfBi4EXge6Az8DVpjZIc65jbGFLCKSWr7zneCHeKKvkW6cc4wePZrp06dHfL8+\nCWjpyYlEPMLY0lMP9dcKBAIA/PznP2fkyJER983Pz497XKkqlp6ENcBhQA9gPPCAmY2IlCg45/4O\n/L1+28yqCA5LXArMbu1CU6dOpXv37mFtJSUlzca9RES8lJMT32/5qWh3Ewhbeq9///5s27aNE088\ncbfnzsvLY/ny5Xz55ZdhvQlvv/12bMG2w4ABAwDYc889W43bS9u2bQOgsrKSysrKsPdqa2vjdp2o\nkwTn3DfA2tDmKjM7CphC8KmHVo81s38BA9tyrXnz5iW8O0xERFrXpUsXnHNh4/SN34vUfuaZZ1Je\nXs6yZcuafeB+/vnndOvWDZ/Px6mnnsp9993HXXfdxZQpUwDYuXMnt99+e9LXZujduzfDhg3jzjvv\n5PLLL6dXr15h72/evJmePXsmNaZIfnb++Tz76qsRvzivWrWKojiNf8VjMSUf0LktO4bmMRwKdLyH\nTUVEUkQs3fhHHHEEPp+PG2+8kc2bN9O5c2dOPvlk9tlnH4qKirj33nu54YYbGDBgAL179+a4447j\nqquuYvHixXz/+9/nggsu4IgjjmDr1q28/vrrPP7442zcuJFu3bpx+umnc8wxxzBjxgzef//9hkcg\nt2/fntB7asmdd97JiBEjOPTQQ7nkkkvIz89n06ZNrFixgk2bNvHKK6/E7VqxOnvdOm6ZNYvSioqE\nXifadRLKgSUEH4XMBc4GjgNOCb3/APCRc+6a0PYvCA43vEdweOJK4EDgnjjFLyIiUWrLt/Om6wx8\n+9vf5s477+Smm27i4osvZufOnbz00ksce+yxlJaW8tFHH3HTTTexdetWTjrpJI477jhycnJ4+eWX\nKS8v57HHHmPBggV0796dQYMGUVZW1vCUgZnx1FNPMWXKFB544AE6derEaaedxi233MKRRx4Z8z1F\nqufQ0n6N2w855BBeeeUVSktL+f3vf89nn31Gr169OOKII7j22mvbFE+iHescVy1aBAlOEiya7MvM\n7gFOBPYnuP7B68CvnHPLQu8vA9Y75y4Mbc8FTgd6A58B1cBM59zrrVynEKiurq7WcIOIJF19d61+\nB0mqafjZBK7r04cnPvywWe
LTaLihyDm3qj3Xi3adhItbef/EJtvTgGkxxCUiIiItcMC2rKyEz9lQ\n7QYREZE0s8KMYcXFCb+OqkCKiIikmYfz83m2rCzh11FPgoiISJr59f33k5ubm/DrKEkQERFJM126\ndEnKdZQkiIiISERKEkRERCQiJQkiIiISkZ5uEBFpQU1NjdchiIRJ9s+kkgQRkSZ69uxJTk4OEydO\n9DoUkWZycnKSVmRKSYKISBP9+vWjpqaGzZs3ex2KNLFuHZxxRnhbdTVcNmYMd/7nP0Raf9ABP9l/\nf377l78kI8SE69mzJ/369UvKtZQkiIhE0K9fv6T9Ipa2aboC8caN8O1vB//8/TPO4JP58xkdCDQ7\nbonPx6kTJqgORww0cVFERFLab34TniBMnQrO7UoQAGaUlzO3oIAlPh/1ZQsdwQRhXkEB05OwOmEm\nUk+CiIikJL8funULbwsEmvcoAOTm5rKwqopbZs1i7qJF5NTVsT0ri6HFxSwsK0vK6oSZSEmCiIik\nnEMOgdWrd22/+CIMH777Y3JzcymtqICKCpxzCa+Q2BEoSRARkZTx8svhyUBBQXiy0FZKEOJDSYKI\niHjOOfA1mSVXW9t8uEGSSxMXRUTEUzNmhCcIt94aTBqUIHhPPQkiIuKJ//wn/AkFCCYHkjrUkyAi\nIknn84UnCG+9pQQhFSlJEBGRpFm4MPgIY31CUFwc/PPBB3sbl0Sm4QYREUm4L7+EvfYKb/vqK9hz\nT2/ikbZRT4KIJJRTH3KHZxaeIDz6aLD3QAlC6lOSICJx5/f7mT15MiPz8zmtb19G5ucze/Jk/H6/\n16FJEr38cvPVEZ2DCRO8iUeip+EGEYkrv9/P+CFDmFZTQ2kggBFcQ3/p/PmMX7aMhVVVWiK3A2ia\nHKxZA4MHexOLxE49CSISVzfPnMm0mhpGhxIEAANGBwJMranhllmzvAxPEuycc8IThEMOCfYeKEFI\nT0oSRCSuli9ezKgI5XohmCgsX7QoyRFJMmzeHEwOHnpoV9vOnfDmm97FJO2nJEFE4sY5R5e6Olpa\nNd+AnLo6TWbMMGaw3367th96KPIyy5J+NCdBROLGzNiWlYWDiImCA7ZlZan4ToaorISzzgpvU/6X\nWZTniUhcDR07lqUtfIV8xudjWHFxkiOSeAsEgr0HjROEjz9WgpCJlCSISFzNKC9nbkEBS3w+6j8z\nHLDE52NeQQHTy8q8DE/a6fDDoVOnXds/+lEwOWg83CCZQ8MNIhJXubm5LKyq4pZZs5i7aBE5dXVs\nz8piaHExC8vK9Phjmnr3XRg0KLxNPQeZT0mCiMRdbm4upRUVUFGBc05zENJc07++55+H447zJBRJ\nMg03iEhCKUFIX+XlkVdMVILQcagnQUREwkQqxrRjB2RnexOPeEc9CSIi0qBpMabS0mDvgRKEjkk9\nCSIiwssvw/Dh4W2amChKEkREOjgVY5KWaLhBRKSDUjEmaY16EkREOpjNm5svfrRzp2otSHP6kRAR\n6UBUjEmiEdWPhZldZmavmVlt6LXCzEa3cswEM6sxsx2hY7/fvpBFRCRalZWR1zw4+2xv4pH0EO1w\nw4fAz4H3QtvnA0+a2eHOuZqmO5vZEOCR0DFPAWcBT5jZEc651TFHLSIibRIIhNdagGAxJtVakLaI\nqifBOfeUc+4Z59x7odcsYCtwTAuHTAGWOOfmOufeds7NBlYBk9oXtoiItEbFmKS9Yp64aGY+4Ewg\nB6hqYbchwC1N2pYC42K9roiI7J6KMUm8RJ0kmNmhBJOCbMAPnO6cW9PC7r2BTU3aNoXaRUQkzlSM\nSeIplvmsa4DDgKOBO4EHzOw7URxvgHJaEZE4UjEmSYSoexKcc98Aa0Obq8zsKIJzD34SYff/At9q\n0taL5r0LEU2dOpXu3buHtZWUlFBSUhJVzCIimSpSMabt25u3SWaqrKyksrIyrK22tjZu5zfX
zoEq\nM/sr8IFz7sII7/0B2Ms5N65R23LgNefc5bs5ZyFQXV1dTWFhYbviExHJVE17DkpLYfZsT0JpM+ec\nyocn2KpVqygqKgIocs6tas+5oupJMLNyYAnBRyFzgbOB44BTQu8/AHzknLsmdEgF8IKZTSP4CGQJ\nUARc0p6gRUQ6snQrxuT3+7l55kyWL15Ml7o6tmVlMXTsWGaUl5Obm+t1eLIb0Q43fAt4ANgfqAVe\nB05xzi0LvX8A8E39zs65KjMrAcpDr3eBcVojQUQkNulWjMnv9zN+yBCm1dRQGgg0TEpbOn8+45ct\nY2FVlRKFFBbtOgkXO+f6O+f2cs71ds41ThBwzp3YdNjBObfQOfed0DHfdc4tjVfwIiIdRdNiTAcf\nnB7FmG6eOZNpNTWMDiUIEJy9PjoQYGpNDbfMmuVleNIKrdYtIpIEsc7/2rIlmBw89NCutp074a23\n4hRYgi1fvJhRgUDE90YHAixftCjJEUk0lCSIiCSI3+9n9uTJjMzP57S+fRmZn8/syZPx+/1tOt4M\nevbctf3gg+lVjMk5R5e6OlqapmhATl1dzAmUJJ5KRYuIJEB7xuIrK+Gss8Lb0vFz1MzYlpWFg4iJ\nggO2ZWXpaYcUlib5qIhIeollLN65YO9B4wTh44/TM0GoN3TsWJa20PXxjM/HsOLiJEck0VCSICKS\nANGOxR9+ePgwQqYUY5pRXs7cggKW+HwNS+06YInPx7yCAqaXlXkZnrRCww0iInEWzVj8e+9ZRhdj\nys3NZWFVFbfMmsXcRYvIqatje1YWQ4uLWVhWpscfU5ySBBGROGvrWLzPF/5uphZjys3NpbSiAioq\ntOJimtFwg4hIAuxuLP4Cu4a/rl8X1tZRijEpQUgv6kkQkZSXjt8+Z5SXM37ZMlyjyYs76EwOX4bV\nwVUxJkll6kkQkZTU3jUGvFY/Fr9y0iROycvDcMEEIaS0NNh7oARBUpl6EkQk5WTKev+5ubkcfkIF\n191WEdaeSRMTJbOpJ0FEUk6mrPdvBqefvmt7zRolCJJelCSISMpJ9/X+Bw9uXq0xHYoxiTSl4QYR\nSSnRrDGQapMZ//1v6NMnvG3nzvSptSDSlH50RSSlNF5jIJJUXe/fLDxBKC9Pr2JMIpHox1dSgqrA\nSWPptN7/TTdFHlq45hpv4hGJJw03iGf8fj83z5zJ8sWL6VJXx7asLIaOHcuM8vK0mLkuiRNpjQFH\nMEGYV1DAwhRY7z8QgE6dwts+/BAOOMCbeEQSQUmCeCJTHnGTxEj19f6b9hzk5cG6dRF3FUlrShLE\nE40fcatX/4ibCz3iVlpR0fIJJOOl4nr///oXFBaGt2mkTDKZ5iSIJ9L9ETdJrlRIEMzCE4THHlOC\nIJlPSYIkXTSPuIl47ZxzIk9MHD/em3hEkknDDZJ0bS2jmwrfHqXj2r4dunQJb/P7oWtXb+IR8YJ6\nEsQT6fSIm3Q8ZuEJwoQJwd4DJQjS0ShJEE/MKC9nbkEBS3y+hkVzHLAk9Ijb9BR4xE06nnvuiTy0\n8Oij3sQj4jUNN4gnUv0RN+l4miYH//wnHHmkN7GIpAolCeKZVHzETTqeSD92mjMrEqThBkkJShAk\n2davb57pEtg+AAAdnElEQVQgfPONEgSRxpQkiEiHYwb5+bu2L700mBw0XWZZpKNTkiAiHcYVV0Se\nmHjXXd7EI5LqNCdBRDJepGJM778P/ft7E49IulCSICIZTRMTRWKn4QYRyUgvvRR5aEEJgkjbKUkQ\n6YAyvS6GGYwYsWv7zjuVHIjEQsMNIh2E3+/n5pkzWb54MV3q6tiWlcXQsWOZUV6eMYtXfe978Mor\n4W1KDkRipyRBpAPw+/2MHzKEaTU1lAYCGMFlsJfOn8/4ZctYWFWV1omC3w/duoW31dY2bxOR6Gi4\nQaQDuHnmTKbV1DA6lCBAsALn6ECAqTU13DJrlpfhtYtZ
eDJw6KHB3gMlCCLtpyRBpANYvngxowKB\niO+NDgRYvmhRkiNqv3vvjTwx8Y03vIlHJBNpuEEkwznn6FJXR0sLXxuQU1eXVvUzmoa5bBmccII3\nsYhkMiUJIhnOzNiWlYWDiImCA7ZlZaVFgpCsNQ/SKWESSSQNN4h0AEPHjmWpL/I/92d8PoYVFyc5\nouh88EHiizH5/X5mT57MyPx8Tuvbl5H5+cyePBm/3x+/i4ikGSUJIh3AjPJy5hYUsMTno/5z1QFL\nfD7mFRQwvazMy/B2ywzy8nZtX3JJ/Isx1T/9MWT+fJ5dv54nN27k2fXrGTJ/PuOHDFGiIB1WVEmC\nmV1tZv8wsy/MbJOZ/dnMBrVyzHlmFjCznaH/Bsxse/vCFpFo5ObmsrCqipWTJnFKXh7j+vThlLw8\nVk6alLKPP7ZUjOnuu+N/rUx++kOkPaKdkzAc+A3wSujYG4H/M7MC59yO3RxXCwxi15ColjcRSbLc\n3FxKKyqgoiKlx9wjFWN67z0YMCBx11y+eDGlu3n6Y+6iRVBRkbgARFJUVEmCc+7Uxttmdj7wMVAE\nvLz7Q90nUUcnIgmRqgmCF8WYMvHpD5F4ae+chB4EewU+bWW/rma23sw2mNkTZnZwO68rIhlkyRLv\nijE1fvojknR6+kMk3mJOEiz4L+ZW4GXn3Ord7Po2cCFQDJwduuYKM+sT67VFJHOYwamN+ijvuCP5\n9RbS/ekPkUSxWKvBmdmdwChgqHPuP1EctwdQAzzinJvdwj6FQPWIESPo3r172HslJSWUlJTEFLOI\npI6uXWHbtvA2r4ox1T/dMLXR5EVHMEGYV1CQspM7RSorK6msrAxrq62t5cUXXwQocs6tas/5Y0oS\nzOx2YCww3Dm3IYbjHwXqnHNnt/B+IVBdXV1NYWFh1PGJSOqqrYUePcLbPv0U9t7bm3jq+f1+bpk1\ni+WLFpFTV8f2rCyGFhczvaxMCYKklVWrVlFUVARxSBKiXnExlCCMA46LMUHwAYcCT0d7rIikt6bD\n+j4f7NzpTSxNpcvTHyLJFO06CXcQnFdwFrDNzL4VemU32meBmd3QaPsXZnaymeWb2RHAw8CBwD3x\nuQURSXU33RR5YmKqJAhNKUEQCYq2J+EygkN1zzdpvwB4IPTnvkDjf/p7A3cDvYHPgGpgiHNuTbTB\nimSqTP7m2vS2nnwSNA9QJD1Eu05Cqz0PzrkTm2xPA6ZFGZdIxvP7/dw8cybLFy+mS10d27KyGDp2\nLDPKyzNiDNyLNQ9EJL5UBVLEA/Wz6afV1FDaaDb90vnzGb9sWVrPpn/nHRg8OLztm2/iW2tBRJJD\nBZ5EPJCptQLMwhOEUaPiX4xJRJJHSYKIB5YvXsyo3dQKWL5oUZIjap/TTos8MfGZZ7yJR0TiQ8MN\nIkmWSbUCIhVjeustOFgLr4tkBCUJIknWuFZApBQgXWoFaGKiSObTcIOIB9K5VoCXxZhEJLmUJIh4\nYEZ5OXMLClji8zVUH3TAklCtgOllZV6G16KmxZiuu07JgUgm03CDiAdyc3NZWFXFLbNmMbdJrYCF\nKVgrIJWKMYlI8ihJEPFIOtQKSNViTCKSHEoSRFJAKiYITUMyCz7NICIdh+YkiEiYOXMiT0xUgiDS\n8agnQUQaqBiTiDSmJEFEtOaBiESk4QaRDuzdd5snCN98owRBRIKUJIh0UGYwaNCubRVjEpGmlCSI\ndDAqxiQibaU5CSIdhIoxiUi0lCSIdACamCgisdBwg0gGe+klFWMSkdgpSRDJUGYwYsSu7TvuUHIg\nItHRcINIhjnqKPjnP8PblByISCyUJIhkiK1boWnxyM8/h+7dvYlHRNKfhhtEMoBZeIJwyCHB3gMl\nCCLSHkoSRNLYffdFnpj45pvexCMimUXDDSJpqmlysGwZnHCCN7GISGZSkiCSZrTmgYgki4YbRNLE\nBx+oGJOIJJeSBJE0
YAZ5ebu2L7lExZhEJPGUJIiksMmTI09MvPtub+IRkY5FcxJEUlCkYkzvvQcD\nBngTj4h0TEoSRFKMJiaKSKrQcINIilAxJhFJNUoSRJLAtfJJr2JMIpKKlCSIJIjf72f25MmMzM/n\ntL59GZmfz+zJk/H7/Q37nHtu5N6Dn/wkycGKiESgOQkiCeD3+xk/ZAjTamooDQQwwAFL589n/LJl\nPPjXKnr3Dq/GpGJMIpJq1JMgkgA3z5zJtJoaRocSBAADRgcCPPvWm2EJwoQJKsYkIqlJSYJIAixf\nvJhRgUBY25MUY4RPNHAOHn00mZGJiLSdhhtE4sw5R5e6OhpPNWiaHAzf7we8sOkvQITnHUVEUoR6\nEkTizMzYlpWFAwpY3SxBCGDs2WU1FmlBBBGRFKIkQSQBvnvi2fhwrKGgoe0bOuEwnvH5GFZc7GF0\nIiJto+EGkTgLdhCUNWxfz0xmcQMOWOLzMa+ggIVlZS0dLiKSMqLqSTCzq83sH2b2hZltMrM/m9mg\nNhw3wcxqzGyHmb1mZt+PPWSR1HTzzc3XPJg9eQov5D3CuD59OCUvj5WTJrGwqorc3NzIJxERSSHR\n9iQMB34DvBI69kbg/8yswDm3I9IBZjYEeAT4OfAUcBbwhJkd4ZxbHXPkIikiUjGmDRugb1+ACqio\nwDmnOQgiknaiShKcc6c23jaz84GPgSLg5RYOmwIscc7NDW3PNrNTgEnA5VFFK5Jimn7u9+sHH3wQ\naT8lCCKSfto7cbEHwYXkPt3NPkOA55q0LQ21i6Sl996LvJxypARBRCRdxZwkWPCr0a3Ay60MG/QG\nNjVp2xRqF0k7ZnDQQbu2Fy5UMSYRyUztebrhDuBgYGgMx9YvZb9bU6dOpXuTtWpLSkooKSmJ4ZIi\n7XPjjXDNNeFtSg5ExEuVlZVUVlaGtdXW1sbt/NZaCduIB5ndDowFhjvnNrSy7wfALc652xq1lQLj\nnHNHtHBMIVBdXV1NYWFh1PGJxNNXX0F2dnjb9u2w117exCMisjurVq2iqKgIoMg5t6o954p6uCGU\nIIwDTmgtQQipAk5q0nZyqF0kpZmFJwizZwd7D5QgiEhHENVwg5ndAZQAxcA2M/tW6K1a59yXoX0W\nABudc/UdsxXAC2Y2jeAjkCUEn4a4JA7xiyTE8uUwbFh4m4YWRKSjibYn4TKgG/A88O9GrzMb7dOX\nRpMSnXNVBBODS4FXgR8SHGrQGgmSkszCE4Q1a5QgiEjHFO06Ca0mFc65EyO0LQQWRnMtkWQ77zx4\n4IFd2wUFsFqprIh0YKrdIB3eli3Qs2d4286d4FP5MxHp4PRrUDo0s/AE4YEHgkMLShBERNSTIB3U\nH/8IP/pReJvmHYiIhFOSIB1KpF6Cjz+G/fbzJh4RkVSmTlXpMAoLwxOE//3fYNKgBEFEJDL1JEjG\ne++98FoLoKEFEZG2UE+CZLSmxZief14JgohIWylJkIx0442RSzkfd5w38YiIpCMNN0hGUTEmEZH4\nUU+CZAwVYxIRiS/1JEjaUzEmEZHEUJIgaa3pvIOaGvjOd7yJRUQk02i4QdLS+eeHJwgFBcHeAyUI\nIiLxo54ESSsqxiQikjz61SppQ8WYRESSSz0JkvJUjElExBtKEiRlqRiTiIi31FErKamoKDxBOPNM\nFWMSEUk29SRISnn/fRg4MLxNQwsiIt5QT4KkDLPwBOFvf1OCICLiJSUJacBl+CdlS8WYjj/ek3BE\nRCREww0pyu/3c/PMmSxfvJgudXVsy8pi6NixzCgvJzc31+vw4kLFmKQp5xzWNGMUEc+oJyEF+f1+\nxg8ZwpD583l2/Xqe3LiRZ9evZ8j8+YwfMgS/3+91iO2mYkxSz+/3M3vyZEbm53Na376MzM9n9uTJ\nGfFzLpLu1JOQgm6eOZNpNTWMDgQa2gwYHQjgamq4ZdYsSisqvAuwHVSMSRqrT4in1d
RQGghggAOW\nzp/P+GXLWFhVlTE9ZyLpSD0JKWj54sWMapQgNDY6EGD5okVJjig+zMIThJoaJQgdXeOEuH6QoT4h\nnhpKiEXEO0oSUoxzji51dbQ0KmtATl1dWk1mzPRiTOn0d5FqMjUhFskUGm5IMWbGtqwsHERMFByw\nLSsrLSZ3ffop7LtveFumFGPqCBNLEy2ahDgdft5FMlEG/LrOPEPHjmVpC5+kz/h8DCsuTnJE0TML\nTxAyqRhTR5hYmgyNE+JI0ikhFslUGfArO/PMKC9nbkEBS3y+hl+gDlji8zGvoIDpZWVehrdbK1ZE\nXvPgnHO8iScRNI4eP5mQEItkMiUJKSg3N5eFVVWsnDSJU/LyGNenD6fk5bFy0qSUne3tXDA5GDp0\nV9vHH2fmxESNo8dPOifEIh2B5iSkqNzc3OBjjhUVKT8m+/Ofw5w5u7bnzoWpU72LJ5E0jh5f9Qnx\nLbNmMXfRInLq6tielcXQ4mIWlpWlZEIs0pEoSUgDqfph89//wv77h7dlYs9BY5k0sTRVpFNCLNLR\naLhBYpKVFZ4gvPlm5icI9TSOnjhKEERSi5IEicqf/xyce/DNN8HtMWOCycEhh4Tvl8lrB2gcXUQ6\nCg03SJvU1cGee4a3ffVVeFtHWTtA4+gi0lFYKn7jM7NCoLq6uprCwkKvw+nwJkyAxx7btf3HP8KZ\nZ4bv03gN/lGN1+D3+ZhbUJCyT2XEg8bRRSSVrFq1iqKiIoAi59yq9pxLPQnSorffbr50cks5ZSYX\npWqNEgQRyVSakyARmYUnCB99tPuJiVo7QEQk8yhJkDDz54evmDhlSjA56NOn5WMysSiViIhouEFC\ntm6FplMG2lqMSWsHiIhkpqh7EsxsuJktMrONZhYws90+FG5mx4X2a/zaaWa9Yg9b4umww8IThOef\nj74Yk9YOEBHJPLEMN3QBXgV+Ci0WcGvKAQcBvUOv/Z1zH8dwbYmj+mJMr78e3B40KJgcHHdc9OfS\n2gEiIpkn6uEG59wzwDMAFl3/8SfOuS+ivZ7EX6Regs8/h+7dYz+n1g4QEck8yZqTYMCrZpYNvAmU\nOudWJOna0kgiizFpDX4RkcySjCThP8CPgVeAzsAlwPNmdpRz7tUkXF9IfjEmJQgiIukv4UmCc+4d\n4J1GTX83swHAVOC8RF9fgsWY6mstQLAYU9NaCyIiIk159QjkP4Chre00depUujcZKC8pKaGkpCRR\ncWWUP/8ZfvjDXdtjxsDixd7FIyIi8VVZWUllZWVYW21tbdzO367aDWYWAE5zzkW1nJ6Z/R/whXPu\njBbeV+2GdmhLMSYREclM8azdEMs6CV3M7DAzOzzU1D+03Tf0/o1mtqDR/lPMrNjMBpjZIWZ2K3AC\ncHt7ApfIJkwITwb++Mfg3AMlCCIiEq1YhhuOBP5G8DF4B9wSal8AXEhwHYS+jfbfM7TPt4HtwOvA\nSc65F2OMWSJYswYKCsLbtAqyiIi0RyzrJLzAbnognHMXNNn+NfDr6EOTtmr6IMFHH+2+1oKIiEhb\nqMBTGoulGJOIiEhbqcBTGmpPMSYREZG20sdKmolHMSYREZG2UE9CmlixAoY2Wlli0CB4+23v4hER\nkcynJCHFJaIYk4iISFuokzqF/frX4QnC3LnBpEEJgoiIJIN6ElLQp5/CvvuGt2nNAxERSTb1JKSY\nk08OTxDWrVOCICIi3lCSkCJefjm45sFzzwW3r746mBzk5XkaloiIdGAabvDYN98ESzk39vXXzdtE\nRESSTT0JHnHOUVoangz87W/B3gMlCCIikgrUk5BEfr+fm2fO5K9/rmb5R8sb2ocM+YYVK/RXISIi\nqUU9CUni9/sZP2QI//rN4LAE4Q/Wi65fHI7f7/cwOhERkeaUJCTJ1Ivu4tm33mQxPwXgd1yMw/hf\n9wlTa2q4ZdYsjyMUEREJpyQhwb7+GgYMgHv/NA
OAcTxBAONi7m3YZ3QgwPJFi7wKUUREJCIlCQn0\n299C586wdm1wex15PMHpWJP9DMipq8NpQQQREUkhmi2XAB9+CP367dq+7TZ4cm4+B67/IOL+DtiW\nlYVZ0/RBRETEO+pJiCPn4IwzdiUIffvCjh1wxRUwdOxYlrZQz/kZn49hxcVJjFRERKR1ShLi5Lnn\ngsWYFi4Mbq9YARs2QHZ2cHtGeTlzCwpY4vNRP6jggCU+H/MKCpheVuZF2CIiIi1K2yQhVcbvt26F\nrl2DNRcALr002KMwZEj4frm5uSysqmLlpEmckpfHuD59OCUvj5WTJrGwqorc3NzkBy8iIrIbaTUn\noX4xouWLF9Olro5tWVkMHTuWGeXlnnzIXn89XHvtru1Nm6BXr5b3z83NpbSiAioqcM5pDoKIiKS0\ntEkS6hcjmlZTQ2kggBHsrl86fz7jly1L6rfxNWugoGDX9sMPw1lnRXcOJQgiIpLq0ma44eaZM5lW\nU8PoUIIAwUcHRwcCSVuMaOdOGD58V4Jw9NHBAk3RJggiIiLpIG2ShOWLFzMqEIj4XjIWI/rTn2CP\nPYIlnQHefBP+/nfo1CmhlxUREfFMWiQJzjm61NU1W4SoXiIXI9qyBczgzDOD27/4RXBi4iGHxP1S\nIiIiKSUt5iSYGduysnAQMVFI1GJEkybB/PnBP++5J3zyCXTrFtdLiIiIpKy06EmA5C5G9M9/BnsP\n6hOEJUvgq6+UIIiISMeSNklCMhYj+vprGDgQjjoquD1uHAQCMHp0u08tIiKSdtImSUj0YkT1xZje\nfz+4vW4dPPFEsEdBRESkI0qLOQn1ErEYUaRiTFdc0e7TioiIpL20ShIaa2+C4BxMmLCr1kLfvvDO\nO7tqLYiIiHR0aTPcEE+tFWMSERGRNO5JiMXWrdC7N2zbFty+9FK46y5vYxIREUlVHaYn4frrITd3\nV4KwaZMSBBERkd3J+J6EeBRjEhER6YgyNknYuROOP35XrYWjjgrOPVCtBRERkbbJyOGGSMWYVq5U\ngiAiIhKNjEoSVIxJREQkfjJmuKFxMaasLNi8WbUWRERE2iPtk4R//nNXrQUIFmNSrQUREZH2S9vh\nhnQqxlRZWel1CHGl+0ldmXQvoPtJZZl0L5B59xMvUScJZjbczBaZ2UYzC5hZqzWazex4M6s2sy/N\n7B0zOy+2cINefDG9ijFl2g+f7id1ZdK9gO4nlWXSvUDm3U+8xNKT0AV4FfgpNFRtbpGZ5QF/Af4K\nHAZUAPeY2ckxXBuARx4J/ve224ITE/PyYj2TiIiItCTqOQnOuWeAZwCsbVWWfgKsdc5dGdp+28yG\nAVOBZ6O9PgTLOv/2t7EcKSIiIm2VjDkJxwDPNWlbCgxJwrVFREQkRsl4uqE3sKlJ2yagm5l1ds59\nFeGYbICamppEx5YUtbW1rFq1yusw4kb3k7oy6V5A95PKMuleILPup9FnZ7trG5tzrU4raPlgswBw\nmnNu0W72eRu4zzl3U6O2U4HFwF7Oua8jHHMW8HDMgYmIiMjZzrlH2nOCZPQk/Bf4VpO2XsAXkRKE\nkKXA2cB64MvEhSYiIpJxsoE8gp+l7ZKMJKEK+H6TtlNC7RE557YA7cp+REREOrAV8ThJLOskdDGz\nw8zs8FBT/9B239D7N5rZgkaH/BYYYGY3mdlgM7scOAOY2+7oRUREJGGinpNgZscBf6P5GgkLnHMX\nmtnvgQOdcyc2OWYucDDwEfBL59yD7YpcREREEqpdExdFREQkc6Vt7QYRERFJLCUJIiIiElHKJAlm\ndrWZ/cPMvjCzTWb2ZzMb5HVcsTKzy8zsNTOrDb1WmFkK1qiMXujvKmBmaTn51Mxmh+Jv/FrtdVzt\nYWbfNrMHzWyzmW0P/ewVeh1XLMxsXYS/n4CZ/cbr2KJlZj4zu97M1ob+Xt4zs1lex9UeZtbVzG41\ns/Whe3rZzI
70Oq62aEuBQjP7pZn9O3Rvz5rZQC9ibU1r92Jmp5vZM2b2Sej978ZynZRJEoDhwG+A\no4GRQBbwf2a2l6dRxe5D4OdAUei1DHjSzAo8jaqdzOx7wCXAa17H0k5vEly/o3foNczbcGJnZj2A\n5cBXwCigAJgOfOZlXO1wJLv+XnoDJxOcKP2ol0HF6Crgx8DlwHeAK4ErzWySp1G1z73ASQTXsjmU\nYA2e58xsf0+japvdFig0s58Dkwj+nR0FbAOWmtmeyQyyjVorttgFeJng51DMkw9TduKimfUEPgZG\nOOde9jqeeDCzLcAM59zvvY4lFmbWFagmWLTrF8C/nHPTvI0qemY2GxjnnEvLb9pNmdmvgCHOueO8\njiURzOxW4FTnXNr1LJrZYuC/zrlLGrU9Bmx3zp3rXWSxMbNswA+MDRX7q29/BXjaOXetZ8FFKdKK\nwWb2b+DXzrl5oe1uBMsInOecS9kkdXerH5vZgcA64HDn3OvRnjuVehKa6kEw+/nU60DaK9Tl+CMg\nh90sIpUG5gOLnXPLvA4kDg4KddO9b2YP1a/zkabGAq+Y2aOhobpVZnax10HFg5llEfzGeq/XscRo\nBXCSmR0EYGaHAUOBpz2NKnZ7AJ0I9lo1toM07o0DMLN8gj1Xf61vc859AaykAxckTMaKi1ELlaC+\nFXjZOZe2Y8VmdijBpKA++z7dObfG26hiE0pyDifYFZzu/g6cD7wN7A+UAi+a2aHOuW0exhWr/gR7\nd24BygkO2d1mZl865x7yNLL2Ox3oDixobccU9SugG7DGzHYS/GI20zn3B2/Dio1zbquZVQG/MLM1\nBL9ln0XwQ/RdT4Nrv94Ev5hGKkjYO/nhpIaUTBKAOwguvDTU60DaaQ1wGMFekfHAA2Y2It0SBTM7\ngGDSdrJzrs7reNrLOdd4PfM3zewfwAfAmUA6DgX5gH84534R2n7NzA4hmDike5JwIbDEOfdfrwOJ\n0f8S/BD9EbCaYKJdYWb/TuMF5SYC9wEbgW+AVQSX0c+I4bsIjHaM6ae7lBtuMLPbgVOB451z//E6\nnvZwzn3jnFvrnFvlnJtJcLLfFK/jikERsB9QbWZ1ZlYHHAdMMbOvQz0/acs5Vwu8A6TkLOY2+A/Q\ntK56DdDPg1jixsz6EZzE/DuvY2mHOcCNzrk/Oefecs49DMwDrvY4rpg559Y5504gODGur3PuGGBP\nguPe6ey/BBOCSAUJm/YudBgplSSEEoRxwAnOuQ1ex5MAPqCz10HE4Dngfwh+Czos9HqF4LfUw1yq\nzn5to9CEzAEEP2zT0XJgcJO2wQR7R9LZhQR/Oafr+D0E5yE1/fcRIMV+98bCObfDObfJzPYm+FTN\nE17H1B7OuXUEE4WT6ttCExePJk7FkjwU8+/olBluMLM7gBKgGNhmZvXZXK1zLu3KRZtZObCE4KOQ\nuQQnXx1HsAJmWgmN04fNDTGzbcAW51zTb7Apz8x+DSwm+CHaB7iOYLdppZdxtcM8YLmZXU3wMcGj\ngYsJPqqalkK9U+cD9zvnAh6H0x6LgZlm9iHwFsEu+anAPZ5G1Q5mdgrBb9xvAwcR7C2pAe73MKw2\nMbMuBHsM63s/+4cmk37qnPuQ4LDqLDN7D1gPXE+w3tCTHoS7W63dSyh560fwd5wB3wn9u/qvc67t\nPSPOuZR4Ecyud0Z4net1bDHezz3AWoKzfv8L/B9wotdxxfH+lgFzvY4jxtgrCf7D3wFsIDiemu91\nXO28p1OB14HtBD+MLvQ6pnbez8mhf/8DvY6lnffRhWBxu3UEn7l/l2BSuofXsbXjniYA74X+/WwE\nKoBcr+NqY+zHtfBZc1+jfUqBf4f+LS1N1Z/B1u4FOK+F96+N5jopu06CiIiIeCvtx8VEREQkMZQk\niIiISERKEkRERCQiJQkiIiISkZIEERERiUhJgoiIiESkJEFEREQiUpIgIiIi
ESlJEBERkYiUJIiI\niEhEShJEREQkov8PMJtz3b7pz2EAAAAASUVORK5CYII=\n", 155 | "text/plain": [ 156 | "" 157 | ] 158 | }, 159 | "metadata": {}, 160 | "output_type": "display_data" 161 | } 162 | ], 163 | "source": [ 164 | "# Launch the graph\n", 165 | "with tf.Session() as sess:\n", 166 | " sess.run(init)\n", 167 | "\n", 168 | " # Fit all training data\n", 169 | " for epoch in range(training_epochs):\n", 170 | " for (x, y) in zip(train_X, train_Y):\n", 171 | " sess.run(optimizer, feed_dict={X: x, Y: y})\n", 172 | "\n", 173 | " #Display logs per epoch step\n", 174 | " if (epoch+1) % display_step == 0:\n", 175 | " c = sess.run(cost, feed_dict={X: train_X, Y:train_Y})\n", 176 | " print \"Epoch:\", '%04d' % (epoch+1), \"cost=\", \"{:.9f}\".format(c), \\\n", 177 | " \"W=\", sess.run(W), \"b=\", sess.run(b)\n", 178 | "\n", 179 | " print \"Optimization Finished!\"\n", 180 | " training_cost = sess.run(cost, feed_dict={X: train_X, Y: train_Y})\n", 181 | " print \"Training cost=\", training_cost, \"W=\", sess.run(W), \"b=\", sess.run(b), '\\n'\n", 182 | "\n", 183 | " #Graphic display\n", 184 | " plt.plot(train_X, train_Y, 'ro', label='Original data')\n", 185 | " plt.plot(train_X, sess.run(W) * train_X + sess.run(b), label='Fitted line')\n", 186 | " plt.legend()\n", 187 | " plt.show()" 188 | ] 189 | }, 190 | { 191 | "cell_type": "code", 192 | "execution_count": 1, 193 | "metadata": { 194 | "collapsed": false 195 | }, 196 | "outputs": [ 197 | { 198 | "data": { 199 | "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAyAAAAJYCAYAAACadoJwAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAPYQAAD2EBqD+naQAAIABJREFUeJzs3Xl4VOXd//HPmbAlIMFMQBYJSCAqW2kwgw8oAooaqCgI\nCRRsJa1an1oVccFSUXGpiojKU+qKEEFKkEXQRkSLK5KJCS5V1vhDKFvJBAMYCJCc3x8xCWdmEhJI\n5kxm3q/r4ro49zlnznfmYplPvvd9jmGapikAAAAACACH3QUAAAAACB8EEAAAAAABQwABAAAAEDAE\nEAAAAAABQwABAAAAEDAEEAAAAAABQwABAAAAEDAEEAAAAAABQwABAAAAEDAEEAAAAAABQwABAAAA\nEDAEEAAAAAABQwABAAAAEDAEEAAAAAABQwABAAAAEDAEEAAAAAABQwABAAAAEDAEEAAAAAABQwAB\nAAAAEDAEEAAAAAABQwABAAAAEDAEEAAAAAABQwABAAAAEDAEEAAAAAABQwABAAAAEDAEEAAAAAAB\nQwABAAAAEDAEEAAAAAABQwABAAAAEDAEEAAAAAABQwABAAAAEDAEEAAAAAABQwABAAAAEDAEEAAA\nAAABQwABAAAAEDAEEAAAAAABQwABAAAAEDAEEAAAAAABQwABAAAAEDAEEAAAAAABQwABAAAAEDAE\nEAAAAAABQwABAAAAEDAEEAAAAAABQwABAAAAEDAEEAAAAAABQwABAAAAEDAEEAAAAAABQwABAAAA\nEDAEEAAAAAABQwABAAAAEDAEkBp67LHH5HA41Lt37xodX1hYqJtvvllt2rRRixYtNGTIEG3YsKGe\nqwQAAACCm2Gapml3EcFu165dOv/88+VwONS5c2d9/fXX1R5vmqYuueQSffPNN7r33nvldDo1Z84c\n7dixQ7m5uYqPjw9Q5QAAAEBwIYDUwNixY+XxeHTixAl5PJ5TBpCMjAyNHTtWS5cu1ciRIyVJ+fn5\nSkhI0LBhw7RgwYJAlA0AAAAEHaZgncLHH3+sZcuW6dlnn63xOUuXLlXbtm0rwockxcbGKiUlRW+9\n9ZaOHz9eH6UCAAAAQY8AUo3S0lLdfvvtuummm9SjR48an7dhwwYlJib6jLtcLhUVFWnLli11WSYA\nAADQYBBAqvH3v/9dO3bs0COPPFKr8/bs2aN27dr5jJeP7d69u07qAwAAABoaAkgVCgoK9OCDD2ra\ntGmKiYmp1blHjhxR06ZNfcabNWsm0zR15MiRuioTAAAAaFAa2V1AsJo6daqcTqduu+22Wp8bGRmp\n4uJin/GjR4/KMAxFRkb6PS8/P1+rV69W586dqzwGAAAA9jly5Ii2b9+uq666SrGxsXaX0yARQPzY\ntm2bXn75ZT333HPatWuXpLJb6x49elTHjx/XDz/8oJYtW+rss8/2e367du20Z88en/Hysfbt2/s9\nb/Xq1ZowYUIdvQsAAADUlwULFmj8+PF2l9EgEUD82LVrl0zT1O23364//elPPvu7dOmiO+64Q888\n84zf8/v06aNPP/3UZ3z9+vWKiopSQkKC3/M6d+4sqewP9IUXXnj6byDETJo0SbNmzbK7jKDCZ+KL\nz8QXn4kVn4cvPhNffCa++EysNm7cqAkTJlR8b0PtEUD86Nmzp5YvX+4zPnXqVB0+fFjPP/+8unTp\nIknau3evCgsL1bVrV0VEREiSRo8eraVLl2rZsmUaNWqUpLLpVW+++aZGjBihxo0b+71u+bSrCy+8\n0O9dtMJVdHQ0n4cXPhNffCa++Eys+Dx88Zn44jPxxWfiH9PlTx8BxA+n06kRI0b4jM+aNUuGYeia\na66pGJsyZYrS09O1fft2xcXFSSoLIM8++6wmTpyob7/9VrGxsZozZ45KS0v10EMPBeptAAAAAEGH\nAFJLhmH4bDsc1puJORwOZWZm6p577tHs2bN15MgRuVwupaenq
1u3boEsFwAAAAgqBJBaWLt2rc/Y\na6+9ptdee81nPDo6Wi+99JJeeumlQJQGAAAANAg8BwRBb9y4cXaXEHT4THzxmfjiM7Hi8/DFZ+KL\nz8QXnwnqmmGapml3ESiTm5urvn37Kicnh8VeAAAAQYjva2eODggAAACAgGENCAAAYWDHjh3Kz8+3\nuwwgqMXGxlbc1RT1hwACAECI27Fjhy688EIVFRXZXQoQ1KKiorRx40ZCSD0jgAAAEOLy8/NVVFSk\nBQsW6MILL7S7HCAolT/hPD8/nwBSzwggAACEiQsvvJBFswBsxyJ0AAAAAAFDAAEAAAAQMAQQAAAA\nAAFDAAEAAAAQMAQQAAAAAAFDAAEAAKgjDodDQ4YMOePXGTRokByOwH9N++GHH+RwOJSWllYnr1dX\nnwdCCwEEAACEnZycHE2cOFHx8fGKiopSdHS0evfurXvvvVe7d+8+7dc1DEOGYZxxfYZh2BJAglFd\nhyLYj+eAAACAsHLfffdpxowZaty4sYYOHaqUlBQdO3ZM69at09NPP605c+Zo/vz5uv7662v92hs3\nblRUVNQZ1/j666/z5HqELAIIAADw4Xa7tSYjQ5I0NCVFLpcrJK43ffp0zZgxQ126dNHbb7+tCy64\nwLJ/+fLlGj9+vMaNG6c1a9bosssuq9XrJyQk1Emd5557bp28TigwTdPuElDH6O0BAIAKBQUFuq5f\nPy0ZNkyDZ87U4JkztWTYMF3Xr58KCgoa9PV++OEHPfroo2rSpIlWrlzpEz4kaeTIkZo1a5ZOnDih\nW2+91bJv/vz5cjgcSk9P17vvvqvBgwerVatWioiIqDimqjUPe/fu1cSJE3XOOecoKipKv/zlL5We\nnq6PPvpIDodD06dPtxzvbw3Iycd+9dVXGj58uM4++2w1b95cgwYN0ueff+5z3T179mj69Om65JJL\n1K5dOzVt2lQdOnTQ+PHjtXHjxlp9flU5fvy4HnnkEXXt2lXNmjVTly5d9MADD+jYsWN+j69NTQ8/\n/LC6dOkiwzA0b948ORyOil/p6ekV1/+///s/DR8+XJ07d1azZs3kdDo1dOhQvfvuu3XyHlG36IAA\nAIAKacnJmuZ2K/Gksf4ej3I9HqUlJ2tFVlaDvd7cuXN14sQJjR07Vt27d6/yuN///veaPn26Nm/e\nrI8++sjSBTEMQ0uWLNG7776rYcOG6dZbb9WOHTuqve7+/ft18cUXa+fOnbrsssv0P//zP9q7d6/+\n+Mc/aujQoX7XjFS3liQ7O1tPPvmk+vfvr5tuukk7duzQm2++qSuuuEJffvmlunXrVnHsxx9/rKee\nekqDBw/W6NGj1aJFC23dulVLly7VypUrtW7dOvXq1etUH121xowZo5UrV6pr167605/+pGPHjum1\n117TN9984/f42tQ0ePBgFRYW6tlnn1WfPn103XXXVbxOnz59JJWF2DvvvFMDBgzQlVdeqdatW2vP\nnj1atWqVhg0bpldeeYX1I8HGRNDIyckxJZk5OTl2lwIACCE1/f8lKyvLvNvpNE3J76/JTqfpdrvr\nrK5AX+/yyy83HQ6H+corr5zy2PHjx5sOh8N87LHHKsbmzZtnGoZhRkREmO+9957f8wzDMAcPHmwZ\nS0tLMx0Oh3n//fdbxr/++muzadOmpsPhMB9++GHLvkGDBpkOh8My9uGHH5qGYZgOh8NMT0+37Hvx\nxRdNwzDMP/7xj5bx/fv3m4cPH/ap8+uvvzZbtGhhDhs2zDK+fft20zAMc+LEiX7fn7eFCxeahmGY\nAwYMMIuLiyvGDxw4YMbHx5sOh8Pn86jrmoqLi81du3b5jB88eNDs2bOn6XQ6zaNHj57yvdT07wnf\n184cU7AAAIAkaU1GhkZ6PFXuH+Xx6L3Fixvs9fbs2SNJ6tix4ymP7dixo0zT9HtHrOuuu05Dhw6t\n0TWPHz+uf/zjH4qOjtbUq
VMt+3r16qXf/OY3NXqdk11yySW64YYbLGNpaWlq1KiR3G63ZTw2NlbN\nmzf3eY1evXppyJAhWrt2rUpKSmpdQ7nXXntNhmHo8ccfV5MmTSrGW7VqpQceeMDv+o26rqlJkyZq\n3769z/hZZ52ltLQ0HThwQNnZ2TV+PdQ/AggAAEAtJCUl1fjYzZs368iRI+rdu7ffL92XXHJJrRdZ\n9+3b12esUaNGOuecc3TgwAGffe+8846uueYatW/fXk2aNKlYQ7Fq1SoVFxcrPz+/Vtc/2YYNG+Rw\nODRgwACffYMGDaryvLqu6bvvvtONN95YcVvl8tebPHmyJGnXrl21ej3UL9aAAAAASWV3n1oyb576\nV9GVWOZ0KjU1tcFer23bttq0aZN27tx5ymN37twpwzD8/mS9bdu2Nb5mYWGhJOmcc87xu7+q8eq0\natXK73ijRo18OgfPPfecJk2apJiYGA0dOlRxcXGKioqSYRhavny5vv76axUXF9e6hnKFhYWKiYmx\nLMQvV9XnVNc1rV+/XpdffrlKSkp0+eWX69prr1XLli3lcDj05Zdf6q233jqj94i6RwABAACSJJfL\npcfj45Xr8VgWhUtSrqRt8fG1+ul/sF3vkksu0dq1a/X+++/rd7/7XZXHlZaW6sMPP5Qkn5/s1/ZB\ngy1btpQk7du3z+/+qsbrQklJiR5++GG1a9dOGzZsUJs2bSz7161bd8bXiI6OVkFBgUpKSnxCyN69\newNS06OPPqqjR4/qww8/1KWXXmrZ98QTT+itt96q9WuifjEFCwAAVJibmanpLpfudjq1TtI6SXc7\nnZrucmluZmaDvt6NN96oiIgILV++vNpb0L766qvavXu3Lrjgglo/B8TbBRdcoMjISH399df66aef\nfPZ/8skndfLkdH/y8/P1448/qn///j5f9H/66Sfl5uae8TUSExNVWlqqTz/91Gff2rVr66Sm8mBT\n1bqQvLw8xcTE+IQPSRVBEsGFAAIAACrExMRoRVaWUjMztXbyZK2dPFmpmZlakZWlmJiYBn298847\nT3/+85917NgxXXPNNX5DyIoVK3TnnXeqUaNG+vvf/37G12zcuLFSU1P1448/6tFHH7Xs++qrr/T6\n66+f8TWq0qZNG0VFRSknJ8cSfk6cOKHbb7/9jNZ+lJs4caJM09TUqVMt05wKCgr02GOP+YSr06np\n7LPPlmEYVd7uuHPnziooKNC///1vy/irr76q995770zeHuoJU7AAAICPpKSkOp3+FCzXe+ihh1RU\nVKRnnnlGv/jFL3TVVVepR48eOn78uNatW6esrCxFRUXpH//4hwYOHOhzfm0XjEtl04D+9a9/6amn\nntL69evVv39/7d69W0uWLNHw4cO1YsUKn4cO1gXDMHT77bfrySefVK9evXTttdfq2LFjWrt2rQ4c\nOKDBgwefcYdg3LhxWrx4sVatWqWePXvq2muv1fHjx/Xmm2/K5XIpLy/vjGtq3ry5+vXrp08++UQT\nJkxQQkKCIiIidO2116pnz5668847tXr1ag0YMEApKSmKjo7WF198oc8++0xjxozRkiVLzug9ou7R\nAQEAAGHDMAzNmDFDWVlZGj9+vL777jvNnj1bL7/8sn766Sfdc8892rJli0aNGlXl+ad6fX8/9f/8\n88/1m9/8Rt99952effZZffXVV3rhhRf061//WqZpVqwVOdW1TrUGxXvfo48+qpkzZyoqKkovvfSS\nli9fLpfLJbfbrbi4uNO6hrc333xTDz/8sEzT1N/+9jetWrVKv/vd75SRkeH3tU6npgULFmj48OFa\nvXq1pk+frmnTplVM17rqqqv09ttvq0ePHsrIyNDcuXMVGRmptWvXatiwYfU2xQ2nzzBPJ8qjXuTm\n5qpv377KyclRYqL3cjwAAE4P/78Er6lTp+qJJ57Qu+++W+Nni6B+1PTvCX+fzhwdEAAAgHp
W/hDE\nk33zzTeaPXu2nE7nGS92BxoS1oAAAADUs4suukhdu3ZVz5491bx5c23dulXvvPOOTNPUyy+/bHmK\nOBDqCCAAAAD17A9/+INWrFihf/zjHzp06JBatWql5ORk3X333X5vHwuEMgIIAABAPXvggQf0wAMP\n2F0GEBRYAwIAAAAgYAggAAAAAAKGAAIAAAAgYAggAAAAAAKGAAIAAAAgYAggAAAAAAKGAAIAAAAg\nYAggAAAAAAKGAAIAAAAgYAggAAAAfkyYMEEOh0O7d++2u5QqXXLJJWrcuHGNjz/33HOVkJBgGXvl\nlVfkcDj0xhtv1HV5gF8EEAAAEDYcDke1v9LT0yuONQxDDof1q1JeXp4cDoduvvlmv6//wQcfyOFw\n6PHHH6/X91HOMAwZhlGr48/0NYAz1cjuAgAAAALJMAw99NBDMk3TZ1+fPn0qfv/000/rgQceUNu2\nbQNZXsClpKTo0ksvVfv27e0uBWGCAAIAAMLOAw88cMpjzjnnHJ1zzjmWMX+hpTb7g9FZZ52ls846\ny+4yEEaYggUAAOCH9xqQBx54QAkJCTIMo2LdhMPhUEREhN544w3dcMMNuvLKK2UYhv7yl79Y9q9b\nt87y2gsXLtTgwYN19tlnKzIyUj169NBf//pXHT9+3G8tCxcuVGJioiIjI3XOOefoxhtv1L59++rk\nfb766qt+14CUrxcpKirS5MmT1alTJzVr1kwJCQmaOXNmla/3+eef6/rrr1fbtm3VtGlTxcXF6dZb\nb9XevXvrpF40fHRAAAAA/PBeG3H55Zfr0KFDev7555WYmKgRI0ZU7Ovdu7eioqIUERGh9PR0DRky\nRAMHDqzYHxcXV/H73/72t3r99dfVqVMnjRkzRtHR0Vq3bp2mTp2qtWvXavXq1ZbrzpgxQ/fdd59i\nYmKUlpamli1bKjMzUwMGDFBUVFSdvVd/Y8eOHdMVV1yh/fv3a/jw4YqIiNDy5ct1zz336NixY7r/\n/vst57z88su69dZbFRUVpREjRujcc8/Vli1b9PLLL+vtt9+W2+1Wu3bt6qRmNFwEEAAAEHYefvhh\nn7HOnTvrt7/9bZXnDBo0SB07dqwIINOmTbPs79mzp84666yKAPLnP//Z5zVeeeUVvf7660pNTdX8\n+fPVpEmTin0PPvigHn30Ub3wwgu69dZbJUnff/+9pk6dqtatWys3N1cdOnSQJD3++OMaNWqU3nrr\nLTVqVH9f53bu3Kk+ffpo7dq1atq0qSTpL3/5S0UXZMqUKRXhZdOmTbrtttuUkJCgDz/8UG3atKl4\nnffff19XX3217rzzTi1evLje6kXDQAABAAB+FRVJmzYF/roXXCDV0Q/2qzR9+nSfscsuu6zaAFIX\nnnvuOTVt2lQvv/yyJXxI0rRp0zR79mwtXLiwIoC8/vrrKikp0R133FERPqSy7sSMGTO0cuXKeq1X\nkmbPnl0RPqSytTHXXHONFi1apK1bt1bc1vdvf/ubTpw4oeeee84SPiTpiiuu0LBhw7RixQodOXJE\nkZGR9V43ghcBBAAA+LVpk9S3b+Cvm5MjJSbW7zVKSkrq9wJ+HD58WN9++63atm3rdw2FaZpq1qyZ\nNm7cWDG2YcMGSbJM5yrXtWtXtW/fvs7WgvjjdDrVsWNHn/HysQMHDlSMrV+/XpL0r3/9y2fNiyTl\n5+frxIkT2rZtm3r16lVPFaMhIIAAAAC/LrigLAzYcd1QVFBQIEnat2+f3w5MuZMfLFhYWChJPnfj\nKte2bdt6DSCtWrXyO14+7evkIOfxeCRJTz31VJWvZxiGDh8+XIcVoiEigAAAAL+iouq/ExFOoqOj\nJUlJSUkV3YKanrNv3z5169bNZ38w3VmqvNaioiLLlC3AG7fhBQAAqKGIiAhJVU/hqm5/dHS0zj//\nfH3zzTc6ePBgja6XmJgo0zT10Ucf+ezbtm1bxS2Cg8H
FF18sSfr4449trgTBjgACAABQQzExMZKk\nHTt2+N3vdDqr3X/XXXfpyJEjSktL8xtCDhw4oC+//LJie8KECWrUqJGee+457dy5s2K8tLRUd999\nd1A9+PBPf/qTIiIidMcddygvL89n//Hjx/XZZ5/ZUBmCDVOwAAAAaqhly5a66KKLtHbtWt1www1K\nSEiQw+HQddddpx49eqh79+5q166dFi5cKMMwFBcXJ8MwdOONN6pDhw666aablJubq5deekkfffSR\nrrzySsXFxamgoEDff/+9PvnkE9188816/vnnJUldunTRY489pilTpqhPnz5KSUlRdHS0MjMzVVRU\npJ49e2pTHdyqrC6CTPfu3fXqq6/qpptuUvfu3ZWcnKxu3bqpuLhYO3bs0CeffKIOHTro66+/PuNr\noWEjgAAAgLDi76F7tTn2jTfe0F133aXMzEwtWrRIpmnqvPPOU48ePRQREaEVK1ZoypQpysjI0KFD\nhyRJgwcPrriN7t///ncNHz5cL774ot5//339+OOPcjqd6tSpk6ZMmaLx48dbrnfPPffo3HPP1dNP\nP6358+erZcuWSk5O1hNPPKHRo0fX6v1U9Z6qeo3avvYNN9ygPn366JlnntGHH36o1atXq3nz5mrf\nvr3GjRunlJSUWr0eQpNhBlPvLszl5uaqb9++ysnJUSKr/gAAdYT/X4BTq+nfE/4+nTnWgAAAAAAI\nGAIIAAAAgIAhgAAAAAAIGAIIAAAAgIAhgAAAAAAIGAIIAAAAgIAhgAAAAAAIGAIIAAAAgIAhgAAA\nAAAIGAKIH999951SUlIUHx+v5s2bq3Xr1rrsssv09ttvn/Lc+fPny+Fw+PyKiIjQf//73wBUDwAA\nAASvRnYXEIx++OEHHT58WDfeeKPat2+voqIiLV26VCNGjNBLL72k3//+99WebxiGHnnkEXXu3Nky\n3qpVq3qsGgCA6m3cuNHuEoCgxd+PwCGA+JGcnKzk5GTL2G233abExEQ988wzpwwgknT11VcrMTGx\nvkoEAKDGYmNjFRUVpQkTJthdChDUoqKiFBsba3cZIY8AUkOGYahjx4764osvanzO4cOHFRUVJYeD\nmW4AAPvExcVp48aNys/Pt7sUwK+FC6Vnnin7fZs20qpVUiMbvqXGxsYqLi4u8BcOMwSQahQVFenI\nkSMqLCzUW2+9pczMTI0bN+6U55mmqUGDBunw4cNq0qSJrrrqKs2cOVNdu3YNQNUAAPiKi4vjixWC\nzsGDUnR05fby5dJ119lXDwKDAFKNyZMn68UXX5QkORwOXX/99Zo9e3a150RFRWnixIkaPHiwWrZs\nqZycHM2cOVMDBgxQbm6uOnToEIjSAQAAgtrLL0s331z2+xYtpP37pWbN7K0JgUEAqcakSZM0ZswY\n7d69WxkZGSopKVFxcXG154wZM0Zjxoyp2B4xYoSuvPJKDRw4UI899pjmzJlT32UDAAAErZ9+Kgsc\n5RYulH79a/vqQeCxOKEaCQkJGjJkiCZMmKCVK1fq0KFDGjFiRK1fZ8CAAerXr5/ef//9eqgSAACg\nYXjjDWv4+Oknwkc4ogNSC6NHj9Yf/vAHbd26Vd26davVuR07dtSWLVtqdOykSZMUffKESEnjxo2r\n0foTAACAYFNcLLVuLR06VLb94ouV06+C2aJFi7Ro0SLLWGFhoU3VhA4CSC0cOXJE0un9wfv+++/V\nunXrGh07a9YsbuELAABCwooV0siRlduFhVLLlvbVI0lut1trMjIkSUNTUuRyufwe5+8HwLm5uerb\nt2+91xjKmILlx/79+33GTpw4ofnz5ysyMlLdu3eXJO3du1ebN29WSUlJxXH+bnH4z3/+Uzk5OT7P\nFgEAAAhVx49L555bGT5mzZJM097wUVBQoOv69dOSYcM0eOZMDZ45U0uGDdN1/fqpoKDAvsLCDB0Q\nP2655RYdPHhQAwc
OVIcOHbR3714tXLhQmzdv1jPPPKOoqChJ0pQpU5Senq7t27dX3Nqwf//++uUv\nf6mLLrpI0dHRysnJ0WuvvaZOnTrp/vvvt/NtAQAABMR770lXXVW5nZ8vOZ321VMuLTlZ09xunTzP\npL/Ho1yPR2nJyVqRlWVbbeGEAOLH2LFj9eqrr+qFF16Qx+PRWWedpb59+2rGjBkaPnx4xXGGYfg8\nZHDs2LF65513tGbNGhUVFaldu3a65ZZbNG3atBpPwQIAAGiISkqk3r2l774r237kEekvf7G3pnJu\nt1vd8vLkb5J7oqSueXnKzs5WUlJSoEsLO4ZpmqbdRaBM+ZzCnJwc1oAAAIAG5eOPpcsuq9zes0dq\n29a+erw9dvfdGjxzpvpXsX+dpLWTJ2vq009X+zp8XztzrAEBAADAaTNN6X/+pzJ83Hdf2VgwhQ8E\nFwIIAAAATkt2tuRwSOvXl23v2CE98YS9NVVlaEqKllezEGWZ06krU1MDWFH4IoAAAACgVkyzbJF5\n+d1r//d/y8Y6drS3ruq4XC5tjY9Xrp99uZK2xcez/iNAWIQOAACAGvv6a+kXv6jc3rZNio+3r57a\nmJuZqbTkZHXNy9Moj0dSWedjW3y85mZm2lxd+CCAAAAAoEZSUqQlS8p+P2GC9Prr9tZTWzExMVqR\nlaXs7Gy9t3ixJCk1NZXOR4ARQAAAAFCtzZulCy6o3P72W+nn5zI3SElJSYQOG7EGBAAAAFX63e8q\nw8eIEVJpacMOH7AfHRAAAAD42L5dOu+8yu2cHInHXqAu0AEBAACAxaRJleFj0KCyrgfhA3WFDggA\nAAAkSbt3Sx06VG6vW1f2kEGgLhFAAAAAQpTb7daajAxJZQ/ic5U/uMOPadOkRx4p+31iYuVDBoG6\nRgABAAAIMQUFBUpLTla3vDyN/Pl5F0vmzdPjPz/vIiYmpuLY/fulNm0qz/3gA2nIkEBXjHBCAAEA\nAAgxacnJmuZ26+RlG/09HuV6PEpLTtaKrCxJ0lNPSffdV7a/a1dp40apEd8OUc9orAEAAIQQt9ut\nbnl58rdmPFFS17w8ffBBrgyjMny8/ba0dSvhA4HBHzMAAIAQsiYjo2LalT+mJ1VXXFEWT1q3lv7z\nH6lJk0BVBxBAAAAAwsIhtVBLHarYXrJEGj3axoIQtpiCBQAAEEKGpqRoudNpGZun31aEjwgd1ccf\nf0H4gG3ogAAAAIQQl8ulx+Pjlevx6Dy1UowOVOx7SL/VBtcmXXpplo0VItzRAQEAAAgxczMzdc3Z\n71vCx59m2xKfAAAgAElEQVRiOmmDa5PmZmbaWBlABwQAACCkHD4sOZ0xki6XJLVx7tXtNz6tK1Pf\nVFJSkr3FASKAAAAAhIxbb5VeeKFy+//9P6lz57aSnratJsAbAQQAAKCBKy6WmjWr3I6NLXvCORCM\nWAMCAADQgE2dag0f335L+EBwowMCAADQAJ04ITVubB0zTXtqAWqDDggAAEAD8/TT1vCRnU34QMNB\nBwQAAKCBKC2VIiKsYwQPNDR0QAAAABqAV16xho8PPyR8oGGiAwIAABDETFNyOHzHgIaKDggAAECQ\nWrLEGj5WrSJ8oOGjAwIAABCEDMO6TfBAqKADAgAAEERWr7aGj4ULCR8ILXRAAAAAgoR316O01HcM\naOjogAAAANjss8+sQeNvfyvrehA+EIrogAAAANjIO2SUlPje9QoIJfzxBgAAsMGXX1rDx+OP+7/l\nLhBq6IAAAAAEWJMm0vHjldvHjkmNG9tXDxBIZGwAAIAA2bKlrOtRHj4mTy7rehA+EE7ogAAAAARA\nXJy0c2fldlGRFBlpXz2AXeiAAAAA1KOdO8u6HuXhY+LEsq4H4QPhig4IACCouN1urcnIkCQNTUmR\ny+WyuSLg9F10kZSTU7ldWCi1bGlfPUAwIIAAAIJCQUGB0pKT1S0vTyM9HknSknnz9
Hh8vOZmZiom\nJsbmCoGa279fatOmcvuaa6SVK+2rBwgmBBAAQFBIS07WNLdbiSeN9fd4lOvxKC05WSuysmyrDagN\n7+d67N8vxcbaUwsQjFgDAgCwndvtVre8PEv4KJcoqWtenrKzswNdFlAr//2vb/gwTcIH4I0AAgCw\n3ZqMjIppV/6M8nj03uLFAawIqJ2zzpLOOadye+PGsvABwBdTsAAAAE7TwYNSdLR1jOABVI8OCADA\ndkNTUrTc6axy/zKnU1empgawIuDUune3ho+sLMIHUBN0QAAAtnO5XHo8Pl65Ho/POpBcSdvi45WU\nlGRHaYCPo0d9n+FB8ABqjg4IACAozM3M1HSXS3c7nVonaZ2ku51OTXe5NDcz0+7yAEnS5Zdbw8ea\nNYQPoLbogAAAgkJMTIxWZGUpOzu7YsF5amoqnQ8EhRMnpMaNrWMED+D0EEAAAEElKSmJ0IGgcsMN\n0oIFldtLlkijR9tXD9DQEUAAAAD8ME3J4fAdA3BmWAMCAADg5e67reHjhRcIH0BdoQMCAABwEn9P\nMwdQd+iAAAAASHrqKWv4eOwxwgdQH+iAAACAsOfd9Sgt9R0DUDfogAAAgLA1d641aNx+e1nXg/AB\n1B86IAAAICx5h4ySEt+7XgGoe/w1AwAAYWX5cmv4SE31f8tdAPWDDggAAAgb3l2PY8d8n3AOoH6R\n9QEAQMhbu9YaPgYOLOt6ED6AwKMDAgAAQpp316OoSIqMtKcWAHRAAABAiMrNtYaP+PiyrgfhA7AX\nHRAAABByvLseBw5IrVrZUwsAKzogAAAgZGzZYg0fTZuWdT0IH0DwoAMCAABCgnfXY88eqW1be2oB\nUDU6IAAAoEH7z398w4dpEj6AYEUA8eO7775TSkqK4uPj1bx5c7Vu3VqXXXaZ3n777RqdX1hYqJtv\nvllt2rRRixYtNGTIEG3YsKGeqwYAIPwYhtSxY+V2Xl5Z+AAQvJiC5ccPP/ygw4cP68Ybb1T79u1V\nVFSkpUuXasSIEXrppZf0+9//vspzTdPUsGHD9M033+jee++V0+nUnDlzNGjQIOXm5io+Pj6A7wQA\ngNBUUCA5ndYxggfQMBimyV/XmjBNU4mJiSouLtZ3331X5XEZGRkaO3asli5dqpEjR0qS8vPzlZCQ\noGHDhmnBggVVnpubm6u+ffsqJydHiYmJdf4eAAAIBe3bl63vKPfVV1Lv3vbVg/DC97UzxxSsGjIM\nQx07dtSPP/5Y7XFLly5V27ZtK8KHJMXGxiolJUVvvfWWjh8/Xt+lAgAQkn76qWzK1cnhwzQJH0BD\nQwCpRlFRkTwej77//nvNmjVLmZmZuuKKK6o9Z8OGDX7TsMvlUlFRkbZs2VJf5QIAELJcLqlFi8rt\nTz5hyhXQULEGpBqTJ0/Wiy++KElyOBy6/vrrNXv27GrP2bNnjy677DKf8Xbt2kmSdu/erR49etR9\nsQAAhKBjx8qe5XEyggfQsNEBqcakSZP0/vvvKz09XcOGDVNJSYmKi4urPefIkSNq6v0vpaRmzZrJ\nNE0dOXKkvsoFACCkXHedNXy8/TbhAwgFdECqkZCQoISEBEnShAkTdNVVV2nEiBFav359ledERkb6\nDSlHjx6VYRiKjIyst3oBAAgFJSVSI69vKAQPIHQQQGph9OjR+sMf/qCtW7eqW7dufo9p166d9py8\nOu5n5WPt27c/5XUmTZqk6Ohoy9i4ceM0bty406gaAICG45ZbpJdeqtx+/XVpwgT76kF4W7RokRYt\nWmQZKywstKma0EEAqYXy6VPV/cHr06ePPv30U5/x9evXKyoqqqKjUp1Zs2ZxWzcAQFgxTcnh8B0D\n7OTvB8Dlt+HF6WMNiB/79+/3GTtx4oTmz5+vyMhIde/eXZK0d+9ebd68WSUlJRXHjR49Wvv27dOy\nZcsqxvLz8/Xmm29qxIgRaty4cf2/AQAAGpBp0
6zh49lnCR9AKKMD4sctt9yigwcPauDAgerQoYP2\n7t2rhQsXavPmzXrmmWcUFRUlSZoyZYrS09O1fft2xcXFSSoLIM8++6wmTpyob7/9VrGxsZozZ45K\nS0v10EMP2fiuAAAIPoZh3SZ4AKGPDogfY8eOVUREhF544QX97//+r2bNmqWOHTtq5cqVuuOOOyqO\nMwxDDq9+scPhUGZmplJTUzV79mzde++9atOmjdauXVvluhEAAMLN889bw8fUqYQPIFwYpslf92BR\nPqcwJyeHNSAAgJDl3fUoLfUda8jcbrfWZGRIkoampMjlctlcEeoS39fOHB0QAAAQEG+8YQ0aN91U\n1vUIlfBRUFCg6/r105JhwzR45kwNnjlTS4YN03X9+qmgoMDu8oCgwRoQAABQ77xDxokTUkSEPbXU\nl7TkZE1zu3Xyz8T7ezzK9XiUlpysFVlZttUGBBM6IAAAoN5kZlrDxzXXlHU9Qi18uN1udcvLk78J\nOYmSuublKTs7O9BlAUGJDggAAKgX3l2P4mKpSRN7aqlvazIyNNLjqXL/KI9H7y1erKSkpABWBQQn\nAggAAGGmvhdJr1snDRhQud23r/TFF3V6CQANGAEEAIAwUVBQoLTkZHXLy6v4af2SefP0eHy85mZm\nKiYm5oyv4d31OHRIatHijF826A1NSdGSefPUv4ouyDKnU6mpqQGuCghOrAEBACBMlC+SnuHxqL+k\n/pJmeDya5nYrLTn5jF77m2+s4eOcc8rWeoRD+JAkl8ulrfHxyvWzL1fStvh4pl8BP6MDAgBAGKjp\nIunT+ZLs3fXIz5ecztMqs0Gbm5mptORkdc3L06ifOyHLnE5t+7nDBKAMAQQAgDBQH4uk/9//k7p0\nsY6F8+ONY2JitCIrS9nZ2Xpv8WJJUmpqKp0PwAsBBAAA1Jp312PnTuncc+2pJdgkJSUROoBqsAYE\nAIAwMDQlRcurmRe1zOnUlTVYJL1vn2/4ME3CB4CaI4AAABAG6mKRdFSU1LZt5famTeE95QrA6WEK\nFgAAYeJ0F0kXFkqtWlnHCB4AThcBBACAMHE6i6QTEqStWyu3s7Oliy6q70oBhDICCAAAYaYmi6SP\nHCmbcnUyuh4A6gJrQAAAgMXgwdbw8cEHhA8AdYcOCAAAkCSdOCE1bmwdI3gAqGt0QAAAYcPtduux\nu+/WY3ffLbfbbXc5QeXXv7aGj6VLCR8A6gcdEABAyCsoKFBacrK65eVVPA18ybx5evznuz/FxMTY\nXKF9TFNyOHzHAKC+0AEBAIS8tORkTXO7NcPjUX9J/SXN8Hg0ze1WWnKy3eXZZtIka/h45RXCB4D6\nRwcEABDS3G63uuXlKdHPvkRJXfPylJ2dfcq7QoUSuh4A7EQHBAAQ0tZkZFRMu/JnlMdT8UyMcPDb\n31rDx1//SvgAEFh0QAAACBOGYd0meACwAx0QAEBIG5qSouVOZ5X7lzmdujI1NYAVBd6f/2wNH1dc\nQfgAYB86IACAkOZyufR4fLxyPR6fdSC5krbFx4f0+g/vrkdJie/6DwAIJP4JAgCEvLmZmZruculu\np1PrJK2TdLfTqekul+ZmZtpdXr14/nlr+Oja1f/icwAINDogAICQFxMToxVZWcrOzq5YcJ6amhqy\nnQ/vrsexY75POAcAuxBAAPjldru1JiNDUtkcepfLZXNFwJlLSkoK2dAhSf/4hzRuXOV2RIR04oR9\n9QCAPwQQABY8MRpomLy7HocPS82b21MLAFSHAALAovyJ0Scv1u3v8SjX41FacrJWZGXZVhsAX++9\nJ111lXWMO1wBCGYsRQNQoaZPjAYQHAzDGj727yd8AAh+BBAAFXhiNNAwfPGF/4cKxsbaUw8A1AZT\nsAAAaEC8g8cPP0hxcfbUAgCngw4IgAo8MRoIXlu3+u96ED4ANDQEEAAVXC6XtsbHK9fPvnB4YjQQ\nrAxDSkio3
P73v1nrAaDhYgoWAIu5mZlKS05W17w8jfp5Pcgyp1Pbfr4NL4DA2b1b6tDBOkbwANDQ\nEUAAWITbE6OBYOU93eqzz6T+/e2pBQDqEgEEgF+h/sRoIFj9+KN09tnWMboeAEIJa0AAAAgShmEN\nH6tWET4AhB46IAAA2Oynn6QWLaxjBA8AoYoOCAAANjIMa/h47TXCB4DQRgcEAAAbnDghNW5sHSN4\nAAgHdEAAAAgww7CGjz/+kfABIHzQAQEAIEBMU3I4fMcAIJzQAQEAIADi463hY/hwwgeA8EQHBACA\neub9UEGCB4BwRgcEAIB6ctVV1vDRvTvhAwDogAAAUA+8ux6lpb5jABCO6IAAAFCHbr7ZGjQaNy7r\nehA+AKAMHRAAAOqId8g4cUKKiLCnFgAIVnRAAAA4Q4884n+hOeEDAHzRAQEA4Ax4B4+iIiky0p5a\nAKAhoAMCAMBpeOUV/10PwgcAVI8OCAAAteQdPDweKSbGnloAoKEhgABAmHO73VqTkSFJGpqSIpfL\nZXNFwWvVKmnECOsYz/UAgNohgABAmCooKFBacrK65eVppMcjSVoyb54ej4/X3MxMxfAjfQvvrseO\nHVLHjvbUAgANGQEEAMJUWnKyprndSjxprL/Ho1yPR2nJyVqRlWVbbcHk88+l/v2tY3Q9AOD0sQgd\nAMKQ2+1Wt7w8S/golyipa16esrOzA11W0DEMa/j45hvCBwCcKQIIAIShNRkZFdOu/Bnl8ei9xYsD\nWFFw2bTJ/x2ueva0px4ACCUEEAAATmIY0oUXVm5//DFdDwCoSwQQAAhDQ1NStNzprHL/MqdTV6am\nBrAi++3e7b/rceml9tQDAKGKAAIAYcjlcmlrfLxy/ezLlbQtPl5JSUmBLss2hiF16FC5vXQpXQ8A\nqC/cBQsAwtTczEylJSera16eRv28HmSZ06ltP9+GNxwUFkqtWlnHCB4AUL8IIAAQpmJiYrQiK0vZ\n2dkVC85TU1PDpvPhPd1qzhzp1lvtqQUAwgkBBADCXFJSUtiEDkkqLpaaNbOO2d314Gn0AMIJAQQA\nEDa8ux5Tp0qPPmpPLRJPowcQnliE7scXX3yh2267TT179lSLFi3UqVMnpaamauvWrac8d/78+XI4\nHD6/IiIi9N///jcA1QMAvJWU+L/DlZ3hQ6p8Gv0Mj0f9JfWXNMPj0TS3W2nJyfYWBwD1hA6IH08+\n+aTWrVunMWPGqHfv3tq7d69mz56txMREZWVlqXv37tWebxiGHnnkEXXu3Nky3sp7pSMAoN5FR0sH\nD1Zu/+Y30vz59tVTrqZPow+n6XEAwgMBxI/Jkydr0aJFatSo8uNJSUlRr1699MQTTyg9Pf2Ur3H1\n1VcrMdHffysAgEAwTcnh8B0LFjV9Gj0BBECoYQqWHxdffLElfEhS165d1aNHD23cuLHGr3P48GGV\nlpbWdXkAgFO46CJr+Bg4MLjCBwCEMwJILezbt0+xsbGnPM40TQ0aNEgtW7ZUVFSUrr32Wm3bti0A\nFQIADEPKyancNk3po4/sq6cqPI0eQLgigNTQggULtGvXLo0dO7ba46KiojRx4kTNmTNHK1as0H33\n3acPPvhAAwYM0K5duwJULQCEn8RE60LzmJjg7nrwNHoA4cowzWD+5zk4bNq0SRdffLF69eqljz/+\nWIb3rVRO4bPPPtPAgQN1yy23aM6cOVUel5ubq759+yonJ4f1IwBQC97/LJeU+K7/CEblt+Gt6mn0\n3IYXCD58XztzLEI/hX379mn48OE6++yztWTJklqHD0kaMGCA+vXrp/fff78eKgSA8JWaKv38/L4K\nDenHauH+NHoA4YkAUo2DBw/q6quv1sGDB/Xpp5+qbdu2p/1aHTt21JYtW2p07KRJkxQdHW0ZGzdu\nnMaNG3fa1weAUOP986CjR6WmTe2p5UyF29PogYZi0aJFWrRokWWssLDQpmp
CBwGkCsXFxfrVr36l\nbdu26YMPPtD5559/Rq/3/fffq3Xr1jU6dtasWbT0AKAK990nPfWUdawhdT0ANBz+fgBcPgULp48A\n4kdpaalSUlKUlZWllStXyuVy+T1u7969KiwsVNeuXRURESFJys/P97lT1j//+U/l5OTozjvvrPfa\nASCUeXc9fvyx7EGDAICGgwDix1133aVVq1ZpxIgRys/P18KFCy37x48fL0maMmWK0tPTtX37dsXF\nxUmS+vfvr1/+8pe66KKLFB0drZycHL322mvq1KmT7r///oC/FwAIBX/7m3TbbdYxuh4A0DARQPz4\n6quvZBiGVq1apVWrVvnsLw8ghmHI4XWblbFjx+qdd97RmjVrVFRUpHbt2umWW27RtGnTajwFCwBQ\nybvr8Z//SB062FMLAODMcRveIMJt3QCg0vLl0qhR1jH+xwJgN76vnTk6IACAoOPd9fj2W6l7d3tq\nAQDUrQbwmCYAQLj47DPf8GGahA8ACCV0QAAgwNxut9b8/PS8oSkpVd5pL9x4B4+PP5YuvdSeWgAA\n9YcAAgABUlBQoLTkZHXLy9NIj0eStGTePD0eH6+5mZmKiYmxuUJ7bNokXXihdYy1HgAQugggABAg\nacnJmuZ26+Qli/09HuV6PEpLTtaKrCzbarOLd9djyRJp9Gh7agEABAZrQAAgANxut7rl5cnf/VIS\nJXXNy1N2dnagy7LNnj3+13oQPgAg9BFAACAA1mRkVEy78meUx6P3Fi8OYEX2MQypffvK7eeeY8oV\nAIQTpmABAALi0CGpZUvrGMEDAMIPHRAACIChKSla7nRWuX+Z06krU1MDWFFgGYY1fNx1F+EDAMIV\nHRAACACXy6XH4+OV6/H4rAPJlbQtPl5JSUl2lFavjh2Tmja1jhE8ACC8EUAAIEDmZmYqLTlZXfPy\nNOrn9SDLnE5t+/k2vKHGe5H5qFHS0qX21AIACB4EEAAIkJiYGK3IylJ2dnbFgvPU1NSQ63yYpuRw\n+I4BACARQAAg4JKSkkIudJTz7nr06iV9/bU9tQAAghMBBABQJ/w91wMAAG/cBQsAcEbOPdcaPho1\nInwAAKpGBwQAcNq8ux6lpb5jAACcjA4IAKDWLr/c/5QrwgcA4FTogAAAasU7ZJw4IUVE2FMLAKDh\noQMCAKiRm27y3/UgfAAAaoMOCADglLyDx08/SVFR9tQCAGjY6IAAAKr02GP+ux6EDwDA6aIDAgDw\nyzt47N8vxcbaUwsAIHTQAQEAWMyb57/rQfgAANQFOiAAgAreweP776XzzrOnFgBAaKIDAgDQ6tX+\nux6EDwBAXaMDAgBhzjt45ORIiYn21AIACH0EEAAIU999J/XoYR0zTXtqAQCED6ZgAUAYMgxr+Pjk\nE8IHACAw6IAAQBj5z3+kjh2tYwQPAEAg0QEBgDBhGNbwsWIF4QMAEHh0QAAgxB04IMXEWMcIHgAA\nu9ABAYAQZhjW8PHCC4QPAIC96IAAQAg6elSKjLSOETwAAMGADggAhBjDsIaPBx8kfAAAggcdEAAI\nESUlUiOvf9UJHgCAYEMHBABCQPPm1vAxcSLhAwAQnOiAAEADZpqSw+E7BgBAsKIDAgANVO/e1vAx\naBDhAwAQ/OiAAEADZBjWbYIHAKChoAMCAA3IyJHW8BEXR/gAADQsdEAAoIHw7nqUlvqOAQAQ7OiA\nAECQu+MO/1OuCB8AgIaIDggABDHvkHHsmNS4sT21AABQF+iAAEAQmjHDf9eD8AEAaOjogABAkPEO\nHocOSS1a2FMLAAB1jQ4IAASJBQv8dz0IHwCAUEIHBACCgHfw+O9/pdat7akFAID6RAcEAGy0erX/\nrgfhAwAQquiAAIBNvINHXp7UpYs9tQAAECh0QAAgwDZu9N/1IHwAAMIBAQQAAsgwpO7dK7dzcsrC\nBwAA4YIpWAAQADt3SnFx1jGCBwAgHNE
BAYB6ZhjW8JGVRfgAAIQvOiAAUE8KCiSn0zpG8AAAhDs6\nIABQDwzDGj4yMwkfAABIdEAAoE643W6tycjQseONNf35v1r2ETwAAKhEAAGAM1BQUKC05GR1y8vT\n857/6JiaVex74YXDuuWWFjZWBwBA8GEKFgCcgbTkZN3vztXTnnxL+MiRocy5l9tYGQAAwYkAAgCn\nye1265OcdF2s4xVjz2iSTBlKlNQ1L0/Z2dn2FQgAQBBiChYAnAbTlPr1c1nHZH28+SiPR+8tXqyk\npKRAlgYAQFCjAwIAtXT11ZLjpH8979FTPuEDAAD4RwcEAGrB8MoZdztj9ZTH4/fYZU6nUlNTA1AV\nAAANBx0QAKiBm26yho8JE8qmYW2Nj1eun+NzJW2Lj2f6FQAAXuiAAMApeHc9Sksrx+ZmZiotOVld\n8/I06udOyDKnU9vi4zU3MzPAlQIAEPwIIABQhWnTpEceqdweOFD66CPrMTExMVqRlaXs7Gy9t3ix\nJCk1NZXOBwAAVSCAAIAf3l2PkhLrwnNvSUlJhA4AAGqANSB+fPHFF7rtttvUs2dPtWjRQp06dVJq\naqq2bt1ao/MLCwt18803q02bNmrRooWGDBmiDRs21HPVAOrCihXW8NGpU9laj+rCBwAAqDk6IH48\n+eSTWrduncaMGaPevXtr7969mj17thITE5WVlaXu3btXea5pmho2bJi++eYb3XvvvXI6nZozZ44G\nDRqk3NxcxcfHB/CdAKgN767HsWNS48b21AIAQKgigPgxefJkLVq0SI0aVX48KSkp6tWrl5544gml\np6dXee6SJUv0+eefa+nSpRo5cqQkacyYMUpISNCDDz6oBQsW1Hv9AGrnww+lwYMrty+9VPr4Y9vK\nAQAgpBFA/Lj44ot9xrp27aoePXpo48aN1Z67dOlStW3btiJ8SFJsbKxSUlK0cOFCHT9+XI35kSoQ\nNLy7HkVFUmSkPbUAABAOmNVcC/v27VNsbGy1x2zYsEGJiYk+4y6XS0VFRdqyZUt9lQegFnJzreHj\nvPPK1noQPgAAqF8EkBpasGCBdu3apbFjx1Z73J49e9SuXTuf8fKx3bt310t9AGrOMKS+fSu3DxyQ\nvv/evnoAAAgnBJAa2LRpk2677TYNGDBAv/nNb6o99siRI2ratKnPeLNmzWSapo4cOVJfZQI4ha1b\nrV2Pxo3Luh6tWtlXEwAA4YY1IKewb98+DR8+XGeffbaWLFkiw3vCuJfIyEgVFxf7jB89elSGYSiS\n+R2ALbz/6u7ZI7Vta08tAACEMwJINQ4ePKirr75aBw8e1Keffqq2Nfi20q5dO+3Zs8dnvHysffv2\np3yNSZMmKTo62jI2btw4jRs3roaVAyi3a5d07rnWMdO0pxYAQMOyaNEiLVq0yDJWWFhoUzWhgwBS\nheLiYv3qV7/Stm3b9MEHH+j888+v0Xl9+vTRp59+6jO+fv16RUVFKSEh4ZSvMWvWLL8L2QHUjnfX\nIy9P6tLFnloAAA2Pvx8A5+bmqu/JCwlRa6wB8aO0tFQpKSnKysrSm2++KZfL5fe4vXv3avPmzSop\nKakYGz16tPbt26dly5ZVjOXn5+vNN9/UiBEjuAUvEAAFBb7hwzQJHwAABAM6IH7cddddWrVqlUaM\nGKH8/HwtXLjQsn/8+PGSpClTpig9PV3bt29XXFycpLIA8uyzz2rixIn69ttvFRsbqzlz5qi0tFQP\nPfRQoN8KEHbOPbds2lW5L7+UfvEL++oBAABWBBA/vvrqKxmGoVWrVmnVqlU++8sDiGEYcjisTSSH\nw6HMzEzdc889mj17to4cOSKXy6X09HR169YtIPUD4einn6QWLaxjrPUAACD4GKbJf9HBonxOYU5O\nDmtAgFpwuaTs7MrtTz6RLrnEvnoAAKGL72tnjg4IgAbr2DHJ+7E7/EgFAIDgxiJ0AA3SyJHW8LFq\nFeE
DAICGgA4IgAaltFSKiLCOETwAAGg46IAAaDBuvdUaPtLTCR8AADQ0dEAABD3TlLxuOEfwAACg\ngaIDAiCoPfSQNXzMmkX4AACgIaMDAiBo+XuaOQAAaNjogAAIOv/3f9bw8ec/Ez4AAAgVdEAABBXv\nrkdpqe8YAABouOiAAAgKb7xhDRq//31Z14PwAQBAaKEDAsB23iHjxAnfZ30AAIDQQAcEgG0yM63h\n41e/Kut6ED4AAAhddEAA2MK761FcLDVpYk8tAAAgcOiAAAiozz+3ho9f/rKs60H4AAAgPNABARAw\n3l2PQ4ekFi3sqQUAANiDDgiAevfvf1vDR+vWZV0PwgcAAOGHDgiAeuXd9di/X4qNtacWAABgPzog\nAOrF9u2+4cM0CR8AAIQ7OiAA6px38NixQ+rY0Z5aAABAcCGAAKgz+/ZJbdtax0zTnloAAEBwYgoW\ngDrRvLk1fGzaRPgAAAC+6IAAOCOFhVKrVtYxggcAAKgKHRAAp+38863hIzub8AEAAKpHBwRArR09\nKnwKCh4AACAASURBVEVGWscIHsD/b+/Oo6Oq7z6Of2aQPWwZSCGUNSwqigElVXCJqMCgDRYhccGF\nVKTy0FjqbnvgkQii0NKj1apUBBQRkISiEhUXtDxiJiQcW0WEjKxhM2EnLCG5zx9jMo4TNEDm/mYy\n79c5nJPfTTL5cI2c+dzvXQAANcEEBMBpGTgwsHx88AHlAwAA1BwTEAA1cvKkVL9+4DaKBwAAOF1M\nQAD8rFGjAsvHm29SPgAAwJlhAgLglCxLcjqDtwEAAJwpJiAAqnX//YHl46WXKB8AAODsMQEBEMTh\nCFxTPAAAQG1hAgKgylNPBZaPqVMpHwAAoHYxAQEgiakHAACwBxMQIMr985+B5WPCBMoHAAAIHSYg\nQBT78dSjvDz4rlcAAAC1ibcaQBTKygosH7feWv0tdwEAAGobExAgyvx46lFWJp3DvwQAAMAmHO8E\nosRHHwWWj6uv9k09KB8AAMBOvPVAVPF4PFqxaJEk6brUVCUlJRlOZI8fTz1KS6XGjc1kAQAA0Y0C\ngqiwd+9epbvd6u716jclJZKkxXPmaGpCgmbn5Cg2NtZwwtD46ivpggv86+7dpQ0bzOUB7BCtBxoA\nIFJQQBAV0t1uTfR41PcH2/qXlKigpETpbreW5uYayxYqPXsGlo39+6UWLczlAUItWg80AECk4RoQ\n1Hkej0fdvd6A8lGpr6RuXq/y8vLsjhUyO3b4TrmqLB+33OK71oPygbqu8kDD9JIS9ZfUX9L0khJN\n9HiU7nabjgcA+B4FBHXeikWLqo6GVmd4SYneX7jQxkShc8UVUvv2/vW+fdLrr5vLA9gl2g40AEAk\no4AAdUBJiW/qsWqVbz1okG/q0bKl2VyAXaLpQAMARDoKCOq861JTle1ynfLzWS6XBqWl2Ziodg0f\nLrVu7V/v3i299565PAAAAD+FAoI6LykpSRsTElRQzecKJBUmJKhfv352xzprhw75ph7Z2b51nz6+\nqUdcnNlcgAl1/UADANQl3AULUWF2To7S3W5183o1/PvTNLJcLhV+f3ecSDNmjPTPf/rXW7ZIHTua\ny1PXcBvXyJOUlKSpCQkqKCkJug4kkg80AEBdRAFBVIiNjdXS3Fzl5eVVnQeelpYWcW9Ijh0LfIBg\nfLxUVGQuT13DbVwjW1070AAAdRUFBFGlX79+EVc6Kj38sPT00/71+vW+Z32g9kTj82LqkrpyoAEA\n6joKCBDmysqkBg3863r1pJMnzeWpq2p6G1fezIa/SD7QAADRgIvQgTD21FOB5aOggPIRKtzGFQAA\nezABAcJQRYVv0vFDlmUmCwAAQG1iAgKEmRdfDCwfn35K+bADt3EFAMAeTECAMGFZktMZvA324Dau\nAADYgwkIEAYWLgwsH8uXUz5MmJ2To8lJSXrA5dJnkj6T9IDLpclJS
dzGFQCAWsIEBDDM4QhcUzzM\n4TauAACEHgUEMGT5cun66/3rhQul1FRzeeDHbVwBAAgdCghgwI+nHhUVwdsAAADqIq4BAWz06aeB\nReOll3ynXIWifHg8Hk154AFNeeABeTye2v8BAAAAZ4AJCGCTH5eM8vLgu17Vhr179yrd7VZ3r7fq\nwXqL58zR1IQEzc7JUWxsbO3/UAAAgBpiAgKEWEFBYPl46qnqb7lbW9Ldbk30eDS9pET9JfWXNL2k\nRBM9HqW73aH5oQAAADXEBAQIIacz8K5WZWXSOSH8v87j8ai71xv0HAtJ6iupm9ervLw8LrAGAADG\nMAEBQmD9et/Uo7J8PPKI7+NQlg9JWrFoUdVpV9UZXlJSdXtZAAAAE5iAALUsPl7audO/PnpUatTI\nXB4AAIBwwgQEqCVbt/qmHpXlY8wY39TDzvJxXWqqsl2uU34+y+XSoLQ0+wIBAAD8CAUEqAWJiVKn\nTv71oUO+W+zaLSkpSRsTElRQzecKJBUmJHD9BwAAMIpTsICzsHu31Latfz18uLRkibk8kjQ7J0fp\nbre6eb0a/v31IFkulwq/vw0vAACASUxATuHIkSOaNGmS3G63XC6XnE6n5s2bV6PvnTt3rpxOZ9Cf\nevXqac+ePSFODrsMGhRYPkpKzJcPSYqNjdXS3Fyl5eTo4/vv18f336+0nBwtzc3lGSAAAMA4JiCn\nUFxcrMzMTHXq1EmJiYlauXLlaX2/w+FQZmamOnfuHLC9ZcuWtRcSRuzfL7Vq5V9fcYXvCefhpl+/\nfpxuBQAAwg4F5BTi4+O1a9cuxcXFKT8//4zeyA0ZMkR9+1b3RAZEqltvlRYs8K937JDatTOXBwAA\nINJQQE6hfv36iouLO+vXOXz4sJo0aSJnqB57DVscOSLFxPjXPXv6nvUBAACA08O74hCxLEvJyclq\n3ry5mjRpomHDhqmwsNB0LJyB3/8+sHx4vZQPAACAM8UEJASaNGmi0aNH6+qrr1bz5s2Vn5+vv/zl\nLxowYIAKCgrUvn170xFRAydOSA0b+tctW0r79pnLAwAAUBcwAQmBkSNH6uWXX9aoUaOUkpKixx9/\nXO+9956Ki4s1ZcoU0/FQA//7v4Hl48svKR8AAAC1gQmITQYMGKBf/epX+uCDD0xHwU8oL5fO+dH/\nFZZlJgsAAEBdRAGxUYcOHbRhw4af/boJEyaoRYsWAdtuueUW3XLLLaGKBkl/+5s0YYJ/nZsrJSWZ\nywMAAMxasGCBFvzw9peSDhw4YChN3UEBsdG3336rNm3a/OzXzZw5k9v32siypB/fpIypBwAAqO4A\ncEFBgS6++GJDieoGrgE5S7t27dI333yj8vLyqm3FxcVBX7d8+XLl5+fL7XbbGQ8/Y86cwPLx4YeU\nDwAAgFBiAvITnnvuOe3fv19FRUWSpGXLlmnbtm2SpIyMDDVr1kyPPPKI5s2bp82bN6tjx46SpP79\n+6tPnz665JJL1KJFC+Xn5+uVV15Rp06d9Oijjxr7+8CPqQcAAIAZFJCfMGPGDG3dulWS5HA4lJ2d\nrezsbEnS7bffrmbNmsnhcAQ9ZPDmm2/WO++8oxUrVqi0tFTt2rXT2LFjNXHixBqdgoXQysqSbrrJ\nv166VBo2zFweAACAaOKwLI77hovKcwrz8/O5BiREHI7ANb/9AADgdPB+7exxDQiiQn5+YPmYN4/y\nAQAAYAKnYKHO69pV2rTJv66oCJ6EAAAAwB5MQFBnrVvnKxqV5SMryzf1oHwAAACYwwQEddKvfiV5\nPP51eXnwXa8AAABgP96SoU759lvfhKOyfMydW/0tdwEAAGAGExDUGW639O67/nVZmXQOv+EAAABh\nhePCiHhFRb6pR2X5ePZZ39SD8gEAABB+eIuGiHbbbdLrr/vXx45JDRuaywMAAICfxgQEEamkxDf1\nqCwfU6f6ph6UDwAAgPDGBAQR5
+WXpbvv9q8PH5aaNjWXBwAAADVHAUHEKC0NLBpz50p33GEuDwAA\nAE4fBQQRYcEC6dZb/WumHgAAAJGJa0AQ1o4fl1q08JePF1/0XetB+QAAAIhMTEAQtv71L+nGG/3r\n/ft9ZQQAAACRiwkIwk5ZmdShg798/PWvvqkH5QMAACDyMQFBWHn/fWnwYP+6uFhyuczlAQAAQO1i\nAoKwUF4uXXihv3w8/rhv6kH5AAAAqFuYgMC4f/9buvJK/3rnTqltW3N5AAAAEDpMQGCMZUmXXeYv\nHw8+6NtG+QAAAKi7mIDAiDVrpH79/OstW6SOHc3lAQAAgD2YgMBWliW53f7y8bvf+bZRPgAAAKID\nExDY5r//lXr39q83bpS6dTuz1/J4PFqxaJEk6brUVCUlJdVCQgAAAIQaBQS2SEuTvu8LuvVWaf78\nM3udvXv3Kt3tVnevV78pKZEkLZ4zR1MTEjQ7J0exsbG1lBgAAAChQAFBSG3YIPXs6V9/+aXUq9eZ\nv166262JHo/6/mBb/5ISFZSUKN3t1tLc3DN/cQAAAIQc14AgZO6+218+fv1rqaLi7MqHx+NRd683\noHxU6iupm9ervLy8M/8BAAAACDkmIKh1mzdLXbr412vWSBdffPavu2LRoqrTrqozvKRE7y9cqH4/\nvL0WAAAAwgoTENSqP/7RXz6uvNI39aiN8gEAAIC6gQKCWrFjh+RwSDNn+tb/93/SJ5/4ttWW61JT\nle1ynfLzWS6XBqWl1d4PBAAAQK2jgOCsTZoktW/v+7hPH6m8XOrfv/Z/TlJSkjYmJKigms8VSCpM\nSOD0KwAAgDDHNSA4Y999J8XF+dcffCBdc01of+bsnBylu93q5vVq+PfXg2S5XCr8/ja8AAAACG8U\nEJyR6dOlhx7yfdy1q/TNN9I5Nvw2xcbGamlurvLy8vT+woWSpLS0NCYfAAAAEYICgtOyf7/UqpV/\n/dZb0g032J+jX79+lA4AAIAIxDUgqLF//MNfPlq3lo4fN1M+AAAAELmYgOBnHT4sNWvmXy9cKKWm\nmssDAACAyEUBiVAej0crFi2S5Ls9bVJSUkh+zty50l13+T5u0MB3ClbjxiH5UQAAAIgCFJAIs3fv\nXqW73eru9VY9FXzxnDma+v1doGJjY2vl5xw9KrVsKZ044VvPni2NHl0rLw0AAIAoRgGJMOlutyZ6\nPOr7g239S0pUUFKidLdbS3Nzz/pnLF4ceIrVoUNSTMxZvywAAADAReiRxOPxqLvXG1A+KvWV1M3r\nVV5e3hm//okTUps2/vLx979LlkX5AAAAQO2hgESQFYsWVZ12VZ3hJSVVz8Y4Xe+8IzVsKBUX+9Z7\n90r/8z9n9FIAAADAKVFAotzJk1L37v7b6U6b5pt6/PBZHwAAAEBtoYBEkOtSU5Xtcp3y81kulwal\npdX49T76SKpfXyos9K337JEefvhsUwIAAACnRgGJIElJSdqYkKCCaj5XIKkwIaFGTwevqJD69JGu\nuca3/vOffVOPNm1qNS4AAAAQhLtgRZjZOTlKd7vVzevV8O+vB8lyuVT4/W14f87q1VL//v719u1S\n+/ahSgsAAAAEooBEmNjYWC3NzVVeXl7VBedpaWk/O/mwLGngQGnlSt/6D3+QZs4McVgAAADgRygg\nEapfv341Ot1Kktaulfr+4N69mzZJnTuHJhcAAADwU7gGpA6zLGnYMH/5SE/3baN8AAAAwBQmIHXU\nunVSr17+9fr1Us+e5vIAAAAAEhOQOun22/3lY8QI39SD8gEAAIBwwASkDvF6pW7d/OsvvpB69zaX\nBwAAAPgxJiB1xLhx/vIxeLDvWR+UDwAAAIQbJiARbts2qWNH/zo3V0pKMpcHAAAA+ClMQCLYI4/4\ny8ell0rl5ZQPAAAAhDcKSITat0966infx5984nvCuZP/mgAAAAhznIIVoVq1kv77X+m886R69Uy
n\nAQAAAGqGAhLBLrjAdAIAAADg9HDSDgAAAADbUEAAAAAA2IYCAgAAAMA2FBAAAAAAtqGAAAAAALAN\nBQQAAACAbSggAAAAAGxDAQEAAABgGwoIAAAAANtQQAAAAADYhgICAAAAwDYUkFM4cuSIJk2aJLfb\nLZfLJafTqXnz5tX4+w8cOKB77rlHcXFxiomJ0cCBA7V27doQJgYAAADCHwXkFIqLi5WZman169cr\nMTFRDoejxt9rWZaGDh2qN954QxkZGZo+fbq+++47JScny+v1hjA1AAAAEN4oIKcQHx+vXbt2adOm\nTXr66adlWVaNv3fx4sVavXq15s6dqz//+c+699579fHHH6tevXqaNGlSCFPXTQsWLDAdIeywT4Kx\nT4KxTwKxP4KxT4KxT4KxT1DbKCCnUL9+fcXFxZ3R9y5ZskRt27bVb37zm6ptrVu3Vmpqqv71r3+p\nrKystmJGBf7hC8Y+CcY+CcY+CcT+CMY+CcY+CcY+QW2jgITA2rVr1bdv36DtSUlJKi0t1YYNGwyk\nAgAAAMyjgITAzp071a5du6Dtldt27NhhdyQAAAAgLFBAQuDo0aNq2LBh0PZGjRrJsiwdPXrUQCoA\nAADAvHNMB6iLGjdurOPHjwdtP3bsmBwOhxo3blzt91UWk6+//jqk+SLNgQMHVFBQYDpGWGGfBGOf\nBGOfBGJ/BGOfBGOfBGOfBKp8n8YB5TNHAQmBdu3aaefOnUHbK7fFx8dX+32bN2+WJI0aNSpk2SLV\nxRdfbDpC2GGfBGOfBGOfBGJ/BGOfBGOfBGOfBNu8ebMGDBhgOkZEooCEQGJiolatWhW0/fPPP1eT\nJk3Uo0ePar9v8ODBeu2119S5c+dTTkkAAABgztGjR7V582YNHjzYdJSIRQE5S7t27dKBAwfUrVs3\n1atXT5I0YsQILVmyRFlZWRo+fLgk34MN33zzTaWkpKh+/frVvlbr1q1122232ZYdAAAAp4/Jx9lx\nWKfzhL0o89xzz2n//v0qKirSCy+8oOHDh6tPnz6SpIyMDDVr1kx33XWX5s2bp82bN6tjx46SpIqK\nCl1++eX66quv9MADD6h169Z6/vnntW3bNuXl5al79+4m/1oAAACAMRSQn9ClSxdt3bq12s9t2rRJ\nHTt21OjRo/Xqq6/q22+/rSogku+CrQcffFBLly7V0aNHlZSUpBkzZlQVGAAAACAaUUAAAAAA2Ibn\ngAAAAACwDQXEsDVr1mj8+PG64IILFBMTo06dOiktLU0bN240Hc2YdevWKTU1VQkJCWratKnatGmj\nq666Sm+//bbpaGFjypQpcjqd6t27t+koxnzyySdyOp1Bf+rVqyePx2M6njEFBQVKSUmRy+VS06ZN\ndeGFF+rvf/+76VjGjB49utrfk8rflepumV7XFRYW6uabb1aHDh3UtGlTnXfeecrMzIzqZxrk5+dr\nyJAhatGihZo3b67Bgwfriy++MB3LFkeOHNGkSZPkdrvlcrnkdDo1b968ar92/fr1GjJkiJo1ayaX\ny6U77rhDxcXFNicOvZruk7y8PI0bN06XXHKJGjRoUHUzIvw87oJl2FNPPaXPPvtMI0eOVO/evbVr\n1y49++yz6tu3r3Jzc3X++eebjmi7LVu26PDhw7rrrrsUHx+v0tJSLVmyRCkpKXrppZd09913m45o\nVFFRkZ588knFxMSYjhIW/vCHP+iSSy4J2NatWzdDacx6//33lZKSor59+2rixImKiYmR1+vV9u3b\nTUcz5ne/+52uu+66gG2WZWns2LHq2rWr2rVrZyiZGdu3b1e/fv3UqlUr/f73v1dsbKxWr16tSZMm\nqaCgQNnZ2aYj2q6goEBXXHGFOnbsqMcff1zl5eV6/vnnlZycLI/HU+dvHFNcXKzMzEx16tRJiYmJ\nWrlyZbVfV1RUpCuuuEKtWrXStGnTdOjQIU2fPl1ffvmlPB6
Pzjmn7rylrOk+Wb58uWbPnq3evXsr\nISFBGzZssDdoJLNg1OrVq62ysrKAbRs3brQaNWpk3X777YZShZ+KigorMTHROu+880xHMS4tLc26\n9tprreTkZOvCCy80HceYlStXWg6Hw1qyZInpKGHh4MGDVtu2ba0RI0aYjhL2Vq1aZTkcDmvatGmm\no9huypQpltPptL7++uuA7XfeeafldDqt/fv3G0pmztChQy2Xy2Xt27evatvOnTutZs2aRcX/TydO\nnLB2795tWZZlrVmzxnI4HNbcuXODvu7ee++1mjZtam3fvr1q2wcffGA5HA5r1qxZtuW1Q033yZ49\ne6xjx45ZlmVZ48ePt5xOp605IxmnYBl26aWXBh016Natm3r16qWvv/7aUKrw43A41KFDB+3fv990\nFKM+/fRTZWVl6W9/+5vpKGHl8OHDKi8vNx3DqPnz52vPnj2aMmWKJKm0tFQW9xip1vz58+V0OnXL\nLbeYjmK7Q4cOSZLi4uICtrdt21ZOp1MNGjQwEcuoVatW6dprr1XLli2rtrVt27bq1N/S0lKD6UKv\nfv36Qb8P1cnKytINN9yg9u3bV2275ppr1KNHDy1atCiUEW1X033Spk0bNWzY0IZEdQ8FJEzt3r1b\nrVu3Nh3DqNLSUpWUlOjbb7/VzJkzlZOTo2uvvdZ0LGMqKiqUkZGhMWPGqFevXqbjhI3Ro0erefPm\natSokQYOHKj8/HzTkYz48MMP1bx5c23btk3nnnuuYmJi1Lx5c40bN07Hjx83HS9snDx5UosXL9aA\nAQMCbp0eLZKTk2VZltLT0/XFF19o+/btWrhwoV544QXdd999aty4semItjt+/Hi1f+8mTZroxIkT\n+vLLLw2kCi87duzQnj17gk53laSkpCStXbvWQCpEsrpzwl4d8tprr6moqEhPPPGE6ShG3X///Xrx\nxRclSU6nUzfddJOeffZZw6nM+cc//qGtW7fqo48+Mh0lLDRo0EAjRozQ0KFD1bp1a61bt04zZszQ\nlVdeqc8++0wXXXSR6Yi22rhxo8rKyjRs2DCNGTNG06ZN08qVK/XMM8/owIEDmj9/vumIYeHdd99V\nSUmJbrvtNtNRjBg8eLAyMzM1depULVu2TJJvwvynP/1JkydPNpzOjJ49e+rzzz+XZVlyOBySpLKy\nMuXm5kryXfsQ7Spv1lDdNVPt2rXT3r17VVZWpvr169sdDRGKAhJm1q9fr/Hjx2vAgAG64447TMcx\nasKECRo5cqR27NihRYsWqby8PGqP5O7du1eTJk3SxIkTFRsbazpOWLjssst02WWXVa1vuOEG3XTT\nTerdu7ceffRRLV++3GA6+x0+fFhHjx7Vvffeq5kzZ0qSbrzxRh0/flwvvfSSJk+erISEBMMpzXv9\n9dfVoEEDjRw50nQUYzp37qyrrrpKI0aMUGxsrN555x1NmTJFbdu21bhx40zHs924ceM0btw4paen\n66GHHlJ5ebmeeOIJ7dq1S5Ki+u5glSr3QXWnGzVq1KjqayggqClOwQoju3fv1vXXX69WrVpp8eLF\nVUdiolWPHj00cOBAjRo1SsuWLdOhQ4eUkpJiOpYRf/rTn+RyuTR+/HjTUcJaQkKChg0bpo8//jjq\nrn+oPIXk5ptvDth+6623yrIsrV692kSssHLkyBEtW7ZMQ4YMUatWrUzHMeKNN97QPffco5dfflnp\n6em68cYbNWvWLN155516+OGHtW/fPtMRbTd27Fg99thjWrBggXr16qWLLrpImzZt0kMPPSRJ3HFQ\n/n9fqjsIeOzYsYCvAWqCAhImDh48qCFDhujgwYN699131bZtW9ORws6IESOUl5cXdc9IKSws1KxZ\ns5SRkaGioiJt2bJFmzdv1rFjx1RWVqYtW7ZE5ZuGU+nQoYNOnDihI0eOmI5iq/j4eEnSL37xi4Dt\nlRdS8jsiZWdn6+jRo1F
7+pXkO5Wzb9++QafSpKSkqLS0NGrP5c/MzNTu3bu1atUq/ec//1Fubm7V\njS169OhhOJ15lb8v1T03Z+fOnYqNjWX6gdNCAQkDx48f1w033KDCwkK988476tmzp+lIYalyBHzg\nwAHDSexVVFQky7KUkZGhLl26qEuXLuratatyc3P1zTffqGvXrsrMzDQdM2x4vV41atQo6o5aXnzx\nxZKCz1ffsWOHJN/dWqLd/PnzFRMTo1//+temoxize/fuau8YV1ZWJsl3kX60atGihfr37191k48V\nK1bol7/8pc4991zDycyLj49XmzZttGbNmqDPeTweJSYmGkiFSEYBMayiokKpqanKzc3Vm2++qaSk\nJNORjPvuu++Ctp08eVJz585V48aNo+7hjBdccIGys7OVnZ2tpUuXVv3p1auXOnXqpKVLl+q3v/2t\n6Zi2q+7pu1988YXeeustDR482EAis1JTU2VZll5++eWA7bNmzVL9+vWVnJxsJliYKC4u1ocffqjh\nw4dXnbMejXr06KG1a9eqsLAwYPvrr78up9Op3r17G0oWXhYuXKg1a9ZowoQJpqOEjZtuuklvv/12\nwEGODz/8UBs2bFBqaqrBZIhEXIRu2B//+Ee99dZbSklJUXFxcdCdaqLxVIGxY8fq4MGDuvLKK9W+\nfXvt2rVL8+fP1zfffKO//vWvatKkiemItnK5XNVe+zJz5kw5HI6oPZqblpamxo0bq3///oqLi9NX\nX32lWbNmKSYmRk8++aTpeLZLTExUenq6XnnlFZWVlemqq67Sxx9/rCVLluixxx6L+tM633jjDZWX\nl0flv6k/9OCDD+rdd9/V5ZdfrvHjx8vlcumtt97Se++9pzFjxkTl78m///1vTZ48WYMGDZLL5dLq\n1as1Z84cDR06VBkZGabj2eK5557T/v37q8rFsmXLtG3bNklSRkaGmjVrpscee0xvvvmmkpOTdd99\n9+nQoUOaMWOGLrroIt11110G04dGTfbJ1q1b9eqrr0pS1XSo8llMnTp10qhRowwkjxAGH4IIy7KS\nk5Mtp9N5yj/RaOHChdagQYOsdu3aWQ0aNLBcLpc1aNAg6+233zYdLawkJydbvXv3Nh3DmGeffda6\n9NJLrdatW1sNGjSw2rdvb915552W1+s1Hc2YkydPWpMnT7a6dOliNWzY0OrRo4f1zDPPmI4VFi67\n7DKrXbt2VkVFhekoxuXl5VnXX3+9FR8fbzVs2NA699xzrWnTplnl5eWmoxnh9XqtIUOGWHFxcVbj\nxo2t888/33r66aetsrIy09Fs07lz51O+D9myZUvV161bt84aMmSIFRMTY8XGxlp33HGHtWfPHoPJ\nQ6cm+2TlypWWw+Go9muuvvpqw3+D8OawrCi7VQwAAAAAY7gGBAAAAIBtKCAAAAAAbEMBAQAAAGAb\nCggAAAAA21BAAAAAANiGAgIAAADANhQQAAAAALahgAAAAACwDQUEAAAAgG0oIAAAAABsQwEBAAAA\nYBsKCAAAAADbUEAAAAAA2IYCAgAAAMA2FBAAAAAAtqGAAAAAALANBQQAAACAbSggAAAAAGxDAQEA\nAABgGwoIAAAAANtQQAAAAADYhgICAAAAwDYUEAAAAAC2oYAAAAAAsA0FBAAAAIBtKCAAAAAAbEMB\nAQAAAGAbCggAAAAA21BAAAAAANiGAgIAAADANhQQAAAAALahgAAAAACwDQUEAAAAgG0oIAAAIPA3\nVQAAAC5JREFUAABsQwEBAAAAYBsKCAAAAADbUEAAAAAA2IYCAgAAAMA2FBAAAAAAtvl//SeRCv5k\nbl4AAAAASUVORK5CYII=\n", 200 | "text/plain": [ 201 | "" 202 | ] 203 | }, 204 | "execution_count": 1, 205 | "metadata": {}, 206 | "output_type": 
"execute_result" 207 | } 208 | ], 209 | "source": [ 210 | "# Regression result" 211 | ] 212 | } 213 | ], 214 | "metadata": { 215 | "kernelspec": { 216 | "display_name": "Python 2", 217 | "language": "python", 218 | "name": "python2" 219 | }, 220 | "language_info": { 221 | "codemirror_mode": { 222 | "name": "ipython", 223 | "version": 2 224 | }, 225 | "file_extension": ".py", 226 | "mimetype": "text/x-python", 227 | "name": "python", 228 | "nbconvert_exporter": "python", 229 | "pygments_lexer": "ipython2", 230 | "version": "2.7.13" 231 | } 232 | }, 233 | "nbformat": 4, 234 | "nbformat_minor": 0 235 | } 236 | --------------------------------------------------------------------------------