├── LICENSE
├── README.md
├── Vhanilla_RNN
├── RNN.py
├── .ipynb_checkpoints
│ └── RNN-checkpoint.ipynb
└── RNN.ipynb
├── GRU
├── GRU.py
└── GRU.ipynb
├── LSTM
├── LSTM.py
└── LSTM.ipynb
├── Dynamic_batch_LSTM
└── dynamic_batch_lstm.ipynb
├── Two Layer Stacked LSTM
└── Two Later Stacked LSTM.py
├── Tensorboard
└── Tensorboard.ipynb
└── BiDirectional LSTM
└── bi_directional_lstm.py
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2016 Kazi Nazmul Haque Shezan
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## Tensorflow tutorial to build any model from scratch.
2 |
3 | ## In this repo these are presented
4 | 1. Dynamic Vanilla RNN ---> Notebook , Code
5 | 2. Dynamic GRU ---> Notebook , Code
6 | 3. Dynamic LSTM ---> Notebook , Code
7 | 4. Dynamic 2layerStacked LSTM ---> Notebook , Code
8 | 5. Dynamic BiDirectional LSTM ---> Notebook , Code
9 | 6. Tensorboard Example --->Notebook
10 | 7. Tensorflow LSTM implementation with dynamic batch--->Notebook
11 |
The RNN, GRU, LSTM and 2-layer stacked LSTM models are implemented with the 8x8 MNIST dataset as a sanity check.
13 |
This repository contains simple examples of dynamic-sequence and dynamic-batch vanilla RNN, GRU, LSTM, 2-layer stacked LSTM, and bidirectional LSTM written in TensorFlow using the scan and map ops.
15 |
Every folder contains a Python (.py) file and an IPython notebook for convenience.
17 |
These examples give a very good understanding of how to implement a dynamic RNN in TensorFlow.
This code can be extended to create a neural stack machine, neural Turing machine, or RNN-EMM in TensorFlow.
20 |
21 | #### For any questions please ask them on twitter and I will love to answer those. Follow me on twitter for more updates.
22 |
--------------------------------------------------------------------------------
/Vhanilla_RNN/RNN.py:
--------------------------------------------------------------------------------
1 | import tensorflow as tf
2 | from sklearn import datasets
3 | from sklearn.cross_validation import train_test_split
4 | import sys
5 |
6 |
7 | # # Vhanilla RNN class and functions
8 |
class RNN_cell(object):

    """
    Vanilla RNN cell that builds all of its TF1 graph pieces at
    construction time (weights, input placeholder, initial hidden state).

    input_size = Input Vector size
    hidden_layer_size = Hidden layer size
    target_size = Output vector size

    Requires the module-level helper process_batch_input_for_RNN to be
    defined before an instance is created.
    """

    def __init__(self, input_size, hidden_layer_size, target_size):

        # Initialization of given values
        self.input_size = input_size
        self.hidden_layer_size = hidden_layer_size
        self.target_size = target_size

        # Weights and Bias for input and hidden tensor.
        # NOTE(review): the input/recurrent weights start at zero (only the
        # output layer is randomly initialized) -- unusual; confirm intent.
        self.Wx = tf.Variable(tf.zeros(
            [self.input_size, self.hidden_layer_size]))
        self.Wh = tf.Variable(tf.zeros(
            [self.hidden_layer_size, self.hidden_layer_size]))
        self.bi = tf.Variable(tf.zeros([self.hidden_layer_size]))

        # Weights for output layers
        self.Wo = tf.Variable(tf.truncated_normal(
            [self.hidden_layer_size, self.target_size], mean=0, stddev=.01))
        self.bo = tf.Variable(tf.truncated_normal(
            [self.target_size], mean=0, stddev=.01))

        # Placeholder for input vector with shape [batch, seq, embeddings]
        self._inputs = tf.placeholder(tf.float32,
                                      shape=[None, None, self.input_size],
                                      name='inputs')

        # Transposed to [seq, batch, embeddings] so tf.scan can iterate
        # over the time axis.
        self.processed_input = process_batch_input_for_RNN(self._inputs)

        '''
        Initial hidden state's shape is [1,self.hidden_layer_size]
        In First time stamp, we are doing dot product with weights to
        get the shape of [batch_size, self.hidden_layer_size].
        For this dot product tensorflow use broadcasting. But during
        Back propagation a low level error occurs.
        So to solve the problem it was needed to initialize initial
        hiddden state of size [batch_size, self.hidden_layer_size].
        So here is a little hack !!!! Getting the same shaped
        initial hidden state of zeros.
        '''

        # Multiplying the first time step by a zero matrix yields a
        # correctly shaped [batch_size, hidden_layer_size] zero state.
        self.initial_hidden = self._inputs[:, 0, :]
        self.initial_hidden = tf.matmul(
            self.initial_hidden, tf.zeros([input_size, hidden_layer_size]))

    # Function for vanilla RNN step.
    def vanilla_rnn(self, previous_hidden_state, x):
        """
        One recurrence step: h_t = tanh(h_{t-1} Wh + x_t Wx + bi).

        Takes the previous hidden state and the current input slice and
        returns the current hidden state.
        """
        current_hidden_state = tf.tanh(
            tf.matmul(previous_hidden_state, self.Wh) +
            tf.matmul(x, self.Wx) + self.bi)

        return current_hidden_state

    # Function for getting all hidden state.
    def get_states(self):
        """
        Iterates through time/sequence (via tf.scan) to get all hidden
        states; result shape is [seq, batch, hidden_layer_size].
        """

        # Getting all hidden states through time
        all_hidden_states = tf.scan(self.vanilla_rnn,
                                    self.processed_input,
                                    initializer=self.initial_hidden,
                                    name='states')

        return all_hidden_states

    # Function to get output from a hidden layer
    def get_output(self, hidden_state):
        """
        Projects one hidden state to the output space through a ReLU.
        """
        output = tf.nn.relu(tf.matmul(hidden_state, self.Wo) + self.bo)

        return output

    # Function for getting all output layers
    def get_outputs(self):
        """
        Maps get_output over every time step's hidden state; result shape
        is [seq, batch, target_size].
        """
        all_hidden_states = self.get_states()

        all_outputs = tf.map_fn(self.get_output, all_hidden_states)

        return all_outputs
109 |
110 | # Function to convert batch input data to use scan ops of tensorflow.
def process_batch_input_for_RNN(batch_input):
    """
    Reorder a batch-major tensor to time-major so tf.scan can iterate
    over the time axis.

    Input  shape: [batch, seq, embedding]   (e.g. [5, 3, 2])
    Output shape: [seq, batch, embedding]   (e.g. [3, 5, 2])

    The original implementation chained two transposes (perm=[2, 0, 1]
    followed by a full reverse, perm=[2, 1, 0]); their composition equals
    the single permutation [1, 0, 2] used here, so one op suffices.
    """
    return tf.transpose(batch_input, perm=[1, 0, 2])
119 |
120 |
# # Placeholder and initializers

hidden_layer_size = 110
input_size = 8    # 8x8 sklearn digit images: each 8-pixel row is one time step
target_size = 10  # ten digit classes


# Placeholder for the one-hot labels.
# NOTE(review): name='inputs' looks like a copy-paste leftover; left as-is
# because the name is part of the graph.
y = tf.placeholder(tf.float32, shape=[None, target_size], name='inputs')


# # Models


# Initializing rnn object
rnn = RNN_cell(input_size, hidden_layer_size, target_size)


# Getting all outputs from rnn, shape [seq, batch, target_size]
outputs = rnn.get_outputs()


# Getting final output through indexing (last time step)
last_output = outputs[-1]


# As the rnn model emits the final layer through a ReLU activation,
# softmax is applied for the final output.
output = tf.nn.softmax(last_output)


# Computing the Cross Entropy loss.
# NOTE(review): tf.log(output) is -inf when a softmax entry reaches 0 and
# can produce NaN losses; a numerically stable cross-entropy op would be
# safer -- left unchanged here.
cross_entropy = -tf.reduce_sum(y * tf.log(output))


# Training with the Adam optimizer (earlier comment said Adadelta; the
# code uses AdamOptimizer).
train_step = tf.train.AdamOptimizer().minimize(cross_entropy)


# Calculation of correct prediction and accuracy (as a percentage)
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(output, 1))
accuracy = (tf.reduce_mean(tf.cast(correct_prediction, tf.float32))) * 100

sess = tf.InteractiveSession()
sess.run(tf.initialize_all_variables())


# Dataset Preparation

# Using the sklearn 8x8 digits dataset.
digits = datasets.load_digits()
X = digits.images
Y_ = digits.target

# One hot encoding of the integer labels
Y = sess.run(tf.one_hot(indices=Y_, depth=target_size))

# Getting Train and test Dataset
X_train, X_test, y_train, y_test = train_test_split(
    X, Y, test_size=0.22, random_state=42)

# Truncating so that 14 batches of 100 exactly cover the training set
X_train = X_train[:1400]
y_train = y_train[:1400]


# Iterations to do training
for epoch in range(120):

    start = 0
    end = 100
    for i in range(14):

        # NOTE(review): rebinds the module-level X/Y names to the batch.
        X = X_train[start:end]
        Y = y_train[start:end]
        start = end
        end = start + 100
        sess.run(train_step, feed_dict={rnn._inputs: X, y: Y})

    # Loss is measured on the last mini-batch only; accuracy on the full
    # train and test splits.
    Loss = str(sess.run(cross_entropy, feed_dict={rnn._inputs: X, y: Y}))
    Train_accuracy = str(sess.run(accuracy, feed_dict={
        rnn._inputs: X_train, y: y_train}))
    Test_accuracy = str(sess.run(accuracy, feed_dict={
        rnn._inputs: X_test, y: y_test}))

    sys.stdout.flush()
    # The trailing comma suppresses the newline under Python 2 so \r
    # overwrites the same console line; under Python 3 it merely builds a
    # discarded tuple.
    print("\rIteration: %s Loss: %s Train Accuracy: %s Test Accuracy: %s" %
          (epoch, Loss, Train_accuracy, Test_accuracy)),
    sys.stdout.flush()
209 |
--------------------------------------------------------------------------------
/GRU/GRU.py:
--------------------------------------------------------------------------------
1 | import tensorflow as tf
2 | from sklearn import datasets
3 | from sklearn.cross_validation import train_test_split
4 | import sys
5 |
6 |
7 | # # Vhanilla RNN class and functions
8 |
class RNN_cell(object):

    """
    GRU cell that builds all of its TF1 graph pieces at construction time.

    input_size = Input Vector size
    hidden_layer_size = Hidden layer size
    target_size = Output vector size

    NOTE(review): despite the name RNN_cell, the recurrence implemented in
    Gru() is a GRU. Requires the module-level helper
    process_batch_input_for_RNN to be defined before an instance is created.
    """

    def __init__(self, input_size, hidden_layer_size, target_size):

        # Initialization of given values
        self.input_size = input_size
        self.hidden_layer_size = hidden_layer_size
        self.target_size = target_size

        # Weights for input and hidden tensor.
        # NOTE(review): input/gate weights start at zero, and the gate
        # biases / output weights are drawn around mean=1 -- unusual
        # initialization choices; confirm intent.
        self.Wx = tf.Variable(
            tf.zeros([self.input_size, self.hidden_layer_size]))
        self.Wr = tf.Variable(
            tf.zeros([self.input_size, self.hidden_layer_size]))
        self.Wz = tf.Variable(
            tf.zeros([self.input_size, self.hidden_layer_size]))

        self.br = tf.Variable(tf.truncated_normal(
            [self.hidden_layer_size], mean=1))
        self.bz = tf.Variable(tf.truncated_normal(
            [self.hidden_layer_size], mean=1))

        self.Wh = tf.Variable(
            tf.zeros([self.hidden_layer_size, self.hidden_layer_size]))

        # Weights for output layer
        self.Wo = tf.Variable(tf.truncated_normal(
            [self.hidden_layer_size, self.target_size], mean=1, stddev=.01))
        self.bo = tf.Variable(tf.truncated_normal(
            [self.target_size], mean=1, stddev=.01))
        # Placeholder for input vector with shape [batch, seq, embeddings]
        self._inputs = tf.placeholder(tf.float32,
                                      shape=[None, None, self.input_size],
                                      name='inputs')

        # Transposed to [seq, batch, embeddings] so tf.scan can iterate
        # over the time axis.
        self.processed_input = process_batch_input_for_RNN(self._inputs)

        '''
        Initial hidden state's shape is [1,self.hidden_layer_size]
        In First time stamp, we are doing dot product with weights to
        get the shape of [batch_size, self.hidden_layer_size].
        For this dot product tensorflow use broadcasting. But during
        Back propagation a low level error occurs.
        So to solve the problem it was needed to initialize initial
        hiddden state of size [batch_size, self.hidden_layer_size].
        So here is a little hack !!!! Getting the same shaped
        initial hidden state of zeros.
        '''

        # Multiplying the first time step by a zero matrix yields a
        # correctly shaped [batch_size, hidden_layer_size] zero state.
        self.initial_hidden = self._inputs[:, 0, :]
        self.initial_hidden = tf.matmul(
            self.initial_hidden, tf.zeros([input_size, hidden_layer_size]))

    # Function for GRU cell
    def Gru(self, previous_hidden_state, x):
        """
        One GRU step: maps (h_{t-1}, x_t) -> h_t.

        z = update gate, r = reset gate, h_ = candidate state.
        NOTE(review): this differs from the textbook GRU in three ways:
        the gates read only x (no recurrent term), the reset gate is
        applied AFTER the matmul (matmul(h_prev, Wh) * r rather than
        matmul(r * h_prev, Wh)), and the candidate has no bias -- confirm
        this variant is intended.
        """
        z = tf.sigmoid(tf.matmul(x, self.Wz) + self.bz)
        r = tf.sigmoid(tf.matmul(x, self.Wr) + self.br)

        h_ = tf.tanh(tf.matmul(x, self.Wx) +
                     tf.matmul(previous_hidden_state, self.Wh) * r)

        # Convex combination of candidate and previous hidden state.
        current_hidden_state = tf.multiply(
            (1 - z), h_) + tf.multiply(previous_hidden_state, z)

        return current_hidden_state

    # Function for getting all hidden state.
    def get_states(self):
        """
        Iterates through time/sequence (via tf.scan) to get all hidden
        states; result shape is [seq, batch, hidden_layer_size].
        """

        # Getting all hidden states through time
        all_hidden_states = tf.scan(self.Gru,
                                    self.processed_input,
                                    initializer=self.initial_hidden,
                                    name='states')

        return all_hidden_states

    # Function to get output from a hidden layer
    def get_output(self, hidden_state):
        """
        Projects one hidden state to the output space through a ReLU.
        """
        output = tf.nn.relu(tf.matmul(hidden_state, self.Wo) + self.bo)

        return output

    # Function for getting all output layers
    def get_outputs(self):
        """
        Maps get_output over every time step's hidden state; result shape
        is [seq, batch, target_size].
        """
        all_hidden_states = self.get_states()

        all_outputs = tf.map_fn(self.get_output, all_hidden_states)

        return all_outputs
120 |
121 |
122 | # Function to convert batch input data to use scan ops of tensorflow.
def process_batch_input_for_RNN(batch_input):
    """
    Reorder a batch-major tensor to time-major so tf.scan can iterate
    over the time axis.

    Input  shape: [batch, seq, embedding]   (e.g. [5, 3, 2])
    Output shape: [seq, batch, embedding]   (e.g. [3, 5, 2])

    The original implementation chained two transposes (perm=[2, 0, 1]
    followed by a full reverse, perm=[2, 1, 0]); their composition equals
    the single permutation [1, 0, 2] used here, so one op suffices.
    """
    return tf.transpose(batch_input, perm=[1, 0, 2])
131 |
"""
Example of using GRU
"""
# Initializing variables.

hidden_layer_size = 30
input_size = 8    # 8x8 sklearn digit images: each 8-pixel row is one time step
target_size = 10  # ten digit classes


# Placeholder for the one-hot labels.
# NOTE(review): name='inputs' looks like a copy-paste leftover; left as-is
# because the name is part of the graph.
y = tf.placeholder(tf.float32, shape=[None, target_size], name='inputs')


# # Models

# Initializing rnn object
rnn = RNN_cell(input_size, hidden_layer_size, target_size)


# Getting all outputs from rnn, shape [seq, batch, target_size]
outputs = rnn.get_outputs()


# Getting final output through indexing (last time step)
last_output = outputs[-1]


# As the rnn model emits the final layer through a ReLU activation,
# softmax is applied for the final output.
output = tf.nn.softmax(last_output)


# Computing the Cross Entropy loss.
# NOTE(review): tf.log(output) is -inf when a softmax entry reaches 0 and
# can produce NaN losses; a numerically stable cross-entropy op would be
# safer -- left unchanged here.
cross_entropy = -tf.reduce_sum(y * tf.log(output))


# Training with the Adam optimizer (earlier comment said Adadelta; the
# code uses AdamOptimizer).
train_step = tf.train.AdamOptimizer().minimize(cross_entropy)


# Calculation of correct prediction and accuracy (as a percentage)
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(output, 1))
accuracy = (tf.reduce_mean(tf.cast(correct_prediction, tf.float32))) * 100


# # Dataset Preparation
181 |
182 | # Function to get on hot
def get_on_hot(number, size=10):
    """
    Return a one-hot encoded list for *number*.

    number: index to set to 1 (must satisfy 0 <= number < size).
    size: length of the encoding; defaults to 10 so existing
        get_on_hot(n) calls behave exactly as before.
    """
    on_hot = [0] * size
    on_hot[number] = 1
    return on_hot
187 |
188 |
# Using the sklearn 8x8 digits dataset.
digits = datasets.load_digits()
X = digits.images
Y_ = digits.target
# NOTE(review): under Python 3, map() returns an iterator that
# train_test_split cannot index -- this line assumes Python 2, where it
# returns a list. Verify the intended interpreter.
Y = map(get_on_hot, Y_)


# Getting Train and test Dataset
X_train, X_test, y_train, y_test = train_test_split(
    X, Y, test_size=0.22, random_state=42)

# Truncating so that 14 batches of 100 exactly cover the training set
X_train = X_train[:1400]
y_train = y_train[:1400]


sess = tf.InteractiveSession()
sess.run(tf.initialize_all_variables())


# Iterations to do training
for epoch in range(200):

    start = 0
    end = 100
    for i in range(14):

        # NOTE(review): rebinds the module-level X/Y names to the batch.
        X = X_train[start:end]
        Y = y_train[start:end]
        start = end
        end = start + 100
        sess.run(train_step, feed_dict={rnn._inputs: X, y: Y})

    # Loss is measured on the last mini-batch only; accuracy on the full
    # train and test splits.
    Loss = str(sess.run(cross_entropy, feed_dict={rnn._inputs: X, y: Y}))
    Train_accuracy = str(sess.run(accuracy, feed_dict={
        rnn._inputs: X_train, y: y_train}))
    Test_accuracy = str(sess.run(accuracy, feed_dict={
        rnn._inputs: X_test, y: y_test}))

    sys.stdout.flush()
    # The trailing comma suppresses the newline under Python 2 so \r
    # overwrites the same console line; under Python 3 it merely builds a
    # discarded tuple.
    print("\rIteration: %s Loss: %s Train Accuracy: %s Test Accuracy: %s" %
          (epoch, Loss, Train_accuracy, Test_accuracy)),
    sys.stdout.flush()
232 |
--------------------------------------------------------------------------------
/LSTM/LSTM.py:
--------------------------------------------------------------------------------
1 | import tensorflow as tf
2 | from sklearn import datasets
3 | from sklearn.cross_validation import train_test_split
4 |
5 | import sys
6 |
7 |
class LSTM_cell(object):

    """
    LSTM cell that builds all of its TF1 graph pieces at construction time.

    input_size = Input Vector size
    hidden_layer_size = Hidden layer size
    target_size = Output vector size

    The scan state is a stacked pair [hidden_state, memory_cell] of shape
    [2, batch, hidden_layer_size]. Requires the module-level helper
    process_batch_input_for_RNN to be defined before an instance is created.
    """

    def __init__(self, input_size, hidden_layer_size, target_size):

        # Initialization of given values
        self.input_size = input_size
        self.hidden_layer_size = hidden_layer_size
        self.target_size = target_size

        # Weights and Bias for input and hidden tensor.
        # W* multiply the input x, U* multiply the previous hidden state;
        # suffixes: i = input gate, f = forget gate, og = output gate,
        # c = candidate memory cell.
        # NOTE(review): all gate weights start at zero (only the output
        # layer is randomly initialized) -- unusual; confirm intent.
        self.Wi = tf.Variable(tf.zeros(
            [self.input_size, self.hidden_layer_size]))
        self.Ui = tf.Variable(tf.zeros(
            [self.hidden_layer_size, self.hidden_layer_size]))
        self.bi = tf.Variable(tf.zeros([self.hidden_layer_size]))

        self.Wf = tf.Variable(tf.zeros(
            [self.input_size, self.hidden_layer_size]))
        self.Uf = tf.Variable(tf.zeros(
            [self.hidden_layer_size, self.hidden_layer_size]))
        self.bf = tf.Variable(tf.zeros([self.hidden_layer_size]))

        self.Wog = tf.Variable(tf.zeros(
            [self.input_size, self.hidden_layer_size]))
        self.Uog = tf.Variable(tf.zeros(
            [self.hidden_layer_size, self.hidden_layer_size]))
        self.bog = tf.Variable(tf.zeros([self.hidden_layer_size]))

        self.Wc = tf.Variable(tf.zeros(
            [self.input_size, self.hidden_layer_size]))
        self.Uc = tf.Variable(tf.zeros(
            [self.hidden_layer_size, self.hidden_layer_size]))
        self.bc = tf.Variable(tf.zeros([self.hidden_layer_size]))

        # Weights for output layers
        self.Wo = tf.Variable(tf.truncated_normal(
            [self.hidden_layer_size, self.target_size], mean=0, stddev=.01))
        self.bo = tf.Variable(tf.truncated_normal(
            [self.target_size], mean=0, stddev=.01))

        # Placeholder for input vector with shape [batch, seq, embeddings]
        self._inputs = tf.placeholder(tf.float32,
                                      shape=[None, None, self.input_size],
                                      name='inputs')

        # Transposed to [seq, batch, embeddings] so tf.scan can iterate
        # over the time axis.
        self.processed_input = process_batch_input_for_RNN(self._inputs)

        '''
        Initial hidden state's shape is [1,self.hidden_layer_size]
        In First time stamp, we are doing dot product with weights to
        get the shape of [batch_size, self.hidden_layer_size].
        For this dot product tensorflow use broadcasting. But during
        Back propagation a low level error occurs.
        So to solve the problem it was needed to initialize initial
        hiddden state of size [batch_size, self.hidden_layer_size].
        So here is a little hack !!!! Getting the same shaped
        initial hidden state of zeros.
        '''

        # Multiplying the first time step by a zero matrix yields a
        # correctly shaped [batch_size, hidden_layer_size] zero state.
        self.initial_hidden = self._inputs[:, 0, :]
        self.initial_hidden = tf.matmul(
            self.initial_hidden, tf.zeros([input_size, hidden_layer_size]))

        # Stack (h, c) into one tensor so scan carries both as its state.
        self.initial_hidden = tf.stack(
            [self.initial_hidden, self.initial_hidden])
    # Function for LSTM cell.

    def Lstm(self, previous_hidden_memory_tuple, x):
        """
        One LSTM step: takes the stacked (hidden state, memory cell) pair
        and the current input, and returns the updated stacked pair.
        """

        previous_hidden_state, c_prev = tf.unstack(previous_hidden_memory_tuple)

        # Input Gate
        i = tf.sigmoid(
            tf.matmul(x, self.Wi) +
            tf.matmul(previous_hidden_state, self.Ui) + self.bi
        )

        # Forget Gate
        f = tf.sigmoid(
            tf.matmul(x, self.Wf) +
            tf.matmul(previous_hidden_state, self.Uf) + self.bf
        )

        # Output Gate
        o = tf.sigmoid(
            tf.matmul(x, self.Wog) +
            tf.matmul(previous_hidden_state, self.Uog) + self.bog
        )

        # New Memory Cell (candidate)
        c_ = tf.nn.tanh(
            tf.matmul(x, self.Wc) +
            tf.matmul(previous_hidden_state, self.Uc) + self.bc
        )

        # Final Memory cell: forget part of the old cell, add the gated
        # candidate.
        c = f * c_prev + i * c_

        # Current Hidden state
        current_hidden_state = o * tf.nn.tanh(c)

        return tf.stack([current_hidden_state, c])

    # Function for getting all hidden state.
    def get_states(self):
        """
        Iterates through time/sequence (via tf.scan) and returns only the
        hidden-state half of the (h, c) pairs; result shape is
        [seq, batch, hidden_layer_size].
        """

        # Getting all (h, c) pairs through time
        all_hidden_states = tf.scan(self.Lstm,
                                    self.processed_input,
                                    initializer=self.initial_hidden,
                                    name='states')
        # Keep h (index 0), drop the memory cell c.
        all_hidden_states = all_hidden_states[:, 0, :, :]

        return all_hidden_states

    # Function to get output from a hidden layer
    def get_output(self, hidden_state):
        """
        Projects one hidden state to the output space through a ReLU.
        """
        output = tf.nn.relu(tf.matmul(hidden_state, self.Wo) + self.bo)

        return output

    # Function for getting all output layers
    def get_outputs(self):
        """
        Maps get_output over every time step's hidden state; result shape
        is [seq, batch, target_size].
        """
        all_hidden_states = self.get_states()

        all_outputs = tf.map_fn(self.get_output, all_hidden_states)

        return all_outputs
159 |
160 |
161 | # Function to convert batch input data to use scan ops of tensorflow.
def process_batch_input_for_RNN(batch_input):
    """
    Reorder a batch-major tensor to time-major so tf.scan can iterate
    over the time axis.

    Input  shape: [batch, seq, embedding]   (e.g. [5, 3, 2])
    Output shape: [seq, batch, embedding]   (e.g. [3, 5, 2])

    The original implementation chained two transposes (perm=[2, 0, 1]
    followed by a full reverse, perm=[2, 1, 0]); their composition equals
    the single permutation [1, 0, 2] used here, so one op suffices.
    """
    return tf.transpose(batch_input, perm=[1, 0, 2])
170 |
171 |
# # Placeholder and initializers


hidden_layer_size = 30
input_size = 8    # 8x8 sklearn digit images: each 8-pixel row is one time step
target_size = 10  # ten digit classes


# Placeholder for the one-hot labels.
# NOTE(review): name='inputs' looks like a copy-paste leftover; left as-is
# because the name is part of the graph.
y = tf.placeholder(tf.float32, shape=[None, target_size], name='inputs')


# # Models


# Initializing rnn object
rnn = LSTM_cell(input_size, hidden_layer_size, target_size)


# Getting all outputs from rnn, shape [seq, batch, target_size]
outputs = rnn.get_outputs()


# Getting final output through indexing (last time step)
last_output = outputs[-1]


# As the rnn model emits the final layer through a ReLU activation,
# softmax is applied for the final output.
output = tf.nn.softmax(last_output)


# Computing the Cross Entropy loss.
# NOTE(review): tf.log(output) is -inf when a softmax entry reaches 0 and
# can produce NaN losses; a numerically stable cross-entropy op would be
# safer -- left unchanged here.
cross_entropy = -tf.reduce_sum(y * tf.log(output))


# Training with the Adam optimizer (earlier comment said Adadelta; the
# code uses AdamOptimizer).
train_step = tf.train.AdamOptimizer().minimize(cross_entropy)


# Calculation of correct prediction and accuracy (as a percentage)
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(output, 1))
accuracy = (tf.reduce_mean(tf.cast(correct_prediction, tf.float32))) * 100


# # Dataset Preparation
219 |
220 |
221 | # Function to get on hot
def get_on_hot(number, size=10):
    """
    Return a one-hot encoded list for *number*.

    number: index to set to 1 (must satisfy 0 <= number < size).
    size: length of the encoding; defaults to 10 so existing
        get_on_hot(n) calls behave exactly as before.
    """
    on_hot = [0] * size
    on_hot[number] = 1
    return on_hot
226 |
227 |
# Using the sklearn 8x8 digits dataset.
digits = datasets.load_digits()
X = digits.images
Y_ = digits.target

# NOTE(review): under Python 3, map() returns an iterator that
# train_test_split cannot index -- this line assumes Python 2, where it
# returns a list. Verify the intended interpreter.
Y = map(get_on_hot, Y_)


# Getting Train and test Dataset
X_train, X_test, y_train, y_test = train_test_split(
    X, Y, test_size=0.22, random_state=42)

# Truncating so that 14 batches of 100 exactly cover the training set
X_train = X_train[:1400]
y_train = y_train[:1400]


sess = tf.InteractiveSession()
sess.run(tf.initialize_all_variables())


# Iterations to do training
for epoch in range(120):

    start = 0
    end = 100
    for i in range(14):

        # NOTE(review): rebinds the module-level X/Y names to the batch.
        X = X_train[start:end]
        Y = y_train[start:end]
        start = end
        end = start + 100
        sess.run(train_step, feed_dict={rnn._inputs: X, y: Y})

    # Loss is measured on the last mini-batch only; train accuracy on the
    # first 500 training samples, test accuracy on the full test split.
    Loss = str(sess.run(cross_entropy, feed_dict={rnn._inputs: X, y: Y}))
    Train_accuracy = str(sess.run(accuracy, feed_dict={
        rnn._inputs: X_train[:500], y: y_train[:500]}))
    Test_accuracy = str(sess.run(accuracy, feed_dict={
        rnn._inputs: X_test, y: y_test}))

    sys.stdout.flush()
    # The trailing comma suppresses the newline under Python 2 so \r
    # overwrites the same console line; under Python 3 it merely builds a
    # discarded tuple.
    print("\rIteration: %s Loss: %s Train Accuracy: %s Test Accuracy: %s" %
          (epoch, Loss, Train_accuracy, Test_accuracy)),
    sys.stdout.flush()
272 |
--------------------------------------------------------------------------------
/Dynamic_batch_LSTM/dynamic_batch_lstm.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import tensorflow as tf\n",
10 | "import numpy as np"
11 | ]
12 | },
13 | {
14 | "cell_type": "code",
15 | "execution_count": 2,
16 | "metadata": {},
17 | "outputs": [],
18 | "source": [
19 | "sequence_length = 96\n",
20 | "embedding_length = 64"
21 | ]
22 | },
23 | {
24 | "cell_type": "markdown",
25 | "metadata": {},
26 | "source": [
27 | "## Placeholder"
28 | ]
29 | },
30 | {
31 | "cell_type": "code",
32 | "execution_count": 3,
33 | "metadata": {},
34 | "outputs": [],
35 | "source": [
36 | "input_data = tf.placeholder(tf.float32,[None,sequence_length,embedding_length])"
37 | ]
38 | },
39 | {
40 | "cell_type": "markdown",
41 | "metadata": {},
42 | "source": [
43 | "## Build RNN Cell"
44 | ]
45 | },
46 | {
47 | "cell_type": "code",
48 | "execution_count": 4,
49 | "metadata": {
50 | "scrolled": true
51 | },
52 | "outputs": [
53 | {
54 | "name": "stdout",
55 | "output_type": "stream",
56 | "text": [
57 | "WARNING:tensorflow:From /usr/local/lib/python3.5/dist-packages/tensorflow/contrib/learn/python/learn/datasets/base.py:198: retry (from tensorflow.contrib.learn.python.learn.datasets.base) is deprecated and will be removed in a future version.\n",
58 | "Instructions for updating:\n",
59 | "Use the retry module or similar alternatives.\n"
60 | ]
61 | }
62 | ],
63 | "source": [
64 | "hidden_vector_size = 100\n",
65 | "\n",
66 | "rnn_cell = tf.contrib.rnn.LSTMCell(hidden_vector_size)"
67 | ]
68 | },
69 | {
70 | "cell_type": "markdown",
71 | "metadata": {},
72 | "source": [
73 | "### Get batch Size from input data rather than hard coding it"
74 | ]
75 | },
76 | {
77 | "cell_type": "code",
78 | "execution_count": 5,
79 | "metadata": {},
80 | "outputs": [],
81 | "source": [
82 | "initial_zero_h = tf.matmul(tf.reduce_mean(tf.zeros_like(input_data),2),\n",
83 | " tf.zeros([sequence_length,hidden_vector_size]))"
84 | ]
85 | },
86 | {
87 | "cell_type": "markdown",
88 | "metadata": {},
89 | "source": [
90 | "### Initial State with tuple of h,c"
91 | ]
92 | },
93 | {
94 | "cell_type": "code",
95 | "execution_count": 6,
96 | "metadata": {},
97 | "outputs": [],
98 | "source": [
99 | "initial_state = tf.contrib.rnn.LSTMStateTuple(initial_zero_h,initial_zero_h)"
100 | ]
101 | },
102 | {
103 | "cell_type": "markdown",
104 | "metadata": {},
105 | "source": [
106 | "### Getting the outputs"
107 | ]
108 | },
109 | {
110 | "cell_type": "code",
111 | "execution_count": 7,
112 | "metadata": {
113 | "scrolled": true
114 | },
115 | "outputs": [],
116 | "source": [
117 | "outputs, state = tf.nn.dynamic_rnn(rnn_cell, input_data,\n",
118 | " initial_state=initial_state,\n",
119 | " dtype=tf.float32)"
120 | ]
121 | },
122 | {
123 | "cell_type": "code",
124 | "execution_count": 8,
125 | "metadata": {},
126 | "outputs": [
127 | {
128 | "data": {
129 | "text/plain": [
130 | ""
131 | ]
132 | },
133 | "execution_count": 8,
134 | "metadata": {},
135 | "output_type": "execute_result"
136 | }
137 | ],
138 | "source": [
139 | "outputs"
140 | ]
141 | },
142 | {
143 | "cell_type": "code",
144 | "execution_count": 9,
145 | "metadata": {},
146 | "outputs": [],
147 | "source": [
148 | "sess = tf.InteractiveSession()\n",
149 | "sess.run(tf.global_variables_initializer())"
150 | ]
151 | },
152 | {
153 | "cell_type": "markdown",
154 | "metadata": {},
155 | "source": [
156 | "### Use any batch Size"
157 | ]
158 | },
159 | {
160 | "cell_type": "code",
161 | "execution_count": 10,
162 | "metadata": {},
163 | "outputs": [
164 | {
165 | "data": {
166 | "text/plain": [
167 | "array([[ 0.0133772 , 0.1248076 , 0.02860027, -0.00279607, -0.18480375,\n",
168 | " 0.00869518, 0.02967772, 0.03201196, -0.03233923, -0.12331957],\n",
169 | " [ 0.01717433, 0.2263697 , 0.06569117, -0.03991626, -0.21566994,\n",
170 | " -0.00274733, 0.01245386, 0.00521528, -0.03463598, -0.15695679],\n",
171 | " [ 0.04783954, 0.3069565 , 0.08629331, -0.03657083, -0.20743844,\n",
172 | " 0.01699171, -0.00409255, 0.02383869, -0.03702394, -0.18661733],\n",
173 | " [ 0.07255518, 0.27716413, 0.1319593 , -0.07308843, -0.27800056,\n",
174 | " 0.0886552 , -0.04429418, -0.03370504, -0.0502329 , -0.22744851],\n",
175 | " [ 0.13757218, 0.34861842, 0.1028095 , -0.1123821 , -0.3602204 ,\n",
176 | " 0.15615414, -0.06908301, -0.05146805, -0.01621163, -0.25484738],\n",
177 | " [ 0.15186062, 0.3146225 , 0.08943865, -0.15840401, -0.34275725,\n",
178 | " 0.14421731, -0.1636517 , -0.00490215, 0.0531736 , -0.20377146],\n",
179 | " [ 0.19039147, 0.27051648, 0.04969539, -0.13397585, -0.36124727,\n",
180 | " 0.20126095, -0.11209239, -0.03270925, 0.01869536, -0.21338326],\n",
181 | " [ 0.21901123, 0.30190438, 0.00503324, -0.16593175, -0.36799055,\n",
182 | " 0.23424742, -0.11466883, -0.02946316, 0.05092116, -0.159033 ],\n",
183 | " [ 0.17929147, 0.25147724, 0.01736976, -0.19413503, -0.37185222,\n",
184 | " 0.25556013, -0.15847237, 0.0221071 , 0.05562782, -0.18257754],\n",
185 | " [ 0.2271389 , 0.27966276, 0.06107444, -0.14930013, -0.36397922,\n",
186 | " 0.2083975 , -0.18683249, 0.06619405, 0.03500409, -0.20571777]],\n",
187 | " dtype=float32)"
188 | ]
189 | },
190 | "execution_count": 10,
191 | "metadata": {},
192 | "output_type": "execute_result"
193 | }
194 | ],
195 | "source": [
196 | "batch_size = 14\n",
197 | "fake_input = np.random.uniform(size=[batch_size,96,64])\n",
198 | "outputs.eval(feed_dict={input_data:fake_input})[0,:10,:10]"
199 | ]
200 | },
201 | {
202 | "cell_type": "code",
203 | "execution_count": 11,
204 | "metadata": {},
205 | "outputs": [
206 | {
207 | "data": {
208 | "text/plain": [
209 | "array([[ 0.03142734, 0.15519665, 0.02140723, -0.00588781, -0.10464185,\n",
210 | " 0.01756072, -0.00956259, 0.04701442, 0.08324529, -0.09299037],\n",
211 | " [ 0.01613303, 0.19432577, 0.0701172 , -0.03475946, -0.17193635,\n",
212 | " 0.03618715, -0.00548373, 0.02486533, 0.12223729, -0.09514315],\n",
213 | " [ 0.03262727, 0.2981133 , 0.08287209, -0.06223779, -0.25430405,\n",
214 | " 0.04866415, 0.01303787, 0.09578612, 0.03252373, -0.16682807],\n",
215 | " [ 0.10809346, 0.3251696 , 0.07644037, -0.08040279, -0.3162625 ,\n",
216 | " 0.09884892, -0.04423967, 0.15149339, 0.03924495, -0.22590007],\n",
217 | " [ 0.1504862 , 0.3340804 , 0.05860933, -0.09605774, -0.32826826,\n",
218 | " 0.19024172, -0.05101546, 0.147878 , 0.06868754, -0.13980682],\n",
219 | " [ 0.14726807, 0.31644508, 0.05159319, -0.1089751 , -0.32962084,\n",
220 | " 0.1447724 , -0.03714456, 0.1136796 , 0.10779426, -0.11198579],\n",
221 | " [ 0.22446983, 0.3208124 , 0.05245988, -0.12058066, -0.31481117,\n",
222 | " 0.10862615, -0.08089321, 0.10375389, 0.12114908, -0.1494493 ],\n",
223 | " [ 0.29125607, 0.33517447, 0.01976913, -0.09151037, -0.3436202 ,\n",
224 | " 0.19459113, -0.05055251, 0.12222207, 0.09918524, -0.22896896],\n",
225 | " [ 0.23131868, 0.319851 , 0.05228816, -0.11567234, -0.3553371 ,\n",
226 | " 0.15113856, -0.11206171, 0.06238765, 0.07839007, -0.20897834],\n",
227 | " [ 0.27803063, 0.32792312, 0.06652988, -0.03546051, -0.35768583,\n",
228 | " 0.09689046, -0.14316122, 0.05327737, 0.14563368, -0.22349374]],\n",
229 | " dtype=float32)"
230 | ]
231 | },
232 | "execution_count": 11,
233 | "metadata": {},
234 | "output_type": "execute_result"
235 | }
236 | ],
237 | "source": [
238 | "batch_size = 140\n",
239 | "fake_input = np.random.uniform(size=[batch_size,96,64])\n",
240 | "outputs.eval(feed_dict={input_data:fake_input})[0,:10,:10]\n"
241 | ]
242 | },
243 | {
244 | "cell_type": "code",
245 | "execution_count": null,
246 | "metadata": {},
247 | "outputs": [],
248 | "source": []
249 | }
250 | ],
251 | "metadata": {
252 | "kernelspec": {
253 | "display_name": "Python 3",
254 | "language": "python",
255 | "name": "python3"
256 | },
257 | "language_info": {
258 | "codemirror_mode": {
259 | "name": "ipython",
260 | "version": 3
261 | },
262 | "file_extension": ".py",
263 | "mimetype": "text/x-python",
264 | "name": "python",
265 | "nbconvert_exporter": "python",
266 | "pygments_lexer": "ipython3",
267 | "version": "3.5.2"
268 | }
269 | },
270 | "nbformat": 4,
271 | "nbformat_minor": 2
272 | }
273 |
--------------------------------------------------------------------------------
/Two Layer Stacked LSTM/Two Later Stacked LSTM.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import tensorflow as tf
3 | from sklearn import datasets
4 | from sklearn.cross_validation import train_test_split
5 | import pylab as pl
6 | from IPython import display
7 | import sys
8 |
9 |
10 | # # STACKED LSTM class and functions
11 |
class LSTM_cell(object):

    """
    Two-layer stacked LSTM cell which takes 3 arguments for initialization.
    input_size = Input Vector size
    hidden_layer_size = Hidden layer size
    target_size = Output vector size

    """

    def __init__(self, input_size, hidden_layer_size, target_size):

        # Initialization of given values
        self.input_size = input_size
        self.hidden_layer_size = hidden_layer_size
        self.target_size = target_size

        # Layer-1 gate parameters: input (i), forget (f), output (og) and
        # candidate/new-memory (c) gates. Layer 1 consumes the raw input,
        # so its W matrices are [input_size, hidden_layer_size].
        # NOTE: _gate_params creates W, U, b in the same order as the
        # original inline code, so variable-creation order (and therefore
        # any seeded random initialization) is unchanged.
        self.Wi_l1, self.Ui_l1, self.bi_l1 = self._gate_params(
            self.input_size)
        self.Wf_l1, self.Uf_l1, self.bf_l1 = self._gate_params(
            self.input_size)
        self.Wog_l1, self.Uog_l1, self.bog_l1 = self._gate_params(
            self.input_size)
        self.Wc_l1, self.Uc_l1, self.bc_l1 = self._gate_params(
            self.input_size)

        # Layer-2 gate parameters: layer 2 consumes layer-1 hidden states,
        # so its W matrices are [hidden_layer_size, hidden_layer_size].
        self.Wi_l2, self.Ui_l2, self.bi_l2 = self._gate_params(
            self.hidden_layer_size)
        self.Wf_l2, self.Uf_l2, self.bf_l2 = self._gate_params(
            self.hidden_layer_size)
        self.Wog_l2, self.Uog_l2, self.bog_l2 = self._gate_params(
            self.hidden_layer_size)
        self.Wc_l2, self.Uc_l2, self.bc_l2 = self._gate_params(
            self.hidden_layer_size)

        # Weights for the final (readout) layer
        self.Wo = tf.Variable(tf.truncated_normal(
            [self.hidden_layer_size, self.target_size], mean=0, stddev=.1))
        self.bo = tf.Variable(tf.truncated_normal(
            [self.target_size], mean=0, stddev=.1))

        # Placeholder for input vector with shape[batch, seq, embeddings]
        self._inputs = tf.placeholder(tf.float32,
                                      shape=[None, None, self.input_size],
                                      name='inputs')

        # Transposed to [seq, batch, embeddings] so tf.scan can iterate
        # over time steps.
        self.processed_input = process_batch_input_for_RNN(self._inputs)

        '''
        Initial hidden state's shape is [1,self.hidden_layer_size]
        In First time stamp, we are doing dot product with weights to
        get the shape of [batch_size, self.hidden_layer_size].
        For this dot product tensorflow use broadcasting. But during
        Back propagation a low level error occurs.
        So to solve the problem it was needed to initialize initial
        hiddden state of size [batch_size, self.hidden_layer_size].
        So here is a little hack !!!! Getting the same shaped
        initial hidden state of zeros.
        '''

        # Batch-sized zero state obtained by multiplying an input slice
        # with a zero matrix (see the note above on broadcasting).
        self.initial_hidden = self._inputs[:, 0, :]
        self.initial_hidden = tf.matmul(
            self.initial_hidden, tf.zeros([input_size, hidden_layer_size]))

        # Per-step carried state, packed as [h_l1, c_l1, h_l2, c_l2].
        self.initial_hidden = tf.stack(
            [self.initial_hidden, self.initial_hidden,
             self.initial_hidden, self.initial_hidden])

    def _gate_params(self, in_dim):
        """
        Create the (W, U, b) variables for one LSTM gate.

        W: [in_dim, hidden] input projection,
        U: [hidden, hidden] recurrent projection,
        b: [hidden] bias.
        """
        W = tf.Variable(tf.truncated_normal(
            [in_dim, self.hidden_layer_size]))
        U = tf.Variable(tf.truncated_normal(
            [self.hidden_layer_size, self.hidden_layer_size]))
        b = tf.Variable(tf.truncated_normal([self.hidden_layer_size]))
        return W, U, b

    # Function for LSTM cell.
    def Lstm(self, previous_hidden_memory_tuple, x):
        """
        One stacked-LSTM step: takes the packed previous state
        [h_l1, c_l1, h_l2, c_l2] and the current input x, and returns
        the packed new state in the same layout.
        """

        (previous_hidden_state_l1, c_prev_l1,
         previous_hidden_state_l2, c_prev_l2) = tf.unstack(
            previous_hidden_memory_tuple)

        # ----- Layer 1 (reads the raw input x) -----

        # Input Gate
        i_l1 = tf.sigmoid(
            tf.matmul(x, self.Wi_l1) +
            tf.matmul(previous_hidden_state_l1, self.Ui_l1) + self.bi_l1
        )

        # Forget Gate
        f_l1 = tf.sigmoid(
            tf.matmul(x, self.Wf_l1) +
            tf.matmul(previous_hidden_state_l1, self.Uf_l1) + self.bf_l1
        )

        # Output Gate
        o_l1 = tf.sigmoid(
            tf.matmul(x, self.Wog_l1) +
            tf.matmul(previous_hidden_state_l1, self.Uog_l1) + self.bog_l1
        )

        # New (candidate) Memory Cell
        c__l1 = tf.nn.tanh(
            tf.matmul(x, self.Wc_l1) +
            tf.matmul(previous_hidden_state_l1, self.Uc_l1) + self.bc_l1
        )

        # Final Memory cell
        c_l1 = f_l1 * c_prev_l1 + i_l1 * c__l1

        # Current Hidden state
        current_hidden_state_l1 = o_l1 * tf.nn.tanh(c_l1)

        # ----- Layer 2 (reads layer 1's hidden state) -----

        # Input Gate for layer 2
        i_l2 = tf.sigmoid(
            tf.matmul(current_hidden_state_l1, self.Wi_l2) +
            tf.matmul(previous_hidden_state_l2, self.Ui_l2) + self.bi_l2
        )

        # Forget Gate for layer 2
        f_l2 = tf.sigmoid(
            tf.matmul(current_hidden_state_l1, self.Wf_l2) +
            tf.matmul(previous_hidden_state_l2, self.Uf_l2) + self.bf_l2
        )

        # Output Gate for layer 2
        o_l2 = tf.sigmoid(
            tf.matmul(current_hidden_state_l1, self.Wog_l2) +
            tf.matmul(previous_hidden_state_l2, self.Uog_l2) + self.bog_l2
        )

        # New (candidate) Memory Cell for layer 2
        c__l2 = tf.nn.tanh(
            tf.matmul(current_hidden_state_l1, self.Wc_l2) +
            tf.matmul(previous_hidden_state_l2, self.Uc_l2) + self.bc_l2
        )

        # Final Memory cell for layer 2
        c_l2 = f_l2 * c_prev_l2 + i_l2 * c__l2

        # Current Hidden state for layer 2
        current_hidden_state_l2 = o_l2 * tf.nn.tanh(c_l2)

        return tf.stack([current_hidden_state_l1,
                         c_l1, current_hidden_state_l2, c_l2])

    # Function for getting all hidden state.
    def get_states(self):
        """
        Iterates through time/ sequence to get all layer-2 hidden states.
        """

        # Scan output shape: [seq, 4, batch, hidden] where axis 1 is
        # packed as [h_l1, c_l1, h_l2, c_l2].
        all_hidden_states = tf.scan(self.Lstm,
                                    self.processed_input,
                                    initializer=self.initial_hidden,
                                    name='states')
        # BUGFIX: the network output is layer-2's HIDDEN state (index 2).
        # The original selected index 3, i.e. the raw layer-2 memory cell.
        all_hidden_states = all_hidden_states[:, 2, :, :]

        return all_hidden_states

    # Function to get output from a hidden layer
    def get_output(self, hidden_state):
        """
        This function takes a hidden state and returns the ReLU readout.
        """
        output = tf.nn.relu(tf.matmul(hidden_state, self.Wo) + self.bo)

        return output

    # Function for getting all output layers
    def get_outputs(self):
        """
        Iterating through hidden states to get outputs for all timestamps.
        """
        all_hidden_states = self.get_states()

        all_outputs = tf.map_fn(self.get_output, all_hidden_states)

        return all_outputs
222 |
223 |
# Function to convert batch input data to use scan ops of tensorflow.
def process_batch_input_for_RNN(batch_input):
    """
    Move the batch axis behind the time axis so tf.scan can iterate over
    time steps: [batch, seq, embedding] -> [seq, batch, embedding]
    (e.g. a tensor of size [5, 3, 2] becomes [3, 5, 2]).

    The original composed two transposes (perm=[2, 0, 1] followed by a
    full reversal); their composition is exactly the single permutation
    [1, 0, 2] used here.
    """
    return tf.transpose(batch_input, perm=[1, 0, 2])
233 |
234 |
# # Placeholder and initializers

hidden_layer_size = 30
input_size = 8
target_size = 10

# Targets: one-hot class labels, shape [batch, target_size].
# NOTE(review): the placeholder is named 'inputs' (apparent copy-paste
# from the input placeholder); kept as-is so graph node names do not
# change for existing tooling.
y = tf.placeholder(tf.float32, shape=[None, target_size], name='inputs')


# # Models


# Initializing rnn object
rnn = LSTM_cell(input_size, hidden_layer_size, target_size)


# Getting all outputs from rnn, shape [seq, batch, target_size]
outputs = rnn.get_outputs()

# Getting final output through indexing the last time step
last_output = outputs[-1]


# As the rnn model emits ReLU activations, softmax turns the final
# output into class probabilities.
output = tf.nn.softmax(last_output)


# Computing the Cross Entropy loss. The probabilities are clipped away
# from 0 so tf.log cannot produce -inf/NaN when a softmax entry
# underflows.
cross_entropy = -tf.reduce_sum(y * tf.log(tf.clip_by_value(output, 1e-10, 1.0)))


# Training with the Adam optimizer (the original comment said Adadelta,
# but AdamOptimizer is what is actually used).
train_step = tf.train.AdamOptimizer().minimize(cross_entropy)


# Calculation of correct prediction and accuracy (in percent)
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(output, 1))
accuracy = (tf.reduce_mean(tf.cast(correct_prediction, tf.float32))) * 100
276 |
277 |
278 | # # Dataset Preparation
279 |
280 |
# Function to get a one-hot vector for a class label.
def get_on_hot(number, num_classes=10):
    """
    Return a one-hot list of length `num_classes` with index `number`
    set to 1. The class count is parameterized (default 10 preserves
    the original hard-coded behavior for the digits dataset).
    """
    on_hot = [0] * num_classes
    on_hot[number] = 1
    return on_hot
286 |
287 |
# Using Sklearn MNIST dataset.
digits = datasets.load_digits()
X = digits.images   # shape [n_samples, 8, 8]: each row of pixels is one time step
Y_ = digits.target

# BUGFIX: wrap map() in list() — on Python 3, map returns a lazy
# iterator that train_test_split cannot index; on Python 2 this is
# identical to the original behavior.
Y = list(map(get_on_hot, Y_))


# Getting Train and test Dataset
X_train, X_test, y_train, y_test = train_test_split(
    X, Y, test_size=0.22, random_state=42)

# Cutting for simple iteration: 14 mini-batches of 100 cover 1400 samples.
X_train = X_train[:1400]
y_train = y_train[:1400]


sess = tf.InteractiveSession()
sess.run(tf.initialize_all_variables())


# Iterations to do training
for epoch in range(200):

    start = 0
    end = 100
    for i in range(14):

        X = X_train[start:end]
        Y = y_train[start:end]
        start = end
        end = start + 100
        sess.run(train_step, feed_dict={rnn._inputs: X, y: Y})

    # Loss is reported on the last mini-batch; accuracy on the full
    # train/test splits.
    Loss = str(sess.run(cross_entropy, feed_dict={rnn._inputs: X, y: Y}))
    Train_accuracy = str(sess.run(accuracy, feed_dict={
        rnn._inputs: X_train, y: y_train}))
    Test_accuracy = str(sess.run(accuracy, feed_dict={
        rnn._inputs: X_test, y: y_test}))

    sys.stdout.flush()
    # Trailing comma suppresses the newline (Python 2 style) so "\r"
    # overwrites the same console line each epoch.
    print("\rIteration: %s Loss: %s Train Accuracy: %s Test Accuracy: %s" %
          (epoch, Loss, Train_accuracy, Test_accuracy)),
    sys.stdout.flush()
332 |
--------------------------------------------------------------------------------
/Tensorboard/Tensorboard.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 |     "# This is a simple handwriting recognition example with a conv-net to show how to use TensorBoard"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "# Importing libraries"
15 | ]
16 | },
17 | {
18 | "cell_type": "code",
19 | "execution_count": 1,
20 | "metadata": {
21 | "collapsed": false
22 | },
23 | "outputs": [],
24 | "source": [
25 | "import tensorflow as tf\n",
26 | "from tensorflow.examples.tutorials.mnist import input_data\n",
27 | "import os"
28 | ]
29 | },
30 | {
31 | "cell_type": "markdown",
32 | "metadata": {},
33 | "source": [
34 | "# Defining models placeholders"
35 | ]
36 | },
37 | {
38 | "cell_type": "code",
39 | "execution_count": 2,
40 | "metadata": {
41 | "collapsed": false
42 | },
43 | "outputs": [],
44 | "source": [
45 | "x = tf.placeholder(tf.float32, shape=[None, 784],name = \"Image\")\n",
46 | "y_ = tf.placeholder(tf.float32, shape=[None, 10],name = \"Correct_Image_label\")\n",
47 | "\n",
48 | "#Reshaping the input for convnet\n",
49 | "x_image = tf.reshape(x, [-1,28,28,1],name = \"Reshaped_Image\")"
50 | ]
51 | },
52 | {
53 | "cell_type": "markdown",
54 | "metadata": {},
55 | "source": [
56 |     "# Defining some important functions"
57 | ]
58 | },
59 | {
60 | "cell_type": "code",
61 | "execution_count": 3,
62 | "metadata": {
63 | "collapsed": true
64 | },
65 | "outputs": [],
66 | "source": [
67 | "def weight_variable(shape):\n",
68 | " initial = tf.truncated_normal(shape, stddev=0.1)\n",
69 | " return tf.Variable(initial)\n",
70 | "\n",
71 | "def bias_variable(shape):\n",
72 | " initial = tf.constant(0.1, shape=shape)\n",
73 | " return tf.Variable(initial)"
74 | ]
75 | },
76 | {
77 | "cell_type": "code",
78 | "execution_count": 4,
79 | "metadata": {
80 | "collapsed": true
81 | },
82 | "outputs": [],
83 | "source": [
84 | "def conv2d(x, W):\n",
85 | " return tf.nn.conv2d(x, W, strides=[1, 1, 1, 1], padding='SAME')\n",
86 | "\n",
87 | "def max_pool_2x2(x):\n",
88 | " return tf.nn.max_pool(x, ksize=[1, 2, 2, 1],\n",
89 | " strides=[1, 2, 2, 1], padding='SAME')"
90 | ]
91 | },
92 | {
93 | "cell_type": "markdown",
94 | "metadata": {},
95 | "source": [
96 |     "# Defining variables"
97 | ]
98 | },
99 | {
100 | "cell_type": "code",
101 | "execution_count": 5,
102 | "metadata": {
103 | "collapsed": false
104 | },
105 | "outputs": [],
106 | "source": [
107 | "#Variables for first convolutional layers\n",
108 | "W_conv1 = weight_variable([5, 5, 1, 32])\n",
109 | "b_conv1 = bias_variable([32])\n",
110 | "\n",
111 | "#Variables for second convolutional layers\n",
112 | "W_conv2 = weight_variable([5, 5, 32, 64])\n",
113 | "b_conv2 = bias_variable([64])\n",
114 | "\n",
115 | "#Variables for first fully connected layers\n",
116 | "W_fc1 = weight_variable([7 * 7 * 64, 1024])\n",
117 | "b_fc1 = bias_variable([1024])\n",
118 | "\n",
119 | "#Variables for final fully connected layers\n",
120 | "W_fc2 = weight_variable([1024, 10])\n",
121 | "b_fc2 = bias_variable([10])"
122 | ]
123 | },
124 | {
125 | "cell_type": "markdown",
126 | "metadata": {},
127 | "source": [
128 |     "# Adding the layers of the graph"
129 | ]
130 | },
131 | {
132 | "cell_type": "code",
133 | "execution_count": 6,
134 | "metadata": {
135 | "collapsed": true
136 | },
137 | "outputs": [],
138 | "source": [
139 | "# First convolutional layer\n",
140 | "h_conv1 = tf.nn.relu(conv2d(x_image, W_conv1) + b_conv1)\n",
141 | "\n",
142 | "# First pooling layer\n",
143 | "h_pool1 = max_pool_2x2(h_conv1)"
144 | ]
145 | },
146 | {
147 | "cell_type": "code",
148 | "execution_count": 7,
149 | "metadata": {
150 | "collapsed": true
151 | },
152 | "outputs": [],
153 | "source": [
154 | "# Second convolutional layer\n",
155 | "h_conv2 = tf.nn.relu(conv2d(h_pool1, W_conv2) + b_conv2)\n",
156 | "\n",
157 | "# Second pooling layer\n",
158 | "h_pool2 = max_pool_2x2(h_conv2)"
159 | ]
160 | },
161 | {
162 | "cell_type": "code",
163 | "execution_count": 8,
164 | "metadata": {
165 | "collapsed": true
166 | },
167 | "outputs": [],
168 | "source": [
169 | "# Flattening the final pooled layer\n",
170 | "h_pool2_flat = tf.reshape(h_pool2, [-1, 7*7*64])\n",
171 | "\n",
172 | "#First fully connected layer\n",
173 | "h_fc1 = tf.nn.relu(tf.matmul(h_pool2_flat, W_fc1) + b_fc1)"
174 | ]
175 | },
176 | {
177 | "cell_type": "code",
178 | "execution_count": 9,
179 | "metadata": {
180 | "collapsed": true
181 | },
182 | "outputs": [],
183 | "source": [
184 | "# Introducing dropoutacc\n",
185 | "keep_prob = tf.placeholder(tf.float32)\n",
186 | "h_fc1_drop = tf.nn.dropout(h_fc1, keep_prob)"
187 | ]
188 | },
189 | {
190 | "cell_type": "code",
191 | "execution_count": 10,
192 | "metadata": {
193 | "collapsed": true
194 | },
195 | "outputs": [],
196 | "source": [
197 | "# Final softmax output\n",
198 | "y_conv=tf.nn.softmax(tf.matmul(h_fc1_drop, W_fc2) + b_fc2)"
199 | ]
200 | },
201 | {
202 | "cell_type": "markdown",
203 | "metadata": {},
204 | "source": [
205 | "### Loss "
206 | ]
207 | },
208 | {
209 | "cell_type": "code",
210 | "execution_count": 11,
211 | "metadata": {
212 | "collapsed": false
213 | },
214 | "outputs": [],
215 | "source": [
216 | "cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y_conv), reduction_indices=[1]))"
217 | ]
218 | },
219 | {
220 | "cell_type": "markdown",
221 | "metadata": {},
222 | "source": [
223 |     "### Training step"
224 | ]
225 | },
226 | {
227 | "cell_type": "code",
228 | "execution_count": 12,
229 | "metadata": {
230 | "collapsed": true
231 | },
232 | "outputs": [],
233 | "source": [
234 | "#Trainning step\n",
235 | "train_step = tf.train.AdamOptimizer(1e-4).minimize(cross_entropy)"
236 | ]
237 | },
238 | {
239 | "cell_type": "markdown",
240 | "metadata": {},
241 | "source": [
242 | "### Accuracy"
243 | ]
244 | },
245 | {
246 | "cell_type": "code",
247 | "execution_count": 13,
248 | "metadata": {
249 | "collapsed": true
250 | },
251 | "outputs": [],
252 | "source": [
253 | "#Correct prediction\n",
254 | "correct_prediction = tf.equal(tf.argmax(y_conv,1), tf.argmax(y_,1))\n",
255 | "\n",
256 | "#Accuracy\n",
257 | "accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))\n"
258 | ]
259 | },
260 | {
261 | "cell_type": "markdown",
262 | "metadata": {},
263 | "source": [
264 |     "# Creating a summary for log data"
265 | ]
266 | },
267 | {
268 | "cell_type": "code",
269 | "execution_count": 14,
270 | "metadata": {
271 | "collapsed": true
272 | },
273 | "outputs": [],
274 | "source": [
275 | "# Create a summary to monitor loss tensor\n",
276 | "tf.scalar_summary(\"loss\", cross_entropy)\n",
277 | "\n",
278 | "# Create a summary to monitor accuracy tensor\n",
279 | "tf.scalar_summary(\"accuracy\", accuracy)\n",
280 | "\n",
281 | "# Merge all summaries into a single op\n",
282 | "merged_summary_op = tf.merge_all_summaries()"
283 | ]
284 | },
285 | {
286 | "cell_type": "markdown",
287 | "metadata": {},
288 | "source": [
289 | "# Initializing the session variables"
290 | ]
291 | },
292 | {
293 | "cell_type": "code",
294 | "execution_count": 15,
295 | "metadata": {
296 | "collapsed": true
297 | },
298 | "outputs": [],
299 | "source": [
300 | "sess = tf.InteractiveSession()\n",
301 | "sess.run(tf.initialize_all_variables())"
302 | ]
303 | },
304 | {
305 | "cell_type": "markdown",
306 | "metadata": {},
307 | "source": [
308 |     "# Defining the log directory where the summary will remain, and also the summary writer"
309 | ]
310 | },
311 | {
312 | "cell_type": "code",
313 | "execution_count": 16,
314 | "metadata": {
315 | "collapsed": true
316 | },
317 | "outputs": [],
318 | "source": [
319 | "logs_path = os.path.join(os.getcwd(),\"logdata\")\n",
320 | "summary_writer = tf.train.SummaryWriter(logs_path, graph=tf.get_default_graph())"
321 | ]
322 | },
323 | {
324 | "cell_type": "markdown",
325 | "metadata": {},
326 | "source": [
327 | "# Loading the MNIST dataset"
328 | ]
329 | },
330 | {
331 | "cell_type": "code",
332 | "execution_count": 17,
333 | "metadata": {
334 | "collapsed": false
335 | },
336 | "outputs": [
337 | {
338 | "name": "stdout",
339 | "output_type": "stream",
340 | "text": [
341 | "Extracting MNIST_data/train-images-idx3-ubyte.gz\n",
342 | "Extracting MNIST_data/train-labels-idx1-ubyte.gz\n",
343 | "Extracting MNIST_data/t10k-images-idx3-ubyte.gz\n",
344 | "Extracting MNIST_data/t10k-labels-idx1-ubyte.gz\n"
345 | ]
346 | }
347 | ],
348 | "source": [
349 | "mnist = input_data.read_data_sets('MNIST_data', one_hot=True)"
350 | ]
351 | },
352 | {
353 | "cell_type": "markdown",
354 | "metadata": {},
355 | "source": [
356 |     "# Training and writing the log data"
357 | ]
358 | },
359 | {
360 | "cell_type": "code",
361 | "execution_count": 18,
362 | "metadata": {
363 | "collapsed": false
364 | },
365 | "outputs": [
366 | {
367 | "name": "stdout",
368 | "output_type": "stream",
369 | "text": [
370 | "Please go to /home/shezan directory and write 'tensorboard --logdir logdata/' on terminal and hit enter\n",
371 | "Now open the browser and type http://0.0.0.0:6006 and now you can see the tensorboard\n",
372 | "step 0, training accuracy 0.02\n",
373 | "step 100, training accuracy 0.82\n",
374 | "step 200, training accuracy 0.94\n",
375 | "step 300, training accuracy 0.94\n",
376 | "step 400, training accuracy 0.98\n",
377 | "step 500, training accuracy 0.94\n",
378 | "step 600, training accuracy 1\n",
379 | "step 700, training accuracy 0.94\n",
380 | "step 800, training accuracy 0.86\n",
381 | "step 900, training accuracy 1\n",
382 | "step 1000, training accuracy 0.96\n",
383 | "step 1100, training accuracy 1\n"
384 | ]
385 | }
386 | ],
387 | "source": [
388 | "print \"Please go to %s directory and write 'tensorboard --logdir logdata/' on terminal and hit enter\"%os.getcwd()\n",
389 | "print \"Now open the browser and type http://0.0.0.0:6006 and now you can see the tensorboard\"\n",
390 | "\n",
391 | "try:\n",
392 | " for i in range(20000):\n",
393 | "\n",
394 | " #Getting the batch\n",
395 | " batch = mnist.train.next_batch(50)\n",
396 | "\n",
397 | " if i%100 == 0:\n",
398 | " train_accuracy = accuracy.eval(feed_dict={\n",
399 | " x:batch[0], y_: batch[1], keep_prob: 1.0})\n",
400 | " print(\"step %d, training accuracy %g\"%(i, train_accuracy))\n",
401 | "\n",
402 | " #Running the trainning and getting the summery out\n",
403 | " _,summary = sess.run([train_step,merged_summary_op],feed_dict={x: batch[0], y_: batch[1], keep_prob: 0.5})\n",
404 | "\n",
405 | " #Writting the summerys\n",
406 | " summary_writer.add_summary(summary, i)\n",
407 | "\n",
408 | " print(\"test accuracy %g\"%accuracy.eval(feed_dict={\n",
409 | " x: mnist.test.images, y_: mnist.test.labels, keep_prob: 1.0}))\n",
410 | " \n",
411 | "except KeyboardInterrupt:\n",
412 | " pass"
413 | ]
414 | }
415 | ],
416 | "metadata": {
417 | "kernelspec": {
418 | "display_name": "Python 2",
419 | "language": "python",
420 | "name": "python2"
421 | },
422 | "language_info": {
423 | "codemirror_mode": {
424 | "name": "ipython",
425 | "version": 2
426 | },
427 | "file_extension": ".py",
428 | "mimetype": "text/x-python",
429 | "name": "python",
430 | "nbconvert_exporter": "python",
431 | "pygments_lexer": "ipython2",
432 | "version": "2.7.6"
433 | }
434 | },
435 | "nbformat": 4,
436 | "nbformat_minor": 0
437 | }
438 |
--------------------------------------------------------------------------------
/BiDirectional LSTM/bi_directional_lstm.py:
--------------------------------------------------------------------------------
1 | import tensorflow as tf
2 | from sklearn import datasets
3 | from sklearn.cross_validation import train_test_split
4 | import pylab as pl
5 | from IPython import display
6 | import sys
7 |
8 |
9 | # # Bi-LSTM class and functions
10 |
class Bi_LSTM_cell(object):

    """
    Bi directional LSTM cell object which takes 3 arguments for initialization.
    input_size = Input Vector size
    hidden_layer_size = Hidden layer size
    target_size = Output vector size

    """

    def __init__(self, input_size, hidden_layer_size, target_size):

        # Initialization of given values
        self.input_size = input_size
        self.hidden_layer_size = hidden_layer_size
        self.target_size = target_size

        # Weights and Bias for input and hidden tensor for forward pass.
        # NOTE(review): gate weights are zero-initialized, unlike the
        # truncated_normal init used by the sibling stacked-LSTM file;
        # with all-zero W/U every gate starts at sigmoid(0)=0.5 and
        # training is slow. Kept as-is to preserve behavior — confirm
        # whether this is intentional.
        self.Wi = tf.Variable(tf.zeros(
            [self.input_size, self.hidden_layer_size]))
        self.Ui = tf.Variable(tf.zeros(
            [self.hidden_layer_size, self.hidden_layer_size]))
        self.bi = tf.Variable(tf.zeros([self.hidden_layer_size]))

        self.Wf = tf.Variable(tf.zeros(
            [self.input_size, self.hidden_layer_size]))
        self.Uf = tf.Variable(tf.zeros(
            [self.hidden_layer_size, self.hidden_layer_size]))
        self.bf = tf.Variable(tf.zeros([self.hidden_layer_size]))

        self.Wog = tf.Variable(tf.zeros(
            [self.input_size, self.hidden_layer_size]))
        self.Uog = tf.Variable(tf.zeros(
            [self.hidden_layer_size, self.hidden_layer_size]))
        self.bog = tf.Variable(tf.zeros([self.hidden_layer_size]))

        self.Wc = tf.Variable(tf.zeros(
            [self.input_size, self.hidden_layer_size]))
        self.Uc = tf.Variable(tf.zeros(
            [self.hidden_layer_size, self.hidden_layer_size]))
        self.bc = tf.Variable(tf.zeros([self.hidden_layer_size]))

        # Weights and Bias for input and hidden tensor for backward pass
        self.Wi1 = tf.Variable(tf.zeros(
            [self.input_size, self.hidden_layer_size]))
        self.Ui1 = tf.Variable(tf.zeros(
            [self.hidden_layer_size, self.hidden_layer_size]))
        self.bi1 = tf.Variable(tf.zeros([self.hidden_layer_size]))

        self.Wf1 = tf.Variable(tf.zeros(
            [self.input_size, self.hidden_layer_size]))
        self.Uf1 = tf.Variable(tf.zeros(
            [self.hidden_layer_size, self.hidden_layer_size]))
        self.bf1 = tf.Variable(tf.zeros([self.hidden_layer_size]))

        self.Wog1 = tf.Variable(tf.zeros(
            [self.input_size, self.hidden_layer_size]))
        self.Uog1 = tf.Variable(tf.zeros(
            [self.hidden_layer_size, self.hidden_layer_size]))
        self.bog1 = tf.Variable(tf.zeros([self.hidden_layer_size]))

        self.Wc1 = tf.Variable(tf.zeros(
            [self.input_size, self.hidden_layer_size]))
        self.Uc1 = tf.Variable(tf.zeros(
            [self.hidden_layer_size, self.hidden_layer_size]))
        self.bc1 = tf.Variable(tf.zeros([self.hidden_layer_size]))

        # Weights for output layers: forward and backward hidden states
        # are concatenated, hence hidden_layer_size * 2.
        self.Wo = tf.Variable(tf.truncated_normal(
            [self.hidden_layer_size * 2, self.target_size],
            mean=0, stddev=.01))
        self.bo = tf.Variable(tf.truncated_normal(
            [self.target_size], mean=0, stddev=.01))

        # Placeholder for input vector with shape[batch, seq, embeddings]
        self._inputs = tf.placeholder(tf.float32,
                                      shape=[None, None, self.input_size],
                                      name='inputs')

        # Reversing the inputs along the sequence axis (axis 1) for the
        # backward pass. BUGFIX: in TF >= 1.0 (the API generation of the
        # tf.stack/tf.unstack calls used below) tf.reverse takes a list
        # of axis indices; the pre-1.0 boolean mask
        # [False, True, False] is rejected there.
        self._inputs_rev = tf.reverse(self._inputs, [1])

        # Processing inputs to work with scan function
        self.processed_input = process_batch_input_for_RNN(self._inputs)

        # For backward pass of the LSTM
        self.processed_input_rev = process_batch_input_for_RNN(
            self._inputs_rev)

        '''
        Initial hidden state's shape is [1,self.hidden_layer_size]
        In First time stamp, we are doing dot product with weights to
        get the shape of [batch_size, self.hidden_layer_size].
        For this dot product tensorflow use broadcasting. But during
        Back propagation a low level error occurs.
        So to solve the problem it was needed to initialize initial
        hiddden state of size [batch_size, self.hidden_layer_size].
        So here is a little hack !!!! Getting the same shaped
        initial hidden state of zeros.
        '''

        self.initial_hidden = self._inputs[:, 0, :]
        self.initial_hidden = tf.matmul(
            self.initial_hidden, tf.zeros([input_size, hidden_layer_size]))

        # Per-step carried state, packed as [hidden, memory].
        self.initial_hidden = tf.stack(
            [self.initial_hidden, self.initial_hidden])

    # Function for Forward LSTM cell.
    def Lstm_f(self, previous_hidden_memory_tuple, x):
        """
        One forward-direction LSTM step: takes the packed previous
        [hidden, memory] state with the current input and returns the
        packed new state.
        """

        previous_hidden_state, c_prev = tf.unstack(previous_hidden_memory_tuple)

        # Input Gate
        i = tf.sigmoid(
            tf.matmul(x, self.Wi) +
            tf.matmul(previous_hidden_state, self.Ui) + self.bi
        )

        # Forget Gate
        f = tf.sigmoid(
            tf.matmul(x, self.Wf) +
            tf.matmul(previous_hidden_state, self.Uf) + self.bf
        )

        # Output Gate
        o = tf.sigmoid(
            tf.matmul(x, self.Wog) +
            tf.matmul(previous_hidden_state, self.Uog) + self.bog
        )

        # New (candidate) Memory Cell
        c_ = tf.nn.tanh(
            tf.matmul(x, self.Wc) +
            tf.matmul(previous_hidden_state, self.Uc) + self.bc
        )

        # Final Memory cell
        c = f * c_prev + i * c_

        # Current Hidden state
        current_hidden_state = o * tf.nn.tanh(c)

        return tf.stack([current_hidden_state, c])

    # Function for Backward LSTM cell.
    def Lstm_b(self, previous_hidden_memory_tuple, x):
        """
        One backward-direction LSTM step (same structure as Lstm_f but
        with the backward-pass parameter set Wi1/Ui1/...).
        """

        previous_hidden_state, c_prev = tf.unstack(previous_hidden_memory_tuple)

        # Input Gate
        i = tf.sigmoid(
            tf.matmul(x, self.Wi1) +
            tf.matmul(previous_hidden_state, self.Ui1) + self.bi1
        )

        # Forget Gate
        f = tf.sigmoid(
            tf.matmul(x, self.Wf1) +
            tf.matmul(previous_hidden_state, self.Uf1) + self.bf1
        )

        # Output Gate
        o = tf.sigmoid(
            tf.matmul(x, self.Wog1) +
            tf.matmul(previous_hidden_state, self.Uog1) + self.bog1
        )

        # New (candidate) Memory Cell
        c_ = tf.nn.tanh(
            tf.matmul(x, self.Wc1) +
            tf.matmul(previous_hidden_state, self.Uc1) + self.bc1
        )

        # Final Memory cell
        c = f * c_prev + i * c_

        # Current Hidden state
        current_hidden_state = o * tf.nn.tanh(c)

        return tf.stack([current_hidden_state, c])

    # Function to get the hidden and memory cells after forward pass
    def get_states_f(self):
        """
        Iterates through time/ sequence to get all forward hidden and
        memory states, each of shape [seq, batch, hidden].
        """

        # Getting all hidden state throuh time
        all_hidden_memory_states = tf.scan(self.Lstm_f,
                                           self.processed_input,
                                           initializer=self.initial_hidden,
                                           name='states')

        all_hidden_states = all_hidden_memory_states[:, 0, :, :]
        all_memory_states = all_hidden_memory_states[:, 1, :, :]

        return all_hidden_states, all_memory_states

    # Function to get the hidden and memory cells after backward pass
    def get_states_b(self):
        """
        Iterates through the sequence in reverse to get all backward
        hidden and memory states, returned in ORIGINAL time order.
        """

        all_hidden_states, all_memory_states = self.get_states_f()

        # Seed the backward pass with the final hidden and memory state
        # of the forward pass.
        last_hidden_states = all_hidden_states[-1]
        last_memory_states = all_memory_states[-1]
        initial_hidden = tf.stack([last_hidden_states, last_memory_states])

        # Getting all hidden state throuh (reversed) time
        all_hidden_memory_states = tf.scan(self.Lstm_b,
                                           self.processed_input_rev,
                                           initializer=initial_hidden,
                                           name='states')

        # BUGFIX: the original returned the FORWARD states here (the
        # extraction below was commented out), discarding the backward
        # scan entirely. Extract the backward states and flip the time
        # axis back so they align with the forward states.
        all_hidden_states = tf.reverse(
            all_hidden_memory_states[:, 0, :, :], [0])
        all_memory_states = tf.reverse(
            all_hidden_memory_states[:, 1, :, :], [0])

        return all_hidden_states, all_memory_states

    # Function to concat the hiddenstates for backward and forward pass
    def get_concat_hidden(self):
        """
        Concatenate forward and backward hidden states along the feature
        axis, giving [seq, batch, hidden * 2].
        """

        # Getting hidden and memory for the forward pass
        all_hidden_states_f, all_memory_states_f = self.get_states_f()

        # Getting hidden and memory for the backward pass
        all_hidden_states_b, all_memory_states_b = self.get_states_b()

        # Concating the hidden states of forward and backward pass
        concat_hidden = tf.concat(
            [all_hidden_states_f, all_hidden_states_b], 2)

        return concat_hidden

    # Function to get output from a hidden layer
    def get_output(self, hidden_state):
        """
        This function takes a concatenated hidden state and returns the
        sigmoid readout.
        """
        output = tf.nn.sigmoid(tf.matmul(hidden_state, self.Wo) + self.bo)

        return output

    # Function for getting all output layers
    def get_outputs(self):
        """
        Iterating through hidden states to get outputs for all timestamps.
        """
        all_hidden_states = self.get_concat_hidden()

        all_outputs = tf.map_fn(self.get_output, all_hidden_states)

        return all_outputs
285 |
286 |
# Function to convert batch input data to use scan ops of tensorflow.
def process_batch_input_for_RNN(batch_input):
    """
    Move the time axis to the front: [batch, seq, embedding] ->
    [seq, batch, embedding], the layout tf.scan iterates over.

    The original pair of transposes (perm=[2, 0, 1] followed by a full
    reverse, i.e. perm=[2, 1, 0]) composes to exactly this single
    explicit transpose, so one op replaces two.
    """
    return tf.transpose(batch_input, perm=[1, 0, 2])
296 |
297 |
# # Placeholder and initializers

# Network hyper-parameters
hidden_layer_size = 30
input_size = 8
target_size = 10


# Placeholder for the one-hot target labels, shape [batch, target_size]
# (NOTE(review): the placeholder is named 'inputs' although it holds
# targets — kept as-is for graph/checkpoint compatibility)
y = tf.placeholder(tf.float32, shape=[None, target_size], name='inputs')


# # Models

# Initializing the bidirectional LSTM object
rnn = Bi_LSTM_cell(input_size, hidden_layer_size, target_size)

# Getting outputs for every time step from the rnn
outputs = rnn.get_outputs()


# Getting the LAST time step's output through indexing
last_output = outputs[-1]

# The per-step outputs are sigmoid activations; softmax turns the last
# one into a class probability distribution.
output = tf.nn.softmax(last_output)

# Computing the Cross Entropy loss
cross_entropy = -tf.reduce_sum(y * tf.log(output))

# Training with the Adam optimizer
train_step = tf.train.AdamOptimizer().minimize(cross_entropy)


# Calculation of correct prediction and accuracy (as a percentage)
correct_prediction = tf.equal(tf.argmax(y, 1), tf.argmax(output, 1))
accuracy = (tf.reduce_mean(tf.cast(correct_prediction, tf.float32))) * 100
334 |
335 |
336 | # # Dataset Preparation
337 |
# Function to get a one-hot encoding of a class label
def get_on_hot(number, size=10):
    """
    Return a one-hot list of length `size` (default 10, the number of
    digit classes) with a 1 at index `number`.

    The `size` parameter generalizes the original hard-coded 10 while
    keeping all existing call sites working unchanged.
    """
    on_hot = [0] * size
    on_hot[number] = 1
    return on_hot
343 |
344 |
# Using Sklearn MNIST dataset.
digits = datasets.load_digits()
X = digits.images
Y_ = digits.target

# BUG FIX: in Python 3, map() returns a lazy iterator, which breaks
# train_test_split and the list slicing below — materialize it.
Y = list(map(get_on_hot, Y_))


# Getting Train and test Dataset
X_train, X_test, y_train, y_test = train_test_split(
    X, Y, test_size=0.22, random_state=42)

# Cutting down for simple fixed-size iteration (14 mini-batches of 100)
X_train = X_train[:1400]
y_train = y_train[:1400]


sess = tf.InteractiveSession()
# tf.initialize_all_variables() is deprecated; use the replacement
# (matches the notebooks elsewhere in this repo).
sess.run(tf.global_variables_initializer())


# Iterations to do training
for epoch in range(200):

    start = 0
    end = 100
    for i in range(14):

        # One mini-batch of 100 examples
        X = X_train[start:end]
        Y = y_train[start:end]
        start = end
        end = start + 100
        sess.run(train_step, feed_dict={rnn._inputs: X, y: Y})

    # Loss on the last mini-batch; accuracy on held-out slices
    Loss = str(sess.run(cross_entropy, feed_dict={rnn._inputs: X, y: Y}))
    Train_accuracy = str(sess.run(accuracy, feed_dict={
        rnn._inputs: X_train[:500], y: y_train[:500]}))
    Test_accuracy = str(sess.run(accuracy, feed_dict={
        rnn._inputs: X_test, y: y_test}))

    sys.stdout.flush()
    print("\rIteration: %s Loss: %s Train Accuracy: %s Test Accuracy: %s" %
          (epoch, Loss, Train_accuracy, Test_accuracy))
    sys.stdout.flush()
389 |
--------------------------------------------------------------------------------
/Vhanilla_RNN/.ipynb_checkpoints/RNN-checkpoint.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# VANILLA RNN ON 8*8 MNIST DATASET TO PREDICT TEN CLASS"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "\n",
 15 |     "### It's a dynamic sequence and batch vhanilla rnn. This is created with tensorflow scan and map higher ops!!!! \n",
16 | "### This is a base rnn which can be used to create GRU, LSTM, Neural Stack Machine, Neural Turing Machine and RNN-EM and so on!"
17 | ]
18 | },
19 | {
20 | "cell_type": "markdown",
21 | "metadata": {},
22 | "source": [
23 | "# Importing Libraries"
24 | ]
25 | },
26 | {
27 | "cell_type": "code",
28 | "execution_count": 1,
29 | "metadata": {},
30 | "outputs": [
31 | {
32 | "name": "stderr",
33 | "output_type": "stream",
34 | "text": [
35 | "/usr/local/lib/python3.5/dist-packages/sklearn/cross_validation.py:44: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. This module will be removed in 0.20.\n",
36 | " \"This module will be removed in 0.20.\", DeprecationWarning)\n"
37 | ]
38 | }
39 | ],
40 | "source": [
41 | "import numpy as np\n",
42 | "import tensorflow as tf\n",
43 | "from sklearn.datasets import load_digits \n",
44 | "from sklearn.cross_validation import train_test_split\n",
45 | "import pylab as pl\n",
46 | "from IPython import display\n",
47 | "import sys\n",
48 | "%matplotlib inline"
49 | ]
50 | },
51 | {
52 | "cell_type": "markdown",
53 | "metadata": {},
54 | "source": [
55 | "# Vhanilla RNN class and functions"
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": 2,
61 | "metadata": {},
62 | "outputs": [],
63 | "source": [
64 | "class RNN_cell(object):\n",
65 | "\n",
66 | " \"\"\"\n",
67 | " RNN cell object which takes 3 arguments for initialization.\n",
68 | " input_size = Input Vector size\n",
69 | " hidden_layer_size = Hidden layer size\n",
70 | " target_size = Output vector size\n",
71 | "\n",
72 | " \"\"\"\n",
73 | "\n",
74 | " def __init__(self, input_size, hidden_layer_size, target_size):\n",
75 | "\n",
76 | " # Initialization of given values\n",
77 | " self.input_size = input_size\n",
78 | " self.hidden_layer_size = hidden_layer_size\n",
79 | " self.target_size = target_size\n",
80 | "\n",
81 | " # Weights and Bias for input and hidden tensor\n",
82 | " self.Wx = tf.Variable(tf.zeros(\n",
83 | " [self.input_size, self.hidden_layer_size]))\n",
84 | " self.Wh = tf.Variable(tf.zeros(\n",
85 | " [self.hidden_layer_size, self.hidden_layer_size]))\n",
86 | " self.bi = tf.Variable(tf.zeros([self.hidden_layer_size]))\n",
87 | "\n",
88 | " # Weights for output layers\n",
89 | " self.Wo = tf.Variable(tf.truncated_normal(\n",
90 | " [self.hidden_layer_size, self.target_size],mean=0,stddev=.01))\n",
91 | " self.bo = tf.Variable(tf.truncated_normal([self.target_size],mean=0,stddev=.01))\n",
92 | "\n",
93 | " # Placeholder for input vector with shape[batch, seq, embeddings]\n",
94 | " self._inputs = tf.placeholder(tf.float32,\n",
95 | " shape=[None, None, self.input_size],\n",
96 | " name='inputs')\n",
97 | "\n",
98 | " # Processing inputs to work with scan function\n",
99 | " self.processed_input = process_batch_input_for_RNN(self._inputs)\n",
100 | "\n",
101 | " '''\n",
102 | " Initial hidden state's shape is [1,self.hidden_layer_size]\n",
103 | " In First time stamp, we are doing dot product with weights to\n",
104 | " get the shape of [batch_size, self.hidden_layer_size].\n",
105 | " For this dot product tensorflow use broadcasting. But during\n",
106 | " Back propagation a low level error occurs.\n",
107 | " So to solve the problem it was needed to initialize initial\n",
108 | " hiddden state of size [batch_size, self.hidden_layer_size].\n",
109 | " So here is a little hack !!!! Getting the same shaped\n",
110 | " initial hidden state of zeros.\n",
111 | " '''\n",
112 | "\n",
113 | " self.initial_hidden = self._inputs[:, 0, :]\n",
114 | " self.initial_hidden = tf.matmul(\n",
115 | " self.initial_hidden, tf.zeros([input_size, hidden_layer_size]))\n",
116 | "\n",
117 | " # Function for vhanilla RNN.\n",
118 | " def vanilla_rnn(self, previous_hidden_state, x):\n",
119 | " \"\"\"\n",
120 | " This function takes previous hidden state and input and\n",
121 | " outputs current hidden state.\n",
122 | " \"\"\"\n",
123 | " current_hidden_state = tf.tanh(\n",
124 | " tf.matmul(previous_hidden_state, self.Wh) +\n",
125 | " tf.matmul(x, self.Wx) + self.bi)\n",
126 | "\n",
127 | " return current_hidden_state\n",
128 | "\n",
129 | " # Function for getting all hidden state.\n",
130 | " def get_states(self):\n",
131 | " \"\"\"\n",
132 | " Iterates through time/ sequence to get all hidden state\n",
133 | " \"\"\"\n",
134 | "\n",
135 | " # Getting all hidden state throuh time\n",
136 | " all_hidden_states = tf.scan(self.vanilla_rnn,\n",
137 | " self.processed_input,\n",
138 | " initializer=self.initial_hidden,\n",
139 | " name='states')\n",
140 | "\n",
141 | " return all_hidden_states\n",
142 | "\n",
143 | " # Function to get output from a hidden layer\n",
144 | " def get_output(self, hidden_state):\n",
145 | " \"\"\"\n",
146 | " This function takes hidden state and returns output\n",
147 | " \"\"\"\n",
148 | " output = tf.nn.relu(tf.matmul(hidden_state, self.Wo) + self.bo)\n",
149 | "\n",
150 | " return output\n",
151 | "\n",
152 | " # Function for getting all output layers\n",
153 | " def get_outputs(self):\n",
154 | " \"\"\"\n",
155 | " Iterating through hidden states to get outputs for all timestamp\n",
156 | " \"\"\"\n",
157 | " all_hidden_states = self.get_states()\n",
158 | "\n",
159 | " all_outputs = tf.map_fn(self.get_output, all_hidden_states)\n",
160 | "\n",
161 | " return all_outputs\n",
162 | "\n",
163 | "\n",
164 | "# Function to convert batch input data to use scan ops of tensorflow.\n",
165 | "def process_batch_input_for_RNN(batch_input):\n",
166 | " \"\"\"\n",
167 | " Process tensor of size [5,3,2] to [3,5,2]\n",
168 | " \"\"\"\n",
169 | " batch_input_ = tf.transpose(batch_input, perm=[2, 0, 1])\n",
170 | " X = tf.transpose(batch_input_)\n",
171 | "\n",
172 | " return X\n"
173 | ]
174 | },
175 | {
176 | "cell_type": "markdown",
177 | "metadata": {},
178 | "source": [
179 | "# Placeholder and initializers\n"
180 | ]
181 | },
182 | {
183 | "cell_type": "code",
184 | "execution_count": 3,
185 | "metadata": {
186 | "collapsed": true
187 | },
188 | "outputs": [],
189 | "source": [
190 | "hidden_layer_size = 110\n",
191 | "input_size = 8\n",
192 | "target_size = 10"
193 | ]
194 | },
195 | {
196 | "cell_type": "code",
197 | "execution_count": 4,
198 | "metadata": {
199 | "collapsed": true
200 | },
201 | "outputs": [],
202 | "source": [
203 | "y = tf.placeholder(tf.float32, shape=[None, target_size],name='inputs')"
204 | ]
205 | },
206 | {
207 | "cell_type": "markdown",
208 | "metadata": {},
209 | "source": [
210 | "# Models"
211 | ]
212 | },
213 | {
214 | "cell_type": "code",
215 | "execution_count": 5,
216 | "metadata": {
217 | "collapsed": true
218 | },
219 | "outputs": [],
220 | "source": [
221 | "#Initializing rnn object\n",
222 | "rnn=RNN_cell( input_size, hidden_layer_size, target_size)"
223 | ]
224 | },
225 | {
226 | "cell_type": "code",
227 | "execution_count": 6,
228 | "metadata": {},
229 | "outputs": [],
230 | "source": [
231 | "#Getting all outputs from rnn\n",
232 | "outputs = rnn.get_outputs()"
233 | ]
234 | },
235 | {
236 | "cell_type": "code",
237 | "execution_count": 7,
238 | "metadata": {
239 | "collapsed": true
240 | },
241 | "outputs": [],
242 | "source": [
243 | "#Getting final output through indexing after reversing\n",
244 | "last_output = outputs[-1]"
245 | ]
246 | },
247 | {
248 | "cell_type": "code",
249 | "execution_count": 8,
250 | "metadata": {
251 | "collapsed": true
252 | },
253 | "outputs": [],
254 | "source": [
255 | "#As rnn model output the final layer through Relu activation softmax is used for final output.\n",
256 | "output=tf.nn.softmax(last_output)"
257 | ]
258 | },
259 | {
260 | "cell_type": "code",
261 | "execution_count": 9,
262 | "metadata": {
263 | "collapsed": true
264 | },
265 | "outputs": [],
266 | "source": [
267 | "#Computing the Cross Entropy loss \n",
268 | "cross_entropy = -tf.reduce_sum(y * tf.log(output))"
269 | ]
270 | },
271 | {
272 | "cell_type": "code",
273 | "execution_count": 10,
274 | "metadata": {
275 | "collapsed": true
276 | },
277 | "outputs": [],
278 | "source": [
279 | "# Trainning with Adadelta Optimizer\n",
280 | "train_step = tf.train.AdamOptimizer().minimize(cross_entropy)"
281 | ]
282 | },
283 | {
284 | "cell_type": "code",
285 | "execution_count": 11,
286 | "metadata": {
287 | "collapsed": true
288 | },
289 | "outputs": [],
290 | "source": [
291 | "#Calculatio of correct prediction and accuracy\n",
292 | "correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(output,1))\n",
293 | "accuracy = (tf.reduce_mean(tf.cast(correct_prediction, tf.float32)))*100"
294 | ]
295 | },
296 | {
297 | "cell_type": "markdown",
298 | "metadata": {},
299 | "source": [
300 | "# Dataset Preparation"
301 | ]
302 | },
303 | {
304 | "cell_type": "code",
305 | "execution_count": 12,
306 | "metadata": {},
307 | "outputs": [],
308 | "source": [
309 | "sess=tf.InteractiveSession()\n",
310 | "sess.run(tf.global_variables_initializer())"
311 | ]
312 | },
313 | {
314 | "cell_type": "code",
315 | "execution_count": 13,
316 | "metadata": {},
317 | "outputs": [],
318 | "source": [
319 | "#Using Sklearn MNIST dataset.\n",
320 | "digits = load_digits()\n",
321 | "X=digits.images\n",
322 | "Y_=digits.target\n",
323 | "\n",
324 | "# One hot encoding\n",
325 | "Y = sess.run(tf.one_hot(indices=Y_, depth=target_size))"
326 | ]
327 | },
328 | {
329 | "cell_type": "code",
330 | "execution_count": null,
331 | "metadata": {},
332 | "outputs": [],
333 | "source": [
334 | "#Getting Train and test Dataset\n",
335 | "X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.22, random_state=42)\n",
336 | "\n",
337 | "#Cuttting for simple iteration\n",
338 | "X_train=X_train[:1400]\n",
339 | "y_train=y_train[:1400]"
340 | ]
341 | },
342 | {
343 | "cell_type": "code",
344 | "execution_count": null,
345 | "metadata": {
346 | "scrolled": false
347 | },
348 | "outputs": [
349 | {
350 | "data": {
351 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD8CAYAAAB5Pm/hAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAFZ9JREFUeJzt3X+s5fVd5/Hnu5eOl1FJyzLAbGGcVtndsJsdlInhxlqO\n0q5QqxRjmgqro2mWDmCiic0KGnvPZSLUpFZrZFhZ7RYTrSUrtKTLutuM3FribOulUkvbsMWWbqGX\nYYytPxZbwvD2j+/3zpyZe8+55557vufH5zwfyTff8/1+vuecD99wX9/PfL6f8/lGZiJJKtfLxl0B\nSVKzDHpJKpxBL0mFM+glqXAGvSQVzqCXpMIZ9JJUOINekgpn0EtS4c4adwUAzjvvvNy7d++4qyFJ\nU+XRRx/9m8zctdlxExH0e/fuZWVlZdzVkKSpEhFf7uc4u24kqXAGvSQVzqCXpMIZ9JJUOINekgpn\n0EtS4aY66I8ehTvvrNaSpI1NxDj6QRw9ClddBS+8ADt2wJEjsLAw7lpJ0uSZ2hb98nIV8idOVOvl\n5XHXSJIm09QGfatVteTn5qp1qzXuGknSZJrarpuFhaq7Znm5Cnm7bSRpY1Mb9FCFuwEvSb1NbdeN\nJKk/Br0kFc6gl6TCGfSSVDiDXpIKZ9BLUuEMekkqnEEvSYUz6CWpcAa9JBWu2KB3rnpJqkz1XDfd\nOFe9JJ1SZIveueol6ZQig9656iXplCK7bpyrXpJO2TToI+Ji4PeBC4AE7snM90bEucAHgb3AU8Bb\nMvNrERHAe4E3As8DP52Zn2qm+t05V70kVfrpunkR+IXMvBS4ArglIi4FbgWOZOYlwJF6G+Aa4JJ6\nuRG4e+i1liT1bdOgz8zVtRZ5Zv4D8HngVcC1wL31YfcCb65fXwv8flb+D/CKiNg99JpLkvqypZux\nEbEX+G7gE8AFmblaFz1L1bUD1UXgKx1ve7red+Zn3RgRKxGxcvz48S1WW5LUr76DPiK+Dfhj4Ocz\n8+87yzIzqfrv+5aZ92Tm/szcv2vXrq28VZK0BX0FfUS8nCrk/yAz7693H1vrkqnXz9X7nwEu7nj7\nRfU+SdIYbBr09Sia3wM+n5nv6Sh6EDhQvz4AfLhj/09F5Qrg7zq6eCRJI9bPOPrvA34S+ExEPFbv\n+yXgXcB9EfE24MvAW+qyh6iGVj5JNbzyZ4ZaY0nSlmwa9Jn5CBBdiq/a4PgEbtlmvSRJQ1LkFAiS\npFMMekkq3EwGvXPVS5olRU5q1otz1UuaNTPXoneuekmzZuaC3rnqJc2ameu6ca56SbNm5oIenKte\n0myZua4bSZo1Br0kFc6gl6TCGfSSVDiDXpIKZ9BLUuEMekkqnEEvSYUz6CWpcAb9GZzCWFJpZnIK\nhG6cwlhSiWzRd3AKY0klMug7OIWxpBLZddPBKYwllcigP4NTGEsqjV03klQ4g16SCmfQS1LhDHpJ\nKpxBL0mFM+glqXAGvSQVzqCXpMIZ9JJUOINekgpn0EtS4Qx6SSrcpkEfEe+LiOci4vGOfe2IeCYi\nHquXN3aU3RYRT0bEExHxQ01VXJLUn35a9O8Hrt5g/29k5mX18hBARFwKvBX4t/V7DkfE3LAqO24+\nZlDSNNp0muLM/LOI2Nvn510L/FFmfhP4UkQ8CXwvMPXR6GMGJU2r7fTR/2xE/FXdtfPKet+rgK90\nHPN0vW+diLgxIlYiYuX48ePbqMZo+JhBSdNq0KC/G/hO4DJgFfj1rX5AZt6Tmfszc/+uXbsGrMbo\n+JhBSdNqoCdMZeaxtdcR8V+Bj9SbzwAXdxx6Ub1v6vmYQUnTaqCgj4jdmblab14HrI3IeRD4w4h4\nD/AvgUuAT267lhPCxwxKmkabBn1EfABoAedFxNPAItCKiMuABJ4C3g6QmZ+NiPuAzwEvArdk5olm\nqi5J6kdk5rjrwP79+3NlZWXc1ZCkqR
IRj2bm/s2O85exklQ4g16SCmfQS1LhDHpJKpxBL0mFM+gl\nqXAGvSQVzqCXpMIZ9JJUOINekgpn0EtS4Qx6SSqcQS9JhTPoJalwBr0kFc6gl6TCGfSSVDiDXpIK\nZ9BLUuEMekkqnEEvSYUz6CWpcAa9JBXOoJekwhn0Q3L0KNx5Z7WWpEly1rgrUIKjR+Gqq+CFF2DH\nDjhyBBYWxl0rSarYoh+C5eUq5E+cqNbLy+OukSSdYtAPQatVteTn5qp1qzXuGknSKXbdDMHCQtVd\ns7xchbzdNpImiUE/JAsLBrykyWTXjSQVzqCXpMIZ9JJUOINekgpn0EtS4TYN+oh4X0Q8FxGPd+w7\nNyI+GhFfqNevrPdHRPxWRDwZEX8VEd/TZOUlSZvrp0X/fuDqM/bdChzJzEuAI/U2wDXAJfVyI3D3\ncKopzYDVVbjySnj2WcsmrR7jKBumzNx0AfYCj3dsPwHsrl/vBp6oX/8O8BMbHddrufzyy1Nq1Fe/\nmvm612Wurg6vbNj1uOmmzJe9rFpbNln1GEdZH4CV7CfD+zpofdB/veN1rG0DHwFe21F2BNjf5TNv\nBFaAlT179gz0H6lCNRHKTfwhdvu+rdZjfr76UzxzmZ+f3bJuyyTVsamyLRhZ0NfbX8stBn3nYou+\nUJMQyk3+IW6lFdrr87761czrr8/cubPat3Nn5g03VOemo2yxNVhZE5/ZeNljj01GPcZRtgX9Bv2g\no26ORcRugHr9XL3/GeDijuMuqvepVL36GA8dgkcegdtv76/s7LMhAu6+G156qVpHVPsHLfviF+H6\n62Hnzuo7du6EG26AL31p8LJu37e2bLUeu3fDOefAN74B8/PV+pxz4MILTytbajFQWfv1c0P/zMbL\n9u2bjHqMo6wBgwb9g8CB+vUB4MMd+3+qHn1zBfB3mbm6zTpqkg0zsJsI5T4Dr1fZuvDt9n2PPTZY\nmAMcOwYHD9L+bwfg4MHTL5x1GTBQ2dJrTwz9M0dSNin1GEfZkG06qVlEfABoAedFxNPAIvAu4L6I\neBvwZeAt9eEPAW8EngSeB36mgTprEpx9dhVWa+6+u1rm56sgfMc74EMfguefrwLvuuvg3e+uOiy6\nlV14Ye8w7Ajl9p9uoWwt8M4/TPtf3Vz9K2RNH2Xt7z9B++Nzp8rOuECc/L6OVmjPMD/z8wDuvx+A\npaWgfVee3N1ebrO074GT23H+YTgfFpfb1fFdytqt9snPZOkw3HXXtj9zlGVXvr/Fx/Z9bOz1GEdZ\nu9Vm6Prp32l6sY9+wm3Un96rDzgz8+DBqp96fn59f3Vdtvj6ufVl112XefPNufiBt2fefHO1fUYZ\nbbZWVqNN1//ELZd1+75e9W+iHl3KFh9eTNqsWxYfXhx5XbZbNin1GEfZZuizj95pirW5zu6Zw4er\nfd1atf20Xnu1oru0bDvLzmyh9iprL7dZ+tjSye1YCgAWr1ysDu9StmmrqltdetW/i151HLR1126d\nahnGUpCL/dVFhernatD0Yot+AmzUat9s5MkIW9G9WqhNtF63+31NtLDPPLbfsiY+c5Rlk1KPcZRt\nhmEOr2x6MegHMOyx5hsNCexzCNgkB/Yoy5qu/6C2EySabAZ96YY11nyzVnvdn06b0943jYHdRIt4\n2P3KhrK2wqAvwVa7UwYp+5Zv6X1TddQ3Ofsoa+qfwcP8PsNco9Bv0DtN8STbaIz6sMeaP/VU17Hk\n7eU2se+BaugX1RCw2PcA7Xp42Hat3RDdalmvG5RNDE0b5PsGrb/UBIN+3Db6ZWmvHxUN4QdA68q6\n/LCm3WqTi3lyxMba6zODaloCe5Smvf4qi0E/bpu02tstTm+ZQ1+/eNxS2f33nxoieNddp4YO9qnk\nwJaK0E//TtPLTPbRD3gTtNMoR4rYryxNHvzB1ITrNU0AdMyDcbha1z8qGvQHQL3K1rpmev2wxpa5\nNL
0M+nHpMclVr7lINvvF46Blkspl0I9Tl2kCxvXz9V43TiVNL4N+FFZX4a1vhQ9+8PT5pgeYF6XT\noKNdupXZPSOVyVE3I/DsLYd46eOP8OzNGzyAYxNNDE800KXZEtWN2/Hav39/rqysjLsaw3fmnO1r\n5udp/89fPO3m6JrtzFgoabZExKOZuX/T4wz6Bq2u8vg17+DVn/4Q38rz/H928qV91/Hv/uTdp3Xh\neHNU0iD6DXq7bpq0ezfnvfoc5vkGv9yaY55vcN5rmnsupCRtxKBv2IVxjOM/dpA7Wic4/mMHuZD1\nz4V0tIukJtl1MyJ2z0gaNrtuJkB7uU0sxclfoa69Htbsj5LUD1v0w9JtrHzNFr2kYbNFP2obzUIp\nSRPAoN+uXnPHd/CGq6RxMei3a7O542v+CErSuBj029UxC+VSi9Of3CRJE8CgH4aTc8ez/qlOkjRm\nBv02Nf0AbUnaLodXDpFDKCWNksMrm7C6CldeadeMpKli0G/FJmPlHUIpaRLZddOPjnnl2y1oL9f7\n5+fhn/5pXLWSNOPsuhmmjrHySy26jpWXpElk0PejY6w84Fh5SVPFoO9De7lNXPhfiHe+BEC88yXi\ngruHMoTy6FG4885qLUlNsI/+TCOchfLoUbjqKnjhBdixA44cgYWFoXy0pBkwkj76iHgqIj4TEY9F\nxEq979yI+GhEfKFev3I73zFyI5yFcnm5CvkTJ6r18nLjXylpBg2j6+YHMvOyjqvKrcCRzLwEOFJv\nT76OWSjbrxvNLJStVtWSn5ur1q3W0D5akk5qoo/+WuDe+vW9wJsb+I7h63NkzTBnoVxYqLprDh2y\n20ZSc87a5vsT+N8RkcDvZOY9wAWZuVqXPwtcsNEbI+JG4EaAPXv2bLMaQzCmkTULCwa8pGZtt0X/\n2sz8HuAa4JaIeF1nYVZ3eje8c5mZ92Tm/szcv2vXrm1WY/uaHFkjSeM0tFE3EdEG/hH4T0ArM1cj\nYjewnJn/utd7J2rUDU5OJmk6ND7qJiK+NSK+fe018B+Ax4EHgQP1YQeADw/6HY1ZXaX909/h5GSS\nZsJ2um4uAB6JiE8DnwT+R2b+CfAu4A0R8QXg9fX2ZDl0iKVX/z8nJ5M0E2brB1Mdk5NFG7Jd73dy\nMklTqN+um+2Oupkq7ftuZulT7zm5He1qvXj5LbTHUiNJat5MzXXT/pFfJ589SN5e/Wfn7S8jj91E\n+03vHnPNNuY8OJKGodyg73bDdUoe5L02D86v/Eq1NuwlDarcrpv6hmv79tvh8OFT+++/H4DF5V0w\nxF+5DttG8+D4wypJgyivRd8xZw3Qdc6aYU5l0ATnwZE0LMW16Eu54bo2D87ychXytuYlDarM4ZU3\n3QT33EO886Xqxuvb3356940kFWC2nxk7JTdcJWkUiuu6Aabmhut2HD1qt46k/pQZ9LVJv+E6KB9B\nKGkrprvrZnUVrrxy5rpmfAShpK2Y7qAf4fNdJ4lDLyVtxXSOuumYnOw0MzQ5Wa8+evvvpdlQ9qib\njue7tlt0fb5ryRYW4LbbNg55p06Q1Gk6g77j+a5LLUb2fNdpsFn/vROlSbNnekfdnBwrf7har65u\n+pZZsNZ/vzYip7P/3tE60myayhZ9e7lN7HuAOL/6tWucf5jY94AP8ubU1AmHDq0P8l6tfVv6Urmm\n82ZsBx/k3b9uLXpb+tJ08glTWqfbRGlOiSyVbeqD3gd5b83CwvoQ79WvL2n6TX3XjYbDcfnS9LHr\nRluyUUsf7L+XSjCVo240Os6rI00/g149bTavjsMypcln14166vVIQ7t1pOlg0GtT3frvNxuW6U1c\naTIY9BqY0y1I08E+eg1s0OkWwL59aZRs0WtbunXrbKe1b5ePNFwGvRrR6yZur7797VwEvEBIGzPo\n1ZhBWvuDXgS8QEjdGfQauV6t/UEvApN0gfDCoUlj0GssurX2B70I
TMoFwvsPmkQGvSbOIBeBSblA\nNHX/QdoOg15TpdtFoFfZKC8QTdx/WCsf5B6D9yYEQGY2sgBXA08ATwK39jr28ssvT2kS/fmfZ95x\nR7Xut6zX/rPPzpybq9ad5XfcUe2Han3HHf29r4myQf+7mygb9PNmBbCS/eRxPwdtdQHmgL8GXgPs\nAD4NXNrteINes2LYF4EmykZ9YelWVsKFajtl/eg36Jvquvle4MnM/CJARPwRcC3wuYa+T5oKo7wJ\nPeqb18MuG/Uoq0kqG7ampkB4FfCVju2n632SulhYgNtuW//H3muqiSbKek1NPcqyQT+v1/Qb01I2\ndP00+7e6AD8O/G7H9k8Cv33GMTcCK8DKnj17Bvt3i6RGTEoXxqDvmYSup+2U9Ys+u24aeWZsRCwA\n7cz8oXr7tvqicudGx/vMWEnDNOpRSuMa+dTvM2ObCvqzgP8LXAU8A/wFcH1mfnaj4w16Sdq6sT4c\nPDNfjIifBf4X1Qic93ULeUlSsxr7wVRmPgQ81NTnS5L644NHJKlwBr0kFc6gl6TCGfSSVLhGhldu\nuRIRx4EvD/j284C/GWJ1SuF5Wc9zsp7nZL1pOiffkZm7NjtoIoJ+OyJipZ9xpLPG87Ke52Q9z8l6\nJZ4Tu24kqXAGvSQVroSgv2fcFZhQnpf1PCfreU7WK+6cTH0fvSSptxJa9JKkHqY66CPi6oh4IiKe\njIhbx12fcYiI90XEcxHxeMe+cyPioxHxhXr9ynHWcdQi4uKIeDgiPhcRn42In6v3z+x5iYj5iPhk\nRHy6PidL9f5XR8Qn6r+hD0bEjnHXddQiYi4i/jIiPlJvF3dOpjboI2IOuAu4BrgU+ImIuHS8tRqL\n91M9iL3TrcCRzLwEOFJvz5IXgV/IzEuBK4Bb6v83Zvm8fBP4wczcB1wGXB0RVwC/BvxGZn4X8DXg\nbWOs47j8HPD5ju3izsnUBj0dz6XNzBeAtefSzpTM/DPgb8/YfS1wb/36XuDNI63UmGXmamZ+qn79\nD1R/xK9ihs9L/UCif6w3X14vCfwg8N/r/TN1TgAi4iLgh4HfrbeDAs/JNAe9z6Xt7oLMXK1fPwtc\nMM7KjFNE7AW+G/gEM35e6i6Kx4DngI8Cfw18PTNfrA+Zxb+h3wT+M/BSvf0vKPCcTHPQqw/1cyVn\ncmhVRHwb8MfAz2fm33eWzeJ5ycwTmXkZcBHVv4j/zZirNFYR8Sbgucx8dNx1aVpjDx4ZgWeAizu2\nL6r3CY5FxO7MXI2I3VQtuJkSES+nCvk/yMz7690zf14AMvPrEfEwsAC8IiLOqluws/Y39H3Aj0bE\nG4F54BzgvRR4Tqa5Rf8XwCX1HfIdwFuBB8dcp0nxIHCgfn0A+PAY6zJydT/r7wGfz8z3dBTN7HmJ\niF0R8Yr69dnAG6juXTwM/Hh92Eydk8y8LTMvysy9VPnxp5l5AwWek6n+wVR9Jf5NTj2X9lfHXKWR\ni4gPAC2qGfeOAYvAh4D7gD1Us4K+JTPPvGFbrIh4LfBx4DOc6nv9Jap++pk8LxHx76luLM5RNfDu\ny8zbI+I1VAMZzgX+EviPmfnN8dV0PCKiBbwjM99U4jmZ6qCXJG1umrtuJEl9MOglqXAGvSQVzqCX\npMIZ9JJUOINekgpn0EtS4Qx6SSrcPwPmzq2YFmQ2pAAAAABJRU5ErkJggg==\n",
352 | "text/plain": [
353 | ""
354 | ]
355 | },
356 | "metadata": {},
357 | "output_type": "display_data"
358 | },
359 | {
360 | "name": "stdout",
361 | "output_type": "stream",
362 | "text": [
363 | "\r",
364 | "Iteration: 44 Loss: 0.71091 Train Accuracy: 99.9286 Test Accuracy: 96.4646\n"
365 | ]
366 | }
367 | ],
368 | "source": [
369 | "#Iterations to do trainning\n",
370 | "for epoch in range(120):\n",
371 | " \n",
372 | " start=0\n",
373 | " end=100\n",
374 | " for i in range(14):\n",
375 | " \n",
376 | " X=X_train[start:end]\n",
377 | " Y=y_train[start:end]\n",
378 | " start=end\n",
379 | " end=start+100\n",
380 | " sess.run(train_step,feed_dict={rnn._inputs:X, y:Y})\n",
381 | "\n",
382 | " Loss=str(sess.run(cross_entropy,feed_dict={rnn._inputs:X, y:Y}))\n",
383 | " Train_accuracy=str(sess.run(accuracy,feed_dict={rnn._inputs:X_train, y:y_train}))\n",
384 | " Test_accuracy=str(sess.run(accuracy,feed_dict={rnn._inputs:X_test, y:y_test}))\n",
385 | " \n",
386 | " pl.plot([epoch],Loss,'b.',)\n",
387 | " pl.plot([epoch],Train_accuracy,'r*',)\n",
388 | " pl.plot([epoch],Test_accuracy,'g+')\n",
389 | " display.clear_output(wait=True)\n",
390 | " display.display(pl.gcf()) \n",
391 | " \n",
392 | " sys.stdout.flush()\n",
393 | " print(\"\\rIteration: %s Loss: %s Train Accuracy: %s Test Accuracy: %s\"%(epoch,Loss,Train_accuracy,Test_accuracy)),\n",
394 | " sys.stdout.flush()\n"
395 | ]
396 | },
397 | {
398 | "cell_type": "code",
399 | "execution_count": null,
400 | "metadata": {
401 | "collapsed": true
402 | },
403 | "outputs": [],
404 | "source": []
405 | },
406 | {
407 | "cell_type": "code",
408 | "execution_count": null,
409 | "metadata": {
410 | "collapsed": true
411 | },
412 | "outputs": [],
413 | "source": []
414 | }
415 | ],
416 | "metadata": {
417 | "kernelspec": {
418 | "display_name": "Python 3",
419 | "language": "python",
420 | "name": "python3"
421 | },
422 | "language_info": {
423 | "codemirror_mode": {
424 | "name": "ipython",
425 | "version": 3
426 | },
427 | "file_extension": ".py",
428 | "mimetype": "text/x-python",
429 | "name": "python",
430 | "nbconvert_exporter": "python",
431 | "pygments_lexer": "ipython3",
432 | "version": "3.5.2"
433 | }
434 | },
435 | "nbformat": 4,
436 | "nbformat_minor": 1
437 | }
438 |
--------------------------------------------------------------------------------
/Vhanilla_RNN/RNN.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# VANILLA RNN ON 8*8 MNIST DATASET TO PREDICT TEN CLASS"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "\n",
 15 |     "### It's a dynamic sequence and batch vhanilla rnn. This is created with tensorflow scan and map higher ops!!!! \n",
16 | "### This is a base rnn which can be used to create GRU, LSTM, Neural Stack Machine, Neural Turing Machine and RNN-EM and so on!"
17 | ]
18 | },
19 | {
20 | "cell_type": "markdown",
21 | "metadata": {},
22 | "source": [
23 | "# Importing Libraries"
24 | ]
25 | },
26 | {
27 | "cell_type": "code",
28 | "execution_count": 1,
29 | "metadata": {},
30 | "outputs": [
31 | {
32 | "name": "stderr",
33 | "output_type": "stream",
34 | "text": [
35 | "/usr/local/lib/python3.5/dist-packages/sklearn/cross_validation.py:44: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. This module will be removed in 0.20.\n",
36 | " \"This module will be removed in 0.20.\", DeprecationWarning)\n"
37 | ]
38 | }
39 | ],
40 | "source": [
41 | "import numpy as np\n",
42 | "import tensorflow as tf\n",
43 | "from sklearn.datasets import load_digits \n",
44 | "from sklearn.cross_validation import train_test_split\n",
45 | "import pylab as pl\n",
46 | "from IPython import display\n",
47 | "import sys\n",
48 | "%matplotlib inline"
49 | ]
50 | },
51 | {
52 | "cell_type": "markdown",
53 | "metadata": {},
54 | "source": [
55 | "# Vhanilla RNN class and functions"
56 | ]
57 | },
58 | {
59 | "cell_type": "code",
60 | "execution_count": 2,
61 | "metadata": {},
62 | "outputs": [],
63 | "source": [
64 | "class RNN_cell(object):\n",
65 | "\n",
66 | " \"\"\"\n",
67 | " RNN cell object which takes 3 arguments for initialization.\n",
68 | " input_size = Input Vector size\n",
69 | " hidden_layer_size = Hidden layer size\n",
70 | " target_size = Output vector size\n",
71 | "\n",
72 | " \"\"\"\n",
73 | "\n",
74 | " def __init__(self, input_size, hidden_layer_size, target_size):\n",
75 | "\n",
76 | " # Initialization of given values\n",
77 | " self.input_size = input_size\n",
78 | " self.hidden_layer_size = hidden_layer_size\n",
79 | " self.target_size = target_size\n",
80 | "\n",
81 | " # Weights and Bias for input and hidden tensor\n",
82 | " self.Wx = tf.Variable(tf.zeros(\n",
83 | " [self.input_size, self.hidden_layer_size]))\n",
84 | " self.Wh = tf.Variable(tf.zeros(\n",
85 | " [self.hidden_layer_size, self.hidden_layer_size]))\n",
86 | " self.bi = tf.Variable(tf.zeros([self.hidden_layer_size]))\n",
87 | "\n",
88 | " # Weights for output layers\n",
89 | " self.Wo = tf.Variable(tf.truncated_normal(\n",
90 | " [self.hidden_layer_size, self.target_size],mean=0,stddev=.01))\n",
91 | " self.bo = tf.Variable(tf.truncated_normal([self.target_size],mean=0,stddev=.01))\n",
92 | "\n",
93 | " # Placeholder for input vector with shape[batch, seq, embeddings]\n",
94 | " self._inputs = tf.placeholder(tf.float32,\n",
95 | " shape=[None, None, self.input_size],\n",
96 | " name='inputs')\n",
97 | "\n",
98 | " # Processing inputs to work with scan function\n",
99 | " self.processed_input = process_batch_input_for_RNN(self._inputs)\n",
100 | "\n",
101 | " '''\n",
102 | " Initial hidden state's shape is [1,self.hidden_layer_size]\n",
103 | " In First time stamp, we are doing dot product with weights to\n",
104 | " get the shape of [batch_size, self.hidden_layer_size].\n",
105 | " For this dot product tensorflow use broadcasting. But during\n",
106 | " Back propagation a low level error occurs.\n",
107 | " So to solve the problem it was needed to initialize initial\n",
108 | " hiddden state of size [batch_size, self.hidden_layer_size].\n",
109 | " So here is a little hack !!!! Getting the same shaped\n",
110 | " initial hidden state of zeros.\n",
111 | " '''\n",
112 | "\n",
113 | " self.initial_hidden = self._inputs[:, 0, :]\n",
114 | " self.initial_hidden = tf.matmul(\n",
115 | " self.initial_hidden, tf.zeros([input_size, hidden_layer_size]))\n",
116 | "\n",
117 | " # Function for vhanilla RNN.\n",
118 | " def vanilla_rnn(self, previous_hidden_state, x):\n",
119 | " \"\"\"\n",
120 | " This function takes previous hidden state and input and\n",
121 | " outputs current hidden state.\n",
122 | " \"\"\"\n",
123 | " current_hidden_state = tf.tanh(\n",
124 | " tf.matmul(previous_hidden_state, self.Wh) +\n",
125 | " tf.matmul(x, self.Wx) + self.bi)\n",
126 | "\n",
127 | " return current_hidden_state\n",
128 | "\n",
129 | " # Function for getting all hidden state.\n",
130 | " def get_states(self):\n",
131 | " \"\"\"\n",
132 | " Iterates through time/ sequence to get all hidden state\n",
133 | " \"\"\"\n",
134 | "\n",
135 | " # Getting all hidden state throuh time\n",
136 | " all_hidden_states = tf.scan(self.vanilla_rnn,\n",
137 | " self.processed_input,\n",
138 | " initializer=self.initial_hidden,\n",
139 | " name='states')\n",
140 | "\n",
141 | " return all_hidden_states\n",
142 | "\n",
143 | " # Function to get output from a hidden layer\n",
144 | " def get_output(self, hidden_state):\n",
145 | " \"\"\"\n",
146 | " This function takes hidden state and returns output\n",
147 | " \"\"\"\n",
148 | " output = tf.nn.relu(tf.matmul(hidden_state, self.Wo) + self.bo)\n",
149 | "\n",
150 | " return output\n",
151 | "\n",
152 | " # Function for getting all output layers\n",
153 | " def get_outputs(self):\n",
154 | " \"\"\"\n",
155 | " Iterating through hidden states to get outputs for all timestamp\n",
156 | " \"\"\"\n",
157 | " all_hidden_states = self.get_states()\n",
158 | "\n",
159 | " all_outputs = tf.map_fn(self.get_output, all_hidden_states)\n",
160 | "\n",
161 | " return all_outputs\n",
162 | "\n",
163 | "\n",
164 | "# Function to convert batch input data to use scan ops of tensorflow.\n",
165 | "def process_batch_input_for_RNN(batch_input):\n",
166 | " \"\"\"\n",
167 | " Process tensor of size [5,3,2] to [3,5,2]\n",
168 | " \"\"\"\n",
169 | " batch_input_ = tf.transpose(batch_input, perm=[2, 0, 1])\n",
170 | " X = tf.transpose(batch_input_)\n",
171 | "\n",
172 | " return X\n"
173 | ]
174 | },
175 | {
176 | "cell_type": "markdown",
177 | "metadata": {},
178 | "source": [
179 | "# Placeholder and initializers\n"
180 | ]
181 | },
182 | {
183 | "cell_type": "code",
184 | "execution_count": 3,
185 | "metadata": {
186 | "collapsed": true
187 | },
188 | "outputs": [],
189 | "source": [
190 | "hidden_layer_size = 110\n",
191 | "input_size = 8\n",
192 | "target_size = 10"
193 | ]
194 | },
195 | {
196 | "cell_type": "code",
197 | "execution_count": 4,
198 | "metadata": {
199 | "collapsed": true
200 | },
201 | "outputs": [],
202 | "source": [
203 | "y = tf.placeholder(tf.float32, shape=[None, target_size],name='inputs')"
204 | ]
205 | },
206 | {
207 | "cell_type": "markdown",
208 | "metadata": {},
209 | "source": [
210 | "# Models"
211 | ]
212 | },
213 | {
214 | "cell_type": "code",
215 | "execution_count": 5,
216 | "metadata": {
217 | "collapsed": true
218 | },
219 | "outputs": [],
220 | "source": [
221 | "#Initializing rnn object\n",
222 | "rnn=RNN_cell( input_size, hidden_layer_size, target_size)"
223 | ]
224 | },
225 | {
226 | "cell_type": "code",
227 | "execution_count": 6,
228 | "metadata": {},
229 | "outputs": [],
230 | "source": [
231 | "#Getting all outputs from rnn\n",
232 | "outputs = rnn.get_outputs()"
233 | ]
234 | },
235 | {
236 | "cell_type": "code",
237 | "execution_count": 7,
238 | "metadata": {
239 | "collapsed": true
240 | },
241 | "outputs": [],
242 | "source": [
243 | "#Getting final output through indexing\n",
244 | "last_output = outputs[-1]"
245 | ]
246 | },
247 | {
248 | "cell_type": "code",
249 | "execution_count": 8,
250 | "metadata": {
251 | "collapsed": true
252 | },
253 | "outputs": [],
254 | "source": [
255 | "#As the RNN model outputs the final layer through ReLU activation, softmax is used for the final output.\n",
256 | "output=tf.nn.softmax(last_output)"
257 | ]
258 | },
259 | {
260 | "cell_type": "code",
261 | "execution_count": 9,
262 | "metadata": {
263 | "collapsed": true
264 | },
265 | "outputs": [],
266 | "source": [
267 | "#Computing the Cross Entropy loss \n",
268 | "cross_entropy = -tf.reduce_sum(y * tf.log(output))"
269 | ]
270 | },
271 | {
272 | "cell_type": "code",
273 | "execution_count": 10,
274 | "metadata": {
275 | "collapsed": true
276 | },
277 | "outputs": [],
278 | "source": [
279 | "# Training with Adam Optimizer\n",
280 | "train_step = tf.train.AdamOptimizer().minimize(cross_entropy)"
281 | ]
282 | },
283 | {
284 | "cell_type": "code",
285 | "execution_count": 11,
286 | "metadata": {
287 | "collapsed": true
288 | },
289 | "outputs": [],
290 | "source": [
291 | "#Calculation of correct prediction and accuracy\n",
292 | "correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(output,1))\n",
293 | "accuracy = (tf.reduce_mean(tf.cast(correct_prediction, tf.float32)))*100"
294 | ]
295 | },
296 | {
297 | "cell_type": "markdown",
298 | "metadata": {},
299 | "source": [
300 | "# Dataset Preparation"
301 | ]
302 | },
303 | {
304 | "cell_type": "code",
305 | "execution_count": 12,
306 | "metadata": {},
307 | "outputs": [],
308 | "source": [
309 | "sess=tf.InteractiveSession()\n",
310 | "sess.run(tf.global_variables_initializer())"
311 | ]
312 | },
313 | {
314 | "cell_type": "code",
315 | "execution_count": 13,
316 | "metadata": {},
317 | "outputs": [],
318 | "source": [
319 | "#Using Sklearn MNIST dataset.\n",
320 | "digits = load_digits()\n",
321 | "X=digits.images\n",
322 | "Y_=digits.target\n",
323 | "\n",
324 | "# One hot encoding\n",
325 | "Y = sess.run(tf.one_hot(indices=Y_, depth=target_size))"
326 | ]
327 | },
328 | {
329 | "cell_type": "code",
330 | "execution_count": 14,
331 | "metadata": {},
332 | "outputs": [],
333 | "source": [
334 | "#Getting Train and test Dataset\n",
335 | "X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.22, random_state=42)\n",
336 | "\n",
337 | "#Cutting for simple iteration\n",
338 | "X_train=X_train[:1400]\n",
339 | "y_train=y_train[:1400]"
340 | ]
341 | },
342 | {
343 | "cell_type": "code",
344 | "execution_count": 15,
345 | "metadata": {
346 | "scrolled": false
347 | },
348 | "outputs": [
349 | {
350 | "data": {
351 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD8CAYAAAB5Pm/hAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAGSdJREFUeJzt3X+QXWV9x/H3dzeSBCwFJJAYEjZto05Wi8pq2aljrgZH\nQCtqFaNME9QxkOAUO7YKdXDPkmnBioqOJBoVSayDUAnKWKtiysLQicDGAkKQGhNAcPNDRGBEiNn9\n9o/n3N2zu3dz7+7eu+ee535eM3fOuc+999nn5Gw+99nnPvc55u6IiEi82vJugIiINJaCXkQkcgp6\nEZHIKehFRCKnoBcRiZyCXkQkcgp6EZHIKehFRCKnoBcRidysvBsAcPzxx3tHR0fezRARKZQdO3b8\nxt3nVXteUwR9R0cH/f39eTdDRKRQzOyRWp6noRsRkcgp6EVEIqegFxGJnIJeRCRyCnoRkcgp6EVE\nIlfooN++HS6/PGxFRKSypphHPxXbt8OKFXDwIBxxBGzbBt3debdKRKT5FLZH39cXQn5wMGz7+vJu\nkYhIcyps0JdKoSff3h62pVLeLRIRaU6FHbrp7g7DNX19IeQ1bCMiUllhgx5CuCvgRUQOr7BDNyIi\nUhsFvYhI5BT0IiKRU9CLiEROQS8iEjkFvYhI5BT0IiKRU9CLiEROQS8iEjkFvYhI5KIJeq1NLyJS\nWaHXuinT2vQiIhOLokevtelFRCYWRdBrbXoRkYlFMXSjtelFRCZWNejNbBGwBTgRcGCTu3/ezI4D\nrgc6gIeBc9z9STMz4PPAWcCzwHnu/tPGNH+E1qYXEamslqGbQ8BH3X0ZcBpwoZktAy4Gtrn7UmBb\neh/gTGBpelsDbKx7q0VEpGZVg97dB8o9cnd/BngQWAicDWxOn7YZeHu6fzawxYOfAMeY2YK6t1xE\nRGoyqQ9jzawDeBVwJ3Ciuw+kD+0lDO1AeBP4VeZlj6VlY+taY2b9ZtZ/4MCBSTZbRERqVXPQm9kL\ngRuBj7j709nH3N0J4/c1c/dN7t7l7l3z5s2bzEtFRGQSagp6M3sBIeS/6e5b0+J95SGZdLs/LX8c\nWJR5+UlpmYiI5KBq0KezaL4GPOjun808dDOwOt1fDXw3U77KgtOApzJDPCIiMsNqmUf/18DfAT8z\ns3vSsn8GrgBuMLMPAo8A56SPfZ8wtXIXYXrl++vaYhERmZSqQe/udwA2wcMrKjzfgQun2S4REamT\nKJZAEBGRiSnoRUQiF2XQa216EZERUSxqlqW16UVERouuR6+16UVERosu6LU2vYjIaNEN3WhtehGR\n0aILetDa9CIiWdEN3YiIyGgKehGRyCnoRUQip6AXEYmcgl5EJHIKehGRyCnoRUQip6AXEYmcgl5E\nJHJRB72WKxYRiXQJBNByxSIiZdH26LVcsYhIEG3Qa7liEZEg2qEbLVcsIhJEG/Sg5YpFRCDioRsR\nEQkU9CIikVPQi4hETkEvIhI5Bb2ISOQU9CIikVPQi4hETkEvIhI5Bb2ISOQU9CIikVPQi4hETkEv\nIhK5qkFvZteY2X4zuz9TlpjZ42Z2T3o7K/PYJWa2y8weMrM3N6rhk6WrTYlIq6pl9cprgS8CW8aU\nf87dr8wWmNkyYCXQCbwY+LGZvcTdB+vQ1inT1aZEpJVV7dG7++3Ab2us72zgW+7+vLvvAXYBr51G\n++pCV5sSkVY2nTH6D5vZfenQzrFp2ULgV5nnPJaWjWNma8ys38z6Dxw4MI1mVKerTYlIK5tq0G8E\n/hx4JTAAfGayFbj7JnfvcveuefPmTbEZtSlfbWr9eg3biEjrmdIVptx9X3nfzL4CfC+9+ziwKPPU\nk9Ky3OlqUyLSqqbUozezBZm77wDKM3JuBlaa2WwzWwIsBe6aXhNFRGQ6qvbozew6oAQcb2aPAT1A\nycxeCTjwMHA+gLs/YGY3ADuBQ8CFec
+4ERFpdebuebeBrq4u7+/vz7sZIiKFYmY73L2r2vP0zVgR\nkcgp6EVEIqegFxGJnIJeRCRyCnoRkcgp6EVEIqegFxGJnIJeRCRyCnoRkcgp6EVEIqegFxGJnIJe\nRCRyCnoRkcgp6EVEIqegFxGJnIJeRCRyCnoRkcgp6EVEIqegFxGJnIJeRCRyCnoRkcgp6EVEIqeg\nFxGJnIJeRCRyLRf027fD5ZeHrYhIK5iVdwNm0vbtsGIFHDwIRxwB27ZBd3ferRIRaayW6tH39YWQ\nHxwM276+vFskItJ4LRX0pVLoybe3h22plHeLREQar6WGbrq7w3BNX18IeQ3biEgraKmghxDuCngR\naSUtNXQjItKKFPQiIpFT0IuIRE5BLyISuapBb2bXmNl+M7s/U3acmd1iZr9It8em5WZmXzCzXWZ2\nn5m9upGNFxGR6mrp0V8LnDGm7GJgm7svBbal9wHOBJamtzXAxvo0U3I1MADLl8O994bt3r0jZdn9\nSo/XWlavelR3PHUXoY31qnvv3sb+H3b3qjegA7g/c/8hYEG6vwB4KN3/MvDeSs873O3UU091maZf\n/9r99a93v+eesB0YGCnL7ld6vFrZ2rXubW3unZ1hu3btSFl2v9LjtZbVqx7VHU/dRWhjvepeu3ZK\n/+2Bfq8lw2t60vig/11m38r3ge8Br8s8tg3oqlZ/Swd9M4cx6KabbjN5mzNnUvExY0Gf3n8y3dYc\n9IShnX6gf/HixVONyeZXLcibOIw/3T2yf+8JmbL2dvf29pH9trbxj9daVq96VHc8dRehjfWqu73d\ne0q4H3mk+7nnhkyYhEYHvYZuqimH+urV+QT0NH6p//3loYwkLQPfcGqmjPH7lR6vtaxe9ajueOou\nQhvrVTcJUx6+qTXopzq98mZgdbq/GvhupnxVOvvmNOApdx+Y4s8onuwHK4sWwe23w+bNMDQEGzeG\n29AQPPDA+Ne2t0Nb29TLgLsXhrv/9GbCEp2DgyP7Q0P8z6Ixj09Q9vTsTN2Dg9DZyQm/Bzo7Q9mS\nJeGW2a/0eK1l9apHdcdTdxHaWK+6AbjggoZ+IGvhTeEwTzC7DigBxwP7gB7gO8ANwGLgEeAcd/+t\nmRnwRcIsnWeB97t7f7VGdHV1eX9/1ac1v3XrQphPpK0tBH1Ze3sI0tmz4fnngRDWr3kckjcYya0+\nqmz4+dnXlredndw4+AB/296JvfsBfHP4BbLVe4b3b5y9Z9zj2TIRyVfP8h6SUlLz881sh7t3VX1i\nLd3+Rt8KP3Qze/bwn2GVbj1vsOH9uxaOKevs9J7rzndfssR9yRL/9stCGQnjynre3zFc9sBrOkbK\n1q3zngte5iTU7ebuw9vsfqWyao/PdD2qO566i9DGetU9FTR46Eay3vOesJ01K2zToZSd88Ld3uU+\n/Gfao38CdHaGsnXr4CUvofehL8Pu3bB7N+9aCdyffjdtTFnvyQ8Pl3W+5eHhMjthA73zfz6qST3L\ne/Ce8BeB9/io/VrKRCQe7UmS5N0GNm3alKxZsybvZkze3Llw6aXhCxAAQ0MkJSjt8TCufd4BkmXr\n6D3qbpKbnoSLLqLziV6SG/bTe1svyb/dBe95D7239Q7fgFHbsWUApY5SeH0pofe2XrzHR+333tZL\n33l9w68r/ymYfc3hyso/I7utVjaV1zSyHtUdT91FaGO96p6s3t7egSRJNlV7XtUx+pnQ9GP0AwOw\nciVcf30YgFm5Er7wBTj/fHjxi+GHP4Rnn4X2duzSQXzPapK5d47rZdfKexzrtRDgfcmokK/ltUlf\nMhzalfarlYlIMWiMfrqy898PN9d92TL3tjbvOT1MTZxozLvn1h53b+yYYflniEhroMYx+lmNf88p\nqPXrw/TIBQtGyrIzaspTJHfuJClB7+vCbaxy77zW3nLP8p5R27H7h6MeuYhUog9jx5ozB8wqT5Os\nNI
d97lyShecCE3+oWSm0Jwry4SGVTGiX96vVIyJSicboYfQY/Mc/Dlu2hBk0hw5VnOteLiudB7d1\njK+uPBdW494i0ki1jtGrRw+jh2m2bAllhw6RlBj+MlJy7XnDUyR3vnoRdHZyWwf4/nX4ve8AQk8+\n+4UHhbyINIPW7tHPnQvPPTeueOfxsOzJkRk0PP00dspNw8Mx5Rkx5W22TERkpqhHX4vdu+F974Mj\njwz30y86dX6YMI0S4NprYevW4ZckfQkQgr28tV5j+cnLZ6jRIiKT09qzbhYsgKOPDr36OXPguedI\n3j0POIB9MqxJUw70sftlk12bQkRkprV2jx5g3z644AJKn+7EEujtPDDq4WpLCSjkRaTZtXaPHoaH\nZW6rMN5ebf67pjaKSBG0do++yoV5J5rrXt5Xb15EiqClZ93849rX8Jn543/u8pOXDy8KJiLSrGqd\nddOaQzfptMorgSvTIkvAr5gDf/hDjg0TEam/1hy62b2b+095H78nTKssb9mzJ8dGiYg0RmsG/YIF\nHL/kaObwHJ8otTOH5/jo3i6YPz/vlomI1F1rBj0w3/Zx4J0X8K+lQQ688wKu3Lco7yaJiDREa47R\nA2zdynyA3g3Mv/HqvFsjItIwLdmjT/qS4aULYGQZg/LyBiIiMWm9Hv3AAEnPrSTXD8D8+VqMTESi\n13o9+vXr4Y474LLL8m6JiMiMaJ2gnzt35MpRQ0Nha0bPHe15t0xEpKFaJ+gzSxInJcLSxOeeS/KN\nx/JumYhIQ7VO0GeWJO4tEZYmPvpozZ0Xkei1TtDD8JLEQNhOsJiZiEhMWibok74EO+Um7IQNANgJ\nG7BTbtKUShGJXstMr0xeej5Jz61w/fXYlxdoSqWItIyW6dFrWqWItKr4e/TpksTDNm6kpwRcMVdL\nEotIS4i/R19hWmWy8FwtSSwiLSP+oNe0ShFpcfEHPWhapYi0tOiDvpZpldu3w+WXh62ISGym9WGs\nmT0MPAMMAofcvcvMjgOuBzqAh4Fz3P3J6TVzimpYqXL7dlixAg4ehCOOgG3boLs7l9aKiDREPXr0\nb3D3V2auRH4xsM3dlwLb0vv5qGFKZV9fCPnBwbDt65ux1omIzIhGDN2cDWxO9zcDb2/Azzi8zEqV\nyesPv1JlqRR68u3tYVsqzXhrRUQaarpB78CPzGyHma1Jy05094F0fy9w4jR/xuRlplT2ljjsSpXd\n3WG4Zv16DduISJym+4Wp17n742Z2AnCLmf08+6C7u5lVXGsgfWNYA7B48eJpNmOMzJRKoOqUyu5u\nBbyIxGtaPXp3fzzd7gduAl4L7DOzBQDpdv8Er93k7l3u3jVv3rzpNGOcpC/B5n8J++QQAPbJIezE\njVrATERakrlPbXEvMzsKaHP3Z9L9W4DLgBXAE+5+hZldDBzn7h87XF1dXV3e398/pXZUbaeuCSsi\nkTKzHZmJMBOaTo/+ROAOM7sXuAv4T3f/AXAF8CYz+wVwenp/Zg0MkJx3sr4YJSLCNMbo3X03cEqF\n8icIvfr8rF9P75JHSS67jJ5zenJtiohI3uJavTK7UmVCmF65EZjzKa1UKSItK6qgT25YR+9PPzt8\n35Kw7Tn1QpJcWiQikr+o1rpJ/uYz+N4L8MvCYfllbfi+tSRvvbLmOrTujYjEJp6gL38A+8gjU16p\nsrzuzaWXhq3CXkRiEM/QTfkD2CPfAldfTU/fPCglk6qi0ro3+iKViBRd8Xv0mXVtgOF1bZIzPzXp\nqrTujYjEqPA9+np+AFte96avL4S8evMiEoMpfzO2nqb9zdi1a2HTJuyTQ+GD2PPPhw0b6tdAEZEm\nNBPfjG0eDbhUoGbfiEgsCj90A8DWrQBT+gC2El11SkRiEkePPpXUIeRBV50SkbgUO+gHBmD58rov\nXqbZNyISk2IHfQ3XhJ2KsVedAo3Xi0hxFXPWTXbxsqw5c+q+eJnG
60WkWcU96yZzTdikxPA1Ydmz\np+4/aux4/ZYt6t2LSLEUc9ZN5pqwvSVIbj/8NWGnozxef/BgGLP/+tfh0CH17kWkOIrZo4eGzJ2v\nJDte/4EPhJAfHITnn4ckUc9eRJpfIYM+6UuwU27CTgjffrUTNmCn3NSwi393d8Mll8CqVaEn39YG\nQ0Pw4x9rlUsRaX7FDPpSgvf48EW/y/v1mkc/kXLv/vTTR8Je8+xFpNkVMujz1N0dhmxmz9Y8exEp\nhmJ+GJvRs3zmL/6tVS5FpEiKOY++yWzfrtAXkZlX6zz6wvfo86YvVIlIs9MY/TRpATQRaXYK+mnK\nLoDW3g6PPqrpliLSXBT001T+YPZDHwqXrv3KVzS3XkSai4K+Drq7YfHikW/Nak0cEWkm+jC2TiZa\nE6e9PSydsGqVPqQVkXxoemUdladZPvpoGMIZHAzlZmEF5auugiee0DRMEamPWqdXKugboDzl8rnn\noPzP29YWevdDQ+rli0h9KOhztn17GKcvD+GYhZAfGgqPV+rlg754JSK1U9A3ifJwzoteBB/5yOF7\n+Wbjx/Vh5PUa9hGRLAV9EzpcL98sPKd8OszgBS8I2z/+MTynrS0splb+K0DhL9LaFPRNbGwvvzxT\nxyzsZ8MeRu7DyF8Bg4Ojw19LL4i0Hq1108S6u0dC+RWvGBmXh9E9/nL4Z3v0bW0jIQ8ja+Jv2TJ+\niAdG163xf5HW1LAevZmdAXweaAe+6u5XTPTcVuvRV5NdDRNGB3j5r4Dnnx8J/1mzxr8hlMuybxga\n/xeJS65DN2bWDvwf8CbgMeBu4L3uvrPS8xX0k5Md+nniifHz9mH0sM90x/+htr8Wan3NdOvRm5FI\nkPfQzWuBXe6+O23Mt4CzgYpBL5OTHfqBEPybN1fu5Wd79OXxf/cQ8DAS/kND4fUf/vDo8f9a/1qo\n9HitZZOpJ883I9XdvG/+Ra+70R2YRgX9QuBXmfuPAX/VoJ/V8rJXvDrcL95Uxv8rvSFky8rPnejx\neteT15uR6m7eN/+i133oUOOvZZHbh7FmtgZYA7B48eK8mhGNsb38bHl2f9Wqxoz/z9R/jjzfjFR3\nc775x1B3+VoWRQv6x4FFmfsnpWXD3H0TsAnCGH2D2iFjjH1DGPuLVZ4F1Kx/puf1ZqS6m/fNv+h1\nl3v05d/3RmjUh7GzCB/GriAE/N3A+9z9gUrP14exMhljP4wu2nis6o6njXmP0ef+hSkzOwu4ijC9\n8hp3/5eJnqugFxGZvLxn3eDu3we+36j6RUSkNm15N0BERBpLQS8iEjkFvYhI5BT0IiKRU9CLiESu\nKdajN7MDwCNTfPnxwG/q2Jy8xXQ8OpbmpGNpTlM5lpPdfV61JzVF0E+HmfXXMo+0KGI6Hh1Lc9Kx\nNKdGHouGbkREIqegFxGJXAxBvynvBtRZTMejY2lOOpbm1LBjKfwYvYiIHF4MPXoRETmMQge9mZ1h\nZg+Z2S4zuzjv9kyGmS0ys1vNbKeZPWBmF6Xlx5nZLWb2i3R7bN5trZWZtZvZ/5rZ99L7S8zszvT8\nXG9mR+TdxlqY2TFm9m0z+7mZPWhm3UU9L2b2D+nv1/1mdp2ZzSnSeTGza8xsv5ndnymreC4s+EJ6\nXPeZ2avza/l4ExzLp9Pfs/vM7CYzOybz2CXpsTxkZm+ezs8ubNCnFyC/GjgTWAa818yW5duqSTkE\nfNTdlwGnARem7b8Y2ObuS4Ft6f2iuAh4MHP/U8Dn3P0vgCeBD+bSqsn7PPADd38ZcArhmAp3Xsxs\nIfD3QJe7v5ywZPhKinVergXOGFM20bk4E1ia3tYAG2eojbW6lvHHcgvwcnf/S8I1PC4BSLNgJdCZ\nvmZDmnlTUtigJ3MBcnc/CJQvQF4I7j7g7j9N958hhMlCwjFsTp+2GXh7Pi2cHDM7CXgL8NX0vgFv\nBL6dPqUQx2Jmfwq8HvgagLsf
dPffUdDzQliKfG56MaAjgQEKdF7c/Xbgt2OKJzoXZwNbPPgJcIyZ\nLZiZllZX6Vjc/Ufufii9+xPC1fggHMu33P15d98D7CJk3pQUOegrXYB8YU5tmRYz6wBeBdwJnOju\nA+lDe4ETc2rWZF0FfAxIr5jJi4DfZX6Ji3J+lgAHgK+nw1BfNbOjKOB5cffHgSuBRwkB/xSwg2Ke\nl6yJzkXRM+EDwH+l+3U9liIHfRTM7IXAjcBH3P3p7GMepkQ1/bQoM3srsN/dd+TdljqYBbwa2Oju\nrwJ+z5hhmgKdl2MJPcMlwIuBoxg/dFBoRTkX1ZjZJwjDud9sRP1FDvqqFyBvdmb2AkLIf9Pdt6bF\n+8p/bqbb/Xm1bxL+GnibmT1MGEJ7I2Gc+5h0yACKc34eAx5z9zvT+98mBH8Rz8vpwB53P+DufwS2\nEs5VEc9L1kTnopCZYGbnAW8FzvWR+e51PZYiB/3dwNJ0BsERhA8ubs65TTVLx7C/Bjzo7p/NPHQz\nsDrdXw18d6bbNlnufom7n+TuHYTz8N/ufi5wK/Cu9GlFOZa9wK/M7KVp0QpgJwU8L4Qhm9PM7Mj0\n9618LIU7L2NMdC5uBlals29OA57KDPE0JTM7gzDk+TZ3fzbz0M3ASjObbWZLCB8w3zXlH+Tuhb0B\nZxE+qf4l8Im82zPJtr+O8CfnfcA96e0swtj2NuAXwI+B4/Ju6ySPqwR8L93/s/SXcxfwH8DsvNtX\n4zG8EuhPz813gGOLel6AXuDnwP3AN4DZRTovwHWEzxf+SPhr64MTnQvACDPxfgn8jDDbKPdjqHIs\nuwhj8eUM+FLm+Z9Ij+Uh4Mzp/Gx9M1ZEJHJFHroREZEaKOhFRCKnoBcRiZyCXkQkcgp6EZHIKehF\nRCKnoBcRiZyCXkQkcv8PeNfq14MufEoAAAAASUVORK5CYII=\n",
352 | "text/plain": [
353 | ""
354 | ]
355 | },
356 | "metadata": {},
357 | "output_type": "display_data"
358 | },
359 | {
360 | "name": "stdout",
361 | "output_type": "stream",
362 | "text": [
363 | "\r",
364 | "Iteration: 119 Loss: 0.0729073 Train Accuracy: 100.0 Test Accuracy: 97.2222\n"
365 | ]
366 | },
367 | {
368 | "data": {
369 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD8CAYAAAB5Pm/hAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAGSdJREFUeJzt3X+QXWV9x/H3dzeSBCwFJJAYEjZto05Wi8pq2aljrgZH\nQCtqFaNME9QxkOAUO7YKdXDPkmnBioqOJBoVSayDUAnKWKtiysLQicDGAkKQGhNAcPNDRGBEiNn9\n9o/n3N2zu3dz7+7eu+ee535eM3fOuc+999nn5Gw+99nnPvc55u6IiEi82vJugIiINJaCXkQkcgp6\nEZHIKehFRCKnoBcRiZyCXkQkcgp6EZHIKehFRCKnoBcRidysvBsAcPzxx3tHR0fezRARKZQdO3b8\nxt3nVXteUwR9R0cH/f39eTdDRKRQzOyRWp6noRsRkcgp6EVEIqegFxGJnIJeRCRyCnoRkcgp6EVE\nIlfooN++HS6/PGxFRKSypphHPxXbt8OKFXDwIBxxBGzbBt3debdKRKT5FLZH39cXQn5wMGz7+vJu\nkYhIcyps0JdKoSff3h62pVLeLRIRaU6FHbrp7g7DNX19IeQ1bCMiUllhgx5CuCvgRUQOr7BDNyIi\nUhsFvYhI5BT0IiKRU9CLiEROQS8iEjkFvYhI5BT0IiKRU9CLiEROQS8iEjkFvYhI5KIJeq1NLyJS\nWaHXuinT2vQiIhOLokevtelFRCYWRdBrbXoRkYlFMXSjtelFRCZWNejNbBGwBTgRcGCTu3/ezI4D\nrgc6gIeBc9z9STMz4PPAWcCzwHnu/tPGNH+E1qYXEamslqGbQ8BH3X0ZcBpwoZktAy4Gtrn7UmBb\neh/gTGBpelsDbKx7q0VEpGZVg97dB8o9cnd/BngQWAicDWxOn7YZeHu6fzawxYOfAMeY2YK6t1xE\nRGoyqQ9jzawDeBVwJ3Ciuw+kD+0lDO1AeBP4VeZlj6VlY+taY2b9ZtZ/4MCBSTZbRERqVXPQm9kL\ngRuBj7j709nH3N0J4/c1c/dN7t7l7l3z5s2bzEtFRGQSagp6M3sBIeS/6e5b0+J95SGZdLs/LX8c\nWJR5+UlpmYiI5KBq0KezaL4GPOjun808dDOwOt1fDXw3U77KgtOApzJDPCIiMsNqmUf/18DfAT8z\ns3vSsn8GrgBuMLMPAo8A56SPfZ8wtXIXYXrl++vaYhERmZSqQe/udwA2wcMrKjzfgQun2S4REamT\nKJZAEBGRiSnoRUQiF2XQa216EZERUSxqlqW16UVERouuR6+16UVERosu6LU2vYjIaNEN3WhtehGR\n0aILetDa9CIiWdEN3YiIyGgKehGRyCnoRUQip6AXEYmcgl5EJHIKehGRyCnoRUQip6AXEYmcgl5E\nJHJRB72WKxYRiXQJBNByxSIiZdH26LVcsYhIEG3Qa7liEZEg2qEbLVcsIhJEG/Sg5YpFRCDioRsR\nEQkU9CIikVPQi4hETkEvIhI5Bb2ISOQU9CIikVPQi4hETkEvIhI5Bb2ISOQU9CIikVPQi4hETkEv\nIhK5qkFvZteY2X4zuz9TlpjZ42Z2T3o7K/PYJWa2y8weMrM3N6rhk6WrTYlIq6pl9cprgS8CW8aU\nf87dr8wWmNkyYCXQCbwY+LGZvcTdB+vQ1inT1aZEpJVV7dG7++3Ab2us72zgW+7+vLvvAXYBr51G\n++pCV5sSkVY2nTH6D5vZfenQzrFp2ULgV5nnPJaWjWNma8ys38z6Dxw4MI1mVKerTYlIK5tq0G8E\n/hx4JTAAfGayFbj7JnfvcveuefPmTbEZtSlfbWr9eg3biEjrmdIVptx9X3nfzL4CfC+9+ziwKPPU\nk9Ky3OlqUyLSqqbUozezBZm77wDKM3JuBlaa2WwzWwIsBe6aXhNFRGQ6qvbozew6oAQcb2aPAT1A\nycxeCTjwMHA+gLs/YGY3ADuBQ8CFec
+4ERFpdebuebeBrq4u7+/vz7sZIiKFYmY73L2r2vP0zVgR\nkcgp6EVEIqegFxGJnIJeRCRyCnoRkcgp6EVEIqegFxGJnIJeRCRyCnoRkcgp6EVEIqegFxGJnIJe\nRCRyCnoRkcgp6EVEIqegFxGJnIJeRCRyCnoRkcgp6EVEIqegFxGJnIJeRCRyCnoRkcgp6EVEIqeg\nFxGJnIJeRCRyLRf027fD5ZeHrYhIK5iVdwNm0vbtsGIFHDwIRxwB27ZBd3ferRIRaayW6tH39YWQ\nHxwM276+vFskItJ4LRX0pVLoybe3h22plHeLREQar6WGbrq7w3BNX18IeQ3biEgraKmghxDuCngR\naSUtNXQjItKKFPQiIpFT0IuIRE5BLyISuapBb2bXmNl+M7s/U3acmd1iZr9It8em5WZmXzCzXWZ2\nn5m9upGNFxGR6mrp0V8LnDGm7GJgm7svBbal9wHOBJamtzXAxvo0U3I1MADLl8O994bt3r0jZdn9\nSo/XWlavelR3PHUXoY31qnvv3sb+H3b3qjegA7g/c/8hYEG6vwB4KN3/MvDeSs873O3UU091maZf\n/9r99a93v+eesB0YGCnL7ld6vFrZ2rXubW3unZ1hu3btSFl2v9LjtZbVqx7VHU/dRWhjvepeu3ZK\n/+2Bfq8lw2t60vig/11m38r3ge8Br8s8tg3oqlZ/Swd9M4cx6KabbjN5mzNnUvExY0Gf3n8y3dYc\n9IShnX6gf/HixVONyeZXLcibOIw/3T2yf+8JmbL2dvf29pH9trbxj9daVq96VHc8dRehjfWqu73d\ne0q4H3mk+7nnhkyYhEYHvYZuqimH+urV+QT0NH6p//3loYwkLQPfcGqmjPH7lR6vtaxe9ajueOou\nQhvrVTcJUx6+qTXopzq98mZgdbq/GvhupnxVOvvmNOApdx+Y4s8onuwHK4sWwe23w+bNMDQEGzeG\n29AQPPDA+Ne2t0Nb29TLgLsXhrv/9GbCEp2DgyP7Q0P8z6Ixj09Q9vTsTN2Dg9DZyQm/Bzo7Q9mS\nJeGW2a/0eK1l9apHdcdTdxHaWK+6AbjggoZ+IGvhTeEwTzC7DigBxwP7gB7gO8ANwGLgEeAcd/+t\nmRnwRcIsnWeB97t7f7VGdHV1eX9/1ac1v3XrQphPpK0tBH1Ze3sI0tmz4fnngRDWr3kckjcYya0+\nqmz4+dnXlredndw4+AB/296JvfsBfHP4BbLVe4b3b5y9Z9zj2TIRyVfP8h6SUlLz881sh7t3VX1i\nLd3+Rt8KP3Qze/bwn2GVbj1vsOH9uxaOKevs9J7rzndfssR9yRL/9stCGQnjynre3zFc9sBrOkbK\n1q3zngte5iTU7ebuw9vsfqWyao/PdD2qO566i9DGetU9FTR46Eay3vOesJ01K2zToZSd88Ld3uU+\n/Gfao38CdHaGsnXr4CUvofehL8Pu3bB7N+9aCdyffjdtTFnvyQ8Pl3W+5eHhMjthA73zfz6qST3L\ne/Ce8BeB9/io/VrKRCQe7UmS5N0GNm3alKxZsybvZkze3Llw6aXhCxAAQ0MkJSjt8TCufd4BkmXr\n6D3qbpKbnoSLLqLziV6SG/bTe1svyb/dBe95D7239Q7fgFHbsWUApY5SeH0pofe2XrzHR+333tZL\n33l9w68r/ymYfc3hyso/I7utVjaV1zSyHtUdT91FaGO96p6s3t7egSRJNlV7XtUx+pnQ9GP0AwOw\nciVcf30YgFm5Er7wBTj/fHjxi+GHP4Rnn4X2duzSQXzPapK5d47rZdfKexzrtRDgfcmokK/ltUlf\nMhzalfarlYlIMWiMfrqy898PN9d92TL3tjbvOT1MTZxozLvn1h53b+yYYflniEhroMYx+lmNf88p\nqPXrw/TIBQtGyrIzaspTJHfuJClB7+vCbaxy77zW3nLP8p5R27H7h6MeuYhUog9jx5ozB8wqT5Os\nNI
d97lyShecCE3+oWSm0Jwry4SGVTGiX96vVIyJSicboYfQY/Mc/Dlu2hBk0hw5VnOteLiudB7d1\njK+uPBdW494i0ki1jtGrRw+jh2m2bAllhw6RlBj+MlJy7XnDUyR3vnoRdHZyWwf4/nX4ve8AQk8+\n+4UHhbyINIPW7tHPnQvPPTeueOfxsOzJkRk0PP00dspNw8Mx5Rkx5W22TERkpqhHX4vdu+F974Mj\njwz30y86dX6YMI0S4NprYevW4ZckfQkQgr28tV5j+cnLZ6jRIiKT09qzbhYsgKOPDr36OXPguedI\n3j0POIB9MqxJUw70sftlk12bQkRkprV2jx5g3z644AJKn+7EEujtPDDq4WpLCSjkRaTZtXaPHoaH\nZW6rMN5ebf67pjaKSBG0do++yoV5J5rrXt5Xb15EiqClZ93849rX8Jn543/u8pOXDy8KJiLSrGqd\nddOaQzfptMorgSvTIkvAr5gDf/hDjg0TEam/1hy62b2b+095H78nTKssb9mzJ8dGiYg0RmsG/YIF\nHL/kaObwHJ8otTOH5/jo3i6YPz/vlomI1F1rBj0w3/Zx4J0X8K+lQQ688wKu3Lco7yaJiDREa47R\nA2zdynyA3g3Mv/HqvFsjItIwLdmjT/qS4aULYGQZg/LyBiIiMWm9Hv3AAEnPrSTXD8D8+VqMTESi\n13o9+vXr4Y474LLL8m6JiMiMaJ2gnzt35MpRQ0Nha0bPHe15t0xEpKFaJ+gzSxInJcLSxOeeS/KN\nx/JumYhIQ7VO0GeWJO4tEZYmPvpozZ0Xkei1TtDD8JLEQNhOsJiZiEhMWibok74EO+Um7IQNANgJ\nG7BTbtKUShGJXstMr0xeej5Jz61w/fXYlxdoSqWItIyW6dFrWqWItKr4e/TpksTDNm6kpwRcMVdL\nEotIS4i/R19hWmWy8FwtSSwiLSP+oNe0ShFpcfEHPWhapYi0tOiDvpZpldu3w+WXh62ISGym9WGs\nmT0MPAMMAofcvcvMjgOuBzqAh4Fz3P3J6TVzimpYqXL7dlixAg4ehCOOgG3boLs7l9aKiDREPXr0\nb3D3V2auRH4xsM3dlwLb0vv5qGFKZV9fCPnBwbDt65ux1omIzIhGDN2cDWxO9zcDb2/Azzi8zEqV\nyesPv1JlqRR68u3tYVsqzXhrRUQaarpB78CPzGyHma1Jy05094F0fy9w4jR/xuRlplT2ljjsSpXd\n3WG4Zv16DduISJym+4Wp17n742Z2AnCLmf08+6C7u5lVXGsgfWNYA7B48eJpNmOMzJRKoOqUyu5u\nBbyIxGtaPXp3fzzd7gduAl4L7DOzBQDpdv8Er93k7l3u3jVv3rzpNGOcpC/B5n8J++QQAPbJIezE\njVrATERakrlPbXEvMzsKaHP3Z9L9W4DLgBXAE+5+hZldDBzn7h87XF1dXV3e398/pXZUbaeuCSsi\nkTKzHZmJMBOaTo/+ROAOM7sXuAv4T3f/AXAF8CYz+wVwenp/Zg0MkJx3sr4YJSLCNMbo3X03cEqF\n8icIvfr8rF9P75JHSS67jJ5zenJtiohI3uJavTK7UmVCmF65EZjzKa1UKSItK6qgT25YR+9PPzt8\n35Kw7Tn1QpJcWiQikr+o1rpJ/uYz+N4L8MvCYfllbfi+tSRvvbLmOrTujYjEJp6gL38A+8gjU16p\nsrzuzaWXhq3CXkRiEM/QTfkD2CPfAldfTU/fPCglk6qi0ro3+iKViBRd8Xv0mXVtgOF1bZIzPzXp\nqrTujYjEqPA9+np+AFte96avL4S8evMiEoMpfzO2nqb9zdi1a2HTJuyTQ+GD2PPPhw0b6tdAEZEm\nNBPfjG0eDbhUoGbfiEgsCj90A8DWrQBT+gC2El11SkRiEkePPpXUIeRBV50SkbgUO+gHBmD58rov\nXqbZNyISk2IHfQ3XhJ2KsVedAo3Xi0hxFXPWTXbxsqw5c+q+eJnG
60WkWcU96yZzTdikxPA1Ydmz\np+4/aux4/ZYt6t2LSLEUc9ZN5pqwvSVIbj/8NWGnozxef/BgGLP/+tfh0CH17kWkOIrZo4eGzJ2v\nJDte/4EPhJAfHITnn4ckUc9eRJpfIYM+6UuwU27CTgjffrUTNmCn3NSwi393d8Mll8CqVaEn39YG\nQ0Pw4x9rlUsRaX7FDPpSgvf48EW/y/v1mkc/kXLv/vTTR8Je8+xFpNkVMujz1N0dhmxmz9Y8exEp\nhmJ+GJvRs3zmL/6tVS5FpEiKOY++yWzfrtAXkZlX6zz6wvfo86YvVIlIs9MY/TRpATQRaXYK+mnK\nLoDW3g6PPqrpliLSXBT001T+YPZDHwqXrv3KVzS3XkSai4K+Drq7YfHikW/Nak0cEWkm+jC2TiZa\nE6e9PSydsGqVPqQVkXxoemUdladZPvpoGMIZHAzlZmEF5auugiee0DRMEamPWqdXKugboDzl8rnn\noPzP29YWevdDQ+rli0h9KOhztn17GKcvD+GYhZAfGgqPV+rlg754JSK1U9A3ifJwzoteBB/5yOF7\n+Wbjx/Vh5PUa9hGRLAV9EzpcL98sPKd8OszgBS8I2z/+MTynrS0splb+K0DhL9LaFPRNbGwvvzxT\nxyzsZ8MeRu7DyF8Bg4Ojw19LL4i0Hq1108S6u0dC+RWvGBmXh9E9/nL4Z3v0bW0jIQ8ja+Jv2TJ+\niAdG163xf5HW1LAevZmdAXweaAe+6u5XTPTcVuvRV5NdDRNGB3j5r4Dnnx8J/1mzxr8hlMuybxga\n/xeJS65DN2bWDvwf8CbgMeBu4L3uvrPS8xX0k5Md+nniifHz9mH0sM90x/+htr8Wan3NdOvRm5FI\nkPfQzWuBXe6+O23Mt4CzgYpBL5OTHfqBEPybN1fu5Wd79OXxf/cQ8DAS/kND4fUf/vDo8f9a/1qo\n9HitZZOpJ883I9XdvG/+Ra+70R2YRgX9QuBXmfuPAX/VoJ/V8rJXvDrcL95Uxv8rvSFky8rPnejx\neteT15uR6m7eN/+i133oUOOvZZHbh7FmtgZYA7B48eK8mhGNsb38bHl2f9Wqxoz/z9R/jjzfjFR3\nc775x1B3+VoWRQv6x4FFmfsnpWXD3H0TsAnCGH2D2iFjjH1DGPuLVZ4F1Kx/puf1ZqS6m/fNv+h1\nl3v05d/3RmjUh7GzCB/GriAE/N3A+9z9gUrP14exMhljP4wu2nis6o6njXmP0ef+hSkzOwu4ijC9\n8hp3/5eJnqugFxGZvLxn3eDu3we+36j6RUSkNm15N0BERBpLQS8iEjkFvYhI5BT0IiKRU9CLiESu\nKdajN7MDwCNTfPnxwG/q2Jy8xXQ8OpbmpGNpTlM5lpPdfV61JzVF0E+HmfXXMo+0KGI6Hh1Lc9Kx\nNKdGHouGbkREIqegFxGJXAxBvynvBtRZTMejY2lOOpbm1LBjKfwYvYiIHF4MPXoRETmMQge9mZ1h\nZg+Z2S4zuzjv9kyGmS0ys1vNbKeZPWBmF6Xlx5nZLWb2i3R7bN5trZWZtZvZ/5rZ99L7S8zszvT8\nXG9mR+TdxlqY2TFm9m0z+7mZPWhm3UU9L2b2D+nv1/1mdp2ZzSnSeTGza8xsv5ndnymreC4s+EJ6\nXPeZ2avza/l4ExzLp9Pfs/vM7CYzOybz2CXpsTxkZm+ezs8ubNCnFyC/GjgTWAa818yW5duqSTkE\nfNTdlwGnARem7b8Y2ObuS4Ft6f2iuAh4MHP/U8Dn3P0vgCeBD+bSqsn7PPADd38ZcArhmAp3Xsxs\nIfD3QJe7v5ywZPhKinVergXOGFM20bk4E1ia3tYAG2eojbW6lvHHcgvwcnf/S8I1PC4BSLNgJdCZ\nvmZDmnlTUtigJ3MBcnc/CJQvQF4I7j7g7j9N958hhMlCwjFsTp+2GXh7Pi2cHDM7CXgL8NX0vgFv\nBL6dPqUQx2Jmfwq8HvgagLsf
dPffUdDzQliKfG56MaAjgQEKdF7c/Xbgt2OKJzoXZwNbPPgJcIyZ\nLZiZllZX6Vjc/Ufufii9+xPC1fggHMu33P15d98D7CJk3pQUOegrXYB8YU5tmRYz6wBeBdwJnOju\nA+lDe4ETc2rWZF0FfAxIr5jJi4DfZX6Ji3J+lgAHgK+nw1BfNbOjKOB5cffHgSuBRwkB/xSwg2Ke\nl6yJzkXRM+EDwH+l+3U9liIHfRTM7IXAjcBH3P3p7GMepkQ1/bQoM3srsN/dd+TdljqYBbwa2Oju\nrwJ+z5hhmgKdl2MJPcMlwIuBoxg/dFBoRTkX1ZjZJwjDud9sRP1FDvqqFyBvdmb2AkLIf9Pdt6bF\n+8p/bqbb/Xm1bxL+GnibmT1MGEJ7I2Gc+5h0yACKc34eAx5z9zvT+98mBH8Rz8vpwB53P+DufwS2\nEs5VEc9L1kTnopCZYGbnAW8FzvWR+e51PZYiB/3dwNJ0BsERhA8ubs65TTVLx7C/Bjzo7p/NPHQz\nsDrdXw18d6bbNlnufom7n+TuHYTz8N/ufi5wK/Cu9GlFOZa9wK/M7KVp0QpgJwU8L4Qhm9PM7Mj0\n9618LIU7L2NMdC5uBlals29OA57KDPE0JTM7gzDk+TZ3fzbz0M3ASjObbWZLCB8w3zXlH+Tuhb0B\nZxE+qf4l8Im82zPJtr+O8CfnfcA96e0swtj2NuAXwI+B4/Ju6ySPqwR8L93/s/SXcxfwH8DsvNtX\n4zG8EuhPz813gGOLel6AXuDnwP3AN4DZRTovwHWEzxf+SPhr64MTnQvACDPxfgn8jDDbKPdjqHIs\nuwhj8eUM+FLm+Z9Ij+Uh4Mzp/Gx9M1ZEJHJFHroREZEaKOhFRCKnoBcRiZyCXkQkcgp6EZHIKehF\nRCKnoBcRiZyCXkQkcv8PeNfq14MufEoAAAAASUVORK5CYII=\n",
370 | "text/plain": [
371 | ""
372 | ]
373 | },
374 | "metadata": {},
375 | "output_type": "display_data"
376 | }
377 | ],
378 | "source": [
379 | "#Iterations to do training\n",
380 | "for epoch in range(120):\n",
381 | " \n",
382 | " start=0\n",
383 | " end=100\n",
384 | " for i in range(14):\n",
385 | " \n",
386 | " X=X_train[start:end]\n",
387 | " Y=y_train[start:end]\n",
388 | " start=end\n",
389 | " end=start+100\n",
390 | " sess.run(train_step,feed_dict={rnn._inputs:X, y:Y})\n",
391 | "\n",
392 | " Loss=str(sess.run(cross_entropy,feed_dict={rnn._inputs:X, y:Y}))\n",
393 | " Train_accuracy=str(sess.run(accuracy,feed_dict={rnn._inputs:X_train, y:y_train}))\n",
394 | " Test_accuracy=str(sess.run(accuracy,feed_dict={rnn._inputs:X_test, y:y_test}))\n",
395 | " \n",
396 | " pl.plot([epoch],Loss,'b.',)\n",
397 | " pl.plot([epoch],Train_accuracy,'r*',)\n",
398 | " pl.plot([epoch],Test_accuracy,'g+')\n",
399 | " display.clear_output(wait=True)\n",
400 | " display.display(pl.gcf()) \n",
401 | " \n",
402 | " sys.stdout.flush()\n",
403 | " print(\"\\rIteration: %s Loss: %s Train Accuracy: %s Test Accuracy: %s\"%(epoch,Loss,Train_accuracy,Test_accuracy)),\n",
404 | " sys.stdout.flush()\n"
405 | ]
406 | },
407 | {
408 | "cell_type": "code",
409 | "execution_count": null,
410 | "metadata": {
411 | "collapsed": true
412 | },
413 | "outputs": [],
414 | "source": []
415 | },
416 | {
417 | "cell_type": "code",
418 | "execution_count": null,
419 | "metadata": {
420 | "collapsed": true
421 | },
422 | "outputs": [],
423 | "source": []
424 | }
425 | ],
426 | "metadata": {
427 | "kernelspec": {
428 | "display_name": "Python 3",
429 | "language": "python",
430 | "name": "python3"
431 | },
432 | "language_info": {
433 | "codemirror_mode": {
434 | "name": "ipython",
435 | "version": 3
436 | },
437 | "file_extension": ".py",
438 | "mimetype": "text/x-python",
439 | "name": "python",
440 | "nbconvert_exporter": "python",
441 | "pygments_lexer": "ipython3",
442 | "version": "3.5.2"
443 | }
444 | },
445 | "nbformat": 4,
446 | "nbformat_minor": 1
447 | }
448 |
--------------------------------------------------------------------------------
/GRU/GRU.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {
6 | "deletable": true,
7 | "editable": true
8 | },
9 | "source": [
10 | "# GRU ON 8*8 MNIST DATASET TO PREDICT TEN CLASSES"
11 | ]
12 | },
13 | {
14 | "cell_type": "markdown",
15 | "metadata": {
16 | "deletable": true,
17 | "editable": true
18 | },
19 | "source": [
20 | "\n",
21 | "### It's a dynamic sequence and batch GRU rnn. This is created with tensorflow scan and map higher ops!!!! \n",
22 | "### This is a base rnn which can be used to create LSTM, Neural Stack Machine, Neural Turing Machine and RNN-EM and so on!"
23 | ]
24 | },
25 | {
26 | "cell_type": "markdown",
27 | "metadata": {
28 | "deletable": true,
29 | "editable": true
30 | },
31 | "source": [
32 | "# Importing Libraries"
33 | ]
34 | },
35 | {
36 | "cell_type": "code",
37 | "execution_count": 1,
38 | "metadata": {
39 | "collapsed": false,
40 | "deletable": true,
41 | "editable": true
42 | },
43 | "outputs": [
44 | {
45 | "name": "stderr",
46 | "output_type": "stream",
47 | "text": [
48 | "/home/jli183/tensorflow/local/lib/python2.7/site-packages/sklearn/cross_validation.py:44: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. This module will be removed in 0.20.\n",
49 | " \"This module will be removed in 0.20.\", DeprecationWarning)\n"
50 | ]
51 | }
52 | ],
53 | "source": [
54 | "import numpy as np\n",
55 | "import tensorflow as tf\n",
56 | "from sklearn import datasets\n",
57 | "from sklearn.cross_validation import train_test_split\n",
58 | "import pylab as pl\n",
59 | "from IPython import display\n",
60 | "import sys\n",
61 | "%matplotlib inline"
62 | ]
63 | },
64 | {
65 | "cell_type": "markdown",
66 | "metadata": {
67 | "deletable": true,
68 | "editable": true
69 | },
70 | "source": [
71 | "# GRU class and functions"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": 2,
77 | "metadata": {
78 | "collapsed": false,
79 | "deletable": true,
80 | "editable": true
81 | },
82 | "outputs": [],
83 | "source": [
84 | "class RNN_cell(object):\n",
85 | "\n",
86 | " \"\"\"\n",
87 | " RNN cell object which takes 3 arguments for initialization.\n",
88 | " input_size = Input Vector size\n",
89 | " hidden_layer_size = Hidden layer size\n",
90 | " target_size = Output vector size\n",
91 | "\n",
92 | " \"\"\"\n",
93 | "\n",
94 | " def __init__(self, input_size, hidden_layer_size, target_size):\n",
95 | "\n",
96 | " #Initialization of given values\n",
97 | " self.input_size = input_size\n",
98 | " self.hidden_layer_size = hidden_layer_size\n",
99 | " self.target_size = target_size\n",
100 | " \n",
101 | " # Weights for input and hidden tensor\n",
102 | " self.Wx = tf.Variable(tf.zeros([self.input_size,self.hidden_layer_size]))\n",
103 | " self.Wr = tf.Variable(tf.zeros([self.input_size,self.hidden_layer_size]))\n",
104 | " self.Wz = tf.Variable(tf.zeros([self.input_size,self.hidden_layer_size]))\n",
105 | " \n",
106 | " self.br = tf.Variable(tf.truncated_normal([self.hidden_layer_size],mean=1))\n",
107 | " self.bz = tf.Variable(tf.truncated_normal([self.hidden_layer_size],mean=1))\n",
108 | " \n",
109 | " self.Wh = tf.Variable(tf.zeros([self.hidden_layer_size,self.hidden_layer_size]))\n",
110 | "\n",
111 | " \n",
112 | " #Weights for output layer\n",
113 | " self.Wo = tf.Variable(tf.truncated_normal([self.hidden_layer_size,self.target_size],mean=1,stddev=.01))\n",
114 | " self.bo = tf.Variable(tf.truncated_normal([self.target_size],mean=1,stddev=.01))\n",
115 | " # Placeholder for input vector with shape[batch, seq, embeddings]\n",
116 | " self._inputs = tf.placeholder(tf.float32,\n",
117 | " shape=[None, None, self.input_size],\n",
118 | " name='inputs')\n",
119 | "\n",
120 | " # Processing inputs to work with scan function\n",
121 | " self.processed_input = process_batch_input_for_RNN(self._inputs)\n",
122 | "\n",
123 | " '''\n",
124 | " Initial hidden state's shape is [1,self.hidden_layer_size]\n",
125 | " In First time stamp, we are doing dot product with weights to\n",
126 | " get the shape of [batch_size, self.hidden_layer_size].\n",
127 | " For this dot product tensorflow use broadcasting. But during\n",
128 | " Back propagation a low level error occurs.\n",
129 | " So to solve the problem it was needed to initialize initial\n",
130 | "    hiddden state of size [batch_size, self.hidden_layer_size].\n",
131 | " So here is a little hack !!!! Getting the same shaped\n",
132 | " initial hidden state of zeros.\n",
133 | " '''\n",
134 | "\n",
135 | " self.initial_hidden = self._inputs[:, 0, :]\n",
136 | " self.initial_hidden = tf.matmul(\n",
137 | " self.initial_hidden, tf.zeros([input_size, hidden_layer_size]))\n",
138 | " \n",
139 | " \n",
140 | " #Function for GRU cell\n",
141 | " def Gru(self, previous_hidden_state, x):\n",
142 | " \"\"\"\n",
143 | " GRU Equations\n",
144 | " \"\"\"\n",
145 | " z= tf.sigmoid(tf.matmul(x,self.Wz)+ self.bz)\n",
146 | " r= tf.sigmoid(tf.matmul(x,self.Wr)+ self.br)\n",
147 | " \n",
148 | " h_= tf.tanh(tf.matmul(x,self.Wx) + tf.matmul(previous_hidden_state,self.Wh)*r)\n",
149 | " \n",
150 | " \n",
151 | " current_hidden_state = tf.multiply((1-z),h_) + tf.multiply(previous_hidden_state,z)\n",
152 | " \n",
153 | " return current_hidden_state \n",
154 | " \n",
155 | " # Function for getting all hidden state.\n",
156 | " def get_states(self):\n",
157 | " \"\"\"\n",
158 | " Iterates through time/ sequence to get all hidden state\n",
159 | " \"\"\"\n",
160 | "\n",
161 | "        # Getting all hidden state through time\n",
162 | " all_hidden_states = tf.scan(self.Gru,\n",
163 | " self.processed_input,\n",
164 | " initializer=self.initial_hidden,\n",
165 | " name='states')\n",
166 | "\n",
167 | " return all_hidden_states\n",
168 | "\n",
169 | " # Function to get output from a hidden layer\n",
170 | " def get_output(self, hidden_state):\n",
171 | " \"\"\"\n",
172 | " This function takes hidden state and returns output\n",
173 | " \"\"\"\n",
174 | " output = tf.nn.relu(tf.matmul(hidden_state, self.Wo) + self.bo)\n",
175 | "\n",
176 | " return output\n",
177 | "\n",
178 | " # Function for getting all output layers\n",
179 | " def get_outputs(self):\n",
180 | " \"\"\"\n",
181 | " Iterating through hidden states to get outputs for all timestamp\n",
182 | " \"\"\"\n",
183 | " all_hidden_states = self.get_states()\n",
184 | "\n",
185 | " all_outputs = tf.map_fn(self.get_output, all_hidden_states)\n",
186 | "\n",
187 | " return all_outputs\n",
188 | "\n",
189 | "\n",
190 | "# Function to convert batch input data to use scan ops of tensorflow.\n",
191 | "def process_batch_input_for_RNN(batch_input):\n",
192 | " \"\"\"\n",
193 | " Process tensor of size [5,3,2] to [3,5,2]\n",
194 | " \"\"\"\n",
195 | " batch_input_ = tf.transpose(batch_input, perm=[2, 0, 1])\n",
196 | " X = tf.transpose(batch_input_)\n",
197 | "\n",
198 | " return X\n"
199 | ]
200 | },
201 | {
202 | "cell_type": "markdown",
203 | "metadata": {
204 | "deletable": true,
205 | "editable": true
206 | },
207 | "source": [
208 | "# Placeholder and initializers\n"
209 | ]
210 | },
211 | {
212 | "cell_type": "code",
213 | "execution_count": 3,
214 | "metadata": {
215 | "collapsed": true,
216 | "deletable": true,
217 | "editable": true
218 | },
219 | "outputs": [],
220 | "source": [
221 | "hidden_layer_size = 30\n",
222 | "input_size = 8\n",
223 | "target_size = 10"
224 | ]
225 | },
226 | {
227 | "cell_type": "code",
228 | "execution_count": 4,
229 | "metadata": {
230 | "collapsed": false,
231 | "deletable": true,
232 | "editable": true
233 | },
234 | "outputs": [],
235 | "source": [
236 | "y = tf.placeholder(tf.float32, shape=[None, target_size],name='inputs')"
237 | ]
238 | },
239 | {
240 | "cell_type": "markdown",
241 | "metadata": {
242 | "deletable": true,
243 | "editable": true
244 | },
245 | "source": [
246 | "# Models"
247 | ]
248 | },
249 | {
250 | "cell_type": "code",
251 | "execution_count": 5,
252 | "metadata": {
253 | "collapsed": true,
254 | "deletable": true,
255 | "editable": true
256 | },
257 | "outputs": [],
258 | "source": [
259 | "#Initializing rnn object\n",
260 | "rnn=RNN_cell( input_size, hidden_layer_size, target_size)"
261 | ]
262 | },
263 | {
264 | "cell_type": "code",
265 | "execution_count": 6,
266 | "metadata": {
267 | "collapsed": false,
268 | "deletable": true,
269 | "editable": true
270 | },
271 | "outputs": [],
272 | "source": [
273 | "#Getting all outputs from rnn\n",
274 | "outputs = rnn.get_outputs()"
275 | ]
276 | },
277 | {
278 | "cell_type": "code",
279 | "execution_count": 7,
280 | "metadata": {
281 | "collapsed": false,
282 | "deletable": true,
283 | "editable": true
284 | },
285 | "outputs": [],
286 | "source": [
287 |     "# Getting the final output by indexing the last timestep\n",
288 | "last_output = outputs[-1]"
289 | ]
290 | },
291 | {
292 | "cell_type": "code",
293 | "execution_count": 8,
294 | "metadata": {
295 | "collapsed": true,
296 | "deletable": true,
297 | "editable": true
298 | },
299 | "outputs": [],
300 | "source": [
301 |     "# As the RNN model emits its final layer through a ReLU activation, softmax is applied to obtain the final output.\n",
302 | "output=tf.nn.softmax(last_output)"
303 | ]
304 | },
305 | {
306 | "cell_type": "code",
307 | "execution_count": 9,
308 | "metadata": {
309 | "collapsed": true,
310 | "deletable": true,
311 | "editable": true
312 | },
313 | "outputs": [],
314 | "source": [
315 | "#Computing the Cross Entropy loss \n",
316 | "cross_entropy = -tf.reduce_sum(y * tf.log(output))"
317 | ]
318 | },
319 | {
320 | "cell_type": "code",
321 | "execution_count": 10,
322 | "metadata": {
323 | "collapsed": true,
324 | "deletable": true,
325 | "editable": true
326 | },
327 | "outputs": [],
328 | "source": [
329 |     "# Training with Adam Optimizer\n",
330 | "train_step = tf.train.AdamOptimizer().minimize(cross_entropy)"
331 | ]
332 | },
333 | {
334 | "cell_type": "code",
335 | "execution_count": 11,
336 | "metadata": {
337 | "collapsed": true,
338 | "deletable": true,
339 | "editable": true
340 | },
341 | "outputs": [],
342 | "source": [
343 |     "# Calculation of correct predictions and accuracy\n",
344 | "correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(output,1))\n",
345 | "accuracy = (tf.reduce_mean(tf.cast(correct_prediction, tf.float32)))*100"
346 | ]
347 | },
348 | {
349 | "cell_type": "markdown",
350 | "metadata": {
351 | "deletable": true,
352 | "editable": true
353 | },
354 | "source": [
355 | "# Dataset Preparation"
356 | ]
357 | },
358 | {
359 | "cell_type": "code",
360 | "execution_count": 12,
361 | "metadata": {
362 | "collapsed": true,
363 | "deletable": true,
364 | "editable": true
365 | },
366 | "outputs": [],
367 | "source": [
368 |     "# Function to get a one-hot vector\n",
369 | "def get_on_hot(number):\n",
370 | " on_hot=[0]*10\n",
371 | " on_hot[number]=1\n",
372 | " return on_hot\n",
373 | " "
374 | ]
375 | },
376 | {
377 | "cell_type": "code",
378 | "execution_count": 13,
379 | "metadata": {
380 | "collapsed": false,
381 | "deletable": true,
382 | "editable": true
383 | },
384 | "outputs": [],
385 | "source": [
386 |     "# Using scikit-learn's 8x8 digits dataset.\n",
387 | "digits = datasets.load_digits()\n",
388 | "X=digits.images\n",
389 | "Y_=digits.target\n",
390 | "Y=map(get_on_hot,Y_)"
391 | ]
392 | },
393 | {
394 | "cell_type": "code",
395 | "execution_count": 14,
396 | "metadata": {
397 | "collapsed": true,
398 | "deletable": true,
399 | "editable": true
400 | },
401 | "outputs": [],
402 | "source": [
403 | "#Getting Train and test Dataset\n",
404 | "X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.22, random_state=42)\n",
405 | "\n",
406 |     "# Cutting for simple iteration\n",
407 | "X_train=X_train[:1400]\n",
408 | "y_train=y_train[:1400]"
409 | ]
410 | },
411 | {
412 | "cell_type": "code",
413 | "execution_count": 15,
414 | "metadata": {
415 | "collapsed": false,
416 | "deletable": true,
417 | "editable": true
418 | },
419 | "outputs": [
420 | {
421 | "name": "stdout",
422 | "output_type": "stream",
423 | "text": [
424 | "WARNING:tensorflow:From :2: initialize_all_variables (from tensorflow.python.ops.variables) is deprecated and will be removed after 2017-03-02.\n",
425 | "Instructions for updating:\n",
426 | "Use `tf.global_variables_initializer` instead.\n"
427 | ]
428 | }
429 | ],
430 | "source": [
431 | "sess=tf.InteractiveSession()\n",
432 | "sess.run(tf.initialize_all_variables())"
433 | ]
434 | },
435 | {
436 | "cell_type": "code",
437 | "execution_count": 16,
438 | "metadata": {
439 | "collapsed": false,
440 | "deletable": true,
441 | "editable": true,
442 | "scrolled": false
443 | },
444 | "outputs": [
445 | {
446 | "data": {
447 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD8CAYAAAB5Pm/hAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHFpJREFUeJzt3X+UVeV97/H3lwEBE0gUEMcIgvdSWrAFw9Q4q4nMraYx\nBqM0Xku1laykoga66kqyeqW5Zg4hsSZtkt6uKgSLDcmyMaYBY2+S2xupY2rF4JBIAkQqorGSAxiS\nXE0MIPC9fzz7zOyZOWfO7197f15rzTrP2Wefs5/Zc+ZznvPsZz/b3B0REUmuMc2ugIiI1JeCXkQk\n4RT0IiIJp6AXEUk4Bb2ISMIp6EVEEk5BLyKScAp6EZGEU9CLiCTc2GZXAGDq1Kk+a9asZldDRKSt\n7Nix4yfuPq3Yei0R9LNmzaK/v7/Z1RARaStm9qNS1lPXjYhIwinoRUQSTkEvIpJwCnoRkYRT0IuI\nJJyCXkQk4do+6Ldtg7/8y3ArIiIjtcQ4+kpt2waXXgrHj8Npp8HWrdDd3exaiYi0lrZu0ff1hZA/\neTLc9vU1u0YiIq2nrYO+pye05Ds6wm1PT7NrJCLSetq666a7O3TX9PWFkFe3jYjISG0d9DAY7rlu\nG4W9iMhQbR/0OiArIjK6tu6jBx2QFREppu2DXgdkRURG1/ZdNzogKyIyurYPegjhroAXEcmv7btu\nRERkdAp6EZGEU9CLiCScgl5EJOEU9CIiCaegFxFJOAW9iEjCJSrodbUpEZGREnHCFGhyMxGRQhLT\notfkZiIi+SUm6DW5mYhIfonputHkZiIi+SUm6EGTm4mI5FO068bMZpjZI2a2x8x2m9mfRcvPNLNv\nmdkz0e0ZseesNrN9ZrbXzN5Rz19ARERGV0of/QngQ+4+D7gYWGlm84DbgK3uPgfYGt0nemwZMB+4\nHLjbzDrqUXkRESmuaNC7e9bdvxuVXwF+CLwJuArYFK22Cbg6Kl8F3O/ux9z9OWAfcFGtKy4iIqUp\na9SNmc0CLgS+A0x392z00EFgelR+E/Cfsae9GC0b/lorzKzfzPpfeumlMqstIiKlKjnozez1wFeB\nW9395fhj7u6Al7Nhd9/g7l3u3jVt2rRynioiImUoKejNbBwh5O9z983R4kNm1hk93gkcjpYfAGbE\nnn5utExERJqglFE3BmwEfujun4k99BCwPCovB74WW77MzMab2WxgDrC9dlUWEZFylDKO/neAPwZ+\nYGZPRcv+ArgTeMDM3g/8CLgWwN13m9kDwB7CiJ2V7n6y5jUXEZGSFA16d38MsAIPX1rgOZ8APlFF\nvUREpEYSM9eNiIjkp6AXEUk4Bb2ISMIp6EVEEk5BLyKScIkLel03VkRkqETNR6/rxoqIjJSoFr2u\nGysiMlKigl7XjRURGSlRXTe6bqyIyEiJCnrQdWNFRIZLVNeNiIiMpKAXEUk4Bb2ISMIp6EVEEk5B\nLyKScAp6EZGEU9CLiCScgl5EJOEU9CIiCaegFxFJuMQGvealFxEJEjfXDWheehGRuES26DUvvYjI\noEQGvealFxEZlMiuG81LLyIyKJFBD5qXXkQkJ5FdNyIiMkhBLyKScAp6EZGEU9CLiCScgl5EJOES\nHfSaBkFEJMHDKzUNgohIULRFb2b3mtlhM9sVW5YxswNm9lT0c0XssdVmts/M9prZO+pV8WI0DYKI\nSFBK183ngcvzLP+suy+Mfr4BYGbzgGXA/Og5d5tZR60qWw5NgyAiEhTtunH3b5vZrBJf7yrgfnc/\nBjxnZvuAi4CG95JrGgQRkaCaPvo/NbMbgH7gQ+7+M+BNwBOxdV6MljWFpkEQEal81M064HxgIZAF\nPl3uC5jZCjPrN7P+l156qcJqiIhIMRUFvbsfcveT7n4KuIfQPQNwAJgRW/XcaFm+19jg7l3u3jVt\n2rRKqiEiIiWoKOjNrDN2dymQG5HzEL
DMzMab2WxgDrC9uiqKiEg1ivbRm9mXgB5gqpm9CPQCPWa2\nEHDgeeAmAHffbWYPAHuAE8BKdz9Zn6qLiEgpzN2bXQe6urq8v7+/2dUQEWkrZrbD3buKrZfoKRBy\nNBWCiKRZYqdAyNFUCCKSdolv0WsqBBFJu8QHvaZCEJG0S3zXjaZCEJG0S3zQg6ZCEJF0S3zXjYhI\n2inoRUQSTkEvIpJwCnoRkYRLTdDr7FgRSatUjLrR2bEikmapaNHr7FgRSbNUBL3OjhWRNEtF143O\njhWRNEtF0IPOjhWR9EpF142ISJop6EVEEk5BLyKScKkLep04JSJpk5qDsaATp0QknVLVoteJUyKS\nRqkKep04JSJplKquG504JSJplKqgB504JSLpk6quGxGRNFLQi4gkXCqDXmPpRSRNUtdHr7H0IpI2\nqWvRayy9iKRN6oJeY+lFJG1S13WjsfQikjapC3rQWHoRSZfUdd2IiKRN0aA3s3vN7LCZ7YotO9PM\nvmVmz0S3Z8QeW21m+8xsr5m9o14VrwUNsxSRNCilRf954PJhy24Dtrr7HGBrdB8zmwcsA+ZHz7nb\nzDpqVtsayg2zvP32cKuwbyPZLFx8ceh/27kTFi8Ot/Fl9ShrO9pOPbazeDEcPFjf/xl3L/oDzAJ2\nxe7vBTqjciewNyqvBlbH1vsXoLvY6y9atMgb7Y473Ds63CHc3nFHw6uQTD/+sfsll7g/9dTg7Vve\n4n7xxbUrL18e/nDgPn+++5gx4Ta+rB5lbUfbqcd2xoxxv+WWiv7dgH4vkq8etlRR0P88VrbcfeDv\ngD+KPbYRuKbY6zcj6B9/3H3ixBDyEyeG+6n14x/XLow7O93N6vuPpx/9JPVnwoSy/nUbFvTR/Z9F\ntyUHPbAC6Af6Z86cWWlEVeXxx0NLPpEhP1p4D29pD28hV1rWT6J/dk8Jt8estPLOsyp7XqFyI37H\nRv4+udfr7SG0Nq+/3j2bLevfvNSgr3TUzSEz6wSIbg9Hyw8AM2LrnRstG8HdN7h7l7t3TZs2rcJq\nVKe7G1avbtOhltns6H2Fq1fDd74DTzwB118/tPzYY0OXbdo0+Lq7d1derpM9U8Lt8SrGiD3ZGW5P\nVVF+8pzRH4+r53ZqVS53O4/OCuWOEsv/HiVBuc8rVG7Efmvk75N7vTU9wLFjMHkynH029VDpv85D\nwPKovBz4Wmz5MjMbb2azgTnA9uqqKMDIA5CLFsG//dvIEC8lvE+dqjigB0LXhpa/f1Yo1yOwHp0V\nyh0Oe6aG8vb5Z7Dn/EkwaRJ7Zk6ESZPYPvf1YdnYsSPKL7wBGDuWdV1UVp40iRcmh9tC68a3Wc/t\n1KRcwXbO+mUor19URnnSpIHbsp6Xp9yI/dbI3yf3OgDcfHNdD8haaP2PsoLZl4AeYCpwCOgFHgQe\nAGYCPwKudfefRut/BHgfcAK41d2/WawSXV1d3t/fX/lvUYVt21rsLNlsFpYuBTNYvx5uuimU584d\nGt5V2DMF5h0JLeSd0+G3syFQd3QWL39uEdyyA04abHjzyPJdXXD2L+A9h87gq9N/xnsOTOKuua+E\nZfvGctfCE1WVVy0B3zQbW/4cAN7r2BobuI0vq0c5jdtpVbXeb62gd3EvmZ5Myeub2Q537yq6XrGg\nb4RmBX1LzWSZzcKyZTB79mCgz59fsOV93GDfmVFgx8slhPe6WFg/OBfe83QsoKsJ4wOTsA+9AoDv\nXIot2FL3f7xi/+BSP4364GqGRn8Qe29lOVxq0Kf6zNiWmMky19c+YwZ8+9t5u1xyXSSvdsB9F4Ry\nwb4/j76GQsGvuPGvntcsA2bPZtWSqPzaa3nL9j9PFC9HIQ9gC7aE29g/a7nlUuTWL/c14v9Y5ZQL\nPd6o7dS6XM1rNEoz9lvSpHKum5zcTJa5Fn1DZrIc3jWzcOGIVeIt9F+ODSE+7wiMPwUvjw/rrF8U\nWu
CMG8f6Ba+F8tixrI91b6xakuveeI1VA+H3yrBbBrpBoLLAbJcuiHq2ElupNVovvYt7h9yWW670\nebnymkfXVFX/UjTy98lXrpdUB33DZrKMh/vcueGAKYwI+SejrpVcC33eEfjkW2FtD3zgysH14mV4\nLVY+MVAqFN458X+cVv9aHf/HqvSfvZb/nKX8k9ZzO7Uul7puru843odcTrnS5w0v13O/NeP3KadP\nvmKljMGs908zTphqiNxZovFx6qP8/NOvh9t3XoeTKfzj7lWXK31e7yO97u7e+0hvSeVy1h3tNXIq\n3U4jxLcp0giUOI4+1Qdj42oy+qaEbpmc758Fv3U4HCR9eio8cEE0nraAWnZ1ZPoyZHoyZPoyAGWV\nG9L6EJGSlHowNtVdNzlVj76Jj5gZ1i1TaCjjv88IQb/kOvjmr+V/2eFdJLX6yl6Lr54i0j7UoidM\nVXz77WH0TUcHrF0bTiwdVS7c//ZvC7bcfzkWvrBglKGMJQxHjLeoRUTi1KIvQ9mjb7LZcGZqNjui\n5f5qB2z5Dbh+F0w4GZ39Rhgls2rJ8BcaORwx05cpeHBIRKQSCnrKGH2TzcK554YpBGKOjxkcJXPn\n28IomT+6prw6xPvORURqKdUnTMV1d4eQ7+srcBGSbBbOOWdIyOfmd/n4JWHIo2VCyP/T/fB3/3vw\nqb5z6WB5lBM0FPIiUg9q0UcKHpAt0IoHWH0pfGPuyNe6ZlkI8YGTlDZvhmHjzvON+RURqQcFfSTf\ndAjd3YSpCfKE/I1X5g/5uHJPPhERqQeNuokMb9H/4sQExrx2bMR6K6+Auy/K/xoaJSMijaRRN2WK\nH5B9+wVZxvzJG+DwYRg7Fk6EqQW2nxNC3r8yH/vvYcKx3HDIXItdAS8irUYt+rgC/fF7psK8n4SD\nrfksPm8xfe/tq3v1RETi1KKvRIH++Pmr8q+ugBeRdqCgB5g4EY4eBWAX85jLXsZxkt6e0Z+mkBeR\ndqCgz2ZhwYLQH//cc0zlJ3Rwko/2GGt78ndrxQ+6ioi0Op0wNWNGmIjsuTB/+0+nHuYw0/nX/3cD\nAI//6/8ABk9u0kFXEWk36T0YG+uuydl+DrxlRf7VFfAi0mp0zdhi9u+HOXOAcGUnGAz5O775IKwJ\nV27qWOv87phwgpNCXkTaUTr76CdMgGODJ0P91e/AVy4YfPgvLrgTdrwdgJP2KrPOPLfRNRQRqZl0\ntuj/4A/CrRk3Xjk05AGY8QQsv5TfHdPL5+46nf/68p/kn+hMRKQNpKtFH+uXv3EJ/H1X4VE1tsb4\n+GXbqrvylIhIC0hPiz43jHL2bI6Pgcv3ga/tGHg4N5VwfFbJfBOdiYi0m/S06GfMCIkNfLwnzBsP\nJwcetgVbWHze4iGzSm4bX+aVp0REWlDygz7WXXPLu2D9b49cpdBUBvGJzqZMGWzRq/tGRNpJ8oN+\n/35YuZLMz7fkDXkYfSqDXKirr15E2lXy++g7O2HvXjKPhEv8eWbwId+5tKQrPKmvXkTaWbJb9BMn\nkrn4KGuuHfnQ4vMWQ+9mMiW8TE9PaMkfOwZmoRtHRKRdJLtFv38/mXOuwz91+sAif+Z6ehd9qKyZ\nJ7u74W/+Bjo6wizGt95a4ALiIiItKNlBD/DII/CrXw3enzyZzJK/LvtljhwJIX/qlLpvRKS9JDro\nM595N3ZTFusdPDHKpq+raIrhXPdNR4eGWopIe0lmH300pDIDA33wlgG/c8LQ1n0ZNNRSRNpVVUFv\nZs8DrxDOPDrh7l1mdibwZWAW8Dxwrbv/rLpqlmn/fvjwh+HBB8lc9CqZ7acDrw7MOV8pDbUUkXZU\ni66b/+buC2NzIt8GbHX3OcDW6H5jdXbC5Mlw9ChreoCjR+n9RRecfXbVLx0fann0KHzhC1W/pIhI\nXdWjj/4qYFNU3gRcXYdtFHfoENx8cyjffDOZZ2fU5GV7ekI/PYA7
/MM/aASOiLS2aoPegYfNbIeZ\n5a7NNN3ds1H5IDC9ym2ULdOXwRZswc66GwA7625swZaaXOe1uxve974wnh7gxAmNwBGR1lbtwdi3\nuvsBMzsL+JaZPR1/0N3dzPLOBRx9MKwAmDlzZpXViGSzsHQpGTMyc5fDF7+IffTUwPVea+WGG2DT\nptCF09EBL7wQWvXqqxeRVlRVi97dD0S3h4EtwEXAITPrBIhuDxd47gZ373L3rmnTplVTjUFr14YL\nfT/xREjiU6fCcrMwEqdGciNwbrwxvPQ994QDtOrCEZFWVHHQm9nrzGxSrgz8HrALeAhYHq22HPha\ntZUsauLEkLjr1gFwX+yKUb19wPXXVz3iZrjubpg5M3Td6MCsiLSyalr004HHzGwnsB34urv/H+BO\n4O1m9gxwWXS/vvbvh6VLB46Svjw+Wj5mDJlHLYzAqcGIm+F0YFZE2kHFQe/u+919QfQz390/ES0/\n4u6Xuvscd7/M3X9au+oW0NlJ5r+8gN1+EsvAWb+Eu7pg+gdPsf3di+DgwbpsVgdmRaQdJGYKhMyz\nM/FNs/E913LNMlh5ZDaHXw8XPfgkbN5ct+3ecANMmBBa9vEDsyIirSIxQc/mzWG2ylt+I9zfv78h\nm9WBWRFpdckJ+siaR9cAYGts4NbWWE3G0BeiA7Mi0soSOalZbty8rbGaj6EvJHdg9uTJcGB248aw\n/IYbNL5eRJorES36TF9moOUODCk3yvADs6+9Bp/7nLpxRKT5khH0PRm81wda77lyKdeDraXcgdlc\n2LurG0dEmi8RQV9IpifT0O3lDszedBOMGxeWaXy9iDRb4oK+0a344bq7wwm673//0G6cTEZhLyLN\nkYygz2Zh8WI4eLDhrfhCct04Y8aEKXceflj99SLSHMkI+rVr4bHH4GMfa3ZNBuS6cS67bDDs1V8v\nIs1g7o0Zfjiarq4u7+/vL/+J0bVhR5hQ+bVha23btjD08vjxcH/cuNCto2GXIlItM9sRu7pfQe3d\not+/H667Dk4/nUwPcPrpdZmpshoadikizdbeQZ/n2rD1mqmyGhp2KSLN1N5BDyOuDVuvmSqrUWjY\n5caNcMstatmLSH21ddDX89qwtVZo2KW6cUSk3tr7YGxMI+e1qca2bSHYjx4NrXoIwX/TTQMXyBIR\nKUk6Dsa2IXXjiEijJSbom31GbDlG68bp6VHgi0httX/QZ7Nk3nsemV+/udk1KVu+0TjHj8P69XDJ\nJbBhQ3PrJyLJ0P5Bv3Yta2a/0FJnxZYq3o0zfvxg4EO4iMkHPqDWvYhUr32DfuLEkIy5I5jr1oX7\nEyc2t15lynXjPPJICPyOjsHHTp5U615Eqte2QZ954ANYBiwT7ufKma+sbF6lqpAL/LvvDgdp1boX\nkVpp36C/8tP4wZvxj4VfwT82Bj90C5klf93kmlVnxQp49FG17kWkdto26IG2OCu2EqW07pcuVQtf\nREqTiBOmMn2ZlpmHvta2bQtz4txzT2jVD6fZMEXSq9QTphIR9GmwYQOsWhVa9Pn+ZGPHwgc/CG98\nYxiLr9AXSb5Sg35sIyoj1VuxAn7zN0PrfuPGcIJV3IkT8KlPhW6ejg6FvogMUou+DeW6cw4ehH/+\n5/xdOqDQF0k6dd2kRLEunZzhoT9lChw5ovAXaWcK+hTZtg36+uDnP4fPfra00HcfGv4vvxwe00Fd\nkfahoE+pckN/uHHj4F3vChfpuvBC+N73wnJ9AIi0HgW9FAz9XIu+HIU+AC68UF1AIs2ioJchcqGf\n65uvtMWfT74uoNwHwJQp+lYgUi8KeikqHv7f+14YxfP1r48culmp4d8cRvtWUKysDwmRkRT0UpHc\n0E0YDNpafwBUoljXUfybQ7kfIvpAkXbV9KA3s8uB/wV0AH/v7ncWWldB3/ryfQAATJ5cuAuokmMB\nlajldhr1gTJauZnb0fGW9tLU
oDezDuA/gLcDLwJPAn/o7nvyra+gb2/Du4BgZJC0wreCarXjB1e5\n2yl0vKVdP7jaYTvVfLg2ewqEi4B97r4/qsz9wFVA3qCX9tbdXdqbtNC3gmLlYh8SjQrGRvVyNnM7\n7oPTadRKGj4gqzFmTLjC3Nat9fsmVa+gfxPwn7H7LwJvia9gZiuAFQAzZ86sUzWklZT6gZDPaB8S\ntWphtcoHStK2k4YPyGqcOhWuFd3X135BX5S7bwA2QOi6aVY9pD1U8yFRjkZ8oBQrN2s7ox1vqUbS\nPrjq0aI/7bTQfVMv9Qr6A8CM2P1zo2UiLa1RHyit6uqr8x9vaccPrnbZTiMOgNcr6J8E5pjZbELA\nLwOuq9O2RKRG0v5Bl1R1CXp3P2Fmq4B/IQyvvNfdd9djWyIiMrq69dG7+zeAb9Tr9UVEpDTtfXFw\nEREpSkEvIpJwCnoRkYRT0IuIJFxLzF5pZi8BP6riJaYCP6lRdWpJ9SqP6lW+Vq2b6lWeSut1nrtP\nK7ZSSwR9tcysv5SJfRpN9SqP6lW+Vq2b6lWeetdLXTciIgmnoBcRSbikBP2GZlegANWrPKpX+Vq1\nbqpXeepar0T00YuISGFJadGLiEgBbR30Zna5me01s31mdlsT6zHDzB4xsz1mttvM/ixanjGzA2b2\nVPRzRRPq9ryZ/SDafn+07Ewz+5aZPRPdntGEes2N7ZenzOxlM7u1GfvMzO41s8Nmtiu2rOA+MrPV\n0Xtur5m9o8H1+isze9rMvm9mW8zsjdHyWWb2q9h+W1+veo1St4J/uybvsy/H6vS8mT0VLW/YPhsl\nIxrzPnP3tvwhzIr5LHA+cBqwE5jXpLp0Am+OypMI18udB2SADzd5Pz0PTB227FPAbVH5NuCTLfC3\nPAic14x9BlwCvBnYVWwfRX/XncB4YHb0HuxoYL1+DxgblT8Zq9es+HpN2md5/3bN3mfDHv808NFG\n77NRMqIh77N2btEPXJfW3Y8DuevSNpy7Z939u1H5FeCHhMsptqqrgE1ReRNwdRPrAnAp8Ky7V3PS\nXMXc/dvAT4ctLrSPrgLud/dj7v4csI/wXmxIvdz9/7r7iejuE4SL+jRcgX1WSFP3WY6ZGXAt8KV6\nbHs0o2REQ95n7Rz0+a5L2/RwNbNZwIXAd6JFfxp9zb63GV0kgAMPm9mO6Dq9ANPdPRuVDwLTm1Cv\nuGUM/edr9j6Dwvuold537wO+Gbs/O+qCeNTM3takOuX727XKPnsbcMjdn4kta/g+G5YRDXmftXPQ\ntxwzez3wVeBWd38ZWEfoWloIZAlfGxvtre6+EHgnsNLMLok/6OF7YtOGXpnZacC7ga9Ei1phnw3R\n7H2Uj5l9BDgB3BctygIzo7/1B4F/NLPJDa5Wy/3thvlDhjYoGr7P8mTEgHq+z9o56FvqurRmNo7w\nB7zP3TcDuPshdz/p7qeAe6jT19XRuPuB6PYwsCWqwyEz64zq3QkcbnS9Yt4JfNfdD0Fr7LNIoX3U\n9Pedmb0XWAJcH4UD0Vf8I1F5B6FP99caWa9R/natsM/GAr8PfDm3rNH7LF9G0KD3WTsH/cB1aaNW\n4TLgoWZUJOr72wj80N0/E1veGVttKbBr+HPrXK/XmdmkXJlwIG8XYT8tj1ZbDnytkfUaZkgrq9n7\nLKbQPnoIWGZm4y1cE3kOsL1RlTKzy4E/B97t7q/Glk8zs46ofH5Ur/2Nqle03UJ/u6bus8hlwNPu\n/mJuQSP3WaGMoFHvs0Ycca7jkewrCEevnwU+0sR6vJXwlev7wFPRzxXAF4EfRMsfAjobXK/zCUfu\ndwK7c/sImAJsBZ4BHgbObNJ+ex1wBHhDbFnD9xnhgyYLvEboC33/aPsI+Ej0ntsLvLPB9dpH6LvN\nvc/WR+u+J/obPwV8F7iyCfus4N+umfssWv554OZh6zZsn42SEQ15n+nMWBGRhGvnrhsRESmBgl
5E\nJOEU9CIiCaegFxFJOAW9iEjCKehFRBJOQS8iknAKehGRhPv/Q+ycqDSxBE8AAAAASUVORK5CYII=\n",
448 | "text/plain": [
449 | ""
450 | ]
451 | },
452 | "metadata": {},
453 | "output_type": "display_data"
454 | },
455 | {
456 | "name": "stdout",
457 | "output_type": "stream",
458 | "text": [
459 | "Iteration: 199 Loss: 0.33783 Train Accuracy: 100.0 Test Accuracy: 95.9596\n"
460 | ]
461 | },
462 | {
463 | "data": {
464 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD8CAYAAAB5Pm/hAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHFpJREFUeJzt3X+UVeV97/H3lwEBE0gUEMcIgvdSWrAFw9Q4q4nMraYx\nBqM0Xku1laykoga66kqyeqW5Zg4hsSZtkt6uKgSLDcmyMaYBY2+S2xupY2rF4JBIAkQqorGSAxiS\nXE0MIPC9fzz7zOyZOWfO7197f15rzTrP2Wefs5/Zc+ZznvPsZz/b3B0REUmuMc2ugIiI1JeCXkQk\n4RT0IiIJp6AXEUk4Bb2ISMIp6EVEEk5BLyKScAp6EZGEU9CLiCTc2GZXAGDq1Kk+a9asZldDRKSt\n7Nix4yfuPq3Yei0R9LNmzaK/v7/Z1RARaStm9qNS1lPXjYhIwinoRUQSTkEvIpJwCnoRkYRT0IuI\nJJyCXkQk4do+6Ldtg7/8y3ArIiIjtcQ4+kpt2waXXgrHj8Npp8HWrdDd3exaiYi0lrZu0ff1hZA/\neTLc9vU1u0YiIq2nrYO+pye05Ds6wm1PT7NrJCLSetq666a7O3TX9PWFkFe3jYjISG0d9DAY7rlu\nG4W9iMhQbR/0OiArIjK6tu6jBx2QFREppu2DXgdkRURG1/ZdNzogKyIyurYPegjhroAXEcmv7btu\nRERkdAp6EZGEU9CLiCScgl5EJOEU9CIiCaegFxFJOAW9iEjCJSrodbUpEZGREnHCFGhyMxGRQhLT\notfkZiIi+SUm6DW5mYhIfonputHkZiIi+SUm6EGTm4mI5FO068bMZpjZI2a2x8x2m9mfRcvPNLNv\nmdkz0e0ZseesNrN9ZrbXzN5Rz19ARERGV0of/QngQ+4+D7gYWGlm84DbgK3uPgfYGt0nemwZMB+4\nHLjbzDrqUXkRESmuaNC7e9bdvxuVXwF+CLwJuArYFK22Cbg6Kl8F3O/ux9z9OWAfcFGtKy4iIqUp\na9SNmc0CLgS+A0x392z00EFgelR+E/Cfsae9GC0b/lorzKzfzPpfeumlMqstIiKlKjnozez1wFeB\nW9395fhj7u6Al7Nhd9/g7l3u3jVt2rRynioiImUoKejNbBwh5O9z983R4kNm1hk93gkcjpYfAGbE\nnn5utExERJqglFE3BmwEfujun4k99BCwPCovB74WW77MzMab2WxgDrC9dlUWEZFylDKO/neAPwZ+\nYGZPRcv+ArgTeMDM3g/8CLgWwN13m9kDwB7CiJ2V7n6y5jUXEZGSFA16d38MsAIPX1rgOZ8APlFF\nvUREpEYSM9eNiIjkp6AXEUk4Bb2ISMIp6EVEEk5BLyKScIkLel03VkRkqETNR6/rxoqIjJSoFr2u\nGysiMlKigl7XjRURGSlRXTe6bqyIyEiJCnrQdWNFRIZLVNeNiIiMpKAXEUk4Bb2ISMIp6EVEEk5B\nLyKScAp6EZGEU9CLiCScgl5EJOEU9CIiCaegFxFJuMQGvealFxEJEjfXDWheehGRuES26DUvvYjI\noEQGvealFxEZlMiuG81LLyIyKJFBD5qXXkQkJ5FdNyIiMkhBLyKScAp6EZGEU9CLiCScgl5EJOES\nHfSaBkFEJMHDKzUNgohIULRFb2b3mtlhM9sVW5YxswNm9lT0c0XssdVmts/M9prZO+pV8WI0DYKI\nSFBK183ngcvzLP+suy+Mfr4BYGbzgGXA/Og5d5tZR60qWw5NgyAiEhTtunH3b5vZrBJf7yrgfnc/\nBjxnZvuAi4CG95JrGgQRkaCaPvo/NbMbgH7gQ+7+M+BNwBOxdV6MljWFpkEQEal81M064HxgIZAF\nPl3uC5jZCjPrN7P+l156qcJqiIhIMRUFvbsfcveT7n4KuIfQPQNwAJgRW/XcaFm+19jg7l3u3jVt\n2rRKqiEiIiWoKOjNrDN2dymQG5HzEL
DMzMab2WxgDrC9uiqKiEg1ivbRm9mXgB5gqpm9CPQCPWa2\nEHDgeeAmAHffbWYPAHuAE8BKdz9Zn6qLiEgpzN2bXQe6urq8v7+/2dUQEWkrZrbD3buKrZfoKRBy\nNBWCiKRZYqdAyNFUCCKSdolv0WsqBBFJu8QHvaZCEJG0S3zXjaZCEJG0S3zQg6ZCEJF0S3zXjYhI\n2inoRUQSTkEvIpJwCnoRkYRLTdDr7FgRSatUjLrR2bEikmapaNHr7FgRSbNUBL3OjhWRNEtF143O\njhWRNEtF0IPOjhWR9EpF142ISJop6EVEEk5BLyKScKkLep04JSJpk5qDsaATp0QknVLVoteJUyKS\nRqkKep04JSJplKquG504JSJplKqgB504JSLpk6quGxGRNFLQi4gkXCqDXmPpRSRNUtdHr7H0IpI2\nqWvRayy9iKRN6oJeY+lFJG1S13WjsfQikjapC3rQWHoRSZfUdd2IiKRN0aA3s3vN7LCZ7YotO9PM\nvmVmz0S3Z8QeW21m+8xsr5m9o14VrwUNsxSRNCilRf954PJhy24Dtrr7HGBrdB8zmwcsA+ZHz7nb\nzDpqVtsayg2zvP32cKuwbyPZLFx8ceh/27kTFi8Ot/Fl9ShrO9pOPbazeDEcPFjf/xl3L/oDzAJ2\nxe7vBTqjciewNyqvBlbH1vsXoLvY6y9atMgb7Y473Ds63CHc3nFHw6uQTD/+sfsll7g/9dTg7Vve\n4n7xxbUrL18e/nDgPn+++5gx4Ta+rB5lbUfbqcd2xoxxv+WWiv7dgH4vkq8etlRR0P88VrbcfeDv\ngD+KPbYRuKbY6zcj6B9/3H3ixBDyEyeG+6n14x/XLow7O93N6vuPpx/9JPVnwoSy/nUbFvTR/Z9F\ntyUHPbAC6Af6Z86cWWlEVeXxx0NLPpEhP1p4D29pD28hV1rWT6J/dk8Jt8estPLOsyp7XqFyI37H\nRv4+udfr7SG0Nq+/3j2bLevfvNSgr3TUzSEz6wSIbg9Hyw8AM2LrnRstG8HdN7h7l7t3TZs2rcJq\nVKe7G1avbtOhltns6H2Fq1fDd74DTzwB118/tPzYY0OXbdo0+Lq7d1derpM9U8Lt8SrGiD3ZGW5P\nVVF+8pzRH4+r53ZqVS53O4/OCuWOEsv/HiVBuc8rVG7Efmvk75N7vTU9wLFjMHkynH029VDpv85D\nwPKovBz4Wmz5MjMbb2azgTnA9uqqKMDIA5CLFsG//dvIEC8lvE+dqjigB0LXhpa/f1Yo1yOwHp0V\nyh0Oe6aG8vb5Z7Dn/EkwaRJ7Zk6ESZPYPvf1YdnYsSPKL7wBGDuWdV1UVp40iRcmh9tC68a3Wc/t\n1KRcwXbO+mUor19URnnSpIHbsp6Xp9yI/dbI3yf3OgDcfHNdD8haaP2PsoLZl4AeYCpwCOgFHgQe\nAGYCPwKudfefRut/BHgfcAK41d2/WawSXV1d3t/fX/lvUYVt21rsLNlsFpYuBTNYvx5uuimU584d\nGt5V2DMF5h0JLeSd0+G3syFQd3QWL39uEdyyA04abHjzyPJdXXD2L+A9h87gq9N/xnsOTOKuua+E\nZfvGctfCE1WVVy0B3zQbW/4cAN7r2BobuI0vq0c5jdtpVbXeb62gd3EvmZ5Myeub2Q537yq6XrGg\nb4RmBX1LzWSZzcKyZTB79mCgz59fsOV93GDfmVFgx8slhPe6WFg/OBfe83QsoKsJ4wOTsA+9AoDv\nXIot2FL3f7xi/+BSP4364GqGRn8Qe29lOVxq0Kf6zNiWmMky19c+YwZ8+9t5u1xyXSSvdsB9F4Ry\nwb4/j76GQsGvuPGvntcsA2bPZtWSqPzaa3nL9j9PFC9HIQ9gC7aE29g/a7nlUuTWL/c14v9Y5ZQL\nPd6o7dS6XM1rNEoz9lvSpHKum5zcTJa5Fn1DZrIc3jWzcOGIVeIt9F+ODSE+7wiMPwUvjw/rrF8U\nWu
CMG8f6Ba+F8tixrI91b6xakuveeI1VA+H3yrBbBrpBoLLAbJcuiHq2ElupNVovvYt7h9yWW670\nebnymkfXVFX/UjTy98lXrpdUB33DZrKMh/vcueGAKYwI+SejrpVcC33eEfjkW2FtD3zgysH14mV4\nLVY+MVAqFN458X+cVv9aHf/HqvSfvZb/nKX8k9ZzO7Uul7puru843odcTrnS5w0v13O/NeP3KadP\nvmKljMGs908zTphqiNxZovFx6qP8/NOvh9t3XoeTKfzj7lWXK31e7yO97u7e+0hvSeVy1h3tNXIq\n3U4jxLcp0giUOI4+1Qdj42oy+qaEbpmc758Fv3U4HCR9eio8cEE0nraAWnZ1ZPoyZHoyZPoyAGWV\nG9L6EJGSlHowNtVdNzlVj76Jj5gZ1i1TaCjjv88IQb/kOvjmr+V/2eFdJLX6yl6Lr54i0j7UoidM\nVXz77WH0TUcHrF0bTiwdVS7c//ZvC7bcfzkWvrBglKGMJQxHjLeoRUTi1KIvQ9mjb7LZcGZqNjui\n5f5qB2z5Dbh+F0w4GZ39Rhgls2rJ8BcaORwx05cpeHBIRKQSCnrKGH2TzcK554YpBGKOjxkcJXPn\n28IomT+6prw6xPvORURqKdUnTMV1d4eQ7+srcBGSbBbOOWdIyOfmd/n4JWHIo2VCyP/T/fB3/3vw\nqb5z6WB5lBM0FPIiUg9q0UcKHpAt0IoHWH0pfGPuyNe6ZlkI8YGTlDZvhmHjzvON+RURqQcFfSTf\ndAjd3YSpCfKE/I1X5g/5uHJPPhERqQeNuokMb9H/4sQExrx2bMR6K6+Auy/K/xoaJSMijaRRN2WK\nH5B9+wVZxvzJG+DwYRg7Fk6EqQW2nxNC3r8yH/vvYcKx3HDIXItdAS8irUYt+rgC/fF7psK8n4SD\nrfksPm8xfe/tq3v1RETi1KKvRIH++Pmr8q+ugBeRdqCgB5g4EY4eBWAX85jLXsZxkt6e0Z+mkBeR\ndqCgz2ZhwYLQH//cc0zlJ3Rwko/2GGt78ndrxQ+6ioi0Op0wNWNGmIjsuTB/+0+nHuYw0/nX/3cD\nAI//6/8ABk9u0kFXEWk36T0YG+uuydl+DrxlRf7VFfAi0mp0zdhi9u+HOXOAcGUnGAz5O775IKwJ\nV27qWOv87phwgpNCXkTaUTr76CdMgGODJ0P91e/AVy4YfPgvLrgTdrwdgJP2KrPOPLfRNRQRqZl0\ntuj/4A/CrRk3Xjk05AGY8QQsv5TfHdPL5+46nf/68p/kn+hMRKQNpKtFH+uXv3EJ/H1X4VE1tsb4\n+GXbqrvylIhIC0hPiz43jHL2bI6Pgcv3ga/tGHg4N5VwfFbJfBOdiYi0m/S06GfMCIkNfLwnzBsP\nJwcetgVbWHze4iGzSm4bX+aVp0REWlDygz7WXXPLu2D9b49cpdBUBvGJzqZMGWzRq/tGRNpJ8oN+\n/35YuZLMz7fkDXkYfSqDXKirr15E2lXy++g7O2HvXjKPhEv8eWbwId+5tKQrPKmvXkTaWbJb9BMn\nkrn4KGuuHfnQ4vMWQ+9mMiW8TE9PaMkfOwZmoRtHRKRdJLtFv38/mXOuwz91+sAif+Z6ehd9qKyZ\nJ7u74W/+Bjo6wizGt95a4ALiIiItKNlBD/DII/CrXw3enzyZzJK/LvtljhwJIX/qlLpvRKS9JDro\nM595N3ZTFusdPDHKpq+raIrhXPdNR4eGWopIe0lmH300pDIDA33wlgG/c8LQ1n0ZNNRSRNpVVUFv\nZs8DrxDOPDrh7l1mdibwZWAW8Dxwrbv/rLpqlmn/fvjwh+HBB8lc9CqZ7acDrw7MOV8pDbUUkXZU\ni66b/+buC2NzIt8GbHX3OcDW6H5jdXbC5Mlw9ChreoCjR+n9RRecfXbVLx0fann0KHzhC1W/pIhI\nXdWjj/4qYFNU3gRcXYdtFHfoENx8cyjffDOZZ2fU5GV7ekI/PYA7
/MM/aASOiLS2aoPegYfNbIeZ\n5a7NNN3ds1H5IDC9ym2ULdOXwRZswc66GwA7625swZaaXOe1uxve974wnh7gxAmNwBGR1lbtwdi3\nuvsBMzsL+JaZPR1/0N3dzPLOBRx9MKwAmDlzZpXViGSzsHQpGTMyc5fDF7+IffTUwPVea+WGG2DT\nptCF09EBL7wQWvXqqxeRVlRVi97dD0S3h4EtwEXAITPrBIhuDxd47gZ373L3rmnTplVTjUFr14YL\nfT/xREjiU6fCcrMwEqdGciNwbrwxvPQ994QDtOrCEZFWVHHQm9nrzGxSrgz8HrALeAhYHq22HPha\ntZUsauLEkLjr1gFwX+yKUb19wPXXVz3iZrjubpg5M3Td6MCsiLSyalr004HHzGwnsB34urv/H+BO\n4O1m9gxwWXS/vvbvh6VLB46Svjw+Wj5mDJlHLYzAqcGIm+F0YFZE2kHFQe/u+919QfQz390/ES0/\n4u6Xuvscd7/M3X9au+oW0NlJ5r+8gN1+EsvAWb+Eu7pg+gdPsf3di+DgwbpsVgdmRaQdJGYKhMyz\nM/FNs/E913LNMlh5ZDaHXw8XPfgkbN5ct+3ecANMmBBa9vEDsyIirSIxQc/mzWG2ylt+I9zfv78h\nm9WBWRFpdckJ+siaR9cAYGts4NbWWE3G0BeiA7Mi0soSOalZbty8rbGaj6EvJHdg9uTJcGB248aw\n/IYbNL5eRJorES36TF9moOUODCk3yvADs6+9Bp/7nLpxRKT5khH0PRm81wda77lyKdeDraXcgdlc\n2LurG0dEmi8RQV9IpifT0O3lDszedBOMGxeWaXy9iDRb4oK+0a344bq7wwm673//0G6cTEZhLyLN\nkYygz2Zh8WI4eLDhrfhCct04Y8aEKXceflj99SLSHMkI+rVr4bHH4GMfa3ZNBuS6cS67bDDs1V8v\nIs1g7o0Zfjiarq4u7+/vL/+J0bVhR5hQ+bVha23btjD08vjxcH/cuNCto2GXIlItM9sRu7pfQe3d\not+/H667Dk4/nUwPcPrpdZmpshoadikizdbeQZ/n2rD1mqmyGhp2KSLN1N5BDyOuDVuvmSqrUWjY\n5caNcMstatmLSH21ddDX89qwtVZo2KW6cUSk3tr7YGxMI+e1qca2bSHYjx4NrXoIwX/TTQMXyBIR\nKUk6Dsa2IXXjiEijJSbom31GbDlG68bp6VHgi0httX/QZ7Nk3nsemV+/udk1KVu+0TjHj8P69XDJ\nJbBhQ3PrJyLJ0P5Bv3Yta2a/0FJnxZYq3o0zfvxg4EO4iMkHPqDWvYhUr32DfuLEkIy5I5jr1oX7\nEyc2t15lynXjPPJICPyOjsHHTp5U615Eqte2QZ954ANYBiwT7ufKma+sbF6lqpAL/LvvDgdp1boX\nkVpp36C/8tP4wZvxj4VfwT82Bj90C5klf93kmlVnxQp49FG17kWkdto26IG2OCu2EqW07pcuVQtf\nREqTiBOmMn2ZlpmHvta2bQtz4txzT2jVD6fZMEXSq9QTphIR9GmwYQOsWhVa9Pn+ZGPHwgc/CG98\nYxiLr9AXSb5Sg35sIyoj1VuxAn7zN0PrfuPGcIJV3IkT8KlPhW6ejg6FvogMUou+DeW6cw4ehH/+\n5/xdOqDQF0k6dd2kRLEunZzhoT9lChw5ovAXaWcK+hTZtg36+uDnP4fPfra00HcfGv4vvxwe00Fd\nkfahoE+pckN/uHHj4F3vChfpuvBC+N73wnJ9AIi0HgW9FAz9XIu+HIU+AC68UF1AIs2ioJchcqGf\n65uvtMWfT74uoNwHwJQp+lYgUi8KeikqHv7f+14YxfP1r48culmp4d8cRvtWUKysDwmRkRT0UpHc\n0E0YDNpafwBUoljXUfybQ7kfIvpAkXbV9KA3s8uB/wV0AH/v7ncWWldB3/ryfQAATJ5cuAuokmMB\nlajldhr1gTJauZnb0fGW9tLU
oDezDuA/gLcDLwJPAn/o7nvyra+gb2/Du4BgZJC0wreCarXjB1e5\n2yl0vKVdP7jaYTvVfLg2ewqEi4B97r4/qsz9wFVA3qCX9tbdXdqbtNC3gmLlYh8SjQrGRvVyNnM7\n7oPTadRKGj4gqzFmTLjC3Nat9fsmVa+gfxPwn7H7LwJvia9gZiuAFQAzZ86sUzWklZT6gZDPaB8S\ntWphtcoHStK2k4YPyGqcOhWuFd3X135BX5S7bwA2QOi6aVY9pD1U8yFRjkZ8oBQrN2s7ox1vqUbS\nPrjq0aI/7bTQfVMv9Qr6A8CM2P1zo2UiLa1RHyit6uqr8x9vaccPrnbZTiMOgNcr6J8E5pjZbELA\nLwOuq9O2RKRG0v5Bl1R1CXp3P2Fmq4B/IQyvvNfdd9djWyIiMrq69dG7+zeAb9Tr9UVEpDTtfXFw\nEREpSkEvIpJwCnoRkYRT0IuIJFxLzF5pZi8BP6riJaYCP6lRdWpJ9SqP6lW+Vq2b6lWeSut1nrtP\nK7ZSSwR9tcysv5SJfRpN9SqP6lW+Vq2b6lWeetdLXTciIgmnoBcRSbikBP2GZlegANWrPKpX+Vq1\nbqpXeepar0T00YuISGFJadGLiEgBbR30Zna5me01s31mdlsT6zHDzB4xsz1mttvM/ixanjGzA2b2\nVPRzRRPq9ryZ/SDafn+07Ewz+5aZPRPdntGEes2N7ZenzOxlM7u1GfvMzO41s8Nmtiu2rOA+MrPV\n0Xtur5m9o8H1+isze9rMvm9mW8zsjdHyWWb2q9h+W1+veo1St4J/uybvsy/H6vS8mT0VLW/YPhsl\nIxrzPnP3tvwhzIr5LHA+cBqwE5jXpLp0Am+OypMI18udB2SADzd5Pz0PTB227FPAbVH5NuCTLfC3\nPAic14x9BlwCvBnYVWwfRX/XncB4YHb0HuxoYL1+DxgblT8Zq9es+HpN2md5/3bN3mfDHv808NFG\n77NRMqIh77N2btEPXJfW3Y8DuevSNpy7Z939u1H5FeCHhMsptqqrgE1ReRNwdRPrAnAp8Ky7V3PS\nXMXc/dvAT4ctLrSPrgLud/dj7v4csI/wXmxIvdz9/7r7iejuE4SL+jRcgX1WSFP3WY6ZGXAt8KV6\nbHs0o2REQ95n7Rz0+a5L2/RwNbNZwIXAd6JFfxp9zb63GV0kgAMPm9mO6Dq9ANPdPRuVDwLTm1Cv\nuGUM/edr9j6Dwvuold537wO+Gbs/O+qCeNTM3takOuX727XKPnsbcMjdn4kta/g+G5YRDXmftXPQ\ntxwzez3wVeBWd38ZWEfoWloIZAlfGxvtre6+EHgnsNLMLok/6OF7YtOGXpnZacC7ga9Ei1phnw3R\n7H2Uj5l9BDgB3BctygIzo7/1B4F/NLPJDa5Wy/3thvlDhjYoGr7P8mTEgHq+z9o56FvqurRmNo7w\nB7zP3TcDuPshdz/p7qeAe6jT19XRuPuB6PYwsCWqwyEz64zq3QkcbnS9Yt4JfNfdD0Fr7LNIoX3U\n9Pedmb0XWAJcH4UD0Vf8I1F5B6FP99caWa9R/natsM/GAr8PfDm3rNH7LF9G0KD3WTsH/cB1aaNW\n4TLgoWZUJOr72wj80N0/E1veGVttKbBr+HPrXK/XmdmkXJlwIG8XYT8tj1ZbDnytkfUaZkgrq9n7\nLKbQPnoIWGZm4y1cE3kOsL1RlTKzy4E/B97t7q/Glk8zs46ofH5Ur/2Nqle03UJ/u6bus8hlwNPu\n/mJuQSP3WaGMoFHvs0Ycca7jkewrCEevnwU+0sR6vJXwlev7wFPRzxXAF4EfRMsfAjobXK/zCUfu\ndwK7c/sImAJsBZ4BHgbObNJ+ex1wBHhDbFnD9xnhgyYLvEboC33/aPsI+Ej0ntsLvLPB9dpH6LvN\nvc/WR+u+J/obPwV8F7iyCfus4N+umfssWv554OZh6zZsn42SEQ15n+nMWBGRhGvnrhsRESmBgl
5E\nJOEU9CIiCaegFxFJOAW9iEjCKehFRBJOQS8iknAKehGRhPv/Q+ycqDSxBE8AAAAASUVORK5CYII=\n",
465 | "text/plain": [
466 | ""
467 | ]
468 | },
469 | "metadata": {},
470 | "output_type": "display_data"
471 | }
472 | ],
473 | "source": [
474 |     "# Iterations to do training\n",
475 | "for epoch in range(200):\n",
476 | " \n",
477 | " start=0\n",
478 | " end=100\n",
479 | " for i in range(14):\n",
480 | " \n",
481 | " X=X_train[start:end]\n",
482 | " Y=y_train[start:end]\n",
483 | " start=end\n",
484 | " end=start+100\n",
485 | " sess.run(train_step,feed_dict={rnn._inputs:X, y:Y})\n",
486 | " \n",
487 | " Loss=str(sess.run(cross_entropy,feed_dict={rnn._inputs:X, y:Y}))\n",
488 | " Train_accuracy=str(sess.run(accuracy,feed_dict={rnn._inputs:X_train, y:y_train}))\n",
489 | " Test_accuracy=str(sess.run(accuracy,feed_dict={rnn._inputs:X_test, y:y_test}))\n",
490 | " \n",
491 | "\n",
492 | " pl.plot([epoch],Loss,'b.',)\n",
493 | " pl.plot([epoch],Train_accuracy,'r*',)\n",
494 | " pl.plot([epoch],Test_accuracy,'g+')\n",
495 | " display.clear_output(wait=True)\n",
496 | " display.display(pl.gcf()) \n",
497 | " \n",
498 | " sys.stdout.flush()\n",
499 | " print(\"\\rIteration: %s Loss: %s Train Accuracy: %s Test Accuracy: %s\"%(epoch,Loss,Train_accuracy,Test_accuracy)),\n",
500 | " sys.stdout.flush()"
501 | ]
502 | }
503 | ],
504 | "metadata": {
505 | "kernelspec": {
506 | "display_name": "Python 2",
507 | "language": "python",
508 | "name": "python2"
509 | },
510 | "language_info": {
511 | "codemirror_mode": {
512 | "name": "ipython",
513 | "version": 2
514 | },
515 | "file_extension": ".py",
516 | "mimetype": "text/x-python",
517 | "name": "python",
518 | "nbconvert_exporter": "python",
519 | "pygments_lexer": "ipython2",
520 | "version": "2.7.6"
521 | }
522 | },
523 | "nbformat": 4,
524 | "nbformat_minor": 0
525 | }
526 |
--------------------------------------------------------------------------------
/LSTM/LSTM.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {
6 | "deletable": true,
7 | "editable": true
8 | },
9 | "source": [
10 | "# LSTM RNN ON 8*8 MNIST DATASET TO PREDICT TEN CLASS"
11 | ]
12 | },
13 | {
14 | "cell_type": "markdown",
15 | "metadata": {
16 | "deletable": true,
17 | "editable": true
18 | },
19 | "source": [
20 | "\n",
21 | "### Its a dynamic sequence and batch LSTM . This is created with tensorflow scan and map higher ops!!!! \n",
22 | "### This is a base LSTM which can be used to create Neural Stack Machine, Neural Turing Machine and RNN-EM and so on!"
23 | ]
24 | },
25 | {
26 | "cell_type": "markdown",
27 | "metadata": {
28 | "deletable": true,
29 | "editable": true
30 | },
31 | "source": [
32 | "# Importing Libraries"
33 | ]
34 | },
35 | {
36 | "cell_type": "code",
37 | "execution_count": 1,
38 | "metadata": {
39 | "collapsed": false,
40 | "deletable": true,
41 | "editable": true
42 | },
43 | "outputs": [
44 | {
45 | "name": "stderr",
46 | "output_type": "stream",
47 | "text": [
48 | "/home/jli183/tensorflow/local/lib/python2.7/site-packages/sklearn/cross_validation.py:44: DeprecationWarning: This module was deprecated in version 0.18 in favor of the model_selection module into which all the refactored classes and functions are moved. Also note that the interface of the new CV iterators are different from that of this module. This module will be removed in 0.20.\n",
49 | " \"This module will be removed in 0.20.\", DeprecationWarning)\n"
50 | ]
51 | }
52 | ],
53 | "source": [
54 | "import numpy as np\n",
55 | "import tensorflow as tf\n",
56 | "from sklearn import datasets\n",
57 | "from sklearn.model_selection import train_test_split\n",
58 | "import pylab as pl\n",
59 | "from IPython import display\n",
60 | "import sys\n",
61 | "%matplotlib inline"
62 | ]
63 | },
64 | {
65 | "cell_type": "markdown",
66 | "metadata": {
67 | "deletable": true,
68 | "editable": true
69 | },
70 | "source": [
71 | "# LSTM class and functions"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": 2,
77 | "metadata": {
78 | "collapsed": false,
79 | "deletable": true,
80 | "editable": true
81 | },
82 | "outputs": [],
83 | "source": [
84 | "class LSTM_cell(object):\n",
85 | "\n",
86 | " \"\"\"\n",
87 | " LSTM cell object which takes 3 arguments for initialization.\n",
88 | " input_size = Input Vector size\n",
89 | " hidden_layer_size = Hidden layer size\n",
90 | " target_size = Output vector size\n",
91 | "\n",
92 | " \"\"\"\n",
93 | "\n",
94 | " def __init__(self, input_size, hidden_layer_size, target_size):\n",
95 | "\n",
96 | " # Initialization of given values\n",
97 | " self.input_size = input_size\n",
98 | " self.hidden_layer_size = hidden_layer_size\n",
99 | " self.target_size = target_size\n",
100 | "\n",
101 | " # Weights and Bias for input and hidden tensor\n",
102 | " self.Wi = tf.Variable(tf.zeros(\n",
103 | " [self.input_size, self.hidden_layer_size]))\n",
104 | " self.Ui = tf.Variable(tf.zeros(\n",
105 | " [self.hidden_layer_size, self.hidden_layer_size]))\n",
106 | " self.bi = tf.Variable(tf.zeros([self.hidden_layer_size]))\n",
107 | "\n",
108 | " \n",
109 | " self.Wf = tf.Variable(tf.zeros(\n",
110 | " [self.input_size, self.hidden_layer_size]))\n",
111 | " self.Uf = tf.Variable(tf.zeros(\n",
112 | " [self.hidden_layer_size, self.hidden_layer_size]))\n",
113 | " self.bf = tf.Variable(tf.zeros([self.hidden_layer_size])) \n",
114 | " \n",
115 | " \n",
116 | " self.Wog = tf.Variable(tf.zeros(\n",
117 | " [self.input_size, self.hidden_layer_size]))\n",
118 | " self.Uog = tf.Variable(tf.zeros(\n",
119 | " [self.hidden_layer_size, self.hidden_layer_size]))\n",
120 | " self.bog = tf.Variable(tf.zeros([self.hidden_layer_size])) \n",
121 | " \n",
122 | " \n",
123 | " self.Wc = tf.Variable(tf.zeros(\n",
124 | " [self.input_size, self.hidden_layer_size]))\n",
125 | " self.Uc = tf.Variable(tf.zeros(\n",
126 | " [self.hidden_layer_size, self.hidden_layer_size]))\n",
127 | " self.bc = tf.Variable(tf.zeros([self.hidden_layer_size])) \n",
128 | " \n",
129 | " \n",
130 | " \n",
131 | " # Weights for output layers\n",
132 | " self.Wo = tf.Variable(tf.truncated_normal(\n",
133 | " [self.hidden_layer_size, self.target_size],mean=0,stddev=.01))\n",
134 | " self.bo = tf.Variable(tf.truncated_normal([self.target_size],mean=0,stddev=.01))\n",
135 | "\n",
136 | " # Placeholder for input vector with shape[batch, seq, embeddings]\n",
137 | " self._inputs = tf.placeholder(tf.float32,\n",
138 | " shape=[None, None, self.input_size],\n",
139 | " name='inputs')\n",
140 | "\n",
141 | " # Processing inputs to work with scan function\n",
142 | " self.processed_input = process_batch_input_for_RNN(self._inputs)\n",
143 | "\n",
144 | " '''\n",
145 | " Initial hidden state's shape is [1,self.hidden_layer_size]\n",
146 | "    In the first time step, we are doing a dot product with weights to\n",
147 | " get the shape of [batch_size, self.hidden_layer_size].\n",
148 | "    For this dot product TensorFlow uses broadcasting. But during\n",
149 | "    backpropagation a low-level error occurs.\n",
150 | " So to solve the problem it was needed to initialize initial\n",
151 | "    hidden state of size [batch_size, self.hidden_layer_size].\n",
152 | " So here is a little hack !!!! Getting the same shaped\n",
153 | " initial hidden state of zeros.\n",
154 | " '''\n",
155 | "\n",
156 | " self.initial_hidden = self._inputs[:, 0, :]\n",
157 | " self.initial_hidden= tf.matmul(\n",
158 | " self.initial_hidden, tf.zeros([input_size, hidden_layer_size]))\n",
159 | " \n",
160 | " \n",
161 | " self.initial_hidden=tf.stack([self.initial_hidden,self.initial_hidden])\n",
162 | " # Function for LSTM cell.\n",
163 | " def Lstm(self, previous_hidden_memory_tuple, x):\n",
164 | " \"\"\"\n",
165 | " This function takes previous hidden state and memory tuple with input and\n",
166 | " outputs current hidden state.\n",
167 | " \"\"\"\n",
168 | " \n",
169 | " previous_hidden_state,c_prev=tf.unstack(previous_hidden_memory_tuple)\n",
170 | " \n",
171 | " #Input Gate\n",
172 | " i= tf.sigmoid(\n",
173 | " tf.matmul(x,self.Wi)+tf.matmul(previous_hidden_state,self.Ui) + self.bi \n",
174 | " )\n",
175 | " \n",
176 | " #Forget Gate\n",
177 | " f= tf.sigmoid(\n",
178 | " tf.matmul(x,self.Wf)+tf.matmul(previous_hidden_state,self.Uf) + self.bf \n",
179 | " )\n",
180 | " \n",
181 | " #Output Gate\n",
182 | " o= tf.sigmoid(\n",
183 | " tf.matmul(x,self.Wog)+tf.matmul(previous_hidden_state,self.Uog) + self.bog\n",
184 | " )\n",
185 | " \n",
186 | " #New Memory Cell\n",
187 | " c_= tf.nn.tanh(\n",
188 | " tf.matmul(x,self.Wc)+tf.matmul(previous_hidden_state,self.Uc) + self.bc \n",
189 | " ) \n",
190 | " \n",
191 | " #Final Memory cell\n",
192 | " c= f*c_prev + i*c_\n",
193 | " \n",
194 | " #Current Hidden state\n",
195 | " current_hidden_state = o*tf.nn.tanh(c)\n",
196 | "\n",
197 | "\n",
198 | " return tf.stack([current_hidden_state,c])\n",
199 | "\n",
200 | " # Function for getting all hidden state.\n",
201 | " def get_states(self):\n",
202 | " \"\"\"\n",
203 | " Iterates through time/ sequence to get all hidden state\n",
204 | " \"\"\"\n",
205 | "\n",
206 | "        # Getting all hidden states through time\n",
207 | " all_hidden_states = tf.scan(self.Lstm,\n",
208 | " self.processed_input,\n",
209 | " initializer=self.initial_hidden,\n",
210 | " name='states')\n",
211 | " all_hidden_states=all_hidden_states[:,0,:,:]\n",
212 | " \n",
213 | " return all_hidden_states\n",
214 | "\n",
215 | " # Function to get output from a hidden layer\n",
216 | " def get_output(self, hidden_state):\n",
217 | " \"\"\"\n",
218 | " This function takes hidden state and returns output\n",
219 | " \"\"\"\n",
220 | " output = tf.nn.relu(tf.matmul(hidden_state, self.Wo) + self.bo)\n",
221 | "\n",
222 | " return output\n",
223 | "\n",
224 | " # Function for getting all output layers\n",
225 | " def get_outputs(self):\n",
226 | " \"\"\"\n",
227 | " Iterating through hidden states to get outputs for all timestamp\n",
228 | " \"\"\"\n",
229 | " all_hidden_states = self.get_states()\n",
230 | "\n",
231 | " all_outputs = tf.map_fn(self.get_output, all_hidden_states)\n",
232 | "\n",
233 | " return all_outputs\n",
234 | "\n",
235 | "\n",
236 | "# Function to convert batch input data to use scan ops of tensorflow.\n",
237 | "def process_batch_input_for_RNN(batch_input):\n",
238 | " \"\"\"\n",
239 | " Process tensor of size [5,3,2] to [3,5,2]\n",
240 | " \"\"\"\n",
241 | " batch_input_ = tf.transpose(batch_input, perm=[2, 0, 1])\n",
242 | " X = tf.transpose(batch_input_)\n",
243 | "\n",
244 | " return X\n"
245 | ]
246 | },
247 | {
248 | "cell_type": "markdown",
249 | "metadata": {
250 | "deletable": true,
251 | "editable": true
252 | },
253 | "source": [
254 | "# Placeholder and initializers\n"
255 | ]
256 | },
257 | {
258 | "cell_type": "code",
259 | "execution_count": 3,
260 | "metadata": {
261 | "collapsed": true,
262 | "deletable": true,
263 | "editable": true
264 | },
265 | "outputs": [],
266 | "source": [
267 | "hidden_layer_size = 30\n",
268 | "input_size = 8\n",
269 | "target_size = 10"
270 | ]
271 | },
272 | {
273 | "cell_type": "code",
274 | "execution_count": 4,
275 | "metadata": {
276 | "collapsed": true,
277 | "deletable": true,
278 | "editable": true
279 | },
280 | "outputs": [],
281 | "source": [
282 | "y = tf.placeholder(tf.float32, shape=[None, target_size],name='inputs')"
283 | ]
284 | },
285 | {
286 | "cell_type": "markdown",
287 | "metadata": {
288 | "deletable": true,
289 | "editable": true
290 | },
291 | "source": [
292 | "# Models"
293 | ]
294 | },
295 | {
296 | "cell_type": "code",
297 | "execution_count": 5,
298 | "metadata": {
299 | "collapsed": false,
300 | "deletable": true,
301 | "editable": true
302 | },
303 | "outputs": [],
304 | "source": [
305 | "#Initializing rnn object\n",
306 | "rnn=LSTM_cell( input_size, hidden_layer_size, target_size)"
307 | ]
308 | },
309 | {
310 | "cell_type": "code",
311 | "execution_count": 6,
312 | "metadata": {
313 | "collapsed": false,
314 | "deletable": true,
315 | "editable": true
316 | },
317 | "outputs": [],
318 | "source": [
319 | "#Getting all outputs from rnn\n",
320 | "outputs = rnn.get_outputs()"
321 | ]
322 | },
323 | {
324 | "cell_type": "code",
325 | "execution_count": 7,
326 | "metadata": {
327 | "collapsed": true,
328 | "deletable": true,
329 | "editable": true
330 | },
331 | "outputs": [],
332 | "source": [
333 | "#Getting final output through indexing after reversing\n",
334 | "#last_output = tf.reverse(outputs,[True,False,False])[0,:,:]\n",
335 | "last_output = outputs[-1]"
336 | ]
337 | },
338 | {
339 | "cell_type": "code",
340 | "execution_count": 8,
341 | "metadata": {
342 | "collapsed": true,
343 | "deletable": true,
344 | "editable": true
345 | },
346 | "outputs": [],
347 | "source": [
348 | "#As the RNN model outputs the final layer through ReLU activation, softmax is used for the final output.\n",
349 | "output=tf.nn.softmax(last_output)"
350 | ]
351 | },
352 | {
353 | "cell_type": "code",
354 | "execution_count": 9,
355 | "metadata": {
356 | "collapsed": true,
357 | "deletable": true,
358 | "editable": true
359 | },
360 | "outputs": [],
361 | "source": [
362 | "#Computing the Cross Entropy loss \n",
363 | "cross_entropy = -tf.reduce_sum(y * tf.log(output))"
364 | ]
365 | },
366 | {
367 | "cell_type": "code",
368 | "execution_count": 10,
369 | "metadata": {
370 | "collapsed": false,
371 | "deletable": true,
372 | "editable": true
373 | },
374 | "outputs": [],
375 | "source": [
376 | "# Training with Adam optimizer\n",
377 | "train_step = tf.train.AdamOptimizer().minimize(cross_entropy)"
378 | ]
379 | },
380 | {
381 | "cell_type": "code",
382 | "execution_count": 11,
383 | "metadata": {
384 | "collapsed": true,
385 | "deletable": true,
386 | "editable": true
387 | },
388 | "outputs": [],
389 | "source": [
390 | "#Calculation of correct prediction and accuracy\n",
391 | "correct_prediction = tf.equal(tf.argmax(y,1), tf.argmax(output,1))\n",
392 | "accuracy = (tf.reduce_mean(tf.cast(correct_prediction, tf.float32)))*100"
393 | ]
394 | },
395 | {
396 | "cell_type": "markdown",
397 | "metadata": {
398 | "deletable": true,
399 | "editable": true
400 | },
401 | "source": [
402 | "# Dataset Preparation"
403 | ]
404 | },
405 | {
406 | "cell_type": "code",
407 | "execution_count": 12,
408 | "metadata": {
409 | "collapsed": true,
410 | "deletable": true,
411 | "editable": true
412 | },
413 | "outputs": [],
414 | "source": [
415 | "#Function to get one-hot encoding of a digit label\n",
416 | "def get_on_hot(number):\n",
417 | " on_hot=[0]*10\n",
418 | " on_hot[number]=1\n",
419 | " return on_hot"
420 | ]
421 | },
422 | {
423 | "cell_type": "code",
424 | "execution_count": 13,
425 | "metadata": {
426 | "collapsed": false,
427 | "deletable": true,
428 | "editable": true
429 | },
430 | "outputs": [],
431 | "source": [
432 | "#Using Sklearn MNIST dataset.\n",
433 | "digits = datasets.load_digits()\n",
434 | "X=digits.images\n",
435 | "Y_=digits.target\n",
436 | "\n",
437 | "Y=map(get_on_hot,Y_)"
438 | ]
439 | },
440 | {
441 | "cell_type": "code",
442 | "execution_count": 14,
443 | "metadata": {
444 | "collapsed": true,
445 | "deletable": true,
446 | "editable": true
447 | },
448 | "outputs": [],
449 | "source": [
450 | "#Getting Train and test Dataset\n",
451 | "X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=0.22, random_state=42)\n",
452 | "\n",
453 | "#Cutting for simple iteration\n",
454 | "X_train=X_train[:1400]\n",
455 | "y_train=y_train[:1400]"
456 | ]
457 | },
458 | {
459 | "cell_type": "code",
460 | "execution_count": 15,
461 | "metadata": {
462 | "collapsed": false,
463 | "deletable": true,
464 | "editable": true
465 | },
466 | "outputs": [
467 | {
468 | "name": "stdout",
469 | "output_type": "stream",
470 | "text": [
471 | "WARNING:tensorflow:From :2: initialize_all_variables (from tensorflow.python.ops.variables) is deprecated and will be removed after 2017-03-02.\n",
472 | "Instructions for updating:\n",
473 | "Use `tf.global_variables_initializer` instead.\n"
474 | ]
475 | }
476 | ],
477 | "source": [
478 | "sess=tf.InteractiveSession()\n",
479 | "sess.run(tf.initialize_all_variables())"
480 | ]
481 | },
482 | {
483 | "cell_type": "code",
484 | "execution_count": 16,
485 | "metadata": {
486 | "collapsed": false,
487 | "deletable": true,
488 | "editable": true,
489 | "scrolled": false
490 | },
491 | "outputs": [
492 | {
493 | "data": {
494 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD8CAYAAAB5Pm/hAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHSRJREFUeJzt3Xt0HeV57/HvI8nYmCQNBhvExVjNcsKySOIcFIpOe2JT\naLgcGuKEpi5OMIusGGQ4K2QZAmoO0chugbSE0q5iHUxC4jROKAFCSE/SFhwUkoO5yJSLbXDt4uBA\n5EuhKamNbSQ95493tjSSddnaF23N7N9nLa09+917j97x5Tezn3nnHXN3REQku2oq3QERESkvBb2I\nSMYp6EVEMk5BLyKScQp6EZGMU9CLiGScgl5EJOMU9CIiGaegFxHJuLpKdwDg2GOP9Tlz5lS6GyIi\nqbJx48Z/d/eZY71vUgT9nDlz6OrqqnQ3RERSxcxeyed9Kt2IiGScgl5EJOMU9CIiGaegFxHJOAW9\niEjGKehFRDIu9UG/YQPcfHN4FBGRw02KcfSF2rABzj4bDh2CI46A9euhubnSvRIRmVxSfUTf2RlC\nvrc3PHZ2VrpHIiKTT6qDfuHCcCRfWxt+du5UCUdEZKhUB31zcyjXfO5zYAZ33RVKOQp7EZEBqQ56\nCGE/ezb09KiEIyIynNQHPQwu4RxxRHguIiJBqkfd5ORKOJ2dIeQ18kZEZEAmgh5CuOcCfsMGhb6I\nSE5mgj5HY+tFRAbLRI0+SWPrRUQGy1zQ68SsiMhgmSvd6MSsiMhgmQt6GHxiVkSk2mWudCMiIoMp\n6EVEMk5BLyKScQp6EZGMU9CLiGRcpoNetxkUEcno8ErQVAgiIjmZPaLXVAgiIkFmg15TIYiIBJkt\n3WgqBBGRYMwjejM72cweNbMtZrbZzD4ft88ws4fNbFv8eHTiM61mtt3MtprZueXcgNE0N0Nrq0Je\nRKpbPqWbHmCFu88DzgSuMrN5wA3AenefC6yPnxO/thhoBM4DVptZbTk6LyIiYxsz6N29292fiZd/\nA7wInAhcBKyN37YW+Hi8fBFwj7sfdPcdwHbgjFJ3XERE8jOuk7FmNgf4EPAkcJy7d8cv7QKOi5dP\nBH6Z+NircZuIiFRA3kFvZu8A7geucfc3k6+5uwM+nl9sZsvMrMvMuvbu3Tuej4qIyDjkFfRmNoUQ\n8uvc/YG4ebeZ1cev1wN74vbXgJMTHz8pbhvE3de4e5O7N82cObPQ/ouIyBjyGXVjwNeBF939tsRL\nDwFL4+WlwA8S7YvNbKqZNQBzgadK12URERmPfMbR/y7wGeAFM3s2bvtT4BbgXjP7LPAK8CkAd99s\nZvcCWwgjdq5y996S91xERPIyZtC7+88BG+Hls0f4zJ8Df15Ev0REpEQyOwWCiIgECnoRkYyrmqDX\n3PQiUq0yO6lZkuamF5FqVhVH9JqbXkSqWVUEveamF5FqVhWlG81NLyLVrCqCHkK4K+BFpBpVRelG\nRKSaKehFRDJOQS8iknEKehGRjFPQi4hknIJeRCTjFPQiIhmnoBcRyTgFvYhIxinoRUQyTkEvIpJx\nVRf0ugGJiFSbqpnUDHQDEhGpTlV1RK8bkIhINaqqoNcNSESkGlVV6UY3IBGRalRVQQ+6AYmIVJ+q\nKt2IiFQjBb2ISMYp6EVEMk5BLyKScQp6EZGMU9CLiGScgl5EJOMU9CIiGVfVQa+ZLEWkGowZ9GZ2\nt5ntMbNNibbIzF4zs2fjnwsSr7Wa2XYz22pm55ar48XKzWR5443hUWEvIlmVzxH9N4Hzhmn/K3ef\nH//8CMDM5gGLgcb4M6vNrLZUnS0lzWQpItVizKB398eAN/Jc30XAPe5+0N13ANuBM4roX9loJksR\nqRbF1Oj/l5k9H5d2jo7bTgR+mXjPq3HbpJObyXLVKt2ARESyrdCg7wB+G5gPdANfHe8KzGyZmXWZ\nWdfevXsL7EZxmpuhtVUhLyLZVlDQu/
tud+919z7gLgbKM68BJyfeelLcNtw61rh7k7s3zZw5s5Bu\niIhIHgoKejOrTzxdBORG5DwELDazqWbWAMwFniquiyIiUowxbzxiZt8FFgLHmtmrQBuw0MzmAw78\nArgCwN03m9m9wBagB7jK3XvL03UREcmHuXul+0BTU5N3dXVVuhsiIqliZhvdvWms91X1lbEiItVA\nQS8iknEKejTnjYhk25gnY7MuN+fNoUPhClldPCUiWVP1R/Sa80ZEsq7qg15z3ohI1lV96SY3501n\nZwh5lW1EJGuqPughhLsCXkSyqupLNyIiWaegFxHJOAW9iEjGKehFRDJOQT+ErpIVkazRqJsEXSUr\nIlmkI/oEXSUrIlmkoE/QVbIikkUq3SToKlkRySIF/RC6SlZEskalGxGRjFPQi4hknIJeRCTjFPSj\n0MVTIpIFOhk7Al08JSJZoSP6EejiKRHJCgX9CHTxlIhkhUo3I9DFUyKSFQr6UejiKRHJApVuREQy\nTkEvIpJxCvo8aDy9iKSZavRj0Hh6EUk7HdGPQePpRSTtFPRj0Hh6EUk7lW7GoPH0IpJ2Yx7Rm9nd\nZrbHzDYl2maY2cNmti1+PDrxWquZbTezrWZ2brk6PpGam6G1VSEvIumUT+nmm8B5Q9puANa7+1xg\nffwcM5sHLAYa48+sNrPakvVWRKC7GxYsgOeeC4+7do3eNtbrE70erXv49ZSTu4/5A8wBNiWebwXq\n4+V6YGu83Aq0Jt73T0DzWOs//fTTPS0ef9z9ppvCo0xSv/qV+0c+4v7ss+Gxu7s0baVaT7Hrbmlx\nr6lxb2wMjy0to7eN9fpEr0frHn49BQC6PJ8Mz+tNhwf9rxPLlnsO/C3w6cRrXwcuHmv9aQn6xx93\nP/JI99ra8KiwL4FSh2ghQZiWwICq/jlUM/62Qj4z0etuW5h4bdq0cf33mbCgj5//R/yYd9ADy4Au\noGv27NmFxsSEuummEPIQHm+6qdI9KoNkYA7XVuqj16VLSxeYGQ+Mv2weWH5uVqKttta9pubwttra\n0V/Pt61U6ylw3d8+LbQdv2KcbbW14//MRK+7ttaJcJ8+3X3JksH/7/JQ7qCvytJNZo/ohzsabmkZ\nM4zbzqK/re0s8m9raRnYY5YwMMcKwor/py5y3UT0/7mtPj3RFm/zcG1jvT7R69G6R1hPgeWbfIO+\n0OGVDwFLgVvixx8k2r9jZrcBJwBzgacK/B2TzqQeatndDYsXw9//ffjnk1s+/vhRPxO1nknEWbz9\n88eYUl8PQLQQoo4O6OgI73vsMd6ugSm55wCbN3P9VuDR0BaFxjHbrq8bWF53GizZBLO/AN1fBWpq\nWDevb6Dt9lpwz68NuO7cXq59qhZ6e/l/J8MH9sB158K1G3oBeHNq6PqudxKugBtHWyGfKfW6IW5r\nbGTWvs3Q2AhshoYGAGbt23FYG+wY9fV820q1nvGs26KBzV7+h+GxkLZSraec67Yv9wEdtHXOIlqY\neKFELOwURnmD2XeBhcCxwG6gDXgQuBeYDbwCfMrd34jf/yXgcqAHuMbdfzxWJ5qamryrq6vwrahW\nyXBfuRLuvBOuuAKA6KUOopMvJbJOolueBPcQ6l94iOivPhbaTjqpP0zqV8RhC7xVB0f2hOVcGPe/\nngjj+hWHB++obUD9Nb39yx3ze2nZGP7BexR+X8fpFNzWv9zYyP29m/lkbSP2R5tL/+cuE87bHGs3\nvC3kVW4537ZCPjPR6y6EmW1096ax/wDzOOwv909aSjeTTkvLiLXe3FfD/q/7LS1hubHR99cNvG9Q\nWSBuO35FvFxXd9hXzbaFYTmtP+7e/5hczretkM9kad1p6GNa110Iyly6qXobNkxwCSd59D5nDhw8\nCMDsxJF4fxmjt7f/KHf/nxGO2js6CMcMm5nZCvsS5YBPXxweD/9KGg7rh37VvO8e2PUOuPpCeP3O\ndz
Nj2tHcP3VH/xG0rw1fw4drs6U7Bi/vWY7NWj1hR1iSPm0L2gY9FtJWqvWUc91llc/eoNw/aTui\nr8hJ2eSJzEsvDUfidXUDR+wMHLUe1Xr4crKNKJwU6jH8vlMTR+zLl7svWhSWFy1yX768v32yHgWN\nZ91tj7YNeiykrVTrSeu6K9FHGRmlHHVT7p+0Bf2EDrOcOtVzJZVcyWW08sl9p+J/25TnGf/GxlGD\nPLk8XJsCQ6SyFPRlNKFH9Imj9+RReS7QifAnL/jgwJF47L5TEwHe0ODe0BDaEuHuixYVFcYiUlkK\n+jIr+1QI06YNe/SeK7kQ4V5TM+KReL5HwSKSXvkG/ZjDKydC2odXlvzEbHc30XUfpn3ua8CQk58d\nv8WM6TP6T2RGfT8h6ngRgKgzKssYXBGZnPIdXqlRN0Uqy60GV62CX72G/8s8eOkl7Mt9+Moarr6w\njxm7fg1AW2cECyOixMcU8iIyHN1hqkglvdXgtGlgBh0dtC8AtmyBvr7w2pVX0rbr1P63KtRFJF8K\n+iIVfavB7m6iy04J81H/8R/z/CygLv6iNX06LFlC2+kr4I47+ks0IiLjodJNkQqe/yZ3AVRDA9ev\n2wlr64kWQvtyyF2oZF/cD6yj7R0TcEGFiGSWTsaW0LhOytbVDTvPzP2nwie31WI39uI7lsKbb8ID\nD5Sz2yKSUjoZO8HyPil75JFw4EA4el840JycfgDCDiC6bI5q8SJSNNXoSyTvk7IvvwyXXEL01HRW\n/5D+OWk8Av9eY1jes5y2Xacq5EWkJBT0JZLvSdlo653Ye7+DfXE/s/bBHfGXrqcuaoL3vjc80YlX\nESkhlW5KZOhJWYCbbz68Xh8tjIj+5nmor8cWr8b3LGfvrp9wxoNPA/H4eBGREtLJ2DLIt15fzA0H\nRETyPRmr0k0ZJOv1Mw52c+9fxOPkCdMU5EzIPNQiUvUU9KWQu+jpueeILjuFhe9/ndlTurls4Snc\nbK3cPn9nuNVfdzftP20fCH2dbBWRCaCgL0L/0fmqVbQ37IQlS2hv2Enzj27k3w6dzDc6d/KZ3rXh\nPR0dcMIJYXnlyor0V0Sqk2r0heru5q3ZJ/TfRDsnd2Pt5Dj5P30MbvrI4ato+3kt0cM9h78gIpIH\nXTBVLt3dRK1nEnEWM68L91496uDAPViTy6vvms+y157npBV99EbgNXXUfbkH/4vpsGgR/N2tFdsM\nEakeKt3kKVemiZacSHvDTqxhLfumwuofwn/eQv/FT/umxhc/RTCr7hW+xmfZ9U54kUZq+sLRu791\nAN71Ljj++EptjohUER3R56O7m+v/oB162omAuXthyaYwbUHLRmDuXGbt2waNjcBmaGgA4OKlO3j8\n+ffy+5sXMX0+rHlhAcf/tJc1NbVcvKWbYyq3RSJSRRT0+Vi1qr9MA+HovdfCcp9BzTnn8MKnLuGT\nC6NwwVNbBISLn5rbrmU94eKpG1+A3kfhqlp441xorcS2iEjVUdCPIjqnjvb/0QvHheerfwjLnoEH\n3we1Dm175lHTsjDU7ReuDp9JDJlMLuemSMhdRDXueetFRAqkUTejWbqUdc98iyUv1WH/uwdfWQPv\neQ+2ZBu+Z3mYU34cUwiX/N6yIlLVNOqmGPFUwgBvng70xEMg+/rgnHNoW3AJFHCxU3OzAl5EJp5G\n3QzV3c1HL5+CReFka26GyVn/BU9d8EHYtaskV7Ru2BDq9hs2FL0qEZFR6Yh+qFWrePi43+D3zoOX\nXsK+3IevrOGqD18B/3d1SX5F3jcpEREpAR3R5xx5JJiFqQoAtmwJpRqAK6/sn5+mFPK+SYmISAno\niD720a/O5+G9T/Q/z93ab0F9M7TdUdLfpRE4IjKRdEQfe3jvE/iuK8PIGsBX1uC7W+hc9njJf1fu\nJiWrVqlsIyLlV/VBH3VGYZgkwCuvhDINlLxcM1RzM7S2KuRFpPyq
ehx91BmF+eGHWHDKAjov65yQ\nPmhsvYgUKt9x9FUZ9FFnFIZIJsbLWxQmIgNg2jR4662y90Ojb0SkGBNyK0Ez+4WZvWBmz5pZV9w2\nw8weNrNt8ePRxfyOcmj/aTvWbtRfdYB1p8G++JT0oal1sGQJ7NgxIf3Q6BsRmQilqNGf5e7zE3uV\nG4D17j4XWB8/nxSS92v1Nqd7fwtvToWjesJNQI54u29Cpw/Ojb6prdXoGxEpn3KcjL0IiO+fx1rg\n42X4HeOWq8dbu7H/z+gfMz9rX/z6I72hrYwnYIcaOvoGdLWsiJResUHvwCNmttHMlsVtx7l7PIyF\nXfTP/TiYmS0zsy4z69q7d2+R3Rhb9L4rQod3LOW3Pw/MnQvTp/PC8cD06aFk8+qr45qkrBRyo28g\n1OtvvDE8KuxFpFSKDfrfc/f5wPnAVWY26M6oHs70Dnu2193XuHuTuzfNnDmzyG6MLOqMsHbjrdkn\nhJOta9ey653Atm2wfz9RJ+GEbIXv+KR6vYiUS1FB7+6vxY97gO8DZwC7zaweIH7cU2wnixGd/xU8\nCjfsXndaaGvrjF+cOxceeaTsY+bzoXq9iJRLwUFvZkeZ2Ttzy8BHgU3AQ8DS+G1LgR8U28li3Prt\n5XDJJTB9Om/Gd4iKflYb6vHnnBPqJHfcMeElm6F0tayIlEsxc90cB3zfzHLr+Y67/6OZPQ3ca2af\nBV4BPlV8Nwt33abbuPZdV8KBAyzYGTd+4hMwc+bAFbGThOarF5FyKDjo3f1l4IPDtL8OnF1Mp0pm\nyNQG85YtgzVrQvsdpZ2orJR0tayIlFImZ68cOrWB/c6PAWj7j5lEkzjgQVfLikjpZTPoz/8KUZjZ\nYMjUBl+Bt6LhPzRJDDf6RkEvIsXI5uyVL7/cfwIWGBgnP0FTGxRDo29EpNQyeURPfT1P/9dWPnzg\nAG0/r50U4+TzlRt9oxq9iJRKNoMe2PmvG/nwlcuJkidgUyI5+kYnZkWkWJkN+osXg+duATjJT8CO\nRCdmRaQUMlWjz013YO0G0L+cnLUyTTQtgoiUQraCfmGEtzm+uwUA392Ct3m4yUgK6cSsiJRCtko3\niTtGEQEdHeFngu4YVWrJE7PHHDNwRK/yjYiMR7aC/uWX4dpr4cEHaevcH4ZVLloEt95a6Z4VLBfq\nqtWLSKEyVbqhvj4MozxwgOiJaakaVjka1epFpBjZCnqA3bvDtMNPPDEpph8uhaG1+mOO0Z2oRCR/\nFu4NUllNTU3e1dVV6W5Marnx9MccA9dcozKOiICZbUzcr3tE2Tuiz6jcLQdff11lHBEZHwV9yiTL\nOLW1sHOnSjgiMjoFfcrkhlx+7nPhJll33aWbiYvI6BT0KdTcDLNnQ0/PQAnnW9/SCVoRGV62xtFX\nkVwJ59ChUML5xjdC8OsErYgMpaBPqeRVszt3hhJO8uheM16KSI6GV2ZAcpbL2tpQu+/pCcuXXw6X\nXhrep/AXyZZ8h1cq6DMiN84+eXQPIfSnTBk+/BX4IulWfePou7uJLjslE1fCFiI3zv7SS0Od3sJM\nzbjD228PHnt/550aqSNSTbIT9KtW0d6wE1aurHRPKipXu7/iCpg6NRzBT5lyePhrpI5I9Uh/6SYx\nNbFF4FHcntKpiUspeRtCCMGeG52TrOVrpI5IOlVNjT764Qran7ntsPa201cQXZje6YnLZbhafk0N\nnHMORJHCXiRNqiboAWhpgTVrsC/34StrQt1i9erSdTCDciN1Dh6Evr4Q9lOnwu23h/l0NDpHZPLL\nN+jTP46+uxu+9z34zGeAtWFq4u7uSvdq0svV8qMIHnkkhP3Bg3D11WH5iCMU+iJZkf6gX7WK6AOv\nE02fTtuCNkjp/WErobk5BP3PfhZOzpqFUs7Q0Nd4fJF0S2/pRidhS2a4ue7NQsj39YX3jDYeHwY+\nr28AIhMn+6WbxP1hIRv3h62U
5uaBYH7/+weH/oEDYThmbjw+hOXe3jAe/+67Q/i//fbwtf5k+IO+\nDYhUQmqP6KPOiPafth/W3ragjUjlm5LYsGH4IZmHDoWwh8Fj83NqasJ7c2Wgmhqoqxs8nHO0HYG+\nGYjkpzpG3XziE1Bfj81aje9ZHk7CPvBA6TtY5fIZj588oq+pGVz2gcE7hNF2BOP5ZqBvC1LtqiPo\nY9ZueFvlt6OaDA3/ZPDmyj7JoZvJI/qh9X8Y/zeD5A6h0G8L423TTkQmm+zX6BPaFrRVugtVJ1nX\nzz1PStb6hwvRkXYEQ78Z5EIewmPyPMFwbbnRQvnuHPJtm4idyHCva8cipVC2I3ozOw/4a6AW+Jq7\n3zLSezV7ZfVJjvQZKQjz3SEU8m2hkLbxfsMYz05kuBJYIaWr8exEKrWesdatHVz+Klq6MbNa4F+B\nPwBeBZ4G/sTdtwz3fgW9jCSfHUIh3xYKCeNy7kRKUboaz06kUusZa935DN0db9tE7aAqURasdNA3\nA5G7nxs/bwVw95uHe7+CXkqpkJ1DpXcihZzUzretnDujcq07ed1GGnZQpSgLFjKxYKVr9CcCv0w8\nfxX4nTL9LpFBhp4/SLYX2zbWuYdSHhmWYseStiP63NDdoddtQH7naIZry+0oi11POdd96FD4uy9X\nyapiJ2PNbBmwDGD27NmV6obIuJRzJzLc66XYsUy2MsVo6x5r6O5k3UGV4og+92dQDirdiMikMtrQ\n3cm6g6rWGn0d4WTs2cBrhJOxl7j75uHer6AXERm/itbo3b3HzK4G/okwvPLukUJeRETKq2w1enf/\nEfCjcq1fRETyU1PpDoiISHkp6EVEMk5BLyKScQp6EZGMmxTTFJvZXuCVIlZxLPDvJepOpWlbJidt\ny+RU7dtyirvPHOtNkyLoi2VmXfmMJU0DbcvkpG2ZnLQt+VHpRkQk4xT0IiIZl5WgX1PpDpSQtmVy\n0rZMTtqWPGSiRi8iIiPLyhG9iIiMINVBb2bnmdlWM9tuZjdUuj/jYWYnm9mjZrbFzDab2efj9hlm\n9rCZbYsfj650X/NlZrVm9i9m9g/x81Rui5m928zuM7OXzOxFM2tO8ba0xv/GNpnZd81sWpq2xczu\nNrM9ZrYp0TZi/+Pt3R7nwrmV6fXwRtiWv4z/nT1vZt83s3cnXivZtqQ26OP70t4BnA/MA/7EzOZV\ntlfj0gOscPd5wJnAVXH/bwDWu/tcYH38PC0+D7yYeJ7Wbflr4B/d/VTgg4RtSt22mNkcws19Tnf3\n0wgzyS4mXdvyTeC8IW3D9j/+/7MYaIw/szrOicnimxy+LQ8Dp7n7BwhTu7dC6bcltUEPnAFsd/eX\n3f0QcA9wUYX7lDd373b3Z+Ll3xDC5ETCNqyN37YW+Hhlejg+ZnYS8D+BryWaU7ctZvZbwEeArwO4\n+yF3/zUp3BbgTeBt4Mj4HhHTgV+Rom1x98eAN4Y0j9T/i4B73P2gu+8AthNyYlIYblvc/Z/dvSd+\n+gRwUrxc0m1Jc9APd1/aEyvUl6LER14fAp4EjnP37vilXcBxFerWeN0OfBFI3Mo6ldvSAOwFvhGX\nob5mZkeRwm1x9zeAW4GdQDfwn+7+z6RwW4YYqf9pz4TLgR/HyyXdljQHfSaY2TuA+4Fr3P3N5Gse\nhkRN+mFRZnYhsMfdN470nrRsC+EeDf8N6HD3DwH7GFLaSMu2mNl7gC8Qdl4nAEeZ2aeT70nLtowk\n7f3PMbMvEcq568qx/jQH/WvAyYnnJ8VtqWFmUwghv87dH4ibd5tZffx6PbCnUv0bh98FPmZmvyCU\n0H7fzL5NOrflVeBVd38yfn4fIfjTuC1NwOPuvtfd3wYeAP476dyWpJH6n8pMMLPLgAuBJT4w3r2k\n25LmoH8amGtmDWZ2BOHExUMV7lPezMwIdeAX3f22xEsPAUvj5aXADya6b+Pl7q3ufpK7zyH8Pf
zE\n3T9NOrdlF/BLM3tf3HQ2sIUUbguwFTjTzKbH/97OJpwLSuO2JI3U/4eAxWY21cwagLnAUxXoX97M\n7DxCyfNj7r4/8VJpt8XdU/sDXEA4U/1vwJcq3Z9x9v33CF85nweejX8uAI4hjCTYBjwCzKh0X8e5\nXQuBf4iXU7ktwHygK/67eRA4OsXbcj1hR7UJ+Dtgapq2Bfgu4fzC24RvW58drf/Al+I82AqcX+n+\n57Et2wm1+FwG/J9ybIuujBURybg0l25ERCQPCnoRkYxT0IuIZJyCXkQk4xT0IiIZp6AXEck4Bb2I\nSMYp6EVEMu7/A095SpyJ3T8qAAAAAElFTkSuQmCC\n",
495 | "text/plain": [
496 | ""
497 | ]
498 | },
499 | "metadata": {},
500 | "output_type": "display_data"
501 | },
502 | {
503 | "name": "stdout",
504 | "output_type": "stream",
505 | "text": [
506 | "Iteration: 119 Loss: 0.69866 Train Accuracy: 100.0 Test Accuracy: 97.4747\n"
507 | ]
508 | },
509 | {
510 | "data": {
511 | "image/png": "iVBORw0KGgoAAAANSUhEUgAAAXoAAAD8CAYAAAB5Pm/hAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAHSRJREFUeJzt3Xt0HeV57/HvI8nYmCQNBhvExVjNcsKySOIcFIpOe2JT\naLgcGuKEpi5OMIusGGQ4K2QZAmoO0chugbSE0q5iHUxC4jROKAFCSE/SFhwUkoO5yJSLbXDt4uBA\n5EuhKamNbSQ95493tjSSddnaF23N7N9nLa09+917j97x5Tezn3nnHXN3REQku2oq3QERESkvBb2I\nSMYp6EVEMk5BLyKScQp6EZGMU9CLiGScgl5EJOMU9CIiGaegFxHJuLpKdwDg2GOP9Tlz5lS6GyIi\nqbJx48Z/d/eZY71vUgT9nDlz6OrqqnQ3RERSxcxeyed9Kt2IiGScgl5EJOMU9CIiGaegFxHJOAW9\niEjGKehFRDIu9UG/YQPcfHN4FBGRw02KcfSF2rABzj4bDh2CI46A9euhubnSvRIRmVxSfUTf2RlC\nvrc3PHZ2VrpHIiKTT6qDfuHCcCRfWxt+du5UCUdEZKhUB31zcyjXfO5zYAZ33RVKOQp7EZEBqQ56\nCGE/ezb09KiEIyIynNQHPQwu4RxxRHguIiJBqkfd5ORKOJ2dIeQ18kZEZEAmgh5CuOcCfsMGhb6I\nSE5mgj5HY+tFRAbLRI0+SWPrRUQGy1zQ68SsiMhgmSvd6MSsiMhgmQt6GHxiVkSk2mWudCMiIoMp\n6EVEMk5BLyKScQp6EZGMU9CLiGRcpoNetxkUEcno8ErQVAgiIjmZPaLXVAgiIkFmg15TIYiIBJkt\n3WgqBBGRYMwjejM72cweNbMtZrbZzD4ft88ws4fNbFv8eHTiM61mtt3MtprZueXcgNE0N0Nrq0Je\nRKpbPqWbHmCFu88DzgSuMrN5wA3AenefC6yPnxO/thhoBM4DVptZbTk6LyIiYxsz6N29292fiZd/\nA7wInAhcBKyN37YW+Hi8fBFwj7sfdPcdwHbgjFJ3XERE8jOuk7FmNgf4EPAkcJy7d8cv7QKOi5dP\nBH6Z+NircZuIiFRA3kFvZu8A7geucfc3k6+5uwM+nl9sZsvMrMvMuvbu3Tuej4qIyDjkFfRmNoUQ\n8uvc/YG4ebeZ1cev1wN74vbXgJMTHz8pbhvE3de4e5O7N82cObPQ/ouIyBjyGXVjwNeBF939tsRL\nDwFL4+WlwA8S7YvNbKqZNQBzgadK12URERmPfMbR/y7wGeAFM3s2bvtT4BbgXjP7LPAK8CkAd99s\nZvcCWwgjdq5y996S91xERPIyZtC7+88BG+Hls0f4zJ8Df15Ev0REpEQyOwWCiIgECnoRkYyrmqDX\n3PQiUq0yO6lZkuamF5FqVhVH9JqbXkSqWVUEveamF5FqVhWlG81NLyLVrCqCHkK4K+BFpBpVRelG\nRKSaKehFRDJOQS8iknEKehGRjFPQi4hknIJeRCTjFPQiIhmnoBcRyTgFvYhIxinoRUQyTkEvIpJx\nVRf0ugGJiFSbqpnUDHQDEhGpTlV1RK8bkIhINaqqoNcNSESkGlVV6UY3IBGRalRVQQ+6AYmIVJ+q\nKt2IiFQjBb2ISMYp6EVEMk5BLyKScQp6EZGMU9CLiGScgl5EJOMU9CIiGVfVQa+ZLEWkGowZ9GZ2\nt5ntMbNNibbIzF4zs2fjnwsSr7Wa2XYz22pm55ar48XKzWR5443hUWEvIlmVzxH9N4Hzhmn/K3ef\nH//8CMDM5gGLgcb4M6vNrLZUnS0lzWQpItVizKB398eAN/Jc30XAPe5+0N13ANuBM4roX9loJksR\nqRbF1Oj/l5k9H5d2jo7bTgR+mXjPq3HbpJObyXLVKt2ARESyrdCg7wB+G5gPdANfHe8KzGyZmXWZ\nWdfevXsL7EZxmpuhtVUhLyLZVlDQu/
tud+919z7gLgbKM68BJyfeelLcNtw61rh7k7s3zZw5s5Bu\niIhIHgoKejOrTzxdBORG5DwELDazqWbWAMwFniquiyIiUowxbzxiZt8FFgLHmtmrQBuw0MzmAw78\nArgCwN03m9m9wBagB7jK3XvL03UREcmHuXul+0BTU5N3dXVVuhsiIqliZhvdvWms91X1lbEiItVA\nQS8iknEKejTnjYhk25gnY7MuN+fNoUPhClldPCUiWVP1R/Sa80ZEsq7qg15z3ohI1lV96SY3501n\nZwh5lW1EJGuqPughhLsCXkSyqupLNyIiWaegFxHJOAW9iEjGKehFRDJOQT+ErpIVkazRqJsEXSUr\nIlmkI/oEXSUrIlmkoE/QVbIikkUq3SToKlkRySIF/RC6SlZEskalGxGRjFPQi4hknIJeRCTjFPSj\n0MVTIpIFOhk7Al08JSJZoSP6EejiKRHJCgX9CHTxlIhkhUo3I9DFUyKSFQr6UejiKRHJApVuREQy\nTkEvIpJxCvo8aDy9iKSZavRj0Hh6EUk7HdGPQePpRSTtFPRj0Hh6EUk7lW7GoPH0IpJ2Yx7Rm9nd\nZrbHzDYl2maY2cNmti1+PDrxWquZbTezrWZ2brk6PpGam6G1VSEvIumUT+nmm8B5Q9puANa7+1xg\nffwcM5sHLAYa48+sNrPakvVWRKC7GxYsgOeeC4+7do3eNtbrE70erXv49ZSTu4/5A8wBNiWebwXq\n4+V6YGu83Aq0Jt73T0DzWOs//fTTPS0ef9z9ppvCo0xSv/qV+0c+4v7ss+Gxu7s0baVaT7Hrbmlx\nr6lxb2wMjy0to7eN9fpEr0frHn49BQC6PJ8Mz+tNhwf9rxPLlnsO/C3w6cRrXwcuHmv9aQn6xx93\nP/JI99ra8KiwL4FSh2ghQZiWwICq/jlUM/62Qj4z0etuW5h4bdq0cf33mbCgj5//R/yYd9ADy4Au\noGv27NmFxsSEuummEPIQHm+6qdI9KoNkYA7XVuqj16VLSxeYGQ+Mv2weWH5uVqKttta9pubwttra\n0V/Pt61U6ylw3d8+LbQdv2KcbbW14//MRK+7ttaJcJ8+3X3JksH/7/JQ7qCvytJNZo/ohzsabmkZ\nM4zbzqK/re0s8m9raRnYY5YwMMcKwor/py5y3UT0/7mtPj3RFm/zcG1jvT7R69G6R1hPgeWbfIO+\n0OGVDwFLgVvixx8k2r9jZrcBJwBzgacK/B2TzqQeatndDYsXw9//ffjnk1s+/vhRPxO1nknEWbz9\n88eYUl8PQLQQoo4O6OgI73vsMd6ugSm55wCbN3P9VuDR0BaFxjHbrq8bWF53GizZBLO/AN1fBWpq\nWDevb6Dt9lpwz68NuO7cXq59qhZ6e/l/J8MH9sB158K1G3oBeHNq6PqudxKugBtHWyGfKfW6IW5r\nbGTWvs3Q2AhshoYGAGbt23FYG+wY9fV820q1nvGs26KBzV7+h+GxkLZSraec67Yv9wEdtHXOIlqY\neKFELOwURnmD2XeBhcCxwG6gDXgQuBeYDbwCfMrd34jf/yXgcqAHuMbdfzxWJ5qamryrq6vwrahW\nyXBfuRLuvBOuuAKA6KUOopMvJbJOolueBPcQ6l94iOivPhbaTjqpP0zqV8RhC7xVB0f2hOVcGPe/\nngjj+hWHB++obUD9Nb39yx3ze2nZGP7BexR+X8fpFNzWv9zYyP29m/lkbSP2R5tL/+cuE87bHGs3\nvC3kVW4537ZCPjPR6y6EmW1096ax/wDzOOwv909aSjeTTkvLiLXe3FfD/q/7LS1hubHR99cNvG9Q\nWSBuO35FvFxXd9hXzbaFYTmtP+7e/5hczretkM9kad1p6GNa110Iyly6qXobNkxwCSd59D5nDhw8\nCMDsxJF4fxmjt7f/KHf/nxGO2js6CMcMm5nZCvsS5YBPXxweD/9KGg7rh37VvO8e2PUOuPpCeP3O\ndz
Nj2tHcP3VH/xG0rw1fw4drs6U7Bi/vWY7NWj1hR1iSPm0L2gY9FtJWqvWUc91llc/eoNw/aTui\nr8hJ2eSJzEsvDUfidXUDR+wMHLUe1Xr4crKNKJwU6jH8vlMTR+zLl7svWhSWFy1yX768v32yHgWN\nZ91tj7YNeiykrVTrSeu6K9FHGRmlHHVT7p+0Bf2EDrOcOtVzJZVcyWW08sl9p+J/25TnGf/GxlGD\nPLk8XJsCQ6SyFPRlNKFH9Imj9+RReS7QifAnL/jgwJF47L5TEwHe0ODe0BDaEuHuixYVFcYiUlkK\n+jIr+1QI06YNe/SeK7kQ4V5TM+KReL5HwSKSXvkG/ZjDKydC2odXlvzEbHc30XUfpn3ua8CQk58d\nv8WM6TP6T2RGfT8h6ngRgKgzKssYXBGZnPIdXqlRN0Uqy60GV62CX72G/8s8eOkl7Mt9+Moarr6w\njxm7fg1AW2cECyOixMcU8iIyHN1hqkglvdXgtGlgBh0dtC8AtmyBvr7w2pVX0rbr1P63KtRFJF8K\n+iIVfavB7m6iy04J81H/8R/z/CygLv6iNX06LFlC2+kr4I47+ks0IiLjodJNkQqe/yZ3AVRDA9ev\n2wlr64kWQvtyyF2oZF/cD6yj7R0TcEGFiGSWTsaW0LhOytbVDTvPzP2nwie31WI39uI7lsKbb8ID\nD5Sz2yKSUjoZO8HyPil75JFw4EA4el840JycfgDCDiC6bI5q8SJSNNXoSyTvk7IvvwyXXEL01HRW\n/5D+OWk8Av9eY1jes5y2Xacq5EWkJBT0JZLvSdlo653Ye7+DfXE/s/bBHfGXrqcuaoL3vjc80YlX\nESkhlW5KZOhJWYCbbz68Xh8tjIj+5nmor8cWr8b3LGfvrp9wxoNPA/H4eBGREtLJ2DLIt15fzA0H\nRETyPRmr0k0ZJOv1Mw52c+9fxOPkCdMU5EzIPNQiUvUU9KWQu+jpueeILjuFhe9/ndlTurls4Snc\nbK3cPn9nuNVfdzftP20fCH2dbBWRCaCgL0L/0fmqVbQ37IQlS2hv2Enzj27k3w6dzDc6d/KZ3rXh\nPR0dcMIJYXnlyor0V0Sqk2r0heru5q3ZJ/TfRDsnd2Pt5Dj5P30MbvrI4ato+3kt0cM9h78gIpIH\nXTBVLt3dRK1nEnEWM68L91496uDAPViTy6vvms+y157npBV99EbgNXXUfbkH/4vpsGgR/N2tFdsM\nEakeKt3kKVemiZacSHvDTqxhLfumwuofwn/eQv/FT/umxhc/RTCr7hW+xmfZ9U54kUZq+sLRu791\nAN71Ljj++EptjohUER3R56O7m+v/oB162omAuXthyaYwbUHLRmDuXGbt2waNjcBmaGgA4OKlO3j8\n+ffy+5sXMX0+rHlhAcf/tJc1NbVcvKWbYyq3RSJSRRT0+Vi1qr9MA+HovdfCcp9BzTnn8MKnLuGT\nC6NwwVNbBISLn5rbrmU94eKpG1+A3kfhqlp441xorcS2iEjVUdCPIjqnjvb/0QvHheerfwjLnoEH\n3we1Dm175lHTsjDU7ReuDp9JDJlMLuemSMhdRDXueetFRAqkUTejWbqUdc98iyUv1WH/uwdfWQPv\neQ+2ZBu+Z3mYU34cUwiX/N6yIlLVNOqmGPFUwgBvng70xEMg+/rgnHNoW3AJFHCxU3OzAl5EJp5G\n3QzV3c1HL5+CReFka26GyVn/BU9d8EHYtaskV7Ru2BDq9hs2FL0qEZFR6Yh+qFWrePi43+D3zoOX\nXsK+3IevrOGqD18B/3d1SX5F3jcpEREpAR3R5xx5JJiFqQoAtmwJpRqAK6/sn5+mFPK+SYmISAno\niD720a/O5+G9T/Q/z93ab0F9M7TdUdLfpRE4IjKRdEQfe3jvE/iuK8PIGsBX1uC7W+hc9njJf1fu\nJiWrVqlsIyLlV/VBH3VGYZgkwCuvhDINlLxcM1RzM7S2KuRFpPyq
ehx91BmF+eGHWHDKAjov65yQ\nPmhsvYgUKt9x9FUZ9FFnFIZIJsbLWxQmIgNg2jR4662y90Ojb0SkGBNyK0Ez+4WZvWBmz5pZV9w2\nw8weNrNt8ePRxfyOcmj/aTvWbtRfdYB1p8G++JT0oal1sGQJ7NgxIf3Q6BsRmQilqNGf5e7zE3uV\nG4D17j4XWB8/nxSS92v1Nqd7fwtvToWjesJNQI54u29Cpw/Ojb6prdXoGxEpn3KcjL0IiO+fx1rg\n42X4HeOWq8dbu7H/z+gfMz9rX/z6I72hrYwnYIcaOvoGdLWsiJResUHvwCNmttHMlsVtx7l7PIyF\nXfTP/TiYmS0zsy4z69q7d2+R3Rhb9L4rQod3LOW3Pw/MnQvTp/PC8cD06aFk8+qr45qkrBRyo28g\n1OtvvDE8KuxFpFSKDfrfc/f5wPnAVWY26M6oHs70Dnu2193XuHuTuzfNnDmzyG6MLOqMsHbjrdkn\nhJOta9ey653Atm2wfz9RJ+GEbIXv+KR6vYiUS1FB7+6vxY97gO8DZwC7zaweIH7cU2wnixGd/xU8\nCjfsXndaaGvrjF+cOxceeaTsY+bzoXq9iJRLwUFvZkeZ2Ttzy8BHgU3AQ8DS+G1LgR8U28li3Prt\n5XDJJTB9Om/Gd4iKflYb6vHnnBPqJHfcMeElm6F0tayIlEsxc90cB3zfzHLr+Y67/6OZPQ3ca2af\nBV4BPlV8Nwt33abbuPZdV8KBAyzYGTd+4hMwc+bAFbGThOarF5FyKDjo3f1l4IPDtL8OnF1Mp0pm\nyNQG85YtgzVrQvsdpZ2orJR0tayIlFImZ68cOrWB/c6PAWj7j5lEkzjgQVfLikjpZTPoz/8KUZjZ\nYMjUBl+Bt6LhPzRJDDf6RkEvIsXI5uyVL7/cfwIWGBgnP0FTGxRDo29EpNQyeURPfT1P/9dWPnzg\nAG0/r50U4+TzlRt9oxq9iJRKNoMe2PmvG/nwlcuJkidgUyI5+kYnZkWkWJkN+osXg+duATjJT8CO\nRCdmRaQUMlWjz013YO0G0L+cnLUyTTQtgoiUQraCfmGEtzm+uwUA392Ct3m4yUgK6cSsiJRCtko3\niTtGEQEdHeFngu4YVWrJE7PHHDNwRK/yjYiMR7aC/uWX4dpr4cEHaevcH4ZVLloEt95a6Z4VLBfq\nqtWLSKEyVbqhvj4MozxwgOiJaakaVjka1epFpBjZCnqA3bvDtMNPPDEpph8uhaG1+mOO0Z2oRCR/\nFu4NUllNTU3e1dVV6W5Marnx9MccA9dcozKOiICZbUzcr3tE2Tuiz6jcLQdff11lHBEZHwV9yiTL\nOLW1sHOnSjgiMjoFfcrkhlx+7nPhJll33aWbiYvI6BT0KdTcDLNnQ0/PQAnnW9/SCVoRGV62xtFX\nkVwJ59ChUML5xjdC8OsErYgMpaBPqeRVszt3hhJO8uheM16KSI6GV2ZAcpbL2tpQu+/pCcuXXw6X\nXhrep/AXyZZ8h1cq6DMiN84+eXQPIfSnTBk+/BX4IulWfePou7uJLjslE1fCFiI3zv7SS0Od3sJM\nzbjD228PHnt/550aqSNSTbIT9KtW0d6wE1aurHRPKipXu7/iCpg6NRzBT5lyePhrpI5I9Uh/6SYx\nNbFF4FHcntKpiUspeRtCCMGeG52TrOVrpI5IOlVNjT764Qran7ntsPa201cQXZje6YnLZbhafk0N\nnHMORJHCXiRNqiboAWhpgTVrsC/34StrQt1i9erSdTCDciN1Dh6Evr4Q9lOnwu23h/l0NDpHZPLL\nN+jTP46+uxu+9z34zGeAtWFq4u7uSvdq0svV8qMIHnkkhP3Bg3D11WH5iCMU+iJZkf6gX7WK6AOv\nE02fTtuCNkjp/WErobk5BP3PfhZOzpqFUs7Q0Nd4fJF0S2/pRidhS2a4ue7NQsj39YX3jDYeHwY+\nr28AIhMn+6WbxP1hIRv3h62U
5uaBYH7/+weH/oEDYThmbjw+hOXe3jAe/+67Q/i//fbwtf5k+IO+\nDYhUQmqP6KPOiPafth/W3ragjUjlm5LYsGH4IZmHDoWwh8Fj83NqasJ7c2Wgmhqoqxs8nHO0HYG+\nGYjkpzpG3XziE1Bfj81aje9ZHk7CPvBA6TtY5fIZj588oq+pGVz2gcE7hNF2BOP5ZqBvC1LtqiPo\nY9ZueFvlt6OaDA3/ZPDmyj7JoZvJI/qh9X8Y/zeD5A6h0G8L423TTkQmm+zX6BPaFrRVugtVJ1nX\nzz1PStb6hwvRkXYEQ78Z5EIewmPyPMFwbbnRQvnuHPJtm4idyHCva8cipVC2I3ozOw/4a6AW+Jq7\n3zLSezV7ZfVJjvQZKQjz3SEU8m2hkLbxfsMYz05kuBJYIaWr8exEKrWesdatHVz+Klq6MbNa4F+B\nPwBeBZ4G/sTdtwz3fgW9jCSfHUIh3xYKCeNy7kRKUboaz06kUusZa935DN0db9tE7aAqURasdNA3\nA5G7nxs/bwVw95uHe7+CXkqpkJ1DpXcihZzUzretnDujcq07ed1GGnZQpSgLFjKxYKVr9CcCv0w8\nfxX4nTL9LpFBhp4/SLYX2zbWuYdSHhmWYseStiP63NDdoddtQH7naIZry+0oi11POdd96FD4uy9X\nyapiJ2PNbBmwDGD27NmV6obIuJRzJzLc66XYsUy2MsVo6x5r6O5k3UGV4og+92dQDirdiMikMtrQ\n3cm6g6rWGn0d4WTs2cBrhJOxl7j75uHer6AXERm/itbo3b3HzK4G/okwvPLukUJeRETKq2w1enf/\nEfCjcq1fRETyU1PpDoiISHkp6EVEMk5BLyKScQp6EZGMmxTTFJvZXuCVIlZxLPDvJepOpWlbJidt\ny+RU7dtyirvPHOtNkyLoi2VmXfmMJU0DbcvkpG2ZnLQt+VHpRkQk4xT0IiIZl5WgX1PpDpSQtmVy\n0rZMTtqWPGSiRi8iIiPLyhG9iIiMINVBb2bnmdlWM9tuZjdUuj/jYWYnm9mjZrbFzDab2efj9hlm\n9rCZbYsfj650X/NlZrVm9i9m9g/x81Rui5m928zuM7OXzOxFM2tO8ba0xv/GNpnZd81sWpq2xczu\nNrM9ZrYp0TZi/+Pt3R7nwrmV6fXwRtiWv4z/nT1vZt83s3cnXivZtqQ26OP70t4BnA/MA/7EzOZV\ntlfj0gOscPd5wJnAVXH/bwDWu/tcYH38PC0+D7yYeJ7Wbflr4B/d/VTgg4RtSt22mNkcws19Tnf3\n0wgzyS4mXdvyTeC8IW3D9j/+/7MYaIw/szrOicnimxy+LQ8Dp7n7BwhTu7dC6bcltUEPnAFsd/eX\n3f0QcA9wUYX7lDd373b3Z+Ll3xDC5ETCNqyN37YW+Hhlejg+ZnYS8D+BryWaU7ctZvZbwEeArwO4\n+yF3/zUp3BbgTeBt4Mj4HhHTgV+Rom1x98eAN4Y0j9T/i4B73P2gu+8AthNyYlIYblvc/Z/dvSd+\n+gRwUrxc0m1Jc9APd1/aEyvUl6LER14fAp4EjnP37vilXcBxFerWeN0OfBFI3Mo6ldvSAOwFvhGX\nob5mZkeRwm1x9zeAW4GdQDfwn+7+z6RwW4YYqf9pz4TLgR/HyyXdljQHfSaY2TuA+4Fr3P3N5Gse\nhkRN+mFRZnYhsMfdN470nrRsC+EeDf8N6HD3DwH7GFLaSMu2mNl7gC8Qdl4nAEeZ2aeT70nLtowk\n7f3PMbMvEcq568qx/jQH/WvAyYnnJ8VtqWFmUwghv87dH4ibd5tZffx6PbCnUv0bh98FPmZmvyCU\n0H7fzL5NOrflVeBVd38yfn4fIfjTuC1NwOPuvtfd3wYeAP476dyWpJH6n8pMMLPLgAuBJT4w3r2k\n25LmoH8amGtmDWZ2BOHExUMV7lPezMwIdeAX3f22xEsPAUvj5aXADya6b+Pl7q3ufpK7zyH8Pf
zE\n3T9NOrdlF/BLM3tf3HQ2sIUUbguwFTjTzKbH/97OJpwLSuO2JI3U/4eAxWY21cwagLnAUxXoX97M\n7DxCyfNj7r4/8VJpt8XdU/sDXEA4U/1vwJcq3Z9x9v33CF85nweejX8uAI4hjCTYBjwCzKh0X8e5\nXQuBf4iXU7ktwHygK/67eRA4OsXbcj1hR7UJ+Dtgapq2Bfgu4fzC24RvW58drf/Al+I82AqcX+n+\n57Et2wm1+FwG/J9ybIuujBURybg0l25ERCQPCnoRkYxT0IuIZJyCXkQk4xT0IiIZp6AXEck4Bb2I\nSMYp6EVEMu7/A095SpyJ3T8qAAAAAElFTkSuQmCC\n",
512 | "text/plain": [
513 | ""
514 | ]
515 | },
516 | "metadata": {},
517 | "output_type": "display_data"
518 | }
519 | ],
520 | "source": [
521 | "#Iterations to do training\n",
522 | "for epoch in range(120):\n",
523 | " \n",
524 | " start=0\n",
525 | " end=100\n",
526 | " for i in range(14):\n",
527 | " \n",
528 | " X=X_train[start:end]\n",
529 | " Y=y_train[start:end]\n",
530 | " start=end\n",
531 | " end=start+100\n",
532 | " sess.run(train_step,feed_dict={rnn._inputs:X, y:Y})\n",
533 | " \n",
534 | " Loss=str(sess.run(cross_entropy,feed_dict={rnn._inputs:X, y:Y}))\n",
535 | " Train_accuracy=str(sess.run(accuracy,feed_dict={rnn._inputs:X_train[:500], y:y_train[:500]}))\n",
536 | " Test_accuracy=str(sess.run(accuracy,feed_dict={rnn._inputs:X_test, y:y_test}))\n",
537 | " \n",
538 | "\n",
539 | " pl.plot([epoch],Loss,'b.',)\n",
540 | " pl.plot([epoch],Train_accuracy,'r*',)\n",
541 | " pl.plot([epoch],Test_accuracy,'g+')\n",
542 | " display.clear_output(wait=True)\n",
543 | " display.display(pl.gcf()) \n",
544 | " \n",
545 | " sys.stdout.flush()\n",
546 | " print(\"\\rIteration: %s Loss: %s Train Accuracy: %s Test Accuracy: %s\"%(epoch,Loss,Train_accuracy,Test_accuracy)),\n",
547 | " sys.stdout.flush()\n",
548 | "\n"
549 | ]
550 | },
551 | {
552 | "cell_type": "code",
553 | "execution_count": null,
554 | "metadata": {
555 | "collapsed": true,
556 | "deletable": true,
557 | "editable": true
558 | },
559 | "outputs": [],
560 | "source": []
561 | }
562 | ],
563 | "metadata": {
564 | "kernelspec": {
565 | "display_name": "Python 2",
566 | "language": "python",
567 | "name": "python2"
568 | },
569 | "language_info": {
570 | "codemirror_mode": {
571 | "name": "ipython",
572 | "version": 2
573 | },
574 | "file_extension": ".py",
575 | "mimetype": "text/x-python",
576 | "name": "python",
577 | "nbconvert_exporter": "python",
578 | "pygments_lexer": "ipython2",
579 | "version": "2.7.6"
580 | }
581 | },
582 | "nbformat": 4,
583 | "nbformat_minor": 0
584 | }
585 |
--------------------------------------------------------------------------------