├── 3layer.py
├── Illustration of network.JPG
├── README.md
└── Screen shot of code being run.png

/3layer.py:
--------------------------------------------------------------------------------
from numpy import exp, array, random, dot

class NeuralNetwork():
    def __init__(self):
        random.seed(1)

        # set the number of nodes in layer 2 and layer 3
        # more nodes give the hidden layers more capacity to fit the training data
        l2 = 5
        l3 = 4

        # assign random weights to the matrices in the network
        # shape is (no. of nodes in previous layer) x (no. of nodes in following layer)
        self.synaptic_weights1 = 2 * random.random((3, l2)) - 1
        self.synaptic_weights2 = 2 * random.random((l2, l3)) - 1
        self.synaptic_weights3 = 2 * random.random((l3, 1)) - 1

    def __sigmoid(self, x):
        return 1 / (1 + exp(-x))

    # derivative of the sigmoid function, expressed in terms of the sigmoid's output;
    # it indicates how confident we are about the existing weight
    def __sigmoid_derivative(self, x):
        return x * (1 - x)

    # train the neural network, adjusting the synaptic weights on each iteration
    def train(self, training_set_inputs, training_set_outputs, number_of_training_iterations):
        for iteration in range(number_of_training_iterations):

            # pass the training set through the neural network
            # a2 means the activations fed to the second layer
            a2 = self.__sigmoid(dot(training_set_inputs, self.synaptic_weights1))
            a3 = self.__sigmoid(dot(a2, self.synaptic_weights2))
            output = self.__sigmoid(dot(a3, self.synaptic_weights3))

            # error term (delta) for the output layer
            del4 = (training_set_outputs - output) * self.__sigmoid_derivative(output)

            # propagate the error terms back through the hidden layers
            del3 = dot(self.synaptic_weights3, del4.T) * (self.__sigmoid_derivative(a3).T)
            del2 = dot(self.synaptic_weights2, del3) * (self.__sigmoid_derivative(a2).T)

            # get adjustments (gradients) for each layer
            adjustment3 = dot(a3.T, del4)
            adjustment2 = dot(a2.T, del3.T)
            adjustment1 = dot(training_set_inputs.T, del2.T)

            # adjust the weights accordingly
            self.synaptic_weights1 += adjustment1
            self.synaptic_weights2 += adjustment2
            self.synaptic_weights3 += adjustment3

    def forward_pass(self, inputs):
        # pass the inputs through the trained neural network
        a2 = self.__sigmoid(dot(inputs, self.synaptic_weights1))
        a3 = self.__sigmoid(dot(a2, self.synaptic_weights2))
        output = self.__sigmoid(dot(a3, self.synaptic_weights3))
        return output

if __name__ == "__main__":
    # initialise the 3-layer neural network
    neural_network = NeuralNetwork()

    print("Random starting synaptic weights (layer 1): ")
    print(neural_network.synaptic_weights1)
    print("\nRandom starting synaptic weights (layer 2): ")
    print(neural_network.synaptic_weights2)
    print("\nRandom starting synaptic weights (layer 3): ")
    print(neural_network.synaptic_weights3)

    # the training set: the output is simply the value of the first input column
    training_set_inputs = array([[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1]])
    training_set_outputs = array([[0, 1, 1, 0]]).T

    neural_network.train(training_set_inputs, training_set_outputs, 10000)

    print("\nNew synaptic weights (layer 1) after training: ")
    print(neural_network.synaptic_weights1)
    print("\nNew synaptic weights (layer 2) after training: ")
    print(neural_network.synaptic_weights2)
    print("\nNew synaptic weights (layer 3) after training: ")
    print(neural_network.synaptic_weights3)

    # test with a new input
    print("\nConsidering new situation [1,0,0] -> ?")
    print(neural_network.forward_pass(array([1, 0, 0])))

--------------------------------------------------------------------------------
/Illustration of network.JPG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jiexunsee/Neural-Network-with-Python/0a619018ab59f54e430d96e194a336e879086cb4/Illustration of network.JPG

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
## 3 Layer Neural Network
In response to Siraj Raval's video "How to Make a Neural Network - Intro to Deep Learning #2".

This is a neural network with 3 layers (2 hidden), made using just numpy. It is an adapted version of Siraj's code, which had only a single layer. The activation function used throughout the network is the sigmoid function.
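To try it out, a minimal usage sketch is shown below. It assumes the `NeuralNetwork` class from `3layer.py` is in scope (since `3layer` is not an importable module name, the simplest way to run everything is just `python 3layer.py`), and it mirrors the example in the script's `__main__` block:

```python
from numpy import array

# toy training set: the output is the value of the first input column
training_set_inputs = array([[0, 0, 1], [1, 1, 1], [1, 0, 1], [0, 1, 1]])
training_set_outputs = array([[0, 1, 1, 0]]).T

# train the 3-layer network, then query it on an unseen input
neural_network = NeuralNetwork()
neural_network.train(training_set_inputs, training_set_outputs, 10000)
print(neural_network.forward_pass(array([1, 0, 0])))
```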
Here is a pictorial illustration of the network:

![Illustration of neural network](https://github.com/jiexunsee/Neural-Network-with-Python/blob/master/Illustration%20of%20network.JPG?raw=true "Illustration")

Below is a screenshot of the code being run, showing the weights being updated after the backpropagation adjustments:

![screenshot](https://github.com/jiexunsee/Neural-Network-with-Python/blob/master/Screen%20shot%20of%20code%20being%20run.png?raw=true)

--------------------------------------------------------------------------------
/Screen shot of code being run.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/jiexunsee/Neural-Network-with-Python/0a619018ab59f54e430d96e194a336e879086cb4/Screen shot of code being run.png
--------------------------------------------------------------------------------