├── Deep-Learning-for-Beginners-master
│   ├── Python
│   │   ├── README.md
│   │   ├── ch.2
│   │   │   ├── DeltaBatch.py
│   │   │   ├── DeltaSGD.py
│   │   │   ├── DeltaXOR.py
│   │   │   ├── SGDvsBatch.py
│   │   │   ├── Sigmoid.py
│   │   │   ├── TestDeltaBatch.py
│   │   │   ├── TestDeltaSGD.py
│   │   │   └── TestDeltaXOR.py
│   │   ├── ch.3
│   │   │   ├── BackpropCE.py
│   │   │   ├── BackpropMnt.py
│   │   │   ├── BackpropXOR.py
│   │   │   ├── CEvsSSE.py
│   │   │   ├── Sigmoid.py
│   │   │   ├── TestBackpropCE.py
│   │   │   ├── TestBackpropMmt.py
│   │   │   └── TestBackpropXOR.py
│   │   ├── ch.4
│   │   │   ├── MultiClass.py
│   │   │   ├── RealMultiClass.py
│   │   │   ├── Sigmoid.py
│   │   │   ├── Softmax.py
│   │   │   └── TestMultiClass.py
│   │   ├── ch.5
│   │   │   ├── DeepDropout.py
│   │   │   ├── DeepReLU.py
│   │   │   ├── Dropout.py
│   │   │   ├── ReLU.py
│   │   │   ├── Sigmoid.py
│   │   │   ├── Softmax.py
│   │   │   ├── TestDeepDropout.py
│   │   │   └── TestDeepReLU.py
│   │   └── ch.6
│   │       ├── Conv.py
│   │       ├── LoadMnistData.py
│   │       ├── MNIST
│   │       │   ├── t10k-images-idx3-ubyte.gz
│   │       │   ├── t10k-images.idx3-ubyte
│   │       │   ├── t10k-labels-idx1-ubyte.gz
│   │       │   ├── t10k-labels.idx1-ubyte
│   │       │   └── train-labels.idx1-ubyte
│   │       ├── MnistConv.py
│   │       ├── Pool.py
│   │       ├── ReLU.py
│   │       ├── Sigmoid.py
│   │       ├── Softmax.py
│   │       └── TestMnistConv.py
│   ├── README.md
│   ├── ch.2
│   │   ├── DeltaBatch.m
│   │   ├── DeltaSGD.m
│   │   ├── DeltaXOR.m
│   │   ├── SGDvsBatch.m
│   │   ├── Sigmoid.m
│   │   ├── TestDeltaBatch.m
│   │   ├── TestDeltaSGD.m
│   │   └── TestDeltaXOR.m
│   ├── ch.3
│   │   ├── BackpropCE.m
│   │   ├── BackpropMmt.m
│   │   ├── BackpropXOR.m
│   │   ├── CEvsSSE.m
│   │   ├── Sigmoid.m
│   │   ├── TestBackpropCE.m
│   │   ├── TestBackpropMmt.m
│   │   └── TestBackpropXOR.m
│   ├── ch.4
│   │   ├── MultiClass.m
│   │   ├── RealMultiClass.m
│   │   ├── Sigmoid.m
│   │   ├── Softmax.m
│   │   ├── TestMultiClass.m
│   │   └── rng.m
│   ├── ch.5
│   │   ├── DeepDropout.m
│   │   ├── DeepReLU.m
│   │   ├── Dropout.m
│   │   ├── ReLU.m
│   │   ├── Sigmoid.m
│   │   ├── Softmax.m
│   │   ├── TestDeepDropout.m
│   │   └── TestDeepReLU.m
│   └── ch.6
│       ├── Conv.m
│       ├── MNIST
│       │   ├── t10k-images.idx3-ubyte
│       │   ├── t10k-labels.idx1-ubyte
│       │   └── train-labels.idx1-ubyte
│       ├── MnistConv.m
│       ├── MnistConv.mat
│       ├── PlotFeatures.m
│       ├── Pool.m
│       ├── ReLU.m
│       ├── Softmax.m
│       ├── TestMnistConv.m
│       ├── display_network.m
│       ├── loadMNISTImages.m
│       ├── loadMNISTLabels.m
│       └── rng.m
├── LICENSE.txt
├── README.md
└── contributing.md

/Deep-Learning-for-Beginners-master/Python/README.md:
--------------------------------------------------------------------------------
1 | Numpy, Scipy, and Matplotlib are required.
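(One typical way to install them, assuming a standard pip setup: `pip install numpy scipy matplotlib`. The exact command may vary by environment.)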
2 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.2/DeltaBatch.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from Sigmoid import * 3 | 4 | def DeltaBatch(W, X, D): 5 | alpha = 0.9 6 | dWsum = np.zeros(3) 7 | 8 | N = 4 9 | for k in range(N): 10 | x = X[k, :].T 11 | d = D[k] 12 | 13 | v = np.matmul(W, x) 14 | y = Sigmoid(v) 15 | 16 | e = d - y 17 | delta = y*(1-y) * e 18 | dW = alpha * delta * x 19 | 20 | dWsum = dWsum + dW 21 | 22 | dWavg = dWsum / N 23 | 24 | W[0][0] = W[0][0] + dWavg[0] 25 | W[0][1] = W[0][1] + dWavg[1] 26 | W[0][2] = W[0][2] + dWavg[2] 27 | 28 | return W -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.2/DeltaSGD.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from Sigmoid import * 3 | 4 | 5 | def DeltaSGD(W, X, D): 6 | alpha = 0.9 7 | 8 | N = 4 9 | for k in range(N): 10 | x = X[k, :].T 11 | d = D[k] 12 | 13 | v = np.matmul(W, x) 14 | y = Sigmoid(v) 15 | 16 | e = d - y 17 | delta = y*(1-y) * e 18 | 19 | dW = alpha*delta*x 20 | 21 | W[0][0] = W[0][0] + dW[0] 22 | W[0][1] = W[0][1] + dW[1] 23 | W[0][2] = W[0][2] + dW[2] 24 | 25 | return W 26 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.2/DeltaXOR.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from Sigmoid import * 3 | 4 | 5 | def DeltaXOR(W, X, D): 6 | alpha = 0.9 7 | 8 | N = 4 9 | for k in range(N): 10 | x = X[k,:].T 11 | d = D[k] 12 | 13 | v = np.matmul(W, x) 14 | y = Sigmoid(v) 15 | 16 | e = d - y 17 | delta = y*(1-y) * e 18 | 19 | dW = alpha*delta*x 20 | 21 | W[0][0] = W[0][0] + dW[0] 22 | W[0][1] = W[0][1] + dW[1] 23 | W[0][2] = W[0][2] + dW[2] 24 | 25 | return W 26 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.2/SGDvsBatch.py: -------------------------------------------------------------------------------- 1 | from DeltaSGD import * 2 | from DeltaBatch import * 3 | from Sigmoid import * 4 | import matplotlib.pyplot as plt 5 | 6 | 7 | X = np.array([[0, 0, 1], 8 | [0, 1, 1], 9 | [1, 0, 1], 10 | [1, 1, 1]]) 11 | 12 | D = np.array([[0], 13 | [0], 14 | [1], 15 | [1]]) 16 | 17 | E1 = np.zeros(1000) 18 | E2 = np.zeros(1000) 19 | 20 | W1 = 2*np.random.random((1, 3)) - 1 21 | W2 = np.array(W1) 22 | 23 | for epoch in range(1000): 24 | W1 = DeltaSGD(W1, X, D) 25 | W2 = DeltaBatch(W2, X, D) 26 | 27 | es1 = 0 28 | es2 = 0 29 | N = 4 30 | for k in range(N): 31 | x = X[k, :].T 32 | d = D[k] 33 | 34 | v1 = np.matmul(W1, x) 35 | y1 = Sigmoid(v1) 36 | es1 = es1 + (d - y1)**2 37 | 38 | v2 = np.matmul(W2, x) 39 | y2 = Sigmoid(v2) 40 | es2 = es2 + (d - y2)**2 41 | 42 | E1[epoch] = es1/N 43 | E2[epoch] = es2/N 44 | 45 | 46 | SGD, = plt.plot(E1, 'r') 47 | Batch, = plt.plot(E2, 'b:') 48 | plt.xlabel("Epoch") 49 | plt.ylabel("Average of Training Error") 50 | plt.legend([SGD, Batch], ['SGD', 'Batch']) 51 | plt.show() -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.2/Sigmoid.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def Sigmoid(x): 5 | return 1.0 / (1.0 + np.exp(-x)) 
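Note on Sigmoid.py above: for large negative inputs, np.exp(-x) overflows float64 and NumPy emits a RuntimeWarning, although the returned value still saturates to 0 correctly. A numerically quieter drop-in sketch, assuming the SciPy dependency listed in the README may be used here:

from scipy.special import expit

def Sigmoid(x):
    # scipy.special.expit evaluates 1/(1 + exp(-x)) without overflow warnings
    return expit(x)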
-------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.2/TestDeltaBatch.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from Sigmoid import * 3 | from DeltaBatch import * 4 | 5 | 6 | def TestDeltaBatch(): 7 | X = np.array([[0, 0, 1], 8 | [0, 1, 1], 9 | [1, 0, 1], 10 | [1, 1, 1]]) 11 | 12 | D = np.array([[0], 13 | [0], 14 | [1], 15 | [1]]) 16 | 17 | W = 2*np.random.random((1, 3)) - 1 18 | 19 | for _epoch in range(40000): 20 | W = DeltaBatch(W, X, D) 21 | 22 | N = 4 23 | for k in range(N): 24 | x = X[k,:].T 25 | v = np.matmul(W, x) 26 | y = Sigmoid(v) 27 | print(y) 28 | 29 | if __name__ == '__main__': 30 | TestDeltaBatch() -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.2/TestDeltaSGD.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from Sigmoid import * 3 | from DeltaSGD import * 4 | 5 | 6 | def TestDeltaSGD(): 7 | X = np.array([[0, 0, 1], 8 | [0, 1, 1], 9 | [1, 0, 1], 10 | [1, 1, 1]]) 11 | 12 | D = np.array([[0], 13 | [0], 14 | [1], 15 | [1]]) 16 | 17 | W = 2*np.random.random((1, 3)) - 1 18 | 19 | for _epoch in range(10000): 20 | W = DeltaSGD(W, X, D) 21 | 22 | N = 4 23 | for k in range(N): 24 | x = X[k,:].T 25 | v = np.matmul(W, x) 26 | y = Sigmoid(v) 27 | print(y) 28 | 29 | if __name__ == '__main__': 30 | TestDeltaSGD() -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.2/TestDeltaXOR.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from Sigmoid import * 3 | from DeltaXOR import * 4 | 5 | 6 | def TestDeltaXOR(): 7 | X = np.array([[0, 0, 1], 8 | [0, 1, 1], 9 | [1, 0, 1], 10 | [1, 1, 1]]) 11 | 12 | D = np.array([[0], 13 | [1], 14 | [1], 15 | [0]]) 16 | 17 | W = 2*np.random.random((1, 3)) - 1 18 | 19 | for _epoch in range(40000): #train 20 | W = DeltaXOR(W, X, D) 21 | 22 | N = 4 #inference 23 | for k in range(N): 24 | x = X[k,:].T 25 | v = np.matmul(W, x) 26 | y = Sigmoid(v) 27 | print(y) 28 | 29 | if __name__ == '__main__': 30 | TestDeltaXOR() -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.3/BackpropCE.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from Sigmoid import * 3 | 4 | 5 | def BackpropCE(W1, W2, X, D): 6 | alpha = 0.9 7 | 8 | N = 4 9 | for k in range(N): 10 | x = X[k, :].T 11 | d = D[k] 12 | 13 | v1 = np.matmul(W1, x) 14 | y1 = Sigmoid(v1) 15 | v = np.matmul(W2, y1) 16 | y = Sigmoid(v) 17 | 18 | e = d - y 19 | delta = e 20 | 21 | e1 = np.matmul(W2.T, delta) 22 | delta1 = y1*(1-y1) * e1 23 | 24 | dW1 = (alpha*delta1).reshape(4, 1) * x.reshape(1, 3) 25 | W1 = W1 + dW1 26 | 27 | dW2 = alpha * delta * y1 28 | W2 = W2 + dW2 29 | 30 | return W1, W2 31 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.3/BackpropMnt.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from Sigmoid import * 3 | 4 | 5 | def BackPropMmt(W1, W2, X, D): 6 | alpha = 0.9 7 | beta = 0.9 8 | 9 | mmt1 = np.zeros_like(W1) 10 | mmt2 = np.zeros_like(W2) 11 | 12 | N = 4 13 | for k in range(N): 14 | x = X[k, :].T 15 | d = D[k] 16 | 
17 | v1 = np.matmul(W1, x) 18 | y1 = Sigmoid(v1) 19 | v = np.matmul(W2, y1) 20 | y = Sigmoid(v) 21 | 22 | e = d - y 23 | delta = y*(1-y) * e 24 | 25 | e1 = np.matmul(W2.T, delta) 26 | delta1 = y1*(1-y1) * e1 27 | 28 | dW1 = (alpha*delta1).reshape(4, 1) * x.reshape(1, 3) 29 | mmt1 = dW1 + beta*mmt1 30 | W1 = W1 + mmt1 31 | 32 | dW2 = alpha * delta * y1 33 | mmt2 = dW2 + beta*mmt2 34 | W2 = W2 + mmt2 35 | 36 | return W1, W2 37 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.3/BackpropXOR.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from Sigmoid import * 3 | 4 | 5 | def BackpropXOR(W1, W2, X, D): 6 | alpha = 0.9 7 | 8 | N = 4 9 | for k in range(N): 10 | x = X[k, :].T 11 | d = D[k] 12 | 13 | v1 = np.matmul(W1, x) 14 | y1 = Sigmoid(v1) 15 | v = np.matmul(W2, y1) 16 | y = Sigmoid(v) 17 | 18 | e = d - y 19 | delta = y*(1-y) * e 20 | 21 | e1 = np.matmul(W2.T, delta) 22 | delta1 = y1*(1-y1) * e1 23 | 24 | dW1 = (alpha*delta1).reshape(4, 1) * x.reshape(1, 3) 25 | W1 = W1 + dW1 26 | 27 | dW2 = alpha * delta * y1 28 | W2 = W2 + dW2 29 | 30 | return W1, W2 31 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.3/CEvsSSE.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | from Sigmoid import * 4 | from BackpropCE import * 5 | from BackpropXOR import * 6 | 7 | 8 | X = np.array([[0, 0, 1], 9 | [0, 1, 1], 10 | [1, 0, 1], 11 | [1, 1, 1]]) 12 | 13 | D = np.array([[0], 14 | [0], 15 | [1], 16 | [1]]) 17 | 18 | E1 = np.zeros(1000) 19 | E2 = np.zeros(1000) 20 | 21 | W11 = 2*np.random.random((4, 3)) - 1 22 | W12 = 2*np.random.random((1, 4)) - 1 23 | W21 = np.array(W11) 24 | W22 = np.array(W12) 25 | 26 | for _epoch in range(1000): 27 | W11, W12 = BackpropCE(W11, W12, X, D) 28 | W21, W22 = BackpropXOR(W21, W22, X, D) 29 | 30 | es1 = 0 31 | es2 = 0 32 | N = 4 33 | for k in range(N): 34 | x = X[k, :].T 35 | d = D[k] 36 | 37 | v1 = np.matmul(W11, x) 38 | y1 = Sigmoid(v1) 39 | v = np.matmul(W12, y1) 40 | y = Sigmoid(v) 41 | es1 = es1 + (d - y)**2 42 | 43 | v1 = np.matmul(W21, x) 44 | y1 = Sigmoid(v1) 45 | v = np.matmul(W22, y1) 46 | y = Sigmoid(v) 47 | es2 = es2 + (d - y)**2 48 | 49 | E1[_epoch] = es1 / N 50 | E2[_epoch] = es2 / N 51 | 52 | 53 | CE, = plt.plot(E1, 'r') 54 | SSE, = plt.plot(E2, 'b:') 55 | plt.xlabel('Epoch') 56 | plt.ylabel('Average of Training error') 57 | plt.legend([CE, SSE], ["Cross Entropy", "Sum of Squared Error"]) 58 | plt.show() -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.3/Sigmoid.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def Sigmoid(x): 5 | return 1.0 / (1.0 + np.exp(-x)) -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.3/TestBackpropCE.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from Sigmoid import * 3 | from BackpropCE import * 4 | 5 | 6 | def TestBackpropCE(): 7 | X = np.array([[0, 0, 1], 8 | [0, 1, 1], 9 | [1, 0, 1], 10 | [1, 1, 1]]) 11 | 12 | D = np.array([[0], 13 | [0], 14 | [1], 15 | [1]]) 16 | 17 | W1 = 2*np.random.random((4, 3)) - 1 18 | W2 = 2*np.random.random((1, 4)) 
- 1 19 | 20 | for _epoch in range(10000): 21 | W1, W2 = BackpropCE(W1, W2, X, D) 22 | 23 | N = 4 24 | for k in range(N): 25 | x = X[k, :].T 26 | v1 = np.matmul(W1, x) 27 | y1 = Sigmoid(v1) 28 | v = np.matmul(W2, y1) 29 | y = Sigmoid(v) 30 | print(y) 31 | 32 | if __name__ == '__main__': 33 | TestBackpropCE() 34 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.3/TestBackpropMmt.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from BackpropMnt import * 3 | from Sigmoid import * 4 | 5 | 6 | def TestBackpropMmt(): 7 | X = np.array([[0, 0, 1], 8 | [0, 1, 1], 9 | [1, 0, 1], 10 | [1, 1, 1]]) 11 | 12 | D = np.array([[0], 13 | [1], 14 | [1], 15 | [0]]) 16 | 17 | W1 = 2*np.random.random((4, 3)) - 1 18 | W2 = 2*np.random.random((1, 4)) - 1 19 | 20 | for _epoch in range(10000): 21 | W1, W2 = BackPropMmt(W1, W2, X, D) 22 | 23 | N = 4 24 | for k in range(N): 25 | x = X[k, :].T 26 | v1 = np.matmul(W1, x) 27 | y1 = Sigmoid(v1) 28 | v = np.matmul(W2, y1) 29 | y = Sigmoid(v) 30 | print(y) 31 | 32 | if __name__ == '__main__': 33 | TestBackpropMmt() -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.3/TestBackpropXOR.py: -------------------------------------------------------------------------------- 1 | from Sigmoid import * 2 | from BackpropXOR import * 3 | import numpy as np 4 | 5 | 6 | def TestBackpropXOR(): 7 | X = np.array([[0, 0, 1], 8 | [0, 1, 1], 9 | [1, 0, 1], 10 | [1, 1, 1]]) 11 | 12 | D = np.array([[0], 13 | [1], 14 | [1], 15 | [0]]) 16 | 17 | W1 = 2*np.random.random((4, 3)) - 1 18 | W2 = 2*np.random.random((1, 4)) - 1 19 | 20 | for _epoch in range(10000): 21 | W1, W2 = BackpropXOR(W1, W2, X, D) 22 | 23 | N = 4 24 | for k in range(4): 25 | x = X[k ,:].T 26 | v1 = np.matmul(W1, x) 27 | y1 = Sigmoid(v1) 28 | v = np.matmul(W2, y1) 29 | y = Sigmoid(v) 30 | print(y) 31 | 32 | if __name__ == '__main__': 33 | TestBackpropXOR() 34 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.4/MultiClass.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from Sigmoid import * 3 | from Softmax import * 4 | 5 | 6 | def MultiClass(W1, W2, X, D): 7 | alpha = 0.9 8 | 9 | N = 5 10 | for k in range(N): 11 | x = np.reshape(X[:,:,k], (25, 1)) 12 | d = D[k, :].T 13 | 14 | v1 = np.matmul(W1, x) 15 | y1 = Sigmoid(v1) 16 | v = np.matmul(W2, y1) 17 | y = Softmax(v) 18 | 19 | e = d - y 20 | delta = e 21 | 22 | e1 = np.matmul(W2.T, delta) 23 | delta1 = y1*(1-y1) * e1 24 | 25 | dW1 = alpha * delta1 * x.T 26 | W1 = W1 + dW1 27 | 28 | dW2 = alpha * delta * y1.T 29 | W2 = W2 + dW2 30 | 31 | return W1, W2 -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.4/RealMultiClass.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from Sigmoid import * 3 | from Softmax import * 4 | from TestMultiClass import * 5 | 6 | 7 | def RealMultiClass(): 8 | W1, W2 = TestMultiClass() 9 | 10 | X = np.zeros((5, 5, 5)) 11 | 12 | X[:, :, 0] = [[0,0,1,1,0], 13 | [0,0,1,1,0], 14 | [0,1,0,1,0], 15 | [0,0,0,1,0], 16 | [0,1,1,1,0]] 17 | 18 | X[:, :, 1] = [[1,1,1,1,0], 19 | [0,0,0,0,1], 20 | [0,1,1,1,0], 21 | [1,0,0,0,1], 22 | [1,1,1,1,1]] 23 | 24 | X[:, :, 2] = [[1,1,1,1,0], 25 | 
[0,0,0,0,1], 26 | [0,1,1,1,0], 27 | [1,0,0,0,1], 28 | [1,1,1,1,0]] 29 | 30 | X[:, :, 3] = [[0,1,1,1,0], 31 | [0,1,0,0,0], 32 | [0,1,1,1,0], 33 | [0,0,0,1,0], 34 | [0,1,1,1,0]] 35 | 36 | X[:, :, 4] = [[0,1,1,1,1], 37 | [0,1,0,0,0], 38 | [0,1,1,1,0], 39 | [0,0,0,1,0], 40 | [1,1,1,1,0]] 41 | 42 | 43 | N = 5 44 | for k in range(N): 45 | x = np.reshape(X[:, :, k], (25, 1)) 46 | v1 = np.matmul(W1, x) 47 | y1 = Sigmoid(v1) 48 | v = np.matmul(W2, y1) 49 | y = Softmax(v) 50 | 51 | print("N = {}: ".format(k+1)) 52 | print(y) 53 | 54 | if __name__ == '__main__': 55 | RealMultiClass() -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.4/Sigmoid.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def Sigmoid(x): 5 | return 1.0 / (1.0 + np.exp(-x)) -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.4/Softmax.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def Softmax(x): 5 | x = np.subtract(x, np.max(x)) # prevent overflow 6 | ex = np.exp(x) 7 | 8 | return ex / np.sum(ex) -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.4/TestMultiClass.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from MultiClass import * 3 | from Sigmoid import * 4 | from Softmax import * 5 | 6 | 7 | def TestMultiClass(): 8 | X = np.zeros((5, 5, 5)) 9 | 10 | X[:, :, 0] = [[0,1,1,0,0], 11 | [0,0,1,0,0], 12 | [0,0,1,0,0], 13 | [0,0,1,0,0], 14 | [0,1,1,1,0]] 15 | 16 | X[:, :, 1] = [[1,1,1,1,0], 17 | [0,0,0,0,1], 18 | [0,1,1,1,0], 19 | [1,0,0,0,0], 20 | [1,1,1,1,1]] 21 | 22 | X[:, :, 2] = [[1,1,1,1,0], 23 | [0,0,0,0,1], 24 | [0,1,1,1,0], 25 | [0,0,0,0,1], 26 | [1,1,1,1,0]] 27 | 28 | X[:, :, 3] = [[0,0,0,1,0], 29 | [0,0,1,1,0], 30 | [0,1,0,1,0], 31 | [1,1,1,1,1], 32 | [0,0,0,1,0]] 33 | 34 | X[:, :, 4] = [[1,1,1,1,1], 35 | [1,0,0,0,0], 36 | [1,1,1,1,0], 37 | [0,0,0,0,1], 38 | [1,1,1,1,0]] 39 | 40 | D = np.array([[[1,0,0,0,0]], 41 | [[0,1,0,0,0]], 42 | [[0,0,1,0,0]], 43 | [[0,0,0,1,0]], 44 | [[0,0,0,0,1]]]) 45 | 46 | 47 | W1 = 2*np.random.random((50, 25)) - 1 48 | W2 = 2*np.random.random(( 5, 50)) - 1 49 | 50 | for _epoch in range(10000): 51 | W1, W2 = MultiClass(W1, W2, X, D) 52 | 53 | 54 | N = 5 55 | for k in range(N): 56 | x = np.reshape(X[:, :, k], (25, 1)) 57 | v1 = np.matmul(W1, x) 58 | y1 = Sigmoid(v1) 59 | v = np.matmul(W2, y1) 60 | y = Softmax(v) 61 | 62 | print("Y = {}: ".format(k+1)) 63 | print(y) 64 | 65 | return W1, W2 66 | 67 | if __name__ == '__main__': 68 | TestMultiClass() -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.5/DeepDropout.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from Sigmoid import * 3 | from Dropout import * 4 | from Softmax import * 5 | 6 | 7 | def DeepDropout(W1, W2, W3, W4, X, D): 8 | alpha = 0.01 9 | 10 | N = 5 11 | for k in range(N): 12 | x = np.reshape(X[:, :, k], (25, 1)) 13 | 14 | v1 = np.matmul(W1, x) 15 | y1 = Sigmoid(v1) 16 | y1 = y1 * Dropout(y1, 0.2) 17 | 18 | v2 = np.matmul(W2, y1) 19 | y2 = Sigmoid(v2) 20 | y2 = y2 * Dropout(y2, 0.2) 21 | 22 | v3 = np.matmul(W3, y2) 23 | y3 = Sigmoid(v3) 24 | y3 = y3 * Dropout(y3, 0.2) 25 
| 26 | v = np.matmul(W4, y3) 27 | y = Softmax(v) 28 | 29 | d = D[k,:].T 30 | e = d-y 31 | delta = e 32 | 33 | e3 = np.matmul(W4.T, delta) 34 | delta3 = y3*(1-y3) * e3 35 | 36 | e2 = np.matmul(W3.T, delta3) 37 | delta2 = y2*(1-y2) * e2 38 | 39 | e1 = np.matmul(W2.T, delta2) 40 | delta1 = y1*(1-y1) * e1 41 | 42 | dW4 = alpha * delta * y3.T 43 | W4 = W4 + dW4 44 | 45 | dW3 = alpha * delta3 * y2.T 46 | W3 = W3 + dW3 47 | 48 | dW2 = alpha * delta2 * y1.T 49 | W2 = W2 + dW2 50 | 51 | dW1 = alpha * delta1 * x.T 52 | W1 = W1 + dW1 53 | 54 | return W1, W2, W3, W4 55 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.5/DeepReLU.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from Softmax import * 3 | from ReLU import * 4 | 5 | 6 | def DeepReLU(W1, W2, W3, W4, X, D): 7 | alpha = 0.01 8 | 9 | N = 5 10 | for k in range(N): 11 | x = np.reshape(X[:, :, k], (25, 1)) 12 | 13 | v1 = np.matmul(W1, x) 14 | y1 = ReLU(v1) 15 | 16 | v2 = np.matmul(W2, y1) 17 | y2 = ReLU(v2) 18 | 19 | v3 = np.matmul(W3, y2) 20 | y3 = ReLU(v3) 21 | 22 | v = np.matmul(W4, y3) 23 | y = Softmax(v) 24 | 25 | d = D[k, :].T 26 | e = d - y 27 | delta = e 28 | 29 | e3 = np.matmul(W4.T, delta) 30 | delta3 = (v3 > 0) * e3 31 | 32 | e2 = np.matmul(W3.T, delta3) 33 | delta2 = (v2 > 0) * e2 34 | 35 | e1 = np.matmul(W2.T, delta2) 36 | delta1 = (v1 > 0) * e1 37 | 38 | dW4 = alpha * delta * y3.T 39 | W4 = W4 + dW4 40 | 41 | dW3 = alpha * delta3 * y2.T 42 | W3 = W3 + dW3 43 | 44 | dW2 = alpha * delta2 * y1.T 45 | W2 = W2 + dW2 46 | 47 | dW1 = alpha * delta1 * x.T 48 | W1 = W1 + dW1 49 | 50 | return W1, W2, W3, W4 51 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.5/Dropout.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def Dropout(y, ratio): 5 | ym = np.zeros_like(y) 6 | 7 | num = round(y.size*(1-ratio)) 8 | idx = np.random.choice(y.size, num, replace=False) 9 | ym[idx] = 1.0 / (1.0 - ratio) 10 | 11 | return ym -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.5/ReLU.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def ReLU(x): 5 | return np.maximum(0, x) -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.5/Sigmoid.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def Sigmoid(x): 5 | return 1.0 / (1.0 + np.exp(-x)) -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.5/Softmax.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def Softmax(x): 5 | x = np.subtract(x, np.max(x)) # prevent overflow 6 | ex = np.exp(x) 7 | 8 | return ex / np.sum(ex) -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.5/TestDeepDropout.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from DeepDropout import * 3 | from Sigmoid import * 4 | from Softmax import * 5 | 6 | 7 | def TestDeepDropout(): 8 | X = np.zeros((5, 
5, 5)) 9 | 10 | X[:, :, 0] = [[0,1,1,0,0], 11 | [0,0,1,0,0], 12 | [0,0,1,0,0], 13 | [0,0,1,0,0], 14 | [0,1,1,1,0]] 15 | 16 | X[:, :, 1] = [[1,1,1,1,0], 17 | [0,0,0,0,1], 18 | [0,1,1,1,0], 19 | [1,0,0,0,0], 20 | [1,1,1,1,1]] 21 | 22 | X[:, :, 2] = [[1,1,1,1,0], 23 | [0,0,0,0,1], 24 | [0,1,1,1,0], 25 | [0,0,0,0,1], 26 | [1,1,1,1,0]] 27 | 28 | X[:, :, 3] = [[0,0,0,1,0], 29 | [0,0,1,1,0], 30 | [0,1,0,1,0], 31 | [1,1,1,1,1], 32 | [0,0,0,1,0]] 33 | 34 | X[:, :, 4] = [[1,1,1,1,1], 35 | [1,0,0,0,0], 36 | [1,1,1,1,0], 37 | [0,0,0,0,1], 38 | [1,1,1,1,0]] 39 | 40 | D = np.array([[[1,0,0,0,0]], 41 | [[0,1,0,0,0]], 42 | [[0,0,1,0,0]], 43 | [[0,0,0,1,0]], 44 | [[0,0,0,0,1]]]) 45 | 46 | W1 = 2*np.random.random((20, 25)) - 1 47 | W2 = 2*np.random.random((20, 20)) - 1 48 | W3 = 2*np.random.random((20, 20)) - 1 49 | W4 = 2*np.random.random(( 5, 20)) - 1 50 | 51 | 52 | for _epoch in range(20000): 53 | W1, W2, W3, W4 = DeepDropout(W1, W2, W3, W4, X, D) 54 | 55 | N = 5 56 | for k in range(N): 57 | x = np.reshape(X[:, :, k], (25, 1)) 58 | 59 | v1 = np.matmul(W1, x) 60 | y1 = Sigmoid(v1) 61 | 62 | v2 = np.matmul(W2, y1) 63 | y2 = Sigmoid(v2) 64 | 65 | v3 = np.matmul(W3, y2) 66 | y3 = Sigmoid(v3) 67 | 68 | v = np.matmul(W4, y3) 69 | y = Softmax(v) 70 | 71 | print("Y = ", k+1, ": ") 72 | print(y) 73 | 74 | if __name__ == '__main__': 75 | TestDeepDropout() -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.5/TestDeepReLU.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from DeepReLU import * 3 | from Softmax import * 4 | from ReLU import * 5 | 6 | 7 | def TestDeepReLU(): 8 | X = np.zeros((5, 5, 5)) 9 | 10 | X[:, :, 0] = [[0,1,1,0,0], 11 | [0,0,1,0,0], 12 | [0,0,1,0,0], 13 | [0,0,1,0,0], 14 | [0,1,1,1,0]] 15 | 16 | X[:, :, 1] = [[1,1,1,1,0], 17 | [0,0,0,0,1], 18 | [0,1,1,1,0], 19 | [1,0,0,0,0], 20 | [1,1,1,1,1]] 21 | 22 | X[:, :, 2] = [[1,1,1,1,0], 23 | [0,0,0,0,1], 24 | [0,1,1,1,0], 25 | [0,0,0,0,1], 26 | [1,1,1,1,0]] 27 | 28 | X[:, :, 3] = [[0,0,0,1,0], 29 | [0,0,1,1,0], 30 | [0,1,0,1,0], 31 | [1,1,1,1,1], 32 | [0,0,0,1,0]] 33 | 34 | X[:, :, 4] = [[1,1,1,1,1], 35 | [1,0,0,0,0], 36 | [1,1,1,1,0], 37 | [0,0,0,0,1], 38 | [1,1,1,1,0]] 39 | 40 | D = np.array([[[1,0,0,0,0]], 41 | [[0,1,0,0,0]], 42 | [[0,0,1,0,0]], 43 | [[0,0,0,1,0]], 44 | [[0,0,0,0,1]]]) 45 | 46 | W1 = 2*np.random.random((20, 25)) - 1 47 | W2 = 2*np.random.random((20, 20)) - 1 48 | W3 = 2*np.random.random((20, 20)) - 1 49 | W4 = 2*np.random.random(( 5, 20)) - 1 50 | 51 | 52 | for _epoch in range(10000): 53 | W1, W2, W3, W4 = DeepReLU(W1, W2, W3, W4, X, D) 54 | 55 | N = 5 56 | for k in range(N): 57 | x = np.reshape(X[:, :, k], (25, 1)) 58 | 59 | v1 = np.matmul(W1, x) 60 | y1 = ReLU(v1) 61 | 62 | v2 = np.matmul(W2, y1) 63 | y2 = ReLU(v2) 64 | 65 | v3 = np.matmul(W3, y2) 66 | y3 = ReLU(v3) 67 | 68 | v = np.matmul(W4, y3) 69 | y = Softmax(v) 70 | 71 | print("Y = ", k+1 , ": ") 72 | print(y) 73 | 74 | if __name__ == '__main__': 75 | TestDeepReLU() -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.6/Conv.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy import signal 3 | 4 | 5 | def Conv(x, W): 6 | (wrow, wcol, numFilters) = W.shape 7 | (xrow, xcol) = x.shape 8 | 9 | yrow = xrow - wrow + 1 10 | ycol = xcol - wcol + 1 11 | 12 | y = np.zeros((yrow, ycol, numFilters)) 13 | 14 | 
for k in range(numFilters): 15 | filter = W[:, :, k] 16 | filter = np.rot90(np.squeeze(filter), 2) 17 | y[:, :, k] = signal.convolve2d(x, filter, 'valid') 18 | 19 | return y -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.6/LoadMnistData.py: -------------------------------------------------------------------------------- 1 | """ 2 | Originally written by Martin Thoma 3 | https://martin-thoma.com/classify-mnist-with-pybrain/ 4 | """ 5 | 6 | from struct import unpack 7 | import gzip 8 | from numpy import uint8, zeros, float32 9 | 10 | 11 | # Read input images and labels(0-9). 12 | # Return it as list of tuples. 13 | # 14 | def LoadMnistData(imagefile, labelfile): 15 | # Open the images with gzip in read binary mode 16 | images = gzip.open(imagefile, 'rb') 17 | labels = gzip.open(labelfile, 'rb') 18 | 19 | # Read the binary data 20 | # We have to get big endian unsigned int. So we need '>I' 21 | 22 | # Get metadata for images 23 | images.read(4) # skip the magic_number 24 | number_of_images = images.read(4) 25 | number_of_images = unpack('>I', number_of_images)[0] 26 | rows = images.read(4) 27 | rows = unpack('>I', rows)[0] 28 | cols = images.read(4) 29 | cols = unpack('>I', cols)[0] 30 | 31 | # Get metadata for labels 32 | labels.read(4) # skip the magic_number 33 | N = labels.read(4) 34 | N = unpack('>I', N)[0] 35 | 36 | if number_of_images != N: 37 | raise Exception('number of labels did not match the number of images') 38 | 39 | # Get the data 40 | x = zeros((N, rows, cols), dtype=float32) # Initialize numpy array 41 | y = zeros((N, 1), dtype=uint8) # Initialize numpy array 42 | for i in range(N): 43 | if i % 1000 == 0: 44 | print("i: %i" % i) 45 | 46 | for row in range(rows): 47 | for col in range(cols): 48 | tmp_pixel = images.read(1) # Just a single byte 49 | tmp_pixel = unpack('>B', tmp_pixel)[0] 50 | x[i][row][col] = tmp_pixel 51 | 52 | tmp_label = labels.read(1) 53 | y[i] = unpack('>B', tmp_label)[0] 54 | 55 | return (x, y) -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.6/MNIST/t10k-images-idx3-ubyte.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Apress/matlab-deep-learning/00e2dfb1684b83ef16442b2518d74ed6b294353f/Deep-Learning-for-Beginners-master/Python/ch.6/MNIST/t10k-images-idx3-ubyte.gz -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.6/MNIST/t10k-images.idx3-ubyte: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Apress/matlab-deep-learning/00e2dfb1684b83ef16442b2518d74ed6b294353f/Deep-Learning-for-Beginners-master/Python/ch.6/MNIST/t10k-images.idx3-ubyte -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.6/MNIST/t10k-labels-idx1-ubyte.gz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Apress/matlab-deep-learning/00e2dfb1684b83ef16442b2518d74ed6b294353f/Deep-Learning-for-Beginners-master/Python/ch.6/MNIST/t10k-labels-idx1-ubyte.gz -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.6/MNIST/t10k-labels.idx1-ubyte: -------------------------------------------------------------------------------- 1 
| [binary IDX label data; contents omitted] -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.6/MNIST/train-labels.idx1-ubyte: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Apress/matlab-deep-learning/00e2dfb1684b83ef16442b2518d74ed6b294353f/Deep-Learning-for-Beginners-master/Python/ch.6/MNIST/train-labels.idx1-ubyte -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.6/MnistConv.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy import signal 3 | from Softmax import * 4 | from ReLU import * 5 | from Conv import * 6 | from Pool import * 7 | 8 | 9 | def MnistConv(W1, W5, Wo, X, D): 10 | alpha = 0.01 11 | beta = 0.95 12 | 13 | momentum1 = np.zeros_like(W1) 14 | momentum5 = np.zeros_like(W5) 15 | momentumo = np.zeros_like(Wo) 16 | 17 | N = len(D) 18 | 19 | bsize = 100 20 | blist = np.arange(0, N, bsize) 21 | 22 | for batch in range(len(blist)): 23 | dW1 = np.zeros_like(W1) 24 | dW5 = np.zeros_like(W5) 25 | dWo = np.zeros_like(Wo) 26 | 27 | begin = blist[batch] 28 | 29 | for k in range(begin, begin+bsize): 30 | # Forward pass = inference 31 | x = X[k, :, :] 32 | y1 = Conv(x, W1) 33 | y2 = ReLU(y1) 34 | y3 = Pool(y2) 35 | y4 = np.reshape(y3, (-1, 1)) 36 | v5 = np.matmul(W5, y4) 37 | y5 = ReLU(v5) 38 | v = np.matmul(Wo, y5) 39 | y = Softmax(v) 40 | 41 | # one-hot encoding 42 | d = np.zeros((10, 1)) 43 | d[D[k][0]][0] = 1 44 | 45 | # Backpropagation 46 | e = d - y 47 | delta = e 48 | 49 | e5 = np.matmul(Wo.T, delta) # Hidden(ReLU) 50 | delta5 = (y5 > 0) * e5 51 | 52 | e4 = np.matmul(W5.T, delta5) # Pooling layer 53 | 54 | e3 = np.reshape(e4, y3.shape) 55 | 56 | e2 = np.zeros_like(y2) # pooling 57 | W3 = np.ones_like(y2) / (2*2) 58 | for c in range(20): 59 | e2[:, :, c] = np.kron(e3[:, :, c], np.ones((2, 2))) * W3[:, :, c] 60 | 61 | delta2 = (y2 > 0) * e2 62 | 63 | delta1_x = np.zeros_like(W1) 64 | for c in range(20): 65 | delta1_x[:, :, c] = signal.convolve2d(x[:, :], np.rot90(delta2[:, :, c], 2), 'valid') 66 | 67 | 68 | dW1 = dW1 + delta1_x 69 | dW5 = dW5 + np.matmul(delta5, y4.T) 70 | dWo = dWo + np.matmul(delta, y5.T) 71 | 72 | dW1 = dW1 / bsize 73 | dW5 = dW5 / bsize 74 | dWo = dWo / bsize 75 | 76 | momentum1 = alpha*dW1 + beta*momentum1 77 | W1 = W1 + momentum1 78 | 79 | momentum5 = alpha*dW5 + beta*momentum5 80 | W5 = W5 + momentum5 81 | 82 | momentumo = alpha*dWo + beta*momentumo 83 | Wo = Wo + momentumo 84 | 85 | return W1, W5, Wo
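For intuition about the pooling backprop inside MnistConv.py above: the np.kron step spreads each pooled-layer error uniformly over its 2x2 window, mirroring the forward mean pooling. A minimal standalone check with made-up numbers:

import numpy as np

e3 = np.array([[4.0]])                          # error at one pooled unit (hypothetical value)
e2 = np.kron(e3, np.ones((2, 2))) * (1/(2*2))   # upsample and weight by the mean-pool factor
print(e2)                                       # [[1. 1.] [1. 1.]]: the error is split evenly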
-------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.6/Pool.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy import signal 3 | 4 | 5 | def Pool(x): 6 | (xrow, xcol, numFilters) = x.shape 7 | y = np.zeros((int(xrow/2), int(xcol/2), numFilters)) 8 | 9 | for k in range(numFilters): 10 | filter = np.ones((2,2)) / (2*2) 11 | image = signal.convolve2d(x[:, :, k], filter, 'valid') 12 | 13 | y[:, :, k] = image[::2, ::2] 14 | 15 | return y 16 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.6/ReLU.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def ReLU(x): 5 | return np.maximum(0, x) -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.6/Sigmoid.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def Sigmoid(x): 5 | return 1.0 / (1.0 + np.exp(-x)) -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.6/Softmax.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def Softmax(x): 5 | x = np.subtract(x, np.max(x)) # prevent overflow 6 | ex = np.exp(x) 7 | 8 | return ex / np.sum(ex) -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/Python/ch.6/TestMnistConv.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy import signal 3 | from LoadMnistData import * 4 | from Softmax import * 5 | from ReLU import * 6 | from Conv import * 7 | from Pool import * 8 | from MnistConv import * 9 | 10 | 11 | # Learn 12 | # 13 | Images, Labels = LoadMnistData('MNIST/t10k-images-idx3-ubyte.gz', 'MNIST/t10k-labels-idx1-ubyte.gz') # forward slashes work on Windows and Unix alike 14 | Images = np.divide(Images, 255) 15 | 16 | W1 = 1e-2 * np.random.randn(9, 9, 20) 17 | W5 = np.random.uniform(-1, 1, (100, 2000)) * np.sqrt(6) / np.sqrt(360 + 2000) 18 | Wo = np.random.uniform(-1, 1, ( 10, 100)) * np.sqrt(6) / np.sqrt( 10 + 100) 19 | 20 | X = Images[0:8000, :, :] 21 | D = Labels[0:8000] 22 | 23 | for _epoch in range(3): 24 | print(_epoch) 25 | W1, W5, Wo = MnistConv(W1, W5, Wo, X, D) 26 | 27 | 28 | # Test 29 | # 30 | X = Images[8000:10000, :, :] 31 | D = Labels[8000:10000] 32 | 33 | acc = 0 34 | N = len(D) 35 | for k in range(N): 36 | x = X[k, :, :] 37 | 38 | y1 = Conv(x, W1) 39 | y2 = ReLU(y1) 40 | y3 = Pool(y2) 41 | y4 = np.reshape(y3, (-1, 1)) 42 | v5 = np.matmul(W5, y4) 43 | y5 = ReLU(v5) 44 | v = np.matmul(Wo, y5) 45 | y = Softmax(v) 46 | 47 | i = np.argmax(y) 48 | if i == D[k][0]: 49 | acc = acc + 1 50 | 51 | acc = acc / N 52 | print("Accuracy is : ", acc) 53 | 54 | 55 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/README.md: -------------------------------------------------------------------------------- 1 | # Deep-Learning-for-Beginners 2 | Sample code in MATLAB/Octave and Python for Deep Learning for Beginners 3 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.2/DeltaBatch.m:
-------------------------------------------------------------------------------- 1 | function W = DeltaBatch(W, X, D) 2 | alpha = 0.9; 3 | 4 | dWsum = zeros(3, 1); 5 | 6 | N = 4; 7 | for k = 1:N 8 | x = X(k, :)'; 9 | d = D(k); 10 | 11 | v = W*x; 12 | y = Sigmoid(v); 13 | 14 | e = d - y; 15 | delta = y*(1-y)*e; 16 | 17 | dW = alpha*delta*x; 18 | 19 | dWsum = dWsum + dW; 20 | end 21 | dWavg = dWsum / N; 22 | 23 | W(1) = W(1) + dWavg(1); 24 | W(2) = W(2) + dWavg(2); 25 | W(3) = W(3) + dWavg(3); 26 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.2/DeltaSGD.m: -------------------------------------------------------------------------------- 1 | function W = DeltaSGD(W, X, D) 2 | alpha = 0.9; 3 | 4 | N = 4; 5 | for k = 1:N 6 | x = X(k, :)'; 7 | d = D(k); 8 | 9 | v = W*x; 10 | y = Sigmoid(v); 11 | 12 | e = d - y; 13 | delta = y*(1-y)*e; 14 | 15 | dW = alpha*delta*x; % delta rule 16 | 17 | W(1) = W(1) + dW(1); 18 | W(2) = W(2) + dW(2); 19 | W(3) = W(3) + dW(3); 20 | end 21 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.2/DeltaXOR.m: -------------------------------------------------------------------------------- 1 | function W = DeltaXOR(W, X, D) 2 | alpha = 0.9; 3 | 4 | N = 4; 5 | for k = 1:N 6 | x = X(k, :)'; 7 | d = D(k); 8 | 9 | v = W*x; 10 | y = Sigmoid(v); 11 | 12 | e = d - y; 13 | delta = y*(1-y)*e; 14 | 15 | dW = alpha*delta*x; % delta rule 16 | 17 | W(1) = W(1) + dW(1); 18 | W(2) = W(2) + dW(2); 19 | W(3) = W(3) + dW(3); 20 | end 21 | end 22 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.2/SGDvsBatch.m: -------------------------------------------------------------------------------- 1 | clear all 2 | 3 | X = [ 0 0 1; 4 | 0 1 1; 5 | 1 0 1; 6 | 1 1 1; 7 | ]; 8 | 9 | D = [ 0 10 | 0 11 | 1 12 | 1 13 | ]; 14 | 15 | 16 | E1 = zeros(1000, 1); 17 | E2 = zeros(1000, 1); 18 | 19 | W1 = 2*rand(1, 3) - 1; 20 | W2 = W1; 21 | 22 | for epoch = 1:1000 % train 23 | W1 = DeltaSGD(W1, X, D); 24 | W2 = DeltaBatch(W2, X, D); 25 | 26 | es1 = 0; 27 | es2 = 0; 28 | N = 4; 29 | for k = 1:N 30 | x = X(k, :)'; 31 | d = D(k); 32 | 33 | v1 = W1*x; 34 | y1 = Sigmoid(v1); 35 | es1 = es1 + (d - y1)^2; 36 | 37 | v2 = W2*x; 38 | y2 = Sigmoid(v2); 39 | es2 = es2 + (d - y2)^2; 40 | end 41 | E1(epoch) = es1 / N; 42 | E2(epoch) = es2 / N; 43 | end 44 | 45 | plot(E1, 'r') 46 | hold on 47 | plot(E2, 'b:') 48 | xlabel('Epoch') 49 | ylabel('Average of Training error') 50 | legend('SGD', 'Batch') 51 | 52 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.2/Sigmoid.m: -------------------------------------------------------------------------------- 1 | function y = Sigmoid(x) 2 | y = 1 / (1 + exp(-x)); 3 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.2/TestDeltaBatch.m: -------------------------------------------------------------------------------- 1 | clear all 2 | 3 | X = [ 0 0 1; 4 | 0 1 1; 5 | 1 0 1; 6 | 1 1 1; 7 | ]; 8 | 9 | D = [ 0 10 | 0 11 | 1 12 | 1 13 | ]; 14 | 15 | W = 2*rand(1, 3) - 1; 16 | 17 | for epoch = 1:40000 18 | W = DeltaBatch(W, X, D); 19 | end 20 | 21 | N = 4; 22 | for k = 1:N 23 | x = X(k, :)'; 24 | v = W*x; 25 | y = Sigmoid(v) 26 | end -------------------------------------------------------------------------------- 
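A small porting note on SGDvsBatch.m above: W2 = W1; yields an independent copy because MATLAB assigns arrays by value. The Python version (SGDvsBatch.py) must copy explicitly, since plain assignment would alias the same ndarray and the in-place weight updates in DeltaSGD/DeltaBatch would corrupt the comparison. A sketch:

import numpy as np

W1 = 2*np.random.random((1, 3)) - 1
W2 = W1            # alias: mutating W2 in place also changes W1
W2 = np.array(W1)  # independent copy, as SGDvsBatch.py does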
/Deep-Learning-for-Beginners-master/ch.2/TestDeltaSGD.m: -------------------------------------------------------------------------------- 1 | clear all 2 | 3 | X = [ 0 0 1; 4 | 0 1 1; 5 | 1 0 1; 6 | 1 1 1; 7 | ]; 8 | 9 | D = [ 0 10 | 0 11 | 1 12 | 1 13 | ]; 14 | 15 | W = 2*rand(1, 3) - 1; 16 | 17 | for epoch = 1:10000 % train 18 | W = DeltaSGD(W, X, D); 19 | end 20 | 21 | N = 4; % inference 22 | for k = 1:N 23 | x = X(k, :)'; 24 | v = W*x; 25 | y = Sigmoid(v) 26 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.2/TestDeltaXOR.m: -------------------------------------------------------------------------------- 1 | clear all 2 | 3 | X = [ 0 0 1; 4 | 0 1 1; 5 | 1 0 1; 6 | 1 1 1; 7 | ]; 8 | 9 | D = [ 0 10 | 1 11 | 1 12 | 0 13 | ]; 14 | 15 | W = 2*rand(1, 3) - 1; 16 | 17 | for epoch = 1:40000 % train 18 | W = DeltaXOR(W, X, D); 19 | end 20 | 21 | N = 4; % inference 22 | for k = 1:N 23 | x = X(k, :)'; 24 | v = W*x; 25 | y = Sigmoid(v) 26 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.3/BackpropCE.m: -------------------------------------------------------------------------------- 1 | function [W1, W2] = BackpropCE(W1, W2, X, D) 2 | alpha = 0.9; 3 | 4 | N = 4; 5 | for k = 1:N 6 | x = X(k, :)'; % x = a column vector 7 | d = D(k); 8 | 9 | v1 = W1*x; 10 | y1 = Sigmoid(v1); 11 | v = W2*y1; 12 | y = Sigmoid(v); 13 | 14 | e = d - y; 15 | delta = e; 16 | 17 | e1 = W2'*delta; 18 | delta1 = y1.*(1-y1).*e1; 19 | 20 | dW1 = alpha*delta1*x'; 21 | W1 = W1 + dW1; 22 | 23 | dW2 = alpha*delta*y1'; 24 | W2 = W2 + dW2; 25 | end 26 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.3/BackpropMmt.m: -------------------------------------------------------------------------------- 1 | function [W1, W2] = BackpropMmt(W1, W2, X, D) 2 | alpha = 0.9; 3 | beta = 0.9; 4 | 5 | mmt1 = zeros(size(W1)); 6 | mmt2 = zeros(size(W2)); 7 | 8 | N = 4; 9 | for k = 1:N 10 | x = X(k, :)'; 11 | d = D(k); 12 | 13 | v1 = W1*x; 14 | y1 = Sigmoid(v1); 15 | v = W2*y1; 16 | y = Sigmoid(v); 17 | 18 | e = d - y; 19 | delta = y.*(1-y).*e; 20 | 21 | e1 = W2'*delta; 22 | delta1 = y1.*(1-y1).*e1; 23 | 24 | dW1 = alpha*delta1*x'; 25 | mmt1 = dW1 + beta*mmt1; 26 | W1 = W1 + mmt1; 27 | 28 | dW2 = alpha*delta*y1'; 29 | mmt2 = dW2 + beta*mmt2; 30 | W2 = W2 + mmt2; 31 | end 32 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.3/BackpropXOR.m: -------------------------------------------------------------------------------- 1 | function [W1, W2] = BackpropXOR(W1, W2, X, D) 2 | alpha = 0.9; 3 | 4 | N = 4; 5 | for k = 1:N 6 | x = X(k, :)'; 7 | d = D(k); 8 | 9 | v1 = W1*x; 10 | y1 = Sigmoid(v1); 11 | v = W2*y1; 12 | y = Sigmoid(v); 13 | 14 | e = d - y; 15 | delta = y.*(1-y).*e; 16 | 17 | e1 = W2'*delta; 18 | delta1 = y1.*(1-y1).*e1; 19 | 20 | dW1 = alpha*delta1*x'; 21 | W1 = W1 + dW1; 22 | 23 | dW2 = alpha*delta*y1'; 24 | W2 = W2 + dW2; 25 | end 26 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.3/CEvsSSE.m: -------------------------------------------------------------------------------- 1 | clear all 2 | 3 | X = [ 0 0 1; 4 | 0 1 1; 5 | 1 0 1; 6 | 1 1 1; 7 | ]; 8 | 9 | D = [ 0 10 | 0 11 | 1 12 | 1 13 | ]; 14 | 15 | 16 | E1 = zeros(1000, 1); 17 | E2 = zeros(1000, 1); 18 | 19 | W11 = 
2*rand(4, 3) - 1; % Cross entropy 20 | W12 = 2*rand(1, 4) - 1; % 21 | W21 = W11; % Sum of squared error 22 | W22 = W12; % 23 | 24 | for epoch = 1:1000 25 | [W11 W12] = BackpropCE(W11, W12, X, D); 26 | [W21 W22] = BackpropXOR(W21, W22, X, D); 27 | 28 | es1 = 0; 29 | es2 = 0; 30 | N = 4; 31 | for k = 1:N 32 | x = X(k, :)'; 33 | d = D(k); 34 | 35 | v1 = W11*x; 36 | y1 = Sigmoid(v1); 37 | v = W12*y1; 38 | y = Sigmoid(v); 39 | es1 = es1 + (d - y)^2; 40 | 41 | v1 = W21*x; 42 | y1 = Sigmoid(v1); 43 | v = W22*y1; 44 | y = Sigmoid(v); 45 | es2 = es2 + (d - y)^2; 46 | end 47 | E1(epoch) = es1 / N; 48 | E2(epoch) = es2 / N; 49 | end 50 | 51 | plot(E1, 'r') 52 | hold on 53 | plot(E2, 'b:') 54 | xlabel('Epoch') 55 | ylabel('Average of Training error') 56 | legend('Cross Entropy', 'Sum of Squared Error') 57 | 58 | -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.3/Sigmoid.m: -------------------------------------------------------------------------------- 1 | function y = Sigmoid(x) 2 | y = 1 ./ (1 + exp(-x)); 3 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.3/TestBackpropCE.m: -------------------------------------------------------------------------------- 1 | clear all 2 | 3 | X = [ 0 0 1; 4 | 0 1 1; 5 | 1 0 1; 6 | 1 1 1; 7 | ]; 8 | 9 | D = [ 0 10 | 1 11 | 1 12 | 0 13 | ]; 14 | 15 | W1 = 2*rand(4, 3) - 1; 16 | W2 = 2*rand(1, 4) - 1; 17 | 18 | for epoch = 1:10000 % train 19 | [W1 W2] = BackpropCE(W1, W2, X, D); 20 | end 21 | 22 | N = 4; % inference 23 | for k = 1:N 24 | x = X(k, :)'; 25 | v1 = W1*x; 26 | y1 = Sigmoid(v1); 27 | v = W2*y1; 28 | y = Sigmoid(v) 29 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.3/TestBackpropMmt.m: -------------------------------------------------------------------------------- 1 | clear all 2 | 3 | X = [ 0 0 1; 4 | 0 1 1; 5 | 1 0 1; 6 | 1 1 1; 7 | ]; 8 | 9 | D = [ 0 10 | 1 11 | 1 12 | 0 13 | ]; 14 | 15 | W1 = 2*rand(4, 3) - 1; 16 | W2 = 2*rand(1, 4) - 1; 17 | 18 | for epoch = 1:10000 % train 19 | [W1 W2] = BackpropMmt(W1, W2, X, D); 20 | end 21 | 22 | N = 4; % inference 23 | for k = 1:N 24 | x = X(k, :)'; 25 | v1 = W1*x; 26 | y1 = Sigmoid(v1); 27 | v = W2*y1; 28 | y = Sigmoid(v) 29 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.3/TestBackpropXOR.m: -------------------------------------------------------------------------------- 1 | clear all 2 | 3 | X = [ 0 0 1; 4 | 0 1 1; 5 | 1 0 1; 6 | 1 1 1; 7 | ]; 8 | 9 | D = [ 0 10 | 1 11 | 1 12 | 0 13 | ]; 14 | 15 | W1 = 2*rand(4, 3) - 1; 16 | W2 = 2*rand(1, 4) - 1; 17 | 18 | for epoch = 1:10000 % train 19 | [W1 W2] = BackpropXOR(W1, W2, X, D); 20 | end 21 | 22 | N = 4; % inference 23 | for k = 1:N 24 | x = X(k, :)'; 25 | v1 = W1*x; 26 | y1 = Sigmoid(v1); 27 | v = W2*y1; 28 | y = Sigmoid(v) 29 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.4/MultiClass.m: -------------------------------------------------------------------------------- 1 | function [W1, W2] = MultiClass(W1, W2, X, D) 2 | alpha = 0.9; 3 | 4 | N = 5; 5 | for k = 1:N 6 | x = reshape(X(:, :, k), 25, 1); 7 | d = D(k, :)'; 8 | 9 | v1 = W1*x; 10 | y1 = Sigmoid(v1); 11 | v = W2*y1; 12 | y = Softmax(v); 13 | 14 | e = d - y; 15 | delta = e; 16 | 17 | e1 = W2'*delta; 18 | delta1 = y1.*(1-y1).*e1; 19 
| 20 | dW1 = alpha*delta1*x'; 21 | W1 = W1 + dW1; 22 | 23 | dW2 = alpha*delta*y1'; 24 | W2 = W2 + dW2; 25 | end 26 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.4/RealMultiClass.m: -------------------------------------------------------------------------------- 1 | clear all 2 | 3 | TestMultiClass; % W1, W2 4 | 5 | X = zeros(5, 5, 5); 6 | 7 | X(:, :, 1) = [ 0 0 1 1 0; 8 | 0 0 1 1 0; 9 | 0 1 0 1 0; 10 | 0 0 0 1 0; 11 | 0 1 1 1 0 12 | ]; 13 | 14 | X(:, :, 2) = [ 1 1 1 1 0; 15 | 0 0 0 0 1; 16 | 0 1 1 1 0; 17 | 1 0 0 0 1; 18 | 1 1 1 1 1 19 | ]; 20 | 21 | X(:, :, 3) = [ 1 1 1 1 0; 22 | 0 0 0 0 1; 23 | 0 1 1 1 0; 24 | 1 0 0 0 1; 25 | 1 1 1 1 0 26 | ]; 27 | 28 | X(:, :, 4) = [ 0 1 1 1 0; 29 | 0 1 0 0 0; 30 | 0 1 1 1 0; 31 | 0 0 0 1 0; 32 | 0 1 1 1 0 33 | ]; 34 | 35 | X(:, :, 5) = [ 0 1 1 1 1; 36 | 0 1 0 0 0; 37 | 0 1 1 1 0; 38 | 0 0 0 1 0; 39 | 1 1 1 1 0 40 | ]; 41 | 42 | N = 5; % inference 43 | for k = 1:N 44 | x = reshape(X(:, :, k), 25, 1); 45 | v1 = W1*x; 46 | y1 = Sigmoid(v1); 47 | v = W2*y1; 48 | y = Softmax(v) 49 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.4/Sigmoid.m: -------------------------------------------------------------------------------- 1 | function y = Sigmoid(x) 2 | y = 1 ./ (1 + exp(-x)); 3 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.4/Softmax.m: -------------------------------------------------------------------------------- 1 | function y = Softmax(x) 2 | ex = exp(x); 3 | y = ex / sum(ex); 4 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.4/TestMultiClass.m: -------------------------------------------------------------------------------- 1 | clear all 2 | 3 | rng(3); 4 | 5 | X = zeros(5, 5, 5); 6 | 7 | X(:, :, 1) = [ 0 1 1 0 0; 8 | 0 0 1 0 0; 9 | 0 0 1 0 0; 10 | 0 0 1 0 0; 11 | 0 1 1 1 0 12 | ]; 13 | 14 | X(:, :, 2) = [ 1 1 1 1 0; 15 | 0 0 0 0 1; 16 | 0 1 1 1 0; 17 | 1 0 0 0 0; 18 | 1 1 1 1 1 19 | ]; 20 | 21 | X(:, :, 3) = [ 1 1 1 1 0; 22 | 0 0 0 0 1; 23 | 0 1 1 1 0; 24 | 0 0 0 0 1; 25 | 1 1 1 1 0 26 | ]; 27 | 28 | X(:, :, 4) = [ 0 0 0 1 0; 29 | 0 0 1 1 0; 30 | 0 1 0 1 0; 31 | 1 1 1 1 1; 32 | 0 0 0 1 0 33 | ]; 34 | 35 | X(:, :, 5) = [ 1 1 1 1 1; 36 | 1 0 0 0 0; 37 | 1 1 1 1 0; 38 | 0 0 0 0 1; 39 | 1 1 1 1 0 40 | ]; 41 | 42 | D = [ 1 0 0 0 0; 43 | 0 1 0 0 0; 44 | 0 0 1 0 0; 45 | 0 0 0 1 0; 46 | 0 0 0 0 1 47 | ]; 48 | 49 | W1 = 2*rand(50, 25) - 1; 50 | W2 = 2*rand( 5, 50) - 1; 51 | 52 | for epoch = 1:10000 % train 53 | [W1 W2] = MultiClass(W1, W2, X, D); 54 | end 55 | 56 | N = 5; % inference 57 | for k = 1:N 58 | x = reshape(X(:, :, k), 25, 1); 59 | v1 = W1*x; 60 | y1 = Sigmoid(v1); 61 | v = W2*y1; 62 | y = Softmax(v) 63 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.4/rng.m: -------------------------------------------------------------------------------- 1 | function rng(x) 2 | randn('seed', x) 3 | rand('seed', x) 4 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.5/DeepDropout.m: -------------------------------------------------------------------------------- 1 | function [W1, W2, W3, W4] = DeepDropout(W1, W2, W3, W4, X, D) 2 | alpha = 0.01; 3 | 4 | N = 5; 5 | for k = 1:N 6 | x = reshape(X(:, :, k), 25, 
1); 7 | v1 = W1*x; 8 | y1 = Sigmoid(v1); 9 | y1 = y1 .* Dropout(y1, 0.2); 10 | 11 | v2 = W2*y1; 12 | y2 = Sigmoid(v2); 13 | y2 = y2 .* Dropout(y2, 0.2); 14 | 15 | v3 = W3*y2; 16 | y3 = Sigmoid(v3); 17 | y3 = y3 .* Dropout(y3, 0.2); 18 | 19 | v = W4*y3; 20 | y = Softmax(v); 21 | 22 | d = D(k, :)'; 23 | 24 | e = d - y; 25 | delta = e; 26 | 27 | e3 = W4'*delta; 28 | delta3 = y3.*(1-y3).*e3; 29 | 30 | e2 = W3'*delta3; 31 | delta2 = y2.*(1-y2).*e2; 32 | 33 | e1 = W2'*delta2; 34 | delta1 = y1.*(1-y1).*e1; 35 | 36 | dW4 = alpha*delta*y3'; 37 | W4 = W4 + dW4; 38 | 39 | dW3 = alpha*delta3*y2'; 40 | W3 = W3 + dW3; 41 | 42 | dW2 = alpha*delta2*y1'; 43 | W2 = W2 + dW2; 44 | 45 | dW1 = alpha*delta1*x'; 46 | W1 = W1 + dW1; 47 | end 48 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.5/DeepReLU.m: -------------------------------------------------------------------------------- 1 | function [W1, W2, W3, W4] = DeepReLU(W1, W2, W3, W4, X, D) 2 | alpha = 0.01; 3 | 4 | N = 5; 5 | for k = 1:N 6 | x = reshape(X(:, :, k), 25, 1); 7 | v1 = W1*x; 8 | y1 = ReLU(v1); 9 | 10 | v2 = W2*y1; 11 | y2 = ReLU(v2); 12 | 13 | v3 = W3*y2; 14 | y3 = ReLU(v3); 15 | 16 | v = W4*y3; 17 | y = Softmax(v); 18 | 19 | d = D(k, :)'; 20 | 21 | e = d - y; 22 | delta = e; 23 | 24 | e3 = W4'*delta; 25 | delta3 = (v3 > 0).*e3; 26 | 27 | e2 = W3'*delta3; 28 | delta2 = (v2 > 0).*e2; 29 | 30 | e1 = W2'*delta2; 31 | delta1 = (v1 > 0).*e1; 32 | 33 | dW4 = alpha*delta*y3'; 34 | W4 = W4 + dW4; 35 | 36 | dW3 = alpha*delta3*y2'; 37 | W3 = W3 + dW3; 38 | 39 | dW2 = alpha*delta2*y1'; 40 | W2 = W2 + dW2; 41 | 42 | dW1 = alpha*delta1*x'; 43 | W1 = W1 + dW1; 44 | end 45 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.5/Dropout.m: -------------------------------------------------------------------------------- 1 | function ym = Dropout(y, ratio) 2 | [m, n] = size(y); 3 | ym = zeros(m, n); 4 | 5 | num = round(m*n*(1-ratio)); 6 | idx = randperm(m*n, num); 7 | ym(idx) = 1 / (1-ratio); 8 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.5/ReLU.m: -------------------------------------------------------------------------------- 1 | function y = ReLU(x) 2 | y = max(0, x); 3 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.5/Sigmoid.m: -------------------------------------------------------------------------------- 1 | function y = Sigmoid(x) 2 | y = 1 ./ (1 + exp(-x)); 3 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.5/Softmax.m: -------------------------------------------------------------------------------- 1 | function y = Softmax(x) 2 | ex = exp(x); 3 | y = ex / sum(ex); 4 | end -------------------------------------------------------------------------------- /Deep-Learning-for-Beginners-master/ch.5/TestDeepDropout.m: -------------------------------------------------------------------------------- 1 | clear all 2 | 3 | X = zeros(5, 5, 5); 4 | 5 | X(:, :, 1) = [ 0 1 1 0 0; 6 | 0 0 1 0 0; 7 | 0 0 1 0 0; 8 | 0 0 1 0 0; 9 | 0 1 1 1 0 10 | ]; 11 | 12 | X(:, :, 2) = [ 1 1 1 1 0; 13 | 0 0 0 0 1; 14 | 0 1 1 1 0; 15 | 1 0 0 0 0; 16 | 1 1 1 1 1 17 | ]; 18 | 19 | X(:, :, 3) = [ 1 1 1 1 0; 20 | 0 0 0 0 1; 21 | 0 1 1 1 0; 22 | 0 0 0 0 1; 23 | 1 1 1 1 0 24 | ]; 25 | 26 | X(:, :, 4) = [ 0 0 0 1 
0;
27 |                0 0 1 1 0;
28 |                0 1 0 1 0;
29 |                1 1 1 1 1;
30 |                0 0 0 1 0
31 |              ];
32 | 
33 | X(:, :, 5) = [ 1 1 1 1 1;
34 |                1 0 0 0 0;
35 |                1 1 1 1 0;
36 |                0 0 0 0 1;
37 |                1 1 1 1 0
38 |              ];
39 | 
40 | D = [ 1 0 0 0 0;
41 |       0 1 0 0 0;
42 |       0 0 1 0 0;
43 |       0 0 0 1 0;
44 |       0 0 0 0 1
45 |     ];
46 | 
47 | W1 = 2*rand(20, 25) - 1;
48 | W2 = 2*rand(20, 20) - 1;
49 | W3 = 2*rand(20, 20) - 1;
50 | W4 = 2*rand( 5, 20) - 1;
51 | 
52 | for epoch = 1:20000                    % train
53 |     [W1, W2, W3, W4] = DeepDropout(W1, W2, W3, W4, X, D);
54 | end
55 | 
56 | N = 5;                                 % inference
57 | for k = 1:N
58 |     x = reshape(X(:, :, k), 25, 1);
59 |     v1 = W1*x;
60 |     y1 = Sigmoid(v1);
61 | 
62 |     v2 = W2*y1;
63 |     y2 = Sigmoid(v2);
64 | 
65 |     v3 = W3*y2;
66 |     y3 = Sigmoid(v3);
67 | 
68 |     v = W4*y3;
69 |     y = Softmax(v)                     % no semicolon: prints the result
70 | end
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.5/TestDeepReLU.m:
--------------------------------------------------------------------------------
1 | clear all
2 | 
3 | X = zeros(5, 5, 5);
4 | 
5 | X(:, :, 1) = [ 0 1 1 0 0;
6 |                0 0 1 0 0;
7 |                0 0 1 0 0;
8 |                0 0 1 0 0;
9 |                0 1 1 1 0
10 |              ];
11 | 
12 | X(:, :, 2) = [ 1 1 1 1 0;
13 |                0 0 0 0 1;
14 |                0 1 1 1 0;
15 |                1 0 0 0 0;
16 |                1 1 1 1 1
17 |              ];
18 | 
19 | X(:, :, 3) = [ 1 1 1 1 0;
20 |                0 0 0 0 1;
21 |                0 1 1 1 0;
22 |                0 0 0 0 1;
23 |                1 1 1 1 0
24 |              ];
25 | 
26 | X(:, :, 4) = [ 0 0 0 1 0;
27 |                0 0 1 1 0;
28 |                0 1 0 1 0;
29 |                1 1 1 1 1;
30 |                0 0 0 1 0
31 |              ];
32 | 
33 | X(:, :, 5) = [ 1 1 1 1 1;
34 |                1 0 0 0 0;
35 |                1 1 1 1 0;
36 |                0 0 0 0 1;
37 |                1 1 1 1 0
38 |              ];
39 | 
40 | D = [ 1 0 0 0 0;
41 |       0 1 0 0 0;
42 |       0 0 1 0 0;
43 |       0 0 0 1 0;
44 |       0 0 0 0 1
45 |     ];
46 | 
47 | W1 = 2*rand(20, 25) - 1;
48 | W2 = 2*rand(20, 20) - 1;
49 | W3 = 2*rand(20, 20) - 1;
50 | W4 = 2*rand( 5, 20) - 1;
51 | 
52 | for epoch = 1:10000                    % train
53 |     [W1, W2, W3, W4] = DeepReLU(W1, W2, W3, W4, X, D);
54 | end
55 | 
56 | N = 5;                                 % inference
57 | for k = 1:N
58 |     x = reshape(X(:, :, k), 25, 1);
59 |     v1 = W1*x;
60 |     y1 = ReLU(v1);
61 | 
62 |     v2 = W2*y1;
63 |     y2 = ReLU(v2);
64 | 
65 |     v3 = W3*y2;
66 |     y3 = ReLU(v3);
67 | 
68 |     v = W4*y3;
69 |     y = Softmax(v)                     % no semicolon: prints the result
70 | end
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/Conv.m:
--------------------------------------------------------------------------------
1 | function y = Conv(x, W)
2 | % 'valid' 2-D convolution of the input image x with a bank of filters W.
3 | % Each filter is rotated 180 degrees so that conv2 computes correlation.
4 | 
5 | [wrow, wcol, numFilters] = size(W);
6 | [xrow, xcol, ~ ] = size(x);
7 | 
8 | yrow = xrow - wrow + 1;
9 | ycol = xcol - wcol + 1;
10 | 
11 | y = zeros(yrow, ycol, numFilters);
12 | 
13 | for k = 1:numFilters
14 |     filter = W(:, :, k);
15 |     filter = rot90(squeeze(filter), 2);
16 |     y(:, :, k) = conv2(x, filter, 'valid');
17 | end
18 | 
19 | end
20 | 
21 | 
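Conv.m above produces one feature map per filter, shrinking each spatial dimension by the filter size minus one. The following sketch is not part of the book's files; the random x and W are made-up stand-ins for an image and a filter bank, used only to confirm the output shape and the correlation interpretation.

% Shape and value check for Conv.m (illustrative values only).
x = rand(28, 28);                   % one 28x28 input image
W = randn(9, 9, 20);                % a bank of twenty 9x9 filters
y = Conv(x, W);
size(y)                             % prints 20 20 20: (28 - 9 + 1) per side

% Because each kernel is rotated 180 degrees before conv2, every output
% pixel is a plain correlation sum over the patch under the filter:
k = 1;
s = sum(sum(x(1:9, 1:9) .* W(:, :, k)));
abs(s - y(1, 1, k))                 % ~0 up to round-off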
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/MNIST/t10k-images.idx3-ubyte:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Apress/matlab-deep-learning/00e2dfb1684b83ef16442b2518d74ed6b294353f/Deep-Learning-for-Beginners-master/ch.6/MNIST/t10k-images.idx3-ubyte
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/MNIST/t10k-labels.idx1-ubyte:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Apress/matlab-deep-learning/00e2dfb1684b83ef16442b2518d74ed6b294353f/Deep-Learning-for-Beginners-master/ch.6/MNIST/t10k-labels.idx1-ubyte
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/MNIST/train-labels.idx1-ubyte:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Apress/matlab-deep-learning/00e2dfb1684b83ef16442b2518d74ed6b294353f/Deep-Learning-for-Beginners-master/ch.6/MNIST/train-labels.idx1-ubyte
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/MnistConv.m:
--------------------------------------------------------------------------------
1 | function [W1, W5, Wo] = MnistConv(W1, W5, Wo, X, D)
2 | % One epoch of training for the MNIST network: mini-batch gradient
3 | % descent with momentum. W1: conv filters, W5: hidden layer, Wo: output.
4 | 
5 | alpha = 0.01;
6 | beta = 0.95;
7 | 
8 | momentum1 = zeros(size(W1));
9 | momentum5 = zeros(size(W5));
10 | momentumo = zeros(size(Wo));
11 | 
12 | N = length(D);
13 | 
14 | bsize = 100;
15 | blist = 1:bsize:(N-bsize+1);
16 | 
17 | % One epoch loop
18 | %
19 | for batch = 1:length(blist)
20 |     dW1 = zeros(size(W1));
21 |     dW5 = zeros(size(W5));
22 |     dWo = zeros(size(Wo));
23 | 
24 |     % Mini-batch loop
25 |     %
26 |     begin = blist(batch);
27 |     for k = begin:begin+bsize-1
28 |         % Forward pass = inference
29 |         %
30 |         x = X(:, :, k);               % Input,       28x28
31 |         y1 = Conv(x, W1);             % Convolution, 20x20x20
32 |         y2 = ReLU(y1);                %
33 |         y3 = Pool(y2);                % Pooling,     10x10x20
34 |         y4 = reshape(y3, [], 1);      % Flatten,     2000
35 |         v5 = W5*y4;                   % ReLU,        100
36 |         y5 = ReLU(v5);                %
37 |         v = Wo*y5;                    % Softmax,     10x1
38 |         y = Softmax(v);               %
39 | 
40 |         % One-hot encoding
41 |         %
42 |         d = zeros(10, 1);
43 |         d(sub2ind(size(d), D(k), 1)) = 1;
44 | 
45 |         % Backpropagation
46 |         %
47 |         e = d - y;                    % Output layer
48 |         delta = e;
49 | 
50 |         e5 = Wo' * delta;             % Hidden(ReLU) layer
51 |         delta5 = (y5 > 0) .* e5;
52 | 
53 |         e4 = W5' * delta5;            % Pooling layer
54 | 
55 |         e3 = reshape(e4, size(y3));
56 | 
57 |         e2 = zeros(size(y2));
58 |         W3 = ones(size(y2)) / (2*2);  % mean pooling spreads error evenly
59 |         for c = 1:20
60 |             e2(:, :, c) = kron(e3(:, :, c), ones([2 2])) .* W3(:, :, c);
61 |         end
62 | 
63 |         delta2 = (y2 > 0) .* e2;      % ReLU layer
64 | 
65 |         delta1_x = zeros(size(W1));   % Convolutional layer
66 |         for c = 1:20
67 |             delta1_x(:, :, c) = conv2(x(:, :), rot90(delta2(:, :, c), 2), 'valid');
68 |         end
69 | 
70 |         dW1 = dW1 + delta1_x;
71 |         dW5 = dW5 + delta5*y4';
72 |         dWo = dWo + delta *y5';
73 |     end
74 | 
75 |     % Update weights
76 |     %
77 |     dW1 = dW1 / bsize;
78 |     dW5 = dW5 / bsize;
79 |     dWo = dWo / bsize;
80 | 
81 |     momentum1 = alpha*dW1 + beta*momentum1;
82 |     W1 = W1 + momentum1;
83 | 
84 |     momentum5 = alpha*dW5 + beta*momentum5;
85 |     W5 = W5 + momentum5;
86 | 
87 |     momentumo = alpha*dWo + beta*momentumo;
88 |     Wo = Wo + momentumo;
89 | end
90 | 
91 | end
92 | 
93 | 
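The weight update at the end of MnistConv.m is classic momentum: each averaged mini-batch gradient is added to a decaying sum of its predecessors. As a standalone illustration (the values below are hypothetical; only the update rule is taken from the file above), feeding in the same gradient repeatedly shows how the step size is amplified:

% Standalone illustration of the momentum rule in MnistConv.m.
alpha = 0.01;                  % learning rate
beta  = 0.95;                  % momentum coefficient
momentum = zeros(2, 2);
W  = zeros(2, 2);
dW = ones(2, 2);               % pretend averaged gradient of one mini-batch
for step = 1:3
    momentum = alpha*dW + beta*momentum;   % decaying sum of past gradients
    W = W + momentum;
end
% With identical gradients the per-step change approaches
% alpha/(1-beta) = 0.2, i.e. a 20x amplification over plain gradient descent.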
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/MnistConv.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Apress/matlab-deep-learning/00e2dfb1684b83ef16442b2518d74ed6b294353f/Deep-Learning-for-Beginners-master/ch.6/MnistConv.mat
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/PlotFeatures.m:
--------------------------------------------------------------------------------
1 | clear all
2 | 
3 | load('MnistConv.mat')
4 | 
5 | k = 2;
6 | x = X(:, :, k);
7 | y1 = Conv(x, W1);            % Convolution, 20x20x20
8 | y2 = ReLU(y1);               %
9 | y3 = Pool(y2);               % Pool,        10x10x20
10 | y4 = reshape(y3, [], 1);     % Flatten,     2000
11 | v5 = W5*y4;                  % ReLU,        100
12 | y5 = ReLU(v5);               %
13 | v = Wo*y5;                   % Softmax,     10
14 | y = Softmax(v);              %
15 | 
16 | 
17 | figure;
18 | display_network(x(:));
19 | title('Input Image')
20 | 
21 | convFilters = zeros(9*9, 20);
22 | for i = 1:20
23 |     filter = W1(:, :, i);
24 |     convFilters(:, i) = filter(:);
25 | end
26 | figure
27 | display_network(convFilters);
28 | title('Convolution Filters')
29 | 
30 | fList = zeros(20*20, 20);
31 | for i = 1:20
32 |     feature = y1(:, :, i);
33 |     fList(:, i) = feature(:);
34 | end
35 | figure
36 | display_network(fList);
37 | title('Features [Convolution]')
38 | 
39 | fList = zeros(20*20, 20);
40 | for i = 1:20
41 |     feature = y2(:, :, i);
42 |     fList(:, i) = feature(:);
43 | end
44 | figure
45 | display_network(fList);
46 | title('Features [Convolution + ReLU]')
47 | 
48 | fList = zeros(10*10, 20);
49 | for i = 1:20
50 |     feature = y3(:, :, i);
51 |     fList(:, i) = feature(:);
52 | end
53 | figure
54 | display_network(fList);
55 | title('Features [Convolution + ReLU + MeanPool]')
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/Pool.m:
--------------------------------------------------------------------------------
1 | function y = Pool(x)
2 | %
3 | % 2x2 mean pooling
4 | %
5 | %
6 | [xrow, xcol, numFilters] = size(x);
7 | 
8 | y = zeros(xrow/2, xcol/2, numFilters);
9 | for k = 1:numFilters
10 |     filter = ones(2) / (2*2);      % for mean
11 |     image = conv2(x(:, :, k), filter, 'valid');
12 | 
13 |     y(:, :, k) = image(1:2:end, 1:2:end);
14 | end
15 | 
16 | end
17 | 
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/ReLU.m:
--------------------------------------------------------------------------------
1 | function y = ReLU(x)
2 |     y = max(0, x);
3 | end
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/Softmax.m:
--------------------------------------------------------------------------------
1 | function y = Softmax(x)
2 |     ex = exp(x);
3 |     y = ex / sum(ex);
4 | end
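Softmax.m divides exp(x) by its sum exactly as published, which can overflow to Inf for large activations. A numerically stable drop-in (an optional variant, not the book's code; the name StableSoftmax is made up here) subtracts max(x) first, which cancels in the ratio and leaves the result unchanged:

% StableSoftmax.m (hypothetical): a safer variant of Softmax.m above.
function y = StableSoftmax(x)
% exp(x - max(x)) never exceeds 1, so the sum cannot overflow, while the
% subtracted constant cancels between numerator and denominator.
    ex = exp(x - max(x));
    y = ex / sum(ex);
end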
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/TestMnistConv.m:
--------------------------------------------------------------------------------
1 | clear all
2 | 
3 | Images = loadMNISTImages('./MNIST/t10k-images.idx3-ubyte');
4 | Images = reshape(Images, 28, 28, []);
5 | Labels = loadMNISTLabels('./MNIST/t10k-labels.idx1-ubyte');
6 | Labels(Labels == 0) = 10;            % 0 --> 10
7 | 
8 | rng(1);
9 | 
10 | % Learning
11 | %
12 | W1 = 1e-2*randn([9 9 20]);
13 | W5 = (2*rand(100, 2000) - 1) * sqrt(6) / sqrt(360 + 2000);
14 | Wo = (2*rand( 10,  100) - 1) * sqrt(6) / sqrt( 10 +  100);
15 | 
16 | X = Images(:, :, 1:8000);
17 | D = Labels(1:8000);
18 | 
19 | for epoch = 1:3
20 |     epoch                            % no semicolon: prints progress
21 |     [W1, W5, Wo] = MnistConv(W1, W5, Wo, X, D);
22 | end
23 | 
24 | save('MnistConv.mat');
25 | 
26 | 
27 | % Test
28 | %
29 | X = Images(:, :, 8001:10000);
30 | D = Labels(8001:10000);
31 | 
32 | acc = 0;
33 | N = length(D);
34 | for k = 1:N
35 |     x = X(:, :, k);                  % Input,       28x28
36 | 
37 |     y1 = Conv(x, W1);                % Convolution, 20x20x20
38 |     y2 = ReLU(y1);                   %
39 |     y3 = Pool(y2);                   % Pool,        10x10x20
40 |     y4 = reshape(y3, [], 1);         % Flatten,     2000
41 |     v5 = W5*y4;                      % ReLU,        100
42 |     y5 = ReLU(v5);                   %
43 |     v = Wo*y5;                       % Softmax,     10
44 |     y = Softmax(v);                  %
45 | 
46 |     [~, i] = max(y);
47 |     if i == D(k)
48 |         acc = acc + 1;
49 |     end
50 | end
51 | 
52 | acc = acc / N;
53 | fprintf('Accuracy is %f\n', acc);
54 | 
55 | 
56 | 
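TestMnistConv.m reports a single accuracy number. A per-class view can be read off a confusion matrix; the sketch below is an optional extension, not part of the book's code, and assumes the test-set variables (X, D, N) and trained weights from the script above are still in the workspace:

% Optional extension: confusion matrix over the 2000 test digits.
confusion = zeros(10, 10);          % rows: true class, cols: predicted class
for k = 1:N
    x  = X(:, :, k);
    y4 = reshape(Pool(ReLU(Conv(x, W1))), [], 1);   % same pipeline as above
    y  = Softmax(Wo * ReLU(W5 * y4));
    [~, i] = max(y);
    confusion(D(k), i) = confusion(D(k), i) + 1;
end
disp(confusion)                     % diagonal entries are correct predictions
sum(diag(confusion)) / N            % reproduces the accuracy printed above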
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/display_network.m:
--------------------------------------------------------------------------------
1 | function [h, array] = display_network(A, opt_normalize, opt_graycolor, cols, opt_colmajor)
2 | % This function visualizes filters in matrix A. Each column of A is a
3 | % filter. We will reshape each column into a square image and visualize it
4 | % on each cell of the visualization panel.
5 | % All other parameters are optional; usually you do not need to worry
6 | % about them.
7 | % opt_normalize: whether we need to normalize the filters so that all of
8 | % them can have similar contrast. Default value is true.
9 | % opt_graycolor: whether we use gray as the heat map. Default is true.
10 | % cols: how many columns are there in the display. Default value is the
11 | % square root of the number of columns in A.
12 | % opt_colmajor: you can switch convention to row major for A. In that
13 | % case, each row of A is a filter. Default value is false.
14 | warning off all
15 | 
16 | if ~exist('opt_normalize', 'var') || isempty(opt_normalize)
17 |     opt_normalize = true;
18 | end
19 | 
20 | if ~exist('opt_graycolor', 'var') || isempty(opt_graycolor)
21 |     opt_graycolor = true;
22 | end
23 | 
24 | if ~exist('opt_colmajor', 'var') || isempty(opt_colmajor)
25 |     opt_colmajor = false;
26 | end
27 | 
28 | % rescale
29 | A = A - mean(A(:));
30 | 
31 | if opt_graycolor, colormap(gray); end
32 | 
33 | % compute rows, cols
34 | [L M] = size(A);
35 | sz = sqrt(L);
36 | buf = 1;
37 | if ~exist('cols', 'var')
38 |     if floor(sqrt(M))^2 ~= M
39 |         n = ceil(sqrt(M));
40 |         while mod(M, n) ~= 0 && n < 1.2*sqrt(M), n = n+1; end
41 |         m = ceil(M/n);
42 |     else
43 |         n = sqrt(M);
44 |         m = n;
45 |     end
46 | else
47 |     n = cols;
48 |     m = ceil(M/n);
49 | end
50 | 
51 | array = -ones(buf+m*(sz+buf), buf+n*(sz+buf));
52 | 
53 | if ~opt_graycolor
54 |     array = 0.1 .* array;
55 | end
56 | 
57 | 
58 | if ~opt_colmajor
59 |     k = 1;
60 |     for i = 1:m
61 |         for j = 1:n
62 |             if k > M
63 |                 continue;
64 |             end
65 |             clim = max(abs(A(:,k)));
66 |             if opt_normalize
67 |                 array(buf+(i-1)*(sz+buf)+(1:sz), buf+(j-1)*(sz+buf)+(1:sz)) = reshape(A(:,k), sz, sz)/clim;
68 |             else
69 |                 array(buf+(i-1)*(sz+buf)+(1:sz), buf+(j-1)*(sz+buf)+(1:sz)) = reshape(A(:,k), sz, sz)/max(abs(A(:)));
70 |             end
71 |             k = k+1;
72 |         end
73 |     end
74 | else
75 |     k = 1;
76 |     for j = 1:n
77 |         for i = 1:m
78 |             if k > M
79 |                 continue;
80 |             end
81 |             clim = max(abs(A(:,k)));
82 |             if opt_normalize
83 |                 array(buf+(i-1)*(sz+buf)+(1:sz), buf+(j-1)*(sz+buf)+(1:sz)) = reshape(A(:,k), sz, sz)/clim;
84 |             else
85 |                 array(buf+(i-1)*(sz+buf)+(1:sz), buf+(j-1)*(sz+buf)+(1:sz)) = reshape(A(:,k), sz, sz);
86 |             end
87 |             k = k+1;
88 |         end
89 |     end
90 | end
91 | 
92 | if opt_graycolor
93 |     h = imagesc(array, 'EraseMode', 'none', [-1 1]);
94 | else
95 |     h = imagesc(array, 'EraseMode', 'none', [-1 1]);
96 | end
97 | axis image off
98 | 
99 | drawnow;
100 | 
101 | warning on all
102 | 
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/loadMNISTImages.m:
--------------------------------------------------------------------------------
1 | function images = loadMNISTImages(filename)
2 | %loadMNISTImages returns a [#pixels x #images] (784 x N) matrix containing
3 | %the raw MNIST images, rescaled to [0, 1]
4 | 
5 | fp = fopen(filename, 'rb');
6 | assert(fp ~= -1, ['Could not open ', filename, '']);
7 | 
8 | magic = fread(fp, 1, 'int32', 0, 'ieee-be');
9 | assert(magic == 2051, ['Bad magic number in ', filename, '']);
10 | 
11 | numImages = fread(fp, 1, 'int32', 0, 'ieee-be');
12 | numRows = fread(fp, 1, 'int32', 0, 'ieee-be');
13 | numCols = fread(fp, 1, 'int32', 0, 'ieee-be');
14 | 
15 | images = fread(fp, inf, 'unsigned char=>unsigned char');
16 | images = reshape(images, numCols, numRows, numImages);
17 | images = permute(images, [2 1 3]);
18 | 
19 | fclose(fp);
20 | 
21 | % Reshape to #pixels x #examples
22 | images = reshape(images, size(images, 1) * size(images, 2), size(images, 3));
23 | % Convert to double and rescale to [0,1]
24 | images = double(images) / 255;
25 | 
26 | end
27 | 
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/loadMNISTLabels.m:
--------------------------------------------------------------------------------
1 | function labels = loadMNISTLabels(filename)
2 | %loadMNISTLabels returns a [number of MNIST images]x1 matrix containing
3 | %the labels for the MNIST images
4 | 
5 | fp = fopen(filename, 'rb');
6 | assert(fp ~= -1, ['Could not open ', filename, '']);
7 | 
8 | magic = fread(fp, 1, 'int32', 0, 'ieee-be');
9 | assert(magic == 2049, ['Bad magic number in ', filename, '']);
10 | 
11 | numLabels = fread(fp, 1, 'int32', 0, 'ieee-be');
12 | 
13 | labels = fread(fp, inf, 'unsigned char');
14 | 
15 | assert(size(labels,1) == numLabels, 'Mismatch in label count');
16 | 
17 | fclose(fp);
18 | 
19 | end
20 | 
--------------------------------------------------------------------------------
/Deep-Learning-for-Beginners-master/ch.6/rng.m:
--------------------------------------------------------------------------------
1 | function rng(x)
2 |     randn('seed', x)    % legacy-generator seeding: shims the built-in rng
3 |     rand('seed', x)     % so the scripts also run on older MATLAB versions
4 | end
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Apress/matlab-deep-learning/00e2dfb1684b83ef16442b2518d74ed6b294353f/LICENSE.txt
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Apress Source Code
2 | 
3 | This repository accompanies [*MATLAB Deep Learning*](http://www.apress.com/9781484228449) by Phil Kim (Apress, 2017).
4 | 
5 | [comment]: #cover
6 | 
7 | 
8 | Download the files as a zip using the green button, or clone the repository to your machine using Git.
9 | 
10 | ## Releases
11 | 
12 | Release v1.0 corresponds to the code in the published book, without corrections or updates.
13 | 
14 | ## Contributions
15 | 
16 | See the file contributing.md for more information on how you can contribute to this repository.
17 | 
--------------------------------------------------------------------------------
/contributing.md:
--------------------------------------------------------------------------------
1 | # Contributing to Apress Source Code
2 | 
3 | Copyright for Apress source code belongs to the author(s). However, under fair use you are encouraged to fork and contribute minor corrections and updates for the benefit of the author(s) and other readers.
4 | 
5 | ## How to Contribute
6 | 
7 | 1. Make sure you have a GitHub account.
8 | 2. Fork the repository for the relevant book.
9 | 3. Create a new branch on which to make your change, e.g.
10 |    `git checkout -b my_code_contribution`
11 | 4. Commit your change. Include a commit message describing the correction. Please note that if your commit message is not clear, the correction will not be accepted.
12 | 5. Submit a pull request.
13 | 
14 | Thank you for your contribution!
--------------------------------------------------------------------------------
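For readers who clone the repository, the chapter 6 demo can be run end to end. This is a minimal sketch, assuming MATLAB is started at the repository root and that training (3 epochs on 8000 digits) is allowed a few minutes to run:

% Hypothetical quick start (paths follow the tree above; runtimes vary).
cd('Deep-Learning-for-Beginners-master/ch.6')
TestMnistConv       % trains the CNN, saves MnistConv.mat, prints accuracy
PlotFeatures        % redisplays the saved network's filters and feature maps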