├── Assignment1
│   ├── learn_perceptron.py
│   └── plot_perceptron.py
├── LICENSE
└── README.md

/Assignment1/learn_perceptron.py:
--------------------------------------------------------------------------------
"""
Learns the weights of a perceptron and displays the results.
"""

import numpy as np

from plot_perceptron import plot_perceptron


def learn_perceptron(neg_examples_nobias, pos_examples_nobias, w_init=None, w_gen_feas=None):
    """
    Learns the weights of a perceptron for a 2-dimensional dataset and plots
    the perceptron at each iteration, where an iteration is defined as one
    full pass through the data. If a generously feasible weight vector is
    provided, the visualization also shows the distance from the learned
    weight vectors to the generously feasible weight vector.
    Required inputs:
      neg_examples_nobias - The num_neg_examples x 2 matrix for the examples with target 0.
          num_neg_examples is the number of examples for the negative class.
      pos_examples_nobias - The num_pos_examples x 2 matrix for the examples with target 1.
          num_pos_examples is the number of examples for the positive class.
      w_init - A 3-element initial weight vector. The last element is the bias.
      w_gen_feas - A generously feasible weight vector.
    Returns:
      w - The learned weight vector.
    """

    # Bookkeeping.
    num_neg_examples = neg_examples_nobias.shape[0]
    num_pos_examples = pos_examples_nobias.shape[0]
    num_err_history = []
    w_dist_history = []

    # Append a column of ones to the examples so that the bias can be
    # learned as an ordinary weight.
    neg_examples = np.c_[neg_examples_nobias, np.ones(num_neg_examples)]
    pos_examples = np.c_[pos_examples_nobias, np.ones(num_pos_examples)]

    # If an initial weight vector has not been provided, initialize it randomly.
    if w_init is None or len(w_init) == 0:
        w = np.random.rand(3, 1)
    else:
        w = w_init

    if w_gen_feas is None:
        w_gen_feas = []

    # Find the data points that the perceptron has incorrectly classified
    # and record the number of errors it makes.
    it = 0

    mistakes0, mistakes1 = eval_perceptron(neg_examples, pos_examples, w)
    num_errs = len(mistakes0) + len(mistakes1)
    num_err_history.append(num_errs)
    print('Number of errors in iteration {0}:\t{1}'.format(it, num_errs))
    print('weights:')
    print(w)

    plot_perceptron(neg_examples, pos_examples, mistakes0, mistakes1, num_err_history, w, w_dist_history)

    key = input("Press Enter to continue, or q then Enter to quit: ")
    if key == 'q':
        return w

    # If a generously feasible weight vector exists, record the distance
    # to it from the initial weight vector.
    if len(w_gen_feas) != 0:
        # Flatten both vectors so the difference is elementwise even when
        # their shapes differ (e.g. (3, 1) vs. (3,)).
        w_dist_history.append(np.linalg.norm(w.ravel() - np.asarray(w_gen_feas).ravel()))

    # Iterate until the perceptron has correctly classified all points.
    while num_errs > 0:

        it += 1

        # Update the weights of the perceptron.
        w = update_weights(neg_examples, pos_examples, w)

        # If a generously feasible weight vector exists, record the distance
        # to it from the current weight vector.
        if len(w_gen_feas) != 0:
            w_dist_history.append(np.linalg.norm(w.ravel() - np.asarray(w_gen_feas).ravel()))

        # Find the data points that the perceptron has incorrectly classified
        # and record the number of errors it makes.
        mistakes0, mistakes1 = eval_perceptron(neg_examples, pos_examples, w)
        num_errs = len(mistakes0) + len(mistakes1)
        num_err_history.append(num_errs)
        print('Number of errors in iteration {0}:\t{1}'.format(it, num_errs))
        print('weights:')
        print(w)

        plot_perceptron(neg_examples, pos_examples, mistakes0, mistakes1, num_err_history, w, w_dist_history)

        key = input("Press Enter to continue, or q then Enter to quit: ")
        if key == 'q':
            return w

    # All points are correctly classified.
    return w


def update_weights(neg_examples, pos_examples, w_current):
    """
    Updates the weights of the perceptron for incorrectly classified points
    using the perceptron update algorithm. This function makes one sweep
    over the dataset.
    Inputs:
      neg_examples - The num_neg_examples x 3 matrix for the examples with target 0.
          num_neg_examples is the number of examples for the negative class.
      pos_examples - The num_pos_examples x 3 matrix for the examples with target 1.
          num_pos_examples is the number of examples for the positive class.
      w_current - A 3-element weight vector. The last element is the bias.
    Returns:
      w - The weight vector after one pass through the dataset using the
          perceptron learning rule.
    """

    # Copy the weights so that the caller's vector is not mutated in place.
    w = w_current.copy()
    num_neg_examples = neg_examples.shape[0]
    num_pos_examples = pos_examples.shape[0]

    for i in range(num_neg_examples):
        x = neg_examples[[i], :]  # row vector, shape (1, 3)
        activation = float(x.dot(w))
        if activation >= 0:
            # A negative example has been classified as positive.
            # YOUR CODE HERE
            pass

    for i in range(num_pos_examples):
        x = pos_examples[[i], :]  # row vector, shape (1, 3)
        activation = float(x.dot(w))
        if activation < 0:
            # A positive example has been classified as negative.
            # YOUR CODE HERE
            pass

    return w


def eval_perceptron(neg_examples, pos_examples, w):
    """
    Evaluates the perceptron using a given weight vector. Here, evaluation
    refers to finding the data points that the perceptron incorrectly classifies.
    Inputs:
      neg_examples - The num_neg_examples x 3 matrix for the examples with target 0.
          num_neg_examples is the number of examples for the negative class.
      pos_examples - The num_pos_examples x 3 matrix for the examples with target 1.
          num_pos_examples is the number of examples for the positive class.
      w - A 3-element weight vector. The last element is the bias.
    Returns:
      mistakes0 - A list of the indices of the negative examples that have been
          incorrectly classified as positive.
      mistakes1 - A list of the indices of the positive examples that have been
          incorrectly classified as negative.
    """

    num_neg_examples = neg_examples.shape[0]
    num_pos_examples = pos_examples.shape[0]
    mistakes0 = []
    mistakes1 = []

    for i in range(num_neg_examples):
        x = neg_examples[i, :]
        activation = float(x.dot(w))
        if activation >= 0:
            mistakes0.append(i)

    for i in range(num_pos_examples):
        x = pos_examples[i, :]
        activation = float(x.dot(w))
        if activation < 0:
            mistakes1.append(i)

    return (mistakes0, mistakes1)
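

# The two loops in update_weights are left for the student to complete. For
# reference, a minimal sketch of the standard perceptron learning rule (with
# learning rate 1) is given below. update_weights_reference is a hypothetical
# helper added for illustration; it is not part of the original assignment.
def update_weights_reference(neg_examples, pos_examples, w_current):
    """One sweep of the classic perceptron rule: subtract a misclassified
    negative example from the weights, add a misclassified positive one."""
    w = w_current.copy()
    for row in neg_examples:
        x = row.reshape(-1, 1)  # column vector, shape (3, 1)
        if float(x.T.dot(w)) >= 0:  # target 0, but classified as positive
            w = w - x
    for row in pos_examples:
        x = row.reshape(-1, 1)
        if float(x.T.dot(w)) < 0:  # target 1, but classified as negative
            w = w + x
    return w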
3 | """ 4 | 5 | import matplotlib.pyplot as plt 6 | 7 | def plot_perceptron(neg_examples, pos_examples, mistakes0, mistakes1, num_err_history, w, w_dist_history): 8 | """ 9 | % The top-left plot shows the dataset and the classification boundary given by 10 | % the weights of the perceptron. The negative examples are shown as circles 11 | % while the positive examples are shown as squares. If an example is colored 12 | % green then it means that the example has been correctly classified by the 13 | % provided weights. If it is colored red then it has been incorrectly classified. 14 | % The top-right plot shows the number of mistakes the perceptron algorithm has 15 | % made in each iteration so far. 16 | % The bottom-left plot shows the distance to some generously feasible weight 17 | % vector if one has been provided (note, there can be an infinite number of these). 18 | % Points that the classifier has made a mistake on are shown in red, 19 | % while points that are correctly classified are shown in green. 20 | % The goal is for all of the points to be green (if it is possible to do so). 21 | % Inputs: 22 | % neg_examples - The num_neg_examples x 3 matrix for the examples with target 0. 23 | % num_neg_examples is the number of examples for the negative class. 24 | % pos_examples- The num_pos_examples x 3 matrix for the examples with target 1. 25 | % num_pos_examples is the number of examples for the positive class. 26 | % mistakes0 - A vector containing the indices of the datapoints from class 0 incorrectly 27 | % classified by the perceptron. This is a subset of neg_examples. 28 | % mistakes1 - A vector containing the indices of the datapoints from class 1 incorrectly 29 | % classified by the perceptron. This is a subset of pos_examples. 30 | % num_err_history - A vector containing the number of mistakes for each 31 | % iteration of learning so far. 32 | % w - A 3-dimensional vector corresponding to the current weights of the 33 | % perceptron. The last element is the bias. 34 | % w_dist_history - A vector containing the L2-distance to a generously 35 | % feasible weight vector for each iteration of learning so far. 36 | % Empty if one has not been provided. 
    """

    fig = plt.figure()
    ax1 = fig.add_subplot(221)

    neg_correct_ind = [i for i in range(len(neg_examples)) if i not in mistakes0]
    pos_correct_ind = [i for i in range(len(pos_examples)) if i not in mistakes1]

    if neg_examples.size:
        ax1.scatter(neg_examples[neg_correct_ind, 0], neg_examples[neg_correct_ind, 1], marker='o', s=80, color='green')

    if pos_examples.size:
        ax1.scatter(pos_examples[pos_correct_ind, 0], pos_examples[pos_correct_ind, 1], marker='s', s=80, color='green')

    if mistakes0:
        ax1.scatter(neg_examples[mistakes0, 0], neg_examples[mistakes0, 1], marker='o', s=80, color='red')

    if mistakes1:
        ax1.scatter(pos_examples[mistakes1, 0], pos_examples[mistakes1, 1], marker='s', s=80, color='red')

    # The decision boundary satisfies w[0]*x + w[1]*y + w[2] = 0, so we solve
    # for y at x = -5 and x = 5 and draw the line segment between those points.
    ax1.plot([-5, 5], [(-w[-1] + 5 * w[0]) / w[1], (-w[-1] - 5 * w[0]) / w[1]])
    ax1.set_xlim([-1, 1])
    ax1.set_ylim([-1, 1])
    ax1.set_title('Classifier')

    ax2 = fig.add_subplot(222)
    ax2.plot(num_err_history)
    ax2.set_xlim([-1, max(15, len(num_err_history))])
    ax2.set_ylim([0, neg_examples.shape[0] + pos_examples.shape[0] + 1])
    ax2.set_title('Number of errors')
    ax2.set_xlabel('Iteration')
    ax2.set_ylabel('Number of errors')

    ax3 = fig.add_subplot(223)
    ax3.plot(w_dist_history)
    ax3.set_xlim([-1, max(15, len(num_err_history))])
    ax3.set_ylim([0, 15])
    ax3.set_title('Distance')
    ax3.set_xlabel('Iteration')
    ax3.set_ylabel('Distance')

    plt.show()
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2016 Adam Klimont

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Neural-Networks-for-Machine-Learning-in-Python
Assignments from Geoffrey Hinton's Neural Networks for Machine Learning course, translated into Python.

To load the `.mat` dataset files, start an IPython console (or any Python session) and run:

```python
import scipy.io
mat = scipy.io.loadmat('Datasets/dataset1.mat')
```
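
From there, a sketch of how the first assignment might be run. The dictionary keys and the import path below are assumptions (they depend on what `dataset1.mat` actually contains and on running from inside `Assignment1/`); inspect `mat.keys()` to confirm the names for your copy of the data:

```python
import scipy.io

from learn_perceptron import learn_perceptron  # run from inside Assignment1/

# The keys below are assumed to match the variable names saved in
# dataset1.mat; adjust the path and names to your local copy.
mat = scipy.io.loadmat('Datasets/dataset1.mat')
w = learn_perceptron(mat['neg_examples_nobias'],
                     mat['pos_examples_nobias'],
                     mat['w_init'],
                     mat['w_gen_feas'])
```
--------------------------------------------------------------------------------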