├── cs231n ├── __init__.py ├── __init__.pyc ├── classifiers │ ├── __init__.py │ ├── __init__.pyc │ ├── k_nearest_neighbor.py │ ├── k_nearest_neighbor.pyc │ ├── linear_classifier.py │ ├── linear_classifier.pyc │ ├── linear_svm.py │ ├── linear_svm.pyc │ ├── softmax.py │ └── softmax.pyc ├── data_utils.py ├── data_utils.pyc ├── features.py ├── features.pyc ├── gradient_check.py └── gradient_check.pyc ├── features.ipynb ├── knn.ipynb ├── softmax.ipynb └── svm.ipynb /cs231n/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/manuchopra/ImageClassificationKNN-Softmax-SVM/d3f7715fd36052080a8819e42a736b7fd4bcde6d/cs231n/__init__.py -------------------------------------------------------------------------------- /cs231n/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/manuchopra/ImageClassificationKNN-Softmax-SVM/d3f7715fd36052080a8819e42a736b7fd4bcde6d/cs231n/__init__.pyc -------------------------------------------------------------------------------- /cs231n/classifiers/__init__.py: -------------------------------------------------------------------------------- 1 | from cs231n.classifiers.k_nearest_neighbor import * 2 | from cs231n.classifiers.linear_classifier import * 3 | -------------------------------------------------------------------------------- /cs231n/classifiers/__init__.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/manuchopra/ImageClassificationKNN-Softmax-SVM/d3f7715fd36052080a8819e42a736b7fd4bcde6d/cs231n/classifiers/__init__.pyc -------------------------------------------------------------------------------- /cs231n/classifiers/k_nearest_neighbor.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | class KNearestNeighbor: 4 | """ a kNN classifier with L2 distance """ 5 | 6 | def __init__(self): 7 | pass 8 | 9 | def train(self, X, y): 10 | """ 11 | Train the classifier. For k-nearest neighbors this is just 12 | memorizing the training data. 13 | 14 | Input: 15 | X - A num_train x dimension array where each row is a training point. 16 | y - A vector of length num_train, where y[i] is the label for X[i, :] 17 | """ 18 | self.X_train = X 19 | self.y_train = y 20 | 21 | def predict(self, X, k=1, num_loops=0): 22 | """ 23 | Predict labels for test data using this classifier. 24 | 25 | Input: 26 | X - A num_test x dimension array where each row is a test point. 27 | k - The number of nearest neighbors that vote for predicted label 28 | num_loops - Determines which method to use to compute distances 29 | between training points and test points. 30 | 31 | Output: 32 | y - A vector of length num_test, where y[i] is the predicted label for the 33 | test point X[i, :]. 34 | """ 35 | if num_loops == 0: 36 | dists = self.compute_distances_no_loops(X) 37 | elif num_loops == 1: 38 | dists = self.compute_distances_one_loop(X) 39 | elif num_loops == 2: 40 | dists = self.compute_distances_two_loops(X) 41 | else: 42 | raise ValueError('Invalid value %d for num_loops' % num_loops) 43 | 44 | return self.predict_labels(dists, k=k) 45 | 46 | def compute_distances_two_loops(self, X): 47 | """ 48 | Compute the distance between each test point in X and each training point 49 | in self.X_train using a nested loop over both the training data and the 50 | test data. 
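Here the L2 distance between two points a and b is d(a, b) = sqrt(sum_k (a_k - b_k)^2).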
51 | 52 | Input: 53 | X - An num_test x dimension array where each row is a test point. 54 | 55 | Output: 56 | dists - A num_test x num_train array where dists[i, j] is the distance 57 | between the ith test point and the jth training point. 58 | """ 59 | num_test = X.shape[0] 60 | num_train = self.X_train.shape[0] 61 | dists = np.zeros((num_test, num_train)) 62 | for i in xrange(num_test): 63 | for j in xrange(num_train): 64 | 65 | ##################################################################### 66 | # TODO: # 67 | # Compute the l2 distance between the ith test point and the jth # 68 | # training point, and store the result in dists[i, j] # 69 | ##################################################################### 70 | 71 | dists[i, j] = np.sqrt(np.sum(np.square(X[i, :] - self.X_train[j, :]))) 72 | 73 | pass 74 | ##################################################################### 75 | # END OF YOUR CODE # 76 | ##################################################################### 77 | return dists 78 | 79 | def compute_distances_one_loop(self, X): 80 | """ 81 | Compute the distance between each test point in X and each training point 82 | in self.X_train using a single loop over the test data. 83 | 84 | Input / Output: Same as compute_distances_two_loops 85 | """ 86 | num_test = X.shape[0] 87 | num_train = self.X_train.shape[0] 88 | dists = np.zeros((num_test, num_train)) 89 | for i in xrange(num_test): 90 | ####################################################################### 91 | # TODO: # 92 | # Compute the l2 distance between the ith test point and all training # 93 | # points, and store the result in dists[i, :]. # 94 | ####################################################################### 95 | 96 | dists[i, :] = np.sqrt(np.sum(np.square(self.X_train - X[i, :]), axis=1)) 97 | 98 | ####################################################################### 99 | # END OF YOUR CODE # 100 | ####################################################################### 101 | return dists 102 | 103 | def compute_distances_no_loops(self, X): 104 | """ 105 | Compute the distance between each test point in X and each training point 106 | in self.X_train using no explicit loops. 107 | 108 | Input / Output: Same as compute_distances_two_loops 109 | """ 110 | num_test = X.shape[0] 111 | num_train = self.X_train.shape[0] 112 | dists = np.zeros((num_test, num_train)) 113 | ######################################################################### 114 | # TODO: # 115 | # Compute the l2 distance between all test points and all training # 116 | # points without using any explicit loops, and store the result in # 117 | # dists. # 118 | # HINT: Try to formulate the l2 distance using matrix multiplication # 119 | # and two broadcast sums. 
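# One way to see it, writing A for X (num_test x D) and B for X_train # (num_train x D): dists**2 = (A**2).sum(1)[:, None] + (B**2).sum(1) - 2 * A.dot(B.T), # since (a - b)^2 = a^2 + b^2 - 2ab elementwise. #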
# 120 | ######################################################################### 121 | 122 | 123 | 124 | train_sums = np.sum(np.square(self.X_train), axis=1) # sum over each row #a^2 125 | 126 | test_sums = np.sum(np.square(X), axis=1) # sum over each row #b^2 127 | 128 | dists = (train_sums - 2.0 * np.dot(X, self.X_train.T)).T + test_sums # (a- b)^2 = a2 + b2 - 2ab 129 | 130 | dists = np.sqrt(dists.T) 131 | 132 | 133 | pass 134 | ######################################################################### 135 | # END OF YOUR CODE # 136 | ######################################################################### 137 | return dists 138 | 139 | def predict_labels(self, dists, k=1): 140 | """ 141 | Given a matrix of distances between test points and training points, 142 | predict a label for each test point. 143 | 144 | Input: 145 | dists - A num_test x num_train array where dists[i, j] gives the distance 146 | between the ith test point and the jth training point. 147 | 148 | Output: 149 | y - A vector of length num_test where y[i] is the predicted label for the 150 | ith test point. 151 | """ 152 | num_test = dists.shape[0] 153 | y_pred = np.zeros(num_test) 154 | for i in xrange(num_test): 155 | # A list of length k storing the labels of the k nearest neighbors to 156 | # the ith test point. 157 | closest_y = [] 158 | ######################################################################### 159 | # TODO: # 160 | # Use the distance matrix to find the k nearest neighbors of the ith # 161 | # training point, and use self.y_train to find the labels of these # 162 | # neighbors. Store these labels in closest_y. # 163 | # Hint: Look up the function numpy.argsort. # 164 | ######################################################################### 165 | 166 | 167 | 168 | indices = np.argsort(dists[i, :])[:k] #k indices corresponding to the distance 169 | 170 | for idx in indices: #for every index, append the label to closest_y 171 | 172 | closest_y.append(self.y_train[idx]) 173 | 174 | ######################################################################### 175 | # TODO: # 176 | # Now that you have found the labels of the k nearest neighbors, you # 177 | # need to find the most common label in the list closest_y of labels. # 178 | # Store this label in y_pred[i]. Break ties by choosing the smaller # 179 | # label. 
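# (np.bincount tallies the votes per label and np.argmax returns the first # maximal index, so ties already resolve toward the smaller label.) #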
# 180 | ######################################################################### 181 | 182 | y_pred[i] = np.argmax(np.bincount(closest_y)) 183 | 184 | pass 185 | ######################################################################### 186 | # END OF YOUR CODE # 187 | ######################################################################### 188 | 189 | return y_pred 190 | 191 | -------------------------------------------------------------------------------- /cs231n/classifiers/k_nearest_neighbor.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/manuchopra/ImageClassificationKNN-Softmax-SVM/d3f7715fd36052080a8819e42a736b7fd4bcde6d/cs231n/classifiers/k_nearest_neighbor.pyc -------------------------------------------------------------------------------- /cs231n/classifiers/linear_classifier.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from cs231n.classifiers.linear_svm import * 3 | from cs231n.classifiers.softmax import * 4 | 5 | class LinearClassifier: 6 | 7 | def __init__(self): 8 | self.W = None 9 | 10 | def train(self, X, y, learning_rate=1e-3, reg=1e-5, num_iters=100, 11 | batch_size=200, verbose=False): 12 | """ 13 | Train this linear classifier using stochastic gradient descent. 14 | 15 | Inputs: 16 | - X: D x N array of training data. Each training point is a D-dimensional 17 | column. 18 | - y: 1-dimensional array of length N with labels 0...K-1, for K classes. 19 | - learning_rate: (float) learning rate for optimization. 20 | - reg: (float) regularization strength. 21 | - num_iters: (integer) number of steps to take when optimizing 22 | - batch_size: (integer) number of training examples to use at each step. 23 | - verbose: (boolean) If true, print progress during optimization. 24 | 25 | Outputs: 26 | A list containing the value of the loss function at each training iteration. 27 | """ 28 | dim, num_train = X.shape 29 | num_classes = np.max(y) + 1 # assume y takes values 0...K-1 where K is number of classes 30 | if self.W is None: 31 | # lazily initialize W 32 | self.W = np.random.randn(num_classes, dim) * 0.001 33 | 34 | # Run stochastic gradient descent to optimize W 35 | loss_history = [] 36 | for it in xrange(num_iters): 37 | X_batch = None 38 | y_batch = None 39 | 40 | ######################################################################### 41 | # TODO: # 42 | # Sample batch_size elements from the training data and their # 43 | # corresponding labels to use in this round of gradient descent. # 44 | # Store the data in X_batch and their corresponding labels in # 45 | # y_batch; after sampling X_batch should have shape (dim, batch_size) # 46 | # and y_batch should have shape (batch_size,) # 47 | # # 48 | # Hint: Use np.random.choice to generate indices. Sampling with # 49 | # replacement is faster than sampling without replacement. 
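# (np.random.choice(num_train, batch_size) samples with replacement by # default; pass replace=False for the slower no-replacement variant.) #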
# 50 | ######################################################################### 51 | 52 | batch_indices = np.random.choice(num_train, batch_size) # batch_size indices drawn uniformly from 0 ... num_train - 1 53 | 54 | X_batch = X[:, batch_indices] 55 | y_batch = y[batch_indices] 56 | 57 | ######################################################################### 58 | # END OF YOUR CODE # 59 | ######################################################################### 60 | 61 | # evaluate loss and gradient 62 | loss, grad = self.loss(X_batch, y_batch, reg) 63 | loss_history.append(loss) 64 | 65 | # perform parameter update 66 | ######################################################################### 67 | # TODO: # 68 | # Update the weights using the gradient and the learning rate. # 69 | ######################################################################### 70 | self.W -= learning_rate * grad 71 | 72 | 73 | ######################################################################### 74 | # END OF YOUR CODE # 75 | ######################################################################### 76 | 77 | if verbose and it % 100 == 0: 78 | print 'iteration %d / %d: loss %f' % (it, num_iters, loss) 79 | 80 | return loss_history 81 | 82 | def predict(self, X): 83 | """ 84 | Use the trained weights of this linear classifier to predict labels for 85 | data points. 86 | 87 | Inputs: 88 | - X: D x N array of training data. Each column is a D-dimensional point. 89 | 90 | Returns: 91 | - y_pred: Predicted labels for the data in X. y_pred is a 1-dimensional 92 | array of length N, and each element is an integer giving the predicted 93 | class. 94 | """ 95 | y_pred = np.zeros(X.shape[1]) 96 | ########################################################################### 97 | # TODO: # 98 | # Implement this method. Store the predicted labels in y_pred. # 99 | ########################################################################### 100 | 101 | y_pred = np.argmax(self.W.dot(X), axis=0) 102 | 103 | 104 | ########################################################################### 105 | # END OF YOUR CODE # 106 | ########################################################################### 107 | return y_pred 108 | 109 | def loss(self, X_batch, y_batch, reg): 110 | """ 111 | Compute the loss function and its derivative. 112 | Subclasses will override this. 113 | 114 | Inputs: 115 | - X_batch: D x N array of data; each column is a data point. 116 | - y_batch: 1-dimensional array of length N with labels 0...K-1, for K classes. 117 | - reg: (float) regularization strength.
118 | 119 | Returns: A tuple containing: 120 | - loss as a single float 121 | - gradient with respect to self.W; an array of the same shape as W 122 | """ 123 | pass 124 | 125 | 126 | class LinearSVM(LinearClassifier): 127 | """ A subclass that uses the Multiclass SVM loss function """ 128 | 129 | def loss(self, X_batch, y_batch, reg): 130 | return svm_loss_vectorized(self.W, X_batch, y_batch, reg) 131 | 132 | 133 | class Softmax(LinearClassifier): 134 | """ A subclass that uses the Softmax + Cross-entropy loss function """ 135 | 136 | def loss(self, X_batch, y_batch, reg): 137 | return softmax_loss_vectorized(self.W, X_batch, y_batch, reg) 138 | 139 | -------------------------------------------------------------------------------- /cs231n/classifiers/linear_classifier.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/manuchopra/ImageClassificationKNN-Softmax-SVM/d3f7715fd36052080a8819e42a736b7fd4bcde6d/cs231n/classifiers/linear_classifier.pyc -------------------------------------------------------------------------------- /cs231n/classifiers/linear_svm.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from random import shuffle 3 | 4 | def svm_loss_naive(W, X, y, reg): 5 | """ 6 | Structured SVM loss function, naive implementation (with loops) 7 | Inputs: 8 | - W: C x D array of weights 9 | - X: D x N array of data. Data are D-dimensional columns 10 | - y: 1-dimensional array of length N with labels 0...K-1, for K classes 11 | - reg: (float) regularization strength 12 | Returns: 13 | a tuple of: 14 | - loss as single float 15 | - gradient with respect to weights W; an array of same shape as W 16 | """ 17 | dW = np.zeros(W.shape) # initialize the gradient as zero 18 | 19 | # compute the loss and the gradient 20 | num_classes = W.shape[0] 21 | num_train = X.shape[1] 22 | loss = 0.0 23 | for i in xrange(num_train): 24 | scores = W.dot(X[:, i]) 25 | correct_class_score = scores[y[i]] 26 | for j in xrange(num_classes): 27 | if j == y[i]: 28 | continue 29 | margin = scores[j] - correct_class_score + 1 # note delta = 1 30 | if margin > 0: 31 | loss += margin 32 | dW[j, :] += X[:, i] 33 | dW[y[i], :] += - X[:, i] 34 | 35 | # Right now the loss is a sum over all training examples, but we want it 36 | # to be an average instead so we divide by num_train. 37 | loss /= num_train 38 | dW /= num_train 39 | 40 | # Add regularization to the loss. 41 | loss += 0.5 * reg * np.sum(W * W) 42 | dW += reg * W # add regularization to the gradient 43 | 44 | ############################################################################# 45 | # TODO: # 46 | # Compute the gradient of the loss function and store it in dW. # 47 | # Rather than first computing the loss and then computing the derivative, # 48 | # it may be simpler to compute the derivative at the same time that the # 49 | # loss is being computed. As a result you may need to modify some of the # 50 | # code above to compute the gradient. # 51 | ############################################################################# 52 | 53 | return loss, dW 54 | 55 | 56 | def svm_loss_vectorized(W, X, y, reg): 57 | """ 58 | Structured SVM loss function, vectorized implementation. 59 | 60 | Inputs and outputs are the same as svm_loss_naive.
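Per-example hinge loss: L_i = sum_{j != y_i} max(0, s_j - s_{y_i} + 1) with delta = 1; the returned loss is the batch average plus 0.5 * reg * np.sum(W * W).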
61 | """ 62 | loss = 0.0 63 | dW = np.zeros(W.shape) # initialize the gradient as zero 64 | 65 | ############################################################################# 66 | # TODO: # 67 | # Implement a vectorized version of the structured SVM loss, storing the # 68 | # result in loss. # 69 | ############################################################################# 70 | 71 | scores = W.dot(X) 72 | 73 | Z = np.arange(len(y)) # column indices 0, 1, ..., N-1, used for fancy indexing 74 | correct_scores = scores.T[Z, y] 75 | 76 | margin = scores - correct_scores + 1 77 | loss = np.sum(np.maximum(0, margin)) 78 | loss /= X.shape[1] 79 | 80 | loss -= 1 # each correct class contributes a margin of exactly delta = 1; remove that constant 81 | loss += 0.5 * reg * np.sum(W * W) 82 | 83 | ############################################################################# 84 | # END OF YOUR CODE # 85 | ############################################################################# 86 | 87 | margin[margin > 0] = 1 # indicator of margin violations 88 | margin[margin < 0] = 0 89 | 90 | margin[y, range(X.shape[1])] = 0 # clear the correct-class entries before counting 91 | 92 | margin[y, range(X.shape[1])] = -1.0 * np.sum(margin, axis=0) # each correct class gets minus the number of violations in its column 93 | 94 | dW = np.dot(margin, X.T) 95 | dW /= X.shape[1] 96 | 97 | # Add regularization to the gradient 98 | dW += reg * W 99 | 100 | 101 | ############################################################################# 102 | # TODO: # 103 | # Implement a vectorized version of the gradient for the structured SVM # 104 | # loss, storing the result in dW. # 105 | # # 106 | # Hint: Instead of computing the gradient from scratch, it may be easier # 107 | # to reuse some of the intermediate values that you used to compute the # 108 | # loss. # 109 | ############################################################################# 110 | 111 | ############################################################################# 112 | # END OF YOUR CODE # 113 | ############################################################################# 114 | 115 | return loss, dW 116 | -------------------------------------------------------------------------------- /cs231n/classifiers/linear_svm.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/manuchopra/ImageClassificationKNN-Softmax-SVM/d3f7715fd36052080a8819e42a736b7fd4bcde6d/cs231n/classifiers/linear_svm.pyc -------------------------------------------------------------------------------- /cs231n/classifiers/softmax.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from random import shuffle 3 | 4 | def softmax_loss_naive(W, X, y, reg): 5 | """ 6 | Softmax loss function, naive implementation (with loops) 7 | Inputs: 8 | - W: C x D array of weights 9 | - X: D x N array of data. Data are D-dimensional columns 10 | - y: 1-dimensional array of length N with labels 0...K-1, for K classes 11 | - reg: (float) regularization strength 12 | Returns: 13 | a tuple of: 14 | - loss as single float 15 | - gradient with respect to weights W, an array of same size as W 16 | """ 17 | # Initialize the loss and gradient to zero. 18 | loss = 0.0 19 | dW = np.zeros_like(W) 20 | 21 | ############################################################################# 22 | # TODO: Compute the softmax loss and its gradient using explicit loops. # 23 | # Store the loss in loss and the gradient in dW. If you are not careful # 24 | # here, it is easy to run into numeric instability. Don't forget the # 25 | # regularization!
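# The per-example loss is L_i = -log(exp(s_{y_i}) / sum_j exp(s_j)); # subtracting max_j s_j from every score first leaves L_i unchanged but # keeps the exponentials from overflowing (e.g. np.exp(1000) is inf). #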
# 26 | ############################################################################# 27 | 28 | for i in range(X.shape[1]): 29 | scores = W.dot(X[:, i]) 30 | scores -= np.max(scores) # prevents numerical instability 31 | prob = 0.0 32 | loss -= scores[y[i]] 33 | 34 | for curr_score in scores: 35 | prob += np.exp(curr_score) 36 | 37 | for j in range(W.shape[0]): 38 | prob_ji = np.exp(scores[j]) / prob 39 | margin = - prob_ji * X[:, i].T 40 | 41 | if j == y[i]: 42 | margin = (1 - prob_ji) * X[:, i].T 43 | dW[j, :] += -margin 44 | 45 | loss += np.log(prob) 46 | 47 | loss /= X.shape[1] 48 | dW /= X.shape[1] 49 | 50 | # Regularization 51 | loss += 0.5 * reg * np.sum(W * W) 52 | dW += reg * W 53 | 54 | 55 | ############################################################################# 56 | # END OF YOUR CODE # 57 | ############################################################################# 58 | 59 | return loss, dW 60 | 61 | 62 | def softmax_loss_vectorized(W, X, y, reg): 63 | """ 64 | Softmax loss function, vectorized version. 65 | 66 | Inputs and outputs are the same as softmax_loss_naive. 67 | """ 68 | # Initialize the loss and gradient to zero. 69 | loss = 0.0 70 | dW = np.zeros_like(W) 71 | 72 | ############################################################################# 73 | # TODO: Compute the softmax loss and its gradient using no explicit loops. # 74 | # Store the loss in loss and the gradient in dW. If you are not careful # 75 | # here, it is easy to run into numeric instability. Don't forget the # 76 | # regularization! # 77 | ############################################################################# 78 | 79 | scores = W.dot(X) 80 | scores -= np.max(scores, axis=0) # subtract each column's max; softmax is shift-invariant, so this only prevents overflow 81 | 82 | correct_scores = scores[y, range(len(y))] 83 | exp_score = np.exp(scores) 84 | exp_sum = np.sum(exp_score, axis=0) # adding up each column 85 | 86 | loss = - np.mean(np.log(np.exp(correct_scores) / exp_sum)) 87 | 88 | n_probs = - exp_score / exp_sum 89 | n_probs[y, range(len(y))] += 1 90 | 91 | loss += 0.5 * reg * np.sum(W * W) 92 | 93 | dW = n_probs.dot(X.T) 94 | dW /= -X.shape[1] 95 | dW += reg * W 96 | 97 | 98 | ############################################################################# 99 | # END OF YOUR CODE # 100 | ############################################################################# 101 | 102 | return loss, dW 103 | -------------------------------------------------------------------------------- /cs231n/classifiers/softmax.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/manuchopra/ImageClassificationKNN-Softmax-SVM/d3f7715fd36052080a8819e42a736b7fd4bcde6d/cs231n/classifiers/softmax.pyc -------------------------------------------------------------------------------- /cs231n/data_utils.py: -------------------------------------------------------------------------------- 1 | import cPickle as pickle 2 | import numpy as np 3 | import os 4 | 5 | def load_CIFAR_batch(filename): 6 | """ load single batch of cifar """ 7 | with open(filename, 'rb') as f: # CIFAR batches are binary pickle files 8 | datadict = pickle.load(f) 9 | X = datadict['data'] 10 | Y = datadict['labels'] 11 | X = X.reshape(10000, 3, 32, 32).transpose(0,2,3,1).astype("float") 12 | Y = np.array(Y) 13 | return X, Y 14 | 15 | def load_CIFAR10(ROOT): 16 | """ load all of cifar """ 17 | xs = [] 18 | ys = [] 19 | for b in range(1,6): 20 | f = os.path.join(ROOT, 'data_batch_%d' % (b, )) 21 | X, Y = load_CIFAR_batch(f) 22 | xs.append(X) 23 | ys.append(Y) 24 | Xtr =
np.concatenate(xs) 25 | Ytr = np.concatenate(ys) 26 | del X, Y 27 | Xte, Yte = load_CIFAR_batch(os.path.join(ROOT, 'test_batch')) 28 | return Xtr, Ytr, Xte, Yte 29 | -------------------------------------------------------------------------------- /cs231n/data_utils.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/manuchopra/ImageClassificationKNN-Softmax-SVM/d3f7715fd36052080a8819e42a736b7fd4bcde6d/cs231n/data_utils.pyc -------------------------------------------------------------------------------- /cs231n/features.py: -------------------------------------------------------------------------------- 1 | import matplotlib 2 | import numpy as np 3 | from scipy.ndimage import uniform_filter 4 | 5 | 6 | def extract_features(imgs, feature_fns, verbose=False): 7 | """ 8 | Given pixel data for images and several feature functions that can operate on 9 | single images, apply all feature functions to all images, concatenating the 10 | feature vectors for each image and storing the features for all images in 11 | a single matrix. 12 | 13 | Inputs: 14 | - imgs: N x H X W X C array of pixel data for N images. 15 | - feature_fns: List of k feature functions. The ith feature function should 16 | take as input an H x W x D array and return a (one-dimensional) array of 17 | length F_i. 18 | - verbose: Boolean; if true, print progress. 19 | 20 | Returns: 21 | An array of shape (F_1 + ... + F_k, N) where each column is the concatenation 22 | of all features for a single image. 23 | """ 24 | num_images = imgs.shape[0] 25 | if num_images == 0: 26 | return np.array([]) 27 | 28 | # Use the first image to determine feature dimensions 29 | feature_dims = [] 30 | first_image_features = [] 31 | for feature_fn in feature_fns: 32 | feats = feature_fn(imgs[0].squeeze()) 33 | assert len(feats.shape) == 1, 'Feature functions must be one-dimensional' 34 | feature_dims.append(feats.size) 35 | first_image_features.append(feats) 36 | 37 | # Now that we know the dimensions of the features, we can allocate a single 38 | # big array to store all features as columns. 39 | total_feature_dim = sum(feature_dims) 40 | imgs_features = np.zeros((total_feature_dim, num_images)) 41 | imgs_features[:total_feature_dim, 0] = np.hstack(first_image_features) 42 | 43 | # Extract features for the rest of the images. 
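# Each column of imgs_features holds one image's concatenated feature vector; # idx/next_idx walk down that column one feature function at a time.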
44 | for i in xrange(1, num_images): 45 | idx = 0 46 | for feature_fn, feature_dim in zip(feature_fns, feature_dims): 47 | next_idx = idx + feature_dim 48 | imgs_features[idx:next_idx, i] = feature_fn(imgs[i].squeeze()) 49 | idx = next_idx 50 | if verbose and i % 1000 == 0: 51 | print 'Done extracting features for %d / %d images' % (i, num_images) 52 | 53 | return imgs_features 54 | 55 | 56 | def rgb2gray(rgb): 57 | """Convert RGB image to grayscale 58 | 59 | Parameters: 60 | rgb : RGB image 61 | 62 | Returns: 63 | gray : grayscale image 64 | 65 | """ 66 | return np.dot(rgb[...,:3], [0.299, 0.587, 0.114]) # ITU-R BT.601 luma weights 67 | 68 | 69 | def hog_feature(im): 70 | """Compute Histogram of Oriented Gradients (HOG) feature for an image 71 | 72 | Modified from skimage.feature.hog 73 | http://pydoc.net/Python/scikits-image/0.4.2/skimage.feature.hog 74 | 75 | Reference: 76 | Histograms of Oriented Gradients for Human Detection 77 | Navneet Dalal and Bill Triggs, CVPR 2005 78 | 79 | Parameters: 80 | im : an input grayscale or rgb image 81 | 82 | Returns: 83 | feat: Histogram of Oriented Gradients (HOG) feature 84 | 85 | """ 86 | 87 | # convert rgb to grayscale if needed 88 | if im.ndim == 3: 89 | image = rgb2gray(im) 90 | else: 91 | image = np.atleast_2d(im) 92 | 93 | sx, sy = image.shape # image size 94 | orientations = 9 # number of gradient bins 95 | cx, cy = (8, 8) # pixels per cell 96 | 97 | gx = np.zeros(image.shape) 98 | gy = np.zeros(image.shape) 99 | gx[:, :-1] = np.diff(image, n=1, axis=1) # compute gradient on x-direction 100 | gy[:-1, :] = np.diff(image, n=1, axis=0) # compute gradient on y-direction 101 | grad_mag = np.sqrt(gx ** 2 + gy ** 2) # gradient magnitude 102 | grad_ori = np.arctan2(gy, (gx + 1e-15)) * (180 / np.pi) + 90 # gradient orientation 103 | 104 | n_cellsx = int(np.floor(sx / cx)) # number of cells in x 105 | n_cellsy = int(np.floor(sy / cy)) # number of cells in y 106 | # compute orientations integral images 107 | orientation_histogram = np.zeros((n_cellsx, n_cellsy, orientations)) 108 | for i in range(orientations): 109 | # create new integral image for this orientation 110 | # isolate orientations in this range 111 | temp_ori = np.where(grad_ori < 180 / orientations * (i + 1), 112 | grad_ori, 0) 113 | temp_ori = np.where(grad_ori >= 180 / orientations * i, 114 | temp_ori, 0) 115 | # select magnitudes for those orientations 116 | cond2 = temp_ori > 0 117 | temp_mag = np.where(cond2, grad_mag, 0) 118 | orientation_histogram[:,:,i] = uniform_filter(temp_mag, size=(cx, cy))[cx/2::cx, cy/2::cy].T 119 | 120 | return orientation_histogram.ravel() 121 | 122 | 123 | def color_histogram_hsv(im, nbin=10, xmin=0, xmax=255, normalized=True): 124 | """ 125 | Compute color histogram for an image using hue. 126 | 127 | Inputs: 128 | - im: H x W x C array of pixel data for an RGB image. 129 | - nbin: Number of histogram bins. (default: 10) 130 | - xmin: Minimum pixel value (default: 0) 131 | - xmax: Maximum pixel value (default: 255) 132 | - normalized: Whether to normalize the histogram (default: True) 133 | 134 | Returns: 135 | 1D vector of length nbin giving the color histogram over the hue of the 136 | input image.
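The image is converted to HSV and only the hue channel is histogrammed; with normalized=True the returned bin masses sum to 1.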
137 | """ 138 | ndim = im.ndim 139 | bins = np.linspace(xmin, xmax, nbin+1) 140 | hsv = matplotlib.colors.rgb_to_hsv(im/xmax) * xmax 141 | imhist, bin_edges = np.histogram(hsv[:,:,0], bins=bins, density=normalized) 142 | imhist = imhist * np.diff(bin_edges) 143 | 144 | # return histogram 145 | return imhist 146 | 147 | 148 | 149 | -------------------------------------------------------------------------------- /cs231n/features.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/manuchopra/ImageClassificationKNN-Softmax-SVM/d3f7715fd36052080a8819e42a736b7fd4bcde6d/cs231n/features.pyc -------------------------------------------------------------------------------- /cs231n/gradient_check.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from random import randrange 3 | 4 | def eval_numerical_gradient(f, x): 5 | """ 6 | a naive implementation of numerical gradient of f at x 7 | - f should be a function that takes a single argument 8 | - x is the point (numpy array) to evaluate the gradient at 9 | """ 10 | 11 | fx = f(x) # evaluate function value at original point 12 | grad = np.zeros(x.shape) 13 | h = 0.00001 14 | 15 | # iterate over all indexes in x 16 | it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite']) 17 | while not it.finished: 18 | 19 | # evaluate function at x+h 20 | ix = it.multi_index 21 | x[ix] += h # increment by h 22 | fxh = f(x) # evaluate f(x + h) 23 | x[ix] -= h # restore to previous value (very important!) 24 | 25 | # compute the partial derivative 26 | grad[ix] = (fxh - fx) / h # the slope 27 | print ix, grad[ix] 28 | it.iternext() # step to next dimension 29 | 30 | return grad 31 | 32 | def grad_check_sparse(f, x, analytic_grad, num_checks): 33 | """ 34 | sample a few random elements and compare the numerical and analytic 35 | gradients only in those dimensions. 36 | """ 37 | h = 1e-5 38 | 39 | 40 | for i in xrange(num_checks): 41 | ix = tuple([randrange(m) for m in x.shape]) 42 | 43 | x[ix] += h # increment by h 44 | fxph = f(x) # evaluate f(x + h) 45 | x[ix] -= 2 * h # decrement by 2h (now at x - h) 46 | fxmh = f(x) # evaluate f(x - h) 47 | x[ix] += h # reset 48 | 49 | grad_numerical = (fxph - fxmh) / (2 * h) 50 | grad_analytic = analytic_grad[ix] 51 | rel_error = abs(grad_numerical - grad_analytic) / (abs(grad_numerical) + abs(grad_analytic)) 52 | print 'numerical: %f analytic: %f, relative error: %e' % (grad_numerical, grad_analytic, rel_error) 53 | 54 | -------------------------------------------------------------------------------- /cs231n/gradient_check.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/manuchopra/ImageClassificationKNN-Softmax-SVM/d3f7715fd36052080a8819e42a736b7fd4bcde6d/cs231n/gradient_check.pyc -------------------------------------------------------------------------------- /softmax.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "name": "", 4 | "signature": "sha256:737c77e39bc954128c473258803c18b1972afd9bc863fc091fb71afe1ff093db" 5 | }, 6 | "nbformat": 3, 7 | "nbformat_minor": 0, 8 | "worksheets": [ 9 | { 10 | "cells": [ 11 | { 12 | "cell_type": "markdown", 13 | "metadata": {}, 14 | "source": [ 15 | "# Softmax exercise\n", 16 | "\n", 17 | "*Complete and hand in this completed worksheet (including its outputs and any supporting code outside of the worksheet) with your assignment submission.
For more details see the [assignments page](http://vision.stanford.edu/teaching/cs231n/assignments.html) on the course website.*\n", 18 | "\n", 19 | "This exercise is analogous to the SVM exercise. You will:\n", 20 | "\n", 21 | "- implement a fully-vectorized **loss function** for the Softmax classifier\n", 22 | "- implement the fully-vectorized expression for its **analytic gradient**\n", 23 | "- **check your implementation** with numerical gradient\n", 24 | "- use a validation set to **tune the learning rate and regularization** strength\n", 25 | "- **optimize** the loss function with **SGD**\n", 26 | "- **visualize** the final learned weights\n" 27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "collapsed": false, 32 | "input": [ 33 | "import random\n", 34 | "import numpy as np\n", 35 | "from cs231n.data_utils import load_CIFAR10\n", 36 | "import matplotlib.pyplot as plt\n", 37 | "%matplotlib inline\n", 38 | "plt.rcParams['figure.figsize'] = (10.0, 8.0) # set default size of plots\n", 39 | "plt.rcParams['image.interpolation'] = 'nearest'\n", 40 | "plt.rcParams['image.cmap'] = 'gray'\n", 41 | "\n", 42 | "# for auto-reloading external modules\n", 43 | "# see http://stackoverflow.com/questions/1907993/autoreload-of-modules-in-ipython\n", 44 | "%load_ext autoreload\n", 45 | "%autoreload 2" 46 | ], 47 | "language": "python", 48 | "metadata": {}, 49 | "outputs": [], 50 | "prompt_number": 1 51 | }, 52 | { 53 | "cell_type": "code", 54 | "collapsed": false, 55 | "input": [ 56 | "def get_CIFAR10_data(num_training=49000, num_validation=1000, num_test=1000):\n", 57 | " \"\"\"\n", 58 | " Load the CIFAR-10 dataset from disk and perform preprocessing to prepare\n", 59 | " it for the linear classifier. These are the same steps as we used for the\n", 60 | " SVM, but condensed to a single function.
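Returns X_train, y_train, X_val, y_val, X_test, y_test, where each X is a (D + 1) x N array of image columns with a bias row of ones appended.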
\n", 61 | " \"\"\"\n", 62 | " # Load the raw CIFAR-10 data\n", 63 | " cifar10_dir = 'cs231n/datasets/cifar-10-batches-py'\n", 64 | " X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)\n", 65 | " \n", 66 | " # subsample the data\n", 67 | " mask = range(num_training, num_training + num_validation)\n", 68 | " X_val = X_train[mask]\n", 69 | " y_val = y_train[mask]\n", 70 | " mask = range(num_training)\n", 71 | " X_train = X_train[mask]\n", 72 | " y_train = y_train[mask]\n", 73 | " mask = range(num_test)\n", 74 | " X_test = X_test[mask]\n", 75 | " y_test = y_test[mask]\n", 76 | " \n", 77 | " # Preprocessing: reshape the image data into rows\n", 78 | " X_train = np.reshape(X_train, (X_train.shape[0], -1))\n", 79 | " X_val = np.reshape(X_val, (X_val.shape[0], -1))\n", 80 | " X_test = np.reshape(X_test, (X_test.shape[0], -1))\n", 81 | " \n", 82 | " # Normalize the data: subtract the mean image\n", 83 | " mean_image = np.mean(X_train, axis = 0)\n", 84 | " X_train -= mean_image\n", 85 | " X_val -= mean_image\n", 86 | " X_test -= mean_image\n", 87 | " \n", 88 | " # add bias dimension and transform into columns\n", 89 | " X_train = np.hstack([X_train, np.ones((X_train.shape[0], 1))]).T\n", 90 | " X_val = np.hstack([X_val, np.ones((X_val.shape[0], 1))]).T\n", 91 | " X_test = np.hstack([X_test, np.ones((X_test.shape[0], 1))]).T\n", 92 | " \n", 93 | " return X_train, y_train, X_val, y_val, X_test, y_test\n", 94 | "\n", 95 | "\n", 96 | "# Invoke the above function to get our data.\n", 97 | "X_train, y_train, X_val, y_val, X_test, y_test = get_CIFAR10_data()\n", 98 | "print 'Train data shape: ', X_train.shape\n", 99 | "print 'Train labels shape: ', y_train.shape\n", 100 | "print 'Validation data shape: ', X_val.shape\n", 101 | "print 'Validation labels shape: ', y_val.shape\n", 102 | "print 'Test data shape: ', X_test.shape\n", 103 | "print 'Test labels shape: ', y_test.shape" 104 | ], 105 | "language": "python", 106 | "metadata": {}, 107 | "outputs": [ 108 | { 109 | "output_type": "stream", 110 | "stream": "stdout", 111 | "text": [ 112 | "Train data shape: (3073, 49000)\n", 113 | "Train labels shape: (49000,)\n", 114 | "Validation data shape: (3073, 1000)\n", 115 | "Validation labels shape: (1000,)\n", 116 | "Test data shape: (3073, 1000)\n", 117 | "Test labels shape: (1000,)\n" 118 | ] 119 | } 120 | ], 121 | "prompt_number": 2 122 | }, 123 | { 124 | "cell_type": "markdown", 125 | "metadata": {}, 126 | "source": [ 127 | "## Softmax Classifier\n", 128 | "\n", 129 | "Your code for this section will all be written inside **cs231n/classifiers/softmax.py**. 
\n" 130 | ] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "collapsed": false, 135 | "input": [ 136 | "# First implement the naive softmax loss function with nested loops.\n", 137 | "# Open the file cs231n/classifiers/softmax.py and implement the\n", 138 | "# softmax_loss_naive function.\n", 139 | "\n", 140 | "from cs231n.classifiers.softmax import softmax_loss_naive\n", 141 | "import time\n", 142 | "\n", 143 | "# Generate a random softmax weight matrix and use it to compute the loss.\n", 144 | "W = np.random.randn(10, 3073) * 0.0001\n", 145 | "loss, grad = softmax_loss_naive(W, X_train, y_train, 0.0)\n", 146 | "\n", 147 | "# As a rough sanity check, our loss should be something close to -log(0.1).\n", 148 | "print 'loss: %f' % loss\n", 149 | "print 'sanity check: %f' % (-np.log(0.1))" 150 | ], 151 | "language": "python", 152 | "metadata": {}, 153 | "outputs": [ 154 | { 155 | "output_type": "stream", 156 | "stream": "stdout", 157 | "text": [ 158 | "loss: 2.348915\n", 159 | "sanity check: 2.302585\n" 160 | ] 161 | } 162 | ], 163 | "prompt_number": 4 164 | }, 165 | { 166 | "cell_type": "markdown", 167 | "metadata": {}, 168 | "source": [ 169 | "## Inline Question 1:\n", 170 | "Why do we expect our loss to be close to -log(0.1)? Explain briefly.**\n", 171 | "\n", 172 | "**Your answer:** *We expect the loss to be close to -log(0.1) as we have 10 different classes and since we are generate a random weight matrix, each of them have the same equal probability. Hence P(total) is the sum of ten equal probabilities. Let's assume each of these individual probability is p, then our expected loss will be -log(p/10p) or -log(0.1)*\n" 173 | ] 174 | }, 175 | { 176 | "cell_type": "code", 177 | "collapsed": false, 178 | "input": [ 179 | "# Complete the implementation of softmax_loss_naive and implement a (naive)\n", 180 | "# version of the gradient that uses nested loops.\n", 181 | "loss, grad = softmax_loss_naive(W, X_train, y_train, 0.0)\n", 182 | "\n", 183 | "# As we did for the SVM, use numeric gradient checking as a debugging tool.\n", 184 | "# The numeric gradient should be close to the analytic gradient.\n", 185 | "from cs231n.gradient_check import grad_check_sparse\n", 186 | "f = lambda w: softmax_loss_naive(w, X_train, y_train, 0.0)[0]\n", 187 | "grad_numerical = grad_check_sparse(f, W, grad, 10)" 188 | ], 189 | "language": "python", 190 | "metadata": {}, 191 | "outputs": [ 192 | { 193 | "output_type": "stream", 194 | "stream": "stdout", 195 | "text": [ 196 | "numerical: 0.522142 analytic: 0.522142, relative error: 5.385949e-08\n", 197 | "numerical: -1.916828 analytic: -1.916828, relative error: 2.002732e-09" 198 | ] 199 | }, 200 | { 201 | "output_type": "stream", 202 | "stream": "stdout", 203 | "text": [ 204 | "\n", 205 | "numerical: 1.761582 analytic: 1.761582, relative error: 4.181897e-08" 206 | ] 207 | }, 208 | { 209 | "output_type": "stream", 210 | "stream": "stdout", 211 | "text": [ 212 | "\n", 213 | "numerical: -1.574561 analytic: -1.574561, relative error: 1.829238e-08" 214 | ] 215 | }, 216 | { 217 | "output_type": "stream", 218 | "stream": "stdout", 219 | "text": [ 220 | "\n", 221 | "numerical: -2.546487 analytic: -2.546487, relative error: 1.503163e-08" 222 | ] 223 | }, 224 | { 225 | "output_type": "stream", 226 | "stream": "stdout", 227 | "text": [ 228 | "\n", 229 | "numerical: -1.676335 analytic: -1.676335, relative error: 1.326739e-08" 230 | ] 231 | }, 232 | { 233 | "output_type": "stream", 234 | "stream": "stdout", 235 | "text": [ 236 | "\n", 237 | "numerical: 0.905532 analytic: 
0.905532, relative error: 3.011785e-08" 238 | ] 239 | }, 240 | { 241 | "output_type": "stream", 242 | "stream": "stdout", 243 | "text": [ 244 | "\n", 245 | "numerical: 0.575503 analytic: 0.575503, relative error: 2.850763e-08" 246 | ] 247 | }, 248 | { 249 | "output_type": "stream", 250 | "stream": "stdout", 251 | "text": [ 252 | "\n", 253 | "numerical: -0.177720 analytic: -0.177720, relative error: 9.747113e-08" 254 | ] 255 | }, 256 | { 257 | "output_type": "stream", 258 | "stream": "stdout", 259 | "text": [ 260 | "\n", 261 | "numerical: 0.487658 analytic: 0.487658, relative error: 2.576010e-08" 262 | ] 263 | }, 264 | { 265 | "output_type": "stream", 266 | "stream": "stdout", 267 | "text": [ 268 | "\n" 269 | ] 270 | } 271 | ], 272 | "prompt_number": 5 273 | }, 274 | { 275 | "cell_type": "code", 276 | "collapsed": false, 277 | "input": [ 278 | "# Now that we have a naive implementation of the softmax loss function and its gradient,\n", 279 | "# implement a vectorized version in softmax_loss_vectorized.\n", 280 | "# The two versions should compute the same results, but the vectorized version should be\n", 281 | "# much faster.\n", 282 | "tic = time.time()\n", 283 | "loss_naive, grad_naive = softmax_loss_naive(W, X_train, y_train, 0.00001)\n", 284 | "toc = time.time()\n", 285 | "print 'naive loss: %e computed in %fs' % (loss_naive, toc - tic)\n", 286 | "\n", 287 | "from cs231n.classifiers.softmax import softmax_loss_vectorized\n", 288 | "tic = time.time()\n", 289 | "loss_vectorized, grad_vectorized = softmax_loss_vectorized(W, X_train, y_train, 0.00001)\n", 290 | "toc = time.time()\n", 291 | "print 'vectorized loss: %e computed in %fs' % (loss_vectorized, toc - tic)\n", 292 | "\n", 293 | "# As we did for the SVM, we use the Frobenius norm to compare the two versions\n", 294 | "# of the gradient.\n", 295 | "grad_difference = np.linalg.norm(grad_naive - grad_vectorized, ord='fro')\n", 296 | "print 'Loss difference: %f' % np.abs(loss_naive - loss_vectorized)\n", 297 | "print 'Gradient difference: %f' % grad_difference" 298 | ], 299 | "language": "python", 300 | "metadata": {}, 301 | "outputs": [ 302 | { 303 | "output_type": "stream", 304 | "stream": "stdout", 305 | "text": [ 306 | "naive loss: 2.348915e+00 computed in 25.429312s\n", 307 | "vectorized loss: 2.348915e+00 computed in 0.565030s" 308 | ] 309 | }, 310 | { 311 | "output_type": "stream", 312 | "stream": "stdout", 313 | "text": [ 314 | "\n", 315 | "Loss difference: 0.000000\n", 316 | "Gradient difference: 0.000000\n" 317 | ] 318 | } 319 | ], 320 | "prompt_number": 12 321 | }, 322 | { 323 | "cell_type": "code", 324 | "collapsed": false, 325 | "input": [ 326 | "# Use the validation set to tune hyperparameters (regularization strength and\n", 327 | "# learning rate). You should experiment with different ranges for the learning\n", 328 | "# rates and regularization strengths; if you are careful you should be able to\n", 329 | "# get a classification accuracy of over 0.35 on the validation set.\n", 330 | "from cs231n.classifiers import Softmax\n", 331 | "results = {}\n", 332 | "best_val = -1\n", 333 | "best_softmax = None\n", 334 | "learning_rates = [1e-7, 5e-7, 1e-6, 5e-6]\n", 335 | "regularization_strengths = [1e-4, 5e-4, 1e-5, 5e-5]\n", 336 | "\n", 337 | "################################################################################\n", 338 | "# TODO: #\n", 339 | "# Use the validation set to set the learning rate and regularization strength. 
#\n", 340 | "# This should be identical to the validation that you did for the SVM; save #\n", 341 | "# the best trained softmax classifer in best_softmax. #\n", 342 | "################################################################################\n", 343 | "\n", 344 | "for strength in regularization_strengths:\n", 345 | " for rate in learning_rates:\n", 346 | " smax = Softmax()\n", 347 | " smax.train(X_train, y_train, learning_rate=rate, reg=strength, num_iters=1500, verbose=True)\n", 348 | " y_train_pred = smax.predict(X_train)\n", 349 | " train_accuracy = np.mean(y_train == y_train_pred)\n", 350 | " y_val_pred = smax.predict(X_val)\n", 351 | " val_accuracy = np.mean(y_val == y_val_pred)\n", 352 | " results[(rate, strength)] = (train_accuracy, val_accuracy)\n", 353 | " \n", 354 | " if val_accuracy > best_val:\n", 355 | " best_val = val_accuracy\n", 356 | " best_softmax = smax\n", 357 | "\n", 358 | "################################################################################\n", 359 | "# END OF YOUR CODE #\n", 360 | "################################################################################\n", 361 | " \n", 362 | "# Print out results.\n", 363 | "for lr, reg in sorted(results):\n", 364 | " train_accuracy, val_accuracy = results[(lr, reg)]\n", 365 | " print 'lr %e reg %e train accuracy: %f val accuracy: %f' % (\n", 366 | " lr, reg, train_accuracy, val_accuracy)\n", 367 | " \n", 368 | "print 'best validation accuracy achieved during cross-validation: %f' % best_val" 369 | ], 370 | "language": "python", 371 | "metadata": {}, 372 | "outputs": [ 373 | { 374 | "output_type": "stream", 375 | "stream": "stdout", 376 | "text": [ 377 | "iteration 0 / 1500: loss 4.958326\n", 378 | "iteration 100 / 1500: loss 3.680611" 379 | ] 380 | }, 381 | { 382 | "output_type": "stream", 383 | "stream": "stdout", 384 | "text": [ 385 | "\n", 386 | "iteration 200 / 1500: loss 3.740273" 387 | ] 388 | }, 389 | { 390 | "output_type": "stream", 391 | "stream": "stdout", 392 | "text": [ 393 | "\n", 394 | "iteration 300 / 1500: loss 3.644725" 395 | ] 396 | }, 397 | { 398 | "output_type": "stream", 399 | "stream": "stdout", 400 | "text": [ 401 | "\n", 402 | "iteration 400 / 1500: loss 3.108178" 403 | ] 404 | }, 405 | { 406 | "output_type": "stream", 407 | "stream": "stdout", 408 | "text": [ 409 | "\n", 410 | "iteration 500 / 1500: loss 3.113381" 411 | ] 412 | }, 413 | { 414 | "output_type": "stream", 415 | "stream": "stdout", 416 | "text": [ 417 | "\n", 418 | "iteration 600 / 1500: loss 2.958375" 419 | ] 420 | }, 421 | { 422 | "output_type": "stream", 423 | "stream": "stdout", 424 | "text": [ 425 | "\n", 426 | "iteration 700 / 1500: loss 2.840080" 427 | ] 428 | }, 429 | { 430 | "output_type": "stream", 431 | "stream": "stdout", 432 | "text": [ 433 | "\n", 434 | "iteration 800 / 1500: loss 2.922493" 435 | ] 436 | }, 437 | { 438 | "output_type": "stream", 439 | "stream": "stdout", 440 | "text": [ 441 | "\n", 442 | "iteration 900 / 1500: loss 2.672992" 443 | ] 444 | }, 445 | { 446 | "output_type": "stream", 447 | "stream": "stdout", 448 | "text": [ 449 | "\n", 450 | "iteration 1000 / 1500: loss 3.181549" 451 | ] 452 | }, 453 | { 454 | "output_type": "stream", 455 | "stream": "stdout", 456 | "text": [ 457 | "\n", 458 | "iteration 1100 / 1500: loss 2.697582" 459 | ] 460 | }, 461 | { 462 | "output_type": "stream", 463 | "stream": "stdout", 464 | "text": [ 465 | "\n", 466 | "iteration 1200 / 1500: loss 3.002257" 467 | ] 468 | }, 469 | { 470 | "output_type": "stream", 471 | "stream": "stdout", 472 | "text": [ 473 | 
"\n", 474 | "iteration 1300 / 1500: loss 2.525300" 475 | ] 476 | }, 477 | { 478 | "output_type": "stream", 479 | "stream": "stdout", 480 | "text": [ 481 | "\n", 482 | "iteration 1400 / 1500: loss 2.690483" 483 | ] 484 | }, 485 | { 486 | "output_type": "stream", 487 | "stream": "stdout", 488 | "text": [ 489 | "\n", 490 | "iteration 0 / 1500: loss 4.967297" 491 | ] 492 | }, 493 | { 494 | "output_type": "stream", 495 | "stream": "stdout", 496 | "text": [ 497 | "\n", 498 | "iteration 100 / 1500: loss 3.194414" 499 | ] 500 | }, 501 | { 502 | "output_type": "stream", 503 | "stream": "stdout", 504 | "text": [ 505 | "\n", 506 | "iteration 200 / 1500: loss 2.872499" 507 | ] 508 | }, 509 | { 510 | "output_type": "stream", 511 | "stream": "stdout", 512 | "text": [ 513 | "\n", 514 | "iteration 300 / 1500: loss 2.677165" 515 | ] 516 | }, 517 | { 518 | "output_type": "stream", 519 | "stream": "stdout", 520 | "text": [ 521 | "\n", 522 | "iteration 400 / 1500: loss 2.462660" 523 | ] 524 | }, 525 | { 526 | "output_type": "stream", 527 | "stream": "stdout", 528 | "text": [ 529 | "\n", 530 | "iteration 500 / 1500: loss 2.207803" 531 | ] 532 | }, 533 | { 534 | "output_type": "stream", 535 | "stream": "stdout", 536 | "text": [ 537 | "\n", 538 | "iteration 600 / 1500: loss 2.340353" 539 | ] 540 | }, 541 | { 542 | "output_type": "stream", 543 | "stream": "stdout", 544 | "text": [ 545 | "\n", 546 | "iteration 700 / 1500: loss 2.521113" 547 | ] 548 | }, 549 | { 550 | "output_type": "stream", 551 | "stream": "stdout", 552 | "text": [ 553 | "\n", 554 | "iteration 800 / 1500: loss 2.396097" 555 | ] 556 | }, 557 | { 558 | "output_type": "stream", 559 | "stream": "stdout", 560 | "text": [ 561 | "\n", 562 | "iteration 900 / 1500: loss 2.191677" 563 | ] 564 | }, 565 | { 566 | "output_type": "stream", 567 | "stream": "stdout", 568 | "text": [ 569 | "\n", 570 | "iteration 1000 / 1500: loss 2.284636" 571 | ] 572 | }, 573 | { 574 | "output_type": "stream", 575 | "stream": "stdout", 576 | "text": [ 577 | "\n", 578 | "iteration 1100 / 1500: loss 2.234501" 579 | ] 580 | }, 581 | { 582 | "output_type": "stream", 583 | "stream": "stdout", 584 | "text": [ 585 | "\n", 586 | "iteration 1200 / 1500: loss 2.032115" 587 | ] 588 | }, 589 | { 590 | "output_type": "stream", 591 | "stream": "stdout", 592 | "text": [ 593 | "\n", 594 | "iteration 1300 / 1500: loss 2.182227" 595 | ] 596 | }, 597 | { 598 | "output_type": "stream", 599 | "stream": "stdout", 600 | "text": [ 601 | "\n", 602 | "iteration 1400 / 1500: loss 2.076903" 603 | ] 604 | }, 605 | { 606 | "output_type": "stream", 607 | "stream": "stdout", 608 | "text": [ 609 | "\n", 610 | "iteration 0 / 1500: loss 5.438989" 611 | ] 612 | }, 613 | { 614 | "output_type": "stream", 615 | "stream": "stdout", 616 | "text": [ 617 | "\n", 618 | "iteration 100 / 1500: loss 2.768368" 619 | ] 620 | }, 621 | { 622 | "output_type": "stream", 623 | "stream": "stdout", 624 | "text": [ 625 | "\n", 626 | "iteration 200 / 1500: loss 2.426721" 627 | ] 628 | }, 629 | { 630 | "output_type": "stream", 631 | "stream": "stdout", 632 | "text": [ 633 | "\n", 634 | "iteration 300 / 1500: loss 2.443105" 635 | ] 636 | }, 637 | { 638 | "output_type": "stream", 639 | "stream": "stdout", 640 | "text": [ 641 | "\n", 642 | "iteration 400 / 1500: loss 2.368479" 643 | ] 644 | }, 645 | { 646 | "output_type": "stream", 647 | "stream": "stdout", 648 | "text": [ 649 | "\n", 650 | "iteration 500 / 1500: loss 2.184866" 651 | ] 652 | }, 653 | { 654 | "output_type": "stream", 655 | "stream": "stdout", 656 | "text": [ 657 | "\n", 658 
| "iteration 600 / 1500: loss 2.413913" 659 | ] 660 | }, 661 | { 662 | "output_type": "stream", 663 | "stream": "stdout", 664 | "text": [ 665 | "\n", 666 | "iteration 700 / 1500: loss 2.247023" 667 | ] 668 | }, 669 | { 670 | "output_type": "stream", 671 | "stream": "stdout", 672 | "text": [ 673 | "\n", 674 | "iteration 800 / 1500: loss 1.949829" 675 | ] 676 | }, 677 | { 678 | "output_type": "stream", 679 | "stream": "stdout", 680 | "text": [ 681 | "\n", 682 | "iteration 900 / 1500: loss 1.928240" 683 | ] 684 | }, 685 | { 686 | "output_type": "stream", 687 | "stream": "stdout", 688 | "text": [ 689 | "\n", 690 | "iteration 1000 / 1500: loss 1.961031" 691 | ] 692 | }, 693 | { 694 | "output_type": "stream", 695 | "stream": "stdout", 696 | "text": [ 697 | "\n", 698 | "iteration 1100 / 1500: loss 1.943412" 699 | ] 700 | }, 701 | { 702 | "output_type": "stream", 703 | "stream": "stdout", 704 | "text": [ 705 | "\n", 706 | "iteration 1200 / 1500: loss 1.906948" 707 | ] 708 | }, 709 | { 710 | "output_type": "stream", 711 | "stream": "stdout", 712 | "text": [ 713 | "\n", 714 | "iteration 1300 / 1500: loss 1.902960" 715 | ] 716 | }, 717 | { 718 | "output_type": "stream", 719 | "stream": "stdout", 720 | "text": [ 721 | "\n", 722 | "iteration 1400 / 1500: loss 2.021826" 723 | ] 724 | }, 725 | { 726 | "output_type": "stream", 727 | "stream": "stdout", 728 | "text": [ 729 | "\n", 730 | "iteration 0 / 1500: loss 5.328158" 731 | ] 732 | }, 733 | { 734 | "output_type": "stream", 735 | "stream": "stdout", 736 | "text": [ 737 | "\n", 738 | "iteration 100 / 1500: loss 2.237349" 739 | ] 740 | }, 741 | { 742 | "output_type": "stream", 743 | "stream": "stdout", 744 | "text": [ 745 | "\n", 746 | "iteration 200 / 1500: loss 2.044778" 747 | ] 748 | }, 749 | { 750 | "output_type": "stream", 751 | "stream": "stdout", 752 | "text": [ 753 | "\n", 754 | "iteration 300 / 1500: loss 1.900411" 755 | ] 756 | }, 757 | { 758 | "output_type": "stream", 759 | "stream": "stdout", 760 | "text": [ 761 | "\n", 762 | "iteration 400 / 1500: loss 1.957876" 763 | ] 764 | }, 765 | { 766 | "output_type": "stream", 767 | "stream": "stdout", 768 | "text": [ 769 | "\n", 770 | "iteration 500 / 1500: loss 1.979841" 771 | ] 772 | }, 773 | { 774 | "output_type": "stream", 775 | "stream": "stdout", 776 | "text": [ 777 | "\n", 778 | "iteration 600 / 1500: loss 1.845112" 779 | ] 780 | }, 781 | { 782 | "output_type": "stream", 783 | "stream": "stdout", 784 | "text": [ 785 | "\n", 786 | "iteration 700 / 1500: loss 1.736386" 787 | ] 788 | }, 789 | { 790 | "output_type": "stream", 791 | "stream": "stdout", 792 | "text": [ 793 | "\n", 794 | "iteration 800 / 1500: loss 2.000306" 795 | ] 796 | }, 797 | { 798 | "output_type": "stream", 799 | "stream": "stdout", 800 | "text": [ 801 | "\n", 802 | "iteration 900 / 1500: loss 1.921555" 803 | ] 804 | }, 805 | { 806 | "output_type": "stream", 807 | "stream": "stdout", 808 | "text": [ 809 | "\n", 810 | "iteration 1000 / 1500: loss 1.923534" 811 | ] 812 | }, 813 | { 814 | "output_type": "stream", 815 | "stream": "stdout", 816 | "text": [ 817 | "\n", 818 | "iteration 1100 / 1500: loss 1.657865" 819 | ] 820 | }, 821 | { 822 | "output_type": "stream", 823 | "stream": "stdout", 824 | "text": [ 825 | "\n", 826 | "iteration 1200 / 1500: loss 1.542236" 827 | ] 828 | }, 829 | { 830 | "output_type": "stream", 831 | "stream": "stdout", 832 | "text": [ 833 | "\n", 834 | "iteration 1300 / 1500: loss 1.764741" 835 | ] 836 | }, 837 | { 838 | "output_type": "stream", 839 | "stream": "stdout", 840 | "text": [ 841 | "\n", 842 | 
"iteration 1400 / 1500: loss 2.048181" 843 | ] 844 | }, 845 | { 846 | "output_type": "stream", 847 | "stream": "stdout", 848 | "text": [ 849 | "\n", 850 | "iteration 0 / 1500: loss 5.556132" 851 | ] 852 | }, 853 | { 854 | "output_type": "stream", 855 | "stream": "stdout", 856 | "text": [ 857 | "\n", 858 | "iteration 100 / 1500: loss 3.871212" 859 | ] 860 | }, 861 | { 862 | "output_type": "stream", 863 | "stream": "stdout", 864 | "text": [ 865 | "\n", 866 | "iteration 200 / 1500: loss 3.447654" 867 | ] 868 | }, 869 | { 870 | "output_type": "stream", 871 | "stream": "stdout", 872 | "text": [ 873 | "\n", 874 | "iteration 300 / 1500: loss 3.317779" 875 | ] 876 | }, 877 | { 878 | "output_type": "stream", 879 | "stream": "stdout", 880 | "text": [ 881 | "\n", 882 | "iteration 400 / 1500: loss 3.160515" 883 | ] 884 | }, 885 | { 886 | "output_type": "stream", 887 | "stream": "stdout", 888 | "text": [ 889 | "\n", 890 | "iteration 500 / 1500: loss 2.982331" 891 | ] 892 | }, 893 | { 894 | "output_type": "stream", 895 | "stream": "stdout", 896 | "text": [ 897 | "\n", 898 | "iteration 600 / 1500: loss 3.158425" 899 | ] 900 | }, 901 | { 902 | "output_type": "stream", 903 | "stream": "stdout", 904 | "text": [ 905 | "\n", 906 | "iteration 700 / 1500: loss 3.034883" 907 | ] 908 | }, 909 | { 910 | "output_type": "stream", 911 | "stream": "stdout", 912 | "text": [ 913 | "\n", 914 | "iteration 800 / 1500: loss 2.645824" 915 | ] 916 | }, 917 | { 918 | "output_type": "stream", 919 | "stream": "stdout", 920 | "text": [ 921 | "\n", 922 | "iteration 900 / 1500: loss 3.033230" 923 | ] 924 | }, 925 | { 926 | "output_type": "stream", 927 | "stream": "stdout", 928 | "text": [ 929 | "\n", 930 | "iteration 1000 / 1500: loss 2.808701" 931 | ] 932 | }, 933 | { 934 | "output_type": "stream", 935 | "stream": "stdout", 936 | "text": [ 937 | "\n", 938 | "iteration 1100 / 1500: loss 2.924924" 939 | ] 940 | }, 941 | { 942 | "output_type": "stream", 943 | "stream": "stdout", 944 | "text": [ 945 | "\n", 946 | "iteration 1200 / 1500: loss 2.862980" 947 | ] 948 | }, 949 | { 950 | "output_type": "stream", 951 | "stream": "stdout", 952 | "text": [ 953 | "\n", 954 | "iteration 1300 / 1500: loss 2.763875" 955 | ] 956 | }, 957 | { 958 | "output_type": "stream", 959 | "stream": "stdout", 960 | "text": [ 961 | "\n", 962 | "iteration 1400 / 1500: loss 2.658483" 963 | ] 964 | }, 965 | { 966 | "output_type": "stream", 967 | "stream": "stdout", 968 | "text": [ 969 | "\n", 970 | "iteration 0 / 1500: loss 5.520888" 971 | ] 972 | }, 973 | { 974 | "output_type": "stream", 975 | "stream": "stdout", 976 | "text": [ 977 | "\n", 978 | "iteration 100 / 1500: loss 2.884167" 979 | ] 980 | }, 981 | { 982 | "output_type": "stream", 983 | "stream": "stdout", 984 | "text": [ 985 | "\n", 986 | "iteration 200 / 1500: loss 2.578114" 987 | ] 988 | }, 989 | { 990 | "output_type": "stream", 991 | "stream": "stdout", 992 | "text": [ 993 | "\n", 994 | "iteration 300 / 1500: loss 2.586291" 995 | ] 996 | }, 997 | { 998 | "output_type": "stream", 999 | "stream": "stdout", 1000 | "text": [ 1001 | "\n", 1002 | "iteration 400 / 1500: loss 2.524869" 1003 | ] 1004 | }, 1005 | { 1006 | "output_type": "stream", 1007 | "stream": "stdout", 1008 | "text": [ 1009 | "\n", 1010 | "iteration 500 / 1500: loss 2.472486" 1011 | ] 1012 | }, 1013 | { 1014 | "output_type": "stream", 1015 | "stream": "stdout", 1016 | "text": [ 1017 | "\n", 1018 | "iteration 600 / 1500: loss 2.257119" 1019 | ] 1020 | }, 1021 | { 1022 | "output_type": "stream", 1023 | "stream": "stdout", 1024 | "text": [ 
1025 | "\n", 1026 | "iteration 700 / 1500: loss 2.228732" 1027 | ] 1028 | }, 1029 | { 1030 | "output_type": "stream", 1031 | "stream": "stdout", 1032 | "text": [ 1033 | "\n", 1034 | "iteration 800 / 1500: loss 2.410699" 1035 | ] 1036 | }, 1037 | { 1038 | "output_type": "stream", 1039 | "stream": "stdout", 1040 | "text": [ 1041 | "\n", 1042 | "iteration 900 / 1500: loss 2.133893" 1043 | ] 1044 | }, 1045 | { 1046 | "output_type": "stream", 1047 | "stream": "stdout", 1048 | "text": [ 1049 | "\n", 1050 | "iteration 1000 / 1500: loss 2.060936" 1051 | ] 1052 | }, 1053 | { 1054 | "output_type": "stream", 1055 | "stream": "stdout", 1056 | "text": [ 1057 | "\n", 1058 | "iteration 1100 / 1500: loss 2.058419" 1059 | ] 1060 | }, 1061 | { 1062 | "output_type": "stream", 1063 | "stream": "stdout", 1064 | "text": [ 1065 | "\n", 1066 | "iteration 1200 / 1500: loss 2.179789" 1067 | ] 1068 | }, 1069 | { 1070 | "output_type": "stream", 1071 | "stream": "stdout", 1072 | "text": [ 1073 | "\n", 1074 | "iteration 1300 / 1500: loss 2.245901" 1075 | ] 1076 | }, 1077 | { 1078 | "output_type": "stream", 1079 | "stream": "stdout", 1080 | "text": [ 1081 | "\n", 1082 | "iteration 1400 / 1500: loss 2.021147" 1083 | ] 1084 | }, 1085 | { 1086 | "output_type": "stream", 1087 | "stream": "stdout", 1088 | "text": [ 1089 | "\n", 1090 | "iteration 0 / 1500: loss 6.147450" 1091 | ] 1092 | }, 1093 | { 1094 | "output_type": "stream", 1095 | "stream": "stdout", 1096 | "text": [ 1097 | "\n", 1098 | "iteration 100 / 1500: loss 2.660279" 1099 | ] 1100 | }, 1101 | { 1102 | "output_type": "stream", 1103 | "stream": "stdout", 1104 | "text": [ 1105 | "\n", 1106 | "iteration 200 / 1500: loss 2.613013" 1107 | ] 1108 | }, 1109 | { 1110 | "output_type": "stream", 1111 | "stream": "stdout", 1112 | "text": [ 1113 | "\n", 1114 | "iteration 300 / 1500: loss 2.260954" 1115 | ] 1116 | }, 1117 | { 1118 | "output_type": "stream", 1119 | "stream": "stdout", 1120 | "text": [ 1121 | "\n", 1122 | "iteration 400 / 1500: loss 2.306914" 1123 | ] 1124 | }, 1125 | { 1126 | "output_type": "stream", 1127 | "stream": "stdout", 1128 | "text": [ 1129 | "\n", 1130 | "iteration 500 / 1500: loss 2.053824" 1131 | ] 1132 | }, 1133 | { 1134 | "output_type": "stream", 1135 | "stream": "stdout", 1136 | "text": [ 1137 | "\n", 1138 | "iteration 600 / 1500: loss 2.108897" 1139 | ] 1140 | }, 1141 | { 1142 | "output_type": "stream", 1143 | "stream": "stdout", 1144 | "text": [ 1145 | "\n", 1146 | "iteration 700 / 1500: loss 2.060994" 1147 | ] 1148 | }, 1149 | { 1150 | "output_type": "stream", 1151 | "stream": "stdout", 1152 | "text": [ 1153 | "\n", 1154 | "iteration 800 / 1500: loss 2.284109" 1155 | ] 1156 | }, 1157 | { 1158 | "output_type": "stream", 1159 | "stream": "stdout", 1160 | "text": [ 1161 | "\n", 1162 | "iteration 900 / 1500: loss 2.010592" 1163 | ] 1164 | }, 1165 | { 1166 | "output_type": "stream", 1167 | "stream": "stdout", 1168 | "text": [ 1169 | "\n", 1170 | "iteration 1000 / 1500: loss 1.979574" 1171 | ] 1172 | }, 1173 | { 1174 | "output_type": "stream", 1175 | "stream": "stdout", 1176 | "text": [ 1177 | "\n", 1178 | "iteration 1100 / 1500: loss 1.988916" 1179 | ] 1180 | }, 1181 | { 1182 | "output_type": "stream", 1183 | "stream": "stdout", 1184 | "text": [ 1185 | "\n", 1186 | "iteration 1200 / 1500: loss 1.868916" 1187 | ] 1188 | }, 1189 | { 1190 | "output_type": "stream", 1191 | "stream": "stdout", 1192 | "text": [ 1193 | "\n", 1194 | "iteration 1300 / 1500: loss 1.978286" 1195 | ] 1196 | }, 1197 | { 1198 | "output_type": "stream", 1199 | "stream": "stdout", 
1200 | "text": [ 1201 | "\n", 1202 | "iteration 1400 / 1500: loss 1.920719" 1203 | ] 1204 | }, 1205 | { 1206 | "output_type": "stream", 1207 | "stream": "stdout", 1208 | "text": [ 1209 | "\n", 1210 | "iteration 0 / 1500: loss 6.383394" 1211 | ] 1212 | }, 1213 | { 1214 | "output_type": "stream", 1215 | "stream": "stdout", 1216 | "text": [ 1217 | "\n", 1218 | "iteration 100 / 1500: loss 2.260407" 1219 | ] 1220 | }, 1221 | { 1222 | "output_type": "stream", 1223 | "stream": "stdout", 1224 | "text": [ 1225 | "\n", 1226 | "iteration 200 / 1500: loss 2.026473" 1227 | ] 1228 | }, 1229 | { 1230 | "output_type": "stream", 1231 | "stream": "stdout", 1232 | "text": [ 1233 | "\n", 1234 | "iteration 300 / 1500: loss 2.058813" 1235 | ] 1236 | }, 1237 | { 1238 | "output_type": "stream", 1239 | "stream": "stdout", 1240 | "text": [ 1241 | "\n", 1242 | "iteration 400 / 1500: loss 2.066017" 1243 | ] 1244 | }, 1245 | { 1246 | "output_type": "stream", 1247 | "stream": "stdout", 1248 | "text": [ 1249 | "\n", 1250 | "iteration 500 / 1500: loss 2.173727" 1251 | ] 1252 | }, 1253 | { 1254 | "output_type": "stream", 1255 | "stream": "stdout", 1256 | "text": [ 1257 | "\n", 1258 | "iteration 600 / 1500: loss 2.120332" 1259 | ] 1260 | }, 1261 | { 1262 | "output_type": "stream", 1263 | "stream": "stdout", 1264 | "text": [ 1265 | "\n", 1266 | "iteration 700 / 1500: loss 1.982146" 1267 | ] 1268 | }, 1269 | { 1270 | "output_type": "stream", 1271 | "stream": "stdout", 1272 | "text": [ 1273 | "\n", 1274 | "iteration 800 / 1500: loss 2.005600" 1275 | ] 1276 | }, 1277 | { 1278 | "output_type": "stream", 1279 | "stream": "stdout", 1280 | "text": [ 1281 | "\n", 1282 | "iteration 900 / 1500: loss 1.954096" 1283 | ] 1284 | }, 1285 | { 1286 | "output_type": "stream", 1287 | "stream": "stdout", 1288 | "text": [ 1289 | "\n", 1290 | "iteration 1000 / 1500: loss 1.770794" 1291 | ] 1292 | }, 1293 | { 1294 | "output_type": "stream", 1295 | "stream": "stdout", 1296 | "text": [ 1297 | "\n", 1298 | "iteration 1100 / 1500: loss 1.755688" 1299 | ] 1300 | }, 1301 | { 1302 | "output_type": "stream", 1303 | "stream": "stdout", 1304 | "text": [ 1305 | "\n", 1306 | "iteration 1200 / 1500: loss 1.975574" 1307 | ] 1308 | }, 1309 | { 1310 | "output_type": "stream", 1311 | "stream": "stdout", 1312 | "text": [ 1313 | "\n", 1314 | "iteration 1300 / 1500: loss 1.728263" 1315 | ] 1316 | }, 1317 | { 1318 | "output_type": "stream", 1319 | "stream": "stdout", 1320 | "text": [ 1321 | "\n", 1322 | "iteration 1400 / 1500: loss 1.804383" 1323 | ] 1324 | }, 1325 | { 1326 | "output_type": "stream", 1327 | "stream": "stdout", 1328 | "text": [ 1329 | "\n", 1330 | "iteration 0 / 1500: loss 5.699174" 1331 | ] 1332 | }, 1333 | { 1334 | "output_type": "stream", 1335 | "stream": "stdout", 1336 | "text": [ 1337 | "\n", 1338 | "iteration 100 / 1500: loss 4.114586" 1339 | ] 1340 | }, 1341 | { 1342 | "output_type": "stream", 1343 | "stream": "stdout", 1344 | "text": [ 1345 | "\n", 1346 | "iteration 200 / 1500: loss 3.564873" 1347 | ] 1348 | }, 1349 | { 1350 | "output_type": "stream", 1351 | "stream": "stdout", 1352 | "text": [ 1353 | "\n", 1354 | "iteration 300 / 1500: loss 3.412351" 1355 | ] 1356 | }, 1357 | { 1358 | "output_type": "stream", 1359 | "stream": "stdout", 1360 | "text": [ 1361 | "\n", 1362 | "iteration 400 / 1500: loss 3.583487" 1363 | ] 1364 | }, 1365 | { 1366 | "output_type": "stream", 1367 | "stream": "stdout", 1368 | "text": [ 1369 | "\n", 1370 | "iteration 500 / 1500: loss 3.101659" 1371 | ] 1372 | }, 1373 | { 1374 | "output_type": "stream", 1375 | 
"stream": "stdout", 1376 | "text": [ 1377 | "\n", 1378 | "iteration 600 / 1500: loss 3.145693" 1379 | ] 1380 | }, 1381 | { 1382 | "output_type": "stream", 1383 | "stream": "stdout", 1384 | "text": [ 1385 | "\n", 1386 | "iteration 700 / 1500: loss 3.005699" 1387 | ] 1388 | }, 1389 | { 1390 | "output_type": "stream", 1391 | "stream": "stdout", 1392 | "text": [ 1393 | "\n", 1394 | "iteration 800 / 1500: loss 2.729675" 1395 | ] 1396 | }, 1397 | { 1398 | "output_type": "stream", 1399 | "stream": "stdout", 1400 | "text": [ 1401 | "\n", 1402 | "iteration 900 / 1500: loss 2.556861" 1403 | ] 1404 | }, 1405 | { 1406 | "output_type": "stream", 1407 | "stream": "stdout", 1408 | "text": [ 1409 | "\n", 1410 | "iteration 1000 / 1500: loss 3.001767" 1411 | ] 1412 | }, 1413 | { 1414 | "output_type": "stream", 1415 | "stream": "stdout", 1416 | "text": [ 1417 | "\n", 1418 | "iteration 1100 / 1500: loss 2.830760" 1419 | ] 1420 | }, 1421 | { 1422 | "output_type": "stream", 1423 | "stream": "stdout", 1424 | "text": [ 1425 | "\n", 1426 | "iteration 1200 / 1500: loss 2.805978" 1427 | ] 1428 | }, 1429 | { 1430 | "output_type": "stream", 1431 | "stream": "stdout", 1432 | "text": [ 1433 | "\n", 1434 | "iteration 1300 / 1500: loss 2.998624" 1435 | ] 1436 | }, 1437 | { 1438 | "output_type": "stream", 1439 | "stream": "stdout", 1440 | "text": [ 1441 | "\n", 1442 | "iteration 1400 / 1500: loss 2.555056" 1443 | ] 1444 | }, 1445 | { 1446 | "output_type": "stream", 1447 | "stream": "stdout", 1448 | "text": [ 1449 | "\n", 1450 | "iteration 0 / 1500: loss 6.526394" 1451 | ] 1452 | }, 1453 | { 1454 | "output_type": "stream", 1455 | "stream": "stdout", 1456 | "text": [ 1457 | "\n", 1458 | "iteration 100 / 1500: loss 2.964467" 1459 | ] 1460 | }, 1461 | { 1462 | "output_type": "stream", 1463 | "stream": "stdout", 1464 | "text": [ 1465 | "\n", 1466 | "iteration 200 / 1500: loss 2.646672" 1467 | ] 1468 | }, 1469 | { 1470 | "output_type": "stream", 1471 | "stream": "stdout", 1472 | "text": [ 1473 | "\n", 1474 | "iteration 300 / 1500: loss 2.543586" 1475 | ] 1476 | }, 1477 | { 1478 | "output_type": "stream", 1479 | "stream": "stdout", 1480 | "text": [ 1481 | "\n", 1482 | "iteration 400 / 1500: loss 2.562548" 1483 | ] 1484 | }, 1485 | { 1486 | "output_type": "stream", 1487 | "stream": "stdout", 1488 | "text": [ 1489 | "\n", 1490 | "iteration 500 / 1500: loss 2.696087" 1491 | ] 1492 | }, 1493 | { 1494 | "output_type": "stream", 1495 | "stream": "stdout", 1496 | "text": [ 1497 | "\n", 1498 | "iteration 600 / 1500: loss 2.374014" 1499 | ] 1500 | }, 1501 | { 1502 | "output_type": "stream", 1503 | "stream": "stdout", 1504 | "text": [ 1505 | "\n", 1506 | "iteration 700 / 1500: loss 2.266463" 1507 | ] 1508 | }, 1509 | { 1510 | "output_type": "stream", 1511 | "stream": "stdout", 1512 | "text": [ 1513 | "\n", 1514 | "iteration 800 / 1500: loss 2.100029" 1515 | ] 1516 | }, 1517 | { 1518 | "output_type": "stream", 1519 | "stream": "stdout", 1520 | "text": [ 1521 | "\n", 1522 | "iteration 900 / 1500: loss 2.028599" 1523 | ] 1524 | }, 1525 | { 1526 | "output_type": "stream", 1527 | "stream": "stdout", 1528 | "text": [ 1529 | "\n", 1530 | "iteration 1000 / 1500: loss 2.215197" 1531 | ] 1532 | }, 1533 | { 1534 | "output_type": "stream", 1535 | "stream": "stdout", 1536 | "text": [ 1537 | "\n", 1538 | "iteration 1100 / 1500: loss 2.288198" 1539 | ] 1540 | }, 1541 | { 1542 | "output_type": "stream", 1543 | "stream": "stdout", 1544 | "text": [ 1545 | "\n", 1546 | "iteration 1200 / 1500: loss 2.058949" 1547 | ] 1548 | }, 1549 | { 1550 | "output_type": 
"stream", 1551 | "stream": "stdout", 1552 | "text": [ 1553 | "\n", 1554 | "iteration 1300 / 1500: loss 2.176902" 1555 | ] 1556 | }, 1557 | { 1558 | "output_type": "stream", 1559 | "stream": "stdout", 1560 | "text": [ 1561 | "\n", 1562 | "iteration 1400 / 1500: loss 2.077978" 1563 | ] 1564 | }, 1565 | { 1566 | "output_type": "stream", 1567 | "stream": "stdout", 1568 | "text": [ 1569 | "\n", 1570 | "iteration 0 / 1500: loss 5.473334" 1571 | ] 1572 | }, 1573 | { 1574 | "output_type": "stream", 1575 | "stream": "stdout", 1576 | "text": [ 1577 | "\n", 1578 | "iteration 100 / 1500: loss 2.658421" 1579 | ] 1580 | }, 1581 | { 1582 | "output_type": "stream", 1583 | "stream": "stdout", 1584 | "text": [ 1585 | "\n", 1586 | "iteration 200 / 1500: loss 2.237966" 1587 | ] 1588 | }, 1589 | { 1590 | "output_type": "stream", 1591 | "stream": "stdout", 1592 | "text": [ 1593 | "\n", 1594 | "iteration 300 / 1500: loss 2.526233" 1595 | ] 1596 | }, 1597 | { 1598 | "output_type": "stream", 1599 | "stream": "stdout", 1600 | "text": [ 1601 | "\n", 1602 | "iteration 400 / 1500: loss 2.413027" 1603 | ] 1604 | }, 1605 | { 1606 | "output_type": "stream", 1607 | "stream": "stdout", 1608 | "text": [ 1609 | "\n", 1610 | "iteration 500 / 1500: loss 2.329377" 1611 | ] 1612 | }, 1613 | { 1614 | "output_type": "stream", 1615 | "stream": "stdout", 1616 | "text": [ 1617 | "\n", 1618 | "iteration 600 / 1500: loss 2.160669" 1619 | ] 1620 | }, 1621 | { 1622 | "output_type": "stream", 1623 | "stream": "stdout", 1624 | "text": [ 1625 | "\n", 1626 | "iteration 700 / 1500: loss 1.837735" 1627 | ] 1628 | }, 1629 | { 1630 | "output_type": "stream", 1631 | "stream": "stdout", 1632 | "text": [ 1633 | "\n", 1634 | "iteration 800 / 1500: loss 2.101660" 1635 | ] 1636 | }, 1637 | { 1638 | "output_type": "stream", 1639 | "stream": "stdout", 1640 | "text": [ 1641 | "\n", 1642 | "iteration 900 / 1500: loss 2.118303" 1643 | ] 1644 | }, 1645 | { 1646 | "output_type": "stream", 1647 | "stream": "stdout", 1648 | "text": [ 1649 | "\n", 1650 | "iteration 1000 / 1500: loss 2.006285" 1651 | ] 1652 | }, 1653 | { 1654 | "output_type": "stream", 1655 | "stream": "stdout", 1656 | "text": [ 1657 | "\n", 1658 | "iteration 1100 / 1500: loss 1.803047" 1659 | ] 1660 | }, 1661 | { 1662 | "output_type": "stream", 1663 | "stream": "stdout", 1664 | "text": [ 1665 | "\n", 1666 | "iteration 1200 / 1500: loss 1.873162" 1667 | ] 1668 | }, 1669 | { 1670 | "output_type": "stream", 1671 | "stream": "stdout", 1672 | "text": [ 1673 | "\n", 1674 | "iteration 1300 / 1500: loss 1.870098" 1675 | ] 1676 | }, 1677 | { 1678 | "output_type": "stream", 1679 | "stream": "stdout", 1680 | "text": [ 1681 | "\n", 1682 | "iteration 1400 / 1500: loss 2.020179" 1683 | ] 1684 | }, 1685 | { 1686 | "output_type": "stream", 1687 | "stream": "stdout", 1688 | "text": [ 1689 | "\n", 1690 | "iteration 0 / 1500: loss 5.701622" 1691 | ] 1692 | }, 1693 | { 1694 | "output_type": "stream", 1695 | "stream": "stdout", 1696 | "text": [ 1697 | "\n", 1698 | "iteration 100 / 1500: loss 2.456894" 1699 | ] 1700 | }, 1701 | { 1702 | "output_type": "stream", 1703 | "stream": "stdout", 1704 | "text": [ 1705 | "\n", 1706 | "iteration 200 / 1500: loss 2.112120" 1707 | ] 1708 | }, 1709 | { 1710 | "output_type": "stream", 1711 | "stream": "stdout", 1712 | "text": [ 1713 | "\n", 1714 | "iteration 300 / 1500: loss 1.955361" 1715 | ] 1716 | }, 1717 | { 1718 | "output_type": "stream", 1719 | "stream": "stdout", 1720 | "text": [ 1721 | "\n", 1722 | "iteration 400 / 1500: loss 1.933045" 1723 | ] 1724 | }, 1725 | { 1726 | 
"output_type": "stream", 1727 | "stream": "stdout", 1728 | "text": [ 1729 | "\n", 1730 | "iteration 500 / 1500: loss 1.889374" 1731 | ] 1732 | }, 1733 | { 1734 | "output_type": "stream", 1735 | "stream": "stdout", 1736 | "text": [ 1737 | "\n", 1738 | "iteration 600 / 1500: loss 1.957893" 1739 | ] 1740 | }, 1741 | { 1742 | "output_type": "stream", 1743 | "stream": "stdout", 1744 | "text": [ 1745 | "\n", 1746 | "iteration 700 / 1500: loss 1.907350" 1747 | ] 1748 | }, 1749 | { 1750 | "output_type": "stream", 1751 | "stream": "stdout", 1752 | "text": [ 1753 | "\n", 1754 | "iteration 800 / 1500: loss 1.799658" 1755 | ] 1756 | }, 1757 | { 1758 | "output_type": "stream", 1759 | "stream": "stdout", 1760 | "text": [ 1761 | "\n", 1762 | "iteration 900 / 1500: loss 1.856011" 1763 | ] 1764 | }, 1765 | { 1766 | "output_type": "stream", 1767 | "stream": "stdout", 1768 | "text": [ 1769 | "\n", 1770 | "iteration 1000 / 1500: loss 1.899460" 1771 | ] 1772 | }, 1773 | { 1774 | "output_type": "stream", 1775 | "stream": "stdout", 1776 | "text": [ 1777 | "\n", 1778 | "iteration 1100 / 1500: loss 1.751583" 1779 | ] 1780 | }, 1781 | { 1782 | "output_type": "stream", 1783 | "stream": "stdout", 1784 | "text": [ 1785 | "\n", 1786 | "iteration 1200 / 1500: loss 1.866720" 1787 | ] 1788 | }, 1789 | { 1790 | "output_type": "stream", 1791 | "stream": "stdout", 1792 | "text": [ 1793 | "\n", 1794 | "iteration 1300 / 1500: loss 1.851474" 1795 | ] 1796 | }, 1797 | { 1798 | "output_type": "stream", 1799 | "stream": "stdout", 1800 | "text": [ 1801 | "\n", 1802 | "iteration 1400 / 1500: loss 1.902803" 1803 | ] 1804 | }, 1805 | { 1806 | "output_type": "stream", 1807 | "stream": "stdout", 1808 | "text": [ 1809 | "\n", 1810 | "iteration 0 / 1500: loss 5.340453" 1811 | ] 1812 | }, 1813 | { 1814 | "output_type": "stream", 1815 | "stream": "stdout", 1816 | "text": [ 1817 | "\n", 1818 | "iteration 100 / 1500: loss 4.555385" 1819 | ] 1820 | }, 1821 | { 1822 | "output_type": "stream", 1823 | "stream": "stdout", 1824 | "text": [ 1825 | "\n", 1826 | "iteration 200 / 1500: loss 3.653644" 1827 | ] 1828 | }, 1829 | { 1830 | "output_type": "stream", 1831 | "stream": "stdout", 1832 | "text": [ 1833 | "\n", 1834 | "iteration 300 / 1500: loss 3.482234" 1835 | ] 1836 | }, 1837 | { 1838 | "output_type": "stream", 1839 | "stream": "stdout", 1840 | "text": [ 1841 | "\n", 1842 | "iteration 400 / 1500: loss 3.391561" 1843 | ] 1844 | }, 1845 | { 1846 | "output_type": "stream", 1847 | "stream": "stdout", 1848 | "text": [ 1849 | "\n", 1850 | "iteration 500 / 1500: loss 2.955781" 1851 | ] 1852 | }, 1853 | { 1854 | "output_type": "stream", 1855 | "stream": "stdout", 1856 | "text": [ 1857 | "\n", 1858 | "iteration 600 / 1500: loss 2.947452" 1859 | ] 1860 | }, 1861 | { 1862 | "output_type": "stream", 1863 | "stream": "stdout", 1864 | "text": [ 1865 | "\n", 1866 | "iteration 700 / 1500: loss 2.706025" 1867 | ] 1868 | }, 1869 | { 1870 | "output_type": "stream", 1871 | "stream": "stdout", 1872 | "text": [ 1873 | "\n", 1874 | "iteration 800 / 1500: loss 2.978936" 1875 | ] 1876 | }, 1877 | { 1878 | "output_type": "stream", 1879 | "stream": "stdout", 1880 | "text": [ 1881 | "\n", 1882 | "iteration 900 / 1500: loss 2.938775" 1883 | ] 1884 | }, 1885 | { 1886 | "output_type": "stream", 1887 | "stream": "stdout", 1888 | "text": [ 1889 | "\n", 1890 | "iteration 1000 / 1500: loss 2.695621" 1891 | ] 1892 | }, 1893 | { 1894 | "output_type": "stream", 1895 | "stream": "stdout", 1896 | "text": [ 1897 | "\n", 1898 | "iteration 1100 / 1500: loss 2.463390" 1899 | ] 1900 | }, 
1901 | { 1902 | "output_type": "stream", 1903 | "stream": "stdout", 1904 | "text": [ 1905 | "\n", 1906 | "iteration 1200 / 1500: loss 2.878846" 1907 | ] 1908 | }, 1909 | { 1910 | "output_type": "stream", 1911 | "stream": "stdout", 1912 | "text": [ 1913 | "\n", 1914 | "iteration 1300 / 1500: loss 2.875995" 1915 | ] 1916 | }, 1917 | { 1918 | "output_type": "stream", 1919 | "stream": "stdout", 1920 | "text": [ 1921 | "\n", 1922 | "iteration 1400 / 1500: loss 2.842894" 1923 | ] 1924 | }, 1925 | { 1926 | "output_type": "stream", 1927 | "stream": "stdout", 1928 | "text": [ 1929 | "\n", 1930 | "iteration 0 / 1500: loss 5.907662" 1931 | ] 1932 | }, 1933 | { 1934 | "output_type": "stream", 1935 | "stream": "stdout", 1936 | "text": [ 1937 | "\n", 1938 | "iteration 100 / 1500: loss 3.176728" 1939 | ] 1940 | }, 1941 | { 1942 | "output_type": "stream", 1943 | "stream": "stdout", 1944 | "text": [ 1945 | "\n", 1946 | "iteration 200 / 1500: loss 2.591779" 1947 | ] 1948 | }, 1949 | { 1950 | "output_type": "stream", 1951 | "stream": "stdout", 1952 | "text": [ 1953 | "\n", 1954 | "iteration 300 / 1500: loss 2.836213" 1955 | ] 1956 | }, 1957 | { 1958 | "output_type": "stream", 1959 | "stream": "stdout", 1960 | "text": [ 1961 | "\n", 1962 | "iteration 400 / 1500: loss 2.658742" 1963 | ] 1964 | }, 1965 | { 1966 | "output_type": "stream", 1967 | "stream": "stdout", 1968 | "text": [ 1969 | "\n", 1970 | "iteration 500 / 1500: loss 2.448282" 1971 | ] 1972 | }, 1973 | { 1974 | "output_type": "stream", 1975 | "stream": "stdout", 1976 | "text": [ 1977 | "\n", 1978 | "iteration 600 / 1500: loss 2.207766" 1979 | ] 1980 | }, 1981 | { 1982 | "output_type": "stream", 1983 | "stream": "stdout", 1984 | "text": [ 1985 | "\n", 1986 | "iteration 700 / 1500: loss 2.223970" 1987 | ] 1988 | }, 1989 | { 1990 | "output_type": "stream", 1991 | "stream": "stdout", 1992 | "text": [ 1993 | "\n", 1994 | "iteration 800 / 1500: loss 2.238106" 1995 | ] 1996 | }, 1997 | { 1998 | "output_type": "stream", 1999 | "stream": "stdout", 2000 | "text": [ 2001 | "\n", 2002 | "iteration 900 / 1500: loss 2.239893" 2003 | ] 2004 | }, 2005 | { 2006 | "output_type": "stream", 2007 | "stream": "stdout", 2008 | "text": [ 2009 | "\n", 2010 | "iteration 1000 / 1500: loss 2.243168" 2011 | ] 2012 | }, 2013 | { 2014 | "output_type": "stream", 2015 | "stream": "stdout", 2016 | "text": [ 2017 | "\n", 2018 | "iteration 1100 / 1500: loss 2.084762" 2019 | ] 2020 | }, 2021 | { 2022 | "output_type": "stream", 2023 | "stream": "stdout", 2024 | "text": [ 2025 | "\n", 2026 | "iteration 1200 / 1500: loss 2.078790" 2027 | ] 2028 | }, 2029 | { 2030 | "output_type": "stream", 2031 | "stream": "stdout", 2032 | "text": [ 2033 | "\n", 2034 | "iteration 1300 / 1500: loss 1.955464" 2035 | ] 2036 | }, 2037 | { 2038 | "output_type": "stream", 2039 | "stream": "stdout", 2040 | "text": [ 2041 | "\n", 2042 | "iteration 1400 / 1500: loss 1.790405" 2043 | ] 2044 | }, 2045 | { 2046 | "output_type": "stream", 2047 | "stream": "stdout", 2048 | "text": [ 2049 | "\n", 2050 | "iteration 0 / 1500: loss 5.816207" 2051 | ] 2052 | }, 2053 | { 2054 | "output_type": "stream", 2055 | "stream": "stdout", 2056 | "text": [ 2057 | "\n", 2058 | "iteration 100 / 1500: loss 2.800041" 2059 | ] 2060 | }, 2061 | { 2062 | "output_type": "stream", 2063 | "stream": "stdout", 2064 | "text": [ 2065 | "\n", 2066 | "iteration 200 / 1500: loss 2.402860" 2067 | ] 2068 | }, 2069 | { 2070 | "output_type": "stream", 2071 | "stream": "stdout", 2072 | "text": [ 2073 | "\n", 2074 | "iteration 300 / 1500: loss 2.303943" 2075 
| ] 2076 | }, 2077 | { 2078 | "output_type": "stream", 2079 | "stream": "stdout", 2080 | "text": [ 2081 | "\n", 2082 | "iteration 400 / 1500: loss 2.432407" 2083 | ] 2084 | }, 2085 | { 2086 | "output_type": "stream", 2087 | "stream": "stdout", 2088 | "text": [ 2089 | "\n", 2090 | "iteration 500 / 1500: loss 2.122186" 2091 | ] 2092 | }, 2093 | { 2094 | "output_type": "stream", 2095 | "stream": "stdout", 2096 | "text": [ 2097 | "\n", 2098 | "iteration 600 / 1500: loss 2.237336" 2099 | ] 2100 | }, 2101 | { 2102 | "output_type": "stream", 2103 | "stream": "stdout", 2104 | "text": [ 2105 | "\n", 2106 | "iteration 700 / 1500: loss 2.290283" 2107 | ] 2108 | }, 2109 | { 2110 | "output_type": "stream", 2111 | "stream": "stdout", 2112 | "text": [ 2113 | "\n", 2114 | "iteration 800 / 1500: loss 2.069125" 2115 | ] 2116 | }, 2117 | { 2118 | "output_type": "stream", 2119 | "stream": "stdout", 2120 | "text": [ 2121 | "\n", 2122 | "iteration 900 / 1500: loss 2.070238" 2123 | ] 2124 | }, 2125 | { 2126 | "output_type": "stream", 2127 | "stream": "stdout", 2128 | "text": [ 2129 | "\n", 2130 | "iteration 1000 / 1500: loss 2.057821" 2131 | ] 2132 | }, 2133 | { 2134 | "output_type": "stream", 2135 | "stream": "stdout", 2136 | "text": [ 2137 | "\n", 2138 | "iteration 1100 / 1500: loss 2.076778" 2139 | ] 2140 | }, 2141 | { 2142 | "output_type": "stream", 2143 | "stream": "stdout", 2144 | "text": [ 2145 | "\n", 2146 | "iteration 1200 / 1500: loss 1.875496" 2147 | ] 2148 | }, 2149 | { 2150 | "output_type": "stream", 2151 | "stream": "stdout", 2152 | "text": [ 2153 | "\n", 2154 | "iteration 1300 / 1500: loss 2.014139" 2155 | ] 2156 | }, 2157 | { 2158 | "output_type": "stream", 2159 | "stream": "stdout", 2160 | "text": [ 2161 | "\n", 2162 | "iteration 1400 / 1500: loss 1.934812" 2163 | ] 2164 | }, 2165 | { 2166 | "output_type": "stream", 2167 | "stream": "stdout", 2168 | "text": [ 2169 | "\n", 2170 | "iteration 0 / 1500: loss 5.252469" 2171 | ] 2172 | }, 2173 | { 2174 | "output_type": "stream", 2175 | "stream": "stdout", 2176 | "text": [ 2177 | "\n", 2178 | "iteration 100 / 1500: loss 2.476320" 2179 | ] 2180 | }, 2181 | { 2182 | "output_type": "stream", 2183 | "stream": "stdout", 2184 | "text": [ 2185 | "\n", 2186 | "iteration 200 / 1500: loss 2.254365" 2187 | ] 2188 | }, 2189 | { 2190 | "output_type": "stream", 2191 | "stream": "stdout", 2192 | "text": [ 2193 | "\n", 2194 | "iteration 300 / 1500: loss 2.172740" 2195 | ] 2196 | }, 2197 | { 2198 | "output_type": "stream", 2199 | "stream": "stdout", 2200 | "text": [ 2201 | "\n", 2202 | "iteration 400 / 1500: loss 2.164184" 2203 | ] 2204 | }, 2205 | { 2206 | "output_type": "stream", 2207 | "stream": "stdout", 2208 | "text": [ 2209 | "\n", 2210 | "iteration 500 / 1500: loss 1.912648" 2211 | ] 2212 | }, 2213 | { 2214 | "output_type": "stream", 2215 | "stream": "stdout", 2216 | "text": [ 2217 | "\n", 2218 | "iteration 600 / 1500: loss 2.059137" 2219 | ] 2220 | }, 2221 | { 2222 | "output_type": "stream", 2223 | "stream": "stdout", 2224 | "text": [ 2225 | "\n", 2226 | "iteration 700 / 1500: loss 1.909939" 2227 | ] 2228 | }, 2229 | { 2230 | "output_type": "stream", 2231 | "stream": "stdout", 2232 | "text": [ 2233 | "\n", 2234 | "iteration 800 / 1500: loss 1.779846" 2235 | ] 2236 | }, 2237 | { 2238 | "output_type": "stream", 2239 | "stream": "stdout", 2240 | "text": [ 2241 | "\n", 2242 | "iteration 900 / 1500: loss 1.919529" 2243 | ] 2244 | }, 2245 | { 2246 | "output_type": "stream", 2247 | "stream": "stdout", 2248 | "text": [ 2249 | "\n", 2250 | "iteration 1000 / 1500: loss 
1.675232" 2251 | ] 2252 | }, 2253 | { 2254 | "output_type": "stream", 2255 | "stream": "stdout", 2256 | "text": [ 2257 | "\n", 2258 | "iteration 1100 / 1500: loss 1.729700" 2259 | ] 2260 | }, 2261 | { 2262 | "output_type": "stream", 2263 | "stream": "stdout", 2264 | "text": [ 2265 | "\n", 2266 | "iteration 1200 / 1500: loss 1.948204" 2267 | ] 2268 | }, 2269 | { 2270 | "output_type": "stream", 2271 | "stream": "stdout", 2272 | "text": [ 2273 | "\n", 2274 | "iteration 1300 / 1500: loss 1.828781" 2275 | ] 2276 | }, 2277 | { 2278 | "output_type": "stream", 2279 | "stream": "stdout", 2280 | "text": [ 2281 | "\n", 2282 | "iteration 1400 / 1500: loss 1.691668" 2283 | ] 2284 | }, 2285 | { 2286 | "output_type": "stream", 2287 | "stream": "stdout", 2288 | "text": [ 2289 | "\n", 2290 | "lr 1.000000e-07 reg 1.000000e-05 train accuracy: 0.235367 val accuracy: 0.249000" 2291 | ] 2292 | }, 2293 | { 2294 | "output_type": "stream", 2295 | "stream": "stdout", 2296 | "text": [ 2297 | "\n", 2298 | "lr 1.000000e-07 reg 5.000000e-05 train accuracy: 0.246633 val accuracy: 0.242000\n", 2299 | "lr 1.000000e-07 reg 1.000000e-04 train accuracy: 0.238898 val accuracy: 0.236000\n", 2300 | "lr 1.000000e-07 reg 5.000000e-04 train accuracy: 0.250633 val accuracy: 0.272000\n", 2301 | "lr 5.000000e-07 reg 1.000000e-05 train accuracy: 0.315694 val accuracy: 0.325000\n", 2302 | "lr 5.000000e-07 reg 5.000000e-05 train accuracy: 0.323816 val accuracy: 0.322000\n", 2303 | "lr 5.000000e-07 reg 1.000000e-04 train accuracy: 0.317082 val accuracy: 0.306000\n", 2304 | "lr 5.000000e-07 reg 5.000000e-04 train accuracy: 0.318347 val accuracy: 0.298000\n", 2305 | "lr 1.000000e-06 reg 1.000000e-05 train accuracy: 0.347204 val accuracy: 0.359000\n", 2306 | "lr 1.000000e-06 reg 5.000000e-05 train accuracy: 0.346286 val accuracy: 0.348000\n", 2307 | "lr 1.000000e-06 reg 1.000000e-04 train accuracy: 0.346653 val accuracy: 0.341000\n", 2308 | "lr 1.000000e-06 reg 5.000000e-04 train accuracy: 0.347980 val accuracy: 0.337000\n", 2309 | "lr 5.000000e-06 reg 1.000000e-05 train accuracy: 0.392653 val accuracy: 0.355000\n", 2310 | "lr 5.000000e-06 reg 5.000000e-05 train accuracy: 0.383551 val accuracy: 0.353000\n", 2311 | "lr 5.000000e-06 reg 1.000000e-04 train accuracy: 0.390245 val accuracy: 0.356000\n", 2312 | "lr 5.000000e-06 reg 5.000000e-04 train accuracy: 0.386510 val accuracy: 0.352000\n", 2313 | "best validation accuracy achieved during cross-validation: 0.359000\n" 2314 | ] 2315 | } 2316 | ], 2317 | "prompt_number": 14 2318 | }, 2319 | { 2320 | "cell_type": "code", 2321 | "collapsed": false, 2322 | "input": [ 2323 | "# evaluate on test set\n", 2324 | "# Evaluate the best svm on test set\n", 2325 | "y_test_pred = best_softmax.predict(X_test)\n", 2326 | "test_accuracy = np.mean(y_test == y_test_pred)\n", 2327 | "print 'softmax on raw pixels final test set accuracy: %f' % (test_accuracy, )" 2328 | ], 2329 | "language": "python", 2330 | "metadata": {}, 2331 | "outputs": [ 2332 | { 2333 | "output_type": "stream", 2334 | "stream": "stdout", 2335 | "text": [ 2336 | "softmax on raw pixels final test set accuracy: 0.336000\n" 2337 | ] 2338 | } 2339 | ], 2340 | "prompt_number": 15 2341 | }, 2342 | { 2343 | "cell_type": "code", 2344 | "collapsed": false, 2345 | "input": [ 2346 | "# Visualize the learned weights for each class\n", 2347 | "w = best_softmax.W[:,:-1] # strip out the bias\n", 2348 | "w = w.reshape(10, 32, 32, 3)\n", 2349 | "\n", 2350 | "w_min, w_max = np.min(w), np.max(w)\n", 2351 | "\n", 2352 | "classes = ['plane', 'car', 'bird', 'cat', 
'deer', 'dog', 'frog', 'horse', 'ship', 'truck']\n", 2353 | "for i in xrange(10):\n", 2354 | " plt.subplot(2, 5, i + 1)\n", 2355 | " \n", 2356 | " # Rescale the weights to be between 0 and 255\n", 2357 | " wimg = 255.0 * (w[i].squeeze() - w_min) / (w_max - w_min)\n", 2358 | " plt.imshow(wimg.astype('uint8'))\n", 2359 | " plt.axis('off')\n", 2360 | " plt.title(classes[i])" 2361 | ], 2362 | "language": "python", 2363 | "metadata": {}, 2364 | "outputs": [ 2365 | { 2366 | "metadata": {}, 2367 | "output_type": "display_data", 2368 | "png": "<base64 PNG data omitted: a 2x5 grid of learned weight templates, one panel per CIFAR-10 class (plane, car, bird, cat, deer, dog, frog, horse, ship, truck)>"
EEIIIfygBZMQ\nQgghhB+0YBJCCCGE8IMWTEIIIYQQftCCSQghhBDCD1owCSGEEEL4QQsmIYQQQgg/aMEkhBBCCOEH\nLZiEEEIIIfygBZMQQgghhB+0YBJCCCGE8IMWTEIIIYQQftCCSQghhBDCD1owCSGEEEL4QQsmIYQQ\nQgg/aMEkhBBCCOEHLZiEEEIIIfygBZMQQgghhB+0YBJCCCGE8IMWTEIIIYQQftCCSQghhBDCD1ow\nCSGEEEL4QQsmIYQQQgg/aMEkhBBCCOEHLZiEEEIIIfygBZMQQgghhB+0YBJCCCGE8IMWTEIIIYQQ\nftCCSQghhBDCD1owCSGEEEL4QQsmIYQQQgg/aMEkhBBCCOEHLZiEEEIIIfygBZMQQgghhB+0YBJC\nCCGE8IMWTEIIIYQQftCCSQghhBDCD1owCSGEEEL4QQsmIYQQQgg/aMEkhBBCCOEHLZiEEEIIIfyg\nBZMQQgghhB+0YBJCCCGE8IMWTEIIIYQQftCCSQghhBDCD1owCSGEEEL4QQsmIYQQQgg/aMEkhBBC\nCOEHLZiEEEIIIfygBZMQQgghhB+0YBJCCCGE8IMWTEIIIYQQftCCSQghhBDCD1owCSGEEEL4QQsm\nIYQQQgg/aMEkhBBCCOEHLZiEEEIIIfygBZMQQgghhB+0YBJCCCGE8IMWTGbmOM6DjuN89Z0uh/if\n4zjONMdxjjuO0+E4zqfe6fKI3w/Hccodx1n3TpdD/PFwHOcrjuP88m3+/bTjOFf9Mcsk/vg4jjPi\nOE7eO12O/w1B73QBhPgD+byZbXNdd/47XRAhhF/ct/wH1531xyyIeGscxyk3swdc1912BW7/ln3g\n/zryMIk/dbLN7Mzv+gfHcdS//3+M4ziB73QZhHi3MQHjzpmQgrwDvCs/KI7jzHcc5+iYjPOEmYX6\n/NtHHMcpdRyn2XGc5xzHSfX5t2sdxznnOE6b4zg/dBxnh+M4978jLyHMcZytZrbWzH7oOE6n4ziP\nOo7zI8dxtjiO02VmaxzHKXQcZ/tYm51yHOcmn9+PdxznxbF+cNBxnH90HGf3O/ZC7z7mO45zYqxt\nHnccJ8TM7xgccRznE47jlJhZydjPvuM4TsNYO55wHGfG2M9DHMf5puM4lY7j1I31jUnvyJu+y3Ac\n5wuO49SMjcuzjuOsHfunSY7jPDT281OO4yzw+R1Pph2T7550HOeJsWuPOI4z5x15mXcZjuM8bGZZ\nZvbSWN1/bmzc3e84TqWZbXUc52rHcap/6/d82y/AcZy/dRznwti4POw4TvrveNYqx3Gq/lSk2Hfd\ngslxnGAze9bMHjKzeDN70sxuH/u3tWb2NTN7j5mlmlmVmT0x9m+JY9d+wcwSzOy8mS3/Ixdf+OC6\n7noz221mn3BdN9rMBszsTjP7R9d1o8zskJm9aGavmlmSmX3azB51HCd/7BY/MrMuM5tsZveZ2Qft\nT9hd/CfIHWZ2rZnlmtlcM7vv7cagDzeb2WIzm+E4zrVmttrMprquG2Nm7zWzlrHr/tXMpprZnLH/\nTzezL1/JFxKj+wrN7JNmtnBsXF5nZhVj/3yTmT1mZjE2OjZ/+Da32mxmvzKzODN73Myek1fxyuO6\n7gdsdNzdMNZ+vx77p6vMrNBG29Ps7efK/2dm7zOzjWPj8n4z6/G9wHGcjWb2qJnd6rrurol7gyvH\nu27BZGbLzCzIdd1/d1132HXdp83s8Ni/3W1m/+W67gnXdQfN7G/MbJnjOFlmtsnMTruu+7zruiOu\n6/67mTW8I28gfhtfF+/zruseGLPnmVmE67r/6rrukOu6283sJTO7c0yuu83Mvuy6br/rumdtdBEt\n/nh8z3XdBtd122304znffvcYXD42Bn/D11zX7XBdt9/MBs0s0kYXT47ruudd1/3NuPyImf3l2LXd\nZvZ1G11QiyvLsJmFmNksx3GCXNetcl23fOzf9riu+5rruq6Z/dJGF7NvxVHXdZ91XXfYzL5to0rA\nsitacuGL77zqmtlXXNftHRt3/njAzL7ouu4FMzPXdU+5rtvm8+/vNbMf2+iC6uiElfgK825cMKWZ\nWe1v/azSRjtH2phtZmZjk2yrjf5lmmZm1b/1ezVXrpjif4lvG/2uNqu00fZMstGgB982/O1rxZXF\n9w+OHhtd+KTafx+DLTbaZr+hxufft5vZD2zUU9HgOM5/OI4T6ThOkpmFm9lRx3FaHcdpNbNXbNQ7\nLK4gruteNLPPmNnfm1mj4ziP+ciq9T6X9phZ6NvsNfTG49gCq8ZGx7R4Z/iffO8yzazsbf79L8zs\n12N/qP7J8G5cMNXZ+MnXbFSvdW10IZXzmx86jhNhoxNs7djvZf7W72VcsVKK/y2+buJL9t/bLMtG\n27PJzIZsfBv+9rXij4tro22W85sf+IzBmt+6jv9w3R+4rrvIzGaYWYGZfc7Mmm30gzzTdd34sf/F\njskD4grjuu4TruuuttHxZjYqj/5P8caj4ziOjY7VSxNQPOGf3yW3+f6s20b/IDEzbyN4ks+/V5vZ\nlLe59x1mdqvjOJ/+A8v5R+XduGDab2ZDjuP8ueM4QY7j3GZmS8b+7Qkb3UcxZ2xz6NfM7IDrulVm\ntsVGXcybHccJdEZz/iS/I28gfl8OmlmP4zifH2vrNWZ2o5k97rruiJk9bWZ/7zhOmOM4hWb2gXew\nrGKUx+13j8Hf6f1zHGeR4zhLHMcJMrNeM+szs5Exj8RPzey7Y94mcxwnfWzPk7iCOKO50daObeIf\nsNF2GX6ry9/mVgsdx7ll7GP8lzbatgfe5noxcdSb2W9yJTn239upxEa9g5vGxt7f2agM+xt+Zmb/\n6DjOVDMzx3FmO44T53O/S2a23sw+7TjOx67QO0w477oF09i+iNvM7EM26uq/w0Y/nOa67lYz+5KZ\nPWOjXohcM3v/2L/95tpv2Ohfr4VmdsTMfh89V1w53i6vy6CNbjK93kbb7Admdq/ruqVjl/y5mcXa\nqPfwIRvdjKr2/OPwO9ttLO/L7xyDb/F70Ta6MGo1s3IbbedvjP3bF8zsgpkdcByn3cxeN7NpE1R+\n8dZMstH9Yk02+mFMstG9aL8L9y1sM7PnbXTjcJuN7m27dWw/k7jyfN3MvjQmZd9u/92r22lmnzCz\n/7JR72+XjfcCf9tGN4u/7jhOh40uoMJ+8+tj96g2sw1m9gXnTyTa3Bn9Q0z8TxlzEdeY2V2u6+58\np8sj/nAcx/m6mSW7rvuhd7osQrybcRznK2Y2ZSxiS4j/E7zrPEx/CM5oHqaYMangi2M/lov4TxTH\ncQocx5k9Zi+x0ciOZ97ZUgkhhPi/iI5G+Z+x3EZlm2AzKzazm3/PEEvxf5MoM3t8LIKnwcy+4bru\ni+9wmYQQQvwfRJKcEEIIIYQfJMkJIYQQQvjhikly33rPX3iuq+qFfd7PIxPXe3ZH3SnPzm7jmlqf\n055m9cV7dlPMCc+eFBxmvjidXkoIqyx
gs/6FPoJi1k/K9+yzLxGlnDtEAtKs9Ku55zKeV7SXQjVl\nnvbs+fEknr3YfMizZ+QSYRndWuDZJZdf8+yBwvFJbnuOkqUguZP3S0x/2LMPlJK3bUnQbZ59ziGz\n/LnMQc/e8pUnJuSgww+9/jWvPZPbSFdU5BR59gd7I/h5LUrlcO+IZ1fV0Z735JDD7uyhcs8esUTP\nTruRdn26iffaENTq2TnTOeT8sS3nPbvtbp618MEW86U1j/uezvykZy85wn0jY2jnkmyendtA2268\n1OzZr8TSX0ZGeIepNtOze5rIm3is9RHPzp+81LMbF1/n2VlP/cKzP/fazyekLb/8sV97bZk4/5j3\n83MXBjy7Pi3Ss2+tjfXsrTFdnj00NNmzky/Q9kmbdox73rm9HBMVP3WqZ88M+ppnH8rM9eyUX2E3\nT+d5kzsrPDtkkBMymlZyTc8O0mq1T7ns2XPraO+etSso99ELnj0y1MGzolPGvcO+ykbPjmhj2qz6\nG97n5v9H7MdTG7jmU5WMly2rmGv+9c7vTEh7bqi63WvPjz892/t52IUcz/56COW5Pp9cgf3n5nm2\nm77Xs+t29Hr2DWGk13kmfciz067p9Oy5JxkTLx8l1VXaXYyJKY38bnkLYzazFtvMbGY9xzmeWE0q\nn8NRzJerY57z7H3NtPOkjlWenVTFHN9eEOXZ0y5iH+g/59lxG1Fbjm75C8/edPFHnn2u4C7Pvtf5\nd8/e+J+/nJC2/Kd1X2KeXb/B+3lDLH2zvXO7Z5+JvN2zV5057Nkno5mvgkM4mzzY5ftR2801c7Jp\ns4HDvEpyOvNm8EiwZ1/oZ37Ij+n2ee7cce/TlM28EFO1xbPnBlGPZ5vpF1MzeLf8w4ybvbGlnh0+\nY7pnx+5lPJ2/Nc+zl/8jv3tmzuc9e2Y+64+oMAIumy+y/vjM97/+lm0pD5MQQgghhB+umIcpYnq0\nZ4fvLPbsxjmslLODebzj80dG0+srPXvPewhaSq3iOKnY0+NXsqdz+Iso7Th/EW2YiRcr6hR/Kffc\nd9Gzt7y6yLNDE5717LWteHxywvAYZCfx19Sek3iebpsR59nPFfOeaWvwnjQ89FeenXGKlbmZWeOi\n5z17wRDeo+bWW/mdKVWeHdD6imdnpuI9i7g4/q/jiWDS9/lrpGsGdZq3Bk9S2U7eP7SJv0YSllDv\niee2evbOMv66HVpE/V7O46+DgL3swS5YRIL2MhwjNnAZj+J7Q/nLJ73IO2HDvrn6lnHvk1BU59m3\nnd3j2Z3xL3h2VDyekQ1dnKbTVknf/v4gz/vYFO/gdSsK5C+inCD641cT8IYsbaGfB7RwTcOBH3i2\ncx1/fU0U9Vn8BT8UgkciJxIvTM6PeG7R1dTJdJw/9sIw6ZGycnd49pO7xidM/+QGxubRU9/z7KTu\nVM/Oq+ea+ug3PTs5nr+yo0Lwwrn5PC+9eqNn9xW0e/aKKsb4zibGR9NWvII3DdKutZHURdC28Un8\nB9bwF2jQLDyAzd/5vmefn85csPY83ooTgfx84fO880SdanfvI2s9e+d+vKoLluDBvHUqnpreF/By\nJs7Fo3osEo9MShh/iZ8IwXOxahbemR/7eNpGGld79g0LefeqS3gYTu8q8ezWj5/07KnP4iEyM/tu\nMGPBuYw3oTINj86cy7ynnWCcvm8W7/P0MspXUIY3e67D3DwSiaJweBgP65oVX/DsoFl8BwbKn/Ts\nV6voR/TAPwz3X5g3L339F57dnXKPZ7+QSZ1eX/Vf/PI11Mnkw/T32Ai+m5NDOY0oLZJ+mdbA3Hp0\nCe/YUs23Z8l05vqNgYyVN5qZr2adHb+kONTBvH5zAKpJd/tPPHuxQ1/b1knbtIXiUUw5h1c0cCae\nzYBA5oRlf8Gcdfmqv/Xsq9du9uySYjxYu8oY4/0ZlPMz9tbIwySEEEII4QctmIQQQggh/HDFJLno\nRuSZrJW43EJ8NlgnNe7w7P7IBzz7qljWcedrcbcGRHH4ccLk8Zt4BxNDPTtlEpuMW1twNS5oYlPj\nxQtIAEvm+mxau4i7OjmQDYQxvTz7Qieby9aXolGcDcBVGrYQ9+7GV3ETn099yrOdU2yaNTOLCcPV\nvz+RMyYLQnFZt6ZSf8OP5Hh2qUudzUkbXzcTQX4C7Zk8Ddfsa68igQxmUe+zRmjzljo2w5+cgos9\nZLZPCqvjlHn6Pur0uTzk0s2Z1INTttizu7KPe3Z9Na7o1wa4T+jwvnHvk9bPdT15uHj7j6zx7A6f\nTcBTohd6duA5hk3SZ5EZzhZx/fFGZKnXuyj3XWuRPZwLuNAjprAZs6UFGSowEjlkosiopj9mliMR\nHoqhLTu+imQ5Uvs+z85pYpN0Tx/Xt9YwJlbHjT8fddchxlQ6v2LPJPPsvs3v8ey1z7zu2Z2TqNPB\nxiOefXH7NZ4dX4krPj4XyevSIPWea7R3/nQ2+hYdp0BnQinPB/4Z172ZWdgrlCOkjHkkN5n5pfQS\n2wQSQtlKUDKTuaClmk3iE0XxUSTTwrS7eVYU7z9yAkk9dRD7WGCPZ58bQCJOvwHpJi+m0LN3vUK9\n3LQGObptNvLU9t0EOfRnPeHZ3R9jDln3IvPVkQXrxr3Pmje2eXbPFKQ0p5pxEVPHfNw8l/fcWk4f\n6Q/nd4sb2eLwy1Vcc0sD16wuIjhn8Qb6/CP/wBaHZXOQ7wvmI+NMFMu/SF/+/lQkrE0P0OfuPcOW\nkAsXkdEH0ujLEfG067ydbOO4eAvjICgJ+e9gO9sXsvYx51ZvoH93HyWo4ekk+kfuXMqwfy8yuJnZ\nsmHmzeIQvl3FUcyhfY3I3TfMoV+UBtOn4iuZX2oucX33LciK6+Yzrg/Wf9Cz632+Xa1z6LNBMdiR\nRwheejvkYRJCCCGE8IMWTEIIIYQQfrhiktypHFx8kYZrLagDd1/x0ns9+/RkXHnrAskJUduJJBH4\nKm65rigiF8zMKvOQGZL6iHxYePEhzy6aigRWM4n8FdN27ffsDV3c95H+lz07O4uogDk/J7fT2Wv5\n3eFGXJMx1bgsf70Jl27sI+zMz0oZn3/E6dzh2YHDuBpfTiDK7Na9PNu5CvlhwMV12lHfZBPNiVXk\nrMgeICdNdzIu5OptuOLzZr3h2cV9pLXIW4U79cI+ypwbRWTMvsTHPPv2i0g1u+fxrI/MJtLrp7uo\n9+DJuFYHqpFdN3eSt8fM7PXZuKOnbae+Su4jwmfk2zzvlffTnnNuIuok+hRu3QPZ9J2RVp73/l4i\n/ZrbPuHZF+9ASknr5ln5p5H5eiOQGyeKgCzGUchLSGyv/xn1ePth3iUsB1d8eyb1cNNJ2m/fQsZs\nbgVueDOzrDJk2N7Z1NfMEaIbz79AzquT4YyRlo5sz449g7R153KurwpFCt4dSD0mTWKsdE1GCp80\nfLNnl+XTj/4+vcKzf3SCfmpmlnEWuWp6JnW2Iw8p+XKMT36jqYTAJVTwjLZhIlsnKkxu0jKiic7F
\nUe6Ro8T7HN/A/PoPZdR7GWqFbfoGbVM5i3k3ouDTnp26Hnkn9SBzQnAC8vrLC37p2ct6/9KzAx/+\noWc3Dr3Xs1t3UJ9mZstCabdnf8zcMX85fW93ho8ka0TeJi1BJut/gmdEfBGJ6q5n+ewNR/I+rxlt\nfPR5pLqQ24hyTm856NlFcXxPbrSJoSSXPHIzE9hSUHQMSS6oDLnpujnIqx0PEaX75rXIedeWvOTZ\nP+lmLN51im9mbB7zYWIV35XOF37m2Us28l09c455snUL2yOWvoe528ysfjt1NzkSaf7qDLYm5G/l\nebvX8E0cCWAezPwcawg7wpwQ8ipbLZ44ybyx9g5k/ZrSJZ49NJvxuvh7fGMDXPrB2yEPkxBCCCGE\nH7RgEkIIIYTwwxWT5DpKcJM3rsVlXFGEVLEsi6RxM4uIVDqbjpQyoxl3cOdHkTAm1xO1YmbW+33c\ndItX4MqrSSVh2/FY3LsRk3jG2QyOSnCqkZJu3YoLsfOjJCI8+EGigGIuI0NFTyWbYs2TJNSaHoKr\nPiqa99+ViIvZzCwljXdIf466sanzPbOrHmlhaBCXcOl6yvdAKC73iSKhmvT6Nf3U6bpU6rFkCi76\n3em0eXojEl70PuSvBem4ZWv7eMcZXUgVA8t+7tlpT5Ggr2gV7744EPkgPmCTZ5e9l3Z69F+JDjEz\ny8qi7p9Zh+t70wlc4jHTl3t26sAOz26pxw0eUYXEEnYe+WF4Je0XPMAxAI0NSCMLX0Cq3dVOhFbp\nh5DGsi5xbMtE4TTTTuf/kXF31S9x6fcPUP7kVUhhl1KIBA24jNu/8DRlvuvSeKn5W0ZkUaoh+/RU\n0E8Tk3l2RhBy2ytHkWpTunGhP3yC64NTaYPmDMoxZzYRXSdPMrbCakmMOTOIfretmj419FvzS99U\n3ruvFRk+Ohx5pPNFopS6+nh2YDNyfuhM+vxEUX+KvhMYR8RYzzCRZBsvEun2T7lcHx3B/HXN9YyJ\n2HKiPKOWIgsP7KP9dnQS8RcYxlhbsQvJN+hjSFjpLcwbASEk6b23YMa49znchnxWfpp+mBRHNHPP\nIG2+opo5tTKa9k9cTz8afpx5qj2VPr/dvd6z53UjyywPRdo8fBqZ97UskiBmhk58guCX6pEaN63i\nuCsrvdYz3RHKUxt5wLPrBpC/pp9h28DTsYyhe0L5Rp3OZ/yG9yBfnolmDuxL4dt40ue4q+UoexZw\nFmnv+DN8M83MZq1j7rjYRx8pjCZp8a4Y+unhBqLi3XbGzfoEJP8TkxiLBT7bVbIWcQzPUPNNnp25\nkCSpS3roE4d9joM5Fsuc9Sl7a+RhEkIIIYTwgxZMQgghhBB+uGKSXOMU3Nuzm9mBXzGX3fXBLbiw\nR/Jwj83biQsxajHuutIGEosV+5yObWaWOpXkiKdDcNF2LsaNG1fJ78+o4pyltqEPeHZEHmclzYjD\njb2nEVdewiTepySSCI+L6cgt967CzTzSyDWnAvjdRYZMYGbW1IwM8PQduL7/8xBu1xc3+kSZVeFm\nvsPn3d4IxnWKw/kPYzAGifXGSs6S2+2TEPC6HUQDnVrEu5X1cH3dIC7XaU/g0j95M27dzUMkTbxQ\ngvv1RCx10nCMOgkcJnrjkx1IElu+g9ywOpmkZmZmJ+YRTbO5H5ftsslEhz23GDnQaaGsxyuRP28t\n5FTzSX1Eo+SdIlKsKxlXf8wJ+umOKKTA1TOIDBo6jWRWdYE6tXs+ahPB8alIYQt3IL1MvZbx2OCS\n3K+uhuiTq5+iX+8aQaYtzmL8bikiqs7MLDMeKTwwhSjGoUh+nt5KoseelR/y7NxBH4l0D/U1eA+y\nzdwfID8ExTB+S54mGej8KUjkfdVPe/ahGbzPZ3xkpZ5rmHfMzCL2899vFiAH5o4gNxfei4TS0YVU\nV32aMVKwjvafKLL3ftizh/8S+SzwDJFFbceQ8ldOZlaIfRgZ4/k7faKMplCnyQf4TDQHIrmkhPC7\nC7aQOPilD/OO6e2cspawbJlnXzjN96HjvE8fN7OaLp6x8X4iMR9z0IE2naL9g8OYE6szmF9zH+UM\n0/P5tEFwCs/O/QnJFG9NoP9XvJe5dVIN0XNtZ4me25zzK59ST0yc3PpP0J+KzyBbLSlFRjzv8F6B\nuUhmXQuQPMMr2KYwdznft+BSpOLwQOrzXO+Dnp0XxraGxvnMFUXdbC0J62AszjiJlBvgI9WZmTWm\nIvsl7WJsvu4iTS/JYoxnhRExl+QyV7x8kDVBWz/nggZ9/GHPTj/Ew0+VbfHsm3/EN+ToTOTidT4R\nhtP2/X7JnuVhEkIIIYTwgxZMQgghhBB+uGKS3NXLcJl+tx454++SiXqre/OvPHtqHG65i4txy4VW\nc65QdAI72W+NxDVqZvbYeuSahc0kxxw8tcOztxUijcQfRK5pn44cFNKGfNaFR99KDuFaXJ+DHJSW\ngxt79UnOBosvJBLFqcdV+Is63id3yfh3mHIaaSi0GJf4/UG4u28NwD1cUEK0z3NzKd/m5vEu7okg\neSkyX4fPeW3F4bixw1Z/07PTO3Z4dvhxym9TiTwLvwnJ698O4ir98RLaIK4YqXHBMrLsXbWfM7NK\nphAx880+ZM15d9MeDcW4Zc3M+rfjct8+ExlvKIV+m7IfWbEqG3f3jDm4x79XRl/Iuw55J/gVXP17\nkpEkb1lPX5jSiFxc1E90ZmE1cljSZc60mijSq3iXvnRc0WX7iELc0EmdPHQ15UzvR3rLmc847XHo\nB02Pj/87LPUIkTntX0eqHPo7nt3yPMnlOo7Rbt2XmTtWzvyWZ7e+SLK8+kVExSYMUL+qbp/kAAAg\nAElEQVTZs5FJvnEeSfamYQb2Az0/8ew3s6iXuJ+OT1zZm8d5Z1Pqj3LdFrYGDM9BpnDjGOcReSQu\n7dyP9Gocb/YHcfAWIs5uLUY6nzaZyKeztUSPnc2jb0YEY6/fwRjPuxH55Nf19PHOFN43uTnas3d8\ni+iuhu8ja+ekMGYD44nu6golsXH8EeRSM7PBm+lvD19GupmRgBTue/bkUCKRaxvf5L6HM5DqC6KR\nKmu7kIIXb2De3D9M3+5ORrarqEF6To1lLntjALnxVpsYag9Q/tJh2jJsLTJw83akpOm7kcMmhTJv\npKUTVVdxGgmvPgJZbeQk7ZSRy9mfZ5p8vh8NvG9iL1JucA3bD5L/kjNQu3aM/6YdeZ0+tfkccumS\nKMbjj6uZE1bfxtaXpjMMkODTPtG8y4kG7H6Na3ZvJYFpzl0+ke+pzL9xLu+8a4DklktD/5/9PsjD\nJIQQQgjhBy2YhBBCCCH8cMUkueP7cHt+yOdMmOGdX/DsuOkkNTuyZbVn125EjrrtCDJV5SWSVB1u\nQkYzM7vK50yz3Xm4X1e8iayS18u5cknXkbAuPxzX8qsurv4DbUQILNjI/Q+mI1d0+siNX3sVd9+j\nMUg+ZTtxp35+I/KEveSTnNLMTsbhWm6eQoLHB15GAtg
xBxfprBnIGImnacqYDFy2E0XPL3FRF2/m\n/tc+hhu7IeufPPvlFmSPa9fgBg4Px9X/xAne5ZjhGp/ZTmRM0w1IABWDP/LstggixiJH6BerQuhH\nMS8gi2SOcE8zs9cS6T/L2omqnHeJvvcFbmXTi5Au0gvo2x+Oov1tu08Ct1DqJXzXHZ49tOgHnn3u\nJBGD4Ytoe3cyD652kZonioGLlLl/Cm0wu4Gon/LFtN+UA0QtlaUxVpbvRhZZk0tf3rmH9zUz6+yl\n/94yl379cDt9KuZmxnnCeSSEtB7c/UUO5z+GD9LfswKZX8JmIxM+vZ/73LCIMiw7PNezhxP+zLP7\n4nwiYbORsMzMbNa/euauyWwlWBPGWVanipBoVt7C3BGcwVxQ9QxbDyaK5Cbk5lOXkVNezMEOfuAe\nyraHZIIXwoik655EO198DSkleMb7Pbsg+F88e9Jtf+vZ8fUkJ3XzkE8m9zPP7prD2ZzLixjvUWk+\nUpuZRfUzLiyQsZnUjaRT9V7mlNBAn0jKAznY19Iny8p8kh328v5OJVLy9HYiIR9LYNzd3ISM+oMM\nxvWtVb5Rcr+fpOOPuNf4Xl33ZaI2dx5FFl15E9Hb54+xLeO+JKStnU2U3x1C2ps/l3nsdDv9Y3IK\n2yxifeacgWDm3/hWtrQ8FccWivQdlLO5NWfc+wTfQaRb8EcZ72eOMY/cXIiMd3QHUlpQMFJaxw1s\ntahLpb2Lg0jse98CIvC/NeCTfPOST1LcJiTixEwiR5/ejLSJkP3fkYdJCCGEEMIPWjAJIYQQQvjh\niklyi0JIkHW5j+iTLVNxxUaE4mabOoRrsSCec8jmbiIZ5J7DyAQhkbhGzcxKAoisuHo3kVgD9gS/\nMxW3dG8jstrIABFts8/jim6aSWK2xi4id9y+ez17RRzy33dScAEXVuCWLl9HIsK6y0RutB8fLyt2\n3kY0zdU+rsPYQFyK87qQn3pyKZPTTjTJ4X7c2DfYxNB4PXWUfpZIjhafLnQkH1fxonMkPwsKRuro\nCSaqcNVlXL9ZH3zOs8++ggyT/ZJP9FUirvHp0bjkm3wivRKn0A/ePIR0NgO11MzMis9xLtniAOSX\nby6iTq+tInKtIJ/oq+f24k7OMpIgdufhcl65l+uDl+Na/nki581tuA0X9/l6JKnUGpK0hc0Yn0Bx\nIph3+u89u/1OpMCIcqITsx362YLJyHbPZCF/tc4ggvHRy/zttdT53rjnhcZzJtSDuUS65pXirp/W\ncb9nH0ykHImRtEdBP/JXay5juegcUTMnu4i+ee88xlB7ClLr2ZlIT7kRz3p20wXmptPtOePeoXAH\nZ1PNSqW/9R9DBph0FfLW4ROce9nnEoF0Q6pP6O0EERVHRN/gIc4tXLgGCWzrBWT0so2M36wYpKrT\nOxibrfFIFGs7macqQm727IJnuc+RHKKOB6IrPLsy6peevfrxNZ6dnkyU1Us+2yDMzGKD6GP5JUSt\nVvlEHoeWICs3dCC/pGQSIZ0ygCTfUPKAZ3d1Ui9RkZSpPI35Yugs88hQOGPzugzGeFT7l2yiGVqG\n7Ni9l0krYAn13tv5Q8+OmH2fZ+/x+Z6UDyNNpwUyj+3djYSVfIZ+fHqI7+y0bhJmtg4yd5Vd4Gvy\nhXnf9+xdYYyB2LrxZ3bGkY/WiquR3kZm0E5N0fSFpCbG6ScX8y3+Ra9PUspj9M2rppGgs7eH78P8\nM0iGiyqYT57o5vpbo+gTMc5en1K/dYJgeZiEEEIIIfygBZMQQgghhB+umCRXW+9zxs0grsLQTs6B\nKcx93LOjZ7LDv/G/iJo4EoZksHku5+y81oEsYmaW1MpO/ct5uFB7duPim/0r1of9GUQjdCxDnmsJ\n4frsIiSTwaXIJH3VnJW0pRNX4TURyJAtKSQOS67AhVjl4irMvXd8lFxDPFJiTRuS09By3ufUkRR+\nYSFSSfAQ7vThsE/YRJPzc6J+YmfQVtErcHfOLKMejxcTSfRSLtFN7/shkZH7NiP7TKpFtk1Opi1P\nXIOEseg/SHDXcC9SVcZeEgZeGOKeUTHUb9Yg8q+ZWf0aouY2nufsuvOHkQDDU6nT89X0i3syaM9S\nQxrucZB6ahfimk4pRD74+0dxcT8Ti1S7xMENPjQbafZMFlLVRNE9HykhrhwJq2MzruhPVyF/3Teb\ncfCZ/fS5M9Op9wU7OZtvVsL4iMQ9y2nDWcNEyV1kSNnkOPrF5iKkl7YR6qt+IdGD9a8wBnMWkjDU\nHURqbYpEmg56nOjU5Wv2e3ZrCe/whQwkkGfmIs+YmYUWIT/1hyLd9a5Duinsox81tjOW3eXca/Ip\nZLuJoq+f/jjj40QGBkymgqc+6vMLJyjDqanMzcHNSDQ3JvFez/jMm5sWIp/tWYCkumQY6Wz/SfpU\nzFXM30Vzfu3ZrWlsa1hV6TOnmdmpNKK3Ag6xHcH5T/pC4ibKXbsIyffCYeYjKyYasiud5JOvrmXM\nrtnDnDUnmm9KvsM8vTOe787KXupl7ykSH35sgk7t7ExirE0foV3rQ5krUp+jDC/EEMUWF4U0OZhO\nNHJVD/fM9UkEu2wt99kazDaIuCOMrfBExmtqwZc9u9SI0E7uZktDVeb4sd/kI5eunUYfuVDBPD1c\nQNLTZcsYv9/ZyTd6WgTzUWYuz7hUwpw722VcN/XTB7evpX/dd5Sxe3DAJ3ruZZIfG1Xx35CHSQgh\nhBDCD1owCSGEEEL44YpJck0hRE1kBuPqbs3BjXtwINOzr8n7Z8+OriMJWEsObuIAn132Vy/HxWpm\n1noKqa+hCwkrIJPIrefakQ3+/HrkwP+6iCt20iDSQN2N7OTPqiXqoGkWUQfZ2URiHN7KeVMpZ5Cw\nLAPX8Mxuzrtp8ZHqzMxWtyLD7evATXlkEe7nOS4J4kpSkRxm1xBZmJdG1NDbp+H6/QmcxRl2DcNI\nDG0XcLkPniWxZPwaJLlZFUS9HP0zyjnzVWS1xnzuGVVJhGHhJ4n+e+Fb9Jfle5AFi9NJKlp18T89\nOzMNKWh/iY/+Y2b3H8bN/r1GXMqxU0gQNylhlWenn0aKKu1BnmuNx4U8JYEEccdWII20bEUa3Loc\nt/SmVp5beYRkep3B9LulIb7RGxPj9u+bw9h5toGkiivTiZT5VATRU2HPYT+7irZsq8Jl3uMgbaQt\nWDbuecldjNveo7j7CwvxfZ+IInplQRSJEn8Y8aRnXxP0iGfPmE40WEkNbvn4csJyuucRqdnbjuS5\n+yKSz+IYola/1vhdz95Yw/VmZqd7iNab5eLif3kX5W5+L+W4Kpw5IsZFfgipRxKYKHKnI2EOdNM3\nBw/z3OgIxlTCtchNFwaJ+loyx+cstV2Mx1XvYf5ObyCi6WQdEml/DZ+S4GVEU+U7zLM9kdRpcCMR\nyK07xieujHWJmB6OQioZeR/3LU0kMiv7JPVeUs7YiZ7yQd4hgHMSE7uR9qI2shUgLJQ+7PpEwiYd\np4663kDqmn
8zMtFEkfUS893uucwPS75HP3v9DhJ7xjYjA6d1IFUlhfD9yT7OHLV3Lu1XdJQ6WRiK\nlHlqmLZ0Cqif84eRY4ejd3p2rvGs9peZ98zM7ruPefNfe3jGdUeJBuz5FFsffnGR+WV1FN/+2qWM\nvxame8vu5nnuZqT5/C3MZU2X+a7+ch4S/CbjezU8Ql95O+RhEkIIIYTwgxZMQgghhBB+uGKSXGoQ\nu9RHjuBau3YGMlLLMC79E80k1FpoRDS1HsLdWrMMuSxsN1FoZmZlqbgF8/uRNw4voxzr0nFd73yQ\nM3UWFCKT7Dn8Ps9uKuX6wOFdnt22JMezP3wEV+ETebzbcAKu4T2lSDVfSuNspdBGEjqamRWvIrIw\nug3ZJMfnOLEzpUQyrRhCrgmYjFuzLWPiI3GGs6mv4PIbPbuqDRfyYCOu65RKZJiMASTF4TZcuX3J\n3Geq4U5tySYi6cRCojS+cqTCs18rJ6nZlNMk5Vu/muiN+sM8qzee+5iZBaYRSdgRQfvXr6CyU1Po\nR6+W0idX4Mm20K3IHj8KoP1vLSLSrzQcqbYmjbFQHYvMW1Ba4dnnpxO9Ul4xPqnfRNCcRX+c24eE\nc6EfN/nMeqL8ipcR6bKyiTHbHoj0G3Uv99zeQ18xMyveSXTj5hm0mzOJPnvS50+3x3JJxLjqIO1R\n5VC+wo1EpDYcIxnmddXIR0PDjAOfo8ssvvfvPHtXOUkAF2chnW8fzhn3Dg9E0c87IpAZVmzk3abv\nR8Y610v0UvUQHeblGUhJ44XL/z353fSjnyYjOeQOsDUhP4rylJ1na0JQP9L0iin02Uf6mIsys4my\nerAcmeja60gKfCp2g2cP7aNf7Kxk/l3u/ptnH6okeeS8DMafmVlsFhGGda1IicFHiBSz25FuekuJ\nVM78MJJ6TjWS7PEoJJqQWsZ+5X8gEddm0E5L1iL/7mtiW8fe2+hfBeU+50hOEAF3fMSzV9a86tkZ\n70dGveyTdDcinraPT6OuJw0gtV9cw9aCeXUkat0xSKLHmtlsj3DfYI6a+wTft9QkvoHHHJ7bFUQ/\nm34vY8jMrOIS204WzEeerV39Dc+u38G4KRxhfqnPRTo9M4hcfHUfg/mlaMoRXsIkMimadUNaBAls\ng6OY04sSuedIHX387ZCHSQghhBDCD1owCSGEEEL44YpJcn2dRDTl5yE9vVzDGm04hUiJmyKImDnv\nE5GUt5pok8pWzuEamow72MwsuZ4IjL3xuE1XnSCipedl3PuzknDXV8VQvs8FczbcrqtwDTed59yZ\n3iMf9+znZnLGz5o6ZJ89TbgvZ80hsVpzFe7q4pO4es3MurJxhU6tJALjl7VEHH4Q77MdWIa8N3iA\naIGgCuTAiUphefI47t6oDKSR4auQ23Izb/Xs6mzcnSXDXD/tOJJB1hoimvZuJSqwIYF6n9L6mmc3\nZVA/zemc7ZWagEv4hcPUVVUecsn8POQGM7Pba496dlcG7+A+gos+bg7y5yoXyanlZ/zupLuJrLmp\nkTJdikTCmx9OX+g4hSTV3UBZ4zKQFVN+wu/G3ktUpdktNhGEFNEfzwRwht/fXiQS8qEVjM3UVsp5\noBw5IH8FEUwNexjvHW3jExHeHcKYiowi6ubXQ9Td+zqQc8PCcY/3z6Uf9UZ8zLOP7yDaNvESctOZ\ndCIjMwcZQ5MSSZ4acR+SXMRhpsDuftomqhJpwMxsewLvMDCMjPnsVSQvfPA4dXBxJpFMGfN+7HMj\n+vZEceACdRdtj3l2WuBnPLup9z88OzXO5/zAC5R5q0sfXFeOjHFkKtLprMlIIC9sR0a/vhrprOZa\nIrFSynjf9gTKFpFAgtiykPF1nZjPWB06kePZh9Yj8y98kvP5Gr7E3Bf8CpJ8cwRtlt7AM/r7SKAZ\nsBCJNCYL+9WXkcCyZjKfBpTxbuVZjJeJorKdBKtVw0SnLr/ImFgQynjMbeZ7cjDnV569Nw1pr+Cl\nI9wnhvopzEcu2x1N21+/GQm+s4c5/SeLkbOueoS6TZmHJD5QyvxgZlbWUsEzjlGmWTOR3ReX0qdy\n8pnLA4v4XgeU0x6X5rI1Z10o0nHRdrao5Lf5nPnnsEUgdA39YGjPDs9uDR0vJb4V8jAJIYQQQvhB\nCyYhhBBCCD9cMUkuaAMJtQbPE1mxLJwEgg3FRCXELEEWu5TNLvihKKKQClq/79nNnbgKzcwu3YRM\ncvePcfe91oU99wESe31zCxLTqu3sqP/Bt7j+/MO4/W+rRybJzifqKW4vLtT94bj9l2Tg3j/cgrv6\ncBWSQdodyBNmZklJuKxbXRJpfTgcl2L1MNEMg08QGZh+jDpuW/cdn7v+uU0E8xbiBm6rIFolpA0X\nbMkKfl5+gmiM6x3kz0mtSHJd5UTrrB+iv9Reph5iq6mv3hdyPDvlbqJkApuIsqlbiPyXWIgUFluL\nBGBm9noi7t4TQ0SExW5CAroujAiiC/248dNuJrqv7ZxP5M553NQZrUgdkcFIYJGTqa+wlB3cP4rn\n1t6CBBJfSr1MFDMmveDZZxKRdQ+eRQpprycqbOYw71I1C6km5Sna8vjtuOXTM2gbM7PLcSRb3XqS\npJSJGbjQu6YzzkdqqaPGMvrdSpeomZ0+kZGlnyMZ6vpqXPpNe5H8WnOYH5Y873M+WQL1G3qKflQ4\nd/zU2HKWuSpwGjLpFx5FSj7UjMQcyfC38oeR3petQs6bKHqnMzf1xKHZHznE2VppK9jy0PkmUUmR\n6ws8O2obUsfuldR13AG2OIQEEt32wI0f9uzSri2ePcnnPL/hPMaZE8X9+5PoR7FpRNiZmdW+TpRs\ndDDfhQ/28Dt1IYztGT97j2cfSKJtAnqRZS74JLRMeT+JZKcGkUxxWzFRrrFriFSddPnbnn0wg/no\ngT6umSh2xLI9YPVRIv4Cl3zPs5091GPsSrYQXHeSvpg0yWccLCLhbdIuto00zSRh6PwjdNiSYPpE\n1jGk+TsSmOtK5hJN3nKATJLBScwbZmbL1pAYNjYA6bz+OAmfj02hzd6M/rxnzwhBYmxcwNiaEYAU\nfO4A6wlnoY9Emk2iz8A9fPdbB/jmDlxDnOrArgr7fZCHSQghhBDCD1owCSGEEEL44YpJcm29JH2s\nPEWCrJEhHxnpRp9kkFUk15o2j8i44fPsgrdFuJjPfROXvJnZVZtx4+7/DC67zE7cyW1VSD2rr8Yd\nNzUROSvoZyQjmxdAJN6JDfz8qufXeHaDy7MGJxHJcCgOiSLAqIvoEc4i6h7g/c3M3HLOLyreidt4\n5lJcnpfaiVRIXUzzVcwhsvDjdUTuTBRh+ylbYBpSR1UbEsDMB3EVT19Je9S2cAbUog246EvrqJfg\nTqJPsiZxz5a7aL/2J4hi6QimTqKa6FMbZyMFxr+A+7UmEJnLzGxSPNJIbzvRIp2Gi75xJUnbBnKQ\nDw9vRSLdFLvGs5/PRC79y0TkvBMndnj2mVuI3OveReTd5Fj6Tv422vj
SZ6nHiaJqhL4fWMNYqwsk\n8ugr2Zy/9NWRFz376kykKbeM6KT0i4zH2lDfswzN3DTk5dhB+v/0SiSEve0kap3aQZRdeD+SzGmf\nJpxT7nNm2jGk8/TjtM1zw0TOZvZVePYLCcgq7nYidGagTlr/AG1pZnapCblyZg7vcLmbPt+yBmnp\nQ9VE9WwLoM+XnRh/34mgtQ6p4xYXOaW2Gqk5s4D+1TvVJxL4AnJmchxlDj+LPOUGU6dTlnB9+XHe\nvW0l8ldkJ+O0PZvtCAPnOP8vbAGybeWxL457n/A+tjZMa6St+srpY3FpSEjb5xENOW030ntIPPN6\n3dKfefZVbzKnnAkgEu2qUMZaR+RGz246xTx1+/XI0K+cYZwiTv5hvG8f87izCOm4xecM0tRYyr+/\niuiugQAkwrnp9InOB5/37MO3I0/l70Ri68yh7ZOGkfkiNjPoisuRCOPr+XlyFNtbTqUjfZqZFbUw\njzSVMm7yO5lPMzNzPLtj/4OefWI6fTnuNcrdPQMZbsEyIgAzapHUdzv03z2X2TazroTx17UcOb4h\nlu/12yEPkxBCCCGEH7RgEkIIIYTwwxWT5LoO47qdnkrCsSoft+HRYpI+DjlIIbdV4m79VjTutKQ3\ncFeu3ojLzczszTdw3SYPco5OUy7PC28hiVp0PhEXnY/hlq/JRs7qzSIaat9RZKipObi0e5OQD4Yj\nkIYmV/LcQ5OJ9FkfQJTCq8fGR1l8LI1ohtSbeMb2y7iWp8VTlwmXkaU+/jqRKf8xm+iC6228u/t/\ny4nJlDW2lgiElizc9R9ehwv15Baivro/TF2f2EfbnCvDJRrbT39JTOc+waeIjBy44U7PTuuiHntu\noi9kdiJ/RN1IAraXnxx/7lNyK30peQX1ONlwcbeXImOkFxPd1R6H6/exMCSt5Sdx17+0lnaafQBp\n5Oa9RJ0kFOZ4dvFJ6qL0fvr8Jxwk34mispP3XRVHO01ZiMt88CRyVlw3Esv2HlzsBbcj4fU1EB2z\n7gXe18zszCH68uxP7/bs1i4it5IPU6cZR5FVdv01CSCTo6jrhnLaZtFR2vLoZ5FJeg7nUKbmT3n2\n4Zce9eyi23lWXSju+tQ3Vo97hyNLiQqa3EQd9PIIizCijr4dT2Tc2U1Ihh/5Dv18oqhOoe9caEOK\nyVvF38MXGpBPu4eQyTb7zEcnc4hUtB7u2ZdAFF7zMc42rJuJhNUZQBRiTRPzWNlJpPwFOYzBS2X0\ng+VxSHBmZkdOMW5ro4jQnBuDhNQxA7k8oQ856fhSxmBYEPUeGUU/ejicMjWXcp+NOURuHTiKXB7t\nExU9uxIZctYMpLGJws1Hgr7cgUbc344Mdd5lrCQWUqfz20kk++vniBy0KbTlwLNIe3EdyGo3+kit\nJZ+nf4c9RhvHPc94D72Jflb0is9ZdfuIrjQza0pinGYMI8Gf98lV2tdLO027zPw42InEffHa/+Qd\nztLeEaeooyddvlF/9RjtV3E3ctvW6ZzPF/A03+609Xzf3w55mIQQQggh/KAFkxBCCCGEH7RgEkII\nIYTwwxXbwxTeQ8bl/Plk+k1pRH9siyC0NDMU/fURF702o4z9CVNuv9azq59CMzczy5jPnou6FsIf\n80vRJrORU62hjH1LvZvQ0yNfonw2Hz355/Foub96mENZE9/P4aXll0gF0BdGdud5Kx/27OJWnntd\nBWG2ZmYvBrOHImIfa9noMHTqoWifA2grCHW+eDOicGMYOvBEsbEGHf/XMYSOvqcPTf9HbRyUGzWb\nvQ7DJ9jbkbeH/TP5xvu2/pw9Io1/xX0K7yQlwZFztFOgsR8tOYnsw880fZJnPcv9C+4afyDsxfA3\nKd9B9pskLaQed9aiiff4ZJle1cM+ieQR9lv199N3Vu1hz8iO97HHyonmuTufIjS+wOU9A84yLH86\nif0KC++wCSH4WvYkJe6/27P/JZG0HR9MZJ+HW8e+h83V/+7Ze4593bNb5hCGHXcTe7DMzDpjyF58\nuJMUA5t30Z7FZ0klMZLN+O/7CfvxhtYy/iPPsn/k8O07edaH2PO1cRN7dZ7rIbu5eyshypMD2RfT\ne5g9FnV5hJubmd0RxJg6soA9PQMdtP+CMO6b2UJ/Wf8MezKPzfM9NPwDNhFEpjEeFw1d9Ozyc/Sd\nAJ8Q+9DJ7D8538QenqTqn3r2lE4OOt95hL09hxcwH0fuZ29i5O3sW5s8g/47awHzQ8JD3Gd6Dvtn\nDs9injUzm7me70JWIu1c/RTvc6iFVCCzKmjbpRX0o6CN7Gs9dpS5IPdq2il3N3tnT6ZQL+uS+B5F\nzmAfVV8le94qd9IPJuqU8wCfg64np7FX9LJPOoDwnaROiTrik+ZgOnuC8+bwzbncTNsXFLK3yYn+\npWc/vosx5H6auTv4eb4xaVXsU+ysJd3P7D76XPLM8el+XjzK/rQH8mmDlh723M6pZRy0J/M9DTjG\nN/7M7E979vxWsspf7OZ7+IFY+uDjn+J3M8/SBzNWsZcxOpv2qzpP2om3Qx4mIYQQQgg/aMEkhBBC\nCOGHKybJdffj1nsxBbdZUgtrtDlluOq7upBYEmYT3nxpFpKa82tC0pOHcM+bme2ejntx8xHchUeG\nyNJ6cQruwv4R3Kw3HFji2Scm4Uo/7nJI5RsHkWryFxCa6O4jDD0tGbkpOx/XZ92bhE3mRXCw8Inp\nhDebmd3kcxBkZ+FfeXZpL5lXc4aRMeqvJ2XAS+VIQNkRuKUniuobvuvZi360ybOfmY+cVbCKsOT9\nx0ixkHCMLMBr78Gl/UoR7ud53+P6pEW4SivOIDfEVeFyjVqHDFfzMuW5aQEu5EOZZPpu9AmfNzPL\nLrrHs+scQquHR6jrZZUf8ezLvbx/6SqkgVm9hOOWNtBXH0nwCYfvw/0cMoD0lrgS6Wk4nX7a+4SP\npDFz4tMKZB8kq/a5VEJ337OPg6e3rqTvrw9ECwzr53DMnFjc7bHFjOvyLyHHm5mFPoIkObmNsX0g\nBvf4yNXIe8G5HNg5803a4JTR9xvuZKzkPUEdxeSTzmH7EuSmtTtpm85dyI3HbiANxaw65oGwWcjC\nZmYhB5FbE6YgOU6ejERVUU8G9XSf1CatZcyFizYirUwUSSeRXF6rJ5y6w0ixMCOD7QKXQuib0WcY\njwlTyFd9thOZLOtm3r3eIUXEHJ85/s1TPGuKMZan+9TDr9YwDj64E7m7Yv54rbnjLPNl+3m2Qoxs\nZE/FiuObPbvlI8hz/d1IeO4O2nbfKmTRJUN/69m5t5O2JCAdae+lPYyLOwKRsC85jNmYm8enKpkI\nwjMZ+4cuINkX/hupAQI/yjgdvIx0XHeZjNsNl5iXZxsSZ9Uw38DkOubHqV1Ie6SD9z8AACAASURB\nVNvCuM/HEmnXmtSFnj00Qv+4OJzj2d0V41OKbJiLjDepnTZ43ug75RH8fN4sviGB2T5tVko/Pb+K\n7/vCV0hB8YtUn60AZciryc
PMP3WtfB8anqdvZi34/Q6pl4dJCCGEEMIPWjAJIYQQQvjhiklyCeu/\n4dnZL+L62rMIt3Xv60TlBHwI92nCy0SrJKbi3u0Jx81WkjA+Eifm5L2efdqIjgmej4u2I+ZHnt3U\nyuGojz5NRFv1jbgywyqR2EbCyBY6NxaJqWSjzzUvEpWxezdu0LU1uDgv/RlVnlDKs8zMHmvi/SYv\nfNyzu4Zu8mwnEokqbRuH956+gHQxcu1XbKLpaCWaavArHK7qniWCqmdbs2fPCyZqKisNt/rBFuTC\nFXFIslsbkUzaUqijKRXUb/FU6ifmdbLORg0hNxw5QztNSic6s/s4rmUzs3OpRM1l9/lklq6hL8zf\nQDTGyblc//4fIsMGFfIOZ/+JNp//H8hKdoo6ujwZd3pGFtmknRLkigMFuM3ju572KfW9NhF0hSG3\nDT/3jGfHpNB/ryqlvna2Ed0068M5nr3reepkdR4RPeHfQfIzM7swB/f90sv03yemEZmT0ow8ULuY\n55V2MX7n1CLtZ7Tgiq80pIumOUgCG7/DHPHkLKTwFfcizyaXImHN+SgRjM8fIXrOzMxZTpRcjE9k\nXGUAktyaLCSjo1XIWEPX0P+Hg3neRBHUhlSZG8SBrcFRRCVdLEPGCF6AvLMhkrFWN8TY7J7CIajR\np8nInuMwBvesRg5a9m3aJuIeJPXXa9jKkBDF+NiXQJTctHLa28wsfCN1uv1x6jSth36VkuxzqsDP\nkPdisjiMNbmDfvHVEeb7oGIicmOqiMI8G0ibnYmjf9VPpr8E1FNfMafHHzI9ERzvYL5b2Yc8FZbN\nHPVyNW18W9Eaz+7agJSZVkXk2oXAv/fsqYls49h18H7PvnEx36h15RWe3ZecQ9ni+N6UZdJG91yg\nzMuq+QaYmZ06xVzzbFwCz8tj3plaj3Tecph5sHwa/TSqCvlzw7InPXvrppWeffeb+H+KQnwO2V3A\nXBwSiWQ/7xrWJduOjz855K2Qh0kIIYQQwg9aMAkhhBBC+OHKJa58DKkicCPRUNFhuGLd9+LqTN9N\nYsTzgcgWScuJHgopJUIh8zQuZjOz0HIiHLrjcK2GXfJJWHYLhwSGFRGttHU9hw0unImUGPRTpJeR\nRKJyHuonEVbKLnb7OwtxU76njcRZTiPXtJwnCi/dJYLAzGzaVNzXl+OQK2uP42rtb6JMJ6ciVxQs\n/aZn93Yie0wUR8uRicIv0La5UUhj7UO0c1g4bTgQiBv0Qiiuz/gBXPfXRtMVT5TgKp4eijSQMLLW\ns2s3Eu3woW5k3rwBEivWnkPCcqdwvZlZeyWu4x6jHbK7fNrHJ3qyqgk3eF36Xs/eU42sds3PibZz\nY4mkDImifNe9uN2zn1qM3FwVQaTPukTGwqkF48s9EUSO0E7lk5CIgnN594gOpM3VcylPZT/SQPoA\nkWSRsxlDfftwk5uZLc1Z6tk7fsHvZ1b7RDHmU46lLxL11jOLsXYugvFfFoWLfukg8uGscw959tE/\nJxI2+ECiZ/fXkth2RjLzyDeeRT54TzDlNDPbE0NSwNUZtP+On9GfzxXyjCkFSEmzupnntg6MP3B7\nIuiLRkYeKPgHz+46SoLdWdkVnv2rOqSe3Ex+t6OUqN1j6SQTXBnP+MgLpC2bOr7q2THZ9PG2y9RD\neDSybcrrHNR99B6fPvISEZJmZuGHGOfVyUShRo8QyZQygPR24zT6Tr/Luy0LICJz22ns7grm0EPu\nDs/+5xjGb2s6kYdBbWwXKbuMjFNXu8+n1J+1iWBxOPLU0SQidi2e+adtN/Pprjvop2k/pC9e/KBP\nVOdBZMfty/CRXJ/I2HrqEttMbmn+C8+emUFCxxefoh4++lmfRKjJSPm7RnxO1TWz8lhk0YAypNeF\nA0Qad16mfw3HIp+FX2C8X1rANaUljMWhJqTzn9/P4dwFr9KvbRpy7CmfiNWuSmT2y4u45u2Qh0kI\nIYQQwg9aMAkhhBBC+OGKSXJJ9+Die7KXXe3LK5HkNo3gqu7M8UkwebuPu+6nuBnjfHL4BS3CrW5m\nFvIcO/ULy1kHftnBLX/9OVz3l8uQTMJ7p3l22kHOrNn9WZ69vIZIlHuKiUI7PYjbNOJRJL+qpfx8\nbS7yXOsDSB0/++XJce/wsaaXPLsimHdwXNyF80sqPDuhENdyURzu7pV7eJ+JYorP0VptEUQiNZ/h\nnLj8jT/x7JYXcbNHxlIv9RVEDMUuoT0e2sm5enkRyKuPxvHgTQm4Zec/wvt+570venZdGzLl1alE\nscSHU04zs+F6Elfm3lTh2eGvciZh/cdxNYf8E67mztPc6x/+lvfZWkV0Zk4lbVCWQMTKgx+h7tZX\n0f+bEnN4hyeJkLTY8fLWRPCrBOS/W66lnwUUcc5hwq1EQxUVMbbubSPi7cHE/6+9M42Oq7zS9VdV\nkjUPpXlWSdY8y/Moy8YYDBYmMQTTkARC5xL6Nhk6dCfd93I76aS5K6vTIenAJU06gJNAAAdswMaD\nbGzHxrJsWfNkWZZL8zyrVKWSVLr/znNq9SK1VlP+1fv5tZdcdeqc8w3neL/fuz9S9A0nkWTWp7DH\nmFJK9Szhhgu7GxnHNIGsMjtv1eKbqbgezbW0R/4W5KyPxvgNUyZS4sTI32px0osUs11+DfnXoZPR\nhouQVfbHItMn3aItlVIqIxC3YusJjhXxfaSPkWHmrWUHU+tSK3PhRIJu/zEvUZ7FOBr/ECdwjRnn\n00go15O5g7FjP4N06Ktzye03cZ6fmnGwJg7jHPXrZcw6d3Efxhr5TOkyRR8bklgqkHvMqsX+TyD5\nKqVUzRvIOGZ/zmPzJfqRbZn5/7SBIsaWbCStpbtwYg1dYLyvfJPvfqkSqevqINLVhYCntfhAMM+K\n7fFIhInz+5S3CVg+rcVrHCx3aBylMHFpAS7yeCfj0bGFpRJ5um7WVsr9zXwHafLkV1lm0heIlNnq\neEuLO15jmUnuw7Srsxa5d3yZZQYjEzgqlVJqyYBc+s0cxt3Lo9y79FDm2YAR3LZbQnCRV07Rf+PN\njPcO3Z6EpZW4yQc20sY+XRTlnGyl8Orjk/SVjin3OeuzkAyTIAiCIAiCB+SFSRAEQRAEwQOGlZUV\nz5/6L/DVv3lCO/AqJ5LM7D4KUy28RtrMVUTauvQ4abypx9DhYodwFdXa3IuGZWTxfUcR0k3w26Qj\nXdvY4yq9HYnN5xx71nzyz6Qsc04iURidpKUH+zn+dCAFzhJDuM64WIqsGVeRcuy9hgPEp8S9+GZ4\nD66s2gDkirxhZClTFlKPj+3vOa5uXy7VTsGvX7z+E6w/n4OX/vEnWnum3sSlcSGU9vSJJm0abcAl\nlzLIfTxWYtHi5GDuUZ+N9GhMH+4pQz33uquCY67EIUPk/BzHVFoicsNyMJ/pLXKXQizppKMr55Ai\nMuaQUiY+pdBemK5/WU24gAqbSFNH+ODumwmmD9/Q/XSmH9LzvAsHZ+oibbyURWp9oBA
33w8rjnql\nLf/5+49rbbkUwb5iC124TOZScZksfoT0VmJBAnBmMSbmYunX3R9TJFIppaJy6ecdMdxTRyif29lF\nOn02BLdSwi4uua32hBYH+OP0m7bS5mFJSLJOXdHLuF6rFl8w6OTD+d9ocbwv13Aqn2MqpVSpSyex\nXkEynL2LdivpJU7ROdEO+dL/v7AKh903fvojr7Tndw//QmvPmgX6y8pRZNW969k/z1qExB9YyTwa\nm8T/nwMNuAfn/OkX1bO0jaUAOSu/AbfW7S7mwZlF+k5mMsUtexzI1NFd7nsPTtI8yh6K3FZykyUM\np7by7FgTjtRz/hxffiCEv388xFzwvQYktveyHtXitjgkXz9/novRkzTT+hXab8GBTP+tn3tnnv31\nT5/VfnimjkKP0ztpm+IB5oczC7RH6hKO3dhupL2PQ5GkgoJKtXhPCO3xsQ/HDHTSXw3hzFHrA2hL\nl4v57VYn/b5nEalOKaU2Of+vFp+10TcTfJhze/zop/cu0486wrm2wo1cT+AK+87ajzJfHzYj30f3\n0cfLDtK/YvqtWvyhnXcLixP33A9e2P6ZbSkZJkEQBEEQBA/IC5MgCIIgCIIH7phLLseXNN2ttaT1\nMlmAr5wG0nKuIBwm8wdw5QyP4HQJXUReq0hwL1zZ1IHbZc5Ber83jOJcDzYhGTUvk77zCSQdmXQE\nqS54B2nc0Urkg56/p4jjvZ/gCrho5XyCCnB61L3CvUjbgqSxbHZ3s8WYcNZ99way2jF/ir8tteCk\nG12Hq2dhmn1+NvRReMxbHKmjiFhJNPc0bANyWGA36fCLGZx/diOSpzPgYy0OddDmO/5Ae7Y/Siq9\n2I80fPYKzormk+wTZy5CPno3lXPzm0AyKjvO+SilVP/XSb8vvEj6PesLuIZuxdNHAgYpzLgjCOdI\n+lacHFfnKIQWPIbcuFhC6tfez/GXO3HBjAYnanFhL/3udpeuOB7d+nPhLCQF3m5CYihx6VLrJtom\nrhi3lT17rxaPbUUijfmjTjZ+gGtUSqlqO/t+lbUjVeXsQBI4q3PJrazXnd+rHCtwBXfqaCqOpsJY\n7m/SGP3l/CgOmv77yLJnOmjLAF1Bw44CXIsFn7hn5VtDuL6FTYztvf6cd08Ux62exE2V04e0cuuA\nvtihdxg/y3y5MU5XcHM/0kXSy/RNn3bk/kHLNzhOA+7PtFKcT5YuxtdcDGPZ/9+R44c2cq+Tohmb\n0wm4lJ2zyII+uiKRCc1PuV2PueYFzi+Q3wjSyXuR3+JZMPO/rBzrBvNghK6g4sMjyOg1a3ZzTCOf\n2TuDO7VVV2x4aBP9POEa/WXYxfl4i5vv4NYyBuEGy+ihD52fo538/JmjelKYc+rzOP/Hg1jKcrjj\nNS2+HfAdjhOO3F1QzVw8NcF325dYTrBwjeUgao5nZua637pdz1ABz7j8Qd2ehzm4f3O6We7Qfur3\nWpxUcYBzNdFOl5t5b0jL55rvjsBh19xFYc3BRfqg1c6zdJ2J+xvcr38+bFefhWSYBEEQBEEQPCAv\nTIIgCIIgCB64Y5LcxGpkqzVXkaEanU9qcZo/6bT023xmKI+Ud1IDOsRkLHuq1Xewkl8ppcIWcFk5\njaT+bP6kBHucpPrDjbhgFh7AMXX9IjJX/Au4YVabWeFf9FvSwaPBpOFzAh7W4k8+/LkWF6aSzl+0\nIhmkVlPsTSmlWrI4v+vppCDVKNfgysadYLGTpp1d4lhDa36mO+oO5Q12+5O+PDvOPdpgQtpqDtUV\nmZtE/rx0EMkzsh9Xim0B6a3uPopVrmujGOQ7ecgku9spSrjtwENafOoUUmauzjxZZqNtzhx0319v\n87vIdVNlFi2+WoWkkRNHgccbd+v2NepgP6nGAaSI2WSO41S0R1Ez199h4foNIUgpCXn02Y4e9o9z\nGjmmtwgc55iZs4y7hmzOs6SLAqu3t9LnrvUgWzz5Iv3vl3mk68tH3IvAZS7g0jEF0P8b+5FoUqKQ\nTJZakMPOrmKcri9DGol34aS80ohsE2FnXlg3xDlVT1DkdfYme+CtWodLsGmB8bcpBoedUkoV9SE1\npJ1AKvjxExSlfaiO7ywNIDcW5Z7X4uV2dwehNwhKOavFtiZkcdcYUurgdvaA27YN+fc3o+9qcYID\nuelqKXt0xS3iBI5wMD6avoYUmt7B/DBwk3tqDKZt+o/hKDU+gizWZ6YoqFJKLQU/qMVROcwjLdFI\n1REJSIahRubjOIVkfLKcMZVRS1+Y8eec9hhY4vDSDI/DoXIkrX0f/B8tDp7h+M/k0l++rrxDZSn9\nurAA2cq1GqfidBtjZc0gbXCrg/ERlcQcMjXBeMpbhRM7beZlLb79AYU6Aw9QJNM1g3N0NAwp0BTO\nUhnzFiS8OjsSv1JKlVTjUIvOxT1ZdZxlOvMmxvi9G3i2uGZxM/Z8j31B9+yir720wLy8fTPzSW4p\nc39sL+3U7cP8UxPGUoGU3e7LCD4LyTAJgiAIgiB4QF6YBEEQBEEQPHDHJLm5Wp2DYDM/E95Cyq4t\nllSZwR/J62QEroydOjkgw0axuh6Xu9vEZ5F0eNQKRc0MC6RxrWtxZpRaSV1PTFMcsWgA19Pl75Ie\njegm3WkZIIU4PY+cN+fzH1p8fzJF3RonOOZWxTHjV7HaXymlJhTSXeco17BpjnRyiK4w3XQ9adeV\nu0n1n3A9ocX/oLxDXCjtUJBI6n7kArLVqnjcZqZQ7qnlArLqWIjO4eEi9XsgZa0WHwqjzcozSK0a\nrpdrce1HyH+Ld+GyyLlOqvj/FVH0ruAQ56+UUkOr0e6+6ESKejfNqsWX80hTb3kfmXNfPHLVz4pJ\nA+eMIzFfCUeqdI4jGcw34VLJNiOTDA3iNgyapq9dL9NtoOglCmpIP/9aV9wxqof+6EhAspxv4xoL\nM5Cd35lGLjFnINsstCHnKKVUuG6vxu7VtJW5hvFYv4CLK3s9fXlPLm69/iucU5+ZcZf2IMc598Z9\nWpzix75Rvhk6V5aL/fxst/g/Y9YiEmlTp3th3B0rzFWTDzHuHlqhb28u4pxe9sdJmX0Xqf/SVu/v\nJZeYi8QWMMUcshLHPb3Rj2vI2cZ84m+lPXxn+e7gMcZjeCLycpudcR34Hu2flsz4sj7JHJV2Aldd\nQCz9qzeD5Q6Dy5yPUkolzXKslkVcr3EN9MNR6xe1eOEW931+D7Jf0gjSW58P7RzeyPm9dxvJP+Z5\n5Mb1E0e02M8Pee7ECkslfun9LTvVY+uQV2Ou8FsfDCFzP1zHD1uLrVo8nqWToa7gwB1OYH6rb6If\nBMcji06X4TDrrUMWG97LUonUOtxmg0sUo+09h6wfOkJhTKWUCgjhWVHYghw2Es+4NsyyrOFkMAVN\n/9rOMzT42+wjWBnK793dSp+tj+Hzq9OZi2de1u0ZF8o8e986pMrGF3XPB9S//4RkmARBEARBEDwg\nL0yCIAiCIAgeuGN7yX37K/dpB85yPqf9fX
QtroyxahxDM5mk2QoMpCWrinV5z0Ok0KIeJtWrlFJZ\nOrdSWDcp/f4KUuDd7aR00+b4e2gcbp+bc9yPmhbSxvuLKLhn7SLVHbmRtGlmF+nd/pOkCv1L+O6s\nLwWylueRAJRSaiUMGSA/DinGLwppwWAkpXpmGjeZeZT0qnGUz/z4V894ZY+j5z/cqd2YdivOh/Jx\nftdgRN6priO1+tijpHIPVZ3X4md01/K9cWSuZwOQs1Q57Vrlhxxgf08n4cThqpzw474N6IrYhWfg\nOFFKqa5ovlMWTKrYtxuZ8Mgit25LALLKUDhuzcTjyE+G55ES42+REj/VhJzz8HXu3Z8OII3F6CSg\n+WAko1Qbn3n2l696pS0ff+N1rS2NJtL1uZM6d8wSrrKsM7jcah+2cqB+itoFWJEXl83IKEopNR5C\nuj87ikKJV5ZpzydNpMQ/9UHOulWNPBkRg3TjjGJs5q4wfofspPrDhxibVS5uXdkCn7eHcw5zvsjF\nIfsfc7uGgg+Q24+kspei7zztufoi7rlwM/fmvXDO496nkI++XfiKV9rzwWePae25VVm1vw/HcHjf\nRWSopUWkKsWfVdg+xmN3HeMrLBOpI/Ac1z67mTZIaaYgYLNOhtlixFHaFor8t3yL+7OU7j4PJoww\nB4dHMWf/chJp5b0Axl3HgJXf1u1peNRC277eyLz7t0a+W+7PcomV1TyDfM7Q7zLTcNi+nPGqFm/y\no3jjPz1a4ZW2/MrLT2oXf5+T+9B0m+ddUDDLXWacLHe5K/agFr/RSXHSGBtjfNUI7s24PO7nYIxV\ni6dDkNWWQunfYX3Mb5FHmRMKnz2qxTf+jWegUkqFbOf8mqMYd6UFzPdDp+lHfrnI7vHdSKSXTDzT\n5+z0nTVjHL8/nufsnyaZB76TSn9sDP9Ei2ujGeMl/cy/P3nuZ7KXnCAIgiAIwn8VeWESBEEQBEHw\nwB1zyUWuww02F4YMF/42Rc3MT7KxnMOlW4HfjUuuNJwUdtEjuOR+d400oFJKOfexH9GSL2ng3Ivn\ntTiol8t1ZrBa3k9nuAvQufJ2zJCWnc9BJiqfJkU7YGY1/lQS6b7OLCSgnQ+Rcr54jJTlPYs66Ukp\nNW3g/JpGSMHWL+PSSJsnpdpi/YIW754iBZla4O7w8QaLncinzikcfG2JyE2ZLtKpKVEUzbtwlRR4\ncQ7X8lYTRQa/U4IccqqH+xBzA3fEqJP0/N33I9XNtCEFJk1QWHGq4G4tjhzmvimllCG7Q4uNSUgO\nfTXIUj9IxtXTU4ZMFPAf9NXYCgqnjb2CjPFaOtf8pWAK6F19mu+m9FKAzZKIlFg1yP9jYuNIfXuL\nDBv7sE25cL31uhgTY1E4Qd98HKfMM020X08o40yNUag2QVGQVCmlxlPZb7HDF6nWcYGx9nsf0vWW\nMgZkQinp9yw78kmtnXuUUYyE0LNEuzpC6S9DgciEg63YYFbsyBILm5hr2o/rtCqlVPwKEmv8HH3p\ndCYSVc4AfSHqGmMwcQv3yXlUV7iSae5z4YikeKY5jXG3dIM5qLX0tBY/V8tY/tEibZWyzFII/wn6\nQkgb89fEaq5xfJYLMAfiHKyY4reubqCoobORsZUbibs2ORlno1JKXXFSiHZkmOdIUTjy9OttyC/G\nUiTyogbabcvC17R4LIg9LOMXKJo4Gn+vFjcbuc5tDyGHfdCLVDcx+Fda3D3L3K8e9c5Gj1O6gre9\nifR9Wy73PfdMuRZ3+iO9VQa9ocVL48x9s6UUunyknP7Rb+Y+OKr5rfgg9lJra2FpiaWT+3DhIGNl\n4ShS6/BT7gVf+2uQ9MrS6Kez79NfUjZz3xcHkU6vJfF88J/iGmJDWAZiS2CcRU4zz95jQYYL60bC\nXNzMuT53EifodBDX8+eQDJMgCIIgCIIH5IVJEARBEATBA3dMkmsYp9DWrhNIDx25pPjm3mL1flAK\nafWkCdwNJ43IJbE3SX8/sIwjQCmlLvb9UYsDx9izxjGFrNKUhlvlkQhS0e/6U3Tv/nOkPq3xuj2q\ndMX4/HtJsTcVkpZd208q05CMo8AwiMNqeeN5LR76ucXtGooNOE3qQ5C3vrpA6vBizOtaXLZMirv1\nf3DN0xfdJRFvYE5Chgu2c/33x/HO/Wk7qfS2dNyNBSNII/OJSI07q3+lxafyaJvCS7h15ox8tzwy\nVYtPO3Sp2AnStUFZX+KkW5Ce1s1RPFIppXpPkZp1LpAe9vmfXFv3Ce7vmy1IqXnrkQO6ZykW51tA\nWnvbbVLCi/kcc9cfSDP/7AAyX0Ytrr1CC79V1UmaWXdln4uckziJPvoy/TelibGyuh1pa6+5RIsN\nHUhvt8NJv29/4SMtnj+Bw04ppcYaccHsKKGfbstiXnB14Pg8Z+T6I9uRzKoL6Gs7HUi4bx/GRWpJ\n43qWLcg599qQAoONL2hxfgeS5PlZnDvJsfyuUkq9ZaJQX+EHVi3+0R7a+WIC0lDjBuT8DQ6dtHDz\njPI2uXPMU4evsORhxxgygy2TPcTOLjK+hrZYtbighvYPDGVM9OexH+VUC0shEuxVWhyiK+zbMI9s\na89Fas7VLTN4zQ/H42470pBSSiWm8Fiymvi3L84zhis3/LUWR04yvxriudfjdRSxtevmo2gLc6tp\njudRdCXLC6J38hxYHmKcrvRyX9akubtBvcH6BiQ5VzZO7ooWlm+cjUaGG2tirm9tRYLdHcFnum8h\nZf+xW/dsWKJ4c2woywkudSPHRyYy76/o5MLgRSS/+bv5rRErRVSVUmrDDtr86usUxCze874WD73/\nYy02Z7zBceeQYIPtOOaspvMcx8KzO2aI/UWbyulDz/tQYPUr735Xiy9Hs3zDkujuov4sJMMkCIIg\nCILgAXlhEgRBEARB8MAdk+RSakgTR36Z9On4KKn0xWrcEU2ByG2Wu0jXPmYjnfhpmkWLE8ZIfyul\nlCGZlfApU6TTfztNys7HiOOoyp93xQ1jOFfaQpEiQpaQBg4FkJb9m6QiLc5eRi6zX2S/qpFtpB8d\n3ch5f6ErXLgU716wracVWWam+J+0uM6BvBdWx7lmFiD1lLyJvFFpJlbqW8ob9E0je6U6OM9DTZzb\nvI3rWTeGGyyoHGmg920cRuNmUuDF55ESLjmQUgJiKRjov8L1ZjXS3mFm5JzhNmTehG2kWdtc7mn/\nqPNIcueSSH2HHSdl3WhEeisKIUU/0EN633SLc02OIcUdWsz191aR1j6ynpT+ul4cK03JuBy3WnVy\nTiHpZG/xzoO0x7d+Qn9q/TJSuNMf6e2SD26oqLV8N+kG+0K2HEdSTQjgWpRSKiqUdPrUPL/RWUcq\nP9AXWWX76Qe12FGMMyc37iUt7i14nPOo5xp6ApkviuNw3LQ14m4KWNDtYXk/0oB1nN/KGscJq5RS\nuw7iGsvvRnrt6sGpOhV6TIudafTnyVBkopA09+N6g4QqrjNlL+6uq+FIT8kd9NN3E05o8fb+Z7TY\nz
4W7zRWClBLSitw6vIs5NPgw9f0sQcwPLWVIpPYlHjGVPozfX5cwNs8Zue9KKdU9forfCPtLjuvS\nuUq7H9DizYFISKMxSIDR44zf93bzHAis36vFEdtpy+Ji2tJ5Bbkm1A8JcyXNosUuXYFZb+Ffwfks\nD3D+l4eQflcHMN8NbWD+3Ri2T4u79nG9hpcYp8nJPLuSwpDKTwwgd5o6ka+3RuFGHsnVOd4+oY2u\nP8Pymyw7SxSUUsrYyTy7aS/HrT1t0eLcg7jLZ4bpv3kmjrukk3yD+lkiMGfczDU4Wb6QfII5enfv\niBY7fvAK8Tu47ZqH2ONTPe2+3EePZJgEQRAEQRA8IC9MgiAIgiAIHrhjkpzJiLTx9sgRLQ7XFYAM\nLEfCeMofyavZSMrtyjwFx4rSyrXYbkMWUkqp/mokvVf7SQ8/+A1SmVNW5ENwFwAACPFJREFU0sw+\ns5yfdRw3VGARqf4mB1LCg1dJax6PobCe+QyF0oo3UbxsTwcp0epe3EQ+Gazkjwtyl1ua1pLy3H3+\nX7S4ay2p6NllUo3TSchM9dW4DG0+7kUavUHpPPLk9SHSwEWJ3MeuCRwn4Xm4ldIU55mYvF+LWwZx\n1VUvcvyKYlKrc2OkZaezuY/3TJFm7Z/hd9tsOOlcfyBVX53ovmfi3nzOKSMJGadnDPlp0wRSQdUl\npKjkLZx31Q2kmtQSXFa26/Spzem6e1TH9YxHICuWbiUVfzuCsTChu6eUG/x8lF5grJzbimstahpJ\nprEbp1t8MQUm+2I555h6zihhFnnRmYZkoJRS8b44XY90IX/u3Eu63uc20nbXfoqVJr7Kub7f9IgW\nF+Qy7sZTkDmf7GZKe3MemW80gc9kLiAZRl1BYlqVjDw16KT9lFJqopbvRyfSzsNLuOeiYrln94wy\nZt8aYzzGVuJGUt9WXiEqjf77sm4XrJ2K/f3yNyN/+toowum4QQHJ2XDaI0y3DGC2geUVKdPI7nVR\n/JhJVwTx8gzy9erSX2hxdh1FIn9Vyf105tH2SikV488eX8tO9murGfgB19PCfPzHCeTgpOc/0OK5\n/Yzl+EpkS1MkEukVG8+g7TPntDhgK3P5Fd1YjrEj+Y9+jPSovqe8wqp+JLBrv8MlVpypk5VSmMti\nA5CXOzp4Zlz+abkWr9/M/W1+l2fX6A5kTWMEjsLFWOTSqWmeSbbbjOv41TjmjL9hKcpksvvYX45l\nXndGWrQ4dBdFUgMv4IRWSzw3zC7OuyabOXRrxYuc0+H/rcWbonm+B5uZNwoWkWknh5h/j1fgov7m\nUe7Fn0MyTIIgCIIgCB6QFyZBEARBEAQP3DFJLr4MZ5CrmxToESNp0t3tpBbnI0gVJvvyHufrIN06\nc/qQFk/sdy80deBj3CeXi0i5jz+HnDCQSnoxv4Cia6ZiZJzmTtKsXwpkBX5HGrfq6/O4+F60IGnE\nTf1Wi48pCnvds5Xjz2bgLjC1IBkppdTOcApXjvojOQR3UjjONkSq8UY1koOfD4Uc14RTQNNb2No4\nnx25pI2DL/L3/lhKK54+96YWF67bo8W9c6Rc8wqRd55uIKV/cx3H97tMP1plRP66EYxEdq2FlOu8\nBZmgYoV0uyPH3YkzYWDvspAaZBV/J1LEb+6j7+RfZU+sJCtSanEIcoW9ifT4WhP3ZUjh4updQxvf\nFUBbnmnj75sncWqqCfcCit7A8EOko4jnSZnbCkhXr3MiKVWtQg5ImGD8+jkp3LeqGaktoJljKqVU\nUvZ5Lf7XTMbp2SZcXH4O9qubVvSXl+JwmJaWMBdY2rhHvnOk9B3RzCm5kUhMrnrdXoB+3OsTa45z\nfDsp+v5FHERKKeW6yXwz4sDttbwRmdiRjITf9Qn9cM8VnYO1AvnMWwyjeKrVQciqo8XISn0OHF32\ni4zNuaQDWjzWgxNp5xQSSPA+pJGJm3/S4vtC+UxvD31/dzzzWtcHyHCtRmSx1O20a+YIY1EppeyL\nPAuqdO6tjdnIiktlyJxbT1Fwc2ySJQtNdtowJpG/Bw8wfu2jXPO18zwfdgYxJ6RuQGKcCUJ69o3F\nSeotbgdyziGl3LsQv8NanDXFMpO+ViTSrxTh5lt9hXGwqgHZNW8/89KtbvplWRfS/2Qvz9+PM1ge\nEZ/DUpk3W5A+zWu5P0mN7s+0+u08Zxf+xPwdZWFfWN9mluyk78Ah297BfJS4wti6eeyLWnw1j3kg\nauhpLS610VfeGmUJRf48c07EUZ6fvZksoflzSIZJEARBEATBA/LCJAiCIAiC4IE7JsnVByJVfTmJ\nIn79JlJfFaUUsXy1iXR72TTpVnss8tqRmSe0+Lm/0q2sV0r97l8pxvbMNVKl/1JOunZnDNLCaBMy\n4VIBq+t9HbhYbOtJ47Yf5/PBMUhJqXbko9ha3Hn33MueOsZBzm1AIecFLSITKKVUW0C5FleswR34\nYT8OKuVPavKH6ewJdsj3Gp8xIAF5i+503A/Tw6TfW+aRG0sXKCxoXouMuDJGKj7L16LFPlM4Xc5n\n6/b8ayAlvz0BabNmFWnyfSbe9fP3UZTv8DLHPNtCWtYy7+6CaAhEGnTFISHtO0sx1fbLyHhGI+nu\n2yGk9MdrdfKknTa7EEtq3ddJ8VBXI3txOdfg9Nyo2zuwb4GCdfYc+qy3sP0dEohzPfdhZMyixeZb\n3N9NaYzHxlpS9JaNjNnyfBxZ369DAlBKqfUhSDptryC9+m+hjwxm4LSZf4vP5Ech3fxlPX8/lYqE\nEBJH25wORYaLOs1YHo6lSF1KJm0QM8keaz0x9KONqexNqZRSXZ8i19VGcD2P3mAuaO6maJ95mXHe\n9xRuwpgepGRvEWBgbMbZkUd8riKTvt3E39OT6e97BpHVLofS31OSmO9e7UfCOzhCm18P5b4PxyBN\nttbQl0vScCH2Z9HfY+sYQ/9m0rnNlFIHQ5Ett8Ux90Xc5tnRMoAs9ebiSS3efILlCBm7GJvWZRxn\n4boim9fW64rW2mi/06txHS8HIsHe+/uvabHvQZ4b3sLvKr9lMyNnOYeQDt/w5/6mhzPPNHzC/d2x\nj/FxKRz5rO/3LJsYUFZ+1xfnZ0Q5RaH3zfMM7KzlfGKTePbEtPN8X9rB+FNKqdiRSi0uy6Q9XNM4\nYavi+e3UfvrFcRfz1FOBSIx2F21vieJ5uMrwI64hgT0ir+VQtPauFpb15GXSxqmd7u8Tn4VkmARB\nEARBEDwgL0yCIAiCIAgeMKysrHj+lCAIgiAIwn9jJMMkCIIgCILgAXlhEgRBEARB8IC8MAmCIAiC\nIHhAXpgEQRAEQRA8IC9MgiAIgiAIHpAXJkEQBEEQBA/IC5MgCIIgCIIH5IVJEARBEATBA/LCJAiC\nIAiC4AF5YRIEQRAEQfCAvDAJgiAIgiB4QF6YBEEQBEEQPCAvTIIgCIIgCB6QFyZBEARBEAQPyAuT\nIAiCIAiCB+SFSRAEQRAEwQPywiQIgiAIguABeWESBEEQBE
[base64-encoded "png" field omitted: binary PNG payload of the notebook's rendered plot output, with no recoverable text]
2369 |        "text": [
2370 |         ""
2371 |        ]
2372 |       }
2373 |      ],
2374 |      "prompt_number": 16
2375 |     },
2376 |     {
2377 |      "cell_type": "code",
2378 |      "collapsed": false,
2379 |      "input": [],
2380 |      "language": "python",
2381 |      "metadata": {},
2382 |      "outputs": []
2383 |     }
2384 |    ],
2385 |    "metadata": {}
2386 |   }
2387 |  ]
2388 | }
--------------------------------------------------------------------------------