├── README.md ├── accept_reject └── accept_reject.R ├── clustering ├── kmedoids.py ├── spectral.py ├── em.borked.py ├── em.shivaram.py ├── em.py └── clustering.py ├── stochastic_gradient_ascent └── classify.R ├── least_mean_squares └── lms.R ├── gibbs └── toroidal_ising_mean.py ├── naive_mean_field └── toroidal_ising_mean.py ├── sum_product └── sumproduct.py ├── iterative_proportional_fitting └── ipf.py ├── EM_HMM └── hmm.py └── notebook.ipynb /README.md: -------------------------------------------------------------------------------- 1 | machine_learning 2 | =================== 3 | 4 | Repository of various statistics and machine learning algorithms I've implemented. Not intended for general use, but hopefully useful to other students of these algorithms. -------------------------------------------------------------------------------- /accept_reject/accept_reject.R: -------------------------------------------------------------------------------- 1 | # (b) Implement an accept-reject sampler as specified. 2 | 3 | sampler <- function() { 4 | ret <- list() 5 | draws <- 0 6 | sampled <- FALSE 7 | while (!sampled) { 8 | u <- runif(n=1) 9 | # The distribution we are sampling from is 6x(1-x). 10 | # Setting M = 3 means when u = 1/2 we have a 1/2 chance of accepting. 11 | x <- (6*u*(1-u)) / 3 12 | v <- runif(n=1) 13 | draws <- draws + 2 14 | if (v < x) { sampled <- TRUE } 15 | } 16 | ret[[1]] <- u # the sample 17 | ret[[2]] <- draws # amount uniform sampler was called 18 | return(ret) 19 | } 20 | 21 | n <- 10000 # number of samples 22 | samples <- vector(mode="numeric", length=n) 23 | actual.draws <- 0 # number of times uniform sampler is called 24 | 25 | # Do the sampling. 26 | for (i in seq(1,n)) { 27 | r <- sampler() 28 | samples[i] <- r[[1]] 29 | actual.draws <- actual.draws + r[[2]] 30 | } 31 | 32 | # Plot the sample histogram. 
33 | pdf("q3.pdf") 34 | hist(samples) 35 | dev.off() 36 | 37 | # (c) Count the actual number of times per sample the uniform sampler was called. 38 | # > actual.draws / n 39 | # [1] 5.9328 40 | -------------------------------------------------------------------------------- /clustering/kmedoids.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import numpy as np 4 | import random 5 | 6 | def cluster(distances, k=3): 7 | 8 | m = distances.shape[0] # number of points 9 | 10 | # Pick k random medoids. 11 | curr_medoids = np.array([-1]*k) 12 | while not len(np.unique(curr_medoids)) == k: 13 | curr_medoids = np.array([random.randint(0, m - 1) for _ in range(k)]) 14 | old_medoids = np.array([-1]*k) # Doesn't matter what we initialize these to. 15 | new_medoids = np.array([-1]*k) 16 | 17 | # Until the medoids stop updating, do the following: 18 | while not ((old_medoids == curr_medoids).all()): 19 | # Assign each point to cluster with closest medoid. 20 | clusters = assign_points_to_clusters(curr_medoids, distances) 21 | 22 | # Update cluster medoids to be lowest cost point. 23 | for curr_medoid in curr_medoids: 24 | cluster = np.where(clusters == curr_medoid)[0] 25 | new_medoids[curr_medoids == curr_medoid] = compute_new_medoid(cluster, distances) 26 | 27 | old_medoids[:] = curr_medoids[:] 28 | curr_medoids[:] = new_medoids[:] 29 | 30 | return clusters, curr_medoids 31 | 32 | def assign_points_to_clusters(medoids, distances): 33 | distances_to_medoids = distances[:,medoids] 34 | clusters = medoids[np.argmin(distances_to_medoids, axis=1)] 35 | clusters[medoids] = medoids 36 | return clusters 37 | 38 | def compute_new_medoid(cluster, distances): 39 | mask = np.ones(distances.shape) 40 | mask[np.ix_(cluster,cluster)] = 0. 
41 | cluster_distances = np.ma.masked_array(data=distances, mask=mask, fill_value=10e9) 42 | costs = cluster_distances.sum(axis=1) 43 | return costs.argmin(axis=0, fill_value=10e9) 44 | -------------------------------------------------------------------------------- /clustering/spectral.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import clustering 4 | import kmedoids 5 | import numpy as np 6 | import scipy 7 | import scipy.linalg as la 8 | from scipy.sparse.linalg import eigsh 9 | from scipy.sparse.linalg import svds 10 | 11 | from scipy.cluster.vq import whiten, kmeans 12 | 13 | import matplotlib.pyplot as plt 14 | 15 | def cluster(distances, k=4, sigma=1.): 16 | np.seterr(all='print') 17 | L = compute_special_L(distances, sigma) 18 | X = compute_special_eigenmatrix(L, k) 19 | Y = renormalize(X) 20 | return cluster_rows_in_k(Y, k) 21 | 22 | def compute_special_L(distances, sigma): 23 | affinity = compute_affinity(distances, sigma) 24 | s = affinity.sum(axis=1) + 1e-10 25 | 26 | # While seemingly sensible, the exponentiation of a large negative number 27 | # and underflow results in a singular matrix. So instead we rely on the 28 | # fact that D is diagonal to invert it. We do not do: 29 | # Dinvsqrt = scipy.real(la.sqrtm(la.inv(D))) 30 | # Also note that D = np.diagflat(s) but we don't need this, we compute 31 | # D^(-1/2) directly below. 32 | 33 | Dinvsqrt = np.diagflat(np.sqrt(1./s)) 34 | 35 | # If you don't cast those to matrices first, and the matrices are stored 36 | # as ndarrays, it will multiply pairwise. 37 | return np.mat(Dinvsqrt) * np.mat(affinity) * np.mat(Dinvsqrt) 38 | 39 | def compute_affinity(distances, sigma): 40 | affinity = np.exp(-1.*(np.multiply(distances, distances)) / (2.*(sigma**2.))) 41 | np.fill_diagonal(affinity, 0.) 42 | return affinity 43 | 44 | def compute_special_eigenmatrix(L, k): 45 | return eigsh(scipy.real(L), k=k)[1] # Freakin' magic ... 
46 | 
47 | def renormalize(X):
48 |     # Normalize each row of X to unit (L2) length, as in spectral clustering.
48 |     # FIX: was `(np.sqrt(X**2)).sum(axis=1)`, which is the L1 norm (sum of
48 |     # absolute values) of each row, not the Euclidean row norm.
48 |     Xnorm = np.sqrt((X**2).sum(axis=1))
49 |     for row in range(X.shape[0]): # TODO(salspaugh): Change this implementation -- looping is inefficient.
50 |         X[row,:] = X[row,:] / Xnorm[row]
51 |     return X
52 | 
53 | def cluster_rows_in_k(kdata, k):
54 |     # Cluster the rows of the eigenvector matrix with k-medoids.
54 |     distances = compute_distances_in_k(kdata)
55 |     return kmedoids.cluster(distances, k=k)
56 | 
57 | def l2norm(a, b):
58 |     return np.linalg.norm(a - b)
59 | 
60 | def compute_distances_in_k(data):
61 |     return clustering.build_distance_matrix(data, l2norm)
62 | 
--------------------------------------------------------------------------------
/stochastic_gradient_ascent/classify.R:
--------------------------------------------------------------------------------
 1 | cdata <- read.table("classification2d.dat", header=FALSE)
 2 | names(cdata) <- c("X1", "X2", "class")
 2 | 
 2 | # FIX: step() below reads Y, but Y was originally defined only at line 49,
 2 | # after build.lms.path(100) has already executed -> "object 'Y' not found".
 2 | # Define it as soon as cdata is available.
 2 | Y <- as.matrix(cdata$class)
 3 | 
 4 | # for plotting x's and o's  (FIX: '%' is not a comment character in R; use '#')
 5 | ones <- subset(cdata, class == 1)
 6 | zeros <- subset(cdata, class == 0)
 7 | 
 8 | X <- as.matrix(cbind(cdata$X1, cdata$X2))
 9 | cov.X <- cov(X)
10 | eigen.cov.X <- eigen(cov.X)
11 | max.lambda <- max(eigen.cov.X$values)
12 | rho <- 1/max.lambda
13 | rho <- rho/4
14 | 
15 | # the logistic step: one stochastic-gradient-ascent update for logistic regression
16 | step <- function(theta.t) {
17 |     i <- sample(1:dim(X)[1], 1)        # pick a random observation
18 |     x.i <- as.matrix(X[i,])
19 |     y.i <- as.matrix(Y[i,])
20 |     theta.T.x <- t(theta.t) %*% x.i
21 |     y.err <- y.i - (1 / (1 + exp(-1*theta.T.x)))   # residual against the logistic prediction
22 |     s <- rho * (y.err[1,1] * x.i)
23 |     return(s)
24 | }
25 | 
26 | # LMS iters times
27 | build.lms.path <- function(iters) {
28 |     theta.path.1 <- vector(mode="numeric", length=iters)
29 |     theta.path.2 <- vector(mode="numeric", length=iters)
30 |     theta.curr <- as.matrix(c(theta.path.1[1], theta.path.2[1]))  # starts at (0, 0)
31 |     for (i in 2:iters) {
32 |         s <- step(theta.curr)
33 |         theta.next <- theta.curr + s
34 |         theta.path.1[i] <- theta.next[1]
35 |         theta.path.2[i] <- theta.next[2]
36 |         theta.curr <- theta.next
37 |     }
38 |     return(rbind(theta.path.1, theta.path.2))
39 | }
40 | 
41 | lms.path <- build.lms.path(100)
42 | 
43 | 
# for plotting the line = .5  (FIX: '%' is not a comment character in R; use '#')
44 | theta.bar <- lms.path[,100]
45 | slope <- -1* theta.bar[1] / theta.bar[2]
46 | intercept <- .5 / theta.bar[2]
47 | 
48 | # for solving the linear regression and plotting the line = .5
49 | Y <- as.matrix(cdata$class)
50 | Xt.X <- t(X) %*% X
51 | Xt.Y <- t(X) %*% Y
52 | Xt.X.inv <- solve(Xt.X)
53 | theta.star <- Xt.X.inv %*% Xt.Y
54 | slope.lin <- -1 * theta.star[1,1] / theta.star[2,1]
55 | intercept.lin <- .5 / theta.star[2,1]
56 | 
57 | # plot the points and both lines
58 | plot.classification <- function() {
59 |     pdf("ps2_2.pdf")
60 |     plot(ones$X1, ones$X2, typ="p", pch="x")
61 |     points(zeros$X1, zeros$X2, pch="o")
62 |     abline(intercept, slope)
63 |     abline(intercept.lin, slope.lin, lty=2)
64 |     dev.off()
65 | }
66 | 
67 | # try it on the new data
68 | tdata <- read.table("testing.dat", header=FALSE)
69 | names(tdata) <- c("X1", "X2", "class")
70 | new.zeros <- subset(tdata, class == 0)
71 | new.ones <- subset(tdata, class == 1)
72 | plot.new.classification <- function() {
73 |     pdf("ps2_2d.pdf")
74 |     plot(new.ones$X1, new.ones$X2, typ="p", pch="x", xlim=c(-4,4), ylim=c(-4, 4))
75 |     points(new.zeros$X1, new.zeros$X2, pch="o")
76 |     abline(intercept, slope)
77 |     abline(intercept.lin, slope.lin, lty=2)
78 |     dev.off()
79 | }
80 | 
81 | plot.new.classification()
82 | 
--------------------------------------------------------------------------------
/least_mean_squares/lms.R:
--------------------------------------------------------------------------------
 1 | data <- read.table("lms.dat", header=FALSE)
 2 | X <- as.matrix(cbind(data$V1, data$V2))
 3 | Y <- as.matrix(data$V3)
 4 | Xt.X <- t(X) %*% X
 5 | Xt.Y <- t(X) %*% Y
 6 | Xt.X.inv <- solve(Xt.X)
 7 | theta.star <- Xt.X.inv %*% Xt.Y
 8 | print(theta.star)
 9 | #           [,1]
10 | # [1,]  1.0394654    (FIX: pasted console output is not valid R source; commented out)
11 | # [2,] -0.9764485
12 | 
13 | data <- read.table("lms.dat", header=FALSE)
14 | 
15 | X <- as.matrix(cbind(data$V1, data$V2))
16 | Y <- as.matrix(data$V3)
17 | 
18 | # compute optimal theta
19 | Xt.X <- t(X) %*% X
20 | 
Xt.X.inv <- solve(t(X) %*% X)
 21 | Xt.Y <- t(X) %*% Y
 22 | theta.star <- Xt.X.inv %*% Xt.Y
 23 | 
 24 | print(theta.star)
 25 | #           [,1]
 26 | # [1,]  1.0394654    (FIX: pasted console output is not valid R source; commented out)
 27 | # [2,] -0.9764485
 28 | 
 29 | # covariance matrix of X  (FIX: '%' is not a comment character in R; use '#')
 30 | cov.X <- cov(X)
 31 | eigen.cov.X <- eigen(cov.X)
 32 | 
 33 | print(eigen.cov.X)
 34 | # $values
 35 | # [1] 2.624552 1.026767
 36 | # $vectors
 37 | #            [,1]       [,2]
 38 | # [1,] -0.8530643 -0.5218059
 39 | # [2,]  0.5218059 -0.8530     (NOTE: pasted output was already truncated here)
 40 | 
 41 | # parameter space coordinates
 42 | t.x <- seq(theta.star[1]-.5, theta.star[1]+.5, length=100)
 43 | t.y <- seq(theta.star[2]-.5, theta.star[2]+.5, length=100)
 44 | 
 45 | # the loss at point (i, j) in parameter space
 46 | loss.at <- function(i, j) {
 47 |     theta.hat <- rbind(t.x[i], t.y[j])
 48 |     Y.hat <- X %*% theta.hat
 49 |     Y.err <- Y - Y.hat
 50 |     loss <- t(Y.err) %*% Y.err          # squared-error loss
 51 |     return(loss)
 52 | }
 53 | 
 54 | # computes loss over all parameter space coordinates from above
 55 | compute.loss <- function() {
 56 |     Z <- matrix(nrow=100, ncol=100)
 57 |     for (i in seq(1,100)) {
 58 |         for(j in seq(1,100)) {
 59 |             Z[i,j] <- loss.at(i,j)
 60 |         }
 61 |     }
 62 |     return(Z)
 63 | }
 64 | 
 65 | Z <- compute.loss()
 66 | 
 67 | pdf("ps2_1b.pdf")
 68 | contour(t.x, t.y, Z)
 69 | dev.off()
 70 | 
 71 | data <- read.table("lms.dat", header=FALSE)
 72 | X <- as.matrix(cbind(data$V1, data$V2))
 73 | Y <- as.matrix(data$V3)
 74 | 
 75 | # compute optimal theta
 76 | Xt.X <- t(X) %*% X
 77 | Xt.X.inv <- solve(t(X) %*% X)
 78 | Xt.Y <- t(X) %*% Y
 79 | theta.star <- Xt.X.inv %*% Xt.Y
 80 | 
 81 | # compute step sizes to use
 82 | cov.X <- cov(X)
 83 | eigen.cov.X <- eigen(cov.X)
 84 | lambda.max <- max(eigen.cov.X$values)
 85 | rho.max <- 1/lambda.max
 86 | rho.half <- rho.max/2
 87 | rho.quart <- rho.max/4
 88 | 
 89 | # returns a step in the right direction (one stochastic LMS update)
 90 | step <- function(rho, theta.t) {
 91 |     i <- sample(1:dim(X)[1], 1)        # pick a random observation
 92 |     x.i <- as.matrix(X[i,])
 93 |     y.i <- as.matrix(Y[i,])
 94 |     y.err <- y.i - (t(theta.t) %*% x.i)
 95 |     s <- rho * (y.err[1,1] * x.i)
 96 |     return(s)
 97 | }
 98 | 
 99 | # take iters number of LMS steps
100 | build.lms.path <- function(rho, iters) {
101 |     theta.path.1 <- vector(mode="numeric", length=iters)
102 |     theta.path.2 <- vector(mode="numeric", length=iters)
103 |     theta.curr <- as.matrix(c(theta.path.1[1], theta.path.2[1]))  # starts at (0, 0)
104 |     for (i in 2:iters) {
105 |         s <- step(rho, theta.curr)
106 |         theta.next <- theta.curr + s
107 |         theta.path.1[i] <- theta.next[1]
108 |         theta.path.2[i] <- theta.next[2]
109 |         theta.curr <- theta.next
110 |     }
111 |     return(rbind(theta.path.1, theta.path.2))
112 | }
113 | 
114 | # plot the path
115 | plot.lms.path <- function(out, iters, stepsize) {
116 |     lms <- build.lms.path(stepsize, iters)
117 |     pdf(out)
118 |     plot(lms[1,], lms[2,], typ="l")
119 |     points(theta.star[1], theta.star[2], pch=1, cex=3, col="red", lwd=4)
120 |     dev.off()
121 | }
122 | 
123 | plot.lms.path("ps2_1c_rho_max.pdf", 100, rho.max)
124 | plot.lms.path("ps2_1c_rho_half.pdf", 100, rho.half)
125 | plot.lms.path("ps2_1c_rho_quart.pdf", 100, rho.quart)
126 | 
127 | savehistory("ps2_1c.R")
128 | 
--------------------------------------------------------------------------------
/clustering/em.borked.py:
--------------------------------------------------------------------------------
 1 | #!/usr/bin/env python
 2 | 
 3 | import math
 4 | import numpy as np
 5 | import random
 6 | 
 7 | MAX_ITERATIONS = 10000
 8 | CONVERGENCE_THRESH = 1e-14
 9 | 
10 | def cluster(data, likelihood_fn, initial_parameters, k=4):
11 |     posterior = expectation_maximization(data, likelihood_fn, initial_parameters, k=k)
12 |     #print posterior
13 |     clusters = np.apply_along_axis(np.argmax, 1, posterior)
14 |     return clusters, [0.]*k
15 | 
16 | def expectation_maximization(data, likelihood_fn, initial_parameters, k=4):
17 | 
18 |     n = data.shape[0] # number of observations
19 |     d = data.shape[1] # dimensionality
20 | 
21 |     prior = initialize_cluster_probabilities(k)
22 |     parameters = initial_parameters(data, k)
23 |     curr_ll = compute_log_likelihood(likelihood_fn, data, prior, *parameters)
24 | 
25 |     for _ in
range(MAX_ITERATIONS): 26 | 27 | # E step 28 | likelihood = likelihood_fn(data, prior, *parameters) 29 | posterior = compute_posterior(likelihood) 30 | 31 | # M step 32 | prior = update_cluster_probabilities(posterior) 33 | parameters = update_parameters(data, prior, posterior) 34 | print parameters 35 | print "prior", prior 36 | print "posterior0", posterior[0,:] 37 | print "posterior0", posterior[0,:] 38 | #print 'in main', parameters 39 | next_ll = compute_log_likelihood(likelihood_fn, data, prior, *parameters) 40 | print curr_ll, next_ll 41 | 42 | if converged(curr_ll, next_ll): 43 | break 44 | 45 | curr_ll = next_ll 46 | return posterior 47 | 48 | def initialize_cluster_probabilities(k): 49 | return np.ones((k,1)) / float(k) # uniform 50 | 51 | def compute_log_likelihood(likelihood_fn, data, prior, *parameters): 52 | #print 'in compute_log_likelihood', parameters 53 | likelihood = likelihood_fn(data, prior, *parameters) 54 | return np.log(likelihood.sum(axis=1)).sum() 55 | 56 | def compute_posterior(likelihood): 57 | return likelihood / np.transpose(np.matrix(likelihood.sum(axis=1))) 58 | 59 | def update_cluster_probabilities(posterior): 60 | n = posterior.shape[0] 61 | return np.transpose(posterior.sum(axis=0)) / n 62 | 63 | def update_parameters(data, prior, posterior): 64 | #print "UPDATE PARAMETERS" 65 | n = data.shape[0] 66 | d = data.shape[1] 67 | k = posterior.shape[1] 68 | mu = np.ones((k,d)) 69 | sigmasq = np.ones((k,1)) 70 | for c in range(k): 71 | dp = np.ones((n,d)) 72 | for i in range(n): 73 | dp[i,:] = data[i,:]*posterior[i,c] 74 | mu[c,:] = dp.sum(axis=0) / (n*prior[c,:]) 75 | diff = data - mu[c,:] 76 | sigmasq[c,:] = np.multiply(diff, diff).sum(axis=1).sum() / n 77 | return mu, sigmasq 78 | 79 | def converged(old, new): 80 | return (new - old <= CONVERGENCE_THRESH) 81 | 82 | # For various distributions ... 
83 | def isotropic_bi_normal_likelihood(data, prior, *parameters): 84 | mu = parameters[0] 85 | sigmasq = parameters[1] 86 | 87 | n = data.shape[0] 88 | k = mu.shape[0] 89 | d = data.shape[1] 90 | 91 | class_likelihood = np.zeros((n,k)) 92 | z = 2.*math.pi*sigmasq 93 | 94 | for c in range(k): 95 | distances = np.matrix(data - mu[c,:]) 96 | for j in range(n): 97 | distancessq = distances[j,:]*np.transpose(distances[j,:]) 98 | exponent = np.exp(-1.*distancessq/(2.*sigmasq[c])) 99 | class_likelihood[j,c] = prior[c]*(1./z[c])*exponent 100 | 101 | return class_likelihood 102 | 103 | def isotropic_bi_normal_initial_parameters(data, k): 104 | n = data.shape[0] 105 | d = data.shape[1] 106 | 107 | mu = data[[random.randint(0, n - 1) for _ in range(k)], :] # (k x d) 108 | 109 | diff = data - mu[0,:] 110 | s = np.multiply(diff, diff).sum(axis=1).sum() / n 111 | sigmasq = np.ones((k,1))*s # isotropriorc => cov = sigma*I 112 | 113 | mu = np.matrix([[.15, .231], 114 | [-.121, .435], 115 | [-.489, -.890], 116 | [.98, -.678]]) 117 | sigmasq = np.ones((k,1)) 118 | 119 | return mu, sigmasq 120 | -------------------------------------------------------------------------------- /clustering/em.shivaram.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import math 4 | 5 | CONVERGENCE_THRESHOLD = 1e-4 6 | 7 | NUM_STATES = 4 8 | 9 | FILE = "hmm-gauss.dat" 10 | TEST_FILE = "hmm-test.dat" 11 | 12 | def read_input(f): 13 | data = [] 14 | with open(f) as datafile: 15 | for line in datafile.readlines(): 16 | points = line.split() 17 | data.append([float(pt) for pt in points]) 18 | return data 19 | 20 | def calculate_isotropic_bivariate_normal_pdf(x, mu, sigma): 21 | exp1 = -1.*(x[0] - mu[0])*(x[0] - mu[0]) / (2.*sigma[0]) 22 | exp2 = -1.*(x[1] - mu[1])*(x[1] - mu[1]) / (2.*sigma[1]) 23 | dr1 = math.sqrt(2.*math.pi * sigma[0]) 24 | dr2 = math.sqrt(2.*math.pi * sigma[1]) 25 | return math.exp(exp1) * math.exp(exp2) * (1. 
/ dr1) * (1. / dr2) 26 | 27 | def get_obs_likelihood(obs, mu, sigma): 28 | pdfs = [] 29 | for i in range(NUM_STATES): 30 | pdfs.append(calculate_isotropic_bivariate_normal_pdf(obs, mu[i], sigma[i])) 31 | return pdfs 32 | 33 | def normalize(vec): 34 | sum = float(sum(vec)) 35 | if (sum == 0.): 36 | sum = 1.0 37 | return map(lambda x: x/sum, vec) 38 | 39 | def emstep(data, prior, mu, sigma, loglikOnly): 40 | 41 | num_samples = len(data) 42 | loglik = 0. 43 | 44 | # E-step 45 | # First compute the activations for each sample point 46 | activations = [] 47 | for x in range(num_samples): 48 | a = [] 49 | for s in range(NUM_STATES): 50 | a.append((0.,0.)) 51 | activations.append(a) 52 | for x in range(num_samples): 53 | activations[x] = get_obs_likelihood(data[x], mu, sigma) 54 | 55 | # Compute the log-likelihood by multiplying activations with the prior 56 | for d in range(num_samples): 57 | prob_s = 0. 58 | for s in range(NUM_STATES): 59 | prob_s = prob_s + (activations[d][s] * prior[s]) 60 | loglik = loglik + math.log(prob_s) 61 | 62 | if (loglikOnly): 63 | return loglik 64 | 65 | # Now calculate the posterior distribution for each sample point 66 | post = [] 67 | for x in range(num_samples): 68 | post.append([0.]*NUM_STATES) 69 | for d in range(num_samples): 70 | for s in range(NUM_STATES): 71 | post[d][s] = prior[s] * activations[d][s] 72 | if d < 10: 73 | print "unnorm", post[d] 74 | 75 | #print "Posterior:\n", post[0] 76 | # Normalize posterior for each sample 77 | # This will make the posterior for each sample to add up to 1. 78 | for d in range(num_samples): 79 | sample_sum = float(sum(post[d])) 80 | if d < 10: 81 | print "sample_sum", sample_sum 82 | if sample_sum != 0: 83 | for s in range(NUM_STATES): 84 | post[d][s] = post[d][s] / sample_sum 85 | if d < 10: 86 | print post[d] 87 | 88 | #print "Posterior:\n", post[0] 89 | # M-step 90 | # Update the prior by using the posterior 91 | for s in range(NUM_STATES): 92 | sample_sum = 0. 
93 | for x in range(num_samples): 94 | sample_sum += post[x][s] 95 | prior[s] = sample_sum / num_samples 96 | 97 | # Update mu = post(x) * obs / sum_t(post(x)) 98 | for s in range(NUM_STATES): 99 | prob1 = 0. 100 | prob2 = 0. 101 | for d in range(num_samples): 102 | prob1 = prob1 + (post[d][s] * data[d][0]) 103 | prob2 = prob2 + (post[d][s] * data[d][1]) 104 | dr = float(prior[s]*num_samples) 105 | mu[s] = (prob1/dr, prob2/dr) 106 | 107 | # Update sigma = post(x) * (obs - mu) (obs - mu)' / sum_t(post(x)) 108 | for s in range(NUM_STATES): 109 | nr = 0. 110 | for x in range(num_samples): 111 | omu = (data[x][0] - mu[s][0], data[x][1] - mu[s][1]) 112 | omu_omut = (omu[0]*omu[0]) + (omu[1]*omu[1]) 113 | nr += float(post[x][s]*omu_omut) 114 | dr = prior[s]*2.*num_samples 115 | sigma[s] = (nr / dr, nr / dr) 116 | 117 | return loglik 118 | 119 | def learn_gmm(file, iter, test_file): 120 | data = read_input(file) 121 | 122 | prior = map(lambda x: x/NUM_STATES, [1.]*NUM_STATES) 123 | 124 | mu = [(0.,0.)]*NUM_STATES 125 | 126 | mu[0] = (0.15, 0.231) 127 | mu[1] = (-0.121, 0.435) 128 | mu[2] = (-0.489, -0.890) 129 | mu[3] = (0.98, -0.678) 130 | 131 | sigma = [(1., 1.)]*NUM_STATES 132 | current_ll = 0. 
133 | 134 | for it in range(iter): 135 | prev_log_lik = current_ll 136 | current_ll = emstep(data, prior, mu, sigma, False) 137 | print "new log_lik", current_ll 138 | for s in range(NUM_STATES): 139 | print mu[s] 140 | print "Priors:" 141 | for s in range(NUM_STATES): 142 | print prior[s] 143 | if (abs(current_ll - prev_log_lik) < CONVERGENCE_THRESHOLD): 144 | print "converged to", current_ll, "in", it, "iterations" 145 | break 146 | 147 | print "Means of component densities:\n", mu 148 | print "Priors:\n", prior 149 | print "Sigmas:\n", sigma 150 | 151 | learn_gmm(FILE, 100, TEST_FILE) 152 | -------------------------------------------------------------------------------- /gibbs/toroidal_ising_mean.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import math 4 | import numpy as np 5 | import random 6 | import sys 7 | 8 | # Core functionality: 9 | 10 | def ps7q1(): 11 | 12 | # Problem parameters: 13 | sqrtd = 7 14 | d = sqrtd*sqrtd # number of nodes 15 | 16 | burn_in_period = 1000 17 | n = 5000 # number of samples 18 | theta_edges = (lambda x: .25) 19 | #zero_one_init = (lambda x: max((-1.)**x, 0.)) # for testing 20 | neg_pos_one_init = (lambda x: (-1.)**x) 21 | #theta_node = zero_one_init 22 | theta_node = neg_pos_one_init 23 | #x = (lambda x: random.uniform(0, 1.)) 24 | x = (lambda x: random.uniform(-1., 1.)) 25 | 26 | # Do Gibb's sampling. 27 | gibbs_grid = create_toroidal_grid(sqrtd, x, theta_node, theta_edges) 28 | mu = moments_via_gibbs(gibbs_grid, burn_in_period, n, d) 29 | 30 | print "\nGibb's samples moments:" 31 | for (id, node) in gibbs_grid.iteritems(): 32 | print "ID: %d, mean = %3.4f" % (id, mu[0,id-1]) 33 | 34 | # Do naive mean field updates. 
35 | nmf_grid = create_toroidal_grid(sqrtd, x, theta_node, theta_edges) 36 | tau = moments_via_naive_mean_field(nmf_grid, 2*n) 37 | 38 | print "\nNaive mean field moments:" 39 | #for (id, node) in nmf_grid.iteritems(): 40 | # print "ID: %d, mean = %3.4f" % (id, tau[id-1]) 41 | id = 1 42 | for i in range(sqrtd): 43 | print "" 44 | for j in range(sqrtd): 45 | sys.stdout.write("%3.4f\t" % tau[id-1]) 46 | id += 1 47 | print "" 48 | 49 | # Error: 50 | print "\nError:" 51 | print np.absolute(tau - mu).sum() / float(d) 52 | 53 | def create_toroidal_grid(sqrtd, value, theta_node, theta_edge): 54 | n = sqrtd * sqrtd 55 | grid = {} 56 | for i in range(1, n+1): 57 | grid[i] = IsingNode(i, value(i), theta_node(i), theta_edge(i)) 58 | for (id, node) in grid.iteritems(): 59 | node.neighbors = get_toroidal_neighbors(id, sqrtd, grid) 60 | return grid 61 | 62 | def moments_via_gibbs(grid, burn_in_period, n, d): 63 | # Burn-in period. 64 | for t in range(burn_in_period): 65 | do_gibbs_burn_in_iteration(grid) 66 | 67 | # Take samples. 68 | samples = np.matrix([[0.]*d]*n) # (n x d) 69 | for t in range(n): 70 | samples[t,:] = do_gibbs_sample_iteration(grid) 71 | 72 | return np.array(samples.sum(axis=0) / float(n)) 73 | 74 | def do_gibbs_burn_in_iteration(grid): 75 | for (id, node) in grid.iteritems(): 76 | node.do_gibbs_update() 77 | 78 | def do_gibbs_sample_iteration(grid): 79 | sample = np.array([0.]*len(grid.values())) 80 | for (id, node) in grid.iteritems(): 81 | node.do_gibbs_update() 82 | sample[id-1] = node.value 83 | return sample 84 | 85 | def moments_via_naive_mean_field(grid, n): 86 | # Update the mean for a while. 
87 | for t in range(n): 88 | do_nmf_iteration(grid) 89 | 90 | return np.array([node.mean for node in grid.values()]) 91 | 92 | def do_nmf_iteration(grid): 93 | for (id, node) in grid.iteritems(): 94 | node.do_nmf_update() 95 | 96 | class IsingNode(object): 97 | 98 | def __init__(self, id, value, theta_node, theta_edge): 99 | self.id = int(id) 100 | self.value = value 101 | self.mean = value 102 | self.theta_node = theta_node 103 | self.theta_edge = theta_edge 104 | self.neighbors = {'up' : None, 105 | 'down' : None, 106 | 'left' : None, 107 | 'right' : None} 108 | 109 | def __cmp__(self, other): 110 | return self.id == other.id 111 | 112 | def __str__(self): 113 | return str(self.id) 114 | 115 | def __repr__(self): 116 | return "ID: %d, neighbors = {%s, %s, %s, %s}" % (self.id, 117 | str(self.neighbors['up']), 118 | str(self.neighbors['down']), 119 | str(self.neighbors['left']), 120 | str(self.neighbors['right'])) 121 | 122 | def zero_one_rule(self): 123 | neighbor_sum = sum([n.value for n in self.neighbors.values()]) 124 | return (1. / (1. + math.exp(-1.*(self.theta_node + self.theta_edge*neighbor_sum)))) 125 | 126 | def negative_positive_one_rule(self): 127 | neighbor_sum = sum([n.value for n in self.neighbors.values()]) 128 | #return (1. / (1. + math.exp(-2.*(self.theta_edge*neighbor_sum - 2.*self.theta_node))) 129 | return (1. / (1. + math.exp(-2.*(self.theta_node + self.theta_edge*neighbor_sum)))) 130 | 131 | def do_gibbs_update(self): # FIXME 132 | u = random.uniform(0.,1.) 133 | thresh = self.negative_positive_one_rule() 134 | #thresh = self.zero_one_rule() 135 | self.value = (1. if (u <= thresh) else -1.) 136 | 137 | def do_nmf_update(self): 138 | neighbor_sum = sum([n.mean for n in self.neighbors.values()]) 139 | y = math.exp(2.*(self.theta_node + self.theta_edge*neighbor_sum)) 140 | self.mean = ((y - 1.)/(1. 
+ y)) 141 | 142 | # Necessary but not particularly interesting functions: 143 | 144 | def get_toroidal_neighbors(id, sqrtd, grid): 145 | right_edge = is_right_edge(id, sqrtd) 146 | left_edge = is_left_edge(id, sqrtd) 147 | top_edge = is_top_edge(id, sqrtd) 148 | bottom_edge = is_bottom_edge(id, sqrtd) 149 | row = get_row(id, sqrtd) 150 | col = get_col(id, sqrtd) 151 | up = ((id - sqrtd) if not top_edge else ((sqrtd * sqrtd) - sqrtd + col)) 152 | down = ((id + sqrtd) if not bottom_edge else col) 153 | left = ((id - 1) if not left_edge else ((id - 1) + sqrtd)) 154 | right = ((id + 1) if not right_edge else ((id + 1) - sqrtd)) 155 | return {'up' : grid[up], 156 | 'down' : grid[down], 157 | 'left' : grid[left], 158 | 'right' : grid[right]} 159 | 160 | def is_right_edge(id, sqrtd): 161 | return int((id + 1) % sqrtd == 1) 162 | 163 | def is_left_edge(id, sqrtd): 164 | return int((id - 1) % sqrtd == 0) 165 | 166 | def is_top_edge(id, sqrtd): 167 | return int((id - sqrtd) <= 0) 168 | 169 | def is_bottom_edge(id, sqrtd): 170 | return int((id + sqrtd) > (sqrtd * sqrtd)) 171 | 172 | def get_row(id, sqrtd): 173 | return ((int(id) - 1) / int(sqrtd)) + 1 174 | 175 | def get_col(id, sqrtd): 176 | return (int(id) % int(sqrtd)) + (sqrtd * is_right_edge(id, sqrtd)) 177 | 178 | if __name__ == "__main__": 179 | ps7q1() 180 | -------------------------------------------------------------------------------- /naive_mean_field/toroidal_ising_mean.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import math 4 | import numpy as np 5 | import random 6 | import sys 7 | 8 | # Core functionality: 9 | 10 | def ps7q1(): 11 | 12 | # Problem parameters: 13 | sqrtd = 7 14 | d = sqrtd*sqrtd # number of nodes 15 | 16 | burn_in_period = 1000 17 | n = 5000 # number of samples 18 | theta_edges = (lambda x: .25) 19 | #zero_one_init = (lambda x: max((-1.)**x, 0.)) # for testing 20 | neg_pos_one_init = (lambda x: (-1.)**x) 21 | #theta_node 
= zero_one_init 22 | theta_node = neg_pos_one_init 23 | #x = (lambda x: random.uniform(0, 1.)) 24 | x = (lambda x: random.uniform(-1., 1.)) 25 | 26 | # Do Gibb's sampling. 27 | gibbs_grid = create_toroidal_grid(sqrtd, x, theta_node, theta_edges) 28 | mu = moments_via_gibbs(gibbs_grid, burn_in_period, n, d) 29 | 30 | print "\nGibb's samples moments:" 31 | for (id, node) in gibbs_grid.iteritems(): 32 | print "ID: %d, mean = %3.4f" % (id, mu[0,id-1]) 33 | 34 | # Do naive mean field updates. 35 | nmf_grid = create_toroidal_grid(sqrtd, x, theta_node, theta_edges) 36 | tau = moments_via_naive_mean_field(nmf_grid, 2*n) 37 | 38 | print "\nNaive mean field moments:" 39 | #for (id, node) in nmf_grid.iteritems(): 40 | # print "ID: %d, mean = %3.4f" % (id, tau[id-1]) 41 | id = 1 42 | for i in range(sqrtd): 43 | print "" 44 | for j in range(sqrtd): 45 | sys.stdout.write("%3.4f\t" % tau[id-1]) 46 | id += 1 47 | print "" 48 | 49 | # Error: 50 | print "\nError:" 51 | print np.absolute(tau - mu).sum() / float(d) 52 | 53 | def create_toroidal_grid(sqrtd, value, theta_node, theta_edge): 54 | n = sqrtd * sqrtd 55 | grid = {} 56 | for i in range(1, n+1): 57 | grid[i] = IsingNode(i, value(i), theta_node(i), theta_edge(i)) 58 | for (id, node) in grid.iteritems(): 59 | node.neighbors = get_toroidal_neighbors(id, sqrtd, grid) 60 | return grid 61 | 62 | def moments_via_gibbs(grid, burn_in_period, n, d): 63 | # Burn-in period. 64 | for t in range(burn_in_period): 65 | do_gibbs_burn_in_iteration(grid) 66 | 67 | # Take samples. 
68 | samples = np.matrix([[0.]*d]*n) # (n x d) 69 | for t in range(n): 70 | samples[t,:] = do_gibbs_sample_iteration(grid) 71 | 72 | return np.array(samples.sum(axis=0) / float(n)) 73 | 74 | def do_gibbs_burn_in_iteration(grid): 75 | for (id, node) in grid.iteritems(): 76 | node.do_gibbs_update() 77 | 78 | def do_gibbs_sample_iteration(grid): 79 | sample = np.array([0.]*len(grid.values())) 80 | for (id, node) in grid.iteritems(): 81 | node.do_gibbs_update() 82 | sample[id-1] = node.value 83 | return sample 84 | 85 | def moments_via_naive_mean_field(grid, n): 86 | # Update the mean for a while. 87 | for t in range(n): 88 | do_nmf_iteration(grid) 89 | 90 | return np.array([node.mean for node in grid.values()]) 91 | 92 | def do_nmf_iteration(grid): 93 | for (id, node) in grid.iteritems(): 94 | node.do_nmf_update() 95 | 96 | class IsingNode(object): 97 | 98 | def __init__(self, id, value, theta_node, theta_edge): 99 | self.id = int(id) 100 | self.value = value 101 | self.mean = value 102 | self.theta_node = theta_node 103 | self.theta_edge = theta_edge 104 | self.neighbors = {'up' : None, 105 | 'down' : None, 106 | 'left' : None, 107 | 'right' : None} 108 | 109 | def __cmp__(self, other): 110 | return self.id == other.id 111 | 112 | def __str__(self): 113 | return str(self.id) 114 | 115 | def __repr__(self): 116 | return "ID: %d, neighbors = {%s, %s, %s, %s}" % (self.id, 117 | str(self.neighbors['up']), 118 | str(self.neighbors['down']), 119 | str(self.neighbors['left']), 120 | str(self.neighbors['right'])) 121 | 122 | def zero_one_rule(self): 123 | neighbor_sum = sum([n.value for n in self.neighbors.values()]) 124 | return (1. / (1. + math.exp(-1.*(self.theta_node + self.theta_edge*neighbor_sum)))) 125 | 126 | def negative_positive_one_rule(self): 127 | neighbor_sum = sum([n.value for n in self.neighbors.values()]) 128 | #return (1. / (1. + math.exp(-2.*(self.theta_edge*neighbor_sum - 2.*self.theta_node))) 129 | return (1. / (1. 
+ math.exp(-2.*(self.theta_node + self.theta_edge*neighbor_sum)))) 130 | 131 | def do_gibbs_update(self): # FIXME 132 | u = random.uniform(0.,1.) 133 | thresh = self.negative_positive_one_rule() 134 | #thresh = self.zero_one_rule() 135 | self.value = (1. if (u <= thresh) else -1.) 136 | 137 | def do_nmf_update(self): 138 | neighbor_sum = sum([n.mean for n in self.neighbors.values()]) 139 | y = math.exp(2.*(self.theta_node + self.theta_edge*neighbor_sum)) 140 | self.mean = ((y - 1.)/(1. + y)) 141 | 142 | # Necessary but not particularly interesting functions: 143 | 144 | def get_toroidal_neighbors(id, sqrtd, grid): 145 | right_edge = is_right_edge(id, sqrtd) 146 | left_edge = is_left_edge(id, sqrtd) 147 | top_edge = is_top_edge(id, sqrtd) 148 | bottom_edge = is_bottom_edge(id, sqrtd) 149 | row = get_row(id, sqrtd) 150 | col = get_col(id, sqrtd) 151 | up = ((id - sqrtd) if not top_edge else ((sqrtd * sqrtd) - sqrtd + col)) 152 | down = ((id + sqrtd) if not bottom_edge else col) 153 | left = ((id - 1) if not left_edge else ((id - 1) + sqrtd)) 154 | right = ((id + 1) if not right_edge else ((id + 1) - sqrtd)) 155 | return {'up' : grid[up], 156 | 'down' : grid[down], 157 | 'left' : grid[left], 158 | 'right' : grid[right]} 159 | 160 | def is_right_edge(id, sqrtd): 161 | return int((id + 1) % sqrtd == 1) 162 | 163 | def is_left_edge(id, sqrtd): 164 | return int((id - 1) % sqrtd == 0) 165 | 166 | def is_top_edge(id, sqrtd): 167 | return int((id - sqrtd) <= 0) 168 | 169 | def is_bottom_edge(id, sqrtd): 170 | return int((id + sqrtd) > (sqrtd * sqrtd)) 171 | 172 | def get_row(id, sqrtd): 173 | return ((int(id) - 1) / int(sqrtd)) + 1 174 | 175 | def get_col(id, sqrtd): 176 | return (int(id) % int(sqrtd)) + (sqrtd * is_right_edge(id, sqrtd)) 177 | 178 | if __name__ == "__main__": 179 | ps7q1() 180 | -------------------------------------------------------------------------------- /clustering/em.py: -------------------------------------------------------------------------------- 
#!/usr/bin/env python
"""Distribution-agnostic expectation-maximization (EM) clustering.

The EM driver takes a pdf plus parameter init/update callbacks, so the
same loop serves both the multinomial and the isotropic bivariate
normal mixture models defined at the bottom of this module.
"""

import math
import numpy as np
import random

MAX_ITERATIONS = 100
CONVERGENCE_THRESHOLD = 1e-4
VERY_SMALL_NUMBER = -1e12  # stand-in for "log likelihood of ~zero"

np.seterr(all='raise')

def cluster(data, pdf, init_params, update_params, k=4):
    """Cluster `data` into `k` components with EM.

    Returns (clusters, centers) where centers[c] is the index of the
    data point with the highest posterior for component c, and
    clusters[i] is the center index assigned to point i.
    TODO(salspaugh): these "centers" are not actually central -- they
    are just the most confidently assigned member of each component.
    """
    log_likelihood, posterior, parameters = expectation_maximization(
        data, pdf, init_params, update_params, k)
    components = np.apply_along_axis(np.argmax, 1, posterior)
    centers = [np.argmax(posterior[:, c]) for c in range(k)]
    # Map component index -> center index in a single pass.  The old
    # in-place remapping (clusters[clusters == c] = centers[c]) silently
    # corrupted labels whenever some centers[c] was itself < k, because
    # later iterations re-matched already-remapped entries.
    clusters = np.array([centers[c] for c in components])
    return clusters, centers

def expectation_maximization(data, pdf, init_parameters, update_parameters, k):
    """Run the EM loop; return (log_likelihood, posterior, parameters)."""
    n = data.shape[0]  # number of observations
    d = data.shape[1]  # dimensionality of observations

    prior = initialize_prior(k)   # P(class); random by default
    posterior = np.zeros((n, k))  # P(class | observation)
    emissions = np.zeros((n, k))  # P(observation | class)

    parameters = init_parameters(k, d)
    curr_ll = VERY_SMALL_NUMBER  # very unlikely (log likelihood is negative)

    iterations = 0
    for _ in range(MAX_ITERATIONS):
        prev_ll = curr_ll
        curr_ll = do_EM_iteration(k, data, emissions, prior, posterior,
                                  pdf, update_parameters, *parameters)
        iterations += 1
        if converged(prev_ll, curr_ll):
            print("Converged in", iterations, "iterations")
            break

    return curr_ll, posterior, parameters

def initialize_prior(k, uniform=False):
    """Return an initial class prior of length k.

    Random (normalized) by default -- a uniform prior can reportedly
    leave EM stuck, hence the non-uniform default.
    """
    if uniform:
        return np.ones(k) / k
    prior = np.random.rand(k)
    return prior / prior.sum()

def do_EM_iteration(k, data, emissions, prior, posterior, pdf,
                    update_parameters, *parameters):
    """One E-step + M-step; returns the (pre-update) log likelihood."""
    # E-step
    update_emissions(k, data, emissions, pdf, *parameters)
    update_posterior(emissions, prior, posterior)
    log_likelihood = compute_log_likelihood(posterior)
    normalize_posterior(posterior)

    # M-step
    update_prior(prior, posterior)
    update_parameters(data, prior, posterior, *parameters)

    return log_likelihood

def update_emissions(k, data, emissions, pdf, *parameters):
    """Fill emissions[i][c] = pdf(data[i] | class c) for every point."""
    for data_idx in range(data.shape[0]):
        emissions[data_idx] = emission_probabilities(k, data[data_idx], pdf, *parameters)

def emission_probabilities(k, data, pdf, *parameters):
    """Evaluate the pdf of one observation under each of the k classes."""
    return [pdf(data, cls, *parameters) for cls in range(k)]

def update_posterior(emissions, priors, posterior):
    """Set posterior to the unnormalized product prior * emission."""
    posterior[:] = emissions * priors  # priors broadcasts across rows

def compute_log_likelihood(posterior):
    """Log likelihood of the data given the *unnormalized* posterior."""
    return np.log(posterior.sum(axis=1)).sum()

def normalize_posterior(posterior):
    """Normalize each row in place; all-zero rows are left untouched."""
    for row in posterior:  # rows are views, so in-place division sticks
        total = row.sum()
        if total != 0:
            row /= total

def update_prior(prior, posterior):
    """M-step prior update: mean posterior responsibility per class."""
    prior[:] = posterior.sum(axis=0) / posterior.shape[0]

def converged(old_log_likelihood, new_log_likelihood):
    """EM's likelihood is non-decreasing, so a small gain means done."""
    return (new_log_likelihood - old_log_likelihood < CONVERGENCE_THRESHOLD)

# Distribution-specific functions:

# Multinomial
def multinomial_parameter_init(k, d):
    """Random transition tables: p[c,x,y] = P_c(x | y).

    Columns are normalized to sum to one within each class.  Returned
    as a 1-tuple so the generic driver can splat it as *parameters.
    """
    p = np.random.rand(k, d, d)
    for c in range(k):
        for j in range(d):  # normalize across columns
            p[c, :, j] = p[c, :, j] / p[c, :, j].sum()
    return (p,)

def multinomial_pdf(counts, cls, *parameters):
    """Likelihood of a d-x-d bigram count table under class cls."""
    p = parameters[0][cls]
    d = p.shape[0]
    # counts[x,y] = c when x appears after y c times in the observation
    return np.power(p, counts.reshape(d, d)).prod()

def multinomial_parameter_update(data, prior, posterior, *parameters):
    """M-step: responsibility-weighted counts, renormalized per column."""
    n = data.shape[0]
    k = prior.shape[0]
    d = data.shape[1]
    p = parameters[0]

    for cls_idx in range(k):
        prob = np.zeros((d, d))
        for obs_idx in range(n):
            prob += (data[obs_idx].reshape(d, d)) * (posterior[obs_idx][cls_idx])
        p[cls_idx] = prob
        for j in range(d):  # normalize across columns
            normalization = p[cls_idx, :, j].sum()
            if normalization > 0:
                # Reuse the sum just computed (it was recomputed before).
                p[cls_idx, :, j] /= normalization

# Isotropic Bivariate Normal
def isotropic_bivariate_normal_parameter_init(k, d):
    """Fixed, spread-out initial means plus unit variances."""
    mu = np.zeros((k, d))
    mu[0, :] = (0.15, 0.231)
    mu[1, :] = (-0.121, 0.435)
    mu[2, :] = (-0.489, -0.890)
    mu[3, :] = (0.98, -0.678)
    sigma = np.ones((k, d))
    return mu, sigma

def isotropic_bivariate_normal_pdf(point, cls, *parameters):
    """Density of a 2-D point under class cls (diagonal covariance)."""
    mu = parameters[0][cls]
    sigma = parameters[1][cls]
    exp1 = -1. * (point[0] - mu[0]) * (point[0] - mu[0]) / (2. * sigma[0])
    exp2 = -1. * (point[1] - mu[1]) * (point[1] - mu[1]) / (2. * sigma[1])
    dr1 = math.sqrt(2. * math.pi * sigma[0])
    dr2 = math.sqrt(2. * math.pi * sigma[1])
    return math.exp(exp1) * math.exp(exp2) * (1. / dr1) * (1. / dr2)

def isotropic_bivariate_normal_parameter_update(data, prior, posterior, *parameters):
    """M-step for means and (shared per-axis) variances."""
    k = prior.shape[0]
    n = data.shape[0]

    (mu, sigma) = parameters

    # mu_s = sum_t posterior[t][s] * x_t / sum_t posterior[t][s]
    for s in range(k):
        prob1 = 0.
        prob2 = 0.
        for d in range(n):
            prob1 = prob1 + (posterior[d][s] * data[d][0])
            prob2 = prob2 + (posterior[d][s] * data[d][1])
        # prior[s] * n == sum_t posterior[t][s] because update_prior ran first
        dr = float(prior[s] * n)
        mu[s] = (prob1 / dr, prob2 / dr)

    # sigma_s = sum_t posterior[t][s] * ||x_t - mu_s||^2 / (2 * sum_t posterior[t][s])
    for s in range(k):
        nr = 0.
        for x in range(n):
            omu = (data[x][0] - mu[s][0], data[x][1] - mu[s][1])
            omu_omut = (omu[0] * omu[0]) + (omu[1] * omu[1])
            nr += float(posterior[x][s] * omu_omut)
        dr = prior[s] * 2. * n
        sigma[s] = (nr / dr, nr / dr)
# --------------------------------------------------------------------------------
# /sum_product/sumproduct.py:
# --------------------------------------------------------------------------------
#!/usr/bin/env python
# Purpose: Program to compute Sum-Product over arbitrary graphs and also over
# the graphs given in Problem Set 4.
4 | # Author: Sara Alspaugh 5 | # Date: 14 October 2012 6 | 7 | from collections import defaultdict 8 | import numpy as np 9 | 10 | DEBUG = False 11 | 12 | def debug(msg): 13 | """Prints debugging messages if the user sets the DEBUG flag.""" 14 | if DEBUG: 15 | try: 16 | print(msg) 17 | except IOError: 18 | import traceback 19 | traceback.print_exception(sys.exc_info()) 20 | 21 | class Graph(object): 22 | """An object to store the structure and potential functions of the graph 23 | we're computing Sum-Product over. Also has functions defined on it which .""" 24 | 25 | def __init__(self, m, nodes, edges, neighbors, node_potentials, edge_potentials): 26 | self.m = m 27 | self.nodes = nodes 28 | self.edges = edges 29 | self.neighbors = neighbors 30 | self.node_potentials = node_potentials 31 | self.edge_potentials = edge_potentials 32 | 33 | def initialize_messages(self): 34 | """Initializes all node messages to 1. Also initializes the list of 35 | which neighbors each node has sent to / received from.""" 36 | self.messages = {} 37 | self.neighbors_received = {} 38 | self.neighbors_sent = {} 39 | for node in self.nodes: 40 | self.neighbors_received[node] = set([]) 41 | self.neighbors_sent[node] = set([]) 42 | self.messages[node] = {} 43 | for (i,j) in self.edges: 44 | self.messages[i][j] = np.matrix([[1.]]*self.m, float) 45 | self.messages[j][i] = np.matrix([[1.]]*self.m, float) 46 | 47 | def collect_messages_from_neighbors(self, node, recipient): 48 | """Multiplies all messages from a given node's neighbors together except 49 | the one that is being sent to i.e., the recipient.""" 50 | messages_product = np.matrix([[1.]]*self.m, float) 51 | for neighbor in self.neighbors[node]: 52 | if neighbor != recipient: 53 | messages_product = np.multiply(messages_product, self.messages[neighbor][node]) 54 | return messages_product 55 | 56 | def send_messages(self, to_node, from_node): 57 | """Computes the message from node from_node to node to_node.""" 58 | debug("Sending 
message from node %d to node %d" % (from_node, to_node)) 59 | shared_edge = (to_node, from_node) if to_node < from_node else (from_node, to_node) 60 | messages_product = self.collect_messages_from_neighbors(from_node, to_node) 61 | node_potentials_times_messages = np.multiply(self.node_potentials[from_node], messages_product) 62 | self.messages[from_node][to_node] = self.edge_potentials[shared_edge] * node_potentials_times_messages 63 | debug("Message(%d, %d) = \n%s" % (from_node, to_node, str(self.messages[from_node][to_node]))) 64 | 65 | def should_send(self, node, neighbor): 66 | """Returns true if node has received all messages from all nodes except 67 | neighbor, meaning that the node can send a message to neighbor now.""" 68 | return (self.neighbors_received[node] >= self.neighbors[node] - set([neighbor]) 69 | and not neighbor in self.neighbors_sent[node]) 70 | 71 | def do_iteration(self): 72 | """Iterates through all nodes and has them send a message if they can 73 | send a message this iteration.""" 74 | for node in self.nodes: 75 | for neighbor in self.neighbors[node]: 76 | if self.should_send(node, neighbor): 77 | self.send_messages(neighbor, node) 78 | self.neighbors_received[neighbor].add(node) 79 | self.neighbors_sent[node].add(neighbor) 80 | if self.neighbors_received[node] != self.neighbors[node]: 81 | self.unconverged = True 82 | 83 | def compute_marginals(self): 84 | """Computes the marginals once all of the final messages have been 85 | received by all nodes.""" 86 | self.marginals = {} 87 | for node in self.nodes: 88 | messages_product = np.matrix([[1.]]*self.m, float) 89 | for neighbor in self.neighbors[node]: 90 | messages_product = np.multiply(messages_product, self.messages[neighbor][node]) 91 | self.marginals[node] = np.multiply(self.node_potentials[node], messages_product) 92 | self.marginals[node] = np.divide(self.marginals[node], np.sum(self.marginals[node])) 93 | 94 | def sum_product(self): 95 | """Runs the Sum-Product algorithm 
according to the rule that nodes 96 | only send to a neighbor once they have received all messages from 97 | their other neighbors. Nodes send messages until all messages have 98 | been sent, marking convergence. Lastly, marginals are computed and 99 | returned.""" 100 | self.initialize_messages() 101 | self.unconverged = True 102 | iterations = 0 103 | while self.unconverged: 104 | self.unconverged = False 105 | debug("Iteration %d" % iterations) 106 | self.do_iteration() 107 | iterations += 1 108 | self.compute_marginals() 109 | return self.marginals 110 | 111 | def print_marginals(self): 112 | for (node, marginal) in self.marginals.iteritems(): 113 | print("Node %d marginal = \n%s" % (node, str(marginal))) 114 | 115 | def ps4q1_graph(a=1., b=.5): 116 | """Returns an instance of the graph specific to Problem Set 4.""" 117 | nodes = set([1, 2, 3, 4, 5, 6]) 118 | edges = [(1,2), (1,3), (2, 4), (2, 5), (3,6)] 119 | neighbors = { 120 | 1 : set([2, 3]), 121 | 2 : set([1, 4, 5]), 122 | 3 : set([1, 6]), 123 | 4 : set([2]), 124 | 5 : set([2]), 125 | 6 : set([3])} 126 | node_potentials = {} 127 | edge_potentials = {} 128 | 129 | for node in nodes: 130 | if node % 2 == 0: 131 | #node_potentials[node] = np.matrix([[3., 1., 2.]], float) 132 | node_potentials[node] = np.matrix([[3.], [1.], [2.]], float) 133 | else: 134 | #node_potentials[node] = np.matrix([[1., 2., 3.]], float) 135 | node_potentials[node] = np.matrix([[1.], [2.], [3.]], float) 136 | 137 | edge_potential = np.matrix([[a, b, b], [b, a, b], [b, b, a]], float) 138 | for edge in edges: 139 | edge_potentials[edge] = edge_potential 140 | 141 | g = Graph(3, nodes, edges, neighbors, node_potentials, edge_potentials) 142 | return g 143 | 144 | def solve_ps4q1_inst(a=1., b=.5): 145 | """Solves an instance of the graph given in Problem Set 4.""" 146 | g = ps4q1_graph(a=a, b=b) 147 | g.sum_product() 148 | g.print_marginals() 149 | 150 | def solve_ps4q1(): 151 | """Solves both versions of the graph given in Problem Set 
4.""" 152 | marginals_part1 = solve_ps4q1_inst() 153 | marginals_part2 = solve_ps4q1_inst(a=1., b=2.) 154 | 155 | if __name__ == '__main__': 156 | """Program entry point and option parsing. 157 | Options are to turn on debugging and to provide an arbitrary graph 158 | instance to compute Sum-Product over. Program assumes that the graph 159 | is provided in JSON form with the nodes, edges, neighbors, and 160 | potential functions included.""" 161 | 162 | from optparse import OptionParser 163 | parser = OptionParser() 164 | parser.add_option("-f", "--file", dest="filename", 165 | help="write report to FILE", metavar="FILE") 166 | parser.add_option("-d", "--debug", dest="debug", 167 | action="store_true", default=False, 168 | help="output debugging messages", metavar="DEBUG") 169 | 170 | (options, args) = parser.parse_args() 171 | 172 | if options.debug: 173 | DEBUG = True 174 | 175 | if options.filename is None: 176 | solve_ps4q1() 177 | else: # assumes graph information in is JSON 178 | import json 179 | json_data = open(options.filename).read() 180 | data = json.loads(json_data) 181 | graph_data = data['graph'] 182 | graph = Graph(int(graph_data['m']), 183 | graph_data['nodes'], 184 | graph_data['edges'], 185 | graph_data['neigbors'], 186 | np.matrix(graph_data['node_potentials']), 187 | np.matrix(graph_data['edge_potentials'])) 188 | graph.sum_product() 189 | -------------------------------------------------------------------------------- /clustering/clustering.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import em 4 | import json 5 | import kmedoids 6 | import math 7 | import matplotlib.pyplot as plt 8 | import numpy as np 9 | import pygraphviz as pg 10 | import random 11 | import spectral 12 | 13 | from collections import defaultdict 14 | from splparser.parser import parse as splparse 15 | from splparser.parser import SPLSyntaxError 16 | from zss.compare import distance as tree_dist 17 | 
TEST_FILE = "points.dat"
TEST_POINTS = 1000

def main():
    """Dispatch to the selected clustering algorithm (EM, k-medoids, or
    spectral) over either Euclidean points or SPL query parse trees."""
    np.set_printoptions(linewidth=200, threshold=1000000)

    (options, args) = parse_args()

    k = (4 if options.k is None else options.k)
    f = (TEST_FILE if options.file is None else options.file)
    i = options.intermediate_in
    o = options.intermediate_out
    d = options.distances_file

    em_gaussian = (options.em and options.euclidean)
    em_multinomial = (options.em and not options.euclidean)

    clusterer_selected = False

    if em_multinomial:
        clusterer_selected = True
        clusterer = em.cluster
        pdf = em.multinomial_pdf
        param_init = em.multinomial_parameter_init
        param_update = em.multinomial_parameter_update

    if em_gaussian:
        enforce_single_selection(clusterer_selected)
        clusterer_selected = True
        clusterer = em.cluster
        pdf = em.isotropic_bivariate_normal_pdf
        param_init = em.isotropic_bivariate_normal_parameter_init
        param_update = em.isotropic_bivariate_normal_parameter_update

    if options.spectral:
        enforce_single_selection(clusterer_selected)
        clusterer_selected = True
        clusterer = spectral.cluster

    if options.kmedoids:
        enforce_single_selection(clusterer_selected)
        clusterer_selected = True
        clusterer = kmedoids.cluster

    if not clusterer_selected:
        print("Please select one clustering method.")
        exit()

    distances = None
    if em_multinomial:
        counts = None
        if options.distances_file is not None:
            distances = get_data(datafile=d, datareader=read_distance_matrix)
        if options.intermediate_in is not None:
            counts = get_data(datafile=i, datareader=read_counts_matrix)
        else:
            counts = count_bigrams(f, savefile=o)
        clusters, centers = clusterer(counts, pdf, param_init, param_update, k=k)
    elif em_gaussian:
        data = get_data(datafile=f, datareader=read_points)
        clusters, centers = clusterer(data, pdf, param_init, param_update, k=k)
    elif options.kmedoids or options.spectral:
        r = (read_points if options.euclidean else read_queries)
        data = get_data(datafile=f, datareader=r)
        if options.intermediate_in is not None:
            distances = get_data(datafile=i, datareader=read_distance_matrix)
        elif options.distances_file is not None:
            # BUG FIX: this branch previously read from the intermediate
            # file `i` even though the user supplied a distances file `d`.
            distances = get_data(datafile=d, datareader=read_distance_matrix)
        else:
            dist_builder = build_euclidean_distance_matrix if options.euclidean \
                    else build_tree_edit_distance_matrix
            distances = compute_distances(data, distancer=dist_builder,
                                          normalize=options.normalize, savefile=o)
        clusters, centers = clusterer(distances, k=k)

    if options.euclidean:
        output_point_clusters(data, clusters, centers)
    else:
        output_query_clusters(f, distances, clusters, centers)

def parse_args():
    """Define and parse the command-line interface."""
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option("-k", "--num_clusters", dest="k", type="int",
                      help="number of clusters to look for")
    parser.add_option("-f", "--file", dest="file", type="string", metavar="RAW_IN",
                      help="file containing the data to cluster (supported types: splunk queries and x,y coordinates)")
    parser.add_option("-i", "--intermediate_in", dest="intermediate_in", type="string", metavar="INTERMEDIATE_IN",
                      help="file containing intermediate data that has been preprocessed to speed later computations")
    parser.add_option("-o", "--intermediate_out", dest="intermediate_out", type="string", metavar="INTERMEDIATE_OUT",
                      help="file to write intermediate data, to speed later computations")
    parser.add_option("-d", "--distances_file", dest="distances_file", type="string", metavar="DISTANCES_IN",
                      help="file containing distances to use for plotting or otherwise")
    parser.add_option("-e", "--em",
                      action="store_true", default=False, dest="em",
                      help="run expectation-maximization on a data in FILE (defaults to n data points of four Gaussian x,y clusters)")
    parser.add_option("-m", "--kmedoids",
                      action="store_true", default=False, dest="kmedoids",
                      help="run k-medoids on a data in FILE (defaults to n data points of four Gaussian x,y clusters)")
    parser.add_option("-s", "--spectral",
                      action="store_true", default=False, dest="spectral",
                      help="run spectral clustering on data in FILE (defaults to n data points of four Gaussian x,y clusters)")
    parser.add_option("-q", "--queries",
                      action="store_false", default=True, dest="euclidean",
                      help="assume data is SPL queries (defaults to Gaussian Euclidean x,y points)")
    parser.add_option("-n", "--normalize",
                      action="store_true", default=False, dest="normalize",
                      help="normalize distances when distances are computed (only applies to spectral and kmedoids)")
    return parser.parse_args()

def enforce_single_selection(clusterer_selected):
    """Exit if the user asked for more than one clustering method."""
    if clusterer_selected:
        print("Please select one clustering method.")
        exit()

def is_npy(filename):
    """True when `filename` has a .npy extension."""
    return (filename[filename.rfind('.'):] == '.npy')

def read_counts_matrix(datafile):
    """Load a saved (n, d, d) bigram-count array."""
    return np.array(np.load(datafile))

def count_bigrams(datafile, savefile=None):
    """Build one d x d bigram-count matrix per query line in `datafile`.

    Each line is split on '|' into stages; the first word of each stage
    is a token, and token sequences are padded with '^' (start) and '$'
    (end) markers.  The (n, d, d) result is also saved to `savefile`.
    """
    if savefile is None:
        savefile = 'bigrams.npy'
    word_idxs = get_word_idxs(datafile)
    print(json.dumps(word_idxs))
    d = len(word_idxs.keys())
    all_counts = []
    with open(datafile) as data:
        for line in data.readlines():
            counts = np.zeros((d, d))
            stages = line.split('|')
            fragments = [stage.split() for stage in stages]
            words = [fragment[0] for fragment in fragments]
            # Start-of-sequence bigram.
            fidx = word_idxs['^']
            sidx = word_idxs[words[0]]
            counts[fidx, sidx] += 1.
            for i in range(len(words) - 1):
                fidx = word_idxs[words[i]]
                sidx = word_idxs[words[i + 1]]
                counts[fidx, sidx] += 1.
            # End-of-sequence bigram.
            fidx = word_idxs[words[len(words) - 1]]
            sidx = word_idxs['$']
            counts[fidx, sidx] += 1.
            all_counts.append(counts)
    all_counts = np.array(all_counts)
    np.save(savefile, all_counts)
    return all_counts

def get_word_idxs(datafile):
    """Map each token (plus '^' and '$' markers) to a dense index."""
    unique_words = defaultdict(int)
    with open(datafile) as data:
        for line in data.readlines():
            stages = line.split('|')
            fragments = [stage.split() for stage in stages]
            words = [fragment[0] for fragment in fragments]
            unique_words['^'] += 1
            unique_words['$'] += 1
            for i in range(len(words)):
                unique_words[words[i]] += 1
    d = len(unique_words.keys())
    return dict(zip(unique_words.keys(), range(d)))

def read_distance_matrix(datafile):
    """Load a saved m x m distance matrix."""
    return np.array(np.load(datafile))

def read_points(datafile):
    """Read 'x, y' lines into an (n, 2) array."""
    points = []
    with open(datafile) as data:
        for line in data.readlines():
            (x, y) = line.split(',')
            point = (float(x), float(y))
            points.append(point)
    return np.array(points)

def generate_random_point_clusters(datafile, n=TEST_POINTS):
    """Write n points drawn from four Gaussian blobs (one per quadrant)."""
    points = []
    with open(datafile, 'w') as out:
        sign = [-1, 1]
        mu = 4
        sigma = 1
        for _ in range(n):
            xsign = random.choice(sign)
            ysign = random.choice(sign)
            x = random.gauss(xsign * mu, sigma)
            y = random.gauss(ysign * mu, sigma)
            s = "%3.2f, %3.2f\n" % (x, y)
            points.append((x, y))
            out.write(s)
        out.flush()
    return points

def read_queries(datafile):
    """Parse each line as an SPL query; skip lines with syntax errors."""
    parsetrees = []
    with open(datafile) as data:
        for query in data.readlines():
            try:
                query = query.strip('\n')
                parsetree = splparse(query)
                parsetrees.append(parsetree)
            except SPLSyntaxError:
                print("Syntax error encountered while parsing SPL.")
                print("\t" + query)
                continue
    print("Done parsing queries.")
    return parsetrees

def build_tree_edit_distance_matrix(parsetrees, normalize=False):
    """Pairwise tree-edit distances between parse trees."""
    return build_distance_matrix(parsetrees, tree_dist, normalize=normalize)

def build_euclidean_distance_matrix(points, normalize=False):
    """Pairwise Euclidean distances between 2-D points."""
    return build_distance_matrix(points, euclidean_dist, normalize=normalize)

def euclidean_dist(a, b):
    """Euclidean distance between 2-D points a and b."""
    return math.sqrt((a[0] - b[0])**2 + (a[1] - b[1])**2)

def build_distance_matrix(data, distfn, normalize=False):
    """Symmetric m x m matrix of distfn over all pairs of `data`."""
    m = len(data)
    distances = np.zeros((m, m))
    max_distance = -1e10
    for i in range(m):
        for j in range(i + 1, m):  # the distance matrix is symmetric
            distance = distfn(data[i], data[j])
            max_distance = max(max_distance, distance)
            distances[i, j] = distances[j, i] = distance
    if normalize and max_distance > 0:
        # Guard: with fewer than two points max_distance keeps its -1e10
        # sentinel, and dividing by it would scramble the matrix.
        return distances / max_distance
    return distances

def get_data(datafile=TEST_FILE, datareader=read_points):
    """Read data with `datareader`, generating random points if missing."""
    try:
        data = datareader(datafile)
    except IOError:
        data = generate_random_point_clusters(datafile)
    return data

def compute_distances(data, distancer=build_euclidean_distance_matrix, normalize=False, savefile=None):
    """Build (and save) a pairwise distance matrix for `data`."""
    distances = distancer(data, normalize=normalize)
    if savefile is None:
        savefile = 'distances.npy'
    np.save(savefile, distances)
    return distances

def output_point_clusters(data, clusters, centers):
    """Print and plot clustering results for Euclidean points."""
    print_results(data, clusters)
    plot_euclidean_results(data, clusters, centers)

def output_query_clusters(datafile, distances, clusters, centers):
    """Print and plot clustering results for SPL queries."""
    with open(datafile) as rawdata:
        print_results(rawdata.readlines(), clusters)
    plot_query_results(distances, clusters)

def print_results(data, clusters):
    """Print each datum prefixed with its cluster label."""
    for i in range(len(data)):
        print(clusters[i], data[i])

def plot_euclidean_results(data, cluster_idxs, medoid_idxs):
    """Scatter-plot each cluster in its own color; medoids in black."""
    data = np.array(data)
    colors = ['b', 'g', 'r', 'c', 'm', 'y']
    cluster_idx_count = 0
    for medoid_idx in medoid_idxs:
        cluster_idx = np.where(cluster_idxs == medoid_idx)[0]
        print(cluster_idx)
        x = [pt[0] for pt in data[cluster_idx]]
        y = [pt[1] for pt in data[cluster_idx]]
        plt.plot(x, y, color=colors[cluster_idx_count], marker='o', linestyle='None')
        plt.plot(data[medoid_idx][0], data[medoid_idx][1], color='k', marker='o', linestyle='None')
        cluster_idx_count += 1
    plt.savefig("clusters.png")

def plot_query_results(distances, cluster_idxs):
    """Draw the query similarity graph with one color/shape per cluster."""
    colors = ['#006600', '#990000', '#3333CC', '#000000', '#FFCC00']
    shapes = ['circle', 'triangle', 'square', 'diamond', 'pentagon']
    d = distances.shape[0]
    seen_cluster_idxs = {}
    G = pg.AGraph()
    G.node_attr['style'] = 'filled'
    G.node_attr['label'] = ' '
    G.node_attr['height'] = .3
    G.node_attr['width'] = .3
    G.node_attr['fixedsize'] = 'true'
    for node_idx in range(d):
        c = colors[0]
        cluster = int(cluster_idxs[node_idx])
        if cluster in seen_cluster_idxs:  # already assigned a color
            color_shape_idx = seen_cluster_idxs[cluster]
        else:
            color_shape_idx = len(seen_cluster_idxs.keys())
            seen_cluster_idxs[cluster] = color_shape_idx
        c = colors[color_shape_idx]
        s = shapes[color_shape_idx]
        G.add_node(node_idx, fillcolor=c, shape=s)
    for i in range(d):
        for j in range(i + 1, d):
            if distances[i, j] > 0:
                G.add_edge(i, j, len=distances[i, j]*10.)
318 | else: 319 | G.add_edge(i,j, len=.01) 320 | G.edge_attr['style'] = 'setlinewidth(.001)' 321 | G.layout() 322 | G.draw("query_clusters.png") 323 | 324 | if __name__ == "__main__": 325 | main() 326 | -------------------------------------------------------------------------------- /iterative_proportional_fitting/ipf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import math 4 | import numpy as np 5 | 6 | from collections import defaultdict 7 | 8 | ROWS = 0 9 | COLS = 1 10 | 11 | class Graph(object): 12 | 13 | def __init__(self, nodes, cliques, data, debug=True): 14 | """ 15 | An object to represent an undirected graph of variables `nodes` and 16 | potentials on `cliques` of these nodes for the purpose of running 17 | iterative proportional fit given a set of samples `data`. 18 | Assumes variables (nodes) are binary. 19 | """ 20 | self.nodes = nodes 21 | self.cliques = cliques 22 | 23 | self.configurations = {} 24 | self.empirical_marginals = {} 25 | self.estimated_marginals = {} 26 | self.potentials = {} 27 | self.joints = {} 28 | self.configurations[nodes] = self.build_configuration_table(nodes) 29 | for clique in self.cliques: 30 | self.configurations[clique] = self.build_configuration_table(clique) 31 | self.empirical_marginals[clique] = np.matrix([[0.]]*2**len(clique)) 32 | self.estimated_marginals[clique] = np.matrix([[0.]]*2**len(clique)) 33 | self.potentials[clique] = np.matrix([[1.]]*2**len(clique)) 34 | 35 | self.data = data 36 | self.number_of_samples = self.data.shape[COLS] 37 | 38 | self.debug = debug 39 | 40 | def build_configuration_table(self, variables): 41 | """ 42 | Build a list of column vectors (actually, numpy matrices) such that 43 | each entry in the list is a `variable`-length vector that represents a 44 | `variable`-length number that is one larger than the previous one in 45 | list. 
In other words, the list is a set of binary numbers of length 46 | `variable` that count from 0 to 2**length(`variable`). Return the list. 47 | Assumes the possible states a variable can be in is {0,1}. 48 | """ 49 | old_configuration = np.matrix([[0.]]*len(variables)) 50 | configurations = [] 51 | configurations.append(old_configuration) 52 | for i in xrange(2**len(variables)-1): 53 | curr_configuration = self.inc(old_configuration) 54 | configurations.append(curr_configuration) 55 | old_configuration = curr_configuration 56 | return configurations 57 | 58 | def inc(self, zero_one_matrix): 59 | """ 60 | Increment the binary number represented by the column vector (actually, 61 | numpy matrix) `zero_one_matrix` and return that new value in a new 62 | column vector. 63 | Assumes the possible states a variable can be in is {0,1}. 64 | """ 65 | new_matrix = np.matrix(zero_one_matrix, copy=True) 66 | carry = 1 67 | for i in reversed(range(zero_one_matrix.shape[ROWS])): 68 | sum = int(zero_one_matrix[i,0]) + carry 69 | bit = sum % 2 70 | carry = sum / 2 71 | new_matrix[i,0] = bit 72 | return new_matrix 73 | 74 | def count(self, clique, entry): 75 | """ 76 | Count the number of instances in the data where the nodes in the given 77 | clique have values equal to the given configuration entry. 78 | 79 | Assumes that each column of the data represents a new sample of the 80 | variables represented by all nodes {X_1,...X_d} so that if there are n 81 | samples then data is a dxn matrix. 82 | """ 83 | count = 0. 84 | for sample in range(self.number_of_samples): 85 | if (self.data[clique,sample] == entry).all(): 86 | count += 1. 87 | return count 88 | 89 | def compute_empirical_marginals(self): 90 | """ 91 | Compute the empirical marginals of a certain graph with nodes {X_1,...,X_d} 92 | given data from n samples of these nodes i.e. a dxn matrix of data. 
93 | """ 94 | normalization_factor = self.number_of_samples 95 | for clique in self.cliques: 96 | row = 0 97 | for entry in self.configurations[clique]: 98 | self.empirical_marginals[clique][row,0] = self.count(clique, entry) 99 | row += 1 # Laplace smoothing: 100 | if (self.empirical_marginals[clique] == 0.).any(): # if any entries are zero 101 | self.empirical_marginals[clique] += 1. # add one to all counts 102 | self.empirical_marginals[clique] /= normalization_factor 103 | 104 | def matrix_column_to_string(self, matrix, col): 105 | s = '' 106 | for row in range(matrix.shape[ROWS]): 107 | s = ''.join([s, str(int(matrix[row,col]))]) 108 | return s 109 | 110 | def potential_products(self, configuration): 111 | """ 112 | Multiplies the clique potentials together at the d-dimensional point 113 | `configuration`. 114 | """ 115 | product = 1. 116 | for clique in self.cliques: 117 | these_vars = configuration[clique,0] 118 | idx = int(self.matrix_column_to_string(these_vars, 0), 2) 119 | product = product * math.exp(self.potentials[clique][idx]) 120 | return product 121 | 122 | def joint(self, configuration): 123 | """ 124 | Computes the joint at the d-dimensional point `configuration`. The 125 | joint is the normalized product of potentials at `configuration`. 126 | """ 127 | normalization_factor = 0. 128 | for entry in self.configurations[self.nodes]: 129 | normalization_factor += self.potential_products(entry) 130 | self.joints[configuration] = self.potential_products(configuration) / normalization_factor 131 | return self.joints[configuration] 132 | 133 | def estimated_marginal(self, clique, entry): 134 | """ 135 | Computes the marginal of `clique` at the size(clique)-dimensional 136 | point `entry` by summing over all joint configurations where the 137 | given clique points of that configuration are equal to `entry`. 138 | """ 139 | sum = 0. 
140 | for configuration in self.configurations[self.nodes]: 141 | if (configuration[clique,0] == entry).all(): 142 | sum += self.joint(configuration) 143 | return sum 144 | 145 | def update_estimated_marginals(self): 146 | """ 147 | Updates all of the clique marginals based on the current values of the 148 | potential functions. 149 | """ 150 | for clique in self.cliques: 151 | row = 0 152 | for entry in self.configurations[clique]: 153 | self.estimated_marginals[clique][row,0] = self.estimated_marginal(clique, entry) 154 | row += 1 155 | 156 | def log_empirical_over_estimated_marginal(self, clique): 157 | """ 158 | Returns the log of the empirical marginal for `clique` over the 159 | estimated marginal for the `clique`. 160 | """ 161 | if self.debug: 162 | print "Taking log of: " 163 | self.print_functions_of_clique(self.empirical_marginals[clique], clique, 'muhat') 164 | self.print_functions_of_clique(self.estimated_marginals[clique], clique, 'mutilde') 165 | return np.log(self.empirical_marginals[clique] / self.estimated_marginals[clique]) 166 | 167 | def do_IPF_update(self): 168 | """ 169 | Does one iteration of iterative proportional fitting. For each clique 170 | potential, it computes the new clique potential at this timestep based 171 | on that at the last timestep and the IPF update rule, also updating 172 | the estimated marginals each time a clique potential is updated. 173 | """ 174 | for clique in self.cliques: 175 | self.update_estimated_marginals() 176 | self.potentials[clique] = self.potentials[clique] + \ 177 | self.log_empirical_over_estimated_marginal(clique) 178 | 179 | def do_IPF(self, iterations): 180 | """ 181 | Main loop for iterative proportional fitting. Does `iteration` number 182 | of iterations, updating the graph potentials and marginals as it goes. 
183 | """ 184 | for t in xrange(iterations): 185 | if self.debug: 186 | print "Iteration %d" % t 187 | self.print_potentials() 188 | self.do_IPF_update() 189 | 190 | def compute_log_likelihood(self): 191 | self.log_likelihood = 0. 192 | for sample in xrange(self.data.shape[COLS]): 193 | self.log_likelihood += np.log(self.joint(self.data[:,sample])) 194 | 195 | def compute_singleton_node_entropy(self, node): 196 | singleton = np.matrix([[0.]]*2) 197 | row = 0 198 | for entry in [(0),(1)]: 199 | singleton[row,0] = self.estimated_marginal((node), entry) 200 | row += 1 201 | return singleton * np.log(singleton) 202 | 203 | def compute_joint_edge_entropy(self, edge): 204 | joint = np.matrix([[0.]]*4) 205 | row = 0 206 | for entry in [(0,0),(0,1),(1,0),(1,1)]: 207 | joint[row,0] = self.estimated_marginal(edge, entry) 208 | row += 1 209 | return joint * np.log(joint) 210 | 211 | def compute_mutual_information(self, edges): 212 | trees = [[(0,1),(1,3),(2,3)], 213 | [(0,2),(1,3),(2,3)], 214 | [(0,1),(0,2),(2,3)], 215 | [(0,1),(0,2),(1,3)], 216 | [(0,1),(1,2),(1,3)], 217 | [(0,1),(0,3),(0,2)], 218 | [(0,2),(1,2),(2,3)], 219 | [(0,3),(1,3),(2,3)], 220 | ] 221 | self.mutual_informatons = {} 222 | self.max_mutual_information = 0. 223 | for tree in trees: 224 | mutual_information = 0. 
225 | for edge in tree: 226 | mutual_information += self.compute_joint_edge_entropy(edge) 227 | for node in self.nodes: 228 | mutual_information += self.compute_singleton_node_entropy(node) 229 | if mutual_information > self.max_mutual_information: 230 | self.max_mutual_information = mutual_information 231 | self.max_mutual_information_tree = tree 232 | self.mutual_information[tree] = mutual_information 233 | 234 | def print_functions_of_clique(self, function, clique, fnname): 235 | print "\tclique: %s" % str(clique) 236 | for entry in self.configurations[clique]: 237 | input = self.matrix_column_to_string(entry, 0) 238 | output = '{0:2.2f}'.format(function[int(input, 2),0]) 239 | s = '\t\t' + fnname + '(' + input + ') = ' + output 240 | print s 241 | 242 | def print_functions(self, functions, fnname): 243 | for (clique, function) in functions.iteritems(): 244 | self.print_functions_of_clique(function, clique, fnname) 245 | 246 | def print_empirical_marginals(self): 247 | self.print_functions(self.empirical_marginals, 'mu') 248 | 249 | def print_potentials(self): 250 | self.print_functions(self.potentials, 'theta') 251 | 252 | def print_joint(self): # different from other functions --- just a map FIXME 253 | for entry in self.configurations[self.nodes]: 254 | input = self.matrix_column_to_string(entry, 0) 255 | s = '\t\tP(' + input + ') = ' + str(self.joints[entry]) 256 | print s 257 | 258 | def print_log_likelihood(self): 259 | print "\tL(...) 
=", self.log_likelihood 260 | 261 | def print_configurations(self): 262 | for (variables, configuration) in self.configurations.iteritems(): 263 | print "variables: %s" % str(variables) 264 | for entry in configuration: 265 | print self.matrix_column_to_string(entry, 0) 266 | 267 | def print_data(self): 268 | np.set_printoptions(linewidth=150) 269 | print self.data 270 | 271 | def print_results(self): 272 | print "Final empirical marginals:" 273 | self.print_empirical_marginals() 274 | print "Final potentials:" 275 | self.print_potentials() 276 | print "Final joint:" 277 | self.print_joint() 278 | print "Final log likelihood:" 279 | self.print_log_likelihood() 280 | 281 | def iterative_proportional_fit(self): 282 | self.compute_empirical_marginals() 283 | self.do_IPF(100) 284 | self.compute_log_likelihood() 285 | self.print_results() 286 | self.compute_mutual_information() 287 | print self.max_mutual_information_tree 288 | print self.max_mutual_information 289 | 290 | def ps5q3_graph(cliques, debug=False): 291 | nodes = (0,1,2,3) 292 | data = [] 293 | with open("Pairwise.dat") as datafile: 294 | rows = [line.split() for line in datafile.readlines()] 295 | data = [[float(element) for element in row] for row in rows] 296 | data = np.matrix(data) 297 | return Graph(nodes, cliques, data, debug=debug) 298 | 299 | def solve_ps5q3_part(part, cliques, debug=False): 300 | print "Solution to part %s " % str(part) 301 | g = ps5q3_graph(cliques, debug=debug) 302 | g.iterative_proportional_fit() 303 | 304 | def solve_ps5q3(debug=False): 305 | 306 | cliques_part1 = [(0,1), (0,3), (1,2), (2,3)] 307 | solve_ps5q3_part("1", cliques_part1) 308 | 309 | #cliques_part2 = [(0,1,2), (0,3)] 310 | cliques_part2 = [(0,1), (0,2), (0,3), (1,2)] 311 | solve_ps5q3_part("2", cliques_part2) 312 | 313 | #cliques_part3 = [(0,1,2,3)] 314 | cliques_part3 = [(0,1), (0,2), (0,3), (1,2), (1,3), (2,3)] 315 | solve_ps5q3_part("3", cliques_part3) 316 | 317 | 318 | if __name__ == '__main__': 319 | 
"""Program entry point and option parsing. 320 | Options are to turn on debugging and to provide an arbitrary graph 321 | instance and data to compute Iterative Proportional Fitting over. 322 | Program assumes that the graph is provided in JSON form with the nodes, 323 | cliques, and data provided. Program assumes nodes represent binary 324 | variables. 325 | """ 326 | 327 | from optparse import OptionParser 328 | parser = OptionParser() 329 | parser.add_option("-f", "--file", dest="filename", 330 | help="write report to FILE", metavar="FILE") 331 | parser.add_option("-d", "--debug", dest="debug", 332 | action="store_true", default=False, 333 | help="output debugging messages", metavar="DEBUG") 334 | 335 | (options, args) = parser.parse_args() 336 | 337 | if options.filename is not None: 338 | import json 339 | json_data = open(options.filename).read() 340 | data = json.loads(json_data) 341 | graph_data = data['graph'] 342 | graph = Graph(int(graph_data['nodes']), 343 | graph_data['cliques'], 344 | graph_data['data'], 345 | debug=graph_data['debug']) 346 | graph.iterative_proportional_fit() 347 | else: 348 | solve_ps5q3(debug=options.debug) 349 | 350 | -------------------------------------------------------------------------------- /EM_HMM/hmm.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # Description: A program to find the MLEs of an HMM with bivariate emission 3 | # probabilities and m states using Expectation Maximization. In fact 4 | # implements the Baum-Welch algorithm as described in Rabiner's tutorial 5 | # with normalization of the forward and backward variables. Another way 6 | # to normalize is to use logarithms to handle underflow issues [1,2]. 7 | # References: 8 | # [1] Rabiner, Lawrence. "A Tutorial on Hidden Markov Models and Selected Applications in Speech Recognition". 1989. 9 | # [2] Mann, Tobias. "Numerically Stable Hidden Markov Model". 2006. 
# Author: Sara Alspaugh
# Date: 30 October 2012


from collections import defaultdict

import numpy as np

# Sentinel standing in for log(0) in the extended-logarithm helpers below
# (Mann 2006, "Numerically Stable Hidden Markov Model").
LOGZERO = 'LOGZERO'


def eexp(x):
    """Extended exponential: eexp(LOGZERO) == 0, otherwise exp(x)."""
    if x == LOGZERO:
        return 0.
    return np.exp(x)


def eln(x):
    """Extended natural log: eln(0) == LOGZERO, otherwise log(x)."""
    if x == 0:
        return LOGZERO
    return np.log(x)


def elnsum(x, y):
    """Stable log-domain addition: eln(eexp(x) + eexp(y))."""
    if x == LOGZERO or y == LOGZERO:
        return y if x == LOGZERO else x
    # Factor out the larger exponent so np.exp never overflows.
    if x > y:
        return x + eln(1 + np.exp(y - x))
    return y + eln(1 + np.exp(x - y))


def elnproduct(x, y):
    """Log-domain multiplication: eln(eexp(x) * eexp(y))."""
    if x == LOGZERO or y == LOGZERO:
        return LOGZERO
    return x + y


class HMM(object):

    def __init__(self, m, training_data, test_data):
        """
        m       the number of states
        A       an mxm (from x to) transition matrix; A[i,j] is the
                probability of transitioning from state i to state j
        pi      an m-length vector of initial state probabilities
        b       an m-length vector of emission probabilities per timestep;
                emission probabilities are assumed bivariate normal and
                represent P(observation | current state)
        mu      a 2xm matrix of bivariate normal means for the emissions
        sigma   an m-length list of 2x2 emission covariances (isotropic
                matrices assumed for simplicity)
        alpha   an m-length vector of forward variables per timestep
        beta    an m-length vector of backward variables per timestep
        gamma   per timestep, P(state i | observations)
        delta   per timestep, the highest probability along a path ending
                in state i given the observations
        psi     per timestep, the argmax state j preceding state i's delta
        xi      per timestep, an mxm matrix of P(state i now, state j next)
        """

        self.m = int(m)                                         # m

        self.transition_probs = np.matrix([[1. / m] * m] * m)   # A
        self.initial_probs = np.matrix([[1. / m]] * m)          # pi

        self.emission_probs = defaultdict(self.column_vector_matrix)  # HACK
        self.emission_probs[0] = np.matrix([[1. / m]] * m)      # b
        #self.mean = np.matrix([[0.]*m]*2)  # bad mu --- doesn't work
        # NOTE(review): hand-picked starting means assume m == 4.
        self.mean = np.matrix([[2., -2., -2., 2.], [2., 2., -2., -2.]])  # good mu
        self.covariance = [np.matrix([[1., 0.], [0., 1.]])] * m  # sigma

        # It doesn't matter what the following are set to because we
        # initialize them from the probabilities above before first use.
        self.forwards = defaultdict(self.column_vector_matrix)  # HACK
        self.forwards[0] = np.matrix([[1.]] * m)                # alpha
        self.backwards = defaultdict(self.column_vector_matrix)  # HACK
        self.backwards[0] = np.matrix([[1.]] * m)               # beta

        self.gamma = {}
        self.delta = {}  # not used TODO
        self.psi = {}    # not used TODO
        self.xi = defaultdict(self.symmetric_matrix)

        self.training_data = training_data
        self.test_data = test_data
        self.timesteps = len(training_data)

        self.debug = False

    def column_vector_matrix(self):
        """Default factory: uniform mx1 column vector."""
        return np.matrix([[1. / self.m]] * self.m)

    def symmetric_matrix(self):
        """Default factory: all-ones mxm matrix."""
        return np.matrix([[1.] * self.m] * self.m)

    def print_vector_probabilities(self, p, name, timesteps=10, end=False):
        """Print the first (or, if `end`, the last) `timesteps` entries of
        the per-timestep column-vector table `p`."""
        if end:
            span = range(self.timesteps - timesteps, self.timesteps)
        else:
            span = range(timesteps)
        for t in span:
            print("time: %s %s = %s" % (t, name, p[t].transpose()))

    def print_matrix_probabilities(self, p, name, timesteps=10, end=False):
        """Print the first (or last) `timesteps` entries of the per-timestep
        matrix table `p`, one matrix row per line."""
        if end:
            span = range(self.timesteps - timesteps, self.timesteps)
        else:
            # Fixed: the original iterated all timesteps (ignoring the
            # `timesteps` argument) and indexed `p.shape` on the dict
            # itself, which would raise AttributeError.
            span = range(timesteps)
        for t in span:
            for row in range(p[t].shape[0]):
                print("time: %s %s = %s" % (t, name, p[t][row, :]))

    def print_to_check(self):
        self.print_vector_probabilities(self.emission_probs, "b")
        self.print_vector_probabilities(self.forwards, "alpha")
        self.print_vector_probabilities(self.backwards, "beta", end=True)
        self.print_vector_probabilities(self.gamma, "gamma", end=True)
        self.print_matrix_probabilities(self.xi, "xi", end=True)

    def bivariate_normal_pdf(self, data, mean, cov):
        """Density of a bivariate normal N(mean, cov) at `data` (2x1
        np.matrix arguments)."""
        k = 2  # bivariate; would be higher for general multivariate
        two_pi_to_neg_half_k = np.exp(-0.5 * k * np.log(2. * np.pi))
        inv_sqrt_det = np.power(np.linalg.det(cov), -0.5)  # det(cov)^(-1/2)
        dev = data - mean
        e_to_the_stuff = np.exp(-0.5 * np.dot(np.dot(dev.transpose(),
                                                     np.linalg.inv(cov)), dev))
        return two_pi_to_neg_half_k * inv_sqrt_det * e_to_the_stuff

    def update_emission_probabilities(self):
        """Recompute b_state(observation) for every timestep."""
        for (timestep, observation) in self.training_data.items():
            for state in range(self.m):  # TODO: use vector operations here
                mu = self.mean[:, state]
                sigma = self.covariance[state]
                b = self.bivariate_normal_pdf(observation, mu, sigma)
                self.emission_probs[timestep][state, :] = b

    def update_forward_probabilities(self, log_scaling=False, normalize=True):
        """Recompute the forward variables alpha (Rabiner eqs. 19-20),
        either in the log domain or with per-timestep normalization."""
        if log_scaling:
            for state in range(self.m):
                self.forwards[0][state, :] = elnproduct(
                    eln(self.initial_probs[state, :]),
                    eln(self.emission_probs[0][state, :]))
            for t in range(1, self.timesteps):
                for statej in range(self.m):
                    logalpha = LOGZERO
                    for statei in range(self.m):
                        forward_to_trans_from = elnproduct(
                            self.forwards[t - 1][statei, :],
                            eln(self.transition_probs[statei, statej]))
                        logalpha = elnsum(logalpha, forward_to_trans_from)
                    self.forwards[t][statej, :] = elnproduct(
                        logalpha, eln(self.emission_probs[t][statej, :]))
        else:
            self.forwards_sum = {}
            self.forwards[0] = np.multiply(self.initial_probs, self.emission_probs[0])
            self.forwards_sum[0] = np.sum(self.forwards[0])
            if normalize:
                self.forwards[0] = self.forwards[0] / self.forwards_sum[0]
            for t in range(1, self.timesteps):
                for state in range(self.m):  # TODO: vectorize over rows
                    incoming = np.multiply(self.forwards[t - 1],
                                           self.transition_probs[:, state])
                    self.forwards[t][state, :] = \
                        np.sum(incoming) * self.emission_probs[t][state, :]
                self.forwards_sum[t] = np.sum(self.forwards[t])
                if normalize:
                    self.forwards[t] = self.forwards[t] / self.forwards_sum[t]

    def update_backward_probabilities(self, log_scaling=False, normalize=True):
        """Recompute the backward variables beta (Rabiner eqs. 24-25)."""
        if log_scaling:
            self.backwards[self.timesteps - 1][:, :] = 0.
            for t in reversed(range(self.timesteps - 1)):
                for statei in range(self.m):
                    logbeta = LOGZERO
                    for statej in range(self.m):
                        # beta_t(i) = sum_j a_ij * b_j(O_{t+1}) * beta_{t+1}(j).
                        # Fixed: the original multiplied by the whole emission
                        # vector in place of the transition probability and
                        # omitted eln() on the emission term.
                        logbeta = elnsum(logbeta, elnproduct(
                            eln(self.transition_probs[statei, statej]),
                            elnproduct(eln(self.emission_probs[t + 1][statej, :]),
                                       self.backwards[t + 1][statej, :])))
                    self.backwards[t][statei, :] = logbeta
        else:
            self.backwards[self.timesteps - 1][:, :] = 1.
            if normalize:
                self.backwards[self.timesteps - 1] = \
                    self.backwards[self.timesteps - 1] / \
                    np.sum(self.backwards[self.timesteps - 1])
            for t in reversed(range(self.timesteps - 1)):
                for state in range(self.m):
                    self.backwards[t][state, :] = np.dot(
                        self.transition_probs[state, :],
                        np.multiply(self.emission_probs[t + 1], self.backwards[t + 1]))
                if normalize:
                    self.backwards[t] = self.backwards[t] / np.sum(self.backwards[t])

    def update_gamma(self):
        """gamma_t(i) ∝ alpha_t(i) * beta_t(i), normalized per timestep."""
        for t in range(self.timesteps):
            self.gamma[t] = np.multiply(self.forwards[t], self.backwards[t])
            self.gamma[t] = self.gamma[t] / np.sum(self.gamma[t])

    def update_delta(self):  # TODO
        pass

    def update_psi(self):  # TODO
        pass

    def update_xi(self):
        """Recompute xi_t(i,j) (Rabiner eq. 37), normalized per timestep."""
        for t in range(self.timesteps - 1):
            normalization = 0.
            for statei in range(self.m):
                for statej in range(self.m):
                    forward_backward = self.forwards[t][statei, :] * \
                        self.transition_probs[statei, statej] * \
                        self.emission_probs[t + 1][statej, :] * \
                        self.backwards[t + 1][statej, :]
                    normalization += forward_backward
                    self.xi[t][statei, statej] = forward_backward
            self.xi[t] = self.xi[t] / normalization

    def update_means(self):
        """M-step: mu_i = gamma-weighted average of the observations."""
        top = np.matrix([[0.] * self.m] * 2)
        bottom = np.matrix([[0.]] * self.m)
        for t in range(self.timesteps):
            for state in range(self.m):
                top[:, state] += float(self.gamma[t][state, :]) * self.training_data[t]
                bottom[state, :] += self.gamma[t][state, :]
        for state in range(self.m):
            self.mean[:, state] = top[:, state] / bottom[state, :]

    def update_covariances(self):
        """M-step: sigma_i = gamma-weighted scatter about the state mean."""
        top = [np.matrix([[0.] * 2] * 2)] * self.m
        bottom = np.matrix([[0.]] * self.m)
        for t in range(self.timesteps):
            for state in range(self.m):
                dev = self.training_data[t] - self.mean[:, state]
                distance_from_mean_squared = dev * dev.transpose()
                top[state] = top[state] + \
                    float(self.gamma[t][state, :]) * distance_from_mean_squared
                bottom[state, :] += self.gamma[t][state, :]
        for state in range(self.m):
            self.covariance[state] = top[state] / bottom[state, :]

    def update_initial_probs(self):
        """M-step: pi_i = gamma_0(i) (Rabiner eq. 40a)."""
        for state in range(self.m):
            self.initial_probs[state, :] = self.gamma[0][state, :]

    def update_transition_probs(self):
        """M-step: a_ij = sum_t xi_t(i,j), row-normalized.

        Fixed: the original assigned the raw expected counts, skipping the
        division by the expected number of transitions out of state i
        (Rabiner eq. 40b), so A was not a stochastic matrix.
        """
        self.xi_sum = np.matrix([[0.] * self.m] * self.m)
        for t in range(self.timesteps - 1):
            self.xi_sum += self.xi[t]
        for statei in range(self.m):
            self.transition_probs[statei, :] = \
                self.xi_sum[statei, :] / np.sum(self.xi_sum[statei, :])

    def update_parameters(self):
        self.update_means()
        self.update_covariances()

    def viterbi(self):  # TODO: This is to handle a case I'm not sure we have
        self.update_delta()
        self.update_psi()
        self.update_gamma()

    def forward_backward(self):
        self.update_forward_probabilities()
        self.update_backward_probabilities()
        self.viterbi()
        self.update_xi()

    def E_step(self):
        self.update_emission_probabilities()
        self.forward_backward()

    def M_step(self):
        self.update_parameters()
        self.update_transition_probs()
        self.update_initial_probs()

    def compute_log_likelihood(self, data):
        """Log-likelihood from the forward scaling factors (Rabiner eq. 103).

        NOTE(review): `data` is unused -- the scaling factors were computed
        from the training observations, so this always reports the training
        likelihood.  Kept as-is to preserve the existing interface/behavior.
        """
        self.log_likelihood = 0.
        for t in range(self.timesteps):
            self.log_likelihood += np.log(self.forwards_sum[t])

    def print_log_likelihood(self, data):
        # NOTE(review): `data` unused, mirroring compute_log_likelihood.
        print(self.log_likelihood)

    def print_means(self):
        for col in range(self.mean.shape[1]):
            print("mu%s %s" % (col, self.mean[:, col].transpose()))

    def do_baum_welch(self, iterations=10):  # also known as EM for HMMs
        """Run `iterations` rounds of Baum-Welch EM, then report results."""
        for iteration in range(iterations):
            self.E_step()
            self.M_step()
            if self.debug:
                self.print_to_check()
            self.compute_log_likelihood(self.training_data)
        self.compute_log_likelihood(self.test_data)
        print("Log likelihood (training data):")
        self.print_log_likelihood(self.training_data)
        print("Means (training data):")
        self.print_means()
        #print "Log likelihood (test data):"
        #self.print_log_likelihood(self.test_data)
        #print "Means (test data):


def read_ps3q1_data(filename):
    """Read whitespace-separated 2-D observations, one per line, into a
    dict mapping timestep -> 2x1 np.matrix."""
    data = {}
    # `with` closes the handle; the original leaked it.
    with open(filename) as datafile:
        for t, line in enumerate(datafile):
            pts = [float(pt) for pt in line.split()]
            data[t] = np.matrix(pts).transpose()
    return data


def do_ps3q1():
    training_data = read_ps3q1_data("hmm-gauss.dat")
    test_data = read_ps3q1_data("hmm-test.dat")
    hmm = HMM(4, training_data, test_data)
    hmm.do_baum_welch()

| if __name__ == '__main__': 332 | do_ps3q1() 333 | -------------------------------------------------------------------------------- /notebook.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "metadata": { 3 | "name": "", 4 | "signature": "sha256:4ef452109bc5b2742567214766e82adac13ea7c6140b8b5a8da66ef8b88fa684" 5 | }, 6 | "nbformat": 3, 7 | "nbformat_minor": 0, 8 | "worksheets": [ 9 | { 10 | "cells": [ 11 | { 12 | "cell_type": "code", 13 | "collapsed": false, 14 | "input": [ 15 | "import matplotlib.pyplot as plt\n", 16 | "import numpy as np\n", 17 | "import random\n", 18 | "%matplotlib inline\n", 19 | "D = 5\n", 20 | "K = 4\n", 21 | "POINTS_PER_CLUSTER = 100" 22 | ], 23 | "language": "python", 24 | "metadata": {}, 25 | "outputs": [], 26 | "prompt_number": 172 27 | }, 28 | { 29 | "cell_type": "code", 30 | "collapsed": true, 31 | "input": [ 32 | "# Create four Gaussian clusters and plot.\n", 33 | "means = np.zeros((4,2))\n", 34 | "x = y = -D\n", 35 | "for idx, mean in enumerate(means):\n", 36 | " means[idx,:] = [x, y]\n", 37 | " y = y+2*D if y == -D and x == D else y\n", 38 | " x = x+2*D if x == -D else x-2*D\n", 39 | "cov = np.identity(2)\n", 40 | "points = np.random.multivariate_normal(means[0,:], cov, POINTS_PER_CLUSTER)\n", 41 | "for idx, mean in enumerate(means[1:]):\n", 42 | " points = np.append(points, np.random.multivariate_normal(means[idx+1,:], cov, POINTS_PER_CLUSTER), axis=0)\n", 43 | "_ = plt.scatter(points[:,0], points[:,1])" 44 | ], 45 | "language": "python", 46 | "metadata": {}, 47 | "outputs": [ 48 | { 49 | "metadata": {}, 50 | "output_type": "display_data", 51 | "png": 
"iVBORw0KGgoAAAANSUhEUgAAAXkAAAEACAYAAABWLgY0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJztnX+MHOd537/vcbHQSnfice9O5MkkJWP940rzEK5sGAdc\niyMK3Z2aPy4lD0ls18Y2MaQWTWJEXSc0K9gUqjVoNT67dYrWoCub1yK2EEA449Iiu7wGJFu2aFPF\nCi3XYf0rEqwkoqOyf9TAAbTKp3/MzM6vd2Zndmd2Zme/H2Bxu7OzM+/tPu93nvd5n+cdJSIghBBS\nTCaybgAhhJD0oMgTQkiBocgTQkiBocgTQkiBocgTQkiBocgTQkiBGVjklVJfVUrdVkq96thWVUrt\nKaW+p5S6opSaHvQ8hBBC4pOEJ/81AE94tn0KwJ6IvAfAH5mvCSGEDBmVRDGUUupRAH8gIovm61sA\nVkTktlLqCIBrIrIw8IkIIYTEIq2Y/GERuW0+vw3gcErnIYQQEkLqE69iDBW4dgIhhGRAKaXj3lZK\nHRGRN5VS8wB+4t1BKUXhJ4SQPhARFXXftDz5XQAN83kDwDd1O4kIHwk9Lly4kHkbivTg98nvM6+P\nuCSRQvkNAP8VwHuVUj9WSv0KgM8BWFVKfQ/A3zZfE0IIGTIDh2tE5MMBbz0+6LEJIYQMBiteC8Lp\n06ezbkKh4PeZLPw+syORPPm+TqyUZHVuQggZVZRSkBxMvBJCCMkBFHlCCCkwFHlCCCkwFHlCCCkw\nFHlCCCkwFHlCCCkwFHlCCCkwFHlCCCkwFHlCCCkwFHlCCCkwFHlCCCkwFHlCCCkwFHlCCCkwFHlC\nCCkwFHlCCCkwFHlCCCkwFHlCCCkwFHlCCCkwFHlCCCkwFHlCCCkwFHlCiItOp4O1tU2srW2i0+lk\n3RwyIBT5jGGHInmi0+ngzJkG9vY2sLe3gTNnGrTLEYcinyHsUCQvWM7GRz7ya9jf/yiABoAG9vef\nx9bWJe2+dExGg1LWDRhntrYuYX//eRgdCtjfN7atr69n2zAyVljOhmGLGwA+CWAVgN8O3fsCN240\nsLOzTZvNMfTkRwh6UCQN3M5GA8DnATwLYBuVyjk0m08F7qvz9J3QZrOHnnyGNJtP4caNBvb3jddG\nh9rW7ksPigyTavWv8f7376LZ7N/GhmGznU6ne5FpNp9if9AhIpk8jFOTdrstq6tnZXX1rLTb7cD9\nVlfPCnBZADEfl6VarYV+hpAotNttqVQOm/Z1WSqVw4F2FWdfnc2urp7NpN1FwtTOyFrLcM0Ic+fO\nXCKTtRxSjzfr6+vY2dnG6uouVld3Q73tOPumTdzQ0dgS54qQ5AP05AfyoIDDArQH9o50bWi1WpFG\nF4SEkbannfZIIa8gpidPkc+QuEbabrelWq0JsGQK/OCGrWvDxMTM2A2BSW+ihhYH/Uyc9jBcQ5HP\nNf14Ikkbtq4NxkVkvLwjEk6Sdpek8Kd5EckrFPkRwu44TQGWZGJiRlqtVqTPJdlJnJ13YuKQ2R6K\nPLFJauJ/XL3vJKHIjxitVssU1mCjb7fbUq8vS7Vak3p9JZVhb72+ItVqTWq1E1Iuz7ETjihpebZB\nI7649jGucfQkiSvyzJPPmOvXv4V7976IoKrXTqeDjY0P4e7dEoDP484dYGPjY9jd/Xexshq8+cQA\nuq9XVh7DrVu3sL//PO7cAcrl30S9/jXMzs5086SZj5x/gvLSAQz823lrOoBzALaxv/8mq7TzTpwr\nQpIP0JMXkWDPxvLI7InWaN6PzpPzDpHL5Tkpl6cjh2g4xM6OOJ65zpbq9ZW+frsgOxp04r/VaolS\nUwIcFeCklMvTuZjEHSXAcM1oEZTCaG9biizyQWIcZXI1bLKVQ+xsiHtxDYqbJzm53+s9nQhb2+v1\nZSmVDgow2/18qTQT+aJjn7cpExMz3dDluIk/RX4EcRppq9Xyd
My2AA+6Oka5PKc15iAxjiLyYWmT\nFPls6CfF1ivA9fpy7N+u13mjjBYtG3JvD3dYwsS6Xl8xP9cWo0ZEPyodh1FmXJFnTD5jnLHulZXH\n8NnP/i7299/p2GMdwJOYnPw6yuXn8MgjR3HxYrx4vDeeWi7/FoCf4e5dI15bqZzDM888jevXd839\n3VWMcdbYIdlhVaPa8XfjNzLi9MY+Sfx26+vrPvsLWlHVeG5t3wXwl9pjhq1z0+l0cPPmd8w9LwGw\nz3P3LgB82XHeV/GRj/wa3v/+n+PckUWcK0KSD9CTD0lfdHsrTq8obFgalqnjHS3UaiekVHpIpqaO\nDz1tk0Sjn7mQIC87zm/Xz3ltTztsFBk8Kg2aT7DnpTbNPuEfCdij0rbr2EX16sFwzXAZRPzCwyht\nARakVHpI6vUVT5zeb8BRc+7b7bY5vNWHfyjm+SLO75FVwVKYTXntUqkpqVaP+ByMXpXXxrFbAiwL\nMO06jx2uiZ6gMMpQ5IfIoJ0q3LCbptcTLQMmavzW2M/fGarVWs8LCck3w5g70Ym/fd62AIZ91evL\n3X1rtVOi1KTWrvWxe729W3ZbLk93vfx2u92dxyqVHgrtI0UhrsgzJj8A/dzZyRuDv3HjnCteasXG\n/+RPbuLOnS91j33vHmDczOHzqfwvd+5U8JnPfBH37m3F+n/I+OCNm1+//jG8733vweuvvwnjjlLr\n5mMbwFdc+xp3mzoCIy5v27VlY1euvIRnnvkNfOELzwEADh2axw9/uOg6v73G/YvdWP3588/h5s3v\nmrUmAPAJ8+8i544s4lwRknygAJ58EtkPQSs+6mKcxjC16QuxBB1b54W3223Tq7KH1sAhAebFyF2O\nl1VB8kPa9QzB4UW/d66337MC+Ldb9hRWy+Gdl6rXl83KbP2otMg2CoZrhkcSecxB+e6GgdtGbkw6\nNU0hXtIWkXgnV4PT0ZYFWBCgJsApRwf1h4ismGlYihyFPz+k+XvoRf6s+bzpElf9vkdMm7ZtzMqT\nD5t4tY7pT8e0zl/8OLwTivyQGbQiMThuflmMSaYl05Dbjm3uuKe17oz1utfFR99ZrDY1Bah2PbSw\ngqp+qynJ8Ollp4Ydha+P5LUrYzSor3xttVouMTeeN83nc13bVmoq0L68fcO9j3MeYLxsMFciD+A1\nAN8G8AqAP/a8l+b3kEuiev6GMTdNL3va0TnuF8DOOFBqUkolZwZCVZSakqmp45G8otXVs9p9ddWv\nuk44OTnvuwgV3YsaRaJc9MMyrrzHMsIlK65wincy1Lbhs2KMGL2TqGe7thYUrqnXl10XJXcIyJku\n6a6ALTp5E/k/B1ANeC+9byFHeD0oZyfxGrGF3ws6JMAJMYa7znVDgnKG/duDKlqjZDXoO+G0KOUP\nJ1Hk80cvLzko4yrK8gdG9swJly34K22DYvmHBdjshnmsEKP3AmLZq3FM51zSgzI1dWzsQoVxRX4Y\n2TVqCOfIJbpshGPH5vCTn/wUP/3p/4XIrwJY9N3F/vr1b8GZgQC8CuCrAKwMggaMDAYdDwN4CsBH\nu1smJp7GvXu/Cl3WjLdKcmWlaVbdLgJ4FRMTl/GjHz2M8+cvYmFhAcBXMDt7GG+99XN45ZVfcbQR\nmJhootn8vYG+MzI6WPb68z//9yDycRiZM8D+/kcBvIxKxcoceyfsrBeYzx8G8DcB7OHOnS9hbw+4\nceNc1xbv3v3ncNrr+fMXzSyeIwC+AuAwgCfxrnf9NwB2dS0zwTTEuSLEfQD4EYxQzcsAnvS8l+bF\nLhfoJ5+qDk9kTowCjyXXDRj8n9N77EpNyoEDhxzHc8ZI7YmwuOuXWF6T7dX7qwiDYvQkf7hHYf7Q\nRr8Fcu22tSrlYc/IsyqVylx3tGovTGaMMkulg934v84u/bblnCfaNJ+flFLpgbG89wFyFq6ZN//O\nAfhTAH/L8Z5cuHCh+7h69
WqKX0v6hBeJeIepTuN1V+/pJk6NcI33OEcFaEq5PC212imZmjrmKDiJ\nvnJg0P9hd8DgpZA56To6uC/cehvRTbwGLZURPAnbNm3csM+gdEorLNPbttwZX84JXCNEVPzip6tX\nr7q0Mlci7zoRcAFA0/E6vW9lyISlF7o7gtco/R665Q07LxqPP/64xtBb3c/UaotSrdakUpmXSmW2\nW73qbaM3C8eLu0P3TlFj+uRoERab1/2W7XbbMZfTy9u2Mr+c9m7M09gOg10RW6stRho96BMDzore\naQpOSS6SneZG5AHcD2DKfP4AgP8CYM3xfqpfxDCJ0nnq9RUplR4QfxGSe1h64MCcLx3S6GhWpoIz\na8EaDXgvAJs90ib1GRbuDt0WY1isD9eQ0SPITu1wjRFOsWow9BOyhmjrQi26Yjor1dewfTs1V6mD\nYi/GZ6cEOwm6yDhFPmyJbOsYRRtx5knk32mGaP4UwHcAnPe8n+oXMUyi5r+3Wi2ZnJyXUukhqdVO\nSa12QpxpYG6xnjXT0qxceV3YJyiUU/O1QR9DX+56OP7zWCsGLgmwIEodGpsUtaISJHj+rJXZrm0Y\ndqkLnXjtdU6AYxpbrEqj0TBF3e3hAydD+4z//O5wTbk85yr60xUADmM9n2GTG5HveeICiXwUb0G3\nT6vVcnhQei/I8Jiajo6xJMAhqdVOmMNZXcdyi7w9QeZMv2y6Yq1GfHPZ0XHHY0W/cUMXutB55VYi\ngGGz1jK/VnhlxbSjprko2IK5bVnciQWzYk30+keiS659dX3GvfCZZb8tsTz/ycn5nvNOFHmKfJdB\n43a9Pm97yu7CIcu7t4uc3CJvV5VuinOS1jJifWXhZo8JsmbAMNgKz1gFLMXqHESPbgLUmhsy7sk6\n7bMfw3NvSqUy57G/+6VSsYrkLIdiQbwjBWBS5ucf7YZ+gpbFDq7MNrb1EvMoYcpRi9dT5Psg7bid\nEVt0pjoaXrmxyJLlHTU9HaEqwANSq52SVqul9dgt0bWWWp2aOt718MOGq1YGhb/TOCe5xuMGDMRK\nobRTEZ0ToMHr1ViJBCe19uXe5t8HeDBS+qM9p7Xs2t/oQ7az1GteTJc5pCsEDEpKyBMU+T5Ic0hn\nh0rcx7dylf1x8CXTS7LTIcvlOfMO9+79nLn13nO6Y+29PRyjwy4K4Iyd3i/Aoch3jyKjS5BH6+4b\nln1aIcSTosu+8q5ppBs1Rll6Q9dGf9ixt8ceJ5TjHB3kFYp8H6Ql8rZx+YeatdqpwHJyXajEWNJg\nRoywTbCHrVsDJOhGx1ZapVJWmMYaQUxKpTLnK1XPs+GTdLC9fO9I80HTFnvfqlJ3Mxqd89ErU8Zq\nT5iY975Q2f07bJSS59AkRb4P0grXuCeO3HFxa1jozRU2Os+C1sMw/vqzaZyVpkGVqEEeUtj+aY1u\nyGjRbrcD8tWtidPeC4R5BVgXKola2BRWgRtH5P2jWXcIKK9Q5PskjQmYsKFuULyw1Wr5KhPdyxXo\nFx/rlTKWhJeTZ8Mn6aKzByutNolkhbhLb+iOFTdcY71Xr684MoDyP2qlyOeIoMyWKEbkrvhzejhN\nccfN5wRY6Mbng1I143aAIhaRkGgEVb+6bfmg1GqLkW2ilxPlPb5Sk92QZpRz9HJKopx/VLJsKPID\nkvSPbWcHBC8tHIauUOW++6zKwVMCTIn3Bh/e/6HfDjBKhk+SIQmvVxeaCcrecWIsqzEjRrgyPIfe\nyziNPCnyA5BH79Wu+rMLSezUy2jruY9TByCDoQsxhq+Q6ncYvH2oVlv0fUa3Yql97Pj2mse+mxZx\nRX4Y68mPDFtbl8y13xsA3OuuZ0Wz+RRu3LDXpK9UzuHixW2cP3/Rt5478GUY63QHHQPdY/Au9iSc\nDgzbeh537gBnzhj3POiFrg/98IdPw7DNXRj3OgBef/2NRFvrvS9Cs7nNteUt4lwRknwgh55
8Xj1e\nXdhE11bnBGyUYxDiJSztN0oFqT4t0bvUwYO+xcjc5+aieGGA4Zr+GaUhny4FzSpYoqCTQQgq4HNm\nhAXZl3+C1p8aqdShwLkf+xaAywNl7hQZivyAjJJARsmC0N0QuZ/jkvFiEIfHXZnqT410xuNHybHK\nCxT5jOlXIAcR1l75xtbQO2rqJjsdERncJqOEXvIaIs0zFPkM6UcgBy3GiFI56LzJQq8OxE5HksJO\nHw4OvdDe4hNX5JldkyBxs3M6nQ7OnLEzZ4BzALaxv/985Kwe7znv3QOA3wSwaO7xCQC/3ff/REi/\nrK+v97RhZn6lD0U+Q7wCbXAJwMaARz4KI10NAJ4E8O8BHI3Ugdjpxo9Op+NIPXxqqKmHTH0cAnHc\n/iQfYLgmkaVOo4RrqtUaJ16JFs7BjB5gTD5b4gikTqD7uWmBNwWNnZZEhTHx0SOuyDNckzBR4pDO\nfd1D1W/0NVT1nvMDH/gAh7+EEACAMi4MGZxYKcnq3IQQA+/kf6VyDjs7dAzyjFIKIqIi70+RJ2S8\nyXLilcSHIk8IIQUmrshPpNkYQggh2UKRJ4SQAkORJ4SQAkORJ6QgdDodrK1tYm1tE51OJ+vmkJzA\niVdCCgBTIccHZtcQMoasrW1ib28D9jpI21hd3cWVKy9l2SySAsyuIYQQ0oXLGhBSALh6KAmC4RpC\nCgIrV8cDxuQJIaTAMCZPCCGkC0WeEEIKDEWeEEIKDEWeEEIKDEWeEEIKDEWeEEIKDEWeEEIKDEWe\nEEIKDEWeEEIKDEWeEEIKDEWeEEIKDEWeEEIKDEWeEEIKDEWeEEIKTGoir5R6Qil1Syn1faXUubTO\nQwghJJhU1pNXSh0A8L8APA7gLwD8DwAfFpE/c+zD9eQJISQmeVlP/oMAfiAir4nIzwC8COAXUjoX\nIYSQANIS+XcA+LHj9RvmNkIIIUMkrRt5R4rDPPvss93np0+fxunTp1NqDiGEjCbXrl3DtWvX+v58\nWjH5JQDPisgT5uvzAO6JyPOOfRiTJ4SQmOQlJv8ygHcrpR5VSpUB/DKA3ZTORQghJIBUwjUi8rZS\n6tcBdAAcAPCCM7OGEELIcEglXBPpxAzXEEJIbPISriGEEJIDKPKEEFJgKPKEEFJgKPKEEFJgKPKE\nEFJgKPKEEFJgKPKEEFJgKPKEEFJgKPKEEFJgKPKEEFJgKPKEEFJgKPKEEFJgKPKEEFJgKPKEkETp\ndDpYW9vE2tomOp1O1s0Ze7jUMCEkMTqdDs6caWB/37gJXKVyDjs721hfX8+4ZcUh7lLDFHlCSGKs\nrW1ib28DQMPcso3V1V1cufJSls0qFFxPnhBCSJdUbv9HCBlPms2ncONGA/v7xutK5Ryaze1sGzXm\nMFxDCEmUTqeDra1LAAzRZzw+WRiTJ4SQAsOYPEkEpsERUgwo8sSHlQa3t7eBvb0NnDnToNATH3QE\nRgOKPPGxtXXJzHNuADBynq0YKyGAIfAbGx/rOgIbGx+LJPS8MAwfijwhJDbnz1/E3bu/A8sRuHv3\nd3D+/MXQzwx7hMgLiomIZPIwTk3ySLvdlkrlsACXBbgslcphabfb3fdWV8/K6urZ7jYyflSrNdM+\nxHxclmq1FvqZ1dWzvs+srp5NpX1hNjzqmNoZWWuZJ098rK+vY2dn25EGZ5Sle0vWb9xosGR9THnk\nkSO4c+eTji2fxCOPvDez9nhxhxyB/X1j2zjaKkWeaLE6w9bWpa7Ys+MQi4sXP42NjQ/h7t0vAwDK\n5bdx8eKnQz/DQqlsoMgTLTqvfWFhIeNWkbywvr6O3d0XHaO9Z3te7INGiGnAC4oNi6GIFt1CU/X6\nV3Dr1g+4wmCBKVK1apH+Fydxi6HoyeeEUTDI2dnD2Nn59FA8MTIcnHa3svIYPvvZ3x25OZegvrO+\nvp77tg+FOLO0ST7A7JouecsEaLfbUq8vy8TEody0iSS
P1+6M37s5lOwXZxuiZGsF7Ze3vjMMEDO7\nhiKfAIOmFdqpZW0BzgqwJPX6ct/HHqQ97k7TlImJGanXVwrfccYRXUojsDQ0kY8q0GH7DTMtMy9Q\n5IdMP56EV4QNQ20K4PaqWq2WeeymAEsyMTEjrVYr8fY4GcdOM67ofuuJiZmhecVRbS1sv3G0V4r8\nkNEZWb2+EuhJ60S41Wo5Opd9nKmpYwIsCGCHTSYmDoV2vEGNfhw7zbgSZItJFLtFGU3GF3n9SJfh\nGop8qsT1hoIMu15f0QydD5nD5+iiO6hIj2OnGSe84hsW604m5GfbkO7cUcM15fK0ALPdfcvludAq\n7CJXZlPkh0zcyasgEfYeBzgowKbpuUQXbV17eoV4dMeIIgRktEgiBh6FoNFtFOEPQucEBfWDojsq\nFPkMcBpqvb4caoxGaEaftWKHbZbMC8WDApwSYMZ8fVnK5eluOCjI82o0Gq7jDGLkScw5kHyQRAw8\nCHcf8Auybq2btMKIRQ85UuQzptfiXmETqW7jbLuGp8CDMj//qJTLc1pvKM5oIo4Ix+0wRfeiRhl7\ngv+s+WiGiHzv/Sy8v3mpdNB0UGzbrdUWBxLeOHZFkafIp06QiPYyPvf7/n2DvCFdpwxKhYsrwnE7\nTNE72CjTarV84qsL5UXZzz969drfpGmDS6aDclw7uozjcMTJqS+yo0GRzzG9BNBtnP4J1yCRNzqZ\n0+ufFaWmtEaetmdOkc8vUexvdfVsz9CK1yaUmhSg6rI/IyvM+nxTjDkme/9a7ZTU68vakamXYdeK\n5B2KfM5wGpud9x48+WSlsNVqJ0Spade+QZ83hsJLphfVFuCy1GqnIo4mmlKt1kI7Q5wO451XKJoX\nNcqEiXy4g+G2Ef9x/A4JcDLgfWcYsnfmWNG98n6gyOeIKHnIQfsExe51mS/OiVyjoCo4huqtaHUO\nywftQEGZPUX2qkaJsDmh4Pkgv434kwt0Il91fL7qeF8XkrTz3ycn57vOTr2+IgcOzJhtWRRgRZw5\n8uMKRT5HRAld6PaJk4mg+7xSVanXlwPzhq0LzaAZD1HaEpQ6R5InysU0KLsraISnsxHvb1ouT0up\nZNeGALNSKj0gtdqiIwnA6b1b8ftlMWL3zlDjQQGOindOwBkO6lUQWHQo8jkiSgx0auq4b59K5WFt\nx4p2jqbYRVTN0DBPkCjX6ytSrdakXl8eOD866QsJ0RM1rBG1TsNK1Q36/bwXFMO7d4cM3bbdFmBJ\nlLrfI+AHfcc3RD58pDDONkSRzxHR0yndk6b33Vf1bQtasCws/GKFbsI6qrtjz5npb/Z5y+XpyFkQ\nuv+3V90ASYagC3aU/ZxxeWsi354QjRbSM467KUDNfGyK4X07s242xaj5WDJFXy/gwEOJiHxRw4QU\n+ZwRLZ2ybRpyzSHK/jxlu7x7SYyJrUmZmjomtdoJqdUW5cCBOU2HOSqlkrfT+Du2naXj/fySNuQS\ntMZJ+EWI4Zq00In3xMSM77uO8nv0MznfaDTEH2I57timc0Da5vZpz+c2xVsj4gzXRC3IG7TyO69Q\n5DNmkDJtYEEmJmakVjtlirnbqN2pktaE1kkBrCGwzityVs/anabRaPjaqW/TknYkEGe1wqJ6VHki\nzgR8r9+jnzRYnY0YXrtuwtVpm5aonxQjzGgV8RmOz4EDczI5OS+12qK20juIqBe9USQXIg/gWQBv\nAHjFfDyh2SfVLyILonqttkfu9FYecKVMlstzrsnTdrvt8Mjb4lyW2PCEmprts2IPi5tijBSMkYFO\npI2LiDvfWanJQA+fIZh8oYuL6+LnOqKk+oahm0dyi7Z+vsY5IqzVTsW+uAShE3lgqRB2mheRvwDg\nH/fYJ71vISOiekD2fnbqWKVyJPCz9sXjpLlP2M0erNDPrKODieNzwSJtx1WPixEXPdqdfM36DkKk\nN+HpuG7B7iXqYUs
OWw6BMTm/YlbIlsUfrtkUe5ls92hSlyHTT2jPnkdY8TlFcVKLR4k8iXyzxz7p\nfQsZEV/krf2ajs5gf3Zq6rgnC6YtwJzowzLOuKaRwuasJjQ6WDNUpMPK2Qf19Mhw8Hrt+gnZ5b4v\n2kGjUDv18agYI8aWacPHTOdkxhT9JQHc8XFdem+UUYe3YtZybCx7HLQwL69hxjyJ/GsAbgJ4AcC0\nZp9Uv4gsiBOucXcya5VJfwjGn6HSFqNk3FkmPi2NRkNqtVNy4MCcTEwclFrthDQajW6usxWDr9eX\npVY75ThneEplWGfPYwcgbqKmtUYNvxnH8zoZ1mt3uLBUmjFz5W2B9y7KF9d7D6/MtUe5VmZRv3aa\n54SBoYk8gD0Ar2oeGwAeAqDMRwvAC5rPy4ULF7qPq1evDuHrSZ+oRuXOallxdBKrEy07PK/g7BZr\nmFqvL/sKUowJ2abrM14PzpkL38+EG8k3+rTWFd/vHHUiPVzkLSdkScrlw67kAa+tuY8X3ebc++vC\nlme7/88gopynvnD16lWXVubCk3edAHgUwKua7el9KyOGMQT2DjvbLuPSDWndw9WgzBo7Dh934ak8\neS+kf6KktUa97Z8uXFMqHXQ5GFbSQH+hy8vd2LquLf7UY3c9iT0iHiz+nieR95ILkQcw73j+NICv\na/ZJ8WvIL0GefqvVkqmp42au+32BQhs8XA2ajLVFXldd670fLcMw40Gv3znsfe/Eq9NurJFl1Epn\nXaVt2MqURpaZc8Q6KUodMhMXFsSbWTTI95NXhycvIv9vAXzbjMl/E8BhzT6pfhF5JMhwvNtLpRmZ\nnJzvppg5CR6u6rwaO1wDzEqtdsJX4arLxyfjg07M4whc8IR89MXv9KHL4NGmsbSxtVa9MXeVxhpJ\neXV4ciHykU48hiIfNAQMyunVGWvYcLVcnpZa7ZRUqzWZnJwX+x6x7qpZu0P5h9RBa+SQ4hEk5lFD\nFb1Ta3tXynrpde6w/Pe8inLSxBX5EkhOeRhAA/v7wNbWJayvrwMAms2ncOOGsR0AyuW38b73fQ2z\nszNoNl/E+vo6Op0Ozp+/iJs3r+Hevb8PYBGVyjk0m9uec/h//ps3v4NOpwPAOK91Tuv8ZLh0Op3U\nfoetrUvY338eQAMAurYWtT1vvXXb9fl79wDgy5HO7f2/rPa89dZtlMu/hbt3jf30dutmYuL7aDaf\nxfr6Ou1UR5wrQpIPjKEnHzVco5t49R5HN8TW5Q47sxp08U9nla01YWUPffX3oiXDIe24cJDXbJ/X\n/ft7EwQl68nyAAAIX0lEQVQM23Hn19tZOsHhml5xeN3N6sPse9xsEwzX5JugIaVz4qpUeqAbc7RW\ngQz7fK/c4VrtlIjoO7UR1nGXwtsLpLmH4nEFZlyGz2mRdoZH2EVEt+687mbcznoN55pIYROv/v9L\nv8qks0845450F4FxgiI/IoSJvdurmes5IdY7d3i6myKnS1dzTpZNTMyYVYoLAwlMnrMTRoVhpPEF\n2aE+l/6gxyFoilHJelyAeQEWuktih7U9ish7q3J7jW7HCYr8CBAmgPE6h27iti3uJQ7sNeXDwkX1\n+rJnrQ9rGYT+BCbPecajQlYXyna77Qi7WL+f+2bchn04l8CYNZ2EQ4G5+FHDNUEFW8504F62VORR\nZFyR58RrBgRNePU7aeSdjAXehjEB9jCAbQBvwshkBRYW3oXXX38Ohw7djwcfXMDW1iWsrDyG119/\nE/fuvRfAEQBWO54GsAgg2gQYSZb19XXs7Gw7Jii3hzKxuLV1yZywP+fY+lUA/wKWzRr29Q8drwFg\nF/fufRFbW5dw5cpL3ba/9db/BvCu7v/h/79e7J7XeL0dMAH8lwC2e9pip9PBmTMNs48BN240sLMz\nnO8ul8S5IiT5wBh78mFebhwPyPue5bnobuDQaDQCcpiDbubgL5SKA8M1o4ttn/YyG
0YILyjE4r7p\nTRJV1H5v3730drT2F3MUCYZr8k8vw+9VbRhFeI0K2mNSKj1k3nDBmRPfK4Zv3Mxh0KyFIg+Zi4zO\nPv2Ow/1m5an39pVuuxlEcPu1H4o8RT4X6NaiSVIMvR3VqBI8aYr6Sg+R93tkcf8nivpo4/0tDeF0\n35IyaPkCa/5HJBvBLfookiKfQ3p55mkYZPhCTs5Js+BwDbNpikESF9+41dpBNSDDnDwuqsNBkc8Z\nvYw8LU+nV1plrbboGknU6yvaNeb7O99wPDbSm6RENnohn99JKLLgZgFFPmf4xc+9nocuVSyJ9WN6\nFUgFrUXSb2ekyOeTJH+XsNoOI2yzJMxlTx+KfM4IC5sYVYQnxLt6pFVQMihBpeBxMhyiij7DNflk\nWBdf/v7DgyKfM3p51PYSAu7VIqMcN47X3c/+/d5UmcPy/DBM8Q1acoM2kSwU+RxiGbouE6GfdbCj\ndNxBOxfDL8UhK6Gld58OFPkcEzZ5FacT9hLgJDqXLmWOIk/iQEchHeKKPJc1GCJhZerDWCc8zjlW\nVh7D3t4/A/Alc8snsLLy24m1kRAyHCjyQyaJGxt416pJY12Z69e/BUPgG45tu3jmmURPQwrMMOyU\n9IYiP4L0WriKnYvkgawWWCNulBHiyeDESklW5x4HBr1tnHclv0rlHHZ2jAsFbwtISHYopSAiKvL+\nFPnxIo746+7DqRN+Cj0hw4MiTwIJ8s6jivTa2ib29jZgx+m3sbq6iytXXkqnwYQQH3FFnjH5MSLp\nm5UQQvIPRZ5EhhO6hIweDNeMEYOGa6xjcOKVkOxgTJ6EQpEmWUHbSwaKPCEkdyQxiiQGFHlCSO5g\nZlZyxBX5iTQbQwghJFuYXUMISR1mZmUHwzWEkKHAiddkYEyeEEIKDGPyhBBCulDkCSGkwFDkCSGk\nwFDkCSGkwFDkCSGkwFDkCSGkwFDkCSGkwFDkCSGkwFDkCSGkwFDkCSGkwFDkCSGkwFDkCSGkwFDk\nCSGkwFDkCSGkwPQt8kqpX1RK/U+l1P9TSj3mee+8Uur7SqlbSqm1wZtJCCGkHwbx5F8FcAbAf3Ju\nVEqdAPDLAE4AeALAv1JKccSQMteuXcu6CYWC32ey8PvMjr7FV0Ruicj3NG/9AoBviMjPROQ1AD8A\n8MF+z0OiwU6ULPw+k4XfZ3ak4WE/DOANx+s3ALwjhfMQQgjpQeiNvJVSewCOaN76JyLyBzHOw/v8\nEUJIBgx8j1el1FUATRH5lvn6UwAgIp8zX7cBXBCR/+75HIWfEEL6IM49XkM9+Rg4T7gL4OtKqS/A\nCNO8G8Afez8Qp5GEEEL6Y5AUyjNKqR8DWALwH5RSfwgAIvJdAL8P4LsA/hDAP5JBhwuEEEL6YuBw\nDSGEkPwy9Px1FlGlh1LqWaXUG0qpV8zHE1m3adRQSj1h2t/3lVLnsm7PqKOUek0p9W3THn1hWxKO\nUuqrSqnbSqlXHduqSqk9pdT3lFJXlFLTYcfIokiJRVTpIQC+ICJ189HOukGjhFLqAIB/CcP+TgD4\nsFLqb2TbqpFHAJw27ZH1MvH5Ggx7dPIpAHsi8h4Af2S+DmToIsoiqtThhHb/fBDAD0TkNRH5GYAX\nYdglGQzaZJ+IyH8G8H88mzcAbJvPtwH83bBj5MlTZhFVMvyGUuqmUuqFXsM44uMdAH7seE0bHBwB\n8B+VUi8rpZ7MujEF4bCI3Daf3wZwOGznpFIoXbCIKj1CvttnAPxrAP/UfP0cgC0AHx9S04oA7S15\nlkXkr5RScwD2lFK3TO+UJICISK+ao1REXkRW+/jYXwA45nh91NxGHET9bpVS/wZAnAsq8dvgMbhH\nlyQmIvJX5t+/VkrtwAiJUeQH47ZS6oiIvKmUmgfwk7Cdsw7XeIuoPqSUKiul3omAIioSjPmDW5yB\nMclNovMygHcrpR5VSpVhJALsZtymkUUpdb9Sa
sp8/gCANdAmk2AXQMN83gDwzbCdU/Hkw1BKnQHw\nJQCzMIqoXhGRvyMi31VKWUVUb4NFVP3wvFLqFIyww58D+AcZt2ekEJG3lVK/DqAD4ACAF0TkzzJu\n1ihzGMCOUgowtOb3RORKtk0aLZRS3wCwAmDWLD79DIDPAfh9pdTHAbwG4JdCj0EdJYSQ4pJ1uIYQ\nQkiKUOQJIaTAUOQJIaTAUOQJIaTAUOQJIaTAUOQJIaTAUOQJIaTAUOQJIaTA/H+1KVAU92y3RgAA\nAABJRU5ErkJggg==\n", 52 | "text": [ 53 | "" 54 | ] 55 | } 56 | ], 57 | "prompt_number": 173 58 | }, 59 | { 60 | "cell_type": "code", 61 | "collapsed": false, 62 | "input": [ 63 | "def compute_distances(points):\n", 64 | " n = points.shape[0]\n", 65 | " distances = np.zeros((n, n))\n", 66 | " for i in range(n):\n", 67 | " for j in range(n):\n", 68 | " distances[i,j] = np.linalg.norm(points[i,:] - points[j,:])\n", 69 | " return distances\n", 70 | "\n", 71 | "distances = compute_distances(points)\n", 72 | "print distances.shape" 73 | ], 74 | "language": "python", 75 | "metadata": {}, 76 | "outputs": [ 77 | { 78 | "output_type": "stream", 79 | "stream": "stdout", 80 | "text": [ 81 | "(400, 400)\n" 82 | ] 83 | } 84 | ], 85 | "prompt_number": 174 86 | }, 87 | { 88 | "cell_type": "code", 89 | "collapsed": false, 90 | "input": [ 91 | "def cluster(distances, k=3):\n", 92 | "\n", 93 | " m = distances.shape[0] # number of points\n", 94 | "\n", 95 | " # Pick k random medoids.\n", 96 | " curr_medoids = np.array([-1]*k)\n", 97 | " while not len(np.unique(curr_medoids)) == k:\n", 98 | " curr_medoids = np.array([random.randint(0, m - 1) for _ in range(k)])\n", 99 | " old_medoids = np.array([-1]*k) # Doesn't matter what we initialize these to.\n", 100 | " new_medoids = np.array([-1]*k)\n", 101 | "\n", 102 | " # Until the medoids stop updating, do the following:\n", 103 | " while not ((old_medoids == curr_medoids).all()):\n", 104 | " # Assign each point to cluster with closest medoid.\n", 105 | " clusters = assign_points_to_clusters(curr_medoids, distances)\n", 106 | "\n", 107 | " # Update cluster medoids to be lowest cost point.\n", 108 | " for curr_medoid in curr_medoids:\n", 109 | " cluster = np.where(clusters 
== curr_medoid)[0]\n", 110 | " new_medoids[curr_medoids == curr_medoid] = compute_new_medoid(cluster, distances)\n", 111 | "\n", 112 | " old_medoids[:] = curr_medoids[:]\n", 113 | " curr_medoids[:] = new_medoids[:]\n", 114 | "\n", 115 | " return clusters, curr_medoids\n", 116 | "\n", 117 | "def assign_points_to_clusters(medoids, distances):\n", 118 | " distances_to_medoids = distances[:,medoids]\n", 119 | " clusters = medoids[np.argmin(distances_to_medoids, axis=1)]\n", 120 | " assert (clusters[medoids] == medoids).all()\n", 121 | " return clusters\n", 122 | " \n", 123 | "def compute_new_medoid(cluster, distances):\n", 124 | " mask = np.ones(distances.shape)\n", 125 | " mask[np.ix_(cluster,cluster)] = 0.\n", 126 | " cluster_distances = np.ma.masked_array(data=distances, mask=mask, fill_value=10e9)\n", 127 | " costs = cluster_distances.sum(axis=1)\n", 128 | " return costs.argmin(axis=0, fill_value=10e9)" 129 | ], 130 | "language": "python", 131 | "metadata": {}, 132 | "outputs": [], 133 | "prompt_number": 175 134 | }, 135 | { 136 | "cell_type": "code", 137 | "collapsed": false, 138 | "input": [ 139 | "m = distances.shape[0]\n", 140 | "curr_medoids = np.array([-1]*K)\n", 141 | "while not len(np.unique(curr_medoids)) == K:\n", 142 | " curr_medoids = np.array([random.randint(0, m - 1) for _ in range(K)])\n", 143 | "old_medoids = np.array([-1]*K) # Doesn't matter what we initialize these to.\n", 144 | "new_medoids = np.array([-1]*K)\n", 145 | "print curr_medoids" 146 | ], 147 | "language": "python", 148 | "metadata": {}, 149 | "outputs": [ 150 | { 151 | "output_type": "stream", 152 | "stream": "stdout", 153 | "text": [ 154 | "[397 43 253 115]\n" 155 | ] 156 | } 157 | ], 158 | "prompt_number": 176 159 | }, 160 | { 161 | "cell_type": "code", 162 | "collapsed": false, 163 | "input": [ 164 | "distances_to_medoids = distances[:,curr_medoids]\n", 165 | "print distances_to_medoids\n", 166 | "print distances_to_medoids.shape\n", 167 | "print np.linalg.norm(points[0,:] - 
points[179,:]) \n", 168 | "print np.linalg.norm(points[1,:] - points[22,:])\n", 169 | "print np.linalg.norm(points[22,:] - points[22,:])\n", 170 | "clusters = curr_medoids[np.argmin(distances_to_medoids, axis=1)]\n", 171 | "print clusters[curr_medoids]\n", 172 | "assert (clusters[curr_medoids] == curr_medoids).all()" 173 | ], 174 | "language": "python", 175 | "metadata": {}, 176 | "outputs": [ 177 | { 178 | "output_type": "stream", 179 | "stream": "stdout", 180 | "text": [ 181 | "[[ 16.82801749 2.45804751 11.2836516 11.10337124]\n", 182 | " [ 15.64709263 1.35559217 10.70764811 9.49203679]\n", 183 | " [ 14.09528204 1.88648451 8.14738005 10.37657413]\n", 184 | " ..., \n", 185 | " [ 0. 14.4147361 8.00811339 11.30889477]\n", 186 | " [ 2.08044493 13.90343909 8.89720264 9.6209678 ]\n", 187 | " [ 3.09463156 12.34087753 8.0780855 8.2247126 ]]\n", 188 | "(400, 4)\n", 189 | "10.6057741324\n", 190 | "1.65313667826\n", 191 | "0.0\n", 192 | "[397 43 253 115]\n" 193 | ] 194 | } 195 | ], 196 | "prompt_number": 177 197 | }, 198 | { 199 | "cell_type": "code", 200 | "collapsed": false, 201 | "input": [ 202 | "colors, medoids = cluster(distances, k=4)" 203 | ], 204 | "language": "python", 205 | "metadata": {}, 206 | "outputs": [], 207 | "prompt_number": 178 208 | }, 209 | { 210 | "cell_type": "code", 211 | "collapsed": false, 212 | "input": [ 213 | "_ = plt.scatter(points[:,0], points[:,1], c=colors)" 214 | ], 215 | "language": "python", 216 | "metadata": {}, 217 | "outputs": [ 218 | { 219 | "metadata": {}, 220 | "output_type": "display_data", 221 | "png": 
"iVBORw0KGgoAAAANSUhEUgAAAXkAAAEACAYAAABWLgY0AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xd0VMXbwPHv3Z5NL5ACAUJJofceCL13RAGR3kUEERBQ\nEBGQJkgTQRRUivReAoTem5DQWwgkgTRSNslmszvvHxvzk5ciSAmE+zlnj+zuvXNnx+TZm5lnZiQh\nBDKZTCbLnRQ5XQGZTCaTvTpykJfJZLJcTA7yMplMlovJQV4mk8lyMTnIy2QyWS4mB3mZTCbLxV44\nyEuStFiSpHuSJJ3/x2sukiQFS5J0RZKknZIkOb3odWQymUz2/F7GnfwvQOP/99pIIFgI4Qvsznou\nk8lkstdMehmToSRJKgRsEkKUynp+CagthLgnSZIHsFcI4f/CF5LJZDLZc3lVffLuQoh7Wf++B7i/\nouvIZDKZ7Cle+cCrsP6pIK+dIJPJZDlA9YrKvSdJkocQIlqSJE/g/v8/QJIkOfDLZDLZfyCEkJ71\n2Fd1J78R6Jr1767A+scdJISQHy/pMXbs2ByvQ256yO0pt+eb+nheLyOFcjlwGPCTJClCkqTuwGSg\ngSRJV4C6Wc9lMplM9pq9cHeNEKLjE96q/6Jly2QymezFyDNec4mgoKCcrkKuIrfnyyW3Z855KXny\n/+nCkiRy6toymUz2tpIkCfEGDLzKZDKZ7A0gB3mZTCbLxeQgL5PJZLmYHORlMpksF5ODvEwmk+Vi\ncpCXyWSyXEwO8jKZTJaLyUFeJpPJcjE5yMtkMlkuJgd5mUwmy8XkIC+TyWS5mBzkZTKZLBeTg7xM\nJpPlYnKQl8lkslxMDvIymUyWi8lBXiaTyXIxOcjLZDJZLiYHeZlMJsvF5CAvk8lkuZgc5GUymSwX\nk4O8TCZ7yK1bt+jfpw8d27Vj1apVOV0d2QtS5XQF3nXbt2/nqwmjMRgMdGj3AWO++BKlUpnT1ZK9\nYwwGA9evX8dsNtOobl2KJyXhYLEwaPt27t+7x8CPP87pKsr+I0kIkTMXliSRU9d+Uxw9epSmrRrx\n3k+VcfDUs2HwaT5o3I3xY7/J6arJ3iEnTpygWcOGaM1mYtPSyGux0MNiASAS2Obuzu3o6OzjTSYT\noaGhqFQqSpQogUIhdwi8TpIkIYSQnvl4OcjnnGHDP+Oyw14ajykHwJ2zcfzZ8SzXL97M4ZrJ3hVC\nCPJ7eFDz/n2KA0nAAqATkA+IAda4uhIVGwtAXFwc9QIDiYmIIFMIAkqXZtvu3djY2OTYZ3jXPG+Q\nl7+Cc5CNzobUOFP285TYdGxsdE88PjExkZUrV7Js2TLi4uJeRxVluVxqaioxcXEUz3ruAHgDJ4Gb\nwFa9nu49e2YfP2zwYGyuX6d3Sgr9DAYSzpzh22+e/JenyWRi69atrFy5ksjIyFfyGRISEggLC8Ng\nMLyS8t92cp98DurTuy8LqixAoTyBvaeO/TOuMG/mgsceGx0dTdWalXH206JUK/hsxBA2rdtChQoV\nkKRn/lKXyR6i1+txcXTkanw8xQADcF+nw8fXl1CLhV7vv8/IUaOyjw87f57iGRlIgAQUTU8n9OzZ\nx5adnp5OvcBAoi5fxgEYAOwMCaFChQovrf6LFi5kyCef4KBWky5JrNu0iVq1ar208nMDOcjnIG9v\nb04cOcnc+XNIvpXMyqUTqVev3mOPHTdhLMVau9B6WkUAto47Ta36NalVM5C1f25Ar9e/zqrLcglJ\nkli1fj2tmzXjqEJBbEYGHw8ezIRJkx57fKmyZQm7dIlCGRlYgKs2NrQuX/6xxy5atIgHYWF0SUtD\nAfwF9O3WjZPnz7+Uul+9epVhgwfTPT0d1/R0rgFtW7YkKiYGtVr9Uq6RG8jdNTksMjIST3cv6gTW\nJSgo6MnHRd/Bu5JL9vNC1fLiXcGFBLsIRn31xX++fmZmJpOnT
KJl+2YMHjqI+Ph4wHoXduvWLYxG\n438uW/Z2CAwM5Fp4OEu2bOF0aOgTAzzAtJkzMfn68qOdHfP1evJWrMioMWMee+zt8HA8sgI8WLuB\n7kZFvbR6X7x4EW+1Gtes50UBi8lE9D8GiWVykM9RP/70Iy3fa8amG78yZsZntG7fAktWVsP/F1Sz\nLodnXyM1wYgxxUTI96EUCfSgSu/CHD959D/XoXvvrvy+8yfc3kvjr/S91Ayqztq1a/HI507lwPJ4\neXuwe/fu/1y+7O3g7OxMjRo1KFy48L8ed+zMGXYdOcL+kyfZuXcvOt3jx5Fq1KzJRVtbkgELcFyt\nplq1ai+tzkWLFuWuyURS1vM7gBnImzfvS7tGriCEyJGH9dLvLpPJJHR6nRh79T0xR/QUszK6i0Kl\nPcWOHTsee7zZbBb9B/UTSpVCKFSSqNqtmJiV0V00/qKc6NT1g/9Uh8TERKHTa8X0lI/EHNFTzLb0\nEH7VCwhbexvx2ZEWYo7oKT4JaSqc3RxFYmLii3xcWS6wdetW0bhOHdEoKEhs2rTpmc75euxYoVGp\nhFalEjWrVBFxcXEvtU6Tv/1WOOh0opijo3C0tRUbN258qeW/ibJi5zPHWvlOPoekpqYihAW3IvYA\nKNUK8vo5EZuVqvb/KRQK5v0wn+ioexQvFUDchXR+rBfC5dXxTJ04/T/VwWKxICkkFCrrj4EkSUgq\ncPS0w6eq9W7IN8gTBw89N27c+E/XkOUOO3bsoHO7duhDQrDdu5eu77/Pli1b/vW8r8aNIyU1lZj4\neA4cPYqTkxPTpk4lsHJlWjdtyvkX7J8fMWoUZ8LCWLhhA5evX6dFixYvVF5uJA+85hAHBwcCSvqz\nbdxZ/Bt7se3rM0SciCOxTuJTz3Nzc+PE4VMcPHgQs9lMjRo1sLOz+091cHJyon7D+vzW8RDV+hfh\n+r77pNzOJDkxlfjbKbgUsCPmWhLxd5LIly/ff7qG7PWzWCw8ePAAJyenlzZRae7331M7LY3Sf7+Q\nmsroYcPw8PD412wZtVqdPRA65osvWD5nDjVSU4mXJGofOMCpc+fw8fH5z3UrXLjwv3YzvdOe57b/\nZT54x7trhBAiIiJClKtURqhtlKLNtMrio6W1RN6CLuLXJb88dNy6devEh106i2bNm4l58+YJg8Hw\n0uqQlpYmho0YKvzLFBO2TjqhUEiiqH9h4ZTHXpRpVEw45XEQPy1a8NKuJ3u19u7dK1wdHYVeoxF5\nnJ3FgQMHXkq5rRo3Fi1BjMt6tAbhplAIF71ezJ4165nLcXV0FJ/8o5wqarWYMmXKS6nju4Ln7K6R\nZ7zmsE8/G8xV+wM0HWdNQ7u8O5L9X0Rw9rj1z9hJ301k+g/fkZqWTvkOPsRcSUbzwImjB45ja2v7\n2DINBgPJycm4u7tn59Bv27aN4WOGkZyURMsWrRj/1QS2bt1KSkoKnp6edOvdhd7bgvAq6cLmUadJ\nPKzmq1Hj8PPzo2jRoly8eJFzWXdclStXfj2NI3suDx48oLC3N81SUigKXAW22dtz7uJFFsyfz/Ur\nV6heqxb9Bwx47jv8PXv20K55cwLT0pCA3UAbwBVYqNUSHRODvb39v5bj7uLCewkJ5Ml6vkWj4YNJ\nkxg6dOhz1edd9rwzXuXumhxmNptRqP/3C6fUKLLSGifzV+gZ1vy5FueCenouqYt//XwIIVjYMoQl\nS5YwYMCAR8r7ZuJ4Jn47EY2NmoIFC7Jt4w6io6Pp1PUDOi6pimshezYM2YpvyRV4lHDEwdOGs+tv\n4lvXE+9ybgA0GV+W4Y5/0LRpUyRJYvGvixk2YghFA724fTKGjzp1Z8rEqa+tjd5VFouFeXPmsDc4\nGO9ChRg9dixubm5PPP7y5cs4KRQUzXpeDDgA1KtdG/2dO+Q3Gvl+yxZOHT/O4qVLn1hOcnIyw4cM\n4fjRoxT19eX7OXOoW7cua
zZv5quRI7l45gxtMzMpknW8jUpFXFzcU4N8ZGQkkZGRdO3Rgx9nzkRt\nNiMAk1JJhw4dnrk9Vq5cydWrVylSpAgdOnSQ8+GfgXwnn8NOnz5NvUZ1aDq5NHZ5dGwZ/hdOujyo\n8qXj39KDVQOPoHfVMvJMa5y8rHfuG0eepIZtW7768quHytq+fTs9BnVh0IEG2LvbsG3sX6QftyOw\nWhBHjRtoMdHadxp7I4kpFTfwXdyHSJLEyRXXWfvpcSbcfR+FUsGNw/f4o8Mxou/cx2Aw4O6Zh6En\nmuLu50RqgpEppbawZ9s+SpUq9drb613ycb9+bP3tN8qmphKlVhPj6cnZsLAnjsFERERQwteXPunp\n2GNdh+ZHjQZXtZoeBgMSYAS+V6uJjonB0dHxkTKEENSuXp3UM2coZTRyU6XitocH5y9dwtbWlqio\nKIoXK0Yrg4FCwBngtLs7NyIiSElJ4dtvvuH2jRvUrleP/gMHolAomDJ5MhO+/hontZp7ycnYA3qs\nee1hSiUdevdm9vz5T20LIQQd2rTh8I4dJKSnYwYUKhXL/vyTVq1acfjwYRISEqhcuTLu7u7/vdHf\nAvLaNW+Z8uXLs3n9Vh5stuPqPCMdW37EnahweqyvTWDfAIo3yY/eUcOG4SdITTBy+1QsJ5fcpH69\n+o+UdeLECUq/lx8HDz2SJFFzoB+nT57BztaOpMj/TWp6cDcVtV6V3ZWTv4wrIhNm19zFyl7HWNx6\nPz/O+QmAmJgY9I463P2cANA7a/Eq7sqdO3deQ+u8u0wmEz/9/DMdUlMpAzQ2mdDEx7N9+/YnnuPt\n7c0Xo0fzq17PBnt7ftXr6dipE7ZKJX9HBBWglCRMJtNjy7h79y7nzp6lmdFIIaBOZiaKpCSOHTsG\ngKenJ6vWr2eLkxMTFAouFCjA9t27MZlMVKtYkQNz52LasIEZX3zBx/37c+bMGb775ht6p6cTmJyM\nEyCArkBtoLvZzOLFi4mJicFgMLBs2TIWLVpEeHj4Q/WaN28e2zduJCU9nWbAKKBLZibdOnemYd26\nvN+kCV98+CEBRYuyYcMGEhISXqD1cxe5uyaH/fzLzyz+bREatZqO7T5k+Befo7AFhcr6a/nR77X5\n1nctl7ZHMcpzOY7ODsyaNofq1as/UlbBggVZtiges8mCUq3g6t4o8hfIR/fu3ZldeRYreh3ByceG\ng7OvIAklsTeScPDQE/xNKG1ataVl09bExsZSc0hNSpQoAUC+fPlQSRpOrbxBhfcLc+vYfW6fuS/f\nxb9iFosFhHjoF1SNdYby03wxZgyNmjbl8uXLBAQE4OPjQ4mtWzloMFDQbOasVkvlSpVwdXV97Plq\ntRqzENY7ZawB2SQEKtX/alK/fn3ux8djNBqzJ0Jt2LABERNDk6x1bfwMBmb8/DNVqlenoFKJAxCV\nVaYd/ws8OqzdPZGRkXRs3x6iorAVguEKBTv27KFSpUocO3aMEUOH4iAEKUDJrHPzAXmFIOzoUXoZ\njaQCvwAd27QBtZqPP/6Y76ZNk9d2ep5R2pf5QM6uEQt/Xii8iuUR/bc0FN1X1BF6J52o0cdfeFdw\nE0GDS4ihh5uLWgOKC/9SvsJoNAohhLhx44Y4fPiwiI+Pf6S8yMhI4VeymMhT2EmUrF9IuOR1FseO\nHRNCCBETEyMmfDtBDBv+mQgODhaVqlYUemet0DloRK26gSI5OfmJ9Tx16pTIV9BT2DnqhYOz/Tsx\n4eRN8F7r1qKkjY3oCqKBQiE8XF1FTEzMU89Zvny5eK9VK9G7e3dx/fp1IYQQN2/eFC0aNRKlfX1F\nr27dRFJS0r9e10+vF21AlNdqRYVSpURGRsZTz/ntt9+En06XnTUzBoRGpRIHDx4ULnq9+BREXxB2\nIGxANAUxGEQgiOJFi4rxX38tymk0Yuw/snd8CxYU77dtK8qWLCmqgLAFoQYxMOuY4SDs1Wp
RTpLE\nOBBFQdQCMTbrPS9bW7Fu3brna/S3AM+ZXSMH+ReQkZEhho0YKnz8CopSFYo/8yzAv1UJrCAG7mgk\n5oieYo7oKdrPqioCGuUTk2M6i4qdigi7PFqhVCuEWqsWg4cOEl+OGyMc3exFsUoFhEveh9Pj4uLi\nRMEi3iKwb3FR++MA4ZjXXkycPPGRa5pMJlHE30d4lXQWA3c2Fp0W1RT2zrYiNDRUCCHEosWLRIEi\n+UVeLzcxaMjH2b/cFotFxMbGiszMzBdoMdnzSE9PF599+qmoWKqUaN20qbh27dpTj/9h1izhodeL\nViCCFArh5ugobt++/dzXNZlM4rtJk0S75s3FyOHDn3oDIIQQSUlJwq9wYaEDUQ9EdxAlNBrRskkT\nYTabRb/evYUahB6EBMJeoxFaEBoQZQICxJ07d8SAvn1Fw3+kVvbP+jJoCaKqJAk9iK4gnLLO8wHh\nYmMj3m/fXrjr9eKzrC+Bof8oozaIMWPGPPfnf9M9b5CXu2tewMjRw9l+aj0frCrPgzsGPurWmW0b\nd1KlSpVnOl+lUpOR+r8/vzMMmUSdT8QQl46khCI1Pei+og4ZhkzmN1hN7K0kRl1shX0eG8K2RtD+\ng3ZERUQjSRLLly/Ho5It7/9oXRukep94ZjaewRcjHl68bPXq1UTdi2TIwWZ4FncGIPrCA8qVL0vv\n3r1ZvWklH62qiV0eHX/22sSX42yY/O0UJEl64p/4sldDq9Uy7fvvn/n4qRMn0io1FU8Ai4U0g4Hf\nfvuNUf9YKvhZqFQqho8c+dj3hBAcPnyYiIgIypUrh5+fHwsXLkQXGUlvrKmV5wDJxoZJ/fvj5uRE\ncnIyWqwbkYQA+owMmmDdkGTtrVvcu3ePeg0bsva33yiemooe2IN1YLa89aLEA0eA6sAhnY5W/frR\nuXNnKlasyPixY5nw7bcIs5nDQGMgE7hra0uRIkV418lB/gWsWrOKrluq4uHvRL5SLlTtF8P6jeuf\nGuSjoqIYNHQgly5fxNnBhTX9T5J8L530JBP7Z1yhY4cPWVB3NYb0FPpvbYBKo0SlUVKlT2F2TwnF\nPo91B57iTfLzc2wIqamp2NrakpaWht7tf+lkdm460tMeXUHy3r17KDUKTGnm7NeMBhNeZZ349Y/F\nNP6qLIUqW7OYm08py4Ye65n87ZSX1WSyVyjTbH7oF1olxL/24T+vgX37snbZMrwUCm5kZtLxo4+4\nfv06LunpuAIdgHhghVLJRx070sZgoCAQBqwA0oGhgA1QAChuNhMSEkKPHj0o6OfHgjNn+Psns/8/\nruug1aKvVIm8BQuysksXGjVqhMFg4NChQxw9fBgvnY586emcN5u5rtWiVKmoFBhIly5dXurnfxvJ\n2TUvQG+rJykqNft5SpQRO9snLzFgNBqp06A2Bp9bNF3gi7a8AQc7J6RD+XC64k9I8D7mzZ1PVMQ9\n3FzduHbgHmC9e7q2L5oHUSnsmxsGwLn14Ti5OWWvI9+8eXPOrAjn5PLrRJyJZWWPo3R4/9H84xo1\namBJFyzusIcjv1xh81enOPH7de78FQ+S4P6V/y2rEHsjOTvNLjo6mlbtW+DjW4AGTevJa9m8gXr2\n7s0WvZ7rwCkgTKd75hz0Z3H06FHWLFtGD4OBVsnJdEpLY9GCBYTv28cx4D7WFM39Wi2lS5fGQ6Wi\nYNa5JbAGGyVwL+s1AcSrVDg5OdGgdm0IC6MDUEGtxtHRkW06HbeAE5LEDY2G3/74gzoNGtCzSxcc\nbW3xdHOjQ6NGHNi1iw8NBhqazfQFksxmlq5bx/otW1AqlS/t87+t5Dv5F/DNV9/Sv3Mfqg8qSmJE\nGtd3xNPreK8nHn/u3DmMCgPNvg1EkiQKVHRj4sZNjB7xVXY2C8DSpUvBNpNdU84RtjUCS6YFY0om\ng3Y1YVbQVg7Mv0RSVCpahZ6MjAy0Wi1eXl6sWbmOMeN
HcST+Do0btiCwWi1mzZpFhQoVqFmzJgAV\nK1Zk6ODPmThpAqsHH8HGSYtCpWD4iVbonTVMLr+etMQMHD31HPrpMmNHfU1mZiYNmtQlXyMdH35T\nmbDNd6jToDZhf13Ezs6Omzdvcu/ePfz9/XFycnrl7S57vK8nTMDBwYE1K1bg6OzMzilT8Pf3f2nl\n37lzB0+FAm3Wc3esAaSVycQeYJFCAQoFzRs25IuvvqJBYCCpWHPiE4CUrH8vwxr044FYk4mAgADu\n3LhB34wMFEAhk4nbZjM1OnTg7MmTuLu7EzJzJtevX2fIgAG0TU1lJ+AP5AUOYs08Amvmjp1GQ9Gi\nReUNxrPIrfAC3mv/HmtXbKBYfCB13Ntz6tiZp07E0Gq1pCUbsWRaJ4FlZljISLUG6X8KvXCeUu/l\no+KHRUhPzqDesFIMPdScQlXyIiyCzDQzaQ9MpJtSWbNmDRWqlMMtrxtNmjamZrVALpy9TGxsDMO+\nGcSGq4tp17kV076fll3+0CFDcXR24sOfA/lgfnXyl3HBI8AJBw89X5xtQ+im26TEptN6SiWmzpzM\n+vXriUm4T/NJ5fAIcKLe5yXRuik4e/YsY8aOplzlMnQZ1IGi/kU4cuTIq2ls2b9SKBR8PnIkR8+e\nZUdICJUqVcp+z2KxEBYWxtmzZx+bI79z506CqlenhK8vn332GampqY8cU65cOW5lZvL3th+nsXa7\n6IFqgHuePKRnZLBm40a8vb2xAPOwBvUFQEOsXTWVgWigFJBuMvFx375kCsHfUyMFkCkEg4cO5XRY\nGNv27KF06dJs3byZsqmp5AceAAGAF9a+/VCsXUGHFQqc3Nzw9vZ+scbMRV7pnbwkSbewTrwzAyYh\nRK5b9KRWrVr/uqekyWQiLi6OgIAAypQox+I2+wlo4UHo6rvUrFHrkcGhQgV8WDjhR9KSrF8Ip1fe\noHiT/GwceQK1TknzbytQrp0PR5dcoWe/bth72FCgsgvxt1L4cfE8Nm5aT1xyDKMutUZjo6LuyOKM\n8R3Fd1Mmk56aRqs2rVj75zq69uzC7RsRaO3VJESk4OxtR0KEAYVKwQfza6DRq0iKTmPf/n2kJRvJ\nMGSitVNjNllIiUvj0qVLzF0wm7pfBFCkhjtJ99J4r2M77tx6NRs2y/6b9PR0mjdsyLlTp1ArFLjm\ny8eegwezl0gICQmhfatWmNPT8QJWzJjBn7//zrlLl3B2ds4up0iRIixaupTuH31EpsmEMJtpKwQZ\nwHaNBu9Chdi7dy916tThjz/+oJjFQjmsg7EVgb9HqkoCl7EG+e3ArWvXKF6qFOtCQ/FNS+OaTofW\n3p7O7dvj7uHB9NmzKVu2LC5ubsQqFGCxkBfrdoJBQHtglSSRqVRStmRJdq5Z81Be/7vuVd/JCyBI\nCFEuNwb4Z7F+w3pc87rgX8oXb5/8jB31NQ1LtCV4/AUu7rvN3j372LRp00PnHD5xkOJN8jM9+SMm\nRnci4nQcn2p+5eCCS5gzBSd+u4Yh3khKjJEige58eak9Q/Y3p3ofP/Quagq00ONYQIfGxvqD7pzf\nFoVWosOSioy53oZLySf5fcVSbl27TUaGiW/HT2Za+a3MDdzDrNpbqftZSTR667nJkUa8vLxo06YN\nPzbcw+7p51nYLISyJSqw7M/fUdgIosMesKj9HuJuJBN99z7p6emvvZ1lTzZl8mTunTxJv9RUeqek\nYHfjBkMHDcp+/6e5c7FPT6cG8CHQG3CLiWHyxImPlNW+fXseJCdz9949vpsxg622tkyVJKIyM0k/\nc4aOzZvz6ccfYzQa0Vos5Af8gLtYM14EcB7rJKg/ABesY1VqtZo6PXpgadoUqUgRbB48oOq1a+gP\nHqRuYCDh4eHkzZuXyxYLqwAH4DAwDVin09Hxo49Iz8jg2Jkz8rLD/8/r+Lp7Z6ebRURE0L1XV/rt\nrEvBSnk4u/YWdRo
GoVCC1k7NkEPNQUDXZl04efR09h398RPH6bCyLAqlAlsXLeU6+LD3+zC6/h6E\nT7W8BE/+i5/b78a1sD0lmxdAobA2cYkm3pxdfYugT0pw5OcrhG65jV89L/bPvYRGr8K/nhcKpYKm\nE0vza9MdgDVdbujgobRp2Ybw8HAuX7nMqLEjkCSJiONxXN0XxXHjWOxdbHGwdyA52I6+LbtQtWpV\nGrWqz6hLbdDZa0iISOGbgDV45nd/4nZwspxx/swZiqal8fcQpL/JxNlz57LfV6lUpGPdgxWsv7AF\nhOD2EwbXlUolLi4uDP70U+rVr0+5UqXoIQQpGRmUzshg8U8/sWztWr7TasmbmYkHcEShYKYkoVer\nSTQasc3qnvEEWgjB7aNHWXXlCmGXL+Pl4cGnJhM2WXWKMpkoV7o0aQYDFqz97/ZAF+BnoGlgIHdv\n32bJkiV07dpVnuH6/7yOO/ldkiSdlCSp9yu+1hsnNDSUAuXyUrCSNSWxbNtCaG2VdFseRIMRpZnT\nYBv2eXXYumvo1a8HPy6Yj8VioWCBAlzba92M2GIRhG68TeGa7pRqUQA7Nx2tplTm1vEY/lp/i8OL\nrmBMMWGxCA7Mu4h3eVccPfV0WxbE4vdDGGb/Oyfn3cGnkgcKpfV/971Libi4ujxUVx8fH4KCgujb\npy/rVm7E6055ru27R7Xevti5a/nglyo0mRXAuUtncfdwJz4+Hi9/V3T2GgCcve3Q2KiY/f3c19jC\nsmdRunx5rtnYYMbab3pMqcTB2Zm7d+8C8PGQIaQplRzCeredBpzRagmsVw+w9ucvX76cCRMmZO8G\nZTKZ+HTQIKpVqIASWAzsADYCGSYTPTp3pmzp0kSWLctpX18qBQaiUKtxVCrRabX4V65MsiTRDmsg\nr2Gx4GI0sn//ftRZXzp/izEaEUlJ2JjNOGHtpjkErMMa8C3BwdiHhDBq4EBm//DDq27Ot86rvpOv\nIYSIkiQpDxAsSdIlIcSBv98cN25c9oFBQUEEBQW94uq8OteuXWPQ0AGE3w6nSuVqzJr2A97e3twN\njSUlNh07Nx3Rlx5gSjdTrLYnJZsW4PTKG/xQbxveZV3xaGRmxi8TOXv+LHNn/khQ/Vpc2RJDckwq\n0gMbks1GLGYLCqWChNspSBJ8uLgWG4adZJTHCjQ6NWazmWrdihG2LYLDc6/TulVrVvz+J4mJiVSp\nUYlf2h7AIZ+WMyvCWbNy3WM/R3R0NImJiXjnK4B/UAHuX0mk9ZRKBDTMD0DTSUaWLFvMz/N/5e75\neC7uvINWAZe5AAAgAElEQVRf/XwcXXwFJzsXmjVr9jqbXfYMho8cyYGQEOYdP0660YhOCJTnzlHK\n35/NO3ZQvXp1Nm7fTrdOnZgUG4skSfTr0YOuXbvSr2dPVixbhk1GBkWFYJ5eT7eBAzEajWxbvJhu\nGRmkAH8C9bF2xZwDEpKSuHTkCJKbG8EhIVSvVIne6ek4YB00XXj2LJJCgclsRon1btCINTlhxMiR\nzP/uO8qnphKlVBJlNlMP6+SoU1jvTHtj/VIJB6pmfU7H1FR+mD6dTwYPfp3N+8rt3buXvXv3/ufz\nX9tSw5IkjQVShBDTs56L13XtVy0hIYESZQKoNtiHokHuHJxzBW1kXnZvD+HLcaP5afGPeJR04srB\nCFp9V5laAwIQQvBVwZVo7NSMCWuLJEmkJWYw2mMFN6/fQqvVcujQIWxsbFi/aR2//r4Yd39HfKrl\n5cQf12g0uixBg0pwYccdTnwTQ92aDbh8+TI3I26g0ippVK8xY8eMQ6Ox3mknJibSq3cvTpw+jrd3\nPqZ/N/ORzT8mTZ7EhEnj8S7nRuy1JJQ2CrxKO+HfKB81+1hT8Q7Mv4hpXx7WrFjP3r176fTRB9y7\nG0Ox4kVYs2LdQ6mgsjeHxWJhypQpLBo/nk5ZXTcXgL98fFi4ZAltW7Yk02jEIkn8t
nw5LVu25IN2\n7Ti5eTNxGRl8jPWO0ADMUatxdXKiRUwMf+eSHQBuABasM1uVwFogRq1m2IQJ/DhxIm0TEzmGNZhf\nVatJN5txtlioCNySJISvLyfOnkWr1bJs2TJ2bN7MubAwos6f5++dEwQwA+iBNdjPB/6em3sH2JM/\nP9cjIh767CkpKXw3aRLXLl2iSo0aDBo8+K3On39jNg2RJEkPKIUQyZIk2WLNoPr6VV0vJx08eJA8\n/nbU/cwa4N5f6MIo1xUkJCQw4euJtG3VnkuXLjHozMccmHcBS6aFy7sjSY5Jp3Axh7//p7Fr2nky\nMzPxKVqIcuXLsG3TTkJDQ/npp58Ye7U9l3dHEh+egtkkKFLD+ut1+3gsFy9eJCo1HO+yrlzafROV\nQkWTBk2zAzzArNkzOX3jMM3nlSLhtoFGzRpwaN8RihcvDsCixYv4ZvI4Ov8SSNm2hchIz2RcoVVE\nXUjk0u67pD3IACHYN+0K2zZZ+/ODgoKIvB2NyWSSN294wykUCsxmM15GY3bffEFgW3Q0rZo1I19y\nMreyXv+gXTtuRESwbuNG2mRmcoz/BYqLWLtj4mJjSYTsIJ+ANYWxKmTn0VcC1prNeHh4EJ2SwvdY\nA3NhrN095bAOvN4GIlQqfvrmm+zxnM6dO3MpLIzgdeswYu1mUmL9gsjA2k0TDiBJHBMCe+CAXs/w\n/7fDlMlkIqhGDSyXL+NtNDJv+3ZOnTjBb8uXv4RWfTu8yu4ad2Bd1iCICvhDCLHzFV4vx+h0OgwJ\nRut+ipKEMcVEpsmcHWTLly9P+fLluRF+g3FfjmXzmFMohAoHZ1tiriaxa+o5zJmC0ytu8O3djti6\nalnW6yANm9XD2ckZvbMGZ287qnbzBeD8xgjWfnwS5/z2hG4Px6WInuEnW6FQSAQODGBW0Bbm/jiH\nDzt2wc/PD4BFvyzkw/VVyFfK2hcffTGRzt06cvnCFWz0OtQaNeZMC751PQFQqhQoNGDnpsXVx5aD\n8y9hI9mzc+uuh/KvATnAvyWqVKnCDzodFVNTccDaN18iIIBLoaEkAN2xBtPlmZnMmjkTjVqNY2Ym\nscBZrBON9gADgTghWIs1NTIZ67IFCuA6UBrr4O0tQKnVcvTgQbyEoAHWQL0Sa179IWAE1i+DTVrt\nI7n5C+bP532jkZ3Ab1i7a/4CNMB64K5azY8LF7Jx9WoSkpLoXKkSUVFRzJgxg969e2Nvb8+hQ4eI\nvXmTbkYjElAyNZWZa9cyMy7unVmL6ZUNvAohbgohymY9SgohJr2qa+W02rVr46TKw9KOBzkw/yI/\nNd5Lj149HtrB5/Dhw/wwfwbjbr7HtKSPaPJNGVKT02k0qgw3Dt8n5PtQqvf2wz6vDQqlggYjSxMa\nFkpScjJKjYKQWaEcXXKFH+pt5f6VRLo06Uv/piOoUK4C+cu6ZGfYeJZ0JiPVjIevdWDt3Llz+JUs\nxp2bd1nQMphbx2MAuLYvGnOeRMbdbs+A/fVItxhQ2yjZ+0MYQggu7boLSAze15QBWxvx1dX2GE1p\n8ozWt1j9+vX5bMwY5qnVTNVoSPL3Z9HSpRhNJupg3a81L9bc89PHjvHV2LFssLHBDtiANeVRCURk\nHdsZazdNDNZAkgFcARZIEguBo2QN2v7xB5EWC8uwZsMUxLoEggnrJJpzwFWz+ZE9EiRJQgDvYw3s\nF7AuUNYCa8BXKpV07NiRNZs20b5TJ36dP5/jU6eyZPRoqlaogMFgICMjA40kPbxpikJBRkbGy27e\nN5Y84xXrn3RTpk3hw+6d+HbSt8+d563RaNgbvJ/mJTtjc7oon/ccw5yZD2eZHD16lLwB9tw6FkNq\ngpFaAwNIT8lgy6hzXNgWgSE2nSt7IrFYrOMUNw7dR61TUbViNWztbNkzI5TNY05RsVMRag0ozpwf\nZ9OoUSOaNmnG2TW3uHn0PhlpmWwYcQJ7dx1RF
xMoXLgwjZs3pPLnXszM6E6baZWZ12QHm744Scz1\nJJpPKo+tixYPfyfqDitJerKJM6tu8YX7Mha23o0kwd/ZaH//N7eMo7yrhn/xBYnJyURERXH6/HlK\nlCiBr78/cf84Jl6SyF+gAJ+PGEHtJk2wUSgYCQzHejd/GFiE9U5dJUnEYx0IHQMUx7pcQTFgCNDD\naCQlNZX2WGe7dsE6OBuO9W5/FtYJURqNBqPx4QX1Bg8Zwnq9nouAkCRcgHKAL9aMHHN6OvVr1yYz\nM5ORn3/Oe6mpBAFt0tMhMpJVq1ZRvXp1jLa27FcquQls1mgoW7YsHh4eAOzatYvhn3/OlClTSEpK\neqlt/aZ456eFCSFo37EdN1NCKdU+H2s2nSCk9S52bt39XGtf2NnZ8dWYrx77XmxsLN/Pno7K3cSR\nxVdY+9kxmn1dHmc3J1S2YJbUuJSw48ahe3xXfj2uhey4duAexmQT0feiaVi9GctW/s6Qg83JV9oF\nY4qJ2KvJLFq0iDFjxhCyfw+z623DZDSjtVWhkjT8uXIlycnJqGyhatdiAJRr58OOr8+hupAPP19B\n1IUE8pW2dt/cOROHyBQMOdgMU6oZtY2SCQFrWPzBHnyquRN+OB5/3+IULVr0sZ9R9vbQarUPLaXx\n67JlBNWoQbzRiFmhINzWlsVjxwJwNzycGhYLWqzplSWBm0BT4HcgvxB4An9vL14f+EsIamMN4kmA\nLWRv+p0PcAIeSBI6lYqeJhNOwJnERFo3bcrV8PDsPPcvxowhj7s7G1atoqy9Pfv37WNjfDxuWP9K\naACcCw1l165dGNLS+HvHWgmwN5tJTk7Gzs6OPQcOUL9WLfZGRiKZTHTy8cFsNvPzokWM/uwzSqem\nkqDVMnv6dHoNGEDlypVp0qTJK2j5nPHOB/kbN25w6PB+xtxsg1qrpEq3Ykzy28S5c+coW7bsC5V9\n//59Pur5Ifv37ad8x0J0XGBdJGzLuNOsG3qC8uXKc+LkCT7Z05QCFdyIvvSAyWXXEXsjCbPJQp0h\nJUh3ucHOGVcQZtDYqri8O5JfOoag0auYFPwtBX0KsmPrTu7du0dSUhKOjo6sWbuaLt07k2pIw4KZ\n+NspuBSwI/WBEcN9EzNWf09sbCzNWjXhZkgsMeGJXD8UhV0eHd/4raHjghpEhSWQkZZJ6KYI7pxK\nIDU2g3Wr5sqLPuVCZcuW5dS5c6xevTq7CyRfvnwA5PX0JALrAOnvWLNn0rDejZuw9r+fz3pdAUQC\nGoWCMIuFklizYQxALNYvgkQgWamkQ8eOXF63DqesdXTKApsiItCoVNja2DBpyhT6DxhA7z596N2n\nDwBXr16luJ8f5YWgBda/FsIliaSkJJo3acL2nTupZTQSA1xSKGjQoAEAvy9dijYhgREAQrBmwwam\nTp7M9KlT6ZCaah08NhpZev8+v48fzwIbG7oNHMjE7757lc3+2rzzQd5oNKKxUaPSWIOXQimhs1W/\nlD67tu+3RlculaJKd4rW9sx+vWigB8d+uo6XR35sHP+iQAXrfZCHvxOeJZ2xy6OjQAU3WkyoCIBb\nUQeW9z7I1MobyDRa6LepAb51vIi++IBPan1M7cDaFCxYEHd3d7Zt28a4yV/Sa0dtHDxs+KPrIaZV\n3ES5NkW4tvceH33YFV9fX3x9fTl++CQffzIQw/37jA9/HxsnLX/02M/vPQ4gMq05+aMvt8XZ244L\n2+/Q+aOORN+9Lwf6XKhIkSKMGDHikden//AD/lu2cFoIqgI1sGbRLMYa+Etj7VNfAOTT6biuUDB9\n2jS+HjOGQyYTSRkZtGvRgt+2bCGfRkNkRgbjx42jQqVKdFq3jnSsSxzcxJoxM9RiIclg4KvPP8fX\nz496WROyAIoVK0bF8uVRnDtHHpOJ81jHB2rWrEnTpk3p36sXfwYH4+riwtoFC7JX4NwbHEz5tLTs\nrJ9yqansD
Q4mNStv/28ugJvFQimDgVkzZzJk2DDy5MnzMps5R7zzv62+vr64u3qx7tOT3Dx6n81f\nnEEn7ClTpswLlZuWlsbxwydoNbU8vkGeHJx/kbTEDDLSMtn7QxgZRhO9uvcmJdbIjcPWFbajLz7g\n3sVE7l9Mxj6vTXZZ9nlt0Lto6fp7bXQOanzreAHgEeCEYyEt7T9oR0pKCgDbg7dRbUAR8pVywT6P\nDa2mVsDezoH3Svfllzm/M3ParOxyixUrhtZWTd0RJbB10aFQSFT6sCgO9g58P2U2Jer64OxtHTwu\n3jg/aelpxMfHv1C7yN4uPj4+bNu1i3SsAR2sQdkv6983gbxqNVovL/rMnMmJs2fp378/t+7eZfex\nY4TfvcuKVas4HRrK1GXLOHrmDMOGDycoKIj2XbqwUK9nhaMjy4FmWNMv82DNggnZs+eR+mzavh2X\nunX5w8mJawEBbN+9Gy8vL+zs7Fi6fDm7Dxxgwa+/UrFixexzvAsWJPIfefFRKhX5CxWiRbNmbNPp\niAMuYR3YLYK1e8lOrebBgwcvuTVzxjt/J69SqQjeuptPP/+E4EHn8PcLICR49iPL/z4vjUaDUqUi\nPjyF2oOKc+9yIiPcfkdSSJRuVQAJBQEBASxZvJTeTXvi4K4n+V4a346fRKohlZmTppPX1xEbJw3L\n+xwkcEAARWt5YkozE3E6Fu/ybiREpBAfnoJzlXiGDv+Un+Ytws0lD6cupWTXI/riAzw8PLC1tWXD\n5nWEXQhjQP8B2emdJfxLsWfdesq1L4SkkAhde5fmTVpQpUoVRo+PJjEqFUdPPZd3R6LVaHFxcXnS\nR5blUnXr1qVsmTJc/OsvKmPNorkGGCSJ88WKUbl6dbZNn/7Qz4ZOpyMgICD7eeHChR9aOEySJGbP\nn0/v/v2Jjo5mUN++aG7dAqxdPHE6HXkfs2y3m5sbm7Zvf+R1IQQ9u3Zlw+rVOKnVpKhU7Nq7l1Kl\nSjFx6lSq7tlDbFaKZpKdHSsmT8bBwYGP+/Zl9fbtPEhIoKLFgh3WdXZsnZ0pVKjQizfem+B5NoR9\nmQ/e4I28k5OTxeXLl4XBYHihcmbP/UHkLeAiGn5eRvhUdRfO3rbivbnVRMkGPqLd+22ExWIRQgjx\n4MEDcebMGREbG5t97urVq0WZSqWEQx5bYeuqFTMzuok5oqfo9keQUNsohXcFV2HnphPtZlYR782p\nKjzzu4tjx46J+Ph4UcTPR1Rs6yvqDCwlnNwcRIvWzUTRyvlEm+mVRekmhUW9xnWyN+ROSUkRNYOq\nCc8ieUR+f3dRtlJpERcXJ4QQYtKUicLBxU4UregtnPM4iT179rxQe8jeDnfv3hXLli0TGzZsEOnp\n6UIIIS5evCi88uQRHmq10IFQg8jr5CR27dr1TGWeOHFC1KxcWfgVKiQ+7tdPpKWlPfT+nj17hKNe\nLypqtaKAJAmdQiFqVqsmfvnll4d+L55k9erVooCtrRiVtYl3KxCl/f2z309ISBArVqwQK1euFA8e\nPHjk/FOnToniRYsKvVYrKpct+6+bpucknnMj79e2rMH/96Yua7B23Vq69+yGrbOWtMQMVi5bRcOG\nDf9zeXv37uXw4cO4urpyJzKCazevUr5MRYYMHvKva16HhoZSv0VtXP1tQJIIaJiPY79eRZ/hgjKf\nkaYTyrBq0FHibiaTv7QbcZdTGT38S7p37cGKFSswGAxUqVKFxs0aMvZ2O2wcNJgzLUwttZU/F6+j\nWjXrpt9ms5nQ0FDMZjOlSpV6aHJTREQEd+/exc/P76G1xWW50+nTp2kQFEQBIFkInAoXZv+RI+j1\negwGA1UrVkRz9Sr1zWYigU16PafOnXtoT4S4uDi2bduGJEk0bdqUBw8eUL50aWqnpOAOHNRqKduq\nFX+sXPnQtYODg2nZvDkBGRncwrqcsFqpJMXJicMnTuDj4/PEek+aNImtX35
JfbN1h9hUYK5OR0pa\n2ktuoZz3xixr8Da6d+8ePXp3p19wXQpUcOPagWg+aNuBW9dv4+Dg8O8FPMaLLLwmhEBSKOi1rj6H\nfrxEzLUkkiMzWL72F3oP6MXcBjuo2LkI5TsUZu+sMOoNK8no0aPp2b0Xffv2BSA8PBytXoPO3hq4\nlSoFDnn0D80uVCqVTxyD8Pb2lnfZeYcM6NWLwORkymDNhz9x/jwVSpbk1+XLKVOmDJeuXGGUxYIC\n64SkogoFBw8ezA7y4eHhVKtUCbfUVJAkRuj19Bk4kAIGA+WyrtHSaGT6mjX8njVD/G/Xrl2jtFKJ\nAmvmTFMAs5kDCQkMGzyYNRs3PrHeJUuWZK5OR5rBgA1wXpII8PV9BS309nnnB17/6cqVK3j4Omdn\nuxQN9MAujw23svoKX7fixYtT0MuH1f2Ok6eYA5kG8PP1p1q1anTu8CHl2hXmg/k1qD+sFN1X1OHg\ngkvYOumIi/vf1BZvb28KeBdkw7BTRF1IIOT7MBJvpz+yNIFMBhAVGUl+YB/WdWpaCYHvzZs0rleP\n69evo1GrScg61oJ14tM/++JHDx+OX3w87QwG2qWkUPD+fSaMHUu4EPyGdZu+VEBYLI9c29bWlhSF\ngmQg/z9ez2excOf27afWu3nz5nTo0YO5Wi0/2dsT6u7OH6tW/feGyEXkIP8PBQsWJOpyHHG3kgHr\noGVCZDL58+f/lzNfDaVSyc4tu6jgUo/Ls9Op4FyP4K27USqVZJgysPf4XwaOXR4dqQkZ2GhsH6qv\nQqFgx+ZgHO768EfrEzzYqSMkeN9//stElrvVCAzkmEbDWaA11sXEygMl0tJYs2YN02bMYJleT7BK\nxXJbWwqWKfPQxKG7ERF4ZnWZgDVYq7FOXKqIdXngxVi7Yf55Fx8dHY2/vz8WDw8SlUqOYs3HzwBO\n2tgQVL/+U+stSRIzfviBS9evs+PwYa7euoWvfCdv9Twd+C/zwRs68Dpn3mzh6GYvSgQVFo6u9uLX\npb/mdJUe69SpU8Ipj4PovbaeGHG6lShYJY9w83IRFy5cEL//8ZuoXqeqqFm3mli7dm1OV1X2FklI\nSBB1AwOFBkSfrEHMcSCqqFRi4sSJQgghDhw4ICZPniyWLFkiMjIyHjp//LhxwlevFyNBjACRD0SJ\nf5TTAYQTiEZ16mSfM+7LL4WdVivy2dsLd1dXMXjwYFG2VCmhUiiEWqkUHdq2zR4Alj3/wKsc5B/j\n+vXrYufOneLWrVv/6XyLxSLMZvNLrtWjgoODRYVq5YRvySJi2IjPREZGhli+YrlwL+Qq+m1uIHqv\nqydcPB1E3759xdq1a5+5Tvv37xedun4gOnX9QBw8ePAVfwrZm2jGtGnCXa8XLUHUViiEm6OjiIiI\n+NfzTCaT6NWtm1CAUIDIA6LJP4J8WxB57OzE/fv3hRBC7Nu3T+TR68WwrPdbgyhWqJAQQgij0SgH\n98d43iAvZ9e8ZFOnT2H8+PEY0420bNuSJYuWYmtr+9RzDAYDe/bsYe/+EGxs9PTp1YcCBQo88zUP\nHz7Mjp07cHF2Yd3m1RTtp6FcO2smwpFfrhAyPQwbnQ2lC1dk9Yq1T52xGhISQtv3W9NgbAkQEDw+\njA2rN1GrVq1nro8sd/jzzz9Zu2IFTi4ujBg9+qnZLf/fli1b6NS+Pd5mM9dMJupiXcHyoI0Ny9eu\npXHjxgDMmzePpcOG0SQrC8YMfCtJZJhMb/XGHq/S82bXyEH+JVq/fj0Dh/ehz4462OfVsbz7Ucq6\n1mTh/J+feM6MmdMZNXoUkkqgtVNTvJE317bFcvzIyWeajLFi5QoGftqPyj0LE3fVwJU9UTSZUIqa\nfa0TUfbNucCtY/fp/HMgM8pv55cf/qBu3bpPLK9Fu2Y4Nk+mWndrf+ahhZdI3+XK2pUbnq8xZO+8\nsLAwdu3aRWxsLJdDQ0EI+n3yyUM/f8H
BwXRt04aPsrJiLgJHvby4lbX/rOxRcgplDtoVspMqfX1w\n87EHoOFXJVjRfvcTjz906BCTZ3zLmEttcPa2Y/eM85z58yZluuRn9rzZTJ8y/V+v+fkXn9FtTSCF\nq1tnBy5qtYfVg4+SabRgzrSwdexp+m1piEqjxN3fiZiYmKeWZzZnotb97w5KbaMiOdP0LB9fJntI\niRIl/nU7yPr169OhWzcWLF6Mq0bDAyHYsnbta6rhu0HOrnmJPPJ6EXX2f2tS3zkbT568eZ94/KlT\npyjZwjt7fZhaAwK4fSoWew8tBkPyM10zJSkF16wvFQDnwnqKBXly91w89y4+IK+fI5HnE7i8J5Kr\n+yKpUqXKU8vr060/m4f/xV/rbnFmzU22jjxH3+4DnnqO7O125MgR/AsXxkGvp27NmkRGRr62a0uS\nxMw5czh29iy/bt7MtfDwf/0ZlT0fubvmJUpMTKRaYBXU+TKxd9dxYetdtm/e8ciG2X/buHEjn47t\nz6DDDdDYqAjbGsHyfodQmtWsXLr6oRX4nuTDbp24nHqKltPKEXM1iZ/a7KLX6roENLSmUW4YeYJd\nU8/j5e3BLwuXZC+/+jSr16xm9oJZKCSJT/oNoU2bNs/XELK3RmRkJCX9/GiQkkJB4LhSyQM/P86E\nhj6U4ih7c8h98jnMYDCwfv16UlNTadCgwVP71YUQfNitEyGHduHobcPNE9F4enkxecJ3vN/h/We+\n3oBP+rFt2zacnB1xcnTBtryRNjMrEh+ewvy6u1n+y6qn9sPL3l1r1qxhfI8etM3aFUkA0zQawiMj\n35k9UN82cpB/ywghOHbsGLGxsVSoUAFPT89/P+kp4uLiaN+xLQf3HkajVfPdd9/x8YBBL6m2stxm\nz549dGvVim4pKSix7uQ0V60mMTn5hVdilb0acpCXAZCRkYFarZb/5JY9lcVioVnDhlw7ehTPtDQu\n63QMHT2aEaNG5XTVZE8gB3mZTPZcMjMzWbZsGREREVSuXPmZxm1kOUcO8jKZTJaLPW+Ql1MoZTKZ\nLBeTg7xMJpPlYnKQl8lkslxMXtZAJsslrly5QkhICE5OTrRu3VpOgZQB8sCrTJYrBAcH817r1vgJ\nQYJCgVOxYuw/cgSdTpfTVZO9ZPLAq0z2DurfsyfNU1NpmpZGJ4OBlCtXWLp0aU5XS/YGkIO8TJYL\nxMbH8/dcaQlwS0/n/v37OVkl2RtCDvIyWS5QKzCQ/RoNmcB94IJOR+3atXO6WrI3gNwnL5PlAgkJ\nCbzfpg0hBw+i12qZNnMmvXv3zulqyV4BecarTPYOM5vNKP6PvfsOj6L6Gjj+ne0lvRBC6BBKIPTe\ne5feQVAQFBURAQsWEFCxoSKoKFVBEPmhUkQENdIEpEkRQm+hd8Kmbfa8f8wS4KWYACEQ7ud59jE7\nO+XOuJyZPbcZDGrMomxMzQylKA8xNS+q8v+pnLyiKEo2poK8oihKNqaCvKIoSjamgryiKEo2poK8\noihKNqaCvKIoSjamgryiKEo2poK8oihKNqaCvKIoSjamgryiKEo2poK8oihKNqaCvKIoSjamgryi\nKEo2lmlBXtO0Jpqm7dA0bZemaS9l1nEURVGUm8uU8eQ1TTMCsUADIA74G+giItuvWkeNJ68oipJB\n98tE3pWA3SKyX0RSgFlAq0w6lqIoinITmRXkI4BDV70/7F2mKIqi3EOZNTNUuvIww4cPT/u7Tp06\n1KlTJ5OKoyiK8mCKiYkhJibmtrfPrJx8FWC4iDTxvn8F8IjIu1eto3LyiqIoGXS/5OTXAZGapuXX\nNM0CdALmZdKxFEVRlJvIlHSNiLg1TXsWWAwYgUlXt6xRFEVR7o1MSdek68AqXaMoipJh90u6RlEU\nRbkPqCCvKIqSjakgryiKko2pIK8oipKNqSCvKIqSjakgryiKko2pIK8oipKNqSCvKIqSjakgryiK\nko2
pIK8oipKNqSCvKIqSjakgryiKko2pIK8oipKNqSCvKMpdIyJ8PPZTipatRMlK1Zk7d25WF+mh\nl1nT/ymK8hAa99nnvPrxBFx9PoOEeB59qg8+Pj40atQoq4v20FLjySuKctdEV67B1hZvQun6+oKf\nP6Nz4gZmTp2YtQXLRtR48oqiZBmb3QbxZ9Lea/FncNisWVgiRaVrFEW5a0a9MoQ2XXuQcOIAWuJF\nnL9+xqDlf2Z1sR5qKl2jKMpdtXLlSqZMn4nVYqZ/vycpVqxYVhcpW8loukYFeeWGRIQDBw4gIuTP\nnx9NS/d3SlGUTKRy8sodS0hIoG7dxkRFlaNEifLUqFGPS5cuZXWxFEW5DSrIK9cZPnwka9YcJyHh\nWRISnmXDhvO88sprWV0s5T5z4sQJRo4cxaAhL7FixYqsLo5yEyrIK9dZvXo9iYnFASNgJDGxOGvX\nbsjqYin3kbi4OAoWL8kbf+5hTJyThq3a87///e8/tzt37hzjxo1j9OjR/PPPP5laxsTERGJjYzlz\n5nnydL4AACAASURBVMx/r5yNqSCvXCc6uhgWyx5AAMFi2UNUlKo8U65o0aoNl0o3heemQOc3SBz4\nLQOHvnHLbc6ePUt0hcoMmbOC19eeolq9hixevDhTyrdx40ZyF4ykQoPm5MpXgDEfj82U4zwIVMWr\ncp3z589TvXpdDhw4iaYZyJXLn1WrYggKCuLAgQPExsZSsGBBChcunNVFVbKA2+3GbLND+1eg6wh9\n4eFYfN+sy4XjR2663ejR7zJs6b8kPzdNX7BuIZE/vcHOf9bf1fKJCLkKFOZY+7egVmc4eRDHK1VZ\n/st8ypUrd1ePlRUyWvGq2skr1/H392fjxjWsX78eEaF8+fJYLBamT59O377PYLHkIjn5KCNGvMHg\nwS9kdXGVe0zTNIyaRuovX0Kx6hCaFz5/isrly99yu9NnzpKcM/LKgvBIzp87d9fL53K5OHk0Dmp2\n0heE5kUrVZctW7ZkiyCfUSpdo9yQyWRi5cq/6Nr1caKiyjJu3Hj69OlHQkJ3zp/vTEJCL9544032\n7t2b1UVV7jGj0UjvJ/th8Q+CL5+F1+piP7KdGVMn3XK75s2a4FjyBcSugdNx2L4ZQotmTe96+RwO\nB74BgbD5d31B/Flk+6qH95eniGTJSz+0cr8aP/4zcTgiBHoLPCY2W6DYbKECw9Ne/v5F5Lfffsvq\noipZwO12y/tjPpL6LdrI4337SVxcXLq2m/b1N5IjbwHxCQ6VR3v3lYSEhEwp3++//y4+QaHiX6aW\n2ENyyvNDXs6U42QFb+xMd6xVOXnlhipWrMG6dfmAIt4lf2M0/kZqakegIHAUh2Mmu3ZtJ1euXFlX\nUOWuSklJYfLkyezZt5/KFSvQtm3bB7Yj3MmTJ9myZQu5cuXKVr1uVU7+AZWcnExMTAxJSUnUqFGD\nwMDALC2Pj48TuNIBStOSqFWrJuvWzcfjMePxJDB16mQV4B9wa9asofsTT3Hk8EHKVahESkoSWy4a\ncRWvjfP74fz19wY+GP1WVhfzln7//Xe+nvU9TruN5599mshIPe8fGhpKvXr1srh0WU89yd8H4uPj\nqVq1NgcOnEXTbFgsZ/nrr+VZkkMUEaZPn87UqdNZtmw5bndlNC0Vh2MLq1cvp1ChQsTFxREeHo7T\n6bzn5VPunmPHjlGkZGku9voUStbBMH8MsvgrZOoxMJnhwinMffNx6thR/Pz8MqUMLpeL0e99wLZd\ne6hSrjQDBzyHyXT9s2dsbCxTpn0NQI/u3YiKigLghx9+oHvfZ3C1fgnt4ml8fpvAhr9WZuv8e0af\n5FVO/i6Ii4uT77//XpYsWSJutzvD2w8bNlys1jICbQV8BYwSFBQu586dk8TERBkzZoz07dtPJk+e\nLB6P5z/3t3LlSunYsbt06NBNli1blqGyvPHGcHE6IwRaiMlUUpzOA
Hn22edkx44dGT4v5f42d+5c\n8avSXPhJ9NePHsHmFL45mfbeFhgiR48ezZTjp6SkSIUatcVWs4PQf5I4yjWQVh27XLfepk2bxBkU\nIlq7l0XrMFScgSGybt06ERGJqlBFeH1h2jlo7V+R5wcNyZTy3i/IYE5epWvu0OrVq2nYsBmalgeR\ns5QvX5QlS37GbDbfdJtTp04RExOD1WqlYcOG7Ny5l6QkJ/Ar0AUI5syZhXTr9hjnzp1j3br9JCX5\nMG3aD6xcuYaJE7+46b6XL19OkyYtcbmqArBgwSMsXPgDdevW/c9zERHeffc9kpKeBPxxuytgtc6h\nQoVyFC1aNGMXRrnvBQQE4DlxAFLdYDTBmaOQkgwr50C5xph++ZxCBQoSFhaWKcdft24dO+JOkjjm\ndzAYcNXswuK+eYiLiyMiIiJtvTdHf8Cl1kOh1UAALgXmYtg777NgziySkpLA50pqU5yBuBIPZUp5\nH1SqCeUdevTR3sTHN+DixbbExz/G338f4O2336ZYsVLYbE6io8sTGxubtv7OnTspWrQEvXq9Sbdu\nL1CmTCUqVSqH2fwvUAqIAGxAY5YuXcLq1WtJSjoNGEhKSmDKlGmcOnXqpuV5992PcblqAJWByiQk\n1OGddz5M17mICKmpbsB61TIbycnJGb8wyn2vdu3aVCyaH+fw+himv4rzjdo8/fTTlN40g6DhtamT\nvIvffp53WxWv69atY8ALg3nx5aHs2bMnbXlKSgopKSmAXg9lsDnB4A1DZiuayXLd9+1C/CUIygWp\nqbB7PSRc5Oz58wD0ebQbjolPw7bl8NcP2Od/wKOdOwJw8eJF/SbwkFNB/g4dPRoH5PW+M5KQEM7o\n0e8TG1uQpKT+bNuWk9q1G5CYmAhAv34DOHu2LBcvtufixW7s32/mxImTVKoUDRxBH0oA4CRJScne\noPsk0Bboi8fjYdeuXTctj/4P6OpfESZSUtzpOheDwUC7dh2w2+cBh9Bb1OylSZMm6b4eyv1twYIF\n1GzSghqNmjN//nx+nfcD4wY+zrCSVuZ89Snjx37Mpr+W8/Pc7zlz9iwVatWjzzPPkZCQkO5jxMTE\nULtxM8Ye9+fD3R7KVq7G1q1b6d6rD3anD3anD4/37UfZsmXxT7mA6dvXYfsqLF/2o1hkIfLly3fN\n/np0bIt91mvwUjV4vxMsmcjOHTs4fvw4Lw5+geFP9qDo94Mps+wT5nwzhejoaGrUb0xQWE58/AN4\n6dXXL6eIH04Zye3czRfZJCdfq1YDMZlqCgwTeEFsthBxOHJe057c1zdCNm/eLCIihQqVEHjiqs9b\nSJcuPeTSpUtSrFgpsVgKi8FQUcAiUEog7Jp9GQyhsmrVqpuWZ/78+eJwBAl0FOgkYJc8eQrI7t27\n03U+iYmJ0r//QImMLCk1a9aXtWvXygsvDJFKlWpK164905WfTU5Olj179sjZs2fTdUzl3li4cKHY\nQ8KFwbOEId+JI0eEzJs377r19uzZI87AEGHgN8LHm8RWrbW079Yj3cepUq+R8MKMK3nyrm9K2crV\nxF6ugTDzgjDzvDhK15a3Rr8nhw8flpYdukjRspWkW68+N/3ONGrWQqjYQpjrFn70iLntoJuWqUP3\nnmJp1FuYmyJ8fUKchaJl5syZ6S7//Y4M5uRVkL9DR48elejocmKx2MVstsqAAS+I3R4o8Io3ML8k\nNpuvHDp0SERE6tRpKJoWJfCawEvicOSXCRO+FBGRZ555TiyWIIEogULeSliLgFmguEA7cTr95K23\n3pKxY8fKsWPHRETk3Llz0rFjN4mIKCBVqtSSUaNGicHgFAgXaCMGQxPJk6fQbVUKN2nyiNjtJQW6\nidlcU/LkKSjx8fE3XX/79u0SHp5XnM5QsVgc8s47797GVVUyQ6NW7YQB065UtL4wQ+o2b3XdeuPH\njxdbo15X1vv2nJistltW+v/22
28SXbm65ClaQgJzFxBGLL2yfb8vJDhvIeG1+VeWvfw/qdO0ZbrL\n3qpTN2HA1Cvbv71MipevcsN1wwsWET7ddmXdxz+Qp/oPSPex7ncZDfKq4vUO5cyZk3/+Wce5c+dw\nOBxYrVbOnTvPnDnTSUrKh9W6j169niB37tzMmTOHNWs2IGIHRgMe6tRpQZ8+T5CcnMyECZ/jdg8E\nHMBRYCrQHQgFfsFgWIjHY+HNN+ehaW6GD3+LjRvX0qVLD9atc5Gc3IQjRw6yZcsHOJ25uHixGwAe\nD5w+vZbDhw+n/RT+559/OHLkCKVKlbqmkutqZ86c4fffl5KcPAg97RPJ+fMzvJW7N07htGzZnmPH\nohGpCFxg5Mj3qVmzOtWrV7+LV125HUajEZJdsHY+JLngwklMRuN16zkcDji6C95pC64LULQyZqvt\nprn5zZs380j7Trge/wTsfjBpAHzxNLwwHRLiYcZrFC0bzZpda0mt2AIA0+615I0IT3fZK5WJ5tcf\nZpNQszMYzViWz6Bc6egbrps7d26O7lgJeaNABNuuvyjwSOV0Hyu7UUH+LtA07ZrOS1OmfEWrVj8S\nGxtLyZIlad68OQBjx04gIaEuUBJIArbi8ejb67l0DbB497Lfu97lfH9TRGJJSKgBVALA7f6d114b\nzt9/ryUlZQhwCpFgRHKQnHwcuJyfv4DbnUBAQAAAzz77PFOmTMdsDsPtjmPOnFk3DNp6e1y4Uk8g\niHhu+o/d4/Gwe/d2RNp6l/jh8RRm06ZNKsjfB/o/8Ri/tO+M5CoKAaGwJYYOn3583XqlSpUiadcG\n6Pku5CwEkwdRrVq1a9ZZtGgRCxcvISwkmMSEBBIiq8GEZ8Dhp99AXBf0/LlmANdFwkODMS3+HMOq\n77GE5cVxdAe9v/2GAUNeYt7PvxAUFMTY0aNu+j0Z/MJAlv21hmVP5kMzWymcN4JPZyy84bqTxn1M\nzfqN8GxaiJw7TkGnxrPPPnPnF/ABpYJ8JtA0jTZt2ly3XG9WebkS1Ap4MJttADidTurUqc/y5QtJ\nSqqAXgl7Dj3AasApDAYjqakhaftLTQ3i9OmziKQCi4DtQDAu11Gio4uzd+83pKREYDLt5pVXXsPf\n358VK1YwdeosXK4+6K14DtChQxcOHNhDUFDQNeUNDAykWbPm/PrrXBISSmI2HyI01EytWrVueN4G\ng4GwsAiOHduDPhxCMkbjIQoWLHi7l1K5izZu+gdzuUYkD54Nmoa2eAKTvp1Nnz59AFi6dCkTpk4n\n9t+taI2eQJp5A2NoPja9c+UhYPxnX/DiqHdxNXoay7Z/sW1ahFxywVsxUKgcbFoKb7eCT7eC1QEv\n1+LHDbtJ7TwS49bf8Py7jAtuN/UeaYMnT0mk92T2x8XS6JHWrF+1/JohCA4fPkzbrj3YtG4NOXLl\nZsr4TyhXrhwFCxbUf5ncQHR0NLFbNrFs2TIcDgcNGzbEYrHccN2HgQrymWzjxo388ccfBAUFMWTI\nc6xe3QWXKxnwYLf/xUsvLWTZsmX07z+YM2fOUKCAk8TE5eTMGcrevWc4e3Y6ImGYzf/SokULFi5c\nicsVBLhxONbSocPbmEwa8+b9ArQGCgP7iIv7lenTJ7Bv3z7Kli1LnTp1ANi3bx+alhs9wHuAbcTH\nXyBnztxUrVqdBQvm4uvrm1b+77//lrfeeofly1dTuHA53n57BHa7/abn+91302nSpAUGQyAezyXa\nt2+tWufcJ/YfiiO5aHXw/hKTotU4vOQTQG910/HxPiS0fwMMJyDRdWXDJBcpSYl8+eWXVK1alaHD\nh+N6fSnkK0kywNAa4LqkB3iAMg3ANxhOHICQPLB7LalTj4FPAKniIeH4Af2G0DsvDJoJwbmgcHlS\ndq1m+vTptGrViiJFiuDn50eDFq3YVawZnsk/Erf1T3r368WOzRtvGuAvCwsLo0OHDnf/Ij6IMpL
A\nv5svsknF663MnTtXHI4AsViqitNZTKKjy8uSJUukQ4du0qlTd1m1apVs27ZNHA5/gQ4CT4rdXky6\ndu0hefIUFJstWgyGwmIy2WTcuHHidrulf//nxccnQPz9g2XkyLdlz5494usbJJDL2xInr8BQMRiM\nkpiYeE15PB6PbNq0SRyOQIHnBB7xVs6+JPC6WK3lpUePXrd9vklJSVKvXhNxOHKIw5FbcuSIkAMH\nDoiIyP79+2XVqlVy5syZO7qmyn+7WQX7jBkzxFmolPD1CWHiQTFXai5tO3cTEZGKtRsIL83RKyon\nHhCcgUKX4cKAaWLwDxFLwdLiaNxLHEE5xGi1XekV+5OIpU5XMfkGCpMP68s+3SaYbcJzU4T6j+t/\nz03RP2v8pNB3nP53cITw/lp9Hd8gwSdQNLNF/IqUFWdQsJSqWEUvh8kiOPwEq0MwWaRR80duqxFB\ndoFqXXP/yJEjt8Dj3lY2w8ThKC6TJk26Zp3Ro0eLwVDlqmaSL4jJZBWzuexVy7pIZGTJGx6jQYNm\nYjA08K73hrdlTnHRNIuEh+eT+fPni8vlkrZtO4nJZBG73Vdat24nVqtDjEanQPOrjtNHChaMkjNn\nzkhKSkqGz3fMmDFitxcTeF1guBiN9aVBg2by5pujxGbzEz+/guLjEyh//PHH7VxO5T+sWLFCcuYr\nKJrBIAWjomXr1q3XfO7xeGTIK6+KZrYKFrsYcxcVR0CwLFq0SMpWr3PN8AB0Hib5ipWU8lVriLVQ\n6StB+qMNYrQ7xVqlpfDpVuHF2eIIDJHO3R4V7L5C0SqCzSnFo0tL/mIlRQvJI+QtIdTqKnzwt1C+\nmZCnuNDwCaFSS8E3WKj4iDD1qPD+GsE/h9B/kpAjn9ByoPDmEqFAWSFftDDliDDtuFijqsqoh7jV\nVkaDvOoMlYkuXDiL3jIGQCMlJYjTp08D+hRqgwa9yKhR7+DxnL9qq0u43R5SUq4ehTKYw4dv3FV7\nz559eDwFvO8MQDCwB5H8HD1ajE6dutOtW09+/vlf3O4XSEh4nF9/XcmUKRN5+uknsFrjuFyxajAc\nIC4ujrCwCGw2J2XLVmTcuPF4PJ50ne/WrTtISMiPPgE4pKZGsnnzFt599yMSE/tw4UIP4uNb0Lp1\n+3TvU0mfU6dO0bRVW451/xiZk8TeegOo17TFNb1HNU2jz+M9sfn4wbhtpI7fgevln2jftTtP9eyK\nY8pzsG4h/PktxgWfcOH8efbv3wf5ovVhDwDylUJSkuhaKoJcH7ejZMzH/PLTXH77cxk8/iF0Hgbv\n/cXeM/Hs3xWLvLMc3l0FNifaqBZY96wBg0lP7RgMkJIEvT+CwJxQpBI0fQomPAs+QdB7jJ76CckN\nnd6AoHAIyEFSu1dZsOT3LLrSDx4V5DNRnTr1sFj+ABKBw5hM29LGkHnppaF88cUPxMe3BQ4DPwGr\ngFnorWLWAHHARWAJbrebxMREnnvuBaKiytKgQTPWr19P5crlsVj+Qc+vHwJWA9XRx3xfQXJyOH/8\nsYzExOroefggXK7SxMQs5+23R1GoEPj6foOf32zgT5KSypKS8hKpqb3ZtGkbQ4a8zYABgxg79lMK\nFSpBkSKlmDp12g3Pt0KFMjgcu4FkQDCbtxERkQujMS9wOc9fiISEBM5lwrRvD7PNmzdjyFMMKj2i\nB+SGvbmUqrFv375r1tu1axeWyHIQ5n0wKF4drA4aNmjAuJGvUz5mDGE/DMdUsDRnX13M6XZvkvTX\nj7BrHaS6Mc4eQXT5ykye8Blxu3ewZc1Kqlatyqkjh/T8+7yPYOqLJAXnBxG4eBpGNIO/FyAGE0kX\nz8EjA6DOo/DyXDBb4cjOKwU8shuKVobEeLxNu8AvBPZsSFvFcGAz4WGh/BcRITY2lg0bNqT1OH8o\nZeSx/26+eAjSNWfPnpXGjVuI2WyT4OCc8t1336V9Fh6eX6C
fN00yRCC/N6/eQOx2PzEa8wv4C9gF\niktwcLi0adNRLJb83ry7TcAgYBaz2UdMJpu341T9q9IvHQSsomk2gTZpy83m8jJ8+JsiovdwXbRo\nkXz77bdiNFq8PXcvb19SoJkYDCax28O9qace4nCEypw5c2TJkiUye/bstI5ebrdb2rfvLDabnzid\nOaRIkZLy22+/eesABnr32VUCA3OkazRNJf22bNkijhwRwszzelplyhHBbJPFixdfs97OnTvFHhQq\nfLlXX++d5eIMDL5mhqacBSL1VMzl1E2NTmJx+olmNEqZKjXk8OHD1x0/JDy3YHUKZRoJpRvo+fO8\nJQVngPD0BGHYYsHhr6dmyjYW8kQJM84IuYvp+fbmzwpV2+rrF66op21qdxMGzRSiaurbVmkthmpt\nJSBH+H/24Ha73dKqYxexhYSLPW8xCctbMK1+6EGHyslnHY/HI5MnT5YmTVpKt249ZefOnTddN2/e\nSIFHrwqo5cVsziEWi126d+8ugYEhYjRWE2gmDkcO+eSTsWIwGAUu59HreYN6GW+PWLNAhECzq/bZ\nXSCPN8CbRB8moaAYDDbZs2ePLFu2TH7++Wc5ffq0pKamitXqEOjr3XaoQLBAV+/NpNNV+20twcER\n4uOTR3x9S4uPT4AsX7487dwOHDggO3bsSMvrv/feB2K1+oifXx7x8wuWFStWZPr/i4dR/aYthNB8\nQqM+ek67zqOSv7hel5OamppWWTnusy/E5hforeAMkV9++eWa/RSKLisMX5wW5M1N+sqIESNvWU8T\nEJFP6PDqlRtDmyGCX6hQsJz+vmxjod/nVz6v/5hQuoEYHb5iiSisV85GVhaqtNGHPP72nFCuiViD\nc4rZ6SeaySxFoqLlww8/TNfQGhMmTBBLVDXh+4QrdQzFo+/g6t4/7osgDwxHz0Fs9L6a3GCdTL0Q\nWWH06PfE4cgl0FYMhnri5xcsBw8evG69v//+2xtQ7d5gXV6czgAJDY0Qu724WCxVxW73lzZt2smj\njz4uP/74o8TFxQlYBdp5W8b4ChQRKOAN9rW9Nw0fgc4CPbxBukXaTURfv7X4+BSV4sWjxccnt/j5\nFZfAwByydetWbysdu+hDKgQIFBGzubDkyJFHoNVVQb6hGI2BaRWs0Fny5Yu85bU5evSobNy4US5c\nuJBZl/+hN3r0aDFUays8OV54Z7nw7Tkx2x0yYNCLYrLaxGSxyqO9+khycrIcPXpUYmJi5NSpUyKi\nt35q0a6TRFWoKk1atBR7cJhoXYaLuXEfCY3ImzaExmUpKSnX/BqzhoT/v2EL5gr+YYJPkDAnSShQ\nRvhg7ZXPnxwvYfkjZefOnTJ79mzp27evGJ1+Qu+PrqzzyT+Su0jUbV2LJ556Wnjs/Sv7Gr9DcPhJ\nbGzs7V/g+0RGg3xm5eQFGCMiZb2vXzLpOPeVDz74GJerFVAKj6cWCQmFmDlz5nXrvfji6yQl1QM6\no+frT5M/f17OnPEjIaEjycmNSUh4hA0btvD115Np0qQJNWrUQ+8NawZ+Qx9KuCvQEyiNfk8tBLQA\nYoDZ6G3my6P/77gIRAKlSUlJYs+eM8THP86FC504d64SPXv2ITHRBXQD/NArT49RvXo+vvvuaxyO\nGOBP4A8slpWI5OdyBSvk5cSJo7e8Njlz5qRMmTLXtMFX7q7y5ctjP/gPVG0LUTUwLJ5AWM4Ivlr4\nB+6vDuKeepz//bOfgYNfpF6zR2jQuAkR+fIzbvxnVKxRm0X2kvzbdjQxF2yULFaEIfmSGFYzP1vW\nr00bU/7ChQs0aN4Kq92O1eGk5+O92LFjB0kXzsKcd8B1Ue/tOnc0lGsMrvPwcjW9U9SsN/XesKeP\nYJz/MR+MHEZkZCQdOnRgwoQJTBz3CfbfJ8K5E+BOwTp/DDWrVb3lOf/666+8+PIrDB48mMq161Ow\nZBme6v884aHBsOI7SPK
OnrlyNkaHH4cOZWysebfbffmh9IGVmZ2hHszZf+9AamoqVwIfiBhxu68f\n5levdCwM5PO+1rB9+1I8nipcuWyhxMUdYt26dbjdbk6dSgAaAj+jj21T/qo95gG2ALHoN4EkjEYN\no3EbBgO43SdITT2BSHEslsVYrRe5cCE6rawiBThwYD1VqlRnxYr/IeIEwoFYOnZsR506dVi+/Hcm\nT56GyWSidOmePPvsS7hc5wE/jMY1lC1b4a5dR+X2NGjQgCFP9uLtpyMxOXwJDQ4kf9GiHI7qAv56\nRaWr5YtMHtud5CbPkDpsLe64WAa/VhtjzgKkdngNgMQiVdj0aBBLFs7H39//mmP06vcsy5MD8dTq\nhmf7Sr7ecJAZ5SvpzxFHd0O3QECD0vWh+1sYVnwHRhOe04fh8A7o7A8aPPPcALp16wrow2PPnj2b\nU6dO0a5WJWb1yQuaRtU69fnik2+vO8+UlBTGfjqO7+b+wKbYvaQ06gu7tsHBfTDga6YtHEPDcDOm\n43tw98mvV9ymJGJOuZQ2beB/uXDhAu269eCPxT9jslgZNXIkgwc+f9v/b7JURh770/sChqEPvvIP\nMAkIuME6mfZzJqsMHfq6OBz5vHnspuLjEyC7du26br133nlPHI78As8IPClms69o2uVRJ/uK3jmp\nuEBucTj85ZtvvhEfn3DR28G3FQgSyCf6SJcviskUIa1atZbcuQuLyeQjdruvNGvWQj799FNp166d\n9O3bV0aMGCVNm7aSjh27SO3a9cRk8vNWpA4Tk6mGNG78iHz00UdiMOTzHkevJL1ZGua9994Xs9kq\nFotdSpQoI3FxcZl9eZV0On/+vBw4cEDcbrf0fbq/mNoMujLsb493BM0g/C/5Smemxr3Fmqugngv/\nSYSZF8Rktd1wtNHgiLz6SJZ5ooTZLn39/pP0jk3fnteX13tMeOozMUREijMkTCyBoWIrUk5sZeuL\n3S/gmqGyU1JSpHr9RuIsVUssrQaIIzSXfDHhy5uOdOrxeKTRI23EXr6RYPcTxv17ZerCck30ss26\nKJrRJDExMeIXnEMsvgFi9wuQ+fPnp/satu/WQ6wNegpzEoUv94ojoqAsXLgw4/8zMgH3ahRKTdOW\nADlv8NGrwOfACO/7kcCHQO//v+Lw4cPT/q5Tp05a1/sH1ciRwwkKCuS7734gODiQ0aNjbjih8Isv\nDuLChQt8+eVEDAYjlSrVYtGi04iUAmaiD15mAAbicq3n55+XEBVViM2bfyQxsSA2Ww4cjvOcP/8+\noNG0aQsef7wnS5f2xO3Oi9udzM8//8HPP/+C2VwMi8VNgQIOxo79gBYt2uBylQfKAdOxWCwUKVKE\nr7/+H1999RX6gGiXs3i5OHXq+A3PdciQwQwY8ByXLl0iICDgtmYPUjKHn59f2sTbw197hXlVa3Dx\n+G4wWzFvX46EhnE+djWUqAkpyZj3b8LP5OH0Z31ILl4Tx++T6PBozxtO1J4jLCend6yCyEpg9Q5v\nkS9af5K3+8DoFfDjh/DNULSQXFwashgObsM68VlGvtaZbt2+JleuXGn7W7hwIf8cPc+lUSvBaCS5\n4ZM8P7gKffs8ccNz27FjByvWriNh3C7o4qc32wR9qIaQPHrTy0vnEM3A6rXrOHP8CCdOnCAkJOSW\nU3L+fzF//knSa7/pTTzDCuCq25vf//iTZs2apXsfd0tMTAwxMTG3v4OM3BFu5wXkB7bcYHnmQyBj\n+AAAHF5JREFU3eoeMP/++6/YbD7ep/Q+ojenrOx9mm4mHTp0lUuXLsmrr74u5ctXE5PJJr6++bw9\nYx3i719MDAaHXNt7taJAYdF7wA4Tm62YFC9eRvShDC6v00SaNm2VVoH2xx9/iMMRItBf4HUxm6tK\n/fpNs/jqKHfq7NmzMm3aNJk8ebIcO3ZMFi5cKI7AEPGt00l8CkVL09bt5PTp0zLoxZelV
aduMubj\nT246bMCKFSvE5hugV6h+Fqs/Rfceow9rUKebMPQnMVRvL5rdR/jmdNqvBVuTPjJu3Ljr9jdp0iRx\nNuhxpYJ0rlsMZrPs2rVLVq5cmVYxfNmGDRvEN39x/cm9eke9J+0Xu4VXftCf7Du9ofewrd1NajVu\ncdvXLKp8ZWHId1cmNK/RTj788MPb3t/dxH3Suib8qr8HAt/eYJ1MvAz3r4MHD0rXrj2kRo0GMnLk\n22nN0mbOnCmBgTm8TSFt3iaL7cRuD0gbBuDMmTNit/sKPCnwqre1zeUmjzkFel8VwFt4W8kU9r6v\n6m0lc3VTyDZStmxlCQvLIzabUxo3biHvvvueWCx2MRhMUrlyTTl58mQWXi0ls+zatUu++eYbWbx4\nsSQmJsqpU6euaS0zc+YsadK2o3Tq8fh1wyPs3btXejz2mJhsDrH6BUreyGKyfv166ffcQCldtZaU\nq1JNTA6fKzeBn0QcVVteN6SHiEhsbKw4AkOEkb8JM8+Lqe1giShYRGwBweIfVUmcQSGyZMmStPWT\nkpIkT2QxofUgvQVRwXJ6cHf6CwVKC437CoO+FUOn16VLz9sfh2nlypXiDAwRR4NHxadsXYkqV/GW\nk+XcS/dLkP8a2Iyek/8RCLvBOpl6Ie5HZ86ckRw5conRWEegszgckfLYY0/IzJmzxG73F1/f0uJw\n5JB8+QqLptkFbFKzZp20jiobN24UP7883gD9vDeHfzlgV/c2p3zJ+1mI6G3qm3pvCk4JCsrhnZqw\np0APsdmCxWJxeN+/KBZLJalfv6mkpqZeN7iZkj1Nnfa1WJ0+YvH1l7yRxSQ2NlY++3yCOHIXEgZ+\nI1rP98QnOPSGTQ+TkpLkwzEfScGSZaRAyTIyYOALYg8IFjq+LpSqJwSFC70+FEvDxyVvZDE5f/78\nDcuwaNEiCctXUMx2h5StXF3sITmFyXH6DWLUH+IbFJL2MLRmzRqx+wcJATn0jlP5SwtTj4o9R27x\nDwkTR+1O4qzVQYLDc8v+/fvv6Nrs3btXJk6cKLNmzbqms1hWuy+CfLoO/BAG+RkzZoiPT/RVgfll\nMRhMYrM5BZ7yLntF9LbuHQVeFputhPTr119E/v+T/GsCDoFu3u2eEqNRfwIHk5hMDm+QN3r3V1/C\nw/PL559/IZGR0VKkSCnp2rWrmM2VrirPUDEYTKo36kNi8+bN4ggOS6u81PqOkwLFS0reYiWF0Suv\nVNa2fVFeHvrqddvP+HamfjN4+09h9Aox5MgnNOp7JfVSs7MULVVWhr85It2jj86ZM0f8qre6so+f\nRGwBwWnt9MPzFxJe/p/+2TenhJyFhJe+F4vDR3bu3CkTJ06USZMmyYkTJ+7qtbqfZDTIq7Fr7gP6\nPe9yHbYVyIU+Fo2NxMTqLFq0BNAn8Zg2bTJ2+0z8/KZjsQh2+zx8fD7HZpvOpEmf43YnM2LEmxgM\nAiSgj2HTA7s9jjZtWqJpGuHh4RQvXoTIyEhSUo5wZeanU3g8GseOHePzz7/A3z8Eq9VO27adcLlc\nKPfev//+y5w5c9i8efNd3/f69evRyjSEPMUBkGZPc3D3TtwpKWC6UkkpRjNudyqgN//t9WQ/Ktdt\nxCvDR+Jq9zqUqAXFq+Pp9REc2qZvlJIMF06xZ+9ePp88lZ/mzb/m2Bs2bKBtl0dp3KYDs2d/z/nz\n51mxYgVms5mUHav1segBNizGajYREhJCSkoKxw7ug8qt9c/8gqFYNcyf92XYG28QGRlJ79696dWr\nF6Gh/z22zUMjI3eEu/niIXySv1G6pmfP3hIenleu9Ch9yptr7+9930oqV655zX5Onjwpa9askePH\nj4vL5ZLY2Fj54osJUrNmAylbtqLYbGECAwReFigkFouP9O37tLz11tvicEQIdBRNayJ2u483LVRI\noKqAj9jtOeXdd98VqzXA+0vAKWCWunUbZtFVe3iNH
feZ2IPDxK96a3GE5pIRb4++q/tfunSpOPMV\nE2Zf0p+M318jzoAgefeDD8VRoITQ8z0hMFzQNMkTWVw2btwoQRF5hdrd9afpck2EPCWuNL186nMx\nBIULH20QanYRStQSvtovvLdaHGG503LrmzdvFmdgiPDEx8LA6WILySWOwBDxL1FZ7KG5pEK1mmLz\nCxC/QiXFNySH/PnnnzJx0mQJCs8t2HyEwbP04007LpbQ3PLpp5/e1etyv0Ola+5vN6p43bp1q4SH\n5xWr1UdsNqf4+weLxZJTzOY84nD4y99//522vcfjkQULFsj48eNl9erVIiLy2Wefe3PtHURvP9/0\nqhRMXzEanbJ//34JC8vjTfXonxkMlbyDktUVaCDQXiwWp3Ts2MVb+dtZLo8zD+YbDkx1K2vXrpUy\nZSpJzpz5pEuXHmpIgww4efKkWH39hS/3pQ04ZgsMkb179961Y3g8Hun2+BPizBMpfjXaiCMwRH76\n6SfxeDzy7nsfiMHuIwz9SfhfsmhPTxCf4BxC2FXt6eck6ZWezfsLnYcJPoFisjslV+FiYvQNFD7Z\nfCXt0mO0PPv8CyIi8syAgUKXN698FlFUn2DkJxFmXRSfyDIyefJkefvttyWqQhXJH1VazP5B+s3j\nzSWCw0/MEZFi9QuU14aPuGvX40GR0SCv0jX3WJ48eZg2bRINGtRi/vxFdOjQDZvNRlzcfg4f3seK\nFX/idqeSmpoHkVDM3p+qoN+QO3d+lM6d+zF48NfUq9ecsWPH8eGHn+JyNQVKoPegvXqIgWOkpmp0\n6NDturbsmmaiefNmOBwbsNt3Aj+iaXbmzp3rXePyXJsRGAw5+Pfff9N9ngcPHqRevcZs2hTBsWMt\nmDt3K+3bd729i/YQOnLkCJaQCAjLry8ICscaEcnhw4fv2jE0TeObSV/yy7eTmfhcF7asW0PLlnpK\nz+mwYcxTDCq3BJMZafgEl5JS9KGDn4uG36fBsT16B+2//ge/TIAKLTBaraxf/gdFihSB43vTjmU+\nsZfgAL33rIiA4arp+04dgireOZHtPiSWrM/vv//OqLFf8G+zN9jf8T1STA69x2yZBjAqBp9UF3v+\n3cLIYa/f8NwSExPZsmULR44cuWvX64GVkTvC3XzxkD7Ji4j07v2kOByRAl3FYKgvgYE55Pjx4yIi\n0qhRC7l6JEmDoY489tgTIiKyatUqcTpzeptPDhcYIBaLXQoWLC7wmHfZi6I3w4wUfYRKh0CAWK0O\neffd965K1zQWH59A2bVrl2zcuFGsVqfAE959PCn6qJWXZ7UaLFarn2zfvj3d5zh58mRxOstf9Yvi\nVTEYTJKcnJwp1zS7iY+PF7/QMOGNn/Un3LeXiTMw5J40aV29erXY/AKEgDBh5gX9+O1fEfKV1J/O\n31kuBOTUUyfdRuqpmwKlhYLlxODwE5fLJUuWLBF7YIgY2w4Ra73ukjNvgbTK0PXr1+vNJp/5Unh5\nrmi+QaL1GasfZ/ppceYrJmWr1RIGfnPlaf/F74UKzfW/h/4kRctUuGn5t23bJqG584lv/mJi9QuU\nQS8NzfRrdi9xr3q8KrfH4/EwbdoU3O7nAQceTxGSkk6zYMECevXqxalTp9EHGru8fiAnT+qzSR0/\nfhyjMRR9fBqAQAwGC0899TjDh3+Iy1UTuIA+6UgUkArUAX4jKOgsgwcP4ty5cyxY8CsOh42cOevx\n3nsfUq9ebUwmJ0lJHu+24dhsOUlJmYnJlBdNO8GQIS9QrFgx0svhcKBp8eiVuhpwCZPJ9J8TMCs6\np9PJwrlzeKRdBxKTUzAbNObMmpH2qy4zvf/pZyR2GgFxsTCkMhSvBqt/gFd+hPzR+kpRNcBkgY76\neDcULAuDK2ENycXmzZtp0KABf/2xlPnzF5CSYqdQz4bs3LmTkJAQypUrx9KF8xj+7odcciXQ4tWX\n+OTzj7m0eBzJZ
0/Q96mn2HPgIBsvXTVjmus8hsPbMU4agHn5t4z/ftZNy9+2aw9OtRyKNO4LF07z\nxWvVaVi3Fo0bN87Eq3b/UkE+C+hpE7nqvWAw6Jmztm1bsmPHV7hcwYAbh2MN7dqNAqBChQqkph4C\n9gD50bS/yZEjlEGDBpErVwSTJ0/H6fRlw4YCxMUlo49UeQrYyUcfTaN798eYN28xInZcrqPoN4BT\nTJw4FRErsNBbrkfweM4SE7OEU6dOUbBgQUqVKpWhc2zZsiUREW+xf/+PJCXlwOHYwtChw9LOU/lv\nNWrU4NSRw5w6dYrg4GBMpnvzzzUlxa2PGtn3U9i4GFbNxWa1knj6qlRR/FkMIRF4ADYshj++gZQk\nDKT9Uqd06dKcOHGCNp26YixeldRDO2hepzqzpk2matWqLP5xTtruBj4/gN27dxMUFER4eDhr165l\naZPmuBLjwWDE/uO79OvVg/DwcJq+HUOJEiVuWv4927ciQ7vpb/yCSSnThK1btz60QV6la7LAs88O\nEIejgEAHMRprS2hoeFr3bbfbLYMGDRFf3yAJCAiVUaPeuabd+tKlSyUkJFw0zSDFipW64QBoO3fu\nlLx5C4vZ7BCDwSz16jWQYcOGidOZW/RZqCJFH0JhuEAt0Yc+eEP0WaGqCFjFZHJeN5lERl28eFFG\njx4tzzzznPzwww93tC/l3vnll1/EERIuvDRHeHWeOMLzS7du3QWbU58YpNkzgsUmjoBgfWiBwHDh\niU+ENkPEYPe5ZrKcoJwRwsjf9TTLbJf4FIpO90Bf69evl8f79pPH+jyV1sggPQqWKCUMnJ422Jqz\nYMkMDU52vyOD6RpNRP7jNpA5NE2TrDp2VhARXC4XTqcTj8fDxx+PZeHCX8mdO5wWLZqwb98+8uTJ\nQ8eOHdOV0vB4PLd8Kk5ISKB06YocPKiRlBSKwbAWjycBPXXiBJoDRYE5XB5nXrcPWApUo3z5I6xb\ntyLd53jy5Elee204e/bsp27dmrz00uB79vSp3F3z5s3jrY/GkZqaysAnezPx2++ICaigT7xtsoCm\n0eDcOlat/RvXgBlQSp+7mC/7E771Z7p16cywV1/Bz98fmZMM3u+0/fM+fNiyHP369cu0sm/cuJH6\nTVvgCc5N8omDdOvYgS/HfZJtBtHTNA0RSffJqCB/DyxdupT27TsTH3+BsLBc/PzzT5QurQfVDz8c\nwxtvvENyclGs1qNUr16cRYvm3XFa47vvvuOJJ4YRH98Z2I2einkMfUKQr4GzQCv0kScuoE8WYgDm\no2fxooiK2sK2bevTdbz4+HhKlizLkSOhpKTkxuH4h9atqzJjxtQ7Og/lzq1fv56//vqL8PBwWrdu\nfVv1ItUbNmNV+d5QrZ2+YOkUmh/9lU2bNhH3zEwoWEZfPutNOPgvdk8SdXOa2bN3HzsrdEdaPg/H\n9uJ4rSYxC3+kYsWKd/EMr3fhwgW2bt1KcHAwRYsWzdRj3WsZDfIqXZPJjh07Jk5ngHd8mGECbSUk\nJFySkpIkKSlJzGar6GPNDBd95MdQ+fXXX+/4uF999ZU4HBWuSsnUuqqly/MCJilUqISUKFFOypSp\nKBaLr+hT/4UJdBOHI7eMGfNxuo/3008/ia9v0auO8YqYTBa5dOnSHZ+LcvumTJ0mjpCcYmv2lDiL\nV5IGLVrddITJW5k5c5Y4wvPrU/y9PFfsoblk4cKF8vrwEeIoXkkfBuHF7wX/UH2avzmJYnb4yPr1\n6yVvkeJi9Q8Si8Mp48Z/ngln+XBBta65v2zevBmTKSdQwLukFC7XMubNm0epUqXQb8iXZ98xkpLi\nx/jxn9OwYcM7Om79+vXRtCHANvQn9APoQyUYgDiKFClGbOwWQL/RHzp0iAULFjB+/CRSUv6hb9+B\nPP/8c+k+nsfj4epZsfTjaN7lSlYQEfr170/i239B3ihwp7D6lcosXrw4w+Oid+7
cidTUVD6a8BEG\ng4GhX46nWbNmNGnSBJPJxIRJT3Ds9Dk8L8yAyIr6NH8iFCpUiP07tnHixAn8/f2x2WyZdLbKzah0\nTSbbtm0blSrVwuXqA9iBv4Df8PPLTXLyaYxGE5cuRQPVgIPAD+TMmYOjR/ff8bFXrlzJE088w4kT\nxxERUlIcQCCwj8WLF1CtWrVbbi8i7NixA5fLRcmSJbFarTdd9/z58xQrFs3Jk4VITY3Abt9Ew4bF\n+OmnOTfdRslciYmJ+Pj5kzo7AbzpP5+x3Rn3aEN69ux5V4/lcrkoWb4ShwvXJSWqNvbfvqJZkRx8\nP+Nrzp07h6+vr6qfuUtUTv4+9NxzLzB58rd4PGEkJGwH+gKhwFGMxqmkpjqAePR8eVmKFj3Bjh3/\n3HKfGzdu5IcffsTX14fHHnvsPwdkSklJ4ZdffuH8+fPUqlWLvHnz3nJ9t9tNq1btiYlZgdFoJyjI\nxooVf5A7d+6bbnP48GEGDXqZvXsPUKdOdUaNevOWNwYl85WuVI1thRqS2n4o7Pobx3tt2Lh6pd4j\n9S47deoUrw4bwa4DB6ldpRLt27SiedsOHI07jEGDiRMm0K1rl7t+3IeNysnfp5YvXy6vvvqq+Pjk\nvypvPVx8fCLEzy9IjMYqAnXF4QiQefPm3XJfixcvFofDXzStplgs5SVHjlxpQ7GK6HN89ur1pERH\nV5AOHbpe81l6jR07VhyOIqIPaTxcjMa60rBh8wzvR8lahw8flnLVaonBZJKgnBGyYMGCe3bsAsVL\nitZ3nN6UcewWcQTlkG3btt2z42dXqCaU96+jR49SqFAxEhK6AzmAIzgcs1i7dhXffTeb+Ph42rdv\n959plKiosmzfXozLY8uYTIt45ZXGjBjxJh6PhypVarJ5cxJJSSUxm/cSEXGc7dv/yVA+tFevvkyZ\n8idwEbAApYiI2Mrhw3v/Y0vlfiQi97QJYXx8PIEhobi/c+nzrwI+H3fls8eb8uijj96zcmRHGX2S\nV0myeyg8PJwvvxxP375PYzYH4Xaf5euvp1CiRAlGjHgz3fu5cOECEJD23u325cyZc4A+MNjWrdtJ\nSuoPGEhJyc/p09NYt24dNWrUSPcx4uIOozetbO7973zy5MncZm9K5rnXbcQdDgcWmx33ng1QuDwk\nuZC9G8mdu889LYeigvw91717dxo3bsyBAwcoUKAAwcHBGd5H+/at+fLLhSQkNAbicTg20KbNywCY\nTCZEUrnSkkYQcWe40mvz5m1Aa65MZnKcsmWjM1xW5eFkMBj4etJEevRtirFkbTz7N9Oqfi3q1KmT\n1UV76KggnwVCQ0PvaOaa998fTXJyCt99Nxubzc7o0Z9Qv359ACIiIqhbtw4xMf8jIaEYNtt+ChcO\np0KFChk6htVqAxLT3huNyYSGZv7gWEr20a5dW0qVimb9+vXkytWfmjVrZptepw8SlZPPhpKTk3nv\nvQ9YvXodJUsW47XXhuLj45P2+dmzZ0lJSSE0NPSm/+hmzJhB374DcLkqYjTG4+sby+bN67l48SJ9\n+jzLwYMHqVGjGl988Sn+/v433IeiKHefakKp3FRqaio9ez7B7NnfYTAYqVChAosWzcPX1/eG6y9e\nvJiZM78nIMCPgQMH4HA4KFKkBOfPV0QkL1br31Ss6Mfy5b/f4zNRlIeXCvLKTX3yyacMHToWl6sD\nYMJq/ZkuXcoyZcpX6dp+zpw59Oo1gosXveOXkIrJ9B6nT5/Az88v08qtKMoVqnWNclMrVvyFyxUF\n6B2UkpJK89dfa9O9vd1uR+QSVyYCSULEg8ViyYziKopyF6gZHB4iRYsWxmo9xOUJS4zGAxQqVODW\nG12lQYMG5M8fgNX6I7Aah2Mmzz77nBqPRFHuYypd8xCJj4+natXa7N9/GoPBit3uYs2aFeTLly/d\n+7h06RIff/yJd+iCGnTv3l21mFDSze12k5q
aqoa7uAMqJ6/cUnJyMitWrCA5OZlq1aqpXLpyT4gI\nLw59jY/HfIiI0LhFS76fPg2Hw5HVRXvgqCCvKMp9Z+rUaTzz9se4Xv8VHH7YPnmU7tE5+Wr82Kwu\n2gMno0Fe5eQVRcl0S5evxFW/L/iHgtlKYqsh/L4s/VNLKrdPBXlFUTJdvlzhWPb8Dd5f79rOtUTk\nCs/iUj0cVLpGUZRMd+7cOSpUr8VxWyg4AzHuWMmqmN+IiorK6qI9cFROXlGU+5LL5WLRokUkJSVR\nv359wsLCsrpIDyQV5BVFUbIxVfGqKIqipFFBXlEUJRtTQV5RFCUbU0FeURQlG1NBXlEUJRtTQV5R\nFCUbU0FeURQlG1NBXlEUJRtTQV5RFCUbU0FeURQlG1NBXlEUJRtTQV5RFCUbU0FeURQlG1NBXlEU\nJRu77SCvaVoHTdO2aZqWqmlauf/32Suapu3SNG2HpmmN7ryYiqIoyu24kyf5LUAbYNnVCzVNiwI6\nAVFAE+AzTdPUL4ZMFhMTk9VFyFbU9by71PXMOrcdfEVkh4jsvMFHrYCZIpIiIvuB3UCl2z2Okj7q\nH9Hdpa7n3aWuZ9bJjCfsXMDhq94fBiIy4TiKoijKfzDd6kNN05YAOW/w0VARmZ+B46h5/hRFUbLA\nHc/xqmnaH8AgEdngff8ygIiM9r7/BRgmImv+33Yq8CuKotyGjMzxessn+Qy4+oDzgG81TRuDnqaJ\nBNb+/w0yUkhFURTl9txJE8o2mqYdAqoACzVNWwQgIv8Cs4F/gUXA03KnPxcURVGU23LH6RpFURTl\n/nXP26+rTlSZR9O04ZqmHdY0baP31SSry/Sg0TStiff7t0vTtJeyujwPOk3T9muattn7fbwubavc\nmqZpkzVNO65p2parlgVpmrZE07Sdmqb9qmlawK32kRWdlFQnqswjwBgRKet9/ZLVBXqQaJpmBMah\nf/+igC6aphXP2lI98ASo4/0+qv4yGTcF/ft4tZeBJSJSBPjN+/6m7nkQVZ2oMp2q0L59lYDdIrJf\nRFKAWejfS+XOqO/kbRKR5cDZ/7e4JTDN+/c0oPWt9nE/PSmrTlR3R39N0/7RNG3Sf/2MU64TARy6\n6r36Dt45AZZqmrZO07Q+WV2YbCJMRI57/z4OhN1q5bvVhPIaqhNV5rnFtX0V+BwY4X0/EvgQ6H2P\nipYdqO/b3VddRI5qmhYKLNE0bYf36VS5C0RE/qvPUaYEeRFpeBubxQF5rnqf27tMuUp6r62maROB\njNxQleu/g3m49telkkEictT735Oapv2AnhJTQf7OHNc0LafI/7VvhyoRRHEUxr+DsEGxLYhNg903\nsAnGLWLbYLD4DGatdqMIFmWbPoNg06hRfIaFa5jZIuwgogxz+X7phsvwhxkOw5055SPJJvDZtbnv\n45rvJaqjJKMk2ywpUWm59oYvTGg+cuvnnoCdJFtJRjQ/Asx6nmmwkqwmWW/Xa8A+PpN/YQZM2/UU\nuO/a/C9v8l2STIBLYExTonoupRyUUl6SLEpUcyxR/cZFkl2aY4c34KTneQallDJPcgo8ACvAVSnl\nteexhmwDuEsCTdZcl1Ie+x1pWJLcAHvAuC2fngHnwG2SY+AdOOy8hjkqSfXq+7hGkvSPDHlJqpgh\nL0kVM+QlqWKGvCRVzJCXpIoZ8pJUMUNekir2BdrxP/0OGHDLAAAAAElFTkSuQmCC\n", 222 | "text": [ 223 | "" 224 | ] 225 | } 226 | ], 227 | "prompt_number": 179 228 | }, 229 | { 230 | "cell_type": "code", 231 | "collapsed": false, 232 | "input": [], 233 | "language": "python", 234 | "metadata": {}, 235 | "outputs": [], 236 | 
"prompt_number": 179 237 | } 238 | ], 239 | "metadata": {} 240 | } 241 | ] 242 | } --------------------------------------------------------------------------------