├── .gitignore ├── CF ├── cf.R ├── cos_iuf_sim.R ├── cos_sim.R ├── data_load.R ├── pearson_dv_sim.R ├── pearson_iuf_sim.R ├── pearson_sim.R └── pred.R ├── Linear Regression ├── LinearRegression.py ├── input.csv └── linearRegressionTest.py ├── Logistic Regression ├── Python │ ├── LogisticRegression.py │ ├── LogisticRegressionTest.py │ └── input.csv └── R │ ├── LogReg.R │ └── LogReg_function.R ├── Matrix Factorization └── mf_train │ ├── App.java │ ├── DataImport.java │ ├── RatingData.java │ ├── RegSVD.java │ └── randomMatrixGenerator.java ├── README.md ├── SVM └── SVM │ ├── SVM.R │ ├── SVMTest.R │ ├── SVMTestSimple.R │ └── SVM_func.R ├── kmeans └── kmeans.py ├── knn └── knn.py ├── mixture_model ├── __init__.py ├── gmm.py └── gmm_example.py ├── naiveBayes ├── Naivebayes.py └── data.mat ├── nnet └── nnet.py └── tree ├── dtree.py └── rf.py /.gitignore: -------------------------------------------------------------------------------- 1 | .RData 2 | .Rhistory 3 | *.pyc 4 | -------------------------------------------------------------------------------- /CF/cf.R: -------------------------------------------------------------------------------- 1 | #clear all 2 | rm(list=ls()) 3 | 4 | #load functions 5 | source("data_load.R") 6 | source("pearson_sim.R") 7 | source("cos_sim.R") 8 | source("pearson_iuf_sim.R") 9 | source("cos_iuf_sim.R") 10 | source("pearson_dv_sim.R") 11 | source("pred.R") 12 | 13 | # split data if data are not splitted 14 | if(!file.exists("train_data.csv")){ 15 | 16 | #split data into training & testing 17 | temp.data<-split.data("./ml-1m/ratings.dat", test.size=0.3) 18 | train.data<-temp.data$train.data 19 | test.data<-temp.data$test.data 20 | 21 | #rite train & test data to csv file 22 | write.csv(train.data,"train_data.csv", row.names=F) 23 | write.csv(test.data,"test_data.csv", row.names=F) 24 | rm(temp.data) 25 | }else{ 26 | #load data 27 | train.data<-read.csv("train_data.csv") 28 | test.data<-read.csv("test_data.csv") 29 | } 30 | 31 | #transform data 
in to user & item list 32 | usr.list<-usr.rating.list(train.data) 33 | item.list<-item.rating.list(train.data) 34 | 35 | #testing 36 | #c<-pearson.similarity(usr.list[['1']], usr.list[['4']]) 37 | #d<-cosine.similarity(usr.list[['1']], usr.list[['4']]) 38 | #e<-user.base.pred(usr.list, item.list, '4132', '1200', pearson.similarity) 39 | 40 | #run user based prediction 41 | k<-200 42 | test.data$pred<-0 43 | n<-nrow(test.data) 44 | test.data<-test.data[order(test.data$user),] 45 | all.user<-unique(train.data$user) 46 | pre.user<-0 47 | top.K.sim <-NULL 48 | 49 | #item base prediction 50 | for(i in 1:1000){ 51 | 52 | user<-test.data[i,]$user 53 | item<-test.data[i,]$item 54 | 55 | if(user != pre.user){ 56 | 57 | pre.user<-user 58 | #get all similiarity for user and other 59 | sim.list<-data.frame('uid'=all.user, 'sim'=rep(0, length(all.user))) 60 | 61 | for(user2 in all.user){ 62 | 63 | rowNum <- which(sim.list$uid == user2) 64 | if(user2 == user){ 65 | sim.list$sim[rowNum]<-(-100) 66 | } 67 | else{ 68 | #pearson 69 | #sim.list$sim[rowNum]<-pearson.similarity(usr.list[[as.character(user)]], usr.list[[as.character(user2)]]) 70 | #cosine 71 | sim.list$sim[rowNum]<-cosine.similarity(usr.list[[as.character(user)]], usr.list[[as.character(user2)]]) 72 | # iuf pearson 73 | #sim.list$sim[rowNum]<-iuf.pearson.similarity(usr.list, item.list, usr.list[[as.character(user)]], usr.list[[as.character(user2)]]) 74 | # iuf cosine 75 | #sim.list$sim[rowNum]<-iuf.cosine.similarity(usr.list, item.list, usr.list[[as.character(user)]], usr.list[[as.character(user2)]]) 76 | 77 | #default voting pearson 78 | #sim.list$sim[rowNum]<-dv.pearson.similarity(usr.list[[as.character(user)]], usr.list[[as.character(user2)]]) 79 | 80 | 81 | }#end if-else 82 | }#end for 83 | 84 | #sort list 85 | top.K.sim<-sim.list[order(-sim.list$sim),][1:k,] 86 | }#end if 87 | 88 | test.data[i,]$pred <-user.base.pred(usr.list, item.list, user, item, top.K.sim) 89 | }#end pred for 90 | 91 | #first 1000 sample test 
92 | new<-test.data[1:1000,] 93 | new$err<-abs(new$pred - new$rating) 94 | mae<-mean(new$err) 95 | 96 | #write result to csv file 97 | #write.csv(test.data,"user-based_pred_pearson.csv", row.names=F) 98 | #write.csv(test.data,"user-based_pred_cosine.csv", row.names=F) 99 | #write.csv(test.data,"user-based_pred_pearson_iuf.csv", row.names=F) 100 | #write.csv(test.data,"user-based_pred_cosine_iuf.csv", row.names=F) 101 | #write.csv(test.data,"user-based_pred_pearson_default_vote.csv", row.names=F) 102 | 103 | 104 | #run item based prediction 105 | k<-200 106 | test.data$pred<-0 107 | n<-nrow(test.data) 108 | 109 | #change user -> item 110 | test.data<-test.data[order(test.data$item),] 111 | #change user -> item 112 | all.user<-unique(train.data$item) 113 | #change user -> item 114 | pre.user<-0 115 | top.K.sim <-NULL 116 | 117 | #item base prediction 118 | for(i in 1:1000){ 119 | 120 | #exchange user & item 121 | user<-test.data[i,]$item 122 | item<-test.data[i,]$user 123 | 124 | if(user != pre.user){ 125 | 126 | pre.user<-user 127 | #get all similiarity for user and other 128 | sim.list<-data.frame('uid'=all.user, 'sim'=rep(0, length(all.user))) 129 | 130 | for(user2 in all.user){ 131 | 132 | rowNum <- which(sim.list$uid == user2) 133 | if(user2 == user){ 134 | sim.list$sim[rowNum]<-(-100) 135 | } 136 | else{ 137 | #cosine 138 | #change user -> item 139 | #sim.list$sim[rowNum]<-cosine.similarity(item.list[[as.character(user)]], item.list[[as.character(user2)]]) 140 | 141 | # iuf cosine 142 | #change user -> item 143 | sim.list$sim[rowNum]<-iuf.cosine.similarity(item.list, usr.list, item.list[[as.character(user)]], item.list[[as.character(user2)]]) 144 | 145 | }#end if-else 146 | }#end for 147 | 148 | #sort list 149 | top.K.sim<-sim.list[order(-sim.list$sim),][1:k,] 150 | }#end if 151 | 152 | test.data[i,]$pred <-user.base.pred(item.list, usr.list, user, item, top.K.sim) 153 | }#end pred for 154 | 155 | 156 | #first 1000 sample test 157 | new<-test.data[1:1000,] 158 
| new$err<-abs(new$pred - new$rating) 159 | mae<-mean(new$err) 160 | -------------------------------------------------------------------------------- /CF/cos_iuf_sim.R: -------------------------------------------------------------------------------- 1 | #similarity for inverse user frequency( modified cosine similairy) 2 | iuf.cosine.similarity<-function(usr.list, item.list, user1, user2){ 3 | 4 | user1.item<-user1$item 5 | user1.rating<-user1$rating 6 | 7 | user2.item<-user2$item 8 | user2.rating<-user2$rating 9 | 10 | common.item<-intersect(user1.item, user2.item) 11 | 12 | #return 0 if no common item 13 | if(!any(common.item)) return(0) 14 | 15 | #total user 16 | n<-length(usr.list) 17 | 18 | common.length<-length(common.item) 19 | common.item.rating1<-rep(0, common.length) 20 | common.item.rating2<-rep(0, common.length) 21 | fi<-rep(0, common.length) 22 | 23 | for( index in 1:common.length ){ 24 | item<-common.item[index] 25 | rated.times <-length (item.list[[as.character(item)]]$rating) 26 | 27 | common.item.rating1[index]<-user1.rating[which(user1.item==item)] 28 | common.item.rating2[index]<-user2.rating[which(user2.item==item)] 29 | fi[index]<-log(n/rated.times) 30 | } 31 | 32 | #calculate similarity 33 | 34 | rating1.sum.sqr<-sum(user1.rating^2) 35 | rating2.sum.sqr<-sum(user2.rating^2) 36 | 37 | #inner product 38 | rating12.sum <- sum(common.item.rating1 * common.item.rating2 * fi * fi) 39 | 40 | num<-rating12.sum 41 | den<-sqrt(rating1.sum.sqr*rating2.sum.sqr) 42 | 43 | iuf.cos.sim<-num/den 44 | 45 | return(iuf.cos.sim) 46 | } -------------------------------------------------------------------------------- /CF/cos_sim.R: -------------------------------------------------------------------------------- 1 | cosine.similarity<-function(user1, user2){ 2 | 3 | user1.item<-user1$item 4 | user1.rating<-user1$rating 5 | 6 | user2.item<-user2$item 7 | user2.rating<-user2$rating 8 | 9 | common.item<-intersect(user1.item, user2.item) 10 | #return 0 if no common item 
11 | if(!any(common.item)) return(0) 12 | 13 | common.length<-length(common.item) 14 | common.item.rating1<-rep(0, common.length) 15 | common.item.rating2<-rep(0, common.length) 16 | 17 | for( index in 1:common.length ){ 18 | 19 | item<-common.item[index] 20 | common.item.rating1[index]<-user1.rating[which(user1.item==item)] 21 | common.item.rating2[index]<-user2.rating[which(user2.item==item)] 22 | } 23 | 24 | #calculate similarity 25 | 26 | rating1.sum.sqr<-sum(user1.rating^2) 27 | rating2.sum.sqr<-sum(user2.rating^2) 28 | 29 | #inner product 30 | rating12.sum <- sum(common.item.rating1 * common.item.rating2) 31 | 32 | num<-rating12.sum 33 | den<-sqrt(rating1.sum.sqr*rating2.sum.sqr) 34 | 35 | cos.sim<-num/den 36 | 37 | return(cos.sim) 38 | } -------------------------------------------------------------------------------- /CF/data_load.R: -------------------------------------------------------------------------------- 1 | #import rating data from file 2 | 3 | split.data<-function(path, test.size=0.3){ 4 | 5 | if(file.exists(path)){ 6 | 7 | #import rating data 8 | temp.table<-read.table(path,header=F, sep=":") 9 | 10 | #only take value in coumn 1.3,5 11 | temp.table<-temp.table[,c(1,3,5)] 12 | names(temp.table)<-c("user", "item", "rating") 13 | 14 | test.size<-as.double(test.size) 15 | #get random index 16 | n<-nrow(temp.table) 17 | indices<-1:n 18 | test.indices<-sample(n, round(test.size*n)) 19 | train.indices<-indices[!indices %in% test.indices] 20 | 21 | train.data<-temp.table[train.indices,] 22 | test.data <-temp.table[test.indices,] 23 | 24 | #return list 25 | return.list<-list('train.data'=train.data, 'test.data'=test.data) 26 | return(return.list) 27 | 28 | } 29 | else 30 | { 31 | print("no such file") 32 | return(list()) 33 | } 34 | 35 | } 36 | 37 | usr.rating.list<-function(data){ 38 | 39 | #transform table into list 40 | usr.item.list<-list() 41 | user.id<-unique(data[,1]) 42 | 43 | for(index in user.id){ 44 | sub.table<-subset(data, user==index) 45 | 
46 | content.list<-list("item"=sub.table[,2], "rating"=sub.table[,3]) 47 | 48 | usr.item.list[[as.character(index)]]<-content.list 49 | } 50 | 51 | return(usr.item.list) 52 | } 53 | 54 | item.rating.list<-function(data){ 55 | 56 | #transform table into list 57 | item.usr.list<-list() 58 | item.id<-unique(data[,2]) 59 | 60 | for(index in item.id){ 61 | sub.table<-subset(data, item==index) 62 | 63 | content.list<-list("item"=sub.table[,1], "rating"=sub.table[,3]) 64 | 65 | item.usr.list[[as.character(index)]]<-content.list 66 | } 67 | 68 | return(item.usr.list) 69 | } -------------------------------------------------------------------------------- /CF/pearson_dv_sim.R: -------------------------------------------------------------------------------- 1 | dv.pearson.similarity<-function(user1, user2){ 2 | 3 | user1.item<-user1$item 4 | user1.rating<-user1$rating 5 | 6 | 7 | user2.item<-user2$item 8 | user2.rating<-user2$rating 9 | 10 | 11 | common.item<-intersect(user1.item, user2.item) 12 | 13 | #return 0 if no common item 14 | if(!any(common.item)) return(0) 15 | 16 | #assign each user's OWN mean rating as the default vote (bug fix: user2.mean previously used user1$rating) 17 | user1.mean<-mean(user1$rating) 18 | user2.mean<-mean(user2$rating) 19 | 20 | total.item<-union(user1.item, user2.item) 21 | 22 | total.length<-length(total.item) 23 | common.item.rating1<-rep(0, total.length) 24 | common.item.rating2<-rep(0, total.length) 25 | 26 | for( index in 1:total.length){ 27 | 28 | item<-total.item[index] 29 | 30 | if(any(user1.item==item)){ 31 | common.item.rating1[index]<-user1.rating[which(user1.item==item)] 32 | } 33 | else{ 34 | common.item.rating1[index]<-user1.mean 35 | }#end if-else 36 | 37 | if(any(user2.item==item)){ 38 | common.item.rating2[index]<-user2.rating[which(user2.item==item)] 39 | } 40 | else{ 41 | common.item.rating2[index]<-user2.mean 42 | }#end if-else 43 | 44 | } 45 | #print(common.item.rating1) 46 | 47 | #calculate similarity 48 | 49 | #mean over all item or similary item 50 | 
rating1.mean<-mean(common.item.rating1) 51 | rating2.mean<-mean(common.item.rating2) 52 | 53 | # center each vector by subtracting its own mean (bug fix: rating2 was centered from rating1's values) 54 | common.item.rating1<-common.item.rating1-rating1.mean 55 | common.item.rating2<-common.item.rating2-rating2.mean 56 | 57 | 58 | 59 | #inner product 60 | num <- sum(common.item.rating1 * common.item.rating2) 61 | 62 | den<-sqrt( sum(common.item.rating1^2) * sum(common.item.rating2^2) ) 63 | 64 | if(den!=0){ 65 | pear.sim<-num/den 66 | } 67 | else{ 68 | pear.sim<-0 69 | } 70 | 71 | return(pear.sim) 72 | } -------------------------------------------------------------------------------- /CF/pearson_iuf_sim.R: -------------------------------------------------------------------------------- 1 | iuf.pearson.similarity<-function(usr.list, item.list, user1, user2){ 2 | 3 | user1.item<-user1$item 4 | user1.rating<-user1$rating 5 | 6 | user2.item<-user2$item 7 | user2.rating<-user2$rating 8 | 9 | common.item<-intersect(user1.item, user2.item) 10 | 11 | #return 0 if no common item 12 | if(!any(common.item)) return(0) 13 | 14 | #total user 15 | n<-length(usr.list) 16 | 17 | common.length<-length(common.item) 18 | common.item.rating1<-rep(0, common.length) 19 | common.item.rating2<-rep(0, common.length) 20 | fi<-rep(0, common.length) 21 | 22 | for( index in 1:common.length ){ 23 | 24 | item<-common.item[index] 25 | rated.times <-length (item.list[[as.character(item)]]$rating) 26 | 27 | common.item.rating1[index]<-user1.rating[which(user1.item==item)] 28 | common.item.rating2[index]<-user2.rating[which(user2.item==item)] 29 | 30 | fi[index]<-log(n/rated.times) 31 | } 32 | #print(common.item.rating1) 33 | 34 | #calculate similarity 35 | 36 | #mean over all item or similary item 37 | sum.fi<-sum(fi) 38 | sum.f.r1.r2 <-sum( fi * common.item.rating1 * common.item.rating2 ) 39 | sum.f.r1 <-sum( fi * common.item.rating1) 40 | sum.f.r2 <-sum( fi * common.item.rating2) 41 | 42 | sum.f.r1.sqr <-sum( fi * (common.item.rating1^2) ) 43 | sum.f.r2.sqr <-sum( fi * 
(common.item.rating2^2) ) 44 | 45 | U<-sum.fi * (sum.f.r1.sqr - (sum.f.r1)^2 ) 46 | V<-sum.fi * (sum.f.r2.sqr - (sum.f.r2)^2 ) 47 | 48 | num <- (sum.fi*sum.f.r1.r2) - (sum.f.r1*sum.f.r2) 49 | 50 | den<-sqrt(U * V) 51 | 52 | if(den!=0){ 53 | iuf.pear.sim<-num/den 54 | } 55 | else{ 56 | iuf.pear.sim<-0 57 | } 58 | 59 | return(iuf.pear.sim) 60 | } -------------------------------------------------------------------------------- /CF/pearson_sim.R: -------------------------------------------------------------------------------- 1 | pearson.similarity<-function(user1, user2){ 2 | 3 | user1.item<-user1$item 4 | user1.rating<-user1$rating 5 | 6 | user2.item<-user2$item 7 | user2.rating<-user2$rating 8 | 9 | common.item<-intersect(user1.item, user2.item) 10 | 11 | #return 0 if no common item 12 | if(!any(common.item)) return(0) 13 | 14 | common.length<-length(common.item) 15 | common.item.rating1<-rep(0, common.length) 16 | common.item.rating2<-rep(0, common.length) 17 | 18 | for( index in 1:common.length ){ 19 | 20 | item<-common.item[index] 21 | 22 | common.item.rating1[index]<-user1.rating[which(user1.item==item)] 23 | common.item.rating2[index]<-user2.rating[which(user2.item==item)] 24 | } 25 | #print(common.item.rating1) 26 | 27 | #calculate similarity 28 | 29 | #mean over all item or similary item 30 | rating1.mean<-mean(as.vector(common.item.rating1)) 31 | rating2.mean<-mean(as.vector(common.item.rating2)) 32 | 33 | # center each vector by subtracting its own mean (bug fix: rating2 was centered from rating1's values) 34 | common.item.rating1<-common.item.rating1-rating1.mean 35 | common.item.rating2<-common.item.rating2-rating2.mean 36 | 37 | 38 | 39 | #inner product 40 | num <- sum(common.item.rating1 * common.item.rating2) 41 | 42 | den<-sqrt( sum(common.item.rating1^2) * sum(common.item.rating2^2) ) 43 | 44 | if(den!=0){ 45 | pear.sim<-num/den 46 | } 47 | else{ 48 | pear.sim<-0 49 | } 50 | 51 | return(pear.sim) 52 | } -------------------------------------------------------------------------------- /CF/pred.R: 
-------------------------------------------------------------------------------- 1 | user.base.pred<-function(usr.data,item.data, pred.user, pred.item, top.K.sim){ 2 | 3 | #get avg rating for pred.user 4 | mean.pred<-mean(usr.data[[as.character(pred.user)]]$rating) 5 | 6 | #get top K user id 7 | top.K.user<-top.K.sim$uid 8 | 9 | #get usr who rate pred item 10 | rated.user<-item.data[[as.character(pred.item)]]$item 11 | 12 | #common users 13 | common.user<-NULL 14 | common.user<-intersect(top.K.user, rated.user) 15 | 16 | if(!any(common.user)){ 17 | return(mean.pred) 18 | } 19 | else{ 20 | 21 | num=0.0 22 | den=0.0 23 | 24 | for(user in common.user){ 25 | 26 | user.rating<-usr.data[[as.character(user)]]$rating 27 | mean.user.rating <- mean(user.rating) 28 | 29 | user.item <- usr.data[[as.character(user)]]$item 30 | index<-which(user.item==as.character(pred.item)) 31 | item.rating<-user.rating[index] 32 | 33 | user.sim<-top.K.sim[which(top.K.sim$uid==user),]$sim 34 | 35 | num<-num + user.sim * (item.rating - mean.user.rating) 36 | den<-den + abs(user.sim) 37 | } 38 | pred<-mean.pred + (num/den) 39 | return(pred) 40 | } 41 | #end else 42 | } 43 | #end pred -------------------------------------------------------------------------------- /Linear Regression/LinearRegression.py: -------------------------------------------------------------------------------- 1 | import random 2 | import numpy 3 | 4 | 5 | class LinearRegression: 6 | 7 | # initialize 8 | 9 | def __init__(self, X, Y, alpha=0.0005, lam=0.1, printIter=True): 10 | 11 | x = numpy.array(X) 12 | m, n = x.shape 13 | 14 | # normalize data 15 | self.xMean = numpy.mean(x, axis=0) 16 | self.xStd = numpy.std(x, axis=0) 17 | x = (x - self.xMean) / self.xStd 18 | 19 | # add const column to X 20 | const = numpy.array([1] * m).reshape(m, 1) 21 | self.X = numpy.append(const, x, axis=1) 22 | 23 | self.Y = numpy.array(Y) 24 | self.alpha = alpha 25 | self.lam = lam 26 | self.theta = numpy.array([0.0] * (n + 1)) 27 | 28 | 
self.printIter = printIter 29 | 30 | # caluclate cost 31 | def __costFunc(self): 32 | "calculate sum square error" 33 | m, n = self.X.shape 34 | pred = numpy.dot(self.X, self.theta) 35 | err = pred - self.Y 36 | cost = sum(err ** 2) / (2 * m) + self.lam * \ 37 | sum(self.theta[1:] ** 2) / (2 * m) 38 | return(cost) 39 | 40 | # gradient descend 41 | def __gradientDescend(self, iter): 42 | """ 43 | gradient descend: 44 | X: feature matrix 45 | Y: response 46 | theta: predict parameter 47 | alpha: learning rate 48 | lam: lambda, penality on theta 49 | """ 50 | 51 | m, n = self.X.shape 52 | 53 | # print "m,n=" , m,n 54 | # print "theta", len(self.theta) 55 | 56 | for i in range(0, iter): 57 | theta_temp = self.theta 58 | # update theta[0] 59 | pred = numpy.dot(self.X, self.theta) 60 | err = pred - self.Y 61 | 62 | # print "grad" , self.alpha*(1.0/m)*sum(err*self.X[:,0]) 63 | 64 | self.theta[0] = theta_temp[0] - self.alpha * \ 65 | (1.0 / m) * sum(err * self.X[:, 0]) 66 | for j in range(1, n): 67 | val = theta_temp[ 68 | j] - self.alpha * (1.0 / m) * (sum(err * self.X[:, j]) + self.lam * m * theta_temp[j]) 69 | # print val 70 | self.theta[j] = val 71 | # calculate cost and print 72 | cost = self.__costFunc() 73 | 74 | if self.printIter: 75 | print "Iteration", i, "\tcost=", cost 76 | # print "theta", self.theta 77 | 78 | # simple name 79 | def run(self, iter, printIter=True): 80 | self.printIter = printIter 81 | self.__gradientDescend(iter) 82 | 83 | # prediction 84 | def predict(self, X): 85 | 86 | # add const column 87 | m, n = X.shape 88 | x = numpy.array(X) 89 | x = (x - self.xMean) / self.xStd 90 | const = numpy.array([1] * m).reshape(m, 1) 91 | X = numpy.append(const, x, axis=1) 92 | 93 | pred = numpy.dot(X, self.theta) 94 | return pred 95 | 96 | 97 | def main(): 98 | print "This is a simple linear regression test..." 
99 | # generate feature X 100 | x = numpy.arange(0, 20).reshape(10, 2) 101 | 102 | # generate sample response 103 | y = numpy.arange( 104 | 0, 10) + numpy.array([random.random() for r in range(0, 10)]) 105 | 106 | lm_model = LinearRegression(x, y) 107 | lm_model.run(100) 108 | lm_model.predict(x) 109 | 110 | 111 | if __name__ == "__main__": 112 | main() 113 | -------------------------------------------------------------------------------- /Linear Regression/input.csv: -------------------------------------------------------------------------------- 1 | 12.2809387398,6.39617970071,3.14991736586 2 | 12.9461699468,5.1611485311,7.16989810922 3 | 5.70577624272,2.92870908699,4.74300256643 4 | 7.583835754,1.38822347268,6.32250003081 5 | 5.95897244725,4.84527542612,7.27740247787 6 | 8.6603127905,1.58733925991,8.25059146398 7 | 15.2277444898,8.73562518167,6.99222607445 8 | 8.20301401121,-2.53123129641,6.56208036965 9 | 12.9961491986,4.82192825946,5.56684886751 10 | 3.70927535492,2.25574968928,3.02697407446 11 | 6.69150324382,0.382082690486,8.74110404174 12 | 5.94855028356,3.25250114101,3.56642455435 13 | 7.17953742678,5.65449820604,3.51260254618 14 | 9.02061858713,3.48100630296,4.15276054292 15 | 10.6038721879,1.84660448006,9.21141728002 16 | 8.08657183195,-0.498429019071,3.301444282 17 | 11.4294950605,1.6714315233,8.61069855069 18 | 18.911525084,12.196252869,9.0645428371 19 | 6.20152914164,1.98899736773,4.17786779115 20 | 9.96233178384,4.35588070914,3.69298162518 21 | 6.04550619136,0.47344139491,5.63268852182 22 | 3.4867474204,-0.31934116939,-0.891807795934 23 | 6.46097375004,3.80311198207,4.92482362859 24 | 14.8584156741,6.22133705108,5.18778399633 25 | 9.52217966045,5.76068650932,3.36388416652 26 | 10.0791942297,1.85577501068,5.5895806349 27 | 11.2646475515,6.29695451868,3.65759979702 28 | 10.0159393298,6.49593342489,4.31826442511 29 | 11.250602432,3.91574305903,8.53640567259 30 | 5.17084832932,1.40814342602,-0.617161368519 31 | 14.9984271486,6.29428746778,5.26561715989 
32 | 15.3102735851,9.28128180836,6.99212568428 33 | 7.2089195216,6.58028378511,4.21910854472 34 | 19.0424287234,10.358241462,8.08314887525 35 | 11.5598691292,3.45968878778,7.42754413181 36 | 13.7266240846,8.69943740082,4.92502363015 37 | 13.1773475168,12.2076798321,3.90777966378 38 | 14.3639589522,6.54423245323,8.80092597024 39 | 9.79489751701,10.6441288878,0.870162736755 40 | 14.3749163306,8.04632531674,12.1661393673 41 | 6.76183507375,7.23881282269,1.20626866188 42 | 14.4416502856,6.19740696113,9.6985650137 43 | 14.8168841236,7.13038261977,9.17868434937 44 | 15.6966881493,3.14551962301,7.01171975911 45 | 11.2356344536,3.10428093874,8.72872302973 46 | 12.366528698,8.91864370324,3.71549760764 47 | 14.2013218503,8.35842120099,7.54991680717 48 | 8.5942663928,3.05604966256,4.58105135057 49 | 10.9986312676,7.13604373369,0.184479280132 50 | 14.3620667879,4.00475176341,8.58356533377 51 | 6.79071105455,3.71039114168,5.84115394924 52 | 7.16900131916,6.69090882659,1.89632571825 53 | 6.96883769392,3.16371992857,2.81199485961 54 | 12.7701363259,4.84903993928,6.56580416588 55 | 8.01604007075,7.47762940938,1.09056107184 56 | 8.60950138203,7.24905773322,-1.51364197132 57 | 5.16856626659,1.05575477679,4.39685640073 58 | 10.7815148591,3.72103652584,3.77311800628 59 | 9.38278910188,1.79699814067,5.3588134541 60 | 4.27769572769,0.956667087146,2.21765030634 61 | 14.4657743894,8.62083916697,6.41080805853 62 | 4.97452300898,1.72335923348,4.49744393344 63 | 7.50051520531,6.71203243368,4.66258291639 64 | 9.61721934165,8.88984120831,4.0470616186 65 | 1.41187452227,-0.263090234745,-1.96915984929 66 | 18.3906659047,9.40885662614,10.0962176876 67 | 12.3566902705,2.16043739266,10.3888470738 68 | 11.6233719076,6.42323045198,4.53904077197 69 | 5.30295318496,6.44095339872,1.64118803068 70 | 8.44781507866,7.77419359342,-1.80948853246 71 | 1.57858822553,1.29481995283,-0.369779204329 72 | 6.97391193229,6.14933124388,3.53459697383 73 | 10.0417771225,6.7682208007,0.0985027674219 74 | 
9.62962893081,5.04367597493,2.72531322464 75 | 10.3849216103,6.67741850698,7.45355014781 76 | 11.3935609206,5.86460915507,7.16379482745 77 | 10.6034371572,6.98423093242,3.98394414992 78 | 15.4562969426,4.90433269271,8.52278909234 79 | 4.96643805149,5.08518557312,2.08929674532 80 | 6.72821120404,0.205911440095,6.83479099393 81 | 6.70315319287,2.42142506752,9.18386025292 82 | 6.18320297264,4.2705477489,0.306132334997 83 | 5.06003197955,0.669810336427,3.88599928773 84 | 12.2285294252,8.32469503926,2.86247107251 85 | 7.70878580127,10.2944912712,1.58728990274 86 | 8.6675280019,4.36269624671,2.00474439524 87 | 5.85920519926,2.15965363775,4.00115863819 88 | 17.0889329528,8.94792609893,8.67854949887 89 | 18.8507304487,10.3902475142,9.95391921759 90 | 8.71267198205,0.49313432209,7.86745832144 91 | 15.3784402103,6.10053680126,10.0465091515 92 | 11.0427333073,3.80238004072,5.58612502455 93 | 9.7128705718,-0.731050763111,8.76452707679 94 | 6.48863309833,1.85828985431,5.73179189774 95 | 11.5952864359,3.94742938086,6.92265847293 96 | 2.86393011582,2.85921112585,-0.773115089539 97 | 11.8064934642,7.42080976269,0.925811064595 98 | 10.90074637,3.796046387,3.18334691991 99 | 10.3073463188,2.595810334,7.73683224908 100 | 9.83366559123,7.87262205007,4.46472312324 101 | 13.3736257369,4.10969825207,7.35220056462 102 | 11.811482512,5.827470828,7.55478035804 103 | 11.3392711303,7.46071463376,2.85836462443 104 | 9.96101367862,7.12642573881,4.78588758854 105 | 5.94855088453,1.34744883134,5.58840160523 106 | 14.9288188609,12.3019764911,8.9034258992 107 | 10.5059559198,6.24050567173,4.11988925559 108 | 11.6891499622,7.42307430862,7.8968068423 109 | 6.03639720162,4.78069186774,2.71083026954 110 | 1.46849728748,-1.54811949671,-1.45182786393 111 | 3.73505700392,1.04790663712,1.35985495152 112 | 6.76505429802,2.79253437719,4.29395967473 113 | 8.1877188574,5.99567539327,-0.13831806083 114 | 15.1520999394,10.3959501375,9.21784662299 115 | 4.98631098978,2.57431703535,5.90422272588 116 | 
11.8848722325,2.75620397604,5.9558951491 117 | 1.57677740885,-0.779200537087,-1.10315812762 118 | 15.4397664367,6.04872191034,7.5857827128 119 | 4.75470922202,2.81830879217,1.72099785305 120 | 5.53739579558,4.10331232639,1.06466399923 121 | 7.79762109272,5.84484011038,2.41655104757 122 | 12.9637643024,5.18039842985,8.61108955569 123 | 10.8504490189,4.07464351497,8.63785978918 124 | 14.2435202652,6.40138307408,8.41541216594 125 | 8.88253729162,0.541971039948,9.60907450042 126 | 12.582806292,5.37297100334,9.32019098724 127 | 5.72019845262,-0.607166457447,6.20931833434 128 | 9.56214427538,8.55361212969,6.34389853886 129 | 11.4877901379,2.87180822598,9.00493565914 130 | 11.5392326939,11.754298065,2.90872835992 131 | 10.2864465256,8.31810430038,-1.48705246163 132 | 8.72449697029,3.34429234513,7.00559065005 133 | 11.1122356307,5.86188998912,6.85652130397 134 | 11.6394491895,7.24889730165,1.26208877955 135 | 12.1880395268,10.2123503229,2.63890732507 136 | 6.94907175912,4.20969474216,7.05374542303 137 | 16.1225488057,6.88453672647,8.88661718441 138 | 6.6595964975,-1.63086488384,6.15377005217 139 | 10.803777196,5.16944924876,7.68675855225 140 | 15.4387311596,6.06515064071,4.25521115202 141 | 13.0737947087,8.02830226886,4.4443692234 142 | 7.95881463027,-0.156612338875,5.37836913276 143 | 10.4305146709,5.84056069993,1.59145002986 144 | 7.21715767099,2.73958168659,6.00329831423 145 | 6.63097115718,3.48488691547,3.9734842099 146 | 3.39581209821,2.58865806993,1.15750512931 147 | 2.82429260577,0.607660247378,2.27286586185 148 | 15.569300927,4.55251874159,7.12805202523 149 | 11.6798604861,8.66807701755,1.28491595034 150 | 5.80286848495,5.3993238231,1.80405147887 151 | 4.04704004998,2.00288145114,4.92296822784 152 | 12.3997689173,9.42806002844,8.1464022819 153 | 11.0292905535,7.07231244914,4.02098312019 154 | 10.357276552,1.49533452236,9.36896775772 155 | 7.31097897408,2.69792566717,1.04187542767 156 | 10.4567354606,9.36618223255,3.40126166 157 | 
3.35822762927,6.0152843736,1.96537175643 158 | 7.45829798793,2.73488511288,2.70949164532 159 | 7.73446707096,6.98780186978,2.97528343777 160 | 14.18595617,4.28221124353,8.94686367628 161 | 3.31773186954,2.48010488862,4.60634695514 162 | 13.1938212418,6.54255565133,5.11750276552 163 | 10.2051666658,5.78707604366,2.89819047648 164 | 11.7148964078,2.46500958855,1.81830573294 165 | 9.84763905649,-0.290304326317,10.400967488 166 | 9.47654224021,11.723226816,1.64783948287 167 | 5.59464226208,2.95287314921,3.18614457277 168 | 7.94399345365,4.47501614748,2.71795357665 169 | 6.72502520594,1.90147717961,3.02957944993 170 | 10.1201250684,8.67679112979,1.51488560656 171 | 11.8439381586,4.58711121614,7.55777878985 172 | 6.23008836036,4.46476190599,2.12349594182 173 | 15.5310537309,7.12276603179,4.98649381124 174 | 7.48611170555,0.449895305878,6.29986627531 175 | 8.17941203899,3.50112872583,2.30690236478 176 | 8.46816038735,7.40755848038,-1.00833865777 177 | 12.0988148488,7.36475765505,3.35350644596 178 | 2.9166126846,2.77521629324,3.711880665 179 | 5.35893528334,0.820705685125,0.362410599022 180 | 12.5524097382,10.956277288,3.73815448875 181 | 10.4407201303,0.790478350091,8.0532226463 182 | 5.6521572374,2.99983764946,2.2949639714 183 | 8.0519698614,3.7438641111,6.50404615556 184 | 10.486940843,7.40016728111,4.35219768705 185 | 8.63662462446,2.92805864551,8.07857455132 186 | 16.1407776057,12.257679179,8.58183118298 187 | 5.4659634604,0.665832355926,4.04090084757 188 | 6.51537317238,6.07668009628,1.08409959604 189 | 6.26897463357,5.71741443945,3.49347044657 190 | 13.8920968513,2.68928188801,6.4201789826 191 | 9.71696506978,4.38407176126,2.5605584886 192 | 10.9633850299,2.99720120029,6.35172847122 193 | 13.6206695689,6.65320997212,5.87162888374 194 | 10.4778037876,5.30653270589,3.73661197727 195 | 11.4241778878,7.4932255776,5.64901577653 196 | 10.0073142402,10.2437869865,-1.04229389525 197 | 12.659567592,1.64283533432,7.81010686725 198 | 6.92636794003,7.91203046938,2.57107577674 
199 | 16.6643447517,9.97448884294,7.10123412634 200 | 14.4378501458,7.23935092951,8.69022360959 201 | 8.2875487477,8.79048229995,3.67689289205 202 | 5.98131038397,0.812619174058,2.70733713681 203 | 11.0164892868,3.2177733706,10.3470208784 204 | 10.3200789594,12.4504241225,5.59931142881 205 | 6.08100095336,8.18690503083,2.48058531393 206 | 12.7943602587,3.22224061839,3.0453105762 207 | 16.2244913245,9.91107228791,7.97227509454 208 | 3.5164916547,-0.101875903142,0.869133887684 209 | 7.58974766293,6.74295176143,4.61683279657 210 | 19.0381622253,7.40206646472,9.50503622207 211 | 14.7644296447,6.53882212558,5.41177777067 212 | 13.5687824811,10.9130526215,7.26864695467 213 | 14.9683022936,12.4134660734,7.71540170282 214 | 12.5880778214,8.91852777999,3.41240912292 215 | 16.5540724481,6.07777202709,7.21690412382 216 | 7.94935273616,4.35596645394,-0.503878771485 217 | 11.4148382274,6.04164260673,5.19852234471 218 | 18.4563254163,8.17693786439,8.34170735292 219 | 4.29256144837,-1.01386594886,2.09529648448 220 | 12.1921105903,9.55677384032,6.24174001585 221 | 6.14996131499,5.42226004466,2.35660830715 222 | 1.77621873932,-0.270876224221,0.423494990521 223 | 13.0424506474,4.80857312945,5.77248638506 224 | 4.23254953061,-0.528203024857,-0.586293875475 225 | 9.62861393031,8.54739514958,-0.979039407599 226 | 13.0285853586,6.83915608927,5.91915708547 227 | 12.2015979605,6.35180609654,4.1956455869 228 | 10.9646581865,9.52364390297,2.88163821407 229 | 5.40374339325,-1.3865483747,2.32409068353 230 | 8.79456479394,5.48630738871,1.95325283447 231 | 11.7601313283,8.00135502243,0.719784382717 232 | 6.95408958923,3.3681712228,4.97120138632 233 | 18.6766993152,9.19904436181,8.11508061415 234 | 7.3450681863,3.34241242761,5.64617915597 235 | 7.00562235538,2.33678709781,4.67915363308 236 | 11.3875826396,9.7205920848,4.49537934666 237 | 12.8184654806,6.37718288447,9.6857546935 238 | 7.50522532197,2.53195533749,7.9673939628 239 | 9.73322339465,8.18119451486,4.20008034632 240 | 
8.59670364789,0.401591983115,7.89300039729 241 | 13.4140332986,4.61764286652,7.3641136044 242 | 9.50763934836,4.3310437302,4.35628422738 243 | 5.86460577983,2.84131010465,3.80232882273 244 | 8.94076063382,5.48577076318,1.70869776832 245 | 12.0948644037,5.30094078852,4.68317153984 246 | 11.6653928715,5.84111254752,7.74011988546 247 | 9.05783890976,1.85207070308,3.95424159249 248 | 4.5627034081,-0.0927044512772,2.52841385113 249 | 19.5563662044,10.7089993782,10.4055190594 250 | 8.45717924729,-0.889539894696,3.8606867427 251 | 12.002763369,6.82581492998,2.34856967104 252 | 11.6494356636,5.80803162619,3.11808715073 253 | 14.3855990131,4.89953341767,9.39255333597 254 | 10.9710903166,4.62381885558,8.94467524486 255 | 10.054817252,3.24202500476,4.69469904373 256 | 11.2910204408,8.61204599421,0.323842289643 257 | 15.2078582539,5.98402814978,2.53067960805 258 | 13.7587644447,9.65208157357,6.69219045041 259 | 10.8361173351,2.19125581144,7.05213949162 260 | 17.4977837689,11.6907660684,10.6065791347 261 | 11.551180515,5.13598201911,9.74878736919 262 | 11.3245948494,1.22576952649,10.9555155592 263 | 17.5435526503,8.43091617465,7.08921618462 264 | 9.23878035351,7.19692177503,2.51293597677 265 | 15.196997898,4.66006313016,9.33249559461 266 | 5.30334308977,-0.336271509855,0.824916993186 267 | 17.116621455,8.52832586478,7.39975581958 268 | 17.7585134713,6.40951972224,8.10449362611 269 | 12.1353700991,3.92714271635,6.22733603125 270 | 9.55889965916,6.34036370328,1.7980527589 271 | 8.23327712457,7.01909967315,3.60748960989 272 | 14.7918863618,4.58678742745,9.33129936762 273 | 12.5741227527,5.92460965808,1.49965994508 274 | 7.11474088229,3.40377173206,4.9637073926 275 | 16.0171895596,8.52090097722,5.55630875544 276 | 10.3709523615,8.7689788091,3.14783678278 277 | 15.8770364603,3.01043785071,7.54133096717 278 | 14.0833296449,10.4101988734,4.31563094444 279 | 6.21423389777,2.23482457086,1.80888540766 280 | 5.60995799543,3.96103089743,5.9367176671 281 | 
4.25723282489,4.61547482839,2.23537092349 282 | 13.9369457918,6.14014208097,11.8048426081 283 | 11.9874221533,4.10153685689,6.64919938184 284 | 8.59661482619,1.96201766549,4.11960742065 285 | 18.7791913267,10.5671284118,10.2894261046 286 | 9.76479632122,7.13653423999,7.40705487599 287 | 10.1532077684,7.09323676761,5.39364477532 288 | 3.54410457599,3.33929079043,4.37664183779 289 | 8.49276822997,3.79345774637,3.49538961686 290 | 8.86020563801,3.86781794411,5.19054562241 291 | 5.57695195246,-1.09374306068,5.33214712709 292 | 3.11648626513,2.17364226208,-1.84954210373 293 | 10.9066782413,9.07304316593,2.57355935988 294 | 13.0977259115,4.95655556601,7.76908190686 295 | 7.18604109606,0.676498428716,7.73143599884 296 | 16.756762319,11.2218094736,7.90107918941 297 | 15.9669585084,10.4346764773,6.74800870601 298 | 17.9923451305,10.2706395533,8.25206166764 299 | 16.9943101522,9.89521883234,8.98442343088 300 | 12.881493557,5.69126451254,5.01631149556 301 | 8.41869721738,8.06021827162,0.935624313544 302 | 8.54562854241,6.88794003538,0.958902639837 303 | 10.6220951902,7.92067004666,6.67513002798 304 | 8.92292715222,1.66167977735,5.73715650117 305 | 9.84602890309,7.09726244064,1.65861191837 306 | 10.5511418578,10.7821381032,2.28258789106 307 | 10.7104213523,8.24306081527,6.98266226938 308 | 18.3239942727,9.08165913551,7.71484643763 309 | 8.02920220975,1.87077634923,5.44130135917 310 | 10.9884031443,-1.10446489574,7.41025235773 311 | 11.1212207119,9.35889786039,4.11952184438 312 | 5.24871026137,5.08655665672,1.22770662029 313 | 3.43852006905,1.79375573635,6.25342969799 314 | 5.79513292683,1.78810610206,3.49205519578 315 | 15.4234990035,8.05202705914,8.40323435209 316 | 16.003543786,6.3572082385,8.93289295834 317 | 14.1079279846,10.582754795,8.19094919007 318 | 9.08424521561,3.26925461883,3.2233659432 319 | 9.03110095021,5.29947168236,2.50050139066 320 | 14.1755001646,6.59518266096,9.60158868974 321 | 11.3960470019,2.32655807409,3.49620340629 322 | 
13.5552414598,6.26846104747,5.18735478035 323 | 8.52289295172,4.04997198447,3.98156331769 324 | 18.6039545478,8.60213368757,7.64827416994 325 | 11.0455886216,7.68522622821,-0.54463047832 326 | 9.96241299827,7.26264927258,6.9271690376 327 | 10.9968334938,8.36180594133,2.35448631121 328 | 13.9537822056,4.26099401571,9.07891343849 329 | 15.6083130177,5.72357054516,8.09600484592 330 | 4.97321351264,-1.71984389515,3.11742373975 331 | 4.16865513862,1.3894615338,5.64328056524 332 | 12.440439148,8.79518243264,8.15243094314 333 | 3.33351422605,1.57798901574,1.95525942935 334 | 11.0613006736,3.54981724405,10.5547670008 335 | 13.0386092571,8.86130143732,3.59453521941 336 | 4.05879930023,1.75862816597,3.90857597576 337 | 12.6180861694,6.08284644471,4.45898430167 338 | 18.7128690334,7.75524126148,8.17761653711 339 | 9.85902829618,0.863113175646,8.51562754245 340 | 1.26378377576,2.08324442814,2.73567896239 341 | 14.9886246329,8.36469109765,12.4142554275 342 | 6.80437320963,3.17931497172,3.23633334292 343 | 16.7562317906,8.54664951925,9.90229884277 344 | 7.51790477762,0.98450936025,5.88279923063 345 | 7.60573715332,2.2075810015,9.73950174298 346 | 16.0228593172,5.97495919186,6.58935600268 347 | 8.40167943381,4.84985840838,0.0990422879527 348 | 3.96678981905,0.881712355366,5.56434415424 349 | 14.8754326475,7.93715914659,3.3719629368 350 | 11.0093219453,6.96609667874,6.26722427203 351 | 11.3825540274,5.10038441064,7.87584326723 352 | 8.95152947723,4.89124907846,6.09698096149 353 | 4.66244628013,2.99192052957,2.48866256667 354 | 3.9847171575,3.8550685004,6.29286036437 355 | 11.2588434668,10.1742841327,2.53923042142 356 | 10.8205063252,5.46982077242,11.5367212308 357 | 9.4730288842,3.51651327073,0.878522550695 358 | 10.9149352332,7.58442947892,-0.0169368145907 359 | 6.45134625583,5.85987141951,0.905726182686 360 | 4.84474539012,1.70037944656,-1.80648666951 361 | 1.50115705837,-1.05976378267,-0.585588575741 362 | 5.28335557154,4.42920495214,4.01564828912 363 | 
12.8413031802,4.17291255574,9.42918051148 364 | 16.6478202567,11.6767174016,9.18019360854 365 | 14.876509042,5.75606515479,4.60433259472 366 | 8.35299798266,6.3249125288,-1.67247708296 367 | 9.55233793533,6.13829964006,-1.09695996555 368 | 16.1262551916,11.5385051956,7.6058631901 369 | 12.5706478722,3.30878039696,5.43209887239 370 | 8.52867650196,7.48599530809,1.66341972704 371 | 17.8180271599,11.81249399,11.2308847779 372 | 7.70700905311,2.62069255292,4.81786181073 373 | 15.7724541685,3.67640026234,5.79803101641 374 | 11.115140209,3.5640141636,10.2347782339 375 | 11.1291384668,8.39612360289,1.1381124705 376 | 14.3028895011,6.36274780966,9.57613599974 377 | 8.05244433805,6.57986375467,5.24517451417 378 | 8.86135056229,6.37634137477,0.91380234482 379 | 11.372136922,2.70882992612,9.82099289847 380 | 8.18082460911,0.680936500852,4.95270712892 381 | 2.77029817461,0.711370750735,0.482785237524 382 | 14.491702228,7.92566597899,7.03822429969 383 | 13.9576446119,7.08192769932,2.12320156993 384 | 3.27810370468,-0.270088176478,-0.968383062396 385 | 7.20379687706,4.61287357874,2.37382921506 386 | 5.81081559635,1.84456612332,4.00655248773 387 | 11.9722265591,8.81275757634,3.31939881928 388 | 10.4541587118,3.33420717957,5.96519867456 389 | 9.98221438184,6.33032012386,8.61909093452 390 | 15.8607640229,4.25983213233,8.3911999583 391 | 14.7253732346,8.08035813365,3.80575471766 392 | 8.8433560395,6.79866724177,1.59358577875 393 | 10.6861379956,3.36127264298,8.41093011693 394 | 10.0124947023,4.45211176537,8.02580946618 395 | 17.3041984918,9.35692178864,4.490521773 396 | 13.7690534436,7.24677407459,4.06389063006 397 | 17.1523568176,8.30587059532,8.43810265512 398 | 6.49748823701,5.3605286086,1.31177690155 399 | 4.02240420174,1.61120808095,3.90804272923 400 | 2.10847971242,2.89130992681,2.71790467013 401 | 9.79582909642,9.55981578852,2.12193515513 402 | 8.98914581014,8.28136265509,1.3578929662 403 | 7.49282872645,4.22820071291,4.07997593764 404 | 
14.2547316384,11.7678239519,3.69818424487 405 | 13.0554162452,9.19528722464,2.61872220821 406 | 1.27592877809,3.72617703244,0.509172826799 407 | 11.7118745766,8.98576102702,1.98250009059 408 | 4.83733678997,0.458418286392,3.47630232265 409 | 13.8872055372,8.93840957006,6.44611335447 410 | 7.32931005503,5.36675528765,1.01001182021 411 | 11.2552304531,1.41855773778,11.2038214005 412 | 3.05875514743,-0.274756140268,1.45756712614 413 | 11.069169257,6.35680075064,6.44338627088 414 | 7.02249550197,4.48824704589,3.38202929101 415 | 8.8270466709,1.00279684976,5.54744371262 416 | 10.6787249317,7.40161990063,6.46182422603 417 | 11.3463317832,2.84518792737,9.91006547565 418 | 8.39187302787,4.78820897777,4.02966519603 419 | 5.53504548591,6.53128808476,1.20945247543 420 | 8.53927056995,8.04341838378,5.75801839377 421 | 14.8558033869,6.82926530389,9.49901243652 422 | 11.4276487298,0.731565426005,10.3555639977 423 | 14.4441648893,5.70321344558,6.02328043955 424 | 3.31134586092,2.75432726845,3.42186089338 425 | 12.6070599275,6.15316822779,8.15286916951 426 | 15.8084970776,10.8335366691,3.04589605556 427 | 14.5992433316,11.3317370584,6.21851584226 428 | 5.95203911133,1.07330821203,6.48045788209 429 | 9.72472923362,4.91972210921,3.32559231593 430 | 8.86342599191,1.10966740681,7.42193781673 431 | 7.90820422284,8.70759144555,1.63920325329 432 | 3.44619599007,2.61025134262,-0.335035787608 433 | 6.34922135103,3.20652958302,3.50056801947 434 | 9.49636252221,-0.644705580326,9.30560930125 435 | 17.1599487945,8.37754292495,11.5826084832 436 | 12.7609732673,9.57090324615,3.60092079536 437 | 4.47352673146,0.115561396187,3.26328856394 438 | 13.583762249,2.18414233257,7.15211475094 439 | 10.1023663551,3.02850208064,4.48068204318 440 | 17.6036802814,7.98187588814,7.28014814406 441 | 5.72216628452,5.28368518868,3.10296223977 442 | 17.903406629,7.68256052992,6.67492337567 443 | 2.98846626137,0.829241939641,1.00665796117 444 | 12.211574951,4.2953323853,11.0950308999 445 | 
12.313371576,9.49883959094,4.57357533074 446 | 11.4890217625,0.831608872987,8.12990645066 447 | 13.4372484156,7.26977963528,6.93778608426 448 | 10.6286449981,10.3580561965,1.87312281619 449 | 4.73473316073,5.3622434081,4.33114898486 450 | 14.1575316631,9.64803537037,8.7657884635 451 | 12.4329094182,9.20600774611,4.96683732745 452 | 13.0281009924,8.56878392409,5.79487129099 453 | 8.82561072278,1.70531137084,3.0452066245 454 | 10.6533620152,9.08941994497,0.384012934086 455 | 17.4984237017,8.17553539006,8.88271481266 456 | 11.2899871665,10.3860655019,1.89910511556 457 | 0.882235177526,2.51248007352,1.35540209432 458 | 8.37022171502,1.3534967351,7.14590851821 459 | 6.64660718636,5.82428411413,0.80486542196 460 | 8.05635959232,1.77128714139,3.70288404718 461 | 11.916069662,1.93686251641,6.79494611682 462 | 8.3331114287,9.36607336874,-0.209388170241 463 | 8.82646615563,4.58974942648,3.83923246275 464 | 13.4363791241,2.44730682248,5.59616737409 465 | 16.8101384327,7.55015042733,10.8699320712 466 | 11.4212623517,7.46604141902,9.1106949312 467 | 14.9490686366,3.11003956831,6.99902563048 468 | 10.8245522422,7.4724850205,2.03481124404 469 | 7.4586153788,3.76481050411,3.40249737706 470 | 7.72686789881,5.9994842537,-0.728453949167 471 | 12.6585426212,7.3513377336,1.77123948123 472 | 10.847416317,6.06148243478,7.2967217356 473 | 11.3191239475,6.42756195075,2.2654389781 474 | 7.00848605133,2.70146669158,5.69743023171 475 | 15.2492532108,4.92199660659,10.5716132723 476 | 14.3283637621,11.0816990509,3.98346722429 477 | 2.924463628,0.567486380577,1.26214147335 478 | 9.722962902,6.06918484291,2.58507570444 479 | 2.40991359395,0.805645145317,-0.0950464571311 480 | 9.9015808113,8.38788303412,4.59095952792 481 | 7.91244971245,2.81112520956,5.89129334919 482 | 9.62892549665,9.43202527894,-0.224603079926 483 | 9.23260400235,6.14839529718,1.39989492642 484 | 6.78151477824,1.36971167619,6.78970024999 485 | 2.11961538789,-0.0716752172511,-0.226301262099 486 | 
7.79290134309,2.35447043402,4.20465285953 487 | 3.31503463329,1.00849510701,3.45417626642 488 | 1.55008777272,1.77425536902,1.76484417879 489 | 16.1508105028,7.07929639222,8.23252592208 490 | 7.21669148803,3.18666596768,4.1253443015 491 | 11.4144543047,9.50733954594,1.35931539079 492 | 10.4670625681,6.05757888361,-0.297234833828 493 | 7.16859842408,6.18183857332,3.30026190574 494 | 8.57170844557,9.84541341911,1.45730813218 495 | 9.28112089116,2.97663040665,6.30796751661 496 | 12.4722868471,2.97561503918,11.0178752627 497 | 4.79833118067,1.98931379136,0.916787461797 498 | 17.6348306598,10.0035362202,8.05781198915 499 | 7.48115235375,2.44991841449,4.48492789489 500 | 9.79386808018,4.44649014833,7.46289199033 501 | 13.7098458119,7.11432840546,8.10392551602 502 | 18.464752393,7.90881508465,9.07983274058 503 | 7.36528789448,3.61274244039,4.39263680122 504 | 8.63635948196,2.33526507223,1.11901866744 505 | 16.4016569856,6.73957202145,8.47189751733 506 | 5.79258061562,3.72397701289,2.99035230956 507 | 12.8844634526,5.90675486463,10.1553452464 508 | 9.59864195181,2.41078322736,6.44878522739 509 | 5.4744103257,0.229044332788,5.32417228635 510 | 7.77681715749,9.55261943895,3.4413866057 511 | 4.8971318191,5.93669227053,2.18290763498 512 | 11.1728638037,5.27321555371,5.54560884301 513 | 8.09605505802,0.0693321945633,5.42121289023 514 | 14.6426499308,8.21678635393,2.296559303 515 | 8.05618069641,1.70121700518,4.36222931498 516 | 9.75887169137,1.24397675776,7.20388099618 517 | 10.0461488431,0.540419315744,7.58176459161 518 | 11.9228542649,8.60059327398,3.97886389952 519 | 6.0173424046,4.82382398725,2.81666830989 520 | 11.1321362005,7.7509603461,3.86539815899 521 | 2.71543930479,-0.0299303069217,1.0942351912 522 | 7.89845629909,4.30775063425,7.48095652907 523 | 14.4463732307,12.1529507742,7.63880584643 524 | 15.1922326612,4.60541168131,6.21146490572 525 | 5.9972062532,5.92753086663,3.30921133207 526 | 7.68817900784,-0.242117455093,5.98080805184 527 | 
10.0849589797,0.548509231216,10.9560102139 528 | 3.96979271081,1.48406983758,0.463532518417 529 | 4.62191416501,3.33763283069,-0.206781614387 530 | 14.0389499879,4.75226043149,7.64215968624 531 | 11.3979483128,4.3278692732,5.4971177988 532 | 14.0103833685,8.4148001709,7.87819253575 533 | 8.80131290765,2.94109657226,6.39913100436 534 | 10.9587703659,0.567689534421,8.06990400404 535 | 4.85401341233,4.20372402212,6.78664576498 536 | 10.5703426887,6.91790529181,-1.44668845281 537 | 6.52405933523,1.13490009017,6.9398496858 538 | 8.20467773996,5.35194309059,5.00602108753 539 | 4.91555027501,0.383881443864,2.89705710635 540 | 6.67176827851,4.73621287306,-0.634327395766 541 | 15.2944038934,6.99853126563,7.75473562401 542 | 4.76544738996,0.567461641575,4.97341036103 543 | 8.20248070204,5.95024086696,-1.98735618509 544 | 7.69495809513,7.15095562613,0.074004326647 545 | 18.4472638618,9.53882599867,8.79172231744 546 | 3.11956843384,0.8482058479,-0.762935001817 547 | 9.92398805334,5.8996819595,2.51988984057 548 | 11.2556594909,6.93089173041,3.46677405418 549 | 7.33020339986,4.5127906948,5.02266099204 550 | 13.809413376,6.49277535468,3.3462062237 551 | 9.7108696197,2.0484646575,13.8000975128 552 | 9.44514669102,0.329933063623,6.42650162186 553 | 10.1131491823,1.32834792992,6.58969492867 554 | 11.9336320621,2.43018434428,7.14356151571 555 | 13.0693709962,4.75521016428,9.10478394999 556 | 6.10321913413,0.0265397463491,3.78419760756 557 | 9.80941958906,5.98254137853,1.81231912698 558 | 8.04470579948,3.84177291999,5.25751522618 559 | 4.09629589985,0.361235314719,4.14029587204 560 | 9.19608762701,8.78408556204,1.95387434528 561 | 10.2978240222,6.06936055521,3.78114166485 562 | 5.92802205835,2.82096460089,4.95041141598 563 | 11.6273931265,4.87808443518,11.0648974926 564 | 12.7897694835,3.1598518581,5.90871343084 565 | 11.6031752997,0.474024357521,10.3270912882 566 | 10.9204868203,4.42052445221,2.73782771642 567 | 13.4306841889,8.59293910979,3.68509430466 568 | 
10.8055894372,4.95682243509,3.69614164867 569 | 9.32527671103,3.46974355533,3.24840339127 570 | 6.08206055822,-0.937287939406,3.97972188241 571 | 13.7286095221,11.4336143064,5.34066168163 572 | 10.2648535451,5.8946121702,4.63554907699 573 | 10.5605089445,5.633603328,2.06106326903 574 | 8.12295650525,3.70298826279,0.772251641739 575 | 7.09434960466,-0.0452149259417,4.15257337236 576 | 11.4428525496,7.27723232422,2.77213714197 577 | 8.09857070905,8.37380261465,2.06277663162 578 | 14.7411609583,5.85683019716,8.15945690256 579 | 3.57735252853,1.90171895596,0.725602989708 580 | 2.85624323696,1.47188318959,-1.35779763186 581 | 5.48253791977,2.37899133672,3.83836329453 582 | 2.16993119205,2.18990638091,0.401213103229 583 | 2.82688471224,-1.18230720854,1.7001139941 584 | 9.16015366622,6.26554390001,9.48260432176 585 | 1.84378624085,-2.10446806645,-0.210290973998 586 | 16.9928137503,8.96300571019,8.95079712534 587 | 13.8464536998,6.09254622115,5.44718604963 588 | 18.3236588261,9.59134790389,10.9654191743 589 | 3.65273168329,-0.637019926317,2.43107396537 590 | 2.33663231929,1.69505698161,-1.61250390317 591 | 8.43761261077,-2.36364304713,7.81640340561 592 | 10.3965202646,7.8820322948,4.64735726963 593 | 9.54946990528,8.94979594159,-0.891169860756 594 | 12.5097079068,4.04682997609,7.04184870216 595 | 6.00328725628,2.44844180022,1.82241306579 596 | 15.9872864389,4.94464784717,6.80379198293 597 | 8.40699623921,1.43158711419,8.32921772007 598 | 10.0869753216,4.45545350342,3.30718102857 599 | 15.8704711111,6.70756443491,8.58853556363 600 | 16.1329682099,10.0797511292,8.84232931781 601 | 10.2556355911,7.46938595154,3.41416072605 602 | 8.45108999454,3.75832087307,4.44479414435 603 | 8.84055854097,4.7653926125,1.30870847115 604 | 12.0201051255,2.20772595813,9.25149695147 605 | 10.916804415,3.57331008171,9.09877882312 606 | 12.6668488773,3.31989924226,6.73772567791 607 | 11.0219972139,6.7173827822,3.11868864709 608 | 2.16151769505,-0.73830405424,0.144736825815 609 | 
11.629267318,7.08707381266,5.3252231827 610 | 4.34468964173,0.76019840254,1.03521907566 611 | 8.724964788,7.59472532628,1.69559306488 612 | 2.36840973759,3.25873410482,2.41001932239 613 | 12.9511142017,7.44554845254,8.2834338584 614 | 8.55402271007,4.19466883338,3.00678725878 615 | 11.0150991585,2.13960317806,8.94199241681 616 | 13.6824426815,8.57475253411,6.62453442235 617 | 6.87029117204,4.81602061524,2.46783615408 618 | 10.4218294266,7.08762695559,2.57366190669 619 | 15.5136543811,9.43668175057,9.96598488158 620 | 4.54462496468,0.961203169885,1.43517649038 621 | 13.9687716454,8.03113106378,8.73570682159 622 | 8.1505336714,2.62455523778,5.79758203856 623 | 12.6386935846,9.00796316949,2.04060632066 624 | 10.6354383052,3.56967610309,7.10875273679 625 | 15.2436488233,6.95152518922,9.98388418066 626 | 16.1655424979,5.55850615972,9.53191881186 627 | 5.43951004308,5.54390015903,2.16530576584 628 | 10.4969758577,5.12660920413,6.85581250195 629 | 10.4046458457,7.1046426052,2.21502410778 630 | 10.8769841097,-0.58938447986,11.8971477359 631 | 14.7078255875,6.02756670989,7.02998980821 632 | 16.8088382966,5.8420312104,8.99751477573 633 | 12.3674036195,1.966077086,6.76827617221 634 | 9.01532445784,2.14599010676,7.92027290392 635 | 8.86426382764,5.56305009978,3.72177668814 636 | 9.56669267986,6.81317272593,4.87216322366 637 | 7.34604545609,5.86442522358,2.95537477454 638 | 7.79009141191,8.35927004961,1.785210479 639 | 8.64449392839,10.3296467788,1.22329320311 640 | 10.445847737,-0.359604542217,8.31738878343 641 | 18.2020760294,8.70178151612,7.97315375915 642 | 11.959104409,5.90809461709,2.20877086222 643 | 15.8854793843,8.95490622553,5.9429228791 644 | 1.74453299239,0.514093574872,0.953167229452 645 | 15.7614187196,8.87555719768,8.36278583297 646 | 17.134881597,10.5182502077,8.4550620714 647 | 1.43764585784,0.763831187734,-0.884090619624 648 | 1.99816171245,0.557888912668,-0.607010596305 649 | 15.4758806438,10.261220986,9.46627602047 650 | 
6.8332028579,2.8133761403,5.7465401592 651 | 6.32168255239,2.94143516325,2.54057959811 652 | 11.1059135724,4.6949371434,8.62782331087 653 | 15.695478538,11.0982281722,11.8345998343 654 | 3.8393821853,-1.85171736514,2.97193321989 655 | 14.2317208378,10.6797607976,7.30032581193 656 | 14.265319233,1.86845529796,7.62989894292 657 | 12.6454956125,2.62872137135,9.58775105519 658 | 7.93648670081,3.05966866376,2.75581959911 659 | 5.02218551329,0.637159465278,2.36640621561 660 | 9.52053220688,1.33623278707,7.03480965256 661 | 6.84618966172,1.95190797154,1.32655388762 662 | 11.8334823992,7.46494782413,8.38089543267 663 | 2.12287569737,0.0644465625484,3.92116877056 664 | 11.9130564127,7.12991288487,6.42819495556 665 | 15.4484080059,8.33869059507,7.5000373273 666 | 12.6839714103,2.75137115376,8.30527243164 667 | 16.9828383361,10.8035929702,12.0688459808 668 | 8.00125167475,-0.0935059547074,8.81659076293 669 | 4.82377656346,0.840735454102,0.776664802652 670 | 5.84291836392,0.538630276669,4.81325578723 671 | 8.85853507357,8.48330705,1.64122048886 672 | 10.3117343509,12.472645592,2.41480770268 673 | 18.9452466176,9.30662598908,6.85431614503 674 | 8.33489134545,4.95340770495,10.3556303311 675 | 7.52592572046,7.41113541814,9.88965856156 676 | 15.6748315605,9.81357971961,4.30314056306 677 | 11.3369987197,6.96841789714,5.92540424523 678 | 5.19901578827,3.08275428469,6.94081400602 679 | 9.12281288484,4.90934147472,5.23210308854 680 | 3.97447310334,2.37102196189,-0.0988526016566 681 | 11.2134460803,6.36150975948,4.08323106241 682 | 4.11842888949,0.170703484265,-0.584673592798 683 | 8.65036717702,2.18503151266,5.62256989519 684 | 7.16702900809,5.60851090582,3.92162562137 685 | 10.1108827624,4.93591685729,1.34539543815 686 | 9.62453398242,7.83577019661,2.67243066998 687 | 5.13968754187,2.34183855011,5.69326596373 688 | 5.40828298448,1.89331351819,5.82370823131 689 | 12.7698841863,6.32190460645,9.31298786296 690 | 5.53785192569,3.80150702221,-0.398412092005 691 | 
9.7013021368,4.9548450873,7.19075579184 692 | 16.9030026407,6.99352793696,8.97919136313 693 | 6.24389741415,5.54608230328,0.465147695064 694 | 8.52399988271,1.61647807014,3.85029490856 695 | 16.8607958448,9.18365299646,7.35846251852 696 | 16.1916714379,10.1513087188,7.46998858231 697 | 2.73968352113,-0.483485060096,3.82727802803 698 | 7.17016977139,3.93153506387,4.42666866109 699 | 15.4725488834,10.1396060413,6.35399652779 700 | 3.21657068215,0.549702073103,3.33436509304 701 | 14.1335889578,9.59435241118,8.42971949838 702 | 17.9587848528,9.53128982967,9.68450271387 703 | 11.1923462917,6.59964547062,2.52425474638 704 | 2.47337155407,1.2895805481,-0.4161484118 705 | 14.8581067121,7.89357589388,7.37182914743 706 | 13.2341267877,5.90805540421,6.52106834901 707 | 13.0251793946,5.04328854287,10.9116905285 708 | 7.8616963918,-0.0440756528868,8.79430216288 709 | 10.1419285174,0.476275276602,8.01747486787 710 | 12.1313368064,11.4836686824,6.45177595549 711 | 7.2067209484,6.74807889288,5.22695969846 712 | 2.74310632382,0.702207425862,-2.06527653932 713 | 9.81212068358,5.19995865435,5.72636506246 714 | 18.0750862545,8.36826715724,9.32928173318 715 | 16.2620969709,9.25508537555,8.08703132564 716 | 8.43322452643,5.76389746059,1.29245705702 717 | 1.00936500108,0.600863614956,3.60453851873 718 | 11.6131444554,8.95570763854,2.85839118271 719 | 12.7807655068,5.83405500827,7.80447303157 720 | 7.63728780564,3.92476110115,2.42416598425 721 | 9.70564563721,8.17473251965,0.480802840976 722 | 8.68591867443,7.96826064607,1.71078923542 723 | 6.27816380428,-0.805032306443,2.04642737886 724 | 9.76859316374,6.14631241341,5.7835031285 725 | 8.83951425175,2.20903236757,1.68682356549 726 | 8.46144815572,2.30478656963,2.25273158826 727 | 6.44510438188,2.08764438676,2.9902824738 728 | 14.6296468539,9.07703450758,4.52687508673 729 | 7.30415688423,0.300218859299,5.3543457922 730 | 12.9421974356,7.08626950311,4.27402819261 731 | 13.1334025623,1.33611888204,6.49399522169 732 | 
11.2744434923,1.52717844729,9.67845667821 733 | 4.81441240105,-1.16864803853,1.11918522388 734 | 5.95726978659,2.41102688321,1.42978895018 735 | 7.03259781077,1.88973115982,6.8760557644 736 | 10.8849083885,7.64189158494,4.33336469651 737 | 14.0847973865,8.77781395835,8.31996118082 738 | 7.92619005398,5.55647445857,1.91969239723 739 | 16.9570403167,8.18180591308,6.71229779383 740 | 9.79015652781,0.202263309488,6.62407662096 741 | 13.1279953655,5.79340717399,1.28362195114 742 | 7.8539280691,1.65095583988,7.50478281987 743 | 10.2622399645,3.44768566751,2.40901504254 744 | 13.3892524102,8.44068118302,2.96891562121 745 | 11.0421242941,5.74639580799,3.28784756917 746 | 13.5140874486,9.32891603412,6.53824267523 747 | 8.92101343446,4.92089953999,4.04075730855 748 | 4.66389371135,5.24796063264,3.45076791818 749 | 9.97349450906,6.90874085456,2.74540385372 750 | 7.78382281354,0.97623012369,3.96651853945 751 | 6.97186732171,2.91608915821,2.65044589582 752 | 14.6998869691,10.9698889978,6.3146557328 753 | 17.5552223436,7.72424336557,9.19512111365 754 | 4.81220357356,4.67661805965,1.92432314198 755 | 11.5790575154,3.22873702223,11.4923590711 756 | 4.54950898555,4.98388454768,2.21406191862 757 | 9.21540233172,0.75893123081,7.10857750705 758 | 16.1569312337,7.17963569167,6.02859669141 759 | 9.25744377831,3.63666275124,5.30657437161 760 | 10.2128260884,5.27043158981,4.54815291537 761 | 9.59486467666,11.7536127104,1.96525870349 762 | 12.9280063276,9.357243736,5.73302931159 763 | 10.4114618222,7.43992379663,6.69417942762 764 | 3.20618294798,3.13667092775,4.83011249196 765 | 14.4540938457,9.05562286513,3.5836773168 766 | 17.8050716266,8.94191729166,9.64683712599 767 | 9.59580605991,4.80946496845,4.39832108364 768 | 12.5197175658,8.60885143996,3.76700894396 769 | 16.4058288823,9.34668239114,11.8803935348 770 | 8.65939861669,5.24776946607,6.26408055873 771 | 8.09898237268,0.0681765678928,6.28637030454 772 | 7.35642932275,1.92213318639,1.60474188027 773 | 
13.7436821122,7.7281577996,6.79546965571 774 | 9.83039749576,8.31912305135,2.9737304052 775 | 14.8086966198,5.08424737748,10.7027826517 776 | 14.7989196746,4.90549684468,10.0235079184 777 | 10.5891433882,7.21466696796,3.60979030445 778 | 1.80267832912,1.91820008245,2.5051655694 779 | 6.18670559351,3.53319876058,-0.591644111155 780 | 8.12208222634,4.41093552156,2.83408900273 781 | 18.2681735872,6.39204486004,7.72823445577 782 | 9.0591804389,4.48358649866,-0.389895209153 783 | 5.16658374364,-0.765566125922,3.8864407586 784 | 7.12935717773,1.67923609047,5.3259239545 785 | 9.46617962329,1.79123925671,7.16118635371 786 | 4.8662698627,2.87773365834,4.81755114352 787 | 6.1795965232,0.699847220813,4.64872065712 788 | 5.79461757607,4.28913084195,0.741296732899 789 | 14.6602247343,8.04181639831,6.34378705366 790 | 6.3286247549,4.53429011523,0.560484563916 791 | 15.0467535036,4.99399489074,10.6762099153 792 | 4.41502270148,0.549119755857,4.32828803524 793 | 10.7973191063,7.84738617443,1.58499374414 794 | 5.14130824307,5.10093215377,-1.12460326407 795 | 18.8416208405,9.04773929371,8.76069456778 796 | 7.03429140106,6.44456792001,0.200338360202 797 | 9.24682653697,5.07644492725,3.53702233347 798 | 10.2564709628,7.35347902151,1.24368515671 799 | 17.1720671485,8.38876051437,7.57267435984 800 | 8.45493083766,1.94512802599,6.47684152762 801 | 11.8503564609,1.28884355161,9.78436573314 802 | 9.17917237604,5.19439609325,2.06017298153 803 | 11.4507379799,8.61897303746,2.51896153452 804 | 11.2001770418,7.31345919997,6.56189600627 805 | 9.90591888202,7.31545379146,7.32198372869 806 | 6.60292683823,5.28660160401,3.03761936785 807 | 15.4036845157,11.007283457,6.83206905728 808 | 13.153312927,0.574132404231,7.70254101694 809 | 4.21643107727,3.99910981904,0.609953645064 810 | 4.27352657339,3.42264196962,1.46351863606 811 | 16.0131501866,8.60860726338,4.16763218263 812 | 13.6243247728,5.1744004566,10.2521274442 813 | 12.1048133451,2.17548384927,7.19014905452 814 | 
10.5691776126,8.46791918756,3.19567116821 815 | 9.97639420055,1.50166849531,12.9624708624 816 | 9.26884761714,8.08079497289,3.89281934878 817 | 4.32280411371,2.3869813416,-1.47272569972 818 | 16.4565847842,9.18075549498,8.50046446561 819 | 15.1042914528,11.4754091149,11.5040837425 820 | 14.5789332673,9.61533156786,4.66125391479 821 | 6.13326251978,4.92171279656,5.31241879862 822 | 11.4488077714,2.83556952147,8.81560105755 823 | 11.1600853008,5.60632547501,7.71332075317 824 | 6.83132154343,2.07936520416,0.0825369144044 825 | 3.04117517065,-0.160035261023,1.05165008665 826 | 5.05776546707,3.67901026203,3.92944184953 827 | 11.1124402848,2.32608756561,7.40615963375 828 | 13.3060070586,6.10620330954,5.83325021132 829 | 16.0135939776,8.01639913366,7.0759680426 830 | 8.30167635628,3.98915380849,4.48791663617 831 | 10.3096770569,9.46793538542,0.847579611459 832 | 11.4709030683,9.51769260825,2.8635104483 833 | 8.66768553072,6.0224493966,4.99067870439 834 | 17.3596686376,7.52305619389,9.4441898244 835 | 14.3295559501,5.13184899722,6.39338889838 836 | 10.7157048404,6.07702613316,4.70557231041 837 | 7.63838344264,1.85203185567,4.19153927354 838 | 15.0897361712,7.18414737735,7.38778571238 839 | 5.12862583418,1.83879935971,0.976088483993 840 | 11.98061109,9.5531080638,2.88916364094 841 | 10.2289894778,0.299976549859,9.44644833626 842 | 6.04792914752,2.483116945,5.86850631282 843 | 18.1040731611,7.37982378147,8.0951402511 844 | 6.53484760102,5.07769035163,0.396577634391 845 | 11.4236239589,11.0238608269,2.67567117023 846 | 13.2638572677,1.38028702549,7.41708079616 847 | 2.16260574674,1.09862880346,0.773170235039 848 | 12.1862148994,3.68943403232,7.4779984473 849 | 17.2445272904,6.29619004703,7.62236096399 850 | 5.70560540616,0.206553503035,2.82116136491 851 | 9.04827740484,6.12626074567,-0.242263228762 852 | 6.52760055706,1.37124189209,5.59895836122 853 | 10.8149852877,8.4791896915,1.54753464814 854 | 13.7457785019,10.6989545975,5.17854396336 855 | 
2.33592551565,-0.470905859375,1.9202782653 856 | 12.2289539846,7.42219893654,7.18536041916 857 | 9.81826275282,3.70335812307,7.78816993945 858 | 10.4051973925,8.10398148541,1.87006715889 859 | 11.3801314813,8.2914753077,-0.93609583845 860 | 3.85337015619,3.84169541554,4.07008566131 861 | 13.5101526986,9.26086986985,6.02818211962 862 | 10.6637811021,5.13268863865,7.57480783643 863 | 0.370286937707,0.0512915514285,-2.78151680772 864 | 6.45577958678,2.0244018736,4.00712645637 865 | 16.9056014624,8.52708010297,7.7841499375 866 | 11.3259014506,6.92862696896,2.6925319026 867 | 10.7380285504,6.77253907137,5.24821152961 868 | 12.5973196655,8.70089105295,5.20724877003 869 | 9.82365051823,3.46512337415,7.13649179003 870 | 6.47701538573,1.04632435182,3.96287834853 871 | 10.6035056861,2.44352758096,9.95303177418 872 | 6.85268921334,0.436818741413,5.89879783035 873 | 13.6112201636,3.19987706838,8.1094268425 874 | 7.5400977061,-1.09358551076,6.29570056562 875 | 8.08814580345,6.83867131349,-0.0190928145045 876 | 7.10842697382,1.16944353971,4.98106407541 877 | 11.1626889927,10.1715124421,2.83134075032 878 | 10.0230510592,4.31709012956,3.62250569313 879 | 2.60769246131,0.539126475398,-1.6607848218 880 | 15.8585695286,7.80135374369,9.92607514535 881 | 12.2409122928,6.39478206731,4.03736742726 882 | 15.1154023149,3.1204785702,7.90502242969 883 | 14.7246460006,6.12758909035,9.07257644127 884 | 5.34689239416,3.83386068048,-0.92556576407 885 | 13.0041988347,3.21709249592,9.31104015394 886 | 12.4876791738,8.44780535641,5.02113897339 887 | 9.27495570296,7.36702861929,4.50370903201 888 | 18.2262784051,10.2563174099,12.3779178905 889 | 17.8564485947,6.58472811154,7.47205216264 890 | 7.848436973,6.79383384636,3.86816636519 891 | 12.8296588916,9.91819155964,3.5437126885 892 | 6.03674560301,5.89978888302,1.44991233933 893 | 10.8437839752,9.30642488747,2.37666127195 894 | 14.3288393676,6.6671835054,7.0615902984 895 | 12.8482923193,8.22073544106,4.070540646 896 | 
7.24474277934,4.81396027454,1.69647146043 897 | 6.17877055665,-0.844803470817,7.1681393531 898 | 9.60616151263,5.78628655194,4.17198586186 899 | 10.7968329556,-0.0250495543708,9.86961496633 900 | 8.45968448471,4.15287607457,1.1044771809 901 | 15.9217120119,5.87426528343,5.73909215875 902 | 15.0978941096,4.3703365212,8.59650300893 903 | 7.04972411278,4.49085128121,4.7735836771 904 | 7.14829526596,1.31593632941,8.48749798139 905 | 13.558549084,10.3485626916,5.14471202564 906 | 9.83618558492,8.68864808348,2.27469491924 907 | 8.47491636638,3.69090631244,3.17615134122 908 | 9.612584703,-3.79797442916,9.64711404772 909 | 16.3324105348,6.45969532237,9.30940477929 910 | 5.70375949834,3.17810774026,3.83470690568 911 | 13.4792661504,9.68606065764,7.09894960809 912 | 4.46969241959,3.91942445838,0.207625491354 913 | 16.3579707763,12.0740895326,8.95276109199 914 | 10.9750551551,2.97150692587,9.10369534973 915 | 14.7343734074,5.58380531234,7.17530119731 916 | 6.44678211346,2.88715801168,1.72976647827 917 | 12.3330377714,8.68257030043,7.43280946989 918 | 10.1853714849,7.07124666849,2.41811466423 919 | 11.2870271048,8.40763818268,2.35127019676 920 | 8.38116892062,8.89940781676,0.497983203916 921 | 7.53978673282,8.36858749748,0.650867206584 922 | 12.4887197038,8.08732004281,3.29709983595 923 | 15.5188919786,8.619862547,7.41971168136 924 | 11.278879166,3.3009808667,8.67125773136 925 | 9.52041437335,7.38650191229,2.99125874421 926 | 11.3461394573,5.38148774599,9.49548949348 927 | 14.1755421333,6.22994170807,10.2076156412 928 | 12.3541443752,4.5951862271,8.25316612706 929 | 10.3498612481,2.9010431965,10.1526854417 930 | 14.5523477628,6.49742267642,9.51519744344 931 | 8.48285253443,4.64874664757,1.82467313533 932 | 7.91756047303,2.20185735221,6.45614922839 933 | 18.6892482928,7.47364954034,8.95299268445 934 | 5.44211544325,2.04314459468,0.805717748644 935 | 14.7902591982,6.91180338998,8.50209731246 936 | 7.26119197852,5.08007464061,5.91844215529 937 | 
14.9153847185,9.36729554064,4.36114450003 938 | 12.9556232889,8.06683186261,4.57978400201 939 | 16.6097118053,5.82161789547,7.79380829903 940 | 12.7358140738,9.42235921504,4.17384101899 941 | 8.73322309437,2.07898173508,4.57922278346 942 | 6.73362351402,6.54299145786,4.26229482166 943 | 3.17318174706,1.17107642668,1.86006781542 944 | 4.89034843754,5.59461783221,1.12467803863 945 | 11.1239577604,6.40259419757,3.54956173783 946 | 10.59970135,5.80957982697,1.02820578227 947 | 9.01206811811,1.21895990551,7.7736587545 948 | 8.12100718274,2.70911534994,3.79631648482 949 | 13.0804642235,6.26511399043,5.67761584661 950 | 7.24327554518,1.42728973415,3.32809394599 951 | 12.3048487744,6.81241785468,5.72699539887 952 | 11.8153227705,9.10160965646,5.11235218788 953 | 15.6184027362,8.1955110603,7.10039441016 954 | 9.86410432878,9.64877330669,1.2137290547 955 | 7.58118843195,2.76246800042,2.21458989577 956 | 12.0337642488,7.23332161648,3.59608399078 957 | 4.85513100508,-0.349742414893,1.49630297396 958 | 13.2628093825,6.85007080618,10.280994844 959 | 16.1665157502,7.03098332042,8.0143942169 960 | 3.63449946566,3.1016500626,2.8406677216 961 | 7.38508092355,5.40297200865,5.00509179428 962 | 11.8880873347,2.75408426478,8.55552225896 963 | 9.87106746621,6.34212879055,1.65500112803 964 | 8.90599907631,7.90227391707,0.0733337232325 965 | 13.4354593831,9.06783786413,5.75053737379 966 | 3.69086243996,4.23176131333,3.58314741136 967 | 8.74119011734,7.78831677247,-2.52289038012 968 | 17.2784903701,8.90023782741,10.1159802355 969 | 8.05583831326,7.90017939619,4.2691203793 970 | 2.58341601234,1.18616838565,3.25402834414 971 | 7.36136017232,4.90169610968,2.80537895175 972 | 8.33189657459,-0.858715207425,5.84672922029 973 | 8.32735003841,6.16604895624,3.25570227623 974 | 5.47017716419,2.57011948829,4.54611555198 975 | 9.54974217388,4.64876501136,6.7234774571 976 | 12.0344141763,3.88296631428,9.81997484579 977 | 6.37440354917,8.60638397339,1.22747738815 978 | 
6.23724434623,1.65022794683,6.8331552866 979 | 9.17088269002,0.487735115604,6.35815602745 980 | 11.5439629403,9.136702947,2.71506353406 981 | 19.449313308,11.4481087299,11.1028409084 982 | 7.47794439142,5.66849360587,1.48296371088 983 | 14.3541429607,8.02688344737,4.4425915551 984 | 9.509334478,3.2163567347,7.00572850866 985 | 11.1038999154,3.57791344765,2.38745071948 986 | 3.27227060396,0.649654013595,-0.760807038755 987 | 6.19268772017,-0.346480654863,3.21328118733 988 | 14.4958415997,7.40833606217,8.266771745 989 | 13.1510869703,5.38203177773,7.53471045367 990 | 14.2244853411,5.97466151395,9.84490381828 991 | 12.143870354,9.32405355325,4.32236490181 992 | 14.1905105041,3.87157513539,8.51931207032 993 | 10.7645494966,9.18790871454,2.37532433795 994 | 3.54807882135,4.42795825481,1.77602947135 995 | 16.1410932249,7.66878028646,7.93139334958 996 | 10.667741,4.37564011763,11.2446600129 997 | 10.4740811709,3.48924613017,6.66267985453 998 | 7.93573835503,6.49430131422,2.02520744419 999 | 10.9346242384,10.4129129251,2.59889734318 1000 | 17.6298896395,8.13459133438,8.9639380239 1001 | -------------------------------------------------------------------------------- /Linear Regression/linearRegressionTest.py: -------------------------------------------------------------------------------- 1 | import os 2 | import csv 3 | import random 4 | import LinearRegression as LR 5 | import numpy 6 | 7 | #import data 8 | data = numpy.genfromtxt('input.csv', delimiter=',') 9 | # response is in the first column 10 | Y = data[:, 0] 11 | X = data[:, 1:] 12 | 13 | # n-fold cross validation 14 | # shuffle data 15 | m = len(Y) 16 | index = range(0, m) 17 | random.shuffle(index) 18 | X = X[index, :] 19 | Y = Y[index] 20 | 21 | # n-fold 22 | nfold = 10 23 | foldSize = int(m / nfold) 24 | 25 | # arrage to store training and testing error 26 | trainErr = [0.0] * nfold 27 | testErr = [0.0] * nfold 28 | allIndex = range(0, m) 29 | for i in range(0, nfold): 30 | 31 | testIndex = range((foldSize * 
i), foldSize * (i + 1)) 32 | trainIndex = list(set(allIndex) - set(testIndex)) 33 | 34 | trainX = X[trainIndex, :] 35 | trainY = Y[trainIndex] 36 | testX = X[testIndex, :] 37 | testY = Y[testIndex] 38 | 39 | # set parameter 40 | alpha = 0.01 41 | lam = 0.1 42 | model = LR.LinearRegression(trainX, trainY, alpha, lam) 43 | model.run(400, printIter=False) 44 | 45 | trainPred = model.predict(trainX) 46 | trainErr[i] = sum((trainPred - trainY) ** 2) / len(trainY) 47 | 48 | testPred = model.predict(testX) 49 | testErr[i] = sum((testPred - testY) ** 2) / len(testY) 50 | 51 | print "train Err=", trainErr[i], "test Err=", testErr[i] 52 | print " " 53 | 54 | print "summary:" 55 | print "average train err=", numpy.mean(trainErr) 56 | print "average test err=", numpy.mean(testErr) 57 | -------------------------------------------------------------------------------- /Logistic Regression/Python/LogisticRegression.py: -------------------------------------------------------------------------------- 1 | import random 2 | import numpy 3 | 4 | 5 | class LogisticRegression(object): 6 | 7 | # initialize 8 | 9 | def __init__(self, X, Y, alpha=0.0005, lam=0.1, printIter=True): 10 | 11 | x = numpy.array(X) 12 | m, n = x.shape 13 | 14 | # normalize data 15 | self.xMean = numpy.mean(x, axis=0) 16 | self.xStd = numpy.std(x, axis=0) 17 | x = (x - self.xMean) / self.xStd 18 | 19 | # add const column to X 20 | const = numpy.array([1] * m).reshape(m, 1) 21 | self.X = numpy.append(const, x, axis=1) 22 | 23 | self.Y = numpy.array(Y) 24 | self.alpha = alpha 25 | self.lam = lam 26 | self.theta = numpy.array([0.0] * (n + 1)) 27 | 28 | self.printIter = printIter 29 | print "lambda=", self.lam 30 | 31 | # transform function 32 | def _sigmoid(self, x): 33 | #m,n = x.shape 34 | #z = numpy.array([0.0]*(m*n)).reshape(m,n) 35 | z = 1.0 / (1.0 + numpy.exp((-1) * x)) 36 | return z 37 | 38 | # caluclate cost 39 | def _costFunc(self): 40 | "calculate cost" 41 | m, n = self.X.shape 42 | h_theta = 
self.__sigmoid(numpy.dot(self.X, self.theta)) 43 | 44 | cost1 = (-1) * self.Y * numpy.log(h_theta) 45 | cost2 = (1.0 - self.Y) * numpy.log(1.0 - h_theta) 46 | 47 | cost = ( 48 | sum(cost1 - cost2) + 0.5 * self.lam * sum(self.theta[1:] ** 2)) / m 49 | return cost 50 | 51 | # gradient descend 52 | def _gradientDescend(self, iters): 53 | """ 54 | gradient descend: 55 | X: feature matrix 56 | Y: response 57 | theta: predict parameter 58 | alpha: learning rate 59 | lam: lambda, penality on theta 60 | """ 61 | 62 | m, n = self.X.shape 63 | 64 | # print "m,n=" , m,n 65 | # print "theta", len(self.theta) 66 | 67 | for i in xrange(0, iters): 68 | theta_temp = self.theta 69 | 70 | # update theta[0] 71 | h_theta = self.__sigmoid(numpy.dot(self.X, self.theta)) 72 | diff = h_theta - self.Y 73 | self.theta[0] = theta_temp[0] - self.alpha * \ 74 | (1.0 / m) * sum(diff * self.X[:, 0]) 75 | 76 | for j in xrange(1, n): 77 | val = theta_temp[ 78 | j] - self.alpha * (1.0 / m) * (sum(diff * self.X[:, j]) + self.lam * m * theta_temp[j]) 79 | # print val 80 | self.theta[j] = val 81 | # calculate cost and print 82 | cost = self.__costFunc() 83 | 84 | if self.printIter: 85 | print "Iteration", i, "\tcost=", cost 86 | # print "theta", self.theta 87 | 88 | # simple name 89 | def run(self, iters, printIter=True): 90 | self.printIter = printIter 91 | self._gradientDescend(iters) 92 | 93 | # prediction 94 | def predict(self, X): 95 | 96 | # add const column 97 | m, n = X.shape 98 | x = numpy.array(X) 99 | x = (x - self.xMean) / self.xStd 100 | const = numpy.array([1] * m).reshape(m, 1) 101 | X = numpy.append(const, x, axis=1) 102 | 103 | pred = self.__sigmoid(numpy.dot(X, self.theta)) 104 | numpy.putmask(pred, pred >= 0.5, 1.0) 105 | numpy.putmask(pred, pred < 0.5, 0.0) 106 | 107 | return pred 108 | -------------------------------------------------------------------------------- /Logistic Regression/Python/LogisticRegressionTest.py: 
-------------------------------------------------------------------------------- 1 | import csv 2 | import random 3 | import numpy 4 | import LogisticRegression as LR 5 | 6 | #import data 7 | data = numpy.genfromtxt('input.csv', delimiter=',') 8 | # response is in the first column 9 | Y = data[:, 0] 10 | X = data[:, 1:] 11 | 12 | # n-fold cross validation 13 | # shuffle data 14 | m = len(Y) 15 | index = range(0, m) 16 | random.shuffle(index) 17 | X = X[index, :] 18 | Y = Y[index] 19 | 20 | # n-fold 21 | nfold = 10 22 | foldSize = int(m / nfold) 23 | 24 | # arrage to store training and testing error 25 | trainErr = [0.0] * nfold 26 | testErr = [0.0] * nfold 27 | allIndex = range(0, m) 28 | for i in range(0, nfold): 29 | 30 | testIndex = range((foldSize * i), foldSize * (i + 1)) 31 | trainIndex = list(set(allIndex) - set(testIndex)) 32 | 33 | trainX = X[trainIndex, :] 34 | trainY = Y[trainIndex] 35 | testX = X[testIndex, :] 36 | testY = Y[testIndex] 37 | 38 | # set parameter 39 | alpha = 0.05 40 | lam = 0.1 41 | model = LR.LogisticRegression(trainX, trainY, alpha, lam) 42 | model.run(400, printIter=False) 43 | 44 | trainPred = model.predict(trainX) 45 | trainErr[i] = float(sum(trainPred != trainY)) / len(trainY) 46 | 47 | testPred = model.predict(testX) 48 | testErr[i] = float(sum(testPred != testY)) / len(testY) 49 | 50 | print "train Err=", trainErr[i], "test Err=", testErr[i] 51 | print " " 52 | 53 | print "summary:" 54 | print "average train err =", numpy.mean(trainErr) * 100, "%" 55 | print "average test err =", numpy.mean(testErr) * 100, "%" 56 | -------------------------------------------------------------------------------- /Logistic Regression/Python/input.csv: -------------------------------------------------------------------------------- 1 | 1.0,3.43263476905,0.280417833321 2 | 0.0,-1.3415030109,3.0869815446 3 | 1.0,3.84602211727,1.20644517642 4 | 0.0,-0.403113022084,3.1513941904 5 | 1.0,3.06249095529,0.21673470383 6 | 0.0,-0.513603326464,3.20774962201 7 
| 1.0,1.62591942014,-0.761276577663 8 | 0.0,-1.16777216112,1.02745799204 9 | 1.0,3.3952054082,1.67713225089 10 | 0.0,1.14279348721,1.6247239178 11 | 1.0,1.59191060627,1.3527023018 12 | 0.0,-0.395883726874,2.6262684097 13 | 1.0,0.388437606847,0.823992575494 14 | 0.0,1.00434313096,0.867656000143 15 | 1.0,1.14575471179,-0.664971938767 16 | 0.0,-1.33130731637,2.11435823895 17 | 1.0,2.23202151152,2.04918445882 18 | 0.0,-1.60307773207,2.98072778636 19 | 1.0,1.74627667526,-0.109711648021 20 | 0.0,0.294772294409,1.12695456757 21 | 1.0,2.16233985884,0.613826694578 22 | 0.0,-0.485176923421,3.15464740682 23 | 1.0,1.43325958389,0.524274119211 24 | 0.0,0.809250282018,2.76761795756 25 | 1.0,0.624817406824,-1.18442201158 26 | 0.0,0.521403017457,0.554967143885 27 | 1.0,2.27808391884,-0.992008918201 28 | 0.0,-0.669368870634,1.68996485096 29 | 1.0,1.77597337879,-1.60115423355 30 | 0.0,1.87589094286,2.74199471794 31 | 1.0,3.35241082579,0.903356827299 32 | 0.0,1.85764883758,0.710226405279 33 | 1.0,1.36263669161,1.01259518817 34 | 0.0,1.22629986303,3.01108332779 35 | 1.0,-1.09775134139,-0.516381824002 36 | 0.0,0.803623876024,2.4294266281 37 | 1.0,2.8788819566,1.78708966805 38 | 0.0,0.300322914248,0.954746684988 39 | 1.0,1.07997491634,0.729518216169 40 | 0.0,-1.05338311322,1.90833338691 41 | 1.0,2.71244924461,0.024550597636 42 | 0.0,0.0735941842553,1.65457602838 43 | 1.0,2.10064042081,-2.02005333149 44 | 0.0,1.34066986526,0.39531675432 45 | 1.0,3.06457763418,-0.623600821272 46 | 0.0,1.44613325491,0.910056715944 47 | 1.0,1.20265776431,-1.1102720538 48 | 0.0,-0.0487653606296,2.51136703534 49 | 1.0,1.47360530164,1.10509264604 50 | 0.0,0.75359434249,2.92970400141 51 | 1.0,2.59361975928,1.85833730482 52 | 0.0,1.77182149421,3.03139048762 53 | 1.0,0.406884670632,0.776234666187 54 | 0.0,-0.115092710203,1.22110197971 55 | 1.0,1.65236476464,-0.737079830882 56 | 0.0,1.45988973942,2.30638486805 57 | 1.0,-0.836846523865,1.82596505828 58 | 0.0,1.09078884885,2.81464627242 59 | 
1.0,2.66507277892,0.154571111591 60 | 0.0,-0.342356983573,1.09098575999 61 | 1.0,2.34056162778,2.13808126401 62 | 0.0,-1.25610907993,3.04957235945 63 | 1.0,1.93737973584,-0.495577432364 64 | 0.0,-0.93654687428,1.98973278655 65 | 1.0,2.7365239038,-1.26855489544 66 | 0.0,-0.545384431956,2.96872916663 67 | 1.0,-0.0454464938199,1.37397753999 68 | 0.0,-0.41590780694,1.40346831901 69 | 1.0,2.51880961503,-0.375340565826 70 | 0.0,-0.203939402004,1.95203752272 71 | 1.0,1.45250090651,0.96948634968 72 | 0.0,0.13780559377,1.90052424327 73 | 1.0,1.56109525662,0.0588144584803 74 | 0.0,0.216069829616,2.33578797225 75 | 1.0,2.84403685987,-0.499160323248 76 | 0.0,-0.447200084022,0.557823978032 77 | 1.0,1.51125763105,0.49020727461 78 | 0.0,0.722669872933,3.29474240236 79 | 1.0,3.55999992493,-0.725895690771 80 | 0.0,-0.490990548517,1.18500631327 81 | 1.0,1.75988660333,0.152521038706 82 | 0.0,0.54182842584,2.93271790103 83 | 1.0,2.11523181785,0.697638275854 84 | 0.0,-0.64593381358,2.59946572414 85 | 1.0,-0.214758504663,-0.690488342876 86 | 0.0,0.36890160349,0.496261127813 87 | 1.0,2.77141527449,1.73556096781 88 | 0.0,0.447634274462,0.589489891673 89 | 1.0,3.60603933582,0.184355196652 90 | 0.0,-0.165800167225,2.03953959787 91 | 1.0,1.15108727077,1.24826791648 92 | 0.0,0.005002955177,2.96012589935 93 | 1.0,4.29860482604,-0.404384317013 94 | 0.0,1.15864833206,3.03423302282 95 | 1.0,2.03979113717,-2.33415533703 96 | 0.0,-1.63747362657,2.47004178894 97 | 1.0,0.990219318979,-0.800531882607 98 | 0.0,1.45758180848,3.20129527164 99 | 1.0,0.335679448142,-0.8045551053 100 | 0.0,0.342539289115,2.37316360774 101 | 1.0,1.61131650531,0.188476817959 102 | 0.0,0.999007415605,3.18786891122 103 | 1.0,2.98582953466,-0.903841736719 104 | 0.0,-0.349076503705,1.79371285967 105 | 1.0,1.79192567447,-1.94091044662 106 | 0.0,-0.489865294895,2.81445656766 107 | 1.0,2.74527197835,-0.924511274 108 | 0.0,1.11279210711,4.38251492828 109 | 1.0,2.97205651115,1.42932128554 110 | 0.0,0.561187800945,3.26772157757 111 | 
1.0,1.81550958171,-0.902428776318 112 | 0.0,-1.43414579098,2.77094654246 113 | 1.0,2.02937538841,0.701810606509 114 | 0.0,-1.83951113419,2.19904455925 115 | 1.0,1.27847351893,1.2392389569 116 | 0.0,-0.60648123137,0.677682233615 117 | 1.0,2.59062665077,-0.572174201302 118 | 0.0,-0.382734796951,1.78261479774 119 | 1.0,2.75942993509,2.10776431619 120 | 0.0,-0.0412523818649,1.74178394808 121 | 1.0,2.50543654396,0.0436122556161 122 | 0.0,-0.377594314087,3.78148318395 123 | 1.0,0.767165271551,0.105504322873 124 | 0.0,1.87295783607,3.41754908923 125 | 1.0,3.59519329002,0.265863610794 126 | 0.0,0.497382877459,0.430056842954 127 | 1.0,2.33833057469,0.928704270135 128 | 0.0,-0.0990428600917,1.22478269814 129 | 1.0,1.86015569974,0.566217200276 130 | 0.0,1.46428599126,1.46101347166 131 | 1.0,2.57259418557,-2.25133340117 132 | 0.0,-1.00035070301,2.40158235142 133 | 1.0,3.76841375117,0.630479534647 134 | 0.0,-0.114159517884,2.50999751417 135 | 1.0,2.9710903047,-1.60252714174 136 | 0.0,-0.215371912437,2.08147488772 137 | 1.0,3.91671527142,-0.411176835071 138 | 0.0,-0.914105916465,-0.305639924267 139 | 1.0,0.0598725552017,-0.886176377649 140 | 0.0,-0.570502253547,0.936851682966 141 | 1.0,0.940245525687,-1.07356531317 142 | 0.0,0.144363290577,1.31182139383 143 | 1.0,1.32382876182,0.4884250036 144 | 0.0,-1.07239063977,2.02064578162 145 | 1.0,1.92196132753,1.04513530966 146 | 0.0,-0.762231153394,2.5160148043 147 | 1.0,-0.229961228353,0.0443823251046 148 | 0.0,0.365898205656,1.25360484418 149 | 1.0,1.14739851168,-0.804860108546 150 | 0.0,1.13091329749,2.55846632891 151 | 1.0,0.310011029908,-0.427489456595 152 | 0.0,0.0470542273997,1.44194079089 153 | 1.0,2.51832149095,-1.38524286322 154 | 0.0,-1.04317066697,0.4900448001 155 | 1.0,1.09083804593,-0.434252936206 156 | 0.0,-1.43457250747,2.07290003142 157 | 1.0,1.3815649963,0.34137739257 158 | 0.0,-0.224897633556,1.38291738119 159 | 1.0,2.05565641742,1.22965474205 160 | 0.0,-0.740507451698,1.97450016008 161 | 
1.0,0.551689423603,-0.422778643023 162 | 0.0,-0.324079503171,2.95079586679 163 | 1.0,2.47882196864,-0.364884927807 164 | 0.0,-0.241064738247,1.44332193851 165 | 1.0,2.94752668624,-0.198780709627 166 | 0.0,1.11226520622,1.50063944447 167 | 1.0,2.37428175533,0.167978618207 168 | 0.0,0.692900247411,0.102040415071 169 | 1.0,1.48717998023,-0.101325295076 170 | 0.0,-1.71722444706,3.60665963251 171 | 1.0,3.09815937648,-1.07225239725 172 | 0.0,-1.70677868369,1.75579834705 173 | 1.0,1.97512720162,1.14942244165 174 | 0.0,-0.716176321152,2.57020057989 175 | 1.0,3.03963731897,-0.0870565038104 176 | 0.0,-0.146673887839,1.84976361666 177 | 1.0,1.98627430035,-0.671463114157 178 | 0.0,1.00595821454,3.05801891859 179 | 1.0,0.610430951789,0.450834237697 180 | 0.0,2.34744601083,1.24539644039 181 | 1.0,0.397349350492,0.135151642433 182 | 0.0,0.774819616973,1.43300543598 183 | 1.0,1.7102242147,-0.398662921522 184 | 0.0,-0.604683624443,2.73230514439 185 | 1.0,-0.406429976241,1.58323399575 186 | 0.0,-0.20235402784,1.08676509572 187 | 1.0,3.0911353091,0.0581986623022 188 | 0.0,-1.54499455939,1.76571315 189 | 1.0,0.992925961921,-0.874845421925 190 | 0.0,-1.33651859053,2.49753855894 191 | 1.0,2.73447742803,0.0741017538822 192 | 0.0,0.94112908482,1.98453519337 193 | 1.0,2.33833521202,-0.074034693378 194 | 0.0,-0.580940583961,-0.438015574718 195 | 1.0,1.55741896049,0.0398320979356 196 | 0.0,0.45884922229,1.74514478435 197 | 1.0,1.93917874944,0.310842295027 198 | 0.0,-0.233727460216,1.82738553975 199 | 1.0,-0.148370204831,1.12007044175 200 | 0.0,0.131952482112,2.8289397006 201 | 1.0,1.2460817762,0.523648682008 202 | 0.0,-0.844349765346,0.0627238570588 203 | 1.0,1.64092046478,1.95498568576 204 | 0.0,0.376877790095,3.19360917583 205 | 1.0,2.49182573986,-0.207936331825 206 | 0.0,0.410729611806,0.951082241238 207 | 1.0,2.44572158248,0.471508722045 208 | 0.0,0.9960628329,3.14393467484 209 | 1.0,2.99838851886,-0.674104633726 210 | 0.0,0.39746714171,1.09456129171 211 | 
1.0,0.895181694412,1.01615445915 212 | 0.0,0.0221658580718,1.05929625384 213 | 1.0,2.72738415603,-0.00861739977793 214 | 0.0,-0.854469434495,3.44334671035 215 | 1.0,2.90963070459,1.37295685135 216 | 0.0,-1.69373368713,3.41848165812 217 | 1.0,1.97743215223,-0.438886155253 218 | 0.0,-1.48148746239,3.41382983097 219 | 1.0,1.98551992666,-0.690588981713 220 | 0.0,-0.50304692326,2.64722650122 221 | 1.0,1.9019718793,-1.41375372367 222 | 0.0,-0.142677907113,2.42899995695 223 | 1.0,0.971208455907,-0.603524864685 224 | 0.0,0.671808984482,2.31871071946 225 | 1.0,3.41476096801,1.79381275799 226 | 0.0,-0.0163610629611,2.77351152372 227 | 1.0,2.30573620992,-0.130939959332 228 | 0.0,-0.365019055198,2.67569252328 229 | 1.0,3.36272935751,-1.62045498019 230 | 0.0,-0.408733121796,1.95336094573 231 | 1.0,1.94274901245,-0.78345715209 232 | 0.0,1.80007917498,1.73533344319 233 | 1.0,1.23666295395,0.296761416582 234 | 0.0,-0.686328558187,3.02293444288 235 | 1.0,2.30511459405,-0.163397994149 236 | 0.0,0.97102824588,0.965988961501 237 | 1.0,1.20897304027,-1.47804145326 238 | 0.0,0.833876428418,2.61986395487 239 | 1.0,0.879105508704,-0.574721752044 240 | 0.0,-1.08779079975,2.57856300966 241 | 1.0,0.573767058861,-0.463790392405 242 | 0.0,0.40011357967,2.11945008793 243 | 1.0,2.02725761734,-0.449499363392 244 | 0.0,-0.435894388484,0.391212165917 245 | 1.0,0.698696777171,-1.11942848351 246 | 0.0,-0.0944178066031,2.76462805297 247 | 1.0,3.46362133672,0.90029829781 248 | 0.0,-0.492735159407,4.29673559958 249 | 1.0,0.294864701559,0.770385870251 250 | 0.0,-0.020508665222,1.27526730174 251 | 1.0,0.683094078031,0.241634361569 252 | 0.0,2.2870172022,1.11300144635 253 | 1.0,4.21551199421,0.750380642055 254 | 0.0,1.07933408365,1.13393610369 255 | 1.0,2.97896256129,0.0666951187167 256 | 0.0,-0.3671850126,4.43672437022 257 | 1.0,1.86615559854,-1.77322706363 258 | 0.0,-0.393669669648,2.19314426021 259 | 1.0,1.99333725203,-0.153898267941 260 | 0.0,0.908675216907,1.3935752148 261 | 
1.0,2.27780210248,0.14598388735 262 | 0.0,1.1195384743,1.20487045481 263 | 1.0,2.03020499765,0.801363396389 264 | 0.0,-0.336427166461,3.42482543045 265 | 1.0,1.81670747269,1.68699323766 266 | 0.0,-0.779010079224,0.888489790138 267 | 1.0,1.67757012019,0.320935803359 268 | 0.0,2.07574205886,2.96214494437 269 | 1.0,0.100325966238,-0.315366763467 270 | 0.0,-0.247522666775,2.21475413073 271 | 1.0,2.51590673134,-0.453066698618 272 | 0.0,-0.119552918344,1.90592855678 273 | 1.0,3.62847957127,-0.627645065073 274 | 0.0,-1.09587645297,1.40141293081 275 | 1.0,1.03662297135,-1.43735369592 276 | 0.0,0.654414068516,1.75703017225 277 | 1.0,1.39028840249,-0.243467270481 278 | 0.0,-0.951042134011,1.98511969682 279 | 1.0,1.3448296491,0.30336494279 280 | 0.0,-1.19253357437,1.32081165048 281 | 1.0,0.624073267926,0.925253590822 282 | 0.0,0.403312990382,2.70626160187 283 | 1.0,1.47633018226,2.23581918484 284 | 0.0,1.59180008226,-0.642574714668 285 | 1.0,2.23022870876,-1.03476796652 286 | 0.0,0.374746688138,3.3471756396 287 | 1.0,1.87064571311,0.706998632288 288 | 0.0,1.47232506522,3.34250620834 289 | 1.0,2.9956245869,0.471109010843 290 | 0.0,-2.0281991086,1.73345318636 291 | 1.0,2.78324058083,-2.13515558258 292 | 0.0,0.249407029841,1.6710470978 293 | 1.0,1.30293465799,-0.587285908939 294 | 0.0,0.830835063748,2.33856918038 295 | 1.0,1.59704105702,-0.88600809962 296 | 0.0,-0.459267404329,1.48403933883 297 | 1.0,2.59148490798,0.0632279513837 298 | 0.0,-1.26710535942,3.09598962709 299 | 1.0,2.66507948685,0.348389319082 300 | 0.0,0.313612108619,1.69906379624 301 | 1.0,1.63066612029,0.726086187142 302 | 0.0,-1.38588357573,1.91851044899 303 | 1.0,1.67840957063,0.535110026282 304 | 0.0,0.917361894066,1.72085186047 305 | 1.0,1.35338436683,-1.62959488891 306 | 0.0,0.980481695957,0.984031648981 307 | 1.0,1.66324018354,0.470803918835 308 | 0.0,-1.04178508851,1.09869544322 309 | 1.0,1.48820390054,-0.565255642705 310 | 0.0,-0.373138103429,3.85000357889 311 | 1.0,2.55925419632,-1.672694604 312 | 
0.0,-0.218360517021,2.26489756071 313 | 1.0,1.49359421351,0.241029705956 314 | 0.0,0.613083128853,1.74561997205 315 | 1.0,1.97735877921,0.0460204796393 316 | 0.0,0.740386627486,3.88347017171 317 | 1.0,1.74436102504,-0.423240044444 318 | 0.0,-0.175876760578,3.30459060191 319 | 1.0,0.864367438609,0.502157912496 320 | 0.0,0.948238349465,1.86864329829 321 | 1.0,1.22365127566,0.355757609288 322 | 0.0,1.2778474217,2.61821825433 323 | 1.0,1.64323713126,0.609955945571 324 | 0.0,-0.390482320174,0.508363540166 325 | 1.0,1.82244106008,0.32450614094 326 | 0.0,1.16896727837,1.50834984734 327 | 1.0,1.38647822887,1.07389075576 328 | 0.0,-0.707865686063,2.83693835939 329 | 1.0,1.84525472832,0.936150153985 330 | 0.0,0.856309313703,2.81649954428 331 | 1.0,1.539927592,0.222617435158 332 | 0.0,-0.82286012599,2.59680515157 333 | 1.0,1.17990909058,0.631576318647 334 | 0.0,0.469709319374,3.48213721525 335 | 1.0,1.39525541086,-0.85282238993 336 | 0.0,0.514587240152,2.99452355093 337 | 1.0,2.11234501354,0.90422588872 338 | 0.0,-0.924849439765,2.19424631099 339 | 1.0,2.39367477698,0.111925488329 340 | 0.0,-0.889820118995,2.18520191356 341 | 1.0,2.1829874123,0.42277191651 342 | 0.0,-0.54363323693,3.08768947243 343 | 1.0,1.67396653877,-0.120469738835 344 | 0.0,0.573536519515,2.25402991169 345 | 1.0,2.71881561028,-0.454958037209 346 | 0.0,-0.782729675208,1.08156038254 347 | 1.0,2.2017229179,0.101840370616 348 | 0.0,-0.265037024805,1.92437914162 349 | 1.0,0.204449798687,-0.319839068363 350 | 0.0,-1.27373827119,1.1010654037 351 | 1.0,1.47303203964,-0.274552236123 352 | 0.0,0.265448823349,4.37508172692 353 | 1.0,2.42145938466,-0.80747248507 354 | 0.0,-1.70178113714,1.26610018191 355 | 1.0,1.76574856723,-0.813289535276 356 | 0.0,1.13639085824,3.38894716113 357 | 1.0,2.66358335376,-0.232330322826 358 | 0.0,0.185547362471,3.56606072095 359 | 1.0,2.27841109201,0.420014435352 360 | 0.0,-0.965424605539,0.451577413554 361 | 1.0,2.49098656067,-1.42497542325 362 | 0.0,-0.993771554791,0.912287175447 363 | 
1.0,1.94076448546,0.86583258919 364 | 0.0,1.68208601754,2.38802554809 365 | 1.0,2.5024791672,0.472033712717 366 | 0.0,-0.0330715665932,2.96125338963 367 | 1.0,1.4978532465,-0.589655363854 368 | 0.0,-0.620057108372,0.790511980187 369 | 1.0,4.01012886085,0.596133177985 370 | 0.0,0.247383326314,0.104900054497 371 | 1.0,0.889261116265,-1.75620324629 372 | 0.0,0.117758954416,2.31557257091 373 | 1.0,0.980692174018,-0.243730034445 374 | 0.0,0.295679043304,2.08508561007 375 | 1.0,3.39178168648,1.30469955243 376 | 0.0,0.695962653824,0.61818605813 377 | 1.0,2.77454041852,2.51155012988 378 | 0.0,-0.531960317636,1.84361859395 379 | 1.0,2.7361961668,0.717732464022 380 | 0.0,-0.62954016235,2.0552616378 381 | 1.0,1.62051566198,-0.234688768569 382 | 0.0,-0.82491595171,0.81078758994 383 | 1.0,0.20237641933,0.365720240001 384 | 0.0,1.57678076338,2.16503347631 385 | 1.0,2.77369860194,0.328553580793 386 | 0.0,0.866196966355,2.41531268976 387 | 1.0,0.479073551094,0.496830487424 388 | 0.0,0.344471877245,3.0841937486 389 | 1.0,2.54865051718,0.587205413108 390 | 0.0,0.553500619109,0.837552589572 391 | 1.0,1.62595039781,0.740604834255 392 | 0.0,0.734722071523,0.410140113595 393 | 1.0,0.454482752465,-1.73784509836 394 | 0.0,-0.00722239968302,2.65479424237 395 | 1.0,0.777278452701,1.16255201292 396 | 0.0,0.557119149227,2.39832719747 397 | 1.0,2.0931384387,0.448319891484 398 | 0.0,-1.12049423311,1.3173798126 399 | 1.0,3.59097449345,-0.254741139603 400 | 0.0,0.318017928271,2.93661415798 401 | 1.0,0.551170956953,1.05897441003 402 | 0.0,-1.5596790277,1.88014663512 403 | 1.0,2.09255661179,-2.27083901747 404 | 0.0,-1.98968195591,3.87351644924 405 | 1.0,2.75062665775,0.539442176816 406 | 0.0,-1.37305463008,2.50873046437 407 | 1.0,0.939115501038,-1.01504863816 408 | 0.0,-1.25475641495,1.21411955475 409 | 1.0,1.08722524044,0.452283152974 410 | 0.0,2.23472285554,1.73060338923 411 | 1.0,2.11640788272,-0.295606947336 412 | 0.0,0.290964342974,3.02914450122 413 | 1.0,1.82649445755,2.05973068245 414 | 
0.0,-0.240985465955,0.768250192298 415 | 1.0,2.60951273947,0.0925450797488 416 | 0.0,-1.99141409557,2.35821533625 417 | 1.0,1.63859233125,-1.05537670128 418 | 0.0,-1.55842482471,1.2494827917 419 | 1.0,1.36677172898,1.12603563938 420 | 0.0,1.58141321325,2.30131783591 421 | 1.0,-0.153282661431,-0.930747362787 422 | 0.0,0.292914479133,2.21218254191 423 | 1.0,0.747424425592,-0.67530614294 424 | 0.0,-0.339707734527,3.49995769041 425 | 1.0,2.64799144514,1.57556210599 426 | 0.0,-1.30540512832,2.66124406988 427 | 1.0,2.28029039436,-0.152690374087 428 | 0.0,0.0357846085413,0.372681916163 429 | 1.0,1.67114679575,1.16754109286 430 | 0.0,-0.874792235802,0.994606656889 431 | 1.0,2.62738913651,1.35282494458 432 | 0.0,1.94965780095,1.99572953978 433 | 1.0,2.92176808307,0.637918535978 434 | 0.0,-1.32426853498,1.50300229321 435 | 1.0,2.33902077066,0.176396187084 436 | 0.0,0.261249512159,0.396532879445 437 | 1.0,1.59326674284,-0.729338704707 438 | 0.0,0.783165705323,1.24134788536 439 | 1.0,0.226249711912,0.234254352303 440 | 0.0,-0.798163164378,3.11147172037 441 | 1.0,3.09607874517,1.53117805971 442 | 0.0,-0.510953338776,2.77890618754 443 | 1.0,3.69438402407,0.397840471112 444 | 0.0,-0.499036675143,2.40741352335 445 | 1.0,3.19101430258,-0.343878988754 446 | 0.0,-1.12633017184,1.84280864073 447 | 1.0,0.724525096778,0.703161730175 448 | 0.0,-0.103619588475,3.65141112084 449 | 1.0,1.67197709355,0.286778796119 450 | 0.0,-0.316352824001,3.66769790594 451 | 1.0,2.69828894104,1.16378603337 452 | 0.0,-1.52529957972,3.37154955921 453 | 1.0,4.03896531459,-0.555101359712 454 | 0.0,-0.382385315863,1.42180404446 455 | 1.0,1.56889775666,-0.963257592556 456 | 0.0,1.33502850954,1.911270598 457 | 1.0,2.56769228997,-1.15813862435 458 | 0.0,2.14538906113,2.29333077372 459 | 1.0,2.79753455444,-1.67660751036 460 | 0.0,0.0223320243911,1.32399307444 461 | 1.0,1.51692495408,0.686412724189 462 | 0.0,-0.641407574064,2.66933227317 463 | 1.0,1.26761425007,-0.677065685981 464 | 0.0,-2.2972498704,4.01968206133 
465 | 1.0,2.06019503009,2.25413751371 466 | 0.0,1.06821852719,0.0468072892293 467 | 1.0,1.56663818651,-1.80420438951 468 | 0.0,0.764960433665,2.41005686126 469 | 1.0,2.72407063104,1.09709710337 470 | 0.0,0.80766420561,4.15840061965 471 | 1.0,0.795893893619,-1.76554689997 472 | 0.0,-1.65618943775,3.37640488438 473 | 1.0,1.75640954142,1.2606588243 474 | 0.0,-0.793129131203,2.68368074901 475 | 1.0,2.09242928507,-0.956292761323 476 | 0.0,-0.525098864354,2.54718600142 477 | 1.0,1.63350527923,0.567976459999 478 | 0.0,1.20879344471,1.6433733387 479 | 1.0,0.551237025522,-1.70321889586 480 | 0.0,-0.753494468904,1.55885884396 481 | 1.0,1.83872004781,-0.776512838916 482 | 0.0,1.47911206642,2.42653405093 483 | 1.0,2.69927815733,0.956267511787 484 | 0.0,-0.0236890622222,1.61716721204 485 | 1.0,1.54730073473,0.751299891533 486 | 0.0,1.03102779129,-0.222412807178 487 | 1.0,2.28029965634,-1.00208887274 488 | 0.0,0.245208346924,-0.251226366891 489 | 1.0,1.47445455064,0.841128284784 490 | 0.0,0.030194597259,2.4552761227 491 | 1.0,0.176896996887,0.00640089552888 492 | 0.0,0.649625492747,1.50675716959 493 | 1.0,2.19676398577,1.74601961656 494 | 0.0,-1.24529049008,1.1261070844 495 | 1.0,0.460005859786,1.28481306825 496 | 0.0,-0.802100662724,2.56809246099 497 | 1.0,0.885593173511,-1.41324057733 498 | 0.0,1.5682266562,1.45845748611 499 | 1.0,1.52700824651,0.673676770963 500 | 0.0,0.555631546791,1.69546744303 501 | 1.0,1.12866872938,-1.07745821848 502 | 0.0,-1.47081613679,1.72569196682 503 | 1.0,1.14006156373,1.99480233226 504 | 0.0,0.464456339031,1.19983889136 505 | 1.0,2.03272200602,2.20125473395 506 | 0.0,0.470401510413,2.11732813214 507 | 1.0,1.295644661,0.690739205761 508 | 0.0,-0.231530812826,2.29747304888 509 | 1.0,2.55783223553,0.140700409343 510 | 0.0,0.161030429568,3.006576146 511 | 1.0,2.9147106792,-0.751449917809 512 | 0.0,1.55507299608,1.91559538471 513 | 1.0,2.35852572,0.891298495187 514 | 0.0,0.421550976781,3.37268409356 515 | 1.0,2.4806908034,-0.19565623073 516 | 
0.0,-1.57211583335,2.20852015052 517 | 1.0,1.66646029285,-0.623982878985 518 | 0.0,-0.772786835748,1.96223676754 519 | 1.0,0.9313388542,0.138474133457 520 | 0.0,1.00218740538,1.83182853206 521 | 1.0,1.90883463333,-0.0983554839721 522 | 0.0,0.528998591744,1.7488597086 523 | 1.0,2.25132138507,-2.73708304902 524 | 0.0,-0.633978039791,1.12610505094 525 | 1.0,2.73728194691,1.36076061021 526 | 0.0,-0.478680613291,2.18783698557 527 | 1.0,2.62238230227,-0.0578214038407 528 | 0.0,1.07489994622,1.26604689934 529 | 1.0,1.24367529187,-1.55788915902 530 | 0.0,-1.8031853124,1.93329264248 531 | 1.0,0.010835667801,-0.494620950412 532 | 0.0,0.520941782678,0.706279934176 533 | 1.0,2.95051280494,1.84944797582 534 | 0.0,1.49366026033,3.10530812271 535 | 1.0,3.40694632281,2.43344293643 536 | 0.0,-1.29733394192,3.71106159441 537 | 1.0,0.115821008615,-0.295276956329 538 | 0.0,-2.1369425999,3.12495248953 539 | 1.0,3.39560713227,0.781871147066 540 | 0.0,-0.400781042951,2.10489679683 541 | 1.0,1.47951405009,-0.37530401513 542 | 0.0,-0.684101257051,1.81946263845 543 | 1.0,2.36997602652,-1.72123221964 544 | 0.0,-0.603628830602,0.512675889674 545 | 1.0,1.38728552207,-1.22970182266 546 | 0.0,-0.226586353694,1.01346746336 547 | 1.0,3.18157457839,0.0926668181591 548 | 0.0,2.30360435676,2.70012334535 549 | 1.0,0.247297356788,-2.35630032469 550 | 0.0,-0.0886650786982,2.01146034976 551 | 1.0,3.57474295692,2.01119195251 552 | 0.0,-1.87516471626,1.68096991304 553 | 1.0,3.94736175852,-0.247240296552 554 | 0.0,-0.951911975812,1.30004307554 555 | 1.0,2.34346716385,0.527389187616 556 | 0.0,1.63976608163,0.595868252904 557 | 1.0,2.81303818368,-0.000655802379835 558 | 0.0,1.6060052692,1.78997343871 559 | 1.0,3.87963107844,-0.306494257704 560 | 0.0,0.188492462811,2.10382762889 561 | 1.0,1.36884704898,1.08199550648 562 | 0.0,-0.496332204315,2.44281532001 563 | 1.0,2.28029205065,0.724807828122 564 | 0.0,-1.54277222907,1.39090074809 565 | 1.0,1.76487874899,0.735617485536 566 | 0.0,-0.664815319168,2.51739028094 
567 | 1.0,1.66076209086,-0.647061839669 568 | 0.0,1.5272192587,3.79209142929 569 | 1.0,0.46513717445,1.49429604624 570 | 0.0,0.65246280951,2.7639541404 571 | 1.0,2.97709764351,0.688740744843 572 | 0.0,1.61322223807,2.50719073429 573 | 1.0,2.67560077398,-0.380317861264 574 | 0.0,-0.337562270381,2.4294279674 575 | 1.0,2.74186054546,-1.03860983728 576 | 0.0,-1.82189091054,1.82872532156 577 | 1.0,3.15784236955,0.468989780236 578 | 0.0,-0.270087339543,2.50417920743 579 | 1.0,2.96665682706,-0.768260123272 580 | 0.0,-0.878741825657,0.903581987959 581 | 1.0,2.66164175157,0.201776360601 582 | 0.0,-0.786083271344,2.59284453074 583 | 1.0,1.73892396409,-0.867156490245 584 | 0.0,0.99188910425,1.87842382698 585 | 1.0,1.94055144398,0.198882417998 586 | 0.0,-0.0322893183724,1.36731359407 587 | 1.0,3.97428343539,0.301795291784 588 | 0.0,-0.118737332297,2.11253655324 589 | 1.0,0.775028520413,0.441995560467 590 | 0.0,-0.461338878182,2.61493867337 591 | 1.0,1.51905799294,-0.494253588809 592 | 0.0,0.771280559927,0.66696532068 593 | 1.0,2.20339894777,1.53238885189 594 | 0.0,0.340892565828,0.512921333712 595 | 1.0,0.767771586319,0.0841333878625 596 | 0.0,-0.775855576936,1.84358232485 597 | 1.0,2.59351847173,1.11190795482 598 | 0.0,-0.0502078654765,0.524325078878 599 | 1.0,2.45796963339,-0.857740605623 600 | 0.0,0.0630519106022,0.320501843666 601 | 1.0,1.90420977879,1.57274791991 602 | 0.0,0.679521236507,2.19944009103 603 | 1.0,3.6116211504,1.28985703859 604 | 0.0,0.303703655173,1.62775654034 605 | 1.0,0.759975759553,0.506502833282 606 | 0.0,-0.482649258448,3.44350859027 607 | 1.0,1.99032422019,-1.17014330184 608 | 0.0,-0.838805459517,1.19848046335 609 | 1.0,1.42730492404,-0.885279063206 610 | 0.0,-0.384272771431,2.54480190479 611 | 1.0,2.46198940585,-0.241952825849 612 | 0.0,-0.303458461591,1.82245173488 613 | 1.0,1.18864581992,0.245148837459 614 | 0.0,0.541800081695,1.60536043589 615 | 1.0,1.03740374182,0.315604802111 616 | 0.0,0.886582534204,2.1481244579 617 | 
1.0,2.43464096021,0.598895752048 618 | 0.0,0.0577825077583,2.35630915838 619 | 1.0,1.64052596645,1.15719282145 620 | 0.0,0.496783898206,3.11777424908 621 | 1.0,1.79212321938,1.13659601715 622 | 0.0,1.82269245137,1.24727216835 623 | 1.0,0.79106064064,0.252729721193 624 | 0.0,-0.0357024853114,1.85989129399 625 | 1.0,1.78573883058,0.24792762874 626 | 0.0,0.672786182558,0.97389652661 627 | 1.0,1.78351806649,0.395910916458 628 | 0.0,1.04114917572,1.75128441339 629 | 1.0,2.35709157036,0.108641379863 630 | 0.0,0.046701869602,-0.413966231611 631 | 1.0,1.53208929863,0.625096434158 632 | 0.0,0.98029712361,2.70432749088 633 | 1.0,2.76140383537,0.366564555547 634 | 0.0,0.0156890022828,3.06093604536 635 | 1.0,3.05498622995,0.29471221953 636 | 0.0,-0.641093451509,1.30787120418 637 | 1.0,2.41395162432,0.302384573892 638 | 0.0,0.293430356674,2.2664511201 639 | 1.0,1.52009189619,0.124121089708 640 | 0.0,-0.476517947999,1.14002382633 641 | 1.0,3.52817299106,-0.523498996339 642 | 0.0,1.04082920108,1.5836212811 643 | 1.0,2.13511420375,-0.486013148829 644 | 0.0,0.101821942108,1.15232635523 645 | 1.0,1.8272527249,0.223747399872 646 | 0.0,-1.35184825165,1.95341419927 647 | 1.0,0.0405126826224,-1.33493088536 648 | 0.0,-0.502478342142,2.95995801002 649 | 1.0,2.35807289989,0.343206772034 650 | 0.0,0.364000767733,4.08670635899 651 | 1.0,0.849606288054,-0.364958636607 652 | 0.0,0.277863435822,2.27073462387 653 | 1.0,1.75491804472,0.382236264942 654 | 0.0,2.61544732003,3.31456794341 655 | 1.0,3.99587599904,-0.832082101062 656 | 0.0,-0.1173158567,2.8051074484 657 | 1.0,2.2410033275,0.684182828315 658 | 0.0,-2.79606478133,2.96736804885 659 | 1.0,2.66584722115,0.00752102004019 660 | 0.0,1.2516860928,1.2629042194 661 | 1.0,1.77262364638,0.995548661584 662 | 0.0,0.481137930157,1.66008522316 663 | 1.0,1.25897069722,1.01732330599 664 | 0.0,-0.120334260384,1.82964254194 665 | 1.0,3.71647331979,-2.68951762289 666 | 0.0,0.184784915464,1.74930772298 667 | 1.0,3.08787284979,-0.60441553695 668 | 
0.0,1.33681874916,2.33309781921 669 | 1.0,3.13545065097,0.122458064661 670 | 0.0,-1.08608366686,2.06474922033 671 | 1.0,1.24765687662,-0.0254714614727 672 | 0.0,-0.826620929866,-0.81342829539 673 | 1.0,0.8946741117,-1.25894280383 674 | 0.0,-0.344760691682,2.0607680044 675 | 1.0,1.18980358106,0.766494525017 676 | 0.0,-0.270105643873,2.35563243782 677 | 1.0,1.93311982269,2.07564594593 678 | 0.0,-0.0895320138566,3.75279948053 679 | 1.0,2.47563351095,-2.18901209526 680 | 0.0,-0.129186574752,1.34127771631 681 | 1.0,0.277028580697,0.159423246987 682 | 0.0,-3.07910975235,3.50262454401 683 | 1.0,1.33645688138,0.522892350544 684 | 0.0,-1.95465955096,1.81319408282 685 | 1.0,2.88335123193,-1.39048062499 686 | 0.0,-0.775994991903,2.94781447484 687 | 1.0,1.32175900842,-2.062083543 688 | 0.0,0.799961092467,2.2412281433 689 | 1.0,1.12977345618,-0.567883999738 690 | 0.0,0.147385746218,0.974418384809 691 | 1.0,2.67420569572,0.876700191718 692 | 0.0,-1.151290624,2.84902816232 693 | 1.0,1.25965722751,0.115750576876 694 | 0.0,-0.244532391043,3.46129837823 695 | 1.0,2.67371369649,-1.52695565865 696 | 0.0,-1.21506978471,1.38361275616 697 | 1.0,3.40777000927,-1.04291841849 698 | 0.0,-1.42234087849,1.98560906369 699 | 1.0,2.1152005324,-0.541469050047 700 | 0.0,-2.1563810318,2.02341265959 701 | 1.0,1.47075060355,0.0751349211483 702 | 0.0,0.266555693736,1.97513267451 703 | 1.0,1.55021125059,-0.612440161056 704 | 0.0,-0.661777601953,0.983124191754 705 | 1.0,0.537565265008,0.786064397788 706 | 0.0,0.515974398137,2.46410862888 707 | 1.0,2.39686226112,0.313953979095 708 | 0.0,-1.88795937338,2.72114706619 709 | 1.0,1.80723971289,-0.522429057079 710 | 0.0,0.0875926332603,2.10380236893 711 | 1.0,1.75196258088,-0.0992455655882 712 | 0.0,0.125894878005,1.71561854886 713 | 1.0,1.91811523936,-0.585023919385 714 | 0.0,-0.620046835682,2.11830175933 715 | 1.0,1.53753428605,0.176526177933 716 | 0.0,-2.19281819944,2.4599489617 717 | 1.0,0.86440297357,0.305437378399 718 | 0.0,-0.898491919739,4.04749644542 
719 | 1.0,2.54621241865,1.39276681473 720 | 0.0,-0.134836764218,0.969141331511 721 | 1.0,0.677012846022,-0.503196975625 722 | 0.0,0.953137188721,2.00966669751 723 | 1.0,0.910473515933,-1.24306979651 724 | 0.0,-1.29122965756,0.177685346943 725 | 1.0,2.58620435265,-0.236829201013 726 | 0.0,0.946737877073,3.04445000076 727 | 1.0,1.46330114705,-0.506460218741 728 | 0.0,1.12011553729,2.28635510144 729 | 1.0,3.12729798231,0.375531776679 730 | 0.0,0.270431675054,2.33940377308 731 | 1.0,1.77413529247,0.888444208216 732 | 0.0,0.535915709644,0.651683668389 733 | 1.0,1.62916314194,0.421595808291 734 | 0.0,-0.102784278089,1.99505539456 735 | 1.0,3.14006716504,0.299629291025 736 | 0.0,-0.807814102184,2.27435103817 737 | 1.0,1.08855737961,-0.217934162888 738 | 0.0,0.805780934951,1.30219820421 739 | 1.0,3.54923572517,-0.87494810273 740 | 0.0,-0.703358485411,1.45431774065 741 | 1.0,3.51334566105,-1.79170785603 742 | 0.0,0.0864413111402,3.98724894942 743 | 1.0,2.29184644843,0.520435474592 744 | 0.0,1.62090351534,3.1955537768 745 | 1.0,3.09277053494,0.247400677549 746 | 0.0,0.0925488283939,0.990134017077 747 | 1.0,1.23776176337,0.374049614445 748 | 0.0,0.984353338801,1.92827416516 749 | 1.0,2.91720879199,-0.608319917647 750 | 0.0,-1.51391723031,1.97481287514 751 | 1.0,2.33438437875,0.434292993579 752 | 0.0,1.00626948273,1.47603600564 753 | 1.0,2.94582224405,0.00618992286917 754 | 0.0,-0.940039165174,1.52644839988 755 | 1.0,1.41005509612,1.60842596995 756 | 0.0,0.697367625949,4.21656308999 757 | 1.0,0.967503059453,-0.637320118214 758 | 0.0,-0.512887673973,1.71949574796 759 | 1.0,1.8591900264,-0.389345982029 760 | 0.0,-1.08926103202,2.16678091736 761 | 1.0,1.882945286,0.801689036203 762 | 0.0,-0.261310036153,0.53428290204 763 | 1.0,0.781855184935,0.899484235889 764 | 0.0,0.520943557271,1.40099896282 765 | 1.0,1.16405549377,1.18260214819 766 | 0.0,-0.470547062712,2.12594285757 767 | 1.0,3.91506105472,0.0587044319451 768 | 0.0,-0.340897994511,2.59040606477 769 | 
1.0,2.04335997992,0.0239984781353 770 | 0.0,-1.55652435316,2.88844935438 771 | 1.0,2.25365080747,-1.58601403351 772 | 0.0,-1.24770224481,2.25628096327 773 | 1.0,-0.883809034174,-0.0401600924649 774 | 0.0,0.345433928468,2.52280835499 775 | 1.0,1.35874472598,1.25785988325 776 | 0.0,-0.112789243854,2.02344042464 777 | 1.0,3.40487016759,1.28387509716 778 | 0.0,-0.812449828936,0.880359425499 779 | 1.0,1.12847775301,-0.0917650021625 780 | 0.0,0.126121136773,0.96966320677 781 | 1.0,3.44106213938,0.659298134892 782 | 0.0,0.583317545137,1.67818053374 783 | 1.0,0.733439488784,0.5800588345 784 | 0.0,-0.230873908695,0.455865940106 785 | 1.0,-0.131901380807,-0.748646121674 786 | 0.0,1.04658744362,3.10889211442 787 | 1.0,1.7212966401,-2.01483407248 788 | 0.0,-1.25793259878,1.68973442715 789 | 1.0,3.83523026874,-1.03776308521 790 | 0.0,-0.72924465883,0.958800604314 791 | 1.0,3.26989715991,0.68767946677 792 | 0.0,-0.446302150233,3.12576408576 793 | 1.0,0.618857725824,-0.465259688626 794 | 0.0,-1.81463673416,2.39380485101 795 | 1.0,2.19437679668,-0.150767791436 796 | 0.0,-1.09318064539,0.973972186845 797 | 1.0,0.948615356617,0.902715184571 798 | 0.0,0.036054849056,1.99744813088 799 | 1.0,3.0495871119,0.383251911014 800 | 0.0,1.45567482952,3.79249653118 801 | 1.0,1.7776473577,-0.36605328313 802 | 0.0,0.159915757211,3.06703920094 803 | 1.0,2.46877321985,2.1398987405 804 | 0.0,-0.623430294455,0.429407270957 805 | 1.0,1.95690139619,0.943906161681 806 | 0.0,-0.947555707636,2.59406710712 807 | 1.0,2.28896250364,0.270837801181 808 | 0.0,-1.59117642579,2.45820846156 809 | 1.0,2.47879809062,-0.271660913708 810 | 0.0,-0.241347965983,2.32954543734 811 | 1.0,0.269747529302,-2.03487563888 812 | 0.0,-0.271636310469,2.89751997346 813 | 1.0,2.4116168482,-1.68568216666 814 | 0.0,-0.664852310094,2.02153607591 815 | 1.0,2.03589399734,-0.811256334891 816 | 0.0,1.6451426676,2.16023845776 817 | 1.0,1.54849034865,0.557225784555 818 | 0.0,2.09222088849,0.233467661989 819 | 1.0,2.41193522267,0.172018686469 
820 | 0.0,-0.787723710417,1.07457613859 821 | 1.0,0.441656717449,0.402827900169 822 | 0.0,-0.85909354376,0.809702855992 823 | 1.0,2.77766451409,0.711899385135 824 | 0.0,-0.15486283632,2.08567011439 825 | 1.0,3.16410652843,0.301935291947 826 | 0.0,1.25818775562,2.17095235824 827 | 1.0,2.66628071096,-1.0672610133 828 | 0.0,0.61556373048,2.22919366156 829 | 1.0,2.39378167291,-2.61124677005 830 | 0.0,0.491370160595,1.00630155092 831 | 1.0,1.14496249408,1.18969489532 832 | 0.0,0.594226201635,3.59367582356 833 | 1.0,1.10772899509,0.884072878825 834 | 0.0,-1.28841891307,1.23706084115 835 | 1.0,1.94499095542,0.226509801669 836 | 0.0,-1.38129594393,1.68932223449 837 | 1.0,2.57317905033,0.0178284813088 838 | 0.0,-1.10320228364,2.71354428798 839 | 1.0,2.94861531741,1.86487789039 840 | 0.0,-0.265907929431,2.8886672245 841 | 1.0,2.22353722708,1.16433365481 842 | 0.0,1.62566356768,1.30888242452 843 | 1.0,1.03959279833,0.0531877759569 844 | 0.0,-0.528106790481,2.45410679039 845 | 1.0,2.81427636452,0.720216600871 846 | 0.0,-0.661322554556,2.76613218471 847 | 1.0,3.22098404542,0.212528813044 848 | 0.0,-1.69308525105,4.27088583989 849 | 1.0,2.12656513832,0.265721138227 850 | 0.0,-0.328068748421,2.28597550628 851 | 1.0,1.73349732016,0.502059408822 852 | 0.0,0.350915661923,3.42263253202 853 | 1.0,2.46781409697,-0.241198451237 854 | 0.0,-0.439103022117,1.30473273208 855 | 1.0,2.69415018165,-0.728908692685 856 | 0.0,-0.601495381934,3.72616442113 857 | 1.0,3.81153687119,0.476742760484 858 | 0.0,-0.919527519684,1.57719535975 859 | 1.0,2.91306648369,-0.574081480105 860 | 0.0,-0.561563034141,1.12637288764 861 | 1.0,2.79996011803,-0.651147818814 862 | 0.0,2.68881376757,1.77059692728 863 | 1.0,3.18792627624,-0.564343507128 864 | 0.0,0.224205088493,0.848255147507 865 | 1.0,3.6459588836,1.07458040491 866 | 0.0,1.2754160719,1.3409135363 867 | 1.0,0.926938555952,1.40278815739 868 | 0.0,1.96129590122,2.79893082492 869 | 1.0,2.59809253773,1.29597047207 870 | 0.0,-0.667507476558,2.16742967836 871 | 
1.0,2.1448407567,-0.69289653447 872 | 0.0,-0.368667243464,2.72735252724 873 | 1.0,2.32763521414,1.78012718287 874 | 0.0,-0.484117060232,1.51637442674 875 | 1.0,3.39196078102,1.07440168861 876 | 0.0,0.882286553222,2.20954742066 877 | 1.0,2.96404541527,0.139294315249 878 | 0.0,0.0751525494288,1.46209259579 879 | 1.0,2.65808754924,-1.06802923979 880 | 0.0,0.312868068362,2.87459854278 881 | 1.0,1.92543517875,-0.282359547955 882 | 0.0,0.28681974972,2.71359079175 883 | 1.0,3.66101307514,0.547246137274 884 | 0.0,1.54671133244,2.76683318036 885 | 1.0,1.45065245646,-0.809151067806 886 | 0.0,2.10413520336,2.63821651309 887 | 1.0,1.15526433602,0.439649272461 888 | 0.0,-1.23743345217,0.974871848676 889 | 1.0,2.18049693135,-0.00460652176368 890 | 0.0,0.182200002745,2.02603079236 891 | 1.0,1.81263007191,0.703220210501 892 | 0.0,0.320866460109,0.888439743285 893 | 1.0,2.63906847118,-0.334633129909 894 | 0.0,-1.01119364364,1.62627516512 895 | 1.0,0.290645226344,1.26379776522 896 | 0.0,-0.31160788609,2.76098986208 897 | 1.0,2.8597016484,1.22411917424 898 | 0.0,2.47069299392,3.16508516014 899 | 1.0,-0.601496805002,0.771399569905 900 | 0.0,0.813535332428,2.55048162523 901 | 1.0,2.62836945651,-2.59831870207 902 | 0.0,0.989825018746,1.42858525524 903 | 1.0,0.112875843593,1.53743548082 904 | 0.0,-0.672955904478,1.09451560597 905 | 1.0,1.27666046825,0.112106603795 906 | 0.0,0.771428878861,2.76067848205 907 | 1.0,1.59520165485,0.289483543527 908 | 0.0,-0.710815702191,3.33746803737 909 | 1.0,2.29911177125,-0.849705384238 910 | 0.0,0.485290718075,1.81420251014 911 | 1.0,2.38691625992,-0.00447371519169 912 | 0.0,-1.46758146747,-0.932779020786 913 | 1.0,2.1789564048,0.0250219955608 914 | 0.0,0.393133745469,1.70825641701 915 | 1.0,1.59168225671,-1.79719269928 916 | 0.0,-2.16645211546,2.30866657739 917 | 1.0,1.15269445774,0.631945303186 918 | 0.0,-0.566714269417,3.71392018308 919 | 1.0,3.31313357485,1.02985830404 920 | 0.0,-0.281242055313,1.27851185686 921 | 1.0,1.74181838564,0.481261909155 922 
| 0.0,1.08143608896,1.96809855004 923 | 1.0,2.61991219914,-0.298428469952 924 | 0.0,-1.20890991886,3.07288479626 925 | 1.0,1.73926646131,-0.578131038682 926 | 0.0,0.788110182291,1.35269731204 927 | 1.0,1.80669985509,0.64729857159 928 | 0.0,-0.360287346599,2.89186885921 929 | 1.0,0.246946226317,-0.653159858155 930 | 0.0,-0.755982675122,2.60367156851 931 | 1.0,2.81526658944,-0.744376989507 932 | 0.0,-1.19402646975,1.92899498322 933 | 1.0,3.04752954039,0.460106816818 934 | 0.0,-0.0192180644028,1.26294578433 935 | 1.0,4.92567580566,0.327928959954 936 | 0.0,-0.591377785589,3.240402618 937 | 1.0,2.16274753869,0.995726150989 938 | 0.0,0.03655900456,2.40207650314 939 | 1.0,2.94367051759,0.77873837601 940 | 0.0,-0.162988297345,1.04779852146 941 | 1.0,-0.20972974086,0.47699586095 942 | 0.0,-0.610167607995,1.06895743991 943 | 1.0,1.32242545269,-0.315036511766 944 | 0.0,0.148304928437,2.25707802907 945 | 1.0,1.17093247335,0.687738103834 946 | 0.0,-0.192858911331,2.45128513497 947 | 1.0,2.44255446244,1.59907850226 948 | 0.0,0.851659546004,2.63850517067 949 | 1.0,2.78693221145,-0.567521856374 950 | 0.0,0.735126564226,4.2239703739 951 | 1.0,1.22577793169,1.03541229713 952 | 0.0,0.00862305784267,0.557813755553 953 | 1.0,2.86834519962,1.20293010584 954 | 0.0,1.49949274433,0.773438987281 955 | 1.0,2.79825889643,-1.14849893528 956 | 0.0,-0.0271475893986,1.46842024887 957 | 1.0,0.865443766613,-0.884327580424 958 | 0.0,0.638344262957,3.56384399295 959 | 1.0,2.68068889033,-0.577858231363 960 | 0.0,-0.00813788081765,2.58070231447 961 | 1.0,2.35656094104,0.376602032189 962 | 0.0,-0.423016422571,1.62033867724 963 | 1.0,2.68627616572,1.62620659341 964 | 0.0,-0.809832311629,4.08199984523 965 | 1.0,1.78347320373,0.348444406301 966 | 0.0,-0.533372444076,1.25656952426 967 | 1.0,0.0815107664956,0.65425637493 968 | 0.0,0.21940354372,2.48726064495 969 | 1.0,1.079081518,-1.41154641364 970 | 0.0,0.17975639955,1.63185236049 971 | 1.0,2.80405766933,0.11781688093 972 | 
0.0,0.00353068178194,2.05096843216 973 | 1.0,2.92335642627,-1.23958489459 974 | 0.0,0.317519903518,2.66296009003 975 | 1.0,2.40931288581,-1.84555282015 976 | 0.0,2.29511719066,2.07588965474 977 | 1.0,2.5565250589,-1.63727016887 978 | 0.0,-0.604491082993,2.07633713062 979 | 1.0,2.69400351423,0.962925121041 980 | 0.0,0.619291168995,2.20537691985 981 | 1.0,1.99206696446,-0.403151714355 982 | 0.0,1.45019647489,1.18715582225 983 | 1.0,2.9648885846,1.54695547141 984 | 0.0,1.55299209039,2.97136892421 985 | 1.0,2.84085708074,-0.00115428921141 986 | 0.0,-0.916125041913,1.95959703439 987 | 1.0,3.48350971579,1.51200090614 988 | 0.0,-0.0754274706403,1.84172976061 989 | 1.0,2.96045622155,-0.620827422636 990 | 0.0,-1.72203843856,2.90540268589 991 | 1.0,2.34660853077,-1.13040007415 992 | 0.0,1.4054815571,1.62208844092 993 | 1.0,-0.72769605766,0.218470232384 994 | 0.0,0.00510707747473,1.98628170341 995 | 1.0,1.23680633544,-0.527192183543 996 | 0.0,-0.0605718320019,-0.230167722189 997 | 1.0,2.18121970475,0.32275772464 998 | 0.0,-0.678020363374,2.97796112323 999 | 1.0,2.97423187907,1.65802574456 1000 | 0.0,-1.02675785952,0.780660511934 1001 | 1.0,2.68076172233,1.11234259192 1002 | 0.0,-0.964068894251,2.8087569853 1003 | 1.0,1.74263785713,0.3871530399 1004 | 0.0,0.675918158207,2.68077991351 1005 | 1.0,2.24054932966,-0.482907681831 1006 | 0.0,1.54045032704,2.51320759479 1007 | 1.0,0.276218358593,-1.86099141923 1008 | 0.0,-1.19992987882,2.32769873069 1009 | 1.0,2.83787753982,0.979402202897 1010 | 0.0,0.450266745614,1.94361845858 1011 | 1.0,1.08449241397,-0.85970466445 1012 | 0.0,0.966060386928,1.8235434674 1013 | 1.0,1.43719934875,0.456727733174 1014 | 0.0,0.214086516124,2.10640933007 1015 | 1.0,0.694677417159,0.133734369949 1016 | 0.0,-0.164017070935,2.82345515948 1017 | 1.0,4.09137011045,-0.0758261848301 1018 | 0.0,0.66095142311,2.93911914039 1019 | 1.0,3.13419486867,0.411150653073 1020 | 0.0,0.928949929973,0.489861455319 1021 | 1.0,2.51357554433,-0.429814142461 1022 | 
0.0,-1.08793076797,2.39857070159 1023 | 1.0,3.27844746069,-1.16811027165 1024 | 0.0,0.626004088261,1.74943265919 1025 | 1.0,1.48246656997,-0.411208965419 1026 | 0.0,1.39641571548,1.35441331878 1027 | 1.0,2.94051393837,-2.25166722197 1028 | 0.0,-0.648939727843,2.79381591963 1029 | 1.0,2.24234820101,-1.85413952434 1030 | 0.0,-0.228887690071,0.387349027321 1031 | 1.0,2.54774925524,-1.31668354096 1032 | 0.0,0.459552469496,-0.0306228895327 1033 | 1.0,3.25719558366,0.422082243022 1034 | 0.0,-2.63667210799,3.14941497523 1035 | 1.0,2.3136634298,-0.542837799471 1036 | 0.0,-0.0231603493773,0.643686331726 1037 | 1.0,2.88420540394,-0.208337157534 1038 | 0.0,0.624815413238,0.574372946769 1039 | 1.0,3.07688717882,-0.841563871947 1040 | 0.0,0.713045062159,1.40729581076 1041 | 1.0,3.06185415624,0.214384723406 1042 | 0.0,1.07874881518,2.10763726517 1043 | 1.0,1.44067510159,0.990557332691 1044 | 0.0,-0.847891189018,0.952275216369 1045 | 1.0,2.91777927999,-0.111003990062 1046 | 0.0,0.44673545947,1.98947351507 1047 | 1.0,4.23674207071,-0.464316723481 1048 | 0.0,-0.286593534343,3.39724549466 1049 | 1.0,1.95524343686,-2.16207176487 1050 | 0.0,-2.21937827395,2.26984427741 1051 | 1.0,3.59025571015,1.20088060845 1052 | 0.0,-0.983391106905,1.4957194641 1053 | 1.0,2.55143113487,-0.671644008219 1054 | 0.0,-0.388074164281,1.78469591194 1055 | 1.0,1.33664847774,0.647268854389 1056 | 0.0,0.321556338214,1.5209901131 1057 | 1.0,3.06610353032,-0.88244318158 1058 | 0.0,-0.33210408171,2.61953390093 1059 | 1.0,2.70856134223,-0.230873378667 1060 | 0.0,-0.629794044172,2.53414696422 1061 | 1.0,3.84689496408,1.61410675632 1062 | 0.0,0.600768539915,2.95243344756 1063 | 1.0,1.87698749207,1.37422876072 1064 | 0.0,0.477089478841,0.853812603019 1065 | 1.0,2.55673209473,-0.0969838535082 1066 | 0.0,-2.19518802634,1.81908570691 1067 | 1.0,1.9250443735,-0.583981919819 1068 | 0.0,-1.99357599862,1.10410149948 1069 | 1.0,2.8576337648,-0.35688554927 1070 | 0.0,-0.380067421798,2.02715325949 1071 | 
1.0,1.77562205093,-0.394283518622 1072 | 0.0,1.23872574289,2.49743782883 1073 | 1.0,2.74914606035,-0.72047900465 1074 | 0.0,-1.76525407297,2.76639617396 1075 | 1.0,2.88881913497,-1.21527973293 1076 | 0.0,0.622641583975,2.16184683395 1077 | 1.0,1.87025769486,1.23560723493 1078 | 0.0,0.42206727003,2.62598749307 1079 | 1.0,-0.409061363633,0.944131737408 1080 | 0.0,-0.142663620823,2.85347891218 1081 | 1.0,2.57003299927,0.30088104312 1082 | 0.0,0.0779620822693,0.954651429141 1083 | 1.0,2.55492471016,0.0393811819172 1084 | 0.0,0.117455407857,2.30986728606 1085 | 1.0,2.74529514436,-0.425060684686 1086 | 0.0,-0.0271044105067,2.70596376573 1087 | 1.0,0.16395747753,-0.516126090246 1088 | 0.0,-1.41531095546,2.63324617842 1089 | 1.0,2.41216128491,0.693312028341 1090 | 0.0,0.426211763134,2.59849628732 1091 | 1.0,2.82067099442,-0.223697795895 1092 | 0.0,0.570936147383,3.28217651706 1093 | 1.0,3.68598273667,-1.0804781383 1094 | 0.0,-1.73501120224,1.3819982316 1095 | 1.0,0.908929174703,-1.68390264298 1096 | 0.0,0.39464821848,3.17747670803 1097 | 1.0,4.20699396179,0.435341247187 1098 | 0.0,0.503071173534,1.66144631559 1099 | 1.0,1.43719093698,0.867866841494 1100 | 0.0,0.0139404387285,2.31310268847 1101 | 1.0,2.34668685241,0.738684068766 1102 | 0.0,0.459694081102,0.226236860247 1103 | 1.0,2.50559286684,0.45440795494 1104 | 0.0,0.252150113152,2.71445487339 1105 | 1.0,1.53757212123,0.0276545201145 1106 | 0.0,0.68202234402,0.185747911261 1107 | 1.0,2.21684038396,0.517639149102 1108 | 0.0,0.565735292361,1.90339680422 1109 | 1.0,1.53243326179,0.355572165252 1110 | 0.0,0.137164746535,2.40579835857 1111 | 1.0,2.97642217482,1.08289187362 1112 | 0.0,-0.838078666165,2.64180293746 1113 | 1.0,2.10110071464,-2.5299175303 1114 | 0.0,0.200045035921,2.30761629794 1115 | 1.0,2.08430687853,0.229981778816 1116 | 0.0,0.838909019249,1.75145298218 1117 | 1.0,0.0252884692361,-0.723917936097 1118 | 0.0,-1.07483391246,3.40582207245 1119 | 1.0,1.9650488364,0.306694683683 1120 | 
0.0,-0.0107934031358,2.17147480416 1121 | 1.0,2.21649749201,-0.932851649735 1122 | 0.0,-0.0301023117929,0.379843491301 1123 | 1.0,0.836585255498,0.0859055306486 1124 | 0.0,-0.00782146260776,2.19642193478 1125 | 1.0,1.32100095396,0.168222004588 1126 | 0.0,-0.40024760018,2.85819001463 1127 | 1.0,2.06596681041,-0.967038454832 1128 | 0.0,1.12090109714,1.7177039638 1129 | 1.0,1.01512725694,0.315020054648 1130 | 0.0,0.942738004037,4.76238797585 1131 | 1.0,3.1068074009,0.803080107169 1132 | 0.0,0.496171678247,0.193897421656 1133 | 1.0,1.60527687158,0.827792699717 1134 | 0.0,0.966680054795,2.28960041023 1135 | 1.0,2.04087505809,-1.05063404544 1136 | 0.0,0.584201161185,1.37950338104 1137 | 1.0,0.632735237842,1.90595264907 1138 | 0.0,-0.0988800005962,2.42112244179 1139 | 1.0,1.99747753346,0.702346598933 1140 | 0.0,-2.04820363877,-0.0570499000158 1141 | 1.0,2.76219522549,1.21231389135 1142 | 0.0,-0.486856655562,4.30316161677 1143 | 1.0,1.88951714647,0.166197539754 1144 | 0.0,0.231985430181,2.23166202628 1145 | 1.0,0.762316778359,-1.224601279 1146 | 0.0,0.0316336964382,2.85370053554 1147 | 1.0,1.26294737172,-0.625248480804 1148 | 0.0,-2.15629463848,1.32907965947 1149 | 1.0,3.64958493508,-0.832927777223 1150 | 0.0,0.792080117557,0.877001739914 1151 | 1.0,1.78495414386,-0.269891968826 1152 | 0.0,-0.321517229212,2.19593903268 1153 | 1.0,0.220898370602,-0.0607682997183 1154 | 0.0,-1.46226356953,0.657640892283 1155 | 1.0,2.50811605276,-1.24057697987 1156 | 0.0,-0.650308972534,2.68314435557 1157 | 1.0,2.08760438734,-1.02962795204 1158 | 0.0,0.717306798425,3.90706227621 1159 | 1.0,1.05744388405,-0.271288259775 1160 | 0.0,-0.310795300221,0.994841536377 1161 | 1.0,3.27724591526,1.19607072479 1162 | 0.0,-1.20158594612,1.50030372844 1163 | 1.0,2.48921138736,-1.34604448454 1164 | 0.0,1.38583515781,2.31992764531 1165 | 1.0,2.94372516484,0.0628061287042 1166 | 0.0,-0.466188657363,0.962731517178 1167 | 1.0,2.00359503169,0.767654078465 1168 | 0.0,1.21545075471,0.803550518075 1169 | 
1.0,2.37918769921,1.17801250764 1170 | 0.0,-0.194911406847,1.61655641012 1171 | 1.0,3.25273442753,0.305276859168 1172 | 0.0,1.03514972329,2.1267783677 1173 | 1.0,0.901275179219,1.06534239561 1174 | 0.0,-1.69181984713,1.02543891258 1175 | 1.0,2.41247353949,-1.47350539623 1176 | 0.0,0.601727945323,1.57573003819 1177 | 1.0,2.15520944868,0.993395174826 1178 | 0.0,0.88964809389,2.8544683878 1179 | 1.0,2.707314789,0.626515071127 1180 | 0.0,-1.46375872965,1.0787157377 1181 | 1.0,1.69030028207,-0.300993352364 1182 | 0.0,0.976133675686,1.6539956383 1183 | 1.0,2.10802247494,-0.173039535038 1184 | 0.0,-0.530494606137,1.5074640248 1185 | 1.0,2.4067255159,1.4629142124 1186 | 0.0,-0.092259200739,1.55493116615 1187 | 1.0,2.19245317174,0.964106503938 1188 | 0.0,-0.662480217983,2.27429777948 1189 | 1.0,1.06787341324,0.0176135339736 1190 | 0.0,1.00673183557,3.2599211052 1191 | 1.0,1.2710936538,0.0911632075477 1192 | 0.0,-1.20692071242,2.74259919934 1193 | 1.0,3.16834413465,0.909804597145 1194 | 0.0,1.43889734083,0.722557908988 1195 | 1.0,1.61429321883,2.3451059839 1196 | 0.0,-0.759451373807,1.08396114255 1197 | 1.0,0.921656255864,-0.31071628817 1198 | 0.0,0.729164535181,3.01407656145 1199 | 1.0,1.5955040894,-0.605694515034 1200 | 0.0,-0.316023237316,2.89719358213 1201 | 1.0,2.77045159187,-0.304461260459 1202 | 0.0,0.806735676257,1.50543917251 1203 | 1.0,2.54687672766,-0.894652826636 1204 | 0.0,1.11023372269,2.23166698935 1205 | 1.0,0.9586079588,-0.706436906781 1206 | 0.0,0.287316424512,2.60364711697 1207 | 1.0,-0.682980226922,0.597253596215 1208 | 0.0,0.752502373096,1.96477590509 1209 | 1.0,2.0762938458,0.491914681068 1210 | 0.0,-0.418443144023,1.26022418083 1211 | 1.0,2.08878551798,-1.64518111047 1212 | 0.0,0.408441981648,2.31837026818 1213 | 1.0,1.43776838699,-0.948639462987 1214 | 0.0,0.427172270484,1.99601068278 1215 | 1.0,2.53826773671,1.65203647614 1216 | 0.0,-0.606956348861,1.0653955708 1217 | 1.0,1.86900679656,-0.028644010025 1218 | 0.0,0.0909578704308,2.60201353022 1219 | 
1.0,1.15700111301,0.194460544962 1220 | 0.0,-0.19386790023,0.889379532856 1221 | 1.0,1.41952991424,-0.173124697835 1222 | 0.0,-0.498967138207,2.95574938652 1223 | 1.0,3.77317822458,0.40063617496 1224 | 0.0,-0.466205375656,1.34505017312 1225 | 1.0,0.812696717095,-1.43865183324 1226 | 0.0,2.22222977918,2.23149737018 1227 | 1.0,1.89334883603,0.949474196173 1228 | 0.0,-1.04848337688,2.09584085887 1229 | 1.0,1.59304877705,-1.36169112709 1230 | 0.0,0.473222230538,2.8214018202 1231 | 1.0,0.721799671782,-0.535854591231 1232 | 0.0,-0.479102553137,1.99285579551 1233 | 1.0,2.69306012051,1.56758230533 1234 | 0.0,0.0067430231211,0.912420615158 1235 | 1.0,0.766382093158,-1.070393264 1236 | 0.0,0.403848203188,1.33029336591 1237 | 1.0,1.87846101871,-0.109358717661 1238 | 0.0,-0.323231569332,3.04197995025 1239 | 1.0,3.29128257256,1.07175870738 1240 | 0.0,0.379382303139,2.34685297093 1241 | 1.0,1.60053428786,1.55468485751 1242 | 0.0,-0.783940794088,1.81112080782 1243 | 1.0,1.04476008546,0.0756767049854 1244 | 0.0,-1.43032110237,3.22156286478 1245 | 1.0,1.67052842576,0.0660295099019 1246 | 0.0,-1.51416077563,1.88439662293 1247 | 1.0,0.189584186216,-1.28148630755 1248 | 0.0,-0.306531455402,1.49978878147 1249 | 1.0,0.695434526251,0.238573609903 1250 | 0.0,-0.448166532197,1.02511316093 1251 | 1.0,0.00290383274064,-0.290751084486 1252 | 0.0,1.47418570902,1.59340471608 1253 | 1.0,2.84206202118,1.12506772792 1254 | 0.0,0.109397546878,2.82987828034 1255 | 1.0,3.69671623492,0.0396049141899 1256 | 0.0,0.143800105236,2.61237385881 1257 | 1.0,1.20652404875,0.57089669805 1258 | 0.0,-1.08730758724,1.81702082035 1259 | 1.0,2.130419648,-0.342372168049 1260 | 0.0,-0.862249110479,2.22141970402 1261 | 1.0,0.999288397882,-2.21404684778 1262 | 0.0,-1.23719548915,0.697359834524 1263 | 1.0,0.765567898283,0.157680828878 1264 | 0.0,-0.262072655861,1.861498052 1265 | 1.0,3.04772595309,-0.42902904931 1266 | 0.0,-1.68758749414,1.00054482142 1267 | 1.0,1.80255508983,-1.28479884825 1268 | 
0.0,1.47965088164,1.33081997014 1269 | 1.0,0.51916779866,0.767913090862 1270 | 0.0,-1.10946900323,0.761349586483 1271 | 1.0,3.54859891432,1.38071313325 1272 | 0.0,-0.304844794576,1.43730691169 1273 | 1.0,1.53665242395,1.26804612981 1274 | 0.0,-2.57345082923,0.811532179743 1275 | 1.0,0.585812075827,-0.602452931056 1276 | 0.0,-0.575990215671,5.15197400489 1277 | 1.0,1.49917322561,0.656996493747 1278 | 0.0,0.549863595451,2.39108530174 1279 | 1.0,1.63065854357,-1.18722488155 1280 | 0.0,1.21487691325,3.39402104011 1281 | 1.0,0.677072391389,-0.871498565828 1282 | 0.0,3.04093092335,1.48758437313 1283 | 1.0,4.08018117386,0.594175384247 1284 | 0.0,-1.17121836851,0.716206145261 1285 | 1.0,2.14828635545,0.215484752264 1286 | 0.0,0.147401159766,3.08066997641 1287 | 1.0,0.714360756656,1.45301208647 1288 | 0.0,0.0381504660789,1.33400779515 1289 | 1.0,1.97388922717,-2.33660253449 1290 | 0.0,-0.993396522349,1.93358412389 1291 | 1.0,2.64292123178,-0.00671660985447 1292 | 0.0,0.24590044671,3.19838291773 1293 | 1.0,2.53570681409,0.0273556170514 1294 | 0.0,2.25969751759,2.0496022143 1295 | 1.0,1.21467661561,0.53919098141 1296 | 0.0,-0.773910368787,2.36243782502 1297 | 1.0,2.05164320551,-0.0177721376486 1298 | 0.0,-0.0198186125009,2.36698905939 1299 | 1.0,2.00754550439,2.15175182888 1300 | 0.0,-1.27108946767,0.529874773551 1301 | 1.0,-0.0247595343063,0.438490244318 1302 | 0.0,-0.474858210545,2.69641346253 1303 | 1.0,2.17285894669,1.16540379174 1304 | 0.0,1.20197309436,3.10917926062 1305 | 1.0,0.966290559622,-0.0130277199303 1306 | 0.0,2.77457007742,1.44489442143 1307 | 1.0,3.9205998616,0.514683345309 1308 | 0.0,-1.18074600543,3.0075305373 1309 | 1.0,2.83253723548,-0.18134241154 1310 | 0.0,-0.0590222132158,1.57891345132 1311 | 1.0,3.26168801572,-0.260939120219 1312 | 0.0,0.155811608209,1.68193831767 1313 | 1.0,1.11655516596,-0.517696921012 1314 | 0.0,-1.94348331559,1.17395898584 1315 | 1.0,2.79860316556,-0.0882201643726 1316 | 0.0,0.707355060091,0.250117230171 1317 | 
1.0,3.2418294671,-0.400717626689 1318 | 0.0,0.74728663002,3.80243203654 1319 | 1.0,0.913835236631,-0.541228165058 1320 | 0.0,-0.528995007426,2.8153766525 1321 | 1.0,0.761531724372,-0.0773944629795 1322 | 0.0,0.411688577435,2.59886681924 1323 | 1.0,1.56019033542,1.14815394427 1324 | 0.0,-0.321991604253,1.61834876547 1325 | 1.0,2.75797865607,0.563953499138 1326 | 0.0,-1.62746919526,1.39232381961 1327 | 1.0,1.57356640701,-2.01160215447 1328 | 0.0,0.320669170838,2.45167911142 1329 | 1.0,1.84393418985,1.16094794954 1330 | 0.0,-0.718603628555,0.412358985298 1331 | 1.0,3.42755660048,-1.46229191075 1332 | 0.0,0.740145565833,2.65196945465 1333 | 1.0,1.9141206369,0.114764440633 1334 | 0.0,-0.189982997292,3.52560932823 1335 | 1.0,1.09299924422,-0.383855357454 1336 | 0.0,-0.00416820959935,3.07958711335 1337 | 1.0,1.67923513078,-1.28132103814 1338 | 0.0,1.09435305548,3.05071659416 1339 | 1.0,1.82906157144,-0.264312476094 1340 | 0.0,-2.15736672866,1.03494267861 1341 | 1.0,2.49637266459,-1.97829455031 1342 | 0.0,0.475947752035,1.44916235651 1343 | 1.0,3.85090417093,0.387113179402 1344 | 0.0,1.60447124219,2.06711990363 1345 | 1.0,2.22520997199,2.1883293175 1346 | 0.0,-1.31484495312,2.34598214384 1347 | 1.0,2.52969792119,-0.165849686764 1348 | 0.0,0.387573774739,2.52722428864 1349 | 1.0,1.48001661385,-0.53147498956 1350 | 0.0,-0.249014668106,1.57764663959 1351 | 1.0,3.44594768975,-0.875292573022 1352 | 0.0,-0.213364239149,3.18168493449 1353 | 1.0,2.62782445472,0.431754731365 1354 | 0.0,0.603383688905,0.992757469263 1355 | 1.0,2.27359837621,0.4314623936 1356 | 0.0,0.838726221141,1.59275593172 1357 | 1.0,3.35028903839,2.1615828155 1358 | 0.0,2.12628025424,1.48976649232 1359 | 1.0,2.93531706255,-0.341434123589 1360 | 0.0,0.232717986097,1.74578410835 1361 | 1.0,0.518192998857,2.20347529643 1362 | 0.0,-0.78780051689,2.05819687554 1363 | 1.0,2.69919882251,-0.000821622516704 1364 | 0.0,1.05431215503,2.97804666414 1365 | 1.0,2.65335261599,-0.229474083023 1366 | 
0.0,-0.283077726651,3.03249759151 1367 | 1.0,2.63936479149,-1.14752548031 1368 | 0.0,0.0180181324333,2.87081536651 1369 | 1.0,2.84015617142,0.438769265232 1370 | 0.0,1.43718302832,0.482458050931 1371 | 1.0,1.42432028164,-0.0281965554896 1372 | 0.0,0.15751450933,2.25858205124 1373 | 1.0,2.32750341167,0.656128227628 1374 | 0.0,0.111361089469,1.80746165537 1375 | 1.0,3.20408682117,-1.94248661348 1376 | 0.0,-0.683013355673,1.90878172611 1377 | 1.0,1.19988861531,0.0371615919587 1378 | 0.0,-0.112122816725,0.136499402799 1379 | 1.0,1.74357053569,-0.637522014748 1380 | 0.0,0.938805588986,1.9285702353 1381 | 1.0,1.3913028845,0.473138461159 1382 | 0.0,0.205334914026,2.13034647181 1383 | 1.0,2.76768028723,-1.0103163792 1384 | 0.0,1.76867288844,1.22012493292 1385 | 1.0,1.73972373628,0.450001414626 1386 | 0.0,1.30346283511,1.93742273562 1387 | 1.0,1.4048903569,-0.611785902766 1388 | 0.0,0.305902390786,1.86230045553 1389 | 1.0,0.540723545111,0.408842966706 1390 | 0.0,-0.76055891606,1.18852446322 1391 | 1.0,3.06730013223,1.1599317178 1392 | 0.0,-1.84387325529,2.46137576423 1393 | 1.0,0.639832357404,-0.700009495022 1394 | 0.0,-1.00506418212,2.44760850924 1395 | 1.0,2.42314209462,-2.16811108725 1396 | 0.0,-0.53283801445,1.79572902249 1397 | 1.0,1.90460850902,-0.190420572222 1398 | 0.0,-0.0202429227223,1.0010612998 1399 | 1.0,2.02831651101,-0.950771972559 1400 | 0.0,0.341103511874,1.76688274562 1401 | 1.0,2.39475992312,-0.801632601704 1402 | 0.0,0.230452697826,4.12060237459 1403 | 1.0,2.08004244512,-0.16546124883 1404 | 0.0,-0.863392742243,3.11121092235 1405 | 1.0,2.94799323063,-0.9721258816 1406 | 0.0,0.150109035942,2.98375287706 1407 | 1.0,2.20249604031,-0.863353664489 1408 | 0.0,0.890634159448,2.38041758605 1409 | 1.0,2.27426631398,-0.665717642481 1410 | 0.0,1.14947516638,2.34024452503 1411 | 1.0,1.88426707076,-0.295820585312 1412 | 0.0,0.320374945039,1.50710268017 1413 | 1.0,2.46079448426,-0.470339784789 1414 | 0.0,-1.60565819108,1.92715013402 1415 | 
1.0,1.66856266036,0.315216013713 1416 | 0.0,0.124585398354,1.54348306997 1417 | 1.0,3.528509183,-0.263654169258 1418 | 0.0,-0.608986991923,1.18645212688 1419 | 1.0,1.98741970088,1.01128851324 1420 | 0.0,1.52842862881,2.54906759157 1421 | 1.0,0.77074552848,-2.19649044584 1422 | 0.0,-0.33718303598,2.94953646346 1423 | 1.0,1.46302538645,0.735534224332 1424 | 0.0,-0.249010800511,0.941881163289 1425 | 1.0,1.95717999237,1.04091555519 1426 | 0.0,1.3633462416,1.05500315619 1427 | 1.0,1.14608002039,2.20667416036 1428 | 0.0,0.713399777205,2.54592871137 1429 | 1.0,1.88695387483,-0.103050229063 1430 | 0.0,0.0843606648202,0.583176366307 1431 | 1.0,3.31375401839,1.43047375017 1432 | 0.0,1.13392016266,0.988946623132 1433 | 1.0,-0.552450328201,0.00773052434494 1434 | 0.0,-0.260740792064,2.18883253133 1435 | 1.0,2.69755883671,-2.46630438678 1436 | 0.0,1.17009710706,1.54630669925 1437 | 1.0,2.58513340934,1.18549447569 1438 | 0.0,-0.667187085135,0.710587155408 1439 | 1.0,1.11210177086,0.176953869223 1440 | 0.0,-0.588838702194,2.16249422234 1441 | 1.0,1.92899733708,1.4185915664 1442 | 0.0,1.17658838436,0.792123327171 1443 | 1.0,3.792801485,-0.502940513692 1444 | 0.0,0.192743626629,2.10686239284 1445 | 1.0,1.31073966047,1.19217733771 1446 | 0.0,-1.28190489052,1.0936793676 1447 | 1.0,1.23959161181,-0.366134868175 1448 | 0.0,0.0322687734562,2.77617279114 1449 | 1.0,0.720386844861,0.956535295907 1450 | 0.0,0.896672734718,2.54855782887 1451 | 1.0,2.87741262578,-1.43862875401 1452 | 0.0,-1.16146528912,3.03528959979 1453 | 1.0,1.69103517495,1.76977998724 1454 | 0.0,-0.642224635445,1.43648508155 1455 | 1.0,2.48613898986,1.31168563846 1456 | 0.0,1.04933047938,1.64240582997 1457 | 1.0,1.67516251784,-0.529486228379 1458 | 0.0,-0.419937991851,1.45152226527 1459 | 1.0,1.718953079,0.898210334347 1460 | 0.0,-0.767967988945,1.22557595285 1461 | 1.0,0.727299061702,-1.33600137771 1462 | 0.0,-0.158507303039,2.1104908588 1463 | 1.0,1.34727703956,0.489506140706 1464 | 0.0,1.52786432062,0.423394882728 1465 
| 1.0,2.93704156457,-0.125066451321 1466 | 0.0,-1.26363011723,1.15212627536 1467 | 1.0,3.23635108585,-1.44669930896 1468 | 0.0,-1.29325598057,2.25404555177 1469 | 1.0,2.89337725688,0.661454634075 1470 | 0.0,-0.680342192131,2.20357270702 1471 | 1.0,1.77937945105,0.597661636943 1472 | 0.0,-0.776359838491,2.66353896889 1473 | 1.0,2.80078714857,-1.12204692049 1474 | 0.0,-1.05805712042,1.13170645705 1475 | 1.0,0.914837699655,-0.29915618213 1476 | 0.0,0.874077490387,2.70658556642 1477 | 1.0,1.18580855008,1.11144901391 1478 | 0.0,-0.0868776273946,3.24040986603 1479 | 1.0,0.650168989922,-0.251501481875 1480 | 0.0,-1.27361088092,2.85533197077 1481 | 1.0,2.1712348675,0.122463069185 1482 | 0.0,0.522037201118,0.446986125324 1483 | 1.0,2.86736151337,-0.202029634943 1484 | 0.0,0.706329325636,1.20091196282 1485 | 1.0,2.43064025587,-0.602360672324 1486 | 0.0,-0.391040816459,1.97868979676 1487 | 1.0,2.27761376277,-0.661372711329 1488 | 0.0,1.24875612633,1.91215680594 1489 | 1.0,1.20571850631,-0.882373135879 1490 | 0.0,1.78764954494,-0.152254318905 1491 | 1.0,-0.152765551299,0.815514587907 1492 | 0.0,-0.330139380408,2.27216592381 1493 | 1.0,2.96482473772,0.0248482791355 1494 | 0.0,-0.0403995812133,0.704831339688 1495 | 1.0,3.63430807897,0.231272977399 1496 | 0.0,-0.864660712392,1.73822205387 1497 | 1.0,2.23581159877,1.97493563256 1498 | 0.0,-0.257374126394,2.10359433754 1499 | 1.0,3.60552343642,0.174001680502 1500 | 0.0,-0.470067575301,2.14769074706 1501 | 1.0,1.31716177847,1.12529367707 1502 | 0.0,1.18409010462,2.69040738326 1503 | 1.0,1.96283225723,0.294953413824 1504 | 0.0,0.24525294286,3.14022022522 1505 | 1.0,2.11959219183,-2.12147931064 1506 | 0.0,-0.519644660775,3.61550987222 1507 | 1.0,0.291427366657,-0.127396231896 1508 | 0.0,-1.16268002892,1.32158267405 1509 | 1.0,0.492679179483,-1.82599128681 1510 | 0.0,-2.04228576699,2.47016535431 1511 | 1.0,2.95657066335,0.855702074883 1512 | 0.0,-1.04730166652,0.768080133456 1513 | 1.0,3.22530439613,-0.822733994243 1514 | 
0.0,-0.503987748365,1.31292521809 1515 | 1.0,3.37682300106,-0.288593628253 1516 | 0.0,0.0765632301316,2.65401131473 1517 | 1.0,1.85460013003,-0.885744592502 1518 | 0.0,1.15118262788,2.10581377089 1519 | 1.0,2.33755190138,-0.173758764987 1520 | 0.0,0.314626041106,1.13635954857 1521 | 1.0,0.72188559761,0.510817538003 1522 | 0.0,-2.88600446734,2.12334457433 1523 | 1.0,0.592195180145,0.69157698739 1524 | 0.0,-0.343295030662,2.08807349409 1525 | 1.0,1.4361261062,0.0763709613791 1526 | 0.0,0.523569772682,2.8448324757 1527 | 1.0,2.008595349,-1.16713752599 1528 | 0.0,-1.49137558707,0.333403166951 1529 | 1.0,2.61873027657,-0.212417674296 1530 | 0.0,0.207974613172,1.93950229699 1531 | 1.0,-0.0933181780316,-0.567828809498 1532 | 0.0,-0.53431050657,2.54459630236 1533 | 1.0,2.04685979193,-0.664867357146 1534 | 0.0,1.66650952371,3.49994847242 1535 | 1.0,0.600901712273,-0.0634859419272 1536 | 0.0,0.119561468049,1.3339685815 1537 | 1.0,3.56984502739,0.826542719685 1538 | 0.0,0.97028421847,2.54427486366 1539 | 1.0,3.30919739246,0.193507757762 1540 | 0.0,0.0281720489479,2.09159215266 1541 | 1.0,3.92000696756,0.0781227249886 1542 | 0.0,-0.934187492685,1.4010903778 1543 | 1.0,1.92220855837,0.302268093717 1544 | 0.0,-0.895500945062,2.18862123571 1545 | 1.0,2.46103391393,-0.484437851224 1546 | 0.0,-0.675413887121,1.76034455592 1547 | 1.0,0.139202918001,0.275377508702 1548 | 0.0,-1.28943537075,2.85539359703 1549 | 1.0,2.69340777577,-0.493060542752 1550 | 0.0,-1.31456115987,2.76677595295 1551 | 1.0,0.395898270429,0.732493083186 1552 | 0.0,0.742944249692,2.34448975762 1553 | 1.0,0.0117651249745,-0.014334497692 1554 | 0.0,-0.715917643825,-0.203051178721 1555 | 1.0,3.19044911062,-0.65125950351 1556 | 0.0,-0.821621093201,3.900507677 1557 | 1.0,1.4724699193,-0.996884285765 1558 | 0.0,-0.998973604794,2.71657625743 1559 | 1.0,1.54240392665,-1.61112087377 1560 | 0.0,0.994838705805,1.8265019831 1561 | 1.0,0.968694478629,0.620656488603 1562 | 0.0,-1.53700362085,1.37668676638 1563 | 
1.0,1.87284440668,-0.19709815072 1564 | 0.0,-1.66951871065,2.58252086662 1565 | 1.0,1.41796268155,-1.16165549421 1566 | 0.0,0.832378568,1.96644018946 1567 | 1.0,1.7729054136,0.878253385885 1568 | 0.0,-0.286728043973,2.33771114311 1569 | 1.0,2.91878042225,1.56581566612 1570 | 0.0,0.532772525789,0.907646436873 1571 | 1.0,1.0463324087,1.55278742588 1572 | 0.0,-0.148843088677,2.74101150019 1573 | 1.0,1.61984020387,0.334756552803 1574 | 0.0,1.31072244103,2.31759672968 1575 | 1.0,2.13556090586,1.9081501096 1576 | 0.0,0.0255023798732,2.48351528236 1577 | 1.0,1.61159801419,0.351981838371 1578 | 0.0,0.39326340199,2.19643081308 1579 | 1.0,1.10695612566,0.859878519198 1580 | 0.0,-0.805748006481,1.69652811545 1581 | 1.0,0.964712956907,0.427765990766 1582 | 0.0,1.33562300759,3.9760280977 1583 | 1.0,0.771788186601,0.594269046068 1584 | 0.0,1.3242878684,2.99312898056 1585 | 1.0,1.62180310459,0.650144608352 1586 | 0.0,1.26504924771,1.42376963567 1587 | 1.0,2.65321862305,0.395092461635 1588 | 0.0,0.416981035197,2.86906874257 1589 | 1.0,1.2339500244,0.780241063407 1590 | 0.0,-0.280842158031,1.4908035116 1591 | 1.0,0.474153883371,-0.612561282165 1592 | 0.0,0.830703364732,3.83462976032 1593 | 1.0,1.15674972544,-0.705763107466 1594 | 0.0,1.65265142531,2.2155344675 1595 | 1.0,1.92469779245,-0.286949190016 1596 | 0.0,0.180799080285,2.70991321803 1597 | 1.0,3.4862912082,0.810705216021 1598 | 0.0,0.294518670312,1.95522761914 1599 | 1.0,1.23100293954,0.31961850847 1600 | 0.0,0.366377075614,1.91414616981 1601 | 1.0,2.50117039242,-1.17769560167 1602 | 0.0,-1.77678330837,3.57231317325 1603 | 1.0,1.9340303114,-0.0331577087206 1604 | 0.0,-1.0634613604,2.50999627812 1605 | 1.0,3.53710254214,-1.65207897081 1606 | 0.0,-1.43408814673,2.96892334394 1607 | 1.0,0.82124612492,0.729161853222 1608 | 0.0,-0.539210196839,2.58838334811 1609 | 1.0,2.0712365923,-0.00863846278557 1610 | 0.0,-0.61712228898,2.6226583403 1611 | 1.0,1.95658202,1.23519430981 1612 | 0.0,-0.180405495886,2.9422128611 1613 | 
1.0,2.87351163402,0.685936253392 1614 | 0.0,2.03288119478,1.39731430081 1615 | 1.0,2.96868749998,-1.34808660993 1616 | 0.0,1.17661902415,1.41659546969 1617 | 1.0,2.1946413029,-1.13216603495 1618 | 0.0,-0.0270449267964,1.26879434362 1619 | 1.0,4.30692338686,-0.451797669488 1620 | 0.0,2.52351413952,2.46411611648 1621 | 1.0,0.430929973907,0.892938800699 1622 | 0.0,1.16203849785,-0.181526891583 1623 | 1.0,1.3958464374,-0.280438015002 1624 | 0.0,0.157487752348,2.05502769311 1625 | 1.0,1.49034551711,-0.666643475039 1626 | 0.0,-0.85839476368,3.43852353001 1627 | 1.0,1.25159624457,1.86347969293 1628 | 0.0,0.633423655484,3.86377719073 1629 | 1.0,1.46749577201,0.0429112976996 1630 | 0.0,-1.25373384888,1.3295051907 1631 | 1.0,1.05334089685,-0.885246392714 1632 | 0.0,-0.155616544796,1.94159502069 1633 | 1.0,2.8331806253,1.44150443715 1634 | 0.0,2.42546789682,3.16241371218 1635 | 1.0,2.4647825356,0.40195966918 1636 | 0.0,0.206016165051,2.95333061184 1637 | 1.0,3.58567360078,-1.27937467205 1638 | 0.0,0.605167397496,1.43823543445 1639 | 1.0,1.21686296408,2.76146737668 1640 | 0.0,-0.479573562025,3.73904878521 1641 | 1.0,0.857888012963,2.25024242204 1642 | 0.0,-2.31368015395,2.66819195495 1643 | 1.0,2.5069999688,1.75264843736 1644 | 0.0,0.256481957053,3.21398054445 1645 | 1.0,2.33523586902,-0.975406534634 1646 | 0.0,0.604790722516,2.07954141909 1647 | 1.0,1.57843807015,-2.18377637589 1648 | 0.0,0.436309163782,1.08742901908 1649 | 1.0,2.18931786329,0.443075606541 1650 | 0.0,0.252060580265,3.35154441583 1651 | 1.0,4.3085630012,0.0457895790632 1652 | 0.0,-0.512482705987,0.83603036371 1653 | 1.0,2.37906184659,0.727101816268 1654 | 0.0,-1.39537440271,1.99014459985 1655 | 1.0,2.13424788053,1.82983499041 1656 | 0.0,0.956544429563,1.26449294055 1657 | 1.0,-0.138693026746,0.903434928748 1658 | 0.0,-1.46643392419,2.86809796866 1659 | 1.0,2.25041259221,2.25802951467 1660 | 0.0,-0.302065820953,2.33354126992 1661 | 1.0,2.72780032514,-0.390155492369 1662 | 0.0,0.527572921506,0.831676788855 1663 
| 1.0,2.55050789328,-1.82760811318 1664 | 0.0,-0.199206527157,0.913110523523 1665 | 1.0,1.68662971501,1.27931079497 1666 | 0.0,1.37442473301,1.74742695307 1667 | 1.0,2.90154624292,-0.744618458186 1668 | 0.0,-0.00603670212486,1.27639523408 1669 | 1.0,1.80262784568,0.780537967516 1670 | 0.0,0.43067135182,2.79470357137 1671 | 1.0,1.5038470839,0.451658619772 1672 | 0.0,-0.837216031104,2.59424578802 1673 | 1.0,3.19990811288,-0.721946418712 1674 | 0.0,0.226751957046,1.48009127652 1675 | 1.0,1.84070010337,1.12497162904 1676 | 0.0,0.530329902574,3.4653762437 1677 | 1.0,1.7240753337,-0.671286692143 1678 | 0.0,-0.912081940365,1.6768963841 1679 | 1.0,2.61486590509,1.14834815093 1680 | 0.0,-0.24668243499,1.65197643256 1681 | 1.0,0.108705029676,1.9922238724 1682 | 0.0,-1.24128465665,1.52440364959 1683 | 1.0,0.86026194222,0.483086211838 1684 | 0.0,-1.07356690234,3.5158435103 1685 | 1.0,2.01339683373,0.606484294289 1686 | 0.0,0.249071830162,2.22905595777 1687 | 1.0,2.27849804312,-0.113149732491 1688 | 0.0,-0.759360276108,1.81025012306 1689 | 1.0,0.596846313684,1.27245781576 1690 | 0.0,-1.57657564171,1.82021855114 1691 | 1.0,2.03151257454,0.494161813482 1692 | 0.0,0.141993119763,1.64251907754 1693 | 1.0,1.02406411227,1.00697146029 1694 | 0.0,-1.01193616407,2.24627512486 1695 | 1.0,1.99687537068,-1.01852877172 1696 | 0.0,-0.415868418701,0.763325898687 1697 | 1.0,2.6520380592,0.658369812683 1698 | 0.0,0.0144519690723,2.71933517039 1699 | 1.0,1.15293815897,-1.10539590637 1700 | 0.0,-2.70504344041,3.39401403265 1701 | 1.0,1.18449325036,0.143030291658 1702 | 0.0,2.64536833092,1.914107268 1703 | 1.0,2.48945282972,-0.885357247522 1704 | 0.0,-0.673748953388,3.85678818641 1705 | 1.0,2.7168545825,-0.046273782616 1706 | 0.0,0.136812134596,0.433796279549 1707 | 1.0,2.15597760832,-0.701575004169 1708 | 0.0,-0.0327948276832,0.568631715987 1709 | 1.0,2.93205305549,0.441438333032 1710 | 0.0,2.4834416062,0.403828048385 1711 | 1.0,2.23517412483,2.43017267276 1712 | 0.0,1.64827585002,1.97245235492 
1713 | 1.0,0.509076178251,-0.412349254988 1714 | 0.0,0.944450951274,1.70917431591 1715 | 1.0,1.40321283345,1.87496631857 1716 | 0.0,1.01344494438,2.97074376964 1717 | 1.0,3.01780840007,-0.737891966083 1718 | 0.0,-0.611935749638,0.728851645504 1719 | 1.0,1.87970221251,0.495244565186 1720 | 0.0,0.0579104320351,0.945111389284 1721 | 1.0,2.86141454424,-0.192217970524 1722 | 0.0,1.11467290845,1.36611621132 1723 | 1.0,2.60954322107,0.985565056151 1724 | 0.0,0.963900260862,2.9182725042 1725 | 1.0,2.81170594555,0.9710376426 1726 | 0.0,-0.533130313063,3.42375266814 1727 | 1.0,3.64576344899,-0.833917635154 1728 | 0.0,2.19524676248,1.19774402052 1729 | 1.0,0.981892260196,-0.746521130024 1730 | 0.0,1.21323284667,1.71154732609 1731 | 1.0,2.11357880024,-0.980734500701 1732 | 0.0,-1.66248161892,0.485356956428 1733 | 1.0,1.18670724241,0.308680302722 1734 | 0.0,0.58257876795,2.17209358234 1735 | 1.0,0.763574002382,-1.22287620251 1736 | 0.0,0.795482429459,3.13001362683 1737 | 1.0,1.52355122361,0.912007260298 1738 | 0.0,-1.29912171002,1.90773166243 1739 | 1.0,1.80799215541,-1.38591793415 1740 | 0.0,0.0299635401154,2.3771594814 1741 | 1.0,1.72220468542,1.40753518046 1742 | 0.0,0.50565207509,2.37744399422 1743 | 1.0,1.61318617264,-0.68842613286 1744 | 0.0,0.588330926656,3.09601870971 1745 | 1.0,-0.172646133696,0.0296277828711 1746 | 0.0,0.227785909636,2.18746189416 1747 | 1.0,2.84540193345,-1.3490755142 1748 | 0.0,0.395661820395,0.419088782751 1749 | 1.0,0.707579884526,-0.0439777245839 1750 | 0.0,0.0112100810882,0.968024206249 1751 | 1.0,1.20359053759,-1.45654878223 1752 | 0.0,-0.710418884791,2.48893384535 1753 | 1.0,3.28030594245,0.798542380946 1754 | 0.0,-0.621235191408,2.85002603212 1755 | 1.0,1.08023059815,0.728952575546 1756 | 0.0,0.562802318989,3.17631476919 1757 | 1.0,1.69256691612,-0.252703452781 1758 | 0.0,0.979134579176,2.02894342048 1759 | 1.0,2.25760891307,-0.729912078729 1760 | 0.0,0.522668720778,2.3914527684 1761 | 1.0,2.34318638784,-1.08263529084 1762 | 
0.0,0.469080124012,2.4698876158 1763 | 1.0,1.79178947639,-0.0896649505318 1764 | 0.0,0.419673179687,2.13198646439 1765 | 1.0,2.29025617327,0.804625595177 1766 | 0.0,0.71214783866,4.75218348975 1767 | 1.0,3.12221357671,1.57128606216 1768 | 0.0,-0.361732370032,-0.118463091771 1769 | 1.0,1.57305416667,0.430534158068 1770 | 0.0,-1.13671490858,0.821346760696 1771 | 1.0,0.411696073803,-0.120326706348 1772 | 0.0,1.28702004443,3.29192987816 1773 | 1.0,2.07547247902,-0.210820382057 1774 | 0.0,0.556001329582,2.56091190054 1775 | 1.0,2.33124229485,-0.764298840693 1776 | 0.0,0.740496338441,0.924092652903 1777 | 1.0,3.84160773364,-0.74327271163 1778 | 0.0,-1.15804308485,1.23120053454 1779 | 1.0,0.445478165091,0.587198593559 1780 | 0.0,1.63632254861,1.95123174176 1781 | 1.0,0.594845475454,-0.833836121333 1782 | 0.0,-0.629934616536,2.02856674125 1783 | 1.0,1.06969339296,0.046629848535 1784 | 0.0,-0.902320594773,0.916245894309 1785 | 1.0,3.69421031492,0.524976336206 1786 | 0.0,0.0511946936791,2.63093246218 1787 | 1.0,3.71520206794,-1.06573392805 1788 | 0.0,-0.486035656336,1.32931681833 1789 | 1.0,2.53816770402,-1.02853645959 1790 | 0.0,-0.316777624426,0.940497741538 1791 | 1.0,1.5766106503,0.815260547239 1792 | 0.0,0.569161027511,3.52725248686 1793 | 1.0,2.45046790923,-0.0376153313909 1794 | 0.0,0.679096173766,2.45971617205 1795 | 1.0,2.81295884908,-1.05546925356 1796 | 0.0,0.045508548911,2.48999634106 1797 | 1.0,2.06476501439,0.533921368562 1798 | 0.0,0.0976349298097,2.63238871 1799 | 1.0,2.22935847193,0.830208653633 1800 | 0.0,0.988898976392,1.64064408158 1801 | 1.0,0.819870760037,-0.0272695255678 1802 | 0.0,-0.406368619226,2.81172546908 1803 | 1.0,1.91728034238,0.261143381956 1804 | 0.0,-0.978394369163,0.801468736553 1805 | 1.0,2.60664645297,1.21271231282 1806 | 0.0,-0.0786373162843,0.834356521517 1807 | 1.0,2.28640380536,-1.77408919977 1808 | 0.0,-1.24031080181,2.32362812358 1809 | 1.0,2.18291435831,0.867767010361 1810 | 0.0,1.40725955286,1.78467260273 1811 | 
1.0,1.71450411885,0.543172901162 1812 | 0.0,0.952371270759,2.82248704343 1813 | 1.0,2.42834103694,-0.46813724409 1814 | 0.0,0.978553448202,1.50721625032 1815 | 1.0,3.02284277866,1.09257076678 1816 | 0.0,-0.796387370716,2.98870565373 1817 | 1.0,2.18526256171,-1.60566657467 1818 | 0.0,0.0499471041231,4.27461549338 1819 | 1.0,0.800731703843,-0.509121331156 1820 | 0.0,1.40995479814,-1.14194054651 1821 | 1.0,3.50097082103,0.368601081639 1822 | 0.0,-0.254396279496,2.99995467556 1823 | 1.0,1.29443905197,-0.840980461576 1824 | 0.0,-1.75651757562,1.85736868624 1825 | 1.0,1.78187875676,0.423879276788 1826 | 0.0,0.179339237985,2.60667044602 1827 | 1.0,1.84025054065,-0.28179090812 1828 | 0.0,-0.132523699516,0.54834815204 1829 | 1.0,4.42110689624,-1.00224258733 1830 | 0.0,1.91080216335,0.614791149195 1831 | 1.0,2.16394382546,-0.789462540703 1832 | 0.0,-0.487852600372,1.88653718176 1833 | 1.0,1.93645817299,-1.01171541143 1834 | 0.0,-0.527947181156,2.66797647649 1835 | 1.0,3.00521011997,-0.417438274098 1836 | 0.0,0.402519349079,1.39593329527 1837 | 1.0,3.41539070869,0.0299350404661 1838 | 0.0,-0.278474230546,2.56022486231 1839 | 1.0,1.69260843444,0.578283868182 1840 | 0.0,-1.18089199978,2.84869920168 1841 | 1.0,0.578907827887,-0.21614659969 1842 | 0.0,2.06721731298,2.74681274502 1843 | 1.0,2.16696391064,-0.797878108614 1844 | 0.0,-0.773124759087,0.768600989338 1845 | 1.0,1.8738205691,0.645431754798 1846 | 0.0,-1.08697642077,3.84292169974 1847 | 1.0,2.38894574894,0.943318984016 1848 | 0.0,-0.398662773041,3.2875228431 1849 | 1.0,1.54721578031,0.589833716285 1850 | 0.0,-0.456423658699,1.31253129927 1851 | 1.0,1.18257716756,0.708634398252 1852 | 0.0,-1.11141219129,2.29305578276 1853 | 1.0,2.622317288,1.58130481344 1854 | 0.0,0.28515363564,1.1546954199 1855 | 1.0,1.54614414793,1.21213423855 1856 | 0.0,0.117919723334,4.1860791314 1857 | 1.0,1.58422694468,-0.326828942314 1858 | 0.0,0.256410597845,2.79899774044 1859 | 1.0,-0.746727538447,0.35628633304 1860 | 
0.0,-0.0406475961461,1.23134660641 1861 | 1.0,2.42624805761,-0.913484142484 1862 | 0.0,0.239629051922,1.42805551868 1863 | 1.0,0.812698841678,0.426825308356 1864 | 0.0,-1.15378198384,2.56539588218 1865 | 1.0,1.83646181824,-0.810164067189 1866 | 0.0,0.34906604903,0.938897221504 1867 | 1.0,2.3085368304,-1.56841008354 1868 | 0.0,1.47124557123,2.43734785626 1869 | 1.0,2.33483987511,0.273612897053 1870 | 0.0,0.652137815501,1.9227395045 1871 | 1.0,3.40804411658,-1.62456763653 1872 | 0.0,0.352831103886,0.657113436031 1873 | 1.0,2.57160102006,1.22655128488 1874 | 0.0,0.0470812414949,4.43809593088 1875 | 1.0,1.74645538271,-0.916590334353 1876 | 0.0,-0.640615805779,3.61668878324 1877 | 1.0,1.50281447241,0.100939643762 1878 | 0.0,-1.14808699856,2.88781325506 1879 | 1.0,1.67340039479,0.549988664646 1880 | 0.0,-0.708168444382,1.14454082444 1881 | 1.0,1.42716433341,0.483098474643 1882 | 0.0,-0.105964960417,2.29824488119 1883 | 1.0,2.40138792158,-0.769590364345 1884 | 0.0,0.315515962561,1.77499964549 1885 | 1.0,0.955603149556,-0.413824978119 1886 | 0.0,-0.279845386124,0.0289132946613 1887 | 1.0,1.62589012687,-0.412947668263 1888 | 0.0,0.698027077579,2.34146842473 1889 | 1.0,2.2782202656,0.751636461094 1890 | 0.0,0.671008771119,2.7072466275 1891 | 1.0,3.88084330099,-1.82571063697 1892 | 0.0,-0.704948712903,0.903004328659 1893 | 1.0,2.63278003716,1.44961060443 1894 | 0.0,-0.803317887834,-0.430821104256 1895 | 1.0,2.75938386651,0.167864414496 1896 | 0.0,0.0116484854056,0.795823327858 1897 | 1.0,2.28141931392,-1.32616184226 1898 | 0.0,0.529481187762,1.07367642229 1899 | 1.0,3.76298479184,-0.770263021925 1900 | 0.0,0.46263336572,3.66240942578 1901 | 1.0,1.54284874101,1.68214405522 1902 | 0.0,0.292989017126,1.49547943156 1903 | 1.0,0.999720242926,0.211283618817 1904 | 0.0,-0.524513471786,3.91243602189 1905 | 1.0,2.27365078323,0.626792202372 1906 | 0.0,1.03992430504,0.909291537662 1907 | 1.0,1.59854892367,-0.833984483136 1908 | 0.0,0.233026192497,1.46926834704 1909 | 
1.0,3.03415387062,0.727457167736 1910 | 0.0,1.78913537216,2.96873455741 1911 | 1.0,1.99127177385,-0.0227155782245 1912 | 0.0,1.79570949918,2.52154799795 1913 | 1.0,2.24153752681,-0.866375034589 1914 | 0.0,-0.139812323043,3.0448951467 1915 | 1.0,1.56461894459,1.72608839912 1916 | 0.0,-0.39951487213,2.31948822471 1917 | 1.0,0.615247403429,1.68951394653 1918 | 0.0,0.0421419930683,2.31889128708 1919 | 1.0,0.30684830254,-1.35058029832 1920 | 0.0,-0.238810840996,1.94636384743 1921 | 1.0,2.85310323468,0.0442617858334 1922 | 0.0,-0.347308717848,2.41808683081 1923 | 1.0,2.19550738044,-0.202041289843 1924 | 0.0,-1.36382183653,4.07255270422 1925 | 1.0,2.74313967192,-0.423719765385 1926 | 0.0,-0.194136278727,1.62031393429 1927 | 1.0,0.944505675606,1.05898910607 1928 | 0.0,0.812408509496,1.4961755662 1929 | 1.0,2.80875630059,-0.357151496934 1930 | 0.0,-0.613253545729,2.85172360268 1931 | 1.0,1.84355137938,1.31451810463 1932 | 0.0,-2.54037923965,0.974137259922 1933 | 1.0,2.00346934162,-1.91814337144 1934 | 0.0,0.405279356017,0.833123132403 1935 | 1.0,0.0918121708327,0.232180914461 1936 | 0.0,-1.17136760366,3.24939171126 1937 | 1.0,3.0242252739,-1.28171958601 1938 | 0.0,0.846780966569,1.98926097355 1939 | 1.0,1.18793971472,-0.275324720767 1940 | 0.0,0.0591728951588,1.82320769635 1941 | 1.0,2.20768869996,-1.14815899265 1942 | 0.0,-0.0659484640182,1.36702324486 1943 | 1.0,1.00351210353,0.0707694611421 1944 | 0.0,-0.65319154857,2.12606055251 1945 | 1.0,0.542229356252,0.0169368311423 1946 | 0.0,-1.66384057796,3.4639056937 1947 | 1.0,3.55986315785,-1.14980883502 1948 | 0.0,-0.548950565636,1.36451200244 1949 | 1.0,1.83809207076,0.708834191353 1950 | 0.0,1.02527542733,0.0893705835527 1951 | 1.0,2.8832145227,0.506319038828 1952 | 0.0,-0.370541295791,0.834908840702 1953 | 1.0,2.55396152755,-0.814093874813 1954 | 0.0,1.39454219278,3.60062790582 1955 | 1.0,0.281073573886,0.963080378671 1956 | 0.0,-0.990228571003,1.59756954588 1957 | 1.0,2.28985749452,2.28139899001 1958 | 
0.0,0.49669371413,2.03034767299 1959 | 1.0,1.04163042787,0.886806884407 1960 | 0.0,0.250411130817,1.6315726902 1961 | 1.0,2.69387037712,-0.366664374103 1962 | 0.0,9.29024478354e-05,2.63164906184 1963 | 1.0,3.14261322697,2.62308902697 1964 | 0.0,-0.6944882418,1.53222777411 1965 | 1.0,1.32991315696,-1.15933693025 1966 | 0.0,1.21684844105,2.13251025093 1967 | 1.0,2.05074901831,-2.76900178227 1968 | 0.0,-1.65896422608,0.963262644293 1969 | 1.0,3.63496975496,1.1792699606 1970 | 0.0,-1.29036093824,2.0200787757 1971 | 1.0,2.56870298956,0.310456662007 1972 | 0.0,2.83491293814,2.16353651262 1973 | 1.0,1.41730694701,2.50927469181 1974 | 0.0,-0.00676792769182,2.92316221453 1975 | 1.0,1.17839449658,0.555110069112 1976 | 0.0,-0.691921650784,3.44860887119 1977 | 1.0,1.49494471602,1.43990524178 1978 | 0.0,0.195775903462,2.71333875313 1979 | 1.0,1.98045561296,-0.466417111208 1980 | 0.0,-0.0418372902616,3.11397822688 1981 | 1.0,2.90295238049,1.59727945954 1982 | 0.0,-1.34213825433,1.5850645971 1983 | 1.0,1.92153970389,-0.860814982493 1984 | 0.0,-0.989544167147,1.11496080526 1985 | 1.0,2.09871613356,1.33159407725 1986 | 0.0,-0.148044455403,-0.180751334056 1987 | 1.0,1.86163298369,1.09362566544 1988 | 0.0,-0.998640996578,2.22809543062 1989 | 1.0,1.78361667707,0.285446435729 1990 | 0.0,0.18643783038,1.54600407765 1991 | 1.0,2.78606844056,-0.0562725222455 1992 | 0.0,1.13391419935,0.48791823117 1993 | 1.0,0.203676523954,0.882053548303 1994 | 0.0,-1.38257909673,3.72611717745 1995 | 1.0,1.58412064173,0.0993917530652 1996 | 0.0,-0.212025129542,2.07260763084 1997 | 1.0,1.73749034499,-1.79972548059 1998 | 0.0,1.17250541349,2.02061665777 1999 | 1.0,0.910563683871,1.19477206318 2000 | 0.0,1.61597405448,1.97112346256 2001 | -------------------------------------------------------------------------------- /Logistic Regression/R/LogReg.R: -------------------------------------------------------------------------------- 1 | 2 | #remove all previous data 3 | rm(list=ls()) 4 | 5 | #load library & data 
6 | library(kernlab) 7 | data(spam) 8 | source('~/LogReg_function.R') 9 | 10 | #number of samples 11 | n=dim(spam)[1]; 12 | # number of features 13 | m=dim(spam)[2]; 14 | 15 | Y=array(rep(0,n), c(n,1)); 16 | # let spam==1, nonspam==0 17 | Y[ which( spam$type == 'spam'),] = 1 18 | 19 | #separate data X and Y 20 | X = spam[,1:(m-1)]; 21 | 22 | #set inital theta to 0 23 | theta=array(rep(0,m), c(m,1)); 24 | #regularization factor lambda 25 | lambda = 500; 26 | #learning rate alpha 27 | alpha=1; 28 | 29 | round=20; 30 | result = as.data.frame(cbind(train = rep(0,round), test = rep(0,round))); 31 | 32 | for(k in 1:round){ 33 | #get random sample 34 | indices = 1:n 35 | train.indices = sample(n, as.integer(n/2)) 36 | test.indices = indices[!indices %in% train.indices] 37 | train.X = X[train.indices,] 38 | test.X = X[test.indices,] 39 | train.Y = as.data.frame(Y[train.indices,]) 40 | test.Y = as.data.frame(Y[test.indices,]) 41 | 42 | #normalize training data 43 | train.X = scale(train.X); 44 | train.mu = attr(train.X, "scaled:center"); 45 | train.var = attr(train.X, "scaled:scale"); 46 | train.X = cbind( const=array(rep(1,as.integer(n/2)), c(as.integer(n/2),1)) ,train.X); 47 | #apply normalized data to test data 48 | test.X = scale(test.X, center=train.mu, scale=train.var); 49 | test.X = cbind( const=array(rep(1,n-as.integer(n/2)), c(n-as.integer(n/2),1)) ,test.X); 50 | 51 | #Training 52 | max_iter=20; 53 | iter=0; 54 | train.cost=rep(0,max_iter+1); 55 | test.cost=rep(0,max_iter+1); 56 | #initial cost 57 | train.cost[1]=cost.fun(theta,train.X,train.Y,lambda); 58 | test.cost[1]=cost.fun(theta,test.X,test.Y,lambda); 59 | 60 | for(i in 1:max_iter){ 61 | 62 | theta = grad.fun(theta, train.X,train.Y, lambda, alpha) 63 | train.cost[i+1]=cost.fun(theta,train.X,train.Y,lambda); 64 | test.cost[i+1]=cost.fun(theta,test.X,test.Y,lambda); 65 | 66 | if(abs(train.cost[i+1]-train.cost[i])<0.001){ 67 | iter=i+1; 68 | break; 69 | } 70 | 71 | } 72 | 73 | #prediction 74 | train.h_theta = 
sigmoid(as.matrix(train.X) %*% theta); 75 | test.h_theta = sigmoid(as.matrix(test.X) %*% theta); 76 | 77 | train.y_hat= as.data.frame(array(rep(0,dim(train.Y)[1]), c(dim(train.Y)[1],1))); 78 | test.y_hat = as.data.frame(array(rep(0,dim(test.Y)[1]), c(dim(test.Y)[1],1))); 79 | 80 | train.y_hat[which(as.data.frame(train.h_theta)$V1 >=0.5 ), ]=1; 81 | test.y_hat[which(as.data.frame(test.h_theta)$V1 >=0.5 ), ]=1; 82 | 83 | M= as.data.frame(cbind(y=train.Y, y_hat=train.y_hat)); 84 | N= as.data.frame(cbind(y=test.Y, y_hat=test.y_hat)); 85 | names(M)<-c('y', 'y_hat') 86 | names(N)<-c('y', 'y_hat') 87 | train.accuracy = (dim ( M[ M$y== M$y_hat, ] )[1]) / dim(M)[1]; 88 | test.accuracy = (dim ( N[ N$y== N$y_hat, ] )[1]) / dim(N)[1]; 89 | 90 | #plot(train.cost[1:iter], type="l") 91 | result$train[k]=train.accuracy; 92 | result$test[k]=test.accuracy; 93 | } 94 | avg.train = mean(result$train); 95 | avg.test = mean(result$test); 96 | -------------------------------------------------------------------------------- /Logistic Regression/R/LogReg_function.R: -------------------------------------------------------------------------------- 1 | sigmoid<-function(X){ 2 | Z= 1/(1 + exp(-1*X)); 3 | return(Z); 4 | } 5 | 6 | cost.fun<-function(theta, X, y, lambda){ 7 | #n=number of samples 8 | n=dim(X)[1]; 9 | #m=number of features 10 | m=dim(X)[2]; 11 | X=as.matrix(X); 12 | 13 | h_theta = sigmoid(X %*% theta); 14 | J = (1/n)* (sum( ((-1)*y*log(h_theta)) - ((1-y)*log(1-h_theta))) + lambda*0.5*sum(theta[2:m,]^2)); 15 | 16 | return(J); 17 | } 18 | 19 | grad.fun<-function(theta, X, y, lambda, alpha){ 20 | #n=number of samples 21 | n=dim(X)[1]; 22 | #m=number of features 23 | m=dim(X)[2]; 24 | X=as.matrix(X); 25 | 26 | theta_next = array(rep(0,m), c(m,1)); 27 | 28 | h_theta = sigmoid(X %*% theta); 29 | 30 | #theta0 (intercept term, not regularized) 31 | theta_next[1,] = theta[1,] - alpha * (1/n) * sum( (h_theta - y)*X[,1] ); 32 | 33 | #theta 1 to m-1 34 | for(i in 2:m){ 35 | theta_next[i,] = theta[i,] - alpha * (1/n) *
(sum( (h_theta - y)*X[,i])+lambda*theta[i,]); 36 | } 37 | return(theta_next); 38 | } -------------------------------------------------------------------------------- /Matrix Factorization/mf_train/App.java: -------------------------------------------------------------------------------- 1 | package mf_train.mf_train; 2 | 3 | import java.util.List; 4 | import java.util.Map; 5 | 6 | import org.apache.mahout.math.DenseMatrix; 7 | import org.apache.mahout.math.DenseVector; 8 | 9 | public class App 10 | { 11 | /*file path*/ 12 | final static String UserIDList = "./data/user_id.csv"; 13 | final static String ItemIDList = "./data/item_id.csv"; 14 | final static String trainData = "./data/train_data.csv"; 15 | final static String testData = "./data/test_data.csv"; 16 | 17 | public static void main( String[] args ) 18 | { 19 | 20 | /*settings: 21 | * K: number of features 22 | * maxIter: max iterations 23 | * learningRate: learning rate 24 | * convergeValue: abs value between two iterations 25 | * RegParm: regularized parameter 26 | */ 27 | int k=5; 28 | int maxIter=100; 29 | double learningRate=0.04; 30 | double convergevalue=0.0001; 31 | double regParm = 0.01; 32 | 33 | double RMSE=0.0; 34 | 35 | Map userIdMap = DataImport.getMapFromCSV(UserIDList); 36 | Map itemIdMap = DataImport.getMapFromCSV(ItemIDList); 37 | List trainingData = DataImport.getArrayFromCSV(trainData); 38 | List testingData = DataImport.getArrayFromCSV(testData); 39 | 40 | RegSVD svdModel = new RegSVD(userIdMap.size(), itemIdMap.size(),k); 41 | svdModel.training(userIdMap, itemIdMap, trainingData, maxIter, regParm, learningRate, convergevalue); 42 | 43 | double[] pred = svdModel.ratingPrediction(userIdMap, itemIdMap, testingData); 44 | 45 | RMSE = RegSVD.RMSE(pred, testingData); 46 | System.out.println("RMSE=" + RMSE); 47 | } 48 | } 49 | 50 | -------------------------------------------------------------------------------- /Matrix Factorization/mf_train/DataImport.java: 
-------------------------------------------------------------------------------- 1 | package mf_train.mf_train; 2 | 3 | import java.io.BufferedReader; 4 | import java.io.File; 5 | import java.io.FileReader; 6 | import java.io.IOException; 7 | import java.util.ArrayList; 8 | import java.util.HashMap; 9 | import java.util.List; 10 | import java.util.Map; 11 | import java.util.StringTokenizer; 12 | 13 | public class DataImport { 14 | 15 | public static Map getMapFromCSV(String path){ 16 | 17 | Map map = new HashMap(); 18 | int key; 19 | int val; 20 | 21 | try{ 22 | File file = new File(path); 23 | 24 | //check if file exists. 25 | if(file.isFile()){ 26 | System.out.println("File: " + path + " is found."); 27 | } 28 | else{ 29 | System.out.println("File: " + path + " is not found."); 30 | } 31 | 32 | BufferedReader bufRdr = new BufferedReader(new FileReader(file)); 33 | String line = null; 34 | 35 | 36 | //ignore the first line 37 | bufRdr.readLine(); 38 | 39 | 40 | //read lines 41 | while((line = bufRdr.readLine()) != null){ 42 | 43 | StringTokenizer st = new StringTokenizer(line,","); 44 | key = Integer.valueOf(st.nextToken()); 45 | val = Integer.valueOf(st.nextToken()); 46 | 47 | map.put(key, val); 48 | 49 | } 50 | 51 | 52 | } 53 | catch(IOException e){ 54 | System.out.println(e); 55 | } 56 | 57 | return(map); 58 | 59 | } 60 | 61 | 62 | public static List getArrayFromCSV(String path){ 63 | 64 | List ratingList = new ArrayList(); 65 | int userID; 66 | int itemID; 67 | int rating; 68 | 69 | try{ 70 | File file = new File(path); 71 | 72 | //check if file exists. 
73 | if(file.isFile()){ 74 | System.out.println("File: " + path + " is found."); 75 | } 76 | else{ 77 | System.out.println("File: " + path + " is not found."); 78 | } 79 | 80 | BufferedReader bufRdr = new BufferedReader(new FileReader(file)); 81 | String line = null; 82 | 83 | 84 | //ignore the first line 85 | bufRdr.readLine(); 86 | 87 | 88 | //read lines 89 | while((line = bufRdr.readLine()) != null){ 90 | 91 | StringTokenizer st = new StringTokenizer(line,","); 92 | userID = Integer.valueOf(st.nextToken()); 93 | itemID = Integer.valueOf(st.nextToken()); 94 | rating = Integer.valueOf(st.nextToken()); 95 | 96 | ratingList.add(new RatingData(userID, itemID, rating)); 97 | 98 | } 99 | 100 | 101 | } 102 | catch(IOException e){ 103 | System.out.println(e); 104 | } 105 | 106 | 107 | return ratingList; 108 | } 109 | } -------------------------------------------------------------------------------- /Matrix Factorization/mf_train/RatingData.java: -------------------------------------------------------------------------------- 1 | package mf_train.mf_train; 2 | 3 | public class RatingData { 4 | 5 | private int userID; 6 | private int itemID; 7 | private int Rating; 8 | 9 | public RatingData(int uid, int iid, int rating){ 10 | 11 | userID = uid;//generate F,G matrix 12 | itemID = iid; 13 | Rating = rating; 14 | } 15 | 16 | public int getUserID(){ 17 | return userID; 18 | } 19 | 20 | public int getItemID(){ 21 | return itemID; 22 | } 23 | 24 | public int getRating(){ 25 | return Rating; 26 | } 27 | 28 | public String toString(){ 29 | String s ="User:" + userID + ", Item:" + itemID + " ,rating:" + Rating ; 30 | 31 | return s; 32 | 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /Matrix Factorization/mf_train/RegSVD.java: -------------------------------------------------------------------------------- 1 | package mf_train.mf_train; 2 | 3 | import java.util.Iterator; 4 | import java.util.List; 5 | import java.util.Map; 6 | 
import java.util.Random; 7 | 8 | import org.apache.mahout.math.DenseMatrix; 9 | 10 | public class RegSVD { 11 | 12 | private DenseMatrix F; 13 | private DenseMatrix G; 14 | private Random generator = new Random(); 15 | 16 | /*Initialize 17 | * 18 | * */ 19 | public RegSVD(int rowOfF, int rowOfG, int k){ 20 | 21 | F = new DenseMatrix(rowOfF, k); 22 | G = new DenseMatrix(rowOfG, k); 23 | 24 | //generate random number 25 | for(int i=0 ; i userIdMap, Map itemIdMap, 35 | List trainingData, int maxIteration, double regParm, 36 | double learningRate, double convergevalue) 37 | { 38 | int iteration=0; 39 | int k = F.columnSize(); 40 | int u,v; //u:row for F, v: row for G 41 | double Buv, Ruv, Fus, Gvs; //Buv: current estimating rating, Ruv: current error for 42 | double[] MSE = new double[maxIteration]; 43 | 44 | 45 | RatingData tempRating; 46 | 47 | System.out.println("start traing..........."); 48 | while(iteration < maxIteration){ 49 | 50 | System.out.println("Iteration " + iteration); 51 | Iterator iterator = trainingData.iterator(); 52 | 53 | while(iterator.hasNext()){ //for all u,v in training set 54 | 55 | tempRating = iterator.next(); //get next training data 56 | 57 | u = userIdMap.get(tempRating.getUserID()); //get to row # of user 58 | v = itemIdMap.get(tempRating.getItemID()); // get the row # of item 59 | 60 | //System.out.println("u:" + u + " v:" + v); 61 | 62 | Buv=F.viewRow(u).dot(G.viewRow(v)); //get current prediction 63 | Ruv= tempRating.getRating()-Buv; //get current error 64 | 65 | for(int s=0; s=1){ 80 | 81 | if( Math.abs(MSE[iteration]-MSE[iteration-1]) < convergevalue){ 82 | System.out.println("RMSE converged, stop running"); 83 | break; 84 | } 85 | } 86 | 87 | iteration++; 88 | }//end while 89 | 90 | }//end function 91 | 92 | private double calculateMSE(Map userIdMap, Map itemIdMap, 93 | List trainingData) 94 | { 95 | double SSE=0.0; 96 | int u,v; //u:row for F, v: row for G 97 | Iterator iterator = trainingData.iterator(); 98 | RatingData 
tempRating; 99 | double Buv, Ruv; 100 | 101 | while(iterator.hasNext()){ //for all u,v in training set 102 | 103 | tempRating = iterator.next(); //get next training data 104 | 105 | u = userIdMap.get(tempRating.getUserID()); //get to row # of user 106 | v = itemIdMap.get(tempRating.getItemID()); // get the row # of item 107 | 108 | Buv=F.viewRow(u).dot(G.viewRow(v)); //get current prediction 109 | Ruv= tempRating.getRating()-Buv; //get current error 110 | 111 | SSE+=Math.pow(Ruv, 2.0); 112 | //SSE+=Math.abs(Ruv); 113 | } 114 | 115 | SSE = Math.sqrt(SSE/trainingData.size()); 116 | return SSE; 117 | } 118 | 119 | public double[] ratingPrediction(Map userIdMap, Map itemIdMap, 120 | List testingData) 121 | { 122 | double[] prediction = new double[testingData.size()]; 123 | 124 | 125 | int u,v; //u:row for F, v: row for G 126 | RatingData tempRating; 127 | // double Err=0,MAE=0; 128 | 129 | for(int i=0; i< testingData.size(); ++i){ 130 | 131 | tempRating = testingData.get(i); //get next training data 132 | 133 | u = userIdMap.get(tempRating.getUserID()); //get to row # of user 134 | v = itemIdMap.get(tempRating.getItemID()); // get the row # of item 135 | 136 | prediction[i]=F.viewRow(u).dot(G.viewRow(v)); //get current prediction 137 | // Err= tempRating.getRating()-prediction[i]; //get current error 138 | 139 | // if(Err > 4.0) System.out.println("Error > 4.0"); 140 | 141 | // MAE+=Math.abs(Err); 142 | } 143 | // System.out.println("MAE="+ MAE/testingData.size()); 144 | 145 | return prediction; 146 | } 147 | 148 | public static double MAE(double[] prediction, List testingData){ 149 | 150 | double MAE=0.0, Err=0.0; 151 | RatingData tempRating; 152 | 153 | for(int i=0; i< testingData.size(); ++i){ 154 | 155 | tempRating = testingData.get(i); //get next training data 156 | 157 | if(prediction[i] > 5.0) prediction[i]=5.0; 158 | if(prediction[i] < 1.0) prediction[i]=1.0; 159 | 160 | Err= tempRating.getRating()-prediction[i]; //get current error 161 | MAE+=Math.abs(Err); 
162 | } 163 | 164 | //System.out.println("MAE="+ MAE/testingData.size()); 165 | 166 | return MAE/testingData.size(); 167 | 168 | } 169 | 170 | public static double RMSE(double[] prediction, List testingData){ 171 | 172 | double RMSE=0.0, Err=0.0; 173 | RatingData tempRating; 174 | 175 | for(int i=0; i< testingData.size(); ++i){ 176 | 177 | tempRating = testingData.get(i); //get next training data 178 | 179 | if(prediction[i] > 5.0) prediction[i]=5.0; 180 | if(prediction[i] < 1.0) prediction[i]=1.0; 181 | 182 | Err= tempRating.getRating()-prediction[i]; //get current error 183 | RMSE+=Math.pow(Err,2.0); 184 | } 185 | 186 | //System.out.println("MAE="+ MAE/testingData.size()); 187 | 188 | return Math.sqrt(RMSE/testingData.size()); 189 | 190 | } 191 | } 192 | 193 | -------------------------------------------------------------------------------- /Matrix Factorization/mf_train/randomMatrixGenerator.java: -------------------------------------------------------------------------------- 1 | package mf_train.mf_train; 2 | 3 | import java.util.Random; 4 | 5 | import org.apache.mahout.math.DenseMatrix; 6 | import org.apache.mahout.math.DenseVector; 7 | 8 | public class randomMatrixGenerator { 9 | 10 | static public DenseMatrix matrixgenerator(int nRow, int nCol){ 11 | 12 | DenseMatrix matrix = new DenseMatrix(nRow, nCol); 13 | Random generator = new Random(); 14 | 15 | //generate random number 16 | for(int i=0 ; i 2 | Hope this can help beginners who are interested in R/Python programming and ML.
SVM<-function(x, y, C, max.iter=3000, tolerance=1e-3){
	#Train a linear-kernel SVM with Platt's SMO algorithm (S3 class "SVM").
	#
	# x         : numeric matrix, one sample per row
	# y         : labels, must be +1 or -1
	# C         : box constraint on the alphas
	# max.iter  : hard cap on outer iterations
	# tolerance : KKT violation tolerance
	#
	#Returns a list of class "SVM" holding the training data, alphas,
	#threshold b and primal weight vector w (used by predict.SVM).

	m<-dim(x)[1]	#number of samples
	n<-dim(x)[2]	#number of features

	model.attr<-list()          #build class
	class(model.attr) <- "SVM"  #set class name
	# set attributes
	model.attr$x<-x
	model.attr$y<-y
	model.attr$C<-C
	model.attr$m<-m
	model.attr$n<-n
	model.attr$max.iter<-max.iter
	model.attr$tolerance<-tolerance
	#FIX: the original line `model.attrkernel<-kernel` both mistyped the
	#name (missing `$`) and referenced an undefined variable `kernel`.
	#Only the linear kernel is implemented, so record it explicitly.
	model.attr$kernel<-"linear"

	# initialize parameters
	model.attr$alpha<-rep(0, m)
	#FIX: original assigned to a stray local `model.w`; w belongs on the model.
	model.attr$w<-rep(0, n)
	model.attr$b<-0

	#training
	iter=0
	numChanged=0
	examAll=T	#first pass examines the whole training set

	start.time<-proc.time()

	while( numChanged >0 || examAll){

		numChanged=0
		#alphas strictly inside (0, C): the non-bound support vectors
		non.zero.alpha<-which( (model.attr$alpha!=0) & (model.attr$alpha!=model.attr$C) )

		cat("iter=", iter, ", support vector number=",length(non.zero.alpha),"\n", sep="")

		if(examAll || length(non.zero.alpha)==0 ){
			cat(" --exam all in this iteration\n")
			update.cand<-seq(1,model.attr$m)
		}
		else{
			cat(" --exam ", length(non.zero.alpha), " alphas in this iteration\n",sep="")
			update.cand<-non.zero.alpha
		}

		for( index in update.cand){

			err<-getErr(model.attr, index)
			label<-model.attr$y[index]

			#only optimize alphas that violate the KKT conditions
			if( ((err*label) < (-1)*model.attr$tolerance && (model.attr$alpha[index] < model.attr$C)
				) || ((err*label) > model.attr$tolerance && (model.attr$alpha[index] >0) )){

				#select 2nd alpha by the max |E1-E2| heuristic
				#(random fallback handled inside selectIndex)
				index2<-selectIndex(index, err, model.attr)

				label2<-model.attr$y[index2]
				err2<-getErr(model.attr, index2)

				s<-(label * label2)
				C<-model.attr$C

				#old alphas
				alpha<-model.attr$alpha[index]
				alpha2<-model.attr$alpha[index2]

				#feasible interval [low, high] for the new alpha2
				if(label != label2){
					high = min(C, C + alpha2 - alpha )
					low = max(0 , alpha2 - alpha)
				}else{
					#label == label2
					high = min(C, alpha2 + alpha )
					low = max(0 , alpha2 + alpha - C)
				}

				#make sure low != high
				if(low == high){
					next
				}

				#eta = ||x1 - x2||^2 for the linear kernel
				#(renamed from `x`/`x2`: the original shadowed the argument x)
				x1<-model.attr$x[index,]
				x2<-model.attr$x[index2,]
				eta = sum(x1*x1) + sum(x2*x2) - 2*sum(x1*x2)

				if(eta<=0){
					next
				}

				#step alpha2 along the constraint line, then clip into the box
				new.alpha2 <- (alpha2 + (label2*(err-err2)/eta))
				new.alpha2 <- getAlpha(new.alpha2, high, low)

				#skip negligible updates
				if( abs(new.alpha2 - alpha2) < 1e-5){
					next
				}

				model.attr$alpha[index2]<-new.alpha2

				#update alpha1 so that s*alpha1 + alpha2 stays constant
				new.alpha<- alpha + s*(alpha2 - new.alpha2)
				model.attr$alpha[index]<-new.alpha

				#threshold candidates, Platt formulas (20) and (21)
				b1 = err + (label*(new.alpha - alpha)*sum(x1*x1))+(label2*(new.alpha2-alpha2)*sum(x1*x2)) + model.attr$b
				b2 = err2 + (label*(new.alpha - alpha)*sum(x1*x2))+(label2*(new.alpha2-alpha2)*sum(x2*x2)) + model.attr$b

				#update b
				model.attr$b <- update.b(b1, b2, new.alpha, new.alpha2, model.attr$C)

				#alpha change number
				numChanged<-numChanged+1
			}
		}#end for index

		#alternate between full passes and non-bound-only passes
		if(examAll == T){
			examAll=F
		}
		else if(numChanged==0){
			cat(" --no alpha changed in this iteration. Exam all in next iteration\n")
			examAll=T
		}
		cat(" --num alpha changed=",numChanged,"\n",sep="")
		iter=iter+1
		cat("\n")

		#force break
		if(iter > max.iter){
			cat("Warning: SVM not converge but reach max iterations!","\n")
			break
		}

	}#end while

	#recover the primal weight vector from the alphas
	model.attr$w<-getW(model.attr)

	#end time
	end.time<-proc.time()
	running.time<-end.time - start.time
	cat("\n")
	cat("SVM training finished. Elapsed time=", running.time["elapsed"], " secs")

	return(model.attr)
}
scale=train.var); 44 | 45 | #run SVM 46 | svm.model<-SVM(train.X, train.Y, 0.1, max.iter=3000) 47 | 48 | #exam result 49 | 50 | #train error 51 | pred.train<-predict(svm.model, train.X) 52 | table(train.Y, pred.train) 53 | 54 | train.err.rate<- sum(pred.train != train.Y)/length(pred.train) 55 | train.accurate.rate <- 1 - train.err.rate 56 | 57 | 58 | #test error 59 | pred.test<-predict(svm.model, test.X) 60 | table(test.Y, pred.test) 61 | 62 | test.err.rate<- sum(pred.test != test.Y)/length(pred.test) 63 | test.accurate.rate <- 1 -test.err.rate 64 | 65 | -------------------------------------------------------------------------------- /SVM/SVM/SVMTestSimple.R: -------------------------------------------------------------------------------- 1 | #remove all previous data 2 | rm(list=ls()) 3 | #This is a SVM test with small data set (iris dataset) 4 | #training time is less than a second. 5 | 6 | #load SVM 7 | source("SVM.R") 8 | #load data & function 9 | data(iris) 10 | 11 | #select only the first two Species from data 12 | data = iris[iris$Species==c('setosa') | iris$Species==c('versicolor') , ] 13 | 14 | #define class y=+1,-1 15 | data$y=0; 16 | data[data$Species==c('setosa'),]$y = -1 17 | data[data$Species==c('versicolor'),]$y=1 18 | 19 | #ignore old Species term 20 | data = data[, -5] 21 | 22 | #set label to x1,x2,x3,x4 23 | names(data)<-c('x1','x2','x3','x4','y') 24 | 25 | #number of samples 26 | m=dim(data)[1]; 27 | # number of features 28 | n=dim(data)[2]; 29 | 30 | X = data[,1:(n-1)] 31 | Y = data$y 32 | 33 | #get random sample 34 | indices = 1:m 35 | train.indices = sample(m, as.integer(m/2)) 36 | test.indices = indices[!indices %in% train.indices] 37 | train.X = X[train.indices,] 38 | test.X = X[test.indices,] 39 | train.Y = Y[train.indices] 40 | test.Y = Y[test.indices] 41 | 42 | #normalize training data 43 | train.X = scale(train.X); 44 | train.mu = attr(train.X, "scaled:center"); 45 | train.var = attr(train.X, "scaled:scale"); 46 | #apply normalized data 
#calculate the prediction error E_k = f(x_k) - y_k at index k
#(f(x) = sum_i y_i alpha_i <x_i, x> - b, linear kernel)
getErr<-function(svm.obj, k){

	y.a.x<- svm.obj$y*svm.obj$alpha*(svm.obj$x)
	x.k<- as.matrix(svm.obj$x[k,], ncol=1)
	z<- as.matrix(y.a.x) %*% x.k
	pred<- sum(z)-svm.obj$b

	err<- (pred - svm.obj$y[k])
	return(err)
}

#primal weight vector w = sum_i y_i * alpha_i * x_i
getW<-function(svm.obj){
	z<- svm.obj$y*svm.obj$alpha*(svm.obj$x)
	w<-colSums(z)
	return(w)
}

#second-choice heuristic (Platt 1998): among non-bound alphas pick the
#index whose error is farthest from err.i; random fallback otherwise.
selectIndex<-function(i, err.i, svm.obj){
	C<-svm.obj$C
	#FIX: use elementwise `&`, not scalar `&&`. `&&` inspects only the
	#first element (and errors on vectors in R >= 4.3), so the heuristic
	#never saw the full candidate set. SVM.R builds the same mask with `&`.
	non.zero.index<-which((svm.obj$alpha != 0) & (svm.obj$alpha < C))

	max.index<-(-1)
	maxE<-0
	if(length(non.zero.index) >0){
		for(k in non.zero.index){
			if(k == i){
				next
			}
			err = abs(getErr(svm.obj, k)-err.i)
			if(err>maxE){
				maxE<-err
				max.index<-k
			}
		}#end for
	}

	#no usable candidate: fall back to a random second index
	if(max.index < 0){
		max.index<-randSelectIndex(i, svm.obj$m)
	}

	return(max.index)
}
#pick a random index j != i uniformly from 1..m
randSelectIndex<-function(i, m){
	j=i
	while(T){
		j= sample(m,1)
		if(j!=i) break
	}
	return(j)
}

#clip a candidate alpha into the feasible box [low, high]
getAlpha<-function(current, high, low){

	if(current > high){
		current = high
	}
	else if(current < low){
		current = low
	}

	return(current)
}

#threshold update, Platt (1998): if alpha1 is non-bound, b1 satisfies the
#KKT conditions; else if alpha2 is non-bound, b2 does; if both sit at a
#bound, any value between b1 and b2 is valid, so take the midpoint.
#NOTE(review): the original body was garbled in extraction; this
#reconstruction follows Platt's paper and the call site in SVM.R.
update.b<-function(b1, b2, alpha, alpha2, C){

	if(alpha > 0 && alpha < C){
		return(b1)
	}
	else if(alpha2 > 0 && alpha2 < C){
		return(b2)
	}
	else{
		return((b1 + b2)/2)
	}
}

#S3 predict method for class "SVM": sign of (w'x - b), returned as +1/-1
predict.SVM<-function(svm.obj, data){

	z<- as.matrix(data) %*% (svm.obj$w)
	pred<- z - svm.obj$b
	pos.index<-which(pred>0)

	result<-rep(-1, dim(data)[1])
	result[pos.index]<-1

	return(result)
}
class Kmeans(object):

    """K-means clustering trained with EM-style alternating updates.

    Parameters
    ----------
    k : int
        Number of clusters.
    dist_func : callable
        ``dist_func(x_1, x_2) -> float`` distance between two points.
    """

    def __init__(self, k, dist_func):
        self.k = k
        self.dist_func = dist_func

    def random_pick_center(self, X):
        """Pick k distinct rows of X (without replacement) as initial centers."""
        n_row = X.shape[0]
        idx = np.random.choice(n_row, self.k, replace=False)
        centers = X[idx, :].copy()
        return centers

    def group_idx(self, x):
        """Return the index of the center nearest to sample ``x``."""
        dist = np.array([self.dist_func(x, center) for center in self.centers])
        return np.argmin(dist)

    def _e_step(self, X):
        """E-step: fix centers, assign each row of X to its nearest center."""
        n_row = X.shape[0]
        cluster_idx = [self.group_idx(X[i, :]) for i in range(n_row)]
        return np.array(cluster_idx)

    def _m_step(self, X, clusters):
        """M-step: recompute each center as the mean of its assigned points.

        A cluster that lost all of its points keeps its previous center
        (the original divided by zero and produced NaN centers).
        """
        new_centers = []
        for i in range(self.k):
            idx = np.where(clusters == i)[0]
            if idx.size == 0:
                # empty cluster: keep the old center instead of 0/0
                new_centers.append(self.centers[i].copy())
                continue
            cluster_X = X[idx, :]
            new_centers.append(cluster_X.sum(axis=0) / idx.size)
        return np.array(new_centers)

    def cal_cost(self, X, groups):
        """Average squared distance of every point to its cluster center."""
        total_cost = 0.
        for i in range(self.k):
            idx = np.where(groups == i)[0]
            group_X = X[idx, :]
            diff = group_X - self.centers[i, :]
            total_cost += np.power(diff, 2).sum()
        return total_cost / X.shape[0]

    def fit(self, X, iterations=15):
        """Run ``iterations`` rounds of E/M updates from a random init."""
        self.centers = self.random_pick_center(X)

        for n_iter in range(iterations):
            cluster_idx = self._e_step(X)
            # report cost before the center update
            cost = self.cal_cost(X, cluster_idx)
            print("iteration %d: cost: %.3f" % (n_iter, cost))

            self.centers = self._m_step(X, cluster_idx)

        # final assignment / cost with the last centers
        cluster_idx = self._e_step(X)
        cost = self.cal_cost(X, cluster_idx)
        print("iteration: %d, cost: %.3f" % (iterations, cost))

        return self

    def transform(self, X):
        """Assign a cluster index to every row of X."""
        return self._e_step(X)

    def fit_transform(self, X, iterations=15):
        """Fit, then return the cluster assignment of X."""
        return self.fit(X, iterations=iterations).transform(X)
class KNN(object):

    """k-nearest-neighbor binary classifier for labels in {+1., -1.}.

    Prediction is the sign of the sum of the k nearest training labels,
    i.e. an unweighted majority vote; an exact tie resolves to -1.
    """

    def __init__(self, k, distance_func):
        self.k = k
        self.cal_dist = distance_func

    def fit(self, X, y):
        """Lazy fit: just memorize the training data."""
        self.train_X = X
        self.train_y = y
        return self

    def _single_predict(self, x):
        """Predict the label of a single sample ``x``."""
        n_row = self.train_X.shape[0]
        # distance to every training point
        # (range instead of xrange: runs on Python 2 and 3)
        distance = np.array(
            [self.cal_dist(x, self.train_X[i, :]) for i in range(n_row)])
        index = np.argsort(distance)
        sum_neighbor = self.train_y[index[:self.k]].sum()
        return 1. if sum_neighbor > 0 else -1.

    def predict(self, X):
        """Predict a label for each row of X."""
        n_row = X.shape[0]
        pred = [self._single_predict(X[i, :]) for i in range(n_row)]
        return np.array(pred)
def gmm_model(data, K, mu_0=0.0, alpha_0=0.1, beta_0=0.1, alpha=1.0):
    """
    Build a PyMC 2.x model for a Bayesian Gaussian mixture.

    K: number of component
    n_samples: number of n_samples
    n_features: number of features

    mu_0: prior mean of mu_k
    alpha_0: alpha of Inverse Gamma tau_k
    beta_0: beta of Inverse Gamma tau_k
    alpha = prior of dirichlet distribution phi_0

    latent variable:
    phi_0: shape = (K-1, ), dirichlet distribution
    phi: shape = (K, ), add K-th value back to phi_0
    z: shape = (n_samples, ), Categorical distribution, z[k] is component indicator
    mu_k: shape = (K, n_features), normal distribution, mu_k[k] is mean of k-th component
    tau_k : shape = (K, n_features), inverse-gamma distribution, tau_k[k] is variance of k-th component
    """

    n_samples, n_features = data.shape

    # latent variables
    # per-component, per-feature variance parameters ~ InverseGamma(alpha_0, beta_0)
    tau_k = pm.InverseGamma(
        'tau_k', alpha_0 * np.ones((K, n_features)), beta_0 * np.ones((K, n_features)), value=beta_0 * np.ones((K, n_features)))
    # component means ~ Normal(mu_0, tau_k); `value=` sets the MCMC start point
    mu_k = pm.Normal('mu_k', np.ones((K, n_features)) *
                     mu_0, tau_k, value=np.ones((K, n_features)) * mu_0)
    # mixture weights: PyMC's Dirichlet stores only the first K-1 entries
    phi_0 = pm.Dirichlet('phi_0', theta=np.ones(K) * alpha)

    @pm.deterministic(dtype=float)
    def phi(value=np.ones(K) / K, phi_0=phi_0):
        # append the implicit K-th weight so the full phi sums to 1
        val = np.hstack((phi_0, (1 - np.sum(phi_0))))
        return val

    # component assignment for every sample, started from a uniform draw
    z = pm.Categorical(
        'z', p=phi, value=pm.rcategorical(np.ones(K) / K, size=n_samples))

    # observed variables: each row of `data` is Normal(mu_k[z], tau_k[z])
    x = pm.Normal('x', mu=mu_k[z], tau=tau_k[z], value=data, observed=True)

    return pm.Model([mu_k, tau_k, phi_0, phi, z, x])
def gmm_example():
    # Fit a 3-component Bayesian GMM to synthetic 3-feature data via MCMC
    # and print the last retained posterior draw of the means/assignments.
    data = generate_data()

    # set parameters
    K = 3          # number of mixture components
    mu_0 = 0.0     # prior mean of the component means
    alpha_0 = 0.1  # InverseGamma shape for the variance parameters
    beta_0 = 0.1   # InverseGamma rate for the variance parameters
    alpha = 1.0    # symmetric Dirichlet prior on the mixture weights

    model = gmm_model(
        data, K, mu_0=mu_0, alpha_0=alpha_0, beta_0=beta_0, alpha=alpha)

    # 50k MCMC samples, discard 10k as burn-in, keep every 100th draw.
    # NOTE(review): MCMC mixes slowly on mixtures (see module docstring);
    # label switching can make raw traces hard to interpret.
    mcmc = pm.MCMC(model)
    mcmc.sample(50000, burn=10000, thin=100)

    print "mu_k:"
    print mcmc.trace('mu_k')[-1]
    print 'z:'
    print mcmc.trace('z')[-1]

if __name__ == '__main__':
    gmm_example()
def load_file(file_name):
    """Load a whitespace-separated numeric data file.

    Each non-blank row holds the feature values followed by the label in
    the last column.

    Fixes over the original: opens in text mode (the ``'rb'`` +
    ``strip('\\n')`` combination breaks on Python 3 and left ``'\\r'``
    behind on Windows files), and skips blank lines (which previously
    crashed on ``l[-1]``).

    Returns
    -------
    (X, y) : X is an (n_samples, n_features) float array, y the labels.
    """
    rows = []
    with open(file_name, 'r') as f:
        for line in f:
            # str.split() with no argument drops all whitespace incl. \r\n
            fields = line.split()
            if fields:  # skip blank lines
                rows.append([float(tok) for tok in fields])
    y = np.array([r[-1] for r in rows])
    X = np.array([r[:-1] for r in rows])
    return X, y
class NNet(object):

    """Multi-layer feed-forward network with tanh activations, trained by SGD.

    ``layers`` lists the neuron count per layer, e.g. ``[2, 3, 1]``.
    Each transition stores an ``(n_in + 1, n_out)`` weight matrix whose top
    row is the bias unit; weights start uniform in ``[r_min, r_max]``.
    """

    def __init__(self, layers, r_min, r_max, learn_rate):
        """
        layers should be list of neurons in each layer, ex. [2, 3, 1]
        """
        if not isinstance(layers, list) or len(layers) < 3:
            raise ValueError('invalid layer parameter')
        self.layers = layers
        self.n_layer = len(layers)
        self.r_min = r_min
        self.r_max = r_max
        self.learn_rate = learn_rate

        # initialize one weight matrix per layer transition
        self.ws = []
        for layer_idx in range(self.n_layer - 1):
            layer_size = (layers[layer_idx] + 1, layers[layer_idx + 1])
            self.ws.append(self.init_w(layer_size))

    def init_w(self, size):
        """Draw a weight matrix uniformly from [r_min, r_max]."""
        return np.random.uniform(self.r_min, self.r_max, size=size)

    def forward_prop(self, x):
        """Forward pass for one sample.

        Returns ``(xs, scores)`` where ``xs[0]`` is the input, ``xs[i]`` the
        activation of layer i, and ``scores[i]`` the pre-activation feeding
        layer i+1.
        """
        scores = []
        xs = [x]
        for layer_idx in range(self.n_layer - 1):
            x_add_1 = np.append(1., xs[-1])  # prepend the bias input
            w = self.ws[layer_idx]
            assert x_add_1.shape[0] == w.shape[0]
            next_score = np.dot(x_add_1, w)
            scores.append(next_score)
            xs.append(np.tanh(next_score))
        return xs, scores

    def backward_prop(self, xs, scores, y):
        """Backward pass for one sample; returns the per-layer deltas.

        BUG FIX: for an activation x = tanh(s), the chain rule needs
        tanh'(s) = 1 - tanh(s)^2 = 1 - x^2.  The original computed
        d_tanh(x) = 1 - tanh(x)^2, i.e. the derivative evaluated at the
        wrong point, so every gradient was off.
        """
        out = xs[-1]
        # squared-error output delta: d/ds (y - tanh(s))^2
        deltas = [-2 * (y - out) * (1. - np.power(out, 2))]

        # built back-to-front, reversed at the end
        for layer_idx in range(self.n_layer - 1, 0, -1):
            prev_layer_idx = layer_idx - 1
            w = self.ws[prev_layer_idx]
            x = xs[prev_layer_idx]
            no_bias_w = w[1:, ]  # bias row carries no upstream gradient
            delta = deltas[-1]
            # NOTE: for prev_layer_idx == 0 this multiplies by (1 - input^2);
            # that input-layer delta is never used by sgd(), so it is harmless.
            prev_delta = np.dot(no_bias_w, delta) * (1. - np.power(x, 2))
            deltas.append(prev_delta)

        deltas.reverse()
        return deltas

    def sgd(self, X, y):
        """One stochastic-gradient step on a single (x, y) pair."""
        xs, scores = self.forward_prop(X)
        deltas = self.backward_prop(xs, scores, y)

        new_ws = []
        for layer_idx in range(self.n_layer - 1):
            old_w = self.ws[layer_idx]
            x_add_1 = np.append(1., xs[layer_idx])
            delta = deltas[layer_idx + 1]
            grad = np.outer(x_add_1, delta)
            assert old_w.shape == grad.shape
            new_ws.append(old_w - self.learn_rate * grad)
        self.ws = new_ws

    def fit(self, X, y, iterations):
        """Train with SGD, sampling a random row each iteration."""
        n_row = X.shape[0]
        for iter_idx in range(iterations):
            update_idx = np.random.randint(0, n_row)
            self.sgd(X[update_idx, :], y[update_idx])

    def predict(self, X):
        """Raw network output (last-layer tanh activation) per row of X."""
        n_row = X.shape[0]
        current_x = X
        for w in self.ws:
            bias = np.ones((n_row, 1))
            x_add_1 = np.hstack([bias, current_x])
            current_x = np.tanh(np.dot(x_add_1, w))
        return current_x[:, 0]

    def cal_r2_error(self, X, y):
        """Mean squared error of the raw network output against y."""
        diff = y - self.predict(X)
        return np.power(diff, 2).sum() / len(y)

    def cal_error(self, X, y):
        """0/1 classification error after thresholding the output at 0."""
        pred_X = self.predict(X)
        pred_y = np.ones((X.shape[0],))
        pred_y[np.where(pred_X < 0.0)] = -1.
        return 1. - float((y == pred_y).sum()) / len(y)
self.left = None # left tree 28 | self.right = None # right tree 29 | 30 | # for branch node 31 | self.branch_params = None 32 | # for leaf node 33 | self.value = None 34 | print('create Node in level %d' % level) 35 | 36 | def single_pred(self, record): 37 | """ 38 | pred class os instance X 39 | """ 40 | if self.value: 41 | return self.value 42 | else: 43 | # goes to sub tree 44 | feature_idx, theta = self.branch_params 45 | branch = record[feature_idx] - theta 46 | if branch < 0: 47 | return self.left.single_pred(record) 48 | else: 49 | return self.right.single_pred(record) 50 | 51 | def predict(self, X): 52 | n, _ = X.shape 53 | ret = [self.single_pred(X[i]) for i in xrange(n)] 54 | return np.array(ret) 55 | 56 | def node_count(self): 57 | """ 58 | return sub node count in tree 59 | """ 60 | if self.value: 61 | cnt = 0 62 | else: 63 | left_cnt = self.left.node_count() 64 | right_cnt = self.right.node_count() 65 | cnt = 1 + left_cnt + right_cnt 66 | return cnt 67 | 68 | @classmethod 69 | def _terminate(cls, y): 70 | if len(y) < 2 or np.unique(y).shape[0] == 1: 71 | ret = True 72 | else: 73 | ret = False 74 | if ret: 75 | print('terminate condtion met') 76 | return ret 77 | 78 | @classmethod 79 | def _gini_index(cls, ys): 80 | ys = np.array(ys) 81 | total = ys.sum() 82 | norm_y = ys / float(total) 83 | gini_idx = 1. 
- np.power(norm_y, 2).sum() 84 | return gini_idx 85 | 86 | @classmethod 87 | def _best_split(cls, X, y): 88 | """ 89 | return tuple (feature_idx, sign, theta) 90 | """ 91 | n = X.shape[0] 92 | num_feature = X.shape[1] 93 | y_types = np.unique(y) 94 | 95 | # initialize 96 | min_score = float(n) 97 | feature_idx = None 98 | best_theta = None 99 | best_idx = None 100 | 101 | for feature_idx in xrange(num_feature): 102 | # counter for y 103 | cumulate_y = Counter() 104 | rest_y = Counter() 105 | for y_type in y_types: 106 | cnt = np.where(y == y_type)[0].shape[0] 107 | rest_y[y_type] = cnt 108 | 109 | # sorted data 110 | sorted_idx = np.argsort(X[:, feature_idx]) 111 | sorted_X = np.copy(X) 112 | sorted_y = np.copy(y) 113 | sorted_X = sorted_X[sorted_idx] 114 | sorted_y = sorted_y[sorted_idx] 115 | #print "_best_split:", sorted_X.shape, sorted_y.shape 116 | 117 | for idx in xrange(n-1): 118 | theta = (sorted_X[idx, feature_idx] + sorted_X[idx + 1, feature_idx]) / 2 119 | y_label = sorted_y[idx] 120 | cumulate_y[y_label] += 1 121 | rest_y[y_label] -= 1 122 | left_cnt = sum(cumulate_y.values()) 123 | right_cnt = sum(rest_y.values()) 124 | w_1 = left_cnt * cls._gini_index(cumulate_y.values()) 125 | w_2 = right_cnt * cls._gini_index(rest_y.values()) 126 | score = w_1 + w_2 127 | if score < min_score: 128 | min_score = score 129 | best_theta = theta 130 | best_idx = feature_idx 131 | #print('new min score: %.3f' % score) 132 | #print('feature: %d, theta: %.3f' % (best_idx, best_theta)) 133 | #print('left: %d, right: %d' % (left_cnt, right_cnt)) 134 | print('feature: %d, theta: %.3f' % (best_idx, best_theta)) 135 | return (best_idx, best_theta) 136 | 137 | def _data_split(self, X, y): 138 | feature_idx, theta = self.branch_params 139 | X_feature = X[:, feature_idx] 140 | pos_idx = np.where(X_feature > theta)[0] 141 | neg_idx = np.where(X_feature < theta)[0] 142 | #print X_feature.shape 143 | #print pos_idx 144 | #print neg_idx 145 | X_pos = X[pos_idx] 146 | y_pos = 
y[pos_idx] 147 | X_neg = X[neg_idx] 148 | y_neg = y[neg_idx] 149 | 150 | msg = 'split data: pos=%d, %d, neg=%d, %d' % (X_pos.shape[0], y_pos.shape[0], X_neg.shape[0], y_neg.shape[0]) 151 | print(msg) 152 | return X_pos, y_pos, X_neg, y_neg 153 | 154 | def fit(self, X, y): 155 | if self._terminate(y): 156 | self.value = y[0] 157 | # print "terminate: pred=%d" % self.value 158 | else: 159 | if self.level > 10: 160 | self.value = y[0] 161 | return self 162 | 163 | self.branch_params = self._best_split(X, y) 164 | self.left = DTree(self.level + 1) 165 | self.right = DTree(self.level + 1) 166 | 167 | pos_X, pos_y, neg_X, neg_y = self._data_split(np.copy(X), np.copy(y)) 168 | # neg sample in left tree, pos in right tree 169 | print('fit left tree, level %d' %self.left.level) 170 | self.left.fit(neg_X, neg_y) 171 | print('fit right tree, level %d' %self.right.level) 172 | self.right.fit(pos_X, pos_y) 173 | return self 174 | 175 | 176 | class DecisionStump(object): 177 | def __init__(self, level): 178 | self.branch_params = None 179 | # for leaf node 180 | self.value = None 181 | # print('create Node in level %d' % level) 182 | 183 | def single_pred(self, record): 184 | """ 185 | pred class os instance X 186 | """ 187 | # goes to sub tree 188 | sign, feature_idx, theta = self.branch_params 189 | branch = sign * (record[feature_idx] - theta) 190 | return -1. if branch < 0 else 1. 191 | 192 | def predict(self, X): 193 | n, _ = X.shape 194 | ret = [self.single_pred(X[i]) for i in xrange(n)] 195 | return np.array(ret) 196 | 197 | @classmethod 198 | def _gini_index(cls, ys): 199 | ys = np.array(ys) 200 | total = ys.sum() 201 | norm_y = ys / float(total) 202 | gini_idx = 1. 
- np.power(norm_y, 2).sum() 203 | return gini_idx 204 | 205 | @classmethod 206 | def _best_split(cls, X, y): 207 | """ 208 | return tuple (feature_idx, sign, theta) 209 | """ 210 | n = X.shape[0] 211 | num_feature = X.shape[1] 212 | y_types = np.unique(y) 213 | 214 | # initialize 215 | min_score = float(n) 216 | feature_idx = None 217 | best_theta = None 218 | best_idx = None 219 | 220 | for feature_idx in xrange(num_feature): 221 | # counter for y 222 | cumulate_y = Counter() 223 | rest_y = Counter() 224 | for y_type in y_types: 225 | cnt = np.where(y == y_type)[0].shape[0] 226 | rest_y[y_type] = cnt 227 | 228 | # sorted data 229 | sorted_idx = np.argsort(X[:, feature_idx]) 230 | sorted_X = np.copy(X) 231 | sorted_y = np.copy(y) 232 | sorted_X = sorted_X[sorted_idx] 233 | sorted_y = sorted_y[sorted_idx] 234 | #print "_best_split:", sorted_X.shape, sorted_y.shape 235 | 236 | for idx in xrange(n-1): 237 | theta = (sorted_X[idx, feature_idx] + sorted_X[idx + 1, feature_idx]) / 2 238 | y_label = sorted_y[idx] 239 | cumulate_y[y_label] += 1 240 | rest_y[y_label] -= 1 241 | left_cnt = sum(cumulate_y.values()) 242 | right_cnt = sum(rest_y.values()) 243 | w_1 = left_cnt * cls._gini_index(cumulate_y.values()) 244 | w_2 = right_cnt * cls._gini_index(rest_y.values()) 245 | score = w_1 + w_2 246 | if score < min_score: 247 | min_score = score 248 | best_theta = theta 249 | best_idx = feature_idx 250 | #print('new min score: %.3f' % score) 251 | #print('feature: %d, theta: %.3f' % (best_idx, best_theta)) 252 | #print('left: %d, right: %d' % (left_cnt, right_cnt)) 253 | print('feature: %d, theta: %.3f' % (best_idx, best_theta)) 254 | return (best_idx, best_theta) 255 | 256 | def _stump_sign(self, X, y, feature_idx, theta): 257 | X_feature = X[:, feature_idx] 258 | pos_idx = np.where(X_feature > theta)[0] 259 | neg_idx = np.where(X_feature < theta)[0] 260 | #print X_feature.shape 261 | #print pos_idx 262 | #print neg_idx 263 | return 1. if y[pos_idx].sum() > 0.0 else -1. 
264 | 265 | 266 | def fit(self, X, y): 267 | feature_idx, theta = self._best_split(X, y) 268 | sign = self._stump_sign(X, y, feature_idx, theta) 269 | self.branch_params = (sign, feature_idx, theta) 270 | return self 271 | 272 | 273 | def main(): 274 | X_train, y_train = load_file(TRAIN_FILE) 275 | X_test, y_test = load_file(TEST_FILE) 276 | d_tree = DTree(0) 277 | d_tree.fit(X_train, y_train) 278 | 279 | train_pred = d_tree.predict(X_train) 280 | train_acc = (y_train == train_pred).sum() / float(len(y_train)) 281 | test_pred = d_tree.predict(X_test) 282 | test_acc = (y_test == test_pred).sum() / float(len(y_test)) 283 | 284 | print train_acc, test_acc 285 | 286 | return d_tree 287 | 288 | -------------------------------------------------------------------------------- /tree/rf.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | TRAIN_FILE = '' 4 | TEST_FILE = '' 5 | 6 | from dtree import DTree 7 | 8 | def boot_strap(X, y): 9 | n = X.shape[0] 10 | sample_idx = np.random.choice(n, n, replace=True) 11 | 12 | new_X = X[sample_idx] 13 | new_y = y[sample_idx] 14 | return new_X, new_y 15 | 16 | 17 | def rf_prediction(trees, X): 18 | pred_sum = np.zeros((X.shape[0],)) 19 | for tree in trees: 20 | pred = tree.predict(X) 21 | pred_sum += pred 22 | 23 | pos_idx = np.where(pred_sum >= 0.)[0] 24 | neg_idx = np.where(pred_sum < 0.)[0] 25 | 26 | pred_sum[pos_idx] = 1. 27 | pred_sum[neg_idx] = -1. 
28 | return pred_sum 29 | 30 | 31 | def RF(X_train, y_train): 32 | """ 33 | random forest 34 | """ 35 | 36 | trees = [] 37 | for i in xrange(300): 38 | d_tree = DTree(0) 39 | bootstrap_X, bootstrap_y = boot_strap(X_train, y_train) 40 | d_tree.fit(bootstrap_X, bootstrap_y) 41 | trees.append(d_tree) 42 | return trees 43 | 44 | def stump(X_train, y_train): 45 | trees = [] 46 | for i in xrange(300): 47 | d_tree = DecisionStump(0) 48 | bootstrap_X, bootstrap_y = boot_strap(X_train, y_train) 49 | d_tree.fit(bootstrap_X, bootstrap_y) 50 | trees.append(d_tree) 51 | return trees 52 | 53 | def experiment(): 54 | X_train, y_train = load_file(TRAIN_FILE) 55 | X_test, y_test = load_file(TEST_FILE) 56 | 57 | train_ret = [] 58 | test_ret = [] 59 | for i in xrange(10): 60 | #trees = RF(X_train, y_train) 61 | trees = stump(X_train, y_train) 62 | 63 | train_pred = rf_prediction(trees, X_train) 64 | train_acc = (y_train == train_pred).sum() / float(len(y_train)) 65 | 66 | test_pred = rf_prediction(trees, X_test) 67 | test_acc = (y_test == test_pred).sum() / float(len(y_test)) 68 | 69 | train_ret.append(train_acc) 70 | test_ret.append(test_acc) 71 | print train_acc, test_acc 72 | --------------------------------------------------------------------------------