├── .gitignore ├── Analysis ├── 1405091.py ├── UnbalancedDataSet.py ├── sampledcreditcard.csv └── untitled0.py ├── ChannelEquilizer ├── Details of assignment on Channel Equalization.docx ├── Readme.txt ├── channelEq.py ├── input.txt ├── newTest.txt ├── test.txt └── testOnline.txt ├── DcsnT_Adaboost ├── ML Assignment 1.pdf ├── adult_test.csv ├── adult_test.txt ├── adult_train.csv ├── adult_train.txt └── offline.py ├── NN ├── nn.py ├── testNN.txt ├── testNN1.txt ├── trainNN.txt └── trainNN1.txt ├── PCA_EMClusteringGMM ├── ML 2.pdf ├── PCA.py └── data.txt ├── Pattern ├── .idea │ ├── Pattern.iml │ ├── inspectionProfiles │ │ └── Project_Default.xml │ ├── misc.xml │ ├── modules.xml │ └── workspace.xml ├── D.txt ├── Data.csv ├── command ├── csvread.py ├── output.csv └── test.csv ├── Perceptron ├── .idea │ ├── misc.xml │ ├── modules.xml │ ├── online.iml │ └── workspace.xml ├── BinaryPerceptron │ ├── Basic.py │ ├── Pocket.py │ ├── Reward&Punishment.py │ ├── Test.txt │ ├── Test1.txt │ ├── Train.txt │ ├── Train1.txt │ ├── testLinearlyNonSeparable.txt │ ├── testLinearlySeparable.txt │ ├── trainLinearlyNonSeparable.txt │ └── trainLinearlySeparable.txt ├── MultiClass-KERSEL │ ├── Test.txt │ ├── Train.txt │ └── kesler.py ├── Test.txt ├── Train.txt └── online.py ├── Recommend ├── 1405091.py ├── ML 3.pdf ├── U.txt ├── V.txt ├── dd.txt └── uvSet.txt ├── TemplateMatch ├── Assignment Description.pdf ├── reference.jpg └── tm.py ├── templateMatching ├── .idea │ ├── misc.xml │ ├── modules.xml │ ├── templateMatching.iml │ └── workspace.xml ├── movie.mov ├── output.mov ├── reference.jpg └── tm.py └── testing ├── .idea ├── misc.xml ├── modules.xml ├── other.xml ├── testing.iml └── workspace.xml ├── offline1[sample] ├── Untitled Document.txt ├── test.csv ├── testing.py ├── train.csv └── train.txt └── starting ├── Data.csv ├── Salary_Data.csv ├── data_processing_template.py └── simple_linear_regression.py /.gitignore: -------------------------------------------------------------------------------- 1 | /Analysis/creditcard.csv 2 | /DcsnT_Adaboost/creditcard.csv 3 | /ChannelEquilizer/train.txt 4 | /PCA_EMClusteringGMM/onlineDataset.txt 5 | /Recommend/data.txt 6 | /TemplateMatch/movie.mov 7 | /ChannelEquilizer/trainOnline.txt 8 | /Recommend/dataOnline.txt -------------------------------------------------------------------------------- /Analysis/1405091.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import math 4 | import matplotlib.pyplot as plt 5 | from sklearn.preprocessing import Imputer 6 | 7 | def dummy_df(df, todummy_list): 8 | for x in todummy_list: 9 | dummies = pd.get_dummies(df[x], prefix=x, dummy_na=False) 10 | df = df.drop(x, 1) 11 | df = pd.concat([df, dummies], axis=1) 12 | return df 13 | 14 | 15 | def entropyCalc(x,y): 16 | p1=x/(x+y) 17 | p2=y/(x+y) 18 | res=-p1*math.log2(p1)-p2*math.log2(p2) 19 | return res 20 | 21 | 22 | 23 | 24 | 25 | df = pd.read_csv('adult_train.csv', na_values=['#NAME?']) 26 | #print(df.head(5)) 27 | 28 | #print(df['age'].value_counts()) 29 | 30 | df['income']= [0 if x == '<=50K' else 1 for x in df['income']] 31 | X = df.drop('income', 1) 32 | Y = df.income 33 | 34 | total_zeros = Y[Y == 1].count() 35 | total_ones = Y[Y == 0].count() 36 | total_output= total_zeros + total_ones 37 | total_entropy=entropyCalc(total_zeros,total_ones) 38 | print(total_entropy) 39 | 40 | 41 | def plot_histogram(x): 42 | plt.hist(x, color='gray', alpha=0.5) 43 | plt.title("Histogram of 
'{var_name}'".format(var_name=x.name)) 44 | plt.xlabel("Value") 45 | plt.ylabel("Frequency") 46 | plt.show() 47 | #print(X['age'].value_counts()) 48 | 49 | #plot_histogram(X['age']) 50 | 51 | def plot_histogram_dv(x,y): 52 | plt.hist(list(x[y==0]), alpha=0.5, label='Outcome=0') 53 | plt.hist(list(x[y==1]), alpha=0.5, label='Outcome=1') 54 | plt.title("Histogram of '{var_name}' by Outcome Category".format(var_name=x.name)) 55 | plt.xlabel("Value") 56 | plt.ylabel("Frequency") 57 | plt.legend(loc='upper right') 58 | plt.show() 59 | 60 | #plot_histogram_dv(X['age'], Y) 61 | #print(Y.value_counts()) 62 | #print(X.head(5)) 63 | #print(Y.head(5)) 64 | #plot_histogram_dv(X['native_country'], Y) 65 | #print(Y) 66 | ''' 67 | for col_name in X.columns: 68 | if X[col_name].dtypes == 'object': 69 | unique_cat = len(X[col_name].unique()) 70 | print(col_name) 71 | print("Feature '{col_name}' has {unique_cat} unique categories\ 72 | ".format(col_name=col_name, unique_cat=unique_cat)) 73 | elif X[col_name].dtypes == 'int64': 74 | unique_value = len(X[col_name].unique()) 75 | print(" Feature '{col_name}' has {unique_value} unique values\ 76 | ".format(col_name=col_name, unique_value=unique_value)) 77 | 78 | 79 | print(X['native_country'].value_counts().sort_values(ascending=False).head(10)) 80 | ''' 81 | 82 | #print(df['native_country'].value_counts()) 83 | #print(X['native_country'].value_counts()) 84 | X['native_country'] = ['United-States' if x == 'United-States' else 'Other' for x in X['native_country']] 85 | 86 | #print(X['native_country'].value_counts().sort_values(ascending=False)) 87 | #plot_histogram_dv(X['native_country'], Y) 88 | 89 | dummy_list=[] 90 | 91 | for col_name in X.columns: 92 | if X[col_name].dtypes == 'object': 93 | unique_cat = len(X[col_name].unique()) 94 | dummy_list.append(col_name) 95 | 96 | #print(dummy_list) 97 | 98 | X = dummy_df(X, dummy_list) 99 | #print(X.head(5)) 100 | 101 | 102 | #print(X.isnull().sum().sort_values(ascending=False).head()) 103 | 104 | 105 | imp = Imputer(missing_values='NaN', strategy='median', axis=0) 106 | imp.fit(X) 107 | X = pd.DataFrame(data=imp.transform(X) , columns=X.columns) 108 | 109 | #print(X.isnull().sum().sort_values(ascending=False).head()) 110 | #print(X.head(5)) 111 | ''' 112 | 113 | ''' 114 | #print(X['native_country_United-States'].value_counts()) 115 | print(Y.dtypes) 116 | 117 | 118 | # 119 | #print(ff) 120 | 121 | 122 | for col_name in X.columns: 123 | unique_cat = len(X[col_name].unique()) 124 | if(unique_cat > 2): 125 | print(col_name) 126 | ff=np.mean(X[col_name]) 127 | print(ff) 128 | X[col_name]= [ 0.0 if x <= ff else 1.0 for x in X[col_name]] 129 | print(X[col_name].value_counts()) 130 | 131 | 132 | 133 | ''' 134 | for col_name in X.columns: 135 | unique_cat = len(X[col_name].unique()) 136 | print(col_name) 137 | print("Feature '{col_name}' has {unique_cat} unique categories\ 138 | ".format(col_name=col_name, unique_cat=unique_cat)) 139 | ''' 140 | 141 | 142 | 143 | #print(Z.value_counts()) 144 | 145 | #binarization(Z) 146 | #Z=X.age 147 | #print(Z) 148 | #uniAge=set(Z) 149 | #uniAge=X['fnlwgt'].unique() 150 | #uniAge.sort() 151 | #print(np.mean(uniAge)) 152 | #print(len(uniAge)) 153 | 154 | #print(entropyCalc(7,17)) 155 | 156 | 157 | 158 | 159 | 160 | #X.age=Z 161 | #print(X['age'].value_counts()) 162 | 163 | 164 | print('Hello') 165 | -------------------------------------------------------------------------------- /Analysis/UnbalancedDataSet.py: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Sun Nov 25 17:48:29 2018 5 | 6 | @author: yeaseen 7 | """ 8 | 9 | import pandas as pd 10 | import math 11 | from sklearn.model_selection import train_test_split 12 | 13 | def dataCleaning(dataframe): 14 | for col_name in dataframe.columns: 15 | if(dataframe[col_name].dtypes == 'object'): 16 | dataframe[[col_name]] = dataframe[[col_name]].fillna(dataframe[col_name].mode().iloc[0]) 17 | elif(col_name != dataframe.columns.values[-1]): 18 | dataframe[[col_name]]=dataframe[[col_name]].fillna(dataframe[col_name].mean()) 19 | new = dataframe.filter([col_name,dataframe.columns[-1]], axis=1) 20 | new = new.sort_values(col_name) 21 | binPoint = binningPoint(new) 22 | dataframe[col_name]= [ 0 if x <= binPoint else 1 for x in dataframe[col_name]] 23 | return dataframe 24 | 25 | def binningPoint(dataframe): 26 | #print(dataframe) 27 | countZero=0 28 | countOne=0 29 | entropy=1 30 | collectorZero=dataframe.iloc[:,-1][dataframe.iloc[:,-1] == -1].count() 31 | #print(collectorZero) 32 | collectorOne= dataframe.iloc[:,-1][dataframe.iloc[:,-1] == 1].count() 33 | #print(collectorOne) 34 | feature=dataframe.columns[-1] 35 | total=len(dataframe) 36 | index=0 37 | current=0 38 | for row in dataframe.itertuples(index=True, name='Pandas'): 39 | # print(getattr(row, feature)) 40 | #print(countZero,countOne,collectorZero,collectorOne) 41 | if(getattr(row, feature)==-1): 42 | countZero+=1 43 | collectorZero-=1 44 | else: 45 | countOne+=1 46 | collectorOne-=1 47 | #print(countZero,countOne,collectorZero,collectorOne) 48 | if(not(collectorZero==0 and collectorOne==0)): 49 | current+=1 50 | res1= ((countZero+countOne) / total) * entropyCalc(countZero,countOne) 51 | #print(res1) 52 | res2= ((collectorZero+collectorOne) / total) * entropyCalc(collectorZero,collectorOne) 53 | #print(res2) 54 | #print(res1+res2) 55 | if(entropy>(res1+res2)): 56 | index=current 57 | #print(res1+res2) 58 | entropy=res1+res2 59 | #print(dataframe.columns[0]) 60 | #print(dataframe.iloc[index][dataframe.columns[0]]) 61 | p= dataframe.iloc[index][dataframe.columns[0]] 62 | q= dataframe.iloc[index+1][dataframe.columns[0]] 63 | #print(p,q) 64 | r=(p+q)/2 65 | return r 66 | 67 | 68 | 69 | def entropyCalc(x,y): 70 | p1=x/(x+y) 71 | p2=y/(x+y) 72 | if(p1==0 or p2==0): 73 | res=0 74 | else: 75 | res=-p1*math.log2(p1)-p2*math.log2(p2) 76 | return res 77 | 78 | df = pd.read_csv('creditcard.csv') 79 | 80 | #print(df.info()) 81 | 82 | 83 | isFraud = df[df['Class'] == 1] 84 | print(len(isFraud)) 85 | isNotFraud= df.iloc[200:5900] 86 | isNotFraud= isNotFraud[isNotFraud['Class'] == 0 ] 87 | print(len(isNotFraud)) 88 | 89 | 90 | 91 | 92 | 93 | frames=[isNotFraud,isFraud] 94 | 95 | result= pd.concat(frames) 96 | 97 | print(len(result)) 98 | 99 | #result.to_csv('sampledcreditcard.csv', encoding='utf-8', index=False) 100 | 101 | dfFull=dataCleaning(result) 102 | 103 | 104 | 105 | #result_output=dfN.iloc[:-1].values 106 | 107 | #print(len(result_output)) 108 | 109 | 110 | 111 | 112 | 113 | dfN,dfTestN= train_test_split(dfFull,test_size=0.2,shuffle=True,) 114 | 115 | 116 | print(dfN.head(5)) 117 | 118 | print(dfTestN.head(5)) 119 | print(dfN['Class'].value_counts()) 120 | print(dfTestN['Class'].value_counts()) -------------------------------------------------------------------------------- /Analysis/untitled0.py: -------------------------------------------------------------------------------- 1 | 
#!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Sat Nov 17 16:57:27 2018 5 | 6 | @author: yeaseen 7 | """ 8 | 9 | 10 | import matplotlib.pyplot as plt 11 | class run: 12 | def __init__(self, name): 13 | self.name = name 14 | 15 | 16 | def sumI(arg1,arg2,arg3,arg4): 17 | p=arg2 18 | print(p.name) 19 | return (arg1+arg3) 20 | 21 | def sumII(arg3,arg4): 22 | return (arg3+arg4) 23 | 24 | def mainF(func, arg1, arg2, arg3, arg4,func2,arg5,arg6,classReal,t): 25 | x=sumI(arg1,arg2,arg3,arg4) 26 | y=sumII(arg5,arg6) 27 | return x,y,t 28 | 29 | t=run('itsme') 30 | 31 | print(mainF(sumI,2,t,3,4,sumII,5,6,run,9)) 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | def plot_histogram(x): 44 | plt.hist(x, color='gray', alpha=0.5) 45 | plt.title("Histogram of '{var_name}'".format(var_name=x.name)) 46 | plt.xlabel("Value") 47 | plt.ylabel("Frequency") 48 | plt.show() 49 | #print(X['age'].value_counts()) 50 | 51 | #plot_histogram(X['age']) 52 | 53 | def plot_histogram_dv(x,y): 54 | plt.hist(list(x[y==0]), alpha=0.5, label='Outcome=0') 55 | plt.hist(list(x[y==1]), alpha=0.5, label='Outcome=1') 56 | plt.title("Histogram of '{var_name}' by Outcome Category".format(var_name=x.name)) 57 | plt.xlabel("Value") 58 | plt.ylabel("Frequency") 59 | plt.legend(loc='upper right') 60 | plt.show() 61 | 62 | #plot_histogram_dv(X['age'], Y) -------------------------------------------------------------------------------- /ChannelEquilizer/Details of assignment on Channel Equalization.docx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yeaseen/ML_Pattern/4db2fbb97eba5e485879a0c877df190ee68c7132/ChannelEquilizer/Details of assignment on Channel Equalization.docx -------------------------------------------------------------------------------- /ChannelEquilizer/Readme.txt: -------------------------------------------------------------------------------- 1 | The input.txt file contains two lines: the first line gives the channel impulse response h, and the second line gives the mean and variance of the Gaussian noise used to distort the channel output.
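For concreteness, here is a minimal sketch of the distortion model that channelEq.py builds from these two lines (the helper name and the sample bit string below are illustrative, not part of the repo):

import numpy as np

def distort(bits, h, mu, var):
    # x_k = h[0]*b_{k+1} + h[1]*b_k + n_k, matching the loop in channelEq.py.
    # np.random.normal takes a standard deviation, hence the sqrt; note that
    # channelEq.py itself passes the second value of input.txt to
    # np.random.normal unchanged, i.e. it treats it as a standard deviation.
    return [h[0] * bits[k + 1] + h[1] * bits[k] + np.random.normal(mu, np.sqrt(var))
            for k in range(len(bits) - 1)]

observed = distort([1, 1, 0, 0, 1, 0], h=[1.0, 0.6], mu=0.0, var=0.01)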
-------------------------------------------------------------------------------- /ChannelEquilizer/channelEq.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Sat Dec 15 18:37:40 2018 4 | 5 | @author: Asus 6 | """ 7 | 8 | import numpy as np 9 | from scipy.stats import multivariate_normal 10 | 11 | with open('trainOnline.txt') as f: 12 | content = f.read().split("\n") 13 | with open('testOnline.txt') as f: 14 | testcontent = f.read().split("\n") 15 | 16 | 17 | def split(s, delim): 18 | words = [] 19 | word = [] 20 | for c in s: 21 | if c not in delim: 22 | word.append(c) 23 | else: 24 | if word: 25 | words.append(''.join(word)) 26 | word = [] 27 | if word: 28 | words.append(''.join(word)) 29 | return words 30 | 31 | def loadfile(filename): 32 | file = open(filename, "r") 33 | rows = list() 34 | for line in file: 35 | vals = split(line, [' ' ,'\t', '\n']) 36 | rows.append(vals) 37 | return rows 38 | 39 | class channelClass: 40 | def __init__(self,contents,p): 41 | inputs=loadfile('input.txt') 42 | self.h = np.array(inputs[0],dtype=float) 43 | self.n=len(self.h) 44 | self.mu=float(inputs[1][0]) 45 | self.var=np.array(inputs[1][1],dtype=float) 46 | lst =[] 47 | for i in range(len(contents[0])-1): 48 | ans=float(contents[0][i+1])*self.h[0]+float(contents[0][i])*self.h[1]\ 49 | +np.random.normal(self.mu,self.var) 50 | lst.append(ans) 51 | l=p 52 | dictionary=[] 53 | clusterMeans=[] 54 | clusterCovs=[] 55 | clusterPriorProb=[] 56 | for i in range(np.power(l+1,2)-1): 57 | dictionary.append([]) 58 | clusterMeans.append([]) 59 | clusterCovs.append([]) 60 | clusterPriorProb.append([]) 61 | 62 | for i in range(l,len(content[0])): 63 | bs='' 64 | for j in range(0,l+1): 65 | bs+=content[0][i-j] 66 | #bs=bs[::-1] 67 | clss=int(bs,2) 68 | xv=[] 69 | for k in range(0,l): 70 | xv.append(lst[i-l+k]) 71 | xv.reverse() 72 | dictionary[clss].append(xv) 73 | for i in range(len(dictionary)): 74 | countermean=np.mean(np.array(dictionary[i]).T,axis =1) 75 | countercov=np.cov(np.array(dictionary[i]).T) 76 | clusterMeans[i]=countermean 77 | clusterCovs[i]=countercov 78 | clusterPriorProb[i]=(len(dictionary[i]) / (len(lst)-1)) 79 | 80 | self.dictionary=dictionary 81 | self.clusterMeans=clusterMeans 82 | self.clusterCovs=clusterCovs 83 | self.clusterPriorProb=clusterPriorProb 84 | 85 | def distortedOutput(self,contents): 86 | lst =[] 87 | for i in range(len(contents[0])-1): 88 | ans=float(contents[0][i+1])*self.h[0]+float(contents[0][i])*self.h[1]\ 89 | +np.random.normal(self.mu,self.var) 90 | lst.append(ans) 91 | return lst 92 | 93 | 94 | l=2 95 | model=channelClass(content,l) 96 | #print(model.clusterCovs) 97 | 98 | testXvector=model.distortedOutput(testcontent) 99 | 100 | 101 | 102 | #l=2 103 | pathsarray=np.zeros((len(testXvector)-1,np.power(l+1,2)-1), dtype=float)+np.finfo(np.float).eps 104 | 105 | 106 | for i in range(len(testXvector)-1): 107 | #print(i) 108 | if(i==0): 109 | xv=[] 110 | xv.append(testXvector[i]) 111 | xv.append(testXvector[i+1]) 112 | xv.reverse() 113 | for j in range(np.power(l+1,2)-1): 114 | pathsarray[i][j]+=np.log(model.clusterPriorProb[j])+multivariate_normal.pdf(xv, model.clusterMeans[j], model.clusterCovs[j]) 115 | #print(multivariate_normal.pdf(xv, model.clusterMeans[j], model.clusterCovs[j])) 116 | else: 117 | xv=[] 118 | xv.append(testXvector[i]) 119 | xv.append(testXvector[i+1]) 120 | xv.reverse() 121 | for j in range(np.power(l+1,2)-1): 122 | par=(j%4)*2 123 | 
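# Viterbi-style recursion: state j encodes the 3-bit window b_i b_{i-1} b_{i-2},
# so its two possible predecessors are the states whose leading two bits equal
# j's trailing two bits, namely (j%4)*2 and (j%4)*2 + 1. The max below keeps the
# best incoming path, and the np.log(0.5) term is the transition probability,
# the new bit being assumed equally likely to be 0 or 1.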
parmax=max(pathsarray[i-1][par],pathsarray[i-1][par+1]) 124 | pathsarray[i][j]+=parmax+np.log(0.5)+multivariate_normal.pdf(xv, model.clusterMeans[j], model.clusterCovs[j]) 125 | 126 | output=[] 127 | lastNode=0 128 | for i in range(len(pathsarray)-1,0,-1): 129 | #print(i) 130 | if(i==len(pathsarray)-1): 131 | row=pathsarray[i] 132 | lastNode=np.argmax(row) 133 | output.append(lastNode) 134 | #print(lastNode) 135 | par=(lastNode%4)*2 136 | if(pathsarray[i-1][par] > pathsarray[i-1][par+1]): 137 | lastNode=par 138 | else: 139 | lastNode=par+1 140 | output.append(lastNode) 141 | 142 | output.reverse() 143 | newFile=[] 144 | for i in range(len(output)): 145 | if(i==0): 146 | strr='{0:03b}'.format(output[i]) 147 | newFile.append(strr[2]) 148 | newFile.append(strr[1]) 149 | newFile.append(strr[0]) 150 | continue 151 | strr='{0:03b}'.format(output[i]) 152 | #print(strr) 153 | newFile.append(strr[0]) 154 | 155 | with open('newTest.txt', 'w') as f: 156 | for item in newFile: 157 | f.write("%s" % item) 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | -------------------------------------------------------------------------------- /ChannelEquilizer/input.txt: -------------------------------------------------------------------------------- 1 | 1 0.6 2 | 0 0.01 -------------------------------------------------------------------------------- /ChannelEquilizer/newTest.txt: -------------------------------------------------------------------------------- 1 | 1000000100101100011110010000011111110101001001001101010111011011011101001111110010000000001010001101 -------------------------------------------------------------------------------- /ChannelEquilizer/test.txt: -------------------------------------------------------------------------------- 1 | 1100100000111111101010010010011010101110110110111010011111100100000000010100011011000000100101100011 -------------------------------------------------------------------------------- /ChannelEquilizer/testOnline.txt: -------------------------------------------------------------------------------- 1 | 1000000100101100011110010000011111110101001001001101010111011011011101001111110010000000001010001101 -------------------------------------------------------------------------------- /DcsnT_Adaboost/ML Assignment 1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yeaseen/ML_Pattern/4db2fbb97eba5e485879a0c877df190ee68c7132/DcsnT_Adaboost/ML Assignment 1.pdf -------------------------------------------------------------------------------- /DcsnT_Adaboost/offline.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Thu Nov 15 15:49:22 2018 5 | 6 | @author: yeaseen 7 | """ 8 | import numpy as np 9 | import pandas as pd 10 | import math 11 | import matplotlib.pyplot as plt 12 | from sklearn.model_selection import train_test_split 13 | 14 | 15 | class dcsnTreeNodeClass: 16 | def __init__(self, name): 17 | self.name = name 18 | self.lst = {} 19 | 20 | 21 | def entropyCalc(x,y): 22 | p1=x/(x+y) 23 | p2=y/(x+y) 24 | if(p1==0 or p2==0): 25 | res=0 26 | else: 27 | res=-p1*math.log2(p1)-p2*math.log2(p2) 28 | return res 29 | 30 | def Importance(attributes,dataframe): 31 | nodeName='' 32 | entropy=100 33 | dataframelength=len(dataframe) 34 | #print(dataframelength) 35 | 36 | for att in attributes: 37 | uniqueValues= 
dataframe[att].unique() 38 | #print(uniqueValues) 39 | res=0 40 | for value in uniqueValues: 41 | attvalueFrames=dataframe.loc[dataframe[att] == value] 42 | subframelength=len(attvalueFrames) 43 | valueEntropy=subEntropy(attvalueFrames) 44 | #print(subframelength) 45 | res+=(subframelength / dataframelength)*valueEntropy 46 | #print(valueEntropy) 47 | #print(res,att) 48 | if(res < entropy): 49 | #print('once upon a time in mumbai') 50 | #print(res) 51 | entropy = res 52 | nodeName= att 53 | 54 | return nodeName 55 | 56 | def pluralityValue(dataframe): 57 | zeroInOutput=dataframe.iloc[:,-1][dataframe.iloc[:,-1] == -1].count() 58 | oneInOutput= dataframe.iloc[:,-1][dataframe.iloc[:,-1] == 1].count() 59 | if(zeroInOutput>= oneInOutput): 60 | return -1 61 | else: 62 | return 1 63 | 64 | def classificationFactor(dataframe): 65 | if(len(dataframe.iloc[:,-1].unique()) == 1): 66 | #print(dataframe.iloc[:,-1].unique()[0]) 67 | return True 68 | else: 69 | return False 70 | 71 | def specificClss(dataframe): 72 | return dataframe.iloc[:,-1].unique()[0] 73 | 74 | 75 | def dcsnTreeRoot(sampleFrames, attributelist, parentFrames, currDepth, maxDepth): 76 | if(len(sampleFrames) == 0): 77 | return pluralityValue(parentFrames) 78 | elif(classificationFactor(sampleFrames) == True): 79 | return specificClss(sampleFrames) 80 | elif(len(attributelist) == 0): 81 | return pluralityValue(sampleFrames) 82 | elif(currDepth == maxDepth): 83 | return pluralityValue(sampleFrames) 84 | else: 85 | newnode=Importance(attributelist,sampleFrames) 86 | #print('newnodes =========='+newnode) 87 | treeRoot= dcsnTreeNodeClass(newnode) 88 | uniqueValuesRoot=sampleFrames[newnode].unique().tolist() 89 | uniqueValuesTestRoot=dfTestN[newnode].unique().tolist() 90 | 91 | uniqeVals= list(set(uniqueValuesRoot+uniqueValuesTestRoot)) 92 | currD=currDepth+1 93 | if newnode in attributelist : attributelist.remove(newnode) 94 | #print(attributelist) 95 | #print('befor for loop') 96 | for eachValue in uniqeVals: 97 | atListCopy=attributelist.copy() 98 | cuttingFrame=sampleFrames.loc[sampleFrames[newnode] == eachValue] 99 | subRoot=dcsnTreeRoot(cuttingFrame,atListCopy,sampleFrames,currD,maxDepth) 100 | treeRoot.lst[eachValue] =subRoot 101 | #print('after for loop') 102 | return treeRoot 103 | 104 | 105 | def subEntropy(dataframe): 106 | zeroInOutput=dataframe.iloc[:,-1][dataframe.iloc[:,-1] == -1].count() 107 | oneInOutput= dataframe.iloc[:,-1][dataframe.iloc[:,-1] == 1].count() 108 | res=entropyCalc(zeroInOutput,oneInOutput) 109 | return res 110 | 111 | 112 | def binningPoint(dataframe): 113 | #print(dataframe) 114 | countZero=0 115 | countOne=0 116 | entropy=1 117 | collectorZero=dataframe.iloc[:,-1][dataframe.iloc[:,-1] == -1].count() 118 | #print(collectorZero) 119 | collectorOne= dataframe.iloc[:,-1][dataframe.iloc[:,-1] == 1].count() 120 | #print(collectorOne) 121 | feature=dataframe.columns[-1] 122 | total=len(dataframe) 123 | index=0 124 | current=0 125 | for row in dataframe.itertuples(index=True, name='Pandas'): 126 | # print(getattr(row, feature)) 127 | #print(countZero,countOne,collectorZero,collectorOne) 128 | if(getattr(row, feature)==-1): 129 | countZero+=1 130 | collectorZero-=1 131 | else: 132 | countOne+=1 133 | collectorOne-=1 134 | #print(countZero,countOne,collectorZero,collectorOne) 135 | if(not(collectorZero==0 and collectorOne==0)): 136 | current+=1 137 | res1= ((countZero+countOne) / total) * entropyCalc(countZero,countOne) 138 | #print(res1) 139 | res2= ((collectorZero+collectorOne) / total) * 
entropyCalc(collectorZero,collectorOne) 140 | #print(res2) 141 | #print(res1+res2) 142 | if(entropy>(res1+res2)): 143 | index=current 144 | #print(res1+res2) 145 | entropy=res1+res2 146 | #print(dataframe.columns[0]) 147 | #print(dataframe.iloc[index][dataframe.columns[0]]) 148 | p= dataframe.iloc[index][dataframe.columns[0]] 149 | q= dataframe.iloc[index+1][dataframe.columns[0]] 150 | #print(p,q) 151 | r=(p+q)/2 152 | return r 153 | 154 | 155 | 156 | def dataCleaning(dataframe): 157 | for col_name in dataframe.columns: 158 | if(dataframe[col_name].dtypes == 'object'): 159 | dataframe[[col_name]] = dataframe[[col_name]].fillna(dataframe[col_name].mode().iloc[0]) 160 | elif(col_name != dataframe.columns.values[-1]): 161 | dataframe[[col_name]]=dataframe[[col_name]].fillna(dataframe[col_name].mean()) 162 | new = dataframe.filter([col_name,dataframe.columns[-1]], axis=1) 163 | new = new.sort_values(col_name) 164 | binPoint = binningPoint(new) 165 | dataframe[col_name]= [ 0 if x <= binPoint else 1 for x in dataframe[col_name]] 166 | return dataframe 167 | 168 | 169 | 170 | def classPrint(dataframe,rootOriginal): 171 | predictOut=[] 172 | for row in dataframe.itertuples(index=True, name='Pandas'): 173 | r = rootOriginal 174 | #print(r.name) 175 | #print('before while') 176 | if(np.issubdtype(type(r), int)): 177 | #print(r) 178 | predictOut.append(r) 179 | #break 180 | else: 181 | while(1): 182 | featName=r.name 183 | #print(featName) 184 | featValue = getattr(row, featName) 185 | #print(featValue) 186 | rootans = r.lst[featValue] 187 | if(np.issubdtype(type(rootans), int)): 188 | #print(rootans) 189 | predictOut.append(rootans) 190 | break 191 | else: 192 | r=rootans 193 | return predictOut 194 | 195 | def AdaBoost(funcDcsn, sampleFrames, attList, \ 196 | parentFrames,strtdepth, maxdepth,funcClassify,rootClass, K): 197 | Y=sampleFrames.iloc[:,-1].values.tolist().copy() 198 | #print(Y) 199 | N=len(sampleFrames) 200 | w= [1/N] * N 201 | h= [] 202 | z= [] 203 | #print(w) 204 | 205 | for x in range(0,K): 206 | data=sampleFrames.sample(n=N,weights=w,replace=True).copy() 207 | error=0.0001 208 | attList=list(data.drop(data.columns[-1],axis=1).columns.values) 209 | roott=dcsnTreeRoot(data,attList.copy(),data,0,maxdepth) 210 | predictAns=classPrint(data,roott) 211 | for i in range(0,N): 212 | if(predictAns[i] != Y[i]): 213 | error=error + w[i] 214 | if(error > 0.5): 215 | #print(x) 216 | continue 217 | for j in range(0,N): 218 | if(predictAns[j] == Y[j]): 219 | w[j] = w[j] *(error/(1-error)) 220 | maxVal=sum(w) 221 | w = [float(i)/maxVal for i in w] 222 | h.append(roott) 223 | #print(type(roott)) 224 | wT= math.log2(((1-error)/error)) 225 | z.append(wT) 226 | return h,z 227 | 228 | 229 | def learnersAggregation(learner,weights,dfTestData): 230 | predictOutAgg=[] 231 | numLearner=len(learner) 232 | # print(numLearner) 233 | for row in dfTestData.itertuples(index=True, name='Pandas'): 234 | aggAns=0 235 | for i in range(0,numLearner): 236 | r=learner[i] 237 | z=weights[i] 238 | if(np.issubdtype(type(r), int)): 239 | aggAns+=r*z 240 | #print('hello') 241 | else: 242 | while(1): 243 | featName=r.name 244 | #print(featName) 245 | featValue = getattr(row, featName) 246 | #print(featValue) 247 | rootans = r.lst[featValue] 248 | if(np.issubdtype(type(rootans), int)): 249 | #print(rootans) 250 | #predictOut.append(rootans) 251 | aggAns+=rootans*z 252 | #print('hello') 253 | break 254 | else: 255 | r=rootans 256 | #print(aggAns) 257 | if(aggAns<0): 258 | predictOutAgg.append(-1) 259 | else: 260 | 
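# aggAns holds the weighted vote, the sum over learners of z_t * h_t(x);
# a non-negative total means the z-weighted majority of weak hypotheses
# voted +1, so the ensemble emits +1 here (the -1 branch above is the
# mirror case), i.e. the usual sign(sum z_t h_t(x)) rule of AdaBoost.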
predictOutAgg.append(1) 261 | return predictOutAgg 262 | 263 | 264 | def perf_measure(y_actual, y_hat): 265 | TP = 0 266 | FP = 0 267 | TN = 0 268 | FN = 0 269 | 270 | for i in range(len(y_hat)): 271 | if y_actual[i]==y_hat[i]==1: 272 | TP += 1 273 | if y_hat[i]==1 and y_actual[i]!=y_hat[i]: 274 | FP += 1 275 | if y_actual[i]==y_hat[i]== -1: 276 | TN += 1 277 | if y_hat[i]== -1 and y_actual[i]!=y_hat[i]: 278 | FN += 1 279 | 280 | return (TP, FP, TN, FN) 281 | 282 | 283 | def printMeasure(TP,FP,TN,FN): 284 | print('sensitivity,recall,hitrate: ') 285 | TPR = TP/(TP+FN) 286 | print(TPR) 287 | print('specificity: ') 288 | TNR = TN/(TN+FP) 289 | print(TNR) 290 | print('Precision: ') 291 | PPV = TP/(TP+FP) 292 | print(PPV) 293 | print('false discovery rate: ') 294 | FDR = FP/(TP+FP) 295 | print(FDR) 296 | print('accuracy: ') 297 | ACC = (TP+TN)/(TP+FP+FN+TN) 298 | print(ACC) 299 | print('fi score: ') 300 | FI_SCORE= 2*((PPV*TPR)/(PPV+TPR)) 301 | print(FI_SCORE) 302 | 303 | 304 | 305 | 306 | ''' 307 | 308 | ###preproecesing od ADULT DATA SET 309 | df = pd.read_csv('adult_train.csv',skipinitialspace=True) 310 | 311 | 312 | df[df.columns.values[-1]]= [-1 if x == '<=50K' else 1 for x in df[df.columns.values[-1]]] 313 | 314 | dfTest = pd.read_csv('adult_test.csv',skipinitialspace=True) 315 | 316 | dfTest[dfTest.columns.values[-1]]= [-1 if x == '<=50K' else 1 for x in dfTest[dfTest.columns.values[-1]]] 317 | 318 | 319 | df['native_country'] = ['United-States' if x == 'United-States' else 'Other' for x in df['native_country']] 320 | 321 | 322 | dfTest['native_country'] = ['United-States' if x == 'United-States' else 'Other' for x in dfTest['native_country']] 323 | 324 | 325 | 326 | dfN=dataCleaning(df) 327 | dfTestN=dataCleaning(dfTest) 328 | 329 | ###end of ADULT DATASET 330 | 331 | 332 | 333 | ''' 334 | 335 | ###preprocessing of CREDIT CARD FRAUD DETECTION DATA 336 | 337 | df = pd.read_csv('creditcard.csv') 338 | 339 | isFraud = df[df['Class'] == 1] 340 | #print(len(isFraud)) 341 | isNotFraud= df.iloc[100:1400] 342 | isNotFraud= isNotFraud[isNotFraud['Class'] == 0 ] 343 | #print(len(isNotFraud)) 344 | 345 | 346 | 347 | 348 | 349 | frames=[isNotFraud,isFraud] 350 | 351 | result= pd.concat(frames) 352 | 353 | #print(len(result)) 354 | 355 | dfFull=dataCleaning(result) 356 | dfFull[dfFull.columns.values[-1]]= [-1 if x == 0 else 1 for x in dfFull[dfFull.columns.values[-1]]] 357 | dfN,dfTestN= train_test_split(dfFull,test_size=0.2,shuffle=True,) 358 | 359 | 360 | #for col_name in dfN.columns: 361 | # unique_cat = len(dfN[col_name].unique()) 362 | # print(col_name) 363 | #print("Feature '{col_name}' has {unique_cat} unique categories\ 364 | # ".format(col_name=col_name, unique_cat=unique_cat)) 365 | 366 | 367 | ### end of preprocessing of CREDIT CARD FRAUD DETECTION DATA 368 | 369 | 370 | 371 | 372 | 373 | 374 | 375 | 376 | 377 | 378 | ## WORKING SCENARIO STARTED HERE 379 | #print(dfN['Class'].value_counts()) 380 | #print(dfTestN['Class'].value_counts()) 381 | 382 | #print(dfN.info()) 383 | 384 | #print(dfTestN.info()) 385 | #print(df.head(5)) 386 | #print(dfTest.head(5)) 387 | 388 | 389 | 390 | 391 | 392 | 393 | 394 | ###starting of table1 395 | Yaw=dfN.iloc[:,-1].values.tolist().copy() 396 | testOutput=dfTestN.iloc[:,-1].values.tolist().copy() 397 | 398 | 399 | #print(len(Yaw)) 400 | #print(dfN.head(5)) 401 | #print(dfTestN.head(5)) 402 | 403 | 404 | 405 | att=list(dfN.drop(dfN.columns[-1],axis=1).columns.values) 406 | print(len(att)) 407 | 408 | root=dcsnTreeRoot(dfN,att.copy(),dfN,0,15) 409 | 410 | 
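# dcsnTreeRoot grows the tree greedily: at every node, Importance() selects the
# attribute whose split minimizes the expected remaining entropy (equivalently,
# maximizes information gain), recursing here to a maximum depth of 15.
# For illustration, a node holding 7 negative and 17 positive samples has
# entropyCalc(7,17) = -(7/24)*log2(7/24) - (17/24)*log2(17/24) ~ 0.871 bits,
# so any split whose weighted child entropy drops below 0.871 is a gain.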
#print(type(root)) 411 | 412 | 413 | #print(len(testOutput)) 414 | 415 | predictOutputTrain=classPrint(dfN,root) 416 | 417 | #print(len(predictOutputTrain)) 418 | 419 | predictedOutputTest=classPrint(dfTestN,root) 420 | 421 | #print(len(predictedOutputTest)) 422 | 423 | 424 | TP,FP,TN,FN=perf_measure(Yaw,predictOutputTrain) 425 | print("===========Over train DATA =========") 426 | printMeasure(TP,FP,TN,FN) 427 | 428 | 429 | 430 | print("===========Over test DATA =========") 431 | TP,FP,TN,FN=perf_measure(testOutput,predictedOutputTest) 432 | printMeasure(TP,FP,TN,FN) 433 | 434 | ###end of table 1 435 | 436 | 437 | 438 | 439 | 440 | 441 | #att=list(dfN.drop(dfN.columns[-1],axis=1).columns.values) 442 | #print(att) 443 | 444 | learner, weights=AdaBoost(dcsnTreeRoot,dfN,att.copy(),dfN,0,1,classPrint,dcsnTreeNodeClass,20) 445 | 446 | 447 | predictionArr=learnersAggregation(learner,weights,dfN) 448 | 449 | print("===========AdaBoost Result on Train =========") 450 | 451 | TP,FP,TN,FN=perf_measure(Yaw,predictionArr) 452 | #print(TP,FP,TN,FN) 453 | printMeasure(TP,FP,TN,FN) 454 | 455 | 456 | predictionArr=learnersAggregation(learner,weights,dfTestN) 457 | #my_set = set(predictionArr) 458 | #my_new_list = list(my_set) 459 | #print(my_new_list) 460 | 461 | 462 | print("===========AdaBoost Result on Test =========") 463 | 464 | TP,FP,TN,FN=perf_measure(testOutput,predictionArr) 465 | #print(TP,FP,TN,FN) 466 | printMeasure(TP,FP,TN,FN) 467 | 468 | 469 | 470 | 471 | 472 | 473 | 474 | 475 | -------------------------------------------------------------------------------- /NN/nn.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Sun Dec 2 18:10:04 2018 4 | 5 | @author: Asus 6 | """ 7 | import numpy as np 8 | from sklearn import preprocessing 9 | from sklearn.metrics import accuracy_score 10 | import time 11 | 12 | 13 | def split(s, delim): 14 | words = [] 15 | word = [] 16 | for c in s: 17 | if c not in delim: 18 | word.append(c) 19 | else: 20 | if word: 21 | words.append(''.join(word)) 22 | word = [] 23 | if word: 24 | words.append(''.join(word)) 25 | return words 26 | 27 | def loadfile(filename): 28 | file = open(filename, "r") 29 | rows = list() 30 | for line in file: 31 | vals = split(line, [' ' ,'\t', '\n']) 32 | rows.append(vals) 33 | return rows 34 | 35 | 36 | 37 | def sigmoid(x): 38 | return 1 / (1 + np.exp(-1*x)) 39 | 40 | #def sigmoid_der(x): 41 | # return 1*sigmoid(x)*(1-sigmoid(x)) 42 | 43 | 44 | 45 | def derivative(x): 46 | return x * (1 - x) 47 | 48 | 49 | 50 | train=loadfile('trainNN1.txt') 51 | 52 | test=loadfile('testNN1.txt') 53 | 54 | train=np.array(train) 55 | train= train.astype(np.float) 56 | 57 | train_output_col=train[:,-1].copy() 58 | train_output_col=np.array(train_output_col) 59 | train_output_col=train_output_col.astype(np.int) 60 | train_output_col=np.array(train_output_col).tolist() 61 | 62 | train_Y=train[:,-1].copy() 63 | train_Y=np.array(train_Y) 64 | train_Y = np.eye(np.max(train_Y).astype(int))[train_Y.astype(int)-1] 65 | 66 | min_max_scaler = preprocessing.StandardScaler() 67 | train = min_max_scaler.fit_transform(train) 68 | train[:,-1]=np.ones((train.shape[0])) 69 | 70 | 71 | 72 | 73 | test=np.array(test) 74 | test= test.astype(np.float) 75 | 76 | test_output_col=test[:,-1].copy() 77 | test_output_col=np.array(test_output_col) 78 | test_output_col=test_output_col.astype(np.int) 79 | test_output_col=np.array(test_output_col).tolist() 80 | 81 | 82 | test_Y=test[:,-1].copy() 83 | 
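# the test labels get the same one-hot treatment as the training labels above:
# np.eye(K)[label-1] turns a 1-based class id into a unit row vector, e.g. with
# K=3 classes, label 2 becomes [0., 1., 0.], so the network's K sigmoid outputs
# can be compared against the target row elementwise; afterwards the label
# column is overwritten with ones and reused as the bias input.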
test_Y=np.array(test_Y) 84 | test_Y= np.eye(np.max(test_Y).astype(int))[test_Y.astype(int)-1] 85 | test = min_max_scaler.fit_transform(test) 86 | test[:,-1]=np.ones((test.shape[0])) 87 | 88 | 89 | 90 | 91 | layerNeurons=[train.shape[1], 3, 4, 5, train_Y.shape[1]] 92 | 93 | weight = [] 94 | for i in range(len(layerNeurons)-1): 95 | w = np.random.uniform(-1,1,(layerNeurons[i],layerNeurons[i+1])) 96 | weight.append(w) 97 | 98 | 99 | 100 | epochRange = 1000 101 | learningRate= 0.01 102 | 103 | 104 | min_err = np.inf 105 | best_w = [] 106 | 107 | 108 | start = time.time() 109 | for epoch in range(epochRange): 110 | 111 | for i in range(train.shape[0]): 112 | v=[] 113 | y=[] 114 | inputNeuron = [train[i]] 115 | v.append(inputNeuron) 116 | y.append(inputNeuron) 117 | 118 | for r in range(len(layerNeurons) -1): 119 | w=weight[r] 120 | matout=np.matmul(inputNeuron,w) 121 | v.append(matout) 122 | matout=sigmoid(matout) 123 | y.append(matout) 124 | inputNeuron=matout 125 | 126 | 127 | lastY=len(y)-1 128 | errs = 0.5 * (y[lastY] - train_Y[i])*(y[lastY] - train_Y[i]) 129 | if errs.sum() < min_err: 130 | min_err = errs.sum() 131 | best_w = weight 132 | 133 | delta =[] 134 | d=(y[lastY] - train_Y[i])* derivative(y[lastY]) 135 | delta.append(d) 136 | #print(delta[0]) 137 | 138 | for r in range(len(layerNeurons)-2,0,-1): 139 | d= (np.matmul(weight[r],delta[len(layerNeurons)-r-2].T)).T 140 | d= d*derivative(y[r]) 141 | delta.append(d) 142 | 143 | 144 | delta.reverse() 145 | 146 | delw=[] 147 | for i in range(len(layerNeurons)-1): 148 | w = np.random.uniform(0,0,(layerNeurons[i],layerNeurons[i+1])) 149 | delw.append(w) 150 | 151 | for r in range(len(delw)-1,0,-1): 152 | delw[r]=np.matmul(np.array(y[r]).T,delta[r]) 153 | 154 | 155 | for i in range(len(weight)): 156 | weight[i] -= learningRate * delw[i] 157 | 158 | 159 | 160 | print("Training finished, time needed: ", time.time() - start) 161 | 162 | weight = best_w 163 | output = [] 164 | for i in range(train.shape[0]): 165 | inputNeuron = [train[i]] 166 | for r in range(len(layerNeurons)-1): 167 | w=weight[r] 168 | matout=np.matmul(inputNeuron,w) 169 | matout=sigmoid(matout) 170 | inputNeuron=matout 171 | 172 | output.append(np.argmax(inputNeuron)+1) 173 | 174 | #print(output) 175 | 176 | 177 | print("Accuracy on Train Data set: "+str(accuracy_score(train_output_col, output))) 178 | 179 | 180 | output = [] 181 | for i in range(test.shape[0]): 182 | inputNeuron = [test[i]] 183 | for r in range(len(layerNeurons)-1): 184 | w=weight[r] 185 | matout=np.matmul(inputNeuron,w) 186 | matout=sigmoid(matout) 187 | inputNeuron=matout 188 | 189 | output.append(np.argmax(inputNeuron)+1) 190 | 191 | print("Accuracy is on Test Data Set: "+str(accuracy_score(test_output_col, output))) 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | -------------------------------------------------------------------------------- /PCA_EMClusteringGMM/ML 2.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Yeaseen/ML_Pattern/4db2fbb97eba5e485879a0c877df190ee68c7132/PCA_EMClusteringGMM/ML 2.pdf -------------------------------------------------------------------------------- /PCA_EMClusteringGMM/PCA.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Spyder Editor 4 | 5 | This is a temporary script file. 
6 | """ 7 | import itertools 8 | from numpy.linalg import eig 9 | from scipy.stats import multivariate_normal 10 | import numpy as np 11 | from scipy import linalg 12 | import matplotlib.pyplot as plt 13 | import matplotlib as mpl 14 | 15 | 16 | def split(s, delim): 17 | words = [] 18 | word = [] 19 | for c in s: 20 | if c not in delim: 21 | word.append(c) 22 | else: 23 | if word: 24 | words.append(''.join(word)) 25 | word = [] 26 | if word: 27 | words.append(''.join(word)) 28 | return words 29 | 30 | def loadfile(filename): 31 | file = open(filename, "r") 32 | rows = list() 33 | for line in file: 34 | vals = split(line, [' ' ,'\t', '\n']) 35 | rows.append(vals) 36 | return rows 37 | 38 | 39 | def plot_results(X, Y_, means, covariances, index, title): 40 | splot = plt.subplot(2, 1, 1 + index) 41 | for i, (mean, covar, color) in enumerate(zip( 42 | means, covariances, color_iter)): 43 | v, w = linalg.eigh(covar) 44 | v = 2. * np.sqrt(2.) * np.sqrt(v) 45 | u = w[0] / linalg.norm(w[0]) 46 | # as the DP will not use every component it has access to 47 | # unless it needs it, we shouldn't plot the redundant 48 | # components. 49 | if not np.any(Y_ == i): 50 | continue 51 | plt.scatter(X[Y_ == i, 0], X[Y_ == i, 1], s=7, color=color, marker= '*') 52 | 53 | # Plot an ellipse to show the Gaussian component 54 | angle = np.arctan(u[1] / u[0]) 55 | angle = 180. * angle / np.pi # convert to degrees 56 | ell = mpl.patches.Ellipse(mean, v[0], v[1], 180. + angle, color=color) 57 | ell.set_clip_box(splot.bbox) 58 | ell.set_alpha(0.5) 59 | splot.add_artist(ell) 60 | 61 | plt.xlim(-10., 10.) 62 | plt.ylim(-10., 10.) 63 | #plt.xticks(()) 64 | #plt.yticks(()) 65 | plt.xlabel('PC1') 66 | plt.ylabel('PC2') 67 | plt.title(title) 68 | plt.show() 69 | 70 | 71 | 72 | train=loadfile('onlineDataset.txt') 73 | train=np.array(train) 74 | train= train.astype(np.float) 75 | 76 | 77 | M = np.mean(train.T, axis=1) 78 | C= M -train 79 | V=np.cov(C.T) 80 | 81 | eigenValues, eigenVectors =eig(V) 82 | top_vectors=2 83 | idx = eigenValues.argsort()[-top_vectors:][::-1] 84 | eigenValues = eigenValues[idx] 85 | eigenVectors = eigenVectors[:,idx] 86 | 87 | P = eigenVectors.T.dot(C.T) 88 | PrincipalComponents=P.T 89 | 90 | plt.scatter(PrincipalComponents[:,0], PrincipalComponents[:,1], color=['red'], marker= '*', s=7) 91 | plt.xlabel('PC1') 92 | plt.ylabel('PC2') 93 | plt.xlim(-10., 10.) 94 | plt.ylim(-10., 10.) 
95 | plt.title('Scatter plot DATASET after PCA') 96 | plt.show() 97 | 98 | 99 | 100 | 101 | ## ALL INITIALISATIONS 102 | 103 | numOfGauss=4 ## it is determined from the above plot 104 | 105 | color_iter = itertools.cycle(['red', 'green', 'blue','yellow']) 106 | 107 | 108 | #initial mean, cov, w 109 | mu = np.random.uniform(min(PrincipalComponents[:,0]),max(PrincipalComponents[:,0]),\ 110 | size=(numOfGauss,len(PrincipalComponents[0]))) 111 | 112 | cov = np.zeros((numOfGauss,len(PrincipalComponents[0]),len(PrincipalComponents[0]))) 113 | ## none of the initial covariance matrices may be singular, so we fill each 114 | ## main diagonal with a constant value 115 | for dim in range(len(cov)): 116 | np.fill_diagonal(cov[dim],5.00) 117 | 118 | w=[1/numOfGauss]*numOfGauss 119 | 120 | 121 | N=PrincipalComponents.shape[0] 122 | epsilon=1e-6 123 | old_log_likelihood = 0 124 | probs =np.zeros((N, numOfGauss), float) 125 | iterationNo=0 126 | while(True): 127 | 128 | #evaluate each component's multivariate Gaussian density 129 | norm_columns=[] 130 | for i in np.arange(numOfGauss): 131 | ans=np.array(multivariate_normal.pdf(PrincipalComponents, mu[i], cov[i])) 132 | norm_columns.append(ans) 133 | 134 | norm_densities=np.column_stack(norm_columns) 135 | 136 | ## LOGLIKELIHOOD calculation 137 | innerSumVector=np.log(np.array([np.dot(np.array(w).T,norm_densities[i]) for i in np.arange(N)])) 138 | 139 | log_likelihood = np.dot(innerSumVector.T, np.ones(N)) 140 | 141 | ## convergence check 142 | if(np.absolute(log_likelihood - old_log_likelihood) < epsilon): 143 | break 144 | ## E STEP 145 | counter=0 146 | for i in norm_densities: 147 | mul=i*w 148 | sumrow=np.sum(mul) 149 | mul=mul/sumrow 150 | probs[counter]=mul 151 | counter+=1 152 | 153 | ## M step 154 | for i in range(numOfGauss): 155 | probabilty=(probs.T)[i] 156 | denominator= np.dot(probabilty.T, np.ones(N)) 157 | mu[i] = np.dot(probabilty.T,PrincipalComponents) / denominator 158 | diff=PrincipalComponents - np.tile(mu[i], (N, 1)) 159 | cov[i]=np.dot(np.multiply(probabilty.reshape(N,1),diff).T,diff) / denominator 160 | w[i]= denominator / N 161 | 162 | old_log_likelihood=log_likelihood 163 | 164 | ## assign each row to its maximum-responsibility component 165 | MaxExpectations=[] 166 | for i in probs: 167 | MaxExpectations.append(np.argmax(i)) 168 | MaxExpectations=np.array(MaxExpectations) 169 | 170 | ##Plotting at each iteration 171 | plot_results(PrincipalComponents, MaxExpectations, mu, cov, 0, 172 | 'Gaussian Mixture Model at iteration ' + str(iterationNo)) 173 | iterationNo+=1 174 | 175 | print(np.array(w)*N) 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 
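Since the E and M steps above are easy to misread in this flattened form, here is a minimal standalone sketch of one E step, the responsibility computation gamma[i][k] = w_k * N(x_i | mu_k, cov_k) / sum_j w_j * N(x_i | mu_j, cov_j); all names and toy numbers are illustrative, not from the repo:

import numpy as np
from scipy.stats import multivariate_normal

X = np.array([[0.0, 0.0], [2.0, 2.0]])   # two 2-D points
mus = [np.zeros(2), np.full(2, 2.0)]     # assumed component means
covs = [np.eye(2), np.eye(2)]            # assumed component covariances
ws = np.array([0.5, 0.5])                # mixing weights

# densities[i, k] = N(x_i | mu_k, cov_k), as in the script's norm_densities
densities = np.column_stack([multivariate_normal.pdf(X, m, c) for m, c in zip(mus, covs)])
gamma = (densities * ws) / (densities * ws).sum(axis=1, keepdims=True)

The M step in the script then divides the responsibility-weighted sums by gamma.sum(axis=0) to obtain the updated mu, cov and w.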
-------------------------------------------------------------------------------- /Perceptron/BinaryPerceptron/Basic.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Sun Nov 18 21:22:40 2018 5 | 6 | @author: yeaseen 7 | """ 8 | import numpy as np 9 | 10 | 11 | def split(s, delim=[" ", '\n']): 12 | words = [] 13 | word = [] 14 | for c in s: 15 | if c not in delim: 16 | word.append(c) 17 | else: 18 | if word: 19 | words.append(''.join(word)) 20 | word = [] 21 | if word: 22 | words.append(''.join(word)) 23 | return words 24 | 25 | def loadfile(filename,checkTrain): 26 | file = open(filename, "r") 27 | first = checkTrain 28 | rows = list() 29 | for line in file: 30 | if(first) == True: 31 | dims = split(line) 32 | first = False 33 | else: 34 | vals = split(line, [' ' ,'\t', '\n']) 35 | #print(vals) 36 | rows.append(vals) 37 | 38 | if(checkTrain): 39 | return dims, rows 40 | else: 41 | return rows 42 | 43 | 44 | 45 | dims, rows = loadfile('Train1.txt',True) 46 | 47 | test = loadfile('Test1.txt',False) 48 | 49 | 50 | 51 | dims=np.array(dims) 52 | dims = dims.astype(float) 53 | rows=np.array(rows) 54 | mat = rows.astype(float) 55 | 56 | test=np.array(test) 57 | test= test.astype(float) 58 | 59 | 60 | 61 | 62 | att=int(dims[0]) 63 | #print(att) 64 | clss=int(dims[1]) 65 | #print(clss) 66 | 67 | 68 | wactive =np.random.random_sample(((att+1),)) 69 | 70 | #wactive=[0,0,0,1] 71 | #print(w) 72 | 73 | 74 | matrix=np.array(mat) 75 | 76 | Y=matrix[:,-1].copy() 77 | Y=np.array(Y) 78 | Y=Y.astype(int) 79 | Y=np.array(Y).tolist() 80 | #print(Y) 81 | 82 | 83 | matrix[:,-1]=np.ones((matrix.shape[0])) 84 | 85 | targetOutput=test[:,-1].copy() 86 | targetOutput=np.array(targetOutput) 87 | targetOutput=targetOutput.astype(int) 88 | targetOutput=np.array(targetOutput).tolist() 89 | 90 | 91 | 92 | 93 | test[:,-1] = np.ones((test.shape[0])) 94 | 95 | #print(test) 96 | 97 | discriminant= True 98 | while(discriminant): 99 | running_weights = wactive.copy() 100 | counter=0 101 | for i in matrix: 102 | #print(Y[counter]) 103 | product =np.dot(i,wactive) 104 | 105 | if(product < 0): 106 | #print('noooo') 107 | classed=1 108 | else: 109 | classed=2 110 | if(classed != Y[counter]): 111 | if(classed == 1): 112 | #print('sub') 113 | running_weights=np.add(running_weights,i) 114 | else: 115 | #print('add') 116 | 
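# the sample was predicted as class 2 (w.x >= 0) but carries label 1, so it is
# subtracted from the accumulated weights, pushing w.x towards the negative,
# class-1 side of the decision boundary; the branch above handles the mirror
# case by adding the sample. (The leftover 'sub'/'add' debug prints are swapped
# relative to the operations actually performed.)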
running_weights=np.subtract(running_weights,i) 117 | counter+=1 118 | if(np.array_equal(running_weights,wactive)): 119 | discriminant = False 120 | wactive = running_weights 121 | 122 | 123 | 124 | print(wactive) 125 | 126 | 127 | predictedOutput=[] 128 | 129 | for eachrow in test: 130 | val=0 131 | got=np.dot(eachrow,wactive) 132 | if(got< 0): 133 | predictedOutput.append(1) 134 | else: 135 | predictedOutput.append(2) 136 | 137 | 138 | 139 | 140 | from sklearn.metrics import classification_report 141 | target_names = [] 142 | for i in range(clss): 143 | target_names.append('class'+str(i)) 144 | 145 | 146 | 147 | print(classification_report(targetOutput, predictedOutput, target_names=target_names)) 148 | 149 | from sklearn.metrics import accuracy_score 150 | print("Accuracy is: "+str(accuracy_score(targetOutput, predictedOutput))) 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | -------------------------------------------------------------------------------- /Perceptron/BinaryPerceptron/Pocket.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Mon Nov 19 00:44:32 2018 5 | 6 | @author: yeaseen 7 | """ 8 | 9 | import numpy as np 10 | 11 | 12 | def split(s, delim=[" ", '\n']): 13 | words = [] 14 | word = [] 15 | for c in s: 16 | if c not in delim: 17 | word.append(c) 18 | else: 19 | if word: 20 | words.append(''.join(word)) 21 | word = [] 22 | if word: 23 | words.append(''.join(word)) 24 | return words 25 | 26 | def loadfile(filename,checkTrain): 27 | file = open(filename, "r") 28 | first = checkTrain 29 | rows = list() 30 | for line in file: 31 | if(first) == True: 32 | dims = split(line) 33 | first = False 34 | else: 35 | vals = split(line, [' ' ,'\t', '\n']) 36 | #print(vals) 37 | rows.append(vals) 38 | 39 | if(checkTrain): 40 | return dims, rows 41 | else: 42 | return rows 43 | 44 | 45 | 46 | dims, rows = loadfile('Train.txt',True) 47 | 48 | test = loadfile('Test.txt',False) 49 | 50 | 51 | 52 | dims=np.array(dims) 53 | dims = dims.astype(np.float) 54 | rows=np.array(rows) 55 | mat = rows.astype(np.float) 56 | 57 | test=np.array(test) 58 | test= test.astype(np.float) 59 | 60 | 61 | 62 | 63 | att=int(dims[0]) 64 | #print(att) 65 | clss=int(dims[1]) 66 | #print(clss) 67 | 68 | 69 | wactive =np.random.random_sample(((att+1),)) 70 | 71 | #wactive=[0,0,1] 72 | #print(w) 73 | 74 | 75 | matrix=np.array(mat) 76 | 77 | Y=matrix[:,-1].copy() 78 | Y=np.array(Y) 79 | Y=Y.astype(np.int) 80 | Y=np.array(Y).tolist() 81 | #print(Y) 82 | 83 | 84 | matrix[:,-1]=np.ones((matrix.shape[0])) 85 | 86 | targetOutput=test[:,-1].copy() 87 | targetOutput=np.array(targetOutput) 88 | targetOutput=targetOutput.astype(np.int) 89 | targetOutput=np.array(targetOutput).tolist() 90 | 91 | 92 | print(len(targetOutput)) 93 | 94 | 95 | test[:,-1] = np.ones((test.shape[0])) 96 | 97 | 98 | 99 | def checkWithNewWeight(data,weight): 100 | rowCount=0 101 | accurate=0 102 | for row in data: 103 | prod=np.dot(row,weight) 104 | if(prod < 0): 105 | classed=1 106 | else: 107 | classed=2 108 | if(classed == Y[rowCount]): 109 | accurate+=1 110 | rowCount+=1 111 | return accurate 112 | 113 | 114 | 115 | counter=0 116 | maxCounter=matrix.shape[0] 117 | 118 | discriminant= True 119 | bestH=0 120 | pocketStart=0 121 | pocketMax=1000 122 | while(discriminant): 123 | 124 | countRow=0 125 | for i in matrix: 126 | product=np.dot(i,wactive) 127 | counter+=1 128 | if(product < 0): 129 | classed=1 130 | else: 131 | 
classed=2 132 | if(classed != Y[countRow]): 133 | pocketStart+=1 134 | counter =0 135 | if(classed == 1): 136 | #print('sub') 137 | wactive=np.add(wactive,i) 138 | else: 139 | #print('add') 140 | wactive=np.subtract(wactive,i) 141 | getH=checkWithNewWeight(matrix,wactive) 142 | if(getH > bestH): 143 | running_weights=wactive 144 | bestH=getH 145 | countRow+=1 146 | if((pocketStart==pocketMax) or (counter ==maxCounter) ): 147 | discriminant=False 148 | if((pocketStart==pocketMax) or (counter == maxCounter) ): 149 | discriminant=False 150 | 151 | 152 | print(wactive) 153 | 154 | 155 | predictedOutput=[] 156 | 157 | for eachrow in test: 158 | val=0 159 | got=np.dot(eachrow,wactive) 160 | if(got< 0): 161 | predictedOutput.append(1) 162 | else: 163 | predictedOutput.append(2) 164 | 165 | print(predictedOutput) 166 | 167 | 168 | 169 | from sklearn.metrics import classification_report 170 | target_names = [] 171 | for i in range(clss): 172 | target_names.append('class'+str(i)) 173 | 174 | 175 | 176 | print(classification_report(targetOutput, predictedOutput, target_names=target_names)) 177 | 178 | from sklearn.metrics import accuracy_score 179 | print("Accuracy is: "+str(accuracy_score(targetOutput, predictedOutput))) 180 | 181 | 182 | 183 | 184 | 185 | -------------------------------------------------------------------------------- /Perceptron/BinaryPerceptron/Reward&Punishment.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | """ 4 | Created on Sun Nov 18 23:42:40 2018 5 | 6 | @author: yeaseen 7 | """ 8 | import numpy as np 9 | 10 | 11 | def split(s, delim=[" ", '\n']): 12 | words = [] 13 | word = [] 14 | for c in s: 15 | if c not in delim: 16 | word.append(c) 17 | else: 18 | if word: 19 | words.append(''.join(word)) 20 | word = [] 21 | if word: 22 | words.append(''.join(word)) 23 | return words 24 | 25 | def loadfile(filename,checkTrain): 26 | file = open(filename, "r") 27 | first = checkTrain 28 | rows = list() 29 | for line in file: 30 | if(first) == True: 31 | dims = split(line) 32 | first = False 33 | else: 34 | vals = split(line, [' ' ,'\t', '\n']) 35 | #print(vals) 36 | rows.append(vals) 37 | 38 | if(checkTrain): 39 | return dims, rows 40 | else: 41 | return rows 42 | 43 | 44 | 45 | dims, rows = loadfile('Train.txt',True) 46 | 47 | test = loadfile('Test.txt',False) 48 | 49 | 50 | 51 | dims=np.array(dims) 52 | dims = dims.astype(np.float) 53 | rows=np.array(rows) 54 | mat = rows.astype(np.float) 55 | 56 | test=np.array(test) 57 | test= test.astype(np.float) 58 | 59 | 60 | 61 | 62 | att=int(dims[0]) 63 | #print(att) 64 | clss=int(dims[1]) 65 | #print(clss) 66 | 67 | 68 | wactive =np.random.random_sample(((att+1),)) 69 | 70 | #wactive=[0,0,1] 71 | #print(w) 72 | 73 | 74 | matrix=np.array(mat) 75 | 76 | Y=matrix[:,-1].copy() 77 | Y=np.array(Y) 78 | Y=Y.astype(np.int) 79 | Y=np.array(Y).tolist() 80 | #print(Y) 81 | 82 | 83 | matrix[:,-1]=np.ones((matrix.shape[0])) 84 | 85 | targetOutput=test[:,-1].copy() 86 | targetOutput=np.array(targetOutput) 87 | targetOutput=targetOutput.astype(np.int) 88 | targetOutput=np.array(targetOutput).tolist() 89 | 90 | 91 | print(len(targetOutput)) 92 | 93 | 94 | test[:,-1] = np.ones((test.shape[0])) 95 | 96 | 97 | counter=0 98 | maxCounter=matrix.shape[0] 99 | #print(maxCounter) 100 | 101 | discriminant= True 102 | while(discriminant): 103 | countRow=0 104 | for i in matrix: 105 | product=np.dot(i,wactive) 106 | #print(product) 107 | counter+=1 108 | if(product < 0): 
109 | classed=1 110 | else: 111 | classed=2 112 | 113 | if(classed != Y[countRow]): 114 | if(classed == 1): 115 | #print('sub') 116 | wactive=np.add(wactive,i) 117 | counter=0 118 | else: 119 | wactive=np.subtract(wactive,i) 120 | counter=0 121 | countRow+=1 122 | if(counter == maxCounter): 123 | discriminant = False 124 | 125 | #print(wactive) 126 | 127 | 128 | predictedOutput=[] 129 | 130 | for eachrow in test: 131 | val=0 132 | got=np.dot(eachrow,wactive) 133 | if(got< 0): 134 | predictedOutput.append(1) 135 | else: 136 | predictedOutput.append(2) 137 | 138 | #print(len(predictedOutput)) 139 | #print(predictedOutput) 140 | 141 | 142 | from sklearn.metrics import classification_report 143 | target_names = [] 144 | for i in range(clss): 145 | target_names.append('class'+str(i)) 146 | 147 | 148 | 149 | print(classification_report(targetOutput, predictedOutput, target_names=target_names)) 150 | 151 | from sklearn.metrics import accuracy_score 152 | print("Accuracy is: "+str(accuracy_score(targetOutput, predictedOutput))) 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | -------------------------------------------------------------------------------- /Perceptron/BinaryPerceptron/Test.txt: -------------------------------------------------------------------------------- 1 | 10.7496 9.3209 8.2053 1 2 | 9.9154 8.3233 7.2704 1 3 | 9.5856 8.3885 7.8567 1 4 | 10.0442 9.9109 6.3326 1 5 | 8.7696 7.9728 5.4523 1 6 | 9.8077 9.1897 7.2788 1 7 | 8.3563 8.0633 6.4292 1 8 | 11.3246 8.4540 6.4637 1 9 | 9.5811 9.7408 6.8965 1 10 | 10.6617 9.8422 5.7458 1 11 | 11.7670 10.0406 8.2927 1 12 | 9.2242 8.9054 6.9867 1 13 | 9.6976 8.8699 7.5056 1 14 | 9.4685 8.8554 7.9053 1 15 | 9.7187 9.5133 6.3626 1 16 | 10.8723 9.2491 6.8805 1 17 | 8.2823 7.9436 6.2555 1 18 | 11.0003 8.6869 7.9054 1 19 | 10.6458 10.2867 8.3352 1 20 | 10.1684 9.1351 7.4302 1 21 | 10.5657 7.9420 6.9983 1 22 | 9.6799 8.5231 7.9082 1 23 | 11.2159 9.0892 7.0128 1 24 | 10.1067 9.4304 8.4042 1 25 | 9.4741 9.9736 5.8608 1 26 | 9.6669 9.9259 6.7895 1 27 | 11.2978 9.3248 7.0669 1 28 | 10.0609 8.8317 4.8644 1 29 | 9.7942 9.3057 6.2757 1 30 | 10.4391 9.6415 7.0820 1 31 | 9.9227 9.2828 6.5822 1 32 | 10.3362 9.1440 7.8288 1 33 | 9.8612 8.4255 6.4015 1 34 | 10.4651 8.5358 7.6798 1 35 | 11.2254 9.3084 7.0281 1 36 | 9.6275 8.3777 5.7954 1 37 | 10.5991 8.3389 8.1433 1 38 | 9.6252 9.0393 7.2551 1 39 | 9.6875 7.7061 5.8133 1 40 | 9.9448 9.0206 7.2244 1 41 | 9.7555 9.6354 6.9679 1 42 | 9.4738 8.8236 6.4481 1 43 | 8.7259 8.0653 5.9340 1 44 | 9.0374 8.2800 7.0893 1 45 | 10.5013 9.1165 8.1145 1 46 | 11.3381 9.1392 7.1045 1 47 | 8.8808 7.9195 7.8346 1 48 | 9.5629 9.4856 7.2201 1 49 | 10.7089 9.9517 8.2742 1 50 | 9.9142 8.8241 6.4651 1 51 | 11.0792 9.8872 7.9406 1 52 | 9.3191 8.6497 5.0807 1 53 | 9.6302 10.0221 9.0362 1 54 | 8.4015 7.7601 6.1689 1 55 | 9.0907 7.9145 7.2521 1 56 | 11.0728 9.9912 8.6062 1 57 | 10.2721 9.6519 7.8759 1 58 | 9.5712 8.9366 5.8259 1 59 | 10.2762 9.7077 6.9175 1 60 | 9.4316 10.1369 6.6926 1 61 | 10.4818 8.7339 7.4721 1 62 | 8.3660 7.8901 6.5090 1 63 | 10.9893 11.0249 7.6186 1 64 | 10.1229 8.6427 8.8048 1 65 | 10.5275 7.3832 8.7825 1 66 | 9.4120 9.1836 8.9143 1 67 | 8.8757 7.8621 7.5715 1 68 | 8.8438 8.4579 5.7583 1 69 | 8.2532 7.9388 4.7198 1 70 | 10.1229 8.8196 6.3134 1 71 | 8.6431 9.2262 7.2631 1 72 | 11.0683 9.7726 7.3603 1 73 | 9.4771 8.4230 6.6976 1 74 | 11.1353 8.4275 7.1301 1 75 | 9.9507 10.5714 8.3463 1 76 | 9.0736 8.9618 7.6584 1 77 | 10.0982 7.9032 6.7917 1 78 | 11.4285 9.8467 8.0225 1 79 | 9.6425 8.9651 8.3227 1 
80 | 9.7184 8.7312 6.6780 1 81 | 10.3583 9.5181 4.5244 1 82 | 10.4453 8.6843 8.0855 1 83 | 9.6115 8.7506 4.0352 1 84 | 10.5446 7.9432 7.2141 1 85 | 9.3538 8.9172 6.7143 1 86 | 9.8656 8.8498 6.6526 1 87 | 11.0322 7.3265 6.4970 1 88 | 10.8494 7.3252 7.6209 1 89 | 11.3436 9.9368 8.7048 1 90 | 10.5592 8.5825 7.6395 1 91 | 10.7046 9.0759 7.6274 1 92 | 9.4037 7.9561 6.5548 1 93 | 10.3769 9.0183 8.0351 1 94 | 10.5254 8.3985 7.6241 1 95 | 9.8589 10.0712 5.1050 1 96 | 11.4078 9.9535 7.8718 1 97 | 11.2654 7.7387 7.6893 1 98 | 9.6725 10.0025 6.5671 1 99 | 8.9853 8.5895 6.5827 1 100 | 8.5839 8.8262 5.9282 2 101 | 17.5285 1.4180 12.2175 1 102 | 16.4128 -0.0407 11.9689 2 103 | 16.9632 1.1576 13.5849 2 104 | 15.8849 1.0354 12.2533 2 105 | 16.1697 1.3018 12.3130 2 106 | 15.5909 1.5734 12.3182 2 107 | 15.4403 -0.0415 10.1679 2 108 | 14.2622 -0.4111 13.1860 2 109 | 14.7134 2.1461 11.2449 2 110 | 16.2419 3.1025 14.2964 2 111 | 15.6727 1.5943 11.9458 2 112 | 15.1118 0.3178 12.2374 2 113 | 17.2321 1.7972 12.0216 2 114 | 16.7825 1.3698 13.7772 2 115 | 15.6128 -0.6657 11.7872 2 116 | 16.8771 1.3038 12.3235 2 117 | 14.9679 0.6117 12.1597 2 118 | 17.8482 1.8721 13.7013 2 119 | 16.0070 1.0712 11.0600 2 120 | 16.6740 -1.4397 10.6692 2 121 | 16.4870 0.2892 12.7287 2 122 | 15.3756 -0.2828 11.9268 2 123 | 16.9785 0.6853 10.6489 2 124 | 16.5681 -0.2331 12.9144 2 125 | 15.3176 0.6043 11.1883 2 126 | 16.2156 0.3677 12.4405 2 127 | 14.9139 1.1300 12.7612 2 128 | 15.7218 0.3950 12.7093 2 129 | 14.6119 1.0417 10.9676 2 130 | 15.4401 -0.2049 11.6526 2 131 | 15.8061 -0.4928 12.6179 2 132 | 16.9766 0.5331 13.3870 2 133 | 15.8456 1.4158 12.5492 2 134 | 15.2686 1.6535 13.2734 2 135 | 15.9948 0.5385 12.8065 2 136 | 14.9503 1.8699 12.2682 2 137 | 15.4160 0.7405 11.1267 2 138 | 17.4192 1.4996 12.7690 2 139 | 16.1175 0.8153 12.7136 2 140 | 15.2741 0.3736 12.6791 2 141 | 15.9075 1.1702 11.9828 2 142 | 15.3974 1.2581 11.7523 2 143 | 15.3631 0.6612 12.7981 2 144 | 15.8013 -1.0799 12.8994 2 145 | 15.5932 2.5039 12.1062 2 146 | 16.1271 0.3167 11.3173 2 147 | 17.4273 2.8554 12.6826 2 148 | 15.7319 1.6870 12.1909 2 149 | 15.6830 -1.1028 12.2377 2 150 | 16.7876 1.2223 9.2320 2 151 | 16.7412 1.4016 13.6490 2 152 | 16.5391 1.7503 10.6173 2 153 | 16.0358 1.4555 11.2579 2 154 | 14.9832 1.1421 10.2485 2 155 | 16.6160 1.3955 11.5014 2 156 | 15.7546 1.2370 10.7228 2 157 | 16.1481 2.3019 12.5719 2 158 | 15.5728 1.2641 13.2301 2 159 | 16.4078 0.9841 11.2996 2 160 | 15.7553 0.9370 11.2900 2 161 | 15.4845 1.3268 10.7419 2 162 | 14.7214 0.6046 12.2015 2 163 | 15.0460 0.1300 10.4994 2 164 | 17.5946 0.6751 12.3626 2 165 | 17.9025 1.7707 11.8716 2 166 | 15.0771 1.4698 11.4130 2 167 | 15.8988 1.9530 12.3669 2 168 | 16.0568 1.6940 12.7293 2 169 | 15.6035 0.3362 10.5542 2 170 | 17.6563 2.0920 11.6360 2 171 | 16.5054 0.3906 11.3645 2 172 | 15.4404 0.7454 11.0945 2 173 | 14.8745 -0.3407 12.0165 2 174 | 16.0859 2.0249 11.5900 2 175 | 16.0599 1.3560 11.2541 2 176 | 14.7614 1.3415 9.4460 2 177 | 16.6093 1.0796 11.8355 2 178 | 16.0511 1.2431 12.4508 2 179 | 17.1943 0.5318 13.1299 2 180 | 15.7902 -0.0414 12.4695 2 181 | 16.4649 0.3576 11.4622 2 182 | 15.4115 2.1869 11.2169 2 183 | 16.0127 1.2792 13.2640 2 184 | 17.0948 1.0749 9.9216 2 185 | 17.2735 0.9872 13.5025 2 186 | 16.0781 0.2255 12.0559 2 187 | 18.2975 2.6378 11.9994 2 188 | 15.9226 1.7147 12.5535 2 189 | 17.1615 1.6739 11.6896 2 190 | 16.2235 1.3994 13.2076 2 191 | 15.7003 1.8888 13.6422 2 192 | 17.6646 2.3303 14.2385 2 193 | 16.2591 2.1734 12.0790 2 194 | 15.7570 0.1781 11.0215 2 195 | 15.8355 0.4728 
11.0913 2 196 | 16.2953 1.9901 11.2354 2 197 | 15.9943 1.8201 11.4980 2 198 | 16.0581 1.8387 10.0752 2 199 | 15.8480 1.0582 12.3381 2 200 | 1.8164 4.9567 1.9919 2 -------------------------------------------------------------------------------- /Perceptron/BinaryPerceptron/Test1.txt: -------------------------------------------------------------------------------- 1 | 2.4661 3.9251 1 2 | 1.7149 3.3708 1 3 | 1.2507 4.1563 1 4 | 1.9748 5.3451 1 5 | 2.2765 4.1448 1 6 | 2.0417 3.2886 1 7 | 2.7888 4.1234 1 8 | 1.8346 3.2821 1 9 | 2.3976 4.0743 1 10 | 1.6076 3.1535 1 11 | 1.3684 4.3596 1 12 | 2.3333 4.5709 1 13 | 1.3037 4.7760 1 14 | 1.3497 4.6918 1 15 | 1.6975 3.6210 1 16 | 1.2557 4.2213 1 17 | 2.2793 4.4555 1 18 | 1.8613 3.4630 1 19 | 1.3532 4.1009 1 20 | 1.5558 4.3814 1 21 | 1.5067 3.3559 1 22 | 1.9642 3.5235 1 23 | 0.7927 4.3891 1 24 | 1.6528 3.9968 1 25 | 1.3043 4.2622 1 26 | 2.1648 4.6821 1 27 | 2.2993 4.2410 1 28 | 2.0736 3.6065 1 29 | 1.9493 4.3760 1 30 | 0.6825 3.9166 1 31 | 2.0140 3.5919 1 32 | 1.5618 5.0470 1 33 | 1.8673 4.0401 1 34 | 1.8362 3.5314 1 35 | 1.4209 4.3179 1 36 | 2.2900 4.8410 1 37 | 2.1199 4.2968 1 38 | 1.8246 4.3951 1 39 | 2.4460 4.0526 1 40 | 2.7891 3.9207 1 41 | 1.4459 4.4355 1 42 | 1.9870 3.9026 1 43 | 1.4447 4.0377 1 44 | 2.3754 3.7367 1 45 | 2.2501 3.6573 1 46 | 1.7414 3.8658 1 47 | 1.7204 3.4058 1 48 | 1.6233 4.1243 1 49 | 8.4629 10.0512 1 50 | 8.8757 10.9795 1 51 | 1.8762 1.4066 2 52 | 1.7446 1.7209 2 53 | 8.1246 10.3361 2 54 | 8.1846 10.0693 2 55 | 8.0896 9.5702 2 56 | 7.9814 9.6239 2 57 | 7.1983 10.6148 2 58 | 8.1697 10.5754 2 59 | 7.9344 9.6960 2 60 | 8.2426 10.4031 2 61 | 8.2994 10.1086 2 62 | 7.9570 9.8133 2 63 | 8.1626 9.5840 2 64 | 7.8324 10.1434 2 65 | 7.8388 9.0906 2 66 | 7.8088 9.2135 2 67 | 7.5233 11.0078 2 68 | 8.1168 9.9640 2 69 | 8.6176 11.3145 2 70 | 7.7107 9.8783 2 71 | 7.7492 10.0866 2 72 | 8.3614 10.4616 2 73 | 8.0197 9.9107 2 74 | 8.7706 9.7391 2 75 | 7.1495 10.7160 2 76 | 7.4831 9.5649 2 77 | 7.6181 10.4038 2 78 | 9.0882 9.7447 2 79 | 8.2158 10.3718 2 80 | 7.7781 10.4239 2 81 | 8.0150 9.5850 2 82 | 7.8422 10.2665 2 83 | 8.4889 10.5164 2 84 | 8.0091 9.4740 2 85 | 8.4090 10.1811 2 86 | 8.3512 9.9816 2 87 | 7.8844 9.3862 2 88 | 7.9432 9.8625 2 89 | 8.0640 9.9198 2 90 | 7.6003 9.4582 2 91 | 7.8807 9.0229 2 92 | 7.9553 9.5453 2 93 | 7.4884 9.9972 2 94 | 8.4688 9.1383 2 95 | 7.4341 10.6315 2 96 | 7.6446 9.6998 2 97 | 7.4152 8.9680 2 98 | 8.5327 10.0555 2 99 | 7.6598 10.7438 2 100 | 7.1371 10.0265 2 -------------------------------------------------------------------------------- /Perceptron/BinaryPerceptron/Train.txt: -------------------------------------------------------------------------------- 1 | 3 2 300 2 | 11.0306 9.0152 8.0199 1 3 | 11.4008 8.7768 6.7652 1 4 | 11.2489 9.5744 8.0812 1 5 | 9.3157 7.4360 5.6128 1 6 | 10.3996 7.9322 7.8283 1 7 | 10.8224 10.7516 5.4981 1 8 | 9.0305 8.6674 6.9254 1 9 | 10.1821 11.0287 6.6184 1 10 | 10.6886 9.0148 6.4989 1 11 | 8.8373 8.7963 6.7583 1 12 | 9.1398 9.4470 6.4768 1 13 | 10.7024 9.0675 8.1215 1 14 | 9.9943 8.6061 6.8525 1 15 | 10.4734 10.2125 8.2261 1 16 | 11.2314 8.6995 6.9041 1 17 | 10.4351 9.7321 7.2843 1 18 | 9.2896 8.4121 7.0801 1 19 | 10.6788 9.7539 8.3300 1 20 | 9.8494 9.5942 7.5975 1 21 | 9.2633 8.1645 6.4084 1 22 | 11.8901 9.9436 6.4256 1 23 | 10.0723 9.7973 6.4827 1 24 | 9.1540 7.9661 6.9238 1 25 | 10.5738 9.4370 6.3062 1 26 | 11.5182 9.4084 7.7851 1 27 | 10.5358 8.2310 8.0972 1 28 | 10.7132 8.9980 7.4457 1 29 | 10.0950 8.9067 7.1707 1 30 | 9.8569 8.1443 7.2301 1 31 | 10.7861 7.7566 8.2188 1 32 | 
9.8242 8.2658 8.1978 1 33 | 10.0681 9.0154 6.0752 1 34 | 9.5247 7.5678 6.8740 1 35 | 9.3813 9.7719 7.4452 1 36 | 9.7578 8.4790 6.6197 1 37 | 8.9274 7.1403 8.7561 1 38 | 10.2244 9.1478 7.8777 1 39 | 10.0925 10.1449 6.3775 1 40 | 9.9630 9.0292 7.7425 1 41 | 7.9713 8.5391 6.9682 1 42 | 9.5390 8.8474 7.5424 1 43 | 10.2250 9.1951 5.5091 1 44 | 10.3332 9.7589 8.6131 1 45 | 10.1617 9.1835 5.5835 1 46 | 9.9663 9.5582 8.0813 1 47 | 8.5528 8.9663 5.5248 1 48 | 10.3063 9.1871 7.4727 1 49 | 9.8816 9.1766 7.8767 1 50 | 10.4379 8.3869 7.8186 1 51 | 10.5404 8.4455 7.1350 1 52 | 9.9223 8.9679 6.1164 1 53 | 10.2936 9.3507 7.0285 1 54 | 9.6975 8.2405 6.0747 1 55 | 9.7090 9.5269 5.4338 1 56 | 9.6549 8.9832 6.0586 1 57 | 9.1395 10.8065 7.4928 1 58 | 10.2108 9.3696 7.6209 1 59 | 11.1149 8.3377 6.8811 1 60 | 9.4778 9.1386 6.9128 1 61 | 9.5473 9.2455 7.3125 1 62 | 10.6525 8.8034 7.3615 1 63 | 10.0357 9.1869 7.2712 1 64 | 11.3912 8.9501 9.3456 1 65 | 8.4646 8.5746 4.3721 1 66 | 9.0669 9.0473 7.0317 1 67 | 9.3107 9.6975 7.1383 1 68 | 11.9645 10.1440 8.3523 1 69 | 10.3896 7.1240 6.3609 1 70 | 9.5995 9.0995 7.7090 1 71 | 10.0271 9.6288 7.2416 1 72 | 9.7151 8.9121 7.6865 1 73 | 10.8826 9.8989 8.2690 1 74 | 10.0165 9.5872 8.2965 1 75 | 10.7383 10.1939 7.3091 1 76 | 10.6339 10.1079 8.5437 1 77 | 9.7913 9.6607 7.2247 1 78 | 9.8974 7.7267 5.4962 1 79 | 10.1155 8.2886 6.8130 1 80 | 9.2784 9.3079 6.3476 1 81 | 9.7846 8.2052 5.2413 1 82 | 9.9192 8.5207 6.5325 1 83 | 9.0764 9.2510 6.1770 1 84 | 10.8462 9.2581 7.9573 1 85 | 8.9785 9.5064 7.0592 1 86 | 9.3585 7.8076 7.4774 1 87 | 8.9444 9.0573 6.9446 1 88 | 10.9617 8.0478 7.0488 1 89 | 9.3859 7.7390 6.5788 1 90 | 8.4423 8.7735 6.7609 1 91 | 10.7340 9.1053 8.3151 1 92 | 11.3014 8.5240 8.0890 1 93 | 10.6068 8.9544 5.9283 1 94 | 10.1252 8.5980 7.1027 1 95 | 9.2242 8.1033 6.4497 1 96 | 9.3210 8.9919 7.3185 1 97 | 11.1099 8.1989 6.9694 1 98 | 11.0387 9.5340 8.4314 1 99 | 9.4512 7.3199 6.4664 1 100 | 10.7276 9.6067 5.9398 1 101 | 10.1960 9.3145 8.3873 1 102 | 15.7777 1.5879 11.4440 2 103 | 15.8685 2.7902 11.2532 2 104 | 14.9448 0.7798 12.7481 2 105 | 15.9801 1.0142 14.2029 2 106 | 16.5581 1.8898 14.3712 2 107 | 17.6841 1.4798 14.7022 2 108 | 16.0739 2.0809 11.9011 2 109 | 17.4514 2.7384 13.1174 2 110 | 15.6564 1.5667 11.8511 2 111 | 14.8270 1.5208 11.6988 2 112 | 15.3465 1.1154 11.4126 2 113 | 15.4900 0.9971 11.6626 2 114 | 16.5611 0.6397 13.0409 2 115 | 14.7946 0.6621 12.0221 2 116 | 15.8889 0.1369 10.6020 2 117 | 15.0046 2.1485 12.0825 2 118 | 13.5150 0.1619 13.5822 2 119 | 16.2275 0.5147 11.9629 2 120 | 15.2254 0.0327 12.0838 2 121 | 17.0248 1.9667 10.5073 2 122 | 15.7311 1.2597 11.2062 2 123 | 17.0419 1.9678 12.0131 2 124 | 16.9442 2.0781 12.5975 2 125 | 17.9197 1.4727 13.4691 2 126 | 15.4081 1.9933 10.4642 2 127 | 14.9689 0.8315 11.3650 2 128 | 16.8566 1.8505 11.7138 2 129 | 15.6348 -0.4010 11.0338 2 130 | 15.6531 1.0313 12.2401 2 131 | 16.4351 0.7610 13.4342 2 132 | 16.4006 0.5650 12.4969 2 133 | 16.3440 -0.5039 12.5386 2 134 | 16.9950 -0.0752 13.2183 2 135 | 16.7730 1.1827 13.1649 2 136 | 14.9363 0.6297 10.4119 2 137 | 16.3628 0.2098 11.9967 2 138 | 15.4727 0.2816 11.8770 2 139 | 15.1159 1.4582 14.0080 2 140 | 16.1039 0.8925 12.8527 2 141 | 16.0618 0.9679 12.0856 2 142 | 15.5217 1.5430 12.2514 2 143 | 16.4884 2.2366 10.3939 2 144 | 16.6153 0.3362 9.9553 2 145 | 16.4861 2.5028 11.9847 2 146 | 15.5396 0.9811 13.0444 2 147 | 14.8067 0.4445 12.0818 2 148 | 15.4488 2.0822 11.7918 2 149 | 15.4897 1.3544 11.8519 2 150 | 16.0778 0.7619 13.7838 2 151 | 16.6242 0.9808 13.3966 2 152 | 
17.9259 1.0688 12.2722 2 153 | 15.9974 0.3185 11.6156 2 154 | 15.9192 1.5156 12.1130 2 155 | 15.7699 0.9284 12.3213 2 156 | 15.2109 -1.3428 10.3493 2 157 | 16.3817 0.9972 9.5883 2 158 | 15.8796 1.3996 11.5157 2 159 | 16.4870 1.7061 14.2754 2 160 | 16.7899 1.1013 11.6953 2 161 | 14.8710 0.2383 12.8888 2 162 | 16.7837 1.5001 13.4048 2 163 | 15.2736 0.3787 12.3897 2 164 | 15.3206 1.2400 11.7071 2 165 | 15.3268 -0.6637 11.5670 2 166 | 15.7205 1.3122 11.0238 2 167 | 14.6263 1.8092 10.7134 2 168 | 16.7460 1.7650 12.9012 2 169 | 15.4467 1.2589 12.6625 2 170 | 16.8663 0.6373 14.1387 2 171 | 17.7808 1.1063 12.0114 2 172 | 16.2662 2.0597 12.7493 2 173 | 15.6456 0.5354 13.2460 2 174 | 16.5198 -0.1682 14.0027 2 175 | 14.9697 0.0075 10.8758 2 176 | 16.0551 1.7093 10.6853 2 177 | 16.0111 1.0340 11.8104 2 178 | 15.8483 1.2674 12.4402 2 179 | 15.3796 1.2708 11.5423 2 180 | 15.1058 2.1898 12.6256 2 181 | 15.9551 1.5240 10.7409 2 182 | 16.6492 -0.5519 13.0733 2 183 | 15.7445 1.9646 12.9405 2 184 | 14.7138 1.6761 12.1394 2 185 | 16.4166 0.7784 13.2647 2 186 | 16.9852 0.8106 12.8300 2 187 | 15.0574 1.4985 12.1890 2 188 | 13.4341 -0.1047 9.7688 2 189 | 16.8997 0.9559 11.4215 2 190 | 16.0691 2.0721 10.8096 2 191 | 14.3151 0.7270 10.0798 2 192 | 15.4461 2.2549 12.0680 2 193 | 17.0555 0.7020 12.0805 2 194 | 15.4810 0.4294 11.4795 2 195 | 15.7610 2.2161 12.6564 2 196 | 16.0043 0.5165 11.9801 2 197 | 15.9644 -0.0796 11.8461 2 198 | 15.5438 0.3007 12.1923 2 199 | 14.9549 0.8183 11.1740 2 200 | 16.6412 1.4686 11.2776 2 201 | 16.6573 1.3231 12.1964 2 -------------------------------------------------------------------------------- /Perceptron/BinaryPerceptron/Train1.txt: -------------------------------------------------------------------------------- 1 | 2 2 100 2 | 1.7044 3.6651 1 3 | 1.6726 4.6705 1 4 | 1.4597 4.1940 1 5 | 1.9761 4.1965 1 6 | 2.1897 3.1463 1 7 | 1.8348 4.1139 1 8 | 1.7501 4.3428 1 9 | 1.9820 3.6816 1 10 | 1.9126 3.4987 1 11 | 1.5214 3.9072 1 12 | 2.6463 3.4730 1 13 | 2.2205 3.9642 1 14 | 2.6405 4.1396 1 15 | 1.7511 4.6866 1 16 | 1.4406 4.0899 1 17 | 2.4038 3.7290 1 18 | 2.0206 4.8171 1 19 | 1.6219 4.4126 1 20 | 1.9554 4.1154 1 21 | 0.9956 4.3358 1 22 | 2.5420 3.7460 1 23 | 1.5094 4.4282 1 24 | 1.6558 4.1343 1 25 | 2.6697 4.3125 1 26 | 1.5454 3.4763 1 27 | 1.7936 4.7678 1 28 | 1.7469 4.2172 1 29 | 2.8099 3.0414 1 30 | 2.0405 4.2350 1 31 | 1.4595 4.6372 1 32 | 1.4377 4.3193 1 33 | 2.8678 4.6904 1 34 | 2.9687 4.6599 1 35 | 2.8175 3.5453 1 36 | 1.3720 2.8472 1 37 | 1.8932 4.8944 1 38 | 1.9005 4.1954 1 39 | 2.1537 4.0102 1 40 | 1.7138 3.7970 1 41 | 1.5112 3.2326 1 42 | 1.7766 4.1107 1 43 | 2.5410 3.3128 1 44 | 3.1863 3.5804 1 45 | 2.1146 3.8957 1 46 | 1.8667 4.3780 1 47 | 2.3508 4.1879 1 48 | 1.7562 3.3273 1 49 | 2.9312 4.7409 1 50 | 2.5534 4.0164 1 51 | 1.3862 4.9352 1 52 | 7.3955 9.7069 2 53 | 7.6087 10.7687 2 54 | 7.6164 10.0700 2 55 | 7.9464 9.0686 2 56 | 7.5115 9.7729 2 57 | 7.5180 9.6740 2 58 | 6.8104 10.0517 2 59 | 7.5809 9.8897 2 60 | 8.1287 9.8605 2 61 | 7.9081 9.6332 2 62 | 7.9162 9.9677 2 63 | 7.9415 9.2780 2 64 | 8.0842 10.3062 2 65 | 7.7494 9.3382 2 66 | 7.6475 9.6692 2 67 | 8.2541 9.9269 2 68 | 7.7895 10.1240 2 69 | 8.1146 9.9617 2 70 | 7.5203 10.8691 2 71 | 7.9270 10.8110 2 72 | 8.3723 10.3132 2 73 | 7.5548 10.0459 2 74 | 8.0695 9.5962 2 75 | 7.8819 9.7693 2 76 | 7.9623 9.2970 2 77 | 7.8207 9.8127 2 78 | 6.9612 9.7645 2 79 | 7.9282 10.8756 2 80 | 8.6967 10.3766 2 81 | 8.3259 10.0325 2 82 | 7.8114 9.8536 2 83 | 7.6693 10.0414 2 84 | 8.1245 10.3831 2 85 | 7.8082 11.1184 2 86 | 7.7358 10.1634 
2 87 | 8.0277 10.4317 2 88 | 8.6269 10.3397 2 89 | 6.7400 10.2774 2 90 | 8.2924 10.5008 2 91 | 7.4960 10.6297 2 92 | 8.4721 10.0221 2 93 | 6.7880 9.8429 2 94 | 7.8881 10.1134 2 95 | 8.0290 10.4983 2 96 | 7.7877 10.6080 2 97 | 7.8985 9.7286 2 98 | 7.2435 10.4561 2 99 | 7.4368 9.9139 2 100 | 7.5925 9.8320 2 101 | 8.1833 10.2707 2 -------------------------------------------------------------------------------- /Perceptron/MultiClass-KERSEL/Test.txt: -------------------------------------------------------------------------------- 1 | 10.7496 9.3209 8.2053 1 2 | 9.9154 8.3233 7.2704 1 3 | 9.5856 8.3885 7.8567 1 4 | 10.0442 9.9109 6.3326 1 5 | 8.7696 7.9728 5.4523 1 6 | 9.8077 9.1897 7.2788 1 7 | 8.3563 8.0633 6.4292 1 8 | 11.3246 8.4540 6.4637 1 9 | 9.5811 9.7408 6.8965 1 10 | 10.6617 9.8422 5.7458 1 11 | 11.7670 10.0406 8.2927 1 12 | 9.2242 8.9054 6.9867 1 13 | 9.6976 8.8699 7.5056 1 14 | 9.4685 8.8554 7.9053 1 15 | 9.7187 9.5133 6.3626 1 16 | 10.8723 9.2491 6.8805 1 17 | 8.2823 7.9436 6.2555 1 18 | 11.0003 8.6869 7.9054 1 19 | 10.6458 10.2867 8.3352 1 20 | 10.1684 9.1351 7.4302 1 21 | 10.5657 7.9420 6.9983 1 22 | 9.6799 8.5231 7.9082 1 23 | 11.2159 9.0892 7.0128 1 24 | 10.1067 9.4304 8.4042 1 25 | 9.4741 9.9736 5.8608 1 26 | 9.6669 9.9259 6.7895 1 27 | 11.2978 9.3248 7.0669 1 28 | 10.0609 8.8317 4.8644 1 29 | 9.7942 9.3057 6.2757 1 30 | 10.4391 9.6415 7.0820 1 31 | 9.9227 9.2828 6.5822 1 32 | 10.3362 9.1440 7.8288 1 33 | 9.8612 8.4255 6.4015 1 34 | 10.4651 8.5358 7.6798 1 35 | 11.2254 9.3084 7.0281 1 36 | 9.6275 8.3777 5.7954 1 37 | 10.5991 8.3389 8.1433 1 38 | 9.6252 9.0393 7.2551 1 39 | 9.6875 7.7061 5.8133 1 40 | 9.9448 9.0206 7.2244 1 41 | 9.7555 9.6354 6.9679 1 42 | 9.4738 8.8236 6.4481 1 43 | 8.7259 8.0653 5.9340 1 44 | 9.0374 8.2800 7.0893 1 45 | 10.5013 9.1165 8.1145 1 46 | 11.3381 9.1392 7.1045 1 47 | 8.8808 7.9195 7.8346 1 48 | 9.5629 9.4856 7.2201 1 49 | 10.7089 9.9517 8.2742 1 50 | 9.9142 8.8241 6.4651 1 51 | 11.0792 9.8872 7.9406 1 52 | 9.3191 8.6497 5.0807 1 53 | 9.6302 10.0221 9.0362 1 54 | 8.4015 7.7601 6.1689 1 55 | 9.0907 7.9145 7.2521 1 56 | 11.0728 9.9912 8.6062 1 57 | 10.2721 9.6519 7.8759 1 58 | 9.5712 8.9366 5.8259 1 59 | 10.2762 9.7077 6.9175 1 60 | 9.4316 10.1369 6.6926 1 61 | 10.4818 8.7339 7.4721 1 62 | 8.3660 7.8901 6.5090 1 63 | 10.9893 11.0249 7.6186 1 64 | 10.1229 8.6427 8.8048 1 65 | 10.5275 7.3832 8.7825 1 66 | 9.4120 9.1836 8.9143 1 67 | 8.8757 7.8621 7.5715 1 68 | 8.8438 8.4579 5.7583 1 69 | 8.2532 7.9388 4.7198 1 70 | 10.1229 8.8196 6.3134 1 71 | 8.6431 9.2262 7.2631 1 72 | 11.0683 9.7726 7.3603 1 73 | 9.4771 8.4230 6.6976 1 74 | 11.1353 8.4275 7.1301 1 75 | 9.9507 10.5714 8.3463 1 76 | 9.0736 8.9618 7.6584 1 77 | 10.0982 7.9032 6.7917 1 78 | 11.4285 9.8467 8.0225 1 79 | 9.6425 8.9651 8.3227 1 80 | 9.7184 8.7312 6.6780 1 81 | 10.3583 9.5181 4.5244 1 82 | 10.4453 8.6843 8.0855 1 83 | 9.6115 8.7506 4.0352 1 84 | 10.5446 7.9432 7.2141 1 85 | 9.3538 8.9172 6.7143 1 86 | 9.8656 8.8498 6.6526 1 87 | 11.0322 7.3265 6.4970 1 88 | 10.8494 7.3252 7.6209 1 89 | 11.3436 9.9368 8.7048 1 90 | 10.5592 8.5825 7.6395 1 91 | 10.7046 9.0759 7.6274 1 92 | 9.4037 7.9561 6.5548 1 93 | 10.3769 9.0183 8.0351 1 94 | 10.5254 8.3985 7.6241 1 95 | 9.8589 10.0712 5.1050 1 96 | 11.4078 9.9535 7.8718 1 97 | 11.2654 7.7387 7.6893 1 98 | 9.6725 10.0025 6.5671 1 99 | 8.9853 8.5895 6.5827 1 100 | 8.5839 8.8262 5.9282 2 101 | 17.5285 1.4180 12.2175 1 102 | 16.4128 -0.0407 11.9689 2 103 | 16.9632 1.1576 13.5849 2 104 | 15.8849 1.0354 12.2533 2 105 | 16.1697 1.3018 12.3130 2 106 | 15.5909 
1.5734 12.3182 2 107 | 15.4403 -0.0415 10.1679 2 108 | 14.2622 -0.4111 13.1860 2 109 | 14.7134 2.1461 11.2449 2 110 | 16.2419 3.1025 14.2964 2 111 | 15.6727 1.5943 11.9458 2 112 | 15.1118 0.3178 12.2374 2 113 | 17.2321 1.7972 12.0216 2 114 | 16.7825 1.3698 13.7772 2 115 | 15.6128 -0.6657 11.7872 2 116 | 16.8771 1.3038 12.3235 2 117 | 14.9679 0.6117 12.1597 2 118 | 17.8482 1.8721 13.7013 2 119 | 16.0070 1.0712 11.0600 2 120 | 16.6740 -1.4397 10.6692 2 121 | 16.4870 0.2892 12.7287 2 122 | 15.3756 -0.2828 11.9268 2 123 | 16.9785 0.6853 10.6489 2 124 | 16.5681 -0.2331 12.9144 2 125 | 15.3176 0.6043 11.1883 2 126 | 16.2156 0.3677 12.4405 2 127 | 14.9139 1.1300 12.7612 2 128 | 15.7218 0.3950 12.7093 2 129 | 14.6119 1.0417 10.9676 2 130 | 15.4401 -0.2049 11.6526 2 131 | 15.8061 -0.4928 12.6179 2 132 | 16.9766 0.5331 13.3870 2 133 | 15.8456 1.4158 12.5492 2 134 | 15.2686 1.6535 13.2734 2 135 | 15.9948 0.5385 12.8065 2 136 | 14.9503 1.8699 12.2682 2 137 | 15.4160 0.7405 11.1267 2 138 | 17.4192 1.4996 12.7690 2 139 | 16.1175 0.8153 12.7136 2 140 | 15.2741 0.3736 12.6791 2 141 | 15.9075 1.1702 11.9828 2 142 | 15.3974 1.2581 11.7523 2 143 | 15.3631 0.6612 12.7981 2 144 | 15.8013 -1.0799 12.8994 2 145 | 15.5932 2.5039 12.1062 2 146 | 16.1271 0.3167 11.3173 2 147 | 17.4273 2.8554 12.6826 2 148 | 15.7319 1.6870 12.1909 2 149 | 15.6830 -1.1028 12.2377 2 150 | 16.7876 1.2223 9.2320 2 151 | 16.7412 1.4016 13.6490 2 152 | 16.5391 1.7503 10.6173 2 153 | 16.0358 1.4555 11.2579 2 154 | 14.9832 1.1421 10.2485 2 155 | 16.6160 1.3955 11.5014 2 156 | 15.7546 1.2370 10.7228 2 157 | 16.1481 2.3019 12.5719 2 158 | 15.5728 1.2641 13.2301 2 159 | 16.4078 0.9841 11.2996 2 160 | 15.7553 0.9370 11.2900 2 161 | 15.4845 1.3268 10.7419 2 162 | 14.7214 0.6046 12.2015 2 163 | 15.0460 0.1300 10.4994 2 164 | 17.5946 0.6751 12.3626 2 165 | 17.9025 1.7707 11.8716 2 166 | 15.0771 1.4698 11.4130 2 167 | 15.8988 1.9530 12.3669 2 168 | 16.0568 1.6940 12.7293 2 169 | 15.6035 0.3362 10.5542 2 170 | 17.6563 2.0920 11.6360 2 171 | 16.5054 0.3906 11.3645 2 172 | 15.4404 0.7454 11.0945 2 173 | 14.8745 -0.3407 12.0165 2 174 | 16.0859 2.0249 11.5900 2 175 | 16.0599 1.3560 11.2541 2 176 | 14.7614 1.3415 9.4460 2 177 | 16.6093 1.0796 11.8355 2 178 | 16.0511 1.2431 12.4508 2 179 | 17.1943 0.5318 13.1299 2 180 | 15.7902 -0.0414 12.4695 2 181 | 16.4649 0.3576 11.4622 2 182 | 15.4115 2.1869 11.2169 2 183 | 16.0127 1.2792 13.2640 2 184 | 17.0948 1.0749 9.9216 2 185 | 17.2735 0.9872 13.5025 2 186 | 16.0781 0.2255 12.0559 2 187 | 18.2975 2.6378 11.9994 2 188 | 15.9226 1.7147 12.5535 2 189 | 17.1615 1.6739 11.6896 2 190 | 16.2235 1.3994 13.2076 2 191 | 15.7003 1.8888 13.6422 2 192 | 17.6646 2.3303 14.2385 2 193 | 16.2591 2.1734 12.0790 2 194 | 15.7570 0.1781 11.0215 2 195 | 15.8355 0.4728 11.0913 2 196 | 16.2953 1.9901 11.2354 2 197 | 15.9943 1.8201 11.4980 2 198 | 16.0581 1.8387 10.0752 2 199 | 15.8480 1.0582 12.3381 2 200 | 15.1319 0.9429 12.2790 3 201 | 1.8164 4.9567 1.9919 2 202 | 1.2310 4.0781 -0.3326 3 203 | 2.0197 5.3545 1.7589 3 204 | 1.4547 3.7354 1.7839 3 205 | 1.7128 4.2666 0.6692 3 206 | 2.5213 5.2036 1.8368 3 207 | 2.5548 5.6332 -0.1709 3 208 | 0.8311 5.1981 1.4703 3 209 | 2.8595 5.4070 -0.3497 3 210 | 1.4417 6.2914 -0.4817 3 211 | 0.9073 5.7923 0.1632 3 212 | -0.3713 3.7263 0.0262 3 213 | 0.7592 5.6503 2.6293 3 214 | 2.0231 5.4192 1.8244 3 215 | 1.3609 6.2634 0.8114 3 216 | 0.3345 4.9827 1.3353 3 217 | 2.9122 5.2973 1.4189 3 218 | 3.4713 5.7521 0.9685 3 219 | 1.8546 6.5332 0.7816 3 220 | 2.3321 5.1221 0.9638 3 221 | 3.2252 3.6075 1.7314 3 
222 | 1.5446 3.4930 0.9457 3 223 | 1.3652 4.0634 1.1635 3 224 | 2.5601 4.1342 1.6843 3 225 | 1.9095 4.4044 -1.0665 3 226 | 1.6357 4.6097 1.7723 3 227 | 1.6546 4.3777 0.7446 3 228 | 2.1156 3.7935 1.2355 3 229 | 2.9869 5.0101 2.3796 3 230 | 2.4258 5.7579 1.6964 3 231 | 3.1429 5.0799 1.3263 3 232 | 3.0622 4.4711 3.0234 3 233 | 2.1928 5.1576 1.8888 3 234 | 1.5785 4.6584 0.4092 3 235 | 1.3691 4.9437 1.2340 3 236 | 2.4168 5.3974 1.0279 3 237 | 2.8507 3.5408 3.6910 3 238 | 2.3769 5.5979 0.4664 3 239 | 2.5088 5.3869 2.7400 3 240 | 1.3100 4.3662 0.0991 3 241 | 1.2245 5.2319 0.1541 3 242 | -0.0476 4.6948 -0.1417 3 243 | 3.4946 5.8640 0.6367 3 244 | 1.1026 3.6993 0.9014 3 245 | 2.3103 4.6145 0.6502 3 246 | 3.1079 4.8379 1.6201 3 247 | 2.0626 4.2884 1.6934 3 248 | 0.5821 4.5090 -1.2249 3 249 | 0.3734 5.0369 -1.0103 3 250 | 3.1608 6.2064 2.6966 3 251 | 1.1508 5.4127 0.2888 3 252 | 2.2635 5.1343 0.2223 3 253 | 2.8428 4.3838 0.8225 3 254 | 3.1883 5.1876 1.5448 3 255 | 2.3003 5.3843 0.0186 3 256 | 2.4622 5.1545 1.2134 3 257 | 2.0459 5.0830 1.4316 3 258 | 2.1523 4.4064 0.9489 3 259 | 1.5985 3.5951 0.6608 3 260 | 1.5203 4.7185 -0.5255 3 261 | 1.8597 5.5182 0.3609 3 262 | 1.6102 4.8970 -0.1958 3 263 | 2.0869 5.4048 1.1729 3 264 | 2.5044 5.1007 0.8618 3 265 | 2.0184 5.4557 0.6170 3 266 | 2.8007 6.1917 0.8120 3 267 | 1.7718 4.0774 0.5363 3 268 | 3.1455 4.4267 0.4325 3 269 | 1.0722 3.9010 0.2414 3 270 | 2.1054 5.8186 -0.2248 3 271 | 0.7817 5.2241 0.8202 3 272 | 1.2759 4.2293 -0.2104 3 273 | 1.1294 5.0194 -0.4815 3 274 | 2.4199 4.1450 1.8761 3 275 | 1.7063 4.7967 -0.0270 3 276 | 1.2922 6.3988 1.3599 3 277 | 2.0843 4.8762 0.9703 3 278 | 1.9565 5.4790 3.0156 3 279 | 1.6440 5.1725 -0.9646 3 280 | 0.9005 4.5087 1.1111 3 281 | 2.6250 4.3635 1.6641 3 282 | 2.1382 3.6103 1.5594 3 283 | 2.5736 4.8799 1.2019 3 284 | 3.2041 4.7465 1.2511 3 285 | 0.8804 3.7839 1.5572 3 286 | 2.2413 5.5871 0.2361 3 287 | 0.8975 4.1012 -0.1284 3 288 | 1.7408 5.0865 0.9688 3 289 | 2.0951 5.3398 1.4135 3 290 | 0.7351 4.4120 -1.1720 3 291 | 0.8174 4.9502 0.7998 3 292 | 2.2111 6.2948 1.3464 3 293 | 2.0793 3.9244 0.8402 3 294 | 2.8200 4.7741 1.0633 3 295 | 0.3174 4.1101 -0.0933 3 296 | 2.5083 4.0213 2.5067 3 297 | 2.4330 5.1049 2.6796 3 298 | 3.3125 5.9690 0.7334 3 299 | 1.9635 6.6452 1.5356 3 300 | 2.1617 6.0891 1.3641 3 301 | -------------------------------------------------------------------------------- /Perceptron/MultiClass-KERSEL/Train.txt: -------------------------------------------------------------------------------- 1 | 3 3 300 2 | 11.0306 9.0152 8.0199 1 3 | 11.4008 8.7768 6.7652 1 4 | 11.2489 9.5744 8.0812 1 5 | 9.3157 7.4360 5.6128 1 6 | 10.3996 7.9322 7.8283 1 7 | 10.8224 10.7516 5.4981 1 8 | 9.0305 8.6674 6.9254 1 9 | 10.1821 11.0287 6.6184 1 10 | 10.6886 9.0148 6.4989 1 11 | 8.8373 8.7963 6.7583 1 12 | 9.1398 9.4470 6.4768 1 13 | 10.7024 9.0675 8.1215 1 14 | 9.9943 8.6061 6.8525 1 15 | 10.4734 10.2125 8.2261 1 16 | 11.2314 8.6995 6.9041 1 17 | 10.4351 9.7321 7.2843 1 18 | 9.2896 8.4121 7.0801 1 19 | 10.6788 9.7539 8.3300 1 20 | 9.8494 9.5942 7.5975 1 21 | 9.2633 8.1645 6.4084 1 22 | 11.8901 9.9436 6.4256 1 23 | 10.0723 9.7973 6.4827 1 24 | 9.1540 7.9661 6.9238 1 25 | 10.5738 9.4370 6.3062 1 26 | 11.5182 9.4084 7.7851 1 27 | 10.5358 8.2310 8.0972 1 28 | 10.7132 8.9980 7.4457 1 29 | 10.0950 8.9067 7.1707 1 30 | 9.8569 8.1443 7.2301 1 31 | 10.7861 7.7566 8.2188 1 32 | 9.8242 8.2658 8.1978 1 33 | 10.0681 9.0154 6.0752 1 34 | 9.5247 7.5678 6.8740 1 35 | 9.3813 9.7719 7.4452 1 36 | 9.7578 8.4790 6.6197 1 37 | 8.9274 7.1403 8.7561 1 38 
| 10.2244 9.1478 7.8777 1 39 | 10.0925 10.1449 6.3775 1 40 | 9.9630 9.0292 7.7425 1 41 | 7.9713 8.5391 6.9682 1 42 | 9.5390 8.8474 7.5424 1 43 | 10.2250 9.1951 5.5091 1 44 | 10.3332 9.7589 8.6131 1 45 | 10.1617 9.1835 5.5835 1 46 | 9.9663 9.5582 8.0813 1 47 | 8.5528 8.9663 5.5248 1 48 | 10.3063 9.1871 7.4727 1 49 | 9.8816 9.1766 7.8767 1 50 | 10.4379 8.3869 7.8186 1 51 | 10.5404 8.4455 7.1350 1 52 | 9.9223 8.9679 6.1164 1 53 | 10.2936 9.3507 7.0285 1 54 | 9.6975 8.2405 6.0747 1 55 | 9.7090 9.5269 5.4338 1 56 | 9.6549 8.9832 6.0586 1 57 | 9.1395 10.8065 7.4928 1 58 | 10.2108 9.3696 7.6209 1 59 | 11.1149 8.3377 6.8811 1 60 | 9.4778 9.1386 6.9128 1 61 | 9.5473 9.2455 7.3125 1 62 | 10.6525 8.8034 7.3615 1 63 | 10.0357 9.1869 7.2712 1 64 | 11.3912 8.9501 9.3456 1 65 | 8.4646 8.5746 4.3721 1 66 | 9.0669 9.0473 7.0317 1 67 | 9.3107 9.6975 7.1383 1 68 | 11.9645 10.1440 8.3523 1 69 | 10.3896 7.1240 6.3609 1 70 | 9.5995 9.0995 7.7090 1 71 | 10.0271 9.6288 7.2416 1 72 | 9.7151 8.9121 7.6865 1 73 | 10.8826 9.8989 8.2690 1 74 | 10.0165 9.5872 8.2965 1 75 | 10.7383 10.1939 7.3091 1 76 | 10.6339 10.1079 8.5437 1 77 | 9.7913 9.6607 7.2247 1 78 | 9.8974 7.7267 5.4962 1 79 | 10.1155 8.2886 6.8130 1 80 | 9.2784 9.3079 6.3476 1 81 | 9.7846 8.2052 5.2413 1 82 | 9.9192 8.5207 6.5325 1 83 | 9.0764 9.2510 6.1770 1 84 | 10.8462 9.2581 7.9573 1 85 | 8.9785 9.5064 7.0592 1 86 | 9.3585 7.8076 7.4774 1 87 | 8.9444 9.0573 6.9446 1 88 | 10.9617 8.0478 7.0488 1 89 | 9.3859 7.7390 6.5788 1 90 | 8.4423 8.7735 6.7609 1 91 | 10.7340 9.1053 8.3151 1 92 | 11.3014 8.5240 8.0890 1 93 | 10.6068 8.9544 5.9283 1 94 | 10.1252 8.5980 7.1027 1 95 | 9.2242 8.1033 6.4497 1 96 | 9.3210 8.9919 7.3185 1 97 | 11.1099 8.1989 6.9694 1 98 | 11.0387 9.5340 8.4314 1 99 | 9.4512 7.3199 6.4664 1 100 | 10.7276 9.6067 5.9398 1 101 | 10.1960 9.3145 8.3873 1 102 | 15.7777 1.5879 11.4440 2 103 | 15.8685 2.7902 11.2532 2 104 | 14.9448 0.7798 12.7481 2 105 | 15.9801 1.0142 14.2029 2 106 | 16.5581 1.8898 14.3712 2 107 | 17.6841 1.4798 14.7022 2 108 | 16.0739 2.0809 11.9011 2 109 | 17.4514 2.7384 13.1174 2 110 | 15.6564 1.5667 11.8511 2 111 | 14.8270 1.5208 11.6988 2 112 | 15.3465 1.1154 11.4126 2 113 | 15.4900 0.9971 11.6626 2 114 | 16.5611 0.6397 13.0409 2 115 | 14.7946 0.6621 12.0221 2 116 | 15.8889 0.1369 10.6020 2 117 | 15.0046 2.1485 12.0825 2 118 | 13.5150 0.1619 13.5822 2 119 | 16.2275 0.5147 11.9629 2 120 | 15.2254 0.0327 12.0838 2 121 | 17.0248 1.9667 10.5073 2 122 | 15.7311 1.2597 11.2062 2 123 | 17.0419 1.9678 12.0131 2 124 | 16.9442 2.0781 12.5975 2 125 | 17.9197 1.4727 13.4691 2 126 | 15.4081 1.9933 10.4642 2 127 | 14.9689 0.8315 11.3650 2 128 | 16.8566 1.8505 11.7138 2 129 | 15.6348 -0.4010 11.0338 2 130 | 15.6531 1.0313 12.2401 2 131 | 16.4351 0.7610 13.4342 2 132 | 16.4006 0.5650 12.4969 2 133 | 16.3440 -0.5039 12.5386 2 134 | 16.9950 -0.0752 13.2183 2 135 | 16.7730 1.1827 13.1649 2 136 | 14.9363 0.6297 10.4119 2 137 | 16.3628 0.2098 11.9967 2 138 | 15.4727 0.2816 11.8770 2 139 | 15.1159 1.4582 14.0080 2 140 | 16.1039 0.8925 12.8527 2 141 | 16.0618 0.9679 12.0856 2 142 | 15.5217 1.5430 12.2514 2 143 | 16.4884 2.2366 10.3939 2 144 | 16.6153 0.3362 9.9553 2 145 | 16.4861 2.5028 11.9847 2 146 | 15.5396 0.9811 13.0444 2 147 | 14.8067 0.4445 12.0818 2 148 | 15.4488 2.0822 11.7918 2 149 | 15.4897 1.3544 11.8519 2 150 | 16.0778 0.7619 13.7838 2 151 | 16.6242 0.9808 13.3966 2 152 | 17.9259 1.0688 12.2722 2 153 | 15.9974 0.3185 11.6156 2 154 | 15.9192 1.5156 12.1130 2 155 | 15.7699 0.9284 12.3213 2 156 | 15.2109 -1.3428 10.3493 2 157 | 16.3817 
0.9972 9.5883 2 158 | 15.8796 1.3996 11.5157 2 159 | 16.4870 1.7061 14.2754 2 160 | 16.7899 1.1013 11.6953 2 161 | 14.8710 0.2383 12.8888 2 162 | 16.7837 1.5001 13.4048 2 163 | 15.2736 0.3787 12.3897 2 164 | 15.3206 1.2400 11.7071 2 165 | 15.3268 -0.6637 11.5670 2 166 | 15.7205 1.3122 11.0238 2 167 | 14.6263 1.8092 10.7134 2 168 | 16.7460 1.7650 12.9012 2 169 | 15.4467 1.2589 12.6625 2 170 | 16.8663 0.6373 14.1387 2 171 | 17.7808 1.1063 12.0114 2 172 | 16.2662 2.0597 12.7493 2 173 | 15.6456 0.5354 13.2460 2 174 | 16.5198 -0.1682 14.0027 2 175 | 14.9697 0.0075 10.8758 2 176 | 16.0551 1.7093 10.6853 2 177 | 16.0111 1.0340 11.8104 2 178 | 15.8483 1.2674 12.4402 2 179 | 15.3796 1.2708 11.5423 2 180 | 15.1058 2.1898 12.6256 2 181 | 15.9551 1.5240 10.7409 2 182 | 16.6492 -0.5519 13.0733 2 183 | 15.7445 1.9646 12.9405 2 184 | 14.7138 1.6761 12.1394 2 185 | 16.4166 0.7784 13.2647 2 186 | 16.9852 0.8106 12.8300 2 187 | 15.0574 1.4985 12.1890 2 188 | 13.4341 -0.1047 9.7688 2 189 | 16.8997 0.9559 11.4215 2 190 | 16.0691 2.0721 10.8096 2 191 | 14.3151 0.7270 10.0798 2 192 | 15.4461 2.2549 12.0680 2 193 | 17.0555 0.7020 12.0805 2 194 | 15.4810 0.4294 11.4795 2 195 | 15.7610 2.2161 12.6564 2 196 | 16.0043 0.5165 11.9801 2 197 | 15.9644 -0.0796 11.8461 2 198 | 15.5438 0.3007 12.1923 2 199 | 14.9549 0.8183 11.1740 2 200 | 16.6412 1.4686 11.2776 2 201 | 16.6573 1.3231 12.1964 2 202 | 2.3979 5.6525 2.7566 3 203 | 2.5103 6.3484 1.4272 3 204 | 1.3739 3.2679 1.2037 3 205 | 2.7527 4.6571 3.1138 3 206 | -0.0195 4.5524 0.0118 3 207 | 2.9907 5.6814 2.0605 3 208 | 1.9985 4.5837 1.2220 3 209 | 0.5427 4.8057 -0.2565 3 210 | 0.8909 3.7593 -1.8304 3 211 | 2.1872 4.9112 -1.1518 3 212 | 2.1994 4.8305 0.7212 3 213 | 1.0919 5.4586 0.2100 3 214 | 1.5911 4.4816 0.4354 3 215 | 3.2632 4.6854 0.0431 3 216 | 1.5830 4.2092 0.9880 3 217 | 2.0295 5.2180 1.2271 3 218 | 2.7210 4.6464 0.5148 3 219 | 2.8095 6.4380 2.1198 3 220 | 2.1245 5.4675 -0.4251 3 221 | 0.5385 4.8213 -0.4479 3 222 | 0.5138 4.4593 0.2458 3 223 | 2.3870 5.3485 -0.0398 3 224 | 1.3346 5.8191 2.0320 3 225 | 2.5099 3.4641 2.3567 3 226 | 0.7506 5.6132 0.9061 3 227 | 2.4154 4.8348 -0.1897 3 228 | 2.5681 5.1651 1.8034 3 229 | 2.3429 5.7635 -1.1672 3 230 | 1.0854 5.1752 0.2447 3 231 | 1.6866 3.6964 3.7094 3 232 | 2.3989 3.9841 1.4085 3 233 | 0.5646 5.0189 -0.5025 3 234 | 1.3669 4.1336 1.5222 3 235 | 1.0273 3.4943 0.2604 3 236 | 2.9046 4.9898 2.2343 3 237 | 3.5610 5.1494 1.1610 3 238 | 2.6400 4.3108 2.3558 3 239 | 1.3249 3.9717 1.6030 3 240 | 2.2066 5.2779 1.2942 3 241 | 1.7983 3.5060 0.4844 3 242 | 1.2298 5.7672 1.0902 3 243 | 2.3120 5.5898 0.9895 3 244 | 2.0991 4.8216 0.9363 3 245 | 0.9773 4.6890 1.2952 3 246 | 1.3834 4.1403 -0.0278 3 247 | 1.7492 4.1434 0.0807 3 248 | 2.5910 5.4506 1.2283 3 249 | 0.8732 5.3541 0.0232 3 250 | 1.4607 5.8063 -0.4487 3 251 | 1.5651 4.7785 1.6444 3 252 | 2.8876 5.7157 1.1124 3 253 | 3.5905 6.9277 0.2697 3 254 | 3.2884 7.0739 1.2774 3 255 | 2.8230 4.9552 1.1151 3 256 | 2.2950 6.7673 -0.8576 3 257 | 2.0628 4.8981 -0.2475 3 258 | 0.6463 4.0825 -1.2036 3 259 | 1.6225 5.3152 1.9456 3 260 | 1.9810 4.9567 0.8577 3 261 | 2.2062 5.9340 2.2026 3 262 | 1.0900 5.2339 1.2645 3 263 | 1.4001 3.6624 1.0991 3 264 | 2.5038 2.8674 0.2747 3 265 | 0.9272 5.0983 0.9448 3 266 | 1.3000 4.0404 -0.5611 3 267 | 2.2446 5.7529 0.4953 3 268 | 3.3855 6.5875 1.0011 3 269 | 1.0502 3.9911 0.7716 3 270 | 2.5646 5.9199 -0.3763 3 271 | 1.2801 4.9124 -0.5147 3 272 | 1.7170 4.4775 1.0338 3 273 | 1.4564 6.0273 2.3124 3 274 | 3.1364 4.9380 1.6813 3 275 | 2.7749 6.1456 0.9649 3 276 | 
0.0997 5.6449 0.4465 3 277 | 1.6742 3.3360 0.7745 3 278 | 2.4996 7.3610 3.2431 3 279 | 0.5952 5.6160 0.5731 3 280 | 1.8135 5.7359 1.8975 3 281 | 1.6159 4.3129 0.7268 3 282 | 2.4457 4.9343 0.3891 3 283 | 1.2139 3.7429 0.8952 3 284 | 2.0721 4.0677 1.8840 3 285 | 1.5292 4.1920 -0.7789 3 286 | 0.7238 5.0763 -0.3921 3 287 | 1.6531 6.2856 0.8682 3 288 | 1.5867 5.8947 1.4070 3 289 | 1.7369 4.5551 0.0121 3 290 | 1.7281 3.2848 0.1128 3 291 | 0.5661 4.7663 1.1067 3 292 | 2.9877 4.7308 0.0381 3 293 | 3.1952 4.0055 2.2186 3 294 | 1.8858 5.3040 1.7988 3 295 | 1.3346 5.2458 0.2305 3 296 | 2.1929 5.6994 1.7522 3 297 | 1.6385 4.6960 2.0489 3 298 | 2.0586 4.7030 1.2737 3 299 | 0.4132 4.3895 1.2338 3 300 | 3.5225 5.3062 1.6239 3 301 | 2.2955 5.2485 1.1357 3 302 |
-------------------------------------------------------------------------------- /Perceptron/MultiClass-KERSEL/kesler.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | """
4 | Created on Sun Nov 18 12:56:26 2018
5 |
6 | @author: yeaseen
7 | """
8 | import numpy as np
9 |
10 |
11 | def split(s, delim=[" ", '\n']):
12 | words = []
13 | word = []
14 | for c in s:
15 | if c not in delim:
16 | word.append(c)
17 | else:
18 | if word:
19 | words.append(''.join(word))
20 | word = []
21 | if word:
22 | words.append(''.join(word))
23 | return words
24 |
25 | def loadfile(filename,checkTrain):
26 | file = open(filename, "r")
27 | first = checkTrain
28 | rows = list()
29 | for line in file:
30 | if first:  # the first line of a training file holds the dimensions
31 | dims = split(line)
32 | first = False
33 | else:
34 | vals = split(line, [' ' ,'\t', '\n'])
35 | #print(vals)
36 | rows.append(vals)
37 |
38 | if(checkTrain):
39 | return dims, rows
40 | else:
41 | return rows
42 |
43 |
44 |
45 | dims, rows = loadfile('Train.txt',True)
46 |
47 | test = loadfile('Test.txt',False)
48 |
49 |
50 |
51 | dims=np.array(dims)
52 | dims = dims.astype(float)
53 | rows=np.array(rows)
54 | mat = rows.astype(float)
55 |
56 | test=np.array(test)
57 | test= test.astype(float)
58 | #print(test)
59 | #print(dims)
60 | #print(mat)
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 | '''
69 | matrix=[[11.0306 , 9.0152 , 8.0199 , 7.62 , 1],
70 | [11.4008 , 8.7768 , 6.7652 , 8.9 , 1],
71 | [14.6263 , 1.8092 , 10.7134 , 6.7 , 2],
72 | [15.4467 , 1.2589 , 12.6625 , 8.2 , 2],
73 | [1.3346 , 5.8191 , 2.0320 , 7.5 , 3],
74 | [0.7506 , 5.6132 , 0.9061 , 7.7 , 3]
75 | ]
76 | matrixOutput=[11.0306 , 9.0152 , 8.0199 , 7.62 ]
77 | '''
78 | #matrixOutput=matrixOutput+ [1]
79 |
80 | att=int(dims[0])
81 | #print(att)
82 | clss=int(dims[1])
83 | #print(clss)
84 |
85 | finalMat=np.empty((0,(att+1)*clss))  # Kesler-expanded training rows
86 | w =np.random.random_sample(((att+1)*clss,))
87 | #print(w)
88 |
89 | matrix=np.array(mat)
90 |
91 | Y=matrix[:,-1].copy()
92 | Y=np.array(Y)
93 | Y=Y.astype(int)
94 | Y=np.array(Y).tolist()
95 | #print([Y])
96 | matrix[:,-1]=np.ones((matrix.shape[0]))  # overwrite the label column with a constant 1 (bias input)
97 |
98 | targetOutput=test[:,-1].copy()
99 | targetOutput=np.array(targetOutput)
100 | targetOutput=targetOutput.astype(int)
101 | targetOutput=np.array(targetOutput).tolist()
102 |
103 | test[:,-1] = np.ones((test.shape[0]))
104 |
105 | #print(test)
106 | #print(matrix)
107 | count=0
108 | for i in matrix:
109 | #print(i)
110 | a=np.zeros(((att+1)*clss))
111 | #print(Y[count])
112 | #print(count)
113 | classVal=int(Y[count])
114 | #print(classVal)
115 | a[(classVal-1)*(att+1) : classVal*(att+1)]=i  # +[x, 1] in the true class's block
116 | #print([a])
117 | for j in range(clss):
118 | if( (j+1) != classVal):
119 | x=a.copy()
120 | x[j*(att+1) : (j+1)*(att+1)] = -i  # -[x, 1] in each rival class's block
121 | finalMat = np.vstack([finalMat, x])
122 | #print(x)
123 | count+=1
124 |
125 |
126 | #print(finalMat)
127 |
128 | counter=0
129 | maxCounter=finalMat.shape[0]
130 | constantTerm= 0.5
131 | discriminant= True
132 | while(discriminant):
133 | for i in finalMat:
134 | product=np.dot(i,w)
135 | counter+=1
136 | if(product < 0):
137 | w=w+(i*constantTerm)  # fixed-increment correction
138 | #print(w)
139 | counter = 0
140 | #print(product)
141 | if(counter == maxCounter):  # one full error-free pass over finalMat: converged
142 | discriminant = False
143 |
144 | #print(w)
145 |
146 | predictedOutput=[]
147 | for eachrow in test:
148 | val=0
149 | classed=0
150 | for k in range(clss):
151 | a=np.zeros(((att+1)*clss))
152 | a[k*(att+1) : (k+1)*(att+1)] = eachrow
153 | got=np.dot(a,w)
154 | if(val < got):  # keep the highest-scoring class block
155 | val = got
156 | classed = k+1
157 | predictedOutput.append(classed)
158 |
159 |
160 | from sklearn.metrics import classification_report
161 | target_names = []
162 | for i in range(clss):
163 | target_names.append('class'+str(i+1))  # labels run 1..clss
164 |
165 | print(classification_report(targetOutput, predictedOutput, target_names=target_names))
166 |
167 | from sklearn.metrics import accuracy_score
168 | print("Accuracy is: "+str(accuracy_score(targetOutput, predictedOutput)))
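kesler.py above turns the clss-way classification problem into a single binary perceptron via the Kesler construction: every augmented sample [x, 1] of true class r becomes clss-1 training rows in a (att+1)*clss-dimensional space, carrying +[x, 1] in block r and -[x, 1] in block j for each rival class j, so that w . row > 0 for all rows is exactly the condition that class r outscores every rival. A small sketch of just the expansion step; the function name and sample values are invented for illustration, not from the repo:

# Sketch of the Kesler expansion performed in kesler.py's main loop.
# Names and sample values here are illustrative, not from the repo.
import numpy as np

def kesler_rows(x, r, n_classes):
    """Expand augmented sample x (true class r, 0-indexed) into n_classes-1 rows."""
    d = len(x)
    rows = []
    for j in range(n_classes):
        if j == r:
            continue
        row = np.zeros(d * n_classes)
        row[r * d:(r + 1) * d] = x     # +x in the true class's block
        row[j * d:(j + 1) * d] = -x    # -x in the rival class's block
        rows.append(row)
    return np.array(rows)

x = np.array([2.0, 5.0, 1.0, 1.0])     # 3 features + bias, as with att=3
rows = kesler_rows(x, r=2, n_classes=3)
print(rows.shape)                       # (2, 12): one constraint per rival class

Training then applies the ordinary fixed-increment rule to these rows (the w = w + 0.5 * row update in the while loop above), and prediction picks the block with the largest score, as in the final loop over k in range(clss).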
-------------------------------------------------------------------------------- /templateMatching/tm.py: --------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | # NOTE: the top of this file did not survive the dump; the imports, the P
4 | # constant and the opening of exhaustive_search() are reconstructed here to
5 | # match how they are used below, and P's value is an assumption.
6 | import math
7 |
8 | import cv2
9 | import numpy as np
10 |
11 | P = 16  # radius of the window that search() re-scans around the last hit (assumed value)
12 |
13 |
14 | def _debug_print(frame):  # assumed helper: show a frame and block until a key is pressed
15 | cv2.imshow('frame', frame)
16 | cv2.waitKey(0)
17 | cv2.destroyAllWindows()
18 |
19 |
20 | def exhaustive_search(frame, ref):
21 | best = -math.inf
22 | argbest = 0, 0
23 | for i in range(frame.shape[0]):
24 | for j in range(frame.shape[1]):
25 | ii = i - int(ref.shape[0] / 2)
26 | jj = j - int(ref.shape[1] / 2)
27 | if ii >= 0 and jj >= 0 and ii + ref.shape[0] <= frame.shape[0]\
28 | and jj + ref.shape[1] <= frame.shape[1]:
29 | temp = np.sum(ref.astype(int) * frame[ii:ii+ref.shape[0], jj:jj+ref.shape[1]].astype(int))/\
30 | (np.linalg.norm(ref) * np.linalg.norm(frame[ii:ii+ref.shape[0], jj:jj+ref.shape[1]]))
31 | if temp > best:
32 | best = temp
33 | argbest = i, j
34 | return argbest[0], argbest[1]
35 |
36 |
37 | def logarithmic_search(frame, ref):
38 | l = int(frame.shape[1]/4)  # initial step size; halved every round
39 | x, y = int(frame.shape[0]/2), int(frame.shape[1]/2)
40 |
41 | best = -math.inf
42 | while(True):
43 | for i in range(-1, 2):
44 | for j in range(-1, 2):
45 | #print('p1 ',x+i*l, y+j*l)
46 | ii = x + i * l - int(ref.shape[0] / 2)
47 | jj = y + j * l - int(ref.shape[1] / 2)
48 | if ii >= 0 and jj >= 0 and ii + ref.shape[0] <= frame.shape[0]\
49 | and jj + ref.shape[1] <= frame.shape[1]:
50 | #print(ii,jj)
51 | temp = np.sum(ref.astype(int) * frame[ii:ii+ref.shape[0], jj:jj+ref.shape[1]].astype(int))/\
52 | (np.linalg.norm(ref) * np.linalg.norm(frame[ii:ii+ref.shape[0], jj:jj+ref.shape[1]]))
53 | if temp > best:
54 | best = temp
55 | argbest = i, j
56 | x, y = x + argbest[0] * l, y + argbest[1] * l
57 | l = int(l / 2)
58 | if l < 1: break
59 | return x + argbest[0] * l * 2, y + argbest[1] * l * 2
60 |
61 |
62 | def search(frame, ref, x, y, p, method):
63 | threshold = lambda x : 0 if x < 0 else x  # clamp window corners to the frame
64 | xt, yt = method(frame[threshold(x - p): threshold(x + p), threshold(y - p): threshold(y + p)], ref)
65 | return threshold(x - p) + xt, threshold(y - p) + yt
66 |
67 | '''
68 | test = cv2.imread('test.jpg')
69 | ref = cv2.imread('ref.jpg')
70 |
71 | print(exhaustive_search(test, ref))
72 | x, y = logarithmic_search(test, ref)
73 | print(x, y)
74 | frame = cv2.rectangle(test,(int(y - ref.shape[1]/2), int(x - ref.shape[0]/2)), \
75 | (int(y + ref.shape[1]/2), int(x + ref.shape[0]/2)), (0, 0, 255), 3)
76 | _debug_print(frame)
77 | '''
78 |
79 | cap = cv2.VideoCapture('movie.mov')
80 | ref = cv2.imread('reference.jpg')
81 |
82 | out = cv2.VideoWriter('output.mov', cv2.VideoWriter_fourcc(*'XVID'), cap.get(cv2.CAP_PROP_FPS),\
83 | (int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)),int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))))
84 |
85 | if not cap.isOpened():
86 | print("Error opening video stream or file")
87 | exit(0)
88 |
89 |
90 | ret, frame = cap.read()
91 | while not ret: ret, frame = cap.read()  # skip any empty grabs at the start
92 |
93 | x, y = exhaustive_search(frame, ref)  # locate the target once, over the whole first frame
94 |
95 | num = 1
96 | while cap.isOpened():
97 | ret, frame = cap.read()
98 | if ret:
99 | x, y = search(frame, ref, x, y, P, logarithmic_search)  # then track inside a small window
100 | num+=1
101 | frame = cv2.rectangle(frame,(int(y - ref.shape[1]/2), int(x - ref.shape[0]/2)), \
102 | (int(y + ref.shape[1]/2), int(x + ref.shape[0]/2)), (0, 0, 255), 3)
103 | #_debug_print(frame)
104 | out.write(frame)
105 | #break
106 | else: break
107 |
108 |
109 | cap.release()
110 | out.release()
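tm.py above scores each candidate placement of the reference patch with normalized cross-correlation (NCC): the elementwise product of patch and window, summed, divided by the product of their Euclidean norms, which makes the score insensitive to uniform brightness scaling. exhaustive_search evaluates that score at every valid center, while logarithmic_search (three-step search) evaluates it only on a 3x3 grid of displacements with step l and halves l each round, so it needs roughly log2(l) rounds instead of a full scan. A minimal, self-contained sketch of the score itself, on a made-up 6x6 frame (not code from the repo):

# Sketch of the matching score tm.py maximizes: normalized cross-correlation
# between the reference patch and a same-sized window of the frame.
# The arrays here are tiny and invented for illustration.
import numpy as np

def ncc(patch, ref):
    num = np.sum(ref.astype(float) * patch.astype(float))
    den = np.linalg.norm(ref) * np.linalg.norm(patch)
    return num / den if den else 0.0

frame = np.zeros((6, 6), dtype=np.uint8)
frame[2:4, 3:5] = 200                       # bright 2x2 blob to find
ref = np.full((2, 2), 200, dtype=np.uint8)  # reference patch

scores = {(i, j): ncc(frame[i:i+2, j:j+2], ref)
          for i in range(5) for j in range(5)}
print(max(scores, key=scores.get))          # (2, 3): the blob's top-left corner

Once the first frame has been matched exhaustively, search() re-runs the match only inside a window of radius P around the previous position, which is what keeps the per-frame tracking cost low.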
-------------------------------------------------------------------------------- /templateMatching/.idea/misc.xml: --------------------------------------------------------------------------------

-------------------------------------------------------------------------------- /templateMatching/.idea/modules.xml: --------------------------------------------------------------------------------

-------------------------------------------------------------------------------- /templateMatching/.idea/templateMatching.iml: --------------------------------------------------------------------------------

-------------------------------------------------------------------------------- /templateMatching/.idea/workspace.xml: --------------------------------------------------------------------------------