├── GP
│   ├── ClassificationFPT
│   │   ├── algorithms_gp.py
│   │   ├── example_classification_fpt_gp.py
│   │   ├── functions.py
│   │   ├── fuzzify.py
│   │   ├── paper_complete_script.py
│   │   └── selection.py
│   └── SwitchingSelectionMethods
│       └── genetic.py
├── LICENSE
├── README.md
├── algorithms.py
├── datasets
│   ├── DowNorm_test.txt
│   ├── DowNorm_train.txt
│   ├── Pagie1_test.txt
│   ├── Pagie1_train.txt
│   ├── SPECT.test
│   ├── SPECT.train
│   ├── USA_census_test.csv
│   ├── USA_census_train.csv
│   ├── Vladislavleva4_test.txt
│   ├── Vladislavleva4_train.txt
│   ├── adult.data
│   ├── adult.test
│   ├── adult_test.csv
│   ├── adult_test_targets.csv
│   ├── adult_training.csv
│   ├── australian.dat
│   ├── bank-additional-full.csv
│   ├── banknote_Test.csv
│   ├── banknote_Train.csv
│   ├── compas-scores-two-years-violent.csv
│   ├── compas-scores-two-years.csv
│   ├── crx.data
│   ├── data_lawsuit.mat
│   ├── german.data
│   ├── haberman.csv
│   ├── haberman_labels.csv
│   ├── horse-colic.data
│   ├── horse-colic.test
│   ├── iris.data
│   ├── labels_lawsuit.mat
│   ├── lupus.csv
│   ├── lupus_labels.csv
│   ├── mushroom.csv
│   ├── parity3.csv
│   ├── parity4.csv
│   ├── parity5.csv
│   ├── pima.csv
│   ├── pima_labels.csv
│   ├── processed.cleveland.data
│   ├── satellite_test.csv
│   ├── satellite_train.csv
│   ├── segmentation.data
│   ├── segmentation.test
│   ├── spaceshipTitanic_sample_submision.csv
│   ├── spaceshipTitanic_test.csv
│   ├── spaceshipTitanic_train.csv
│   ├── spambase.csv
│   ├── transfusion.csv
│   ├── transfusion_labels.csv
│   ├── vehicle.csv
│   └── wine.data
├── example_classification.py
├── example_increment.py
├── example_parity.py
├── example_regression.py
├── functions.py
├── grammars
│   ├── Banknote.bnf
│   ├── Dow.bnf
│   ├── Pagie1.bnf
│   ├── TwoBoxes.bnf
│   ├── Vladislavleva4.bnf
│   ├── heartDisease.bnf
│   ├── lawnmower64.bnf
│   ├── parity3.bnf
│   ├── parity4.bnf
│   ├── parity4_v2.bnf
│   ├── parity4_v3.bnf
│   ├── parity5.bnf
│   ├── parity6.bnf
│   ├── quinticPolynomial.bnf
│   ├── simpleIncrement.bnf
│   └── spambase.bnf
└── grape.py
/GP/ClassificationFPT/functions.py:
--------------------------------------------------------------------------------
1 | import random
2 | import numpy as np
3 | import math
4 | import re
5 |
6 | def replace_substrings(input_string, replacements):
7 | # Function to replace matched pattern with corresponding list value
8 | def replacer(match):
9 | index = int(match.group(1)) # Extract the number after 'IN'
10 | return replacements[index] # Return the corresponding list value
11 |
12 | # Regular expression to find patterns like 'IN0', 'IN1', etc.
13 | pattern = r'IN(\d+)'
14 |
15 | # Replace all occurrences of the pattern in the input string
16 | result = re.sub(pattern, replacer, input_string)
17 |
18 | return result
19 |
20 | def median_abs_deviation(arr, axis=0):
21 | if not isinstance(arr, np.ndarray):
22 | raise ValueError("Input must be a NumPy array.")
23 |
24 | # Calculate the median along axis 0
25 | median = np.median(arr, axis=0)
26 |
27 | # Calculate the absolute deviations from the median along axis 0
28 | abs_deviations = np.abs(arr - median)
29 |
30 | # Calculate the median of the absolute deviations along axis 0
31 | mad = np.median(abs_deviations, axis=0)
32 |
33 | return mad
34 |
35 | def count_zeros_except_first_row(array):
36 | # Exclude the first row
37 | rows_to_count = array[1:]
38 |
39 | # Count the number of zeros
40 | zero_count = np.count_nonzero(rows_to_count == 0)
41 |
42 | return zero_count
43 |
44 | def count_zeros(array):
45 | # Count the number of zeros
46 | zero_count = np.count_nonzero(array == 0)
47 |
48 | return zero_count
49 |
50 | def shuffle_rows(arr):
51 | """
52 | It receives an array n x m, shuffles the rows, and returns the new array.
53 | """
54 | np.random.shuffle(arr)
55 | return arr
56 |
57 | def shuffle_rows_except_first(arr):
58 | """
59 | It receives an array n x m, shuffles the rows, except the first, and
60 | returns the new array.
61 | """
62 | arr_copy = np.copy(arr)
63 | first_row = arr_copy[0]
64 | np.random.shuffle(arr_copy[1:])
65 | return np.vstack((first_row, arr_copy[1:]))
66 |
67 | def remove_row(arr, index):
68 | """
69 | It receives an array n x m, removes the row according to the index, and
70 | returns the new array.
71 | """
72 | return np.delete(arr, index, axis=0)
73 |
74 | def add_index_column(arr):
75 | """
76 | It receives an array n x m, adds a column with the indexes, and returns
77 | the new array.
78 | """
79 | row_indices = np.arange(arr.shape[0]).reshape(-1, 1)
80 | return np.hstack((row_indices, arr))
81 |
82 | def remove_columns(arr, x):
83 | """
84 | It receives an array n x m and a value x. It checks the second row of the array
85 | and removes the columns whose value in that row is greater than x.
86 | """
87 | row = arr[1, :]
88 | mask = row <= x
89 | return arr[:, mask]
90 |
91 | def remove_columns_with_different_value(A, x):
92 | """
93 | It receives an array n x m and a value x. It checks the second row of the array
94 | and removes the columns whose value in that row differs from x.
95 | """
96 | second_row = A[1] # Extract the second row
97 | mask = second_row == x # Create a boolean mask
98 | result = A[:, mask] # Apply the mask to the original array
99 | return result
100 |
101 | def represent_matrix_behaviour(A, threshold):
102 | """
103 | It receives a numpy array A n x m and an array threshold with length n.
104 | It checks each value of A against the threshold of its row, replacing the value
105 | by 0 if it is smaller than the respective threshold, and by 1 otherwise.
106 | """
107 | mask = A < threshold[:, None] # Create a boolean mask using broadcasting
108 | result = np.where(mask, 0, 1) # Use np.where to replace values
109 |
110 | return result
111 |
112 | def remove_equal_rows(A):
113 | """
114 | It receives an array A and removes duplicate rows.
115 | """
116 | unique_rows, indices = np.unique(A, axis=0, return_index=True)
117 | result = A[indices]
118 | return result
119 |
120 | def remove_equal_columns(A):
121 | """
122 | It receives an array A and removes duplicate columns.
123 | The first row is ignored when checking whether two columns are equal
124 | to each other.
125 | """
126 | transposed_A = A.T # Transpose the array
127 | unique_cols, indices = np.unique(transposed_A[:,1:], axis=0, return_index=True) # compare ignoring the first row
128 | result = transposed_A[indices]
129 | return result.T # Transpose back to original shape
130 |
131 | def find_equal_columns(A, column_index):
132 | """
133 | It receives an array A and an index, and checks which columns are equal
134 | to the column with that index.
135 | It ignores the first row when checking if a column is equal to another one.
136 | """
137 | equal_column_indices = np.where(np.all(A[1:] == A[1:, column_index][:, None], axis=0))[0]
138 | return equal_column_indices
139 |
140 | def aggregate_rows(arr, batch_size):
141 | """
142 | It receives an array n x m, and a batch_size.
143 | Row 0 is kept untouched, since it contains the indexes.
144 | From row 1, it aggregates the values of the rows with their average
145 | according to the batch_size.
146 | """
147 | l, m = arr.shape
148 | n = l - 1 #first row has indexes, so we don't count it
149 | new_n = math.ceil(n / batch_size) + 1
150 |
151 | result = np.zeros((new_n, m))
152 | result[0] = arr[0]
153 |
154 | for i in range(1, new_n):
155 | start = (i - 1) * batch_size + 1
156 | end = min(i * batch_size, n)
157 | result[i] = np.sum(arr[start:end+1], axis=0) / (end - start + 1)
158 |
159 | return result
160 |
161 | def aggregate_rows_sum(arr, batch_size):
162 | """
163 | It receives an array n x m, and a batch_size.
164 | Row 0 is kept untouched, since it contains the indexes.
165 | From row 1, it aggregates the values of the rows with their addition
166 | according to the batch_size.
167 | """
168 | l, m = arr.shape
169 | n = l - 1 #first row has indexes, so we don't count it
170 | new_n = math.ceil(n / batch_size) + 1
171 |
172 | result = np.zeros((new_n, m))
173 | result[0] = arr[0]
174 |
175 | for i in range(1, new_n):
176 | start = (i - 1) * batch_size + 1
177 | end = min(i * batch_size, n)
178 | result[i] = np.sum(arr[start:end+1], axis=0)
179 |
180 | return result
181 |
182 | def WA(a, b, x):
183 | x = float(x)
184 | return x*a+(1-x)*b
185 |
186 | def OWA(a, b, x):
187 | x = float(x)
188 | return x*np.maximum(a, b)+(1-x)*np.minimum(a, b)
189 |
190 | def minimum(a, b):
191 | return np.minimum(a, b)
192 |
193 | def maximum(a, b):
194 | return np.maximum(a, b)
195 |
196 | def dilator(b):
197 | return b**0.5
198 |
199 | def dilator3(b):
200 | return b**(1/3)
201 |
202 | def dilator4(b):
203 | return b**0.25
204 |
205 | def concentrator(b):
206 | return b**2
207 |
208 | def concentrator3(b):
209 | return b**3
210 |
211 | def concentrator4(b):
212 | return b**4
213 |
214 | def fuzzy_AND(a, b):
215 | return a * b
216 |
217 | def fuzzy_OR(a, b):
218 | return a + b - a*b
219 |
220 | def complement(b):
221 | return 1 - b
--------------------------------------------------------------------------------
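
A minimal usage sketch of the helpers above (the arrays and the 'IN0'/'IN1' string are illustrative, not from the repository; membership degrees are assumed to lie in [0, 1]):

import numpy as np
from functions import replace_substrings, fuzzy_AND, fuzzy_OR, OWA

a = np.array([0.2, 0.7, 1.0])
b = np.array([0.5, 0.4, 0.9])

fuzzy_AND(a, b)   # algebraic product: [0.1, 0.28, 0.9]
fuzzy_OR(a, b)    # probabilistic sum a + b - a*b: [0.6, 0.82, 1.0]
OWA(a, b, 0.8)    # 0.8*max(a, b) + 0.2*min(a, b)
replace_substrings("fuzzy_AND(IN0, IN1)", ["x[0]", "x[1]"])
# -> 'fuzzy_AND(x[0], x[1])', i.e. each 'INi' placeholder is swapped for replacements[i]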
/GP/ClassificationFPT/fuzzify.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | """
4 | Created on Thu Jun 13 22:37:00 2019
5 |
6 | @author: allan
7 | """
8 |
9 | import numpy as np
10 | import pandas as pd
11 | import re
12 | import skfuzzy as fuzz
13 | import math
14 |
15 | def matrixDomain(DataFrame, numeric_columns=[], categorical_columns=[]):
16 | """
17 | Return a list of lists in the format:
18 | [
19 | ['c', 'S', 'M', 'A'], => categorical, followed by its categories
20 | ['n', 0, 5] => numerical, followed by its domain bounds
21 |
22 | ]
23 | """
24 |
25 | df = DataFrame.copy()
26 | _, numColumns = np.shape(df)
27 |
28 | if numColumns != len(numeric_columns) + len(categorical_columns):
29 | raise ValueError("There are columns not defined as numeric or categorical")
30 |
31 | df.dropna(inplace = True)
32 |
33 | if numeric_columns:
34 | if categorical_columns:
35 | dfNumeric = df.drop(columns=categorical_columns)
36 | else:
37 | dfNumeric = df
38 | numColumnsCategorical = 0
39 | _, numColumnsReal = np.shape(dfNumeric)
40 | nameColReal = dfNumeric.columns
41 | minimum = np.zeros([numColumnsReal], dtype=float)
42 | maximum = np.zeros([numColumnsReal], dtype=float)
43 | array = dfNumeric.values
44 | for i in range(numColumnsReal):
45 | minimum[i] = min(array[:,i])
46 | maximum[i] = max(array[:,i])
47 | if categorical_columns:
48 | if numeric_columns:
49 | dfCategorical = df.drop(columns=numeric_columns)
50 | else:
51 | dfCategorical = df
52 | numColumnsReal = 0
53 | _, numColumnsCategorical = np.shape(dfCategorical)
54 | nameColCategorical = dfCategorical.columns
55 |
56 | #Matrix domain, where the number of rows is the number of features
57 | matrixDomain = np.empty([numColumns,2], dtype=float)
58 |
59 | #Warning
60 | if (numColumnsReal + numColumnsCategorical) != numColumns:
61 | print("Attention! Presence of discreet non-categorical columns.")
62 | print("The domain matrix will not be filled.")
63 | print()
64 | return
65 |
66 | nameCol = df.columns #all feature names
67 |
68 | j, k = 0, 0 #indexes of numerical and categorical columns, respectively
69 | for i in range(numColumns):
70 | if j < numColumnsReal:
71 | if nameCol[i] == nameColReal[j]:
72 | #fill row i with min and max of feature j
73 | matrixDomain[i][:] = minimum[j], maximum[j]
74 | j += 1
75 | if k < numColumnsCategorical:
76 | if nameCol[i] == nameColCategorical[k]:
77 | #fill row with the number of categories
78 | matrixDomain[i] = len(dfCategorical[nameColCategorical[k]].unique())
79 | k += 1
80 |
81 | listDomain = []
82 | j, k = 0, 0 #indexes of numerical and categorical columns, respectively
83 | for i in range(numColumns):
84 | if j < numColumnsReal:
85 | if nameCol[i] == nameColReal[j]:
86 | #fill list i with min and max of feature j
87 | listDomain.append(['n', minimum[j], maximum[j]])
88 | j += 1
89 | if k < numColumnsCategorical:
90 | if nameCol[i] == nameColCategorical[k]:
91 | list_ = ['c']
92 | #fill list i with categories
93 | for l in range(len(dfCategorical[nameColCategorical[k]].unique())):
94 | list_.append(str(dfCategorical[nameColCategorical[k]].unique()[l]))
95 | listDomain.append(list_)
96 | k += 1
97 |
98 | return listDomain
99 |
100 | def fuzzifyDataFrame(DataFrame, nSets, matrixDomain):
101 |
102 | df = DataFrame.copy()
103 |
104 | numRows, numColumns = np.shape(df)
105 |
106 | #check that all features are represented in matrixDomain
107 | if len(matrixDomain) == 0:
108 | numVariables = 0
109 | else:
110 | numVariables = len(matrixDomain)
111 | if numVariables != numColumns:
112 | print("Domain matrix does not represent all the variables")
113 | return
114 |
115 | totalIndexes = list(df.index)
116 |
117 | df.dropna(inplace = True)
118 |
119 | numRowsAposRemocao,_ = np.shape(df)
120 | numLinhasEliminadas = numRows - numRowsAposRemocao
121 | if numLinhasEliminadas != 0:
122 | print("Warning: %i lines were deleted because they didn't contain all the attributes" %numLinhasEliminadas)
123 |
124 | #indexes of non-removed data
125 | validIndexes = list(df.index)
126 |
127 | validNumberRows, _ = np.shape(df) #new number of rows (the number of columns didn't change)
128 |
129 | #keep in totalIndexes only the removed values
130 | for i in range(validNumberRows):
131 | totalIndexes.remove(validIndexes[i])
132 |
133 | dataReal = df.copy()
134 | dataCategorical = df.copy()
135 |
136 | nonReal = [] #eliminate categorical data in dataReal
137 | nonCategorical = [] #eliminate non-categorical data in dataCategorical
138 |
139 | for i in range(numVariables):
140 | if matrixDomain[i][0] == 'c': #categorical data
141 | nonReal.append(i)
142 | else: #non-categorical data
143 | nonCategorical.append(i)
144 |
145 | dataReal.drop(dataReal.columns[nonReal], axis=1, inplace=True)
146 | dataCategorical.drop(dataCategorical.columns[nonCategorical], axis=1, inplace=True)
147 |
148 | _, numColumnsReal = np.shape(dataReal) #the row count doesn't matter here; it equals validNumberRows
149 |
150 | _, numColumnsCategorical = np.shape(dataCategorical)
151 |
152 | nameCol = df.columns #names of all features
153 | nameColReal = dataReal.columns #names of fuzzy features
154 | nameColCategorical = dataCategorical.columns #names of categorical features
155 |
156 | arraySets = np.empty(numColumns, dtype=int)
157 | #arraySets keeps the number of sets to split each feature
158 | #If nSets is an integer, all non-categorical features will be split into the same number of sets
159 | #If it's an array, each position refers to the respective column
160 | #The position referring to a categorical feature should hold the number of categories of that feature
161 | j, k = 0, 0
162 | if type(nSets) == int:
163 | if nSets < 2:
164 | print("Number of sets must be greater than or equal to 2")
165 | return
166 | else:
167 | for i in range(numColumns):
168 | if numColumnsReal > j: #if there are any more columns to verify
169 | if nameCol[i] == nameColReal[j]:
170 | arraySets[i] = nSets
171 | j += 1
172 | if numColumnsCategorical > k: #same check for the categorical columns
173 | if nameCol[i] == nameColCategorical[k]:
174 | arraySets[i] = len(matrixDomain[i]) - 1 #-1 because the first position is 'n' or 'c'
175 | k += 1
176 |
177 | else: #if it's an array
178 | nSetsSize = len(nSets)
179 | if numVariables != nSetsSize:
180 | print("Size of the array nSets must be equal to the number of variables.")
181 | return
182 | for i in range(numColumns):
183 | if nSets[i] < 2:
184 | print("Number of sets must be greater than or equal to 2")
185 | return
186 | arraySets[i] = nSets[i]
187 |
188 | #if some rows of the dataframe were removed (missing values), indexing could be tricky,
189 | #so we copy the dataframe values into a matrix
190 | #The positions corresponding to removed rows get zero in the matrix
191 | matrixDataReal = np.zeros([numRows,numColumnsReal], dtype=float)
192 |
193 | sumSets = int(sum(arraySets)) #total of sets
194 | for i in range(numColumns):
195 | if matrixDomain[i][0] == 'c' and arraySets[i] == 2:
196 | sumSets -= 1
197 | pertinenceMatrix = np.zeros([numRows,sumSets], dtype=float)
198 | pertinenceMatrixDF = {} #final dataframe
199 |
200 | i = 0 #we cannot use the index as pointer because of possible missing rows
201 | for index, row in dataReal.iterrows():
202 | for j in range(numColumnsReal):
203 | matrixDataReal[validIndexes[i]][j] = row[nameColReal[j]]
204 | i += 1
205 |
206 | actualColumn = 0 #column to be filled now
207 | actualIndexSets = 0
208 | for i in range(numColumns):
209 | if matrixDomain[i][0] == 'c': #categorical data
210 | if arraySets[i] != 2:
211 | arrayCategories = []
212 | for j in range(arraySets[i]):
213 | arrayCategories.append(matrixDomain[i][j+1])
214 | j = 0 #position in the array of valid data
215 | for index in range(validNumberRows): #for each valid row
216 | for k in range(arraySets[i]): #for each set of the current feature
217 | if arrayCategories[k] == str(df.loc[validIndexes[j]][i]): #when the categories are numbers, problems sometimes occur; check whether str() fixes them
218 | #if the object belongs to the current category, the position in the membership matrix is 1
219 | pertinenceMatrix[validIndexes[j],actualColumn+k] = 1
220 | else:
221 | #otherwise it is 0
222 | pertinenceMatrix[validIndexes[j],actualColumn+k] = 0
223 | j += 1
224 | actualColumn += arraySets[i] #all columns for the sets of the current variable have been filled
225 | actualIndexSets += 1
226 | else: #only two categories: encode a single column with 0 and 1
227 | arrayCategories = []
228 | for j in range(arraySets[i]):
229 | arrayCategories.append(matrixDomain[i][j+1])
230 | j = 0 #position in the array of valid indexes
231 | for index in range(validNumberRows): #for each valid row
232 | if arrayCategories[0] == str(df.loc[validIndexes[j]][i]):
233 | pertinenceMatrix[validIndexes[j],actualColumn] = 0
234 | elif arrayCategories[1] == str(df.loc[validIndexes[j]][i]):
235 | pertinenceMatrix[validIndexes[j],actualColumn] = 1
236 | else:
237 | raise TypeError("check fuzzification")
238 | j += 1
239 | actualColumn += 1 #all columns for the sets of the current variable have been filled
240 | actualIndexSets += 1
241 | else: #real-valued (non-categorical) data
242 | lowerBound = matrixDomain[i][1] #start of the domain
243 | upperBound = matrixDomain[i][2] #end of the domain
244 | width = (upperBound - lowerBound) / (arraySets[i] - 1) #width of each fuzzy set, i.e., of its rising or falling edge
245 | step = (upperBound - lowerBound) / 1000
246 |
247 | #fuzzification
248 |
249 | x = np.arange(lowerBound, upperBound + step, step)
250 |
251 | qual = [[[] for _ in range(validNumberRows)] for _ in range(arraySets[i])] #fuzzy sets
252 | qual_level = [[] for _ in range(arraySets[i])] #membership values
253 |
254 | #first fuzzy term
255 | a = lowerBound - step
256 | b = lowerBound
257 | c = lowerBound + width
258 | qual[0] = fuzz.trimf(x, [a, b, c])
259 |
260 | #middle fuzzy terms
261 | if arraySets[i] > 2:
262 | for j in range(arraySets[i]-2): #with -1 it would cover the middle terms and the last one
263 | a = b
264 | b = c
265 | c = c + width
266 | qual[j+1] = fuzz.trimf(x, [a, b, c])
267 |
268 | #last fuzzy term
269 | a = upperBound - width
270 | b = upperBound
271 | c = upperBound + step
272 | qual[arraySets[i]-1] = fuzz.trimf(x, [a, b, c])
273 |
274 | m = 0
275 | for index in range(validNumberRows):
276 | data = DataFrame.loc[validIndexes[m]][i]
277 | #to avoid problems at the extremes
278 | if data <= lowerBound:
279 | qual_level[0] = 1
280 | pertinenceMatrix[validIndexes[m],actualColumn] = 1
281 | for k in range(arraySets[i]-1):
282 | qual_level[k+1] = 0
283 | pertinenceMatrix[validIndexes[m],actualColumn+k+1] = 0
284 | elif data >= upperBound:
285 | qual_level[arraySets[i]-1] = 1
286 | pertinenceMatrix[validIndexes[m],actualColumn+arraySets[i]-1] = 1
287 | for k in range(arraySets[i]-1):
288 | qual_level[k] = 0
289 | pertinenceMatrix[validIndexes[m],actualColumn+k] = 0
290 | else:
291 | for k in range(arraySets[i]):
292 | qual_level[k] = fuzz.interp_membership(x, qual[k], data)
293 | pertinenceMatrix[validIndexes[m],actualColumn+k] = qual_level[k]
294 | m += 1
295 | actualColumn += arraySets[i]
296 | actualIndexSets += 1
297 |
298 | #create the dataframe from the matrix
299 | actualColumn = 0
300 | for i in range(numColumns):
301 | if arraySets[i] == 2:
302 | pertinenceMatrixDF['{0}'.format(nameCol[i])] = pertinenceMatrix[:,actualColumn]
303 | actualColumn += 1
304 | else:
305 | for j in range(arraySets[i]):
306 | pertinenceMatrixDF['{0}{1}{2}'.format(nameCol[i],'-',j)] = pertinenceMatrix[:,actualColumn+j]
307 | actualColumn += arraySets[i]
308 | pertinenceDataFrame = pd.DataFrame(pertinenceMatrixDF)
309 |
310 | #drop from the final dataframe the same rows that were removed from the initial dataframe
311 | finalPertinenceDataFrame = pertinenceDataFrame.drop(totalIndexes)
312 |
313 | return finalPertinenceDataFrame
--------------------------------------------------------------------------------
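
A small end-to-end sketch of matrixDomain and fuzzifyDataFrame on a made-up two-column DataFrame (requires scikit-fuzzy; column names and values are illustrative):

import pandas as pd
from fuzzify import matrixDomain, fuzzifyDataFrame

df = pd.DataFrame({'age': [20.0, 35.0, 50.0], 'sex': ['M', 'F', 'M']})
domain = matrixDomain(df, numeric_columns=['age'], categorical_columns=['sex'])
# -> [['n', 20.0, 50.0], ['c', 'M', 'F']]
fuzzy_df = fuzzifyDataFrame(df, 3, domain)
# 'age' becomes three triangular membership columns 'age-0', 'age-1', 'age-2',
# while the two-category 'sex' collapses into a single 0/1 column named 'sex'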
/GP/SwitchingSelectionMethods/genetic.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Created on Tue May 10 06:53:28 2022
4 |
5 | @author: allan
6 | """
7 |
8 | import re
9 | import math
10 | from operator import attrgetter
11 | import numpy as np
12 | import random
13 | import copy
14 | import statistics
15 |
16 | from functions import shuffle_rows_except_first, remove_row, add_index_column, remove_columns, aggregate_rows, represent_matrix_behaviour, remove_equal_rows, remove_equal_columns, find_equal_columns, remove_columns_with_different_value, aggregate_rows_sum, count_zeros_except_first_row, count_zeros
17 |
18 | def median_abs_deviation(arr, axis=0):
19 | if not isinstance(arr, np.ndarray):
20 | raise ValueError("Input must be a NumPy array.")
21 |
22 | # Calculate the median along axis 0
23 | median = np.median(arr, axis=0)
24 |
25 | # Calculate the absolute deviations from the median along axis 0
26 | abs_deviations = np.abs(arr - median)
27 |
28 | # Calculate the median of the absolute deviations along axis 0
29 | mad = np.median(abs_deviations, axis=0)
30 |
31 | return mad
32 |
33 | def selEpsilonLexi2_nodesCountTies(individuals, k):
34 | """
35 | Same as selEpsilonLexi2_nodesCount, but it also registers the number of ties of each selected individual in the attribute 'ties'.
36 |
37 | """
38 | selected_individuals = []
39 | l_samples = np.shape(individuals[0].fitness_each_sample)[0]
40 |
41 | cases = list(range(0,l_samples))
42 | candidates = individuals
43 |
44 | error_vectors = [ind.fitness_each_sample for ind in candidates]
45 |
46 | fitness_cases_matrix = np.array(error_vectors) # inds (rows) x samples (cols)
47 | min_ = np.nanmin(fitness_cases_matrix, axis=0)
48 |
49 | mad = median_abs_deviation(fitness_cases_matrix, axis=0)
50 | epsilon = mad
51 | avg_epsilon = np.mean(epsilon)
52 |
53 | for i in range(len(candidates)):
54 | for j in range(l_samples):
55 | if fitness_cases_matrix[i][j] <= min_[j] + epsilon[j]:
56 | fitness_cases_matrix[i][j] = 0
57 | #candidates[i].fitness_each_sample_discrete[j] = 1
58 | else:
59 | fitness_cases_matrix[i][j] = 1
60 | #candidates[i].fitness_each_sample_discrete[j] = 0
61 | candidates[i].fitness_each_sample_discrete = list(fitness_cases_matrix[i,:])
62 |
63 | n_zeros = count_zeros(fitness_cases_matrix) #number of zeros in the matrix with discrete fitness cases
64 | avg_zeros = n_zeros / len(individuals) #average number of zeros per individual
65 | avg_zeros = avg_zeros / l_samples #represent as a percentage of the number of samples
66 |
67 | error_vectors = list(fitness_cases_matrix)
68 |
69 | unique_error_vectors = list(set([tuple(i) for i in error_vectors]))
70 | unique_error_vectors = [list(i) for i in unique_error_vectors]
71 |
72 | candidates_prefiltered_set = []
73 | for i in range(len(unique_error_vectors)):
74 | cands = [ind for ind in candidates if ind.fitness_each_sample_discrete == unique_error_vectors[i]]
75 | for ind in cands:
76 | ind.ties = len(cands)
77 | f = min
78 | best_val_for_nodes = f(map(lambda x: x.nodes, cands))
79 | cands = [ind for ind in cands if ind.nodes == best_val_for_nodes]
80 | candidates_prefiltered_set.append(cands) #list of lists, each one with the inds with the same error vectors and same number of nodes
81 |
82 | indexes = []
83 | for i in range(k):
84 | #fill the pool only with candidates with unique error vectors
85 | pool = []
86 | for list_ in candidates_prefiltered_set:
87 | pool.append(random.choice(list_))
88 |
89 | random.shuffle(cases)
90 | count_ = 0
91 | while len(cases) > 0 and len(pool) > 1:
92 | count_ += 1
93 | f = min
94 | best_val_for_case = f(map(lambda x: x.fitness_each_sample_discrete[cases[0]], pool))
95 | pool = [ind for ind in pool if ind.fitness_each_sample_discrete[cases[0]] == best_val_for_case]
96 | del cases[0]
97 |
98 | pool[0].n_cases = count_
99 | pool[0].avg_zeros = avg_zeros
100 | pool[0].avg_epsilon = avg_epsilon
101 | selected_individuals.append(pool[0]) #Select the remaining candidate
102 | cases = list(range(0,l_samples)) #Recreate the list of cases
103 |
104 | index = individuals.index(pool[0])
105 | indexes.append(index)
106 |
107 | selected_individuals[0].unique_selected = len(set(indexes)) / len(individuals) # percentage of unique inds selected
108 |
109 | return selected_individuals
110 |
111 | def selDownSampledEpsilonLexi2_nodesCountTies(individuals, k, s=0.1):
112 | """
113 | Down-sampled version of selEpsilonLexi2_nodesCountTies: each selection event uses a random subset with s*100% of the fitness cases.
114 | """
115 | selected_individuals = []
116 | l_samples = np.shape(individuals[0].fitness_each_sample)[0]
117 |
118 | #Parameters for down-sampling
119 | num_columns = l_samples
120 | sample_size = int(num_columns * s)
121 |
122 | cases = list(range(0,sample_size))
123 | candidates = individuals
124 |
125 | error_vectors = [ind.fitness_each_sample for ind in candidates]
126 | #down_sampled_error_vectors = random.sample(error_vectors, int(s * len(individuals)))
127 |
128 | fitness_cases_matrix = np.array(error_vectors) # inds (rows) x samples (cols)
129 |
130 | #Down-sampling
131 | sampled_columns_indices = np.random.choice(num_columns, size=sample_size, replace=False)
132 | sampled_array = fitness_cases_matrix[:, sampled_columns_indices]
133 |
134 | #Pre-filtering
135 | min_ = np.nanmin(sampled_array, axis=0)
136 | mad = median_abs_deviation(sampled_array, axis=0)
137 | avg_epsilon = np.mean(mad)
138 |
139 | for i in range(len(candidates)):
140 | candidates[i].fitness_each_downsampled = [None] * sample_size
141 | for j in range(sample_size):
142 | if sampled_array[i][j] <= min_[j] + mad[j]:
143 | sampled_array[i][j] = 1
144 | candidates[i].fitness_each_downsampled[j] = 1
145 | else:
146 | sampled_array[i][j] = 0
147 | candidates[i].fitness_each_downsampled[j] = 0
148 |
149 | n_zeros = count_zeros(sampled_array) #number of zeros in the matrix with discrete fitness cases
150 | avg_zeros = n_zeros / len(individuals) #average number of zeros per individual
151 | avg_zeros = avg_zeros / sample_size #represent as a percentage of the number of samples
152 |
153 | error_vectors = list(sampled_array)
154 |
155 | unique_error_vectors = list(set([tuple(i) for i in error_vectors]))
156 | unique_error_vectors = [list(i) for i in unique_error_vectors]
157 |
158 | candidates_prefiltered_set = []
159 | for i in range(len(unique_error_vectors)):
160 | cands = [ind for ind in candidates if ind.fitness_each_downsampled == unique_error_vectors[i]]
161 | for ind in cands:
162 | ind.ties = len(cands)
163 | f = min
164 | best_val_for_nodes = f(map(lambda x: x.nodes, cands))
165 | cands = [ind for ind in cands if ind.nodes == best_val_for_nodes]
166 | candidates_prefiltered_set.append(cands) #list of lists, each one with the inds with the same error vectors and same number of nodes
167 |
168 | indexes = []
169 | for i in range(k):
170 | #fill the pool only with candidates with unique error vectors
171 | pool = []
172 | for list_ in candidates_prefiltered_set:
173 | pool.append(random.choice(list_))
174 | random.shuffle(cases)
175 | count_ = 0
176 | while len(cases) > 0 and len(pool) > 1:
177 | count_ += 1
178 | f = max
179 | best_val_for_case = f(map(lambda x: x.fitness_each_downsampled[cases[0]], pool))
180 | pool = [ind for ind in pool if ind.fitness_each_downsampled[cases[0]] == best_val_for_case]
181 | del cases[0]
182 |
183 | pool[0].n_cases = count_
184 | pool[0].avg_zeros = avg_zeros
185 | pool[0].avg_epsilon = avg_epsilon
186 | selected_individuals.append(pool[0]) #Select the remaining candidate
187 | cases = list(range(0,sample_size)) #Recreate the list of cases
188 |
189 | index = individuals.index(pool[0])
190 | indexes.append(index)
191 |
192 | selected_individuals[0].unique_selected = len(set(indexes)) / len(individuals) # percentage of unique inds selected
193 |
194 | return selected_individuals
195 |
196 | def selDownSampledEpsilonLexicase(individuals, k, s=0.1):
197 | """
198 | Down-sampled epsilon-lexicase selection, without the node-count tie-breaking used in selDownSampledEpsilonLexi2_nodesCountTies.
199 | """
200 | selected_individuals = []
201 | l_samples = np.shape(individuals[0].fitness_each_sample)[0]
202 |
203 | #Parameters for down-sampling
204 | num_columns = l_samples
205 | sample_size = int(num_columns * s)
206 |
207 | cases = list(range(0,sample_size))
208 | candidates = individuals
209 |
210 | error_vectors = [ind.fitness_each_sample for ind in candidates]
211 | #down_sampled_error_vectors = random.sample(error_vectors, int(s * len(individuals)))
212 |
213 | fitness_cases_matrix = np.array(error_vectors) # inds (rows) x samples (cols)
214 |
215 | #Down-sampling
216 | sampled_columns_indices = np.random.choice(num_columns, size=sample_size, replace=False)
217 | sampled_array = fitness_cases_matrix[:, sampled_columns_indices]
218 |
219 | #Pre-filtering
220 | min_ = np.nanmin(sampled_array, axis=0)
221 | mad = median_abs_deviation(sampled_array, axis=0)
222 |
223 | for i in range(len(candidates)):
224 | candidates[i].fitness_each_downsampled = [None] * sample_size
225 | for j in range(sample_size):
226 | if sampled_array[i][j] <= min_[j] + mad[j]:
227 | sampled_array[i][j] = 1
228 | candidates[i].fitness_each_downsampled[j] = 1
229 | else:
230 | sampled_array[i][j] = 0
231 | candidates[i].fitness_each_downsampled[j] = 0
232 |
233 | error_vectors = list(sampled_array)
234 |
235 | unique_error_vectors = list(set([tuple(i) for i in error_vectors]))
236 | unique_error_vectors = [list(i) for i in unique_error_vectors]
237 |
238 | candidates_prefiltered_set = []
239 | for i in range(len(unique_error_vectors)):
240 | cands = [ind for ind in candidates if ind.fitness_each_downsampled == unique_error_vectors[i]]
241 | for ind in cands:
242 | ind.ties = len(cands)
243 | candidates_prefiltered_set.append(cands) #list of lists, each one with the inds with the same error vectors and same number of nodes
244 |
245 | for i in range(k):
246 | #fill the pool only with candidates with unique error vectors
247 | pool = []
248 | for list_ in candidates_prefiltered_set:
249 | pool.append(random.choice(list_))
250 | random.shuffle(cases)
251 | count_ = 0
252 | while len(cases) > 0 and len(pool) > 1:
253 | count_ += 1
254 | f = max
255 | best_val_for_case = f(map(lambda x: x.fitness_each_downsampled[cases[0]], pool))
256 | pool = [ind for ind in pool if ind.fitness_each_downsampled[cases[0]] == best_val_for_case]
257 | del cases[0]
258 |
259 | pool[0].n_cases = count_
260 | selected_individuals.append(pool[0]) #Select the remaining candidate
261 | cases = list(range(0,sample_size)) #Recreate the list of cases
262 |
263 | return selected_individuals
264 |
--------------------------------------------------------------------------------
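
A quick numeric check of the epsilon pre-filtering shared by the selectors above, using the module's median_abs_deviation on a made-up error matrix (rows are individuals, columns are fitness cases):

import numpy as np
from genetic import median_abs_deviation

errors = np.array([[0.0, 1.0, 3.0],
                   [0.5, 1.2, 0.0],
                   [2.0, 5.0, 0.1]])
min_ = np.nanmin(errors, axis=0)                # best error per case: [0. 1. 0.]
epsilon = median_abs_deviation(errors, axis=0)  # per-case MAD: [0.5 0.2 0.1]
print(errors <= min_ + epsilon)  # elite on case j when error <= min_[j] + epsilon[j]
# [[ True  True False]
#  [ True  True  True]
#  [False False  True]]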
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2022-2023, BDS Research Group at University of Limerick
4 |
5 | Redistribution and use in source and binary forms, with or without
6 | modification, are permitted provided that the following conditions are met:
7 |
8 | 1. Redistributions of source code must retain the above copyright notice, this
9 | list of conditions and the following disclaimer.
10 |
11 | 2. Redistributions in binary form must reproduce the above copyright notice,
12 | this list of conditions and the following disclaimer in the documentation
13 | and/or other materials provided with the distribution.
14 |
15 | 3. Neither the name of the copyright holder nor the names of its
16 | contributors may be used to endorse or promote products derived from
17 | this software without specific prior written permission.
18 |
19 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
20 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
21 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
22 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
23 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
24 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
25 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
26 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
27 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # GRAPE: Grammatical Algorithms in Python for Evolution
2 |
3 | [GRAPE](https://www.mdpi.com/2624-6120/3/3/39) is an implementation of Grammatical Evolution (GE) in [DEAP](https://deap.readthedocs.io/en/master/), an Evolutionary Computation framework in Python.
4 |
5 | How to cite:
6 | ```
7 | Allan de Lima, Samuel Carvalho, Douglas Mota Dias, Enrique Naredo, Joseph P.
8 | Sullivan, and Conor Ryan. 2022. GRAPE: Grammatical Algorithms in Python for
9 | Evolution. Signals 3, 3 (2022), 642–663. https://doi.org/10.3390/signals3030039
10 | ```
11 |
--------------------------------------------------------------------------------
/algorithms.py:
--------------------------------------------------------------------------------
1 | # This file is part of DEAP.
2 | #
3 | # DEAP is free software: you can redistribute it and/or modify
4 | # it under the terms of the GNU Lesser General Public License as
5 | # published by the Free Software Foundation, either version 3 of
6 | # the License, or (at your option) any later version.
7 | #
8 | # DEAP is distributed in the hope that it will be useful,
9 | # but WITHOUT ANY WARRANTY; without even the implied warranty of
10 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
11 | # GNU Lesser General Public License for more details.
12 | #
13 | # You should have received a copy of the GNU Lesser General Public
14 | # License along with DEAP. If not, see <http://www.gnu.org/licenses/>.
15 |
16 | import random
17 | import math
18 | import numpy as np
19 | import time
20 | import warnings
21 |
22 | from deap import tools
23 |
24 | def varAnd(population, toolbox, cxpb, mutpb,
25 | bnf_grammar, codon_size, max_tree_depth, codon_consumption,
26 | genome_representation, max_genome_length):
27 | """Part of an evolutionary algorithm applying only the variation part
28 | (crossover **and** mutation). The modified individuals have their
29 | fitness invalidated. The individuals are cloned so returned population is
30 | independent of the input population.
31 |
32 | :param population: A list of individuals to vary.
33 | :param toolbox: A :class:`~deap.base.Toolbox` that contains the evolution
34 | operators.
35 | :param cxpb: The probability of mating two individuals.
36 | :param mutpb: The probability of mutating an individual.
37 | :returns: A list of varied individuals that are independent of their
38 | parents.
39 |
40 | """
41 | offspring = [toolbox.clone(ind) for ind in population]
42 |
43 | # Apply crossover and mutation on the offspring
44 | for i in range(1, len(offspring), 2):
45 | if random.random() < cxpb:
46 | offspring[i - 1], offspring[i] = toolbox.mate(offspring[i - 1],
47 | offspring[i],
48 | bnf_grammar,
49 | max_tree_depth,
50 | codon_consumption,
51 | genome_representation,
52 | max_genome_length)
53 |
54 | for i in range(len(offspring)):
55 | offspring[i], = toolbox.mutate(offspring[i], mutpb,
56 | codon_size, bnf_grammar,
57 | max_tree_depth, codon_consumption,
58 | max_genome_length)
59 |
60 | return offspring
61 |
62 | class hofWarning(UserWarning):
63 | pass
64 |
65 | def ge_eaSimpleWithElitism(population, toolbox, cxpb, mutpb, ngen, elite_size,
66 | bnf_grammar, codon_size, max_tree_depth,
67 | max_genome_length=None,
68 | points_train=None, points_test=None, codon_consumption='eager',
69 | report_items=None,
70 | genome_representation='list',
71 | stats=None, halloffame=None,
72 | verbose=__debug__):
73 | """This algorithm reproduce the simplest evolutionary algorithm as
74 | presented in chapter 7 of [Back2000]_, with some adaptations to run GE
75 | on GRAPE.
76 | :param population: A list of individuals.
77 | :param toolbox: A :class:`~deap.base.Toolbox` that contains the evolution
78 | operators.
79 | :param cxpb: The probability of mating two individuals.
80 | :param mutpb: The probability of mutating an individual.
81 | :param ngen: The number of generations.
82 | :param elite_size: The number of best individuals to be copied to the
83 | next generation.
84 | :params bnf_grammar, codon_size, max_tree_depth: Parameters
85 | used to map the individuals after crossover and
86 | mutation in order to check if they are valid.
87 | :param stats: A :class:`~deap.tools.Statistics` object that is updated
88 | inplace, optional.
89 | :param halloffame: A :class:`~deap.tools.HallOfFame` object that will
90 | contain the best individuals, optional.
91 | :param verbose: Whether or not to log the statistics.
92 | :returns: The final population
93 | :returns: A :class:`~deap.tools.Logbook` with the statistics of the
94 | evolution
95 | """
96 |
97 | logbook = tools.Logbook()
98 |
99 | if halloffame is None:
100 | if elite_size != 0:
101 | raise ValueError("You should add a hof object to use elitism.")
102 | else:
103 | warnings.warn('Without a hof object, the results of the best individual will not be registered.', hofWarning)
104 | logbook.header = ['gen', 'invalid'] + (stats.fields if stats else []) + ['avg_length', 'avg_nodes', 'avg_depth', 'avg_used_codons', 'behavioural_diversity', 'structural_diversity', 'fitness_diversity', 'selection_time', 'generation_time']
105 | else:
106 | if halloffame.maxsize < 1:
107 | raise ValueError("HALLOFFAME_SIZE should be greater or equal to 1")
108 | if elite_size > halloffame.maxsize:
109 | raise ValueError("HALLOFFAME_SIZE should be greater or equal to ELITE_SIZE")
110 | if points_test:
111 | logbook.header = ['gen', 'invalid'] + (stats.fields if stats else []) + ['fitness_test', 'best_ind_length', 'avg_length', 'best_ind_nodes', 'avg_nodes', 'best_ind_depth', 'avg_depth', 'avg_used_codons', 'best_ind_used_codons', 'behavioural_diversity', 'structural_diversity', 'fitness_diversity', 'selection_time', 'generation_time']
112 | else:
113 | logbook.header = ['gen', 'invalid'] + (stats.fields if stats else []) + ['best_ind_length', 'avg_length', 'best_ind_nodes', 'avg_nodes', 'best_ind_depth', 'avg_depth', 'avg_used_codons', 'best_ind_used_codons', 'behavioural_diversity', 'structural_diversity', 'fitness_diversity', 'selection_time', 'generation_time']
114 |
115 | start_gen = time.time()
116 | # Evaluate the individuals with an invalid fitness
117 | for ind in population:
118 | if not ind.fitness.valid:
119 | ind.fitness.values = toolbox.evaluate(ind, points_train)
120 |
121 | valid0 = [ind for ind in population if not ind.invalid]
122 | valid = [ind for ind in valid0 if not math.isnan(ind.fitness.values[0])]
123 | if len(valid0) != len(valid):
124 | warnings.warn("Warning: There are valid individuals with fitness = NaN in the population. We will ignore them.")
125 | invalid = len(population) - len(valid0) #We use the original number of invalids in this case, because we just want to count the completely mapped individuals
126 |
127 | list_structures = []
128 | if 'fitness_diversity' in report_items:
129 | list_fitnesses = []
130 | if 'behavioural_diversity' in report_items:
131 | behaviours = np.zeros([len(valid), len(valid[0].fitness_each_sample)], dtype=float)
132 |
133 | #for ind in offspring:
134 | for idx, ind in enumerate(valid):
135 | list_structures.append(str(ind.structure))
136 | if 'fitness_diversity' in report_items:
137 | list_fitnesses.append(str(ind.fitness.values[0]))
138 | if 'behavioural_diversity' in report_items:
139 | behaviours[idx, :] = ind.fitness_each_sample
140 |
141 | unique_structures = np.unique(list_structures, return_counts=False)
142 | if 'fitness_diversity' in report_items:
143 | unique_fitnesses = np.unique(list_fitnesses, return_counts=False)
144 | if 'behavioural_diversity' in report_items:
145 | unique_behaviours = np.unique(behaviours, axis=0)
146 |
147 | structural_diversity = len(unique_structures)/len(population)
148 | fitness_diversity = len(unique_fitnesses)/(len(points_train[1])+1) if 'fitness_diversity' in report_items else 0 #TODO generalise for other problems, because it only works if the fitness is proportional to the number of testcases correctly predicted
149 | behavioural_diversity = len(unique_behaviours)/len(population) if 'behavioural_diversity' in report_items else 0
150 |
151 | # Update the hall of fame with the generated individuals
152 | if halloffame is not None:
153 | halloffame.update(valid)
154 | best_ind_length = len(halloffame.items[0].genome)
155 | best_ind_nodes = halloffame.items[0].nodes
156 | best_ind_depth = halloffame.items[0].depth
157 | best_ind_used_codons = halloffame.items[0].used_codons
158 | if not verbose:
159 | print("gen =", 0, ", Best fitness =", halloffame.items[0].fitness.values)
160 |
161 | length = [len(ind.genome) for ind in valid]
162 | avg_length = sum(length)/len(length)
163 |
164 | nodes = [ind.nodes for ind in valid]
165 | avg_nodes = sum(nodes)/len(nodes)
166 |
167 | depth = [ind.depth for ind in valid]
168 | avg_depth = sum(depth)/len(depth)
169 |
170 | used_codons = [ind.used_codons for ind in valid]
171 | avg_used_codons = sum(used_codons)/len(used_codons)
172 |
173 | end_gen = time.time()
174 | generation_time = end_gen-start_gen
175 |
176 | selection_time = 0
177 |
178 | if points_test:
179 | fitness_test = np.NaN
180 |
181 | record = stats.compile(population) if stats else {}
182 | if points_test:
183 | logbook.record(gen=0, invalid=invalid, **record,
184 | fitness_test=fitness_test,
185 | best_ind_length=best_ind_length, avg_length=avg_length,
186 | best_ind_nodes=best_ind_nodes,
187 | avg_nodes=avg_nodes,
188 | best_ind_depth=best_ind_depth,
189 | avg_depth=avg_depth,
190 | avg_used_codons=avg_used_codons,
191 | best_ind_used_codons=best_ind_used_codons,
192 | behavioural_diversity=behavioural_diversity,
193 | structural_diversity=structural_diversity,
194 | fitness_diversity=fitness_diversity,
195 | selection_time=selection_time,
196 | generation_time=generation_time)
197 | else:
198 | logbook.record(gen=0, invalid=invalid, **record,
199 | best_ind_length=best_ind_length, avg_length=avg_length,
200 | best_ind_nodes=best_ind_nodes,
201 | avg_nodes=avg_nodes,
202 | best_ind_depth=best_ind_depth,
203 | avg_depth=avg_depth,
204 | avg_used_codons=avg_used_codons,
205 | best_ind_used_codons=best_ind_used_codons,
206 | behavioural_diversity=behavioural_diversity,
207 | structural_diversity=structural_diversity,
208 | fitness_diversity=fitness_diversity,
209 | selection_time=selection_time,
210 | generation_time=generation_time)
211 | if verbose:
212 | print(logbook.stream)
213 |
214 | # Begin the generational process
215 | for gen in range(logbook.select("gen")[-1]+1, ngen + 1):
216 | start_gen = time.time()
217 |
218 | # Select the next generation individuals
219 | start = time.time()
220 | offspring = toolbox.select(valid, len(population)-elite_size)
221 | end = time.time()
222 | selection_time = end-start
223 | # Vary the pool of individuals
224 | offspring = varAnd(offspring, toolbox, cxpb, mutpb,
225 | bnf_grammar, codon_size, max_tree_depth,
226 | codon_consumption, genome_representation,
227 | max_genome_length)
228 |
229 | # Evaluate the individuals with an invalid fitness
230 | for ind in offspring:
231 | if not ind.fitness.valid:
232 | ind.fitness.values = toolbox.evaluate(ind, points_train)
233 |
234 | #Update population for next generation
235 | population[:] = offspring
236 | #Include in the population the elitist individuals
237 | for i in range(elite_size):
238 | population.append(halloffame.items[i])
239 |
240 | valid0 = [ind for ind in population if not ind.invalid]
241 | valid = [ind for ind in valid0 if not math.isnan(ind.fitness.values[0])]
242 | if len(valid0) != len(valid):
243 | warnings.warn("Warning: There are valid individuals with fitness = NaN in the population. We will ignore them in the statistics.")
244 | invalid = len(population) - len(valid0) #We use the original number of invalids in this case, because we just want to count the completely mapped individuals
245 |
246 | list_structures = []
247 | if 'fitness_diversity' in report_items:
248 | list_fitnesses = []
249 | if 'behavioural_diversity' in report_items:
250 | behaviours = np.zeros([len(valid), len(valid[0].fitness_each_sample)], dtype=float)
251 |
252 | for idx, ind in enumerate(valid):
253 | list_structures.append(str(ind.structure))
254 | if 'fitness_diversity' in report_items:
255 | list_fitnesses.append(str(ind.fitness.values[0]))
256 | if 'behavioural_diversity' in report_items:
257 | behaviours[idx, :] = ind.fitness_each_sample
258 |
259 | unique_structures = np.unique(list_structures, return_counts=False)
260 | if 'fitness_diversity' in report_items:
261 | unique_fitnesses = np.unique(list_fitnesses, return_counts=False)
262 | if 'behavioural_diversity' in report_items:
263 | unique_behaviours = np.unique(behaviours, axis=0)
264 |
265 | structural_diversity = len(unique_structures)/len(population)
266 | fitness_diversity = len(unique_fitnesses)/(len(points_train[1])+1) if 'fitness_diversity' in report_items else 0 #TODO generalise for other problems, because it only works if the fitness is proportional to the number of testcases correctly predicted
267 | behavioural_diversity = len(unique_behaviours)/len(population) if 'behavioural_diversity' in report_items else 0
268 |
269 | # Update the hall of fame with the generated individuals
270 | if halloffame is not None:
271 | halloffame.update(valid)
272 | best_ind_length = len(halloffame.items[0].genome)
273 | best_ind_nodes = halloffame.items[0].nodes
274 | best_ind_depth = halloffame.items[0].depth
275 | best_ind_used_codons = halloffame.items[0].used_codons
276 | if not verbose:
277 | print("gen =", gen, ", Best fitness =", halloffame.items[0].fitness.values, ", Number of invalids =", invalid)
278 | if points_test:
279 | if gen < ngen:
280 | fitness_test = np.NaN
281 | else:
282 | fitness_test = toolbox.evaluate(halloffame.items[0], points_test)[0]
283 |
284 | length = [len(ind.genome) for ind in valid]
285 | avg_length = sum(length)/len(length)
286 |
287 | nodes = [ind.nodes for ind in valid]
288 | avg_nodes = sum(nodes)/len(nodes)
289 |
290 | depth = [ind.depth for ind in valid]
291 | avg_depth = sum(depth)/len(depth)
292 |
293 | used_codons = [ind.used_codons for ind in valid]
294 | avg_used_codons = sum(used_codons)/len(used_codons)
295 |
296 | end_gen = time.time()
297 | generation_time = end_gen-start_gen
298 |
299 | # Append the current generation statistics to the logbook
300 | record = stats.compile(population) if stats else {}
301 | if points_test:
302 | logbook.record(gen=gen, invalid=invalid, **record,
303 | fitness_test=fitness_test,
304 | best_ind_length=best_ind_length, avg_length=avg_length,
305 | best_ind_nodes=best_ind_nodes,
306 | avg_nodes=avg_nodes,
307 | best_ind_depth=best_ind_depth,
308 | avg_depth=avg_depth,
309 | avg_used_codons=avg_used_codons,
310 | best_ind_used_codons=best_ind_used_codons,
311 | behavioural_diversity=behavioural_diversity,
312 | structural_diversity=structural_diversity,
313 | fitness_diversity=fitness_diversity,
314 | selection_time=selection_time,
315 | generation_time=generation_time)
316 | else:
317 | logbook.record(gen=gen, invalid=invalid, **record,
318 | best_ind_length=best_ind_length, avg_length=avg_length,
319 | best_ind_nodes=best_ind_nodes,
320 | avg_nodes=avg_nodes,
321 | best_ind_depth=best_ind_depth,
322 | avg_depth=avg_depth,
323 | avg_used_codons=avg_used_codons,
324 | best_ind_used_codons=best_ind_used_codons,
325 | behavioural_diversity=behavioural_diversity,
326 | structural_diversity=structural_diversity,
327 | fitness_diversity=fitness_diversity,
328 | selection_time=selection_time,
329 | generation_time=generation_time)
330 |
331 | if verbose:
332 | print(logbook.stream)
333 |
334 | return population, logbook
--------------------------------------------------------------------------------
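
ge_eaSimpleWithElitism reports its per-generation metrics through DEAP's Statistics/Logbook machinery; below is a minimal standalone illustration of that reporting pattern (the "populations" here are just lists of floats, not GE individuals):

import numpy as np
from deap import tools

stats = tools.Statistics(key=lambda ind: ind)  # treat each float as its own fitness
stats.register("avg", np.mean)
stats.register("min", np.min)

logbook = tools.Logbook()
logbook.header = ['gen', 'avg', 'min']
for gen, pop in enumerate([[3.0, 1.0, 2.0], [1.5, 0.5, 2.5]]):
    record = stats.compile(pop)        # e.g. {'avg': 2.0, 'min': 1.0}
    logbook.record(gen=gen, **record)
print(logbook.stream)                  # prints the table, like the verbose=True path above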
/datasets/Pagie1_train.txt:
--------------------------------------------------------------------------------
1 | x0 x1 response
2 | -5.0 -5.0 1.99680511182
3 | -5.0 -4.6 1.9961741218
4 | -5.0 -4.2 1.99519916903
5 | -5.0 -3.8 1.99362959606
6 | -5.0 -3.4 1.99097498954
7 | -5.0 -3.0 1.98620743396
8 | -5.0 -2.6 1.97698817915
9 | -5.0 -2.2 1.95746190348
10 | -5.0 -1.8 1.91142788294
11 | -5.0 -1.4 1.79185926444
12 | -5.0 -1.0 1.49840255591
13 | -5.0 -0.6 1.1131334341
14 | -5.0 -0.2 1.0
15 | -5.0 0.2 1.0
16 | -5.0 0.6 1.1131334341
17 | -5.0 1.0 1.49840255591
18 | -5.0 1.4 1.79185926444
19 | -5.0 1.8 1.91142788294
20 | -5.0 2.2 1.95746190348
21 | -5.0 2.6 1.97698817915
22 | -5.0 3.0 1.98620743396
23 | -5.0 3.4 1.99097498954
24 | -5.0 3.8 1.99362959606
25 | -5.0 4.2 1.99519916903
26 | -5.0 4.6 1.9961741218
27 | -5.0 5.0 1.99680511182
28 | -4.6 -5.0 1.9961741218
29 | -4.6 -4.6 1.99554313179
30 | -4.6 -4.2 1.99456817902
31 | -4.6 -3.8 1.99299860605
32 | -4.6 -3.4 1.99034399952
33 | -4.6 -3.0 1.98557644394
34 | -4.6 -2.6 1.97635718914
35 | -4.6 -2.2 1.95683091346
36 | -4.6 -1.8 1.91079689292
37 | -4.6 -1.4 1.79122827442
38 | -4.6 -1.0 1.49777156589
39 | -4.6 -0.6 1.11250244408
40 | -4.6 -0.2 0.999369009983
41 | -4.6 0.2 0.999369009983
42 | -4.6 0.6 1.11250244408
43 | -4.6 1.0 1.49777156589
44 | -4.6 1.4 1.79122827442
45 | -4.6 1.8 1.91079689292
46 | -4.6 2.2 1.95683091346
47 | -4.6 2.6 1.97635718914
48 | -4.6 3.0 1.98557644394
49 | -4.6 3.4 1.99034399952
50 | -4.6 3.8 1.99299860605
51 | -4.6 4.2 1.99456817902
52 | -4.6 4.6 1.99554313179
53 | -4.6 5.0 1.9961741218
54 | -4.2 -5.0 1.99519916903
55 | -4.2 -4.6 1.99456817902
56 | -4.2 -4.2 1.99359322625
57 | -4.2 -3.8 1.99202365328
58 | -4.2 -3.4 1.98936904675
59 | -4.2 -3.0 1.98460149117
60 | -4.2 -2.6 1.97538223637
61 | -4.2 -2.2 1.95585596069
62 | -4.2 -1.8 1.90982194015
63 | -4.2 -1.4 1.79025332165
64 | -4.2 -1.0 1.49679661312
65 | -4.2 -0.6 1.11152749131
66 | -4.2 -0.2 0.998394057213
67 | -4.2 0.2 0.998394057213
68 | -4.2 0.6 1.11152749131
69 | -4.2 1.0 1.49679661312
70 | -4.2 1.4 1.79025332165
71 | -4.2 1.8 1.90982194015
72 | -4.2 2.2 1.95585596069
73 | -4.2 2.6 1.97538223637
74 | -4.2 3.0 1.98460149117
75 | -4.2 3.4 1.98936904675
76 | -4.2 3.8 1.99202365328
77 | -4.2 4.2 1.99359322625
78 | -4.2 4.6 1.99456817902
79 | -4.2 5.0 1.99519916903
80 | -3.8 -5.0 1.99362959606
81 | -3.8 -4.6 1.99299860605
82 | -3.8 -4.2 1.99202365328
83 | -3.8 -3.8 1.99045408031
84 | -3.8 -3.4 1.98779947378
85 | -3.8 -3.0 1.9830319182
86 | -3.8 -2.6 1.9738126634
87 | -3.8 -2.2 1.95428638772
88 | -3.8 -1.8 1.90825236718
89 | -3.8 -1.4 1.78868374868
90 | -3.8 -1.0 1.49522704015
91 | -3.8 -0.6 1.10995791834
92 | -3.8 -0.2 0.996824484243
93 | -3.8 0.2 0.996824484243
94 | -3.8 0.6 1.10995791834
95 | -3.8 1.0 1.49522704015
96 | -3.8 1.4 1.78868374868
97 | -3.8 1.8 1.90825236718
98 | -3.8 2.2 1.95428638772
99 | -3.8 2.6 1.9738126634
100 | -3.8 3.0 1.9830319182
101 | -3.8 3.4 1.98779947378
102 | -3.8 3.8 1.99045408031
103 | -3.8 4.2 1.99202365328
104 | -3.8 4.6 1.99299860605
105 | -3.8 5.0 1.99362959606
106 | -3.4 -5.0 1.99097498954
107 | -3.4 -4.6 1.99034399952
108 | -3.4 -4.2 1.98936904675
109 | -3.4 -3.8 1.98779947378
110 | -3.4 -3.4 1.98514486725
111 | -3.4 -3.0 1.98037731168
112 | -3.4 -2.6 1.97115805687
113 | -3.4 -2.2 1.9516317812
114 | -3.4 -1.8 1.90559776065
115 | -3.4 -1.4 1.78602914215
116 | -3.4 -1.0 1.49257243363
117 | -3.4 -0.6 1.10730331181
118 | -3.4 -0.2 0.994169877717
119 | -3.4 0.2 0.994169877717
120 | -3.4 0.6 1.10730331181
121 | -3.4 1.0 1.49257243363
122 | -3.4 1.4 1.78602914215
123 | -3.4 1.8 1.90559776065
124 | -3.4 2.2 1.9516317812
125 | -3.4 2.6 1.97115805687
126 | -3.4 3.0 1.98037731168
127 | -3.4 3.4 1.98514486725
128 | -3.4 3.8 1.98779947378
129 | -3.4 4.2 1.98936904675
130 | -3.4 4.6 1.99034399952
131 | -3.4 5.0 1.99097498954
132 | -3.0 -5.0 1.98620743396
133 | -3.0 -4.6 1.98557644394
134 | -3.0 -4.2 1.98460149117
135 | -3.0 -3.8 1.9830319182
136 | -3.0 -3.4 1.98037731168
137 | -3.0 -3.0 1.9756097561
138 | -3.0 -2.6 1.96639050129
139 | -3.0 -2.2 1.94686422562
140 | -3.0 -1.8 1.90083020507
141 | -3.0 -1.4 1.78126158657
142 | -3.0 -1.0 1.48780487805
143 | -3.0 -0.6 1.10253575624
144 | -3.0 -0.2 0.989402322138
145 | -3.0 0.2 0.989402322138
146 | -3.0 0.6 1.10253575624
147 | -3.0 1.0 1.48780487805
148 | -3.0 1.4 1.78126158657
149 | -3.0 1.8 1.90083020507
150 | -3.0 2.2 1.94686422562
151 | -3.0 2.6 1.96639050129
152 | -3.0 3.0 1.9756097561
153 | -3.0 3.4 1.98037731168
154 | -3.0 3.8 1.9830319182
155 | -3.0 4.2 1.98460149117
156 | -3.0 4.6 1.98557644394
157 | -3.0 5.0 1.98620743396
158 | -2.6 -5.0 1.97698817915
159 | -2.6 -4.6 1.97635718914
160 | -2.6 -4.2 1.97538223637
161 | -2.6 -3.8 1.9738126634
162 | -2.6 -3.4 1.97115805687
163 | -2.6 -3.0 1.96639050129
164 | -2.6 -2.6 1.95717124649
165 | -2.6 -2.2 1.93764497081
166 | -2.6 -1.8 1.89161095027
167 | -2.6 -1.4 1.77204233177
168 | -2.6 -1.0 1.47858562324
169 | -2.6 -0.6 1.09331650143
170 | -2.6 -0.2 0.980183067333
171 | -2.6 0.2 0.980183067333
172 | -2.6 0.6 1.09331650143
173 | -2.6 1.0 1.47858562324
174 | -2.6 1.4 1.77204233177
175 | -2.6 1.8 1.89161095027
176 | -2.6 2.2 1.93764497081
177 | -2.6 2.6 1.95717124649
178 | -2.6 3.0 1.96639050129
179 | -2.6 3.4 1.97115805687
180 | -2.6 3.8 1.9738126634
181 | -2.6 4.2 1.97538223637
182 | -2.6 4.6 1.97635718914
183 | -2.6 5.0 1.97698817915
184 | -2.2 -5.0 1.95746190348
185 | -2.2 -4.6 1.95683091346
186 | -2.2 -4.2 1.95585596069
187 | -2.2 -3.8 1.95428638772
188 | -2.2 -3.4 1.9516317812
189 | -2.2 -3.0 1.94686422562
190 | -2.2 -2.6 1.93764497081
191 | -2.2 -2.2 1.91811869514
192 | -2.2 -1.8 1.87208467459
193 | -2.2 -1.4 1.7525160561
194 | -2.2 -1.0 1.45905934757
195 | -2.2 -0.6 1.07379022576
196 | -2.2 -0.2 0.960656791659
197 | -2.2 0.2 0.960656791659
198 | -2.2 0.6 1.07379022576
199 | -2.2 1.0 1.45905934757
200 | -2.2 1.4 1.7525160561
201 | -2.2 1.8 1.87208467459
202 | -2.2 2.2 1.91811869514
203 | -2.2 2.6 1.93764497081
204 | -2.2 3.0 1.94686422562
205 | -2.2 3.4 1.9516317812
206 | -2.2 3.8 1.95428638772
207 | -2.2 4.2 1.95585596069
208 | -2.2 4.6 1.95683091346
209 | -2.2 5.0 1.95746190348
210 | -1.8 -5.0 1.91142788294
211 | -1.8 -4.6 1.91079689292
212 | -1.8 -4.2 1.90982194015
213 | -1.8 -3.8 1.90825236718
214 | -1.8 -3.4 1.90559776065
215 | -1.8 -3.0 1.90083020507
216 | -1.8 -2.6 1.89161095027
217 | -1.8 -2.2 1.87208467459
218 | -1.8 -1.8 1.82605065405
219 | -1.8 -1.4 1.70648203555
220 | -1.8 -1.0 1.41302532702
221 | -1.8 -0.6 1.02775620521
222 | -1.8 -0.2 0.914622771114
223 | -1.8 0.2 0.914622771114
224 | -1.8 0.6 1.02775620521
225 | -1.8 1.0 1.41302532702
226 | -1.8 1.4 1.70648203555
227 | -1.8 1.8 1.82605065405
228 | -1.8 2.2 1.87208467459
229 | -1.8 2.6 1.89161095027
230 | -1.8 3.0 1.90083020507
231 | -1.8 3.4 1.90559776065
232 | -1.8 3.8 1.90825236718
233 | -1.8 4.2 1.90982194015
234 | -1.8 4.6 1.91079689292
235 | -1.8 5.0 1.91142788294
236 | -1.4 -5.0 1.79185926444
237 | -1.4 -4.6 1.79122827442
238 | -1.4 -4.2 1.79025332165
239 | -1.4 -3.8 1.78868374868
240 | -1.4 -3.4 1.78602914215
241 | -1.4 -3.0 1.78126158657
242 | -1.4 -2.6 1.77204233177
243 | -1.4 -2.2 1.7525160561
244 | -1.4 -1.8 1.70648203555
245 | -1.4 -1.4 1.58691341705
246 | -1.4 -1.0 1.29345670853
247 | -1.4 -0.6 0.908187586713
248 | -1.4 -0.2 0.795054152616
249 | -1.4 0.2 0.795054152616
250 | -1.4 0.6 0.908187586713
251 | -1.4 1.0 1.29345670853
252 | -1.4 1.4 1.58691341705
253 | -1.4 1.8 1.70648203555
254 | -1.4 2.2 1.7525160561
255 | -1.4 2.6 1.77204233177
256 | -1.4 3.0 1.78126158657
257 | -1.4 3.4 1.78602914215
258 | -1.4 3.8 1.78868374868
259 | -1.4 4.2 1.79025332165
260 | -1.4 4.6 1.79122827442
261 | -1.4 5.0 1.79185926444
262 | -1.0 -5.0 1.49840255591
263 | -1.0 -4.6 1.49777156589
264 | -1.0 -4.2 1.49679661312
265 | -1.0 -3.8 1.49522704015
266 | -1.0 -3.4 1.49257243363
267 | -1.0 -3.0 1.48780487805
268 | -1.0 -2.6 1.47858562324
269 | -1.0 -2.2 1.45905934757
270 | -1.0 -1.8 1.41302532702
271 | -1.0 -1.4 1.29345670853
272 | -1.0 -1.0 1.0
273 | -1.0 -0.6 0.614730878187
274 | -1.0 -0.2 0.501597444089
275 | -1.0 0.2 0.501597444089
276 | -1.0 0.6 0.614730878187
277 | -1.0 1.0 1.0
278 | -1.0 1.4 1.29345670853
279 | -1.0 1.8 1.41302532702
280 | -1.0 2.2 1.45905934757
281 | -1.0 2.6 1.47858562324
282 | -1.0 3.0 1.48780487805
283 | -1.0 3.4 1.49257243363
284 | -1.0 3.8 1.49522704015
285 | -1.0 4.2 1.49679661312
286 | -1.0 4.6 1.49777156589
287 | -1.0 5.0 1.49840255591
288 | -0.6 -5.0 1.1131334341
289 | -0.6 -4.6 1.11250244408
290 | -0.6 -4.2 1.11152749131
291 | -0.6 -3.8 1.10995791834
292 | -0.6 -3.4 1.10730331181
293 | -0.6 -3.0 1.10253575624
294 | -0.6 -2.6 1.09331650143
295 | -0.6 -2.2 1.07379022576
296 | -0.6 -1.8 1.02775620521
297 | -0.6 -1.4 0.908187586713
298 | -0.6 -1.0 0.614730878187
299 | -0.6 -0.6 0.229461756374
300 | -0.6 -0.2 0.116328322276
301 | -0.6 0.2 0.116328322276
302 | -0.6 0.6 0.229461756374
303 | -0.6 1.0 0.614730878187
304 | -0.6 1.4 0.908187586713
305 | -0.6 1.8 1.02775620521
306 | -0.6 2.2 1.07379022576
307 | -0.6 2.6 1.09331650143
308 | -0.6 3.0 1.10253575624
309 | -0.6 3.4 1.10730331181
310 | -0.6 3.8 1.10995791834
311 | -0.6 4.2 1.11152749131
312 | -0.6 4.6 1.11250244408
313 | -0.6 5.0 1.1131334341
314 | -0.2 -5.0 1.0
315 | -0.2 -4.6 0.999369009983
316 | -0.2 -4.2 0.998394057213
317 | -0.2 -3.8 0.996824484243
318 | -0.2 -3.4 0.994169877717
319 | -0.2 -3.0 0.989402322138
320 | -0.2 -2.6 0.980183067333
321 | -0.2 -2.2 0.960656791659
322 | -0.2 -1.8 0.914622771114
323 | -0.2 -1.4 0.795054152616
324 | -0.2 -1.0 0.501597444089
325 | -0.2 -0.6 0.116328322276
326 | -0.2 -0.2 0.00319488817891
327 | -0.2 0.2 0.00319488817891
328 | -0.2 0.6 0.116328322276
329 | -0.2 1.0 0.501597444089
330 | -0.2 1.4 0.795054152616
331 | -0.2 1.8 0.914622771114
332 | -0.2 2.2 0.960656791659
333 | -0.2 2.6 0.980183067333
334 | -0.2 3.0 0.989402322138
335 | -0.2 3.4 0.994169877717
336 | -0.2 3.8 0.996824484243
337 | -0.2 4.2 0.998394057213
338 | -0.2 4.6 0.999369009983
339 | -0.2 5.0 1.0
340 | 0.2 -5.0 1.0
341 | 0.2 -4.6 0.999369009983
342 | 0.2 -4.2 0.998394057213
343 | 0.2 -3.8 0.996824484243
344 | 0.2 -3.4 0.994169877717
345 | 0.2 -3.0 0.989402322138
346 | 0.2 -2.6 0.980183067333
347 | 0.2 -2.2 0.960656791659
348 | 0.2 -1.8 0.914622771114
349 | 0.2 -1.4 0.795054152616
350 | 0.2 -1.0 0.501597444089
351 | 0.2 -0.6 0.116328322276
352 | 0.2 -0.2 0.00319488817891
353 | 0.2 0.2 0.00319488817891
354 | 0.2 0.6 0.116328322276
355 | 0.2 1.0 0.501597444089
356 | 0.2 1.4 0.795054152616
357 | 0.2 1.8 0.914622771114
358 | 0.2 2.2 0.960656791659
359 | 0.2 2.6 0.980183067333
360 | 0.2 3.0 0.989402322138
361 | 0.2 3.4 0.994169877717
362 | 0.2 3.8 0.996824484243
363 | 0.2 4.2 0.998394057213
364 | 0.2 4.6 0.999369009983
365 | 0.2 5.0 1.0
366 | 0.6 -5.0 1.1131334341
367 | 0.6 -4.6 1.11250244408
368 | 0.6 -4.2 1.11152749131
369 | 0.6 -3.8 1.10995791834
370 | 0.6 -3.4 1.10730331181
371 | 0.6 -3.0 1.10253575624
372 | 0.6 -2.6 1.09331650143
373 | 0.6 -2.2 1.07379022576
374 | 0.6 -1.8 1.02775620521
375 | 0.6 -1.4 0.908187586713
376 | 0.6 -1.0 0.614730878187
377 | 0.6 -0.6 0.229461756374
378 | 0.6 -0.2 0.116328322276
379 | 0.6 0.2 0.116328322276
380 | 0.6 0.6 0.229461756374
381 | 0.6 1.0 0.614730878187
382 | 0.6 1.4 0.908187586713
383 | 0.6 1.8 1.02775620521
384 | 0.6 2.2 1.07379022576
385 | 0.6 2.6 1.09331650143
386 | 0.6 3.0 1.10253575624
387 | 0.6 3.4 1.10730331181
388 | 0.6 3.8 1.10995791834
389 | 0.6 4.2 1.11152749131
390 | 0.6 4.6 1.11250244408
391 | 0.6 5.0 1.1131334341
392 | 1.0 -5.0 1.49840255591
393 | 1.0 -4.6 1.49777156589
394 | 1.0 -4.2 1.49679661312
395 | 1.0 -3.8 1.49522704015
396 | 1.0 -3.4 1.49257243363
397 | 1.0 -3.0 1.48780487805
398 | 1.0 -2.6 1.47858562324
399 | 1.0 -2.2 1.45905934757
400 | 1.0 -1.8 1.41302532702
401 | 1.0 -1.4 1.29345670853
402 | 1.0 -1.0 1.0
403 | 1.0 -0.6 0.614730878187
404 | 1.0 -0.2 0.501597444089
405 | 1.0 0.2 0.501597444089
406 | 1.0 0.6 0.614730878187
407 | 1.0 1.0 1.0
408 | 1.0 1.4 1.29345670853
409 | 1.0 1.8 1.41302532702
410 | 1.0 2.2 1.45905934757
411 | 1.0 2.6 1.47858562324
412 | 1.0 3.0 1.48780487805
413 | 1.0 3.4 1.49257243363
414 | 1.0 3.8 1.49522704015
415 | 1.0 4.2 1.49679661312
416 | 1.0 4.6 1.49777156589
417 | 1.0 5.0 1.49840255591
418 | 1.4 -5.0 1.79185926444
419 | 1.4 -4.6 1.79122827442
420 | 1.4 -4.2 1.79025332165
421 | 1.4 -3.8 1.78868374868
422 | 1.4 -3.4 1.78602914215
423 | 1.4 -3.0 1.78126158657
424 | 1.4 -2.6 1.77204233177
425 | 1.4 -2.2 1.7525160561
426 | 1.4 -1.8 1.70648203555
427 | 1.4 -1.4 1.58691341705
428 | 1.4 -1.0 1.29345670853
429 | 1.4 -0.6 0.908187586713
430 | 1.4 -0.2 0.795054152616
431 | 1.4 0.2 0.795054152616
432 | 1.4 0.6 0.908187586713
433 | 1.4 1.0 1.29345670853
434 | 1.4 1.4 1.58691341705
435 | 1.4 1.8 1.70648203555
436 | 1.4 2.2 1.7525160561
437 | 1.4 2.6 1.77204233177
438 | 1.4 3.0 1.78126158657
439 | 1.4 3.4 1.78602914215
440 | 1.4 3.8 1.78868374868
441 | 1.4 4.2 1.79025332165
442 | 1.4 4.6 1.79122827442
443 | 1.4 5.0 1.79185926444
444 | 1.8 -5.0 1.91142788294
445 | 1.8 -4.6 1.91079689292
446 | 1.8 -4.2 1.90982194015
447 | 1.8 -3.8 1.90825236718
448 | 1.8 -3.4 1.90559776065
449 | 1.8 -3.0 1.90083020507
450 | 1.8 -2.6 1.89161095027
451 | 1.8 -2.2 1.87208467459
452 | 1.8 -1.8 1.82605065405
453 | 1.8 -1.4 1.70648203555
454 | 1.8 -1.0 1.41302532702
455 | 1.8 -0.6 1.02775620521
456 | 1.8 -0.2 0.914622771114
457 | 1.8 0.2 0.914622771114
458 | 1.8 0.6 1.02775620521
459 | 1.8 1.0 1.41302532702
460 | 1.8 1.4 1.70648203555
461 | 1.8 1.8 1.82605065405
462 | 1.8 2.2 1.87208467459
463 | 1.8 2.6 1.89161095027
464 | 1.8 3.0 1.90083020507
465 | 1.8 3.4 1.90559776065
466 | 1.8 3.8 1.90825236718
467 | 1.8 4.2 1.90982194015
468 | 1.8 4.6 1.91079689292
469 | 1.8 5.0 1.91142788294
470 | 2.2 -5.0 1.95746190348
471 | 2.2 -4.6 1.95683091346
472 | 2.2 -4.2 1.95585596069
473 | 2.2 -3.8 1.95428638772
474 | 2.2 -3.4 1.9516317812
475 | 2.2 -3.0 1.94686422562
476 | 2.2 -2.6 1.93764497081
477 | 2.2 -2.2 1.91811869514
478 | 2.2 -1.8 1.87208467459
479 | 2.2 -1.4 1.7525160561
480 | 2.2 -1.0 1.45905934757
481 | 2.2 -0.6 1.07379022576
482 | 2.2 -0.2 0.960656791659
483 | 2.2 0.2 0.960656791659
484 | 2.2 0.6 1.07379022576
485 | 2.2 1.0 1.45905934757
486 | 2.2 1.4 1.7525160561
487 | 2.2 1.8 1.87208467459
488 | 2.2 2.2 1.91811869514
489 | 2.2 2.6 1.93764497081
490 | 2.2 3.0 1.94686422562
491 | 2.2 3.4 1.9516317812
492 | 2.2 3.8 1.95428638772
493 | 2.2 4.2 1.95585596069
494 | 2.2 4.6 1.95683091346
495 | 2.2 5.0 1.95746190348
496 | 2.6 -5.0 1.97698817915
497 | 2.6 -4.6 1.97635718914
498 | 2.6 -4.2 1.97538223637
499 | 2.6 -3.8 1.9738126634
500 | 2.6 -3.4 1.97115805687
501 | 2.6 -3.0 1.96639050129
502 | 2.6 -2.6 1.95717124649
503 | 2.6 -2.2 1.93764497081
504 | 2.6 -1.8 1.89161095027
505 | 2.6 -1.4 1.77204233177
506 | 2.6 -1.0 1.47858562324
507 | 2.6 -0.6 1.09331650143
508 | 2.6 -0.2 0.980183067333
509 | 2.6 0.2 0.980183067333
510 | 2.6 0.6 1.09331650143
511 | 2.6 1.0 1.47858562324
512 | 2.6 1.4 1.77204233177
513 | 2.6 1.8 1.89161095027
514 | 2.6 2.2 1.93764497081
515 | 2.6 2.6 1.95717124649
516 | 2.6 3.0 1.96639050129
517 | 2.6 3.4 1.97115805687
518 | 2.6 3.8 1.9738126634
519 | 2.6 4.2 1.97538223637
520 | 2.6 4.6 1.97635718914
521 | 2.6 5.0 1.97698817915
522 | 3.0 -5.0 1.98620743396
523 | 3.0 -4.6 1.98557644394
524 | 3.0 -4.2 1.98460149117
525 | 3.0 -3.8 1.9830319182
526 | 3.0 -3.4 1.98037731168
527 | 3.0 -3.0 1.9756097561
528 | 3.0 -2.6 1.96639050129
529 | 3.0 -2.2 1.94686422562
530 | 3.0 -1.8 1.90083020507
531 | 3.0 -1.4 1.78126158657
532 | 3.0 -1.0 1.48780487805
533 | 3.0 -0.6 1.10253575624
534 | 3.0 -0.2 0.989402322138
535 | 3.0 0.2 0.989402322138
536 | 3.0 0.6 1.10253575624
537 | 3.0 1.0 1.48780487805
538 | 3.0 1.4 1.78126158657
539 | 3.0 1.8 1.90083020507
540 | 3.0 2.2 1.94686422562
541 | 3.0 2.6 1.96639050129
542 | 3.0 3.0 1.9756097561
543 | 3.0 3.4 1.98037731168
544 | 3.0 3.8 1.9830319182
545 | 3.0 4.2 1.98460149117
546 | 3.0 4.6 1.98557644394
547 | 3.0 5.0 1.98620743396
548 | 3.4 -5.0 1.99097498954
549 | 3.4 -4.6 1.99034399952
550 | 3.4 -4.2 1.98936904675
551 | 3.4 -3.8 1.98779947378
552 | 3.4 -3.4 1.98514486725
553 | 3.4 -3.0 1.98037731168
554 | 3.4 -2.6 1.97115805687
555 | 3.4 -2.2 1.9516317812
556 | 3.4 -1.8 1.90559776065
557 | 3.4 -1.4 1.78602914215
558 | 3.4 -1.0 1.49257243363
559 | 3.4 -0.6 1.10730331181
560 | 3.4 -0.2 0.994169877717
561 | 3.4 0.2 0.994169877717
562 | 3.4 0.6 1.10730331181
563 | 3.4 1.0 1.49257243363
564 | 3.4 1.4 1.78602914215
565 | 3.4 1.8 1.90559776065
566 | 3.4 2.2 1.9516317812
567 | 3.4 2.6 1.97115805687
568 | 3.4 3.0 1.98037731168
569 | 3.4 3.4 1.98514486725
570 | 3.4 3.8 1.98779947378
571 | 3.4 4.2 1.98936904675
572 | 3.4 4.6 1.99034399952
573 | 3.4 5.0 1.99097498954
574 | 3.8 -5.0 1.99362959606
575 | 3.8 -4.6 1.99299860605
576 | 3.8 -4.2 1.99202365328
577 | 3.8 -3.8 1.99045408031
578 | 3.8 -3.4 1.98779947378
579 | 3.8 -3.0 1.9830319182
580 | 3.8 -2.6 1.9738126634
581 | 3.8 -2.2 1.95428638772
582 | 3.8 -1.8 1.90825236718
583 | 3.8 -1.4 1.78868374868
584 | 3.8 -1.0 1.49522704015
585 | 3.8 -0.6 1.10995791834
586 | 3.8 -0.2 0.996824484243
587 | 3.8 0.2 0.996824484243
588 | 3.8 0.6 1.10995791834
589 | 3.8 1.0 1.49522704015
590 | 3.8 1.4 1.78868374868
591 | 3.8 1.8 1.90825236718
592 | 3.8 2.2 1.95428638772
593 | 3.8 2.6 1.9738126634
594 | 3.8 3.0 1.9830319182
595 | 3.8 3.4 1.98779947378
596 | 3.8 3.8 1.99045408031
597 | 3.8 4.2 1.99202365328
598 | 3.8 4.6 1.99299860605
599 | 3.8 5.0 1.99362959606
600 | 4.2 -5.0 1.99519916903
601 | 4.2 -4.6 1.99456817902
602 | 4.2 -4.2 1.99359322625
603 | 4.2 -3.8 1.99202365328
604 | 4.2 -3.4 1.98936904675
605 | 4.2 -3.0 1.98460149117
606 | 4.2 -2.6 1.97538223637
607 | 4.2 -2.2 1.95585596069
608 | 4.2 -1.8 1.90982194015
609 | 4.2 -1.4 1.79025332165
610 | 4.2 -1.0 1.49679661312
611 | 4.2 -0.6 1.11152749131
612 | 4.2 -0.2 0.998394057213
613 | 4.2 0.2 0.998394057213
614 | 4.2 0.6 1.11152749131
615 | 4.2 1.0 1.49679661312
616 | 4.2 1.4 1.79025332165
617 | 4.2 1.8 1.90982194015
618 | 4.2 2.2 1.95585596069
619 | 4.2 2.6 1.97538223637
620 | 4.2 3.0 1.98460149117
621 | 4.2 3.4 1.98936904675
622 | 4.2 3.8 1.99202365328
623 | 4.2 4.2 1.99359322625
624 | 4.2 4.6 1.99456817902
625 | 4.2 5.0 1.99519916903
626 | 4.6 -5.0 1.9961741218
627 | 4.6 -4.6 1.99554313179
628 | 4.6 -4.2 1.99456817902
629 | 4.6 -3.8 1.99299860605
630 | 4.6 -3.4 1.99034399952
631 | 4.6 -3.0 1.98557644394
632 | 4.6 -2.6 1.97635718914
633 | 4.6 -2.2 1.95683091346
634 | 4.6 -1.8 1.91079689292
635 | 4.6 -1.4 1.79122827442
636 | 4.6 -1.0 1.49777156589
637 | 4.6 -0.6 1.11250244408
638 | 4.6 -0.2 0.999369009983
639 | 4.6 0.2 0.999369009983
640 | 4.6 0.6 1.11250244408
641 | 4.6 1.0 1.49777156589
642 | 4.6 1.4 1.79122827442
643 | 4.6 1.8 1.91079689292
644 | 4.6 2.2 1.95683091346
645 | 4.6 2.6 1.97635718914
646 | 4.6 3.0 1.98557644394
647 | 4.6 3.4 1.99034399952
648 | 4.6 3.8 1.99299860605
649 | 4.6 4.2 1.99456817902
650 | 4.6 4.6 1.99554313179
651 | 4.6 5.0 1.9961741218
652 | 5.0 -5.0 1.99680511182
653 | 5.0 -4.6 1.9961741218
654 | 5.0 -4.2 1.99519916903
655 | 5.0 -3.8 1.99362959606
656 | 5.0 -3.4 1.99097498954
657 | 5.0 -3.0 1.98620743396
658 | 5.0 -2.6 1.97698817915
659 | 5.0 -2.2 1.95746190348
660 | 5.0 -1.8 1.91142788294
661 | 5.0 -1.4 1.79185926444
662 | 5.0 -1.0 1.49840255591
663 | 5.0 -0.6 1.1131334341
664 | 5.0 -0.2 1.0
665 | 5.0 0.2 1.0
666 | 5.0 0.6 1.1131334341
667 | 5.0 1.0 1.49840255591
668 | 5.0 1.4 1.79185926444
669 | 5.0 1.8 1.91142788294
670 | 5.0 2.2 1.95746190348
671 | 5.0 2.6 1.97698817915
672 | 5.0 3.0 1.98620743396
673 | 5.0 3.4 1.99097498954
674 | 5.0 3.8 1.99362959606
675 | 5.0 4.2 1.99519916903
676 | 5.0 4.6 1.9961741218
677 | 5.0 5.0 1.99680511182
678 |
--------------------------------------------------------------------------------
/datasets/SPECT.test:
--------------------------------------------------------------------------------
1 | class,F1,F2,F3,F4,F5,F6,F7,F8,F9,F10,F11,F12,F13,F14,F15,F16,F17,F18,F19,F20,F21,F22
2 | 1,1,0,0,1,1,0,0,0,1,1,0,0,0,1,1,1,0,0,1,1,0,0
3 | 1,1,0,0,1,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0
4 | 1,0,0,0,1,0,1,0,0,1,0,1,0,0,1,1,0,0,0,0,0,0,1
5 | 1,0,1,1,1,0,0,1,0,1,0,0,1,1,1,0,1,0,0,0,0,1,0
6 | 1,0,0,1,0,0,0,0,1,0,0,1,0,1,1,0,1,0,0,0,0,0,1
7 | 1,0,0,1,1,0,1,0,0,1,0,1,0,1,0,0,1,0,0,0,0,1,1
8 | 1,1,0,0,1,0,0,1,1,1,1,0,1,1,1,0,1,0,0,0,1,0,1
9 | 1,1,0,0,1,0,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0
10 | 1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0
11 | 1,1,0,0,1,1,1,0,0,1,1,1,0,0,1,0,1,1,0,1,0,0,0
12 | 1,1,0,0,0,1,0,0,0,1,1,0,0,1,1,1,0,0,0,1,0,0,0
13 | 1,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0
14 | 1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,1
15 | 1,1,0,0,0,1,0,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0
16 | 1,1,0,0,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0
17 | 1,1,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,1,0
18 | 1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,1
19 | 1,1,0,0,0,0,1,1,0,0,1,1,1,0,0,0,0,1,0,0,0,0,1
20 | 1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0
21 | 1,1,1,1,0,1,0,1,1,0,1,0,1,1,0,0,1,0,0,0,1,1,0
22 | 1,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,0
23 | 1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,1,1,1
24 | 1,0,0,1,1,1,0,0,1,1,1,0,0,1,1,1,1,0,1,0,1,1,0
25 | 1,1,1,0,1,1,1,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,0
26 | 1,1,1,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0
27 | 1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0
28 | 1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,0
29 | 1,1,1,1,1,0,1,1,1,0,1,0,0,1,1,1,1,0,0,1,1,0,0
30 | 1,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0
31 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0
32 | 1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,1,1,0,0,1,1,0,0
33 | 1,1,1,0,0,0,1,1,0,0,1,1,0,0,0,1,1,0,0,1,1,1,0
34 | 1,0,0,0,0,0,0,1,1,0,0,0,1,1,0,0,1,0,0,0,0,1,1
35 | 1,1,0,0,0,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1
36 | 1,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0
37 | 1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1
38 | 1,0,1,1,0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0
39 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0
40 | 1,1,0,1,1,1,0,0,1,1,1,0,0,1,0,1,1,0,0,0,1,1,1
41 | 1,0,1,1,1,0,0,1,1,1,0,0,1,1,1,0,0,0,0,0,0,1,1
42 | 1,0,1,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,1,1
43 | 1,0,0,0,0,0,1,1,0,0,0,1,1,1,0,0,0,1,1,0,0,0,0
44 | 1,0,0,1,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,1,1,1
45 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
46 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0
47 | 1,0,0,1,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1
48 | 1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1
49 | 1,0,0,1,0,1,0,0,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1
50 | 1,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0
51 | 1,1,0,1,1,1,0,0,1,1,0,0,0,0,1,1,1,0,0,0,1,1,0
52 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1
53 | 1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1
54 | 1,0,1,1,0,0,0,1,1,0,0,0,1,1,0,0,1,1,1,0,0,1,1
55 | 1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1
56 | 1,1,0,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,1
57 | 1,1,0,1,0,1,1,0,1,1,0,1,1,1,1,1,1,0,0,1,0,1,1
58 | 1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1
59 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
60 | 1,0,1,1,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,1,0,0
61 | 1,0,1,1,0,0,0,1,0,0,0,0,1,1,0,0,0,1,0,0,0,0,1
62 | 1,1,1,1,1,1,1,0,0,1,0,1,0,1,0,1,1,0,0,1,1,0,1
63 | 1,0,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,1,1,0,0,0,0
64 | 1,0,1,0,0,1,1,0,0,0,1,1,0,1,0,0,0,0,0,0,0,1,1
65 | 1,1,0,1,1,1,0,0,1,1,0,0,0,1,1,1,1,0,0,0,1,1,1
66 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0
67 | 1,1,1,1,0,1,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0
68 | 1,0,0,0,0,0,0,0,1,0,0,0,0,1,1,0,1,0,0,0,1,1,1
69 | 1,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,1,1,0,0
70 | 1,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,1,0,0,1,1,0,0
71 | 1,1,1,1,0,1,0,1,1,0,1,0,1,1,0,0,0,0,1,1,0,1,1
72 | 1,1,1,0,0,1,1,1,1,0,0,0,1,1,0,0,1,1,0,0,1,1,1
73 | 1,0,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,1,1
74 | 1,1,0,1,1,1,0,0,1,1,1,0,0,1,1,1,1,0,1,1,1,1,1
75 | 1,0,0,1,0,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,1
76 | 1,0,0,0,1,0,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,0
77 | 1,0,0,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,1,1,0,1,1
78 | 1,0,1,1,0,0,0,1,1,0,0,0,1,1,0,0,1,1,1,0,1,1,1
79 | 1,1,0,1,1,1,0,0,1,1,1,0,0,1,1,0,0,0,0,0,0,1,0
80 | 1,0,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,0,0,1,0,1,1
81 | 1,0,0,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,1,1,1
82 | 1,1,0,0,0,1,0,0,1,0,0,0,0,1,1,1,1,1,0,0,1,1,1
83 | 1,0,1,1,1,0,1,0,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1
84 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1
85 | 1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1
86 | 1,1,1,1,1,1,1,0,0,1,1,0,1,0,1,1,1,0,0,1,1,0,0
87 | 1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1
88 | 1,1,1,1,1,1,0,1,1,1,1,0,1,1,1,1,0,0,0,0,1,0,0
89 | 1,0,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,1,0,1,1,0,1
90 | 1,1,0,1,0,1,0,0,1,0,0,0,0,1,1,0,1,0,0,0,1,1,1
91 | 1,0,0,0,0,0,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,1,0
92 | 1,0,0,0,1,0,0,0,0,1,0,0,0,1,1,1,1,0,0,1,1,0,0
93 | 1,1,0,1,1,0,0,0,1,1,0,0,0,1,1,1,1,0,0,1,1,1,1
94 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0
95 | 1,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0
96 | 1,1,0,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,1,1,0,0,1
97 | 1,0,0,0,1,0,0,1,1,1,0,1,1,1,1,0,0,0,0,0,1,0,0
98 | 1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,0,1
99 | 1,1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,0,0,0,1,1,1
100 | 1,0,0,1,1,0,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,1,1
101 | 1,1,0,1,0,1,1,1,1,0,1,1,1,1,0,0,1,0,1,0,1,1,1
102 | 1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0
103 | 1,0,0,1,1,0,0,0,1,1,0,0,0,1,1,1,0,0,0,0,0,1,1
104 | 1,1,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,1,1,1,1,1,0
105 | 1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0
106 | 1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0
107 | 1,1,0,0,0,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,1
108 | 1,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0
109 | 1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0
110 | 1,1,0,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0
111 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
112 | 1,1,0,0,0,1,0,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0
113 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
114 | 1,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0
115 | 1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0
116 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
117 | 1,0,0,1,0,1,0,0,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0
118 | 1,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1
119 | 1,1,1,0,0,1,1,1,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0
120 | 1,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0
121 | 1,1,1,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1
122 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
123 | 1,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0
124 | 1,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0
125 | 1,1,0,1,1,1,0,0,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1
126 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1
127 | 1,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0
128 | 1,0,0,0,1,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0
129 | 1,0,0,1,1,0,1,0,0,0,0,1,0,0,1,0,0,0,1,1,1,0,0
130 | 1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0
131 | 1,1,1,1,0,1,1,1,1,0,1,1,1,0,0,0,0,0,0,0,1,0,0
132 | 1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0
133 | 1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1
134 | 1,0,0,1,1,0,0,1,1,1,0,0,1,0,0,1,1,0,0,0,0,1,0
135 | 1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1
136 | 1,1,0,0,0,1,1,0,0,1,1,1,0,0,1,0,0,0,0,0,0,0,0
137 | 1,1,1,1,0,0,0,0,0,0,0,0,1,1,0,0,1,1,1,0,0,1,1
138 | 1,1,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,1,0,1,1,1,0
139 | 1,1,1,0,1,1,0,0,1,1,1,0,1,0,0,1,1,0,0,0,1,1,0
140 | 1,0,0,1,1,0,0,0,1,0,0,0,0,1,1,0,1,0,0,0,1,1,1
141 | 1,0,0,1,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0
142 | 1,0,1,1,1,0,1,1,1,1,0,1,0,1,1,1,1,0,0,0,1,1,1
143 | 1,0,1,1,0,0,1,0,1,0,0,1,1,1,0,0,1,0,0,1,1,0,1
144 | 1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1
145 | 1,1,0,1,0,1,0,1,0,1,1,0,0,1,1,1,1,0,0,1,1,0,1
146 | 1,1,0,0,1,1,0,0,0,1,1,0,0,0,1,1,1,0,0,1,1,0,1
147 | 1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1
148 | 1,1,1,1,0,0,1,1,0,1,0,0,1,0,1,0,1,0,0,0,1,1,1
149 | 1,0,0,1,0,0,0,0,1,0,0,1,0,1,0,0,1,0,0,0,1,1,1
150 | 1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,1,1,0,0,1,1,1,1
151 | 1,1,0,0,0,1,0,0,1,1,1,0,0,1,1,0,1,0,0,1,1,0,0
152 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1
153 | 1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0
154 | 1,1,1,1,0,1,0,1,1,0,1,0,1,1,0,0,0,0,0,0,0,1,0
155 | 1,1,1,1,0,0,1,1,1,0,1,1,1,1,0,0,0,1,1,1,1,0,0
156 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,1,0,0,1,0,1,1
157 | 1,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1
158 | 1,0,0,0,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,1,0
159 | 1,1,1,0,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,1,0,0,0
160 | 1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1
161 | 1,1,1,1,1,1,1,1,1,0,1,1,1,1,0,1,1,1,1,1,1,1,1
162 | 1,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,1,0,0
163 | 1,1,0,1,1,1,1,1,1,0,0,1,1,1,1,1,0,1,0,0,0,1,1
164 | 1,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,0
165 | 1,0,0,1,1,0,0,0,1,1,1,0,1,1,1,1,0,0,0,0,0,1,1
166 | 1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0
167 | 1,0,0,1,0,1,0,0,1,0,1,0,0,1,0,1,0,0,1,0,0,1,0
168 | 1,0,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,1,1,0,0,1,1
169 | 1,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1
170 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
171 | 1,1,1,1,0,1,1,0,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0
172 | 1,1,0,1,1,1,0,0,1,1,1,0,0,1,1,0,1,0,1,1,1,0,0
173 | 1,1,0,0,0,1,0,0,0,1,1,0,0,0,1,0,0,0,0,0,1,1,0
174 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
175 | 0,0,0,1,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,1,0
176 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
177 | 0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,1
178 | 0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0
179 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
180 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
181 | 0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0
182 | 0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0
183 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
184 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
185 | 0,1,1,0,0,0,1,0,0,0,1,1,1,0,0,0,1,0,0,0,0,0,0
186 | 0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0
187 | 0,1,0,1,0,1,0,0,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0
188 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
--------------------------------------------------------------------------------
/datasets/SPECT.train:
--------------------------------------------------------------------------------
1 | class,F1,F2,F3,F4,F5,F6,F7,F8,F9,F10,F11,F12,F13,F14,F15,F16,F17,F18,F19,F20,F21,F22
2 | 1,0,0,0,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0
3 | 1,0,0,1,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,1
4 | 1,1,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0
5 | 1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,1
6 | 1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0
7 | 1,0,0,0,1,0,0,0,0,1,0,0,0,1,1,0,1,0,0,0,1,0,1
8 | 1,1,0,1,1,0,0,0,1,0,1,0,1,1,0,0,0,0,0,0,0,1,1
9 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1
10 | 1,0,0,1,0,0,0,1,1,0,0,0,0,1,0,1,0,0,0,0,0,1,1
11 | 1,0,1,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0
12 | 1,1,1,0,0,1,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,0,1
13 | 1,1,1,0,0,1,1,1,0,1,1,1,1,0,1,0,0,1,0,1,1,0,0
14 | 1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,1
15 | 1,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,1,1
16 | 1,1,0,1,1,0,0,1,1,1,0,1,1,1,1,1,1,0,1,1,0,1,1
17 | 1,0,1,1,0,0,1,1,1,0,0,0,1,1,0,0,1,1,1,0,1,1,1
18 | 1,0,0,1,1,0,0,0,1,1,0,0,0,1,1,0,1,0,0,0,0,1,0
19 | 1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
20 | 1,1,0,1,0,1,0,1,1,0,1,0,1,1,0,0,0,1,0,0,1,1,0
21 | 1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,0
22 | 1,0,0,0,0,0,0,1,0,0,0,1,1,1,0,0,0,0,0,0,0,1,1
23 | 1,1,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,1,1,0,0
24 | 1,1,1,0,0,1,1,1,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0
25 | 1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0
26 | 1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0
27 | 1,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0
28 | 1,0,0,0,1,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,1,0
29 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0
30 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
31 | 1,1,0,1,1,1,0,0,0,0,1,0,0,1,1,0,1,0,0,0,1,1,1
32 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
33 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
34 | 1,1,1,0,0,1,1,1,0,0,1,1,1,0,0,0,0,1,0,1,0,0,1
35 | 1,0,1,1,1,0,0,1,1,1,0,1,1,1,0,0,1,1,1,0,0,1,1
36 | 1,1,0,1,1,1,0,0,1,1,1,0,0,1,1,1,0,0,0,0,0,1,0
37 | 1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,0,1,1,1,1,1,0,1
38 | 1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,1,0,0,0,1,1,1
39 | 1,1,0,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0
40 | 1,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0
41 | 1,1,0,1,1,0,0,0,1,1,1,0,0,1,1,1,1,0,0,1,1,0,0
42 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
43 | 0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0
44 | 0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0
45 | 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
46 | 0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0
47 | 0,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1
48 | 0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,0,0
49 | 0,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0
50 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
51 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0
52 | 0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,0,1
53 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
54 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
55 | 0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0
56 | 0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1
57 | 0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
58 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0
59 | 0,1,1,1,0,1,0,1,1,1,1,1,0,0,1,0,1,0,0,1,0,1,0
60 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
61 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
62 | 0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0
63 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
64 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
65 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
66 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
67 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
68 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
69 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
70 | 0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
71 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
72 | 0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0
73 | 0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0
74 | 0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0
75 | 0,1,0,1,0,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,1,0
76 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
77 | 0,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0
78 | 0,1,0,0,0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,1,1,0,0
79 | 0,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0
80 | 0,0,0,1,1,0,0,1,0,0,0,0,1,1,1,0,0,0,0,0,0,1,1
81 | 0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
--------------------------------------------------------------------------------
/datasets/banknote_Test.csv:
--------------------------------------------------------------------------------
1 | x0 x1 x2 x3 y
2 | -2.218300 -1.254000 2.998600 0.363780 1.000000
3 | -1.804600 -6.814100 6.701900 1.168100 1.000000
4 | -4.577000 3.451500 0.667190 -0.947420 1.000000
5 | 2.716100 -4.200600 4.191400 0.169810 -1.000000
6 | 3.890500 -2.152100 2.630200 1.104700 -1.000000
7 | 0.210840 9.435900 -0.094543 -1.859000 -1.000000
8 | -3.127300 -7.112100 11.389700 -0.083634 1.000000
9 | 3.235100 9.647000 -3.207400 -2.594800 -1.000000
10 | -0.218880 -2.203800 -0.095400 0.564210 1.000000
11 | -1.080200 2.199600 -2.586200 -1.275900 1.000000
12 | -2.596100 -9.349000 9.794200 -0.280180 1.000000
13 | 2.522700 2.236900 2.723600 0.794380 -1.000000
14 | 3.896200 -4.790400 3.395400 -0.537510 -1.000000
15 | -0.643260 2.474800 -2.945200 -1.027600 1.000000
16 | -2.343000 12.951600 3.328500 -5.942600 -1.000000
17 | 3.032900 2.294800 2.113500 0.350840 -1.000000
18 | -1.940900 -8.684800 9.155000 0.940490 1.000000
19 | -1.758900 -6.462400 8.477300 0.319810 1.000000
20 | 3.627700 0.982900 0.688610 0.634030 -1.000000
21 | -0.450620 -1.367800 7.085800 -0.403030 -1.000000
22 | 0.518000 0.258650 -0.840850 0.961180 1.000000
23 | 3.230300 7.838400 -3.534800 -1.215100 -1.000000
24 | 0.607310 3.954400 -4.772000 -4.485300 1.000000
25 | 0.004054 0.629050 -0.641210 0.758170 1.000000
26 | 0.740540 0.366250 2.199200 0.484030 -1.000000
27 | 5.032000 8.202600 -2.625600 -1.034100 -1.000000
28 | -3.571300 -12.492200 14.888100 -0.470270 1.000000
29 | -2.640600 -4.415900 5.983000 -0.139240 1.000000
30 | 2.092200 -6.810000 8.463600 -0.602160 -1.000000
31 | 2.005100 -6.863800 8.132000 -0.240100 -1.000000
32 | -2.575400 -5.657400 6.103000 0.652140 1.000000
33 | -1.682000 -6.812100 7.139800 1.332300 1.000000
34 | 1.899400 0.974620 4.226500 0.813770 -1.000000
35 | -3.917200 2.665200 0.788860 -0.781900 1.000000
36 | -0.062025 6.197500 1.099000 -1.131000 -1.000000
37 | -2.054500 -10.867900 9.492600 -1.411600 1.000000
38 | 3.205100 8.688900 -2.903300 -0.781900 -1.000000
39 | 3.459100 11.112000 -4.203900 -5.093100 -1.000000
40 | 0.660180 10.387800 -1.402900 -3.915100 -1.000000
41 | 1.028400 9.767000 -1.368700 -1.785300 -1.000000
42 | 0.268770 4.987000 -5.150800 -6.391300 1.000000
43 | -1.139100 1.812700 6.914400 0.701270 -1.000000
44 | -0.553550 -7.923300 6.715600 0.743940 1.000000
45 | 0.209770 -0.461460 7.726700 0.909460 -1.000000
46 | -4.746200 3.120500 1.075000 -1.296600 1.000000
47 | -0.240370 -1.783700 2.135000 1.241800 1.000000
48 | 1.304900 -0.155210 6.491100 -0.753460 -1.000000
49 | 4.570700 7.209400 -3.279400 -1.494400 -1.000000
50 | 2.829700 6.348500 -0.735460 -0.586650 -1.000000
51 | 4.827800 7.759800 -2.449100 -1.221600 -1.000000
52 | -2.004200 -9.367600 9.333300 -0.103030 1.000000
53 | -0.814790 -5.738100 4.391900 0.321100 1.000000
54 | -1.557200 -9.880800 8.108800 -1.080600 1.000000
55 | 3.971900 1.036700 0.759730 1.001300 -1.000000
56 | -3.579800 0.459370 2.345700 -0.457340 1.000000
57 | -6.367900 8.010200 0.424700 -3.220700 1.000000
58 | -1.769700 3.432900 -1.214400 -2.378900 1.000000
59 | 2.400800 9.359300 -3.356500 -3.352600 -1.000000
60 | 3.966000 3.921300 0.705740 0.336620 -1.000000
61 | -0.782890 11.360300 -0.376440 -7.049500 -1.000000
62 | 0.539360 3.894400 -4.816600 -4.341800 1.000000
63 | 3.941400 -3.290200 3.167400 1.086600 -1.000000
64 | -1.878200 -6.586500 4.848600 -0.021566 1.000000
65 | -1.706400 3.308800 -2.282900 -2.197800 1.000000
66 | -3.380000 -0.707700 2.532500 0.718080 1.000000
67 | -3.355300 0.355910 2.647300 -0.378460 1.000000
68 | 0.541500 6.031900 1.682500 -0.461220 -1.000000
69 | -2.096200 -7.105900 6.618800 -0.337080 1.000000
70 | -2.217300 1.467100 -0.726890 -1.172400 1.000000
71 | -3.000000 -9.156600 9.576600 -0.730180 1.000000
72 | -1.532200 -5.096600 6.677900 0.174980 1.000000
73 | -0.981930 2.795600 -1.234100 -1.566800 1.000000
74 | 1.545600 8.548200 0.418700 -2.178400 -1.000000
75 | -1.727900 -6.841000 8.949400 0.680580 1.000000
76 | 3.108800 3.112200 0.808570 0.433600 -1.000000
77 | -4.676500 -5.663600 10.969000 -0.334490 1.000000
78 | -2.726400 3.921300 -0.492120 -3.637100 1.000000
79 | -1.945800 11.221700 1.907900 -3.440500 -1.000000
80 | -5.301200 7.391500 0.029699 -7.398700 1.000000
81 | 3.357700 -4.306200 6.024100 0.182740 -1.000000
82 | 2.852100 9.171000 -3.646100 -1.204700 -1.000000
83 | 3.779100 2.576200 1.309800 0.565500 -1.000000
84 | 3.183600 7.232100 -1.071300 -2.590900 -1.000000
85 | -1.663700 3.288100 -2.270100 -2.222400 1.000000
86 | 0.872560 9.293100 -0.784300 -2.197800 -1.000000
87 | -2.063100 -1.514700 1.219000 0.445240 1.000000
88 | 4.068000 -2.936300 2.199200 0.500840 -1.000000
89 | 0.265170 2.406600 -2.841600 -0.599580 1.000000
90 | -2.714300 11.453500 2.109200 -3.962900 -1.000000
91 | 3.491600 8.570900 -3.032600 -0.591820 -1.000000
92 | -1.522000 -6.638300 5.749100 -0.106910 1.000000
93 | -1.693600 2.785200 -2.183500 -1.927600 1.000000
94 | -1.797600 -6.768600 6.675300 0.899120 1.000000
95 | 2.668200 10.216000 -3.441400 -4.006900 -1.000000
96 | 0.335650 6.836900 0.697180 -0.556910 -1.000000
97 | 1.749600 -0.175900 5.182700 1.292200 -1.000000
98 | 5.045200 3.896400 -1.430400 0.862910 -1.000000
99 | 4.042200 -4.391000 4.746600 1.137000 -1.000000
100 | -5.873000 9.175200 -0.274480 -6.042200 1.000000
101 | -1.284600 3.271500 -1.767100 -3.260800 1.000000
102 | -1.399500 -1.916200 2.515400 0.599120 1.000000
103 | 3.525100 0.720100 1.692800 0.644380 -1.000000
104 | -1.427500 11.879700 0.416130 -6.997800 -1.000000
105 | 4.885100 1.599500 -0.000291 1.640100 -1.000000
106 | 2.313600 10.665100 -3.528800 -4.767200 -1.000000
107 | 1.787500 4.780000 -5.136200 -3.236200 1.000000
108 | -0.278000 8.188100 -3.133800 -2.527600 -1.000000
109 | -0.690780 -0.500770 -0.354170 0.474980 1.000000
110 | 1.698800 2.909400 2.904400 0.110330 -1.000000
111 | 0.947320 -0.571130 7.190300 -0.675870 -1.000000
112 | 0.570600 -0.024800 1.242100 -0.562100 -1.000000
113 | -2.367500 -0.436630 1.692000 -0.430180 1.000000
114 | 2.448600 -6.317500 7.963200 0.206020 -1.000000
115 | -1.701500 -0.010356 -0.993370 -0.531040 1.000000
116 | 2.229000 9.632500 -3.112300 -2.716400 -1.000000
117 | 1.219800 2.098200 -3.195400 0.128430 1.000000
118 | 0.964410 5.839500 2.323500 0.066365 -1.000000
119 | -3.460500 2.690100 0.161650 -1.022400 1.000000
120 | -1.094100 2.307200 -2.523700 -1.445300 1.000000
121 | -3.816700 5.140100 -0.650630 -5.430600 1.000000
122 | 2.401200 1.622300 3.031200 0.716790 -1.000000
123 | 1.551400 3.801300 -4.914300 -3.748300 1.000000
124 | -1.224400 1.748500 -1.480100 -1.418100 1.000000
125 | -3.848300 -12.804700 15.682400 -1.281000 1.000000
126 | 0.030219 -1.051200 1.402400 0.773690 1.000000
127 | 1.567300 7.927400 -0.056842 -2.169400 -1.000000
128 | -3.929700 -6.081600 10.095800 -1.014700 1.000000
129 | 0.123260 8.984800 -0.935100 -2.433200 -1.000000
130 | 2.015300 1.847900 3.137500 0.428430 -1.000000
131 | -0.295100 9.048900 -0.527250 -2.078900 -1.000000
132 | 4.992300 7.865300 -2.351500 -0.719840 -1.000000
133 | 2.419600 6.466500 -0.756880 0.228000 -1.000000
134 | 3.858400 0.784250 1.103300 1.700800 -1.000000
135 | -0.835350 0.804940 -1.641100 -0.192250 1.000000
136 | 0.913150 3.337700 -4.055700 -1.674100 1.000000
137 | 5.061700 -0.357990 0.446980 0.998680 -1.000000
138 | -6.523500 9.601400 -0.253920 -6.964200 1.000000
139 | 0.190810 9.129700 -3.725000 -5.822400 -1.000000
140 | -0.280150 3.072900 -3.385700 -2.915500 1.000000
141 | -1.803000 11.881800 2.045800 -5.272800 -1.000000
142 | -0.735100 1.736100 -1.493800 -1.158200 1.000000
143 | -0.620430 0.558700 -0.385870 -0.664230 1.000000
144 | 3.466300 1.111200 1.742500 1.338800 -1.000000
145 | 2.736500 -5.032500 6.660800 -0.578890 -1.000000
146 | 5.438000 9.466900 -4.941700 -3.920200 -1.000000
147 | -3.920400 4.072300 -0.236780 -2.115100 1.000000
148 | -1.839100 -9.088300 9.241600 -0.104320 1.000000
149 | 1.915700 6.081600 0.237050 -2.011600 -1.000000
150 | 2.567800 3.513600 0.614060 -0.406910 -1.000000
151 | 2.688100 6.019500 -0.466410 -0.692680 -1.000000
152 | -2.280400 -0.306260 1.334700 1.376300 1.000000
153 | 2.259600 -0.033118 4.735500 -0.277600 -1.000000
154 | 2.923300 6.046400 -0.111680 -0.586650 -1.000000
155 | -0.965110 9.411100 1.730500 -4.862900 -1.000000
156 | -4.142900 2.774900 0.682610 -0.719840 1.000000
157 | 0.062525 2.930100 -3.546700 -2.673700 1.000000
158 | 0.318030 -0.993260 1.094700 0.886190 1.000000
159 | -0.779950 3.232200 -3.282000 -3.100400 1.000000
160 | -1.180400 11.509300 0.155650 -6.819400 -1.000000
161 | -2.664900 -12.813000 12.668900 -1.908200 1.000000
162 | 5.886200 5.874700 -2.816700 -0.300870 -1.000000
163 | 2.550300 -4.951800 6.372900 -0.415960 -1.000000
164 | 1.489600 3.428800 -4.030900 -1.425900 1.000000
165 | -3.591600 -6.228500 10.238900 -1.154300 1.000000
166 | 2.867200 10.000800 -3.204900 -3.109500 -1.000000
167 | 0.895120 4.773800 -4.843100 -5.590900 1.000000
168 | 1.270600 8.035000 -0.196510 -2.188800 -1.000000
169 | 4.165400 -3.449500 3.643000 1.087900 -1.000000
170 | -4.293200 3.341900 0.772580 -0.997850 1.000000
171 | 3.464700 -3.917200 3.974600 0.361190 -1.000000
172 | -3.115800 -8.628900 10.440300 0.971530 1.000000
173 | -1.130600 1.845800 -1.357500 -1.380600 1.000000
174 | -4.017300 -8.312300 12.454700 -1.437500 1.000000
175 | 3.779800 -3.310900 2.649100 0.066365 -1.000000
176 | -4.124400 3.790900 -0.653200 -4.180200 1.000000
177 | 3.775800 7.178300 -1.519500 0.401280 -1.000000
178 | 1.733100 3.954400 -4.741200 -2.501700 1.000000
179 | 2.614000 8.008100 -3.725800 -1.306900 -1.000000
180 | -3.993400 5.833300 0.547230 -4.937900 1.000000
181 | -5.067600 -5.187700 10.426600 -0.867250 1.000000
182 | -0.743240 -0.329020 -0.427850 0.233170 1.000000
183 | -2.647900 10.137400 -1.331000 -5.470700 -1.000000
184 | 5.591000 10.464300 -4.383900 -4.337900 -1.000000
185 | -0.660080 -3.226000 3.805800 1.183600 1.000000
186 | -1.118800 3.335700 -1.345500 -1.957300 1.000000
187 | 2.729600 2.870100 0.511240 0.509900 -1.000000
188 | -0.172960 -1.181600 1.381800 0.733600 1.000000
189 | 0.758960 0.291760 -1.650600 0.838340 1.000000
190 | 2.423500 9.533200 -3.078900 -2.774600 -1.000000
191 | 4.433800 9.887000 -4.679500 -3.748300 -1.000000
192 | 1.323400 3.296400 0.236200 -0.119840 -1.000000
193 | -2.065900 1.051200 -0.462980 -1.097400 1.000000
194 | 3.535800 6.708600 -0.818570 0.478860 -1.000000
195 | -2.761100 -10.509900 9.023900 -1.954700 1.000000
196 | 1.518000 5.694600 0.094818 -0.026738 -1.000000
197 | 1.116600 8.649600 -0.962520 -1.811200 -1.000000
198 | -4.288700 -7.863300 11.838700 -1.897800 1.000000
199 | 2.560500 9.268300 -3.591300 -1.356000 -1.000000
200 | 5.275600 0.138630 0.121380 1.143500 -1.000000
201 | -0.206200 9.220700 -3.704400 -6.810300 -1.000000
202 | 1.526800 -5.587100 8.656400 -1.722000 -1.000000
203 | -3.750300 -13.458600 17.593200 -2.777100 1.000000
204 | 1.274600 8.817200 -1.532300 -1.795700 -1.000000
205 | 0.896060 10.547100 -1.417500 -4.032700 -1.000000
206 | 0.184800 6.507900 2.013300 -0.872420 -1.000000
207 | 4.071300 10.402300 -4.172200 -4.758200 -1.000000
208 | 0.406140 1.349200 -1.450100 -0.559490 1.000000
209 | -6.153600 7.929500 0.616630 -3.264600 1.000000
210 | -1.834800 11.033400 3.186300 -4.888800 -1.000000
211 | -0.490810 2.845200 -3.643600 -3.100400 1.000000
212 | -1.651400 -8.498500 9.112200 1.237900 1.000000
213 | -1.074400 -6.311300 5.355000 0.804720 1.000000
214 | 0.705700 -5.498100 8.336800 -2.871500 -1.000000
215 | -1.706300 2.795600 -2.378000 -2.349100 1.000000
216 | 2.739100 7.401800 0.071684 -2.530200 -1.000000
217 | 1.742500 3.683300 -4.012900 -1.720700 1.000000
218 | -0.595870 2.481100 -2.867300 -0.898280 1.000000
219 | 1.645000 7.861200 -0.875980 -3.556900 -1.000000
220 | -0.526450 -0.248320 -0.456130 0.419380 1.000000
221 | -1.306600 0.252440 0.762300 1.775800 1.000000
222 | 1.105000 7.443200 0.410990 -3.033200 -1.000000
223 | -4.839200 6.675500 -0.242780 -6.577500 1.000000
224 | -0.206200 9.220700 -3.704400 -6.810300 -1.000000
225 | 0.882980 0.660090 6.009600 -0.432770 -1.000000
226 | -3.718100 -8.508900 12.363000 -0.955180 1.000000
227 | 3.884000 10.027700 -3.929800 -4.081900 -1.000000
228 | 1.642600 3.014900 0.228490 -0.147000 -1.000000
229 | 3.692200 -3.958500 4.343900 1.351700 -1.000000
230 | 5.745600 10.180800 -4.785700 -4.336600 -1.000000
231 | -2.702800 1.632700 0.835980 -0.091393 1.000000
232 | 0.051979 7.052100 -2.054100 -3.150800 -1.000000
233 | 3.271800 1.783700 2.116100 0.613340 -1.000000
234 | -2.456100 -4.556600 6.453400 -0.056479 1.000000
235 | 3.300400 7.081100 -1.325800 0.222830 -1.000000
236 | 1.341900 -4.422100 8.090000 -1.734900 -1.000000
237 | 1.131500 7.921200 1.093000 -2.844400 -1.000000
238 | 1.334900 6.118900 0.464970 0.498260 -1.000000
239 | -1.955100 -6.975600 5.538300 -0.128890 1.000000
240 | -1.788600 -6.348600 5.615400 0.425840 1.000000
241 | 3.431200 6.263700 -1.951300 -0.361650 -1.000000
242 | -6.200300 8.680600 0.009134 -3.703000 1.000000
243 | -2.483500 -7.449400 6.896400 -0.644840 1.000000
244 | -2.434900 -9.249700 8.992200 -0.500010 1.000000
245 | 4.296900 7.617000 -2.387400 -0.961640 -1.000000
246 | -3.019300 1.777500 0.737450 -0.453460 1.000000
247 | 1.603200 -4.786300 8.519300 -2.120300 -1.000000
248 | 0.343400 0.124150 -0.287330 0.146540 1.000000
249 | 3.989900 -2.706600 2.394600 0.862910 -1.000000
250 | -0.954030 1.982400 -2.316300 -1.195700 1.000000
251 | -0.826010 2.961100 -1.286400 -1.464700 1.000000
252 | -1.131300 1.903700 7.533900 1.022000 -1.000000
253 | -0.774610 -1.876800 2.402300 1.131900 1.000000
254 | -4.842600 -4.993200 10.405200 -0.531040 1.000000
255 | -0.247450 1.936800 -2.469700 -0.805180 1.000000
256 | -1.394600 2.313400 -0.444990 -1.490500 1.000000
257 | -3.601200 -6.538900 10.523400 -0.489670 1.000000
258 | -0.348100 -0.386960 -0.478410 0.626270 1.000000
259 | -2.460400 12.730200 0.917380 -7.641800 -1.000000
260 | 2.172100 -0.738740 5.467200 -0.723710 -1.000000
261 | -6.959900 8.993100 0.218200 -4.572000 1.000000
262 | 1.821600 -6.474800 8.051400 -0.418550 -1.000000
263 | 5.203200 3.511600 -1.253800 1.012900 -1.000000
264 | -1.507500 1.922400 7.146600 0.891360 -1.000000
265 | 2.796100 2.121000 1.838500 0.383170 -1.000000
266 | -0.492410 0.893920 -1.628300 -0.568540 1.000000
267 | 3.820000 10.927900 -4.011200 -5.028400 -1.000000
268 | -1.992200 11.654200 2.654200 -5.210700 -1.000000
269 | 3.269700 -4.341400 3.688400 -0.298290 -1.000000
270 | 0.935840 8.885500 -1.683100 -1.659900 -1.000000
271 | -2.659000 -1.605800 1.364700 0.164640 1.000000
272 | 2.289300 3.733000 0.631200 -0.397860 -1.000000
273 | 2.252600 9.963600 -3.174900 -2.994400 -1.000000
274 | 3.811700 10.145700 -4.046300 -4.562900 -1.000000
275 | 4.002600 -3.594300 3.557300 0.268090 -1.000000
276 | 1.563100 0.895990 -1.970200 0.654720 1.000000
277 | 3.498500 3.163900 0.226770 -0.165100 -1.000000
278 | 1.230900 3.892300 -4.827700 -4.006900 1.000000
279 | 4.636100 -2.661100 2.835800 1.199100 -1.000000
280 | -4.855400 -5.903700 10.981800 -0.821990 1.000000
281 | 3.093400 -2.917700 2.223200 0.222830 -1.000000
282 | -1.835600 -6.756200 5.058500 -0.550440 1.000000
283 | -1.437500 -1.862400 4.026000 0.551270 1.000000
284 | 0.117390 6.276100 -1.549500 -2.474600 -1.000000
285 | 4.044600 11.174100 -4.358200 -4.740100 -1.000000
286 | 1.910500 8.871000 -2.338600 -0.756040 -1.000000
287 | -1.732200 -9.282800 7.719000 -1.716800 1.000000
288 | 1.774800 -0.769780 5.585400 1.303900 -1.000000
289 | -4.477900 7.370800 -0.312180 -6.775400 1.000000
290 | -1.029200 -6.387900 5.525500 0.799550 1.000000
291 | -0.776880 0.130360 -0.031137 -0.353890 1.000000
292 | -0.878740 -2.212100 -0.051701 0.099985 1.000000
293 | -2.772300 3.277700 -0.935100 -3.145700 1.000000
294 | -4.499600 3.428800 0.562650 -1.167200 1.000000
295 | 1.351800 1.059500 -2.343700 0.399980 1.000000
296 | 1.011700 0.902200 -2.350600 0.427140 1.000000
297 | -1.568100 -7.244600 6.553700 -0.127600 1.000000
298 | -2.537300 -6.959000 8.805400 1.528900 1.000000
299 | -1.410600 -7.108000 5.645400 0.313350 1.000000
300 | -0.614420 -0.091058 -0.318180 0.502140 1.000000
301 | 3.525700 1.282900 1.927600 1.799100 -1.000000
302 | 4.175700 10.261500 -3.855200 -4.305600 -1.000000
303 | 3.245000 6.630000 -0.634350 0.869370 -1.000000
304 | 4.429500 -2.350700 1.704800 0.909460 -1.000000
305 | -1.786000 -8.115700 7.085800 -1.211200 1.000000
306 | 3.819700 8.995100 -4.383000 -4.032700 -1.000000
307 | 4.393700 0.357980 2.041600 1.200400 -1.000000
308 | 4.063200 3.584000 0.725450 0.394810 -1.000000
309 | -0.126900 -1.150500 -0.951380 0.578430 1.000000
310 | 1.569100 6.346500 -0.182800 -2.409900 -1.000000
311 | -1.818700 -9.036600 9.016200 -0.122430 1.000000
312 | -2.462100 2.764500 -0.625780 -2.857300 1.000000
313 | -2.215300 11.962500 0.078538 -7.785300 -1.000000
314 | 5.807000 5.009700 -2.238400 0.438780 -1.000000
315 | 2.203400 5.994700 0.530090 0.849980 -1.000000
316 | -2.570100 -6.845200 8.999900 2.135300 1.000000
317 | -2.861900 4.519300 -0.581230 -4.262900 1.000000
318 | -2.628600 0.180020 1.795600 0.972820 1.000000
319 | -2.252700 11.532100 2.589900 -3.273700 -1.000000
320 | 3.466700 -4.072400 4.288200 1.541800 -1.000000
321 | 2.694600 6.797600 -0.403010 0.449120 -1.000000
322 | 4.012700 10.147700 -3.936600 -4.072800 -1.000000
323 | -1.600100 -9.582800 9.404400 0.081882 1.000000
324 | -0.025314 -0.173830 -0.113390 1.219800 1.000000
325 | 4.668900 1.309800 0.055404 1.909000 -1.000000
326 | 0.600500 1.932700 -3.288800 -0.324150 1.000000
327 | 6.563300 9.818700 -4.411300 -3.225800 -1.000000
328 | -3.793000 -12.709500 12.795700 -2.825000 1.000000
329 | -4.387600 -7.726700 11.965500 -1.454300 1.000000
330 | 3.577000 2.400400 1.890800 0.732310 -1.000000
331 | -0.166820 5.897400 0.498390 -0.700440 -1.000000
332 | -1.759900 11.921100 2.675600 -3.324100 -1.000000
333 | 2.428700 9.382100 -3.247700 -1.454300 -1.000000
334 | 2.555900 3.360500 2.032100 0.268090 -1.000000
335 | -1.421700 11.654200 -0.057699 -7.102500 -1.000000
336 | 4.245800 1.198100 0.666330 0.946960 -1.000000
337 | 3.884600 -3.033600 2.533400 0.202140 -1.000000
338 | 3.607700 6.857600 -1.162200 0.282310 -1.000000
339 | -1.409400 -2.125200 -0.103970 -0.192250 1.000000
340 | 0.559390 -0.310400 0.183070 0.446530 1.000000
341 | -0.121960 8.806800 0.945660 -4.226700 -1.000000
342 | 0.552980 -3.461900 1.704800 1.100800 1.000000
343 | -3.855200 3.521900 -0.384150 -3.860800 1.000000
344 | 2.188100 2.735600 1.327800 -0.183200 -1.000000
345 | 0.815830 4.840000 -5.261300 -6.082300 1.000000
346 | -1.585100 -2.156200 1.708200 0.901700 1.000000
347 | -0.873400 -0.033118 -0.201650 0.557740 1.000000
348 | -2.741900 11.403800 2.539400 -5.579300 -1.000000
349 | 0.836250 1.107100 -2.470600 -0.062945 1.000000
350 | 3.936400 10.588500 -3.725000 -4.313300 -1.000000
351 | 4.711400 2.075500 -0.270200 1.237900 -1.000000
352 | -2.298700 -5.227000 5.630000 0.917220 1.000000
353 | 2.533100 2.913500 -0.822000 -0.122430 -1.000000
354 | -1.821500 2.752100 -0.722610 -2.353000 1.000000
355 | 4.718100 10.015300 -3.948600 -3.858200 -1.000000
356 | -3.882600 4.898000 -0.923110 -5.080100 1.000000
357 | -0.106480 -0.767710 7.757500 0.641790 -1.000000
358 | -0.609750 -4.002000 1.847100 0.601700 1.000000
359 | 1.879900 2.470700 2.493100 0.376710 -1.000000
360 | 0.876030 6.814100 0.841980 -0.171560 -1.000000
361 | 3.534000 9.361400 -3.631600 -1.246100 -1.000000
362 | 4.364000 -3.103900 2.375700 0.785320 -1.000000
363 | 1.181100 8.384700 -2.056700 -0.903450 -1.000000
364 | 1.155800 6.400300 1.550600 0.696100 -1.000000
365 | 2.017700 1.798200 -2.958100 0.209900 1.000000
366 | 3.929200 -2.915600 2.212900 0.308170 -1.000000
367 | 1.363800 -4.775900 8.418200 -1.883600 -1.000000
368 | 1.507700 1.959600 -3.058400 -0.122430 1.000000
369 | 0.390120 -0.142790 -0.031994 0.350840 1.000000
370 | -0.942550 0.039307 -0.241920 0.315930 1.000000
371 | 0.600500 0.999450 -2.212600 0.097399 1.000000
372 | 2.016500 -0.252460 5.170700 1.076300 -1.000000
373 | -2.075900 10.822300 2.643900 -4.837000 -1.000000
374 |
--------------------------------------------------------------------------------
/datasets/data_lawsuit.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bdsul/grape/4b1d765867d3079feaba4cce028450546fcf6760/datasets/data_lawsuit.mat
--------------------------------------------------------------------------------
/datasets/haberman.csv:
--------------------------------------------------------------------------------
1 | d1,d2,d3
2 | 30,64,1
3 | 30,62,3
4 | 30,65,0
5 | 31,59,2
6 | 31,65,4
7 | 33,58,10
8 | 33,60,0
9 | 34,58,30
10 | 34,60,1
11 | 34,61,10
12 | 34,67,7
13 | 34,60,0
14 | 35,64,13
15 | 35,63,0
16 | 36,60,1
17 | 36,69,0
18 | 37,60,0
19 | 37,63,0
20 | 37,58,0
21 | 37,59,6
22 | 37,60,15
23 | 37,63,0
24 | 38,59,2
25 | 38,60,0
26 | 38,60,0
27 | 38,62,3
28 | 38,64,1
29 | 38,66,0
30 | 38,66,11
31 | 38,60,1
32 | 38,67,5
33 | 39,63,0
34 | 39,67,0
35 | 39,58,0
36 | 39,59,2
37 | 39,63,4
38 | 40,58,2
39 | 40,58,0
40 | 40,65,0
41 | 41,58,0
42 | 41,59,8
43 | 41,59,0
44 | 41,64,0
45 | 41,69,8
46 | 41,65,0
47 | 41,65,0
48 | 42,58,0
49 | 42,60,1
50 | 42,59,2
51 | 42,61,4
52 | 42,62,20
53 | 42,65,0
54 | 42,63,1
55 | 43,63,14
56 | 43,64,2
57 | 43,64,3
58 | 43,60,0
59 | 43,63,2
60 | 43,65,0
61 | 43,66,4
62 | 44,61,0
63 | 44,63,1
64 | 44,61,0
65 | 44,67,16
66 | 45,60,0
67 | 45,67,0
68 | 45,59,14
69 | 45,64,0
70 | 45,68,0
71 | 45,67,1
72 | 46,62,0
73 | 46,58,3
74 | 46,63,0
75 | 47,61,0
76 | 47,63,6
77 | 47,66,0
78 | 47,67,0
79 | 47,58,3
80 | 47,60,4
81 | 47,68,4
82 | 47,66,12
83 | 48,61,8
84 | 48,62,2
85 | 48,64,0
86 | 48,66,0
87 | 49,61,1
88 | 49,62,0
89 | 49,66,0
90 | 49,60,1
91 | 49,62,1
92 | 49,63,3
93 | 49,61,0
94 | 49,67,1
95 | 50,59,0
96 | 50,61,6
97 | 50,61,0
98 | 50,63,1
99 | 50,58,1
100 | 50,59,2
101 | 50,61,0
102 | 50,64,0
103 | 50,65,4
104 | 50,66,1
105 | 51,64,7
106 | 51,59,1
107 | 51,65,0
108 | 51,66,1
109 | 52,61,0
110 | 52,63,4
111 | 52,69,0
112 | 52,60,4
113 | 52,60,5
114 | 52,62,0
115 | 52,62,1
116 | 52,64,0
117 | 52,65,0
118 | 52,68,0
119 | 53,58,1
120 | 53,60,1
121 | 53,60,2
122 | 53,61,1
123 | 53,63,0
124 | 54,59,7
125 | 54,60,3
126 | 54,66,0
127 | 54,67,46
128 | 54,62,0
129 | 54,69,7
130 | 54,63,19
131 | 54,58,1
132 | 54,62,0
133 | 55,58,1
134 | 55,58,0
135 | 55,58,1
136 | 55,66,18
137 | 55,66,0
138 | 55,69,3
139 | 55,69,22
140 | 55,67,1
141 | 56,60,0
142 | 56,66,2
143 | 56,66,1
144 | 56,67,0
145 | 56,60,0
146 | 57,64,9
147 | 57,69,0
148 | 57,61,0
149 | 57,62,0
150 | 57,63,0
151 | 57,64,0
152 | 57,64,0
153 | 57,67,0
154 | 58,59,0
155 | 58,60,3
156 | 58,61,1
157 | 58,67,0
158 | 58,58,0
159 | 58,58,3
160 | 58,61,2
161 | 59,60,0
162 | 59,63,0
163 | 59,64,1
164 | 59,64,4
165 | 59,64,0
166 | 59,64,7
167 | 59,67,3
168 | 60,61,1
169 | 60,67,2
170 | 60,61,25
171 | 60,64,0
172 | 61,59,0
173 | 61,59,0
174 | 61,64,0
175 | 61,65,8
176 | 61,68,0
177 | 61,59,0
178 | 62,62,6
179 | 62,66,0
180 | 62,66,0
181 | 62,58,0
182 | 63,61,0
183 | 63,62,0
184 | 63,63,0
185 | 63,63,0
186 | 63,66,0
187 | 63,61,9
188 | 63,61,28
189 | 64,58,0
190 | 64,65,22
191 | 64,66,0
192 | 64,61,0
193 | 64,68,0
194 | 65,58,0
195 | 65,64,0
196 | 65,67,0
197 | 65,59,2
198 | 65,64,0
199 | 65,67,1
200 | 66,58,0
201 | 66,58,1
202 | 66,68,0
203 | 67,66,0
204 | 67,66,0
205 | 67,61,0
206 | 67,65,0
207 | 68,67,0
208 | 68,68,0
209 | 69,60,0
210 | 69,65,0
211 | 69,66,0
212 | 70,66,14
213 | 70,67,0
214 | 70,68,0
215 | 70,59,8
216 | 70,63,0
217 | 71,68,2
218 | 72,58,0
219 | 72,64,0
220 | 72,67,3
221 | 73,62,0
222 | 73,68,0
223 | 74,63,0
224 | 75,62,1
225 | 76,67,0
226 | 77,65,3
227 | 34,59,0
228 | 34,66,9
229 | 38,69,21
230 | 39,66,0
231 | 41,60,23
232 | 41,64,0
233 | 41,67,0
234 | 42,69,1
235 | 42,59,0
236 | 43,58,52
237 | 43,59,2
238 | 43,64,0
239 | 43,64,0
240 | 44,64,6
241 | 44,58,9
242 | 44,63,19
243 | 45,65,6
244 | 45,66,0
245 | 45,67,1
246 | 46,58,2
247 | 46,69,3
248 | 46,62,5
249 | 46,65,20
250 | 47,63,23
251 | 47,62,0
252 | 47,65,0
253 | 48,58,11
254 | 48,58,11
255 | 48,67,7
256 | 49,63,0
257 | 49,64,10
258 | 50,63,13
259 | 50,64,0
260 | 51,59,13
261 | 51,59,3
262 | 52,69,3
263 | 52,59,2
264 | 52,62,3
265 | 52,66,4
266 | 53,58,4
267 | 53,65,1
268 | 53,59,3
269 | 53,60,9
270 | 53,63,24
271 | 53,65,12
272 | 54,60,11
273 | 54,65,23
274 | 54,65,5
275 | 54,68,7
276 | 55,63,6
277 | 55,68,15
278 | 56,65,9
279 | 56,66,3
280 | 57,61,5
281 | 57,62,14
282 | 57,64,1
283 | 59,62,35
284 | 60,59,17
285 | 60,65,0
286 | 61,62,5
287 | 61,65,0
288 | 61,68,1
289 | 62,59,13
290 | 62,58,0
291 | 62,65,19
292 | 63,60,1
293 | 65,58,0
294 | 65,61,2
295 | 65,62,22
296 | 65,66,15
297 | 66,58,0
298 | 66,61,13
299 | 67,64,8
300 | 67,63,1
301 | 69,67,8
302 | 70,58,0
303 | 70,58,4
304 | 72,63,0
305 | 74,65,3
306 | 78,65,1
307 | 83,58,2
308 |
--------------------------------------------------------------------------------
/datasets/haberman_labels.csv:
--------------------------------------------------------------------------------
1 | class
2 | 1
3 | 1
4 | 1
5 | 1
6 | 1
7 | 1
8 | 1
9 | 1
10 | 1
11 | 1
12 | 1
13 | 1
14 | 1
15 | 1
16 | 1
17 | 1
18 | 1
19 | 1
20 | 1
21 | 1
22 | 1
23 | 1
24 | 1
25 | 1
26 | 1
27 | 1
28 | 1
29 | 1
30 | 1
31 | 1
32 | 1
33 | 1
34 | 1
35 | 1
36 | 1
37 | 1
38 | 1
39 | 1
40 | 1
41 | 1
42 | 1
43 | 1
44 | 1
45 | 1
46 | 1
47 | 1
48 | 1
49 | 1
50 | 1
51 | 1
52 | 1
53 | 1
54 | 1
55 | 1
56 | 1
57 | 1
58 | 1
59 | 1
60 | 1
61 | 1
62 | 1
63 | 1
64 | 1
65 | 1
66 | 1
67 | 1
68 | 1
69 | 1
70 | 1
71 | 1
72 | 1
73 | 1
74 | 1
75 | 1
76 | 1
77 | 1
78 | 1
79 | 1
80 | 1
81 | 1
82 | 1
83 | 1
84 | 1
85 | 1
86 | 1
87 | 1
88 | 1
89 | 1
90 | 1
91 | 1
92 | 1
93 | 1
94 | 1
95 | 1
96 | 1
97 | 1
98 | 1
99 | 1
100 | 1
101 | 1
102 | 1
103 | 1
104 | 1
105 | 1
106 | 1
107 | 1
108 | 1
109 | 1
110 | 1
111 | 1
112 | 1
113 | 1
114 | 1
115 | 1
116 | 1
117 | 1
118 | 1
119 | 1
120 | 1
121 | 1
122 | 1
123 | 1
124 | 1
125 | 1
126 | 1
127 | 1
128 | 1
129 | 1
130 | 1
131 | 1
132 | 1
133 | 1
134 | 1
135 | 1
136 | 1
137 | 1
138 | 1
139 | 1
140 | 1
141 | 1
142 | 1
143 | 1
144 | 1
145 | 1
146 | 1
147 | 1
148 | 1
149 | 1
150 | 1
151 | 1
152 | 1
153 | 1
154 | 1
155 | 1
156 | 1
157 | 1
158 | 1
159 | 1
160 | 1
161 | 1
162 | 1
163 | 1
164 | 1
165 | 1
166 | 1
167 | 1
168 | 1
169 | 1
170 | 1
171 | 1
172 | 1
173 | 1
174 | 1
175 | 1
176 | 1
177 | 1
178 | 1
179 | 1
180 | 1
181 | 1
182 | 1
183 | 1
184 | 1
185 | 1
186 | 1
187 | 1
188 | 1
189 | 1
190 | 1
191 | 1
192 | 1
193 | 1
194 | 1
195 | 1
196 | 1
197 | 1
198 | 1
199 | 1
200 | 1
201 | 1
202 | 1
203 | 1
204 | 1
205 | 1
206 | 1
207 | 1
208 | 1
209 | 1
210 | 1
211 | 1
212 | 1
213 | 1
214 | 1
215 | 1
216 | 1
217 | 1
218 | 1
219 | 1
220 | 1
221 | 1
222 | 1
223 | 1
224 | 1
225 | 1
226 | 1
227 | 2
228 | 2
229 | 2
230 | 2
231 | 2
232 | 2
233 | 2
234 | 2
235 | 2
236 | 2
237 | 2
238 | 2
239 | 2
240 | 2
241 | 2
242 | 2
243 | 2
244 | 2
245 | 2
246 | 2
247 | 2
248 | 2
249 | 2
250 | 2
251 | 2
252 | 2
253 | 2
254 | 2
255 | 2
256 | 2
257 | 2
258 | 2
259 | 2
260 | 2
261 | 2
262 | 2
263 | 2
264 | 2
265 | 2
266 | 2
267 | 2
268 | 2
269 | 2
270 | 2
271 | 2
272 | 2
273 | 2
274 | 2
275 | 2
276 | 2
277 | 2
278 | 2
279 | 2
280 | 2
281 | 2
282 | 2
283 | 2
284 | 2
285 | 2
286 | 2
287 | 2
288 | 2
289 | 2
290 | 2
291 | 2
292 | 2
293 | 2
294 | 2
295 | 2
296 | 2
297 | 2
298 | 2
299 | 2
300 | 2
301 | 2
302 | 2
303 | 2
304 | 2
305 | 2
306 | 2
307 | 2
308 |
--------------------------------------------------------------------------------
/datasets/horse-colic.test:
--------------------------------------------------------------------------------
1 | surgery,Age,Hospital_Number,rectal_temperature,pulse,respiratory_rate,temperature_of_extremities,peripheral_pulse,mucous_membranes,capillary_refill_time,pain,peristalsis,abdominal_distension,nasogastric_tube,nasogastric_reflux,nasogastric_reflux_PH,rectal_examination,abdomen,packed_cell_volume,total_protein,abdominocentesis_appearance,abdomcentesis_total_protein,outcome,surgical_lesion,type_of_lesion1,type_of_lesion2,type_of_lesion3,cp_data
2 | 2,1,528626,38.50,54,20,?,1,2,2,3,4,1,2,2,5.90,?,2,42.00,6.30,?,?,1,2,03111,00000,00000,1
3 | 2,1,527950,37.60,48,36,?,?,1,1,?,3,?,?,?,?,?,?,44.00,6.30,1,5.00,1,2,03111,00000,00000,1
4 | 1,1,535263,37.7,44,28,?,4,3,2,5,4,4,1,1,?,3,5,45,70,3,2,1,1,03205,00000,00000,2
5 | 1,1,534523,37,56,24,3,1,4,2,4,4,3,1,1,?,?,?,35,61,3,2,3,2,02205,02208,00000,2
6 | 2,1,528926,38.00,42,12,3,?,3,1,1,?,1,?,?,?,?,2,37.00,5.80,?,?,1,2,03111,00000,00000,2
7 | 1,1,534922,?,60,40,3,?,1,1,?,4,?,3,2,?,?,5,42,72,?,?,1,1,03111,00000,00000,2
8 | 2,1,527642,38.40,80,60,3,2,2,1,3,2,1,2,2,?,1,1,54.00,6.90,?,?,1,2,00000,00000,00000,2
9 | 2,1,5279821,37.80,48,12,2,1,2,1,3,?,1,2,?,?,2,?,48.00,7.30,1,?,1,2,00000,00000,00000,1
10 | ?,1,534790,38.0,65,40,?,1,4,2,2,?,?,?,?,?,?,5,?,?,?,?,?,1,03111,03205,00000,2
11 | 2,1,5275211,37.90,45,36,3,3,3,2,2,3,1,2,1,?,3,?,33.00,5.70,3,?,1,1,02205,00000,00000,1
12 | 2,1,5278332,39.00,84,12,3,1,5,1,2,4,2,1,2,7.00,?,4,62.00,5.90,2,2.20,2,1,02208,00000,00000,1
13 | 2,1,528959,38.20,60,24,3,1,3,2,3,3,2,3,3,?,4,4,53.00,7.50,2,1.40,1,2,01124,00000,00000,1
14 | 1,1,534921,?,140,?,?,?,4,2,5,4,4,1,1,?,?,5,30,69,?,?,2,2,06112,00000,00000,2
15 | 1,1,528999,37.90,120,60,3,3,3,1,5,4,4,2,2,7.50,4,5,52.00,6.60,3,1.80,2,1,03205,00000,00000,1
16 | 2,1,528067,38.00,72,36,1,1,3,1,3,?,2,2,1,?,3,5,38.00,6.80,2,2.00,1,2,03124,00000,00000,1
17 | 2,9,5291329,38.00,92,28,1,1,2,1,1,3,2,3,?,7.20,?,?,37.00,6.10,1,1.10,1,2,00000,00000,00000,1
18 | 1,1,529478,38.30,66,30,2,3,1,1,2,4,3,3,2,8.50,4,5,37.00,6.00,?,?,1,1,05111,00000,00000,2
19 | 2,1,529991,37.50,48,24,3,1,1,1,2,1,?,1,1,?,3,2,43.00,6.00,1,2.80,1,2,07111,00000,00000,1
20 | 1,1,530033,37.50,88,20,2,3,3,1,4,3,3,?,?,?,?,?,35.00,6.40,1,?,2,1,02205,00000,00000,2
21 | 2,9,5299049,?,150,60,4,4,4,2,5,4,4,?,?,?,?,?,?,?,?,?,2,1,01400,00000,00000,2
22 | 1,1,534497,39.7,100,30,?,?,6,2,4,4,3,1,?,?,4,5,65,75,?,?,3,1,03205,00000,00000,2
23 | 1,1,528369,38.30,80,?,3,3,4,2,5,4,3,2,1,?,4,4,45.00,7.50,2,4.60,1,1,03209,00000,00000,1
24 | 2,1,530107,37.50,40,32,3,1,3,1,3,2,3,2,1,?,?,5,32.00,6.40,1,1.10,1,1,03124,00000,00000,1
25 | 1,1,530239,38.40,84,30,3,1,5,2,4,3,3,2,3,6.50,4,4,47.00,7.50,3,?,2,1,06111,00000,00000,2
26 | 1,1,530505,38.10,84,44,4,?,4,2,5,3,1,1,3,5.00,?,4,60.00,6.80,?,5.70,2,1,02209,00000,00000,1
27 | 2,1,529567,38.70,52,?,1,1,1,1,1,3,1,?,?,?,1,3,4.00,74.00,?,?,1,2,00000,00000,00000,2
28 | 2,1,529597,38.10,44,40,2,1,3,1,3,3,1,?,?,?,1,3,35.00,6.80,?,?,1,2,00000,00000,00000,2
29 | 2,1,534429,38.4,52,20,2,1,3,1,1,3,2,2,1,?,3,5,41,63,1,1,1,2,00000,00000,00000,2
30 | 1,1,529629,38.20,60,?,1,?,3,1,2,1,1,1,1,?,4,4,43.00,6.20,2,3.90,1,1,02206,00000,00000,1
31 | 2,1,528382,37.70,40,18,1,1,1,?,3,2,1,1,1,?,3,3,36.00,3.50,?,?,1,2,00400,00000,00000,2
32 | 1,1,534898,39.1,60,10,?,1,1,?,2,3,?,?,?,?,4,4,?,?,?,?,1,1,02113,00000,00000,2
33 | 2,1,529615,37.80,48,16,1,1,1,1,?,1,1,2,1,?,4,3,43.00,7.50,?,?,1,2,00000,00000,00000,2
34 | 1,1,526090,39.00,120,?,4,3,5,2,2,4,3,2,3,8.00,?,?,65.00,8.20,3,4.60,1,2,05110,00000,00000,2
35 | 1,1,529765,38.20,76,?,2,3,2,1,5,3,3,1,2,6.00,1,5,35.00,6.50,2,0.90,1,1,03205,00000,00000,1
36 | 2,1,528310,38.30,88,?,?,?,6,?,?,?,?,?,?,?,?,?,?,?,?,?,2,2,02300,00000,00000,2
37 | 1,1,529925,38.00,80,30,3,3,3,1,?,?,?,?,?,6.00,?,?,48.00,8.30,?,4.30,1,1,02111,00000,00000,2
38 | 1,1,527807,?,?,?,3,1,1,1,2,3,3,1,3,6.00,4,4,?,?,2,?,2,1,02113,00000,00000,1
39 | 1,1,5281441,37.60,40,?,1,1,1,1,1,1,1,?,?,?,1,1,?,?,2,2.10,1,1,31110,00000,00000,1
40 | 2,1,530695,37.50,44,?,1,1,1,1,3,3,2,?,?,?,?,?,45.00,5.80,2,1.40,1,2,03111,00000,00000,1
41 | 2,1,533889,38.2,42,16,1,1,3,1,1,3,1,?,?,?,1,?,35,60,1,1,1,2,00000,00000,00000,2
42 | 2,1,533815,38,56,44,3,3,3,?,?,1,1,2,1,?,4,?,47,70,2,1,1,2,00000,00000,00000,2
43 | 2,1,527664,38.30,45,20,3,3,2,2,2,4,1,2,?,?,4,?,?,?,?,?,1,2,00000,00000,00000,2
44 | 1,1,5262542,?,48,96,1,1,3,1,?,4,1,2,1,?,1,4,42.00,8.00,1,?,1,1,02208,00000,00000,2
45 | 1,1,528268,37.70,55,28,2,1,2,1,2,3,3,?,3,5.00,4,5,?,?,?,?,1,1,03209,00000,00000,2
46 | 2,1,528919,36.00,100,20,4,3,6,2,2,4,3,1,1,?,4,5,74.00,5.70,2,2.50,3,1,03205,00000,00000,1
47 | 1,1,527494,37.10,60,20,2,?,4,1,3,?,3,?,2,5.00,3,4,64.00,8.50,2,?,1,1,07111,00000,00000,1
48 | 2,1,529980,37.10,114,40,3,?,3,2,2,2,1,?,?,?,?,3,32.00,?,3,6.50,1,2,00400,00000,00000,2
49 | 1,1,533954,38.1,72,30,3,3,3,1,4,4,3,2,1,?,3,5,37,56,3,1,1,1,04206,00000,00000,2
50 | 1,1,5281092,37.00,44,12,3,1,1,2,1,1,1,?,?,?,4,2,40.00,6.70,3,8.00,1,1,02208,00000,00000,2
51 | 1,1,534686,38.6,48,20,3,1,1,1,4,3,1,?,?,?,3,?,37,75,?,?,1,1,03111,00000,00000,2
52 | 1,1,534475,?,82,72,3,1,4,1,2,3,3,?,3,?,4,4,53,65,3,2,3,1,02209,03205,00000,2
53 | 1,9,5274919,38.20,78,60,4,4,6,?,3,3,3,?,?,?,1,?,59.00,5.80,3,3.10,2,1,01400,00000,00000,1
54 | 2,1,533815,37.8,60,16,1,1,3,1,2,3,2,1,2,?,3,?,41,73,?,?,3,2,04124,00000,00000,2
55 | 1,1,534156,38.7,34,30,2,?,3,1,2,3,?,?,?,?,?,?,33,69,?,2,3,1,07113,00000,00000,2
56 | 1,1,514279,?,36,12,1,1,1,1,1,2,1,1,1,?,1,5,44.00,?,?,?,1,1,31110,00000,00000,1
57 | 2,1,528433,38.30,44,60,?,?,1,1,?,?,?,?,?,?,?,?,6.40,36.00,?,?,1,1,00000,00000,00000,2
58 | 2,1,527465,37.40,54,18,3,?,1,1,3,4,3,2,2,?,4,5,30.00,7.10,2,?,1,1,07111,00000,00000,1
59 | 1,1,534268,?,?,?,4,3,?,2,2,4,1,?,?,?,?,?,54,76,3,2,1,1,08405,00000,00000,2
60 | 1,1,535337,36.6,48,16,3,1,3,1,4,1,1,1,1,?,?,?,27,56,?,?,3,1,04206,00000,00000,2
61 | 1,1,534111,38.5,90,?,1,1,3,1,3,3,3,2,3,2,4,5,47,79,?,?,1,1,06112,00000,00000,2
62 | 1,1,530576,?,75,12,1,1,4,1,5,3,3,?,3,5.80,?,?,58.00,8.50,1,?,1,1,02209,00000,00000,1
63 | 2,1,529930,38.20,42,?,3,1,1,1,1,1,2,2,1,?,3,2,35.00,5.90,2,?,1,2,03113,00000,00000,2
64 | 1,9,5274919,38.20,78,60,4,4,6,?,3,3,3,?,?,?,1,?,59.00,5.80,3,3.10,2,1,02205,00000,00000,1
65 | 2,1,529695,38.60,60,30,1,1,3,1,4,2,2,1,1,?,?,?,40.00,6.00,1,?,1,1,03205,00000,00000,2
66 | 2,1,528452,37.80,42,40,1,1,1,1,1,3,1,?,?,?,3,3,36.00,6.20,?,?,1,2,04124,00000,00000,2
67 | 1,1,534783,38,60,12,1,1,2,1,2,1,1,1,1,?,1,4,44,65,3,2,3,1,02209,00000,00000,2
68 | 2,1,528926,38.00,42,12,3,?,3,1,1,1,1,?,?,?,?,1,37.00,5.80,?,?,1,2,03111,00000,00000,2
69 | 2,1,530670,37.60,88,36,3,1,1,1,3,3,2,1,3,1.50,?,?,44.00,6.00,?,?,2,1,02112,00000,00000,2
70 |
--------------------------------------------------------------------------------
/datasets/iris.data:
--------------------------------------------------------------------------------
1 | sepal-length,sepal-width,petal-length,petal-width,class
2 | 5.1,3.5,1.4,0.2,Iris-setosa
3 | 4.9,3.0,1.4,0.2,Iris-setosa
4 | 4.7,3.2,1.3,0.2,Iris-setosa
5 | 4.6,3.1,1.5,0.2,Iris-setosa
6 | 5.0,3.6,1.4,0.2,Iris-setosa
7 | 5.4,3.9,1.7,0.4,Iris-setosa
8 | 4.6,3.4,1.4,0.3,Iris-setosa
9 | 5.0,3.4,1.5,0.2,Iris-setosa
10 | 4.4,2.9,1.4,0.2,Iris-setosa
11 | 4.9,3.1,1.5,0.1,Iris-setosa
12 | 5.4,3.7,1.5,0.2,Iris-setosa
13 | 4.8,3.4,1.6,0.2,Iris-setosa
14 | 4.8,3.0,1.4,0.1,Iris-setosa
15 | 4.3,3.0,1.1,0.1,Iris-setosa
16 | 5.8,4.0,1.2,0.2,Iris-setosa
17 | 5.7,4.4,1.5,0.4,Iris-setosa
18 | 5.4,3.9,1.3,0.4,Iris-setosa
19 | 5.1,3.5,1.4,0.3,Iris-setosa
20 | 5.7,3.8,1.7,0.3,Iris-setosa
21 | 5.1,3.8,1.5,0.3,Iris-setosa
22 | 5.4,3.4,1.7,0.2,Iris-setosa
23 | 5.1,3.7,1.5,0.4,Iris-setosa
24 | 4.6,3.6,1.0,0.2,Iris-setosa
25 | 5.1,3.3,1.7,0.5,Iris-setosa
26 | 4.8,3.4,1.9,0.2,Iris-setosa
27 | 5.0,3.0,1.6,0.2,Iris-setosa
28 | 5.0,3.4,1.6,0.4,Iris-setosa
29 | 5.2,3.5,1.5,0.2,Iris-setosa
30 | 5.2,3.4,1.4,0.2,Iris-setosa
31 | 4.7,3.2,1.6,0.2,Iris-setosa
32 | 4.8,3.1,1.6,0.2,Iris-setosa
33 | 5.4,3.4,1.5,0.4,Iris-setosa
34 | 5.2,4.1,1.5,0.1,Iris-setosa
35 | 5.5,4.2,1.4,0.2,Iris-setosa
36 | 4.9,3.1,1.5,0.1,Iris-setosa
37 | 5.0,3.2,1.2,0.2,Iris-setosa
38 | 5.5,3.5,1.3,0.2,Iris-setosa
39 | 4.9,3.1,1.5,0.1,Iris-setosa
40 | 4.4,3.0,1.3,0.2,Iris-setosa
41 | 5.1,3.4,1.5,0.2,Iris-setosa
42 | 5.0,3.5,1.3,0.3,Iris-setosa
43 | 4.5,2.3,1.3,0.3,Iris-setosa
44 | 4.4,3.2,1.3,0.2,Iris-setosa
45 | 5.0,3.5,1.6,0.6,Iris-setosa
46 | 5.1,3.8,1.9,0.4,Iris-setosa
47 | 4.8,3.0,1.4,0.3,Iris-setosa
48 | 5.1,3.8,1.6,0.2,Iris-setosa
49 | 4.6,3.2,1.4,0.2,Iris-setosa
50 | 5.3,3.7,1.5,0.2,Iris-setosa
51 | 5.0,3.3,1.4,0.2,Iris-setosa
52 | 7.0,3.2,4.7,1.4,Iris-versicolor
53 | 6.4,3.2,4.5,1.5,Iris-versicolor
54 | 6.9,3.1,4.9,1.5,Iris-versicolor
55 | 5.5,2.3,4.0,1.3,Iris-versicolor
56 | 6.5,2.8,4.6,1.5,Iris-versicolor
57 | 5.7,2.8,4.5,1.3,Iris-versicolor
58 | 6.3,3.3,4.7,1.6,Iris-versicolor
59 | 4.9,2.4,3.3,1.0,Iris-versicolor
60 | 6.6,2.9,4.6,1.3,Iris-versicolor
61 | 5.2,2.7,3.9,1.4,Iris-versicolor
62 | 5.0,2.0,3.5,1.0,Iris-versicolor
63 | 5.9,3.0,4.2,1.5,Iris-versicolor
64 | 6.0,2.2,4.0,1.0,Iris-versicolor
65 | 6.1,2.9,4.7,1.4,Iris-versicolor
66 | 5.6,2.9,3.6,1.3,Iris-versicolor
67 | 6.7,3.1,4.4,1.4,Iris-versicolor
68 | 5.6,3.0,4.5,1.5,Iris-versicolor
69 | 5.8,2.7,4.1,1.0,Iris-versicolor
70 | 6.2,2.2,4.5,1.5,Iris-versicolor
71 | 5.6,2.5,3.9,1.1,Iris-versicolor
72 | 5.9,3.2,4.8,1.8,Iris-versicolor
73 | 6.1,2.8,4.0,1.3,Iris-versicolor
74 | 6.3,2.5,4.9,1.5,Iris-versicolor
75 | 6.1,2.8,4.7,1.2,Iris-versicolor
76 | 6.4,2.9,4.3,1.3,Iris-versicolor
77 | 6.6,3.0,4.4,1.4,Iris-versicolor
78 | 6.8,2.8,4.8,1.4,Iris-versicolor
79 | 6.7,3.0,5.0,1.7,Iris-versicolor
80 | 6.0,2.9,4.5,1.5,Iris-versicolor
81 | 5.7,2.6,3.5,1.0,Iris-versicolor
82 | 5.5,2.4,3.8,1.1,Iris-versicolor
83 | 5.5,2.4,3.7,1.0,Iris-versicolor
84 | 5.8,2.7,3.9,1.2,Iris-versicolor
85 | 6.0,2.7,5.1,1.6,Iris-versicolor
86 | 5.4,3.0,4.5,1.5,Iris-versicolor
87 | 6.0,3.4,4.5,1.6,Iris-versicolor
88 | 6.7,3.1,4.7,1.5,Iris-versicolor
89 | 6.3,2.3,4.4,1.3,Iris-versicolor
90 | 5.6,3.0,4.1,1.3,Iris-versicolor
91 | 5.5,2.5,4.0,1.3,Iris-versicolor
92 | 5.5,2.6,4.4,1.2,Iris-versicolor
93 | 6.1,3.0,4.6,1.4,Iris-versicolor
94 | 5.8,2.6,4.0,1.2,Iris-versicolor
95 | 5.0,2.3,3.3,1.0,Iris-versicolor
96 | 5.6,2.7,4.2,1.3,Iris-versicolor
97 | 5.7,3.0,4.2,1.2,Iris-versicolor
98 | 5.7,2.9,4.2,1.3,Iris-versicolor
99 | 6.2,2.9,4.3,1.3,Iris-versicolor
100 | 5.1,2.5,3.0,1.1,Iris-versicolor
101 | 5.7,2.8,4.1,1.3,Iris-versicolor
102 | 6.3,3.3,6.0,2.5,Iris-virginica
103 | 5.8,2.7,5.1,1.9,Iris-virginica
104 | 7.1,3.0,5.9,2.1,Iris-virginica
105 | 6.3,2.9,5.6,1.8,Iris-virginica
106 | 6.5,3.0,5.8,2.2,Iris-virginica
107 | 7.6,3.0,6.6,2.1,Iris-virginica
108 | 4.9,2.5,4.5,1.7,Iris-virginica
109 | 7.3,2.9,6.3,1.8,Iris-virginica
110 | 6.7,2.5,5.8,1.8,Iris-virginica
111 | 7.2,3.6,6.1,2.5,Iris-virginica
112 | 6.5,3.2,5.1,2.0,Iris-virginica
113 | 6.4,2.7,5.3,1.9,Iris-virginica
114 | 6.8,3.0,5.5,2.1,Iris-virginica
115 | 5.7,2.5,5.0,2.0,Iris-virginica
116 | 5.8,2.8,5.1,2.4,Iris-virginica
117 | 6.4,3.2,5.3,2.3,Iris-virginica
118 | 6.5,3.0,5.5,1.8,Iris-virginica
119 | 7.7,3.8,6.7,2.2,Iris-virginica
120 | 7.7,2.6,6.9,2.3,Iris-virginica
121 | 6.0,2.2,5.0,1.5,Iris-virginica
122 | 6.9,3.2,5.7,2.3,Iris-virginica
123 | 5.6,2.8,4.9,2.0,Iris-virginica
124 | 7.7,2.8,6.7,2.0,Iris-virginica
125 | 6.3,2.7,4.9,1.8,Iris-virginica
126 | 6.7,3.3,5.7,2.1,Iris-virginica
127 | 7.2,3.2,6.0,1.8,Iris-virginica
128 | 6.2,2.8,4.8,1.8,Iris-virginica
129 | 6.1,3.0,4.9,1.8,Iris-virginica
130 | 6.4,2.8,5.6,2.1,Iris-virginica
131 | 7.2,3.0,5.8,1.6,Iris-virginica
132 | 7.4,2.8,6.1,1.9,Iris-virginica
133 | 7.9,3.8,6.4,2.0,Iris-virginica
134 | 6.4,2.8,5.6,2.2,Iris-virginica
135 | 6.3,2.8,5.1,1.5,Iris-virginica
136 | 6.1,2.6,5.6,1.4,Iris-virginica
137 | 7.7,3.0,6.1,2.3,Iris-virginica
138 | 6.3,3.4,5.6,2.4,Iris-virginica
139 | 6.4,3.1,5.5,1.8,Iris-virginica
140 | 6.0,3.0,4.8,1.8,Iris-virginica
141 | 6.9,3.1,5.4,2.1,Iris-virginica
142 | 6.7,3.1,5.6,2.4,Iris-virginica
143 | 6.9,3.1,5.1,2.3,Iris-virginica
144 | 5.8,2.7,5.1,1.9,Iris-virginica
145 | 6.8,3.2,5.9,2.3,Iris-virginica
146 | 6.7,3.3,5.7,2.5,Iris-virginica
147 | 6.7,3.0,5.2,2.3,Iris-virginica
148 | 6.3,2.5,5.0,1.9,Iris-virginica
149 | 6.5,3.0,5.2,2.0,Iris-virginica
150 | 6.2,3.4,5.4,2.3,Iris-virginica
151 | 5.9,3.0,5.1,1.8,Iris-virginica
152 |
153 |
--------------------------------------------------------------------------------
/datasets/labels_lawsuit.mat:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bdsul/grape/4b1d765867d3079feaba4cce028450546fcf6760/datasets/labels_lawsuit.mat
--------------------------------------------------------------------------------
/datasets/lupus.csv:
--------------------------------------------------------------------------------
1 | d1,d2,d3
2 | 157.0,1.0,0.69
3 | 268.0,10.0,2.4
4 | 134.0,0.1,0.1
5 | 150.0,25.0,3.26
6 | 221.0,12.0,2.56
7 | 225.0,18.0,2.94
8 | 264.0,2.0,1.1
9 | 195.0,0.1,0.1
10 | 237.0,0.1,0.1
11 | 212.0,1.0,0.69
12 | 179.0,5.0,1.79
13 | 182.0,0.1,0.1
14 | 107.0,6.0,1.95
15 | 200.0,0.1,0.1
16 | 248.0,1.0,0.69
17 | 108.0,7.0,2.08
18 | 192.0,0.1,0.1
19 | 112.0,0.1,0.1
20 | 169.0,2.0,1.1
21 | 136.0,0.1,0.1
22 | 126.0,0.1,0.1
23 | 256.0,0.1,0.1
24 | 220.0,0.1,0.1
25 | 186.0,0.1,0.1
26 | 166.0,1.0,0.69
27 | 237.0,5.0,1.79
28 | 148.0,44.0,3.81
29 | 127.0,0.1,0.1
30 | 189.0,4.0,1.61
31 | 214.0,17.0,2.89
32 | 226.0,0.1,0.1
33 | 138.0,1.0,0.69
34 | 253.0,4.0,1.61
35 | 81.0,16.0,2.83
36 | 169.0,3.0,1.39
37 | 235.0,7.0,2.08
38 | 239.0,0.1,0.1
39 | 263.0,7.0,2.08
40 | 168.0,0.1,0.1
41 | 276.0,99.0,4.61
42 | 240.0,75.0,4.33
43 | 128.0,1.0,0.69
44 | 37.0,7.0,2.08
45 | 147.0,3.0,1.39
46 | 100.0,1.0,0.69
47 | 158.0,14.0,2.71
48 | 191.0,1.0,0.69
49 | 189.0,0.1,0.1
50 | 209.0,75.0,4.33
51 | 78.0,0.1,0.1
52 | 107.0,3.0,1.39
53 | 89.0,0.1,0.1
54 | 209.0,2.0,1.1
55 | 21.0,0.1,0.1
56 | 28.0,7.0,2.08
57 | 46.0,24.0,3.22
58 | 169.0,2.0,1.1
59 | 39.0,22.0,3.14
60 | 99.0,0.1,0.1
61 | 132.0,0.1,0.1
62 | 114.0,1.0,0.69
63 | 127.0,4.0,1.61
64 | 103.0,11.0,2.48
65 | 39.0,57.0,4.06
66 | 63.0,1.0,0.69
67 | 159.0,0.1,0.1
68 | 75.0,1.0,0.69
69 | 32.0,1.0,0.69
70 | 138.0,10.0,2.4
71 | 145.0,0.1,0.1
72 | 182.0,14.0,2.71
73 | 4.0,106.0,4.67
74 | 103.0,1.0,0.69
75 | 180.0,7.0,2.08
76 | 54.0,1.0,0.69
77 | 188.0,0.1,0.1
78 | 19.0,0.1,0.1
79 | 118.0,10.0,2.4
80 | 48.0,1.0,0.69
81 | 13.0,6.0,1.95
82 | 165.0,0.1,0.1
83 | 86.0,8.0,2.2
84 | 25.0,15.0,2.77
85 | 4.0,34.0,3.56
86 | 65.0,0.1,0.1
87 | 62.0,8.0,2.2
88 | 44.0,77.0,4.36
89 |
--------------------------------------------------------------------------------
/datasets/lupus_labels.csv:
--------------------------------------------------------------------------------
1 | class
2 | 1
3 | 1
4 | 1
5 | 1
6 | 1
7 | 1
8 | 1
9 | 1
10 | 1
11 | 1
12 | 1
13 | 1
14 | 1
15 | 1
16 | 1
17 | 1
18 | 1
19 | 1
20 | 1
21 | 1
22 | 1
23 | 1
24 | 1
25 | 1
26 | 1
27 | 1
28 | 1
29 | 1
30 | 1
31 | 1
32 | 1
33 | 1
34 | 1
35 | 1
36 | 1
37 | 1
38 | 1
39 | 1
40 | 1
41 | 1
42 | 1
43 | 1
44 | 1
45 | 1
46 | 1
47 | 1
48 | 1
49 | 1
50 | 1
51 | 1
52 | 1
53 | 1
54 | 2
55 | 2
56 | 2
57 | 2
58 | 2
59 | 2
60 | 2
61 | 2
62 | 2
63 | 2
64 | 2
65 | 2
66 | 2
67 | 2
68 | 2
69 | 2
70 | 2
71 | 2
72 | 2
73 | 2
74 | 2
75 | 2
76 | 2
77 | 2
78 | 2
79 | 2
80 | 2
81 | 2
82 | 2
83 | 2
84 | 2
85 | 2
86 | 2
87 | 2
88 | 2
89 |
--------------------------------------------------------------------------------
/datasets/parity3.csv:
--------------------------------------------------------------------------------
1 | d0 d1 d2 output
2 | 0 0 0 1
3 | 0 0 1 0
4 | 0 1 0 0
5 | 0 1 1 1
6 | 1 0 0 0
7 | 1 0 1 1
8 | 1 1 0 1
9 | 1 1 1 0
10 |
--------------------------------------------------------------------------------
/datasets/parity4.csv:
--------------------------------------------------------------------------------
1 | d0 d1 d2 d3 output
2 | 0 0 0 0 1
3 | 0 0 0 1 0
4 | 0 0 1 0 0
5 | 0 0 1 1 1
6 | 0 1 0 0 0
7 | 0 1 0 1 1
8 | 0 1 1 0 1
9 | 0 1 1 1 0
10 | 1 0 0 0 0
11 | 1 0 0 1 1
12 | 1 0 1 0 1
13 | 1 0 1 1 0
14 | 1 1 0 0 1
15 | 1 1 0 1 0
16 | 1 1 1 0 0
17 | 1 1 1 1 1
18 |
--------------------------------------------------------------------------------
/datasets/parity5.csv:
--------------------------------------------------------------------------------
1 | d0 d1 d2 d3 d4 output
2 | 0 0 0 0 0 1
3 | 0 0 0 0 1 0
4 | 0 0 0 1 0 0
5 | 0 0 0 1 1 1
6 | 0 0 1 0 0 0
7 | 0 0 1 0 1 1
8 | 0 0 1 1 0 1
9 | 0 0 1 1 1 0
10 | 0 1 0 0 0 0
11 | 0 1 0 0 1 1
12 | 0 1 0 1 0 1
13 | 0 1 0 1 1 0
14 | 0 1 1 0 0 1
15 | 0 1 1 0 1 0
16 | 0 1 1 1 0 0
17 | 0 1 1 1 1 1
18 | 1 0 0 0 0 0
19 | 1 0 0 0 1 1
20 | 1 0 0 1 0 1
21 | 1 0 0 1 1 0
22 | 1 0 1 0 0 1
23 | 1 0 1 0 1 0
24 | 1 0 1 1 0 0
25 | 1 0 1 1 1 1
26 | 1 1 0 0 0 1
27 | 1 1 0 0 1 0
28 | 1 1 0 1 0 0
29 | 1 1 0 1 1 1
30 | 1 1 1 0 0 0
31 | 1 1 1 0 1 1
32 | 1 1 1 1 0 1
33 | 1 1 1 1 1 0
34 |
--------------------------------------------------------------------------------
/datasets/pima_labels.csv:
--------------------------------------------------------------------------------
1 | y
2 | 0
3 | 0
4 | 0
5 | 0
6 | 0
7 | 0
8 | 0
9 | 0
10 | 0
11 | 0
12 | 0
13 | 0
14 | 0
15 | 0
16 | 0
17 | 0
18 | 0
19 | 0
20 | 0
21 | 0
22 | 0
23 | 0
24 | 0
25 | 0
26 | 0
27 | 0
28 | 0
29 | 0
30 | 0
31 | 0
32 | 0
33 | 0
34 | 0
35 | 0
36 | 0
37 | 0
38 | 0
39 | 0
40 | 0
41 | 0
42 | 0
43 | 0
44 | 0
45 | 0
46 | 0
47 | 0
48 | 0
49 | 0
50 | 0
51 | 0
52 | 0
53 | 0
54 | 0
55 | 0
56 | 0
57 | 0
58 | 0
59 | 0
60 | 0
61 | 0
62 | 0
63 | 0
64 | 0
65 | 0
66 | 0
67 | 0
68 | 0
69 | 0
70 | 0
71 | 0
72 | 0
73 | 0
74 | 0
75 | 0
76 | 0
77 | 0
78 | 0
79 | 0
80 | 0
81 | 0
82 | 0
83 | 0
84 | 0
85 | 0
86 | 0
87 | 0
88 | 0
89 | 0
90 | 0
91 | 0
92 | 0
93 | 0
94 | 0
95 | 0
96 | 0
97 | 0
98 | 0
99 | 0
100 | 0
101 | 0
102 | 0
103 | 0
104 | 0
105 | 0
106 | 0
107 | 0
108 | 0
109 | 0
110 | 0
111 | 0
112 | 0
113 | 0
114 | 0
115 | 0
116 | 0
117 | 0
118 | 0
119 | 0
120 | 0
121 | 0
122 | 0
123 | 0
124 | 0
125 | 0
126 | 0
127 | 0
128 | 0
129 | 0
130 | 0
131 | 0
132 | 0
133 | 0
134 | 0
135 | 0
136 | 0
137 | 0
138 | 0
139 | 0
140 | 0
141 | 0
142 | 0
143 | 0
144 | 0
145 | 0
146 | 0
147 | 0
148 | 0
149 | 0
150 | 0
151 | 0
152 | 0
153 | 0
154 | 0
155 | 0
156 | 0
157 | 0
158 | 0
159 | 0
160 | 0
161 | 0
162 | 0
163 | 0
164 | 0
165 | 0
166 | 0
167 | 0
168 | 0
169 | 0
170 | 0
171 | 0
172 | 0
173 | 0
174 | 0
175 | 0
176 | 0
177 | 0
178 | 0
179 | 0
180 | 0
181 | 0
182 | 0
183 | 0
184 | 0
185 | 0
186 | 0
187 | 0
188 | 0
189 | 0
190 | 0
191 | 0
192 | 0
193 | 0
194 | 0
195 | 0
196 | 0
197 | 0
198 | 0
199 | 0
200 | 0
201 | 0
202 | 0
203 | 0
204 | 0
205 | 0
206 | 0
207 | 0
208 | 0
209 | 0
210 | 0
211 | 0
212 | 0
213 | 0
214 | 0
215 | 0
216 | 0
217 | 0
218 | 0
219 | 0
220 | 0
221 | 0
222 | 0
223 | 0
224 | 0
225 | 0
226 | 0
227 | 0
228 | 0
229 | 0
230 | 0
231 | 0
232 | 0
233 | 0
234 | 0
235 | 0
236 | 0
237 | 0
238 | 0
239 | 0
240 | 0
241 | 0
242 | 0
243 | 0
244 | 0
245 | 0
246 | 0
247 | 0
248 | 0
249 | 0
250 | 0
251 | 0
252 | 0
253 | 0
254 | 0
255 | 0
256 | 0
257 | 0
258 | 0
259 | 0
260 | 0
261 | 0
262 | 0
263 | 0
264 | 0
265 | 0
266 | 0
267 | 0
268 | 0
269 | 0
270 | 0
271 | 0
272 | 0
273 | 0
274 | 0
275 | 0
276 | 0
277 | 0
278 | 0
279 | 0
280 | 0
281 | 0
282 | 0
283 | 0
284 | 0
285 | 0
286 | 0
287 | 0
288 | 0
289 | 0
290 | 0
291 | 0
292 | 0
293 | 0
294 | 0
295 | 0
296 | 0
297 | 0
298 | 0
299 | 0
300 | 0
301 | 0
302 | 0
303 | 0
304 | 0
305 | 0
306 | 0
307 | 0
308 | 0
309 | 0
310 | 0
311 | 0
312 | 0
313 | 0
314 | 0
315 | 0
316 | 0
317 | 0
318 | 0
319 | 0
320 | 0
321 | 0
322 | 0
323 | 0
324 | 0
325 | 0
326 | 0
327 | 0
328 | 0
329 | 0
330 | 0
331 | 0
332 | 0
333 | 0
334 | 0
335 | 0
336 | 0
337 | 0
338 | 0
339 | 0
340 | 0
341 | 0
342 | 0
343 | 0
344 | 0
345 | 0
346 | 0
347 | 0
348 | 0
349 | 0
350 | 0
351 | 0
352 | 0
353 | 0
354 | 0
355 | 0
356 | 0
357 | 0
358 | 0
359 | 0
360 | 0
361 | 0
362 | 0
363 | 0
364 | 0
365 | 0
366 | 0
367 | 0
368 | 0
369 | 0
370 | 0
371 | 0
372 | 0
373 | 0
374 | 0
375 | 0
376 | 0
377 | 0
378 | 0
379 | 0
380 | 0
381 | 0
382 | 0
383 | 0
384 | 0
385 | 0
386 | 0
387 | 0
388 | 0
389 | 0
390 | 0
391 | 0
392 | 0
393 | 0
394 | 0
395 | 0
396 | 0
397 | 0
398 | 0
399 | 0
400 | 0
401 | 0
402 | 0
403 | 0
404 | 0
405 | 0
406 | 0
407 | 0
408 | 0
409 | 0
410 | 0
411 | 0
412 | 0
413 | 0
414 | 0
415 | 0
416 | 0
417 | 0
418 | 0
419 | 0
420 | 0
421 | 0
422 | 0
423 | 0
424 | 0
425 | 0
426 | 0
427 | 0
428 | 0
429 | 0
430 | 0
431 | 0
432 | 0
433 | 0
434 | 0
435 | 0
436 | 0
437 | 0
438 | 0
439 | 0
440 | 0
441 | 0
442 | 0
443 | 0
444 | 0
445 | 0
446 | 0
447 | 0
448 | 0
449 | 0
450 | 0
451 | 0
452 | 0
453 | 0
454 | 0
455 | 0
456 | 0
457 | 0
458 | 0
459 | 0
460 | 0
461 | 0
462 | 0
463 | 0
464 | 0
465 | 0
466 | 0
467 | 0
468 | 0
469 | 0
470 | 0
471 | 0
472 | 0
473 | 0
474 | 0
475 | 0
476 | 0
477 | 0
478 | 0
479 | 0
480 | 0
481 | 0
482 | 0
483 | 0
484 | 0
485 | 0
486 | 0
487 | 0
488 | 0
489 | 0
490 | 0
491 | 0
492 | 0
493 | 0
494 | 0
495 | 0
496 | 0
497 | 0
498 | 0
499 | 0
500 | 0
501 | 0
502 | 1
503 | 1
504 | 1
505 | 1
506 | 1
507 | 1
508 | 1
509 | 1
510 | 1
511 | 1
512 | 1
513 | 1
514 | 1
515 | 1
516 | 1
517 | 1
518 | 1
519 | 1
520 | 1
521 | 1
522 | 1
523 | 1
524 | 1
525 | 1
526 | 1
527 | 1
528 | 1
529 | 1
530 | 1
531 | 1
532 | 1
533 | 1
534 | 1
535 | 1
536 | 1
537 | 1
538 | 1
539 | 1
540 | 1
541 | 1
542 | 1
543 | 1
544 | 1
545 | 1
546 | 1
547 | 1
548 | 1
549 | 1
550 | 1
551 | 1
552 | 1
553 | 1
554 | 1
555 | 1
556 | 1
557 | 1
558 | 1
559 | 1
560 | 1
561 | 1
562 | 1
563 | 1
564 | 1
565 | 1
566 | 1
567 | 1
568 | 1
569 | 1
570 | 1
571 | 1
572 | 1
573 | 1
574 | 1
575 | 1
576 | 1
577 | 1
578 | 1
579 | 1
580 | 1
581 | 1
582 | 1
583 | 1
584 | 1
585 | 1
586 | 1
587 | 1
588 | 1
589 | 1
590 | 1
591 | 1
592 | 1
593 | 1
594 | 1
595 | 1
596 | 1
597 | 1
598 | 1
599 | 1
600 | 1
601 | 1
602 | 1
603 | 1
604 | 1
605 | 1
606 | 1
607 | 1
608 | 1
609 | 1
610 | 1
611 | 1
612 | 1
613 | 1
614 | 1
615 | 1
616 | 1
617 | 1
618 | 1
619 | 1
620 | 1
621 | 1
622 | 1
623 | 1
624 | 1
625 | 1
626 | 1
627 | 1
628 | 1
629 | 1
630 | 1
631 | 1
632 | 1
633 | 1
634 | 1
635 | 1
636 | 1
637 | 1
638 | 1
639 | 1
640 | 1
641 | 1
642 | 1
643 | 1
644 | 1
645 | 1
646 | 1
647 | 1
648 | 1
649 | 1
650 | 1
651 | 1
652 | 1
653 | 1
654 | 1
655 | 1
656 | 1
657 | 1
658 | 1
659 | 1
660 | 1
661 | 1
662 | 1
663 | 1
664 | 1
665 | 1
666 | 1
667 | 1
668 | 1
669 | 1
670 | 1
671 | 1
672 | 1
673 | 1
674 | 1
675 | 1
676 | 1
677 | 1
678 | 1
679 | 1
680 | 1
681 | 1
682 | 1
683 | 1
684 | 1
685 | 1
686 | 1
687 | 1
688 | 1
689 | 1
690 | 1
691 | 1
692 | 1
693 | 1
694 | 1
695 | 1
696 | 1
697 | 1
698 | 1
699 | 1
700 | 1
701 | 1
702 | 1
703 | 1
704 | 1
705 | 1
706 | 1
707 | 1
708 | 1
709 | 1
710 | 1
711 | 1
712 | 1
713 | 1
714 | 1
715 | 1
716 | 1
717 | 1
718 | 1
719 | 1
720 | 1
721 | 1
722 | 1
723 | 1
724 | 1
725 | 1
726 | 1
727 | 1
728 | 1
729 | 1
730 | 1
731 | 1
732 | 1
733 | 1
734 | 1
735 | 1
736 | 1
737 | 1
738 | 1
739 | 1
740 | 1
741 | 1
742 | 1
743 | 1
744 | 1
745 | 1
746 | 1
747 | 1
748 | 1
749 | 1
750 | 1
751 | 1
752 | 1
753 | 1
754 | 1
755 | 1
756 | 1
757 | 1
758 | 1
759 | 1
760 | 1
761 | 1
762 | 1
763 | 1
764 | 1
765 | 1
766 | 1
767 | 1
768 | 1
769 | 1
770 |
--------------------------------------------------------------------------------
/datasets/transfusion.csv:
--------------------------------------------------------------------------------
1 | d1,d2,d3,d4
2 | 1,24,6000,77
3 | 4,4,1000,4
4 | 1,12,3000,35
5 | 4,23,5750,58
6 | 0,3,750,4
7 | 1,13,3250,47
8 | 4,11,2750,28
9 | 9,9,2250,16
10 | 4,14,3500,40
11 | 4,6,1500,14
12 | 4,8,2000,21
13 | 1,14,3500,58
14 | 2,16,4000,64
15 | 2,5,1250,16
16 | 2,5,1250,16
17 | 2,9,2250,36
18 | 2,2,500,2
19 | 2,2,500,2
20 | 2,2,500,2
21 | 2,11,2750,46
22 | 2,6,1500,22
23 | 2,12,3000,52
24 | 2,16,4000,81
25 | 3,6,1500,21
26 | 2,7,1750,29
27 | 2,10,2500,49
28 | 3,16,4000,74
29 | 0,2,500,4
30 | 4,7,1750,25
31 | 1,9,2250,51
32 | 2,4,1000,16
33 | 2,4,1000,16
34 | 2,2,500,4
35 | 2,2,500,4
36 | 2,2,500,4
37 | 2,2,500,4
38 | 2,4,1000,16
39 | 2,4,1000,16
40 | 2,4,1000,16
41 | 2,6,1500,28
42 | 4,2,500,4
43 | 4,2,500,4
44 | 4,2,500,4
45 | 4,2,500,4
46 | 4,2,500,4
47 | 4,2,500,4
48 | 12,11,2750,23
49 | 4,7,1750,28
50 | 3,17,4250,86
51 | 4,9,2250,40
52 | 2,5,1250,26
53 | 2,5,1250,26
54 | 6,17,4250,70
55 | 0,8,2000,59
56 | 3,5,1250,26
57 | 2,3,750,14
58 | 2,10,2500,64
59 | 4,9,2250,46
60 | 4,5,1250,23
61 | 2,12,3000,82
62 | 11,24,6000,64
63 | 4,11,2750,61
64 | 1,7,1750,57
65 | 2,4,1000,26
66 | 2,5,1250,34
67 | 2,4,1000,26
68 | 4,5,1250,28
69 | 2,12,3000,95
70 | 2,2,500,10
71 | 4,6,1500,35
72 | 2,11,2750,88
73 | 2,3,750,19
74 | 2,5,1250,37
75 | 2,12,3000,98
76 | 9,5,1250,19
77 | 2,2,500,11
78 | 2,9,2250,74
79 | 5,14,3500,86
80 | 4,3,750,16
81 | 4,3,750,16
82 | 6,3,750,14
83 | 2,2,500,11
84 | 2,2,500,11
85 | 4,6,1500,39
86 | 4,11,2750,78
87 | 2,1,250,2
88 | 2,1,250,2
89 | 2,1,250,2
90 | 2,1,250,2
91 | 2,1,250,2
92 | 2,1,250,2
93 | 2,1,250,2
94 | 2,1,250,2
95 | 2,1,250,2
96 | 2,1,250,2
97 | 2,1,250,2
98 | 2,1,250,2
99 | 2,1,250,2
100 | 2,1,250,2
101 | 2,1,250,2
102 | 2,1,250,2
103 | 2,1,250,2
104 | 2,1,250,2
105 | 11,10,2500,35
106 | 2,3,750,22
107 | 10,4,1000,16
108 | 2,4,1000,35
109 | 4,12,3000,88
110 | 13,8,2000,26
111 | 11,9,2250,33
112 | 4,5,1250,34
113 | 4,4,1000,26
114 | 8,15,3750,77
115 | 4,7,1750,52
116 | 4,7,1750,52
117 | 2,4,1000,35
118 | 11,11,2750,42
119 | 2,2,500,14
120 | 4,6,1500,47
121 | 11,7,1750,29
122 | 9,9,2250,45
123 | 4,6,1500,52
124 | 4,7,1750,58
125 | 4,7,1750,58
126 | 11,9,2250,38
127 | 11,6,1500,26
128 | 2,2,500,16
129 | 2,7,1750,76
130 | 11,6,1500,27
131 | 11,3,750,14
132 | 4,1,250,4
133 | 4,1,250,4
134 | 4,1,250,4
135 | 4,1,250,4
136 | 4,1,250,4
137 | 4,1,250,4
138 | 4,1,250,4
139 | 4,1,250,4
140 | 4,1,250,4
141 | 4,1,250,4
142 | 4,1,250,4
143 | 4,1,250,4
144 | 4,3,750,24
145 | 4,1,250,4
146 | 4,1,250,4
147 | 4,1,250,4
148 | 4,1,250,4
149 | 10,8,2000,39
150 | 14,7,1750,26
151 | 8,10,2500,63
152 | 11,3,750,15
153 | 4,2,500,14
154 | 2,4,1000,43
155 | 8,9,2250,58
156 | 11,22,5500,98
157 | 9,2,500,11
158 | 4,5,1250,46
159 | 11,12,3000,58
160 | 7,12,3000,86
161 | 11,2,500,11
162 | 11,2,500,11
163 | 11,2,500,11
164 | 2,6,1500,75
165 | 12,13,3250,59
166 | 2,3,750,35
167 | 16,8,2000,28
168 | 11,7,1750,37
169 | 4,3,750,28
170 | 12,12,3000,58
171 | 4,4,1000,41
172 | 2,2,500,23
173 | 4,5,1250,58
174 | 3,2,500,23
175 | 11,8,2000,46
176 | 4,7,1750,82
177 | 13,4,1000,21
178 | 16,11,2750,40
179 | 16,7,1750,28
180 | 7,2,500,16
181 | 4,5,1250,58
182 | 4,5,1250,58
183 | 4,4,1000,46
184 | 14,13,3250,57
185 | 4,3,750,34
186 | 14,18,4500,78
187 | 11,8,2000,48
188 | 14,16,4000,70
189 | 14,5,1250,26
190 | 8,2,500,16
191 | 11,5,1250,33
192 | 11,2,500,14
193 | 4,2,500,23
194 | 16,12,3000,50
195 | 11,4,1000,28
196 | 11,5,1250,35
197 | 11,5,1250,35
198 | 2,4,1000,70
199 | 14,5,1250,28
200 | 14,2,500,14
201 | 14,2,500,14
202 | 14,2,500,14
203 | 14,2,500,14
204 | 14,2,500,14
205 | 14,2,500,14
206 | 2,3,750,52
207 | 14,6,1500,34
208 | 4,5,1250,74
209 | 11,3,750,23
210 | 16,4,1000,23
211 | 16,3,750,19
212 | 11,5,1250,38
213 | 11,2,500,16
214 | 12,9,2250,60
215 | 9,1,250,9
216 | 9,1,250,9
217 | 4,2,500,29
218 | 11,2,500,17
219 | 14,4,1000,26
220 | 11,5,1250,41
221 | 15,16,4000,82
222 | 11,4,1000,34
223 | 16,7,1750,38
224 | 14,2,500,16
225 | 2,2,500,41
226 | 14,16,4000,98
227 | 16,7,1750,39
228 | 14,7,1750,47
229 | 16,6,1500,35
230 | 16,2,500,16
231 | 11,3,750,28
232 | 11,7,1750,64
233 | 9,3,750,34
234 | 14,4,1000,30
235 | 23,38,9500,98
236 | 11,6,1500,58
237 | 11,1,250,11
238 | 11,1,250,11
239 | 11,1,250,11
240 | 11,1,250,11
241 | 11,1,250,11
242 | 11,1,250,11
243 | 11,1,250,11
244 | 11,1,250,11
245 | 11,2,500,21
246 | 11,5,1250,50
247 | 11,2,500,21
248 | 16,4,1000,28
249 | 4,2,500,41
250 | 16,6,1500,40
251 | 14,3,750,26
252 | 9,2,500,26
253 | 21,16,4000,64
254 | 14,6,1500,51
255 | 11,2,500,24
256 | 4,3,750,71
257 | 21,13,3250,57
258 | 11,6,1500,71
259 | 23,15,3750,57
260 | 14,4,1000,38
261 | 11,2,500,26
262 | 14,3,750,31
263 | 4,2,500,52
264 | 9,4,1000,65
265 | 14,4,1000,40
266 | 14,5,1250,50
267 | 14,1,250,14
268 | 14,1,250,14
269 | 14,1,250,14
270 | 14,1,250,14
271 | 14,1,250,14
272 | 14,1,250,14
273 | 14,1,250,14
274 | 14,1,250,14
275 | 14,7,1750,72
276 | 14,1,250,14
277 | 14,1,250,14
278 | 9,3,750,52
279 | 14,7,1750,73
280 | 11,4,1000,58
281 | 11,4,1000,59
282 | 4,2,500,59
283 | 11,4,1000,61
284 | 16,4,1000,40
285 | 16,10,2500,89
286 | 21,3,750,26
287 | 16,8,2000,76
288 | 18,2,500,23
289 | 23,5,1250,33
290 | 23,8,2000,46
291 | 16,3,750,34
292 | 14,5,1250,64
293 | 14,3,750,41
294 | 16,1,250,16
295 | 16,1,250,16
296 | 16,1,250,16
297 | 16,1,250,16
298 | 16,1,250,16
299 | 16,1,250,16
300 | 16,1,250,16
301 | 16,4,1000,45
302 | 16,1,250,16
303 | 16,1,250,16
304 | 16,1,250,16
305 | 16,1,250,16
306 | 16,1,250,16
307 | 16,2,500,26
308 | 21,2,500,23
309 | 16,2,500,27
310 | 21,2,500,23
311 | 21,2,500,23
312 | 14,4,1000,57
313 | 16,5,1250,60
314 | 23,2,500,23
315 | 14,5,1250,74
316 | 23,3,750,28
317 | 16,3,750,40
318 | 9,2,500,52
319 | 9,2,500,52
320 | 14,4,1000,64
321 | 14,2,500,35
322 | 16,7,1750,93
323 | 21,2,500,25
324 | 14,3,750,52
325 | 23,14,3500,93
326 | 18,8,2000,95
327 | 16,3,750,46
328 | 11,3,750,76
329 | 11,2,500,52
330 | 11,3,750,76
331 | 23,12,3000,86
332 | 21,3,750,35
333 | 23,2,500,26
334 | 23,2,500,26
335 | 23,8,2000,64
336 | 16,3,750,50
337 | 23,3,750,33
338 | 21,3,750,38
339 | 23,2,500,28
340 | 21,1,250,21
341 | 21,1,250,21
342 | 21,1,250,21
343 | 21,1,250,21
344 | 21,1,250,21
345 | 21,1,250,21
346 | 21,1,250,21
347 | 21,1,250,21
348 | 21,1,250,21
349 | 21,1,250,21
350 | 21,1,250,21
351 | 21,5,1250,60
352 | 23,4,1000,45
353 | 21,4,1000,52
354 | 11,2,500,70
355 | 23,5,1250,58
356 | 23,3,750,40
357 | 23,3,750,41
358 | 14,3,750,83
359 | 21,2,500,35
360 | 23,6,1500,70
361 | 23,1,250,23
362 | 23,1,250,23
363 | 23,1,250,23
364 | 23,1,250,23
365 | 23,1,250,23
366 | 23,1,250,23
367 | 23,1,250,23
368 | 23,1,250,23
369 | 23,4,1000,53
370 | 21,6,1500,86
371 | 23,3,750,48
372 | 21,2,500,41
373 | 21,3,750,64
374 | 16,2,500,70
375 | 21,3,750,70
376 | 23,4,1000,87
377 | 23,3,750,89
378 | 23,2,500,87
379 | 35,3,750,64
380 | 38,1,250,38
381 | 38,1,250,38
382 | 40,1,250,40
383 | 74,1,250,74
384 | 2,44,11000,98
385 | 2,11,2750,23
386 | 2,11,2750,26
387 | 2,11,2750,28
388 | 3,14,3500,35
389 | 4,6,1500,14
390 | 4,9,2250,28
391 | 2,4,1000,11
392 | 2,15,3750,64
393 | 5,24,6000,79
394 | 4,8,2000,28
395 | 2,4,1000,14
396 | 2,6,1500,26
397 | 2,10,2500,52
398 | 1,14,3500,95
399 | 7,14,3500,48
400 | 2,3,750,11
401 | 4,4,1000,16
402 | 2,3,750,14
403 | 2,4,1000,23
404 | 4,4,1000,18
405 | 5,6,1500,28
406 | 4,6,1500,30
407 | 14,5,1250,14
408 | 3,8,2000,50
409 | 4,9,2250,52
410 | 7,10,2500,47
411 | 4,14,3500,86
412 | 2,9,2250,75
413 | 4,6,1500,35
414 | 4,9,2250,55
415 | 2,6,1500,45
416 | 2,6,1500,47
417 | 4,2,500,9
418 | 2,2,500,11
419 | 9,9,2250,38
420 | 11,5,1250,18
421 | 2,3,750,21
422 | 2,1,250,2
423 | 2,1,250,2
424 | 2,1,250,2
425 | 2,1,250,2
426 | 2,1,250,2
427 | 2,1,250,2
428 | 2,1,250,2
429 | 2,1,250,2
430 | 2,1,250,2
431 | 11,11,2750,38
432 | 2,3,750,22
433 | 5,11,2750,75
434 | 3,5,1250,38
435 | 4,6,1500,43
436 | 2,3,750,24
437 | 12,11,2750,39
438 | 2,2,500,14
439 | 4,6,1500,46
440 | 9,3,750,14
441 | 14,8,2000,26
442 | 4,2,500,13
443 | 4,11,2750,95
444 | 2,7,1750,77
445 | 2,7,1750,77
446 | 4,1,250,4
447 | 4,1,250,4
448 | 4,1,250,4
449 | 4,1,250,4
450 | 4,1,250,4
451 | 4,1,250,4
452 | 4,1,250,4
453 | 4,1,250,4
454 | 4,1,250,4
455 | 4,1,250,4
456 | 4,7,1750,62
457 | 4,1,250,4
458 | 11,6,1500,28
459 | 7,5,1250,35
460 | 9,9,2250,54
461 | 11,2,500,11
462 | 2,5,1250,63
463 | 7,11,2750,89
464 | 8,9,2250,64
465 | 2,2,500,22
466 | 6,3,750,26
467 | 12,15,3750,71
468 | 13,3,750,16
469 | 11,16,4000,89
470 | 4,5,1250,58
471 | 14,7,1750,35
472 | 11,4,1000,27
473 | 7,5,1250,52
474 | 11,6,1500,41
475 | 10,5,1250,38
476 | 14,2,500,14
477 | 14,2,500,14
478 | 2,2,500,33
479 | 11,3,750,23
480 | 14,8,2000,46
481 | 9,1,250,9
482 | 16,5,1250,27
483 | 14,4,1000,26
484 | 4,2,500,30
485 | 14,3,750,21
486 | 16,16,4000,77
487 | 4,2,500,31
488 | 14,8,2000,50
489 | 11,3,750,26
490 | 14,7,1750,45
491 | 15,5,1250,33
492 | 16,2,500,16
493 | 16,3,750,21
494 | 11,8,2000,72
495 | 11,1,250,11
496 | 11,1,250,11
497 | 11,1,250,11
498 | 11,1,250,11
499 | 2,3,750,77
500 | 16,4,1000,28
501 | 16,15,3750,87
502 | 16,14,3500,83
503 | 16,10,2500,62
504 | 16,3,750,23
505 | 14,3,750,26
506 | 23,19,4750,62
507 | 11,7,1750,75
508 | 14,3,750,28
509 | 4,2,500,46
510 | 11,2,500,25
511 | 11,3,750,37
512 | 16,4,1000,33
513 | 21,7,1750,38
514 | 13,7,1750,76
515 | 16,6,1500,50
516 | 14,3,750,33
517 | 14,1,250,14
518 | 14,1,250,14
519 | 14,1,250,14
520 | 14,1,250,14
521 | 14,1,250,14
522 | 14,1,250,14
523 | 14,3,750,35
524 | 14,3,750,35
525 | 16,7,1750,64
526 | 21,2,500,21
527 | 16,3,750,35
528 | 16,1,250,16
529 | 16,1,250,16
530 | 16,1,250,16
531 | 16,1,250,16
532 | 16,1,250,16
533 | 14,2,500,29
534 | 11,4,1000,74
535 | 21,6,1500,48
536 | 23,2,500,23
537 | 23,6,1500,45
538 | 16,6,1500,81
539 | 16,4,1000,58
540 | 16,5,1250,71
541 | 21,2,500,26
542 | 21,3,750,35
543 | 21,3,750,35
544 | 23,8,2000,69
545 | 21,3,750,38
546 | 23,3,750,35
547 | 21,3,750,40
548 | 23,2,500,28
549 | 21,1,250,21
550 | 21,1,250,21
551 | 25,6,1500,50
552 | 21,1,250,21
553 | 21,1,250,21
554 | 23,3,750,39
555 | 21,2,500,33
556 | 14,3,750,79
557 | 23,1,250,23
558 | 23,1,250,23
559 | 23,1,250,23
560 | 23,1,250,23
561 | 23,1,250,23
562 | 23,1,250,23
563 | 23,4,1000,52
564 | 23,1,250,23
565 | 23,7,1750,88
566 | 16,3,750,86
567 | 23,2,500,38
568 | 21,2,500,52
569 | 23,3,750,62
570 | 39,1,250,39
571 | 72,1,250,72
572 | 2,50,12500,98
573 | 0,13,3250,28
574 | 1,16,4000,35
575 | 2,20,5000,45
576 | 2,7,1750,14
577 | 2,9,2250,22
578 | 5,46,11500,98
579 | 2,10,2500,28
580 | 2,6,1500,15
581 | 2,5,1250,11
582 | 2,14,3500,48
583 | 2,15,3750,49
584 | 2,6,1500,15
585 | 2,3,750,4
586 | 2,3,750,4
587 | 2,6,1500,16
588 | 2,6,1500,16
589 | 4,12,3000,34
590 | 4,5,1250,11
591 | 4,10,2500,28
592 | 4,10,2500,28
593 | 4,9,2250,26
594 | 2,8,2000,28
595 | 2,12,3000,47
596 | 4,6,1500,16
597 | 2,14,3500,57
598 | 4,7,1750,22
599 | 2,13,3250,53
600 | 2,5,1250,16
601 | 4,20,5000,69
602 | 4,9,2250,28
603 | 2,11,2750,46
604 | 4,5,1250,14
605 | 4,19,4750,69
606 | 4,8,2000,26
607 | 2,7,1750,28
608 | 2,8,2000,35
609 | 4,5,1250,16
610 | 2,3,750,9
611 | 2,4,1000,14
612 | 4,17,4250,71
613 | 2,2,500,4
614 | 2,2,500,4
615 | 2,4,1000,16
616 | 4,6,1500,23
617 | 2,6,1500,28
618 | 2,7,1750,35
619 | 4,2,500,4
620 | 4,9,2250,38
621 | 4,4,1000,14
622 | 5,7,1750,26
623 | 4,8,2000,34
624 | 2,13,3250,76
625 | 4,5,1250,23
626 | 4,8,2000,40
627 | 2,7,1750,46
628 | 2,11,2750,79
629 | 2,3,750,16
630 | 4,5,1250,26
631 | 2,6,1500,41
632 | 2,5,1250,33
633 | 4,8,2000,46
634 | 4,8,2000,48
635 | 2,2,500,10
636 | 4,2,500,9
637 | 4,3,750,16
638 | 2,2,500,11
639 | 2,7,1750,58
640 | 2,1,250,2
641 | 2,1,250,2
642 | 2,1,250,2
643 | 2,1,250,2
644 | 11,4,1000,16
645 | 4,5,1250,33
646 | 4,6,1500,41
647 | 4,4,1000,26
648 | 4,5,1250,35
649 | 2,5,1250,47
650 | 9,8,2000,38
651 | 6,2,500,11
652 | 4,1,250,4
653 | 4,1,250,4
654 | 4,1,250,4
655 | 4,1,250,4
656 | 4,1,250,4
657 | 4,1,250,4
658 | 8,8,2000,52
659 | 4,3,750,25
660 | 11,17,4250,79
661 | 11,8,2000,41
662 | 11,3,750,16
663 | 11,14,3500,73
664 | 2,3,750,38
665 | 4,4,1000,43
666 | 14,4,1000,22
667 | 9,2,500,16
668 | 14,5,1250,28
669 | 14,3,750,19
670 | 14,4,1000,23
671 | 11,5,1250,37
672 | 11,9,2250,72
673 | 9,5,1250,51
674 | 14,8,2000,50
675 | 14,4,1000,28
676 | 16,6,1500,35
677 | 11,7,1750,62
678 | 16,3,750,21
679 | 11,1,250,11
680 | 14,2,500,21
681 | 16,5,1250,40
682 | 4,2,500,51
683 | 11,3,750,40
684 | 21,2,500,21
685 | 21,3,750,26
686 | 16,7,1750,87
687 | 21,1,250,21
688 | 22,1,250,22
689 | 26,5,1250,49
690 | 2,43,10750,86
691 | 6,22,5500,28
692 | 2,34,8500,77
693 | 0,26,6500,76
694 | 2,41,10250,98
695 | 3,21,5250,42
696 | 2,21,5250,52
697 | 2,13,3250,32
698 | 4,4,1000,4
699 | 4,16,4000,38
700 | 3,5,1250,12
701 | 4,33,8250,98
702 | 3,10,2500,33
703 | 4,10,2500,28
704 | 2,11,2750,40
705 | 2,11,2750,41
706 | 4,13,3250,39
707 | 1,10,2500,43
708 | 2,5,1250,16
709 | 2,6,1500,22
710 | 4,5,1250,16
711 | 2,4,1000,14
712 | 4,5,1250,16
713 | 2,7,1750,32
714 | 2,6,1500,26
715 | 2,8,2000,38
716 | 2,2,500,4
717 | 2,6,1500,28
718 | 4,16,4000,70
719 | 4,2,500,4
720 | 4,2,500,4
721 | 2,12,3000,70
722 | 4,7,1750,32
723 | 2,6,1500,35
724 | 4,6,1500,28
725 | 4,11,2750,64
726 | 4,16,4000,98
727 | 4,6,1500,35
728 | 2,2,500,11
729 | 2,2,500,11
730 | 4,6,1500,38
731 | 3,4,1000,29
732 | 2,1,250,2
733 | 2,1,250,2
734 | 9,11,2750,49
735 | 3,1,250,3
736 | 4,1,250,4
737 | 4,1,250,4
738 | 4,4,1000,34
739 | 13,3,750,14
740 | 7,9,2250,89
741 | 11,8,2000,52
742 | 14,2,500,14
743 | 11,1,250,11
744 | 2,3,750,75
745 | 20,14,3500,69
746 | 17,7,1750,58
747 | 11,2,500,38
748 | 14,2,500,35
749 | 23,1,250,23
750 |
--------------------------------------------------------------------------------
/datasets/transfusion_labels.csv:
--------------------------------------------------------------------------------
1 | class
2 | 1
3 | 1
4 | 1
5 | 1
6 | 1
7 | 1
8 | 1
9 | 1
10 | 1
11 | 1
12 | 1
13 | 1
14 | 1
15 | 1
16 | 1
17 | 1
18 | 1
19 | 1
20 | 1
21 | 1
22 | 1
23 | 1
24 | 1
25 | 1
26 | 1
27 | 1
28 | 1
29 | 1
30 | 1
31 | 1
32 | 1
33 | 1
34 | 1
35 | 1
36 | 1
37 | 1
38 | 1
39 | 1
40 | 1
41 | 1
42 | 1
43 | 1
44 | 1
45 | 1
46 | 1
47 | 1
48 | 1
49 | 1
50 | 1
51 | 1
52 | 1
53 | 1
54 | 1
55 | 1
56 | 1
57 | 1
58 | 1
59 | 1
60 | 1
61 | 1
62 | 1
63 | 1
64 | 1
65 | 1
66 | 1
67 | 1
68 | 1
69 | 1
70 | 1
71 | 1
72 | 1
73 | 1
74 | 1
75 | 1
76 | 1
77 | 1
78 | 1
79 | 1
80 | 1
81 | 1
82 | 1
83 | 1
84 | 1
85 | 1
86 | 1
87 | 1
88 | 1
89 | 1
90 | 1
91 | 1
92 | 1
93 | 1
94 | 1
95 | 1
96 | 1
97 | 1
98 | 1
99 | 1
100 | 1
101 | 1
102 | 1
103 | 1
104 | 1
105 | 1
106 | 1
107 | 1
108 | 1
109 | 1
110 | 1
111 | 1
112 | 1
113 | 1
114 | 1
115 | 1
116 | 1
117 | 1
118 | 1
119 | 1
120 | 1
121 | 1
122 | 1
123 | 1
124 | 1
125 | 1
126 | 1
127 | 1
128 | 1
129 | 1
130 | 1
131 | 1
132 | 1
133 | 1
134 | 1
135 | 1
136 | 1
137 | 1
138 | 1
139 | 1
140 | 1
141 | 1
142 | 1
143 | 1
144 | 1
145 | 1
146 | 1
147 | 1
148 | 1
149 | 1
150 | 1
151 | 1
152 | 1
153 | 1
154 | 1
155 | 1
156 | 1
157 | 1
158 | 1
159 | 1
160 | 1
161 | 1
162 | 1
163 | 1
164 | 1
165 | 1
166 | 1
167 | 1
168 | 1
169 | 1
170 | 1
171 | 1
172 | 1
173 | 1
174 | 1
175 | 1
176 | 1
177 | 1
178 | 1
179 | 1
180 | 1
181 | 1
182 | 1
183 | 1
184 | 1
185 | 1
186 | 1
187 | 1
188 | 1
189 | 1
190 | 1
191 | 1
192 | 1
193 | 1
194 | 1
195 | 1
196 | 1
197 | 1
198 | 1
199 | 1
200 | 1
201 | 1
202 | 1
203 | 1
204 | 1
205 | 1
206 | 1
207 | 1
208 | 1
209 | 1
210 | 1
211 | 1
212 | 1
213 | 1
214 | 1
215 | 1
216 | 1
217 | 1
218 | 1
219 | 1
220 | 1
221 | 1
222 | 1
223 | 1
224 | 1
225 | 1
226 | 1
227 | 1
228 | 1
229 | 1
230 | 1
231 | 1
232 | 1
233 | 1
234 | 1
235 | 1
236 | 1
237 | 1
238 | 1
239 | 1
240 | 1
241 | 1
242 | 1
243 | 1
244 | 1
245 | 1
246 | 1
247 | 1
248 | 1
249 | 1
250 | 1
251 | 1
252 | 1
253 | 1
254 | 1
255 | 1
256 | 1
257 | 1
258 | 1
259 | 1
260 | 1
261 | 1
262 | 1
263 | 1
264 | 1
265 | 1
266 | 1
267 | 1
268 | 1
269 | 1
270 | 1
271 | 1
272 | 1
273 | 1
274 | 1
275 | 1
276 | 1
277 | 1
278 | 1
279 | 1
280 | 1
281 | 1
282 | 1
283 | 1
284 | 1
285 | 1
286 | 1
287 | 1
288 | 1
289 | 1
290 | 1
291 | 1
292 | 1
293 | 1
294 | 1
295 | 1
296 | 1
297 | 1
298 | 1
299 | 1
300 | 1
301 | 1
302 | 1
303 | 1
304 | 1
305 | 1
306 | 1
307 | 1
308 | 1
309 | 1
310 | 1
311 | 1
312 | 1
313 | 1
314 | 1
315 | 1
316 | 1
317 | 1
318 | 1
319 | 1
320 | 1
321 | 1
322 | 1
323 | 1
324 | 1
325 | 1
326 | 1
327 | 1
328 | 1
329 | 1
330 | 1
331 | 1
332 | 1
333 | 1
334 | 1
335 | 1
336 | 1
337 | 1
338 | 1
339 | 1
340 | 1
341 | 1
342 | 1
343 | 1
344 | 1
345 | 1
346 | 1
347 | 1
348 | 1
349 | 1
350 | 1
351 | 1
352 | 1
353 | 1
354 | 1
355 | 1
356 | 1
357 | 1
358 | 1
359 | 1
360 | 1
361 | 1
362 | 1
363 | 1
364 | 1
365 | 1
366 | 1
367 | 1
368 | 1
369 | 1
370 | 1
371 | 1
372 | 1
373 | 1
374 | 1
375 | 1
376 | 1
377 | 1
378 | 1
379 | 1
380 | 1
381 | 1
382 | 1
383 | 1
384 | 1
385 | 1
386 | 1
387 | 1
388 | 1
389 | 1
390 | 1
391 | 1
392 | 1
393 | 1
394 | 1
395 | 1
396 | 1
397 | 1
398 | 1
399 | 1
400 | 1
401 | 1
402 | 1
403 | 1
404 | 1
405 | 1
406 | 1
407 | 1
408 | 1
409 | 1
410 | 1
411 | 1
412 | 1
413 | 1
414 | 1
415 | 1
416 | 1
417 | 1
418 | 1
419 | 1
420 | 1
421 | 1
422 | 1
423 | 1
424 | 1
425 | 1
426 | 1
427 | 1
428 | 1
429 | 1
430 | 1
431 | 1
432 | 1
433 | 1
434 | 1
435 | 1
436 | 1
437 | 1
438 | 1
439 | 1
440 | 1
441 | 1
442 | 1
443 | 1
444 | 1
445 | 1
446 | 1
447 | 1
448 | 1
449 | 1
450 | 1
451 | 1
452 | 1
453 | 1
454 | 1
455 | 1
456 | 1
457 | 1
458 | 1
459 | 1
460 | 1
461 | 1
462 | 1
463 | 1
464 | 1
465 | 1
466 | 1
467 | 1
468 | 1
469 | 1
470 | 1
471 | 1
472 | 1
473 | 1
474 | 1
475 | 1
476 | 1
477 | 1
478 | 1
479 | 1
480 | 1
481 | 1
482 | 1
483 | 1
484 | 1
485 | 1
486 | 1
487 | 1
488 | 1
489 | 1
490 | 1
491 | 1
492 | 1
493 | 1
494 | 1
495 | 1
496 | 1
497 | 1
498 | 1
499 | 1
500 | 1
501 | 1
502 | 1
503 | 1
504 | 1
505 | 1
506 | 1
507 | 1
508 | 1
509 | 1
510 | 1
511 | 1
512 | 1
513 | 1
514 | 1
515 | 1
516 | 1
517 | 1
518 | 1
519 | 1
520 | 1
521 | 1
522 | 1
523 | 1
524 | 1
525 | 1
526 | 1
527 | 1
528 | 1
529 | 1
530 | 1
531 | 1
532 | 1
533 | 1
534 | 1
535 | 1
536 | 1
537 | 1
538 | 1
539 | 1
540 | 1
541 | 1
542 | 1
543 | 1
544 | 1
545 | 1
546 | 1
547 | 1
548 | 1
549 | 1
550 | 1
551 | 1
552 | 1
553 | 1
554 | 1
555 | 1
556 | 1
557 | 1
558 | 1
559 | 1
560 | 1
561 | 1
562 | 1
563 | 1
564 | 1
565 | 1
566 | 1
567 | 1
568 | 1
569 | 1
570 | 1
571 | 1
572 | 2
573 | 2
574 | 2
575 | 2
576 | 2
577 | 2
578 | 2
579 | 2
580 | 2
581 | 2
582 | 2
583 | 2
584 | 2
585 | 2
586 | 2
587 | 2
588 | 2
589 | 2
590 | 2
591 | 2
592 | 2
593 | 2
594 | 2
595 | 2
596 | 2
597 | 2
598 | 2
599 | 2
600 | 2
601 | 2
602 | 2
603 | 2
604 | 2
605 | 2
606 | 2
607 | 2
608 | 2
609 | 2
610 | 2
611 | 2
612 | 2
613 | 2
614 | 2
615 | 2
616 | 2
617 | 2
618 | 2
619 | 2
620 | 2
621 | 2
622 | 2
623 | 2
624 | 2
625 | 2
626 | 2
627 | 2
628 | 2
629 | 2
630 | 2
631 | 2
632 | 2
633 | 2
634 | 2
635 | 2
636 | 2
637 | 2
638 | 2
639 | 2
640 | 2
641 | 2
642 | 2
643 | 2
644 | 2
645 | 2
646 | 2
647 | 2
648 | 2
649 | 2
650 | 2
651 | 2
652 | 2
653 | 2
654 | 2
655 | 2
656 | 2
657 | 2
658 | 2
659 | 2
660 | 2
661 | 2
662 | 2
663 | 2
664 | 2
665 | 2
666 | 2
667 | 2
668 | 2
669 | 2
670 | 2
671 | 2
672 | 2
673 | 2
674 | 2
675 | 2
676 | 2
677 | 2
678 | 2
679 | 2
680 | 2
681 | 2
682 | 2
683 | 2
684 | 2
685 | 2
686 | 2
687 | 2
688 | 2
689 | 2
690 | 2
691 | 2
692 | 2
693 | 2
694 | 2
695 | 2
696 | 2
697 | 2
698 | 2
699 | 2
700 | 2
701 | 2
702 | 2
703 | 2
704 | 2
705 | 2
706 | 2
707 | 2
708 | 2
709 | 2
710 | 2
711 | 2
712 | 2
713 | 2
714 | 2
715 | 2
716 | 2
717 | 2
718 | 2
719 | 2
720 | 2
721 | 2
722 | 2
723 | 2
724 | 2
725 | 2
726 | 2
727 | 2
728 | 2
729 | 2
730 | 2
731 | 2
732 | 2
733 | 2
734 | 2
735 | 2
736 | 2
737 | 2
738 | 2
739 | 2
740 | 2
741 | 2
742 | 2
743 | 2
744 | 2
745 | 2
746 | 2
747 | 2
748 | 2
749 | 2
750 |
--------------------------------------------------------------------------------
/datasets/wine.data:
--------------------------------------------------------------------------------
1 | class,Alcohol,Malic-acid,Ash,Alcalinity-of-ash,Magnesium,Total-phenols,Flavanoids,Nonflavanoid-phenols,Proanthocyanins,Color-intensity,Hue,OD280/OD315-of-diluted-wines,Proline
2 | 1,14.23,1.71,2.43,15.6,127,2.8,3.06,.28,2.29,5.64,1.04,3.92,1065
3 | 1,13.2,1.78,2.14,11.2,100,2.65,2.76,.26,1.28,4.38,1.05,3.4,1050
4 | 1,13.16,2.36,2.67,18.6,101,2.8,3.24,.3,2.81,5.68,1.03,3.17,1185
5 | 1,14.37,1.95,2.5,16.8,113,3.85,3.49,.24,2.18,7.8,.86,3.45,1480
6 | 1,13.24,2.59,2.87,21,118,2.8,2.69,.39,1.82,4.32,1.04,2.93,735
7 | 1,14.2,1.76,2.45,15.2,112,3.27,3.39,.34,1.97,6.75,1.05,2.85,1450
8 | 1,14.39,1.87,2.45,14.6,96,2.5,2.52,.3,1.98,5.25,1.02,3.58,1290
9 | 1,14.06,2.15,2.61,17.6,121,2.6,2.51,.31,1.25,5.05,1.06,3.58,1295
10 | 1,14.83,1.64,2.17,14,97,2.8,2.98,.29,1.98,5.2,1.08,2.85,1045
11 | 1,13.86,1.35,2.27,16,98,2.98,3.15,.22,1.85,7.22,1.01,3.55,1045
12 | 1,14.1,2.16,2.3,18,105,2.95,3.32,.22,2.38,5.75,1.25,3.17,1510
13 | 1,14.12,1.48,2.32,16.8,95,2.2,2.43,.26,1.57,5,1.17,2.82,1280
14 | 1,13.75,1.73,2.41,16,89,2.6,2.76,.29,1.81,5.6,1.15,2.9,1320
15 | 1,14.75,1.73,2.39,11.4,91,3.1,3.69,.43,2.81,5.4,1.25,2.73,1150
16 | 1,14.38,1.87,2.38,12,102,3.3,3.64,.29,2.96,7.5,1.2,3,1547
17 | 1,13.63,1.81,2.7,17.2,112,2.85,2.91,.3,1.46,7.3,1.28,2.88,1310
18 | 1,14.3,1.92,2.72,20,120,2.8,3.14,.33,1.97,6.2,1.07,2.65,1280
19 | 1,13.83,1.57,2.62,20,115,2.95,3.4,.4,1.72,6.6,1.13,2.57,1130
20 | 1,14.19,1.59,2.48,16.5,108,3.3,3.93,.32,1.86,8.7,1.23,2.82,1680
21 | 1,13.64,3.1,2.56,15.2,116,2.7,3.03,.17,1.66,5.1,.96,3.36,845
22 | 1,14.06,1.63,2.28,16,126,3,3.17,.24,2.1,5.65,1.09,3.71,780
23 | 1,12.93,3.8,2.65,18.6,102,2.41,2.41,.25,1.98,4.5,1.03,3.52,770
24 | 1,13.71,1.86,2.36,16.6,101,2.61,2.88,.27,1.69,3.8,1.11,4,1035
25 | 1,12.85,1.6,2.52,17.8,95,2.48,2.37,.26,1.46,3.93,1.09,3.63,1015
26 | 1,13.5,1.81,2.61,20,96,2.53,2.61,.28,1.66,3.52,1.12,3.82,845
27 | 1,13.05,2.05,3.22,25,124,2.63,2.68,.47,1.92,3.58,1.13,3.2,830
28 | 1,13.39,1.77,2.62,16.1,93,2.85,2.94,.34,1.45,4.8,.92,3.22,1195
29 | 1,13.3,1.72,2.14,17,94,2.4,2.19,.27,1.35,3.95,1.02,2.77,1285
30 | 1,13.87,1.9,2.8,19.4,107,2.95,2.97,.37,1.76,4.5,1.25,3.4,915
31 | 1,14.02,1.68,2.21,16,96,2.65,2.33,.26,1.98,4.7,1.04,3.59,1035
32 | 1,13.73,1.5,2.7,22.5,101,3,3.25,.29,2.38,5.7,1.19,2.71,1285
33 | 1,13.58,1.66,2.36,19.1,106,2.86,3.19,.22,1.95,6.9,1.09,2.88,1515
34 | 1,13.68,1.83,2.36,17.2,104,2.42,2.69,.42,1.97,3.84,1.23,2.87,990
35 | 1,13.76,1.53,2.7,19.5,132,2.95,2.74,.5,1.35,5.4,1.25,3,1235
36 | 1,13.51,1.8,2.65,19,110,2.35,2.53,.29,1.54,4.2,1.1,2.87,1095
37 | 1,13.48,1.81,2.41,20.5,100,2.7,2.98,.26,1.86,5.1,1.04,3.47,920
38 | 1,13.28,1.64,2.84,15.5,110,2.6,2.68,.34,1.36,4.6,1.09,2.78,880
39 | 1,13.05,1.65,2.55,18,98,2.45,2.43,.29,1.44,4.25,1.12,2.51,1105
40 | 1,13.07,1.5,2.1,15.5,98,2.4,2.64,.28,1.37,3.7,1.18,2.69,1020
41 | 1,14.22,3.99,2.51,13.2,128,3,3.04,.2,2.08,5.1,.89,3.53,760
42 | 1,13.56,1.71,2.31,16.2,117,3.15,3.29,.34,2.34,6.13,.95,3.38,795
43 | 1,13.41,3.84,2.12,18.8,90,2.45,2.68,.27,1.48,4.28,.91,3,1035
44 | 1,13.88,1.89,2.59,15,101,3.25,3.56,.17,1.7,5.43,.88,3.56,1095
45 | 1,13.24,3.98,2.29,17.5,103,2.64,2.63,.32,1.66,4.36,.82,3,680
46 | 1,13.05,1.77,2.1,17,107,3,3,.28,2.03,5.04,.88,3.35,885
47 | 1,14.21,4.04,2.44,18.9,111,2.85,2.65,.3,1.25,5.24,.87,3.33,1080
48 | 1,14.38,3.59,2.28,16,102,3.25,3.17,.27,2.19,4.9,1.04,3.44,1065
49 | 1,13.9,1.68,2.12,16,101,3.1,3.39,.21,2.14,6.1,.91,3.33,985
50 | 1,14.1,2.02,2.4,18.8,103,2.75,2.92,.32,2.38,6.2,1.07,2.75,1060
51 | 1,13.94,1.73,2.27,17.4,108,2.88,3.54,.32,2.08,8.90,1.12,3.1,1260
52 | 1,13.05,1.73,2.04,12.4,92,2.72,3.27,.17,2.91,7.2,1.12,2.91,1150
53 | 1,13.83,1.65,2.6,17.2,94,2.45,2.99,.22,2.29,5.6,1.24,3.37,1265
54 | 1,13.82,1.75,2.42,14,111,3.88,3.74,.32,1.87,7.05,1.01,3.26,1190
55 | 1,13.77,1.9,2.68,17.1,115,3,2.79,.39,1.68,6.3,1.13,2.93,1375
56 | 1,13.74,1.67,2.25,16.4,118,2.6,2.9,.21,1.62,5.85,.92,3.2,1060
57 | 1,13.56,1.73,2.46,20.5,116,2.96,2.78,.2,2.45,6.25,.98,3.03,1120
58 | 1,14.22,1.7,2.3,16.3,118,3.2,3,.26,2.03,6.38,.94,3.31,970
59 | 1,13.29,1.97,2.68,16.8,102,3,3.23,.31,1.66,6,1.07,2.84,1270
60 | 1,13.72,1.43,2.5,16.7,108,3.4,3.67,.19,2.04,6.8,.89,2.87,1285
61 | 2,12.37,.94,1.36,10.6,88,1.98,.57,.28,.42,1.95,1.05,1.82,520
62 | 2,12.33,1.1,2.28,16,101,2.05,1.09,.63,.41,3.27,1.25,1.67,680
63 | 2,12.64,1.36,2.02,16.8,100,2.02,1.41,.53,.62,5.75,.98,1.59,450
64 | 2,13.67,1.25,1.92,18,94,2.1,1.79,.32,.73,3.8,1.23,2.46,630
65 | 2,12.37,1.13,2.16,19,87,3.5,3.1,.19,1.87,4.45,1.22,2.87,420
66 | 2,12.17,1.45,2.53,19,104,1.89,1.75,.45,1.03,2.95,1.45,2.23,355
67 | 2,12.37,1.21,2.56,18.1,98,2.42,2.65,.37,2.08,4.6,1.19,2.3,678
68 | 2,13.11,1.01,1.7,15,78,2.98,3.18,.26,2.28,5.3,1.12,3.18,502
69 | 2,12.37,1.17,1.92,19.6,78,2.11,2,.27,1.04,4.68,1.12,3.48,510
70 | 2,13.34,.94,2.36,17,110,2.53,1.3,.55,.42,3.17,1.02,1.93,750
71 | 2,12.21,1.19,1.75,16.8,151,1.85,1.28,.14,2.5,2.85,1.28,3.07,718
72 | 2,12.29,1.61,2.21,20.4,103,1.1,1.02,.37,1.46,3.05,.906,1.82,870
73 | 2,13.86,1.51,2.67,25,86,2.95,2.86,.21,1.87,3.38,1.36,3.16,410
74 | 2,13.49,1.66,2.24,24,87,1.88,1.84,.27,1.03,3.74,.98,2.78,472
75 | 2,12.99,1.67,2.6,30,139,3.3,2.89,.21,1.96,3.35,1.31,3.5,985
76 | 2,11.96,1.09,2.3,21,101,3.38,2.14,.13,1.65,3.21,.99,3.13,886
77 | 2,11.66,1.88,1.92,16,97,1.61,1.57,.34,1.15,3.8,1.23,2.14,428
78 | 2,13.03,.9,1.71,16,86,1.95,2.03,.24,1.46,4.6,1.19,2.48,392
79 | 2,11.84,2.89,2.23,18,112,1.72,1.32,.43,.95,2.65,.96,2.52,500
80 | 2,12.33,.99,1.95,14.8,136,1.9,1.85,.35,2.76,3.4,1.06,2.31,750
81 | 2,12.7,3.87,2.4,23,101,2.83,2.55,.43,1.95,2.57,1.19,3.13,463
82 | 2,12,.92,2,19,86,2.42,2.26,.3,1.43,2.5,1.38,3.12,278
83 | 2,12.72,1.81,2.2,18.8,86,2.2,2.53,.26,1.77,3.9,1.16,3.14,714
84 | 2,12.08,1.13,2.51,24,78,2,1.58,.4,1.4,2.2,1.31,2.72,630
85 | 2,13.05,3.86,2.32,22.5,85,1.65,1.59,.61,1.62,4.8,.84,2.01,515
86 | 2,11.84,.89,2.58,18,94,2.2,2.21,.22,2.35,3.05,.79,3.08,520
87 | 2,12.67,.98,2.24,18,99,2.2,1.94,.3,1.46,2.62,1.23,3.16,450
88 | 2,12.16,1.61,2.31,22.8,90,1.78,1.69,.43,1.56,2.45,1.33,2.26,495
89 | 2,11.65,1.67,2.62,26,88,1.92,1.61,.4,1.34,2.6,1.36,3.21,562
90 | 2,11.64,2.06,2.46,21.6,84,1.95,1.69,.48,1.35,2.8,1,2.75,680
91 | 2,12.08,1.33,2.3,23.6,70,2.2,1.59,.42,1.38,1.74,1.07,3.21,625
92 | 2,12.08,1.83,2.32,18.5,81,1.6,1.5,.52,1.64,2.4,1.08,2.27,480
93 | 2,12,1.51,2.42,22,86,1.45,1.25,.5,1.63,3.6,1.05,2.65,450
94 | 2,12.69,1.53,2.26,20.7,80,1.38,1.46,.58,1.62,3.05,.96,2.06,495
95 | 2,12.29,2.83,2.22,18,88,2.45,2.25,.25,1.99,2.15,1.15,3.3,290
96 | 2,11.62,1.99,2.28,18,98,3.02,2.26,.17,1.35,3.25,1.16,2.96,345
97 | 2,12.47,1.52,2.2,19,162,2.5,2.27,.32,3.28,2.6,1.16,2.63,937
98 | 2,11.81,2.12,2.74,21.5,134,1.6,.99,.14,1.56,2.5,.95,2.26,625
99 | 2,12.29,1.41,1.98,16,85,2.55,2.5,.29,1.77,2.9,1.23,2.74,428
100 | 2,12.37,1.07,2.1,18.5,88,3.52,3.75,.24,1.95,4.5,1.04,2.77,660
101 | 2,12.29,3.17,2.21,18,88,2.85,2.99,.45,2.81,2.3,1.42,2.83,406
102 | 2,12.08,2.08,1.7,17.5,97,2.23,2.17,.26,1.4,3.3,1.27,2.96,710
103 | 2,12.6,1.34,1.9,18.5,88,1.45,1.36,.29,1.35,2.45,1.04,2.77,562
104 | 2,12.34,2.45,2.46,21,98,2.56,2.11,.34,1.31,2.8,.8,3.38,438
105 | 2,11.82,1.72,1.88,19.5,86,2.5,1.64,.37,1.42,2.06,.94,2.44,415
106 | 2,12.51,1.73,1.98,20.5,85,2.2,1.92,.32,1.48,2.94,1.04,3.57,672
107 | 2,12.42,2.55,2.27,22,90,1.68,1.84,.66,1.42,2.7,.86,3.3,315
108 | 2,12.25,1.73,2.12,19,80,1.65,2.03,.37,1.63,3.4,1,3.17,510
109 | 2,12.72,1.75,2.28,22.5,84,1.38,1.76,.48,1.63,3.3,.88,2.42,488
110 | 2,12.22,1.29,1.94,19,92,2.36,2.04,.39,2.08,2.7,.86,3.02,312
111 | 2,11.61,1.35,2.7,20,94,2.74,2.92,.29,2.49,2.65,.96,3.26,680
112 | 2,11.46,3.74,1.82,19.5,107,3.18,2.58,.24,3.58,2.9,.75,2.81,562
113 | 2,12.52,2.43,2.17,21,88,2.55,2.27,.26,1.22,2,.9,2.78,325
114 | 2,11.76,2.68,2.92,20,103,1.75,2.03,.6,1.05,3.8,1.23,2.5,607
115 | 2,11.41,.74,2.5,21,88,2.48,2.01,.42,1.44,3.08,1.1,2.31,434
116 | 2,12.08,1.39,2.5,22.5,84,2.56,2.29,.43,1.04,2.9,.93,3.19,385
117 | 2,11.03,1.51,2.2,21.5,85,2.46,2.17,.52,2.01,1.9,1.71,2.87,407
118 | 2,11.82,1.47,1.99,20.8,86,1.98,1.6,.3,1.53,1.95,.95,3.33,495
119 | 2,12.42,1.61,2.19,22.5,108,2,2.09,.34,1.61,2.06,1.06,2.96,345
120 | 2,12.77,3.43,1.98,16,80,1.63,1.25,.43,.83,3.4,.7,2.12,372
121 | 2,12,3.43,2,19,87,2,1.64,.37,1.87,1.28,.93,3.05,564
122 | 2,11.45,2.4,2.42,20,96,2.9,2.79,.32,1.83,3.25,.8,3.39,625
123 | 2,11.56,2.05,3.23,28.5,119,3.18,5.08,.47,1.87,6,.93,3.69,465
124 | 2,12.42,4.43,2.73,26.5,102,2.2,2.13,.43,1.71,2.08,.92,3.12,365
125 | 2,13.05,5.8,2.13,21.5,86,2.62,2.65,.3,2.01,2.6,.73,3.1,380
126 | 2,11.87,4.31,2.39,21,82,2.86,3.03,.21,2.91,2.8,.75,3.64,380
127 | 2,12.07,2.16,2.17,21,85,2.6,2.65,.37,1.35,2.76,.86,3.28,378
128 | 2,12.43,1.53,2.29,21.5,86,2.74,3.15,.39,1.77,3.94,.69,2.84,352
129 | 2,11.79,2.13,2.78,28.5,92,2.13,2.24,.58,1.76,3,.97,2.44,466
130 | 2,12.37,1.63,2.3,24.5,88,2.22,2.45,.4,1.9,2.12,.89,2.78,342
131 | 2,12.04,4.3,2.38,22,80,2.1,1.75,.42,1.35,2.6,.79,2.57,580
132 | 3,12.86,1.35,2.32,18,122,1.51,1.25,.21,.94,4.1,.76,1.29,630
133 | 3,12.88,2.99,2.4,20,104,1.3,1.22,.24,.83,5.4,.74,1.42,530
134 | 3,12.81,2.31,2.4,24,98,1.15,1.09,.27,.83,5.7,.66,1.36,560
135 | 3,12.7,3.55,2.36,21.5,106,1.7,1.2,.17,.84,5,.78,1.29,600
136 | 3,12.51,1.24,2.25,17.5,85,2,.58,.6,1.25,5.45,.75,1.51,650
137 | 3,12.6,2.46,2.2,18.5,94,1.62,.66,.63,.94,7.1,.73,1.58,695
138 | 3,12.25,4.72,2.54,21,89,1.38,.47,.53,.8,3.85,.75,1.27,720
139 | 3,12.53,5.51,2.64,25,96,1.79,.6,.63,1.1,5,.82,1.69,515
140 | 3,13.49,3.59,2.19,19.5,88,1.62,.48,.58,.88,5.7,.81,1.82,580
141 | 3,12.84,2.96,2.61,24,101,2.32,.6,.53,.81,4.92,.89,2.15,590
142 | 3,12.93,2.81,2.7,21,96,1.54,.5,.53,.75,4.6,.77,2.31,600
143 | 3,13.36,2.56,2.35,20,89,1.4,.5,.37,.64,5.6,.7,2.47,780
144 | 3,13.52,3.17,2.72,23.5,97,1.55,.52,.5,.55,4.35,.89,2.06,520
145 | 3,13.62,4.95,2.35,20,92,2,.8,.47,1.02,4.4,.91,2.05,550
146 | 3,12.25,3.88,2.2,18.5,112,1.38,.78,.29,1.14,8.21,.65,2,855
147 | 3,13.16,3.57,2.15,21,102,1.5,.55,.43,1.3,4,.6,1.68,830
148 | 3,13.88,5.04,2.23,20,80,.98,.34,.4,.68,4.9,.58,1.33,415
149 | 3,12.87,4.61,2.48,21.5,86,1.7,.65,.47,.86,7.65,.54,1.86,625
150 | 3,13.32,3.24,2.38,21.5,92,1.93,.76,.45,1.25,8.42,.55,1.62,650
151 | 3,13.08,3.9,2.36,21.5,113,1.41,1.39,.34,1.14,9.40,.57,1.33,550
152 | 3,13.5,3.12,2.62,24,123,1.4,1.57,.22,1.25,8.60,.59,1.3,500
153 | 3,12.79,2.67,2.48,22,112,1.48,1.36,.24,1.26,10.8,.48,1.47,480
154 | 3,13.11,1.9,2.75,25.5,116,2.2,1.28,.26,1.56,7.1,.61,1.33,425
155 | 3,13.23,3.3,2.28,18.5,98,1.8,.83,.61,1.87,10.52,.56,1.51,675
156 | 3,12.58,1.29,2.1,20,103,1.48,.58,.53,1.4,7.6,.58,1.55,640
157 | 3,13.17,5.19,2.32,22,93,1.74,.63,.61,1.55,7.9,.6,1.48,725
158 | 3,13.84,4.12,2.38,19.5,89,1.8,.83,.48,1.56,9.01,.57,1.64,480
159 | 3,12.45,3.03,2.64,27,97,1.9,.58,.63,1.14,7.5,.67,1.73,880
160 | 3,14.34,1.68,2.7,25,98,2.8,1.31,.53,2.7,13,.57,1.96,660
161 | 3,13.48,1.67,2.64,22.5,89,2.6,1.1,.52,2.29,11.75,.57,1.78,620
162 | 3,12.36,3.83,2.38,21,88,2.3,.92,.5,1.04,7.65,.56,1.58,520
163 | 3,13.69,3.26,2.54,20,107,1.83,.56,.5,.8,5.88,.96,1.82,680
164 | 3,12.85,3.27,2.58,22,106,1.65,.6,.6,.96,5.58,.87,2.11,570
165 | 3,12.96,3.45,2.35,18.5,106,1.39,.7,.4,.94,5.28,.68,1.75,675
166 | 3,13.78,2.76,2.3,22,90,1.35,.68,.41,1.03,9.58,.7,1.68,615
167 | 3,13.73,4.36,2.26,22.5,88,1.28,.47,.52,1.15,6.62,.78,1.75,520
168 | 3,13.45,3.7,2.6,23,111,1.7,.92,.43,1.46,10.68,.85,1.56,695
169 | 3,12.82,3.37,2.3,19.5,88,1.48,.66,.4,.97,10.26,.72,1.75,685
170 | 3,13.58,2.58,2.69,24.5,105,1.55,.84,.39,1.54,8.66,.74,1.8,750
171 | 3,13.4,4.6,2.86,25,112,1.98,.96,.27,1.11,8.5,.67,1.92,630
172 | 3,12.2,3.03,2.32,19,96,1.25,.49,.4,.73,5.5,.66,1.83,510
173 | 3,12.77,2.39,2.28,19.5,86,1.39,.51,.48,.64,9.899999,.57,1.63,470
174 | 3,14.16,2.51,2.48,20,91,1.68,.7,.44,1.24,9.7,.62,1.71,660
175 | 3,13.71,5.65,2.45,20.5,95,1.68,.61,.52,1.06,7.7,.64,1.74,740
176 | 3,13.4,3.91,2.48,23,102,1.8,.75,.43,1.41,7.3,.7,1.56,750
177 | 3,13.27,4.28,2.26,20,120,1.59,.69,.43,1.35,10.2,.59,1.56,835
178 | 3,13.17,2.59,2.37,20,120,1.65,.68,.53,1.46,9.3,.6,1.62,840
179 | 3,14.13,4.1,2.74,24.5,96,2.05,.76,.56,1.35,9.2,.61,1.6,560
180 |
--------------------------------------------------------------------------------
/example_classification.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Created on Fri Aug 27 15:21:08 2021
4 |
5 | @author: allan
6 | """
7 |
8 | import grape
9 | import algorithms
10 | from functions import add, sub, mul, pdiv, neg, and_, or_, not_, less_than_or_equal, greater_than_or_equal
11 |
12 | from os import path
13 | import pandas as pd
14 | import numpy as np
15 | from deap import creator, base, tools
16 | import random
17 |
18 | from sklearn.model_selection import train_test_split
19 | import csv
20 |
21 | problem = 'heartDisease'
22 |
23 | def setDataSet(problem, RANDOM_SEED):
24 | np.random.seed(RANDOM_SEED)
25 | if problem == 'australian': #66
26 | data = pd.read_csv(r"datasets/australian.dat", sep=" ")
27 | l = data.shape[0]
28 | Y = np.zeros([l,], dtype=int)
29 | for i in range(l):
30 | Y[i] = data['output'].iloc[i]
31 | data.pop('output')
32 | #continuous features: d1, d2, d6, d9, d12, d13
33 | #categorical features:
34 | #d0: two
35 | #d3: three => change 3 to 0
36 | #data['d3'] = data['d3'].replace([3], 0)
37 | #d4: 14 => change 14 to 0
38 | #data['d4'] = data['d4'].replace([14], 0)
39 | #d5: 9 => change 9 to 0
40 | #data['d5'] = data['d5'].replace([9], 0)
41 | #d7: two
42 | #d8: two
43 | #d10: two
44 | #d11: three => change 3 to 0
45 | #data['d11'] = data['d11'].replace([3], 0)
46 |
47 | #Convert categorical features using one-hot encoding
48 | dataOneHot = pd.get_dummies(data, columns=['d3', 'd4', 'd5', 'd11'])
49 |
50 | X = dataOneHot.to_numpy()
51 |
52 | X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.3, random_state=RANDOM_SEED)
53 |
54 | X_train = np.transpose(X_train)
55 | X_test = np.transpose(X_test)
56 |
57 | GRAMMAR_FILE = 'australian.bnf'
58 |
59 | if problem == 'carEvaluation':
60 | Y = np.zeros([1727,], dtype=int)
61 |
62 | column_names = ["buying", "maint", "doors", "persons", "lug_boot", "safety", "class"]
63 |
64 | data = pd.read_csv(r"datasets/car.data", sep=",", header=0, names=column_names)
65 |
66 | for i in range(1727):
67 | if data['class'].iloc[i] == 'unacc':
68 | Y[i] = 0
69 | elif data['class'].iloc[i] == 'acc':
70 | Y[i] = 1
71 | elif data['class'].iloc[i] == 'good':
72 | Y[i] = 2
73 | elif data['class'].iloc[i] == 'vgood':
74 | Y[i] = 3
75 |
76 | data = data.drop(['class'], axis=1)
77 |
78 | #Use one-hot encoding on categorical (non-binary) features
79 | dataOneHot = pd.get_dummies(data)
80 |
81 | X = dataOneHot.to_numpy()
82 |
83 | X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.3, random_state=RANDOM_SEED)
84 |
85 | X_train = np.transpose(X_train)
86 | X_test = np.transpose(X_test)
87 |
88 | GRAMMAR_FILE = 'carEvaluation.bnf'
89 |
90 | if problem == 'Banknote':
91 | #There 1813 samples with class 1
92 | #We'll split into 70% for training and 30% for test, assuring the balanced data
93 | X_train = np.zeros([1000, 4], dtype=float)
94 | Y_train = np.zeros([1000,], dtype=bool)
95 | X_test = np.zeros([372, 4], dtype=float)
96 | Y_test = np.zeros([372,], dtype=bool)
97 |
98 | data = pd.read_table(r"datasets/banknote_Train.csv", sep=" ")
99 | for i in range(1000):
100 | for j in range(4):
101 | X_train[i,j] = data['x'+ str(j)].iloc[i]
102 | for i in range(1000):
103 | Y_train[i] = data['y'].iloc[i] > 0
104 |
105 | data = pd.read_table(r"datasets/banknote_Test.csv", sep=" ")
106 | for i in range(372):
107 | for j in range(4):
108 | X_test[i,j] = data['x'+ str(j)].iloc[i]
109 | for i in range(372):
110 | Y_test[i] = data['y'].iloc[i] > 0
111 |
112 | X_train = np.transpose(X_train)
113 | X_test = np.transpose(X_test)
114 |
115 | GRAMMAR_FILE = 'Banknote.bnf'
116 |
117 | if problem == 'spambase':
118 | #There are 1813 samples with class 1
119 | #We'll split into 70% for training and 30% for testing
120 | X = np.zeros([4601, 57], dtype=float)
121 | Y = np.zeros([4601,], dtype=int)
122 |
123 | data = pd.read_table(r"datasets/spambase.csv")
124 | for i in range(4601):
125 | for j in range(57):
126 | X[i,j] = data['d'+ str(j)].iloc[i]
127 | for i in range(4601):
128 | Y[i] = data['class'].iloc[i]
129 |
130 | X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.3, random_state=RANDOM_SEED)
131 |
132 | X_train = np.transpose(X_train)
133 | X_test = np.transpose(X_test)
134 |
135 | GRAMMAR_FILE = 'spambase.bnf'
136 |
137 | if problem == 'heartDisease':
138 | data = pd.read_csv(r"datasets/processed.cleveland.data", sep=",")
139 | #Some values are missing in the 'ca' and 'thal' columns, so let's remove those rows
140 | data = data[data.ca != '?']
141 | data = data[data.thal != '?']
142 |
143 | #There are 160 samples with class 0, 54 with class 1, 35 with class 2,
144 | #35 with class 3 and 13 with class 4
145 | #Let's keep class 0 and merge all the remaining classes into class 1
146 | Y = data['class'].to_numpy()
147 | for i in range(len(Y)):
148 | Y[i] = 1 if Y[i] > 0 else 0
149 | data = data.drop(['class'], axis=1)
150 |
151 | data.loc[:, ['age', 'trestbps', 'chol', 'thalach', 'oldpeak']] = (data.loc[:, ['age', 'trestbps', 'chol', 'thalach', 'oldpeak']] - data.loc[:, ['age', 'trestbps', 'chol', 'thalach', 'oldpeak']].mean())/data.loc[:, ['age', 'trestbps', 'chol', 'thalach', 'oldpeak']].std()
152 |
153 | data = pd.get_dummies(data, columns=['cp', 'restecg', 'slope', 'ca', 'thal'])#, prefix = ['cp'])
154 |
155 | X = data.to_numpy()
156 |
157 | X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.3, random_state=RANDOM_SEED)
158 |
159 | X_train = np.transpose(X_train)
160 | X_test = np.transpose(X_test)
161 |
162 | GRAMMAR_FILE = 'heartDisease.bnf'
163 |
164 | BNF_GRAMMAR = grape.Grammar(r"grammars/" + GRAMMAR_FILE)
165 |
166 | return X_train, Y_train, X_test, Y_test, BNF_GRAMMAR
167 |
168 | def mae(y, yhat):
169 | """
170 | Calculate the mean absolute error between binary inputs, i.e. the
171 | fraction of mismatches (equivalently, 1 - accuracy).
172 | :param y: The expected input (i.e. from dataset).
173 | :param yhat: The given input (i.e. from phenotype).
174 | :return: The mean absolute error.
175 | """
176 |
177 | compare = np.equal(y,yhat)
178 |
179 | return 1 - np.mean(compare)
180 |
181 | def fitness_eval(individual, points):
182 | x = points[0]
183 | Y = points[1]
184 |
185 | if individual.invalid:
186 | return np.NaN,
187 |
188 | # Evaluate the expression
189 | try:
190 | pred = eval(individual.phenotype)
191 | except (FloatingPointError, ZeroDivisionError, OverflowError,
192 | MemoryError):
193 | # FP err can happen through eg overflow (lots of pow/exp calls)
194 | # ZeroDiv can happen when using unprotected operators
195 | return np.NaN,
196 | assert np.isrealobj(pred)
197 |
198 | try:
199 | Y_class = [1 if pred[i] > 0 else 0 for i in range(len(Y))]
200 | except (IndexError, TypeError):
201 | return np.NaN,
202 | fitness = mae(Y, Y_class)
203 |
204 | return fitness,
205 |
206 | toolbox = base.Toolbox()
207 |
208 | # define a single objective, minimising fitness strategy:
209 | creator.create("FitnessMin", base.Fitness, weights=(-1.0,))
210 |
211 | creator.create('Individual', grape.Individual, fitness=creator.FitnessMin)
212 |
213 | toolbox.register("populationCreator", grape.sensible_initialisation, creator.Individual)
214 | #toolbox.register("populationCreator", grape.random_initialisation, creator.Individual)
215 | #toolbox.register("populationCreator", grape.PI_Grow, creator.Individual)
216 |
217 | toolbox.register("evaluate", fitness_eval)
218 |
219 | # Tournament selection:
220 | toolbox.register("select", tools.selTournament, tournsize=7) #selLexicaseFilter
221 |
222 | # Single-point crossover:
223 | toolbox.register("mate", grape.crossover_onepoint)
224 |
225 | # Flip-int mutation:
226 | toolbox.register("mutate", grape.mutation_int_flip_per_codon)
227 |
228 | POPULATION_SIZE = 1000
229 | MAX_INIT_TREE_DEPTH = 13
230 | MIN_INIT_TREE_DEPTH = 4
231 |
232 | MAX_GENERATIONS = 200
233 | P_CROSSOVER = 0.8
234 | P_MUTATION = 0.01
235 | ELITE_SIZE = 0 #round(0.01*POPULATION_SIZE) #it should be smaller than or equal to HALLOFFAME_SIZE
236 | HALLOFFAME_SIZE = 1 #round(0.01*POPULATION_SIZE) #it should be at least 1
237 |
238 | MIN_INIT_GENOME_LENGTH = 95#*6
239 | MAX_INIT_GENOME_LENGTH = 115#*6
240 | random_initialisation = False #set to True if you use random initialisation
241 |
242 | CODON_CONSUMPTION = 'lazy'
243 | GENOME_REPRESENTATION = 'list'
244 | MAX_GENOME_LENGTH = None#'auto'
245 |
246 | MAX_TREE_DEPTH = 35 #equivalent to 17 in GP with this grammar
247 | MAX_WRAPS = 0
248 | CODON_SIZE = 255
249 |
250 | REPORT_ITEMS = ['gen', 'invalid', 'avg', 'std', 'min', 'max',
251 | 'fitness_test',
252 | 'best_ind_length', 'avg_length',
253 | 'best_ind_nodes', 'avg_nodes',
254 | 'best_ind_depth', 'avg_depth',
255 | 'avg_used_codons', 'best_ind_used_codons',
256 | 'structural_diversity', 'fitness_diversity',
257 | 'selection_time', 'generation_time']
258 |
259 | N_RUNS = 1
260 |
261 | for i in range(N_RUNS):
262 | print()
263 | print()
264 | print("Run:", i)
265 | print()
266 |
267 | RANDOM_SEED = i + 1
268 |
269 | X_train, Y_train, X_test, Y_test, BNF_GRAMMAR = setDataSet(problem, RANDOM_SEED) #We set this up inside the loop in case the dataset is defined randomly
270 |
271 | random.seed(RANDOM_SEED)
272 |
273 | # create initial population (generation 0):
274 | if random_initialisation:
275 | population = toolbox.populationCreator(pop_size=POPULATION_SIZE,
276 | bnf_grammar=BNF_GRAMMAR,
277 | min_init_genome_length=MIN_INIT_GENOME_LENGTH,
278 | max_init_genome_length=MAX_INIT_GENOME_LENGTH,
279 | max_init_depth=MAX_TREE_DEPTH,
280 | codon_size=CODON_SIZE,
281 | codon_consumption=CODON_CONSUMPTION,
282 | genome_representation=GENOME_REPRESENTATION
283 | )
284 | else:
285 | population = toolbox.populationCreator(pop_size=POPULATION_SIZE,
286 | bnf_grammar=BNF_GRAMMAR,
287 | min_init_depth=MIN_INIT_TREE_DEPTH,
288 | max_init_depth=MAX_INIT_TREE_DEPTH,
289 | codon_size=CODON_SIZE,
290 | codon_consumption=CODON_CONSUMPTION,
291 | genome_representation=GENOME_REPRESENTATION
292 | )
293 |
294 | # define the hall-of-fame object:
295 | hof = tools.HallOfFame(HALLOFFAME_SIZE)
296 |
297 | # prepare the statistics object:
298 | stats = tools.Statistics(key=lambda ind: ind.fitness.values)
299 | stats.register("avg", np.nanmean)
300 | stats.register("std", np.nanstd)
301 | stats.register("min", np.nanmin)
302 | stats.register("max", np.nanmax)
303 |
304 | # perform the Grammatical Evolution flow:
305 | population, logbook = algorithms.ge_eaSimpleWithElitism(population, toolbox, cxpb=P_CROSSOVER, mutpb=P_MUTATION,
306 | ngen=MAX_GENERATIONS, elite_size=ELITE_SIZE,
307 | bnf_grammar=BNF_GRAMMAR,
308 | codon_size=CODON_SIZE,
309 | max_tree_depth=MAX_TREE_DEPTH,
310 | max_genome_length=MAX_GENOME_LENGTH,
311 | points_train=[X_train, Y_train],
312 | points_test=[X_test, Y_test],
313 | codon_consumption=CODON_CONSUMPTION,
314 | report_items=REPORT_ITEMS,
315 | genome_representation=GENOME_REPRESENTATION,
316 | stats=stats, halloffame=hof, verbose=False)
317 |
318 | import textwrap
319 | best = hof.items[0].phenotype
320 | print("Best individual: \n","\n".join(textwrap.wrap(best,80)))
321 | print("\nTraining Fitness: ", hof.items[0].fitness.values[0])
322 |
323 | print("Depth: ", hof.items[0].depth)
324 | print("Length of the genome: ", len(hof.items[0].genome))
325 | print(f'Used portion of the genome: {hof.items[0].used_codons/len(hof.items[0].genome):.2f}')
326 |
327 | max_fitness_values, mean_fitness_values = logbook.select("max", "avg")
328 | min_fitness_values, std_fitness_values = logbook.select("min", "std")
329 | fitness_test = logbook.select("fitness_test")
330 |
331 | best_ind_length = logbook.select("best_ind_length")
332 | avg_length = logbook.select("avg_length")
333 |
334 | selection_time = logbook.select("selection_time")
335 | generation_time = logbook.select("generation_time")
336 | gen, invalid = logbook.select("gen", "invalid")
337 | avg_used_codons = logbook.select("avg_used_codons")
338 | best_ind_used_codons = logbook.select("best_ind_used_codons")
339 |
340 | best_ind_nodes = logbook.select("best_ind_nodes")
341 | avg_nodes = logbook.select("avg_nodes")
342 |
343 | best_ind_depth = logbook.select("best_ind_depth")
344 | avg_depth = logbook.select("avg_depth")
345 |
346 | structural_diversity = logbook.select("structural_diversity")
347 | fitness_diversity = logbook.select("fitness_diversity")
348 |
349 | r = RANDOM_SEED
350 | header = REPORT_ITEMS
351 |
352 | with open(r"./results/" + str(r) + ".csv", "w", encoding='UTF8', newline='') as csvfile:
353 | writer = csv.writer(csvfile, delimiter='\t')
354 | writer.writerow(header)
355 | for value in range(len(max_fitness_values)):
356 | writer.writerow([gen[value], invalid[value], mean_fitness_values[value],
357 | std_fitness_values[value], min_fitness_values[value],
358 | max_fitness_values[value],
359 | fitness_test[value],
360 | best_ind_length[value],
361 | avg_length[value],
362 | best_ind_nodes[value],
363 | avg_nodes[value],
364 | best_ind_depth[value],
365 | avg_depth[value],
366 | avg_used_codons[value],
367 | best_ind_used_codons[value],
368 | structural_diversity[value],
369 | fitness_diversity[value],
370 | selection_time[value],
371 | generation_time[value]])
--------------------------------------------------------------------------------
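Note: all of these example scripts share one evaluation idiom: the evolved phenotype is a string of Python code, and fitness_eval runs it with eval() while the name x is bound to the feature matrix (transposed to features x samples, so x[i] selects one feature across all samples). A minimal sketch of that idiom, with a made-up phenotype string purely for illustration:

import numpy as np
x = np.array([[0.5, -1.2, 2.0], [1.0, 0.3, -0.7]]) #2 features x 3 samples, as after np.transpose
phenotype = "x[0] + 0.1*x[1]" #hypothetical evolved expression
pred = eval(phenotype) #one raw output per sample
labels = [1 if pred[i] > 0 else 0 for i in range(len(pred))] #thresholded at 0, as in the classification fitness_eval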
/example_increment.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Created on Fri Sep 20 16:09:54 2024
4 |
5 | @author: Allan.DeLima
6 | """
7 |
8 | import grape
9 | import algorithms
10 |
11 | from os import path
12 | import pandas as pd
13 | import numpy as np
14 | from deap import creator, base, tools
15 | import random
16 |
17 | GRAMMAR_FILE = 'simpleIncrement.bnf'
18 | BNF_GRAMMAR = grape.Grammar(r"grammars/" + GRAMMAR_FILE)
19 |
20 | RANDOM_SEED = 42
21 |
22 | toolbox = base.Toolbox()
23 |
24 | # define a single objective, minimising fitness strategy:
25 | creator.create("FitnessMin", base.Fitness, weights=(-1.0,))
26 |
27 | creator.create('Individual', grape.Individual, fitness=creator.FitnessMin)
28 |
29 | toolbox.register("populationCreator", grape.sensible_initialisation, creator.Individual)
30 | #toolbox.register("populationCreator", grape.random_initialisation, creator.Individual)
31 |
32 | POPULATION_SIZE = 10
33 | MAX_INIT_TREE_DEPTH = 6
34 | MIN_INIT_TREE_DEPTH = 4
35 |
36 | CODON_CONSUMPTION = 'lazy'
37 | GENOME_REPRESENTATION = 'list'
38 |
39 | CODON_SIZE = 255
40 |
41 | random.seed(RANDOM_SEED)
42 |
43 | population = toolbox.populationCreator(pop_size=POPULATION_SIZE,
44 | bnf_grammar=BNF_GRAMMAR,
45 | min_init_depth=MIN_INIT_TREE_DEPTH,
46 | max_init_depth=MAX_INIT_TREE_DEPTH,
47 | codon_size=CODON_SIZE,
48 | codon_consumption=CODON_CONSUMPTION,
49 | genome_representation=GENOME_REPRESENTATION
50 | )
51 |
52 | for i in range(POPULATION_SIZE):
53 | print(population[i].phenotype)
54 |
55 | exec(population[i].phenotype)
56 |
57 | print("a = ", a)
58 | print("b = ", b)
59 |
60 | print()
61 | print()
62 |
--------------------------------------------------------------------------------
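Note: example_increment.py uses exec() rather than eval(), because simpleIncrement.bnf derives statements with side effects instead of expressions. A minimal sketch of that idiom, with a hypothetical phenotype (the actual derivations depend on the grammar file):

a, b = 0, 0
phenotype = "a = a + 1\nb = a + 1" #hypothetical statement sequence of the kind the grammar might derive
exec(phenotype) #executes the statements at module scope, updating a and b
print(a, b) #-> 1 2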
/example_parity.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Created on Fri Aug 27 15:21:08 2021
4 |
5 | @author: allan
6 | """
7 |
8 | import grape
9 | import algorithms
10 | from functions import not_, and_, or_, nand_, nor_
11 |
12 | from os import path
13 | import pandas as pd
14 | import numpy as np
15 | from deap import creator, base, tools
16 |
17 | import random
18 |
19 | problem = 'parity4'
20 |
21 | if problem == 'parity3':
22 | X_train = np.zeros([3,8], dtype=bool)
23 | Y_train = np.zeros([8,], dtype=bool)
24 |
25 | data = pd.read_table(r"datasets/parity3.csv")
26 | for i in range(3):
27 | for j in range(8):
28 | X_train[i,j] = data['d'+ str(i)].iloc[j]
29 | for i in range(8):
30 | Y_train[i] = data['output'].iloc[i]
31 |
32 | GRAMMAR_FILE = 'parity3.bnf'
33 |
34 | elif problem == 'parity4':
35 | X_train = np.zeros([4,16], dtype=bool)
36 | Y_train = np.zeros([16,], dtype=bool)
37 |
38 | data = pd.read_table(r"datasets/parity4.csv")
39 | for i in range(4):
40 | for j in range(16):
41 | X_train[i,j] = data['d'+ str(i)].iloc[j]
42 | for i in range(16):
43 | Y_train[i] = data['output'].iloc[i]
44 |
45 | GRAMMAR_FILE = 'parity4.bnf'
46 |
47 | elif problem == 'parity5':
48 | X_train = np.zeros([5,32], dtype=bool)
49 | Y_train = np.zeros([32,], dtype=bool)
50 |
51 | data = pd.read_table(r"datasets/parity5.csv")
52 | for i in range(5):
53 | for j in range(32):
54 | X_train[i,j] = data['d'+ str(i)].iloc[j]
55 | for i in range(32):
56 | Y_train[i] = data['output'].iloc[i]
57 |
58 | GRAMMAR_FILE = 'parity5.bnf'
59 |
60 | BNF_GRAMMAR = grape.Grammar(r"grammars/" + GRAMMAR_FILE)
61 |
62 | def mae(y, yhat):
63 | """
64 | Calculate the mean absolute error between binary inputs, i.e. the
65 | fraction of mismatches (equivalently, 1 - accuracy).
66 | :param y: The expected input (i.e. from dataset).
67 | :param yhat: The given input (i.e. from phenotype).
68 | :return: The mean absolute error.
69 | """
70 |
71 | compare = np.equal(y,yhat)
72 |
73 | return 1 - np.mean(compare)
74 |
75 | def fitness_eval(individual, points, penalty_divider=None, penalise_greater_than=None):
76 | x = points[0]
77 | Y = points[1]
78 |
79 | if individual.invalid:
80 | return np.NaN,
81 |
82 | # Evaluate the expression
83 | try:
84 | pred = eval(individual.phenotype)
85 | except (FloatingPointError, ZeroDivisionError, OverflowError,
86 | MemoryError):
87 | # FP err can happen through eg overflow (lots of pow/exp calls)
88 | # ZeroDiv can happen when using unprotected operators
89 | return np.NaN,
90 | assert np.isrealobj(pred)
91 |
92 | fitness = mae(Y, pred)
93 | individual.fitness_each_sample = np.equal(Y, pred)
94 |
95 | if penalise_greater_than and penalty_divider:
96 | if len(individual.genome) > penalise_greater_than:
97 | fitness += len(individual.genome) / penalty_divider
98 |
99 | return fitness,
100 |
101 |
102 |
103 | POPULATION_SIZE = 1000
104 | MAX_GENERATIONS = 50
105 | P_CROSSOVER = 0.8
106 | P_MUTATION = 0.01
107 | ELITE_SIZE = 1 #round(0.01*POPULATION_SIZE) #it should be smaller than or equal to HALLOFFAME_SIZE
108 | HALLOFFAME_SIZE = 1 #round(0.01*POPULATION_SIZE) #it should be at least 1
109 |
110 | RANDOM_SEED = 42 #Note that the seed is set inside the run loop, so all runs will behave identically
111 |
112 | MIN_INIT_GENOME_LENGTH = 30 #used only for random initialisation
113 | MAX_INIT_GENOME_LENGTH = 50
114 | random_initialisation = False #set to True if you use random initialisation
115 |
116 | MAX_INIT_TREE_DEPTH = 8 #equivalent to 6 in GP with this grammar
117 | MIN_INIT_TREE_DEPTH = 3
118 | MAX_TREE_DEPTH = 35 #equivalent to 17 in GP with this grammar
119 | MAX_WRAPS = 0
120 | CODON_SIZE = 255
121 |
122 | CODON_CONSUMPTION = 'lazy'
123 | GENOME_REPRESENTATION = 'list'
124 | MAX_GENOME_LENGTH = None
125 |
126 | #Set the next two parameters with integer values, if you want to use the penalty approach
127 | PENALTY_DIVIDER = None
128 | PENALISE_GREATER_THAN = None
129 |
130 | TOURNAMENT_SIZE = 7
131 |
132 | toolbox = base.Toolbox()
133 |
134 | # define a single objective, minimising fitness strategy:
135 | creator.create("FitnessMin", base.Fitness, weights=(-1.0,))
136 |
137 | creator.create('Individual', grape.Individual, fitness=creator.FitnessMin)
138 |
139 | toolbox.register("populationCreator", grape.sensible_initialisation, creator.Individual)
140 | #toolbox.register("populationCreator", grape.random_initialisation, creator.Individual)
141 | #toolbox.register("populationCreator", grape.PI_Grow, creator.Individual)
142 |
143 | toolbox.register("evaluate", fitness_eval, penalty_divider=PENALTY_DIVIDER, penalise_greater_than=PENALISE_GREATER_THAN)
144 | #toolbox.register("evaluate", fitness_eval)
145 |
146 | # Tournament selection:
147 | toolbox.register("select", tools.selTournament, tournsize=TOURNAMENT_SIZE)
148 |
149 | # Single-point crossover:
150 | toolbox.register("mate", grape.crossover_onepoint)
151 |
152 | # Flip-int mutation:
153 | toolbox.register("mutate", grape.mutation_int_flip_per_codon)
154 |
155 | REPORT_ITEMS = ['gen', 'invalid', 'avg', 'std', 'min', 'max',
156 | 'best_ind_length', 'avg_length',
157 | 'best_ind_nodes', 'avg_nodes',
158 | 'best_ind_depth', 'avg_depth',
159 | 'avg_used_codons', 'best_ind_used_codons',
160 | 'behavioural_diversity',
161 | 'structural_diversity', 'fitness_diversity',
162 | 'selection_time', 'generation_time']
163 |
164 | N_RUNS = 3
165 |
166 | for i in range(N_RUNS):
167 | print()
168 | print()
169 | print("Run:", i+1)
170 | print()
171 |
172 | random.seed(RANDOM_SEED) #Comment this line or set a different RANDOM_SEED each run if you want distinct results
173 |
174 | # create initial population (generation 0):
175 | if random_initialisation:
176 | population = toolbox.populationCreator(pop_size=POPULATION_SIZE,
177 | bnf_grammar=BNF_GRAMMAR,
178 | min_init_genome_length=MIN_INIT_GENOME_LENGTH,
179 | max_init_genome_length=MAX_INIT_GENOME_LENGTH,
180 | max_init_depth=MAX_TREE_DEPTH,
181 | codon_size=CODON_SIZE,
182 | codon_consumption=CODON_CONSUMPTION,
183 | genome_representation=GENOME_REPRESENTATION
184 | )
185 | else:
186 | population = toolbox.populationCreator(pop_size=POPULATION_SIZE,
187 | bnf_grammar=BNF_GRAMMAR,
188 | min_init_depth=MIN_INIT_TREE_DEPTH,
189 | max_init_depth=MAX_INIT_TREE_DEPTH,
190 | codon_size=CODON_SIZE,
191 | codon_consumption=CODON_CONSUMPTION,
192 | genome_representation=GENOME_REPRESENTATION
193 | )
194 |
195 | # define the hall-of-fame object:
196 | hof = tools.HallOfFame(HALLOFFAME_SIZE)
197 |
198 | # prepare the statistics object:
199 | stats = tools.Statistics(key=lambda ind: ind.fitness.values)
200 | stats.register("avg", np.nanmean)
201 | stats.register("std", np.nanstd)
202 | stats.register("min", np.nanmin)
203 | stats.register("max", np.nanmax)
204 |
205 | # perform the Grammatical Evolution flow:
206 | population, logbook = algorithms.ge_eaSimpleWithElitism(population, toolbox, cxpb=P_CROSSOVER, mutpb=P_MUTATION,
207 | ngen=MAX_GENERATIONS, elite_size=ELITE_SIZE,
208 | bnf_grammar=BNF_GRAMMAR,
209 | codon_size=CODON_SIZE,
210 | max_tree_depth=MAX_TREE_DEPTH,
211 | max_genome_length=MAX_GENOME_LENGTH,
212 | points_train=[X_train, Y_train],
213 | codon_consumption=CODON_CONSUMPTION,
214 | report_items=REPORT_ITEMS,
215 | genome_representation=GENOME_REPRESENTATION,
216 | stats=stats, halloffame=hof, verbose=False)
217 |
218 | import textwrap
219 | best = hof.items[0].phenotype
220 | print("Best individual: \n","\n".join(textwrap.wrap(best,80)))
221 | print("\nTraining Fitness: ", hof.items[0].fitness.values[0])
222 | print("Depth: ", hof.items[0].depth)
223 | print("Length of the genome: ", len(hof.items[0].genome))
224 | print(f'Used portion of the genome: {hof.items[0].used_codons/len(hof.items[0].genome):.2f}')
225 |
226 | max_fitness_values, mean_fitness_values = logbook.select("max", "avg")
227 | min_fitness_values, std_fitness_values = logbook.select("min", "std")
228 | best_ind_length = logbook.select("best_ind_length")
229 | avg_length = logbook.select("avg_length")
230 |
231 | selection_time = logbook.select("selection_time")
232 | generation_time = logbook.select("generation_time")
233 | gen, invalid = logbook.select("gen", "invalid")
234 | avg_used_codons = logbook.select("avg_used_codons")
235 | best_ind_used_codons = logbook.select("best_ind_used_codons")
236 |
237 | best_ind_nodes = logbook.select("best_ind_nodes")
238 | avg_nodes = logbook.select("avg_nodes")
239 |
240 | best_ind_depth = logbook.select("best_ind_depth")
241 | avg_depth = logbook.select("avg_depth")
242 |
243 | behavioural_diversity = logbook.select("behavioural_diversity")
244 | structural_diversity = logbook.select("structural_diversity")
245 | fitness_diversity = logbook.select("fitness_diversity")
246 |
247 | import csv
248 | r = RANDOM_SEED
249 |
250 | header = REPORT_ITEMS
251 | with open("results/" + str(r) + ".csv", "w", encoding='UTF8', newline='') as csvfile:
252 | writer = csv.writer(csvfile, delimiter='\t')
253 | writer.writerow(header)
254 | for value in range(len(max_fitness_values)):
255 | writer.writerow([gen[value], invalid[value], mean_fitness_values[value],
256 | std_fitness_values[value], min_fitness_values[value],
257 | max_fitness_values[value],
258 | best_ind_length[value],
259 | avg_length[value],
260 | best_ind_nodes[value],
261 | avg_nodes[value],
262 | best_ind_depth[value],
263 | avg_depth[value],
264 | avg_used_codons[value],
265 | best_ind_used_codons[value],
266 | behavioural_diversity[value],
267 | structural_diversity[value],
268 | fitness_diversity[value],
269 | selection_time[value],
270 | generation_time[value]])
--------------------------------------------------------------------------------
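Note: the parity phenotypes are built from the boolean primitives imported from functions.py, applied to whole rows of the features x samples matrix at once. For instance, two-bit odd parity (XOR) can be written with those primitives by hand (an illustrative expression, not an evolved solution):

import numpy as np
from functions import and_, or_, not_
x = np.array([[0, 0, 1, 1], [0, 1, 0, 1]], dtype=bool) #2 inputs x 4 cases
pred = or_(and_(x[0], not_(x[1])), and_(not_(x[0]), x[1])) #XOR
print(pred) #-> [False True True False], compared elementwise against Y_train by mae()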
/example_regression.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Created on Fri Aug 27 15:21:08 2021
4 |
5 | @author: allan
6 | """
7 |
8 | import grape
9 | import algorithms
10 | from functions import add, sub, mul, pdiv, plog, exp, psqrt
11 |
12 | import random
13 |
14 | from os import path
15 | import pandas as pd
16 | import numpy as np
17 | from deap import creator, base, tools
18 |
19 | import warnings
20 | warnings.filterwarnings("ignore")
21 |
22 | problem = 'vladislavleva4'
23 |
24 | def setDataSet(problem):
25 | if problem == 'pagie1':
26 | X_train = np.zeros([2,676], dtype=float)
27 | Y_train = np.zeros([676,], dtype=float)
28 |
29 | data_train = pd.read_table(r"datasets/Pagie1_train.txt")
30 | for i in range(2):
31 | for j in range(676):
32 | X_train[i,j] = data_train['x'+ str(i)].iloc[j]
33 | for i in range(676):
34 | Y_train[i] = data_train['response'].iloc[i]
35 |
36 | X_test = np.zeros([2,10000], dtype=float)
37 | Y_test = np.zeros([10000,], dtype=float)
38 |
39 | data_test = pd.read_table(r"datasets/Pagie1_test.txt")
40 | for i in range(2):
41 | for j in range(10000):
42 | X_test[i,j] = data_test['x'+ str(i)].iloc[j]
43 | for i in range(10000):
44 | Y_test[i] = data_test['response'].iloc[i]
45 |
46 | GRAMMAR_FILE = 'Pagie1.bnf'
47 |
48 | elif problem == 'vladislavleva4':
49 | X_train = np.random.uniform(0.05, 6.05, (5, 1024))
50 | Y_train = np.zeros([1024,], dtype=float)
51 | for i in range(1024):
52 | Y_train[i] = 10/(5 + (X_train[0,i] - 3)**2 + (X_train[1,i] - 3)**2 + (X_train[2,i] - 3)**2 + (X_train[3,i] - 3)**2 + (X_train[4,i] - 3)**2)
53 |
54 | X_test = np.random.uniform(-0.25, 6.35, (5, 5000))
55 | Y_test = np.zeros([5000,], dtype=float)
56 | for i in range(5000):
57 | Y_test[i] = 10/(5 + (X_test[0,i] - 3)**2 + (X_test[1,i] - 3)**2 + (X_test[2,i] - 3)**2 + (X_test[3,i] - 3)**2 + (X_test[4,i] - 3)**2)
58 |
59 | GRAMMAR_FILE = 'Vladislavleva4.bnf'
60 |
61 | elif problem == 'Dow':
62 | X_train = np.zeros([57,747], dtype=float)
63 | Y_train = np.zeros([747,], dtype=float)
64 |
65 | data_train = pd.read_table(r"datasets/DowNorm_train.txt")
66 | for i in range(57): #57 input features, matching x[0]..x[56] in the Dow grammar
67 | for j in range(747):
68 | X_train[i,j] = data_train['x'+ str(i+1)].iloc[j]
69 | for i in range(747):
70 | Y_train[i] = data_train['y'].iloc[i]
71 |
72 | X_test = np.zeros([57,319], dtype=float)
73 | Y_test = np.zeros([319,], dtype=float)
74 |
75 | data_test = pd.read_table(r"datasets/DowNorm_test.txt")
76 | for i in range(57): #57 input features, matching x[0]..x[56] in the Dow grammar
77 | for j in range(319):
78 | X_test[i,j] = data_test['x'+ str(i+1)].iloc[j]
79 | for i in range(319):
80 | Y_test[i] = data_test['y'].iloc[i]
81 |
82 | GRAMMAR_FILE = 'Dow.bnf'
83 |
84 | BNF_GRAMMAR = grape.Grammar(r"grammars/" + GRAMMAR_FILE)
85 |
86 | return X_train, Y_train, X_test, Y_test, BNF_GRAMMAR
87 |
88 | def fitness_eval(individual, points):
89 | #points = [X, Y]
90 | x = points[0]
91 | y = points[1]
92 |
93 | if individual.invalid:
94 | return np.NaN,
95 |
96 | try:
97 | pred = eval(individual.phenotype)
98 | except (FloatingPointError, ZeroDivisionError, OverflowError,
99 | MemoryError, ValueError):
100 | return np.NaN,
101 | except Exception as err:
102 | # Other errors should not usually happen (unless we have
103 | # an unprotected operator), so the user would prefer to see them.
104 | print("evaluation error", err)
105 | raise
106 | assert np.isrealobj(pred)
107 |
108 | try:
109 | fitness = np.mean(np.square(y - pred))
110 | except (FloatingPointError, ZeroDivisionError, OverflowError,
111 | MemoryError, ValueError):
112 | fitness = np.NaN
113 | except Exception as err:
114 | # Other errors should not usually happen (unless we have
115 | # an unprotected operator), so the user would prefer to see them.
116 | print("fitness error", err)
117 | raise
118 |
119 | if fitness == float("inf"):
120 | return np.NaN,
121 |
122 | return fitness,
123 |
124 | toolbox = base.Toolbox()
125 |
126 | # define a single objective, minimising fitness strategy:
127 | creator.create("FitnessMin", base.Fitness, weights=(-1.0,))
128 |
129 | creator.create('Individual', grape.Individual, fitness=creator.FitnessMin)
130 |
131 | toolbox.register("populationCreator", grape.sensible_initialisation, creator.Individual)
132 | #toolbox.register("populationCreator", grape.random_initialisation, creator.Individual)
133 | #toolbox.register("populationCreator", grape.PI_Grow, creator.Individual)
134 |
135 | toolbox.register("evaluate", fitness_eval)
136 |
137 | # Tournament selection:
138 | toolbox.register("select", tools.selTournament, tournsize=7)
139 |
140 | # Single-point crossover:
141 | toolbox.register("mate", grape.crossover_onepoint)
142 |
143 | # Flip-int mutation:
144 | toolbox.register("mutate", grape.mutation_int_flip_per_codon)
145 |
146 | POPULATION_SIZE = 200
147 | MAX_GENERATIONS = 200
148 | P_CROSSOVER = 0.8
149 | P_MUTATION = 0.01
150 | ELITE_SIZE = 0 #round(0.01*POPULATION_SIZE) #it should be smaller than or equal to HALLOFFAME_SIZE
151 | HALLOFFAME_SIZE = 1 #round(0.01*POPULATION_SIZE) #it should be at least 1
152 |
153 | MIN_INIT_GENOME_LENGTH = 30 #used only for random initialisation
154 | MAX_INIT_GENOME_LENGTH = 50
155 | random_initialisation = False #set to True if you use random initialisation
156 |
157 | MAX_INIT_TREE_DEPTH = 13 #equivalent to 6 in GP with this grammar
158 | MIN_INIT_TREE_DEPTH = 3
159 | MAX_TREE_DEPTH = 35 #equivalent to 17 in GP with this grammar
160 | MAX_WRAPS = 0
161 | CODON_SIZE = 255
162 |
163 | CODON_CONSUMPTION = 'lazy'
164 | GENOME_REPRESENTATION = 'list'
165 | MAX_GENOME_LENGTH = None
166 |
167 | REPORT_ITEMS = ['gen', 'invalid', 'avg', 'std', 'min', 'max',
168 | 'fitness_test',
169 | 'best_ind_length', 'avg_length',
170 | 'best_ind_nodes', 'avg_nodes',
171 | 'best_ind_depth', 'avg_depth',
172 | 'avg_used_codons', 'best_ind_used_codons',
173 | # 'behavioural_diversity',
174 | 'structural_diversity', #'fitness_diversity',
175 | 'selection_time', 'generation_time']
176 |
177 | N_RUNS = 1
178 |
179 | for i in range(N_RUNS):
180 | print()
181 | print()
182 | print("Run:", i)
183 | print()
184 |
185 | RANDOM_SEED = i
186 |
187 | np.random.seed(RANDOM_SEED)
188 | X_train, Y_train, X_test, Y_test, BNF_GRAMMAR = setDataSet(problem) #We set this up inside the loop in case the dataset is defined randomly
189 |
190 | random.seed(RANDOM_SEED)
191 |
192 | # create initial population (generation 0):
193 | if random_initialisation:
194 | population = toolbox.populationCreator(pop_size=POPULATION_SIZE,
195 | bnf_grammar=BNF_GRAMMAR,
196 | min_init_genome_length=MIN_INIT_GENOME_LENGTH,
197 | max_init_genome_length=MAX_INIT_GENOME_LENGTH,
198 | max_init_depth=MAX_TREE_DEPTH,
199 | codon_size=CODON_SIZE,
200 | codon_consumption=CODON_CONSUMPTION,
201 | genome_representation=GENOME_REPRESENTATION
202 | )
203 | else:
204 | population = toolbox.populationCreator(pop_size=POPULATION_SIZE,
205 | bnf_grammar=BNF_GRAMMAR,
206 | min_init_depth=MIN_INIT_TREE_DEPTH,
207 | max_init_depth=MAX_INIT_TREE_DEPTH,
208 | codon_size=CODON_SIZE,
209 | codon_consumption=CODON_CONSUMPTION,
210 | genome_representation=GENOME_REPRESENTATION
211 | )
212 |
213 | # define the hall-of-fame object:
214 | hof = tools.HallOfFame(HALLOFFAME_SIZE)
215 |
216 | # prepare the statistics object:
217 | stats = tools.Statistics(key=lambda ind: ind.fitness.values)
218 | stats.register("avg", np.nanmean)
219 | stats.register("std", np.nanstd)
220 | stats.register("min", np.nanmin)
221 | stats.register("max", np.nanmax)
222 |
223 | # perform the Grammatical Evolution flow:
224 | population, logbook = algorithms.ge_eaSimpleWithElitism(population, toolbox, cxpb=P_CROSSOVER, mutpb=P_MUTATION,
225 | ngen=MAX_GENERATIONS, elite_size=ELITE_SIZE,
226 | bnf_grammar=BNF_GRAMMAR,
227 | codon_size=CODON_SIZE,
228 | max_tree_depth=MAX_TREE_DEPTH,
229 | max_genome_length=MAX_GENOME_LENGTH,
230 | points_train=[X_train, Y_train],
231 | points_test=[X_test, Y_test],
232 | codon_consumption=CODON_CONSUMPTION,
233 | report_items=REPORT_ITEMS,
234 | genome_representation=GENOME_REPRESENTATION,
235 | stats=stats, halloffame=hof, verbose=False)
236 |
237 | import textwrap
238 | best = hof.items[0].phenotype
239 | print("Best individual: \n","\n".join(textwrap.wrap(best,80)))
240 | print("\nTraining Fitness: ", hof.items[0].fitness.values[0])
241 | print("Test Fitness: ", fitness_eval(hof.items[0], [X_test,Y_test])[0])
242 | print("Depth: ", hof.items[0].depth)
243 | print("Length of the genome: ", len(hof.items[0].genome))
244 | print(f'Used portion of the genome: {hof.items[0].used_codons/len(hof.items[0].genome):.2f}')
245 |
246 | max_fitness_values, mean_fitness_values = logbook.select("max", "avg")
247 | min_fitness_values, std_fitness_values = logbook.select("min", "std")
248 | best_ind_length = logbook.select("best_ind_length")
249 | avg_length = logbook.select("avg_length")
250 |
251 | selection_time = logbook.select("selection_time")
252 | generation_time = logbook.select("generation_time")
253 | gen, invalid = logbook.select("gen", "invalid")
254 | avg_used_codons = logbook.select("avg_used_codons")
255 | best_ind_used_codons = logbook.select("best_ind_used_codons")
256 |
257 | fitness_test = logbook.select("fitness_test")
258 |
259 | best_ind_nodes = logbook.select("best_ind_nodes")
260 | avg_nodes = logbook.select("avg_nodes")
261 |
262 | best_ind_depth = logbook.select("best_ind_depth")
263 | avg_depth = logbook.select("avg_depth")
264 |
265 | structural_diversity = logbook.select("structural_diversity")
266 |
267 | import csv
268 | r = RANDOM_SEED
269 |
270 | header = REPORT_ITEMS
271 |
272 | with open(r"./results/" + str(r) + ".csv", "w", encoding='UTF8', newline='') as csvfile:
273 | writer = csv.writer(csvfile, delimiter='\t')
274 | writer.writerow(header)
275 | for value in range(len(max_fitness_values)):
276 | writer.writerow([gen[value], invalid[value], mean_fitness_values[value],
277 | std_fitness_values[value], min_fitness_values[value],
278 | max_fitness_values[value],
279 | fitness_test[value],
280 | best_ind_length[value],
281 | avg_length[value],
282 | best_ind_nodes[value],
283 | avg_nodes[value],
284 | best_ind_depth[value],
285 | avg_depth[value],
286 | avg_used_codons[value],
287 | best_ind_used_codons[value],
288 | # behavioural_diversity[value],
289 | structural_diversity[value],
290 | # fitness_diversity[value],
291 | selection_time[value],
292 | generation_time[value]])
--------------------------------------------------------------------------------
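Note: the regression script scores phenotypes by mean squared error, and its grammars lean on the protected operators from functions.py so that arbitrary evolved expressions stay finite. A minimal sketch with a hand-written phenotype (illustrative only):

import numpy as np
from functions import pdiv, plog
x = np.array([[0.0, 1.0, 4.0]]) #1 feature x 3 samples
y = np.array([1.0, 0.5, 0.4])
pred = eval("pdiv(1.0, plog(x[0]))") #hypothetical phenotype; pdiv returns 1.0 where its denominator is 0
print(np.mean(np.square(y - pred))) #the MSE computed in fitness_eval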
/functions.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | """
4 | Created on Wed Jul 7 10:31:49 2021
5 |
6 | @author: allan
7 | """
8 |
9 | import numpy as np
10 | import math
11 |
12 | def sigmoid(arr):
13 | """
14 | Calculate the sigmoid of the given input, which may be a scalar or an array.
15 | 
16 | Arguments:
17 | arr -- A numeric value or NumPy array.
18 | 
19 | Returns:
20 | The sigmoid of arr, always returned as a NumPy array.
21 | """
22 | if np.isscalar(arr):
23 | arr = np.array([arr])
24 | return 1 / (1 + np.exp(-arr))
25 |
26 | def minimum(a, b):
27 | return np.minimum(a, b)
28 |
29 | def maximum(a, b):
30 | return np.maximum(a, b)
31 |
32 | def pdiv(a, b):
33 | try:
34 | with np.errstate(divide='ignore', invalid='ignore'):
35 | return np.where(b == 0, np.ones_like(a), a / b)
36 | except ZeroDivisionError:
37 | # In this case we are trying to divide two constants, one of which is 0
38 | # Return a constant.
39 | return 1.0
40 |
41 | def psin(n):
42 | return np.sin(n)
43 |
44 | def pcos(n):
45 | return np.cos(n)
46 |
47 | def add(a, b):
48 | return np.add(a,b)
49 |
50 | def sub(a, b):
51 | return np.subtract(a,b)
52 |
53 | def mul(a, b):
54 | return np.multiply(a,b)
55 |
56 | def psqrt(a):
57 | return np.sqrt(abs(a))
58 |
59 | def max_(a,b):
60 | return np.maximum(a, b)
61 |
62 | def min_(a,b):
63 | return np.minimum(a, b)
64 |
65 | def plog(a):
66 | return np.log(1.0 + np.abs(a))
67 |
68 | def not_(a):
69 | return np.logical_not(a)
70 |
71 | def and_(a, b):
72 | return np.logical_and(a,b)
73 |
74 | def or_(a, b):
75 | return np.logical_or(a,b)
76 |
77 | def nand_(a, b):
78 | return np.logical_not(np.logical_and(a,b))
79 |
80 | def nor_(a, b):
81 | return np.logical_not(np.logical_or(a,b))
82 |
83 | def greater_than_or_equal(a, b):
84 | return a >= b
85 |
86 | def less_than_or_equal(a, b):
87 | return a <= b
88 |
89 | def if_(i, o0, o1):
90 | """If _ than _ else _"""
91 | return np.where(i, o0, o1)
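92 | 
93 | def exp(a):
94 | """Exponential (assumed implementation: example_regression.py imports exp from
95 | this module and the Dow grammar calls it; NumPy's exp matches that usage)."""
96 | return np.exp(a)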
--------------------------------------------------------------------------------
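Note: the "protected" operators above are what keep eval'd phenotypes finite on pathological inputs: pdiv substitutes 1.0 wherever the denominator is zero, psqrt takes the root of the absolute value, and plog computes log(1 + |a|). For example:

import numpy as np
from functions import pdiv, psqrt, plog
print(pdiv(np.array([1.0, 2.0, 3.0]), np.array([2.0, 0.0, 1.5]))) #-> [0.5 1. 2. ]
print(psqrt(np.array([-4.0]))) #-> [2.]
print(plog(np.array([0.0]))) #-> [0.]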
/grammars/Banknote.bnf:
--------------------------------------------------------------------------------
1 | <e> ::= (<e> <op> <e>) | <f1>(<e>) | <f2>(<e>, <e>) | <v> | <c>
2 | <op> ::= + | * | -
3 | <f1> ::= psqrt | plog
4 | <f2> ::= pdiv
5 | <v> ::= x[0] | x[1] | x[2] | x[3]
6 | <c> ::= -1.0 | -0.1 | -0.01 | -0.001 | 0.001 | 0.01 | 0.1 | 1.0
--------------------------------------------------------------------------------
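Note: every phenotype derived from this grammar is a plain Python expression over the four banknote features and small constants, e.g. (x[0] * psqrt((x[1] + 0.1))), so fitness_eval can pass it straight to eval() against the features x samples matrix.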
/grammars/Dow.bnf:
--------------------------------------------------------------------------------
1 | <e> ::= <e>+<e>|<e>-<e>|<e>*<e>|pdiv(<e>,<e>)|psqrt(<e>)|np.sin(<e>)|np.tanh(<e>)|exp(<e>)|plog(<e>)|<e>+<e>|<e>-<e>|<e>*<e>|pdiv(<e>,<e>)|psqrt(<e>)|np.sin(<e>)|np.tanh(<e>)|exp(<e>)|plog(<e>)|<e>+<e>|<e>-<e>|<e>*<e>|pdiv(<e>,<e>)|psqrt(<e>)|np.sin(<e>)|np.tanh(<e>)|exp(<e>)|plog(<e>)|<e>+<e>|<e>-<e>|<e>*<e>|pdiv(<e>,<e>)|psqrt(<e>)|np.sin(<e>)|np.tanh(<e>)|exp(<e>)|plog(<e>)|<e>+<e>|<e>-<e>|<e>*<e>|pdiv(<e>,<e>)|psqrt(<e>)|np.sin(<e>)|np.tanh(<e>)|exp(<e>)|plog(<e>)|<e>+<e>|<e>-<e>|<e>*<e>|pdiv(<e>,<e>)|psqrt(<e>)|np.sin(<e>)|np.tanh(<e>)|exp(<e>)|plog(<e>)|<e>+<e>|<e>-<e>|<e>*<e>|pdiv(<e>,<e>)|psqrt(<e>)|np.sin(<e>)|np.tanh(<e>)|exp(<e>)|plog(<e>)|<e>+<e>|<e>-<e>|<e>*<e>|pdiv(<e>,<e>)|psqrt(<e>)|np.sin(<e>)|np.tanh(<e>)|exp(<e>)|plog(<e>)|<e>+<e>|<e>-<e>|<e>*<e>|pdiv(<e>,<e>)|psqrt(<e>)|np.sin(<e>)|np.tanh(<e>)|exp(<e>)|plog(<e>)|<e>+<e>|<e>-<e>|<e>*<e>|pdiv(<e>,<e>)|psqrt(<e>)|np.sin(<e>)|np.tanh(<e>)|exp(<e>)|plog(<e>)|<e>+<e>|<e>-<e>|<e>*<e>|pdiv(<e>,<e>)|psqrt(<e>)|np.sin(<e>)|np.tanh(<e>)|exp(<e>)|plog(<e>)|<e>+<e>|<e>-<e>|<e>*<e>|pdiv(<e>,<e>)|psqrt(<e>)|np.sin(<e>)|np.tanh(<e>)|exp(<e>)|plog(<e>)|<e>+<e>|<e>-<e>|<e>*<e>|pdiv(<e>,<e>)|psqrt(<e>)|np.sin(<e>)|np.tanh(<e>)|exp(<e>)|plog(<e>)|x[0]|x[1]|x[2]|x[3]|x[4]|x[5]|x[6]|x[7]|x[8]|x[9]|x[10]|x[11]|x[12]|x[13]|x[14]|x[15]|x[16]|x[17]|x[18]|x[19]|x[20]|x[21]|x[22]|x[23]|x[24]|x[25]|x[26]|x[27]|x[28]|x[29]|x[30]|x[31]|x[32]|x[33]|x[34]|x[35]|x[36]|x[37]|x[38]|x[39]|x[40]|x[41]|x[42]|x[43]|x[44]|x[45]|x[46]|x[47]|x[48]|x[49]|x[50]|x[51]|x[52]|x[53]|x[54]|x[55]|x[56]|<c>.<c>|<c>.<c>|<c>.<c>|<c>.<c>|<c>.<c>|<c>.<c>|<c>.<c>|<c>.<c>|<c>.<c>|<c>.<c>|<c>.<c>|<c>.<c>|<c>.<c>|<c>.<c>|<c>.<c>|<c>.<c>|<c>.<c>|