├── Part Three
│   ├── PCA.m
│   ├── main.m
│   ├── example.m
│   ├── output.m
│   ├── Untitled2.m
│   ├── labeltest.m
│   ├── linearSVM.m
│   ├── biaccuracy.m
│   ├── cheby2filter.m
│   ├── output_final.m
│   ├── test_output.m
│   ├── totaloutput.m
│   ├── usebagging.m
│   ├── test_validation.m
│   ├── Testdatapreprocessing.m
│   ├── fisheriris.m
│   ├── average_output.m
│   ├── topoplotEEG.m
│   └── trainClassifier.m
├── images
│   ├── Model.png
│   └── Model_Identification.png
├── common
│   ├── __pycache__
│   │   ├── utils.cpython-36.pyc
│   │   ├── classifier.cpython-36.pyc
│   │   ├── datawrapper.cpython-36.pyc
│   │   ├── optimizer.cpython-36.pyc
│   │   └── regressor.cpython-36.pyc
│   ├── regressor.py
│   ├── ButterWorth_filter.py
│   ├── standard.py
│   ├── optimizer.py
│   ├── CSP(Spatial Filter).py
│   ├── utils.py
│   ├── datawrapper.py
│   └── classifier.py
├── .idea
│   ├── modules.xml
│   ├── misc.xml
│   ├── 代码.iml
│   └── workspace.xml
├── README.md
├── p300_CNN_1_GRU_A.py
└── EEG_Preprocessor.py
/Part Three/PCA.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/PCA.m
--------------------------------------------------------------------------------
/Part Three/main.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/main.m
--------------------------------------------------------------------------------
/images/Model.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/images/Model.png
--------------------------------------------------------------------------------
/Part Three/example.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/example.m
--------------------------------------------------------------------------------
/Part Three/output.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/output.m
--------------------------------------------------------------------------------
/Part Three/Untitled2.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/Untitled2.m
--------------------------------------------------------------------------------
/Part Three/labeltest.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/labeltest.m
--------------------------------------------------------------------------------
/Part Three/linearSVM.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/linearSVM.m
--------------------------------------------------------------------------------
/Part Three/biaccuracy.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/biaccuracy.m
--------------------------------------------------------------------------------
/Part Three/cheby2filter.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/cheby2filter.m
--------------------------------------------------------------------------------
/Part Three/output_final.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/output_final.m
--------------------------------------------------------------------------------
/Part Three/test_output.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/test_output.m
--------------------------------------------------------------------------------
/Part Three/totaloutput.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/totaloutput.m
--------------------------------------------------------------------------------
/Part Three/usebagging.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/usebagging.m
--------------------------------------------------------------------------------
/Part Three/test_validation.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/test_validation.m
--------------------------------------------------------------------------------
/images/Model_Identification.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/images/Model_Identification.png
--------------------------------------------------------------------------------
/Part Three/Testdatapreprocessing.m:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/Part Three/Testdatapreprocessing.m
--------------------------------------------------------------------------------
/common/__pycache__/utils.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/common/__pycache__/utils.cpython-36.pyc
--------------------------------------------------------------------------------
/common/__pycache__/classifier.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/common/__pycache__/classifier.cpython-36.pyc
--------------------------------------------------------------------------------
/common/__pycache__/datawrapper.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/common/__pycache__/datawrapper.cpython-36.pyc
--------------------------------------------------------------------------------
/common/__pycache__/optimizer.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/common/__pycache__/optimizer.cpython-36.pyc
--------------------------------------------------------------------------------
/common/__pycache__/regressor.cpython-36.pyc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chenzhuoshuai/EEG-P300Speller-Toolkit/HEAD/common/__pycache__/regressor.cpython-36.pyc
--------------------------------------------------------------------------------
/Part Three/fisheriris.m:
--------------------------------------------------------------------------------
1 | % Toy two-member SVM ensemble on the Fisher iris data
2 | load fisheriris
3 | data = meas;
4 | groups = ismember(species,'setosa');          % binary labels: setosa vs. the rest
5 | [train,test] = crossvalind('HoldOut',groups); % random hold-out split
6 | model1 = svmtrain(data(train,:),groups(train,:));
7 | model2 = svmtrain(data(test,:),groups(test,:));
8 | classes1 = svmclassify(model1,data);
9 | classes2 = svmclassify(model2,data);
10 |
11 | % average the two models' predictions (values in {0, 0.5, 1})
12 | classes = (classes1+classes2)/2;
--------------------------------------------------------------------------------
/Part Three/average_output.m:
--------------------------------------------------------------------------------
1 | % Average the ensemble outputs per epoch, then within each block of 12
2 | % flashes (6 rows + 6 columns) mark the two highest-scoring flashes
3 | % (one row, one column) as targets (+1); all the others stay -1.
4 | output_averaged = ones(size(labels_validation_total,1),1);
5 | for i = 1:size(labels_validation_total,1)
6 | output_averaged(i) = mean(labels_validation_total(i,:));
7 | end
8 | output_final = -ones(size(output_averaged));
9 | for i = 1:(size(labels_validation_total,1)/12)
10 | a = output_averaged((i-1)*12+1:12*i,:);
11 | [~,I] = sort(a);   % ascending sort; I(11) and I(12) index the two largest scores
12 | output_final((i-1)*12+I(11)) = 1;
13 | output_final((i-1)*12+I(12)) = 1;
14 | end
--------------------------------------------------------------------------------
/common/regressor.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import numpy as np
4 |
5 |
6 | ## Ridge regression
7 | # X: N by P feature matrix, N number of samples, P number of features
8 | # y: N by 1 target vector
9 | # b: P by 1 regression coefficients
10 | # b0: the intercept
11 |
12 | # Ridge regression is a biased-estimation method designed for
13 | # collinear (ill-conditioned) data. It is essentially a regularized
14 | # form of least squares: by giving up the unbiasedness of ordinary
15 | # least squares, it trades a little precision for regression
16 | # coefficients that are more realistic and more reliable, and it
17 | # fits ill-conditioned data better than plain least squares.
18 | def ridgereg(y, X, coeff = 1e-4):
19 | N, P = X.shape
20 | PHI = np.concatenate((np.ones([N,1]), X), axis=1)
21 | invC = np.linalg.inv(coeff*np.eye(P+1)+ np.matmul(PHI.T, PHI))
22 | w = np.matmul(np.matmul(invC, PHI.T), y)
23 | b = w[1:]
24 | b0 = w[0]
25 | return b, b0
--------------------------------------------------------------------------------
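
A minimal usage sketch for ridgereg above (synthetic data; assumes only NumPy and the function defined in this file):

    import numpy as np

    np.random.seed(0)
    X = np.random.randn(100, 3)                        # N = 100 samples, P = 3 features
    b_true = np.array([1.0, -2.0, 0.5])
    y = X @ b_true + 0.3 + 0.01 * np.random.randn(100)

    b, b0 = ridgereg(y, X, coeff=1e-4)
    print(b, b0)                                       # close to b_true and the 0.3 intercept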
/common/ButterWorth_filter.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import numpy as np
4 | from scipy.signal import butter, lfilter, filtfilt, decimate
5 |
6 |
7 | def show_filter(b, a, fs=None):
8 | from scipy.signal import freqz
9 | import matplotlib.pyplot as plt
10 | w, h = freqz(b, a)
11 | if fs == None:
12 | plt.semilogx(w, 20 * np.log10(abs(h)))
13 | else:
14 | plt.plot((fs * 0.5 / np.pi) * w, 20 * np.log10(abs(h)))
15 | plt.title('Butterworth filter frequency response')
16 | plt.xlabel('Frequency (Hz)')
17 | plt.ylabel('Amplitude [dB]')
18 | plt.grid(True)
19 | plt.show()
20 |
21 |
22 | def show_filtering_result(b, a, x):
23 | import matplotlib.pyplot as plt
24 | x1 = lfilter(b, a, x)
25 | x2 = filtfilt(b, a, x)
26 | plt.plot(x, 'k-', label='input')
27 | plt.plot(x1, 'b-', label='lfilter')
28 | plt.plot(x2, 'c-', label='filtfilt')
29 | plt.legend(loc='best')
30 | plt.show()
31 |
--------------------------------------------------------------------------------
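
For example (a sketch; the parameters match the low-pass design used in EEG_Preprocessor.py), the response of a 6th-order, 20 Hz low-pass for 240 Hz data can be inspected with show_filter above:

    from scipy.signal import butter

    fs = 240
    fb, fa = butter(6, 2 * 20 / fs, btype='low')  # normalized cutoff = 2*f_c/fs
    show_filter(fb, fa, fs)                       # frequency response with a Hz axis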
/common/standard.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | '''
3 | 1: positive class
4 | 0: negative class
5 | TP: predicted 1, label 1
6 | TN: predicted 0, label 0
7 | FP: predicted 1, label 0
8 | FN: predicted 0, label 1
9 | '''
10 |
11 |
12 | def zhi_biao_result(predict, real):
13 | tp = sum([1 for i in range(len(real)) if predict[i] == 1 and real[i] == 1])
14 | tn = sum([1 for i in range(len(real)) if predict[i] == 0 and real[i] == 0])
15 | fp = sum([1 for i in range(len(real)) if predict[i] == 1 and real[i] == 0])
16 | fn = sum([1 for i in range(len(real)) if predict[i] == 0 and real[i] == 1])
17 | # accuracy
18 | acc = (tp + tn)/(tp + tn + fp + fn)
19 | # recall
20 | recall = tp/(tp + fn)
21 | silence = 1 - recall
22 | # precision
23 | precision = tp/(tp + fp)
24 | noise = 1 - precision
25 | # error rate
26 | error = 1 - acc
27 | # F1 measure
28 | f1_measure = 2*recall*precision/(precision + recall)
29 | return {"TP": tp,
30 | "TN": tn,
31 | "FP": fp,
32 | "FN": fn,
33 | "Acc": acc,
34 | "Recall": recall,
35 | "Silence": silence,
36 | "Precision": precision,
37 | "Noise": noise,
38 | "Error": error,
39 | "F1-measure": f1_measure}
40 |
41 |
42 | def zhi_biao(predict, real):
43 | if predict.size == real.size:
44 | return zhi_biao_result(predict, real)
45 | else:
46 | return "预测与标签数量不一致"
47 |
48 |
49 | if __name__ == "__main__":
50 | y_predict = np.array((1, 1, 1, 1, 0, 0))
51 | y_real = np.array((1, 1, 1, 0, 0, 0))
52 | print(zhi_biao(y_predict, y_real))
53 |
54 |
--------------------------------------------------------------------------------
/common/optimizer.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import numpy as np
4 | from scipy.optimize import fmin_l_bfgs_b
5 | from abc import ABCMeta, abstractmethod
6 |
7 | class GradientDescentOptimizer(object):
8 | __metaclass__ = ABCMeta
9 |
10 | @abstractmethod
11 | def minimize(self, fojb, x0, args):
12 | raise NotImplementedError()
13 |
14 | class AdamOptimizer(GradientDescentOptimizer):
15 | def __init__(self, maxit=500, stopeps=1e-6):
16 | self.maxit = maxit
17 | self.stopeps = stopeps
18 |
19 | def minimize(self, fobj, x0, args):
20 | alpha = 0.01
21 | beta_1 = 0.9
22 | beta_2 = 0.999 # initialize the values of the parameters
23 | epsilon = 1e-8
24 |
25 | it = 0
26 | m_t = 0
27 | v_t = 0
28 | theta_0 = x0
29 | d_theta = np.Inf
30 | while (d_theta > self.stopeps) and (it < self.maxit): # till it gets converged
31 | it = it + 1
32 | theta_prev = theta_0
33 | f_t, g_t = fobj(theta_0, *args)
34 | m_t = beta_1 * m_t + (1 - beta_1) * g_t # updates the moving averages of the gradient
35 | v_t = beta_2 * v_t + (1 - beta_2) * (g_t * g_t) # updates the moving averages of the squared gradient
36 | m_cap = m_t / (1 - (beta_1 ** it)) # calculates the bias-corrected estimates
37 | v_cap = v_t / (1 - (beta_2 ** it)) # calculates the bias-corrected estimates
38 | theta_0 = theta_0 - (alpha * m_cap) / (np.sqrt(v_cap) + epsilon) # updates the parameters
39 | d_theta = np.linalg.norm(theta_0-theta_prev)
40 | print('Iteration %d: FuncValue = %f, d_theta = %f' % (it, f_t, d_theta))
41 |
42 | return theta_0
43 |
44 |
45 | class LbfgsOptimizer(GradientDescentOptimizer):
46 | def __init__(self, maxit=500, stopeps=1e-5):
47 | self.maxit = maxit
48 | self.stopeps = stopeps
49 |
50 | def minimize(self, fobj, x0, args):
51 | theta, obj, info = fmin_l_bfgs_b(fobj, x0, args=args, maxiter=self.maxit, epsilon=self.stopeps, disp=1)
52 | return theta
--------------------------------------------------------------------------------
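
A minimal sketch of minimizing a quadratic with AdamOptimizer above; note that minimize assumes the objective returns a (value, gradient) pair:

    import numpy as np

    def fobj(x, target):
        diff = x - target
        return np.sum(diff ** 2), 2 * diff   # function value and its gradient

    opt = AdamOptimizer()
    x_min = opt.minimize(fobj, np.zeros(3), args=(np.array([1.0, -1.0, 0.5]),))
    print(x_min)                             # moves towards [1, -1, 0.5]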
/common/CSP(Spatial Filter).py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import numpy as np
4 | import scipy as sp
5 |
6 |
7 | """ Compute Common Spatial Patterns
8 | W: the transform matrix of CSP whose columns are the weight vector
9 | R1: the covariance matrix of one class n-by-n (number of channels)
10 | R1 = X*X'/trace(X*X'); normalized covariance matrix
11 | X is N-by-T matrix where N is the number of channels and T is the
12 | number of samples in time.
13 | R2: the covariance matrix of the other class with the same dimension
14 | """
15 | def CSP1(R1, R2):
16 | R = R1 + R2
17 | V1, U1 = np.linalg.eig(R)
18 | P = np.dot(np.diag(V1**(-1/2)), U1.T)
19 | S1 = np.dot(np.dot(P, R1), P.T)
20 | # S2 = np.dot(P, np.dot(R2, P.T))
21 | V2, U2 = np.linalg.eig(S1)
22 | W = np.dot(P.T, U2)
23 | ind = np.argsort(V2)
24 | W = W[:, ind]
25 | return W
26 |
27 |
28 | def CSP2(R1, R2):
29 | V, U = sp.linalg.eig(R1-R2, R1+R2)
30 | ind = np.argsort(V)
31 | W = U[:, ind]
32 | return W
33 |
34 |
35 | """ Train CSP filters
36 | Rs: variance of the multichannel signal
37 | label: class label -1, 1
38 | dim: number of eigenvectors used
39 | """
40 | def trainCSP2(Rs, labels, dim):
41 | num_trials = Rs.shape[0]
42 | num_channels = Rs.shape[1]
43 |
44 | # sort to make sure that -1 corresponds to left
45 | cc = np.unique(labels)
46 | # cc = np.sort(np.unique(labels))
47 | count_c1 = 0
48 | count_c2 = 0
49 | cov_c1 = np.zeros([num_channels, num_channels])
50 | cov_c2 = np.zeros([num_channels, num_channels])
51 | for i in range(num_trials):
52 | c = labels[i]
53 | if c == cc[0]:
54 | cov_c1 += Rs[i]
55 | count_c1 += 1
56 | elif c == cc[1]:
57 | cov_c2 += Rs[i]
58 | count_c2 += 1
59 |
60 | cov_c1 = cov_c1/count_c1
61 | cov_c2 = cov_c2/count_c2
62 |
63 | W_full = CSP1(cov_c1,cov_c2)
64 |
65 | if dim < 0:
66 | W = W_full
67 | else:
68 | W = np.concatenate((W_full[:, 0:int(dim/2)], W_full[:, num_channels-int(dim/2):]), axis=1)
69 |
70 | return W
71 |
--------------------------------------------------------------------------------
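
A usage sketch for trainCSP2 above (toy covariance matrices; in practice Rs[i] is the normalized covariance X*X'/trace(X*X') of trial i):

    import numpy as np

    np.random.seed(1)
    num_trials, C = 20, 8
    Rs = np.zeros([num_trials, C, C])
    labels = np.array([-1, 1] * 10)
    for i in range(num_trials):
        A = np.random.randn(C, 4 * C)
        if labels[i] == 1:
            A[0] *= 3.0                  # class 1 carries extra variance on channel 0
        R = A @ A.T
        Rs[i] = R / np.trace(R)          # normalized covariance

    W = trainCSP2(Rs, labels, dim=4)     # keep 2 filters from each end of the eigenvalue spectrum
    print(W.shape)                       # (8, 4)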
/common/utils.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import numpy as np
4 |
5 |
6 | ## sigmoid activation function
7 | def sigmoid(x):
8 | return 1. / (1. + np.exp(-x))
9 |
10 |
11 | ## softmax activation function
12 | def softmax(X):
13 | expvx = np.exp(X - np.max(X, axis=1)[..., np.newaxis])
14 | return expvx / np.sum(expvx, axis=1, keepdims=True)
15 |
16 |
17 | ## Compute numerical gradient for cost function J with respect to theta
18 | # theta: a vector of parameters
19 | # J: a function that outputs a real-number. Calling y = J(theta) will return the
20 | # function value at theta.
21 | def numgrad(J, theta, *args):
22 | epsilon = 1e-4
23 | # Initialize numgrad with zeros
24 | grad = np.zeros(theta.shape)
25 | for i in range(len(theta)):
26 | e = np.zeros(theta.shape)
27 | e[i] = 1
28 | costp, gradp = J(theta + e * epsilon, *args)
29 | costm, gradm = J(theta - e * epsilon, *args)
30 | grad[i] = (costp-costm)/(2*epsilon)
31 | return grad
32 |
33 |
34 | ## Generate a full matrix given the sparse representation
35 | def sparse(ind1, ind2, values):
36 | m = np.max(ind1) + 1
37 | n = np.max(ind2) + 1
38 | A = np.zeros([m,n])
39 | for i in range(len(values)):
40 | A[ind1[i],ind2[i]] = values[i]
41 | return A
42 |
43 |
44 | ## Generate one-hot coded labels
45 | def onehot(labels, num_classes):
46 | num_labels = labels.shape[0]
47 | index_offset = np.arange(num_labels) * num_classes
48 | labels_onehot = np.zeros((num_labels, num_classes))
49 | labels_onehot.flat[index_offset + labels.ravel()] = 1
50 | return labels_onehot
51 |
52 |
53 | ## Calculate the Woodbury identity
54 | # (A + BD^{-1}C)^{-1} = A^{-1} - A^{-1}B(D+CA^{-1}B)^{-1}CA^{-1}
55 | # which is useful when A is large and diagonal, and hence easy to invert,
56 | # while B has many rows but few columns (and conversely for C) so that
57 | # the right-hand side is much cheaper to evaluate than the left-hand side.
58 | # We consider this condition only.
59 | def woodburyinv(A,B,C,D):
60 | invA = np.diag(1/np.diag(A))
61 | # WD = invA - invA*B*(D+C*invA*B)^(-1)*C*invA;
62 | aa = np.matmul(invA,B)
63 | bb = np.linalg.inv(D+np.matmul(C,aa))
64 | cc = np.matmul(C,invA)
65 | WD = invA - np.matmul(np.matmul(aa,bb),cc)
66 | return WD
67 |
68 |
69 | if __name__ == "__main__":
70 | a = np.random.rand(5)
71 | A = sparse(range(5), range(5), a)
72 | X = np.random.rand(5,10)
73 | D = np.eye(10)
74 |
75 | Z1 = woodburyinv(A, X, X.T, D)
76 | Z2 = np.linalg.inv(A+np.matmul(np.matmul(X,np.linalg.inv(D)),X.T))
77 | print(np.allclose(Z1, Z2))  # the two inverses should agree
--------------------------------------------------------------------------------
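
numgrad above assumes J returns a (cost, gradient) pair; a quick correctness-check sketch:

    import numpy as np

    def J(theta):
        return np.sum(theta ** 2), 2 * theta   # cost and its analytic gradient

    theta = np.random.rand(4)
    print(np.allclose(numgrad(J, theta), 2 * theta))   # True: central differences match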
/common/datawrapper.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # import h5py
4 | import numpy as np
5 | import scipy.io as sio
6 |
7 |
8 | def read_matdata(filepath, keys):
9 | data = {}
10 | f = sio.loadmat(filepath)
11 | # f = h5py.File(filepath, 'r')
12 | for key in keys:
13 | data[key] = f[key]
14 | return data
15 |
16 |
17 | class Dataset(object):
18 | def __init__(self, inputs):
19 | self._data = inputs
20 | self._num_examples = self._data.shape[0]
21 | self._indices = np.arange(self._num_examples)
22 | self._epochs_completed = 0
23 | self._index_in_epoch = 0
24 |
25 | @property
26 | def data(self):
27 | return self._data
28 |
29 | @property
30 | def num_examples(self):
31 | return self._num_examples
32 |
33 | @property
34 | def epochs_completed(self):
35 | return self._epochs_completed
36 |
37 | def shuffle(self):
38 | perm = np.arange(self._num_examples)
39 | np.random.shuffle(perm)
40 | self._indices = perm
41 |
42 | def get_portiondata(self, indices):
43 | return self._data[indices]
44 |
45 | def get_subset(self, ratio, shuffle=True):
46 | ratio = ratio / np.sum(ratio)
47 | num_total = self.num_examples
48 | num_each = (num_total * ratio).astype(int)
49 | ends = np.cumsum(num_each)
50 | ends[-1] = num_total
51 | starts = np.copy(ends)
52 | starts[1:] = starts[0:-1]
53 | starts[0] = 0
54 | if shuffle: self.shuffle()
55 | return [Dataset(self.get_portiondata(self._indices[start:end])) for (start, end) in zip(starts, ends)]
56 |
57 | def next_batch(self, batch_size, shuffle=True):
58 | '''Return the next `batch_size` examples from this data set.'''
59 | start = self._index_in_epoch
60 | # Shuffle for the first epoch
61 | if self._epochs_completed == 0 and start == 0 and shuffle:
62 | self.shuffle()
63 | # Go to the next epoch
64 | if start + batch_size > self._num_examples:
65 | # Finished epoch
66 | self._epochs_completed += 1
67 | # Get the rest examples in this epoch
68 | rest_num_examples = self._num_examples - start
69 | indices_rest_part = self._indices[start:self._num_examples]
70 | # Shuffle the data
71 | if shuffle: self.shuffle()
72 | # Start next epoch
73 | start = 0
74 | self._index_in_epoch = batch_size - rest_num_examples
75 | end = self._index_in_epoch
76 | indices_new_part = self._indices[start:end]
77 | batch = self.get_portiondata(np.concatenate((indices_rest_part, indices_new_part), axis=0))
78 | else:
79 | self._index_in_epoch += batch_size
80 | end = self._index_in_epoch
81 | batch = self.get_portiondata(self._indices[start:end])
82 | return batch
--------------------------------------------------------------------------------
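
A minimal usage sketch for the Dataset wrapper above (toy data):

    import numpy as np

    ds = Dataset(np.arange(10))    # wrap 10 toy examples
    for _ in range(3):
        print(ds.next_batch(4))    # batches of 4; reshuffles and wraps at each epoch boundary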
/README.md:
--------------------------------------------------------------------------------
1 | # EEG-P300Speller_Model-util
2 | This repository contains three parts, including Python code and MATLAB code.
3 |
4 | # Part One: EEG Signal Classification with a Deep Learning Model (Stacked CNN and RNN) - Keras
5 | p300_CNN_1_GRU_A.py: a Python file containing the model, a combination of CNN and GRU, that determines whether an EEG signal sequence contains a P300 wave.
6 |
7 | The brief of model-1 (stacked CNN & RNN), sketched below:
8 | 1) A CNN is responsible for spatial-domain feature extraction.
9 | 2) A GRU is responsible for temporal-domain feature extraction.
10 | 3) Dropout is introduced to prevent overfitting and to improve accuracy.
11 |
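A condensed sketch of this stack (assuming Keras; it mirrors p300_CNN_1_GRU_A.py, whose input is 24 time steps x 64 channels):

```python
from keras.models import Sequential
from keras.layers import Dense, Dropout, GRU, Conv2D
from keras.layers.core import Reshape

model = Sequential()
model.add(Reshape((24, 64, 1), input_shape=(24, 64)))             # (time, channels, 1)
model.add(Conv2D(10, kernel_size=(1, 64), activation='sigmoid'))  # spatial filter across all 64 channels
model.add(Reshape((24, 10)))               # one 10-dim feature vector per time step
model.add(GRU(24))                         # temporal feature extraction
model.add(Dropout(0.5))                    # regularization against overfitting
model.add(Dense(1, activation='sigmoid'))  # P300 / non-P300 probability
model.compile(loss='binary_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
```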
12 | 
13 |
14 | The brief of model-2 (stacked CNN) - not contained in this repository:
15 | 1) A CNN is responsible for spatial-domain feature extraction.
16 | 2) A CNN is responsible for temporal-domain feature extraction.
17 | 3) Dropout is introduced to prevent overfitting and to improve accuracy.
18 |
19 |
20 | 
21 |
22 |
23 | # Part Two: A Toolkit Package for EEG Signal Preprocessing
24 | List and brief description:
25 | 1. EEG_Preprocessor.py: a Python file containing a series of EEG signal preprocessing procedures,
26 | including load_data, extract_eegdata, extract_feature, etc.
27 |
28 | Operations before classification (a minimal sketch follows this list):
29 |
30 | 1) a band-pass Butterworth filter (order 6) to remove noise irrelevant to classification;
31 | 2) a sliding averaging window to downsample the signal data.
32 |
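A minimal sketch of the filter-then-downsample step on a single channel (assuming SciPy; the parameters match EEG_Preprocessor.py: fs = 240 Hz, 20 Hz cutoff, order 6, decimation factor 6):

```python
import numpy as np
from scipy.signal import butter, filtfilt, decimate

fs = 240                                       # sampling rate (Hz)
fb, fa = butter(6, 2 * 20 / fs, btype='low')   # order-6 Butterworth, 20 Hz cutoff

x = np.random.randn(240)                       # one channel, 1 s of toy EEG
x_filt = filtfilt(fb, fa, x)                   # zero-phase filtering
x_down = decimate(x_filt, 6, zero_phase=True)  # 240 -> 40 samples
```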
33 | (see the common folder for the files below)
34 | 2. classifier.py: a Python file containing several simple classifiers for P300 classification,
35 | including:
36 | 1) Fisher's Linear Discriminant Analysis;
37 | 2) Logistic regression (iterative reweighted least squares (IRLS) via Newton-Raphson);
38 | 3) Logistic regression for binary classification using the SGD algorithm;
39 | 4) Softmax regression using SGD;
41 | 3. optimizer.py: a Python file containing some classic optimizers,
42 | including:
43 | 1) AdamOptimizer;
44 | 2) LbfgsOptimizer;
45 |
46 | 4. regressor.py: a Python file containing ridge regression.
47 |
48 | 5. standard.py: a Python file containing some criteria for ML model evaluation.
49 |
50 | 6. CSP(Spatial Filter).py: a Python file containing Common Spatial Patterns, a powerful tool for signal feature extraction.
51 | For more about CSP, please consult:
52 | https://blog.csdn.net/missxy_/article/details/81264953
53 |
54 | 7. ButterWorth_filter.py: a Python file containing a Butterworth temporal filter, a powerful tool for noise removal.
55 |
56 | 8. utils.py: includes several machine learning utilities (sigmoid, softmax, numerical gradient, one-hot encoding, the Woodbury identity).
57 |
58 | # Part Three: A Bagging Ensemble SVM to Classify EEG Signals
59 | (see the Part Three folder for the files below)
60 | It is written entirely in MATLAB.
61 | Consult the main.m file for the code logic; an illustrative sketch of the bagging idea follows.
62 |
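As an illustration only (the repository's implementation is the MATLAB code in Part Three), the bagging-of-SVMs idea can be expressed in Python with scikit-learn, an assumed dependency that is not part of this repo:

```python
import numpy as np
from sklearn.ensemble import BaggingClassifier
from sklearn.svm import LinearSVC

X = np.random.randn(200, 64)            # toy features
y = np.random.randint(0, 2, 200)        # toy binary labels

# a bag of 10 linear SVMs, each fit on a bootstrap sample; predictions are aggregated by vote
clf = BaggingClassifier(LinearSVC(), n_estimators=10, max_samples=0.8)
clf.fit(X, y)
print(clf.predict(X[:5]))
```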
63 |
64 |
65 |
--------------------------------------------------------------------------------
/p300_CNN_1_GRU_A.py:
--------------------------------------------------------------------------------
1 | # -*- coding:utf-8 -*-
2 |
3 | import numpy as np
4 | from EEG_Preprocessor import *
5 | from common.regressor import *
6 | import random
7 | import tensorflow as tf
8 |
9 | # 6 × 6 character matrix
10 | matrix='ABCDEF'+'GHIJKL'+'MNOPQR'+'STUVWX'+'YZ1234'+'56789_'
11 |
12 | # change this path to your local data directory
13 | datapath = 'E:/bcicompetition/bci2005/II/'
14 |
15 | subject = 'Subject_A'
16 | featureTrain, labelTrain, targetTrain, featureTest, labelTest, targetTest = load_dataset(datapath, subject)
17 | # featureTrain.shape:(85, 12, 15, 1536)
18 | # labelTrain.shape:(85, 12, 15)
19 | # featureTest.shape:(100, 12, 15, 1536)
20 | # labelTest.shape:(100, 12, 15)
21 |
22 | num_train, num_chars, num_repeats, num_features = featureTrain.shape
23 | # num_train: 85 (the 85 characters Subject_A focused on)
24 | # num_chars: 12 (6 rows + 6 columns = 12 flash types)
25 | # num_repeats: 15 (the 12 flashes are repeated 15 times for one complete character input)
26 | # num_features: 1536 (64 channels x 24 time points)
27 |
28 | num_test = featureTest.shape[0]
29 | # featureTest.shape: (100, 12, 15, 1536), where 1536 = 24*64 flattened into one vector
30 | X_train = np.reshape(featureTrain, [-1, 24, 64]) # unflatten to (samples, time steps, channels)
31 | # X_train.shape: (85*12*15 = 15300, 24, 64)
32 | y_train = np.reshape(labelTrain, [-1])
33 | # y_train.shape: (15300,)
34 | X_test = np.reshape(featureTest, [-1, 24, 64])
35 | # X_test.shape: (100*12*15 = 18000, 24, 64)
36 | y_test = np.reshape(labelTest, [-1])
37 |
38 |
39 | from keras.models import Sequential
40 | from keras.layers import Dense, Dropout, BatchNormalization, GRU
41 | from keras.layers import Conv1D, GlobalAveragePooling1D, MaxPooling1D,Conv2D,Flatten
42 | from keras.layers.core import Reshape
43 |
44 | # length of each channel's signal after filtering and downsampling
45 | seq_length = 24
46 |
47 | model = Sequential()
48 | # reshape into 2D-image form: (time steps, channels, 1)
49 | model.add(Reshape((seq_length, 64, 1), input_shape=(seq_length, 64)))
50 |
51 | # after the reshape the input is a 4D tensor (batch, 24, 64, 1); the (1, 64) kernel acts as a spatial filter
52 | model.add(Conv2D(filters=10,
53 | kernel_size=(1, 64),
54 | padding='valid',
55 | activation='sigmoid'))
56 |
57 | # the output now has shape 24 x 1 x 10 (time steps x 1 x filters)
58 | model.add(Reshape((seq_length, 10)))
59 |
60 | # at each of the 24 time steps, the GRU's input is the ten convolution filters' outputs
61 | model.add(GRU(24))
62 |
63 | model.add(Dropout(0.5))
64 |
65 | # output layer
66 | model.add(Dense(1, activation='sigmoid'))
67 |
68 |
69 | model.compile(loss='binary_crossentropy',
70 | optimizer='rmsprop',
71 | metrics=['accuracy'])
72 |
73 | history = model.fit(X_train, y_train,epochs=10,batch_size=128,validation_data=(X_test, y_test))
74 |
75 | score = model.evaluate(X_test, y_test, batch_size=128)
76 | print(score)
77 | y_predict = model.predict(X_test)
78 |
79 | targetPredict = np.zeros([num_test, num_repeats], dtype=np.str)
80 | # targetPredict.shape:(100, 15)
81 |
82 | for trial in range(num_test): #range(0, 100)
83 | ytrial = y_predict[trial*num_chars*num_repeats:(trial+1)*num_chars*num_repeats]
84 | # e.g. for trial = 0, ytrial = y_predict[0:180] (12*15 = 180)
85 | ytrial = np.reshape(ytrial, [num_chars, num_repeats])
86 | # reshape: ytrial.shape = (12, 15)
87 | for repeat in range(num_repeats): # range(0, 15)
88 | yavg = np.mean(ytrial[:,0:repeat+1], axis=1)
89 | # average ytrial over the first repeat+1 repetitions
90 | row = np.argmax(yavg[6:])
91 | col = np.argmax(yavg[0:6])
92 | # pick the most probable row and the most probable column
93 | targetPredict[trial, repeat] = matrix[int(row*6+col)]
94 | # predicted character
95 |
96 | accTest = np.zeros(num_repeats)
97 | # accTest.shape:(15,)
98 | for i in range(num_repeats): # range(0, 15)
99 | accTest[i] = np.mean(np.array(targetPredict[:,i] == targetTest).astype(int))
100 | # accuracy after 1, 2, ..., 15 repetitions
101 |
102 | # compute evaluation metrics
103 | from common.standard import *
104 | y = y_predict.copy()
105 | for i in range(len(y_predict)):
106 | if y[i] >= 0.5:
107 | y[i] = 1
108 | else:
109 | y[i] = 0
110 | print(zhi_biao(y, y_test.T))
111 |
112 | # plot character recognition rate
113 | import matplotlib.pyplot as plt
114 | plt.plot(np.arange(num_repeats)+1, accTest*100, 'k-')
115 | plt.title('Character Recognition Rate for ' + subject)
116 | plt.xlabel('Repeat [n]')
117 | plt.ylabel('Accuracy [%]')
118 | plt.grid(which='both', axis='both')
119 | plt.xlim(0, 16)
120 | plt.ylim(0, 100)
121 | plt.show()
122 |
123 | # plot training & validation accuracy
124 | plt.plot(history.history['acc'])
125 | plt.plot(history.history['val_acc'])
126 | plt.title('Model accuracy')
127 | plt.ylabel('Accuracy')
128 | plt.xlabel('Epoch')
129 | plt.legend(['Train', 'Test'], loc='upper left')
130 | plt.show()
131 |
132 |
134 | # plot training & validation loss
134 | # plt.plot(history.history['loss'])
135 | # plt.plot(history.history['val_loss'])
136 | # plt.title('Model loss')
137 | # plt.ylabel('Loss')
138 | # plt.xlabel('Epoch')
139 | # plt.legend(['Train', 'Test'], loc='upper left')
140 | # plt.show()
141 |
--------------------------------------------------------------------------------
/common/classifier.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import numpy as np
4 | from .utils import *
5 | from .optimizer import *
6 |
7 | ## Fisher's Linear Discriminant Analysis
8 | # y: N by 1 labels
9 | # X: N by P matrix, N observation of P dimensional feature vectors
10 | # wd: weight decay coefficient
11 | def FLDA(y, X, wd = 1e-4):
12 | N, P = X.shape
13 |
14 | index1 = np.argwhere(y==1)
15 | index2 = np.argwhere(y==-1)
16 | N1 = len(index1)
17 | N2 = len(index2)
18 |
19 | X1 = X[index1,:]
20 | X2 = X[index2,:]
21 |
22 | mu1 = np.squeeze(np.mean(X1, axis=0))
23 | mu2 = np.squeeze(np.mean(X2, axis=0))
24 |
25 | Sw = np.cov(np.transpose(X))
26 |
27 | b = np.dot(np.linalg.inv(Sw + wd*np.eye(P)), (mu1 - mu2).T)
28 | b0 = -np.dot(mu1 + mu2, b)/2
29 |
30 | return b, b0
31 |
32 |
33 | ## Logistic regression for binary classification (Page 205-208 of PRML)
34 | # Iterative reweighted least square (IRLS) by Newton-Raphson
35 | # iterative optimization scheme.
36 | # w_new = w_old - (PHI'*R*PHI)^(-1)*PHI'*(y-t);
37 | #
38 | # X: N by P design matrix with N samples of M features
39 | # y: N by 1 target values {0,1}
40 | # wd: weight decay coefficient
41 | # b: P by 1 weight vector
42 | # b0: bias
43 | def logistic(y, X, wd = 1e-4):
44 | # add a constant column to cope with bias
45 | PHI = np.concatenate((np.ones([X.shape[0],1]),X),axis=1)
46 | N, P = PHI.shape
47 | y[y==-1] = 0 # the class label should be 1 or 0
48 | # initialization
49 | w = np.zeros(P) # rough initialization
50 | w[0] = np.log(np.mean(y)/(1-np.mean(y)))
51 | # stop conditions
52 | d_w = np.Inf
53 | maxit = 500
54 | stopeps = 1e-6
55 |
56 | i = 1
57 | while (d_w > stopeps) and (i < maxit) :
58 | wold = w
59 |
60 | t = 1/(1+np.exp(-np.dot(PHI,w))) # predicted target value
61 | R = np.diag(np.squeeze(t*(1-t))) # the variance matrix of target value
62 | # update with a norm2 regularization of w
63 | # H = PHI'*R*PHI + wd*eye(P);
64 | if (P > N):
65 | invH = woodburyinv(wd*np.eye(P), PHI.T, PHI, np.diag(1/np.diag(R)))  # woodburyinv computes (A + B*D^-1*C)^-1, so pass R^-1
66 | else:
67 | invH = np.linalg.inv(wd*np.eye(P) + np.dot(np.dot(PHI.T,R),PHI))
68 |
69 | w = w - np.dot(invH, np.dot(PHI.T,t-y) + wd*w)
70 | d_w = np.linalg.norm(wold-w)
71 |
72 | print('Iteration %d: wchange = %f' % (i, d_w))
73 | i = i + 1
74 |
75 | if (i >= maxit):
76 | print('Optimization finished with maximum iterations = %d' % maxit)
77 |
78 | return w[1:], w[0]
79 |
80 |
81 | ## Logistic regression for binary classification using SGD algorithm
82 | # X: N by P design matrix with N samples of M features
83 | # y: N by 1 target values {0,1}
84 | # wd: weight decay coefficient
85 | # b: P by 1 weight vector
86 | # b0: bias
87 | # need package pylbfgs installed
88 | def logistic_sgd(y, X, wd=1e-4, optimizer=LbfgsOptimizer()):
89 | ## min sum(log(1 + exp(-t.*(PHI * W)))) + wd *norm(w)
90 | def _logisticCost(w, *args):
91 | wd, PHI, y = args
92 | y = np.squeeze(y)
93 | z = y * np.matmul(PHI, w)
94 | t = 1 / (1 + np.exp(-z))
95 | grad = np.matmul(-PHI.T, y * (1 - t)) + wd * w
96 | cost = -np.sum(np.log(t)) + 0.5 * wd * np.dot(w.T, w)
97 | return cost, grad
98 |
99 | # add a constant column to cope with bias
100 | PHI = np.concatenate((np.ones([X.shape[0],1]),X),axis=1)
101 | N, P = PHI.shape
102 | # y[y==-1] = 0 # the class label should be 1 or 0
103 | # initialization
104 | w = np.zeros(P) # rough initialization
105 |
106 | # cost, grad = _logisticCost(w, wd, PHI, y)
107 | # grad1 = numgrad(_logisticCost, w, wd, PHI, y)
108 | # diff = np.linalg.norm(grad1 - grad) / np.linalg.norm(grad1 + grad)
109 | # print(diff)
110 |
111 | w = optimizer.minimize(_logisticCost, w, args=(wd, PHI, y))
112 | return w[1:], w[0]
113 |
114 |
115 | ## softmax activation function
116 | def _softmax(X):
117 | expvx = np.exp(X - np.max(X, axis=1)[..., np.newaxis])
118 | return expvx / np.sum(expvx, axis=1, keepdims=True)
119 |
120 | ## Softmax regression using stochastic gradient descent algorithm
121 | # X: N by P feature matrix, N number of samples, P number of features
122 | # y: N by 1 class labels (t=k indicate belong to class k)
123 | # wd: weight decay coefficient
124 | # W: P by K regression coefficients
125 | def softmax_train(y, X, wd=1e-4, optimizer=LbfgsOptimizer()):
126 | ## Cross entropy error cost function
127 | def _softmaxCost(theta, *args):
128 | wd, PHI, y = args
129 | N, P = PHI.shape
130 | W = np.reshape(theta, [P,-1])
131 | t = _softmax(np.matmul(PHI, W))
132 | grad = (1./N)*np.matmul(PHI.T,t-y) + wd*W
133 | grad = grad.flatten()
134 | cost = -(1./N)*np.dot(y.flatten().T,np.log(t.flatten())) + 0.5*wd*np.sum(W.flatten()**2)
135 | return cost, grad
136 |
137 | K = len(np.unique(y))
138 | if len(y.shape) == 1 or y.shape[1] == 1:
139 | y = onehot(y, K)
140 | # add a constant column to cope with bias
141 | PHI = np.concatenate((np.ones([X.shape[0], 1]), X), axis=1)
142 | N, P = PHI.shape
143 | W = np.ones([P, K]) # rough initialization
144 | theta = W.flatten()
145 |
146 | # cost, grad = _softmaxCost(theta, wd, PHI, y)
147 | # grad1 = numgrad(_softmaxCost, theta, wd, PHI, y)
148 | # diff = np.linalg.norm(grad1 - grad) / np.linalg.norm(grad1 + grad)
149 | # print(diff)
150 |
151 | opttheta = optimizer.minimize(_softmaxCost, theta, args=(wd, PHI, y))
152 | W = np.reshape(opttheta, W.shape)
153 | return W
154 |
155 | def softmax_predict(X, W):
156 | PHI = np.concatenate((np.ones([X.shape[0], 1]), X), axis=1)
157 | t = _softmax(np.matmul(PHI, W))
158 | y = np.argmax(t, axis=1)
159 | return y, t
160 |
161 |
162 | if __name__ == "__main__":
163 |
164 | X = np.random.rand(5, 50)
165 | y = np.array([0, 1, 2, 3, 4])
166 | W = softmax_train(y, X)
--------------------------------------------------------------------------------
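
A sketch of FLDA above on separable toy data (the labels must be +1/-1, as the code assumes):

    import numpy as np

    np.random.seed(0)
    X = np.vstack([np.random.randn(50, 4) + 2.0,    # class +1
                   np.random.randn(50, 4) - 2.0])   # class -1
    y = np.concatenate([np.ones(50), -np.ones(50)])

    b, b0 = FLDA(y, X)
    pred = np.sign(X @ b + b0)
    print(np.mean(pred == y))                       # close to 1.0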
/EEG_Preprocessor.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | import numpy as np
4 | from common.datawrapper import *
5 | from common.ButterWorth_filter import *
6 | from scipy.stats import zscore
7 |
8 |
9 | def load_traindata(filepath, filter):
10 | data = read_matdata(filepath, ['Signal', 'Flashing', 'StimulusCode', 'TargetChar'])
11 | signal = data['Signal']
12 | flashing = data['Flashing']
13 | stimuluscode = data['StimulusCode']
14 | targetchar = data['TargetChar'][0]
15 | return extract_eegdata(signal, flashing, stimuluscode, targetchar, filter)
16 |
17 |
18 | def load_testdata(filepath, labelpath, filter):
19 | data = read_matdata(filepath, ['Signal', 'Flashing', 'StimulusCode'])
20 | signal = data['Signal']
21 | flashing = data['Flashing']
22 | stimuluscode = data['StimulusCode']
23 | with open(labelpath, 'r') as myfile:
24 | targetchar = myfile.read().replace('\n', '')
25 | # strip the newline characters
26 | return extract_eegdata(signal, flashing, stimuluscode, targetchar, filter)
27 |
28 |
29 | def extract_eegdata(signal, flashing, stimuluscode, targetchar, filter):
30 | num_chars = 12
31 | num_repeats = 15
32 | num_samples = 240
33 | num_trials = signal.shape[0]
34 | # 85 characters (trials)
35 | num_channels = signal.shape[2]
36 | # 64 channels
37 |
38 | fb, fa = filter
39 | # show_filtering_result(fb, fa, signal[0,:,0])
40 |
41 | signal_filtered = np.zeros(signal.shape)
42 | for i in range(num_trials): # 85 characters
43 | for j in range(num_channels): # 64 channels
44 | signal_channel = signal[i,:,j]
45 | signal_filtered[i,:,j] = filtfilt(fb, fa, signal_channel) # zero-phase filtering
46 |
47 | target = np.array(list(targetchar))
48 | data = np.zeros([num_trials, num_chars, num_repeats, num_samples, num_channels])
49 | for i in range(num_trials):
50 | repeat = np.zeros([num_chars,1], dtype=int)
51 | for n in range(1, signal.shape[1]):
52 | if flashing[i, n-1] == 0 and flashing[i, n] == 1:
53 | event = int(stimuluscode[i, n])
54 | data[i, event-1, repeat[event-1], :, :] = signal_filtered[i, n:n+num_samples, :]
55 | repeat[event - 1] += 1
56 |
57 | return data, target
58 |
59 |
60 | def extract_feature(data, target, sampleseg, chanset, dfs):
61 |
62 | num_trials, num_chars, num_repeats, num_samples, num_channels = data.shape
63 | # data.shape: (85, 12, 15, 240, 64)
64 | sample_begin = sampleseg[0] # 0
65 | sample_end = sampleseg[1] # 144
66 | num_samples_used = int(np.ceil((sample_end - sample_begin) / dfs))
67 | # ceil((144 - 0) / 6) = 24 downsampled time points per epoch
68 | num_channel_used = len(chanset)
69 | # 64 channels used
70 | num_features = num_samples_used * num_channel_used
71 |
72 | np.seterr(divide='ignore', invalid='ignore')
73 | labels = np.zeros([num_trials, num_chars, num_repeats])
74 | feature = np.zeros([num_trials, num_chars, num_repeats, num_features])
75 | for trial in range(num_trials):
76 | target_index = matrix.find(target[trial])
77 | target_row = int(np.floor((target_index)/6))
78 | target_col = target_index - target_row * 6
79 | labels[trial, (target_col, target_row + 6), :] = 1
80 |
81 | signal_trial = data[trial]
82 | for char in range(num_chars):
83 | for repeat in range(num_repeats):
84 | signal_epoch = signal_trial[char, repeat, :, :]
85 | signal_filtered = signal_epoch[sample_begin:sample_end, chanset]
86 | # signal_filtered.shape:[144,64]
87 | signal_downsampled = np.transpose(decimate(signal_filtered.T, dfs, zero_phase=True))
88 | # signal_downsampled.shape:[144/6=24,64]
89 | # scipy.signal.decimate(signal, factor): downsample by the given factor; zero_phase=True avoids introducing a phase shift
90 | signal_normalized = np.zeros(signal_downsampled.shape)
91 | for c in range(num_channel_used):
92 | # if a channel contains only a DC component, leave its values at zero
93 | if not (np.max(signal_downsampled[:, c]) == np.min(signal_downsampled[:, c])):
94 | signal_normalized[:, c] = zscore(signal_downsampled[:, c])
95 | feature[trial, char, repeat, :] = np.reshape(signal_normalized, [-1])
96 | # np.reshape(signal_normalized, [-1]) flattens the [24, 64] matrix into one vector
97 | return feature, labels
98 |
99 |
100 | def load_dataset(datapath, subject):
101 | f = np.load(datapath+'processed/'+subject+'.npz')
102 | return f['featureTrain'], f['labelTrain'], f['targetTrain'], f['featureTest'], f['labelTest'], f['targetTest']
103 |
104 |
105 | if __name__ == '__main__':
106 | # 6 by 6 matrix
107 | matrix = 'ABCDEF' + 'GHIJKL' + 'MNOPQR' + 'STUVWX' + 'YZ1234' + '56789_'
108 |
109 | datapath = 'E:/bcicompetition/bci2005/II/'
110 |
111 | import os
112 | if not os.path.isdir(datapath + 'processed/'):
113 | os.mkdir(datapath + 'processed/')
114 |
115 | fs = 240
116 | # sampling frequency (Hz)
117 | f2 = 20
118 | # cutoff frequency (Hz)
119 | order = 6
120 | # filter order
121 | fb, fa = butter(order, 2 * f2 / fs, btype='low')
122 | # the normalized cutoff is (2 * cutoff frequency) / sampling frequency
123 | # btype='low': low-pass filter
124 | # fb, fa: Butterworth filter coefficients, the numerator (b) and denominator (a) polynomials of the IIR filter
125 | # show_filter(fb, fa, fs)
126 |
127 | dfs = 6
128 | sampleseg = [0, int(0.6 * fs)]
129 | # epoch segment [0, 0.6 s * 240 Hz = 144 samples]
130 | chanset = np.arange(64)
131 | # chanset:[0,1,2,...,63]
132 |
133 | subject = 'Subject_A'
134 | file_train = datapath + subject + '_Train.mat'
135 | file_test = datapath + subject + '_Test.mat'
136 | file_label = datapath + 'true_labels_' + subject[-1].lower() + '.txt'
137 |
138 | print('Load and extract continuous EEG into epochs for train data')
139 | dataTrain, targetTrain = load_traindata(file_train, [fb, fa])
140 | print('Extract P300 features from epochs for train data')
141 | featureTrain, labelTrain = extract_feature(dataTrain, targetTrain, sampleseg, chanset, dfs)
142 |
143 | print('Load and extract continuous EEG into epochs for test data')
144 | dataTest, targetTest = load_testdata(file_test, file_label, [fb, fa])
145 | print('Extract P300 features from epochs for test data')
146 | featureTest, labelTest = extract_feature(dataTest, targetTest, sampleseg, chanset, dfs)
147 |
148 | np.savez(datapath+'processed/'+subject+'.npz',
149 | featureTrain=featureTrain, labelTrain=labelTrain, targetTrain=targetTrain,
150 | featureTest=featureTest, labelTest=labelTest, targetTest=targetTest)
151 |
152 |
153 |
--------------------------------------------------------------------------------
/Part Three/topoplotEEG.m:
--------------------------------------------------------------------------------
1 | % topoplot() - plot a topographic map of an EEG field as a 2-D
2 | % circular view (looking down at the top of the head)
3 | % using interpolation on a fine cartesian grid.
4 | % Usage:
5 | % >> topoplot(datavector,'eloc_file');
6 | % >> topoplot(datavector,'eloc_file', 'Param1','Value1', ...)
7 | % Inputs:
8 | % datavector = vector of values at the corresponding locations.
9 | % 'eloc_file' = name of an EEG electrode position file {0 -> 'chan_file'}
10 | %
11 | % Optional Parameters & Values (in any order):
12 | % Param Value
13 | % 'colormap' - any sized colormap
14 | % 'interplimits' - 'electrodes' to furthest electrode
15 | % 'head' to edge of head
16 | % {default 'head'}
17 | % 'gridscale' - scaling grid size {default 67}
18 | % 'maplimits' - 'absmax' +/- the absolute-max
19 | % 'maxmin' scale to data range
20 | % [clim1,clim2] user-defined lo/hi
21 | % {default = 'absmax'}
22 | % 'style' - 'straight' colormap only
23 | % 'contour' contour lines only
24 | % 'both' both colormap and contour lines
25 | % 'fill' constant color between lines
26 | % 'blank' just head and electrodes
27 | % {default = 'both'}
28 | % 'numcontour' - number of contour lines
29 | % {default = 6}
30 | % 'shading' - 'flat','interp' {default = 'flat'}
31 | % 'headcolor' - Color of head cartoon {default black}
32 | % 'electrodes' - 'on','off','labels','numbers'
33 | % 'efontsize','electcolor','emarker','emarkersize' - details
34 | %
35 | % Note: topoplot() only works when map limits are >= the max and min
36 | % interpolated data values.
37 | % Eloc_file format:
38 | % chan_number degrees radius reject_level amp_gain channel_name
39 | % (Angle-0 = Cz-to-Fz; C3-angle =-90; Radius at edge of image = 0.5)
40 | %
41 | % For a sample eloc file: >> topoplot('example')
42 | %
43 | % Note: topoplot() will ignore any electrode with a position outside
44 | % the head (radius > 0.5)
45 |
46 | % Topoplot Version 2.1
47 |
48 | % Begun by Andy Spydell, NHRC, 7-23-96
49 | % 8-96 Revised by Colin Humphries, CNL / Salk Institute, La Jolla CA
50 | % -changed surf command to imagesc (faster)
51 | % -can now handle arbitrary scaling of electrode distances
52 | % -can now handle non integer angles in eloc_file
53 | % 4-4-97 Revised again by Colin Humphries, reformat by SM
54 | % -added parameters
55 | % -changed eloc_file format
56 | % 2-26-98 Revised by Colin
57 | % -changed image back to surface command
58 | % -added fill and blank styles
59 | % -removed extra background colormap entry (now use any colormap)
60 | % -added parameters for electrode colors and labels
61 | % -now each topoplot axes use the caxis command again.
62 | % -removed OUTPUT parameter
63 | % 3-11-98 changed default emarkersize, improve help msg -sm
64 |
65 | function handle = topoplot(Vl,loc_file,p1,v1,p2,v2,p3,v3,p4,v4,p5,v5,p6,v6,p7,v7,p8,v8,p9,v9)
66 |
67 | % User Defined Defaults:
68 | MAXCHANS = 256;
69 | DEFAULT_ELOC = 'eloc64.txt';
70 | INTERPLIMITS = 'head'; % head, electrodes
71 | MAPLIMITS = 'absmax'; % absmax, maxmin, [values]
72 | GRID_SCALE = 67;
73 | CONTOURNUM = 6;
74 | STYLE = 'both'; % both,straight,fill,contour,blank
75 | HCOLOR = [0 0 0];
76 | ECOLOR = [0 0 0];
77 | CONTCOLOR = [0 0 0];
78 | ELECTROD = 'on'; % ON OFF LABEL
79 | EMARKERSIZE = 6;
80 | EFSIZE = get(0,'DefaultAxesFontSize');
81 | HLINEWIDTH = 2;
82 | EMARKER = '.';
83 | SHADING = 'flat'; % flat or interp
84 |
85 | %%%%%%%%%%%%%%%%%%%%%%%
86 | nargs = nargin;
87 | if nargs < 2
88 | loc_file = DEFAULT_ELOC;
89 | end
90 | if nargs == 1
91 | if isstr(Vl)
92 | if any(strcmp(lower(Vl),{'example','demo'}))
93 | fprintf(['This is an example of an electrode location file,\n',...
94 | 'an ascii file consisting of the following four columns:\n',...
95 | ' channel_number degrees arc_length channel_name\n\n',...
96 | 'Example:\n',...
97 | ' 1 -18 .352 Fp1.\n',...
98 | ' 2 18 .352 Fp2.\n',...
99 | ' 5 -90 .181 C3..\n',...
100 | ' 6 90 .181 C4..\n',...
101 | ' 7 -90 .500 A1..\n',...
102 | ' 8 90 .500 A2..\n',...
103 | ' 9 -142 .231 P3..\n',...
104 | '10 142 .231 P4..\n',...
105 | '11 0 .181 Fz..\n',...
106 | '12 0 0 Cz..\n',...
107 | '13 180 .181 Pz..\n\n',...
108 | 'The model head sphere has a diameter of 1.\n',...
109 | 'The vertex (Cz) has arc length 0. Channels with arc \n',...
110 | 'lengths > 0.5 are not plotted nor used for interpolation.\n'...
111 | 'Zero degrees is towards the nasion. Positive angles\n',...
112 | 'point to the right hemisphere; negative to the left.\n',...
113 | 'Channel names should each be four chars, padded with\n',...
114 | 'periods (in place of spaces).\n'])
115 | return
116 |
117 | end
118 | end
119 | end
120 | if isempty(loc_file)
121 | loc_file = 0;
122 | end
123 | if loc_file == 0
124 | loc_file = DEFAULT_ELOC;
125 | end
126 |
127 | if nargs > 2
128 | if ~(round(nargs/2) == nargs/2)
129 | error('topoplot(): Odd number of inputs?')
130 | end
131 | for i = 3:2:nargs
132 | Param = eval(['p',int2str((i-3)/2 +1)]);
133 | Value = eval(['v',int2str((i-3)/2 +1)]);
134 | if ~isstr(Param)
135 | error('topoplot(): Parameter must be a string')
136 | end
137 | Param = lower(Param);
138 | switch lower(Param)
139 | case 'colormap'
140 | if size(Value,2)~=3
141 | error('topoplot(): Colormap must be a n x 3 matrix')
142 | end
143 | colormap(Value)
144 | case {'interplimits','headlimits'}
145 | if ~isstr(Value)
146 | error('topoplot(): interplimits value must be a string')
147 | end
148 | Value = lower(Value);
149 | if ~strcmp(Value,'electrodes') & ~strcmp(Value,'head')
150 | error('topoplot(): Incorrect value for interplimits')
151 | end
152 | INTERPLIMITS = Value;
153 | case 'maplimits'
154 | MAPLIMITS = Value;
155 | case 'gridscale'
156 | GRID_SCALE = Value;
157 | case 'style'
158 | STYLE = lower(Value);
159 | case 'numcontour'
160 | CONTOURNUM = Value;
161 | case 'electrodes'
162 | ELECTROD = lower(Value);
163 | case 'emarker'
164 | EMARKER = Value;
165 | case {'headcolor','hcolor'}
166 | HCOLOR = Value;
167 | case {'electcolor','ecolor'}
168 | ECOLOR = Value;
169 | case {'emarkersize','emsize'}
170 | EMARKERSIZE = Value;
171 | case {'efontsize','efsize'}
172 | EFSIZE = Value;
173 | case 'shading'
174 | SHADING = lower(Value);
175 | if ~any(strcmp(SHADING,{'flat','interp'}))
176 | error('Invalid Shading Parameter')
177 | end
178 | otherwise
179 | error('Unknown parameter.')
180 | end
181 | end
182 | end
183 |
184 | [r,c] = size(Vl);
185 | if r>1 & c>1,
186 | error('topoplot(): data should be a single vector\n');
187 | end
188 | fid = fopen(loc_file);
189 | if fid<1,
190 | fprintf('topoplot(): cannot open eloc_file (%s).\n',loc_file);
191 | return
192 | end
193 | A = fscanf(fid,'%d %f %f %s',[7 MAXCHANS]);
194 | fclose(fid);
195 |
196 | A = A';
197 |
198 | if length(Vl) ~= size(A,1),
199 | fprintf(...
200 | 'topoplot(): data vector must have the same rows (%d) as eloc_file (%d)\n',...
201 | length(Vl),size(A,1));
202 | A
203 | error('');
204 | end
205 |
206 | labels = setstr(A(:,4:7));
207 | idx = find(labels == '.'); % some labels have dots
208 | labels(idx) = setstr(abs(' ')*ones(size(idx))); % replace them with spaces
209 |
210 | Th = pi/180*A(:,2); % convert degrees to radians
211 | Rd = A(:,3);
212 | ii = find(Rd <= 0.5); % interpolate on-head channels only
213 | Th = Th(ii);
214 | Rd = Rd(ii);
215 | Vl = Vl(ii);
216 | labels = labels(ii,:);
217 |
218 | [x,y] = pol2cart(Th,Rd); % transform from polar to cartesian coordinates
219 | rmax = 0.5;
220 |
221 | ha = gca;
222 | cla
223 | hold on
224 |
225 | if ~strcmp(STYLE,'blank')
226 | % find limits for interpolation
227 | if strcmp(INTERPLIMITS,'head')
228 | xmin = min(-.5,min(x)); xmax = max(0.5,max(x));
229 | ymin = min(-.5,min(y)); ymax = max(0.5,max(y));
230 | else
231 | xmin = max(-.5,min(x)); xmax = min(0.5,max(x));
232 | ymin = max(-.5,min(y)); ymax = min(0.5,max(y));
233 | end
234 |
235 | xi = linspace(xmin,xmax,GRID_SCALE); % x-axis description (row vector)
236 | yi = linspace(ymin,ymax,GRID_SCALE); % y-axis description (row vector)
237 |
238 | [Xi,Yi,Zi] = griddata(y,x,Vl,yi',xi,'invdist'); % Interpolate data
239 |
240 | % Take data within head
241 | mask = (sqrt(Xi.^2+Yi.^2) <= rmax);
242 | ii = find(mask == 0);
243 | Zi(ii) = NaN;
244 |
245 | % calculate colormap limits
246 | m = size(colormap,1);
247 | if isstr(MAPLIMITS)
248 | if strcmp(MAPLIMITS,'absmax')
249 | amin = -max(max(abs(Zi)));
250 | amax = max(max(abs(Zi)));
251 | elseif strcmp(MAPLIMITS,'maxmin')
252 | amin = min(min(Zi));
253 | amax = max(max(Zi));
254 | end
255 | else
256 | amin = MAPLIMITS(1);
257 | amax = MAPLIMITS(2);
258 | end
259 | delta = xi(2)-xi(1); % length of grid entry
260 |
261 | % Draw topoplot on head
262 | if strcmp(STYLE,'contour')
263 | contour(Xi,Yi,Zi,CONTOURNUM,'k');
264 | elseif strcmp(STYLE,'both')
265 | surface(Xi-delta/2,Yi-delta/2,zeros(size(Zi)),Zi,'EdgeColor','none',...
266 | 'FaceColor',SHADING);
267 | contour(Xi,Yi,Zi,CONTOURNUM,'k');
268 | elseif strcmp(STYLE,'straight')
269 | surface(Xi-delta/2,Yi-delta/2,zeros(size(Zi)),Zi,'EdgeColor','none',...
270 | 'FaceColor',SHADING);
271 | elseif strcmp(STYLE,'fill')
272 | contourf(Xi,Yi,Zi,CONTOURNUM,'k');
273 | else
274 | error('Invalid style')
275 | end
276 | caxis([amin amax]) % set coloraxis
277 | end
278 |
279 | set(ha,'Xlim',[-rmax*1.3 rmax*1.3],'Ylim',[-rmax*1.3 rmax*1.3])
280 |
281 | % %%% Draw Head %%%%
282 | l = 0:2*pi/100:2*pi;
283 | basex = .18*rmax;
284 | tip = rmax*1.15; base = rmax-.004;
285 | EarX = [.497 .510 .518 .5299 .5419 .54 .547 .532 .510 .489];
286 | EarY = [.0555 .0775 .0783 .0746 .0555 -.0055 -.0932 -.1313 -.1384 -.1199];
287 |
288 | % Plot Electrodes
289 | if strcmp(ELECTROD,'on')
290 | hp2 = plot(y,x,EMARKER,'Color',ECOLOR,'markersize',EMARKERSIZE);
291 | elseif strcmp(ELECTROD,'labels')
292 | for i = 1:size(labels,1)
293 | text(y(i),x(i),labels(i,:),'HorizontalAlignment','center',...
294 | 'VerticalAlignment','middle','Color',ECOLOR,...
295 | 'FontSize',EFSIZE)
296 | end
297 | elseif strcmp(ELECTROD,'numbers')
298 | whos y x
299 | for i = 1:size(labels,1)
300 | text(y(i),x(i),int2str(i),'HorizontalAlignment','center',...
301 | 'VerticalAlignment','middle','Color',ECOLOR,...
302 | 'FontSize',EFSIZE)
303 | end
304 | end
305 |
306 | % Plot Head, Ears, Nose
307 | plot(cos(l).*rmax,sin(l).*rmax,...
308 | 'color',HCOLOR,'Linestyle','-','LineWidth',HLINEWIDTH);
309 |
310 | plot([.18*rmax;0;-.18*rmax],[base;tip;base],...
311 | 'Color',HCOLOR,'LineWidth',HLINEWIDTH);
312 |
313 | plot(EarX,EarY,'color',HCOLOR,'LineWidth',HLINEWIDTH)
314 | plot(-EarX,EarY,'color',HCOLOR,'LineWidth',HLINEWIDTH)
315 |
316 | hold off
317 | axis off
318 |
319 |
--------------------------------------------------------------------------------
/Part Three/trainClassifier.m:
--------------------------------------------------------------------------------
1 | function [trainedClassifier, validationAccuracy] = trainClassifier(trainingData)
2 | % trainClassifier(trainingData)
3 | % returns a trained classifier and its accuracy.
4 | % This code recreates the classification model trained in
5 | % Classification Learner app.
6 | %
7 | % Input:
8 | % trainingData: the training data of same data type as imported
9 | % in the app (table or matrix).
10 | %
11 | % Output:
12 | % trainedClassifier: a struct containing the trained classifier.
13 | % The struct contains various fields with information about the
14 | % trained classifier.
15 | %
16 | % trainedClassifier.predictFcn: a function to make predictions
17 | % on new data. It takes an input of the same form as this training
18 | % code (table or matrix) and returns predictions for the response.
19 | % If you supply a matrix, include only the predictors columns (or
20 | % rows).
21 | %
22 | % validationAccuracy: a double containing the accuracy in
23 | % percent. In the app, the History list displays this
24 | % overall accuracy score for each model.
25 | %
26 | % Use the code to train the model with new data.
27 | % To retrain your classifier, call the function from the command line
28 | % with your original data or new data as the input argument trainingData.
29 | %
30 | % For example, to retrain a classifier trained with the original data set
31 | % T, enter:
32 | % [trainedClassifier, validationAccuracy] = trainClassifier(T)
33 | %
34 | % To make predictions with the returned 'trainedClassifier' on new data T,
35 | % use
36 | % yfit = trainedClassifier.predictFcn(T)
37 | %
38 | % To automate training the same classifier with new data, or to learn how
39 | % to programmatically train classifiers, examine the generated code.
40 |
41 | % Auto-generated by MATLAB on 2019-01-01 19:20:30
42 |
43 |
44 | % Extract predictors and response
45 | % This code processes the data into the right shape for training the
46 | % classifier.
% Convert input to table. The 799 variable names all follow the pattern
% column_1 ... column_799, so they are generated programmatically rather
% than listed out in full.
varNames = arrayfun(@(k) sprintf('column_%d', k), 1:799, 'UniformOutput', false);
inputTable = array2table(trainingData, 'VariableNames', varNames);

% The first 798 columns are predictors; column_799 holds the class label.
predictorNames = varNames(1:798);
predictors = inputTable(:, predictorNames);
response = inputTable.column_799;
isCategoricalPredictor = false(1, 798);

% Train a classifier
% This code specifies all the classifier options and trains the classifier.
% 'PolynomialOrder' is left empty because it applies only to the
% 'polynomial' kernel; 'KernelScale' set to 'auto' picks a scale heuristically.
classificationSVM = fitcsvm(...
    predictors, ...
    response, ...
    'KernelFunction', 'gaussian', ...
    'PolynomialOrder', [], ...
    'KernelScale', 'auto', ...
    'BoxConstraint', 1, ...
    'Standardize', true, ...
    'ClassNames', [-1; 1]);
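% Tuning sketch (an addition, not part of the exported code): instead of the
% fixed BoxConstraint above, fitcsvm can search BoxConstraint and KernelScale
% automatically (available from R2016b), e.g.:
%   classificationSVM = fitcsvm(predictors, response, ...
%       'KernelFunction', 'gaussian', 'Standardize', true, ...
%       'ClassNames', [-1; 1], ...
%       'OptimizeHyperparameters', {'BoxConstraint', 'KernelScale'});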

% Create the result struct with predict function
predictorExtractionFcn = @(x) array2table(x, 'VariableNames', predictorNames);
svmPredictFcn = @(x) predict(classificationSVM, x);
trainedClassifier.predictFcn = @(x) svmPredictFcn(predictorExtractionFcn(x));

% Add additional fields to the result struct
trainedClassifier.ClassificationSVM = classificationSVM;
trainedClassifier.About = 'This struct is a trained classifier exported from Classification Learner R2016b.';
trainedClassifier.HowToPredict = sprintf('To make predictions on a new predictor column matrix, X, use: \n yfit = c.predictFcn(X) \nreplacing ''c'' with the name of the variable that is this struct, e.g. ''trainedClassifier''. \n \nX must contain exactly 798 columns because this classifier was trained using 798 predictors. \nX must contain only predictor columns in exactly the same order and format as your training \ndata. Do not include the response column or any columns you did not import into \nClassification Learner. \n \nFor more information, see How to predict using an exported model.');
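% Usage sketch (an addition, not part of the exported code): given a new
% trial matrix Xnew (a hypothetical numeric matrix with the same 798
% feature columns, in the same order, as the training data):
%   yfit = trainedClassifier.predictFcn(Xnew);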

% Extract predictors and response
% This code re-processes the data into the right shape for cross-validating
% the classifier, reusing the generated variable names from above.
inputTable = array2table(trainingData, 'VariableNames', varNames);
predictorNames = varNames(1:798);
predictors = inputTable(:, predictorNames);
response = inputTable.column_799;
isCategoricalPredictor = false(1, 798);

% Perform cross-validation
partitionedModel = crossval(trainedClassifier.ClassificationSVM, 'KFold', 5);

% Compute validation accuracy
validationAccuracy = 1 - kfoldLoss(partitionedModel, 'LossFun', 'ClassifError');

% Compute validation predictions and scores
[validationPredictions, validationScores] = kfoldPredict(partitionedModel);
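% Evaluation sketch (an addition, not part of the exported code): the k-fold
% predictions can be summarized per class with a confusion matrix:
%   C = confusionmat(response, validationPredictions);
% rows of C are the true classes (-1, 1); columns are the predicted classes.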
--------------------------------------------------------------------------------