├── ALL_model.py ├── DeepConvNet_training_ELU.py ├── DeepConvNet_training_LeakyReLU.py ├── DeepConvNet_training_ReLU.py ├── EEGNet_training_ELU.py ├── EEGNet_training_LeakyReLU.py ├── EEGNet_training_ReLU.py ├── Experiment Report.pdf ├── Plot_History_Result.py ├── README.md ├── S4b_test.npz ├── S4b_train.npz ├── X11b_test.npz ├── X11b_train.npz ├── checkpoint ├── DeepConvNet_checkpoint_ELU.rar ├── DeepConvNet_checkpoint_ELU訓練參數.txt ├── DeepConvNet_checkpoint_LeakyReLU.rar ├── DeepConvNet_checkpoint_LeakyReLU訓練參數.txt ├── DeepConvNet_checkpoint_ReLU.rar ├── DeepConvNet_checkpoint_ReLU訓練參數.txt ├── EEGNet_checkpoint_ELU.rar ├── EEGNet_checkpoint_ELU訓練參數.txt ├── EEGNet_checkpoint_LeakyReLU.rar ├── EEGNet_checkpoint_LeakyReLU訓練參數.txt ├── EEGNet_checkpoint_ReLU.rar └── EEGNet_checkpoint_ReLU訓練參數.txt ├── dataloader.py ├── history_csv ├── DeepConvNet_ELU.csv ├── DeepConvNet_LeakyReLU.csv ├── DeepConvNet_ReLU.csv ├── EEGNet_ELU.csv ├── EEGNet_LeakyReLU.csv └── EEGNet_ReLU.csv └── model_testing.py /ALL_model.py: -------------------------------------------------------------------------------- 1 | import torch.nn as nn 2 | import torch 3 | 4 | 5 | class DeepConvNet_ELU(torch.nn.Module): 6 | def __init__(self, n_output): 7 | super(DeepConvNet_ELU, self).__init__() 8 | self.model = nn.Sequential( 9 | # Conv2d(1, 25, kernel_size=(1,5),padding='VALID',bias=False), 10 | # Conv2d(25, 25, kernel_size=(2,1), padding='VALID',bias=False), 11 | nn.Conv2d(1, 25, kernel_size=(1,5),bias=False), 12 | nn.Conv2d(25, 25, kernel_size=(2,1),bias=False), 13 | nn.BatchNorm2d(25, eps=1e-05, momentum=0.1), 14 | nn.ELU(alpha=0.4), 15 | nn.MaxPool2d(kernel_size=(1,2)), 16 | nn.Dropout(p=0.4), 17 | 18 | # Conv2d(25, 50, kernel_size=(1,5),padding='VALID',bias=False), 19 | nn.Conv2d(25, 50, kernel_size=(1,5),bias=False), 20 | nn.BatchNorm2d(50, eps=1e-05, momentum=0.1), 21 | nn.ELU(alpha=0.4), 22 | nn.MaxPool2d(kernel_size=(1,2)), 23 | nn.Dropout(p=0.4), 24 | 25 | # Conv2d(50, 100, kernel_size=(1,5),padding='VALID',bias=False), 26 | nn.Conv2d(50, 100, kernel_size=(1,5),bias=False), 27 | nn.BatchNorm2d(100, eps=1e-05, momentum=0.1), 28 | nn.ELU(alpha=0.4), 29 | nn.MaxPool2d(kernel_size=(1,2)), 30 | nn.Dropout(p=0.4), 31 | 32 | # Conv2d(100, 200, kernel_size=(1,5),padding='VALID',bias=False), 33 | nn.Conv2d(100, 200, kernel_size=(1,5),bias=False), 34 | nn.BatchNorm2d(200, eps=1e-05, momentum=0.1), 35 | nn.ELU(alpha=0.4), 36 | nn.MaxPool2d(kernel_size=(1,2)), 37 | nn.Dropout(p=0.4), 38 | 39 | nn.Flatten(), 40 | nn.Linear(8600,n_output,bias=True) 41 | ) 42 | 43 | def forward(self, x): 44 | out = self.model(x) 45 | return out 46 | 47 | class DeepConvNet_ReLU(torch.nn.Module): 48 | def __init__(self, n_output): 49 | super(DeepConvNet_ReLU, self).__init__() 50 | self.model = nn.Sequential( 51 | nn.Conv2d(1, 25, kernel_size=(1,5),bias=False), 52 | nn.Conv2d(25, 25, kernel_size=(2,1),bias=False), 53 | nn.BatchNorm2d(25, eps=1e-05, momentum=0.1), 54 | nn.ReLU(), 55 | nn.MaxPool2d(kernel_size=(1,2)), 56 | nn.Dropout(p=0.47), 57 | 58 | nn.Conv2d(25, 50, kernel_size=(1,5),bias=False), 59 | nn.BatchNorm2d(50, eps=1e-05, momentum=0.1), 60 | nn.ReLU(), 61 | nn.MaxPool2d(kernel_size=(1,2)), 62 | nn.Dropout(p=0.47), 63 | 64 | nn.Conv2d(50, 100, kernel_size=(1,5),bias=False), 65 | nn.BatchNorm2d(100, eps=1e-05, momentum=0.1), 66 | nn.ReLU(), 67 | nn.MaxPool2d(kernel_size=(1,2)), 68 | nn.Dropout(p=0.47), 69 | 70 | nn.Conv2d(100, 200, kernel_size=(1,5),bias=False), 71 | nn.BatchNorm2d(200, eps=1e-05, momentum=0.1), 72 | nn.ReLU(), 73 | 
nn.MaxPool2d(kernel_size=(1,2)), 74 | nn.Dropout(p=0.47), 75 | 76 | nn.Flatten(), 77 | nn.Linear(8600,n_output,bias=True) 78 | ) 79 | 80 | def forward(self, x): 81 | out = self.model(x) 82 | return out 83 | 84 | class DeepConvNet_LeakyReLU(torch.nn.Module): 85 | def __init__(self, n_output): 86 | super(DeepConvNet_LeakyReLU, self).__init__() 87 | self.model = nn.Sequential( 88 | # Conv2d(1, 25, kernel_size=(1,5),padding='VALID',bias=False), 89 | # Conv2d(25, 25, kernel_size=(2,1), padding='VALID',bias=False), 90 | nn.Conv2d(1, 25, kernel_size=(1,5),bias=False), 91 | nn.Conv2d(25, 25, kernel_size=(2,1),bias=False), 92 | nn.BatchNorm2d(25, eps=1e-05, momentum=0.1), 93 | nn.LeakyReLU(negative_slope=0.04), 94 | nn.MaxPool2d(kernel_size=(1,2)), 95 | nn.Dropout(p=0.47), 96 | 97 | # Conv2d(25, 50, kernel_size=(1,5),padding='VALID',bias=False), 98 | nn.Conv2d(25, 50, kernel_size=(1,5),bias=False), 99 | nn.BatchNorm2d(50, eps=1e-05, momentum=0.1), 100 | nn.LeakyReLU(negative_slope=0.09), 101 | nn.MaxPool2d(kernel_size=(1,2)), 102 | nn.Dropout(p=0.47), 103 | 104 | # Conv2d(50, 100, kernel_size=(1,5),padding='VALID',bias=False), 105 | nn.Conv2d(50, 100, kernel_size=(1,5),bias=False), 106 | nn.BatchNorm2d(100, eps=1e-05, momentum=0.1), 107 | nn.LeakyReLU(negative_slope=0.04), 108 | nn.MaxPool2d(kernel_size=(1,2)), 109 | nn.Dropout(p=0.47), 110 | 111 | # Conv2d(100, 200, kernel_size=(1,5),padding='VALID',bias=False), 112 | nn.Conv2d(100, 200, kernel_size=(1,5),bias=False), 113 | nn.BatchNorm2d(200, eps=1e-05, momentum=0.1), 114 | nn.LeakyReLU(negative_slope=0.09), 115 | nn.MaxPool2d(kernel_size=(1,2)), 116 | nn.Dropout(p=0.47), 117 | 118 | nn.Flatten(), 119 | nn.Linear(8600,n_output,bias=True) 120 | ) 121 | 122 | def forward(self, x): 123 | out = self.model(x) 124 | return out 125 | 126 | class EEGNet_ELU(torch.nn.Module): 127 | def __init__(self, n_output): 128 | super(EEGNet_ELU, self).__init__() 129 | self.firstConv = nn.Sequential( 130 | nn.Conv2d(1, 16, kernel_size=(1,51), stride=(1,1), padding=(0,25),bias=False), 131 | nn.BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) 132 | ) 133 | self.depthwiseConv = nn.Sequential( 134 | nn.Conv2d(16, 32, kernel_size=(2,1), stride=(1,1), groups=8,bias=False), 135 | nn.BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True), 136 | nn.ELU(alpha=0.1), 137 | nn.AvgPool2d(kernel_size=(1,4), stride=(1,4),padding=0), 138 | nn.Dropout(p=0.35) 139 | ) 140 | self.separableConv = nn.Sequential( 141 | nn.Conv2d(32, 32, kernel_size=(1,15), stride=(1,1), padding=(0,7),bias=False), 142 | nn.BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True), 143 | nn.ELU(alpha=0.1), 144 | nn.AvgPool2d(kernel_size=(1,8), stride=(1,8),padding=0), 145 | nn.Dropout(p=0.35) 146 | ) 147 | self.classify = nn.Sequential( 148 | nn.Flatten(), 149 | nn.Linear(736,n_output,bias=True) 150 | ) 151 | 152 | def forward(self, x): 153 | out = self.firstConv(x) 154 | out = self.depthwiseConv(out) 155 | out = self.separableConv(out) 156 | out = self.classify(out) 157 | return out 158 | 159 | class EEGNet_LeakyReLU(torch.nn.Module): 160 | def __init__(self, n_output): 161 | super(EEGNet_LeakyReLU, self).__init__() 162 | self.firstConv = nn.Sequential( 163 | nn.Conv2d(1, 16, kernel_size=(1,51), stride=(1,1), padding=(0,25),bias=False), 164 | nn.BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) 165 | ) 166 | self.depthwiseConv = nn.Sequential( 167 | nn.Conv2d(16, 32, kernel_size=(2,1), stride=(1,1), 
groups=8,bias=False), 168 | nn.BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True), 169 | nn.LeakyReLU(negative_slope=0.06), 170 | nn.AvgPool2d(kernel_size=(1,4), stride=(1,4),padding=0), 171 | nn.Dropout(p=0.35) 172 | ) 173 | self.separableConv = nn.Sequential( 174 | nn.Conv2d(32, 32, kernel_size=(1,15), stride=(1,1), padding=(0,7),bias=False), 175 | nn.BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True), 176 | nn.LeakyReLU(negative_slope=0.06), 177 | nn.AvgPool2d(kernel_size=(1,8), stride=(1,8),padding=0), 178 | nn.Dropout(p=0.35) 179 | ) 180 | self.classify = nn.Sequential( 181 | nn.Flatten(), 182 | nn.Linear(736,n_output,bias=True) 183 | ) 184 | 185 | def forward(self, x): 186 | out = self.firstConv(x) 187 | out = self.depthwiseConv(out) 188 | out = self.separableConv(out) 189 | out = self.classify(out) 190 | return out 191 | 192 | class EEGNet_ReLU(torch.nn.Module): 193 | def __init__(self, n_output): 194 | super(EEGNet_ReLU, self).__init__() 195 | self.firstConv = nn.Sequential( 196 | nn.Conv2d(1, 16, kernel_size=(1,51), stride=(1,1), padding=(0,25),bias=False), 197 | nn.BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) 198 | ) 199 | self.depthwiseConv = nn.Sequential( 200 | nn.Conv2d(16, 32, kernel_size=(2,1), stride=(1,1), groups=8,bias=False), 201 | nn.BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True), 202 | nn.ReLU(), 203 | nn.AvgPool2d(kernel_size=(1,4), stride=(1,4),padding=0), 204 | nn.Dropout(p=0.35) 205 | ) 206 | self.separableConv = nn.Sequential( 207 | nn.Conv2d(32, 32, kernel_size=(1,15), stride=(1,1), padding=(0,7),bias=False), 208 | nn.BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True), 209 | nn.ReLU(), 210 | nn.AvgPool2d(kernel_size=(1,8), stride=(1,8),padding=0), 211 | nn.Dropout(p=0.35) 212 | ) 213 | self.classify = nn.Sequential( 214 | nn.Flatten(), 215 | nn.Linear(736,n_output,bias=True) 216 | ) 217 | 218 | def forward(self, x): 219 | out = self.firstConv(x) 220 | out = self.depthwiseConv(out) 221 | out = self.separableConv(out) 222 | out = self.classify(out) 223 | return out -------------------------------------------------------------------------------- /DeepConvNet_training_ELU.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.autograd import Variable 3 | import torch.nn.functional as F 4 | import torch.nn as nn 5 | from torch.nn.modules import transformer 6 | from dataloader import read_bci_data 7 | import matplotlib.pyplot as plt 8 | from torch.utils.data import TensorDataset,DataLoader 9 | import numpy as np 10 | import torch.optim as optim 11 | from torchsummary import summary 12 | from torchvision import transforms 13 | import pandas as pd 14 | import os 15 | import argparse 16 | 17 | def testing(x_test,y_test,model,device,filepath): 18 | 19 | # model.load_state_dict(torch.load(filepath)) 20 | model.eval() 21 | with torch.no_grad(): 22 | model.cuda(0) 23 | n = x_test.shape[0] 24 | 25 | x_test = x_test.astype("float32") 26 | y_test = y_test.astype("float32").reshape(y_test.shape[0],) 27 | 28 | x_test, y_test = Variable(torch.from_numpy(x_test)),Variable(torch.from_numpy(y_test)) 29 | x_test,y_test = x_test.to(device),y_test.to(device) 30 | y_pred_test = model(x_test) 31 | correct = (torch.max(y_pred_test,1)[1]==y_test).sum().item() 32 | # print("testing accuracy:",correct/n) 33 | return correct/n 34 | 35 | def init_weights(m): 36 | if type(m) == 
nn.Linear: 37 | # torch.nn.init.uniform(m.weight) 38 | torch.nn.init.xavier_uniform(m.weight) 39 | m.bias.data.fill_(0.08) 40 | 41 | parser = argparse.ArgumentParser() 42 | parser.add_argument('--epochs', type=int, default='3000', help='training epochs') 43 | parser.add_argument('--learning_rate', type=float, default='1e-3', help='learning rate') 44 | parser.add_argument('--save_model', action='store_true', help='check if you want to save the model.') 45 | parser.add_argument('--save_csv', action='store_true', help='check if you want to save the training history.') 46 | opt = parser.parse_args() 47 | 48 | train_data, train_label, test_data, test_label = read_bci_data() 49 | 50 | filepath = os.path.abspath(os.path.dirname(__file__))+"\checkpoint\DeepConvNet_checkpoint_ELU.rar" 51 | filepath_csv = os.path.abspath(os.path.dirname(__file__))+"\history_csv\DeepConvNet_ELU.csv" 52 | 53 | n = train_data.shape[0] 54 | epochs = opt.epochs 55 | lr = opt.learning_rate 56 | 57 | min_loss=1 58 | max_accuracy = 0 59 | device = torch.device("cuda:0") 60 | 61 | train_data = train_data.astype("float32") 62 | train_label = train_label.astype("float32").reshape(train_label.shape[0],) 63 | 64 | # train_data.shape = (1080,1,2,750) 65 | # train_label.shape = (1080,) 66 | 67 | x, y = Variable(torch.from_numpy(train_data)),Variable(torch.from_numpy(train_label)) 68 | y=torch.tensor(y, dtype=torch.long) 69 | 70 | class DeepConvNet_ELU(torch.nn.Module): 71 | def __init__(self, n_output): 72 | super(DeepConvNet_ELU, self).__init__() 73 | self.model = nn.Sequential( 74 | # Conv2d(1, 25, kernel_size=(1,5),padding='VALID',bias=False), 75 | # Conv2d(25, 25, kernel_size=(2,1), padding='VALID',bias=False), 76 | nn.Conv2d(1, 25, kernel_size=(1,5),bias=False), 77 | nn.Conv2d(25, 25, kernel_size=(2,1),bias=False), 78 | nn.BatchNorm2d(25, eps=1e-05, momentum=0.1), 79 | nn.ELU(alpha=0.4), 80 | nn.MaxPool2d(kernel_size=(1,2)), 81 | nn.Dropout(p=0.4), 82 | 83 | # Conv2d(25, 50, kernel_size=(1,5),padding='VALID',bias=False), 84 | nn.Conv2d(25, 50, kernel_size=(1,5),bias=False), 85 | nn.BatchNorm2d(50, eps=1e-05, momentum=0.1), 86 | nn.ELU(alpha=0.4), 87 | nn.MaxPool2d(kernel_size=(1,2)), 88 | nn.Dropout(p=0.4), 89 | 90 | # Conv2d(50, 100, kernel_size=(1,5),padding='VALID',bias=False), 91 | nn.Conv2d(50, 100, kernel_size=(1,5),bias=False), 92 | nn.BatchNorm2d(100, eps=1e-05, momentum=0.1), 93 | nn.ELU(alpha=0.4), 94 | nn.MaxPool2d(kernel_size=(1,2)), 95 | nn.Dropout(p=0.4), 96 | 97 | # Conv2d(100, 200, kernel_size=(1,5),padding='VALID',bias=False), 98 | nn.Conv2d(100, 200, kernel_size=(1,5),bias=False), 99 | nn.BatchNorm2d(200, eps=1e-05, momentum=0.1), 100 | nn.ELU(alpha=0.4), 101 | nn.MaxPool2d(kernel_size=(1,2)), 102 | nn.Dropout(p=0.4), 103 | 104 | nn.Flatten(), 105 | nn.Linear(8600,n_output,bias=True) 106 | ) 107 | 108 | def forward(self, x): 109 | out = self.model(x) 110 | return out 111 | 112 | model = DeepConvNet_ELU(n_output=2) 113 | # model.apply(init_weights) 114 | criterion = nn.CrossEntropyLoss() 115 | 116 | # optimizer = optim.Adam(model.parameters(),lr = 1e-3) 117 | optimizer = optim.RMSprop(model.parameters(),lr = lr, momentum = 0.9, weight_decay=1e-3) 118 | # optimizer = optim.SGD(model.parameters(), lr=1e-3, momentum=0.5, weight_decay=5e-4) 119 | scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[400,500,1000], gamma=0.5) 120 | 121 | model.to(device) 122 | summary(model.cuda(),(1,2,750)) 123 | 124 | loss_history = [] 125 | train_accuracy_history = [] 126 | test_accuracy_history = [] 127 | 128 | for 
epoch in range(epochs): 129 | # for idx,(data,target) in enumerate(loader): 130 | model.train() 131 | x,y = x.to(device),y.to(device) 132 | y_pred = model(x) 133 | 134 | # print(y_pred.shape) 135 | # print(y.shape) 136 | 137 | # loss = F.mse_loss(y_pred, y) 138 | 139 | loss = criterion(y_pred, y) 140 | train_loss = loss.item() 141 | loss_history.append(train_loss) 142 | 143 | optimizer.zero_grad() 144 | loss.backward() 145 | optimizer.step() 146 | # scheduler.step() 147 | 148 | if epoch%1==0: 149 | 150 | # correct= (y_pred.ge(0.5) == y).sum().item() 151 | n = y.shape[0] 152 | correct = (torch.max(y_pred,1)[1]==y).sum().item() 153 | train_accuracy = correct / n 154 | train_accuracy_history.append(train_accuracy) 155 | 156 | # print("epochs:",epoch,"loss:",loss.item(),"Accuracy:",(correct / n),"Learning rate:",scheduler.get_last_lr()[0]) 157 | test_accuracy = testing(test_data,test_label,model,device,filepath) 158 | test_accuracy_history.append(test_accuracy) 159 | 160 | print("epochs:",epoch,"loss:",train_loss,"Training Accuracy:",train_accuracy,"Testing Accuracy:",test_accuracy,"Learning rate:",scheduler.get_last_lr()[0]) 161 | 162 | if train_loss<min_loss: 163 | min_loss = train_loss 164 | 165 | # save a checkpoint whenever the training accuracy improves 166 | if train_accuracy>max_accuracy: 167 | max_accuracy = train_accuracy 168 | if opt.save_model: 169 | torch.save(model.state_dict(), filepath) 170 | 171 | print("Max accuracy:",max_accuracy,"Min loss:",min_loss) 172 | df = pd.DataFrame({"loss":loss_history,"train_accuracy_history":train_accuracy_history,"test_accuracy_history":test_accuracy_history}) 173 | 174 | if opt.save_csv: 175 | df.to_csv(filepath_csv,encoding="utf-8-sig") 176 | -------------------------------------------------------------------------------- /DeepConvNet_training_LeakyReLU.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.autograd import Variable 3 | import torch.nn.functional as F 4 | import torch.nn as nn 5 | from torch.nn.modules import transformer 6 | from dataloader import read_bci_data 7 | import matplotlib.pyplot as plt 8 | from torch.utils.data import TensorDataset,DataLoader 9 | import numpy as np 10 | import torch.optim as optim 11 | from torchsummary import summary 12 | from torchvision import transforms 13 | import pandas as pd 14 | import os 15 | import argparse 16 | 17 | def testing(x_test,y_test,model,device,filepath): 18 | 19 | # model.load_state_dict(torch.load(filepath)) 20 | model.eval() 21 | with torch.no_grad(): 22 | model.cuda(0) 23 | n = x_test.shape[0] 24 | 25 | x_test = x_test.astype("float32") 26 | y_test = y_test.astype("float32").reshape(y_test.shape[0],) 27 | 28 | x_test, y_test = Variable(torch.from_numpy(x_test)),Variable(torch.from_numpy(y_test)) 29 | x_test,y_test = x_test.to(device),y_test.to(device) 30 | y_pred_test = model(x_test) 31 | correct = (torch.max(y_pred_test,1)[1]==y_test).sum().item() 32 | # print("testing accuracy:",correct/n) 33 | return correct/n 34 | 35 | def init_weights(m): 36 | if type(m) == nn.Linear: 37 | # torch.nn.init.uniform(m.weight) 38 | torch.nn.init.xavier_uniform(m.weight) 39 | m.bias.data.fill_(0.08) 40 | 41 | train_data, train_label, test_data, test_label = read_bci_data() 42 | 43 | parser = argparse.ArgumentParser() 44 | parser.add_argument('--epochs', type=int, default='3000', help='training epochs') 45 | parser.add_argument('--learning_rate', type=float, default='1e-3', help='learning rate') 46 | parser.add_argument('--save_model', action='store_true', help='check if you want to save the model.') 47 | parser.add_argument('--save_csv', action='store_true',
help='check if you want to save the training history.') 48 | opt = parser.parse_args() 49 | 50 | filepath = os.path.abspath(os.path.dirname(__file__))+"\checkpoint\DeepConvNet_checkpoint_LeakyReLU.rar" 51 | filepath_csv = os.path.abspath(os.path.dirname(__file__))+"\history_csv\DeepConvNet_LeakyReLU.csv" 52 | 53 | n = train_data.shape[0] 54 | epochs = opt.epochs 55 | lr = opt.learning_rate 56 | 57 | min_loss=1 58 | max_accuracy = 0 59 | device = torch.device("cuda:0") 60 | 61 | train_data = train_data.astype("float32") 62 | train_label = train_label.astype("float32").reshape(train_label.shape[0],) 63 | 64 | # train_data.shape = (1080,1,2,750) 65 | # train_label.shape = (1080,) 66 | 67 | x, y = Variable(torch.from_numpy(train_data)),Variable(torch.from_numpy(train_label)) 68 | y=torch.tensor(y, dtype=torch.long) 69 | 70 | class DeepConvNet_LeakyReLU(torch.nn.Module): 71 | def __init__(self, n_output): 72 | super(DeepConvNet_LeakyReLU, self).__init__() 73 | self.model = nn.Sequential( 74 | # Conv2d(1, 25, kernel_size=(1,5),padding='VALID',bias=False), 75 | # Conv2d(25, 25, kernel_size=(2,1), padding='VALID',bias=False), 76 | nn.Conv2d(1, 25, kernel_size=(1,5),bias=False), 77 | nn.Conv2d(25, 25, kernel_size=(2,1),bias=False), 78 | nn.BatchNorm2d(25, eps=1e-05, momentum=0.1), 79 | nn.LeakyReLU(negative_slope=0.04), 80 | nn.MaxPool2d(kernel_size=(1,2)), 81 | nn.Dropout(p=0.47), 82 | 83 | # Conv2d(25, 50, kernel_size=(1,5),padding='VALID',bias=False), 84 | nn.Conv2d(25, 50, kernel_size=(1,5),bias=False), 85 | nn.BatchNorm2d(50, eps=1e-05, momentum=0.1), 86 | nn.LeakyReLU(negative_slope=0.09), 87 | nn.MaxPool2d(kernel_size=(1,2)), 88 | nn.Dropout(p=0.47), 89 | 90 | # Conv2d(50, 100, kernel_size=(1,5),padding='VALID',bias=False), 91 | nn.Conv2d(50, 100, kernel_size=(1,5),bias=False), 92 | nn.BatchNorm2d(100, eps=1e-05, momentum=0.1), 93 | nn.LeakyReLU(negative_slope=0.04), 94 | nn.MaxPool2d(kernel_size=(1,2)), 95 | nn.Dropout(p=0.47), 96 | 97 | # Conv2d(100, 200, kernel_size=(1,5),padding='VALID',bias=False), 98 | nn.Conv2d(100, 200, kernel_size=(1,5),bias=False), 99 | nn.BatchNorm2d(200, eps=1e-05, momentum=0.1), 100 | nn.LeakyReLU(negative_slope=0.09), 101 | nn.MaxPool2d(kernel_size=(1,2)), 102 | nn.Dropout(p=0.47), 103 | 104 | nn.Flatten(), 105 | nn.Linear(8600,n_output,bias=True) 106 | ) 107 | 108 | def forward(self, x): 109 | out = self.model(x) 110 | return out 111 | 112 | model = DeepConvNet_LeakyReLU(n_output=2) 113 | print(model) 114 | # model.apply(init_weights) 115 | criterion = nn.CrossEntropyLoss() 116 | 117 | # optimizer = optim.Adam(model.parameters(),lr = 1e-3) 118 | optimizer = optim.RMSprop(model.parameters(),lr = lr, momentum = 0.9, weight_decay=1e-3) 119 | # optimizer = optim.SGD(model.parameters(), lr=1e-3, momentum=0.5, weight_decay=5e-4) 120 | scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[400,500,1000], gamma=0.5) 121 | 122 | model.to(device) 123 | summary(model.cuda(),(1,2,750)) 124 | 125 | loss_history = [] 126 | train_accuracy_history = [] 127 | test_accuracy_history = [] 128 | 129 | for epoch in range(epochs): 130 | # for idx,(data,target) in enumerate(loader): 131 | model.train() 132 | x,y = x.to(device),y.to(device) 133 | y_pred = model(x) 134 | 135 | # print(y_pred.shape) 136 | # print(y.shape) 137 | 138 | # loss = F.mse_loss(y_pred, y) 139 | 140 | loss = criterion(y_pred, y) 141 | train_loss = loss.item() 142 | loss_history.append(train_loss) 143 | 144 | optimizer.zero_grad() 145 | loss.backward() 146 | optimizer.step() 147 | # scheduler.step() 148 | 149 | if
epoch%1==0: 150 | 151 | # correct= (y_pred.ge(0.5) == y).sum().item() 152 | n = y.shape[0] 153 | correct = (torch.max(y_pred,1)[1]==y).sum().item() 154 | train_accuracy = correct / n 155 | train_accuracy_history.append(train_accuracy) 156 | 157 | # print("epochs:",epoch,"loss:",loss.item(),"Accuracy:",(correct / n),"Learning rate:",scheduler.get_last_lr()[0]) 158 | test_accuracy = testing(test_data,test_label,model,device,filepath) 159 | test_accuracy_history.append(test_accuracy) 160 | 161 | print("epochs:",epoch,"loss:",train_loss,"Training Accuracy:",train_accuracy,"Testing Accuracy:",test_accuracy,"Learning rate:",scheduler.get_last_lr()[0]) 162 | 163 | if train_loss<min_loss: 164 | min_loss = train_loss 165 | 166 | # save a checkpoint whenever the training accuracy improves 167 | if train_accuracy>max_accuracy: 168 | max_accuracy = train_accuracy 169 | if opt.save_model: 170 | torch.save(model.state_dict(), filepath) 171 | 172 | print("Max accuracy:",max_accuracy,"Min loss:",min_loss) 173 | df = pd.DataFrame({"loss":loss_history,"train_accuracy_history":train_accuracy_history,"test_accuracy_history":test_accuracy_history}) 174 | 175 | if opt.save_csv: 176 | df.to_csv(filepath_csv,encoding="utf-8-sig") 177 | -------------------------------------------------------------------------------- /DeepConvNet_training_ReLU.py: -------------------------------------------------------------------------------- 1 | import torch 2 | from torch.autograd import Variable 3 | import torch.nn.functional as F 4 | import torch.nn as nn 5 | from torch.nn.modules import transformer 6 | from dataloader import read_bci_data 7 | import matplotlib.pyplot as plt 8 | from torch.utils.data import TensorDataset,DataLoader 9 | import numpy as np 10 | import torch.optim as optim 11 | from torchsummary import summary 12 | from torchvision import transforms 13 | import pandas as pd 14 | import os 15 | import argparse 16 | 17 | def testing(x_test,y_test,model,device,filepath): 18 | 19 | # model.load_state_dict(torch.load(filepath)) 20 | model.eval() 21 | with torch.no_grad(): 22 | model.cuda(0) 23 | n = x_test.shape[0] 24 | 25 | x_test = x_test.astype("float32") 26 | y_test = y_test.astype("float32").reshape(y_test.shape[0],) 27 | 28 | x_test, y_test = Variable(torch.from_numpy(x_test)),Variable(torch.from_numpy(y_test)) 29 | x_test,y_test = x_test.to(device),y_test.to(device) 30 | y_pred_test = model(x_test) 31 | correct = (torch.max(y_pred_test,1)[1]==y_test).sum().item() 32 | # print("testing accuracy:",correct/n) 33 | return correct/n 34 | 35 | 36 | def init_weights(m): 37 | if type(m) == nn.Linear: 38 | # torch.nn.init.uniform(m.weight) 39 | torch.nn.init.xavier_uniform(m.weight) 40 | m.bias.data.fill_(0.08) 41 | 42 | train_data, train_label, test_data, test_label = read_bci_data() 43 | 44 | parser = argparse.ArgumentParser() 45 | parser.add_argument('--epochs', type=int, default='3000', help='training epochs') 46 | parser.add_argument('--learning_rate', type=float, default='1e-3', help='learning rate') 47 | parser.add_argument('--save_model', action='store_true', help='check if you want to save the model.') 48 | parser.add_argument('--save_csv', action='store_true', help='check if you want to save the training history.') 49 | opt = parser.parse_args() 50 | 51 | filepath = os.path.abspath(os.path.dirname(__file__))+"\checkpoint\DeepConvNet_checkpoint_ReLU.rar" 52 | filepath_csv = os.path.abspath(os.path.dirname(__file__))+"\history_csv\DeepConvNet_ReLU.csv" 53 | 54 | n = train_data.shape[0] 55 | epochs = opt.epochs 56 | lr = opt.learning_rate 57 | 58 | min_loss = 1 59 | max_accuracy = 0 60 | device = torch.device("cuda:0") 61 | 62 | train_data =
train_data.astype("float32") 63 | train_label = train_label.astype("float32").reshape(train_label.shape[0],) 64 | 65 | # train_data.shape = (1080,1,2,750) 66 | # train_label.shape = (1080,) 67 | 68 | x, y = Variable(torch.from_numpy(train_data)),Variable(torch.from_numpy(train_label)) 69 | y=torch.tensor(y, dtype=torch.long) 70 | 71 | class DeepConvNet_ReLU(torch.nn.Module): 72 | def __init__(self, n_output): 73 | super(DeepConvNet_ReLU, self).__init__() 74 | self.model = nn.Sequential( 75 | nn.Conv2d(1, 25, kernel_size=(1,5),bias=False), 76 | nn.Conv2d(25, 25, kernel_size=(2,1),bias=False), 77 | nn.BatchNorm2d(25, eps=1e-05, momentum=0.1), 78 | nn.ReLU(), 79 | nn.MaxPool2d(kernel_size=(1,2)), 80 | nn.Dropout(p=0.47), 81 | 82 | nn.Conv2d(25, 50, kernel_size=(1,5),bias=False), 83 | nn.BatchNorm2d(50, eps=1e-05, momentum=0.1), 84 | nn.ReLU(), 85 | nn.MaxPool2d(kernel_size=(1,2)), 86 | nn.Dropout(p=0.47), 87 | 88 | nn.Conv2d(50, 100, kernel_size=(1,5),bias=False), 89 | nn.BatchNorm2d(100, eps=1e-05, momentum=0.1), 90 | nn.ReLU(), 91 | nn.MaxPool2d(kernel_size=(1,2)), 92 | nn.Dropout(p=0.47), 93 | 94 | nn.Conv2d(100, 200, kernel_size=(1,5),bias=False), 95 | nn.BatchNorm2d(200, eps=1e-05, momentum=0.1), 96 | nn.ReLU(), 97 | nn.MaxPool2d(kernel_size=(1,2)), 98 | nn.Dropout(p=0.47), 99 | 100 | nn.Flatten(), 101 | nn.Linear(8600,n_output,bias=True) 102 | ) 103 | 104 | def forward(self, x): 105 | out = self.model(x) 106 | return out 107 | 108 | model = DeepConvNet_ReLU(n_output=2) 109 | # model.apply(init_weights) 110 | criterion = nn.CrossEntropyLoss() 111 | 112 | # optimizer = optim.Adam(model.parameters(),lr = 1e-3) 113 | optimizer = optim.RMSprop(model.parameters(),lr = lr, momentum = 0.9, weight_decay=1e-3) 114 | # optimizer = optim.SGD(model.parameters(), lr=1e-3, momentum=0.5, weight_decay=5e-4) 115 | scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[600], gamma=5e-1) 116 | 117 | model.to(device) 118 | summary(model.cuda(),(1,2,750)) 119 | 120 | loss_history = [] 121 | train_accuracy_history = [] 122 | test_accuracy_history = [] 123 | 124 | for epoch in range(epochs): 125 | # for idx,(data,target) in enumerate(loader): 126 | model.train() 127 | x,y = x.to(device),y.to(device) 128 | y_pred = model(x) 129 | 130 | # print(y_pred.shape) 131 | # print(y.shape) 132 | 133 | # loss = F.mse_loss(y_pred, y) 134 | 135 | loss = criterion(y_pred, y) 136 | train_loss = loss.item() 137 | loss_history.append(train_loss) 138 | 139 | optimizer.zero_grad() 140 | loss.backward() 141 | optimizer.step() 142 | # scheduler.step() 143 | 144 | if epoch%1==0: 145 | 146 | # correct= (y_pred.ge(0.5) == y).sum().item() 147 | n = y.shape[0] 148 | correct = (torch.max(y_pred,1)[1]==y).sum().item() 149 | train_accuracy = correct / n 150 | train_accuracy_history.append(train_accuracy) 151 | 152 | # print("epochs:",epoch,"loss:",loss.item(),"Accuracy:",(correct / n),"Learning rate:",scheduler.get_last_lr()[0]) 153 | test_accuracy = testing(test_data,test_label,model,device,filepath) 154 | test_accuracy_history.append(test_accuracy) 155 | 156 | print("epochs:",epoch,"loss:",train_loss,"Training Accuracy:",train_accuracy,"Testing Accuracy:",test_accuracy,"Learning rate:",scheduler.get_last_lr()[0]) 157 | 158 | if train_lossmax_accuracy: 163 | max_accuracy = train_accuracy 164 | if opt.save_model: 165 | torch.save(model.state_dict(), filepath) 166 | 167 | print("最大的Accuracy為:",max_accuracy,"最小的Loss值為:",min_loss) 168 | df = 
pd.DataFrame({"loss":loss_history,"train_accuracy_history":train_accuracy_history,"test_accuracy_history":test_accuracy_history}) 169 | 170 | if opt.save_csv: 171 | df.to_csv(filepath_csv,encoding="utf-8-sig") 172 | -------------------------------------------------------------------------------- /EEGNet_training_ELU.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Fri Jul 9 17:01:35 2021 4 | 5 | @author: haoyuan 6 | """ 7 | 8 | import torch 9 | from torch.autograd import Variable 10 | import torch.nn.functional as F 11 | import torch.nn as nn 12 | from dataloader import read_bci_data 13 | import matplotlib.pyplot as plt 14 | from torch.utils.data import TensorDataset,DataLoader 15 | import numpy as np 16 | import torch.optim as optim 17 | from torchsummary import summary 18 | import pandas as pd 19 | import os 20 | import argparse 21 | 22 | def testing(x_test,y_test,model,device,filepath): 23 | 24 | # model.load_state_dict(torch.load(filepath)) 25 | model.eval() 26 | with torch.no_grad(): 27 | model.cuda(0) 28 | n = x_test.shape[0] 29 | 30 | x_test = x_test.astype("float32") 31 | y_test = y_test.astype("float32").reshape(y_test.shape[0],) 32 | 33 | x_test, y_test = Variable(torch.from_numpy(x_test)),Variable(torch.from_numpy(y_test)) 34 | 35 | x_test,y_test = x_test.to(device),y_test.to(device) 36 | y_pred_test = model(x_test) 37 | 38 | correct_test = (torch.max(y_pred_test,1)[1]==y_test).sum().item() 39 | test_accuracy = correct_test/n 40 | # print("testing accuracy:",correct/n) 41 | 42 | return test_accuracy 43 | 44 | parser = argparse.ArgumentParser() 45 | parser.add_argument('--epochs', type=int, default='700', help='training epochs') 46 | parser.add_argument('--learning_rate', type=float, default='1e-3', help='learning rate') 47 | parser.add_argument('--save_model', action='store_true', help='check if you want to save the model.') 48 | parser.add_argument('--save_csv', action='store_true', help='check if you want to save the training history.') 49 | opt = parser.parse_args() 50 | 51 | torch.manual_seed(1) # reproducible 52 | epochs = 700 53 | lr = 1e-3 54 | 55 | filepath = os.path.abspath(os.path.dirname(__file__))+"\checkpoint\EEGNet_checkpoint_ELU.rar" 56 | filepath_csv = os.path.abspath(os.path.dirname(__file__))+"\history_csv\EEGNet_ELU.csv" 57 | 58 | min_loss=1 59 | max_accuracy = 0 60 | device = torch.device("cuda:0") 61 | 62 | train_data, train_label, test_data, test_label = read_bci_data() 63 | 64 | n = train_data.shape[0] 65 | 66 | train_data = train_data.astype("float32") 67 | train_label = train_label.astype("float32").reshape(train_label.shape[0],) 68 | 69 | # train_data.shape = (1080,1,2,750) 70 | # train_label.shape = (1080,) 71 | 72 | # loader = DataLoader(TensorDataset(train_data,train_label),batch_size=8) 73 | x, y = Variable(torch.from_numpy(train_data)),Variable(torch.from_numpy(train_label)) 74 | y=torch.tensor(y, dtype=torch.long) 75 | 76 | class EEGNet_ELU(torch.nn.Module): 77 | def __init__(self, n_output): 78 | super(EEGNet_ELU, self).__init__() 79 | self.firstConv = nn.Sequential( 80 | nn.Conv2d(1, 16, kernel_size=(1,51), stride=(1,1), padding=(0,25),bias=False), 81 | nn.BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) 82 | ) 83 | self.depthwiseConv = nn.Sequential( 84 | nn.Conv2d(16, 32, kernel_size=(2,1), stride=(1,1), groups=8,bias=False), 85 | nn.BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True), 86 | 
nn.ELU(alpha=0.1), 87 | nn.AvgPool2d(kernel_size=(1,4), stride=(1,4),padding=0), 88 | nn.Dropout(p=0.35) 89 | ) 90 | self.separableConv = nn.Sequential( 91 | nn.Conv2d(32, 32, kernel_size=(1,15), stride=(1,1), padding=(0,7),bias=False), 92 | nn.BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True), 93 | nn.ELU(alpha=0.1), 94 | nn.AvgPool2d(kernel_size=(1,8), stride=(1,8),padding=0), 95 | nn.Dropout(p=0.35) 96 | ) 97 | self.classify = nn.Sequential( 98 | nn.Flatten(), 99 | nn.Linear(736,n_output,bias=True) 100 | ) 101 | 102 | def forward(self, x): 103 | out = self.firstConv(x) 104 | out = self.depthwiseConv(out) 105 | out = self.separableConv(out) 106 | out = self.classify(out) 107 | return out 108 | 109 | model = EEGNet_ELU(n_output=2) 110 | print(model) 111 | criterion = nn.CrossEntropyLoss() 112 | 113 | # optimizer = optim.Adam(model.parameters(),lr = lr) 114 | optimizer = optim.RMSprop(model.parameters(),lr = lr, momentum = 0.6) 115 | scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[100,300,500], gamma=0.1) 116 | 117 | model.cuda(0) 118 | summary(model.cuda(),(1,2,750)) 119 | 120 | loss_history = [] 121 | train_accuracy_history = [] 122 | test_accuracy_history = [] 123 | 124 | for epoch in range(epochs): 125 | # for idx,(data,target) in enumerate(loader): 126 | model.train() 127 | x,y = x.to(device),y.to(device) 128 | y_pred = model(x) 129 | 130 | # print(y_pred.shape) 131 | # print(y.shape) 132 | 133 | # loss = F.mse_loss(y_pred, y) 134 | 135 | loss = criterion(y_pred, y) 136 | train_loss = loss.item() 137 | loss_history.append(train_loss) 138 | 139 | optimizer.zero_grad() 140 | loss.backward() 141 | optimizer.step() 142 | # scheduler.step() 143 | 144 | if epoch%1==0: 145 | 146 | # correct= (y_pred.ge(0.5) == y).sum().item() 147 | n = y.shape[0] 148 | correct = (torch.max(y_pred,1)[1]==y).sum().item() 149 | train_accuracy = correct / n 150 | train_accuracy_history.append(train_accuracy) 151 | 152 | # print("epochs:",epoch,"loss:",loss.item(),"Accuracy:",(correct / n),"Learning rate:",scheduler.get_last_lr()[0]) 153 | test_accuracy = testing(test_data,test_label,model,device,filepath) 154 | test_accuracy_history.append(test_accuracy) 155 | 156 | print("epochs:",epoch,"loss:",train_loss,"Training Accuracy:",train_accuracy,"Testing Accuracy:",test_accuracy,"Learning rate:",scheduler.get_last_lr()[0]) 157 | 158 | if train_loss<min_loss: 159 | min_loss = train_loss 160 | 161 | # save a checkpoint whenever the training accuracy improves 162 | if train_accuracy>max_accuracy: 163 | max_accuracy = train_accuracy 164 | if opt.save_model: 165 | torch.save(model.state_dict(), filepath) 166 | 167 | print("Max accuracy:",max_accuracy,"Min loss:",min_loss) 168 | df = pd.DataFrame({"loss":loss_history,"train_accuracy_history":train_accuracy_history,"test_accuracy_history":test_accuracy_history}) 169 | 170 | if opt.save_csv: 171 | df.to_csv(filepath_csv,encoding="utf-8-sig") 172 | 173 | -------------------------------------------------------------------------------- /EEGNet_training_LeakyReLU.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Fri Jul 9 17:01:35 2021 4 | 5 | @author: haoyuan 6 | """ 7 | 8 | import torch 9 | from torch.autograd import Variable 10 | import torch.nn.functional as F 11 | import torch.nn as nn 12 | from dataloader import read_bci_data 13 | import matplotlib.pyplot as plt 14 | from torch.utils.data import TensorDataset,DataLoader 15 | import numpy as np 16 | import pandas as pd 17 | import torch.optim as optim 18 | from torchsummary import summary 19 | import os 20 | import
argparse 21 | 22 | def plot_history(train_accuracy_history,test_accuracy_history,loss_history): 23 | 24 | plt.figure() 25 | 26 | plt.suptitle("Training Curve",fontsize=15) 27 | 28 | plt.subplot(1,2,1) 29 | plt.title("Accuracy Curve",fontsize=14) 30 | plt.plot(train_accuracy_history,c='r', label='Training') 31 | plt.plot(test_accuracy_history,c='b',label='Testing') 32 | plt.legend(loc='best') 33 | 34 | plt.xlabel("Epochs") 35 | 36 | plt.subplot(1,2,2) 37 | plt.title("Loss Curve",fontsize=14) 38 | plt.plot(loss_history,c='g') 39 | plt.xlabel("Epochs") 40 | 41 | plt.show() 42 | 43 | 44 | def testing(x_test,y_test,model,device,filepath): 45 | 46 | # model.load_state_dict(torch.load(filepath)) 47 | model.eval() 48 | with torch.no_grad(): 49 | model.cuda(0) 50 | n = x_test.shape[0] 51 | 52 | x_test = x_test.astype("float32") 53 | y_test = y_test.astype("float32").reshape(y_test.shape[0],) 54 | # y_test = y_test.astype("float32").reshape(y_test.shape[0],1) 55 | 56 | x_test, y_test = Variable(torch.from_numpy(x_test)),Variable(torch.from_numpy(y_test)) 57 | 58 | x_test,y_test = x_test.to(device),y_test.to(device) 59 | y_pred_test = model(x_test) 60 | 61 | # correct_test = (y_pred_test.ge(0.5) == y_test).sum().item() 62 | correct_test = (torch.max(y_pred_test,1)[1]==y_test).sum().item() 63 | test_accuracy = correct_test/n 64 | # print("testing accuracy:",correct/n) 65 | 66 | return test_accuracy 67 | 68 | parser = argparse.ArgumentParser() 69 | parser.add_argument('--epochs', type=int, default='700', help='training epochs') 70 | parser.add_argument('--learning_rate', type=float, default='1e-3', help='learning rate') 71 | parser.add_argument('--save_model', action='store_true', help='check if you want to save the model.') 72 | parser.add_argument('--save_csv', action='store_true', help='check if you want to save the training history.') 73 | opt = parser.parse_args() 74 | 75 | torch.manual_seed(1) # reproducible 76 | epochs = opt.epochs 77 | lr = opt.learning_rate 78 | 79 | filepath = os.path.abspath(os.path.dirname(__file__))+"\checkpoint\EEGNet_checkpoint_LeakyReLU.rar" 80 | filepath_csv = os.path.abspath(os.path.dirname(__file__))+"\history_csv\EEGNet_LeakyReLU.csv" 81 | 82 | min_loss=1 83 | max_train_accuracy = 0 84 | max_test_accuracy = 0 85 | 86 | device = torch.device("cuda:0") 87 | 88 | train_data, train_label, test_data, test_label = read_bci_data() 89 | 90 | n = train_data.shape[0] 91 | 92 | train_data = train_data.astype("float32") 93 | train_label = train_label.astype("float32").reshape(train_label.shape[0],) 94 | # train_label = train_label.astype("float32").reshape(train_label.shape[0],1) 95 | 96 | # train_data.shape = (1080,1,2,750) 97 | # train_label.shape = (1080,) 98 | 99 | # loader = DataLoader(TensorDataset(train_data,train_label),batch_size=8) 100 | x, y = Variable(torch.from_numpy(train_data)),Variable(torch.from_numpy(train_label)) 101 | y=torch.tensor(y, dtype=torch.long) 102 | 103 | class EEGNet_LeakyReLU(torch.nn.Module): 104 | def __init__(self, n_output): 105 | super(EEGNet_LeakyReLU, self).__init__() 106 | self.firstConv = nn.Sequential( 107 | nn.Conv2d(1, 16, kernel_size=(1,51), stride=(1,1), padding=(0,25),bias=False), 108 | nn.BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) 109 | ) 110 | self.depthwiseConv = nn.Sequential( 111 | nn.Conv2d(16, 32, kernel_size=(2,1), stride=(1,1), groups=8,bias=False), 112 | nn.BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True), 113 | nn.LeakyReLU(negative_slope=0.06), 114 |
nn.AvgPool2d(kernel_size=(1,4), stride=(1,4),padding=0), 115 | nn.Dropout(p=0.5) 116 | ) 117 | self.separableConv = nn.Sequential( 118 | nn.Conv2d(32, 32, kernel_size=(1,15), stride=(1,1), padding=(0,7),bias=False), 119 | nn.BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True), 120 | nn.LeakyReLU(negative_slope=0.06), 121 | nn.AvgPool2d(kernel_size=(1,8), stride=(1,8),padding=0), 122 | nn.Dropout(p=0.5) 123 | ) 124 | self.classify = nn.Sequential( 125 | nn.Flatten(), 126 | nn.Linear(736,n_output,bias=True) 127 | ) 128 | 129 | def forward(self, x): 130 | out = self.firstConv(x) 131 | out = self.depthwiseConv(out) 132 | out = self.separableConv(out) 133 | out = self.classify(out) 134 | return out 135 | 136 | model = EEGNet_LeakyReLU(n_output=2) 137 | print(model) 138 | criterion = nn.CrossEntropyLoss() 139 | 140 | # optimizer = optim.Adam(model.parameters(),lr = lr) 141 | optimizer = optim.RMSprop(model.parameters(),lr = lr, momentum = 0.6) 142 | # optimizer = optim.SGD(model.parameters(), lr=1, momentum=0.5, weight_decay=5e-4) 143 | scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[100,300,500], gamma=0.1) 144 | 145 | model.cuda(0) 146 | summary(model.cuda(),(1,2,750)) 147 | 148 | loss_history = [] 149 | train_accuracy_history = [] 150 | test_accuracy_history = [] 151 | 152 | for epoch in range(epochs): 153 | # for idx,(data,target) in enumerate(loader): 154 | model.train() 155 | x,y = x.to(device),y.to(device) 156 | y_pred = model(x) 157 | 158 | # print(y_pred.shape) 159 | # print(y.shape) 160 | 161 | # loss = F.mse_loss(y_pred, y) 162 | 163 | loss = criterion(y_pred, y) 164 | train_loss = loss.item() 165 | loss_history.append(train_loss) 166 | 167 | optimizer.zero_grad() 168 | loss.backward() 169 | optimizer.step() 170 | # scheduler.step() 171 | 172 | if epoch%1==0: 173 | 174 | # correct= (y_pred.ge(0.5) == y).sum().item() 175 | n = y.shape[0] 176 | correct = (torch.max(y_pred,1)[1]==y).sum().item() 177 | train_accuracy = correct / n 178 | train_accuracy_history.append(train_accuracy) 179 | 180 | # print("epochs:",epoch,"loss:",loss.item(),"Accuracy:",(correct / n),"Learning rate:",scheduler.get_last_lr()[0]) 181 | test_accuracy = testing(test_data,test_label,model,device,filepath) 182 | test_accuracy_history.append(test_accuracy) 183 | 184 | print("epochs:",epoch,"loss:",train_loss,"Training Accuracy:",train_accuracy,"Testing Accuracy:",test_accuracy,"Learning rate:",scheduler.get_last_lr()[0]) 185 | 186 | if train_loss<min_loss: 187 | min_loss = train_loss 188 | 189 | # save a checkpoint whenever the training accuracy improves 190 | if train_accuracy>max_train_accuracy: 191 | max_train_accuracy = train_accuracy 192 | if opt.save_model: 193 | torch.save(model.state_dict(), filepath) 194 | 195 | if test_accuracy>max_test_accuracy: 196 | max_test_accuracy = test_accuracy 197 | 198 | print("Max training accuracy:",max_train_accuracy,"Max testing accuracy:",max_test_accuracy,"Min loss:",min_loss) 199 | plot_history(train_accuracy_history,test_accuracy_history,loss_history) 200 | df = pd.DataFrame({"loss":loss_history,"train_accuracy_history":train_accuracy_history,"test_accuracy_history":test_accuracy_history}) 201 | if opt.save_csv: 202 | df.to_csv(filepath_csv,encoding="utf-8-sig") 203 | -------------------------------------------------------------------------------- /EEGNet_training_ReLU.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Fri Jul 9 17:01:35 2021 4 | 5 | @author: haoyuan 6 | """ 7 | 8 | import torch 9 | from torch.autograd import Variable 10 | import torch.nn.functional as F
11 | import torch.nn as nn 12 | from dataloader import read_bci_data 13 | import matplotlib.pyplot as plt 14 | from torch.utils.data import TensorDataset,DataLoader 15 | import numpy as np 16 | import torch.optim as optim 17 | import pandas as pd 18 | from torchsummary import summary 19 | import os 20 | import argparse 21 | 22 | def testing(x_test,y_test,model,device,filepath): 23 | 24 | # model.load_state_dict(torch.load(filepath)) 25 | model.eval() 26 | with torch.no_grad(): 27 | model.cuda(0) 28 | n = x_test.shape[0] 29 | 30 | x_test = x_test.astype("float32") 31 | y_test = y_test.astype("float32").reshape(y_test.shape[0],) 32 | 33 | x_test, y_test = Variable(torch.from_numpy(x_test)),Variable(torch.from_numpy(y_test)) 34 | 35 | x_test,y_test = x_test.to(device),y_test.to(device) 36 | y_pred_test = model(x_test) 37 | 38 | correct_test = (torch.max(y_pred_test,1)[1]==y_test).sum().item() 39 | test_accuracy = correct_test/n 40 | # print("testing accuracy:",correct/n) 41 | 42 | return test_accuracy 43 | 44 | torch.manual_seed(1) # reproducible 45 | 46 | parser = argparse.ArgumentParser() 47 | parser.add_argument('--epochs', type=int, default='700', help='training epochs') 48 | parser.add_argument('--learning_rate', type=float, default='1e-3', help='learning rate') 49 | parser.add_argument('--save_model', action='store_true', help='check if you want to save the model.') 50 | parser.add_argument('--save_csv', action='store_true', help='check if you want to save the training history.') 51 | opt = parser.parse_args() 52 | 53 | epochs = opt.epochs 54 | lr = opt.learning_rate 55 | 56 | filepath = os.path.abspath(os.path.dirname(__file__))+"\checkpoint\EEGNet_checkpoint_ReLU.rar" 57 | filepath_csv = os.path.abspath(os.path.dirname(__file__))+"\history_csv\EEGNet_ReLU.csv" 58 | 59 | min_loss=1 60 | max_accuracy = 0 61 | device = torch.device("cuda:0") 62 | 63 | train_data, train_label, test_data, test_label = read_bci_data() 64 | 65 | n = train_data.shape[0] 66 | 67 | train_data = train_data.astype("float32") 68 | train_label = train_label.astype("float32").reshape(train_label.shape[0],) 69 | 70 | # train_data.shape = (1080,1,2,750) 71 | # train_label.shape = (1080,) 72 | 73 | # loader = DataLoader(TensorDataset(train_data,train_label),batch_size=8) 74 | x, y = Variable(torch.from_numpy(train_data)),Variable(torch.from_numpy(train_label)) 75 | y=torch.tensor(y, dtype=torch.long) 76 | 77 | class EEGNet_ReLU(torch.nn.Module): 78 | def __init__(self, n_output): 79 | super(EEGNet_ReLU, self).__init__() 80 | self.firstConv = nn.Sequential( 81 | nn.Conv2d(1, 16, kernel_size=(1,51), stride=(1,1), padding=(0,25),bias=False), 82 | nn.BatchNorm2d(16, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True) 83 | ) 84 | self.depthwiseConv = nn.Sequential( 85 | nn.Conv2d(16, 32, kernel_size=(2,1), stride=(1,1), groups=8,bias=False), 86 | nn.BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True), 87 | nn.ReLU(), 88 | nn.AvgPool2d(kernel_size=(1,4), stride=(1,4),padding=0), 89 | nn.Dropout(p=0.35) 90 | ) 91 | self.separableConv = nn.Sequential( 92 | nn.Conv2d(32, 32, kernel_size=(1,15), stride=(1,1), padding=(0,7),bias=False), 93 | nn.BatchNorm2d(32, eps=1e-05, momentum=0.1, affine=True, track_running_stats=True), 94 | nn.ReLU(), 95 | nn.AvgPool2d(kernel_size=(1,8), stride=(1,8),padding=0), 96 | nn.Dropout(p=0.35) 97 | ) 98 | self.classify = nn.Sequential( 99 | nn.Flatten(), 100 | nn.Linear(736,n_output,bias=True) 101 | ) 102 | 103 | def forward(self, x): 104 | out = self.firstConv(x) 105 | out =
self.depthwiseConv(out) 106 | out = self.separableConv(out) 107 | out = self.classify(out) 108 | return out 109 | 110 | model = EEGNet_ReLU(n_output=2) 111 | print(model) 112 | criterion = nn.CrossEntropyLoss() 113 | 114 | # optimizer = optim.Adam(model.parameters(),lr = lr) 115 | optimizer = optim.RMSprop(model.parameters(),lr = lr, momentum = 0.2) 116 | # optimizer = optim.SGD(model.parameters(), lr=1, momentum=0.5, weight_decay=5e-4) 117 | scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[100,300,500], gamma=0.1) 118 | 119 | model.cuda(0) 120 | summary(model.cuda(),(1,2,750)) 121 | 122 | loss_history = [] 123 | train_accuracy_history = [] 124 | test_accuracy_history = [] 125 | 126 | for epoch in range(epochs): 127 | # for idx,(data,target) in enumerate(loader): 128 | model.train() 129 | x,y = x.to(device),y.to(device) 130 | y_pred = model(x) 131 | 132 | # print(y_pred.shape) 133 | # print(y.shape) 134 | 135 | # loss = F.mse_loss(y_pred, y) 136 | 137 | loss = criterion(y_pred, y) 138 | train_loss = loss.item() 139 | loss_history.append(train_loss) 140 | 141 | optimizer.zero_grad() 142 | loss.backward() 143 | optimizer.step() 144 | # scheduler.step() 145 | 146 | if epoch%1==0: 147 | 148 | # correct= (y_pred.ge(0.5) == y).sum().item() 149 | n = y.shape[0] 150 | correct = (torch.max(y_pred,1)[1]==y).sum().item() 151 | train_accuracy = correct / n 152 | train_accuracy_history.append(train_accuracy) 153 | 154 | # print("epochs:",epoch,"loss:",loss.item(),"Accuracy:",(correct / n),"Learning rate:",scheduler.get_last_lr()[0]) 155 | test_accuracy = testing(test_data,test_label,model,device,filepath) 156 | test_accuracy_history.append(test_accuracy) 157 | 158 | print("epochs:",epoch,"loss:",train_loss,"Training Accuracy:",train_accuracy,"Testing Accuracy:",test_accuracy,"Learning rate:",scheduler.get_last_lr()[0]) 159 | 160 | if train_loss<min_loss: 161 | min_loss = train_loss 162 | 163 | # save a checkpoint whenever the training accuracy improves 164 | if train_accuracy>max_accuracy: 165 | max_accuracy = train_accuracy 166 | if opt.save_model: 167 | torch.save(model.state_dict(), filepath) 168 | 169 | print("Max accuracy:",max_accuracy,"Min loss:",min_loss) 170 | df = pd.DataFrame({"loss":loss_history,"train_accuracy_history":train_accuracy_history,"test_accuracy_history":test_accuracy_history}) 171 | if opt.save_csv: 172 | df.to_csv(filepath_csv,encoding="utf-8-sig") 173 | 174 | -------------------------------------------------------------------------------- /Experiment Report.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/secondlevel/EEG-classification/b40e2da677fc59b7c809e4c8db323dcb00247105/Experiment Report.pdf -------------------------------------------------------------------------------- /Plot_History_Result.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import pandas as pd 3 | import numpy as np 4 | import argparse 5 | import os 6 | 7 | def plot_loss_curve(DeepConvNet_ELU,DeepConvNet_ReLU,DeepConvNet_LeakyReLU,EEGNet_ELU,EEGNet_ReLU,EEGNet_LeakyReLU): 8 | 9 | plt.plot(DeepConvNet_ELU['loss'], '-b', label='DeepConvNet_ELU') 10 | plt.plot(DeepConvNet_ReLU['loss'], '-g', label='DeepConvNet_ReLU') 11 | plt.plot(DeepConvNet_LeakyReLU['loss'], '-r', label='DeepConvNet_LeakyReLU') 12 | plt.plot(EEGNet_ELU['loss'], '-c', label='EEGNet_ELU') 13 | plt.plot(EEGNet_ReLU['loss'], '-m', label='EEGNet_ReLU') 14 | plt.plot(EEGNet_LeakyReLU['loss'], '-y', label='EEGNet_LeakyReLU') 15 | 16 | plt.xlabel("Epoch",fontsize=13) 17 | plt.legend(loc='best') 18 | 19 |
plt.ylabel("Loss Value",fontsize=13) 20 | plt.title("(Loss Curve)Activation function comparision(All)",fontsize=18) 21 | 22 | plt.show() 23 | return "loss圖繪製成功" 24 | 25 | def plot_EEGNet_accuracy_curve(DeepConvNet_ELU,DeepConvNet_ReLU,DeepConvNet_LeakyReLU,EEGNet_ELU,EEGNet_ReLU,EEGNet_LeakyReLU): 26 | 27 | plt.plot(np.array(EEGNet_ELU['train_accuracy_history'])*100, '-b', label='ELU_train') 28 | plt.plot(np.array(EEGNet_ReLU['train_accuracy_history'])*100, '-g', label='ReLU_train') 29 | plt.plot(np.array(EEGNet_LeakyReLU['train_accuracy_history'])*100, '-r', label='LeakyReLU_train') 30 | 31 | plt.plot(np.array(EEGNet_ELU['test_accuracy_history'])*100, '-c', label='ELU_test') 32 | plt.plot(np.array(EEGNet_ReLU['test_accuracy_history'])*100, '-m', label='ReLU_test') 33 | plt.plot(np.array(EEGNet_LeakyReLU['test_accuracy_history'])*100, '-y', label='LeakyReLU_test') 34 | 35 | plt.xlabel("Epoch",fontsize=13) 36 | plt.legend(loc='best') 37 | 38 | plt.ylabel("Accuracy(%)",fontsize=13) 39 | plt.title("Activation function comparision(EGGNet)",fontsize=18) 40 | 41 | plt.show() 42 | return "EEGNet Accuracy圖繪製成功" 43 | 44 | def plot_DeepConvNet_accuracy_curve(DeepConvNet_ELU,DeepConvNet_ReLU,DeepConvNet_LeakyReLU,EEGNet_ELU,EEGNet_ReLU,EEGNet_LeakyReLU): 45 | 46 | plt.plot(np.array(DeepConvNet_ELU['train_accuracy_history'])*100, '-b', label='ELU_train') 47 | plt.plot(np.array(DeepConvNet_ReLU['train_accuracy_history'])*100, '-g', label='ReLU_train') 48 | plt.plot(np.array(DeepConvNet_LeakyReLU['train_accuracy_history'])*100, '-r', label='LeakyReLU_train') 49 | 50 | plt.plot(np.array(DeepConvNet_ELU['test_accuracy_history'])*100, '-c', label='ELU_test') 51 | plt.plot(np.array(DeepConvNet_ReLU['test_accuracy_history'])*100, '-m', label='ReLU_test') 52 | plt.plot(np.array(DeepConvNet_LeakyReLU['test_accuracy_history'])*100, '-y', label='LeakyReLU_test') 53 | 54 | plt.xlabel("Epoch",fontsize=13) 55 | plt.legend(loc='best') 56 | 57 | plt.ylabel("Accuracy(%)",fontsize=13) 58 | plt.title("Activation function comparision(DeepConvNet)",fontsize=18) 59 | 60 | plt.show() 61 | return "DeepConvNet Accuracy圖繪製成功" 62 | 63 | if __name__ == "__main__": 64 | 65 | parser = argparse.ArgumentParser() 66 | parser.add_argument('--plot_loss_curve', action='store_true', help='check if you want to plot the loss curve.') 67 | parser.add_argument('--plot_EEGNet_accuracy_curve', action='store_true', help='check if you want to plot the EEGNet accuracy curve.') 68 | parser.add_argument('--plot_DeepConvNet_accuracy_curve', action='store_true', help='check if you want to plot the DeepConvNet accuracy curve.') 69 | opt = parser.parse_args() 70 | 71 | path = os.path.abspath(os.path.dirname(__file__))+"/history_csv/" 72 | 73 | DeepConvNet_ELU = pd.DataFrame(pd.read_csv(path+"DeepConvNet_ELU.csv",encoding="utf-8-sig")) 74 | DeepConvNet_ReLU = pd.DataFrame(pd.read_csv(path+"DeepConvNet_ReLU.csv",encoding="utf-8-sig")) 75 | DeepConvNet_LeakyReLU = pd.DataFrame(pd.read_csv(path+"DeepConvNet_LeakyReLU.csv",encoding="utf-8-sig")) 76 | 77 | EEGNet_ELU = pd.DataFrame(pd.read_csv(path+"EEGNet_ELU.csv",encoding="utf-8-sig")) 78 | EEGNet_ReLU = pd.DataFrame(pd.read_csv(path+"EEGNet_ReLU.csv",encoding="utf-8-sig")) 79 | EEGNet_LeakyReLU = pd.DataFrame(pd.read_csv(path+"EEGNet_LeakyReLU.csv",encoding="utf-8-sig")) 80 | 81 | if opt.plot_loss_curve: 82 | plot_loss_curve(DeepConvNet_ELU,DeepConvNet_ReLU,DeepConvNet_LeakyReLU,EEGNet_ELU,EEGNet_ReLU,EEGNet_LeakyReLU) 83 | 84 | if opt.plot_EEGNet_accuracy_curve: 85 | 
plot_EEGNet_accuracy_curve(DeepConvNet_ELU,DeepConvNet_ReLU,DeepConvNet_LeakyReLU,EEGNet_ELU,EEGNet_ReLU,EEGNet_LeakyReLU) 86 | 87 | if opt.plot_DeepConvNet_accuracy_curve: 88 | plot_DeepConvNet_accuracy_curve(DeepConvNet_ELU,DeepConvNet_ReLU,DeepConvNet_LeakyReLU,EEGNet_ELU,EEGNet_ReLU,EEGNet_LeakyReLU) 89 | 90 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # EEG-classification (Deep Learning and Practice homework 2) 2 | The task is to classify BCI competition datasets (EEG signals) using EEGNet and DeepConvNet with different activation functions. I built EEGNet and DeepConvNet using **PyTorch**. 3 | 4 | You can find a detailed introduction and the experimental results in this [link](https://github.com/secondlevel/EEG-classification/blob/main/Experiment%20Report.pdf). 5 | 6 |
7 | 8 | 9 |
10 | 11 | ## Hardware 12 | Operating System: Windows 10 13 | 14 | CPU: Intel(R) Core(TM) i7-6700 CPU @ 3.40GHz 15 | 16 | GPU: NVIDIA GeForce GTX TITAN X 17 | 18 | ## Requirement 19 | 20 | In this work, you can use either of the following two options to build the environment. 21 | 22 | ### First option (recommended) 23 | ```bash 24 | $ conda env create -f environment.yml 25 | ``` 26 | 27 | ### Second option 28 | ```bash= 29 | $ conda create --name Summer python=3.8 -y 30 | $ conda activate Summer 31 | $ conda install pytorch==1.7.1 torchvision==0.8.2 torchaudio==0.7.2 cudatoolkit=10.2 -c pytorch 32 | $ conda install numpy 33 | $ conda install matplotlib -y 34 | $ conda install pandas -y 35 | $ pip install torchsummary 36 | ``` 37 | 38 | ## Model Architecture 39 | 40 | The model architectures, each combined with a different activation function, are in the **ALL_model.py** file; a short shape sketch follows each diagram below. 41 | 42 | - ### EEGNet 43 | 44 |
45 | *(EEGNet architecture diagram)* 46 |
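To make the EEGNet diagram concrete, here is a minimal shape walk-through (a sketch, assuming **ALL_model.py** is on the import path; the ReLU and LeakyReLU variants produce the same shapes):

```python=
import torch
from ALL_model import EEGNet_ELU

model = EEGNet_ELU(n_output=2).eval()
x = torch.randn(4, 1, 2, 750)      # [batch, 1, EEG channels, time points]
h = model.firstConv(x)             # (4, 16, 2, 750): temporal conv, length kept by padding=(0,25)
h = model.depthwiseConv(h)         # (4, 32, 1, 187): spatial conv over the 2 channels, then AvgPool2d(1,4)
h = model.separableConv(h)         # (4, 32, 1, 23): separable conv, then AvgPool2d(1,8)
out = model.classify(h)            # (4, 2): flatten 32*1*23 = 736 features -> nn.Linear(736, 2)
print(out.shape)                   # torch.Size([4, 2])
```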
47 | 48 | - ### DeepConvNet 49 | 50 |
51 | *(DeepConvNet architecture diagram)* 52 |
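The DeepConvNet variants stack four convolution/pooling blocks before the classifier. A minimal sketch of the resulting shapes (again assuming **ALL_model.py** is importable):

```python=
import torch
from ALL_model import DeepConvNet_ELU

model = DeepConvNet_ELU(n_output=2).eval()
x = torch.randn(4, 1, 2, 750)
# the four conv/pool stages shrink the time axis 750 -> 373 -> 184 -> 90 -> 43,
# so the flatten layer sees 200 * 1 * 43 = 8600 features, matching nn.Linear(8600, n_output)
print(model(x).shape)              # torch.Size([4, 2])
```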
53 | 54 | ## Data Description 55 | 56 | In this project, the training and testing data were provided by [**BCI Competition III – IIIb**](http://www.bbci.de/competition/iii/desc_IIIb.pdf). 57 | 58 | ```bash= 59 | Data: [Batch Size, 1, 2, 750] 60 | Label: [Batch Size, 2] 61 | ``` 62 | 63 | You can use the read_bci_data function in the **dataloader.py** file to obtain the training data, training labels, testing data, and testing labels. 64 | 65 | ```python= 66 | train_data, train_label, test_data, test_label = read_bci_data() 67 | ``` 68 | 69 |
70 | *(illustration of the loaded BCI data)* 71 |
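For reference, this is how the training scripts in this repository turn those arrays into tensors (a sketch of the shared preprocessing; the label array is flattened and cast to int64 for `nn.CrossEntropyLoss`):

```python=
import torch
from dataloader import read_bci_data

train_data, train_label, test_data, test_label = read_bci_data()

x = torch.from_numpy(train_data.astype("float32"))                       # (1080, 1, 2, 750)
y = torch.from_numpy(train_label.astype("float32").reshape(-1)).long()  # (1080,)
print(x.shape, y.shape)
```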
72 | 73 | ## Performance Metrics 74 | 75 | In this project, **Mean Squared Error** and **Crossentropy** are the loss functions, and **Accuracy** is the classification metric. 76 | 77 | ### Mean Squared Error (MSE) 78 | 79 |
80 | $$\mathrm{MSE}=\frac{1}{N}\sum_{j=1}^{N}\left(y_j-\hat{y}_j\right)^2$$ 81 |
82 | 83 | - **y_j:** ground-truth value 84 | - **y_hat:** predicted value from the regression model 85 | - **N:** number of data points 86 | 87 | --- 88 | 89 | ### Crossentropy 90 | 91 |
92 | $$\mathrm{CE}=-\sum_{c=1}^{M} y_{o,c}\log(p_{o,c})$$ 93 |
94 | 95 | - M: number of classes 96 | - log: the natural log 97 | - y: binary indicator (0 or 1) if class label c is the correct classification for observation o 98 | - p: predicted probability observation o is of class c 99 | 100 | --- 101 | 102 | ### Accuracy 103 | 104 |
105 | $$\mathrm{Accuracy}=\frac{TP+TN}{TP+TN+FP+FN}$$ 106 |
107 | 108 | - **True Positive(TP)** signifies how many positive class samples your model predicted correctly. 109 | - **True Negative(TN)** signifies how many negative class samples your model predicted correctly. 110 | - **False Positive(FP)** signifies how many negative class samples your model predicted incorrectly. This factor represents Type-I error in statistical nomenclature. This error positioning in the confusion matrix depends on the choice of the null hypothesis. 111 | - **False Negative(FN)** signifies how many positive class samples your model predicted incorrectly. This factor represents Type-II error in statistical nomenclature. This error positioning in the confusion matrix also depends on the choice of the null hypothesis. 112 | 113 | ## Training 114 | 115 | In the training step, six files are provided, each training a different model. 116 | 117 | Each file contains a different model architecture with a different activation function. In addition, you can configure the training parameters through the following argparse options, and use the following commands to train each method. 118 | 119 | Finally, you will get training results like the following: the first picture is for DeepConvNet, and the second is for EEGNet. 120 | 121 | You can find a detailed introduction and the experimental results in this [link](https://github.com/secondlevel/EEG-classification/blob/main/Experiment%20Report.pdf). 122 | 123 | ```bash= 124 | parser.add_argument('--epochs', type=int, default='700', help='training epochs') 125 | parser.add_argument('--learning_rate', type=float, default='1e-3', help='learning rate') 126 | parser.add_argument('--save_model', action='store_true', help='check if you want to save the model.') 127 | parser.add_argument('--save_csv', action='store_true', help='check if you want to save the training history.') 128 | ``` 129 | 130 | ### DeepConvNet with ELU 131 | 132 | ```bash= 133 | python DeepConvNet_training_ELU.py --epochs 3000 --learning_rate 1e-3 --save_model --save_csv 134 | ``` 135 | 136 | ### DeepConvNet with LeakyReLU 137 | 138 | ```bash= 139 | python DeepConvNet_training_LeakyReLU.py --epochs 3000 --learning_rate 1e-3 --save_model --save_csv 140 | ``` 141 | 142 | ### DeepConvNet with ReLU 143 | 144 | ```bash= 145 | python DeepConvNet_training_ReLU.py --epochs 3000 --learning_rate 1e-3 --save_model --save_csv 146 | ``` 147 | 148 | ### EEGNet with ELU 149 | 150 | ```bash= 151 | python EEGNet_training_ELU.py --epochs 700 --learning_rate 1e-3 --save_model --save_csv 152 | ``` 153 | 154 | ### EEGNet with LeakyReLU 155 | 156 | ```bash= 157 | python EEGNet_training_LeakyReLU.py --epochs 700 --learning_rate 1e-3 --save_model --save_csv 158 | ``` 159 | 160 | ### EEGNet with ReLU 161 | 162 | ```bash= 163 | python EEGNet_training_ReLU.py --epochs 700 --learning_rate 1e-3 --save_model --save_csv 164 | ``` 165 | 166 | ### DeepConvNet training curve 167 | 168 |

*(DeepConvNet training-curve figure omitted; see the [Experiment Report](https://github.com/secondlevel/EEG-classification/blob/main/Experiment%20Report.pdf) for the full plots.)*

### EEGNet training curve

*(EEGNet training-curve figure omitted; see the [Experiment Report](https://github.com/secondlevel/EEG-classification/blob/main/Experiment%20Report.pdf) for the full plots.)*
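
Both figures can be regenerated from the per-epoch logs in [history_csv](https://github.com/secondlevel/EEG-classification/tree/main/history_csv). A minimal sketch for the EEGNet comparison (presumably close to what `Plot_History_Result.py` does; the column names are taken from the CSV headers):

```python=
import pandas as pd
import matplotlib.pyplot as plt

# Plot train/test accuracy per epoch for each activation function.
for activation in ['ReLU', 'LeakyReLU', 'ELU']:
    history = pd.read_csv(f'history_csv/EEGNet_{activation}.csv', index_col=0)
    plt.plot(history['train_accuracy_history'] * 100, label=f'{activation}_train')
    plt.plot(history['test_accuracy_history'] * 100, label=f'{activation}_test')

plt.xlabel('Epoch')
plt.ylabel('Accuracy (%)')
plt.title('Activation function comparison (EEGNet)')
plt.legend()
plt.show()
```

Swapping `EEGNet` for `DeepConvNet` in the filename reproduces the DeepConvNet figure.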

## Testing

You can display the testing results of the different models and activation functions by using the following command.
The model checkpoints are in the [**checkpoint**](https://github.com/secondlevel/EEG-classification/tree/main/checkpoint) directory.

The detailed experimental results are in the [link](https://github.com/secondlevel/EEG-classification/blob/main/Experiment%20Report.pdf).

```bash=
python model_testing.py
```

Then you will get the best results, as shown below; each value is a testing accuracy.

|             | ReLU      | LeakyReLU | ELU       |
|-------------|-----------|-----------|-----------|
| EEGNet      | 87.1296 % | 88.2407 % | 87.2222 % |
| DeepConvNet | 85.4630 % | 84.0741 % | 83.7963 % |

## Reference

- https://arxiv.org/abs/1611.08024
- https://arxiv.org/pdf/1703.05051.pdf
- https://jmlr.org/papers/volume15/srivastava14a/srivastava14a.pdf
- https://reurl.cc/QjLZnM
- https://reurl.cc/k71a5L
- https://zhuanlan.zhihu.com/p/35709485
-------------------------------------------------------------------------------- /S4b_test.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/secondlevel/EEG-classification/b40e2da677fc59b7c809e4c8db323dcb00247105/S4b_test.npz -------------------------------------------------------------------------------- /S4b_train.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/secondlevel/EEG-classification/b40e2da677fc59b7c809e4c8db323dcb00247105/S4b_train.npz -------------------------------------------------------------------------------- /X11b_test.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/secondlevel/EEG-classification/b40e2da677fc59b7c809e4c8db323dcb00247105/X11b_test.npz -------------------------------------------------------------------------------- /X11b_train.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/secondlevel/EEG-classification/b40e2da677fc59b7c809e4c8db323dcb00247105/X11b_train.npz -------------------------------------------------------------------------------- /checkpoint/DeepConvNet_checkpoint_ELU.rar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/secondlevel/EEG-classification/b40e2da677fc59b7c809e4c8db323dcb00247105/checkpoint/DeepConvNet_checkpoint_ELU.rar -------------------------------------------------------------------------------- /checkpoint/DeepConvNet_checkpoint_ELU訓練參數.txt: -------------------------------------------------------------------------------- 1 | epochs=3000 2 | lr=1e-3 3 | optimizers=rmsprop(momentum=0.9,weight_decay=1e-3) 4 | lossfunction = crossentropy 5 | loss = 0.020115818828344345 6 | testing accuracy = 83.79629629629629% -------------------------------------------------------------------------------- /checkpoint/DeepConvNet_checkpoint_LeakyReLU.rar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/secondlevel/EEG-classification/b40e2da677fc59b7c809e4c8db323dcb00247105/checkpoint/DeepConvNet_checkpoint_LeakyReLU.rar --------------------------------------------------------------------------------
/checkpoint/DeepConvNet_checkpoint_LeakyReLU訓練參數.txt: -------------------------------------------------------------------------------- 1 | epochs=3000 2 | lr=1e-3 3 | optimizers=rmsprop(momentum=0.9,weight_decay=1e-3) 4 | lossfunction = crossentropy 5 | loss = 0.05809950828552246 6 | testing accuracy = 84.07% -------------------------------------------------------------------------------- /checkpoint/DeepConvNet_checkpoint_ReLU.rar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/secondlevel/EEG-classification/b40e2da677fc59b7c809e4c8db323dcb00247105/checkpoint/DeepConvNet_checkpoint_ReLU.rar -------------------------------------------------------------------------------- /checkpoint/DeepConvNet_checkpoint_ReLU訓練參數.txt: -------------------------------------------------------------------------------- 1 | epochs=3000 2 | lr=1e-3 3 | optimizers=rmsprop(momentum=0.9,weight_decay=1e-3) 4 | lossfunction = crossentropy 5 | loss = 0.08539348095655441 6 | testing accuracy = 85.46% -------------------------------------------------------------------------------- /checkpoint/EEGNet_checkpoint_ELU.rar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/secondlevel/EEG-classification/b40e2da677fc59b7c809e4c8db323dcb00247105/checkpoint/EEGNet_checkpoint_ELU.rar -------------------------------------------------------------------------------- /checkpoint/EEGNet_checkpoint_ELU訓練參數.txt: -------------------------------------------------------------------------------- 1 | epochs=700 2 | lr=1e-3 3 | optimizers=rmsprop(momentum=0.6) 4 | lossfunction = crossentropy 5 | loss = 0.038 6 | testing accuracy = 87.222% -------------------------------------------------------------------------------- /checkpoint/EEGNet_checkpoint_LeakyReLU.rar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/secondlevel/EEG-classification/b40e2da677fc59b7c809e4c8db323dcb00247105/checkpoint/EEGNet_checkpoint_LeakyReLU.rar -------------------------------------------------------------------------------- /checkpoint/EEGNet_checkpoint_LeakyReLU訓練參數.txt: -------------------------------------------------------------------------------- 1 | epochs=700 2 | lr=1e-3 3 | optimizers=rmsprop(momentum=0.6) 4 | lossfunction = crossentropy 5 | loss = 0.03 6 | testing accuracy = 88.24% -------------------------------------------------------------------------------- /checkpoint/EEGNet_checkpoint_ReLU.rar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/secondlevel/EEG-classification/b40e2da677fc59b7c809e4c8db323dcb00247105/checkpoint/EEGNet_checkpoint_ReLU.rar -------------------------------------------------------------------------------- /checkpoint/EEGNet_checkpoint_ReLU訓練參數.txt: -------------------------------------------------------------------------------- 1 | epochs=750 2 | lr=1e-3 3 | optimizers=rmsprop(momentum=0.2) 4 | lossfunction = crossentropy 5 | loss = 0.05 6 | testing accuracy = 87.12% -------------------------------------------------------------------------------- /dataloader.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | def read_bci_data(): 4 | S4b_train = np.load('S4b_train.npz') 5 | X11b_train = np.load('X11b_train.npz') 6 | S4b_test = np.load('S4b_test.npz') 7 | X11b_test = np.load('X11b_test.npz') 8 | 9 |
train_data = np.concatenate((S4b_train['signal'], X11b_train['signal']), axis=0) 10 | train_label = np.concatenate((S4b_train['label'], X11b_train['label']), axis=0) 11 | test_data = np.concatenate((S4b_test['signal'], X11b_test['signal']), axis=0) 12 | test_label = np.concatenate((S4b_test['label'], X11b_test['label']), axis=0) 13 | 14 | train_label = train_label - 1 15 | test_label = test_label -1 16 | train_data = np.transpose(np.expand_dims(train_data, axis=1), (0, 1, 3, 2)) 17 | test_data = np.transpose(np.expand_dims(test_data, axis=1), (0, 1, 3, 2)) 18 | 19 | mask = np.where(np.isnan(train_data)) 20 | train_data[mask] = np.nanmean(train_data) 21 | 22 | mask = np.where(np.isnan(test_data)) 23 | test_data[mask] = np.nanmean(test_data) 24 | 25 | # print(train_data.shape, train_label.shape, test_data.shape, test_label.shape) 26 | 27 | return train_data, train_label, test_data, test_label 28 | 29 | if __name__ == "__main__": 30 | read_bci_data() 31 | -------------------------------------------------------------------------------- /history_csv/EEGNet_ELU.csv: -------------------------------------------------------------------------------- 1 | ,loss,train_accuracy_history,test_accuracy_history 2 | 0,0.7245604991912842,0.4898148148148148,0.5907407407407408 3 | 1,0.7394616007804871,0.5666666666666667,0.5 4 | 2,1.8657777309417725,0.5,0.5425925925925926 5 | 3,0.8832643628120422,0.5694444444444444,0.6574074074074074 6 | 4,0.7016125917434692,0.6435185185185185,0.6166666666666667 7 | 5,0.7569069266319275,0.6259259259259259,0.6657407407407407 8 | 6,0.5841464400291443,0.7046296296296296,0.6925925925925925 9 | 7,0.5953238606452942,0.6981481481481482,0.687962962962963 10 | 8,0.5517401695251465,0.7111111111111111,0.6814814814814815 11 | 9,0.520121693611145,0.7157407407407408,0.6768518518518518 12 | 10,0.5173141956329346,0.7287037037037037,0.6805555555555556 13 | 11,0.5044800639152527,0.7388888888888889,0.6972222222222222 14 | 12,0.5078418850898743,0.7277777777777777,0.7 15 | 13,0.5079275369644165,0.7351851851851852,0.7129629629629629 16 | 14,0.49283576011657715,0.7444444444444445,0.712037037037037 17 | 15,0.4822392761707306,0.7546296296296297,0.7064814814814815 18 | 16,0.48433974385261536,0.7537037037037037,0.7092592592592593 19 | 17,0.4763529598712921,0.7620370370370371,0.7148148148148148 20 | 18,0.4725523889064789,0.7638888888888888,0.7185185185185186 21 | 19,0.4775543808937073,0.7564814814814815,0.7175925925925926 22 | 20,0.46935325860977173,0.7722222222222223,0.7212962962962963 23 | 21,0.45733174681663513,0.7731481481481481,0.725 24 | 22,0.4586886465549469,0.7712962962962963,0.7277777777777777 25 | 23,0.4580053985118866,0.7777777777777778,0.7305555555555555 26 | 24,0.47083550691604614,0.7555555555555555,0.7231481481481481 27 | 25,0.4552864134311676,0.7759259259259259,0.7277777777777777 28 | 26,0.45908427238464355,0.7842592592592592,0.7287037037037037 29 | 27,0.4639139473438263,0.7759259259259259,0.7296296296296296 30 | 28,0.4529997408390045,0.7768518518518519,0.7268518518518519 31 | 29,0.44324323534965515,0.7796296296296297,0.7277777777777777 32 | 30,0.4549542963504791,0.774074074074074,0.7296296296296296 33 | 31,0.4449424147605896,0.7898148148148149,0.737037037037037 34 | 32,0.4485025405883789,0.7722222222222223,0.737037037037037 35 | 33,0.44938144087791443,0.7685185185185185,0.7361111111111112 36 | 34,0.44854286313056946,0.7731481481481481,0.7407407407407407 37 | 35,0.4359537661075592,0.7888888888888889,0.7305555555555555 38 | 36,0.43383482098579407,0.7935185185185185,0.7268518518518519 39 | 
37,0.435693621635437,0.7981481481481482,0.7314814814814815 40 | 38,0.4239104390144348,0.8101851851851852,0.7425925925925926 41 | 39,0.43848058581352234,0.7833333333333333,0.7444444444444445 42 | 40,0.4283924996852875,0.7953703703703704,0.7425925925925926 43 | 41,0.4300389587879181,0.7962962962962963,0.7435185185185185 44 | 42,0.41762080788612366,0.8018518518518518,0.7416666666666667 45 | 43,0.4247076213359833,0.7944444444444444,0.7444444444444445 46 | 44,0.4242200553417206,0.8037037037037037,0.7398148148148148 47 | 45,0.43288201093673706,0.7898148148148149,0.7481481481481481 48 | 46,0.41504254937171936,0.7898148148148149,0.7472222222222222 49 | 47,0.41763195395469666,0.787962962962963,0.7509259259259259 50 | 48,0.41262108087539673,0.8055555555555556,0.7509259259259259 51 | 49,0.38747695088386536,0.8277777777777777,0.7472222222222222 52 | 50,0.40907347202301025,0.812962962962963,0.7537037037037037 53 | 51,0.3910885453224182,0.8185185185185185,0.7481481481481481 54 | 52,0.3937612771987915,0.8222222222222222,0.7481481481481481 55 | 53,0.3918972313404083,0.8185185185185185,0.75 56 | 54,0.39838966727256775,0.8111111111111111,0.7462962962962963 57 | 55,0.39783260226249695,0.8148148148148148,0.7518518518518519 58 | 56,0.38832950592041016,0.8240740740740741,0.7574074074074074 59 | 57,0.3848109841346741,0.8296296296296296,0.7564814814814815 60 | 58,0.36966952681541443,0.8388888888888889,0.7731481481481481 61 | 59,0.3734446167945862,0.8314814814814815,0.7712962962962963 62 | 60,0.37962087988853455,0.8296296296296296,0.7583333333333333 63 | 61,0.38462474942207336,0.8222222222222222,0.7796296296296297 64 | 62,0.3458385169506073,0.8462962962962963,0.7777777777777778 65 | 63,0.3471059799194336,0.8324074074074074,0.7851851851851852 66 | 64,0.36827370524406433,0.825925925925926,0.7907407407407407 67 | 65,0.35417288541793823,0.8416666666666667,0.7870370370370371 68 | 66,0.3494899868965149,0.8435185185185186,0.7851851851851852 69 | 67,0.33770883083343506,0.8592592592592593,0.7925925925925926 70 | 68,0.3338605761528015,0.8527777777777777,0.7925925925925926 71 | 69,0.3388868272304535,0.8555555555555555,0.7981481481481482 72 | 70,0.3234059512615204,0.850925925925926,0.7972222222222223 73 | 71,0.3345869779586792,0.85,0.7981481481481482 74 | 72,0.32115450501441956,0.8722222222222222,0.7962962962962963 75 | 73,0.3403162360191345,0.8537037037037037,0.8055555555555556 76 | 74,0.32076188921928406,0.875,0.8018518518518518 77 | 75,0.3158367872238159,0.8657407407407407,0.8046296296296296 78 | 76,0.3074434995651245,0.8777777777777778,0.8037037037037037 79 | 77,0.3299827575683594,0.8472222222222222,0.8074074074074075 80 | 78,0.3059634566307068,0.875,0.812962962962963 81 | 79,0.3007813096046448,0.8740740740740741,0.8092592592592592 82 | 80,0.29505497217178345,0.8740740740740741,0.8157407407407408 83 | 81,0.2963674068450928,0.8777777777777778,0.8055555555555556 84 | 82,0.29721173644065857,0.8787037037037037,0.8138888888888889 85 | 83,0.28415486216545105,0.8888888888888888,0.8101851851851852 86 | 84,0.2929135859012604,0.8787037037037037,0.8138888888888889 87 | 85,0.28258994221687317,0.8861111111111111,0.8175925925925925 88 | 86,0.286893367767334,0.8851851851851852,0.8222222222222222 89 | 87,0.2833928167819977,0.8731481481481481,0.8194444444444444 90 | 88,0.2733739912509918,0.8870370370370371,0.8185185185185185 91 | 89,0.28045234084129333,0.8851851851851852,0.8175925925925925 92 | 90,0.2771933972835541,0.8925925925925926,0.8175925925925925 93 | 91,0.2723783552646637,0.8888888888888888,0.8175925925925925 94 | 
92,0.2770446538925171,0.8861111111111111,0.8203703703703704 95 | 93,0.26447993516921997,0.887962962962963,0.8222222222222222 96 | 94,0.2692067325115204,0.899074074074074,0.8212962962962963 97 | 95,0.25903114676475525,0.8981481481481481,0.8351851851851851 98 | 96,0.2530732750892639,0.9074074074074074,0.8231481481481482 99 | 97,0.24962489306926727,0.9046296296296297,0.8333333333333334 100 | 98,0.25460848212242126,0.9046296296296297,0.8333333333333334 101 | 99,0.23809464275836945,0.9074074074074074,0.8166666666666667 102 | 100,0.25696536898612976,0.8870370370370371,0.8351851851851851 103 | 101,0.2433365434408188,0.9083333333333333,0.825925925925926 104 | 102,0.24227388203144073,0.9009259259259259,0.8268518518518518 105 | 103,0.24851274490356445,0.899074074074074,0.8351851851851851 106 | 104,0.2506431043148041,0.899074074074074,0.8379629629629629 107 | 105,0.23650702834129333,0.9101851851851852,0.837037037037037 108 | 106,0.23513944447040558,0.9074074074074074,0.8379629629629629 109 | 107,0.22135807573795319,0.9009259259259259,0.8324074074074074 110 | 108,0.23323631286621094,0.8953703703703704,0.8379629629629629 111 | 109,0.2401219755411148,0.9,0.8407407407407408 112 | 110,0.2112869769334793,0.9222222222222223,0.8453703703703703 113 | 111,0.2143135517835617,0.9101851851851852,0.8407407407407408 114 | 112,0.21797975897789001,0.9175925925925926,0.8407407407407408 115 | 113,0.226470947265625,0.9111111111111111,0.85 116 | 114,0.22758324444293976,0.9055555555555556,0.8490740740740741 117 | 115,0.22604042291641235,0.9092592592592592,0.8472222222222222 118 | 116,0.230287104845047,0.9083333333333333,0.8462962962962963 119 | 117,0.21426844596862793,0.9175925925925926,0.8481481481481481 120 | 118,0.21980559825897217,0.9064814814814814,0.8453703703703703 121 | 119,0.19923388957977295,0.9296296296296296,0.8472222222222222 122 | 120,0.21129417419433594,0.9194444444444444,0.8453703703703703 123 | 121,0.21512848138809204,0.9138888888888889,0.8518518518518519 124 | 122,0.2143687903881073,0.9148148148148149,0.8444444444444444 125 | 123,0.21813951432704926,0.9101851851851852,0.8490740740740741 126 | 124,0.2037995606660843,0.9175925925925926,0.8425925925925926 127 | 125,0.2081487476825714,0.9138888888888889,0.8481481481481481 128 | 126,0.20790374279022217,0.9175925925925926,0.8435185185185186 129 | 127,0.20803380012512207,0.9277777777777778,0.8444444444444444 130 | 128,0.19705989956855774,0.9333333333333333,0.8490740740740741 131 | 129,0.19859883189201355,0.924074074074074,0.850925925925926 132 | 130,0.20222875475883484,0.9166666666666666,0.8537037037037037 133 | 131,0.20439653098583221,0.9166666666666666,0.8462962962962963 134 | 132,0.19817523658275604,0.9185185185185185,0.8416666666666667 135 | 133,0.19151762127876282,0.9342592592592592,0.85 136 | 134,0.1926925778388977,0.9185185185185185,0.8518518518518519 137 | 135,0.20437568426132202,0.9185185185185185,0.8203703703703704 138 | 136,0.2048736810684204,0.9194444444444444,0.8416666666666667 139 | 137,0.19042931497097015,0.924074074074074,0.8490740740740741 140 | 138,0.18028664588928223,0.9342592592592592,0.850925925925926 141 | 139,0.19271256029605865,0.9231481481481482,0.850925925925926 142 | 140,0.18953371047973633,0.9287037037037037,0.8564814814814815 143 | 141,0.18506377935409546,0.9277777777777778,0.8537037037037037 144 | 142,0.18271814286708832,0.9268518518518518,0.8537037037037037 145 | 143,0.19051136076450348,0.925,0.8333333333333334 146 | 144,0.1892772912979126,0.9259259259259259,0.8472222222222222 147 | 
145,0.183171808719635,0.9277777777777778,0.8611111111111112 148 | 146,0.19068825244903564,0.925,0.8379629629629629 149 | 147,0.18479128181934357,0.9231481481481482,0.8537037037037037 150 | 148,0.1767285168170929,0.924074074074074,0.8564814814814815 151 | 149,0.16890795528888702,0.9296296296296296,0.8453703703703703 152 | 150,0.1890818178653717,0.9203703703703704,0.8583333333333333 153 | 151,0.17364074289798737,0.9287037037037037,0.8546296296296296 154 | 152,0.17348363995552063,0.9314814814814815,0.8555555555555555 155 | 153,0.16780146956443787,0.9351851851851852,0.8537037037037037 156 | 154,0.1691170334815979,0.9324074074074075,0.8407407407407408 157 | 155,0.18132346868515015,0.9314814814814815,0.8574074074074074 158 | 156,0.16933442652225494,0.9342592592592592,0.8555555555555555 159 | 157,0.1814371794462204,0.9268518518518518,0.8555555555555555 160 | 158,0.17055487632751465,0.937037037037037,0.8435185185185186 161 | 159,0.17846541106700897,0.9305555555555556,0.862037037037037 162 | 160,0.15525305271148682,0.937037037037037,0.8444444444444444 163 | 161,0.16436602175235748,0.9416666666666667,0.8546296296296296 164 | 162,0.1599123328924179,0.937962962962963,0.850925925925926 165 | 163,0.17051686346530914,0.9314814814814815,0.8537037037037037 166 | 164,0.1658879518508911,0.9314814814814815,0.8555555555555555 167 | 165,0.17249098420143127,0.9287037037037037,0.8574074074074074 168 | 166,0.15039882063865662,0.9407407407407408,0.8388888888888889 169 | 167,0.1760253608226776,0.9296296296296296,0.8574074074074074 170 | 168,0.16444605588912964,0.9407407407407408,0.8518518518518519 171 | 169,0.15690147876739502,0.9425925925925925,0.8518518518518519 172 | 170,0.15736302733421326,0.9444444444444444,0.8537037037037037 173 | 171,0.1668911576271057,0.9361111111111111,0.8592592592592593 174 | 172,0.16189229488372803,0.9398148148148148,0.8518518518518519 175 | 173,0.161949023604393,0.9416666666666667,0.8416666666666667 176 | 174,0.15189018845558167,0.9407407407407408,0.8537037037037037 177 | 175,0.158053919672966,0.9444444444444444,0.8388888888888889 178 | 176,0.17238931357860565,0.9314814814814815,0.8583333333333333 179 | 177,0.1601613163948059,0.937037037037037,0.8481481481481481 180 | 178,0.15182949602603912,0.937962962962963,0.8592592592592593 181 | 179,0.13926666975021362,0.95,0.8537037037037037 182 | 180,0.1647292822599411,0.9324074074074075,0.8407407407407408 183 | 181,0.15560710430145264,0.9453703703703704,0.8648148148148148 184 | 182,0.14815357327461243,0.9361111111111111,0.8583333333333333 185 | 183,0.14885903894901276,0.9416666666666667,0.850925925925926 186 | 184,0.15992310643196106,0.9388888888888889,0.8601851851851852 187 | 185,0.14523357152938843,0.9472222222222222,0.8555555555555555 188 | 186,0.13770228624343872,0.9490740740740741,0.8453703703703703 189 | 187,0.15466322004795074,0.9425925925925925,0.862037037037037 190 | 188,0.13996461033821106,0.9444444444444444,0.862037037037037 191 | 189,0.13805246353149414,0.9435185185185185,0.8638888888888889 192 | 190,0.12395672500133514,0.9611111111111111,0.8490740740740741 193 | 191,0.15030457079410553,0.9407407407407408,0.8564814814814815 194 | 192,0.14033856987953186,0.95,0.8527777777777777 195 | 193,0.13966669142246246,0.9453703703703704,0.8601851851851852 196 | 194,0.13486720621585846,0.950925925925926,0.850925925925926 197 | 195,0.1518315076828003,0.9416666666666667,0.8527777777777777 198 | 196,0.14110885560512543,0.9472222222222222,0.8555555555555555 199 | 197,0.13282524049282074,0.95,0.8546296296296296 200 | 
198,0.14175327122211456,0.9453703703703704,0.8435185185185186 201 | 199,0.15227772295475006,0.9416666666666667,0.8527777777777777 202 | 200,0.1267467886209488,0.9518518518518518,0.8472222222222222 203 | 201,0.13296890258789062,0.9435185185185185,0.8398148148148148 204 | 202,0.15813615918159485,0.937037037037037,0.8481481481481481 205 | 203,0.14102157950401306,0.95,0.8342592592592593 206 | 204,0.12739041447639465,0.9574074074074074,0.8601851851851852 207 | 205,0.135765939950943,0.9462962962962963,0.8537037037037037 208 | 206,0.11625643819570541,0.9629629629629629,0.85 209 | 207,0.1337314397096634,0.950925925925926,0.850925925925926 210 | 208,0.13767190277576447,0.9453703703703704,0.8611111111111112 211 | 209,0.13540418446063995,0.9435185185185185,0.8518518518518519 212 | 210,0.13217224180698395,0.95,0.850925925925926 213 | 211,0.13436394929885864,0.95,0.8527777777777777 214 | 212,0.13929250836372375,0.9398148148148148,0.8546296296296296 215 | 213,0.1407334953546524,0.9472222222222222,0.8462962962962963 216 | 214,0.12687428295612335,0.9472222222222222,0.8546296296296296 217 | 215,0.1449219137430191,0.9388888888888889,0.8583333333333333 218 | 216,0.12232982367277145,0.9601851851851851,0.8277777777777777 219 | 217,0.12671446800231934,0.9555555555555556,0.8592592592592593 220 | 218,0.13636423647403717,0.9453703703703704,0.862037037037037 221 | 219,0.12756898999214172,0.9537037037037037,0.8583333333333333 222 | 220,0.1104523316025734,0.9638888888888889,0.8546296296296296 223 | 221,0.12787199020385742,0.9527777777777777,0.862037037037037 224 | 222,0.1388472467660904,0.9435185185185185,0.862037037037037 225 | 223,0.120479516685009,0.9611111111111111,0.8592592592592593 226 | 224,0.12854085862636566,0.9555555555555556,0.8574074074074074 227 | 225,0.11457309871912003,0.9629629629629629,0.8490740740740741 228 | 226,0.13090750575065613,0.9462962962962963,0.8472222222222222 229 | 227,0.1308913677930832,0.950925925925926,0.8481481481481481 230 | 228,0.12057078629732132,0.9490740740740741,0.8675925925925926 231 | 229,0.12970130145549774,0.9564814814814815,0.8444444444444444 232 | 230,0.12182606011629105,0.9546296296296296,0.8444444444444444 233 | 231,0.11537515372037888,0.9518518518518518,0.8574074074074074 234 | 232,0.11426419764757156,0.9555555555555556,0.8555555555555555 235 | 233,0.11561937630176544,0.9592592592592593,0.8601851851851852 236 | 234,0.13111859560012817,0.9490740740740741,0.8537037037037037 237 | 235,0.13179804384708405,0.9453703703703704,0.8462962962962963 238 | 236,0.12472948431968689,0.9527777777777777,0.85 239 | 237,0.11639127880334854,0.9564814814814815,0.8666666666666667 240 | 238,0.1175178736448288,0.9537037037037037,0.8657407407407407 241 | 239,0.11367343366146088,0.9592592592592593,0.8583333333333333 242 | 240,0.10654623061418533,0.9601851851851851,0.8611111111111112 243 | 241,0.10923119634389877,0.9555555555555556,0.8527777777777777 244 | 242,0.11948724091053009,0.9546296296296296,0.8527777777777777 245 | 243,0.11304201930761337,0.9638888888888889,0.8564814814814815 246 | 244,0.11884641647338867,0.9527777777777777,0.8546296296296296 247 | 245,0.10955800861120224,0.9574074074074074,0.8537037037037037 248 | 246,0.11448586732149124,0.9574074074074074,0.8564814814814815 249 | 247,0.10720578581094742,0.962037037037037,0.8435185185185186 250 | 248,0.11411537230014801,0.9546296296296296,0.8583333333333333 251 | 249,0.11709262430667877,0.950925925925926,0.8546296296296296 252 | 250,0.11567798256874084,0.9601851851851851,0.8601851851851852 253 | 
251,0.10329826176166534,0.9629629629629629,0.8648148148148148 254 | 252,0.11591266840696335,0.9583333333333334,0.8527777777777777 255 | 253,0.11616114526987076,0.9564814814814815,0.8537037037037037 256 | 254,0.10817550867795944,0.9657407407407408,0.8555555555555555 257 | 255,0.10351510345935822,0.9583333333333334,0.8592592592592593 258 | 256,0.11412810534238815,0.9611111111111111,0.862037037037037 259 | 257,0.1169385015964508,0.9537037037037037,0.8583333333333333 260 | 258,0.10508093982934952,0.9648148148148148,0.8388888888888889 261 | 259,0.11268555372953415,0.9537037037037037,0.8583333333333333 262 | 260,0.1284506469964981,0.9481481481481482,0.8453703703703703 263 | 261,0.11340666562318802,0.9675925925925926,0.8537037037037037 264 | 262,0.11652068048715591,0.9629629629629629,0.8462962962962963 265 | 263,0.10861478000879288,0.9601851851851851,0.8222222222222222 266 | 264,0.10017074644565582,0.9638888888888889,0.8462962962962963 267 | 265,0.1040724441409111,0.9657407407407408,0.8574074074074074 268 | 266,0.11385741829872131,0.9546296296296296,0.8537037037037037 269 | 267,0.0951615422964096,0.9648148148148148,0.8555555555555555 270 | 268,0.10242604464292526,0.9638888888888889,0.8592592592592593 271 | 269,0.11211863160133362,0.9592592592592593,0.8592592592592593 272 | 270,0.10705631971359253,0.9648148148148148,0.8462962962962963 273 | 271,0.1267222911119461,0.95,0.8537037037037037 274 | 272,0.0991411954164505,0.9675925925925926,0.8462962962962963 275 | 273,0.10288666933774948,0.9648148148148148,0.8518518518518519 276 | 274,0.08799448609352112,0.975,0.8546296296296296 277 | 275,0.10354797542095184,0.9638888888888889,0.8629629629629629 278 | 276,0.11168030649423599,0.9546296296296296,0.8601851851851852 279 | 277,0.10948580503463745,0.9592592592592593,0.8601851851851852 280 | 278,0.10143180936574936,0.9638888888888889,0.8564814814814815 281 | 279,0.10014872997999191,0.9601851851851851,0.8592592592592593 282 | 280,0.08689473569393158,0.9675925925925926,0.8583333333333333 283 | 281,0.10136283934116364,0.9675925925925926,0.8564814814814815 284 | 282,0.10393916815519333,0.9675925925925926,0.85 285 | 283,0.12088505923748016,0.9527777777777777,0.8601851851851852 286 | 284,0.10316045582294464,0.9601851851851851,0.850925925925926 287 | 285,0.08438648283481598,0.9694444444444444,0.8601851851851852 288 | 286,0.10678696632385254,0.9712962962962963,0.8527777777777777 289 | 287,0.10299597680568695,0.9574074074074074,0.8583333333333333 290 | 288,0.09809631109237671,0.9703703703703703,0.8546296296296296 291 | 289,0.10299345850944519,0.9601851851851851,0.8527777777777777 292 | 290,0.09445740282535553,0.9638888888888889,0.8518518518518519 293 | 291,0.105081707239151,0.9629629629629629,0.8564814814814815 294 | 292,0.09068255126476288,0.9685185185185186,0.85 295 | 293,0.10385487228631973,0.9685185185185186,0.85 296 | 294,0.08932065218687057,0.9629629629629629,0.8611111111111112 297 | 295,0.08586641401052475,0.9731481481481481,0.8574074074074074 298 | 296,0.10645603388547897,0.9601851851851851,0.8435185185185186 299 | 297,0.1079910546541214,0.9574074074074074,0.8416666666666667 300 | 298,0.10359487682580948,0.9583333333333334,0.8472222222222222 301 | 299,0.10765992850065231,0.9601851851851851,0.8537037037037037 302 | 300,0.1005689725279808,0.9601851851851851,0.8537037037037037 303 | 301,0.10874965786933899,0.9648148148148148,0.8555555555555555 304 | 302,0.09385106712579727,0.9657407407407408,0.8564814814814815 305 | 303,0.10304412245750427,0.9648148148148148,0.8537037037037037 306 | 
304,0.09954178333282471,0.9574074074074074,0.8490740740740741 307 | 305,0.09711870551109314,0.9583333333333334,0.8546296296296296 308 | 306,0.0972067341208458,0.962037037037037,0.8583333333333333 309 | 307,0.09158563613891602,0.9611111111111111,0.8555555555555555 310 | 308,0.09760290384292603,0.9694444444444444,0.8537037037037037 311 | 309,0.08242569118738174,0.9740740740740741,0.85 312 | 310,0.09367990493774414,0.962037037037037,0.8583333333333333 313 | 311,0.09480337798595428,0.9712962962962963,0.85 314 | 312,0.09845508635044098,0.9675925925925926,0.85 315 | 313,0.08560652285814285,0.9731481481481481,0.8481481481481481 316 | 314,0.0823500007390976,0.9694444444444444,0.8546296296296296 317 | 315,0.08684425801038742,0.9648148148148148,0.8481481481481481 318 | 316,0.09091924875974655,0.9648148148148148,0.8425925925925926 319 | 317,0.0890759602189064,0.9694444444444444,0.8481481481481481 320 | 318,0.08617915958166122,0.9712962962962963,0.8527777777777777 321 | 319,0.07859823107719421,0.9731481481481481,0.8583333333333333 322 | 320,0.08331239968538284,0.9777777777777777,0.85 323 | 321,0.09437932074069977,0.9731481481481481,0.8388888888888889 324 | 322,0.08260225504636765,0.9675925925925926,0.8416666666666667 325 | 323,0.0980432778596878,0.9574074074074074,0.85 326 | 324,0.10327781736850739,0.950925925925926,0.8453703703703703 327 | 325,0.08584314584732056,0.9694444444444444,0.8527777777777777 328 | 326,0.09585422277450562,0.9638888888888889,0.8537037037037037 329 | 327,0.09627267718315125,0.9675925925925926,0.8564814814814815 330 | 328,0.0971805676817894,0.9648148148148148,0.8555555555555555 331 | 329,0.08428021520376205,0.9731481481481481,0.8564814814814815 332 | 330,0.09627597779035568,0.9666666666666667,0.8537037037037037 333 | 331,0.08161123842000961,0.9731481481481481,0.8537037037037037 334 | 332,0.08118320256471634,0.975,0.8537037037037037 335 | 333,0.07878637313842773,0.9740740740740741,0.8546296296296296 336 | 334,0.09017397463321686,0.9657407407407408,0.8527777777777777 337 | 335,0.09752898663282394,0.9601851851851851,0.8490740740740741 338 | 336,0.0818510502576828,0.9703703703703703,0.8555555555555555 339 | 337,0.08101656287908554,0.9787037037037037,0.8555555555555555 340 | 338,0.08840081840753555,0.9666666666666667,0.8555555555555555 341 | 339,0.08048953115940094,0.9712962962962963,0.8574074074074074 342 | 340,0.0945034995675087,0.9638888888888889,0.8574074074074074 343 | 341,0.08545035868883133,0.9685185185185186,0.8564814814814815 344 | 342,0.07580909878015518,0.9777777777777777,0.8490740740740741 345 | 343,0.07372287660837173,0.9768518518518519,0.8555555555555555 346 | 344,0.0938429981470108,0.9675925925925926,0.8527777777777777 347 | 345,0.0830683559179306,0.9740740740740741,0.8564814814814815 348 | 346,0.09735969454050064,0.9648148148148148,0.850925925925926 349 | 347,0.07283150404691696,0.975925925925926,0.8592592592592593 350 | 348,0.06798463314771652,0.9768518518518519,0.862037037037037 351 | 349,0.0854719802737236,0.9648148148148148,0.8555555555555555 352 | 350,0.07508239150047302,0.9768518518518519,0.8564814814814815 353 | 351,0.08075644820928574,0.9685185185185186,0.8537037037037037 354 | 352,0.07604149729013443,0.9666666666666667,0.8564814814814815 355 | 353,0.08419837057590485,0.9648148148148148,0.8601851851851852 356 | 354,0.1001332700252533,0.9564814814814815,0.8518518518518519 357 | 355,0.08840464800596237,0.9685185185185186,0.8490740740740741 358 | 356,0.10127991437911987,0.9629629629629629,0.8583333333333333 359 | 
357,0.09176860004663467,0.962037037037037,0.8592592592592593 360 | 358,0.07989807426929474,0.9712962962962963,0.8546296296296296 361 | 359,0.08742611855268478,0.9657407407407408,0.8379629629629629 362 | 360,0.08433442562818527,0.9675925925925926,0.8527777777777777 363 | 361,0.09608801454305649,0.9648148148148148,0.8546296296296296 364 | 362,0.07793010771274567,0.975925925925926,0.85 365 | 363,0.08314892649650574,0.9712962962962963,0.8537037037037037 366 | 364,0.07908156514167786,0.9731481481481481,0.8555555555555555 367 | 365,0.07875585556030273,0.9685185185185186,0.8527777777777777 368 | 366,0.08155711740255356,0.9657407407407408,0.8398148148148148 369 | 367,0.08481626212596893,0.9648148148148148,0.8527777777777777 370 | 368,0.07297318428754807,0.975,0.8444444444444444 371 | 369,0.0840194895863533,0.9675925925925926,0.8527777777777777 372 | 370,0.07961268723011017,0.9703703703703703,0.85 373 | 371,0.08285937458276749,0.9694444444444444,0.8527777777777777 374 | 372,0.07926249504089355,0.975925925925926,0.8518518518518519 375 | 373,0.07920130342245102,0.9694444444444444,0.8537037037037037 376 | 374,0.07672947645187378,0.9740740740740741,0.8481481481481481 377 | 375,0.09182839095592499,0.9703703703703703,0.8527777777777777 378 | 376,0.07546696066856384,0.9722222222222222,0.8555555555555555 379 | 377,0.06828223913908005,0.9722222222222222,0.8611111111111112 380 | 378,0.08193132281303406,0.9712962962962963,0.8638888888888889 381 | 379,0.07597926259040833,0.975,0.8555555555555555 382 | 380,0.07157104462385178,0.9768518518518519,0.8527777777777777 383 | 381,0.06669415533542633,0.9777777777777777,0.8564814814814815 384 | 382,0.07655835896730423,0.9768518518518519,0.8564814814814815 385 | 383,0.07715851068496704,0.9694444444444444,0.8555555555555555 386 | 384,0.0804872214794159,0.9685185185185186,0.8574074074074074 387 | 385,0.06534487009048462,0.975925925925926,0.8555555555555555 388 | 386,0.06691722571849823,0.9777777777777777,0.8518518518518519 389 | 387,0.07188674807548523,0.975,0.85 390 | 388,0.07420088350772858,0.9796296296296296,0.8444444444444444 391 | 389,0.075544074177742,0.975925925925926,0.8518518518518519 392 | 390,0.06839101761579514,0.9796296296296296,0.8601851851851852 393 | 391,0.07013965398073196,0.9712962962962963,0.8555555555555555 394 | 392,0.09509436786174774,0.9740740740740741,0.8564814814814815 395 | 393,0.0717209130525589,0.9740740740740741,0.8564814814814815 396 | 394,0.07940791547298431,0.9675925925925926,0.8555555555555555 397 | 395,0.07258967310190201,0.9740740740740741,0.8462962962962963 398 | 396,0.06963631510734558,0.975,0.8527777777777777 399 | 397,0.07023101300001144,0.9712962962962963,0.8583333333333333 400 | 398,0.06728404015302658,0.9787037037037037,0.8574074074074074 401 | 399,0.0860927402973175,0.9666666666666667,0.8555555555555555 402 | 400,0.06958889216184616,0.975925925925926,0.8537037037037037 403 | 401,0.07046125829219818,0.9824074074074074,0.8555555555555555 404 | 402,0.06871412694454193,0.9796296296296296,0.8574074074074074 405 | 403,0.0696769431233406,0.9787037037037037,0.8490740740740741 406 | 404,0.0772365927696228,0.9712962962962963,0.8481481481481481 407 | 405,0.07243849337100983,0.9722222222222222,0.8555555555555555 408 | 406,0.07763110101222992,0.9740740740740741,0.8546296296296296 409 | 407,0.07785384356975555,0.9722222222222222,0.8537037037037037 410 | 408,0.06644858419895172,0.9777777777777777,0.8444444444444444 411 | 409,0.06070253625512123,0.9777777777777777,0.85 412 | 410,0.07447623461484909,0.9777777777777777,0.8388888888888889 413 | 
411,0.09593910723924637,0.9666666666666667,0.8546296296296296 414 | 412,0.07328814268112183,0.975925925925926,0.8546296296296296 415 | 413,0.08094322681427002,0.9703703703703703,0.8574074074074074 416 | 414,0.07818272709846497,0.9712962962962963,0.8490740740740741 417 | 415,0.08880261331796646,0.9657407407407408,0.8564814814814815 418 | 416,0.0770268589258194,0.9712962962962963,0.8537037037037037 419 | 417,0.09068086743354797,0.9666666666666667,0.8601851851851852 420 | 418,0.07905197888612747,0.962037037037037,0.8583333333333333 421 | 419,0.06915413588285446,0.975,0.8592592592592593 422 | 420,0.06581497937440872,0.975,0.8583333333333333 423 | 421,0.07584162056446075,0.9694444444444444,0.8629629629629629 424 | 422,0.06588303297758102,0.975,0.8611111111111112 425 | 423,0.0742960050702095,0.975925925925926,0.8583333333333333 426 | 424,0.07488588243722916,0.9712962962962963,0.8518518518518519 427 | 425,0.07699216157197952,0.9685185185185186,0.8425925925925926 428 | 426,0.060867972671985626,0.975925925925926,0.8527777777777777 429 | 427,0.05530679598450661,0.9805555555555555,0.8555555555555555 430 | 428,0.06517817080020905,0.975,0.8574074074074074 431 | 429,0.06468991935253143,0.9787037037037037,0.8555555555555555 432 | 430,0.06288137286901474,0.975925925925926,0.8546296296296296 433 | 431,0.07775162905454636,0.9722222222222222,0.8537037037037037 434 | 432,0.06815394014120102,0.975,0.8518518518518519 435 | 433,0.07087692618370056,0.9685185185185186,0.8555555555555555 436 | 434,0.07692843675613403,0.9731481481481481,0.8555555555555555 437 | 435,0.08172333240509033,0.9666666666666667,0.8481481481481481 438 | 436,0.06536830961704254,0.9796296296296296,0.8546296296296296 439 | 437,0.07097388058900833,0.9712962962962963,0.8555555555555555 440 | 438,0.0749165266752243,0.9712962962962963,0.8490740740740741 441 | 439,0.06623615324497223,0.9768518518518519,0.8481481481481481 442 | 440,0.0739126056432724,0.9731481481481481,0.8481481481481481 443 | 441,0.07425549626350403,0.9722222222222222,0.8490740740740741 444 | 442,0.0711694210767746,0.9796296296296296,0.850925925925926 445 | 443,0.07988610863685608,0.9703703703703703,0.8564814814814815 446 | 444,0.06505683064460754,0.975,0.8546296296296296 447 | 445,0.06771741062402725,0.975925925925926,0.8611111111111112 448 | 446,0.06492495536804199,0.975925925925926,0.8611111111111112 449 | 447,0.05788559094071388,0.9805555555555555,0.862037037037037 450 | 448,0.06111491098999977,0.975925925925926,0.8555555555555555 451 | 449,0.08164288848638535,0.9638888888888889,0.8462962962962963 452 | 450,0.06365910172462463,0.9796296296296296,0.8481481481481481 453 | 451,0.06431818753480911,0.9740740740740741,0.8462962962962963 454 | 452,0.07119520008563995,0.9722222222222222,0.8583333333333333 455 | 453,0.07012879103422165,0.9731481481481481,0.85 456 | 454,0.06035321578383446,0.975925925925926,0.8388888888888889 457 | 455,0.0755726769566536,0.975,0.8472222222222222 458 | 456,0.07967083156108856,0.9712962962962963,0.8592592592592593 459 | 457,0.07953145354986191,0.9777777777777777,0.8592592592592593 460 | 458,0.05358758568763733,0.9833333333333333,0.8527777777777777 461 | 459,0.06886377930641174,0.9722222222222222,0.8546296296296296 462 | 460,0.06916508078575134,0.9712962962962963,0.8472222222222222 463 | 461,0.08060373365879059,0.9731481481481481,0.8527777777777777 464 | 462,0.05803277716040611,0.9824074074074074,0.8583333333333333 465 | 463,0.06251808255910873,0.9768518518518519,0.8546296296296296 466 | 464,0.08682992309331894,0.9685185185185186,0.8546296296296296 467 | 
465,0.04562998563051224,0.9851851851851852,0.850925925925926 468 | 466,0.06823687255382538,0.9787037037037037,0.85 469 | 467,0.057850368320941925,0.9796296296296296,0.8546296296296296 470 | 468,0.05942967161536217,0.9824074074074074,0.8564814814814815 471 | 469,0.05776926502585411,0.9777777777777777,0.8564814814814815 472 | 470,0.07539654523134232,0.975,0.8574074074074074 473 | 471,0.06210095435380936,0.9796296296296296,0.8583333333333333 474 | 472,0.06797661632299423,0.975,0.8537037037037037 475 | 473,0.065189890563488,0.9777777777777777,0.8583333333333333 476 | 474,0.07001883536577225,0.9777777777777777,0.8564814814814815 477 | 475,0.07707233726978302,0.9675925925925926,0.8564814814814815 478 | 476,0.06427037715911865,0.9777777777777777,0.8546296296296296 479 | 477,0.05609414726495743,0.9777777777777777,0.8518518518518519 480 | 478,0.06502053886651993,0.9787037037037037,0.850925925925926 481 | 479,0.06346489489078522,0.975,0.8592592592592593 482 | 480,0.06736671179533005,0.975925925925926,0.8555555555555555 483 | 481,0.07400508970022202,0.975,0.8592592592592593 484 | 482,0.07628364861011505,0.9731481481481481,0.8490740740740741 485 | 483,0.062383659183979034,0.975,0.8462962962962963 486 | 484,0.057513296604156494,0.9796296296296296,0.8546296296296296 487 | 485,0.061588309705257416,0.975,0.8518518518518519 488 | 486,0.07388707995414734,0.9740740740740741,0.8537037037037037 489 | 487,0.06621503829956055,0.9768518518518519,0.85 490 | 488,0.05247166380286217,0.9796296296296296,0.8527777777777777 491 | 489,0.07183492183685303,0.9731481481481481,0.8444444444444444 492 | 490,0.06998326629400253,0.975,0.8583333333333333 493 | 491,0.06211882084608078,0.9731481481481481,0.8592592592592593 494 | 492,0.05803384259343147,0.9842592592592593,0.8537037037037037 495 | 493,0.061055563390254974,0.9777777777777777,0.8518518518518519 496 | 494,0.061644453555345535,0.9777777777777777,0.8537037037037037 497 | 495,0.04878193140029907,0.9851851851851852,0.850925925925926 498 | 496,0.07438692450523376,0.9740740740740741,0.862037037037037 499 | 497,0.06484925001859665,0.975,0.8546296296296296 500 | 498,0.0578124038875103,0.9796296296296296,0.8527777777777777 501 | 499,0.058043379336595535,0.9796296296296296,0.8537037037037037 502 | 500,0.054365094751119614,0.9814814814814815,0.8546296296296296 503 | 501,0.051760632544755936,0.9861111111111112,0.8472222222222222 504 | 502,0.055764976888895035,0.9777777777777777,0.8546296296296296 505 | 503,0.05131106078624725,0.9796296296296296,0.8555555555555555 506 | 504,0.04498286917805672,0.9851851851851852,0.8583333333333333 507 | 505,0.05561409518122673,0.9814814814814815,0.8537037037037037 508 | 506,0.06936069577932358,0.9703703703703703,0.8518518518518519 509 | 507,0.05269435793161392,0.9824074074074074,0.850925925925926 510 | 508,0.056778017431497574,0.9787037037037037,0.8518518518518519 511 | 509,0.06326235830783844,0.9768518518518519,0.8564814814814815 512 | 510,0.06643705070018768,0.9768518518518519,0.85 513 | 511,0.05426410585641861,0.9777777777777777,0.8601851851851852 514 | 512,0.06567002087831497,0.975925925925926,0.850925925925926 515 | 513,0.06239522248506546,0.9777777777777777,0.837037037037037 516 | 514,0.0733746811747551,0.9740740740740741,0.8574074074074074 517 | 515,0.04696489870548248,0.9861111111111112,0.8611111111111112 518 | 516,0.06626659631729126,0.9824074074074074,0.8537037037037037 519 | 517,0.05801072344183922,0.9787037037037037,0.8546296296296296 520 | 518,0.050684213638305664,0.9796296296296296,0.8490740740740741 521 | 
519,0.045229434967041016,0.987037037037037,0.85 522 | 520,0.05862509459257126,0.9787037037037037,0.8583333333333333 523 | 521,0.0651608482003212,0.9814814814814815,0.8629629629629629 524 | 522,0.06773564219474792,0.9731481481481481,0.8601851851851852 525 | 523,0.06089547649025917,0.9768518518518519,0.8583333333333333 526 | 524,0.050331734120845795,0.9842592592592593,0.8611111111111112 527 | 525,0.05642896890640259,0.9740740740740741,0.8564814814814815 528 | 526,0.0560162179172039,0.9787037037037037,0.8527777777777777 529 | 527,0.05049844831228256,0.9833333333333333,0.8583333333333333 530 | 528,0.06052472069859505,0.975925925925926,0.8583333333333333 531 | 529,0.04242582991719246,0.987037037037037,0.8527777777777777 532 | 530,0.054025452584028244,0.9787037037037037,0.85 533 | 531,0.057053305208683014,0.9777777777777777,0.8564814814814815 534 | 532,0.044379930943250656,0.9824074074074074,0.8527777777777777 535 | 533,0.047525499016046524,0.987037037037037,0.8546296296296296 536 | 534,0.06820569187402725,0.975,0.8546296296296296 537 | 535,0.07935632020235062,0.9731481481481481,0.8546296296296296 538 | 536,0.04700257629156113,0.9861111111111112,0.8527777777777777 539 | 537,0.05702509358525276,0.9768518518518519,0.85 540 | 538,0.04215047135949135,0.9879629629629629,0.8537037037037037 541 | 539,0.06096314266324043,0.9768518518518519,0.8564814814814815 542 | 540,0.0567907951772213,0.9787037037037037,0.8564814814814815 543 | 541,0.05770916864275932,0.9787037037037037,0.850925925925926 544 | 542,0.058390166610479355,0.975925925925926,0.8583333333333333 545 | 543,0.04770117253065109,0.9842592592592593,0.8592592592592593 546 | 544,0.05484658479690552,0.9814814814814815,0.8583333333333333 547 | 545,0.0526733435690403,0.9796296296296296,0.8601851851851852 548 | 546,0.06041128933429718,0.9787037037037037,0.8574074074074074 549 | 547,0.05730482563376427,0.9796296296296296,0.8583333333333333 550 | 548,0.06450871378183365,0.9787037037037037,0.8518518518518519 551 | 549,0.06198396906256676,0.9777777777777777,0.8611111111111112 552 | 550,0.05420535057783127,0.9796296296296296,0.8472222222222222 553 | 551,0.05214926227927208,0.9814814814814815,0.8583333333333333 554 | 552,0.07093249261379242,0.975,0.8555555555555555 555 | 553,0.05599796026945114,0.9805555555555555,0.8546296296296296 556 | 554,0.05158387869596481,0.9851851851851852,0.862037037037037 557 | 555,0.05149943754076958,0.9833333333333333,0.8601851851851852 558 | 556,0.053179480135440826,0.9824074074074074,0.8527777777777777 559 | 557,0.047451119869947433,0.9879629629629629,0.8342592592592593 560 | 558,0.06500561535358429,0.9768518518518519,0.8638888888888889 561 | 559,0.06316733360290527,0.9731481481481481,0.8564814814814815 562 | 560,0.04835186153650284,0.987037037037037,0.8592592592592593 563 | 561,0.05645449087023735,0.9796296296296296,0.8611111111111112 564 | 562,0.07492376118898392,0.9694444444444444,0.8518518518518519 565 | 563,0.058401238173246384,0.9814814814814815,0.862037037037037 566 | 564,0.06499544531106949,0.9768518518518519,0.8481481481481481 567 | 565,0.05243556946516037,0.9796296296296296,0.8583333333333333 568 | 566,0.06201353296637535,0.9777777777777777,0.8546296296296296 569 | 567,0.06059228256344795,0.9740740740740741,0.8592592592592593 570 | 568,0.0618692971765995,0.9787037037037037,0.8546296296296296 571 | 569,0.04903591424226761,0.9824074074074074,0.850925925925926 572 | 570,0.053980737924575806,0.9824074074074074,0.8537037037037037 573 | 571,0.05455328896641731,0.9824074074074074,0.8537037037037037 574 | 
572,0.05372597277164459,0.9824074074074074,0.8490740740740741 575 | 573,0.05037764459848404,0.9805555555555555,0.8518518518518519 576 | 574,0.044683538377285004,0.9861111111111112,0.8555555555555555 577 | 575,0.049047719687223434,0.9842592592592593,0.8518518518518519 578 | 576,0.06608076393604279,0.9777777777777777,0.8555555555555555 579 | 577,0.05767354369163513,0.9851851851851852,0.8592592592592593 580 | 578,0.07284024357795715,0.9740740740740741,0.8490740740740741 581 | 579,0.07972890138626099,0.9694444444444444,0.8555555555555555 582 | 580,0.05402060225605965,0.9787037037037037,0.8574074074074074 583 | 581,0.050857026129961014,0.9814814814814815,0.8583333333333333 584 | 582,0.07075835764408112,0.9722222222222222,0.8546296296296296 585 | 583,0.06337172538042068,0.975925925925926,0.8592592592592593 586 | 584,0.0514293909072876,0.9796296296296296,0.8574074074074074 587 | 585,0.041634127497673035,0.9898148148148148,0.8592592592592593 588 | 586,0.04878133162856102,0.9833333333333333,0.8518518518518519 589 | 587,0.04723140224814415,0.9842592592592593,0.8648148148148148 590 | 588,0.05938871204853058,0.9796296296296296,0.8574074074074074 591 | 589,0.05483178049325943,0.9805555555555555,0.8583333333333333 592 | 590,0.047993242740631104,0.9833333333333333,0.8611111111111112 593 | 591,0.053385622799396515,0.9824074074074074,0.85 594 | 592,0.05284566804766655,0.9833333333333333,0.8546296296296296 595 | 593,0.05216984823346138,0.9842592592592593,0.8592592592592593 596 | 594,0.05219867452979088,0.9851851851851852,0.8518518518518519 597 | 595,0.05438060313463211,0.9824074074074074,0.8546296296296296 598 | 596,0.054400019347667694,0.9796296296296296,0.8527777777777777 599 | 597,0.04880572855472565,0.9777777777777777,0.8472222222222222 600 | 598,0.05090931057929993,0.9796296296296296,0.8472222222222222 601 | 599,0.05341554433107376,0.9824074074074074,0.850925925925926 602 | 600,0.051372140645980835,0.9805555555555555,0.8527777777777777 603 | 601,0.056556958705186844,0.9805555555555555,0.8537037037037037 604 | 602,0.05515209585428238,0.9768518518518519,0.8555555555555555 605 | 603,0.06208395957946777,0.9814814814814815,0.8453703703703703 606 | 604,0.04973379150032997,0.9805555555555555,0.8527777777777777 607 | 605,0.04899045452475548,0.987037037037037,0.8537037037037037 608 | 606,0.05464046448469162,0.9833333333333333,0.8611111111111112 609 | 607,0.04668666049838066,0.9861111111111112,0.862037037037037 610 | 608,0.04914805293083191,0.9805555555555555,0.8611111111111112 611 | 609,0.04385225847363472,0.9842592592592593,0.8611111111111112 612 | 610,0.050797346979379654,0.9787037037037037,0.8574074074074074 613 | 611,0.050997816026210785,0.987037037037037,0.8592592592592593 614 | 612,0.03969847783446312,0.9861111111111112,0.8564814814814815 615 | 613,0.03685866296291351,0.987037037037037,0.8583333333333333 616 | 614,0.03820493444800377,0.9879629629629629,0.8592592592592593 617 | 615,0.049072738736867905,0.9842592592592593,0.8583333333333333 618 | 616,0.048153262585401535,0.9824074074074074,0.8583333333333333 619 | 617,0.040718019008636475,0.9888888888888889,0.8592592592592593 620 | 618,0.048579636961221695,0.9851851851851852,0.8592592592592593 621 | 619,0.05614776536822319,0.975925925925926,0.8601851851851852 622 | 620,0.044610437005758286,0.9861111111111112,0.8648148148148148 623 | 621,0.06790630519390106,0.975925925925926,0.8583333333333333 624 | 622,0.03659170866012573,0.9888888888888889,0.8537037037037037 625 | 623,0.050863735377788544,0.9842592592592593,0.8527777777777777 626 | 
624,0.049349915236234665,0.9833333333333333,0.8601851851851852 627 | 625,0.055011678487062454,0.9851851851851852,0.862037037037037 628 | 626,0.04748889431357384,0.9842592592592593,0.8583333333333333 629 | 627,0.04249182716012001,0.9851851851851852,0.8564814814814815 630 | 628,0.054805293679237366,0.9796296296296296,0.8601851851851852 631 | 629,0.035045064985752106,0.9907407407407407,0.8601851851851852 632 | 630,0.03991865739226341,0.9898148148148148,0.8583333333333333 633 | 631,0.05198892205953598,0.9814814814814815,0.8592592592592593 634 | 632,0.05324519798159599,0.975,0.8564814814814815 635 | 633,0.05380745232105255,0.9787037037037037,0.8546296296296296 636 | 634,0.048334334045648575,0.9805555555555555,0.8518518518518519 637 | 635,0.05149901285767555,0.9833333333333333,0.8555555555555555 638 | 636,0.05268094688653946,0.9768518518518519,0.8537037037037037 639 | 637,0.05970735847949982,0.9740740740740741,0.8490740740740741 640 | 638,0.044028423726558685,0.9851851851851852,0.8537037037037037 641 | 639,0.038532793521881104,0.9833333333333333,0.8574074074074074 642 | 640,0.06159931421279907,0.9740740740740741,0.8564814814814815 643 | 641,0.06983960419893265,0.975925925925926,0.8564814814814815 644 | 642,0.0428994745016098,0.9861111111111112,0.8592592592592593 645 | 643,0.04765273258090019,0.9805555555555555,0.8564814814814815 646 | 644,0.05978364497423172,0.9796296296296296,0.8564814814814815 647 | 645,0.047292083501815796,0.9842592592592593,0.8583333333333333 648 | 646,0.059028495103120804,0.9787037037037037,0.8592592592592593 649 | 647,0.06306006014347076,0.9824074074074074,0.8574074074074074 650 | 648,0.05138592794537544,0.9824074074074074,0.8601851851851852 651 | 649,0.04157331585884094,0.9879629629629629,0.8574074074074074 652 | 650,0.04527664557099342,0.9842592592592593,0.8629629629629629 653 | 651,0.04242401197552681,0.9842592592592593,0.8583333333333333 654 | 652,0.04923214390873909,0.9842592592592593,0.8592592592592593 655 | 653,0.03647953271865845,0.9907407407407407,0.8583333333333333 656 | 654,0.04942069202661514,0.9842592592592593,0.8574074074074074 657 | 655,0.03844870254397392,0.987037037037037,0.8638888888888889 658 | 656,0.036335933953523636,0.9861111111111112,0.8611111111111112 659 | 657,0.06792914867401123,0.9740740740740741,0.8574074074074074 660 | 658,0.05937414616346359,0.9796296296296296,0.8611111111111112 661 | 659,0.06836635619401932,0.9768518518518519,0.8435185185185186 662 | 660,0.06717295199632645,0.9777777777777777,0.8518518518518519 663 | 661,0.04207709804177284,0.9851851851851852,0.8490740740740741 664 | 662,0.05383051559329033,0.9777777777777777,0.8592592592592593 665 | 663,0.03741542622447014,0.987037037037037,0.8592592592592593 666 | 664,0.044877734035253525,0.9814814814814815,0.8546296296296296 667 | 665,0.037379488348960876,0.9879629629629629,0.85 668 | 666,0.05004338547587395,0.9824074074074074,0.8546296296296296 669 | 667,0.039306312799453735,0.9833333333333333,0.8546296296296296 670 | 668,0.03726518899202347,0.9851851851851852,0.8592592592592593 671 | 669,0.041859742254018784,0.9851851851851852,0.8518518518518519 672 | 670,0.039138082414865494,0.987037037037037,0.8546296296296296 673 | 671,0.03750765696167946,0.9916666666666667,0.8546296296296296 674 | 672,0.031056495383381844,0.9907407407407407,0.8527777777777777 675 | 673,0.049477748572826385,0.9796296296296296,0.8611111111111112 676 | 674,0.040604304522275925,0.9861111111111112,0.8648148148148148 677 | 675,0.038957804441452026,0.9842592592592593,0.862037037037037 678 | 
676,0.0429944172501564,0.9805555555555555,0.8592592592592593 679 | 677,0.03402583673596382,0.9861111111111112,0.8583333333333333 680 | 678,0.047143563628196716,0.9805555555555555,0.8564814814814815 681 | 679,0.04800241440534592,0.9851851851851852,0.8564814814814815 682 | 680,0.045927830040454865,0.9833333333333333,0.8592592592592593 683 | 681,0.05284411460161209,0.9824074074074074,0.8546296296296296 684 | 682,0.04742374271154404,0.9796296296296296,0.8555555555555555 685 | 683,0.05297929421067238,0.9824074074074074,0.8574074074074074 686 | 684,0.05167287960648537,0.987037037037037,0.8601851851851852 687 | 685,0.054003290832042694,0.9824074074074074,0.8555555555555555 688 | 686,0.05431744456291199,0.9796296296296296,0.85 689 | 687,0.05166762322187424,0.9824074074074074,0.8583333333333333 690 | 688,0.03887671232223511,0.9888888888888889,0.8574074074074074 691 | 689,0.053349047899246216,0.9851851851851852,0.8583333333333333 692 | 690,0.053921669721603394,0.9814814814814815,0.8583333333333333 693 | 691,0.048295002430677414,0.9851851851851852,0.862037037037037 694 | 692,0.04660574346780777,0.9833333333333333,0.8657407407407407 695 | 693,0.040640734136104584,0.9805555555555555,0.8648148148148148 696 | 694,0.04856833443045616,0.9796296296296296,0.8583333333333333 697 | 695,0.030626671388745308,0.9888888888888889,0.8546296296296296 698 | 696,0.03645568713545799,0.9888888888888889,0.8601851851851852 699 | 697,0.042389001697301865,0.9842592592592593,0.862037037037037 700 | 698,0.04068094864487648,0.987037037037037,0.8611111111111112 701 | 699,0.034717220813035965,0.9916666666666667,0.8583333333333333 702 | -------------------------------------------------------------------------------- /history_csv/EEGNet_LeakyReLU.csv: -------------------------------------------------------------------------------- 1 | ,loss,train_accuracy_history,test_accuracy_history 2 | 0,0.7242484092712402,0.49444444444444446,0.6055555555555555 3 | 1,0.7318371534347534,0.5740740740740741,0.5 4 | 2,1.8442935943603516,0.5,0.5416666666666666 5 | 3,0.8749323487281799,0.5740740740740741,0.6777777777777778 6 | 4,0.7092840671539307,0.6370370370370371,0.6351851851851852 7 | 5,0.7595816254615784,0.6333333333333333,0.6648148148148149 8 | 6,0.589707612991333,0.6972222222222222,0.6916666666666667 9 | 7,0.5911937355995178,0.6935185185185185,0.6888888888888889 10 | 8,0.5510285496711731,0.7074074074074074,0.6759259259259259 11 | 9,0.5205474495887756,0.7194444444444444,0.6842592592592592 12 | 10,0.5243563652038574,0.7231481481481481,0.6953703703703704 13 | 11,0.5067970752716064,0.7314814814814815,0.7037037037037037 14 | 12,0.5066251158714294,0.7277777777777777,0.7 15 | 13,0.5126774907112122,0.7277777777777777,0.7064814814814815 16 | 14,0.494672030210495,0.7592592592592593,0.7083333333333334 17 | 15,0.4849047064781189,0.7583333333333333,0.7064814814814815 18 | 16,0.4857172667980194,0.7398148148148148,0.7092592592592593 19 | 17,0.47782835364341736,0.7620370370370371,0.7101851851851851 20 | 18,0.47280922532081604,0.7675925925925926,0.7129629629629629 21 | 19,0.4807232916355133,0.7537037037037037,0.7111111111111111 22 | 20,0.47129082679748535,0.774074074074074,0.7138888888888889 23 | 21,0.4591389298439026,0.7648148148148148,0.7166666666666667 24 | 22,0.4649001955986023,0.774074074074074,0.7166666666666667 25 | 23,0.46087557077407837,0.774074074074074,0.7212962962962963 26 | 24,0.47058460116386414,0.7611111111111111,0.7194444444444444 27 | 25,0.4574286639690399,0.7824074074074074,0.7194444444444444 28 | 
26,0.4613454043865204,0.7666666666666667,0.7185185185185186 29 | 27,0.4721715450286865,0.775,0.7203703703703703 30 | 28,0.45530936121940613,0.775,0.7222222222222222 31 | 29,0.44693559408187866,0.7907407407407407,0.725925925925926 32 | 30,0.45765987038612366,0.7731481481481481,0.7342592592592593 33 | 31,0.45074084401130676,0.7759259259259259,0.7342592592592593 34 | 32,0.4545373320579529,0.7666666666666667,0.7351851851851852 35 | 33,0.4534953534603119,0.775,0.7361111111111112 36 | 34,0.45212650299072266,0.7796296296296297,0.7324074074074074 37 | 35,0.4378930330276489,0.7851851851851852,0.7333333333333333 38 | 36,0.44024115800857544,0.787962962962963,0.7287037037037037 39 | 37,0.4414830803871155,0.7851851851851852,0.7287037037037037 40 | 38,0.4282678961753845,0.8,0.7305555555555555 41 | 39,0.4444934129714966,0.7768518518518519,0.7407407407407407 42 | 40,0.43651479482650757,0.7851851851851852,0.7407407407407407 43 | 41,0.43600356578826904,0.8074074074074075,0.7342592592592593 44 | 42,0.4249078333377838,0.7953703703703704,0.7342592592592593 45 | 43,0.4301309287548065,0.7870370370370371,0.7361111111111112 46 | 44,0.42768511176109314,0.7981481481481482,0.7314814814814815 47 | 45,0.4342251121997833,0.7861111111111111,0.7324074074074074 48 | 46,0.4237412214279175,0.7888888888888889,0.7379629629629629 49 | 47,0.42524904012680054,0.7888888888888889,0.7435185185185185 50 | 48,0.42282959818840027,0.8018518518518518,0.7435185185185185 51 | 49,0.39849910140037537,0.8185185185185185,0.7472222222222222 52 | 50,0.4234986901283264,0.8,0.7407407407407407 53 | 51,0.40454477071762085,0.8055555555555556,0.7435185185185185 54 | 52,0.40882912278175354,0.8166666666666667,0.7453703703703703 55 | 53,0.4086543023586273,0.812962962962963,0.7481481481481481 56 | 54,0.4093385338783264,0.7972222222222223,0.75 57 | 55,0.41299471259117126,0.7953703703703704,0.7564814814814815 58 | 56,0.40216413140296936,0.8277777777777777,0.7574074074074074 59 | 57,0.39902788400650024,0.8148148148148148,0.7564814814814815 60 | 58,0.38099807500839233,0.8314814814814815,0.7527777777777778 61 | 59,0.398322194814682,0.8231481481481482,0.7574074074074074 62 | 60,0.3921894133090973,0.8240740740740741,0.7574074074074074 63 | 61,0.3831443786621094,0.8166666666666667,0.762962962962963 64 | 62,0.3633168935775757,0.8416666666666667,0.7601851851851852 65 | 63,0.35357439517974854,0.8324074074074074,0.7712962962962963 66 | 64,0.3806248903274536,0.8231481481481482,0.7787037037037037 67 | 65,0.3555682599544525,0.837037037037037,0.7805555555555556 68 | 66,0.36245065927505493,0.8388888888888889,0.7805555555555556 69 | 67,0.35602980852127075,0.8416666666666667,0.787962962962963 70 | 68,0.34572362899780273,0.85,0.7796296296296297 71 | 69,0.3510808050632477,0.8435185185185186,0.7972222222222223 72 | 70,0.34093984961509705,0.8490740740740741,0.7981481481481482 73 | 71,0.34312257170677185,0.8490740740740741,0.7953703703703704 74 | 72,0.3315439522266388,0.862037037037037,0.7925925925925926 75 | 73,0.3426104187965393,0.8555555555555555,0.7981481481481482 76 | 74,0.3431813418865204,0.8537037037037037,0.7962962962962963 77 | 75,0.3297426998615265,0.8564814814814815,0.8009259259259259 78 | 76,0.31635332107543945,0.8648148148148148,0.799074074074074 79 | 77,0.3366682231426239,0.8490740740740741,0.8046296296296296 80 | 78,0.3109399676322937,0.8722222222222222,0.8046296296296296 81 | 79,0.3136817216873169,0.8675925925925926,0.8101851851851852 82 | 80,0.30560585856437683,0.8740740740740741,0.812962962962963 83 | 81,0.3067622482776642,0.8731481481481481,0.8111111111111111 
84 | 82,0.30660775303840637,0.8759259259259259,0.8138888888888889 85 | 83,0.292537122964859,0.8851851851851852,0.8092592592592592 86 | 84,0.30282220244407654,0.8694444444444445,0.812037037037037 87 | 85,0.286873996257782,0.8805555555555555,0.8157407407407408 88 | 86,0.2968757748603821,0.8824074074074074,0.8287037037037037 89 | 87,0.29624003171920776,0.8694444444444445,0.8212962962962963 90 | 88,0.2801753282546997,0.8824074074074074,0.8240740740740741 91 | 89,0.29342713952064514,0.8787037037037037,0.8064814814814815 92 | 90,0.2901999354362488,0.8740740740740741,0.8240740740740741 93 | 91,0.28165385127067566,0.8833333333333333,0.8222222222222222 94 | 92,0.2893243432044983,0.8805555555555555,0.8333333333333334 95 | 93,0.2723652124404907,0.887962962962963,0.8268518518518518 96 | 94,0.2729160785675049,0.8962962962962963,0.825 97 | 95,0.2767501771450043,0.8925925925925926,0.8111111111111111 98 | 96,0.27013102173805237,0.8953703703703704,0.8379629629629629 99 | 97,0.2607714533805847,0.8935185185185185,0.8379629629629629 100 | 98,0.27269574999809265,0.8935185185185185,0.8416666666666667 101 | 99,0.2542588412761688,0.899074074074074,0.8407407407407408 102 | 100,0.2505200505256653,0.9009259259259259,0.8425925925925926 103 | 101,0.2529260218143463,0.8981481481481481,0.8425925925925926 104 | 102,0.24970093369483948,0.9,0.8222222222222222 105 | 103,0.2538243532180786,0.8962962962962963,0.8416666666666667 106 | 104,0.2528512179851532,0.8981481481481481,0.8435185185185186 107 | 105,0.2410922348499298,0.9027777777777778,0.8314814814814815 108 | 106,0.24340426921844482,0.9055555555555556,0.8435185185185186 109 | 107,0.2310989946126938,0.9064814814814814,0.8444444444444444 110 | 108,0.24148757755756378,0.9046296296296297,0.8453703703703703 111 | 109,0.25658857822418213,0.8981481481481481,0.8407407407407408 112 | 110,0.22231683135032654,0.9074074074074074,0.85 113 | 111,0.21699701249599457,0.9148148148148149,0.8453703703703703 114 | 112,0.22566372156143188,0.9194444444444444,0.8472222222222222 115 | 113,0.23885568976402283,0.9138888888888889,0.8527777777777777 116 | 114,0.23032905161380768,0.9074074074074074,0.8472222222222222 117 | 115,0.2345239669084549,0.9037037037037037,0.850925925925926 118 | 116,0.2360721379518509,0.8870370370370371,0.8444444444444444 119 | 117,0.22626754641532898,0.9092592592592592,0.8472222222222222 120 | 118,0.22671081125736237,0.9037037037037037,0.8453703703703703 121 | 119,0.21759247779846191,0.9111111111111111,0.8518518518518519 122 | 120,0.21060863137245178,0.9120370370370371,0.8490740740740741 123 | 121,0.2204764485359192,0.9157407407407407,0.8527777777777777 124 | 122,0.21701861917972565,0.9092592592592592,0.8527777777777777 125 | 123,0.21404612064361572,0.9185185185185185,0.8574074074074074 126 | 124,0.216660737991333,0.9166666666666666,0.8527777777777777 127 | 125,0.20843756198883057,0.9222222222222223,0.8462962962962963 128 | 126,0.21904544532299042,0.9111111111111111,0.8407407407407408 129 | 127,0.2149248719215393,0.9185185185185185,0.850925925925926 130 | 128,0.2066865712404251,0.9212962962962963,0.8546296296296296 131 | 129,0.21732757985591888,0.9027777777777778,0.85 132 | 130,0.214651957154274,0.9083333333333333,0.8462962962962963 133 | 131,0.2116249054670334,0.9175925925925926,0.8444444444444444 134 | 132,0.20306898653507233,0.9194444444444444,0.8601851851851852 135 | 133,0.20050941407680511,0.9194444444444444,0.8453703703703703 136 | 134,0.21404299139976501,0.9166666666666666,0.8416666666666667 137 | 135,0.2136870175600052,0.9138888888888889,0.8462962962962963 138 | 
136,0.20498275756835938,0.9287037037037037,0.8583333333333333 139 | 137,0.1832038313150406,0.9314814814814815,0.8462962962962963 140 | 138,0.18986187875270844,0.9305555555555556,0.8564814814814815 141 | 139,0.18811717629432678,0.9314814814814815,0.8564814814814815 142 | 140,0.1946287602186203,0.924074074074074,0.8546296296296296 143 | 141,0.1986522227525711,0.9194444444444444,0.8490740740740741 144 | 142,0.1900337189435959,0.9222222222222223,0.8518518518518519 145 | 143,0.2017524242401123,0.9166666666666666,0.85 146 | 144,0.1858319342136383,0.9287037037037037,0.8425925925925926 147 | 145,0.2066008746623993,0.9166666666666666,0.8518518518518519 148 | 146,0.20857155323028564,0.9194444444444444,0.8490740740740741 149 | 147,0.18415020406246185,0.9314814814814815,0.8518518518518519 150 | 148,0.18173913657665253,0.9305555555555556,0.8564814814814815 151 | 149,0.17339231073856354,0.9333333333333333,0.8546296296296296 152 | 150,0.20019592344760895,0.9231481481481482,0.8583333333333333 153 | 151,0.17765243351459503,0.9324074074074075,0.8546296296296296 154 | 152,0.188533753156662,0.9259259259259259,0.8518518518518519 155 | 153,0.18074777722358704,0.9333333333333333,0.8546296296296296 156 | 154,0.1791534721851349,0.9324074074074075,0.8555555555555555 157 | 155,0.18786348402500153,0.9222222222222223,0.8490740740740741 158 | 156,0.18230696022510529,0.9314814814814815,0.8527777777777777 159 | 157,0.18681727349758148,0.9185185185185185,0.8574074074074074 160 | 158,0.17945611476898193,0.9222222222222223,0.8527777777777777 161 | 159,0.18282219767570496,0.9268518518518518,0.8564814814814815 162 | 160,0.1652323305606842,0.9314814814814815,0.8490740740740741 163 | 161,0.17658104002475739,0.9287037037037037,0.850925925925926 164 | 162,0.16309291124343872,0.9398148148148148,0.8555555555555555 165 | 163,0.1730714589357376,0.9305555555555556,0.850925925925926 166 | 164,0.17246299982070923,0.9314814814814815,0.85 167 | 165,0.170795738697052,0.9277777777777778,0.8601851851851852 168 | 166,0.16302627325057983,0.9361111111111111,0.8518518518518519 169 | 167,0.18148955702781677,0.9259259259259259,0.8611111111111112 170 | 168,0.16870731115341187,0.9342592592592592,0.85 171 | 169,0.17132332921028137,0.9314814814814815,0.8546296296296296 172 | 170,0.15787793695926666,0.9324074074074075,0.8629629629629629 173 | 171,0.1720406711101532,0.925,0.8555555555555555 174 | 172,0.17426298558712006,0.9296296296296296,0.8546296296296296 175 | 173,0.16609562933444977,0.9388888888888889,0.8555555555555555 176 | 174,0.16308189928531647,0.937037037037037,0.8629629629629629 177 | 175,0.16090284287929535,0.9425925925925925,0.8583333333333333 178 | 176,0.16614656150341034,0.937037037037037,0.8555555555555555 179 | 177,0.16447025537490845,0.9462962962962963,0.8574074074074074 180 | 178,0.14940179884433746,0.9435185185185185,0.8583333333333333 181 | 179,0.15031111240386963,0.9398148148148148,0.862037037037037 182 | 180,0.159268319606781,0.9407407407407408,0.8574074074074074 183 | 181,0.13986040651798248,0.9518518518518518,0.8564814814814815 184 | 182,0.1594027429819107,0.9398148148148148,0.8546296296296296 185 | 183,0.16003543138504028,0.9407407407407408,0.8574074074074074 186 | 184,0.15605470538139343,0.9351851851851852,0.85 187 | 185,0.15484769642353058,0.9398148148148148,0.8583333333333333 188 | 186,0.14876897633075714,0.9462962962962963,0.8592592592592593 189 | 187,0.1550288051366806,0.9314814814814815,0.8629629629629629 190 | 188,0.15427803993225098,0.937962962962963,0.8648148148148148 191 | 
189,0.1454058289527893,0.9444444444444444,0.8611111111111112 192 | 190,0.1404881328344345,0.9435185185185185,0.8638888888888889 193 | 191,0.1469123363494873,0.9324074074074075,0.8592592592592593 194 | 192,0.1439601182937622,0.9453703703703704,0.8583333333333333 195 | 193,0.14327844977378845,0.9444444444444444,0.8574074074074074 196 | 194,0.14237749576568604,0.9435185185185185,0.8592592592592593 197 | 195,0.16051176190376282,0.9342592592592592,0.8481481481481481 198 | 196,0.15119796991348267,0.9444444444444444,0.8592592592592593 199 | 197,0.13713905215263367,0.9481481481481482,0.8546296296296296 200 | 198,0.1642834097146988,0.9277777777777778,0.8527777777777777 201 | 199,0.15195852518081665,0.9388888888888889,0.8555555555555555 202 | 200,0.1465427279472351,0.9398148148148148,0.862037037037037 203 | 201,0.145602747797966,0.9361111111111111,0.8407407407407408 204 | 202,0.17050796747207642,0.9287037037037037,0.8546296296296296 205 | 203,0.14932329952716827,0.9462962962962963,0.8555555555555555 206 | 204,0.145091712474823,0.9490740740740741,0.8583333333333333 207 | 205,0.13413316011428833,0.9472222222222222,0.862037037037037 208 | 206,0.13293133676052094,0.9527777777777777,0.8601851851851852 209 | 207,0.1514918953180313,0.9435185185185185,0.8546296296296296 210 | 208,0.13582643866539001,0.95,0.8611111111111112 211 | 209,0.1303371638059616,0.95,0.8574074074074074 212 | 210,0.13368718326091766,0.950925925925926,0.8564814814814815 213 | 211,0.1478671431541443,0.9425925925925925,0.8564814814814815 214 | 212,0.15138229727745056,0.9388888888888889,0.8564814814814815 215 | 213,0.14767801761627197,0.937037037037037,0.8472222222222222 216 | 214,0.14346380531787872,0.9425925925925925,0.8546296296296296 217 | 215,0.13576532900333405,0.9527777777777777,0.8546296296296296 218 | 216,0.12767568230628967,0.9546296296296296,0.85 219 | 217,0.13349002599716187,0.9444444444444444,0.8527777777777777 220 | 218,0.12737956643104553,0.950925925925926,0.8574074074074074 221 | 219,0.12219831347465515,0.9546296296296296,0.8555555555555555 222 | 220,0.11250888556241989,0.9611111111111111,0.8574074074074074 223 | 221,0.12447336316108704,0.9537037037037037,0.8592592592592593 224 | 222,0.14377851784229279,0.9416666666666667,0.8601851851851852 225 | 223,0.12807370722293854,0.9490740740740741,0.8601851851851852 226 | 224,0.11785875260829926,0.9555555555555556,0.8601851851851852 227 | 225,0.1192782074213028,0.9611111111111111,0.8592592592592593 228 | 226,0.1250288188457489,0.9546296296296296,0.8583333333333333 229 | 227,0.1346576064825058,0.9416666666666667,0.8601851851851852 230 | 228,0.12437864392995834,0.95,0.8592592592592593 231 | 229,0.1284572333097458,0.9472222222222222,0.8601851851851852 232 | 230,0.1285478174686432,0.9555555555555556,0.8592592592592593 233 | 231,0.12097053229808807,0.9546296296296296,0.8546296296296296 234 | 232,0.12706246972084045,0.9462962962962963,0.8574074074074074 235 | 233,0.11952414363622665,0.9462962962962963,0.8601851851851852 236 | 234,0.12122178077697754,0.95,0.8611111111111112 237 | 235,0.12636518478393555,0.9546296296296296,0.8564814814814815 238 | 236,0.11928740888834,0.9601851851851851,0.8611111111111112 239 | 237,0.1311182826757431,0.9444444444444444,0.862037037037037 240 | 238,0.1268816888332367,0.9518518518518518,0.862037037037037 241 | 239,0.12991172075271606,0.9555555555555556,0.8592592592592593 242 | 240,0.11294937133789062,0.9555555555555556,0.8564814814814815 243 | 241,0.12160318344831467,0.9611111111111111,0.8601851851851852 244 | 
242,0.12483489513397217,0.9537037037037037,0.8648148148148148 245 | 243,0.11246000230312347,0.9629629629629629,0.8648148148148148 246 | 244,0.12537898123264313,0.95,0.8629629629629629 247 | 245,0.13037480413913727,0.9435185185185185,0.850925925925926 248 | 246,0.1221155971288681,0.9546296296296296,0.8564814814814815 249 | 247,0.11683339625597,0.9527777777777777,0.8555555555555555 250 | 248,0.12221996486186981,0.95,0.8555555555555555 251 | 249,0.12201286852359772,0.9574074074074074,0.8555555555555555 252 | 250,0.11947051435709,0.9583333333333334,0.8574074074074074 253 | 251,0.10811174660921097,0.9611111111111111,0.8601851851851852 254 | 252,0.11894954741001129,0.9546296296296296,0.8555555555555555 255 | 253,0.1239694356918335,0.95,0.8583333333333333 256 | 254,0.11295578628778458,0.9564814814814815,0.8638888888888889 257 | 255,0.11148275434970856,0.9564814814814815,0.8611111111111112 258 | 256,0.11252859234809875,0.9555555555555556,0.8592592592592593 259 | 257,0.12078098952770233,0.9592592592592593,0.8574074074074074 260 | 258,0.1151285395026207,0.9574074074074074,0.862037037037037 261 | 259,0.12634524703025818,0.9537037037037037,0.8611111111111112 262 | 260,0.11655959486961365,0.9555555555555556,0.850925925925926 263 | 261,0.13577596843242645,0.9444444444444444,0.8574074074074074 264 | 262,0.1486474871635437,0.9462962962962963,0.8453703703703703 265 | 263,0.11828653514385223,0.9583333333333334,0.825 266 | 264,0.14387039840221405,0.9416666666666667,0.8537037037037037 267 | 265,0.12236957997083664,0.9490740740740741,0.8611111111111112 268 | 266,0.12390819191932678,0.9490740740740741,0.8481481481481481 269 | 267,0.11171966791152954,0.9601851851851851,0.8583333333333333 270 | 268,0.10369236767292023,0.9685185185185186,0.8583333333333333 271 | 269,0.11104133725166321,0.9574074074074074,0.8629629629629629 272 | 270,0.10792431980371475,0.9629629629629629,0.8601851851851852 273 | 271,0.11497913300991058,0.950925925925926,0.8601851851851852 274 | 272,0.10331190377473831,0.962037037037037,0.8555555555555555 275 | 273,0.10956704616546631,0.9537037037037037,0.8592592592592593 276 | 274,0.10581035166978836,0.9601851851851851,0.8657407407407407 277 | 275,0.11058305948972702,0.962037037037037,0.8629629629629629 278 | 276,0.12393013387918472,0.9472222222222222,0.8648148148148148 279 | 277,0.11075787991285324,0.962037037037037,0.862037037037037 280 | 278,0.11191993951797485,0.9583333333333334,0.8611111111111112 281 | 279,0.09696844220161438,0.9638888888888889,0.8629629629629629 282 | 280,0.10125195235013962,0.9657407407407408,0.862037037037037 283 | 281,0.10839902609586716,0.9574074074074074,0.8638888888888889 284 | 282,0.11623220145702362,0.9555555555555556,0.862037037037037 285 | 283,0.10903124511241913,0.9592592592592593,0.8555555555555555 286 | 284,0.1155661940574646,0.95,0.862037037037037 287 | 285,0.10447345674037933,0.9629629629629629,0.8546296296296296 288 | 286,0.10202465951442719,0.9638888888888889,0.8546296296296296 289 | 287,0.10651734471321106,0.9592592592592593,0.8537037037037037 290 | 288,0.1001584380865097,0.9601851851851851,0.8592592592592593 291 | 289,0.10718374699354172,0.962037037037037,0.862037037037037 292 | 290,0.11392603814601898,0.9574074074074074,0.862037037037037 293 | 291,0.10715892910957336,0.962037037037037,0.8592592592592593 294 | 292,0.09233848750591278,0.9638888888888889,0.8537037037037037 295 | 293,0.10934431105852127,0.9555555555555556,0.8629629629629629 296 | 294,0.08928696066141129,0.9731481481481481,0.8648148148148148 297 | 
295,0.09578872472047806,0.9712962962962963,0.8601851851851852 298 | 296,0.10780899226665497,0.9574074074074074,0.8601851851851852 299 | 297,0.09921777248382568,0.9638888888888889,0.8518518518518519 300 | 298,0.09813589602708817,0.9583333333333334,0.8592592592592593 301 | 299,0.1135319396853447,0.9537037037037037,0.8601851851851852 302 | 300,0.08778584748506546,0.9712962962962963,0.8601851851851852 303 | 301,0.11364657431840897,0.9601851851851851,0.8546296296296296 304 | 302,0.11544331908226013,0.9574074074074074,0.8592592592592593 305 | 303,0.1050947979092598,0.9537037037037037,0.8518518518518519 306 | 304,0.11820502579212189,0.9462962962962963,0.8574074074074074 307 | 305,0.09840027242898941,0.9666666666666667,0.8611111111111112 308 | 306,0.09213822335004807,0.9648148148148148,0.8648148148148148 309 | 307,0.09399846941232681,0.9611111111111111,0.8629629629629629 310 | 308,0.10531751811504364,0.9564814814814815,0.8601851851851852 311 | 309,0.0926426351070404,0.9657407407407408,0.8611111111111112 312 | 310,0.0879606157541275,0.9712962962962963,0.8601851851851852 313 | 311,0.08717894554138184,0.9712962962962963,0.8592592592592593 314 | 312,0.09881841391324997,0.9648148148148148,0.8555555555555555 315 | 313,0.09497888386249542,0.9685185185185186,0.8527777777777777 316 | 314,0.08671785891056061,0.9648148148148148,0.8546296296296296 317 | 315,0.09105182439088821,0.9638888888888889,0.8546296296296296 318 | 316,0.08913447707891464,0.9675925925925926,0.8638888888888889 319 | 317,0.08729175478219986,0.9638888888888889,0.8648148148148148 320 | 318,0.10163694620132446,0.9574074074074074,0.8574074074074074 321 | 319,0.08744069188833237,0.9694444444444444,0.8555555555555555 322 | 320,0.09348853677511215,0.9666666666666667,0.8574074074074074 323 | 321,0.09988346695899963,0.9657407407407408,0.8490740740740741 324 | 322,0.09904851764440536,0.962037037037037,0.8601851851851852 325 | 323,0.09388132393360138,0.9648148148148148,0.8601851851851852 326 | 324,0.11436185985803604,0.9546296296296296,0.8574074074074074 327 | 325,0.09430616348981857,0.9694444444444444,0.8564814814814815 328 | 326,0.08861396461725235,0.9675925925925926,0.85 329 | 327,0.10938730835914612,0.950925925925926,0.850925925925926 330 | 328,0.09560190141201019,0.9629629629629629,0.850925925925926 331 | 329,0.08449885994195938,0.9685185185185186,0.8564814814814815 332 | 330,0.10265357047319412,0.9574074074074074,0.8546296296296296 333 | 331,0.09584697335958481,0.9583333333333334,0.8574074074074074 334 | 332,0.07755979150533676,0.9712962962962963,0.8555555555555555 335 | 333,0.07730432599782944,0.9740740740740741,0.8583333333333333 336 | 334,0.10956286638975143,0.9592592592592593,0.850925925925926 337 | 335,0.1040457934141159,0.9611111111111111,0.8574074074074074 338 | 336,0.0960693210363388,0.9675925925925926,0.8574074074074074 339 | 337,0.09487437456846237,0.9611111111111111,0.8638888888888889 340 | 338,0.10401710867881775,0.962037037037037,0.8601851851851852 341 | 339,0.08102522790431976,0.9740740740740741,0.8611111111111112 342 | 340,0.10003668069839478,0.9629629629629629,0.8564814814814815 343 | 341,0.09084171801805496,0.9638888888888889,0.8574074074074074 344 | 342,0.0866101086139679,0.9685185185185186,0.8611111111111112 345 | 343,0.0714772492647171,0.9796296296296296,0.8592592592592593 346 | 344,0.09239647537469864,0.9638888888888889,0.8564814814814815 347 | 345,0.08721350133419037,0.9666666666666667,0.8629629629629629 348 | 346,0.10270453244447708,0.9657407407407408,0.8564814814814815 349 | 
347,0.07847350835800171,0.9703703703703703,0.8601851851851852 350 | 348,0.0739135891199112,0.9731481481481481,0.862037037037037 351 | 349,0.08665748685598373,0.9648148148148148,0.8611111111111112 352 | 350,0.08045829832553864,0.9712962962962963,0.8592592592592593 353 | 351,0.08703937381505966,0.9685185185185186,0.8555555555555555 354 | 352,0.08072689175605774,0.9703703703703703,0.8546296296296296 355 | 353,0.08864853531122208,0.9648148148148148,0.8574074074074074 356 | 354,0.09450573474168777,0.9638888888888889,0.8555555555555555 357 | 355,0.07309373468160629,0.9777777777777777,0.8638888888888889 358 | 356,0.09689939767122269,0.9629629629629629,0.8592592592592593 359 | 357,0.10376712679862976,0.9592592592592593,0.8629629629629629 360 | 358,0.08714128285646439,0.9694444444444444,0.8611111111111112 361 | 359,0.09291383624076843,0.9666666666666667,0.8527777777777777 362 | 360,0.08908567577600479,0.9657407407407408,0.85 363 | 361,0.0956483706831932,0.9601851851851851,0.8537037037037037 364 | 362,0.0725596621632576,0.9712962962962963,0.8629629629629629 365 | 363,0.07934223860502243,0.9666666666666667,0.8583333333333333 366 | 364,0.08058546483516693,0.9796296296296296,0.8574074074074074 367 | 365,0.08523619174957275,0.9675925925925926,0.8555555555555555 368 | 366,0.08322294056415558,0.975925925925926,0.8583333333333333 369 | 367,0.08192512392997742,0.9703703703703703,0.8583333333333333 370 | 368,0.08762618899345398,0.9685185185185186,0.8592592592592593 371 | 369,0.0824311226606369,0.9666666666666667,0.8574074074074074 372 | 370,0.08435769379138947,0.9666666666666667,0.8564814814814815 373 | 371,0.08995691686868668,0.9675925925925926,0.8564814814814815 374 | 372,0.09385976195335388,0.9629629629629629,0.8574074074074074 375 | 373,0.08942274004220963,0.9675925925925926,0.8537037037037037 376 | 374,0.07829640805721283,0.9712962962962963,0.8592592592592593 377 | 375,0.09324268996715546,0.9648148148148148,0.8592592592592593 378 | 376,0.07565678656101227,0.9722222222222222,0.8592592592592593 379 | 377,0.08007402718067169,0.9657407407407408,0.8629629629629629 380 | 378,0.08222687244415283,0.9722222222222222,0.8666666666666667 381 | 379,0.09413562715053558,0.9629629629629629,0.8564814814814815 382 | 380,0.08359430730342865,0.9694444444444444,0.8546296296296296 383 | 381,0.08807714283466339,0.9648148148148148,0.8666666666666667 384 | 382,0.08210913836956024,0.9722222222222222,0.8592592592592593 385 | 383,0.07931598275899887,0.9703703703703703,0.8601851851851852 386 | 384,0.09638742357492447,0.9592592592592593,0.8527777777777777 387 | 385,0.09048784524202347,0.9675925925925926,0.8564814814814815 388 | 386,0.07668060064315796,0.9694444444444444,0.8527777777777777 389 | 387,0.09196801483631134,0.9611111111111111,0.8564814814814815 390 | 388,0.07711032778024673,0.9722222222222222,0.8518518518518519 391 | 389,0.07972012460231781,0.9712962962962963,0.8592592592592593 392 | 390,0.06670508533716202,0.9787037037037037,0.8583333333333333 393 | 391,0.09070730209350586,0.9638888888888889,0.862037037037037 394 | 392,0.09297041594982147,0.9703703703703703,0.8583333333333333 395 | 393,0.0786428228020668,0.9703703703703703,0.862037037037037 396 | 394,0.07235212624073029,0.9731481481481481,0.8648148148148148 397 | 395,0.07398460060358047,0.9712962962962963,0.8638888888888889 398 | 396,0.08225191384553909,0.9685185185185186,0.8611111111111112 399 | 397,0.07298481464385986,0.9731481481481481,0.8648148148148148 400 | 398,0.07020103186368942,0.9703703703703703,0.8666666666666667 401 | 
399,0.1003236472606659,0.9583333333333334,0.862037037037037 402 | 400,0.07690399885177612,0.9740740740740741,0.8611111111111112 403 | 401,0.0822196900844574,0.9740740740740741,0.8629629629629629 404 | 402,0.07114826887845993,0.9777777777777777,0.8611111111111112 405 | 403,0.07007066160440445,0.9777777777777777,0.8546296296296296 406 | 404,0.07435408979654312,0.9703703703703703,0.8629629629629629 407 | 405,0.08627597987651825,0.9703703703703703,0.8583333333333333 408 | 406,0.07776538282632828,0.9740740740740741,0.8601851851851852 409 | 407,0.07948649674654007,0.9731481481481481,0.862037037037037 410 | 408,0.07821803539991379,0.9694444444444444,0.85 411 | 409,0.07119747251272202,0.975925925925926,0.8564814814814815 412 | 410,0.07849996536970139,0.9694444444444444,0.8638888888888889 413 | 411,0.07725847512483597,0.9703703703703703,0.8601851851851852 414 | 412,0.07046229392290115,0.9833333333333333,0.8564814814814815 415 | 413,0.07648202776908875,0.9712962962962963,0.8574074074074074 416 | 414,0.06914912909269333,0.9777777777777777,0.8583333333333333 417 | 415,0.07078932225704193,0.9731481481481481,0.8638888888888889 418 | 416,0.08095209300518036,0.9722222222222222,0.8666666666666667 419 | 417,0.09749460965394974,0.9611111111111111,0.8564814814814815 420 | 418,0.07480394840240479,0.9740740740740741,0.8574074074074074 421 | 419,0.0725700780749321,0.9731481481481481,0.8657407407407407 422 | 420,0.09639619290828705,0.9611111111111111,0.8629629629629629 423 | 421,0.06188930943608284,0.9833333333333333,0.8564814814814815 424 | 422,0.07664652168750763,0.9777777777777777,0.8629629629629629 425 | 423,0.07888577878475189,0.975,0.862037037037037 426 | 424,0.07459814846515656,0.9740740740740741,0.8638888888888889 427 | 425,0.08596612513065338,0.9712962962962963,0.8583333333333333 428 | 426,0.07219600677490234,0.975925925925926,0.8574074074074074 429 | 427,0.06558022648096085,0.9842592592592593,0.8574074074074074 430 | 428,0.07435883581638336,0.9685185185185186,0.8592592592592593 431 | 429,0.07230570912361145,0.975925925925926,0.8611111111111112 432 | 430,0.07063811272382736,0.9703703703703703,0.8611111111111112 433 | 431,0.07160867750644684,0.9787037037037037,0.8574074074074074 434 | 432,0.07565641403198242,0.9731481481481481,0.8583333333333333 435 | 433,0.0633038878440857,0.9777777777777777,0.8555555555555555 436 | 434,0.08687080442905426,0.9666666666666667,0.8537037037037037 437 | 435,0.08516650646924973,0.9657407407407408,0.8583333333333333 438 | 436,0.07029548287391663,0.9703703703703703,0.8564814814814815 439 | 437,0.07016608119010925,0.9740740740740741,0.8601851851851852 440 | 438,0.09109492599964142,0.9638888888888889,0.862037037037037 441 | 439,0.06557968258857727,0.975925925925926,0.8657407407407407 442 | 440,0.07235276699066162,0.9731481481481481,0.8583333333333333 443 | 441,0.07209056615829468,0.9777777777777777,0.8583333333333333 444 | 442,0.08334317058324814,0.9648148148148148,0.8527777777777777 445 | 443,0.07699482887983322,0.9694444444444444,0.8574074074074074 446 | 444,0.06466782838106155,0.975,0.8546296296296296 447 | 445,0.06783969700336456,0.9740740740740741,0.8629629629629629 448 | 446,0.06887686997652054,0.9814814814814815,0.8638888888888889 449 | 447,0.07107479125261307,0.9787037037037037,0.8629629629629629 450 | 448,0.06034103035926819,0.9796296296296296,0.8555555555555555 451 | 449,0.08790556341409683,0.9731481481481481,0.862037037037037 452 | 450,0.08074399083852768,0.9666666666666667,0.8638888888888889 453 | 451,0.08245115727186203,0.975925925925926,0.8666666666666667 454 | 
452,0.07955877482891083,0.9685185185185186,0.8611111111111112 455 | 453,0.0890486016869545,0.9685185185185186,0.8518518518518519 456 | 454,0.05719204619526863,0.9842592592592593,0.8481481481481481 457 | 455,0.07585056871175766,0.9731481481481481,0.8583333333333333 458 | 456,0.07737888395786285,0.9703703703703703,0.8574074074074074 459 | 457,0.0727868378162384,0.9777777777777777,0.862037037037037 460 | 458,0.06545800715684891,0.9731481481481481,0.8611111111111112 461 | 459,0.05672936141490936,0.9787037037037037,0.862037037037037 462 | 460,0.06570231169462204,0.9731481481481481,0.8601851851851852 463 | 461,0.07520895451307297,0.9777777777777777,0.8555555555555555 464 | 462,0.0635601133108139,0.975,0.862037037037037 465 | 463,0.06577102094888687,0.975,0.862037037037037 466 | 464,0.07713451981544495,0.9694444444444444,0.8592592592592593 467 | 465,0.057603102177381516,0.9805555555555555,0.8629629629629629 468 | 466,0.058820486068725586,0.9833333333333333,0.8611111111111112 469 | 467,0.06002100557088852,0.9731481481481481,0.8648148148148148 470 | 468,0.06571737676858902,0.9731481481481481,0.862037037037037 471 | 469,0.0686396136879921,0.975925925925926,0.8564814814814815 472 | 470,0.06536120176315308,0.9796296296296296,0.8657407407407407 473 | 471,0.07520414143800735,0.9657407407407408,0.8685185185185185 474 | 472,0.06476762890815735,0.9768518518518519,0.8490740740740741 475 | 473,0.07430041581392288,0.975925925925926,0.8518518518518519 476 | 474,0.06997513771057129,0.9740740740740741,0.8601851851851852 477 | 475,0.061671242117881775,0.9814814814814815,0.8685185185185185 478 | 476,0.0684516578912735,0.975,0.8657407407407407 479 | 477,0.06126883625984192,0.9777777777777777,0.862037037037037 480 | 478,0.07809513062238693,0.9712962962962963,0.8611111111111112 481 | 479,0.06736984848976135,0.9777777777777777,0.8611111111111112 482 | 480,0.060704704374074936,0.9796296296296296,0.8574074074074074 483 | 481,0.07072954624891281,0.9712962962962963,0.862037037037037 484 | 482,0.06019170582294464,0.9777777777777777,0.8601851851851852 485 | 483,0.05587910860776901,0.9796296296296296,0.8574074074074074 486 | 484,0.06412046402692795,0.975925925925926,0.8546296296296296 487 | 485,0.08078484237194061,0.975,0.8611111111111112 488 | 486,0.06772089004516602,0.975925925925926,0.8592592592592593 489 | 487,0.06460190564393997,0.9805555555555555,0.8611111111111112 490 | 488,0.0656619593501091,0.9768518518518519,0.8564814814814815 491 | 489,0.06325360387563705,0.9740740740740741,0.8648148148148148 492 | 490,0.0747017040848732,0.9694444444444444,0.8564814814814815 493 | 491,0.06317641586065292,0.975,0.8537037037037037 494 | 492,0.08065050095319748,0.9712962962962963,0.8611111111111112 495 | 493,0.06726828962564468,0.975925925925926,0.8527777777777777 496 | 494,0.06161954626441002,0.9777777777777777,0.8564814814814815 497 | 495,0.06664922088384628,0.9768518518518519,0.8648148148148148 498 | 496,0.08651863038539886,0.9629629629629629,0.8638888888888889 499 | 497,0.06705544888973236,0.975,0.8648148148148148 500 | 498,0.07064240425825119,0.975,0.8592592592592593 501 | 499,0.05969282612204552,0.9777777777777777,0.8601851851851852 502 | 500,0.05750519037246704,0.9805555555555555,0.862037037037037 503 | 501,0.06555831432342529,0.9777777777777777,0.8601851851851852 504 | 502,0.055001597851514816,0.9824074074074074,0.8564814814814815 505 | 503,0.07065097987651825,0.9731481481481481,0.8527777777777777 506 | 504,0.057271990925073624,0.9805555555555555,0.8518518518518519 507 | 
505,0.05691845715045929,0.9777777777777777,0.8527777777777777 508 | 506,0.06422417610883713,0.9796296296296296,0.8564814814814815 509 | 507,0.05561039596796036,0.9796296296296296,0.8611111111111112 510 | 508,0.06196587160229683,0.9787037037037037,0.8574074074074074 511 | 509,0.060922276228666306,0.9824074074074074,0.8564814814814815 512 | 510,0.06390323489904404,0.9796296296296296,0.8611111111111112 513 | 511,0.05348202958703041,0.9805555555555555,0.8574074074074074 514 | 512,0.07017120718955994,0.9722222222222222,0.8629629629629629 515 | 513,0.04910425469279289,0.9851851851851852,0.8638888888888889 516 | 514,0.07451395690441132,0.9712962962962963,0.8546296296296296 517 | 515,0.06297845393419266,0.9824074074074074,0.8592592592592593 518 | 516,0.086659274995327,0.9675925925925926,0.8574074074074074 519 | 517,0.08146549761295319,0.9648148148148148,0.8638888888888889 520 | 518,0.0657808780670166,0.9796296296296296,0.8527777777777777 521 | 519,0.06627773493528366,0.9777777777777777,0.8555555555555555 522 | 520,0.07160575687885284,0.975925925925926,0.8546296296296296 523 | 521,0.06580740213394165,0.9731481481481481,0.8555555555555555 524 | 522,0.06206270307302475,0.9796296296296296,0.8611111111111112 525 | 523,0.06035526469349861,0.9824074074074074,0.8537037037037037 526 | 524,0.056754983961582184,0.9787037037037037,0.862037037037037 527 | 525,0.05894530937075615,0.9814814814814815,0.8629629629629629 528 | 526,0.05906198173761368,0.9740740740740741,0.8638888888888889 529 | 527,0.05390932038426399,0.9851851851851852,0.8564814814814815 530 | 528,0.06607576459646225,0.9777777777777777,0.8629629629629629 531 | 529,0.05572834983468056,0.9833333333333333,0.8555555555555555 532 | 530,0.0563749261200428,0.9805555555555555,0.8638888888888889 533 | 531,0.060086559504270554,0.975925925925926,0.8601851851851852 534 | 532,0.05108845233917236,0.9787037037037037,0.8611111111111112 535 | 533,0.04557536914944649,0.9879629629629629,0.8601851851851852 536 | 534,0.05168617516756058,0.9833333333333333,0.8555555555555555 537 | 535,0.08113662153482437,0.9666666666666667,0.8555555555555555 538 | 536,0.0681246817111969,0.9731481481481481,0.850925925925926 539 | 537,0.0634758323431015,0.9787037037037037,0.8537037037037037 540 | 538,0.048201050609350204,0.9916666666666667,0.8574074074074074 541 | 539,0.05642363056540489,0.9787037037037037,0.8574074074074074 542 | 540,0.05948761850595474,0.9814814814814815,0.8592592592592593 543 | 541,0.06083996966481209,0.9768518518518519,0.8574074074074074 544 | 542,0.06779327988624573,0.975,0.8546296296296296 545 | 543,0.05871220678091049,0.9796296296296296,0.8592592592592593 546 | 544,0.058280099183321,0.9777777777777777,0.8675925925925926 547 | 545,0.06862642616033554,0.9694444444444444,0.8583333333333333 548 | 546,0.0625021681189537,0.9777777777777777,0.8648148148148148 549 | 547,0.06328526139259338,0.975925925925926,0.8601851851851852 550 | 548,0.06903477758169174,0.975,0.8592592592592593 551 | 549,0.055243972688913345,0.9814814814814815,0.862037037037037 552 | 550,0.06377283483743668,0.975,0.8648148148148148 553 | 551,0.05092623457312584,0.9777777777777777,0.8657407407407407 554 | 552,0.07323973625898361,0.9731481481481481,0.8601851851851852 555 | 553,0.06695317476987839,0.9787037037037037,0.8574074074074074 556 | 554,0.05169624090194702,0.9824074074074074,0.8629629629629629 557 | 555,0.062240149825811386,0.9768518518518519,0.8666666666666667 558 | 556,0.057912275195121765,0.9805555555555555,0.8611111111111112 559 | 557,0.050995685160160065,0.9851851851851852,0.8490740740740741 
560 | 558,0.05609634146094322,0.9851851851851852,0.8611111111111112 561 | 559,0.059983111917972565,0.9777777777777777,0.8583333333333333 562 | 560,0.057224906980991364,0.9814814814814815,0.8657407407407407 563 | 561,0.05757426470518112,0.9805555555555555,0.8685185185185185 564 | 562,0.06417600810527802,0.9814814814814815,0.8648148148148148 565 | 563,0.05777215212583542,0.9787037037037037,0.8592592592592593 566 | 564,0.0768127292394638,0.9740740740740741,0.8583333333333333 567 | 565,0.052620552480220795,0.9777777777777777,0.8638888888888889 568 | 566,0.06483504921197891,0.9777777777777777,0.8611111111111112 569 | 567,0.04502205550670624,0.9879629629629629,0.8629629629629629 570 | 568,0.0618838407099247,0.9777777777777777,0.8611111111111112 571 | 569,0.05733932554721832,0.9796296296296296,0.8629629629629629 572 | 570,0.06056288257241249,0.9722222222222222,0.862037037037037 573 | 571,0.04925068840384483,0.9851851851851852,0.8666666666666667 574 | 572,0.05403102934360504,0.9814814814814815,0.8574074074074074 575 | 573,0.05848945304751396,0.9805555555555555,0.8601851851851852 576 | 574,0.0434517040848732,0.9861111111111112,0.862037037037037 577 | 575,0.049659863114356995,0.9879629629629629,0.8675925925925926 578 | 576,0.05129554495215416,0.9787037037037037,0.8629629629629629 579 | 577,0.05194801464676857,0.9833333333333333,0.8592592592592593 580 | 578,0.06047441065311432,0.9777777777777777,0.8648148148148148 581 | 579,0.060768771916627884,0.9805555555555555,0.8629629629629629 582 | 580,0.05945796146988869,0.9796296296296296,0.8694444444444445 583 | 581,0.05322476103901863,0.9796296296296296,0.8638888888888889 584 | 582,0.0648767352104187,0.9777777777777777,0.8592592592592593 585 | 583,0.057660870254039764,0.9796296296296296,0.8629629629629629 586 | 584,0.055841896682977676,0.9814814814814815,0.8638888888888889 587 | 585,0.050276052206754684,0.9833333333333333,0.8657407407407407 588 | 586,0.05788541957736015,0.9814814814814815,0.8527777777777777 589 | 587,0.0603993721306324,0.9731481481481481,0.8648148148148148 590 | 588,0.05780067294836044,0.9824074074074074,0.8583333333333333 591 | 589,0.05583375692367554,0.9768518518518519,0.862037037037037 592 | 590,0.05299415811896324,0.9814814814814815,0.8666666666666667 593 | 591,0.05900511145591736,0.9796296296296296,0.8601851851851852 594 | 592,0.0686308816075325,0.9740740740740741,0.8592592592592593 595 | 593,0.05415398254990578,0.9814814814814815,0.862037037037037 596 | 594,0.05625756084918976,0.9805555555555555,0.8657407407407407 597 | 595,0.047346848994493484,0.9842592592592593,0.862037037037037 598 | 596,0.057353291660547256,0.9796296296296296,0.8611111111111112 599 | 597,0.04529428109526634,0.9805555555555555,0.8611111111111112 600 | 598,0.0544745959341526,0.9879629629629629,0.8638888888888889 601 | 599,0.0709248036146164,0.975,0.8638888888888889 602 | 600,0.04525085911154747,0.987037037037037,0.8611111111111112 603 | 601,0.05481597036123276,0.9814814814814815,0.8592592592592593 604 | 602,0.0588061697781086,0.9805555555555555,0.8592592592592593 605 | 603,0.05734889954328537,0.9777777777777777,0.8555555555555555 606 | 604,0.04509403184056282,0.987037037037037,0.8527777777777777 607 | 605,0.044713735580444336,0.9861111111111112,0.8472222222222222 608 | 606,0.049151863902807236,0.9879629629629629,0.8629629629629629 609 | 607,0.04627925902605057,0.9833333333333333,0.862037037037037 610 | 608,0.052648719400167465,0.9814814814814815,0.8648148148148148 611 | 609,0.05057733878493309,0.9824074074074074,0.8657407407407407 612 | 
610,0.05817260220646858,0.9787037037037037,0.8518518518518519 613 | 611,0.05620439723134041,0.9796296296296296,0.862037037037037 614 | 612,0.05361032858490944,0.9833333333333333,0.8638888888888889 615 | 613,0.05367158353328705,0.9796296296296296,0.8638888888888889 616 | 614,0.04686807841062546,0.9824074074074074,0.8648148148148148 617 | 615,0.05754898488521576,0.9796296296296296,0.8583333333333333 618 | 616,0.054196421056985855,0.9824074074074074,0.8574074074074074 619 | 617,0.05604434758424759,0.9842592592592593,0.862037037037037 620 | 618,0.057191893458366394,0.9787037037037037,0.8657407407407407 621 | 619,0.052439212799072266,0.9824074074074074,0.8648148148148148 622 | 620,0.05445997789502144,0.9796296296296296,0.8648148148148148 623 | 621,0.07406198233366013,0.9787037037037037,0.8537037037037037 624 | 622,0.06558506935834885,0.9731481481481481,0.8638888888888889 625 | 623,0.06540263444185257,0.9805555555555555,0.862037037037037 626 | 624,0.05910342186689377,0.9768518518518519,0.8703703703703703 627 | 625,0.05296624079346657,0.9814814814814815,0.8555555555555555 628 | 626,0.049518875777721405,0.9796296296296296,0.8638888888888889 629 | 627,0.04917377233505249,0.9805555555555555,0.8703703703703703 630 | 628,0.05938076600432396,0.9805555555555555,0.8638888888888889 631 | 629,0.049216147512197495,0.9814814814814815,0.862037037037037 632 | 630,0.03818563371896744,0.987037037037037,0.8648148148148148 633 | 631,0.05210832878947258,0.9851851851851852,0.8574074074074074 634 | 632,0.04002687335014343,0.9916666666666667,0.862037037037037 635 | 633,0.056374721229076385,0.975925925925926,0.8657407407407407 636 | 634,0.055809855461120605,0.9768518518518519,0.862037037037037 637 | 635,0.05380857735872269,0.9814814814814815,0.8638888888888889 638 | 636,0.040657903999090195,0.9888888888888889,0.8574074074074074 639 | 637,0.05997834354639053,0.9787037037037037,0.8648148148148148 640 | 638,0.05673659220337868,0.9768518518518519,0.8611111111111112 641 | 639,0.04817114397883415,0.9814814814814815,0.8638888888888889 642 | 640,0.050144828855991364,0.9805555555555555,0.8546296296296296 643 | 641,0.06134054437279701,0.9814814814814815,0.8546296296296296 644 | 642,0.04541808366775513,0.9861111111111112,0.8527777777777777 645 | 643,0.05953512713313103,0.9833333333333333,0.8583333333333333 646 | 644,0.051492005586624146,0.9805555555555555,0.8583333333333333 647 | 645,0.05574837327003479,0.9842592592592593,0.862037037037037 648 | 646,0.05667430907487869,0.9805555555555555,0.8592592592592593 649 | 647,0.04574084281921387,0.9833333333333333,0.8518518518518519 650 | 648,0.055175330489873886,0.9805555555555555,0.8583333333333333 651 | 649,0.05455338582396507,0.9805555555555555,0.8601851851851852 652 | 650,0.049844298511743546,0.9861111111111112,0.862037037037037 653 | 651,0.04176216199994087,0.9879629629629629,0.8592592592592593 654 | 652,0.051767170429229736,0.9833333333333333,0.8592592592592593 655 | 653,0.06255272775888443,0.9740740740740741,0.8611111111111112 656 | 654,0.05545257031917572,0.9824074074074074,0.8583333333333333 657 | 655,0.05120215192437172,0.9824074074074074,0.8611111111111112 658 | 656,0.04497367516160011,0.9842592592592593,0.8583333333333333 659 | 657,0.06502020359039307,0.975925925925926,0.8629629629629629 660 | 658,0.0625997930765152,0.975,0.862037037037037 661 | 659,0.05471241474151611,0.9814814814814815,0.8592592592592593 662 | 660,0.046236928552389145,0.9851851851851852,0.8555555555555555 663 | 661,0.056474458426237106,0.975925925925926,0.8555555555555555 664 | 
662,0.05663993954658508,0.975925925925926,0.8574074074074074 665 | 663,0.04214084893465042,0.9851851851851852,0.8574074074074074 666 | 664,0.040011730045080185,0.9907407407407407,0.8583333333333333 667 | 665,0.051747240126132965,0.987037037037037,0.8537037037037037 668 | 666,0.05798118934035301,0.975,0.8564814814814815 669 | 667,0.04807266965508461,0.9861111111111112,0.8611111111111112 670 | 668,0.04450666531920433,0.9851851851851852,0.8611111111111112 671 | 669,0.0575152151286602,0.9796296296296296,0.8638888888888889 672 | 670,0.045333683490753174,0.987037037037037,0.862037037037037 673 | 671,0.03572048619389534,0.9898148148148148,0.8592592592592593 674 | 672,0.03581805154681206,0.9898148148148148,0.8574074074074074 675 | 673,0.051447488367557526,0.9787037037037037,0.8527777777777777 676 | 674,0.049510106444358826,0.9861111111111112,0.8564814814814815 677 | 675,0.05814313888549805,0.9796296296296296,0.8574074074074074 678 | 676,0.05149722844362259,0.9805555555555555,0.8574074074074074 679 | 677,0.039260927587747574,0.9879629629629629,0.8583333333333333 680 | 678,0.0645260289311409,0.9787037037037037,0.8481481481481481 681 | 679,0.04158508777618408,0.9888888888888889,0.8555555555555555 682 | 680,0.046944934874773026,0.9824074074074074,0.8592592592592593 683 | 681,0.05548521503806114,0.9777777777777777,0.8601851851851852 684 | 682,0.05411051586270332,0.9814814814814815,0.862037037037037 685 | 683,0.05640500783920288,0.9805555555555555,0.8592592592592593 686 | 684,0.043932631611824036,0.9851851851851852,0.8685185185185185 687 | 685,0.038198068737983704,0.9879629629629629,0.8657407407407407 688 | 686,0.04444318637251854,0.9861111111111112,0.8648148148148148 689 | 687,0.04249778389930725,0.9925925925925926,0.8611111111111112 690 | 688,0.04972287267446518,0.9833333333333333,0.8611111111111112 691 | 689,0.04969647154211998,0.9796296296296296,0.8638888888888889 692 | 690,0.06475770473480225,0.9768518518518519,0.8648148148148148 693 | 691,0.04393203929066658,0.9861111111111112,0.8592592592592593 694 | 692,0.04613058269023895,0.9851851851851852,0.8583333333333333 695 | 693,0.043679751455783844,0.9861111111111112,0.8583333333333333 696 | 694,0.04041236266493797,0.9851851851851852,0.8638888888888889 697 | 695,0.039587609469890594,0.9879629629629629,0.8629629629629629 698 | 696,0.04103263095021248,0.9888888888888889,0.8629629629629629 699 | 697,0.04560917988419533,0.9907407407407407,0.862037037037037 700 | 698,0.039870042353868484,0.9879629629629629,0.8611111111111112 701 | 699,0.04895653575658798,0.9805555555555555,0.862037037037037 702 | -------------------------------------------------------------------------------- /history_csv/EEGNet_ReLU.csv: -------------------------------------------------------------------------------- 1 | ,loss,train_accuracy_history,test_accuracy_history 2 | 0,0.7242701053619385,0.4898148148148148,0.5657407407407408 3 | 1,0.7683119773864746,0.55,0.5 4 | 2,2.0760037899017334,0.5,0.5555555555555556 5 | 3,0.6413630843162537,0.6444444444444445,0.6074074074074074 6 | 4,0.6689133644104004,0.6194444444444445,0.6777777777777778 7 | 5,0.5421703457832336,0.7064814814814815,0.6805555555555556 8 | 6,0.5393694639205933,0.7203703703703703,0.674074074074074 9 | 7,0.5340166091918945,0.7342592592592593,0.6787037037037037 10 | 8,0.5112271904945374,0.7453703703703703,0.6953703703703704 11 | 9,0.5086097121238708,0.7268518518518519,0.7064814814814815 12 | 10,0.5085079073905945,0.737037037037037,0.712037037037037 13 | 11,0.49425801634788513,0.7611111111111111,0.7185185185185186 14 | 
12,0.506977379322052,0.7277777777777777,0.7148148148148148 15 | 13,0.5054395794868469,0.7407407407407407,0.7194444444444444 16 | 14,0.4916456341743469,0.7694444444444445,0.725 17 | 15,0.4835962951183319,0.7574074074074074,0.7175925925925926 18 | 16,0.4799795150756836,0.7592592592592593,0.7268518518518519 19 | 17,0.47729384899139404,0.7685185185185185,0.7287037037037037 20 | 18,0.47290298342704773,0.7611111111111111,0.7305555555555555 21 | 19,0.47665145993232727,0.7712962962962963,0.7138888888888889 22 | 20,0.47334006428718567,0.7712962962962963,0.7222222222222222 23 | 21,0.461516797542572,0.7722222222222223,0.7314814814814815 24 | 22,0.46122077107429504,0.7722222222222223,0.7296296296296296 25 | 23,0.45780912041664124,0.7648148148148148,0.7314814814814815 26 | 24,0.46625256538391113,0.7592592592592593,0.7157407407407408 27 | 25,0.45729297399520874,0.7833333333333333,0.7268518518518519 28 | 26,0.4576893150806427,0.7787037037037037,0.725 29 | 27,0.46286460757255554,0.7842592592592592,0.7398148148148148 30 | 28,0.4512041509151459,0.7842592592592592,0.7277777777777777 31 | 29,0.4442245066165924,0.7916666666666666,0.7324074074074074 32 | 30,0.45330771803855896,0.7768518518518519,0.7416666666666667 33 | 31,0.4431035816669464,0.8009259259259259,0.7509259259259259 34 | 32,0.4434913694858551,0.7814814814814814,0.7296296296296296 35 | 33,0.44777947664260864,0.775,0.7379629629629629 36 | 34,0.4502260386943817,0.7888888888888889,0.7342592592592593 37 | 35,0.4357152283191681,0.7898148148148149,0.7444444444444445 38 | 36,0.43155547976493835,0.7888888888888889,0.7425925925925926 39 | 37,0.43617185950279236,0.8,0.7472222222222222 40 | 38,0.4274948537349701,0.8046296296296296,0.7481481481481481 41 | 39,0.43520545959472656,0.7907407407407407,0.7416666666666667 42 | 40,0.42965877056121826,0.8,0.7490740740740741 43 | 41,0.4251673221588135,0.8064814814814815,0.7416666666666667 44 | 42,0.4199344217777252,0.7972222222222223,0.75 45 | 43,0.42748335003852844,0.8009259259259259,0.7425925925925926 46 | 44,0.4233282804489136,0.8,0.75 47 | 45,0.4293104112148285,0.787962962962963,0.7527777777777778 48 | 46,0.41648629307746887,0.8037037037037037,0.7620370370370371 49 | 47,0.418689489364624,0.8027777777777778,0.7564814814814815 50 | 48,0.40954405069351196,0.8027777777777778,0.7666666666666667 51 | 49,0.3900645673274994,0.8314814814814815,0.7638888888888888 52 | 50,0.4028545320034027,0.8222222222222222,0.7620370370370371 53 | 51,0.39005619287490845,0.8314814814814815,0.7666666666666667 54 | 52,0.39453786611557007,0.8222222222222222,0.7648148148148148 55 | 53,0.38747015595436096,0.8351851851851851,0.7731481481481481 56 | 54,0.3957446217536926,0.8166666666666667,0.7675925925925926 57 | 55,0.40259718894958496,0.8175925925925925,0.7712962962962963 58 | 56,0.383501797914505,0.8351851851851851,0.774074074074074 59 | 57,0.3834306299686432,0.8277777777777777,0.7787037037037037 60 | 58,0.36832478642463684,0.837037037037037,0.7842592592592592 61 | 59,0.3815688192844391,0.8231481481481482,0.7861111111111111 62 | 60,0.37191712856292725,0.8398148148148148,0.7888888888888889 63 | 61,0.366870254278183,0.8398148148148148,0.7842592592592592 64 | 62,0.35464543104171753,0.8564814814814815,0.7814814814814814 65 | 63,0.36756619811058044,0.8287037037037037,0.7796296296296297 66 | 64,0.3994515836238861,0.8101851851851852,0.7518518518518519 67 | 65,0.40430036187171936,0.8009259259259259,0.7805555555555556 68 | 66,0.38604262471199036,0.8240740740740741,0.799074074074074 69 | 67,0.35485541820526123,0.8472222222222222,0.7953703703703704 70 | 
68,0.3507339060306549,0.8407407407407408,0.7944444444444444 71 | 69,0.3481066823005676,0.8444444444444444,0.7981481481481482 72 | 70,0.3369613587856293,0.8574074074074074,0.7972222222222223 73 | 71,0.3424978256225586,0.8601851851851852,0.799074074074074 74 | 72,0.3314216732978821,0.8601851851851852,0.8027777777777778 75 | 73,0.34533295035362244,0.8472222222222222,0.8074074074074075 76 | 74,0.3310406506061554,0.8657407407407407,0.8101851851851852 77 | 75,0.3301054537296295,0.8583333333333333,0.8046296296296296 78 | 76,0.33081215620040894,0.862037037037037,0.8138888888888889 79 | 77,0.3481092154979706,0.8379629629629629,0.8055555555555556 80 | 78,0.33334383368492126,0.8685185185185185,0.8009259259259259 81 | 79,0.33576226234436035,0.8555555555555555,0.8111111111111111 82 | 80,0.3239854872226715,0.8611111111111112,0.8111111111111111 83 | 81,0.3325522541999817,0.8527777777777777,0.8009259259259259 84 | 82,0.3477676808834076,0.8425925925925926,0.8064814814814815 85 | 83,0.3349691331386566,0.8407407407407408,0.8 86 | 84,0.32283082604408264,0.8564814814814815,0.8083333333333333 87 | 85,0.3124289810657501,0.8768518518518519,0.8111111111111111 88 | 86,0.31622037291526794,0.8694444444444445,0.812037037037037 89 | 87,0.31144991517066956,0.8675925925925926,0.8138888888888889 90 | 88,0.30324798822402954,0.8731481481481481,0.8092592592592592 91 | 89,0.30958616733551025,0.8759259259259259,0.8203703703703704 92 | 90,0.29140734672546387,0.8898148148148148,0.8157407407407408 93 | 91,0.29717668890953064,0.875,0.8055555555555556 94 | 92,0.30252423882484436,0.8685185185185185,0.8037037037037037 95 | 93,0.29728373885154724,0.8740740740740741,0.8111111111111111 96 | 94,0.29830512404441833,0.8814814814814815,0.8212962962962963 97 | 95,0.2943286597728729,0.8703703703703703,0.8203703703703704 98 | 96,0.28692901134490967,0.8851851851851852,0.8212962962962963 99 | 97,0.2864960730075836,0.8787037037037037,0.8268518518518518 100 | 98,0.2928295135498047,0.8777777777777778,0.8240740740740741 101 | 99,0.2741495668888092,0.8824074074074074,0.8157407407407408 102 | 100,0.2870939373970032,0.875,0.8296296296296296 103 | 101,0.277905136346817,0.8814814814814815,0.8157407407407408 104 | 102,0.29164132475852966,0.8759259259259259,0.8231481481481482 105 | 103,0.30865728855133057,0.8518518518518519,0.8240740740740741 106 | 104,0.3161000609397888,0.8592592592592593,0.825925925925926 107 | 105,0.2975293695926666,0.8629629629629629,0.825925925925926 108 | 106,0.2759478986263275,0.8731481481481481,0.8287037037037037 109 | 107,0.2586618661880493,0.8962962962962963,0.8296296296296296 110 | 108,0.2789125144481659,0.8851851851851852,0.8314814814814815 111 | 109,0.28710755705833435,0.8740740740740741,0.8111111111111111 112 | 110,0.2688528001308441,0.8888888888888888,0.8287037037037037 113 | 111,0.2741178572177887,0.8851851851851852,0.7981481481481482 114 | 112,0.2718975841999054,0.8972222222222223,0.8342592592592593 115 | 113,0.26252079010009766,0.8861111111111111,0.825 116 | 114,0.2713572680950165,0.8925925925925926,0.8268518518518518 117 | 115,0.27285557985305786,0.8888888888888888,0.8305555555555556 118 | 116,0.26036903262138367,0.8870370370370371,0.8314814814814815 119 | 117,0.25668197870254517,0.8935185185185185,0.8342592592592593 120 | 118,0.24736253917217255,0.9046296296296297,0.8342592592592593 121 | 119,0.2544189989566803,0.9046296296296297,0.8277777777777777 122 | 120,0.2561197578907013,0.8907407407407407,0.8361111111111111 123 | 121,0.2544386684894562,0.899074074074074,0.812037037037037 124 | 
122,0.2516731917858124,0.8925925925925926,0.837037037037037 125 | 123,0.23738214373588562,0.912962962962963,0.8333333333333334 126 | 124,0.2351929247379303,0.9083333333333333,0.8324074074074074 127 | 125,0.2531057596206665,0.8898148148148148,0.8277777777777777 128 | 126,0.2532484233379364,0.8935185185185185,0.8342592592592593 129 | 127,0.25653478503227234,0.8888888888888888,0.8351851851851851 130 | 128,0.24795113503932953,0.9018518518518519,0.8287037037037037 131 | 129,0.2471490055322647,0.9,0.837037037037037 132 | 130,0.24532604217529297,0.9027777777777778,0.8324074074074074 133 | 131,0.24109873175621033,0.899074074074074,0.8314814814814815 134 | 132,0.23097816109657288,0.9120370370370371,0.8314814814814815 135 | 133,0.23712338507175446,0.9046296296296297,0.8305555555555556 136 | 134,0.23455536365509033,0.9064814814814814,0.8305555555555556 137 | 135,0.23944538831710815,0.9046296296296297,0.8203703703703704 138 | 136,0.24200114607810974,0.9074074074074074,0.8083333333333333 139 | 137,0.2352231740951538,0.9027777777777778,0.8314814814814815 140 | 138,0.2378188818693161,0.9120370370370371,0.8379629629629629 141 | 139,0.23347479104995728,0.9055555555555556,0.8361111111111111 142 | 140,0.23954640328884125,0.9046296296296297,0.8305555555555556 143 | 141,0.22131012380123138,0.9157407407407407,0.8287037037037037 144 | 142,0.232986718416214,0.9092592592592592,0.837037037037037 145 | 143,0.23815898597240448,0.9064814814814814,0.837037037037037 146 | 144,0.23007720708847046,0.9046296296296297,0.8379629629629629 147 | 145,0.23568540811538696,0.9055555555555556,0.8324074074074074 148 | 146,0.2393442541360855,0.9046296296296297,0.8379629629629629 149 | 147,0.2317357063293457,0.9055555555555556,0.8324074074074074 150 | 148,0.22454415261745453,0.9046296296296297,0.8333333333333334 151 | 149,0.2130281776189804,0.9037037037037037,0.8388888888888889 152 | 150,0.22798867523670197,0.9055555555555556,0.8407407407407408 153 | 151,0.21767346560955048,0.9120370370370371,0.8398148148148148 154 | 152,0.2240350991487503,0.9101851851851852,0.8314814814814815 155 | 153,0.22774533927440643,0.9027777777777778,0.8416666666666667 156 | 154,0.21878725290298462,0.9185185185185185,0.8212962962962963 157 | 155,0.23165832459926605,0.9009259259259259,0.8314814814814815 158 | 156,0.22322607040405273,0.912962962962963,0.8212962962962963 159 | 157,0.22854764759540558,0.9,0.8277777777777777 160 | 158,0.21787315607070923,0.9101851851851852,0.8453703703703703 161 | 159,0.21911856532096863,0.9120370370370371,0.8462962962962963 162 | 160,0.19277137517929077,0.9287037037037037,0.8435185185185186 163 | 161,0.2058122605085373,0.9166666666666666,0.8435185185185186 164 | 162,0.20934177935123444,0.9138888888888889,0.8379629629629629 165 | 163,0.20496654510498047,0.9194444444444444,0.8462962962962963 166 | 164,0.19720396399497986,0.9231481481481482,0.8407407407407408 167 | 165,0.2102314829826355,0.9111111111111111,0.8453703703703703 168 | 166,0.20989833772182465,0.9111111111111111,0.8314814814814815 169 | 167,0.2175193876028061,0.9166666666666666,0.8222222222222222 170 | 168,0.21744385361671448,0.9166666666666666,0.7898148148148149 171 | 169,0.21625050902366638,0.9111111111111111,0.8157407407407408 172 | 170,0.2150181382894516,0.9157407407407407,0.8083333333333333 173 | 171,0.21884602308273315,0.9101851851851852,0.8453703703703703 174 | 172,0.21260276436805725,0.9120370370370371,0.8287037037037037 175 | 173,0.2087576985359192,0.9138888888888889,0.8416666666666667 176 | 174,0.20588476955890656,0.9185185185185185,0.8435185185185186 177 | 
175,0.20597490668296814,0.9259259259259259,0.8305555555555556 178 | 176,0.2220941036939621,0.9166666666666666,0.8462962962962963 179 | 177,0.20993614196777344,0.9194444444444444,0.8398148148148148 180 | 178,0.2005496323108673,0.9212962962962963,0.8425925925925926 181 | 179,0.18859770894050598,0.9287037037037037,0.8361111111111111 182 | 180,0.20412231981754303,0.924074074074074,0.8277777777777777 183 | 181,0.18930815160274506,0.925,0.8231481481481482 184 | 182,0.18809056282043457,0.9287037037037037,0.8462962962962963 185 | 183,0.19300884008407593,0.9259259259259259,0.8435185185185186 186 | 184,0.20749661326408386,0.9203703703703704,0.8398148148148148 187 | 185,0.18927910923957825,0.9287037037037037,0.8527777777777777 188 | 186,0.1916225701570511,0.9222222222222223,0.8462962962962963 189 | 187,0.21453498303890228,0.9111111111111111,0.8416666666666667 190 | 188,0.2188885360956192,0.9037037037037037,0.8490740740740741 191 | 189,0.23531827330589294,0.9046296296296297,0.8416666666666667 192 | 190,0.18396306037902832,0.9324074074074075,0.8398148148148148 193 | 191,0.18573300540447235,0.9296296296296296,0.8453703703703703 194 | 192,0.197417750954628,0.9333333333333333,0.8351851851851851 195 | 193,0.19844329357147217,0.9212962962962963,0.8305555555555556 196 | 194,0.18491019308567047,0.925,0.8324074074074074 197 | 195,0.20667926967144012,0.9138888888888889,0.8453703703703703 198 | 196,0.18537017703056335,0.9277777777777778,0.8462962962962963 199 | 197,0.1811547875404358,0.9333333333333333,0.8472222222222222 200 | 198,0.1780945360660553,0.9314814814814815,0.8453703703703703 201 | 199,0.18994854390621185,0.9203703703703704,0.8324074074074074 202 | 200,0.1766653060913086,0.9333333333333333,0.8490740740740741 203 | 201,0.17800487577915192,0.9324074074074075,0.85 204 | 202,0.19079531729221344,0.924074074074074,0.8527777777777777 205 | 203,0.1881391704082489,0.9314814814814815,0.85 206 | 204,0.19107282161712646,0.9259259259259259,0.850925925925926 207 | 205,0.19704951345920563,0.9203703703703704,0.8537037037037037 208 | 206,0.1653699427843094,0.9416666666666667,0.8407407407407408 209 | 207,0.1965150535106659,0.9166666666666666,0.8407407407407408 210 | 208,0.18887801468372345,0.9212962962962963,0.8444444444444444 211 | 209,0.20309105515480042,0.9194444444444444,0.8490740740740741 212 | 210,0.1875709742307663,0.9259259259259259,0.85 213 | 211,0.2034614235162735,0.912962962962963,0.8379629629629629 214 | 212,0.19418491423130035,0.9212962962962963,0.8481481481481481 215 | 213,0.185178741812706,0.9287037037037037,0.85 216 | 214,0.17194116115570068,0.9324074074074075,0.8407407407407408 217 | 215,0.17239150404930115,0.9416666666666667,0.8490740740740741 218 | 216,0.18039990961551666,0.9333333333333333,0.8444444444444444 219 | 217,0.18017251789569855,0.9259259259259259,0.85 220 | 218,0.17254218459129333,0.9305555555555556,0.8518518518518519 221 | 219,0.17721915245056152,0.9277777777777778,0.8481481481481481 222 | 220,0.1781577467918396,0.9342592592592592,0.85 223 | 221,0.1682037115097046,0.9351851851851852,0.85 224 | 222,0.18228477239608765,0.925,0.8546296296296296 225 | 223,0.17310917377471924,0.9388888888888889,0.85 226 | 224,0.18505451083183289,0.9212962962962963,0.8537037037037037 227 | 225,0.1607399731874466,0.9416666666666667,0.8472222222222222 228 | 226,0.17773668467998505,0.9333333333333333,0.8490740740740741 229 | 227,0.1765921711921692,0.9314814814814815,0.8490740740740741 230 | 228,0.16600294411182404,0.9342592592592592,0.8564814814814815 231 | 
229,0.1667608618736267,0.9342592592592592,0.8240740740740741 232 | 230,0.17506471276283264,0.937037037037037,0.8472222222222222 233 | 231,0.17040209472179413,0.9351851851851852,0.850925925925926 234 | 232,0.17257139086723328,0.9314814814814815,0.8546296296296296 235 | 233,0.17920100688934326,0.9268518518518518,0.8481481481481481 236 | 234,0.17626598477363586,0.9305555555555556,0.8564814814814815 237 | 235,0.16654126346111298,0.9342592592592592,0.85 238 | 236,0.167166605591774,0.937962962962963,0.8518518518518519 239 | 237,0.1715412437915802,0.9287037037037037,0.8518518518518519 240 | 238,0.1624038964509964,0.9425925925925925,0.8592592592592593 241 | 239,0.15986384451389313,0.9407407407407408,0.8527777777777777 242 | 240,0.155182346701622,0.9361111111111111,0.8351851851851851 243 | 241,0.16468524932861328,0.9342592592592592,0.8518518518518519 244 | 242,0.1727854609489441,0.937037037037037,0.8416666666666667 245 | 243,0.16240248084068298,0.9342592592592592,0.8481481481481481 246 | 244,0.17612163722515106,0.9277777777777778,0.8398148148148148 247 | 245,0.165974423289299,0.9398148148148148,0.8574074074074074 248 | 246,0.16396072506904602,0.9453703703703704,0.8416666666666667 249 | 247,0.16292332112789154,0.9398148148148148,0.8592592592592593 250 | 248,0.16922369599342346,0.9314814814814815,0.8425925925925926 251 | 249,0.1597066968679428,0.9425925925925925,0.8564814814814815 252 | 250,0.15511249005794525,0.9462962962962963,0.8574074074074074 253 | 251,0.15420444309711456,0.9407407407407408,0.8546296296296296 254 | 252,0.1564275175333023,0.9398148148148148,0.850925925925926 255 | 253,0.16456153988838196,0.9361111111111111,0.85 256 | 254,0.1583629697561264,0.9388888888888889,0.8564814814814815 257 | 255,0.1726319044828415,0.9222222222222223,0.8555555555555555 258 | 256,0.16598767042160034,0.9351851851851852,0.8481481481481481 259 | 257,0.17545902729034424,0.9398148148148148,0.8453703703703703 260 | 258,0.15624544024467468,0.9388888888888889,0.8194444444444444 261 | 259,0.1848399043083191,0.9166666666666666,0.8101851851851852 262 | 260,0.19912350177764893,0.9212962962962963,0.8009259259259259 263 | 261,0.20191724598407745,0.9185185185185185,0.8453703703703703 264 | 262,0.1848081797361374,0.9416666666666667,0.8398148148148148 265 | 263,0.14771127700805664,0.9435185185185185,0.825 266 | 264,0.15319658815860748,0.937962962962963,0.8546296296296296 267 | 265,0.1522553265094757,0.9444444444444444,0.8564814814814815 268 | 266,0.16808919608592987,0.9388888888888889,0.8583333333333333 269 | 267,0.14255701005458832,0.9537037037037037,0.8546296296296296 270 | 268,0.15172943472862244,0.9462962962962963,0.8574074074074074 271 | 269,0.1584128737449646,0.937037037037037,0.8564814814814815 272 | 270,0.14726559817790985,0.9444444444444444,0.8537037037037037 273 | 271,0.1683364063501358,0.9342592592592592,0.8546296296296296 274 | 272,0.14865688979625702,0.9453703703703704,0.8537037037037037 275 | 273,0.1426052749156952,0.9407407407407408,0.8592592592592593 276 | 274,0.14366662502288818,0.9490740740740741,0.8592592592592593 277 | 275,0.15645837783813477,0.9425925925925925,0.8583333333333333 278 | 276,0.14698803424835205,0.9435185185185185,0.8574074074074074 279 | 277,0.15961667895317078,0.9453703703703704,0.8601851851851852 280 | 278,0.15874797105789185,0.9342592592592592,0.8490740740740741 281 | 279,0.15057389438152313,0.9453703703703704,0.850925925925926 282 | 280,0.1507430523633957,0.9407407407407408,0.8583333333333333 283 | 281,0.15310607850551605,0.9398148148148148,0.8518518518518519 284 | 
282,0.1385527104139328,0.9462962962962963,0.8629629629629629 285 | 283,0.14320358633995056,0.95,0.8574074074074074 286 | 284,0.15240508317947388,0.9351851851851852,0.8583333333333333 287 | 285,0.13493762910366058,0.9518518518518518,0.8546296296296296 288 | 286,0.14310584962368011,0.9453703703703704,0.8574074074074074 289 | 287,0.13836300373077393,0.9416666666666667,0.8564814814814815 290 | 288,0.1329374760389328,0.9537037037037037,0.8546296296296296 291 | 289,0.14988984167575836,0.9472222222222222,0.8574074074074074 292 | 290,0.16850890219211578,0.9268518518518518,0.8518518518518519 293 | 291,0.16260406374931335,0.9425925925925925,0.8240740740740741 294 | 292,0.1527482271194458,0.9435185185185185,0.8564814814814815 295 | 293,0.15793712437152863,0.9333333333333333,0.8546296296296296 296 | 294,0.13270600140094757,0.9583333333333334,0.8611111111111112 297 | 295,0.14179672300815582,0.9472222222222222,0.8601851851851852 298 | 296,0.14095890522003174,0.9490740740740741,0.8518518518518519 299 | 297,0.14417582750320435,0.9407407407407408,0.8398148148148148 300 | 298,0.1309247463941574,0.9527777777777777,0.8574074074074074 301 | 299,0.1501646339893341,0.9416666666666667,0.8601851851851852 302 | 300,0.13693849742412567,0.95,0.8601851851851852 303 | 301,0.16273446381092072,0.937037037037037,0.8518518518518519 304 | 302,0.14454813301563263,0.95,0.8425925925925926 305 | 303,0.13727176189422607,0.9444444444444444,0.8611111111111112 306 | 304,0.1400345414876938,0.95,0.8537037037037037 307 | 305,0.1423521190881729,0.9490740740740741,0.8444444444444444 308 | 306,0.14938272535800934,0.9518518518518518,0.850925925925926 309 | 307,0.13862977921962738,0.9490740740740741,0.8379629629629629 310 | 308,0.15224118530750275,0.9398148148148148,0.8527777777777777 311 | 309,0.1449757069349289,0.9435185185185185,0.8537037037037037 312 | 310,0.13127708435058594,0.950925925925926,0.8611111111111112 313 | 311,0.13426868617534637,0.95,0.8592592592592593 314 | 312,0.1325986236333847,0.9537037037037037,0.8435185185185186 315 | 313,0.13009850680828094,0.950925925925926,0.8555555555555555 316 | 314,0.13911613821983337,0.9518518518518518,0.8537037037037037 317 | 315,0.14629481732845306,0.9351851851851852,0.8490740740740741 318 | 316,0.15619193017482758,0.9388888888888889,0.8407407407407408 319 | 317,0.15414755046367645,0.9425925925925925,0.8472222222222222 320 | 318,0.13549835979938507,0.9462962962962963,0.8453703703703703 321 | 319,0.14359743893146515,0.950925925925926,0.8564814814814815 322 | 320,0.1394113451242447,0.9462962962962963,0.8379629629629629 323 | 321,0.14155001938343048,0.9462962962962963,0.8648148148148148 324 | 322,0.13717623054981232,0.9490740740740741,0.85 325 | 323,0.14195895195007324,0.9546296296296296,0.850925925925926 326 | 324,0.14484654366970062,0.9407407407407408,0.8555555555555555 327 | 325,0.14951951801776886,0.9462962962962963,0.850925925925926 328 | 326,0.13361665606498718,0.9490740740740741,0.8601851851851852 329 | 327,0.13853110373020172,0.9481481481481482,0.8583333333333333 330 | 328,0.1361744999885559,0.9564814814814815,0.8574074074074074 331 | 329,0.12546145915985107,0.95,0.8564814814814815 332 | 330,0.131302148103714,0.95,0.8574074074074074 333 | 331,0.12962086498737335,0.9574074074074074,0.850925925925926 334 | 332,0.1278376579284668,0.9546296296296296,0.8611111111111112 335 | 333,0.12194225192070007,0.9592592592592593,0.8546296296296296 336 | 334,0.13830852508544922,0.95,0.8611111111111112 337 | 335,0.13572891056537628,0.9453703703703704,0.8527777777777777 338 | 
336,0.13919974863529205,0.9490740740740741,0.8629629629629629 339 | 337,0.13852402567863464,0.950925925925926,0.8555555555555555 340 | 338,0.14494799077510834,0.9333333333333333,0.8555555555555555 341 | 339,0.13559526205062866,0.9472222222222222,0.8574074074074074 342 | 340,0.14112623035907745,0.950925925925926,0.8546296296296296 343 | 341,0.1366184949874878,0.9472222222222222,0.8546296296296296 344 | 342,0.13432450592517853,0.9453703703703704,0.837037037037037 345 | 343,0.13025718927383423,0.9481481481481482,0.85 346 | 344,0.16298535466194153,0.9324074074074075,0.8527777777777777 347 | 345,0.13223238289356232,0.9518518518518518,0.8583333333333333 348 | 346,0.1469714492559433,0.9398148148148148,0.8564814814814815 349 | 347,0.12410606443881989,0.9564814814814815,0.8611111111111112 350 | 348,0.12366718798875809,0.9518518518518518,0.8564814814814815 351 | 349,0.13097527623176575,0.950925925925926,0.8546296296296296 352 | 350,0.112483449280262,0.9638888888888889,0.8462962962962963 353 | 351,0.14620600640773773,0.937962962962963,0.8611111111111112 354 | 352,0.12438546866178513,0.9555555555555556,0.8675925925925926 355 | 353,0.1258891224861145,0.9527777777777777,0.8462962962962963 356 | 354,0.1411053091287613,0.9444444444444444,0.8351851851851851 357 | 355,0.1676863729953766,0.9333333333333333,0.7851851851851852 358 | 356,0.1829184591770172,0.9314814814814815,0.8657407407407407 359 | 357,0.14541329443454742,0.9444444444444444,0.8629629629629629 360 | 358,0.12922930717468262,0.9518518518518518,0.862037037037037 361 | 359,0.14043879508972168,0.9490740740740741,0.8583333333333333 362 | 360,0.13045793771743774,0.950925925925926,0.8555555555555555 363 | 361,0.13579759001731873,0.95,0.8638888888888889 364 | 362,0.11806372553110123,0.9583333333333334,0.8583333333333333 365 | 363,0.13162581622600555,0.95,0.8611111111111112 366 | 364,0.12252572923898697,0.9555555555555556,0.8601851851851852 367 | 365,0.12724587321281433,0.9555555555555556,0.8518518518518519 368 | 366,0.11277943104505539,0.9629629629629629,0.8555555555555555 369 | 367,0.12251544743776321,0.9537037037037037,0.8527777777777777 370 | 368,0.12253587692975998,0.950925925925926,0.8648148148148148 371 | 369,0.12688449025154114,0.9583333333333334,0.8564814814814815 372 | 370,0.1227349042892456,0.95,0.862037037037037 373 | 371,0.13174623250961304,0.9472222222222222,0.8574074074074074 374 | 372,0.129847913980484,0.9472222222222222,0.862037037037037 375 | 373,0.13253886997699738,0.9490740740740741,0.8472222222222222 376 | 374,0.11628419160842896,0.9527777777777777,0.8583333333333333 377 | 375,0.1344849169254303,0.9444444444444444,0.8564814814814815 378 | 376,0.12054882943630219,0.9583333333333334,0.8666666666666667 379 | 377,0.11879115551710129,0.950925925925926,0.8657407407407407 380 | 378,0.12129388004541397,0.950925925925926,0.8629629629629629 381 | 379,0.11992837488651276,0.9518518518518518,0.8592592592592593 382 | 380,0.13545255362987518,0.9481481481481482,0.8601851851851852 383 | 381,0.13421443104743958,0.9481481481481482,0.8611111111111112 384 | 382,0.14050795137882233,0.9490740740740741,0.8648148148148148 385 | 383,0.1305614411830902,0.9490740740740741,0.8537037037037037 386 | 384,0.1154954731464386,0.95,0.8657407407407407 387 | 385,0.12341305613517761,0.9537037037037037,0.8574074074074074 388 | 386,0.11406518518924713,0.9574074074074074,0.8527777777777777 389 | 387,0.11041557043790817,0.9564814814814815,0.8657407407407407 390 | 388,0.11753354966640472,0.9583333333333334,0.862037037037037 391 | 
389,0.12084497511386871,0.9555555555555556,0.8490740740740741 392 | 390,0.11958601325750351,0.9537037037037037,0.862037037037037 393 | 391,0.12390480190515518,0.950925925925926,0.862037037037037 394 | 392,0.13174450397491455,0.9537037037037037,0.862037037037037 395 | 393,0.11786043643951416,0.9527777777777777,0.8657407407407407 396 | 394,0.1323070079088211,0.9444444444444444,0.8592592592592593 397 | 395,0.10848717391490936,0.9666666666666667,0.8675925925925926 398 | 396,0.11105215549468994,0.9527777777777777,0.8611111111111112 399 | 397,0.13057613372802734,0.9481481481481482,0.8546296296296296 400 | 398,0.12051516026258469,0.9574074074074074,0.8416666666666667 401 | 399,0.12505346536636353,0.9490740740740741,0.8583333333333333 402 | 400,0.11279527097940445,0.9527777777777777,0.8583333333333333 403 | 401,0.11312678456306458,0.9601851851851851,0.8592592592592593 404 | 402,0.12320884317159653,0.9527777777777777,0.862037037037037 405 | 403,0.12097392976284027,0.9592592592592593,0.862037037037037 406 | 404,0.13236159086227417,0.9518518518518518,0.8648148148148148 407 | 405,0.11774387955665588,0.9564814814814815,0.8638888888888889 408 | 406,0.1118377298116684,0.9527777777777777,0.8546296296296296 409 | 407,0.11138325929641724,0.9537037037037037,0.850925925925926 410 | 408,0.12722736597061157,0.950925925925926,0.8592592592592593 411 | 409,0.12754668295383453,0.95,0.8583333333333333 412 | 410,0.13855718076229095,0.9388888888888889,0.8592592592592593 413 | 411,0.1419697403907776,0.9472222222222222,0.8518518518518519 414 | 412,0.13533851504325867,0.9416666666666667,0.837037037037037 415 | 413,0.1397697776556015,0.9407407407407408,0.8527777777777777 416 | 414,0.14585085213184357,0.937962962962963,0.8111111111111111 417 | 415,0.14289624989032745,0.9342592592592592,0.8583333333333333 418 | 416,0.12285402417182922,0.9518518518518518,0.8574074074074074 419 | 417,0.12848415970802307,0.9518518518518518,0.8583333333333333 420 | 418,0.12212556600570679,0.95,0.8611111111111112 421 | 419,0.1021375060081482,0.962037037037037,0.8629629629629629 422 | 420,0.10990149527788162,0.9629629629629629,0.8675925925925926 423 | 421,0.11161751300096512,0.9555555555555556,0.8527777777777777 424 | 422,0.11486812680959702,0.9518518518518518,0.8648148148148148 425 | 423,0.09973617643117905,0.9675925925925926,0.8518518518518519 426 | 424,0.10418110340833664,0.9601851851851851,0.8657407407407407 427 | 425,0.10589820146560669,0.9601851851851851,0.8611111111111112 428 | 426,0.09929278492927551,0.9694444444444444,0.8462962962962963 429 | 427,0.1011260375380516,0.9629629629629629,0.8518518518518519 430 | 428,0.12313434481620789,0.9518518518518518,0.8583333333333333 431 | 429,0.12278831005096436,0.9555555555555556,0.8546296296296296 432 | 430,0.11699932813644409,0.9629629629629629,0.8629629629629629 433 | 431,0.13203193247318268,0.9564814814814815,0.850925925925926 434 | 432,0.12273290008306503,0.9490740740740741,0.862037037037037 435 | 433,0.13266900181770325,0.9481481481481482,0.8611111111111112 436 | 434,0.13324660062789917,0.9416666666666667,0.8648148148148148 437 | 435,0.12242522090673447,0.9518518518518518,0.8555555555555555 438 | 436,0.11882811784744263,0.9518518518518518,0.850925925925926 439 | 437,0.1292412132024765,0.9481481481481482,0.8555555555555555 440 | 438,0.11678320169448853,0.9546296296296296,0.8601851851851852 441 | 439,0.10701493173837662,0.9648148148148148,0.8629629629629629 442 | 440,0.10830487310886383,0.962037037037037,0.8629629629629629 443 | 441,0.11848733574151993,0.9518518518518518,0.862037037037037 444 | 
442,0.12216274440288544,0.9537037037037037,0.8611111111111112 445 | 443,0.11190824955701828,0.9601851851851851,0.8629629629629629 446 | 444,0.11415252089500427,0.9592592592592593,0.8601851851851852 447 | 445,0.12290900945663452,0.950925925925926,0.8657407407407407 448 | 446,0.0977964922785759,0.9648148148148148,0.8629629629629629 449 | 447,0.10254824161529541,0.9574074074074074,0.8638888888888889 450 | 448,0.11030251532793045,0.962037037037037,0.8601851851851852 451 | 449,0.11830930411815643,0.9583333333333334,0.862037037037037 452 | 450,0.08676312863826752,0.9703703703703703,0.862037037037037 453 | 451,0.10797034204006195,0.9629629629629629,0.8666666666666667 454 | 452,0.12093176692724228,0.9527777777777777,0.8583333333333333 455 | 453,0.12955118715763092,0.9462962962962963,0.8611111111111112 456 | 454,0.12572984397411346,0.9490740740740741,0.8657407407407407 457 | 455,0.13784092664718628,0.9462962962962963,0.8574074074074074 458 | 456,0.1397477090358734,0.9388888888888889,0.8666666666666667 459 | 457,0.12783773243427277,0.9527777777777777,0.8657407407407407 460 | 458,0.10179141908884048,0.9592592592592593,0.8638888888888889 461 | 459,0.11285310238599777,0.9527777777777777,0.8555555555555555 462 | 460,0.10623056441545486,0.9601851851851851,0.8685185185185185 463 | 461,0.1164741963148117,0.950925925925926,0.862037037037037 464 | 462,0.10590857267379761,0.9574074074074074,0.8583333333333333 465 | 463,0.10231228917837143,0.9657407407407408,0.8629629629629629 466 | 464,0.11445175111293793,0.9592592592592593,0.8648148148148148 467 | 465,0.09266277402639389,0.9675925925925926,0.8675925925925926 468 | 466,0.11892064660787582,0.9555555555555556,0.8583333333333333 469 | 467,0.12134437263011932,0.9527777777777777,0.8537037037037037 470 | 468,0.11016444116830826,0.9564814814814815,0.8601851851851852 471 | 469,0.1102924644947052,0.9611111111111111,0.850925925925926 472 | 470,0.11056608706712723,0.9592592592592593,0.8583333333333333 473 | 471,0.10938083380460739,0.9601851851851851,0.8601851851851852 474 | 472,0.10466975718736649,0.9555555555555556,0.8555555555555555 475 | 473,0.11060065031051636,0.962037037037037,0.862037037037037 476 | 474,0.112749844789505,0.9592592592592593,0.8481481481481481 477 | 475,0.12113058567047119,0.9481481481481482,0.8592592592592593 478 | 476,0.1107122153043747,0.9601851851851851,0.8537037037037037 479 | 477,0.10854177922010422,0.9601851851851851,0.8537037037037037 480 | 478,0.10417848080396652,0.9657407407407408,0.8629629629629629 481 | 479,0.10902412235736847,0.9592592592592593,0.8638888888888889 482 | 480,0.12317732721567154,0.9490740740740741,0.8601851851851852 483 | 481,0.102236807346344,0.9657407407407408,0.8638888888888889 484 | 482,0.10950940102338791,0.9629629629629629,0.8648148148148148 485 | 483,0.09561044722795486,0.9629629629629629,0.8601851851851852 486 | 484,0.09750295430421829,0.9574074074074074,0.8629629629629629 487 | 485,0.09869469702243805,0.9611111111111111,0.8685185185185185 488 | 486,0.11172265559434891,0.9518518518518518,0.8638888888888889 489 | 487,0.09271328896284103,0.9611111111111111,0.8546296296296296 490 | 488,0.09306375682353973,0.9666666666666667,0.8574074074074074 491 | 489,0.11751456558704376,0.9555555555555556,0.862037037037037 492 | 490,0.11208881437778473,0.9555555555555556,0.8592592592592593 493 | 491,0.10345029085874557,0.9629629629629629,0.8703703703703703 494 | 492,0.09601636976003647,0.9611111111111111,0.8564814814814815 495 | 493,0.10665726661682129,0.9666666666666667,0.8675925925925926 496 | 
494,0.10334411263465881,0.9638888888888889,0.8703703703703703 497 | 495,0.10876889526844025,0.962037037037037,0.8481481481481481 498 | 496,0.12084869295358658,0.950925925925926,0.8611111111111112 499 | 497,0.10877383500337601,0.9657407407407408,0.8555555555555555 500 | 498,0.09825059026479721,0.9629629629629629,0.8666666666666667 501 | 499,0.10241367667913437,0.9583333333333334,0.8629629629629629 502 | 500,0.11395972967147827,0.9537037037037037,0.85 503 | 501,0.11688879877328873,0.9574074074074074,0.8518518518518519 504 | 502,0.10358824580907822,0.9657407407407408,0.8342592592592593 505 | 503,0.10832808166742325,0.9611111111111111,0.8601851851851852 506 | 504,0.10063128918409348,0.9657407407407408,0.8425925925925926 507 | 505,0.12306487560272217,0.9518518518518518,0.8611111111111112 508 | 506,0.10222060233354568,0.9592592592592593,0.8435185185185186 509 | 507,0.09493324160575867,0.9611111111111111,0.8648148148148148 510 | 508,0.09471865743398666,0.9675925925925926,0.8657407407407407 511 | 509,0.0981200560927391,0.9675925925925926,0.8592592592592593 512 | 510,0.09797297418117523,0.9666666666666667,0.8685185185185185 513 | 511,0.09313032776117325,0.9694444444444444,0.8453703703703703 514 | 512,0.10769259184598923,0.9592592592592593,0.8666666666666667 515 | 513,0.08350273221731186,0.9731481481481481,0.8629629629629629 516 | 514,0.10489608347415924,0.9574074074074074,0.8675925925925926 517 | 515,0.0903487354516983,0.9638888888888889,0.8666666666666667 518 | 516,0.09285685420036316,0.9666666666666667,0.8666666666666667 519 | 517,0.08373381942510605,0.9657407407407408,0.8666666666666667 520 | 518,0.08961795270442963,0.9722222222222222,0.8731481481481481 521 | 519,0.09900297224521637,0.962037037037037,0.862037037037037 522 | 520,0.09799535572528839,0.9648148148148148,0.8657407407407407 523 | 521,0.09448060393333435,0.9629629629629629,0.862037037037037 524 | 522,0.09163752943277359,0.9648148148148148,0.8629629629629629 525 | 523,0.10444007068872452,0.9685185185185186,0.8583333333333333 526 | 524,0.10396350920200348,0.9611111111111111,0.8638888888888889 527 | 525,0.08792926371097565,0.9796296296296296,0.8583333333333333 528 | 526,0.09448140859603882,0.9657407407407408,0.8546296296296296 529 | 527,0.09115730971097946,0.9703703703703703,0.8601851851851852 530 | 528,0.10091491043567657,0.9601851851851851,0.8592592592592593 531 | 529,0.09318847954273224,0.9694444444444444,0.8638888888888889 532 | 530,0.09237018972635269,0.9638888888888889,0.8592592592592593 533 | 531,0.09890947490930557,0.9629629629629629,0.8685185185185185 534 | 532,0.08623593300580978,0.9703703703703703,0.8564814814814815 535 | 533,0.09142079949378967,0.9648148148148148,0.8648148148148148 536 | 534,0.10190389305353165,0.962037037037037,0.8648148148148148 537 | 535,0.10827189683914185,0.9648148148148148,0.8574074074074074 538 | 536,0.08630447089672089,0.9694444444444444,0.8518518518518519 539 | 537,0.10537933558225632,0.9675925925925926,0.8490740740740741 540 | 538,0.09077509492635727,0.9666666666666667,0.8435185185185186 541 | 539,0.10662196576595306,0.9611111111111111,0.8638888888888889 542 | 540,0.08943893760442734,0.9712962962962963,0.862037037037037 543 | 541,0.08991844952106476,0.9712962962962963,0.8592592592592593 544 | 542,0.10675680637359619,0.9638888888888889,0.8703703703703703 545 | 543,0.09353562444448471,0.9638888888888889,0.8611111111111112 546 | 544,0.10028490424156189,0.9629629629629629,0.8694444444444445 547 | 545,0.10230277478694916,0.9601851851851851,0.8648148148148148 548 | 
546,0.10575748234987259,0.9592592592592593,0.8629629629629629 549 | 547,0.10578607767820358,0.9574074074074074,0.8629629629629629 550 | 548,0.11412844806909561,0.9490740740740741,0.8453703703703703 551 | 549,0.09433204680681229,0.9703703703703703,0.8601851851851852 552 | 550,0.09731759130954742,0.9694444444444444,0.8666666666666667 553 | 551,0.10557027161121368,0.9574074074074074,0.8675925925925926 554 | 552,0.10134103894233704,0.9601851851851851,0.8546296296296296 555 | 553,0.09656666964292526,0.9657407407407408,0.8601851851851852 556 | 554,0.113289974629879,0.9564814814814815,0.8583333333333333 557 | 555,0.0950891301035881,0.9703703703703703,0.8675925925925926 558 | 556,0.10128410160541534,0.9592592592592593,0.8564814814814815 559 | 557,0.10021726787090302,0.9629629629629629,0.8601851851851852 560 | 558,0.10055605322122574,0.9574074074074074,0.8638888888888889 561 | 559,0.08996472507715225,0.9703703703703703,0.8703703703703703 562 | 560,0.08457841724157333,0.975925925925926,0.8657407407407407 563 | 561,0.07743049412965775,0.9768518518518519,0.8666666666666667 564 | 562,0.09855592250823975,0.9629629629629629,0.8638888888888889 565 | 563,0.09920358657836914,0.9685185185185186,0.8666666666666667 566 | 564,0.09537289291620255,0.9712962962962963,0.8611111111111112 567 | 565,0.09591943025588989,0.9592592592592593,0.8583333333333333 568 | 566,0.0984303280711174,0.9666666666666667,0.8629629629629629 569 | 567,0.10589936375617981,0.9518518518518518,0.8657407407407407 570 | 568,0.08471477031707764,0.9740740740740741,0.8666666666666667 571 | 569,0.08438795059919357,0.9666666666666667,0.862037037037037 572 | 570,0.08424607664346695,0.9675925925925926,0.8666666666666667 573 | 571,0.08602017909288406,0.9685185185185186,0.8648148148148148 574 | 572,0.10057295113801956,0.9675925925925926,0.8648148148148148 575 | 573,0.08046669512987137,0.9694444444444444,0.8611111111111112 576 | 574,0.0764271467924118,0.9685185185185186,0.862037037037037 577 | 575,0.0846107080578804,0.9666666666666667,0.8666666666666667 578 | 576,0.08409009873867035,0.9685185185185186,0.8611111111111112 579 | 577,0.08929187059402466,0.9675925925925926,0.8574074074074074 580 | 578,0.10466881841421127,0.9601851851851851,0.8592592592592593 581 | 579,0.09473089128732681,0.9611111111111111,0.862037037037037 582 | 580,0.09594804793596268,0.9648148148148148,0.8574074074074074 583 | 581,0.08871471881866455,0.9675925925925926,0.8675925925925926 584 | 582,0.09732286632061005,0.9685185185185186,0.8611111111111112 585 | 583,0.09338735789060593,0.9666666666666667,0.8537037037037037 586 | 584,0.09657648205757141,0.9629629629629629,0.8537037037037037 587 | 585,0.09981600195169449,0.9601851851851851,0.8694444444444445 588 | 586,0.10261186957359314,0.9638888888888889,0.8657407407407407 589 | 587,0.09459999203681946,0.962037037037037,0.8722222222222222 590 | 588,0.10435451567173004,0.9574074074074074,0.8638888888888889 591 | 589,0.09794095903635025,0.9611111111111111,0.8694444444444445 592 | 590,0.0965161994099617,0.9601851851851851,0.8694444444444445 593 | 591,0.10016480088233948,0.9564814814814815,0.8712962962962963 594 | 592,0.09880887717008591,0.9592592592592593,0.862037037037037 595 | 593,0.09003119170665741,0.9657407407407408,0.8666666666666667 596 | 594,0.10064826905727386,0.9592592592592593,0.8648148148148148 597 | 595,0.07761812210083008,0.975,0.8657407407407407 598 | 596,0.09457039833068848,0.9638888888888889,0.8537037037037037 599 | 597,0.09584823995828629,0.962037037037037,0.8648148148148148 600 | 
598,0.08434782922267914,0.9666666666666667,0.8685185185185185 601 | 599,0.09213703870773315,0.9648148148148148,0.8574074074074074 602 | 600,0.0812160074710846,0.9731481481481481,0.8564814814814815 603 | 601,0.09430316090583801,0.9629629629629629,0.8601851851851852 604 | 602,0.09329681098461151,0.9629629629629629,0.8592592592592593 605 | 603,0.10737383365631104,0.9592592592592593,0.8685185185185185 606 | 604,0.09422678500413895,0.9638888888888889,0.8555555555555555 607 | 605,0.09647992998361588,0.9657407407407408,0.8527777777777777 608 | 606,0.10060632228851318,0.9629629629629629,0.8592592592592593 609 | 607,0.08435221761465073,0.9731481481481481,0.8611111111111112 610 | 608,0.08595431596040726,0.9703703703703703,0.8685185185185185 611 | 609,0.0861276239156723,0.9712962962962963,0.8694444444444445 612 | 610,0.08572821319103241,0.9712962962962963,0.8731481481481481 613 | 611,0.09558092057704926,0.9629629629629629,0.8601851851851852 614 | 612,0.07648874074220657,0.9731481481481481,0.8666666666666667 615 | 613,0.0892016813158989,0.9638888888888889,0.8601851851851852 616 | 614,0.08018401265144348,0.975925925925926,0.8694444444444445 617 | 615,0.0837501510977745,0.9703703703703703,0.8629629629629629 618 | 616,0.10220848023891449,0.9648148148148148,0.8611111111111112 619 | 617,0.08256380259990692,0.9648148148148148,0.8638888888888889 620 | 618,0.10338643193244934,0.9638888888888889,0.8611111111111112 621 | 619,0.11133868247270584,0.9601851851851851,0.850925925925926 622 | 620,0.10190019011497498,0.9601851851851851,0.8583333333333333 623 | 621,0.09475860744714737,0.9611111111111111,0.8712962962962963 624 | 622,0.09062600135803223,0.9657407407407408,0.8657407407407407 625 | 623,0.09294939786195755,0.9666666666666667,0.8629629629629629 626 | 624,0.10020322352647781,0.9638888888888889,0.8694444444444445 627 | 625,0.08144848793745041,0.9638888888888889,0.8648148148148148 628 | 626,0.08539355546236038,0.9675925925925926,0.8648148148148148 629 | 627,0.08453390747308731,0.9722222222222222,0.8666666666666667 630 | 628,0.08287182450294495,0.9722222222222222,0.8657407407407407 631 | 629,0.07632241398096085,0.9731481481481481,0.8657407407407407 632 | 630,0.07849885523319244,0.9722222222222222,0.8592592592592593 633 | 631,0.08822452276945114,0.9675925925925926,0.8657407407407407 634 | 632,0.07844550162553787,0.9694444444444444,0.862037037037037 635 | 633,0.08183050155639648,0.9675925925925926,0.8601851851851852 636 | 634,0.09709767997264862,0.962037037037037,0.8481481481481481 637 | 635,0.0790596529841423,0.9722222222222222,0.8611111111111112 638 | 636,0.08800908178091049,0.9648148148148148,0.862037037037037 639 | 637,0.07968788594007492,0.9685185185185186,0.8629629629629629 640 | 638,0.08258385956287384,0.9675925925925926,0.8657407407407407 641 | 639,0.09116953611373901,0.9666666666666667,0.8685185185185185 642 | 640,0.08704110234975815,0.962037037037037,0.862037037037037 643 | 641,0.08972645550966263,0.9629629629629629,0.8592592592592593 644 | 642,0.09132323414087296,0.9657407407407408,0.8675925925925926 645 | 643,0.09770692139863968,0.9666666666666667,0.8666666666666667 646 | 644,0.08456085622310638,0.975,0.8537037037037037 647 | 645,0.08476415276527405,0.9768518518518519,0.8694444444444445 648 | 646,0.08283117413520813,0.9694444444444444,0.8685185185185185 649 | 647,0.08352808654308319,0.9675925925925926,0.8694444444444445 650 | 648,0.07747632265090942,0.9731481481481481,0.8601851851851852 651 | 649,0.0783202052116394,0.9694444444444444,0.8592592592592593 652 | 
650,0.07111023366451263,0.975925925925926,0.8638888888888889 653 | 651,0.08154821395874023,0.975,0.8629629629629629 654 | 652,0.07776615023612976,0.9731481481481481,0.8685185185185185 655 | 653,0.06440390646457672,0.9796296296296296,0.8722222222222222 656 | 654,0.08873050659894943,0.9638888888888889,0.8685185185185185 657 | 655,0.07228326052427292,0.975925925925926,0.8638888888888889 658 | 656,0.07206534594297409,0.9712962962962963,0.8638888888888889 659 | 657,0.09284087270498276,0.9731481481481481,0.8629629629629629 660 | 658,0.09461924433708191,0.9592592592592593,0.8722222222222222 661 | 659,0.08582601696252823,0.9712962962962963,0.8694444444444445 662 | 660,0.06943730264902115,0.9768518518518519,0.8546296296296296 663 | 661,0.08094879239797592,0.9722222222222222,0.8638888888888889 664 | 662,0.07415038347244263,0.9722222222222222,0.8648148148148148 665 | 663,0.07471994310617447,0.9740740740740741,0.8648148148148148 666 | 664,0.08422443270683289,0.9629629629629629,0.8546296296296296 667 | 665,0.07709168642759323,0.975925925925926,0.8537037037037037 668 | 666,0.09011992812156677,0.9648148148148148,0.8694444444444445 669 | 667,0.09813710302114487,0.9629629629629629,0.8694444444444445 670 | 668,0.08744128048419952,0.962037037037037,0.8574074074074074 671 | 669,0.09508227556943893,0.9629629629629629,0.8592592592592593 672 | 670,0.09802672266960144,0.9638888888888889,0.8564814814814815 673 | 671,0.09593456238508224,0.9583333333333334,0.8703703703703703 674 | 672,0.08941193670034409,0.9629629629629629,0.8675925925925926 675 | 673,0.09489279240369797,0.9638888888888889,0.8629629629629629 676 | 674,0.0767735093832016,0.9731481481481481,0.8722222222222222 677 | 675,0.08026467263698578,0.9685185185185186,0.8564814814814815 678 | 676,0.07363684475421906,0.9805555555555555,0.8629629629629629 679 | 677,0.059705063700675964,0.9814814814814815,0.862037037037037 680 | 678,0.07626799494028091,0.9722222222222222,0.8537037037037037 681 | 679,0.07820790261030197,0.9694444444444444,0.8666666666666667 682 | 680,0.07406459748744965,0.975925925925926,0.8592592592592593 683 | 681,0.09560428559780121,0.9601851851851851,0.8629629629629629 684 | 682,0.08110789209604263,0.9685185185185186,0.8722222222222222 685 | 683,0.07566837221384048,0.9731481481481481,0.8685185185185185 686 | 684,0.08860719203948975,0.9694444444444444,0.8611111111111112 687 | 685,0.07357017695903778,0.975925925925926,0.8583333333333333 688 | 686,0.07078483700752258,0.9805555555555555,0.8722222222222222 689 | 687,0.08317459374666214,0.9731481481481481,0.8685185185185185 690 | 688,0.08205562084913254,0.9694444444444444,0.8527777777777777 691 | 689,0.07619346678256989,0.9731481481481481,0.8666666666666667 692 | 690,0.0759160965681076,0.9722222222222222,0.8731481481481481 693 | 691,0.0729844868183136,0.975,0.8555555555555555 694 | 692,0.0771721601486206,0.9722222222222222,0.8629629629629629 695 | 693,0.075386643409729,0.9712962962962963,0.8611111111111112 696 | 694,0.0646684467792511,0.9777777777777777,0.862037037037037 697 | 695,0.07732091844081879,0.975,0.8694444444444445 698 | 696,0.06751666963100433,0.975925925925926,0.8712962962962963 699 | 697,0.07120567560195923,0.9740740740740741,0.8685185185185185 700 | 698,0.07814991474151611,0.9722222222222222,0.8685185185185185 701 | 699,0.05337807163596153,0.9879629629629629,0.8694444444444445 702 | 700,0.07545655220746994,0.9675925925925926,0.8685185185185185 703 | 701,0.08445124328136444,0.9685185185185186,0.8601851851851852 704 | 702,0.07218294590711594,0.9694444444444444,0.8657407407407407 705 | 
703,0.06958746165037155,0.9777777777777777,0.8638888888888889 706 | 704,0.07208754122257233,0.975925925925926,0.862037037037037 707 | 705,0.0738409012556076,0.975925925925926,0.8657407407407407 708 | 706,0.061326537281274796,0.9851851851851852,0.8666666666666667 709 | 707,0.06015891954302788,0.9842592592592593,0.8722222222222222 710 | 708,0.06874881684780121,0.975925925925926,0.8694444444444445 711 | 709,0.06630825996398926,0.9777777777777777,0.8694444444444445 712 | 710,0.06883778423070908,0.9740740740740741,0.8638888888888889 713 | 711,0.07030073553323746,0.9768518518518519,0.8685185185185185 714 | 712,0.06337527185678482,0.9796296296296296,0.8722222222222222 715 | 713,0.0805354118347168,0.9703703703703703,0.8703703703703703 716 | 714,0.07289706915616989,0.9796296296296296,0.8657407407407407 717 | 715,0.07005735486745834,0.975,0.8601851851851852 718 | 716,0.06536449491977692,0.9805555555555555,0.862037037037037 719 | 717,0.08385860919952393,0.9740740740740741,0.8675925925925926 720 | 718,0.0838431566953659,0.9694444444444444,0.8648148148148148 721 | 719,0.08465926349163055,0.9694444444444444,0.8731481481481481 722 | 720,0.07179785519838333,0.9731481481481481,0.8675925925925926 723 | 721,0.07855840772390366,0.9722222222222222,0.8712962962962963 724 | 722,0.08460445702075958,0.9675925925925926,0.8666666666666667 725 | 723,0.07177189737558365,0.9740740740740741,0.8648148148148148 726 | 724,0.07231292873620987,0.9703703703703703,0.8657407407407407 727 | 725,0.075795978307724,0.9685185185185186,0.8722222222222222 728 | 726,0.06529821455478668,0.9712962962962963,0.8675925925925926 729 | 727,0.07680589705705643,0.9685185185185186,0.8675925925925926 730 | 728,0.08495554327964783,0.9712962962962963,0.8629629629629629 731 | 729,0.07708315551280975,0.9712962962962963,0.8574074074074074 732 | 730,0.06673111766576767,0.9777777777777777,0.8546296296296296 733 | 731,0.0831020250916481,0.9648148148148148,0.8648148148148148 734 | 732,0.08250080049037933,0.9675925925925926,0.8546296296296296 735 | 733,0.08671463280916214,0.9685185185185186,0.8592592592592593 736 | 734,0.11004209518432617,0.95,0.8675925925925926 737 | 735,0.09682231396436691,0.9611111111111111,0.8666666666666667 738 | 736,0.09783380478620529,0.9648148148148148,0.8407407407407408 739 | 737,0.08290181308984756,0.9629629629629629,0.8712962962962963 740 | 738,0.07005114108324051,0.9777777777777777,0.8657407407407407 741 | 739,0.06719884276390076,0.9740740740740741,0.8694444444444445 742 | 740,0.06312999129295349,0.9796296296296296,0.8694444444444445 743 | 741,0.08483496308326721,0.9648148148148148,0.8638888888888889 744 | 742,0.06648781895637512,0.9740740740740741,0.8675925925925926 745 | 743,0.0951148271560669,0.9648148148148148,0.862037037037037 746 | 744,0.10193808376789093,0.9555555555555556,0.8546296296296296 747 | 745,0.0986732766032219,0.9555555555555556,0.8666666666666667 748 | 746,0.0786263719201088,0.9731481481481481,0.8601851851851852 749 | 747,0.0878666415810585,0.9638888888888889,0.8703703703703703 750 | 748,0.06386861205101013,0.9777777777777777,0.8731481481481481 751 | 749,0.07154332101345062,0.9722222222222222,0.8694444444444445 752 | -------------------------------------------------------------------------------- /model_testing.py: --------------------------------------------------------------------------------
from ALL_model import *
from dataloader import read_bci_data
import pandas as pd
import torch
import os


def testing(x_test, y_test, device, model):
    """Return classification accuracy of `model` on the given test set."""
    model.eval()
    with torch.no_grad():
        model.to(device)
        n = x_test.shape[0]

        # Convert the numpy arrays to tensors on the target device.
        # (The deprecated torch.autograd.Variable wrapper is not needed.)
        x_test = torch.from_numpy(x_test.astype("float32")).to(device)
        y_test = torch.from_numpy(y_test.astype("int64").reshape(-1)).to(device)

        y_pred_test = model(x_test)
        # The predicted class is the argmax over the two output logits.
        correct = (torch.max(y_pred_test, 1)[1] == y_test).sum().item()
        return correct / n


if __name__ == "__main__":

    model_list = [EEGNet_ReLU, EEGNet_LeakyReLU, EEGNet_ELU,
                  DeepConvNet_ReLU, DeepConvNet_LeakyReLU, DeepConvNet_ELU]
    model_file_path = ["EEGNet_checkpoint_ReLU.rar", "EEGNet_checkpoint_LeakyReLU.rar",
                       "EEGNet_checkpoint_ELU.rar", "DeepConvNet_checkpoint_ReLU.rar",
                       "DeepConvNet_checkpoint_LeakyReLU.rar", "DeepConvNet_checkpoint_ELU.rar"]

    ReLU_accuracy = []
    LeakyReLU_accuracy = []
    ELU_accuracy = []

    # Fall back to CPU when no GPU is available.
    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
    # The dataset only needs to be read once, not once per model.
    train_data, train_label, test_data, test_label = read_bci_data()

    for i in range(len(model_list)):
        # os.path.join keeps the checkpoint path portable across platforms.
        filepath = os.path.join(os.path.dirname(os.path.abspath(__file__)),
                                "checkpoint", model_file_path[i])

        model = model_list[i](2)
        model.load_state_dict(torch.load(filepath, map_location=device))

        testing_accuracy = testing(test_data, test_label, device, model)

        # "ReLU" is a substring of "LeakyReLU", so test for LeakyReLU first.
        if "LeakyReLU" in model_file_path[i]:
            LeakyReLU_accuracy.append(testing_accuracy)
        elif "ReLU" in model_file_path[i]:
            ReLU_accuracy.append(testing_accuracy)
        else:
            ELU_accuracy.append(testing_accuracy)

    df = pd.DataFrame({"ReLU": ReLU_accuracy,
                       "LeakyReLU": LeakyReLU_accuracy,
                       "ELU": ELU_accuracy},
                      index=["EEGNet", "DeepConvNet"])
    print(df)
--------------------------------------------------------------------------------
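A note on the history CSVs dumped above: each row appears to record one epoch, with columns epoch index, training loss, training accuracy, and test accuracy (the first column counts up monotonically, the second trends downward toward ~0.07, and the last two sit in roughly the 0.80 to 0.98 range). The repository's Plot_History_Result.py presumably visualises these files; the snippet below is only a minimal stand-alone sketch under that assumed column layout, not the repository's own plotting code.

# Minimal sketch, assuming the history CSVs have no header row and use the
# column order epoch, loss, train_acc, test_acc inferred above. Any of the
# six files under history_csv/ works; EEGNet_ReLU.csv is just an example.
import pandas as pd
import matplotlib.pyplot as plt

cols = ["epoch", "loss", "train_acc", "test_acc"]
history = pd.read_csv("history_csv/EEGNet_ReLU.csv", header=None, names=cols)

plt.plot(history["epoch"], history["train_acc"], label="train accuracy")
plt.plot(history["epoch"], history["test_acc"], label="test accuracy")
plt.xlabel("epoch")
plt.ylabel("accuracy")
plt.legend()
plt.show()

model_testing.py itself takes no arguments: torch.load reads the checkpoint/*.rar files directly (despite the extension, they appear to be plain PyTorch state_dict files), and running `python model_testing.py` prints a 2x3 table of test accuracies with rows EEGNet/DeepConvNet and columns ReLU/LeakyReLU/ELU.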