├── .DS_Store ├── Fig1.png ├── Fig2.png ├── Fig3.png ├── Fig4.png ├── Fig5.png ├── Fig6.png ├── FigS1.png ├── FigS2.png ├── FigS3.png ├── FigS4.png ├── FigS5.png ├── FigS6.png ├── FigS7.png ├── FigS8.png ├── FigS9.png ├── IIIC_Classification-Supplemental.docx ├── IIIC_Classification-Supplemental.pdf ├── IIIC_IRR_Commentary.pdf ├── IIIC_SPaRCNet.pdf ├── IIIC_train_0502.py ├── LICENSE.md ├── README.md ├── SPaRCNet ├── Data │ └── Raw │ │ └── sample_cEEG.mat ├── instruction.txt ├── model_1130.pt └── runSPaRCNet.py ├── Table1.png ├── TableS1.png └── code_for_figures ├── Callbacks ├── BC_LUT_v2.mat ├── LOC_18channels.mat ├── brewermap.m ├── energyop.m ├── fcn_EEGpic.m ├── fcn_bcReject.m ├── fcn_bipolar.m ├── fcn_cleaningPipeline.m ├── fcn_computeFeatures_powers_complex_v2.m ├── fcn_cpd.m ├── fcn_fixBadChannel.m ├── fcn_getCM.m ├── fcn_getCali_human.m ├── fcn_getModelVotes.m ├── fcn_getOPs_loo.m ├── fcn_getZeroCrossings.m ├── fcn_get_mIRR_human_loo.m ├── fcn_get_mIRR_model.m ├── fcn_get_pIRR_human.m ├── fcn_get_pIRR_model.m ├── fcn_isBadChannel_v2.m ├── fcn_plotConfusionMx.m ├── fcn_plotEEG.m ├── fcn_plotPR.m └── fcn_plotROC.m ├── Data ├── Figure1 │ └── figure1_input.mat ├── Figure3 │ ├── figure3_input.mat │ └── samples.mat ├── Figure4to6 │ └── samples │ │ ├── GPD_sample1.mat │ │ ├── GPD_sample2.mat │ │ ├── GPD_sample3.mat │ │ ├── GPD_sample4.mat │ │ ├── GRDA_sample1.mat │ │ ├── GRDA_sample2.mat │ │ ├── GRDA_sample3.mat │ │ ├── GRDA_sample4.mat │ │ ├── LPD_sample1.mat │ │ ├── LPD_sample2.mat │ │ ├── LPD_sample3.mat │ │ ├── LPD_sample4.mat │ │ ├── LRDA_sample1.mat │ │ ├── LRDA_sample2.mat │ │ ├── LRDA_sample3.mat │ │ ├── LRDA_sample4.mat │ │ ├── Other_sample1.mat │ │ ├── Other_sample2.mat │ │ ├── Other_sample3.mat │ │ ├── Other_sample4.mat │ │ ├── SZ_sample1.mat │ │ ├── SZ_sample2.mat │ │ ├── SZ_sample3.mat │ │ └── SZ_sample4.mat ├── FigureS2 │ ├── dataset1.mat │ ├── dataset2.mat │ ├── dataset3.mat │ ├── dataset4.mat │ ├── datasetA.mat │ ├── datasetB.mat │ ├── datasetC.mat │ └── datasetD.mat ├── FigureS3 │ └── FigureS3_input.mat ├── FigureS5 │ └── FigureS5_input.mat ├── FigureS8 │ └── FigureS8_input.mat └── Table1 │ ├── dataset1.mat │ ├── dataset2.mat │ ├── dataset3.mat │ ├── dataset4.mat │ └── patient_demo.mat ├── Fig1.png ├── Fig2.png ├── Fig3.png ├── Fig4.png ├── Fig5.png ├── Fig6.png ├── FigS3.png ├── FigS5.png ├── FigS8.png ├── Figure1_ROC.m ├── Figure2_PR.m ├── Figure3_UMAPs.m ├── Figure4_samples_SZ_LPD.m ├── Figure5_samples_GPD_LRDA.m ├── Figure6_samples_GRDA_Other.m ├── FigureS2_Flowchart.m ├── FigureS3_SPspread.m ├── FigureS5_UMAPspread.m ├── FigureS8_IRR.m └── Table1_Splits.m /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/.DS_Store -------------------------------------------------------------------------------- /Fig1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/Fig1.png -------------------------------------------------------------------------------- /Fig2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/Fig2.png -------------------------------------------------------------------------------- /Fig3.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/Fig3.png -------------------------------------------------------------------------------- /Fig4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/Fig4.png -------------------------------------------------------------------------------- /Fig5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/Fig5.png -------------------------------------------------------------------------------- /Fig6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/Fig6.png -------------------------------------------------------------------------------- /FigS1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/FigS1.png -------------------------------------------------------------------------------- /FigS2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/FigS2.png -------------------------------------------------------------------------------- /FigS3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/FigS3.png -------------------------------------------------------------------------------- /FigS4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/FigS4.png -------------------------------------------------------------------------------- /FigS5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/FigS5.png -------------------------------------------------------------------------------- /FigS6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/FigS6.png -------------------------------------------------------------------------------- /FigS7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/FigS7.png -------------------------------------------------------------------------------- /FigS8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/FigS8.png -------------------------------------------------------------------------------- /FigS9.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/FigS9.png -------------------------------------------------------------------------------- /IIIC_Classification-Supplemental.docx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/IIIC_Classification-Supplemental.docx -------------------------------------------------------------------------------- /IIIC_Classification-Supplemental.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/IIIC_Classification-Supplemental.pdf -------------------------------------------------------------------------------- /IIIC_IRR_Commentary.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/IIIC_IRR_Commentary.pdf -------------------------------------------------------------------------------- /IIIC_SPaRCNet.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/IIIC_SPaRCNet.pdf -------------------------------------------------------------------------------- /IIIC_train_0502.py: -------------------------------------------------------------------------------- 1 | 2 | import os 3 | 4 | import time 5 | 6 | import numpy as np 7 | 8 | import random 9 | 10 | import re 11 | 12 | from collections import OrderedDict 13 | 14 | import torch 15 | import torch.nn as nn 16 | import torch.nn.functional as F 17 | import torch.optim as optim 18 | 19 | from collections import Counter 20 | from collections import OrderedDict 21 | 22 | from sklearn.utils import class_weight 23 | 24 | import itertools 25 | 26 | import matplotlib.pyplot as plt 27 | 28 | from sklearn.metrics import confusion_matrix 29 | 30 | from sklearn.metrics import classification_report 31 | 32 | print ("") 33 | print ("done") 34 | print ("") 35 | 36 | 37 | 38 | class _DenseLayer(nn.Sequential): 39 | def __init__(self, num_input_features, growth_rate, bn_size, drop_rate, conv_bias, batch_norm): 40 | super(_DenseLayer, self).__init__() 41 | if batch_norm: 42 | self.add_module('norm1', nn.BatchNorm1d(num_input_features)), 43 | # self.add_module('relu1', nn.ReLU()), 44 | self.add_module('elu1', nn.ELU()), 45 | self.add_module('conv1', nn.Conv1d(num_input_features, bn_size * growth_rate, kernel_size=1, stride=1, bias=conv_bias)), 46 | if batch_norm: 47 | self.add_module('norm2', nn.BatchNorm1d(bn_size * growth_rate)), 48 | # self.add_module('relu2', nn.ReLU()), 49 | self.add_module('elu2', nn.ELU()), 50 | self.add_module('conv2', nn.Conv1d(bn_size * growth_rate, growth_rate, kernel_size=3, stride=1, padding=1, bias=conv_bias)), 51 | # self.add_module('conv2', nn.Conv1d(bn_size * growth_rate, growth_rate, kernel_size=7, stride=1, padding=3, bias=conv_bias)), 52 | self.drop_rate = drop_rate 53 | 54 | def forward(self, x): 55 | # print("Dense Layer Input: ") 56 | # print(x.size()) 57 | new_features = super(_DenseLayer, self).forward(x) 58 | # print("Dense Layer Output:") 59 | # print(new_features.size()) 60 | if self.drop_rate > 0: 61 | new_features = F.dropout(new_features, p=self.drop_rate, training=self.training) 62 | return torch.cat([x, 
new_features], 1) 63 | 64 | 65 | class _DenseBlock(nn.Sequential): 66 | def __init__(self, num_layers, num_input_features, bn_size, growth_rate, drop_rate, conv_bias, batch_norm): 67 | super(_DenseBlock, self).__init__() 68 | for i in range(num_layers): 69 | layer = _DenseLayer(num_input_features + i * growth_rate, growth_rate, bn_size, drop_rate, conv_bias, batch_norm) 70 | self.add_module('denselayer%d' % (i + 1), layer) 71 | 72 | 73 | class _Transition(nn.Sequential): 74 | def __init__(self, num_input_features, num_output_features, conv_bias, batch_norm): 75 | super(_Transition, self).__init__() 76 | if batch_norm: 77 | self.add_module('norm', nn.BatchNorm1d(num_input_features)) 78 | # self.add_module('relu', nn.ReLU()) 79 | self.add_module('elu', nn.ELU()) 80 | self.add_module('conv', nn.Conv1d(num_input_features, num_output_features, kernel_size=1, stride=1, bias=conv_bias)) 81 | self.add_module('pool', nn.AvgPool1d(kernel_size=2, stride=2)) 82 | 83 | 84 | class DenseNetEnconder(nn.Module): 85 | def __init__(self, growth_rate=32, block_config=(4, 4, 4, 4, 4, 4, 4), #block_config=(6, 12, 24, 48, 24, 20, 16), #block_config=(6, 12, 24, 16), 86 | in_channels=16, num_init_features=64, bn_size=4, drop_rate=0.2, conv_bias=True, batch_norm=False): 87 | 88 | super(DenseNetEnconder, self).__init__() 89 | 90 | # First convolution 91 | first_conv = OrderedDict([('conv0', nn.Conv1d(in_channels, num_init_features, kernel_size=7, stride=2, padding=3, bias=conv_bias))]) 92 | # first_conv = OrderedDict([('conv0', nn.Conv1d(in_channels, num_init_features, groups=in_channels, kernel_size=7, stride=2, padding=3, bias=conv_bias))]) 93 | # first_conv = OrderedDict([('conv0', nn.Conv1d(in_channels, num_init_features, kernel_size=15, stride=2, padding=7, bias=conv_bias))]) 94 | 95 | # first_conv = OrderedDict([ 96 | # ('conv0-depth', nn.Conv1d(in_channels, 32, groups=in_channels, kernel_size=7, stride=2, padding=3, bias=conv_bias)), 97 | # ('conv0-point', nn.Conv1d(32, num_init_features, kernel_size=1, stride=1, bias=conv_bias)), 98 | # ]) 99 | 100 | if batch_norm: 101 | first_conv['norm0'] = nn.BatchNorm1d(num_init_features) 102 | # first_conv['relu0'] = nn.ReLU() 103 | first_conv['elu0'] = nn.ELU() 104 | first_conv['pool0'] = nn.MaxPool1d(kernel_size=3, stride=2, padding=1) 105 | 106 | self.densenet = nn.Sequential(first_conv) 107 | 108 | num_features = num_init_features 109 | for i, num_layers in enumerate(block_config): 110 | block = _DenseBlock(num_layers=num_layers, num_input_features=num_features, 111 | bn_size=bn_size, growth_rate=growth_rate, drop_rate=drop_rate, conv_bias=conv_bias, batch_norm=batch_norm) 112 | self.densenet.add_module('denseblock%d' % (i + 1), block) 113 | num_features = num_features + num_layers * growth_rate 114 | if i != len(block_config) - 1: 115 | trans = _Transition(num_input_features=num_features, num_output_features=num_features // 2, conv_bias=conv_bias, batch_norm=batch_norm) 116 | self.densenet.add_module('transition%d' % (i + 1), trans) 117 | num_features = num_features // 2 118 | 119 | # Final batch norm 120 | if batch_norm: 121 | self.densenet.add_module('norm{}'.format(len(block_config) + 1), nn.BatchNorm1d(num_features)) 122 | # self.features.add_module('norm5', BatchReNorm1d(num_features)) 123 | 124 | self.densenet.add_module('relu{}'.format(len(block_config) + 1), nn.ReLU()) 125 | self.densenet.add_module('pool{}'.format(len(block_config) + 1), nn.AvgPool1d(kernel_size=7, stride=3)) # stride originally 1 126 | 127 | self.num_features = num_features 128 | 129 | # 
Official init from torch repo. 130 | for m in self.modules(): 131 | if isinstance(m, nn.Conv1d): 132 | nn.init.kaiming_normal_(m.weight.data) 133 | elif isinstance(m, nn.BatchNorm1d): 134 | m.weight.data.fill_(1) 135 | m.bias.data.zero_() 136 | elif isinstance(m, nn.Linear): 137 | m.bias.data.zero_() 138 | 139 | def forward(self, x): 140 | features = self.densenet(x) 141 | # print("Final Output") 142 | # print(features.size()) 143 | return features.view(features.size(0), -1) 144 | 145 | 146 | class DenseNetClassifier(nn.Module): 147 | # def __init__(self, growth_rate=16, block_config=(3, 6, 12, 8), #block_config=(6, 12, 24, 48, 24, 20, 16), #block_config=(6, 12, 24, 16), 148 | # in_channels=16, num_init_features=32, bn_size=2, drop_rate=0, conv_bias=False, drop_fc=0.5, num_classes=6): 149 | def __init__(self, growth_rate=32, block_config=(4, 4, 4, 4, 4, 4, 4), 150 | in_channels=16, num_init_features=64, bn_size=4, drop_rate=0.2, conv_bias=True, batch_norm=False, drop_fc=0.5, num_classes=6): 151 | 152 | super(DenseNetClassifier, self).__init__() 153 | 154 | self.features = DenseNetEnconder(growth_rate=growth_rate, block_config=block_config, in_channels=in_channels, 155 | num_init_features=num_init_features, bn_size=bn_size, drop_rate=drop_rate, 156 | conv_bias=conv_bias, batch_norm=batch_norm) 157 | 158 | # Linear layer 159 | self.classifier = nn.Sequential( 160 | nn.Dropout(p=drop_fc), 161 | nn.Linear(self.features.num_features, num_classes) 162 | ) 163 | 164 | # Official init from torch repo. 165 | for m in self.modules(): 166 | if isinstance(m, nn.Conv1d): 167 | nn.init.kaiming_normal_(m.weight.data) 168 | elif isinstance(m, nn.BatchNorm1d): 169 | m.weight.data.fill_(1) 170 | m.bias.data.zero_() 171 | elif isinstance(m, nn.Linear): 172 | m.bias.data.zero_() 173 | 174 | def forward(self, x): 175 | features = self.features(x) 176 | out = self.classifier(features) 177 | return out, features 178 | 179 | 180 | 181 | 182 | class WeightedKLDivWithLogitsLoss(nn.KLDivLoss): 183 | def __init__(self, weight): 184 | super(WeightedKLDivWithLogitsLoss, self).__init__(size_average=None, reduce=None, reduction='none') 185 | self.register_buffer('weight', weight) 186 | 187 | def forward(self, input, target): 188 | # TODO: For KLDivLoss: input should 'log-probability' and target should be 'probability' 189 | # TODO: input for this method is logits, and target is probabilities 190 | batch_size = input.size(0) 191 | log_prob = F.log_softmax(input, 1) 192 | element_loss = super(WeightedKLDivWithLogitsLoss, self).forward(log_prob, target) 193 | 194 | sample_loss = torch.sum(element_loss, dim=1) 195 | sample_weight = torch.sum(target * self.weight, dim=1) 196 | 197 | weighted_loss = sample_loss*sample_weight 198 | # Average over mini-batch, not element-wise 199 | avg_loss = torch.sum(weighted_loss) / batch_size 200 | 201 | return avg_loss 202 | 203 | 204 | 205 | 206 | class AverageMeter(object): 207 | """Computes and stores the average and current value""" 208 | 209 | def __init__(self): 210 | self.reset() 211 | 212 | def reset(self): 213 | self.val = 0 214 | self.avg = 0 215 | self.sum = 0 216 | self.count = 0 217 | 218 | def update(self, val, n=1): 219 | self.val = val 220 | self.sum += val * n 221 | self.count += n 222 | self.avg = self.sum / self.count 223 | 224 | 225 | 226 | 227 | def get_train_val_data(total_X, total_Y): 228 | N = total_X.shape[0] 229 | N1 = int(N*0.8) # ratio for val 230 | N2 = N-N1 231 | #print (" N, N1, N2: ", N, N1, N2) 232 | sn_list = list(range(N)) 233 | #print (sn_list[0:10]) 234 
| random.shuffle(sn_list) 235 | #print (sn_list[0:10]) 236 | 237 | train_X = list() 238 | train_Y = list() 239 | for n in range(N1): 240 | sn = sn_list[n] 241 | train_X.append(total_X[sn,:,:]) 242 | train_Y.append(total_Y[sn,:]) 243 | train_X = np.array(train_X) 244 | train_Y = np.array(train_Y) 245 | 246 | val_X = list() 247 | val_Y = list() 248 | for n in range(N1,N): 249 | sn = sn_list[n] 250 | val_X.append(total_X[sn,:,:]) 251 | val_Y.append(total_Y[sn,:]) 252 | val_X = np.array(val_X) 253 | val_Y = np.array(val_Y) 254 | 255 | return train_X, train_Y, val_X, val_Y 256 | 257 | 258 | def get_batch_sn_array(N, batch_size, epoch): 259 | 260 | #print ("N: ", N) 261 | #print ("") 262 | 263 | total_sn_list = list() 264 | 265 | for n in range(N): 266 | 267 | total_sn_list.append(n) 268 | total_sn_list.append(-n) 269 | 270 | #print ("len(total_sn_list): ", len(total_sn_list)) 271 | #print ("") 272 | 273 | #print (total_sn_list[0:10]) 274 | 275 | 276 | seed_num = epoch 277 | 278 | #print ("seed_num: ", seed_num) 279 | #print ("") 280 | 281 | random.seed(seed_num) 282 | 283 | random.shuffle(total_sn_list) 284 | 285 | 286 | result_list = list() 287 | 288 | N2 = int( len(total_sn_list)/batch_size ) 289 | 290 | #print ("N2: ", N2) 291 | #print ("") 292 | 293 | for n in range(N2): 294 | 295 | start_sn = n*batch_size 296 | end_sn = (n+1)*batch_size 297 | 298 | result = list() 299 | 300 | for k in range(start_sn,end_sn): 301 | 302 | sn = total_sn_list[k] 303 | 304 | result.append(sn) 305 | 306 | result_list.append(result) 307 | 308 | result_array = np.array(result_list) 309 | 310 | return result_array 311 | 312 | 313 | def get_batch_X_Y (train_X, train_Y, batch_sn_array): 314 | 315 | #print ("batch_sn_array.shape: ", batch_sn_array.shape) 316 | #print ("") 317 | 318 | #print (batch_sn_array) 319 | #print ("") 320 | 321 | K = batch_sn_array.shape[0] 322 | 323 | result_X = list() 324 | result_Y = list() 325 | 326 | for k in range(K): 327 | 328 | sn = batch_sn_array[k] 329 | 330 | if sn >= 0: 331 | 332 | sn = sn 333 | 334 | x = train_X[sn,:,:] 335 | 336 | y = train_Y[sn,:] 337 | 338 | else: 339 | 340 | sn = -sn 341 | 342 | x = np.array(train_X[sn,:,:]) 343 | 344 | x2 = np.zeros((16, 2000)) 345 | 346 | x2[0:4,:] = x[4:8,:] 347 | x2[4:8,:] = x[0:4,:] 348 | 349 | x2[8:12,:] = x[12:16,:] 350 | x2[12:16,:] = x[8:12,:] 351 | 352 | x = x2 353 | 354 | y = train_Y[sn,:] 355 | 356 | result_X.append(x) 357 | result_Y.append(y) 358 | 359 | result_X = np.array(result_X) 360 | result_Y = np.array(result_Y) 361 | 362 | #print ("result_X.shape: ", result_X.shape) 363 | #print ("result_Y.shape: ", result_Y.shape) 364 | #print ("") 365 | 366 | return (result_X,result_Y) 367 | 368 | 369 | 370 | if __name__ == '__main__': 371 | 372 | 373 | 374 | train_all_X = np.load("../all_train_X.npy") 375 | train_all_Y = np.load("../all_train_Y2_hard.npy") 376 | 377 | print ("train_all_X.shape: ", train_all_X.shape) 378 | print ("train_all_Y.shape: ", train_all_Y.shape) 379 | print ("") 380 | 381 | 382 | train_10_X = np.load("../10_train_X.npy") 383 | train_10_Y = np.load("../10_train_Y2_hard.npy") 384 | 385 | print ("train_10_X.shape: ", train_10_X.shape) 386 | print ("train_10_Y.shape: ", train_10_Y.shape) 387 | print ("") 388 | 389 | train_X, train_Y, val_X, val_Y = get_train_val_data(train_10_X, train_10_Y) 390 | 391 | print ("train_X.shape: ", train_X.shape) 392 | print ("train_Y.shape: ", train_Y.shape) 393 | print ("") 394 | 395 | print ("val_X.shape: ", val_X.shape) 396 | print ("val_Y.shape: ", val_Y.shape) 397 | print ("") 398 
| 399 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu") 400 | 401 | print ("device: ", device) 402 | print ("") 403 | 404 | # model_cnn = torch.load("./previous_models/model_xxx.pt") 405 | model_cnn = DenseNetClassifier() 406 | 407 | model_cnn.to(device) 408 | 409 | #print (model_cnn) 410 | #print ("") 411 | 412 | train_label = np.argmax(train_Y,1) 413 | 414 | #print ( "Counter(train_label): ", Counter(train_label) ) 415 | #print ("") 416 | 417 | train_W = class_weight.compute_class_weight('balanced',np.unique(train_label),train_label) 418 | 419 | #print ("train_W: ", train_W) 420 | #print ("") 421 | 422 | class_weight = train_W 423 | 424 | #print ("class_weight: ", class_weight) 425 | #print ("") 426 | 427 | class_weight_torch = torch.from_numpy(class_weight).float() 428 | 429 | criterion = WeightedKLDivWithLogitsLoss(class_weight_torch) 430 | 431 | criterion.to(device) 432 | 433 | #print ("criterion: ", criterion) 434 | #print ("") 435 | 436 | 437 | optimizer = optim.Adam(model_cnn.parameters(), lr=6.25*1e-5, betas = (0.9,0.999),eps = 1.0*1e-8, weight_decay=1.0*1e-3) 438 | 439 | #print ("optimizer: ", optimizer) 440 | #print ("") 441 | 442 | batch_size = 32 443 | 444 | time1 = time.time() 445 | 446 | for epoch in range(20): 447 | 448 | print ("****************************************************************") 449 | print ("epoch: ", epoch) 450 | 451 | losses = AverageMeter() 452 | 453 | model_cnn.train() 454 | 455 | 456 | total_batch_sn_array = get_batch_sn_array(train_X.shape[0], batch_size, epoch) 457 | 458 | print ("total_batch_sn_array.shape: ", total_batch_sn_array.shape) 459 | print ("") 460 | 461 | K = total_batch_sn_array.shape[0] 462 | 463 | print ("K: ", K) 464 | print ("") 465 | 466 | S_list = list() 467 | Y_list = list() 468 | 469 | for k in range(K): 470 | 471 | if k%100 == 0: 472 | print (k) 473 | 474 | batch_sn_array = total_batch_sn_array[k,:] 475 | (X,Y) = get_batch_X_Y(train_X, train_Y, batch_sn_array) 476 | 477 | X = X.astype("float64") 478 | Y = Y.astype("float64") 479 | 480 | X = torch.from_numpy(X).float() 481 | Y = torch.from_numpy(Y).float() 482 | 483 | X = X.to(device) 484 | Y = Y.to(device) 485 | 486 | optimizer.zero_grad() 487 | 488 | output, _ = model_cnn(X) 489 | 490 | loss = criterion(output, Y) 491 | 492 | assert not np.isnan(loss.item()), 'Model diverged with loss = NaN' 493 | 494 | loss.backward() 495 | optimizer.step() 496 | 497 | losses.update(loss.item(), X.size(0)) 498 | 499 | 500 | del X 501 | del Y 502 | del output 503 | 504 | 505 | #del X_batch_list 506 | #del Y_batch_list 507 | 508 | print ("losses.avg: ", losses.avg) 509 | print ("") 510 | 511 | 512 | time2 = time.time() 513 | 514 | print("Duration: ", (time2-time1)) 515 | print("") 516 | 517 | 518 | 519 | 520 | 521 | 522 | 523 | 524 | 525 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | # Open Data Commons Attribution License (ODC-By) v1.0 2 | 3 | This license is from https://opendatacommons.org/licenses/by/1-0/ 4 | A plain language summary is available [here](https://opendatacommons.org/licenses/by/summary/) 5 | 6 | # ODC Attribution License (ODC-By) 7 | 8 | ### Preamble 9 | 10 | The Open Data Commons Attribution License is a license agreement 11 | intended to allow users to freely share, modify, and use this Database 12 | subject only to the attribution requirements set out in Section 4. 
13 | 14 | Databases can contain a wide variety of types of content (images, 15 | audiovisual material, and sounds all in the same database, for example), 16 | and so this license only governs the rights over the Database, and not 17 | the contents of the Database individually. Licensors may therefore wish 18 | to use this license together with another license for the contents. 19 | 20 | Sometimes the contents of a database, or the database itself, can be 21 | covered by other rights not addressed here (such as private contracts, 22 | trademark over the name, or privacy rights / data protection rights 23 | over information in the contents), and so you are advised that you may 24 | have to consult other documents or clear other rights before doing 25 | activities not covered by this License. 26 | 27 | ------ 28 | 29 | The Licensor (as defined below) 30 | 31 | and 32 | 33 | You (as defined below) 34 | 35 | agree as follows: 36 | 37 | ### 1.0 Definitions of Capitalised Words 38 | 39 | "Collective Database" - Means this Database in unmodified form as part 40 | of a collection of independent databases in themselves that together are 41 | assembled into a collective whole. A work that constitutes a Collective 42 | Database will not be considered a Derivative Database. 43 | 44 | "Convey" - As a verb, means Using the Database, a Derivative Database, 45 | or the Database as part of a Collective Database in any way that enables 46 | a Person to make or receive copies of the Database or a Derivative 47 | Database. Conveying does not include interaction with a user through a 48 | computer network, or creating and Using a Produced Work, where no 49 | transfer of a copy of the Database or a Derivative Database occurs. 50 | 51 | "Contents" - The contents of this Database, which includes the 52 | information, independent works, or other material collected into the 53 | Database. For example, the contents of the Database could be factual 54 | data or works such as images, audiovisual material, text, or sounds. 55 | 56 | "Database" - A collection of material (the Contents) arranged in a 57 | systematic or methodical way and individually accessible by electronic 58 | or other means offered under the terms of this License. 59 | 60 | "Database Directive" - Means Directive 96/9/EC of the European 61 | Parliament and of the Council of 11 March 1996 on the legal protection 62 | of databases, as amended or succeeded. 63 | 64 | "Database Right" - Means rights resulting from the Chapter III ("sui 65 | generis") rights in the Database Directive (as amended and as transposed 66 | by member states), which includes the Extraction and Re-utilisation of 67 | the whole or a Substantial part of the Contents, as well as any similar 68 | rights available in the relevant jurisdiction under Section 10.4. 69 | 70 | "Derivative Database" - Means a database based upon the Database, and 71 | includes any translation, adaptation, arrangement, modification, or any 72 | other alteration of the Database or of a Substantial part of the 73 | Contents. This includes, but is not limited to, Extracting or 74 | Re-utilising the whole or a Substantial part of the Contents in a new 75 | Database. 76 | 77 | "Extraction" - Means the permanent or temporary transfer of all or a 78 | Substantial part of the Contents to another medium by any means or in 79 | any form. 80 | 81 | "License" - Means this license agreement and is both a license of rights 82 | such as copyright and Database Rights and an agreement in contract. 
83 | 84 | "Licensor" - Means the Person that offers the Database under the terms 85 | of this License. 86 | 87 | "Person" - Means a natural or legal person or a body of persons 88 | corporate or incorporate. 89 | 90 | "Produced Work" - a work (such as an image, audiovisual material, text, 91 | or sounds) resulting from using the whole or a Substantial part of the 92 | Contents (via a search or other query) from this Database, a Derivative 93 | Database, or this Database as part of a Collective Database. 94 | 95 | "Publicly" - means to Persons other than You or under Your control by 96 | either more than 50% ownership or by the power to direct their 97 | activities (such as contracting with an independent consultant). 98 | 99 | "Re-utilisation" - means any form of making available to the public all 100 | or a Substantial part of the Contents by the distribution of copies, by 101 | renting, by online or other forms of transmission. 102 | 103 | "Substantial" - Means substantial in terms of quantity or quality or a 104 | combination of both. The repeated and systematic Extraction or 105 | Re-utilisation of insubstantial parts of the Contents may amount to the 106 | Extraction or Re-utilisation of a Substantial part of the Contents. 107 | 108 | "Use" - As a verb, means doing any act that is restricted by copyright 109 | or Database Rights whether in the original medium or any other; and 110 | includes without limitation distributing, copying, publicly performing, 111 | publicly displaying, and preparing derivative works of the Database, as 112 | well as modifying the Database as may be technically necessary to use it 113 | in a different mode or format. 114 | 115 | "You" - Means a Person exercising rights under this License who has not 116 | previously violated the terms of this License with respect to the 117 | Database, or who has received express permission from the Licensor to 118 | exercise rights under this License despite a previous violation. 119 | 120 | Words in the singular include the plural and vice versa. 121 | 122 | ### 2.0 What this License covers 123 | 124 | 2.1. Legal effect of this document. This License is: 125 | 126 | a. A license of applicable copyright and neighbouring rights; 127 | 128 | b. A license of the Database Right; and 129 | 130 | c. An agreement in contract between You and the Licensor. 131 | 132 | 2.2 Legal rights covered. This License covers the legal rights in the 133 | Database, including: 134 | 135 | a. Copyright. Any copyright or neighbouring rights in the Database. 136 | The copyright licensed includes any individual elements of the 137 | Database, but does not cover the copyright over the Contents 138 | independent of this Database. See Section 2.4 for details. Copyright 139 | law varies between jurisdictions, but is likely to cover: the Database 140 | model or schema, which is the structure, arrangement, and organisation 141 | of the Database, and can also include the Database tables and table 142 | indexes; the data entry and output sheets; and the Field names of 143 | Contents stored in the Database; 144 | 145 | b. Database Rights. Database Rights only extend to the Extraction and 146 | Re-utilisation of the whole or a Substantial part of the Contents. 147 | Database Rights can apply even when there is no copyright over the 148 | Database. Database Rights can also apply when the Contents are removed 149 | from the Database and are selected and arranged in a way that would 150 | not infringe any applicable copyright; and 151 | 152 | c. Contract. 
This is an agreement between You and the Licensor for 153 | access to the Database. In return you agree to certain conditions of 154 | use on this access as outlined in this License. 155 | 156 | 2.3 Rights not covered. 157 | 158 | a. This License does not apply to computer programs used in the making 159 | or operation of the Database; 160 | 161 | b. This License does not cover any patents over the Contents or the 162 | Database; and 163 | 164 | c. This License does not cover any trademarks associated with the 165 | Database. 166 | 167 | 2.4 Relationship to Contents in the Database. The individual items of 168 | the Contents contained in this Database may be covered by other rights, 169 | including copyright, patent, data protection, privacy, or personality 170 | rights, and this License does not cover any rights (other than Database 171 | Rights or in contract) in individual Contents contained in the Database. 172 | For example, if used on a Database of images (the Contents), this 173 | License would not apply to copyright over individual images, which could 174 | have their own separate licenses, or one single license covering all of 175 | the rights over the images. 176 | 177 | ### 3.0 Rights granted 178 | 179 | 3.1 Subject to the terms and conditions of this License, the Licensor 180 | grants to You a worldwide, royalty-free, non-exclusive, terminable (but 181 | only under Section 9) license to Use the Database for the duration of 182 | any applicable copyright and Database Rights. These rights explicitly 183 | include commercial use, and do not exclude any field of endeavour. To 184 | the extent possible in the relevant jurisdiction, these rights may be 185 | exercised in all media and formats whether now known or created in the 186 | future. 187 | 188 | The rights granted cover, for example: 189 | 190 | a. Extraction and Re-utilisation of the whole or a Substantial part of 191 | the Contents; 192 | 193 | b. Creation of Derivative Databases; 194 | 195 | c. Creation of Collective Databases; 196 | 197 | d. Creation of temporary or permanent reproductions by any means and 198 | in any form, in whole or in part, including of any Derivative 199 | Databases or as a part of Collective Databases; and 200 | 201 | e. Distribution, communication, display, lending, making available, or 202 | performance to the public by any means and in any form, in whole or in 203 | part, including of any Derivative Database or as a part of Collective 204 | Databases. 205 | 206 | 3.2 Compulsory license schemes. For the avoidance of doubt: 207 | 208 | a. Non-waivable compulsory license schemes. In those jurisdictions in 209 | which the right to collect royalties through any statutory or 210 | compulsory licensing scheme cannot be waived, the Licensor reserves 211 | the exclusive right to collect such royalties for any exercise by You 212 | of the rights granted under this License; 213 | 214 | b. Waivable compulsory license schemes. In those jurisdictions in 215 | which the right to collect royalties through any statutory or 216 | compulsory licensing scheme can be waived, the Licensor waives the 217 | exclusive right to collect such royalties for any exercise by You of 218 | the rights granted under this License; and, 219 | 220 | c. Voluntary license schemes. 
The Licensor waives the right to collect 221 | royalties, whether individually or, in the event that the Licensor is 222 | a member of a collecting society that administers voluntary licensing 223 | schemes, via that society, from any exercise by You of the rights 224 | granted under this License. 225 | 226 | 3.3 The right to release the Database under different terms, or to stop 227 | distributing or making available the Database, is reserved. Note that 228 | this Database may be multiple-licensed, and so You may have the choice 229 | of using alternative licenses for this Database. Subject to Section 230 | 10.4, all other rights not expressly granted by Licensor are reserved. 231 | 232 | ### 4.0 Conditions of Use 233 | 234 | 4.1 The rights granted in Section 3 above are expressly made subject to 235 | Your complying with the following conditions of use. These are important 236 | conditions of this License, and if You fail to follow them, You will be 237 | in material breach of its terms. 238 | 239 | 4.2 Notices. If You Publicly Convey this Database, any Derivative 240 | Database, or the Database as part of a Collective Database, then You 241 | must: 242 | 243 | a. Do so only under the terms of this License; 244 | 245 | b. Include a copy of this License or its Uniform Resource Identifier (URI) 246 | with the Database or Derivative Database, including both in the 247 | Database or Derivative Database and in any relevant documentation; 248 | 249 | c. Keep intact any copyright or Database Right notices and notices 250 | that refer to this License; and 251 | 252 | d. If it is not possible to put the required notices in a particular 253 | file due to its structure, then You must include the notices in a 254 | location (such as a relevant directory) where users would be likely to 255 | look for it. 256 | 257 | 4.3 Notice for using output (Contents). Creating and Using a Produced 258 | Work does not require the notice in Section 4.2. However, if you 259 | Publicly Use a Produced Work, You must include a notice associated with 260 | the Produced Work reasonably calculated to make any Person that uses, 261 | views, accesses, interacts with, or is otherwise exposed to the Produced 262 | Work aware that Content was obtained from the Database, Derivative 263 | Database, or the Database as part of a Collective Database, and that it 264 | is available under this License. 265 | 266 | a. Example notice. The following text will satisfy notice under 267 | Section 4.3: 268 | 269 | Contains information from DATABASE NAME which is made available 270 | under the ODC Attribution License. 271 | 272 | DATABASE NAME should be replaced with the name of the Database and a 273 | hyperlink to the location of the Database. "ODC Attribution License" 274 | should contain a hyperlink to the URI of the text of this License. If 275 | hyperlinks are not possible, You should include the plain text of the 276 | required URI's with the above notice. 277 | 278 | 4.4 Licensing of others. You may not sublicense the Database. Each time 279 | You communicate the Database, the whole or Substantial part of the 280 | Contents, or any Derivative Database to anyone else in any way, the 281 | Licensor offers to the recipient a license to the Database on the same 282 | terms and conditions as this License. You are not responsible for 283 | enforcing compliance by third parties with this License, but You may 284 | enforce any rights that You have over a Derivative Database. 
You are 285 | solely responsible for any modifications of a Derivative Database made 286 | by You or another Person at Your direction. You may not impose any 287 | further restrictions on the exercise of the rights granted or affirmed 288 | under this License. 289 | 290 | ### 5.0 Moral rights 291 | 292 | 5.1 Moral rights. This section covers moral rights, including any rights 293 | to be identified as the author of the Database or to object to treatment 294 | that would otherwise prejudice the author's honour and reputation, or 295 | any other derogatory treatment: 296 | 297 | a. For jurisdictions allowing waiver of moral rights, Licensor waives 298 | all moral rights that Licensor may have in the Database to the fullest 299 | extent possible by the law of the relevant jurisdiction under Section 300 | 10.4; 301 | 302 | b. If waiver of moral rights under Section 5.1 a in the relevant 303 | jurisdiction is not possible, Licensor agrees not to assert any moral 304 | rights over the Database and waives all claims in moral rights to the 305 | fullest extent possible by the law of the relevant jurisdiction under 306 | Section 10.4; and 307 | 308 | c. For jurisdictions not allowing waiver or an agreement not to assert 309 | moral rights under Section 5.1 a and b, the author may retain their 310 | moral rights over certain aspects of the Database. 311 | 312 | Please note that some jurisdictions do not allow for the waiver of moral 313 | rights, and so moral rights may still subsist over the Database in some 314 | jurisdictions. 315 | 316 | ### 6.0 Fair dealing, Database exceptions, and other rights not affected 317 | 318 | 6.1 This License does not affect any rights that You or anyone else may 319 | independently have under any applicable law to make any use of this 320 | Database, including without limitation: 321 | 322 | a. Exceptions to the Database Right including: Extraction of Contents 323 | from non-electronic Databases for private purposes, Extraction for 324 | purposes of illustration for teaching or scientific research, and 325 | Extraction or Re-utilisation for public security or an administrative 326 | or judicial procedure. 327 | 328 | b. Fair dealing, fair use, or any other legally recognised limitation 329 | or exception to infringement of copyright or other applicable laws. 330 | 331 | 6.2 This License does not affect any rights of lawful users to Extract 332 | and Re-utilise insubstantial parts of the Contents, evaluated 333 | quantitatively or qualitatively, for any purposes whatsoever, including 334 | creating a Derivative Database (subject to other rights over the 335 | Contents, see Section 2.4). The repeated and systematic Extraction or 336 | Re-utilisation of insubstantial parts of the Contents may however amount 337 | to the Extraction or Re-utilisation of a Substantial part of the 338 | Contents. 339 | 340 | ### 7.0 Warranties and Disclaimer 341 | 342 | 7.1 The Database is licensed by the Licensor "as is" and without any 343 | warranty of any kind, either express, implied, or arising by statute, 344 | custom, course of dealing, or trade usage. Licensor specifically 345 | disclaims any and all implied warranties or conditions of title, 346 | non-infringement, accuracy or completeness, the presence or absence of 347 | errors, fitness for a particular purpose, merchantability, or otherwise. 348 | Some jurisdictions do not allow the exclusion of implied warranties, so 349 | this exclusion may not apply to You. 
350 | 351 | ### 8.0 Limitation of liability 352 | 353 | 8.1 Subject to any liability that may not be excluded or limited by law, 354 | the Licensor is not liable for, and expressly excludes, all liability 355 | for loss or damage however and whenever caused to anyone by any use 356 | under this License, whether by You or by anyone else, and whether caused 357 | by any fault on the part of the Licensor or not. This exclusion of 358 | liability includes, but is not limited to, any special, incidental, 359 | consequential, punitive, or exemplary damages such as loss of revenue, 360 | data, anticipated profits, and lost business. This exclusion applies 361 | even if the Licensor has been advised of the possibility of such 362 | damages. 363 | 364 | 8.2 If liability may not be excluded by law, it is limited to actual and 365 | direct financial loss to the extent it is caused by proved negligence on 366 | the part of the Licensor. 367 | 368 | ### 9.0 Termination of Your rights under this License 369 | 370 | 9.1 Any breach by You of the terms and conditions of this License 371 | automatically terminates this License with immediate effect and without 372 | notice to You. For the avoidance of doubt, Persons who have received the 373 | Database, the whole or a Substantial part of the Contents, Derivative 374 | Databases, or the Database as part of a Collective Database from You 375 | under this License will not have their licenses terminated provided 376 | their use is in full compliance with this License or a license granted 377 | under Section 4.8 of this License. Sections 1, 2, 7, 8, 9 and 10 will 378 | survive any termination of this License. 379 | 380 | 9.2 If You are not in breach of the terms of this License, the Licensor 381 | will not terminate Your rights under it. 382 | 383 | 9.3 Unless terminated under Section 9.1, this License is granted to You 384 | for the duration of applicable rights in the Database. 385 | 386 | 9.4 Reinstatement of rights. If you cease any breach of the terms and 387 | conditions of this License, then your full rights under this License 388 | will be reinstated: 389 | 390 | a. Provisionally and subject to permanent termination until the 60th 391 | day after cessation of breach; 392 | 393 | b. Permanently on the 60th day after cessation of breach unless 394 | otherwise reasonably notified by the Licensor; or 395 | 396 | c. Permanently if reasonably notified by the Licensor of the 397 | violation, this is the first time You have received notice of 398 | violation of this License from the Licensor, and You cure the 399 | violation prior to 30 days after your receipt of the notice. 400 | 401 | 9.5 Notwithstanding the above, Licensor reserves the right to release 402 | the Database under different license terms or to stop distributing or 403 | making available the Database. Releasing the Database under different 404 | license terms or stopping the distribution of the Database will not 405 | withdraw this License (or any other license that has been, or is 406 | required to be, granted under the terms of this License), and this 407 | License will continue in full force and effect unless terminated as 408 | stated above. 
409 | 410 | ### 10.0 General 411 | 412 | 10.1 If any provision of this License is held to be invalid or 413 | unenforceable, that must not affect the validity or enforceability of 414 | the remainder of the terms and conditions of this License and each 415 | remaining provision of this License shall be valid and enforced to the 416 | fullest extent permitted by law. 417 | 418 | 10.2 This License is the entire agreement between the parties with 419 | respect to the rights granted here over the Database. It replaces any 420 | earlier understandings, agreements or representations with respect to 421 | the Database. 422 | 423 | 10.3 If You are in breach of the terms of this License, You will not be 424 | entitled to rely on the terms of this License or to complain of any 425 | breach by the Licensor. 426 | 427 | 10.4 Choice of law. This License takes effect in and will be governed by 428 | the laws of the relevant jurisdiction in which the License terms are 429 | sought to be enforced. If the standard suite of rights granted under 430 | applicable copyright law and Database Rights in the relevant 431 | jurisdiction includes additional rights not granted under this License, 432 | these additional rights are granted in this License in order to meet the 433 | terms of this License. 434 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # IIIC-SPaRCNet 2 | This repository provides code to reproduce figures in "Development of Expert-Level Classification of Seizures and Rhythmic and Periodic Patterns During EEG Interpretation". 3 | 4 | ## References and papers 5 | Jing J, Ge W, Hong S, Fernandes MB, Lin Z, Yang C, An S, Struck AF, Herlopian A, Karakis I, Halford JJ, Ng MC, Johnson EL, Appavu BL, Sarkis RA, Osman G, Kaplan PW, Dhakar MB, Arcot Jayagopal L, Sheikh Z, Taraschenko O, Schmitt S, Haider HA, Kim JA, Swisher CB, Gaspard N, Cervenka MC, Rodriguez Ruiz AA, Lee JW, Tabaeizadeh M, Gilmore EJ, Nordstrom K, Yoo JY, Holmes MG, Herman ST, Williams JA, Pathmanathan J, Nascimento FA, Fan Z, Nasiri S, Shafi MM, Cash SS, Hoch DB, Cole AJ, Rosenthal ES, Zafar SF, Sun J, Westover MB. **Development of Expert-Level Classification of Seizures and Rhythmic and Periodic Patterns During EEG Interpretation.** *Neurology*. 2023 Apr 25;100(17):e1750-e1762. doi: 10.1212/WNL.0000000000207127. Epub 2023 Mar 6. PMID: 36878708. 6 | 7 | The [paper](IIIC_SPaRCNet.pdf) 8 | The [supplemental material](IIIC_Classification-Supplemental.pdf) 9 | [Commentary](IIIC_IRR_Commentary.pdf) 10 | 11 | ## Figures and Tables from Main Text 12 | 13 | ### Table 1: Characteristics of patients/EEGs used in training and testing of SzNet. 14 | ![Table 1](Table1.png) 15 | 16 | ### Figure 1: Evaluation of model performance relative to experts: ROC curves 17 | ![Figure 1](Fig1.png) 18 | Solid curves are median ROC curves that show model performance; shading indicates 95% confidence bands. Expert operating points (x, y) on the ROC curve are shown as solid circles with (x, y) = (false-positive rate [FPR, aka 1 − specificity], true-positive rate [TPR, aka sensitivity]). Markers are colored in black when they lie above the median ROC curve of the model (better than model performance) and in gray when they lie below (inferior to model performance). 
EUROC = % of experts under the ROC curve; GPD = generalized periodic discharge; GRDA = generalized rhythmic delta activity; LPD = lateralized periodic discharge; LRDA = lateralized rhythmic delta activity; PPV = positive predictive value; ROC = receiver operating characteristic. 19 | 20 | ### Figure 2: Evaluation of model performance relative to experts: PR curves 21 | ![Figure 2](Fig2.png) 22 | Solid curves are median PR curves that show model performance; shading indicates 95% confidence bands. Expert operating points (x, y) on the PR curve are shown as solid triangles with (x, y) = (TPR, precision [aka positive predictive value (PPV)]). Markers are colored in black when they lie above the median PR curve of the model (better than model performance) and in gray when they lie below (inferior to model performance). EUPRC = % of experts under the PR curve; GPD = generalized periodic discharge; GRDA = generalized rhythmic delta activity; LPD = lateralized periodic discharge; LRDA = lateralized rhythmic delta activity; PR = precision-recall. 23 | 24 | ### Figure 3: Maps of the Ictal-Interictal-Injury Continuum Learned by SPaRCNet 25 | ![Figure 3](Fig3.png) 26 | Two-dimensional coordinates are calculated by an algorithm (UMAP) such that patterns assigned similar probabilities for each class by the model are near each other in the map. The map learned by SPaRCNet (model) forms a “starfish” pattern, with the 5 IIIC patterns (SZ, LPD, GPD, LRDA, and GRDA) emanating as arms from a central region containing non-IIIC patterns. The coloring of the map indicates the model’s classification decisions and closely matches the pattern obtained by overlaying expert-consensus labels (human). Model uncertainty (uncertainty), indicating the degree to which the model assigns similar probabilities to multiple patterns, is greatest near the central region and decreases toward the tips of the “starfish” arms. The probability that an EEG segment represents a seizure, or any one of the 4 most highly epileptiform patterns (the sum of the probabilities of SZ, LPD, GPD, and LRDA), is shown in SZ burden and IIIC burden; a minimal sketch of how these quantities can be derived from the model’s class probabilities is given after Figure 6 below. GPD = generalized periodic discharge; GRDA = generalized rhythmic delta activity; IIIC = ictal-interictal-injury continuum; LPD = lateralized periodic discharge; LRDA = lateralized rhythmic delta activity; SZ = seizure. 27 | 28 | ### Figure 4: Examples of Smooth Pattern Transition for SZ (A) and LPD (B) 29 | ![Figure 4](Fig4.png) 30 | Samples are selected at different levels of model uncertainty ranging from the “starfish” arm tips toward the central area. IIIC patterns transition smoothly from distinct prototype patterns at the starfish arm tips into less distinct patterns near the body, lending credence to the concept of a “continuum” between ictal and interictal EEG patterns. IIIC = ictal-interictal-injury continuum; LPD = lateralized periodic discharge; SZ = seizure. 31 | 32 | ### Figure 5: Examples of Smooth Pattern Transition for GPD (A) and LRDA (B) 33 | ![Figure 5](Fig5.png) 34 | Samples are selected at different levels of model uncertainty ranging from the “starfish” arm tips toward the central area. GPD = generalized periodic discharge; LRDA = lateralized rhythmic delta activity. 35 | 36 | ### Figure 6: Examples of Smooth Pattern Transition for GRDA (A) and “Other” (B) Samples 37 | ![Figure 6](Fig6.png) 38 | 39 | Samples are selected at different levels of model uncertainty ranging from the “starfish” arm tips toward the central area. GRDA = generalized rhythmic delta activity.
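
The uncertainty, SZ burden, and IIIC burden panels described above are all functions of the six class probabilities that SPaRCNet outputs for each EEG segment. In the repository these panels are produced by the MATLAB figure scripts (presumably `Figure3_UMAPs.m` and the sample scripts for Figures 4-6); the Python snippet below is only a minimal sketch of one common formulation (normalized entropy for uncertainty, summed class probabilities for the burdens). The class ordering used here is an assumption for illustration, not taken from the repository.

```python
import numpy as np

# Assumed ordering of SPaRCNet's 6 outputs: [Other, SZ, LPD, GPD, LRDA, GRDA].
# Verify against the repository code before relying on these indices.
CLASSES = ["Other", "SZ", "LPD", "GPD", "LRDA", "GRDA"]

def uncertainty_and_burden(probs):
    """probs: (n_segments, 6) array of softmax probabilities, one row per EEG segment."""
    probs = np.asarray(probs, dtype=float)
    eps = 1e-12
    # Normalized Shannon entropy: 0 when one class takes all the mass,
    # 1 when all six classes are equally likely.
    uncertainty = -np.sum(probs * np.log(probs + eps), axis=1) / np.log(probs.shape[1])
    sz_burden = probs[:, CLASSES.index("SZ")]
    iiic_burden = probs[:, 1:5].sum(axis=1)  # SZ + LPD + GPD + LRDA
    return uncertainty, sz_burden, iiic_burden

# Example on a random batch of probability vectors:
# u, sz, iiic = uncertainty_and_burden(np.random.dirichlet(np.ones(6), size=10))
```

If the trained model uses a different class ordering, the columns should be permuted accordingly before computing the burdens.
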
40 | 41 | ## Figures and Tables from Supplemental Material 42 | 43 | ### Figure S1. Web-based GUI used to collect annotations of EEG segments from multiple experts. 44 | ![Figure S1](FigS1.png) 45 | There is no code for this figure. 46 | 47 | ### Figure S2. Flow diagram for creation of training and test datasets. 48 | ![Figure S2](FigS2.png) 49 | 50 | ### Figure S3. Samples belonging to the same stationary period (SP) are assigned the same label. 51 | ![Figure S3](FigS3.png) 52 | 53 | ### Figure S4. Architecture of SzNet, based on the Dense-Net CNN architecture. 54 | ![Figure S4](FigS4.png) 55 | There is no code for this figure. 56 | 57 | ### Figure S5. Creation of pseudo-labels via “label spreading” in Steps 3-4 of the model development procedure for SzNet. 58 | ![Figure S5](FigS5.png) 59 | A: The embedding map (“UMAP”) produced by the CNN trained in Step 1. “Real” labels from the 20 different experts are indicated by the colors; however, not all experts labeled all the points. To augment the training data, an individual UMAP was created for each expert and used to “spread” labels from the points that each expert did label (left-hand UMAP in each of the pairs shown in B) to the points that the expert did not label (right-hand UMAP of each pair). These UMAPs were then averaged to create the single overall UMAP illustrated on the right in A. Note that the averaging process produces “soft” labels, whereas for illustration purposes the final UMAP depicted above is colored based on the IIIC pattern whose label has the highest value after label spreading. 60 | 61 | ### Figure S6. Model development for SPaRCNet. 62 | ![Figure S6](FigS6.png) 63 | There is no code for this figure. 64 | 65 | ### Figure S7. Thresholds used for “First or second-best classification”. 66 | ![Figure S7](FigS7.png) 67 | There is no code for this figure. 68 | 69 | ### Figure S8. Additional performance metrics for SzNet. 70 | ![Figure S8](FigS8.png) 71 | (A) Bar plots showing average inter-rater reliability (IRR) between pairs of experts (ee-pIRR; light red bars) and the average agreement between the algorithm and each expert (ea-pIRR; dark red bars); and average agreement of experts with the label assigned by the majority of other experts (ee-mIRR) and of the algorithm with the majority of experts (ea-mIRR). The differences for each of these pairs are shown in B, with values above 0 indicating better performance for the algorithm (ea > ee) and values below 0 indicating better performance among experts. Confidence intervals are calculated via bootstrapping. The confusion matrices (CM) in subplots C, D, E, and F expand on the IRR results, showing not only how well experts and the algorithm agree with the label treated for each analysis as the “correct answer” (numbers along the diagonals), but also the distribution of disagreements (values along each row). A simplified sketch of these IRR computations is given at the end of this section. 72 | 73 | ### Table S1. Prior literature on seizure and IIIC pattern detection. 74 | ![Table S1](TableS1.png) 75 | There is no code for this table. 76 | 77 | ### Figure S9. PRISMA (Preferred Reporting Items for Systematic Reviews and Meta‐Analyses) flow diagram, where N corresponds to the number of articles. 78 | ![Figure S9](FigS9.png) 79 | There is no code for this figure.
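
Figure S8 appears to be built from `FigureS8_IRR.m` and the IRR callbacks in `code_for_figures/Callbacks` (`fcn_get_pIRR_human.m`, `fcn_get_pIRR_model.m`, `fcn_get_mIRR_human_loo.m`, `fcn_get_mIRR_model.m`). As a rough guide to what those quantities measure, the sketch below computes pairwise and majority-based agreement using plain percent agreement and hypothetical variable names; it is a simplified stand-in, not necessarily the exact statistic reported in the paper.

```python
import numpy as np

def pairwise_irr(expert_labels):
    """Average pairwise percent agreement among experts (ee-pIRR analogue).
    expert_labels: (n_experts, n_segments) array of integer class labels."""
    L = np.asarray(expert_labels)
    n_experts = L.shape[0]
    pairs = [(i, j) for i in range(n_experts) for j in range(i + 1, n_experts)]
    return float(np.mean([np.mean(L[i] == L[j]) for i, j in pairs]))

def algorithm_expert_irr(expert_labels, model_labels):
    """Average agreement between the model and each individual expert (ea-pIRR analogue)."""
    L = np.asarray(expert_labels)
    m = np.asarray(model_labels)
    return float(np.mean([np.mean(L[i] == m) for i in range(L.shape[0])]))

def majority_irr(expert_labels, rater_labels):
    """Agreement of one rater with the majority vote of the experts (ea-mIRR analogue;
    the repository's expert-vs-majority variant uses a leave-one-out majority,
    see fcn_get_mIRR_human_loo.m)."""
    L = np.asarray(expert_labels)
    majority = np.array([np.bincount(col).argmax() for col in L.T])
    return float(np.mean(np.asarray(rater_labels) == majority))

# Hypothetical usage with integer labels 0-5 for the six IIIC classes:
# ee_pIRR = pairwise_irr(expert_labels)
# ea_pIRR = algorithm_expert_irr(expert_labels, model_labels)
# ea_mIRR = majority_irr(expert_labels, model_labels)
```
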
80 | -------------------------------------------------------------------------------- /SPaRCNet/Data/Raw/sample_cEEG.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/SPaRCNet/Data/Raw/sample_cEEG.mat -------------------------------------------------------------------------------- /SPaRCNet/instruction.txt: -------------------------------------------------------------------------------- 1 | SpacNet Configure 2 | 3 | The steps to run the code are as follows: 4 | 5 | 0. Please install "anaconda3" and open a terminal. Please input the following lines step by step. 6 | 7 | 1. $conda create -n spacnet python=3.6 8 | 9 | 2. $activate spacnet 10 | 11 | 3. $conda install -c conda-forge hdf5storage 12 | 13 | 4. $pip install mne 14 | 15 | 5. $pip install torch==1.5.0+cpu torchvision==0.6.0+cpu -f https://download.pytorch.org/whl/torch_stable.html 16 | 17 | 6. $python runSPaRCNet.py -------------------------------------------------------------------------------- /SPaRCNet/model_1130.pt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/SPaRCNet/model_1130.pt -------------------------------------------------------------------------------- /SPaRCNet/runSPaRCNet.py: -------------------------------------------------------------------------------- 1 | import hdf5storage 2 | import numpy as np 3 | import re 4 | from mne.filter import filter_data, notch_filter 5 | import time 6 | import numpy as np 7 | import random 8 | import re 9 | 10 | from collections import OrderedDict 11 | import torch 12 | import torch.nn as nn 13 | import torch.nn.functional as F 14 | import torch.optim as optim 15 | from collections import Counter 16 | import os 17 | 18 | print ("") 19 | print ("lib finish") 20 | print ("") 21 | 22 | 23 | ################################################## nets 24 | class _DenseLayer(nn.Sequential): 25 | def __init__(self, num_input_features, growth_rate, bn_size, drop_rate, conv_bias, batch_norm): 26 | super(_DenseLayer, self).__init__() 27 | if batch_norm: 28 | self.add_module('norm1', nn.BatchNorm1d(num_input_features)), 29 | # self.add_module('relu1', nn.ReLU()), 30 | self.add_module('elu1', nn.ELU()), 31 | self.add_module('conv1', nn.Conv1d(num_input_features, bn_size * growth_rate, kernel_size=1, stride=1, bias=conv_bias)), 32 | if batch_norm: 33 | self.add_module('norm2', nn.BatchNorm1d(bn_size * growth_rate)), 34 | # self.add_module('relu2', nn.ReLU()), 35 | self.add_module('elu2', nn.ELU()), 36 | self.add_module('conv2', nn.Conv1d(bn_size * growth_rate, growth_rate, kernel_size=3, stride=1, padding=1, bias=conv_bias)), 37 | # self.add_module('conv2', nn.Conv1d(bn_size * growth_rate, growth_rate, kernel_size=7, stride=1, padding=3, bias=conv_bias)), 38 | self.drop_rate = drop_rate 39 | 40 | def forward(self, x): 41 | # print("Dense Layer Input: ") 42 | # print(x.size()) 43 | new_features = super(_DenseLayer, self).forward(x) 44 | # print("Dense Layer Output:") 45 | # print(new_features.size()) 46 | if self.drop_rate > 0: 47 | new_features = F.dropout(new_features, p=self.drop_rate, training=self.training) 48 | return torch.cat([x, new_features], 1) 49 | 50 | 51 | class _DenseBlock(nn.Sequential): 52 | def __init__(self, num_layers, num_input_features, bn_size, growth_rate, drop_rate, conv_bias, batch_norm): 53 | super(_DenseBlock, 
self).__init__() 54 | for i in range(num_layers): 55 | layer = _DenseLayer(num_input_features + i * growth_rate, growth_rate, bn_size, drop_rate, conv_bias, batch_norm) 56 | self.add_module('denselayer%d' % (i + 1), layer) 57 | 58 | 59 | class _Transition(nn.Sequential): 60 | def __init__(self, num_input_features, num_output_features, conv_bias, batch_norm): 61 | super(_Transition, self).__init__() 62 | if batch_norm: 63 | self.add_module('norm', nn.BatchNorm1d(num_input_features)) 64 | # self.add_module('relu', nn.ReLU()) 65 | self.add_module('elu', nn.ELU()) 66 | self.add_module('conv', nn.Conv1d(num_input_features, num_output_features, kernel_size=1, stride=1, bias=conv_bias)) 67 | self.add_module('pool', nn.AvgPool1d(kernel_size=2, stride=2)) 68 | 69 | 70 | class DenseNetEnconder(nn.Module): 71 | def __init__(self, growth_rate=32, block_config=(4, 4, 4, 4, 4, 4, 4), #block_config=(6, 12, 24, 48, 24, 20, 16), #block_config=(6, 12, 24, 16), 72 | in_channels=16, num_init_features=64, bn_size=4, drop_rate=0.2, conv_bias=True, batch_norm=False): 73 | 74 | super(DenseNetEnconder, self).__init__() 75 | 76 | # First convolution 77 | first_conv = OrderedDict([('conv0', nn.Conv1d(in_channels, num_init_features, kernel_size=7, stride=2, padding=3, bias=conv_bias))]) 78 | # first_conv = OrderedDict([('conv0', nn.Conv1d(in_channels, num_init_features, groups=in_channels, kernel_size=7, stride=2, padding=3, bias=conv_bias))]) 79 | # first_conv = OrderedDict([('conv0', nn.Conv1d(in_channels, num_init_features, kernel_size=15, stride=2, padding=7, bias=conv_bias))]) 80 | 81 | # first_conv = OrderedDict([ 82 | # ('conv0-depth', nn.Conv1d(in_channels, 32, groups=in_channels, kernel_size=7, stride=2, padding=3, bias=conv_bias)), 83 | # ('conv0-point', nn.Conv1d(32, num_init_features, kernel_size=1, stride=1, bias=conv_bias)), 84 | # ]) 85 | 86 | if batch_norm: 87 | first_conv['norm0'] = nn.BatchNorm1d(num_init_features) 88 | # first_conv['relu0'] = nn.ReLU() 89 | first_conv['elu0'] = nn.ELU() 90 | first_conv['pool0'] = nn.MaxPool1d(kernel_size=3, stride=2, padding=1) 91 | 92 | self.densenet = nn.Sequential(first_conv) 93 | 94 | num_features = num_init_features 95 | for i, num_layers in enumerate(block_config): 96 | block = _DenseBlock(num_layers=num_layers, num_input_features=num_features, 97 | bn_size=bn_size, growth_rate=growth_rate, drop_rate=drop_rate, conv_bias=conv_bias, batch_norm=batch_norm) 98 | self.densenet.add_module('denseblock%d' % (i + 1), block) 99 | num_features = num_features + num_layers * growth_rate 100 | if i != len(block_config) - 1: 101 | trans = _Transition(num_input_features=num_features, num_output_features=num_features // 2, conv_bias=conv_bias, batch_norm=batch_norm) 102 | self.densenet.add_module('transition%d' % (i + 1), trans) 103 | num_features = num_features // 2 104 | 105 | # Final batch norm 106 | if batch_norm: 107 | self.densenet.add_module('norm{}'.format(len(block_config) + 1), nn.BatchNorm1d(num_features)) 108 | # self.features.add_module('norm5', BatchReNorm1d(num_features)) 109 | 110 | self.densenet.add_module('relu{}'.format(len(block_config) + 1), nn.ReLU()) 111 | self.densenet.add_module('pool{}'.format(len(block_config) + 1), nn.AvgPool1d(kernel_size=7, stride=3)) # stride originally 1 112 | 113 | self.num_features = num_features 114 | 115 | # Official init from torch repo. 
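# Kaiming-normal initialization for every Conv1d weight; BatchNorm1d layers
# start at scale 1 with zero bias, and Linear biases are zeroed. The same
# scheme is repeated in DenseNetClassifier below.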
116 | for m in self.modules(): 117 | if isinstance(m, nn.Conv1d): 118 | nn.init.kaiming_normal_(m.weight.data) 119 | elif isinstance(m, nn.BatchNorm1d): 120 | m.weight.data.fill_(1) 121 | m.bias.data.zero_() 122 | elif isinstance(m, nn.Linear): 123 | m.bias.data.zero_() 124 | 125 | def forward(self, x): 126 | features = self.densenet(x) 127 | # print("Final Output") 128 | # print(features.size()) 129 | return features.view(features.size(0), -1) 130 | 131 | 132 | class DenseNetClassifier(nn.Module): 133 | # def __init__(self, growth_rate=16, block_config=(3, 6, 12, 8), #block_config=(6, 12, 24, 48, 24, 20, 16), #block_config=(6, 12, 24, 16), 134 | # in_channels=16, num_init_features=32, bn_size=2, drop_rate=0, conv_bias=False, drop_fc=0.5, num_classes=6): 135 | def __init__(self, growth_rate=32, block_config=(4, 4, 4, 4, 4, 4, 4), 136 | in_channels=16, num_init_features=64, bn_size=4, drop_rate=0.2, conv_bias=True, batch_norm=False, drop_fc=0.5, num_classes=6): 137 | 138 | super(DenseNetClassifier, self).__init__() 139 | 140 | self.features = DenseNetEnconder(growth_rate=growth_rate, block_config=block_config, in_channels=in_channels, 141 | num_init_features=num_init_features, bn_size=bn_size, drop_rate=drop_rate, 142 | conv_bias=conv_bias, batch_norm=batch_norm) 143 | 144 | # Linear layer 145 | self.classifier = nn.Sequential( 146 | nn.Dropout(p=drop_fc), 147 | nn.Linear(self.features.num_features, num_classes) 148 | ) 149 | 150 | # Official init from torch repo. 151 | for m in self.modules(): 152 | if isinstance(m, nn.Conv1d): 153 | nn.init.kaiming_normal_(m.weight.data) 154 | elif isinstance(m, nn.BatchNorm1d): 155 | m.weight.data.fill_(1) 156 | m.bias.data.zero_() 157 | elif isinstance(m, nn.Linear): 158 | m.bias.data.zero_() 159 | 160 | def forward(self, x): 161 | features = self.features(x) 162 | out = self.classifier(features) 163 | return out, features 164 | 165 | 166 | device = "cuda" if torch.cuda.is_available() else "cpu" 167 | 168 | print ("device: ", device) 169 | print ("") 170 | 171 | model_cnn = torch.load("./Tools/model_1130.pt", map_location=torch.device('cpu')) 172 | model_cnn.eval() 173 | 174 | 175 | 176 | ################################################################################################################################## 177 | 178 | print ("") 179 | print ("$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$") 180 | print ("read data") 181 | print ("$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$") 182 | print ("") 183 | 184 | ###################################################################################### 185 | 186 | 187 | 188 | # file_name_array = np.loadtxt("file_path_24h.txt", delimiter = ',', dtype = np.str) 189 | 190 | total_file_list = os.listdir("./Data/Raw/") 191 | 192 | print ("len(total_file_list): ", len(total_file_list)) 193 | print ("") 194 | 195 | T = len(total_file_list) 196 | 197 | print ("T: ", T) 198 | print ("") 199 | 200 | 201 | for t in range(T): 202 | 203 | print ("***************************************") 204 | print ("t: ", t) 205 | print ("") 206 | 207 | 208 | file_name = total_file_list[t] 209 | save_name = file_name.rstrip(".mat") 210 | 211 | if os.path.isfile("./Data/iiic/"+ save_name + "_score.csv"): 212 | print('--alr done ' + save_name) 213 | 214 | else: 215 | print ("file_name: ", file_name) 216 | print ("") 217 | print ("save_name: ", save_name) 218 | print ("") 219 | 220 | path1 = "./Data/Raw/" + file_name 221 | print ("path1: ", path1) 222 | print ("") 223 | 224 | mat = 
hdf5storage.loadmat(path1) 225 | print (mat.keys()) 226 | print ("") 227 | 228 | X = mat['data'] 229 | print ("X.shape: ", X.shape) 230 | print ("") 231 | 232 | print ("************ Montage") 233 | print ("") 234 | 235 | X2 = X[[0,4,5,6, 11,15,16,17, 0,1,2,3, 11,12,13,14]] - X[[4,5,6,7, 15,16,17,18, 1,2,3,7, 12,13,14,18]] 236 | print ("X2.shape: ", X2.shape) 237 | 238 | print ("************ filtering") 239 | print ("") 240 | 241 | X2 = notch_filter(X2, 200, 60, n_jobs=-1, verbose='ERROR') 242 | X2 = filter_data(X2, 200, 0.5, 40, n_jobs=-1, verbose='ERROR') 243 | 244 | print ("X2.shape: ", X2.shape) 245 | N = int(X2.shape[1]/400) 246 | 247 | print ("N: ", N) 248 | print ("") 249 | 250 | print ("************ reshaping") 251 | print ("") 252 | 253 | X3 = np.zeros((N-5,16,2000)) 254 | 255 | for n in range(N-5): 256 | start_sn = n*400 257 | end_sn = start_sn + 2000 258 | x = X2[:,start_sn:end_sn] 259 | X3[n,:,:] = x 260 | 261 | print ("X3.shape: ", X3.shape) 262 | 263 | X = X3 264 | 265 | print ("X.shape: ", X.shape) 266 | print ("") 267 | print ("np.isnan(X).sum(): ", np.isnan(X).sum()) 268 | print ("np.max(X): ", np.max(X)) 269 | print ("np.min(X): ", np.min(X)) 270 | print ("") 271 | 272 | X = np.where(X<=500, X, 500) 273 | X = np.where(X>=-500, X, -500) 274 | 275 | print ("X.shape: ", X.shape) 276 | print ("") 277 | print ("np.isnan(X).sum(): ", np.isnan(X).sum()) 278 | print ("np.max(X): ", np.max(X)) 279 | print ("np.min(X): ", np.min(X)) 280 | print ("") 281 | 282 | X4 = X 283 | 284 | del X 285 | del X2 286 | del X3 287 | 288 | print ("X4.shape: ", X4.shape) 289 | print ("") 290 | 291 | 292 | ######################### evaluation 293 | batch_size = 1000 294 | def get_unlabeled_batch_list(X_train,batch_size): 295 | N = X_train.shape[0] 296 | sn_list = list(range(N)) 297 | K = int(N/batch_size) 298 | X_list = list() 299 | end_sn = 0 300 | 301 | for k in range(K): 302 | start_sn = k*batch_size 303 | end_sn = start_sn + batch_size 304 | 305 | X = X_train[start_sn:end_sn,:,:] 306 | X_list.append(X) 307 | if not end_sn == N: 308 | X = X_train[end_sn:N,:,:] 309 | X_list.append(X) 310 | 311 | return (X_list) 312 | 313 | ################### scanning 314 | model_cnn.eval() #* 315 | # print ("unlabeled losses.avg: ", losses.avg) 316 | 317 | (X_batch_list) = get_unlabeled_batch_list(X4,batch_size) 318 | K = len(X_batch_list) 319 | 320 | print ("K: ", K) 321 | print ("") 322 | 323 | S_list = list() 324 | V_list = list() 325 | 326 | for k in range(K): 327 | if k%100 == 0: 328 | print (k) 329 | 330 | X = X_batch_list[k] 331 | 332 | #print ("X.shape: ", X.shape) 333 | #print ("Y.shape: ", Y.shape) 334 | 335 | X = torch.from_numpy(X).float() 336 | X = X.to(device) 337 | 338 | output, v = model_cnn(X) 339 | 340 | S_list.append(output.detach().to('cpu')) 341 | V_list.append(v.detach().to('cpu')) 342 | 343 | del X 344 | del output 345 | del v 346 | 347 | 348 | #print ("unlabeled losses.avg: ", losses.avg) 349 | #print ("") 350 | 351 | S2 = torch.cat(S_list,dim=0) 352 | prob = F.softmax(S2, 1) 353 | unlabeled_score = prob.numpy() 354 | 355 | print ("") 356 | print ("unlabeled_score.shape: ", unlabeled_score.shape) 357 | print ("") 358 | 359 | V2 = torch.cat(V_list,dim=0) 360 | unlabeled_V = V2.numpy() 361 | 362 | print ("unlabeled_V.shape: ", unlabeled_V.shape) 363 | print ("") 364 | 365 | path3 = "./Data/iiic/"+ save_name + "_score.csv" 366 | np.savetxt(path3, unlabeled_score, delimiter=',') 367 | 368 | #path4 = "./Data/iiic/"+ save_name + "_vector.csv" 369 | #np.savetxt(path4, unlabeled_V, delimiter=',') 
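# Pipeline recap for this recording: 16-channel bipolar montage, 60 Hz notch
# plus 0.5-40 Hz band-pass at 200 Hz, 10 s windows (2000 samples) stepped
# every 2 s (400 samples), amplitudes clipped to +/-500, then scored in
# batches by SPaRCNet; the per-window softmax over the six IIIC classes is
# saved to ./Data/iiic/<save_name>_score.csv.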
370 | 371 | print ("writing finish") 372 | print ("") 373 | 374 | del X4 375 | del X_batch_list 376 | 377 | print ("") 378 | print ("Done!") 379 | print ("") 380 | -------------------------------------------------------------------------------- /Table1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/Table1.png -------------------------------------------------------------------------------- /TableS1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/TableS1.png -------------------------------------------------------------------------------- /code_for_figures/Callbacks/BC_LUT_v2.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Callbacks/BC_LUT_v2.mat -------------------------------------------------------------------------------- /code_for_figures/Callbacks/LOC_18channels.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Callbacks/LOC_18channels.mat -------------------------------------------------------------------------------- /code_for_figures/Callbacks/brewermap.m: -------------------------------------------------------------------------------- 1 | function [map,num,typ,scheme] = brewermap(N,scheme) %#ok<*ISMAT> 2 | % The complete selection of ColorBrewer colorschemes (RGB colormaps). 3 | % 4 | % (c) 2014-2020 Stephen Cobeldick 5 | % 6 | % Returns any RGB colormap from the ColorBrewer colorschemes, especially 7 | % intended for mapping and plots with attractive, distinguishable colors. 8 | % 9 | %%% Basic Syntax: 10 | % brewermap() % print summary 11 | % map = brewermap(N,scheme) 12 | %%% Preset Syntax: 13 | % old = brewermap(scheme) 14 | % map = brewermap() 15 | % map = brewermap(N) 16 | % 17 | % [...,num,typ] = brewermap(...) 18 | % 19 | %% Color Schemes %% 20 | % 21 | % This product includes color specifications and designs developed by Cynthia Brewer. 22 | % See the ColorBrewer website for further information about each colorscheme, 23 | % colour-blind suitability, licensing, and citations: http://colorbrewer.org/ 24 | % Each colorscheme is defined by a set of hand-picked RGB values (nodes). 25 | % To reverse the colormap sequence simply prefix the scheme name with '*'. 26 | % 27 | % If is greater than the requested colorscheme's number of nodes then: 28 | % * Diverging and Sequential schemes are interpolated in Lab colorspace. 29 | % * Qualitative schemes repeat the nodes (i.e. just like LINES does). 30 | % Else: 31 | % * Exact values from the ColorBrewer schemes are returned for all colorschemes. 
32 | % 33 | %%% Diverging 34 | % 35 | % Scheme|'BrBG'|'PRGn'|'PiYG'|'PuOr'|'RdBu'|'RdGy'|'RdYlBu'|'RdYlGn'|'Spectral'| 36 | % ------|------|------|------|------|------|------|--------|--------|----------| 37 | % Nodes | 11 | 11 | 11 | 11 | 11 | 11 | 11 | 11 | 11 | 38 | % 39 | %%% Qualitative 40 | % 41 | % Scheme|'Accent'|'Dark2'|'Paired'|'Pastel1'|'Pastel2'|'Set1'|'Set2'|'Set3'| 42 | % ------|--------|-------|--------|---------|---------|------|------|------| 43 | % Nodes | 8 | 8 | 12 | 9 | 8 | 9 | 8 | 12 | 44 | % 45 | %%% Sequential 46 | % 47 | % Scheme|'Blues'|'BuGn'|'BuPu'|'GnBu'|'Greens'|'Greys'|'OrRd'|'Oranges'|'PuBu'| 48 | % ------|-------|------|------|------|--------|-------|------|---------|------| 49 | % Nodes | 9 | 9 | 9 | 9 | 9 | 9 | 9 | 9 | 9 | 50 | % 51 | % Scheme|'PuBuGn'|'PuRd'|'Purples'|'RdPu'|'Reds'|'YlGn'|'YlGnBu'|'YlOrBr'|'YlOrRd'| 52 | % ------|--------|------|---------|------|------|------|--------|--------|--------| 53 | % Nodes | 9 | 9 | 9 | 9 | 9 | 9 | 9 | 9 | 9 | 54 | % 55 | %% Examples %% 56 | % 57 | %%% Plot a scheme's RGB values: 58 | % >> rgbplot(brewermap(9, 'Blues')) % standard 59 | % >> rgbplot(brewermap(9,'*Blues')) % reversed 60 | % 61 | %%% View information about a colorscheme: 62 | % >> [~,num,typ] = brewermap(NaN,'Paired') 63 | % num = 12 64 | % typ = 'Qualitative' 65 | % 66 | %%% Multi-line plot using matrices: 67 | % >> N = 6; 68 | % >> axes('ColorOrder',brewermap(N,'Pastel2'),'NextPlot','replacechildren') 69 | % >> X = linspace(0,pi*3,1000); 70 | % >> Y = bsxfun(@(x,n)n*sin(x+2*n*pi/N), X(:), 1:N); 71 | % >> plot(X,Y, 'linewidth',4) 72 | % 73 | %%% Multi-line plot in a loop: 74 | % set(0,'DefaultAxesColorOrder',brewermap(NaN,'Accent')) 75 | % N = 6; 76 | % X = linspace(0,pi*3,1000); 77 | % Y = bsxfun(@(x,n)n*sin(x+2*n*pi/N), X(:), 1:N); 78 | % for n = 1:N 79 | % plot(X(:),Y(:,n), 'linewidth',4); 80 | % hold all 81 | % end 82 | % 83 | %%% New colors for the COLORMAP example: 84 | % >> S = load('spine'); 85 | % >> image(S.X) 86 | % >> colormap(brewermap([],'YlGnBu')) 87 | % 88 | %%% New colors for the SURF example: 89 | % >> [X,Y,Z] = peaks(30); 90 | % >> surfc(X,Y,Z) 91 | % >> colormap(brewermap([],'RdYlGn')) 92 | % >> axis([-3,3,-3,3,-10,5]) 93 | % 94 | %% Input and Output Arguments %% 95 | % 96 | %%% Inputs: 97 | % N = NumericScalar, N>=0, an integer to specify the colormap length. 98 | % = [], same length as the current figure's colormap (see COLORMAP). 99 | % = NaN, same length as the defining RGB nodes (useful for line ColorOrder). 100 | % scheme = CharRowVector, a ColorBrewer colorscheme name. 101 | % 102 | %%% Outputs: 103 | % map = NumericMatrix, size Nx3, a colormap of RGB values between 0 and 1. 104 | % num = NumericVector, the number of nodes defining the ColorBrewer colorscheme. 105 | % typ = CharRowVector, the colorscheme type: 'Diverging'/'Qualitative'/'Sequential'. 
106 | % 107 | % See also CUBEHELIX LBMAP PARULA LINES RGBPLOT COLORMAP COLORBAR PLOT PLOT3 AXES SET 108 | 109 | %% Input Wrangling %% 110 | % 111 | persistent scm 112 | % 113 | raw = bmColors(); 114 | % 115 | err = 'First input must be a real positive scalar numeric or [] or NaN.'; 116 | if nargin==0&&nargout==0 117 | hdr = { 'Type'; 'Scheme'; 'Nodes'}; 118 | tsn = [{raw.typ};{raw.str};{raw.num}]; 119 | fprintf('%-12s %-9s %s\n',hdr{:}); 120 | fprintf('%-12s %-9s %u\n',tsn{:}); 121 | return 122 | elseif nargin==0 || isnumeric(N)&&isequal(N,[]) 123 | % Default is the same as MATLAB colormaps: 124 | N = size(get(gcf,'colormap'),1); 125 | if nargin<2 126 | assert(~isempty(scm),'SC:colorbrewer:SchemeNotPreset',... 127 | 'Scheme must be preset before this call: BREWERMAP(SCHEME)') 128 | scheme = scm; 129 | end 130 | elseif nargin==1&&ischar(N)&&ndims(N)==2&&size(N,1)==1 131 | if strcmpi(N,'list') 132 | map = {raw.str}; 133 | num = [raw.num]; 134 | typ = {raw.typ}; 135 | return 136 | end 137 | scheme = N; % preset 138 | else 139 | assert(isnumeric(N)&&isscalar(N),... 140 | 'SC:brewermap:NotScalarNumeric',err) 141 | assert(isnan(N)||isreal(N)&&isfinite(N)&&fix(N)==N&&N>=0,... 142 | 'SC:brewermap:NotRealPositiveNotNaN',err) 143 | end 144 | % 145 | assert(ischar(scheme)&&ndims(scheme)==2&&size(scheme,1)==1,... 146 | 'SC:brewermap:NotCharacterVector',... 147 | 'Second input must be a character vector (the scheme name).') 148 | isr = strncmp(scheme,'*',1); 149 | ids = strcmpi(scheme(1+isr:end),{raw.str}); 150 | assert(any(ids),'SC:brewermap:UnknownScheme','Unknown scheme name: %s',scheme) 151 | % 152 | num = raw(ids).num; 153 | typ = raw(ids).typ; 154 | % 155 | if ischar(N) 156 | map = scm; 157 | scm = N; 158 | return 159 | elseif N==0 160 | map = ones(0,3); 161 | return 162 | elseif isnan(N) 163 | N = num; 164 | end 165 | % 166 | % Downsample: 167 | [idx,itp] = bmIndex(N,num,typ); 168 | map= raw(ids).rgb(idx,:)/255; 169 | % Interpolate: 170 | if itp 171 | M = [... sRGB to XYZ 172 | 0.4124564,0.3575761,0.1804375;... 173 | 0.2126729,0.7151522,0.0721750;... 174 | 0.0193339,0.1191920,0.9503041]; 175 | wpt = [0.95047,1,1.08883]; % D65 176 | % 177 | map = bmRGB2Lab(map,M,wpt); % optional 178 | % 179 | % Extrapolate a small amount beyond end nodes: 180 | %ido = linspace(0,num+1,N+2); 181 | %ido = ido(2:end-1); 182 | % Interpolation completely within end nodes: 183 | ido = linspace(1,num,N); 184 | % 185 | switch typ 186 | case 'Diverging' 187 | mid = ceil(num/2); 188 | ida = 1:mid; 189 | idz = mid:num; 190 | map = [... 191 | interp1(ida,map(ida,:),ido(ido<=mid),'pchip');... 192 | interp1(idz,map(idz,:),ido(ido>mid),'pchip')]; 193 | case 'Sequential' 194 | map = interp1(1:num,map,ido,'pchip'); 195 | otherwise 196 | error('SC:brewermap:NoInterp','Cannot interpolate this type.') 197 | end 198 | % 199 | map = bmLab2RGB(map,M,wpt); % optional 200 | end 201 | % Limit output range: 202 | map = max(0,min(1,map)); 203 | % Reverse row order: 204 | if isr 205 | map = map(end:-1:1,:); 206 | end 207 | % 208 | end 209 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%brewermap 210 | function lab = bmRGB2Lab(rgb,M,wpt) 211 | % Convert a matrix of sRGB values to Lab. 212 | %applycform(rgb,makecform('srgb2lab','AdaptedWhitePoint',wpt)) 213 | % RGB2XYZ: 214 | xyz = bmGammaInv(rgb) * M.'; 215 | % Remember to include my license when copying my implementation. 
216 | % XYZ2Lab: 217 | xyz = bsxfun(@rdivide,xyz,wpt); 218 | idx = xyz>(6/29)^3; 219 | F = idx.*(xyz.^(1/3)) + ~idx.*(xyz*(29/6)^2/3+4/29); 220 | lab(:,2:3) = bsxfun(@times,[500,200],F(:,1:2)-F(:,2:3)); 221 | lab(:,1) = 116*F(:,2) - 16; 222 | end 223 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%bmRGB2Lab 224 | function rgb = bmGammaInv(rgb) 225 | % Inverse gamma correction of sRGB data. 226 | idx = rgb <= 0.04045; 227 | rgb(idx) = rgb(idx) / 12.92; 228 | rgb(~idx) = real(((rgb(~idx) + 0.055) / 1.055).^2.4); 229 | end 230 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%bmGammaInv 231 | function rgb = bmLab2RGB(lab,M,wpt) 232 | % Convert a matrix of Lab values to sRGB. 233 | %applycform(lab,makecform('lab2srgb','AdaptedWhitePoint',wpt)) 234 | % Lab2XYZ 235 | tmp = bsxfun(@rdivide,lab(:,[2,1,3]),[500,Inf,-200]); 236 | tmp = bsxfun(@plus,tmp,(lab(:,1)+16)/116); 237 | idx = tmp>(6/29); 238 | tmp = idx.*(tmp.^3) + ~idx.*(3*(6/29)^2*(tmp-4/29)); 239 | xyz = bsxfun(@times,tmp,wpt); 240 | % Remember to include my license when copying my implementation. 241 | % XYZ2RGB 242 | rgb = max(0,min(1, bmGammaCor(xyz / M.'))); 243 | end 244 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%cbLab2RGB 245 | function rgb = bmGammaCor(rgb) 246 | % Gamma correction of sRGB data. 247 | idx = rgb <= 0.0031308; 248 | rgb(idx) = 12.92 * rgb(idx); 249 | rgb(~idx) = real(1.055 * rgb(~idx).^(1/2.4) - 0.055); 250 | end 251 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%bmGammaCor 252 | function [idx,itp] = bmIndex(N,num,typ) 253 | % Ensure exactly the same colors as the online ColorBrewer colorschemes. 254 | % 255 | itp = N>num; 256 | switch typ 257 | case 'Qualitative' 258 | itp = false; 259 | idx = 1+mod(0:N-1,num); 260 | case 'Diverging' 261 | switch N 262 | case 1 % extrapolated 263 | idx = 8; 264 | case 2 % extrapolated 265 | idx = [4,12]; 266 | case 3 267 | idx = [5,8,11]; 268 | case 4 269 | idx = [3,6,10,13]; 270 | case 5 271 | idx = [3,6,8,10,13]; 272 | case 6 273 | idx = [2,5,7,9,11,14]; 274 | case 7 275 | idx = [2,5,7,8,9,11,14]; 276 | case 8 277 | idx = [2,4,6,7,9,10,12,14]; 278 | case 9 279 | idx = [2,4,6,7,8,9,10,12,14]; 280 | case 10 281 | idx = [1,2,4,6,7,9,10,12,14,15]; 282 | otherwise 283 | idx = [1,2,4,6,7,8,9,10,12,14,15]; 284 | end 285 | case 'Sequential' 286 | switch N 287 | case 1 % extrapolated 288 | idx = 6; 289 | case 2 % extrapolated 290 | idx = [4,8]; 291 | case 3 292 | idx = [3,6,9]; 293 | case 4 294 | idx = [2,5,7,10]; 295 | case 5 296 | idx = [2,5,7,9,11]; 297 | case 6 298 | idx = [2,4,6,7,9,11]; 299 | case 7 300 | idx = [2,4,6,7,8,10,12]; 301 | case 8 302 | idx = [1,3,4,6,7,8,10,12]; 303 | otherwise 304 | idx = [1,3,4,6,7,8,10,11,13]; 305 | end 306 | otherwise 307 | error('SC:brewermap:UnknownType','Unknown type string.') 308 | end 309 | % 310 | end 311 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%bmIndex 312 | function raw = bmColors() 313 | % Return a structure of all colorschemes: name, scheme type, RGB values, number of nodes. 
314 | % Order: first sort by , then case-insensitive sort by : 315 | raw(35).str = 'YlOrRd'; 316 | raw(35).typ = 'Sequential'; 317 | raw(35).rgb = [255,255,204;255,255,178;255,237,160;254,217,118;254,204,92;254,178,76;253,141,60;252,78,42;240,59,32;227,26,28;189,0,38;177,0,38;128,0,38]; 318 | raw(34).str = 'YlOrBr'; 319 | raw(34).typ = 'Sequential'; 320 | raw(34).rgb = [255,255,229;255,255,212;255,247,188;254,227,145;254,217,142;254,196,79;254,153,41;236,112,20;217,95,14;204,76,2;153,52,4;140,45,4;102,37,6]; 321 | raw(33).str = 'YlGnBu'; 322 | raw(33).typ = 'Sequential'; 323 | raw(33).rgb = [255,255,217;255,255,204;237,248,177;199,233,180;161,218,180;127,205,187;65,182,196;29,145,192;44,127,184;34,94,168;37,52,148;12,44,132;8,29,88]; 324 | raw(32).str = 'YlGn'; 325 | raw(32).typ = 'Sequential'; 326 | raw(32).rgb = [255,255,229;255,255,204;247,252,185;217,240,163;194,230,153;173,221,142;120,198,121;65,171,93;49,163,84;35,132,67;0,104,55;0,90,50;0,69,41]; 327 | raw(31).str = 'Reds'; 328 | raw(31).typ = 'Sequential'; 329 | raw(31).rgb = [255,245,240;254,229,217;254,224,210;252,187,161;252,174,145;252,146,114;251,106,74;239,59,44;222,45,38;203,24,29;165,15,21;153,0,13;103,0,13]; 330 | raw(30).str = 'RdPu'; 331 | raw(30).typ = 'Sequential'; 332 | raw(30).rgb = [255,247,243;254,235,226;253,224,221;252,197,192;251,180,185;250,159,181;247,104,161;221,52,151;197,27,138;174,1,126;122,1,119;122,1,119;73,0,106]; 333 | raw(29).str = 'Purples'; 334 | raw(29).typ = 'Sequential'; 335 | raw(29).rgb = [252,251,253;242,240,247;239,237,245;218,218,235;203,201,226;188,189,220;158,154,200;128,125,186;117,107,177;106,81,163;84,39,143;74,20,134;63,0,125]; 336 | raw(28).str = 'PuRd'; 337 | raw(28).typ = 'Sequential'; 338 | raw(28).rgb = [247,244,249;241,238,246;231,225,239;212,185,218;215,181,216;201,148,199;223,101,176;231,41,138;221,28,119;206,18,86;152,0,67;145,0,63;103,0,31]; 339 | raw(27).str = 'PuBuGn'; 340 | raw(27).typ = 'Sequential'; 341 | raw(27).rgb = [255,247,251;246,239,247;236,226,240;208,209,230;189,201,225;166,189,219;103,169,207;54,144,192;28,144,153;2,129,138;1,108,89;1,100,80;1,70,54]; 342 | raw(26).str = 'PuBu'; 343 | raw(26).typ = 'Sequential'; 344 | raw(26).rgb = [255,247,251;241,238,246;236,231,242;208,209,230;189,201,225;166,189,219;116,169,207;54,144,192;43,140,190;5,112,176;4,90,141;3,78,123;2,56,88]; 345 | raw(25).str = 'Oranges'; 346 | raw(25).typ = 'Sequential'; 347 | raw(25).rgb = [255,245,235;254,237,222;254,230,206;253,208,162;253,190,133;253,174,107;253,141,60;241,105,19;230,85,13;217,72,1;166,54,3;140,45,4;127,39,4]; 348 | raw(24).str = 'OrRd'; 349 | raw(24).typ = 'Sequential'; 350 | raw(24).rgb = [255,247,236;254,240,217;254,232,200;253,212,158;253,204,138;253,187,132;252,141,89;239,101,72;227,74,51;215,48,31;179,0,0;153,0,0;127,0,0]; 351 | raw(23).str = 'Greys'; 352 | raw(23).typ = 'Sequential'; 353 | raw(23).rgb = [255,255,255;247,247,247;240,240,240;217,217,217;204,204,204;189,189,189;150,150,150;115,115,115;99,99,99;82,82,82;37,37,37;37,37,37;0,0,0]; 354 | raw(22).str = 'Greens'; 355 | raw(22).typ = 'Sequential'; 356 | raw(22).rgb = [247,252,245;237,248,233;229,245,224;199,233,192;186,228,179;161,217,155;116,196,118;65,171,93;49,163,84;35,139,69;0,109,44;0,90,50;0,68,27]; 357 | raw(21).str = 'GnBu'; 358 | raw(21).typ = 'Sequential'; 359 | raw(21).rgb = [247,252,240;240,249,232;224,243,219;204,235,197;186,228,188;168,221,181;123,204,196;78,179,211;67,162,202;43,140,190;8,104,172;8,88,158;8,64,129]; 360 | raw(20).str = 'BuPu'; 361 | raw(20).typ = 'Sequential'; 362 | raw(20).rgb 
= [247,252,253;237,248,251;224,236,244;191,211,230;179,205,227;158,188,218;140,150,198;140,107,177;136,86,167;136,65,157;129,15,124;110,1,107;77,0,75]; 363 | raw(19).str = 'BuGn'; 364 | raw(19).typ = 'Sequential'; 365 | raw(19).rgb = [247,252,253;237,248,251;229,245,249;204,236,230;178,226,226;153,216,201;102,194,164;65,174,118;44,162,95;35,139,69;0,109,44;0,88,36;0,68,27]; 366 | raw(18).str = 'Blues'; 367 | raw(18).typ = 'Sequential'; 368 | raw(18).rgb = [247,251,255;239,243,255;222,235,247;198,219,239;189,215,231;158,202,225;107,174,214;66,146,198;49,130,189;33,113,181;8,81,156;8,69,148;8,48,107]; 369 | raw(17).str = 'Set3'; 370 | raw(17).typ = 'Qualitative'; 371 | raw(17).rgb = [141,211,199;255,255,179;190,186,218;251,128,114;128,177,211;253,180,98;179,222,105;252,205,229;217,217,217;188,128,189;204,235,197;255,237,111]; 372 | raw(16).str = 'Set2'; 373 | raw(16).typ = 'Qualitative'; 374 | raw(16).rgb = [102,194,165;252,141,98;141,160,203;231,138,195;166,216,84;255,217,47;229,196,148;179,179,179]; 375 | raw(15).str = 'Set1'; 376 | raw(15).typ = 'Qualitative'; 377 | raw(15).rgb = [228,26,28;55,126,184;77,175,74;152,78,163;255,127,0;255,255,51;166,86,40;247,129,191;153,153,153]; 378 | raw(14).str = 'Pastel2'; 379 | raw(14).typ = 'Qualitative'; 380 | raw(14).rgb = [179,226,205;253,205,172;203,213,232;244,202,228;230,245,201;255,242,174;241,226,204;204,204,204]; 381 | raw(13).str = 'Pastel1'; 382 | raw(13).typ = 'Qualitative'; 383 | raw(13).rgb = [251,180,174;179,205,227;204,235,197;222,203,228;254,217,166;255,255,204;229,216,189;253,218,236;242,242,242]; 384 | raw(12).str = 'Paired'; 385 | raw(12).typ = 'Qualitative'; 386 | raw(12).rgb = [166,206,227;31,120,180;178,223,138;51,160,44;251,154,153;227,26,28;253,191,111;255,127,0;202,178,214;106,61,154;255,255,153;177,89,40]; 387 | raw(11).str = 'Dark2'; 388 | raw(11).typ = 'Qualitative'; 389 | raw(11).rgb = [27,158,119;217,95,2;117,112,179;231,41,138;102,166,30;230,171,2;166,118,29;102,102,102]; 390 | raw(10).str = 'Accent'; 391 | raw(10).typ = 'Qualitative'; 392 | raw(10).rgb = [127,201,127;190,174,212;253,192,134;255,255,153;56,108,176;240,2,127;191,91,23;102,102,102]; 393 | raw(09).str = 'Spectral'; 394 | raw(09).typ = 'Diverging'; 395 | raw(09).rgb = [158,1,66;213,62,79;215,25,28;244,109,67;252,141,89;253,174,97;254,224,139;255,255,191;230,245,152;171,221,164;153,213,148;102,194,165;43,131,186;50,136,189;94,79,162]; 396 | raw(08).str = 'RdYlGn'; 397 | raw(08).typ = 'Diverging'; 398 | raw(08).rgb = [165,0,38;215,48,39;215,25,28;244,109,67;252,141,89;253,174,97;254,224,139;255,255,191;217,239,139;166,217,106;145,207,96;102,189,99;26,150,65;26,152,80;0,104,55]; 399 | raw(07).str = 'RdYlBu'; 400 | raw(07).typ = 'Diverging'; 401 | raw(07).rgb = [165,0,38;215,48,39;215,25,28;244,109,67;252,141,89;253,174,97;254,224,144;255,255,191;224,243,248;171,217,233;145,191,219;116,173,209;44,123,182;69,117,180;49,54,149]; 402 | raw(06).str = 'RdGy'; 403 | raw(06).typ = 'Diverging'; 404 | raw(06).rgb = [103,0,31;178,24,43;202,0,32;214,96,77;239,138,98;244,165,130;253,219,199;255,255,255;224,224,224;186,186,186;153,153,153;135,135,135;64,64,64;77,77,77;26,26,26]; 405 | raw(05).str = 'RdBu'; 406 | raw(05).typ = 'Diverging'; 407 | raw(05).rgb = [103,0,31;178,24,43;202,0,32;214,96,77;239,138,98;244,165,130;253,219,199;247,247,247;209,229,240;146,197,222;103,169,207;67,147,195;5,113,176;33,102,172;5,48,97]; 408 | raw(04).str = 'PuOr'; 409 | raw(04).typ = 'Diverging'; 410 | raw(04).rgb = 
[127,59,8;179,88,6;230,97,1;224,130,20;241,163,64;253,184,99;254,224,182;247,247,247;216,218,235;178,171,210;153,142,195;128,115,172;94,60,153;84,39,136;45,0,75]; 411 | raw(03).str = 'PRGn'; 412 | raw(03).typ = 'Diverging'; 413 | raw(03).rgb = [64,0,75;118,42,131;123,50,148;153,112,171;175,141,195;194,165,207;231,212,232;247,247,247;217,240,211;166,219,160;127,191,123;90,174,97;0,136,55;27,120,55;0,68,27]; 414 | raw(02).str = 'PiYG'; 415 | raw(02).typ = 'Diverging'; 416 | raw(02).rgb = [142,1,82;197,27,125;208,28,139;222,119,174;233,163,201;241,182,218;253,224,239;247,247,247;230,245,208;184,225,134;161,215,106;127,188,65;77,172,38;77,146,33;39,100,25]; 417 | raw(01).str = 'BrBG'; 418 | raw(01).typ = 'Diverging'; 419 | raw(01).rgb = [84,48,5;140,81,10;166,97,26;191,129,45;216,179,101;223,194,125;246,232,195;245,245,245;199,234,229;128,205,193;90,180,172;53,151,143;1,133,113;1,102,94;0,60,48]; 420 | % number of nodes: 421 | for k = 1:numel(raw) 422 | switch raw(k).typ 423 | case 'Diverging' 424 | raw(k).num = 11; 425 | case 'Qualitative' 426 | raw(k).num = size(raw(k).rgb,1); 427 | case 'Sequential' 428 | raw(k).num = 9; 429 | otherwise 430 | error('SC:brewermap:UnknownType','Unknown type string.') 431 | end 432 | end 433 | % 434 | end 435 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%bmColors 436 | % Code and Implementation: 437 | % Copyright (c) 2014-2020 Stephen Cobeldick 438 | % Color Values Only: 439 | % Copyright (c) 2002 Cynthia Brewer, Mark Harrower, and The Pennsylvania State University. 440 | % 441 | % Licensed under the Apache License, Version 2.0 (the "License"); 442 | % you may not use this file except in compliance with the License. 443 | % You may obtain a copy of the License at 444 | % 445 | % http://www.apache.org/licenses/LICENSE-2.0 446 | % 447 | % Unless required by applicable law or agreed to in writing, software 448 | % distributed under the License is distributed on an "AS IS" BASIS, 449 | % WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 450 | % See the License for the specific language governing permissions and limitations under the License. 451 | % 452 | % Redistribution and use in source and binary forms, with or without 453 | % modification, are permitted provided that the following conditions are met: 454 | % 455 | % 1. Redistributions as source code must retain the above copyright notice, this 456 | % list of conditions and the following disclaimer. 457 | % 458 | % 2. The end-user documentation included with the redistribution, if any, must 459 | % include the following acknowledgment: "This product includes color 460 | % specifications and designs developed by Cynthia Brewer 461 | % (http://colorbrewer.org/)." Alternately, this acknowledgment may appear in the 462 | % software itself, if and wherever such third-party acknowledgments normally appear. 463 | % 464 | % 4. The name "ColorBrewer" must not be used to endorse or promote products 465 | % derived from this software without prior written permission. For written 466 | % permission, please contact Cynthia Brewer at cbrewer@psu.edu. 467 | % 468 | % 5. Products derived from this software may not be called "ColorBrewer", nor 469 | % may "ColorBrewer" appear in their name, without prior written permission of Cynthia Brewer. 
470 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%license -------------------------------------------------------------------------------- /code_for_figures/Callbacks/energyop.m: -------------------------------------------------------------------------------- 1 | function [ey,ex]=energyop(sig,gr) 2 | %% %calculates the energy operator of a signal 3 | %% %input 4 | %1. Raw signal (Vector) 5 | %2. gr (Plot or not plot) 6 | %% %Output 7 | %Energy operator signal (ey) 8 | %Teager operator (ex) 9 | %% %Method 10 | %The Teager Energy Operator is determined as 11 | %(x(t)) = (dx/dt)^2+ x(t)(d^2x/dt^2) (1.1) 12 | %in the continuous case (where x_ means the rst derivative of x, and x¨ means the second 13 | %derivative), and as 14 | %[x[n]] = x^2[n] + x[n - 1]x[n + 1] (1.2) 15 | %in the discrete case. 16 | %% Method 17 | %Note that the function is vectorized for optimum processing speed(Keep calm and vectorize) 18 | %Author : Hooman Sedghamiz 19 | %% hoose792@student.liu.se 20 | %% 21 | if nargin<2 22 | gr=0; 23 | end 24 | sig=sig(:); 25 | %% (x(t)) = (dx/dt)^2+ x(t)(d^2x/dt^2) 26 | %Operator 1 27 | y=diff(sig); 28 | y=[0;y]; 29 | squ=y(2:length(y)-1).^2; 30 | oddi=y(1:length(y)-2); 31 | eveni=y(3:length(y)); 32 | ey=squ - (oddi.*eveni); 33 | %% [x[n]] = x^2[n] - x[n - 1]x[n + 1] 34 | %operator ex 35 | squ1=sig(2:length(sig)-1).^2; 36 | oddi1=sig(1:length(sig)-2); 37 | eveni1=sig(3:length(sig)); 38 | ex=squ1 - (oddi1.*eveni1); 39 | ex = [ex(1); ex; ex(length(sig)-2)]; %make it the same length 40 | %% plots 41 | if gr 42 | figure,ax(1)=subplot(211);plot((sig/max(sig))-mean(sig/max(sig)),'b'), 43 | hold on, 44 | plot((ey/max(ey))-mean(ey/max(ey)),'Linewidth',2,'LineStyle','--','color','r'), 45 | axis tight; 46 | hleg1=legend('Original Signal','Energy Operator'); 47 | set(hleg1,'Location','NorthWest') 48 | ax(2)=subplot(212);plot((sig/max(sig))-mean(sig/max(sig)),'b'), 49 | hold on, 50 | plot((ex/max(ex))-mean(ex/max(ex)),'Linewidth',2,'LineStyle','--','color','g'), 51 | hleg2=legend('Original Signal','Teager Energy'); 52 | set(hleg2,'Location','NorthWest') 53 | axis tight, 54 | zoom on; 55 | linkaxes(ax,'x'); 56 | end -------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_EEGpic.m: -------------------------------------------------------------------------------- 1 | function fcn_EEGpic(SEG,Ax_eeg,str) 2 | Fs=200;w=10;zScale=1/150; 3 | tc=size(SEG,2)/2; 4 | eeg=SEG(1:18,tc-w*Fs/2+1:tc+w*Fs/2); 5 | ekg=-SEG(19,tc-w*Fs/2+1:tc+w*Fs/2); 6 | 7 | tto=tc*Fs-(w/2)*Fs+1;tt1=tc*Fs+(w/2)*Fs; 8 | tt=tto:tt1; 9 | 10 | gap=NaN(1,size(eeg,2)); 11 | seg=eeg; 12 | seg_disp=[seg(1:4,:);gap;seg(5:8,:);gap;seg(9:12,:);gap;seg(13:16,:);gap;seg(17:18,:);gap;ekg]; 13 | 14 | M=size(seg_disp,1); 15 | DCoff=repmat(flipud((1:M)'),1,size(seg_disp,2)); 16 | seg_disp(seg_disp>300)=300;seg_disp(seg_disp<-300)=-300; 17 | 18 | set(0,'CurrentFigure',gcf);set(gcf,'CurrentAxes',Ax_eeg);cla(Ax_eeg) 19 | hold(Ax_eeg,'on') 20 | title(str) 21 | for iSec=1:11 22 | ta=tto+Fs*(iSec-1); 23 | line(Ax_eeg,[ta ta], [0 M+1],'linestyle','--','color',[.5 .5 .5]) 24 | end 25 | 26 | plot(Ax_eeg,tt,zScale*seg_disp(1:end-1,:)+DCoff(1:end-1,:),'k','linewidth',1); 27 | 28 | ekg_=seg_disp(end,:);ekg_=(ekg_-mean(ekg_))/(eps+std(ekg_)); 29 | plot(Ax_eeg,tt,.2*ekg_+DCoff(end,:),'r','linewidth',1); 30 | axis off 31 | set(Ax_eeg,'ylim',[0 M+1],'xlim',[tto tt1+1]) 32 | hold(Ax_eeg,'off') 33 | end 34 | 
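The Teager energy operator implemented in energyop.m above reduces, in the
discrete case, to psi[x[n]] = x[n]^2 - x[n-1]*x[n+1]; the NLEO output (ey)
applies the same operator to the first difference of the signal. A minimal
NumPy sketch of the two formulas, for illustration only (the function names
below are not part of this repository):

import numpy as np

def teager_energy(x):
    # psi[x[n]] = x[n]^2 - x[n-1]*x[n+1], padded at both ends so the output
    # keeps the input length (as energyop.m does for its 'ex' output).
    x = np.asarray(x, dtype=float).ravel()
    core = x[1:-1] ** 2 - x[:-2] * x[2:]
    return np.concatenate(([core[0]], core, [core[-1]]))

def nleo(x):
    # Same operator applied to the first difference (energyop.m's 'ey').
    dx = np.diff(np.asarray(x, dtype=float).ravel(), prepend=0.0)
    return dx[1:-1] ** 2 - dx[:-2] * dx[2:]

# Example: a 10 Hz sine at the 200 Hz sampling rate used throughout this repo.
t = np.arange(0, 1, 1 / 200)
print(teager_energy(np.sin(2 * np.pi * 10 * t))[:5])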
-------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_bcReject.m: -------------------------------------------------------------------------------- 1 | function bc=fcn_bcReject(features,LUT_BG,K) 2 | bc=zeros(1,18); 3 | for ch=1:18 4 | for k=1:K 5 | idx=LUT_BG{k,1}; 6 | thr=LUT_BG{k,5}; 7 | rho=LUT_BG{k,3}; 8 | x=features(ch,idx)*sign(rho); 9 | if x25)=25;P_(P_<-15)=-15; 6 | 7 | [icp,~]=findchangepts(P_,'Statistic','mean','MinThreshold',thr_cp*var(P_)); 8 | flags1=zeros(nn,1);flags1(icp)=1;flags1(1)=1;flags1(end)=-1; 9 | 10 | icp_=unique([icp,1,nn]); 11 | icp_center=floor((icp_(1:end-1)+icp_(2:end))/2); 12 | flags2=zeros(nn,1);flags2(icp_center)=1; 13 | end -------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_fixBadChannel.m: -------------------------------------------------------------------------------- 1 | function eeg_clean=fcn_fixBadChannel(eeg,badScr,LOC) 2 | % eeg: 18 channel bipolar 3 | % badScr: [0] good [1] bad 4 | 5 | [M,N]=size(eeg); 6 | idx_good=find(badScr==0); 7 | if isempty(idx_good) 8 | eeg_clean=zeros(size(eeg)); 9 | else 10 | idx_bad=find(badScr==1); 11 | eeg_clean= eeg; 12 | eeg_clean(idx_bad,:)=NaN; 13 | while ~isempty(idx_bad) 14 | x=LOC(idx_good,1);y=LOC(idx_good,2); 15 | xq=LOC(idx_bad,1);yq=LOC(idx_bad,2); 16 | vq_1=griddata(x,y,eeg_clean(idx_good,1),xq,yq); 17 | if ~isempty(vq_1) 18 | for jj=1:N 19 | v=eeg_clean(idx_good,jj); 20 | eeg_clean(idx_bad,jj)=griddata(x,y,v,xq,yq); 21 | end 22 | end 23 | idx_bad_old=idx_bad; 24 | badScr=isnan(eeg_clean(:,1)); 25 | idx_good=find(badScr==0); 26 | idx_bad=find(badScr==1); 27 | if isempty(idx_bad) 28 | break 29 | else 30 | if length(idx_bad)==length(idx_bad_old) 31 | avg_ll=nanmean(eeg_clean(1:4,:),1); 32 | avg_rl=nanmean(eeg_clean(5:8,:),1); 33 | avg_lp=nanmean(eeg_clean(9:12,:),1); 34 | avg_rp=nanmean(eeg_clean(13:16,:),1); 35 | avg_cc=nanmean(eeg_clean(17:18,:),1); 36 | for ii=1:M 37 | if badScr(ii)==1 38 | switch ii 39 | case {1,2,3,4} 40 | eeg_ii=avg_ll; 41 | case {5,6,7,8} 42 | eeg_ii=avg_rl; 43 | case {9,10,11,12} 44 | eeg_ii=avg_lp; 45 | case {13,14,15,16} 46 | eeg_ii=avg_rp; 47 | case {17,18} 48 | eeg_ii=avg_cc; 49 | end 50 | if isnan(mean(eeg_ii)) 51 | eeg_ii=zeros(1,N); 52 | end 53 | eeg_clean(ii,:)=eeg_ii; 54 | end 55 | end 56 | break 57 | end 58 | end 59 | end 60 | end 61 | end 62 | -------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_getCM.m: -------------------------------------------------------------------------------- 1 | function [CM,c]=fcn_getCM(yref,yquery,pp) 2 | CM=zeros(6,6);c=CM; 3 | for i=1:6 4 | patternA=pp(i); 5 | for j=1:6 6 | patternB=pp(j); 7 | n=sum(yref==patternA&yquery==patternB); 8 | d=sum(yref==patternA&~isnan(yquery)&~isnan(yref)); 9 | CM(i,j)=n/d;c(i,j)=d; 10 | end 11 | end 12 | end -------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_getCali_human.m: -------------------------------------------------------------------------------- 1 | function cali_idx=fcn_getCali_human(Y,p,K,M) 2 | b_den=sum(~isnan(Y),2); 3 | Yr=Y(b_den>=10,:); 4 | Ne=size(Yr,2); 5 | bin_edges=(0:K)/K;n=NaN(Ne,K); 6 | for i=1:Ne 7 | yi=Yr(:,i); 8 | Yr_=Yr(:,setdiff(1:Ne,i)); 9 | b_num=sum(Yr_==p,2); 10 | b_den=sum(~isnan(Yr_),2); 11 | b=b_num./b_den; 12 | 13 | for j=1:K 14 | bin_left=bin_edges(j);bin_right= bin_edges(j+1); 15 | bin_center(j)=(bin_left+bin_right)/2; 16 | 
ind=find(b>bin_left&b<=bin_right&~isnan(yi)); 17 | n(i,j)=100*sum(yi(ind)==p)/length(ind); 18 | if length(ind)<10 19 | n(i,j)=nan; 20 | end 21 | end 22 | end 23 | 24 | ns=[];th=linspace(-20,20,1000); 25 | for k=1:size(n,1) 26 | yy=n(k,:);idx1=find(~isnan(yy)); 27 | yy=[0,yy(idx1),100];xx=[0,100*bin_center(idx1),100]; 28 | best=inf;best_th=0; 29 | for j=1:length(th) 30 | pr=min(xx/100,(1-0.001)); 31 | z=log((pr)./(1-pr))+th(j); 32 | yh=100./(1+exp(-z)); 33 | C=sum((yy-yh).^2); 34 | if C(m1-thresh(j1,j2)) 6 | ym(i,1)=j2; 7 | else 8 | ym(i,1)=j1; 9 | end 10 | end 11 | ym=ym-1; 12 | end 13 | -------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_getOPs_loo.m: -------------------------------------------------------------------------------- 1 | function [SEN,FPR,PPV]=fcn_getOPs_loo(Y,p) 2 | SEN=NaN(length(p),size(Y,2));FPR=NaN(length(p),size(Y,2));PPV=NaN(length(p),size(Y,2)); 3 | for i=1:length(p) 4 | for j=1:size(Y,2) 5 | idx_others=setdiff(1:size(Y,2),j); 6 | Y_j=Y(:,idx_others); 7 | yy=mode(Y_j,2); 8 | yt_full=(yy==p(i)); 9 | 10 | temp=Y(:,j);ind=find(~isnan(temp));temp=temp(ind); 11 | yi=(temp==p(i)); 12 | yt=yt_full(ind); 13 | 14 | SEN(i,j)=100*sum(yi==1&yt==1)/sum(yt==1); 15 | FPR(i,j)=100*sum(yi==1&yt==0)/sum(yt==0); 16 | PPV(i,j)=100*sum(yi==1&yt==1)/sum(yi==1); 17 | end 18 | end 19 | PPV(isnan(PPV))=1;SEN(isnan(SEN))=1;FPR(isnan(FPR))=0; 20 | end 21 | -------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_getZeroCrossings.m: -------------------------------------------------------------------------------- 1 | function zcc=fcn_getZeroCrossings(x) 2 | zcc=find((x(1:end-1)<=0&x(2:end)>0)|(x(1:end-1)>=0&x(2:end)<0)); 3 | end -------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_get_mIRR_human_loo.m: -------------------------------------------------------------------------------- 1 | function [M, Mk]=fcn_get_mIRR_human_loo(Y) 2 | pp=[1:5,0];M=zeros(6,6);ct=0; 3 | for k=1:size(Y, 2) 4 | yk=Y(:,k); 5 | Y_=Y;Y_(:, k)=[]; 6 | yref=mode(Y_,2); 7 | mk=fcn_getCM(yref,yk,pp); 8 | if sum(isnan(mk(:)))==0 9 | M=M+mk;Mk{k}=mk;ct=ct+1; 10 | end 11 | end 12 | M=M/ct; 13 | end 14 | -------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_get_mIRR_model.m: -------------------------------------------------------------------------------- 1 | function M=fcn_get_mIRR_model(Y,y_model) 2 | yref=mode(Y,2); 3 | pp=[1:5,0]; 4 | M=fcn_getCM(yref,y_model,pp); 5 | end 6 | -------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_get_pIRR_human.m: -------------------------------------------------------------------------------- 1 | function [P,PairwiseComparisons,nij,Pij,C,med_mn_mx]=fcn_get_pIRR_human(Y) 2 | pp=[1:5,0];pairs=0;P=zeros(6,6);nij=0;expertsUsed=[]; 3 | PairwiseComparisons=zeros(size(Y,2),size(Y,2)); 4 | for i=1:size(Y,2) 5 | yi=Y(:,i); 6 | for j=1:size(Y,2) 7 | if i~=j 8 | yj=Y(:,j); 9 | [pij,c]=fcn_getCM(yi,yj,pp); 10 | mn=min(min(c),[],2); 11 | n=sum(~isnan(yi)&~isnan(yj)); 12 | if sum(isnan(pij(:)))==0&&mn>=10 13 | pairs=pairs + 1; 14 | P=P + pij; 15 | Pij{pairs}=pij; 16 | nij(pairs,1)=n; 17 | C{pairs}=c; 18 | PairwiseComparisons(i,j)=n; 19 | expertsUsed=[expertsUsed;i;j]; 20 | end 21 | end 22 | end 23 | end 24 | P=P/pairs; 25 | 26 | haveMoreThan100=zeros(size(C,2),1);forMedian=[]; 27 | for i=1:size(C,2) 28 | c=C{i}; 29 | 
mn(i)=min(min(c),[],2); 30 | if mn(i)>100 31 | haveMoreThan100(i)=1; 32 | forMedian=[forMedian c(:)]; 33 | end 34 | end 35 | med_mn_mx=[median(forMedian(:));min(forMedian(:));max(forMedian(:))]; 36 | end 37 | -------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_get_pIRR_model.m: -------------------------------------------------------------------------------- 1 | function [P,PairwiseComparisons,nij,Pij,C,med_mn_mx]=fcn_get_pIRR_model(Y,y_model) 2 | pp=[1:5,0];pairs=0;P=zeros(6,6);nij=0;expertsUsed=[]; 3 | PairwiseComparisons=zeros(size(Y,2)*2,1); 4 | for i=1:size(Y,2) 5 | yi=Y(:,i); 6 | [pij,c]=fcn_getCM(yi,y_model,pp); 7 | mn=min(c(:)); 8 | n=sum(~isnan(yi)&~isnan(y_model)); 9 | if sum(isnan(pij(:)))==0&&mn>=10 10 | pairs=pairs + 1; 11 | P=P + pij; 12 | Pij{pairs}=pij; 13 | nij(pairs,1)=n; 14 | C{pairs}=c; 15 | PairwiseComparisons(i)=n; 16 | expertsUsed=[expertsUsed;i;-1]; 17 | end 18 | 19 | [pij,c]=fcn_getCM(y_model,yi,pp); 20 | mn=min(c(:)); 21 | if sum(isnan(pij(:)))==0&&mn>=10 22 | pairs=pairs + 1; 23 | P=P + pij; 24 | Pij{pairs}=pij; 25 | nij(pairs,1)=n; 26 | C{pairs}=c; 27 | PairwiseComparisons(i)=n; 28 | expertsUsed=[expertsUsed;i;-1]; 29 | end 30 | end 31 | P=P/pairs; 32 | 33 | haveMoreThan100=zeros(size(C,2),1); 34 | forMedian=[]; 35 | for i=1:size(C,2) 36 | c=C{i}; 37 | mn(i)=min(min(c),[],2); 38 | if mn(i)>100 39 | haveMoreThan100(i)=1; 40 | forMedian=[forMedian c(:)]; 41 | end 42 | end 43 | med_mn_mx=[median(forMedian(:)),min(forMedian(:)),max(forMedian(:))]; 44 | end 45 | -------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_isBadChannel_v2.m: -------------------------------------------------------------------------------- 1 | function y=fcn_isBadChannel_v2(eeg,ekg,Fs,LUT_BG,K) 2 | % eeg: 10sec 18-ch bipolar EEG 3 | for ii=1:size(eeg,1) 4 | x=eeg(ii,:); 5 | [eeg_nleo(ii,:),eeg_tkeo(ii,:)]=energyop(x,0); 6 | end 7 | [ekg_nleo,ekg_tkeo]=energyop(ekg,0); 8 | 9 | ff1=fcn_computeFeatures_powers_complex_v2(eeg,ekg,Fs); 10 | ff2=fcn_computeFeatures_powers_complex_v2(eeg_nleo,ekg_nleo,Fs); 11 | ff3=fcn_computeFeatures_powers_complex_v2(eeg_tkeo,ekg_tkeo,Fs); 12 | 13 | ff=[ff1,ff2,ff3]; 14 | y=fcn_bcReject(ff,LUT_BG,K); 15 | end -------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_plotConfusionMx.m: -------------------------------------------------------------------------------- 1 | function fcn_plotConfusionMx(Mx,titleStr,labels,colorStr,ax,jj,figure_idx) 2 | set(gcf,'CurrentAxes',ax);cla(ax) 3 | hold(ax,'on'); 4 | x=1:6; 5 | imagesc(ax,x,x,Mx,[0,1]) 6 | for i=0:7 7 | xx=[i,i]-0.5;yy=[-1,7]; 8 | plot(ax,xx,yy,'k',yy,xx,'k'); 9 | end 10 | for i=1:6 11 | for j=1:6 12 | n=round(Mx(i,j)*100); 13 | if n>10;xx=j-.2;else;xx=j - 0.1;end;yy=i; 14 | if n<=50;text(ax,xx,yy,num2str(n),'color','k','fontsize',20);else;text(ax,xx,yy,num2str(n),'color','w','fontsize',20);end 15 | end 16 | end 17 | axis ij;axis square;box off 18 | colormap(ax,brewermap([],colorStr)) 19 | set(ax,'TickLength',[0,0],'xtick',1:6,'ytick',1:6,'xticklabels',labels,'yticklabels',labels,'fontsize',12) 20 | if jj == 1 21 | set(ax,'xtick',1:6,'ytick',1:6,'xticklabels',[],'yticklabels',labels,'fontsize',12) 22 | end 23 | text(ax,0,0.25,figure_idx,'fontsize',25) 24 | xlim([.5 6.5]);ylim([.5 6.5]) 25 | title(titleStr); 26 | hold(ax,'off'); 27 | end 28 | -------------------------------------------------------------------------------- 
/code_for_figures/Callbacks/fcn_plotEEG.m: -------------------------------------------------------------------------------- 1 | function fcn_plotEEG(f,Ax_EEG,seg,montage,labelshow,str_event) 2 | eeg=seg(1:19,:);ekg=seg(20,:); 3 | w=10;Fs=200;zScale=1/150; 4 | channel_withspace_bipolar ={'Fp1-F7','F7-T3','T3-T5','T5-O1','','Fp2-F8','F8-T4','T4-T6','T6-O2','','Fp1-F3','F3-C3','C3-P3','P3-O1','','Fp2-F4','F4-C4','C4-P4','P4-O2','','Fz-Cz' 'Cz-Pz','','EKG'}; 5 | tto=1;tt1=w*Fs;tt=tto:tt1;gap=NaN(1,size(eeg,2)); 6 | switch montage 7 | case 'L-Bipolar' 8 | seg=fcn_bipolar(eeg); 9 | seg_disp=[seg(1:4,:);gap;seg(5:8,:);gap;seg(9:12,:);gap;seg(13:16,:);gap;seg(17:18,:);gap;ekg]; 10 | channel_withspace=channel_withspace_bipolar; 11 | case 'Average' 12 | seg=eeg-repmat(mean(eeg,1),size(eeg,1),1); 13 | seg_disp=[seg(1:8,:);gap;seg(9:11,:);gap;seg(12:19,:);gap;ekg]; 14 | channel_withspace=channel_withspace_average; 15 | case 'Monopolar' 16 | seg= eeg; 17 | seg_disp=[seg(1:8,:);gap;seg(9:11,:);gap;seg(12:19,:);gap;ekg]; 18 | channel_withspace=channel_withspace_monopolar; 19 | end 20 | M=size(seg_disp,1);DCoff=repmat(flipud((1:M)'),1,size(seg_disp,2)); 21 | 22 | set(f,'CurrentAxes',Ax_EEG);cla(Ax_EEG) 23 | hold(Ax_EEG,'on') 24 | for iSec=1:round((tt1-tto+1)/Fs)+1 25 | ta=tto+Fs*(iSec-1); 26 | line([ta ta], [0 M+1],'linestyle','--','color',[.5 .5 .5]) 27 | end 28 | plot(Ax_EEG,tt,zScale*seg_disp(1:end-1,:)+DCoff(1:end-1,:),'k'); 29 | ekg_=seg_disp(end,:);ekg_=(ekg_-mean(ekg_))/(eps+std(ekg_)); 30 | plot(Ax_EEG,tt,.2*ekg_+DCoff(end,:),'r'); 31 | set(Ax_EEG,'box','off','ylim',[0 M+1],'xlim',[tto tt1+1],'xtick',round(tt(1):2*Fs:tt(end)),'xticklabel',[]); 32 | if labelshow 33 | for iCh=1:length(channel_withspace) 34 | ta=DCoff(iCh); 35 | text(Ax_EEG,tt(1)-Fs/20,ta,channel_withspace(iCh),'fontsize',7,'HorizontalAlignment','right','VerticalAlignment','middle') 36 | end 37 | end 38 | text(Ax_EEG,tt(1),0,[str_event,'-00:00:05'],'fontsize',10,'HorizontalAlignment','left','VerticalAlignment','top') 39 | text(Ax_EEG,(tt(1)+tt(end))/2,0,str_event,'fontsize',10,'HorizontalAlignment','left','VerticalAlignment','top') 40 | text(Ax_EEG,tt(end),0,[str_event,'+00:00:05'],'fontsize',10,'HorizontalAlignment','right','VerticalAlignment','top') 41 | axis off 42 | hold(Ax_EEG,'off') 43 | end 44 | -------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_plotPR.m: -------------------------------------------------------------------------------- 1 | function fcn_plotPR(sp,ax,c,op_sen,op_ppv,sen_M,ppv_M,auprc_M,sen_L,ppv_L,auprc_L,sen_U,ppv_U,auprc_U,EUPRC_T,EUPRC_L,EUPRC_U) 2 | labels={'Seizure','LPD','GPD','LRDA','GRDA','Other'}; 3 | set(gcf,'CurrentAxes',ax{sp});cla(ax{sp}) 4 | hold(ax{sp},'on'); 5 | xx=[sen_L,fliplr(sen_U)];yy=[ppv_L,fliplr(ppv_U)]; 6 | patch(ax{sp},xx,yy,'b','facealpha',.3,'edgecolor','none','facecolor',c); 7 | plot(ax{sp},sen_M,ppv_M,'-','color',c,'linewidth',2); 8 | 9 | scatter(ax{sp},op_sen,op_ppv,50,[0 0 0],'filled','marker','v'); 10 | idx=[]; 11 | for kk=1:length(op_sen) 12 | op_sen_kk=op_sen(kk);op_ppv_kk=op_ppv(kk); 13 | [~,ii]=min(abs(op_sen_kk-sen_M)); 14 | if op_ppv_kk<=ppv_M(ii) 15 | idx=[idx;kk]; 16 | end 17 | end 18 | scatter(ax{sp},op_sen(idx),op_ppv(idx),50,[0.5,0.5,0.5],'filled','marker','v'); 19 | if sp==1 20 | text(ax{sp},20,80,['AUPRC: ',num2str(round(auprc_M*1000)/10),' (',num2str(round(auprc_L*1000)/10),', ',num2str(round(auprc_U*1000)/10),')%','\newlineEUPRC: ',num2str(round(EUPRC_T*1000)/10),' (',num2str(round(EUPRC_L*1000)/10),', 
',num2str(round(EUPRC_U*1000)/10),')%'],'fontsize',12); 21 | else 22 | text(ax{sp},20,20,['AUPRC: ',num2str(round(auprc_M*1000)/10),' (',num2str(round(auprc_L*1000)/10),', ',num2str(round(auprc_U*1000)/10),')%','\newlineEUPRC: ',num2str(round(EUPRC_T*1000)/10),' (',num2str(round(EUPRC_L*1000)/10),', ',num2str(round(EUPRC_U*1000)/10),')%'],'fontsize',12); 23 | end 24 | set(ax{sp},'xtick',0:20:100,'ytick',0:20:100,'xlim',[0,100],'ylim',[0,100],'fontsize',12); 25 | xlabel('TPR'); ylabel('PPV'); axis square; grid on; box off 26 | title(labels{sp},'fontsize',20); 27 | hold(ax{sp},'off'); 28 | end 29 | 30 | 31 | -------------------------------------------------------------------------------- /code_for_figures/Callbacks/fcn_plotROC.m: -------------------------------------------------------------------------------- 1 | function fcn_plotROC(sp,ax,cc,op_fpr,op_sen,fpr_M,sen_M,auroc_M,fpr_L,sen_L,auroc_L,fpr_U,sen_U,auroc_U,EUROC_T,EUROC_L,EUROC_U) 2 | patterns={'Seizure','LPD','GPD','LRDA','GRDA','Other'}; 3 | set(gcf,'CurrentAxes',ax{sp});cla(ax{sp}) 4 | hold(ax{sp},'on'); 5 | xx=[fpr_L,fliplr(fpr_U)];yy=[sen_L,fliplr(sen_U)]; 6 | patch(ax{sp},xx,yy,'b', 'facealpha',.3,'edgecolor', 'none', 'facecolor',cc); 7 | plot(ax{sp},fpr_M,sen_M,'-', 'color',cc,'linewidth',2); 8 | scatter(ax{sp},op_fpr,op_sen,50,[0,0,0],'filled');idx=[]; 9 | for kk=1:length(op_fpr) 10 | op_sen_kk=op_sen(kk);op_fpr_kk=op_fpr(kk); 11 | [~,ii]=min(abs(op_fpr_kk-fpr_M)); 12 | if op_sen_kk<=sen_M(ii) 13 | idx=[idx;kk]; 14 | end 15 | end 16 | scatter(ax{sp},op_fpr(idx),op_sen(idx),50,[0.5 0.5 0.5],'filled'); 17 | text(ax{sp},20,20,['AUROC: ',num2str(round(auroc_M*1000)/10),' (',num2str(round(auroc_L*1000)/10),', ',num2str(round(auroc_U*1000)/10),')%', '\newlineEUROC: ',num2str(round(EUROC_T*1000)/10),' (',num2str(round(EUROC_L*1000)/10),', ',num2str(round(EUROC_U*1000)/10),')%'],'fontsize',12); 18 | set(ax{sp},'xtick',0:20:100,'ytick',0:20:100,'xlim',[0,100],'ylim',[0,100],'fontsize',12); 19 | xlabel('FPR');ylabel('TPR');axis square;grid on;box off; 20 | title(patterns{sp},'fontsize',20); 21 | hold(ax{sp},'off'); 22 | end 23 | 24 | -------------------------------------------------------------------------------- /code_for_figures/Data/Figure1/figure1_input.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure1/figure1_input.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure3/figure3_input.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure3/figure3_input.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure3/samples.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure3/samples.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/GPD_sample1.mat: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/GPD_sample1.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/GPD_sample2.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/GPD_sample2.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/GPD_sample3.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/GPD_sample3.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/GPD_sample4.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/GPD_sample4.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/GRDA_sample1.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/GRDA_sample1.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/GRDA_sample2.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/GRDA_sample2.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/GRDA_sample3.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/GRDA_sample3.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/GRDA_sample4.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/GRDA_sample4.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/LPD_sample1.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/LPD_sample1.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/LPD_sample2.mat: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/LPD_sample2.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/LPD_sample3.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/LPD_sample3.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/LPD_sample4.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/LPD_sample4.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/LRDA_sample1.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/LRDA_sample1.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/LRDA_sample2.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/LRDA_sample2.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/LRDA_sample3.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/LRDA_sample3.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/LRDA_sample4.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/LRDA_sample4.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/Other_sample1.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/Other_sample1.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/Other_sample2.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/Other_sample2.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/Other_sample3.mat: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/Other_sample3.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/Other_sample4.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/Other_sample4.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/SZ_sample1.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/SZ_sample1.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/SZ_sample2.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/SZ_sample2.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/SZ_sample3.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/SZ_sample3.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Figure4to6/samples/SZ_sample4.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Figure4to6/samples/SZ_sample4.mat -------------------------------------------------------------------------------- /code_for_figures/Data/FigureS2/dataset1.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/FigureS2/dataset1.mat -------------------------------------------------------------------------------- /code_for_figures/Data/FigureS2/dataset2.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/FigureS2/dataset2.mat -------------------------------------------------------------------------------- /code_for_figures/Data/FigureS2/dataset3.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/FigureS2/dataset3.mat -------------------------------------------------------------------------------- /code_for_figures/Data/FigureS2/dataset4.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/FigureS2/dataset4.mat -------------------------------------------------------------------------------- 
/code_for_figures/Data/FigureS2/datasetA.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/FigureS2/datasetA.mat -------------------------------------------------------------------------------- /code_for_figures/Data/FigureS2/datasetB.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/FigureS2/datasetB.mat -------------------------------------------------------------------------------- /code_for_figures/Data/FigureS2/datasetC.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/FigureS2/datasetC.mat -------------------------------------------------------------------------------- /code_for_figures/Data/FigureS2/datasetD.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/FigureS2/datasetD.mat -------------------------------------------------------------------------------- /code_for_figures/Data/FigureS3/FigureS3_input.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/FigureS3/FigureS3_input.mat -------------------------------------------------------------------------------- /code_for_figures/Data/FigureS5/FigureS5_input.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/FigureS5/FigureS5_input.mat -------------------------------------------------------------------------------- /code_for_figures/Data/FigureS8/FigureS8_input.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/FigureS8/FigureS8_input.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Table1/dataset1.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Table1/dataset1.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Table1/dataset2.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Table1/dataset2.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Table1/dataset3.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Table1/dataset3.mat -------------------------------------------------------------------------------- 
/code_for_figures/Data/Table1/dataset4.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Table1/dataset4.mat -------------------------------------------------------------------------------- /code_for_figures/Data/Table1/patient_demo.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Data/Table1/patient_demo.mat -------------------------------------------------------------------------------- /code_for_figures/Fig1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Fig1.png -------------------------------------------------------------------------------- /code_for_figures/Fig2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Fig2.png -------------------------------------------------------------------------------- /code_for_figures/Fig3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Fig3.png -------------------------------------------------------------------------------- /code_for_figures/Fig4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Fig4.png -------------------------------------------------------------------------------- /code_for_figures/Fig5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Fig5.png -------------------------------------------------------------------------------- /code_for_figures/Fig6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/Fig6.png -------------------------------------------------------------------------------- /code_for_figures/FigS3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/FigS3.png -------------------------------------------------------------------------------- /code_for_figures/FigS5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/FigS5.png -------------------------------------------------------------------------------- /code_for_figures/FigS8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/bdsp-core/IIIC-SPaRCNet/09081b1698dce8e15b7944283079a38e4bf38dd5/code_for_figures/FigS8.png -------------------------------------------------------------------------------- 
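Note: the sample .mat files listed above can be inspected directly. Below is a minimal sketch of how one might load a bundled sample and apply the same preprocessing used by the Figure 4-6 scripts further down; it assumes, as those scripts do, that each file contains a 20 x N matrix seg sampled at 200 Hz (rows 1-19 EEG, row 20 EKG) and that the Callbacks folder is on the MATLAB path. The filter settings and windowing are copied from those scripts, not independently recommended values.

% Minimal sketch: load one bundled sample and preprocess it the way the Figure 4-6 scripts do.
% Assumes seg is 20 x N at Fs = 200 Hz (rows 1-19 EEG, row 20 EKG).
addpath('./Callbacks/');
tmp = load('./Data/Figure4to6/samples/GPD_sample1.mat');
seg = tmp.seg;
Fs = 200;
[B1,A1] = butter(3,[.5,40]/Fs);            % band-pass design, as in Figure4_samples_SZ_LPD.m
[B2,A2] = butter(3,[55,65]/Fs,'stop');     % band-stop around line noise, as in the same script
seg = filtfilt(B1,A1,seg')';               % zero-phase filtering, channel by channel
seg = filtfilt(B2,A2,seg')';
eeg = seg(1:19,(20*Fs+1):30*Fs);           % 10-second window (seconds 20-30 of the segment)
ekg = seg(20,(20*Fs+1):30*Fs);
if isnan(var(ekg)) || var(ekg)==0          % fall back when the EKG channel is flat or missing
    ekg = mean(eeg,1);
end
eeg_clean = fcn_cleaningPipeline(eeg,ekg); % artifact cleaning helper from Callbacks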
/code_for_figures/Figure1_ROC.m: -------------------------------------------------------------------------------- 1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | % Figure 1: Evaluation of model performance relative to experts: ROC curves 3 | % plot the true human operating points 4 | % report the EUROC using data with K=1E4 bootstrap 5 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 6 | clc;close all;clear; 7 | 8 | addpath('./Callbacks/'); 9 | load('./Data/Figure1/figure1_input.mat') 10 | 11 | figure('units','normalized','position',[0.0615,0.0889,0.7703,0.8204],'MenuBar','none','ToolBar','none','color','w'); 12 | ax={subplot('position',[.050,.564,.250,.350]);subplot('position',[.370,.564,.250,.350]);subplot('position',[.690,.564,.250,.350]);subplot('position',[.050,.080,.250,.350]);subplot('position',[.370,.080,.250,.350]);subplot('position',[.690,.080,.250,.350])}; 13 | 14 | p=[1:5,0];nClass=length(p); 15 | cc=[0.635,0.078,0.184;0.850,0.325,0.098;0.929,0.694,0.125;0.466,0.674,0.188;0.301,0.745,0.933;0.000,0.447,0.741]; 16 | 17 | for i=1:nClass 18 | fcn_plotROC(i,ax,cc(i,:),op_fpr_T(i,:),op_sen_T(i,:),fpr_Median(i,:),sen_Median(i,:),auroc_Median(i),fpr_L(i,:),sen_L(i,:),auroc_L(i),fpr_U(i,:),sen_U(i,:),auroc_U(i),EUROC_Median(i),EUROC_L(i),EUROC_U(i)); 19 | end 20 | print(gcf,'-r300','-dpng', 'Fig1.png'); 21 | -------------------------------------------------------------------------------- /code_for_figures/Figure2_PR.m: -------------------------------------------------------------------------------- 1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | % Figure 2: Evaluation of model performance relative to experts: PR curves 3 | % plot the true human operating points 4 | % report the EUPRC using data with K=1E4 bootstrap 5 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 6 | clc;close all;clear; 7 | 8 | addpath('./Callbacks/'); 9 | load('./Data/Figure1/figure1_input.mat') 10 | 11 | figure('units','normalized','position',[0.0615,0.0889,0.7703,0.8204],'MenuBar','none','ToolBar','none','color','w'); 12 | ax={subplot('position',[.050,.564,.250,.350]);subplot('position',[.370,.564,.250,.350]);subplot('position',[.690,.564,.250,.350]);subplot('position',[.050,.080,.250,.350]);subplot('position',[.370,.080,.250,.350]);subplot('position',[.690,.080,.250,.350])}; 13 | 14 | p=[1:5,0];nClass=length(p); 15 | cc=[0.635,0.078,0.184;0.850,0.325,0.098;0.929,0.694,0.125;0.466,0.674,0.188;0.301,0.745,0.933;0.000,0.447,0.741]; 16 | 17 | for i=1:nClass 18 | fcn_plotPR(i,ax,cc(i,:),op_sen_T(i,:),op_ppv_T(i,:),sen_Median(i,:),ppv_Median(i,:),auprc_Median(i),sen_L(i,:),ppv_L(i,:),auprc_L(i),sen_U(i,:),ppv_U(i,:),auprc_U(i),EUPRC_Median(i),EUPRC_L(i),EUPRC_U(i)); 19 | end 20 | print(gcf,'-r300','-dpng','Fig2.png'); 21 | -------------------------------------------------------------------------------- /code_for_figures/Figure3_UMAPs.m: -------------------------------------------------------------------------------- 1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | % Figure 3: Maps of the Ictal-Interictal-Injury Continuum Learned by SPaRCNet 3 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 4 | clc;close all;clear; 5 | 6 | %% set figure 7 | f=figure('units','normalized','position',[0.1000,0.0315,0.4099,0.9333],'MenuBar','none','ToolBar','none','color','w'); 8 | 
ax={subplot('position',[.00,.75,.50,.25]);subplot('position',[.50,.75,.50,.25]);subplot('position',[.00,.00,.50,.25]);subplot('position',[.50,.00,.50,.25]);subplot('position',[.00,.25,1.00,.50])}; 9 | 10 | tmp=load('./Data/Figure3/figure3_input.mat'); 11 | Y=tmp.Y;Y_hat=tmp.Y_hat;Vxy=tmp.Vxy; 12 | 13 | %% human 14 | xlimts=[-10,25];ylimts=[-18,25]; 15 | patterns={'Seizure','LPD','GPD','LRDA','GRDA','Other'}; 16 | 17 | [~,y]=max(Y(:,[2:size(Y,2),1]),[],2); 18 | Cs=flipud(jet(7));colors=NaN(length(y),3); 19 | for k=1:length(patterns) 20 | idx_hat=find(y==k); 21 | colors(idx_hat,:)=repmat(Cs(k,:),length(idx_hat),1); 22 | end 23 | set(gcf,'CurrentAxes',ax{1});cla(ax{1}) 24 | hold(ax{1},'on'); 25 | ss=scatter(ax{1},Vxy(:,1),Vxy(:,2),20,colors,'filled'); 26 | alpha(ss,.2);axis equal;axis off; 27 | for i=1:6 28 | y1=12-(i-1)*2;y2=13-(i-1)*2; 29 | fill(ax{1},[10,10,12,12]+8,[y1,y2,y2,y1]+7,Cs(i,:),'edgecolor',Cs(i,:)) 30 | text(ax{1},12.2+8,(y1+y2)/2+7.3,patterns{i},'fontsize',8,'verticalalignment','middle') 31 | end 32 | text(ax{1},mean(xlimts),23.5,'Human','fontsize',15,'horizontalalignment','center') 33 | set(ax{1},'xlim',xlimts,'ylim',ylimts) 34 | hold(ax{1},'off'); 35 | 36 | %% SZ burden 37 | yp_sz=Y_hat(:,2);yp_sz=(yp_sz-min(yp_sz))/(max(yp_sz)-min(yp_sz)+eps); 38 | K=20;A=1.4;M=100; 39 | Cs=flipud(hot(round(K*A)));Cs=Cs(round(K*(A-1))+1:end,:);colors_sz=NaN(length(yp_sz),3); 40 | for k=1:K 41 | y1=(k-1)*0.05;y2=(k)*0.05; 42 | if k<K 43 | idx_sz=find(yp_sz>=y1&yp_sz<y2); 44 | colors_sz(idx_sz,:)=repmat(Cs(k,:),length(idx_sz),1); 45 | else 46 | idx_sz=find(yp_sz>=y1&yp_sz<=y2); 47 | colors_sz(idx_sz,:)=repmat(Cs(k,:),length(idx_sz),1); 48 | end 49 | end 50 | set(gcf,'CurrentAxes',ax{3});cla(ax{3}) 51 | hold(ax{3},'on'); 52 | ss=scatter(ax{3},Vxy(:,1),Vxy(:,2),20,colors_sz,'filled'); 53 | alpha(ss,.2);axis equal;axis off; 54 | 55 | cs=flipud(hot(M));cs=cs(round((1-1/A)*M):M,:);cs=reshape(cs,size(cs,1),1,3);dd=max(Vxy(:,2))-size(cs,1)/6; 56 | image(ax{3},20:20.8,(1:size(cs,1))/6+dd,cs) 57 | text(ax{3},21,1/6+dd,'min','fontsize',8,'verticalalignment','top','horizontalalignment','left'); 58 | text(ax{3},21,size(cs,1)/6+dd,'max','fontsize',8,'verticalalignment','bottom','horizontalalignment','left'); 59 | text(ax{3},mean(xlimts),23.5,'SZ burden','fontsize',15,'horizontalalignment','center') 60 | set(ax{3},'xlim',xlimts,'ylim',ylimts) 61 | hold(ax{3},'off'); 62 | 63 | %% IIIC burden 64 | yp_iiic=sum(Y_hat(:,2:5),2);yp_iiic=(yp_iiic-min(yp_iiic))/(max(yp_iiic)-min(yp_iiic)+eps); 65 | Cs=flipud(hot(round(K*A)));Cs=Cs(round(K*(A-1))+1:end,:);colors_iiic=NaN(length(yp_iiic),3); 66 | for k=1:K 67 | y1=(k-1)*0.05;y2=(k)*0.05; 68 | if k<K 69 | idx_iiic=find(yp_iiic>=y1&yp_iiic<y2); 70 | colors_iiic(idx_iiic,:)=repmat(Cs(k,:),length(idx_iiic),1); 71 | else 72 | idx_iiic=find(yp_iiic>=y1&yp_iiic<=y2); 73 | colors_iiic(idx_iiic,:)=repmat(Cs(k,:),length(idx_iiic),1); 74 | end 75 | end 76 | set(gcf,'CurrentAxes',ax{4});cla(ax{4}) 77 | hold(ax{4},'on'); 78 | ss=scatter(ax{4},Vxy(:,1),Vxy(:,2),20,colors_iiic,'filled'); 79 | alpha(ss,.2);axis equal;axis off; 80 | cs=flipud(hot(M));cs=cs(round((1-1/A)*M):M,:);cs=reshape(cs,size(cs,1),1,3);dd=max(Vxy(:,2))-size(cs,1)/6; 81 | image(ax{4},20:20.8,(1:size(cs,1))/6+dd,cs) 82 | text(ax{4},21,1/6+dd,'min','fontsize',8,'verticalalignment','top','horizontalalignment','left'); 83 | text(ax{4},21,size(cs,1)/6+dd,'max','fontsize',8,'verticalalignment','bottom','horizontalalignment','left'); 84 | text(ax{4},mean(xlimts),23.5,'IIIC burden','fontsize',15,'horizontalalignment','center') 85 | set(ax{4},'xlim',xlimts,'ylim',ylimts) 86 | hold(ax{4},'off'); 87 | 88 | %% Model uncertainty (entropy) 89 | en_model=-sum(Y_hat.*log2(Y_hat),2); 90 | yp=(en_model-min(en_model))/(max(en_model)-min(en_model)+eps); 91 | 
Cs=flipud(hot(round(K*A)));Cs=Cs(round(K*(A-1))+1:end,:); 92 | colors_conf=NaN(length(yp),3); 93 | for k=1:K 94 | y1=(k-1)*0.05;y2=(k)*0.05; 95 | if k<K 96 | idx_conf=find(yp>=y1&yp<y2); 97 | colors_conf(idx_conf,:)=repmat(Cs(k,:),length(idx_conf),1); 98 | else 99 | idx_conf=find(yp>=y1&yp<=y2); 100 | colors_conf(idx_conf,:)=repmat(Cs(k,:),length(idx_conf),1); 101 | end 102 | end 103 | set(gcf,'CurrentAxes',ax{2});cla(ax{2}) 104 | hold(ax{2},'on'); 105 | ss=scatter(ax{2},Vxy(:,1),Vxy(:,2),20,colors_conf,'filled'); 106 | alpha(ss,.2);axis equal;axis off; 107 | cs=flipud(hot(M));cs=cs(round((1-1/A)*M):M,:);cs=reshape(cs,size(cs,1),1,3);dd=max(Vxy(:,2))-size(cs,1)/6; 108 | image(ax{2},20:20.8,(1:size(cs,1))/6+dd,cs) 109 | text(ax{2},21,1/6+dd,'min','fontsize',8,'verticalalignment','top','horizontalalignment','left'); 110 | text(ax{2},21,size(cs,1)/6+dd,'max','fontsize',8,'verticalalignment','bottom','horizontalalignment','left'); 111 | text(ax{2},mean(xlimts),23.5,'Uncertainty','fontsize',15,'horizontalalignment','center'); 112 | set(ax{2},'xlim',xlimts,'ylim',ylimts) 113 | hold(ax{2},'off'); 114 | 115 | %% Model prediction 116 | [~,y_hat]=max(Y_hat(:,[2:6,1]),[],2); 117 | Cs=flipud(jet(7));colors_hat=NaN(length(y_hat),3); 118 | for k=1:6 119 | idx_hat=find(y_hat==k); 120 | colors_hat(idx_hat,:)=repmat(Cs(k,:),length(idx_hat),1); 121 | end 122 | set(gcf,'CurrentAxes',ax{5});cla(ax{5}) 123 | hold(ax{5},'on'); 124 | ss=scatter(ax{5},Vxy(:,1),Vxy(:,2),20,colors_hat,'filled'); 125 | alpha(ss,.1);axis equal;axis off; 126 | for i=1:6 127 | y1=12-(i-1)*1.5;y2=13-(i-1)*1.5; 128 | fill(ax{5},[10,10,12,12]+8,[y1,y2,y2,y1]+7,Cs(i,:),'edgecolor',Cs(i,:)) 129 | text(ax{5},12.2+8,(y1+y2)/2+7,patterns{i},'fontsize',10) 130 | end 131 | text(ax{5},mean(xlimts),22.5,'Model','fontsize',18,'horizontalalignment','center') 132 | set(ax{5},'xlim',xlimts,'ylim',[-18,23]) 133 | hold(ax{5},'off'); 134 | 135 | %% EEG samples on the map 136 | tmp=load('./Data/Figure3/samples.mat'); 137 | LUT=tmp.LUT; 138 | hold(ax{5},'on'); 139 | for i=1:size(LUT,1) 140 | lut=LUT{i};vxy=cell2mat(lut(:,[4,5])); 141 | plot(ax{5},vxy(:,1),vxy(:,2),'ko',vxy(:,1),vxy(:,2),'kx','markersize',8) 142 | for ii=1:size(lut,1) 143 | text(ax{5},vxy(ii,1)+.6,vxy(ii,2)-.6,[patterns{i},'_',num2str(ii)],'horizontalalignment','left') 144 | end 145 | end 146 | hold(ax{5},'off'); 147 | 148 | print(gcf,'-r300','-dpng','./Fig3.png'); 149 | -------------------------------------------------------------------------------- /code_for_figures/Figure4_samples_SZ_LPD.m: -------------------------------------------------------------------------------- 1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | % Figure 4: Examples of Smooth Pattern Transition for SZ (A) and LPD (B) 3 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 4 | clc;close all;clear; 5 | 6 | addpath('./Callbacks/'); 7 | dataDir='./Data/Figure4to6/samples/'; 8 | tmp=load('./Data/Figure3/samples.mat'); 9 | LUT=tmp.LUT; 10 | Fs=200;[B1,A1]=butter(3,[.5,40]/Fs);[B2,A2]=butter(3,[55,65]/Fs,'stop'); 11 | 12 | %% set figure 13 | f=figure('units','normalized','position',[0.1000,0.0380,0.4667,0.9333],'MenuBar','none','ToolBar','none','color','w'); 14 | ax={subplot('position',[.05,.74,.43,.20]);subplot('position',[.05,.50,.43,.20]);subplot('position',[.05,.26,.43,.20]);subplot('position',[.05,.02,.43,.20]);subplot('position',[.53,.74,.43,.20]);subplot('position',[.53,.50,.43,.20]);subplot('position',[.53,.26,.43,.20]);subplot('position',[.53,.02,.43,.20])}; 15 | 16 | % A. 
SZ 17 | lut=LUT{1}; 18 | for i=1:size(lut,1) 19 | fileName=lut{i,1};tmp=load([dataDir,fileName]); 20 | seg=tmp.seg;seg=filtfilt(B1,A1,seg')';seg=filtfilt(B2,A2,seg')'; 21 | eeg=seg(1:19,(20*Fs+1):30*Fs);ekg=seg(20,(20*Fs+1):30*Fs); 22 | if isnan(var(ekg))||var(ekg)==0 23 | ekg=mean(eeg,1); 24 | end 25 | eeg_clean=fcn_cleaningPipeline(eeg,ekg); 26 | SEG=[eeg_clean;ekg]; 27 | yp=num2str(round(100*lut{i,3}(2))/100); 28 | if length(yp)==1 29 | yp=[yp,'.00']; 30 | else 31 | yp=[yp,repmat('0',1,4-length(yp))]; 32 | end 33 | fcn_EEGpic(SEG,ax{i},['Seizure_',num2str(i),' ',yp]) 34 | end 35 | 36 | hold(ax{1},'on'); 37 | text(ax{1},198900,28,'A','fontsize',25) 38 | hold(ax{1},'off'); 39 | 40 | %% B. LPD 41 | lut=LUT{2}; 42 | for i=1:size(lut,1) 43 | fileName=lut{i,1};tmp=load([dataDir,fileName]); 44 | seg=tmp.seg; 45 | seg=filtfilt(B1,A1,seg')';seg=filtfilt(B2,A2,seg')'; 46 | eeg=seg(1:19,(20*Fs+1):30*Fs);ekg=seg(20,(20*Fs+1):30*Fs); 47 | if isnan(var(ekg))||var(ekg)==0 48 | ekg=mean(eeg,1); 49 | end 50 | eeg_clean=fcn_cleaningPipeline(eeg,ekg); 51 | SEG=[eeg_clean;ekg]; 52 | yp=num2str(round(100*lut{i,3}(3))/100); 53 | if length(yp)==1 54 | yp=[yp,'.00']; 55 | else 56 | yp=[yp,repmat('0',1,4-length(yp))]; 57 | end 58 | fcn_EEGpic(SEG,ax{4+i},['LPD_',num2str(i),' ',yp]) 59 | end 60 | hold(ax{5},'on'); 61 | text(ax{5},198900,28,'B','fontsize',25) 62 | hold(ax{5},'off'); 63 | 64 | %% 65 | print(gcf,'-r300','-dpng','./Fig4.png'); 66 | -------------------------------------------------------------------------------- /code_for_figures/Figure5_samples_GPD_LRDA.m: -------------------------------------------------------------------------------- 1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | % Figure 5: Examples of Smooth Pattern Transition for GPD (A) and LRDA (B) 3 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 4 | clc;close all;clear; 5 | 6 | addpath('./Callbacks/'); 7 | dataDir='./Data/Figure4to6/samples/'; 8 | tmp=load('./Data/Figure3/samples.mat'); 9 | LUT=tmp.LUT; 10 | Fs=200;[B1,A1]=butter(3,[.5,40]/Fs);[B2,A2]=butter(3,[55,65]/Fs,'stop'); 11 | 12 | %% set figure 13 | f=figure('units','normalized','position',[0.1000,0.0380,0.4667,0.9333],'MenuBar','none','ToolBar','none','color','w'); 14 | ax={subplot('position',[.05,.74,.43,.20]);subplot('position',[.05,.50,.43,.20]);subplot('position',[.05,.26,.43,.20]);subplot('position',[.05,.02,.43,.20]);subplot('position',[.53,.74,.43,.20]);subplot('position',[.53,.50,.43,.20]);subplot('position',[.53,.26,.43,.20]);subplot('position',[.53,.02,.43,.20])}; 15 | 16 | % A. GPD 17 | lut=LUT{3}; 18 | for i=1:size(lut,1) 19 | fileName=lut{i,1};tmp=load([dataDir,fileName]); 20 | seg=tmp.seg;seg=filtfilt(B1,A1,seg')';seg=filtfilt(B2,A2,seg')'; 21 | eeg=seg(1:19,(20*Fs+1):30*Fs);ekg=seg(20,(20*Fs+1):30*Fs); 22 | if isnan(var(ekg))||var(ekg)==0 23 | ekg=mean(eeg,1); 24 | end 25 | eeg_clean=fcn_cleaningPipeline(eeg,ekg); 26 | SEG=[eeg_clean;ekg]; 27 | yp=num2str(round(100*lut{i,3}(4))/100); 28 | if length(yp)==1 29 | yp=[yp,'.00']; 30 | else 31 | yp=[yp,repmat('0',1,4-length(yp))]; 32 | end 33 | fcn_EEGpic(SEG,ax{i},['GPD_',num2str(i),' ',yp]) 34 | end 35 | 36 | hold(ax{1},'on'); 37 | text(ax{1},198900,28,'A','fontsize',25) 38 | hold(ax{1},'off'); 39 | 40 | %% B. 
LRDA 41 | lut=LUT{4}; 42 | for i=1:size(lut,1) 43 | fileName=lut{i,1};tmp=load([dataDir,fileName]); 44 | seg=tmp.seg; 45 | seg=filtfilt(B1,A1,seg')';seg=filtfilt(B2,A2,seg')'; 46 | eeg=seg(1:19,(20*Fs+1):30*Fs);ekg=seg(20,(20*Fs+1):30*Fs); 47 | if isnan(var(ekg))||var(ekg)==0 48 | ekg=mean(eeg,1); 49 | end 50 | eeg_clean=fcn_cleaningPipeline(eeg,ekg); 51 | SEG=[eeg_clean;ekg]; 52 | yp=num2str(round(100*lut{i,3}(5))/100); 53 | if length(yp)==1 54 | yp=[yp,'.00']; 55 | else 56 | yp=[yp,repmat('0',1,4-length(yp))]; 57 | end 58 | fcn_EEGpic(SEG,ax{4+i},['LRDA_',num2str(i),' ',yp]) 59 | end 60 | hold(ax{5},'on'); 61 | text(ax{5},198900,28,'B','fontsize',25) 62 | hold(ax{5},'off'); 63 | 64 | %% 65 | print(gcf,'-r300','-dpng','./Fig5.png'); 66 | -------------------------------------------------------------------------------- /code_for_figures/Figure6_samples_GRDA_Other.m: -------------------------------------------------------------------------------- 1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | % Figure 6: Examples of Smooth Pattern Transition for GRDA (A) and Other (B) 3 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 4 | clc;close all;clear; 5 | 6 | addpath('./Callbacks/'); 7 | dataDir='./Data/Figure4to6/samples/'; 8 | tmp=load('./Data/Figure3/samples.mat'); 9 | LUT=tmp.LUT; 10 | Fs=200;[B1,A1]=butter(3,[.5,40]/Fs);[B2,A2]=butter(3,[55,65]/Fs,'stop'); 11 | 12 | %% set figure 13 | f=figure('units','normalized','position',[0.1000,0.0380,0.4667,0.9333],'MenuBar','none','ToolBar','none','color','w'); 14 | ax={subplot('position',[.05,.74,.43,.20]);subplot('position',[.05,.50,.43,.20]);subplot('position',[.05,.26,.43,.20]);subplot('position',[.05,.02,.43,.20]);subplot('position',[.53,.74,.43,.20]);subplot('position',[.53,.50,.43,.20]);subplot('position',[.53,.26,.43,.20]);subplot('position',[.53,.02,.43,.20])}; 15 | 16 | % A. GRDA 17 | lut=LUT{5}; 18 | for i=1:size(lut,1) 19 | fileName=lut{i,1};tmp=load([dataDir,fileName]); 20 | seg=tmp.seg;seg=filtfilt(B1,A1,seg')';seg=filtfilt(B2,A2,seg')'; 21 | eeg=seg(1:19,(20*Fs+1):30*Fs);ekg=seg(20,(20*Fs+1):30*Fs); 22 | if isnan(var(ekg))||var(ekg)==0 23 | ekg=mean(eeg,1); 24 | end 25 | eeg_clean=fcn_cleaningPipeline(eeg,ekg); 26 | SEG=[eeg_clean;ekg]; 27 | yp=num2str(round(100*lut{i,3}(6))/100); 28 | if length(yp)==1 29 | yp=[yp,'.00']; 30 | else 31 | yp=[yp,repmat('0',1,4-length(yp))]; 32 | end 33 | fcn_EEGpic(SEG,ax{i},['GRDA_',num2str(i),' ',yp]) 34 | end 35 | 36 | hold(ax{1},'on'); 37 | text(ax{1},198900,28,'A','fontsize',25) 38 | hold(ax{1},'off'); 39 | 40 | %% B. 
Other 41 | lut=LUT{6}; 42 | for i=1:size(lut,1) 43 | fileName=lut{i,1};tmp=load([dataDir,fileName]); 44 | seg=tmp.seg; 45 | seg=filtfilt(B1,A1,seg')';seg=filtfilt(B2,A2,seg')'; 46 | eeg=seg(1:19,(20*Fs+1):30*Fs);ekg=seg(20,(20*Fs+1):30*Fs); 47 | SEG=[fcn_bipolar(eeg);ekg]; 48 | yp=num2str(round(100*lut{i,3}(1))/100); 49 | if length(yp)==1 50 | yp=[yp,'.00']; 51 | else 52 | yp=[yp,repmat('0',1,4-length(yp))]; 53 | end 54 | fcn_EEGpic(SEG,ax{4+i},['Other_',num2str(i),' ',yp]) 55 | end 56 | hold(ax{5},'on'); 57 | text(ax{5},198900,28,'B','fontsize',25) 58 | hold(ax{5},'off'); 59 | 60 | %% 61 | print(gcf,'-r300','-dpng','./Fig6.png'); 62 | -------------------------------------------------------------------------------- /code_for_figures/FigureS2_Flowchart.m: -------------------------------------------------------------------------------- 1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | % Figure S2: Flowchart on data splits. 3 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 4 | clc;close all;clear; 5 | 6 | dataDir='./Data/FigureS2/'; 7 | 8 | %% Dataset A: all Real 9 | tmp=load([dataDir,'datasetA.mat']);keys=tmp.keys;Y=tmp.Y;pats=cell(size(Y,1),1);for i=1:size(Y,1);x=split(keys{i},'_');pats{i}=x{2};end;pats=unique(pats); 10 | raters=find(sum(~isnan(Y),1)>0);y=mode(Y,2);c=hist(y,0:5);c=c([2:6 1])'; 11 | str1=['Dataset A: ',num2str(length(raters)),' raters ',num2str(length(pats)),' patients ',num2str(length(y)),' samples']; 12 | str2=['SZ ',num2str(c(1)) ];str3=['LPD ',num2str(c(2)) ];str4=['GPD ',num2str(c(3)) ];str5=['LRDA ',num2str(c(4)) ];str6=['GRDA ',num2str(c(5)) ];str7=['Other ',num2str(c(6)) ]; 13 | disp('-----------------------------------------------------') 14 | disp(str1);disp(str2);disp(str3);disp(str4);disp(str5);disp(str6);disp(str7) 15 | 16 | %% Dataset B: 1+2 Real (>3) 17 | tmp=load([dataDir,'datasetB.mat']);keys=tmp.keys;Y=tmp.Y;pats=cell(size(Y,1),1);for i=1:size(Y,1);x=split(keys{i},'_');pats{i}=x{2};end;pats=unique(pats); 18 | raters=find(sum(~isnan(Y),1)>0);y=mode(Y,2);c=hist(y,0:5);c=c([2:6 1])'; 19 | str1=['Dataset B: ',num2str(length(raters)),' raters ',num2str(length(pats)),' patients ',num2str(length(y)),' samples']; 20 | str2=['SZ ',num2str(c(1)) ];str3=['LPD ',num2str(c(2)) ];str4=['GPD ',num2str(c(3)) ];str5=['LRDA ',num2str(c(4)) ];str6=['GRDA ',num2str(c(5)) ];str7=['Other ',num2str(c(6)) ]; 21 | disp('-----------------------------------------------------') 22 | disp(str1);disp(str2);disp(str3);disp(str4);disp(str5);disp(str6);disp(str7) 23 | 24 | %% Dataset C: 3+4 Real (>10) 25 | tmp=load([dataDir,'datasetC.mat']);keys=tmp.keys;Y=tmp.Y;pats=cell(size(Y,1),1);for i=1:size(Y,1);x=split(keys{i},'_');pats{i}=x{2};end;pats=unique(pats); 26 | raters=find(sum(~isnan(Y),1)>0);y=mode(Y,2);c=hist(y,0:5);c=c([2:6 1])'; 27 | str1=['Dataset C: ',num2str(length(raters)),' raters ',num2str(length(pats)),' patients ',num2str(length(y)),' samples']; 28 | str2=['SZ ',num2str(c(1)) ];str3=['LPD ',num2str(c(2)) ];str4=['GPD ',num2str(c(3)) ];str5=['LRDA ',num2str(c(4)) ];str6=['GRDA ',num2str(c(5)) ];str7=['Other ',num2str(c(6)) ]; 29 | disp('-----------------------------------------------------') 30 | disp(str1);disp(str2);disp(str3);disp(str4);disp(str5);disp(str6);disp(str7) 31 | 32 | %% Dataset D: 3+4 Real+SP-spread 33 | tmp=load([dataDir,'datasetD.mat']);keys=tmp.keys;Y=tmp.Y;pats=cell(size(Y,1),1);for i=1:size(Y,1);x=split(keys{i},'_');pats{i}=x{2};end;pats=unique(pats); 34 | 
raters=find(sum(~isnan(Y),1)>0);y=mode(Y,2);c=hist(y,0:5);c=c([2:6 1])'; 35 | str1=['Dataset D: ',num2str(length(raters)),' raters ',num2str(length(pats)),' patients ',num2str(length(y)),' samples']; 36 | str2=['SZ ',num2str(c(1)) ];str3=['LPD ',num2str(c(2)) ];str4=['GPD ',num2str(c(3)) ];str5=['LRDA ',num2str(c(4)) ];str6=['GRDA ',num2str(c(5)) ];str7=['Other ',num2str(c(6)) ]; 37 | disp('-----------------------------------------------------') 38 | disp(str1);disp(str2);disp(str3);disp(str4);disp(str5);disp(str6);disp(str7) 39 | 40 | %% Dataset 1: Real+SP-spread 41 | tmp=load([dataDir,'dataset1.mat']);keys=tmp.keys;Y=tmp.Y;pats=cell(size(Y,1),1);for i=1:size(Y,1);x=split(keys{i},'_');pats{i}=x{2};end;pats=unique(pats); 42 | raters=find(sum(~isnan(Y),1)>0);y=mode(Y,2);c=hist(y,0:5);c=c([2:6 1])'; 43 | str1=['Dataset 1: ',num2str(length(raters)),' raters ',num2str(length(pats)),' patients ',num2str(length(y)),' samples']; 44 | str2=['SZ ',num2str(c(1)) ];str3=['LPD ',num2str(c(2)) ];str4=['GPD ',num2str(c(3)) ];str5=['LRDA ',num2str(c(4)) ];str6=['GRDA ',num2str(c(5)) ];str7=['Other ',num2str(c(6)) ]; 45 | disp('-----------------------------------------------------') 46 | disp(str1);disp(str2);disp(str3);disp(str4);disp(str5);disp(str6);disp(str7) 47 | 48 | %% Dataset 2: Real+SP-spread+UMAP-spread 49 | tmp=load([dataDir,'dataset2.mat']);keys=tmp.keys;Y=tmp.Y;pats=cell(size(Y,1),1);for i=1:size(Y,1);x=split(keys{i},'_');pats{i}=x{2};end;pats=unique(pats); 50 | raters=find(sum(~isnan(Y),1)>0);y=mode(Y,2);c=hist(y,0:5);c=c([2:6 1])'; 51 | str1=['Dataset 2: ',num2str(length(raters)),' raters ',num2str(length(pats)),' patients ',num2str(length(y)),' samples']; 52 | str2=['SZ ',num2str(c(1)) ];str3=['LPD ',num2str(c(2)) ];str4=['GPD ',num2str(c(3)) ];str5=['LRDA ',num2str(c(4)) ];str6=['GRDA ',num2str(c(5)) ];str7=['Other ',num2str(c(6)) ]; 53 | disp('-----------------------------------------------------') 54 | disp(str1);disp(str2);disp(str3);disp(str4);disp(str5);disp(str6);disp(str7) 55 | 56 | %% Dataset 3: Real+SP-spread 57 | tmp=load([dataDir,'dataset3.mat']);keys=tmp.keys;Y=tmp.Y;pats=cell(size(Y,1),1);for i=1:size(Y,1);x=split(keys{i},'_');pats{i}=x{2};end;pats=unique(pats); 58 | raters=find(sum(~isnan(Y),1)>0);y=mode(Y,2);c=hist(y,0:5);c=c([2:6 1])'; 59 | str1=['Dataset 3: ',num2str(length(raters)),' raters ',num2str(length(pats)),' patients ',num2str(length(y)),' samples']; 60 | str2=['SZ ',num2str(c(1)) ];str3=['LPD ',num2str(c(2)) ];str4=['GPD ',num2str(c(3)) ];str5=['LRDA ',num2str(c(4)) ];str6=['GRDA ',num2str(c(5)) ];str7=['Other ',num2str(c(6)) ]; 61 | disp('-----------------------------------------------------') 62 | disp(str1);disp(str2);disp(str3);disp(str4);disp(str5);disp(str6);disp(str7) 63 | 64 | %% Dataset 4: Real+SP-spread 65 | tmp=load([dataDir,'dataset4.mat']);keys=tmp.keys;Y=tmp.Y;pats=cell(size(Y,1),1);for i=1:size(Y,1);x=split(keys{i},'_');pats{i}=x{2};end;pats=unique(pats); 66 | raters=find(sum(~isnan(Y),1)>0);y=mode(Y,2);c=hist(y,0:5);c=c([2:6 1])'; 67 | str1=['Dataset 4: ',num2str(length(raters)),' raters ',num2str(length(pats)),' patients ',num2str(length(y)),' samples']; 68 | str2=['SZ ',num2str(c(1)) ];str3=['LPD ',num2str(c(2)) ];str4=['GPD ',num2str(c(3)) ];str5=['LRDA ',num2str(c(4)) ];str6=['GRDA ',num2str(c(5)) ];str7=['Other ',num2str(c(6)) ]; 69 | disp('-----------------------------------------------------') 70 | disp(str1);disp(str2);disp(str3);disp(str4);disp(str5);disp(str6);disp(str7) 71 | 
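For reference, here is a minimal toy sketch of the per-sample vote counting that FigureS2_Flowchart.m (and Table1_Splits.m below) performs for each dataset. The label coding is an inference from these scripts (the c([2:6 1]) reordering and the handling of y==0 as "Other" elsewhere): 0 = Other, 1-5 = Seizure, LPD, GPD, LRDA, GRDA, with NaN marking raters who did not score a sample.

% Toy example of the majority vote and class reordering used above.
% Assumed coding (inferred from these scripts): 0=Other, 1=Seizure, 2=LPD, 3=GPD, 4=LRDA, 5=GRDA; NaN=not scored.
Y = [1 1 2 NaN; 0 0 0 1; 3 NaN 3 3];   % 3 samples x 4 raters
y = mode(Y,2);                          % majority vote per sample; mode ignores NaN entries
c = hist(y,0:5);                        % counts for label codes 0..5
c = c([2:6 1])';                        % reorder to SZ, LPD, GPD, LRDA, GRDA, Other
patterns = {'SZ','LPD','GPD','LRDA','GRDA','Other'};
for k = 1:6
    fprintf('%-6s %d\n',patterns{k},c(k));
end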
-------------------------------------------------------------------------------- /code_for_figures/FigureS3_SPspread.m: -------------------------------------------------------------------------------- 1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | % Figure S3: Samples belonging to the same stationary period (SP) are 3 | % assigned the same label. 4 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 5 | clc;close all;clear; 6 | addpath('./Callbacks/'); 7 | 8 | %% set figure 9 | f=figure('units','normalized','position',[0.1094,0.0472,0.8531,0.8574],'MenuBar','none','ToolBar','none','color','w'); 10 | ax={subplot('position',[.03,.86,.96,.12]);subplot('position',[.03,.74,.96,.12]);subplot('position',[.03,.62,.96,.12]);subplot('position',[.03,.50,.96,.12]);subplot('position',[.03,.40,.96,.10]);subplot('position',[.03,.03,.30,.34]);subplot('position',[.36,.03,.30,.34]);subplot('position',[.69,.03,.30,.34])}; 11 | 12 | tmp=load('./Data/FigureS3/FigureS3_input'); 13 | seg_t1=tmp.seg_t1;seg_t2=tmp.seg_t2;seg_tc=tmp.seg_tc; 14 | thr_cp=.3;ss=tmp.ss;S_x=tmp.S_x;S_y=tmp.S_y;S_data=tmp.S_data; 15 | 16 | %% qEEG 17 | [icp,P,iscp,iscpc]=fcn_cpd(ss,thr_cp); 18 | col=[-10,25];colormap jet;spatialRegs={'LL','RL','LP','RP'}; 19 | for i=1:4 20 | set(f,'CurrentAxes',ax{i});cla(ax{i}) 21 | spec=S_data{i,1}; 22 | imagesc(ax{i},S_x,S_y,pow2db(spec),col); 23 | axis(ax{i},'xy'); 24 | xx=get(ax{i},'yticklabel'); 25 | text(ax{i},S_x(1),18,spatialRegs{i},'fontsize',15,'color','w','fontweight','bold') 26 | set(ax{i},'xtick',[],'box','on','yticklabel',xx) 27 | ylabel(ax{i},'Freq (Hz)') 28 | end 29 | 30 | set(f,'CurrentAxes',ax{5});cla(ax{5}) 31 | stimes=S_x;tc=length(stimes)/2+1; 32 | hold(ax{5},'on') 33 | a=min(P)-5;b=max(P)+1; 34 | plot(ax{5},stimes,P,'g-','linewidth',2) 35 | set(ax{5},'xtick',[]) 36 | for icpd=1:length(icp)-1 37 | aa_=icp(icpd);bb_=icp(icpd+1); 38 | if icpd==10 39 | cc_=round((aa_+bb_)/2); 40 | plot(ax{5},[stimes(cc_),stimes(cc_)],[a b],'r-.','linewidth',1) 41 | text(ax{5},stimes(cc_),a,'t_C','verticalalignment','top','horizontalalignment','center') 42 | text(ax{5},stimes(round((bb_+cc_)/2)),a,'t_2','verticalalignment','top','horizontalalignment','left') 43 | text(ax{5},stimes(round((aa_+cc_)/2)),a,'t_1','verticalalignment','top','horizontalalignment','left') 44 | plot(ax{5},stimes(round((bb_+cc_)/2)),a+3,'rv','markersize',10,'markerfacecolor','r') 45 | plot(ax{5},stimes(round((aa_+cc_)/2)),a+3,'rv','markersize',10,'markerfacecolor','r') 46 | end 47 | if icpd>1 48 | plot(ax{5},[stimes(aa_),stimes(aa_)],[a b],'m-.','linewidth',1) 49 | end 50 | cpd_mean=mean(P(aa_:bb_)); 51 | plot(ax{5},[stimes(aa_),stimes(bb_)],[cpd_mean,cpd_mean],'b-','linewidth',1) 52 | end 53 | xlim([stimes(1) stimes(end)]);ylim([a b]);ylabel('Power (dB)');box on 54 | 55 | plot(ax{5},[stimes(tc),stimes(tc)],[a b],'b--','linewidth',1) 56 | text(ax{5},stimes(1),a,'00:00:00','verticalalignment','top','horizontalalignment','left') 57 | text(ax{5},stimes(end),a,'00:10:00','verticalalignment','top','horizontalalignment','right') 58 | hold(ax{5},'off') 59 | 60 | %% EEG 61 | fcn_plotEEG(f,ax{6},seg_t1,'L-Bipolar',1,'t_1') 62 | fcn_plotEEG(f,ax{7},seg_tc,'L-Bipolar',0,'t_C') 63 | fcn_plotEEG(f,ax{8},seg_t2,'L-Bipolar',0,'t_2') 64 | 65 | print(gcf,'-r300','-dpng', './FigS3.png'); 66 | -------------------------------------------------------------------------------- /code_for_figures/FigureS5_UMAPspread.m: 
-------------------------------------------------------------------------------- 1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | % Figure S5. Creation of pseudo-labels via “label spreading” in Steps 3-4 3 | % of the model development procedure for SPaRCNet. 4 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 5 | clc;close all;clear; 6 | addpath('./Callbacks/'); 7 | 8 | %% set figure 9 | figure('units','normalized','position',[0.0,0.1,0.9,0.8],'MenuBar','none','ToolBar','none','color','w'); 10 | ax={subplot('position',[0/6,5/6,1/6,1/6]);subplot('position',[1/6,5/6,1/6,1/6]);subplot('position',[2/6,5/6,1/6,1/6]);subplot('position',[3/6,5/6,1/6,1/6]);subplot('position',[4/6,5/6,1/6,1/6]);subplot('position',[5/6,5/6,1/6,1/6]);subplot('position',[5/6,4/6,1/6,1/6]);subplot('position',[5/6,3/6,1/6,1/6]);subplot('position',[5/6,2/6,1/6,1/6]);subplot('position',[5/6,1/6,1/6,1/6]);subplot('position',[5/6,0/6,1/6,1/6]);subplot('position',[4/6,0/6,1/6,1/6]);subplot('position',[3/6,0/6,1/6,1/6]);subplot('position',[2/6,0/6,1/6,1/6]);subplot('position',[1/6,0/6,1/6,1/6]);subplot('position',[0/6,0/6,1/6,1/6]);subplot('position',[0/6,1/6,1/6,1/6]);subplot('position',[0/6,2/6,1/6,1/6]);subplot('position',[0/6,3/6,1/6,1/6]);subplot('position',[0/6,4/6,1/6,1/6]);subplot('position',[1/6,1/6,4/6 4/6])}; 11 | 12 | %% UMAP and labels 13 | tmp=load('./Data/FigureS5/FigureS5_input.mat'); 14 | Vxy=tmp.Vxy;Y_real=tmp.Y;Y_pseu=tmp.Y_spread;IDX=tmp.idx; 15 | y_real=mode(Y_real,2);y_real(y_real==0)=6; 16 | y_pseu=mode(Y_pseu,2);y_pseu(y_pseu==0)=6; 17 | 18 | xlimts=[-15,60];ylimts=[-18,25]; 19 | Cs=flipud(jet(7)); 20 | colors_real=0.8*ones(length(y_real),3); 21 | colors_pseu=0.8*ones(length(y_real),3); 22 | 23 | for k=1:6 24 | idx=find(y_real==k);colors_real(idx,:)=repmat(Cs(k,:),length(idx),1); 25 | idx=find(y_pseu==k);colors_pseu(idx,:)=repmat(Cs(k,:),length(idx),1); 26 | end 27 | 28 | set(gcf,'CurrentAxes',ax{21});cla(ax{21}) 29 | hold(ax{21},'on'); 30 | text(ax{21},xlimts(1)+5,ylimts(2)-3,'A','fontsize',20); 31 | text(ax{21},5,25/2+9,'Before','fontsize',15); 32 | text(ax{21},41,25/2+9,'After','fontsize',15); 33 | ss1=scatter(ax{21},Vxy(:,1),Vxy(:,2),20,colors_real,'filled');alpha(ss1,.2) 34 | ss2=scatter(ax{21},Vxy(:,1)+35,Vxy(:,2),20,colors_pseu,'filled');alpha(ss2,.2) 35 | 36 | patterns={'Seizure','LPD','GPD','LRDA','GRDA','Other'}; 37 | for i=1:6 38 | y1=12-(i-1)*2;y2=13-(i-1)*2; 39 | fill(ax{21},[10,10,12,12]+8,[y1,y2,y2,y1]+7,Cs(i,:),'edgecolor',Cs(i,:)) 40 | text(ax{21},12.2+8,(y1+y2)/2+7.3,patterns{i},'fontsize',10,'verticalalignment','middle') 41 | end 42 | set(ax{21},'xlim',xlimts,'ylim',ylimts) 43 | axis off; 44 | hold(ax{21},'off'); 45 | 46 | %% individuals 47 | subIDX={'B','C','D','E','F','G','H','I','J','K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y','Z'}; 48 | for jj=1:length(IDX) 49 | jj1=IDX(jj); 50 | y_real=Y_real(:,jj1);y_real(y_real==0)=6; 51 | y_pseu=Y_pseu(:,jj1);y_pseu(y_pseu==0)=6; 52 | 53 | colors_real=NaN(length(y_real),3); 54 | colors_pseu=NaN(length(y_real),3); 55 | for k=1:6 56 | idx=find(y_real==k);colors_real(idx,:)=repmat(Cs(k,:),length(idx),1); 57 | idx=find(y_pseu==k);colors_pseu(idx,:)=repmat(Cs(k,:),length(idx),1); 58 | end 59 | set(gcf,'CurrentAxes',ax{jj});cla(ax{jj}) 60 | hold(ax{jj},'on'); 61 | if jj == 1 62 | text(ax{jj},xlimts(1)+1,ylimts(2)-4,subIDX{jj},'fontsize',20) 63 | end 64 | idx1=find(~isnan(y_real));idx0=find(isnan(y_real)); 65 | 
ss0=scatter(ax{jj},Vxy(idx0,1),Vxy(idx0,2),20,repmat([0.8,0.8,0.8],length(idx0),1),'filled');alpha(ss0,.2); 66 | ss1=scatter(ax{jj},Vxy(idx1,1),Vxy(idx1,2),20,colors_real(idx1,:),'filled');alpha(ss1,.05); 67 | ss2=scatter(ax{jj},Vxy(:,1)+35,Vxy(:,2),20,colors_pseu,'filled');alpha(ss2,.2) 68 | text(ax{jj},20,25/2+10,['E',repmat('0',1,2-length(num2str(jj))),num2str(jj)],'fontsize',10) 69 | set(ax{jj},'xlim',xlimts,'ylim',ylimts);axis off; 70 | hold(ax{jj},'off'); 71 | drawnow 72 | end 73 | 74 | %% 75 | print(gcf,'-r300','-dpng', './FigS5.png'); -------------------------------------------------------------------------------- /code_for_figures/FigureS8_IRR.m: -------------------------------------------------------------------------------- 1 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 2 | % Figure S8: Additional performance metrics for SPaRCNet. 3 | %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% 4 | clc;close all;clear; 5 | addpath('./Callbacks/'); 6 | 7 | % set figure 8 | figure('units','normalized','position',[0.0000,0.0463,1.0000,0.9269],'MenuBar','none','ToolBar','none','color','w'); 9 | ax={subplot('position',[.03,.03,.43,.35]);subplot('position',[.03,.45,.43,.51]);subplot('position',[.49,.03,.24,.44]);subplot('position',[.49,.52,.24,.44]);subplot('position',[.75,.03,.24,.44]);subplot('position',[.75,.52,.24,.44])}; 10 | patterns={'Other','Seizure','LPD','GPD','LRDA','GRDA'}; 11 | 12 | % get data 13 | tmp=load('./Data/FigureS8/FigureS8_input.mat'); 14 | Y_model=tmp.Yh;Y=tmp.Y; 15 | 16 | % re-calibrate with post rules learnt from training set dataset#3 17 | best_thresh=[0,0.0268,0.0016,0,0.00070,0;0,0,0.3476,0.3242,0.2916,0;0,0.0132,0,0.0754,0.0027,0;0,0.333,0.1615,0,0.2917,0;0,0.2527,0.4035,0.2862,0,0;0.4574,0,0,0.0375,0.0066,0]; 18 | y_model=fcn_getModelVotes(Y_model,best_thresh); 19 | 20 | % IRR with CI from bootstrap 10K times 21 | pIRR_EE=tmp.pIRR_EE;pIRR_EA=tmp.pIRR_EA; 22 | mIRR_EE=tmp.mIRR_EE;mIRR_EA=tmp.mIRR_EA; 23 | pIRR_EE_L=prctile(pIRR_EE,2.5);pIRR_EE_U=prctile(pIRR_EE,100-2.5); 24 | pIRR_EA_L=prctile(pIRR_EA,2.5);pIRR_EA_U=prctile(pIRR_EA,100-2.5); 25 | mIRR_EE_L=prctile(mIRR_EE,2.5);mIRR_EE_U=prctile(mIRR_EE,100-2.5); 26 | mIRR_EA_L=prctile(mIRR_EA,2.5);mIRR_EA_U=prctile(mIRR_EA,100-2.5); 27 | dpIRR=pIRR_EA-pIRR_EE;dmIRR=mIRR_EA-mIRR_EE; 28 | dpIRR_L=prctile(dpIRR,2.5);dpIRR_U=prctile(dpIRR,100-2.5); 29 | dmIRR_L=prctile(dmIRR,2.5);dmIRR_U=prctile(dmIRR,100-2.5); 30 | 31 | % real CM and pIRR bars 32 | P_human=fcn_get_pIRR_human(Y); 33 | P_model=fcn_get_pIRR_model(Y,y_model); 34 | 35 | pirr_ee=diag(P_human);pirr_ea=diag(P_model); 36 | 37 | titleStr='ee-pCM (%)'; 38 | fcn_plotConfusionMx(P_human,titleStr,patterns([2:6 1]),'Purples',ax{4},1,'C'); 39 | 40 | titleStr='ea-pCM (%)'; 41 | fcn_plotConfusionMx(P_model,titleStr,patterns([2:6 1]),'Purples',ax{6},1,'D'); 42 | 43 | % real CM and mIRR bars 44 | M_human=fcn_get_mIRR_human_loo(Y); 45 | M_model=fcn_get_mIRR_model(Y,y_model); 46 | mirr_ee=diag(M_human);mirr_ea=diag(M_model); 47 | 48 | titleStr='ee-mCM (%)'; 49 | fcn_plotConfusionMx(M_human,titleStr,patterns([2:6,1]),'Purples',ax{3},2,'E'); 50 | 51 | titleStr='ea-mCM (%)'; 52 | fcn_plotConfusionMx(M_model,titleStr,patterns([2:6,1]),'Purples',ax{5},2,'F'); 53 | 54 | % IRR bars 55 | set(gcf,'CurrentAxes',ax{2});cla(ax{2}) 56 | hold(ax{2},'on'); 57 | cc=brewermap(4,'RdBu');cc=cc([2,1,3,4],:); 58 | X=[pirr_ee,pirr_ea,mirr_ee,mirr_ea]; 59 | b=bar(ax{2},X,'FaceColor','flat'); 60 | for k=1:size(X,2) 61 | b(k).CData=cc(k,:); 
62 | end 63 | xdata=[0.7273,1.7273,2.7273,3.7273,4.7273,5.7273;0.9091,1.9091,2.9091,3.9091,4.9091,5.9091;1.0909,2.0909,3.0909,4.0909,5.0909,6.0909;1.2727,2.2727,3.2727,4.2727,5.2727,6.2727]; 64 | 65 | pp={'Seizure','LPD','GPD','LRDA','GRDA','Other'}; 66 | set(ax{2},'xtick',1:6,'xticklabel',pp,'fontsize',12) 67 | 68 | dd=0.03; 69 | for i=1:6 70 | plot(ax{2},[xdata(1,i) xdata(1,i)],[pIRR_EE_L(i),pIRR_EE_U(i)],'k-','linewidth',1) 71 | plot(ax{2},[xdata(1,i)-dd xdata(1,i)+dd],[pIRR_EE_L(i),pIRR_EE_L(i)],'k-','linewidth',1) 72 | plot(ax{2},[xdata(1,i)-dd xdata(1,i)+dd],[pIRR_EE_U(i),pIRR_EE_U(i)],'k-','linewidth',1) 73 | 74 | plot(ax{2},[xdata(2,i) xdata(2,i)],[pIRR_EA_L(i),pIRR_EA_U(i)],'k-','linewidth',1) 75 | plot(ax{2},[xdata(2,i)-dd xdata(2,i)+dd],[pIRR_EA_L(i),pIRR_EA_L(i)],'k-','linewidth',1) 76 | plot(ax{2},[xdata(2,i)-dd xdata(2,i)+dd],[pIRR_EA_U(i),pIRR_EA_U(i)],'k-','linewidth',1) 77 | 78 | plot(ax{2},[xdata(3,i) xdata(3,i)],[mIRR_EE_L(i),mIRR_EE_U(i)],'k-','linewidth',1) 79 | plot(ax{2},[xdata(3,i)-dd xdata(3,i)+dd],[mIRR_EE_L(i),mIRR_EE_L(i)],'k-','linewidth',1) 80 | plot(ax{2},[xdata(3,i)-dd xdata(3,i)+dd],[mIRR_EE_U(i),mIRR_EE_U(i)],'k-','linewidth',1) 81 | 82 | plot(ax{2},[xdata(4,i) xdata(4,i)],[mIRR_EA_L(i),mIRR_EA_U(i)],'k-','linewidth',1) 83 | plot(ax{2},[xdata(4,i)-dd xdata(4,i)+dd],[mIRR_EA_L(i),mIRR_EA_L(i)],'k-','linewidth',1) 84 | plot(ax{2},[xdata(4,i)-dd xdata(4,i)+dd],[mIRR_EA_U(i),mIRR_EA_U(i)],'k-','linewidth',1) 85 | end 86 | legend(b,'ee-pIRR','ea-pIRR','ee-mIRR','ea-mIRR') 87 | legend('boxoff') 88 | ylim([0,1]) 89 | text(ax{2},0.1,1.03,'A','fontsize',25) 90 | hold(ax{2},'off'); 91 | 92 | % diff-IRR 93 | set(gcf,'CurrentAxes',ax{1});cla(ax{1}) 94 | hold(ax{1},'on'); 95 | cc=brewermap(2,'RdBu'); 96 | X =[pirr_ea-pirr_ee,mirr_ea- mirr_ee]; 97 | b=bar(ax{1},X,'FaceColor','flat'); 98 | for k=1:size(X,2) 99 | b(k).CData=cc(k,:); 100 | end 101 | xdata=[0.8571,1.8571,2.8571,3.8571,4.8571,5.8571;1.1429,2.1429,3.1429,4.1429,5.1429,6.1429]; 102 | set(ax{1},'xtick',1:6,'xticklabel',pp,'fontsize',12) 103 | 104 | for i=1:6 105 | plot(ax{1},[xdata(1,i) xdata(1,i)],[dpIRR_L(i),dpIRR_U(i)],'k-','linewidth',1) 106 | plot(ax{1},[xdata(1,i)-dd xdata(1,i)+dd],[dpIRR_L(i),dpIRR_L(i)],'k-','linewidth',1) 107 | plot(ax{1},[xdata(1,i)-dd xdata(1,i)+dd],[dpIRR_U(i),dpIRR_U(i)],'k-','linewidth',1) 108 | 109 | plot(ax{1},[xdata(2,i) xdata(2,i)],[dmIRR_L(i),dmIRR_U(i)],'k-','linewidth',1) 110 | plot(ax{1},[xdata(2,i)-dd xdata(2,i)+dd],[dmIRR_L(i),dmIRR_L(i)],'k-','linewidth',1) 111 | plot(ax{1},[xdata(2,i)-dd xdata(2,i)+dd],[dmIRR_U(i),dmIRR_U(i)],'k-','linewidth',1) 112 | end 113 | legend(b,'ea-ee pIRR','ea-ee mIRR') 114 | legend('boxoff') 115 | ylim([-0.15,0.15]) 116 | 117 | text(ax{1},0.1,0.15,'B','fontsize',25) 118 | text(ax{1},.7, 0.1,'ea>ee','fontsize',15) 119 | text(ax{1},.7,-0.1,'ea0);y=mode(Y,2);c=hist(y,0:5);c=c([2:6,1])';p=round(1E3*c/sum(c))/10; 16 | sex=lut_pat(ismember(lut_pat(:,1),pats),2);nFemale=sum(ismember(sex,'Female')); 17 | age=cell2mat(lut_pat(ismember(lut_pat(:,1),pats),3)); 18 | str1=['Dataset 1: ',num2str(length(raters)),' raters ',num2str(length(pats)),' patients ',num2str(length(y)),' samples'];str8=['Age ',num2str(round(10*nanmean(age))/10),' (',num2str(round(10*nanstd(age))/10),')']; 19 | str9=['Sex /F ',num2str(nFemale),' (',num2str(round(1E3*nFemale/length(pats))/10),'%)']; 20 | str2=['SZ ',num2str(c(1)),' (',num2str(p(1)),'%)']; 21 | str3=['LPD ',num2str(c(2)),' (',num2str(p(2)),'%)']; 22 | str4=['GPD ',num2str(c(3)),' (',num2str(p(3)),'%)']; 23 | 
str5=['LRDA ',num2str(c(4)),' (',num2str(p(4)),'%)']; 24 | str6=['GRDA ',num2str(c(5)),' (',num2str(p(5)),'%)']; 25 | str7=['Other ',num2str(c(6)),' (',num2str(p(6)),'%)']; 26 | disp('-----------------------------------------------------') 27 | disp(str1);disp(str8);disp(str9);disp(str2);disp(str3);disp(str4);disp(str5);disp(str6);disp(str7) 28 | 29 | % Dataset 2: Real+SP-spread+UMAP-spread 30 | tmp=load('./Data/Table1/dataset2.mat');keys=tmp.keys;Y=tmp.Y;pats=cell(size(Y,1),1);for i=1:size(Y,1);x=split(keys{i},'_');pats{i}=x{2};end;pats=unique(pats); 31 | raters=find(sum(~isnan(Y),1)>0);y=mode(Y,2);c=hist(y,0:5);c=c([2:6,1])';p=round(1E3*c/sum(c))/10; 32 | sex=lut_pat(ismember(lut_pat(:,1),pats),2);nFemale=sum(ismember(sex,'Female')); 33 | age=cell2mat(lut_pat(ismember(lut_pat(:,1),pats),3)); 34 | str1=['Dataset 2: ',num2str(length(raters)),' raters ',num2str(length(pats)),' patients ',num2str(length(y)),' samples'];str8=['Age ',num2str(round(10*nanmean(age))/10),' (',num2str(round(10*nanstd(age))/10),')']; 35 | str9=['Sex /F ',num2str(nFemale),' (',num2str(round(1E3*nFemale/length(pats))/10),'%)']; 36 | str2=['SZ ',num2str(c(1)),' (',num2str(p(1)),'%)']; 37 | str3=['LPD ',num2str(c(2)),' (',num2str(p(2)),'%)']; 38 | str4=['GPD ',num2str(c(3)),' (',num2str(p(3)),'%)']; 39 | str5=['LRDA ',num2str(c(4)),' (',num2str(p(4)),'%)']; 40 | str6=['GRDA ',num2str(c(5)),' (',num2str(p(5)),'%)']; 41 | str7=['Other ',num2str(c(6)),' (',num2str(p(6)),'%)']; 42 | disp('-----------------------------------------------------') 43 | disp(str1);disp(str8);disp(str9);disp(str2);disp(str3);disp(str4);disp(str5);disp(str6);disp(str7) 44 | 45 | % Dataset 3: Real+SP-spread 46 | tmp=load('./Data/Table1/dataset3.mat');keys=tmp.keys;Y=tmp.Y;pats=cell(size(Y,1),1);for i=1:size(Y,1);x=split(keys{i},'_');pats{i}=x{2};end;pats=unique(pats); 47 | raters=find(sum(~isnan(Y),1)>0);y=mode(Y,2);c=hist(y,0:5);c=c([2:6,1])';p=round(1E3*c/sum(c))/10; 48 | sex=lut_pat(ismember(lut_pat(:,1),pats),2);nFemale=sum(ismember(sex,'Female')); 49 | age=cell2mat(lut_pat(ismember(lut_pat(:,1),pats),3)); 50 | str1=['Dataset 3: ',num2str(length(raters)),' raters ',num2str(length(pats)),' patients ',num2str(length(y)),' samples'];str8=['Age ',num2str(round(10*nanmean(age))/10),' (',num2str(round(10*nanstd(age))/10),')']; 51 | str9=['Sex /F ',num2str(nFemale),' (',num2str(round(1E3*nFemale/length(pats))/10),'%)']; 52 | str2=['SZ ',num2str(c(1)),' (',num2str(p(1)),'%)']; 53 | str3=['LPD ',num2str(c(2)),' (',num2str(p(2)),'%)']; 54 | str4=['GPD ',num2str(c(3)),' (',num2str(p(3)),'%)']; 55 | str5=['LRDA ',num2str(c(4)),' (',num2str(p(4)),'%)']; 56 | str6=['GRDA ',num2str(c(5)),' (',num2str(p(5)),'%)']; 57 | str7=['Other ',num2str(c(6)),' (',num2str(p(6)),'%)']; 58 | disp('-----------------------------------------------------') 59 | disp(str1);disp(str8);disp(str9);disp(str2);disp(str3);disp(str4);disp(str5);disp(str6);disp(str7) 60 | 61 | % Dataset 4: Real+SP-spread 62 | tmp=load('./Data/Table1/dataset4.mat');keys=tmp.keys;Y=tmp.Y;pats=cell(size(Y,1),1);for i=1:size(Y,1);x=split(keys{i},'_');pats{i}=x{2};end;pats=unique(pats); 63 | raters=find(sum(~isnan(Y),1)>0);y=mode(Y,2);c=hist(y,0:5);c=c([2:6,1])';p=round(1E3*c/sum(c))/10; 64 | sex=lut_pat(ismember(lut_pat(:,1),pats),2);nFemale=sum(ismember(sex,'Female')); 65 | age=cell2mat(lut_pat(ismember(lut_pat(:,1),pats),3)); 66 | str1=['Dataset 4: ',num2str(length(raters)),' raters ',num2str(length(pats)),' patients ',num2str(length(y)),' samples']; 67 | str8=['Age ',num2str(round(10*nanmean(age))/10),' 
(',num2str(round(10*nanstd(age))/10),')']; 68 | str9=['Sex /F ',num2str(nFemale),' (',num2str(round(1E3*nFemale/length(pats))/10),'%)']; 69 | str2=['SZ ',num2str(c(1)),' (',num2str(p(1)),'%)']; 70 | str3=['LPD ',num2str(c(2)),' (',num2str(p(2)),'%)']; 71 | str4=['GPD ',num2str(c(3)),' (',num2str(p(3)),'%)']; 72 | str5=['LRDA ',num2str(c(4)),' (',num2str(p(4)),'%)']; 73 | str6=['GRDA ',num2str(c(5)),' (',num2str(p(5)),'%)']; 74 | str7=['Other ',num2str(c(6)),' (',num2str(p(6)),'%)']; 75 | disp('-----------------------------------------------------') 76 | disp(str1);disp(str8);disp(str9);disp(str2);disp(str3);disp(str4);disp(str5);disp(str6);disp(str7) 77 | 78 | %% Table 1 part B - human performance 79 | tmp=load('./Data/Table1/dataset3.mat'); 80 | Y=tmp.Y;nBins=10;K=100;cali_idx_3=NaN(length(pp),20); 81 | [op_sen_3,op_fpr_3,op_ppv_3]=fcn_getOPs_loo(Y,pp); 82 | for i=1:length(pp);cali_idx_3(i,:)=fcn_getCali_human(Y,(i-1),nBins,K);end 83 | cali_idx_3=100*cali_idx_3([2:6,1],:); 84 | 85 | tmp=load('./Data/Table1/dataset4.mat'); 86 | Y=tmp.Y;cali_idx_4=NaN(length(pp),20); 87 | [op_sen_4,op_fpr_4,op_ppv_4]=fcn_getOPs_loo(Y,pp); 88 | for i=1:length(pp);cali_idx_4(i,:)=fcn_getCali_human(Y,(i-1),nBins,K);end 89 | cali_idx_4=100*cali_idx_4([2:6,1],:); 90 | 91 | T_3=NaN(6,12);T_4=NaN(6,12); 92 | for i=1:6 93 | sen=op_sen_3(i,:);fpr=op_fpr_3(i,:);ppv=op_ppv_3(i,:);cal=(cali_idx_3(i,:)); 94 | T_3(i,:)=[mean(sen),min(sen),max(sen),mean(fpr),min(fpr),max(fpr),mean(ppv),min(ppv),max(ppv),mean(cal),min(cal),max(cal)]; 95 | sen=op_sen_4(i,:);fpr=op_fpr_4(i,:);ppv=op_ppv_4(i,:);cal=(cali_idx_4(i,:)); 96 | T_4(i,:)=[mean(sen),min(sen),max(sen),mean(fpr),min(fpr),max(fpr),mean(ppv),min(ppv),max(ppv), mean(cal),min(cal),max(cal)]; 97 | end 98 | 99 | T_3=round(T_3);TT_3=cell(6*4,1);T_4=round(T_4);TT_4=cell(6*4,1); 100 | pp={'Seizure','LPD','GPD','LRDA','GRDA','Other'};ss={'TPR','FPR','PPV','CAL'}; 101 | for i=1:length(pp) 102 | idx1=(i-1)*length(ss); 103 | for k=1:length(ss) 104 | idx2=(k-1)*(length(ss)-1); 105 | TT_3{idx1+k}=[pp{i},'_',ss{k},' ',num2str(T_3(i,idx2+1)),' (',num2str(T_3(i,idx2+2)),' to ',num2str(T_3(i,idx2+3)),')']; 106 | TT_4{idx1+k}=[pp{i},'_',ss{k},' ',num2str(T_4(i,idx2+1)),' (',num2str(T_4(i,idx2+2)),' to ',num2str(T_4(i,idx2+3)),')']; 107 | end 108 | end 109 | disp('--------------------------------------------------------------------') 110 | disp([[{'Dataset 3'};TT_3],[{'Dataset 4'};TT_4]]) 111 | --------------------------------------------------------------------------------
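Finally, a minimal sketch of the percentile-bootstrap convention that recurs in these scripts: Figure1_ROC.m and Figure2_PR.m report intervals from a K=1E4 bootstrap, and FigureS8_IRR.m takes the 2.5th and 97.5th percentiles of precomputed bootstrap samples with prctile. The agree vector below is a hypothetical stand-in, not data from this repository.

% Toy percentile-bootstrap 95% CI for an agreement rate, mirroring the
% prctile(x,2.5)/prctile(x,100-2.5) convention used in FigureS8_IRR.m.
rng(0);                                  % fixed seed so the sketch is reproducible
agree = rand(1000,1) < 0.7;              % hypothetical 0/1 per-sample agreement indicator
K = 1e4;                                 % number of bootstrap resamples (as in the K=1E4 comments)
n = numel(agree);
stat = NaN(K,1);
for k = 1:K
    idx = randi(n,n,1);                  % resample with replacement
    stat(k) = mean(agree(idx));          % agreement rate in this bootstrap replicate
end
ci = [prctile(stat,2.5),prctile(stat,100-2.5)];
fprintf('agreement %.3f, 95%% CI [%.3f, %.3f]\n',mean(agree),ci(1),ci(2));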