├── Feature_Extractor ├── WavLM.py ├── data_preprocess.py ├── iemocap_data │ ├── data1.csv │ ├── data2.csv │ ├── data3.csv │ ├── data4.csv │ ├── data5.csv │ ├── name_label_text.csv │ ├── readme.md │ ├── train1.csv │ ├── train2.csv │ ├── train3.csv │ ├── train4.csv │ ├── train5.csv │ ├── valid1.csv │ ├── valid2.csv │ ├── valid3.csv │ ├── valid4.csv │ └── valid5.csv ├── modules.py └── readme.md ├── IEMOCAP ├── Dataset.py ├── __init__.py ├── model.py ├── readme.md ├── train.py └── utils │ ├── DGWT.py │ ├── DLWT.py │ ├── DWFormerBlock.py │ ├── __init__.py │ ├── modules.py │ └── vanillatransformer.py ├── Meld ├── Dataset.py ├── dev_sent_emo.csv ├── model.py ├── readme.md ├── test_sent_emo.csv ├── train.py └── train_sent_emo.csv └── README.md /Feature_Extractor/WavLM.py: -------------------------------------------------------------------------------- 1 | # -------------------------------------------------------- 2 | # WavLM: Large-Scale Self-Supervised Pre-training for Full Stack Speech Processing (https://arxiv.org/abs/2110.13900.pdf) 3 | # Github source: https://github.com/microsoft/unilm/tree/master/wavlm 4 | # Copyright (c) 2021 Microsoft 5 | # Licensed under The MIT License [see LICENSE for details] 6 | # Based on fairseq code bases 7 | # https://github.com/pytorch/fairseq 8 | # -------------------------------------------------------- 9 | 10 | import math 11 | import logging 12 | from typing import List, Optional, Tuple 13 | 14 | import numpy as np 15 | 16 | import torch 17 | import torch.nn as nn 18 | import torch.nn.functional as F 19 | from torch.nn import LayerNorm 20 | from modules import ( 21 | Fp32GroupNorm, 22 | Fp32LayerNorm, 23 | GradMultiply, 24 | MultiheadAttention, 25 | SamePad, 26 | init_bert_params, 27 | get_activation_fn, 28 | TransposeLast, 29 | GLU_Linear, 30 | ) 31 | 32 | logger = logging.getLogger(__name__) 33 | 34 | 35 | def compute_mask_indices( 36 | shape: Tuple[int, int], 37 | padding_mask: Optional[torch.Tensor], 38 | mask_prob: float, 39 | 
def compute_mask_indices(
    shape: Tuple[int, int],
    padding_mask: Optional[torch.Tensor],
    mask_prob: float,
    mask_length: int,
    mask_type: str = "static",
    mask_other: float = 0.0,
    min_masks: int = 0,
    no_overlap: bool = False,
    min_space: int = 0,
) -> np.ndarray:
    """
    Computes random mask spans for a given shape.

    Args:
        shape: the shape for which to compute masks.
            should be of size 2 where first element is batch size and 2nd is timesteps
        padding_mask: optional padding mask of the same size as shape, which will prevent masking padded elements
        mask_prob: probability for each token to be chosen as start of the span to be masked. this will be multiplied by
            number of timesteps divided by length of mask span to mask approximately this percentage of all elements.
            however due to overlaps, the actual number will be smaller (unless no_overlap is True)
        mask_type: how to compute mask lengths
            static = fixed size
            uniform = sample from uniform distribution [mask_other, mask_length*2]
            normal = sample from normal distribution with mean mask_length and stdev mask_other. mask is min 1 element
            poisson = sample from poisson distribution with lambda = mask length
        min_masks: minimum number of masked spans
        no_overlap: if false, will switch to an alternative recursive algorithm that prevents spans from overlapping
        min_space: only used if no_overlap is True, this is how many elements to keep unmasked between spans

    Returns:
        boolean ndarray of shape (batch, timesteps); True marks masked positions.
        Every row ends up with the same number of True entries (rows are trimmed
        down to the minimum count so the mask is rectangular).
    """

    bsz, all_sz = shape
    mask = np.full((bsz, all_sz), False)

    # probabilistic rounding: adding U[0,1) before int() truncation makes the
    # expected span count exactly mask_prob * all_sz / mask_length
    all_num_mask = int(
        mask_prob * all_sz / float(mask_length)
        + np.random.rand()
    )

    all_num_mask = max(min_masks, all_num_mask)

    mask_idcs = []
    for i in range(bsz):
        if padding_mask is not None:
            # restrict masking to the unpadded prefix of this row
            sz = all_sz - padding_mask[i].long().sum().item()
            num_mask = int(
                # add a random number for probabilistic rounding
                mask_prob * sz / float(mask_length)
                + np.random.rand()
            )
            num_mask = max(min_masks, num_mask)
        else:
            sz = all_sz
            num_mask = all_num_mask

        if mask_type == "static":
            lengths = np.full(num_mask, mask_length)
        elif mask_type == "uniform":
            lengths = np.random.randint(mask_other, mask_length * 2 + 1, size=num_mask)
        elif mask_type == "normal":
            lengths = np.random.normal(mask_length, mask_other, size=num_mask)
            lengths = [max(1, int(round(x))) for x in lengths]
        elif mask_type == "poisson":
            lengths = np.random.poisson(mask_length, size=num_mask)
            lengths = [int(round(x)) for x in lengths]
        else:
            raise Exception("unknown mask selection " + mask_type)

        if sum(lengths) == 0:
            # guarantee at least one (possibly shortened) span per row
            lengths[0] = min(mask_length, sz - 1)

        if no_overlap:
            mask_idc = []

            def arrange(s, e, length, keep_length):
                # place one span uniformly inside [s, e) and return the
                # remaining free sub-intervals that can still host a span
                span_start = np.random.randint(s, e - length)
                mask_idc.extend(span_start + i for i in range(length))

                new_parts = []
                if span_start - s - min_space >= keep_length:
                    new_parts.append((s, span_start - min_space + 1))
                if e - span_start - keep_length - min_space > keep_length:
                    new_parts.append((span_start + length + min_space, e))
                return new_parts

            parts = [(0, sz)]
            min_length = min(lengths)
            for length in sorted(lengths, reverse=True):
                # BUGFIX: np.int was deprecated in NumPy 1.20 and removed in
                # 1.24; the builtin int is the documented replacement.
                lens = np.fromiter(
                    (e - s if e - s >= length + min_space else 0 for s, e in parts),
                    int,
                )
                l_sum = np.sum(lens)
                if l_sum == 0:
                    break
                # pick the free interval with probability proportional to its size
                probs = lens / np.sum(lens)
                c = np.random.choice(len(parts), p=probs)
                s, e = parts.pop(c)
                parts.extend(arrange(s, e, length, min_length))
            mask_idc = np.asarray(mask_idc)
        else:
            min_len = min(lengths)
            if sz - min_len <= num_mask:
                min_len = sz - num_mask - 1

            mask_idc = np.random.choice(sz - min_len, num_mask, replace=False)

            # expand each span start into its covered offsets
            mask_idc = np.asarray(
                [
                    mask_idc[j] + offset
                    for j in range(len(mask_idc))
                    for offset in range(lengths[j])
                ]
            )

        mask_idcs.append(np.unique(mask_idc[mask_idc < sz]))

    # trim every row to the same count so all batch rows mask equally many steps
    min_len = min([len(m) for m in mask_idcs])
    for i, mask_idc in enumerate(mask_idcs):
        if len(mask_idc) > min_len:
            mask_idc = np.random.choice(mask_idc, min_len, replace=False)
        mask[i, mask_idc] = True

    return mask
class WavLMConfig:
    """Hyper-parameter container for WavLM.

    All fields are plain instance attributes with the defaults of the
    WavLM-Base recipe; a checkpoint's ``cfg`` dict can override any of them
    via the constructor or :meth:`update`.
    """

    # Default hyper-parameters, in the order they become instance attributes.
    _DEFAULTS = dict(
        # feature extractor: "default" has a single group norm with d groups in
        # the first conv block, whereas "layer_norm" has layer norms in every
        # block (meant to be used with normalize=True)
        extractor_mode="default",
        encoder_layers=12,                 # num encoder layers in the transformer
        encoder_embed_dim=768,             # encoder embedding dimension
        encoder_ffn_embed_dim=3072,        # encoder embedding dimension for FFN
        encoder_attention_heads=12,        # num encoder attention heads
        activation_fn="gelu",              # activation function to use
        layer_norm_first=False,            # apply layernorm first in the transformer
        # string describing convolutional feature extraction layers in form of a
        # python list that contains [(dim, kernel_size, stride), ...]
        conv_feature_layers="[(512,10,5)] + [(512,3,2)] * 4 + [(512,2,2)] * 2",
        conv_bias=False,                   # include bias in conv encoder
        feature_grad_mult=1.0,             # multiply feature extractor var grads by this
        normalize=False,                   # normalize input to have 0 mean and unit variance during training
        # dropouts
        dropout=0.1,                       # dropout probability for the transformer
        attention_dropout=0.1,             # dropout probability for attention weights
        activation_dropout=0.0,            # dropout probability after activation in FFN
        encoder_layerdrop=0.0,             # probability of dropping a transformer layer
        dropout_input=0.0,                 # dropout to apply to the input (after feat extr)
        dropout_features=0.0,              # dropout to apply to the features (after feat extr)
        # time masking
        mask_length=10,
        mask_prob=0.65,                    # probability of replacing a token with mask
        mask_selection="static",           # how to choose mask length
        mask_other=0,                      # secondary mask argument, see compute_mask_indices
        no_mask_overlap=False,             # whether to allow masks to overlap
        mask_min_space=1,                  # min space between spans (if no overlap is enabled)
        # channel masking
        mask_channel_length=10,
        mask_channel_prob=0.0,             # probability of replacing a feature with 0
        mask_channel_selection="static",
        mask_channel_other=0,
        no_mask_channel_overlap=False,
        mask_channel_min_space=1,
        # convolutional positional embeddings
        conv_pos=128,                      # number of filters
        conv_pos_groups=16,                # number of groups
        # relative position embedding
        relative_position_embedding=False,
        num_buckets=320,
        max_distance=1280,
        gru_rel_pos=False,                 # apply gated relative position embedding
    )

    def __init__(self, cfg=None):
        for key, value in self._DEFAULTS.items():
            setattr(self, key, value)
        if cfg is not None:
            self.update(cfg)

    def update(self, cfg: dict):
        """Overwrite attributes in bulk from a plain dict (checkpoint cfg)."""
        self.__dict__.update(cfg)
class WavLM(nn.Module):
    """WavLM backbone: a convolutional waveform encoder followed by a
    Transformer encoder (optionally with gated relative position bias).

    Feature extraction for downstream tasks goes through
    :meth:`extract_features`; the masking machinery mirrors wav2vec 2.0
    pre-training.
    """

    def __init__(
        self,
        cfg: WavLMConfig,
    ) -> None:
        super().__init__()
        logger.info(f"WavLM Config: {cfg.__dict__}")

        self.cfg = cfg
        # NOTE(review): eval() of a config-supplied string — acceptable only
        # because the string ships inside the trusted checkpoint; never feed
        # an untrusted cfg here.
        feature_enc_layers = eval(cfg.conv_feature_layers)
        # channel width of the last conv block = dimensionality of conv features
        self.embed = feature_enc_layers[-1][0]

        self.feature_extractor = ConvFeatureExtractionModel(
            conv_layers=feature_enc_layers,
            dropout=0.0,
            mode=cfg.extractor_mode,
            conv_bias=cfg.conv_bias,
        )

        # project conv features to the transformer width only when they differ
        self.post_extract_proj = (
            nn.Linear(self.embed, cfg.encoder_embed_dim)
            if self.embed != cfg.encoder_embed_dim
            else None
        )

        # time-masking hyper-parameters (see compute_mask_indices)
        self.mask_prob = cfg.mask_prob
        self.mask_selection = cfg.mask_selection
        self.mask_other = cfg.mask_other
        self.mask_length = cfg.mask_length
        self.no_mask_overlap = cfg.no_mask_overlap
        self.mask_min_space = cfg.mask_min_space

        # channel-masking hyper-parameters
        self.mask_channel_prob = cfg.mask_channel_prob
        self.mask_channel_selection = cfg.mask_channel_selection
        self.mask_channel_other = cfg.mask_channel_other
        self.mask_channel_length = cfg.mask_channel_length
        self.no_mask_channel_overlap = cfg.no_mask_channel_overlap
        self.mask_channel_min_space = cfg.mask_channel_min_space

        self.dropout_input = nn.Dropout(cfg.dropout_input)
        self.dropout_features = nn.Dropout(cfg.dropout_features)

        self.feature_grad_mult = cfg.feature_grad_mult

        # learned vector that replaces masked time steps
        self.mask_emb = nn.Parameter(
            torch.FloatTensor(cfg.encoder_embed_dim).uniform_()
        )

        self.encoder = TransformerEncoder(cfg)
        self.layer_norm = LayerNorm(self.embed)

    def apply_mask(self, x, padding_mask):
        """Randomly mask time steps (and optionally feature channels) in ``x``.

        Mutates ``x`` in place; returns the (possibly) masked tensor and the
        boolean time-mask indices (or None when time masking is disabled).
        """
        B, T, C = x.shape
        if self.mask_prob > 0:
            mask_indices = compute_mask_indices(
                (B, T),
                padding_mask,
                self.mask_prob,
                self.mask_length,
                self.mask_selection,
                self.mask_other,
                min_masks=2,
                no_overlap=self.no_mask_overlap,
                min_space=self.mask_min_space,
            )
            mask_indices = torch.from_numpy(mask_indices).to(x.device)
            # masked time steps are replaced by the learned mask embedding
            x[mask_indices] = self.mask_emb
        else:
            mask_indices = None

        if self.mask_channel_prob > 0:
            mask_channel_indices = compute_mask_indices(
                (B, C),
                None,
                self.mask_channel_prob,
                self.mask_channel_length,
                self.mask_channel_selection,
                self.mask_channel_other,
                no_overlap=self.no_mask_channel_overlap,
                min_space=self.mask_channel_min_space,
            )
            mask_channel_indices = (
                torch.from_numpy(mask_channel_indices)
                .to(x.device)
                .unsqueeze(1)
                .expand(-1, T, -1)  # same channel mask broadcast over all time steps
            )
            # masked channels are zeroed, not replaced by mask_emb
            x[mask_channel_indices] = 0

        return x, mask_indices

    def forward_padding_mask(
        self, features: torch.Tensor, padding_mask: torch.Tensor,
    ) -> torch.Tensor:
        """Downsample a sample-level padding mask to the conv feature rate.

        A feature frame counts as padding only when every waveform sample it
        covers is padding.
        """
        # drop trailing samples that do not fill a whole feature frame
        extra = padding_mask.size(1) % features.size(1)
        if extra > 0:
            padding_mask = padding_mask[:, :-extra]
        padding_mask = padding_mask.view(
            padding_mask.size(0), features.size(1), -1
        )
        padding_mask = padding_mask.all(-1)
        return padding_mask

    def extract_features(
        self,
        source: torch.Tensor,
        padding_mask: Optional[torch.Tensor] = None,
        mask: bool = False,
        ret_conv: bool = False,
        output_layer: Optional[int] = None,
        ret_layer_results: bool = False,
    ):
        """Run the conv extractor + transformer and return hidden features.

        source: raw waveform, (B, num_samples) float.
        output_layer: 1-based transformer layer to stop at (None = all layers).
        ret_conv: return the conv features instead of transformer output.
        ret_layer_results: also return per-layer (hidden, attn) tuples.
        Returns (features, padding_mask).
        """

        if self.feature_grad_mult > 0:
            features = self.feature_extractor(source)
            if self.feature_grad_mult != 1.0:
                # scale gradients flowing into the conv extractor without
                # changing the forward values
                features = GradMultiply.apply(features, self.feature_grad_mult)
        else:
            # grad mult of 0 == freeze the conv extractor entirely
            with torch.no_grad():
                features = self.feature_extractor(source)

        features = features.transpose(1, 2)  # (B, C, T) -> (B, T, C)
        features = self.layer_norm(features)

        if padding_mask is not None:
            padding_mask = self.forward_padding_mask(features, padding_mask)

        if self.post_extract_proj is not None:
            features = self.post_extract_proj(features)

        features = self.dropout_input(features)

        if mask:
            x, mask_indices = self.apply_mask(
                features, padding_mask
            )
        else:
            x = features

        # feature: (B, T, D), float
        # target: (B, T), long
        # x: (B, T, D), float
        # padding_mask: (B, T), bool
        # mask_indices: (B, T), bool
        # output_layer is 1-based for callers; the encoder indexes layers 0-based
        x, layer_results = self.encoder(
            x,
            padding_mask=padding_mask,
            layer=None if output_layer is None else output_layer - 1
        )

        res = {"x": x, "padding_mask": padding_mask, "features": features, "layer_results": layer_results}

        feature = res["features"] if ret_conv else res["x"]
        if ret_layer_results:
            feature = (feature, res["layer_results"])
        return feature, res["padding_mask"]
class ConvFeatureExtractionModel(nn.Module):
    """Stack of 1-D (or experimental 2-D) conv blocks that turns a raw
    waveform (B, T) into frame-level features (B, C, T').

    conv_layers: list of (dim, kernel_size, stride) triples.
    mode: "default" = group norm on the first block only;
          "layer_norm" = a (fp32) layer norm in every block.
    conv_type: "default" (1-D, used by WavLM), "conv2d", or "custom".
    """

    def __init__(
        self,
        conv_layers: List[Tuple[int, int, int]],
        dropout: float = 0.0,
        mode: str = "default",
        conv_bias: bool = False,
        conv_type: str = "default"
    ):
        super().__init__()

        assert mode in {"default", "layer_norm"}

        def block(
            n_in,
            n_out,
            k,
            stride,
            is_layer_norm=False,
            is_group_norm=False,
            conv_bias=False,
        ):
            # One conv block: Conv1d -> Dropout -> (norm) -> GELU.
            def make_conv():
                conv = nn.Conv1d(n_in, n_out, k, stride=stride, bias=conv_bias)
                nn.init.kaiming_normal_(conv.weight)
                return conv

            assert not (
                is_layer_norm and is_group_norm
            ), "layer norm and group norm are exclusive"

            if is_layer_norm:
                return nn.Sequential(
                    make_conv(),
                    nn.Dropout(p=dropout),
                    nn.Sequential(
                        TransposeLast(),
                        # BUGFIX: normalize over this block's own output width
                        # n_out instead of closing over the enclosing loop
                        # variable `dim` (same value at call time, but the
                        # closure silently broke if block() was called later).
                        Fp32LayerNorm(n_out, elementwise_affine=True),
                        TransposeLast(),
                    ),
                    nn.GELU(),
                )
            elif is_group_norm:
                return nn.Sequential(
                    make_conv(),
                    nn.Dropout(p=dropout),
                    # BUGFIX: same closure fix as above — use n_out, not `dim`
                    Fp32GroupNorm(n_out, n_out, affine=True),
                    nn.GELU(),
                )
            else:
                return nn.Sequential(make_conv(), nn.Dropout(p=dropout), nn.GELU())

        self.conv_type = conv_type
        if self.conv_type == "default":
            in_d = 1  # raw waveform has a single input channel
            self.conv_layers = nn.ModuleList()
            for i, cl in enumerate(conv_layers):
                assert len(cl) == 3, "invalid conv definition: " + str(cl)
                (dim, k, stride) = cl

                self.conv_layers.append(
                    block(
                        in_d,
                        dim,
                        k,
                        stride,
                        is_layer_norm=mode == "layer_norm",
                        # "default" mode: group norm on the very first block only
                        is_group_norm=mode == "default" and i == 0,
                        conv_bias=conv_bias,
                    )
                )
                in_d = dim
        elif self.conv_type == "conv2d":
            in_d = 1
            self.conv_layers = nn.ModuleList()
            for i, cl in enumerate(conv_layers):
                assert len(cl) == 3
                (dim, k, stride) = cl

                self.conv_layers.append(
                    torch.nn.Conv2d(in_d, dim, k, stride)
                )
                self.conv_layers.append(torch.nn.ReLU())
                in_d = dim
        elif self.conv_type == "custom":
            in_d = 1
            idim = 80  # assumes 80-dim input features (e.g. fbank) — TODO confirm
            self.conv_layers = nn.ModuleList()
            for i, cl in enumerate(conv_layers):
                assert len(cl) == 3
                (dim, k, stride) = cl
                self.conv_layers.append(
                    torch.nn.Conv2d(in_d, dim, k, stride, padding=1)
                )
                self.conv_layers.append(
                    torch.nn.LayerNorm([dim, idim])
                )
                self.conv_layers.append(torch.nn.ReLU())
                in_d = dim
                if (i + 1) % 2 == 0:
                    # halve the feature axis every second block
                    self.conv_layers.append(
                        torch.nn.MaxPool2d(2, stride=2, ceil_mode=True)
                    )
                    idim = int(math.ceil(idim / 2))
        else:
            # NOTE(review): unknown conv_type is silently ignored here and only
            # fails later in forward() with an AttributeError — kept as-is to
            # preserve behavior.
            pass

    def forward(self, x, mask=None):
        """Apply the conv stack; returns (B, C, T') features.

        mask is accepted for interface compatibility but unused.
        """
        # BxT -> BxCxT (add the channel axis)
        x = x.unsqueeze(1)
        if self.conv_type == "custom":
            for conv in self.conv_layers:
                if isinstance(conv, nn.LayerNorm):
                    # LayerNorm here normalizes (dim, idim); swap axes around it
                    x = x.transpose(1, 2)
                    x = conv(x).transpose(1, 2)
                else:
                    x = conv(x)
            x = x.transpose(2, 3).contiguous()
            x = x.view(x.size(0), -1, x.size(-1))
        else:
            for conv in self.conv_layers:
                x = conv(x)
            if self.conv_type == "conv2d":
                # collapse (channels, freq) into one feature axis
                b, c, t, f = x.size()
                x = x.transpose(2, 3).contiguous().view(b, c * f, t)
        return x
class TransformerEncoder(nn.Module):
    """Transformer encoder stack with a convolutional positional embedding
    and (optionally) relative position bias shared from the first layer."""

    def __init__(self, args):
        super().__init__()

        self.dropout = args.dropout
        self.embedding_dim = args.encoder_embed_dim

        # convolutional positional embedding (wav2vec 2.0 style):
        # a grouped Conv1d over time whose output is added to the input
        self.pos_conv = nn.Conv1d(
            self.embedding_dim,
            self.embedding_dim,
            kernel_size=args.conv_pos,
            padding=args.conv_pos // 2,
            groups=args.conv_pos_groups,
        )
        dropout = 0
        # init scheme from fairseq: keeps the pos-conv output variance stable
        std = math.sqrt((4 * (1.0 - dropout)) / (args.conv_pos * self.embedding_dim))
        nn.init.normal_(self.pos_conv.weight, mean=0, std=std)
        nn.init.constant_(self.pos_conv.bias, 0)

        self.pos_conv = nn.utils.weight_norm(self.pos_conv, name="weight", dim=2)
        # SamePad trims the extra frame produced by even kernel sizes
        self.pos_conv = nn.Sequential(self.pos_conv, SamePad(args.conv_pos), nn.GELU())

        # older configs may lack the relative-position fields; default them off
        if hasattr(args, "relative_position_embedding"):
            self.relative_position_embedding = args.relative_position_embedding
            self.num_buckets = args.num_buckets
            self.max_distance = args.max_distance
        else:
            self.relative_position_embedding = False
            self.num_buckets = 0
            self.max_distance = 0

        self.layers = nn.ModuleList(
            [
                TransformerSentenceEncoderLayer(
                    embedding_dim=self.embedding_dim,
                    ffn_embedding_dim=args.encoder_ffn_embed_dim,
                    num_attention_heads=args.encoder_attention_heads,
                    dropout=self.dropout,
                    attention_dropout=args.attention_dropout,
                    activation_dropout=args.activation_dropout,
                    activation_fn=args.activation_fn,
                    layer_norm_first=args.layer_norm_first,
                    # only layer 0 owns the relative attention bias table;
                    # later layers reuse its computed pos_bias (see forward loop)
                    has_relative_attention_bias=(self.relative_position_embedding and i == 0),
                    num_buckets=self.num_buckets,
                    max_distance=self.max_distance,
                    gru_rel_pos=args.gru_rel_pos,
                )
                for i in range(args.encoder_layers)
            ]
        )

        self.layer_norm_first = args.layer_norm_first
        self.layer_norm = LayerNorm(self.embedding_dim)
        self.layerdrop = args.encoder_layerdrop

        self.apply(init_bert_params)

    def forward(self, x, padding_mask=None, streaming_mask=None, layer=None):
        """Encode x; `layer` (0-based) truncates the stack at that layer."""
        x, layer_results = self.extract_features(x, padding_mask, streaming_mask, layer)

        # pre-LN stacks need a final norm, but not when an intermediate
        # layer's (un-normalized) output was explicitly requested
        if self.layer_norm_first and layer is None:
            x = self.layer_norm(x)

        return x, layer_results

    def extract_features(self, x, padding_mask=None, streaming_mask=None, tgt_layer=None):
        """Run the layer stack; collects per-layer results when tgt_layer is set."""

        if padding_mask is not None:
            # zero padded positions so pos_conv does not leak into them
            x[padding_mask] = 0

        x_conv = self.pos_conv(x.transpose(1, 2))
        x_conv = x_conv.transpose(1, 2)
        x += x_conv

        if not self.layer_norm_first:
            x = self.layer_norm(x)

        x = F.dropout(x, p=self.dropout, training=self.training)

        # B x T x C -> T x B x C
        x = x.transpose(0, 1)

        layer_results = []
        z = None
        if tgt_layer is not None:
            layer_results.append((x, z))
        r = None
        pos_bias = None
        for i, layer in enumerate(self.layers):
            # layerdrop: randomly skip whole layers during training only
            dropout_probability = np.random.random()
            if not self.training or (dropout_probability > self.layerdrop):
                # pos_bias computed by layer 0 is threaded through all layers
                x, z, pos_bias = layer(x, self_attn_padding_mask=padding_mask, need_weights=False,
                                       self_attn_mask=streaming_mask, pos_bias=pos_bias)
            if tgt_layer is not None:
                layer_results.append((x, z))
            if i == tgt_layer:
                r = x
                break

        if r is not None:
            x = r

        # T x B x C -> B x T x C
        x = x.transpose(0, 1)

        return x, layer_results
619 | """ 620 | 621 | def __init__( 622 | self, 623 | embedding_dim: float = 768, 624 | ffn_embedding_dim: float = 3072, 625 | num_attention_heads: float = 8, 626 | dropout: float = 0.1, 627 | attention_dropout: float = 0.1, 628 | activation_dropout: float = 0.1, 629 | activation_fn: str = "relu", 630 | layer_norm_first: bool = False, 631 | has_relative_attention_bias: bool = False, 632 | num_buckets: int = 0, 633 | max_distance: int = 0, 634 | rescale_init: bool = False, 635 | gru_rel_pos: bool = False, 636 | ) -> None: 637 | 638 | super().__init__() 639 | # Initialize parameters 640 | self.embedding_dim = embedding_dim 641 | self.dropout = dropout 642 | self.activation_dropout = activation_dropout 643 | 644 | # Initialize blocks 645 | self.activation_name = activation_fn 646 | self.activation_fn = get_activation_fn(activation_fn) 647 | self.self_attn = MultiheadAttention( 648 | self.embedding_dim, 649 | num_attention_heads, 650 | dropout=attention_dropout, 651 | self_attention=True, 652 | has_relative_attention_bias=has_relative_attention_bias, 653 | num_buckets=num_buckets, 654 | max_distance=max_distance, 655 | rescale_init=rescale_init, 656 | gru_rel_pos=gru_rel_pos, 657 | ) 658 | 659 | self.dropout1 = nn.Dropout(dropout) 660 | self.dropout2 = nn.Dropout(self.activation_dropout) 661 | self.dropout3 = nn.Dropout(dropout) 662 | 663 | self.layer_norm_first = layer_norm_first 664 | 665 | # layer norm associated with the self attention layer 666 | self.self_attn_layer_norm = LayerNorm(self.embedding_dim) 667 | 668 | if self.activation_name == "glu": 669 | self.fc1 = GLU_Linear(self.embedding_dim, ffn_embedding_dim, "swish") 670 | else: 671 | self.fc1 = nn.Linear(self.embedding_dim, ffn_embedding_dim) 672 | self.fc2 = nn.Linear(ffn_embedding_dim, self.embedding_dim) 673 | 674 | # layer norm associated with the position wise feed-forward NN 675 | self.final_layer_norm = LayerNorm(self.embedding_dim) 676 | 677 | def forward( 678 | self, 679 | x: torch.Tensor, 680 | 
self_attn_mask: torch.Tensor = None, 681 | self_attn_padding_mask: torch.Tensor = None, 682 | need_weights: bool = False, 683 | pos_bias=None 684 | ): 685 | """ 686 | LayerNorm is applied either before or after the self-attention/ffn 687 | modules similar to the original Transformer imlementation. 688 | """ 689 | residual = x 690 | 691 | if self.layer_norm_first: 692 | x = self.self_attn_layer_norm(x) 693 | x, attn, pos_bias = self.self_attn( 694 | query=x, 695 | key=x, 696 | value=x, 697 | key_padding_mask=self_attn_padding_mask, 698 | need_weights=False, 699 | attn_mask=self_attn_mask, 700 | position_bias=pos_bias 701 | ) 702 | x = self.dropout1(x) 703 | x = residual + x 704 | 705 | residual = x 706 | x = self.final_layer_norm(x) 707 | if self.activation_name == "glu": 708 | x = self.fc1(x) 709 | else: 710 | x = self.activation_fn(self.fc1(x)) 711 | x = self.dropout2(x) 712 | x = self.fc2(x) 713 | x = self.dropout3(x) 714 | x = residual + x 715 | else: 716 | x, attn, pos_bias = self.self_attn( 717 | query=x, 718 | key=x, 719 | value=x, 720 | key_padding_mask=self_attn_padding_mask, 721 | need_weights=need_weights, 722 | attn_mask=self_attn_mask, 723 | position_bias=pos_bias 724 | ) 725 | 726 | x = self.dropout1(x) 727 | x = residual + x 728 | 729 | x = self.self_attn_layer_norm(x) 730 | 731 | residual = x 732 | if self.activation_name == "glu": 733 | x = self.fc1(x) 734 | else: 735 | x = self.activation_fn(self.fc1(x)) 736 | x = self.dropout2(x) 737 | x = self.fc2(x) 738 | x = self.dropout3(x) 739 | x = residual + x 740 | x = self.final_layer_norm(x) 741 | 742 | return x, attn, pos_bias 743 | 744 | -------------------------------------------------------------------------------- /Feature_Extractor/data_preprocess.py: -------------------------------------------------------------------------------- 1 | import os 2 | os.environ["CUDA_VISIBLE_DEVICES"] = '7' 3 | import librosa 4 | import numpy as np 5 | import pandas as pd 6 | import torch 7 | import torch.nn as nn 8 
# Extract frame-level WavLM (layer 12) features for IEMOCAP and MELD and save
# them as per-utterance .npy files.
import os
os.environ["CUDA_VISIBLE_DEVICES"] = '7'
import librosa
import numpy as np
import pandas as pd
import torch
import torch.nn as nn
import fairseq
import lmdb
import shutil
import math
from WavLM import WavLM, WavLMConfig

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
cuda = True if torch.cuda.is_available() else False
# Goal: slice long recordings into utterance segments and keep only those.
# (translated from the original Chinese note)

checkpoint = torch.load('/148Dataset/PretrainedModel/model.audio/english/WavLM-Large.pt')  # Pretrained model checkpoint path.
cfg = WavLMConfig(checkpoint['cfg'])
model = WavLM(cfg)
model.load_state_dict(checkpoint['model'])
model = model.to(device)
model.eval()

# IEMOCAP:
# FIX: the original reused the single name `csv` for both the path prefix and
# the DataFrame returned by pd.read_csv, and re-assigned the loop-invariant
# path prefixes on every iteration. Use distinct names and hoist the
# invariants out of the loop.
csv_prefix = r'iemocap_data/data'  # csv path.
wav_root = r'/148Dataset/data-chen.shuaiqi/IEMOCAP/IEMOCAP_full_release/Session'  # data path.
feat_root = r'/148Dataset/data-chen.shuaiqi/IEMOCAP/IEMOCAP_full_release/Feature/WavLM/Session'  # feature path.
for i in range(1, 6):
    wav_dir = wav_root + str(i) + r'/sentences/wav/'
    df = pd.read_csv(csv_prefix + str(i) + '.csv')
    name1 = df.dataname.values      # per-dialogue sub-directory
    name2 = df.newdataname.values   # per-utterance wav / output stem
    for j in range(len(name2)):
        data, _ = librosa.load(wav_dir + name1[j] + '/' + name2[j] + '.wav', sr=16000)
        data = data[np.newaxis, :]  # add the batch axis: (1, num_samples)
        data = torch.Tensor(data).to(device)
        with torch.no_grad():
            feature = model.extract_features(source=data, output_layer=12)[0]
            feature = feature.squeeze(0)
            feature = feature.cpu().data.numpy()
        np.save(feat_root + str(i) + '/' + name2[j] + '.npy', feature)


# -------------------------------------------------------------------------------------
# MELD:

def MELD_extractor(inputdir, outputdir):
    """Extract layer-12 WavLM features for every audio file directly under
    `inputdir` and save them to `outputdir/<stem>.npy`."""
    name = os.listdir(inputdir)
    for i in range(len(name)):
        data1 = inputdir + name[i]
        names = name[i]
        data, _ = librosa.load(data1, sr=16000)
        data = data[np.newaxis, :]
        data = torch.Tensor(data).to(device)
        with torch.no_grad():
            feature = model.extract_features(source=data, output_layer=12)[0]
            feature = feature.squeeze(0)
            feature = feature.cpu().data.numpy()
        print(feature.shape)
        # names[:-4] strips the 4-char extension (e.g. ".mp4"/".wav")
        np.save(outputdir + names[:-4] + '.npy', feature)

# train:
inputdir = r'/148Dataset/data-chen.shuaiqi/MELD/MELD.Raw/train_splits/'
outputdir = r'/148Dataset/data-chen.shuaiqi/MELD/MELD.Raw/feature/WavLM12/train/'
MELD_extractor(inputdir, outputdir)
# dev:
inputdir = r'/148Dataset/data-chen.shuaiqi/MELD/MELD.Raw/dev_splits_complete/'
outputdir = r'/148Dataset/data-chen.shuaiqi/MELD/MELD.Raw/feature/WavLM12/dev/'
MELD_extractor(inputdir, outputdir)
# test:
inputdir = r'/148Dataset/data-chen.shuaiqi/MELD/MELD.Raw/output_repeated_splits_test/'
outputdir = r'/148Dataset/data-chen.shuaiqi/MELD/MELD.Raw/feature/WavLM12/test/'
MELD_extractor(inputdir, outputdir)
17 | -------------------------------------------------------------------------------- /Feature_Extractor/iemocap_data/valid1.csv: -------------------------------------------------------------------------------- 1 | name,label 2 | Ses01F_script02_1_F000,3 3 | Ses01F_script02_1_F004,3 4 | Ses01F_script02_1_F006,0 5 | Ses01F_script02_1_F007,0 6 | Ses01F_script02_1_F008,3 7 | Ses01F_script02_1_F009,2 8 | Ses01F_script02_1_F010,1 9 | Ses01F_script02_1_F015,3 10 | Ses01F_script02_1_F024,3 11 | Ses01F_script02_1_F025,3 12 | Ses01F_script02_1_M002,2 13 | Ses01F_script02_1_M007,3 14 | Ses01F_script02_1_M008,3 15 | Ses01F_script02_1_M009,3 16 | Ses01F_script02_1_M010,3 17 | Ses01F_script02_1_M011,3 18 | Ses01F_script02_1_M013,3 19 | Ses01F_script02_1_M014,3 20 | Ses01F_script02_1_M015,3 21 | Ses01F_script02_1_M016,2 22 | Ses01F_script02_1_M020,3 23 | Ses01F_script02_1_M021,3 24 | Ses01F_script02_1_M023,3 25 | Ses01F_script02_1_M024,3 26 | Ses01F_script02_1_M025,3 27 | Ses01F_script02_1_M026,3 28 | Ses01F_script02_1_M029,3 29 | Ses01F_script02_1_M031,3 30 | Ses01F_script02_1_M032,3 31 | Ses01F_script02_1_M033,3 32 | Ses01F_script02_1_M034,3 33 | Ses01F_script02_1_M035,2 34 | Ses01F_script02_1_M036,2 35 | Ses01F_script02_1_M038,2 36 | Ses01F_script02_1_M040,2 37 | Ses01F_script02_1_M043,2 38 | Ses01F_script02_1_M044,2 39 | Ses01F_impro04_F000,3 40 | Ses01F_impro04_F001,3 41 | Ses01F_impro04_F005,3 42 | Ses01F_impro04_F006,3 43 | Ses01F_impro04_F009,3 44 | Ses01F_impro04_F013,3 45 | Ses01F_impro04_F014,3 46 | Ses01F_impro04_F028,0 47 | Ses01F_impro04_F029,0 48 | Ses01F_impro04_F030,0 49 | Ses01F_impro04_F031,0 50 | Ses01F_impro04_F032,0 51 | Ses01F_impro04_F033,0 52 | Ses01F_impro04_M000,3 53 | Ses01F_impro04_M001,3 54 | Ses01F_impro04_M002,3 55 | Ses01F_impro04_M003,3 56 | Ses01F_impro04_M004,3 57 | Ses01F_impro04_M005,3 58 | Ses01F_impro04_M006,3 59 | Ses01F_impro04_M007,3 60 | Ses01F_impro04_M008,3 61 | Ses01F_impro04_M009,3 62 | Ses01F_impro04_M010,3 63 | 
Ses01F_impro04_M011,3 64 | Ses01F_impro04_M012,3 65 | Ses01F_impro04_M013,3 66 | Ses01F_impro04_M014,3 67 | Ses01F_impro04_M015,3 68 | Ses01F_impro04_M016,3 69 | Ses01F_impro04_M017,3 70 | Ses01F_impro04_M018,3 71 | Ses01F_impro04_M019,3 72 | Ses01F_impro04_M020,3 73 | Ses01F_impro04_M021,3 74 | Ses01F_impro04_M022,3 75 | Ses01F_impro04_M023,3 76 | Ses01F_impro04_M024,3 77 | Ses01F_impro04_M025,3 78 | Ses01F_impro04_M026,3 79 | Ses01F_impro04_M027,3 80 | Ses01F_impro04_M028,3 81 | Ses01F_impro04_M031,3 82 | Ses01F_impro04_M032,3 83 | Ses01M_script01_1_F004,1 84 | Ses01M_script01_1_F005,1 85 | Ses01M_script01_1_F006,1 86 | Ses01M_script01_1_F008,0 87 | Ses01M_script01_1_F011,0 88 | Ses01M_script01_1_F014,0 89 | Ses01M_script01_1_F015,0 90 | Ses01M_script01_1_F016,0 91 | Ses01M_script01_1_F017,0 92 | Ses01M_script01_1_F020,0 93 | Ses01M_script01_1_F023,0 94 | Ses01M_script01_1_F024,0 95 | Ses01M_script01_1_F025,3 96 | Ses01M_script01_1_F026,3 97 | Ses01M_script01_1_F029,0 98 | Ses01M_script01_1_F030,0 99 | Ses01M_script01_1_F031,1 100 | Ses01M_script01_1_F032,0 101 | Ses01M_script01_1_F035,0 102 | Ses01M_script01_1_F036,0 103 | Ses01M_script01_1_F037,0 104 | Ses01M_script01_1_F039,0 105 | Ses01M_script01_1_F040,0 106 | Ses01M_script01_1_F041,0 107 | Ses01M_script01_1_F042,0 108 | Ses01M_script01_1_F043,0 109 | Ses01M_script01_1_M000,3 110 | Ses01M_script01_1_M002,3 111 | Ses01M_script01_1_M003,1 112 | Ses01M_script01_1_M004,1 113 | Ses01M_script01_1_M005,1 114 | Ses01M_script01_1_M006,1 115 | Ses01M_script01_1_M007,1 116 | Ses01M_script01_1_M017,3 117 | Ses01M_script01_1_M020,0 118 | Ses01M_script01_1_M021,0 119 | Ses01M_script01_1_M023,0 120 | Ses01M_script01_1_M024,1 121 | Ses01M_script01_1_M034,0 122 | Ses01M_script01_1_M035,0 123 | Ses01M_script01_1_M037,3 124 | Ses01M_script01_1_M038,1 125 | Ses01M_script01_1_M040,1 126 | Ses01M_script01_1_M041,0 127 | Ses01M_script01_1_M042,1 128 | Ses01M_script01_2_F000,0 129 | Ses01M_script01_2_F001,0 130 | 
Ses01M_script01_2_F004,0 131 | Ses01M_script01_2_F005,0 132 | Ses01M_script01_2_F009,0 133 | Ses01M_script01_2_F011,0 134 | Ses01M_script01_2_F012,0 135 | Ses01M_script01_2_F013,0 136 | Ses01M_script01_2_M000,3 137 | Ses01M_script01_2_M001,3 138 | Ses01M_script01_2_M007,3 139 | Ses01M_script01_2_M015,0 140 | Ses01M_script01_2_M016,0 141 | Ses01M_script01_2_M017,0 142 | Ses01M_impro06_F000,3 143 | Ses01M_impro06_F001,3 144 | Ses01M_impro06_F002,3 145 | Ses01M_impro06_F003,1 146 | Ses01M_impro06_F004,1 147 | Ses01M_impro06_F005,1 148 | Ses01M_impro06_F006,1 149 | Ses01M_impro06_F007,3 150 | Ses01M_impro06_F008,3 151 | Ses01M_impro06_F009,3 152 | Ses01M_impro06_F010,3 153 | Ses01M_impro06_F011,3 154 | Ses01M_impro06_F012,3 155 | Ses01M_impro06_F013,3 156 | Ses01M_impro06_F014,3 157 | Ses01M_impro06_F015,3 158 | Ses01M_impro06_F016,3 159 | Ses01M_impro06_F017,3 160 | Ses01M_impro06_F018,3 161 | Ses01M_impro06_F019,3 162 | Ses01M_impro06_F020,3 163 | Ses01M_impro06_F021,3 164 | Ses01M_impro06_F022,3 165 | Ses01M_impro06_F023,3 166 | Ses01M_impro06_F024,3 167 | Ses01M_impro06_F025,3 168 | Ses01M_impro06_F026,3 169 | Ses01M_impro06_F027,3 170 | Ses01M_impro06_F028,3 171 | Ses01M_impro06_M000,1 172 | Ses01M_impro06_M001,1 173 | Ses01M_impro06_M002,1 174 | Ses01M_impro06_M003,1 175 | Ses01M_impro06_M004,1 176 | Ses01M_impro06_M005,1 177 | Ses01M_impro06_M006,1 178 | Ses01M_impro06_M007,1 179 | Ses01M_impro06_M008,1 180 | Ses01M_impro06_M009,1 181 | Ses01M_impro06_M010,1 182 | Ses01M_impro06_M011,1 183 | Ses01M_impro06_M012,1 184 | Ses01M_impro06_M013,1 185 | Ses01M_impro06_M014,1 186 | Ses01M_impro06_M015,1 187 | Ses01M_impro06_M016,1 188 | Ses01M_impro06_M017,1 189 | Ses01M_impro06_M018,1 190 | Ses01M_impro06_M019,1 191 | Ses01M_impro06_M020,1 192 | Ses01M_impro06_M021,1 193 | Ses01M_impro06_M022,1 194 | Ses01M_impro06_M023,1 195 | Ses01M_impro06_M024,1 196 | Ses01M_impro06_M025,1 197 | Ses01M_impro06_M026,1 198 | Ses01M_impro06_M027,1 199 | Ses01M_impro06_M028,1 200 | 
Ses01M_impro06_M029,1 201 | Ses01M_impro06_M030,1 202 | Ses01M_impro07_F000,3 203 | Ses01M_impro07_F001,2 204 | Ses01M_impro07_F003,2 205 | Ses01M_impro07_F004,2 206 | Ses01M_impro07_F005,2 207 | Ses01M_impro07_F006,2 208 | Ses01M_impro07_F007,2 209 | Ses01M_impro07_F008,2 210 | Ses01M_impro07_F009,2 211 | Ses01M_impro07_F010,3 212 | Ses01M_impro07_F011,2 213 | Ses01M_impro07_F012,3 214 | Ses01M_impro07_F013,2 215 | Ses01M_impro07_F014,2 216 | Ses01M_impro07_F015,2 217 | Ses01M_impro07_F016,2 218 | Ses01M_impro07_F018,2 219 | Ses01M_impro07_F020,3 220 | Ses01M_impro07_F021,2 221 | Ses01M_impro07_F022,2 222 | Ses01M_impro07_F023,2 223 | Ses01M_impro07_F024,2 224 | Ses01M_impro07_F025,2 225 | Ses01M_impro07_F026,2 226 | Ses01M_impro07_F027,2 227 | Ses01M_impro07_F033,2 228 | Ses01M_impro07_F034,2 229 | Ses01M_impro07_M000,2 230 | Ses01M_impro07_M001,2 231 | Ses01M_impro07_M002,2 232 | Ses01M_impro07_M003,2 233 | Ses01M_impro07_M004,2 234 | Ses01M_impro07_M005,2 235 | Ses01M_impro07_M006,2 236 | Ses01M_impro07_M007,2 237 | Ses01M_impro07_M008,2 238 | Ses01M_impro07_M009,2 239 | Ses01M_impro07_M010,2 240 | Ses01M_impro07_M011,2 241 | Ses01M_impro07_M013,2 242 | Ses01M_impro07_M017,2 243 | Ses01M_impro07_M018,2 244 | Ses01M_impro07_M019,2 245 | Ses01M_impro07_M020,3 246 | Ses01M_impro07_M024,2 247 | Ses01M_impro07_M026,3 248 | Ses01M_impro07_M030,3 249 | Ses01M_impro07_M032,2 250 | Ses01M_impro07_M033,2 251 | Ses01F_script03_2_F000,2 252 | Ses01F_script03_2_F001,2 253 | Ses01F_script03_2_F002,2 254 | Ses01F_script03_2_F004,3 255 | Ses01F_script03_2_F005,3 256 | Ses01F_script03_2_F013,0 257 | Ses01F_script03_2_F014,0 258 | Ses01F_script03_2_F015,0 259 | Ses01F_script03_2_F016,0 260 | Ses01F_script03_2_F017,0 261 | Ses01F_script03_2_F018,0 262 | Ses01F_script03_2_F021,0 263 | Ses01F_script03_2_F022,0 264 | Ses01F_script03_2_F023,0 265 | Ses01F_script03_2_F024,0 266 | Ses01F_script03_2_F025,0 267 | Ses01F_script03_2_F026,0 268 | Ses01F_script03_2_F027,0 269 | 
Ses01F_script03_2_F028,0 270 | Ses01F_script03_2_F029,0 271 | Ses01F_script03_2_F030,3 272 | Ses01F_script03_2_F031,3 273 | Ses01F_script03_2_F032,3 274 | Ses01F_script03_2_F034,0 275 | Ses01F_script03_2_F035,0 276 | Ses01F_script03_2_F036,0 277 | Ses01F_script03_2_F037,0 278 | Ses01F_script03_2_F038,0 279 | Ses01F_script03_2_F039,0 280 | Ses01F_script03_2_F040,0 281 | Ses01F_script03_2_M001,3 282 | Ses01F_script03_2_M002,3 283 | Ses01F_script03_2_M004,3 284 | Ses01F_script03_2_M007,0 285 | Ses01F_script03_2_M010,3 286 | Ses01F_script03_2_M011,3 287 | Ses01F_script03_2_M014,0 288 | Ses01F_script03_2_M016,3 289 | Ses01F_script03_2_M018,3 290 | Ses01F_script03_2_M019,3 291 | Ses01F_script03_2_M020,0 292 | Ses01F_script03_2_M021,0 293 | Ses01F_script03_2_M022,0 294 | Ses01F_script03_2_M023,0 295 | Ses01F_script03_2_M024,0 296 | Ses01F_script03_2_M025,0 297 | Ses01F_script03_2_M026,0 298 | Ses01F_script03_2_M027,0 299 | Ses01F_script03_2_M029,0 300 | Ses01F_script03_2_M030,0 301 | Ses01F_script03_2_M031,3 302 | Ses01F_script03_2_M032,0 303 | Ses01F_script03_2_M033,0 304 | Ses01F_script03_2_M034,0 305 | Ses01F_script03_2_M035,0 306 | Ses01F_script03_2_M036,0 307 | Ses01F_script03_2_M037,0 308 | Ses01F_script03_2_M038,0 309 | Ses01F_script03_2_M039,0 310 | Ses01F_script03_2_M040,0 311 | Ses01F_impro07_F000,2 312 | Ses01F_impro07_F002,2 313 | Ses01F_impro07_F003,2 314 | Ses01F_impro07_F004,2 315 | Ses01F_impro07_F005,2 316 | Ses01F_impro07_F009,2 317 | Ses01F_impro07_F010,2 318 | Ses01F_impro07_F012,2 319 | Ses01F_impro07_F013,2 320 | Ses01F_impro07_F016,2 321 | Ses01F_impro07_M000,2 322 | Ses01F_impro07_M001,2 323 | Ses01F_impro07_M002,2 324 | Ses01F_impro07_M003,2 325 | Ses01F_impro07_M004,2 326 | Ses01F_impro07_M005,2 327 | Ses01F_impro07_M006,2 328 | Ses01F_impro07_M007,2 329 | Ses01F_impro07_M008,2 330 | Ses01F_impro07_M009,2 331 | Ses01F_impro07_M012,3 332 | Ses01F_impro07_M013,3 333 | Ses01F_impro07_M014,2 334 | Ses01F_impro07_M015,2 335 | Ses01F_impro07_M017,2 336 
| Ses01F_impro07_M018,3 337 | Ses01F_impro07_M019,2 338 | Ses01M_script02_2_F000,3 339 | Ses01M_script02_2_F010,0 340 | Ses01M_script02_2_F011,0 341 | Ses01M_script02_2_F013,0 342 | Ses01M_script02_2_F014,0 343 | Ses01M_script02_2_F015,0 344 | Ses01M_script02_2_F016,0 345 | Ses01M_script02_2_F019,3 346 | Ses01M_script02_2_F022,1 347 | Ses01M_script02_2_F024,1 348 | Ses01M_script02_2_F025,1 349 | Ses01M_script02_2_F026,1 350 | Ses01M_script02_2_F030,0 351 | Ses01M_script02_2_F031,0 352 | Ses01M_script02_2_F032,0 353 | Ses01M_script02_2_F033,1 354 | Ses01M_script02_2_F034,1 355 | Ses01M_script02_2_F035,1 356 | Ses01M_script02_2_F036,1 357 | Ses01M_script02_2_F037,1 358 | Ses01M_script02_2_F040,1 359 | Ses01M_script02_2_F044,2 360 | Ses01M_script02_2_F045,2 361 | Ses01M_script02_2_F046,1 362 | Ses01M_script02_2_F047,3 363 | Ses01M_script02_2_M001,2 364 | Ses01M_script02_2_M002,2 365 | Ses01M_script02_2_M003,2 366 | Ses01M_script02_2_M008,3 367 | Ses01M_script02_2_M013,3 368 | Ses01M_script02_2_M019,3 369 | Ses01M_script02_2_M020,3 370 | Ses01M_script02_2_M021,3 371 | Ses01M_script02_2_M022,3 372 | Ses01M_script02_2_M023,3 373 | Ses01M_script02_2_M025,3 374 | Ses01M_script02_2_M027,3 375 | Ses01M_script02_2_M028,3 376 | Ses01M_script02_2_M029,3 377 | Ses01M_script02_2_M030,3 378 | Ses01M_script02_2_M033,0 379 | Ses01M_script02_2_M035,0 380 | Ses01M_script02_2_M037,1 381 | Ses01M_script02_2_M038,1 382 | Ses01M_script02_2_M039,3 383 | Ses01M_script02_2_M040,3 384 | Ses01M_script02_2_M041,1 385 | Ses01M_script02_2_M042,1 386 | Ses01M_script02_2_M043,1 387 | Ses01M_script02_2_M044,1 388 | Ses01M_script02_2_M045,1 389 | Ses01M_script02_2_M049,2 390 | Ses01M_script02_2_M050,3 391 | Ses01M_script02_2_M051,3 392 | Ses01M_script02_2_M052,2 393 | Ses01M_script02_2_M053,2 394 | Ses01M_script02_2_M054,3 395 | Ses01F_script02_2_F000,3 396 | Ses01F_script02_2_F001,3 397 | Ses01F_script02_2_F006,0 398 | Ses01F_script02_2_F008,0 399 | Ses01F_script02_2_F009,0 400 | 
Ses01F_script02_2_F010,0 401 | Ses01F_script02_2_F015,1 402 | Ses01F_script02_2_F016,0 403 | Ses01F_script02_2_F017,3 404 | Ses01F_script02_2_F019,2 405 | Ses01F_script02_2_F022,1 406 | Ses01F_script02_2_F023,3 407 | Ses01F_script02_2_F026,3 408 | Ses01F_script02_2_F034,1 409 | Ses01F_script02_2_F035,1 410 | Ses01F_script02_2_F036,1 411 | Ses01F_script02_2_F040,1 412 | Ses01F_script02_2_F041,3 413 | Ses01F_script02_2_F042,2 414 | Ses01F_script02_2_F043,2 415 | Ses01F_script02_2_F044,2 416 | Ses01F_script02_2_F045,2 417 | Ses01F_script02_2_F046,1 418 | Ses01F_script02_2_F047,3 419 | Ses01F_script02_2_M000,2 420 | Ses01F_script02_2_M001,2 421 | Ses01F_script02_2_M002,3 422 | Ses01F_script02_2_M006,3 423 | Ses01F_script02_2_M016,3 424 | Ses01F_script02_2_M017,3 425 | Ses01F_script02_2_M019,3 426 | Ses01F_script02_2_M021,3 427 | Ses01F_script02_2_M022,3 428 | Ses01F_script02_2_M023,3 429 | Ses01F_script02_2_M024,2 430 | Ses01F_script02_2_M026,2 431 | Ses01F_script02_2_M033,3 432 | Ses01F_script02_2_M037,1 433 | Ses01F_script02_2_M038,1 434 | Ses01F_script02_2_M040,3 435 | Ses01F_script02_2_M042,3 436 | Ses01F_script02_2_M043,3 437 | Ses01F_script02_2_M044,2 438 | Ses01F_script02_2_M046,2 439 | Ses01F_script02_2_M047,3 440 | Ses01F_script02_2_M048,3 441 | Ses01F_script02_2_M049,2 442 | Ses01F_script01_2_F003,0 443 | Ses01F_script01_2_F004,0 444 | Ses01F_script01_2_F005,0 445 | Ses01F_script01_2_F006,0 446 | Ses01F_script01_2_F007,0 447 | Ses01F_script01_2_F008,0 448 | Ses01F_script01_2_F009,0 449 | Ses01F_script01_2_F010,0 450 | Ses01F_script01_2_F011,0 451 | Ses01F_script01_2_F012,0 452 | Ses01F_script01_2_F013,0 453 | Ses01F_script01_2_M001,3 454 | Ses01F_script01_2_M004,3 455 | Ses01F_script01_2_M008,3 456 | Ses01F_script01_2_M017,0 457 | Ses01M_script03_1_F003,2 458 | Ses01M_script03_1_F004,2 459 | Ses01M_script03_1_F007,2 460 | Ses01M_script03_1_F008,2 461 | Ses01M_script03_1_F009,2 462 | Ses01M_script03_1_F010,2 463 | Ses01M_script03_1_F011,2 464 | 
Ses01M_script03_1_F014,2 465 | Ses01M_script03_1_F015,2 466 | Ses01M_script03_1_F017,2 467 | Ses01M_script03_1_F018,2 468 | Ses01M_script03_1_F019,2 469 | Ses01M_script03_1_F021,2 470 | Ses01M_script03_1_F024,2 471 | Ses01M_script03_1_F030,2 472 | Ses01M_script03_1_F031,2 473 | Ses01M_script03_1_F034,2 474 | Ses01M_script03_1_F036,2 475 | Ses01M_script03_1_F037,2 476 | Ses01M_script03_1_F038,2 477 | Ses01M_script03_1_F039,2 478 | Ses01M_script03_1_F040,2 479 | Ses01M_script03_1_F041,2 480 | Ses01M_script03_1_M004,2 481 | Ses01M_script03_1_M010,2 482 | Ses01M_script03_1_M011,2 483 | Ses01M_script03_1_M012,2 484 | Ses01M_script03_1_M013,2 485 | Ses01M_script03_1_M014,1 486 | Ses01M_script03_1_M016,2 487 | Ses01M_script03_1_M017,2 488 | Ses01M_script03_1_M019,2 489 | Ses01M_script03_1_M021,2 490 | Ses01M_script03_1_M022,2 491 | Ses01M_script03_1_M028,2 492 | Ses01M_script03_1_M029,2 493 | Ses01M_script03_1_M030,2 494 | Ses01M_script03_1_M031,2 495 | Ses01M_script03_1_M033,2 496 | Ses01M_script03_1_M037,2 497 | Ses01M_script03_1_M039,2 498 | Ses01M_script03_1_M040,2 499 | Ses01M_script03_1_M041,2 500 | Ses01F_script03_1_F005,2 501 | Ses01F_script03_1_F006,2 502 | Ses01F_script03_1_F007,2 503 | Ses01F_script03_1_F010,2 504 | Ses01F_script03_1_F011,2 505 | Ses01F_script03_1_F012,2 506 | Ses01F_script03_1_F020,2 507 | Ses01F_script03_1_F021,2 508 | Ses01F_script03_1_F022,2 509 | Ses01F_script03_1_F024,2 510 | Ses01F_script03_1_F029,2 511 | Ses01F_script03_1_F030,2 512 | Ses01F_script03_1_M005,2 513 | Ses01F_script03_1_M007,2 514 | Ses01F_script03_1_M008,2 515 | Ses01F_script03_1_M009,2 516 | Ses01F_script03_1_M010,2 517 | Ses01F_script03_1_M013,2 518 | Ses01F_script03_1_M018,2 519 | Ses01F_script03_1_M021,2 520 | Ses01F_script03_1_M024,2 521 | Ses01F_script03_1_M025,2 522 | Ses01F_script03_1_M026,2 523 | Ses01F_script03_1_M034,2 524 | Ses01F_script03_1_M036,2 525 | Ses01F_script03_1_M037,2 526 | Ses01F_impro02_F000,1 527 | Ses01F_impro02_F001,1 528 | Ses01F_impro02_F002,1 
529 | Ses01F_impro02_F003,3 530 | Ses01F_impro02_F004,1 531 | Ses01F_impro02_F005,1 532 | Ses01F_impro02_F006,3 533 | Ses01F_impro02_F008,1 534 | Ses01F_impro02_F010,1 535 | Ses01F_impro02_F012,1 536 | Ses01F_impro02_F013,1 537 | Ses01F_impro02_F014,1 538 | Ses01F_impro02_F015,1 539 | Ses01F_impro02_F016,3 540 | Ses01F_impro02_F017,1 541 | Ses01F_impro02_F018,3 542 | Ses01F_impro02_F019,1 543 | Ses01F_impro02_F020,1 544 | Ses01F_impro02_M001,1 545 | Ses01F_impro02_M005,1 546 | Ses01F_impro02_M009,1 547 | Ses01F_impro02_M010,1 548 | Ses01F_impro02_M011,1 549 | Ses01F_impro02_M012,3 550 | Ses01F_impro02_M013,1 551 | Ses01F_impro02_M014,1 552 | Ses01F_impro02_M015,3 553 | Ses01M_script02_1_F005,0 554 | Ses01M_script02_1_F007,3 555 | Ses01M_script02_1_F008,3 556 | Ses01M_script02_1_F009,2 557 | Ses01M_script02_1_F014,0 558 | Ses01M_script02_1_F017,0 559 | Ses01M_script02_1_F021,0 560 | Ses01M_script02_1_F022,0 561 | Ses01M_script02_1_F023,0 562 | Ses01M_script02_1_M000,2 563 | Ses01M_script02_1_M001,2 564 | Ses01M_script02_1_M002,2 565 | Ses01M_script02_1_M004,2 566 | Ses01M_script02_1_M005,2 567 | Ses01M_script02_1_M007,3 568 | Ses01M_script02_1_M009,3 569 | Ses01M_script02_1_M010,3 570 | Ses01M_script02_1_M011,3 571 | Ses01M_script02_1_M014,2 572 | Ses01M_script02_1_M016,2 573 | Ses01M_script02_1_M017,2 574 | Ses01M_script02_1_M022,3 575 | Ses01M_script02_1_M023,3 576 | Ses01M_script02_1_M028,0 577 | Ses01M_script02_1_M030,3 578 | Ses01M_script02_1_M031,2 579 | Ses01M_script02_1_M032,2 580 | Ses01M_script02_1_M037,3 581 | Ses01M_script02_1_M039,3 582 | Ses01M_script02_1_M040,2 583 | Ses01M_script02_1_M041,2 584 | Ses01M_script02_1_M042,2 585 | Ses01M_script02_1_M043,2 586 | Ses01M_script02_1_M044,2 587 | Ses01M_script02_1_M045,2 588 | Ses01M_script02_1_M046,2 589 | Ses01F_script01_1_F005,3 590 | Ses01F_script01_1_F015,3 591 | Ses01F_script01_1_F034,3 592 | Ses01F_script01_1_F035,0 593 | Ses01F_script01_1_F037,0 594 | Ses01F_script01_1_F038,0 595 | 
Ses01F_script01_1_F041,0 596 | Ses01F_script01_1_F042,0 597 | Ses01F_script01_1_M000,3 598 | Ses01F_script01_1_M001,3 599 | Ses01F_script01_1_M002,3 600 | Ses01F_script01_1_M003,3 601 | Ses01F_script01_1_M004,3 602 | Ses01F_script01_1_M007,3 603 | Ses01F_script01_1_M010,3 604 | Ses01F_script01_1_M012,3 605 | Ses01F_script01_1_M014,3 606 | Ses01F_script01_1_M015,3 607 | Ses01F_script01_1_M016,3 608 | Ses01F_script01_1_M017,3 609 | Ses01F_script01_1_M019,0 610 | Ses01F_script01_1_M023,3 611 | Ses01F_script01_1_M024,3 612 | Ses01F_script01_1_M025,3 613 | Ses01F_script01_1_M034,3 614 | Ses01F_script01_1_M039,3 615 | Ses01F_script01_1_M040,3 616 | Ses01F_impro06_F002,1 617 | Ses01F_impro06_F003,3 618 | Ses01F_impro06_F004,3 619 | Ses01F_impro06_F005,1 620 | Ses01F_impro06_F006,1 621 | Ses01F_impro06_F007,1 622 | Ses01F_impro06_F008,1 623 | Ses01F_impro06_F009,3 624 | Ses01F_impro06_F010,1 625 | Ses01F_impro06_F011,1 626 | Ses01F_impro06_F012,1 627 | Ses01F_impro06_F013,1 628 | Ses01F_impro06_F014,1 629 | Ses01F_impro06_F015,1 630 | Ses01F_impro06_F016,3 631 | Ses01F_impro06_F018,3 632 | Ses01F_impro06_F019,3 633 | Ses01F_impro06_F020,1 634 | Ses01F_impro06_F021,1 635 | Ses01F_impro06_F022,1 636 | Ses01F_impro06_F023,1 637 | Ses01F_impro06_F024,1 638 | Ses01F_impro06_F025,1 639 | Ses01F_impro06_F026,1 640 | Ses01F_impro06_F027,3 641 | Ses01F_impro06_F028,1 642 | Ses01F_impro06_M000,1 643 | Ses01F_impro06_M001,3 644 | Ses01F_impro06_M002,3 645 | Ses01F_impro06_M003,1 646 | Ses01F_impro06_M004,3 647 | Ses01F_impro06_M005,3 648 | Ses01F_impro06_M006,3 649 | Ses01F_impro06_M007,1 650 | Ses01F_impro06_M008,3 651 | Ses01F_impro06_M009,3 652 | Ses01F_impro06_M010,3 653 | Ses01F_impro06_M011,3 654 | Ses01F_impro06_M012,3 655 | Ses01F_impro06_M013,3 656 | Ses01F_impro06_M014,3 657 | Ses01F_impro06_M015,3 658 | Ses01F_impro06_M016,3 659 | Ses01M_impro03_F002,2 660 | Ses01M_impro03_F003,2 661 | Ses01M_impro03_F004,2 662 | Ses01M_impro03_F005,2 663 | Ses01M_impro03_F006,2 664 | 
Ses01M_impro03_F007,2 665 | Ses01M_impro03_F008,2 666 | Ses01M_impro03_F009,2 667 | Ses01M_impro03_F010,2 668 | Ses01M_impro03_F011,2 669 | Ses01M_impro03_F012,2 670 | Ses01M_impro03_F013,2 671 | Ses01M_impro03_F014,2 672 | Ses01M_impro03_F015,2 673 | Ses01M_impro03_F016,2 674 | Ses01M_impro03_F017,3 675 | Ses01M_impro03_F018,3 676 | Ses01M_impro03_F019,2 677 | Ses01M_impro03_F020,2 678 | Ses01M_impro03_F021,3 679 | Ses01M_impro03_F022,2 680 | Ses01M_impro03_F023,2 681 | Ses01M_impro03_F024,2 682 | Ses01M_impro03_F025,2 683 | Ses01M_impro03_F026,2 684 | Ses01M_impro03_M000,2 685 | Ses01M_impro03_M001,2 686 | Ses01M_impro03_M003,3 687 | Ses01M_impro03_M004,2 688 | Ses01M_impro03_M005,2 689 | Ses01M_impro03_M006,3 690 | Ses01M_impro03_M007,2 691 | Ses01M_impro03_M008,2 692 | Ses01M_impro03_M010,2 693 | Ses01M_impro03_M011,2 694 | Ses01M_impro03_M012,2 695 | Ses01M_impro03_M013,2 696 | Ses01M_impro03_M014,3 697 | Ses01M_impro03_M015,2 698 | Ses01M_impro03_M016,2 699 | Ses01M_impro03_M017,2 700 | Ses01M_impro03_M018,3 701 | Ses01M_impro03_M019,2 702 | Ses01M_impro03_M020,2 703 | Ses01M_impro03_M021,3 704 | Ses01M_impro03_M022,2 705 | Ses01M_impro03_M023,2 706 | Ses01M_impro03_M024,2 707 | Ses01F_script01_3_F000,3 708 | Ses01F_script01_3_F001,3 709 | Ses01F_script01_3_F002,3 710 | Ses01F_script01_3_F003,3 711 | Ses01F_script01_3_F004,3 712 | Ses01F_script01_3_F005,3 713 | Ses01F_script01_3_F006,3 714 | Ses01F_script01_3_F007,3 715 | Ses01F_script01_3_F008,3 716 | Ses01F_script01_3_F009,3 717 | Ses01F_script01_3_F010,2 718 | Ses01F_script01_3_F011,2 719 | Ses01F_script01_3_F012,2 720 | Ses01F_script01_3_F013,2 721 | Ses01F_script01_3_F014,3 722 | Ses01F_script01_3_F015,3 723 | Ses01F_script01_3_F016,3 724 | Ses01F_script01_3_F017,2 725 | Ses01F_script01_3_F019,2 726 | Ses01F_script01_3_F020,2 727 | Ses01F_script01_3_F021,2 728 | Ses01F_script01_3_F022,3 729 | Ses01F_script01_3_F028,3 730 | Ses01F_script01_3_F029,3 731 | Ses01F_script01_3_F030,1 732 | 
Ses01F_script01_3_F031,3 733 | Ses01F_script01_3_F035,2 734 | Ses01F_script01_3_M000,3 735 | Ses01F_script01_3_M001,3 736 | Ses01F_script01_3_M003,3 737 | Ses01F_script01_3_M004,3 738 | Ses01F_script01_3_M005,3 739 | Ses01F_script01_3_M008,2 740 | Ses01F_script01_3_M009,2 741 | Ses01F_script01_3_M010,2 742 | Ses01F_script01_3_M015,2 743 | Ses01F_script01_3_M016,2 744 | Ses01F_script01_3_M017,2 745 | Ses01F_script01_3_M019,2 746 | Ses01F_script01_3_M020,2 747 | Ses01F_script01_3_M021,1 748 | Ses01F_script01_3_M022,1 749 | Ses01F_script01_3_M023,1 750 | Ses01F_script01_3_M024,1 751 | Ses01F_script01_3_M025,1 752 | Ses01F_script01_3_M026,1 753 | Ses01F_script01_3_M027,1 754 | Ses01F_script01_3_M028,1 755 | Ses01F_script01_3_M029,1 756 | Ses01F_script01_3_M030,1 757 | Ses01F_script01_3_M032,1 758 | Ses01F_script01_3_M033,1 759 | Ses01F_script01_3_M036,1 760 | Ses01F_script01_3_M037,3 761 | Ses01M_impro01_F000,0 762 | Ses01M_impro01_F001,0 763 | Ses01M_impro01_F007,0 764 | Ses01M_impro01_F008,0 765 | Ses01M_impro01_F011,0 766 | Ses01M_impro01_F012,0 767 | Ses01M_impro01_F019,0 768 | Ses01M_impro01_F020,0 769 | Ses01M_impro01_F021,0 770 | Ses01M_impro01_F022,0 771 | Ses01M_impro01_F024,3 772 | Ses01M_impro01_F025,3 773 | Ses01M_impro01_M003,3 774 | Ses01M_impro01_M010,3 775 | Ses01M_impro01_M020,0 776 | Ses01M_impro01_M025,0 777 | Ses01M_impro01_M030,0 778 | Ses01M_impro01_M031,0 779 | Ses01M_impro01_M032,0 780 | Ses01F_impro03_F000,2 781 | Ses01F_impro03_F001,2 782 | Ses01F_impro03_F002,2 783 | Ses01F_impro03_F004,2 784 | Ses01F_impro03_F005,2 785 | Ses01F_impro03_F006,2 786 | Ses01F_impro03_F008,2 787 | Ses01F_impro03_F009,2 788 | Ses01F_impro03_F010,2 789 | Ses01F_impro03_F012,2 790 | Ses01F_impro03_F013,2 791 | Ses01F_impro03_F015,2 792 | Ses01F_impro03_F016,2 793 | Ses01F_impro03_F017,3 794 | Ses01F_impro03_F018,3 795 | Ses01F_impro03_F019,3 796 | Ses01F_impro03_F021,2 797 | Ses01F_impro03_F022,3 798 | Ses01F_impro03_F023,3 799 | Ses01F_impro03_F024,2 800 | 
Ses01F_impro03_F025,2 801 | Ses01F_impro03_F026,2 802 | Ses01F_impro03_M000,3 803 | Ses01F_impro03_M001,3 804 | Ses01F_impro03_M003,2 805 | Ses01F_impro03_M006,3 806 | Ses01F_impro03_M007,2 807 | Ses01F_impro03_M008,3 808 | Ses01F_impro03_M009,3 809 | Ses01F_impro03_M011,3 810 | Ses01F_impro03_M013,2 811 | Ses01F_impro03_M014,2 812 | Ses01F_impro03_M015,2 813 | Ses01F_impro03_M016,3 814 | Ses01F_impro03_M019,3 815 | Ses01F_impro03_M020,3 816 | Ses01F_impro03_M021,3 817 | Ses01F_impro03_M022,2 818 | Ses01F_impro03_M023,2 819 | Ses01F_impro03_M024,3 820 | Ses01F_impro01_F000,3 821 | Ses01F_impro01_F001,3 822 | Ses01F_impro01_F002,3 823 | Ses01F_impro01_F005,3 824 | Ses01F_impro01_F012,0 825 | Ses01F_impro01_F014,3 826 | Ses01F_impro01_M011,0 827 | Ses01F_impro01_M013,0 828 | Ses01F_impro05_F008,0 829 | Ses01F_impro05_F009,0 830 | Ses01F_impro05_F011,0 831 | Ses01F_impro05_F012,0 832 | Ses01F_impro05_F014,0 833 | Ses01F_impro05_F015,0 834 | Ses01F_impro05_F016,0 835 | Ses01F_impro05_F017,0 836 | Ses01F_impro05_F018,0 837 | Ses01F_impro05_F019,0 838 | Ses01F_impro05_F020,0 839 | Ses01F_impro05_F021,0 840 | Ses01F_impro05_F022,0 841 | Ses01F_impro05_F023,0 842 | Ses01F_impro05_F024,0 843 | Ses01F_impro05_F025,0 844 | Ses01F_impro05_F026,0 845 | Ses01F_impro05_F028,0 846 | Ses01F_impro05_F029,0 847 | Ses01F_impro05_F030,0 848 | Ses01F_impro05_M000,3 849 | Ses01F_impro05_M001,2 850 | Ses01F_impro05_M002,2 851 | Ses01F_impro05_M003,3 852 | Ses01F_impro05_M004,3 853 | Ses01F_impro05_M005,3 854 | Ses01F_impro05_M006,3 855 | Ses01F_impro05_M007,3 856 | Ses01F_impro05_M008,3 857 | Ses01F_impro05_M009,3 858 | Ses01F_impro05_M010,3 859 | Ses01F_impro05_M011,3 860 | Ses01F_impro05_M012,3 861 | Ses01F_impro05_M013,3 862 | Ses01F_impro05_M014,3 863 | Ses01F_impro05_M015,3 864 | Ses01F_impro05_M016,3 865 | Ses01F_impro05_M017,3 866 | Ses01F_impro05_M019,3 867 | Ses01F_impro05_M020,3 868 | Ses01F_impro05_M022,3 869 | Ses01F_impro05_M023,3 870 | Ses01F_impro05_M024,3 871 | 
Ses01F_impro05_M025,3 872 | Ses01F_impro05_M026,3 873 | Ses01F_impro05_M027,3 874 | Ses01F_impro05_M028,3 875 | Ses01F_impro05_M029,3 876 | Ses01F_impro05_M030,3 877 | Ses01F_impro05_M031,3 878 | Ses01F_impro05_M034,3 879 | Ses01F_impro05_M035,3 880 | Ses01M_script01_3_F000,3 881 | Ses01M_script01_3_F001,3 882 | Ses01M_script01_3_F003,1 883 | Ses01M_script01_3_F004,3 884 | Ses01M_script01_3_F005,1 885 | Ses01M_script01_3_F006,1 886 | Ses01M_script01_3_F008,3 887 | Ses01M_script01_3_F012,3 888 | Ses01M_script01_3_F014,2 889 | Ses01M_script01_3_F015,2 890 | Ses01M_script01_3_F016,2 891 | Ses01M_script01_3_F018,1 892 | Ses01M_script01_3_F019,1 893 | Ses01M_script01_3_F021,1 894 | Ses01M_script01_3_F022,1 895 | Ses01M_script01_3_F023,1 896 | Ses01M_script01_3_F024,1 897 | Ses01M_script01_3_F025,1 898 | Ses01M_script01_3_F026,1 899 | Ses01M_script01_3_F030,2 900 | Ses01M_script01_3_M000,2 901 | Ses01M_script01_3_M001,2 902 | Ses01M_script01_3_M002,1 903 | Ses01M_script01_3_M004,3 904 | Ses01M_script01_3_M006,3 905 | Ses01M_script01_3_M007,3 906 | Ses01M_script01_3_M009,2 907 | Ses01M_script01_3_M014,3 908 | Ses01M_script01_3_M016,2 909 | Ses01M_script01_3_M018,2 910 | Ses01M_script01_3_M019,2 911 | Ses01M_script01_3_M020,1 912 | Ses01M_script01_3_M021,1 913 | Ses01M_script01_3_M022,1 914 | Ses01M_script01_3_M023,1 915 | Ses01M_script01_3_M024,1 916 | Ses01M_script01_3_M025,1 917 | Ses01M_script01_3_M026,1 918 | Ses01M_script01_3_M027,1 919 | Ses01M_script01_3_M028,1 920 | Ses01M_script01_3_M029,1 921 | Ses01M_script01_3_M030,1 922 | Ses01M_script01_3_M031,1 923 | Ses01M_script01_3_M032,1 924 | Ses01M_script01_3_M033,1 925 | Ses01M_script01_3_M034,1 926 | Ses01M_script01_3_M035,1 927 | Ses01M_script01_3_M036,1 928 | Ses01M_script01_3_M037,1 929 | Ses01M_script01_3_M038,1 930 | Ses01M_script01_3_M039,1 931 | Ses01M_script01_3_M040,1 932 | Ses01M_script01_3_M041,1 933 | Ses01M_script01_3_M043,2 934 | Ses01M_script03_2_F006,3 935 | Ses01M_script03_2_F010,3 936 | 
Ses01M_script03_2_F011,0 937 | Ses01M_script03_2_F015,0 938 | Ses01M_script03_2_F017,0 939 | Ses01M_script03_2_F022,0 940 | Ses01M_script03_2_F023,0 941 | Ses01M_script03_2_F024,0 942 | Ses01M_script03_2_F025,0 943 | Ses01M_script03_2_F026,0 944 | Ses01M_script03_2_F027,0 945 | Ses01M_script03_2_F028,0 946 | Ses01M_script03_2_F029,0 947 | Ses01M_script03_2_F030,0 948 | Ses01M_script03_2_F031,0 949 | Ses01M_script03_2_F033,0 950 | Ses01M_script03_2_F034,0 951 | Ses01M_script03_2_F035,0 952 | Ses01M_script03_2_F036,0 953 | Ses01M_script03_2_F037,0 954 | Ses01M_script03_2_F038,0 955 | Ses01M_script03_2_F039,0 956 | Ses01M_script03_2_F040,0 957 | Ses01M_script03_2_F041,0 958 | Ses01M_script03_2_M012,3 959 | Ses01M_script03_2_M013,3 960 | Ses01M_script03_2_M018,3 961 | Ses01M_script03_2_M022,0 962 | Ses01M_script03_2_M023,0 963 | Ses01M_script03_2_M024,0 964 | Ses01M_script03_2_M025,0 965 | Ses01M_script03_2_M026,0 966 | Ses01M_script03_2_M027,0 967 | Ses01M_script03_2_M028,0 968 | Ses01M_script03_2_M029,0 969 | Ses01M_script03_2_M030,0 970 | Ses01M_script03_2_M031,0 971 | Ses01M_script03_2_M032,0 972 | Ses01M_script03_2_M033,0 973 | Ses01M_script03_2_M034,0 974 | Ses01M_script03_2_M035,0 975 | Ses01M_script03_2_M036,0 976 | Ses01M_script03_2_M037,0 977 | Ses01M_script03_2_M038,0 978 | Ses01M_script03_2_M039,0 979 | Ses01M_script03_2_M040,0 980 | Ses01M_script03_2_M041,0 981 | Ses01M_script03_2_M042,0 982 | Ses01M_script03_2_M043,0 983 | Ses01M_impro04_F000,3 984 | Ses01M_impro04_F002,3 985 | Ses01M_impro04_F003,3 986 | Ses01M_impro04_F004,3 987 | Ses01M_impro04_F005,3 988 | Ses01M_impro04_F006,3 989 | Ses01M_impro04_F007,3 990 | Ses01M_impro04_F008,3 991 | Ses01M_impro04_F009,3 992 | Ses01M_impro04_F010,3 993 | Ses01M_impro04_F011,3 994 | Ses01M_impro04_F012,3 995 | Ses01M_impro04_F013,3 996 | Ses01M_impro04_F014,3 997 | Ses01M_impro04_F015,3 998 | Ses01M_impro04_F016,3 999 | Ses01M_impro04_F017,3 1000 | Ses01M_impro04_F018,3 1001 | Ses01M_impro04_F019,3 1002 | 
Ses01M_impro04_F020,3 1003 | Ses01M_impro04_F021,3 1004 | Ses01M_impro04_F022,3 1005 | Ses01M_impro04_F023,3 1006 | Ses01M_impro04_F025,3 1007 | Ses01M_impro04_M014,0 1008 | Ses01M_impro04_M018,0 1009 | Ses01M_impro04_M023,0 1010 | Ses01M_impro04_M025,3 1011 | Ses01M_impro02_F000,3 1012 | Ses01M_impro02_F009,1 1013 | Ses01M_impro02_F010,1 1014 | Ses01M_impro02_F013,1 1015 | Ses01M_impro02_F015,1 1016 | Ses01M_impro02_F017,1 1017 | Ses01M_impro02_F019,1 1018 | Ses01M_impro02_M000,3 1019 | Ses01M_impro02_M001,1 1020 | Ses01M_impro02_M002,1 1021 | Ses01M_impro02_M003,1 1022 | Ses01M_impro02_M007,1 1023 | Ses01M_impro02_M009,1 1024 | Ses01M_impro02_M010,1 1025 | Ses01M_impro02_M011,1 1026 | Ses01M_impro02_M012,1 1027 | Ses01M_impro02_M013,1 1028 | Ses01M_impro02_M014,1 1029 | Ses01M_impro02_M015,1 1030 | Ses01M_impro02_M016,1 1031 | Ses01M_impro02_M017,1 1032 | Ses01M_impro02_M018,1 1033 | Ses01M_impro02_M019,1 1034 | Ses01M_impro02_M020,1 1035 | Ses01M_impro02_M021,1 1036 | Ses01M_impro02_M022,1 1037 | Ses01M_impro02_M023,1 1038 | Ses01M_impro02_M025,1 1039 | Ses01M_impro05_F000,3 1040 | Ses01M_impro05_F001,3 1041 | Ses01M_impro05_F002,3 1042 | Ses01M_impro05_F003,3 1043 | Ses01M_impro05_F004,3 1044 | Ses01M_impro05_F005,3 1045 | Ses01M_impro05_F006,3 1046 | Ses01M_impro05_F007,3 1047 | Ses01M_impro05_F011,3 1048 | Ses01M_impro05_F012,3 1049 | Ses01M_impro05_F013,3 1050 | Ses01M_impro05_F014,3 1051 | Ses01M_impro05_F015,3 1052 | Ses01M_impro05_F016,3 1053 | Ses01M_impro05_F017,3 1054 | Ses01M_impro05_F018,3 1055 | Ses01M_impro05_F019,3 1056 | Ses01M_impro05_F020,3 1057 | Ses01M_impro05_F021,3 1058 | Ses01M_impro05_F022,3 1059 | Ses01M_impro05_F023,3 1060 | Ses01M_impro05_F024,3 1061 | Ses01M_impro05_F025,3 1062 | Ses01M_impro05_F026,3 1063 | Ses01M_impro05_F027,3 1064 | Ses01M_impro05_F028,3 1065 | Ses01M_impro05_F029,3 1066 | Ses01M_impro05_F030,3 1067 | Ses01M_impro05_F031,3 1068 | Ses01M_impro05_F032,3 1069 | Ses01M_impro05_F033,3 1070 | Ses01M_impro05_F034,3 1071 
| Ses01M_impro05_M004,0 1072 | Ses01M_impro05_M005,0 1073 | Ses01M_impro05_M006,0 1074 | Ses01M_impro05_M007,0 1075 | Ses01M_impro05_M008,0 1076 | Ses01M_impro05_M015,3 1077 | Ses01M_impro05_M020,0 1078 | Ses01M_impro05_M021,0 1079 | Ses01M_impro05_M022,0 1080 | Ses01M_impro05_M023,0 1081 | Ses01M_impro05_M024,0 1082 | Ses01M_impro05_M025,0 1083 | Ses01M_impro05_M026,0 1084 | Ses01M_impro05_M027,0 1085 | Ses01M_impro05_M028,0 1086 | Ses01M_impro05_M031,0 1087 | -------------------------------------------------------------------------------- /Feature_Extractor/iemocap_data/valid2.csv: -------------------------------------------------------------------------------- 1 | name,label 2 | Ses02M_impro04_F000,3 3 | Ses02M_impro04_F001,3 4 | Ses02M_impro04_F002,3 5 | Ses02M_impro04_F003,3 6 | Ses02M_impro04_F004,3 7 | Ses02M_impro04_F005,3 8 | Ses02M_impro04_F006,3 9 | Ses02M_impro04_F007,3 10 | Ses02M_impro04_F008,3 11 | Ses02M_impro04_F009,1 12 | Ses02M_impro04_F010,3 13 | Ses02M_impro04_F011,3 14 | Ses02M_impro04_F012,3 15 | Ses02M_impro04_F014,3 16 | Ses02M_impro04_F015,3 17 | Ses02M_impro04_F016,3 18 | Ses02M_impro04_M014,1 19 | Ses02M_impro04_M019,0 20 | Ses02M_impro04_M022,1 21 | Ses02M_script03_2_F000,2 22 | Ses02M_script03_2_F001,2 23 | Ses02M_script03_2_F002,3 24 | Ses02M_script03_2_F003,3 25 | Ses02M_script03_2_F005,3 26 | Ses02M_script03_2_F006,3 27 | Ses02M_script03_2_F007,3 28 | Ses02M_script03_2_F009,3 29 | Ses02M_script03_2_F011,3 30 | Ses02M_script03_2_F012,3 31 | Ses02M_script03_2_F014,0 32 | Ses02M_script03_2_F015,3 33 | Ses02M_script03_2_F019,0 34 | Ses02M_script03_2_F023,0 35 | Ses02M_script03_2_F024,3 36 | Ses02M_script03_2_F028,0 37 | Ses02M_script03_2_F035,0 38 | Ses02M_script03_2_F036,0 39 | Ses02M_script03_2_F037,0 40 | Ses02M_script03_2_F038,0 41 | Ses02M_script03_2_F039,0 42 | Ses02M_script03_2_F040,0 43 | Ses02M_script03_2_F041,0 44 | Ses02M_script03_2_F042,0 45 | Ses02M_script03_2_M000,3 46 | Ses02M_script03_2_M001,0 47 | 
Ses02M_script03_2_M003,0 48 | Ses02M_script03_2_M005,0 49 | Ses02M_script03_2_M007,0 50 | Ses02M_script03_2_M010,3 51 | Ses02M_script03_2_M011,3 52 | Ses02M_script03_2_M012,3 53 | Ses02M_script03_2_M016,3 54 | Ses02M_script03_2_M018,3 55 | Ses02M_script03_2_M024,3 56 | Ses02M_script03_2_M026,0 57 | Ses02M_script03_2_M027,0 58 | Ses02M_script03_2_M031,0 59 | Ses02M_script03_2_M032,3 60 | Ses02M_script03_2_M033,3 61 | Ses02M_script03_2_M039,0 62 | Ses02M_script03_2_M040,0 63 | Ses02M_script03_2_M041,0 64 | Ses02M_script03_2_M042,0 65 | Ses02M_script03_2_M043,0 66 | Ses02M_script03_2_M044,0 67 | Ses02M_script03_2_M045,0 68 | Ses02M_script03_2_M046,0 69 | Ses02M_script03_2_M047,0 70 | Ses02M_impro02_F000,1 71 | Ses02M_impro02_F001,1 72 | Ses02M_impro02_F002,1 73 | Ses02M_impro02_F003,1 74 | Ses02M_impro02_F004,1 75 | Ses02M_impro02_F005,1 76 | Ses02M_impro02_F006,1 77 | Ses02M_impro02_F007,1 78 | Ses02M_impro02_F008,1 79 | Ses02M_impro02_F009,1 80 | Ses02M_impro02_F010,1 81 | Ses02M_impro02_F012,1 82 | Ses02M_impro02_F013,1 83 | Ses02M_impro02_F014,1 84 | Ses02M_impro02_M000,1 85 | Ses02M_impro02_M001,1 86 | Ses02M_impro02_M002,1 87 | Ses02M_impro02_M007,1 88 | Ses02M_impro02_M008,1 89 | Ses02M_impro02_M009,1 90 | Ses02M_impro02_M013,1 91 | Ses02M_impro02_M014,3 92 | Ses02M_impro02_M017,1 93 | Ses02M_impro02_M018,1 94 | Ses02M_impro06_F002,1 95 | Ses02M_impro06_F003,1 96 | Ses02M_impro06_F004,1 97 | Ses02M_impro06_F005,1 98 | Ses02M_impro06_F006,1 99 | Ses02M_impro06_F007,1 100 | Ses02M_impro06_F008,1 101 | Ses02M_impro06_F009,1 102 | Ses02M_impro06_F010,1 103 | Ses02M_impro06_F011,1 104 | Ses02M_impro06_F012,1 105 | Ses02M_impro06_F013,1 106 | Ses02M_impro06_F014,1 107 | Ses02M_impro06_F015,1 108 | Ses02M_impro06_F016,1 109 | Ses02M_impro06_F017,1 110 | Ses02M_impro06_F018,1 111 | Ses02M_impro06_M000,1 112 | Ses02M_impro06_M001,1 113 | Ses02M_impro06_M002,1 114 | Ses02M_impro06_M003,1 115 | Ses02M_impro06_M004,1 116 | Ses02M_impro06_M005,1 117 | Ses02M_impro06_M006,1 
118 | Ses02M_impro06_M007,1 119 | Ses02M_impro06_M008,1 120 | Ses02M_impro06_M009,1 121 | Ses02M_impro06_M010,1 122 | Ses02M_impro06_M011,1 123 | Ses02M_impro06_M012,1 124 | Ses02M_impro06_M013,1 125 | Ses02M_impro06_M014,1 126 | Ses02M_impro06_M015,1 127 | Ses02M_impro06_M016,1 128 | Ses02M_impro06_M017,1 129 | Ses02M_impro06_M018,1 130 | Ses02M_impro06_M019,1 131 | Ses02M_impro06_M020,1 132 | Ses02M_impro06_M021,1 133 | Ses02M_impro06_M022,1 134 | Ses02M_impro06_M023,1 135 | Ses02M_script01_2_F000,0 136 | Ses02M_script01_2_F001,0 137 | Ses02M_script01_2_F002,0 138 | Ses02M_script01_2_F003,0 139 | Ses02M_script01_2_F005,0 140 | Ses02M_script01_2_F006,0 141 | Ses02M_script01_2_F010,0 142 | Ses02M_script01_2_F013,1 143 | Ses02M_script01_2_M001,3 144 | Ses02M_script01_2_M004,3 145 | Ses02M_script01_2_M005,3 146 | Ses02M_script01_2_M007,3 147 | Ses02M_script01_2_M016,0 148 | Ses02M_script01_2_M017,0 149 | Ses02F_script01_1_F004,1 150 | Ses02F_script01_1_F005,1 151 | Ses02F_script01_1_F006,1 152 | Ses02F_script01_1_F017,1 153 | Ses02F_script01_1_F018,3 154 | Ses02F_script01_1_F019,0 155 | Ses02F_script01_1_F021,3 156 | Ses02F_script01_1_F025,3 157 | Ses02F_script01_1_F026,3 158 | Ses02F_script01_1_F027,3 159 | Ses02F_script01_1_F028,1 160 | Ses02F_script01_1_F036,0 161 | Ses02F_script01_1_F037,0 162 | Ses02F_script01_1_F038,0 163 | Ses02F_script01_1_F040,0 164 | Ses02F_script01_1_F044,1 165 | Ses02F_script01_1_M011,0 166 | Ses02F_script01_1_M013,0 167 | Ses02F_script01_1_M017,3 168 | Ses02F_script01_1_M021,0 169 | Ses02F_script01_1_M023,0 170 | Ses02F_script01_1_M025,0 171 | Ses02F_script01_1_M027,1 172 | Ses02F_script01_1_M029,3 173 | Ses02F_script01_1_M036,0 174 | Ses02F_script01_1_M037,0 175 | Ses02F_script01_1_M038,0 176 | Ses02F_script01_1_M040,3 177 | Ses02F_script01_1_M043,3 178 | Ses02F_script01_1_M044,3 179 | Ses02M_impro08_F000,2 180 | Ses02M_impro08_F001,2 181 | Ses02M_impro08_F002,2 182 | Ses02M_impro08_F006,3 183 | Ses02M_impro08_F007,3 184 | 
Ses02M_impro08_F008,3 185 | Ses02M_impro08_F010,3 186 | Ses02M_impro08_F019,3 187 | Ses02M_impro08_F023,2 188 | Ses02M_impro08_F024,2 189 | Ses02M_impro08_F025,3 190 | Ses02M_impro08_F026,3 191 | Ses02M_impro08_F027,3 192 | Ses02M_impro08_F028,2 193 | Ses02M_impro08_F030,2 194 | Ses02M_impro08_M000,3 195 | Ses02M_impro08_M002,3 196 | Ses02M_impro08_M003,3 197 | Ses02M_impro08_M004,3 198 | Ses02M_impro08_M005,3 199 | Ses02M_impro08_M007,3 200 | Ses02M_impro08_M008,3 201 | Ses02M_impro08_M009,3 202 | Ses02M_impro08_M010,3 203 | Ses02M_impro08_M011,3 204 | Ses02M_impro08_M012,3 205 | Ses02M_impro08_M013,3 206 | Ses02M_impro08_M014,3 207 | Ses02M_impro08_M015,3 208 | Ses02M_impro08_M016,3 209 | Ses02M_impro08_M017,3 210 | Ses02M_impro08_M018,3 211 | Ses02M_impro08_M019,3 212 | Ses02M_impro08_M020,3 213 | Ses02M_impro08_M021,3 214 | Ses02M_impro08_M022,3 215 | Ses02M_impro08_M023,3 216 | Ses02M_impro08_M024,3 217 | Ses02M_impro08_M025,3 218 | Ses02M_impro08_M026,3 219 | Ses02M_impro08_M027,3 220 | Ses02M_impro08_M028,3 221 | Ses02M_impro08_M029,3 222 | Ses02M_impro08_M030,3 223 | Ses02M_impro08_M031,3 224 | Ses02F_impro07_F001,2 225 | Ses02F_impro07_F002,2 226 | Ses02F_impro07_F003,2 227 | Ses02F_impro07_F004,2 228 | Ses02F_impro07_F005,2 229 | Ses02F_impro07_F006,2 230 | Ses02F_impro07_F007,2 231 | Ses02F_impro07_F008,2 232 | Ses02F_impro07_F009,2 233 | Ses02F_impro07_F010,2 234 | Ses02F_impro07_F011,2 235 | Ses02F_impro07_F012,2 236 | Ses02F_impro07_F013,2 237 | Ses02F_impro07_F014,2 238 | Ses02F_impro07_F015,2 239 | Ses02F_impro07_F016,2 240 | Ses02F_impro07_F017,2 241 | Ses02F_impro07_F018,2 242 | Ses02F_impro07_F019,2 243 | Ses02F_impro07_F020,2 244 | Ses02F_impro07_F021,2 245 | Ses02F_impro07_F022,2 246 | Ses02F_impro07_F023,2 247 | Ses02F_impro07_F024,2 248 | Ses02F_impro07_F025,2 249 | Ses02F_impro07_F026,2 250 | Ses02F_impro07_F027,2 251 | Ses02F_impro07_F028,2 252 | Ses02F_impro07_F029,2 253 | Ses02F_impro07_F030,2 254 | Ses02F_impro07_F031,2 255 | 
Ses02F_impro07_F032,2 256 | Ses02F_impro07_F033,2 257 | Ses02F_impro07_F034,2 258 | Ses02F_impro07_F035,2 259 | Ses02F_impro07_F036,2 260 | Ses02F_impro07_F037,2 261 | Ses02F_impro07_M000,3 262 | Ses02F_impro07_M001,2 263 | Ses02F_impro07_M002,2 264 | Ses02F_impro07_M003,2 265 | Ses02F_impro07_M004,2 266 | Ses02F_impro07_M005,2 267 | Ses02F_impro07_M006,2 268 | Ses02F_impro07_M007,2 269 | Ses02F_impro07_M008,2 270 | Ses02F_impro07_M009,2 271 | Ses02F_impro07_M010,2 272 | Ses02F_impro07_M011,2 273 | Ses02F_impro07_M016,3 274 | Ses02F_impro07_M017,2 275 | Ses02F_impro07_M018,2 276 | Ses02F_impro07_M019,2 277 | Ses02F_impro07_M023,2 278 | Ses02F_impro07_M027,2 279 | Ses02F_impro07_M028,2 280 | Ses02F_impro07_M029,2 281 | Ses02F_impro07_M034,2 282 | Ses02M_impro05_F000,3 283 | Ses02M_impro05_F001,3 284 | Ses02M_impro05_F002,3 285 | Ses02M_impro05_F003,3 286 | Ses02M_impro05_F004,3 287 | Ses02M_impro05_F005,3 288 | Ses02M_impro05_F006,3 289 | Ses02M_impro05_F007,3 290 | Ses02M_impro05_F008,3 291 | Ses02M_impro05_F009,3 292 | Ses02M_impro05_F010,3 293 | Ses02M_impro05_F011,3 294 | Ses02M_impro05_F012,3 295 | Ses02M_impro05_F013,3 296 | Ses02M_impro05_F014,3 297 | Ses02M_impro05_F015,3 298 | Ses02M_impro05_F016,3 299 | Ses02M_impro05_F017,3 300 | Ses02M_impro05_F018,3 301 | Ses02M_impro05_F019,3 302 | Ses02M_impro05_F020,3 303 | Ses02M_impro05_F021,3 304 | Ses02M_impro05_F023,3 305 | Ses02M_impro05_F024,3 306 | Ses02M_impro05_F025,3 307 | Ses02M_impro05_F026,3 308 | Ses02M_impro05_M000,3 309 | Ses02M_impro05_M001,3 310 | Ses02M_impro05_M002,3 311 | Ses02M_impro05_M003,3 312 | Ses02M_impro05_M013,0 313 | Ses02M_impro05_M016,0 314 | Ses02M_impro05_M018,0 315 | Ses02M_impro05_M025,0 316 | Ses02M_impro05_M026,0 317 | Ses02F_script01_3_F009,1 318 | Ses02F_script01_3_F012,2 319 | Ses02F_script01_3_F013,2 320 | Ses02F_script01_3_F014,2 321 | Ses02F_script01_3_F016,2 322 | Ses02F_script01_3_F020,1 323 | Ses02F_script01_3_F021,1 324 | Ses02F_script01_3_F022,1 325 | 
Ses02F_script01_3_F023,1 326 | Ses02F_script01_3_F024,1 327 | Ses02F_script01_3_F025,1 328 | Ses02F_script01_3_F026,1 329 | Ses02F_script01_3_F029,2 330 | Ses02F_script01_3_M001,3 331 | Ses02F_script01_3_M010,2 332 | Ses02F_script01_3_M020,1 333 | Ses02F_script01_3_M021,1 334 | Ses02F_script01_3_M022,1 335 | Ses02F_script01_3_M023,1 336 | Ses02F_script01_3_M024,1 337 | Ses02F_script01_3_M025,1 338 | Ses02F_script01_3_M026,1 339 | Ses02F_script01_3_M027,1 340 | Ses02F_script01_3_M028,1 341 | Ses02F_script01_3_M029,1 342 | Ses02F_script01_3_M030,1 343 | Ses02F_script01_3_M031,1 344 | Ses02F_script01_3_M032,1 345 | Ses02F_script01_3_M033,1 346 | Ses02F_script01_3_M034,1 347 | Ses02F_impro04_F009,1 348 | Ses02F_impro04_F016,1 349 | Ses02F_impro04_M000,3 350 | Ses02F_impro04_M001,3 351 | Ses02F_impro04_M003,3 352 | Ses02F_impro04_M004,3 353 | Ses02F_impro04_M005,3 354 | Ses02F_impro04_M006,3 355 | Ses02F_impro04_M007,3 356 | Ses02F_impro04_M008,3 357 | Ses02F_impro04_M009,3 358 | Ses02F_impro04_M010,3 359 | Ses02F_impro04_M011,3 360 | Ses02F_impro04_M012,3 361 | Ses02F_impro04_M013,3 362 | Ses02F_impro04_M015,3 363 | Ses02F_impro04_M016,3 364 | Ses02F_impro04_M017,3 365 | Ses02F_impro04_M018,3 366 | Ses02F_impro04_M019,3 367 | Ses02M_script02_1_F000,3 368 | Ses02M_script02_1_F001,3 369 | Ses02M_script02_1_F005,3 370 | Ses02M_script02_1_F006,3 371 | Ses02M_script02_1_F011,3 372 | Ses02M_script02_1_F016,1 373 | Ses02M_script02_1_F017,3 374 | Ses02M_script02_1_F018,0 375 | Ses02M_script02_1_F020,3 376 | Ses02M_script02_1_F021,3 377 | Ses02M_script02_1_M000,2 378 | Ses02M_script02_1_M001,2 379 | Ses02M_script02_1_M003,2 380 | Ses02M_script02_1_M004,2 381 | Ses02M_script02_1_M005,2 382 | Ses02M_script02_1_M006,2 383 | Ses02M_script02_1_M013,2 384 | Ses02M_script02_1_M014,2 385 | Ses02M_script02_1_M015,2 386 | Ses02M_script02_1_M016,2 387 | Ses02M_script02_1_M023,2 388 | Ses02M_script02_1_M024,2 389 | Ses02M_script02_1_M025,2 390 | Ses02M_script02_1_M026,2 391 | 
Ses02M_script02_1_M028,2 392 | Ses02M_script02_1_M029,2 393 | Ses02M_script02_1_M035,2 394 | Ses02M_script02_1_M036,2 395 | Ses02M_script02_1_M037,2 396 | Ses02M_script02_1_M038,2 397 | Ses02M_script02_1_M039,2 398 | Ses02M_script02_1_M040,2 399 | Ses02M_script02_1_M041,2 400 | Ses02M_script02_1_M042,2 401 | Ses02M_impro01_F000,3 402 | Ses02M_impro01_F001,3 403 | Ses02M_impro01_F002,3 404 | Ses02M_impro01_F003,3 405 | Ses02M_impro01_F004,3 406 | Ses02M_impro01_F005,3 407 | Ses02M_impro01_F006,3 408 | Ses02M_impro01_F015,3 409 | Ses02M_impro01_F016,3 410 | Ses02M_impro01_M001,3 411 | Ses02M_impro01_M002,3 412 | Ses02M_impro01_M003,3 413 | Ses02M_impro01_M004,3 414 | Ses02M_impro01_M005,3 415 | Ses02M_impro01_M007,0 416 | Ses02M_impro01_M008,0 417 | Ses02M_impro01_M009,0 418 | Ses02M_impro01_M010,0 419 | Ses02M_impro01_M011,0 420 | Ses02M_impro01_M018,0 421 | Ses02M_impro01_M019,0 422 | Ses02M_impro07_F000,2 423 | Ses02M_impro07_F001,2 424 | Ses02M_impro07_F003,2 425 | Ses02M_impro07_F004,2 426 | Ses02M_impro07_F005,2 427 | Ses02M_impro07_F006,2 428 | Ses02M_impro07_F007,2 429 | Ses02M_impro07_F008,2 430 | Ses02M_impro07_F009,2 431 | Ses02M_impro07_F010,2 432 | Ses02M_impro07_F011,2 433 | Ses02M_impro07_F012,2 434 | Ses02M_impro07_F013,2 435 | Ses02M_impro07_F014,2 436 | Ses02M_impro07_F015,2 437 | Ses02M_impro07_F016,2 438 | Ses02M_impro07_F017,2 439 | Ses02M_impro07_F018,2 440 | Ses02M_impro07_F019,2 441 | Ses02M_impro07_F020,2 442 | Ses02M_impro07_F021,2 443 | Ses02M_impro07_F022,2 444 | Ses02M_impro07_F023,2 445 | Ses02M_impro07_F024,2 446 | Ses02M_impro07_F025,2 447 | Ses02M_impro07_F026,2 448 | Ses02M_impro07_F027,2 449 | Ses02M_impro07_F028,2 450 | Ses02M_impro07_F029,2 451 | Ses02M_impro07_M002,2 452 | Ses02M_impro07_M006,2 453 | Ses02M_impro07_M007,2 454 | Ses02M_impro07_M008,2 455 | Ses02M_impro07_M009,2 456 | Ses02M_impro07_M010,2 457 | Ses02M_impro07_M011,2 458 | Ses02M_impro07_M013,2 459 | Ses02M_impro07_M014,2 460 | Ses02M_impro07_M015,2 461 | 
Ses02M_impro07_M017,2 462 | Ses02M_impro07_M018,2 463 | Ses02M_impro07_M019,2 464 | Ses02M_impro07_M020,2 465 | Ses02M_impro07_M021,2 466 | Ses02M_impro07_M022,2 467 | Ses02M_impro07_M023,2 468 | Ses02M_impro07_M024,2 469 | Ses02M_impro07_M025,2 470 | Ses02M_impro07_M026,2 471 | Ses02M_impro07_M029,2 472 | Ses02M_impro07_M030,2 473 | Ses02M_impro07_M031,2 474 | Ses02M_impro07_M032,2 475 | Ses02F_impro01_F000,3 476 | Ses02F_impro01_F018,0 477 | Ses02F_impro01_F019,0 478 | Ses02F_impro01_F020,0 479 | Ses02F_impro01_F021,0 480 | Ses02F_impro01_M000,3 481 | Ses02F_impro01_M001,3 482 | Ses02F_impro01_M002,3 483 | Ses02F_impro01_M004,3 484 | Ses02F_impro01_M017,0 485 | Ses02F_impro01_M019,0 486 | Ses02F_script02_2_F000,3 487 | Ses02F_script02_2_F005,0 488 | Ses02F_script02_2_F006,0 489 | Ses02F_script02_2_F011,0 490 | Ses02F_script02_2_F015,1 491 | Ses02F_script02_2_F016,1 492 | Ses02F_script02_2_F017,1 493 | Ses02F_script02_2_F018,1 494 | Ses02F_script02_2_F019,1 495 | Ses02F_script02_2_F020,1 496 | Ses02F_script02_2_F021,1 497 | Ses02F_script02_2_F022,1 498 | Ses02F_script02_2_F024,1 499 | Ses02F_script02_2_F031,1 500 | Ses02F_script02_2_F033,1 501 | Ses02F_script02_2_F034,1 502 | Ses02F_script02_2_F035,1 503 | Ses02F_script02_2_F038,1 504 | Ses02F_script02_2_F041,2 505 | Ses02F_script02_2_F042,2 506 | Ses02F_script02_2_F043,2 507 | Ses02F_script02_2_F044,1 508 | Ses02F_script02_2_F045,2 509 | Ses02F_script02_2_M000,2 510 | Ses02F_script02_2_M001,3 511 | Ses02F_script02_2_M002,3 512 | Ses02F_script02_2_M003,3 513 | Ses02F_script02_2_M005,3 514 | Ses02F_script02_2_M007,3 515 | Ses02F_script02_2_M009,3 516 | Ses02F_script02_2_M010,3 517 | Ses02F_script02_2_M013,3 518 | Ses02F_script02_2_M016,3 519 | Ses02F_script02_2_M017,3 520 | Ses02F_script02_2_M018,3 521 | Ses02F_script02_2_M019,3 522 | Ses02F_script02_2_M020,3 523 | Ses02F_script02_2_M021,3 524 | Ses02F_script02_2_M022,3 525 | Ses02F_script02_2_M023,3 526 | Ses02F_script02_2_M029,0 527 | Ses02F_script02_2_M031,1 528 
| Ses02F_script02_2_M033,3 529 | Ses02F_script02_2_M034,3 530 | Ses02F_script02_2_M035,1 531 | Ses02F_script02_2_M036,1 532 | Ses02F_script02_2_M037,1 533 | Ses02F_script02_2_M038,1 534 | Ses02F_script02_2_M041,2 535 | Ses02F_script02_2_M042,2 536 | Ses02F_script02_2_M043,2 537 | Ses02F_script02_2_M045,2 538 | Ses02F_script02_2_M046,2 539 | Ses02F_impro02_F000,1 540 | Ses02F_impro02_F001,1 541 | Ses02F_impro02_F002,1 542 | Ses02F_impro02_F003,1 543 | Ses02F_impro02_F004,1 544 | Ses02F_impro02_F005,1 545 | Ses02F_impro02_F006,1 546 | Ses02F_impro02_F007,1 547 | Ses02F_impro02_F008,1 548 | Ses02F_impro02_F009,1 549 | Ses02F_impro02_F010,1 550 | Ses02F_impro02_F011,1 551 | Ses02F_impro02_F012,1 552 | Ses02F_impro02_F013,1 553 | Ses02F_impro02_M007,1 554 | Ses02F_impro02_M012,1 555 | Ses02F_impro03_F000,2 556 | Ses02F_impro03_F001,2 557 | Ses02F_impro03_F004,2 558 | Ses02F_impro03_F009,2 559 | Ses02F_impro03_F011,2 560 | Ses02F_impro03_F012,2 561 | Ses02F_impro03_F013,2 562 | Ses02F_impro03_F014,2 563 | Ses02F_impro03_F015,2 564 | Ses02F_impro03_F016,2 565 | Ses02F_impro03_F017,2 566 | Ses02F_impro03_F018,2 567 | Ses02F_impro03_F019,2 568 | Ses02F_impro03_F021,2 569 | Ses02F_impro03_F023,2 570 | Ses02F_impro03_F024,2 571 | Ses02F_impro03_F026,2 572 | Ses02F_impro03_F027,2 573 | Ses02F_impro03_F028,2 574 | Ses02F_impro03_F029,2 575 | Ses02F_impro03_F030,2 576 | Ses02F_impro03_M001,3 577 | Ses02F_impro03_M004,2 578 | Ses02F_impro03_M005,2 579 | Ses02F_impro03_M009,2 580 | Ses02F_impro03_M010,2 581 | Ses02F_impro03_M011,2 582 | Ses02F_impro03_M012,2 583 | Ses02F_impro03_M013,3 584 | Ses02F_impro03_M014,3 585 | Ses02F_impro03_M015,3 586 | Ses02F_impro03_M016,3 587 | Ses02F_impro03_M017,3 588 | Ses02F_impro03_M018,3 589 | Ses02F_impro03_M019,2 590 | Ses02F_impro03_M020,2 591 | Ses02F_impro03_M022,2 592 | Ses02F_impro03_M023,2 593 | Ses02F_impro03_M024,2 594 | Ses02F_impro03_M025,2 595 | Ses02F_impro03_M026,2 596 | Ses02F_impro06_F000,1 597 | Ses02F_impro06_F001,1 598 | 
Ses02F_impro06_F002,1 599 | Ses02F_impro06_F003,1 600 | Ses02F_impro06_F004,1 601 | Ses02F_impro06_F005,1 602 | Ses02F_impro06_F006,1 603 | Ses02F_impro06_F007,1 604 | Ses02F_impro06_F008,1 605 | Ses02F_impro06_F009,1 606 | Ses02F_impro06_F010,1 607 | Ses02F_impro06_F011,1 608 | Ses02F_impro06_F012,1 609 | Ses02F_impro06_F013,1 610 | Ses02F_impro06_F014,1 611 | Ses02F_impro06_M000,3 612 | Ses02F_impro06_M001,3 613 | Ses02F_impro06_M002,3 614 | Ses02F_impro06_M003,3 615 | Ses02F_impro06_M004,3 616 | Ses02F_impro06_M005,3 617 | Ses02F_impro06_M006,3 618 | Ses02F_impro06_M007,3 619 | Ses02F_impro06_M008,3 620 | Ses02F_impro06_M009,3 621 | Ses02F_impro06_M010,3 622 | Ses02F_impro06_M011,3 623 | Ses02F_impro06_M012,3 624 | Ses02F_impro06_M013,3 625 | Ses02F_impro06_M014,3 626 | Ses02F_impro06_M015,3 627 | Ses02F_impro06_M016,3 628 | Ses02F_impro06_M017,3 629 | Ses02F_impro06_M018,3 630 | Ses02F_impro06_M019,3 631 | Ses02F_impro05_F000,3 632 | Ses02F_impro05_F001,3 633 | Ses02F_impro05_F004,3 634 | Ses02F_impro05_F006,0 635 | Ses02F_impro05_F010,0 636 | Ses02F_impro05_F019,0 637 | Ses02F_impro05_M000,3 638 | Ses02F_impro05_M001,3 639 | Ses02F_impro05_M002,3 640 | Ses02F_impro05_M003,3 641 | Ses02F_impro05_M004,3 642 | Ses02F_impro05_M005,3 643 | Ses02F_impro05_M006,3 644 | Ses02F_impro05_M007,3 645 | Ses02F_impro05_M008,3 646 | Ses02F_impro05_M009,3 647 | Ses02F_impro05_M010,3 648 | Ses02F_impro05_M011,3 649 | Ses02F_impro05_M012,3 650 | Ses02F_impro05_M013,3 651 | Ses02F_impro05_M014,3 652 | Ses02F_impro05_M015,3 653 | Ses02F_impro05_M016,3 654 | Ses02F_impro05_M017,3 655 | Ses02F_impro05_M018,3 656 | Ses02F_impro05_M019,3 657 | Ses02F_impro05_M020,3 658 | Ses02F_impro05_M021,3 659 | Ses02M_impro03_F000,2 660 | Ses02M_impro03_F001,2 661 | Ses02M_impro03_F003,2 662 | Ses02M_impro03_F004,2 663 | Ses02M_impro03_F005,2 664 | Ses02M_impro03_F007,2 665 | Ses02M_impro03_F008,2 666 | Ses02M_impro03_F011,2 667 | Ses02M_impro03_F012,2 668 | Ses02M_impro03_F013,2 669 | 
Ses02M_impro03_F014,2 670 | Ses02M_impro03_F015,2 671 | Ses02M_impro03_F016,2 672 | Ses02M_impro03_F017,2 673 | Ses02M_impro03_F018,2 674 | Ses02M_impro03_F020,2 675 | Ses02M_impro03_F023,2 676 | Ses02M_impro03_F024,2 677 | Ses02M_impro03_F028,2 678 | Ses02M_impro03_M000,2 679 | Ses02M_impro03_M001,2 680 | Ses02M_impro03_M005,2 681 | Ses02M_impro03_M008,2 682 | Ses02M_impro03_M009,2 683 | Ses02M_impro03_M010,2 684 | Ses02M_impro03_M011,2 685 | Ses02M_impro03_M012,2 686 | Ses02M_impro03_M014,2 687 | Ses02M_impro03_M015,2 688 | Ses02M_impro03_M016,2 689 | Ses02M_impro03_M017,2 690 | Ses02M_impro03_M018,2 691 | Ses02M_impro03_M019,2 692 | Ses02M_impro03_M020,2 693 | Ses02M_impro03_M021,2 694 | Ses02M_impro03_M024,2 695 | Ses02M_impro03_M026,2 696 | Ses02M_impro03_M027,2 697 | Ses02M_impro03_M028,2 698 | Ses02M_impro03_M030,2 699 | Ses02M_impro03_M031,2 700 | Ses02M_script01_3_F001,2 701 | Ses02M_script01_3_F002,2 702 | Ses02M_script01_3_F003,2 703 | Ses02M_script01_3_F007,2 704 | Ses02M_script01_3_F008,3 705 | Ses02M_script01_3_F009,2 706 | Ses02M_script01_3_F012,2 707 | Ses02M_script01_3_F013,2 708 | Ses02M_script01_3_F019,1 709 | Ses02M_script01_3_F021,1 710 | Ses02M_script01_3_F025,1 711 | Ses02M_script01_3_F026,1 712 | Ses02M_script01_3_F027,1 713 | Ses02M_script01_3_F028,2 714 | Ses02M_script01_3_M000,2 715 | Ses02M_script01_3_M010,2 716 | Ses02M_script01_3_M014,2 717 | Ses02M_script01_3_M015,2 718 | Ses02M_script01_3_M020,1 719 | Ses02M_script01_3_M021,1 720 | Ses02M_script01_3_M022,1 721 | Ses02M_script01_3_M025,1 722 | Ses02M_script01_3_M026,1 723 | Ses02M_script01_3_M027,1 724 | Ses02M_script01_3_M028,1 725 | Ses02M_script01_3_M029,1 726 | Ses02M_script01_3_M030,1 727 | Ses02M_script01_3_M031,1 728 | Ses02M_script01_3_M032,1 729 | Ses02M_script01_3_M033,1 730 | Ses02M_script01_3_M034,1 731 | Ses02M_script01_3_M035,1 732 | Ses02M_script01_3_M036,3 733 | Ses02F_script02_1_F001,3 734 | Ses02F_script02_1_F005,3 735 | Ses02F_script02_1_F019,3 736 | 
Ses02F_script02_1_F021,3 737 | Ses02F_script02_1_M000,2 738 | Ses02F_script02_1_M001,2 739 | Ses02F_script02_1_M002,2 740 | Ses02F_script02_1_M003,2 741 | Ses02F_script02_1_M004,2 742 | Ses02F_script02_1_M005,2 743 | Ses02F_script02_1_M006,2 744 | Ses02F_script02_1_M009,3 745 | Ses02F_script02_1_M010,3 746 | Ses02F_script02_1_M012,3 747 | Ses02F_script02_1_M013,3 748 | Ses02F_script02_1_M014,2 749 | Ses02F_script02_1_M015,2 750 | Ses02F_script02_1_M016,2 751 | Ses02F_script02_1_M017,3 752 | Ses02F_script02_1_M019,3 753 | Ses02F_script02_1_M020,3 754 | Ses02F_script02_1_M021,3 755 | Ses02F_script02_1_M024,2 756 | Ses02F_script02_1_M025,2 757 | Ses02F_script02_1_M026,2 758 | Ses02F_script02_1_M028,2 759 | Ses02F_script02_1_M031,3 760 | Ses02F_script02_1_M033,3 761 | Ses02F_script02_1_M035,2 762 | Ses02F_script02_1_M036,2 763 | Ses02F_script02_1_M039,2 764 | Ses02F_script02_1_M040,2 765 | Ses02F_script02_1_M041,2 766 | Ses02F_script02_1_M042,2 767 | Ses02F_script02_1_M043,3 768 | Ses02F_script02_1_M044,3 769 | Ses02M_script01_1_F015,3 770 | Ses02M_script01_1_F018,3 771 | Ses02M_script01_1_F021,3 772 | Ses02M_script01_1_F023,3 773 | Ses02M_script01_1_F025,3 774 | Ses02M_script01_1_F026,0 775 | Ses02M_script01_1_F027,0 776 | Ses02M_script01_1_F032,0 777 | Ses02M_script01_1_F033,0 778 | Ses02M_script01_1_F034,0 779 | Ses02M_script01_1_F035,0 780 | Ses02M_script01_1_F036,0 781 | Ses02M_script01_1_F037,0 782 | Ses02M_script01_1_F041,1 783 | Ses02M_script01_1_M003,1 784 | Ses02M_script01_1_M004,1 785 | Ses02M_script01_1_M005,1 786 | Ses02M_script01_1_M012,0 787 | Ses02M_script01_1_M013,0 788 | Ses02M_script01_1_M016,3 789 | Ses02M_script01_1_M017,3 790 | Ses02M_script01_1_M018,3 791 | Ses02M_script01_1_M021,0 792 | Ses02M_script01_1_M024,0 793 | Ses02M_script01_1_M025,0 794 | Ses02M_script01_1_M026,3 795 | Ses02M_script01_1_M027,3 796 | Ses02M_script01_1_M028,1 797 | Ses02M_script01_1_M035,1 798 | Ses02M_script01_1_M036,1 799 | Ses02M_script01_1_M037,0 800 | 
Ses02M_script01_1_M038,0 801 | Ses02M_script01_1_M039,0 802 | Ses02M_script01_1_M041,1 803 | Ses02F_script03_1_F005,2 804 | Ses02F_script03_1_F006,2 805 | Ses02F_script03_1_F007,2 806 | Ses02F_script03_1_F008,2 807 | Ses02F_script03_1_F009,2 808 | Ses02F_script03_1_F010,2 809 | Ses02F_script03_1_F012,3 810 | Ses02F_script03_1_F013,3 811 | Ses02F_script03_1_F014,3 812 | Ses02F_script03_1_F015,3 813 | Ses02F_script03_1_F016,3 814 | Ses02F_script03_1_F017,3 815 | Ses02F_script03_1_F019,2 816 | Ses02F_script03_1_F020,2 817 | Ses02F_script03_1_F021,2 818 | Ses02F_script03_1_F022,2 819 | Ses02F_script03_1_F023,2 820 | Ses02F_script03_1_F024,2 821 | Ses02F_script03_1_F025,2 822 | Ses02F_script03_1_F026,2 823 | Ses02F_script03_1_F027,3 824 | Ses02F_script03_1_F028,2 825 | Ses02F_script03_1_F029,2 826 | Ses02F_script03_1_M001,3 827 | Ses02F_script03_1_M004,3 828 | Ses02F_script03_1_M005,3 829 | Ses02F_script03_1_M007,3 830 | Ses02F_script03_1_M008,3 831 | Ses02F_script03_1_M009,3 832 | Ses02F_script03_1_M010,3 833 | Ses02F_script03_1_M012,3 834 | Ses02F_script03_1_M013,3 835 | Ses02F_script03_1_M014,3 836 | Ses02F_script03_1_M015,3 837 | Ses02F_script03_1_M016,3 838 | Ses02F_script03_1_M017,3 839 | Ses02F_script03_1_M018,3 840 | Ses02F_script03_1_M019,2 841 | Ses02F_script03_1_M020,2 842 | Ses02F_script03_1_M021,3 843 | Ses02F_script03_1_M022,2 844 | Ses02F_script03_1_M023,2 845 | Ses02F_script03_1_M024,3 846 | Ses02F_script03_1_M025,3 847 | Ses02F_script03_1_M026,3 848 | Ses02F_script03_1_M028,3 849 | Ses02F_script03_1_M029,3 850 | Ses02F_script03_1_M031,3 851 | Ses02F_script03_1_M032,2 852 | Ses02F_script03_1_M033,3 853 | Ses02F_script03_1_M034,3 854 | Ses02F_script03_2_F007,3 855 | Ses02F_script03_2_F011,3 856 | Ses02F_script03_2_F012,3 857 | Ses02F_script03_2_F013,0 858 | Ses02F_script03_2_F014,0 859 | Ses02F_script03_2_F016,2 860 | Ses02F_script03_2_F018,0 861 | Ses02F_script03_2_F020,0 862 | Ses02F_script03_2_F025,0 863 | Ses02F_script03_2_F029,0 864 | 
Ses02F_script03_2_F031,0 865 | Ses02F_script03_2_F034,0 866 | Ses02F_script03_2_F035,0 867 | Ses02F_script03_2_F036,0 868 | Ses02F_script03_2_F037,0 869 | Ses02F_script03_2_F038,0 870 | Ses02F_script03_2_F039,0 871 | Ses02F_script03_2_F040,0 872 | Ses02F_script03_2_F041,0 873 | Ses02F_script03_2_F042,0 874 | Ses02F_script03_2_F043,0 875 | Ses02F_script03_2_M000,3 876 | Ses02F_script03_2_M001,3 877 | Ses02F_script03_2_M006,0 878 | Ses02F_script03_2_M007,0 879 | Ses02F_script03_2_M009,3 880 | Ses02F_script03_2_M011,3 881 | Ses02F_script03_2_M013,0 882 | Ses02F_script03_2_M027,0 883 | Ses02F_script03_2_M030,0 884 | Ses02F_script03_2_M036,0 885 | Ses02F_script03_2_M037,0 886 | Ses02F_script03_2_M038,0 887 | Ses02F_script03_2_M039,0 888 | Ses02F_script03_2_M041,0 889 | Ses02F_script03_2_M042,0 890 | Ses02F_script03_2_M043,0 891 | Ses02F_script03_2_M044,0 892 | Ses02F_script03_2_M045,0 893 | Ses02F_script03_2_M046,0 894 | Ses02F_script03_2_M047,0 895 | Ses02M_script02_2_F005,0 896 | Ses02M_script02_2_F006,0 897 | Ses02M_script02_2_F011,0 898 | Ses02M_script02_2_F013,0 899 | Ses02M_script02_2_F015,1 900 | Ses02M_script02_2_F016,1 901 | Ses02M_script02_2_F017,1 902 | Ses02M_script02_2_F018,1 903 | Ses02M_script02_2_F019,1 904 | Ses02M_script02_2_F020,1 905 | Ses02M_script02_2_F021,1 906 | Ses02M_script02_2_F022,1 907 | Ses02M_script02_2_F031,1 908 | Ses02M_script02_2_F034,1 909 | Ses02M_script02_2_F037,2 910 | Ses02M_script02_2_F038,1 911 | Ses02M_script02_2_F042,2 912 | Ses02M_script02_2_F044,1 913 | Ses02M_script02_2_F045,2 914 | Ses02M_script02_2_F046,3 915 | Ses02M_script02_2_M000,3 916 | Ses02M_script02_2_M001,3 917 | Ses02M_script02_2_M002,3 918 | Ses02M_script02_2_M010,3 919 | Ses02M_script02_2_M015,3 920 | Ses02M_script02_2_M016,3 921 | Ses02M_script02_2_M017,3 922 | Ses02M_script02_2_M019,2 923 | Ses02M_script02_2_M020,2 924 | Ses02M_script02_2_M021,3 925 | Ses02M_script02_2_M022,3 926 | Ses02M_script02_2_M023,3 927 | Ses02M_script02_2_M025,3 928 | 
Ses02M_script02_2_M029,0 929 | Ses02M_script02_2_M035,1 930 | Ses02M_script02_2_M037,1 931 | Ses02M_script02_2_M038,1 932 | Ses02M_script02_2_M040,2 933 | Ses02M_script02_2_M041,2 934 | Ses02M_script02_2_M042,2 935 | Ses02M_script02_2_M045,2 936 | Ses02M_script02_2_M046,2 937 | Ses02M_script03_1_F002,2 938 | Ses02M_script03_1_F003,2 939 | Ses02M_script03_1_F005,2 940 | Ses02M_script03_1_F006,2 941 | Ses02M_script03_1_F007,2 942 | Ses02M_script03_1_F012,2 943 | Ses02M_script03_1_F015,1 944 | Ses02M_script03_1_F021,2 945 | Ses02M_script03_1_F022,2 946 | Ses02M_script03_1_F023,2 947 | Ses02M_script03_1_F024,2 948 | Ses02M_script03_1_F025,2 949 | Ses02M_script03_1_F027,2 950 | Ses02M_script03_1_F028,2 951 | Ses02M_script03_1_F029,2 952 | Ses02M_script03_1_M005,2 953 | Ses02M_script03_1_M007,2 954 | Ses02M_script03_1_M008,2 955 | Ses02M_script03_1_M009,1 956 | Ses02M_script03_1_M010,2 957 | Ses02M_script03_1_M011,2 958 | Ses02M_script03_1_M012,2 959 | Ses02M_script03_1_M014,2 960 | Ses02M_script03_1_M015,2 961 | Ses02M_script03_1_M016,1 962 | Ses02M_script03_1_M018,2 963 | Ses02M_script03_1_M019,2 964 | Ses02M_script03_1_M020,2 965 | Ses02M_script03_1_M021,2 966 | Ses02M_script03_1_M022,2 967 | Ses02M_script03_1_M024,2 968 | Ses02M_script03_1_M029,2 969 | Ses02M_script03_1_M031,2 970 | Ses02F_script01_2_F005,0 971 | Ses02F_script01_2_F012,0 972 | Ses02F_script01_2_F013,0 973 | Ses02F_script01_2_M001,3 974 | Ses02F_script01_2_M002,3 975 | Ses02F_script01_2_M004,3 976 | Ses02F_script01_2_M005,3 977 | Ses02F_script01_2_M006,3 978 | Ses02F_script01_2_M007,3 979 | Ses02F_script01_2_M008,3 980 | Ses02F_script01_2_M016,0 981 | Ses02F_script01_2_M017,0 982 | Ses02F_script01_2_M018,0 983 | Ses02F_impro08_F000,3 984 | Ses02F_impro08_F001,3 985 | Ses02F_impro08_F002,3 986 | Ses02F_impro08_F003,3 987 | Ses02F_impro08_F004,3 988 | Ses02F_impro08_F005,3 989 | Ses02F_impro08_F006,3 990 | Ses02F_impro08_F007,3 991 | Ses02F_impro08_F008,3 992 | Ses02F_impro08_F009,3 993 | 
Ses02F_impro08_F010,3 994 | Ses02F_impro08_F011,3 995 | Ses02F_impro08_F012,3 996 | Ses02F_impro08_F013,3 997 | Ses02F_impro08_F014,3 998 | Ses02F_impro08_F015,3 999 | Ses02F_impro08_F016,3 1000 | Ses02F_impro08_F017,3 1001 | Ses02F_impro08_F018,3 1002 | Ses02F_impro08_F019,3 1003 | Ses02F_impro08_F020,3 1004 | Ses02F_impro08_F021,3 1005 | Ses02F_impro08_F022,3 1006 | Ses02F_impro08_F023,3 1007 | Ses02F_impro08_F024,3 1008 | Ses02F_impro08_F025,3 1009 | Ses02F_impro08_F026,3 1010 | Ses02F_impro08_F027,3 1011 | Ses02F_impro08_M003,3 1012 | Ses02F_impro08_M005,3 1013 | Ses02F_impro08_M006,3 1014 | Ses02F_impro08_M007,3 1015 | Ses02F_impro08_M008,3 1016 | Ses02F_impro08_M009,3 1017 | Ses02F_impro08_M010,3 1018 | Ses02F_impro08_M011,3 1019 | Ses02F_impro08_M012,3 1020 | Ses02F_impro08_M013,3 1021 | Ses02F_impro08_M015,3 1022 | Ses02F_impro08_M016,3 1023 | Ses02F_impro08_M018,3 1024 | Ses02F_impro08_M019,3 1025 | -------------------------------------------------------------------------------- /Feature_Extractor/iemocap_data/valid3.csv: -------------------------------------------------------------------------------- 1 | name,label 2 | Ses03F_script03_2_F004,3 3 | Ses03F_script03_2_F008,3 4 | Ses03F_script03_2_F013,3 5 | Ses03F_script03_2_F014,0 6 | Ses03F_script03_2_F015,0 7 | Ses03F_script03_2_F017,3 8 | Ses03F_script03_2_F018,0 9 | Ses03F_script03_2_F023,0 10 | Ses03F_script03_2_F026,0 11 | Ses03F_script03_2_F035,0 12 | Ses03F_script03_2_F037,0 13 | Ses03F_script03_2_F038,0 14 | Ses03F_script03_2_F039,0 15 | Ses03F_script03_2_F040,0 16 | Ses03F_script03_2_F041,0 17 | Ses03F_script03_2_F042,0 18 | Ses03F_script03_2_F043,0 19 | Ses03F_script03_2_M000,0 20 | Ses03F_script03_2_M001,0 21 | Ses03F_script03_2_M002,0 22 | Ses03F_script03_2_M005,0 23 | Ses03F_script03_2_M007,0 24 | Ses03F_script03_2_M008,0 25 | Ses03F_script03_2_M009,3 26 | Ses03F_script03_2_M010,3 27 | Ses03F_script03_2_M011,3 28 | Ses03F_script03_2_M012,3 29 | Ses03F_script03_2_M014,0 30 | 
Ses03F_script03_2_M015,0 31 | Ses03F_script03_2_M016,0 32 | Ses03F_script03_2_M017,0 33 | Ses03F_script03_2_M020,0 34 | Ses03F_script03_2_M024,0 35 | Ses03F_script03_2_M026,0 36 | Ses03F_script03_2_M027,0 37 | Ses03F_script03_2_M028,0 38 | Ses03F_script03_2_M029,0 39 | Ses03F_script03_2_M030,0 40 | Ses03F_script03_2_M034,0 41 | Ses03F_script03_2_M035,3 42 | Ses03F_script03_2_M038,0 43 | Ses03F_script03_2_M039,0 44 | Ses03F_script03_2_M040,0 45 | Ses03F_script03_2_M041,0 46 | Ses03F_script03_2_M042,0 47 | Ses03F_script03_2_M043,0 48 | Ses03F_script03_2_M044,0 49 | Ses03M_impro02_F000,2 50 | Ses03M_impro02_F003,2 51 | Ses03M_impro02_F013,1 52 | Ses03M_impro02_F014,1 53 | Ses03M_impro02_F015,1 54 | Ses03M_impro02_F016,1 55 | Ses03M_impro02_F017,1 56 | Ses03M_impro02_F022,1 57 | Ses03M_impro02_F023,1 58 | Ses03M_impro02_F024,1 59 | Ses03M_impro02_F025,1 60 | Ses03M_impro02_F026,1 61 | Ses03M_impro02_F027,1 62 | Ses03M_impro02_F028,1 63 | Ses03M_impro02_F029,1 64 | Ses03M_impro02_F030,1 65 | Ses03M_impro02_F031,1 66 | Ses03M_impro02_F032,1 67 | Ses03M_impro02_F033,1 68 | Ses03M_impro02_F034,1 69 | Ses03M_impro02_M000,1 70 | Ses03M_impro02_M001,1 71 | Ses03M_impro02_M002,1 72 | Ses03M_impro02_M003,1 73 | Ses03M_impro02_M004,1 74 | Ses03M_impro02_M005,1 75 | Ses03M_impro02_M006,1 76 | Ses03M_impro02_M011,1 77 | Ses03M_impro02_M012,1 78 | Ses03M_impro02_M013,3 79 | Ses03M_impro02_M015,3 80 | Ses03M_impro02_M016,1 81 | Ses03M_impro02_M019,1 82 | Ses03M_impro02_M024,1 83 | Ses03M_impro02_M025,1 84 | Ses03M_impro02_M026,1 85 | Ses03M_impro02_M027,1 86 | Ses03M_impro02_M028,1 87 | Ses03M_impro02_M029,1 88 | Ses03M_impro02_M030,1 89 | Ses03M_impro02_M031,1 90 | Ses03M_impro06_F000,1 91 | Ses03M_impro06_F001,1 92 | Ses03M_impro06_F002,1 93 | Ses03M_impro06_F003,1 94 | Ses03M_impro06_F004,1 95 | Ses03M_impro06_F005,1 96 | Ses03M_impro06_F007,1 97 | Ses03M_impro06_F008,1 98 | Ses03M_impro06_F009,1 99 | Ses03M_impro06_F010,1 100 | Ses03M_impro06_F011,1 101 | Ses03M_impro06_F012,1 
102 | Ses03M_impro06_F013,1 103 | Ses03M_impro06_F014,1 104 | Ses03M_impro06_F015,1 105 | Ses03M_impro06_F016,1 106 | Ses03M_impro06_F017,1 107 | Ses03M_impro06_F018,1 108 | Ses03M_impro06_F019,1 109 | Ses03M_impro06_F020,1 110 | Ses03M_impro06_M000,1 111 | Ses03M_impro06_M001,1 112 | Ses03M_impro06_M002,1 113 | Ses03M_impro06_M003,1 114 | Ses03M_impro06_M004,1 115 | Ses03M_impro06_M005,1 116 | Ses03M_impro06_M006,1 117 | Ses03M_impro06_M007,1 118 | Ses03M_impro06_M008,1 119 | Ses03M_impro06_M009,1 120 | Ses03M_impro06_M010,1 121 | Ses03M_impro06_M011,1 122 | Ses03M_impro06_M012,1 123 | Ses03M_impro06_M013,1 124 | Ses03M_impro06_M014,1 125 | Ses03M_impro06_M015,1 126 | Ses03M_impro06_M016,1 127 | Ses03M_impro06_M017,1 128 | Ses03M_impro06_M018,1 129 | Ses03M_impro06_M019,1 130 | Ses03M_impro06_M020,1 131 | Ses03M_impro06_M021,1 132 | Ses03M_impro06_M022,1 133 | Ses03M_impro06_M023,1 134 | Ses03M_impro06_M024,1 135 | Ses03M_impro06_M025,1 136 | Ses03M_impro06_M026,1 137 | Ses03M_impro06_M027,1 138 | Ses03M_impro06_M028,1 139 | Ses03M_impro06_M029,1 140 | Ses03M_impro06_M030,1 141 | Ses03M_impro06_M031,1 142 | Ses03M_impro06_M032,1 143 | Ses03F_script01_1_F004,3 144 | Ses03F_script01_1_F005,3 145 | Ses03F_script01_1_F007,3 146 | Ses03F_script01_1_F010,3 147 | Ses03F_script01_1_F014,3 148 | Ses03F_script01_1_F017,0 149 | Ses03F_script01_1_F019,3 150 | Ses03F_script01_1_F021,3 151 | Ses03F_script01_1_F030,3 152 | Ses03F_script01_1_F031,3 153 | Ses03F_script01_1_F045,3 154 | Ses03F_script01_1_F046,3 155 | Ses03F_script01_1_F050,3 156 | Ses03F_script01_1_F052,3 157 | Ses03F_script01_1_F053,3 158 | Ses03F_script01_1_M000,3 159 | Ses03F_script01_1_M001,3 160 | Ses03F_script01_1_M002,3 161 | Ses03F_script01_1_M003,3 162 | Ses03F_script01_1_M004,3 163 | Ses03F_script01_1_M005,3 164 | Ses03F_script01_1_M006,3 165 | Ses03F_script01_1_M007,3 166 | Ses03F_script01_1_M008,3 167 | Ses03F_script01_1_M009,3 168 | Ses03F_script01_1_M016,3 169 | Ses03F_script01_1_M017,3 170 | 
Ses03F_script01_1_M018,3 171 | Ses03F_script01_1_M020,3 172 | Ses03F_script01_1_M021,0 173 | Ses03F_script01_1_M022,0 174 | Ses03F_script01_1_M026,1 175 | Ses03F_script01_1_M027,0 176 | Ses03F_script01_1_M028,3 177 | Ses03F_script01_1_M029,3 178 | Ses03F_script01_1_M030,3 179 | Ses03F_script01_1_M031,3 180 | Ses03F_script01_1_M034,3 181 | Ses03F_script01_1_M036,3 182 | Ses03F_script01_1_M041,3 183 | Ses03F_script01_1_M042,3 184 | Ses03F_script01_1_M045,0 185 | Ses03F_script01_1_M046,0 186 | Ses03F_script01_1_M047,3 187 | Ses03F_script01_1_M048,3 188 | Ses03F_script01_1_M049,3 189 | Ses03F_script01_1_M050,3 190 | Ses03F_script01_1_M052,3 191 | Ses03F_script01_1_M053,3 192 | Ses03F_impro02_F000,3 193 | Ses03F_impro02_F001,3 194 | Ses03F_impro02_F005,2 195 | Ses03F_impro02_F007,2 196 | Ses03F_impro02_F008,1 197 | Ses03F_impro02_F010,1 198 | Ses03F_impro02_F011,1 199 | Ses03F_impro02_F012,1 200 | Ses03F_impro02_F015,1 201 | Ses03F_impro02_F018,1 202 | Ses03F_impro02_F019,1 203 | Ses03F_impro02_F020,1 204 | Ses03F_impro02_F021,1 205 | Ses03F_impro02_F023,1 206 | Ses03F_impro02_F024,1 207 | Ses03F_impro02_F025,1 208 | Ses03F_impro02_F026,1 209 | Ses03F_impro02_F027,1 210 | Ses03F_impro02_F028,1 211 | Ses03F_impro02_F029,1 212 | Ses03F_impro02_F031,1 213 | Ses03F_impro02_F032,1 214 | Ses03F_impro02_F034,1 215 | Ses03F_impro02_F035,1 216 | Ses03F_impro02_F036,1 217 | Ses03F_impro02_F037,1 218 | Ses03F_impro02_F038,1 219 | Ses03F_impro02_F039,1 220 | Ses03F_impro02_F040,1 221 | Ses03F_impro02_F041,1 222 | Ses03F_impro02_F042,1 223 | Ses03F_impro02_F043,1 224 | Ses03F_impro02_M000,3 225 | Ses03F_impro02_M001,2 226 | Ses03F_impro02_M007,0 227 | Ses03F_impro02_M014,1 228 | Ses03F_impro02_M016,1 229 | Ses03F_impro02_M017,1 230 | Ses03F_impro02_M018,1 231 | Ses03F_impro02_M019,1 232 | Ses03F_impro02_M020,1 233 | Ses03F_impro02_M021,1 234 | Ses03F_impro02_M024,1 235 | Ses03F_impro02_M025,1 236 | Ses03F_impro02_M027,0 237 | Ses03F_impro02_M028,1 238 | Ses03F_impro02_M029,1 239 | 
Ses03F_impro02_M031,1 240 | Ses03M_impro07_F000,2 241 | Ses03M_impro07_F001,2 242 | Ses03M_impro07_F006,2 243 | Ses03M_impro07_F007,2 244 | Ses03M_impro07_F008,2 245 | Ses03M_impro07_F009,2 246 | Ses03M_impro07_F010,2 247 | Ses03M_impro07_F011,2 248 | Ses03M_impro07_F012,2 249 | Ses03M_impro07_F013,2 250 | Ses03M_impro07_F014,2 251 | Ses03M_impro07_F016,2 252 | Ses03M_impro07_F017,2 253 | Ses03M_impro07_F018,2 254 | Ses03M_impro07_F019,2 255 | Ses03M_impro07_F023,3 256 | Ses03M_impro07_M002,2 257 | Ses03M_impro07_M003,2 258 | Ses03M_impro07_M004,2 259 | Ses03M_impro07_M005,2 260 | Ses03M_impro07_M006,2 261 | Ses03M_impro07_M007,2 262 | Ses03M_impro07_M008,2 263 | Ses03M_impro07_M009,3 264 | Ses03M_impro07_M010,2 265 | Ses03M_impro07_M011,2 266 | Ses03M_impro07_M013,2 267 | Ses03M_impro07_M014,2 268 | Ses03M_impro07_M015,2 269 | Ses03M_impro07_M016,2 270 | Ses03M_impro07_M017,2 271 | Ses03M_impro07_M018,2 272 | Ses03M_impro07_M019,2 273 | Ses03M_impro07_M020,2 274 | Ses03M_impro07_M022,2 275 | Ses03M_impro07_M023,2 276 | Ses03M_impro07_M024,2 277 | Ses03M_impro07_M025,2 278 | Ses03F_impro03_F004,2 279 | Ses03F_impro03_F005,2 280 | Ses03F_impro03_F006,2 281 | Ses03F_impro03_F007,2 282 | Ses03F_impro03_F008,2 283 | Ses03F_impro03_F009,2 284 | Ses03F_impro03_F011,2 285 | Ses03F_impro03_F012,2 286 | Ses03F_impro03_F013,2 287 | Ses03F_impro03_F014,2 288 | Ses03F_impro03_F015,2 289 | Ses03F_impro03_F016,2 290 | Ses03F_impro03_F017,2 291 | Ses03F_impro03_F018,2 292 | Ses03F_impro03_F019,2 293 | Ses03F_impro03_F020,2 294 | Ses03F_impro03_F021,2 295 | Ses03F_impro03_F024,2 296 | Ses03F_impro03_M000,2 297 | Ses03F_impro03_M001,2 298 | Ses03F_impro03_M002,2 299 | Ses03F_impro03_M005,2 300 | Ses03F_impro03_M006,2 301 | Ses03F_impro03_M007,2 302 | Ses03F_impro03_M010,2 303 | Ses03F_impro03_M015,2 304 | Ses03F_impro03_M021,2 305 | Ses03F_script02_1_F000,1 306 | Ses03F_script02_1_F003,0 307 | Ses03F_script02_1_F010,1 308 | Ses03F_script02_1_F011,1 309 | Ses03F_script02_1_F012,0 
310 | Ses03F_script02_1_M000,2 311 | Ses03F_script02_1_M001,2 312 | Ses03F_script02_1_M002,2 313 | Ses03F_script02_1_M003,2 314 | Ses03F_script02_1_M004,2 315 | Ses03F_script02_1_M005,2 316 | Ses03F_script02_1_M006,2 317 | Ses03F_script02_1_M009,2 318 | Ses03F_script02_1_M010,2 319 | Ses03F_script02_1_M011,2 320 | Ses03F_script02_1_M013,2 321 | Ses03F_script02_1_M014,2 322 | Ses03F_script02_1_M015,2 323 | Ses03F_script02_1_M016,2 324 | Ses03F_script02_1_M017,3 325 | Ses03F_script02_1_M019,3 326 | Ses03F_script02_1_M020,3 327 | Ses03F_script02_1_M021,2 328 | Ses03F_script02_1_M022,3 329 | Ses03F_script02_1_M023,3 330 | Ses03F_script02_1_M036,2 331 | Ses03F_script02_1_M038,2 332 | Ses03F_script02_1_M039,2 333 | Ses03F_script02_1_M040,2 334 | Ses03F_script02_1_M041,2 335 | Ses03F_script02_1_M042,2 336 | Ses03F_script02_1_M044,3 337 | Ses03M_script03_1_F003,2 338 | Ses03M_script03_1_F004,2 339 | Ses03M_script03_1_F005,2 340 | Ses03M_script03_1_F006,2 341 | Ses03M_script03_1_F018,1 342 | Ses03M_script03_1_M004,2 343 | Ses03M_script03_1_M006,2 344 | Ses03M_script03_1_M035,3 345 | Ses03M_impro05a_F000,3 346 | Ses03M_impro05a_F001,3 347 | Ses03M_impro05a_F002,3 348 | Ses03M_impro05a_F005,3 349 | Ses03M_impro05a_F007,3 350 | Ses03M_impro05a_F008,3 351 | Ses03M_impro05a_F009,3 352 | Ses03M_impro05a_F011,3 353 | Ses03M_impro05a_F012,3 354 | Ses03M_impro05a_F014,3 355 | Ses03M_impro05a_F016,3 356 | Ses03M_impro05a_F018,0 357 | Ses03M_impro05a_F021,3 358 | Ses03M_impro05a_F023,3 359 | Ses03M_impro05a_F024,3 360 | Ses03M_impro05a_F025,3 361 | Ses03M_impro05a_F028,0 362 | Ses03M_impro05a_M000,0 363 | Ses03M_impro05a_M001,0 364 | Ses03M_impro05a_M002,0 365 | Ses03M_impro05a_M003,0 366 | Ses03M_impro05a_M004,0 367 | Ses03M_impro05a_M005,0 368 | Ses03M_impro05a_M006,0 369 | Ses03M_impro05a_M007,0 370 | Ses03M_impro05a_M010,0 371 | Ses03M_impro05a_M011,0 372 | Ses03M_impro05a_M012,0 373 | Ses03M_impro05a_M013,0 374 | Ses03M_impro05a_M014,0 375 | Ses03M_impro05a_M016,0 376 | 
Ses03M_impro05a_M017,0 377 | Ses03M_impro05a_M018,0 378 | Ses03M_impro05a_M020,0 379 | Ses03M_impro05a_M021,0 380 | Ses03M_impro05a_M022,0 381 | Ses03M_impro05a_M023,0 382 | Ses03M_impro05a_M024,0 383 | Ses03M_impro05a_M025,0 384 | Ses03M_impro05a_M026,0 385 | Ses03M_impro05a_M027,0 386 | Ses03M_impro05a_M028,0 387 | Ses03M_impro05a_M029,0 388 | Ses03M_impro05a_M030,2 389 | Ses03F_impro06_F000,1 390 | Ses03F_impro06_F001,1 391 | Ses03F_impro06_F002,1 392 | Ses03F_impro06_F003,1 393 | Ses03F_impro06_F004,1 394 | Ses03F_impro06_F005,1 395 | Ses03F_impro06_F006,1 396 | Ses03F_impro06_F007,1 397 | Ses03F_impro06_F008,1 398 | Ses03F_impro06_F009,1 399 | Ses03F_impro06_F010,1 400 | Ses03F_impro06_F011,1 401 | Ses03F_impro06_F012,1 402 | Ses03F_impro06_F013,1 403 | Ses03F_impro06_F014,1 404 | Ses03F_impro06_F015,1 405 | Ses03F_impro06_F016,1 406 | Ses03F_impro06_F017,1 407 | Ses03F_impro06_F018,1 408 | Ses03F_impro06_F019,1 409 | Ses03F_impro06_F020,1 410 | Ses03F_impro06_F021,1 411 | Ses03F_impro06_F022,1 412 | Ses03F_impro06_F023,1 413 | Ses03F_impro06_F024,1 414 | Ses03F_impro06_F025,1 415 | Ses03F_impro06_F026,1 416 | Ses03F_impro06_F027,1 417 | Ses03F_impro06_F028,1 418 | Ses03F_impro06_F029,1 419 | Ses03F_impro06_F030,1 420 | Ses03F_impro06_F031,1 421 | Ses03F_impro06_F032,1 422 | Ses03F_impro06_F033,1 423 | Ses03F_impro06_F034,1 424 | Ses03F_impro06_F035,1 425 | Ses03F_impro06_M000,3 426 | Ses03F_impro06_M001,3 427 | Ses03F_impro06_M002,1 428 | Ses03F_impro06_M003,3 429 | Ses03F_impro06_M004,1 430 | Ses03F_impro06_M005,1 431 | Ses03F_impro06_M006,1 432 | Ses03F_impro06_M007,1 433 | Ses03F_impro06_M008,1 434 | Ses03F_impro06_M009,1 435 | Ses03F_impro06_M010,1 436 | Ses03F_impro06_M011,1 437 | Ses03F_impro06_M012,1 438 | Ses03F_impro06_M013,1 439 | Ses03F_impro06_M014,1 440 | Ses03F_impro06_M015,1 441 | Ses03F_impro06_M016,1 442 | Ses03M_script03_2_F000,3 443 | Ses03M_script03_2_F001,0 444 | Ses03M_script03_2_F003,3 445 | Ses03M_script03_2_F004,3 446 | 
Ses03M_script03_2_F006,3 447 | Ses03M_script03_2_F010,0 448 | Ses03M_script03_2_F011,0 449 | Ses03M_script03_2_F012,3 450 | Ses03M_script03_2_F014,0 451 | Ses03M_script03_2_F016,3 452 | Ses03M_script03_2_F017,3 453 | Ses03M_script03_2_F024,0 454 | Ses03M_script03_2_F025,0 455 | Ses03M_script03_2_F028,0 456 | Ses03M_script03_2_F029,0 457 | Ses03M_script03_2_F030,0 458 | Ses03M_script03_2_F031,0 459 | Ses03M_script03_2_F032,0 460 | Ses03M_script03_2_F033,0 461 | Ses03M_script03_2_F036,3 462 | Ses03M_script03_2_F038,0 463 | Ses03M_script03_2_F039,0 464 | Ses03M_script03_2_F040,0 465 | Ses03M_script03_2_F041,0 466 | Ses03M_script03_2_F042,0 467 | Ses03M_script03_2_F043,0 468 | Ses03M_script03_2_F044,0 469 | Ses03M_script03_2_F045,0 470 | Ses03M_script03_2_M000,3 471 | Ses03M_script03_2_M003,0 472 | Ses03M_script03_2_M004,0 473 | Ses03M_script03_2_M005,0 474 | Ses03M_script03_2_M006,0 475 | Ses03M_script03_2_M007,0 476 | Ses03M_script03_2_M008,0 477 | Ses03M_script03_2_M009,3 478 | Ses03M_script03_2_M010,3 479 | Ses03M_script03_2_M011,0 480 | Ses03M_script03_2_M012,0 481 | Ses03M_script03_2_M014,0 482 | Ses03M_script03_2_M019,3 483 | Ses03M_script03_2_M020,3 484 | Ses03M_script03_2_M021,3 485 | Ses03M_script03_2_M029,0 486 | Ses03M_script03_2_M030,0 487 | Ses03M_script03_2_M031,0 488 | Ses03M_script03_2_M032,0 489 | Ses03M_script03_2_M033,0 490 | Ses03M_script03_2_M034,0 491 | Ses03M_script03_2_M038,0 492 | Ses03M_script03_2_M039,0 493 | Ses03M_script03_2_M040,0 494 | Ses03M_script03_2_M041,0 495 | Ses03M_script03_2_M042,0 496 | Ses03M_script03_2_M043,0 497 | Ses03M_script03_2_M044,0 498 | Ses03M_script03_2_M045,0 499 | Ses03M_script03_2_M046,0 500 | Ses03F_script01_3_F000,2 501 | Ses03F_script01_3_F001,2 502 | Ses03F_script01_3_F002,2 503 | Ses03F_script01_3_F003,1 504 | Ses03F_script01_3_F008,1 505 | Ses03F_script01_3_F011,2 506 | Ses03F_script01_3_F012,2 507 | Ses03F_script01_3_F013,2 508 | Ses03F_script01_3_F014,2 509 | Ses03F_script01_3_F018,2 510 | 
Ses03F_script01_3_F021,2 511 | Ses03F_script01_3_F023,2 512 | Ses03F_script01_3_F025,1 513 | Ses03F_script01_3_F026,1 514 | Ses03F_script01_3_F027,1 515 | Ses03F_script01_3_F028,1 516 | Ses03F_script01_3_F029,1 517 | Ses03F_script01_3_F030,1 518 | Ses03F_script01_3_F031,1 519 | Ses03F_script01_3_F032,1 520 | Ses03F_script01_3_F033,1 521 | Ses03F_script01_3_F035,2 522 | Ses03F_script01_3_M000,2 523 | Ses03F_script01_3_M001,3 524 | Ses03F_script01_3_M003,3 525 | Ses03F_script01_3_M004,3 526 | Ses03F_script01_3_M006,2 527 | Ses03F_script01_3_M009,2 528 | Ses03F_script01_3_M010,2 529 | Ses03F_script01_3_M011,2 530 | Ses03F_script01_3_M012,2 531 | Ses03F_script01_3_M013,2 532 | Ses03F_script01_3_M014,2 533 | Ses03F_script01_3_M015,2 534 | Ses03F_script01_3_M018,2 535 | Ses03F_script01_3_M019,2 536 | Ses03F_script01_3_M020,2 537 | Ses03F_script01_3_M021,2 538 | Ses03F_script01_3_M023,2 539 | Ses03F_script01_3_M024,2 540 | Ses03F_script01_3_M025,1 541 | Ses03F_script01_3_M026,3 542 | Ses03F_script01_3_M027,1 543 | Ses03F_script01_3_M028,1 544 | Ses03F_script01_3_M029,1 545 | Ses03F_script01_3_M030,1 546 | Ses03F_script01_3_M031,1 547 | Ses03F_script01_3_M032,1 548 | Ses03F_script01_3_M033,1 549 | Ses03F_script01_3_M034,1 550 | Ses03F_script01_3_M035,1 551 | Ses03F_script01_3_M036,1 552 | Ses03F_script01_3_M037,1 553 | Ses03F_script01_3_M038,1 554 | Ses03F_script01_3_M039,1 555 | Ses03F_script01_3_M040,1 556 | Ses03F_script01_3_M041,1 557 | Ses03F_script01_3_M042,1 558 | Ses03F_script01_3_M043,1 559 | Ses03F_script01_3_M045,2 560 | Ses03F_script01_3_M046,2 561 | Ses03M_script01_2_F000,0 562 | Ses03M_script01_2_F001,0 563 | Ses03M_script01_2_F006,0 564 | Ses03M_script01_2_F009,0 565 | Ses03M_script01_2_F010,1 566 | Ses03M_script01_2_F011,1 567 | Ses03M_script01_2_F012,0 568 | Ses03M_script01_2_F015,0 569 | Ses03M_script01_2_F017,1 570 | Ses03M_script01_2_F019,1 571 | Ses03M_script01_2_M001,3 572 | Ses03M_script01_2_M004,3 573 | Ses03M_script01_2_M005,3 574 | 
Ses03M_script01_2_M008,3 575 | Ses03M_script01_2_M014,0 576 | Ses03M_script01_2_M015,0 577 | Ses03M_script01_2_M016,0 578 | Ses03M_script01_2_M017,0 579 | Ses03F_script01_2_F000,0 580 | Ses03F_script01_2_F001,0 581 | Ses03F_script01_2_F002,0 582 | Ses03F_script01_2_F004,0 583 | Ses03F_script01_2_F006,0 584 | Ses03F_script01_2_F011,1 585 | Ses03F_script01_2_F013,0 586 | Ses03F_script01_2_F015,0 587 | Ses03F_script01_2_F016,0 588 | Ses03F_script01_2_F018,1 589 | Ses03F_script01_2_F020,1 590 | Ses03F_script01_2_F021,1 591 | Ses03F_script01_2_M001,3 592 | Ses03F_script01_2_M003,3 593 | Ses03F_script01_2_M004,3 594 | Ses03F_script01_2_M006,3 595 | Ses03F_script01_2_M007,3 596 | Ses03F_script01_2_M008,3 597 | Ses03F_script01_2_M009,3 598 | Ses03F_script01_2_M011,0 599 | Ses03F_script01_2_M013,0 600 | Ses03F_script01_2_M014,0 601 | Ses03F_script01_2_M015,0 602 | Ses03F_script01_2_M016,0 603 | Ses03F_script01_2_M017,0 604 | Ses03F_script01_2_M018,0 605 | Ses03M_impro04_F001,3 606 | Ses03M_impro04_F003,3 607 | Ses03M_impro04_F004,3 608 | Ses03M_impro04_F007,1 609 | Ses03M_impro04_F008,1 610 | Ses03M_impro04_F010,2 611 | Ses03M_impro04_F014,3 612 | Ses03M_impro04_F015,3 613 | Ses03M_impro04_F023,3 614 | Ses03M_impro04_F025,3 615 | Ses03M_impro04_F029,3 616 | Ses03M_impro04_F030,3 617 | Ses03M_impro04_F031,3 618 | Ses03M_impro04_F032,3 619 | Ses03M_impro04_M000,1 620 | Ses03M_impro04_M004,0 621 | Ses03M_impro04_M008,0 622 | Ses03M_impro04_M014,3 623 | Ses03M_impro04_M024,3 624 | Ses03M_impro04_M026,1 625 | Ses03M_impro04_M030,1 626 | Ses03M_impro04_M031,1 627 | Ses03M_impro04_M038,3 628 | Ses03M_impro04_M041,2 629 | Ses03F_impro04_F002,1 630 | Ses03F_impro04_F022,1 631 | Ses03F_impro04_F023,1 632 | Ses03F_impro04_F024,1 633 | Ses03F_impro04_M000,3 634 | Ses03F_impro04_M001,3 635 | Ses03F_impro04_M002,3 636 | Ses03F_impro04_M003,3 637 | Ses03F_impro04_M004,3 638 | Ses03F_impro04_M005,3 639 | Ses03F_impro04_M006,3 640 | Ses03F_impro04_M007,3 641 | Ses03F_impro04_M008,3 642 | 
Ses03F_impro04_M009,3 643 | Ses03F_impro04_M010,3 644 | Ses03F_impro04_M011,3 645 | Ses03F_impro04_M012,3 646 | Ses03F_impro04_M013,3 647 | Ses03F_impro04_M014,3 648 | Ses03F_impro04_M015,3 649 | Ses03F_impro04_M016,3 650 | Ses03F_impro04_M017,3 651 | Ses03F_impro04_M018,3 652 | Ses03F_impro04_M019,3 653 | Ses03M_script02_2_F000,0 654 | Ses03M_script02_2_F001,0 655 | Ses03M_script02_2_F002,0 656 | Ses03M_script02_2_F003,0 657 | Ses03M_script02_2_F004,0 658 | Ses03M_script02_2_F009,0 659 | Ses03M_script02_2_F011,0 660 | Ses03M_script02_2_F013,0 661 | Ses03M_script02_2_F016,0 662 | Ses03M_script02_2_F018,1 663 | Ses03M_script02_2_F023,1 664 | Ses03M_script02_2_F024,1 665 | Ses03M_script02_2_F025,1 666 | Ses03M_script02_2_F031,0 667 | Ses03M_script02_2_F033,0 668 | Ses03M_script02_2_F034,1 669 | Ses03M_script02_2_F035,1 670 | Ses03M_script02_2_F036,1 671 | Ses03M_script02_2_F037,1 672 | Ses03M_script02_2_F038,1 673 | Ses03M_script02_2_F040,1 674 | Ses03M_script02_2_F041,1 675 | Ses03M_script02_2_F042,2 676 | Ses03M_script02_2_F044,2 677 | Ses03M_script02_2_F046,2 678 | Ses03M_script02_2_F047,2 679 | Ses03M_script02_2_F048,2 680 | Ses03M_script02_2_F049,2 681 | Ses03M_script02_2_M000,2 682 | Ses03M_script02_2_M014,3 683 | Ses03M_script02_2_M017,3 684 | Ses03M_script02_2_M019,3 685 | Ses03M_script02_2_M020,3 686 | Ses03M_script02_2_M021,3 687 | Ses03M_script02_2_M022,3 688 | Ses03M_script02_2_M028,0 689 | Ses03M_script02_2_M029,0 690 | Ses03M_script02_2_M030,0 691 | Ses03M_script02_2_M031,0 692 | Ses03M_script02_2_M032,0 693 | Ses03M_script02_2_M033,0 694 | Ses03M_script02_2_M036,1 695 | Ses03M_script02_2_M037,1 696 | Ses03M_script02_2_M041,2 697 | Ses03M_script02_2_M042,2 698 | Ses03M_script02_2_M045,2 699 | Ses03M_script02_2_M046,2 700 | Ses03M_script02_1_F005,3 701 | Ses03M_script02_1_F016,3 702 | Ses03M_script02_1_M000,2 703 | Ses03M_script02_1_M001,2 704 | Ses03M_script02_1_M002,2 705 | Ses03M_script02_1_M003,2 706 | Ses03M_script02_1_M004,2 707 | 
Ses03M_script02_1_M005,2 708 | Ses03M_script02_1_M014,2 709 | Ses03M_script02_1_M015,2 710 | Ses03M_script02_1_M016,2 711 | Ses03M_script02_1_M017,3 712 | Ses03M_script02_1_M023,2 713 | Ses03M_script02_1_M024,2 714 | Ses03M_script02_1_M025,2 715 | Ses03M_script02_1_M026,2 716 | Ses03M_script02_1_M027,2 717 | Ses03M_script02_1_M034,2 718 | Ses03M_script02_1_M035,2 719 | Ses03M_script02_1_M036,2 720 | Ses03M_script02_1_M037,2 721 | Ses03M_script02_1_M038,2 722 | Ses03M_script02_1_M039,2 723 | Ses03M_script02_1_M040,2 724 | Ses03M_script02_1_M041,2 725 | Ses03M_script02_1_M042,2 726 | Ses03M_script02_1_M043,2 727 | Ses03F_impro05_F002,0 728 | Ses03F_impro05_F005,0 729 | Ses03F_impro05_F007,0 730 | Ses03F_impro05_F008,0 731 | Ses03F_impro05_F009,0 732 | Ses03F_impro05_F010,0 733 | Ses03F_impro05_F011,0 734 | Ses03F_impro05_F015,0 735 | Ses03F_impro05_F016,0 736 | Ses03F_impro05_F017,0 737 | Ses03F_impro05_M003,3 738 | Ses03F_impro05_M008,3 739 | Ses03F_impro05_M009,3 740 | Ses03F_impro05_M011,3 741 | Ses03F_impro05_M013,3 742 | Ses03F_impro05_M014,3 743 | Ses03M_impro08b_F005,0 744 | Ses03M_impro08b_F006,0 745 | Ses03M_impro08b_F021,3 746 | Ses03M_impro08b_F023,3 747 | Ses03M_impro08b_F024,3 748 | Ses03M_impro08b_M000,3 749 | Ses03M_impro08b_M001,3 750 | Ses03M_impro08b_M002,3 751 | Ses03M_impro08b_M003,3 752 | Ses03M_impro08b_M004,3 753 | Ses03M_impro08b_M005,3 754 | Ses03M_impro08b_M006,3 755 | Ses03M_impro08b_M007,3 756 | Ses03M_impro08b_M008,3 757 | Ses03M_impro08b_M009,3 758 | Ses03M_impro08b_M010,3 759 | Ses03M_impro08b_M011,3 760 | Ses03M_impro08b_M012,3 761 | Ses03M_impro08b_M013,3 762 | Ses03M_impro08b_M014,3 763 | Ses03M_impro08b_M015,3 764 | Ses03M_impro08b_M016,3 765 | Ses03M_impro08b_M017,3 766 | Ses03M_impro08b_M018,3 767 | Ses03M_impro08b_M019,3 768 | Ses03M_impro08b_M020,3 769 | Ses03M_impro08b_M021,3 770 | Ses03M_impro08b_M022,3 771 | Ses03M_impro03_F001,2 772 | Ses03M_impro03_F003,2 773 | Ses03M_impro03_F004,2 774 | Ses03M_impro03_F005,2 775 | 
Ses03M_impro03_F006,2 776 | Ses03M_impro03_F010,2 777 | Ses03M_impro03_F012,2 778 | Ses03M_impro03_F015,3 779 | Ses03M_impro03_F016,2 780 | Ses03M_impro03_F017,2 781 | Ses03M_impro03_F018,3 782 | Ses03M_impro03_F021,2 783 | Ses03M_impro03_F022,2 784 | Ses03M_impro03_F023,2 785 | Ses03M_impro03_F024,2 786 | Ses03M_impro03_F025,2 787 | Ses03M_impro03_F026,2 788 | Ses03M_impro03_F027,2 789 | Ses03M_impro03_F033,2 790 | Ses03M_impro03_F036,3 791 | Ses03M_impro03_M000,2 792 | Ses03M_impro03_M001,2 793 | Ses03M_impro03_M002,2 794 | Ses03M_impro03_M003,2 795 | Ses03M_impro03_M004,2 796 | Ses03M_impro03_M011,2 797 | Ses03M_impro03_M012,2 798 | Ses03M_impro03_M013,2 799 | Ses03M_impro03_M014,2 800 | Ses03M_impro03_M015,2 801 | Ses03M_impro03_M016,2 802 | Ses03M_impro03_M017,2 803 | Ses03M_impro03_M018,2 804 | Ses03M_impro03_M019,2 805 | Ses03M_impro03_M020,2 806 | Ses03M_impro03_M021,2 807 | Ses03M_impro03_M028,3 808 | Ses03M_impro03_M029,2 809 | Ses03M_impro03_M030,2 810 | Ses03M_impro03_M031,2 811 | Ses03M_impro03_M032,2 812 | Ses03M_impro03_M033,3 813 | Ses03M_impro03_M035,2 814 | Ses03M_impro03_M038,3 815 | Ses03M_impro03_M039,2 816 | Ses03F_script02_2_F000,3 817 | Ses03F_script02_2_F005,0 818 | Ses03F_script02_2_F012,0 819 | Ses03F_script02_2_F016,1 820 | Ses03F_script02_2_F017,1 821 | Ses03F_script02_2_F018,1 822 | Ses03F_script02_2_F019,2 823 | Ses03F_script02_2_F020,2 824 | Ses03F_script02_2_F021,2 825 | Ses03F_script02_2_F022,1 826 | Ses03F_script02_2_F023,2 827 | Ses03F_script02_2_F024,1 828 | Ses03F_script02_2_F025,1 829 | Ses03F_script02_2_F026,1 830 | Ses03F_script02_2_F027,1 831 | Ses03F_script02_2_F028,1 832 | Ses03F_script02_2_F031,0 833 | Ses03F_script02_2_F035,0 834 | Ses03F_script02_2_F036,1 835 | Ses03F_script02_2_F038,1 836 | Ses03F_script02_2_F039,1 837 | Ses03F_script02_2_F040,1 838 | Ses03F_script02_2_F043,2 839 | Ses03F_script02_2_F044,2 840 | Ses03F_script02_2_F045,2 841 | Ses03F_script02_2_F048,2 842 | Ses03F_script02_2_F050,2 843 | 
Ses03F_script02_2_F051,1 844 | Ses03F_script02_2_M001,3 845 | Ses03F_script02_2_M005,3 846 | Ses03F_script02_2_M008,3 847 | Ses03F_script02_2_M016,3 848 | Ses03F_script02_2_M026,0 849 | Ses03F_script02_2_M038,2 850 | Ses03F_script02_2_M039,2 851 | Ses03F_script02_2_M040,2 852 | Ses03F_script02_2_M041,2 853 | Ses03F_script02_2_M042,3 854 | Ses03F_script02_2_M043,3 855 | Ses03F_script02_2_M044,2 856 | Ses03F_script02_2_M045,2 857 | Ses03M_script01_3_F004,1 858 | Ses03M_script01_3_F006,1 859 | Ses03M_script01_3_F007,1 860 | Ses03M_script01_3_F008,1 861 | Ses03M_script01_3_F011,2 862 | Ses03M_script01_3_F012,2 863 | Ses03M_script01_3_F013,2 864 | Ses03M_script01_3_F014,2 865 | Ses03M_script01_3_F015,2 866 | Ses03M_script01_3_F016,2 867 | Ses03M_script01_3_F017,2 868 | Ses03M_script01_3_F018,2 869 | Ses03M_script01_3_F019,2 870 | Ses03M_script01_3_F020,2 871 | Ses03M_script01_3_F021,2 872 | Ses03M_script01_3_F029,1 873 | Ses03M_script01_3_F030,1 874 | Ses03M_script01_3_F031,1 875 | Ses03M_script01_3_F033,1 876 | Ses03M_script01_3_F034,2 877 | Ses03M_script01_3_M001,3 878 | Ses03M_script01_3_M004,3 879 | Ses03M_script01_3_M008,2 880 | Ses03M_script01_3_M010,2 881 | Ses03M_script01_3_M016,2 882 | Ses03M_script01_3_M018,2 883 | Ses03M_script01_3_M019,2 884 | Ses03M_script01_3_M021,2 885 | Ses03M_script01_3_M023,1 886 | Ses03M_script01_3_M027,1 887 | Ses03M_script01_3_M028,1 888 | Ses03M_script01_3_M029,1 889 | Ses03M_script01_3_M030,1 890 | Ses03M_script01_3_M031,1 891 | Ses03M_script01_3_M032,1 892 | Ses03M_script01_3_M033,1 893 | Ses03M_script01_3_M034,1 894 | Ses03M_script01_3_M035,1 895 | Ses03M_script01_3_M036,1 896 | Ses03M_script01_3_M037,1 897 | Ses03M_script01_3_M038,1 898 | Ses03M_script01_3_M039,1 899 | Ses03M_script01_3_M040,1 900 | Ses03M_script01_3_M041,1 901 | Ses03M_script01_3_M042,1 902 | Ses03M_script01_3_M045,2 903 | Ses03M_script01_3_M046,2 904 | Ses03F_impro08_F000,3 905 | Ses03F_impro08_F001,3 906 | Ses03F_impro08_F002,3 907 | Ses03F_impro08_F003,3 
908 | Ses03F_impro08_F004,3 909 | Ses03F_impro08_F005,3 910 | Ses03F_impro08_F006,3 911 | Ses03F_impro08_F007,3 912 | Ses03F_impro08_F008,3 913 | Ses03F_impro08_F009,3 914 | Ses03F_impro08_F010,3 915 | Ses03F_impro08_F011,3 916 | Ses03F_impro08_F012,3 917 | Ses03F_impro08_F013,3 918 | Ses03F_impro08_F014,3 919 | Ses03F_impro08_F015,3 920 | Ses03F_impro08_F016,3 921 | Ses03F_impro08_F020,3 922 | Ses03F_impro08_F021,3 923 | Ses03F_impro08_F022,3 924 | Ses03F_impro08_F023,3 925 | Ses03F_impro08_F024,3 926 | Ses03F_impro08_F025,3 927 | Ses03F_impro08_F026,3 928 | Ses03F_impro08_F027,3 929 | Ses03F_impro08_F028,3 930 | Ses03F_impro08_F029,3 931 | Ses03F_impro08_M000,0 932 | Ses03F_impro08_M006,0 933 | Ses03F_impro08_M007,0 934 | Ses03F_impro08_M008,0 935 | Ses03F_impro08_M009,0 936 | Ses03F_impro08_M010,0 937 | Ses03F_impro08_M011,0 938 | Ses03F_impro08_M012,0 939 | Ses03F_impro08_M014,0 940 | Ses03F_impro08_M015,0 941 | Ses03F_impro08_M021,3 942 | Ses03M_impro08a_F004,3 943 | Ses03M_impro08a_F013,0 944 | Ses03M_impro08a_F014,0 945 | Ses03M_impro08a_F015,0 946 | Ses03M_impro08a_F020,0 947 | Ses03M_impro08a_F021,0 948 | Ses03M_impro08a_F022,0 949 | Ses03M_impro08a_M000,3 950 | Ses03M_impro08a_M001,3 951 | Ses03M_impro08a_M002,3 952 | Ses03M_impro08a_M003,3 953 | Ses03M_impro08a_M004,3 954 | Ses03M_impro08a_M005,3 955 | Ses03M_impro08a_M006,3 956 | Ses03M_impro08a_M007,3 957 | Ses03M_impro08a_M008,3 958 | Ses03M_impro08a_M009,3 959 | Ses03M_impro08a_M010,3 960 | Ses03M_impro08a_M012,3 961 | Ses03M_impro08a_M013,3 962 | Ses03M_impro08a_M016,3 963 | Ses03M_impro08a_M017,3 964 | Ses03M_impro08a_M018,3 965 | Ses03M_impro08a_M019,3 966 | Ses03M_impro08a_M020,3 967 | Ses03M_impro08a_M021,3 968 | Ses03M_impro08a_M022,3 969 | Ses03M_impro08a_M023,3 970 | Ses03M_impro08a_M024,3 971 | Ses03M_impro08a_M025,3 972 | Ses03M_impro08a_M026,3 973 | Ses03M_impro08a_M027,3 974 | Ses03F_impro01_F002,3 975 | Ses03F_impro01_M001,3 976 | Ses03F_impro01_M002,3 977 | Ses03F_impro01_M003,3 978 | 
Ses03F_impro01_M004,3 979 | Ses03F_impro01_M005,3 980 | Ses03F_script03_1_F000,3 981 | Ses03F_script03_1_F001,3 982 | Ses03F_script03_1_F009,2 983 | Ses03F_script03_1_F014,1 984 | Ses03F_script03_1_F015,1 985 | Ses03F_script03_1_F019,2 986 | Ses03F_script03_1_F021,2 987 | Ses03F_script03_1_F025,2 988 | Ses03F_script03_1_F026,2 989 | Ses03F_script03_1_F027,2 990 | Ses03F_script03_1_M004,3 991 | Ses03F_script03_1_M005,3 992 | Ses03F_script03_1_M007,2 993 | Ses03F_script03_1_M011,2 994 | Ses03F_script03_1_M013,2 995 | Ses03F_script03_1_M020,2 996 | Ses03F_script03_1_M022,2 997 | Ses03F_script03_1_M027,2 998 | Ses03M_script01_1_F000,3 999 | Ses03M_script01_1_F004,1 1000 | Ses03M_script01_1_F005,1 1001 | Ses03M_script01_1_F008,1 1002 | Ses03M_script01_1_F016,0 1003 | Ses03M_script01_1_F017,0 1004 | Ses03M_script01_1_F020,0 1005 | Ses03M_script01_1_F021,3 1006 | Ses03M_script01_1_F028,3 1007 | Ses03M_script01_1_F030,3 1008 | Ses03M_script01_1_F038,0 1009 | Ses03M_script01_1_F040,1 1010 | Ses03M_script01_1_F043,0 1011 | Ses03M_script01_1_F045,1 1012 | Ses03M_script01_1_F046,1 1013 | Ses03M_script01_1_M000,1 1014 | Ses03M_script01_1_M001,1 1015 | Ses03M_script01_1_M002,1 1016 | Ses03M_script01_1_M003,1 1017 | Ses03M_script01_1_M004,1 1018 | Ses03M_script01_1_M005,1 1019 | Ses03M_script01_1_M006,1 1020 | Ses03M_script01_1_M007,1 1021 | Ses03M_script01_1_M009,0 1022 | Ses03M_script01_1_M011,0 1023 | Ses03M_script01_1_M012,0 1024 | Ses03M_script01_1_M017,3 1025 | Ses03M_script01_1_M019,3 1026 | Ses03M_script01_1_M021,3 1027 | Ses03M_script01_1_M022,0 1028 | Ses03M_script01_1_M027,0 1029 | Ses03M_script01_1_M028,3 1030 | Ses03M_script01_1_M029,3 1031 | Ses03M_script01_1_M030,1 1032 | Ses03M_script01_1_M031,3 1033 | Ses03M_script01_1_M036,1 1034 | Ses03M_script01_1_M037,3 1035 | Ses03M_script01_1_M038,3 1036 | Ses03M_script01_1_M039,0 1037 | Ses03M_script01_1_M040,0 1038 | Ses03M_script01_1_M041,0 1039 | Ses03M_script01_1_M043,3 1040 | Ses03M_script01_1_M045,1 1041 | 
Ses03M_script01_1_M046,1 1042 | Ses03M_script01_1_M047,1 1043 | Ses03F_impro07_F001,2 1044 | Ses03F_impro07_F004,2 1045 | Ses03F_impro07_F005,2 1046 | Ses03F_impro07_F007,2 1047 | Ses03F_impro07_F008,2 1048 | Ses03F_impro07_F009,2 1049 | Ses03F_impro07_F011,2 1050 | Ses03F_impro07_F012,2 1051 | Ses03F_impro07_F013,2 1052 | Ses03F_impro07_F014,2 1053 | Ses03F_impro07_F015,3 1054 | Ses03F_impro07_F016,2 1055 | Ses03F_impro07_F017,2 1056 | Ses03F_impro07_F019,2 1057 | Ses03F_impro07_F020,2 1058 | Ses03F_impro07_F021,2 1059 | Ses03F_impro07_F022,2 1060 | Ses03F_impro07_F023,2 1061 | Ses03F_impro07_F025,2 1062 | Ses03F_impro07_F026,2 1063 | Ses03F_impro07_F027,2 1064 | Ses03F_impro07_F028,2 1065 | Ses03F_impro07_F029,2 1066 | Ses03F_impro07_F031,3 1067 | Ses03F_impro07_F032,2 1068 | Ses03F_impro07_F034,2 1069 | Ses03F_impro07_F035,2 1070 | Ses03F_impro07_M000,3 1071 | Ses03F_impro07_M001,3 1072 | Ses03F_impro07_M003,3 1073 | Ses03F_impro07_M004,3 1074 | Ses03F_impro07_M008,2 1075 | Ses03F_impro07_M011,2 1076 | Ses03F_impro07_M012,3 1077 | Ses03F_impro07_M013,2 1078 | Ses03F_impro07_M014,2 1079 | Ses03F_impro07_M015,2 1080 | Ses03F_impro07_M019,2 1081 | Ses03F_impro07_M021,3 1082 | Ses03F_impro07_M022,2 1083 | Ses03F_impro07_M023,3 1084 | Ses03F_impro07_M025,3 1085 | Ses03F_impro07_M026,2 1086 | Ses03F_impro07_M027,2 1087 | Ses03F_impro07_M028,2 1088 | Ses03F_impro07_M029,2 1089 | Ses03F_impro07_M032,2 1090 | Ses03F_impro07_M033,3 1091 | Ses03F_impro07_M034,2 1092 | Ses03F_impro07_M035,2 1093 | Ses03M_impro05b_F000,3 1094 | Ses03M_impro05b_F001,3 1095 | Ses03M_impro05b_F002,3 1096 | Ses03M_impro05b_F003,3 1097 | Ses03M_impro05b_F004,3 1098 | Ses03M_impro05b_F005,3 1099 | Ses03M_impro05b_F006,3 1100 | Ses03M_impro05b_F007,3 1101 | Ses03M_impro05b_F008,3 1102 | Ses03M_impro05b_F009,3 1103 | Ses03M_impro05b_F010,3 1104 | Ses03M_impro05b_F011,3 1105 | Ses03M_impro05b_F012,3 1106 | Ses03M_impro05b_F013,3 1107 | Ses03M_impro05b_F014,3 1108 | Ses03M_impro05b_F015,3 1109 | 
Ses03M_impro05b_F016,3 1110 | Ses03M_impro05b_F018,3 1111 | Ses03M_impro05b_F019,3 1112 | Ses03M_impro05b_F020,3 1113 | Ses03M_impro05b_F021,3 1114 | Ses03M_impro05b_F022,3 1115 | Ses03M_impro05b_F023,3 1116 | Ses03M_impro05b_F027,3 1117 | Ses03M_impro05b_F028,3 1118 | Ses03M_impro05b_F029,3 1119 | Ses03M_impro05b_F030,3 1120 | Ses03M_impro05b_M008,0 1121 | Ses03M_impro05b_M011,0 1122 | Ses03M_impro05b_M013,0 1123 | Ses03M_impro05b_M014,0 1124 | Ses03M_impro05b_M015,0 1125 | Ses03M_impro05b_M016,0 1126 | Ses03M_impro05b_M019,3 1127 | Ses03M_impro05b_M020,0 1128 | Ses03M_impro05b_M021,0 1129 | Ses03M_impro05b_M022,0 1130 | Ses03M_impro05b_M023,0 1131 | Ses03M_impro05b_M024,0 1132 | Ses03M_impro05b_M025,0 1133 | Ses03M_impro05b_M026,0 1134 | Ses03M_impro05b_M027,0 1135 | Ses03M_impro05b_M029,0 1136 | Ses03M_impro05b_M031,0 1137 | Ses03M_impro05b_M032,0 1138 | Ses03M_impro05b_M033,0 1139 | Ses03M_impro01_F000,3 1140 | Ses03M_impro01_F003,3 1141 | Ses03M_impro01_F022,0 1142 | Ses03M_impro01_F025,0 1143 | Ses03M_impro01_M004,0 1144 | Ses03M_impro01_M011,0 1145 | Ses03M_impro01_M012,0 1146 | Ses03M_impro01_M014,0 1147 | Ses03M_impro01_M015,0 1148 | Ses03M_impro01_M017,0 1149 | Ses03M_impro01_M019,0 1150 | Ses03M_impro01_M023,0 1151 | Ses03M_impro01_M028,0 1152 | Ses03M_impro01_M029,0 1153 | -------------------------------------------------------------------------------- /Feature_Extractor/iemocap_data/valid4.csv: -------------------------------------------------------------------------------- 1 | name,label 2 | Ses04F_impro06_F000,1 3 | Ses04F_impro06_F001,1 4 | Ses04F_impro06_F002,1 5 | Ses04F_impro06_F003,1 6 | Ses04F_impro06_F004,1 7 | Ses04F_impro06_F005,1 8 | Ses04F_impro06_F006,1 9 | Ses04F_impro06_F007,1 10 | Ses04F_impro06_F008,2 11 | Ses04F_impro06_F009,2 12 | Ses04F_impro06_F010,1 13 | Ses04F_impro06_M000,1 14 | Ses04F_impro06_M001,1 15 | Ses04F_impro06_M002,1 16 | Ses04F_impro06_M003,1 17 | Ses04F_impro06_M004,3 18 | Ses04F_impro06_M005,1 19 | 
Ses04F_impro06_M006,3 20 | Ses04F_impro06_M007,3 21 | Ses04F_impro06_M008,1 22 | Ses04F_impro06_M009,1 23 | Ses04F_impro06_M010,1 24 | Ses04F_impro06_M011,1 25 | Ses04F_impro06_M012,3 26 | Ses04F_impro06_M013,1 27 | Ses04F_impro06_M014,1 28 | Ses04M_script01_1_F000,3 29 | Ses04M_script01_1_F007,1 30 | Ses04M_script01_1_F011,0 31 | Ses04M_script01_1_F012,0 32 | Ses04M_script01_1_F013,0 33 | Ses04M_script01_1_F015,3 34 | Ses04M_script01_1_F017,3 35 | Ses04M_script01_1_F018,0 36 | Ses04M_script01_1_F019,3 37 | Ses04M_script01_1_F021,0 38 | Ses04M_script01_1_F022,0 39 | Ses04M_script01_1_F027,3 40 | Ses04M_script01_1_F028,3 41 | Ses04M_script01_1_F029,0 42 | Ses04M_script01_1_F032,3 43 | Ses04M_script01_1_F033,3 44 | Ses04M_script01_1_F034,0 45 | Ses04M_script01_1_F035,0 46 | Ses04M_script01_1_F036,0 47 | Ses04M_script01_1_F037,0 48 | Ses04M_script01_1_F039,0 49 | Ses04M_script01_1_F040,0 50 | Ses04M_script01_1_F041,0 51 | Ses04M_script01_1_M000,3 52 | Ses04M_script01_1_M001,3 53 | Ses04M_script01_1_M013,0 54 | Ses04M_script01_1_M014,0 55 | Ses04M_script01_1_M018,3 56 | Ses04M_script01_1_M021,0 57 | Ses04M_script01_1_M022,0 58 | Ses04M_script01_1_M026,3 59 | Ses04M_script01_1_M031,3 60 | Ses04M_script01_1_M032,3 61 | Ses04M_script01_1_M034,1 62 | Ses04M_script01_1_M037,0 63 | Ses04M_script01_1_M038,0 64 | Ses04M_script01_1_M040,3 65 | Ses04M_script01_1_M041,3 66 | Ses04M_script01_2_F000,0 67 | Ses04M_script01_2_F001,3 68 | Ses04M_script01_2_F002,0 69 | Ses04M_script01_2_F003,0 70 | Ses04M_script01_2_F004,0 71 | Ses04M_script01_2_F005,0 72 | Ses04M_script01_2_F006,0 73 | Ses04M_script01_2_F007,3 74 | Ses04M_script01_2_F008,0 75 | Ses04M_script01_2_F009,0 76 | Ses04M_script01_2_F010,0 77 | Ses04M_script01_2_F011,0 78 | Ses04M_script01_2_F012,0 79 | Ses04M_script01_2_F013,0 80 | Ses04M_script01_2_F014,0 81 | Ses04M_script01_2_F015,0 82 | Ses04M_script01_2_F016,0 83 | Ses04M_script01_2_F018,0 84 | Ses04M_script01_2_F020,1 85 | Ses04M_script01_2_M000,3 86 | 
Ses04M_script01_2_M005,3 87 | Ses04M_script01_2_M006,3 88 | Ses04M_script01_2_M007,3 89 | Ses04M_script01_2_M019,0 90 | Ses04M_impro08_F020,3 91 | Ses04M_impro08_F022,2 92 | Ses04M_impro08_F023,2 93 | Ses04M_impro08_F024,2 94 | Ses04M_impro08_F027,3 95 | Ses04M_impro08_F028,3 96 | Ses04M_impro08_F030,3 97 | Ses04M_impro08_M001,3 98 | Ses04M_impro08_M004,3 99 | Ses04M_impro08_M005,3 100 | Ses04M_impro08_M006,3 101 | Ses04M_impro08_M008,3 102 | Ses04M_impro08_M010,3 103 | Ses04M_impro08_M011,3 104 | Ses04M_impro08_M012,3 105 | Ses04M_impro08_M013,3 106 | Ses04M_impro08_M017,3 107 | Ses04M_impro08_M019,3 108 | Ses04M_impro08_M020,3 109 | Ses04M_impro08_M021,3 110 | Ses04M_impro08_M022,3 111 | Ses04M_impro08_M023,3 112 | Ses04M_impro08_M024,3 113 | Ses04M_impro08_M025,3 114 | Ses04M_impro08_M026,3 115 | Ses04M_impro08_M027,3 116 | Ses04M_impro08_M028,3 117 | Ses04F_impro08_F000,2 118 | Ses04F_impro08_F001,3 119 | Ses04F_impro08_F002,3 120 | Ses04F_impro08_F004,3 121 | Ses04F_impro08_F005,3 122 | Ses04F_impro08_F006,3 123 | Ses04F_impro08_F008,3 124 | Ses04F_impro08_F009,3 125 | Ses04F_impro08_F023,0 126 | Ses04F_impro08_M001,3 127 | Ses04F_impro08_M012,3 128 | Ses04F_impro08_M013,0 129 | Ses04F_impro08_M014,0 130 | Ses04F_impro08_M015,0 131 | Ses04F_impro08_M016,0 132 | Ses04F_impro08_M017,0 133 | Ses04F_impro08_M018,0 134 | Ses04F_impro08_M019,0 135 | Ses04F_impro08_M022,0 136 | Ses04F_impro08_M023,0 137 | Ses04F_impro08_M024,0 138 | Ses04F_script03_2_F005,0 139 | Ses04F_script03_2_F012,0 140 | Ses04F_script03_2_F013,0 141 | Ses04F_script03_2_F014,0 142 | Ses04F_script03_2_F019,0 143 | Ses04F_script03_2_F020,0 144 | Ses04F_script03_2_F021,0 145 | Ses04F_script03_2_F022,0 146 | Ses04F_script03_2_F023,0 147 | Ses04F_script03_2_F024,0 148 | Ses04F_script03_2_F025,0 149 | Ses04F_script03_2_F026,0 150 | Ses04F_script03_2_F027,0 151 | Ses04F_script03_2_F028,0 152 | Ses04F_script03_2_F029,0 153 | Ses04F_script03_2_F030,0 154 | Ses04F_script03_2_F031,0 155 | 
Ses04F_script03_2_F034,0 156 | Ses04F_script03_2_F035,0 157 | Ses04F_script03_2_F036,0 158 | Ses04F_script03_2_F037,0 159 | Ses04F_script03_2_F038,0 160 | Ses04F_script03_2_F039,0 161 | Ses04F_script03_2_F040,0 162 | Ses04F_script03_2_F041,0 163 | Ses04F_script03_2_F042,0 164 | Ses04F_script03_2_F043,0 165 | Ses04F_script03_2_F044,0 166 | Ses04F_script03_2_M000,3 167 | Ses04F_script03_2_M001,0 168 | Ses04F_script03_2_M003,0 169 | Ses04F_script03_2_M005,0 170 | Ses04F_script03_2_M006,0 171 | Ses04F_script03_2_M007,0 172 | Ses04F_script03_2_M008,0 173 | Ses04F_script03_2_M010,3 174 | Ses04F_script03_2_M014,0 175 | Ses04F_script03_2_M015,0 176 | Ses04F_script03_2_M016,0 177 | Ses04F_script03_2_M024,3 178 | Ses04F_script03_2_M027,0 179 | Ses04F_script03_2_M031,0 180 | Ses04F_script03_2_M034,0 181 | Ses04F_script03_2_M035,0 182 | Ses04F_script03_2_M037,0 183 | Ses04F_script03_2_M039,0 184 | Ses04F_script03_2_M040,0 185 | Ses04F_script03_2_M041,0 186 | Ses04F_script03_2_M042,0 187 | Ses04F_script03_2_M043,0 188 | Ses04F_script03_2_M044,0 189 | Ses04F_script03_2_M045,0 190 | Ses04M_impro05_F000,3 191 | Ses04M_impro05_F001,3 192 | Ses04M_impro05_F002,3 193 | Ses04M_impro05_F003,3 194 | Ses04M_impro05_F006,3 195 | Ses04M_impro05_F007,3 196 | Ses04M_impro05_F008,3 197 | Ses04M_impro05_F009,3 198 | Ses04M_impro05_F010,3 199 | Ses04M_impro05_F013,3 200 | Ses04M_impro05_F017,3 201 | Ses04M_impro05_F019,3 202 | Ses04M_impro05_F020,3 203 | Ses04M_impro05_F036,3 204 | Ses04M_impro05_F037,3 205 | Ses04M_impro05_M008,0 206 | Ses04M_impro05_M009,0 207 | Ses04M_impro05_M010,0 208 | Ses04M_impro05_M011,0 209 | Ses04M_impro05_M014,0 210 | Ses04M_impro05_M015,0 211 | Ses04M_impro05_M016,0 212 | Ses04M_impro05_M017,0 213 | Ses04M_impro05_M018,0 214 | Ses04M_impro05_M022,0 215 | Ses04M_impro05_M026,0 216 | Ses04M_impro05_M038,0 217 | Ses04M_impro05_M039,0 218 | Ses04M_impro05_M044,0 219 | Ses04M_impro05_M045,0 220 | Ses04M_script03_1_F005,2 221 | Ses04M_script03_1_F006,2 222 | 
Ses04M_script03_1_F007,2 223 | Ses04M_script03_1_F008,2 224 | Ses04M_script03_1_F009,2 225 | Ses04M_script03_1_F012,2 226 | Ses04M_script03_1_F013,3 227 | Ses04M_script03_1_F014,2 228 | Ses04M_script03_1_F015,2 229 | Ses04M_script03_1_F017,2 230 | Ses04M_script03_1_F018,2 231 | Ses04M_script03_1_F021,2 232 | Ses04M_script03_1_F022,2 233 | Ses04M_script03_1_F023,2 234 | Ses04M_script03_1_F024,2 235 | Ses04M_script03_1_F025,2 236 | Ses04M_script03_1_F026,2 237 | Ses04M_script03_1_F027,2 238 | Ses04M_script03_1_F028,2 239 | Ses04M_script03_1_F029,2 240 | Ses04M_script03_1_F030,2 241 | Ses04M_script03_1_M003,3 242 | Ses04M_script03_1_M004,2 243 | Ses04M_script03_1_M006,2 244 | Ses04M_script03_1_M007,2 245 | Ses04M_script03_1_M010,3 246 | Ses04M_script03_1_M012,3 247 | Ses04M_script03_1_M013,0 248 | Ses04M_script03_1_M016,1 249 | Ses04M_script03_1_M021,2 250 | Ses04M_script03_1_M025,0 251 | Ses04M_script03_1_M028,3 252 | Ses04M_script03_1_M029,2 253 | Ses04M_script03_1_M033,3 254 | Ses04M_script03_2_F000,0 255 | Ses04M_script03_2_F001,2 256 | Ses04M_script03_2_F002,2 257 | Ses04M_script03_2_F003,2 258 | Ses04M_script03_2_F006,2 259 | Ses04M_script03_2_F008,2 260 | Ses04M_script03_2_F009,3 261 | Ses04M_script03_2_F010,0 262 | Ses04M_script03_2_F014,0 263 | Ses04M_script03_2_F017,0 264 | Ses04M_script03_2_F019,0 265 | Ses04M_script03_2_F022,0 266 | Ses04M_script03_2_F023,0 267 | Ses04M_script03_2_F024,3 268 | Ses04M_script03_2_F026,0 269 | Ses04M_script03_2_F028,0 270 | Ses04M_script03_2_F029,0 271 | Ses04M_script03_2_F030,0 272 | Ses04M_script03_2_F031,0 273 | Ses04M_script03_2_F032,0 274 | Ses04M_script03_2_F033,0 275 | Ses04M_script03_2_F034,0 276 | Ses04M_script03_2_F035,0 277 | Ses04M_script03_2_F036,0 278 | Ses04M_script03_2_F037,0 279 | Ses04M_script03_2_F039,0 280 | Ses04M_script03_2_F040,0 281 | Ses04M_script03_2_F041,0 282 | Ses04M_script03_2_F042,0 283 | Ses04M_script03_2_F043,3 284 | Ses04M_script03_2_F044,0 285 | Ses04M_script03_2_F045,0 286 | 
Ses04M_script03_2_F046,0 287 | Ses04M_script03_2_F047,0 288 | Ses04M_script03_2_F048,0 289 | Ses04M_script03_2_F049,0 290 | Ses04M_script03_2_F050,0 291 | Ses04M_script03_2_F051,0 292 | Ses04M_script03_2_F052,0 293 | Ses04M_script03_2_M001,3 294 | Ses04M_script03_2_M008,0 295 | Ses04M_script03_2_M009,3 296 | Ses04M_script03_2_M010,0 297 | Ses04M_script03_2_M011,0 298 | Ses04M_script03_2_M012,0 299 | Ses04M_script03_2_M013,0 300 | Ses04M_script03_2_M014,0 301 | Ses04M_script03_2_M020,0 302 | Ses04M_script03_2_M021,0 303 | Ses04M_script03_2_M022,0 304 | Ses04M_script03_2_M024,0 305 | Ses04M_script03_2_M025,0 306 | Ses04M_script03_2_M026,3 307 | Ses04M_script03_2_M027,0 308 | Ses04M_script03_2_M028,3 309 | Ses04M_script03_2_M029,0 310 | Ses04M_script03_2_M030,0 311 | Ses04M_script03_2_M031,0 312 | Ses04M_script03_2_M033,0 313 | Ses04M_script03_2_M034,0 314 | Ses04M_script03_2_M035,0 315 | Ses04M_script03_2_M036,0 316 | Ses04M_script03_2_M038,0 317 | Ses04M_script03_2_M040,0 318 | Ses04M_script03_2_M041,0 319 | Ses04M_script03_2_M043,0 320 | Ses04M_script03_2_M046,0 321 | Ses04M_script03_2_M047,0 322 | Ses04M_script03_2_M048,0 323 | Ses04M_script03_2_M049,0 324 | Ses04M_script03_2_M050,0 325 | Ses04M_script03_2_M051,0 326 | Ses04M_script03_2_M052,0 327 | Ses04M_script03_2_M053,0 328 | Ses04M_script03_2_M054,0 329 | Ses04M_script03_2_M055,0 330 | Ses04M_script03_2_M056,0 331 | Ses04M_impro06_F000,1 332 | Ses04M_impro06_F001,1 333 | Ses04M_impro06_F002,1 334 | Ses04M_impro06_F003,1 335 | Ses04M_impro06_F004,1 336 | Ses04M_impro06_F005,1 337 | Ses04M_impro06_F006,1 338 | Ses04M_impro06_F007,1 339 | Ses04M_impro06_F008,1 340 | Ses04M_impro06_F009,1 341 | Ses04M_impro06_F010,1 342 | Ses04M_impro06_F011,1 343 | Ses04M_impro06_F012,1 344 | Ses04M_impro06_F014,1 345 | Ses04M_impro06_F015,1 346 | Ses04M_impro06_F016,1 347 | Ses04M_impro06_F017,1 348 | Ses04M_impro06_F018,1 349 | Ses04M_impro06_F019,1 350 | Ses04M_impro06_M000,1 351 | Ses04M_impro06_M001,1 352 | 
Ses04M_impro06_M002,1 353 | Ses04M_impro06_M003,1 354 | Ses04M_impro06_M004,1 355 | Ses04M_impro06_M005,1 356 | Ses04M_impro06_M006,1 357 | Ses04M_impro06_M007,1 358 | Ses04M_impro06_M008,1 359 | Ses04M_impro06_M009,1 360 | Ses04M_impro06_M010,1 361 | Ses04M_impro06_M011,1 362 | Ses04M_impro06_M012,1 363 | Ses04M_impro06_M013,1 364 | Ses04M_impro06_M014,1 365 | Ses04M_impro06_M015,1 366 | Ses04M_impro06_M016,1 367 | Ses04M_impro06_M017,1 368 | Ses04M_impro06_M018,1 369 | Ses04M_impro06_M019,1 370 | Ses04M_impro06_M020,1 371 | Ses04F_script01_1_F004,1 372 | Ses04F_script01_1_F011,0 373 | Ses04F_script01_1_F012,0 374 | Ses04F_script01_1_F013,0 375 | Ses04F_script01_1_F014,0 376 | Ses04F_script01_1_F015,0 377 | Ses04F_script01_1_F018,3 378 | Ses04F_script01_1_F019,0 379 | Ses04F_script01_1_F022,0 380 | Ses04F_script01_1_F023,0 381 | Ses04F_script01_1_F024,0 382 | Ses04F_script01_1_F025,3 383 | Ses04F_script01_1_F029,0 384 | Ses04F_script01_1_F030,0 385 | Ses04F_script01_1_F032,0 386 | Ses04F_script01_1_F035,0 387 | Ses04F_script01_1_F036,0 388 | Ses04F_script01_1_F037,0 389 | Ses04F_script01_1_F040,0 390 | Ses04F_script01_1_F041,0 391 | Ses04F_script01_1_F042,0 392 | Ses04F_script01_1_F044,0 393 | Ses04F_script01_1_M000,3 394 | Ses04F_script01_1_M004,1 395 | Ses04F_script01_1_M005,1 396 | Ses04F_script01_1_M009,0 397 | Ses04F_script01_1_M010,0 398 | Ses04F_script01_1_M012,0 399 | Ses04F_script01_1_M013,0 400 | Ses04F_script01_1_M016,3 401 | Ses04F_script01_1_M017,3 402 | Ses04F_script01_1_M020,0 403 | Ses04F_script01_1_M021,0 404 | Ses04F_script01_1_M022,0 405 | Ses04F_script01_1_M025,0 406 | Ses04F_script01_1_M026,1 407 | Ses04F_script01_1_M028,0 408 | Ses04F_script01_1_M029,0 409 | Ses04F_script01_1_M030,0 410 | Ses04F_script01_1_M032,1 411 | Ses04F_script01_1_M034,0 412 | Ses04F_script01_1_M038,0 413 | Ses04M_impro07_F002,2 414 | Ses04M_impro07_F003,2 415 | Ses04M_impro07_F004,2 416 | Ses04M_impro07_F006,2 417 | Ses04M_impro07_F007,2 418 | Ses04M_impro07_F008,2 419 
| Ses04M_impro07_F009,2 420 | Ses04M_impro07_F010,2 421 | Ses04M_impro07_F011,2 422 | Ses04M_impro07_F012,2 423 | Ses04M_impro07_F013,2 424 | Ses04M_impro07_F014,2 425 | Ses04M_impro07_F015,2 426 | Ses04M_impro07_F016,2 427 | Ses04M_impro07_F017,2 428 | Ses04M_impro07_F018,2 429 | Ses04M_impro07_F022,2 430 | Ses04M_impro07_F023,3 431 | Ses04M_impro07_F026,3 432 | Ses04M_impro07_F027,2 433 | Ses04M_impro07_F036,3 434 | Ses04M_impro07_F037,3 435 | Ses04M_impro07_M000,2 436 | Ses04M_impro07_M001,2 437 | Ses04M_impro07_M002,2 438 | Ses04M_impro07_M003,2 439 | Ses04M_impro07_M004,2 440 | Ses04M_impro07_M007,2 441 | Ses04M_impro07_M008,2 442 | Ses04M_impro07_M009,2 443 | Ses04M_impro07_M010,2 444 | Ses04M_impro07_M011,2 445 | Ses04M_impro07_M012,2 446 | Ses04M_impro07_M013,2 447 | Ses04M_impro07_M014,2 448 | Ses04M_impro07_M015,2 449 | Ses04M_impro07_M020,2 450 | Ses04M_impro07_M021,2 451 | Ses04M_impro07_M025,3 452 | Ses04M_impro07_M026,3 453 | Ses04M_impro07_M029,3 454 | Ses04M_impro07_M030,3 455 | Ses04M_impro07_M032,3 456 | Ses04M_impro07_M035,3 457 | Ses04M_impro07_M036,3 458 | Ses04M_impro07_M037,3 459 | Ses04M_impro07_M040,3 460 | Ses04M_impro07_M041,3 461 | Ses04M_impro07_M042,3 462 | Ses04M_impro07_M043,3 463 | Ses04M_impro04_F000,3 464 | Ses04M_impro04_F001,3 465 | Ses04M_impro04_F002,3 466 | Ses04M_impro04_F003,3 467 | Ses04M_impro04_F004,3 468 | Ses04M_impro04_F005,3 469 | Ses04M_impro04_F006,3 470 | Ses04M_impro04_F007,3 471 | Ses04M_impro04_F008,3 472 | Ses04M_impro04_F009,3 473 | Ses04M_impro04_F011,3 474 | Ses04M_impro04_F012,3 475 | Ses04M_impro04_F013,3 476 | Ses04M_impro04_F015,3 477 | Ses04M_impro04_F016,3 478 | Ses04M_impro04_F017,3 479 | Ses04M_impro04_M004,3 480 | Ses04M_impro04_M020,3 481 | Ses04M_impro04_M021,3 482 | Ses04M_impro04_M022,3 483 | Ses04F_script01_3_F009,2 484 | Ses04F_script01_3_F010,2 485 | Ses04F_script01_3_F012,2 486 | Ses04F_script01_3_F013,2 487 | Ses04F_script01_3_F014,2 488 | Ses04F_script01_3_F015,2 489 | 
Ses04F_script01_3_F023,3 490 | Ses04F_script01_3_F024,1 491 | Ses04F_script01_3_F030,2 492 | Ses04F_script01_3_M004,0 493 | Ses04F_script01_3_M008,3 494 | Ses04F_script01_3_M009,2 495 | Ses04F_script01_3_M010,2 496 | Ses04F_script01_3_M011,2 497 | Ses04F_script01_3_M014,3 498 | Ses04F_script01_3_M018,2 499 | Ses04F_script01_3_M019,2 500 | Ses04F_script01_3_M020,2 501 | Ses04F_script01_3_M023,1 502 | Ses04F_script01_3_M024,1 503 | Ses04F_script01_3_M026,1 504 | Ses04F_script01_3_M027,1 505 | Ses04F_script01_3_M028,1 506 | Ses04F_script01_3_M029,1 507 | Ses04F_script01_3_M030,1 508 | Ses04F_script01_3_M031,1 509 | Ses04F_script01_3_M032,1 510 | Ses04F_script01_3_M033,1 511 | Ses04F_script01_3_M034,1 512 | Ses04F_script01_3_M035,1 513 | Ses04F_script01_3_M036,1 514 | Ses04F_script01_3_M039,1 515 | Ses04F_script01_3_M040,1 516 | Ses04F_script01_3_M041,1 517 | Ses04F_script01_3_M043,1 518 | Ses04F_script01_3_M044,1 519 | Ses04F_script01_3_M045,1 520 | Ses04F_script01_3_M047,2 521 | Ses04F_impro02_F000,1 522 | Ses04F_impro02_F001,1 523 | Ses04F_impro02_F002,1 524 | Ses04F_impro02_F003,1 525 | Ses04F_impro02_F005,1 526 | Ses04F_impro02_F006,1 527 | Ses04F_impro02_F007,1 528 | Ses04F_impro02_F009,1 529 | Ses04F_impro02_F011,1 530 | Ses04F_impro02_F012,0 531 | Ses04F_impro02_F017,0 532 | Ses04F_impro02_F018,0 533 | Ses04F_impro02_F022,0 534 | Ses04F_impro02_F023,1 535 | Ses04F_impro02_F024,1 536 | Ses04F_impro02_M000,1 537 | Ses04F_impro02_M001,1 538 | Ses04F_impro02_M004,1 539 | Ses04F_impro02_M012,1 540 | Ses04F_impro02_M018,0 541 | Ses04F_impro02_M020,0 542 | Ses04F_impro02_M021,0 543 | Ses04F_impro02_M024,1 544 | Ses04F_impro02_M025,1 545 | Ses04F_impro02_M026,1 546 | Ses04M_script02_1_F000,3 547 | Ses04M_script02_1_F002,0 548 | Ses04M_script02_1_F003,0 549 | Ses04M_script02_1_F004,0 550 | Ses04M_script02_1_F005,0 551 | Ses04M_script02_1_F007,0 552 | Ses04M_script02_1_F008,0 553 | Ses04M_script02_1_F010,0 554 | Ses04M_script02_1_F012,0 555 | Ses04M_script02_1_F013,0 556 
| Ses04M_script02_1_F014,3 557 | Ses04M_script02_1_F017,0 558 | Ses04M_script02_1_F018,0 559 | Ses04M_script02_1_M001,2 560 | Ses04M_script02_1_M002,2 561 | Ses04M_script02_1_M003,2 562 | Ses04M_script02_1_M005,3 563 | Ses04M_script02_1_M008,2 564 | Ses04M_script02_1_M009,3 565 | Ses04M_script02_1_M012,3 566 | Ses04M_script02_1_M014,3 567 | Ses04M_script02_1_M015,3 568 | Ses04M_script02_1_M018,2 569 | Ses04M_script02_1_M020,2 570 | Ses04M_script02_1_M021,2 571 | Ses04M_script02_1_M022,2 572 | Ses04M_script02_1_M023,2 573 | Ses04M_script02_1_M028,2 574 | Ses04M_script02_1_M029,2 575 | Ses04M_script02_1_M030,2 576 | Ses04M_script02_1_M031,2 577 | Ses04M_script02_1_M032,2 578 | Ses04M_script02_1_M033,2 579 | Ses04F_impro07_F000,2 580 | Ses04F_impro07_F001,2 581 | Ses04F_impro07_F002,2 582 | Ses04F_impro07_F003,2 583 | Ses04F_impro07_F004,2 584 | Ses04F_impro07_F005,2 585 | Ses04F_impro07_F006,2 586 | Ses04F_impro07_F007,2 587 | Ses04F_impro07_F008,2 588 | Ses04F_impro07_F009,2 589 | Ses04F_impro07_F010,2 590 | Ses04F_impro07_F011,2 591 | Ses04F_impro07_F012,2 592 | Ses04F_impro07_F013,2 593 | Ses04F_impro07_F014,2 594 | Ses04F_impro07_F015,2 595 | Ses04F_impro07_F016,2 596 | Ses04F_impro07_F017,2 597 | Ses04F_impro07_F018,2 598 | Ses04F_impro07_F021,2 599 | Ses04F_impro07_F022,2 600 | Ses04F_impro07_F024,2 601 | Ses04F_impro07_F027,2 602 | Ses04F_impro07_F028,2 603 | Ses04F_impro07_F029,2 604 | Ses04F_impro07_F030,2 605 | Ses04F_impro07_F031,2 606 | Ses04F_impro07_F032,2 607 | Ses04F_impro07_F033,2 608 | Ses04F_impro07_F035,2 609 | Ses04F_impro07_F036,2 610 | Ses04F_impro07_F038,2 611 | Ses04F_impro07_F039,2 612 | Ses04F_impro07_F040,2 613 | Ses04F_impro07_F041,2 614 | Ses04F_impro07_F042,2 615 | Ses04F_impro07_F044,2 616 | Ses04F_impro07_F047,2 617 | Ses04F_impro07_F048,2 618 | Ses04F_impro07_F049,2 619 | Ses04F_impro07_F050,2 620 | Ses04F_impro07_F051,2 621 | Ses04F_impro07_F052,2 622 | Ses04F_impro07_F058,2 623 | Ses04F_impro07_F059,2 624 | Ses04F_impro07_F060,2 
625 | Ses04F_impro07_F061,2 626 | Ses04F_impro07_F062,2 627 | Ses04F_impro07_F063,2 628 | Ses04F_impro07_F064,2 629 | Ses04F_impro07_F065,2 630 | Ses04F_impro07_F066,2 631 | Ses04F_impro07_F070,2 632 | Ses04F_impro07_F074,2 633 | Ses04F_impro07_F078,2 634 | Ses04F_impro07_F079,2 635 | Ses04F_impro07_M000,3 636 | Ses04F_impro07_M001,2 637 | Ses04F_impro07_M005,2 638 | Ses04F_impro07_M006,2 639 | Ses04F_impro07_M009,3 640 | Ses04F_impro07_M013,3 641 | Ses04F_impro07_M016,3 642 | Ses04F_impro07_M017,3 643 | Ses04F_impro07_M018,3 644 | Ses04F_impro07_M019,3 645 | Ses04F_impro07_M020,3 646 | Ses04F_impro07_M021,3 647 | Ses04F_impro07_M022,3 648 | Ses04F_impro07_M023,3 649 | Ses04F_impro07_M026,3 650 | Ses04F_impro07_M028,2 651 | Ses04F_impro07_M031,3 652 | Ses04F_impro07_M032,3 653 | Ses04F_impro07_M033,3 654 | Ses04F_impro07_M034,2 655 | Ses04F_impro07_M035,3 656 | Ses04F_impro07_M036,2 657 | Ses04F_impro07_M037,2 658 | Ses04F_impro07_M038,2 659 | Ses04F_impro07_M039,2 660 | Ses04F_impro07_M040,2 661 | Ses04F_impro07_M041,2 662 | Ses04F_impro07_M042,2 663 | Ses04F_impro07_M043,2 664 | Ses04F_impro07_M044,3 665 | Ses04F_impro07_M045,2 666 | Ses04F_impro07_M046,2 667 | Ses04F_impro07_M049,2 668 | Ses04F_impro07_M050,2 669 | Ses04F_impro07_M051,3 670 | Ses04F_impro07_M057,3 671 | Ses04F_impro07_M058,3 672 | Ses04F_impro07_M059,3 673 | Ses04F_impro07_M060,3 674 | Ses04F_impro07_M061,3 675 | Ses04F_impro07_M062,3 676 | Ses04F_impro07_M063,3 677 | Ses04F_impro07_M064,3 678 | Ses04F_impro07_M066,3 679 | Ses04F_impro07_M067,3 680 | Ses04F_impro07_M070,2 681 | Ses04F_impro07_M071,3 682 | Ses04F_impro07_M072,3 683 | Ses04F_impro07_M073,3 684 | Ses04F_impro07_M076,3 685 | Ses04F_impro07_M077,3 686 | Ses04F_impro07_M079,3 687 | Ses04F_impro07_M080,3 688 | Ses04F_impro07_M082,3 689 | Ses04F_impro05_F004,0 690 | Ses04F_impro05_F005,0 691 | Ses04F_impro05_F006,0 692 | Ses04F_impro05_F008,0 693 | Ses04F_impro05_F009,0 694 | Ses04F_impro05_F010,0 695 | Ses04F_impro05_F011,0 696 | 
Ses04F_impro05_F012,0 697 | Ses04F_impro05_F013,0 698 | Ses04F_impro05_F014,0 699 | Ses04F_impro05_F015,0 700 | Ses04F_impro05_F016,0 701 | Ses04F_impro05_F017,0 702 | Ses04F_impro05_F018,0 703 | Ses04F_impro05_F019,0 704 | Ses04F_script03_1_F005,2 705 | Ses04F_script03_1_F006,2 706 | Ses04F_script03_1_F007,2 707 | Ses04F_script03_1_F009,2 708 | Ses04F_script03_1_F011,2 709 | Ses04F_script03_1_F012,3 710 | Ses04F_script03_1_F014,2 711 | Ses04F_script03_1_F017,3 712 | Ses04F_script03_1_F021,2 713 | Ses04F_script03_1_F022,2 714 | Ses04F_script03_1_F023,2 715 | Ses04F_script03_1_F024,2 716 | Ses04F_script03_1_F026,2 717 | Ses04F_script03_1_F028,2 718 | Ses04F_script03_1_F029,2 719 | Ses04F_script03_1_F030,2 720 | Ses04F_script03_1_M006,2 721 | Ses04F_script03_1_M007,2 722 | Ses04F_script03_1_M008,2 723 | Ses04F_script03_1_M009,3 724 | Ses04F_script03_1_M017,3 725 | Ses04F_script03_1_M027,3 726 | Ses04F_script03_1_M028,3 727 | Ses04F_script03_1_M029,3 728 | Ses04F_script03_1_M030,3 729 | Ses04F_script03_1_M031,3 730 | Ses04F_script01_2_F000,0 731 | Ses04F_script01_2_F001,0 732 | Ses04F_script01_2_F005,0 733 | Ses04F_script01_2_F006,0 734 | Ses04F_script01_2_F007,0 735 | Ses04F_script01_2_F008,0 736 | Ses04F_script01_2_F009,0 737 | Ses04F_script01_2_F010,0 738 | Ses04F_script01_2_F016,0 739 | Ses04F_script01_2_M000,3 740 | Ses04F_script01_2_M001,3 741 | Ses04F_script01_2_M004,3 742 | Ses04F_script01_2_M007,3 743 | Ses04F_script01_2_M008,3 744 | Ses04F_script01_2_M017,0 745 | Ses04F_script02_1_F000,3 746 | Ses04F_script02_1_F007,0 747 | Ses04F_script02_1_F008,0 748 | Ses04F_script02_1_F012,0 749 | Ses04F_script02_1_F013,0 750 | Ses04F_script02_1_F014,0 751 | Ses04F_script02_1_F016,0 752 | Ses04F_script02_1_F017,0 753 | Ses04F_script02_1_F019,0 754 | Ses04F_script02_1_F021,0 755 | Ses04F_script02_1_F022,0 756 | Ses04F_script02_1_M000,2 757 | Ses04F_script02_1_M002,2 758 | Ses04F_script02_1_M009,2 759 | Ses04F_script02_1_M010,3 760 | Ses04F_script02_1_M011,2 761 | 
Ses04F_script02_1_M017,3 762 | Ses04F_script02_1_M021,2 763 | Ses04F_script02_1_M023,2 764 | Ses04F_script02_1_M026,2 765 | Ses04F_script02_1_M031,0 766 | Ses04F_script02_1_M032,2 767 | Ses04F_script02_1_M033,2 768 | Ses04F_script02_1_M034,2 769 | Ses04F_script02_1_M035,2 770 | Ses04F_script02_1_M036,2 771 | Ses04F_script02_1_M037,2 772 | Ses04F_script02_1_M038,2 773 | Ses04F_script02_1_M039,2 774 | Ses04F_script02_1_M040,2 775 | Ses04M_script01_3_F012,2 776 | Ses04M_script01_3_F013,2 777 | Ses04M_script01_3_F014,2 778 | Ses04M_script01_3_F022,1 779 | Ses04M_script01_3_F023,1 780 | Ses04M_script01_3_F027,2 781 | Ses04M_script01_3_M022,1 782 | Ses04M_script01_3_M023,1 783 | Ses04M_script01_3_M024,1 784 | Ses04M_script01_3_M025,1 785 | Ses04M_script01_3_M026,1 786 | Ses04M_script01_3_M027,1 787 | Ses04M_script01_3_M028,1 788 | Ses04M_script01_3_M030,1 789 | Ses04M_script01_3_M031,1 790 | Ses04M_script01_3_M032,1 791 | Ses04M_script01_3_M033,1 792 | Ses04M_script01_3_M034,1 793 | Ses04M_script01_3_M035,1 794 | Ses04M_script01_3_M037,2 795 | Ses04F_impro01_F004,0 796 | Ses04F_impro01_F008,0 797 | Ses04F_impro01_F009,0 798 | Ses04F_impro01_F010,0 799 | Ses04F_impro01_F012,0 800 | Ses04F_impro01_F014,0 801 | Ses04F_impro01_F016,0 802 | Ses04F_impro01_F019,0 803 | Ses04F_impro01_F021,0 804 | Ses04F_impro01_F022,0 805 | Ses04F_impro01_M005,0 806 | Ses04F_impro01_M008,0 807 | Ses04F_impro01_M011,0 808 | Ses04F_impro01_M014,0 809 | Ses04F_impro01_M021,0 810 | Ses04F_impro01_M023,0 811 | Ses04F_impro01_M024,0 812 | Ses04M_impro03_F002,2 813 | Ses04M_impro03_F003,2 814 | Ses04M_impro03_F004,2 815 | Ses04M_impro03_F005,2 816 | Ses04M_impro03_F006,2 817 | Ses04M_impro03_F007,2 818 | Ses04M_impro03_F008,2 819 | Ses04M_impro03_F009,2 820 | Ses04M_impro03_F010,2 821 | Ses04M_impro03_F011,2 822 | Ses04M_impro03_F012,2 823 | Ses04M_impro03_F015,2 824 | Ses04M_impro03_F016,2 825 | Ses04M_impro03_F017,2 826 | Ses04M_impro03_F019,2 827 | Ses04M_impro03_F024,2 828 | Ses04M_impro03_F025,2 
829 | Ses04M_impro03_M002,2 830 | Ses04M_impro03_M005,2 831 | Ses04M_impro03_M006,2 832 | Ses04M_impro03_M007,2 833 | Ses04M_impro03_M008,2 834 | Ses04M_impro03_M009,2 835 | Ses04M_impro03_M011,2 836 | Ses04M_impro03_M015,2 837 | Ses04M_impro03_M017,2 838 | Ses04M_impro03_M019,2 839 | Ses04M_impro03_M020,2 840 | Ses04M_impro03_M021,2 841 | Ses04M_impro03_M022,2 842 | Ses04M_impro03_M023,2 843 | Ses04M_impro03_M024,2 844 | Ses04M_impro03_M025,2 845 | Ses04M_impro03_M026,2 846 | Ses04M_impro03_M028,2 847 | Ses04M_impro03_M029,2 848 | Ses04M_impro03_M030,2 849 | Ses04M_impro03_M031,3 850 | Ses04F_impro03_F000,2 851 | Ses04F_impro03_F001,2 852 | Ses04F_impro03_F002,2 853 | Ses04F_impro03_F003,2 854 | Ses04F_impro03_F004,2 855 | Ses04F_impro03_F005,2 856 | Ses04F_impro03_F006,2 857 | Ses04F_impro03_F007,2 858 | Ses04F_impro03_F009,2 859 | Ses04F_impro03_F010,2 860 | Ses04F_impro03_F011,2 861 | Ses04F_impro03_F012,2 862 | Ses04F_impro03_F013,2 863 | Ses04F_impro03_F014,2 864 | Ses04F_impro03_F015,2 865 | Ses04F_impro03_F016,2 866 | Ses04F_impro03_F017,2 867 | Ses04F_impro03_F018,2 868 | Ses04F_impro03_F034,2 869 | Ses04F_impro03_F036,2 870 | Ses04F_impro03_F038,2 871 | Ses04F_impro03_F040,2 872 | Ses04F_impro03_F041,2 873 | Ses04F_impro03_F042,2 874 | Ses04F_impro03_F043,2 875 | Ses04F_impro03_F044,2 876 | Ses04F_impro03_F045,2 877 | Ses04F_impro03_F047,2 878 | Ses04F_impro03_F048,2 879 | Ses04F_impro03_F050,2 880 | Ses04F_impro03_F051,2 881 | Ses04F_impro03_F052,2 882 | Ses04F_impro03_M000,3 883 | Ses04F_impro03_M001,3 884 | Ses04F_impro03_M003,2 885 | Ses04F_impro03_M004,2 886 | Ses04F_impro03_M005,2 887 | Ses04F_impro03_M014,2 888 | Ses04F_impro03_M018,3 889 | Ses04F_impro03_M019,3 890 | Ses04F_impro03_M020,3 891 | Ses04F_impro03_M021,3 892 | Ses04F_impro03_M027,3 893 | Ses04F_impro03_M028,3 894 | Ses04F_impro03_M029,3 895 | Ses04F_impro03_M030,3 896 | Ses04F_impro03_M031,3 897 | Ses04F_impro03_M032,3 898 | Ses04F_impro03_M033,3 899 | Ses04F_impro03_M034,3 900 | 
Ses04F_impro03_M035,3 901 | Ses04F_impro03_M036,2 902 | Ses04F_impro03_M037,2 903 | Ses04F_impro03_M038,2 904 | Ses04F_impro03_M039,2 905 | Ses04F_impro03_M040,2 906 | Ses04F_impro03_M041,2 907 | Ses04F_impro03_M042,2 908 | Ses04F_impro03_M043,2 909 | Ses04F_impro03_M047,3 910 | Ses04F_impro04_F000,3 911 | Ses04F_impro04_F005,0 912 | Ses04F_impro04_F006,0 913 | Ses04F_impro04_F008,0 914 | Ses04F_impro04_F011,0 915 | Ses04F_impro04_F013,0 916 | Ses04F_impro04_F014,0 917 | Ses04F_impro04_F015,0 918 | Ses04F_impro04_F016,0 919 | Ses04F_impro04_F017,0 920 | Ses04F_impro04_F018,0 921 | Ses04F_impro04_F025,0 922 | Ses04F_impro04_F030,0 923 | Ses04F_impro04_F031,0 924 | Ses04F_impro04_M001,3 925 | Ses04F_impro04_M002,3 926 | Ses04F_impro04_M003,3 927 | Ses04F_impro04_M004,3 928 | Ses04F_impro04_M005,3 929 | Ses04F_impro04_M006,3 930 | Ses04F_impro04_M007,3 931 | Ses04F_impro04_M008,3 932 | Ses04F_impro04_M009,3 933 | Ses04F_impro04_M013,3 934 | Ses04F_impro04_M014,3 935 | Ses04F_impro04_M015,3 936 | Ses04F_impro04_M016,3 937 | Ses04F_impro04_M017,3 938 | Ses04F_impro04_M018,3 939 | Ses04F_impro04_M019,3 940 | Ses04F_impro04_M020,3 941 | Ses04F_impro04_M021,3 942 | Ses04F_impro04_M022,3 943 | Ses04F_impro04_M023,3 944 | Ses04F_impro04_M026,3 945 | Ses04F_impro04_M028,3 946 | Ses04F_impro04_M030,3 947 | Ses04F_impro04_M031,3 948 | Ses04F_impro04_M033,3 949 | Ses04F_script02_2_F002,0 950 | Ses04F_script02_2_F005,0 951 | Ses04F_script02_2_F006,0 952 | Ses04F_script02_2_F007,0 953 | Ses04F_script02_2_F008,3 954 | Ses04F_script02_2_F009,0 955 | Ses04F_script02_2_F010,0 956 | Ses04F_script02_2_F011,0 957 | Ses04F_script02_2_F014,0 958 | Ses04F_script02_2_F019,1 959 | Ses04F_script02_2_F020,1 960 | Ses04F_script02_2_F021,1 961 | Ses04F_script02_2_F022,1 962 | Ses04F_script02_2_F023,1 963 | Ses04F_script02_2_F024,1 964 | Ses04F_script02_2_F025,3 965 | Ses04F_script02_2_F026,1 966 | Ses04F_script02_2_F031,0 967 | Ses04F_script02_2_F033,0 968 | Ses04F_script02_2_F034,0 969 | 
Ses04F_script02_2_F037,1 970 | Ses04F_script02_2_F041,1 971 | Ses04F_script02_2_F043,2 972 | Ses04F_script02_2_F044,2 973 | Ses04F_script02_2_F045,2 974 | Ses04F_script02_2_F046,2 975 | Ses04F_script02_2_M000,3 976 | Ses04F_script02_2_M019,3 977 | Ses04F_script02_2_M020,3 978 | Ses04F_script02_2_M021,2 979 | Ses04F_script02_2_M023,3 980 | Ses04F_script02_2_M040,1 981 | Ses04F_script02_2_M041,1 982 | Ses04F_script02_2_M043,3 983 | Ses04F_script02_2_M044,3 984 | Ses04F_script02_2_M046,2 985 | Ses04F_script02_2_M047,3 986 | Ses04M_impro02_F009,1 987 | Ses04M_impro02_F020,1 988 | Ses04M_impro02_F021,0 989 | Ses04M_impro02_M000,1 990 | Ses04M_impro02_M008,3 991 | Ses04M_script02_2_F000,3 992 | Ses04M_script02_2_F006,0 993 | Ses04M_script02_2_F007,0 994 | Ses04M_script02_2_F009,0 995 | Ses04M_script02_2_F013,0 996 | Ses04M_script02_2_F016,1 997 | Ses04M_script02_2_F018,1 998 | Ses04M_script02_2_F019,1 999 | Ses04M_script02_2_F020,1 1000 | Ses04M_script02_2_F021,1 1001 | Ses04M_script02_2_F025,0 1002 | Ses04M_script02_2_F026,0 1003 | Ses04M_script02_2_F027,0 1004 | Ses04M_script02_2_F028,0 1005 | Ses04M_script02_2_F029,0 1006 | Ses04M_script02_2_F032,1 1007 | Ses04M_script02_2_F035,2 1008 | Ses04M_script02_2_F036,2 1009 | Ses04M_script02_2_M000,3 1010 | Ses04M_script02_2_M001,3 1011 | Ses04M_script02_2_M006,0 1012 | Ses04M_script02_2_M007,0 1013 | Ses04M_script02_2_M020,3 1014 | Ses04M_script02_2_M021,3 1015 | Ses04M_script02_2_M029,0 1016 | Ses04M_script02_2_M031,0 1017 | Ses04M_script02_2_M034,0 1018 | Ses04M_script02_2_M037,1 1019 | Ses04M_script02_2_M039,2 1020 | Ses04M_script02_2_M040,2 1021 | Ses04M_script02_2_M041,2 1022 | Ses04M_impro01_F000,3 1023 | Ses04M_impro01_F001,3 1024 | Ses04M_impro01_F002,3 1025 | Ses04M_impro01_F012,3 1026 | Ses04M_impro01_F018,0 1027 | Ses04M_impro01_F019,0 1028 | Ses04M_impro01_F021,0 1029 | Ses04M_impro01_M000,3 1030 | Ses04M_impro01_M003,3 1031 | Ses04M_impro01_M018,0 1032 | Ses04M_impro01_M024,0 1033 | 
-------------------------------------------------------------------------------- /Feature_Extractor/readme.md: -------------------------------------------------------------------------------- 1 | for extracting audio features. 2 | -------------------------------------------------------------------------------- /IEMOCAP/Dataset.py: -------------------------------------------------------------------------------- 1 | import os 2 | import librosa 3 | import numpy as np 4 | import pandas as pd 5 | import torch 6 | import torch.nn as nn 7 | import fairseq 8 | import lmdb 9 | import shutil 10 | def preprocess_lmdb(out_path,data_path,mode,csvname,reset): 11 | if os.path.exists(out_path + mode): 12 | if reset == False: 13 | return None 14 | else: 15 | shutil.rmtree(out_path + mode) 16 | print('start preprocessing .') 17 | data = pd.read_csv(csvname) 18 | names = data.name.values 19 | labels = data.label.values 20 | env = lmdb.open(out_path + mode,map_size = 409951162700) 21 | count = 0 22 | ones = np.ones((324),dtype = np.float32) 23 | with env.begin(write = True) as txn: 24 | for i in range(len(labels)): 25 | name1 = names[i] 26 | data1 = np.load(data_path + 'Session' + name1[4]+'/'+name1+'.npy') 27 | newdata1 = np.zeros((324,1024),dtype = np.float32) 28 | mask = np.zeros((324),dtype = np.float32) 29 | lens = data1.shape[0] 30 | if lens > 324: 31 | newlens = 324 32 | else: 33 | newlens = lens 34 | mask[newlens:] = ones[newlens:] 35 | newdata1[:newlens, :] = data1[:newlens, :] 36 | key_data = 'data-%05d'%count 37 | key_label = 'label-%05d'%count 38 | key_mask = 'mask-%05d'%count 39 | txn.put(key_data.encode(),newdata1) 40 | txn.put(key_label.encode(),labels[i]) 41 | txn.put(key_mask.encode(),mask) 42 | count += 1 43 | env.close() 44 | print(' preprocess is finished !') 45 | 46 | out_path = r'./new_database_wavlm_mask_324/' #save the Dataset 47 | os.mkdir(out_path) 48 | for i in range(1,6): 49 | csvname1 = r'train'+ str(i) + '.csv' 50 | data_path = r'./Feature/WavLM/'#place 
where saves the WavLM features 51 | mode = r'train' + str(i) 52 | os.mkdir(out_path+mode) 53 | reset = True 54 | preprocess_lmdb(out_path,data_path,mode,csvname1,reset) 55 | mode = r'valid' + str(i) 56 | os.mkdir(out_path+mode) 57 | csvname2 = r'valid' + str(i) + '.csv' 58 | preprocess_lmdb(out_path,data_path,mode,csvname2,reset) -------------------------------------------------------------------------------- /IEMOCAP/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/scutcsq/DWFormer/2799b01c769f77e151fcd51a01b21c8e99bb0434/IEMOCAP/__init__.py -------------------------------------------------------------------------------- /IEMOCAP/model.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import random 4 | from utils.vanillatransformer import vanilla_transformer_block 5 | 6 | from utils.modules import PositionalEncoding 7 | from utils.DWFormerBlock import DWFormerBlock 8 | 9 | 10 | class classifier(nn.Module): 11 | def __init__(self, feadim, classnum): 12 | super(classifier, self).__init__() 13 | self.fc1 = nn.Linear(feadim, feadim // 2) 14 | self.fc2 = nn.Linear(feadim // 2, feadim // 4) 15 | self.fc3 = nn.Linear(feadim // 4, classnum) 16 | self.relu = nn.ReLU() 17 | self.dropout = nn.Dropout(0.5) 18 | self.avgpool = nn.AdaptiveAvgPool1d(1) 19 | def forward(self, x): 20 | x = x.permute([0,2,1]) 21 | x = self.avgpool(x).squeeze(-1) 22 | x = self.fc1(x) 23 | x = self.dropout(self.relu(x)) 24 | x = self.fc2(x) 25 | x = self.dropout(self.relu(x)) 26 | out = self.fc3(x) 27 | return out 28 | 29 | 30 | class DWFormer(nn.Module): 31 | def __init__(self, feadim, n_head, FFNdim, classnum): 32 | super(DWFormer, self).__init__() 33 | ''' 34 | feadim:input dimension of the feature 35 | n_head:numbers of the attention head 36 | FFNdim:dimension of FeedForward Network 37 | classnum: numbers of emotion 38 | ''' 39 | self.or1 
= vanilla_transformer_block(feadim, n_head, FFNdim) 40 | self.dt1 = DWFormerBlock(feadim, n_head, FFNdim) 41 | self.dt2 = DWFormerBlock(feadim, n_head, FFNdim) 42 | self.dt3 = DWFormerBlock(feadim, n_head, FFNdim) 43 | self.classifier = classifier(feadim, classnum) 44 | self.PE = PositionalEncoding(feadim) 45 | self.ln1 = nn.LayerNorm(feadim,eps = 1e-5) 46 | self.ln2 = nn.LayerNorm(feadim,eps = 1e-5) 47 | self.ln3 = nn.LayerNorm(feadim,eps = 1e-5) 48 | self.ln4 = nn.LayerNorm(feadim,eps = 1e-5) 49 | self._reset_parameters() 50 | def _reset_parameters(self): 51 | for p in self.parameters(): 52 | if p.dim() > 1: 53 | nn.init.xavier_uniform_(p) 54 | def forward(self, x, x_mask): 55 | ''' 56 | x:input data, (b, t, c) 57 | x_mask: 58 | ''' 59 | batch, times, _ = x.shape 60 | haltingscore = torch.zeros((batch, times), device=x.device) 61 | 62 | # propose random attention scores instead of vanilla transformer 63 | # randomdata = self.getNumList(0,323,120) 64 | # haltingscore[:,randomdata] += 1e-10 65 | # Vanilla Transformer Block 66 | 67 | x = self.ln1(x) 68 | x1,_,attn = self.or1(x, haltingscore) 69 | 70 | # DWFormer Block 71 | 72 | x2,thresholds1,attn11 = self.dt1(x1, x_mask, attn) 73 | x3 = self.ln2(x2) 74 | x4,thresholds2,attn12 = self.dt2(x3, x_mask, attn11) 75 | x5 = self.ln3(x4) 76 | x6,thresholds3,attn13 = self.dt3(x5, x_mask, attn12) 77 | 78 | # Classifier 79 | 80 | out = self.classifier(x6) 81 | 82 | return out 83 | 84 | # attn4 = torch.cat([attn.unsqueeze(0).data,attn11.unsqueeze(0).data,attn12.unsqueeze(0).data,attn13.unsqueeze(0)],dim = 0)#ori结果 85 | # thresholds = torch.cat([thresholds1.unsqueeze(0).data,thresholds2.unsqueeze(0).data,thresholds3.unsqueeze(0)],dim = 0)#分窗 86 | # return out,attn4,thresholds 87 | 88 | def getNuthresholdsist(self,start, end, n): 89 | ''' 90 | generate random init 91 | ''' 92 | numsArray = set() 93 | while len(numsArray) < n: 94 | numsArray.add(random.randint(start, end)) 95 | 96 | return list(numsArray) 97 | 98 | 
-------------------------------------------------------------------------------- /IEMOCAP/readme.md: -------------------------------------------------------------------------------- 1 | This fold saves the code of IEMOCAP. 2 | -------------------------------------------------------------------------------- /IEMOCAP/train.py: -------------------------------------------------------------------------------- 1 | import os 2 | # from turtle import forward 3 | os.environ["CUDA_VISIBLE_DEVICES"] = '6' 4 | import sys 5 | import torch 6 | import torch.nn as nn 7 | import numpy as np 8 | import pandas as pd 9 | # import librosa 10 | import time 11 | import random 12 | from sklearn import metrics 13 | from torch.utils.data import Dataset 14 | from torch.utils.data import DataLoader 15 | from torch.optim import lr_scheduler 16 | from torch.backends import cudnn 17 | # from statistics import mode 18 | import matplotlib.pyplot as plt 19 | 20 | from model import DWFormer 21 | import torch.nn.functional as F 22 | import math 23 | import lmdb 24 | 25 | 26 | torch.set_num_threads(1) 27 | gen_data = False 28 | voting = True 29 | 30 | #-----------------------------------------------限制随机------------------------------------------ 31 | 32 | torch.backends.cudnn.deterministic=True 33 | torch.backends.cudnn.benchmark= False 34 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu") 35 | cuda = True if torch.cuda.is_available() else False 36 | #-------------------------------------------------生成DataLoader----------------------------------- 37 | class lmdb_dataset(Dataset): 38 | def __init__(self,out_path,mode): 39 | self.env = lmdb.open(out_path + mode) 40 | self.txn = self.env.begin(write = False) 41 | self.len = self.txn.stat()['entries'] 42 | def __getitem__(self,index): 43 | key_data = 'data-%05d' %index 44 | key_label = 'label-%05d' %index 45 | key_mask = 'mask-%05d' %index 46 | 47 | data = np.frombuffer(self.txn.get(key_data.encode()),dtype = np.float32) 48 | data = 
torch.FloatTensor(data.reshape(-1,1024).copy()) 49 | label = np.frombuffer(self.txn.get(key_label.encode()),dtype = np.int64) 50 | label = torch.LongTensor(label.copy()).squeeze() 51 | mask = np.frombuffer(self.txn.get(key_mask.encode()),dtype = np.float32) 52 | mask = torch.FloatTensor(mask.copy()) 53 | 54 | return data, label,mask 55 | def __len__(self): 56 | return int(self.len / 3) 57 | #--------------------------------------------------数据加载--------------------------------- 58 | final_wa = [] 59 | final_ua = [] 60 | for i in range(1,6): 61 | seed = 3 62 | torch.manual_seed(seed) 63 | torch.cuda.manual_seed(seed) 64 | np.random.seed(seed) 65 | random.seed(seed) 66 | torch.cuda.empty_cache() 67 | #generate Dataset and DataLoader 68 | out_path = r'./new_database_wavlm_mask_324/' 69 | # out_path = r'./new_database_hubert_mask_324/' 70 | # out_path = r'./randoms/' 71 | trainplace = r'train' + str(i) 72 | validplace = r'valid' + str(i) 73 | train_dataset = lmdb_dataset(out_path,trainplace) 74 | develop_dataset = lmdb_dataset(out_path,validplace) 75 | trainDataset = DataLoader(dataset=train_dataset,batch_size=32,shuffle=True,drop_last = False) 76 | developDataset = DataLoader(dataset=develop_dataset,batch_size=32,shuffle= False) 77 | 78 | #---------------------------------------------------parameter setting--------------------------------- 79 | # model = Vanilla_Transformer(input_dim = 1024, ffn_embed_dim = 512, num_layers = 7, num_heads = 8, num_classes = 4,dropout = 0.3).to(device) 80 | model = DWFormer(feadim = 1024, n_head = 8, FFNdim = 512, classnum = 4).to(device) 81 | # model = localTranformer2(feadim = 1024,n_head = 8, FFNdim = 512, classnum = 4).to(device) 82 | WD = 5e-4 83 | LR_DECAY = 0.5 84 | EPOCH = 120 85 | STEP_SIZE = 5 86 | lr = 5e-4 87 | # optimizer = torch.optim.Adam(model.parameters(), lr = 5e-6, betas=(0.9, 0.999), eps=1e-08, weight_decay=WD) 88 | optimizer = torch.optim.SGD(model.parameters(),lr = lr, momentum = 0.9) 89 | # scheduler = 
lr_scheduler.StepLR(optimizer, step_size=STEP_SIZE, gamma=LR_DECAY) 90 | # scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(optimizer, 120, 1e-4 * 0.1) 91 | scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer,T_0 = 3,T_mult = 2, eta_min = 1e-4 * 0.1) 92 | loss = nn.CrossEntropyLoss().to(device) 93 | #--------------------------------------------------train-------------------------------------------- 94 | best_wa = 0 95 | best_ua = 0 96 | num = 0 97 | for epoch in range(EPOCH): 98 | model.train() 99 | loss_tr = 0.0 100 | start_time = time.time() 101 | pred_all,actu_all = [],[] 102 | for step, (datas,labels,mask) in enumerate(trainDataset, 0): 103 | mask = mask.to(device) 104 | datas = datas.to(device) 105 | labels = labels.view(len(labels)) 106 | labels = labels.to(device) 107 | optimizer.zero_grad() 108 | # out, attn, windowattn, ML = model(datas,mask) 109 | out = model(datas, mask) 110 | err1 = loss(out,labels.long()) 111 | err1.backward() 112 | optimizer.step() 113 | pred = torch.max(out.cpu().data, 1)[1].numpy() 114 | actu = labels.cpu().data.numpy() 115 | pred_all += list(pred) 116 | actu_all += list(actu) 117 | loss_tr += err1.cpu().item() 118 | loss_tr = loss_tr / len(trainDataset.dataset) 119 | pred_all, actu_all = np.array(pred_all), np.array(actu_all) 120 | wa_tr = metrics.accuracy_score(actu_all, pred_all) 121 | ua_tr = metrics.recall_score(actu_all, pred_all,average='macro') 122 | end_time = time.time() 123 | print('TRAIN:: Epoch: ', epoch, '| Loss: %.3f' % loss_tr, '| wa: %.3f' % wa_tr, '| ua: %.3f' % ua_tr) 124 | print('所耗时长:',str(end_time-start_time),'s') 125 | scheduler.step() 126 | # torch.save(model.state_dict(), 'result2/'+str(epoch)+'.pkl') 127 | # #---------------------------------------------------develop----------------------------------------- 128 | model.eval() 129 | loss_de = 0.0 130 | start_time = time.time() 131 | pred_all,actu_all = [],[] 132 | for step, (datas,labels,mask) in enumerate(developDataset, 0): 133 
| mask = mask.to(device) 134 | datas = datas.to(device) 135 | labels = labels.view(len(labels)) 136 | labels = labels.to(device) 137 | #原有 138 | with torch.no_grad(): 139 | # out, attn, windowattn, ML = model(datas,mask) 140 | out = model(datas, mask) 141 | err1 = loss(out,labels.long()) 142 | pred = torch.max(out.cpu().data, 1)[1].numpy() 143 | actu = labels.cpu().data.numpy() 144 | pred_all += list(pred) 145 | actu_all += list(actu) 146 | loss_de += err1.cpu().item() 147 | loss_de = loss_de / len(developDataset.dataset) 148 | pred_all, actu_all = np.array(pred_all,dtype=int), np.array(actu_all,dtype=int) 149 | wa_de = metrics.accuracy_score(actu_all, pred_all) 150 | ua_de = metrics.recall_score(actu_all, pred_all,average='macro') 151 | if (ua_de+wa_de) >= (best_ua + best_wa): 152 | best_ua = ua_de 153 | best_wa = wa_de 154 | num = epoch 155 | end_time = time.time() 156 | print('VALID:: Epoch: ', epoch, '| Loss: %.3f' % loss_de, '| wa: %.3f' % wa_de, '| ua: %.3f' % ua_de) 157 | print('所耗时长: ',str(end_time-start_time),'s') 158 | final_wa.append(best_wa) 159 | final_ua.append(best_ua) 160 | print('当前折最好结果: | wa: %.3f' %best_wa, '|ua: %.3f' %best_ua) 161 | final_wa = np.array(final_wa) 162 | final_ua = np.array(final_ua) 163 | final_wa1 = np.mean(final_wa) 164 | final_ua1 = np.mean(final_ua) 165 | print('五折交叉验证结果: | wa: %.3f' %final_wa1,'|ua: %.3f' %final_ua1) 166 | -------------------------------------------------------------------------------- /IEMOCAP/utils/DGWT.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import numpy as np 4 | import math 5 | from torch.autograd import Variable 6 | from einops import rearrange 7 | import time 8 | import torch.nn.functional as F 9 | import torch.nn.utils.rnn as rnn 10 | import random 11 | from fairseq.modules.multihead_attention import MultiheadAttention 12 | from .modules import FeedForwardNetwork 13 | 14 | class GlobalMHA(nn.Module): 15 | def 
__init__(self, dim, head): 16 | super(GlobalMHA, self).__init__() 17 | self.softmax2 = nn.Softmax(dim=-1) 18 | self.mha = MultiheadAttention(embed_dim=dim, num_heads=head) 19 | 20 | def forward(self, x, mask): 21 | ''' 22 | x:data shape,(t, batch, c) 23 | 24 | mask:attention mask,mask window tokens padding, (batch, t) 25 | ''' 26 | attn_output, attn_output_weights = self.mha(query=x, key=x, value=x,key_padding_mask = mask.bool()) 27 | haltingscore = torch.sum(attn_output_weights, dim=1) 28 | #calculate important scores 29 | mask2 = mask * (-1e10) 30 | haltingscore = self.softmax2(haltingscore + mask2) 31 | return attn_output, haltingscore 32 | 33 | class DynamicGlobalWindowTransformer(nn.Module): 34 | def __init__(self, dim, head, FFNdim) -> None: 35 | super(DynamicGlobalWindowTransformer, self).__init__() 36 | self.MHSA = GlobalMHA(dim, head) 37 | self.FFN = FeedForwardNetwork(dim, FFNdim) 38 | self.ln1 = nn.LayerNorm(dim, eps=1e-5) 39 | self.ln2 = nn.LayerNorm(dim, eps=1e-5) 40 | 41 | def forward(self, x, mask): 42 | x = x.permute([1,0,2]) 43 | residual = x 44 | x1, attn = self.MHSA(x, mask) 45 | x = residual + x1 46 | x = self.ln1(x) 47 | residual = x 48 | x2 = self.FFN(x) 49 | x = self.ln2(residual + x2) 50 | x = x.permute([1,0,2]) 51 | return x, attn -------------------------------------------------------------------------------- /IEMOCAP/utils/DLWT.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import numpy as np 4 | import math 5 | from torch.autograd import Variable 6 | from einops import rearrange 7 | import time 8 | import torch.nn.functional as F 9 | import torch.nn.utils.rnn as rnn 10 | import random 11 | from fairseq.modules.multihead_attention import MultiheadAttention 12 | from .modules import FeedForwardNetwork 13 | 14 | class LocalMHA(nn.Module): 15 | def __init__(self, dim, head): 16 | super(LocalMHA, self).__init__() 17 | assert dim % head == 0 18 | self.softmax 
= nn.Softmax(dim=-1) 19 | self.softmax2 = nn.Softmax(dim=-1) 20 | self.head = head 21 | self.mha = MultiheadAttention(embed_dim=dim, num_heads=head) 22 | self.dropout = nn.Dropout(0.1) 23 | def forward(self, x, mask,mappingmask): 24 | ''' 25 | x:input data, (t, batch, c) 26 | 27 | attn mask: attention mask, (batch,t, t) 28 | 29 | padding mask: mask the padding portion, (batch t) 30 | ''' 31 | #calculate multi-head attention 32 | mask1 = mask.unsqueeze(1) 33 | mask1 = mask1.expand(-1,self.head,-1,-1) 34 | mask1 = rearrange(mask1,'b h t1 t2 -> (b h) t1 t2') 35 | mask2 = torch.ones_like(mappingmask) 36 | mask3 = (mask2- mappingmask) * (-1e10) 37 | attn_output, attn_output_weights = self.mha(query=x, key=x, value=x,attn_mask=mask1.bool()) 38 | #calculate importance scores 39 | haltingscore = torch.sum(attn_output_weights, dim=1).unsqueeze(1) 40 | haltingscore = haltingscore * mappingmask 41 | haltingscore = self.softmax2(haltingscore + mask3) * mappingmask 42 | haltingscore = torch.where(torch.isnan(haltingscore),torch.zeros_like(haltingscore),haltingscore) 43 | haltingscore = torch.sum(haltingscore,dim = 1) 44 | return attn_output, haltingscore 45 | 46 | class DynamicLocalWindowTransformer(nn.Module): 47 | def __init__(self, dim, head, FFNdim) -> None: 48 | super(DynamicLocalWindowTransformer, self).__init__() 49 | self.MHSA = LocalMHA(dim, head) 50 | self.FFN = FeedForwardNetwork(dim, FFNdim) 51 | self.ln1 = nn.LayerNorm(dim, eps=1e-5) 52 | self.ln2 = nn.LayerNorm(dim, eps=1e-5) 53 | 54 | def forward(self, x, mask,mappingmask): 55 | ''' 56 | Dyanmic Local Window Transformer (DLWT) modules 57 | 58 | x:input data, (batch, t, c) 59 | 60 | attn mask: attention mask, (batch, t, t) 61 | 62 | padding mask: mask the padding portion, (batch, t) 63 | ''' 64 | x = x.permute([1,0,2]) 65 | residual = x 66 | x1, attn = self.MHSA(x, mask,mappingmask) 67 | x = residual + x1 68 | x = self.ln1(x) 69 | residual = x 70 | x2 = self.FFN(x) 71 | x = self.ln2(residual + x2) 72 | x = 
x.permute([1,0,2]) 73 | return x, attn -------------------------------------------------------------------------------- /IEMOCAP/utils/DWFormerBlock.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import numpy as np 4 | import math 5 | from torch.autograd import Variable 6 | from einops import rearrange 7 | import time 8 | import torch.nn.functional as F 9 | import torch.nn.utils.rnn as rnn 10 | import random 11 | from .DLWT import DynamicLocalWindowTransformer 12 | from .DGWT import DynamicGlobalWindowTransformer 13 | from .modules import arbitrary_mask_v2 14 | 15 | class DWFormerBlock(nn.Module): 16 | def __init__(self, feadim, n_head, ffndim): 17 | super(DWFormerBlock, self).__init__() 18 | self.DLWT = DynamicLocalWindowTransformer(feadim, n_head, ffndim) 19 | self.DGWT = DynamicGlobalWindowTransformer(feadim, n_head, ffndim) 20 | self.softmax = nn.Softmax(dim=-1) 21 | self.sigmoid = nn.Sigmoid() 22 | self.ln1 = nn.LayerNorm(feadim,eps = 1e-5) 23 | self.ln2 = nn.LayerNorm(feadim,eps = 1e-5) 24 | def forward(self, x, mask, haltingscore): 25 | #---Dynamic Local Window Splitting Module--- 26 | 27 | attention_mask,mappingmask,attention_mask2,thresholds,lengths,wise = self.mask_generation_function(haltingscore, x, mask,threshold = 0.5, lambdas= 0.85) 28 | 29 | #---Dynamic Local Window Transformer Module--- 30 | 31 | x = self.ln1(x) 32 | local_x, local_score = self.DLWT(x, attention_mask,mappingmask) 33 | 34 | #---Dynamic Global Window Transformer Module 35 | local_x = local_x * wise.unsqueeze(-1) 36 | pre_global_x = self.beforeDGWT(local_x,mappingmask,local_score,lengths) 37 | pre_global_x = self.ln2(pre_global_x) 38 | global_x, hh2 = self.DGWT(pre_global_x, attention_mask2) 39 | 40 | #Summation 41 | 42 | data, attn = self.afterDGWT(local_x,global_x,local_score,hh2,mappingmask) 43 | attn = self.softmax(attn) 44 | 45 | return data, thresholds, attn 46 | def 
mask_generation_function(self,haltingscore, x, mask,threshold=0.5, lambdas=0.85): 47 | ''' 48 | input: 49 | haltingscore:(batch,token_length) 50 | x:(batch,token_length,feadim) 51 | mask:(batch,token_length) 52 | threshold 53 | lambdas 54 | output: 55 | attention_mask:(batch ) 56 | outdata(windownum,max_token_length,feadim) 57 | outmask(windownum,max_token_length) 58 | calwindow(batch) 59 | calwindowlength(windownum) 60 | attention_mask,mappingmask,attention_mask2,thresholds,totallength,wise 61 | ''' 62 | batch, token_length, fea = x.shape 63 | zero_mat = torch.zeros_like(haltingscore) 64 | one_mat = torch.ones_like(haltingscore) 65 | 66 | # calculate data length 67 | 68 | mask11 = one_mat - mask 69 | mask1 = torch.sum(mask11, dim=-1) # real data length 70 | token_length1 = torch.ones((batch), device=x.device) * token_length 71 | 72 | # threshold method 73 | 74 | med = mask1 * threshold + token_length1 - mask1 75 | thresholds, _ = torch.sort(haltingscore, dim=-1) 76 | med = list(map(int, med.cpu().data.numpy())) 77 | thresholds1 = thresholds[:, med] 78 | thresholds = torch.diag(thresholds1) 79 | 80 | # divide window,and find the begin and the end of the windows by first order difference 81 | # we use convolution operation to prevent the individual token from forming independent window 82 | 83 | x1 = torch.where(haltingscore >= thresholds.unsqueeze(1), one_mat, zero_mat) # (batch,token_length)#bigger than the threshold is set as 1, while set as 0. 
84 | wise = torch.where(haltingscore>= thresholds.unsqueeze(1), one_mat, one_mat * lambdas) # those token_lengths in weak emotional information places are multiplied by lambda = 0.85 85 | x2_1 = x1[:, 1:] # (batch,token_length) 86 | x2_2 = 1 - x1[:, -1] 87 | x2 = torch.cat([x2_1, x2_2.unsqueeze(-1)], axis=1) 88 | x3 = x2 - x1 # (batch,token_length)#一阶差分得到 89 | x3 = torch.where(x3 == -1, one_mat, x3) 90 | x4 = x3.view(1, 1, batch, token_length) 91 | b = Variable(torch.ones((1, 1, 1, 2), device=x.device)) 92 | x4 = F.conv1d(x4, b, padding=(0, 1)).view(batch, -1) 93 | x4 = x4[:, :-1] 94 | x3 = torch.where(x4 == 2, zero_mat, x3) 95 | x3[:,-1] = 1 96 | 97 | # utilize the begin and the end of the windows, split the window 98 | 99 | zerodim = torch.zeros((1), device=x.device) 100 | onedim = torch.ones((1), device=x.device) * (-1) 101 | result = torch.where(x3 != 0) # 返回切割位置 102 | onedim2 = torch.ones((1), device=x.device) * len(result[0]) 103 | result2 = torch.cat((onedim, result[1][:-1])) # 返回起点位置,但对batch切换数据后得改变起点重置为0 104 | result3 = torch.cat((result[0][1:], zerodim)) 105 | result4 = result3 - result[0] 106 | result4 = torch.cat((zerodim, result4[:-1])) 107 | zero = torch.ones_like(result4) * (-1) 108 | result2 = torch.where(result4 == 1, zero, result2) 109 | length = result[1] - result2 # calculate each length of the windows. 
110 | result6 = torch.where(result4 == 1) 111 | result7 = torch.cat([result6[0], onedim2]) 112 | result8 = torch.cat([zerodim, result6[0]]) 113 | calwindow = result7 - result8 114 | result2 += 1 115 | 116 | # calculate the begin and the end of the windows 117 | 118 | maxwindow = int(max(calwindow)) #maximum length of the window 119 | mappingmask = torch.zeros((batch,int(maxwindow),token_length),device = x.device)#batch,maxwindownum,token_length 120 | attention_mask = torch.ones((batch,token_length,token_length),device = x.device) 121 | calwindow1 = list(map(int, calwindow.cpu().data.numpy())) 122 | beginplace = torch.split(result2,calwindow1,dim = 0) 123 | endplace = torch.split(result[1],calwindow1,dim = 0) 124 | beginplace = rnn.pad_sequence(beginplace,batch_first = True)#calculate the begin places of the window 125 | endplace = rnn.pad_sequence(endplace,batch_first = True)#calculate the end places of the window 126 | 127 | # generate the attention mask for Local Window Tranformer Block 128 | 129 | a1 = torch.arange(1,maxwindow+1).unsqueeze(0).unsqueeze(-1).to(x.device) 130 | a1 = a1.expand(batch,-1,token_length) 131 | a2 = calwindow.unsqueeze(-1).unsqueeze(-1) 132 | a2 = a2.expand(-1,maxwindow,token_length) 133 | zeromat = torch.zeros_like(a2) 134 | mappingmask = arbitrary_mask_v2(beginplace,endplace,token_length,reverse = True,return_bool= False) 135 | mappingmask = torch.where(a1<=a2,mappingmask,zeromat) 136 | mappingmask_t = mappingmask.transpose(1,2) 137 | attention_mask = torch.matmul(mappingmask_t,mappingmask) 138 | attention_mask = 1- attention_mask 139 | 140 | # generate the attention mask for Global Window Transformer Block 141 | 142 | highattn1 = torch.arange(1,maxwindow+1).unsqueeze(0).to(x.device) 143 | highattn1 = highattn1.expand(batch,-1) 144 | calwindow2= calwindow.unsqueeze(-1) 145 | calwindow2 = calwindow2.expand(-1,maxwindow) 146 | attention_mask2 = torch.zeros((batch,maxwindow),device = x.device) 147 | one_mat = 
torch.ones((batch,maxwindow),device = x.device) 148 | attention_mask2 = torch.where(calwindow2>=highattn1,attention_mask2,one_mat) 149 | totallength = endplace - beginplace 150 | onemat = torch.ones_like(totallength) 151 | totallength = torch.where(totallength ==0,onemat,totallength) 152 | 153 | # return results 154 | 155 | return attention_mask,mappingmask,attention_mask2,thresholds,totallength,wise 156 | def beforeDGWT(self,inputdata, mappingmask,attn1,lengths): 157 | ''' 158 | Transform feature into window sequence. 159 | input: 160 | inputdata:(b,t,fea) 161 | mappingmask(b,maxwindownum,t) 162 | attn1(batch,t) 163 | output: 164 | outdata:(batch,maxwindownum,fea) 165 | ''' 166 | 167 | data = inputdata * attn1.unsqueeze(-1) 168 | outdata = torch.matmul(mappingmask,data) 169 | return outdata 170 | 171 | def afterDGWT(self,inputdata1,inputdata2,attn1,attn2,mappingmask): 172 | ''' 173 | Sum the DLWT features and DGWT features by upsampling. 174 | input:inputdata1:(b,t,fea) 175 | inputdata2:(b,maxwindownum,fea) 176 | attn1(b,t) 177 | attn2(b,maxwindownum) 178 | mappingmask(b,maxwindownum,t) 179 | output: 180 | outdata(b,t,fea) 181 | outattn(b,t) 182 | ''' 183 | mappingmask = mappingmask.transpose(-1,-2) 184 | inputdata2 = torch.matmul(mappingmask,inputdata2) 185 | attn2 = torch.matmul(mappingmask,attn2.unsqueeze(-1)).squeeze(-1) 186 | outattn = attn1 * attn2 187 | outdata = inputdata1 + inputdata2 188 | return outdata,outattn -------------------------------------------------------------------------------- /IEMOCAP/utils/__init__.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /IEMOCAP/utils/modules.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import numpy as np 4 | import math 5 | from torch.autograd import Variable 6 | from einops import rearrange 7 | import 
time 8 | import torch.nn.functional as F 9 | import torch.nn.utils.rnn as rnn 10 | import random 11 | from fairseq.modules.multihead_attention import MultiheadAttention 12 | 13 | def arbitrary_mask_v2(start_index: torch.Tensor, end_index: torch.Tensor, len_out: int, reverse: bool = False, return_bool: bool = True): 14 | '''' 15 | generate attention mask matrix 16 | Args: 17 | start_index: (b t), the first true value 18 | end_index: (b t), the last true value 19 | len_out: length of mask 20 | reverse: reverse the output mask (Default: False) 21 | return_bool: if True, return torch.BoolTensor, otherwise torch.FloatTensor 22 | Returns: 23 | mask: (b t len_out), the padded values are marked as True if reverse is False 24 | ''' 25 | b, t = start_index.shape 26 | start_index = start_index.unsqueeze(dim=-1)# b t 1 27 | end_index = end_index.unsqueeze(dim=-1) 28 | 29 | mask = torch.arange(0, len_out, device=start_index.device).unsqueeze(dim=0).unsqueeze(dim=1).expand(b, t, -1) 30 | mask = ((mask - start_index) < 0) | ((mask - end_index) > 0) 31 | if reverse: 32 | mask = ~mask 33 | if not return_bool: 34 | mask = mask.float() 35 | return mask 36 | 37 | class PositionalEncoding(nn.Module): 38 | "Implement the PE function." 39 | 40 | def __init__(self, d_model, max_len=5000): 41 | super(PositionalEncoding, self).__init__() 42 | 43 | # Compute the positional encodings once in log space. 
44 | pe = torch.zeros(max_len, d_model) 45 | position = torch.arange(0, max_len).unsqueeze(1) 46 | div_term = torch.exp(torch.arange(0, d_model, 2) * 47 | -(math.log(10000.0) / d_model)) 48 | pe[:, 0::2] = torch.sin(position * div_term) 49 | pe[:, 1::2] = torch.cos(position * div_term) 50 | pe = pe.unsqueeze(0) 51 | self.register_buffer('pe', pe) 52 | 53 | def forward(self, x): 54 | x = x + Variable(self.pe[:, :x.size(1)], 55 | requires_grad=False) 56 | return x 57 | 58 | 59 | class FeedForwardNetwork(nn.Module): 60 | "Implement the FFN function" 61 | def __init__(self, dim, FFNdim,dropout = 0.3) -> None: 62 | super(FeedForwardNetwork, self).__init__() 63 | self.FFN1 = nn.Linear(dim, FFNdim) 64 | self.gelu = nn.GELU() 65 | self.relu = nn.ReLU() 66 | self.FFN2 = nn.Linear(FFNdim, dim) 67 | self.dropout = nn.Dropout(dropout) 68 | self.dropout2 = nn.Dropout(dropout) 69 | 70 | def forward(self, x): 71 | x1 = self.FFN1(x) 72 | x1 = self.relu(x1) 73 | x1 = self.dropout(x1) 74 | x1 = self.FFN2(x1) 75 | x1 = self.dropout2(x1) 76 | return x1 -------------------------------------------------------------------------------- /IEMOCAP/utils/vanillatransformer.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import numpy as np 4 | import math 5 | from torch.autograd import Variable 6 | from einops import rearrange 7 | import time 8 | import torch.nn.functional as F 9 | import torch.nn.utils.rnn as rnn 10 | import random 11 | from fairseq.modules.multihead_attention import MultiheadAttention 12 | from .modules import FeedForwardNetwork 13 | # import modules.FeedForwardNetwork as FeedForwardNetwork 14 | 15 | 16 | class vanilla_transformer_block(nn.Module): 17 | def __init__(self, dim, head, FFNdim) -> None: 18 | super(vanilla_transformer_block, self).__init__() 19 | self.mha = MultiheadAttention(embed_dim=dim, num_heads=head) 20 | self.FFN = FeedForwardNetwork(dim, FFNdim) 21 | self.ln1 = 
nn.LayerNorm(dim, eps=1e-5) 22 | self.ln2 = nn.LayerNorm(dim, eps=1e-5) 23 | self.sigmoid = nn.Sigmoid() 24 | self.softmax = nn.Softmax(dim = -1) 25 | 26 | def forward(self, x, haltingscore): 27 | ''' 28 | x: (batch, t, c) 29 | haltingscore:(batch, t) 30 | ''' 31 | x = x.permute([1,0,2]) 32 | residual = x 33 | x1, attn = self.mha(key = x, value = x, query = x) 34 | x = residual + x1 35 | x = self.ln1(x) 36 | residual = x 37 | x2 = self.FFN(x) 38 | x = self.ln2(residual + x2) 39 | x = x.permute([1,0,2]) 40 | attn = torch.sum(attn,dim = 1) 41 | attn = self.softmax(attn) 42 | haltingscore += attn 43 | return x, haltingscore, attn -------------------------------------------------------------------------------- /Meld/Dataset.py: -------------------------------------------------------------------------------- 1 | import os 2 | import librosa 3 | import numpy as np 4 | import pandas as pd 5 | import torch 6 | import torch.nn as nn 7 | import fairseq 8 | import lmdb 9 | import shutil 10 | from scipy import io 11 | 12 | def label_MELD_change(label): 13 | if label == 'neutral': 14 | datalabel = 0 15 | elif label == 'joy': 16 | datalabel = 1 17 | elif label == 'sadness': 18 | datalabel = 2 19 | elif label == 'anger': 20 | datalabel = 3 21 | elif label == 'surprise': 22 | datalabel = 4 23 | elif label == 'fear': 24 | datalabel = 5 25 | elif label == 'disgust': 26 | datalabel = 6 27 | return datalabel 28 | 29 | def gender_MELD_change(label): 30 | if label == 'Chandler': 31 | datalabel = 0 32 | elif label == 'Joey': 33 | datalabel = 0 34 | elif label == 'Rachel': 35 | datalabel = 1 36 | elif label == 'Monica': 37 | datalabel = 1 38 | elif label == 'Phoebe': 39 | datalabel = 1 40 | elif label == 'Ross': 41 | datalabel = 0 42 | else: 43 | datalabel = 2 44 | return datalabel 45 | 46 | def preprocess_lmdb_MELD(out_path,mode,csvname,reset,length=225): 47 | if os.path.exists(out_path + mode): 48 | if reset == False: 49 | return None 50 | else: 51 | shutil.rmtree(out_path + mode) 52 | 
data = pd.read_csv(csvname) 53 | Dialogue_ID = data.Dialogue_ID.values 54 | Utterance_ID = data.Utterance_ID.values 55 | emotion = data.Emotion.values 56 | speaker = data.Speaker.values 57 | env = lmdb.open(out_path + mode,map_size = 409951162700) 58 | count = 0 59 | with env.begin(write = True) as txn: 60 | for i in range(len(emotion)): 61 | name1 = 'dia' + str(Dialogue_ID[i]) + '_utt' + str(Utterance_ID[i]) + '.npy' 62 | # namepath = '/148Dataset/data-chen.weidong/meld/feature/wavlm_large_L12_mat/'+ mode + '/' + name1 63 | namepath = r'./feature/MFCC/'+mode+'/'+name1 64 | if os.path.exists(namepath): 65 | # data1 = io.loadmat(namepath)['wavlm'] 66 | data1 = np.load(namepath) 67 | newdata1 = np.zeros((length,1024),dtype = np.float32) 68 | maskdata = np.zeros((length), dtype = np.float32) 69 | ones = np.ones((length), dtype = np.float32) 70 | lens = data1.shape[0] 71 | if lens >= length: 72 | lens = length 73 | newlabel = label_MELD_change(emotion[i]) 74 | sexlabel = gender_MELD_change(speaker[i]) 75 | # print(speaker[i]) 76 | # print(sexlabel) 77 | if sexlabel!= 2: 78 | newdata1[:lens, :] = data1[:lens, :] 79 | maskdata[lens:] = ones[lens:] 80 | key_data = 'data-%05d'%count 81 | key_label = 'label-%05d'%count 82 | key_mask = 'mask-%05d'%count 83 | txn.put(key_data.encode(),newdata1) 84 | txn.put(key_label.encode(),np.array([newlabel])) 85 | txn.put(key_mask.encode(),maskdata) 86 | # txn.put(key_mask.encode(),mask) 87 | # print(newlabel) 88 | count += 1 89 | env.close() 90 | print(count) 91 | out_path = r'./WavLM12/' 92 | mode = 'train' 93 | csvname = r'train_sent_emo.csv' 94 | reset = True 95 | preprocess_lmdb_MELD(out_path,mode,csvname,reset) 96 | mode = 'dev' 97 | csvname = r'dev_sent_emo.csv' 98 | preprocess_lmdb_MELD(out_path,mode,csvname,reset) 99 | mode = 'test' 100 | csvname = r'test_sent_emo.csv' 101 | preprocess_lmdb_MELD(out_path,mode,csvname,reset) 102 | -------------------------------------------------------------------------------- /Meld/model.py: 
-------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import random 4 | from utils.vanillatransformer import vanilla_transformer_block 5 | 6 | from utils.modules import PositionalEncoding 7 | from utils.DWFormerBlock import DWFormerBlock 8 | 9 | 10 | class classifier(nn.Module): 11 | def __init__(self, feadim, classnum): 12 | super(classifier, self).__init__() 13 | self.fc1 = nn.Linear(feadim, feadim // 2) 14 | self.fc2 = nn.Linear(feadim // 2, feadim // 4) 15 | self.fc3 = nn.Linear(feadim // 4, classnum) 16 | self.relu = nn.ReLU() 17 | self.dropout = nn.Dropout(0.5) 18 | self.avgpool = nn.AdaptiveAvgPool1d(1) 19 | def forward(self, x): 20 | x = x.permute([0,2,1]) 21 | x = self.avgpool(x).squeeze(-1) 22 | x = self.fc1(x) 23 | x = self.dropout(self.relu(x)) 24 | x = self.fc2(x) 25 | x = self.dropout(self.relu(x)) 26 | out = self.fc3(x) 27 | return out 28 | 29 | 30 | class DWFormer(nn.Module): 31 | def __init__(self, feadim, n_head, FFNdim, classnum): 32 | super(DWFormer, self).__init__() 33 | ''' 34 | feadim:input dimension of the feature 35 | n_head:numbers of the attention head 36 | FFNdim:dimension of FeedForward Network 37 | classnum: numbers of emotion 38 | ''' 39 | self.or1 = vanilla_transformer_block(feadim, n_head, FFNdim) 40 | self.dt1 = DWFormerBlock(feadim, n_head, FFNdim) 41 | self.dt2 = DWFormerBlock(feadim, n_head, FFNdim) 42 | self.dt3 = DWFormerBlock(feadim, n_head, FFNdim) 43 | self.dt4 = DWFormerBlock(feadim, n_head, FFNdim) 44 | self.classifier = classifier(feadim, classnum) 45 | self.PE = PositionalEncoding(feadim) 46 | self.ln1 = nn.LayerNorm(feadim,eps = 1e-5) 47 | self.ln2 = nn.LayerNorm(feadim,eps = 1e-5) 48 | self.ln3 = nn.LayerNorm(feadim,eps = 1e-5) 49 | self.ln4 = nn.LayerNorm(feadim,eps = 1e-5) 50 | self._reset_parameters() 51 | def _reset_parameters(self): 52 | for p in self.parameters(): 53 | if p.dim() > 1: 54 | nn.init.xavier_uniform_(p) 55 | def forward(self, 
x, x_mask): 56 | ''' 57 | x:input data, (b, t, c) 58 | x_mask: 59 | ''' 60 | batch, times, _ = x.shape 61 | haltingscore = torch.zeros((batch, times), device=x.device) 62 | 63 | # propose random attention scores instead of vanilla transformer 64 | # randomdata = self.getNumList(0,323,120) 65 | # haltingscore[:,randomdata] += 1e-10 66 | # Vanilla Transformer Block 67 | 68 | x = self.ln1(x) 69 | x1,_,attn = self.or1(x, haltingscore) 70 | 71 | # DWFormer Block 72 | 73 | x2,thresholds1,attn11 = self.dt1(x1, x_mask, attn) 74 | x3 = self.ln2(x2) 75 | x4,thresholds2,attn12 = self.dt2(x3, x_mask, attn11) 76 | # x5 = self.ln3(x4) 77 | # x6,thresholds3,attn13 = self.dt3(x5, x_mask, attn12) 78 | 79 | # Classifier 80 | 81 | out = self.classifier(x4) 82 | 83 | return out 84 | 85 | # attn4 = torch.cat([attn.unsqueeze(0).data,attn11.unsqueeze(0).data,attn12.unsqueeze(0).data,attn13.unsqueeze(0)],dim = 0)#ori结果 86 | # thresholds = torch.cat([thresholds1.unsqueeze(0).data,thresholds2.unsqueeze(0).data,thresholds3.unsqueeze(0)],dim = 0)#分窗 87 | # return out,attn4,thresholds 88 | 89 | def getNuthresholdsist(self,start, end, n): 90 | ''' 91 | generate random init 92 | ''' 93 | numsArray = set() 94 | while len(numsArray) < n: 95 | numsArray.add(random.randint(start, end)) 96 | 97 | return list(numsArray) 98 | -------------------------------------------------------------------------------- /Meld/readme.md: -------------------------------------------------------------------------------- 1 | Here saves the code of Meld. 
2 | -------------------------------------------------------------------------------- /Meld/train.py: -------------------------------------------------------------------------------- 1 | import os 2 | os.environ["CUDA_VISIBLE_DEVICES"] = '3' 3 | import torch 4 | import torch.nn as nn 5 | import numpy as np 6 | import time 7 | import random 8 | from sklearn import metrics 9 | from torch.utils.data import Dataset 10 | from torch.utils.data import DataLoader 11 | from statistics import mode 12 | from model import DWFormer 13 | import lmdb 14 | torch.set_num_threads(1) 15 | gen_data = False 16 | voting = True 17 | 18 | #-----------------------------------------------限制随机------------------------------------------ 19 | 20 | torch.backends.cudnn.deterministic=True 21 | torch.backends.cudnn.benchmark= False 22 | device = torch.device("cuda" if torch.cuda.is_available() else "cpu") 23 | cuda = True if torch.cuda.is_available() else False 24 | 25 | #-------------------------------------------------生成DataLoader----------------------------------- 26 | 27 | class lmdb_dataset(Dataset): 28 | def __init__(self,out_path,mode): 29 | self.env = lmdb.open(out_path + mode) 30 | self.txn = self.env.begin(write = False) 31 | self.len = self.txn.stat()['entries'] 32 | def __getitem__(self,index): 33 | key_data = 'data-%05d' %index 34 | key_label = 'label-%05d' %index 35 | key_mask = 'mask-%05d' %index 36 | 37 | data = np.frombuffer(self.txn.get(key_data.encode()),dtype = np.float32) 38 | data = torch.FloatTensor(data.reshape(-1,1024).copy()) 39 | label = np.frombuffer(self.txn.get(key_label.encode()),dtype = np.int64) 40 | label = torch.LongTensor(label.copy()).squeeze() 41 | mask = np.frombuffer(self.txn.get(key_mask.encode()),dtype = np.float32) 42 | mask = torch.FloatTensor(mask.copy()) 43 | 44 | return data, label, mask 45 | def __len__(self): 46 | return int(self.len / 3) 47 | 48 | final_wa = [] 49 | final_ua = [] 50 | 51 | seed = 2 52 | torch.manual_seed(seed) 53 | 
torch.cuda.manual_seed(seed) 54 | np.random.seed(seed) 55 | random.seed(seed) 56 | 57 | 58 | out_path = r'./WavLM/' 59 | trainplace = r'train' 60 | validplace = r'dev' 61 | testplace = r'test' 62 | train_dataset = lmdb_dataset(out_path,trainplace) 63 | develop_dataset = lmdb_dataset(out_path,validplace) 64 | test_dataset = lmdb_dataset(out_path,testplace) 65 | trainDataset = DataLoader(dataset=train_dataset,batch_size=32,shuffle=True,drop_last = True) 66 | developDataset = DataLoader(dataset=develop_dataset,batch_size=32,shuffle= False) 67 | testDataset = DataLoader(dataset = test_dataset,batch_size = 32, shuffle = False) 68 | 69 | model = DWFormer(feadim = 1024, n_head = 8, FFNdim = 512, classnum = 7).to(device) 70 | 71 | modelname = 'dwformer' 72 | WD = 1e-3 73 | LR_DECAY = 0.5 74 | EPOCH = 100 75 | STEP_SIZE = 5 76 | lr = 5e-4 77 | 78 | optimizer = torch.optim.SGD(model.parameters(),lr = lr, momentum = 0.9) 79 | 80 | scheduler = torch.optim.lr_scheduler.CosineAnnealingWarmRestarts(optimizer,T_0 = 3,T_mult = 2, eta_min = 1e-4 * 0.1) 81 | loss = nn.CrossEntropyLoss().to(device) 82 | 83 | #--------------------------------------------------train-------------------------------------------- 84 | best_wa = 0 85 | best_ua = 0 86 | best_test_wa = 0 87 | best_test_ua = 0 88 | act_best_test_wa = 0 89 | act_best_test_ua = 0 90 | num = 0 91 | for epoch in range(EPOCH): 92 | model.train() 93 | loss_tr = 0.0 94 | start_time = time.time() 95 | pred_all,actu_all = [],[] 96 | for step, (datas, labels, mask) in enumerate(trainDataset, 0): 97 | mask = mask.to(device) 98 | datas = datas.to(device) 99 | labels = labels.view(len(labels)) 100 | labels = labels.to(device) 101 | optimizer.zero_grad() 102 | 103 | out = model(datas,mask) 104 | 105 | 106 | err1 = loss(out,labels.long()) 107 | err1.backward() 108 | optimizer.step() 109 | pred = torch.max(out.cpu().data, 1)[1].numpy() 110 | actu = labels.cpu().data.numpy() 111 | pred_all += list(pred) 112 | actu_all += list(actu) 113 | 
loss_tr += err1.cpu().item() 114 | loss_tr = loss_tr / len(trainDataset.dataset) 115 | pred_all, actu_all = np.array(pred_all), np.array(actu_all) 116 | wa_tr = metrics.accuracy_score(actu_all, pred_all) 117 | # ua_tr = metrics.recall_score(actu_all, pred_all,average='macro') 118 | ua_tr = metrics.f1_score(actu_all, pred_all, average = 'weighted') 119 | end_time = time.time() 120 | print('当前学习率',str(optimizer.param_groups[0]['lr'])) 121 | print('TRAIN:: Epoch: ', epoch, '| Loss: %.3f' % loss_tr, '| wa: %.3f' % wa_tr, '| ua: %.3f' % ua_tr) 122 | print('所耗时长:',str(end_time-start_time),'s') 123 | scheduler.step() 124 | 125 | # #---------------------------------------------------develop----------------------------------------- 126 | model.eval() 127 | loss_de = 0.0 128 | start_time = time.time() 129 | pred_all,actu_all = [],[] 130 | for step, (datas, labels, mask) in enumerate(developDataset, 0): 131 | mask = mask.to(device) 132 | datas = datas.to(device) 133 | labels = labels.view(len(labels)) 134 | labels = labels.to(device) 135 | #原有 136 | with torch.no_grad(): 137 | out = model(datas, mask) 138 | err1 = loss(out,labels.long()) 139 | pred = torch.max(out.cpu().data, 1)[1].numpy() 140 | actu = labels.cpu().data.numpy() 141 | pred_all += list(pred) 142 | actu_all += list(actu) 143 | loss_de += err1.cpu().item() 144 | loss_de = loss_de / len(developDataset.dataset) 145 | pred_all, actu_all = np.array(pred_all,dtype=int), np.array(actu_all,dtype=int) 146 | 147 | wa_de = metrics.accuracy_score(actu_all, pred_all) 148 | # ua_de = metrics.recall_score(actu_all, pred_all,average='macro') 149 | ua_de = metrics.f1_score(actu_all, pred_all, average = 'weighted') 150 | if ua_de > best_ua: 151 | torch.save(model.state_dict(), modelname + '/model-best_seed'+str(seed)+'.txt') 152 | best_ua = ua_de 153 | best_wa = wa_de 154 | num = epoch 155 | elif (ua_de == best_ua) and (wa_de > best_wa): 156 | torch.save(model.state_dict(), modelname + '/model-best_seed'+str(seed)+'.txt') 157 | 
best_ua = ua_de 158 | best_wa = wa_de 159 | num = epoch 160 | end_time = time.time() 161 | print('VALID:: Epoch: ', epoch, '| Loss: %.3f' % loss_de, '| wa: %.3f' % wa_de, '| ua: %.3f' % ua_de) 162 | print('所耗时长: ',str(end_time-start_time),'s') 163 | # # #------------------------------------test------------------------------------------------------------ 164 | print('验证集最好结果: | wa: %.3f' %best_wa, '|ua: %.3f' %best_ua) 165 | torch.cuda.empty_cache() 166 | model = DWFormer(feadim = 1024, n_head = 8, FFNdim = 512, classnum = 7) 167 | model.load_state_dict(torch.load(modelname + '/model-best_seed'+str(seed)+'.txt')) 168 | model = model.to(device) 169 | model.eval() 170 | loss_te = 0.0 171 | # start_time = time.time() 172 | pred_all,actu_all = [],[] 173 | for step, (datas,labels,mask) in enumerate(testDataset, 0): 174 | mask = mask.to(device) 175 | datas = datas.to(device) 176 | labels = labels.view(len(labels)) 177 | labels = labels.to(device) 178 | #原有 179 | with torch.no_grad(): 180 | out = model(datas,mask) 181 | 182 | err1 = loss(out,labels.long()) 183 | pred = torch.max(out.cpu().data, 1)[1].numpy() 184 | actu = labels.cpu().data.numpy() 185 | pred_all += list(pred) 186 | actu_all += list(actu) 187 | loss_te += err1.cpu().item() 188 | loss_te = loss_te / len(testDataset.dataset) 189 | pred_all, actu_all = np.array(pred_all,dtype=int), np.array(actu_all,dtype=int) 190 | epoch = 1 191 | wa_te = metrics.accuracy_score(actu_all, pred_all) 192 | # ua_te = metrics.recall_score(actu_all, pred_all,average='macro') 193 | ua_te = metrics.f1_score(actu_all, pred_all, average = 'weighted') 194 | print('TEST:: Epoch: ', epoch, '| Loss: %.3f' % loss_te, '| wa: %.3f' % wa_te, '| ua: %.3f' % ua_te) 195 | 196 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DWFormer: Dynamic Window Transformer for Speech Emotion Recognition 2 | 3 | This work is 
accepted as an oral presentation at ICASSP 2023. 4 | 5 | The paper links are: 6 | 7 | https://ieeexplore.ieee.org/abstract/document/10094651 8 | 9 | https://arxiv.org/abs/2303.01694 10 | ## Data and Pretrained Model Preparation 11 | The features we use are extracted with WavLM-Large, which can be downloaded from https://github.com/microsoft/unilm/tree/master/wavlm. 12 | 13 | Download the IEMOCAP dataset from https://sail.usc.edu/iemocap/ 14 | 15 | Download the Meld dataset from https://affective-meld.github.io 16 | 17 | ## Feature extraction: 18 | The feature extraction script is ./Feature_extractor/data_preprocess.py 19 | 20 | To process data in batches, we pad the data to the same length. The padded length for IEMOCAP is 324, while the length for Meld 21 | is 226. 22 | 23 | > python data_preprocess.py 24 | ## Dataset: 25 | > python ./IEMOCAP/Dataset.py 26 | > 27 | > python ./Meld/Dataset.py 28 | ## Training & Evaluating: 29 | 30 | The training scripts are IEMOCAP/train.py & Meld/train.py 31 | 32 | The model definition for each dataset is in its model.py 33 | 34 | > python ./IEMOCAP/train.py 35 | > 36 | > python ./Meld/train.py 37 | 38 | ## Citation: 39 | 40 | S. Chen, X. Xing, W. Zhang, W. Chen and X. Xu, "DWFormer: Dynamic Window Transformer for Speech Emotion Recognition," ICASSP 2023 - 2023 IEEE International Conference on Acoustics, Speech and Signal Processing (ICASSP), Rhodes Island, Greece, 2023, pp. 1-5, doi: 10.1109/ICASSP49357.2023.10094651. 41 | --------------------------------------------------------------------------------