├── codes ├── utils │ ├── __init__.py │ ├── metrics.py │ ├── transformations.py │ ├── tf_helper.py │ ├── split_helper.py │ ├── nn_layers.py │ └── dataset_helper.py ├── network │ ├── __init__.py │ ├── cnn.py │ ├── crnn.py │ └── network.py ├── .gitignore ├── generateSplit.py ├── definitions.py ├── train.py └── summarizeScores.py ├── data ├── training2017 │ ├── .gitignore │ └── splits │ │ └── split_5_6_14 │ │ ├── properties.json │ │ ├── valid2.json │ │ ├── valid4.json │ │ ├── valid0.json │ │ ├── valid1.json │ │ ├── valid3.json │ │ ├── test3.json │ │ ├── test1.json │ │ ├── test0.json │ │ ├── test4.json │ │ ├── test2.json │ │ └── train2.json └── get_data.sh ├── requirements.txt ├── requirements_gpu.txt ├── jobs ├── yannickpc.json ├── narigpu.json └── your_machine.json ├── models ├── CNN_paper.json └── CRNN_paper.json └── README.md /codes/utils/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /codes/network/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /codes/.gitignore: -------------------------------------------------------------------------------- 1 | __pycache__/ 2 | */__pycache__/ 3 | -------------------------------------------------------------------------------- /data/training2017/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | 3 | !.gitignore 4 | !REFERENCE-v3.csv 5 | !splits/ 6 | !splits/*/ 7 | !splits/*/* -------------------------------------------------------------------------------- /data/get_data.sh: -------------------------------------------------------------------------------- 1 | wget https://physionet.org/challenge/2017/training2017.zip 2 | unzip training2017.zip 3 | rm training2017.zip 4 | cd training2017 5 | mkdir mat_files hea_files 6 | mv *.mat mat_files 7 | mv *.hea hea_files -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | tensorflow==1.0.0 2 | scipy==1.0.0 3 | cycler==0.10.0 4 | numpy==1.13.3 5 | matplotlib==2.1.0 6 | parse==1.8.2 7 | protobuf==3.5.0.post1 8 | pyparsing==2.2.0 9 | python-dateutil==2.6.1 10 | pytz==2017.3 11 | scikit-learn==0.19.1 12 | six==1.11.0 13 | sklearn==0.0 -------------------------------------------------------------------------------- /requirements_gpu.txt: -------------------------------------------------------------------------------- 1 | tensorflow-gpu==1.0.0 2 | scipy==1.0.0 3 | cycler==0.10.0 4 | numpy==1.13.3 5 | matplotlib==2.1.0 6 | parse==1.8.2 7 | protobuf==3.5.0.post1 8 | pyparsing==2.2.0 9 | python-dateutil==2.6.1 10 | pytz==2017.3 11 | scikit-learn==0.19.1 12 | six==1.11.0 13 | sklearn==0.0 -------------------------------------------------------------------------------- /data/training2017/splits/split_5_6_14/properties.json: -------------------------------------------------------------------------------- 1 | { 2 | "inputs": { 3 | "(train+valid)/valid": 6, 4 | "holdout": false, 5 | "number of folds": 5, 6 | "seed": 14 7 | }, 8 | "name": "split_5_6_14", 9 | "relative size": { 10 | "holdout": 0, 11 | "test": 3, 12 | "train": 10, 13 | "valid": 2 14 | }, 15 | "relative size fraction": { 16 | "holdout": 0.0, 17 | "test": 0.2, 18 | "train": 0.66667, 19 | "valid": 0.13333 20 | } 21 | } 
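For reference, the "relative size" and "relative size fraction" entries above follow directly from the recorded inputs (5 folds, a (train+valid)/valid ratio of 6, no holdout). Below is a minimal standalone sketch of that arithmetic, mirroring `get_relative_size` and `minisplit` in `codes/utils/split_helper.py`; the snippet is only an illustration and not part of the repository.
```python
import functools
import math

def minisplit(total, splitfactor):
    # take 1/splitfactor of the total as the small part, keep the rest
    small = int(total / splitfactor)
    return small, total - small

n_folds, tv_frac = 5, 6                                # inputs from properties.json
test, rest = minisplit(n_folds * tv_frac, n_folds)     # 6, 24
valid, train = minisplit(rest, tv_frac)                # 4, 20
gcd = functools.reduce(math.gcd, [train, valid, test])
sizes = [train // gcd, valid // gcd, test // gcd]      # [10, 2, 3]
fractions = [round(s / sum(sizes), 5) for s in sizes]  # [0.66667, 0.13333, 0.2]
print(sizes, fractions)
```
The result reproduces the `train`/`valid`/`test` values of 10/2/3 and the fractions recorded for `split_5_6_14`.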
-------------------------------------------------------------------------------- /jobs/yannickpc.json: -------------------------------------------------------------------------------- 1 | { 2 | 3 | "CNN_paper": { 4 | "description": "CNN with the configuration from the paper", 5 | "model": "CNN_paper", 6 | "split": "split_5_6_14", 7 | "log_en": "True", 8 | "log_test_score": "True", 9 | "cvids": [4] 10 | }, 11 | 12 | "CRNN_paper": { 13 | "description": "CRNN with the configuration from the paper", 14 | "model": "CRNN_paper", 15 | "split": "split_5_6_14", 16 | "log_en": "True", 17 | "log_test_score": "True", 18 | "cvids": [4] 19 | } 20 | 21 | } -------------------------------------------------------------------------------- /jobs/narigpu.json: -------------------------------------------------------------------------------- 1 | { 2 | 3 | "CNN_paper": { 4 | "description": "CNN with the configuration from the paper", 5 | "model": "CNN_paper", 6 | "split": "split_5_6_14", 7 | "log_en": "True", 8 | "log_test_score": "True", 9 | "cvids": [0,1,2,3] 10 | }, 11 | 12 | "CRNN_paper": { 13 | "description": "CRNN with the configuration from the paper", 14 | "model": "CRNN_paper", 15 | "split": "split_5_6_14", 16 | "log_en": "True", 17 | "log_test_score": "True", 18 | "cvids": [0,1,2,3] 19 | } 20 | 21 | } -------------------------------------------------------------------------------- /jobs/your_machine.json: -------------------------------------------------------------------------------- 1 | { 2 | 3 | "CNN_paper": { 4 | "description": "CNN with the configuration from the paper", 5 | "model": "CNN_paper", 6 | "split": "split_5_6_14", 7 | "log_en": "True", 8 | "log_test_score": "True", 9 | "cvids": [0,1,2,3,4] 10 | }, 11 | 12 | "CRNN_paper": { 13 | "description": "CRNN with the configuration from the paper", 14 | "model": "CRNN_paper", 15 | "split": "split_5_6_14", 16 | "log_en": "True", 17 | "log_test_score": "True", 18 | "cvids": [0,1,2,3,4] 19 | } 20 | 21 | } -------------------------------------------------------------------------------- /codes/generateSplit.py: -------------------------------------------------------------------------------- 1 | '''*********************************************** 2 | * 3 | * project: physioNet 4 | * created: 23.11.2017 5 | * purpose: generate a certain split 6 | * 7 | ***********************************************''' 8 | 9 | '''*********************************************** 10 | * Imports 11 | ***********************************************''' 12 | 13 | import utils.dataset_helper as dsh 14 | 15 | '''*********************************************** 16 | * Script 17 | ***********************************************''' 18 | 19 | if __name__ == '__main__': 20 | dsh.gen_split(holdout=False, n_folds=5, tv_frac=6, seed=14) 21 | 22 | -------------------------------------------------------------------------------- /models/CNN_paper.json: -------------------------------------------------------------------------------- 1 | { 2 | "model_name": "CNN", 3 | 4 | "model_parameters": { 5 | "n_channels_first": 32, 6 | "growth_block_end": 32, 7 | "n_conv_blocks": 6, 8 | "kernel_size": [5,5], 9 | "dilation_rates": [1,1,1,1], 10 | "strides_block_end": [2,2], 11 | "max_pooling": false 12 | }, 13 | 14 | "preprocessing": { 15 | "spectrogram": true, 16 | "nperseg": 64, 17 | "noverlap": 32 18 | }, 19 | 20 | "loss_function_parameters": { 21 | "l2_penalty": 0, 22 | "class_penalty": 0.2 23 | }, 24 | 25 | "training_parameters": { 26 | "learning_rate": 0.001, 27 | "batch_size": 20, 28 | "drop_rate": 
0.15, 29 | "exponential_decay": false, 30 | "dataset_compensation": 0.2, 31 | "validation_step": 5, 32 | "early_stop_wait": 100 33 | }, 34 | 35 | "data_augmentation": { 36 | "resampling": true, 37 | "resample_method": "random", 38 | "zero_filter": true, 39 | "reload_step": 5, 40 | "awgn": false 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /models/CRNN_paper.json: -------------------------------------------------------------------------------- 1 | { 2 | "model_name": "CRNN", 3 | 4 | "model_parameters": { 5 | "n_channels_first": 32, 6 | "growth_block_end": 32, 7 | "n_conv_blocks": 4, 8 | "kernel_size": [5,5], 9 | "dilation_rates": [1,1,1,1,1,1], 10 | "strides_block_end": [2,2], 11 | "max_pooling": false, 12 | "n_lstmneurons": 200, 13 | "n_lstmlayers": 3, 14 | "bidirectional": true 15 | }, 16 | 17 | "preprocessing": { 18 | "spectrogram": true, 19 | "nperseg": 64, 20 | "noverlap": 32 21 | }, 22 | 23 | "loss_function_parameters": { 24 | "l2_penalty": 0, 25 | "class_penalty": 0.2 26 | }, 27 | 28 | "training_parameters": { 29 | "learning_rate": 0.001, 30 | "batch_size": 20, 31 | "drop_rate": 0.15, 32 | "exponential_decay": false, 33 | "dataset_compensation": 0.2, 34 | "validation_step": 5, 35 | "early_stop_wait": 100 36 | }, 37 | 38 | "data_augmentation": { 39 | "resampling": true, 40 | "resample_method": "random", 41 | "zero_filter": true, 42 | "reload_step": 5, 43 | "awgn": false 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /codes/definitions.py: -------------------------------------------------------------------------------- 1 | '''*********************************************** 2 | * 3 | * project: physioNet 4 | * created: 01.03.2017 5 | * purpose: Global settings/definitions 6 | * - global variables 7 | * - physical environment setup 8 | * 9 | ***********************************************''' 10 | 11 | import os 12 | import socket 13 | 14 | 15 | '''*********************************************** 16 | * Physical Environment 17 | ***********************************************''' 18 | 19 | GPU = '/gpu:0' 20 | CPU = '/cpu:0' 21 | default_dev = GPU 22 | 23 | # For multi GPU training use (4 GPUs in this example) 24 | # GPU_devices = ['0','1','2','3'] 25 | GPU_devices = ['0'] 26 | 27 | '''*********************************************** 28 | * Directories & Files 29 | ***********************************************''' 30 | 31 | filedir = os.path.dirname(__file__) 32 | root = os.path.join(filedir, '..') 33 | 34 | data_dir = 'data' 35 | log_dir = 'log' 36 | tmp_dir = 'tmp' 37 | model_dir = 'models' 38 | job_dir = 'jobs' 39 | 40 | job_file_name = 'your_machine' 41 | 42 | job_file = os.path.join(root, job_dir, job_file_name + '.json') 43 | model_fmt = os.path.join(root, model_dir, '{}' + '.json') 44 | 45 | if not os.path.exists(os.path.join(root, tmp_dir)): 46 | os.makedirs(os.path.join(root, tmp_dir)) 47 | if not os.path.exists(os.path.join(root, log_dir)): 48 | os.makedirs(os.path.join(root, log_dir)) -------------------------------------------------------------------------------- /codes/utils/metrics.py: -------------------------------------------------------------------------------- 1 | '''*********************************************** 2 | * 3 | * project: physioNet 4 | * created: 22.03.2017 5 | * purpose: calculate metrics to compare nets 6 | * 7 | ***********************************************''' 8 | 9 | '''*********************************************** 10 | * Imports 11 | 
***********************************************''' 12 | 13 | import numpy as np 14 | 15 | from definitions import * 16 | import utils.dataset_helper as dsh 17 | 18 | '''*********************************************** 19 | * Functions 20 | ***********************************************''' 21 | 22 | def maxAsIndexMatrix(a): 23 | out = (a == np.max(a, axis=1)[:,None]).astype(int) 24 | return out 25 | 26 | def get_prediction(prob_vec): 27 | prediction = np.argmax(prob_vec, axis=1) 28 | return prediction 29 | 30 | def compute_accuracy(prediction, actual): 31 | if len(prediction)==0: 32 | accuracy = 0 33 | else: 34 | accuracy = 100 * sum(actual == prediction) / len(prediction) 35 | return accuracy 36 | 37 | def compute_score(prediction, actual, class_tags, verbose=False): 38 | if len(prediction)==0: 39 | score = 0 40 | dictionary = {} 41 | else: 42 | f1 = [] 43 | dictionary = {} 44 | n_classes = len(class_tags) 45 | for c in range(n_classes): 46 | [f, dict] = calculate_F1(prediction, actual, c, verbose=verbose, class_tags=class_tags) 47 | if class_tags[c] != '~': 48 | f1.append(f) 49 | dictionary.update(dict) 50 | f1 = np.array(f1) 51 | f1 = f1[~np.isnan(f1)] 52 | score = 100*sum(f1)/len(f1) 53 | return [score, dictionary] 54 | 55 | def calculate_F1(index1, index2, val, verbose=False, class_tags=[]): 56 | b1 = np.where(index1 == val, 1, 0) 57 | b2 = np.where(index2 == val, 1, 0) 58 | nc = sum(b1*b2) 59 | n1 = sum(b1) 60 | n2 = sum(b2) 61 | f1 = 2*nc/(n1 + n2) 62 | dict = {class_tags[val]:{ 63 | 'score':100*f1, 64 | 'pred':n1.item(), 65 | 'actual':n2.item(), 66 | 'correct':nc.item()} 67 | } 68 | if verbose: 69 | print(class_tags[val], 70 | '\tscore = {0:.0f} %'.format(100*f1), 71 | '\tpred:', n1, 72 | '\tactual:', n2, 73 | '\tcorrect:', nc) 74 | return [f1, dict] 75 | 76 | '''*********************************************** 77 | * Script 78 | ***********************************************''' 79 | if __name__ == '__main__': 80 | pred = np.random.random([5,4]) 81 | index = maxAsIndexMatrix(pred) 82 | print(pred) 83 | print('\n') 84 | print(index) -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Convolutional Recurrent Neural Networks for Electrocardiogram Classification 2 | 3 | This is the implementation of the neural networks for electrocardiogram classification proposed in this [paper](https://arxiv.org/abs/1710.06122). 4 | 5 | ## Requirements 6 | 7 | The code was tested on TensorFlow 1.0.0/Python 3.5. 8 | To install the required python packages, run 9 | ``` 10 | pip install -r requirements.txt 11 | ``` 12 | For GPU support, use 13 | ``` 14 | pip install -r requirements_gpu.txt 15 | ``` 16 | Other versions of the packages might work, but were not tested. 17 | 18 | ## Get the data 19 | 20 | The dataset we used was provided for the [2017 PhysioNet/CinC Challenge](https://physionet.org/challenge/2017/). To download the data and to have the right folder structure, switch to the `data` folder and run the script 21 | ``` 22 | ./get_data.sh 23 | ``` 24 | 25 | ## The framework 26 | 27 | The framework is designed to experiment with different network architectures. Some code structures might seem complicated, but they help to have a clear organization of training jobs and a detailed log of all results. 28 | 29 | The generic architecture of the CNN and the CRNN are defined in `codes/network`. 
The hyperparameters of the model and the training procedure are defined in a JSON file in the folder `models`. The setups from the paper are predefined; the parameter names should be self-explanatory. 30 | 31 | We always used the same split of the dataset to ensure a fair comparison of different architectures. Other splits of the dataset can be generated with the Python script `generateSplit.py`. The first two parameters determine the sizes of the test, validation, and training sets; the last parameter is the seed used to generate the random split (a short sketch of such a call is appended at the end of this README). 32 | 33 | Before starting training, a job has to be defined in the folder `jobs`. 34 | 35 | ## Start training 36 | 37 | Before starting, you have to adapt the file `codes/definitions.py` to your environment. The important parameters to set are `default_dev` and `GPU_devices` (see the file for more detail). 38 | 39 | The provided file `jobs/your_machine.json` allows you to reproduce the 5-fold CV experiments from the paper. All the folds `0,1,2,3,4` have to be activated in `cvids`. The proportions of the train/validation/test split are also chosen here (`split`). There is an option to turn off the TensorBoard log (`log_en`), since it uses a lot of disk space (a simple `.csv` file with the learning curve is always available). 40 | 41 | To start these training jobs, change to the `codes` folder and run 42 | ``` 43 | python train.py 44 | ``` 45 | This stores all the jobs (one per model per CV fold) in a queue, and they are processed by the available GPUs. 46 | 47 | If you change the code and want to debug it using the model `models/model_name`, you can simply run 48 | ``` 49 | python train.py model_name 50 | ``` 51 | This starts a single training job with some fixed parameters. 52 | 53 | ## Visualize the results 54 | 55 | After the first job is completed, the results can be found in the `log` folder. The folders are organized in the format `architecture/jobname/fold`. In each such folder the following files can be found: 56 | * The TensorBoard log, if it was activated for this job. 57 | * The trained model that achieved the best validation score. 58 | * `hotlog.csv`, the simple CSV log. 59 | * A copy of the model and the job that led to those results. 60 | 61 | If you want to compare how well different CNN configurations work, run the following from the `codes` folder 62 | ``` 63 | python summarizeScores.py CNN 64 | ``` 65 | The same works for the CRNN. 66 | 67 | ## Citation 68 | 69 | If you find this code useful for your research, please cite 70 | ``` 71 | @incollection{zihlmann2017convolutional, 72 | Author = {Zihlmann, Martin and Perekrestenko, Dmytro and Tschannen, Michael}, 73 | Booktitle = {Computing in Cardiology (CinC)}, 74 | Title = {Convolutional Recurrent Neural Networks for Electrocardiogram Classification}, 75 | Year = {2017}} 76 | ``` 77 | and acknowledge this repository. 
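As mentioned in the split section above, generating an alternative split only requires editing the `gen_split` call in `codes/generateSplit.py`. A minimal sketch follows; the argument names match the existing script, while the values below are example values, not the configuration used in the paper.
```python
# codes/generateSplit.py (example values only)
import utils.dataset_helper as dsh

if __name__ == '__main__':
    # n_folds sets the number of CV folds (and hence the test fraction),
    # tv_frac is the (train+valid)/valid ratio, seed fixes the shuffling.
    dsh.gen_split(holdout=False, n_folds=5, tv_frac=6, seed=42)
```
Following the naming convention in `codes/utils/split_helper.py`, this call would produce a split named `split_5_6_42`, which can then be referenced in the `split` field of a job file in `jobs/`.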
78 | -------------------------------------------------------------------------------- /codes/network/cnn.py: -------------------------------------------------------------------------------- 1 | '''*********************************************** 2 | * 3 | * project: physioNet 4 | * created: 18.04.2017 5 | * purpose: convolutional neural network (CNN) class 6 | * 7 | ***********************************************''' 8 | 9 | '''*********************************************** 10 | * Imports 11 | ***********************************************''' 12 | 13 | import tensorflow as tf 14 | 15 | from definitions import * 16 | from network.network import Network 17 | import utils.nn_layers as nn 18 | import utils.tf_helper as tfh 19 | 20 | '''*********************************************** 21 | * Classes 22 | ***********************************************''' 23 | 24 | class CNN(Network): 25 | 26 | def __init__(self): 27 | Network.__init__(self) 28 | self.n_conv_blocks = None 29 | self.kernel_size = None 30 | self.n_channels_first = None 31 | self.dilation_rates = None 32 | self.growth_block_end = None 33 | self.strides_block_end = None 34 | self.max_pooling = None 35 | 36 | 37 | def create_model(self, data): 38 | 39 | length = tfh.get_length(data) 40 | 41 | with tf.name_scope('preshape'): 42 | conv_data = tf.expand_dims(data, 3) 43 | print(tfh.get_static_shape(conv_data)) 44 | 45 | with tf.name_scope('convolutions'): 46 | for i in range(self.n_conv_blocks): 47 | with tf.name_scope('conv_block' + str(i)): 48 | n_channels = None 49 | if i == 0: 50 | n_channels = self.n_channels_first 51 | [conv_data, length] = nn.conv2d_block( 52 | inputs=conv_data, 53 | length=length, 54 | kernel_size=self.kernel_size, 55 | n_channels=n_channels, 56 | dilation_rates=self.dilation_rates, 57 | growth=self.growth_block_end, 58 | strides_end=self.strides_block_end, 59 | max_pooling=self.max_pooling, 60 | is_training=self.is_training, 61 | drop_rate=self.drop_rate) 62 | print(tfh.get_static_shape(conv_data)) 63 | 64 | with tf.name_scope('postshape'): 65 | [_, t_s, f_s, c_s] = tfh.get_static_shape(conv_data) 66 | feature_seq = tf.reshape(conv_data, [-1, t_s, f_s * c_s]) 67 | print(tfh.get_static_shape(feature_seq)) 68 | 69 | with tf.name_scope('average_features'): 70 | features = nn.average_features(feature_seq, length) 71 | 72 | with tf.name_scope('linear_layer'): 73 | pred = tf.layers.dense(inputs=features, units=self.n_classes) 74 | 75 | return pred 76 | 77 | 78 | def get_modelParameters(self): 79 | dict = { 80 | 'n_conv_blocks' : self.n_conv_blocks, 81 | 'kernel_size' : self.kernel_size, 82 | 'n_channels_first' : self.n_channels_first, 83 | 'dilation_rates' : self.dilation_rates, 84 | 'growth_block_end' : self.growth_block_end, 85 | 'strides_block_end' : self.strides_block_end, 86 | 'max_pooling' : self.max_pooling 87 | } 88 | return dict 89 | 90 | 91 | def set_modelParameters(self, dict): 92 | self.n_conv_blocks = dict['n_conv_blocks'] 93 | self.kernel_size = dict['kernel_size'] 94 | self.n_channels_first = dict['n_channels_first'] 95 | self.dilation_rates = dict['dilation_rates'] 96 | self.growth_block_end = dict['growth_block_end'] 97 | self.strides_block_end = dict['strides_block_end'] 98 | self.max_pooling = dict['max_pooling'] 99 | 100 | -------------------------------------------------------------------------------- /codes/utils/transformations.py: -------------------------------------------------------------------------------- 1 | '''*********************************************** 2 | * 3 | * project: physioNet 
4 | * created: 22.03.2017 5 | * purpose: data transformations 6 | * 7 | ***********************************************''' 8 | 9 | '''*********************************************** 10 | * Imports 11 | ***********************************************''' 12 | 13 | import numpy as np 14 | import scipy as sc 15 | from scipy import signal 16 | import math 17 | 18 | from definitions import * 19 | 20 | '''*********************************************** 21 | * External functions 22 | ***********************************************''' 23 | 24 | def spectrogram(data, nperseg=32, noverlap=16): 25 | log_spectrogram = True 26 | fs = 300 27 | _, _, Sxx = signal.spectrogram(data, fs=fs, nperseg=nperseg, noverlap=noverlap) 28 | Sxx = np.transpose(Sxx,[0,2,1]) 29 | if log_spectrogram: 30 | Sxx = abs(Sxx) 31 | mask = Sxx > 0 32 | Sxx[mask] = np.log(Sxx[mask]) 33 | return Sxx 34 | 35 | def upscale(signals, upscale_factor=1): 36 | signals = np.repeat(signals, upscale_factor, axis=0) 37 | return signals 38 | 39 | def random_resample(signals, upscale_factor=1): 40 | [n_signals,length] = signals.shape 41 | # pulse variation from 60 bpm to 120 bpm, expected 80 bpm 42 | new_length = np.random.randint( 43 | low=int(length*80/120), 44 | high=int(length*80/60), 45 | size=[n_signals, upscale_factor] 46 | ) 47 | signals = [np.array(s) for s in signals.tolist()] 48 | new_length = [np.array(nl) for nl in new_length.tolist()] 49 | sigs = [stretch_squeeze(s,l) for s,nl in zip(signals,new_length) for l in nl] 50 | sigs = [fit_tolength(s, length) for s in sigs] 51 | sigs = np.array(sigs) 52 | return sigs 53 | 54 | def random_resample_with_mean(signals, meanHRs): 55 | [n_signals,length] = signals.shape 56 | new_lengths = [np.random.randint(low=int(length*hr/120), high=int(length*hr/60)) for hr in meanHRs] 57 | signals = [np.array(s) for s in signals.tolist()] 58 | new_lengths = [np.array(nl) for nl in new_lengths] 59 | sigs = [stretch_squeeze(s,nl) for s,nl in zip(signals,new_lengths)] 60 | sigs = [fit_tolength(s, length) for s in sigs] 61 | sigs = np.array(sigs) 62 | return sigs 63 | 64 | def resample_with_mean(signals, meanHRs): 65 | [n_signals,length] = signals.shape 66 | new_lengths = [int(length*hr/80) for hr in meanHRs] 67 | signals = [np.array(s) for s in signals.tolist()] 68 | new_lengths = [np.array(nl) for nl in new_lengths] 69 | sigs = [stretch_squeeze(s,nl) for s,nl in zip(signals,new_lengths)] 70 | sigs = [fit_tolength(s, length) for s in sigs] 71 | sigs = np.array(sigs) 72 | return sigs 73 | 74 | def zero_filter(input, threshold=2, depth=8): 75 | shape = input.shape 76 | # compensate for lost length due to mask processing 77 | noise_shape = [shape[0], shape[1] + depth] 78 | noise = np.random.normal(0,1,noise_shape) 79 | mask = np.greater(noise, threshold) 80 | # grow a neighbourhood of True values with at least length depth+1 81 | for d in range(depth): 82 | mask = np.logical_or(mask[:, :-1], mask[:, 1:]) 83 | output = np.where(mask, np.zeros(shape), input) 84 | return output 85 | 86 | '''*********************************************** 87 | * Internal functions 88 | ***********************************************''' 89 | 90 | def stretch_squeeze(source, length): 91 | target = np.zeros([1, length]) 92 | interpol_obj = sc.interpolate.interp1d(np.arange(source.size), source) 93 | grid = np.linspace(0, source.size - 1, target.size) 94 | result = interpol_obj(grid) 95 | return result 96 | 97 | def fit_tolength(source, length): 98 | target = np.zeros([length]) 99 | w_l = min(source.size, target.size) 100 | 
target[0:w_l] = source[0:w_l] 101 | return target 102 | 103 | '''*********************************************** 104 | * Script 105 | ***********************************************''' 106 | 107 | if __name__ == '__main__': 108 | pass 109 | 110 | 111 | -------------------------------------------------------------------------------- /codes/network/crnn.py: -------------------------------------------------------------------------------- 1 | '''*********************************************** 2 | * 3 | * project: physioNet 4 | * created: 18.04.2017 5 | * purpose: convolutional neural network (CNN) class 6 | * 7 | ***********************************************''' 8 | 9 | '''*********************************************** 10 | * Imports 11 | ***********************************************''' 12 | 13 | import tensorflow as tf 14 | 15 | from definitions import * 16 | from network.network import Network 17 | import utils.nn_layers as nn 18 | import utils.tf_helper as tfh 19 | 20 | '''*********************************************** 21 | * Classes 22 | ***********************************************''' 23 | 24 | class CRNN(Network): 25 | 26 | def __init__(self): 27 | Network.__init__(self) 28 | self.n_conv_blocks = None 29 | self.kernel_size = None 30 | self.n_channels_first = None 31 | self.dilation_rates = None 32 | self.growth_block_end = None 33 | self.strides_block_end = None 34 | self.max_pooling = None 35 | self.n_lstmneurons = None 36 | self.n_lstmlayers = None 37 | self.bidirectional = None 38 | 39 | 40 | def create_model(self, data): 41 | 42 | length = tfh.get_length(data) 43 | 44 | with tf.name_scope('preshape'): 45 | conv_data = tf.expand_dims(data, 3) 46 | print(tfh.get_static_shape(conv_data)) 47 | 48 | with tf.name_scope('convolutions'): 49 | for i in range(self.n_conv_blocks): 50 | with tf.name_scope('conv2d_layer' + str(i)): 51 | n_channels = None 52 | if i == 0: 53 | n_channels = self.n_channels_first 54 | [conv_data, length] = nn.conv2d_block( 55 | inputs=conv_data, 56 | length=length, 57 | kernel_size=self.kernel_size, 58 | n_channels=n_channels, 59 | dilation_rates=self.dilation_rates, 60 | growth=self.growth_block_end, 61 | strides_end=self.strides_block_end, 62 | max_pooling=self.max_pooling, 63 | is_training=self.is_training, 64 | drop_rate=self.drop_rate) 65 | print(tfh.get_static_shape(conv_data)) 66 | 67 | with tf.name_scope('postshape'): 68 | [_, t_s, f_s, c_s] = tfh.get_static_shape(conv_data) 69 | feature_seq = tf.reshape(conv_data, [-1, t_s, f_s * c_s]) 70 | print(tfh.get_static_shape(feature_seq)) 71 | 72 | with tf.name_scope('gradient_diode'): 73 | feature_seq = tf.cond(tf.equal(self.training_phase, 1), # when in phase 1 do only train RNN 74 | lambda: tf.stop_gradient(feature_seq), 75 | lambda: tf.identity(feature_seq)) 76 | 77 | with tf.name_scope('model_selector'): 78 | features = tf.cond(tf.equal(self.training_phase, 0), # when in phase 0 do not use RNN 79 | lambda: nn.mean_branch(feature_seq, length, self.n_lstmneurons), 80 | lambda: nn.lstm_layer(feature_seq, length, self.n_lstmneurons, self.n_lstmlayers, bidirectional=self.bidirectional, drop_rate=self.drop_rate)) 81 | print(tfh.get_static_shape(features)) 82 | 83 | with tf.name_scope('linear_layer'): 84 | pred = tf.layers.dense(inputs=features, units=self.n_classes) 85 | 86 | return pred 87 | 88 | 89 | def get_modelParameters(self): 90 | dict = { 91 | 'n_conv_blocks' : self.n_conv_blocks, 92 | 'kernel_size' : self.kernel_size, 93 | 'n_channels_first' : self.n_channels_first, 94 | 'dilation_rates' : 
self.dilation_rates, 95 | 'growth_block_end' : self.growth_block_end, 96 | 'strides_block_end' : self.strides_block_end, 97 | 'max_pooling' : self.max_pooling, 98 | 'n_lstmneurons' : self.n_lstmneurons, 99 | 'n_lstmlayers' : self.n_lstmlayers, 100 | 'bidirectional' : self.bidirectional 101 | } 102 | return dict 103 | 104 | def set_modelParameters(self, dict): 105 | self.n_conv_blocks = dict['n_conv_blocks'] 106 | self.kernel_size = dict['kernel_size'] 107 | self.n_channels_first = dict['n_channels_first'] 108 | self.dilation_rates = dict['dilation_rates'] 109 | self.growth_block_end = dict['growth_block_end'] 110 | self.strides_block_end = dict['strides_block_end'] 111 | self.max_pooling = dict['max_pooling'] 112 | self.n_lstmneurons = dict['n_lstmneurons'] 113 | self.n_lstmlayers = dict['n_lstmlayers'] 114 | self.bidirectional = dict['bidirectional'] 115 | -------------------------------------------------------------------------------- /codes/utils/tf_helper.py: -------------------------------------------------------------------------------- 1 | '''*********************************************** 2 | * 3 | * project: physioNet 4 | * created: 10.03.2017 5 | * purpose: abstract tf peculiarities away 6 | * 7 | ***********************************************''' 8 | 9 | 10 | '''*********************************************** 11 | * Imports 12 | ***********************************************''' 13 | 14 | from definitions import * 15 | import os 16 | import tensorflow as tf 17 | import datetime as dt 18 | from tensorflow.python.framework import dtypes 19 | from tensorflow.python.platform import gfile 20 | 21 | '''*********************************************** 22 | * Variables 23 | ***********************************************''' 24 | 25 | gpu_prec = tf.float32 26 | 27 | '''*********************************************** 28 | * Yannick functions 29 | # ***********************************************''' 30 | 31 | def create_constant(value, type): 32 | const = tf.constant(value, dtype=type) 33 | cast = tf.cast(const, gpu_prec) 34 | return cast 35 | 36 | def create_variable(name, shape): 37 | var = tf.Variable(tf.random_normal(shape), name=name, trainable=True) 38 | tf.add_to_collection('vars', var) 39 | return var 40 | 41 | def create_input(dtype, shape, name): 42 | init_name = '_'.join([name,'init']) 43 | init = tf.placeholder(dtype, shape, name=init_name) 44 | cast = tf.cast(init, gpu_prec) 45 | var = tf.Variable(cast, name=name, trainable=False, collections=[], validate_shape=False) 46 | var.set_shape(shape) 47 | tf.add_to_collection('inputs', var) 48 | tf.add_to_collection('inputs', init) 49 | return [init, var] 50 | 51 | def get_dynamic_shape(tensor): 52 | return tf.shape(tensor) 53 | 54 | def get_static_shape(tensor): 55 | return tensor.get_shape().as_list() 56 | 57 | def get_dynamiclast(output, seq_l): 58 | rng = tf.range(0, tf.shape(seq_l)[0]) 59 | indexes = tf.stack([rng, seq_l - 1], 1) 60 | relevant = tf.gather_nd(output, indexes) 61 | return relevant 62 | 63 | def get_staticlast(output, seq_max_l): 64 | output = tf.transpose(output, [1, 0, 2]) 65 | last = output[seq_max_l - 1] 66 | return last 67 | 68 | def set_dynamiczero(input, length): 69 | shape = get_static_shape(input) 70 | shape[0] = get_dynamic_shape(input)[0] 71 | dim = len(shape) 72 | # generate 2d Matrix of same shape with col_n in each entry 73 | r = tf.range(0, shape[1], 1) 74 | r = tf.expand_dims(r, 0) 75 | if dim==2: 76 | r = tf.tile(r, [shape[0],1]) 77 | else: 78 | r = tf.expand_dims(r, 2) 79 | r = tf.tile(r, 
[shape[0], 1, shape[2]]) 80 | # generate 2d Matrix with length of each dataset in batch in row 81 | l = tf.expand_dims(length, 1) 82 | if dim==2: 83 | l = tf.tile(l, [1, shape[1]]) 84 | else: 85 | l = tf.expand_dims(l, 2) 86 | l = tf.tile(l, [1,shape[1], shape[2]]) 87 | # when col_n smaller than length mask entry is true 88 | mask = tf.less(r, l) 89 | # when col_n larger than length, set input to zero 90 | output = tf.where(mask, input, tf.zeros(shape, dtype=gpu_prec)) 91 | return output 92 | 93 | def get_length(sequence): 94 | shape = get_static_shape(sequence) 95 | dim = len(shape) 96 | if dim==2: 97 | used = tf.sign(tf.abs(sequence)) 98 | elif dim==3: 99 | used = tf.sign(tf.reduce_max(tf.abs(sequence), reduction_indices=2)) 100 | else: 101 | raise ValueError('only 2D or 3D sequences supported') 102 | used = tf.cast(used, tf.int32) 103 | rng = tf.range(0, shape[1]) 104 | ranged = rng * used 105 | length = tf.reduce_max(ranged, axis=1) + 1 106 | return length 107 | 108 | def where_greater(input, threshold, replacement): 109 | shape = get_static_shape(input) 110 | shape[0] = get_dynamic_shape(input)[0] 111 | condition = tf.greater(input, threshold) 112 | rep = tf.ones(shape, dtype=tf.int32) * replacement 113 | output = tf.where(condition, input, rep) 114 | return output 115 | 116 | def get_prediction(net_output): 117 | prediction = tf.argmax(net_output, axis=1) 118 | return prediction 119 | 120 | def truncate_static(data, size): 121 | trunc = data[:,0:size] 122 | return trunc 123 | 124 | def truncate_dynamic(data): 125 | length = get_length(data) 126 | print(length) 127 | size = tf.reduce_max(length) 128 | trunc = data[:,0:size] 129 | return trunc 130 | 131 | def compute_score(prediction, label): 132 | pred = get_prediction(prediction) 133 | lab = get_prediction(label) 134 | f1 = [calculate_F1(pred, lab, i) for i in range(4)] 135 | not_nan = tf.less(f1, 1) 136 | f1 = tf.where(not_nan, f1, [0,0,0,0]) 137 | f = tf.reduce_sum(f1)/tf.reduce_sum(tf.cast(not_nan, gpu_prec)) 138 | return f 139 | 140 | def calculate_F1(indexA, indexB, val): 141 | b1 = tf.equal(indexA, val) 142 | b2 = tf.equal(indexB, val) 143 | both_true = tf.logical_and(b1, b2) 144 | n1 = tf.reduce_sum(tf.cast(b1, gpu_prec)) 145 | n2 = tf.reduce_sum(tf.cast(b2, gpu_prec)) 146 | ncorrect = tf.reduce_sum(tf.cast(both_true, gpu_prec)) 147 | f1 = 2*ncorrect/(n1 + n2) 148 | return f1 149 | 150 | 151 | '''*********************************************** 152 | * Script 153 | ***********************************************''' 154 | -------------------------------------------------------------------------------- /codes/utils/split_helper.py: -------------------------------------------------------------------------------- 1 | '''*********************************************** 2 | * 3 | * project: physioNet 4 | * created: 23.11.2017 5 | * purpose: helper functions to split dataset 6 | * 7 | ***********************************************''' 8 | 9 | '''*********************************************** 10 | * Imports 11 | ***********************************************''' 12 | 13 | import numpy as np 14 | import math 15 | import functools 16 | 17 | from definitions import * 18 | 19 | '''*********************************************** 20 | * External Functions 21 | ***********************************************''' 22 | 23 | def get_properties(holdout, n_folds, tv_frac, seed): 24 | 25 | name = "split_%d_%d_%d" % (n_folds,tv_frac,seed) 26 | if holdout: 27 | name = name + "_holdout" 28 | 29 | size_relative = get_relative_size(holdout, n_folds, 
tv_frac) 30 | size_sum = np.sum(size_relative) 31 | size_fraction = [round(sr/size_sum,5) for sr in size_relative] 32 | 33 | properties = { 34 | 'name': name, 35 | 'inputs': { 36 | 'holdout': holdout, 37 | 'number of folds': n_folds, 38 | '(train+valid)/valid': tv_frac, 39 | 'seed': seed 40 | }, 41 | 'relative size': { 42 | 'train': size_relative[0], 43 | 'valid': size_relative[1], 44 | 'test': size_relative[2], 45 | 'holdout': size_relative[3] 46 | }, 47 | 'relative size fraction': { 48 | 'train': size_fraction[0], 49 | 'valid': size_fraction[1], 50 | 'test': size_fraction[2], 51 | 'holdout': size_fraction[3] 52 | } 53 | } 54 | 55 | return properties 56 | 57 | 58 | def ins_id_into_fname(fname, id): 59 | fname = fname[:-5] + str(id) + fname[-5:] 60 | return fname 61 | 62 | 63 | def stratified_split(id_list, labels, rel_size, shuffle=False, seed=None): 64 | id_list = np.array(id_list) 65 | # split id_list into a set for each label 66 | class_sets = split_bylabels(id_list, labels) 67 | n_classes = len(class_sets) 68 | # for each label, split corresponding data into rel_size splits 69 | split_class_sets = [split_set(class_set, rel_size=rel_size, shuffle=shuffle, seed=seed) for class_set in class_sets] 70 | # concatenate the splits for the different labels again into a set 71 | # but keeping the rel_size splits apart 72 | # ClassA ClassB ClassC 73 | # Set0 1/3 + 1/3 + 1/3 74 | # Set1 1/2 + 1/2 + 1/2 75 | # Set2 1/6 + 1/6 + 1/6 76 | split_sets = [np.sort(np.hstack([split_class_sets[i][j] for i in range(n_classes)])) for j in range(len(rel_size))] 77 | return split_sets 78 | 79 | 80 | def adjoint_set(parent_set, sibling_set): 81 | pset = np.array(parent_set) 82 | sset = np.array(sibling_set) 83 | set = np.setdiff1d(pset, sset) 84 | return set 85 | 86 | '''*********************************************** 87 | * Internal Functions 88 | ***********************************************''' 89 | 90 | def get_relative_size(holdout, n_folds, tv_frac): 91 | if holdout: 92 | denom = (n_folds+1)*n_folds*tv_frac 93 | [holdout_n, rest] = minisplit(denom,n_folds+1) 94 | [test_n, rest] = minisplit(rest,n_folds) 95 | [valid_n, train_n] = minisplit(rest,tv_frac) 96 | else: 97 | denom = n_folds*tv_frac 98 | holdout_n = 0 99 | [test_n, rest] = minisplit(denom,n_folds) 100 | [valid_n, train_n] = minisplit(rest,tv_frac) 101 | size_relative = div_by_gcd([train_n, valid_n, test_n, holdout_n]) 102 | return size_relative 103 | 104 | def minisplit(total,splitfactor): 105 | smallpart = int(total/splitfactor) 106 | bigpart = total-smallpart 107 | return [smallpart, bigpart] 108 | 109 | def div_by_gcd(values): 110 | gcd = functools.reduce(math.gcd, values) 111 | values = [int(v/gcd) for v in values] 112 | return values 113 | 114 | def split_bylabels(id_list, labels): 115 | n_classes = labels.shape[1] 116 | # generate a mask for each feature, selecting only those entries which are labelled accordingly 117 | masks = [labels[:, i].astype(bool) for i in range(n_classes)] 118 | # split dataset into subsets, one for each feature in labels 119 | class_sets = [id_list[mask] for mask in masks] 120 | return class_sets 121 | 122 | def split_set(inset, rel_size, shuffle=False, seed=None): 123 | 124 | rel_size = div_by_gcd(rel_size) 125 | rel_size_sum = sum(rel_size) 126 | inset = np.array(inset).astype(int) 127 | 128 | if shuffle: 129 | np.random.seed(seed) 130 | np.random.shuffle(inset) 131 | np.random.seed(seed=None) 132 | 133 | small_sets = np.array_split(inset, rel_size_sum) 134 | out_sets = [] 135 | [out_sets.append([]) for idx,_ in 
enumerate(rel_size)] 136 | 137 | # guarantee at least one small_set per outset 138 | index = 0 139 | for idx,size in enumerate(rel_size): 140 | if size>0: 141 | out_sets[idx].append(small_sets[index]) 142 | index+=1 143 | else: 144 | out_sets[idx].append([]) 145 | rel_size = [r-1 for r in rel_size] 146 | for idx,size in enumerate(rel_size): 147 | if size>0: 148 | [out_sets[idx].append(small_sets[index+i]) for i in range(size)] 149 | index+=size 150 | else: 151 | out_sets[idx].append([]) 152 | 153 | out_sets = [np.concatenate(s) for s in out_sets] 154 | out_sets = [np.sort(s).astype(int) for s in out_sets] 155 | 156 | return out_sets -------------------------------------------------------------------------------- /data/training2017/splits/split_5_6_14/valid2.json: -------------------------------------------------------------------------------- 1 | [5, 14, 21, 26, 37, 49, 62, 63, 68, 71, 75, 81, 86, 89, 97, 99, 112, 113, 130, 132, 138, 142, 161, 166, 172, 176, 177, 182, 185, 205, 210, 211, 223, 224, 226, 237, 238, 254, 256, 261, 266, 267, 277, 282, 298, 332, 334, 343, 349, 353, 357, 365, 367, 369, 383, 407, 408, 410, 419, 423, 447, 455, 471, 476, 477, 478, 480, 493, 496, 499, 508, 517, 536, 543, 544, 545, 548, 549, 550, 554, 555, 557, 558, 570, 578, 585, 588, 589, 607, 618, 628, 631, 643, 656, 666, 672, 674, 677, 681, 687, 690, 695, 713, 725, 726, 757, 762, 766, 785, 798, 800, 803, 805, 833, 834, 835, 866, 873, 874, 884, 897, 898, 902, 906, 914, 915, 921, 932, 939, 943, 949, 950, 952, 962, 984, 998, 1001, 1008, 1011, 1014, 1037, 1039, 1045, 1050, 1060, 1062, 1082, 1104, 1109, 1118, 1121, 1135, 1142, 1145, 1151, 1166, 1172, 1180, 1184, 1220, 1251, 1253, 1258, 1265, 1276, 1292, 1307, 1310, 1329, 1330, 1344, 1352, 1361, 1385, 1392, 1396, 1401, 1413, 1422, 1425, 1449, 1464, 1468, 1473, 1481, 1482, 1483, 1487, 1492, 1502, 1508, 1523, 1525, 1544, 1554, 1557, 1559, 1570, 1580, 1594, 1599, 1616, 1631, 1632, 1637, 1649, 1656, 1667, 1668, 1683, 1701, 1711, 1718, 1723, 1727, 1730, 1741, 1762, 1763, 1775, 1784, 1807, 1808, 1809, 1815, 1817, 1827, 1831, 1845, 1853, 1857, 1859, 1860, 1883, 1892, 1900, 1912, 1913, 1918, 1926, 1938, 1941, 1950, 1959, 1967, 1973, 1977, 1983, 1988, 1998, 2002, 2013, 2016, 2023, 2026, 2032, 2034, 2038, 2043, 2046, 2079, 2098, 2099, 2107, 2110, 2116, 2124, 2126, 2130, 2132, 2134, 2136, 2140, 2154, 2161, 2168, 2182, 2186, 2189, 2193, 2198, 2203, 2213, 2233, 2236, 2253, 2254, 2282, 2284, 2289, 2297, 2299, 2320, 2338, 2346, 2351, 2372, 2379, 2392, 2406, 2409, 2410, 2416, 2417, 2423, 2424, 2426, 2427, 2445, 2449, 2455, 2459, 2463, 2471, 2474, 2488, 2497, 2500, 2502, 2503, 2512, 2517, 2524, 2526, 2533, 2539, 2547, 2548, 2558, 2565, 2586, 2603, 2607, 2614, 2621, 2628, 2639, 2640, 2659, 2666, 2668, 2682, 2686, 2687, 2712, 2719, 2726, 2729, 2731, 2736, 2741, 2748, 2755, 2778, 2784, 2823, 2833, 2838, 2840, 2852, 2855, 2865, 2871, 2873, 2877, 2880, 2885, 2898, 2903, 2915, 2925, 2932, 2937, 2942, 2943, 2969, 2987, 2989, 3006, 3007, 3012, 3015, 3020, 3024, 3034, 3035, 3036, 3037, 3043, 3044, 3045, 3046, 3060, 3062, 3069, 3077, 3083, 3084, 3129, 3143, 3166, 3170, 3175, 3191, 3192, 3196, 3198, 3204, 3207, 3214, 3223, 3230, 3232, 3240, 3249, 3252, 3259, 3261, 3271, 3273, 3284, 3289, 3290, 3303, 3307, 3308, 3338, 3349, 3351, 3358, 3359, 3360, 3364, 3374, 3376, 3388, 3396, 3404, 3408, 3428, 3439, 3440, 3446, 3447, 3449, 3452, 3456, 3465, 3468, 3475, 3493, 3494, 3501, 3507, 3513, 3514, 3516, 3528, 3536, 3541, 3543, 3547, 3553, 3564, 3569, 3570, 3578, 3583, 3585, 3591, 3601, 3610, 3616, 3619, 
3624, 3633, 3636, 3648, 3654, 3667, 3673, 3680, 3682, 3683, 3690, 3693, 3696, 3714, 3715, 3723, 3729, 3755, 3756, 3758, 3767, 3771, 3778, 3781, 3794, 3809, 3820, 3832, 3841, 3844, 3849, 3853, 3861, 3876, 3877, 3881, 3883, 3885, 3904, 3906, 3908, 3912, 3916, 3920, 3924, 3928, 3935, 3968, 3984, 3992, 3993, 4004, 4007, 4008, 4015, 4019, 4023, 4038, 4046, 4047, 4052, 4058, 4059, 4063, 4064, 4070, 4081, 4083, 4096, 4109, 4120, 4121, 4137, 4146, 4153, 4154, 4157, 4165, 4175, 4176, 4195, 4200, 4207, 4210, 4243, 4244, 4245, 4246, 4248, 4257, 4261, 4263, 4268, 4274, 4278, 4283, 4285, 4300, 4311, 4314, 4317, 4322, 4330, 4331, 4332, 4333, 4340, 4343, 4347, 4361, 4365, 4367, 4376, 4384, 4394, 4400, 4422, 4425, 4426, 4435, 4440, 4443, 4456, 4457, 4486, 4496, 4527, 4534, 4536, 4537, 4538, 4539, 4553, 4565, 4573, 4576, 4579, 4592, 4593, 4594, 4595, 4600, 4611, 4612, 4662, 4679, 4689, 4698, 4709, 4719, 4721, 4728, 4757, 4765, 4772, 4774, 4780, 4787, 4797, 4803, 4804, 4822, 4825, 4832, 4852, 4857, 4863, 4872, 4889, 4908, 4918, 4944, 4951, 4954, 4956, 4957, 4958, 4959, 4963, 4966, 4973, 4984, 4994, 4996, 4998, 5000, 5012, 5028, 5063, 5069, 5072, 5088, 5097, 5110, 5138, 5139, 5144, 5151, 5163, 5170, 5179, 5184, 5194, 5198, 5208, 5211, 5214, 5220, 5231, 5234, 5242, 5256, 5275, 5278, 5279, 5282, 5283, 5290, 5301, 5304, 5305, 5318, 5339, 5360, 5367, 5380, 5382, 5400, 5401, 5415, 5417, 5419, 5423, 5427, 5434, 5442, 5445, 5451, 5452, 5455, 5459, 5463, 5465, 5470, 5480, 5482, 5491, 5499, 5520, 5524, 5526, 5528, 5529, 5530, 5531, 5532, 5534, 5545, 5549, 5580, 5591, 5592, 5593, 5598, 5604, 5607, 5611, 5623, 5626, 5635, 5648, 5652, 5663, 5687, 5689, 5700, 5709, 5721, 5725, 5728, 5729, 5731, 5756, 5759, 5760, 5765, 5771, 5772, 5773, 5775, 5794, 5810, 5811, 5814, 5817, 5819, 5829, 5834, 5837, 5842, 5845, 5846, 5850, 5852, 5853, 5870, 5876, 5882, 5885, 5891, 5892, 5907, 5924, 5931, 5932, 5950, 5952, 5962, 5964, 5966, 5975, 5993, 5994, 5995, 6007, 6010, 6017, 6018, 6022, 6030, 6036, 6038, 6040, 6044, 6064, 6078, 6091, 6094, 6105, 6113, 6122, 6129, 6134, 6136, 6153, 6154, 6161, 6162, 6166, 6181, 6188, 6210, 6221, 6223, 6227, 6231, 6232, 6250, 6259, 6261, 6274, 6279, 6284, 6288, 6297, 6325, 6328, 6336, 6338, 6352, 6362, 6368, 6379, 6383, 6384, 6385, 6395, 6397, 6409, 6412, 6414, 6418, 6429, 6437, 6439, 6454, 6459, 6461, 6462, 6473, 6487, 6489, 6516, 6519, 6523, 6528, 6529, 6531, 6532, 6539, 6561, 6562, 6563, 6565, 6572, 6583, 6597, 6607, 6609, 6616, 6617, 6629, 6630, 6637, 6648, 6654, 6656, 6658, 6664, 6671, 6685, 6686, 6692, 6698, 6700, 6706, 6711, 6713, 6720, 6723, 6726, 6734, 6739, 6741, 6743, 6749, 6754, 6761, 6763, 6765, 6770, 6772, 6793, 6802, 6805, 6807, 6816, 6817, 6821, 6837, 6841, 6854, 6875, 6880, 6890, 6891, 6893, 6898, 6907, 6910, 6926, 6951, 6968, 6976, 6977, 6991, 6994, 7000, 7013, 7019, 7027, 7037, 7048, 7049, 7050, 7056, 7060, 7061, 7069, 7088, 7093, 7095, 7108, 7115, 7117, 7118, 7129, 7140, 7156, 7162, 7173, 7191, 7209, 7228, 7230, 7236, 7239, 7240, 7245, 7250, 7252, 7279, 7285, 7294, 7296, 7302, 7308, 7317, 7347, 7357, 7364, 7366, 7369, 7370, 7379, 7381, 7383, 7390, 7394, 7396, 7402, 7411, 7423, 7454, 7467, 7468, 7482, 7492, 7499, 7501, 7519, 7520, 7535, 7540, 7543, 7549, 7552, 7566, 7577, 7580, 7587, 7592, 7596, 7608, 7610, 7629, 7630, 7631, 7646, 7659, 7663, 7668, 7670, 7676, 7689, 7700, 7702, 7703, 7714, 7718, 7720, 7723, 7726, 7728, 7729, 7730, 7741, 7767, 7772, 7786, 7791, 7797, 7801, 7813, 7827, 7844, 7857, 7864, 7879, 7882, 7886, 7895, 7899, 7902, 7921, 7923, 7936, 7939, 7948, 7949, 7952, 7962, 
7981, 7995, 8004, 8011, 8020, 8029, 8038, 8039, 8041, 8054, 8058, 8081, 8098, 8102, 8105, 8112, 8118, 8119, 8127, 8129, 8132, 8145, 8158, 8165, 8184, 8189, 8194, 8199, 8202, 8204, 8212, 8225, 8230, 8235, 8236, 8241, 8250, 8261, 8262, 8267, 8270, 8278, 8285, 8286, 8291, 8297, 8310, 8325, 8348, 8351, 8355, 8362, 8373, 8378, 8388, 8398, 8407, 8408, 8412, 8414, 8420, 8422, 8432, 8444, 8449, 8450, 8461, 8465, 8468, 8487, 8507, 8513, 8522] -------------------------------------------------------------------------------- /data/training2017/splits/split_5_6_14/valid4.json: -------------------------------------------------------------------------------- 1 | [4, 16, 21, 26, 36, 51, 54, 64, 68, 72, 82, 84, 89, 91, 97, 102, 104, 117, 122, 135, 138, 144, 158, 166, 170, 175, 178, 179, 183, 202, 207, 210, 211, 214, 229, 230, 245, 249, 256, 260, 263, 264, 274, 286, 307, 322, 326, 328, 336, 340, 351, 358, 360, 364, 378, 391, 392, 401, 414, 417, 434, 442, 455, 464, 471, 472, 478, 495, 499, 503, 506, 509, 517, 521, 522, 524, 529, 530, 535, 552, 557, 558, 560, 563, 564, 583, 592, 600, 619, 631, 638, 639, 644, 645, 659, 666, 691, 694, 695, 700, 710, 713, 729, 732, 738, 746, 751, 777, 780, 786, 795, 814, 818, 822, 847, 852, 855, 867, 868, 872, 878, 884, 888, 896, 907, 914, 915, 934, 940, 951, 962, 964, 966, 980, 981, 985, 1006, 1021, 1028, 1038, 1046, 1050, 1067, 1075, 1080, 1090, 1100, 1101, 1111, 1128, 1129, 1141, 1146, 1152, 1160, 1161, 1172, 1190, 1200, 1206, 1213, 1223, 1237, 1244, 1279, 1291, 1299, 1315, 1328, 1345, 1347, 1360, 1362, 1378, 1387, 1402, 1406, 1407, 1416, 1423, 1439, 1441, 1459, 1475, 1489, 1490, 1499, 1503, 1504, 1509, 1510, 1531, 1539, 1548, 1562, 1578, 1579, 1593, 1596, 1601, 1609, 1612, 1620, 1624, 1631, 1639, 1647, 1653, 1656, 1657, 1670, 1676, 1692, 1700, 1715, 1740, 1751, 1764, 1765, 1772, 1773, 1786, 1787, 1815, 1818, 1819, 1823, 1834, 1835, 1844, 1853, 1858, 1868, 1870, 1880, 1887, 1888, 1914, 1935, 1937, 1945, 1954, 1966, 1968, 1976, 1979, 1987, 1989, 2008, 2017, 2029, 2037, 2046, 2047, 2049, 2050, 2062, 2063, 2065, 2075, 2078, 2081, 2102, 2116, 2141, 2145, 2148, 2155, 2162, 2166, 2168, 2169, 2170, 2177, 2182, 2203, 2206, 2212, 2224, 2227, 2228, 2233, 2234, 2239, 2243, 2250, 2255, 2257, 2288, 2300, 2322, 2325, 2337, 2350, 2355, 2358, 2360, 2361, 2369, 2372, 2389, 2398, 2406, 2408, 2410, 2419, 2423, 2429, 2476, 2485, 2486, 2487, 2489, 2497, 2507, 2513, 2553, 2554, 2555, 2561, 2565, 2572, 2578, 2581, 2584, 2586, 2593, 2595, 2598, 2607, 2608, 2623, 2630, 2640, 2657, 2665, 2673, 2674, 2683, 2697, 2708, 2709, 2710, 2716, 2743, 2750, 2761, 2773, 2778, 2783, 2798, 2800, 2804, 2835, 2836, 2852, 2872, 2881, 2888, 2891, 2893, 2898, 2905, 2906, 2924, 2938, 2941, 2946, 2950, 2967, 2974, 2986, 2996, 3001, 3031, 3035, 3045, 3048, 3058, 3064, 3067, 3068, 3080, 3086, 3088, 3091, 3098, 3100, 3115, 3120, 3129, 3134, 3136, 3142, 3148, 3169, 3175, 3185, 3191, 3206, 3209, 3212, 3218, 3230, 3233, 3240, 3244, 3249, 3255, 3262, 3276, 3277, 3285, 3298, 3299, 3303, 3305, 3331, 3334, 3351, 3353, 3374, 3385, 3397, 3400, 3407, 3409, 3413, 3422, 3424, 3426, 3442, 3451, 3461, 3462, 3468, 3475, 3480, 3490, 3492, 3496, 3497, 3503, 3506, 3508, 3509, 3512, 3522, 3525, 3531, 3549, 3552, 3556, 3566, 3572, 3575, 3579, 3607, 3611, 3614, 3615, 3621, 3622, 3625, 3630, 3643, 3644, 3645, 3646, 3650, 3661, 3683, 3696, 3699, 3715, 3722, 3727, 3732, 3742, 3743, 3751, 3755, 3757, 3762, 3763, 3764, 3788, 3789, 3800, 3814, 3817, 3844, 3845, 3856, 3861, 3863, 3869, 3889, 3903, 3905, 3911, 3913, 3914, 3919, 3920, 3940, 3941, 3942, 
3943, 3948, 3966, 3968, 3973, 3974, 3991, 4032, 4041, 4047, 4052, 4053, 4054, 4055, 4064, 4066, 4067, 4072, 4074, 4076, 4092, 4102, 4104, 4115, 4116, 4120, 4122, 4129, 4134, 4144, 4176, 4177, 4178, 4179, 4196, 4200, 4202, 4204, 4205, 4211, 4229, 4254, 4259, 4269, 4290, 4296, 4306, 4307, 4311, 4312, 4318, 4319, 4325, 4328, 4329, 4335, 4336, 4337, 4347, 4350, 4357, 4359, 4361, 4368, 4371, 4379, 4380, 4381, 4387, 4395, 4397, 4400, 4402, 4413, 4415, 4423, 4424, 4448, 4463, 4477, 4478, 4479, 4496, 4499, 4500, 4512, 4533, 4538, 4557, 4566, 4577, 4578, 4579, 4580, 4595, 4606, 4612, 4616, 4625, 4626, 4635, 4640, 4663, 4696, 4697, 4718, 4730, 4734, 4737, 4738, 4746, 4750, 4759, 4761, 4764, 4780, 4785, 4788, 4793, 4795, 4806, 4812, 4822, 4832, 4833, 4844, 4881, 4892, 4901, 4904, 4905, 4916, 4930, 4931, 4946, 4970, 4976, 4984, 4985, 4990, 4991, 4992, 4994, 5023, 5040, 5042, 5052, 5054, 5087, 5108, 5113, 5115, 5131, 5132, 5134, 5137, 5152, 5166, 5167, 5176, 5192, 5197, 5211, 5212, 5217, 5219, 5228, 5229, 5230, 5232, 5233, 5236, 5238, 5256, 5264, 5274, 5297, 5304, 5307, 5313, 5321, 5337, 5346, 5351, 5359, 5371, 5381, 5383, 5390, 5393, 5402, 5403, 5419, 5431, 5434, 5441, 5451, 5455, 5473, 5480, 5488, 5491, 5502, 5504, 5515, 5517, 5521, 5525, 5533, 5538, 5539, 5561, 5562, 5563, 5565, 5566, 5568, 5569, 5578, 5582, 5590, 5609, 5622, 5631, 5642, 5647, 5649, 5658, 5662, 5667, 5673, 5696, 5705, 5716, 5723, 5726, 5730, 5738, 5742, 5745, 5746, 5747, 5750, 5760, 5779, 5781, 5789, 5793, 5794, 5801, 5818, 5821, 5837, 5839, 5841, 5842, 5845, 5846, 5847, 5859, 5863, 5864, 5870, 5872, 5875, 5889, 5898, 5906, 5909, 5914, 5918, 5921, 5941, 5942, 5946, 5969, 5977, 5986, 5987, 5992, 5993, 5997, 5999, 6002, 6014, 6016, 6020, 6025, 6032, 6033, 6034, 6038, 6056, 6059, 6062, 6071, 6103, 6112, 6113, 6115, 6132, 6139, 6145, 6149, 6164, 6183, 6185, 6186, 6187, 6197, 6208, 6215, 6223, 6231, 6256, 6258, 6264, 6272, 6282, 6292, 6306, 6311, 6312, 6333, 6336, 6340, 6357, 6371, 6378, 6393, 6397, 6401, 6402, 6414, 6429, 6437, 6439, 6443, 6457, 6459, 6461, 6463, 6469, 6471, 6473, 6497, 6498, 6500, 6507, 6510, 6524, 6528, 6556, 6564, 6566, 6568, 6574, 6578, 6586, 6589, 6591, 6601, 6612, 6625, 6632, 6633, 6637, 6644, 6647, 6648, 6649, 6653, 6654, 6666, 6677, 6678, 6679, 6688, 6692, 6695, 6704, 6714, 6717, 6721, 6723, 6724, 6726, 6739, 6745, 6754, 6760, 6763, 6765, 6772, 6773, 6777, 6779, 6784, 6792, 6795, 6796, 6798, 6808, 6832, 6835, 6836, 6841, 6847, 6848, 6849, 6874, 6880, 6892, 6906, 6909, 6912, 6916, 6927, 6928, 6932, 6939, 6946, 6968, 6971, 6978, 7000, 7006, 7015, 7020, 7031, 7036, 7050, 7057, 7058, 7063, 7066, 7067, 7072, 7075, 7085, 7111, 7115, 7118, 7122, 7128, 7141, 7142, 7153, 7155, 7159, 7165, 7186, 7200, 7212, 7218, 7222, 7234, 7235, 7237, 7240, 7252, 7256, 7257, 7264, 7275, 7288, 7307, 7308, 7310, 7321, 7327, 7331, 7343, 7353, 7361, 7366, 7370, 7372, 7373, 7384, 7386, 7393, 7398, 7399, 7410, 7420, 7428, 7432, 7468, 7480, 7489, 7492, 7503, 7513, 7536, 7538, 7547, 7552, 7557, 7560, 7563, 7569, 7578, 7581, 7583, 7602, 7608, 7618, 7619, 7630, 7634, 7647, 7655, 7663, 7664, 7672, 7675, 7678, 7698, 7702, 7713, 7721, 7725, 7726, 7728, 7740, 7742, 7744, 7752, 7755, 7771, 7774, 7779, 7796, 7798, 7815, 7816, 7818, 7832, 7836, 7840, 7858, 7867, 7876, 7880, 7885, 7888, 7895, 7899, 7908, 7921, 7923, 7939, 7942, 7948, 7950, 7951, 7954, 7962, 7979, 7984, 7994, 8009, 8015, 8026, 8029, 8030, 8039, 8045, 8050, 8061, 8063, 8087, 8092, 8102, 8103, 8117, 8133, 8135, 8150, 8157, 8159, 8169, 8180, 8183, 8190, 8191, 8193, 8194, 8196, 8201, 8213, 
8217, 8220, 8226, 8230, 8241, 8251, 8253, 8256, 8260, 8263, 8280, 8285, 8296, 8297, 8321, 8328, 8336, 8341, 8344, 8346, 8349, 8353, 8359, 8360, 8371, 8388, 8399, 8418, 8427, 8439, 8445, 8454, 8462, 8464, 8469, 8487, 8508, 8515, 8522] -------------------------------------------------------------------------------- /data/training2017/splits/split_5_6_14/valid0.json: -------------------------------------------------------------------------------- 1 | [4, 11, 18, 21, 35, 49, 60, 62, 63, 67, 78, 93, 98, 105, 113, 117, 130, 134, 137, 146, 157, 162, 179, 191, 193, 195, 197, 206, 209, 229, 233, 234, 235, 248, 249, 260, 267, 272, 273, 276, 283, 290, 307, 310, 335, 346, 349, 357, 364, 369, 371, 376, 387, 389, 400, 401, 403, 408, 420, 423, 442, 452, 456, 463, 469, 471, 475, 492, 494, 500, 508, 530, 531, 538, 540, 541, 543, 551, 569, 570, 577, 579, 584, 593, 605, 606, 616, 620, 622, 623, 629, 643, 646, 669, 670, 671, 674, 677, 695, 698, 714, 728, 737, 755, 758, 772, 774, 801, 804, 806, 807, 835, 837, 842, 843, 877, 883, 885, 887, 899, 909, 926, 930, 931, 933, 934, 935, 951, 955, 957, 967, 969, 970, 1007, 1010, 1016, 1021, 1023, 1030, 1046, 1055, 1062, 1063, 1066, 1076, 1082, 1094, 1108, 1119, 1126, 1142, 1148, 1162, 1172, 1184, 1199, 1203, 1230, 1237, 1275, 1289, 1292, 1305, 1316, 1324, 1331, 1343, 1357, 1358, 1359, 1369, 1380, 1398, 1404, 1409, 1417, 1434, 1440, 1454, 1487, 1488, 1489, 1496, 1497, 1500, 1520, 1529, 1536, 1549, 1557, 1562, 1563, 1590, 1597, 1603, 1609, 1613, 1619, 1625, 1637, 1661, 1675, 1677, 1688, 1693, 1717, 1720, 1755, 1767, 1772, 1778, 1790, 1799, 1809, 1811, 1812, 1813, 1826, 1831, 1839, 1856, 1857, 1858, 1862, 1865, 1878, 1895, 1896, 1903, 1940, 1941, 1952, 1955, 1964, 1965, 1982, 1991, 1992, 1994, 2003, 2004, 2008, 2013, 2025, 2037, 2052, 2057, 2061, 2067, 2069, 2074, 2076, 2081, 2084, 2086, 2088, 2099, 2102, 2106, 2110, 2134, 2155, 2162, 2169, 2171, 2175, 2183, 2185, 2186, 2189, 2196, 2199, 2219, 2221, 2241, 2246, 2251, 2255, 2261, 2272, 2273, 2290, 2294, 2300, 2301, 2320, 2321, 2322, 2323, 2380, 2383, 2395, 2402, 2411, 2424, 2432, 2436, 2443, 2446, 2449, 2462, 2467, 2493, 2506, 2508, 2515, 2517, 2523, 2525, 2539, 2545, 2550, 2551, 2554, 2560, 2574, 2589, 2590, 2591, 2598, 2604, 2611, 2613, 2614, 2619, 2622, 2626, 2639, 2640, 2645, 2653, 2667, 2684, 2693, 2720, 2724, 2742, 2747, 2752, 2753, 2768, 2778, 2796, 2810, 2815, 2827, 2835, 2836, 2838, 2842, 2844, 2856, 2863, 2891, 2900, 2939, 2943, 2949, 2955, 2959, 2960, 2968, 2971, 2972, 2977, 2988, 2998, 2999, 3026, 3055, 3064, 3065, 3074, 3075, 3076, 3083, 3097, 3102, 3104, 3119, 3120, 3121, 3125, 3130, 3141, 3144, 3151, 3152, 3154, 3156, 3159, 3163, 3174, 3192, 3206, 3220, 3221, 3224, 3240, 3243, 3246, 3252, 3268, 3273, 3281, 3285, 3290, 3291, 3296, 3308, 3315, 3325, 3337, 3338, 3344, 3347, 3365, 3376, 3377, 3387, 3390, 3403, 3415, 3418, 3431, 3433, 3440, 3441, 3443, 3446, 3449, 3456, 3460, 3469, 3478, 3482, 3491, 3515, 3530, 3544, 3548, 3550, 3555, 3562, 3565, 3567, 3577, 3578, 3583, 3601, 3602, 3606, 3614, 3615, 3616, 3618, 3628, 3633, 3649, 3652, 3656, 3657, 3662, 3665, 3667, 3669, 3681, 3682, 3687, 3690, 3717, 3721, 3725, 3730, 3737, 3742, 3754, 3756, 3758, 3762, 3773, 3780, 3782, 3783, 3791, 3799, 3804, 3805, 3806, 3812, 3820, 3830, 3833, 3841, 3842, 3843, 3856, 3878, 3879, 3886, 3918, 3931, 3933, 3936, 3948, 3967, 3968, 3969, 3971, 3972, 3975, 3976, 3977, 3980, 3993, 3997, 4006, 4007, 4025, 4028, 4030, 4066, 4074, 4077, 4081, 4082, 4090, 4091, 4098, 4099, 4101, 4102, 4107, 4108, 4114, 4129, 4137, 4140, 4147, 4148, 4158, 
4164, 4166, 4187, 4214, 4215, 4216, 4221, 4224, 4228, 4241, 4251, 4253, 4256, 4260, 4261, 4267, 4277, 4296, 4304, 4310, 4341, 4342, 4343, 4347, 4355, 4357, 4361, 4372, 4375, 4379, 4380, 4386, 4392, 4399, 4403, 4404, 4411, 4412, 4413, 4415, 4421, 4426, 4434, 4437, 4441, 4443, 4450, 4457, 4458, 4480, 4496, 4510, 4511, 4516, 4517, 4531, 4543, 4545, 4546, 4579, 4581, 4594, 4597, 4607, 4618, 4619, 4620, 4622, 4633, 4642, 4655, 4664, 4666, 4682, 4684, 4715, 4749, 4773, 4778, 4783, 4785, 4794, 4797, 4798, 4803, 4822, 4825, 4832, 4833, 4839, 4843, 4848, 4852, 4860, 4863, 4867, 4873, 4877, 4883, 4901, 4913, 4915, 4924, 4927, 4932, 4948, 4951, 4980, 4984, 4986, 5028, 5031, 5033, 5035, 5038, 5041, 5063, 5066, 5074, 5091, 5094, 5101, 5124, 5125, 5129, 5140, 5147, 5155, 5173, 5179, 5180, 5181, 5194, 5197, 5207, 5213, 5220, 5227, 5232, 5252, 5257, 5264, 5274, 5275, 5277, 5284, 5293, 5313, 5336, 5346, 5347, 5348, 5350, 5357, 5365, 5376, 5377, 5379, 5383, 5389, 5390, 5413, 5415, 5435, 5457, 5461, 5500, 5502, 5504, 5506, 5513, 5517, 5530, 5538, 5540, 5543, 5544, 5560, 5580, 5581, 5586, 5587, 5593, 5596, 5610, 5618, 5631, 5633, 5634, 5635, 5636, 5641, 5645, 5658, 5659, 5663, 5670, 5686, 5691, 5704, 5709, 5710, 5715, 5718, 5724, 5726, 5729, 5732, 5735, 5739, 5761, 5773, 5774, 5776, 5783, 5790, 5801, 5802, 5804, 5807, 5808, 5810, 5812, 5817, 5832, 5837, 5841, 5843, 5854, 5856, 5869, 5877, 5896, 5897, 5898, 5903, 5909, 5919, 5924, 5929, 5938, 5940, 5943, 5963, 5967, 5977, 5983, 5988, 5990, 5994, 5997, 6012, 6023, 6025, 6038, 6048, 6051, 6053, 6057, 6063, 6065, 6067, 6074, 6082, 6083, 6087, 6095, 6102, 6106, 6111, 6112, 6147, 6149, 6154, 6165, 6174, 6178, 6184, 6190, 6205, 6211, 6214, 6215, 6228, 6245, 6250, 6256, 6269, 6289, 6297, 6302, 6309, 6311, 6329, 6333, 6339, 6352, 6355, 6369, 6392, 6399, 6404, 6410, 6418, 6431, 6437, 6443, 6445, 6446, 6454, 6457, 6467, 6469, 6470, 6471, 6482, 6486, 6487, 6501, 6504, 6508, 6509, 6518, 6524, 6525, 6537, 6542, 6545, 6570, 6574, 6575, 6578, 6582, 6598, 6599, 6600, 6607, 6608, 6614, 6618, 6620, 6635, 6642, 6646, 6648, 6650, 6653, 6654, 6660, 6664, 6680, 6682, 6684, 6705, 6719, 6725, 6727, 6732, 6738, 6743, 6744, 6749, 6754, 6756, 6762, 6765, 6768, 6772, 6774, 6778, 6779, 6787, 6798, 6800, 6801, 6804, 6805, 6818, 6835, 6840, 6850, 6852, 6853, 6860, 6868, 6879, 6887, 6903, 6908, 6919, 6924, 6926, 6933, 6935, 6941, 6948, 6973, 6975, 6989, 7000, 7014, 7016, 7018, 7020, 7026, 7044, 7046, 7054, 7055, 7060, 7078, 7093, 7103, 7107, 7117, 7119, 7126, 7127, 7137, 7142, 7152, 7168, 7184, 7203, 7204, 7212, 7213, 7223, 7226, 7235, 7238, 7248, 7260, 7274, 7279, 7280, 7285, 7298, 7315, 7344, 7352, 7361, 7362, 7364, 7366, 7377, 7383, 7392, 7394, 7397, 7403, 7415, 7428, 7446, 7457, 7464, 7469, 7471, 7473, 7480, 7498, 7501, 7502, 7510, 7530, 7534, 7546, 7550, 7552, 7554, 7556, 7559, 7565, 7568, 7573, 7580, 7607, 7608, 7615, 7625, 7632, 7634, 7635, 7636, 7637, 7650, 7655, 7659, 7667, 7674, 7676, 7688, 7691, 7698, 7701, 7703, 7710, 7714, 7721, 7724, 7725, 7731, 7742, 7758, 7777, 7778, 7779, 7785, 7790, 7807, 7821, 7840, 7844, 7850, 7862, 7865, 7880, 7881, 7890, 7894, 7897, 7900, 7907, 7953, 7955, 7964, 7979, 7980, 7986, 7996, 7999, 8005, 8016, 8025, 8027, 8029, 8046, 8048, 8049, 8055, 8067, 8085, 8107, 8110, 8111, 8123, 8136, 8142, 8152, 8161, 8167, 8168, 8171, 8176, 8180, 8183, 8189, 8190, 8191, 8195, 8199, 8208, 8209, 8218, 8221, 8231, 8241, 8255, 8260, 8268, 8274, 8284, 8302, 8317, 8336, 8344, 8346, 8348, 8351, 8358, 8383, 8389, 8395, 8402, 8407, 8414, 8422, 8427, 8435, 8447, 8455, 8459, 
8464, 8484, 8489, 8503, 8512, 8518] -------------------------------------------------------------------------------- /data/training2017/splits/split_5_6_14/valid1.json: -------------------------------------------------------------------------------- 1 | [4, 12, 21, 26, 27, 36, 50, 64, 68, 72, 77, 85, 93, 97, 102, 105, 113, 123, 127, 129, 131, 144, 145, 150, 178, 185, 189, 190, 191, 199, 201, 227, 229, 230, 232, 241, 242, 247, 251, 263, 265, 270, 272, 277, 287, 324, 329, 355, 358, 366, 372, 386, 388, 391, 410, 419, 420, 423, 431, 440, 448, 479, 493, 495, 503, 506, 512, 514, 516, 521, 522, 523, 529, 533, 541, 543, 545, 550, 568, 588, 595, 596, 597, 598, 608, 611, 627, 637, 648, 650, 660, 669, 683, 704, 708, 710, 734, 753, 754, 755, 757, 760, 774, 779, 786, 791, 798, 799, 822, 825, 855, 859, 862, 864, 868, 869, 872, 878, 888, 907, 909, 913, 939, 942, 947, 956, 960, 969, 972, 986, 992, 1014, 1020, 1022, 1023, 1056, 1064, 1070, 1074, 1086, 1094, 1101, 1108, 1123, 1129, 1130, 1137, 1142, 1144, 1155, 1180, 1190, 1202, 1203, 1208, 1223, 1231, 1236, 1243, 1256, 1259, 1260, 1265, 1284, 1308, 1350, 1377, 1381, 1399, 1411, 1414, 1415, 1428, 1429, 1433, 1437, 1455, 1458, 1463, 1468, 1474, 1479, 1499, 1503, 1505, 1549, 1551, 1560, 1566, 1568, 1579, 1584, 1585, 1590, 1604, 1613, 1635, 1641, 1647, 1652, 1658, 1663, 1666, 1667, 1675, 1679, 1684, 1693, 1704, 1718, 1725, 1728, 1738, 1764, 1770, 1784, 1812, 1821, 1828, 1829, 1848, 1857, 1872, 1873, 1883, 1886, 1889, 1896, 1900, 1908, 1917, 1924, 1925, 1929, 1930, 1932, 1940, 1947, 1951, 1965, 1988, 2006, 2018, 2041, 2058, 2061, 2062, 2069, 2078, 2080, 2084, 2096, 2102, 2110, 2116, 2134, 2137, 2144, 2149, 2158, 2161, 2169, 2171, 2175, 2179, 2181, 2193, 2212, 2213, 2215, 2218, 2246, 2247, 2253, 2256, 2261, 2269, 2274, 2275, 2277, 2280, 2285, 2310, 2313, 2314, 2318, 2332, 2336, 2337, 2340, 2344, 2348, 2350, 2355, 2358, 2364, 2382, 2400, 2409, 2423, 2429, 2433, 2444, 2454, 2456, 2485, 2495, 2504, 2549, 2555, 2563, 2565, 2566, 2569, 2581, 2585, 2591, 2614, 2624, 2625, 2626, 2632, 2633, 2640, 2652, 2655, 2658, 2659, 2662, 2667, 2670, 2678, 2695, 2696, 2698, 2701, 2709, 2715, 2719, 2731, 2755, 2763, 2769, 2770, 2772, 2785, 2793, 2802, 2803, 2810, 2822, 2835, 2844, 2848, 2853, 2861, 2864, 2869, 2877, 2889, 2890, 2898, 2906, 2911, 2917, 2945, 2953, 2962, 2974, 2975, 2984, 2987, 2990, 2991, 2994, 2995, 3004, 3012, 3015, 3018, 3035, 3070, 3086, 3091, 3095, 3103, 3106, 3109, 3112, 3114, 3130, 3131, 3132, 3136, 3138, 3149, 3154, 3155, 3158, 3161, 3168, 3176, 3178, 3192, 3212, 3214, 3226, 3229, 3242, 3263, 3265, 3272, 3295, 3297, 3301, 3304, 3310, 3313, 3331, 3344, 3350, 3351, 3357, 3361, 3371, 3373, 3382, 3384, 3385, 3391, 3398, 3401, 3406, 3422, 3428, 3435, 3436, 3439, 3444, 3450, 3451, 3456, 3461, 3467, 3470, 3487, 3493, 3504, 3515, 3519, 3521, 3527, 3528, 3540, 3544, 3557, 3564, 3570, 3574, 3575, 3577, 3582, 3588, 3591, 3601, 3605, 3612, 3615, 3623, 3630, 3632, 3643, 3658, 3667, 3668, 3676, 3677, 3681, 3699, 3701, 3702, 3709, 3717, 3725, 3732, 3745, 3750, 3768, 3777, 3778, 3782, 3792, 3795, 3804, 3809, 3812, 3815, 3818, 3822, 3848, 3851, 3855, 3862, 3865, 3869, 3870, 3883, 3891, 3910, 3926, 3948, 3953, 3981, 3984, 3985, 3988, 3995, 3996, 4002, 4003, 4012, 4014, 4017, 4024, 4026, 4029, 4045, 4047, 4063, 4076, 4089, 4105, 4106, 4113, 4115, 4117, 4118, 4119, 4124, 4127, 4129, 4149, 4154, 4161, 4162, 4167, 4171, 4182, 4192, 4201, 4232, 4234, 4236, 4240, 4264, 4268, 4270, 4275, 4276, 4281, 4291, 4308, 4315, 4322, 4327, 4335, 4359, 4364, 4367, 4368, 4374, 4377, 4384, 4385, 
4390, 4391, 4402, 4407, 4411, 4416, 4420, 4424, 4432, 4434, 4436, 4440, 4447, 4450, 4456, 4470, 4472, 4480, 4491, 4493, 4498, 4522, 4533, 4535, 4537, 4538, 4557, 4560, 4562, 4567, 4570, 4591, 4613, 4630, 4644, 4645, 4646, 4647, 4651, 4652, 4664, 4681, 4690, 4699, 4700, 4718, 4723, 4734, 4736, 4742, 4745, 4761, 4781, 4795, 4803, 4807, 4818, 4830, 4834, 4839, 4844, 4868, 4876, 4877, 4880, 4886, 4888, 4891, 4912, 4917, 4938, 4982, 4987, 4998, 5034, 5064, 5076, 5078, 5080, 5082, 5083, 5084, 5087, 5110, 5122, 5123, 5125, 5126, 5128, 5144, 5154, 5163, 5175, 5176, 5180, 5184, 5197, 5199, 5204, 5207, 5211, 5214, 5219, 5228, 5244, 5257, 5266, 5274, 5284, 5288, 5289, 5295, 5303, 5325, 5331, 5337, 5352, 5355, 5360, 5364, 5371, 5372, 5373, 5374, 5380, 5383, 5394, 5401, 5404, 5423, 5452, 5455, 5465, 5476, 5489, 5497, 5500, 5501, 5503, 5505, 5509, 5519, 5523, 5530, 5533, 5535, 5543, 5558, 5560, 5561, 5566, 5568, 5572, 5575, 5587, 5601, 5604, 5605, 5606, 5608, 5610, 5612, 5627, 5629, 5659, 5662, 5676, 5681, 5685, 5687, 5698, 5701, 5714, 5716, 5726, 5729, 5735, 5737, 5743, 5750, 5751, 5761, 5770, 5773, 5776, 5780, 5783, 5796, 5803, 5806, 5813, 5815, 5825, 5831, 5838, 5840, 5843, 5857, 5861, 5862, 5868, 5870, 5888, 5892, 5895, 5899, 5904, 5917, 5929, 5936, 5940, 5942, 5944, 5961, 5965, 5980, 5991, 6008, 6015, 6016, 6021, 6025, 6027, 6037, 6038, 6045, 6050, 6051, 6052, 6056, 6057, 6067, 6070, 6076, 6079, 6090, 6118, 6119, 6122, 6132, 6136, 6142, 6147, 6151, 6158, 6168, 6172, 6173, 6179, 6180, 6197, 6202, 6226, 6229, 6234, 6243, 6246, 6260, 6264, 6278, 6284, 6295, 6298, 6301, 6335, 6343, 6345, 6351, 6360, 6365, 6377, 6380, 6384, 6390, 6392, 6398, 6404, 6414, 6419, 6420, 6424, 6439, 6443, 6445, 6460, 6465, 6466, 6467, 6474, 6486, 6496, 6512, 6520, 6525, 6528, 6529, 6540, 6546, 6557, 6560, 6561, 6569, 6579, 6587, 6589, 6598, 6601, 6602, 6605, 6625, 6635, 6636, 6647, 6648, 6669, 6678, 6680, 6683, 6689, 6694, 6695, 6701, 6709, 6710, 6715, 6720, 6724, 6726, 6731, 6738, 6748, 6750, 6751, 6753, 6755, 6760, 6766, 6773, 6786, 6789, 6801, 6805, 6808, 6810, 6820, 6822, 6825, 6845, 6859, 6860, 6869, 6874, 6876, 6879, 6893, 6896, 6904, 6910, 6920, 6927, 6947, 6960, 6975, 6977, 6983, 6989, 6990, 7008, 7017, 7022, 7034, 7035, 7043, 7053, 7066, 7067, 7076, 7077, 7082, 7100, 7105, 7114, 7115, 7126, 7130, 7139, 7150, 7156, 7158, 7176, 7191, 7205, 7210, 7211, 7212, 7213, 7214, 7219, 7228, 7241, 7266, 7274, 7278, 7281, 7283, 7304, 7305, 7339, 7346, 7360, 7361, 7362, 7366, 7373, 7376, 7383, 7386, 7390, 7397, 7403, 7428, 7432, 7435, 7442, 7453, 7472, 7473, 7483, 7486, 7512, 7514, 7525, 7530, 7540, 7544, 7549, 7552, 7563, 7570, 7572, 7580, 7593, 7603, 7604, 7616, 7622, 7623, 7631, 7637, 7638, 7647, 7654, 7657, 7663, 7664, 7666, 7671, 7674, 7686, 7697, 7703, 7708, 7711, 7719, 7721, 7731, 7736, 7753, 7754, 7770, 7777, 7779, 7794, 7795, 7799, 7815, 7826, 7840, 7847, 7858, 7864, 7881, 7883, 7891, 7902, 7908, 7923, 7932, 7934, 7935, 7938, 7949, 7965, 7989, 8001, 8006, 8008, 8012, 8015, 8017, 8022, 8036, 8039, 8059, 8063, 8064, 8087, 8088, 8092, 8094, 8103, 8111, 8113, 8124, 8130, 8138, 8143, 8157, 8161, 8171, 8173, 8174, 8175, 8182, 8198, 8203, 8209, 8215, 8219, 8238, 8240, 8247, 8249, 8256, 8260, 8262, 8266, 8272, 8295, 8299, 8307, 8326, 8328, 8330, 8335, 8341, 8343, 8352, 8375, 8377, 8390, 8396, 8401, 8402, 8429, 8431, 8441, 8447, 8449, 8450, 8487, 8489, 8503, 8512, 8518] -------------------------------------------------------------------------------- /data/training2017/splits/split_5_6_14/valid3.json: 
-------------------------------------------------------------------------------- 1 | [4, 14, 19, 26, 27, 37, 51, 63, 64, 68, 77, 85, 91, 93, 98, 104, 107, 116, 117, 131, 138, 146, 150, 159, 172, 178, 180, 182, 190, 201, 209, 222, 224, 226, 228, 242, 244, 265, 266, 271, 273, 279, 286, 292, 308, 355, 357, 362, 365, 372, 380, 381, 385, 386, 409, 420, 421, 425, 437, 442, 459, 471, 473, 487, 492, 494, 496, 497, 514, 515, 528, 548, 551, 561, 565, 572, 575, 580, 581, 582, 583, 585, 603, 604, 608, 616, 624, 634, 644, 648, 653, 655, 660, 663, 665, 707, 709, 713, 714, 721, 728, 732, 739, 752, 764, 783, 787, 798, 809, 813, 816, 820, 856, 858, 861, 867, 873, 875, 909, 910, 923, 924, 927, 944, 952, 960, 967, 974, 976, 992, 996, 1011, 1020, 1021, 1023, 1027, 1041, 1061, 1066, 1081, 1083, 1089, 1096, 1114, 1120, 1127, 1129, 1132, 1140, 1168, 1184, 1188, 1190, 1205, 1214, 1224, 1229, 1239, 1246, 1247, 1261, 1279, 1299, 1308, 1334, 1348, 1350, 1359, 1381, 1390, 1391, 1402, 1415, 1420, 1424, 1437, 1442, 1445, 1448, 1470, 1473, 1479, 1501, 1508, 1509, 1517, 1523, 1524, 1528, 1536, 1537, 1551, 1559, 1577, 1591, 1597, 1603, 1610, 1615, 1618, 1626, 1631, 1658, 1673, 1674, 1685, 1688, 1690, 1707, 1715, 1724, 1731, 1755, 1757, 1761, 1767, 1778, 1784, 1786, 1795, 1797, 1808, 1813, 1845, 1847, 1848, 1851, 1858, 1868, 1872, 1882, 1884, 1913, 1934, 1941, 1948, 1955, 1960, 1961, 1966, 1985, 1993, 1996, 2008, 2010, 2019, 2029, 2031, 2043, 2047, 2051, 2055, 2058, 2061, 2075, 2078, 2085, 2088, 2095, 2097, 2119, 2129, 2131, 2151, 2158, 2160, 2167, 2168, 2174, 2178, 2182, 2185, 2198, 2209, 2211, 2224, 2226, 2231, 2233, 2236, 2239, 2243, 2255, 2258, 2268, 2270, 2290, 2293, 2296, 2299, 2322, 2347, 2360, 2370, 2387, 2389, 2396, 2398, 2399, 2400, 2406, 2410, 2441, 2462, 2466, 2476, 2478, 2480, 2481, 2490, 2491, 2496, 2508, 2514, 2543, 2545, 2546, 2553, 2565, 2571, 2573, 2580, 2583, 2590, 2592, 2596, 2603, 2618, 2628, 2648, 2657, 2665, 2666, 2675, 2684, 2688, 2700, 2721, 2732, 2740, 2744, 2747, 2750, 2756, 2761, 2766, 2775, 2780, 2789, 2815, 2841, 2845, 2854, 2863, 2876, 2877, 2887, 2899, 2903, 2916, 2919, 2920, 2921, 2925, 2926, 2933, 2939, 2962, 2963, 3002, 3010, 3016, 3020, 3022, 3035, 3040, 3043, 3054, 3056, 3057, 3064, 3065, 3066, 3080, 3089, 3093, 3096, 3104, 3109, 3114, 3155, 3167, 3169, 3170, 3182, 3183, 3188, 3196, 3203, 3224, 3231, 3236, 3241, 3242, 3251, 3259, 3268, 3271, 3279, 3280, 3284, 3288, 3291, 3315, 3317, 3328, 3334, 3342, 3358, 3362, 3377, 3379, 3387, 3388, 3396, 3404, 3423, 3429, 3437, 3438, 3453, 3459, 3464, 3475, 3476, 3479, 3483, 3490, 3494, 3498, 3506, 3508, 3516, 3519, 3525, 3533, 3536, 3539, 3568, 3569, 3574, 3579, 3582, 3586, 3589, 3591, 3609, 3610, 3617, 3622, 3627, 3630, 3634, 3640, 3653, 3661, 3667, 3668, 3669, 3680, 3683, 3688, 3699, 3705, 3710, 3713, 3718, 3719, 3720, 3721, 3736, 3745, 3747, 3752, 3759, 3763, 3773, 3778, 3781, 3783, 3788, 3817, 3840, 3847, 3851, 3865, 3875, 3879, 3890, 3894, 3895, 3896, 3897, 3898, 3900, 3913, 3916, 3918, 3923, 3925, 3948, 3975, 3990, 4007, 4008, 4012, 4014, 4016, 4017, 4021, 4023, 4028, 4029, 4030, 4036, 4048, 4052, 4058, 4063, 4065, 4067, 4072, 4083, 4092, 4094, 4129, 4132, 4139, 4160, 4162, 4163, 4169, 4175, 4176, 4180, 4195, 4200, 4213, 4255, 4260, 4263, 4269, 4271, 4276, 4279, 4289, 4295, 4297, 4307, 4315, 4318, 4325, 4326, 4333, 4335, 4336, 4344, 4347, 4348, 4349, 4356, 4358, 4362, 4364, 4365, 4373, 4389, 4394, 4400, 4407, 4415, 4416, 4446, 4447, 4448, 4458, 4479, 4481, 4496, 4503, 4506, 4512, 4522, 4541, 4549, 4551, 4554, 4558, 4562, 4566, 4578, 4591, 
4598, 4610, 4620, 4624, 4640, 4641, 4665, 4692, 4711, 4724, 4731, 4737, 4741, 4742, 4751, 4754, 4763, 4769, 4776, 4779, 4787, 4791, 4797, 4801, 4826, 4830, 4852, 4861, 4874, 4883, 4891, 4927, 4930, 4933, 4948, 4962, 4974, 4978, 4982, 4983, 4985, 4987, 4990, 5004, 5008, 5014, 5033, 5036, 5040, 5043, 5053, 5072, 5078, 5100, 5107, 5127, 5132, 5135, 5142, 5153, 5155, 5181, 5186, 5189, 5191, 5201, 5208, 5215, 5216, 5220, 5221, 5227, 5232, 5240, 5256, 5263, 5280, 5283, 5285, 5289, 5314, 5315, 5317, 5322, 5338, 5351, 5357, 5360, 5385, 5390, 5392, 5393, 5398, 5400, 5409, 5411, 5428, 5442, 5447, 5450, 5452, 5467, 5477, 5478, 5479, 5493, 5510, 5511, 5512, 5525, 5527, 5533, 5540, 5553, 5554, 5555, 5556, 5559, 5560, 5562, 5574, 5595, 5597, 5603, 5621, 5630, 5636, 5645, 5646, 5648, 5652, 5661, 5673, 5684, 5701, 5708, 5709, 5718, 5728, 5736, 5740, 5741, 5742, 5749, 5758, 5767, 5772, 5777, 5786, 5791, 5798, 5804, 5811, 5818, 5828, 5830, 5832, 5834, 5838, 5840, 5842, 5850, 5854, 5858, 5863, 5865, 5869, 5887, 5893, 5900, 5903, 5906, 5917, 5927, 5931, 5939, 5954, 5958, 5960, 5961, 5973, 5975, 5977, 5995, 5997, 5999, 6008, 6009, 6015, 6019, 6024, 6028, 6046, 6051, 6053, 6054, 6071, 6092, 6106, 6109, 6125, 6130, 6132, 6136, 6141, 6144, 6167, 6172, 6173, 6182, 6184, 6202, 6207, 6231, 6250, 6259, 6261, 6264, 6270, 6278, 6289, 6292, 6308, 6320, 6326, 6329, 6343, 6370, 6372, 6374, 6381, 6390, 6398, 6406, 6412, 6425, 6426, 6427, 6435, 6437, 6453, 6456, 6461, 6462, 6463, 6469, 6471, 6473, 6498, 6499, 6502, 6505, 6518, 6527, 6541, 6542, 6551, 6577, 6579, 6580, 6585, 6587, 6595, 6598, 6599, 6600, 6607, 6611, 6618, 6625, 6626, 6641, 6647, 6652, 6655, 6657, 6659, 6664, 6685, 6688, 6691, 6696, 6699, 6713, 6718, 6728, 6730, 6734, 6744, 6747, 6749, 6755, 6760, 6762, 6768, 6770, 6773, 6774, 6785, 6794, 6796, 6810, 6812, 6814, 6816, 6818, 6830, 6844, 6848, 6858, 6863, 6868, 6881, 6891, 6905, 6914, 6928, 6930, 6941, 6942, 6949, 6955, 6958, 6968, 6971, 6990, 7001, 7011, 7017, 7023, 7025, 7028, 7037, 7054, 7063, 7067, 7068, 7074, 7075, 7076, 7080, 7088, 7108, 7112, 7128, 7141, 7142, 7143, 7156, 7159, 7164, 7168, 7170, 7193, 7216, 7226, 7229, 7236, 7239, 7240, 7243, 7266, 7267, 7281, 7282, 7284, 7289, 7292, 7300, 7304, 7319, 7324, 7330, 7341, 7365, 7376, 7384, 7385, 7386, 7397, 7401, 7411, 7414, 7415, 7426, 7429, 7435, 7443, 7467, 7479, 7485, 7495, 7511, 7518, 7519, 7543, 7544, 7555, 7556, 7562, 7566, 7567, 7568, 7578, 7583, 7586, 7587, 7605, 7616, 7619, 7620, 7630, 7644, 7646, 7657, 7660, 7670, 7671, 7672, 7681, 7689, 7690, 7701, 7715, 7720, 7722, 7723, 7724, 7731, 7736, 7747, 7748, 7749, 7754, 7776, 7780, 7799, 7801, 7803, 7821, 7822, 7823, 7833, 7855, 7860, 7865, 7877, 7883, 7887, 7888, 7891, 7902, 7905, 7908, 7909, 7915, 7927, 7936, 7939, 7953, 7955, 7957, 7967, 7995, 8003, 8004, 8022, 8024, 8030, 8031, 8035, 8041, 8044, 8045, 8047, 8051, 8060, 8087, 8105, 8109, 8111, 8112, 8122, 8131, 8132, 8145, 8158, 8163, 8165, 8174, 8178, 8187, 8189, 8197, 8200, 8203, 8215, 8216, 8221, 8225, 8229, 8244, 8247, 8249, 8250, 8252, 8272, 8276, 8281, 8283, 8288, 8315, 8336, 8338, 8341, 8345, 8348, 8353, 8358, 8365, 8371, 8391, 8399, 8402, 8405, 8406, 8410, 8431, 8440, 8444, 8449, 8457, 8468, 8473, 8502, 8507, 8513] -------------------------------------------------------------------------------- /codes/utils/nn_layers.py: -------------------------------------------------------------------------------- 1 | '''*********************************************** 2 | * 3 | * project: physioNet 4 | * created: 22.03.2017 5 | * purpose: nn building 
blocks 6 | * 7 | ***********************************************''' 8 | 9 | '''*********************************************** 10 | * Imports 11 | ***********************************************''' 12 | 13 | import utils.tf_helper as tfh 14 | import tensorflow as tf 15 | import scipy.signal as sig 16 | import tensorflow.contrib.rnn as rnn 17 | 18 | 19 | '''*********************************************** 20 | * Variables 21 | ***********************************************''' 22 | 23 | rnn_all_outputs = True 24 | 25 | '''*********************************************** 26 | * Functions 27 | ***********************************************''' 28 | 29 | def conv2d_block(inputs, length, kernel_size=[3,3], n_channels=None, growth=0, dilation_rates=[1,1,1], 30 | strides_end=[2,2], max_pooling=False, is_training=False, drop_rate=0): 31 | 32 | # inherit layer-width from input 33 | if n_channels is None: 34 | n_channels = tfh.get_static_shape(inputs)[3] 35 | 36 | conv = inputs 37 | max_pool_en = False 38 | strides = [1,1] 39 | depth = len(dilation_rates) 40 | 41 | for d in range(depth): 42 | 43 | if d == depth-1: 44 | n_channels = n_channels+growth 45 | if max_pooling: 46 | max_pool_en = True 47 | else: 48 | strides = strides_end 49 | 50 | conv = tf.layers.conv2d( 51 | inputs=conv, 52 | filters=n_channels, 53 | kernel_size=kernel_size, 54 | strides=strides, 55 | padding='same', 56 | dilation_rate=(1, dilation_rates[d])) 57 | 58 | conv = tf.layers.batch_normalization( 59 | inputs=conv, 60 | center=True, 61 | scale=True, 62 | training=is_training) 63 | 64 | conv = tf.nn.relu(conv) 65 | if max_pool_en: 66 | conv = tf.layers.max_pooling2d( 67 | inputs=conv, 68 | pool_size=strides_end, 69 | strides=strides_end, 70 | padding='same') 71 | output = tf.layers.dropout(inputs=conv, rate=drop_rate) 72 | 73 | length = tf.floordiv((length+1),2) 74 | 75 | return [output, length] 76 | 77 | 78 | def average_features(input, length): 79 | # since we use affine functions, the zero-padded part of our datasets 80 | # is now filled with the bias of the previous layers; 81 | # in order to get the mean over only the meaningful data, 82 | # set the zero-padded part back to zero again 83 | data = tfh.set_dynamiczero(input, length) 84 | # as we have zero-padded data, 85 | # reduce_mean would result in values that are too small for most sequences, 86 | # therefore use reduce_sum and divide by the actual length instead 87 | data = tf.reduce_sum(data, axis=1) 88 | divisor = tf.cast(length, tf.float32) 89 | divisor = tf.expand_dims(divisor, dim=1) 90 | output = tf.div(data, divisor) 91 | return output 92 | 93 | 94 | def mean_branch(input, length, out_s): 95 | output = average_features(input, length) 96 | output = tf.layers.dense(inputs=output, units=out_s) 97 | output = tf.nn.relu(output) 98 | return output 99 | 100 | 101 | def lstm_layer(data, length, n_neurons, n_layers, bidirectional=False, drop_rate=0): 102 | data = tfh.set_dynamiczero(data, length) 103 | seq_l = tfh.get_length(data) 104 | if bidirectional: 105 | # we concatenate the forward and backward outputs into one output, 106 | # so each must be only half the final size 107 | n_neurons = int(n_neurons/2) 108 | cell_fw = rnn.LSTMCell( n_neurons, state_is_tuple=True) 109 | cell_fw = rnn.DropoutWrapper(cell_fw, output_keep_prob=1-drop_rate) 110 | cell_fw = rnn.MultiRNNCell([cell_fw] * n_layers) 111 | cell_bw = rnn.LSTMCell( n_neurons, state_is_tuple=True) 112 | cell_bw = rnn.DropoutWrapper(cell_bw, output_keep_prob=1-drop_rate) 113 | cell_bw = rnn.MultiRNNCell([cell_bw] * n_layers) 114 | outputs,_ =
tf.nn.bidirectional_dynamic_rnn( 115 | cell_fw, 116 | cell_bw, 117 | data, 118 | sequence_length=seq_l, 119 | dtype=tf.float32 120 | ) 121 | (output_fw, output_bw) = outputs 122 | if rnn_all_outputs: 123 | output_bw = average_features(output_bw, length) 124 | output_fw = average_features(output_fw, length) 125 | else: 126 | output_bw = output_bw[:,0,:] 127 | output_fw = tfh.get_dynamiclast(output_fw, seq_l) 128 | output = tf.concat([output_bw, output_fw], axis=1) 129 | else: 130 | cell = rnn.LSTMCell( n_neurons, state_is_tuple=True) 131 | cell = rnn.DropoutWrapper(cell, output_keep_prob=1-drop_rate) 132 | cell = rnn.MultiRNNCell([cell] * n_layers) 133 | output, _ = tf.nn.dynamic_rnn( 134 | cell=cell, 135 | dtype=tf.float32, 136 | sequence_length=seq_l, 137 | inputs=data) 138 | output = tfh.get_dynamiclast(output, seq_l) 139 | return output 140 | 141 | 142 | def awgn_channel(input, snr): 143 | # adds white Gaussian noise to the input, wherever the input is not zero-padded 144 | shape = tfh.get_static_shape(input) 145 | shape[0] = tfh.get_dynamic_shape(input)[0] 146 | 147 | dim = len(shape) 148 | l = tfh.get_length(input) 149 | # total energy of input signal 150 | e = tf.multiply(input, input) 151 | if dim==2: 152 | e = tf.reduce_sum(e, axis=1) 153 | else: 154 | e = tf.reduce_mean(e, axis=2) 155 | e = tf.reduce_sum(e, axis=1) 156 | # average power of input signal (dividing by the true length, not the padded one) 157 | p = tf.div(e, tf.cast(l, tf.float32)) 158 | snr = tf.constant(snr, tf.float32) 159 | stddev = tf.sqrt(tf.div(p, snr)) 160 | # make 3d 161 | stddev = tf.expand_dims(stddev, 1) 162 | stddev = tf.expand_dims(stddev, 1) 163 | # generate noise of same shape 164 | noise = tf.random_normal( shape, mean=0.0, stddev=1, dtype=tf.float32) 165 | # each row of noise has its own stddev -> broadcast multiplication with stddev 166 | noise = tf.multiply(stddev, noise) 167 | # drop the noise wherever the data is zero-padded 168 | noise = tfh.set_dynamiczero(noise, l) 169 | output = input + noise 170 | return output 171 | 172 | '''*********************************************** 173 | * Script 174 | ***********************************************''' 175 | 176 | if __name__ == '__main__': 177 | pass 178 | --------------------------------------------------------------------------------
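The padding-aware pooling in average_features and the per-sequence noise scaling in awgn_channel above both come down to the same masking arithmetic. The short NumPy sketch below is an illustrative, framework-free restatement of that math (it is not a file of this repository, and the helper names masked_average and awgn are made up here); it assumes batches of shape (batch, time, channels) that are zero-padded along the time axis, like the TensorFlow code above.

import numpy as np

def masked_average(batch, lengths):
    # batch: (B, T, C) zero-padded along T; lengths: (B,) true sequence lengths
    t = np.arange(batch.shape[1])[None, :, None]              # (1, T, 1)
    mask = (t < lengths[:, None, None]).astype(batch.dtype)   # (B, T, 1)
    summed = (batch * mask).sum(axis=1)                        # (B, C)
    # divide by the true length, not by T: a plain mean over T would be
    # dragged towards zero by the padded samples
    return summed / lengths[:, None].astype(batch.dtype)

def awgn(batch, lengths, snr):
    # add white Gaussian noise at a fixed linear SNR, only on the valid samples
    t = np.arange(batch.shape[1])[None, :, None]
    mask = (t < lengths[:, None, None]).astype(batch.dtype)
    power = (batch ** 2).mean(axis=2).sum(axis=1) / lengths    # (B,) average power per sequence
    stddev = np.sqrt(power / snr)[:, None, None]               # one noise scale per sequence
    noise = stddev * np.random.randn(*batch.shape)
    return batch + noise * mask                                # keep the padding noise-free

x = np.zeros((2, 5, 1))
x[0, :3, 0] = 1.0   # length-3 sequence
x[1, :5, 0] = 2.0   # length-5 sequence
print(masked_average(x, np.array([3, 5])))   # [[1.], [2.]]
print(awgn(x, np.array([3, 5]), snr=10.0).shape)

The printed means stay at 1 and 2 because the padded zeros are excluded from the division, which is exactly why average_features uses reduce_sum divided by the true length instead of reduce_mean.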
/codes/train.py: -------------------------------------------------------------------------------- 1 | '''*********************************************** 2 | * 3 | * project: physioNet 4 | * file: train 5 | * created: 22.03.2017 6 | * purpose: main script to train networks 7 | * 8 | ***********************************************''' 9 | 10 | '''*********************************************** 11 | * Imports 12 | ***********************************************''' 13 | 14 | from definitions import * 15 | import sys 16 | import multiprocessing as mp 17 | import os 18 | import time 19 | import json 20 | 21 | from network.cnn import CNN 22 | from network.crnn import CRNN 23 | 24 | '''*********************************************** 25 | * run training on queue 26 | ***********************************************''' 27 | 28 | def train_queue(): 29 | 30 | # generate/load jobs to be done 31 | jobs = load_jobs() 32 | print('jobs in queue:') 33 | [print(' ' + job['name'] + '(' + str(job['cvid']) + ')' ) for job in jobs] 34 | 35 | # store jobs in queue 36 | job_queue = mp.Queue(len(jobs)) 37 | for job in jobs: 38 | job_queue.put(job) 39 | 40 | # Instantiate workers 41 | if default_dev == CPU: 42 | workers = [{'dev': '0', 'name': 'CPU0'}] 43 | else: 44 | workers = [ {'dev': dev, 'name': 'GPU' + dev} for dev in GPU_devices] 45 | print('workers used in this queue:') 46 | [print(' ' + worker['name']) for worker in workers] 47 | 48 | # generate and start a process for every available GPU 49 | processes = [mp.Process(name=worker['name']+'_process', target=run_worker, args=(worker, job_queue, )) 50 | for worker in workers ] 51 | for process in processes: 52 | process.start() 53 | 54 | try: 55 | for process in processes: 56 | process.join() 57 | except: 58 | for process in processes: 59 | print('Terminating', process.name) 60 | process.terminate() 61 | 62 | print('Main process terminated') 63 | 64 | 65 | def load_jobs(): 66 | with open(job_file) as fh: 67 | jobs_str = fh.read() 68 | jobs_dict = json.loads(jobs_str) 69 | # interleave jobs over cv_ids, so every job gets a first run relatively soon 70 | # instead of running the same job len(cv_ids) times before touching another config (see the example note below) 71 | jobs = [] 72 | for idx in range(5): 73 | for job_name, job_dict in jobs_dict.items(): 74 | if idx < len(job_dict['cvids']): 75 | new_job = { 76 | 'name': job_name, 77 | 'description': job_dict['description'], 78 | 'model': job_dict['model'], 79 | 'split': job_dict['split'], 80 | 'log_en': job_dict['log_en'], 81 | 'log_test_score': job_dict['log_test_score'], 82 | 'cvid': job_dict['cvids'][idx] 83 | } 84 | jobs.append(new_job) 85 | return jobs 86 | 87 | 88 | def run_worker(worker, queue): 89 | 90 | # setup environment 91 | outpath = os.path.normpath(os.path.join(root, tmp_dir, 'stdout'+worker['name']+'.out')) 92 | print('Worker ' + worker['name'] + ' starting. Output of this worker is redirected; to track it, open a new window and run ' + 93 | '\"tail -f ' + outpath + '\"') 94 | redirect_output(outpath) 95 | print('---------------------------------------------------------------------------') 96 | os.environ['CUDA_VISIBLE_DEVICES'] = worker['dev'] 97 | print('GPU device', worker['dev'], 'used in process', os.getpid(), '( Worker', worker['name'], ')') 98 | print('Worker ' + worker['name'] + ' starting') 99 | 100 | # load and execute jobs until the queue is empty 101 | while not queue.empty(): 102 | 103 | try: 104 | job = queue.get() 105 | job.update({'worker': worker['name']}) 106 | except Exception: 107 | print('Exception while loading new job.') 108 | break 109 | 110 | print('---------------------------------------------------------------------------') 111 | print(' Worker ' + worker['name'] + ' starting job ' + job['name'] + '(' + str(job['cvid']) + ')') 112 | print('---------------------------------------------------------------------------') 113 | 114 | try: 115 | train(job) 116 | except Exception as e: 117 | print(e) 118 | 119 | print(' Worker ' + worker['name'] + ' finished job ' + job['name'] + '(' + str(job['cvid']) + ')') 120 | 121 | print('Worker ' + worker['name'] + ' done.') 122 | 123 | 124 | def redirect_output(filename): 125 | fd = os.open(filename, os.O_WRONLY | os.O_APPEND | os.O_CREAT) 126 | stdout = 1 # stdout 127 | errout = 2 # errout 128 | os.dup2(fd, stdout) 129 | os.dup2(fd, errout) 130 | 131 | '''*********************************************** 132 | * run single job 133 | ***********************************************''' 134 | 135 | def train_single(model_file): 136 | job = { 137 | 'name': model_file, 138 | 'description': 'Running single model to debug', 139 | 'model': model_file, 140 | 'split': 'split_5_6_14', 141 | 'log_en': False, 142 | 'log_test_score': False, 143 | 'cvid': 0, 144 | 'worker': 'debugger' 145 | } 146 | train(job) 147 | 148 |
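# Example: given a job file with the two configurations CNN_paper and CRNN_paper and,
# say, "cvids": [0, 1] for each, load_jobs() above fills the queue with every
# configuration's first listed fold before any configuration's second one, e.g.
#   CNN_paper(0), CRNN_paper(0), CNN_paper(1), CRNN_paper(1)
# so each model produces an early first result instead of occupying the queue for all of its folds.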
'''*********************************************** 149 | * core training functions 150 | ***********************************************''' 151 | 152 | def train(job): 153 | 154 | if 'CRNN' in job['model']: 155 | trainCRNN(job) 156 | elif 'CNN' in job['model']: 157 | trainCNN(job) 158 | elif 'HNNStage1' in job['model']: 159 | trainHNNStage1(job) 160 | elif 'HNNStage2R' in job['model']: 161 | trainHNNStage2R(job) 162 | elif 'HNNStage2' in job['model']: 163 | trainHNNStage2(job) 164 | else: 165 | print('[Error] No training function defined for this network') 166 | 167 | def trainCRNN(job): 168 | network = CRNN() 169 | network.load_job(job) 170 | network.build() 171 | network.train(epochs=500, phase=0) 172 | network.train(epochs=100, phase=1) 173 | network.train(epochs=200, phase=2) 174 | network.learning_rate = network.learning_rate/10 175 | network.train(epochs=200, phase=3) 176 | network.learning_rate = network.learning_rate/10 177 | network.train(epochs=200, phase=4) 178 | 179 | def trainCNN(job): 180 | network = CNN() 181 | network.load_job(job) 182 | network.build() 183 | network.train(epochs=500, phase=0) 184 | 185 | def trainHNNStage1(job): 186 | network = HNNStage1() 187 | network.load_job(job) 188 | network.build() 189 | network.train(epochs=300, phase=0) 190 | 191 | def trainHNNStage2(job): 192 | network = HNNStage2() 193 | network.load_job(job) 194 | network.build() 195 | network.train(epochs=500, phase=0) 196 | 197 | def trainHNNStage2R(job): 198 | network = HNNStage2R() 199 | network.load_job(job) 200 | network.build() 201 | network.train(epochs=500, phase=0) 202 | network.train(epochs=100, phase=1) 203 | network.train(epochs=200, phase=2) 204 | network.learning_rate = network.learning_rate/10 205 | network.train(epochs=200, phase=3) 206 | network.learning_rate = network.learning_rate/10 207 | network.train(epochs=200, phase=4) 208 | 209 | '''*********************************************** 210 | * Script 211 | ***********************************************''' 212 | 213 | if __name__ == '__main__': 214 | if len(sys.argv) < 2: 215 | train_queue() 216 | else: 217 | model_file = sys.argv[1] 218 | train_single(model_file) 219 | -------------------------------------------------------------------------------- /data/training2017/splits/split_5_6_14/test3.json: -------------------------------------------------------------------------------- 1 | [1, 3, 13, 17, 21, 24, 32, 35, 40, 43, 49, 59, 71, 79, 84, 87, 92, 111, 119, 124, 125, 127, 130, 134, 136, 137, 140, 142, 143, 155, 156, 162, 163, 176, 184, 185, 191, 198, 206, 210, 211, 218, 221, 227, 230, 235, 239, 240, 241, 246, 247, 251, 252, 256, 257, 263, 264, 268, 274, 278, 280, 283, 287, 288, 290, 295, 296, 299, 301, 304, 314, 315, 318, 323, 325, 326, 327, 330, 335, 343, 348, 351, 354, 358, 369, 382, 392, 394, 399, 401, 407, 408, 413, 414, 419, 423, 428, 445, 458, 463, 468, 478, 483, 489, 490, 499, 503, 505, 516, 517, 522, 525, 529, 533, 535, 538, 540, 541, 543, 545, 550, 556, 559, 568, 569, 570, 574, 577, 579, 584, 588, 589, 593, 597, 605, 613, 620, 622, 623, 626, 631, 637, 645, 649, 657, 666, 671, 675, 676, 688, 690, 708, 715, 720, 727, 730, 731, 733, 736, 738, 740, 750, 757, 758, 760, 763, 769, 777, 778, 780, 781, 784, 791, 794, 807, 808, 814, 815, 819, 822, 823, 825, 830, 834, 835, 836, 839, 845, 846, 847, 850, 851, 870, 877, 879, 887, 889, 891, 895, 899, 907, 912, 914, 915, 917, 918, 928, 929, 931, 934, 935, 937, 943, 956, 957, 958, 962, 978, 988, 1005, 1017, 1018, 1022, 1032, 1038, 1053, 1057, 1059, 1060, 1067, 1077, 1078, 1084, 
1087, 1094, 1099, 1100, 1101, 1111, 1116, 1126, 1130, 1136, 1141, 1144, 1147, 1151, 1159, 1165, 1172, 1182, 1193, 1195, 1203, 1204, 1211, 1217, 1220, 1228, 1238, 1249, 1260, 1264, 1268, 1286, 1288, 1300, 1301, 1304, 1311, 1317, 1320, 1329, 1333, 1339, 1340, 1341, 1351, 1357, 1364, 1365, 1374, 1378, 1392, 1395, 1400, 1403, 1405, 1412, 1422, 1423, 1431, 1433, 1435, 1436, 1440, 1451, 1456, 1475, 1477, 1482, 1491, 1498, 1527, 1532, 1544, 1546, 1552, 1556, 1558, 1561, 1572, 1585, 1594, 1601, 1604, 1607, 1609, 1611, 1619, 1621, 1624, 1625, 1627, 1634, 1637, 1639, 1657, 1662, 1664, 1667, 1668, 1670, 1675, 1682, 1687, 1692, 1696, 1702, 1704, 1706, 1711, 1713, 1714, 1725, 1727, 1730, 1732, 1733, 1746, 1747, 1753, 1754, 1760, 1763, 1772, 1774, 1775, 1780, 1781, 1782, 1799, 1826, 1828, 1829, 1837, 1838, 1844, 1846, 1850, 1855, 1856, 1864, 1871, 1886, 1887, 1888, 1899, 1900, 1901, 1904, 1911, 1925, 1927, 1928, 1935, 1937, 1949, 1950, 1951, 1952, 1959, 1965, 1972, 1973, 1976, 1979, 1987, 1995, 2013, 2017, 2020, 2023, 2027, 2028, 2034, 2035, 2036, 2037, 2041, 2056, 2057, 2064, 2065, 2077, 2084, 2087, 2098, 2101, 2104, 2116, 2117, 2123, 2132, 2136, 2141, 2144, 2166, 2170, 2179, 2180, 2186, 2189, 2190, 2196, 2225, 2237, 2246, 2250, 2251, 2252, 2253, 2254, 2276, 2278, 2281, 2288, 2292, 2300, 2309, 2315, 2323, 2325, 2326, 2331, 2336, 2350, 2351, 2358, 2362, 2367, 2369, 2372, 2373, 2375, 2386, 2391, 2393, 2405, 2409, 2414, 2419, 2436, 2437, 2439, 2443, 2456, 2459, 2463, 2467, 2472, 2475, 2477, 2479, 2486, 2488, 2495, 2499, 2503, 2504, 2506, 2509, 2512, 2517, 2525, 2530, 2532, 2544, 2554, 2556, 2558, 2560, 2561, 2563, 2591, 2602, 2604, 2614, 2622, 2625, 2639, 2640, 2656, 2662, 2664, 2668, 2676, 2679, 2681, 2682, 2686, 2694, 2699, 2701, 2702, 2703, 2706, 2718, 2719, 2724, 2731, 2735, 2738, 2741, 2758, 2763, 2765, 2769, 2771, 2777, 2781, 2783, 2785, 2794, 2799, 2812, 2819, 2822, 2823, 2825, 2826, 2828, 2831, 2833, 2850, 2859, 2864, 2871, 2872, 2878, 2883, 2886, 2892, 2894, 2898, 2901, 2902, 2904, 2905, 2906, 2910, 2924, 2934, 2942, 2945, 2947, 2948, 2949, 2951, 2955, 2957, 2960, 2964, 2965, 2979, 2984, 2996, 2997, 3005, 3007, 3017, 3024, 3026, 3031, 3033, 3036, 3041, 3042, 3046, 3058, 3060, 3068, 3069, 3074, 3075, 3078, 3081, 3083, 3085, 3086, 3097, 3101, 3107, 3110, 3118, 3120, 3133, 3138, 3140, 3141, 3147, 3152, 3154, 3165, 3168, 3173, 3177, 3187, 3189, 3190, 3191, 3192, 3194, 3201, 3202, 3206, 3213, 3214, 3218, 3221, 3223, 3229, 3230, 3234, 3240, 3244, 3252, 3265, 3275, 3286, 3287, 3296, 3297, 3301, 3311, 3316, 3324, 3326, 3330, 3331, 3336, 3339, 3340, 3343, 3347, 3351, 3365, 3370, 3373, 3374, 3375, 3381, 3382, 3385, 3386, 3390, 3398, 3407, 3409, 3417, 3426, 3435, 3436, 3441, 3443, 3461, 3470, 3473, 3474, 3484, 3488, 3497, 3502, 3512, 3517, 3521, 3524, 3527, 3530, 3544, 3547, 3548, 3550, 3554, 3562, 3570, 3572, 3577, 3578, 3583, 3588, 3592, 3594, 3596, 3599, 3601, 3602, 3605, 3607, 3614, 3615, 3618, 3619, 3621, 3628, 3629, 3633, 3642, 3649, 3657, 3662, 3681, 3692, 3702, 3712, 3717, 3733, 3739, 3742, 3749, 3751, 3755, 3756, 3764, 3770, 3789, 3791, 3797, 3798, 3804, 3806, 3821, 3834, 3841, 3842, 3848, 3854, 3856, 3858, 3868, 3873, 3880, 3881, 3884, 3889, 3904, 3907, 3924, 3926, 3933, 3938, 3939, 3942, 3943, 3944, 3953, 3955, 3973, 3985, 3988, 3994, 3997, 4000, 4002, 4005, 4006, 4022, 4024, 4025, 4031, 4032, 4035, 4039, 4047, 4053, 4057, 4066, 4070, 4074, 4075, 4076, 4082, 4086, 4093, 4102, 4103, 4104, 4108, 4109, 4116, 4117, 4118, 4119, 4120, 4133, 4134, 4136, 4138, 4140, 4142, 4158, 4174, 4181, 4185, 4187, 
4190, 4192, 4193, 4198, 4206, 4218, 4221, 4227, 4228, 4230, 4234, 4236, 4238, 4241, 4245, 4251, 4254, 4256, 4258, 4261, 4262, 4265, 4267, 4280, 4281, 4285, 4292, 4310, 4311, 4314, 4321, 4328, 4329, 4332, 4334, 4339, 4340, 4343, 4366, 4367, 4368, 4372, 4378, 4379, 4381, 4390, 4395, 4397, 4399, 4404, 4410, 4413, 4417, 4421, 4426, 4436, 4451, 4454, 4457, 4464, 4469, 4470, 4473, 4474, 4477, 4478, 4480, 4485, 4494, 4498, 4519, 4523, 4524, 4530, 4532, 4538, 4552, 4553, 4556, 4563, 4565, 4567, 4571, 4574, 4579, 4580, 4581, 4583, 4584, 4586, 4593, 4601, 4602, 4613, 4614, 4618, 4622, 4626, 4633, 4636, 4637, 4638, 4643, 4652, 4654, 4656, 4662, 4671, 4678, 4679, 4710, 4714, 4715, 4718, 4730, 4733, 4739, 4745, 4746, 4750, 4760, 4771, 4780, 4781, 4782, 4786, 4788, 4790, 4794, 4802, 4803, 4806, 4819, 4823, 4825, 4832, 4833, 4834, 4835, 4838, 4839, 4841, 4844, 4857, 4859, 4864, 4867, 4868, 4872, 4875, 4877, 4880, 4905, 4907, 4915, 4916, 4919, 4928, 4931, 4935, 4949, 4955, 4961, 4964, 4967, 4968, 4975, 4979, 4984, 4986, 4994, 4999, 5003, 5011, 5015, 5016, 5021, 5025, 5026, 5029, 5035, 5051, 5055, 5057, 5059, 5062, 5063, 5066, 5070, 5077, 5083, 5089, 5091, 5103, 5111, 5116, 5117, 5120, 5122, 5125, 5128, 5133, 5136, 5137, 5141, 5144, 5145, 5149, 5151, 5158, 5173, 5176, 5178, 5182, 5194, 5195, 5199, 5204, 5205, 5211, 5229, 5233, 5236, 5238, 5242, 5257, 5258, 5264, 5266, 5269, 5279, 5286, 5287, 5288, 5290, 5292, 5293, 5295, 5297, 5300, 5306, 5312, 5328, 5331, 5332, 5333, 5339, 5342, 5347, 5349, 5352, 5353, 5358, 5363, 5365, 5366, 5372, 5374, 5376, 5383, 5396, 5401, 5402, 5423, 5425, 5437, 5454, 5459, 5465, 5466, 5468, 5472, 5480, 5484, 5485, 5491, 5504, 5509, 5513, 5514, 5515, 5517, 5518, 5522, 5530, 5535, 5538, 5552, 5557, 5579, 5580, 5582, 5585, 5589, 5590, 5592, 5594, 5604, 5606, 5611, 5615, 5617, 5619, 5626, 5627, 5635, 5643, 5656, 5657, 5665, 5669, 5677, 5681, 5686, 5700, 5711, 5716, 5719, 5720, 5729, 5737, 5739, 5745, 5752, 5754, 5755, 5760, 5761, 5762, 5765, 5776, 5783, 5790, 5793, 5796, 5806, 5808, 5816, 5824, 5835, 5836, 5841, 5852, 5862, 5864, 5870, 5877, 5881, 5884, 5896, 5897, 5901, 5904, 5905, 5910, 5914, 5918, 5919, 5922, 5944, 5948, 5950, 5962, 5984, 5985, 5986, 5993, 5998, 6002, 6005, 6010, 6013, 6014, 6016, 6022, 6023, 6025, 6026, 6030, 6031, 6034, 6038, 6040, 6047, 6048, 6050, 6057, 6058, 6061, 6062, 6069, 6070, 6077, 6082, 6083, 6088, 6091, 6104, 6105, 6108, 6113, 6116, 6117, 6119, 6139, 6143, 6145, 6147, 6149, 6150, 6152, 6155, 6168, 6175, 6177, 6181, 6183, 6197, 6198, 6200, 6203, 6204, 6205, 6210, 6212, 6215, 6216, 6217, 6220, 6233, 6234, 6243, 6244, 6246, 6254, 6262, 6266, 6267, 6277, 6295, 6297, 6298, 6302, 6303, 6304, 6311, 6321, 6331, 6332, 6333, 6334, 6335, 6345, 6346, 6352, 6354, 6360, 6365, 6367, 6371, 6378, 6380, 6388, 6391, 6393, 6401, 6402, 6404, 6410, 6414, 6423, 6424, 6430, 6438, 6439, 6441, 6443, 6450, 6458, 6459, 6476, 6477, 6478, 6479, 6486, 6487, 6488, 6500, 6503, 6504, 6506, 6511, 6516, 6521, 6523, 6528, 6531, 6534, 6538, 6539, 6545, 6554, 6555, 6556, 6557, 6561, 6569, 6571, 6574, 6576, 6589, 6606, 6610, 6619, 6623, 6624, 6632, 6635, 6638, 6648, 6653, 6661, 6665, 6669, 6674, 6679, 6682, 6683, 6692, 6700, 6703, 6704, 6726, 6729, 6735, 6739, 6740, 6743, 6752, 6754, 6759, 6765, 6771, 6777, 6780, 6782, 6787, 6791, 6793, 6803, 6805, 6809, 6815, 6819, 6820, 6835, 6847, 6852, 6855, 6859, 6860, 6861, 6866, 6872, 6874, 6880, 6887, 6890, 6893, 6898, 6899, 6904, 6906, 6907, 6908, 6909, 6910, 6911, 6913, 6916, 6927, 6931, 6932, 6934, 6939, 6951, 6952, 6963, 6975, 6996, 6997, 6999, 
7003, 7005, 7014, 7021, 7029, 7030, 7031, 7035, 7050, 7051, 7056, 7057, 7060, 7066, 7083, 7084, 7085, 7103, 7109, 7113, 7115, 7120, 7127, 7134, 7137, 7147, 7151, 7154, 7160, 7173, 7177, 7181, 7189, 7191, 7196, 7202, 7209, 7210, 7219, 7220, 7221, 7223, 7228, 7230, 7235, 7244, 7250, 7257, 7262, 7263, 7271, 7274, 7278, 7279, 7287, 7288, 7291, 7295, 7297, 7318, 7331, 7350, 7357, 7361, 7366, 7367, 7370, 7371, 7372, 7380, 7387, 7388, 7390, 7393, 7399, 7400, 7402, 7403, 7405, 7413, 7425, 7427, 7431, 7433, 7434, 7445, 7456, 7461, 7464, 7468, 7470, 7473, 7480, 7489, 7491, 7492, 7493, 7496, 7510, 7517, 7522, 7523, 7524, 7526, 7531, 7552, 7558, 7559, 7564, 7571, 7575, 7581, 7589, 7595, 7599, 7603, 7621, 7636, 7645, 7651, 7659, 7663, 7667, 7673, 7676, 7677, 7684, 7693, 7698, 7713, 7714, 7725, 7726, 7728, 7729, 7732, 7738, 7740, 7742, 7746, 7752, 7757, 7758, 7771, 7774, 7775, 7777, 7778, 7779, 7784, 7785, 7798, 7809, 7819, 7824, 7834, 7837, 7840, 7841, 7843, 7844, 7846, 7856, 7867, 7873, 7899, 7901, 7920, 7923, 7926, 7928, 7932, 7934, 7942, 7943, 7952, 7954, 7956, 7962, 7963, 7972, 7976, 7980, 7981, 7987, 7989, 7994, 7998, 8009, 8015, 8029, 8040, 8059, 8063, 8077, 8078, 8081, 8084, 8085, 8088, 8089, 8095, 8107, 8110, 8137, 8147, 8148, 8151, 8154, 8155, 8176, 8179, 8180, 8188, 8190, 8199, 8206, 8213, 8223, 8227, 8234, 8254, 8255, 8256, 8258, 8260, 8262, 8271, 8277, 8278, 8279, 8280, 8285, 8286, 8287, 8294, 8299, 8305, 8306, 8307, 8309, 8313, 8322, 8327, 8328, 8343, 8344, 8355, 8356, 8357, 8361, 8366, 8368, 8372, 8377, 8379, 8385, 8388, 8392, 8403, 8408, 8420, 8423, 8426, 8427, 8448, 8450, 8454, 8462, 8465, 8469, 8470, 8477, 8481, 8484, 8487, 8492, 8493, 8511, 8512, 8514, 8516, 8518, 8523, 8524, 8525, 8527, 8528] -------------------------------------------------------------------------------- /data/training2017/splits/split_5_6_14/test1.json: -------------------------------------------------------------------------------- 1 | [6, 8, 14, 18, 31, 42, 45, 53, 58, 62, 63, 66, 67, 69, 81, 91, 94, 95, 109, 112, 117, 132, 135, 138, 146, 157, 164, 165, 166, 169, 174, 177, 182, 192, 193, 195, 207, 209, 217, 226, 233, 249, 254, 260, 262, 276, 279, 281, 282, 289, 293, 294, 298, 303, 305, 307, 311, 312, 313, 316, 331, 332, 334, 338, 340, 341, 357, 365, 376, 378, 381, 383, 385, 387, 389, 395, 397, 400, 409, 424, 427, 432, 436, 437, 438, 439, 442, 443, 447, 449, 450, 451, 452, 454, 459, 461, 464, 465, 467, 469, 471, 472, 473, 481, 487, 488, 491, 494, 498, 508, 513, 519, 524, 531, 534, 537, 549, 551, 558, 560, 566, 572, 576, 578, 581, 585, 587, 601, 607, 609, 610, 618, 619, 628, 633, 640, 646, 655, 658, 659, 661, 662, 665, 670, 674, 677, 678, 680, 681, 686, 687, 689, 691, 692, 693, 694, 695, 698, 699, 706, 711, 713, 714, 718, 723, 741, 742, 749, 761, 762, 766, 767, 790, 803, 806, 810, 812, 813, 817, 820, 828, 829, 838, 842, 849, 852, 853, 858, 860, 871, 884, 894, 926, 933, 945, 950, 951, 952, 959, 963, 964, 967, 968, 970, 979, 983, 989, 991, 998, 1000, 1007, 1008, 1009, 1021, 1024, 1026, 1028, 1039, 1043, 1047, 1051, 1061, 1063, 1066, 1068, 1076, 1082, 1089, 1090, 1093, 1098, 1105, 1106, 1109, 1119, 1127, 1128, 1131, 1132, 1146, 1148, 1162, 1166, 1168, 1169, 1173, 1174, 1175, 1181, 1183, 1199, 1206, 1207, 1210, 1213, 1216, 1218, 1237, 1240, 1248, 1250, 1251, 1257, 1262, 1266, 1272, 1273, 1275, 1277, 1278, 1281, 1283, 1287, 1319, 1322, 1326, 1328, 1331, 1337, 1343, 1345, 1354, 1356, 1360, 1369, 1371, 1376, 1383, 1384, 1391, 1398, 1404, 1410, 1413, 1424, 1426, 1441, 1442, 1454, 1457, 1464, 1481, 1483, 1487, 1490, 1492, 
1493, 1494, 1495, 1501, 1517, 1520, 1522, 1526, 1536, 1537, 1542, 1548, 1553, 1554, 1557, 1563, 1567, 1569, 1570, 1574, 1576, 1578, 1580, 1581, 1591, 1592, 1603, 1608, 1612, 1614, 1617, 1618, 1620, 1622, 1623, 1626, 1628, 1632, 1642, 1643, 1656, 1659, 1661, 1674, 1676, 1677, 1678, 1681, 1685, 1688, 1697, 1701, 1703, 1705, 1707, 1710, 1715, 1720, 1722, 1724, 1726, 1737, 1742, 1748, 1752, 1755, 1766, 1767, 1769, 1771, 1779, 1800, 1803, 1807, 1808, 1809, 1820, 1822, 1827, 1831, 1853, 1859, 1862, 1865, 1868, 1869, 1882, 1891, 1893, 1894, 1895, 1898, 1905, 1906, 1914, 1921, 1931, 1942, 1943, 1944, 1945, 1954, 1955, 1956, 1962, 1967, 1968, 1969, 1970, 1978, 1980, 1982, 1983, 1984, 1989, 1992, 1997, 2001, 2002, 2003, 2008, 2009, 2011, 2015, 2021, 2026, 2038, 2045, 2050, 2051, 2052, 2053, 2067, 2071, 2074, 2075, 2076, 2082, 2088, 2089, 2100, 2108, 2121, 2122, 2124, 2128, 2131, 2133, 2135, 2140, 2142, 2146, 2147, 2148, 2154, 2159, 2162, 2163, 2167, 2176, 2177, 2188, 2195, 2199, 2201, 2204, 2206, 2208, 2214, 2216, 2220, 2226, 2234, 2235, 2241, 2242, 2243, 2264, 2267, 2272, 2297, 2301, 2303, 2304, 2308, 2311, 2322, 2338, 2342, 2363, 2365, 2374, 2379, 2392, 2394, 2399, 2404, 2406, 2408, 2411, 2416, 2418, 2432, 2435, 2441, 2448, 2449, 2464, 2470, 2471, 2474, 2476, 2484, 2487, 2490, 2491, 2493, 2494, 2496, 2497, 2513, 2514, 2515, 2523, 2531, 2533, 2539, 2541, 2546, 2557, 2564, 2567, 2574, 2577, 2578, 2580, 2586, 2594, 2597, 2601, 2607, 2609, 2610, 2617, 2619, 2621, 2631, 2636, 2638, 2641, 2646, 2649, 2651, 2669, 2671, 2674, 2677, 2683, 2691, 2692, 2717, 2725, 2744, 2749, 2750, 2752, 2754, 2761, 2776, 2778, 2782, 2787, 2788, 2789, 2795, 2797, 2798, 2807, 2815, 2832, 2834, 2837, 2838, 2843, 2846, 2856, 2870, 2874, 2881, 2887, 2888, 2891, 2899, 2900, 2909, 2914, 2915, 2941, 2946, 2956, 2967, 2977, 2983, 2998, 3019, 3030, 3034, 3039, 3040, 3049, 3052, 3054, 3061, 3064, 3067, 3088, 3096, 3102, 3104, 3105, 3113, 3119, 3122, 3125, 3126, 3143, 3148, 3151, 3164, 3166, 3169, 3170, 3172, 3181, 3183, 3193, 3196, 3199, 3203, 3207, 3222, 3224, 3232, 3236, 3239, 3243, 3245, 3249, 3260, 3261, 3262, 3266, 3271, 3274, 3279, 3281, 3284, 3290, 3293, 3306, 3307, 3309, 3322, 3323, 3328, 3333, 3337, 3338, 3342, 3355, 3358, 3359, 3362, 3379, 3392, 3393, 3394, 3397, 3400, 3402, 3408, 3410, 3411, 3421, 3438, 3440, 3442, 3447, 3452, 3453, 3459, 3465, 3468, 3477, 3479, 3480, 3481, 3482, 3486, 3490, 3491, 3494, 3495, 3501, 3505, 3508, 3520, 3522, 3525, 3531, 3533, 3534, 3538, 3539, 3542, 3551, 3555, 3560, 3565, 3576, 3590, 3600, 3609, 3616, 3624, 3626, 3631, 3639, 3641, 3648, 3655, 3663, 3666, 3671, 3672, 3673, 3678, 3680, 3683, 3689, 3691, 3694, 3696, 3700, 3707, 3714, 3719, 3722, 3723, 3724, 3727, 3737, 3748, 3754, 3757, 3758, 3761, 3762, 3763, 3772, 3774, 3781, 3784, 3790, 3805, 3807, 3814, 3817, 3820, 3823, 3826, 3830, 3831, 3835, 3844, 3846, 3849, 3850, 3861, 3864, 3874, 3885, 3887, 3890, 3895, 3898, 3905, 3906, 3908, 3909, 3916, 3918, 3920, 3927, 3928, 3935, 3945, 3946, 3947, 3956, 3960, 3961, 3966, 3968, 3975, 3982, 3986, 3987, 3989, 3991, 3992, 3993, 3998, 4001, 4019, 4033, 4037, 4040, 4041, 4042, 4046, 4055, 4064, 4068, 4078, 4079, 4090, 4095, 4096, 4098, 4099, 4101, 4112, 4122, 4132, 4139, 4143, 4146, 4150, 4152, 4157, 4163, 4164, 4165, 4175, 4176, 4178, 4186, 4203, 4209, 4210, 4211, 4222, 4229, 4233, 4235, 4237, 4239, 4243, 4257, 4271, 4274, 4278, 4279, 4288, 4290, 4294, 4297, 4299, 4300, 4303, 4304, 4306, 4324, 4336, 4337, 4338, 4341, 4346, 4347, 4350, 4353, 4356, 4360, 4361, 4362, 4365, 4371, 4382, 4386, 4394, 4400, 
4406, 4409, 4419, 4425, 4431, 4435, 4437, 4443, 4444, 4449, 4453, 4462, 4475, 4479, 4481, 4486, 4495, 4496, 4503, 4505, 4507, 4511, 4512, 4514, 4516, 4518, 4520, 4531, 4534, 4536, 4539, 4540, 4543, 4548, 4549, 4559, 4572, 4573, 4576, 4585, 4589, 4592, 4595, 4599, 4610, 4611, 4612, 4617, 4621, 4623, 4625, 4628, 4642, 4648, 4649, 4650, 4660, 4663, 4666, 4668, 4670, 4674, 4682, 4683, 4691, 4706, 4717, 4732, 4741, 4744, 4747, 4754, 4756, 4757, 4759, 4762, 4770, 4773, 4775, 4776, 4779, 4793, 4797, 4799, 4800, 4801, 4805, 4809, 4813, 4814, 4816, 4820, 4822, 4826, 4840, 4842, 4845, 4853, 4860, 4861, 4863, 4874, 4881, 4883, 4884, 4885, 4887, 4889, 4894, 4899, 4901, 4902, 4903, 4906, 4909, 4910, 4913, 4914, 4920, 4925, 4927, 4929, 4936, 4939, 4942, 4944, 4947, 4950, 4951, 4952, 4960, 4962, 4966, 4970, 4973, 4977, 4980, 4985, 4992, 5008, 5013, 5019, 5020, 5023, 5028, 5032, 5041, 5043, 5058, 5069, 5072, 5081, 5093, 5096, 5098, 5100, 5101, 5109, 5119, 5121, 5131, 5135, 5156, 5159, 5166, 5190, 5198, 5209, 5224, 5226, 5230, 5232, 5241, 5247, 5248, 5271, 5272, 5273, 5275, 5277, 5298, 5307, 5310, 5313, 5317, 5320, 5323, 5334, 5341, 5344, 5356, 5359, 5361, 5362, 5367, 5386, 5390, 5392, 5395, 5409, 5411, 5415, 5419, 5420, 5426, 5435, 5449, 5453, 5461, 5470, 5471, 5474, 5481, 5486, 5492, 5493, 5495, 5496, 5498, 5506, 5527, 5531, 5532, 5534, 5540, 5546, 5547, 5550, 5564, 5581, 5591, 5593, 5597, 5602, 5607, 5618, 5623, 5631, 5633, 5638, 5639, 5647, 5652, 5654, 5673, 5679, 5682, 5690, 5702, 5706, 5707, 5710, 5712, 5715, 5723, 5730, 5734, 5746, 5747, 5749, 5756, 5763, 5766, 5777, 5779, 5782, 5785, 5788, 5794, 5807, 5814, 5817, 5821, 5823, 5829, 5832, 5834, 5837, 5842, 5846, 5849, 5850, 5853, 5856, 5859, 5865, 5866, 5869, 5872, 5882, 5885, 5887, 5893, 5902, 5903, 5907, 5913, 5921, 5927, 5928, 5931, 5935, 5941, 5946, 5947, 5953, 5955, 5959, 5964, 5966, 5969, 5975, 5976, 5981, 5982, 5992, 5994, 5996, 5997, 6001, 6003, 6011, 6018, 6028, 6029, 6071, 6074, 6093, 6102, 6107, 6110, 6129, 6144, 6169, 6174, 6182, 6185, 6194, 6195, 6201, 6222, 6224, 6227, 6232, 6248, 6250, 6268, 6272, 6280, 6281, 6283, 6285, 6288, 6291, 6293, 6299, 6312, 6313, 6317, 6318, 6324, 6326, 6328, 6330, 6336, 6356, 6361, 6369, 6374, 6376, 6385, 6395, 6396, 6397, 6405, 6407, 6408, 6421, 6422, 6431, 6434, 6437, 6442, 6453, 6457, 6469, 6470, 6493, 6497, 6498, 6507, 6508, 6510, 6515, 6519, 6522, 6524, 6526, 6527, 6530, 6532, 6550, 6553, 6558, 6562, 6563, 6566, 6570, 6582, 6590, 6596, 6603, 6607, 6608, 6611, 6612, 6620, 6627, 6629, 6630, 6633, 6642, 6646, 6650, 6652, 6654, 6663, 6667, 6675, 6681, 6685, 6686, 6690, 6691, 6693, 6696, 6705, 6713, 6714, 6717, 6718, 6721, 6725, 6730, 6744, 6745, 6749, 6756, 6758, 6761, 6763, 6768, 6770, 6772, 6775, 6778, 6779, 6784, 6785, 6788, 6794, 6798, 6804, 6806, 6818, 6821, 6826, 6827, 6830, 6831, 6832, 6833, 6837, 6839, 6840, 6842, 6850, 6854, 6868, 6871, 6881, 6882, 6883, 6886, 6892, 6895, 6917, 6924, 6935, 6936, 6944, 6949, 6964, 6966, 6970, 6978, 6979, 6980, 6987, 6988, 6998, 7000, 7001, 7006, 7007, 7018, 7026, 7027, 7032, 7036, 7037, 7038, 7044, 7048, 7052, 7071, 7079, 7081, 7088, 7092, 7093, 7097, 7098, 7104, 7108, 7110, 7111, 7117, 7118, 7121, 7122, 7140, 7144, 7167, 7175, 7184, 7199, 7200, 7225, 7226, 7227, 7231, 7232, 7234, 7239, 7243, 7252, 7254, 7264, 7269, 7273, 7275, 7277, 7286, 7289, 7290, 7300, 7303, 7306, 7317, 7319, 7321, 7323, 7327, 7335, 7336, 7343, 7345, 7348, 7349, 7353, 7354, 7359, 7364, 7384, 7389, 7398, 7408, 7416, 7419, 7430, 7439, 7440, 7446, 7448, 7454, 7458, 7462, 7465, 7466, 7471, 7476, 
7477, 7478, 7485, 7497, 7498, 7501, 7502, 7503, 7505, 7506, 7513, 7516, 7534, 7535, 7536, 7541, 7550, 7554, 7557, 7573, 7576, 7579, 7583, 7584, 7586, 7591, 7596, 7598, 7605, 7608, 7611, 7619, 7630, 7634, 7635, 7642, 7646, 7649, 7662, 7670, 7672, 7675, 7680, 7681, 7682, 7701, 7702, 7704, 7706, 7710, 7715, 7724, 7733, 7735, 7737, 7747, 7761, 7764, 7766, 7782, 7783, 7789, 7796, 7806, 7807, 7818, 7825, 7827, 7828, 7830, 7831, 7848, 7853, 7861, 7865, 7866, 7872, 7874, 7879, 7885, 7890, 7894, 7906, 7913, 7915, 7918, 7919, 7921, 7927, 7931, 7933, 7939, 7941, 7944, 7948, 7957, 7960, 7961, 7964, 7966, 7969, 7970, 7974, 7975, 7982, 7991, 7999, 8005, 8016, 8027, 8031, 8041, 8043, 8044, 8045, 8048, 8053, 8058, 8060, 8062, 8068, 8076, 8093, 8106, 8115, 8118, 8121, 8123, 8127, 8134, 8135, 8142, 8145, 8146, 8159, 8165, 8169, 8184, 8191, 8195, 8196, 8202, 8205, 8208, 8211, 8214, 8218, 8222, 8225, 8230, 8241, 8245, 8246, 8261, 8267, 8270, 8274, 8275, 8289, 8291, 8292, 8293, 8296, 8297, 8304, 8318, 8331, 8334, 8340, 8345, 8346, 8347, 8348, 8353, 8362, 8365, 8373, 8374, 8383, 8384, 8393, 8394, 8400, 8404, 8406, 8410, 8414, 8421, 8422, 8424, 8425, 8428, 8432, 8435, 8442, 8443, 8451, 8464, 8466, 8468, 8471, 8475, 8480, 8482, 8483, 8491, 8494, 8495, 8496, 8497, 8504, 8507, 8508, 8510, 8515, 8526] -------------------------------------------------------------------------------- /data/training2017/splits/split_5_6_14/test0.json: -------------------------------------------------------------------------------- 1 | [5, 16, 20, 25, 26, 38, 41, 44, 48, 52, 54, 68, 70, 72, 73, 75, 76, 80, 83, 85, 86, 89, 90, 97, 99, 100, 101, 104, 106, 108, 110, 114, 115, 123, 128, 129, 131, 139, 148, 149, 150, 151, 152, 159, 160, 170, 171, 181, 183, 186, 187, 188, 189, 190, 194, 201, 202, 204, 215, 216, 220, 223, 232, 237, 242, 243, 250, 258, 266, 277, 300, 308, 309, 324, 328, 329, 333, 339, 353, 356, 361, 362, 370, 372, 379, 402, 406, 410, 416, 417, 418, 434, 444, 455, 466, 476, 480, 482, 484, 486, 497, 514, 521, 526, 542, 544, 555, 557, 563, 564, 567, 571, 590, 591, 592, 598, 599, 600, 603, 604, 611, 612, 621, 635, 636, 639, 641, 644, 647, 648, 653, 660, 673, 682, 684, 685, 701, 705, 707, 710, 712, 716, 719, 721, 722, 724, 726, 734, 747, 753, 765, 770, 771, 775, 779, 785, 789, 792, 793, 800, 802, 818, 821, 824, 827, 832, 833, 840, 844, 854, 855, 862, 866, 867, 868, 873, 874, 881, 882, 890, 897, 898, 900, 902, 908, 920, 922, 924, 932, 938, 940, 941, 944, 946, 965, 974, 980, 982, 986, 992, 997, 999, 1002, 1006, 1020, 1025, 1027, 1029, 1040, 1042, 1045, 1049, 1050, 1054, 1065, 1072, 1073, 1074, 1075, 1079, 1081, 1085, 1095, 1104, 1113, 1114, 1115, 1117, 1120, 1121, 1124, 1125, 1129, 1134, 1135, 1138, 1140, 1150, 1154, 1163, 1171, 1176, 1179, 1180, 1190, 1192, 1194, 1197, 1198, 1212, 1219, 1222, 1224, 1226, 1227, 1231, 1232, 1234, 1239, 1241, 1244, 1255, 1261, 1265, 1276, 1282, 1285, 1291, 1299, 1302, 1307, 1312, 1313, 1325, 1327, 1330, 1332, 1334, 1335, 1336, 1344, 1347, 1348, 1350, 1352, 1353, 1355, 1373, 1381, 1382, 1385, 1389, 1394, 1399, 1401, 1406, 1411, 1418, 1420, 1430, 1437, 1438, 1439, 1443, 1444, 1445, 1447, 1450, 1467, 1468, 1469, 1473, 1474, 1479, 1480, 1484, 1486, 1499, 1503, 1504, 1505, 1507, 1508, 1509, 1512, 1518, 1521, 1523, 1525, 1531, 1533, 1539, 1541, 1543, 1545, 1547, 1555, 1568, 1571, 1583, 1588, 1589, 1593, 1596, 1598, 1599, 1605, 1610, 1615, 1616, 1630, 1631, 1633, 1636, 1640, 1641, 1645, 1646, 1647, 1648, 1653, 1663, 1691, 1694, 1712, 1721, 1728, 1729, 1731, 1739, 1740, 1743, 1744, 1745, 1757, 1758, 1759, 1764, 
1765, 1773, 1784, 1785, 1786, 1789, 1792, 1794, 1797, 1798, 1802, 1804, 1814, 1815, 1819, 1823, 1830, 1834, 1835, 1847, 1849, 1861, 1872, 1873, 1875, 1880, 1889, 1892, 1907, 1908, 1910, 1917, 1918, 1919, 1922, 1926, 1930, 1933, 1957, 1958, 1960, 1981, 1985, 1988, 1996, 2006, 2007, 2010, 2014, 2016, 2029, 2031, 2042, 2043, 2046, 2047, 2048, 2049, 2054, 2055, 2058, 2063, 2066, 2068, 2072, 2080, 2085, 2093, 2094, 2097, 2105, 2107, 2125, 2137, 2138, 2143, 2149, 2153, 2168, 2178, 2182, 2184, 2193, 2197, 2203, 2209, 2211, 2212, 2215, 2227, 2231, 2233, 2239, 2244, 2245, 2249, 2258, 2259, 2262, 2263, 2268, 2269, 2270, 2271, 2277, 2279, 2282, 2283, 2289, 2293, 2302, 2306, 2307, 2313, 2316, 2327, 2329, 2330, 2332, 2337, 2341, 2345, 2347, 2348, 2352, 2355, 2357, 2359, 2360, 2370, 2371, 2378, 2382, 2385, 2388, 2390, 2397, 2400, 2407, 2410, 2417, 2423, 2426, 2427, 2429, 2440, 2444, 2447, 2450, 2454, 2457, 2478, 2480, 2485, 2489, 2492, 2498, 2502, 2505, 2516, 2518, 2520, 2521, 2522, 2524, 2527, 2529, 2536, 2537, 2542, 2548, 2549, 2562, 2565, 2569, 2576, 2579, 2582, 2584, 2592, 2593, 2596, 2606, 2612, 2618, 2623, 2628, 2629, 2630, 2635, 2644, 2648, 2657, 2665, 2666, 2672, 2673, 2685, 2690, 2697, 2698, 2704, 2709, 2710, 2711, 2712, 2714, 2715, 2721, 2727, 2728, 2729, 2730, 2734, 2736, 2737, 2739, 2743, 2748, 2756, 2759, 2760, 2764, 2767, 2772, 2773, 2780, 2784, 2793, 2800, 2802, 2803, 2804, 2824, 2829, 2830, 2840, 2841, 2845, 2848, 2851, 2852, 2853, 2858, 2867, 2868, 2877, 2882, 2889, 2893, 2897, 2911, 2912, 2913, 2918, 2919, 2923, 2926, 2930, 2932, 2935, 2937, 2938, 2952, 2963, 2970, 2974, 2981, 2986, 2990, 2994, 3002, 3012, 3013, 3014, 3016, 3020, 3025, 3029, 3032, 3035, 3043, 3044, 3045, 3047, 3048, 3051, 3053, 3063, 3080, 3082, 3084, 3091, 3093, 3100, 3112, 3115, 3116, 3117, 3129, 3132, 3137, 3160, 3167, 3175, 3176, 3178, 3184, 3188, 3198, 3204, 3205, 3219, 3233, 3241, 3247, 3248, 3250, 3254, 3258, 3263, 3264, 3270, 3272, 3276, 3277, 3280, 3289, 3295, 3298, 3299, 3300, 3303, 3314, 3320, 3327, 3332, 3334, 3335, 3353, 3354, 3356, 3361, 3363, 3364, 3367, 3368, 3378, 3384, 3388, 3399, 3401, 3404, 3414, 3422, 3423, 3424, 3429, 3445, 3448, 3451, 3455, 3462, 3471, 3472, 3475, 3476, 3496, 3498, 3503, 3507, 3516, 3518, 3519, 3526, 3528, 3535, 3541, 3543, 3549, 3552, 3557, 3563, 3564, 3569, 3574, 3584, 3586, 3589, 3603, 3604, 3608, 3611, 3617, 3630, 3638, 3644, 3645, 3647, 3650, 3660, 3675, 3679, 3686, 3697, 3701, 3703, 3705, 3706, 3715, 3718, 3720, 3728, 3734, 3740, 3747, 3750, 3752, 3767, 3769, 3776, 3777, 3786, 3796, 3800, 3802, 3808, 3810, 3815, 3824, 3828, 3840, 3853, 3859, 3867, 3875, 3883, 3888, 3891, 3901, 3911, 3912, 3922, 3925, 3937, 3940, 3949, 3954, 3964, 3970, 3974, 3984, 3990, 4004, 4008, 4010, 4015, 4016, 4020, 4021, 4027, 4034, 4044, 4048, 4051, 4052, 4056, 4058, 4061, 4065, 4067, 4073, 4088, 4092, 4094, 4115, 4121, 4123, 4128, 4131, 4135, 4144, 4145, 4149, 4151, 4153, 4154, 4156, 4161, 4167, 4168, 4169, 4179, 4183, 4184, 4188, 4194, 4199, 4200, 4220, 4223, 4225, 4231, 4232, 4240, 4242, 4250, 4252, 4263, 4268, 4276, 4282, 4305, 4307, 4308, 4312, 4313, 4316, 4317, 4319, 4320, 4325, 4327, 4344, 4345, 4349, 4351, 4352, 4359, 4363, 4364, 4373, 4376, 4405, 4407, 4416, 4418, 4430, 4442, 4448, 4459, 4461, 4465, 4466, 4483, 4488, 4489, 4493, 4497, 4499, 4500, 4509, 4513, 4515, 4521, 4522, 4525, 4527, 4533, 4544, 4550, 4554, 4561, 4566, 4577, 4578, 4582, 4587, 4598, 4600, 4603, 4605, 4606, 4624, 4627, 4644, 4645, 4647, 4651, 4665, 4672, 4688, 4690, 4692, 4695, 4697, 4698, 4700, 4704, 4709, 4711, 4713, 
4716, 4720, 4726, 4729, 4734, 4736, 4737, 4752, 4755, 4765, 4767, 4769, 4787, 4791, 4804, 4810, 4811, 4817, 4818, 4821, 4828, 4846, 4847, 4854, 4856, 4886, 4891, 4898, 4918, 4926, 4943, 4945, 4946, 4954, 4958, 4959, 4963, 4976, 4983, 4989, 4990, 4991, 4993, 4995, 4997, 5000, 5007, 5009, 5018, 5034, 5036, 5039, 5044, 5046, 5049, 5050, 5052, 5054, 5056, 5068, 5071, 5080, 5087, 5088, 5097, 5102, 5105, 5106, 5108, 5113, 5118, 5123, 5130, 5138, 5139, 5153, 5154, 5163, 5164, 5167, 5171, 5187, 5191, 5192, 5193, 5196, 5216, 5217, 5225, 5231, 5245, 5246, 5253, 5254, 5256, 5259, 5262, 5263, 5270, 5301, 5302, 5303, 5304, 5309, 5311, 5314, 5322, 5324, 5327, 5335, 5338, 5351, 5364, 5368, 5369, 5373, 5375, 5380, 5381, 5387, 5388, 5393, 5394, 5403, 5404, 5406, 5408, 5410, 5414, 5416, 5417, 5427, 5428, 5429, 5431, 5432, 5434, 5436, 5438, 5439, 5440, 5451, 5452, 5455, 5458, 5460, 5464, 5473, 5475, 5488, 5489, 5501, 5508, 5512, 5525, 5528, 5533, 5536, 5537, 5539, 5541, 5542, 5545, 5549, 5554, 5555, 5556, 5559, 5562, 5563, 5565, 5569, 5571, 5577, 5583, 5598, 5599, 5601, 5605, 5609, 5613, 5646, 5648, 5649, 5660, 5662, 5666, 5668, 5683, 5693, 5696, 5699, 5703, 5721, 5722, 5725, 5731, 5736, 5740, 5748, 5751, 5768, 5770, 5771, 5780, 5781, 5784, 5786, 5791, 5795, 5803, 5811, 5815, 5818, 5819, 5822, 5825, 5828, 5833, 5845, 5851, 5855, 5857, 5868, 5874, 5875, 5879, 5880, 5889, 5890, 5892, 5895, 5906, 5911, 5917, 5923, 5925, 5926, 5930, 5932, 5934, 5939, 5942, 5956, 5965, 5971, 6000, 6006, 6007, 6019, 6033, 6036, 6039, 6044, 6045, 6055, 6060, 6064, 6073, 6076, 6079, 6080, 6089, 6090, 6100, 6101, 6103, 6109, 6118, 6132, 6134, 6136, 6141, 6151, 6164, 6166, 6167, 6170, 6171, 6180, 6186, 6187, 6188, 6202, 6207, 6208, 6209, 6213, 6219, 6221, 6223, 6225, 6229, 6230, 6231, 6237, 6238, 6249, 6252, 6257, 6264, 6271, 6273, 6282, 6286, 6287, 6292, 6294, 6300, 6301, 6305, 6306, 6314, 6325, 6337, 6338, 6340, 6341, 6351, 6357, 6363, 6375, 6377, 6381, 6387, 6398, 6400, 6403, 6406, 6409, 6427, 6429, 6447, 6448, 6449, 6452, 6455, 6461, 6464, 6472, 6473, 6485, 6489, 6491, 6492, 6496, 6505, 6514, 6540, 6541, 6548, 6551, 6552, 6572, 6573, 6581, 6586, 6591, 6595, 6597, 6604, 6626, 6628, 6637, 6639, 6644, 6647, 6656, 6657, 6659, 6673, 6677, 6687, 6689, 6701, 6702, 6706, 6711, 6716, 6720, 6723, 6734, 6741, 6742, 6747, 6748, 6750, 6764, 6783, 6792, 6802, 6807, 6808, 6813, 6816, 6817, 6836, 6838, 6841, 6848, 6851, 6875, 6877, 6878, 6885, 6889, 6896, 6897, 6901, 6905, 6918, 6921, 6922, 6925, 6937, 6945, 6946, 6953, 6954, 6956, 6960, 6968, 6971, 6974, 6976, 6982, 6992, 6993, 6994, 7004, 7011, 7012, 7017, 7025, 7028, 7039, 7047, 7058, 7067, 7069, 7072, 7074, 7075, 7076, 7080, 7086, 7095, 7096, 7105, 7124, 7130, 7131, 7133, 7135, 7149, 7153, 7157, 7158, 7162, 7163, 7165, 7170, 7176, 7187, 7188, 7192, 7193, 7194, 7197, 7205, 7206, 7208, 7211, 7214, 7215, 7218, 7236, 7240, 7251, 7255, 7256, 7267, 7282, 7294, 7307, 7308, 7309, 7322, 7326, 7328, 7332, 7337, 7338, 7339, 7351, 7356, 7358, 7360, 7369, 7373, 7374, 7376, 7378, 7379, 7381, 7385, 7386, 7396, 7406, 7407, 7412, 7414, 7417, 7435, 7441, 7443, 7447, 7449, 7450, 7455, 7482, 7483, 7486, 7487, 7499, 7508, 7509, 7511, 7515, 7519, 7520, 7529, 7532, 7533, 7539, 7542, 7543, 7545, 7547, 7549, 7553, 7567, 7587, 7588, 7590, 7594, 7597, 7601, 7604, 7606, 7617, 7623, 7627, 7629, 7638, 7639, 7641, 7648, 7660, 7664, 7665, 7671, 7689, 7695, 7696, 7697, 7709, 7717, 7718, 7719, 7720, 7723, 7730, 7736, 7744, 7748, 7753, 7759, 7769, 7773, 7786, 7788, 7791, 7792, 7797, 7800, 7801, 7805, 7811, 7813, 7814, 7823, 
7829, 7839, 7845, 7849, 7857, 7859, 7864, 7870, 7875, 7876, 7877, 7882, 7886, 7888, 7889, 7891, 7893, 7895, 7896, 7902, 7914, 7925, 7935, 7937, 7951, 7959, 7973, 7977, 7978, 7993, 7995, 7997, 8002, 8006, 8011, 8014, 8019, 8020, 8022, 8023, 8030, 8032, 8034, 8035, 8039, 8042, 8052, 8054, 8056, 8070, 8071, 8079, 8080, 8082, 8083, 8086, 8092, 8096, 8098, 8102, 8104, 8112, 8113, 8122, 8130, 8132, 8133, 8139, 8143, 8150, 8163, 8164, 8175, 8177, 8178, 8194, 8197, 8215, 8217, 8220, 8224, 8226, 8229, 8236, 8238, 8247, 8250, 8251, 8252, 8257, 8269, 8276, 8281, 8288, 8290, 8301, 8310, 8321, 8324, 8325, 8329, 8330, 8332, 8335, 8337, 8338, 8339, 8341, 8342, 8359, 8371, 8376, 8378, 8382, 8396, 8397, 8399, 8409, 8411, 8418, 8430, 8433, 8434, 8438, 8439, 8441, 8444, 8445, 8446, 8452, 8453, 8458, 8461, 8463, 8472, 8479, 8485, 8501, 8502, 8506, 8509, 8513, 8519, 8521, 8522] -------------------------------------------------------------------------------- /data/training2017/splits/split_5_6_14/test4.json: -------------------------------------------------------------------------------- 1 | [7, 9, 10, 12, 22, 23, 28, 34, 37, 39, 46, 47, 55, 57, 60, 61, 65, 78, 96, 98, 105, 107, 113, 118, 121, 133, 147, 161, 167, 168, 172, 197, 203, 205, 208, 224, 225, 234, 236, 238, 244, 248, 253, 261, 265, 267, 270, 271, 272, 291, 292, 302, 319, 321, 337, 344, 345, 346, 347, 349, 355, 359, 363, 367, 368, 374, 380, 388, 393, 403, 412, 415, 421, 422, 425, 429, 433, 440, 446, 448, 453, 456, 474, 475, 477, 479, 485, 492, 493, 496, 500, 507, 512, 515, 518, 520, 523, 527, 528, 536, 539, 546, 547, 548, 554, 561, 562, 565, 573, 580, 582, 586, 594, 595, 596, 602, 606, 614, 615, 617, 624, 632, 643, 651, 654, 656, 663, 664, 669, 672, 683, 703, 709, 725, 728, 743, 744, 745, 754, 755, 772, 773, 774, 782, 787, 797, 798, 799, 804, 805, 809, 816, 826, 848, 857, 859, 861, 864, 880, 885, 886, 893, 901, 906, 910, 916, 919, 921, 925, 927, 936, 939, 942, 947, 948, 949, 960, 969, 971, 972, 973, 984, 987, 990, 994, 996, 1001, 1003, 1004, 1010, 1011, 1012, 1014, 1023, 1031, 1036, 1037, 1041, 1044, 1048, 1052, 1062, 1070, 1071, 1086, 1088, 1091, 1096, 1097, 1102, 1107, 1110, 1118, 1123, 1133, 1139, 1142, 1145, 1149, 1156, 1157, 1167, 1170, 1177, 1178, 1184, 1186, 1191, 1196, 1201, 1205, 1208, 1209, 1214, 1215, 1221, 1229, 1233, 1235, 1243, 1245, 1247, 1253, 1254, 1258, 1263, 1280, 1292, 1293, 1294, 1296, 1297, 1305, 1310, 1316, 1318, 1323, 1324, 1338, 1361, 1363, 1368, 1370, 1375, 1380, 1393, 1396, 1408, 1409, 1414, 1415, 1421, 1425, 1427, 1448, 1449, 1452, 1453, 1455, 1460, 1463, 1466, 1470, 1472, 1476, 1485, 1497, 1500, 1502, 1514, 1515, 1516, 1519, 1524, 1528, 1529, 1538, 1549, 1550, 1551, 1559, 1564, 1565, 1573, 1584, 1586, 1587, 1600, 1606, 1638, 1644, 1649, 1651, 1655, 1660, 1669, 1673, 1680, 1683, 1684, 1695, 1699, 1716, 1717, 1718, 1723, 1738, 1741, 1756, 1762, 1770, 1776, 1777, 1778, 1788, 1801, 1805, 1806, 1813, 1816, 1817, 1825, 1832, 1840, 1841, 1842, 1843, 1845, 1848, 1854, 1857, 1860, 1866, 1877, 1881, 1883, 1884, 1885, 1896, 1897, 1912, 1913, 1915, 1923, 1929, 1934, 1936, 1938, 1941, 1946, 1947, 1948, 1971, 1974, 1977, 1991, 1994, 1998, 2000, 2012, 2018, 2025, 2030, 2032, 2033, 2039, 2040, 2044, 2059, 2069, 2079, 2083, 2086, 2090, 2092, 2095, 2096, 2099, 2103, 2110, 2112, 2114, 2115, 2118, 2119, 2126, 2127, 2129, 2130, 2134, 2139, 2150, 2152, 2158, 2160, 2161, 2173, 2174, 2175, 2181, 2183, 2187, 2191, 2192, 2194, 2198, 2200, 2207, 2210, 2213, 2218, 2221, 2222, 2229, 2230, 2232, 2236, 2238, 2260, 2266, 2273, 2274, 2275, 2280, 2284, 
2287, 2290, 2299, 2305, 2310, 2314, 2317, 2320, 2324, 2340, 2346, 2354, 2364, 2368, 2376, 2381, 2384, 2395, 2396, 2402, 2403, 2413, 2415, 2424, 2425, 2428, 2430, 2431, 2434, 2438, 2445, 2455, 2460, 2461, 2468, 2469, 2473, 2482, 2500, 2501, 2510, 2526, 2534, 2538, 2540, 2543, 2547, 2550, 2552, 2566, 2568, 2575, 2583, 2587, 2589, 2599, 2603, 2613, 2615, 2616, 2624, 2632, 2633, 2634, 2637, 2642, 2643, 2653, 2654, 2659, 2661, 2667, 2670, 2678, 2687, 2688, 2689, 2693, 2695, 2696, 2700, 2705, 2713, 2720, 2723, 2726, 2732, 2733, 2745, 2753, 2755, 2766, 2774, 2779, 2796, 2805, 2806, 2809, 2810, 2813, 2816, 2818, 2820, 2827, 2842, 2844, 2849, 2854, 2855, 2857, 2863, 2865, 2869, 2873, 2880, 2884, 2885, 2896, 2903, 2907, 2916, 2920, 2921, 2922, 2925, 2928, 2929, 2936, 2940, 2943, 2944, 2953, 2959, 2962, 2969, 2972, 2976, 2978, 2980, 2982, 2987, 2989, 2992, 2993, 3003, 3004, 3006, 3010, 3011, 3015, 3021, 3037, 3050, 3059, 3062, 3065, 3066, 3072, 3073, 3076, 3077, 3079, 3087, 3092, 3094, 3099, 3121, 3128, 3131, 3135, 3144, 3145, 3146, 3149, 3153, 3156, 3163, 3174, 3179, 3180, 3210, 3215, 3216, 3217, 3220, 3225, 3227, 3231, 3237, 3238, 3246, 3256, 3257, 3259, 3268, 3273, 3278, 3282, 3291, 3292, 3294, 3308, 3310, 3317, 3319, 3321, 3329, 3344, 3345, 3346, 3348, 3349, 3350, 3357, 3360, 3366, 3371, 3372, 3376, 3377, 3380, 3389, 3391, 3396, 3403, 3405, 3406, 3416, 3420, 3425, 3427, 3428, 3430, 3433, 3439, 3444, 3446, 3449, 3450, 3456, 3457, 3458, 3467, 3478, 3483, 3485, 3493, 3499, 3500, 3513, 3514, 3515, 3529, 3536, 3545, 3546, 3553, 3558, 3559, 3561, 3567, 3571, 3573, 3585, 3587, 3591, 3595, 3597, 3598, 3606, 3610, 3612, 3613, 3620, 3636, 3652, 3654, 3656, 3659, 3665, 3667, 3668, 3670, 3674, 3676, 3682, 3685, 3690, 3693, 3695, 3708, 3709, 3710, 3713, 3721, 3725, 3729, 3735, 3745, 3753, 3760, 3765, 3766, 3768, 3771, 3773, 3775, 3778, 3780, 3783, 3785, 3794, 3795, 3803, 3809, 3812, 3813, 3818, 3819, 3825, 3832, 3833, 3836, 3837, 3839, 3847, 3851, 3852, 3865, 3870, 3871, 3872, 3876, 3877, 3892, 3897, 3899, 3917, 3921, 3923, 3932, 3934, 3936, 3952, 3957, 3958, 3963, 3965, 3969, 3976, 3978, 3979, 3980, 3981, 3983, 4007, 4011, 4013, 4014, 4017, 4018, 4023, 4028, 4029, 4030, 4038, 4050, 4059, 4060, 4063, 4069, 4071, 4080, 4081, 4083, 4085, 4087, 4091, 4100, 4105, 4106, 4125, 4126, 4127, 4130, 4137, 4155, 4162, 4173, 4189, 4195, 4207, 4208, 4212, 4213, 4217, 4219, 4224, 4244, 4246, 4248, 4253, 4266, 4272, 4277, 4283, 4287, 4289, 4291, 4298, 4302, 4315, 4322, 4330, 4331, 4333, 4348, 4354, 4369, 4370, 4377, 4383, 4384, 4385, 4389, 4392, 4393, 4401, 4411, 4412, 4414, 4422, 4429, 4434, 4440, 4441, 4445, 4446, 4447, 4450, 4452, 4456, 4458, 4460, 4471, 4482, 4502, 4504, 4506, 4517, 4537, 4545, 4547, 4560, 4562, 4568, 4570, 4588, 4594, 4596, 4607, 4609, 4615, 4620, 4629, 4630, 4631, 4641, 4646, 4657, 4658, 4661, 4664, 4667, 4675, 4677, 4680, 4681, 4685, 4689, 4703, 4708, 4712, 4719, 4721, 4722, 4728, 4731, 4735, 4740, 4742, 4748, 4749, 4751, 4753, 4772, 4774, 4789, 4792, 4796, 4807, 4815, 4824, 4827, 4830, 4848, 4852, 4858, 4862, 4870, 4876, 4879, 4882, 4888, 4893, 4895, 4896, 4908, 4911, 4912, 4917, 4924, 4932, 4937, 4940, 4948, 4953, 4956, 4957, 4965, 4969, 4972, 4974, 4978, 4981, 4982, 4987, 4988, 4996, 4998, 5001, 5002, 5006, 5010, 5012, 5027, 5033, 5037, 5038, 5047, 5048, 5060, 5061, 5065, 5067, 5073, 5075, 5078, 5084, 5086, 5095, 5099, 5110, 5127, 5140, 5142, 5146, 5165, 5169, 5170, 5175, 5177, 5179, 5181, 5183, 5184, 5186, 5189, 5201, 5202, 5203, 5208, 5210, 5213, 5214, 5220, 5222, 5223, 5234, 5237, 5239, 5240, 
5249, 5251, 5276, 5278, 5280, 5282, 5283, 5285, 5289, 5294, 5296, 5299, 5305, 5315, 5318, 5319, 5326, 5329, 5330, 5336, 5350, 5355, 5357, 5360, 5382, 5384, 5385, 5391, 5399, 5400, 5407, 5413, 5418, 5430, 5442, 5445, 5446, 5447, 5448, 5450, 5456, 5457, 5462, 5463, 5467, 5478, 5482, 5487, 5490, 5494, 5497, 5499, 5500, 5505, 5507, 5520, 5524, 5526, 5529, 5543, 5548, 5551, 5560, 5570, 5575, 5576, 5584, 5586, 5595, 5600, 5612, 5614, 5620, 5624, 5632, 5634, 5636, 5640, 5651, 5653, 5663, 5675, 5678, 5680, 5684, 5685, 5687, 5689, 5691, 5694, 5697, 5701, 5709, 5717, 5718, 5727, 5728, 5732, 5735, 5741, 5744, 5757, 5759, 5764, 5769, 5772, 5773, 5774, 5775, 5778, 5792, 5797, 5798, 5800, 5802, 5804, 5805, 5810, 5812, 5820, 5826, 5831, 5838, 5840, 5844, 5860, 5873, 5876, 5878, 5888, 5891, 5915, 5924, 5949, 5951, 5952, 5954, 5957, 5958, 5961, 5963, 5968, 5972, 5979, 5983, 5991, 5995, 6004, 6008, 6017, 6024, 6027, 6035, 6037, 6042, 6043, 6049, 6052, 6054, 6063, 6066, 6068, 6072, 6075, 6078, 6081, 6084, 6086, 6092, 6094, 6095, 6097, 6098, 6111, 6120, 6121, 6122, 6123, 6125, 6128, 6130, 6131, 6133, 6137, 6140, 6146, 6153, 6154, 6156, 6159, 6160, 6161, 6162, 6163, 6165, 6173, 6176, 6179, 6193, 6196, 6211, 6214, 6226, 6235, 6236, 6239, 6242, 6245, 6247, 6251, 6253, 6255, 6259, 6260, 6261, 6265, 6269, 6270, 6274, 6275, 6276, 6278, 6279, 6284, 6296, 6308, 6309, 6315, 6316, 6319, 6327, 6339, 6343, 6344, 6347, 6362, 6368, 6373, 6379, 6382, 6383, 6384, 6386, 6392, 6399, 6412, 6413, 6416, 6418, 6420, 6425, 6426, 6428, 6433, 6435, 6436, 6446, 6451, 6454, 6462, 6475, 6480, 6481, 6482, 6483, 6494, 6499, 6501, 6502, 6509, 6512, 6520, 6529, 6535, 6536, 6546, 6549, 6559, 6565, 6567, 6575, 6583, 6593, 6594, 6600, 6602, 6605, 6609, 6615, 6616, 6617, 6631, 6634, 6645, 6651, 6658, 6660, 6664, 6671, 6672, 6684, 6697, 6698, 6707, 6710, 6712, 6715, 6719, 6732, 6733, 6736, 6737, 6751, 6753, 6755, 6774, 6781, 6799, 6800, 6801, 6814, 6823, 6824, 6828, 6829, 6834, 6844, 6845, 6846, 6857, 6858, 6863, 6865, 6867, 6869, 6884, 6891, 6894, 6900, 6914, 6915, 6926, 6930, 6933, 6941, 6943, 6948, 6955, 6959, 6965, 6972, 6977, 6981, 6983, 6989, 6991, 7002, 7013, 7019, 7022, 7024, 7033, 7040, 7041, 7046, 7049, 7054, 7059, 7061, 7062, 7070, 7073, 7077, 7078, 7082, 7090, 7094, 7102, 7106, 7107, 7112, 7119, 7123, 7129, 7138, 7139, 7143, 7148, 7150, 7156, 7169, 7172, 7180, 7190, 7195, 7201, 7203, 7204, 7213, 7224, 7229, 7233, 7238, 7241, 7245, 7246, 7247, 7261, 7265, 7266, 7268, 7270, 7281, 7283, 7285, 7293, 7296, 7298, 7299, 7301, 7302, 7305, 7313, 7315, 7316, 7340, 7341, 7344, 7347, 7355, 7362, 7377, 7382, 7383, 7394, 7397, 7401, 7404, 7409, 7411, 7415, 7418, 7422, 7423, 7426, 7437, 7451, 7452, 7459, 7463, 7467, 7469, 7475, 7479, 7481, 7484, 7488, 7494, 7500, 7507, 7512, 7514, 7521, 7527, 7540, 7548, 7551, 7556, 7562, 7565, 7566, 7568, 7577, 7580, 7585, 7592, 7593, 7610, 7615, 7625, 7628, 7631, 7633, 7637, 7640, 7650, 7658, 7661, 7666, 7668, 7669, 7674, 7679, 7683, 7686, 7692, 7694, 7700, 7703, 7707, 7708, 7711, 7712, 7722, 7727, 7731, 7741, 7751, 7756, 7760, 7762, 7767, 7768, 7772, 7780, 7793, 7794, 7802, 7803, 7810, 7817, 7820, 7821, 7822, 7835, 7838, 7842, 7847, 7851, 7852, 7855, 7860, 7863, 7868, 7884, 7887, 7892, 7897, 7898, 7900, 7903, 7904, 7905, 7909, 7917, 7922, 7924, 7929, 7930, 7936, 7938, 7945, 7947, 7949, 7958, 7965, 7971, 7983, 7985, 7992, 8000, 8001, 8003, 8004, 8007, 8008, 8025, 8033, 8037, 8038, 8046, 8049, 8064, 8066, 8072, 8094, 8097, 8099, 8100, 8105, 8109, 8111, 8114, 8119, 8120, 8125, 8128, 8129, 8131, 8136, 8156, 8158, 
8160, 8161, 8162, 8166, 8173, 8185, 8189, 8192, 8204, 8207, 8209, 8210, 8212, 8216, 8221, 8228, 8231, 8235, 8240, 8242, 8243, 8244, 8249, 8259, 8264, 8273, 8282, 8283, 8284, 8295, 8302, 8303, 8311, 8314, 8316, 8317, 8350, 8351, 8352, 8364, 8369, 8370, 8380, 8381, 8386, 8389, 8390, 8395, 8398, 8401, 8402, 8405, 8407, 8412, 8413, 8415, 8416, 8419, 8429, 8431, 8436, 8437, 8440, 8449, 8457, 8460, 8467, 8473, 8476, 8478, 8486, 8498, 8499, 8500, 8503] -------------------------------------------------------------------------------- /data/training2017/splits/split_5_6_14/test2.json: -------------------------------------------------------------------------------- 1 | [2, 4, 11, 15, 19, 27, 29, 30, 33, 36, 50, 51, 56, 64, 74, 77, 82, 88, 93, 102, 103, 116, 120, 122, 126, 141, 144, 145, 153, 154, 158, 173, 175, 178, 179, 180, 196, 199, 200, 212, 213, 214, 219, 222, 228, 229, 231, 245, 255, 259, 269, 273, 275, 284, 285, 286, 297, 306, 310, 317, 320, 322, 336, 342, 350, 352, 360, 364, 366, 371, 373, 375, 377, 384, 386, 390, 391, 396, 398, 404, 405, 411, 420, 426, 430, 431, 435, 441, 457, 460, 462, 470, 495, 501, 502, 504, 506, 509, 510, 511, 530, 532, 552, 553, 575, 583, 608, 616, 625, 627, 629, 630, 634, 638, 642, 650, 652, 667, 668, 679, 696, 697, 700, 702, 704, 717, 729, 732, 735, 737, 739, 746, 748, 751, 752, 756, 759, 764, 768, 776, 783, 786, 788, 795, 796, 801, 811, 831, 837, 841, 843, 856, 863, 865, 869, 872, 875, 876, 878, 883, 888, 892, 896, 903, 904, 905, 909, 911, 913, 923, 930, 953, 954, 955, 961, 966, 975, 976, 977, 981, 985, 993, 995, 1013, 1015, 1016, 1019, 1030, 1033, 1034, 1035, 1046, 1055, 1056, 1058, 1064, 1069, 1080, 1083, 1092, 1103, 1108, 1112, 1122, 1137, 1143, 1152, 1153, 1155, 1158, 1160, 1161, 1164, 1185, 1187, 1188, 1189, 1200, 1202, 1223, 1225, 1230, 1236, 1242, 1246, 1252, 1256, 1259, 1267, 1269, 1270, 1271, 1274, 1279, 1284, 1289, 1290, 1295, 1298, 1303, 1306, 1308, 1309, 1314, 1315, 1321, 1342, 1346, 1349, 1358, 1359, 1362, 1366, 1367, 1372, 1377, 1379, 1386, 1387, 1388, 1390, 1397, 1402, 1407, 1416, 1417, 1419, 1428, 1429, 1432, 1434, 1446, 1458, 1459, 1461, 1462, 1465, 1471, 1478, 1488, 1489, 1496, 1506, 1510, 1511, 1513, 1530, 1534, 1535, 1540, 1560, 1562, 1566, 1575, 1577, 1579, 1582, 1590, 1595, 1597, 1602, 1613, 1629, 1635, 1650, 1652, 1654, 1658, 1665, 1666, 1671, 1672, 1679, 1686, 1689, 1690, 1693, 1698, 1700, 1708, 1709, 1719, 1734, 1735, 1736, 1749, 1750, 1751, 1761, 1768, 1783, 1787, 1790, 1791, 1793, 1795, 1796, 1810, 1811, 1812, 1818, 1821, 1824, 1833, 1836, 1839, 1851, 1852, 1858, 1863, 1867, 1870, 1874, 1876, 1878, 1879, 1890, 1902, 1903, 1909, 1916, 1920, 1924, 1932, 1939, 1940, 1953, 1961, 1963, 1964, 1966, 1975, 1986, 1990, 1993, 1999, 2004, 2005, 2019, 2022, 2024, 2060, 2061, 2062, 2070, 2073, 2078, 2081, 2091, 2102, 2106, 2109, 2111, 2113, 2120, 2145, 2151, 2155, 2156, 2157, 2164, 2165, 2169, 2171, 2172, 2185, 2202, 2205, 2217, 2219, 2223, 2224, 2228, 2240, 2247, 2248, 2255, 2256, 2257, 2261, 2265, 2285, 2286, 2291, 2294, 2295, 2296, 2298, 2312, 2318, 2319, 2321, 2328, 2333, 2334, 2335, 2339, 2343, 2344, 2349, 2353, 2356, 2361, 2366, 2377, 2380, 2383, 2387, 2389, 2398, 2401, 2412, 2420, 2421, 2422, 2433, 2442, 2446, 2451, 2452, 2453, 2458, 2462, 2465, 2466, 2481, 2483, 2507, 2508, 2511, 2519, 2528, 2535, 2545, 2551, 2553, 2555, 2559, 2570, 2571, 2572, 2573, 2581, 2585, 2588, 2590, 2595, 2598, 2600, 2605, 2608, 2611, 2620, 2626, 2627, 2645, 2647, 2650, 2652, 2655, 2658, 2660, 2663, 2675, 2680, 2684, 2707, 2708, 2716, 2722, 2740, 2742, 2746, 2747, 2751, 
2757, 2762, 2768, 2770, 2775, 2786, 2790, 2791, 2792, 2801, 2808, 2811, 2814, 2817, 2821, 2835, 2836, 2839, 2847, 2860, 2861, 2862, 2866, 2875, 2876, 2879, 2890, 2895, 2908, 2917, 2927, 2931, 2933, 2939, 2950, 2954, 2958, 2961, 2966, 2968, 2971, 2973, 2975, 2985, 2988, 2991, 2995, 2999, 3000, 3001, 3008, 3009, 3018, 3022, 3023, 3027, 3028, 3038, 3055, 3056, 3057, 3070, 3071, 3089, 3090, 3095, 3098, 3103, 3106, 3108, 3109, 3111, 3114, 3123, 3124, 3127, 3130, 3134, 3136, 3139, 3142, 3150, 3155, 3157, 3158, 3159, 3161, 3162, 3171, 3182, 3185, 3186, 3195, 3197, 3200, 3208, 3209, 3211, 3212, 3226, 3228, 3235, 3242, 3251, 3253, 3255, 3267, 3269, 3283, 3285, 3288, 3302, 3304, 3305, 3312, 3313, 3315, 3318, 3325, 3341, 3352, 3369, 3383, 3387, 3395, 3412, 3413, 3415, 3418, 3419, 3431, 3432, 3434, 3437, 3454, 3460, 3463, 3464, 3466, 3469, 3487, 3489, 3492, 3504, 3506, 3509, 3510, 3511, 3523, 3532, 3537, 3540, 3556, 3566, 3568, 3575, 3579, 3580, 3581, 3582, 3593, 3622, 3623, 3625, 3627, 3632, 3634, 3635, 3637, 3640, 3643, 3646, 3651, 3653, 3658, 3661, 3664, 3669, 3677, 3684, 3687, 3688, 3698, 3699, 3704, 3711, 3716, 3726, 3730, 3731, 3732, 3736, 3738, 3741, 3743, 3744, 3746, 3759, 3779, 3782, 3787, 3788, 3792, 3793, 3799, 3801, 3811, 3816, 3822, 3827, 3829, 3838, 3843, 3845, 3855, 3857, 3860, 3862, 3863, 3866, 3869, 3878, 3879, 3882, 3886, 3893, 3894, 3896, 3900, 3902, 3903, 3910, 3913, 3914, 3915, 3919, 3929, 3930, 3931, 3941, 3948, 3950, 3951, 3959, 3962, 3967, 3971, 3972, 3977, 3995, 3996, 3999, 4003, 4009, 4012, 4026, 4036, 4043, 4045, 4049, 4054, 4062, 4072, 4077, 4084, 4089, 4097, 4107, 4110, 4111, 4113, 4114, 4124, 4129, 4141, 4147, 4148, 4159, 4160, 4166, 4170, 4171, 4172, 4177, 4180, 4182, 4191, 4196, 4197, 4201, 4202, 4204, 4205, 4214, 4215, 4216, 4226, 4247, 4249, 4255, 4259, 4260, 4264, 4269, 4270, 4273, 4275, 4284, 4286, 4293, 4295, 4296, 4301, 4309, 4318, 4323, 4326, 4335, 4342, 4355, 4357, 4358, 4374, 4375, 4380, 4387, 4388, 4391, 4396, 4398, 4402, 4403, 4408, 4415, 4420, 4423, 4424, 4427, 4428, 4432, 4433, 4438, 4439, 4455, 4463, 4467, 4468, 4472, 4476, 4484, 4487, 4490, 4491, 4492, 4501, 4508, 4510, 4526, 4528, 4529, 4535, 4541, 4542, 4546, 4551, 4555, 4557, 4558, 4564, 4569, 4575, 4590, 4591, 4597, 4604, 4608, 4616, 4619, 4632, 4634, 4635, 4639, 4640, 4653, 4655, 4659, 4669, 4673, 4676, 4684, 4686, 4687, 4693, 4694, 4696, 4699, 4701, 4702, 4705, 4707, 4723, 4724, 4725, 4727, 4738, 4743, 4758, 4761, 4763, 4764, 4766, 4768, 4777, 4778, 4783, 4784, 4785, 4795, 4798, 4808, 4812, 4829, 4831, 4836, 4837, 4843, 4849, 4850, 4851, 4855, 4865, 4866, 4869, 4871, 4873, 4878, 4890, 4892, 4897, 4900, 4904, 4921, 4922, 4923, 4930, 4933, 4934, 4938, 4941, 4971, 5004, 5005, 5014, 5017, 5022, 5024, 5030, 5031, 5040, 5042, 5045, 5053, 5064, 5074, 5076, 5079, 5082, 5085, 5090, 5092, 5094, 5104, 5107, 5112, 5114, 5115, 5124, 5126, 5129, 5132, 5134, 5143, 5147, 5148, 5150, 5152, 5155, 5157, 5160, 5161, 5162, 5168, 5172, 5174, 5180, 5185, 5188, 5197, 5200, 5206, 5207, 5212, 5215, 5218, 5219, 5221, 5227, 5228, 5235, 5243, 5244, 5250, 5252, 5255, 5260, 5261, 5265, 5267, 5268, 5274, 5281, 5284, 5291, 5308, 5316, 5321, 5325, 5337, 5340, 5343, 5345, 5346, 5348, 5354, 5370, 5371, 5377, 5378, 5379, 5389, 5397, 5398, 5405, 5412, 5421, 5422, 5424, 5433, 5441, 5443, 5444, 5469, 5476, 5477, 5479, 5483, 5502, 5503, 5510, 5511, 5516, 5519, 5521, 5523, 5544, 5553, 5558, 5561, 5566, 5567, 5568, 5572, 5573, 5574, 5578, 5587, 5588, 5596, 5603, 5608, 5610, 5616, 5621, 5622, 5625, 5628, 5629, 5630, 5637, 5641, 5642, 5644, 
5645, 5650, 5655, 5658, 5659, 5661, 5664, 5667, 5670, 5671, 5672, 5674, 5676, 5688, 5692, 5695, 5698, 5704, 5705, 5708, 5713, 5714, 5724, 5726, 5733, 5738, 5742, 5743, 5750, 5753, 5758, 5767, 5787, 5789, 5799, 5801, 5809, 5813, 5827, 5830, 5839, 5843, 5847, 5848, 5854, 5858, 5861, 5863, 5867, 5871, 5883, 5886, 5894, 5898, 5899, 5900, 5908, 5909, 5912, 5916, 5920, 5929, 5933, 5936, 5937, 5938, 5940, 5943, 5945, 5960, 5967, 5970, 5973, 5974, 5977, 5978, 5980, 5987, 5988, 5989, 5990, 5999, 6009, 6012, 6015, 6020, 6021, 6032, 6041, 6046, 6051, 6053, 6056, 6059, 6065, 6067, 6085, 6087, 6096, 6099, 6106, 6112, 6114, 6115, 6124, 6126, 6127, 6135, 6138, 6142, 6148, 6157, 6158, 6172, 6178, 6184, 6189, 6190, 6191, 6192, 6199, 6206, 6218, 6228, 6240, 6241, 6256, 6258, 6263, 6289, 6290, 6307, 6310, 6320, 6322, 6323, 6329, 6342, 6348, 6349, 6350, 6353, 6355, 6358, 6359, 6364, 6366, 6370, 6372, 6389, 6390, 6394, 6411, 6415, 6417, 6419, 6432, 6440, 6444, 6445, 6456, 6460, 6463, 6465, 6466, 6467, 6468, 6471, 6474, 6484, 6490, 6495, 6513, 6517, 6518, 6525, 6533, 6537, 6542, 6543, 6544, 6547, 6560, 6564, 6568, 6577, 6578, 6579, 6580, 6584, 6585, 6587, 6588, 6592, 6598, 6599, 6601, 6613, 6614, 6618, 6621, 6622, 6625, 6636, 6640, 6641, 6643, 6649, 6655, 6662, 6666, 6668, 6670, 6676, 6678, 6680, 6688, 6694, 6695, 6699, 6708, 6709, 6722, 6724, 6727, 6728, 6731, 6738, 6746, 6757, 6760, 6762, 6766, 6767, 6769, 6773, 6776, 6786, 6789, 6790, 6795, 6796, 6797, 6810, 6811, 6812, 6822, 6825, 6843, 6849, 6853, 6856, 6862, 6864, 6870, 6873, 6876, 6879, 6888, 6902, 6903, 6912, 6919, 6920, 6923, 6928, 6929, 6938, 6940, 6942, 6947, 6950, 6957, 6958, 6961, 6962, 6967, 6969, 6973, 6984, 6985, 6986, 6990, 6995, 7008, 7009, 7010, 7015, 7016, 7020, 7023, 7034, 7042, 7043, 7045, 7053, 7055, 7063, 7064, 7065, 7068, 7087, 7089, 7091, 7099, 7100, 7101, 7114, 7116, 7125, 7126, 7128, 7132, 7136, 7141, 7142, 7145, 7146, 7152, 7155, 7159, 7161, 7164, 7166, 7168, 7171, 7174, 7178, 7179, 7182, 7183, 7185, 7186, 7198, 7207, 7212, 7216, 7217, 7222, 7237, 7242, 7248, 7249, 7253, 7258, 7259, 7260, 7272, 7276, 7280, 7284, 7292, 7304, 7310, 7311, 7312, 7314, 7320, 7324, 7325, 7329, 7330, 7333, 7334, 7342, 7346, 7352, 7363, 7365, 7368, 7375, 7391, 7392, 7395, 7410, 7420, 7421, 7424, 7428, 7429, 7432, 7436, 7438, 7442, 7444, 7453, 7457, 7460, 7472, 7474, 7490, 7495, 7504, 7518, 7525, 7528, 7530, 7537, 7538, 7544, 7546, 7555, 7560, 7561, 7563, 7569, 7570, 7572, 7574, 7578, 7582, 7600, 7602, 7607, 7609, 7612, 7613, 7614, 7616, 7618, 7620, 7622, 7624, 7626, 7632, 7643, 7644, 7647, 7652, 7653, 7654, 7655, 7656, 7657, 7678, 7685, 7687, 7688, 7690, 7691, 7699, 7705, 7716, 7721, 7734, 7739, 7743, 7745, 7749, 7750, 7754, 7755, 7763, 7765, 7770, 7776, 7781, 7787, 7790, 7795, 7799, 7804, 7808, 7812, 7815, 7816, 7826, 7832, 7833, 7836, 7850, 7854, 7858, 7862, 7869, 7871, 7878, 7880, 7881, 7883, 7907, 7908, 7910, 7911, 7912, 7916, 7940, 7946, 7950, 7953, 7955, 7967, 7968, 7979, 7984, 7986, 7988, 7990, 7996, 8010, 8012, 8013, 8017, 8018, 8021, 8024, 8026, 8028, 8036, 8047, 8050, 8051, 8055, 8057, 8061, 8065, 8067, 8069, 8073, 8074, 8075, 8087, 8090, 8091, 8101, 8103, 8108, 8116, 8117, 8124, 8126, 8138, 8140, 8141, 8144, 8149, 8152, 8153, 8157, 8167, 8168, 8170, 8171, 8172, 8174, 8181, 8182, 8183, 8186, 8187, 8193, 8198, 8200, 8201, 8203, 8219, 8232, 8233, 8237, 8239, 8248, 8253, 8263, 8265, 8266, 8268, 8272, 8298, 8300, 8308, 8312, 8315, 8319, 8320, 8323, 8326, 8333, 8336, 8349, 8354, 8358, 8360, 8363, 8367, 8375, 8387, 8391, 8417, 8447, 8455, 8456, 8459, 
8474, 8488, 8489, 8490, 8505, 8517, 8520] -------------------------------------------------------------------------------- /codes/summarizeScores.py: -------------------------------------------------------------------------------- 1 | '''*********************************************** 2 | * 3 | * project: physioNet 4 | * created: 23.11.2017 5 | * purpose: print a score overview 6 | * 7 | ***********************************************''' 8 | 9 | '''*********************************************** 10 | * Imports 11 | ***********************************************''' 12 | 13 | import numpy as np 14 | import os 15 | import sys 16 | import json 17 | import collections 18 | from definitions import * 19 | import utils.dataset_helper as dsh 20 | 21 | '''*********************************************** 22 | * Variables 23 | ***********************************************''' 24 | 25 | keys = { 26 | 'acc_test' : ['scoring', 'acc_test'], 27 | 'sco_test' : ['scoring', 'sc_test'], 28 | 'acc_valid' : ['scoring', 'acc_valid'], 29 | 'sco_valid' : ['scoring', 'sc_valid'], 30 | 'acc_train' : ['scoring', 'acc_train'], 31 | 'sco_train' : ['scoring', 'sc_train'], 32 | 'Nactual' : ['split_scoring', 'test', '~', 'actual'], 33 | 'Npredict' : ['split_scoring', 'test', '~', 'pred'], 34 | 'Ncorrect' : ['split_scoring', 'test', '~', 'correct'], 35 | 'sco_normal': ['split_scoring', 'test', 'N', 'score'], 36 | 'sco_afib' : ['split_scoring', 'test', 'A', 'score'], 37 | 'sco_other' : ['split_scoring', 'test', 'O', 'score'], 38 | 'sco_noise' : ['split_scoring', 'test', '~', 'score'] 39 | } 40 | 41 | scorekeys = ['acc_test', 'sco_test', 'acc_valid', 'sco_valid', 'acc_train', 'sco_train'] 42 | noisekeys = ['Nactual', 'Npredict', 'Ncorrect'] 43 | 44 | pkdict_set1 = ['sco_test', 'sco_normal', 'sco_afib', 'sco_other', 'sco_noise'] 45 | pkdict_set2 = ['sco_test', 'sco_normal', 'sco_afib', 'sco_other'] 46 | pkdict_set3 = ['sco_test', 'Nprecis', 'Nrecall'] 47 | 48 | pkdict = {} 49 | pkdict['CNN'] = pkdict_set1 50 | pkdict['CRNN'] = pkdict_set1 51 | pkdict['HNN'] = pkdict_set1 52 | pkdict['HNNStage1'] = pkdict_set3 53 | pkdict['HNNStage2'] = pkdict_set2 54 | pkdict['HNNStage2R'] = pkdict_set2 55 | 56 | '''*********************************************** 57 | * Help Functions 58 | ***********************************************''' 59 | 60 | def checkDir(network, rootdir, subdir=''): 61 | outpaths = [] 62 | dirs = os.listdir(os.path.join(rootdir, subdir)) 63 | dirs = [d for d in dirs if network in d] 64 | for d in dirs: 65 | relpath = os.path.join(subdir, d) 66 | abspath = os.path.join(rootdir, relpath) 67 | if 'fold0' in os.listdir(abspath): 68 | outpaths.append(relpath) 69 | else: 70 | outpaths = outpaths+checkDir(network, rootdir, relpath) 71 | return outpaths 72 | 73 | def insertDict(insDict, keylist, value): 74 | 75 | if len(keylist)==0: 76 | return value 77 | 78 | if not isinstance(insDict, dict): 79 | dictStack = [{} for k in range(len(keylist)+1)] 80 | dictStack[0] = value 81 | for i in range(1,len(keylist)+1): 82 | dictStack[i] = {keylist[-i]:dictStack[i-1]} 83 | return dictStack[-1] 84 | else: 85 | key = keylist[0] 86 | if key in insDict: 87 | insDict[key] = insertDict(insDict[key], keylist[1:], value) 88 | else: 89 | dictStack = [{} for k in keylist] 90 | dictStack[0] = value 91 | for i in range(1,len(keylist)): 92 | dictStack[i] = {keylist[-i]:dictStack[i-1]} 93 | # print(dictStack) 94 | insDict[key] = dictStack[-1] 95 | return insDict 96 | 97 | def readFromDict(readDict, keylist): 98 | tmpDict = readDict 99 | for key in 
keylist: 100 | tmpDict = tmpDict[key] 101 | return tmpDict 102 | 103 | def print_line(symb, length): 104 | for i in range(length): 105 | print(symb, end='') 106 | print('') 107 | 108 | def print_distance(length): 109 | for i in range(length): 110 | print('') 111 | 112 | '''*********************************************** 113 | * Main Functions 114 | ***********************************************''' 115 | 116 | def summarize(network): 117 | 118 | printkeys=pkdict[network] 119 | 120 | rootdir = os.path.join(root, 'log', network) 121 | relpaths = checkDir(network, rootdir) 122 | phases = ['phase'+str(p) for p in range(10)] 123 | folds = ['fold'+str(f) for f in range(5)] 124 | 125 | dictPFR, dictPRF, dictRPF, dictRFP, dictFPR, dictFRP = {}, {}, {}, {}, {}, {} 126 | 127 | # extract all required values from score-dict 128 | 129 | for phase in phases: 130 | for fold in folds: 131 | for relpath in relpaths: 132 | 133 | checkdir = os.path.join(rootdir, relpath, fold, 'trained_'+phase) 134 | if os.path.exists(checkdir): 135 | with open(os.path.join(checkdir,'scores.json')) as fh: 136 | scoreStr = fh.read() 137 | scoreDict = json.loads(scoreStr) 138 | 139 | subDict = {} 140 | for key in scorekeys: 141 | var = readFromDict(scoreDict, keys[key]) 142 | subDict[key] = round(var,3) 143 | 144 | if 'Nprecis' in printkeys: 145 | for key in noisekeys: 146 | subDict[key] = readFromDict(scoreDict, keys[key]) 147 | Nprecis = 100*subDict['Ncorrect']/subDict['Npredict'] 148 | subDict['Nprecis'] = round(Nprecis,3) 149 | Nrecall = 100*subDict['Ncorrect']/subDict['Nactual'] 150 | subDict['Nrecall'] = round(Nrecall,3) 151 | 152 | if 'sco_normal' in printkeys: 153 | subDict['sco_normal'] = readFromDict(scoreDict, keys['sco_normal']) 154 | 155 | if 'sco_afib' in printkeys: 156 | subDict['sco_afib'] = readFromDict(scoreDict, keys['sco_afib']) 157 | 158 | if 'sco_other' in printkeys: 159 | subDict['sco_other'] = readFromDict(scoreDict, keys['sco_other']) 160 | 161 | if 'sco_noise' in printkeys: 162 | subDict['sco_noise'] = readFromDict(scoreDict, keys['sco_noise']) 163 | 164 | dictPRF = insertDict(dictPRF, [phase, relpath, fold], subDict) 165 | dictRPF = insertDict(dictRPF, [relpath, phase, fold], subDict) 166 | 167 | # calculate all means and standard deviations 168 | 169 | for phase in phases: 170 | if phase in dictPRF: 171 | phaseDict = dictPRF[phase] 172 | 173 | for relpath in relpaths: 174 | if relpath in phaseDict: 175 | relpathDict = phaseDict[relpath] 176 | 177 | valueDict = {} 178 | for pk in printkeys: 179 | valueDict[pk] = np.array([]) 180 | for fold in folds: 181 | if fold in relpathDict: 182 | foldDict = relpathDict[fold] 183 | for pk in printkeys: 184 | newval = foldDict[pk] 185 | valueDict[pk] = np.append(valueDict[pk], newval) 186 | meanDict, stdevDict = {}, {} 187 | for pk in printkeys: 188 | meanDict[pk] = round(np.mean(valueDict[pk]),3) 189 | stdevDict[pk] = round(np.std(valueDict[pk]),3) 190 | dictPRF[phase][relpath]['mean'] = meanDict 191 | dictPRF[phase][relpath]['stdev'] = stdevDict 192 | dictRPF[relpath][phase]['mean'] = meanDict 193 | dictRPF[relpath][phase]['stdev'] = stdevDict 194 | 195 | folds.append('mean') 196 | folds.append('stdev') 197 | 198 | # print complete overview 199 | 200 | print_distance(10) 201 | linelength = 16*len(printkeys)+8 202 | for phase in phases: 203 | if phase in dictPRF: 204 | phaseDict = dictPRF[phase] 205 | 206 | print_line('*', linelength) 207 | print('* ', phase) 208 | print_line('*', linelength) 209 | print('* \t', end="") 210 | for pk in printkeys: 211 | print(pk, 
'\t', end="") 212 | print('') 213 | print_line('*', linelength) 214 | 215 | for relpath in relpaths: 216 | if relpath in phaseDict: 217 | relpathDict = phaseDict[relpath] 218 | 219 | print_line('-', linelength) 220 | print(relpath) 221 | print_line('-', linelength) 222 | 223 | for fold in folds: 224 | if fold in relpathDict: 225 | foldDict = relpathDict[fold] 226 | 227 | if fold == 'mean': 228 | print_line('- ', int(linelength/2)) 229 | 230 | print(fold, '\t', end="") 231 | for pk in printkeys: 232 | print(foldDict[pk], '\t\t', end="") 233 | print('') 234 | 235 | 236 | # print short overview 237 | 238 | print_line('-', linelength) 239 | print_distance(3) 240 | 241 | shortDict = {} 242 | for relpath in relpaths: 243 | shortDict[relpath] = {} 244 | shortDict[relpath]['mean'] = {} 245 | shortDict[relpath]['stdev'] = {} 246 | for phase in phases: 247 | if phase in dictPRF: 248 | phaseDict = dictPRF[phase] 249 | for relpath in relpaths: 250 | if relpath in phaseDict: 251 | relpathDict = phaseDict[relpath] 252 | # print(relpathDict['stdev']) 253 | shortDict[relpath]['mean'] = relpathDict['mean'] 254 | shortDict[relpath]['stdev'] = relpathDict['stdev'] 255 | 256 | print_line('~', linelength+40) 257 | print('{0: <40}'.format(''), end="") 258 | for pk in printkeys: 259 | print(pk, '\t', end="") 260 | print('') 261 | print_line('~', linelength+40) 262 | for relpath in relpaths: 263 | mean = shortDict[relpath]['mean'] 264 | stdev = shortDict[relpath]['stdev'] 265 | print('{0: <40}'.format(relpath), end="") 266 | for pk in printkeys: 267 | print('%2.3f ±%1.3f\t' % (mean[pk], stdev[pk]), end="") 268 | print('') 269 | print_line('~', linelength+40) 270 | 271 | 272 | save_file = os.path.join(root, 'log', network, 'summary.json') 273 | with open(save_file, 'w+') as fh: 274 | json.dump(dictPRF, fh, indent=4, sort_keys=True) 275 | 276 | 277 | '''*********************************************** 278 | * Script 279 | ***********************************************''' 280 | 281 | def main(): 282 | network = sys.argv[1] 283 | summarize(network) 284 | 285 | if __name__ == '__main__': 286 | main() 287 | -------------------------------------------------------------------------------- /codes/utils/dataset_helper.py: -------------------------------------------------------------------------------- 1 | '''*********************************************** 2 | * 3 | * project: physioNet 4 | * created: 21.11.2017 5 | * purpose: interface to the physioNet database 6 | * 7 | ***********************************************''' 8 | 9 | '''*********************************************** 10 | * Imports 11 | ***********************************************''' 12 | 13 | import os 14 | import sys 15 | import numpy as np 16 | import scipy.io as sio 17 | import json 18 | import math 19 | import parse 20 | 21 | from definitions import * 22 | import utils.split_helper as sh 23 | 24 | '''*********************************************** 25 | * Variables 26 | ***********************************************''' 27 | 28 | version = 3 29 | 30 | # directories 31 | # db_dir = '.' if entry else os.path.join(data_dir, 'training2017') 32 | # mat_dir = '.' 
if entry else 'mat_files' 33 | db_dir = os.path.join(data_dir, 'training2017') 34 | mat_dir = 'mat_files' 35 | hea_dir = 'hea_files' 36 | hr_dir = 'heartrates' 37 | split_dir = 'splits' 38 | 39 | # different formats 40 | data_fmt = 'A{:0>5}' 41 | hea_fmt = os.path.join(root, db_dir, hea_dir, data_fmt + '.hea') 42 | mat_fmt = os.path.join(root, db_dir, mat_dir, data_fmt + '.mat') 43 | hr_fmt = os.path.join(root, db_dir, hr_dir, data_fmt + '_{}' + '.json') 44 | ref_fmt = os.path.join(root, db_dir, '{}.csv') 45 | rec_fmt = os.path.join(root, db_dir, '{}') 46 | split_fmt= os.path.join(root, db_dir, split_dir, '{}') 47 | 48 | # version specific variables 49 | ref_files = ['REFERENCE-original', 'REFERENCE', 'REFERENCE-v3'] 50 | class_distributions = [[60.44, 9.04, 29.98, 0.54], 51 | [59.22, 8.65, 28.80, 3.33], 52 | [59.52, 8.89, 28.32, 3.27]] 53 | 54 | # records and reference file 55 | rec_file = rec_fmt.format('RECORDS') 56 | ref_file = ref_fmt.format(ref_files[version-1]) 57 | 58 | # class information 59 | n_classes = 4 60 | class_distribution = class_distributions[version-1] 61 | class_tags = ['N', 'A', 'O', '~'] 62 | class_descriptions = ['normal', 'AF pathology', 'other pathology', 'noise'] 63 | 64 | # special signals 65 | n_sequences = 8528 66 | shortest_seq = 5493 67 | longest_seq = 5736 68 | min_seq_length = 2714 69 | max_seq_length = 18286 70 | 71 | '''*********************************************** 72 | * Main load functions 73 | ***********************************************''' 74 | 75 | def load_data(load_list, ext_len=None, data2d=True): 76 | 77 | if load_list == []: 78 | return [] 79 | 80 | id_list = convert_to_id_list(load_list) 81 | file_list = [mat_fmt.format(id) for id in id_list] 82 | signals = np.array([load_signal(file) for file in file_list]) 83 | 84 | if ext_len is not None: 85 | signals = [extend_signal(sig,ext_len) for sig in signals] 86 | signals = np.vstack(signals) 87 | 88 | if len(signals) == 1 and not data2d: 89 | signals = signals[0] 90 | 91 | return signals 92 | 93 | 94 | def load_label(load_list, output_type='onehot'): 95 | 96 | if load_list == [] or np.array_equal(load_list, []): 97 | return [] 98 | 99 | id_list = convert_to_id_list(load_list) 100 | 101 | with open(ref_file) as fh: 102 | ref_content = fh.readlines() 103 | 104 | line_fmt = '{},{}\n' 105 | labels = [parse.parse(line_fmt, ref_content[id-1])[1] for id in id_list] 106 | 107 | if output_type is 'int' or 'onehot': 108 | labels = np.array([label_str_to_int(label) for label in labels]) 109 | 110 | if output_type is 'onehot': 111 | labels = [label_int_to_onehot(label) for label in labels] 112 | labels = np.vstack(labels) 113 | 114 | if len(labels) == 1: 115 | labels = labels[0] 116 | 117 | return labels 118 | 119 | '''*********************************************** 120 | * Load helper functions 121 | ***********************************************''' 122 | 123 | def load_signal(path): 124 | dict = sio.loadmat(path) 125 | signal = dict['val'] 126 | return signal[0,:] 127 | 128 | def extend_signal(signal, length): 129 | extended = np.zeros(length) 130 | siglength = np.min([length, signal.shape[0]]) 131 | extended[:siglength] = signal[:siglength] 132 | return extended 133 | 134 | '''*********************************************** 135 | * Format conversion Functions 136 | ***********************************************''' 137 | 138 | def convert_to_id_list(conv_list): 139 | # if single integer or string, convert to list 140 | if isinstance(conv_list, (str, int)): 141 | conv_list = [conv_list] 142 | # turn 
strings into integers 143 | if isinstance(conv_list[0], str): 144 | conv_list = [name2id(name) for name in conv_list] 145 | return conv_list 146 | 147 | def name2id(name): 148 | [id_str] = parse.parse(data_fmt, name) 149 | id = int(id_str) 150 | return id 151 | 152 | def label_str_to_int(str_label): 153 | return class_tags.index(str_label) 154 | 155 | def label_int_to_onehot(int_label): 156 | onehot = np.zeros(n_classes) 157 | onehot[int_label] = 1 158 | return onehot 159 | 160 | '''*********************************************** 161 | * Batch split function 162 | ***********************************************''' 163 | 164 | def batch_splitter(set_size, batch_s, shuffle=False, labels=None, compensation_factor=0, pretraining=False): 165 | if set_size==0: 166 | return [] 167 | else: 168 | set = np.array(range(set_size)) 169 | if labels is not None and not pretraining: 170 | masks = [labels[:,i].astype(bool) for i in range(labels.shape[1])] 171 | sets = [set[mask] for mask in masks] 172 | lst = [] 173 | for idx, set in enumerate(sets): 174 | scale = int(100*compensation_factor/class_distribution[idx]) + 1 175 | set = np.matlib.repmat(set, scale, 1) 176 | set = set.reshape([-1,1]) 177 | lst.append(set) 178 | set = np.vstack(lst) 179 | set = set.squeeze() 180 | np.random.shuffle(set) 181 | set = set[0:set_size] 182 | set = np.sort(set) 183 | set_size = set.shape[0] 184 | n_batches = math.ceil(set_size / batch_s) 185 | if shuffle: 186 | np.random.shuffle(set) 187 | batches = np.array_split(set, n_batches) 188 | return batches 189 | 190 | '''*********************************************** 191 | * Dataset split generation & loading 192 | ***********************************************''' 193 | 194 | def gen_split(holdout, n_folds, tv_frac, seed): 195 | 196 | properties = sh.get_properties(holdout, n_folds, tv_frac, seed) 197 | 198 | save_dir = split_fmt.format(properties['name']) 199 | holdout_file = os.path.join(save_dir, 'holdout.json') 200 | test_file_base = os.path.join(save_dir, 'test.json') 201 | valid_file_base = os.path.join(save_dir, 'valid.json') 202 | train_file_base = os.path.join(save_dir, 'train.json') 203 | 204 | # prevent overwriting an existing split 205 | if os.path.exists(save_dir): 206 | print('Error: Split with this name already exists!') 207 | sys.exit(1) 208 | os.makedirs(save_dir) 209 | 210 | prop_file = os.path.join(save_dir,'properties.json') 211 | with open(prop_file, 'w+') as fh: 212 | json.dump(properties, fh, indent=4, sort_keys=True) 213 | 214 | ids = range(1, n_sequences + 1) 215 | if holdout: 216 | sets = sh.stratified_split( 217 | id_list=ids, 218 | labels=load_label(ids), 219 | rel_size=[1]*(n_folds+1), 220 | shuffle=True, 221 | seed=seed 222 | ) 223 | holdout_set = sets[0] 224 | sets = sets[1:] 225 | rest = sh.adjoint_set(ids, holdout_set) 226 | fname = holdout_file 227 | with open(fname, 'w+') as fh: 228 | json.dump(holdout_set.tolist(), fh) 229 | else: 230 | sets = sh.stratified_split( 231 | id_list=ids, 232 | labels=load_label(ids), 233 | rel_size=[1]*n_folds, 234 | shuffle=True, 235 | seed=seed 236 | ) 237 | rest = ids 238 | 239 | for idx, test_set in enumerate(sets): 240 | train_valid_set = sh.adjoint_set(rest, test_set) 241 | [train_set, valid_set] = sh.stratified_split( 242 | id_list=train_valid_set, 243 | labels=load_label(train_valid_set), 244 | rel_size=[(tv_frac-1), 1], 245 | shuffle=True, 246 | seed=seed 247 | ) 248 | fname = sh.ins_id_into_fname(test_file_base, idx) 249 | with open(fname, 'w+') as fh: 250 | json.dump(test_set.tolist(), fh) 251 | 
fname = sh.ins_id_into_fname(train_file_base, idx) 252 | with open(fname, 'w+') as fh: 253 | json.dump(train_set.tolist(), fh) 254 | fname = sh.ins_id_into_fname(valid_file_base, idx) 255 | with open(fname, 'w+') as fh: 256 | json.dump(valid_set.tolist(), fh) 257 | 258 | 259 | def load_split(name, cvid): 260 | 261 | load_dir = os.path.join(root, db_dir, split_dir, name) 262 | 263 | test_file_base = os.path.join(load_dir, 'test.json') 264 | test_file = sh.ins_id_into_fname(test_file_base, cvid) 265 | with open(test_file) as fh: 266 | test_set = np.array(json.load(fh)) 267 | 268 | train_file_base = os.path.join(load_dir, 'train.json') 269 | train_file = sh.ins_id_into_fname(train_file_base, cvid) 270 | with open(train_file) as fh: 271 | train_set = np.array(json.load(fh)) 272 | 273 | valid_file_base = os.path.join(load_dir, 'valid.json') 274 | valid_file = sh.ins_id_into_fname(valid_file_base, cvid) 275 | with open(valid_file) as fh: 276 | valid_set = np.array(json.load(fh)) 277 | 278 | holdout_set = np.array([]) 279 | if 'holdout' in name: 280 | holdout_file = os.path.join(load_dir, 'holdout.json') 281 | with open(holdout_file) as fh: 282 | holdout_set = np.array(json.load(fh)) 283 | 284 | return [train_set, valid_set, test_set, holdout_set] 285 | 286 | '''*********************************************** 287 | * Heartrate stuff 288 | ***********************************************''' 289 | 290 | def load_heartrate(load_list): 291 | 292 | loadname = os.path.join(root, data_dir, 'training2017', 'heartrates', 'factors.json') 293 | with open(loadname) as fh: 294 | ref_content = fh.readlines() 295 | line_fmt = '{},{}\n' 296 | factor_sel = [int(parse.parse(line_fmt, ref_content[id-1])[1]) for id in range(1,n_sequences+1)] 297 | 298 | if load_list == []: 299 | return [] 300 | 301 | id_list = convert_to_id_list(load_list) 302 | 303 | file_list = [hr_fmt.format(id, factor_sel[id-1]) for id in id_list] 304 | peaks_list = [np.genfromtxt(file, delimiter=',')[:-1] for file in file_list] 305 | dists_list = [peaks[1:]-peaks[:-1] for peaks in peaks_list] 306 | mean_list = [np.mean(dists) for dists in dists_list] 307 | hr_list = [300*60/mean for mean in mean_list] 308 | 309 | return hr_list 310 | 311 | '''*********************************************** 312 | * Script 313 | ***********************************************''' 314 | 315 | if __name__ == '__main__': 316 | pass -------------------------------------------------------------------------------- /codes/network/network.py: -------------------------------------------------------------------------------- 1 | '''*********************************************** 2 | * 3 | * project: physioNet 4 | * created: 22.03.2017 5 | * purpose: abstract network interface-class 6 | * 7 | ***********************************************''' 8 | 9 | '''*********************************************** 10 | * Imports 11 | ***********************************************''' 12 | 13 | import tensorflow as tf 14 | import numpy as np 15 | import datetime as dt 16 | import os 17 | import json 18 | import csv 19 | import time 20 | 21 | from definitions import * 22 | import utils.dataset_helper as dsh 23 | import utils.transformations as trans 24 | import utils.nn_layers as nn 25 | import utils.metrics as met 26 | import utils.tf_helper as tfh 27 | 28 | 29 | '''*********************************************** 30 | * Classes 31 | ***********************************************''' 32 | 33 | class Network: 34 | 35 | '''*********************************************** 36 | * Initialisation 37 
| ***********************************************''' 38 | 39 | def __init__(self): 40 | 41 | #***************************************# 42 | # from training job: # 43 | #***************************************# 44 | 45 | self.job_name = None 46 | self.job_description = None 47 | self.worker = None 48 | self.cvid = None 49 | self.model = None 50 | self.split = None 51 | self.log_en = None 52 | self.log_test_score = None 53 | 54 | #***************************************# 55 | # from model: # 56 | #***************************************# 57 | 58 | self.model_name = None 59 | 60 | # preprocessing 61 | self.spectrogram = None 62 | self.nperseg = None 63 | self.noverlap = None 64 | 65 | # loss_function_parameters 66 | self.l2_penalty = None 67 | self.class_penalty = None 68 | 69 | # training_parameters 70 | self.learning_rate = None 71 | self.batch_size = None 72 | self.drop_rate = None 73 | self.exponential_decay = None 74 | self.dataset_compensation = None 75 | self.validation_step = None 76 | self.early_stop_wait = None 77 | 78 | # data_augmentation 79 | self.resampling = None 80 | self.resample_method = None 81 | self.zero_filter = None 82 | self.reload_step = None 83 | self.awgn = None 84 | 85 | #***************************************# 86 | # internal constants: # 87 | #***************************************# 88 | 89 | # max number of sequences trainable in 90 | # one shot (VRAM limitation of GPU) 91 | self.max_s = 20 92 | 93 | #***************************************# 94 | # internal variables: # 95 | #***************************************# 96 | 97 | # training phase: determines how the network 98 | # is routed (also for classification) 99 | self.phase = 0 100 | 101 | # for training 102 | self.termination_epoch = 0 103 | self.termination_cost = 0 104 | self.tmp_dir = None 105 | self.log_dir = None 106 | 107 | # input size parameters 108 | self.ext_len = None 109 | self.max_shape = None 110 | 111 | # dataset parameters 112 | self.n_classes = dsh.n_classes 113 | self.class_distribution = dsh.class_distribution 114 | self.class_tags = dsh.class_tags 115 | 116 | '''*********************************************** 117 | * Build network 118 | ***********************************************''' 119 | 120 | def build(self, longest_seq=dsh.longest_seq): 121 | 122 | # we work with the global default graph, 123 | # if .build() is called multiple times inside the same process, 124 | # multiple instances of the complete graph would exits inside the global default graph 125 | # as soon as a session runs, it claims its inputs have not been fed properly. 
126 | # Because the default graph then has twice as many inputs as expected 127 | # therefore reset the default graph before each build, 128 | # such that this error cannot possibly occur again 129 | tf.reset_default_graph() 130 | 131 | # all signals are extended to the length of the longest sequence in the dataset 132 | data = dsh.load_data(longest_seq) 133 | self.ext_len = data.shape[1] 134 | print('Extension length: {:}'.format(self.ext_len)) 135 | 136 | data = self.load_input(longest_seq) 137 | self.max_shape = [data.shape[1], data.shape[2]] 138 | 139 | with tf.device(default_dev): 140 | with tf.name_scope('inputs'): 141 | self.create_inputs() 142 | with tf.name_scope('model'): 143 | self.pred = self.create_model(self.data_subset) 144 | with tf.name_scope('cost_function'): 145 | self.cost = self.cost_function(self.pred, self.label_subset) 146 | update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) 147 | with tf.control_dependencies(update_ops): 148 | with tf.name_scope('optim'): 149 | if(self.exponential_decay): 150 | step = tf.Variable(0, trainable=False) 151 | rate = tf.train.exponential_decay(learning_rate=self.learning_rate, 152 | global_step=step, 153 | decay_steps=1, 154 | decay_rate=0.9999) 155 | optimizer = tf.train.AdamOptimizer(rate) 156 | self.optimizerizer = optimizer.minimize(self.cost, aggregation_method=2, global_step=step) 157 | else: 158 | optimizer = tf.train.AdamOptimizer(self.learning_rate) 159 | self.optimizerizer = optimizer.minimize(self.cost, aggregation_method=2) 160 | 161 | 162 | def create_inputs(self): 163 | [self.data_init, self.data] = tfh.create_input(dtype='int16', shape=[None, self.max_shape[0], self.max_shape[1]], name='data') 164 | [self.label_init, self.label] = tfh.create_input(dtype='float64', shape=[None, self.n_classes], name='labels') 165 | self.subset = tf.placeholder(dtype=tf.int32, shape=[None], name='batch_selector') 166 | self.dropout_rate = tf.placeholder(dtype=tf.float32, shape=[], name='dropout_rate') 167 | self.training_phase = tf.placeholder(dtype=tf.int32, shape=[], name='training_phase') 168 | self.is_training = tf.placeholder(dtype=tf.bool, shape=[], name='is_training') 169 | data = tf.gather(self.data, self.subset) 170 | if self.awgn: 171 | data = tf.cond( 172 | self.is_training, 173 | lambda: nn.awgn_channel(data, snr=3), 174 | lambda: data 175 | ) 176 | self.data_subset = data 177 | self.label_subset = tf.gather(self.label, self.subset) 178 | 179 | 180 | def create_model(self, data, feat_s): 181 | raise NotImplementedError("Must be overridden with proper definition of forward path") 182 | 183 | 184 | def cost_function(self, pred, label): 185 | 186 | eq_w = [1 for _ in self.class_distribution] 187 | occ_w = [100/r for r in self.class_distribution] 188 | c = self.class_penalty 189 | weights = [[e * (1-c) + o * c for e,o in zip(eq_w, occ_w)]] 190 | class_weights = tf.constant(weights, dtype=tf.float32) 191 | # select cost multiplier for each signal 192 | weight_per_sig = tf.matmul(class_weights, 193 | tf.transpose(label)) 194 | 195 | # generate l2 loss over all trainable weights (not biases) 196 | penal = tf.constant(self.l2_penalty, dtype=tf.float32) 197 | vars = tf.trainable_variables() 198 | vars = [v for v in vars if 199 | 'bias' not in v.name and 200 | 'batch_normalization' not in v.name and 201 | 'cond' not in v.name] 202 | lossL2 = tf.add_n([tf.nn.l2_loss(v) for v in vars]) 203 | 204 | softmax = tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=label) 205 | # scale with importance of respecting rare classes 206 | 
softmax = tf.matmul(weight_per_sig, tf.expand_dims(softmax, 1)) 207 | # overall cost is classification loss + l2 penalty of learned parameters 208 | cost = tf.reduce_mean(softmax + penal * lossL2) 209 | 210 | return cost 211 | 212 | 213 | '''*********************************************** 214 | * Load and preprocess inputs 215 | ***********************************************''' 216 | 217 | def load_input(self, load_list, is_training=False): 218 | data = dsh.load_data(load_list, ext_len=self.ext_len, data2d=True) 219 | 220 | if is_training: 221 | if self.zero_filter: 222 | data = trans.zero_filter(data, threshold=2, depth=10) 223 | if self.resampling: 224 | data = trans.random_resample(data, upscale_factor=1) 225 | 226 | if self.spectrogram: 227 | data = trans.spectrogram(data, nperseg=self.nperseg, noverlap=self.noverlap) 228 | else: 229 | data = np.expand_dims(data, axis=2) 230 | 231 | return data 232 | 233 | def load_label(self, load_list, output_type='onehot'): 234 | label = dsh.load_label(load_list, output_type) 235 | return label 236 | 237 | '''*********************************************** 238 | * Main functions: train, classify, predict_score 239 | ***********************************************''' 240 | 241 | def train(self, epochs, phase=0): 242 | 243 | try: 244 | 245 | print('Start training phase', phase) 246 | 247 | self.phase = phase 248 | if self.log_en: 249 | self.setup_logger() 250 | self.new_session() 251 | if self.phase > 0: 252 | load_dir = os.path.join(self.log_dir, 'trained_phase{:0>1}'.format(self.phase-1)) 253 | self.load_session(load_dir) 254 | max_valid_score = 0 255 | cost = 0 256 | 257 | for epoch in range(epochs): 258 | 259 | t = time.time() 260 | 261 | if epoch % self.validation_step == 0: 262 | [vacc,vscore,_] = self.predict_score(run_set=self.valid_set) 263 | print('Validation accuracy = {0:.2f}%'.format(vacc), '/ score = {0:.2f} %'.format(vscore)) 264 | self.do_log(tag='validation_acc_phase'+str(self.phase), value=vacc, epoch=epoch) 265 | self.do_log(tag='validation_score_phase'+str(self.phase), value=vscore, epoch=epoch) 266 | hot_log_fields = [epoch, self.termination_epoch, cost, vacc, vscore] 267 | if self.log_test_score: 268 | [tacc,tscore,_] = self.predict_score(run_set=self.test_set) 269 | self.do_log(tag='test_acc_phase'+str(self.phase), value=tacc, epoch=epoch) 270 | self.do_log(tag='test_score_phase'+str(self.phase), value=tscore, epoch=epoch) 271 | hot_log_fields.extend([tacc, tscore]) 272 | self.hot_log(fields=hot_log_fields) 273 | # if validation score improves: save network and update termination epoch/cost 274 | if vscore >= max_valid_score: 275 | print('Saving network...') 276 | self.save_session(self.tmp_dir) 277 | max_valid_score = vscore 278 | self.termination_epoch = epoch 279 | self.termination_cost = cost 280 | print('Best score is {0:.2f}%'.format(max_valid_score), 281 | '(achieved in Epoch {:0>4})'.format(self.termination_epoch)) 282 | # early stop if no improvement for 100 epochs 283 | if epoch > self.termination_epoch + self.early_stop_wait and epoch > 300: 284 | print('Early stopping triggered, as no improvement') 285 | break 286 | 287 | cost = 0 288 | if epoch == 0 or (epoch & self.reload_step == 0 and (self.zero_filter or self.resampling)): 289 | train_data = self.load_input(self.train_set, is_training=True) 290 | train_label = self.load_label(self.train_set) 291 | self.sess.run(self.data.initializer, feed_dict={self.data_init: train_data}) 292 | self.sess.run(self.label.initializer, feed_dict={self.label_init: train_label}) 293 
| batches = dsh.batch_splitter(train_data.shape[0], batch_s=self.batch_size, shuffle=True, 294 | labels=train_label, compensation_factor=self.dataset_compensation) 295 | for batch in batches: 296 | [_, c] = self.sess.run([self.optimizerizer, self.cost], 297 | feed_dict={ 298 | self.subset: batch, 299 | self.dropout_rate: self.drop_rate, 300 | self.training_phase: self.phase, 301 | self.is_training: True 302 | }) 303 | cost += c 304 | 305 | cost = cost/len(batches) 306 | self.do_log(tag='cost_phase'+str(self.phase), value=cost, epoch=epoch) 307 | dur = time.time() - t 308 | print('Epoch {:0>4}:'.format(epoch+1), 'cost= {:.6f}'.format(cost), '({:.1f}s)'.format(dur)) 309 | 310 | # after training store best parameters and the achieved accuracy/score 311 | self.load_session(self.tmp_dir) 312 | save_dir = os.path.join(self.log_dir, 'trained_phase{:0>1}'.format(self.phase)) 313 | self.save_session(save_dir) 314 | scoreDict = self.genScoreDict() 315 | score_file = os.path.join(save_dir, 'scores.json') 316 | with open(score_file, 'w+') as fh: 317 | json.dump(scoreDict, fh, indent=4, sort_keys=True) 318 | 319 | except KeyboardInterrupt: 320 | print('KeyboardInterrupt: running training phase canceled') 321 | 322 | 323 | def classify(self, id_list, phase=0): 324 | self.phase = phase 325 | self.new_session() 326 | load_dir = os.path.join(self.log_dir, 'trained_phase{:0>1}'.format(self.phase)) 327 | print(load_dir) 328 | self.load_session(load_dir) 329 | data = self.load_input(id_list) 330 | pred_prob = self.predict(data) 331 | pred_int = self.prob2label(pred_prob) 332 | pred_label = [self.class_tags[pred] for pred in pred_int] 333 | return [pred_label, pred_int, pred_prob] 334 | 335 | 336 | def predict_score(self, run_set): 337 | data = self.load_input(run_set) 338 | act_label = self.load_label(run_set, output_type='int') 339 | pred_prob = self.predict(data) 340 | pred_label = self.prob2label(pred_prob) 341 | acc = met.compute_accuracy(pred_label, act_label) 342 | [score, scdict] = met.compute_score(pred_label, act_label, class_tags=self.class_tags, verbose=True) 343 | return [acc, score, scdict] 344 | 345 | 346 | def predict(self, data): 347 | self.sess.run(self.data.initializer, feed_dict={self.data_init: data}) 348 | self.sess.run(self.label.initializer, feed_dict={self.label_init: [range(self.n_classes)]}) 349 | batches = dsh.batch_splitter(len(data), batch_s=self.max_s, shuffle=False) 350 | predict_list = [self.sess.run(self.pred, feed_dict={ 351 | self.subset: batch, 352 | self.dropout_rate: 0, 353 | self.training_phase: self.phase, 354 | self.is_training: False 355 | }) 356 | for batch in batches] 357 | predictions = np.concatenate(predict_list, axis=0) 358 | return predictions 359 | 360 | def prob2label(self, prob_vec): 361 | prediction = np.argmax(prob_vec, axis=1) 362 | return prediction 363 | 364 | '''*********************************************** 365 | * Logging 366 | ***********************************************''' 367 | 368 | def setup_logger(self): 369 | tf_log_dir = os.path.join(self.log_dir, 'tboard') 370 | if not os.path.exists(tf_log_dir): 371 | os.makedirs(tf_log_dir) 372 | logname = 'phase{:0>1}_'.format(self.phase) + dt.datetime.now().strftime('%Y_%m_%d_%H%M%S') 373 | logfile = os.path.join(tf_log_dir, logname) 374 | self.logger = tf.summary.FileWriter(logfile, graph=tf.get_default_graph()) 375 | 376 | def do_log(self, tag, value, epoch=None): 377 | if self.log_en: 378 | summary = tf.Summary() 379 | summary.value.add(tag=tag, simple_value=value) 380 | 
self.logger.add_summary(summary,epoch) 381 | self.logger.flush() 382 | 383 | def hot_log(self, fields): 384 | logfile = os.path.join(self.log_dir, 'hotlog.csv') 385 | with open(logfile, 'a+') as fh: 386 | writer = csv.writer(fh) 387 | writer.writerow(fields) 388 | 389 | '''*********************************************** 390 | * Create/save/load a session 391 | ***********************************************''' 392 | 393 | def new_session(self): 394 | init = tf.global_variables_initializer() 395 | config = tf.ConfigProto(allow_soft_placement=True) 396 | config.gpu_options.allow_growth = True 397 | self.sess = tf.Session(config=config) 398 | self.sess.run(init, feed_dict={self.training_phase:self.phase}) 399 | 400 | def save_session(self, save_dir): 401 | saver = tf.train.Saver() 402 | if not os.path.exists(save_dir): 403 | os.makedirs(save_dir) 404 | saver.save(self.sess, os.path.join(save_dir, 'session')) 405 | 406 | def load_session(self, load_dir): 407 | loader = tf.train.Saver() 408 | loader.restore(self.sess, os.path.join(load_dir, 'session')) 409 | 410 | '''*********************************************** 411 | * Load job from json-file 412 | ***********************************************''' 413 | 414 | def load_job(self, job): 415 | 416 | # load job and corresponding model+split 417 | self.jobFromDict(job_dict=job) 418 | self.load_model(model=self.model) 419 | self.load_split(split=self.split, cvid=self.cvid) 420 | 421 | # create directories 422 | self.tmp_dir = os.path.join(root, tmp_dir, self.worker) 423 | model_dir = os.path.join(root, log_dir, self.model_name) 424 | job_dir = os.path.join(model_dir, self.job_name) 425 | self.log_dir = os.path.join(job_dir, 'fold' + str(self.cvid)) 426 | if not os.path.exists(self.tmp_dir): 427 | os.makedirs(self.tmp_dir) 428 | if not os.path.exists(model_dir): 429 | os.makedirs(model_dir) 430 | if not os.path.exists(job_dir): 431 | os.makedirs(job_dir) 432 | if not os.path.exists(self.log_dir): 433 | os.makedirs(self.log_dir) 434 | 435 | # save job and model into log folder 436 | job_save_file = os.path.join(self.log_dir, 'job.json') 437 | with open(job_save_file, 'w+') as fh: 438 | json.dump(self.jobToDict(), fh, indent=4, sort_keys=True) 439 | model_save_file = os.path.join(self.log_dir, 'model.json') 440 | with open(model_save_file, 'w+') as fh: 441 | json.dump(self.modelToDict(), fh, indent=4, sort_keys=True) 442 | 443 | def load_model(self, model): 444 | model_path = model_fmt.format(model) 445 | print('Loading model from:', model_path) 446 | with open(model_path) as fh: 447 | model_str = fh.read() 448 | model_dict = json.loads(model_str) 449 | self.modelFromDict(model_dict) 450 | 451 | def load_split(self, split, cvid): 452 | split_path = dsh.split_fmt.format(split) 453 | print('Loading split from:', split_path) 454 | [self.train_set, self.valid_set, self.test_set, self.holdout_set] = dsh.load_split(split_path, self.cvid) 455 | self.train_set = self.set_filter(self.train_set) 456 | self.valid_set = self.set_filter(self.valid_set) 457 | self.test_set = self.set_filter(self.test_set) 458 | self.holdout_set = self.set_filter(self.holdout_set) 459 | print('Train set: ', self.train_set.shape[0], self.train_set[0:20]) 460 | print('Valid set: ', self.valid_set.shape[0], self.valid_set[0:10]) 461 | print('Test set: ', self.test_set.shape[0], self.test_set[0:10]) 462 | print('Holdout set:', self.holdout_set.shape[0], self.holdout_set[0:10]) 463 | 464 | def set_filter(self, ids): 465 | return ids 466 | 467 | 
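# Note (illustrative comment, not part of the original file): Network acts as an
# abstract base class; concrete models are expected to override the hooks that
# raise NotImplementedError (create_model, set_modelParameters,
# get_modelParameters), while set_filter above is an identity pass-through that
# a subclass may redefine to restrict the id lists returned by load_split. A
# hypothetical override, assuming the numpy id arrays and the label helpers
# defined in utils/dataset_helper.py, could look like:
#
#   def set_filter(self, ids):
#       if len(ids) == 0:
#           return ids
#       labels = dsh.load_label(ids, output_type='int')
#       # e.g. keep only the noise ('~') records
#       return ids[labels == dsh.class_tags.index('~')]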
'''*********************************************** 468 | * Dict conversion functions 469 | ***********************************************''' 470 | 471 | def genScoreDict(self): 472 | [tr_acc, tr_sc, tr_dict] = self.predict_score(run_set=self.train_set) 473 | [va_acc, va_sc, va_dict] = self.predict_score(run_set=self.valid_set) 474 | [te_acc, te_sc, te_dict] = self.predict_score(run_set=self.test_set) 475 | score_dict = { 476 | 'scoring': 477 | { 478 | 'acc_train' : tr_acc, 479 | 'sc_train' : tr_sc, 480 | 'acc_valid' : va_acc, 481 | 'sc_valid' : va_sc, 482 | 'acc_test' : te_acc, 483 | 'sc_test' : te_sc 484 | }, 485 | 'split_scoring': 486 | { 487 | 'train' : tr_dict, 488 | 'valid' : va_dict, 489 | 'test' : te_dict 490 | }, 491 | 'termination': 492 | { 493 | 'termination_epoch': self.termination_epoch, 494 | 'termination_cost': self.termination_cost 495 | } 496 | } 497 | return score_dict 498 | 499 | def jobFromDict(self, job_dict): 500 | self.job_name = job_dict['name'] 501 | self.job_description = job_dict['description'] 502 | self.model = job_dict['model'] 503 | self.split = job_dict['split'] 504 | self.log_en = job_dict['log_en'] 505 | self.log_test_score = job_dict['log_test_score'] 506 | self.cvid = job_dict['cvid'] 507 | self.worker = job_dict['worker'] 508 | 509 | def jobToDict(self): 510 | job_dict = { 511 | 'name' : self.job_name, 512 | 'description' : self.job_description, 513 | 'model' : self.model, 514 | 'split' : self.split, 515 | 'log_en' : self.log_en, 516 | 'log_test_score' : self.log_test_score, 517 | 'cvid' : self.cvid, 518 | 'worker' : self.worker 519 | } 520 | return job_dict 521 | 522 | def modelFromDict(self, model_dict): 523 | self.model_name = model_dict['model_name'] 524 | mp = model_dict['model_parameters'] 525 | pp = model_dict['preprocessing'] 526 | lp = model_dict['loss_function_parameters'] 527 | tp = model_dict['training_parameters'] 528 | da = model_dict['data_augmentation'] 529 | self.set_modelParameters(mp) 530 | self.spectrogram = pp['spectrogram'] 531 | self.nperseg = pp['nperseg'] 532 | self.noverlap = pp['noverlap'] 533 | self.l2_penalty = lp['l2_penalty'] 534 | self.class_penalty = lp['class_penalty'] 535 | self.learning_rate = tp['learning_rate'] 536 | self.batch_size = tp['batch_size'] 537 | self.drop_rate = tp['drop_rate'] 538 | self.exponential_decay = tp['exponential_decay'] 539 | self.dataset_compensation = tp['dataset_compensation'] 540 | self.validation_step = tp['validation_step'] 541 | self.early_stop_wait = tp['early_stop_wait'] 542 | self.resampling = da['resampling'] 543 | self.resample_method = da['resample_method'] 544 | self.zero_filter = da['zero_filter'] 545 | self.reload_step = da['reload_step'] 546 | self.awgn = da['awgn'] 547 | 548 | def set_modelParameters(self, param_dict): 549 | raise NotImplementedError("Must be overridden by specific model, parametrize model from dictionary") 550 | 551 | def modelToDict(self): 552 | model_dict = { 553 | 'model_name': self.model_name, 554 | 'model_parameters': self.get_modelParameters(), 555 | 'preprocessing': 556 | { 557 | 'spectrogram' : self.spectrogram, 558 | 'nperseg' : self.nperseg, 559 | 'noverlap' : self.noverlap 560 | }, 561 | 'loss_function_parameters': 562 | { 563 | 'l2_penalty' : self.l2_penalty, 564 | 'class_penalty' : self.class_penalty 565 | }, 566 | 'training_parameters': 567 | { 568 | 'learning_rate' : self.learning_rate, 569 | 'batch_size' : self.batch_size, 570 | 'drop_rate' : self.drop_rate, 571 | 'exponential_decay' : self.exponential_decay, 572 | 'dataset_compensation' : 
self.dataset_compensation, 573 | 'validation_step' : self.validation_step, 574 | 'early_stop_wait' : self.early_stop_wait 575 | }, 576 | 'data_augmentation': { 577 | 'resampling' : self.resampling, 578 | 'resample_method' : self.resample_method, 579 | 'zero_filter' : self.zero_filter, 580 | 'reload_step' : self.reload_step, 581 | 'awgn' : self.awgn 582 | } 583 | } 584 | return model_dict 585 | 586 | def get_modelParameters(self): 587 | raise NotImplementedError("Must be overridden by specific model, dump all architectural parameters") -------------------------------------------------------------------------------- /data/training2017/splits/split_5_6_14/train2.json: -------------------------------------------------------------------------------- 1 | [1, 3, 6, 7, 8, 9, 10, 12, 13, 16, 17, 18, 20, 22, 23, 24, 25, 28, 31, 32, 34, 35, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 52, 53, 54, 55, 57, 58, 59, 60, 61, 65, 66, 67, 69, 70, 72, 73, 76, 78, 79, 80, 83, 84, 85, 87, 90, 91, 92, 94, 95, 96, 98, 100, 101, 104, 105, 106, 107, 108, 109, 110, 111, 114, 115, 117, 118, 119, 121, 123, 124, 125, 127, 128, 129, 131, 133, 134, 135, 136, 137, 139, 140, 143, 146, 147, 148, 149, 150, 151, 152, 155, 156, 157, 159, 160, 162, 163, 164, 165, 167, 168, 169, 170, 171, 174, 181, 183, 184, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 197, 198, 201, 202, 203, 204, 206, 207, 208, 209, 215, 216, 217, 218, 220, 221, 225, 227, 230, 232, 233, 234, 235, 236, 239, 240, 241, 242, 243, 244, 246, 247, 248, 249, 250, 251, 252, 253, 257, 258, 260, 262, 263, 264, 265, 268, 270, 271, 272, 274, 276, 278, 279, 280, 281, 283, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 299, 300, 301, 302, 303, 304, 305, 307, 308, 309, 311, 312, 313, 314, 315, 316, 318, 319, 321, 323, 324, 325, 326, 327, 328, 329, 330, 331, 333, 335, 337, 338, 339, 340, 341, 344, 345, 346, 347, 348, 351, 354, 355, 356, 358, 359, 361, 362, 363, 368, 370, 372, 374, 376, 378, 379, 380, 381, 382, 385, 387, 388, 389, 392, 393, 394, 395, 397, 399, 400, 401, 402, 403, 406, 409, 412, 413, 414, 415, 416, 417, 418, 421, 422, 424, 425, 427, 428, 429, 432, 433, 434, 436, 437, 438, 439, 440, 442, 443, 444, 445, 446, 448, 449, 450, 451, 452, 453, 454, 456, 458, 459, 461, 463, 464, 465, 466, 467, 468, 469, 472, 473, 474, 475, 479, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 494, 497, 498, 500, 503, 505, 507, 512, 513, 514, 515, 516, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 531, 533, 534, 535, 537, 538, 539, 540, 541, 542, 546, 547, 551, 556, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 571, 572, 573, 574, 576, 577, 579, 580, 581, 582, 584, 586, 587, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599, 600, 601, 602, 603, 604, 605, 606, 609, 610, 611, 612, 613, 614, 615, 617, 619, 620, 621, 622, 623, 624, 626, 632, 633, 635, 636, 637, 639, 640, 641, 644, 645, 646, 647, 648, 649, 651, 653, 654, 655, 657, 658, 659, 660, 661, 662, 663, 664, 665, 669, 670, 671, 673, 675, 676, 678, 680, 682, 683, 684, 685, 686, 688, 689, 691, 692, 693, 694, 698, 699, 701, 703, 705, 706, 707, 708, 709, 710, 711, 712, 714, 715, 716, 718, 719, 720, 721, 722, 723, 724, 727, 728, 730, 731, 733, 734, 736, 738, 740, 741, 742, 743, 744, 745, 747, 749, 750, 753, 754, 755, 758, 760, 761, 763, 765, 767, 769, 770, 771, 772, 773, 774, 775, 777, 778, 779, 780, 781, 782, 784, 787, 789, 790, 791, 792, 793, 794, 797, 799, 802, 804, 806, 807, 808, 809, 810, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 830, 832, 836, 838, 
839, 840, 842, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 857, 858, 859, 860, 861, 862, 864, 867, 868, 870, 871, 877, 879, 880, 881, 882, 885, 886, 887, 889, 890, 891, 893, 894, 895, 899, 900, 901, 907, 908, 910, 912, 916, 917, 918, 919, 920, 922, 924, 925, 926, 927, 928, 929, 931, 933, 934, 935, 936, 937, 938, 940, 941, 942, 944, 945, 946, 947, 948, 951, 956, 957, 958, 959, 960, 963, 964, 965, 967, 968, 969, 970, 971, 972, 973, 974, 978, 979, 980, 982, 983, 986, 987, 988, 989, 990, 991, 992, 994, 996, 997, 999, 1000, 1002, 1003, 1004, 1005, 1006, 1007, 1009, 1010, 1012, 1017, 1018, 1020, 1021, 1022, 1023, 1024, 1025, 1026, 1027, 1028, 1029, 1031, 1032, 1036, 1038, 1040, 1041, 1042, 1043, 1044, 1047, 1048, 1049, 1051, 1052, 1053, 1054, 1057, 1059, 1061, 1063, 1065, 1066, 1067, 1068, 1070, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1078, 1079, 1081, 1084, 1085, 1086, 1087, 1088, 1089, 1090, 1091, 1093, 1094, 1095, 1096, 1097, 1098, 1099, 1100, 1101, 1102, 1105, 1106, 1107, 1110, 1111, 1113, 1114, 1115, 1116, 1117, 1119, 1120, 1123, 1124, 1125, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1134, 1136, 1138, 1139, 1140, 1141, 1144, 1146, 1147, 1148, 1149, 1150, 1154, 1156, 1157, 1159, 1162, 1163, 1165, 1167, 1168, 1169, 1170, 1171, 1173, 1174, 1175, 1176, 1177, 1178, 1179, 1181, 1182, 1183, 1186, 1190, 1191, 1192, 1193, 1194, 1195, 1196, 1197, 1198, 1199, 1201, 1203, 1204, 1205, 1206, 1207, 1208, 1209, 1210, 1211, 1212, 1213, 1214, 1215, 1216, 1217, 1218, 1219, 1221, 1222, 1224, 1226, 1227, 1228, 1229, 1231, 1232, 1233, 1234, 1235, 1237, 1238, 1239, 1240, 1241, 1243, 1244, 1245, 1247, 1248, 1249, 1250, 1254, 1255, 1257, 1260, 1261, 1262, 1263, 1264, 1266, 1268, 1272, 1273, 1275, 1277, 1278, 1280, 1281, 1282, 1283, 1285, 1286, 1287, 1288, 1291, 1293, 1294, 1296, 1297, 1299, 1300, 1301, 1302, 1304, 1305, 1311, 1312, 1313, 1316, 1317, 1318, 1319, 1320, 1322, 1323, 1324, 1325, 1326, 1327, 1328, 1331, 1332, 1333, 1334, 1335, 1336, 1337, 1338, 1339, 1340, 1341, 1343, 1345, 1347, 1348, 1350, 1351, 1353, 1354, 1355, 1356, 1357, 1360, 1363, 1364, 1365, 1368, 1369, 1370, 1371, 1373, 1374, 1375, 1376, 1378, 1380, 1381, 1382, 1383, 1384, 1389, 1391, 1393, 1394, 1395, 1398, 1399, 1400, 1403, 1404, 1405, 1406, 1408, 1409, 1410, 1411, 1412, 1414, 1415, 1418, 1420, 1421, 1423, 1424, 1426, 1427, 1430, 1431, 1433, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1447, 1448, 1450, 1451, 1452, 1453, 1454, 1455, 1456, 1457, 1460, 1463, 1466, 1467, 1469, 1470, 1472, 1474, 1475, 1476, 1477, 1479, 1480, 1484, 1485, 1486, 1490, 1491, 1493, 1494, 1495, 1497, 1498, 1499, 1500, 1501, 1503, 1504, 1505, 1507, 1509, 1512, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1524, 1526, 1527, 1528, 1529, 1531, 1532, 1533, 1536, 1537, 1538, 1539, 1541, 1542, 1543, 1545, 1546, 1547, 1548, 1549, 1550, 1551, 1552, 1553, 1555, 1556, 1558, 1561, 1563, 1564, 1565, 1567, 1568, 1569, 1571, 1572, 1573, 1574, 1576, 1578, 1581, 1583, 1584, 1585, 1586, 1587, 1588, 1589, 1591, 1592, 1593, 1596, 1598, 1600, 1601, 1603, 1604, 1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1614, 1615, 1617, 1618, 1619, 1620, 1621, 1622, 1623, 1624, 1625, 1626, 1627, 1628, 1630, 1633, 1634, 1636, 1638, 1639, 1640, 1641, 1642, 1643, 1644, 1645, 1646, 1647, 1648, 1651, 1653, 1655, 1657, 1659, 1660, 1661, 1662, 1663, 1664, 1669, 1670, 1673, 1674, 1675, 1676, 1677, 1678, 1680, 1681, 1682, 1684, 1685, 1687, 1688, 1691, 1692, 1694, 1695, 1696, 1697, 1699, 1702, 1703, 1704, 1705, 1706, 1707, 1710, 1712, 1713, 1714, 1715, 1716, 
1717, 1720, 1721, 1722, 1724, 1725, 1726, 1728, 1729, 1731, 1732, 1733, 1737, 1738, 1739, 1740, 1742, 1743, 1744, 1745, 1746, 1747, 1748, 1752, 1753, 1754, 1755, 1756, 1757, 1758, 1759, 1760, 1764, 1765, 1766, 1767, 1769, 1770, 1771, 1772, 1773, 1774, 1776, 1777, 1778, 1779, 1780, 1781, 1782, 1785, 1786, 1788, 1789, 1792, 1794, 1797, 1798, 1799, 1800, 1801, 1802, 1803, 1804, 1805, 1806, 1813, 1814, 1816, 1819, 1820, 1822, 1823, 1825, 1826, 1828, 1829, 1830, 1832, 1834, 1835, 1837, 1838, 1840, 1841, 1842, 1843, 1844, 1846, 1847, 1848, 1849, 1850, 1854, 1855, 1856, 1861, 1862, 1864, 1865, 1866, 1868, 1869, 1871, 1872, 1873, 1875, 1877, 1880, 1881, 1882, 1884, 1885, 1886, 1887, 1888, 1889, 1891, 1893, 1894, 1895, 1896, 1897, 1898, 1899, 1901, 1904, 1905, 1906, 1907, 1908, 1910, 1911, 1914, 1915, 1917, 1919, 1921, 1922, 1923, 1925, 1927, 1928, 1929, 1930, 1931, 1933, 1934, 1935, 1936, 1937, 1942, 1943, 1944, 1945, 1946, 1947, 1948, 1949, 1951, 1952, 1954, 1955, 1956, 1957, 1958, 1960, 1962, 1965, 1968, 1969, 1970, 1971, 1972, 1974, 1976, 1978, 1979, 1980, 1981, 1982, 1984, 1985, 1987, 1989, 1991, 1992, 1994, 1995, 1996, 1997, 2000, 2001, 2003, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2014, 2015, 2017, 2018, 2020, 2021, 2025, 2027, 2028, 2029, 2030, 2031, 2033, 2035, 2036, 2037, 2039, 2040, 2041, 2042, 2044, 2045, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2063, 2064, 2065, 2066, 2067, 2068, 2069, 2071, 2072, 2074, 2075, 2076, 2077, 2080, 2082, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2090, 2092, 2093, 2094, 2095, 2096, 2097, 2100, 2101, 2103, 2104, 2105, 2108, 2112, 2114, 2115, 2117, 2118, 2119, 2121, 2122, 2123, 2125, 2127, 2128, 2129, 2131, 2133, 2135, 2137, 2138, 2139, 2141, 2142, 2143, 2144, 2146, 2147, 2148, 2149, 2150, 2152, 2153, 2158, 2159, 2160, 2162, 2163, 2166, 2167, 2170, 2173, 2174, 2175, 2176, 2177, 2178, 2179, 2180, 2181, 2183, 2184, 2187, 2188, 2190, 2191, 2192, 2194, 2195, 2196, 2197, 2199, 2200, 2201, 2204, 2206, 2207, 2208, 2209, 2210, 2211, 2212, 2214, 2215, 2216, 2218, 2220, 2221, 2222, 2225, 2226, 2227, 2229, 2230, 2231, 2232, 2234, 2235, 2237, 2238, 2239, 2241, 2242, 2243, 2244, 2245, 2246, 2249, 2250, 2251, 2252, 2258, 2259, 2260, 2262, 2263, 2264, 2266, 2267, 2268, 2269, 2270, 2271, 2272, 2273, 2274, 2275, 2276, 2277, 2278, 2279, 2280, 2281, 2283, 2287, 2288, 2290, 2292, 2293, 2300, 2301, 2302, 2303, 2304, 2305, 2306, 2307, 2308, 2309, 2310, 2311, 2313, 2314, 2315, 2316, 2317, 2322, 2323, 2324, 2325, 2326, 2327, 2329, 2330, 2331, 2332, 2336, 2337, 2340, 2341, 2342, 2345, 2347, 2348, 2350, 2352, 2354, 2355, 2357, 2358, 2359, 2360, 2362, 2363, 2364, 2365, 2367, 2368, 2369, 2370, 2371, 2373, 2374, 2375, 2376, 2378, 2381, 2382, 2384, 2385, 2386, 2388, 2390, 2391, 2393, 2394, 2395, 2396, 2397, 2399, 2400, 2402, 2403, 2404, 2405, 2407, 2408, 2411, 2413, 2414, 2415, 2418, 2419, 2425, 2428, 2429, 2430, 2431, 2432, 2434, 2435, 2436, 2437, 2438, 2439, 2440, 2441, 2443, 2444, 2447, 2448, 2450, 2454, 2456, 2457, 2460, 2461, 2464, 2467, 2468, 2469, 2470, 2472, 2473, 2475, 2476, 2477, 2478, 2479, 2480, 2482, 2484, 2485, 2486, 2487, 2489, 2490, 2491, 2492, 2493, 2494, 2495, 2496, 2498, 2499, 2501, 2504, 2505, 2506, 2509, 2510, 2513, 2514, 2515, 2516, 2518, 2520, 2521, 2522, 2523, 2525, 2527, 2529, 2530, 2531, 2532, 2534, 2536, 2537, 2538, 2540, 2541, 2542, 2543, 2544, 2546, 2549, 2550, 2552, 2554, 2556, 2557, 2560, 2561, 2562, 2563, 2564, 2566, 2567, 2568, 2569, 2574, 2575, 2576, 2577, 2578, 2579, 2580, 2582, 2583, 2584, 2587, 2589, 2591, 2592, 
2593, 2594, 2596, 2597, 2599, 2601, 2602, 2604, 2606, 2609, 2610, 2612, 2613, 2615, 2616, 2617, 2618, 2619, 2622, 2623, 2624, 2625, 2629, 2630, 2631, 2632, 2633, 2634, 2635, 2636, 2637, 2638, 2641, 2642, 2643, 2644, 2646, 2648, 2649, 2651, 2653, 2654, 2656, 2657, 2661, 2662, 2664, 2665, 2667, 2669, 2670, 2671, 2672, 2673, 2674, 2676, 2677, 2678, 2679, 2681, 2683, 2685, 2688, 2689, 2690, 2691, 2692, 2693, 2694, 2695, 2696, 2697, 2698, 2699, 2700, 2701, 2702, 2703, 2704, 2705, 2706, 2709, 2710, 2711, 2713, 2714, 2715, 2717, 2718, 2720, 2721, 2723, 2724, 2725, 2727, 2728, 2730, 2732, 2733, 2734, 2735, 2737, 2738, 2739, 2743, 2744, 2745, 2749, 2750, 2752, 2753, 2754, 2756, 2758, 2759, 2760, 2761, 2763, 2764, 2765, 2766, 2767, 2769, 2771, 2772, 2773, 2774, 2776, 2777, 2779, 2780, 2781, 2782, 2783, 2785, 2787, 2788, 2789, 2793, 2794, 2795, 2796, 2797, 2798, 2799, 2800, 2802, 2803, 2804, 2805, 2806, 2807, 2809, 2810, 2812, 2813, 2815, 2816, 2818, 2819, 2820, 2822, 2824, 2825, 2826, 2827, 2828, 2829, 2830, 2831, 2832, 2834, 2837, 2841, 2842, 2843, 2844, 2845, 2846, 2848, 2849, 2850, 2851, 2853, 2854, 2856, 2857, 2858, 2859, 2863, 2864, 2867, 2868, 2869, 2870, 2872, 2874, 2878, 2881, 2882, 2883, 2884, 2886, 2887, 2888, 2889, 2891, 2892, 2893, 2894, 2896, 2897, 2899, 2900, 2901, 2902, 2904, 2905, 2906, 2907, 2909, 2910, 2911, 2912, 2913, 2914, 2916, 2918, 2919, 2920, 2921, 2922, 2923, 2924, 2926, 2928, 2929, 2930, 2934, 2935, 2936, 2938, 2940, 2941, 2944, 2945, 2946, 2947, 2948, 2949, 2951, 2952, 2953, 2955, 2956, 2957, 2959, 2960, 2962, 2963, 2964, 2965, 2967, 2970, 2972, 2974, 2976, 2977, 2978, 2979, 2980, 2981, 2982, 2983, 2984, 2986, 2990, 2992, 2993, 2994, 2996, 2997, 2998, 3002, 3003, 3004, 3005, 3010, 3011, 3013, 3014, 3016, 3017, 3019, 3021, 3025, 3026, 3029, 3030, 3031, 3032, 3033, 3039, 3040, 3041, 3042, 3047, 3048, 3049, 3050, 3051, 3052, 3053, 3054, 3058, 3059, 3061, 3063, 3064, 3065, 3066, 3067, 3068, 3072, 3073, 3074, 3075, 3076, 3078, 3079, 3080, 3081, 3082, 3085, 3086, 3087, 3088, 3091, 3092, 3093, 3094, 3096, 3097, 3099, 3100, 3101, 3102, 3104, 3105, 3107, 3110, 3112, 3113, 3115, 3116, 3117, 3118, 3119, 3120, 3121, 3122, 3125, 3126, 3128, 3131, 3132, 3133, 3135, 3137, 3138, 3140, 3141, 3144, 3145, 3146, 3147, 3148, 3149, 3151, 3152, 3153, 3154, 3156, 3160, 3163, 3164, 3165, 3167, 3168, 3169, 3172, 3173, 3174, 3176, 3177, 3178, 3179, 3180, 3181, 3183, 3184, 3187, 3188, 3189, 3190, 3193, 3194, 3199, 3201, 3202, 3203, 3205, 3206, 3210, 3213, 3215, 3216, 3217, 3218, 3219, 3220, 3221, 3222, 3224, 3225, 3227, 3229, 3231, 3233, 3234, 3236, 3237, 3238, 3239, 3241, 3243, 3244, 3245, 3246, 3247, 3248, 3250, 3254, 3256, 3257, 3258, 3260, 3262, 3263, 3264, 3265, 3266, 3268, 3270, 3272, 3274, 3275, 3276, 3277, 3278, 3279, 3280, 3281, 3282, 3286, 3287, 3291, 3292, 3293, 3294, 3295, 3296, 3297, 3298, 3299, 3300, 3301, 3306, 3309, 3310, 3311, 3314, 3316, 3317, 3319, 3320, 3321, 3322, 3323, 3324, 3326, 3327, 3328, 3329, 3330, 3331, 3332, 3333, 3334, 3335, 3336, 3337, 3339, 3340, 3342, 3343, 3344, 3345, 3346, 3347, 3348, 3350, 3353, 3354, 3355, 3356, 3357, 3361, 3362, 3363, 3365, 3366, 3367, 3368, 3370, 3371, 3372, 3373, 3375, 3377, 3378, 3379, 3380, 3381, 3382, 3384, 3385, 3386, 3389, 3390, 3391, 3392, 3393, 3394, 3397, 3398, 3399, 3400, 3401, 3402, 3403, 3405, 3406, 3407, 3409, 3410, 3411, 3414, 3416, 3417, 3420, 3421, 3422, 3423, 3424, 3425, 3426, 3427, 3429, 3430, 3433, 3435, 3436, 3438, 3441, 3442, 3443, 3444, 3445, 3448, 3450, 3451, 3453, 3455, 3457, 3458, 3459, 3461, 3462, 3467, 3470, 3471, 
3472, 3473, 3474, 3476, 3477, 3478, 3479, 3480, 3481, 3482, 3483, 3484, 3485, 3486, 3488, 3490, 3491, 3495, 3496, 3497, 3498, 3499, 3500, 3502, 3503, 3505, 3508, 3512, 3515, 3517, 3518, 3519, 3520, 3521, 3522, 3524, 3525, 3526, 3527, 3529, 3530, 3531, 3533, 3534, 3535, 3538, 3539, 3542, 3544, 3545, 3546, 3548, 3549, 3550, 3551, 3552, 3554, 3555, 3557, 3558, 3559, 3560, 3561, 3562, 3563, 3565, 3567, 3571, 3572, 3573, 3574, 3576, 3577, 3584, 3586, 3587, 3588, 3589, 3590, 3592, 3594, 3595, 3596, 3597, 3598, 3599, 3600, 3602, 3603, 3604, 3605, 3606, 3607, 3608, 3609, 3611, 3612, 3613, 3614, 3615, 3617, 3618, 3620, 3621, 3626, 3628, 3629, 3630, 3631, 3638, 3639, 3641, 3642, 3644, 3645, 3647, 3649, 3650, 3652, 3655, 3656, 3657, 3659, 3660, 3662, 3663, 3665, 3666, 3668, 3670, 3671, 3672, 3674, 3675, 3676, 3678, 3679, 3681, 3685, 3686, 3689, 3691, 3692, 3694, 3695, 3697, 3700, 3701, 3702, 3703, 3705, 3706, 3707, 3708, 3709, 3710, 3712, 3713, 3717, 3718, 3719, 3720, 3721, 3722, 3724, 3725, 3727, 3728, 3733, 3734, 3735, 3737, 3739, 3740, 3742, 3745, 3747, 3748, 3749, 3750, 3751, 3752, 3753, 3754, 3757, 3760, 3761, 3762, 3763, 3764, 3765, 3766, 3768, 3769, 3770, 3772, 3773, 3774, 3775, 3776, 3777, 3780, 3783, 3784, 3785, 3786, 3789, 3790, 3791, 3795, 3796, 3797, 3798, 3800, 3802, 3803, 3804, 3805, 3806, 3807, 3808, 3810, 3812, 3813, 3814, 3815, 3817, 3818, 3819, 3821, 3823, 3824, 3825, 3826, 3828, 3830, 3831, 3833, 3834, 3835, 3836, 3837, 3839, 3840, 3842, 3846, 3847, 3848, 3850, 3851, 3852, 3854, 3856, 3858, 3859, 3864, 3865, 3867, 3868, 3870, 3871, 3872, 3873, 3874, 3875, 3880, 3884, 3887, 3888, 3889, 3890, 3891, 3892, 3895, 3897, 3898, 3899, 3901, 3905, 3907, 3909, 3911, 3917, 3918, 3921, 3922, 3923, 3925, 3926, 3927, 3932, 3933, 3934, 3936, 3937, 3938, 3939, 3940, 3942, 3943, 3944, 3945, 3946, 3947, 3949, 3952, 3953, 3954, 3955, 3956, 3957, 3958, 3960, 3961, 3963, 3964, 3965, 3966, 3969, 3970, 3973, 3974, 3975, 3976, 3978, 3979, 3980, 3981, 3982, 3983, 3985, 3986, 3987, 3988, 3989, 3990, 3991, 3994, 3997, 3998, 4000, 4001, 4002, 4005, 4006, 4010, 4011, 4013, 4014, 4016, 4017, 4018, 4020, 4021, 4022, 4024, 4025, 4027, 4028, 4029, 4030, 4031, 4032, 4033, 4034, 4035, 4037, 4039, 4040, 4041, 4042, 4044, 4048, 4050, 4051, 4053, 4055, 4056, 4057, 4060, 4061, 4065, 4066, 4067, 4068, 4069, 4071, 4073, 4074, 4075, 4076, 4078, 4079, 4080, 4082, 4085, 4086, 4087, 4088, 4090, 4091, 4092, 4093, 4094, 4095, 4098, 4099, 4100, 4101, 4102, 4103, 4104, 4105, 4106, 4108, 4112, 4115, 4116, 4117, 4118, 4119, 4122, 4123, 4125, 4126, 4127, 4128, 4130, 4131, 4132, 4133, 4134, 4135, 4136, 4138, 4139, 4140, 4142, 4143, 4144, 4145, 4149, 4150, 4151, 4152, 4155, 4156, 4158, 4161, 4162, 4163, 4164, 4167, 4168, 4169, 4173, 4174, 4178, 4179, 4181, 4183, 4184, 4185, 4186, 4187, 4188, 4189, 4190, 4192, 4193, 4194, 4198, 4199, 4203, 4206, 4208, 4209, 4211, 4212, 4213, 4217, 4218, 4219, 4220, 4221, 4222, 4223, 4224, 4225, 4227, 4228, 4229, 4230, 4231, 4232, 4233, 4234, 4235, 4236, 4237, 4238, 4239, 4240, 4241, 4242, 4250, 4251, 4252, 4253, 4254, 4256, 4258, 4262, 4265, 4266, 4267, 4271, 4272, 4276, 4277, 4279, 4280, 4281, 4282, 4287, 4288, 4289, 4290, 4291, 4292, 4294, 4297, 4298, 4299, 4302, 4303, 4304, 4305, 4306, 4307, 4308, 4310, 4312, 4313, 4315, 4316, 4319, 4320, 4321, 4324, 4325, 4327, 4328, 4329, 4334, 4336, 4337, 4338, 4339, 4341, 4344, 4345, 4346, 4348, 4349, 4350, 4351, 4352, 4353, 4354, 4356, 4359, 4360, 4362, 4363, 4364, 4366, 4368, 4369, 4370, 4371, 4372, 4373, 4377, 4378, 4379, 4381, 4382, 4383, 4385, 4386, 4389, 
4390, 4392, 4393, 4395, 4397, 4399, 4401, 4404, 4405, 4406, 4407, 4409, 4410, 4411, 4412, 4413, 4414, 4416, 4417, 4418, 4419, 4421, 4429, 4430, 4431, 4434, 4436, 4437, 4441, 4442, 4444, 4445, 4446, 4447, 4448, 4449, 4450, 4451, 4452, 4453, 4454, 4458, 4459, 4460, 4461, 4462, 4464, 4465, 4466, 4469, 4470, 4471, 4473, 4474, 4475, 4477, 4478, 4479, 4480, 4481, 4482, 4483, 4485, 4488, 4489, 4493, 4494, 4495, 4497, 4498, 4499, 4500, 4502, 4503, 4504, 4505, 4506, 4507, 4509, 4511, 4512, 4513, 4514, 4515, 4516, 4517, 4518, 4519, 4520, 4521, 4522, 4523, 4524, 4525, 4530, 4531, 4532, 4533, 4540, 4543, 4544, 4545, 4547, 4548, 4549, 4550, 4552, 4554, 4556, 4559, 4560, 4561, 4562, 4563, 4566, 4567, 4568, 4570, 4571, 4572, 4574, 4577, 4578, 4580, 4581, 4582, 4583, 4584, 4585, 4586, 4587, 4588, 4589, 4596, 4598, 4599, 4601, 4602, 4603, 4605, 4606, 4607, 4609, 4610, 4613, 4614, 4615, 4617, 4618, 4620, 4621, 4622, 4623, 4624, 4625, 4626, 4627, 4628, 4629, 4630, 4631, 4633, 4636, 4637, 4638, 4641, 4642, 4643, 4644, 4645, 4646, 4647, 4648, 4649, 4650, 4651, 4652, 4654, 4656, 4657, 4658, 4660, 4661, 4663, 4664, 4665, 4666, 4667, 4668, 4670, 4671, 4672, 4674, 4675, 4677, 4678, 4680, 4681, 4682, 4683, 4685, 4688, 4690, 4691, 4692, 4695, 4697, 4700, 4703, 4704, 4706, 4708, 4710, 4711, 4712, 4713, 4714, 4715, 4716, 4717, 4718, 4720, 4722, 4726, 4729, 4730, 4731, 4732, 4733, 4734, 4735, 4736, 4737, 4739, 4740, 4741, 4742, 4744, 4745, 4746, 4747, 4748, 4749, 4750, 4751, 4752, 4753, 4754, 4755, 4756, 4759, 4760, 4762, 4767, 4769, 4770, 4771, 4773, 4775, 4776, 4779, 4781, 4782, 4786, 4788, 4789, 4790, 4791, 4792, 4793, 4794, 4796, 4799, 4800, 4801, 4802, 4805, 4806, 4807, 4809, 4810, 4811, 4813, 4814, 4815, 4816, 4817, 4818, 4819, 4820, 4821, 4823, 4824, 4826, 4827, 4828, 4830, 4833, 4834, 4835, 4838, 4839, 4840, 4841, 4842, 4844, 4845, 4846, 4847, 4848, 4853, 4854, 4856, 4858, 4859, 4860, 4861, 4862, 4864, 4867, 4868, 4870, 4874, 4875, 4876, 4877, 4879, 4880, 4881, 4882, 4883, 4884, 4885, 4886, 4887, 4888, 4891, 4893, 4894, 4895, 4896, 4898, 4899, 4901, 4902, 4903, 4905, 4906, 4907, 4909, 4910, 4911, 4912, 4913, 4914, 4915, 4916, 4917, 4919, 4920, 4924, 4925, 4926, 4927, 4928, 4929, 4931, 4932, 4935, 4936, 4937, 4939, 4940, 4942, 4943, 4945, 4946, 4947, 4948, 4949, 4950, 4952, 4953, 4955, 4960, 4961, 4962, 4964, 4965, 4967, 4968, 4969, 4970, 4972, 4974, 4975, 4976, 4977, 4978, 4979, 4980, 4981, 4982, 4983, 4985, 4986, 4987, 4988, 4989, 4990, 4991, 4992, 4993, 4995, 4997, 4999, 5001, 5002, 5003, 5006, 5007, 5008, 5009, 5010, 5011, 5013, 5015, 5016, 5018, 5019, 5020, 5021, 5023, 5025, 5026, 5027, 5029, 5032, 5033, 5034, 5035, 5036, 5037, 5038, 5039, 5041, 5043, 5044, 5046, 5047, 5048, 5049, 5050, 5051, 5052, 5054, 5055, 5056, 5057, 5058, 5059, 5060, 5061, 5062, 5065, 5066, 5067, 5068, 5070, 5071, 5073, 5075, 5077, 5078, 5080, 5081, 5083, 5084, 5086, 5087, 5089, 5091, 5093, 5095, 5096, 5098, 5099, 5100, 5101, 5102, 5103, 5105, 5106, 5108, 5109, 5111, 5113, 5116, 5117, 5118, 5119, 5120, 5121, 5122, 5123, 5125, 5127, 5128, 5130, 5131, 5133, 5135, 5136, 5137, 5140, 5141, 5142, 5145, 5146, 5149, 5153, 5154, 5156, 5158, 5159, 5164, 5165, 5166, 5167, 5169, 5171, 5173, 5175, 5176, 5177, 5178, 5181, 5182, 5183, 5186, 5187, 5189, 5190, 5191, 5192, 5193, 5195, 5196, 5199, 5201, 5202, 5203, 5204, 5205, 5209, 5210, 5213, 5216, 5217, 5222, 5223, 5224, 5225, 5226, 5229, 5230, 5232, 5233, 5236, 5237, 5238, 5239, 5240, 5241, 5245, 5246, 5247, 5248, 5249, 5251, 5253, 5254, 5257, 5258, 5259, 5262, 5263, 5264, 5266, 5269, 5270, 5271, 
5272, 5273, 5276, 5277, 5280, 5285, 5286, 5287, 5288, 5289, 5292, 5293, 5294, 5295, 5296, 5297, 5298, 5299, 5300, 5302, 5303, 5306, 5307, 5309, 5310, 5311, 5312, 5313, 5314, 5315, 5317, 5319, 5320, 5322, 5323, 5324, 5326, 5327, 5328, 5329, 5330, 5331, 5332, 5333, 5334, 5335, 5336, 5338, 5341, 5342, 5344, 5347, 5349, 5350, 5351, 5352, 5353, 5355, 5356, 5357, 5358, 5359, 5361, 5362, 5363, 5364, 5365, 5366, 5368, 5369, 5372, 5373, 5374, 5375, 5376, 5381, 5383, 5384, 5385, 5386, 5387, 5388, 5390, 5391, 5392, 5393, 5394, 5395, 5396, 5399, 5402, 5403, 5404, 5406, 5407, 5408, 5409, 5410, 5411, 5413, 5414, 5416, 5418, 5420, 5425, 5426, 5428, 5429, 5430, 5431, 5432, 5435, 5436, 5437, 5438, 5439, 5440, 5446, 5447, 5448, 5449, 5450, 5453, 5454, 5456, 5457, 5458, 5460, 5461, 5462, 5464, 5466, 5467, 5468, 5471, 5472, 5473, 5474, 5475, 5478, 5481, 5484, 5485, 5486, 5487, 5488, 5489, 5490, 5492, 5493, 5494, 5495, 5496, 5497, 5498, 5500, 5501, 5504, 5505, 5506, 5507, 5508, 5509, 5512, 5513, 5514, 5515, 5517, 5518, 5522, 5525, 5527, 5533, 5535, 5536, 5537, 5538, 5539, 5540, 5541, 5542, 5543, 5546, 5547, 5548, 5550, 5551, 5552, 5554, 5555, 5556, 5557, 5559, 5560, 5562, 5563, 5564, 5565, 5569, 5570, 5571, 5575, 5576, 5577, 5579, 5581, 5582, 5583, 5584, 5585, 5586, 5589, 5590, 5594, 5595, 5597, 5599, 5600, 5601, 5602, 5605, 5606, 5609, 5612, 5613, 5614, 5615, 5617, 5618, 5619, 5620, 5624, 5627, 5631, 5632, 5633, 5634, 5636, 5638, 5639, 5640, 5643, 5646, 5647, 5649, 5651, 5653, 5654, 5656, 5657, 5660, 5662, 5665, 5666, 5668, 5669, 5673, 5675, 5677, 5678, 5679, 5680, 5681, 5682, 5683, 5684, 5685, 5686, 5690, 5691, 5693, 5694, 5696, 5697, 5699, 5701, 5702, 5703, 5706, 5707, 5710, 5711, 5712, 5715, 5716, 5717, 5718, 5719, 5720, 5722, 5723, 5727, 5730, 5732, 5734, 5735, 5736, 5737, 5739, 5740, 5741, 5744, 5745, 5746, 5747, 5748, 5749, 5751, 5752, 5754, 5755, 5757, 5761, 5762, 5763, 5764, 5766, 5768, 5769, 5770, 5774, 5776, 5777, 5778, 5779, 5780, 5781, 5782, 5783, 5784, 5785, 5786, 5788, 5790, 5791, 5792, 5793, 5795, 5796, 5797, 5798, 5800, 5802, 5803, 5804, 5805, 5806, 5807, 5808, 5812, 5815, 5816, 5818, 5820, 5821, 5822, 5823, 5824, 5825, 5826, 5828, 5831, 5832, 5833, 5835, 5836, 5838, 5840, 5841, 5844, 5849, 5851, 5855, 5856, 5857, 5859, 5860, 5862, 5864, 5865, 5866, 5868, 5869, 5872, 5873, 5874, 5875, 5877, 5878, 5879, 5880, 5881, 5884, 5887, 5888, 5889, 5890, 5893, 5895, 5896, 5897, 5901, 5902, 5903, 5904, 5905, 5906, 5910, 5911, 5913, 5914, 5915, 5917, 5918, 5919, 5921, 5922, 5923, 5925, 5926, 5927, 5928, 5930, 5934, 5935, 5939, 5941, 5942, 5944, 5946, 5947, 5948, 5949, 5951, 5953, 5954, 5955, 5956, 5957, 5958, 5959, 5961, 5963, 5965, 5968, 5969, 5971, 5972, 5976, 5979, 5981, 5982, 5983, 5984, 5985, 5986, 5991, 5992, 5996, 5997, 5998, 6000, 6001, 6002, 6003, 6004, 6005, 6006, 6008, 6011, 6013, 6014, 6016, 6019, 6023, 6024, 6025, 6026, 6027, 6028, 6029, 6031, 6033, 6034, 6035, 6037, 6039, 6042, 6043, 6045, 6047, 6048, 6049, 6050, 6052, 6054, 6055, 6057, 6058, 6060, 6061, 6062, 6063, 6066, 6068, 6069, 6070, 6071, 6072, 6073, 6074, 6075, 6076, 6077, 6079, 6080, 6081, 6082, 6083, 6084, 6086, 6088, 6089, 6090, 6092, 6093, 6095, 6097, 6098, 6100, 6101, 6102, 6103, 6104, 6107, 6108, 6109, 6110, 6111, 6116, 6117, 6118, 6119, 6120, 6121, 6123, 6125, 6128, 6130, 6131, 6132, 6133, 6137, 6139, 6140, 6141, 6143, 6144, 6145, 6146, 6147, 6149, 6150, 6151, 6152, 6155, 6156, 6159, 6160, 6163, 6164, 6165, 6167, 6168, 6169, 6170, 6171, 6173, 6174, 6175, 6176, 6177, 6179, 6180, 6182, 6183, 6185, 6186, 6187, 6193, 6194, 6195, 
6196, 6197, 6198, 6200, 6201, 6202, 6203, 6204, 6205, 6207, 6208, 6209, 6211, 6212, 6213, 6214, 6215, 6216, 6217, 6219, 6220, 6222, 6224, 6225, 6226, 6229, 6230, 6233, 6234, 6235, 6236, 6237, 6238, 6239, 6242, 6243, 6244, 6245, 6246, 6247, 6248, 6249, 6251, 6252, 6253, 6254, 6255, 6257, 6260, 6262, 6264, 6265, 6266, 6267, 6268, 6269, 6270, 6271, 6272, 6273, 6275, 6276, 6277, 6278, 6280, 6281, 6282, 6283, 6285, 6286, 6287, 6291, 6292, 6293, 6294, 6295, 6296, 6298, 6299, 6300, 6301, 6302, 6303, 6304, 6305, 6306, 6308, 6309, 6311, 6312, 6313, 6314, 6315, 6316, 6317, 6318, 6319, 6321, 6324, 6326, 6327, 6330, 6331, 6332, 6333, 6334, 6335, 6337, 6339, 6340, 6341, 6343, 6344, 6345, 6346, 6347, 6351, 6354, 6356, 6357, 6360, 6361, 6363, 6365, 6367, 6369, 6371, 6373, 6374, 6375, 6376, 6377, 6378, 6380, 6381, 6382, 6386, 6387, 6388, 6391, 6392, 6393, 6396, 6398, 6399, 6400, 6401, 6402, 6403, 6404, 6405, 6406, 6407, 6408, 6410, 6413, 6416, 6420, 6421, 6422, 6423, 6424, 6425, 6426, 6427, 6428, 6430, 6431, 6433, 6434, 6435, 6436, 6438, 6441, 6442, 6443, 6446, 6447, 6448, 6449, 6450, 6451, 6452, 6453, 6455, 6457, 6458, 6464, 6469, 6470, 6472, 6475, 6476, 6477, 6478, 6479, 6480, 6481, 6482, 6483, 6485, 6486, 6488, 6491, 6492, 6493, 6494, 6496, 6497, 6498, 6499, 6500, 6501, 6502, 6503, 6504, 6505, 6506, 6507, 6508, 6509, 6510, 6511, 6512, 6514, 6515, 6520, 6521, 6522, 6524, 6526, 6527, 6530, 6534, 6535, 6536, 6538, 6540, 6541, 6545, 6546, 6548, 6549, 6550, 6551, 6552, 6553, 6554, 6555, 6556, 6557, 6558, 6559, 6566, 6567, 6569, 6570, 6571, 6573, 6574, 6575, 6576, 6581, 6582, 6586, 6589, 6590, 6591, 6593, 6594, 6595, 6596, 6600, 6602, 6603, 6604, 6605, 6606, 6608, 6610, 6611, 6612, 6615, 6619, 6620, 6623, 6624, 6626, 6627, 6628, 6631, 6632, 6633, 6634, 6635, 6638, 6639, 6642, 6644, 6645, 6646, 6647, 6650, 6651, 6652, 6653, 6657, 6659, 6660, 6661, 6663, 6665, 6667, 6669, 6672, 6673, 6674, 6675, 6677, 6679, 6681, 6682, 6683, 6684, 6687, 6689, 6690, 6691, 6693, 6696, 6697, 6701, 6702, 6703, 6704, 6705, 6707, 6710, 6712, 6714, 6715, 6716, 6717, 6718, 6719, 6721, 6725, 6729, 6730, 6732, 6733, 6735, 6736, 6737, 6740, 6742, 6744, 6745, 6747, 6748, 6750, 6751, 6752, 6753, 6755, 6756, 6758, 6759, 6764, 6768, 6771, 6774, 6775, 6777, 6778, 6779, 6780, 6781, 6782, 6783, 6784, 6785, 6787, 6788, 6791, 6792, 6794, 6798, 6799, 6800, 6801, 6803, 6804, 6806, 6808, 6809, 6813, 6814, 6815, 6818, 6819, 6820, 6823, 6824, 6826, 6827, 6828, 6829, 6830, 6831, 6832, 6833, 6834, 6835, 6836, 6838, 6839, 6840, 6842, 6844, 6845, 6846, 6847, 6848, 6850, 6851, 6852, 6855, 6857, 6858, 6859, 6860, 6861, 6863, 6865, 6866, 6867, 6868, 6869, 6871, 6872, 6874, 6877, 6878, 6881, 6882, 6883, 6884, 6885, 6886, 6887, 6889, 6892, 6894, 6895, 6896, 6897, 6899, 6900, 6901, 6904, 6905, 6906, 6908, 6909, 6911, 6913, 6914, 6915, 6916, 6917, 6918, 6921, 6922, 6924, 6925, 6927, 6930, 6931, 6932, 6933, 6934, 6935, 6936, 6937, 6939, 6941, 6943, 6944, 6945, 6946, 6948, 6949, 6952, 6953, 6954, 6955, 6956, 6959, 6960, 6963, 6964, 6965, 6966, 6970, 6971, 6972, 6974, 6975, 6978, 6979, 6980, 6981, 6982, 6983, 6987, 6988, 6989, 6992, 6993, 6996, 6997, 6998, 6999, 7001, 7002, 7003, 7004, 7005, 7006, 7007, 7011, 7012, 7014, 7017, 7018, 7021, 7022, 7024, 7025, 7026, 7028, 7029, 7030, 7031, 7032, 7033, 7035, 7036, 7038, 7039, 7040, 7041, 7044, 7046, 7047, 7051, 7052, 7054, 7057, 7058, 7059, 7062, 7066, 7067, 7070, 7071, 7072, 7073, 7074, 7075, 7076, 7077, 7078, 7079, 7080, 7081, 7082, 7083, 7084, 7085, 7086, 7090, 7092, 7094, 7096, 7097, 7098, 7102, 7103, 7104, 7105, 
7106, 7107, 7109, 7110, 7111, 7112, 7113, 7119, 7120, 7121, 7122, 7123, 7124, 7127, 7130, 7131, 7133, 7134, 7135, 7137, 7138, 7139, 7143, 7144, 7147, 7148, 7149, 7150, 7151, 7153, 7154, 7157, 7158, 7160, 7163, 7165, 7167, 7169, 7170, 7172, 7175, 7176, 7177, 7180, 7181, 7184, 7187, 7188, 7189, 7190, 7192, 7193, 7194, 7195, 7196, 7197, 7199, 7200, 7201, 7202, 7203, 7204, 7205, 7206, 7208, 7210, 7211, 7213, 7214, 7215, 7218, 7219, 7220, 7221, 7223, 7224, 7225, 7226, 7227, 7229, 7231, 7232, 7233, 7234, 7235, 7238, 7241, 7243, 7244, 7246, 7247, 7251, 7254, 7255, 7256, 7257, 7261, 7262, 7263, 7264, 7265, 7266, 7267, 7268, 7269, 7270, 7271, 7273, 7274, 7275, 7277, 7278, 7281, 7282, 7283, 7286, 7287, 7288, 7289, 7290, 7291, 7293, 7295, 7297, 7298, 7299, 7300, 7301, 7303, 7305, 7306, 7307, 7309, 7313, 7315, 7316, 7318, 7319, 7321, 7322, 7323, 7326, 7327, 7328, 7331, 7332, 7335, 7336, 7337, 7338, 7339, 7340, 7341, 7343, 7344, 7345, 7348, 7349, 7350, 7351, 7353, 7354, 7355, 7356, 7358, 7359, 7360, 7361, 7362, 7367, 7371, 7372, 7373, 7374, 7376, 7377, 7378, 7380, 7382, 7384, 7385, 7386, 7387, 7388, 7389, 7393, 7397, 7398, 7399, 7400, 7401, 7403, 7404, 7405, 7406, 7407, 7408, 7409, 7412, 7413, 7414, 7415, 7416, 7417, 7418, 7419, 7422, 7425, 7426, 7427, 7430, 7431, 7433, 7434, 7435, 7437, 7439, 7440, 7441, 7443, 7445, 7446, 7447, 7448, 7449, 7450, 7451, 7452, 7455, 7456, 7458, 7459, 7461, 7462, 7463, 7464, 7465, 7466, 7469, 7470, 7471, 7473, 7475, 7476, 7477, 7478, 7479, 7480, 7481, 7483, 7484, 7485, 7486, 7487, 7488, 7489, 7491, 7493, 7494, 7496, 7497, 7498, 7500, 7502, 7503, 7505, 7506, 7507, 7508, 7509, 7510, 7511, 7512, 7513, 7514, 7515, 7516, 7517, 7521, 7522, 7523, 7524, 7526, 7527, 7529, 7531, 7532, 7533, 7534, 7536, 7539, 7541, 7542, 7545, 7547, 7548, 7550, 7551, 7553, 7554, 7556, 7557, 7558, 7559, 7562, 7564, 7565, 7567, 7568, 7571, 7573, 7575, 7576, 7579, 7581, 7583, 7584, 7585, 7586, 7588, 7589, 7590, 7591, 7593, 7594, 7595, 7597, 7598, 7599, 7601, 7603, 7604, 7605, 7606, 7611, 7615, 7617, 7619, 7621, 7623, 7625, 7627, 7628, 7633, 7634, 7635, 7636, 7637, 7638, 7639, 7640, 7641, 7642, 7645, 7648, 7649, 7650, 7651, 7658, 7660, 7661, 7662, 7664, 7665, 7666, 7667, 7669, 7671, 7672, 7673, 7674, 7675, 7677, 7679, 7680, 7681, 7682, 7683, 7684, 7686, 7692, 7693, 7694, 7695, 7696, 7697, 7698, 7701, 7704, 7706, 7707, 7708, 7709, 7710, 7711, 7712, 7713, 7715, 7717, 7719, 7722, 7724, 7725, 7727, 7731, 7732, 7733, 7735, 7736, 7737, 7738, 7740, 7742, 7744, 7746, 7747, 7748, 7751, 7752, 7753, 7756, 7757, 7758, 7759, 7760, 7761, 7762, 7764, 7766, 7768, 7769, 7771, 7773, 7774, 7775, 7777, 7778, 7779, 7780, 7782, 7783, 7784, 7785, 7788, 7789, 7792, 7793, 7794, 7796, 7798, 7800, 7802, 7803, 7805, 7806, 7807, 7809, 7810, 7811, 7814, 7817, 7818, 7819, 7820, 7821, 7822, 7823, 7824, 7825, 7828, 7829, 7830, 7831, 7834, 7835, 7837, 7838, 7839, 7840, 7841, 7842, 7843, 7845, 7846, 7847, 7848, 7849, 7851, 7852, 7853, 7855, 7856, 7859, 7860, 7861, 7863, 7865, 7866, 7867, 7868, 7870, 7872, 7873, 7874, 7875, 7876, 7877, 7884, 7885, 7887, 7888, 7889, 7890, 7891, 7892, 7893, 7894, 7896, 7897, 7898, 7900, 7901, 7903, 7904, 7905, 7906, 7909, 7913, 7914, 7915, 7917, 7918, 7919, 7920, 7922, 7924, 7925, 7926, 7927, 7928, 7929, 7930, 7931, 7932, 7933, 7934, 7935, 7937, 7938, 7941, 7942, 7943, 7944, 7945, 7947, 7951, 7954, 7956, 7957, 7958, 7959, 7960, 7961, 7963, 7964, 7965, 7966, 7969, 7970, 7971, 7972, 7973, 7974, 7975, 7976, 7977, 7978, 7980, 7982, 7983, 7985, 7987, 7989, 7991, 7992, 7993, 7994, 7997, 7998, 7999, 8000, 8001, 
8002, 8003, 8005, 8006, 8007, 8008, 8009, 8014, 8015, 8016, 8019, 8022, 8023, 8025, 8027, 8030, 8031, 8032, 8033, 8034, 8035, 8037, 8040, 8042, 8043, 8044, 8045, 8046, 8048, 8049, 8052, 8053, 8056, 8059, 8060, 8062, 8063, 8064, 8066, 8068, 8070, 8071, 8072, 8076, 8077, 8078, 8079, 8080, 8082, 8083, 8084, 8085, 8086, 8088, 8089, 8092, 8093, 8094, 8095, 8096, 8097, 8099, 8100, 8104, 8106, 8107, 8109, 8110, 8111, 8113, 8114, 8115, 8120, 8121, 8122, 8123, 8125, 8128, 8130, 8131, 8133, 8134, 8135, 8136, 8137, 8139, 8142, 8143, 8146, 8147, 8148, 8150, 8151, 8154, 8155, 8156, 8159, 8160, 8161, 8162, 8163, 8164, 8166, 8169, 8173, 8175, 8176, 8177, 8178, 8179, 8180, 8185, 8188, 8190, 8191, 8192, 8195, 8196, 8197, 8205, 8206, 8207, 8208, 8209, 8210, 8211, 8213, 8214, 8215, 8216, 8217, 8218, 8220, 8221, 8222, 8223, 8224, 8226, 8227, 8228, 8229, 8231, 8234, 8238, 8240, 8242, 8243, 8244, 8245, 8246, 8247, 8249, 8251, 8252, 8254, 8255, 8256, 8257, 8258, 8259, 8260, 8264, 8269, 8271, 8273, 8274, 8275, 8276, 8277, 8279, 8280, 8281, 8282, 8283, 8284, 8287, 8288, 8289, 8290, 8292, 8293, 8294, 8295, 8296, 8299, 8301, 8302, 8303, 8304, 8305, 8306, 8307, 8309, 8311, 8313, 8314, 8316, 8317, 8318, 8321, 8322, 8324, 8327, 8328, 8329, 8330, 8331, 8332, 8334, 8335, 8337, 8338, 8339, 8340, 8341, 8342, 8343, 8344, 8345, 8346, 8347, 8350, 8352, 8353, 8356, 8357, 8359, 8361, 8364, 8365, 8366, 8368, 8369, 8370, 8371, 8372, 8374, 8376, 8377, 8379, 8380, 8381, 8382, 8383, 8384, 8385, 8386, 8389, 8390, 8392, 8393, 8394, 8395, 8396, 8397, 8399, 8400, 8401, 8402, 8403, 8404, 8405, 8406, 8409, 8410, 8411, 8413, 8415, 8416, 8418, 8419, 8421, 8423, 8424, 8425, 8426, 8427, 8428, 8429, 8430, 8431, 8433, 8434, 8435, 8436, 8437, 8438, 8439, 8440, 8441, 8442, 8443, 8445, 8446, 8448, 8451, 8452, 8453, 8454, 8457, 8458, 8460, 8462, 8463, 8464, 8466, 8467, 8469, 8470, 8471, 8472, 8473, 8475, 8476, 8477, 8478, 8479, 8480, 8481, 8482, 8483, 8484, 8485, 8486, 8491, 8492, 8493, 8494, 8495, 8496, 8497, 8498, 8499, 8500, 8501, 8502, 8503, 8504, 8506, 8508, 8509, 8510, 8511, 8512, 8514, 8515, 8516, 8518, 8519, 8521, 8523, 8524, 8525, 8526, 8527, 8528] --------------------------------------------------------------------------------
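The load_job()/jobToDict()/modelToDict() methods in codes/network/network.py above fix a simple on-disk layout for experiment logs, <root>/<log_dir>/<model_name>/<job_name>/fold<cvid>, and snapshot the resolved job and model dictionaries into that folder as job.json and model.json. The standalone Python sketch below reproduces just that bookkeeping step for illustration; the ROOT/LOG_DIR constants, the model_name value and the example job dictionary are assumptions made for the sketch (in the repository they come from definitions.py, the model JSON and the training driver), not values taken from the code above.

import json
import os

# Assumed stand-ins for the path constants that network.py imports (e.g. from definitions.py).
ROOT = '.'
LOG_DIR = 'logs'

# Illustrative job dictionary with the keys jobFromDict() expects; 'name', 'cvid'
# and 'worker' are normally filled in by the training driver before load_job() runs.
job = {
    'name': 'CNN_paper',
    'description': 'example job',
    'model': 'CNN_paper',
    'split': 'split_5_6_14',
    'log_en': 'True',
    'log_test_score': 'True',
    'cvid': 4,
    'worker': 'your_machine',
}

# In load_job() the first path component is the 'model_name' read from the model
# JSON; a value is simply assumed here.
model_name = 'CNN'

# Reproduce the per-fold log directory created in load_job():
#   <root>/<log_dir>/<model_name>/<job_name>/fold<cvid>
fold_dir = os.path.join(ROOT, LOG_DIR, model_name, job['name'], 'fold' + str(job['cvid']))
os.makedirs(fold_dir, exist_ok=True)

# load_job() then snapshots the resolved configuration next to the training logs.
with open(os.path.join(fold_dir, 'job.json'), 'w') as fh:
    json.dump(job, fh, indent=4, sort_keys=True)

print('job snapshot written to', os.path.join(fold_dir, 'job.json'))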