├── sample data
│   └── METABRIC
│       └── label.csv
├── README.md
├── utils_network.py
├── utils_eval.py
├── import_data.py
├── main_RandomSearch.py
├── get_main.py
├── summarize_results.py
└── class_DeepHit.py

/README.md:
--------------------------------------------------------------------------------
1 | # DeepHit
2 | Title: "DeepHit: A Deep Learning Approach to Survival Analysis with Competing Risks"
3 | 
4 | Authors: Changhee Lee, William R. Zame, Jinsung Yoon, Mihaela van der Schaar
5 | 
6 | - Reference: C. Lee, W. R. Zame, J. Yoon, M. van der Schaar, "DeepHit: A Deep Learning Approach to Survival Analysis with Competing Risks," AAAI Conference on Artificial Intelligence (AAAI), 2018
7 | - Paper: http://medianetlab.ee.ucla.edu/papers/AAAI_2018_DeepHit
8 | - Supplementary: http://medianetlab.ee.ucla.edu/papers/AAAI_2018_DeepHit_Appendix
9 | 
10 | ### Description of the code
11 | This code is a modified implementation of DeepHit, applied to the METABRIC (single risk) and SYNTHETIC (competing risks) datasets.
12 | 
13 | The detailed modifications are as follows:
14 | - Hyper-parameter optimization using random search is implemented
15 | - Residual connections are removed
16 | - The definition of the time-dependent C-index is changed; please refer to T. A. Gerds et al., "Estimating a Time-Dependent Concordance Index for Survival Prediction Models with Covariate Dependent Censoring," Stat Med., 2013
17 | - Set "EVAL_TIMES" to a list of evaluation times of interest; the network is then optimized with respect to these evaluation times (see the sketch below)
18 | 
--------------------------------------------------------------------------------
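For illustration, a minimal numpy sketch of how EVAL_TIMES enters validation (it mirrors the risk computation in get_main.py; the PMF `pred` below is a random stand-in for the network output of shape [n_subjects, num_Event, num_Category]):

import numpy as np

EVAL_TIMES = [12, 24, 36]                             # evaluation horizons of interest
pred = np.random.dirichlet(np.ones(40), size=(5, 1))  # toy PMF: 5 subjects, 1 event, 40 time bins

for t in EVAL_TIMES:
    # the risk at horizon t is the estimated CIF: F(t | x) = sum_{tau <= t} P(tau | x)
    risk = np.sum(pred[:, :, :(t + 1)], axis=2)       # shape: [5 subjects, 1 event]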
/utils_network.py:
--------------------------------------------------------------------------------
1 | '''
2 | First implemented: 01/25/2018
3 |   > For survival analysis on longitudinal datasets
4 | By CHANGHEE LEE
5 | 
6 | Modification List:
7 |   - 08/07/2018: weight regularization for FC_NET is added
8 | '''
9 | 
10 | import tensorflow as tf
11 | import numpy as np
12 | 
13 | from tensorflow.contrib.layers import fully_connected as FC_Net
14 | 
15 | 
16 | ### CONSTRUCT MULTICELL FOR MULTI-LAYER RNNS
17 | def create_rnn_cell(num_units, num_layers, keep_prob, RNN_type):
18 |     '''
19 |         GOAL         : create a multi-cell (including a single cell) to construct a multi-layer RNN
20 |         num_units    : number of units in each layer
21 |         num_layers   : number of layers in MulticellRNN
22 |         keep_prob    : keep probability [0, 1] (if None, dropout is not employed)
23 |         RNN_type     : either 'LSTM' or 'GRU'
24 |     '''
25 |     cells = []
26 |     for _ in range(num_layers):
27 |         if RNN_type == 'GRU':
28 |             cell = tf.contrib.rnn.GRUCell(num_units)
29 |         elif RNN_type == 'LSTM':
30 |             cell = tf.contrib.rnn.LSTMCell(num_units)
31 |         if not keep_prob is None:
32 |             cell = tf.contrib.rnn.DropoutWrapper(cell, output_keep_prob=keep_prob)
33 |         cells.append(cell)
34 |     cell = tf.contrib.rnn.MultiRNNCell(cells)
35 | 
36 |     return cell
37 | 
38 | 
39 | ### EXTRACT STATE OUTPUT OF MULTICELL-RNNS
40 | def create_concat_state(state, num_layers, RNN_type):
41 |     '''
42 |         GOAL         : concatenate the tuple-type tensor (state) into a single tensor
43 |         state        : input state is a tuple of MulticellRNN (i.e. output of MulticellRNN)
44 |                        consisting of only hidden states h for GRU and hidden states c and h for LSTM
45 |         num_layers   : number of layers in MulticellRNN
46 |         RNN_type     : either 'LSTM' or 'GRU'
47 |     '''
48 |     for i in range(num_layers):
49 |         if RNN_type == 'LSTM':
50 |             tmp = state[i][1] ## i-th layer, h state for LSTM
51 |         elif RNN_type == 'GRU':
52 |             tmp = state[i] ## i-th layer, h state for GRU
53 |         else:
54 |             print('ERROR: WRONG RNN CELL TYPE')
55 | 
56 |         if i == 0:
57 |             rnn_state_out = tmp
58 |         else:
59 |             rnn_state_out = tf.concat([rnn_state_out, tmp], axis=1)
60 | 
61 |     return rnn_state_out
62 | 
63 | 
64 | ### FEEDFORWARD NETWORK
65 | def create_FCNet(inputs, num_layers, h_dim, h_fn, o_dim, o_fn, w_init, keep_prob=1.0, w_reg=None):
66 |     '''
67 |         GOAL             : create an FC network with different specifications
68 |         inputs (tensor)  : input tensor
69 |         num_layers       : number of layers in FCNet
70 |         h_dim (int)      : number of hidden units
71 |         h_fn             : activation function for hidden layers (default: tf.nn.relu)
72 |         o_dim (int)      : number of output units
73 |         o_fn             : activation function for the output layer (default: None)
74 |         w_init           : initialization for the weight matrix (default: Xavier)
75 |         keep_prob        : keep probability [0, 1] (if None, dropout is not employed)
76 |     '''
77 |     # default activation functions (hidden: relu, out: None)
78 |     if h_fn is None:
79 |         h_fn = tf.nn.relu
80 |     if o_fn is None:
81 |         o_fn = None
82 | 
83 |     # default initialization functions (weight: Xavier, bias: None)
84 |     if w_init is None:
85 |         w_init = tf.contrib.layers.xavier_initializer() # Xavier initialization
86 | 
87 |     for layer in range(num_layers):
88 |         if num_layers == 1:
89 |             out = FC_Net(inputs, o_dim, activation_fn=o_fn, weights_initializer=w_init, weights_regularizer=w_reg)
90 |         else:
91 |             if layer == 0:
92 |                 h = FC_Net(inputs, h_dim, activation_fn=h_fn, weights_initializer=w_init, weights_regularizer=w_reg)
93 |                 if not keep_prob is None:
94 |                     h = tf.nn.dropout(h, keep_prob=keep_prob)
95 | 
96 |             elif layer > 0 and layer != (num_layers-1): # intermediate layers
97 |                 h = FC_Net(h, h_dim, activation_fn=h_fn, weights_initializer=w_init, weights_regularizer=w_reg)
98 |                 if not keep_prob is None:
99 |                     h = tf.nn.dropout(h, keep_prob=keep_prob)
100 | 
101 |             else: # layer == num_layers-1 (the last layer)
102 |                 out = FC_Net(h, o_dim, activation_fn=o_fn, weights_initializer=w_init, weights_regularizer=w_reg)
103 | 
104 |     return out
--------------------------------------------------------------------------------
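A minimal usage sketch of create_FCNet under a TF1 environment (the placeholder shape and layer sizes here are arbitrary examples, not values used by this repo):

import tensorflow as tf
import utils_network as utils

x = tf.placeholder(tf.float32, shape=[None, 10])
# 3-layer FC net: two hidden layers of 50 relu units, one linear output unit,
# Xavier initialization (w_init=None falls back to it), dropout with keep_prob=0.8
net = utils.create_FCNet(x, num_layers=3, h_dim=50, h_fn=tf.nn.relu,
                         o_dim=1, o_fn=None, w_init=None, keep_prob=0.8)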
/utils_eval.py:
--------------------------------------------------------------------------------
1 | '''
2 | This provides the time-dependent concordance index and Brier score:
3 |     - Use weighted_c_index and weighted_brier_score, which are the unbiased estimates.
4 | 
5 | See eq. (11) and (12), and the surrounding descriptions, in the following paper:
6 |     - C. Lee, W. R. Zame, A. Alaa, M. van der Schaar, "Temporal Quilting for Survival Analysis", AISTATS 2019
7 | '''
8 | 
9 | import numpy as np
10 | from lifelines import KaplanMeierFitter
11 | 
12 | 
13 | ### C(t)-INDEX CALCULATION
14 | def c_index(Prediction, Time_survival, Death, Time):
15 |     '''
16 |         This is a cause-specific c(t)-index
17 |         - Prediction      : risk at Time (higher --> more risky)
18 |         - Time_survival   : survival/censoring time
19 |         - Death           :
20 |             > 1: death
21 |             > 0: censored (including death from other causes)
22 |         - Time            : time of evaluation (time-horizon when evaluating the C-index)
23 |     '''
24 |     N = len(Prediction)
25 |     A = np.zeros((N,N))
26 |     Q = np.zeros((N,N))
27 |     N_t = np.zeros((N,N))
28 |     Num = 0
29 |     Den = 0
30 |     for i in range(N):
31 |         A[i, np.where(Time_survival[i] < Time_survival)] = 1
32 |         Q[i, np.where(Prediction[i] > Prediction)] = 1
33 | 
34 |         if (Time_survival[i]<=Time and Death[i]==1):
35 |             N_t[i,:] = 1
36 | 
37 |     Num = np.sum(((A)*N_t)*Q)
38 |     Den = np.sum((A)*N_t)
39 | 
40 |     if Num == 0 and Den == 0:
41 |         result = -1 # not able to compute the c-index!
42 |     else:
43 |         result = float(Num/Den)
44 | 
45 |     return result
46 | 
47 | ### BRIER-SCORE
48 | def brier_score(Prediction, Time_survival, Death, Time):
49 |     N = len(Prediction)
50 |     y_true = ((Time_survival <= Time) * Death).astype(float)
51 | 
52 |     return np.mean((Prediction - y_true)**2)
53 | 
54 | # result2[k, t] = brier_score_loss(risk[:, k], ((te_time[:,0] <= eval_horizon) * (te_label[:,0] == k+1)).astype(int))
55 | 
56 | 
57 | ##### WEIGHTED C-INDEX & BRIER-SCORE
58 | def CensoringProb(Y, T):
59 | 
60 |     T = T.reshape([-1]) # (N,) - np array
61 |     Y = Y.reshape([-1]) # (N,) - np array
62 | 
63 |     kmf = KaplanMeierFitter()
64 |     kmf.fit(T, event_observed=(Y==0).astype(int)) # censoring prob = survival probability of the event "censoring"
65 |     G = np.asarray(kmf.survival_function_.reset_index()).transpose()
66 |     G[1, G[1, :] == 0] = G[1, G[1, :] != 0][-1] # fill 0's with the last nonzero value (zero-order hold, to prevent nan values)
67 | 
68 |     return G
69 | 
70 | 
71 | ### C(t)-INDEX CALCULATION: this accounts for the inverse-censoring-probability weights needed for an unbiased estimate
72 | def weighted_c_index(T_train, Y_train, Prediction, T_test, Y_test, Time):
73 |     '''
74 |         This is a cause-specific c(t)-index
75 |         - Prediction      : risk at Time (higher --> more risky)
76 |         - T_test          : survival/censoring time
77 |         - Y_test          :
78 |             > 1: death
79 |             > 0: censored (including death from other causes)
80 |         - Time            : time of evaluation (time-horizon when evaluating the C-index)
81 |     '''
82 |     G = CensoringProb(Y_train, T_train)
83 | 
84 |     N = len(Prediction)
85 |     A = np.zeros((N,N))
86 |     Q = np.zeros((N,N))
87 |     N_t = np.zeros((N,N))
88 |     Num = 0
89 |     Den = 0
90 |     for i in range(N):
91 |         tmp_idx = np.where(G[0,:] >= T_test[i])[0]
92 | 
93 |         if len(tmp_idx) == 0:
94 |             W = (1./G[1, -1])**2
95 |         else:
96 |             W = (1./G[1, tmp_idx[0]])**2
97 | 
98 |         A[i, np.where(T_test[i] < T_test)] = 1. * W # comparable pairs, weighted by 1/G^2
99 |         Q[i, np.where(Prediction[i] > Prediction)] = 1.
100 | 
101 |         if (T_test[i]<=Time and Y_test[i]==1):
102 |             N_t[i,:] = 1.
103 | 
104 |     Num = np.sum(((A)*N_t)*Q)
105 |     Den = np.sum((A)*N_t)
106 | 
107 |     if Num == 0 and Den == 0:
108 |         result = -1 # not able to compute the c-index!
109 |     else:
110 |         result = float(Num/Den)
111 | 
112 |     return result
113 | 
114 | 
115 | # this accounts for the inverse-censoring-probability weights needed for an unbiased estimate
116 | def weighted_brier_score(T_train, Y_train, Prediction, T_test, Y_test, Time):
117 |     G = CensoringProb(Y_train, T_train)
118 |     N = len(Prediction)
119 | 
120 |     W = np.zeros(len(Y_test))
121 |     Y_tilde = (T_test > Time).astype(float)
122 | 
123 |     for i in range(N):
124 |         tmp_idx1 = np.where(G[0,:] >= T_test[i])[0]
125 |         tmp_idx2 = np.where(G[0,:] >= Time)[0]
126 | 
127 |         if len(tmp_idx1) == 0:
128 |             G1 = G[1, -1]
129 |         else:
130 |             G1 = G[1, tmp_idx1[0]]
131 | 
132 |         if len(tmp_idx2) == 0:
133 |             G2 = G[1, -1]
134 |         else:
135 |             G2 = G[1, tmp_idx2[0]]
136 |         W[i] = (1. - Y_tilde[i])*float(Y_test[i])/G1 + Y_tilde[i]/G2
137 | 
138 |     y_true = ((T_test <= Time) * Y_test).astype(float)
139 | 
140 |     return np.mean(W*(Y_tilde - (1.-Prediction))**2)
141 | 
142 | 
143 | 
--------------------------------------------------------------------------------
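A toy call of the weighted C-index (values invented for this example; lifelines must be installed, since CensoringProb fits a Kaplan-Meier estimator of the censoring distribution on the training split):

import numpy as np
from utils_eval import weighted_c_index

T_train = np.array([2., 4., 5., 7., 9.])
Y_train = np.array([1, 0, 1, 1, 0])   # 1: event, 0: censored
T_test  = np.array([3., 6., 8.])
Y_test  = np.array([1, 1, 0])
risk    = np.array([0.9, 0.5, 0.2])   # higher --> more risky

print(weighted_c_index(T_train, Y_train, risk, T_test, Y_test, Time=8))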
/import_data.py:
--------------------------------------------------------------------------------
1 | '''
2 | This provides the dimensions/data/masks used to train/test the network.
3 | 
4 | One must construct a function similar to "import_dataset_SYNTHETIC":
5 |     - DATA FORMAT:
6 |         > data: covariates with x_dim dimensions
7 |         > label: 0: censoring, 1 ~ K: K competing (or single) risk(s)
8 |         > time: time-to-event or time-to-censoring
9 |     - Based on the data, create mask1 and mask2, which are required to calculate the loss functions.
10 | '''
11 | import numpy as np
12 | import pandas as pd
13 | import random
14 | 
15 | 
16 | ##### DEFINE USER-FUNCTIONS #####
17 | def f_get_Normalization(X, norm_mode):
18 |     num_Patient, num_Feature = np.shape(X)
19 | 
20 |     if norm_mode == 'standard': # zero mean unit variance
21 |         for j in range(num_Feature):
22 |             if np.std(X[:,j]) != 0:
23 |                 X[:,j] = (X[:,j] - np.mean(X[:, j]))/np.std(X[:,j])
24 |             else:
25 |                 X[:,j] = (X[:,j] - np.mean(X[:, j]))
26 |     elif norm_mode == 'normal': # min-max normalization
27 |         for j in range(num_Feature):
28 |             X[:,j] = (X[:,j] - np.min(X[:,j]))/(np.max(X[:,j]) - np.min(X[:,j]))
29 |     else:
30 |         print("INPUT MODE ERROR!")
31 | 
32 |     return X
33 | 
34 | ### MASK FUNCTIONS
35 | '''
36 |     fc_mask2    : To calculate LOSS_1 (log-likelihood loss)
37 |     fc_mask3    : To calculate LOSS_2 (ranking loss)
38 | '''
39 | def f_get_fc_mask2(time, label, num_Event, num_Category):
40 |     '''
41 |         This mask (returned as mask1 by the dataset functions below) is required to get the log-likelihood loss.
42 |         Its size is [N, num_Event, num_Category]:
43 |             if not censored : one element = 1 (0 elsewhere)
44 |             if censored     : fill elements with 1 after the censoring time (for all events)
45 |     '''
46 |     mask = np.zeros([np.shape(time)[0], num_Event, num_Category]) # for the first loss function
47 |     for i in range(np.shape(time)[0]):
48 |         if label[i,0] != 0: # not censored
49 |             mask[i,int(label[i,0]-1),int(time[i,0])] = 1
50 |         else: # label[i,0]==0: censored
51 |             mask[i,:,int(time[i,0]+1):] = 1 # fill 1 after the censoring time (to get 1 - \sum F)
52 |     return mask
53 | 
54 | 
55 | def f_get_fc_mask3(time, meas_time, num_Category):
56 |     '''
57 |         This mask (returned as mask2 by the dataset functions below) is required to calculate the ranking loss (for pair-wise comparison).
58 |         Its size is [N, num_Category]:
59 |             - For longitudinal measurements:
60 |                 1's from the last measurement to the event time (exclusive and inclusive, respectively)
61 |                 the denominator is not needed since the comparison is made within the same denominator
62 |             - For single measurements:
63 |                 1's from the start to the event time (inclusive)
64 |     '''
65 |     mask = np.zeros([np.shape(time)[0], num_Category]) # for the ranking loss
66 |     if np.shape(meas_time): # longitudinal measurements
67 |         for i in range(np.shape(time)[0]):
68 |             t1 = int(meas_time[i, 0]) # last measurement time
69 |             t2 = int(time[i, 0]) # censoring/event time
70 |             mask[i,(t1+1):(t2+1)] = 1 # this excludes the last measurement time and includes the event time
71 |     else: # single measurement
72 |         for i in range(np.shape(time)[0]):
73 |             t = int(time[i, 0]) # censoring/event time
74 |             mask[i,:(t+1)] = 1 # this includes everything from the start up to and including the event time
75 |     return mask
76 | 
77 | 
78 | def import_dataset_SYNTHETIC(norm_mode='standard'):
79 |     in_filename = './sample data/SYNTHETIC/synthetic_comprisk.csv'
80 |     df = pd.read_csv(in_filename, sep=',')
81 | 
82 |     label = np.asarray(df[['label']])
83 |     time = np.asarray(df[['time']])
84 |     data = np.asarray(df.iloc[:,4:])
85 |     data = f_get_Normalization(data, norm_mode)
86 | 
87 |     num_Category = int(np.max(time) * 1.2) # to have enough time-horizon
88 |     num_Event = int(len(np.unique(label)) - 1) # only count the number of events (do not count censoring as an event)
89 | 
90 |     x_dim = np.shape(data)[1]
91 | 
92 |     mask1 = f_get_fc_mask2(time, label, num_Event, num_Category)
93 |     mask2 = f_get_fc_mask3(time, -1, num_Category)
94 | 
95 |     DIM = (x_dim)
96 |     DATA = (data, time, label)
97 |     MASK = (mask1, mask2)
98 | 
99 |     return DIM, DATA, MASK
100 | 
101 | 
102 | def import_dataset_METABRIC(norm_mode='standard'):
103 |     in_filename1 = './sample data/METABRIC/cleaned_features_final.csv'
104 |     in_filename2 = './sample data/METABRIC/label.csv'
105 | 
106 |     df1 = pd.read_csv(in_filename1, sep=',')
107 |     df2 = pd.read_csv(in_filename2, sep=',')
108 | 
109 |     data = np.asarray(df1)
110 |     data = f_get_Normalization(data, norm_mode)
111 | 
112 |     time = np.asarray(df2[['event_time']])
113 |     # time = np.round(time/12.) # unit time = month
114 |     label = np.asarray(df2[['label']])
115 | 
116 | 
117 |     num_Category = int(np.max(time) * 1.2) # to have enough time-horizon
118 |     num_Event = int(len(np.unique(label)) - 1) # only count the number of events (do not count censoring as an event)
119 | 
120 |     x_dim = np.shape(data)[1]
121 | 
122 |     mask1 = f_get_fc_mask2(time, label, num_Event, num_Category)
123 |     mask2 = f_get_fc_mask3(time, -1, num_Category)
124 | 
125 |     DIM = (x_dim)
126 |     DATA = (data, time, label)
127 |     MASK = (mask1, mask2)
128 |     return DIM, DATA, MASK
--------------------------------------------------------------------------------
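A toy illustration of the two masks (3 subjects, 1 event, 6 time bins; values chosen only for this example):

import numpy as np
from import_data import f_get_fc_mask2, f_get_fc_mask3

time = np.array([[2], [4], [1]])   # event/censoring times
label = np.array([[1], [0], [1]])  # 0 = censored

mask1 = f_get_fc_mask2(time, label, num_Event=1, num_Category=6)
mask2 = f_get_fc_mask3(time, -1, num_Category=6)
print(mask1[0, 0])  # [0. 0. 1. 0. 0. 0.] -> one-hot at the event time (uncensored)
print(mask1[1, 0])  # [0. 0. 0. 0. 0. 1.] -> 1's after the censoring time (censored)
print(mask2[0])     # [1. 1. 1. 0. 0. 0.] -> 1's from the start up to and including the event time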
/main_RandomSearch.py:
--------------------------------------------------------------------------------
1 | '''
2 | This runs random search to find optimized hyper-parameters using cross-validation.
3 | 
4 | INPUTS:
5 |     - OUT_ITERATION: # of training/testing splits
6 |     - RS_ITERATION: # of random search iterations
7 |     - data_mode: mode to select the time-to-event data from "import_data.py"
8 |     - seed: random seed for training/testing/validation splits
9 |     - EVAL_TIMES: list of time-horizons at which the performance is maximized;
10 |                   the validation is performed at the given EVAL_TIMES (e.g., [12, 24, 36])
11 | 
12 | OUTPUTS:
13 |     - "hyperparameters_log.txt" is the output
14 |     - Once the hyper-parameters are optimized, run "summarize_results.py" to get the final results.
15 | '''
16 | import time, datetime, os
17 | import get_main
18 | import numpy as np
19 | 
20 | import import_data as impt
21 | 
22 | 
23 | # this saves the current hyperparameters
24 | def save_logging(dictionary, log_name):
25 |     with open(log_name, 'w') as f:
26 |         for key, value in dictionary.items():
27 |             f.write('%s:%s\n' % (key, value))
28 | 
29 | # this opens and loads the saved hyperparameters
30 | def load_logging(filename):
31 |     data = dict()
32 |     with open(filename) as f:
33 |         def is_float(input):
34 |             try:
35 |                 num = float(input)
36 |             except ValueError:
37 |                 return False
38 |             return True
39 | 
40 |         for line in f.readlines():
41 |             if ':' in line:
42 |                 key, value = line.strip().split(':', 1)
43 |                 if value.isdigit():
44 |                     data[key] = int(value)
45 |                 elif is_float(value):
46 |                     data[key] = float(value)
47 |                 elif value == 'None':
48 |                     data[key] = None
49 |                 else:
50 |                     data[key] = value
51 |             else:
52 |                 pass # deal with bad lines of text here
53 |     return data
54 | 
55 | 
56 | # this randomly selects hyperparameters from the given lists of candidates
57 | def get_random_hyperparameters(out_path):
58 |     SET_BATCH_SIZE = [32, 64, 128] #mb_size
59 | 
60 |     SET_LAYERS = [1,2,3,5] #number of layers
61 |     SET_NODES = [50, 100, 200, 300] #number of nodes
62 | 
63 |     SET_ACTIVATION_FN = ['relu', 'elu', 'tanh'] #non-linear activation functions
64 | 
65 |     SET_ALPHA = [0.1, 0.5, 1.0, 3.0, 5.0] #alpha values -> log-likelihood loss
66 |     SET_BETA = [0.1, 0.5, 1.0, 3.0, 5.0] #beta values -> ranking loss
67 |     SET_GAMMA = [0.1, 0.5, 1.0, 3.0, 5.0] #gamma values -> calibration loss
68 | 
69 |     new_parser = {'mb_size': SET_BATCH_SIZE[np.random.randint(len(SET_BATCH_SIZE))],
70 | 
71 |                   'iteration': 50000,
72 | 
73 |                   'keep_prob': 0.6,
74 |                   'lr_train': 1e-4,
75 | 
76 |                   'h_dim_shared': SET_NODES[np.random.randint(len(SET_NODES))],
77 |                   'h_dim_CS': SET_NODES[np.random.randint(len(SET_NODES))],
78 |                   'num_layers_shared': SET_LAYERS[np.random.randint(len(SET_LAYERS))],
79 |                   'num_layers_CS': SET_LAYERS[np.random.randint(len(SET_LAYERS))],
80 |                   'active_fn': SET_ACTIVATION_FN[np.random.randint(len(SET_ACTIVATION_FN))],
81 | 
82 |                   'alpha': 1.0, #default (set alpha = 1.0 and change beta and gamma)
83 |                   'beta': SET_BETA[np.random.randint(len(SET_BETA))],
84 |                   'gamma': 0, #default (no calibration loss)
85 |                   # 'alpha': SET_ALPHA[np.random.randint(len(SET_ALPHA))],
86 |                   # 'beta': SET_BETA[np.random.randint(len(SET_BETA))],
87 |                   # 'gamma': SET_GAMMA[np.random.randint(len(SET_GAMMA))],
88 | 
89 |                   'out_path': out_path}
90 | 
91 |     return new_parser # outputs the dictionary of the randomly-chosen hyperparameters
92 | 
93 | 
94 | 
95 | 
96 | ##### MAIN SETTING
97 | OUT_ITERATION = 5
98 | RS_ITERATION = 50
99 | 
100 | data_mode = 'METABRIC'
101 | seed = 1234
102 | 
103 | 
104 | ##### IMPORT DATASET
105 | '''
106 |     num_Category    = typically, max event/censoring time * 1.2 (to have enough time horizon)
107 |     num_Event       = number of events, i.e. len(np.unique(label))-1
108 |     max_length      = maximum number of measurements
109 |     x_dim           = data dimension including delta (num_features)
110 |     mask1, mask2    = used for the cause-specific network (FCNet structure)
111 | 
112 |     EVAL_TIMES      = set specific evaluation time horizons at which the validation performance is maximized
113 |                       (this must be selected based on the dataset)
114 | 
115 | '''
116 | if data_mode == 'SYNTHETIC':
117 |     (x_dim), (data, time, label), (mask1, mask2) = impt.import_dataset_SYNTHETIC(norm_mode='standard')
118 |     EVAL_TIMES = [12, 24, 36]
119 | elif data_mode == 'METABRIC':
120 |     (x_dim), (data, time, label), (mask1, mask2) = impt.import_dataset_METABRIC(norm_mode='standard')
121 |     EVAL_TIMES = [144, 288, 432]
122 | else:
123 |     print('ERROR: DATA_MODE NOT FOUND !!!')
124 | 
125 | 
126 | DATA = (data, time, label)
127 | MASK = (mask1, mask2) # masks are required to calculate the loss functions without for-loops
128 | 
129 | out_path = data_mode + '/results/'
130 | 
131 | for itr in range(OUT_ITERATION):
132 | 
133 |     if not os.path.exists(out_path + '/itr_' + str(itr) + '/'):
134 |         os.makedirs(out_path + '/itr_' + str(itr) + '/')
135 | 
136 |     max_valid = 0.
137 |     log_name = out_path + '/itr_' + str(itr) + '/hyperparameters_log.txt'
138 | 
139 |     for r_itr in range(RS_ITERATION):
140 |         print('OUTER_ITERATION: ' + str(itr))
141 |         print('Random search... itr: ' + str(r_itr))
142 |         new_parser = get_random_hyperparameters(out_path)
143 |         print(new_parser)
144 | 
145 |         # get validation performance given the hyperparameters
146 |         tmp_max = get_main.get_valid_performance(DATA, MASK, new_parser, itr, EVAL_TIMES, MAX_VALUE=max_valid)
147 | 
148 |         if tmp_max > max_valid:
149 |             max_valid = tmp_max
150 |             max_parser = new_parser
151 |             save_logging(max_parser, log_name) # save the hyperparameters if this provides the maximum validation performance
152 | 
153 |     print('Current best: ' + str(max_valid))
--------------------------------------------------------------------------------
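The text log written by save_logging round-trips through load_logging; a toy check using the two helpers above (values are arbitrary examples):

params = {'mb_size': 64, 'lr_train': 1e-4, 'active_fn': 'relu', 'w_reg': None}
save_logging(params, 'hyperparameters_log.txt')
print(load_logging('hyperparameters_log.txt'))
# -> {'mb_size': 64, 'lr_train': 0.0001, 'active_fn': 'relu', 'w_reg': None}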
/get_main.py:
--------------------------------------------------------------------------------
1 | '''
2 | This trains DeepHit and outputs the validation performance for the random search.
3 | 
4 | INPUTS:
5 |     - DATA = (data, time, label)
6 |     - MASK = (mask1, mask2)
7 |     - in_parser: dictionary of hyperparameters
8 |     - out_itr: the training/testing split indicator
9 |     - eval_time: None or a list (e.g. [12, 24, 36]) at which the validation of the network is performed
10 |     - MAX_VALUE: maximum validation performance seen so far
11 |     - OUT_ITERATION: total number of training/testing splits
12 |     - seed: random seed for the training/testing/validation splits
13 | 
14 | OUTPUTS:
15 |     - the validation performance of the trained network
16 |     - the trained network is saved in the folder given by "in_parser['out_path'] + '/itr_' + str(out_itr)"
17 | '''
18 | 
19 | _EPSILON = 1e-08
20 | 
21 | 
22 | import numpy as np
23 | import pandas as pd
24 | import tensorflow as tf
25 | import random
26 | import os
27 | # import sys
28 | 
29 | from termcolor import colored
30 | from tensorflow.contrib.layers import fully_connected as FC_Net
31 | from sklearn.metrics import brier_score_loss
32 | from sklearn.model_selection import train_test_split
33 | 
34 | import utils_network as utils
35 | 
36 | from class_DeepHit import Model_DeepHit
37 | from utils_eval import c_index, brier_score, weighted_c_index, weighted_brier_score
38 | 
39 | 
40 | 
41 | ##### USER-DEFINED FUNCTIONS
42 | def log(x):
43 |     return tf.log(x + _EPSILON)
44 | 
45 | def div(x, y):
46 |     return tf.div(x, (y + _EPSILON))
47 | 
48 | def f_get_minibatch(mb_size, x, label, time, mask1, mask2):
49 |     idx = range(np.shape(x)[0])
50 |     idx = random.sample(idx, mb_size)
51 | 
52 |     x_mb = x[idx, :].astype(np.float32)
53 |     k_mb = label[idx, :].astype(np.float32) # censoring(0)/event(1,2,..) label
54 |     t_mb = time[idx, :].astype(np.float32)
55 |     m1_mb = mask1[idx, :, :].astype(np.float32) # fc_mask
56 |     m2_mb = mask2[idx, :].astype(np.float32) # fc_mask
57 |     return x_mb, k_mb, t_mb, m1_mb, m2_mb
58 | 
59 | 
60 | def get_valid_performance(DATA, MASK, in_parser, out_itr, eval_time=None, MAX_VALUE=-99, OUT_ITERATION=5, seed=1234):
61 |     ##### DATA & MASK
62 |     (data, time, label) = DATA
63 |     (mask1, mask2) = MASK
64 | 
65 |     x_dim = np.shape(data)[1]
66 |     _, num_Event, num_Category = np.shape(mask1) # dim of mask1: [subj, Num_Event, Num_Category]
67 | 
68 |     ACTIVATION_FN = {'relu': tf.nn.relu, 'elu': tf.nn.elu, 'tanh': tf.nn.tanh}
69 | 
70 |     ##### HYPER-PARAMETERS
71 |     mb_size = in_parser['mb_size']
72 | 
73 |     iteration = in_parser['iteration']
74 | 
75 |     keep_prob = in_parser['keep_prob']
76 |     lr_train = in_parser['lr_train']
77 | 
78 | 
79 |     alpha = in_parser['alpha'] # for the log-likelihood loss
80 |     beta = in_parser['beta'] # for the ranking loss
81 |     gamma = in_parser['gamma'] # for the calibration loss
82 |     parameter_name = 'a' + str('%02.0f' %(10*alpha)) + 'b' + str('%02.0f' %(10*beta)) + 'c' + str('%02.0f' %(10*gamma))
83 | 
84 |     initial_W = tf.contrib.layers.xavier_initializer()
85 | 
86 | 
87 |     ##### MAKE DICTIONARIES
88 |     # INPUT DIMENSIONS
89 |     input_dims = { 'x_dim'        : x_dim,
90 |                    'num_Event'    : num_Event,
91 |                    'num_Category' : num_Category}
92 | 
93 |     # NETWORK HYPER-PARAMETERS
94 |     network_settings = { 'h_dim_shared'      : in_parser['h_dim_shared'],
95 |                          'num_layers_shared' : in_parser['num_layers_shared'],
96 |                          'h_dim_CS'          : in_parser['h_dim_CS'],
97 |                          'num_layers_CS'     : in_parser['num_layers_CS'],
98 |                          'active_fn'         : ACTIVATION_FN[in_parser['active_fn']],
99 |                          'initial_W'         : initial_W }
100 | 
101 | 
102 |     file_path_final = in_parser['out_path'] + '/itr_' + str(out_itr)
103 | 
104 |     # change parameters...
105 |     if not os.path.exists(file_path_final + '/models/'):
106 |         os.makedirs(file_path_final + '/models/')
107 | 
108 | 
109 |     print(file_path_final + ' (a:' + str(alpha) + ' b:' + str(beta) + ' c:' + str(gamma) + ')')
110 | 
111 |     ##### CREATE DEEPHIT NETWORK
112 |     tf.reset_default_graph()
113 | 
114 |     config = tf.ConfigProto()
115 |     config.gpu_options.allow_growth = True
116 |     sess = tf.Session(config=config)
117 | 
118 |     model = Model_DeepHit(sess, "DeepHit", input_dims, network_settings)
119 |     saver = tf.train.Saver()
120 | 
121 |     sess.run(tf.global_variables_initializer())
122 | 
123 | 
124 |     ### TRAINING-TESTING SPLIT
125 |     (tr_data,te_data, tr_time,te_time, tr_label,te_label,
126 |      tr_mask1,te_mask1, tr_mask2,te_mask2) = train_test_split(data, time, label, mask1, mask2, test_size=0.20, random_state=seed)
127 | 
128 | 
129 |     (tr_data,va_data, tr_time,va_time, tr_label,va_label,
130 |      tr_mask1,va_mask1, tr_mask2,va_mask2) = train_test_split(tr_data, tr_time, tr_label, tr_mask1, tr_mask2, test_size=0.20, random_state=seed)
131 | 
132 |     max_valid = -99
133 |     stop_flag = 0
134 | 
135 |     if eval_time is None:
136 |         eval_time = [int(np.percentile(tr_time, 25)), int(np.percentile(tr_time, 50)), int(np.percentile(tr_time, 75))]
137 | 
138 | 
139 |     ### TRAINING - MAIN
140 |     print("MAIN TRAINING ...")
141 |     print("EVALUATION TIMES: " + str(eval_time))
142 | 
143 |     avg_loss = 0
144 |     for itr in range(iteration):
145 |         if stop_flag > 5: # for faster early stopping
146 |             break
147 |         else:
148 |             x_mb, k_mb, t_mb, m1_mb, m2_mb = f_get_minibatch(mb_size, tr_data, tr_label, tr_time, tr_mask1, tr_mask2)
149 |             DATA = (x_mb, k_mb, t_mb)
150 |             MASK = (m1_mb, m2_mb)
151 |             PARAMETERS = (alpha, beta, gamma)
152 |             _, loss_curr = model.train(DATA, MASK, PARAMETERS, keep_prob, lr_train)
153 |             avg_loss += loss_curr/1000
154 | 
155 |             if (itr+1)%1000 == 0:
156 |                 print('|| ITR: ' + str('%04d' % (itr + 1)) + ' | Loss: ' + colored(str('%.4f' %(avg_loss)), 'yellow', attrs=['bold']))
157 |                 avg_loss = 0
158 | 
159 |             ### VALIDATION (based on the average C-index at the horizons of interest)
160 |             if (itr+1)%1000 == 0:
161 |                 ### PREDICTION
162 |                 pred = model.predict(va_data)
163 | 
164 |                 ### EVALUATION
165 |                 va_result1 = np.zeros([num_Event, len(eval_time)])
166 | 
167 |                 for t, t_time in enumerate(eval_time):
168 |                     eval_horizon = int(t_time)
169 | 
170 |                     if eval_horizon >= num_Category:
171 |                         print('ERROR: evaluation horizon is out of range')
172 |                         va_result1[:, t] = -1
173 |                     else:
174 |                         risk = np.sum(pred[:,:,:(eval_horizon+1)], axis=2) # risk score until eval_time
175 |                         for k in range(num_Event):
176 |                             # va_result1[k, t] = c_index(risk[:,k], va_time, (va_label[:,0] == k+1).astype(int), eval_horizon) # -1 for no event (not comparable)
177 |                             va_result1[k, t] = weighted_c_index(tr_time, (tr_label[:,0] == k+1).astype(int), risk[:,k], va_time, (va_label[:,0] == k+1).astype(int), eval_horizon)
178 |                 tmp_valid = np.mean(va_result1)
179 | 
180 | 
181 |                 if tmp_valid > max_valid:
182 |                     stop_flag = 0
183 |                     max_valid = tmp_valid
184 |                     print('updated.... average c-index = ' + str('%.4f' %(tmp_valid)))
185 | 
186 |                     if max_valid > MAX_VALUE:
187 |                         saver.save(sess, file_path_final + '/models/model_itr_' + str(out_itr))
188 |                 else:
189 |                     stop_flag += 1
190 | 
191 |     return max_valid
192 | 
--------------------------------------------------------------------------------
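A condensed sketch of how main_RandomSearch.py drives this function (the hyper-parameter values below are arbitrary examples; a real call trains for in_parser['iteration'] minibatches):

import import_data as impt
import get_main

(x_dim), (data, time, label), (mask1, mask2) = impt.import_dataset_METABRIC(norm_mode='standard')

in_parser = {'mb_size': 64, 'iteration': 50000, 'keep_prob': 0.6, 'lr_train': 1e-4,
             'h_dim_shared': 100, 'num_layers_shared': 2, 'h_dim_CS': 100, 'num_layers_CS': 2,
             'active_fn': 'relu', 'alpha': 1.0, 'beta': 1.0, 'gamma': 0,
             'out_path': 'METABRIC/results/'}

max_valid = get_main.get_valid_performance((data, time, label), (mask1, mask2),
                                           in_parser, out_itr=0, eval_time=[144, 288, 432])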
/summarize_results.py:
--------------------------------------------------------------------------------
1 | _EPSILON = 1e-08
2 | 
3 | import numpy as np
4 | import pandas as pd
5 | import tensorflow as tf
6 | import random
7 | import os
8 | # import sys
9 | 
10 | from termcolor import colored
11 | from tensorflow.contrib.layers import fully_connected as FC_Net
12 | from sklearn.metrics import brier_score_loss
13 | from sklearn.model_selection import train_test_split
14 | 
15 | import import_data as impt
16 | import utils_network as utils
17 | 
18 | from class_DeepHit import Model_DeepHit
19 | from utils_eval import c_index, brier_score, weighted_c_index, weighted_brier_score
20 | 
21 | 
22 | def load_logging(filename):
23 |     data = dict()
24 |     with open(filename) as f:
25 |         def is_float(input):
26 |             try:
27 |                 num = float(input)
28 |             except ValueError:
29 |                 return False
30 |             return True
31 | 
32 |         for line in f.readlines():
33 |             if ':' in line:
34 |                 key, value = line.strip().split(':', 1)
35 |                 if value.isdigit():
36 |                     data[key] = int(value)
37 |                 elif is_float(value):
38 |                     data[key] = float(value)
39 |                 elif value == 'None':
40 |                     data[key] = None
41 |                 else:
42 |                     data[key] = value
43 |             else:
44 |                 pass # deal with bad lines of text here
45 |     return data
46 | 
47 | 
48 | 
49 | ##### MAIN SETTING
50 | OUT_ITERATION = 5
51 | 
52 | data_mode = 'SYNTHETIC' #METABRIC, SYNTHETIC
53 | seed = 1234
54 | 
55 | EVAL_TIMES = [12, 24, 36] # evaluation times (for the C-index and Brier score)
56 | 
57 | 
58 | ##### IMPORT DATASET
59 | '''
60 |     num_Category    = max event/censoring time * 1.2 (to have enough time horizon)
61 |     num_Event       = number of events, i.e. len(np.unique(label))-1
62 |     max_length      = maximum number of measurements
63 |     x_dim           = data dimension including delta (num_features)
64 |     mask1, mask2    = used for the cause-specific network (FCNet structure)
65 | '''
66 | if data_mode == 'SYNTHETIC':
67 |     (x_dim), (data, time, label), (mask1, mask2) = impt.import_dataset_SYNTHETIC(norm_mode='standard')
68 |     EVAL_TIMES = [12, 24, 36]
69 | elif data_mode == 'METABRIC':
70 |     (x_dim), (data, time, label), (mask1, mask2) = impt.import_dataset_METABRIC(norm_mode='standard')
71 |     EVAL_TIMES = [144, 288, 432]
72 | else:
73 |     print('ERROR: DATA_MODE NOT FOUND !!!')
74 | 
75 | _, num_Event, num_Category = np.shape(mask1) # dim of mask1: [subj, Num_Event, Num_Category]
76 | 
77 | 
78 | 
79 | in_path = data_mode + '/results/'
80 | 
81 | if not os.path.exists(in_path):
82 |     os.makedirs(in_path)
83 | 
84 | 
85 | FINAL1 = np.zeros([num_Event, len(EVAL_TIMES), OUT_ITERATION])
86 | FINAL2 = np.zeros([num_Event, len(EVAL_TIMES), OUT_ITERATION])
87 | 
88 | 
89 | for out_itr in range(OUT_ITERATION):
90 |     in_hypfile = in_path + '/itr_' + str(out_itr) + '/hyperparameters_log.txt'
91 |     in_parser = load_logging(in_hypfile)
92 | 
93 | 
94 |     ##### HYPER-PARAMETERS
95 |     mb_size = in_parser['mb_size']
96 | 
97 |     iteration = in_parser['iteration']
98 | 
99 |     keep_prob = in_parser['keep_prob']
100 |     lr_train = in_parser['lr_train']
101 | 
102 |     h_dim_shared = in_parser['h_dim_shared']
103 |     h_dim_CS = in_parser['h_dim_CS']
104 |     num_layers_shared = in_parser['num_layers_shared']
105 |     num_layers_CS = in_parser['num_layers_CS']
106 | 
107 |     if in_parser['active_fn'] == 'relu':
108 |         active_fn = tf.nn.relu
109 |     elif in_parser['active_fn'] == 'elu':
110 |         active_fn = tf.nn.elu
111 |     elif in_parser['active_fn'] == 'tanh':
112 |         active_fn = tf.nn.tanh
113 |     else:
114 |         print('Error!')
115 | 
116 | 
117 |     initial_W = tf.contrib.layers.xavier_initializer()
118 | 
119 |     alpha = in_parser['alpha'] # for the log-likelihood loss
120 |     beta = in_parser['beta'] # for the ranking loss
121 |     gamma = in_parser['gamma'] # for the calibration loss
122 |     parameter_name = 'a' + str('%02.0f' %(10*alpha)) + 'b' + str('%02.0f' %(10*beta)) + 'c' + str('%02.0f' %(10*gamma))
123 | 
124 | 
125 |     ##### MAKE DICTIONARIES
126 |     # INPUT DIMENSIONS
127 |     input_dims = { 'x_dim'        : x_dim,
128 |                    'num_Event'    : num_Event,
129 |                    'num_Category' : num_Category}
130 | 
131 |     # NETWORK HYPER-PARAMETERS
132 |     network_settings = { 'h_dim_shared'      : h_dim_shared,
133 |                          'h_dim_CS'          : h_dim_CS,
134 |                          'num_layers_shared' : num_layers_shared,
135 |                          'num_layers_CS'     : num_layers_CS,
136 |                          'active_fn'         : active_fn,
137 |                          'initial_W'         : initial_W }
138 | 
139 | 
140 |     # for out_itr in range(OUT_ITERATION):
141 |     print('ITR: ' + str(out_itr+1) + ' DATA MODE: ' + data_mode + ' (a:' + str(alpha) + ' b:' + str(beta) + ' c:' + str(gamma) + ')')
142 |     ##### CREATE DEEPHIT NETWORK
143 |     tf.reset_default_graph()
144 | 
145 |     config = tf.ConfigProto()
146 |     config.gpu_options.allow_growth = True
147 |     sess = tf.Session(config=config)
148 | 
149 |     model = Model_DeepHit(sess, "DeepHit", input_dims, network_settings)
150 |     saver = tf.train.Saver()
151 | 
152 |     sess.run(tf.global_variables_initializer())
153 | 
154 |     ### TRAINING-TESTING SPLIT
155 |     (tr_data,te_data, tr_time,te_time, tr_label,te_label,
156 |      tr_mask1,te_mask1, tr_mask2,te_mask2) = train_test_split(data, time, label, mask1, mask2, test_size=0.20, random_state=seed)
157 | 
158 |     (tr_data,va_data, tr_time,va_time, tr_label,va_label,
159 |      tr_mask1,va_mask1, tr_mask2,va_mask2) = train_test_split(tr_data, tr_time, tr_label, tr_mask1, tr_mask2, test_size=0.20, random_state=seed)
160 | 
161 |     ##### PREDICTION & EVALUATION
162 |     saver.restore(sess, in_path + '/itr_' + str(out_itr) + '/models/model_itr_' + str(out_itr))
163 | 
164 |     ### PREDICTION
165 |     pred = model.predict(te_data)
166 | 
167 |     ### EVALUATION
168 |     result1, result2 = np.zeros([num_Event, len(EVAL_TIMES)]), np.zeros([num_Event, len(EVAL_TIMES)])
169 | 
170 |     for t, t_time in enumerate(EVAL_TIMES):
171 |         eval_horizon = int(t_time)
172 | 
173 |         if eval_horizon >= num_Category:
174 |             print('ERROR: evaluation horizon is out of range')
175 |             result1[:, t] = result2[:, t] = -1
176 |         else:
177 |             # calculate F(t | x, Y, t >= t_M) = \sum_{t_M <= \tau < t} P(\tau | x, Y, \tau > t_M)
178 |             risk = np.sum(pred[:,:,:(eval_horizon+1)], axis=2) # risk score until EVAL_TIMES
179 |             for k in range(num_Event):
180 |                 # result1[k, t] = c_index(risk[:,k], te_time, (te_label[:,0] == k+1).astype(float), eval_horizon) # -1 for no event (not comparable)
181 |                 # result2[k, t] = brier_score(risk[:,k], te_time, (te_label[:,0] == k+1).astype(float), eval_horizon) # -1 for no event (not comparable)
182 |                 result1[k, t] = weighted_c_index(tr_time, (tr_label[:,0] == k+1).astype(int), risk[:,k], te_time, (te_label[:,0] == k+1).astype(int), eval_horizon) # -1 for no event (not comparable)
183 |                 result2[k, t] = weighted_brier_score(tr_time, (tr_label[:,0] == k+1).astype(int), risk[:,k], te_time, (te_label[:,0] == k+1).astype(int), eval_horizon) # -1 for no event (not comparable)
184 | 
185 |     FINAL1[:, :, out_itr] = result1
186 |     FINAL2[:, :, out_itr] = result2
187 | 
188 |     ### SAVE RESULTS
189 |     row_header = []
190 |     for t in range(num_Event):
191 |         row_header.append('Event_' + str(t+1))
192 | 
193 |     col_header1 = []
194 |     col_header2 = []
195 |     for t in EVAL_TIMES:
196 |         col_header1.append(str(t) + 'yr c_index')
197 |         col_header2.append(str(t) + 'yr B_score')
198 | 
199 |     # c-index result
200 |     df1 = pd.DataFrame(result1, index=row_header, columns=col_header1)
201 |     df1.to_csv(in_path + '/result_CINDEX_itr' + str(out_itr) + '.csv')
202 | 
203 |     # brier-score result
204 |     df2 = pd.DataFrame(result2, index=row_header, columns=col_header2)
205 |     df2.to_csv(in_path + '/result_BRIER_itr' + str(out_itr) + '.csv')
206 | 
207 |     ### PRINT RESULTS
208 |     print('========================================================')
209 |     print('ITR: ' + str(out_itr+1) + ' DATA MODE: ' + data_mode + ' (a:' + str(alpha) + ' b:' + str(beta) + ' c:' + str(gamma) + ')')
210 |     print('SharedNet Parameters: ' + 'h_dim_shared = ' + str(h_dim_shared) + ' num_layers_shared = ' + str(num_layers_shared) + ' Non-Linearity: ' + str(active_fn))
211 |     print('CSNet Parameters: ' + 'h_dim_CS = ' + str(h_dim_CS) + ' num_layers_CS = ' + str(num_layers_CS) + ' Non-Linearity: ' + str(active_fn))
212 | 
213 |     print('--------------------------------------------------------')
214 |     print('- C-INDEX: ')
215 |     print(df1)
216 |     print('--------------------------------------------------------')
217 |     print('- BRIER-SCORE: ')
218 |     print(df2)
219 |     print('========================================================')
220 | 
221 | 
222 | 
223 | ### FINAL MEAN/STD
224 | # c-index result
225 | df1_mean = pd.DataFrame(np.mean(FINAL1, axis=2), index=row_header, columns=col_header1)
226 | df1_std = pd.DataFrame(np.std(FINAL1, axis=2), index=row_header, columns=col_header1)
227 | df1_mean.to_csv(in_path + '/result_CINDEX_FINAL_MEAN.csv')
228 | df1_std.to_csv(in_path + '/result_CINDEX_FINAL_STD.csv')
229 | 
230 | # brier-score result
231 | df2_mean = pd.DataFrame(np.mean(FINAL2, axis=2), index=row_header, columns=col_header2)
232 | df2_std = pd.DataFrame(np.std(FINAL2, axis=2), index=row_header, columns=col_header2)
233 | df2_mean.to_csv(in_path + '/result_BRIER_FINAL_MEAN.csv')
234 | df2_std.to_csv(in_path + '/result_BRIER_FINAL_STD.csv')
235 | 
236 | 
237 | ### PRINT RESULTS
238 | print('========================================================')
239 | print('- FINAL C-INDEX: ')
240 | print(df1_mean)
241 | print('--------------------------------------------------------')
242 | print('- FINAL BRIER-SCORE: ')
243 | print(df2_mean)
244 | print('========================================================')
--------------------------------------------------------------------------------
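The per-split and aggregated tables written above can be reloaded later for inspection (paths and headers as produced by this script; data_mode = 'SYNTHETIC' assumed):

import pandas as pd

df = pd.read_csv('SYNTHETIC/results/result_CINDEX_FINAL_MEAN.csv', index_col=0)
print(df)  # rows: Event_1, ...; columns: '12yr c_index', '24yr c_index', '36yr c_index'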
/class_DeepHit.py:
--------------------------------------------------------------------------------
1 | '''
2 | This declares the DeepHit architecture:
3 | 
4 | INPUTS:
5 |     - input_dims: dictionary of dimension information
6 |         > x_dim: dimension of features
7 |         > num_Event: number of competing events (this does not include the censoring label)
8 |         > num_Category: dimension of the time horizon of interest, i.e., |T| where T = {0, 1, ..., T_max-1}
9 |                         : this is equivalent to the output dimension
10 |     - network_settings:
11 |         > h_dim_shared & num_layers_shared: number of nodes and number of fully-connected layers for the shared subnetwork
12 |         > h_dim_CS & num_layers_CS: number of nodes and number of fully-connected layers for the cause-specific subnetworks
13 |         > active_fn: 'relu', 'elu', 'tanh'
14 |         > initial_W: Xavier initialization is used as a baseline
15 | 
16 | LOSS FUNCTIONS:
17 |     - 1. log-likelihood loss (this includes the log-likelihood of subjects who are censored)
18 |     - 2. ranking loss (this is calculated only for acceptable pairs; see the paper for the definition)
19 |     - 3. calibration loss (this penalizes miscalibrated predictions; it is not included in the paper version)
20 | '''
21 | 
22 | import numpy as np
23 | import tensorflow as tf
24 | import random
25 | 
26 | from tensorflow.contrib.layers import fully_connected as FC_Net
27 | 
28 | ### user-defined functions
29 | import utils_network as utils
30 | 
31 | _EPSILON = 1e-08
32 | 
33 | 
34 | 
35 | ##### USER-DEFINED FUNCTIONS
36 | def log(x):
37 |     return tf.log(x + _EPSILON)
38 | 
39 | def div(x, y):
40 |     return tf.div(x, (y + _EPSILON))
41 | 
42 | 
43 | class Model_DeepHit:
44 |     def __init__(self, sess, name, input_dims, network_settings):
45 |         self.sess = sess
46 |         self.name = name
47 | 
48 |         # INPUT DIMENSIONS
49 |         self.x_dim = input_dims['x_dim']
50 | 
51 |         self.num_Event = input_dims['num_Event']
52 |         self.num_Category = input_dims['num_Category']
53 | 
54 |         # NETWORK HYPER-PARAMETERS
55 |         self.h_dim_shared = network_settings['h_dim_shared']
56 |         self.h_dim_CS = network_settings['h_dim_CS']
57 |         self.num_layers_shared = network_settings['num_layers_shared']
58 |         self.num_layers_CS = network_settings['num_layers_CS']
59 | 
60 |         self.active_fn = network_settings['active_fn']
61 |         self.initial_W = network_settings['initial_W']
62 |         self.reg_W = tf.contrib.layers.l2_regularizer(scale=1e-4)
63 |         self.reg_W_out = tf.contrib.layers.l1_regularizer(scale=1e-4)
64 | 
65 |         self._build_net()
66 | 
67 | 
68 |     def _build_net(self):
69 |         with tf.variable_scope(self.name):
70 |             #### PLACEHOLDER DECLARATION
71 |             self.mb_size = tf.placeholder(tf.int32, [], name='batch_size')
72 |             self.lr_rate = tf.placeholder(tf.float32, [], name='learning_rate')
73 |             self.keep_prob = tf.placeholder(tf.float32, [], name='keep_probability') # keeping rate
74 |             self.a = tf.placeholder(tf.float32, [], name='alpha')
75 |             self.b = tf.placeholder(tf.float32, [], name='beta')
76 |             self.c = tf.placeholder(tf.float32, [], name='gamma')
77 | 
78 |             self.x = tf.placeholder(tf.float32, shape=[None, self.x_dim], name='inputs')
79 |             self.k = tf.placeholder(tf.float32, shape=[None, 1], name='labels') # event/censoring label (censoring: 0)
80 |             self.t = tf.placeholder(tf.float32, shape=[None, 1], name='timetoevents')
81 | 
82 |             self.fc_mask1 = tf.placeholder(tf.float32, shape=[None, self.num_Event, self.num_Category], name='mask1') # for Loss 1
83 |             self.fc_mask2 = tf.placeholder(tf.float32, shape=[None, self.num_Category], name='mask2') # for Loss 2 / Loss 3
84 | 
85 | 
86 |             ##### SHARED SUBNETWORK w/ FCNETS
87 |             shared_out = utils.create_FCNet(self.x, self.num_layers_shared, self.h_dim_shared, self.active_fn, self.h_dim_shared, self.active_fn, self.initial_W, self.keep_prob, self.reg_W)
88 |             last_x = self.x # for the skip connection from the input
89 | 
90 |             h = tf.concat([last_x, shared_out], axis=1)
91 | 
92 |             # (num_layers_CS) layers for the cause-specific subnetworks (num_Event subNets)
93 |             out = []
94 |             for _ in range(self.num_Event):
95 |                 cs_out = utils.create_FCNet(h, (self.num_layers_CS), self.h_dim_CS, self.active_fn, self.h_dim_CS, self.active_fn, self.initial_W, self.keep_prob, self.reg_W)
96 |                 out.append(cs_out)
97 |             out = tf.stack(out, axis=1) # stack referenced on subjects
98 |             out = tf.reshape(out, [-1, self.num_Event*self.h_dim_CS])
99 |             out = tf.nn.dropout(out, keep_prob=self.keep_prob)
100 | 
101 |             out = FC_Net(out, self.num_Event * self.num_Category, activation_fn=tf.nn.softmax,
102 |                          weights_initializer=self.initial_W, weights_regularizer=self.reg_W_out, scope="Output")
103 |             self.out = tf.reshape(out, [-1, self.num_Event, self.num_Category])
104 | 
105 | 
106 |             ##### GET LOSS FUNCTIONS
107 |             self.loss_Log_Likelihood() # get loss1: log-likelihood loss
108 |             self.loss_Ranking() # get loss2: ranking loss
109 |             self.loss_Calibration() # get loss3: calibration loss
110 | 
111 |             self.LOSS_TOTAL = self.a*self.LOSS_1 + self.b*self.LOSS_2 + self.c*self.LOSS_3 + tf.losses.get_regularization_loss()
112 |             self.solver = tf.train.AdamOptimizer(learning_rate=self.lr_rate).minimize(self.LOSS_TOTAL)
113 | 
114 | 
115 |     ### LOSS-FUNCTION 1 -- Log-likelihood loss
116 |     def loss_Log_Likelihood(self):
117 |         I_1 = tf.sign(self.k)
118 | 
119 |         # for uncensored: log P(T=t, K=k | x)
120 |         tmp1 = tf.reduce_sum(tf.reduce_sum(self.fc_mask1 * self.out, reduction_indices=2), reduction_indices=1, keep_dims=True)
121 |         tmp1 = I_1 * log(tmp1)
122 | 
123 |         # for censored: log P(T>t | x) (fc_mask1 is filled with 1's after the censoring time)
124 |         tmp2 = tf.reduce_sum(tf.reduce_sum(self.fc_mask1 * self.out, reduction_indices=2), reduction_indices=1, keep_dims=True)
125 |         tmp2 = (1. - I_1) * log(tmp2)
126 | 
127 |         self.LOSS_1 = - tf.reduce_mean(tmp1 + 1.0*tmp2)
128 | 
129 | 
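    # In plain terms (a restatement of loss_Log_Likelihood above, where out[i, k, t]
    # approximates P(T=t, K=k+1 | x_i) and fc_mask1 comes from f_get_fc_mask2):
    #   - uncensored subject i (k_i > 0): contributes log out[i, k_i-1, t_i]
    #   - censored subject i (k_i = 0):   contributes log sum_{k, t > t_i} out[i, k, t]
    # LOSS_1 is the negative mean of these per-subject contributions.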
130 |     ### LOSS-FUNCTION 2 -- Ranking loss
131 |     def loss_Ranking(self):
132 |         sigma1 = tf.constant(0.1, dtype=tf.float32)
133 | 
134 |         eta = []
135 |         for e in range(self.num_Event):
136 |             one_vector = tf.ones_like(self.t, dtype=tf.float32)
137 |             I_2 = tf.cast(tf.equal(self.k, e+1), dtype=tf.float32) # indicator for the event
138 |             I_2 = tf.diag(tf.squeeze(I_2))
139 |             tmp_e = tf.reshape(tf.slice(self.out, [0, e, 0], [-1, 1, -1]), [-1, self.num_Category]) # event-specific joint prob.
140 | 
141 |             R = tf.matmul(tmp_e, tf.transpose(self.fc_mask2)) # no need to divide by each individual denominator
142 |             # r_{ij} = risk of the i-th patient based on the j-th time-condition (last meas. time ~ event time), i.e. r_i(T_{j})
143 | 
144 |             diag_R = tf.reshape(tf.diag_part(R), [-1, 1])
145 |             R = tf.matmul(one_vector, tf.transpose(diag_R)) - R # R_{ij} = r_{j}(T_{j}) - r_{i}(T_{j})
146 |             R = tf.transpose(R) # now, R_{ij} (i-th row, j-th column) = r_{i}(T_{i}) - r_{j}(T_{i})
147 | 
148 |             T = tf.nn.relu(tf.sign(tf.matmul(one_vector, tf.transpose(self.t)) - tf.matmul(self.t, tf.transpose(one_vector))))
149 |             # T_{ij}=1 if t_i < t_j and T_{ij}=0 if t_i >= t_j
150 | 
151 |             T = tf.matmul(I_2, T) # only T_{ij}=1 remains when the event occurred for subject i
152 | 
153 |             tmp_eta = tf.reduce_mean(T * tf.exp(-R/sigma1), reduction_indices=1, keep_dims=True)
154 | 
155 |             eta.append(tmp_eta)
156 |         eta = tf.stack(eta, axis=1) # stack referenced on subjects
157 |         eta = tf.reduce_mean(tf.reshape(eta, [-1, self.num_Event]), reduction_indices=1, keep_dims=True)
158 | 
159 |         self.LOSS_2 = tf.reduce_sum(eta) # sum over num_Events
160 | 
161 | 
162 | 
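    # A pairwise reading of the loss above (a restatement, not extra functionality):
    # for an "acceptable" pair (i, j) with k_i = e and t_i < t_j, the term
    # exp(-(r_i(t_i) - r_j(t_i)) / sigma1) is accumulated, where r_i(t) is the predicted
    # CIF of subject i at time t; minimizing it pushes the risk of the earlier-event
    # subject above that of the later or censored one.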
163 |     ### LOSS-FUNCTION 3 -- Calibration Loss
164 |     def loss_Calibration(self):
165 |         eta = []
166 |         for e in range(self.num_Event):
167 |             one_vector = tf.ones_like(self.t, dtype=tf.float32)
168 |             I_2 = tf.cast(tf.equal(self.k, e+1), dtype=tf.float32) # indicator for the event
169 |             tmp_e = tf.reshape(tf.slice(self.out, [0, e, 0], [-1, 1, -1]), [-1, self.num_Category]) # event-specific joint prob.
170 | 171 | r = tf.reduce_sum(tmp_e * self.fc_mask2, axis=0) #no need to divide by each individual dominator 172 | tmp_eta = tf.reduce_mean((r - I_2)**2, reduction_indices=1, keep_dims=True) 173 | 174 | eta.append(tmp_eta) 175 | eta = tf.stack(eta, axis=1) #stack referenced on subjects 176 | eta = tf.reduce_mean(tf.reshape(eta, [-1, self.num_Event]), reduction_indices=1, keep_dims=True) 177 | 178 | self.LOSS_3 = tf.reduce_sum(eta) #sum over num_Events 179 | 180 | 181 | def get_cost(self, DATA, MASK, PARAMETERS, keep_prob, lr_train): 182 | (x_mb, k_mb, t_mb) = DATA 183 | (m1_mb, m2_mb) = MASK 184 | (alpha, beta, gamma) = PARAMETERS 185 | return self.sess.run(self.LOSS_TOTAL, 186 | feed_dict={self.x:x_mb, self.k:k_mb, self.t:t_mb, self.fc_mask1: m1_mb, self.fc_mask2:m2_mb, 187 | self.a:alpha, self.b:beta, self.c:gamma, 188 | self.mb_size: np.shape(x_mb)[0], self.keep_prob:keep_prob, self.lr_rate:lr_train}) 189 | 190 | def train(self, DATA, MASK, PARAMETERS, keep_prob, lr_train): 191 | (x_mb, k_mb, t_mb) = DATA 192 | (m1_mb, m2_mb) = MASK 193 | (alpha, beta, gamma) = PARAMETERS 194 | return self.sess.run([self.solver, self.LOSS_TOTAL], 195 | feed_dict={self.x:x_mb, self.k:k_mb, self.t:t_mb, self.fc_mask1: m1_mb, self.fc_mask2:m2_mb, 196 | self.a:alpha, self.b:beta, self.c:gamma, 197 | self.mb_size: np.shape(x_mb)[0], self.keep_prob:keep_prob, self.lr_rate:lr_train}) 198 | 199 | def predict(self, x_test, keep_prob=1.0): 200 | return self.sess.run(self.out, feed_dict={self.x: x_test, self.mb_size: np.shape(x_test)[0], self.keep_prob: keep_prob}) 201 | 202 | # def predict(self, x_test, MASK, keep_prob=1.0): 203 | # (m1_test, m2_test) = MASK 204 | # return self.sess.run(self.out, 205 | # feed_dict={self.x: x_test, self.rnn_mask1:m1_test, self.rnn_mask2:m2_test, self.keep_prob: keep_prob}) 206 | -------------------------------------------------------------------------------- /sample data/METABRIC/label.csv: -------------------------------------------------------------------------------- 1 | event_time,label 2 | 2999 ,0 3 | 1484 ,0 4 | 3053 ,0 5 | 1721 ,0 6 | 1241 ,1 7 | 234 ,1 8 | 2947 ,0 9 | 672 ,1 10 | 2734 ,1 11 | 2083 ,0 12 | 1097 ,1 13 | 1088 ,1 14 | 2314 ,0 15 | 2258 ,0 16 | 2361 ,0 17 | 424 ,1 18 | 1594 ,0 19 | 1784 ,0 20 | 2005 ,0 21 | 2310 ,0 22 | 1900 ,0 23 | 1718 ,0 24 | 1846 ,0 25 | 2482 ,0 26 | 1420 ,0 27 | 1543 ,0 28 | 2673 ,0 29 | 1583 ,0 30 | 855 ,1 31 | 2149 ,0 32 | 2484 ,1 33 | 2063 ,0 34 | 1493 ,1 35 | 2611 ,0 36 | 1844 ,0 37 | 242 ,1 38 | 2713 ,0 39 | 1333 ,0 40 | 1481 ,0 41 | 1866 ,0 42 | 1281 ,1 43 | 1433 ,0 44 | 2554 ,0 45 | 3148 ,0 46 | 1175 ,1 47 | 1384 ,0 48 | 402 ,0 49 | 2002 ,1 50 | 2474 ,0 51 | 247 ,0 52 | 364 ,0 53 | 872 ,1 54 | 1694 ,0 55 | 2569 ,0 56 | 2527 ,0 57 | 2368 ,0 58 | 49 ,0 59 | 1479 ,0 60 | 1918 ,0 61 | 1882 ,0 62 | 1157 ,1 63 | 1870 ,0 64 | 1999 ,1 65 | 2612 ,0 66 | 388 ,1 67 | 2398 ,0 68 | 2271 ,0 69 | 1405 ,0 70 | 2596 ,0 71 | 169 ,0 72 | 372 ,0 73 | 1630 ,1 74 | 4562 ,1 75 | 1841 ,0 76 | 2580 ,0 77 | 1553 ,1 78 | 53 ,0 79 | 1551 ,1 80 | 2392 ,0 81 | 1477 ,1 82 | 1860 ,0 83 | 2567 ,0 84 | 1884 ,0 85 | 1794 ,0 86 | 59 ,0 87 | 453 ,0 88 | 1729 ,0 89 | 2259 ,0 90 | 2384 ,0 91 | 1673 ,0 92 | 1141 ,1 93 | 325 ,0 94 | 1428 ,0 95 | 2282 ,0 96 | 1293 ,1 97 | 2283 ,0 98 | 2215 ,0 99 | 2801 ,1 100 | 163 ,0 101 | 1854 ,0 102 | 130 ,0 103 | 2363 ,0 104 | 2171 ,0 105 | 2363 ,0 106 | 1368 ,1 107 | 2755 ,0 108 | 538 ,1 109 | 1870 ,0 110 | 2215 ,0 111 | 2297 ,0 112 | 1315 ,1 113 | 939 ,1 114 | 272 ,1 115 | 459 ,1 116 | 1877 ,0 117 | 283 ,0 118 | 2107 ,0 119 | 2019 ,0 
[rows 120 through 1982 of sample data/METABRIC/label.csv omitted for brevity: each row holds two comma-separated values, an observed time-to-event followed by a binary event label (1 = event observed, 0 = right-censored), e.g. "2184 ,0" and "1763 ,1"]
--------------------------------------------------------------------------------
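For reference, below is a minimal sketch of how this two-column label file could be loaded in Python. It is illustrative only, not the repository's own import_data.py logic; the column names ('event_time', 'label') and the assumption that the file begins with a header row are guesses about the layout.

import pandas as pd

# Illustrative loader for sample data/METABRIC/label.csv (not the repo's
# import_data.py). Assumes a header row; if the file has none, pass
# header=None instead. Column names here are hypothetical.
df = pd.read_csv('sample data/METABRIC/label.csv')
df.columns = ['event_time', 'label']

time  = df['event_time'].astype(float).values   # observed time-to-event
event = df['label'].astype(int).values          # 1 = event, 0 = censored

print(f'{len(time)} subjects: {event.sum()} events, '
      f'{len(event) - event.sum()} censored')

In the usual survival-analysis convention, a label of 1 at time t means the event was observed at t, while 0 means follow-up ended at t without the event having occurred.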