├── DataProcessing ├── ClusterPre.py ├── EdgeMask.py ├── PairDistance.py ├── PathClass.py ├── SimCon.py ├── SimReg.py ├── create_dataset.py ├── degree_code.py └── utils.py ├── DownStream ├── cold_start.py ├── create_sample.py ├── utils.py └── warm_start.py ├── MSSL └── models │ ├── get_feature.py │ ├── layers.py │ ├── models_class.py │ ├── models_reg.py │ ├── models_sim.py │ ├── train_class.py │ ├── train_reg.py │ ├── train_sim.py │ └── utils.py ├── PES_cold.sh ├── PES_warm.sh ├── Program Operation Guideline.pdf ├── README.md └── data ├── BioHNsdata ├── BioHNs.txt ├── BioHNs_clean.txt ├── BioHNs_code.txt ├── BioHNsdata_with order │ ├── BioHNs_code_with_order.txt │ ├── BioHNs_with_order.txt │ ├── disease_sim_with_order.txt │ ├── drug_sim_with_order.txt │ └── protein_sim_with_order.txt ├── disease.txt ├── disease_sim.txt ├── drug.txt ├── drug_sim.txt ├── protein.txt └── protein_sim.txt ├── DownStreamdata ├── DDInet.txt └── DTInet.txt └── SSLdata ├── ClusterPre.txt ├── EdgeMask.txt ├── PairDistance.txt ├── PathClass.txt ├── SSL_data_description.txt ├── SimCon.txt └── SimReg.txt /DataProcessing/ClusterPre.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import random 3 | import datetime 4 | import networkx as nx 5 | import math 6 | import argparse 7 | from utils import Biograph 8 | 9 | path = '../data/' 10 | parser = argparse.ArgumentParser() 11 | parser.add_argument('--downstream', type=str, default='DDI', help='The name of downstream') 12 | parser.add_argument('--scenario', type=str, default='warm', help='The test scenario of downstream') 13 | parser.add_argument('--dataclean', type=int, default=0, help='Whether to remove the test data from SSL dataset.') # 14 | 15 | args = parser.parse_args() 16 | 17 | 18 | 19 | 20 | def clustering_coefficient(): 21 | G, BioHNs =Biograph(args.downstream, args.scenario, args.dataclean) 22 | node_cent=open(path + "SSLdata/ClusterPre.txt",'w') 23 | for i in range(len(G.nodes)): 24 | node_cent.write(str(i) + " " + str(nx.clustering(G,i)) + "\n") 25 | 26 | 27 | 28 | 29 | if __name__ == "__main__": 30 | clustering_coefficient() 31 | 32 | # Biograph() 33 | 34 | 35 | -------------------------------------------------------------------------------- /DataProcessing/EdgeMask.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import random 3 | import datetime 4 | import networkx as nx 5 | import os 6 | from utils import Biograph 7 | import argparse 8 | 9 | # 721 (0,721) 10 | # 1894 (721, 2615) 11 | # 431 (2615, 3046) 12 | 13 | path = '../data/' 14 | parser = argparse.ArgumentParser() 15 | parser.add_argument('--downstream', type=str, default='DDI', help='The name of downstream') 16 | parser.add_argument('--scenario', type=str, default='warm', help='The test scenario of downstream') 17 | parser.add_argument('--dataclean', type=int, default=0, help='Whether to remove the test data from SSL dataset.') # 18 | 19 | args = parser.parse_args() 20 | 21 | def mask_edge(): 22 | G, BioHNs =Biograph(args.downstream, args.scenario, args.dataclean) 23 | edge, false=[], [] 24 | c1, c2, c3, c4 = 0, 0, 0, 0 25 | for eg in G.edges(): 26 | eglist=list(eg) 27 | eglist.sort() 28 | neg=list(G.neighbors(eglist[0])) 29 | if eglist[0]<721: 30 | if eglist[1]<721: 31 | for i in range(1): 32 | # if np.random.rand()<0.5: # a certain number of drug-drug interaction 33 | edge.append([eglist[0], eglist[1], 1]) 34 | rand=np.random.randint(721) 35 | while(rand in neg): 36 | 
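                        # rejection sampling: redraw until the candidate drug id is not
                        # an existing neighbour, yielding a negative (non-interacting) pair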
                        rand = np.random.randint(721)
                    false.append([eglist[0], rand, 4])
                    c1 += 1
            elif eglist[1] < 2615:
                for i in range(1):  # a certain number of drug-protein interactions
                    edge.append([eglist[0], eglist[1], 2])
                    rand = np.random.randint(721, 2615)
                    while(rand in neg):
                        rand = np.random.randint(721, 2615)
                    false.append([eglist[0], rand, 4])
                    c2 += 1
            else:
                for i in range(1):  # a certain number of drug-disease interactions
                    edge.append([eglist[0], eglist[1], 3])
                    rand = np.random.randint(2615, 3046)
                    while(rand in neg):
                        rand = np.random.randint(2615, 3046)
                    false.append([eglist[0], rand, 4])
                    c3 += 1
        else:
            if eglist[1] < 2615:
                for i in range(1):  # a certain number of protein-protein interactions
                    edge.append([eglist[0], eglist[1], 1])
                    rand = np.random.randint(721, 2615)
                    while(rand in neg):
                        rand = np.random.randint(721, 2615)
                    false.append([eglist[0], rand, 4])
                    c1 += 1
            else:
                for i in range(1):  # a certain number of protein-disease interactions
                    edge.append([eglist[0], eglist[1], 0])
                    rand = np.random.randint(2615, 3046)
                    while(rand in neg):
                        rand = np.random.randint(2615, 3046)
                    false.append([eglist[0], rand, 4])
                    c4 += 1
    temp = np.array(edge)
    # temp = np.vstack((np.array(edge), np.array(false)))
    np.savetxt(path + "/SSLdata/EdgeMask.txt", temp, fmt="%d")
    print('the number of each class:', c1, c2, c3, c4, c1 + c2 + c3 + c4)

if __name__ == "__main__":
    mask_edge()

--------------------------------------------------------------------------------
/DataProcessing/PairDistance.py:
--------------------------------------------------------------------------------
import numpy as np
import random
import datetime
import networkx as nx
from utils import Biograph
import argparse

path = '../data/'
parser = argparse.ArgumentParser()
parser.add_argument('--downstream', type=str, default='DDI', help='The name of the downstream task')
parser.add_argument('--scenario', type=str, default='warm', help='The test scenario of the downstream task')
parser.add_argument('--dataclean', type=int, default=0, help='Whether to remove the test data from the SSL dataset.')

args = parser.parse_args()


def global_distance():
    G, BioHNs = Biograph(args.downstream, args.scenario, args.dataclean)
    dis_sample = []
    one_hop, two_hop, three_hop, four_hop = 0, 0, 0, 0  # 221140 3359298 5042652 597242
    prob = 0.06  # sampling probability
    for i in range(len(G.nodes)):
        for u in range(len(G.nodes)):
            if nx.has_path(G, i, u):
                length = nx.shortest_path_length(G, i, u)
                if length == 1 and np.random.rand(1) < prob:
                    dis_sample.append([i, u, length])
                    one_hop += 1
                elif length == 2 and np.random.rand(1) < prob * 0.0658:
                    dis_sample.append([i, u, length])
                    two_hop += 1
                elif length == 3 and np.random.rand(1) < prob * 0.0439:
                    dis_sample.append([i, u, length])
                    three_hop += 1
                elif length > 3 and np.random.rand(1) < prob * 0.3703:
                    dis_sample.append([i, u, 4])
                    four_hop += 1

    np.savetxt(path + "SSLdata/PairDistance.txt", np.array(dis_sample), fmt="%d")
    print("one_hop, two_hop, three_hop, four_hop", one_hop, two_hop, three_hop, four_hop,
          one_hop + two_hop + three_hop + four_hop)


if __name__ == "__main__":
    global_distance()

--------------------------------------------------------------------------------
/DataProcessing/PathClass.py:
--------------------------------------------------------------------------------
import numpy as np
import random
import datetime
import networkx as nx
import os
import shutil
from utils import Biograph
import argparse

# 721 (0,721)
# 1894 (721, 2615)
# 431 (2615, 3046)

path = '../data/'
parser = argparse.ArgumentParser()
parser.add_argument('--downstream', type=str, default='DDI', help='The name of the downstream task')
parser.add_argument('--scenario', type=str, default='warm', help='The test scenario of the downstream task')
parser.add_argument('--dataclean', type=int, default=0, help='Whether to remove the test data from the SSL dataset.')

args = parser.parse_args()


def cur_neighbor(G, cur_node, start, end):
    neighbor = list(G.neighbors(cur_node))
    neighbors = []
    for tem in neighbor:
        if tem >= start and tem < end:
            neighbors.append(tem)
    return neighbors

# [The remainder of PathClass.py was lost when this dump was extracted (an
#  unescaped '<' swallowed the text up to the middle of SimCon.py) and could
#  not be recovered.]
--------------------------------------------------------------------------------
/DataProcessing/SimCon.py:
--------------------------------------------------------------------------------
# [The first half of this file was lost in the same extraction accident; the
#  lines above the surviving `elif` branch are reconstructed from context,
#  mirroring the similarity-loading code in SimReg.py.]
import numpy as np

path = '../data/'

drug_sim = np.loadtxt(path + "BioHNsdata/drug_sim.txt")
pro_sim = np.loadtxt(path + "BioHNsdata/protein_sim.txt")
disease_sim = np.loadtxt(path + "BioHNsdata/disease_sim.txt")
drug_num, pro_num, disease_num = drug_sim.shape[0], pro_sim.shape[0], disease_sim.shape[0]


def sim_contrast(pairnum):
    sample_list, count = [], 0
    for i in range(drug_num + pro_num + disease_num):
        if i < 721:
            matsim = drug_sim
            numsize = drug_num
            k = 0
        elif i > 720 and i < 2615:
            matsim = pro_sim
            numsize = pro_num
            k = drug_num
        else:
            matsim = disease_sim
            numsize = disease_num
            k = drug_num + pro_num

        for j in range(pairnum):  # a certain number of three-tuples
            rand1 = np.random.randint(numsize)
            rand2 = np.random.randint(numsize)
            diff = matsim[i - k, rand1] - matsim[i - k, rand2]
            while diff < 0:
                rand1 = np.random.randint(numsize)
                rand2 = np.random.randint(numsize)
                diff = matsim[i - k, rand1] - matsim[i - k, rand2]
            sample_list.append([i, rand1 + k, rand2 + k, diff])
            count += 1
    simsample = open(path + "SSLdata/SimCon.txt", 'w')
    for i in range(len(sample_list)):
        simsample.write(str(sample_list[i][0]) + " " + str(sample_list[i][1]) + " " +
                        str(sample_list[i][2]) + " " + str(sample_list[i][3]) + "\n")
    print("sample number:", count)


if __name__ == "__main__":
    sim_contrast(pairnum=15)

--------------------------------------------------------------------------------
/DataProcessing/SimReg.py:
--------------------------------------------------------------------------------
import numpy as np
import random
import datetime
import networkx as nx
import math

path = '../data/'

drug_sim = np.loadtxt(path + "BioHNsdata/drug_sim.txt")
pro_sim = np.loadtxt(path + "BioHNsdata/protein_sim.txt")
disease_sim = np.loadtxt(path + "BioHNsdata/disease_sim.txt")
drug_num, pro_num, disease_num = drug_sim.shape[0], pro_sim.shape[0], disease_sim.shape[0]


def sim_regression(dgpair=70, propair=180, dispair=40):  # a certain number of node pairs
    simsample = open(path + "SSLdata/SimReg.txt", 'w')
    kdg, kpro, kdis = 0, 0, 0
    for i in range(drug_num):
        dg = np.random.choice(drug_num, dgpair, replace=False)  # a certain number of drug pairs
        for j in dg:
            simsample.write(str(i) + " " + str(j) + " " + str(drug_sim[i][j]) + "\n")
            kdg += 1

    for i in range(pro_num):
        pro = np.random.choice(pro_num, propair, replace=False)  # a certain number of protein pairs
        for j in pro:
            simsample.write(str(i + drug_num) + " " + str(j + drug_num) + " " + str(pro_sim[i][j]) + "\n")
            kpro += 1

    for i in range(disease_num):
        dis = np.random.choice(disease_num, dispair, replace=False)  # a certain number of disease pairs
        for j in dis:
            simsample.write(str(i + drug_num + pro_num) + " " + str(j + drug_num + pro_num) + " " +
                            str(disease_sim[i][j]) + "\n")
            kdis += 1
    print("similarity of drug, protein and disease: ", kdg, kpro, kdis, kdg + kpro + kdis)


if __name__ == "__main__":
    sim_regression(dgpair=18, propair=48,
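                   # dgpair/propair/dispair set how many similarity-labelled pairs are
                   # sampled per drug, protein, and disease node when building SimReg.txt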
                   dispair=12)  # a certain number of node pairs

--------------------------------------------------------------------------------
/DataProcessing/create_dataset.py:
--------------------------------------------------------------------------------
import random
import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--input_file', type=str, default=False, help='the path of the input file.')

args = parser.parse_args()

with open(args.input_file, 'r') as f:
    lines = f.readlines()

random.shuffle(lines)
num_sample = len(lines)
num_test = int(num_sample * 0.01)
test = lines[:num_test]
train = lines[num_test:]
file_prefix = args.input_file[:args.input_file.rfind('.')]
train_file = file_prefix + '_train.txt'
test_file = file_prefix + '_test.txt'

print("input file " + str(args.input_file))
with open(train_file, 'w') as f:
    f.writelines(train)
print("write training dataset successfully, the number of samples: " + str(len(train)))
with open(test_file, 'w') as f:
    f.writelines(test)
print("write testing dataset successfully, the number of samples: " + str(len(test)))
--------------------------------------------------------------------------------
/DataProcessing/degree_code.py:
--------------------------------------------------------------------------------
import numpy as np
import random
import datetime
import networkx as nx
import os
from utils import Biograph
import argparse

# 721 (0,721)
# 1894 (721, 2615)
# 431 (2615, 3046)

path = '../data/'
parser = argparse.ArgumentParser()
parser.add_argument('--downstream', type=str, default='DDI', help='The name of the downstream task')
parser.add_argument('--scenario', type=str, default='warm', help='The prediction scenario (i.e., warm or cold start)')
parser.add_argument('--dataclean', type=int, default=0, help='Whether to remove the test data from the SSL dataset.')

args = parser.parse_args()


def degree_code():
    G, BioHNs = Biograph(args.downstream, args.scenario, args.dataclean)
    drug_num, protein_num, disease_num = 721, 1894, 431

    drug, protein, disease = [], [], []
    nodes = sorted(list(G.nodes()))

    for i in nodes:
        neigh = list(G.neighbors(i))
        dg, pro, dis = 0, 0, 0
        for nod in neigh:
            if nod < drug_num:
                dg += 1
            elif nod >= drug_num and nod < drug_num + protein_num:
                pro += 1
            else:
                dis += 1
        if dg + pro + dis == 0:  # isolated node: assign one random pseudo-degree
            r = np.random.rand()  # draw once so the three branches are mutually exclusive
            if r < 0.333:
                dg = 1
            elif r < 0.666:
                pro = 1
            else:
                dis = 1
        drug.append(dg)
        protein.append(pro)
        disease.append(dis)

    dg_col, pro_col, dis_col = len(list(set(drug))), len(list(set(protein))), len(list(set(disease)))
    dg_code, pro_code, dis_code = np.zeros((drug_num, dg_col + pro_col + dis_col)), np.zeros(
        (protein_num, dg_col + pro_col + dis_col)), np.zeros((disease_num, dg_col + pro_col + dis_col))
    dglist, prolist, dislist = sorted(list(set(drug))), sorted(list(set(protein))), sorted(list(set(disease)))

    for i in range(drug_num):
        dg_code[i, dglist.index(drug[i])] = 1
        dg_code[i, (prolist.index(protein[i])) + dg_col] = 1
        dg_code[i, (dislist.index(disease[i])) + dg_col + pro_col] = 1

    for i in range(protein_num):
        pro_code[i, dglist.index(drug[i + drug_num])] = 1
        pro_code[i, (prolist.index(protein[i
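        # the degree lists cover all 3046 nodes, so local protein index i maps
        # to global node id i + drug_num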
+ drug_num])) + dg_col] = 1 63 | pro_code[i, (dislist.index(disease[i + drug_num])) + dg_col + pro_col] = 1 64 | 65 | for i in range(disease_num): 66 | dis_code[i, dglist.index(drug[i + drug_num + protein_num])] = 1 67 | dis_code[i, (prolist.index(protein[i + drug_num + protein_num])) + dg_col] = 1 68 | dis_code[i, (dislist.index(disease[i + drug_num + protein_num])) + dg_col + pro_col] = 1 69 | 70 | BioHNs_code = np.vstack((dg_code, pro_code, dis_code)) 71 | np.savetxt("../data/BioHNsdata/BioHNs_code.txt", BioHNs_code, fmt="%d") 72 | np.savetxt("../data/BioHNsdata/BioHNs_clean.txt", BioHNs, fmt="%d") 73 | 74 | print("dg_col, pro_col, dis_col", dg_col, pro_col, dis_col, BioHNs_code.shape) 75 | print("The number", dglist.index(drug[i])) 76 | 77 | 78 | if __name__ == "__main__": 79 | degree_code() 80 | 81 | -------------------------------------------------------------------------------- /DataProcessing/utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import random 3 | import datetime 4 | import networkx as nx 5 | import math 6 | 7 | path = '../data/' 8 | 9 | def Biograph(downstream, scenario, dataclean): 10 | BioHNs = np.loadtxt(path + "BioHNsdata/BioHNs.txt", dtype=int) 11 | G=nx.Graph() 12 | for i in range(BioHNs.shape[0]): 13 | for j in range(BioHNs.shape[0]): 14 | if BioHNs[i,j]==1: 15 | G.add_edge(i,j) 16 | node_num, edge_num = len(G.nodes), len(G.edges) 17 | print('number of nodes and edges:', node_num, edge_num) 18 | 19 | if dataclean: 20 | test_index = np.loadtxt(path + "DownStreamdata/" + downstream + "net_" + scenario + "_test.txt", dtype=int) 21 | test_index = np.array(list(set([tuple(t) for t in test_index]))) 22 | for edg in test_index: 23 | if edg[2] == 1: 24 | G.remove_edge(edg[0], edg[1]) 25 | BioHNs[edg[0], edg[1]] = 0 26 | BioHNs[edg[1], edg[0]] = 0 27 | node_num, edge_num = len(G.nodes), len(G.edges) 28 | print('number of clean nodes and edges:', node_num, edge_num) 29 | return G, BioHNs 30 | -------------------------------------------------------------------------------- /DownStream/cold_start.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import time 4 | import torch.nn.functional as F 5 | import torch.optim as optim 6 | import argparse 7 | import numpy as np 8 | import torch.backends.cudnn as cudnn 9 | from sklearn.metrics import roc_auc_score 10 | from sklearn.metrics import average_precision_score 11 | import os 12 | import copy 13 | 14 | from utils import * 15 | 16 | parser = argparse.ArgumentParser() 17 | parser.add_argument('--no-cuda', action='store_true', default=False, help='Disables CUDA training.') 18 | parser.add_argument('--seed', type=int, default=72, help='Random seed.') 19 | parser.add_argument('--epochs', type=int, default=20, help='Number of epochs to train.') 20 | parser.add_argument('--lr', type=float, default=0.005, help='Initial learning rate.') 21 | parser.add_argument('--weight_decay', type=float, default=5e-4, help='Weight decay (L2 loss on parameters).') 22 | parser.add_argument('--input_file', type=str, default='./', help='the path of input data.') 23 | parser.add_argument('--feature', type=str, default='./', help='the path of node representations.') 24 | parser.add_argument('--batch_size', type=int, default=128, help='the value of batch size.') 25 | parser.add_argument('--ratio', type=float, default=0.05, help='the ratio of test dataset') 26 | parser.add_argument('--save', type=str, 
default='./', help='the path to save model') 27 | 28 | args = parser.parse_args() 29 | args.cuda = not args.no_cuda and torch.cuda.is_available() 30 | 31 | if not os.path.exists(args.save): 32 | os.makedirs(args.save) 33 | 34 | 35 | with open(args.input_file, "r") as f: 36 | input_data = f.readlines() 37 | feature = torch.load(args.feature) 38 | input_feature = feature.shape[1]*2 39 | feature = feature.tolist() 40 | print("load input successfully!") 41 | 42 | def test(): 43 | predicts, labels = [], [] 44 | for X, y in test_iter: 45 | X = X.cuda() 46 | y_hat = model(X) 47 | predicts.extend(y_hat.view(-1).cpu().tolist()) 48 | labels.extend(y.cpu().tolist()) 49 | predicts, labels = np.array(predicts), np.array(labels) 50 | auc = roc_auc_score(labels, predicts) 51 | aupr = average_precision_score(labels, predicts) 52 | return round(auc, 3), round(aupr, 3) 53 | 54 | 55 | def train(): 56 | for X, y in train_iter: 57 | X = X.cuda() 58 | y = y.view(-1, 1).float().cuda() 59 | optimizer.zero_grad() 60 | y_hat = model(X) 61 | l = loss(y_hat, y) 62 | l.backward() 63 | optimizer.step() 64 | 65 | print("Ratio of the test dataset: "+ str(args.ratio)) 66 | print("learning rate: "+ str(args.lr)) 67 | all_auc, all_aupr = [], [] 68 | t_total = time.time() 69 | frequency = int(args.epochs / 5) 70 | 71 | model = nn.Sequential( 72 | # nn.Linear(input_feature, 1), 73 | nn.Linear(input_feature, 64), 74 | nn.ReLU(), 75 | nn.Linear(64, 1), 76 | nn.Sigmoid() 77 | ) 78 | optimizer = optim.Adam(model.parameters(), 79 | lr=args.lr, 80 | weight_decay=args.weight_decay) 81 | loss = nn.BCELoss() 82 | model.cuda() 83 | best_auc, best_aupr = 0, 0 84 | filename = args.save.split('/') 85 | 86 | train_iter, test_iter = load_cold_data(input_data, feature, args.batch_size, filename[0]) 87 | for i in range(args.epochs): 88 | train() 89 | auc, aupr = test() 90 | print("epoch: "+str(i)+" auc: "+str(auc)+" aupr: "+str(aupr)) 91 | best_model = copy.deepcopy(model) 92 | best_auc = auc 93 | best_aupr = aupr 94 | 95 | torch.save(best_model, args.save+'/best_model.pt') 96 | print("AUC: ") 97 | print(best_auc) 98 | print("AUPR: ") 99 | print(best_aupr) 100 | print("Total time elapsed: {:.4f}s".format(time.time() - t_total)) 101 | 102 | -------------------------------------------------------------------------------- /DownStream/create_sample.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import random 3 | import numpy as np 4 | 5 | parser = argparse.ArgumentParser() 6 | parser.add_argument('--input', type=str, default='./', help='path of the input dataset.') 7 | parser.add_argument('--offset', type=int, default=0, help='the choice of downstream tasks. 
0 is DDI, 1 is DTI')
parser.add_argument('--seed', type=int, default=18, help='Random seed.')
parser.add_argument('--warm_ratio', type=float, default=0.1, help='the ratio of the test dataset in warm-start prediction')
parser.add_argument('--cold_ratio', type=float, default=0.05, help='the ratio of the test dataset in cold-start prediction')

args = parser.parse_args()

with open(args.input, 'r') as f:
    lines = f.readlines()
num_postive, num_negtive = 0, 0
for line in lines:
    line = line.split(' ')
    for v in line:
        if int(v) == 0:
            num_negtive += 1
        else:
            num_postive += 1

ratio = num_postive / num_negtive
num_postive, num_negtive = 0, 0
random.seed(args.seed)
num_sample = []
num_row = len(lines)
num_drug, num_pro = 721, 1894


if args.offset:
    offset = num_drug
else:
    offset = 0

for i in range(num_row):
    line = lines[i].split(' ')
    # num_column = len(line)
    if args.offset:
        num = len(line)
    else:
        num = i

    for j in range(num):
        v = int(line[j])
        if v == 1:
            num_postive += 1
            num_sample.append(str(i) + ' ' + str(j + offset) + ' ' + str(v))
        else:
            if random.random() < ratio:
                # (the right-hand side of this comparison was lost in extraction;
                #  `ratio` is restored from the class-balancing logic computed above)

# [The remainder of create_sample.py -- together with DownStream/utils.py,
#  DownStream/warm_start.py, and MSSL/models/get_feature.py -- was lost when
#  this dump was extracted and could not be recovered.]
--------------------------------------------------------------------------------
/MSSL/models/layers.py:
--------------------------------------------------------------------------------
# [The top of this file was also lost in extraction. The README states the GAT
#  layer comes from pyGAT (https://github.com/Diego999/pyGAT), so the missing
#  lines are restored from that repository; the original text resumes at the
#  F.softmax call.]
import torch
import torch.nn as nn
import torch.nn.functional as F


class GraphAttentionLayer(nn.Module):
    """
    Simple GAT layer, similar to https://arxiv.org/abs/1710.10903
    """
    def __init__(self, in_features, out_features, dropout, alpha, concat=True):
        super(GraphAttentionLayer, self).__init__()
        self.dropout = dropout
        self.in_features = in_features
        self.out_features = out_features
        self.alpha = alpha
        self.concat = concat

        self.W = nn.Parameter(torch.empty(size=(in_features, out_features)))
        nn.init.xavier_uniform_(self.W.data, gain=1.414)
        self.a = nn.Parameter(torch.empty(size=(2*out_features, 1)))
        nn.init.xavier_uniform_(self.a.data, gain=1.414)

        self.leakyrelu = nn.LeakyReLU(self.alpha)

    def forward(self, h, adj):
        Wh = torch.mm(h, self.W)  # h.shape: (N, in_features), Wh.shape: (N, out_features)
        e = self._prepare_attentional_mechanism_input(Wh)

        zero_vec = -9e15 * torch.ones_like(e)
        attention = torch.where(adj > 0, e, zero_vec)
        attention = F.softmax(attention, dim=1)
        attention = F.dropout(attention, self.dropout, training=self.training)
        h_prime = torch.matmul(attention, Wh)

        if self.concat:
            return F.elu(h_prime)
        else:
            return h_prime

    def _prepare_attentional_mechanism_input(self, Wh):
        # Wh.shape (N, out_feature)
        # self.a.shape (2 * out_feature, 1)
        # Wh1&2.shape (N, 1)
        # e.shape (N, N)
        Wh1 = torch.matmul(Wh, self.a[:self.out_features, :])
        Wh2 = torch.matmul(Wh, self.a[self.out_features:, :])
        # broadcast add
        e = Wh1 + Wh2.T
        return self.leakyrelu(e)

    def __repr__(self):
        return self.__class__.__name__ + ' (' + str(self.in_features) + ' -> ' + str(self.out_features) + ')'

--------------------------------------------------------------------------------
/MSSL/models/models_class.py:
--------------------------------------------------------------------------------
import torch
import torch.nn as nn
import torch.nn.functional as F
from layers import GraphAttentionLayer


class MGTA(nn.Module):
    def __init__(self, nfeat, nhid, nclass, dropout, alpha, nheads, length, ntask):
        """Dense version of GAT."""
        super(MGTA, self).__init__()
        self.dropout = dropout

        self.private = [GraphAttentionLayer(nfeat, nhid, dropout=dropout, alpha=alpha, concat=True) for _ in range(nheads)]
        for i, attention in enumerate(self.private):
            self.add_module('attention_{}'.format(i), attention)

        self.share = [GraphAttentionLayer(nfeat, nhid, dropout=dropout, alpha=alpha, concat=True) for _ in range(nheads)]
        for i, attention in enumerate(self.share):
            self.add_module('attention2_{}'.format(i), attention)
        self.share_classifier = nn.Linear(nhid * nheads, ntask)

        self.classifier = nn.Linear(nhid * nheads * length * 2, nclass)


    def forward(self, features, adj, path, task):
        x = F.dropout(features, self.dropout, training=self.training)
        private_x = torch.cat([att(x, adj) for att in self.private], dim=1)
        share_x = torch.cat([att(x, adj) for att in self.share], dim=1)
        private_x = F.dropout(private_x, self.dropout, training=self.training)
        share_x = F.dropout(share_x,
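            # the shared (task-invariant) view gets the same dropout as private_x;
            # it then feeds the adversarial task classifier below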
self.dropout, training=self.training) 30 | 31 | share_feature = torch.cat([share_x[node, :].view(1, -1) for p in path for node in p], dim=0) 32 | node_task = self.share_classifier(share_feature) 33 | node_task = F.softmax(torch.sigmoid(node_task), dim=1) 34 | adv_loss = F.cross_entropy(node_task, task) 35 | 36 | private_feature = torch.cat([private_x[node, :].view(1, -1) for p in path for node in p], dim=0) 37 | diff = share_feature.t().matmul(private_feature) 38 | diff_loss = (diff**2).sum() 39 | 40 | x = torch.cat([share_feature, private_feature], dim=1).view(-1, share_feature.shape[1]*path.shape[1]*2) 41 | x = self.classifier(x) 42 | return F.softmax(x, dim=1), adv_loss, diff_loss 43 | -------------------------------------------------------------------------------- /MSSL/models/models_reg.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | from layers import GraphAttentionLayer 5 | 6 | class MGTA(nn.Module): 7 | def __init__(self, nfeat, nhid, nclass, dropout, alpha, nheads, length, ntask): 8 | """Dense version of GAT.""" 9 | super(MGTA, self).__init__() 10 | self.dropout = dropout 11 | 12 | self.private = [GraphAttentionLayer(nfeat, nhid, dropout=dropout, alpha=alpha, concat=True) for _ in range(nheads)] 13 | for i, attention in enumerate(self.private): 14 | self.add_module('attention_{}'.format(i), attention) 15 | 16 | self.share = [GraphAttentionLayer(nfeat, nhid, dropout=dropout, alpha=alpha, concat=True) for _ in range(nheads)] 17 | for i, attention in enumerate(self.share): 18 | self.add_module('attention2_{}'.format(i), attention) 19 | self.share_classifier = nn.Linear(nhid*nheads, ntask) 20 | 21 | self.classifier = nn.Linear(nhid*nheads*length*2, 1) 22 | 23 | 24 | def forward(self, features, adj, path, task): 25 | x = F.dropout(features, self.dropout, training=self.training) 26 | private_x = torch.cat([att(x, adj) for att in self.private], dim=1) 27 | share_x = torch.cat([att(x, adj) for att in self.share], dim=1) 28 | private_x = F.dropout(private_x, self.dropout, training=self.training) 29 | share_x = F.dropout(share_x, self.dropout, training=self.training) 30 | 31 | share_feature = torch.cat([share_x[node, :].view(1, -1) for p in path for node in p], dim=0) 32 | node_task = self.share_classifier(share_feature) 33 | node_task = F.softmax(torch.sigmoid(node_task), dim=1) 34 | adv_loss = F.cross_entropy(node_task, task) 35 | 36 | private_feature = torch.cat([private_x[node, :].view(1, -1) for p in path for node in p], dim=0) 37 | diff = share_feature.t().matmul(private_feature) 38 | diff_loss = (diff**2).sum() 39 | 40 | x = torch.cat([share_feature, private_feature], dim=1).view(-1, share_feature.shape[1]*path.shape[1]*2) 41 | x = F.sigmoid(self.classifier(x)) 42 | 43 | return x, adv_loss, diff_loss 44 | -------------------------------------------------------------------------------- /MSSL/models/models_sim.py: -------------------------------------------------------------------------------- 1 | import torch 2 | import torch.nn as nn 3 | import torch.nn.functional as F 4 | from layers import GraphAttentionLayer 5 | 6 | class MGTA(nn.Module): 7 | def __init__(self, nfeat, nhid, nclass, dropout, alpha, nheads, ntask): 8 | """Dense version of GAT.""" 9 | super(MGTA, self).__init__() 10 | self.dropout = dropout 11 | 12 | self.private = [GraphAttentionLayer(nfeat, nhid, dropout=dropout, alpha=alpha, concat=True) for _ in range(nheads)] 13 | for i, attention in 
enumerate(self.private): 14 | self.add_module('attention_{}'.format(i), attention) 15 | 16 | self.share = [GraphAttentionLayer(nfeat, nhid, dropout=dropout, alpha=alpha, concat=True) for _ in range(nheads)] 17 | for i, attention in enumerate(self.share): 18 | self.add_module('attention2_{}'.format(i), attention) 19 | self.share_classifier = nn.Linear(nhid*nheads, ntask) 20 | 21 | def norm(self, x): 22 | return torch.sqrt((x ** 2).sum(dim=1, keepdim=True)+1e-5) 23 | 24 | def cosine(self, x1, x2): 25 | norm_x1, norm_x2 = self.norm(x1), self.norm(x2) 26 | inner_product = (x1 * x2).sum(dim=1, keepdim=True) 27 | product = norm_x1*norm_x2 28 | result = inner_product / product 29 | return result 30 | 31 | 32 | def forward(self, features, adj, path, task): 33 | x = F.dropout(features, self.dropout, training=self.training) 34 | private_x = torch.cat([att(x, adj) for att in self.private], dim=1) 35 | share_x = torch.cat([att(x, adj) for att in self.share], dim=1) 36 | private_x = F.dropout(private_x, self.dropout, training=self.training) 37 | share_x = F.dropout(share_x, self.dropout, training=self.training) 38 | 39 | share_feature = torch.cat([share_x[node, :].view(1, -1) for p in path for node in p], dim=0) 40 | node_task = self.share_classifier(share_feature) 41 | node_task = F.softmax(torch.sigmoid(node_task), dim=1) 42 | adv_loss = F.cross_entropy(node_task, task) 43 | 44 | private_feature = torch.cat([private_x[node, :].view(1, -1) for p in path for node in p], dim=0) 45 | diff = share_feature.t().matmul(private_feature) 46 | diff_loss = (diff**2).sum() 47 | 48 | x = torch.cat([share_feature, private_feature], dim=1).view(-1, 3, share_feature.shape[1]*2) 49 | 50 | x1, x2, x3 = x[:, 0, :], x[:, 1, :], x[:, 2, :] 51 | 52 | return self.cosine(x1, x2)-self.cosine(x1, x3), adv_loss, diff_loss 53 | 54 | -------------------------------------------------------------------------------- /MSSL/models/train_class.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | from __future__ import print_function 3 | 4 | import os 5 | import glob 6 | import time 7 | import random 8 | import argparse 9 | import numpy as np 10 | import torch 11 | import torch.nn as nn 12 | import torch.nn.functional as F 13 | import torch.optim as optim 14 | import copy 15 | from torch.autograd import Variable 16 | 17 | from utils import * 18 | from models_class import MGTA 19 | 20 | # Training settings 21 | parser = argparse.ArgumentParser() 22 | parser.add_argument('--no-cuda', action='store_true', default=False, help='Disables CUDA training.') 23 | parser.add_argument('--seed', type=int, default=72, help='the random seed.') 24 | parser.add_argument('--epochs', type=int, default=20, help='the number of epochs to train.') 25 | parser.add_argument('--lr', type=float, default=0.005, help='the initial learning rate.') 26 | parser.add_argument('--weight_decay', type=float, default=5e-4, help='Weight decay (L2 loss on parameters).') 27 | parser.add_argument('--hidden', type=int, default=8, help='the number of hidden units.') 28 | parser.add_argument('--nb_heads', type=int, default=8, help='the number of head attentions.') 29 | parser.add_argument('--dropout', type=float, default=0.6, help='the dropout rate (1 - keep probability).') 30 | parser.add_argument('--alpha', type=float, default=0.2, help='alpha for the leaky_relu.') 31 | parser.add_argument('--save', type=str, default='./', help='the path for saving multi-task SSL model.') 32 | 
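# the flags below configure dataset loading: --mode selects integer vs. float labels,
# and --sub shifts the PairDistance labels from 1-4 down to 0-3 for nn.CrossEntropyLoss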
parser.add_argument('--batch_size', type=int, default=32, help='the value of batch size.') 33 | parser.add_argument('--train_file', type=str, default='./', help='the path of training dataset for SSL.') 34 | parser.add_argument('--test_file', type=str, default='./', help='the path of testing dataset for SSL.') 35 | parser.add_argument('--mode', type=int, default=0, help='the type of label, 0 is int, 1 is float.') 36 | parser.add_argument('--nclass', type=int, default=1, help='the number of hidden units in output layer.') 37 | parser.add_argument('--length', type=int, default=2, help='the length of each sample.') 38 | parser.add_argument('--sub', type=int, default=0, help='the parameter is set as 1 in only PairDistance task') 39 | parser.add_argument('--ntask', type=int, default=2, help='the number of tasks in combinations.') 40 | parser.add_argument('--task', type=int, default=0, help='the identifier of current task.') 41 | parser.add_argument('--share', type=str, default='', help='the path of share model.') 42 | parser.add_argument('--refine', type=str, default='', help='the path of private model.') 43 | parser.add_argument('--time', type=int, default=1, help='the identifier of current epoch.') 44 | 45 | 46 | args = parser.parse_args() 47 | args.cuda = not args.no_cuda and torch.cuda.is_available() 48 | 49 | if not os.path.exists(args.save): 50 | os.makedirs(args.save) 51 | 52 | # Load data 53 | features, adj = load_graph() 54 | print("load graph successfully!") 55 | train_iter = load_dataset(args.train_file, args.batch_size, args.mode, sub=args.sub) 56 | test_iter = load_dataset(args.test_file, args.batch_size, args.mode, shuffle=False, sub=args.sub) 57 | print("load dataset successfully!") 58 | # Model and optimizer 59 | if args.refine == '': 60 | model = MGTA(nfeat=features.shape[1], 61 | nhid=args.hidden, 62 | nclass=args.nclass, 63 | dropout=args.dropout, 64 | nheads=args.nb_heads, 65 | alpha=args.alpha, 66 | length=args.length, 67 | ntask=args.ntask) 68 | else : 69 | model = torch.load(args.refine+'/'+str(args.task)+'.pt') 70 | print("Load checkpoint successfully!") 71 | 72 | if args.share != '': 73 | model.share = torch.load(args.share+'/'+'share.pt') 74 | model.share_classifier = torch.load(args.share+'/'+'share_classifier.pt') 75 | print("Load share model successfully!") 76 | 77 | optimizer = optim.Adam(model.parameters(), 78 | lr=args.lr/args.time, 79 | weight_decay=args.weight_decay) 80 | loss = nn.CrossEntropyLoss() 81 | 82 | if args.cuda: 83 | model.cuda() 84 | features = features.cuda() 85 | adj = adj.cuda() 86 | 87 | features, adj = Variable(features), Variable(adj) 88 | 89 | def train(epoch): 90 | t = time.time() 91 | global best_acc 92 | model.train() 93 | for X, y in train_iter: 94 | task = torch.ones(X.view(-1).shape[0], dtype=torch.long)*args.task 95 | task = Variable(task) 96 | if args.cuda: 97 | X = X.cuda() 98 | y = y.cuda() 99 | task = task.cuda() 100 | optimizer.zero_grad() 101 | output, adv_loss, diff_loss = model(features, adj, X, task) 102 | loss_train = loss(output, y) 103 | loss_train = loss_train + 0.05*adv_loss + 0.01*diff_loss 104 | loss_train.backward() 105 | optimizer.step() 106 | model.eval() 107 | acc_val = compute_test() 108 | print('Epoch: {:04d}'.format(epoch+1), 109 | 'loss_train: {:.4f}'.format(loss_train.data.item()), 110 | 'time: {:.4f}s'.format(time.time() - t)) 111 | 112 | 113 | def compute_test(): 114 | model.eval() 115 | correct, total = 0, 0 116 | for X, y in test_iter: 117 | task = torch.ones(X.view(-1).shape[0], dtype=torch.long)*args.task 
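        # every node in the batch is tagged with the current task's id; the shared
        # encoder's task classifier is trained adversarially against these labels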
118 | task = Variable(task) 119 | total += y.shape[0] 120 | if args.cuda: 121 | X = X.cuda() 122 | y = y.cuda() 123 | task = task.cuda() 124 | output, _, _ = model(features, adj, X, task) 125 | output = output.argmax(dim=1) 126 | correct += (output==y).sum().cpu().item() 127 | correct = correct 128 | acc = round(correct/total, 2) 129 | 130 | print("accuracy= {:.2f}".format(acc)) 131 | return acc 132 | 133 | # Train model 134 | t_total = time.time() 135 | best_acc = 0 136 | print("start training!") 137 | print("learning rate: " + str(args.lr/args.time)) 138 | for epoch in range(args.epochs): 139 | train(epoch) 140 | 141 | print("Optimization Finished!", args.train_file) 142 | print("Total time elapsed: {:.4f}s".format(time.time() - t_total)) 143 | torch.save(model, args.save+'/'+str(args.task)+'.pt') 144 | torch.save(model.share, args.save+'/share.pt') 145 | torch.save(model.share_classifier, args.save+'/share_classifier.pt') 146 | print("Save model successfully!") 147 | -------------------------------------------------------------------------------- /MSSL/models/train_reg.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | from __future__ import print_function 3 | 4 | import os 5 | import glob 6 | import time 7 | import random 8 | import argparse 9 | import numpy as np 10 | import torch 11 | import copy 12 | import torch.nn as nn 13 | import torch.nn.functional as F 14 | import torch.optim as optim 15 | from torch.autograd import Variable 16 | 17 | from utils import * 18 | from models_reg import MGTA 19 | 20 | # Training settings 21 | parser = argparse.ArgumentParser() 22 | parser.add_argument('--no-cuda', action='store_true', default=False, help='Disables CUDA training.') 23 | parser.add_argument('--seed', type=int, default=72, help='Random seed.') 24 | parser.add_argument('--epochs', type=int, default=20, help='Number of epochs to train.') 25 | parser.add_argument('--lr', type=float, default=0.005, help='Initial learning rate.') 26 | parser.add_argument('--weight_decay', type=float, default=5e-4, help='Weight decay (L2 loss on parameters).') 27 | parser.add_argument('--hidden', type=int, default=8, help='Number of hidden units.') 28 | parser.add_argument('--nb_heads', type=int, default=8, help='Number of head attentions.') 29 | parser.add_argument('--dropout', type=float, default=0.6, help='Dropout rate (1 - keep probability).') 30 | parser.add_argument('--alpha', type=float, default=0.2, help='Alpha for the leaky_relu.') 31 | parser.add_argument('--save', type=str, default='./', help='saving multi-task SSL model.') 32 | parser.add_argument('--batch_size', type=int, default=32, help='the value of batch size.') 33 | parser.add_argument('--train_file', type=str, default='./', help='the path of training dataset for SSL.') 34 | parser.add_argument('--test_file', type=str, default='./', help='the path of test dataset for SSL.') 35 | parser.add_argument('--mode', type=int, default=0, help='the type of label, 0 is int, 1 is float.') 36 | parser.add_argument('--nclass', type=int, default=1, help='the number of hidden units in output layer.') 37 | parser.add_argument('--length', type=int, default=2, help='the length of each sample.') 38 | parser.add_argument('--sub', type=int, default=0, help='Is the value of the label minus 1.') 39 | parser.add_argument('--ntask', type=int, default=2, help='the number of tasks in combinations.') 40 | parser.add_argument('--task', type=int, default=0, help='the identifier of current task.') 
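# --share and --refine point at checkpoints written by a previously trained task;
# the PES_*.sh scripts use them to alternate tasks during joint training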
41 | parser.add_argument('--share', type=str, default='', help='the path of share model.') 42 | parser.add_argument('--refine', type=str, default='', help='the path of private model.') 43 | parser.add_argument('--time', type=int, default=1, help='the identifier of current epoch.') 44 | 45 | 46 | args = parser.parse_args() 47 | args.cuda = not args.no_cuda and torch.cuda.is_available() 48 | 49 | if not os.path.exists(args.save): 50 | os.makedirs(args.save) 51 | 52 | # Load data 53 | features, adj = load_graph() 54 | print("load graph successfully!") 55 | train_iter = load_dataset(args.train_file, args.batch_size, args.mode, sub=args.sub) 56 | test_iter = load_dataset(args.test_file, args.batch_size, args.mode, shuffle=False, sub=args.sub) 57 | print("load dataset successfully!") 58 | # Model and optimizer 59 | 60 | if args.refine == '': 61 | model = MGTA(nfeat=features.shape[1], 62 | nhid=args.hidden, 63 | nclass=args.nclass, 64 | dropout=args.dropout, 65 | nheads=args.nb_heads, 66 | alpha=args.alpha, 67 | length=args.length, 68 | ntask=args.ntask) 69 | else : 70 | model = torch.load(args.refine+'/'+str(args.task)+'.pt') 71 | print("Load checkpoint successfully!") 72 | 73 | if args.share != '': 74 | model.share = torch.load(args.share+'/'+'share.pt') 75 | model.share_classifier = torch.load(args.share+'/'+'share_classifier.pt') 76 | print("Load share model successfully!") 77 | 78 | optimizer = optim.Adam(model.parameters(), 79 | lr=args.lr/args.time, 80 | weight_decay=args.weight_decay) 81 | loss = nn.MSELoss() 82 | 83 | if args.cuda: 84 | model.cuda() 85 | features = features.cuda() 86 | adj = adj.cuda() 87 | 88 | features, adj = Variable(features), Variable(adj) 89 | 90 | 91 | def train(epoch): 92 | global best_acc 93 | t = time.time() 94 | model.train() 95 | for X, y in train_iter: 96 | task = torch.ones(X.view(-1).shape[0], dtype=torch.long)*args.task 97 | task = Variable(task) 98 | if args.cuda: 99 | X = X.cuda() 100 | y = y.view(-1, 1).cuda() 101 | task = task.cuda() 102 | optimizer.zero_grad() 103 | output, adv_loss, diff_loss = model(features, adj, X, task) 104 | loss_train = loss(output, y) 105 | loss_train = loss_train + 0.05*adv_loss + 0.01*diff_loss 106 | loss_train.backward() 107 | optimizer.step() 108 | model.eval() 109 | # sum_loss = compute_test() 110 | print('Epoch: {:04d}'.format(epoch+1), 111 | 'loss_train: {:.4f}'.format(loss_train.data.item()), 112 | 'time: {:.4f}s'.format(time.time() - t)) 113 | 114 | 115 | def compute_test(): 116 | model.eval() 117 | sum_loss = 0 118 | for X, y in test_iter: 119 | task = torch.ones(X.view(-1).shape[0], dtype=torch.long)*args.task 120 | task = Variable(task) 121 | if args.cuda: 122 | X = X.cuda() 123 | y = y.cuda() 124 | task = task.cuda() 125 | output, adv_loss, diff_loss = model(features, adj, X, task) 126 | sum_loss += loss(output, y) 127 | 128 | print("loss_test= {:.4f}".format(sum_loss)) 129 | return sum_loss 130 | 131 | # Train model 132 | best_acc = 0 133 | t_total = time.time() 134 | print("start training!") 135 | print("learning rate: " + str(args.lr/args.time)) 136 | for epoch in range(args.epochs): 137 | train(epoch) 138 | 139 | print("Optimization Finished!", args.train_file) 140 | print("Total time elapsed: {:.4f}s".format(time.time() - t_total)) 141 | 142 | torch.save(model, args.save+'/'+str(args.task)+'.pt') 143 | torch.save(model.share, args.save+'/share.pt') 144 | torch.save(model.share_classifier, args.save+'/share_classifier.pt') 145 | print("Save model successfully!") 146 | 
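# Example invocation (illustrative only; SimReg is the regression task -- compare
# the train_class.py / train_sim.py calls in PES_warm.sh):
#   python models/train_reg.py --train_file ../data/SSLdata/SimReg_train.txt \
#       --test_file ../data/SSLdata/SimReg_test.txt --save SimReg_model \
#       --batch_size 128 --mode 1 --ntask 1 --task 0 --epochs 1 --lr 5e-4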
-------------------------------------------------------------------------------- /MSSL/models/train_sim.py: -------------------------------------------------------------------------------- 1 | from __future__ import division 2 | from __future__ import print_function 3 | 4 | import os 5 | import glob 6 | import time 7 | import random 8 | import argparse 9 | import numpy as np 10 | import torch 11 | import copy 12 | import torch.nn as nn 13 | import torch.nn.functional as F 14 | import torch.optim as optim 15 | from torch.autograd import Variable 16 | 17 | from utils import * 18 | from models_sim import MGTA 19 | 20 | # Training settings 21 | parser = argparse.ArgumentParser() 22 | parser.add_argument('--no-cuda', action='store_true', default=False, help='Disables CUDA training.') 23 | parser.add_argument('--seed', type=int, default=72, help='Random seed.') 24 | parser.add_argument('--epochs', type=int, default=20, help='Number of epochs to train.') 25 | parser.add_argument('--lr', type=float, default=0.005, help='Initial learning rate.') 26 | parser.add_argument('--weight_decay', type=float, default=5e-4, help='Weight decay (L2 loss on parameters).') 27 | parser.add_argument('--hidden', type=int, default=8, help='Number of hidden units.') 28 | parser.add_argument('--nb_heads', type=int, default=8, help='Number of head attentions.') 29 | parser.add_argument('--dropout', type=float, default=0.6, help='Dropout rate (1 - keep probability).') 30 | parser.add_argument('--alpha', type=float, default=0.2, help='Alpha for the leaky_relu.') 31 | parser.add_argument('--save', type=str, default='./', help='the path for saving multi-task SSL model.') 32 | parser.add_argument('--batch_size', type=int, default=32, help='the value of batch size.') 33 | parser.add_argument('--train_file', type=str, default='./', help='the path of training dataset for SSL.') 34 | parser.add_argument('--test_file', type=str, default='./', help='the path of test dataset for SSL.') 35 | parser.add_argument('--mode', type=int, default=0, help='the type of label, 0 is int, 1 is float.') 36 | parser.add_argument('--nclass', type=int, default=1, help='the number of hidden units in output layer.') 37 | parser.add_argument('--sub', type=int, default=0, help='Is the value of the label minus 1.') 38 | parser.add_argument('--ntask', type=int, default=2, help='the number of tasks in combinations.') 39 | parser.add_argument('--task', type=int, default=0, help='the identifier of current task.') 40 | parser.add_argument('--share', type=str, default='', help='the path of share model.') 41 | parser.add_argument('--refine', type=str, default='', help='the path of private model.') 42 | parser.add_argument('--time', type=int, default=1, help='the identifier of current epoch.') 43 | 44 | 45 | args = parser.parse_args() 46 | args.cuda = not args.no_cuda and torch.cuda.is_available() 47 | 48 | if not os.path.exists(args.save): 49 | os.makedirs(args.save) 50 | 51 | # Load data 52 | features, adj = load_graph() 53 | print("load graph successfully!") 54 | train_iter = load_dataset(args.train_file, args.batch_size, args.mode, sub=args.sub) 55 | test_iter = load_dataset(args.test_file, args.batch_size, args.mode, shuffle=False, sub=args.sub) 56 | print("load dataset successfully!") 57 | # Model and optimizer 58 | 59 | if args.refine == '': 60 | model = MGTA(nfeat=features.shape[1], 61 | nhid=args.hidden, 62 | nclass=args.nclass, 63 | dropout=args.dropout, 64 | nheads=args.nb_heads, 65 | alpha=args.alpha, 66 | ntask=args.ntask) 67 | else : 68 | model = 
torch.load(args.refine+'/'+str(args.task)+'.pt') 69 | print("Load checkpoint successfully!") 70 | 71 | if args.share != '': 72 | model.share = torch.load(args.share+'/'+'share.pt') 73 | model.share_classifier = torch.load(args.share+'/'+'share_classifier.pt') 74 | print("Load share model successfully!") 75 | 76 | optimizer = optim.Adam(model.parameters(), 77 | lr=args.lr, 78 | weight_decay=args.weight_decay) 79 | 80 | if args.cuda: 81 | model.cuda() 82 | features = features.cuda() 83 | adj = adj.cuda() 84 | 85 | features, adj = Variable(features), Variable(adj) 86 | 87 | 88 | def train(epoch): 89 | global best_acc 90 | t = time.time() 91 | model.train() 92 | for X, y in train_iter: 93 | task = torch.ones(X.view(-1).shape[0], dtype=torch.long)*args.task 94 | task = Variable(task) 95 | if args.cuda: 96 | X = X.cuda() 97 | y = y.view(-1, 1).cuda() 98 | task = task.cuda() 99 | optimizer.zero_grad() 100 | output, adv_loss, diff_loss = model(features, adj, X, task) 101 | loss_mask = output.le(0).float() 102 | loss_train = ((y-output)*loss_mask).sum() 103 | loss_train = loss_train + 0.05*adv_loss + 0.01*diff_loss 104 | loss_train.backward() 105 | optimizer.step() 106 | 107 | # sum_loss = compute_test() 108 | print('Epoch: {:04d}'.format(epoch+1), 109 | 'loss_train: {:.4f}'.format(loss_train.data.item()), 110 | 'time: {:.4f}s'.format(time.time() - t)) 111 | 112 | 113 | def compute_test(): 114 | model.eval() 115 | sum_loss = 0 116 | for X, y in test_iter: 117 | task = torch.ones(X.view(-1).shape[0], dtype=torch.long)*args.task 118 | task = Variable(task) 119 | if args.cuda: 120 | X = X.cuda() 121 | y = y.cuda() 122 | task = task.cuda() 123 | output, adv_loss, diff_loss = model(features, adj, X, task) 124 | loss_mask = output.le(0).float() 125 | loss_test = ((y-output)*loss_mask).sum() 126 | sum_loss += loss_test 127 | 128 | print("loss_test= {:.4f}".format(sum_loss)) 129 | return sum_loss 130 | 131 | # Train model 132 | # frequence = int(args.epochs/10) 133 | best_acc = 0 134 | t_total = time.time() 135 | print("start training!") 136 | print("learning rate: " + str(args.lr/args.time)) 137 | for epoch in range(args.epochs): 138 | train(epoch) 139 | 140 | 141 | print("Optimization Finished!", args.train_file) 142 | print("Total time elapsed: {:.4f}s".format(time.time() - t_total)) 143 | 144 | torch.save(model, args.save+'/'+str(args.task)+'.pt') 145 | torch.save(model.share, args.save+'/share.pt') 146 | torch.save(model.share_classifier, args.save+'/share_classifier.pt') 147 | print("Save model successfully!") 148 | -------------------------------------------------------------------------------- /MSSL/models/utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import scipy.sparse as sp 3 | import torch 4 | import torch.utils.data as Data 5 | 6 | def load_graph(feature_file='../data/BioHNsdata/BioHNs_code.txt', adj_file='../data/BioHNsdata/BioHNs_clean.txt'): 7 | with open(feature_file, 'r') as f: 8 | lines = f.readlines() 9 | features = [] 10 | for line in lines: 11 | line = line.split(' ') 12 | line = [int(x) for x in line] 13 | features.append(line) 14 | features = torch.tensor(features, dtype=torch.float) 15 | adj = [] 16 | with open(adj_file, 'r') as f: 17 | lines = f.readlines() 18 | for line in lines: 19 | line = line.split(' ') 20 | line = [int(x) for x in line] 21 | adj.append(line) 22 | adj = torch.tensor(adj, dtype=torch.float) 23 | return features, adj 24 | 25 | def load_dataset(data_file, batch_size, mode=0, shuffle=True, 
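                 # mode: 0 -> integer labels (classification), 1 -> float labels
                 # (regression / similarity contrast); sub=1 subtracts 1 from each label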
sub=0): 26 | with open(data_file, 'r') as f: 27 | lines = f.readlines() 28 | data_X, data_y = [], [] 29 | for line in lines: 30 | X, y = [], [] 31 | line = line.split(' ') 32 | X, y = line[:-1], line[-1] 33 | X = [int(x) for x in X] 34 | y = float(y) if mode else int(y) 35 | if sub: 36 | y = y - 1 37 | data_X.append(X) 38 | data_y.append(y) 39 | data_X = torch.tensor(data_X, dtype=torch.long) 40 | if mode: 41 | data_y = torch.tensor(data_y, dtype=torch.float) 42 | else: 43 | data_y = torch.tensor(data_y, dtype=torch.long) 44 | data_dataset = Data.TensorDataset(data_X, data_y) 45 | data_iter = Data.DataLoader(data_dataset, batch_size=batch_size, shuffle=shuffle) 46 | return data_iter 47 | 48 | -------------------------------------------------------------------------------- /PES_cold.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd DownStream 4 | python create_sample.py --input ../data/DownStreamdata/DTInet.txt --offset 1 5 | 6 | cd ../DataProcessing 7 | 8 | python degree_code.py --downstream DTI --scenario cold --dataclean 1 >logpre 9 | python PairDistance.py --downstream DTI --scenario cold --dataclean 1 >>logpre 10 | python SimCon.py --downstream DTI --scenario cold --dataclean 1 >>logpre 11 | python EdgeMask.py --downstream DTI --scenario cold --dataclean 1 >>logpre 12 | # python PathClass.py --downstream DTI --scenario cold --dataclean 1 >>logpre 13 | # python SimReg.py --downstream DTI --scenario cold --dataclean 1 >>logpre 14 | # python ClusterPre.py --downstream DTI --scenario cold --dataclean 1 >>logpre 15 | 16 | 17 | python create_dataset.py --input_file ../data/SSLdata/PairDistance.txt 18 | python create_dataset.py --input_file ../data/SSLdata/EdgeMask.txt 19 | python create_dataset.py --input_file ../data/SSLdata/SimCon.txt 20 | # python create_dataset.py --input_file ../data/SSLdata/SimReg.txt 21 | # python create_dataset.py --input_file ../data/SSLdata/PathClass.txt 22 | # python create_dataset.py --input_file ../data/SSLdata/ClusterPre.txt 23 | 24 | cd ../MSSL 25 | 26 | python models/train_class.py --train_file ../data/SSLdata/PairDistance_train.txt --test_file ../data/SSLdata/PairDistance_test.txt --save PairDistance_EdgeMask_SimCon --batch_size 128 --length 2 --nclass 4 --sub 1 --ntask 3 --task 0 --epochs 1 --lr 5e-4 > PairDistance_EdgeMask_SimCon.log 27 | python models/train_class.py --train_file ../data/SSLdata/EdgeMask_train.txt --test_file ../data/SSLdata/EdgeMask_test.txt --save PairDistance_EdgeMask_SimCon --share PairDistance_EdgeMask_SimCon --batch_size 128 --length 2 --nclass 4 --ntask 3 --task 1 --epochs 1 --lr 5e-4 >> PairDistance_EdgeMask_SimCon.log 28 | python models/train_sim.py --train_file ../data/SSLdata/SimCon_train.txt --test_file ../data/SSLdata/SimCon_test.txt --save PairDistance_EdgeMask_SimCon --share PairDistance_EdgeMask_SimCon --batch_size 128 --mode 1 --ntask 3 --task 2 --lr 5e-4 --epochs 1 >> PairDistance_EdgeMask_SimCon.log 29 | for ((i=1; i<=30; i++)) 30 | do 31 | rnd=$(($RANDOM%3)) 32 | case $rnd in 33 | 0) 34 | python models/train_class.py --train_file ../data/SSLdata/PairDistance_train.txt --test_file ../data/SSLdata/PairDistance_test.txt --save PairDistance_EdgeMask_SimCon --share PairDistance_EdgeMask_SimCon --refine PairDistance_EdgeMask_SimCon --batch_size 128 --length 2 --nclass 4 --sub 1 --ntask 3 --task 0 --time $i --epochs 1 --lr 5e-4 >> PairDistance_EdgeMask_SimCon.log 35 | ;; 36 | 1) 37 | python models/train_class.py --train_file ../data/SSLdata/EdgeMask_train.txt 
--test_file ../data/SSLdata/EdgeMask_test.txt --save PairDistance_EdgeMask_SimCon --share PairDistance_EdgeMask_SimCon --refine PairDistance_EdgeMask_SimCon --batch_size 128 --length 2 --nclass 4 --ntask 3 --task 1 --time $i --epochs 1 --lr 5e-4 >> PairDistance_EdgeMask_SimCon.log 38 | ;; 39 | 2) 40 | python models/train_sim.py --train_file ../data/SSLdata/SimCon_train.txt --test_file ../data/SSLdata/SimCon_test.txt --save PairDistance_EdgeMask_SimCon --share PairDistance_EdgeMask_SimCon --refine PairDistance_EdgeMask_SimCon --batch_size 128 --mode 1 --ntask 3 --task 2 --time $i --lr 5e-4 --epochs 1 >> PairDistance_EdgeMask_SimCon.log 41 | ;; 42 | esac 43 | done 44 | # 45 | ### 46 | python models/get_feature.py --model PairDistance_EdgeMask_SimCon --length 3 47 | 48 | 49 | cd ../DownStream 50 | 51 | python cold_start.py --input_file ../data/DownStreamdata/DTInet_sample.txt --feature ../MSSL/feature_PairDistance_EdgeMask_SimCon.pt --lr 0.002 --epochs 30 --save DTInet/PairDistance_EdgeMask_SimCon_cold >> DTI_PairDistance_EdgeMask_SimCon 52 | 53 | 54 | 55 | -------------------------------------------------------------------------------- /PES_warm.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | cd DownStream 4 | python create_sample.py --input ../data/DownStreamdata/DTInet.txt --offset 1 5 | 6 | cd ../DataProcessing 7 | 8 | python degree_code.py --downstream DTI --scenario warm --dataclean 1 >logpre 9 | python PairDistance.py --downstream DTI --scenario warm --dataclean 1 >>logpre 10 | python SimCon.py --downstream DTI --scenario warm --dataclean 1 >>logpre 11 | python EdgeMask.py --downstream DTI --scenario warm --dataclean 1 >>logpre 12 | # python PathClass.py --downstream DTI --scenario warm --dataclean 1 >>logpre 13 | # python SimReg.py --downstream DTI --scenario warm --dataclean 1 >>logpre 14 | # python ClusterPre.py --downstream DTI --scenario warm --dataclean 1 >>logpre 15 | 16 | python create_dataset.py --input_file ../data/SSLdata/PairDistance.txt 17 | python create_dataset.py --input_file ../data/SSLdata/EdgeMask.txt 18 | python create_dataset.py --input_file ../data/SSLdata/SimCon.txt 19 | # python create_dataset.py --input_file ../data/SSLdata/SimReg.txt 20 | # python create_dataset.py --input_file ../data/SSLdata/PathClass.txt 21 | # python create_dataset.py --input_file ../data/SSLdata/ClusterPre.txt 22 | 23 | cd ../MSSL 24 | 25 | python models/train_class.py --train_file ../data/SSLdata/PairDistance_train.txt --test_file ../data/SSLdata/PairDistance_test.txt --save PairDistance_EdgeMask_SimCon --batch_size 128 --length 2 --nclass 4 --sub 1 --ntask 3 --task 0 --epochs 1 --lr 5e-4 > PairDistance_EdgeMask_SimCon.log 26 | python models/train_class.py --train_file ../data/SSLdata/EdgeMask_train.txt --test_file ../data/SSLdata/EdgeMask_test.txt --save PairDistance_EdgeMask_SimCon --share PairDistance_EdgeMask_SimCon --batch_size 128 --length 2 --nclass 4 --ntask 3 --task 1 --epochs 1 --lr 5e-4 >> PairDistance_EdgeMask_SimCon.log 27 | python models/train_sim.py --train_file ../data/SSLdata/SimCon_train.txt --test_file ../data/SSLdata/SimCon_test.txt --save PairDistance_EdgeMask_SimCon --share PairDistance_EdgeMask_SimCon --batch_size 128 --mode 1 --ntask 3 --task 2 --lr 5e-4 --epochs 1 >> PairDistance_EdgeMask_SimCon.log 28 | for ((i=1; i<=30; i++)) 29 | do 30 | rnd=$(($RANDOM%3)) 31 | case $rnd in 32 | 0) 33 | python models/train_class.py --train_file ../data/SSLdata/PairDistance_train.txt --test_file 
../data/SSLdata/PairDistance_test.txt --save PairDistance_EdgeMask_SimCon --share PairDistance_EdgeMask_SimCon --refine PairDistance_EdgeMask_SimCon --batch_size 128 --length 2 --nclass 4 --sub 1 --ntask 3 --task 0 --time $i --epochs 1 --lr 5e-4 >> PairDistance_EdgeMask_SimCon.log 34 | ;; 35 | 1) 36 | python models/train_class.py --train_file ../data/SSLdata/EdgeMask_train.txt --test_file ../data/SSLdata/EdgeMask_test.txt --save PairDistance_EdgeMask_SimCon --share PairDistance_EdgeMask_SimCon --refine PairDistance_EdgeMask_SimCon --batch_size 128 --length 2 --nclass 4 --ntask 3 --task 1 --time $i --epochs 1 --lr 5e-4 >> PairDistance_EdgeMask_SimCon.log 37 | ;; 38 | 2) 39 | python models/train_sim.py --train_file ../data/SSLdata/SimCon_train.txt --test_file ../data/SSLdata/SimCon_test.txt --save PairDistance_EdgeMask_SimCon --share PairDistance_EdgeMask_SimCon --refine PairDistance_EdgeMask_SimCon --batch_size 128 --mode 1 --ntask 3 --task 2 --time $i --lr 5e-4 --epochs 1 >> PairDistance_EdgeMask_SimCon.log 40 | ;; 41 | esac 42 | done 43 | # 44 | ### 45 | python models/get_feature.py --model PairDistance_EdgeMask_SimCon --length 3 46 | 47 | 48 | cd ../DownStream 49 | 50 | python warm_start.py --input_file ../data/DownStreamdata/DTInet_sample.txt --feature ../MSSL/feature_PairDistance_EdgeMask_SimCon.pt --lr 0.002 --epochs 30 --save DTInet/PairDistance_EdgeMask_SimCon >> DTI_PairDistance_EdgeMask_SimCon 51 | 52 | -------------------------------------------------------------------------------- /Program Operation Guideline.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/pengsl-lab/MSSL/99d86c2612d6cdc5504ae3673440bd090abb1d52/Program Operation Guideline.pdf -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # MSSL2drug 2 | We peopose multi-task joint strategies of self-supervised representation learning on biomedical networks for drug discovery, named MSSL2drug. We design six basic SSL tasks that are inspired by various modality features including structures, semantics, and attributes in biomedical heterogeneous networks. In addition, fifteen combinations of multiple tasks are evaluated by a graph attention-based adversarial multi-task learning framework in two drug discovery scenarios. The results suggest two important findings. (1) Combinations of multimodal tasks achieve the best performance compared to other multi-task joint strategies. (2) The joint training of local and global SSL tasks yields higher performance than random task combinations. 3 | 4 | ## Data description 5 | 1. BioHNsdata: biomedical heterogeneous networks 6 | * drug.txt: list of drug. 7 | * protein.txt: list of protein. 8 | * disease.txt: list of disease. 9 | * drug_sim.txt: drug similarity matrix 10 | * protein_sim.txt: protein similarity matrix 11 | * disease_sim.txt: protein similarity matrix 12 | * BioHNs.txt: the adjacency matrix of the biomedical heterogeneous networks. 13 | * BioHNs_code.txt: the initialization features of nodes in the biomedical heterogeneous network. 14 | * BioHNsdata_with order: the corresponding order of node in drug_sim.txt, protein_sim.txt, disease_sim.txt, and BioHNs.txt BioHNs.txt, and BioHNs_code.txt 15 | 2. DownStreamdata: drug discovery data 16 | * DDINet.txt: Drug-Drug interaction matrix. 17 | * DTINet.txt: Drug-Protein interaction matrix. 18 | 3. 
22 | ## Single task-driven self-supervised representation learning
23 | [Graph attention networks (GATs)](https://arxiv.org/abs/1710.10903v3) are used to train the single task-driven self-supervised representation learning models. The code of GAT can be downloaded from https://github.com/Diego999/pyGAT.
24 |
25 | ## Multi-task self-supervised representation learning for drug discovery
26 | `models/` contains the implementations of the graph attention-based adversarial multi-task learning models for three training paradigms: classification tasks, regression tasks, and similarity contrast tasks. In MSSL2drug, we develop fifteen combinations of multiple tasks; the combinations, however, share the same underlying implementations. For a given multi-task combination, users can execute the programs according to the [Operation Guideline](https://github.com/pengsl-lab/MSSL/blob/main/Program%20Operation%20Guideline.pdf). Here, we take PairDistance_EdgeMask_SimCon (PES) as an example. The detailed implementations of the warm-start and cold-start scenarios for PairDistance_EdgeMask_SimCon are described in PES_warm.sh and PES_cold.sh, respectively; each script ends by writing a node-feature file that the downstream scripts consume (a loading sketch follows the Contacts section). Finally, users can run the PairDistance_EdgeMask_SimCon model with the following commands.
27 |
28 | 1. Warm-start scenarios:
29 | `bash PES_warm.sh`
30 |
31 | 2. Cold-start scenarios:
32 | `bash PES_cold.sh`
33 |
34 | ## Requirements
35 | MSSL2drug is tested to work under:
36 | * Python 3.7.7
37 | * numpy 1.16.1
38 | * torch 1.6.0
39 |
40 | ## Contacts
41 | If you have any questions or comments, please feel free to email xqw@hnu.edu.cn or jicheng@hnu.edu.cn.
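Each pretraining script ends with `models/get_feature.py`, which writes the learned node representations to `MSSL/feature_<model>.pt`; the downstream scripts read this file through their `--feature` flag. A minimal loading sketch, assuming the file holds a tensor saved with `torch.save` (adjust if the stored object differs):

```python
import torch

# Node representations written by models/get_feature.py.
features = torch.load("MSSL/feature_PairDistance_EdgeMask_SimCon.pt",
                      map_location="cpu")
print(type(features))
# If the file holds a single tensor, rows index the nodes
# (drugs, proteins, and diseases) and columns the embedding dimensions.
if torch.is_tensor(features):
    print(features.shape)
```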
42 | -------------------------------------------------------------------------------- /data/BioHNsdata/disease.txt: -------------------------------------------------------------------------------- 1 | C0011860:Diabetes Mellitus, Non-Insulin-Dependent 2 | C0034063:Pulmonary Edema 3 | C0004364:Autoimmune Diseases 4 | C0009663:Condylomata Acuminata 5 | C0009324:Ulcerative Colitis 6 | C0040261:Onychomycosis 7 | C0006277:Bronchitis 8 | C0002962:Angina Pectoris 9 | C0020635:Hypopituitarism 10 | C0030807:Pemphigus 11 | C0600518:Choroidal Neovascularization 12 | C0018834:Heartburn 13 | C0002453:Amenorrhea 14 | C0013604:Edema 15 | C0019342:Genital Herpes 16 | C0023290:Leishmaniasis, Visceral 17 | C0034734:Raynaud Disease 18 | C0032285:Pneumonia 19 | C0002994:Angioedema 20 | C0917796:Optic Atrophy, Hereditary, Leber 21 | C0018099:Gout 22 | C0085413:Polycystic Kidney, Autosomal Dominant 23 | C0025202:melanoma 24 | C0039240:Supraventricular tachycardia 25 | C0029001:Onchocerciasis 26 | C0010417:Cryptorchidism 27 | C0029442:Osteomalacia 28 | C0014175:Endometriosis 29 | C0020615:Hypoglycemia 30 | C0473527:Hypoalphalipoproteinemias 31 | C0010692:Cystitis 32 | C0027051:Myocardial Infarction 33 | C0014358:Enterocolitis, Pseudomembranous 34 | C0002880:Autoimmune hemolytic anemia 35 | C0037299:Skin Ulcer 36 | C0017605:Angle Closure Glaucoma 37 | C0334634:Malignant lymphoma, lymphocytic, intermediate differentiation, diffuse 38 | C0003962:Ascites 39 | C0041466:Typhoid Fever 40 | C0027662:Multiple Endocrine Neoplasia 41 | C0006840:Candidiasis 42 | C0029401:Osteitis Deformans 43 | C0162316:Iron deficiency anemia 44 | C0005424:Biliary Tract Diseases 45 | C0033860:Psoriasis 46 | C0043037:Common wart 47 | C1739363:Prostatic Hypertrophy 48 | C0020428:Hyperaldosteronism 49 | C0041341:Tuberous Sclerosis 50 | C0030528:Paratyphoid Fever 51 | C0007820:Cerebrovascular Disorders 52 | C0038013:Ankylosing spondylitis 53 | C0041321:Tuberculosis, Miliary 54 | C0023473:Myeloid Leukemia, Chronic 55 | C0282488:Interstitial Cystitis 56 | C0027497:Nausea 57 | C1135868:Gestational Trophoblastic Neoplasms 58 | C0004245:Atrioventricular Block 59 | C0032269:Pneumococcal Infections 60 | C0031511:Pheochromocytoma 61 | C0027022:Myeloproliferative disease 62 | C0040028:Thrombocythemia, Essential 63 | C0026857:Musculoskeletal Diseases 64 | C0029458:Osteoporosis, Postmenopausal 65 | C0003850:Arteriosclerosis 66 | C0011884:Diabetic Retinopathy 67 | C0039483:Giant Cell Arteritis 68 | C0014733:Erysipelas 69 | C0006849:Oral candidiasis 70 | C0085631:Agitation 71 | C0013338:Pituitary dwarfism 72 | C0020443:Hypercholesterolemia 73 | C0008526:Choroiditis 74 | C0022876:Premature Obstetric Labor 75 | C0020502:Hyperparathyroidism 76 | C0025281:Meniere Disease 77 | C0036220:Kaposi Sarcoma 78 | C0028768:Obsessive-Compulsive Disorder 79 | C0015230:Exanthema 80 | C0031154:Peritonitis 81 | C0030567:Parkinson Disease 82 | C0036992:Short Bowel Syndrome 83 | C0004031:Aspergillosis, Allergic Bronchopulmonary 84 | C0017574:Gingivitis 85 | C0015967:Fever 86 | C0013274:Patent ductus arteriosus 87 | C0011991:Diarrhea 88 | C0039520:Tenosynovitis 89 | C0152020:Gastroparesis 90 | C0034013:Precocious Puberty 91 | C0039685:Tetralogy of Fallot 92 | C0023479:Acute myelomonocytic leukemia 93 | C0013404:Dyspnea 94 | C0023885:Liver Abscess 95 | C0020676:Hypothyroidism 96 | C0021670:insulinoma 97 | C0019156:Hepatic Veno-Occlusive Disease 98 | C0036117:Salmonella infections 99 | C0019829:Hodgkin Disease 100 | C0040034:Thrombocytopenia 101 | C0011615:Dermatitis, Atopic 102 | 
C0011854:Diabetes Mellitus, Insulin-Dependent 103 | C0010308:Congenital Hypothyroidism 104 | C0740394:Hyperuricemia 105 | C0038218:Status Asthmaticus 106 | C0024530:Malaria 107 | C0031069:Familial Mediterranean Fever 108 | C0001973:Alcoholic Intoxication, Chronic 109 | C0040517:Gilles de la Tourette syndrome 110 | C0025295:Meningitis, Pneumococcal 111 | C0024537:Malaria, Vivax 112 | C0021775:Intermittent Claudication 113 | C0035335:Retinoblastoma 114 | C0041228:African Trypanosomiasis 115 | C0032533:Polymyalgia Rheumatica 116 | C0039103:Synovitis 117 | C0022735:Klinefelter Syndrome 118 | C0034065:Pulmonary Embolism 119 | C0242422:Parkinsonian Disorders 120 | C0006118:Brain Neoplasms 121 | C0042870:Vitamin D Deficiency 122 | C0002171:Alopecia Areata 123 | C0019080:Hemorrhage 124 | C0024299:Lymphoma 125 | C0038166:Staphylococcal Skin Infections 126 | C0022951:Lactose Intolerance 127 | C0024117:Chronic Obstructive Airway Disease 128 | C0001144:Acne Vulgaris 129 | C0018621:Hay fever 130 | C0001403:Addison Disease 131 | C0027708:Nephroblastoma 132 | C0023440:Acute Erythroblastic Leukemia 133 | C1562585:Leprosy, Multibacillary 134 | C0162836:Hidradenitis Suppurativa 135 | C0019069:Hemophilia A 136 | C0014742:Erythema Multiforme 137 | C0035243:Respiratory Tract Infections 138 | C0036341:Schizophrenia 139 | C0004626:Pneumonia, Bacterial 140 | C0030920:Peptic Ulcer 141 | C0042029:Urinary tract infection 142 | C0003872:Arthritis, Psoriatic 143 | C0016034:Breast Fibrocystic Disease 144 | C0079774:Peripheral T-Cell Lymphoma 145 | C0042164:Uveitis 146 | C0019880:Homocystinuria 147 | C0014347:Enterobacteriaceae Infections 148 | C0006434:Burn injury 149 | C0012739:Disseminated Intravascular Coagulation 150 | C0037769:West Syndrome 151 | C0007134:Renal Cell Carcinoma 152 | C0041318:Tuberculosis, Meningeal 153 | C1961099:Precursor T-Cell Lymphoblastic Leukemia-Lymphoma 154 | C0030193:Pain 155 | C0079744:Diffuse Large B-Cell Lymphoma 156 | C0033845:Pseudotumor Cerebri 157 | C0001622:Adrenal Gland Hyperfunction 158 | C0022073:Iridocyclitis 159 | C0151636:Premature ventricular contractions 160 | C0010200:Coughing 161 | C0020626:Hypoparathyroidism 162 | C0023241:Legionnaires' Disease 163 | C0206178:Cytomegalovirus Retinitis 164 | C0596170:Binge eating disorder 165 | C0016412:Folic Acid Deficiency 166 | C0029077:Ophthalmia, Sympathetic 167 | C0221013:Mastocytosis, Systemic 168 | C0038395:Streptococcal Infections 169 | C0019243:Angioedemas, Hereditary 170 | C0023786:Mucopolysaccharidosis I 171 | C0033581:prostatitis 172 | C0150045:Urge Incontinence 173 | C0267963:Exocrine pancreatic insufficiency 174 | C0042998:Vulvovaginitis 175 | C0032897:Prader-Willi Syndrome 176 | C0004352:Autistic Disorder 177 | C0162627:Skin Diseases, Bacterial 178 | C0149931:Migraine Disorders 179 | C0242172:Pelvic Inflammatory Disease 180 | C0042974:von Willebrand Disease 181 | C0036323:Schistosomiasis 182 | C0017563:Gingival Diseases 183 | C0025289:Meningitis 184 | C0002874:Aplastic Anemia 185 | C0271568:Laron Syndrome 186 | C0040038:Thromboembolism 187 | C1261473:Sarcoma 188 | C0024198:Lyme Disease 189 | C0001957:Alcohol Withdrawal Delirium 190 | C0035435:Rheumatism 191 | C0010346:Crohn Disease 192 | C0006413:Burkitt Lymphoma 193 | C0013595:Eczema 194 | C0026838:Muscle Spasticity 195 | C0023343:Leprosy 196 | C0024305:Lymphoma, Non-Hodgkin 197 | C0041327:Tuberculosis, Pulmonary 198 | C0015397:Disorder of eye 199 | C0042165:Anterior uveitis 200 | C0022104:Irritable Bowel Syndrome 201 | C0029456:Osteoporosis 202 | C0020461:Hyperkalemia 
203 | C0085096:Peripheral Vascular Diseases 204 | C0018081:Gonorrhea 205 | C0042571:Vertigo 206 | C0175683:Citrullinemia 207 | C0033975:Psychotic Disorders 208 | C0035220:Respiratory Distress Syndrome, Newborn 209 | C0024535:Malaria, Falciparum 210 | C0010674:Cystic Fibrosis 211 | C0238198:Gastrointestinal Stromal Tumors 212 | C2316212:Cryopyrin-Associated Periodic Syndromes 213 | C0003615:Appendicitis 214 | C0026764:Multiple Myeloma 215 | C0037199:Sinusitis 216 | C0040558:Toxoplasmosis 217 | C0038160:Staphylococcal Infections 218 | C0040262:Tinea Versicolor 219 | C0014356:Enterocolitis 220 | C0005283:beta Thalassemia 221 | C0017168:Gastroesophageal reflux disease 222 | C0018213:Graves Disease 223 | C0003873:Rheumatoid Arthritis 224 | C0002963:Angina Pectoris, Variant 225 | C0035439:Rheumatic Heart Disease 226 | C0038363:Aphthous Stomatitis 227 | C0677607:Hashimoto Disease 228 | C0002390:Extrinsic allergic alveolitis 229 | C0030409:Paracoccidioidomycosis 230 | C0524910:Hepatitis C, Chronic 231 | C0206682:Follicular thyroid carcinoma 232 | C0020542:Pulmonary Hypertension 233 | C0003862:Arthralgia 234 | C0003492:Aortic coarctation 235 | C0409959:Osteoarthritis, Knee 236 | C0029408:Degenerative polyarthritis 237 | C0027726:Nephrotic Syndrome 238 | C0018790:Cardiac Arrest 239 | C0085159:Seasonal Affective Disorder 240 | C0021400:Influenza 241 | C0017601:Glaucoma 242 | C0282193:Iron Overload 243 | C0023449:Acute lymphocytic leukemia 244 | C0220756:Niemann-Pick Disease, Type C 245 | C0025294:Meningococcal meningitis 246 | C0013395:Dyspepsia 247 | C0006060:Boutonneuse Fever 248 | C0014550:Epilepsies, Myoclonic 249 | C0004096:Asthma 250 | C0008325:Cholecystitis 251 | C0031099:Periodontitis 252 | C0040053:Thrombosis 253 | C0020598:Hypocalcemia 254 | C0020550:Hyperthyroidism 255 | C0038454:Cerebrovascular accident 256 | C0028840:Ocular Hypertension 257 | C0003864:Arthritis 258 | C0009443:Common Cold 259 | C0024591:Malignant hyperpyrexia due to anesthesia 260 | C0040592:Trachoma 261 | C0020651:Hypotension, Orthostatic 262 | C0019151:Hepatic Encephalopathy 263 | C0030201:Pain, Postoperative 264 | C0038325:Stevens-Johnson Syndrome 265 | C0079773:Lymphoma, T-Cell, Cutaneous 266 | C0037274:Dermatologic disorders 267 | C0014547:Epilepsies, Partial 268 | C0004030:Aspergillosis 269 | C0017612:Glaucoma, Open-Angle 270 | C0011881:Diabetic Nephropathy 271 | C0020473:Hyperlipidemia 272 | C0013390:Dysmenorrhea 273 | C0009088:Cluster Headache 274 | C0035579:Rickets 275 | C0700345:Candidiasis, Vulvovaginal 276 | C0020514:Hyperprolactinemia 277 | C0024301:Lymphoma, Follicular 278 | C0021361:Female infertility 279 | C0016053:Fibromyalgia 280 | C0040425:Tonsillitis 281 | C0034069:Pulmonary Fibrosis 282 | C0524909:Hepatitis B, Chronic 283 | C0014544:Epilepsy 284 | C0474368:Labor Pain 285 | C0027819:Neuroblastoma 286 | C0020459:Hyperinsulinism 287 | C0017205:Gaucher Disease 288 | C1328252:Mucocutaneous leishmaniasis 289 | C0019655:Histoplasmosis 290 | C0035457:Rhinitis, Allergic, Perennial 291 | C0034067:Pulmonary Emphysema 292 | C0008149:Chlamydia Infections 293 | C0020649:Hypotension 294 | C0032797:Postpartum Hemorrhage 295 | C0032463:Polycythemia Vera 296 | C0024138:Lupus Erythematosus, Discoid 297 | C0019202:Hepatolenticular Degeneration 298 | C0005699:Blast Phase 299 | C0018021:Goiter 300 | C1260899:Anemia, Diamond-Blackfan 301 | C0019693:HIV Infections 302 | C0023465:Acute monocytic leukemia 303 | C0042024:Urinary Incontinence 304 | C0006309:Brucellosis 305 | C0042345:Varicosity 306 | C0751967:Multiple Sclerosis, 
Relapsing-Remitting 307 | C0032064:Plague 308 | C0276226:Herpes encephalitis 309 | C0004238:Atrial Fibrillation 310 | C0009766:Allergic Conjunctivitis 311 | C0018802:Congestive heart failure 312 | C0023283:Leishmaniasis, Cutaneous 313 | C0014869:Peptic Esophagitis 314 | C0002170:Alopecia 315 | C0149925:Small cell carcinoma of lung 316 | C0035258:Restless Legs Syndrome 317 | C0001627:Congenital adrenal hyperplasia 318 | C0026769:Multiple Sclerosis 319 | C0002736:Amyotrophic Lateral Sclerosis 320 | C0001206:Acromegaly 321 | C1527336:Sjogren's Syndrome 322 | C0022548:Keloid 323 | C0085166:Bacterial Vaginosis 324 | C0027404:Narcolepsy 325 | C0027430:Nasal Polyps 326 | C0005138:Berylliosis 327 | C0038436:Post-Traumatic Stress Disorder 328 | C0010691:Cystinuria 329 | C0024449:Mycetoma 330 | C0034012:Delayed Puberty 331 | C0010414:Infection by Cryptococcus neoformans 332 | C0042900:Vitiligo 333 | C0020437:Hypercalcemia 334 | C0019348:Herpes Simplex Infections 335 | C0002986:Fabry Disease 336 | C0023646:Lichen Planus 337 | C0497327:Dementia 338 | C0020538:Hypertensive disease 339 | C0036980:Shock, Cardiogenic 340 | C0024141:Lupus Erythematosus, Systemic 341 | C0028754:Obesity 342 | C0011616:Contact Dermatitis 343 | C0022972:Lambert-Eaton Myasthenic Syndrome 344 | C0023467:Leukemia, Myelocytic, Acute 345 | C0031350:Pharyngitis 346 | C0038358:Gastric ulcer 347 | C0013264:Muscular Dystrophy, Duchenne 348 | C0002726:Amyloidosis 349 | C0006846:Cutaneous Candidiasis 350 | C0007117:Basal cell carcinoma 351 | C0035235:Respiratory Syncytial Virus Infections 352 | C0042963:Vomiting 353 | C0003868:Arthritis, Gouty 354 | C0014836:Escherichia coli Infections 355 | C0041408:Turner Syndrome 356 | C1263846:Attention deficit hyperactivity disorder 357 | C0003469:Anxiety Disorders 358 | C0024419:Waldenstrom Macroglobulinemia 359 | C0033578:Prostatic Neoplasms 360 | C0011849:Diabetes Mellitus 361 | C0008677:Bronchitis, Chronic 362 | C0040015:Thrombasthenia 363 | C0342731:Deficiency of mevalonate kinase 364 | C0006267:Bronchiectasis 365 | C0042109:Urticaria 366 | C0026603:Motion Sickness 367 | C0020479:Hyperlipoproteinemia Type III 368 | C0007138:Carcinoma, Transitional Cell 369 | C0034362:Q Fever 370 | C0007131:Non-Small Cell Lung Carcinoma 371 | C0003950:Ascariasis 372 | C0020557:Hypertriglyceridemia 373 | C0007194:Hypertrophic Cardiomyopathy 374 | C0028326:Noonan Syndrome 375 | C0032302:Mycoplasma pneumonia 376 | C0027145:Myxedema 377 | C0035455:Rhinitis 378 | C0023474:Leukemia, Myeloid, Chronic-Phase 379 | C0027796:Neuralgia 380 | C0030848:Peyronie Disease 381 | C0002888:Anemia, Megaloblastic 382 | C0035854:Rosacea 383 | C0039128:Syphilis 384 | C0033893:Tension Headache 385 | C0038525:Subarachnoid Hemorrhage 386 | C0004153:Atherosclerosis 387 | C0023443:Hairy Cell Leukemia 388 | C0020474:Hyperlipidemia, Familial Combined 389 | C0007137:Squamous cell carcinoma 390 | C0020621:Hypokalemia 391 | C0019360:Herpes zoster disease 392 | C0013295:Duodenal Ulcer 393 | C0023487:Acute Promyelocytic Leukemia 394 | C0034186:Pyelonephritis 395 | C0026987:Myelofibrosis 396 | C0002792:anaphylaxis 397 | C0008533:Hemophilia B 398 | C0011633:Dermatomyositis 399 | C0005586:Bipolar Disorder 400 | C0002395:Alzheimer's Disease 401 | C0042373:Vascular Diseases 402 | C0002351:Altitude Sickness 403 | C0002895:Anemia, Sickle Cell 404 | C0042842:Vitamin A Deficiency 405 | C1134719:Invasive Ductal Breast Carcinoma 406 | C0338437:Neurocysticercosis 407 | C1269683:Major Depressive Disorder 408 | C0042847:Vitamin B 12 Deficiency 409 | 
C0021390:Inflammatory Bowel Diseases 410 | C0220994:Hyperammonemia 411 | C0020625:Hyponatremia 412 | C0162568:Erythropoietic Protoporphyria 413 | C0036202:Sarcoidosis 414 | C0029443:Osteomyelitis 415 | C0009806:Constipation 416 | C0011581:Depressive disorder 417 | C0019345:Herpes Labialis 418 | C0040127:Thyroid Crisis 419 | C0015503:Factor VII Deficiency 420 | C0311375:Arsenic Poisoning 421 | C0013922:Embolism 422 | C0026896:Myasthenia Gravis 423 | C0024115:Lung diseases 424 | C0030319:Panic Disorder 425 | C0008350:Cholelithiasis 426 | C0014549:Tonic-Clonic Epilepsy 427 | C0154246:Urea Cycle Disorders, Inborn 428 | C0014553:Absence Epilepsy 429 | C0033774:Pruritus 430 | C0022602:Actinic keratosis 431 | C0042510:Ventricular Fibrillation -------------------------------------------------------------------------------- /data/BioHNsdata/drug.txt: -------------------------------------------------------------------------------- 1 | DB00014:Goserelin 2 | DB00035:Desmopressin 3 | DB00091:Cyclosporine 4 | DB00104:Octreotide 5 | DB00114:Pyridoxal Phosphate 6 | DB00115:Cyanocobalamin 7 | DB00121:Biotin 8 | DB00126:Vitamin C 9 | DB00130:L-Glutamine 10 | DB00134:L-Methionine 11 | DB00143:Glutathione 12 | DB00145:Glycine 13 | DB00150:L-Tryptophan 14 | DB00158:Folic Acid 15 | DB00162:Vitamin A 16 | DB00166:Lipoic Acid 17 | DB00170:Menadione 18 | DB00175:Pravastatin 19 | DB00176:Fluvoxamine 20 | DB00179:Masoprocol 21 | DB00181:Baclofen 22 | DB00182:Amphetamine 23 | DB00184:Nicotine 24 | DB00185:Cevimeline 25 | DB00186:Lorazepam 26 | DB00188:Bortezomib 27 | DB00189:Ethchlorvynol 28 | DB00191:Phentermine 29 | DB00193:Tramadol 30 | DB00195:Betaxolol 31 | DB00198:Oseltamivir 32 | DB00200:Hydroxocobalamin 33 | DB00201:Caffeine 34 | DB00202:Succinylcholine 35 | DB00203:Sildenafil 36 | DB00204:Dofetilide 37 | DB00205:Pyrimethamine 38 | DB00208:Ticlopidine 39 | DB00210:Adapalene 40 | DB00211:Midodrine 41 | DB00213:Pantoprazole 42 | DB00215:Citalopram 43 | DB00216:Eletriptan 44 | DB00217:Bethanidine 45 | DB00218:Moxifloxacin 46 | DB00221:Isoetarine 47 | DB00222:Glimepiride 48 | DB00227:Lovastatin 49 | DB00228:Enflurane 50 | DB00231:Temazepam 51 | DB00232:Methyclothiazide 52 | DB00233:Aminosalicylic Acid 53 | DB00234:Reboxetine 54 | DB00235:Milrinone 55 | DB00242:Cladribine 56 | DB00243:Ranolazine 57 | DB00244:Mesalazine 58 | DB00245:Benzatropine 59 | DB00246:Ziprasidone 60 | DB00247:Methysergide 61 | DB00248:Cabergoline 62 | DB00252:Phenytoin 63 | DB00255:Diethylstilbestrol 64 | DB00257:Clotrimazole 65 | DB00259:Sulfanilamide 66 | DB00261:Anagrelide 67 | DB00264:Metoprolol 68 | DB00266:Dicoumarol 69 | DB00268:Ropinirole 70 | DB00270:Isradipine 71 | DB00273:Topiramate 72 | DB00276:Amsacrine 73 | DB00277:Theophylline 74 | DB00280:Disopyramide 75 | DB00281:Lidocaine 76 | DB00283:Clemastine 77 | DB00284:Acarbose 78 | DB00285:Venlafaxine 79 | DB00288:Amcinonide 80 | DB00289:Atomoxetine 81 | DB00290:Bleomycin 82 | DB00292:Etomidate 83 | DB00293:Raltitrexed 84 | DB00294:Etonogestrel 85 | DB00295:Morphine 86 | DB00297:Bupivacaine 87 | DB00298:Dapiprazole 88 | DB00304:Desogestrel 89 | DB00307:Bexarotene 90 | DB00308:Ibutilide 91 | DB00312:Pentobarbital 92 | DB00313:Valproic Acid 93 | DB00315:Zolmitriptan 94 | DB00316:Acetaminophen 95 | DB00317:Gefitinib 96 | DB00318:Codeine 97 | DB00320:Dihydroergotamine 98 | DB00321:Amitriptyline 99 | DB00327:Hydromorphone 100 | DB00328:Indomethacin 101 | DB00332:Ipratropium bromide 102 | DB00333:Methadone 103 | DB00334:Olanzapine 104 | DB00335:Atenolol 105 | DB00337:Pimecrolimus 106 | 
DB00340:Metixene 107 | DB00343:Diltiazem 108 | DB00344:Protriptyline 109 | DB00346:Alfuzosin 110 | DB00349:Clobazam 111 | DB00350:Minoxidil 112 | DB00353:Methylergometrine 113 | DB00354:Buclizine 114 | DB00358:Mefloquine 115 | DB00360:Sapropterin 116 | DB00363:Clozapine 117 | DB00364:Sucralfate 118 | DB00366:Doxylamine 119 | DB00367:Levonorgestrel 120 | DB00368:Norepinephrine 121 | DB00370:Mirtazapine 122 | DB00371:Meprobamate 123 | DB00373:Timolol 124 | DB00374:Treprostinil 125 | DB00376:Trihexyphenidyl 126 | DB00379:Mexiletine 127 | DB00380:Dexrazoxane 128 | DB00381:Amlodipine 129 | DB00383:Oxyphencyclimine 130 | DB00384:Triamterene 131 | DB00387:Procyclidine 132 | DB00388:Phenylephrine 133 | DB00391:Sulpiride 134 | DB00392:Ethopropazine 135 | DB00393:Nimodipine 136 | DB00394:Beclomethasone 137 | DB00396:Progesterone 138 | DB00397:Phenylpropanolamine 139 | DB00398:Sorafenib 140 | DB00399:Zoledronic acid 141 | DB00401:Nisoldipine 142 | DB00402:Eszopiclone 143 | DB00404:Alprazolam 144 | DB00408:Loxapine 145 | DB00409:Remoxipride 146 | DB00411:Carbachol 147 | DB00412:Rosiglitazone 148 | DB00413:Pramipexole 149 | DB00418:Secobarbital 150 | DB00419:Miglustat 151 | DB00420:Promazine 152 | DB00421:Spironolactone 153 | DB00422:Methylphenidate 154 | DB00424:Hyoscyamine 155 | DB00425:Zolpidem 156 | DB00431:Lindane 157 | DB00433:Prochlorperazine 158 | DB00434:Cyproheptadine 159 | DB00436:Bendroflumethiazide 160 | DB00440:Trimethoprim 161 | DB00441:Gemcitabine 162 | DB00445:Epirubicin 163 | DB00450:Droperidol 164 | DB00454:Pethidine 165 | DB00455:Loratadine 166 | DB00456:Cefalotin 167 | DB00457:Prazosin 168 | DB00458:Imipramine 169 | DB00459:Acitretin 170 | DB00461:Nabumetone 171 | DB00464:Sodium Tetradecyl Sulfate 172 | DB00465:Ketorolac 173 | DB00467:Enoxacin 174 | DB00468:Quinine 175 | DB00469:Tenoxicam 176 | DB00470:Dronabinol 177 | DB00471:Montelukast 178 | DB00472:Fluoxetine 179 | DB00475:Chlordiazepoxide 180 | DB00476:Duloxetine 181 | DB00477:Chlorpromazine 182 | DB00478:Rimantadine 183 | DB00480:Lenalidomide 184 | DB00481:Raloxifene 185 | DB00482:Celecoxib 186 | DB00484:Brimonidine 187 | DB00485:Dicloxacillin 188 | DB00486:Nabilone 189 | DB00487:Pefloxacin 190 | DB00489:Sotalol 191 | DB00490:Buspirone 192 | DB00491:Miglitol 193 | DB00493:Cefotaxime 194 | DB00495:Zidovudine 195 | DB00496:Darifenacin 196 | DB00497:Oxycodone 197 | DB00499:Flutamide 198 | DB00500:Tolmetin 199 | DB00502:Haloperidol 200 | DB00508:Triflupromazine 201 | DB00509:Dextrothyroxine 202 | DB00513:Aminocaproic Acid 203 | DB00514:Dextromethorphan 204 | DB00518:Albendazole 205 | DB00521:Carteolol 206 | DB00523:Alitretinoin 207 | DB00524:Metolazone 208 | DB00527:Cinchocaine 209 | DB00530:Erlotinib 210 | DB00533:Rofecoxib 211 | DB00537:Ciprofloxacin 212 | DB00539:Toremifene 213 | DB00540:Nortriptyline 214 | DB00541:Vincristine 215 | DB00543:Amoxapine 216 | DB00545:Pyridostigmine 217 | DB00546:Adinazolam 218 | DB00548:Azelaic Acid 219 | DB00549:Zafirlukast 220 | DB00553:Methoxsalen 221 | DB00554:Piroxicam 222 | DB00558:Zanamivir 223 | DB00559:Bosentan 224 | DB00561:Doxapram 225 | DB00563:Methotrexate 226 | DB00568:Cinnarizine 227 | DB00570:Vinblastine 228 | DB00571:Propranolol 229 | DB00572:Atropine 230 | DB00573:Fenoprofen 231 | DB00575:Clonidine 232 | DB00579:Mazindol 233 | DB00580:Valdecoxib 234 | DB00582:Voriconazole 235 | DB00586:Diclofenac 236 | DB00588:Fluticasone 237 | DB00589:Lisuride 238 | DB00590:Doxazosin 239 | DB00594:Amiloride 240 | DB00598:Labetalol 241 | DB00599:Thiopental 242 | DB00602:Ivermectin 243 | 
DB00604:Cisapride 244 | DB00605:Sulindac 245 | DB00606:Cyclothiazide 246 | DB00608:Chloroquine 247 | DB00611:Butorphanol 248 | DB00612:Bisoprolol 249 | DB00613:Amodiaquine 250 | DB00615:Rifabutin 251 | DB00619:Imatinib 252 | DB00622:Nicardipine 253 | DB00623:Fluphenazine 254 | DB00624:Testosterone 255 | DB00627:Niacin 256 | DB00630:Alendronic acid 257 | DB00631:Clofarabine 258 | DB00633:Dexmedetomidine 259 | DB00637:Astemizole 260 | DB00640:Adenosine 261 | DB00641:Simvastatin 262 | DB00642:Pemetrexed 263 | DB00643:Mebendazole 264 | DB00647:Dextropropoxyphene 265 | DB00651:Dyphylline 266 | DB00652:Pentazocine 267 | DB00653:Magnesium Sulfate 268 | DB00655:Estrone 269 | DB00656:Trazodone 270 | DB00659:Acamprosate 271 | DB00661:Verapamil 272 | DB00667:Histamine Phosphate 273 | DB00668:Epinephrine 274 | DB00669:Sumatriptan 275 | DB00670:Pirenzepine 276 | DB00673:Aprepitant 277 | DB00674:Galantamine 278 | DB00675:Tamoxifen 279 | DB00679:Thioridazine 280 | DB00682:Warfarin 281 | DB00683:Midazolam 282 | DB00685:Trovafloxacin 283 | DB00687:Fludrocortisone 284 | DB00690:Flurazepam 285 | DB00691:Moexipril 286 | DB00692:Phentolamine 287 | DB00694:Daunorubicin 288 | DB00695:Furosemide 289 | DB00696:Ergotamine 290 | DB00697:Tizanidine 291 | DB00701:Amprenavir 292 | DB00703:Methazolamide 293 | DB00704:Naltrexone 294 | DB00706:Tamsulosin 295 | DB00708:Sufentanil 296 | DB00711:Diethylcarbamazine 297 | DB00712:Flurbiprofen 298 | DB00714:Apomorphine 299 | DB00715:Paroxetine 300 | DB00716:Nedocromil 301 | DB00717:Norethisterone 302 | DB00721:Procaine 303 | DB00722:Lisinopril 304 | DB00723:Methoxamine 305 | DB00724:Imiquimod 306 | DB00726:Trimipramine 307 | DB00731:Nateglinide 308 | DB00734:Risperidone 309 | DB00741:Hydrocortisone 310 | DB00747:Scopolamine 311 | DB00749:Etodolac 312 | DB00751:Epinastine 313 | DB00752:Tranylcypromine 314 | DB00753:Isoflurane 315 | DB00755:Tretinoin 316 | DB00756:Hexachlorophene 317 | DB00758:Clopidogrel 318 | DB00762:Irinotecan 319 | DB00768:Olopatadine 320 | DB00773:Etoposide 321 | DB00774:Hydroflumethiazide 322 | DB00775:Tirofiban 323 | DB00780:Phenelzine 324 | DB00783:Estradiol 325 | DB00784:Mefenamic acid 326 | DB00788:Naproxen 327 | DB00794:Primidone 328 | DB00795:Sulfasalazine 329 | DB00797:Tolazoline 330 | DB00799:Tazarotene 331 | DB00800:Fenoldopam 332 | DB00801:Halazepam 333 | DB00804:Dicyclomine 334 | DB00805:Minaprine 335 | DB00806:Pentoxifylline 336 | DB00808:Indapamide 337 | DB00809:Tropicamide 338 | DB00810:Biperiden 339 | DB00811:Ribavirin 340 | DB00812:Phenylbutazone 341 | DB00813:Fentanyl 342 | DB00814:Meloxicam 343 | DB00818:Propofol 344 | DB00819:Acetazolamide 345 | DB00820:Tadalafil 346 | DB00822:Disulfiram 347 | DB00823:Ethynodiol 348 | DB00829:Diazepam 349 | DB00831:Trifluoperazine 350 | DB00834:Mifepristone 351 | DB00835:Brompheniramine 352 | DB00836:Loperamide 353 | DB00837:Progabide 354 | DB00841:Dobutamine 355 | DB00842:Oxazepam 356 | DB00843:Donepezil 357 | DB00844:Nalbuphine 358 | DB00847:Cysteamine 359 | DB00849:Methylphenobarbital 360 | DB00850:Perphenazine 361 | DB00851:Dacarbazine 362 | DB00852:Pseudoephedrine 363 | DB00860:Prednisolone 364 | DB00861:Diflunisal 365 | DB00862:Vardenafil 366 | DB00866:Alprenolol 367 | DB00869:Dorzolamide 368 | DB00870:Suprofen 369 | DB00871:Terbutaline 370 | DB00872:Conivaptan 371 | DB00875:Flupentixol 372 | DB00877:Sirolimus 373 | DB00878:Chlorhexidine 374 | DB00880:Chlorothiazide 375 | DB00887:Bumetanide 376 | DB00889:Granisetron 377 | DB00897:Triazolam 378 | DB00898:Ethanol 379 | DB00899:Remifentanil 380 | 
DB00903:Etacrynic acid 381 | DB00904:Ondansetron 382 | DB00905:Bimatoprost 383 | DB00907:Cocaine 384 | DB00908:Quinidine 385 | DB00909:Zonisamide 386 | DB00912:Repaglinide 387 | DB00914:Phenformin 388 | DB00915:Amantadine 389 | DB00917:Dinoprostone 390 | DB00918:Almotriptan 391 | DB00920:Ketotifen 392 | DB00921:Buprenorphine 393 | DB00922:Levosimendan 394 | DB00925:Phenoxybenzamine 395 | DB00929:Misoprostol 396 | DB00933:Mesoridazine 397 | DB00934:Maprotiline 398 | DB00935:Oxymetazoline 399 | DB00936:Salicylic acid 400 | DB00938:Salmeterol 401 | DB00939:Meclofenamic acid 402 | DB00940:Methantheline 403 | DB00944:Demecarium 404 | DB00945:Acetylsalicylic acid 405 | DB00949:Felbamate 406 | DB00951:Isoniazid 407 | DB00952:Naratriptan 408 | DB00953:Rizatriptan 409 | DB00956:Hydrocodone 410 | DB00960:Pindolol 411 | DB00962:Zaleplon 412 | DB00963:Bromfenac 413 | DB00964:Apraclonidine 414 | DB00966:Telmisartan 415 | DB00969:Alosetron 416 | DB00972:Azelastine 417 | DB00973:Ezetimibe 418 | DB00975:Dipyridamole 419 | DB00977:Ethinyl Estradiol 420 | DB00978:Lomefloxacin 421 | DB00980:Ramelteon 422 | DB00981:Physostigmine 423 | DB00983:Formoterol 424 | DB00986:Glycopyrronium 425 | DB00988:Dopamine 426 | DB00989:Rivastigmine 427 | DB00991:Oxaprozin 428 | DB00995:Auranofin 429 | DB00996:Gabapentin 430 | DB00997:Doxorubicin 431 | DB00998:Frovatriptan 432 | DB00999:Hydrochlorothiazide 433 | DB01001:Salbutamol 434 | DB01003:Cromoglicic acid 435 | DB01006:Letrozole 436 | DB01009:Ketoprofen 437 | DB01011:Metyrapone 438 | DB01014:Balsalazide 439 | DB01016:Glyburide 440 | DB01017:Minocycline 441 | DB01018:Guanfacine 442 | DB01019:Bethanechol 443 | DB01021:Trichlormethiazide 444 | DB01022:Phylloquinone 445 | DB01023:Felodipine 446 | DB01024:Mycophenolic acid 447 | DB01025:Amlexanox 448 | DB01028:Methoxyflurane 449 | DB01029:Irbesartan 450 | DB01030:Topotecan 451 | DB01032:Probenecid 452 | DB01035:Procainamide 453 | DB01036:Tolterodine 454 | DB01037:Selegiline 455 | DB01039:Fenofibrate 456 | DB01041:Thalidomide 457 | DB01043:Memantine 458 | DB01049:Ergoloid mesylate 459 | DB01050:Ibuprofen 460 | DB01054:Nitrendipine 461 | DB01059:Norfloxacin 462 | DB01062:Oxybutynin 463 | DB01063:Acetophenazine 464 | DB01064:Isoprenaline 465 | DB01065:Melatonin 466 | DB01067:Glipizide 467 | DB01068:Clonazepam 468 | DB01069:Promethazine 469 | DB01071:Mequitazine 470 | DB01073:Fludarabine 471 | DB01074:Perhexiline 472 | DB01075:Diphenhydramine 473 | DB01076:Atorvastatin 474 | DB01077:Etidronic acid 475 | DB01079:Tegaserod 476 | DB01080:Vigabatrin 477 | DB01081:Diphenoxylate 478 | DB01083:Orlistat 479 | DB01085:Pilocarpine 480 | DB01088:Iloprost 481 | DB01097:Leflunomide 482 | DB01098:Rosuvastatin 483 | DB01099:Flucytosine 484 | DB01100:Pimozide 485 | DB01102:Arbutamine 486 | DB01103:Quinacrine 487 | DB01106:Levocabastine 488 | DB01108:Trilostane 489 | DB01110:Miconazole 490 | DB01113:Papaverine 491 | DB01114:Chlorphenamine 492 | DB01115:Nifedipine 493 | DB01118:Amiodarone 494 | DB01119:Diazoxide 495 | DB01120:Gliclazide 496 | DB01124:Tolbutamide 497 | DB01126:Dutasteride 498 | DB01127:Econazole 499 | DB01128:Bicalutamide 500 | DB01132:Pioglitazone 501 | DB01133:Tiludronate 502 | DB01135:Doxacurium chloride 503 | DB01136:Carvedilol 504 | DB01137:Levofloxacin 505 | DB01138:Sulfinpyrazone 506 | DB01140:Cefadroxil 507 | DB01142:Doxepin 508 | DB01143:Amifostine 509 | DB01144:Diclofenamide 510 | DB01148:Flavoxate 511 | DB01149:Nefazodone 512 | DB01151:Desipramine 513 | DB01156:Bupropion 514 | DB01159:Halothane 515 | 
DB01161:Chloroprocaine 516 | DB01162:Terazosin 517 | DB01165:Ofloxacin 518 | DB01166:Cilostazol 519 | DB01169:Arsenic trioxide 520 | DB01171:Moclobemide 521 | DB01174:Phenobarbital 522 | DB01175:Escitalopram 523 | DB01176:Cyclizine 524 | DB01179:Podofilox 525 | DB01182:Propafenone 526 | DB01183:Naloxone 527 | DB01184:Domperidone 528 | DB01185:Fluoxymesterone 529 | DB01186:Pergolide 530 | DB01189:Desflurane 531 | DB01191:Dexfenfluramine 532 | DB01193:Acebutolol 533 | DB01194:Brinzolamide 534 | DB01195:Flecainide 535 | DB01196:Estramustine 536 | DB01197:Captopril 537 | DB01198:Zopiclone 538 | DB01199:Tubocurarine 539 | DB01200:Bromocriptine 540 | DB01202:Levetiracetam 541 | DB01203:Nadolol 542 | DB01204:Mitoxantrone 543 | DB01205:Flumazenil 544 | DB01208:Sparfloxacin 545 | DB01209:Dezocine 546 | DB01210:Levobunolol 547 | DB01212:Ceftriaxone 548 | DB01214:Metipranolol 549 | DB01215:Estazolam 550 | DB01216:Finasteride 551 | DB01221:Ketamine 552 | DB01223:Aminophylline 553 | DB01224:Quetiapine 554 | DB01225:Enoxaparin 555 | DB01227:Levomethadyl Acetate 556 | DB01229:Paclitaxel 557 | DB01233:Metoclopramide 558 | DB01234:Dexamethasone 559 | DB01235:Levodopa 560 | DB01236:Sevoflurane 561 | DB01238:Aripiprazole 562 | DB01239:Chlorprothixene 563 | DB01240:Epoprostenol 564 | DB01241:Gemfibrozil 565 | DB01242:Clomipramine 566 | DB01244:Bepridil 567 | DB01247:Isocarboxazid 568 | DB01248:Docetaxel 569 | DB01250:Olsalazine 570 | DB01251:Gliquidone 571 | DB01252:Mitiglinide 572 | DB01254:Dasatinib 573 | DB01259:Lapatinib 574 | DB01267:Paliperidone 575 | DB01268:Sunitinib 576 | DB01273:Varenicline 577 | DB01275:Hydralazine 578 | DB01283:Lumiracoxib 579 | DB01288:Fenoterol 580 | DB01289:Glisoxepide 581 | DB01291:Pirbuterol 582 | DB01295:Bevantolol 583 | DB01296:Glucosamine 584 | DB01303:Oxtriphylline 585 | DB01325:Quinethazone 586 | DB01326:Cefamandole 587 | DB01327:Cefazolin 588 | DB01336:Metocurine 589 | DB01337:Pancuronium 590 | DB01339:Vecuronium 591 | DB01349:Tasosartan 592 | DB01351:Amobarbital 593 | DB01353:Butethal 594 | DB01355:Hexobarbital 595 | DB01356:Lithium 596 | DB01359:Penbutolol 597 | DB01364:Ephedrine 598 | DB01367:Rasagiline 599 | DB01382:Glycodiazine 600 | DB01392:Yohimbine 601 | DB01393:Bezafibrate 602 | DB01394:Colchicine 603 | DB01399:Salsalate 604 | DB01400:Neostigmine 605 | DB01403:Methotrimeprazine 606 | DB01406:Danazol 607 | DB01407:Clenbuterol 608 | DB01411:Pranlukast 609 | DB01412:Theobromine 610 | DB01427:Amrinone 611 | DB01429:Aprindine 612 | DB01431:Allylestrenol 613 | DB01435:Antipyrine 614 | DB01452:Heroin 615 | DB01544:Flunitrazepam 616 | DB01558:Bromazepam 617 | DB01559:Clotiazepam 618 | DB01567:Fludiazepam 619 | DB01580:Oxprenolol 620 | DB01586:Ursodeoxycholic acid 621 | DB01587:Ketazolam 622 | DB01588:Prazepam 623 | DB01589:Quazepam 624 | DB01591:Solifenacin 625 | DB01595:Nitrazepam 626 | DB01599:Probucol 627 | DB01600:Tiaprofenic acid 628 | DB01608:Propericiazine 629 | DB01611:Hydroxychloroquine 630 | DB01614:Acepromazine 631 | DB01618:Molindone 632 | DB01620:Pheniramine 633 | DB01621:Pipotiazine 634 | DB01622:Thioproperazine 635 | DB01623:Thiothixene 636 | DB01624:Zuclopenthixol 637 | DB01625:Isopropamide 638 | DB01626:Pargyline 639 | DB01628:Etoricoxib 640 | DB01656:Roflumilast 641 | DB01708:Prasterone 642 | DB02546:Vorinostat 643 | DB02638:Terlipressin 644 | DB04272:Citric Acid 645 | DB04552:Niflumic Acid 646 | DB04573:Estriol 647 | DB04794:Bifonazole 648 | DB04841:Flunarizine 649 | DB04842:Fluspirilene 650 | DB04843:Mepenzolate 651 | DB04855:Dronedarone 652 | 
DB04861:Nebivolol 653 | DB04868:Nilotinib 654 | DB04876:Vildagliptin 655 | DB04878:Voglibose 656 | DB04894:Vapreotide 657 | DB04896:Milnacipran 658 | DB04946:Iloperidone 659 | DB04948:Lofexidine 660 | DB04953:Ezogabine 661 | DB05013:Ingenol Mebutate 662 | DB05039:Indacaterol 663 | DB05105:Pleconaril 664 | DB05266:Ibudilast 665 | DB05271:Rotigotine 666 | DB05294:Vandetanib 667 | DB05381:histamine dihydrochloride 668 | DB06144:Sertindole 669 | DB06148:Mianserin 670 | DB06155:Rimonabant 671 | DB06196:Icatibant 672 | DB06203:Alogliptin 673 | DB06204:Tapentadol 674 | DB06213:Regadenoson 675 | DB06216:Asenapine 676 | DB06218:Lacosamide 677 | DB06268:Sitaxentan 678 | DB06274:Alvimopan 679 | DB06288:Amisulpride 680 | DB06292:Dapagliflozin 681 | DB06335:Saxagliptin 682 | DB06589:Pazopanib 683 | DB06594:Agomelatine 684 | DB06616:Bosutinib 685 | DB06684:Vilazodone 686 | DB06689:Ethanolamine 687 | DB06691:Mepyramine 688 | DB06694:Xylometazoline 689 | DB06698:Betahistine 690 | DB06700:Desvenlafaxine 691 | DB06702:Fesoterodine 692 | DB06710:Methyltestosterone 693 | DB06711:Naphazoline 694 | DB06712:Nilvadipine 695 | DB06725:Lornoxicam 696 | DB06772:Cabazitaxel 697 | DB06777:Chenodeoxycholic acid 698 | DB06795:Mafenide 699 | DB06802:Nepafenac 700 | DB06809:Plerixafor 701 | DB08604:Triclosan 702 | DB08800:Chloropyramine 703 | DB08801:Dimetindene 704 | DB08807:Bopindolol 705 | DB08808:Bupranolol 706 | DB08811:Tofisopam 707 | DB08815:Lurasidone 708 | DB08828:Vismodegib 709 | DB08865:Crizotinib 710 | DB08868:Fingolimod 711 | DB08873:Boceprevir 712 | DB08877:Ruxolitinib 713 | DB08881:Vemurafenib 714 | DB08882:Linagliptin 715 | DB08896:Regorafenib 716 | DB08901:Ponatinib 717 | DB08907:Canagliflozin 718 | DB08916:Afatinib 719 | DB08932:Macitentan 720 | DB08954:Ifenprodil 721 | DB09000:Cyamemazine -------------------------------------------------------------------------------- /data/SSLdata/ClusterPre.txt: -------------------------------------------------------------------------------- 1 | 1007 0.14285714285714285 2 | 38 0.6236440522154808 3 | 43 0.8344203660845781 4 | 1812 0.10756302521008404 5 | 1763 0.27450980392156865 6 | 2169 0.2046783625730994 7 | 1319 0 8 | 160 0.742245601306004 9 | 1383 0.19696969696969696 10 | 433 0 11 | 1811 0 12 | 548 0.8851275107199278 13 | 871 0 14 | 487 0.3 15 | 973 0.08088235294117647 16 | 36 0.5053509885724651 17 | 336 0.75 18 | 618 0.48537340979500365 19 | 142 0.7766866703036915 20 | 2353 0.23790322580645162 21 | 1617 0.3006535947712418 22 | 1793 0 23 | 738 0.23684210526315788 24 | 746 0.5 25 | 1158 0.16231884057971013 26 | 1262 0.1111111111111111 27 | 1578 0.12087912087912088 28 | 1786 0.3736842105263158 29 | 1211 0.07977207977207977 30 | 2975 0 31 | -------------------------------------------------------------------------------- /data/SSLdata/EdgeMask.txt: -------------------------------------------------------------------------------- 1 | 1577 2081 1 2 | 136 520 1 3 | 395 506 1 4 | 180 371 1 5 | 207 278 1 6 | 1136 1136 1 7 | 20 298 1 8 | 1371 2670 0 9 | 266 634 1 10 | 13 61 1 11 | 623 834 2 12 | 141 342 1 13 | 1442 1598 1 14 | 364 366 1 15 | 237 361 1 16 | 167 280 1 17 | 237 529 1 18 | 889 2362 1 19 | 1311 3003 0 20 | 450 679 1 21 | 891 2896 0 22 | 50 274 1 23 | 155 1220 2 24 | 984 1998 1 25 | 632 680 1 26 | 97 708 1 27 | 240 555 1 28 | 391 486 1 29 | 939 2751 0 30 | 34 464 1 31 | 150 207 1 32 | 303 597 1 33 | 110 545 1 34 | 99 525 1 35 | 173 652 1 36 | 1900 2697 0 37 | 178 245 1 38 | 1498 2716 0 39 | 450 569 1 40 | 157 521 1 41 | 135 374 1 42 | 442 521 1 43 | 234 594 1 44 | 
1516 2503 1 45 | 1442 1970 1 46 | 107 511 1 47 | 1456 1719 1 48 | 971 2863 0 49 | 1371 2884 0 50 | 210 224 1 51 | 37 532 1 52 | 175 567 1 53 | 1076 2679 0 54 | 108 204 1 55 | 142 318 1 56 | 127 173 1 57 | 263 439 1 58 | 448 489 1 59 | 67 438 1 60 | 88 478 1 61 | 510 664 1 62 | 363 594 1 63 | 1682 1709 1 64 | 1208 2102 1 65 | 307 492 1 66 | 1066 1246 1 67 | 2185 2765 0 68 | 101 713 1 69 | 158 180 1 70 | 594 664 1 71 | 235 370 1 72 | 1126 1759 1 73 | 203 226 1 74 | 60 710 1 75 | 339 568 1 76 | 94 1144 2 77 | 818 2474 1 78 | 1074 1174 1 79 | 1169 2040 1 80 | 204 575 1 81 | 1479 1479 1 82 | 101 442 1 83 | 1864 2856 0 84 | 169 388 1 85 | 1438 1772 1 86 | 58 559 1 87 | 144 298 1 88 | 59 1449 2 89 | 38 643 1 90 | 328 618 1 91 | 520 669 1 92 | 122 494 1 93 | 404 529 1 94 | 67 173 1 95 | 199 684 1 96 | 1017 2880 0 97 | 159 495 1 98 | 298 543 1 99 | 1331 1339 1 100 | 250 298 1 101 | 2117 2835 0 102 | 308 690 1 103 | 99 444 1 104 | 711 1741 2 105 | 1528 2389 1 106 | 507 564 1 107 | 207 265 1 108 | 342 615 1 109 | 77 97 1 110 | 115 476 1 111 | 529 716 1 112 | 1442 2628 0 113 | 136 1351 2 114 | 2007 2389 1 115 | 175 205 1 116 | 1941 2119 1 117 | 307 342 1 118 | 119 293 1 119 | 197 626 1 120 | 154 167 1 121 | 1962 2704 0 122 | 268 552 1 123 | 207 377 1 124 | 67 1321 2 125 | 331 490 1 126 | 1036 1654 1 127 | 140 204 1 128 | 239 711 1 129 | 312 594 1 130 | 93 222 1 131 | 32 572 1 132 | 1485 2939 0 133 | 97 113 1 134 | 559 678 1 135 | 98 637 1 136 | 50 314 1 137 | 69 235 1 138 | 380 407 1 139 | 68 558 1 140 | 101 249 1 141 | 28 594 1 142 | 975 2699 0 143 | 362 2729 3 144 | 2316 2951 0 145 | 1594 2769 0 146 | 1514 2849 0 147 | 348 1449 2 148 | 949 2354 1 149 | 1413 3015 0 150 | 204 2870 3 151 | 148 695 1 152 | 582 663 1 153 | 159 347 1 154 | 102 420 1 155 | 1278 2805 0 156 | 102 648 1 157 | 1159 2912 0 158 | 55 674 1 159 | 326 703 1 160 | 2290 2863 0 161 | 189 453 1 162 | 65 297 1 163 | 2316 2694 0 164 | 2312 2860 0 165 | 145 239 1 166 | 1431 2984 0 167 | 1196 3014 0 168 | 235 584 1 169 | 250 491 1 170 | 2319 2466 1 171 | 89 342 1 172 | 207 559 1 173 | 1746 3015 0 174 | 380 420 1 175 | 405 409 1 176 | 245 432 1 177 | 77 463 1 178 | 150 294 1 179 | 121 198 1 180 | 1490 2689 0 181 | 1918 2153 1 182 | 164 178 1 183 | 95 652 1 184 | 257 614 1 185 | 979 2627 0 186 | 153 309 1 187 | 245 249 1 188 | 1077 2252 1 189 | 180 342 1 190 | 348 617 1 191 | 1374 2929 0 192 | 307 658 1 193 | 1566 1745 1 194 | 1432 2704 0 195 | 26 382 1 196 | 57 175 1 197 | 2256 2752 0 198 | 248 444 1 199 | 147 448 1 200 | 58 464 1 201 | 412 529 1 202 | 238 558 1 203 | 151 1074 2 204 | 312 365 1 205 | 445 590 1 206 | 1315 1418 1 207 | 782 2349 1 208 | 766 1457 1 209 | 260 351 1 210 | 395 1015 2 211 | 330 335 1 212 | 29 373 1 213 | 133 340 1 214 | 207 280 1 215 | 140 148 1 216 | 184 195 1 217 | 497 947 2 218 | 55 194 1 219 | 70 141 1 220 | 503 506 1 221 | 618 682 1 222 | 184 194 1 223 | 2577 2828 0 224 | 97 534 1 225 | 18 634 1 226 | 1637 2686 0 227 | 189 580 1 228 | 188 626 1 229 | 32 55 1 230 | 157 1540 2 231 | 32 169 1 232 | 135 431 1 233 | 286 502 1 234 | 1598 2285 1 235 | 375 529 1 236 | 163 322 1 237 | 276 635 1 238 | 99 324 1 239 | 37 375 1 240 | 250 709 1 241 | 2132 2615 0 242 | 379 576 1 243 | 2221 2617 0 244 | 1451 2722 0 245 | 331 706 1 246 | 256 373 1 247 | 559 571 1 248 | 1889 2932 0 249 | 426 581 1 250 | 1135 2752 0 251 | 374 564 1 252 | 2029 2042 1 253 | 34 496 1 254 | 98 404 1 255 | 187 420 1 256 | 361 508 1 257 | 209 602 1 258 | 1514 1933 1 259 | 484 656 1 260 | 182 371 1 261 | 171 365 1 262 | 166 288 1 263 | 561 571 1 264 | 
280 572 1 265 | 46 159 1 266 | 1485 2925 0 267 | 2082 2252 1 268 | 2185 2339 1 269 | 140 151 1 270 | 238 568 1 271 | 571 718 1 272 | 887 2974 0 273 | 2078 2263 1 274 | 193 369 1 275 | 384 674 1 276 | 70 102 1 277 | 263 354 1 278 | 122 330 1 279 | 1892 2033 1 280 | 1404 2505 1 281 | 418 717 1 282 | 177 2692 3 283 | 3 503 1 284 | 683 2285 2 285 | 310 339 1 286 | 320 588 1 287 | 34 408 1 288 | 79 468 1 289 | 92 156 1 290 | 161 644 1 291 | 1202 2628 0 292 | 202 351 1 293 | 95 560 1 294 | 193 492 1 295 | 1237 2642 0 296 | 657 1017 2 297 | 104 540 1 298 | 106 391 1 299 | 1994 2617 0 300 | 106 150 1 301 | 2191 2498 1 302 | 1591 2628 0 303 | 263 652 1 304 | 86 410 1 305 | 1903 2974 0 306 | 348 526 1 307 | 1351 1418 1 308 | 123 324 1 309 | 23 278 1 310 | 300 307 1 311 | 359 376 1 312 | 2206 3037 0 313 | 163 521 1 314 | 431 671 1 315 | 353 564 1 316 | 347 634 1 317 | 32 309 1 318 | 122 213 1 319 | 193 222 1 320 | 174 698 1 321 | 294 330 1 322 | 179 674 1 323 | 184 610 1 324 | 76 584 1 325 | 147 268 1 326 | 2358 2751 0 327 | 970 1751 1 328 | 161 184 1 329 | 935 3015 0 330 | 211 212 1 331 | 63 426 1 332 | 802 2441 1 333 | 208 2553 2 334 | 282 421 1 335 | 37 667 1 336 | 66 1737 2 337 | 409 502 1 338 | 79 491 1 339 | 251 374 1 340 | 222 262 1 341 | 305 484 1 342 | 741 985 1 343 | 480 640 1 344 | 513 561 1 345 | 957 2974 0 346 | 102 297 1 347 | 2335 2587 1 348 | 1564 1973 1 349 | 1490 2965 0 350 | 1321 2369 1 351 | 211 518 1 352 | 2167 2871 0 353 | 1657 2103 1 354 | 337 555 1 355 | 58 635 1 356 | 1386 2252 1 357 | 179 573 1 358 | 324 327 1 359 | 290 572 1 360 | 162 1017 2 361 | 541 589 1 362 | 1650 2984 0 363 | 2334 2334 1 364 | 276 708 1 365 | 67 339 1 366 | 246 471 1 367 | 1128 1857 1 368 | 1446 2907 0 369 | 311 681 1 370 | 882 2866 0 371 | 1890 1977 1 372 | 55 593 1 373 | 41 50 1 374 | 275 494 1 375 | 1772 2546 1 376 | 131 453 1 377 | 328 558 1 378 | 151 409 1 379 | 178 185 1 380 | 97 397 1 381 | 313 326 1 382 | 79 245 1 383 | 317 565 1 384 | 282 323 1 385 | 92 305 1 386 | 169 398 1 387 | 565 589 1 388 | 288 680 1 389 | 25 716 1 390 | 2575 2932 0 391 | 214 377 1 392 | 1643 2615 0 393 | 1482 1704 1 394 | 1662 2984 0 395 | 189 310 1 396 | 291 540 1 397 | 486 593 1 398 | 115 522 1 399 | 140 526 1 400 | 70 190 1 401 | 77 573 1 402 | 355 708 1 403 | 388 520 1 404 | 538 679 1 405 | 75 656 1 406 | 359 531 1 407 | 354 657 1 408 | 152 560 1 409 | 91 481 1 410 | 242 574 1 411 | 113 162 1 412 | 88 93 1 413 | 28 630 1 414 | 67 192 1 415 | 394 445 1 416 | 306 653 1 417 | 661 714 1 418 | 117 354 1 419 | 195 490 1 420 | 37 453 1 421 | 377 652 1 422 | 195 318 1 423 | 403 671 1 424 | 47 712 1 425 | 2302 2844 0 426 | 1409 1837 1 427 | 435 480 1 428 | 1488 2751 0 429 | 2190 2885 0 430 | 1375 2900 0 431 | 548 668 1 432 | 85 659 1 433 | 65 370 1 434 | 58 610 1 435 | 283 529 1 436 | 1498 2812 0 437 | 1545 1761 1 438 | 1618 1750 1 439 | 1723 1963 1 440 | 1421 1580 1 441 | 897 2145 1 442 | 39 578 1 443 | 559 652 1 444 | 835 970 1 445 | 199 311 1 446 | 312 378 1 447 | 919 2805 0 448 | 674 683 1 449 | 1032 2677 0 450 | 1864 2189 1 451 | 2375 2887 0 452 | 444 695 1 453 | 2359 2972 0 454 | 113 323 1 455 | 212 539 1 456 | 225 2226 2 457 | 1493 2857 0 458 | 251 661 1 459 | 2564 2628 0 460 | 413 560 1 461 | 19 367 1 462 | 465 597 1 463 | 72 312 1 464 | 302 426 1 465 | 1043 2615 0 466 | 313 410 1 467 | 551 588 1 468 | 923 1525 1 469 | 1860 2900 0 470 | 1886 2688 0 471 | 1994 2738 0 472 | 182 534 1 473 | 1493 2667 0 474 | 48 212 1 475 | 122 448 1 476 | 2049 2884 0 477 | 777 1078 1 478 | 665 1059 2 479 | 510 633 1 480 | 422 957 2 481 
| 974 2628 0 482 | 227 698 1 483 | 18 309 1 484 | 35 571 1 485 | 408 439 1 486 | 127 2378 2 487 | 1892 2009 1 488 | 2451 2567 1 489 | 245 715 1 490 | 42 213 1 491 | 2197 2900 0 492 | 796 2978 0 493 | 1509 1997 1 494 | 127 286 1 495 | 61 593 1 496 | 152 678 1 497 | 68 313 1 498 | 38 590 1 499 | 967 2857 0 500 | 88 316 1 501 | 1860 1873 1 502 | 2375 2815 0 503 | 48 358 1 504 | 1482 1601 1 505 | 1377 2628 0 506 | 145 502 1 507 | 38 626 1 508 | 1067 1523 1 509 | 776 1412 1 510 | 683 2008 2 511 | 70 564 1 512 | 22 48 1 513 | 138 652 1 514 | 68 100 1 515 | 1911 2677 0 516 | 193 245 1 517 | 1968 2260 1 518 | 1346 1348 1 519 | 98 376 1 520 | 29 520 1 521 | 340 462 1 522 | 224 535 1 523 | 2293 2869 0 524 | 316 616 1 525 | 204 602 1 526 | 167 382 1 527 | 2032 2451 1 528 | 465 560 1 529 | 185 518 1 530 | 1667 2733 0 531 | 120 692 1 532 | 108 547 1 533 | 239 415 1 534 | 522 629 1 535 | 2096 2099 1 536 | 1492 2713 0 537 | 391 522 1 538 | 63 1292 2 539 | 752 1295 1 540 | 140 335 1 541 | 96 716 1 542 | 185 257 1 543 | 2452 2704 0 544 | 276 681 1 545 | 109 148 1 546 | 579 581 1 547 | 137 637 1 548 | 2180 2688 0 549 | 63 270 1 550 | 370 432 1 551 | 1122 2972 0 552 | 277 1449 2 553 | 74 182 1 554 | 167 356 1 555 | 1650 3003 0 556 | 1905 2027 1 557 | 184 422 1 558 | 235 415 1 559 | 150 623 1 560 | 721 2974 0 561 | 152 658 1 562 | 1594 3030 0 563 | 1997 3003 0 564 | 206 297 1 565 | 91 250 1 566 | 2358 2358 1 567 | 167 383 1 568 | 341 367 1 569 | 41 384 1 570 | 1850 2863 0 571 | 749 2259 1 572 | 1884 3037 0 573 | 43 230 1 574 | 567 646 1 575 | 492 552 1 576 | 251 504 1 577 | 18 21 1 578 | 154 391 1 579 | 538 650 1 580 | 1818 2615 0 581 | 835 3013 0 582 | 32 453 1 583 | 477 675 1 584 | 647 1404 2 585 | 284 597 1 586 | 242 297 1 587 | 959 2979 0 588 | 202 2699 3 589 | 26 240 1 590 | 2 197 1 591 | 314 373 1 592 | 180 266 1 593 | 547 603 1 594 | 66 208 1 595 | 121 415 1 596 | 1410 2809 0 597 | 1490 2714 0 598 | 2316 2919 0 599 | 250 593 1 600 | 574 2415 2 601 | 308 557 1 602 | 92 633 1 603 | 324 503 1 604 | 946 3030 0 605 | 88 371 1 606 | 174 626 1 607 | 561 672 1 608 | 47 253 1 609 | 2396 2871 0 610 | 293 453 1 611 | 246 370 1 612 | 1409 2809 0 613 | 359 519 1 614 | 725 1404 1 615 | 1436 2695 0 616 | 554 584 1 617 | 86 370 1 618 | 69 141 1 619 | 437 503 1 620 | 120 356 1 621 | 910 1987 1 622 | 440 657 1 623 | 90 207 1 624 | 93 179 1 625 | 379 438 1 626 | 1533 1974 1 627 | 365 650 1 628 | 1578 2837 0 629 | 20 115 1 630 | 420 548 1 631 | 206 335 1 632 | 55 713 1 633 | 1561 2668 0 634 | 1963 3000 0 635 | 61 383 1 636 | 1088 2419 1 637 | 1665 2955 0 638 | 3 483 1 639 | 239 453 1 640 | 1667 2900 0 641 | 1409 2805 0 642 | 1353 2139 1 643 | 2131 3037 0 644 | 1201 1945 1 645 | 1019 2855 0 646 | 98 106 1 647 | 190 1016 2 648 | 539 560 1 649 | 298 584 1 650 | 853 1484 1 651 | 1093 1100 1 652 | 552 613 1 653 | 1594 2880 0 654 | 996 996 1 655 | 298 834 2 656 | 61 102 1 657 | 103 463 1 658 | 541 1718 2 659 | 224 492 1 660 | 651 708 1 661 | 2316 2905 0 662 | 208 567 1 663 | 429 683 1 664 | 237 693 1 665 | 232 341 1 666 | 253 572 1 667 | 206 589 1 668 | 2492 2880 0 669 | 218 435 1 670 | 143 1540 2 671 | 19 104 1 672 | 127 645 1 673 | 1680 2915 0 674 | 1332 1333 1 675 | 1727 2932 0 676 | 199 342 1 677 | 1070 1522 1 678 | 37 719 1 679 | 326 689 1 680 | 70 370 1 681 | 294 326 1 682 | 317 1949 2 683 | 127 280 1 684 | 2103 2714 0 685 | 248 581 1 686 | 260 653 1 687 | 538 566 1 688 | 263 359 1 689 | 140 371 1 690 | 1368 2628 0 691 | 659 665 1 692 | 290 472 1 693 | 294 453 1 694 | 40 695 1 695 | 415 1429 2 696 | 204 642 1 697 | 227 
675 1 698 | 115 158 1 699 | 115 516 1 700 | 370 560 1 701 | 1942 2974 0 702 | 22 103 1 703 | 353 2374 2 704 | 95 380 1 705 | 134 576 1 706 | 2472 2472 1 707 | 182 309 1 708 | 1371 2759 0 709 | 1781 2934 0 710 | 2 95 1 711 | 2492 3037 0 712 | 924 1051 1 713 | 1860 2706 0 714 | 1482 2297 1 715 | 29 150 1 716 | 150 514 1 717 | 2306 2800 0 718 | 1663 2834 0 719 | 1940 2752 0 720 | 283 624 1 721 | 276 405 1 722 | 1907 2615 0 723 | 258 1754 2 724 | 103 502 1 725 | 725 1580 1 726 | 106 140 1 727 | 63 2889 3 728 | 1085 2866 0 729 | 246 564 1 730 | 180 648 1 731 | 667 672 1 732 | 1673 2800 0 733 | 2447 2973 0 734 | 2205 3000 0 735 | 350 3043 3 736 | 373 704 1 737 | 99 318 1 738 | 29 95 1 739 | 327 339 1 740 | 875 3015 0 741 | 2359 3006 0 742 | 316 572 1 743 | 1502 2444 1 744 | 135 537 1 745 | 1576 2768 0 746 | 251 671 1 747 | 270 376 1 748 | 250 657 1 749 | 254 713 1 750 | 199 245 1 751 | 1418 1881 1 752 | 122 342 1 753 | 395 561 1 754 | 837 2955 0 755 | 1408 2749 0 756 | 383 651 1 757 | 908 2768 0 758 | 61 562 1 759 | 2319 2828 0 760 | 122 328 1 761 | 97 601 1 762 | 42 512 1 763 | 50 297 1 764 | 1032 1774 1 765 | 218 458 1 766 | 1418 1673 1 767 | 101 308 1 768 | 1520 1598 1 769 | 163 354 1 770 | 37 304 1 771 | 409 679 1 772 | 120 712 1 773 | 256 651 1 774 | 139 209 1 775 | 89 268 1 776 | 19 543 1 777 | 495 520 1 778 | 415 647 1 779 | 69 138 1 780 | 239 597 1 781 | 309 351 1 782 | 211 283 1 783 | 1503 1528 1 784 | 106 404 1 785 | 407 561 1 786 | 177 612 1 787 | 2444 2501 1 788 | 499 632 1 789 | 101 624 1 790 | 1720 2880 0 791 | 239 247 1 792 | 2114 2661 0 793 | 81 536 1 794 | 278 361 1 795 | 488 541 1 796 | 510 622 1 797 | 367 545 1 798 | 2255 2932 0 799 | 1821 2737 0 800 | 741 2468 1 801 | 211 252 1 802 | 101 416 1 803 | 58 616 1 804 | 79 564 1 805 | 1995 2952 0 806 | 1495 2658 0 807 | 243 694 1 808 | 49 657 1 809 | 1641 2906 0 810 | 563 696 1 811 | 160 198 1 812 | 198 327 1 813 | 49 199 1 814 | 116 398 1 815 | 517 719 1 816 | 1090 3017 0 817 | 1486 2818 0 818 | 1373 3037 0 819 | 106 599 1 820 | 1100 2615 0 821 | 414 520 1 822 | 66 684 1 823 | 108 431 1 824 | 2049 3015 0 825 | 2424 2424 1 826 | 263 624 1 827 | 164 495 1 828 | 56 129 1 829 | 86 447 1 830 | 18 91 1 831 | 395 522 1 832 | 360 520 1 833 | 25 355 1 834 | 283 396 1 835 | 129 379 1 836 | 318 381 1 837 | 381 687 1 838 | 1719 2965 0 839 | 1651 2660 0 840 | 230 340 1 841 | 49 499 1 842 | 380 641 1 843 | 366 595 1 844 | 307 648 1 845 | 359 1429 2 846 | 333 558 1 847 | 265 647 1 848 | 52 347 1 849 | 226 715 1 850 | 252 717 1 851 | 249 263 1 852 | 125 388 1 853 | 156 177 1 854 | 1695 2951 0 855 | 104 670 1 856 | 1777 3021 0 857 | 416 472 1 858 | 1441 2701 0 859 | 2068 2752 0 860 | 75 101 1 861 | 2017 2974 0 862 | 391 592 1 863 | 565 707 1 864 | 538 595 1 865 | 396 554 1 866 | 939 1876 1 867 | 599 650 1 868 | 391 679 1 869 | 18 127 1 870 | 422 442 1 871 | 91 187 1 872 | 149 374 1 873 | 90 674 1 874 | 681 1832 2 875 | 1719 2386 1 876 | 479 716 1 877 | 574 761 2 878 | 1567 2955 0 879 | 149 706 1 880 | 237 412 1 881 | 93 418 1 882 | 99 275 1 883 | 683 713 1 884 | 37 359 1 885 | 2228 2684 0 886 | 75 601 1 887 | 29 173 1 888 | 2391 2679 0 889 | 498 560 1 890 | 37 383 1 891 | 526 635 1 892 | 148 499 1 893 | 103 537 1 894 | 973 2869 0 895 | 273 375 1 896 | 426 431 1 897 | 2290 2812 0 898 | 1862 2040 1 899 | 796 2885 0 900 | 264 614 1 901 | 48 370 1 902 | 2316 3014 0 903 | 239 492 1 904 | 391 464 1 905 | 199 240 1 906 | 2266 2266 1 907 | 725 745 1 908 | 44 602 1 909 | 396 490 1 910 | 98 347 1 911 | 1493 2765 0 912 | 1485 2884 0 913 | 156 539 1 914 | 
1449 3015 0 915 | 2 284 1 916 | 123 516 1 917 | 2358 2900 0 918 | 1371 2720 0 919 | 101 495 1 920 | 29 959 2 921 | 2263 2944 0 922 | 245 339 1 923 | 470 511 1 924 | 342 447 1 925 | 123 232 1 926 | 68 103 1 927 | 704 957 2 928 | 106 389 1 929 | 773 2871 0 930 | 164 340 1 931 | 522 544 1 932 | 25 415 1 933 | 83 455 1 934 | 148 447 1 935 | 1074 2732 0 936 | 1074 2299 1 937 | 40 555 1 938 | 471 524 1 939 | 61 71 1 940 | 529 565 1 941 | 959 2355 1 942 | 1622 2010 1 943 | 152 204 1 944 | 119 2863 3 945 | 643 663 1 946 | 278 517 1 947 | 34 90 1 948 | 163 455 1 949 | 97 682 1 950 | 290 448 1 951 | 437 679 1 952 | 123 665 1 953 | 52 198 1 954 | 282 320 1 955 | 563 672 1 956 | 203 351 1 957 | 44 644 1 958 | 419 555 1 959 | 1100 2900 0 960 | 273 407 1 961 | 61 512 1 962 | 147 378 1 963 | 2114 3037 0 964 | 1534 3014 0 965 | 148 573 1 966 | 2343 2768 0 967 | 1402 2749 0 968 | 350 652 1 969 | 607 1317 2 970 | 1658 2974 0 971 | 238 398 1 972 | 358 591 1 973 | 41 96 1 974 | 247 252 1 975 | 514 604 1 976 | 637 658 1 977 | 1211 2881 0 978 | 490 760 2 979 | 1248 2752 0 980 | 62 472 1 981 | 140 173 1 982 | 115 439 1 983 | 24 91 1 984 | 367 553 1 985 | 2253 2661 0 986 | 1821 2615 0 987 | 143 406 1 988 | 84 187 1 989 | 1954 2504 1 990 | 403 435 1 991 | 236 369 1 992 | 349 518 1 993 | 327 365 1 994 | 1973 2551 1 995 | 110 442 1 996 | 206 554 1 997 | 298 633 1 998 | 97 351 1 999 | 177 179 1 1000 | 497 1754 2 1001 | 66 68 1 1002 | 1843 1977 1 1003 | 2358 2755 0 1004 | 103 718 1 1005 | 120 439 1 1006 | 85 302 1 1007 | 280 354 1 1008 | 1160 1482 1 1009 | 1550 1968 1 1010 | 359 550 1 1011 | 1127 2430 1 1012 | 287 326 1 1013 | 847 2897 0 1014 | 1618 2354 1 1015 | 267 277 1 1016 | 540 559 1 1017 | 286 308 1 1018 | 366 650 1 1019 | 206 569 1 1020 | 347 438 1 1021 | 43 668 1 1022 | 417 704 1 1023 | 1196 2955 0 1024 | 959 1138 1 1025 | 1397 1397 1 1026 | 431 2952 3 1027 | 507 651 1 1028 | 1074 2865 0 1029 | 417 703 1 1030 | 1150 1150 1 1031 | 277 396 1 1032 | 44 681 1 1033 | 174 577 1 1034 | 780 2955 0 1035 | 74 632 1 1036 | 374 409 1 1037 | 32 91 1 1038 | 1754 2876 0 1039 | 486 688 1 1040 | 487 567 1 1041 | 41 623 1 1042 | 79 622 1 1043 | 687 954 2 1044 | 28 573 1 1045 | 341 388 1 1046 | 246 635 1 1047 | 214 522 1 1048 | 897 2239 1 1049 | 535 695 1 1050 | 1158 2501 1 1051 | 435 540 1 1052 | 1738 2677 0 1053 | 117 305 1 1054 | 647 1015 2 1055 | 102 141 1 1056 | 932 1036 1 1057 | 2052 2615 0 1058 | 1002 3014 0 1059 | 1518 2068 1 1060 | 188 327 1 1061 | 1100 2869 0 1062 | 2164 3021 0 1063 | 314 567 1 1064 | 538 574 1 1065 | 183 1455 2 1066 | 1179 3015 0 1067 | 615 617 1 1068 | 2452 2952 0 1069 | 1576 2752 0 1070 | 493 597 1 1071 | 104 302 1 1072 | 411 618 1 1073 | 1208 2067 1 1074 | 2570 2570 1 1075 | 518 681 1 1076 | 316 562 1 1077 | 713 834 2 1078 | 1376 2303 1 1079 | 75 311 1 1080 | 227 415 1 1081 | 21 506 1 1082 | 144 552 1 1083 | 1728 2870 0 1084 | 2014 2545 1 1085 | 848 2929 0 1086 | 919 1438 1 1087 | 555 597 1 1088 | 141 490 1 1089 | 795 1303 1 1090 | 68 613 1 1091 | 1136 1139 1 1092 | 1370 3003 0 1093 | 1860 3014 0 1094 | 110 409 1 1095 | 157 268 1 1096 | 85 552 1 1097 | 74 283 1 1098 | 581 603 1 1099 | 56 562 1 1100 | 1408 1636 1 1101 | 1429 2628 0 1102 | 1133 2962 0 1103 | 194 396 1 1104 | 286 449 1 1105 | 297 492 1 1106 | 1642 2646 0 1107 | 992 3025 0 1108 | 32 715 1 1109 | 287 2952 3 1110 | 136 681 1 1111 | 1215 2768 0 1112 | 1363 2741 0 1113 | 19 327 1 1114 | 1088 2817 0 1115 | 277 841 2 1116 | 1663 2657 0 1117 | 148 536 1 1118 | -------------------------------------------------------------------------------- 
/data/SSLdata/PairDistance.txt: -------------------------------------------------------------------------------- 1 | 2836 2633 3 2 | 1498 1357 2 3 | 3040 582 2 4 | 1018 1333 2 5 | 1804 2136 4 6 | 2546 446 4 7 | 1351 1436 1 8 | 1289 2067 3 9 | 289 689 1 10 | 2158 995 3 11 | 824 1745 2 12 | 77 1946 4 13 | 2933 655 4 14 | 2316 720 3 15 | 2100 2068 1 16 | 630 723 4 17 | 2858 2749 4 18 | 1928 1922 3 19 | 1787 1438 2 20 | 96 757 3 21 | 2786 2145 2 22 | 1701 1013 2 23 | 2799 948 2 24 | 1979 2738 2 25 | 1665 1285 2 26 | 821 1271 4 27 | 558 120 1 28 | 1266 628 3 29 | 1201 2137 1 30 | 233 609 1 31 | 579 233 1 32 | 965 2707 2 33 | 2806 2174 3 34 | 902 545 2 35 | 669 600 2 36 | 2248 2264 3 37 | 1709 919 2 38 | 1338 2504 2 39 | 224 467 1 40 | 2062 2873 4 41 | 1101 2447 3 42 | 2020 1852 2 43 | 2871 1561 1 44 | 133 1982 4 45 | 1608 170 4 46 | 1367 2066 2 47 | 2815 1803 1 48 | 951 130 3 49 | 116 2683 2 50 | 745 815 2 51 | 1423 2070 3 52 | 366 1603 4 53 | 1195 3041 4 54 | 50 578 1 55 | 2512 1365 2 56 | 1577 1925 2 57 | 1246 1067 2 58 | 2505 1457 2 59 | 2784 1836 4 60 | 1565 1415 3 61 | 2687 2783 4 62 | 2067 452 2 63 | 190 513 1 64 | 682 144 1 65 | 3032 828 4 66 | 6 2717 4 67 | 412 3037 3 68 | 3031 1064 4 69 | 2754 1629 1 70 | 1485 889 2 71 | 2011 995 3 72 | 198 2259 2 73 | 229 238 1 74 | 4 1837 4 75 | 1295 1882 3 76 | 440 20 1 77 | 87 349 1 78 | 455 2297 2 79 | 787 1438 2 80 | 470 2246 3 81 | 2471 2811 3 82 | 2534 126 4 83 | 1726 1160 2 84 | 2273 1135 3 85 | 1151 5 4 86 | 3009 1705 3 87 | 2981 332 3 88 | 1636 2060 2 89 | 139 2986 3 90 | 79 410 1 91 | 657 2752 1 92 | 2276 2843 4 93 | 2272 1993 2 94 | 1073 1024 3 95 | 4 2801 4 96 | 1958 464 2 97 | 2570 1443 3 98 | 1198 405 3 99 | 1637 2844 1 100 | 1125 1888 4 101 | 690 560 1 102 | 1231 201 4 103 | 2740 2996 4 104 | 1490 575 3 105 | 163 1303 3 106 | 2871 1261 2 107 | 1906 1019 3 108 | 235 2533 2 109 | 2568 885 2 110 | 1421 542 3 111 | 50 2115 2 112 | 2176 1269 3 113 | 1653 1124 3 114 | 112 2904 4 115 | 2195 642 4 116 | 2283 617 4 117 | 1898 2985 4 118 | 2026 1023 4 119 | 331 294 1 120 | 2138 2789 4 121 | 237 707 1 122 | 2552 2975 4 123 | 43 1486 3 124 | 1276 2718 3 125 | 551 542 1 126 | 992 414 3 127 | 2961 702 4 128 | 53 1659 3 129 | 2024 2191 2 130 | 2089 274 3 131 | 2113 8 4 132 | 1641 1596 2 133 | 404 107 1 134 | 2027 500 4 135 | 2083 234 2 136 | 2423 1073 2 137 | 1967 2866 3 138 | 621 2766 4 139 | 1036 1380 2 140 | 704 2645 2 141 | 252 1832 2 142 | 1297 636 4 143 | 2679 1547 2 144 | 1548 739 3 145 | 2582 1301 4 146 | 732 2646 2 147 | 1847 2831 4 148 | 238 1576 2 149 | 2830 256 3 150 | 1043 1152 1 151 | 1588 1107 2 152 | 2374 2945 4 153 | 18 277 1 154 | 202 69 1 155 | 1927 1215 2 156 | 214 307 1 157 | 1308 2091 4 158 | 2374 187 2 159 | 2495 478 3 160 | 1057 2120 3 161 | 2677 2105 1 162 | 558 622 1 163 | 2879 1003 3 164 | 2846 516 3 165 | 1778 1784 4 166 | 347 226 1 167 | 1281 1170 4 168 | 161 10 4 169 | 943 1881 1 170 | 1365 200 4 171 | 164 554 1 172 | 1348 3032 3 173 | 298 66 1 174 | 1114 753 4 175 | 2141 1744 4 176 | 2459 416 3 177 | 410 233 1 178 | 2982 1244 4 179 | 58 557 1 180 | 595 353 1 181 | 2284 1467 2 182 | 2665 1787 1 183 | 1066 1862 1 184 | 133 1534 3 185 | 1371 1684 2 186 | 1209 1232 3 187 | 1126 2549 1 188 | 1392 101 3 189 | 194 2182 3 190 | 624 251 1 191 | 1038 642 4 192 | 1244 2437 4 193 | 278 52 1 194 | 2760 2636 4 195 | 371 1393 3 196 | 767 924 1 197 | 1362 99 2 198 | 2907 707 2 199 | 1452 2280 2 200 | 2877 2816 4 201 | 1339 2097 3 202 | 177 194 1 203 | 2599 1972 3 204 | 536 2961 4 205 | 530 549 2 206 | 688 1185 4 207 | 1056 2783 4 208 | 
525 2382 2 209 | 1168 1985 3 210 | 238 2282 2 211 | 1834 1015 2 212 | 2570 2138 4 213 | 1550 2436 3 214 | 166 298 1 215 | 955 1431 2 216 | 1529 2430 1 217 | 2390 3012 3 218 | 816 1252 2 219 | 687 2923 2 220 | 222 554 1 221 | 2732 2703 3 222 | 2288 1946 4 223 | 405 218 2 224 | 2590 1723 3 225 | 2807 832 1 226 | 1668 2688 1 227 | 2239 29 2 228 | 1809 476 4 229 | 1508 3014 1 230 | 49 275 1 231 | 1931 1688 2 232 | 90 2600 3 233 | 1759 2620 4 234 | 1326 1993 3 235 | 121 2428 2 236 | 417 661 1 237 | 2599 423 3 238 | 302 559 1 239 | 2981 1297 3 240 | 2478 400 1 241 | 2060 1275 4 242 | 1219 446 4 243 | 202 538 1 244 | 2070 1609 2 245 | 2496 1502 3 246 | 1526 2598 2 247 | 31 1531 4 248 | 2929 1564 1 249 | 1237 455 3 250 | 1462 2133 3 251 | 133 672 1 252 | 1306 2766 4 253 | 318 109 1 254 | 1801 2507 2 255 | 1735 1462 3 256 | 268 68 1 257 | 748 3020 4 258 | 1087 1471 4 259 | 1300 662 4 260 | 379 726 2 261 | 1318 2405 4 262 | 1380 20 4 263 | 1964 2528 3 264 | 1962 1417 3 265 | 2712 539 3 266 | 1467 1308 2 267 | 978 2952 1 268 | 3000 2067 1 269 | 544 621 1 270 | 2559 1575 2 271 | 395 554 1 272 | 512 2567 3 273 | 540 510 1 274 | 2681 2593 2 275 | 2224 1315 4 276 | 1884 1044 2 277 | 1891 2918 3 278 | 2574 1132 2 279 | 6 1428 3 280 | 383 571 1 281 | 806 2602 3 282 | 2977 2895 4 283 | 1277 335 3 284 | 301 1217 3 285 | 1494 2804 1 286 | 368 1901 3 287 | 221 953 4 288 | 1553 1023 4 289 | 2734 2277 1 290 | 10 2457 4 291 | 2998 1029 3 292 | 608 1653 4 293 | 2132 1433 2 294 | 2954 1851 2 295 | 74 241 1 296 | 3029 1560 4 297 | 863 869 4 298 | 1510 3005 4 299 | 1544 1946 4 300 | 1476 668 3 301 | 2859 2457 4 302 | 820 2917 3 303 | 295 2027 4 304 | 337 2388 2 305 | 73 25 1 306 | 102 1348 2 307 | 2525 922 2 308 | 388 412 2 309 | 156 1659 3 310 | 2438 2583 4 311 | 573 335 1 312 | 718 599 1 313 | 763 942 3 314 | 74 506 1 315 | 1304 1515 2 316 | 2523 58 2 317 | 1949 32 2 318 | 656 420 1 319 | 540 73 1 320 | 267 1988 2 321 | 1247 155 4 322 | 2965 1329 2 323 | 1097 207 4 324 | 2506 111 3 325 | 418 191 1 326 | 81 618 1 327 | 492 982 3 328 | 2506 1374 2 329 | 1501 318 3 330 | 2885 515 2 331 | 1869 1729 4 332 | 280 249 1 333 | 2606 864 3 334 | 219 2556 4 335 | 473 1323 4 336 | 2418 2992 4 337 | 1246 8 4 338 | 2570 571 2 339 | 2477 1983 4 340 | 2871 2044 2 341 | 1735 452 3 342 | 2118 1176 4 343 | 1715 1962 4 344 | 140 555 1 345 | 565 676 1 346 | 931 2507 4 347 | 2672 2165 4 348 | 555 574 1 349 | 632 157 1 350 | 783 1196 1 351 | 2748 644 4 352 | 2303 1772 1 353 | 138 488 1 354 | 520 623 1 355 | 2280 2260 3 356 | 1244 2762 4 357 | 383 290 1 358 | 1984 1818 3 359 | 198 2329 3 360 | 1381 1730 3 361 | 2789 2453 4 362 | 1022 1696 2 363 | 490 1240 3 364 | 2716 2977 4 365 | 1049 1804 4 366 | 269 1240 4 367 | 79 312 1 368 | 166 2277 2 369 | 2223 1677 3 370 | 1059 707 2 371 | 2673 475 4 372 | 2677 1996 1 373 | 1986 1448 4 374 | 415 65 1 375 | 2900 1038 2 376 | 771 486 4 377 | 998 1950 3 378 | 1043 2660 2 379 | 1816 1287 4 380 | 1472 2288 4 381 | 1401 2301 2 382 | 2578 435 4 383 | 71 93 1 384 | 820 2108 3 385 | 1330 40 3 386 | 2490 898 2 387 | 869 3011 4 388 | 837 720 1 389 | 1085 1649 2 390 | 2142 3026 4 391 | 1036 1178 2 392 | 160 1038 3 393 | 2515 1807 2 394 | 383 371 1 395 | 861 2777 3 396 | 2620 782 4 397 | 863 980 4 398 | 571 1320 1 399 | 625 2558 4 400 | 2515 1968 1 401 | 2020 1226 4 402 | 644 566 2 403 | 74 1783 3 404 | 239 413 1 405 | 339 431 1 406 | 1670 1568 3 407 | 1833 7 4 408 | 919 2974 1 409 | 1200 789 3 410 | 1604 1040 2 411 | 244 2902 4 412 | 505 668 3 413 | 2704 467 2 414 | 144 245 1 415 | 2977 354 4 416 | 1924 259 4 
417 | 413 568 1 418 | 1866 1793 3 419 | 167 1995 2 420 | 417 127 1 421 | 3032 509 4 422 | 2022 2331 1 423 | 1656 1337 4 424 | 518 252 1 425 | 2741 2954 2 426 | 706 1983 3 427 | 564 66 1 428 | 2720 1108 2 429 | 2704 1770 1 430 | 112 794 4 431 | 749 1245 1 432 | 177 261 2 433 | 173 591 2 434 | 422 401 2 435 | 1074 2628 1 436 | 2990 473 4 437 | 1228 2679 3 438 | 912 734 4 439 | 307 1348 2 440 | 1229 318 3 441 | 1327 955 2 442 | 1165 1454 3 443 | 1607 1179 2 444 | 924 2809 2 445 | 2537 1470 2 446 | 1307 280 4 447 | 2275 1363 1 448 | 794 1681 2 449 | 213 2806 1 450 | 2973 974 1 451 | 799 156 4 452 | 2617 1170 3 453 | 2641 616 4 454 | 3020 2864 4 455 | 2265 2071 3 456 | 1232 1929 3 457 | 1040 1549 3 458 | 672 117 1 459 | 2992 571 1 460 | 428 522 1 461 | 395 855 3 462 | 1870 269 4 463 | 1594 2765 1 464 | 2945 2763 4 465 | 1959 3015 1 466 | 304 1722 4 467 | 2652 1656 3 468 | 2164 2795 3 469 | 2504 2675 2 470 | 430 2292 4 471 | 2288 2634 4 472 | 289 37 1 473 | 1897 221 4 474 | 511 1953 3 475 | 43 2892 3 476 | 113 547 1 477 | 2644 30 4 478 | 94 1752 2 479 | 2586 2006 2 480 | 497 2699 2 481 | 604 172 2 482 | 181 2979 4 483 | 860 434 4 484 | 2067 1556 2 485 | 2649 490 2 486 | 2824 1634 2 487 | 3023 1457 1 488 | 32 121 1 489 | 2 696 1 490 | 645 103 1 491 | 858 362 3 492 | 2570 1197 3 493 | 3041 882 3 494 | 2751 673 4 495 | 1768 2820 4 496 | 1610 2018 3 497 | 1959 1729 2 498 | 735 2948 3 499 | 1667 555 2 500 | 118 833 3 501 | 2976 2544 3 502 | 492 140 1 503 | 1396 986 2 504 | 69 74 1 505 | 1764 932 1 506 | 2768 2492 1 507 | 1475 836 2 508 | 2421 593 3 509 | 246 425 2 510 | 2174 592 2 511 | 1679 622 4 512 | 2405 1047 3 513 | 2954 1550 3 514 | 2633 915 4 515 | 1205 550 4 516 | 2232 1589 2 517 | 2270 203 2 518 | 3038 115 2 519 | 262 69 1 520 | 1592 2784 4 521 | 706 350 1 522 | 2679 1366 1 523 | 2089 1484 1 524 | 763 694 3 525 | 735 323 2 526 | 1968 733 1 527 | 513 58 1 528 | 411 871 4 529 | 43 2982 3 530 | 479 237 1 531 | -------------------------------------------------------------------------------- /data/SSLdata/PathClass.txt: -------------------------------------------------------------------------------- 1 | 568 188 802 1968 0 2 | 288 959 703 965 7 3 | 247 959 538 1017 7 4 | 649 2960 868 1637 15 5 | 1388 503 103 41 2 6 | 852 2752 58 265 6 7 | 1016 345 2722 269 14 8 | 2238 589 492 517 2 9 | 1429 1493 2973 534 16 10 | 2215 2615 1634 32 12 11 | 741 2508 444 127 4 12 | 645 279 2800 1088 5 13 | 1093 67 2866 67 14 14 | 2125 1752 265 336 4 15 | 992 2988 2520 369 0 16 | 694 2768 1502 2193 15 17 | 91 1397 1074 2295 9 18 | 580 60 936 1726 3 19 | 7 267 1450 1019 3 20 | 1014 274 2754 274 14 21 | 251 1540 1539 959 9 22 | 2314 2693 235 38 6 23 | 1036 1598 2067 341 10 24 | 957 941 538 627 4 25 | 159 2755 585 2568 13 26 | 382 263 2768 2180 5 27 | 1941 1436 2223 716 10 28 | 2059 394 2061 381 8 29 | 571 154 28 1752 1 30 | 567 164 1540 1070 3 31 | 1256 2955 1016 507 12 32 | 247 957 2615 2121 11 33 | 1074 3015 1516 339 12 34 | 719 553 67 1997 1 35 | 1951 2970 79 88 6 36 | 314 2742 2312 2388 15 37 | 648 246 2768 1871 5 38 | 1718 2695 337 526 6 39 | 1907 941 152 604 4 40 | 1664 2615 1949 40 12 41 | 1637 251 2480 317 0 42 | 1016 564 2692 564 14 43 | 1968 957 2907 424 16 44 | 74 90 219 1634 1 45 | 1982 1376 2905 399 16 46 | 317 1949 1214 1026 9 47 | 1043 1650 3006 116 16 48 | 1474 1639 2768 398 16 49 | 533 18 556 941 1 50 | 94 100 837 1890 0 51 | 355 1634 355 760 7 52 | 306 1292 2409 1283 0 53 | 1873 278 2228 689 8 54 | 345 1093 1727 2031 9 55 | 327 2619 1122 1598 15 56 | 835 252 1449 180 8 57 | 895 2893 901 328 12 58 | 542 
154 1212 1977 3 59 | 659 1558 659 1559 7 60 | 9 558 2733 918 5 61 | 1344 3013 2228 526 12 62 | 670 2746 2228 2432 0 63 | 1486 2818 172 116 6 64 | 575 849 512 2226 7 65 | 1343 2722 1293 175 12 66 | 1918 1918 2828 182 16 67 | 478 222 2846 1450 5 68 | 556 2835 1498 2153 15 69 | 38 557 2932 1642 5 70 | 659 312 2226 1941 3 71 | 1968 957 29 148 4 72 | 1604 322 655 285 0 73 | 1010 2752 143 382 6 74 | 525 1754 62 837 7 75 | 1514 739 1514 354 10 76 | 2308 910 2815 139 16 77 | 1900 715 2725 113 0 78 | 2481 344 2661 496 14 79 | 713 834 2897 971 11 80 | 256 2857 960 1671 15 81 | 1485 2850 184 57 6 82 | 670 279 418 2228 1 83 | 642 251 517 1818 1 84 | 541 18 169 1604 1 85 | 1634 603 2103 133 8 86 | 2537 741 2152 256 10 87 | 481 2686 416 1381 13 88 | 312 127 935 1726 3 89 | 1605 322 3030 167 14 90 | 164 1936 787 2212 9 91 | 185 941 957 941 9 92 | 1208 2178 651 365 4 93 | 974 2765 138 366 6 94 | 152 954 706 954 7 95 | 1017 3030 97 462 6 96 | 1540 101 664 206 2 97 | 1638 372 1641 148 8 98 | 2230 619 3039 619 14 99 | 397 21 2938 1990 5 100 | 2408 2408 2897 148 16 101 | 714 2825 250 1564 13 102 | 1572 538 2695 297 14 103 | 1314 472 2869 413 14 104 | 2439 129 1461 648 8 105 | 631 21 415 833 1 106 | 527 279 964 1019 3 107 | 1409 2823 173 98 6 108 | 2518 2518 149 431 4 109 | 611 2067 1884 1884 0 110 | 422 1016 3021 1229 11 111 | 1418 1261 322 489 4 112 | 140 2952 1509 2252 15 113 | 1742 1449 60 369 4 114 | 932 2148 574 527 4 115 | 1444 510 2228 645 8 116 | 87 1514 2041 1514 9 117 | 936 2692 959 578 12 118 | 264 2863 72 902 13 119 | 1014 2695 13 114 0 120 | 1641 682 457 646 0 121 | 494 172 171 1099 1 122 | 474 150 1450 1019 3 123 | 837 252 1860 667 8 124 | 604 1429 1973 2457 9 125 | 1 2749 1770 1770 15 126 | 408 1752 2955 2256 11 127 | 2065 2065 2867 443 16 128 | 2313 2924 75 481 6 129 | 947 97 395 476 2 130 | 887 658 343 328 0 131 | 796 959 122 209 4 132 | 1122 2974 2378 69 12 133 | 285 2674 393 2238 13 134 | 712 3 2934 2264 5 135 | 103 2622 1637 1528 15 136 | 997 277 1860 348 8 137 | 2247 2951 1127 255 12 138 | 312 1605 3038 936 11 139 | 2066 403 2847 32 14 140 | 959 103 463 39 2 141 | 1404 424 1430 335 8 142 | 263 429 2958 1401 0 143 | 671 1011 3030 1333 11 144 | 1604 2846 222 696 6 145 | 480 2226 2723 987 11 146 | 689 2228 1973 1591 9 147 | 273 2919 1587 1053 0 148 | 245 2910 362 1300 13 149 | 481 2863 522 1820 0 150 | 1770 2923 417 518 6 151 | 6 1204 3014 1021 11 152 | 2119 1641 2698 372 16 153 | 454 2227 1664 2599 9 154 | 941 644 662 429 0 155 | 2228 227 77 84 2 156 | 1452 1045 1210 149 10 157 | 460 2785 1718 1303 15 158 | 533 1700 1083 2271 0 159 | 1808 628 1900 628 8 160 | 715 1204 1592 2113 9 161 | 1482 1814 34 236 4 162 | 438 780 2646 1415 11 163 | 1029 709 1030 709 8 164 | 308 1351 2004 2467 9 165 | 1528 1618 1461 113 10 166 | 54 1292 298 2226 7 167 | 1197 1754 95 420 4 168 | 1998 1181 339 224 4 169 | 693 2508 3000 1413 11 170 | 355 1540 2638 992 11 171 | 2185 2622 2067 220 12 172 | 215 3036 1408 1528 15 173 | 319 2929 449 1514 13 174 | 1997 2228 278 96 4 175 | 1977 1751 488 17 4 176 | 1727 2692 1213 142 12 177 | 13 2657 1594 2491 15 178 | 817 2952 251 334 6 179 | 338 193 2311 1179 3 180 | 834 3014 355 228 6 181 | 546 450 707 2241 1 182 | 211 1233 1201 1622 0 183 | 663 2863 135 1514 13 184 | 747 1514 282 149 4 185 | 1634 183 1015 162 8 186 | 194 84 1751 1754 3 187 | 557 2805 2540 2549 15 188 | 461 2786 194 837 13 189 | 1588 3002 472 541 6 190 | 38 2107 2768 2022 11 191 | 135 2714 135 1514 0 192 | 837 337 2733 337 14 193 | 790 138 2765 681 14 194 | 721 1351 3030 322 16 195 | 265 787 198 936 7 196 | 
2184 2180 238 174 4 197 | 349 1351 1918 2154 9 198 | 1540 452 2786 461 14 199 | 1465 2765 138 46 6 200 | 1279 1279 2695 597 16 201 | 1444 1446 42 92 4 202 | 1449 212 3030 212 14 203 | 1070 2074 51 282 4 204 | 2022 3037 2228 351 12 205 | 627 1014 2695 847 11 206 | 2226 689 3021 120 14 207 | 551 290 2915 959 5 208 | 2330 959 545 256 4 209 | 557 411 267 1514 1 210 | 135 2991 131 1014 13 211 | 163 653 2615 1481 5 212 | 1779 1864 2905 551 16 213 | 970 1181 504 417 4 214 | 2568 586 2753 546 14 215 | 1398 2324 1892 449 10 216 | 251 1540 533 1540 7 217 | 263 674 1017 1542 3 218 | 2384 2377 251 517 4 219 | 142 1171 142 1216 7 220 | 3 2934 236 947 13 221 | 172 2818 1486 2209 15 222 | 1303 1907 941 687 10 223 | 538 418 1414 1027 3 224 | 671 1011 653 1110 7 225 | 1015 199 713 418 2 226 | 572 1878 711 2131 7 227 | 567 504 63 1540 1 228 | 2236 385 2282 385 8 229 | 1514 300 681 71 2 230 | 1446 273 462 566 2 231 | 177 2692 1228 1227 15 232 | 212 3030 1203 2613 15 233 | 2409 1754 391 526 4 234 | 895 2893 2287 275 12 235 | 1413 472 2952 590 14 236 | 1638 2628 335 244 6 237 | 576 204 560 935 1 238 | 304 492 185 947 1 239 | 324 460 135 1514 1 240 | 683 1621 681 2581 7 241 | 661 431 2373 2295 3 242 | 338 2965 338 899 13 243 | 218 342 72 899 1 244 | 256 2151 469 1292 7 245 | 787 265 246 238 2 246 | 1248 3015 2372 508 12 247 | 409 679 2615 1916 5 248 | 954 307 1949 589 8 249 | 211 1418 211 1455 7 250 | 1307 1305 2377 251 10 251 | 542 1214 2971 837 11 252 | 1770 2920 709 3 6 253 | 50 694 2768 1084 5 254 | 2492 2951 1460 6 12 255 | 1773 1451 510 476 4 256 | 1860 2746 286 268 6 257 | 994 324 2886 458 14 258 | 1495 1641 148 326 4 259 | 212 3030 214 835 13 260 | 1437 1579 2768 356 16 261 | 238 129 948 2388 3 262 | 115 1016 2723 2226 11 263 | 477 2955 27 901 13 264 | 102 3021 1485 1043 15 265 | 1099 234 1495 435 8 266 | 834 541 1608 16 8 267 | 760 202 102 227 2 268 | 764 1359 3030 167 16 269 | 2336 2103 2103 421 10 270 | 283 265 760 796 3 271 | 422 335 2952 1662 5 272 | 2421 2227 353 131 4 273 | 907 2915 290 534 6 274 | 2066 174 2871 174 14 275 | 2313 3008 503 267 6 276 | 974 2684 1451 342 12 277 | 577 2634 341 2067 0 278 | 214 2763 1463 969 0 279 | 1447 307 132 84 2 280 | 704 1404 1969 1398 0 281 | 1404 234 1099 296 8 282 | 561 2752 2547 1452 15 283 | 2161 1036 2642 136 16 284 | 691 1514 643 1514 7 285 | 2066 342 37 279 2 286 | 796 2731 253 224 6 287 | 849 512 3030 107 14 288 | 905 2733 618 465 0 289 | 706 941 970 2137 9 290 | 368 2863 1109 1948 15 291 | 861 136 862 388 8 292 | 336 841 2790 1590 11 293 | 1540 212 1540 263 8 294 | 1564 2825 714 417 6 295 | 935 936 180 488 4 296 | 546 2268 2952 1986 0 297 | 300 713 102 1444 1 298 | 1446 1445 2752 370 16 299 | 1468 1468 2847 325 16 300 | 1947 1668 2870 29 0 301 | 234 2768 324 2049 13 302 | 285 1540 1973 2078 9 303 | 424 326 531 959 1 304 | 1211 2722 1455 492 12 305 | 131 936 935 1973 9 306 | 1230 3021 684 512 6 307 | 1139 1444 511 187 4 308 | 2226 510 3021 52 14 309 | 423 558 2733 979 5 310 | 363 2372 1702 2585 0 311 | 208 867 734 732 9 312 | 332 556 941 2053 0 313 | 645 245 1540 2419 3 314 | 2319 2742 267 122 6 315 | 907 2952 66 39 6 316 | 13 2779 1668 1428 15 317 | 273 2887 2375 1305 15 318 | 654 58 73 780 0 319 | 1941 2226 21 112 0 320 | 574 519 3030 961 5 321 | 130 835 3030 1371 11 322 | 1002 2741 1074 645 12 323 | 1754 95 88 196 2 324 | 1029 709 2920 541 14 325 | 192 2568 165 2568 7 326 | 159 998 2857 1594 11 327 | 934 372 1638 500 8 328 | 710 607 797 2054 3 329 | 1931 263 1631 683 0 330 | 808 1728 2692 298 16 331 | 463 523 936 1726 0 332 | 1404 483 2723 483 14 333 
| 1752 163 2898 356 14 334 | 561 294 1860 1430 3 335 | 600 222 2846 1409 5 336 | 224 998 1145 1201 9 337 | 1002 67 1997 400 8 338 | 488 2227 2677 1628 11 339 | 228 544 2768 1672 5 340 | 198 2821 198 1540 13 341 | 1997 1514 519 57 4 342 | 393 2674 393 2238 13 343 | 875 2989 281 324 6 344 | 1444 1446 1444 430 10 345 | 899 72 279 165 2 346 | 2295 2373 431 333 4 347 | 164 251 2377 2384 3 348 | 535 2120 2121 2203 9 349 | 1583 1303 2771 487 16 350 | 1622 1540 543 573 0 351 | 405 474 511 1451 1 352 | 2007 1608 491 115 4 353 | 134 2999 1051 1606 15 354 | 672 2768 1570 1521 15 355 | 848 2929 449 96 6 356 | 1341 3038 1453 684 12 357 | 645 544 2635 992 0 358 | 2256 2504 1404 609 10 359 | 899 113 712 89 2 360 | 691 2944 691 1351 13 361 | 555 2870 992 2150 15 362 | 2235 930 2897 148 16 363 | 2553 572 324 500 2 364 | 659 350 185 941 1 365 | 956 960 2768 411 16 366 | 1604 637 2952 590 14 367 | 34 492 3045 2198 5 368 | 85 576 2952 1074 5 369 | 1314 186 1314 563 8 370 | 515 936 1726 2421 9 371 | 166 432 957 1726 3 372 | 111 1015 2805 796 11 373 | 845 22 845 22 8 374 | 275 2287 2670 969 11 375 | 992 992 2850 271 16 376 | 472 115 440 941 1 377 | 60 2890 60 1450 13 378 | 1451 573 1017 492 8 379 | 565 1244 27 901 0 380 | 1224 2897 91 520 6 381 | 225 61 2881 850 5 382 | 1438 2313 2924 361 16 383 | 629 2017 3037 2240 11 384 | 571 2912 571 1743 13 385 | 399 2905 1415 2208 15 386 | 1418 1206 1665 682 10 387 | 1641 1495 197 473 4 388 | 681 1830 1564 1132 9 389 | 641 2457 641 1028 0 390 | 1116 1029 1444 293 0 391 | 1860 2897 520 503 6 392 | 1016 3021 77 696 6 393 | 1425 625 2686 600 14 394 | 115 444 2952 924 5 395 | 1135 2844 710 414 6 396 | 2252 965 2986 17 16 397 | 941 307 2790 307 14 398 | 361 2227 2227 1664 9 399 | 287 499 184 2227 1 400 | 1318 1567 2615 569 16 401 | 2358 2755 419 191 6 402 | 289 941 2701 1069 11 403 | 420 1665 424 1444 7 404 | 983 2768 1446 198 12 405 | 1663 2998 1451 407 12 406 | 590 2952 1735 1110 15 407 | 718 127 148 1641 1 408 | 486 358 2897 2178 5 409 | 1650 2954 2111 668 12 410 | 2407 2871 901 692 12 411 | 1015 630 1016 633 8 412 | 19 3044 19 878 13 413 | 2571 450 2571 192 8 414 | 2586 2628 1634 133 12 415 | 965 365 101 522 2 416 | 962 2650 343 593 6 417 | 2441 2240 1976 208 10 418 | 966 2645 1093 481 12 419 | 71 715 1139 919 3 420 | 354 1514 2209 922 9 421 | 458 590 2952 2197 5 422 | 1751 1752 408 354 4 423 | 495 349 1752 2125 3 424 | 52 2226 510 2479 7 425 | 1641 2810 213 347 6 426 | 381 2870 478 834 13 427 | 554 1540 57 1449 7 428 | 485 717 1577 1108 3 429 | 947 692 901 27 8 430 | 1462 497 115 474 0 431 | 364 796 2036 2354 9 432 | 1397 889 2719 72 16 433 | 260 230 294 1752 1 434 | 270 506 102 1455 1 435 | 1201 2812 1039 281 12 436 | 117 2872 93 2372 13 437 | 324 2768 324 2049 13 438 | 129 1257 1598 1918 0 439 | 325 540 965 965 3 440 | 601 396 1014 833 3 441 | 2237 393 558 520 2 442 | 842 3030 322 50 0 443 | 103 2952 1193 2186 15 444 | 348 573 66 1737 1 445 | 49 199 1540 1539 3 446 | 606 957 365 965 7 447 | 125 66 2622 1095 5 448 | 2571 220 698 602 2 449 | 1542 620 2692 18 0 450 | 605 560 103 957 1 451 | 2053 1808 2952 595 16 452 | 1635 362 2924 486 14 453 | 1019 1450 703 359 4 454 | 584 2372 2677 1563 11 455 | 1588 1451 41 289 4 456 | 594 2850 99 1196 13 457 | 1803 2615 1949 311 12 458 | 380 58 1018 1224 3 459 | 103 959 2815 940 11 460 | 1377 1377 3044 660 16 461 | 152 431 184 2228 1 462 | 377 356 1752 1752 3 463 | 2067 638 379 588 2 464 | 1095 2800 1093 481 12 465 | 2227 1015 902 304 10 466 | 1740 1596 2889 63 16 467 | 2595 2508 693 497 4 468 | 2024 1559 659 85 4 469 | 1776 1014 833 
177 10 470 | 1571 2838 127 597 6 471 | 1876 2915 1577 558 12 472 | 948 129 1404 248 8 473 | 350 294 2877 2287 5 474 | 416 488 2832 1474 5 475 | 1445 1444 573 389 4 476 | 119 2227 764 1043 9 477 | 1891 1641 3014 355 16 478 | 1428 3015 2372 64 12 479 | 2358 2811 405 447 6 480 | 346 1514 445 1503 7 481 | 593 621 354 1514 1 482 | 992 2783 670 287 6 483 | 245 2017 2970 1514 11 484 | 1514 2742 418 450 6 485 | 69 2952 239 760 13 486 | 1454 138 1161 94 8 487 | 541 1910 205 1910 0 488 | 121 187 885 1624 3 489 | 1554 2409 1754 647 10 490 | 760 760 177 659 4 491 | 1451 2723 483 452 6 492 | 191 2518 583 2213 0 493 | 40 2835 242 1444 13 494 | 507 1016 214 2227 7 495 | 899 1303 902 673 10 496 | 221 2854 2568 1482 15 497 | 1432 1663 2883 122 16 498 | 1493 2792 389 720 6 499 | 1445 2974 959 66 12 500 | 2288 275 67 327 2 501 | 1224 3038 935 230 12 502 | 562 2872 408 1752 13 503 | 826 1823 2570 576 0 504 | 648 1461 2810 2186 11 505 | 51 291 409 1444 1 506 | 552 2752 573 1454 13 507 | 1529 1445 102 656 4 508 | 84 1752 2125 1823 9 509 | 584 2372 697 2374 7 510 | 354 2803 886 886 15 511 | 226 524 84 1751 1 512 | 947 120 3021 684 14 513 | 1448 667 1455 396 8 514 | 694 1910 694 1910 7 515 | 1638 2883 29 402 6 516 | 370 2752 307 1450 13 517 | 174 563 2334 1592 3 518 | 1139 2752 1475 523 12 519 | 597 2695 124 835 13 520 | 1211 2871 837 461 12 521 | 1461 47 193 17 2 522 | 260 208 2333 1074 0 523 | 383 516 2761 2313 5 524 | 703 959 103 957 7 525 | 138 1743 138 739 7 526 | 225 954 198 1454 7 527 | 1303 3021 120 143 6 528 | 41 1727 1416 1551 0 529 | 2316 2736 86 449 0 530 | 586 2753 192 2568 13 531 | 2067 341 2066 426 8 532 | 145 835 3038 1344 11 533 | 208 541 2920 1466 5 534 | 2053 941 77 225 4 535 | 1951 3021 179 57 6 536 | 393 2674 393 947 13 537 | 619 600 492 1450 1 538 | 1030 1444 595 393 4 539 | 1019 1450 2792 288 16 540 | 65 305 2226 1941 3 541 | 691 1351 2480 1076 9 542 | 449 2579 2579 1397 9 543 | 1854 1726 936 573 10 544 | 16 2609 2741 1567 11 545 | 333 194 2786 1074 5 546 | 190 143 1540 1441 3 547 | 648 2752 1493 1494 15 548 | 506 3043 967 1911 15 549 | 1404 129 2952 238 14 550 | 536 199 1014 721 3 551 | 330 1018 1224 1018 9 552 | 480 2227 176 1036 7 553 | 603 683 2912 1159 5 554 | 1048 1634 334 719 4 555 | 522 2743 1196 1997 15 556 | 546 983 1724 2030 9 557 | 131 424 604 1429 1 558 | 1907 835 173 329 4 559 | 2439 1485 2721 601 16 560 | 519 1514 2871 1468 11 561 | 325 2666 974 1194 0 562 | 616 75 204 957 1 563 | 613 520 2887 1864 5 564 | 54 3001 2258 2146 15 565 | 243 1293 258 1449 7 566 | 545 760 760 796 9 567 | 1576 68 712 204 0 568 | 2568 192 1986 586 8 569 | 2303 2119 1718 20 10 570 | 1116 515 947 393 0 571 | 118 2866 279 1083 0 572 | 540 106 553 1514 0 573 | 2309 173 534 168 2 574 | 560 257 97 1014 1 575 | 1212 376 85 382 2 576 | 1136 2752 634 659 6 577 | 1719 2804 2233 261 12 578 | 314 2742 2319 1994 15 579 | 1404 308 2924 171 14 580 | 620 300 2742 2312 5 581 | 232 2226 1941 1945 9 582 | 60 2890 895 888 15 583 | 660 3044 1377 1394 15 584 | 1376 2910 308 564 6 585 | 522 2743 2316 2119 15 586 | 1819 639 1819 639 8 587 | 1929 653 1929 680 8 588 | 428 2383 979 1043 9 589 | 450 174 247 959 1 590 | 780 327 780 472 8 591 | 1615 1909 2752 395 16 592 | 1216 1214 3030 97 16 593 | 1303 3030 305 516 6 594 | 411 2768 1509 924 15 595 | 619 600 1918 2504 3 596 | 2 2837 363 1083 13 597 | 250 114 2615 1498 0 598 | 1720 2645 965 524 12 599 | 73 1008 1920 1997 0 600 | 152 2228 1973 1535 9 601 | 2145 1141 2752 561 16 602 | 974 2879 88 319 6 603 | 219 520 3040 1506 5 604 | 1949 1214 1216 347 10 605 | 386 2236 2689 1035 11 
606 | 1598 250 2579 449 8 607 | 428 2383 428 2383 7 608 | 1447 668 3021 268 14 609 | 841 130 834 23 8 610 | 2313 2924 164 688 6 611 | 525 2768 2397 2386 15 612 | 347 1017 1017 1212 0 613 | 45 959 3035 1770 11 614 | 156 199 2966 2543 5 615 | 1441 1514 118 240 4 616 | 637 212 3030 1018 5 617 | 197 1156 243 1293 7 618 | 1127 1139 1444 96 10 619 | 88 2879 641 796 13 620 | 53 311 1429 1493 3 621 | 1468 2872 117 370 6 622 | 684 3021 721 1421 15 623 | 658 154 2968 1157 0 624 | 1016 62 371 557 2 625 | 2066 325 2067 363 8 626 | 314 1450 212 2228 7 627 | 677 2884 535 1593 0 628 | 23 834 3038 1217 11 629 | 237 941 1907 959 9 630 | 1136 3030 97 665 6 631 | 508 2650 1485 2439 15 632 | 1172 224 2659 557 14 633 | 974 3021 2067 296 12 634 | 500 197 2837 2539 5 635 | 522 2924 2313 2252 15 636 | 403 2066 2064 1482 9 637 | 2376 647 1449 106 8 638 | 1441 1973 1429 634 10 639 | 899 1762 2952 409 16 640 | 409 597 1604 1043 3 641 | 1973 1751 1754 258 10 642 | 184 2850 2315 2311 15 643 | 340 130 837 2335 3 644 | 2484 709 2920 541 14 645 | 2317 2628 1451 268 12 646 | 595 626 2768 2258 5 647 | 165 279 184 919 1 648 | 1821 2952 204 248 6 649 | 2227 454 2686 454 14 650 | 369 3025 1497 742 15 651 | 1241 711 2675 711 14 652 | 413 1019 345 2227 7 653 | 2441 802 417 259 4 654 | 168 2659 224 1918 13 655 | 2226 277 106 219 2 656 | 144 21 1444 1446 0 657 | 520 2887 32 1634 13 658 | 978 2742 38 1 6 659 | 645 277 180 1455 1 660 | 1596 2889 63 194 6 661 | 311 1429 561 1449 7 662 | 704 108 440 947 1 663 | 2436 1303 965 545 10 664 | 2226 2224 716 664 0 665 | 130 70 3040 2103 5 666 | 254 499 2615 2012 5 667 | 135 442 2372 1528 3 668 | 332 187 286 1014 0 669 | 2067 363 437 13 2 670 | 2228 2733 105 421 6 671 | 367 2066 2752 1229 11 672 | 834 478 2870 29 14 673 | 1634 177 559 592 2 674 | 583 72 302 992 1 675 | 643 1916 161 1441 7 676 | 961 3043 368 242 6 677 | 667 1540 1540 1540 9 678 | 978 2742 38 1 6 679 | 191 403 2067 2052 3 680 | 379 2358 286 1430 7 681 | 1193 2974 1210 102 12 682 | 2238 40 90 661 2 683 | 688 2690 688 901 13 684 | 1655 1144 1461 113 0 685 | 837 75 3043 207 14 686 | 368 959 2700 847 11 687 | 28 2228 77 2228 7 688 | 2452 1514 282 654 4 689 | 1204 711 25 583 2 690 | 2236 71 518 623 2 691 | 2209 1514 519 361 4 692 | 2378 69 2378 69 8 693 | 11 2774 131 1014 13 694 | 1886 238 2628 379 14 695 | 348 1540 492 837 7 696 | 640 1209 640 1209 7 697 | 532 533 432 957 1 698 | 263 66 2752 1096 0 699 | 1438 745 1015 467 10 700 | 176 159 573 1447 1 701 | 1455 143 1017 497 8 702 | 1739 2955 27 686 6 703 | 941 68 2695 664 14 704 | 628 1808 304 902 7 705 | 1067 492 190 160 2 706 | 282 2744 1068 1436 15 707 | 591 409 93 2226 0 708 | 218 2863 1100 1133 15 709 | 939 1526 1019 448 10 710 | 1450 1019 1450 111 10 711 | 567 2984 94 828 13 712 | 328 3015 864 864 15 713 | 1430 56 53 397 0 714 | 1442 1740 1430 15 10 715 | 372 2237 1418 2465 9 716 | 22 703 1449 1526 3 717 | 394 2061 394 2061 7 718 | 1994 2973 1604 637 12 719 | 706 954 2992 1638 0 720 | 234 1083 2974 1903 11 721 | 2577 939 2772 309 16 722 | 2066 426 13 82 2 723 | 2105 2154 3014 276 16 724 | 1522 1515 2952 29 16 725 | 1083 2299 1083 495 10 726 | 499 1605 499 1915 7 727 | 1526 1449 106 711 4 728 | 1590 2722 1212 154 12 729 | 2330 2313 2894 505 16 730 | 144 408 1754 1690 3 731 | 1518 3017 1754 408 12 732 | 546 2724 546 2568 13 733 | 263 1540 524 1450 7 734 | 82 1172 1172 1172 9 735 | 1572 2924 522 615 6 736 | 2409 959 965 409 10 737 | 1876 1427 981 646 10 738 | 2371 398 2372 70 8 739 | 941 490 3043 74 14 740 | 1360 2869 2066 171 12 741 | 341 2837 424 1293 0 742 | 547 400 1997 2480 
3 743 | 93 438 2615 1372 5 744 | 833 97 190 252 2 745 | 235 618 2952 2375 5 746 | 2336 1359 2752 132 16 747 | 1726 2621 796 25 12 748 | 1754 163 88 241 2 749 | 590 1019 345 1424 7 750 | 113 461 584 2372 1 751 | 1465 1467 2823 173 16 752 | 965 524 1451 407 8 753 | 2061 388 1284 479 8 754 | 184 2886 234 2049 13 755 | 503 252 1549 1549 3 756 | 376 1214 3032 1214 11 757 | 218 1997 3037 1571 11 758 | 694 2768 32 902 13 759 | 1772 1997 1019 448 10 760 | 2119 993 2695 558 16 761 | 969 967 3043 74 16 762 | 1596 2889 63 610 6 763 | 716 841 2790 2094 0 764 | 901 27 2955 669 14 765 | 1089 2749 1 325 6 766 | 936 127 2227 424 8 767 | 2066 644 581 270 2 768 | 544 2224 1698 1682 0 769 | 307 2752 150 1014 13 770 | 1918 88 2107 329 8 771 | 1449 511 664 198 2 772 | 2224 716 373 122 2 773 | 535 2952 106 1449 13 774 | 534 307 2790 1359 5 775 | 478 834 130 834 7 776 | 2374 2374 2374 335 10 777 | 658 597 1605 1604 3 778 | 404 98 2872 1457 5 779 | 2103 215 3036 215 14 780 | 2377 3006 151 341 0 781 | 261 2984 1431 1432 15 782 | 211 1455 1303 817 9 783 | 430 2792 2359 1478 15 784 | 111 710 2844 1135 5 785 | 78 670 2783 992 5 786 | 2374 250 1511 94 8 787 | 552 1015 511 936 7 788 | 1321 67 494 600 2 789 | 1448 273 2792 520 14 790 | 1879 585 2694 44 14 791 | 1016 605 2757 605 14 792 | 44 173 2823 2313 5 793 | 718 41 1449 1742 0 794 | 3 2934 236 1450 13 795 | 542 115 1450 1019 3 796 | 28 1752 62 1449 7 797 | 1495 2656 139 437 6 798 | 1010 2378 69 551 0 799 | 108 936 528 947 7 800 | 2330 1582 2686 625 16 801 | 275 2288 275 2288 7 802 | 28 1752 1751 970 9 803 | 76 37 472 1314 0 804 | 447 534 198 954 0 805 | 2094 1570 1397 91 10 806 | 1540 1540 73 146 4 807 | 542 1212 2897 1015 11 808 | 2368 2952 1540 268 12 809 | 366 2374 2374 2374 9 810 | 1481 2615 438 306 6 811 | 1293 1293 113 657 4 812 | 205 2105 916 770 9 813 | 1739 1949 537 668 4 814 | 483 1293 2692 844 11 815 | 2119 1495 2846 222 16 816 | 166 416 3002 730 5 817 | 961 2686 1429 57 12 818 | 957 204 957 399 8 819 | 625 1425 625 1425 7 820 | 406 1444 2881 1211 11 821 | 284 992 535 1593 7 822 | 276 1036 208 1577 0 823 | 152 2970 2103 1657 15 824 | 245 2823 1371 1372 15 825 | 1483 1482 1634 32 10 826 | 0 76 118 1514 1 827 | 589 2238 2955 1770 11 828 | 466 668 32 1540 1 829 | 1977 1544 395 178 0 830 | 1641 2923 106 129 6 831 | 436 93 166 1540 1 832 | 1432 2741 1014 627 12 833 | 992 992 2897 591 16 834 | 1397 91 3042 91 14 835 | 138 2585 2296 1598 0 836 | 478 2883 381 2061 13 837 | 250 1449 1606 1573 9 838 | 1445 1444 190 462 4 839 | 488 1752 492 841 7 840 | 407 1451 1451 2409 9 841 | 44 209 500 1638 1 842 | 167 3030 1758 1758 15 843 | 957 66 2952 335 14 844 | 2430 2193 2952 595 16 845 | 1514 319 210 160 2 846 | 135 1514 1577 1108 9 847 | 494 2615 939 1876 15 848 | 412 2912 2120 2121 0 849 | 523 1475 2951 2087 11 850 | 2294 2313 2837 2 16 851 | 2334 1083 1083 193 10 852 | 121 396 263 1540 1 853 | 2579 2897 834 214 12 854 | 1208 2952 239 508 6 855 | 1211 3042 1216 347 12 856 | 682 289 941 970 3 857 | 176 349 1754 970 3 858 | 126 429 2958 1701 5 859 | 1754 349 122 177 2 860 | 214 340 2768 1498 5 861 | 319 714 2258 2146 3 862 | 1514 308 149 346 2 863 | 86 1449 2944 1109 11 864 | 2316 2872 95 401 6 865 | 303 538 1451 2062 3 866 | 97 833 97 837 7 867 | 661 63 2683 2312 5 868 | 273 2792 1451 2062 15 869 | 50 330 1014 1776 3 870 | 941 185 479 455 2 871 | 2103 421 100 75 2 872 | 657 345 657 1451 1 873 | 2421 1429 634 544 4 874 | 96 2792 902 2409 15 875 | 70 3040 1720 1891 15 876 | 479 2057 2743 1490 11 877 | 1468 3036 603 403 6 878 | 368 2905 72 902 13 879 | 206 2952 335 2372 13 
880 | 315 1076 418 2277 7 881 | 2218 2218 1861 172 10 882 | 398 2646 2175 765 15 883 | 525 1316 525 1752 7 884 | 64 2632 64 2374 0 885 | 1284 479 334 493 2 886 | 970 941 361 701 4 887 | 1540 452 402 133 2 888 | 164 205 2154 777 3 889 | 710 319 2929 1204 5 890 | 110 676 1031 964 3 891 | 1212 466 177 613 2 892 | 558 1577 2358 937 9 893 | 2319 2790 1216 376 12 894 | 595 1444 1029 2252 9 895 | 32 2886 324 1404 13 896 | 221 799 221 799 7 897 | 760 796 760 144 10 898 | 219 222 2846 1735 5 899 | 716 308 1718 853 3 900 | 572 122 485 834 1 901 | 1181 504 1181 504 8 902 | 374 2372 2372 2372 9 903 | 383 63 706 941 1 904 | 970 1019 2883 508 16 905 | 92 2792 1084 1786 15 906 | 2150 2633 1420 248 12 907 | 367 206 2372 1528 3 908 | 69 669 1651 932 3 909 | 2227 77 683 241 2 910 | 2506 747 2837 400 16 911 | 185 954 2752 1446 11 912 | 1017 3021 849 575 12 913 | 919 970 1181 504 10 914 | 324 516 308 1718 1 915 | 183 1208 940 2168 9 916 | 416 571 2912 1376 5 917 | 144 1017 214 835 7 918 | 1094 2952 239 718 6 919 | 1773 1451 1451 41 10 920 | 202 2228 300 1514 7 921 | 1429 153 2754 128 14 922 | 2491 1594 2806 224 16 923 | 834 150 954 237 8 924 | 1860 260 1413 242 8 925 | 647 841 552 1450 7 926 | 2 2837 2119 2318 15 927 | 1015 277 835 130 8 928 | 227 2952 1002 916 15 929 | 1604 2897 520 334 6 930 | 641 2443 641 1831 7 931 | 317 715 2252 1077 3 932 | 1572 173 2103 133 8 933 | 340 1754 2421 2420 9 934 | 2226 506 1540 89 8 935 | 215 2103 2103 2336 9 936 | 1357 1357 2752 307 16 937 | 760 268 417 582 2 938 | 226 403 510 2226 1 939 | 837 564 941 152 8 940 | 2602 1735 2952 69 16 941 | 2226 512 2955 231 14 942 | 68 1016 538 1015 7 943 | 1127 139 2583 139 8 944 | 145 841 564 1449 7 945 | 428 2386 428 2383 0 946 | 657 1445 511 1015 7 947 | 643 480 2227 764 3 948 | 688 2690 688 901 13 949 | 431 2984 160 1514 0 950 | 273 678 440 1450 1 951 | 286 1441 1441 1598 9 952 | 312 3021 1739 2570 15 953 | 1019 448 545 348 2 954 | 957 194 2786 623 14 955 | 2241 707 2964 707 14 956 | 458 2066 171 1099 7 957 | 608 265 2768 901 5 958 | 612 1292 2955 836 0 959 | 684 1429 3015 2482 11 960 | 426 18 2692 1223 0 961 | 1890 1754 163 305 4 962 | 508 2650 1663 1418 15 963 | 954 667 1445 406 8 964 | 274 841 198 1016 7 965 | 687 564 184 2374 1 966 | 2277 2944 1754 163 12 967 | 935 936 2661 293 16 968 | 1031 964 279 714 4 969 | 260 1860 101 1752 7 970 | 2066 2064 2067 341 10 971 | 1751 84 536 672 2 972 | 1468 2872 361 112 6 973 | 52 1451 3021 1884 11 974 | 399 2905 1641 1528 15 975 | 72 2782 368 959 0 976 | 437 595 60 1017 1 977 | 686 66 1737 1729 3 978 | 1154 322 3030 322 14 979 | 450 2718 1468 1470 15 980 | 1084 2622 565 256 6 981 | 237 959 1745 959 9 982 | 13 2779 2211 2211 15 983 | 286 1441 63 1449 7 984 | 1067 2966 1016 332 12 985 | 641 238 2764 911 0 986 | 2371 398 2738 403 0 987 | 2226 52 3021 682 14 988 | 2508 47 62 591 2 989 | 2064 2951 834 478 12 990 | 1429 2615 1083 495 12 991 | 489 2817 489 1540 13 992 | 947 37 239 704 2 993 | 1418 962 2870 366 16 994 | 486 2924 164 1429 13 995 | 2481 417 1815 364 8 996 | 1870 2409 1292 534 10 997 | 23 77 415 1461 1 998 | 1015 132 1017 307 8 999 | 1740 1540 667 152 4 1000 | 1194 3030 511 720 6 1001 | 1556 1724 1445 42 10 1002 | 680 2615 1884 1997 15 1003 | 341 2067 2052 1439 9 1004 | 1112 2752 1540 163 12 1005 | 1062 1208 250 416 4 1006 | 153 2754 309 841 13 1007 | 1797 2313 2924 486 16 1008 | 81 656 2893 1604 5 1009 | 416 63 2832 1474 5 1010 | 1752 84 2768 544 14 1011 | 1252 3014 1252 177 12 1012 | 1465 1612 2315 717 0 1013 | 58 935 1726 935 9 1014 | 1514 691 1351 523 8 1015 | 564 760 760 796 9 1016 | 
377 440 2952 998 5 1017 | 545 2870 1351 2154 15 1018 | 274 132 1015 1544 3 1019 | 1338 3021 1450 556 12 1020 | 290 403 2067 2471 3 1021 | 1127 255 1127 255 8 1022 | 141 1215 542 1212 7 1023 | 1220 2855 704 73 0 1024 | 1452 382 2227 177 8 1025 | 719 415 2924 2313 5 1026 | 2119 1149 1451 407 10 1027 | 243 1293 3013 2376 11 1028 | 76 1210 149 2075 7 1029 | 246 1752 2970 2316 11 1030 | 464 84 68 1540 1 1031 | 2409 2752 760 202 12 1032 | 19 555 2883 1377 5 1033 | 941 511 841 225 8 1034 | 14 1093 67 1321 7 1035 | 718 1031 2636 1408 11 1036 | 978 2742 398 327 6 1037 | 560 251 184 2374 1 1038 | 1216 542 1215 542 8 1039 | 168 2659 2473 1179 15 1040 | 2226 511 679 110 2 1041 | 224 2837 184 2228 13 1042 | -------------------------------------------------------------------------------- /data/SSLdata/SSL_data_description.txt: -------------------------------------------------------------------------------- 1 | -----------------ClusterPre.txt------------------- 2 | 0 0.6984625743463135 3 | 1 0.5666507405637841 4 | 2 0.4253164827645197 5 | 3 0.583941605839416 6 | 4 0 7 | 5 0 8 | ... 9 | 10 | The first column denotes the node ID in BioHNs. 11 | The second column denotes the clustering coefficient of that node. 12 | 13 | -----------------PairDistance.txt------------------- 14 | 0 15 2 15 | 0 31 4 16 | 0 44 1 17 | 0 65 1 18 | 0 76 1 19 | 0 77 1 20 | ... 21 | 22 | The first two columns denote the IDs of two nodes in BioHNs. 23 | The third column denotes the distance category of the node pair. 24 | 25 | -----------------EdgeMask.txt------------------- 26 | 0 712 1 27 | 3 77 1 28 | 0 2642 3 29 | 25 1042 2 30 | 3 46 1 31 | ... 32 | 33 | The first two columns denote the IDs of two nodes in BioHNs. 34 | The third column denotes the type of edge between the node pair. 35 | 36 | -----------------PathClass.txt------------------- 37 | 251 520 16 2303 1 38 | 160 571 1655 2053 3 39 | 331 70 3040 1224 5 40 | 348 2409 348 1429 7 41 | 1210 191 2518 654 8 42 | ... 43 | 44 | The first four columns denote the IDs of four nodes in BioHNs; together, the four nodes form a path. 45 | The fifth column denotes the type of the path. 46 | 47 | 48 | -----------------SimReg.txt------------------- 49 | 0 328 0.36145 50 | 0 39 0.5 51 | 0 162 0.51064 52 | 1 626 0.22857 53 | 1 261 0.62667 54 | ... 55 | 56 | The first two columns denote the IDs of two nodes in BioHNs. 57 | The third column denotes the similarity value between the two nodes. 58 | 59 | -----------------SimCon.txt------------------- 60 | 0 188 74 0.13842 61 | 1 299 418 0.1638 62 | 1 399 64 0.27647 63 | 2 587 234 0.23703 64 | 12 455 16 0.21229 65 | 3 601 681 0.13456 66 | ... 67 | 68 | The first three columns denote the IDs of three nodes in BioHNs. 69 | The fourth column denotes the difference between two similarity values. Formally, column 4 = sim(node in column 1, node in column 2) - sim(node in column 1, node in column 3); for example, sim(0, 188) - sim(0, 74) = 0.13842.
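All six SSL files are plain whitespace-separated tables, so they can be read with NumPy alone. A minimal loading sketch (file paths taken from the repository tree, column layouts from the descriptions above; the commented-out check assumes drug_sim.txt stores a square whitespace-separated similarity matrix indexed by drug ID):

import numpy as np

ssl = '../data/SSLdata/'

# Real-valued targets for the regression-style tasks.
cluster_pre = np.loadtxt(ssl + 'ClusterPre.txt')        # (n, 2): node ID, clustering coefficient
sim_reg     = np.loadtxt(ssl + 'SimReg.txt')            # (n, 3): node pair, similarity value
sim_con     = np.loadtxt(ssl + 'SimCon.txt')            # (n, 4): node triple, similarity difference

# Integer labels for the classification-style tasks.
pair_distance = np.loadtxt(ssl + 'PairDistance.txt', dtype=int)  # (n, 3): node pair, distance category
edge_mask     = np.loadtxt(ssl + 'EdgeMask.txt', dtype=int)      # (n, 3): node pair, edge type
path_class    = np.loadtxt(ssl + 'PathClass.txt', dtype=int)     # (n, 5): four-node path, path type

# Split SimCon rows into node-ID triples and their regression targets.
triples, target = sim_con[:, :3].astype(int), sim_con[:, 3]

# Optional sanity check of the SimCon identity on a drug triple such as (0, 188, 74),
# assuming drug_sim.txt is a square similarity matrix indexed by drug ID:
# S = np.loadtxt('../data/BioHNsdata/drug_sim.txt')
# a, b, c = triples[0]
# assert abs((S[a, b] - S[a, c]) - target[0]) < 1e-4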
-------------------------------------------------------------------------------- /data/SSLdata/SimCon.txt: -------------------------------------------------------------------------------- 1 | 1541 1948 1491 0.0017399999999999916 2 | 1343 1659 2188 0.029049999999999992 3 | 256 502 436 0.22611 4 | 593 124 699 0.041830000000000034 5 | 2771 2944 2847 0.10298792186744105 6 | 1559 1906 898 0.014229999999999993 7 | 2511 1350 1103 0.03572600000000001 8 | 2151 1276 2577 0.012309999999999988 9 | 1994 2220 1723 0.02491299999999999 10 | 2463 2021 929 0.021820000000000006 11 | 1665 2230 1946 0.05443999999999999 12 | 893 2240 1821 0.009899999999999992 13 | 2061 2507 2568 0.0 14 | 500 362 112 0.03531000000000001 15 | 252 305 230 0.07489000000000001 16 | 1798 2337 868 0.02295999999999998 17 | 1918 1827 1820 0.01287000000000002 18 | 2539 1321 816 0.013429999999999997 19 | 1526 825 1966 0.004449999999999996 20 | 646 537 343 0.0 21 | 18 583 157 0.07552999999999999 22 | 1406 895 1836 0.02153000000000002 23 | 775 1216 2191 0.00014999999999998348 24 | 1410 1316 969 0.0021200000000000108 25 | 1781 1026 1329 0.013619999999999993 26 | 2344 2011 832 0.0038799999999999946 27 | 2439 1409 1517 0.02595 28 | 2036 2250 1526 0.07483 29 | 682 355 474 0.20980000000000004 30 | 2947 2868 2826 0.5721350095864449 31 | 523 332 584 0.06322000000000003 32 | 2395 1522 1048 0.019109999999999988 33 | 397 708 516 0.12188000000000004 34 | 921 2238 1197 0.029859999999999998 35 | 246 511 477 0.16783000000000003 36 | 229 161 338 0.12875999999999999 37 | 2404 2462 1949 0.050600000000000006 38 | 1288 2534 1314 0.010700000000000015 39 | 1068 2462 2262 0.03952000000000001 40 | 2529 1612 1768 0.048729999999999996 41 | 1557 811 771 0.011349999999999999 42 | 1408 731 1245 0.006630000000000025 43 | 2450 1381 1855 0.01396 44 | 1636 808 1478 0.05299999999999999 45 | 457 617 135 0.010519999999999974 46 | 64 536 246 0.015270000000000006 47 | 1549 925 2426 0.003239999999999993 48 | 1421 2127 993 0.028251 49 | 1416 2278 1691 0.01752999999999999 50 | 2301 1599 2516 0.044017000000000014 51 | 2265 1258 734 0.010659999999999975 52 | 395 106 361 0.34813999999999995 53 | 522 430 660 0.293821 54 | 1330 1287 1468 0.008239999999999997 55 | 2390 1060 2349 0.093192 56 | 957 1511 1585 0.0005510000000000098 57 | 2824 2668 2817 0.08057755480036899 58 | 1650 1709 2268 0.006330000000000002 59 | 2406 1144 1707 0.077337 60 | 836 1378 1170 0.007400000000000018 61 | 72 512 637 0.029909999999999992 62 | 269 368 44 0.017510000000000026 63 | 1781 1952 1888 0.024980000000000002 64 | 1127 2287 1928 0.029270000000000018 65 | 276 42 250 0.050549999999999984 66 | 969 2519 1461 0.01407 67 | 2472 2149 1287 0.03922999999999999 68 | 222 404 530 0.10335 69 | 2039 2201 2024 0.07675 70 | 338 426 444 0.1619 71 | 323 167 389 0.0025000000000000022 72 | 1894 957 952 0.0033199999999999896 73 | 1110 2439 1239 0.004830000000000001 74 | 1757 1459 1629 0.056989999999999985 75 | 515 528 672 0.18197000000000002 76 | 2378 834 2428 0.004130000000000009 77 | 1990 752 2171 0.07881699999999998 78 | 2846 3017 2868 0.10030764674652226 79 | 1651 1832 941 0.048960000000000004 80 | 1406 1923 1882 0.012631000000000003 81 | 78 396 158 0.05990000000000001 82 | 1652 2267 2341 0.016170000000000018 83 | 434 718 487 0.09007 84 | 206 82 488 0.15697 85 | 2295 2317 1324 0.053330000000000016 86 | 1143 1047 1579 0.03140000000000001 87 | 1368 1486 1044 
0.10020000000000001 88 | 1175 1170 1680 0.06142200000000002 89 | 2627 2621 2634 0.18341282979352957 90 | 1915 1311 2173 0.01727999999999999 91 | 2982 2646 3026 0.7409218284911101 92 | 913 862 2274 0.0024399999999999977 93 | 2214 1944 1954 0.010735000000000008 94 | 2449 1452 2317 0.021210000000000007 95 | 1505 2019 917 0.011509999999999992 96 | 2157 1529 1418 0.04023299999999999 97 | 228 496 87 0.023339999999999972 98 | 2777 2755 2845 0.10994318325112656 99 | 2494 1144 809 0.03648000000000001 100 | 2539 1468 1437 0.0063399999999999845 101 | 921 2520 2192 0.004739999999999994 102 | 1008 2593 738 0.037496 103 | 203 139 524 0.03589000000000003 104 | 714 202 116 0.08441000000000001 105 | 1201 2603 2218 0.021647000000000007 106 | 709 152 412 0.14172 107 | 2918 2878 2917 0.1913061094085332 108 | 304 259 443 0.27619 109 | 1222 1096 945 0.026779999999999998 110 | 1838 1046 2050 0.018079999999999985 111 | 179 355 41 0.042289999999999994 112 | 989 1226 798 0.02259 113 | 864 1471 1189 0.0012399999999999911 114 | 130 629 239 0.01308999999999999 115 | 1477 793 1069 0.0698 116 | 1481 1963 2133 0.0018700000000000105 117 | 154 391 124 0.13153000000000004 118 | 1405 1982 1314 0.009947999999999999 119 | 1383 1304 1048 0.01988000000000001 120 | 2177 2427 768 0.037809999999999996 121 | 262 94 556 0.05556 122 | 2347 1465 2391 0.021779999999999994 123 | 2746 2707 2832 0.1608344859347191 124 | 1436 1109 2017 0.124711 125 | 698 53 614 0.07680999999999999 126 | 1571 2248 1034 0.05404999999999999 127 | 2823 2949 2873 0.44239754928019387 128 | 1858 912 2465 0.0002500000000000002 129 | 1918 2150 2023 0.025370000000000004 130 | 2178 867 1170 0.08441299999999999 131 | 1032 831 2426 0.006739999999999996 132 | 991 1022 2158 0.025849999999999984 133 | 570 719 720 0.18458000000000002 134 | 2211 1736 1073 0.010700000000000015 135 | 2959 2812 2638 0.17525258509916597 136 | 416 157 147 0.00953000000000001 137 | 1069 1893 1502 0.021919999999999995 138 | 1977 2401 1040 0.06140000000000001 139 | 2137 1031 2496 0.01793 140 | 2830 2874 2916 0.2663764419512956 141 | 2317 998 1216 0.032350000000000004 142 | 74 29 697 0.09808 143 | 449 96 228 0.25128 144 | 377 193 601 0.055993 145 | 1522 1408 2562 0.0061199999999999866 146 | 1330 865 1631 0.04153299999999999 147 | 2218 1416 842 0.049350000000000005 148 | 309 218 119 0.060450000000000004 149 | 918 1212 1713 9.000000000000674e-05 150 | 2604 791 1526 0.0025700000000000167 151 | 200 595 276 0.15278000000000003 152 | 136 600 315 0.06188999999999997 153 | 867 2525 738 0.03297699999999999 154 | 2097 1008 939 0.03767000000000001 155 | 1630 1359 1175 0.006668000000000007 156 | 96 389 430 0.02203000000000005 157 | 537 417 662 0.10512999999999995 158 | 1527 1517 2128 0.09312299999999998 159 | 2729 2646 3043 0.20307259825172613 160 | 281 581 507 0.11201 161 | 1662 761 2379 0.03325 162 | 1505 952 1904 0.05094 163 | 2767 2738 2965 0.04144855830088712 164 | 2044 1972 1104 0.00817000000000001 165 | 905 1843 1855 0.023249999999999993 166 | 385 85 104 0.025639999999999996 167 | 2384 2259 2584 0.02323 168 | 113 353 467 0.19262999999999997 169 | 2501 2012 1187 0.03926199999999999 170 | 778 2472 827 0.03656999999999999 171 | 2463 2573 847 0.02704000000000001 172 | 2310 972 1754 0.012340000000000004 173 | 858 1325 1604 0.012750000000000011 174 | 324 125 251 0.05421999999999999 175 | 298 24 72 0.03702 176 | 753 1784 729 0.051426 177 | 768 1861 1754 0.00578999999999999 178 | 227 578 158 0.39142 179 | 2975 2882 2774 0.14926262134967133 180 | 1898 2136 2358 0.010470000000000007 181 | 1435 2398 2470 
0.014800000000000008 182 | 2737 2920 2959 0.3506143277000842 183 | 2391 2349 730 0.036559999999999995 184 | 2867 2615 2767 0.06277895266920136 185 | 281 275 242 0.008449999999999958 186 | 1056 1711 1967 0.03778999999999999 187 | 1991 1897 1404 0.0029800000000000104 188 | 152 399 167 0.09999999999999998 189 | 611 716 678 0.049500000000000016 190 | 2000 2302 1442 0.023282999999999998 191 | 2052 2153 2179 0.008929999999999993 192 | 2514 2217 976 0.0022739999999999982 193 | 1543 1092 1925 0.04405999999999999 194 | 1719 1278 2423 0.064727 195 | 940 2493 1464 0.009589999999999987 196 | 186 477 33 0.13083 197 | 973 1964 2144 0.076352 198 | 463 59 612 0.05080000000000001 199 | 1533 1277 1046 0.009099999999999997 200 | 931 1502 2586 0.0061400000000000066 201 | 2738 3021 2841 0.19331765002396484 202 | 1600 1336 1655 0.04060000000000001 203 | 544 201 178 0.04952000000000001 204 | 1263 1405 1264 0.009819999999999995 205 | 955 883 817 0.08027000000000001 206 | 2020 1661 1626 0.03151999999999999 207 | 1896 730 814 0.06662 208 | 1725 1129 1497 0.03383000000000003 209 | 1793 2607 2410 0.02336000000000002 210 | 1428 932 803 8.00000000000245e-05 211 | 506 478 19 0.06517 212 | 1921 2197 1779 0.022430000000000005 213 | 841 1892 1659 0.08322000000000002 214 | 2152 1376 760 0.0039899999999999936 215 | 610 595 494 0.0024599999999999622 216 | 2649 2812 2747 0.6390365097602045 217 | 935 1424 1066 0.010579999999999992 218 | 284 509 123 0.07386000000000004 219 | 2206 1496 1629 0.010020000000000001 220 | 956 1639 2103 0.0013900000000000023 221 | 3022 2762 2904 0.04006177518553611 222 | 1373 896 2014 0.058740000000000014 223 | 1562 2523 1663 0.02026 224 | 342 547 709 0.22878 225 | 271 526 387 0.22970000000000002 226 | 2209 1042 2527 0.049769999999999995 227 | 1584 2602 1345 0.008050000000000002 228 | 2494 2216 1833 0.08839 229 | 2446 2371 2086 0.011710000000000026 230 | 1231 1419 1728 0.042489999999999986 231 | 1499 1177 762 0.020460000000000006 232 | 1775 1165 2372 0.01013 233 | 857 1445 1405 0.062572 234 | 2568 1181 2034 0.04680000000000001 235 | 2044 2335 1986 0.00839999999999999 236 | 2274 1309 821 0.03687300000000002 237 | 2324 1334 2079 0.012450000000000017 238 | 1374 1242 1936 0.01723999999999999 239 | 2561 2368 1285 0.04507 240 | 407 249 135 0.24171 241 | 299 204 57 0.09762000000000004 242 | 2793 2965 3019 0.023413131725236513 243 | 1091 1482 2161 0.05021999999999999 244 | 2412 2393 1000 0.009410000000000002 245 | 796 2237 1378 0.035614999999999994 246 | 851 804 1479 0.00383 247 | 2421 1652 2406 0.002579999999999999 248 | 1753 1910 2522 0.024249999999999994 249 | 182 174 473 0.33533 250 | 1789 1241 1858 0.009200000000000014 251 | 5 630 14 0.2541399999999999 252 | 1993 905 1883 0.04546 253 | 1234 1436 2090 0.016170000000000004 254 | 2733 2659 2728 0.5780439979883627 255 | 1411 2095 1165 0.05477000000000001 256 | 955 1458 1211 0.04830999999999999 257 | 2856 2933 2772 0.4922996450753054 258 | 653 56 679 0.016220000000000012 259 | 163 149 601 0.23755000000000004 260 | 77 73 473 0.30909000000000003 261 | 455 183 235 0.28697 262 | 1976 858 2032 0.009229999999999988 263 | 2231 1365 1954 0.024010000000000004 264 | 551 382 604 0.02682000000000001 265 | 1288 929 814 0.042440000000000005 266 | 2615 2885 2817 0.12816782129893634 267 | 2426 1148 1662 0.016660000000000008 268 | 1659 2302 1581 0.03950999999999999 269 | 1611 899 2594 0.040303000000000005 270 | 1693 1709 1163 0.033540000000000014 271 | 1802 1379 791 0.015549999999999994 272 | 554 103 307 0.047619999999999996 273 | 2109 1395 1022 0.029260000000000008 274 | 2879 
2853 2772 0.08252049766197377 275 | 1110 1901 900 0.0023200000000000026 276 | 581 346 271 0.054410000000000014 277 | 2677 2719 2902 0.06346596943363855 278 | 374 501 215 0.13620000000000004 279 | 1930 2596 1765 0.025529999999999997 280 | 1726 1537 995 0.03119000000000001 281 | 782 2128 2270 0.013269999999999976 282 | 2729 2658 3031 0.5440404673874198 283 | 2232 2098 1303 0.057179999999999995 284 | 1393 2405 1138 0.0015100000000000113 285 | 226 439 24 0.20190000000000008 286 | 1124 1778 2175 0.012270000000000003 287 | 2348 727 2181 0.02273 288 | 126 318 521 0.015810000000000046 289 | 1014 1411 1516 0.013250000000000012 290 | 1317 1815 777 0.030328999999999995 291 | 2809 2695 2919 0.11377531122189 292 | 525 677 146 0.08982000000000001 293 | 618 269 390 0.02940999999999999 294 | 2385 2039 1068 0.03680999999999998 295 | 2511 1281 723 0.0032900000000000013 296 | 2200 2018 2287 0.014120000000000008 297 | 147 470 622 0.08945999999999998 298 | 1101 997 2528 0.003960000000000005 299 | 2552 1480 1422 0.011359999999999981 300 | 980 1973 2607 0.021150000000000002 301 | 2815 3037 2853 0.6234632348028871 302 | 1088 1782 1643 0.005720000000000003 303 | 3029 2622 2802 0.6127436679558969 304 | 3023 2934 2985 0.4999451885859944 305 | 261 505 395 0.25294 306 | 1157 2021 1626 0.023318 307 | 2878 2865 2712 0.05235822514006938 308 | 580 76 455 0.15554000000000007 309 | 2682 2938 2750 0.05182398164023755 310 | 1845 1856 1303 0.012379999999999988 311 | 1629 2497 1781 0.037495 312 | 642 717 351 0.005750000000000033 313 | 1289 1506 1446 0.03579 314 | 891 1714 987 0.0019499999999999934 315 | 2646 2901 2798 0.01930348116932501 316 | 2370 1884 2392 0.01963999999999999 317 | 300 106 617 0.04280999999999999 318 | 1244 2086 800 0.011080000000000007 319 | 1307 2540 1342 0.10325200000000001 320 | 1504 781 928 0.005610000000000004 321 | 1180 2169 2282 0.042190000000000005 322 | 1248 1555 1678 0.04329999999999999 323 | 662 160 106 0.10750000000000004 324 | 1063 983 1746 0.021430000000000005 325 | 2366 1061 909 0.026379999999999987 326 | 976 2115 2124 0.01126000000000002 327 | 2669 2834 2864 0.006616229718650751 328 | 1606 803 1119 0.018819000000000002 329 | 2319 1314 1677 0.04568000000000001 330 | 3039 2834 2899 0.029279711046987567 331 | 576 613 87 0.10366399999999999 332 | 357 643 296 0.11765 333 | 525 547 61 0.09292 334 | 2525 1586 1230 0.03975999999999999 335 | 2807 2761 2714 0.030686457749866447 336 | 2757 2757 2883 0.3450887726784435 337 | 2570 1182 1048 0.023059999999999997 338 | 556 430 62 0.13205999999999996 339 | 815 1783 2266 0.011690000000000006 340 | 1154 1870 832 0.0010600000000000054 341 | 1230 1313 901 0.024249999999999994 342 | 2122 1577 809 0.0020299999999999763 343 | 211 634 339 0.07428999999999997 344 | 1424 1149 1218 0.006379999999999997 345 | 754 2321 2319 0.0006599999999999939 346 | 2924 2830 2774 0.17958541698213581 347 | 1288 1889 1967 0.005229999999999985 348 | 574 584 649 0.09364 349 | 627 704 181 0.11650999999999997 350 | 441 35 506 0.07143 351 | 2492 1577 811 0.010860000000000009 352 | 1206 1374 1557 0.07638 353 | 1560 2362 2356 0.008209999999999995 354 | 924 964 1437 0.0020499999999999963 355 | 2925 2884 2749 0.19122000042831133 356 | 357 558 181 0.09398000000000001 357 | 12 302 364 0.28194 358 | 1819 851 1240 0.0042200000000000015 359 | 2617 2752 3032 0.866480356749619 360 | 1103 937 1964 0.033894999999999995 361 | 212 396 13 0.43939 362 | 1987 1310 2584 0.07080000000000002 363 | 273 343 536 0.09889999999999999 364 | 2214 2556 1821 0.00936999999999999 365 | 443 413 166 0.08687 366 | 1529 896 
2183 0.05038999999999999 367 | 1659 1036 858 0.10297 368 | 2637 2829 2718 0.004762214548334143 369 | 2800 2823 2705 0.1380998610815538 370 | 467 163 99 0.10583000000000004 371 | 871 2407 1617 0.00519 372 | 2876 2689 2709 0.02348351775561497 373 | 2083 779 1511 0.053529999999999994 374 | 1862 1038 1187 0.03222 375 | 2724 2759 2849 0.06344296497006818 376 | 1973 2043 1512 0.0010599999999999776 377 | 1310 1944 1518 0.03941999999999998 378 | 1166 1301 2055 0.023930000000000007 379 | 225 293 7 0.129627 380 | 790 1377 2022 0.0020599999999999924 381 | 2245 2201 1874 0.09288000000000002 382 | 77 149 314 0.15109 383 | 972 2551 2052 0.03172 384 | 1925 736 2218 0.04727999999999999 385 | 670 590 316 0.006489999999999996 386 | 88 690 312 0.29327000000000003 387 | 1929 1759 1032 0.005489999999999995 388 | 2517 1032 2270 0.011009999999999992 389 | 339 607 556 0.13029999999999997 390 | 2178 1798 1796 0.042669 391 | 1748 2545 1017 0.0019399999999999973 392 | 1550 2276 1781 0.020199999999999996 393 | 1189 1598 2247 0.06856999999999999 394 | 2409 1590 1572 0.012130000000000002 395 | 763 2394 2609 0.0013100000000000056 396 | 1494 1964 1273 0.003060000000000007 397 | 2951 2765 2753 0.424524681527479 398 | 2768 2885 3026 0.879285356580373 399 | 1283 1801 754 0.01201000000000002 400 | 1582 1633 2283 0.109362 401 | 1275 781 2433 0.016479999999999995 402 | 268 560 266 0.614722 403 | 1518 1004 2014 0.015200000000000019 404 | 1201 1051 2036 0.018142000000000005 405 | 306 281 65 0.026639999999999997 406 | 210 163 601 0.025000000000000022 407 | 3016 2723 2997 0.42972458628776405 408 | 1882 1454 1084 0.02099000000000001 409 | 453 216 70 0.20587999999999998 410 | 1783 807 1429 0.006280000000000008 411 | 300 375 237 0.02812999999999999 412 | 1489 2580 751 0.018979999999999997 413 | 1036 2240 1593 0.05518999999999999 414 | 52 148 691 0.06364000000000003 415 | 1633 2508 1652 0.001579999999999998 416 | 602 417 407 0.12824 417 | 2190 941 1311 0.008672000000000013 418 | 1524 1228 1944 0.008289999999999992 419 | 2206 2356 848 0.02453999999999998 420 | 1942 1034 1823 0.07433 421 | 2103 2181 2373 1.9999999999992246e-05 422 | 1634 1941 1588 0.0063700000000000145 423 | 2828 2825 2963 0.5985993143240369 424 | 2247 1208 1551 0.03843700000000001 425 | 1008 1793 2561 9.999999999998899e-05 426 | 2757 2790 2781 0.28216414566272263 427 | 827 2133 1038 0.01480999999999999 428 | 2736 2684 2671 0.1267484125189542 429 | 431 657 559 0.24638 430 | 1467 1237 1902 0.068038 431 | 2672 2658 2870 0.015276062754392195 432 | 2576 939 1729 0.02124200000000001 433 | 1819 946 1761 0.017960000000000018 434 | 1305 894 2138 0.021309999999999996 435 | 933 2456 2036 0.008940000000000003 436 | 2095 2570 1331 0.0024300000000000155 437 | 1447 2105 2259 0.006530000000000008 438 | 1081 2217 2383 0.020579999999999987 439 | 891 2135 911 0.04118100000000001 440 | 2737 2758 2945 0.1808897403560859 441 | 1194 1279 979 0.008889999999999995 442 | 1649 1857 2564 0.03086 443 | 1462 1046 1536 0.02887999999999999 444 | 2127 2311 1580 0.018250000000000016 445 | 1161 1004 1941 0.008289999999999992 446 | 1303 1116 1560 0.03884000000000001 447 | 336 118 447 0.15595799999999999 448 | 1123 2343 2151 0.05271000000000001 449 | 1334 2007 992 0.029179999999999998 450 | 2249 2503 1527 0.03148999999999999 451 | 499 533 588 0.20063000000000003 452 | 2611 2070 1758 0.030079999999999996 453 | 508 360 667 0.031039999999999984 454 | 2770 2963 3001 0.0793649746425532 455 | 1261 1390 1722 0.06304 456 | 1597 1114 1027 0.03053 457 | 
-------------------------------------------------------------------------------- /data/SSLdata/SimReg.txt: -------------------------------------------------------------------------------- 1 | 1505 2373 0.1664 2 | 528 501 0.5 3 | 2180 1654 0.17426 4 | 918 1805 0.15603 5 | 676 2 0.31633 6 | 2529 2003 0.17075 7 | 1894 1187 0.1442 8 | 840 1268 0.15105 9 | 1347 1101 0.078229 10 | 963 2025 0.14732 11 | 2490 2601 0.12217 12 | 141 221 0.32184 13 | 857 1705 0.099255 14 | 1289 2564 0.10771 15 | 1320 1126 0.13445 16 | 607 406 0.38202 17 | 1706 1549 0.13747 18 | 1129 1967 0.18187 19 | 1731 1791 0.19304 20 | 1707 2106 0.12813 21 | 602 130 0.16326 22 | 1607 1739 0.11807 23 | 2470 737 0.14753 24 | 811 2176 0.090694 25 | 1342 1954 0.11143 26 | 2273 2094 0.10049 27 | 1043 842 0.12418 28 | 2275 1341 0.14682 29 | 2770 3041 0.10896533719613767 30 | 869 1069 0.09902 31 | 2222 1662 0.14077 32 | 1483 2355 0.17578 33 | 2543 2426 0.13951 34 | 404 584 0.25 35 | 1481 1433 0.17297 36 | 2068 1955 0.095994 37 | 1725 1630 0.10066 38 | 274 141 0.67143 39 | 1180 2528 0.15152 40 | 1554 1063 0.13848 41 | 1699 894 0.10874 42 | 153 602 0.31372 43 | 1910 2604 0.16357 44 | 1421 1027 0.074495 45 | 1619 1879 0.14327 46 | 2062 1649 0.17644 47 | 46 292 0.38947 48 | 2196 1819 0.15247 49 | 1660 2407 0.071499 50 | 1025 1246 0.16094 51 | 2333 2158 0.081202 52 | 1291 2167 0.12032 53 | 1042 1604 0.14786 54 | 269 18 0.32394 55 | 773 2111 0.13331 56 | 698 24 0.37255 57 | 907 733 0.13652 58 | 1945 2559 0.18452 59 | 1575 1255 0.12945 60 | 3001 2786 0.16767172255043478 61 | 84 312 0.125 62 | 791 859 0.15776 63 | 973 1759 0.17376 64 | 1684 1546 0.1485 65 | 173 282 0.29578 66 | 1953 1965 0.18875 67 | 2053 1257 0.12152 68 | 756 2599 0.1692 69 | 2532 748 0.15689 70 | 2407 905 0.15351 71 | 2315 903 0.11678 72 | 36 19 0.19231 73 | 299 677 0.57143 74 | 1003 1729 0.13326 75 | 2371 1711 0.099443 76 | 1353 975 0.17307 77 | 2737 2942 0.20043329182617578 78 | 2206 2154 0.13671 79 | 422 710 0.35526 80 | 2092 1591 0.069277 81 | 2660 2979 0.7826887160510289 82 | 2783 2830 0.1476165323135884 83 | 1150 1857 0.17049 84 | 1238 2085 0.17103 85 | 2542 2111 0.13055 86 | 2622 2792 0.8581248811842113 87 | 1616 1249 0.14167 88 | 1132 866 0.16371 89 | 2276 1766 0.13651 90 | 1255 900 0.1115 91 | 1714 1593 0.1678 92 | 2472 1506 0.18885 93 | 852 1567 0.16198 94 | 1675 1979 0.15561 95 | 840 1892 0.12414 96 | 1734 1462 0.14404 97 | 511 336 0.42308 98 | 930 1915 0.17122 99 | 2958 3025 0.455348153624773 100 | 1398 2482 0.15219 101 | 2539 2053 0.12884 102 | 2470 1234 0.14555 103 | 1343 1018 0.15449 104 | 2129 953 0.14799 105 | 809 1005 0.18386 106 | 883 1129 0.16888 107 | 1740 2508 0.15341 108 | 840 1270 0.17213 109 | 1252 1757 0.17799 110 | 1332 2604 0.16529 111 | 2270 1599 0.135 112 | 172 138 0.48649 113 | 546 504 0.48352 114 | 1115 1452 0.071458 115 | 1354 2207 0.12946 116 | 460 57 0.3913 117 | 1244 895 0.14452 118 | 1605 2544 0.17228 119 | 1130 1739 0.19088 120 | 2077 1791 0.09532 121 | 762 1288 0.16497 122 | 1538 1680 0.091201 123 | 1009 2015 0.17285 124 | 1016 1947 0.16384 125 | 1013 1055 0.18434 126 | 1411 2502 0.18035 127 | 2473 1699 0.12726 128 | 1176 1062 0.13801 129 | 2178 1649 0.1014 130 | 1894 1809 0.15563 131 | 1812 951 0.11351 132 | 1137 2476 0.14658 133 | 2118 1049 0.13691 134 | 1487 1950 0.13317 135 | 1607 1661 0.086374 136 | 819 1081 0.16285 137 | 2594 2105 0.12127 138 | 2423 1800 0.12803 139 | 2668 2962 0.7696733762751288 140 | 1354 2602 0.16418 141 | 2246 1342 0.12576 142 | 50 710 0.36559 143 | 878 1651 0.11101 144 | 2306 1047 0.14638 145 | 1123 1224 
0.17843 146 | 1134 1584 0.14802 147 | 1801 1214 0.15616 148 | 1006 1711 0.15559 149 | 469 34 0.48571 150 | 762 820 0.09359 151 | 1095 1330 0.11431 152 | 1604 997 0.18249 153 | 1922 1904 0.12759 154 | 2199 1422 0.15207 155 | 606 670 0.29268 156 | 2734 2855 0.8780231571482843 157 | 737 2137 0.17682 158 | 2020 1896 0.15193 159 | 292 250 0.39189 160 | 2467 2156 0.07322 161 | 1127 910 0.1711 162 | 2997 2705 0.32312739872978274 163 | 796 2452 0.087338 164 | 952 1359 0.13097 165 | 635 627 0.63793 166 | 1830 1803 0.16489 167 | 1611 1260 0.18095 168 | 1006 1527 0.1754 169 | 1090 1182 0.08473 170 | 2306 2022 0.14695 171 | 1897 901 0.12306 172 | 1800 983 0.18156 173 | 2461 1001 0.17542 174 | 784 1038 0.18341 175 | 1143 1531 0.15872 176 | 2257 1443 0.080526 177 | 1394 2233 0.11261 178 | 2348 2023 0.15363 179 | 1874 1632 0.1649 180 | 1589 991 0.14751 181 | 1401 1960 0.14386 182 | 1828 1550 0.11921 183 | 1143 1878 0.15081 184 | 1927 2031 0.1517 185 | 804 936 0.1559 186 | 1410 2458 0.15834 187 | 1640 1508 0.11092 188 | 2848 2937 0.4405869372709058 189 | 861 2491 0.14266 190 | 1130 1211 0.15255 191 | 1728 2417 0.17188 192 | 875 2183 0.15302 193 | 1753 1168 0.12128 194 | 1259 1147 0.18091 195 | 2663 2914 0.14908289618098933 196 | 1186 1017 0.14394 197 | 2309 2020 0.17987 198 | 227 693 0.23944 199 | 702 134 0.41429 200 | 1527 2269 0.17487 201 | 1336 2026 0.11672 202 | 809 2046 0.16799 203 | 1675 1231 0.099416 204 | 2225 1039 0.16151 205 | 1266 2186 0.1837 206 | 1545 1402 0.14468 207 | 1806 2484 0.18323 208 | 615 359 0.45161 209 | 1046 1441 0.14674 210 | 768 1973 0.15947 211 | 148 437 0.38235 212 | 2630 2868 0.5931800113710979 213 | 1870 2466 0.17006 214 | 1676 896 0.094875 215 | 1461 1479 0.1155 216 | 1774 2261 0.11171 217 | 1597 1020 0.12619 218 | 1021 1165 0.17734 219 | 1768 964 0.12238 220 | 2054 2226 0.15967 221 | 2457 1114 0.14173 222 | 170 144 0.225 223 | 1003 1867 0.16565 224 | 1703 2610 0.15168 225 | 1331 1505 0.15015 226 | 1751 2603 0.16993 227 | 1094 1209 0.1829 228 | 1726 2206 0.10783 229 | 1926 1742 0.12163 230 | 1082 969 0.17091 231 | 2255 2565 0.19009 232 | 1546 2103 0.16517 233 | 1828 2378 0.14547 234 | 2822 2740 0.29129708636072027 235 | 514 608 0.27536 236 | 967 1338 0.14465 237 | 1623 1761 0.10608 238 | 1694 1942 0.10504 239 | 1933 1230 0.17574 240 | 1321 992 0.16336 241 | 2489 1612 0.11118 242 | 2163 1750 0.10254 243 | 936 1458 0.2366 244 | 2416 2410 0.16463 245 | 1103 2308 0.11513 246 | 1620 1747 0.12632 247 | 1033 1244 0.17467 248 | 2834 2703 0.38549316948098794 249 | 818 1965 0.28215 250 | 2631 2920 0.4142028799658257 251 | 2053 1730 0.11227 252 | 2456 2339 0.1558 253 | 292 658 0.4058 254 | 977 1810 0.17423 255 | 1612 910 0.16982 256 | 1209 1501 0.156 257 | 1865 2588 0.1679 258 | 1393 960 0.16258 259 | 389 716 0.2093 260 | 798 2197 0.11344 261 | 547 636 0.30645 262 | 1398 2032 0.16109 263 | 765 2189 0.13311 264 | 2231 1325 0.15418 265 | 1597 1407 0.12737 266 | 1104 2166 0.11127 267 | 554 191 0.27869 268 | 1170 2596 0.14644 269 | 2090 864 0.12624 270 | 1306 819 0.16975 271 | 1435 1584 0.16946 272 | 2112 1322 0.11404 273 | 1391 2406 0.18405 274 | 285 400 0.2807 275 | 1552 1584 0.10935 276 | 1847 1081 0.18024 277 | 1437 1747 0.11662 278 | 1409 1331 0.13825 279 | 2015 1673 0.17049 280 | 1861 1872 0.16226 281 | 945 1813 0.11307 282 | 2268 1290 0.078463 283 | 259 79 0.23611 284 | 962 2426 0.16885 285 | 1779 877 0.17236 286 | 1005 1714 0.17383 287 | 2032 1545 0.15145 288 | 962 779 0.16723 289 | 677 565 0.47059 290 | 833 2147 0.1475 291 | 1391 2304 0.18207 292 | 1818 1392 0.10657 293 | 1246 
1095 0.1784 294 | 1174 1933 0.16876 295 | 2348 1140 0.16367 296 | 275 498 0.36458 297 | 1970 1071 0.36895 298 | 1714 1232 0.15216 299 | 1669 2175 0.047902 300 | 1881 1197 0.11714 301 | 1406 1857 0.16642 302 | 969 761 0.15453 303 | 1245 971 0.17352 304 | 925 896 0.18169 305 | 650 361 0.16883 306 | 259 143 0.37975 307 | 2586 2408 0.17078 308 | 1331 2174 0.13667 309 | 1901 966 0.088714 310 | 2415 782 0.096119 311 | 2451 2499 0.1904 312 | 2408 2179 0.11164 313 | 1835 1635 0.12667 314 | 2663 2885 0.2489429788775145 315 | 942 1814 0.16159 316 | 923 1813 0.14715 317 | 1843 1223 0.16422 318 | 232 130 0.2 319 | 2667 2936 0.2588524115650922 320 | 1701 1065 0.18318 321 | 920 1798 0.14975 322 | 2748 2788 0.1353352832366127 323 | 1473 962 0.12497 324 | 2515 1410 0.071135 325 | 1437 2141 0.12341 326 | 3045 2853 0.2972877853783136 327 | 2461 1144 0.18447 328 | 1353 2480 0.15826 329 | 169 392 0.2 330 | 1082 2401 0.18671 331 | 2361 1872 0.1483 332 | 2027 1518 0.085772 333 | 1461 1483 0.10636 334 | 1936 1064 0.16333 335 | 2003 1427 0.15249 336 | 1124 1628 0.1619 337 | 2393 2278 0.17078 338 | 2510 1764 0.16579 339 | 1333 971 0.1571 340 | 1723 978 0.13406 341 | 1862 1129 0.17639 342 | 2583 2378 0.085216 343 | 547 140 0.35714 344 | 1631 1855 0.085216 345 | 641 93 0.38636 346 | 2717 2749 0.0 347 | 1116 1254 0.15666 348 | 1318 839 0.15807 349 | 1990 2001 0.17987 350 | 2160 2527 0.098778 351 | 1966 2109 0.13259 352 | 1717 1119 0.1375 353 | 1833 980 0.13601 354 | 1110 2555 0.13971 355 | 1784 1366 0.17524 356 | 2878 2628 0.6945094348184341 357 | 294 326 0.39706 358 | 2238 1612 0.17965 359 | 2404 1559 0.16633 360 | 1571 2199 0.14196 361 | 1477 1656 0.17342 362 | 1176 2599 0.1554 363 | 2246 2272 0.15798 364 | 1857 942 0.16305 365 | 1486 975 0.12101 366 | 2192 1030 0.1584 367 | 731 940 0.10968 368 | 2060 1106 0.16438 369 | 53 595 0.26316 370 | 1350 1158 0.11265 371 | 1245 2121 0.14763 372 | 833 1407 0.0899 373 | 2554 1952 0.14052 374 | 1672 1799 0.1756 375 | 196 51 0.28302 376 | 1489 1961 0.16092 377 | 1560 1460 0.16092 378 | 1602 1023 0.15034 379 | 859 1953 0.15056 380 | 1466 1021 0.15865 381 | 2459 2385 0.16867 382 | 999 1819 0.13362 383 | 2450 1859 0.1587 384 | 1866 1519 0.13666 385 | 1955 2517 0.13965 386 | 1763 1600 0.16279 387 | 8 386 0.33333 388 | 2122 948 0.16349 389 | 971 956 0.11607 390 | 1914 1399 0.15887 391 | 1122 755 0.16362 392 | 2464 2486 0.18164 393 | 2531 1385 0.16732 394 | 2601 1675 0.17197 395 | 1111 1002 0.1737 396 | 1274 2574 0.17558 397 | 2161 2573 0.11335 398 | 149 615 0.23288 399 | 1008 1418 0.12142 400 | 580 378 0.4 401 | 1530 1783 0.18405 402 | 120 641 0.20635 403 | 2318 1436 0.12495 404 | 1212 1825 0.10266 405 | 1314 2187 0.081838 406 | 1585 2073 0.16842 407 | 1662 2287 0.12266 408 | 1435 1727 0.16807 409 | 1012 1419 0.14012 410 | 984 1334 0.16469 411 | 333 425 0.25 412 | 2996 2803 0.2662908384431439 413 | 2095 1463 0.18627 414 | 1631 785 0.13234 415 | 2819 2995 0.1400146395079352 416 | 1590 2167 0.16446 417 | 949 2203 0.14296 418 | 1933 1403 0.16607 419 | 502 360 0.28 420 | 1263 1787 0.17854 421 | 1590 928 0.16683 422 | 1814 950 0.15558 423 | 1277 1853 0.17197 424 | 2400 1320 0.14104 425 | 2417 2395 0.15877 426 | 2418 1255 0.17542 427 | 2588 1562 0.14989 428 | 2394 1779 0.1572 429 | 2436 1060 0.17186 430 | 949 2535 0.11178 431 | 834 2446 0.16121 432 | 946 1804 0.16429 433 | 1437 2454 0.16684 434 | 1432 1937 0.18569 435 | 1530 2529 0.18049 436 | 983 2076 0.18548 437 | 1456 2353 0.18851 438 | 1708 1478 0.15786 439 | 723 1334 0.13225 440 | 2648 2813 0.14427452248855305 441 | 940 2176 0.11778 
442 | 1970 1199 0.18235 443 | 867 2376 0.17036 444 | 2088 2267 0.1468 445 | 1070 976 0.19149 446 | 1305 2556 0.13429 447 | 2478 1995 0.17174 448 | 882 1651 0.11056 449 | 1789 1388 0.14223 450 | 1046 2249 0.16749 451 | 1311 1333 0.15841 452 | 2101 2181 0.1541 453 | 1762 1218 0.18393 454 | 1190 2025 0.15806 455 | 811 1802 0.16553 456 | 403 637 0.11765 457 | 633 265 0.46667 458 | 2194 1664 0.1473 459 | 899 1560 0.11064 460 | 709 35 0.33333 461 | 599 161 0.52857 462 | 1077 2483 0.1295 463 | 2232 2495 0.16407 464 | 2445 2471 0.1994 465 | 1927 832 0.15521 466 | 1923 1569 0.1519 467 | 2913 2810 0.8663329843729287 468 | 1598 2318 0.15118 469 | 2014 2417 0.1467 470 | 1988 1288 0.17326 471 | 1193 1777 0.13848 472 | 992 2003 0.14372 473 | 965 1774 0.11467 474 | 2208 1587 0.20786 475 | 3025 2786 0.3011641205596972 476 | 1586 2495 0.17175 477 | 893 901 0.16451 478 | 123 644 0.21667 479 | 1812 2487 0.12635 480 | 1703 2588 0.14508 481 | 1245 1582 0.16783 482 | 2351 1046 0.17457 483 | 2611 1911 0.17105 484 | 2439 1303 0.14731 485 | 1833 2557 0.089064 486 | 730 2136 0.17493 487 | 2228 916 0.14329 488 | 1614 1076 0.17499 489 | 1160 847 0.15329 490 | 1380 1458 0.12545 491 | 1917 2197 0.17276 492 | 2141 2138 0.18555 493 | 1116 1883 0.13734 494 | 1557 1371 0.12112 495 | 1122 1111 0.15013 496 | 2201 1094 0.13144 497 | 2554 2465 0.09852 498 | 2437 1089 0.17157 499 | 2613 2264 0.15154 500 | 2447 1838 0.18158 501 | 1866 880 0.15496 502 | 972 2049 0.17318 503 | 1282 1088 0.117 504 | 841 1355 0.15204 505 | 1038 2080 0.17419 506 | 1162 958 0.12581 507 | 2344 927 0.17532 508 | 178 436 0.20339 509 | 2436 745 0.12791 510 | 1679 1099 0.13902 511 | 1412 2488 0.18502 512 | 1604 2370 0.17508 513 | 1768 1860 0.15834 514 | 1868 2020 0.13785 515 | 1521 1616 0.14419 516 | 2120 1133 0.14159 517 | 1846 1221 0.17335 518 | 1862 848 0.16074 519 | 771 1500 0.17284 520 | 2699 2896 0.884821239196304 521 | 927 736 0.19564 522 | 1545 1301 0.15468 523 | 1993 2033 0.15433 524 | 2309 1695 0.12515 525 | 1328 957 0.17567 526 | 2369 1303 0.12922 527 | 849 1980 0.1687 528 | 2824 2921 0.34567509045915007 529 | 1713 1594 0.13575 530 | 2353 728 0.14889 531 | 1086 2613 0.14184 532 | 2281 993 0.12187 533 | 1356 933 0.13306 534 | 348 247 0.22535 535 | 183 605 0.36111 536 | 1496 1848 0.16288 537 | 1765 1276 0.12221 538 | 444 312 0.19565 539 | 2469 2359 0.17899 540 | 1151 2488 0.16448 541 | 1457 2046 0.15843 542 | 206 82 0.42857 543 | 573 477 0.35802 544 | 1849 2289 0.17112 545 | 1424 2105 0.13869 546 | 892 1036 0.11717 547 | 2491 1891 0.12215 548 | 1994 1195 0.093365 549 | 1599 1458 0.10883 550 | 811 754 0.17066 551 | 1090 1357 0.14042 552 | 2953 2727 0.31979520516519394 553 | 1815 1908 0.13756 554 | 813 985 0.21006 555 | 934 2579 0.17992 556 | 1890 2402 0.13445 557 | 1180 1927 0.14224 558 | 1129 803 0.19407 559 | 873 1248 0.1756 560 | 1730 1080 0.15156 561 | 1346 1278 0.16792 562 | 1228 970 0.17211 563 | 789 1907 0.1378 564 | 1383 2286 0.13123 565 | 2481 1819 0.16271 566 | 2443 1041 0.13395 567 | 978 2192 0.1519 568 | 948 931 0.16814 569 | 2339 1934 0.14577 570 | 1277 1502 0.17888 571 | 2572 977 0.13649 572 | 2069 976 0.16933 573 | 1936 2052 0.16157 574 | 1124 1095 0.132 575 | 2195 1049 0.14697 576 | 1794 2283 0.13115 577 | 441 212 0.18868 578 | 765 2452 0.15071 579 | 2303 1760 0.1765 580 | 2246 1679 0.12003 581 | 1909 2522 0.13061 582 | 2501 1317 0.11925 583 | 2529 1215 0.16999 584 | 335 82 0.3956 585 | 2242 1005 0.16207 586 | 1826 2107 0.15376 587 | 2560 969 0.17921 588 | 2319 1442 0.12144 589 | 2500 2082 0.16963 590 | 2522 1242 0.18186 591 | 524 
603 0.26154 592 | 1863 2083 0.17881 593 | 1455 1416 0.1369 594 | 1565 1951 0.15537 595 | 1032 2195 0.12728 596 | 88 80 0.22222 597 | 1660 2345 0.044432 598 | 1774 1275 0.14284 599 | 214 656 0.31343 600 | 1883 1768 0.067212 601 | 1382 1160 0.1682 602 | 1024 2582 0.13925 603 | 1527 2191 0.17476 604 | 2585 1505 0.15137 605 | 2365 955 0.13933 606 | 741 1395 0.16197 607 | 2352 2337 0.092117 608 | 969 809 0.16935 609 | 1702 888 0.10298 610 | 629 427 0.26471 611 | 755 1526 0.1357 612 | 122 114 0.5 613 | 326 124 0.31034 614 | 551 114 0.58904 615 | 1192 1858 0.14254 616 | 2247 822 0.12633 617 | 2164 754 0.13647 618 | 2549 2239 0.088871 619 | 2636 3036 0.623867945976436 620 | 1885 751 0.1419 621 | 2045 1085 0.141 622 | 481 186 0.50562 623 | 1895 1692 0.078638 624 | 1651 1019 0.1055 625 | 1321 2211 0.14568 626 | 2469 1616 0.17315 627 | 1030 2290 0.13971 628 | 1533 1617 0.15364 629 | 2189 1383 0.12735 630 | 739 1193 0.11969 631 | 234 346 0.18518 632 | 1113 1131 0.1625 633 | 1483 1650 0.12072 634 | 662 701 0.27778 635 | 591 464 0.42857 636 | 2209 1902 0.13463 637 | 725 1023 0.099302 638 | 1615 2383 0.11819 639 | 1686 1898 0.17152 640 | 2221 1033 0.18185 641 | 1349 2356 0.11062 642 | 1966 1374 0.18147 643 | 616 636 0.33333 644 | 473 381 0.18182 645 | 2077 2413 0.11413 646 | 2090 887 0.083261 647 | 1556 1321 0.17783 648 | 2096 1561 0.094437 649 | 928 2288 0.16688 650 | 1029 1765 0.20595 651 | 1626 1221 0.13067 652 | 158 294 0.26374 653 | 480 255 0.15152 654 | 992 2456 0.11897 655 | 2325 2084 0.16819 656 | 1199 1811 0.16279 657 | 478 430 0.375 658 | 618 625 0.3617 659 | 3041 2884 0.17277350266979608 660 | 900 2355 0.15288 661 | 1191 2087 0.16729 662 | 1199 841 0.18987 663 | 1306 1938 0.18323 664 | 896 2331 0.19374 665 | 1922 1317 0.16953 666 | 629 654 0.15714 667 | 1401 1891 0.40213 668 | 2085 2139 0.14779 669 | 2139 1113 0.10401 670 | 2297 2559 0.12919 671 | 1659 2219 0.12699 672 | 2146 1833 0.11224 673 | 2884 3026 0.09262807270192075 674 | 989 733 0.19224 675 | 1368 1379 0.12466 676 | 831 1619 0.16736 677 | 2066 860 0.16316 678 | 2422 2568 0.16649 679 | 1553 2435 0.15042 680 | 1438 2223 0.15954 681 | 2068 1847 0.1762 682 | 783 1515 0.1699 683 | 2465 2331 0.11825 684 | 1252 1392 0.1554 685 | 2450 2087 0.16496 686 | 1513 2602 0.098489 687 | 744 2430 0.12656 688 | 1615 1826 0.12765 689 | 747 968 0.17669 690 | 914 1888 0.16732 691 | 1104 1078 0.16528 692 | 1797 1854 0.16784 693 | 1735 1394 0.19113 694 | 2447 2559 0.17157 695 | 290 138 0.30208 696 | 2272 2614 0.18464 697 | 2725 3011 0.3908620690938646 698 | 2163 2331 0.1334 699 | 1748 1958 0.16791 700 | 921 1854 0.15118 701 | 1104 1409 0.16027 702 | 1263 2577 0.16395 703 | 2498 1813 0.12336 704 | 1435 755 0.16075 705 | 2585 1669 0.079844 706 | 1352 831 0.10823 707 | 2421 1266 0.17479 708 | 2620 2653 0.17782649249528767 709 | 2522 1296 0.16691 710 | 1267 849 0.14675 711 | 1198 2032 0.16339 712 | 1470 2016 0.16295 713 | 2403 2441 0.18213 714 | 2916 2827 0.23854051184960684 715 | 937 1682 0.1311 716 | 854 1585 0.18456 717 | 19 168 0.47059 718 | 1693 1480 0.13215 719 | 938 1452 0.16663 720 | 1397 1508 0.10295 721 | 1476 1697 0.061506 722 | 780 1007 0.14606 723 | 887 1078 0.095563 724 | 2075 2541 0.17588 725 | 540 267 0.43396 726 | 1130 2413 0.08041 727 | 651 474 0.32895 728 | 1035 1250 0.12217 729 | 1112 2062 0.10122 730 | 2353 2094 0.10257 731 | 1054 2431 0.1779 732 | 1518 1059 0.17621 733 | 1888 1302 0.18513 734 | 2344 2415 0.14719 735 | 1866 952 0.16722 736 | 697 47 0.08 737 | 1048 1981 0.18588 738 | 1132 1061 0.16966 739 | 822 883 0.15952 740 | 2486 2322 
0.1594 741 | 739 1369 0.098603 742 | 2333 2531 0.14972 743 | 2449 1283 0.19061 744 | 225 620 0.33333 745 | 506 292 0.40625 746 | 1670 1017 0.14999 747 | 1866 1646 0.15084 748 | 1354 1287 0.11776 749 | 659 25 0.31818 750 | 60 136 0.3 751 | 1448 1060 0.11837 752 | 2451 2601 0.22563 753 | 865 1716 0.12804 754 | 2602 1507 0.13206 755 | 2374 1724 0.16883 756 | 1293 2193 0.1537 757 | 2009 2108 0.14441 758 | 2566 957 0.080619 759 | 2517 1532 0.16851 760 | 58 688 0.29412 761 | 2112 2123 0.13343 762 | 2195 2552 0.10108 763 | 11 636 0.18367 764 | 2660 2713 0.8979417523930201 765 | 809 2481 0.097208 766 | 1272 1726 0.15174 767 | 1538 2574 0.16195 768 | 2388 2454 0.1257 769 | 2898 2839 0.30935833596699547 770 | 1131 1687 0.11346 771 | 1649 2280 0.18246 772 | 2013 2028 0.11778 773 | 139 173 0.28985 774 | 2408 1932 0.17665 775 | 492 365 0.38461 776 | 1181 1772 0.17554 777 | 2972 2898 0.31024700199322824 778 | 354 403 0.24 779 | 2335 957 0.11916 780 | 2210 1555 0.16656 781 | 2928 2754 0.8158495916162358 782 | 1415 2229 0.13649 783 | 606 203 0.25373 784 | 1142 2225 0.17845 785 | 1061 1087 0.10574 786 | 1817 984 0.093745 787 | 854 886 0.15175 788 | 1733 792 0.13429 789 | 1664 1828 0.11987 790 | 2454 2487 0.16786 791 | 1780 1440 0.082744 792 | 1096 1111 0.1879 793 | 82 219 0.33333 794 | 1932 2369 0.16477 795 | 79 628 0.37705 796 | 753 1363 0.14297 797 | 910 1508 0.09293 798 | 927 1843 0.20321 799 | 1138 1379 0.090069 800 | 1079 2215 0.17314 801 | 541 232 0.18518 802 | 317 234 0.24359 803 | 2574 1398 0.16417 804 | 1549 1011 0.15212 805 | 1003 1062 0.11879 806 | 1491 2211 0.1714 807 | 2453 1067 0.15163 808 | 1400 1399 0.18355 809 | 30 17 0.49152 810 | 605 504 0.27848 811 | 536 495 0.28421 812 | 2135 1218 0.10574 813 | 610 180 0.57143 814 | 1307 1223 0.090868 815 | 2428 2071 0.17309 816 | 2115 1381 0.098949 817 | 899 2362 0.12745 818 | 1720 895 0.12041 819 | 1256 2004 0.16801 820 | 759 1946 0.091699 821 | 1175 865 0.1045 822 | 1513 742 0.10358 823 | 2422 2385 0.19005 824 | 1366 2108 0.14422 825 | 2347 1708 0.17659 826 | 2564 1503 0.13313 827 | 1694 1160 0.086165 828 | 744 1942 0.17171 829 | 2406 2105 0.14919 830 | 1721 1212 0.13675 831 | 1892 2408 0.16131 832 | 1673 1823 0.1359 833 | 850 1493 0.11992 834 | 786 2306 0.13774 835 | 2604 2461 0.16381 836 | 1463 1924 0.15383 837 | 1237 2585 0.15499 838 | 527 427 0.25758 839 | 3041 2910 0.17265813191998505 840 | 606 647 0.23214 841 | 2965 2666 0.8177753425440034 842 | 2348 1648 0.15453 843 | 710 492 0.37037 844 | 2592 914 0.17495 845 | 1662 1694 0.09266 846 | 2450 2083 0.17792 847 | 1976 838 0.14854 848 | 278 125 0.26786 849 | 1507 2321 0.15067 850 | 819 1808 0.11774 851 | 825 1951 0.15948 852 | 2091 1181 0.15879 853 | 1616 1632 0.10498 854 | 1598 1036 0.20187 855 | 2894 2945 0.14003111043432043 856 | 2505 984 0.15708 857 | 2118 2324 0.082911 858 | 2334 1388 0.14611 859 | 1731 1081 0.091192 860 | 2595 2408 0.08671 861 | 2136 2312 0.14069 862 | 2351 1695 0.11563 863 | 43 231 0.22034 864 | 1596 2334 0.11373 865 | 937 2250 0.17165 866 | 2950 2794 0.30445124144150054 867 | 1298 1556 0.15842 868 | 610 195 0.4127 869 | 1338 1787 0.17236 870 | 1202 2282 0.13786 871 | 2211 1826 0.1455 872 | 2056 1446 0.21047 873 | 1191 1310 0.17268 874 | 1488 1971 0.081471 875 | 2395 1109 0.1391 876 | 2452 1789 0.17032 877 | 2334 2539 0.13352 878 | 2830 2790 0.3869695186088237 879 | 2012 1204 0.14664 880 | 198 341 0.23656 881 | 1090 993 0.16606 882 | 2275 2482 0.14031 883 | 1137 2415 0.15367 884 | 324 235 0.20339 885 | 2880 2765 0.9285771654916486 886 | 2192 1056 0.12293 887 | 1247 2280 
0.19853 888 | 1704 880 0.19156 889 | 1868 1155 0.16372 890 | 1045 2407 0.12464 891 | 691 449 0.35135 892 | 1443 1057 0.10712 893 | 1316 2441 0.15179 894 | 112 330 0.35593 895 | 1106 852 0.1572 896 | 1082 1564 0.16992 897 | 478 59 0.46269 898 | 990 2068 0.12057 899 | 2107 1312 0.12735 900 | 724 803 0.1572 901 | 1888 2166 0.16098 902 | 1195 1312 0.1136 903 | 1837 2113 0.13198 904 | 1357 1525 0.17618 905 | 1552 1305 0.15997 906 | 2349 1961 0.1373 907 | 689 390 0.4 908 | 632 691 0.22892 909 | 466 407 0.28947 910 | 1812 1963 0.15415 911 | 2209 986 0.14171 912 | 121 423 0.34375 913 | 2035 1347 0.092082 914 | 2536 2150 0.1522 915 | 1481 2400 0.17985 916 | 1675 1209 0.1147 917 | 1220 2439 0.16194 918 | 2041 2360 0.16592 919 | 1502 1335 0.15638 920 | 2416 1504 0.14554 921 | 1979 1212 0.15857 922 | 1883 2145 0.089645 923 | 1237 946 0.16278 924 | 1440 1502 0.11689 925 | 1318 739 0.10914 926 | 2037 1908 0.15718 927 | 1442 1128 0.16057 928 | 1348 2108 0.1611 929 | 1764 1060 0.12562 930 | 2510 890 0.1687 931 | 1012 953 0.12812 932 | 792 1718 0.16861 933 | 587 628 0.41892 934 | 1562 2571 0.13049 935 | 1511 2023 0.13974 936 | 801 2546 0.16776 937 | 100 435 0.24074 938 | 103 127 0.42029 939 | 1737 1605 0.14344 940 | 2435 1378 0.11988 941 | 1395 2410 0.13306 942 | 1825 1040 0.13682 943 | 1842 1629 0.13705 944 | 1024 2225 0.16934 945 | 558 68 0.28814 946 | 1744 2416 0.11089 947 | 2137 1528 0.16752 948 | 2503 2424 0.18973 949 | 66 180 0.28788 950 | 953 1876 0.12952 951 | 1321 2061 0.21292 952 | 1509 2281 0.10713 953 | 345 254 0.025641 954 | 1831 1307 0.069631 955 | 775 1009 0.1533 956 | 1763 1890 0.14837 957 | 2374 2595 0.077827 958 | 1909 2267 0.18206 959 | 921 1802 0.15192 960 | 1522 1418 0.14302 961 | 684 172 0.54054 962 | 2501 2582 0.14081 963 | 2051 811 0.17249 964 | 1087 1516 0.12718 965 | 1782 1730 0.15252 966 | 1082 984 0.14358 967 | 2299 2593 0.17012 968 | 1776 2210 0.17601 969 | 1403 1653 0.16548 970 | 2569 1952 0.12542 971 | 1295 1492 0.11415 972 | 2167 2601 0.15243 973 | 2611 1413 0.16057 974 | 1909 2300 0.16771 975 | 1265 985 0.16249 976 | 2259 1856 0.17738 977 | 1165 2487 0.16265 978 | 1766 1164 0.17562 979 | 1787 2160 0.085624 980 | 1579 1192 0.14986 981 | 863 1484 0.16759 982 | 1099 756 0.078412 983 | 2121 1507 0.18284 984 | 729 2414 0.13445 985 | 973 2163 0.10278 986 | 2476 1965 0.1519 987 | 1181 1492 0.12398 988 | 293 437 0.43902 989 | 1867 1847 0.15853 990 | 984 1138 0.11405 991 | 1117 2322 0.088198 992 | 2243 1713 0.16735 993 | 210 551 0.55844 994 | 1635 2099 0.1224 995 | 2284 2187 0.15953 996 | 495 523 0.14458 997 | 637 213 0.2027 998 | 1360 1221 0.15412 999 | 2494 1909 0.12647 1000 | 1864 1255 0.17331 1001 | 1616 2226 0.16139 1002 | 786 746 0.15879 1003 | 846 735 0.17036 1004 | 601 245 0.28788 1005 | 167 307 0.45454 1006 | 2233 1945 0.15631 1007 | 2205 2063 0.17592 1008 | 626 643 0.28125 1009 | 2140 1633 0.11079 1010 | 1532 726 0.15607 1011 | 675 660 0.33333 1012 | 2496 1933 0.15699 1013 | 2337 1997 0.19403 1014 | 2342 774 0.092429 1015 | 1202 1609 0.16654 1016 | 1772 2032 0.17213 1017 | 460 219 0.30882 1018 | 1839 1422 0.1617 1019 | 276 629 0.46154 1020 | 1334 1811 0.13479 1021 | 943 2287 0.16733 1022 | 1399 1535 0.18017 1023 | 2995 2892 0.155971925498958 1024 | 1950 1432 0.18287 1025 | 2175 1142 0.12214 1026 | 1645 2048 0.16051 1027 | 1757 1012 0.16488 1028 | 2985 2848 0.10059691477282107 1029 | 1483 2487 0.15354 1030 | 2316 1032 0.16164 1031 | 1916 1490 0.13402 1032 | 1357 1107 0.15574 1033 | 1522 785 0.14664 1034 | 833 1583 0.16102 1035 | 2258 1200 0.17114 1036 | 854 1135 0.1135 1037 
| 1694 737 0.17518 1038 | 1381 1839 0.11782 1039 | 2173 1080 0.11503 1040 | 2105 882 0.17961 1041 | 2103 1317 0.1486 1042 | 1595 2305 0.13686 1043 | 1241 1589 0.17576 1044 | 2657 2968 0.42667029654723276 1045 | 1754 2603 0.17032 1046 | 207 175 0.41935 1047 | 1276 2403 0.11834 1048 | 1304 1341 0.14115 1049 | 947 1907 0.091674 1050 | 1227 1344 0.14138 1051 | 2114 1320 0.17768 1052 | 1237 1531 0.15942 1053 | 2043 1256 0.17465 1054 | 527 608 0.17808 1055 | 908 1650 0.16595 1056 | 2191 2494 0.17222 1057 | 1211 2375 0.12021 1058 | 998 956 0.15955 1059 | 2409 1036 0.081452 1060 | 1760 1411 0.16239 1061 | 2557 1461 0.18892 1062 | 2774 2729 0.5951520344084718 1063 | 1201 837 0.11884 1064 | 2324 1258 0.12481 1065 | 770 2177 0.10093 1066 | 2529 2493 0.18715 1067 | 667 329 0.26316 1068 | 853 1473 0.12302 1069 | 1999 2191 0.18451 1070 | 980 1213 0.17918 1071 | 1153 821 0.13685 1072 | 2590 1378 0.11764 1073 | 988 2321 0.12778 1074 | 989 2317 0.17511 1075 | 2078 2235 0.16927 1076 | 1577 1196 0.18922 1077 | 1949 2565 0.15788 1078 | 871 2482 0.15306 1079 | 76 148 0.36364 1080 | 2548 1278 0.17713 1081 | 1197 2007 0.13811 1082 | 1607 2374 0.08628 1083 | 981 740 0.15781 1084 | 1353 1044 0.096815 1085 | 2332 2394 0.16368 1086 | 2863 2700 0.8890328760046998 1087 | 2532 2593 0.13644 1088 | 819 2491 0.17641 1089 | 2605 1515 0.17323 1090 | 539 411 0.23729 1091 | --------------------------------------------------------------------------------
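The listing above is the raw content of data/SSLdata/SimReg.txt, the label file for the SimReg pretext task: each row holds two integer node indices followed by a real-valued similarity score, which serves as the regression target for that node pair. Below is a minimal loading sketch, not part of the repository; the helper name load_simreg is hypothetical, and it assumes numpy plus the path = '../data/' convention used by the DataProcessing scripts.

import numpy as np

path = '../data/'

def load_simreg(filename=path + 'SSLdata/SimReg.txt'):
    # Hypothetical helper (not part of the repo). Each row of SimReg.txt
    # is "node_i node_j similarity": the first two columns are integer
    # node indices, the third is the real-valued similarity score used
    # as the regression target of the SimReg pretext task.
    data = np.loadtxt(filename)
    pairs = data[:, :2].astype(int)   # (N, 2) node-index pairs
    scores = data[:, 2]               # (N,) similarity regression targets
    return pairs, scores

if __name__ == '__main__':
    pairs, scores = load_simreg()
    print(pairs.shape, scores.min(), scores.max())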