├── README.md
├── architecture.py
├── ckpt
│   └── .gitignore
├── data
│   ├── autism.npz
│   ├── bloodtransfusion.npz
│   ├── cervical_cancer.npz
│   ├── chronic-kidney.npz
│   ├── cryotherapy.npz
│   ├── dermatology.npz
│   ├── echocardiogram.npz
│   ├── foresttype.npz
│   ├── haberman.npz
│   ├── hcc-survival.npz
│   ├── hepatitis.npz
│   ├── horse-colic.npz
│   ├── immunotherapy.npz
│   ├── iris.npz
│   ├── lung-cancer.npz
│   ├── winequality_red.npz
│   ├── winequality_white.npz
│   └── winetype.npz
├── fine_tune.py
├── maml
│   ├── README.md
│   ├── architecture.py
│   ├── fine_tuneMAML.py
│   ├── problem.py
│   ├── test_datasetsMAML.py
│   ├── test_datasetsMAML_finetuned.py
│   └── trainMAML.py
├── models
│   └── .gitignore
├── plot_decision_bounds.py
├── plot_multigaussian.py
├── problem.py
├── results
│   ├── Feat-128-16-100.txt
│   ├── N-128-16-100.txt
│   ├── Sigma-128-16-100.txt
│   ├── auctable.tex
│   ├── auctable10.tex
│   ├── auctable20.tex
│   ├── timings.tex
│   ├── timings16.tex
│   ├── timings24.tex
│   ├── timings4.tex
│   └── timings8.tex
├── test_datasets.py
├── test_multigaussian_sweepDifficulty.py
├── test_multigaussian_sweepFeat.py
├── test_multigaussian_sweepN.py
├── testing.py
├── timing.py
├── train128.py
├── train2_N100.py
├── train2_N20.py
├── train2_baseline.py
├── train2_diff4.py
├── train2_general.py
├── train32.py
└── training_curves
    ├── finetuning-autism.txt
    ├── finetuning-bloodtransfusion.txt
    ├── finetuning-cervical_cancer.txt
    ├── finetuning-chronic-kidney.txt
    ├── finetuning-cryotherapy.txt
    ├── finetuning-dermatology.txt
    ├── finetuning-echocardiogram.txt
    ├── finetuning-foresttype.txt
    ├── finetuning-haberman.txt
    ├── finetuning-hcc-survival.txt
    ├── finetuning-hepatitis.txt
    ├── finetuning-horse-colic.txt
    ├── finetuning-immunotherapy.txt
    ├── finetuning-iris.txt
    ├── finetuning-lung-cancer.txt
    ├── finetuning-winequality_red.txt
    ├── finetuning-winequality_white.txt
    ├── finetuning-winetype.txt
    ├── training128-16-strong.txt
    ├── training128-16-weak.txt
    ├── training128-16.txt
    ├── training2-N100.txt
    ├── training2-N20.txt
    ├── training2-base.txt
    ├── training2-diff4.txt
    ├── training2-general.txt
    └── training32-16.txt
/README.md:
--------------------------------------------------------------------------------
1 | # ClassifierGenerators
2 | Supporting code for the paper 'Learning to generate classifiers'.
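A minimal usage sketch (assuming a checkpoint produced by `train128.py` exists under `models/`, the default `NETSIZE=512`, and a CUDA-capable GPU, since the helpers in `architecture.py` call `.cuda()` internally): the trained network is wrapped in the sklearn-style `NetworkSKL` helper, whose `fit()` only stores the support set and whose `predict_proba()` generates the classifier in forward passes.

```python
import numpy as np
import torch

from architecture import ClassifierGenerator, NetworkSKL

# Rebuild the 128-feature / 16-class model and load trained weights
net = ClassifierGenerator(FEATURES=128, CLASSES=16, NETSIZE=512)
net.load_state_dict(torch.load("models/classifier-generator-128-16.pth"))

# Any of the bundled .npz datasets works; features are projected to 128 internally
data = np.load("data/iris.npz")
x, y = data['x'], data['y'].astype(np.int32)

# Shuffle so the support set contains every class (NetworkSKL infers the
# class count from np.unique of the labels passed to fit)
idx = np.random.permutation(len(y))
x, y = x[idx], y[idx]

clf = NetworkSKL(net, ensemble=30)  # ensemble = number of random projections averaged
clf.fit(x[:100], y[:100])           # stores the support set; no gradient steps
probs = clf.predict_proba(x[100:])  # probabilities over the classes present in y[:100]
```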
3 | -------------------------------------------------------------------------------- /architecture.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from math import * 4 | 5 | import torch 6 | import torch.nn as nn 7 | from torch.nn import Parameter 8 | from torch.nn import functional as F 9 | import torch.optim 10 | from torch.autograd import Variable 11 | 12 | def tovar(x): 13 | return Variable(torch.FloatTensor(x).cuda(), requires_grad = False) 14 | 15 | def toivar(x): 16 | return Variable(torch.LongTensor(x).cuda(), requires_grad = False) 17 | 18 | class Attention(nn.Module): 19 | def __init__(self, Nfield, Nquery, Nkey, Nval): 20 | super(Attention,self).__init__() 21 | 22 | self.field_to_key = nn.Conv1d(Nfield, Nkey, 1) 23 | self.field_to_val = nn.Conv1d(Nfield, Nval, 1) 24 | self.query_to_key = nn.Conv1d(Nquery, Nkey, 1) 25 | 26 | self.nkey = Nkey 27 | self.nval = Nval 28 | 29 | def forward(self, field, query): 30 | s = field.size() 31 | fkeys = self.field_to_key(field) 32 | fvals = self.field_to_val(field) 33 | 34 | hkeys = self.query_to_key(query) # Batch * Key Size * Queries 35 | 36 | z = torch.bmm(fkeys.transpose(1,2), hkeys)/sqrt(self.nkey) 37 | w = torch.exp(torch.clamp(z,-30,30)) # Batch * # Keys * Queries 38 | w = w/(torch.sum(w,1,keepdim=True) + 1e-16) 39 | 40 | y = torch.bmm(fvals, w) # Batch * Val Size * Queries 41 | return y 42 | 43 | class ClassifierGenerator(nn.Module): 44 | def __init__(self, FEATURES, CLASSES, NETSIZE=512): 45 | super(ClassifierGenerator,self).__init__() 46 | 47 | self.FEATURES = FEATURES 48 | self.CLASSES = CLASSES 49 | 50 | self.emb1a = nn.Conv1d(FEATURES,NETSIZE,1) 51 | self.emb2a = nn.Conv1d(NETSIZE,NETSIZE,1) 52 | 53 | self.emb1b = nn.Conv1d(FEATURES+CLASSES,NETSIZE,1) 54 | self.emb2b = nn.Conv1d(NETSIZE,NETSIZE,1) 55 | 56 | self.attn1a = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 57 | self.attn1b = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 58 | self.attn1c = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 59 | self.attn1d = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 60 | 61 | self.emb3a = nn.Conv1d(NETSIZE,NETSIZE,1) 62 | self.emb3b = nn.Conv1d(NETSIZE,NETSIZE,1) 63 | 64 | self.attn2a = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 65 | self.attn2b = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 66 | self.attn2c = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 67 | self.attn2d = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 68 | 69 | self.emb4a = nn.Conv1d(NETSIZE,NETSIZE,1) 70 | self.emb4b = nn.Conv1d(NETSIZE,NETSIZE,1) 71 | 72 | self.attn3a = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 73 | self.attn3b = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 74 | self.attn3c = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 75 | self.attn3d = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 76 | 77 | self.emb5a = nn.Conv1d(NETSIZE,NETSIZE,1) 78 | self.emb5b = nn.Conv1d(NETSIZE,NETSIZE,1) 79 | 80 | self.attn4a = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 81 | self.attn4b = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 82 | self.attn4c = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 83 | self.attn4d = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 84 | 85 | self.emb6 = nn.Conv1d(NETSIZE,NETSIZE,1) 86 | self.emb7 = nn.Conv1d(NETSIZE,NETSIZE,1) 87 | self.emb8 = nn.Conv1d(NETSIZE,NETSIZE,1) 88 | self.emb9 = nn.Conv1d(NETSIZE,CLASSES,1) 89 | 90 | self.adam = torch.optim.Adam(self.parameters(), lr=1e-5) 91 | 92 | def forward(self, mem, test, classes): 93 | ts = test.size() 94 | 95 | mempts = mem.squeeze(1) 96 | testpts = test.squeeze(1) 97 | 98 | # Scaling here 
improves initial training speed 99 | x = 10*F.relu(self.emb2b(F.relu(self.emb1b(mempts)))) 100 | y = 10*F.relu(self.emb2a(F.relu(self.emb1a(testpts)))) 101 | 102 | z1 = self.attn1a(x,x) 103 | z2 = self.attn1b(x,x) 104 | z3 = self.attn1c(x,x) 105 | z4 = self.attn1d(x,x) 106 | 107 | z = torch.cat([z1,z2,z3,z4],1) 108 | x = x + self.emb3b(F.relu(self.emb3a(z))) 109 | 110 | z1 = self.attn2a(x,x) 111 | z2 = self.attn2b(x,x) 112 | z3 = self.attn2c(x,x) 113 | z4 = self.attn2d(x,x) 114 | 115 | z = torch.cat([z1,z2,z3,z4],1) 116 | xm = x + self.emb4b(F.relu(self.emb4a(z))) 117 | 118 | z1 = self.attn3a(xm,y) 119 | z2 = self.attn3b(xm,y) 120 | z3 = self.attn3c(xm,y) 121 | z4 = self.attn3d(xm,y) 122 | 123 | z = torch.cat([z1,z2,z3,z4],1) 124 | 125 | z = y + self.emb5b(F.relu(self.emb5a(z))) 126 | 127 | z1 = self.attn4a(xm,z) 128 | z2 = self.attn4b(xm,z) 129 | z3 = self.attn4c(xm,z) 130 | z4 = self.attn4d(xm,z) 131 | 132 | z = torch.cat([z1,z2,z3,z4],1) 133 | 134 | y = self.emb9(F.relu(self.emb8(F.relu(self.emb7(F.relu(self.emb6(z))))))) 135 | 136 | # Mask out classes that are known to not be present in the dataset 137 | mask = classes.unsqueeze(1).unsqueeze(2).expand(ts[0],self.CLASSES,ts[3]) 138 | idx_y = torch.arange(self.CLASSES).unsqueeze(0).unsqueeze(2).expand(ts[0],self.CLASSES,ts[3]) # created on CPU; moved to the mask's device below 139 | if isinstance(mask, torch.cuda.FloatTensor): 140 | idx_y = idx_y.cuda() 141 | else: 142 | idx_y = idx_y.cpu() 143 | mask = Variable(-30*torch.ge(idx_y, mask).float(), requires_grad=False) 144 | y = y + mask 145 | 146 | return F.log_softmax(y,dim=1) 147 | 148 | # Transform a dataset into the canonical number of features 149 | def normalizeAndProject(xd, NTRAIN, FEATURES): 150 | feat = xd.shape[1] 151 | 152 | # Normalize before and after to prevent features with extreme scale 153 | mu = np.mean(xd[0:NTRAIN],axis=0, keepdims=True) 154 | std = np.std(xd[0:NTRAIN],axis=0, keepdims=True) + 1e-16 155 | xd = (xd-mu)/std 156 | 157 | projection = np.random.randn(feat,FEATURES)/sqrt(FEATURES+feat) 158 | xd = np.matmul(xd,projection) 159 | 160 | mu = np.mean(xd[0:NTRAIN],axis=0, keepdims=True) 161 | std = np.std(xd[0:NTRAIN],axis=0, keepdims=True) + 1e-16 162 | xd = (xd-mu)/std 163 | 164 | return xd 165 | 166 | # Fake SKLearn wrapper for the network 167 | class NetworkSKL(): 168 | def __init__(self, net, ensemble=30, cuda=True): 169 | if cuda: 170 | self.net = net.cuda() 171 | else: 172 | self.net = net.cpu() 173 | self.ensemble = ensemble 174 | self.cuda = cuda 175 | 176 | def fit(self, x, y): 177 | self.x = x 178 | self.y = y 179 | pass 180 | 181 | def predict_proba(self, x): 182 | train_x = self.x 183 | train_y = self.y 184 | test_x = x 185 | net = self.net 186 | ensemble = self.ensemble 187 | 188 | CLASSES = net.CLASSES 189 | FEATURES = net.FEATURES 190 | 191 | # The inferred class count isn't necessarily accurate for training data that doesn't contain one of each class, but we ensure that when making the training/test sets anyhow 192 | classes = np.unique(train_y).shape[0] 193 | 194 | trainlabels = np.zeros((train_x.shape[0],CLASSES)) 195 | x = np.arange(train_x.shape[0]) 196 | trainlabels[x,train_y[x]] = 1 197 | 198 | classtensor = torch.FloatTensor(np.array([classes])) 199 | if self.cuda: 200 | classtensor = classtensor.cuda() 201 | 202 | traindata = [] 203 | testdata = [] 204 | for i in range(ensemble): 205 | # Need to transform everything together to make sure we use the same projection 206 | xd = np.vstack([train_x, test_x]) 207 | xd = normalizeAndProject(xd, train_x.shape[0], FEATURES) 208 | ptrain_x =
xd[0:train_x.shape[0]] 209 | ptest_x = xd[train_x.shape[0]:] 210 | 211 | traindata.append(tovar(np.hstack([ptrain_x,trainlabels]).reshape((1,1,train_x.shape[0],FEATURES+CLASSES)).transpose(0,1,3,2))) 212 | testdata.append(tovar(ptest_x.reshape((1,1,ptest_x.shape[0],FEATURES)).transpose(0,1,3,2))) 213 | 214 | traindata = torch.cat(traindata,0) 215 | testdata = torch.cat(testdata,0) 216 | 217 | if self.cuda: 218 | traindata = traindata.cuda() 219 | testdata = testdata.cuda() 220 | else: 221 | traindata = traindata.cpu() 222 | testdata = testdata.cpu() 223 | 224 | preds = np.exp(net.forward(traindata, testdata, classes=classtensor).cpu().data.numpy()).mean(axis=0) 225 | 226 | # We need to strictly project to the right number of classes and maintain probabilities 227 | preds = preds.transpose(1,0)[:,:classes] 228 | preds = preds/np.sum(preds,axis=1,keepdims=True) 229 | return preds 230 | -------------------------------------------------------------------------------- /ckpt/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore -------------------------------------------------------------------------------- /data/autism.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/autism.npz -------------------------------------------------------------------------------- /data/bloodtransfusion.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/bloodtransfusion.npz -------------------------------------------------------------------------------- /data/cervical_cancer.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/cervical_cancer.npz -------------------------------------------------------------------------------- /data/chronic-kidney.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/chronic-kidney.npz -------------------------------------------------------------------------------- /data/cryotherapy.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/cryotherapy.npz -------------------------------------------------------------------------------- /data/dermatology.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/dermatology.npz -------------------------------------------------------------------------------- /data/echocardiogram.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/echocardiogram.npz -------------------------------------------------------------------------------- /data/foresttype.npz: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/foresttype.npz -------------------------------------------------------------------------------- /data/haberman.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/haberman.npz -------------------------------------------------------------------------------- /data/hcc-survival.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/hcc-survival.npz -------------------------------------------------------------------------------- /data/hepatitis.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/hepatitis.npz -------------------------------------------------------------------------------- /data/horse-colic.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/horse-colic.npz -------------------------------------------------------------------------------- /data/immunotherapy.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/immunotherapy.npz -------------------------------------------------------------------------------- /data/iris.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/iris.npz -------------------------------------------------------------------------------- /data/lung-cancer.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/lung-cancer.npz -------------------------------------------------------------------------------- /data/winequality_red.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/winequality_red.npz -------------------------------------------------------------------------------- /data/winequality_white.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/winequality_white.npz -------------------------------------------------------------------------------- /data/winetype.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/arayabrain/ClassifierGenerators/ffb3b2ea68e415a1594a0956c669dc6789943d4d/data/winetype.npz -------------------------------------------------------------------------------- /fine_tune.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | 4 | import sys 5 | 6 | from math import * 7 | 8 | import 
torch 9 | import torch.nn as nn 10 | from torch.nn import Parameter 11 | from torch.nn import functional as F 12 | import torch.optim 13 | from torch.autograd import Variable 14 | 15 | import time 16 | import copy 17 | 18 | import glob 19 | 20 | from architecture import ClassifierGenerator, NetworkSKL, tovar, toivar, normalizeAndProject 21 | from problem import problemGenerator 22 | from testing import evalClassifier, compareMethodsOnSet 23 | 24 | def trainingStep(net, NTRAIN, NTEST, data_x, data_y, BS = 200): 25 | FEATURES = net.FEATURES 26 | CLASSES = net.CLASSES 27 | 28 | net.zero_grad() 29 | batch_mem = [] 30 | batch_test = [] 31 | batch_label = [] 32 | class_count = [] 33 | 34 | for i in range(BS): 35 | j = np.random.randint(len(data_x)) 36 | feat = data_x[j].shape[1] 37 | classes = np.unique(data_y[j]).shape[0] 38 | 39 | xd = data_x[j].copy() 40 | 41 | # Data augmentation 42 | f_idx = np.random.permutation(feat) 43 | xd = xd[:,f_idx] 44 | 45 | c_idx = np.random.permutation(classes) 46 | yd = np.zeros((data_y[j].shape[0], classes)) 47 | yd[np.arange(data_y[j].shape[0]), c_idx[data_y[j][np.arange(data_y[j].shape[0])]]] = 1 48 | 49 | idx = np.random.permutation(xd.shape[0]) 50 | xd = xd[idx] 51 | yd = yd[idx] 52 | 53 | if classes=120: 101 | tdx.append(data_x[didx2]) 102 | tdy.append(data_y[didx2]) 103 | 104 | for i in range(20): 105 | err = trainingStep(net, 100, 20, tdx, tdy) 106 | f = open("training_curves/finetuning-%s.txt" % data_names[didx], "a") 107 | f.write("%d %.6g\n" % (i, err)) 108 | f.close() 109 | 110 | torch.save(net.state_dict(), open("models/classifier-generator-128-16-%s.pth" % data_names[didx], "wb")) 111 | -------------------------------------------------------------------------------- /maml/README.md: -------------------------------------------------------------------------------- 1 | This folder contains the code for the MAML benchmark. It requires the 2 | same subdirectory structure as the root directory of this distribution - 3 | specifically, the data/, ckpt/, and results/ subdirectories, with the 4 | small datasets for testing included in data/. 5 | 6 | The model is trained using trainMAML.py, and the table of results for 7 | the benchmark is generated with test_datasetsMAML.py. Currently, these 8 | must be merged by hand to generate the table in our paper. 
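The trained model can also be queried directly through the sklearn-style `MAMLSKL` wrapper in this folder's `architecture.py`. A minimal sketch (assuming the `maml-128-16.pth` checkpoint written by `trainMAML.py`, and that `NETSIZE` below matches whatever was used for training - a mismatch will make `load_state_dict` fail):

```python
import numpy as np
import torch

from architecture import MAMLNet, MAMLSKL

net = MAMLNet(FEATURES=128, CLASSES=16, NETSIZE=128).cuda()
net.load_state_dict(torch.load("maml-128-16.pth"))

data = np.load("data/iris.npz")
x, y = data['x'], data['y'].astype(np.int32)
idx = np.random.permutation(len(y))  # shuffle so the support set covers every class
x, y = x[idx], y[idx]

clf = MAMLSKL(net, ensemble=30)     # averages predictions over 30 random projections
clf.fit(x[:100], y[:100])           # stores the support set
probs = clf.predict_proba(x[100:])  # adapts via net.steps inner-loop updates, then predicts
```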
-------------------------------------------------------------------------------- /maml/architecture.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from math import * 4 | 5 | import torch 6 | import torch.nn as nn 7 | from torch.nn import Parameter 8 | from torch.nn import functional as F 9 | import torch.optim 10 | from torch.autograd import Variable 11 | 12 | def tovar(x): 13 | return Variable(torch.FloatTensor(x).cuda(), requires_grad = False) 14 | 15 | def toivar(x): 16 | return Variable(torch.LongTensor(x).cuda(), requires_grad = False) 17 | 18 | class Attention(nn.Module): 19 | def __init__(self, Nfield, Nquery, Nkey, Nval): 20 | super(Attention,self).__init__() 21 | 22 | self.field_to_key = nn.Conv1d(Nfield, Nkey, 1) 23 | self.field_to_val = nn.Conv1d(Nfield, Nval, 1) 24 | self.query_to_key = nn.Conv1d(Nquery, Nkey, 1) 25 | 26 | self.nkey = Nkey 27 | self.nval = Nval 28 | 29 | def forward(self, field, query): 30 | s = field.size() 31 | fkeys = self.field_to_key(field) 32 | fvals = self.field_to_val(field) 33 | 34 | hkeys = self.query_to_key(query) # Batch * Key Size * Queries 35 | 36 | z = torch.bmm(fkeys.transpose(1,2), hkeys)/sqrt(self.nkey) 37 | w = torch.exp(torch.clamp(z,-30,30)) # Batch * # Keys * Queries 38 | w = w/(torch.sum(w,1,keepdim=True) + 1e-16) 39 | 40 | y = torch.bmm(fvals, w) # Batch * Val Size * Queries 41 | return y 42 | 43 | class ClassifierGenerator(nn.Module): 44 | def __init__(self, FEATURES, CLASSES, NETSIZE=512): 45 | super(ClassifierGenerator,self).__init__() 46 | 47 | self.FEATURES = FEATURES 48 | self.CLASSES = CLASSES 49 | 50 | self.emb1a = nn.Conv1d(FEATURES,NETSIZE,1) 51 | self.emb2a = nn.Conv1d(NETSIZE,NETSIZE,1) 52 | 53 | self.emb1b = nn.Conv1d(FEATURES+CLASSES,NETSIZE,1) 54 | self.emb2b = nn.Conv1d(NETSIZE,NETSIZE,1) 55 | 56 | self.attn1a = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 57 | self.attn1b = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 58 | self.attn1c = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 59 | self.attn1d = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 60 | 61 | self.emb3a = nn.Conv1d(NETSIZE,NETSIZE,1) 62 | self.emb3b = nn.Conv1d(NETSIZE,NETSIZE,1) 63 | 64 | self.attn2a = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 65 | self.attn2b = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 66 | self.attn2c = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 67 | self.attn2d = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 68 | 69 | self.emb4a = nn.Conv1d(NETSIZE,NETSIZE,1) 70 | self.emb4b = nn.Conv1d(NETSIZE,NETSIZE,1) 71 | 72 | self.attn3a = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 73 | self.attn3b = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 74 | self.attn3c = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 75 | self.attn3d = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 76 | 77 | self.emb5a = nn.Conv1d(NETSIZE,NETSIZE,1) 78 | self.emb5b = nn.Conv1d(NETSIZE,NETSIZE,1) 79 | 80 | self.attn4a = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 81 | self.attn4b = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 82 | self.attn4c = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 83 | self.attn4d = Attention(NETSIZE,NETSIZE,32,NETSIZE//4) 84 | 85 | self.emb6 = nn.Conv1d(NETSIZE,NETSIZE,1) 86 | self.emb7 = nn.Conv1d(NETSIZE,NETSIZE,1) 87 | self.emb8 = nn.Conv1d(NETSIZE,NETSIZE,1) 88 | self.emb9 = nn.Conv1d(NETSIZE,CLASSES,1) 89 | 90 | self.adam = torch.optim.Adam(self.parameters(), lr=1e-5) 91 | 92 | def forward(self, mem, test, classes): 93 | ts = test.size() 94 | 95 | mempts = mem.squeeze(1) 96 | testpts = test.squeeze(1) 97 | 98 | # Scaling here 
improves initial training speed 99 | x = 10*F.relu(self.emb2b(F.relu(self.emb1b(mempts)))) 100 | y = 10*F.relu(self.emb2a(F.relu(self.emb1a(testpts)))) 101 | 102 | z1 = self.attn1a(x,x) 103 | z2 = self.attn1b(x,x) 104 | z3 = self.attn1c(x,x) 105 | z4 = self.attn1d(x,x) 106 | 107 | z = torch.cat([z1,z2,z3,z4],1) 108 | x = x + self.emb3b(F.relu(self.emb3a(z))) 109 | 110 | z1 = self.attn2a(x,x) 111 | z2 = self.attn2b(x,x) 112 | z3 = self.attn2c(x,x) 113 | z4 = self.attn2d(x,x) 114 | 115 | z = torch.cat([z1,z2,z3,z4],1) 116 | xm = x + self.emb4b(F.relu(self.emb4a(z))) 117 | 118 | z1 = self.attn3a(xm,y) 119 | z2 = self.attn3b(xm,y) 120 | z3 = self.attn3c(xm,y) 121 | z4 = self.attn3d(xm,y) 122 | 123 | z = torch.cat([z1,z2,z3,z4],1) 124 | 125 | z = y + self.emb5b(F.relu(self.emb5a(z))) 126 | 127 | z1 = self.attn4a(xm,z) 128 | z2 = self.attn4b(xm,z) 129 | z3 = self.attn4c(xm,z) 130 | z4 = self.attn4d(xm,z) 131 | 132 | z = torch.cat([z1,z2,z3,z4],1) 133 | 134 | y = self.emb9(F.relu(self.emb8(F.relu(self.emb7(F.relu(self.emb6(z))))))) 135 | 136 | # Mask out classes that are known to not be present in the dataset 137 | mask = classes.unsqueeze(1).unsqueeze(2).expand(ts[0],self.CLASSES,ts[3]) 138 | idx_y = torch.arange(self.CLASSES).cuda().unsqueeze(0).unsqueeze(2).expand(ts[0],self.CLASSES,ts[3]) 139 | 140 | mask = Variable(-30*torch.ge(idx_y, mask).float(), requires_grad=False) 141 | y = y + mask 142 | 143 | return F.log_softmax(y,dim=1) 144 | 145 | class metaLinear(nn.Module): 146 | def __init__(self, Nin, Nout): 147 | super(metaLinear,self).__init__() 148 | 149 | self.betaW = Parameter(torch.FloatTensor(np.ones(1)*1e-2)) 150 | self.betaB = Parameter(torch.FloatTensor(np.ones(1)*1e-2)) 151 | 152 | self.layer = nn.Linear(Nin, Nout) 153 | 154 | def forward(self, x, meta=False): 155 | if meta: 156 | return F.linear(x, 157 | weight=self.layer.weight + self.dweight, 158 | bias=self.layer.bias + self.dbias) 159 | else: 160 | self.dweight = torch.zeros_like(self.layer.weight) 161 | self.dbias = torch.zeros_like(self.layer.bias) 162 | 163 | return F.linear(x, 164 | weight=self.layer.weight, 165 | bias=self.layer.bias) 166 | 167 | def update(self, loss): 168 | gradW, gradB = torch.autograd.grad([loss], [self.layer.weight, self.layer.bias], 169 | retain_graph=True, create_graph=True) 170 | #gradW = Variable(gradW.data) 171 | #gradB = Variable(gradB.data) 172 | self.dweight = (self.dweight - self.betaW[0] * gradW) 173 | self.dbias = (self.dbias - self.betaB[0] * gradB) 174 | 175 | class MAMLNet(nn.Module): 176 | def __init__(self, FEATURES, CLASSES, NETSIZE=128): 177 | super(MAMLNet,self).__init__() 178 | 179 | self.l1 = metaLinear(FEATURES,NETSIZE) 180 | self.l2 = metaLinear(NETSIZE,NETSIZE) 181 | self.l3 = metaLinear(NETSIZE,NETSIZE) 182 | self.l4 = metaLinear(NETSIZE,NETSIZE) 183 | self.l5 = metaLinear(NETSIZE,CLASSES) 184 | 185 | self.steps = 1 186 | self.CLASSES = CLASSES 187 | self.FEATURES = FEATURES 188 | self.adam = torch.optim.Adam(self.parameters(), lr=5e-5) 189 | 190 | def loss(self, p, y): 191 | idx = torch.arange(p.size()[0]).long() 192 | 193 | return -torch.mean(p[idx,y[idx]]) 194 | 195 | def forward(self, x, classes, meta=False): 196 | z = F.relu(self.l1(x,meta)) 197 | z = F.relu(self.l2(z,meta)) 198 | z = F.relu(self.l3(z,meta)) 199 | z = F.relu(self.l4(z,meta)) 200 | z = self.l5(z,meta) 201 | z[:,classes:] -= 30 202 | 203 | z = F.log_softmax(z,dim=1) 204 | 205 | return z 206 | 207 | def update(self, loss): 208 | for module in self.modules(): 209 | if type(module) == metaLinear: 210 | 
module.update(loss) 211 | 212 | def fullpass(self, x_train, y_train, x_test, classes): 213 | p_train = self.forward(x_train, classes, meta=False) 214 | loss = self.loss(p_train, y_train) 215 | self.update(loss) 216 | 217 | for i in range(self.steps-1): 218 | p_train = self.forward(x_train, classes, meta=True) 219 | loss = self.loss(p_train, y_train) 220 | self.update(loss) 221 | 222 | p_test = self.forward(x_test, classes, meta=True) 223 | 224 | return p_test 225 | 226 | # Transform a dataset into the canonical number of features 227 | def normalizeAndProject(xd, NTRAIN, FEATURES): 228 | feat = xd.shape[1] 229 | txd = tovar(xd) 230 | mu = torch.mean(txd[0:NTRAIN],0,keepdim=True) 231 | std = torch.std(txd[0:NTRAIN],0,keepdim=True) + 1e-6 232 | txd = (txd-mu)/std 233 | 234 | projection = torch.zeros(feat,FEATURES).cuda() 235 | projection.normal_() 236 | projection = projection / sqrt(FEATURES+feat) 237 | txd = torch.matmul(txd, projection) 238 | 239 | # Normalize before and after to prevent features with extreme scale 240 | mu = torch.mean(txd[0:NTRAIN],0,keepdim=True) 241 | std = torch.std(txd[0:NTRAIN],0,keepdim=True) + 1e-6 242 | txd = (txd-mu)/std 243 | 244 | return txd.cpu().detach().numpy() 245 | 246 | # Fake SKLearn wrapper for the network 247 | class NetworkSKL(): 248 | def __init__(self, net, ensemble=30): 249 | self.net = net 250 | self.ensemble = ensemble 251 | 252 | def fit(self, x, y): 253 | self.x = x 254 | self.y = y 255 | pass 256 | 257 | def predict_proba(self, x): 258 | train_x = self.x 259 | train_y = self.y 260 | test_x = x 261 | net = self.net 262 | ensemble = self.ensemble 263 | 264 | CLASSES = net.CLASSES 265 | FEATURES = net.FEATURES 266 | 267 | # This isn't necessarily accurate, for training data that doesn't contain one of each class, but we ensure that when making the training/test sets anyhow 268 | classes = np.unique(train_y).shape[0] 269 | 270 | trainlabels = np.zeros((train_x.shape[0],CLASSES)) 271 | x = np.arange(train_x.shape[0]) 272 | trainlabels[x,train_y[x]] = 1 273 | 274 | classtensor = torch.cuda.FloatTensor(np.array([classes])) 275 | 276 | traindata = [] 277 | testdata = [] 278 | for i in range(ensemble): 279 | # Need to transform everything together to make sure we use the same projection 280 | xd = np.vstack([train_x, test_x]) 281 | xd = normalizeAndProject(xd, train_x.shape[0], FEATURES) 282 | ptrain_x = xd[0:train_x.shape[0]] 283 | ptest_x = xd[train_x.shape[0]:] 284 | 285 | traindata.append(tovar(np.hstack([ptrain_x,trainlabels]).reshape((1,1,train_x.shape[0],FEATURES+CLASSES)).transpose(0,1,3,2))) 286 | testdata.append(tovar(ptest_x.reshape((1,1,ptest_x.shape[0],FEATURES)).transpose(0,1,3,2))) 287 | 288 | traindata = torch.cat(traindata,0) 289 | testdata = torch.cat(testdata,0) 290 | 291 | preds = np.exp(net.forward(traindata, testdata, classes=classtensor).cpu().data.numpy()).mean(axis=0) 292 | 293 | # We need to strictly project to the right number of classes and maintain probabilities 294 | preds = preds.transpose(1,0)[:,:classes] 295 | preds = preds/np.sum(preds,axis=1,keepdims=True) 296 | return preds 297 | 298 | # Fake SKLearn wrapper for the MAML network 299 | class MAMLSKL(): 300 | def __init__(self, net, ensemble=30): 301 | self.net = net 302 | self.ensemble = ensemble 303 | 304 | def fit(self, x, y): 305 | self.x = x 306 | self.y = y 307 | pass 308 | 309 | def predict_proba(self, x): 310 | train_x = self.x 311 | train_y = self.y 312 | test_x = x 313 | net = self.net 314 | ensemble = self.ensemble 315 | 316 | CLASSES = net.CLASSES 317 | 
FEATURES = net.FEATURES 318 | 319 | # This isn't necessarily accurate, for training data that doesn't contain one of each class, but we ensure that when making the training/test sets anyhow 320 | classes = np.unique(train_y).shape[0] 321 | 322 | traindata = [] 323 | testdata = [] 324 | preds = [] 325 | for i in range(ensemble): 326 | # Need to transform everything together to make sure we use the same projection 327 | xd = np.vstack([train_x, test_x]) 328 | xd = normalizeAndProject(xd, train_x.shape[0], FEATURES) 329 | ptrain_x = tovar(xd[0:train_x.shape[0]]) 330 | ptest_x = tovar(xd[train_x.shape[0]:]) 331 | 332 | p = np.clip(net.fullpass(ptrain_x, toivar(train_y), ptest_x, classes).cpu().data.numpy(),-50,0) 333 | if np.sum(np.isnan(p))>0: 334 | print(xd[0]) 335 | preds.append(np.exp(p)) 336 | 337 | preds = np.array(preds).mean(axis=0) 338 | # We need to strictly project to the right number of classes and maintain probabilities 339 | preds = preds[:,:classes] 340 | preds = preds/(np.sum(preds,axis=1,keepdims=True)+1e-8) 341 | 342 | return preds 343 | -------------------------------------------------------------------------------- /maml/fine_tuneMAML.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | 4 | import sys 5 | 6 | from math import * 7 | 8 | import torch 9 | import torch.nn as nn 10 | from torch.nn import Parameter 11 | from torch.nn import functional as F 12 | import torch.optim 13 | from torch.autograd import Variable 14 | 15 | import time 16 | import copy 17 | 18 | import glob 19 | 20 | from architecture import MAMLNet, MAMLSKL, tovar, toivar, normalizeAndProject 21 | from problem import problemGenerator 22 | from testing import evalClassifier, compareMethodsOnSet 23 | 24 | def trainingStep(net, NTRAIN, NTEST, data_x, data_y, BS=200): 25 | FEATURES = net.FEATURES 26 | CLASSES = net.CLASSES 27 | 28 | net.zero_grad() 29 | err = [] 30 | 31 | for i in range(BS): 32 | j = np.random.randint(len(data_x)) 33 | feat = data_x[j].shape[1] 34 | classes = np.unique(data_y[j]).shape[0] 35 | 36 | xd = data_x[j].copy() 37 | 38 | # Data augmentation 39 | f_idx = np.random.permutation(feat) 40 | xd = xd[:,f_idx] 41 | 42 | c_idx = np.random.permutation(classes) 43 | yd = np.zeros((data_y[j].shape[0], classes)) 44 | yd[np.arange(data_y[j].shape[0]), c_idx[data_y[j][np.arange(data_y[j].shape[0])]]] = 1 45 | 46 | idx = np.random.permutation(xd.shape[0]) 47 | xd = xd[idx] 48 | yd = yd[idx] 49 | 50 | if classes=120: 95 | tdx.append(data_x[didx2]) 96 | tdy.append(data_y[didx2]) 97 | 98 | ecol = 0 99 | for i in range(500): 100 | err = trainingStep(net, 100, 20, tdx, tdy) 101 | ecol = ecol + err 102 | if i%10 == 9: 103 | methods = [lambda: MAMLSKL(net)] 104 | results1 = compareMethodsOnSet(methods, echocardio['x'], echocardio['y'].astype(np.int32), samples=20) 105 | auc1 = results1[0][1] 106 | results2 = compareMethodsOnSet(methods, bloodtransfusion['x'], bloodtransfusion['y'].astype(np.int32), samples=20) 107 | auc2 = results2[0][1] 108 | results3 = compareMethodsOnSet(methods, autism['x'], autism['y'].astype(np.int32), samples=20) 109 | auc3 = results3[0][1] 110 | 111 | f = open("finetuning-%s.txt" % data_names[didx], "a") 112 | f.write("%d %.6g %.6g %.6g %.6g\n" % (i, ecol/10.0, auc1, auc2, auc3)) 113 | f.close() 114 | ecol = 0 115 | 116 | torch.save(net.state_dict(), open("maml-%s.pth" % data_names[didx], "wb")) 117 | -------------------------------------------------------------------------------- 
/maml/problem.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from math import * 4 | 5 | import torch 6 | import torch.nn as nn 7 | from torch.nn import Parameter 8 | from torch.nn import functional as F 9 | import torch.optim 10 | from torch.autograd import Variable 11 | 12 | def problemGenerator(N=200, CLASSES = 8, FEATURES = 8, sigma = 1.0, sparseness = 0, imbalance = 0): 13 | rclass = np.random.randn(CLASSES) 14 | pclass = np.exp(-imbalance*rclass) 15 | pclass = pclass/np.sum(pclass) 16 | 17 | covariances = torch.FloatTensor(CLASSES, FEATURES, FEATURES).cuda().normal_() * sigma 18 | means = torch.FloatTensor(CLASSES, FEATURES).cuda().normal_() 19 | 20 | for i in range(FEATURES): 21 | if np.random.rand()=20: 85 | f.write("& 10 ") 86 | results10 = np.array(compareMethodsOnSet(methods, data_x, data_y, N=10, samples=800)) 87 | stdev = np.mean(results10[:,3]) 88 | maxval = np.max(results10[:,1]) 89 | f.write("& %.3g " % stdev) 90 | for i in range(results10.shape[0]): 91 | if abs(maxval-results10[i,1])=60: 99 | f.write("& 50 ") 100 | results50 = np.array(compareMethodsOnSet(methods, data_x, data_y, N=50, samples=800)) 101 | stdev = np.mean(results50[:,3]) 102 | maxval = np.max(results50[:,1]) 103 | f.write("& %.3g " % stdev) 104 | for i in range(results50.shape[0]): 105 | if abs(maxval-results50[i,1])=20: 84 | f.write("& 10 ") 85 | results10 = np.array(compareMethodsOnSet(methods, data_x, data_y, N=10, samples=800)) 86 | stdev = np.mean(results10[:,3]) 87 | maxval = np.max(results10[:,1]) 88 | f.write("& %.3g " % stdev) 89 | for i in range(results10.shape[0]): 90 | if abs(maxval-results10[i,1])=60: 98 | f.write("& 50 ") 99 | results50 = np.array(compareMethodsOnSet(methods, data_x, data_y, N=50, samples=800)) 100 | stdev = np.mean(results50[:,3]) 101 | maxval = np.max(results50[:,1]) 102 | f.write("& %.3g " % stdev) 103 | for i in range(results50.shape[0]): 104 | if abs(maxval-results50[i,1])= 50: 88 | err = err/err_count 89 | errs.append(err) 90 | 91 | """ 92 | methods = [lambda: MAMLSKL(net)] 93 | results1 = compareMethodsOnSet(methods, echocardio['x'], echocardio['y'].astype(np.int32), samples=200) 94 | auc1 = results1[0][1] 95 | results2 = compareMethodsOnSet(methods, bloodtransfusion['x'], bloodtransfusion['y'].astype(np.int32), samples=200) 96 | auc2 = results2[0][1] 97 | results3 = compareMethodsOnSet(methods, autism['x'], autism['y'].astype(np.int32), samples=200) 98 | auc3 = results3[0][1] 99 | """ 100 | f = open("maml128-16.txt","a") 101 | f.write("%d %.6g %.6g\n" % (i, err, difficulty_level)) 102 | f.close() 103 | 104 | # Curriculum 105 | if err<0.7 and difficulty_level<0.2: 106 | difficulty_level *= 2.0 107 | 108 | err = 0 109 | err_count = 0 110 | 111 | torch.save(net.state_dict(),open("maml-128-16.pth","wb")) 112 | -------------------------------------------------------------------------------- /models/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !.gitignore -------------------------------------------------------------------------------- /plot_decision_bounds.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | 4 | import glob 5 | import sys 6 | 7 | from math import * 8 | 9 | import torch 10 | import torch.nn as nn 11 | from torch.nn import Parameter 12 | from torch.nn import functional as F 13 | import torch.optim 14 | from torch.autograd import Variable 15 | 
16 | import time 17 | import copy 18 | 19 | import seaborn 20 | 21 | from problem import problemGenerator 22 | from architecture import ClassifierGenerator, tovar, toivar 23 | 24 | import warnings 25 | 26 | def fxn(): 27 | warnings.warn("deprecated", DeprecationWarning) 28 | 29 | with warnings.catch_warnings(): 30 | warnings.simplefilter("ignore") 31 | fxn() 32 | 33 | def plotDecisionBoundary(x,y,net): 34 | x = (x-np.mean(x,axis=0,keepdims=True))/(1e-16 + np.std(x,axis=0,keepdims=True)) 35 | 36 | x = x.reshape((1,1,x.shape[0],x.shape[1])).transpose(0,1,3,2) 37 | y = y.reshape((1,1,y.shape[0],y.shape[1])).transpose(0,1,3,2) 38 | 39 | trainset = np.concatenate([x,y],axis=2) 40 | 41 | xx,yy = np.meshgrid(np.arange(-3.0,3.05,0.05), np.arange(-3.0, 3.05, 0.05)) 42 | XR = xx.shape[0] 43 | 44 | xx = xx.reshape((1,1,1,XR*XR)) 45 | yy = yy.reshape((1,1,1,XR*XR)) 46 | 47 | testset = np.concatenate([xx,yy],axis=2) 48 | 49 | p = np.exp(net.forward(tovar(trainset), tovar(testset), torch.cuda.FloatTensor(np.array([4]))).cpu().data.numpy()) 50 | 51 | p = p.reshape((4,XR,XR)).transpose(1,2,0) 52 | xx = xx.reshape((XR,XR)) 53 | yy = yy.reshape((XR,XR)) 54 | 55 | colors = np.array([ [0.7,0.2,0.2], [0.2,0.7,0.2], [0.2, 0.2, 0.7], [0.7, 0.2, 0.7]]) 56 | 57 | im = np.zeros((XR,XR,3)) 58 | for j in range(4): 59 | im += p[:,:,j].reshape((XR,XR,1))*np.array(colors[j]).reshape((1,1,3)) 60 | 61 | yl = np.argmax(y,axis=2)[0,0] 62 | 63 | plt.imshow(im,extent=[-3,3,3,-3]) 64 | for j in range(4): 65 | plt.scatter(x[0,0,0,yl==j],x[0,0,1,yl==j], c=colors[j], edgecolors='k', lw=1,s=10) 66 | 67 | plt.xticks([]) 68 | plt.yticks([]) 69 | plt.xlim(-3,3) 70 | plt.ylim(-3,3) 71 | 72 | net2_4_400_1 = ClassifierGenerator(2, 4, 384).cuda() 73 | net2_4_400_1.load_state_dict(torch.load("models/classifier-generator-2-4-base.pth")) 74 | 75 | net2_4_20_1 = ClassifierGenerator(2, 4, 384).cuda() 76 | net2_4_20_1.load_state_dict(torch.load("models/classifier-generator-2-4-N20.pth")) 77 | 78 | net2_4_100_1 = ClassifierGenerator(2, 4, 384).cuda() 79 | net2_4_100_1.load_state_dict(torch.load("models/classifier-generator-2-4-N100.pth")) 80 | 81 | net2_4_100_4 = ClassifierGenerator(2, 4, 384).cuda() 82 | net2_4_100_4.load_state_dict(torch.load("models/classifier-generator-2-4-diff4.pth")) 83 | 84 | net2_4_gen = ClassifierGenerator(2, 4, 384).cuda() 85 | net2_4_gen.load_state_dict(torch.load("models/classifier-generator-2-4-general.pth")) 86 | 87 | np.random.seed(12345) 88 | torch.manual_seed(12345) 89 | 90 | xd1, yd1 = problemGenerator(100, CLASSES=4, FEATURES=2, sigma=0.25) 91 | xd2, yd2 = problemGenerator(100, CLASSES=4, FEATURES=2, sigma=1) 92 | 93 | def rollGenerator(N,CLASSES): 94 | yl = np.random.randint(CLASSES,size=(N,)) 95 | y = np.zeros((N,CLASSES)) 96 | y[np.arange(N), yl[np.arange(N)]] = 1 97 | 98 | u = np.random.rand(N) 99 | v = np.random.randn(N,2) 100 | 101 | r = 0.5+2.5*u 102 | theta = (2*pi/CLASSES)*yl + 3*(2*pi/CLASSES)*u 103 | 104 | x = np.array([ r*np.cos(theta), r*np.sin(theta) ]).transpose(1,0) + 0.1*v 105 | 106 | return x,y 107 | 108 | xd3, yd3 = rollGenerator(100,4) 109 | 110 | plt.subplot(3,5,1) 111 | plt.title("$N=20, \sigma=1$") 112 | plt.ylabel("$\\sigma=0.25$") 113 | plotDecisionBoundary(xd1,yd1,net2_4_20_1) 114 | 115 | plt.subplot(3,5,2) 116 | plt.title("$N=100, \\sigma=1$") 117 | plotDecisionBoundary(xd1,yd1,net2_4_100_1) 118 | 119 | plt.subplot(3,5,3) 120 | plt.title("$N=400, \\sigma=1$") 121 | plotDecisionBoundary(xd1,yd1,net2_4_400_1) 122 | 123 | plt.subplot(3,5,4) 124 | plt.title("$N=100, \\sigma=4$") 125 | 
plotDecisionBoundary(xd1,yd1,net2_4_100_4) 126 | 127 | plt.subplot(3,5,5) 128 | plt.title("$N=20-400, \\sigma=0.25-4$") 129 | plotDecisionBoundary(xd1,yd1,net2_4_gen) 130 | 131 | plt.subplot(3,5,6) 132 | plt.ylabel("$\\sigma=1$") 133 | plotDecisionBoundary(xd2,yd2,net2_4_20_1) 134 | 135 | plt.subplot(3,5,7) 136 | plotDecisionBoundary(xd2,yd2,net2_4_100_1) 137 | 138 | plt.subplot(3,5,8) 139 | plotDecisionBoundary(xd2,yd2,net2_4_400_1) 140 | 141 | plt.subplot(3,5,9) 142 | plotDecisionBoundary(xd2,yd2,net2_4_100_4) 143 | 144 | plt.subplot(3,5,10) 145 | plotDecisionBoundary(xd2,yd2,net2_4_gen) 146 | 147 | plt.subplot(3,5,11) 148 | plt.ylabel("Roll") 149 | plotDecisionBoundary(xd3,yd3,net2_4_20_1) 150 | 151 | plt.subplot(3,5,12) 152 | plotDecisionBoundary(xd3,yd3,net2_4_100_1) 153 | 154 | plt.subplot(3,5,13) 155 | plotDecisionBoundary(xd3,yd3,net2_4_400_1) 156 | 157 | plt.subplot(3,5,14) 158 | plotDecisionBoundary(xd3,yd3,net2_4_100_4) 159 | 160 | plt.subplot(3,5,15) 161 | plotDecisionBoundary(xd3,yd3,net2_4_gen) 162 | 163 | plt.gcf().set_size_inches((15,9)) 164 | plt.savefig("decision.pdf") 165 | -------------------------------------------------------------------------------- /plot_multigaussian.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | import seaborn 4 | import pandas as pd 5 | 6 | seaborn.set() 7 | 8 | def plotCurve(xvar, data, label, linestyle, linewidth): 9 | nd = np.array(data) 10 | mu = nd[:,0] 11 | std = nd[:,1] 12 | 13 | plt.errorbar(xvar, mu, std, label=label, ls=linestyle, lw=linewidth) 14 | 15 | plt.subplot(1,3,1) 16 | plt.title("Dependency on amount training data") 17 | points = pd.read_csv("results/N-128-16-100.txt",sep=";") 18 | labels = np.array(points.columns[1::2]) 19 | 20 | order = [5,6,0,1,2,3,4] 21 | Nv = np.array(points.iloc[:,0]) 22 | points = np.array(points.iloc[:,1:]) 23 | labels = labels[order] 24 | 25 | for i in range(labels.shape[0]): 26 | if i>=2: 27 | style='--' 28 | width=1 29 | else: 30 | style='-' 31 | width=2 32 | plotCurve(Nv, points[:,2*order[i]:2*order[i]+2], labels[i], style, width) 33 | 34 | plt.ylabel("AUC") 35 | plt.xlabel("$N_{train}$") 36 | plt.legend() 37 | 38 | plt.subplot(1,3,2) 39 | plt.title("Dependency on number of features") 40 | points = pd.read_csv("results/Feat-128-16-100.txt",sep=";") 41 | labels = np.array(points.columns[1::2]) 42 | 43 | order = [5,6,0,1,2,3,4] 44 | Nv = np.array(points.iloc[:,0]) 45 | points = np.array(points.iloc[:,1:]) 46 | labels = labels[order] 47 | 48 | for i in range(labels.shape[0]): 49 | if i>=2: 50 | style='--' 51 | width=1 52 | else: 53 | style='-' 54 | width=2 55 | plotCurve(Nv, points[:,2*order[i]:2*order[i]+2], labels[i], style, width) 56 | 57 | plt.ylabel("AUC") 58 | plt.xlabel("$N_F$") 59 | plt.legend() 60 | 61 | plt.subplot(1,3,3) 62 | plt.title("Dependency on problem difficulty") 63 | points = pd.read_csv("results/Sigma-128-16-100.txt",sep=";") 64 | labels = np.array(points.columns[1::2]) 65 | 66 | order = [5,6,7,8,0,1,2,3,4] 67 | Nv = np.array(points.iloc[:,0]) 68 | points = np.array(points.iloc[:,1:]) 69 | labels = labels[order] 70 | 71 | for i in range(labels.shape[0]): 72 | if i>=4: 73 | style='--' 74 | width=1 75 | else: 76 | style='-' 77 | width=2 78 | plotCurve(Nv, points[:,2*order[i]:2*order[i]+2], labels[i], style, width) 79 | 80 | plt.ylabel("AUC") 81 | plt.xlabel("$\sigma$") 82 | plt.legend() 83 | 84 | plt.gcf().set_size_inches((18,6)) 85 | plt.savefig("sweeps.pdf") 86 | 
-------------------------------------------------------------------------------- /problem.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from math import * 4 | 5 | import torch 6 | import torch.nn as nn 7 | from torch.nn import Parameter 8 | from torch.nn import functional as F 9 | import torch.optim 10 | from torch.autograd import Variable 11 | 12 | def problemGenerator(N=200, CLASSES = 8, FEATURES = 8, sigma = 1.0, sparseness = 0, imbalance = 0): 13 | rclass = np.random.randn(CLASSES) 14 | pclass = np.exp(-imbalance*rclass) 15 | pclass = pclass/np.sum(pclass) 16 | 17 | covariances = torch.FloatTensor(CLASSES, FEATURES, FEATURES).cuda().normal_() * sigma 18 | means = torch.FloatTensor(CLASSES, FEATURES).cuda().normal_() 19 | 20 | for i in range(FEATURES): 21 | if np.random.rand()=20: 98 | f.write("& 10 ") 99 | results10 = np.array(compareMethodsOnSet(methods + [ lambda: NetworkSKL(ftnet) ], data_x, data_y, N=10, samples=800)) 100 | stdev = np.mean(results10[:,3]) 101 | maxval = np.max(results10[:,1]) 102 | f.write("& %.3g " % stdev) 103 | for i in range(results10.shape[0]): 104 | if abs(maxval-results10[i,1])=60: 112 | f.write("& 50 ") 113 | results50 = np.array(compareMethodsOnSet(methods + [ lambda: NetworkSKL(ftnet) ], data_x, data_y, N=50, samples=800)) 114 | stdev = np.mean(results50[:,3]) 115 | maxval = np.max(results50[:,1]) 116 | f.write("& %.3g " % stdev) 117 | for i in range(results50.shape[0]): 118 | if abs(maxval-results50[i,1])= 50: 96 | err = err/err_count 97 | errs.append(err) 98 | 99 | methods = [lambda: NetworkSKL(net)] 100 | results1 = compareMethodsOnSet(methods, echocardio['x'], echocardio['y'].astype(np.int32), samples=200) 101 | auc1 = results1[0][1] 102 | results2 = compareMethodsOnSet(methods, bloodtransfusion['x'], bloodtransfusion['y'].astype(np.int32), samples=200) 103 | auc2 = results2[0][1] 104 | results3 = compareMethodsOnSet(methods, autism['x'], autism['y'].astype(np.int32), samples=200) 105 | auc3 = results3[0][1] 106 | 107 | f = open("training_curves/training128-16.txt","a") 108 | f.write("%d %.6g %.6g %.6g %.6g %.6g\n" % (i, err, difficulty_level, auc1, auc2, auc3)) 109 | f.close() 110 | 111 | # Curriculum 112 | if err<0.7 and difficulty_level<0.2: 113 | difficulty_level *= 2.0 114 | 115 | err = 0 116 | err_count = 0 117 | 118 | torch.save(net.state_dict(),open("models/classifier-generator-128-16.pth","wb")) 119 | -------------------------------------------------------------------------------- /train2_N100.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | 4 | import sys 5 | 6 | from math import * 7 | 8 | import torch 9 | import torch.nn as nn 10 | from torch.nn import Parameter 11 | from torch.nn import functional as F 12 | import torch.optim 13 | from torch.autograd import Variable 14 | 15 | import time 16 | import copy 17 | 18 | from architecture import ClassifierGenerator, NetworkSKL, tovar, toivar, normalizeAndProject 19 | from problem import problemGenerator 20 | from testing import evalClassifier, compareMethodsOnSet 21 | 22 | def trainingStep(net, NTRAIN, min_difficulty = 1.0, max_difficulty = 1.0, min_sparseness = 0, max_sparseness = 0, min_imbalance = 0, max_imbalance = 0, feature_variation = True, class_variation = True, BS = 200): 23 | FEATURES = net.FEATURES 24 | CLASSES = net.CLASSES 25 | 26 | net.zero_grad() 27 | batch_mem = [] 28 | batch_test = [] 29 | batch_label = [] 30 | 
class_count = [] 31 | 32 | for i in range(BS): 33 | if feature_variation: 34 | feat = np.random.randint(2.5*FEATURES) + FEATURES//2 35 | else: 36 | feat = FEATURES 37 | 38 | if class_variation: 39 | classes = np.random.randint(CLASSES-2) + 2 40 | else: 41 | classes = CLASSES 42 | 43 | xd,yd = problemGenerator(N=NTRAIN+100, FEATURES=feat, CLASSES=classes, 44 | sigma = np.random.rand()*(max_difficulty - min_difficulty) + min_difficulty, 45 | sparseness = np.random.rand()*(max_sparseness - min_sparseness) + min_sparseness, 46 | imbalance = np.random.rand()*(max_imbalance - min_imbalance) + min_imbalance) 47 | 48 | if classes= 50: 88 | err = err/err_count 89 | errs.append(err) 90 | 91 | f = open("training_curves/training2-N100.txt","a") 92 | f.write("%d %.6g %.6g\n" % (i, err, difficulty_level)) 93 | f.close() 94 | 95 | err = 0 96 | err_count = 0 97 | 98 | torch.save(net.state_dict(),open("models/classifier-generator-2-4-N100.pth","wb")) 99 | -------------------------------------------------------------------------------- /train2_N20.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | 4 | import sys 5 | 6 | from math import * 7 | 8 | import torch 9 | import torch.nn as nn 10 | from torch.nn import Parameter 11 | from torch.nn import functional as F 12 | import torch.optim 13 | from torch.autograd import Variable 14 | 15 | import time 16 | import copy 17 | 18 | from architecture import ClassifierGenerator, NetworkSKL, tovar, toivar, normalizeAndProject 19 | from problem import problemGenerator 20 | from testing import evalClassifier, compareMethodsOnSet 21 | 22 | def trainingStep(net, NTRAIN, min_difficulty = 1.0, max_difficulty = 1.0, min_sparseness = 0, max_sparseness = 0, min_imbalance = 0, max_imbalance = 0, feature_variation = True, class_variation = True, BS = 200): 23 | FEATURES = net.FEATURES 24 | CLASSES = net.CLASSES 25 | 26 | net.zero_grad() 27 | batch_mem = [] 28 | batch_test = [] 29 | batch_label = [] 30 | class_count = [] 31 | 32 | for i in range(BS): 33 | if feature_variation: 34 | feat = np.random.randint(2.5*FEATURES) + FEATURES//2 35 | else: 36 | feat = FEATURES 37 | 38 | if class_variation: 39 | classes = np.random.randint(CLASSES-2) + 2 40 | else: 41 | classes = CLASSES 42 | 43 | xd,yd = problemGenerator(N=NTRAIN+100, FEATURES=feat, CLASSES=classes, 44 | sigma = np.random.rand()*(max_difficulty - min_difficulty) + min_difficulty, 45 | sparseness = np.random.rand()*(max_sparseness - min_sparseness) + min_sparseness, 46 | imbalance = np.random.rand()*(max_imbalance - min_imbalance) + min_imbalance) 47 | 48 | if classes= 50: 88 | err = err/err_count 89 | errs.append(err) 90 | 91 | f = open("training_curves/training2-N20.txt","a") 92 | f.write("%d %.6g %.6g\n" % (i, err, difficulty_level)) 93 | f.close() 94 | 95 | err = 0 96 | err_count = 0 97 | 98 | torch.save(net.state_dict(),open("models/classifier-generator-2-4-N20.pth","wb")) 99 | -------------------------------------------------------------------------------- /train2_baseline.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | 4 | import sys 5 | 6 | from math import * 7 | 8 | import torch 9 | import torch.nn as nn 10 | from torch.nn import Parameter 11 | from torch.nn import functional as F 12 | import torch.optim 13 | from torch.autograd import Variable 14 | 15 | import time 16 | import copy 17 | 18 | from architecture import 
ClassifierGenerator, NetworkSKL, tovar, toivar, normalizeAndProject 19 | from problem import problemGenerator 20 | from testing import evalClassifier, compareMethodsOnSet 21 | 22 | def trainingStep(net, NTRAIN, min_difficulty = 1.0, max_difficulty = 1.0, min_sparseness = 0, max_sparseness = 0, min_imbalance = 0, max_imbalance = 0, feature_variation = True, class_variation = True, BS = 200): 23 | FEATURES = net.FEATURES 24 | CLASSES = net.CLASSES 25 | 26 | net.zero_grad() 27 | batch_mem = [] 28 | batch_test = [] 29 | batch_label = [] 30 | class_count = [] 31 | 32 | for i in range(BS): 33 | if feature_variation: 34 | feat = np.random.randint(2.5*FEATURES) + FEATURES//2 35 | else: 36 | feat = FEATURES 37 | 38 | if class_variation: 39 | classes = np.random.randint(CLASSES-2) + 2 40 | else: 41 | classes = CLASSES 42 | 43 | xd,yd = problemGenerator(N=NTRAIN+100, FEATURES=feat, CLASSES=classes, 44 | sigma = np.random.rand()*(max_difficulty - min_difficulty) + min_difficulty, 45 | sparseness = np.random.rand()*(max_sparseness - min_sparseness) + min_sparseness, 46 | imbalance = np.random.rand()*(max_imbalance - min_imbalance) + min_imbalance) 47 | 48 | if classes= 50: 88 | err = err/err_count 89 | errs.append(err) 90 | 91 | f = open("training_curves/training2-base.txt","a") 92 | f.write("%d %.6g %.6g\n" % (i, err, difficulty_level)) 93 | f.close() 94 | 95 | err = 0 96 | err_count = 0 97 | 98 | torch.save(net.state_dict(),open("models/classifier-generator-2-4-base.pth","wb")) 99 | -------------------------------------------------------------------------------- /train2_diff4.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | 4 | import sys 5 | 6 | from math import * 7 | 8 | import torch 9 | import torch.nn as nn 10 | from torch.nn import Parameter 11 | from torch.nn import functional as F 12 | import torch.optim 13 | from torch.autograd import Variable 14 | 15 | import time 16 | import copy 17 | 18 | from architecture import ClassifierGenerator, NetworkSKL, tovar, toivar, normalizeAndProject 19 | from problem import problemGenerator 20 | from testing import evalClassifier, compareMethodsOnSet 21 | 22 | def trainingStep(net, NTRAIN, min_difficulty = 1.0, max_difficulty = 1.0, min_sparseness = 0, max_sparseness = 0, min_imbalance = 0, max_imbalance = 0, feature_variation = True, class_variation = True, BS = 200): 23 | FEATURES = net.FEATURES 24 | CLASSES = net.CLASSES 25 | 26 | net.zero_grad() 27 | batch_mem = [] 28 | batch_test = [] 29 | batch_label = [] 30 | class_count = [] 31 | 32 | for i in range(BS): 33 | if feature_variation: 34 | feat = np.random.randint(2.5*FEATURES) + FEATURES//2 35 | else: 36 | feat = FEATURES 37 | 38 | if class_variation: 39 | classes = np.random.randint(CLASSES-2) + 2 40 | else: 41 | classes = CLASSES 42 | 43 | xd,yd = problemGenerator(N=NTRAIN+100, FEATURES=feat, CLASSES=classes, 44 | sigma = np.random.rand()*(max_difficulty - min_difficulty) + min_difficulty, 45 | sparseness = np.random.rand()*(max_sparseness - min_sparseness) + min_sparseness, 46 | imbalance = np.random.rand()*(max_imbalance - min_imbalance) + min_imbalance) 47 | 48 | if classes= 50: 88 | err = err/err_count 89 | errs.append(err) 90 | 91 | f = open("training_curves/training2-diff4.txt","a") 92 | f.write("%d %.6g %.6g\n" % (i, err, difficulty_level)) 93 | f.close() 94 | 95 | err = 0 96 | err_count = 0 97 | 98 | torch.save(net.state_dict(),open("models/classifier-generator-2-4-diff4.pth","wb")) 99 | 
-------------------------------------------------------------------------------- /train2_general.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | 4 | import sys 5 | 6 | from math import * 7 | 8 | import torch 9 | import torch.nn as nn 10 | from torch.nn import Parameter 11 | from torch.nn import functional as F 12 | import torch.optim 13 | from torch.autograd import Variable 14 | 15 | import time 16 | import copy 17 | 18 | from architecture import ClassifierGenerator, NetworkSKL, tovar, toivar, normalizeAndProject 19 | from problem import problemGenerator 20 | from testing import evalClassifier, compareMethodsOnSet 21 | 22 | def trainingStep(net, NTRAIN, min_difficulty = 1.0, max_difficulty = 1.0, min_sparseness = 0, max_sparseness = 0, min_imbalance = 0, max_imbalance = 0, feature_variation = True, class_variation = True, BS = 200): 23 | FEATURES = net.FEATURES 24 | CLASSES = net.CLASSES 25 | 26 | net.zero_grad() 27 | batch_mem = [] 28 | batch_test = [] 29 | batch_label = [] 30 | class_count = [] 31 | 32 | for i in range(BS): 33 | if feature_variation: 34 | feat = np.random.randint(2.5*FEATURES) + FEATURES//2 35 | else: 36 | feat = FEATURES 37 | 38 | if class_variation: 39 | classes = np.random.randint(CLASSES-2) + 2 40 | else: 41 | classes = CLASSES 42 | 43 | xd,yd = problemGenerator(N=NTRAIN+100, FEATURES=feat, CLASSES=classes, 44 | sigma = np.random.rand()*(max_difficulty - min_difficulty) + min_difficulty, 45 | sparseness = np.random.rand()*(max_sparseness - min_sparseness) + min_sparseness, 46 | imbalance = np.random.rand()*(max_imbalance - min_imbalance) + min_imbalance) 47 | 48 | if classes= 50: 87 | err = err/err_count 88 | errs.append(err) 89 | 90 | #methods = [lambda: NetworkSKL(net)] 91 | #results1 = compareMethodsOnSet(methods, echocardio['x'], echocardio['y'].astype(np.int32), samples=200) 92 | #auc1 = results1[0][1] 93 | #results2 = compareMethodsOnSet(methods, bloodtransfusion['x'], bloodtransfusion['y'].astype(np.int32), samples=200) 94 | #auc2 = results2[0][1] 95 | #results3 = compareMethodsOnSet(methods, autism['x'], autism['y'].astype(np.int32), samples=200) 96 | #auc3 = results3[0][1] 97 | 98 | f = open("training2-general.txt","a") 99 | f.write("%d %.6g\n" % (i, err)) 100 | f.close() 101 | 102 | err = 0 103 | err_count = 0 104 | 105 | torch.save(net.state_dict(),open("classifier-generator-2-4-general.pth","wb")) 106 | -------------------------------------------------------------------------------- /train32.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import matplotlib.pyplot as plt 3 | 4 | import sys 5 | 6 | from math import * 7 | 8 | import torch 9 | import torch.nn as nn 10 | from torch.nn import Parameter 11 | from torch.nn import functional as F 12 | import torch.optim 13 | from torch.autograd import Variable 14 | 15 | import time 16 | import copy 17 | 18 | from architecture import ClassifierGenerator, NetworkSKL, tovar, toivar, normalizeAndProject 19 | from problem import problemGenerator 20 | from testing import evalClassifier, compareMethodsOnSet 21 | 22 | def trainingStep(net, NTRAIN, min_difficulty = 1.0, max_difficulty = 1.0, min_sparseness = 0, max_sparseness = 0, min_imbalance = 0, max_imbalance = 0, feature_variation = True, class_variation = True, BS = 200): 23 | FEATURES = net.FEATURES 24 | CLASSES = net.CLASSES 25 | 26 | net.zero_grad() 27 | batch_mem = [] 28 | batch_test = [] 29 | 
batch_label = [] 30 | class_count = [] 31 | 32 | for i in range(BS): 33 | if feature_variation: 34 | feat = np.random.randint(2.5*FEATURES) + FEATURES//2 35 | else: 36 | feat = FEATURES 37 | 38 | if class_variation: 39 | classes = np.random.randint(CLASSES-2) + 2 40 | else: 41 | classes = CLASSES 42 | 43 | xd,yd = problemGenerator(N=NTRAIN+100, FEATURES=feat, CLASSES=classes, 44 | sigma = np.random.rand()*(max_difficulty - min_difficulty) + min_difficulty, 45 | sparseness = np.random.rand()*(max_sparseness - min_sparseness) + min_sparseness, 46 | imbalance = np.random.rand()*(max_imbalance - min_imbalance) + min_imbalance) 47 | 48 | if classes<CLASSES: [... lines 49-94 lost in extraction ...] 95 | if err_count >= 50: 96 | err = err/err_count 97 | errs.append(err) 98 | 99 | methods = [lambda: NetworkSKL(net)] 100 | results1 = compareMethodsOnSet(methods, echocardio['x'], echocardio['y'].astype(np.int32), samples=200) 101 | auc1 = results1[0][1] 102 | results2 = compareMethodsOnSet(methods, bloodtransfusion['x'], bloodtransfusion['y'].astype(np.int32), samples=200) 103 | auc2 = results2[0][1] 104 | results3 = compareMethodsOnSet(methods, autism['x'], autism['y'].astype(np.int32), samples=200) 105 | auc3 = results3[0][1] 106 | 107 | f = open("training_curves/training32-16.txt","a") 108 | f.write("%d %.6g %.6g %.6g %.6g %.6g\n" % (i, err, difficulty_level, auc1, auc2, auc3)) 109 | f.close() 110 | 111 | # Curriculum 112 | if err<0.7 and difficulty_level<0.4: 113 | difficulty_level *= 2.0 114 | 115 | err = 0 116 | err_count = 0 117 | 118 | torch.save(net.state_dict(),open("models/classifier-generator-32-16.pth","wb")) 119 | -------------------------------------------------------------------------------- /training_curves/finetuning-autism.txt: -------------------------------------------------------------------------------- 1 | 0 1.93224 2 | 1 1.76395 3 | 2 1.71711 4 | 3 1.7162 5 | 4 1.56644 6 | 5 1.58108 7 | 6 1.67949 8 | 7 1.48765 9 | 8 1.42996 10 | 9 1.49812 11 | 10 1.40603 12 | 11 1.35206 13 | 12 1.36759 14 | 13 1.36234 15 | 14 1.24941 16 | 15 1.27262 17 | 16 1.36579 18 | 17 1.1373 19 | 18 1.26488 20 | 19 1.24514 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-bloodtransfusion.txt: -------------------------------------------------------------------------------- 1 | 0 2.15757 2 | 1 1.79581 3 | 2 1.83826 4 | 3 1.50172 5 | 4 1.49056 6 | 5 1.62894 7 | 6 1.65092 8 | 7 1.56861 9 | 8 1.57414 10 | 9 1.46081 11 | 10 1.40631 12 | 11 1.42288 13 | 12 1.3943 14 | 13 1.27971 15 | 14 1.31086 16 | 15 1.16358 17 | 16 1.08897 18 | 17 1.15101 19 | 18 1.14056 20 | 19 1.15411 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-cervical_cancer.txt: -------------------------------------------------------------------------------- 1 | 0 1.77735 2 | 1 1.74096 3 | 2 1.61424 4 | 3 1.65419 5 | 4 1.53005 6 | 5 1.60421 7 | 6 1.44424 8 | 7 1.57501 9 | 8 1.40848 10 | 9 1.44147 11 | 10 1.45497 12 | 11 1.41666 13 | 12 1.43331 14 | 13 1.46908 15 | 14 1.3756 16 | 15 1.43754 17 | 16 1.1436 18 | 17 1.15361 19 | 18 1.24752 20 | 19 1.22217 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-chronic-kidney.txt: -------------------------------------------------------------------------------- 1 | 0 1.9385 2 | 1 1.73013 3 | 2 1.82971 4 | 3 1.49134 5 | 4 1.77159 6 | 5 1.4788 7 | 6 1.52803 8 | 7 1.53301 9 | 8 1.55747 10 | 9 1.63345 11 | 10 1.41897 12 | 11 1.37897 13 | 12 1.55448 14 | 13 1.23973 15 | 14 1.43092 16 | 15
1.30772 17 | 16 1.44397 18 | 17 1.20286 19 | 18 1.22827 20 | 19 1.15775 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-cryotherapy.txt: -------------------------------------------------------------------------------- 1 | 0 1.7638 2 | 1 1.961 3 | 2 1.91001 4 | 3 1.72907 5 | 4 1.67928 6 | 5 1.55927 7 | 6 1.51599 8 | 7 1.45171 9 | 8 1.37302 10 | 9 1.33361 11 | 10 1.52863 12 | 11 1.38315 13 | 12 1.31677 14 | 13 1.51075 15 | 14 1.24775 16 | 15 1.18645 17 | 16 1.05979 18 | 17 1.35179 19 | 18 1.13845 20 | 19 1.0974 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-dermatology.txt: -------------------------------------------------------------------------------- 1 | 0 2.02504 2 | 1 1.83204 3 | 2 1.86957 4 | 3 1.82324 5 | 4 1.80354 6 | 5 1.68147 7 | 6 1.61398 8 | 7 1.57962 9 | 8 1.55764 10 | 9 1.39227 11 | 10 1.36465 12 | 11 1.47915 13 | 12 1.36686 14 | 13 1.46127 15 | 14 1.38826 16 | 15 1.19379 17 | 16 1.13811 18 | 17 1.21095 19 | 18 1.17367 20 | 19 1.1021 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-echocardiogram.txt: -------------------------------------------------------------------------------- 1 | 0 1.9106 2 | 1 1.81895 3 | 2 1.77016 4 | 3 1.72323 5 | 4 1.53211 6 | 5 1.55203 7 | 6 1.43412 8 | 7 1.4104 9 | 8 1.25962 10 | 9 1.3254 11 | 10 1.41596 12 | 11 1.35579 13 | 12 1.43119 14 | 13 1.34148 15 | 14 1.225 16 | 15 1.15233 17 | 16 1.05396 18 | 17 1.11014 19 | 18 1.16225 20 | 19 1.21312 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-foresttype.txt: -------------------------------------------------------------------------------- 1 | 0 1.9797 2 | 1 1.83272 3 | 2 1.70859 4 | 3 1.72977 5 | 4 1.84019 6 | 5 1.65378 7 | 6 1.74304 8 | 7 1.66039 9 | 8 1.52465 10 | 9 1.35296 11 | 10 1.57617 12 | 11 1.21287 13 | 12 1.35044 14 | 13 1.31674 15 | 14 1.29248 16 | 15 1.13692 17 | 16 1.34038 18 | 17 1.24565 19 | 18 1.12578 20 | 19 1.13667 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-haberman.txt: -------------------------------------------------------------------------------- 1 | 0 1.95393 2 | 1 1.71495 3 | 2 1.72097 4 | 3 1.64855 5 | 4 1.47354 6 | 5 1.55222 7 | 6 1.46716 8 | 7 1.45983 9 | 8 1.54345 10 | 9 1.4674 11 | 10 1.45494 12 | 11 1.13385 13 | 12 1.37483 14 | 13 1.2032 15 | 14 1.42661 16 | 15 1.08101 17 | 16 1.15194 18 | 17 1.03081 19 | 18 1.08717 20 | 19 1.093 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-hcc-survival.txt: -------------------------------------------------------------------------------- 1 | 0 1.99281 2 | 1 1.91654 3 | 2 1.59249 4 | 3 1.40608 5 | 4 1.71596 6 | 5 1.54022 7 | 6 1.69243 8 | 7 1.55727 9 | 8 1.47108 10 | 9 1.53525 11 | 10 1.47749 12 | 11 1.46029 13 | 12 1.31984 14 | 13 1.36598 15 | 14 1.34384 16 | 15 1.25667 17 | 16 1.14053 18 | 17 1.00871 19 | 18 1.09718 20 | 19 1.06729 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-hepatitis.txt: -------------------------------------------------------------------------------- 1 | 0 1.68377 2 | 1 1.5629 3 | 2 1.86778 4 | 3 1.67056 5 | 4 1.84608 6 | 5 1.56524 7 | 6 1.53241 8 | 7 1.5893 9 | 8 1.59642 10 | 9 1.50118 11 | 10 1.39962 12 | 11 1.30114 13 | 12 1.30631 14 | 13 1.28578 15 | 
14 1.22784 16 | 15 1.25674 17 | 16 1.27457 18 | 17 1.19099 19 | 18 1.13438 20 | 19 1.04789 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-horse-colic.txt: -------------------------------------------------------------------------------- 1 | 0 1.7758 2 | 1 1.94843 3 | 2 1.76795 4 | 3 1.72282 5 | 4 1.63544 6 | 5 1.41462 7 | 6 1.59605 8 | 7 1.55093 9 | 8 1.55265 10 | 9 1.5002 11 | 10 1.53843 12 | 11 1.44896 13 | 12 1.21751 14 | 13 1.45309 15 | 14 1.34143 16 | 15 1.3163 17 | 16 1.33792 18 | 17 1.19312 19 | 18 1.19474 20 | 19 1.205 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-immunotherapy.txt: -------------------------------------------------------------------------------- 1 | 0 1.75684 2 | 1 1.63968 3 | 2 1.66079 4 | 3 1.60692 5 | 4 1.68613 6 | 5 1.56831 7 | 6 1.53307 8 | 7 1.44175 9 | 8 1.52269 10 | 9 1.50168 11 | 10 1.39683 12 | 11 1.4507 13 | 12 1.40289 14 | 13 1.39074 15 | 14 1.29428 16 | 15 1.26717 17 | 16 1.09582 18 | 17 1.16294 19 | 18 1.22027 20 | 19 1.22684 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-iris.txt: -------------------------------------------------------------------------------- 1 | 0 2.00894 2 | 1 1.95144 3 | 2 1.6001 4 | 3 1.57993 5 | 4 1.62977 6 | 5 1.71651 7 | 6 1.76392 8 | 7 1.4822 9 | 8 1.57606 10 | 9 1.557 11 | 10 1.39124 12 | 11 1.64462 13 | 12 1.30923 14 | 13 1.43567 15 | 14 1.42406 16 | 15 1.24307 17 | 16 1.31727 18 | 17 1.1704 19 | 18 1.14214 20 | 19 1.32507 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-lung-cancer.txt: -------------------------------------------------------------------------------- 1 | 0 1.82588 2 | 1 1.79619 3 | 2 1.9807 4 | 3 1.54157 5 | 4 1.47637 6 | 5 1.51708 7 | 6 1.55367 8 | 7 1.48963 9 | 8 1.49299 10 | 9 1.58282 11 | 10 1.42184 12 | 11 1.49248 13 | 12 1.36931 14 | 13 1.41639 15 | 14 1.20877 16 | 15 1.22371 17 | 16 1.16545 18 | 17 1.19531 19 | 18 1.21916 20 | 19 1.09527 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-winequality_red.txt: -------------------------------------------------------------------------------- 1 | 0 1.65211 2 | 1 1.51689 3 | 2 1.60184 4 | 3 1.49608 5 | 4 1.35552 6 | 5 1.46863 7 | 6 1.22208 8 | 7 1.33244 9 | 8 1.15552 10 | 9 1.22745 11 | 10 1.31433 12 | 11 1.19576 13 | 12 1.20953 14 | 13 1.09578 15 | 14 1.12713 16 | 15 0.916191 17 | 16 0.972585 18 | 17 0.996111 19 | 18 0.945155 20 | 19 0.924111 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-winequality_white.txt: -------------------------------------------------------------------------------- 1 | 0 1.95291 2 | 1 1.77451 3 | 2 1.34489 4 | 3 1.45371 5 | 4 1.50631 6 | 5 1.40695 7 | 6 1.47324 8 | 7 1.41893 9 | 8 1.31344 10 | 9 1.29181 11 | 10 1.18859 12 | 11 1.18977 13 | 12 1.0267 14 | 13 1.10133 15 | 14 1.21814 16 | 15 1.00921 17 | 16 1.27025 18 | 17 0.998453 19 | 18 1.08758 20 | 19 0.999706 21 | -------------------------------------------------------------------------------- /training_curves/finetuning-winetype.txt: -------------------------------------------------------------------------------- 1 | 0 1.84798 2 | 1 1.87472 3 | 2 1.74359 4 | 3 1.67245 5 | 4 1.63627 6 | 5 1.61669 7 | 6 1.73325 8 | 7 1.69419 9 | 8 1.46507 10 | 9 1.56876 11 | 10 1.49468 12 | 11 1.51715 13 | 
12 1.43786 14 | 13 1.36588 15 | 14 1.31405 16 | 15 1.28053 17 | 16 1.19157 18 | 17 1.23071 19 | 18 1.22419 20 | 19 1.25182 21 | -------------------------------------------------------------------------------- /training_curves/training2-N100.txt: -------------------------------------------------------------------------------- 1 | 49 1.38508 1 2 | 99 1.37542 1 3 | 149 1.33176 1 4 | 199 1.23756 1 5 | 249 1.09774 1 6 | 299 0.964917 1 7 | 349 0.910152 1 8 | 399 0.885334 1 9 | 449 0.877103 1 10 | 499 0.861022 1 11 | 549 0.854014 1 12 | 599 0.849407 1 13 | 649 0.842788 1 14 | 699 0.840969 1 15 | 749 0.838767 1 16 | 799 0.830495 1 17 | 849 0.831352 1 18 | 899 0.824267 1 19 | 949 0.823179 1 20 | 999 0.822576 1 21 | 1049 0.817938 1 22 | 1099 0.818199 1 23 | 1149 0.813547 1 24 | 1199 0.814332 1 25 | 1249 0.80949 1 26 | 1299 0.80862 1 27 | 1349 0.804373 1 28 | 1399 0.805583 1 29 | 1449 0.80075 1 30 | 1499 0.801598 1 31 | 1549 0.794631 1 32 | 1599 0.795415 1 33 | 1649 0.793065 1 34 | 1699 0.78942 1 35 | 1749 0.78718 1 36 | 1799 0.785873 1 37 | 1849 0.784792 1 38 | 1899 0.782848 1 39 | 1949 0.7822 1 40 | 1999 0.781108 1 41 | 2049 0.781393 1 42 | 2099 0.775803 1 43 | 2149 0.77923 1 44 | 2199 0.779658 1 45 | 2249 0.775719 1 46 | 2299 0.771994 1 47 | 2349 0.776129 1 48 | 2399 0.771595 1 49 | 2449 0.772561 1 50 | 2499 0.771626 1 51 | 2549 0.771222 1 52 | 2599 0.768175 1 53 | 2649 0.770237 1 54 | 2699 0.765237 1 55 | 2749 0.767859 1 56 | 2799 0.769478 1 57 | 2849 0.764822 1 58 | 2899 0.767464 1 59 | 2949 0.762327 1 60 | 2999 0.763924 1 61 | 3049 0.76589 1 62 | 3099 0.761481 1 63 | 3149 0.76088 1 64 | 3199 0.762333 1 65 | 3249 0.764168 1 66 | 3299 0.761479 1 67 | 3349 0.763255 1 68 | 3399 0.761012 1 69 | 3449 0.761335 1 70 | 3499 0.758522 1 71 | 3549 0.75535 1 72 | 3599 0.760933 1 73 | 3649 0.75917 1 74 | 3699 0.760232 1 75 | 3749 0.75588 1 76 | 3799 0.755766 1 77 | 3849 0.755265 1 78 | 3899 0.76085 1 79 | 3949 0.756247 1 80 | 3999 0.755258 1 81 | 4049 0.756981 1 82 | 4099 0.756701 1 83 | 4149 0.754585 1 84 | 4199 0.751618 1 85 | 4249 0.758146 1 86 | 4299 0.751583 1 87 | 4349 0.756173 1 88 | 4399 0.75389 1 89 | 4449 0.752348 1 90 | 4499 0.747892 1 91 | 4549 0.754575 1 92 | 4599 0.749454 1 93 | 4649 0.750198 1 94 | 4699 0.751858 1 95 | 4749 0.748632 1 96 | 4799 0.750081 1 97 | 4849 0.751037 1 98 | 4899 0.749638 1 99 | 4949 0.748452 1 100 | 4999 0.750847 1 101 | 5049 0.74741 1 102 | 5099 0.751735 1 103 | 5149 0.747325 1 104 | 5199 0.748502 1 105 | 5249 0.747777 1 106 | 5299 0.748296 1 107 | 5349 0.747299 1 108 | 5399 0.750202 1 109 | 5449 0.74365 1 110 | 5499 0.744821 1 111 | 5549 0.744963 1 112 | 5599 0.74505 1 113 | 5649 0.742755 1 114 | 5699 0.749176 1 115 | 5749 0.743768 1 116 | 5799 0.744309 1 117 | 5849 0.745861 1 118 | 5899 0.741878 1 119 | 5949 0.745363 1 120 | 5999 0.74313 1 121 | 6049 0.744272 1 122 | 6099 0.73958 1 123 | 6149 0.743104 1 124 | 6199 0.743114 1 125 | 6249 0.741919 1 126 | 6299 0.741195 1 127 | 6349 0.743583 1 128 | 6399 0.742573 1 129 | 6449 0.744087 1 130 | 6499 0.746445 1 131 | 6549 0.738619 1 132 | 6599 0.739287 1 133 | 6649 0.739373 1 134 | 6699 0.737829 1 135 | 6749 0.739557 1 136 | 6799 0.739681 1 137 | 6849 0.738757 1 138 | 6899 0.744211 1 139 | 6949 0.737624 1 140 | 6999 0.737222 1 141 | 7049 0.739422 1 142 | 7099 0.741228 1 143 | 7149 0.73855 1 144 | 7199 0.740137 1 145 | 7249 0.736996 1 146 | 7299 0.738168 1 147 | 7349 0.740257 1 148 | 7399 0.736644 1 149 | 7449 0.736595 1 150 | 7499 0.740316 1 151 | 7549 0.736841 1 152 | 7599 0.734516 1 153 | 7649 0.738544 1 154 | 7699 
0.738351 1 155 | 7749 0.734539 1 156 | 7799 0.7369 1 157 | 7849 0.736127 1 158 | 7899 0.737513 1 159 | 7949 0.736639 1 160 | 7999 0.73107 1 161 | 8049 0.735872 1 162 | 8099 0.734119 1 163 | 8149 0.734695 1 164 | 8199 0.73403 1 165 | 8249 0.732669 1 166 | 8299 0.735988 1 167 | 8349 0.735674 1 168 | 8399 0.736281 1 169 | 8449 0.735995 1 170 | 8499 0.735725 1 171 | 8549 0.733093 1 172 | 8599 0.732521 1 173 | 8649 0.736049 1 174 | 8699 0.729587 1 175 | 8749 0.727341 1 176 | 8799 0.729533 1 177 | 8849 0.730902 1 178 | 8899 0.730352 1 179 | 8949 0.730266 1 180 | 8999 0.733519 1 181 | 9049 0.731287 1 182 | 9099 0.732997 1 183 | 9149 0.73237 1 184 | 9199 0.732552 1 185 | 9249 0.725633 1 186 | 9299 0.730886 1 187 | 9349 0.732873 1 188 | 9399 0.730134 1 189 | 9449 0.728338 1 190 | 9499 0.727494 1 191 | 9549 0.734157 1 192 | 9599 0.730639 1 193 | 9649 0.7325 1 194 | 9699 0.732372 1 195 | 9749 0.727004 1 196 | 9799 0.728099 1 197 | 9849 0.727816 1 198 | 9899 0.726045 1 199 | 9949 0.728079 1 200 | 9999 0.729831 1 201 | 10049 0.728968 1 202 | 10099 0.729084 1 203 | 10149 0.726557 1 204 | 10199 0.731209 1 205 | 10249 0.727234 1 206 | 10299 0.726035 1 207 | 10349 0.727161 1 208 | 10399 0.72984 1 209 | 10449 0.727355 1 210 | 10499 0.726945 1 211 | 10549 0.728239 1 212 | 10599 0.72529 1 213 | 10649 0.725277 1 214 | 10699 0.724949 1 215 | 10749 0.72937 1 216 | 10799 0.724325 1 217 | 10849 0.724392 1 218 | 10899 0.726653 1 219 | 10949 0.727586 1 220 | 10999 0.726205 1 221 | 11049 0.724409 1 222 | 11099 0.722964 1 223 | 11149 0.725749 1 224 | 11199 0.727527 1 225 | 11249 0.72951 1 226 | 11299 0.724215 1 227 | 11349 0.725514 1 228 | 11399 0.724805 1 229 | 11449 0.725462 1 230 | 11499 0.725481 1 231 | 11549 0.724592 1 232 | 11599 0.724445 1 233 | 11649 0.72737 1 234 | 11699 0.72061 1 235 | 11749 0.722657 1 236 | 11799 0.727372 1 237 | 11849 0.723286 1 238 | 11899 0.724058 1 239 | 11949 0.720847 1 240 | 11999 0.718388 1 241 | 12049 0.718838 1 242 | 12099 0.722253 1 243 | 12149 0.723531 1 244 | 12199 0.72112 1 245 | 12249 0.725738 1 246 | 12299 0.721642 1 247 | 12349 0.719336 1 248 | 12399 0.721493 1 249 | 12449 0.722603 1 250 | 12499 0.720075 1 251 | 12549 0.723117 1 252 | 12599 0.724147 1 253 | 12649 0.72292 1 254 | 12699 0.72096 1 255 | 12749 0.717109 1 256 | 12799 0.723032 1 257 | 12849 0.721524 1 258 | 12899 0.721112 1 259 | 12949 0.718847 1 260 | 12999 0.7221 1 261 | 13049 0.716752 1 262 | 13099 0.720801 1 263 | 13149 0.718477 1 264 | 13199 0.717746 1 265 | 13249 0.718819 1 266 | 13299 0.718178 1 267 | 13349 0.714394 1 268 | 13399 0.716623 1 269 | 13449 0.716903 1 270 | 13499 0.721321 1 271 | 13549 0.718274 1 272 | 13599 0.715405 1 273 | 13649 0.720885 1 274 | 13699 0.717348 1 275 | 13749 0.715146 1 276 | 13799 0.716142 1 277 | 13849 0.716971 1 278 | 13899 0.718632 1 279 | 13949 0.717895 1 280 | 13999 0.715847 1 281 | 14049 0.716752 1 282 | 14099 0.713613 1 283 | 14149 0.715162 1 284 | 14199 0.712745 1 285 | 14249 0.715482 1 286 | 14299 0.71683 1 287 | 14349 0.711926 1 288 | 14399 0.711825 1 289 | 14449 0.716328 1 290 | 14499 0.711302 1 291 | 14549 0.715064 1 292 | 14599 0.712296 1 293 | 14649 0.714276 1 294 | 14699 0.715065 1 295 | 14749 0.714101 1 296 | 14799 0.71606 1 297 | 14849 0.716677 1 298 | 14899 0.711813 1 299 | 14949 0.710903 1 300 | 14999 0.711938 1 301 | 15049 0.711709 1 302 | 15099 0.712043 1 303 | 15149 0.714919 1 304 | 15199 0.710476 1 305 | 15249 0.711995 1 306 | 15299 0.711609 1 307 | 15349 0.71191 1 308 | 15399 0.711091 1 309 | 15449 0.712239 1 310 | 15499 0.712216 1 311 | 15549 0.71167 1 
312 | 15599 0.710571 1 313 | 15649 0.70683 1 314 | 15699 0.711351 1 315 | 15749 0.711816 1 316 | 15799 0.71071 1 317 | 15849 0.709146 1 318 | 15899 0.711687 1 319 | 15949 0.710636 1 320 | 15999 0.708189 1 321 | 16049 0.702185 1 322 | 16099 0.707289 1 323 | 16149 0.703955 1 324 | 16199 0.706976 1 325 | 16249 0.706789 1 326 | 16299 0.707418 1 327 | 16349 0.707044 1 328 | 16399 0.70426 1 329 | 16449 0.703868 1 330 | 16499 0.705787 1 331 | 16549 0.703746 1 332 | 16599 0.707535 1 333 | 16649 0.703738 1 334 | 16699 0.703611 1 335 | 16749 0.703657 1 336 | 16799 0.704536 1 337 | 16849 0.707335 1 338 | 16899 0.702732 1 339 | 16949 0.705201 1 340 | 16999 0.706564 1 341 | 17049 0.70118 1 342 | 17099 0.705032 1 343 | 17149 0.703877 1 344 | 17199 0.702711 1 345 | 17249 0.704017 1 346 | 17299 0.702656 1 347 | 17349 0.702059 1 348 | 17399 0.703774 1 349 | 17449 0.704309 1 350 | 17499 0.703461 1 351 | 17549 0.705423 1 352 | 17599 0.701599 1 353 | 17649 0.703634 1 354 | 17699 0.708376 1 355 | 17749 0.703027 1 356 | 17799 0.703769 1 357 | 17849 0.699249 1 358 | 17899 0.69913 1 359 | 17949 0.701527 1 360 | 17999 0.698582 1 361 | 18049 0.705962 1 362 | 18099 0.701368 1 363 | 18149 0.699605 1 364 | 18199 0.698385 1 365 | 18249 0.700734 1 366 | 18299 0.697347 1 367 | 18349 0.698225 1 368 | 18399 0.699863 1 369 | 18449 0.699684 1 370 | 18499 0.704212 1 371 | 18549 0.700288 1 372 | 18599 0.702659 1 373 | 18649 0.695962 1 374 | 18699 0.698647 1 375 | 18749 0.696246 1 376 | 18799 0.696978 1 377 | 18849 0.698191 1 378 | 18899 0.699466 1 379 | 18949 0.698734 1 380 | 18999 0.699016 1 381 | 19049 0.701796 1 382 | 19099 0.698295 1 383 | 19149 0.697529 1 384 | 19199 0.69882 1 385 | 19249 0.69826 1 386 | 19299 0.692238 1 387 | 19349 0.699581 1 388 | 19399 0.696322 1 389 | 19449 0.694108 1 390 | 19499 0.695425 1 391 | 19549 0.696854 1 392 | 19599 0.694684 1 393 | 19649 0.697572 1 394 | 19699 0.691546 1 395 | 19749 0.69958 1 396 | 19799 0.692795 1 397 | 19849 0.696128 1 398 | 19899 0.696352 1 399 | 19949 0.696705 1 400 | 19999 0.692896 1 401 | 20049 0.697324 1 402 | 20099 0.695593 1 403 | 20149 0.694651 1 404 | 20199 0.695252 1 405 | 20249 0.693484 1 406 | 20299 0.68634 1 407 | 20349 0.698118 1 408 | 20399 0.693687 1 409 | 20449 0.694023 1 410 | 20499 0.696 1 411 | 20549 0.691725 1 412 | 20599 0.696311 1 413 | 20649 0.693973 1 414 | 20699 0.69293 1 415 | 20749 0.693042 1 416 | 20799 0.689854 1 417 | 20849 0.694345 1 418 | 20899 0.688482 1 419 | 20949 0.692585 1 420 | 20999 0.692084 1 421 | 21049 0.689386 1 422 | 21099 0.693592 1 423 | 21149 0.688288 1 424 | 21199 0.689819 1 425 | 21249 0.691264 1 426 | 21299 0.692655 1 427 | 21349 0.69264 1 428 | 21399 0.69763 1 429 | 21449 0.690417 1 430 | 21499 0.693737 1 431 | 21549 0.690938 1 432 | 21599 0.687692 1 433 | 21649 0.690362 1 434 | 21699 0.691338 1 435 | 21749 0.692857 1 436 | 21799 0.694129 1 437 | 21849 0.693152 1 438 | 21899 0.693026 1 439 | 21949 0.689254 1 440 | 21999 0.685799 1 441 | 22049 0.68834 1 442 | 22099 0.690701 1 443 | 22149 0.690639 1 444 | 22199 0.68935 1 445 | 22249 0.691056 1 446 | 22299 0.687017 1 447 | 22349 0.68877 1 448 | 22399 0.687966 1 449 | 22449 0.688117 1 450 | 22499 0.68966 1 451 | 22549 0.686103 1 452 | 22599 0.685525 1 453 | 22649 0.685862 1 454 | 22699 0.686848 1 455 | 22749 0.68446 1 456 | 22799 0.689839 1 457 | 22849 0.687018 1 458 | 22899 0.686151 1 459 | 22949 0.6836 1 460 | 22999 0.684252 1 461 | 23049 0.687519 1 462 | 23099 0.684759 1 463 | 23149 0.687399 1 464 | 23199 0.689851 1 465 | 23249 0.685566 1 466 | 23299 0.683635 1 467 | 23349 
0.687473 1 468 | 23399 0.685416 1 469 | 23449 0.688529 1 470 | 23499 0.686674 1 471 | 23549 0.682764 1 472 | 23599 0.687314 1 473 | 23649 0.689096 1 474 | 23699 0.685452 1 475 | 23749 0.683218 1 476 | 23799 0.68645 1 477 | 23849 0.68377 1 478 | 23899 0.684522 1 479 | 23949 0.684491 1 480 | 23999 0.680577 1 481 | 24049 0.6802 1 482 | 24099 0.687374 1 483 | 24149 0.68068 1 484 | 24199 0.680287 1 485 | 24249 0.688511 1 486 | 24299 0.686564 1 487 | 24349 0.680703 1 488 | 24399 0.68245 1 489 | 24449 0.681174 1 490 | 24499 0.685889 1 491 | 24549 0.684642 1 492 | 24599 0.683311 1 493 | 24649 0.680633 1 494 | 24699 0.686154 1 495 | 24749 0.684153 1 496 | 24799 0.683586 1 497 | 24849 0.6842 1 498 | 24899 0.684937 1 499 | 24949 0.680762 1 500 | 24999 0.67918 1 501 | 25049 0.678918 1 502 | 25099 0.67975 1 503 | 25149 0.679468 1 504 | 25199 0.683588 1 505 | 25249 0.680108 1 506 | 25299 0.682691 1 507 | 25349 0.681864 1 508 | 25399 0.679643 1 509 | 25449 0.678258 1 510 | 25499 0.680475 1 511 | 25549 0.677644 1 512 | 25599 0.684051 1 513 | 25649 0.683941 1 514 | 25699 0.67754 1 515 | 25749 0.679257 1 516 | 25799 0.682084 1 517 | 25849 0.679145 1 518 | 25899 0.682428 1 519 | 25949 0.682172 1 520 | 25999 0.679954 1 521 | 26049 0.681061 1 522 | 26099 0.680535 1 523 | 26149 0.680755 1 524 | 26199 0.677638 1 525 | 26249 0.680571 1 526 | 26299 0.684053 1 527 | 26349 0.677348 1 528 | 26399 0.684403 1 529 | 26449 0.68133 1 530 | 26499 0.679714 1 531 | 26549 0.677534 1 532 | 26599 0.675958 1 533 | 26649 0.676297 1 534 | 26699 0.679806 1 535 | 26749 0.677896 1 536 | 26799 0.678608 1 537 | 26849 0.68076 1 538 | 26899 0.680608 1 539 | 26949 0.681245 1 540 | 26999 0.678122 1 541 | 27049 0.676552 1 542 | 27099 0.676587 1 543 | 27149 0.674614 1 544 | 27199 0.678804 1 545 | 27249 0.673823 1 546 | 27299 0.675789 1 547 | 27349 0.676812 1 548 | 27399 0.679433 1 549 | 27449 0.675683 1 550 | 27499 0.676926 1 551 | 27549 0.677049 1 552 | 27599 0.678223 1 553 | 27649 0.680161 1 554 | 27699 0.677766 1 555 | 27749 0.676219 1 556 | 27799 0.678508 1 557 | 27849 0.67868 1 558 | 27899 0.679965 1 559 | 27949 0.679004 1 560 | 27999 0.672851 1 561 | 28049 0.675177 1 562 | 28099 0.674104 1 563 | 28149 0.677024 1 564 | 28199 0.676851 1 565 | 28249 0.678599 1 566 | 28299 0.675454 1 567 | 28349 0.675805 1 568 | 28399 0.672088 1 569 | 28449 0.677491 1 570 | 28499 0.677685 1 571 | 28549 0.668762 1 572 | 28599 0.670913 1 573 | 28649 0.673383 1 574 | 28699 0.67705 1 575 | 28749 0.674045 1 576 | 28799 0.674198 1 577 | 28849 0.671865 1 578 | 28899 0.674455 1 579 | 28949 0.673427 1 580 | 28999 0.675112 1 581 | 29049 0.672598 1 582 | 29099 0.675132 1 583 | 29149 0.677283 1 584 | 29199 0.676802 1 585 | 29249 0.677088 1 586 | 29299 0.676646 1 587 | 29349 0.674766 1 588 | 29399 0.674578 1 589 | 29449 0.673834 1 590 | 29499 0.673114 1 591 | 29549 0.675437 1 592 | 29599 0.672387 1 593 | 29649 0.673221 1 594 | 29699 0.674202 1 595 | 29749 0.672059 1 596 | 29799 0.671083 1 597 | 29849 0.67458 1 598 | 29899 0.673327 1 599 | 29949 0.673411 1 600 | 29999 0.673036 1 601 | 30049 0.675758 1 602 | 30099 0.671968 1 603 | 30149 0.67706 1 604 | 30199 0.672593 1 605 | 30249 0.67228 1 606 | 30299 0.675052 1 607 | 30349 0.671852 1 608 | 30399 0.668757 1 609 | 30449 0.671482 1 610 | 30499 0.671401 1 611 | 30549 0.673992 1 612 | 30599 0.672124 1 613 | 30649 0.672854 1 614 | 30699 0.671909 1 615 | 30749 0.672226 1 616 | 30799 0.671422 1 617 | 30849 0.668454 1 618 | 30899 0.670446 1 619 | 30949 0.671642 1 620 | 30999 0.672163 1 621 | 31049 0.670365 1 622 | 31099 
0.665264 1 623 | 31149 0.664803 1 624 | 31199 0.673784 1 625 | 31249 0.672783 1 626 | 31299 0.669471 1 627 | 31349 0.669404 1 628 | 31399 0.670446 1 629 | 31449 0.672127 1 630 | 31499 0.672004 1 631 | 31549 0.669393 1 632 | 31599 0.667773 1 633 | 31649 0.667915 1 634 | 31699 0.673008 1 635 | 31749 0.670828 1 636 | 31799 0.668534 1 637 | 31849 0.671403 1 638 | 31899 0.664987 1 639 | 31949 0.670333 1 640 | 31999 0.670697 1 641 | 32049 0.666947 1 642 | 32099 0.66718 1 643 | 32149 0.669931 1 644 | 32199 0.671529 1 645 | 32249 0.672355 1 646 | 32299 0.668827 1 647 | 32349 0.670849 1 648 | 32399 0.667544 1 649 | 32449 0.66547 1 650 | 32499 0.669489 1 651 | 32549 0.667549 1 652 | 32599 0.671631 1 653 | 32649 0.672116 1 654 | 32699 0.665371 1 655 | 32749 0.668891 1 656 | 32799 0.670777 1 657 | 32849 0.666109 1 658 | 32899 0.668174 1 659 | 32949 0.670179 1 660 | 32999 0.666697 1 661 | 33049 0.665822 1 662 | 33099 0.671423 1 663 | 33149 0.668357 1 664 | 33199 0.6667 1 665 | 33249 0.666184 1 666 | 33299 0.666713 1 667 | 33349 0.666971 1 668 | 33399 0.665289 1 669 | 33449 0.666363 1 670 | 33499 0.664124 1 671 | 33549 0.664957 1 672 | 33599 0.665154 1 673 | 33649 0.665992 1 674 | 33699 0.66773 1 675 | 33749 0.666689 1 676 | 33799 0.664764 1 677 | 33849 0.666419 1 678 | 33899 0.666693 1 679 | 33949 0.666778 1 680 | 33999 0.663364 1 681 | 34049 0.664791 1 682 | 34099 0.66632 1 683 | 34149 0.665951 1 684 | 34199 0.665505 1 685 | 34249 0.667396 1 686 | 34299 0.664742 1 687 | 34349 0.666901 1 688 | 34399 0.665197 1 689 | 34449 0.665784 1 690 | 34499 0.662002 1 691 | 34549 0.66151 1 692 | 34599 0.66588 1 693 | 34649 0.664627 1 694 | 34699 0.662434 1 695 | 34749 0.666254 1 696 | 34799 0.662762 1 697 | 34849 0.662924 1 698 | 34899 0.666707 1 699 | 34949 0.665419 1 700 | 34999 0.666887 1 701 | 35049 0.665224 1 702 | 35099 0.665006 1 703 | 35149 0.667612 1 704 | 35199 0.661601 1 705 | 35249 0.666339 1 706 | 35299 0.666148 1 707 | 35349 0.663622 1 708 | 35399 0.663254 1 709 | 35449 0.664302 1 710 | 35499 0.665435 1 711 | 35549 0.658982 1 712 | 35599 0.661945 1 713 | 35649 0.66764 1 714 | 35699 0.664456 1 715 | 35749 0.663192 1 716 | 35799 0.663147 1 717 | 35849 0.661069 1 718 | 35899 0.662458 1 719 | 35949 0.664108 1 720 | 35999 0.663943 1 721 | 36049 0.664201 1 722 | 36099 0.658786 1 723 | 36149 0.663195 1 724 | 36199 0.662449 1 725 | 36249 0.660872 1 726 | 36299 0.660614 1 727 | 36349 0.660533 1 728 | 36399 0.661265 1 729 | 36449 0.664025 1 730 | 36499 0.66364 1 731 | 36549 0.661451 1 732 | 36599 0.659848 1 733 | 36649 0.660694 1 734 | 36699 0.659146 1 735 | 36749 0.657579 1 736 | 36799 0.655123 1 737 | 36849 0.655663 1 738 | 36899 0.660467 1 739 | 36949 0.660203 1 740 | 36999 0.660242 1 741 | 37049 0.659602 1 742 | 37099 0.663343 1 743 | 37149 0.660319 1 744 | 37199 0.657662 1 745 | 37249 0.660657 1 746 | 37299 0.661149 1 747 | 37349 0.658014 1 748 | 37399 0.663948 1 749 | 37449 0.660192 1 750 | 37499 0.659953 1 751 | 37549 0.661061 1 752 | 37599 0.660853 1 753 | 37649 0.659307 1 754 | 37699 0.658174 1 755 | 37749 0.65414 1 756 | 37799 0.662309 1 757 | 37849 0.660161 1 758 | 37899 0.659683 1 759 | 37949 0.664049 1 760 | 37999 0.656911 1 761 | 38049 0.657718 1 762 | 38099 0.657472 1 763 | 38149 0.657114 1 764 | 38199 0.655413 1 765 | 38249 0.660667 1 766 | 38299 0.657884 1 767 | 38349 0.660602 1 768 | 38399 0.659287 1 769 | 38449 0.656788 1 770 | 38499 0.653113 1 771 | 38549 0.657709 1 772 | 38599 0.65619 1 773 | 38649 0.65774 1 774 | 38699 0.658311 1 775 | 38749 0.656736 1 776 | 38799 0.653502 1 777 | 38849 
0.659628 1 778 | 38899 0.654844 1 779 | 38949 0.657295 1 780 | 38999 0.655994 1 781 | 39049 0.656521 1 782 | 39099 0.657334 1 783 | 39149 0.656729 1 784 | 39199 0.662097 1 785 | 39249 0.654784 1 786 | 39299 0.660178 1 787 | 39349 0.657815 1 788 | 39399 0.653512 1 789 | 39449 0.651374 1 790 | 39499 0.655078 1 791 | 39549 0.658304 1 792 | 39599 0.654787 1 793 | 39649 0.657135 1 794 | 39699 0.655607 1 795 | 39749 0.654224 1 796 | 39799 0.653329 1 797 | 39849 0.655736 1 798 | 39899 0.658047 1 799 | 39949 0.655574 1 800 | 39999 0.658736 1 801 | -------------------------------------------------------------------------------- /training_curves/training2-N20.txt: -------------------------------------------------------------------------------- 1 | 49 1.3839 1 2 | 99 1.37115 1 3 | 149 1.32749 1 4 | 199 1.25234 1 5 | 249 1.18144 1 6 | 299 1.12789 1 7 | 349 1.10273 1 8 | 399 1.09353 1 9 | 449 1.07963 1 10 | 499 1.07576 1 11 | 549 1.06877 1 12 | 599 1.06089 1 13 | 649 1.06128 1 14 | 699 1.05546 1 15 | 749 1.05384 1 16 | 799 1.05093 1 17 | 849 1.04746 1 18 | 899 1.04322 1 19 | 949 1.03992 1 20 | 999 1.04062 1 21 | 1049 1.03704 1 22 | 1099 1.0412 1 23 | 1149 1.03444 1 24 | 1199 1.03445 1 25 | 1249 1.03413 1 26 | 1299 1.02969 1 27 | 1349 1.03055 1 28 | 1399 1.02721 1 29 | 1449 1.02671 1 30 | 1499 1.02338 1 31 | 1549 1.02432 1 32 | 1599 1.02121 1 33 | 1649 1.02099 1 34 | 1699 1.02212 1 35 | 1749 1.01841 1 36 | 1799 1.01609 1 37 | 1849 1.01522 1 38 | 1899 1.01725 1 39 | 1949 1.01855 1 40 | 1999 1.01219 1 41 | 2049 1.0156 1 42 | 2099 1.00959 1 43 | 2149 1.01218 1 44 | 2199 1.01064 1 45 | 2249 1.01683 1 46 | 2299 1.00848 1 47 | 2349 1.00773 1 48 | 2399 1.00816 1 49 | 2449 1.00652 1 50 | 2499 1.00536 1 51 | 2549 1.00089 1 52 | 2599 1.00551 1 53 | 2649 1.00167 1 54 | 2699 1.00548 1 55 | 2749 1.00621 1 56 | 2799 1 1 57 | 2849 1.0016 1 58 | 2899 1.00022 1 59 | 2949 0.998052 1 60 | 2999 0.994574 1 61 | 3049 0.997884 1 62 | 3099 0.997275 1 63 | 3149 0.997931 1 64 | 3199 0.995627 1 65 | 3249 0.991017 1 66 | 3299 0.997501 1 67 | 3349 0.994131 1 68 | 3399 0.989785 1 69 | 3449 0.993383 1 70 | 3499 0.99391 1 71 | 3549 0.992425 1 72 | 3599 0.991923 1 73 | 3649 0.992225 1 74 | 3699 0.988492 1 75 | 3749 0.988877 1 76 | 3799 0.986882 1 77 | 3849 0.986416 1 78 | 3899 0.989798 1 79 | 3949 0.985551 1 80 | 3999 0.985261 1 81 | 4049 0.986187 1 82 | 4099 0.986122 1 83 | 4149 0.985523 1 84 | 4199 0.984973 1 85 | 4249 0.98409 1 86 | 4299 0.983743 1 87 | 4349 0.98423 1 88 | 4399 0.979905 1 89 | 4449 0.98544 1 90 | 4499 0.978662 1 91 | 4549 0.983025 1 92 | 4599 0.981525 1 93 | 4649 0.981983 1 94 | 4699 0.979515 1 95 | 4749 0.98117 1 96 | 4799 0.979716 1 97 | 4849 0.979682 1 98 | 4899 0.977688 1 99 | 4949 0.976804 1 100 | 4999 0.979742 1 101 | 5049 0.981518 1 102 | 5099 0.977095 1 103 | 5149 0.980198 1 104 | 5199 0.974677 1 105 | 5249 0.974789 1 106 | 5299 0.977146 1 107 | 5349 0.980419 1 108 | 5399 0.975568 1 109 | 5449 0.976776 1 110 | 5499 0.976174 1 111 | 5549 0.976478 1 112 | 5599 0.972249 1 113 | 5649 0.974082 1 114 | 5699 0.972242 1 115 | 5749 0.977463 1 116 | 5799 0.974125 1 117 | 5849 0.969568 1 118 | 5899 0.971004 1 119 | 5949 0.973429 1 120 | 5999 0.971178 1 121 | 6049 0.970317 1 122 | 6099 0.97323 1 123 | 6149 0.971642 1 124 | 6199 0.974202 1 125 | 6249 0.972935 1 126 | 6299 0.971119 1 127 | 6349 0.967897 1 128 | 6399 0.970734 1 129 | 6449 0.965583 1 130 | 6499 0.967603 1 131 | 6549 0.969224 1 132 | 6599 0.967078 1 133 | 6649 0.971716 1 134 | 6699 0.96917 1 135 | 6749 0.965621 1 136 | 6799 0.967805 1 137 | 6849 0.969009 1 
138 | 6899 0.967738 1 139 | 6949 0.966537 1 140 | 6999 0.96133 1 141 | 7049 0.965646 1 142 | 7099 0.966332 1 143 | 7149 0.963292 1 144 | 7199 0.965738 1 145 | 7249 0.962258 1 146 | 7299 0.961689 1 147 | 7349 0.962372 1 148 | 7399 0.963819 1 149 | 7449 0.964397 1 150 | 7499 0.960919 1 151 | 7549 0.96259 1 152 | 7599 0.960767 1 153 | 7649 0.959559 1 154 | 7699 0.960629 1 155 | 7749 0.957153 1 156 | 7799 0.958702 1 157 | 7849 0.961062 1 158 | 7899 0.960317 1 159 | 7949 0.957488 1 160 | 7999 0.957949 1 161 | 8049 0.959549 1 162 | 8099 0.959449 1 163 | 8149 0.957533 1 164 | 8199 0.959267 1 165 | 8249 0.957874 1 166 | 8299 0.958325 1 167 | 8349 0.957556 1 168 | 8399 0.95534 1 169 | 8449 0.959662 1 170 | 8499 0.956591 1 171 | 8549 0.958504 1 172 | 8599 0.956077 1 173 | 8649 0.954726 1 174 | 8699 0.956609 1 175 | 8749 0.955963 1 176 | 8799 0.954569 1 177 | 8849 0.956197 1 178 | 8899 0.954386 1 179 | 8949 0.954195 1 180 | 8999 0.953447 1 181 | 9049 0.955131 1 182 | 9099 0.950872 1 183 | 9149 0.95496 1 184 | 9199 0.952245 1 185 | 9249 0.950302 1 186 | 9299 0.946805 1 187 | 9349 0.948486 1 188 | 9399 0.948955 1 189 | 9449 0.951452 1 190 | 9499 0.949795 1 191 | 9549 0.952482 1 192 | 9599 0.949726 1 193 | 9649 0.947739 1 194 | 9699 0.951434 1 195 | 9749 0.944911 1 196 | 9799 0.946963 1 197 | 9849 0.946519 1 198 | 9899 0.946677 1 199 | 9949 0.950191 1 200 | 9999 0.944983 1 201 | 10049 0.948179 1 202 | 10099 0.9468 1 203 | 10149 0.948416 1 204 | 10199 0.946125 1 205 | 10249 0.946908 1 206 | 10299 0.947246 1 207 | 10349 0.942658 1 208 | 10399 0.948388 1 209 | 10449 0.944587 1 210 | 10499 0.94426 1 211 | 10549 0.943555 1 212 | 10599 0.940068 1 213 | 10649 0.944344 1 214 | 10699 0.9432 1 215 | 10749 0.944375 1 216 | 10799 0.945017 1 217 | 10849 0.938879 1 218 | 10899 0.938815 1 219 | 10949 0.941693 1 220 | 10999 0.937067 1 221 | 11049 0.94255 1 222 | 11099 0.940763 1 223 | 11149 0.941642 1 224 | 11199 0.939156 1 225 | 11249 0.938791 1 226 | 11299 0.94001 1 227 | 11349 0.936309 1 228 | 11399 0.937116 1 229 | 11449 0.939334 1 230 | 11499 0.943885 1 231 | 11549 0.935806 1 232 | 11599 0.936124 1 233 | 11649 0.933883 1 234 | 11699 0.934754 1 235 | 11749 0.938023 1 236 | 11799 0.933757 1 237 | 11849 0.936756 1 238 | 11899 0.935976 1 239 | 11949 0.93861 1 240 | 11999 0.937896 1 241 | 12049 0.934557 1 242 | 12099 0.936409 1 243 | 12149 0.932024 1 244 | 12199 0.932974 1 245 | 12249 0.932571 1 246 | 12299 0.934805 1 247 | 12349 0.935743 1 248 | 12399 0.931664 1 249 | 12449 0.92677 1 250 | 12499 0.93523 1 251 | 12549 0.93266 1 252 | 12599 0.932 1 253 | 12649 0.93256 1 254 | 12699 0.933289 1 255 | 12749 0.930171 1 256 | 12799 0.936711 1 257 | 12849 0.927588 1 258 | 12899 0.929193 1 259 | 12949 0.928896 1 260 | 12999 0.925712 1 261 | 13049 0.927039 1 262 | 13099 0.92926 1 263 | 13149 0.929728 1 264 | 13199 0.927372 1 265 | 13249 0.931392 1 266 | 13299 0.926455 1 267 | 13349 0.927125 1 268 | 13399 0.928222 1 269 | 13449 0.930169 1 270 | 13499 0.926454 1 271 | 13549 0.928519 1 272 | 13599 0.92977 1 273 | 13649 0.931933 1 274 | 13699 0.927841 1 275 | 13749 0.926375 1 276 | 13799 0.9254 1 277 | 13849 0.925102 1 278 | 13899 0.926547 1 279 | 13949 0.923533 1 280 | 13999 0.922192 1 281 | 14049 0.925991 1 282 | 14099 0.924765 1 283 | 14149 0.922138 1 284 | 14199 0.921908 1 285 | 14249 0.922425 1 286 | 14299 0.922526 1 287 | 14349 0.920232 1 288 | 14399 0.92038 1 289 | 14449 0.920114 1 290 | 14499 0.924243 1 291 | 14549 0.920646 1 292 | 14599 0.92343 1 293 | 14649 0.918484 1 294 | 14699 0.919826 1 295 | 14749 0.919926 1 296 | 
14799 0.917163 1 297 | 14849 0.920833 1 298 | 14899 0.915092 1 299 | 14949 0.917189 1 300 | 14999 0.919788 1 301 | 15049 0.918599 1 302 | 15099 0.920554 1 303 | 15149 0.918713 1 304 | 15199 0.91351 1 305 | 15249 0.917478 1 306 | 15299 0.911372 1 307 | 15349 0.913802 1 308 | 15399 0.917569 1 309 | 15449 0.916556 1 310 | 15499 0.916137 1 311 | 15549 0.919595 1 312 | 15599 0.915683 1 313 | 15649 0.917645 1 314 | 15699 0.91576 1 315 | 15749 0.920543 1 316 | 15799 0.914618 1 317 | 15849 0.914519 1 318 | 15899 0.914762 1 319 | 15949 0.914094 1 320 | 15999 0.913391 1 321 | 16049 0.916583 1 322 | 16099 0.916755 1 323 | 16149 0.912319 1 324 | 16199 0.912182 1 325 | 16249 0.912804 1 326 | 16299 0.915277 1 327 | 16349 0.911678 1 328 | 16399 0.911356 1 329 | 16449 0.912729 1 330 | 16499 0.909338 1 331 | 16549 0.911659 1 332 | 16599 0.912414 1 333 | 16649 0.911314 1 334 | 16699 0.908372 1 335 | 16749 0.910659 1 336 | 16799 0.909808 1 337 | 16849 0.908297 1 338 | 16899 0.907519 1 339 | 16949 0.912955 1 340 | 16999 0.9131 1 341 | 17049 0.907061 1 342 | 17099 0.909208 1 343 | 17149 0.905877 1 344 | 17199 0.90701 1 345 | 17249 0.910415 1 346 | 17299 0.909744 1 347 | 17349 0.908461 1 348 | 17399 0.909115 1 349 | 17449 0.906127 1 350 | 17499 0.908654 1 351 | 17549 0.907697 1 352 | 17599 0.905603 1 353 | 17649 0.906425 1 354 | 17699 0.910185 1 355 | 17749 0.902957 1 356 | 17799 0.904571 1 357 | 17849 0.905849 1 358 | 17899 0.907657 1 359 | 17949 0.905572 1 360 | 17999 0.905368 1 361 | 18049 0.908224 1 362 | 18099 0.904883 1 363 | 18149 0.906795 1 364 | 18199 0.907109 1 365 | 18249 0.908615 1 366 | 18299 0.907079 1 367 | 18349 0.907414 1 368 | 18399 0.903415 1 369 | 18449 0.903294 1 370 | 18499 0.907693 1 371 | 18549 0.902596 1 372 | 18599 0.903057 1 373 | 18649 0.907125 1 374 | 18699 0.899366 1 375 | 18749 0.901057 1 376 | 18799 0.903703 1 377 | 18849 0.904742 1 378 | 18899 0.905763 1 379 | 18949 0.902289 1 380 | 18999 0.898941 1 381 | 19049 0.903717 1 382 | 19099 0.903606 1 383 | 19149 0.903064 1 384 | 19199 0.899315 1 385 | 19249 0.899066 1 386 | 19299 0.897571 1 387 | 19349 0.90118 1 388 | 19399 0.897376 1 389 | 19449 0.901933 1 390 | 19499 0.902513 1 391 | 19549 0.900079 1 392 | 19599 0.89726 1 393 | 19649 0.896454 1 394 | 19699 0.900057 1 395 | 19749 0.899807 1 396 | 19799 0.902539 1 397 | 19849 0.900244 1 398 | 19899 0.897828 1 399 | 19949 0.899879 1 400 | 19999 0.89924 1 401 | 20049 0.895153 1 402 | 20099 0.894679 1 403 | 20149 0.893025 1 404 | 20199 0.897927 1 405 | 20249 0.893434 1 406 | 20299 0.896323 1 407 | 20349 0.898108 1 408 | 20399 0.89627 1 409 | 20449 0.899239 1 410 | 20499 0.894027 1 411 | 20549 0.90185 1 412 | 20599 0.892848 1 413 | 20649 0.896078 1 414 | 20699 0.894352 1 415 | 20749 0.896632 1 416 | 20799 0.899903 1 417 | 20849 0.898365 1 418 | 20899 0.893044 1 419 | 20949 0.895872 1 420 | 20999 0.89256 1 421 | 21049 0.89404 1 422 | 21099 0.892986 1 423 | 21149 0.89551 1 424 | 21199 0.892089 1 425 | 21249 0.894832 1 426 | 21299 0.898652 1 427 | 21349 0.897326 1 428 | 21399 0.894703 1 429 | 21449 0.89555 1 430 | 21499 0.892947 1 431 | 21549 0.887828 1 432 | 21599 0.891309 1 433 | 21649 0.890689 1 434 | 21699 0.894454 1 435 | 21749 0.895585 1 436 | 21799 0.890004 1 437 | 21849 0.893366 1 438 | 21899 0.89319 1 439 | 21949 0.893268 1 440 | 21999 0.889138 1 441 | 22049 0.893844 1 442 | 22099 0.889892 1 443 | 22149 0.890559 1 444 | 22199 0.88992 1 445 | 22249 0.891948 1 446 | 22299 0.888797 1 447 | 22349 0.894398 1 448 | 22399 0.891239 1 449 | 22449 0.88933 1 450 | 22499 0.88856 1 451 | 22549 
0.888545 1 452 | 22599 0.885743 1 453 | 22649 0.891771 1 454 | 22699 0.889974 1 455 | 22749 0.891103 1 456 | 22799 0.890128 1 457 | 22849 0.887571 1 458 | 22899 0.890759 1 459 | 22949 0.88877 1 460 | 22999 0.893165 1 461 | 23049 0.889091 1 462 | 23099 0.887159 1 463 | 23149 0.889723 1 464 | 23199 0.890797 1 465 | 23249 0.889868 1 466 | 23299 0.89235 1 467 | 23349 0.88949 1 468 | 23399 0.885947 1 469 | 23449 0.888823 1 470 | 23499 0.889505 1 471 | 23549 0.886923 1 472 | 23599 0.888205 1 473 | 23649 0.886086 1 474 | 23699 0.888442 1 475 | 23749 0.887119 1 476 | 23799 0.886804 1 477 | 23849 0.890143 1 478 | 23899 0.887202 1 479 | 23949 0.885968 1 480 | 23999 0.88536 1 481 | 24049 0.887108 1 482 | 24099 0.88824 1 483 | 24149 0.886692 1 484 | 24199 0.889212 1 485 | 24249 0.883576 1 486 | 24299 0.887668 1 487 | 24349 0.885809 1 488 | 24399 0.885323 1 489 | 24449 0.885058 1 490 | 24499 0.885458 1 491 | 24549 0.885508 1 492 | 24599 0.884947 1 493 | 24649 0.885955 1 494 | 24699 0.883605 1 495 | 24749 0.885408 1 496 | 24799 0.884836 1 497 | 24849 0.887555 1 498 | 24899 0.887181 1 499 | 24949 0.883476 1 500 | 24999 0.884772 1 501 | 25049 0.882735 1 502 | 25099 0.884755 1 503 | 25149 0.884378 1 504 | 25199 0.884345 1 505 | 25249 0.885039 1 506 | 25299 0.884596 1 507 | 25349 0.885398 1 508 | 25399 0.882912 1 509 | 25449 0.881625 1 510 | 25499 0.881081 1 511 | 25549 0.885556 1 512 | 25599 0.883133 1 513 | 25649 0.884563 1 514 | 25699 0.880969 1 515 | 25749 0.878611 1 516 | 25799 0.883674 1 517 | 25849 0.883181 1 518 | 25899 0.878883 1 519 | 25949 0.886832 1 520 | 25999 0.881745 1 521 | 26049 0.882799 1 522 | 26099 0.877581 1 523 | 26149 0.881046 1 524 | 26199 0.879403 1 525 | 26249 0.879996 1 526 | 26299 0.882352 1 527 | 26349 0.879746 1 528 | 26399 0.884169 1 529 | 26449 0.880278 1 530 | 26499 0.881578 1 531 | 26549 0.883109 1 532 | 26599 0.879032 1 533 | 26649 0.881549 1 534 | 26699 0.884896 1 535 | 26749 0.876863 1 536 | 26799 0.878774 1 537 | 26849 0.879655 1 538 | 26899 0.880638 1 539 | 26949 0.880729 1 540 | 26999 0.882887 1 541 | 27049 0.880235 1 542 | 27099 0.88071 1 543 | 27149 0.881169 1 544 | 27199 0.879719 1 545 | 27249 0.876991 1 546 | 27299 0.873313 1 547 | 27349 0.87788 1 548 | 27399 0.879124 1 549 | 27449 0.871632 1 550 | 27499 0.881169 1 551 | 27549 0.881127 1 552 | 27599 0.879018 1 553 | 27649 0.874938 1 554 | 27699 0.881453 1 555 | 27749 0.876022 1 556 | 27799 0.8771 1 557 | 27849 0.875733 1 558 | 27899 0.879375 1 559 | 27949 0.876892 1 560 | 27999 0.875568 1 561 | 28049 0.878598 1 562 | 28099 0.875674 1 563 | 28149 0.8754 1 564 | 28199 0.87596 1 565 | 28249 0.87754 1 566 | 28299 0.878357 1 567 | 28349 0.879414 1 568 | 28399 0.880165 1 569 | 28449 0.875096 1 570 | 28499 0.876795 1 571 | 28549 0.873307 1 572 | 28599 0.877396 1 573 | 28649 0.878189 1 574 | 28699 0.875114 1 575 | 28749 0.876011 1 576 | 28799 0.875985 1 577 | 28849 0.876252 1 578 | 28899 0.878018 1 579 | 28949 0.874965 1 580 | 28999 0.87658 1 581 | 29049 0.877135 1 582 | 29099 0.877994 1 583 | 29149 0.874369 1 584 | 29199 0.874564 1 585 | 29249 0.873754 1 586 | 29299 0.876782 1 587 | 29349 0.87422 1 588 | 29399 0.875253 1 589 | 29449 0.875482 1 590 | 29499 0.876381 1 591 | 29549 0.875265 1 592 | 29599 0.877023 1 593 | 29649 0.872355 1 594 | 29699 0.873839 1 595 | 29749 0.878291 1 596 | 29799 0.871382 1 597 | 29849 0.871902 1 598 | 29899 0.874739 1 599 | 29949 0.871633 1 600 | 29999 0.874522 1 601 | 30049 0.873053 1 602 | 30099 0.87618 1 603 | 30149 0.872214 1 604 | 30199 0.875308 1 605 | 30249 0.874404 1 606 | 30299 
0.871119 1 607 | 30349 0.872256 1 608 | 30399 0.874698 1 609 | 30449 0.872152 1 610 | 30499 0.870905 1 611 | 30549 0.874085 1 612 | 30599 0.87451 1 613 | 30649 0.873335 1 614 | 30699 0.871749 1 615 | 30749 0.873729 1 616 | 30799 0.872525 1 617 | 30849 0.872797 1 618 | 30899 0.875012 1 619 | 30949 0.869161 1 620 | 30999 0.873586 1 621 | 31049 0.871958 1 622 | 31099 0.87688 1 623 | 31149 0.872671 1 624 | 31199 0.870601 1 625 | 31249 0.872885 1 626 | 31299 0.870786 1 627 | 31349 0.87268 1 628 | 31399 0.868905 1 629 | 31449 0.871062 1 630 | 31499 0.868886 1 631 | 31549 0.87297 1 632 | 31599 0.871623 1 633 | 31649 0.868449 1 634 | 31699 0.868738 1 635 | 31749 0.870748 1 636 | 31799 0.872779 1 637 | 31849 0.872727 1 638 | 31899 0.869257 1 639 | 31949 0.871401 1 640 | 31999 0.869272 1 641 | 32049 0.870439 1 642 | 32099 0.868541 1 643 | 32149 0.869662 1 644 | 32199 0.869538 1 645 | 32249 0.872604 1 646 | 32299 0.86835 1 647 | 32349 0.871186 1 648 | 32399 0.869487 1 649 | 32449 0.870926 1 650 | 32499 0.871736 1 651 | 32549 0.866917 1 652 | 32599 0.866292 1 653 | 32649 0.868342 1 654 | 32699 0.868865 1 655 | 32749 0.868374 1 656 | 32799 0.867407 1 657 | 32849 0.865103 1 658 | 32899 0.865761 1 659 | 32949 0.874085 1 660 | 32999 0.869314 1 661 | 33049 0.868528 1 662 | 33099 0.869432 1 663 | 33149 0.874428 1 664 | 33199 0.871849 1 665 | 33249 0.865442 1 666 | 33299 0.868725 1 667 | 33349 0.866586 1 668 | 33399 0.869482 1 669 | 33449 0.869704 1 670 | 33499 0.869987 1 671 | 33549 0.870808 1 672 | 33599 0.870835 1 673 | 33649 0.867939 1 674 | 33699 0.869175 1 675 | 33749 0.86882 1 676 | 33799 0.863673 1 677 | 33849 0.865543 1 678 | 33899 0.866471 1 679 | 33949 0.864168 1 680 | 33999 0.867449 1 681 | 34049 0.864806 1 682 | 34099 0.866993 1 683 | 34149 0.867458 1 684 | 34199 0.86485 1 685 | 34249 0.865732 1 686 | 34299 0.867716 1 687 | 34349 0.864426 1 688 | 34399 0.869516 1 689 | 34449 0.863482 1 690 | 34499 0.867512 1 691 | 34549 0.866791 1 692 | 34599 0.857853 1 693 | 34649 0.867068 1 694 | 34699 0.865068 1 695 | 34749 0.866026 1 696 | 34799 0.866727 1 697 | 34849 0.864661 1 698 | 34899 0.866699 1 699 | 34949 0.862497 1 700 | 34999 0.866746 1 701 | 35049 0.868353 1 702 | 35099 0.865542 1 703 | 35149 0.861971 1 704 | 35199 0.86383 1 705 | 35249 0.867129 1 706 | 35299 0.861433 1 707 | 35349 0.862805 1 708 | 35399 0.8622 1 709 | 35449 0.86619 1 710 | 35499 0.864325 1 711 | 35549 0.865271 1 712 | 35599 0.866348 1 713 | 35649 0.865897 1 714 | 35699 0.862615 1 715 | 35749 0.865774 1 716 | 35799 0.866584 1 717 | 35849 0.868698 1 718 | 35899 0.860675 1 719 | 35949 0.863345 1 720 | 35999 0.861409 1 721 | 36049 0.866005 1 722 | 36099 0.863556 1 723 | 36149 0.860991 1 724 | 36199 0.861972 1 725 | 36249 0.864112 1 726 | 36299 0.861558 1 727 | 36349 0.862548 1 728 | 36399 0.861796 1 729 | 36449 0.863997 1 730 | 36499 0.863555 1 731 | 36549 0.862874 1 732 | 36599 0.863952 1 733 | 36649 0.861342 1 734 | 36699 0.859259 1 735 | 36749 0.863295 1 736 | 36799 0.860651 1 737 | 36849 0.862082 1 738 | 36899 0.863151 1 739 | 36949 0.861476 1 740 | 36999 0.863466 1 741 | 37049 0.860612 1 742 | 37099 0.862787 1 743 | 37149 0.863883 1 744 | 37199 0.859229 1 745 | 37249 0.860857 1 746 | 37299 0.866307 1 747 | 37349 0.865223 1 748 | 37399 0.855676 1 749 | 37449 0.859343 1 750 | 37499 0.862672 1 751 | 37549 0.860732 1 752 | 37599 0.86042 1 753 | 37649 0.859929 1 754 | 37699 0.865124 1 755 | 37749 0.8654 1 756 | 37799 0.861459 1 757 | 37849 0.863518 1 758 | 37899 0.862277 1 759 | 37949 0.85635 1 760 | 37999 0.860285 1 761 | 38049 
0.862695 1 762 | 38099 0.865193 1 763 | 38149 0.8625 1 764 | 38199 0.853345 1 765 | 38249 0.860924 1 766 | 38299 0.858771 1 767 | 38349 0.859797 1 768 | 38399 0.862212 1 769 | 38449 0.859542 1 770 | 38499 0.862665 1 771 | 38549 0.856858 1 772 | 38599 0.862348 1 773 | 38649 0.860692 1 774 | 38699 0.85947 1 775 | 38749 0.861378 1 776 | 38799 0.855007 1 777 | 38849 0.863591 1 778 | 38899 0.858691 1 779 | 38949 0.859028 1 780 | 38999 0.861767 1 781 | 39049 0.864772 1 782 | 39099 0.862588 1 783 | 39149 0.856146 1 784 | 39199 0.860223 1 785 | 39249 0.859361 1 786 | 39299 0.856066 1 787 | 39349 0.856606 1 788 | 39399 0.859492 1 789 | 39449 0.860591 1 790 | 39499 0.858739 1 791 | 39549 0.858714 1 792 | 39599 0.856319 1 793 | 39649 0.858463 1 794 | 39699 0.857813 1 795 | 39749 0.856659 1 796 | 39799 0.86029 1 797 | 39849 0.859122 1 798 | 39899 0.856977 1 799 | 39949 0.861849 1 800 | 39999 0.85778 1 801 | -------------------------------------------------------------------------------- /training_curves/training2-base.txt: -------------------------------------------------------------------------------- 1 | 49 1.3835 1 2 | 99 1.3654 1 3 | 149 1.27916 1 4 | 199 1.06716 1 5 | 249 0.898075 1 6 | 299 0.842784 1 7 | 349 0.821855 1 8 | 399 0.805002 1 9 | 449 0.790759 1 10 | 499 0.777374 1 11 | 549 0.769751 1 12 | 599 0.763246 1 13 | 649 0.758533 1 14 | 699 0.750551 1 15 | 749 0.746115 1 16 | 799 0.735584 1 17 | 849 0.735192 1 18 | 899 0.731949 1 19 | 949 0.724282 1 20 | 999 0.724215 1 21 | 1049 0.722286 1 22 | 1099 0.717194 1 23 | 1149 0.716039 1 24 | 1199 0.713419 1 25 | 1249 0.712823 1 26 | 1299 0.707149 1 27 | 1349 0.708501 1 28 | 1399 0.705979 1 29 | 1449 0.703027 1 30 | 1499 0.70249 1 31 | 1549 0.701798 1 32 | 1599 0.695534 1 33 | 1649 0.69556 1 34 | 1699 0.696269 1 35 | 1749 0.695914 1 36 | 1799 0.694691 1 37 | 1849 0.693839 1 38 | 1899 0.695351 1 39 | 1949 0.690226 1 40 | 1999 0.692989 1 41 | 2049 0.688481 1 42 | 2099 0.689216 1 43 | 2149 0.688207 1 44 | 2199 0.688639 1 45 | 2249 0.685854 1 46 | 2299 0.687744 1 47 | 2349 0.682689 1 48 | 2399 0.681521 1 49 | 2449 0.685677 1 50 | 2499 0.683036 1 51 | 2549 0.680603 1 52 | 2599 0.683302 1 53 | 2649 0.6831 1 54 | 2699 0.679636 1 55 | 2749 0.681276 1 56 | 2799 0.679197 1 57 | 2849 0.683272 1 58 | 2899 0.679906 1 59 | 2949 0.678133 1 60 | 2999 0.678199 1 61 | 3049 0.675458 1 62 | 3099 0.677452 1 63 | 3149 0.676094 1 64 | 3199 0.671883 1 65 | 3249 0.6756 1 66 | 3299 0.673381 1 67 | 3349 0.672706 1 68 | 3399 0.674726 1 69 | 3449 0.668592 1 70 | 3499 0.671164 1 71 | 3549 0.669716 1 72 | 3599 0.671381 1 73 | 3649 0.670161 1 74 | 3699 0.66857 1 75 | 3749 0.670754 1 76 | 3799 0.671302 1 77 | 3849 0.668948 1 78 | 3899 0.66549 1 79 | 3949 0.672084 1 80 | 3999 0.667265 1 81 | 4049 0.66696 1 82 | 4099 0.663426 1 83 | 4149 0.671104 1 84 | 4199 0.663317 1 85 | 4249 0.667967 1 86 | 4299 0.667019 1 87 | 4349 0.668075 1 88 | 4399 0.667159 1 89 | 4449 0.664677 1 90 | 4499 0.666196 1 91 | 4549 0.665014 1 92 | 4599 0.667888 1 93 | 4649 0.6678 1 94 | 4699 0.660331 1 95 | 4749 0.666376 1 96 | 4799 0.660846 1 97 | 4849 0.660373 1 98 | 4899 0.66201 1 99 | 4949 0.660705 1 100 | 4999 0.662448 1 101 | 5049 0.660969 1 102 | 5099 0.665651 1 103 | 5149 0.661737 1 104 | 5199 0.660414 1 105 | 5249 0.661235 1 106 | 5299 0.665466 1 107 | 5349 0.658104 1 108 | 5399 0.661996 1 109 | 5449 0.661386 1 110 | 5499 0.658109 1 111 | 5549 0.659415 1 112 | 5599 0.659104 1 113 | 5649 0.65523 1 114 | 5699 0.65852 1 115 | 5749 0.655412 1 116 | 5799 0.657068 1 117 | 5849 0.659645 1 118 | 5899 0.659839 1 
119 | 5949 0.65743 1 120 | 5999 0.654583 1 121 | 6049 0.657806 1 122 | 6099 0.657562 1 123 | 6149 0.656706 1 124 | 6199 0.657607 1 125 | 6249 0.65619 1 126 | 6299 0.657329 1 127 | 6349 0.658308 1 128 | 6399 0.65778 1 129 | 6449 0.658383 1 130 | 6499 0.653372 1 131 | 6549 0.658806 1 132 | 6599 0.651928 1 133 | 6649 0.653269 1 134 | 6699 0.655763 1 135 | 6749 0.655785 1 136 | 6799 0.647144 1 137 | 6849 0.653508 1 138 | 6899 0.65422 1 139 | 6949 0.650894 1 140 | 6999 0.655273 1 141 | 7049 0.649727 1 142 | 7099 0.650728 1 143 | 7149 0.653561 1 144 | 7199 0.652764 1 145 | 7249 0.650775 1 146 | 7299 0.653088 1 147 | 7349 0.647068 1 148 | 7399 0.652807 1 149 | 7449 0.64757 1 150 | 7499 0.650957 1 151 | 7549 0.648502 1 152 | 7599 0.654449 1 153 | 7649 0.648264 1 154 | 7699 0.646913 1 155 | 7749 0.648635 1 156 | 7799 0.651738 1 157 | 7849 0.648419 1 158 | 7899 0.650118 1 159 | 7949 0.645534 1 160 | 7999 0.648243 1 161 | 8049 0.647857 1 162 | 8099 0.648609 1 163 | 8149 0.649413 1 164 | 8199 0.644465 1 165 | 8249 0.649418 1 166 | 8299 0.646284 1 167 | 8349 0.64704 1 168 | 8399 0.649541 1 169 | 8449 0.648771 1 170 | 8499 0.646956 1 171 | 8549 0.647063 1 172 | 8599 0.646681 1 173 | 8649 0.645717 1 174 | 8699 0.644463 1 175 | 8749 0.646432 1 176 | 8799 0.64415 1 177 | 8849 0.647798 1 178 | 8899 0.646965 1 179 | 8949 0.647449 1 180 | 8999 0.645338 1 181 | 9049 0.646842 1 182 | 9099 0.64484 1 183 | 9149 0.646827 1 184 | 9199 0.642408 1 185 | 9249 0.642403 1 186 | 9299 0.648033 1 187 | 9349 0.643191 1 188 | 9399 0.643685 1 189 | 9449 0.64229 1 190 | 9499 0.645935 1 191 | 9549 0.642874 1 192 | 9599 0.642139 1 193 | 9649 0.643667 1 194 | 9699 0.645285 1 195 | 9749 0.642422 1 196 | 9799 0.642178 1 197 | 9849 0.638799 1 198 | 9899 0.640746 1 199 | 9949 0.640442 1 200 | 9999 0.642483 1 201 | 10049 0.642651 1 202 | 10099 0.643273 1 203 | 10149 0.646314 1 204 | 10199 0.640843 1 205 | 10249 0.639187 1 206 | 10299 0.642798 1 207 | 10349 0.640794 1 208 | 10399 0.639403 1 209 | 10449 0.641958 1 210 | 10499 0.64167 1 211 | 10549 0.63796 1 212 | 10599 0.641701 1 213 | 10649 0.641723 1 214 | 10699 0.639337 1 215 | 10749 0.638384 1 216 | 10799 0.640402 1 217 | 10849 0.641985 1 218 | 10899 0.644807 1 219 | 10949 0.640501 1 220 | 10999 0.636272 1 221 | 11049 0.641723 1 222 | 11099 0.637127 1 223 | 11149 0.641172 1 224 | 11199 0.645551 1 225 | 11249 0.641008 1 226 | 11299 0.637812 1 227 | 11349 0.641369 1 228 | 11399 0.636894 1 229 | 11449 0.639245 1 230 | 11499 0.639927 1 231 | 11549 0.639077 1 232 | 11599 0.637092 1 233 | 11649 0.640697 1 234 | 11699 0.636654 1 235 | 11749 0.638793 1 236 | 11799 0.638852 1 237 | 11849 0.638647 1 238 | 11899 0.638767 1 239 | 11949 0.635336 1 240 | 11999 0.637087 1 241 | 12049 0.637712 1 242 | 12099 0.634674 1 243 | 12149 0.636928 1 244 | 12199 0.63879 1 245 | 12249 0.639218 1 246 | 12299 0.63734 1 247 | 12349 0.634867 1 248 | 12399 0.636149 1 249 | 12449 0.638241 1 250 | 12499 0.636348 1 251 | 12549 0.636353 1 252 | 12599 0.636916 1 253 | 12649 0.637261 1 254 | 12699 0.635647 1 255 | 12749 0.634806 1 256 | 12799 0.63399 1 257 | 12849 0.632692 1 258 | 12899 0.634496 1 259 | 12949 0.634549 1 260 | 12999 0.637761 1 261 | 13049 0.638821 1 262 | 13099 0.634216 1 263 | 13149 0.63621 1 264 | 13199 0.638743 1 265 | 13249 0.638356 1 266 | 13299 0.633365 1 267 | 13349 0.631716 1 268 | 13399 0.63412 1 269 | 13449 0.635171 1 270 | 13499 0.638568 1 271 | 13549 0.634052 1 272 | 13599 0.631849 1 273 | 13649 0.634168 1 274 | 13699 0.637905 1 275 | 13749 0.633472 1 276 | 13799 0.634292 1 277 | 13849 
0.633243 1 278 | 13899 0.634176 1 279 | 13949 0.63591 1 280 | 13999 0.635844 1 281 | 14049 0.634443 1 282 | 14099 0.631283 1 283 | 14149 0.63232 1 284 | 14199 0.634237 1 285 | 14249 0.634615 1 286 | 14299 0.634767 1 287 | 14349 0.636161 1 288 | 14399 0.639341 1 289 | 14449 0.634569 1 290 | 14499 0.633948 1 291 | 14549 0.629968 1 292 | 14599 0.634436 1 293 | 14649 0.631588 1 294 | 14699 0.633942 1 295 | 14749 0.6287 1 296 | 14799 0.629859 1 297 | 14849 0.633048 1 298 | 14899 0.630315 1 299 | 14949 0.635644 1 300 | 14999 0.634636 1 301 | 15049 0.630121 1 302 | 15099 0.633641 1 303 | 15149 0.632733 1 304 | 15199 0.632655 1 305 | 15249 0.631306 1 306 | 15299 0.636557 1 307 | 15349 0.634877 1 308 | 15399 0.633504 1 309 | 15449 0.630619 1 310 | 15499 0.632604 1 311 | 15549 0.629837 1 312 | 15599 0.632226 1 313 | 15649 0.629562 1 314 | 15699 0.634071 1 315 | 15749 0.629718 1 316 | 15799 0.633443 1 317 | 15849 0.628905 1 318 | 15899 0.634551 1 319 | 15949 0.634615 1 320 | 15999 0.629543 1 321 | 16049 0.62847 1 322 | 16099 0.631488 1 323 | 16149 0.632321 1 324 | 16199 0.629548 1 325 | 16249 0.629288 1 326 | 16299 0.62849 1 327 | 16349 0.633095 1 328 | 16399 0.628616 1 329 | 16449 0.631223 1 330 | 16499 0.631587 1 331 | 16549 0.632583 1 332 | 16599 0.627199 1 333 | 16649 0.630674 1 334 | 16699 0.632532 1 335 | 16749 0.629257 1 336 | 16799 0.62696 1 337 | 16849 0.62753 1 338 | 16899 0.625513 1 339 | 16949 0.629718 1 340 | 16999 0.63345 1 341 | 17049 0.62873 1 342 | 17099 0.632239 1 343 | 17149 0.62873 1 344 | 17199 0.630198 1 345 | 17249 0.629027 1 346 | 17299 0.629447 1 347 | 17349 0.63055 1 348 | 17399 0.630831 1 349 | 17449 0.628202 1 350 | 17499 0.62455 1 351 | 17549 0.626539 1 352 | 17599 0.630053 1 353 | 17649 0.627508 1 354 | 17699 0.627627 1 355 | 17749 0.629489 1 356 | 17799 0.628617 1 357 | 17849 0.628823 1 358 | 17899 0.629438 1 359 | 17949 0.628467 1 360 | 17999 0.625742 1 361 | 18049 0.625643 1 362 | 18099 0.624598 1 363 | 18149 0.632057 1 364 | 18199 0.631284 1 365 | 18249 0.629881 1 366 | 18299 0.623429 1 367 | 18349 0.630557 1 368 | 18399 0.628938 1 369 | 18449 0.626503 1 370 | 18499 0.625343 1 371 | 18549 0.627191 1 372 | 18599 0.631699 1 373 | 18649 0.629553 1 374 | 18699 0.627031 1 375 | 18749 0.622409 1 376 | 18799 0.629561 1 377 | 18849 0.629541 1 378 | 18899 0.628524 1 379 | 18949 0.627988 1 380 | 18999 0.62682 1 381 | 19049 0.625936 1 382 | 19099 0.625102 1 383 | 19149 0.627858 1 384 | 19199 0.626226 1 385 | 19249 0.628971 1 386 | 19299 0.625265 1 387 | 19349 0.625178 1 388 | 19399 0.62265 1 389 | 19449 0.624819 1 390 | 19499 0.628786 1 391 | 19549 0.627089 1 392 | 19599 0.626789 1 393 | 19649 0.630322 1 394 | 19699 0.626832 1 395 | 19749 0.625996 1 396 | 19799 0.627431 1 397 | 19849 0.625469 1 398 | 19899 0.631401 1 399 | 19949 0.622618 1 400 | 19999 0.626868 1 401 | 20049 0.627019 1 402 | 20099 0.629345 1 403 | 20149 0.621963 1 404 | 20199 0.624792 1 405 | 20249 0.621603 1 406 | 20299 0.623088 1 407 | 20349 0.622757 1 408 | 20399 0.625335 1 409 | 20449 0.622965 1 410 | 20499 0.625849 1 411 | 20549 0.626374 1 412 | 20599 0.622931 1 413 | 20649 0.623542 1 414 | 20699 0.627215 1 415 | 20749 0.628026 1 416 | 20799 0.622973 1 417 | 20849 0.620962 1 418 | 20899 0.624922 1 419 | 20949 0.626078 1 420 | 20999 0.627999 1 421 | 21049 0.626201 1 422 | 21099 0.62015 1 423 | 21149 0.62376 1 424 | 21199 0.624349 1 425 | 21249 0.628533 1 426 | 21299 0.621437 1 427 | 21349 0.6233 1 428 | 21399 0.622965 1 429 | 21449 0.622695 1 430 | 21499 0.625259 1 431 | 21549 0.623119 1 432 | 21599 0.620627 
1 433 | 21649 0.620574 1 434 | 21699 0.621645 1 435 | 21749 0.62384 1 436 | 21799 0.619503 1 437 | 21849 0.620977 1 438 | 21899 0.627241 1 439 | 21949 0.625423 1 440 | 21999 0.62266 1 441 | 22049 0.621591 1 442 | 22099 0.622643 1 443 | 22149 0.622663 1 444 | 22199 0.62062 1 445 | 22249 0.622345 1 446 | 22299 0.625578 1 447 | 22349 0.621582 1 448 | 22399 0.622603 1 449 | 22449 0.623378 1 450 | 22499 0.619384 1 451 | 22549 0.624774 1 452 | 22599 0.621744 1 453 | 22649 0.625284 1 454 | 22699 0.620154 1 455 | 22749 0.624146 1 456 | 22799 0.620173 1 457 | 22849 0.619095 1 458 | 22899 0.624952 1 459 | 22949 0.625526 1 460 | 22999 0.617354 1 461 | 23049 0.621766 1 462 | 23099 0.622681 1 463 | 23149 0.622303 1 464 | 23199 0.621162 1 465 | 23249 0.618472 1 466 | 23299 0.623246 1 467 | 23349 0.621286 1 468 | 23399 0.622219 1 469 | 23449 0.621299 1 470 | 23499 0.621792 1 471 | 23549 0.619831 1 472 | 23599 0.62322 1 473 | 23649 0.623345 1 474 | 23699 0.623459 1 475 | 23749 0.621302 1 476 | 23799 0.618751 1 477 | 23849 0.620773 1 478 | 23899 0.622165 1 479 | 23949 0.620073 1 480 | 23999 0.62068 1 481 | 24049 0.621659 1 482 | 24099 0.621508 1 483 | 24149 0.622873 1 484 | 24199 0.624001 1 485 | 24249 0.623922 1 486 | 24299 0.620573 1 487 | 24349 0.62183 1 488 | 24399 0.623118 1 489 | 24449 0.619091 1 490 | 24499 0.620285 1 491 | 24549 0.620779 1 492 | 24599 0.617593 1 493 | 24649 0.622135 1 494 | 24699 0.622171 1 495 | 24749 0.623095 1 496 | 24799 0.618611 1 497 | 24849 0.620072 1 498 | 24899 0.617927 1 499 | 24949 0.623692 1 500 | 24999 0.621249 1 501 | 25049 0.61877 1 502 | 25099 0.618387 1 503 | 25149 0.623064 1 504 | 25199 0.620606 1 505 | 25249 0.622207 1 506 | 25299 0.624443 1 507 | 25349 0.62055 1 508 | 25399 0.618903 1 509 | 25449 0.623953 1 510 | 25499 0.619314 1 511 | 25549 0.616535 1 512 | 25599 0.61957 1 513 | 25649 0.620392 1 514 | 25699 0.619088 1 515 | 25749 0.62202 1 516 | 25799 0.618822 1 517 | 25849 0.620389 1 518 | 25899 0.620067 1 519 | 25949 0.620164 1 520 | 25999 0.616125 1 521 | 26049 0.621666 1 522 | 26099 0.616756 1 523 | 26149 0.622722 1 524 | 26199 0.620562 1 525 | 26249 0.615141 1 526 | 26299 0.621307 1 527 | 26349 0.619379 1 528 | 26399 0.614863 1 529 | 26449 0.619043 1 530 | 26499 0.620826 1 531 | 26549 0.619983 1 532 | 26599 0.619757 1 533 | 26649 0.61941 1 534 | 26699 0.615815 1 535 | 26749 0.617949 1 536 | 26799 0.618347 1 537 | 26849 0.617539 1 538 | 26899 0.61731 1 539 | 26949 0.619534 1 540 | 26999 0.61717 1 541 | 27049 0.615363 1 542 | 27099 0.618543 1 543 | 27149 0.617632 1 544 | 27199 0.619677 1 545 | 27249 0.615479 1 546 | 27299 0.614855 1 547 | 27349 0.618864 1 548 | 27399 0.618719 1 549 | 27449 0.618372 1 550 | 27499 0.618609 1 551 | 27549 0.622728 1 552 | 27599 0.62059 1 553 | 27649 0.620535 1 554 | 27699 0.619689 1 555 | 27749 0.613099 1 556 | 27799 0.61767 1 557 | 27849 0.617305 1 558 | 27899 0.617324 1 559 | 27949 0.617508 1 560 | 27999 0.615129 1 561 | 28049 0.613772 1 562 | 28099 0.618666 1 563 | 28149 0.615244 1 564 | 28199 0.61715 1 565 | 28249 0.614532 1 566 | 28299 0.616619 1 567 | 28349 0.614579 1 568 | 28399 0.61297 1 569 | 28449 0.61622 1 570 | 28499 0.617253 1 571 | 28549 0.616924 1 572 | 28599 0.618186 1 573 | 28649 0.612555 1 574 | 28699 0.615909 1 575 | 28749 0.615313 1 576 | 28799 0.61684 1 577 | 28849 0.62049 1 578 | 28899 0.615357 1 579 | 28949 0.616366 1 580 | 28999 0.61886 1 581 | 29049 0.617826 1 582 | 29099 0.613775 1 583 | 29149 0.614953 1 584 | 29199 0.616338 1 585 | 29249 0.617549 1 586 | 29299 0.615834 1 587 | 29349 0.61897 1 588 | 
29399 0.613274 1 589 | 29449 0.617955 1 590 | 29499 0.615651 1 591 | 29549 0.6173 1 592 | 29599 0.616414 1 593 | 29649 0.611494 1 594 | 29699 0.616461 1 595 | 29749 0.616178 1 596 | 29799 0.614352 1 597 | 29849 0.613962 1 598 | 29899 0.613133 1 599 | 29949 0.618047 1 600 | 29999 0.611911 1 601 | 30049 0.61679 1 602 | 30099 0.612734 1 603 | 30149 0.615138 1 604 | 30199 0.614887 1 605 | 30249 0.619261 1 606 | 30299 0.615611 1 607 | 30349 0.616755 1 608 | 30399 0.619693 1 609 | 30449 0.615476 1 610 | 30499 0.613639 1 611 | 30549 0.613412 1 612 | 30599 0.615185 1 613 | 30649 0.613957 1 614 | 30699 0.611918 1 615 | 30749 0.614539 1 616 | 30799 0.611123 1 617 | 30849 0.611224 1 618 | 30899 0.614103 1 619 | 30949 0.61631 1 620 | 30999 0.616 1 621 | 31049 0.613888 1 622 | 31099 0.612688 1 623 | 31149 0.616255 1 624 | 31199 0.616121 1 625 | 31249 0.613062 1 626 | 31299 0.616215 1 627 | 31349 0.614964 1 628 | 31399 0.614165 1 629 | 31449 0.611402 1 630 | 31499 0.610602 1 631 | 31549 0.615687 1 632 | 31599 0.61358 1 633 | 31649 0.613457 1 634 | 31699 0.614721 1 635 | 31749 0.613103 1 636 | 31799 0.612151 1 637 | 31849 0.614945 1 638 | 31899 0.609521 1 639 | 31949 0.613485 1 640 | 31999 0.611714 1 641 | 32049 0.615631 1 642 | 32099 0.614374 1 643 | 32149 0.614055 1 644 | 32199 0.61106 1 645 | 32249 0.615114 1 646 | 32299 0.612793 1 647 | 32349 0.612354 1 648 | 32399 0.609616 1 649 | 32449 0.610542 1 650 | 32499 0.610858 1 651 | 32549 0.613243 1 652 | 32599 0.611319 1 653 | 32649 0.61386 1 654 | 32699 0.615041 1 655 | 32749 0.611687 1 656 | 32799 0.613153 1 657 | 32849 0.613505 1 658 | 32899 0.612545 1 659 | 32949 0.611618 1 660 | 32999 0.613342 1 661 | 33049 0.6085 1 662 | 33099 0.609762 1 663 | 33149 0.613342 1 664 | 33199 0.609436 1 665 | 33249 0.610012 1 666 | 33299 0.610232 1 667 | 33349 0.610717 1 668 | 33399 0.611867 1 669 | 33449 0.608565 1 670 | 33499 0.610704 1 671 | 33549 0.609574 1 672 | 33599 0.61497 1 673 | 33649 0.611609 1 674 | 33699 0.612463 1 675 | 33749 0.611978 1 676 | 33799 0.608761 1 677 | 33849 0.611945 1 678 | 33899 0.614785 1 679 | 33949 0.612255 1 680 | 33999 0.605088 1 681 | 34049 0.609797 1 682 | 34099 0.613402 1 683 | 34149 0.612874 1 684 | 34199 0.611038 1 685 | 34249 0.610025 1 686 | 34299 0.609819 1 687 | 34349 0.609227 1 688 | 34399 0.609985 1 689 | 34449 0.611577 1 690 | 34499 0.61236 1 691 | 34549 0.614052 1 692 | 34599 0.610495 1 693 | 34649 0.606097 1 694 | 34699 0.610345 1 695 | 34749 0.607137 1 696 | 34799 0.609135 1 697 | 34849 0.612098 1 698 | 34899 0.613135 1 699 | 34949 0.611761 1 700 | 34999 0.610385 1 701 | 35049 0.610251 1 702 | 35099 0.613396 1 703 | 35149 0.609213 1 704 | 35199 0.610567 1 705 | 35249 0.610363 1 706 | 35299 0.609358 1 707 | 35349 0.610848 1 708 | 35399 0.608695 1 709 | 35449 0.608275 1 710 | 35499 0.607192 1 711 | 35549 0.610387 1 712 | 35599 0.612257 1 713 | 35649 0.606682 1 714 | 35699 0.610229 1 715 | 35749 0.612229 1 716 | 35799 0.611705 1 717 | 35849 0.60993 1 718 | 35899 0.607506 1 719 | 35949 0.604757 1 720 | 35999 0.608895 1 721 | 36049 0.610579 1 722 | 36099 0.611773 1 723 | 36149 0.612772 1 724 | 36199 0.610259 1 725 | 36249 0.609076 1 726 | 36299 0.610613 1 727 | 36349 0.606218 1 728 | 36399 0.607073 1 729 | 36449 0.608352 1 730 | 36499 0.613757 1 731 | 36549 0.613004 1 732 | 36599 0.610224 1 733 | 36649 0.610007 1 734 | 36699 0.608684 1 735 | 36749 0.611321 1 736 | 36799 0.608555 1 737 | 36849 0.61029 1 738 | 36899 0.611946 1 739 | 36949 0.606616 1 740 | 36999 0.603705 1 741 | 37049 0.607314 1 742 | 37099 0.608633 1 743 | 37149 
0.607845 1 744 | 37199 0.607077 1 745 | 37249 0.61113 1 746 | 37299 0.605728 1 747 | 37349 0.614082 1 748 | 37399 0.604486 1 749 | 37449 0.606729 1 750 | 37499 0.60939 1 751 | 37549 0.60368 1 752 | 37599 0.609206 1 753 | 37649 0.606803 1 754 | 37699 0.609691 1 755 | 37749 0.603357 1 756 | 37799 0.605825 1 757 | 37849 0.609864 1 758 | 37899 0.607019 1 759 | 37949 0.607433 1 760 | 37999 0.60769 1 761 | 38049 0.607036 1 762 | 38099 0.605916 1 763 | 38149 0.607784 1 764 | 38199 0.607969 1 765 | 38249 0.606034 1 766 | 38299 0.609885 1 767 | 38349 0.604071 1 768 | 38399 0.602679 1 769 | 38449 0.605338 1 770 | 38499 0.607941 1 771 | 38549 0.608051 1 772 | 38599 0.606186 1 773 | 38649 0.605152 1 774 | 38699 0.60688 1 775 | 38749 0.607166 1 776 | 38799 0.605876 1 777 | 38849 0.604394 1 778 | 38899 0.603765 1 779 | 38949 0.605302 1 780 | 38999 0.606054 1 781 | 39049 0.605981 1 782 | 39099 0.610889 1 783 | 39149 0.604407 1 784 | 39199 0.607696 1 785 | 39249 0.604224 1 786 | 39299 0.607629 1 787 | 39349 0.605055 1 788 | 39399 0.604709 1 789 | 39449 0.60429 1 790 | 39499 0.604301 1 791 | 39549 0.603411 1 792 | 39599 0.60788 1 793 | 39649 0.604592 1 794 | 39699 0.605966 1 795 | 39749 0.60627 1 796 | 39799 0.605602 1 797 | 39849 0.602853 1 798 | 39899 0.602376 1 799 | 39949 0.599885 1 800 | 39999 0.602588 1 801 | -------------------------------------------------------------------------------- /training_curves/training2-diff4.txt: -------------------------------------------------------------------------------- 1 | 49 1.38579 4 2 | 99 1.3808 4 3 | 149 1.36331 4 4 | 199 1.32196 4 5 | 249 1.25446 4 6 | 299 1.19999 4 7 | 349 1.16659 4 8 | 399 1.14569 4 9 | 449 1.12844 4 10 | 499 1.11705 4 11 | 549 1.1031 4 12 | 599 1.09207 4 13 | 649 1.08512 4 14 | 699 1.07472 4 15 | 749 1.0711 4 16 | 799 1.06226 4 17 | 849 1.05815 4 18 | 899 1.05166 4 19 | 949 1.04665 4 20 | 999 1.04015 4 21 | 1049 1.03901 4 22 | 1099 1.03465 4 23 | 1149 1.02828 4 24 | 1199 1.02748 4 25 | 1249 1.02431 4 26 | 1299 1.01762 4 27 | 1349 1.02052 4 28 | 1399 1.01763 4 29 | 1449 1.0129 4 30 | 1499 1.01026 4 31 | 1549 1.00893 4 32 | 1599 1.01212 4 33 | 1649 1.00743 4 34 | 1699 1.00289 4 35 | 1749 1.00404 4 36 | 1799 1.00107 4 37 | 1849 0.999738 4 38 | 1899 0.999932 4 39 | 1949 0.998227 4 40 | 1999 0.996651 4 41 | 2049 0.994476 4 42 | 2099 0.998385 4 43 | 2149 0.995721 4 44 | 2199 0.99326 4 45 | 2249 0.994445 4 46 | 2299 0.993456 4 47 | 2349 0.989528 4 48 | 2399 0.990101 4 49 | 2449 0.988586 4 50 | 2499 0.986978 4 51 | 2549 0.986159 4 52 | 2599 0.987722 4 53 | 2649 0.982232 4 54 | 2699 0.985156 4 55 | 2749 0.987046 4 56 | 2799 0.983389 4 57 | 2849 0.979218 4 58 | 2899 0.980639 4 59 | 2949 0.977392 4 60 | 2999 0.979508 4 61 | 3049 0.97922 4 62 | 3099 0.97799 4 63 | 3149 0.979396 4 64 | 3199 0.979277 4 65 | 3249 0.97293 4 66 | 3299 0.977267 4 67 | 3349 0.974926 4 68 | 3399 0.974027 4 69 | 3449 0.973845 4 70 | 3499 0.97368 4 71 | 3549 0.97499 4 72 | 3599 0.973389 4 73 | 3649 0.975154 4 74 | 3699 0.974727 4 75 | 3749 0.968236 4 76 | 3799 0.971146 4 77 | 3849 0.96886 4 78 | 3899 0.971677 4 79 | 3949 0.969054 4 80 | 3999 0.968525 4 81 | 4049 0.967231 4 82 | 4099 0.967776 4 83 | 4149 0.967651 4 84 | 4199 0.96576 4 85 | 4249 0.966019 4 86 | 4299 0.965341 4 87 | 4349 0.967035 4 88 | 4399 0.966504 4 89 | 4449 0.96899 4 90 | 4499 0.966124 4 91 | 4549 0.964134 4 92 | 4599 0.965088 4 93 | 4649 0.965996 4 94 | 4699 0.961722 4 95 | 4749 0.964204 4 96 | 4799 0.963685 4 97 | 4849 0.9624 4 98 | 4899 0.960045 4 99 | 4949 0.962433 4 100 | 4999 0.960458 4 101 | 5049 
0.962776 4 102 | 5099 0.961186 4 103 | 5149 0.961233 4 104 | 5199 0.960461 4 105 | 5249 0.959029 4 106 | 5299 0.957241 4 107 | 5349 0.959997 4 108 | 5399 0.962287 4 109 | 5449 0.960743 4 110 | 5499 0.958 4 111 | 5549 0.961457 4 112 | 5599 0.957724 4 113 | 5649 0.958058 4 114 | 5699 0.95612 4 115 | 5749 0.952592 4 116 | 5799 0.956682 4 117 | 5849 0.954917 4 118 | 5899 0.95563 4 119 | 5949 0.957228 4 120 | 5999 0.957006 4 121 | 6049 0.95543 4 122 | 6099 0.956377 4 123 | 6149 0.953939 4 124 | 6199 0.95297 4 125 | 6249 0.953605 4 126 | 6299 0.953131 4 127 | 6349 0.952058 4 128 | 6399 0.956809 4 129 | 6449 0.952243 4 130 | 6499 0.952185 4 131 | 6549 0.952876 4 132 | 6599 0.95231 4 133 | 6649 0.955609 4 134 | 6699 0.94884 4 135 | 6749 0.947546 4 136 | 6799 0.953263 4 137 | 6849 0.952234 4 138 | 6899 0.949593 4 139 | 6949 0.951751 4 140 | 6999 0.948495 4 141 | 7049 0.949287 4 142 | 7099 0.952362 4 143 | 7149 0.949204 4 144 | 7199 0.950694 4 145 | 7249 0.950269 4 146 | 7299 0.947913 4 147 | 7349 0.946147 4 148 | 7399 0.946609 4 149 | 7449 0.947164 4 150 | 7499 0.946192 4 151 | 7549 0.94355 4 152 | 7599 0.947575 4 153 | 7649 0.944674 4 154 | 7699 0.948104 4 155 | 7749 0.943802 4 156 | 7799 0.945771 4 157 | 7849 0.948735 4 158 | 7899 0.945384 4 159 | 7949 0.944825 4 160 | 7999 0.946774 4 161 | 8049 0.944197 4 162 | 8099 0.945688 4 163 | 8149 0.946988 4 164 | 8199 0.942686 4 165 | 8249 0.941773 4 166 | 8299 0.941958 4 167 | 8349 0.944841 4 168 | 8399 0.941624 4 169 | 8449 0.94622 4 170 | 8499 0.943101 4 171 | 8549 0.94444 4 172 | 8599 0.944485 4 173 | 8649 0.94304 4 174 | 8699 0.943323 4 175 | 8749 0.941316 4 176 | 8799 0.939548 4 177 | 8849 0.94278 4 178 | 8899 0.940417 4 179 | 8949 0.939575 4 180 | 8999 0.939382 4 181 | 9049 0.94185 4 182 | 9099 0.941752 4 183 | 9149 0.940182 4 184 | 9199 0.93541 4 185 | 9249 0.940642 4 186 | 9299 0.939107 4 187 | 9349 0.941273 4 188 | 9399 0.939016 4 189 | 9449 0.939926 4 190 | 9499 0.938919 4 191 | 9549 0.939185 4 192 | 9599 0.938189 4 193 | 9649 0.935175 4 194 | 9699 0.938689 4 195 | 9749 0.938151 4 196 | 9799 0.937594 4 197 | 9849 0.934775 4 198 | 9899 0.93875 4 199 | 9949 0.93681 4 200 | 9999 0.940337 4 201 | 10049 0.935518 4 202 | 10099 0.936488 4 203 | 10149 0.934847 4 204 | 10199 0.934211 4 205 | 10249 0.932953 4 206 | 10299 0.936671 4 207 | 10349 0.930771 4 208 | 10399 0.937535 4 209 | 10449 0.933913 4 210 | 10499 0.937002 4 211 | 10549 0.934734 4 212 | 10599 0.9334 4 213 | 10649 0.936382 4 214 | 10699 0.936897 4 215 | 10749 0.934564 4 216 | 10799 0.936322 4 217 | 10849 0.932764 4 218 | 10899 0.934056 4 219 | 10949 0.931995 4 220 | 10999 0.928319 4 221 | 11049 0.933159 4 222 | 11099 0.931981 4 223 | 11149 0.928124 4 224 | 11199 0.932802 4 225 | 11249 0.930047 4 226 | 11299 0.929145 4 227 | 11349 0.930205 4 228 | 11399 0.929323 4 229 | 11449 0.931842 4 230 | 11499 0.930667 4 231 | 11549 0.930823 4 232 | 11599 0.929463 4 233 | 11649 0.932158 4 234 | 11699 0.930705 4 235 | 11749 0.929849 4 236 | 11799 0.930913 4 237 | 11849 0.935524 4 238 | 11899 0.928711 4 239 | 11949 0.926936 4 240 | 11999 0.925773 4 241 | 12049 0.926893 4 242 | 12099 0.930544 4 243 | 12149 0.930573 4 244 | 12199 0.925903 4 245 | 12249 0.926437 4 246 | 12299 0.930382 4 247 | 12349 0.928398 4 248 | 12399 0.924937 4 249 | 12449 0.929376 4 250 | 12499 0.926868 4 251 | 12549 0.926876 4 252 | 12599 0.928459 4 253 | 12649 0.92517 4 254 | 12699 0.928175 4 255 | 12749 0.925662 4 256 | 12799 0.925858 4 257 | 12849 0.925152 4 258 | 12899 0.926152 4 259 | 12949 0.924661 4 260 | 12999 0.921631 4 261 | 
13049 0.921846 4 262 | 13099 0.925987 4 263 | 13149 0.926145 4 264 | 13199 0.927058 4 265 | 13249 0.92677 4 266 | 13299 0.92367 4 267 | 13349 0.923632 4 268 | 13399 0.92423 4 269 | 13449 0.922399 4 270 | 13499 0.923806 4 271 | 13549 0.921469 4 272 | 13599 0.920194 4 273 | 13649 0.919395 4 274 | 13699 0.924522 4 275 | 13749 0.919408 4 276 | 13799 0.920422 4 277 | 13849 0.923358 4 278 | 13899 0.920282 4 279 | 13949 0.922385 4 280 | 13999 0.919651 4 281 | 14049 0.923047 4 282 | 14099 0.919655 4 283 | 14149 0.918523 4 284 | 14199 0.921321 4 285 | 14249 0.918995 4 286 | 14299 0.920343 4 287 | 14349 0.919736 4 288 | 14399 0.919159 4 289 | 14449 0.92004 4 290 | 14499 0.919992 4 291 | 14549 0.921026 4 292 | 14599 0.914097 4 293 | 14649 0.918737 4 294 | 14699 0.919017 4 295 | 14749 0.917101 4 296 | 14799 0.915907 4 297 | 14849 0.917407 4 298 | 14899 0.918398 4 299 | 14949 0.916721 4 300 | 14999 0.919224 4 301 | 15049 0.915383 4 302 | 15099 0.916978 4 303 | 15149 0.91735 4 304 | 15199 0.918155 4 305 | 15249 0.916042 4 306 | 15299 0.91661 4 307 | 15349 0.916066 4 308 | 15399 0.915437 4 309 | 15449 0.914263 4 310 | 15499 0.913563 4 311 | 15549 0.915103 4 312 | 15599 0.914994 4 313 | 15649 0.913023 4 314 | 15699 0.912207 4 315 | 15749 0.914118 4 316 | 15799 0.913344 4 317 | 15849 0.912244 4 318 | 15899 0.913068 4 319 | 15949 0.91401 4 320 | 15999 0.913562 4 321 | 16049 0.913331 4 322 | 16099 0.913531 4 323 | 16149 0.911837 4 324 | 16199 0.912721 4 325 | 16249 0.913937 4 326 | 16299 0.913716 4 327 | 16349 0.913182 4 328 | 16399 0.913776 4 329 | 16449 0.911721 4 330 | 16499 0.910849 4 331 | 16549 0.910395 4 332 | 16599 0.912289 4 333 | 16649 0.907911 4 334 | 16699 0.911635 4 335 | 16749 0.90984 4 336 | 16799 0.905802 4 337 | 16849 0.908122 4 338 | 16899 0.909783 4 339 | 16949 0.909032 4 340 | 16999 0.910612 4 341 | 17049 0.907287 4 342 | 17099 0.909033 4 343 | 17149 0.909352 4 344 | 17199 0.907383 4 345 | 17249 0.908324 4 346 | 17299 0.911526 4 347 | 17349 0.907736 4 348 | 17399 0.90571 4 349 | 17449 0.903736 4 350 | 17499 0.907852 4 351 | 17549 0.906126 4 352 | 17599 0.908261 4 353 | 17649 0.907253 4 354 | 17699 0.903876 4 355 | 17749 0.902792 4 356 | 17799 0.903832 4 357 | 17849 0.906388 4 358 | 17899 0.908316 4 359 | 17949 0.909589 4 360 | 17999 0.904184 4 361 | 18049 0.905662 4 362 | 18099 0.908201 4 363 | 18149 0.905766 4 364 | 18199 0.90208 4 365 | 18249 0.905931 4 366 | 18299 0.90595 4 367 | 18349 0.906552 4 368 | 18399 0.901693 4 369 | 18449 0.905434 4 370 | 18499 0.902546 4 371 | 18549 0.903623 4 372 | 18599 0.904691 4 373 | 18649 0.905838 4 374 | 18699 0.902074 4 375 | 18749 0.902832 4 376 | 18799 0.902292 4 377 | 18849 0.901393 4 378 | 18899 0.901823 4 379 | 18949 0.901875 4 380 | 18999 0.900151 4 381 | 19049 0.901678 4 382 | 19099 0.905091 4 383 | 19149 0.901764 4 384 | 19199 0.902698 4 385 | 19249 0.903235 4 386 | 19299 0.90095 4 387 | 19349 0.897994 4 388 | 19399 0.900717 4 389 | 19449 0.898589 4 390 | 19499 0.89855 4 391 | 19549 0.900172 4 392 | 19599 0.899527 4 393 | 19649 0.899006 4 394 | 19699 0.896388 4 395 | 19749 0.90147 4 396 | 19799 0.899502 4 397 | 19849 0.899851 4 398 | 19899 0.898214 4 399 | 19949 0.898994 4 400 | 19999 0.900999 4 401 | 20049 0.899918 4 402 | 20099 0.898708 4 403 | 20149 0.898091 4 404 | 20199 0.897361 4 405 | 20249 0.899379 4 406 | 20299 0.897022 4 407 | 20349 0.898069 4 408 | 20399 0.895527 4 409 | 20449 0.896514 4 410 | 20499 0.897199 4 411 | 20549 0.897997 4 412 | 20599 0.89702 4 413 | 20649 0.893663 4 414 | 20699 0.89095 4 415 | 20749 0.898384 4 416 | 20799 
0.896778 4 417 | 20849 0.897498 4 418 | 20899 0.895115 4 419 | 20949 0.895029 4 420 | 20999 0.89294 4 421 | 21049 0.895508 4 422 | 21099 0.892878 4 423 | 21149 0.89605 4 424 | 21199 0.897713 4 425 | 21249 0.893632 4 426 | 21299 0.892259 4 427 | 21349 0.892318 4 428 | 21399 0.894053 4 429 | 21449 0.894111 4 430 | 21499 0.892571 4 431 | 21549 0.892167 4 432 | 21599 0.890044 4 433 | 21649 0.894641 4 434 | 21699 0.891618 4 435 | 21749 0.891896 4 436 | 21799 0.890361 4 437 | 21849 0.885432 4 438 | 21899 0.893238 4 439 | 21949 0.892438 4 440 | 21999 0.889473 4 441 | 22049 0.892384 4 442 | 22099 0.89215 4 443 | 22149 0.891834 4 444 | 22199 0.891168 4 445 | 22249 0.888228 4 446 | 22299 0.887521 4 447 | 22349 0.886873 4 448 | 22399 0.888968 4 449 | 22449 0.889934 4 450 | 22499 0.890553 4 451 | 22549 0.888428 4 452 | 22599 0.888524 4 453 | 22649 0.88793 4 454 | 22699 0.887431 4 455 | 22749 0.887523 4 456 | 22799 0.887734 4 457 | 22849 0.891898 4 458 | 22899 0.89013 4 459 | 22949 0.885794 4 460 | 22999 0.884594 4 461 | 23049 0.88605 4 462 | 23099 0.888695 4 463 | 23149 0.887805 4 464 | 23199 0.88662 4 465 | 23249 0.887397 4 466 | 23299 0.883481 4 467 | 23349 0.887846 4 468 | 23399 0.884215 4 469 | 23449 0.880059 4 470 | 23499 0.885769 4 471 | 23549 0.884191 4 472 | 23599 0.885083 4 473 | 23649 0.884795 4 474 | 23699 0.882801 4 475 | 23749 0.886206 4 476 | 23799 0.882387 4 477 | 23849 0.885178 4 478 | 23899 0.883617 4 479 | 23949 0.886201 4 480 | 23999 0.882359 4 481 | 24049 0.880409 4 482 | 24099 0.881644 4 483 | 24149 0.881468 4 484 | 24199 0.883721 4 485 | 24249 0.877186 4 486 | 24299 0.885475 4 487 | 24349 0.881918 4 488 | 24399 0.883413 4 489 | 24449 0.88386 4 490 | 24499 0.883127 4 491 | 24549 0.882595 4 492 | 24599 0.881367 4 493 | 24649 0.878541 4 494 | 24699 0.88112 4 495 | 24749 0.880185 4 496 | 24799 0.880892 4 497 | 24849 0.878562 4 498 | 24899 0.88041 4 499 | 24949 0.879931 4 500 | 24999 0.879203 4 501 | 25049 0.880437 4 502 | 25099 0.878461 4 503 | 25149 0.882391 4 504 | 25199 0.880059 4 505 | 25249 0.879449 4 506 | 25299 0.881264 4 507 | 25349 0.882127 4 508 | 25399 0.877958 4 509 | 25449 0.878996 4 510 | 25499 0.878267 4 511 | 25549 0.878611 4 512 | 25599 0.879174 4 513 | 25649 0.875948 4 514 | 25699 0.880076 4 515 | 25749 0.875665 4 516 | 25799 0.878175 4 517 | 25849 0.877848 4 518 | 25899 0.874715 4 519 | 25949 0.877798 4 520 | 25999 0.880134 4 521 | 26049 0.877347 4 522 | 26099 0.87938 4 523 | 26149 0.872574 4 524 | 26199 0.874208 4 525 | 26249 0.875198 4 526 | 26299 0.875884 4 527 | 26349 0.877164 4 528 | 26399 0.876542 4 529 | 26449 0.875124 4 530 | 26499 0.876888 4 531 | 26549 0.876982 4 532 | 26599 0.874476 4 533 | 26649 0.873556 4 534 | 26699 0.874974 4 535 | 26749 0.873388 4 536 | 26799 0.87209 4 537 | 26849 0.874566 4 538 | 26899 0.87348 4 539 | 26949 0.873814 4 540 | 26999 0.876122 4 541 | 27049 0.872646 4 542 | 27099 0.872044 4 543 | 27149 0.87401 4 544 | 27199 0.874194 4 545 | 27249 0.871062 4 546 | 27299 0.871492 4 547 | 27349 0.873001 4 548 | 27399 0.869785 4 549 | 27449 0.873632 4 550 | 27499 0.86964 4 551 | 27549 0.870176 4 552 | 27599 0.870513 4 553 | 27649 0.869663 4 554 | 27699 0.871652 4 555 | 27749 0.874259 4 556 | 27799 0.871475 4 557 | 27849 0.870276 4 558 | 27899 0.868961 4 559 | 27949 0.871005 4 560 | 27999 0.870455 4 561 | 28049 0.871438 4 562 | 28099 0.870433 4 563 | 28149 0.872048 4 564 | 28199 0.869002 4 565 | 28249 0.869965 4 566 | 28299 0.872377 4 567 | 28349 0.867777 4 568 | 28399 0.869498 4 569 | 28449 0.870118 4 570 | 28499 0.868981 4 571 | 28549 
0.871197 4 572 | 28599 0.869391 4 573 | 28649 0.865643 4 574 | 28699 0.866855 4 575 | 28749 0.870754 4 576 | 28799 0.867511 4 577 | 28849 0.870056 4 578 | 28899 0.868296 4 579 | 28949 0.86921 4 580 | 28999 0.870403 4 581 | 29049 0.870985 4 582 | 29099 0.869559 4 583 | 29149 0.865007 4 584 | 29199 0.869088 4 585 | 29249 0.867447 4 586 | 29299 0.868467 4 587 | 29349 0.867391 4 588 | 29399 0.86709 4 589 | 29449 0.863628 4 590 | 29499 0.86552 4 591 | 29549 0.871395 4 592 | 29599 0.866763 4 593 | 29649 0.86916 4 594 | 29699 0.865816 4 595 | 29749 0.868792 4 596 | 29799 0.866529 4 597 | 29849 0.864791 4 598 | 29899 0.864544 4 599 | 29949 0.867773 4 600 | 29999 0.862124 4 601 | 30049 0.866693 4 602 | 30099 0.862487 4 603 | 30149 0.862321 4 604 | 30199 0.867053 4 605 | 30249 0.863903 4 606 | 30299 0.866245 4 607 | 30349 0.864371 4 608 | 30399 0.862736 4 609 | 30449 0.86762 4 610 | 30499 0.865085 4 611 | 30549 0.865604 4 612 | 30599 0.863712 4 613 | 30649 0.865592 4 614 | 30699 0.863422 4 615 | 30749 0.863597 4 616 | 30799 0.863054 4 617 | 30849 0.862408 4 618 | 30899 0.861487 4 619 | 30949 0.862266 4 620 | 30999 0.862122 4 621 | 31049 0.864151 4 622 | 31099 0.865452 4 623 | 31149 0.863378 4 624 | 31199 0.863808 4 625 | 31249 0.860649 4 626 | 31299 0.859258 4 627 | 31349 0.862297 4 628 | 31399 0.864018 4 629 | 31449 0.86158 4 630 | 31499 0.864552 4 631 | 31549 0.860744 4 632 | 31599 0.860322 4 633 | 31649 0.861724 4 634 | 31699 0.866198 4 635 | 31749 0.862331 4 636 | 31799 0.864518 4 637 | 31849 0.861216 4 638 | 31899 0.863071 4 639 | 31949 0.861112 4 640 | 31999 0.862287 4 641 | 32049 0.866531 4 642 | 32099 0.860795 4 643 | 32149 0.86306 4 644 | 32199 0.862151 4 645 | 32249 0.860116 4 646 | 32299 0.859761 4 647 | 32349 0.857768 4 648 | 32399 0.861003 4 649 | 32449 0.860444 4 650 | 32499 0.860691 4 651 | 32549 0.862369 4 652 | 32599 0.858087 4 653 | 32649 0.857554 4 654 | 32699 0.859667 4 655 | 32749 0.862082 4 656 | 32799 0.855458 4 657 | 32849 0.856882 4 658 | 32899 0.860088 4 659 | 32949 0.859306 4 660 | 32999 0.855341 4 661 | 33049 0.857423 4 662 | 33099 0.860348 4 663 | 33149 0.861831 4 664 | 33199 0.854077 4 665 | 33249 0.859029 4 666 | 33299 0.858479 4 667 | 33349 0.859025 4 668 | 33399 0.857426 4 669 | 33449 0.859645 4 670 | 33499 0.860406 4 671 | 33549 0.857692 4 672 | 33599 0.860703 4 673 | 33649 0.856307 4 674 | 33699 0.858887 4 675 | 33749 0.857026 4 676 | 33799 0.857797 4 677 | 33849 0.858729 4 678 | 33899 0.858446 4 679 | 33949 0.85689 4 680 | 33999 0.85651 4 681 | 34049 0.856526 4 682 | 34099 0.857016 4 683 | 34149 0.858818 4 684 | 34199 0.855362 4 685 | 34249 0.855443 4 686 | 34299 0.856751 4 687 | 34349 0.855635 4 688 | 34399 0.856863 4 689 | 34449 0.856118 4 690 | 34499 0.857604 4 691 | 34549 0.85154 4 692 | 34599 0.85411 4 693 | 34649 0.852742 4 694 | 34699 0.85568 4 695 | 34749 0.855569 4 696 | 34799 0.854818 4 697 | 34849 0.855254 4 698 | 34899 0.855422 4 699 | 34949 0.851434 4 700 | 34999 0.852234 4 701 | 35049 0.851698 4 702 | 35099 0.853635 4 703 | 35149 0.853 4 704 | 35199 0.858035 4 705 | 35249 0.852916 4 706 | 35299 0.853656 4 707 | 35349 0.854309 4 708 | 35399 0.855429 4 709 | 35449 0.852847 4 710 | 35499 0.855753 4 711 | 35549 0.851348 4 712 | 35599 0.855242 4 713 | 35649 0.850189 4 714 | 35699 0.855683 4 715 | 35749 0.851223 4 716 | 35799 0.855222 4 717 | 35849 0.852551 4 718 | 35899 0.848829 4 719 | 35949 0.853149 4 720 | 35999 0.851386 4 721 | 36049 0.853559 4 722 | 36099 0.855996 4 723 | 36149 0.852952 4 724 | 36199 0.855375 4 725 | 36249 0.853323 4 726 | 36299 
0.852521 4 727 | 36349 0.854882 4 728 | 36399 0.850882 4 729 | 36449 0.854897 4 730 | 36499 0.850557 4 731 | 36549 0.852704 4 732 | 36599 0.851563 4 733 | 36649 0.85116 4 734 | 36699 0.852597 4 735 | 36749 0.853552 4 736 | 36799 0.850553 4 737 | 36849 0.851638 4 738 | 36899 0.848811 4 739 | 36949 0.85078 4 740 | 36999 0.851204 4 741 | 37049 0.849053 4 742 | 37099 0.854639 4 743 | 37149 0.850306 4 744 | 37199 0.85071 4 745 | 37249 0.848908 4 746 | 37299 0.851593 4 747 | 37349 0.849579 4 748 | 37399 0.853112 4 749 | 37449 0.850954 4 750 | 37499 0.850365 4 751 | 37549 0.852705 4 752 | 37599 0.851126 4 753 | 37649 0.850089 4 754 | 37699 0.850423 4 755 | 37749 0.850226 4 756 | 37799 0.84916 4 757 | 37849 0.852856 4 758 | 37899 0.851821 4 759 | 37949 0.848992 4 760 | 37999 0.85128 4 761 | 38049 0.84848 4 762 | 38099 0.849207 4 763 | 38149 0.850269 4 764 | 38199 0.849333 4 765 | 38249 0.851892 4 766 | 38299 0.848998 4 767 | 38349 0.849777 4 768 | 38399 0.852048 4 769 | 38449 0.848905 4 770 | 38499 0.849761 4 771 | 38549 0.851136 4 772 | 38599 0.848043 4 773 | 38649 0.847769 4 774 | 38699 0.850263 4 775 | 38749 0.846356 4 776 | 38799 0.847089 4 777 | 38849 0.84796 4 778 | 38899 0.849944 4 779 | 38949 0.850304 4 780 | 38999 0.848932 4 781 | 39049 0.846816 4 782 | 39099 0.848317 4 783 | 39149 0.849227 4 784 | 39199 0.847595 4 785 | 39249 0.851603 4 786 | 39299 0.848003 4 787 | 39349 0.847851 4 788 | 39399 0.849557 4 789 | 39449 0.846705 4 790 | 39499 0.84829 4 791 | 39549 0.847243 4 792 | 39599 0.847487 4 793 | 39649 0.849555 4 794 | 39699 0.849973 4 795 | 39749 0.848278 4 796 | 39799 0.849594 4 797 | 39849 0.845253 4 798 | 39899 0.847111 4 799 | 39949 0.846908 4 800 | 39999 0.851142 4 801 | -------------------------------------------------------------------------------- /training_curves/training2-general.txt: -------------------------------------------------------------------------------- 1 | 49 1.38513 2 | 99 1.37528 3 | 149 1.33512 4 | 199 1.25762 5 | 249 1.16735 6 | 299 1.07165 7 | 349 0.994208 8 | 399 0.95273 9 | 449 0.936446 10 | 499 0.960717 11 | 549 0.921972 12 | 599 0.911676 13 | 649 0.900686 14 | 699 0.905429 15 | 749 0.899357 16 | 799 0.924917 17 | 849 0.908717 18 | 899 0.909096 19 | 949 0.887543 20 | 999 0.879603 21 | 1049 0.918762 22 | 1099 0.856277 23 | 1149 0.908135 24 | 1199 0.890991 25 | 1249 0.87535 26 | 1299 0.892996 27 | 1349 0.866373 28 | 1399 0.889494 29 | 1449 0.880717 30 | 1499 0.868678 31 | 1549 0.883313 32 | 1599 0.855934 33 | 1649 0.887485 34 | 1699 0.871329 35 | 1749 0.875141 36 | 1799 0.862728 37 | 1849 0.858299 38 | 1899 0.849594 39 | 1949 0.853421 40 | 1999 0.885646 41 | 2049 0.850287 42 | 2099 0.864585 43 | 2149 0.884409 44 | 2199 0.874041 45 | 2249 0.846469 46 | 2299 0.870553 47 | 2349 0.868317 48 | 2399 0.864352 49 | 2449 0.834737 50 | 2499 0.850379 51 | 2549 0.83779 52 | 2599 0.835566 53 | 2649 0.834436 54 | 2699 0.854565 55 | 2749 0.844536 56 | 2799 0.865961 57 | 2849 0.850697 58 | 2899 0.85013 59 | 2949 0.829143 60 | 2999 0.858399 61 | 3049 0.841826 62 | 3099 0.829566 63 | 3149 0.855829 64 | 3199 0.846938 65 | 3249 0.873013 66 | 3299 0.846899 67 | 3349 0.84027 68 | 3399 0.822361 69 | 3449 0.869917 70 | 3499 0.842444 71 | 3549 0.842579 72 | 3599 0.868073 73 | 3649 0.835183 74 | 3699 0.829693 75 | 3749 0.824583 76 | 3799 0.840703 77 | 3849 0.857606 78 | 3899 0.848911 79 | 3949 0.824536 80 | 3999 0.825107 81 | 4049 0.8495 82 | 4099 0.841194 83 | 4149 0.842334 84 | 4199 0.82809 85 | 4249 0.834769 86 | 4299 0.83737 87 | 4349 0.852477 88 | 4399 0.812917 89 | 4449 
0.824662 90 | 4499 0.833757 91 | 4549 0.816334 92 | 4599 0.81898 93 | 4649 0.819986 94 | 4699 0.843085 95 | 4749 0.837972 96 | 4799 0.865111 97 | 4849 0.815464 98 | 4899 0.824231 99 | 4949 0.820475 100 | 4999 0.839607 101 | 5049 0.834492 102 | 5099 0.837292 103 | 5149 0.812685 104 | 5199 0.819562 105 | 5249 0.84126 106 | 5299 0.820082 107 | 5349 0.841551 108 | 5399 0.849766 109 | 5449 0.841048 110 | 5499 0.831438 111 | 5549 0.801029 112 | 5599 0.814717 113 | 5649 0.84197 114 | 5699 0.814171 115 | 5749 0.831036 116 | 5799 0.816262 117 | 5849 0.810958 118 | 5899 0.833127 119 | 5949 0.82233 120 | 5999 0.823048 121 | 6049 0.851889 122 | 6099 0.802841 123 | 6149 0.838314 124 | 6199 0.83011 125 | 6249 0.813801 126 | 6299 0.828774 127 | 6349 0.816649 128 | 6399 0.806215 129 | 6449 0.833758 130 | 6499 0.838115 131 | 6549 0.8564 132 | 6599 0.836864 133 | 6649 0.804206 134 | 6699 0.841353 135 | 6749 0.813264 136 | 6799 0.823497 137 | 6849 0.798834 138 | 6899 0.82149 139 | 6949 0.82403 140 | 6999 0.802572 141 | 7049 0.822698 142 | 7099 0.816431 143 | 7149 0.823064 144 | 7199 0.81899 145 | 7249 0.808564 146 | 7299 0.819484 147 | 7349 0.813669 148 | 7399 0.806081 149 | 7449 0.810129 150 | 7499 0.815453 151 | 7549 0.802688 152 | 7599 0.847094 153 | 7649 0.838457 154 | 7699 0.823351 155 | 7749 0.820649 156 | 7799 0.842683 157 | 7849 0.811264 158 | 7899 0.820161 159 | 7949 0.81904 160 | 7999 0.817147 161 | 8049 0.787276 162 | 8099 0.792079 163 | 8149 0.799446 164 | 8199 0.820175 165 | 8249 0.830528 166 | 8299 0.810387 167 | 8349 0.822498 168 | 8399 0.816015 169 | 8449 0.823917 170 | 8499 0.807183 171 | 8549 0.830647 172 | 8599 0.804859 173 | 8649 0.79746 174 | 8699 0.778804 175 | 8749 0.844746 176 | 8799 0.814774 177 | 8849 0.791373 178 | 8899 0.828998 179 | 8949 0.811612 180 | 8999 0.80136 181 | 9049 0.834637 182 | 9099 0.794168 183 | 9149 0.789863 184 | 9199 0.809185 185 | 9249 0.799575 186 | 9299 0.808759 187 | 9349 0.802295 188 | 9399 0.81223 189 | 9449 0.822414 190 | 9499 0.796359 191 | 9549 0.822869 192 | 9599 0.78747 193 | 9649 0.802122 194 | 9699 0.814875 195 | 9749 0.814815 196 | 9799 0.836194 197 | 9849 0.79488 198 | 9899 0.815514 199 | 9949 0.822001 200 | 9999 0.811157 201 | 10049 0.840228 202 | 10099 0.785333 203 | 10149 0.808032 204 | 10199 0.792347 205 | 10249 0.823129 206 | 10299 0.818503 207 | 10349 0.814691 208 | 10399 0.801073 209 | 10449 0.824689 210 | 10499 0.802277 211 | 10549 0.788081 212 | 10599 0.804533 213 | 10649 0.789133 214 | 10699 0.839023 215 | 10749 0.794209 216 | 10799 0.799311 217 | 10849 0.807936 218 | 10899 0.810935 219 | 10949 0.806887 220 | 10999 0.790891 221 | 11049 0.803733 222 | 11099 0.813371 223 | 11149 0.808017 224 | 11199 0.823214 225 | 11249 0.801661 226 | 11299 0.82436 227 | 11349 0.793325 228 | 11399 0.804244 229 | 11449 0.813276 230 | 11499 0.795751 231 | 11549 0.802105 232 | 11599 0.803117 233 | 11649 0.811501 234 | 11699 0.80095 235 | 11749 0.826251 236 | 11799 0.810115 237 | 11849 0.810201 238 | 11899 0.813276 239 | 11949 0.810093 240 | 11999 0.804151 241 | 12049 0.78663 242 | 12099 0.794654 243 | 12149 0.802196 244 | 12199 0.814382 245 | 12249 0.805696 246 | 12299 0.8017 247 | 12349 0.811524 248 | 12399 0.798272 249 | 12449 0.799481 250 | 12499 0.798863 251 | 12549 0.783225 252 | 12599 0.822423 253 | 12649 0.825641 254 | 12699 0.802274 255 | 12749 0.798619 256 | 12799 0.828737 257 | 12849 0.777073 258 | 12899 0.818028 259 | 12949 0.785357 260 | 12999 0.806204 261 | 13049 0.787324 262 | 13099 0.801437 263 | 13149 0.807979 264 | 13199 0.791013 265 | 13249 
0.782523 266 | 13299 0.808135 267 | 13349 0.797089 268 | 13399 0.826701 269 | 13449 0.799845 270 | 13499 0.795075 271 | 13549 0.794529 272 | 13599 0.818275 273 | 13649 0.804663 274 | 13699 0.807801 275 | 13749 0.794326 276 | 13799 0.813936 277 | 13849 0.840246 278 | 13899 0.78704 279 | 13949 0.793388 280 | 13999 0.797293 281 | 14049 0.803278 282 | 14099 0.789659 283 | 14149 0.801851 284 | 14199 0.79852 285 | 14249 0.787914 286 | 14299 0.808392 287 | 14349 0.809265 288 | 14399 0.797731 289 | 14449 0.77143 290 | 14499 0.785298 291 | 14549 0.806825 292 | 14599 0.775801 293 | 14649 0.799722 294 | 14699 0.790197 295 | 14749 0.80685 296 | 14799 0.809495 297 | 14849 0.794908 298 | 14899 0.799395 299 | 14949 0.835249 300 | 14999 0.788065 301 | 15049 0.782378 302 | 15099 0.831272 303 | 15149 0.806921 304 | 15199 0.804342 305 | 15249 0.816676 306 | 15299 0.835232 307 | 15349 0.804872 308 | 15399 0.810488 309 | 15449 0.798076 310 | 15499 0.782188 311 | 15549 0.80424 312 | 15599 0.825444 313 | 15649 0.788384 314 | 15699 0.830101 315 | 15749 0.786279 316 | 15799 0.788156 317 | 15849 0.798651 318 | 15899 0.784131 319 | 15949 0.789967 320 | 15999 0.802947 321 | 16049 0.795567 322 | 16099 0.786235 323 | 16149 0.782527 324 | 16199 0.789232 325 | 16249 0.78923 326 | 16299 0.786868 327 | 16349 0.821722 328 | 16399 0.806866 329 | 16449 0.814588 330 | 16499 0.801715 331 | 16549 0.794748 332 | 16599 0.796349 333 | 16649 0.806235 334 | 16699 0.806672 335 | 16749 0.794499 336 | 16799 0.781861 337 | 16849 0.803492 338 | 16899 0.794849 339 | 16949 0.783515 340 | 16999 0.819635 341 | 17049 0.773892 342 | 17099 0.795603 343 | 17149 0.803758 344 | 17199 0.818101 345 | 17249 0.782007 346 | 17299 0.781124 347 | 17349 0.7873 348 | 17399 0.799237 349 | 17449 0.804531 350 | 17499 0.79165 351 | 17549 0.798493 352 | 17599 0.771394 353 | 17649 0.79177 354 | 17699 0.78091 355 | 17749 0.796384 356 | 17799 0.782858 357 | 17849 0.809141 358 | 17899 0.823676 359 | 17949 0.835524 360 | 17999 0.801323 361 | 18049 0.802997 362 | 18099 0.800254 363 | 18149 0.785905 364 | 18199 0.801298 365 | 18249 0.785951 366 | 18299 0.78149 367 | 18349 0.806645 368 | 18399 0.801958 369 | 18449 0.797062 370 | 18499 0.802946 371 | 18549 0.821054 372 | 18599 0.781474 373 | 18649 0.778937 374 | 18699 0.775159 375 | 18749 0.786587 376 | 18799 0.775981 377 | 18849 0.792203 378 | 18899 0.779387 379 | 18949 0.811545 380 | 18999 0.78963 381 | 19049 0.779782 382 | 19099 0.803772 383 | 19149 0.788266 384 | 19199 0.778777 385 | 19249 0.765948 386 | 19299 0.841508 387 | 19349 0.795566 388 | 19399 0.806203 389 | 19449 0.781253 390 | 19499 0.794105 391 | 19549 0.770099 392 | 19599 0.784847 393 | 19649 0.801683 394 | 19699 0.790629 395 | 19749 0.806479 396 | 19799 0.783166 397 | 19849 0.781442 398 | 19899 0.819284 399 | 19949 0.802248 400 | 19999 0.77371 401 | 20049 0.80722 402 | 20099 0.794765 403 | 20149 0.794007 404 | 20199 0.775243 405 | 20249 0.810247 406 | 20299 0.793177 407 | 20349 0.820855 408 | 20399 0.806366 409 | 20449 0.802929 410 | 20499 0.822852 411 | 20549 0.808431 412 | 20599 0.793282 413 | 20649 0.774981 414 | 20699 0.771857 415 | 20749 0.786245 416 | 20799 0.78141 417 | 20849 0.779949 418 | 20899 0.789573 419 | 20949 0.788686 420 | 20999 0.779242 421 | 21049 0.783728 422 | 21099 0.791785 423 | 21149 0.792663 424 | 21199 0.800926 425 | 21249 0.791483 426 | 21299 0.797523 427 | 21349 0.78223 428 | 21399 0.782484 429 | 21449 0.798446 430 | 21499 0.759114 431 | 21549 0.7885 432 | 21599 0.814726 433 | 21649 0.807865 434 | 21699 0.787168 435 | 21749 
0.793424 436 | 21799 0.777021 437 | 21849 0.780382 438 | 21899 0.793717 439 | 21949 0.809842 440 | 21999 0.78701 441 | 22049 0.795522 442 | 22099 0.786308 443 | 22149 0.780908 444 | 22199 0.772883 445 | 22249 0.772821 446 | 22299 0.783261 447 | 22349 0.801205 448 | 22399 0.779422 449 | 22449 0.799781 450 | 22499 0.793361 451 | 22549 0.795841 452 | 22599 0.819804 453 | 22649 0.783813 454 | 22699 0.78158 455 | 22749 0.813185 456 | 22799 0.771383 457 | 22849 0.780094 458 | 22899 0.786461 459 | 22949 0.814262 460 | 22999 0.766121 461 | 23049 0.777603 462 | 23099 0.777809 463 | 23149 0.758861 464 | 23199 0.794766 465 | 23249 0.767829 466 | 23299 0.798576 467 | 23349 0.785503 468 | 23399 0.778993 469 | 23449 0.800862 470 | 23499 0.794779 471 | 23549 0.785612 472 | 23599 0.775396 473 | 23649 0.813888 474 | 23699 0.775193 475 | 23749 0.784006 476 | 23799 0.785655 477 | 23849 0.765779 478 | 23899 0.768949 479 | 23949 0.780939 480 | 23999 0.774694 481 | 24049 0.769935 482 | 24099 0.786329 483 | 24149 0.779988 484 | 24199 0.787972 485 | 24249 0.783812 486 | 24299 0.788319 487 | 24349 0.78215 488 | 24399 0.802582 489 | 24449 0.802573 490 | 24499 0.776876 491 | 24549 0.762334 492 | 24599 0.775208 493 | 24649 0.8043 494 | 24699 0.780916 495 | 24749 0.807061 496 | 24799 0.784529 497 | 24849 0.767322 498 | 24899 0.786289 499 | 24949 0.767692 500 | 24999 0.789268 501 | 25049 0.78674 502 | 25099 0.784345 503 | 25149 0.785224 504 | 25199 0.778355 505 | 25249 0.769435 506 | 25299 0.755311 507 | 25349 0.805982 508 | 25399 0.778282 509 | 25449 0.783122 510 | 25499 0.771949 511 | 25549 0.783912 512 | 25599 0.770955 513 | 25649 0.770656 514 | 25699 0.770628 515 | 25749 0.762932 516 | 25799 0.775623 517 | 25849 0.781829 518 | 25899 0.767264 519 | 25949 0.808911 520 | 25999 0.799261 521 | 26049 0.771685 522 | 26099 0.797744 523 | 26149 0.770227 524 | 26199 0.789925 525 | 26249 0.78237 526 | 26299 0.789647 527 | 26349 0.802248 528 | 26399 0.787288 529 | 26449 0.77677 530 | 26499 0.769188 531 | 26549 0.787573 532 | 26599 0.78049 533 | 26649 0.806521 534 | 26699 0.792452 535 | 26749 0.800577 536 | 26799 0.777643 537 | 26849 0.786925 538 | 26899 0.787054 539 | 26949 0.772808 540 | 26999 0.778219 541 | 27049 0.763398 542 | 27099 0.817491 543 | 27149 0.758346 544 | 27199 0.774931 545 | 27249 0.775775 546 | 27299 0.775995 547 | 27349 0.780477 548 | 27399 0.796675 549 | 27449 0.772262 550 | 27499 0.78804 551 | 27549 0.768988 552 | 27599 0.796884 553 | 27649 0.783014 554 | 27699 0.791578 555 | 27749 0.796101 556 | 27799 0.787413 557 | 27849 0.774256 558 | 27899 0.764632 559 | 27949 0.764969 560 | 27999 0.786098 561 | 28049 0.776338 562 | 28099 0.771294 563 | 28149 0.802905 564 | 28199 0.781491 565 | 28249 0.782154 566 | 28299 0.787705 567 | 28349 0.751954 568 | 28399 0.77001 569 | 28449 0.767522 570 | 28499 0.768202 571 | 28549 0.809173 572 | 28599 0.787972 573 | 28649 0.77137 574 | 28699 0.760264 575 | 28749 0.763217 576 | 28799 0.769785 577 | 28849 0.779894 578 | 28899 0.767586 579 | 28949 0.776618 580 | 28999 0.764205 581 | 29049 0.791362 582 | 29099 0.771692 583 | 29149 0.761037 584 | 29199 0.776507 585 | 29249 0.773662 586 | 29299 0.769075 587 | 29349 0.782609 588 | 29399 0.784187 589 | 29449 0.761551 590 | 29499 0.772612 591 | 29549 0.794811 592 | 29599 0.783047 593 | 29649 0.803686 594 | 29699 0.790019 595 | 29749 0.76635 596 | 29799 0.788231 597 | 29849 0.770896 598 | 29899 0.782765 599 | 29949 0.761288 600 | 29999 0.787018 601 | 30049 0.775304 602 | 30099 0.78153 603 | 30149 0.776774 604 | 30199 0.775697 605 | 
30249 0.757232 606 | 30299 0.761247 607 | 30349 0.763001 608 | 30399 0.758738 609 | 30449 0.778736 610 | 30499 0.776499 611 | 30549 0.798269 612 | 30599 0.789894 613 | 30649 0.780332 614 | 30699 0.772474 615 | 30749 0.791727 616 | 30799 0.778599 617 | 30849 0.760758 618 | 30899 0.756643 619 | 30949 0.775982 620 | 30999 0.763415 621 | 31049 0.767171 622 | 31099 0.780273 623 | 31149 0.767582 624 | 31199 0.770402 625 | 31249 0.778542 626 | 31299 0.817584 627 | 31349 0.784682 628 | 31399 0.813395 629 | 31449 0.766372 630 | 31499 0.777408 631 | 31549 0.772344 632 | 31599 0.805519 633 | 31649 0.761885 634 | 31699 0.782212 635 | 31749 0.764642 636 | 31799 0.759831 637 | 31849 0.785138 638 | 31899 0.772018 639 | 31949 0.776133 640 | 31999 0.772269 641 | 32049 0.770375 642 | 32099 0.792441 643 | 32149 0.804193 644 | 32199 0.765265 645 | 32249 0.753055 646 | 32299 0.769168 647 | 32349 0.775328 648 | 32399 0.749415 649 | 32449 0.77535 650 | 32499 0.776997 651 | 32549 0.797572 652 | 32599 0.759282 653 | 32649 0.766656 654 | 32699 0.782109 655 | 32749 0.768068 656 | 32799 0.771113 657 | 32849 0.756356 658 | 32899 0.752835 659 | 32949 0.794813 660 | 32999 0.770545 661 | 33049 0.774648 662 | 33099 0.785318 663 | 33149 0.760563 664 | 33199 0.790122 665 | 33249 0.75495 666 | 33299 0.788032 667 | 33349 0.767068 668 | 33399 0.762271 669 | 33449 0.765475 670 | 33499 0.788248 671 | 33549 0.776704 672 | 33599 0.76791 673 | 33649 0.754024 674 | 33699 0.779509 675 | 33749 0.76835 676 | 33799 0.773396 677 | 33849 0.767741 678 | 33899 0.770962 679 | 33949 0.7721 680 | 33999 0.767875 681 | 34049 0.771684 682 | 34099 0.768708 683 | 34149 0.76305 684 | 34199 0.778902 685 | 34249 0.759473 686 | 34299 0.766245 687 | 34349 0.775715 688 | 34399 0.780248 689 | 34449 0.761423 690 | 34499 0.768758 691 | 34549 0.784023 692 | 34599 0.777973 693 | 34649 0.773792 694 | 34699 0.767409 695 | 34749 0.762176 696 | 34799 0.767425 697 | 34849 0.772935 698 | 34899 0.777396 699 | 34949 0.774194 700 | 34999 0.775891 701 | 35049 0.774356 702 | 35099 0.762697 703 | 35149 0.76578 704 | 35199 0.7738 705 | 35249 0.7677 706 | 35299 0.783786 707 | 35349 0.790587 708 | 35399 0.737662 709 | 35449 0.787597 710 | 35499 0.760119 711 | 35549 0.764978 712 | 35599 0.739124 713 | 35649 0.763843 714 | 35699 0.775551 715 | 35749 0.77042 716 | 35799 0.756887 717 | 35849 0.766181 718 | 35899 0.782253 719 | 35949 0.780654 720 | 35999 0.772794 721 | 36049 0.765135 722 | 36099 0.777001 723 | 36149 0.765499 724 | 36199 0.758403 725 | 36249 0.762801 726 | 36299 0.74867 727 | 36349 0.761376 728 | 36399 0.779474 729 | 36449 0.762721 730 | 36499 0.75013 731 | 36549 0.780121 732 | 36599 0.775632 733 | 36649 0.766077 734 | 36699 0.791851 735 | 36749 0.771552 736 | 36799 0.762366 737 | 36849 0.752703 738 | 36899 0.765456 739 | 36949 0.763145 740 | 36999 0.753275 741 | 37049 0.758312 742 | 37099 0.768938 743 | 37149 0.763612 744 | 37199 0.753743 745 | 37249 0.780128 746 | 37299 0.765619 747 | 37349 0.78177 748 | 37399 0.753345 749 | 37449 0.75226 750 | 37499 0.778939 751 | 37549 0.80075 752 | 37599 0.78233 753 | 37649 0.77018 754 | 37699 0.769103 755 | 37749 0.747658 756 | 37799 0.760564 757 | 37849 0.762068 758 | 37899 0.765045 759 | 37949 0.782736 760 | 37999 0.756872 761 | 38049 0.752967 762 | 38099 0.770531 763 | 38149 0.764827 764 | 38199 0.754699 765 | 38249 0.779107 766 | 38299 0.773709 767 | 38349 0.760108 768 | 38399 0.762539 769 | 38449 0.755685 770 | 38499 0.74422 771 | 38549 0.759347 772 | 38599 0.759555 773 | 38649 0.756058 774 | 38699 0.754437 775 | 38749 
0.766287 776 | 38799 0.766063 777 | 38849 0.755528 778 | 38899 0.757085 779 | 38949 0.756923 780 | 38999 0.763326 781 | 39049 0.765631 782 | 39099 0.770867 783 | 39149 0.778116 784 | 39199 0.770357 785 | 39249 0.748653 786 | 39299 0.742431 787 | 39349 0.753174 788 | 39399 0.765995 789 | 39449 0.750923 790 | 39499 0.769724 791 | 39549 0.768088 792 | 39599 0.767549 793 | 39649 0.752759 794 | 39699 0.755369 795 | 39749 0.73528 796 | 39799 0.740343 797 | 39849 0.741726 798 | 39899 0.750175 799 | 39949 0.734811 800 | 39999 0.753203 801 | --------------------------------------------------------------------------------
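
Note on the `training_curves/*.txt` logs above: each row appears to be a whitespace-separated record logged every 50 optimizer steps, of the form `step loss` (e.g. `training2-general.txt`) or `step loss difficulty` (e.g. the trailing `4` in `training2-diff4.txt`, which seems to match the difficulty setting in the filename; that reading is an inference from the file contents, not something stated in the repository). Under that assumption, the following is a minimal sketch for loading and plotting one of these curves; the path and column interpretation are assumptions as described above.

    # Minimal sketch: load one training_curves/*.txt log and plot loss vs. step.
    # Assumes rows are "step loss" or "step loss difficulty" (inferred from the
    # files above, not documented in the repo); np.loadtxt handles both widths.
    import numpy as np
    import matplotlib.pyplot as plt

    def load_curve(path):
        rows = np.loadtxt(path)            # shape (N, 2) or (N, 3)
        return rows[:, 0], rows[:, 1]      # step index, loss

    steps, loss = load_curve("training_curves/training2-general.txt")
    plt.plot(steps, loss)
    plt.xlabel("training step")
    plt.ylabel("loss")
    plt.show()

For the two-class runs the loss starts near ln(2) ≈ 0.693 and for the four-class run near ln(4) ≈ 1.386, consistent with a cross-entropy loss at chance level, which is why plotting against the chance baseline is a quick sanity check on these files.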