├── (Keras) Cornell Movie Review Dataset.ipynb ├── (Keras) Cornell Movie Review Dataset.py ├── (Keras) IMDB Dataset.ipynb ├── (Keras) IMDB Dataset.py ├── (PyTorch) Cornell Movie Review Dataset.ipynb ├── (PyTorch) Cornell Movie Review Dataset.py ├── (PyTorch) IMDB Dataset.ipynb ├── (PyTorch) IMDB Dataset.py ├── .gitignore ├── Dockerfile ├── README.md ├── data ├── scaledata.README.1.0.txt └── scaledata │ ├── Dennis+Schwartz │ ├── id.Dennis+Schwartz │ ├── label.3class.Dennis+Schwartz │ ├── label.4class.Dennis+Schwartz │ ├── rating.Dennis+Schwartz │ └── subj.Dennis+Schwartz │ ├── James+Berardinelli │ ├── id.James+Berardinelli │ ├── label.3class.James+Berardinelli │ ├── label.4class.James+Berardinelli │ ├── rating.James+Berardinelli │ └── subj.James+Berardinelli │ ├── Scott+Renshaw │ ├── id.Scott+Renshaw │ ├── label.3class.Scott+Renshaw │ ├── label.4class.Scott+Renshaw │ ├── rating.Scott+Renshaw │ └── subj.Scott+Renshaw │ └── Steve+Rhodes │ ├── id.Steve+Rhodes │ ├── label.3class.Steve+Rhodes │ ├── label.4class.Steve+Rhodes │ ├── rating.Steve+Rhodes │ └── subj.Steve+Rhodes ├── embed_regularize.py ├── jupyter_notebook_config.py ├── locked_dropout.py ├── weight_drop.py ├── yaringal_callbacks.py └── yaringal_dataset.py /(Keras) Cornell Movie Review Dataset.py: -------------------------------------------------------------------------------- 1 | 2 | # coding: utf-8 3 | 4 | # # (Keras) Cornell Movie Review Dataset 5 | 6 | # Based on https://github.com/yaringal/BayesianRNN/blob/master/Example/sentiment_lstm_regression.py 7 | 8 | # In[1]: 9 | 10 | 11 | import numpy as np 12 | from tensorflow.contrib.keras.python.keras.optimizers import SGD, RMSprop, Adagrad 13 | from tensorflow.contrib.keras.python.keras.models import Sequential 14 | from tensorflow.contrib.keras.python.keras.layers.core import Dense, Dropout 15 | from tensorflow.contrib.keras.python.keras.layers.embeddings import Embedding 16 | from tensorflow.contrib.keras.python.keras.layers.recurrent import LSTM, GRU, SimpleRNN 17 | from tensorflow.contrib.keras.python.keras.regularizers import l2 18 | from tensorflow.contrib.keras.python.keras.optimizers import Adam 19 | from sklearn.metrics import mean_squared_error 20 | import matplotlib.pyplot as plt 21 | 22 | from yaringal_callbacks import ModelTest 23 | from yaringal_dataset import loader 24 | 25 | get_ipython().magic('matplotlib inline') 26 | plt.style.use('fivethirtyeight') 27 | plt.rcParams["figure.figsize"] = (8, 5) 28 | 29 | # Global params: 30 | NB_WORDS = 20000 31 | SKIP_TOP = 0 32 | TEST_SPLIT = 0.2 33 | INIT_SEED = 2017 34 | GLOBAL_SEED = 2018 35 | MAXLEN = 200 36 | BATCH_SIZE = 128 37 | TEST_BATCH_SIZE = 512 38 | 39 | 40 | # In[2]: 41 | 42 | 43 | dataset = loader(INIT_SEED, MAXLEN, NB_WORDS, SKIP_TOP, TEST_SPLIT) 44 | 45 | X_train, X_test, Y_train, Y_test = dataset.X_train, dataset.X_test, dataset.Y_train, dataset.Y_test 46 | mean_y_train, std_y_train = dataset.mean_y_train, dataset.std_y_train 47 | 48 | 49 | # In[3]: 50 | 51 | 52 | def get_model(idrop=0.2, edrop=0.1, odrop=0.25, rdrop=0.2, weight_decay=1e-4, lr=1e-3): 53 | model = Sequential() 54 | model.add(Embedding(NB_WORDS, 128, embeddings_regularizer=l2(weight_decay), 55 | input_length=MAXLEN)) 56 | if edrop: 57 | model.add(Dropout(edrop)) 58 | model.add(LSTM(128, kernel_regularizer=l2(weight_decay), recurrent_regularizer=l2(weight_decay), 59 | bias_regularizer=l2(weight_decay), dropout=idrop, recurrent_dropout=rdrop)) 60 | if odrop: 61 | model.add(Dropout(odrop)) 62 | model.add(Dense(1, kernel_regularizer=l2(weight_decay), 63 
| bias_regularizer=l2(weight_decay))) 64 | optimizer = Adam(lr) 65 | model.compile(loss='mse', metrics=["mse"], optimizer=optimizer) 66 | return model 67 | 68 | 69 | # ## Normal Variational LSTM (w/o Embedding Dropout) 70 | 71 | # In[4]: 72 | 73 | 74 | print('Build model...') 75 | model = get_model(rdrop=0.25, odrop=0.25, edrop=0, idrop=0.25, weight_decay=1e-4, lr=1e-3) 76 | 77 | 78 | # In[5]: 79 | 80 | 81 | modeltest_1 = ModelTest(X_test, Yt=Y_test, 82 | test_every_X_epochs=2, verbose=0, T=10, 83 | mean_y_train=mean_y_train, std_y_train=std_y_train, 84 | loss='euclidean', batch_size=TEST_BATCH_SIZE) 85 | 86 | 87 | # In[6]: 88 | 89 | 90 | history_1 = model.fit( 91 | X_train, Y_train, 92 | verbose=2, 93 | shuffle=True, 94 | # validation_data=[X_test, Y_test], 95 | batch_size=BATCH_SIZE, epochs=200, callbacks=[modeltest_1]) 96 | 97 | 98 | # In[24]: 99 | 100 | 101 | print("Best RMSE: {:.4f} Best Epoch: {}".format( 102 | np.min([x[1] ** 0.5 for x in modeltest_1.history]), 103 | (np.argmin([x[1] ** 0.5 for x in modeltest_1.history]) + 1)*2 104 | )) 105 | 106 | 107 | # ## Standard LSTM w/o Dropout 108 | 109 | # In[10]: 110 | 111 | 112 | print('Build model...') 113 | model = get_model(edrop=0, rdrop=0, odrop=0, idrop=0, weight_decay=1e-10, lr=1e-3) 114 | 115 | 116 | # In[11]: 117 | 118 | 119 | modeltest_2 = ModelTest(X_test, Yt=Y_test, 120 | test_every_X_epochs=2, verbose=0, T=1, 121 | mean_y_train=mean_y_train, std_y_train=std_y_train, 122 | loss='euclidean', batch_size=TEST_BATCH_SIZE) 123 | 124 | 125 | # In[12]: 126 | 127 | 128 | history_2 = model.fit( 129 | X_train, Y_train, 130 | verbose=2, 131 | shuffle=True, 132 | # validation_data=[X_test, Y_test], 133 | batch_size=BATCH_SIZE, epochs=200, callbacks=[modeltest_2] 134 | ) 135 | 136 | 137 | # In[25]: 138 | 139 | 140 | print("Best RMSE: {:.4f} Best Epoch: {}".format( 141 | np.min([x[1] ** 0.5 for x in modeltest_2.history]), 142 | (np.argmin([x[1] ** 0.5 for x in modeltest_2.history]) + 1)*2 143 | )) 144 | 145 | 146 | # ## LSTM with Standard Dropout (different mask at different time steps) 147 | 148 | # In[13]: 149 | 150 | 151 | print('Build model...') 152 | model = get_model(edrop=0.3, rdrop=0, odrop=0.3, idrop=0, weight_decay=1e-4, lr=1e-3) 153 | 154 | 155 | # In[14]: 156 | 157 | 158 | modeltest_3 = ModelTest(X_test, Yt=Y_test, 159 | test_every_X_epochs=2, verbose=0, T=10, 160 | mean_y_train=mean_y_train, std_y_train=std_y_train, 161 | loss='euclidean', batch_size=TEST_BATCH_SIZE) 162 | 163 | 164 | # In[15]: 165 | 166 | 167 | history_3 = model.fit( 168 | X_train, Y_train, 169 | verbose=2, 170 | shuffle=True, 171 | # validation_data=[X_test, Y_test], 172 | batch_size=BATCH_SIZE, epochs=200, callbacks=[modeltest_3]) 173 | 174 | 175 | # In[26]: 176 | 177 | 178 | print("Best RMSE: {:.4f} Best Epoch: {}".format( 179 | np.min([x[1] ** 0.5 for x in modeltest_3.history]), 180 | (np.argmin([x[1] ** 0.5 for x in modeltest_3.history]) + 1)*2 181 | )) 182 | 183 | 184 | # ## Visualizations 185 | 186 | # In[40]: 187 | 188 | 189 | plt.figure(figsize=(12, 4)) 190 | plt.subplot(1, 2, 1) 191 | plt.title("Raw MSE Comparison - Training Set") 192 | plt.plot(np.arange(len(history_1.history["mean_squared_error"])), 193 | history_1.history["mean_squared_error"], label="variational") 194 | plt.plot(np.arange(len(history_2.history["mean_squared_error"])), 195 | history_2.history["mean_squared_error"], "g-", label="no dropout") 196 | plt.plot(np.arange(len(history_3.history["mean_squared_error"])), 197 | history_3.history["mean_squared_error"], "y-", label="naive
dropout") 198 | plt.legend(loc='best') 199 | plt.xlabel("epochs") 200 | plt.ylabel("Raw MSE") 201 | plt.subplot(1, 2, 2) 202 | plt.title("(MC - Approx) Histogram") 203 | plt.hist([x[1] ** 0.5 - x[0] ** 0.5 for x in modeltest_1.history], alpha=0.5, label="varational") 204 | plt.hist([x[1] ** 0.5 - x[0] ** 0.5 for x in modeltest_3.history], alpha=0.5, label="navie dropout") 205 | plt.legend(loc='best') 206 | plt.xlabel("Difference in Raw MSE") 207 | plt.ylabel("Count") 208 | plt.xticks(fontsize=8, rotation=0) 209 | 210 | 211 | # In[39]: 212 | 213 | 214 | plt.title("RMSE Comparison - Validation Set") 215 | plt.plot(np.arange(len(modeltest_1.history)), [x[1] ** 0.5 for x in modeltest_1.history], "b-", label="variational(mc)") 216 | plt.plot(np.arange(len(modeltest_2.history)), [x[0] ** 0.5 for x in modeltest_2.history], "g-", label="no dropout") 217 | plt.plot(np.arange(len(modeltest_3.history)), [x[1] ** 0.5 for x in modeltest_3.history], "y-", label="naive dropout(mc)") 218 | plt.legend(loc='best') 219 | plt.xlabel("epochs") 220 | plt.ylabel("RMSE") 221 | 222 | -------------------------------------------------------------------------------- /(Keras) IMDB Dataset.py: -------------------------------------------------------------------------------- 1 | 2 | # coding: utf-8 3 | 4 | # # (Keras) IMDB Dataset 5 | 6 | # In[1]: 7 | 8 | 9 | import numpy as np 10 | from tensorflow.contrib.keras.python.keras.optimizers import SGD, RMSprop, Adagrad 11 | from tensorflow.contrib.keras.python.keras.models import Sequential 12 | from tensorflow.contrib.keras.python.keras.layers.core import Dense, Dropout 13 | from tensorflow.contrib.keras.python.keras.layers.embeddings import Embedding 14 | from tensorflow.contrib.keras.python.keras.layers.recurrent import LSTM, GRU, SimpleRNN 15 | from tensorflow.contrib.keras.python.keras.regularizers import l2 16 | from tensorflow.contrib.keras.python.keras.optimizers import Adam 17 | from tensorflow.contrib.keras.python.keras.preprocessing import sequence 18 | from tensorflow.contrib.keras.python.keras.datasets import imdb 19 | from sklearn.metrics import mean_squared_error 20 | import matplotlib.pyplot as plt 21 | import matplotlib.ticker as ticker 22 | 23 | from yaringal_callbacks import ModelTest 24 | from yaringal_dataset import loader 25 | 26 | get_ipython().magic('matplotlib inline') 27 | plt.style.use('fivethirtyeight') 28 | plt.rcParams["figure.figsize"] = (8, 5) 29 | 30 | # Global params: 31 | NB_WORDS = 20000 32 | SKIP_TOP = 0 33 | TEST_SPLIT = 0.2 34 | INIT_SEED = 2017 35 | GLOBAL_SEED = 2018 36 | MAXLEN = 80 37 | BATCH_SIZE = 128 38 | TEST_BATCH_SIZE = 512 39 | WEIGHT_DECAY = 1e-4 40 | 41 | 42 | # In[2]: 43 | 44 | 45 | np.random.seed(100) 46 | 47 | 48 | # In[3]: 49 | 50 | 51 | (X_train, Y_train), (X_test, Y_test) = imdb.load_data(num_words=NB_WORDS) 52 | print(len(X_train), 'train sequences') 53 | print(len(X_test), 'test sequences') 54 | print('Pad sequences (samples x time)') 55 | X_train = sequence.pad_sequences(X_train, maxlen=MAXLEN) 56 | X_test = sequence.pad_sequences(X_test, maxlen=MAXLEN) 57 | print('x_train shape:', X_train.shape) 58 | print('x_test shape:', X_test.shape) 59 | 60 | 61 | # In[4]: 62 | 63 | 64 | def get_model(idrop=0.2, edrop=0.1, odrop=0.25, rdrop=0.2, weight_decay=WEIGHT_DECAY): 65 | model = Sequential() 66 | model.add(Embedding(NB_WORDS, 128, embeddings_regularizer=l2(weight_decay), 67 | input_length=MAXLEN)) # , batch_input_shape=(batch_size, maxlen))) 68 | if edrop: 69 | model.add(Dropout(edrop)) 70 | model.add(LSTM(128, 
kernel_regularizer=l2(weight_decay), recurrent_regularizer=l2(weight_decay), 71 | bias_regularizer=l2(weight_decay), dropout=idrop, recurrent_dropout=rdrop)) 72 | if odrop: 73 | model.add(Dropout(odrop)) 74 | model.add(Dense(1, kernel_regularizer=l2(weight_decay), 75 | bias_regularizer=l2(weight_decay), activation='sigmoid')) 76 | optimizer = Adam(1e-3) 77 | model.compile(loss='binary_crossentropy', metrics=["binary_accuracy"], optimizer=optimizer) 78 | return model 79 | 80 | 81 | # ## Normal Variational LSTM (w/o Embedding Dropout) 82 | # All models in this notebook omit embedding dropout, as Keras does not have such a layer. 83 | 84 | # In[5]: 85 | 86 | 87 | print('Build model...') 88 | model = get_model(idrop=0.25, edrop=0, odrop=0.25, rdrop=0.25, weight_decay=1e-4) 89 | 90 | 91 | # In[6]: 92 | 93 | 94 | modeltest_1 = ModelTest(X_test, Yt=Y_test, 95 | test_every_X_epochs=1, verbose=0, 96 | loss='binary', batch_size=TEST_BATCH_SIZE) 97 | 98 | 99 | # In[7]: 100 | 101 | 102 | history_1 = model.fit( 103 | X_train, Y_train, 104 | verbose=2, 105 | shuffle=True, 106 | # validation_data=[X_test, Y_test], 107 | batch_size=BATCH_SIZE, epochs=20, callbacks=[modeltest_1]) 108 | 109 | 110 | # In[11]: 111 | 112 | 113 | best_epoch = np.argmin([x[1] for x in modeltest_1.history[:18]]) + 1 114 | print("Best Loss: {:.4f} Acc: {:.2f}% Best Epoch: {}".format( 115 | modeltest_1.history[best_epoch-1][1], 116 | modeltest_1.history[best_epoch-1][3] * 100, 117 | best_epoch 118 | )) 119 | 120 | 121 | # In[12]: 122 | 123 | 124 | plt.title("Log Loss Comparison") 125 | plt.plot(np.arange(len(modeltest_1.history)), [x[0] for x in modeltest_1.history], label="std") 126 | plt.plot(np.arange(len(modeltest_1.history)), [x[1] for x in modeltest_1.history], "g-", label="mc") 127 | plt.legend(loc='best') 128 | 129 | 130 | # In[13]: 131 | 132 | 133 | plt.title("Accuracy Comparison") 134 | plt.plot(np.arange(0, len(modeltest_1.history)), [x[2] for x in modeltest_1.history], label="std") 135 | plt.plot(np.arange(0, len(modeltest_1.history)), [x[3] for x in modeltest_1.history], "g-", label="mc") 136 | plt.legend(loc='best') 137 | 138 | 139 | # ## Standard LSTM 140 | # I choose to keep a very low weight decay because assigning zero seems to cause some problems.
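# A minimal sketch (added for illustration; `maybe_l2` is a hypothetical helper, not
# part of the original notebook): one way to avoid constructing `l2(0.0)` is to build
# the regularizer only when the decay is strictly positive, since Keras treats a
# regularizer argument of `None` as "no regularization".

def maybe_l2(weight_decay):
    # Return an L2 regularizer for positive decay values, otherwise None.
    return l2(weight_decay) if weight_decay else None

# get_model could then pass, e.g., kernel_regularizer=maybe_l2(weight_decay)
# instead of falling back on the tiny 1e-10 decay used below.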
141 | 142 | # In[14]: 143 | 144 | 145 | print('Build model...') 146 | model = get_model(edrop=0, rdrop=0, odrop=0, idrop=0, weight_decay=1e-10) 147 | 148 | 149 | # In[15]: 150 | 151 | 152 | modeltest_2 = ModelTest(X_test, Yt=Y_test, 153 | test_every_X_epochs=1, verbose=0, T=1, 154 | loss='binary', batch_size=TEST_BATCH_SIZE) 155 | 156 | 157 | # In[17]: 158 | 159 | 160 | history_2 = model.fit( 161 | X_train, Y_train, 162 | verbose=2, 163 | shuffle=True, 164 | # validation_data=[X_test, Y_test], 165 | batch_size=BATCH_SIZE, epochs=20, callbacks=[modeltest_2]) 166 | 167 | 168 | # In[25]: 169 | 170 | 171 | best_epoch = np.argmin([x[1] for x in modeltest_2.history]) + 1 172 | print("Best Loss: {:.4f} Acc: {:.2f}% Best Epoch: {}".format( 173 | modeltest_2.history[best_epoch-1][1], 174 | modeltest_2.history[best_epoch-1][3] * 100, 175 | best_epoch 176 | )) 177 | 178 | 179 | # ## LSTM with Standard Dropout (different mask at different time steps) 180 | 181 | # In[20]: 182 | 183 | 184 | print('Build model...') 185 | model = get_model(edrop=0.25, rdrop=0, odrop=0.25, idrop=0, weight_decay=1e-4) 186 | 187 | 188 | # In[21]: 189 | 190 | 191 | modeltest_3 = ModelTest(X_test, Yt=Y_test, 192 | test_every_X_epochs=1, verbose=0, T=10, 193 | loss='binary', batch_size=TEST_BATCH_SIZE) 194 | 195 | 196 | # In[22]: 197 | 198 | 199 | history_3 = model.fit( 200 | X_train, Y_train, 201 | verbose=2, 202 | shuffle=True, 203 | # validation_data=[X_test, Y_test], 204 | batch_size=BATCH_SIZE, epochs=20, callbacks=[modeltest_3]) 205 | 206 | 207 | # In[24]: 208 | 209 | 210 | best_epoch = np.argmin([x[1] for x in modeltest_3.history[:19]]) + 1 211 | print("Best Loss: {:.4f} Acc: {:.2f}% Best Epoch: {}".format( 212 | modeltest_3.history[best_epoch-1][1], 213 | modeltest_3.history[best_epoch-1][3] * 100, 214 | best_epoch 215 | )) 216 | 217 | 218 | # ## Visualizations 219 | 220 | # In[40]: 221 | 222 | 223 | bins = np.arange(-0.1, 0.035, 0.01) 224 | 225 | 226 | # In[53]: 227 | 228 | 229 | len(history_2.history["binary_accuracy"]) 230 | 231 | 232 | # In[54]: 233 | 234 | 235 | plt.figure(figsize=(12, 4)) 236 | plt.subplot(1, 2, 1) 237 | plt.title("Accuracy Comparison - Training Set") 238 | plt.plot(np.arange(len(history_2.history["binary_accuracy"])), 239 | np.array(history_1.history["binary_accuracy"][:20]) * 100, label="variational") 240 | plt.plot(np.arange(len(history_2.history["binary_accuracy"])), 241 | np.array(history_2.history["binary_accuracy"]) * 100, "g-", label="no dropout") 242 | plt.plot(np.arange(len(history_3.history["binary_accuracy"])), 243 | np.array(history_3.history["binary_accuracy"]) * 100, "y-", label="naive dropout") 244 | plt.legend(loc='best') 245 | plt.xlabel("epochs") 246 | plt.ylabel("Accuracy (%)") 247 | plt.subplot(1, 2, 2) 248 | plt.title("(MC - Approx) Histogram") 249 | plt.hist([x[1] - x[0] for x in modeltest_1.history[:17]], bins=bins, alpha=0.5, label="variational") 250 | plt.hist([x[1] - x[0] for x in modeltest_3.history[:17]], bins=bins, alpha=0.5, label="naive dropout") 251 | plt.legend(loc='best') 252 | plt.xlabel("Difference in Loss") 253 | plt.ylabel("Count") 254 | plt.xticks(fontsize=8, rotation=0) 255 | 256 | 257 | # In[60]: 258 | 259 | 260 | plt.figure(figsize=(12, 4)) 261 | plt.subplot(1, 2, 1) 262 | plt.title("Log Loss Comparison - Validation Set") 263 | plt.plot(np.arange(len(modeltest_2.history)), [x[1] for x in modeltest_1.history[:20]], "b-", label="variational(mc)") 264 | plt.plot(np.arange(len(modeltest_2.history)), [x[1] for x in modeltest_2.history], "g-", label="no dropout") 265
| plt.plot(np.arange(len(modeltest_3.history)), [x[1] for x in modeltest_3.history], "y-", label="naive dropout(mc)") 266 | plt.legend(loc='best') 267 | plt.xlabel("epochs") 268 | plt.ylabel("Log Loss") 269 | plt.subplot(1, 2, 2) 270 | plt.title("Accuracy Comparison - Validation Set") 271 | plt.plot(np.arange(len(modeltest_2.history)), [x[3] * 100 for x in modeltest_1.history[:20]], "b-", label="variational(mc)") 272 | plt.plot(np.arange(len(modeltest_2.history)), [x[3] * 100 for x in modeltest_2.history], "g-", label="no dropout") 273 | plt.plot(np.arange(len(modeltest_3.history)), [x[3] * 100 for x in modeltest_3.history], "y-", label="naive dropout(mc)") 274 | plt.legend(loc='best') 275 | plt.xlabel("epochs") 276 | plt.ylabel("Accuracy (%)") 277 | 278 | 279 | # In[ ]: 280 | 281 | 282 | 283 | 284 | -------------------------------------------------------------------------------- /(PyTorch) Cornell Movie Review Dataset.py: -------------------------------------------------------------------------------- 1 | 2 | # coding: utf-8 3 | 4 | # # (PyTorch) Cornell Movie Review Dataset 5 | 6 | # In[1]: 7 | 8 | 9 | import torch 10 | import torch.nn as nn 11 | from torch.autograd import Variable 12 | import torch.nn.functional as F 13 | from sklearn.metrics import mean_squared_error 14 | from tqdm import tqdm_notebook 15 | import numpy as np 16 | import matplotlib.pyplot as plt 17 | 18 | from yaringal_dataset import loader 19 | from weight_drop import WeightDrop 20 | from embed_regularize import embedded_dropout 21 | from locked_dropout import LockedDropout 22 | 23 | get_ipython().magic('matplotlib inline') 24 | plt.style.use('fivethirtyeight') 25 | plt.rcParams["figure.figsize"] = (8, 5) 26 | 27 | # Global params: 28 | NB_WORDS = 20000 29 | SKIP_TOP = 0 30 | TEST_SPLIT = 0.2 31 | INIT_SEED = 2017 32 | GLOBAL_SEED = 2018 33 | MAXLEN = 200 34 | BATCH_SIZE = 128 35 | TEST_BATCH_SIZE = 512 36 | 37 | 38 | # In[2]: 39 | 40 | 41 | dataset = loader(INIT_SEED, MAXLEN, NB_WORDS, SKIP_TOP, TEST_SPLIT) 42 | 43 | X_train, X_test, Y_train, Y_test = dataset.X_train, dataset.X_test, dataset.Y_train, dataset.Y_test 44 | mean_y_train, std_y_train = dataset.mean_y_train, dataset.std_y_train 45 | 46 | 47 | # In[3]: 48 | 49 | 50 | def inverse_transform(v): 51 | return v * std_y_train + mean_y_train 52 | 53 | 54 | # In[4]: 55 | 56 | 57 | class Model(nn.Module): 58 | def __init__(self, nb_words, hidden_size=128, embedding_size=128, n_layers=1, 59 | wdrop=0.25, odrop=0.25, edrop=0.1, idrop=0.25, variational=False, 60 | standard_dropout=False, batch_first=True): 61 | super(Model, self).__init__() 62 | self.standard_dropout = standard_dropout 63 | self.lockdrop = LockedDropout(batch_first=batch_first) 64 | self.odrop = odrop 65 | self.idrop = idrop 66 | self.edrop = edrop 67 | self.n_layers = n_layers 68 | self.embedding = nn.Embedding(nb_words, embedding_size) 69 | self.rnns = [ 70 | nn.LSTM(embedding_size if l == 0 else hidden_size, 71 | hidden_size, num_layers=1, batch_first=batch_first) 72 | for l in range(n_layers) 73 | ] 74 | if wdrop: 75 | self.rnns = [WeightDrop(rnn, ['weight_hh_l0'], dropout=wdrop, variational=variational) 76 | for rnn in self.rnns] 77 | self.rnns = torch.nn.ModuleList(self.rnns) 78 | self.output_layer = nn.Linear(hidden_size, 1) 79 | self.init_weights() 80 | 81 | def init_weights(self): 82 | initrange = 0.1 83 | self.embedding.weight.data.uniform_(-initrange, initrange) 84 | self.output_layer.bias.data.fill_(0) 85 | self.output_layer.weight.data.uniform_(-initrange, initrange) 86 | 87 | def 
forward(self, X): 88 | emb = embedded_dropout(self.embedding, X, dropout=self.edrop if self.training else 0) 89 | if self.standard_dropout: 90 | raw_output = F.dropout(emb, p=self.idrop, training=self.training) 91 | else: 92 | raw_output = self.lockdrop(emb, self.idrop) 93 | new_hidden, new_cell_state = [], [] 94 | for l, rnn in enumerate(self.rnns): 95 | raw_output, (new_h, new_c) = rnn(raw_output) 96 | if self.standard_dropout: 97 | raw_output = F.dropout(raw_output, p=self.odrop, training=self.training) 98 | else: 99 | raw_output = self.lockdrop(raw_output, self.odrop) 100 | new_hidden.append(new_h) 101 | new_cell_state.append(new_c) 102 | hidden = torch.cat(new_hidden, 0) 103 | cell_state = torch.cat(new_cell_state, 0) 104 | final_output = self.output_layer(raw_output) 105 | return final_output[:, -1, 0], hidden, cell_state 106 | 107 | 108 | # In[5]: 109 | 110 | 111 | MC_ROUNDS = 10 112 | def fit(model, optimizer, X_train_tensor, Y_train_tensor, 113 | X_test_tensor, Y_test_tensor, n_epochs=30, mc_dropout=True): 114 | epoch_losses = [] 115 | criterion = torch.nn.MSELoss() 116 | for epoch in range(n_epochs): 117 | indices = torch.randperm(len(X_train)).cuda() 118 | losses, losses_raw = [], [] 119 | model.train() 120 | for i in range(0, len(X_train), BATCH_SIZE): #tqdm_notebook(range(0, len(X_train), BATCH_SIZE)): 121 | optimizer.zero_grad() 122 | pred, _, _ = model(Variable(X_train_tensor[indices[i:(i+BATCH_SIZE)]])) 123 | loss_raw = criterion( 124 | pred, 125 | Variable(Y_train_tensor[indices[i:(i+BATCH_SIZE)]], requires_grad=False) 126 | ) 127 | loss = F.mse_loss(inverse_transform(pred), 128 | inverse_transform( 129 | Variable(Y_train_tensor[indices[i:(i+BATCH_SIZE)]], requires_grad=False))) 130 | losses_raw.append(loss_raw.data.cpu()[0]) 131 | losses.append(loss.data.cpu()[0]) 132 | loss_raw.backward() 133 | optimizer.step() 134 | train_loss = np.mean(losses)** 0.5 135 | train_loss_raw = np.mean(losses_raw) 136 | # Standard dropout approximation 137 | losses, losses_raw = [], [] 138 | model.eval() 139 | for i in range(0, len(X_test), TEST_BATCH_SIZE): 140 | pred_test, _, _ = model(Variable(X_test_tensor[i:(i+TEST_BATCH_SIZE)], volatile=True)) 141 | loss_raw = F.mse_loss(pred_test, Variable(Y_test_tensor[i:(i+TEST_BATCH_SIZE)])) 142 | loss = F.mse_loss(inverse_transform(pred_test), 143 | inverse_transform(Variable(Y_test_tensor[i:(i+TEST_BATCH_SIZE)]))) 144 | losses_raw.append(loss_raw.data.cpu()[0]) 145 | losses.append(loss.data.cpu()[0]) 146 | std_test_loss = np.mean(losses) ** 0.5 147 | std_test_loss_raw = np.mean(losses_raw) 148 | if mc_dropout: 149 | # MC dropout 150 | losses, losses_raw = [], [] 151 | model.train() 152 | for i in range(0, len(X_test), TEST_BATCH_SIZE): 153 | pred_list = [] 154 | for j in range(MC_ROUNDS): 155 | pred_test, _, _ = model(Variable(X_test_tensor[i:(i+TEST_BATCH_SIZE)], volatile=True)) 156 | pred_list.append(pred_test.unsqueeze(0)) 157 | pred_all = torch.mean(torch.cat(pred_list, 0), 0) 158 | loss_raw = F.mse_loss(pred_all, Variable(Y_test_tensor[i:(i+TEST_BATCH_SIZE)])) 159 | loss = F.mse_loss(inverse_transform(pred_all), 160 | inverse_transform(Variable(Y_test_tensor[i:(i+TEST_BATCH_SIZE)]))) 161 | losses_raw.append(loss_raw.data.cpu()[0]) 162 | losses.append(loss.data.cpu()[0]) 163 | mc_test_loss = np.mean(losses) ** 0.5 164 | mc_test_loss_raw = np.mean(losses_raw) 165 | epoch_losses.append([ 166 | train_loss, std_test_loss, mc_test_loss, 167 | train_loss_raw, std_test_loss_raw, mc_test_loss_raw 168 | ]) 169 | print("Epoch: {} Train: {:.4f}/{:.4f}, 
Val Std: {:.4f}/{:.4f}, Val MC: {:.4f}/{:.4f}".format( 170 | epoch, train_loss, train_loss_raw, std_test_loss, std_test_loss_raw, mc_test_loss, mc_test_loss_raw)) 171 | else: 172 | epoch_losses.append([train_loss, std_test_loss, train_loss_raw, std_test_loss_raw])  # no MC columns when mc_dropout=False 173 | print("Epoch: {} Train: {:.4f}/{:.4f}, Val Std: {:.4f}/{:.4f}".format( 174 | epoch, train_loss, train_loss_raw, std_test_loss, std_test_loss_raw)) 175 | return epoch_losses 176 | 177 | 178 | # In[6]: 179 | 180 | 181 | Y_train_tensor = torch.from_numpy(Y_train).float().cuda() 182 | Y_test_tensor = torch.from_numpy(Y_test).float().cuda() 183 | X_train_tensor = torch.from_numpy(X_train).long().cuda() 184 | X_test_tensor = torch.from_numpy(X_test).long().cuda() 185 | 186 | 187 | # ## Weight Dropped LSTM (w Embedding Dropout) 188 | 189 | # In[7]: 190 | 191 | 192 | model_1 = Model(NB_WORDS + dataset.index_from, wdrop=0.02, odrop=0.1, edrop=0.2, idrop=0.1) 193 | model_1.cuda() 194 | optimizer = torch.optim.Adam([ 195 | {'params': model_1.parameters(), 'lr': 1e-4, 'weight_decay': 1e-4} 196 | ],) 197 | epoch_losses_1 = fit( 198 | model_1, optimizer, X_train_tensor, Y_train_tensor, X_test_tensor, Y_test_tensor, n_epochs=100) 199 | 200 | 201 | # In[11]: 202 | 203 | 204 | print("Best RMSE: {:.4f} Best Epoch: {}".format( 205 | np.min([x[2] for x in epoch_losses_1]), 206 | np.argmin([x[2] for x in epoch_losses_1]) + 1 207 | )) 208 | 209 | 210 | # ## No Dropout 211 | 212 | # In[12]: 213 | 214 | 215 | model_2 = Model(NB_WORDS + dataset.index_from, wdrop=0, odrop=0, edrop=0, idrop=0) 216 | model_2.cuda() 217 | optimizer = torch.optim.Adam([ 218 | {'params': model_2.parameters(), 'lr': 1e-4} 219 | ],) 220 | epoch_losses_2 = fit( 221 | model_2, optimizer, X_train_tensor, Y_train_tensor, X_test_tensor, Y_test_tensor, n_epochs=100) 222 | 223 | 224 | # In[13]: 225 | 226 | 227 | print("Best RMSE: {:.4f} Best Epoch: {}".format( 228 | np.min([x[2] for x in epoch_losses_2]), 229 | np.argmin([x[2] for x in epoch_losses_2]) + 1 230 | )) 231 | 232 | 233 | # ## Naive Dropout (w/o Embedding Dropout) 234 | 235 | # In[14]: 236 | 237 | 238 | model_3 = Model(NB_WORDS + dataset.index_from, 239 | wdrop=0, odrop=0.2, edrop=0, idrop=0.2, standard_dropout=True) 240 | model_3.cuda() 241 | optimizer = torch.optim.Adam([ 242 | {'params': model_3.parameters(), 'lr': 1e-4, 'weight_decay': 1e-4} 243 | ],) 244 | epoch_losses_3 = fit( 245 | model_3, optimizer, X_train_tensor, Y_train_tensor, X_test_tensor, Y_test_tensor, n_epochs=100) 246 | 247 | 248 | # In[16]: 249 | 250 | 251 | print("Best RMSE: {:.4f} Best Epoch: {}".format( 252 | np.min([x[2] for x in epoch_losses_3]), 253 | np.argmin([x[2] for x in epoch_losses_3]) + 1 254 | )) 255 | 256 | 257 | # ## Variational LSTM 258 | 259 | # In[17]: 260 | 261 | 262 | model_4 = Model(NB_WORDS + dataset.index_from, wdrop=0.02, odrop=0.1, edrop=0.2, idrop=0.1, variational=True) 263 | model_4.cuda() 264 | optimizer = torch.optim.Adam([ 265 | {'params': model_4.parameters(), 'lr': 1e-4, 'weight_decay': 1e-4} 266 | ],) 267 | epoch_losses_4 = fit( 268 | model_4, optimizer, X_train_tensor, Y_train_tensor, X_test_tensor, Y_test_tensor, n_epochs=100) 269 | 270 | 271 | # In[19]: 272 | 273 | 274 | print("Best RMSE: {:.4f} Best Epoch: {}".format( 275 | np.min([x[2] for x in epoch_losses_4]), 276 | np.argmin([x[2] for x in epoch_losses_4]) + 1 277 | )) 278 | 279 | 280 | # ## Variational LSTM w/o Recurrent Dropout 281 | 282 | # In[20]: 283 | 284 | 285 | model_5 = Model(NB_WORDS + dataset.index_from, wdrop=0, odrop=0.1, edrop=0.2, idrop=0.1) 286 |
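# Note (added comment): wdrop=0 removes the DropConnect mask on weight_hh_l0, so this
# variant keeps the variational embedding/input/output dropout but applies no dropout
# to the recurrent weights, matching the section title above.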
model_5.cuda() 287 | optimizer = torch.optim.Adam([ 288 | {'params': model_5.parameters(), 'lr': 1e-4, 'weight_decay': 1e-4} 289 | ],) 290 | epoch_losses_5 = fit( 291 | model_5, optimizer, X_train_tensor, Y_train_tensor, X_test_tensor, Y_test_tensor, n_epochs=100) 292 | 293 | 294 | # In[21]: 295 | 296 | 297 | print("Best RMSE: {:.4f} Best Epoch: {}".format( 298 | np.min([x[2] for x in epoch_losses_5]), 299 | np.argmin([x[2] for x in epoch_losses_5]) + 1 300 | )) 301 | 302 | 303 | # ## Visualizations 304 | 305 | # In[52]: 306 | 307 | 308 | bins = np.arange(-0.012, 0.005, 0.001) 309 | 310 | 311 | # In[61]: 312 | 313 | 314 | plt.figure(figsize=(12, 4)) 315 | plt.subplot(1, 2, 1) 316 | plt.title("Raw MSE Comparison - Training Set") 317 | plt.plot(np.arange(len(epoch_losses_1)), [x[3] for x in epoch_losses_1], label="weight dropped") 318 | plt.plot(np.arange(len(epoch_losses_2)), [x[3] for x in epoch_losses_2], "g-", label="no dropout") 319 | plt.plot(np.arange(len(epoch_losses_3)), [x[3] for x in epoch_losses_3], "y-", label="naive dropout") 320 | plt.plot(np.arange(len(epoch_losses_4)), [x[3] for x in epoch_losses_4], "m-", label="variational") 321 | plt.plot(np.arange(len(epoch_losses_5)), [x[3] for x in epoch_losses_5], "c-", label="v w/o r-drop") 322 | plt.legend(loc='best') 323 | plt.xlabel("epochs") 324 | plt.ylabel("Raw MSE") 325 | plt.subplot(1, 2, 2) 326 | plt.title("(MC - Approx) Histogram") 327 | plt.hist([x[2] - x[1] for x in epoch_losses_1], bins=bins, alpha=0.3, label="w-dropped") 328 | plt.hist([x[2] - x[1] for x in epoch_losses_3], bins=bins, alpha=0.3, label="naive") 329 | plt.hist([x[2] - x[1] for x in epoch_losses_4], bins=bins, alpha=0.3, label="variational w-drop") 330 | plt.legend(loc='best') 331 | plt.xlabel("Difference in Raw MSE") 332 | plt.ylabel("Count") 333 | plt.xticks(fontsize=8, rotation=0) 334 | 335 | 336 | # In[60]: 337 | 338 | 339 | plt.figure(figsize=(10, 6)) 340 | plt.title("RMSE Comparison - Validation Set") 341 | plt.plot(np.arange(len(epoch_losses_1)), [x[2] for x in epoch_losses_1], label="weight dropped") 342 | plt.plot(np.arange(len(epoch_losses_2)), [x[2] for x in epoch_losses_2], "g-", label="no dropout") 343 | plt.plot(np.arange(len(epoch_losses_3)), [x[2] for x in epoch_losses_3], "y-", label="naive dropout") 344 | plt.plot(np.arange(len(epoch_losses_4)), [x[2] for x in epoch_losses_4], "m-", label="variational") 345 | plt.plot(np.arange(len(epoch_losses_5)), [x[2] for x in epoch_losses_5], "c-", label="v w/o r-drop") 346 | plt.legend(loc='best') 347 | plt.xlabel("epochs") 348 | plt.ylabel("RMSE") 349 | 350 | 351 | # In[ ]: 352 | 353 | 354 | 355 | 356 | -------------------------------------------------------------------------------- /(PyTorch) IMDB Dataset.py: -------------------------------------------------------------------------------- 1 | 2 | # coding: utf-8 3 | 4 | # # (PyTorch) IMDB Dataset 5 | 6 | # In[1]: 7 | 8 | 9 | import torch 10 | import torch.nn as nn 11 | from torch.autograd import Variable 12 | import torch.nn.functional as F 13 | from tensorflow.contrib.keras.python.keras.datasets import imdb 14 | from tensorflow.contrib.keras.python.keras.preprocessing import sequence 15 | from sklearn.metrics import mean_squared_error 16 | from tqdm import tqdm_notebook 17 | import numpy as np 18 | import matplotlib.pyplot as plt 19 | # import matplotlib.ticker as ticker 20 | 21 | from yaringal_dataset import loader 22 | from weight_drop import WeightDrop 23 | from embed_regularize import embedded_dropout 24 | from locked_dropout import
LockedDropout 25 | 26 | get_ipython().magic('matplotlib inline') 27 | plt.style.use('fivethirtyeight') 28 | plt.rcParams["figure.figsize"] = (10, 5) 29 | 30 | # Global params: 31 | NB_WORDS = 20000 32 | SKIP_TOP = 0 33 | TEST_SPLIT = 0.2 34 | INIT_SEED = 2017 35 | GLOBAL_SEED = 2018 36 | MAXLEN = 80 37 | BATCH_SIZE = 128 38 | TEST_BATCH_SIZE = 512 39 | 40 | 41 | # In[2]: 42 | 43 | 44 | (X_train, Y_train), (X_test, Y_test) = imdb.load_data(num_words=NB_WORDS) 45 | print(len(X_train), 'train sequences') 46 | print(len(X_test), 'test sequences') 47 | print('Pad sequences (samples x time)') 48 | X_train = sequence.pad_sequences(X_train, maxlen=MAXLEN) 49 | X_test = sequence.pad_sequences(X_test, maxlen=MAXLEN) 50 | print('x_train shape:', X_train.shape) 51 | print('x_test shape:', X_test.shape) 52 | 53 | 54 | # In[3]: 55 | 56 | 57 | class Model(nn.Module): 58 | def __init__(self, nb_words, hidden_size=128, embedding_size=128, n_layers=1, 59 | wdrop=0.25, odrop=0.25, edrop=0.1, idrop=0.25, variational=False, 60 | standard_dropout=False, batch_first=True): 61 | super(Model, self).__init__() 62 | self.standard_dropout = standard_dropout 63 | self.lockdrop = LockedDropout(batch_first=batch_first) 64 | self.odrop = odrop 65 | self.idrop = idrop 66 | self.edrop = edrop 67 | self.n_layers = n_layers 68 | self.embedding = nn.Embedding(nb_words, embedding_size) 69 | self.rnns = [ 70 | nn.LSTM(embedding_size if l == 0 else hidden_size, 71 | hidden_size, num_layers=1, batch_first=batch_first) 72 | for l in range(n_layers) 73 | ] 74 | if wdrop: 75 | self.rnns = [WeightDrop(rnn, ['weight_hh_l0'], dropout=wdrop, variational=variational) 76 | for rnn in self.rnns] 77 | self.rnns = torch.nn.ModuleList(self.rnns) 78 | self.output_layer = nn.Linear(hidden_size, 1) 79 | self.init_weights() 80 | 81 | def init_weights(self): 82 | initrange = 0.1 83 | self.embedding.weight.data.uniform_(-initrange, initrange) 84 | self.output_layer.bias.data.fill_(0) 85 | self.output_layer.weight.data.uniform_(-initrange, initrange) 86 | 87 | def forward(self, X): 88 | emb = embedded_dropout(self.embedding, X, dropout=self.edrop if self.training else 0) 89 | if self.standard_dropout: 90 | raw_output = F.dropout(emb, p=self.idrop, training=self.training) 91 | else: 92 | raw_output = self.lockdrop(emb, self.idrop) 93 | new_hidden, new_cell_state = [], [] 94 | for l, rnn in enumerate(self.rnns): 95 | raw_output, (new_h, new_c) = rnn(raw_output) 96 | if self.standard_dropout: 97 | raw_output = F.dropout(raw_output, p=self.odrop, training=self.training) 98 | else: 99 | raw_output = self.lockdrop(raw_output, self.odrop) 100 | new_hidden.append(new_h) 101 | new_cell_state.append(new_c) 102 | hidden = torch.cat(new_hidden, 0) 103 | cell_state = torch.cat(new_cell_state, 0) 104 | final_output = self.output_layer(raw_output) 105 | return final_output[:, -1, 0], hidden, cell_state 106 | 107 | 108 | # In[4]: 109 | 110 | 111 | MC_ROUNDS = 10 112 | def fit(model, optimizer, X_train_tensor, Y_train_tensor, X_test_tensor, Y_test_tensor, n_epochs=30): 113 | epoch_losses = [] 114 | criterion = torch.nn.BCEWithLogitsLoss() 115 | for epoch in range(n_epochs): 116 | indices = torch.randperm(len(X_train)).cuda() 117 | losses, acc = [], [] 118 | model.train() 119 | for i in range(0, len(X_train), BATCH_SIZE): #tqdm_notebook(range(0, len(X_train), BATCH_SIZE)): 120 | optimizer.zero_grad() 121 | pred, _, _ = model(Variable(X_train_tensor[indices[i:(i+BATCH_SIZE)]])) 122 | # print(pred.size()) 123 | loss = criterion( 124 | pred, 125 | 
Variable(Y_train_tensor[indices[i:(i+BATCH_SIZE)]], requires_grad=False) 126 | ) 127 | acc.append( 128 | torch.eq( 129 | (F.sigmoid(pred).data > 0.5).float(), 130 | Y_train_tensor[indices[i:(i+BATCH_SIZE)]] 131 | ) 132 | ) 133 | losses.append(loss.data.cpu()[0]) 134 | loss.backward() 135 | optimizer.step() 136 | train_acc = torch.mean(torch.cat(acc).float()) 137 | train_loss = np.mean(losses) 138 | # Standard dropout approximation 139 | losses, acc=[], [] 140 | model.eval() 141 | for i in range(0, len(X_test), TEST_BATCH_SIZE): 142 | pred_test, _, _ = model(Variable(X_test_tensor[i:(i+TEST_BATCH_SIZE)], volatile=True)) 143 | # print(pred.size()) 144 | loss = F.binary_cross_entropy_with_logits( 145 | pred_test, Variable(Y_test_tensor[i:(i+TEST_BATCH_SIZE)])) 146 | acc.append( 147 | torch.eq( 148 | (F.sigmoid(pred_test).data > 0.5).float(), 149 | Y_test_tensor[i:(i+TEST_BATCH_SIZE)] 150 | ) 151 | ) 152 | losses.append(loss.data.cpu()[0]) 153 | std_test_acc = torch.mean(torch.cat(acc).float()) 154 | std_test_loss = np.mean(losses) 155 | # MC dropout 156 | losses, acc = [], [] 157 | model.train() 158 | for i in range(0, len(X_test), TEST_BATCH_SIZE): 159 | pred_list = [] 160 | for j in range(MC_ROUNDS): 161 | pred_test, _, _ = model(Variable(X_test_tensor[i:(i+TEST_BATCH_SIZE)], volatile=True)) 162 | pred_list.append(pred_test.unsqueeze(0)) 163 | pred_all = torch.mean(torch.cat(pred_list, 0), 0) 164 | loss = F.binary_cross_entropy_with_logits( 165 | pred_all, Variable(Y_test_tensor[i:(i+TEST_BATCH_SIZE)])) 166 | acc.append( 167 | torch.eq( 168 | (F.sigmoid(pred_all).data > 0.5).float(), 169 | Y_test_tensor[i:(i+TEST_BATCH_SIZE)] 170 | ) 171 | ) 172 | losses.append(loss.data.cpu()[0]) 173 | mc_test_acc = torch.mean(torch.cat(acc).float()) 174 | mc_test_loss = np.mean(losses) 175 | epoch_losses.append([ 176 | train_loss, std_test_loss, mc_test_loss, 177 | train_acc, std_test_acc, mc_test_acc]) 178 | print("Epoch: {} Train: {:.4f}/{:.2f}%, Val Std: {:.4f}/{:.2f}%, Val MC: {:.4f}/{:.2f}%".format( 179 | epoch, train_loss, train_acc*100, std_test_loss, std_test_acc*100, mc_test_loss, mc_test_acc*100)) 180 | return epoch_losses 181 | 182 | 183 | # In[5]: 184 | 185 | 186 | Y_train_tensor = torch.from_numpy(Y_train).float().cuda() 187 | Y_test_tensor = torch.from_numpy(Y_test).float().cuda() 188 | X_train_tensor = torch.from_numpy(X_train).long().cuda() 189 | X_test_tensor = torch.from_numpy(X_test).long().cuda() 190 | 191 | 192 | # ## Weight Dropped LSTM (w Embedding Dropout) 193 | 194 | # In[6]: 195 | 196 | 197 | model_1 = Model(NB_WORDS, wdrop=0.02, odrop=0.1, edrop=0.2, idrop=0.1) 198 | model_1.cuda() 199 | optimizer = torch.optim.Adam([ 200 | {'params': model_1.parameters(), 'lr': 1e-4, 'weight_decay': 1e-4} 201 | ]) 202 | epoch_losses_1 = fit( 203 | model_1, optimizer, X_train_tensor, Y_train_tensor, X_test_tensor, Y_test_tensor, n_epochs=20) 204 | 205 | 206 | # In[16]: 207 | 208 | 209 | best_epoch = np.argmin([x[2] for x in epoch_losses_1]) + 1 210 | print("Best Loss: {:.4f} Acc: {:.2f}% Best Epoch: {}".format( 211 | epoch_losses_1[best_epoch-1][2], 212 | epoch_losses_1[best_epoch-1][5] * 100, 213 | best_epoch 214 | )) 215 | 216 | 217 | # ## No Dropout 218 | 219 | # In[8]: 220 | 221 | 222 | model_2 = Model(NB_WORDS, wdrop=0, odrop=0, edrop=0, idrop=0) 223 | model_2.cuda() 224 | optimizer = torch.optim.Adam(model_2.parameters(), lr=1e-4) 225 | epoch_losses_2 = fit(model_2, optimizer, X_train_tensor, Y_train_tensor, 226 | X_test_tensor, Y_test_tensor, n_epochs=20) 227 | 228 | 229 | # In[9]: 230 | 
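# Note (added comment): model_2 trains with every dropout probability set to zero, so
# the MC forward passes coincide with the standard one; the plain validation loss
# (index 1) is therefore used to select the best epoch below.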
231 | 232 | best_epoch = np.argmin([x[1] for x in epoch_losses_2]) + 1 233 | print("Best Loss: {:.4f} Acc: {:.2f}% Best Epoch: {}".format( 234 | epoch_losses_2[best_epoch-1][1], 235 | epoch_losses_2[best_epoch-1][4] * 100, 236 | best_epoch 237 | )) 238 | 239 | 240 | # ## Naive Dropout (w/o Embedding Dropout) 241 | 242 | # In[10]: 243 | 244 | 245 | model_3 = Model(NB_WORDS, wdrop=0, odrop=0.2, edrop=0, idrop=0.2, standard_dropout=True) 246 | model_3.cuda() 247 | optimizer = torch.optim.Adam(model_3.parameters(), lr=1e-4) 248 | epoch_losses_3 = fit(model_3, optimizer, X_train_tensor, Y_train_tensor, 249 | X_test_tensor, Y_test_tensor, n_epochs=20) 250 | 251 | 252 | # In[11]: 253 | 254 | 255 | best_epoch = np.argmin([x[2] for x in epoch_losses_3]) + 1 256 | print("Best Loss: {:.4f} Acc: {:.2f}% Best Epoch: {}".format( 257 | epoch_losses_3[best_epoch-1][2], 258 | epoch_losses_3[best_epoch-1][5] * 100, 259 | best_epoch 260 | )) 261 | 262 | 263 | # ## Variational LSTM 264 | 265 | # In[12]: 266 | 267 | 268 | model_4 = Model(NB_WORDS, wdrop=0.02, odrop=0.1, edrop=0.2, idrop=0.1, variational=True) 269 | model_4.cuda() 270 | optimizer = torch.optim.Adam(model_4.parameters(), lr=1e-4) 271 | epoch_losses_4 = fit(model_4, optimizer, X_train_tensor, Y_train_tensor, 272 | X_test_tensor, Y_test_tensor, n_epochs=20) 273 | 274 | 275 | # In[13]: 276 | 277 | 278 | best_epoch = np.argmin([x[2] for x in epoch_losses_4]) + 1 279 | print("Best Loss: {:.4f} Acc: {:.2f}% Best Epoch: {}".format( 280 | epoch_losses_4[best_epoch-1][2], 281 | epoch_losses_4[best_epoch-1][5] * 100, 282 | best_epoch 283 | )) 284 | 285 | 286 | # ## Variational LSTM w/o Recurrent Dropout 287 | 288 | # In[14]: 289 | 290 | 291 | model_5 = Model(NB_WORDS, wdrop=0., odrop=0.1, edrop=0.2, idrop=0.1) 292 | model_5.cuda() 293 | optimizer = torch.optim.Adam(model_5.parameters(), lr=1e-4) 294 | epoch_losses_5 = fit(model_5, optimizer, X_train_tensor, Y_train_tensor, 295 | X_test_tensor, Y_test_tensor, n_epochs=20) 296 | 297 | 298 | # In[17]: 299 | 300 | 301 | best_epoch = np.argmin([x[2] for x in epoch_losses_5]) + 1 302 | print("Best Loss: {:.4f} Acc: {:.2f}% Best Epoch: {}".format( 303 | epoch_losses_5[best_epoch-1][2], 304 | epoch_losses_5[best_epoch-1][5] * 100, 305 | best_epoch 306 | )) 307 | 308 | 309 | # ## Visualizations 310 | 311 | # In[27]: 312 | 313 | 314 | bins = np.arange(-0.03, 0.01, 0.002) 315 | plt.figure(figsize=(12, 4)) 316 | plt.subplot(1, 2, 1) 317 | plt.title("Accuracy Comparison - Training Set") 318 | plt.plot(np.arange(len(epoch_losses_1)), [x[3] * 100 for x in epoch_losses_1], label="weight dropped") 319 | plt.plot(np.arange(len(epoch_losses_2)), [x[3] * 100 for x in epoch_losses_2], "g-", label="no dropout") 320 | plt.plot(np.arange(len(epoch_losses_3)), [x[3] * 100 for x in epoch_losses_3], "y-", label="naive dropout") 321 | plt.plot(np.arange(len(epoch_losses_4)), [x[3] * 100 for x in epoch_losses_4], "m-", label="variational") 322 | plt.plot(np.arange(len(epoch_losses_5)), [x[3] * 100 for x in epoch_losses_5], "c-", label="v w/o r-drop") 323 | plt.legend(loc='best') 324 | plt.xlabel("epochs") 325 | plt.ylabel("Accuracy (%)") 326 | plt.subplot(1, 2, 2) 327 | plt.title("(MC - Approx) Histogram") 328 | plt.hist([x[2] - x[1] for x in epoch_losses_1], bins=bins, alpha=0.3, label="w-dropped") 329 | plt.hist([x[2] - x[1] for x in epoch_losses_3], bins=bins, alpha=0.3, label="naive") 330 | plt.hist([x[2] - x[1] for x in epoch_losses_4], bins=bins, alpha=0.3, label="variational w-drop") 331 | plt.legend(loc='best') 332 |
plt.xlabel("Difference in Raw MSE") 333 | plt.ylabel("Count") 334 | plt.xticks(fontsize=8, rotation=0) 335 | 336 | 337 | # In[33]: 338 | 339 | 340 | plt.figure(figsize=(12, 4)) 341 | plt.subplot(1, 2, 1) 342 | plt.title("Log Loss Comparison - Validation Set") 343 | plt.plot(np.arange(len(epoch_losses_1)), [x[2] * 100 for x in epoch_losses_1], "b-", label="weight dropped(mc)") 344 | plt.plot(np.arange(len(epoch_losses_2)), [x[2] * 100 for x in epoch_losses_2], "g-", label="no dropout") 345 | plt.plot(np.arange(len(epoch_losses_3)), [x[2] * 100 for x in epoch_losses_3], "y-", label="naive dropout(mc)") 346 | plt.plot(np.arange(len(epoch_losses_4)), [x[2] * 100 for x in epoch_losses_4], "m-", label="variational") 347 | plt.plot(np.arange(len(epoch_losses_5)), [x[2] * 100 for x in epoch_losses_5], "c-", label="v w/o r-drop") 348 | plt.legend(loc='best') 349 | plt.xlabel("epochs") 350 | plt.ylabel("Log Loss") 351 | plt.subplot(1, 2, 2) 352 | plt.title("Accuracy Comparison - Validation Set") 353 | plt.plot(np.arange(len(epoch_losses_1)), [x[5] for x in epoch_losses_1], "b-", label="weight dropped(mc)") 354 | plt.plot(np.arange(len(epoch_losses_2)), [x[5] for x in epoch_losses_2], "g-", label="no dropout") 355 | plt.plot(np.arange(len(epoch_losses_3)), [x[5] for x in epoch_losses_3], "y-", label="naive dropout(mc)") 356 | plt.plot(np.arange(len(epoch_losses_4)), [x[5] for x in epoch_losses_4], "m-", label="variational") 357 | plt.plot(np.arange(len(epoch_losses_5)), [x[5] for x in epoch_losses_5], "c-", label="v w/o r-drop") 358 | plt.legend(loc='best') 359 | plt.xlabel("epochs") 360 | plt.ylabel("Accuracy (%)") 361 | 362 | 363 | # In[ ]: 364 | 365 | 366 | 367 | 368 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.py[cod] 2 | 3 | # C extensions 4 | *.so 5 | 6 | # Packages 7 | *.egg 8 | *.egg-info 9 | dist 10 | build 11 | eggs 12 | .eggs 13 | parts 14 | bin 15 | var 16 | sdist 17 | wheelhouse 18 | develop-eggs 19 | .installed.cfg 20 | lib 21 | lib64 22 | venv*/ 23 | pyvenv*/ 24 | 25 | # Installer logs 26 | pip-log.txt 27 | 28 | # Unit test / coverage reports 29 | .coverage 30 | .tox 31 | .coverage.* 32 | nosetests.xml 33 | coverage.xml 34 | htmlcov 35 | 36 | # Translations 37 | *.mo 38 | 39 | # Mr Developer 40 | .mr.developer.cfg 41 | .project 42 | .pydevproject 43 | .idea 44 | *.iml 45 | *.komodoproject 46 | 47 | # Complexity 48 | output/*.html 49 | output/*/index.html 50 | 51 | # Sphinx 52 | docs/_build 53 | 54 | .DS_Store 55 | *~ 56 | .*.sw[po] 57 | .build 58 | .ve 59 | .env 60 | .cache 61 | .pytest 62 | .bootstrap 63 | .appveyor.token 64 | *.bak 65 | 66 | *.html 67 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ceshine/cuda-pytorch:0.2.0 2 | 3 | MAINTAINER CeShine Lee 4 | 5 | RUN pip install --upgrade pip tqdm && \ 6 | pip install tensorflow-gpu==1.3.0 7 | 8 | RUN conda install -y --quiet jupyter mkl-service matplotlib && \ 9 | conda clean -tipsy 10 | 11 | # Jupyter 12 | EXPOSE 8888 13 | CMD jupyter notebook --ip=0.0.0.0 --port=8888 14 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # recurrent-dropout-experiments 2 | 3 | Source code for the Medium post — [[Learning Note] Dropout in Recurrent 
Networks — Part 3](https://towardsdatascience.com/learning-note-dropout-in-recurrent-networks-part-3-1b161d030cd4). 4 | 5 | Other posts in the series: 6 | 7 | * [[Learning Note] Dropout in Recurrent Networks — Part 1](https://becominghuman.ai/learning-note-dropout-in-recurrent-networks-part-1-57a9c19a2307) 8 | * [[Learning Note] Dropout in Recurrent Networks — Part 2](https://towardsdatascience.com/learning-note-dropout-in-recurrent-networks-part-2-f209222481f8) 9 | -------------------------------------------------------------------------------- /data/scaledata.README.1.0.txt: -------------------------------------------------------------------------------- 1 | 2 | ======= 3 | 4 | Introduction 5 | 6 | This README v1.0 (June, 2005) for the v0.9 and v1.0 scale datasets comes 7 | from the URL 8 | http://www.cs.cornell.edu/people/pabo/movie-review-data . 9 | 10 | ======= 11 | 12 | Citation Info 13 | 14 | This data was first used in Bo Pang and Lillian Lee, 15 | ``Seeing stars: Exploiting class relationships for sentiment categorization 16 | with respect to rating scales.'', Proceedings of the ACL, 2005. 17 | 18 | @InProceedings{Pang+Lee:05a, 19 | author = {Bo Pang and Lillian Lee}, 20 | title = {Seeing stars: Exploiting class relationships for sentiment 21 | categorization with respect to rating scales}, 22 | booktitle = {Proceedings of the ACL}, 23 | year = 2005 24 | } 25 | 26 | ======= 27 | 28 | Data Format Summary 29 | 30 | There are two tar files, roughly corresponding to (1) the reviews 31 | after pre-processing, including subjectivity extraction (i.e., the 32 | data we used in our experiments) and (2) the reviews after very light 33 | pre-processing (provided in case these prove convenient to others; to 34 | date we have not experimented directly with them). 35 | 36 | 37 | (1) scale_data.tar.gz (scale dataset v1.0): contains this readme and 38 | data files that were used in the experiments described in Pang/Lee 39 | ACL 2005. 40 | 41 | Specifically: 42 | 43 | Each sub-directory $author contains data extracted from reviews written 44 | by some single author; altogether, there are four author sub-directories. 45 | 46 | In each such sub-directory, each line in the file subj.$author 47 | corresponds to the subjective extract of one review. The 48 | corresponding line in the file id.$author specifies the source html 49 | file for the review from which the extract was created; these source 50 | files can be found in polarity_html.zip, available from 51 | http://www.cs.cornell.edu/people/pabo/movie-review-data ("Pool of 52 | 27886 unprocessed html files"). 53 | 54 | We automatically tokenized and applied a pattern-matching technique to 55 | remove explicit rating indications from the reviews. Subjective 56 | sentences were automatically identified using the system described in 57 | our 2004 ACL paper 58 | (http://www.cs.cornell.edu/home/llee/papers/cutsent.home.html). We 59 | did not apply any feature selection algorithms in our experiments; 60 | we simply used all unigrams as features, and used feature 61 | presence/absence to create feature vectors. 62 | 63 | The class label for each extract is given in the corresponding line 64 | of the file label.3class.$author (for the {0,1,2} three-category 65 | classification task) or label.4class.$author (for the {0,1,2,3} 66 | four-category classification task).
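(Illustrative only; this snippet is not part of the original distribution.)
Because the extract and label files are parallel, line-aligned files, they
can be read as matching lists, e.g. in Python:

  author = "Dennis+Schwartz"   # any of the four author sub-directories
  with open(author + "/subj." + author) as f:
      extracts = f.read().splitlines()
  with open(author + "/label.3class." + author) as f:
      labels = [int(line) for line in f.read().splitlines()]
  assert len(extracts) == len(labels)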
67 | 68 | For those who wish to experiment with more fine-grained labels, we 69 | also provide normalized ratings (in the range [0-1] with stepsize 70 | 0.1 or smaller, depending on the smallest unit used by the author) 71 | in the file rating.$author. 72 | 73 | EXAMPLE: consider the information corresponding to the extract 74 | represented by the first line of Steve+Rhodes/subj.Steve+Rhodes: 75 | 76 | % paste Steve+Rhodes/label.3class.Steve+Rhodes \ 77 | Steve+Rhodes/label.4class.Steve+Rhodes Steve+Rhodes/id.Steve+Rhodes \ 78 | Steve+Rhodes/rating.Steve+Rhodes | head -1 79 | 0 0 11790 0.1 80 | 81 | The class labels for both the three-class and four-class tasks are 0. 82 | The original review was written by Steve Rhodes and extracted from 83 | 11790.html (see above for location of original reviews). 84 | The numerical rating converted from the four-star system used by the author 85 | (1/2 star was the smallest unit he employed) is 0.1 (see section 86 | "Label Decision" below for more information on rating normalization). 87 | 88 | 89 | (2) scale_whole_review.tar.gz (scale dataset v0.9): Contains this 90 | README and the review files in their entireties before passing 91 | through tokenization, sentence separation, and subjectivity 92 | extraction. 93 | 94 | Specifically: 95 | 96 | The entire review for each subjective extract in $author/subj.$author 97 | (of scale dataset v1.0) can be identified by the id number specified 98 | in the corresponding line of $author/id.$author and located as file 99 | $author/txt.parag/$id.txt 100 | where each line of $id.txt corresponds to one paragraph of the review. 101 | 102 | ======= 103 | 104 | Label Decision 105 | 106 | The numerical ratings were derived from texts in the original html 107 | files. Note that with our particular conversion scheme, 0-to-4 stars 108 | within a four star system translates into 0.1-to-0.9 in our normalized 109 | numerical ratings, whereas 0-to-5 stars within a five star system 110 | translates into 0-to-1. (The reasoning was that in a four-star 111 | system, an author is more likely to assign "endpoint" scores because 112 | the dynamic range of the rating scheme is smaller.) 113 | 114 | The class labels were then derived from the normalized numerical ratings.
115 | * for the three-class task: 116 | 0: rating <= 0.4 117 | 1: 0.4 < rating < 0.7 118 | 2: rating >= 0.7 119 | 120 | * for the four-class task: 121 | 0: rating <=.3 122 | 1: .4 <= rating <=.5 123 | 2: .6 <= rating <= .7 124 | 3: .8 <= rating 125 | 126 | -------------------------------------------------------------------------------- /data/scaledata/Dennis+Schwartz/id.Dennis+Schwartz: -------------------------------------------------------------------------------- 1 | 29420 2 | 17219 3 | 18406 4 | 18648 5 | 20021 6 | 20454 7 | 20473 8 | 20538 9 | 21002 10 | 21739 11 | 21899 12 | 21908 13 | 22251 14 | 23721 15 | 24276 16 | 25097 17 | 25119 18 | 25673 19 | 25799 20 | 26295 21 | 26341 22 | 26402 23 | 26462 24 | 27176 25 | 27366 26 | 27820 27 | 27838 28 | 27962 29 | 28340 30 | 28418 31 | 29411 32 | 29423 33 | 29480 34 | 29517 35 | 17185 36 | 17243 37 | 17255 38 | 17713 39 | 17761 40 | 18156 41 | 18227 42 | 18447 43 | 18987 44 | 19043 45 | 19313 46 | 19439 47 | 20094 48 | 20198 49 | 20218 50 | 20332 51 | 20366 52 | 20513 53 | 20634 54 | 20671 55 | 21031 56 | 21702 57 | 22009 58 | 22052 59 | 22173 60 | 22373 61 | 22641 62 | 22688 63 | 23046 64 | 23077 65 | 23227 66 | 23319 67 | 23355 68 | 23465 69 | 23518 70 | 23544 71 | 23598 72 | 23599 73 | 23613 74 | 23688 75 | 23739 76 | 23818 77 | 24085 78 | 24135 79 | 24136 80 | 24138 81 | 24237 82 | 24315 83 | 24447 84 | 24779 85 | 24837 86 | 24840 87 | 24891 88 | 24894 89 | 24913 90 | 24953 91 | 25098 92 | 25120 93 | 25202 94 | 25323 95 | 25326 96 | 25458 97 | 25580 98 | 25581 99 | 25746 100 | 25747 101 | 26025 102 | 26081 103 | 26082 104 | 26132 105 | 26257 106 | 26258 107 | 26568 108 | 26569 109 | 26594 110 | 26652 111 | 26653 112 | 26654 113 | 26674 114 | 27016 115 | 27017 116 | 27019 117 | 27086 118 | 27232 119 | 27364 120 | 27403 121 | 27512 122 | 27868 123 | 27869 124 | 27891 125 | 27892 126 | 28018 127 | 28080 128 | 28081 129 | 28199 130 | 28303 131 | 28306 132 | 28307 133 | 28309 134 | 28328 135 | 28332 136 | 28394 137 | 28415 138 | 28419 139 | 28539 140 | 28597 141 | 28607 142 | 28685 143 | 28686 144 | 28689 145 | 28799 146 | 28909 147 | 28913 148 | 28916 149 | 28958 150 | 28960 151 | 28961 152 | 28962 153 | 29032 154 | 29040 155 | 29134 156 | 29137 157 | 29139 158 | 29302 159 | 29418 160 | 29427 161 | 29478 162 | 29479 163 | 29518 164 | 29571 165 | 29664 166 | 29665 167 | 29702 168 | 29703 169 | 29810 170 | 29852 171 | 29853 172 | 17280 173 | 17300 174 | 17532 175 | 17609 176 | 17753 177 | 17879 178 | 17971 179 | 18161 180 | 18282 181 | 18566 182 | 18569 183 | 18583 184 | 18622 185 | 18755 186 | 18835 187 | 19042 188 | 19068 189 | 19084 190 | 19163 191 | 19239 192 | 19269 193 | 19338 194 | 19376 195 | 19805 196 | 19869 197 | 19878 198 | 20254 199 | 20311 200 | 20655 201 | 20719 202 | 20987 203 | 20989 204 | 21168 205 | 21216 206 | 21318 207 | 21361 208 | 21495 209 | 21778 210 | 21823 211 | 21898 212 | 21966 213 | 21967 214 | 21968 215 | 21985 216 | 22006 217 | 22222 218 | 22238 219 | 22324 220 | 22520 221 | 22753 222 | 22796 223 | 22865 224 | 22909 225 | 23045 226 | 23058 227 | 23125 228 | 23207 229 | 23283 230 | 23600 231 | 23612 232 | 23687 233 | 23738 234 | 23740 235 | 23911 236 | 23931 237 | 24297 238 | 24298 239 | 24299 240 | 24316 241 | 24391 242 | 24407 243 | 24448 244 | 24560 245 | 24596 246 | 24600 247 | 24651 248 | 24652 249 | 24703 250 | 24704 251 | 24732 252 | 24845 253 | 24846 254 | 24847 255 | 24911 256 | 24912 257 | 24955 258 | 25015 259 | 25016 260 | 25117 261 | 25203 262 | 25247 263 | 25316 264 | 25325 265 | 25429 266 | 25430 
267 | 25459 268 | 25462 269 | 25614 270 | 25669 271 | 25672 272 | 25769 273 | 25800 274 | 25910 275 | 25948 276 | 25951 277 | 26135 278 | 26197 279 | 26296 280 | 26570 281 | 26595 282 | 26596 283 | 26651 284 | 26713 285 | 26721 286 | 26852 287 | 26895 288 | 26896 289 | 26924 290 | 26976 291 | 27021 292 | 27175 293 | 27252 294 | 27318 295 | 27320 296 | 27322 297 | 27401 298 | 27404 299 | 27484 300 | 27485 301 | 27514 302 | 27542 303 | 27616 304 | 27747 305 | 27819 306 | 27870 307 | 27959 308 | 27960 309 | 28013 310 | 28014 311 | 28157 312 | 28227 313 | 28302 314 | 28304 315 | 28308 316 | 28341 317 | 28422 318 | 28423 319 | 28424 320 | 28459 321 | 28461 322 | 28462 323 | 28463 324 | 28536 325 | 28542 326 | 28544 327 | 28598 328 | 28599 329 | 28600 330 | 28602 331 | 28604 332 | 28609 333 | 28902 334 | 28910 335 | 28914 336 | 28957 337 | 28963 338 | 29030 339 | 29031 340 | 29037 341 | 29039 342 | 29136 343 | 29158 344 | 29236 345 | 29237 346 | 29304 347 | 29345 348 | 29406 349 | 29426 350 | 29451 351 | 29452 352 | 29521 353 | 29522 354 | 29595 355 | 29668 356 | 29704 357 | 29754 358 | 29813 359 | 29854 360 | 17110 361 | 17111 362 | 17119 363 | 17147 364 | 17303 365 | 17391 366 | 17430 367 | 17457 368 | 17518 369 | 17578 370 | 17662 371 | 17663 372 | 17758 373 | 17803 374 | 17934 375 | 17945 376 | 18016 377 | 18067 378 | 18087 379 | 18368 380 | 18413 381 | 18623 382 | 18688 383 | 18815 384 | 18882 385 | 18922 386 | 18958 387 | 19222 388 | 19454 389 | 19538 390 | 19870 391 | 19908 392 | 19920 393 | 19941 394 | 19994 395 | 20046 396 | 20127 397 | 20170 398 | 20237 399 | 20373 400 | 20436 401 | 20714 402 | 20736 403 | 20749 404 | 20857 405 | 20927 406 | 21298 407 | 21399 408 | 21701 409 | 21731 410 | 21965 411 | 21983 412 | 21993 413 | 22019 414 | 22071 415 | 22082 416 | 22171 417 | 22239 418 | 22325 419 | 22327 420 | 22328 421 | 22463 422 | 22521 423 | 22536 424 | 22584 425 | 22687 426 | 22751 427 | 22754 428 | 22797 429 | 22828 430 | 22864 431 | 22907 432 | 22908 433 | 23126 434 | 23127 435 | 23154 436 | 23263 437 | 23264 438 | 23463 439 | 23464 440 | 23493 441 | 23516 442 | 23532 443 | 23827 444 | 23855 445 | 23857 446 | 23912 447 | 23932 448 | 23933 449 | 23935 450 | 23982 451 | 23984 452 | 23985 453 | 24083 454 | 24084 455 | 24141 456 | 24275 457 | 24390 458 | 24449 459 | 24529 460 | 24561 461 | 24653 462 | 24701 463 | 24702 464 | 24733 465 | 24777 466 | 24839 467 | 24841 468 | 24842 469 | 24848 470 | 24850 471 | 24851 472 | 24890 473 | 24892 474 | 24893 475 | 24914 476 | 24915 477 | 25017 478 | 25040 479 | 25053 480 | 25118 481 | 25121 482 | 25201 483 | 25229 484 | 25230 485 | 25246 486 | 25578 487 | 25583 488 | 25612 489 | 25613 490 | 25615 491 | 25670 492 | 25671 493 | 25682 494 | 25908 495 | 25950 496 | 25976 497 | 25977 498 | 26004 499 | 26063 500 | 26064 501 | 26080 502 | 26134 503 | 26294 504 | 26342 505 | 26460 506 | 26675 507 | 26712 508 | 26754 509 | 26755 510 | 26773 511 | 26829 512 | 26830 513 | 26897 514 | 26925 515 | 26945 516 | 26973 517 | 26975 518 | 27024 519 | 27078 520 | 27131 521 | 27174 522 | 27250 523 | 27251 524 | 27321 525 | 27323 526 | 27363 527 | 27487 528 | 27543 529 | 27586 530 | 27587 531 | 27588 532 | 27614 533 | 27617 534 | 27752 535 | 27753 536 | 27777 537 | 27781 538 | 27782 539 | 27818 540 | 27839 541 | 27840 542 | 27866 543 | 27872 544 | 27893 545 | 27894 546 | 27943 547 | 28016 548 | 28082 549 | 28123 550 | 28124 551 | 28125 552 | 28152 553 | 28153 554 | 28156 555 | 28201 556 | 28208 557 | 28305 558 | 28329 559 | 28331 560 | 28334 561 | 28395 562 | 28416 
563 | 28417 564 | 28456 565 | 28537 566 | 28543 567 | 28595 568 | 28605 569 | 28606 570 | 28688 571 | 28690 572 | 28800 573 | 28911 574 | 28912 575 | 28917 576 | 28955 577 | 28959 578 | 29028 579 | 29034 580 | 29035 581 | 29133 582 | 29160 583 | 29162 584 | 29193 585 | 29195 586 | 29196 587 | 29238 588 | 29239 589 | 29241 590 | 29343 591 | 29344 592 | 29405 593 | 29409 594 | 29412 595 | 29422 596 | 29428 597 | 29453 598 | 29519 599 | 29523 600 | 29524 601 | 29568 602 | 29569 603 | 29593 604 | 29594 605 | 29597 606 | 29663 607 | 29666 608 | 29753 609 | 29755 610 | 29779 611 | 29855 612 | 17139 613 | 17146 614 | 17150 615 | 17192 616 | 17398 617 | 17399 618 | 17460 619 | 17501 620 | 17811 621 | 17896 622 | 17898 623 | 17902 624 | 17912 625 | 17933 626 | 17963 627 | 17992 628 | 18032 629 | 18141 630 | 18263 631 | 18473 632 | 18485 633 | 18930 634 | 19022 635 | 19030 636 | 19069 637 | 19180 638 | 19193 639 | 19321 640 | 19960 641 | 20093 642 | 20120 643 | 20274 644 | 20422 645 | 20428 646 | 20474 647 | 20560 648 | 20584 649 | 20597 650 | 21066 651 | 21165 652 | 21429 653 | 21732 654 | 21740 655 | 21777 656 | 21909 657 | 22072 658 | 22172 659 | 22468 660 | 22533 661 | 22535 662 | 22585 663 | 22586 664 | 22643 665 | 22862 666 | 22970 667 | 23153 668 | 23205 669 | 23494 670 | 23583 671 | 23614 672 | 23854 673 | 23856 674 | 24081 675 | 24236 676 | 24389 677 | 24406 678 | 24530 679 | 24650 680 | 24776 681 | 24849 682 | 24954 683 | 25204 684 | 25227 685 | 25228 686 | 25460 687 | 25576 688 | 25577 689 | 25771 690 | 25801 691 | 25909 692 | 25952 693 | 25978 694 | 26006 695 | 26062 696 | 26133 697 | 26173 698 | 26175 699 | 26188 700 | 26213 701 | 26215 702 | 26403 703 | 26463 704 | 26571 705 | 26597 706 | 26676 707 | 26714 708 | 26720 709 | 26774 710 | 26828 711 | 26832 712 | 26850 713 | 26898 714 | 26923 715 | 26942 716 | 26943 717 | 26944 718 | 26971 719 | 26972 720 | 26974 721 | 27018 722 | 27022 723 | 27023 724 | 27047 725 | 27084 726 | 27085 727 | 27128 728 | 27129 729 | 27234 730 | 27317 731 | 27319 732 | 27402 733 | 27511 734 | 27513 735 | 27515 736 | 27751 737 | 27754 738 | 27776 739 | 27949 740 | 27961 741 | 28012 742 | 28043 743 | 28044 744 | 28083 745 | 28122 746 | 28202 747 | 28301 748 | 28337 749 | 28420 750 | 28454 751 | 28538 752 | 28540 753 | 28541 754 | 28596 755 | 28684 756 | 28797 757 | 28798 758 | 28901 759 | 28915 760 | 28956 761 | 29029 762 | 29033 763 | 29135 764 | 29138 765 | 29157 766 | 29159 767 | 29161 768 | 29191 769 | 29192 770 | 29240 771 | 29301 772 | 29346 773 | 29347 774 | 29408 775 | 29410 776 | 29429 777 | 29449 778 | 29450 779 | 29456 780 | 29570 781 | 29662 782 | 29667 783 | 29701 784 | 29758 785 | 29777 786 | 29814 787 | 16748 788 | 17108 789 | 17116 790 | 17254 791 | 17431 792 | 17447 793 | 17534 794 | 17610 795 | 17655 796 | 17695 797 | 17711 798 | 17874 799 | 18181 800 | 18272 801 | 18375 802 | 18498 803 | 19251 804 | 19357 805 | 19745 806 | 19751 807 | 19781 808 | 20080 809 | 20273 810 | 20348 811 | 20609 812 | 20763 813 | 20985 814 | 21077 815 | 21116 816 | 21133 817 | 21248 818 | 21466 819 | 22018 820 | 22020 821 | 22081 822 | 22368 823 | 22371 824 | 22642 825 | 22685 826 | 22686 827 | 22852 828 | 22863 829 | 22955 830 | 22971 831 | 23006 832 | 23043 833 | 23044 834 | 23262 835 | 23318 836 | 23409 837 | 23531 838 | 23667 839 | 23668 840 | 23686 841 | 23828 842 | 23830 843 | 23983 844 | 24082 845 | 24140 846 | 24181 847 | 24274 848 | 24597 849 | 24598 850 | 24778 851 | 24838 852 | 25052 853 | 25233 854 | 25431 855 | 25437 856 | 25461 857 | 25616 858 | 25748 
859 | 25911 860 | 25912 861 | 25913 862 | 25914 863 | 25949 864 | 26005 865 | 26172 866 | 26214 867 | 26255 868 | 26598 869 | 26753 870 | 26831 871 | 26851 872 | 27049 873 | 27079 874 | 27130 875 | 27233 876 | 27316 877 | 27541 878 | 27778 879 | 27779 880 | 27867 881 | 27871 882 | 28017 883 | 28045 884 | 28204 885 | 28226 886 | 28327 887 | 28330 888 | 28333 889 | 28335 890 | 28339 891 | 28342 892 | 28396 893 | 28421 894 | 28535 895 | 28545 896 | 28601 897 | 28687 898 | 28801 899 | 29038 900 | 29194 901 | 29303 902 | 29305 903 | 29306 904 | 29407 905 | 29419 906 | 29424 907 | 29455 908 | 29481 909 | 29482 910 | 29598 911 | 29756 912 | 29757 913 | 29780 914 | 17118 915 | 17145 916 | 17239 917 | 17341 918 | 17384 919 | 17757 920 | 17886 921 | 18004 922 | 18068 923 | 18080 924 | 18088 925 | 18273 926 | 18274 927 | 18283 928 | 18307 929 | 18396 930 | 18501 931 | 19061 932 | 19121 933 | 19530 934 | 19621 935 | 19673 936 | 19744 937 | 19752 938 | 19836 939 | 20005 940 | 20107 941 | 20272 942 | 20486 943 | 20619 944 | 20684 945 | 20864 946 | 20994 947 | 21199 948 | 21378 949 | 21733 950 | 21741 951 | 21910 952 | 22050 953 | 22051 954 | 22174 955 | 22250 956 | 22462 957 | 22522 958 | 22534 959 | 22613 960 | 22752 961 | 22957 962 | 23001 963 | 23075 964 | 23076 965 | 23206 966 | 23228 967 | 23229 968 | 23281 969 | 23282 970 | 23356 971 | 23517 972 | 23720 973 | 23934 974 | 24182 975 | 24559 976 | 24731 977 | 25099 978 | 25205 979 | 25428 980 | 25579 981 | 25582 982 | 25611 983 | 25947 984 | 26026 985 | 26171 986 | 26400 987 | 26401 988 | 26461 989 | 27365 990 | 27946 991 | 28019 992 | 28228 993 | 28603 994 | 29036 995 | 29425 996 | 29454 997 | 29520 998 | 29596 999 | 29669 1000 | 29778 1001 | 29811 1002 | 29812 1003 | 17109 1004 | 17117 1005 | 17144 1006 | 18136 1007 | 18376 1008 | 18414 1009 | 18480 1010 | 18733 1011 | 19418 1012 | 19837 1013 | 19880 1014 | 19979 1015 | 20185 1016 | 21146 1017 | 22249 1018 | 22631 1019 | 22795 1020 | 22956 1021 | 24528 1022 | 25770 1023 | 26256 1024 | 27048 1025 | 27486 1026 | 28338 1027 | 28608 1028 | -------------------------------------------------------------------------------- /data/scaledata/Dennis+Schwartz/label.3class.Dennis+Schwartz: -------------------------------------------------------------------------------- 1 | 0 2 | 0 3 | 0 4 | 0 5 | 0 6 | 0 7 | 0 8 | 0 9 | 0 10 | 0 11 | 0 12 | 0 13 | 0 14 | 0 15 | 0 16 | 0 17 | 0 18 | 0 19 | 0 20 | 0 21 | 0 22 | 0 23 | 0 24 | 0 25 | 0 26 | 0 27 | 0 28 | 0 29 | 0 30 | 0 31 | 0 32 | 0 33 | 0 34 | 0 35 | 0 36 | 0 37 | 0 38 | 0 39 | 0 40 | 0 41 | 0 42 | 0 43 | 0 44 | 0 45 | 0 46 | 0 47 | 0 48 | 0 49 | 0 50 | 0 51 | 0 52 | 0 53 | 0 54 | 0 55 | 0 56 | 0 57 | 0 58 | 0 59 | 0 60 | 0 61 | 0 62 | 0 63 | 0 64 | 0 65 | 0 66 | 0 67 | 0 68 | 0 69 | 0 70 | 0 71 | 0 72 | 0 73 | 0 74 | 0 75 | 0 76 | 0 77 | 0 78 | 0 79 | 0 80 | 0 81 | 0 82 | 0 83 | 0 84 | 0 85 | 0 86 | 0 87 | 0 88 | 0 89 | 0 90 | 0 91 | 0 92 | 0 93 | 0 94 | 0 95 | 0 96 | 0 97 | 0 98 | 0 99 | 0 100 | 0 101 | 0 102 | 0 103 | 0 104 | 0 105 | 0 106 | 0 107 | 0 108 | 0 109 | 0 110 | 0 111 | 0 112 | 0 113 | 0 114 | 0 115 | 0 116 | 0 117 | 0 118 | 0 119 | 0 120 | 0 121 | 0 122 | 0 123 | 0 124 | 0 125 | 0 126 | 0 127 | 0 128 | 0 129 | 0 130 | 0 131 | 0 132 | 0 133 | 0 134 | 0 135 | 0 136 | 0 137 | 0 138 | 0 139 | 0 140 | 0 141 | 0 142 | 0 143 | 0 144 | 0 145 | 0 146 | 0 147 | 0 148 | 0 149 | 0 150 | 0 151 | 0 152 | 0 153 | 0 154 | 0 155 | 0 156 | 0 157 | 0 158 | 0 159 | 0 160 | 0 161 | 0 162 | 0 163 | 0 164 | 0 165 | 0 166 | 0 167 | 0 168 | 0 169 | 0 170 | 0 171 | 0 172 | 0 
173 | 0 174 | 0 175 | 0 176 | 0 177 | 0 178 | 0 179 | 0 180 | 0 181 | 0 182 | 0 183 | 0 184 | 0 185 | 0 186 | 0 187 | 0 188 | 0 189 | 0 190 | 0 191 | 0 192 | 0 193 | 0 194 | 0 195 | 0 196 | 0 197 | 0 198 | 0 199 | 0 200 | 0 201 | 0 202 | 0 203 | 0 204 | 0 205 | 0 206 | 0 207 | 0 208 | 0 209 | 0 210 | 0 211 | 0 212 | 0 213 | 0 214 | 0 215 | 0 216 | 0 217 | 0 218 | 0 219 | 0 220 | 0 221 | 0 222 | 0 223 | 0 224 | 0 225 | 0 226 | 0 227 | 0 228 | 0 229 | 0 230 | 0 231 | 0 232 | 0 233 | 0 234 | 0 235 | 0 236 | 0 237 | 0 238 | 0 239 | 0 240 | 0 241 | 0 242 | 0 243 | 0 244 | 0 245 | 0 246 | 0 247 | 0 248 | 0 249 | 0 250 | 0 251 | 0 252 | 0 253 | 0 254 | 0 255 | 0 256 | 0 257 | 0 258 | 0 259 | 0 260 | 0 261 | 0 262 | 0 263 | 0 264 | 0 265 | 0 266 | 0 267 | 0 268 | 0 269 | 0 270 | 0 271 | 0 272 | 0 273 | 0 274 | 0 275 | 0 276 | 0 277 | 0 278 | 0 279 | 0 280 | 0 281 | 0 282 | 0 283 | 0 284 | 0 285 | 0 286 | 0 287 | 0 288 | 0 289 | 0 290 | 0 291 | 0 292 | 0 293 | 0 294 | 0 295 | 0 296 | 0 297 | 0 298 | 0 299 | 0 300 | 0 301 | 0 302 | 0 303 | 0 304 | 0 305 | 0 306 | 0 307 | 0 308 | 0 309 | 0 310 | 0 311 | 0 312 | 0 313 | 0 314 | 0 315 | 0 316 | 0 317 | 0 318 | 0 319 | 0 320 | 0 321 | 0 322 | 0 323 | 0 324 | 0 325 | 0 326 | 0 327 | 0 328 | 0 329 | 0 330 | 0 331 | 0 332 | 0 333 | 0 334 | 0 335 | 0 336 | 0 337 | 0 338 | 0 339 | 0 340 | 0 341 | 0 342 | 0 343 | 0 344 | 0 345 | 0 346 | 0 347 | 0 348 | 0 349 | 0 350 | 0 351 | 0 352 | 0 353 | 0 354 | 0 355 | 0 356 | 0 357 | 0 358 | 0 359 | 0 360 | 1 361 | 1 362 | 1 363 | 1 364 | 1 365 | 1 366 | 1 367 | 1 368 | 1 369 | 1 370 | 1 371 | 1 372 | 1 373 | 1 374 | 1 375 | 1 376 | 1 377 | 1 378 | 1 379 | 1 380 | 1 381 | 1 382 | 1 383 | 1 384 | 1 385 | 1 386 | 1 387 | 1 388 | 1 389 | 1 390 | 1 391 | 1 392 | 1 393 | 1 394 | 1 395 | 1 396 | 1 397 | 1 398 | 1 399 | 1 400 | 1 401 | 1 402 | 1 403 | 1 404 | 1 405 | 1 406 | 1 407 | 1 408 | 1 409 | 1 410 | 1 411 | 1 412 | 1 413 | 1 414 | 1 415 | 1 416 | 1 417 | 1 418 | 1 419 | 1 420 | 1 421 | 1 422 | 1 423 | 1 424 | 1 425 | 1 426 | 1 427 | 1 428 | 1 429 | 1 430 | 1 431 | 1 432 | 1 433 | 1 434 | 1 435 | 1 436 | 1 437 | 1 438 | 1 439 | 1 440 | 1 441 | 1 442 | 1 443 | 1 444 | 1 445 | 1 446 | 1 447 | 1 448 | 1 449 | 1 450 | 1 451 | 1 452 | 1 453 | 1 454 | 1 455 | 1 456 | 1 457 | 1 458 | 1 459 | 1 460 | 1 461 | 1 462 | 1 463 | 1 464 | 1 465 | 1 466 | 1 467 | 1 468 | 1 469 | 1 470 | 1 471 | 1 472 | 1 473 | 1 474 | 1 475 | 1 476 | 1 477 | 1 478 | 1 479 | 1 480 | 1 481 | 1 482 | 1 483 | 1 484 | 1 485 | 1 486 | 1 487 | 1 488 | 1 489 | 1 490 | 1 491 | 1 492 | 1 493 | 1 494 | 1 495 | 1 496 | 1 497 | 1 498 | 1 499 | 1 500 | 1 501 | 1 502 | 1 503 | 1 504 | 1 505 | 1 506 | 1 507 | 1 508 | 1 509 | 1 510 | 1 511 | 1 512 | 1 513 | 1 514 | 1 515 | 1 516 | 1 517 | 1 518 | 1 519 | 1 520 | 1 521 | 1 522 | 1 523 | 1 524 | 1 525 | 1 526 | 1 527 | 1 528 | 1 529 | 1 530 | 1 531 | 1 532 | 1 533 | 1 534 | 1 535 | 1 536 | 1 537 | 1 538 | 1 539 | 1 540 | 1 541 | 1 542 | 1 543 | 1 544 | 1 545 | 1 546 | 1 547 | 1 548 | 1 549 | 1 550 | 1 551 | 1 552 | 1 553 | 1 554 | 1 555 | 1 556 | 1 557 | 1 558 | 1 559 | 1 560 | 1 561 | 1 562 | 1 563 | 1 564 | 1 565 | 1 566 | 1 567 | 1 568 | 1 569 | 1 570 | 1 571 | 1 572 | 1 573 | 1 574 | 1 575 | 1 576 | 1 577 | 1 578 | 1 579 | 1 580 | 1 581 | 1 582 | 1 583 | 1 584 | 1 585 | 1 586 | 1 587 | 1 588 | 1 589 | 1 590 | 1 591 | 1 592 | 1 593 | 1 594 | 1 595 | 1 596 | 1 597 | 1 598 | 1 599 | 1 600 | 1 601 | 1 602 | 1 603 | 1 604 | 1 605 | 1 606 | 1 607 | 1 608 | 1 609 | 1 610 | 1 611 | 1 612 | 1 613 | 1 614 | 1 615 | 1 616 | 1 
617 | 1 618 | 1 619 | 1 620 | 1 621 | 1 622 | 1 623 | 1 624 | 1 625 | 1 626 | 1 627 | 1 628 | 1 629 | 1 630 | 1 631 | 1 632 | 1 633 | 1 634 | 1 635 | 1 636 | 1 637 | 1 638 | 1 639 | 1 640 | 1 641 | 1 642 | 1 643 | 1 644 | 1 645 | 1 646 | 1 647 | 1 648 | 1 649 | 1 650 | 1 651 | 1 652 | 1 653 | 1 654 | 1 655 | 1 656 | 1 657 | 1 658 | 1 659 | 1 660 | 1 661 | 1 662 | 1 663 | 1 664 | 1 665 | 1 666 | 1 667 | 1 668 | 1 669 | 1 670 | 1 671 | 1 672 | 1 673 | 1 674 | 1 675 | 1 676 | 1 677 | 1 678 | 1 679 | 1 680 | 1 681 | 1 682 | 1 683 | 1 684 | 1 685 | 1 686 | 1 687 | 1 688 | 1 689 | 1 690 | 1 691 | 1 692 | 1 693 | 1 694 | 1 695 | 1 696 | 1 697 | 1 698 | 1 699 | 1 700 | 1 701 | 1 702 | 1 703 | 1 704 | 1 705 | 1 706 | 1 707 | 1 708 | 1 709 | 1 710 | 1 711 | 1 712 | 1 713 | 1 714 | 1 715 | 1 716 | 1 717 | 1 718 | 1 719 | 1 720 | 1 721 | 1 722 | 1 723 | 1 724 | 1 725 | 1 726 | 1 727 | 1 728 | 1 729 | 1 730 | 1 731 | 1 732 | 1 733 | 1 734 | 1 735 | 1 736 | 1 737 | 1 738 | 1 739 | 1 740 | 1 741 | 1 742 | 1 743 | 1 744 | 1 745 | 1 746 | 1 747 | 1 748 | 1 749 | 1 750 | 1 751 | 1 752 | 1 753 | 1 754 | 1 755 | 1 756 | 1 757 | 1 758 | 1 759 | 1 760 | 1 761 | 1 762 | 1 763 | 1 764 | 1 765 | 1 766 | 1 767 | 1 768 | 1 769 | 1 770 | 1 771 | 1 772 | 1 773 | 1 774 | 1 775 | 1 776 | 1 777 | 1 778 | 1 779 | 1 780 | 1 781 | 1 782 | 1 783 | 1 784 | 1 785 | 1 786 | 1 787 | 2 788 | 2 789 | 2 790 | 2 791 | 2 792 | 2 793 | 2 794 | 2 795 | 2 796 | 2 797 | 2 798 | 2 799 | 2 800 | 2 801 | 2 802 | 2 803 | 2 804 | 2 805 | 2 806 | 2 807 | 2 808 | 2 809 | 2 810 | 2 811 | 2 812 | 2 813 | 2 814 | 2 815 | 2 816 | 2 817 | 2 818 | 2 819 | 2 820 | 2 821 | 2 822 | 2 823 | 2 824 | 2 825 | 2 826 | 2 827 | 2 828 | 2 829 | 2 830 | 2 831 | 2 832 | 2 833 | 2 834 | 2 835 | 2 836 | 2 837 | 2 838 | 2 839 | 2 840 | 2 841 | 2 842 | 2 843 | 2 844 | 2 845 | 2 846 | 2 847 | 2 848 | 2 849 | 2 850 | 2 851 | 2 852 | 2 853 | 2 854 | 2 855 | 2 856 | 2 857 | 2 858 | 2 859 | 2 860 | 2 861 | 2 862 | 2 863 | 2 864 | 2 865 | 2 866 | 2 867 | 2 868 | 2 869 | 2 870 | 2 871 | 2 872 | 2 873 | 2 874 | 2 875 | 2 876 | 2 877 | 2 878 | 2 879 | 2 880 | 2 881 | 2 882 | 2 883 | 2 884 | 2 885 | 2 886 | 2 887 | 2 888 | 2 889 | 2 890 | 2 891 | 2 892 | 2 893 | 2 894 | 2 895 | 2 896 | 2 897 | 2 898 | 2 899 | 2 900 | 2 901 | 2 902 | 2 903 | 2 904 | 2 905 | 2 906 | 2 907 | 2 908 | 2 909 | 2 910 | 2 911 | 2 912 | 2 913 | 2 914 | 2 915 | 2 916 | 2 917 | 2 918 | 2 919 | 2 920 | 2 921 | 2 922 | 2 923 | 2 924 | 2 925 | 2 926 | 2 927 | 2 928 | 2 929 | 2 930 | 2 931 | 2 932 | 2 933 | 2 934 | 2 935 | 2 936 | 2 937 | 2 938 | 2 939 | 2 940 | 2 941 | 2 942 | 2 943 | 2 944 | 2 945 | 2 946 | 2 947 | 2 948 | 2 949 | 2 950 | 2 951 | 2 952 | 2 953 | 2 954 | 2 955 | 2 956 | 2 957 | 2 958 | 2 959 | 2 960 | 2 961 | 2 962 | 2 963 | 2 964 | 2 965 | 2 966 | 2 967 | 2 968 | 2 969 | 2 970 | 2 971 | 2 972 | 2 973 | 2 974 | 2 975 | 2 976 | 2 977 | 2 978 | 2 979 | 2 980 | 2 981 | 2 982 | 2 983 | 2 984 | 2 985 | 2 986 | 2 987 | 2 988 | 2 989 | 2 990 | 2 991 | 2 992 | 2 993 | 2 994 | 2 995 | 2 996 | 2 997 | 2 998 | 2 999 | 2 1000 | 2 1001 | 2 1002 | 2 1003 | 2 1004 | 2 1005 | 2 1006 | 2 1007 | 2 1008 | 2 1009 | 2 1010 | 2 1011 | 2 1012 | 2 1013 | 2 1014 | 2 1015 | 2 1016 | 2 1017 | 2 1018 | 2 1019 | 2 1020 | 2 1021 | 2 1022 | 2 1023 | 2 1024 | 2 1025 | 2 1026 | 2 1027 | 2 1028 | -------------------------------------------------------------------------------- /data/scaledata/Dennis+Schwartz/label.4class.Dennis+Schwartz: -------------------------------------------------------------------------------- 1 | 0 2 
| 0 3 | 0 4 | 0 5 | 0 6 | 0 7 | 0 8 | 0 9 | 0 10 | 0 11 | 0 12 | 0 13 | 0 14 | 0 15 | 0 16 | 0 17 | 0 18 | 0 19 | 0 20 | 0 21 | 0 22 | 0 23 | 0 24 | 0 25 | 0 26 | 0 27 | 0 28 | 0 29 | 0 30 | 0 31 | 0 32 | 0 33 | 0 34 | 0 35 | 0 36 | 0 37 | 0 38 | 0 39 | 0 40 | 0 41 | 0 42 | 0 43 | 0 44 | 0 45 | 0 46 | 0 47 | 0 48 | 0 49 | 0 50 | 0 51 | 0 52 | 0 53 | 0 54 | 0 55 | 0 56 | 0 57 | 0 58 | 0 59 | 0 60 | 0 61 | 0 62 | 0 63 | 0 64 | 0 65 | 0 66 | 0 67 | 0 68 | 0 69 | 0 70 | 0 71 | 0 72 | 0 73 | 0 74 | 0 75 | 0 76 | 0 77 | 0 78 | 0 79 | 0 80 | 0 81 | 0 82 | 0 83 | 0 84 | 0 85 | 0 86 | 0 87 | 0 88 | 0 89 | 0 90 | 0 91 | 0 92 | 0 93 | 0 94 | 0 95 | 0 96 | 0 97 | 0 98 | 0 99 | 0 100 | 0 101 | 0 102 | 0 103 | 0 104 | 0 105 | 0 106 | 0 107 | 0 108 | 0 109 | 0 110 | 0 111 | 0 112 | 0 113 | 0 114 | 0 115 | 0 116 | 0 117 | 0 118 | 0 119 | 0 120 | 0 121 | 0 122 | 0 123 | 0 124 | 0 125 | 0 126 | 0 127 | 0 128 | 0 129 | 0 130 | 0 131 | 0 132 | 0 133 | 0 134 | 0 135 | 0 136 | 0 137 | 0 138 | 0 139 | 0 140 | 0 141 | 0 142 | 0 143 | 0 144 | 0 145 | 0 146 | 0 147 | 0 148 | 0 149 | 0 150 | 0 151 | 0 152 | 0 153 | 0 154 | 0 155 | 0 156 | 0 157 | 0 158 | 0 159 | 0 160 | 0 161 | 0 162 | 0 163 | 0 164 | 0 165 | 0 166 | 0 167 | 0 168 | 0 169 | 0 170 | 0 171 | 0 172 | 1 173 | 1 174 | 1 175 | 1 176 | 1 177 | 1 178 | 1 179 | 1 180 | 1 181 | 1 182 | 1 183 | 1 184 | 1 185 | 1 186 | 1 187 | 1 188 | 1 189 | 1 190 | 1 191 | 1 192 | 1 193 | 1 194 | 1 195 | 1 196 | 1 197 | 1 198 | 1 199 | 1 200 | 1 201 | 1 202 | 1 203 | 1 204 | 1 205 | 1 206 | 1 207 | 1 208 | 1 209 | 1 210 | 1 211 | 1 212 | 1 213 | 1 214 | 1 215 | 1 216 | 1 217 | 1 218 | 1 219 | 1 220 | 1 221 | 1 222 | 1 223 | 1 224 | 1 225 | 1 226 | 1 227 | 1 228 | 1 229 | 1 230 | 1 231 | 1 232 | 1 233 | 1 234 | 1 235 | 1 236 | 1 237 | 1 238 | 1 239 | 1 240 | 1 241 | 1 242 | 1 243 | 1 244 | 1 245 | 1 246 | 1 247 | 1 248 | 1 249 | 1 250 | 1 251 | 1 252 | 1 253 | 1 254 | 1 255 | 1 256 | 1 257 | 1 258 | 1 259 | 1 260 | 1 261 | 1 262 | 1 263 | 1 264 | 1 265 | 1 266 | 1 267 | 1 268 | 1 269 | 1 270 | 1 271 | 1 272 | 1 273 | 1 274 | 1 275 | 1 276 | 1 277 | 1 278 | 1 279 | 1 280 | 1 281 | 1 282 | 1 283 | 1 284 | 1 285 | 1 286 | 1 287 | 1 288 | 1 289 | 1 290 | 1 291 | 1 292 | 1 293 | 1 294 | 1 295 | 1 296 | 1 297 | 1 298 | 1 299 | 1 300 | 1 301 | 1 302 | 1 303 | 1 304 | 1 305 | 1 306 | 1 307 | 1 308 | 1 309 | 1 310 | 1 311 | 1 312 | 1 313 | 1 314 | 1 315 | 1 316 | 1 317 | 1 318 | 1 319 | 1 320 | 1 321 | 1 322 | 1 323 | 1 324 | 1 325 | 1 326 | 1 327 | 1 328 | 1 329 | 1 330 | 1 331 | 1 332 | 1 333 | 1 334 | 1 335 | 1 336 | 1 337 | 1 338 | 1 339 | 1 340 | 1 341 | 1 342 | 1 343 | 1 344 | 1 345 | 1 346 | 1 347 | 1 348 | 1 349 | 1 350 | 1 351 | 1 352 | 1 353 | 1 354 | 1 355 | 1 356 | 1 357 | 1 358 | 1 359 | 1 360 | 1 361 | 1 362 | 1 363 | 1 364 | 1 365 | 1 366 | 1 367 | 1 368 | 1 369 | 1 370 | 1 371 | 1 372 | 1 373 | 1 374 | 1 375 | 1 376 | 1 377 | 1 378 | 1 379 | 1 380 | 1 381 | 1 382 | 1 383 | 1 384 | 1 385 | 1 386 | 1 387 | 1 388 | 1 389 | 1 390 | 1 391 | 1 392 | 1 393 | 1 394 | 1 395 | 1 396 | 1 397 | 1 398 | 1 399 | 1 400 | 1 401 | 1 402 | 1 403 | 1 404 | 1 405 | 1 406 | 1 407 | 1 408 | 1 409 | 1 410 | 1 411 | 1 412 | 1 413 | 1 414 | 1 415 | 1 416 | 1 417 | 1 418 | 1 419 | 1 420 | 1 421 | 1 422 | 1 423 | 1 424 | 1 425 | 1 426 | 1 427 | 1 428 | 1 429 | 1 430 | 1 431 | 1 432 | 1 433 | 1 434 | 1 435 | 1 436 | 1 437 | 1 438 | 1 439 | 1 440 | 1 441 | 1 442 | 1 443 | 1 444 | 1 445 | 1 446 | 1 447 | 1 448 | 1 449 | 1 450 | 1 451 | 1 452 | 1 453 | 1 454 | 1 455 | 1 456 | 1 457 | 1 458 | 1 459 | 
1 460 | 1 461 | 1 462 | 1 463 | 1 464 | 1 465 | 1 466 | 1 467 | 1 468 | 1 469 | 1 470 | 1 471 | 1 472 | 1 473 | 1 474 | 1 475 | 1 476 | 1 477 | 1 478 | 1 479 | 1 480 | 1 481 | 1 482 | 1 483 | 1 484 | 1 485 | 1 486 | 1 487 | 1 488 | 1 489 | 1 490 | 1 491 | 1 492 | 1 493 | 1 494 | 1 495 | 1 496 | 1 497 | 1 498 | 1 499 | 1 500 | 1 501 | 1 502 | 1 503 | 1 504 | 1 505 | 1 506 | 1 507 | 1 508 | 1 509 | 1 510 | 1 511 | 1 512 | 1 513 | 1 514 | 1 515 | 1 516 | 1 517 | 1 518 | 1 519 | 1 520 | 1 521 | 1 522 | 1 523 | 1 524 | 1 525 | 1 526 | 1 527 | 1 528 | 1 529 | 1 530 | 1 531 | 1 532 | 1 533 | 1 534 | 1 535 | 1 536 | 1 537 | 1 538 | 1 539 | 1 540 | 1 541 | 1 542 | 1 543 | 1 544 | 1 545 | 1 546 | 1 547 | 1 548 | 1 549 | 1 550 | 1 551 | 1 552 | 1 553 | 1 554 | 1 555 | 1 556 | 1 557 | 1 558 | 1 559 | 1 560 | 1 561 | 1 562 | 1 563 | 1 564 | 1 565 | 1 566 | 1 567 | 1 568 | 1 569 | 1 570 | 1 571 | 1 572 | 1 573 | 1 574 | 1 575 | 1 576 | 1 577 | 1 578 | 1 579 | 1 580 | 1 581 | 1 582 | 1 583 | 1 584 | 1 585 | 1 586 | 1 587 | 1 588 | 1 589 | 1 590 | 1 591 | 1 592 | 1 593 | 1 594 | 1 595 | 1 596 | 1 597 | 1 598 | 1 599 | 1 600 | 1 601 | 1 602 | 1 603 | 1 604 | 1 605 | 1 606 | 1 607 | 1 608 | 1 609 | 1 610 | 1 611 | 1 612 | 2 613 | 2 614 | 2 615 | 2 616 | 2 617 | 2 618 | 2 619 | 2 620 | 2 621 | 2 622 | 2 623 | 2 624 | 2 625 | 2 626 | 2 627 | 2 628 | 2 629 | 2 630 | 2 631 | 2 632 | 2 633 | 2 634 | 2 635 | 2 636 | 2 637 | 2 638 | 2 639 | 2 640 | 2 641 | 2 642 | 2 643 | 2 644 | 2 645 | 2 646 | 2 647 | 2 648 | 2 649 | 2 650 | 2 651 | 2 652 | 2 653 | 2 654 | 2 655 | 2 656 | 2 657 | 2 658 | 2 659 | 2 660 | 2 661 | 2 662 | 2 663 | 2 664 | 2 665 | 2 666 | 2 667 | 2 668 | 2 669 | 2 670 | 2 671 | 2 672 | 2 673 | 2 674 | 2 675 | 2 676 | 2 677 | 2 678 | 2 679 | 2 680 | 2 681 | 2 682 | 2 683 | 2 684 | 2 685 | 2 686 | 2 687 | 2 688 | 2 689 | 2 690 | 2 691 | 2 692 | 2 693 | 2 694 | 2 695 | 2 696 | 2 697 | 2 698 | 2 699 | 2 700 | 2 701 | 2 702 | 2 703 | 2 704 | 2 705 | 2 706 | 2 707 | 2 708 | 2 709 | 2 710 | 2 711 | 2 712 | 2 713 | 2 714 | 2 715 | 2 716 | 2 717 | 2 718 | 2 719 | 2 720 | 2 721 | 2 722 | 2 723 | 2 724 | 2 725 | 2 726 | 2 727 | 2 728 | 2 729 | 2 730 | 2 731 | 2 732 | 2 733 | 2 734 | 2 735 | 2 736 | 2 737 | 2 738 | 2 739 | 2 740 | 2 741 | 2 742 | 2 743 | 2 744 | 2 745 | 2 746 | 2 747 | 2 748 | 2 749 | 2 750 | 2 751 | 2 752 | 2 753 | 2 754 | 2 755 | 2 756 | 2 757 | 2 758 | 2 759 | 2 760 | 2 761 | 2 762 | 2 763 | 2 764 | 2 765 | 2 766 | 2 767 | 2 768 | 2 769 | 2 770 | 2 771 | 2 772 | 2 773 | 2 774 | 2 775 | 2 776 | 2 777 | 2 778 | 2 779 | 2 780 | 2 781 | 2 782 | 2 783 | 2 784 | 2 785 | 2 786 | 2 787 | 2 788 | 2 789 | 2 790 | 2 791 | 2 792 | 2 793 | 2 794 | 2 795 | 2 796 | 2 797 | 2 798 | 2 799 | 2 800 | 2 801 | 2 802 | 2 803 | 2 804 | 2 805 | 2 806 | 2 807 | 2 808 | 2 809 | 2 810 | 2 811 | 2 812 | 2 813 | 2 814 | 2 815 | 2 816 | 2 817 | 2 818 | 2 819 | 2 820 | 2 821 | 2 822 | 2 823 | 2 824 | 2 825 | 2 826 | 2 827 | 2 828 | 2 829 | 2 830 | 2 831 | 2 832 | 2 833 | 2 834 | 2 835 | 2 836 | 2 837 | 2 838 | 2 839 | 2 840 | 2 841 | 2 842 | 2 843 | 2 844 | 2 845 | 2 846 | 2 847 | 2 848 | 2 849 | 2 850 | 2 851 | 2 852 | 2 853 | 2 854 | 2 855 | 2 856 | 2 857 | 2 858 | 2 859 | 2 860 | 2 861 | 2 862 | 2 863 | 2 864 | 2 865 | 2 866 | 2 867 | 2 868 | 2 869 | 2 870 | 2 871 | 2 872 | 2 873 | 2 874 | 2 875 | 2 876 | 2 877 | 2 878 | 2 879 | 2 880 | 2 881 | 2 882 | 2 883 | 2 884 | 2 885 | 2 886 | 2 887 | 2 888 | 2 889 | 2 890 | 2 891 | 2 892 | 2 893 | 2 894 | 2 895 | 2 896 | 2 897 | 2 898 | 2 899 | 2 900 | 2 901 | 2 902 | 2 903 | 2 
904 | 2 905 | 2 906 | 2 907 | 2 908 | 2 909 | 2 910 | 2 911 | 2 912 | 2 913 | 2 914 | 3 915 | 3 916 | 3 917 | 3 918 | 3 919 | 3 920 | 3 921 | 3 922 | 3 923 | 3 924 | 3 925 | 3 926 | 3 927 | 3 928 | 3 929 | 3 930 | 3 931 | 3 932 | 3 933 | 3 934 | 3 935 | 3 936 | 3 937 | 3 938 | 3 939 | 3 940 | 3 941 | 3 942 | 3 943 | 3 944 | 3 945 | 3 946 | 3 947 | 3 948 | 3 949 | 3 950 | 3 951 | 3 952 | 3 953 | 3 954 | 3 955 | 3 956 | 3 957 | 3 958 | 3 959 | 3 960 | 3 961 | 3 962 | 3 963 | 3 964 | 3 965 | 3 966 | 3 967 | 3 968 | 3 969 | 3 970 | 3 971 | 3 972 | 3 973 | 3 974 | 3 975 | 3 976 | 3 977 | 3 978 | 3 979 | 3 980 | 3 981 | 3 982 | 3 983 | 3 984 | 3 985 | 3 986 | 3 987 | 3 988 | 3 989 | 3 990 | 3 991 | 3 992 | 3 993 | 3 994 | 3 995 | 3 996 | 3 997 | 3 998 | 3 999 | 3 1000 | 3 1001 | 3 1002 | 3 1003 | 3 1004 | 3 1005 | 3 1006 | 3 1007 | 3 1008 | 3 1009 | 3 1010 | 3 1011 | 3 1012 | 3 1013 | 3 1014 | 3 1015 | 3 1016 | 3 1017 | 3 1018 | 3 1019 | 3 1020 | 3 1021 | 3 1022 | 3 1023 | 3 1024 | 3 1025 | 3 1026 | 3 1027 | 3 1028 | -------------------------------------------------------------------------------- /data/scaledata/Dennis+Schwartz/rating.Dennis+Schwartz: -------------------------------------------------------------------------------- 1 | 0.1 2 | 0.2 3 | 0.2 4 | 0.2 5 | 0.2 6 | 0.2 7 | 0.2 8 | 0.2 9 | 0.2 10 | 0.2 11 | 0.2 12 | 0.2 13 | 0.2 14 | 0.2 15 | 0.2 16 | 0.2 17 | 0.2 18 | 0.2 19 | 0.2 20 | 0.2 21 | 0.2 22 | 0.2 23 | 0.2 24 | 0.2 25 | 0.2 26 | 0.2 27 | 0.2 28 | 0.2 29 | 0.2 30 | 0.2 31 | 0.2 32 | 0.2 33 | 0.2 34 | 0.2 35 | 0.3 36 | 0.3 37 | 0.3 38 | 0.3 39 | 0.3 40 | 0.3 41 | 0.3 42 | 0.3 43 | 0.3 44 | 0.3 45 | 0.3 46 | 0.3 47 | 0.3 48 | 0.3 49 | 0.3 50 | 0.3 51 | 0.3 52 | 0.3 53 | 0.3 54 | 0.3 55 | 0.3 56 | 0.3 57 | 0.3 58 | 0.3 59 | 0.3 60 | 0.3 61 | 0.3 62 | 0.3 63 | 0.3 64 | 0.3 65 | 0.3 66 | 0.3 67 | 0.3 68 | 0.3 69 | 0.3 70 | 0.3 71 | 0.3 72 | 0.3 73 | 0.3 74 | 0.3 75 | 0.3 76 | 0.3 77 | 0.3 78 | 0.3 79 | 0.3 80 | 0.3 81 | 0.3 82 | 0.3 83 | 0.3 84 | 0.3 85 | 0.3 86 | 0.3 87 | 0.3 88 | 0.3 89 | 0.3 90 | 0.3 91 | 0.3 92 | 0.3 93 | 0.3 94 | 0.3 95 | 0.3 96 | 0.3 97 | 0.3 98 | 0.3 99 | 0.3 100 | 0.3 101 | 0.3 102 | 0.3 103 | 0.3 104 | 0.3 105 | 0.3 106 | 0.3 107 | 0.3 108 | 0.3 109 | 0.3 110 | 0.3 111 | 0.3 112 | 0.3 113 | 0.3 114 | 0.3 115 | 0.3 116 | 0.3 117 | 0.3 118 | 0.3 119 | 0.3 120 | 0.3 121 | 0.3 122 | 0.3 123 | 0.3 124 | 0.3 125 | 0.3 126 | 0.3 127 | 0.3 128 | 0.3 129 | 0.3 130 | 0.3 131 | 0.3 132 | 0.3 133 | 0.3 134 | 0.3 135 | 0.3 136 | 0.3 137 | 0.3 138 | 0.3 139 | 0.3 140 | 0.3 141 | 0.3 142 | 0.3 143 | 0.3 144 | 0.3 145 | 0.3 146 | 0.3 147 | 0.3 148 | 0.3 149 | 0.3 150 | 0.3 151 | 0.3 152 | 0.3 153 | 0.3 154 | 0.3 155 | 0.3 156 | 0.3 157 | 0.3 158 | 0.3 159 | 0.3 160 | 0.3 161 | 0.3 162 | 0.3 163 | 0.3 164 | 0.3 165 | 0.3 166 | 0.3 167 | 0.3 168 | 0.3 169 | 0.3 170 | 0.3 171 | 0.3 172 | 0.4 173 | 0.4 174 | 0.4 175 | 0.4 176 | 0.4 177 | 0.4 178 | 0.4 179 | 0.4 180 | 0.4 181 | 0.4 182 | 0.4 183 | 0.4 184 | 0.4 185 | 0.4 186 | 0.4 187 | 0.4 188 | 0.4 189 | 0.4 190 | 0.4 191 | 0.4 192 | 0.4 193 | 0.4 194 | 0.4 195 | 0.4 196 | 0.4 197 | 0.4 198 | 0.4 199 | 0.4 200 | 0.4 201 | 0.4 202 | 0.4 203 | 0.4 204 | 0.4 205 | 0.4 206 | 0.4 207 | 0.4 208 | 0.4 209 | 0.4 210 | 0.4 211 | 0.4 212 | 0.4 213 | 0.4 214 | 0.4 215 | 0.4 216 | 0.4 217 | 0.4 218 | 0.4 219 | 0.4 220 | 0.4 221 | 0.4 222 | 0.4 223 | 0.4 224 | 0.4 225 | 0.4 226 | 0.4 227 | 0.4 228 | 0.4 229 | 0.4 230 | 0.4 231 | 0.4 232 | 0.4 233 | 0.4 234 | 0.4 235 | 0.4 236 | 0.4 237 | 0.4 238 | 0.4 239 | 0.4 240 | 0.4 241 | 0.4 242 | 
0.4 243 | 0.4 244 | 0.4 245 | 0.4 246 | 0.4 247 | 0.4 248 | 0.4 249 | 0.4 250 | 0.4 251 | 0.4 252 | 0.4 253 | 0.4 254 | 0.4 255 | 0.4 256 | 0.4 257 | 0.4 258 | 0.4 259 | 0.4 260 | 0.4 261 | 0.4 262 | 0.4 263 | 0.4 264 | 0.4 265 | 0.4 266 | 0.4 267 | 0.4 268 | 0.4 269 | 0.4 270 | 0.4 271 | 0.4 272 | 0.4 273 | 0.4 274 | 0.4 275 | 0.4 276 | 0.4 277 | 0.4 278 | 0.4 279 | 0.4 280 | 0.4 281 | 0.4 282 | 0.4 283 | 0.4 284 | 0.4 285 | 0.4 286 | 0.4 287 | 0.4 288 | 0.4 289 | 0.4 290 | 0.4 291 | 0.4 292 | 0.4 293 | 0.4 294 | 0.4 295 | 0.4 296 | 0.4 297 | 0.4 298 | 0.4 299 | 0.4 300 | 0.4 301 | 0.4 302 | 0.4 303 | 0.4 304 | 0.4 305 | 0.4 306 | 0.4 307 | 0.4 308 | 0.4 309 | 0.4 310 | 0.4 311 | 0.4 312 | 0.4 313 | 0.4 314 | 0.4 315 | 0.4 316 | 0.4 317 | 0.4 318 | 0.4 319 | 0.4 320 | 0.4 321 | 0.4 322 | 0.4 323 | 0.4 324 | 0.4 325 | 0.4 326 | 0.4 327 | 0.4 328 | 0.4 329 | 0.4 330 | 0.4 331 | 0.4 332 | 0.4 333 | 0.4 334 | 0.4 335 | 0.4 336 | 0.4 337 | 0.4 338 | 0.4 339 | 0.4 340 | 0.4 341 | 0.4 342 | 0.4 343 | 0.4 344 | 0.4 345 | 0.4 346 | 0.4 347 | 0.4 348 | 0.4 349 | 0.4 350 | 0.4 351 | 0.4 352 | 0.4 353 | 0.4 354 | 0.4 355 | 0.4 356 | 0.4 357 | 0.4 358 | 0.4 359 | 0.4 360 | 0.5 361 | 0.5 362 | 0.5 363 | 0.5 364 | 0.5 365 | 0.5 366 | 0.5 367 | 0.5 368 | 0.5 369 | 0.5 370 | 0.5 371 | 0.5 372 | 0.5 373 | 0.5 374 | 0.5 375 | 0.5 376 | 0.5 377 | 0.5 378 | 0.5 379 | 0.5 380 | 0.5 381 | 0.5 382 | 0.5 383 | 0.5 384 | 0.5 385 | 0.5 386 | 0.5 387 | 0.5 388 | 0.5 389 | 0.5 390 | 0.5 391 | 0.5 392 | 0.5 393 | 0.5 394 | 0.5 395 | 0.5 396 | 0.5 397 | 0.5 398 | 0.5 399 | 0.5 400 | 0.5 401 | 0.5 402 | 0.5 403 | 0.5 404 | 0.5 405 | 0.5 406 | 0.5 407 | 0.5 408 | 0.5 409 | 0.5 410 | 0.5 411 | 0.5 412 | 0.5 413 | 0.5 414 | 0.5 415 | 0.5 416 | 0.5 417 | 0.5 418 | 0.5 419 | 0.5 420 | 0.5 421 | 0.5 422 | 0.5 423 | 0.5 424 | 0.5 425 | 0.5 426 | 0.5 427 | 0.5 428 | 0.5 429 | 0.5 430 | 0.5 431 | 0.5 432 | 0.5 433 | 0.5 434 | 0.5 435 | 0.5 436 | 0.5 437 | 0.5 438 | 0.5 439 | 0.5 440 | 0.5 441 | 0.5 442 | 0.5 443 | 0.5 444 | 0.5 445 | 0.5 446 | 0.5 447 | 0.5 448 | 0.5 449 | 0.5 450 | 0.5 451 | 0.5 452 | 0.5 453 | 0.5 454 | 0.5 455 | 0.5 456 | 0.5 457 | 0.5 458 | 0.5 459 | 0.5 460 | 0.5 461 | 0.5 462 | 0.5 463 | 0.5 464 | 0.5 465 | 0.5 466 | 0.5 467 | 0.5 468 | 0.5 469 | 0.5 470 | 0.5 471 | 0.5 472 | 0.5 473 | 0.5 474 | 0.5 475 | 0.5 476 | 0.5 477 | 0.5 478 | 0.5 479 | 0.5 480 | 0.5 481 | 0.5 482 | 0.5 483 | 0.5 484 | 0.5 485 | 0.5 486 | 0.5 487 | 0.5 488 | 0.5 489 | 0.5 490 | 0.5 491 | 0.5 492 | 0.5 493 | 0.5 494 | 0.5 495 | 0.5 496 | 0.5 497 | 0.5 498 | 0.5 499 | 0.5 500 | 0.5 501 | 0.5 502 | 0.5 503 | 0.5 504 | 0.5 505 | 0.5 506 | 0.5 507 | 0.5 508 | 0.5 509 | 0.5 510 | 0.5 511 | 0.5 512 | 0.5 513 | 0.5 514 | 0.5 515 | 0.5 516 | 0.5 517 | 0.5 518 | 0.5 519 | 0.5 520 | 0.5 521 | 0.5 522 | 0.5 523 | 0.5 524 | 0.5 525 | 0.5 526 | 0.5 527 | 0.5 528 | 0.5 529 | 0.5 530 | 0.5 531 | 0.5 532 | 0.5 533 | 0.5 534 | 0.5 535 | 0.5 536 | 0.5 537 | 0.5 538 | 0.5 539 | 0.5 540 | 0.5 541 | 0.5 542 | 0.5 543 | 0.5 544 | 0.5 545 | 0.5 546 | 0.5 547 | 0.5 548 | 0.5 549 | 0.5 550 | 0.5 551 | 0.5 552 | 0.5 553 | 0.5 554 | 0.5 555 | 0.5 556 | 0.5 557 | 0.5 558 | 0.5 559 | 0.5 560 | 0.5 561 | 0.5 562 | 0.5 563 | 0.5 564 | 0.5 565 | 0.5 566 | 0.5 567 | 0.5 568 | 0.5 569 | 0.5 570 | 0.5 571 | 0.5 572 | 0.5 573 | 0.5 574 | 0.5 575 | 0.5 576 | 0.5 577 | 0.5 578 | 0.5 579 | 0.5 580 | 0.5 581 | 0.5 582 | 0.5 583 | 0.5 584 | 0.5 585 | 0.5 586 | 0.5 587 | 0.5 588 | 0.5 589 | 0.5 590 | 0.5 591 | 0.5 592 | 0.5 593 | 0.5 594 | 0.5 595 | 0.5 596 | 0.5 597 | 0.5 
598 | 0.5 599 | 0.5 600 | 0.5 601 | 0.5 602 | 0.5 603 | 0.5 604 | 0.5 605 | 0.5 606 | 0.5 607 | 0.5 608 | 0.5 609 | 0.5 610 | 0.5 611 | 0.5 612 | 0.6 613 | 0.6 614 | 0.6 615 | 0.6 616 | 0.6 617 | 0.6 618 | 0.6 619 | 0.6 620 | 0.6 621 | 0.6 622 | 0.6 623 | 0.6 624 | 0.6 625 | 0.6 626 | 0.6 627 | 0.6 628 | 0.6 629 | 0.6 630 | 0.6 631 | 0.6 632 | 0.6 633 | 0.6 634 | 0.6 635 | 0.6 636 | 0.6 637 | 0.6 638 | 0.6 639 | 0.6 640 | 0.6 641 | 0.6 642 | 0.6 643 | 0.6 644 | 0.6 645 | 0.6 646 | 0.6 647 | 0.6 648 | 0.6 649 | 0.6 650 | 0.6 651 | 0.6 652 | 0.6 653 | 0.6 654 | 0.6 655 | 0.6 656 | 0.6 657 | 0.6 658 | 0.6 659 | 0.6 660 | 0.6 661 | 0.6 662 | 0.6 663 | 0.6 664 | 0.6 665 | 0.6 666 | 0.6 667 | 0.6 668 | 0.6 669 | 0.6 670 | 0.6 671 | 0.6 672 | 0.6 673 | 0.6 674 | 0.6 675 | 0.6 676 | 0.6 677 | 0.6 678 | 0.6 679 | 0.6 680 | 0.6 681 | 0.6 682 | 0.6 683 | 0.6 684 | 0.6 685 | 0.6 686 | 0.6 687 | 0.6 688 | 0.6 689 | 0.6 690 | 0.6 691 | 0.6 692 | 0.6 693 | 0.6 694 | 0.6 695 | 0.6 696 | 0.6 697 | 0.6 698 | 0.6 699 | 0.6 700 | 0.6 701 | 0.6 702 | 0.6 703 | 0.6 704 | 0.6 705 | 0.6 706 | 0.6 707 | 0.6 708 | 0.6 709 | 0.6 710 | 0.6 711 | 0.6 712 | 0.6 713 | 0.6 714 | 0.6 715 | 0.6 716 | 0.6 717 | 0.6 718 | 0.6 719 | 0.6 720 | 0.6 721 | 0.6 722 | 0.6 723 | 0.6 724 | 0.6 725 | 0.6 726 | 0.6 727 | 0.6 728 | 0.6 729 | 0.6 730 | 0.6 731 | 0.6 732 | 0.6 733 | 0.6 734 | 0.6 735 | 0.6 736 | 0.6 737 | 0.6 738 | 0.6 739 | 0.6 740 | 0.6 741 | 0.6 742 | 0.6 743 | 0.6 744 | 0.6 745 | 0.6 746 | 0.6 747 | 0.6 748 | 0.6 749 | 0.6 750 | 0.6 751 | 0.6 752 | 0.6 753 | 0.6 754 | 0.6 755 | 0.6 756 | 0.6 757 | 0.6 758 | 0.6 759 | 0.6 760 | 0.6 761 | 0.6 762 | 0.6 763 | 0.6 764 | 0.6 765 | 0.6 766 | 0.6 767 | 0.6 768 | 0.6 769 | 0.6 770 | 0.6 771 | 0.6 772 | 0.6 773 | 0.6 774 | 0.6 775 | 0.6 776 | 0.6 777 | 0.6 778 | 0.6 779 | 0.6 780 | 0.6 781 | 0.6 782 | 0.6 783 | 0.6 784 | 0.6 785 | 0.6 786 | 0.6 787 | 0.7 788 | 0.7 789 | 0.7 790 | 0.7 791 | 0.7 792 | 0.7 793 | 0.7 794 | 0.7 795 | 0.7 796 | 0.7 797 | 0.7 798 | 0.7 799 | 0.7 800 | 0.7 801 | 0.7 802 | 0.7 803 | 0.7 804 | 0.7 805 | 0.7 806 | 0.7 807 | 0.7 808 | 0.7 809 | 0.7 810 | 0.7 811 | 0.7 812 | 0.7 813 | 0.7 814 | 0.7 815 | 0.7 816 | 0.7 817 | 0.7 818 | 0.7 819 | 0.7 820 | 0.7 821 | 0.7 822 | 0.7 823 | 0.7 824 | 0.7 825 | 0.7 826 | 0.7 827 | 0.7 828 | 0.7 829 | 0.7 830 | 0.7 831 | 0.7 832 | 0.7 833 | 0.7 834 | 0.7 835 | 0.7 836 | 0.7 837 | 0.7 838 | 0.7 839 | 0.7 840 | 0.7 841 | 0.7 842 | 0.7 843 | 0.7 844 | 0.7 845 | 0.7 846 | 0.7 847 | 0.7 848 | 0.7 849 | 0.7 850 | 0.7 851 | 0.7 852 | 0.7 853 | 0.7 854 | 0.7 855 | 0.7 856 | 0.7 857 | 0.7 858 | 0.7 859 | 0.7 860 | 0.7 861 | 0.7 862 | 0.7 863 | 0.7 864 | 0.7 865 | 0.7 866 | 0.7 867 | 0.7 868 | 0.7 869 | 0.7 870 | 0.7 871 | 0.7 872 | 0.7 873 | 0.7 874 | 0.7 875 | 0.7 876 | 0.7 877 | 0.7 878 | 0.7 879 | 0.7 880 | 0.7 881 | 0.7 882 | 0.7 883 | 0.7 884 | 0.7 885 | 0.7 886 | 0.7 887 | 0.7 888 | 0.7 889 | 0.7 890 | 0.7 891 | 0.7 892 | 0.7 893 | 0.7 894 | 0.7 895 | 0.7 896 | 0.7 897 | 0.7 898 | 0.7 899 | 0.7 900 | 0.7 901 | 0.7 902 | 0.7 903 | 0.7 904 | 0.7 905 | 0.7 906 | 0.7 907 | 0.7 908 | 0.7 909 | 0.7 910 | 0.7 911 | 0.7 912 | 0.7 913 | 0.7 914 | 0.8 915 | 0.8 916 | 0.8 917 | 0.8 918 | 0.8 919 | 0.8 920 | 0.8 921 | 0.8 922 | 0.8 923 | 0.8 924 | 0.8 925 | 0.8 926 | 0.8 927 | 0.8 928 | 0.8 929 | 0.8 930 | 0.8 931 | 0.8 932 | 0.8 933 | 0.8 934 | 0.8 935 | 0.8 936 | 0.8 937 | 0.8 938 | 0.8 939 | 0.8 940 | 0.8 941 | 0.8 942 | 0.8 943 | 0.8 944 | 0.8 945 | 0.8 946 | 0.8 947 | 0.8 948 | 0.8 949 | 0.8 950 | 0.8 951 | 0.8 952 | 0.8 953 
| 0.8 954 | 0.8 955 | 0.8 956 | 0.8 957 | 0.8 958 | 0.8 959 | 0.8 960 | 0.8 961 | 0.8 962 | 0.8 963 | 0.8 964 | 0.8 965 | 0.8 966 | 0.8 967 | 0.8 968 | 0.8 969 | 0.8 970 | 0.8 971 | 0.8 972 | 0.8 973 | 0.8 974 | 0.8 975 | 0.8 976 | 0.8 977 | 0.8 978 | 0.8 979 | 0.8 980 | 0.8 981 | 0.8 982 | 0.8 983 | 0.8 984 | 0.8 985 | 0.8 986 | 0.8 987 | 0.8 988 | 0.8 989 | 0.8 990 | 0.8 991 | 0.8 992 | 0.8 993 | 0.8 994 | 0.8 995 | 0.8 996 | 0.8 997 | 0.8 998 | 0.8 999 | 0.8 1000 | 0.8 1001 | 0.8 1002 | 0.8 1003 | 0.9 1004 | 0.9 1005 | 0.9 1006 | 0.9 1007 | 0.9 1008 | 0.9 1009 | 0.9 1010 | 0.9 1011 | 0.9 1012 | 0.9 1013 | 0.9 1014 | 0.9 1015 | 0.9 1016 | 0.9 1017 | 0.9 1018 | 0.9 1019 | 0.9 1020 | 0.9 1021 | 0.9 1022 | 0.9 1023 | 0.9 1024 | 0.9 1025 | 0.9 1026 | 0.9 1027 | 0.9 1028 | -------------------------------------------------------------------------------- /data/scaledata/James+Berardinelli/id.James+Berardinelli: -------------------------------------------------------------------------------- 1 | 2321 2 | 3337 3 | 6511 4 | 6912 5 | 2240 6 | 4073 7 | 2120 8 | 3313 9 | 4899 10 | 5752 11 | 9080 12 | 9596 13 | 2175 14 | 4337 15 | 2577 16 | 4042 17 | 5268 18 | 6271 19 | 6841 20 | 3130 21 | 2081 22 | 3169 23 | 3838 24 | 2727 25 | 4755 26 | 4903 27 | 5182 28 | 5520 29 | 5667 30 | 6229 31 | 7383 32 | 8726 33 | 8792 34 | 9903 35 | 2101 36 | 2530 37 | 4550 38 | 5037 39 | 5841 40 | 6141 41 | 6273 42 | 6711 43 | 6905 44 | 7107 45 | 7563 46 | 8407 47 | 2090 48 | 4135 49 | 2219 50 | 2033 51 | 2117 52 | 3237 53 | 3332 54 | 2177 55 | 2490 56 | 3926 57 | 4263 58 | 10247 59 | 10533 60 | 10859 61 | 11382 62 | 12765 63 | 1860 64 | 1911 65 | 1916 66 | 1990 67 | 2187 68 | 2909 69 | 4260 70 | 4468 71 | 4682 72 | 4902 73 | 4918 74 | 5304 75 | 5339 76 | 5403 77 | 5704 78 | 5842 79 | 5894 80 | 6017 81 | 6356 82 | 6442 83 | 6747 84 | 6842 85 | 7232 86 | 7442 87 | 7764 88 | 9034 89 | 9306 90 | 9492 91 | 2058 92 | 2252 93 | 2278 94 | 2525 95 | 4324 96 | 2086 97 | 2625 98 | 2094 99 | 2122 100 | 2804 101 | 3107 102 | 4161 103 | 2059 104 | 2169 105 | 2791 106 | 4036 107 | 2462 108 | 3032 109 | 4010 110 | 4540 111 | 4820 112 | 4884 113 | 5890 114 | 6018 115 | 6057 116 | 6127 117 | 7309 118 | 7560 119 | 7839 120 | 8629 121 | 8985 122 | 9281 123 | 2290 124 | 2391 125 | 2949 126 | 3102 127 | 3673 128 | 2045 129 | 2274 130 | 2386 131 | 2875 132 | 3161 133 | 3214 134 | 3996 135 | 2026 136 | 2308 137 | 2552 138 | 3990 139 | 10091 140 | 10363 141 | 10613 142 | 11028 143 | 11233 144 | 11557 145 | 11816 146 | 11945 147 | 11951 148 | 12089 149 | 12222 150 | 12224 151 | 12427 152 | 12547 153 | 12727 154 | 1864 155 | 1867 156 | 1930 157 | 1934 158 | 1991 159 | 2008 160 | 2141 161 | 2365 162 | 2642 163 | 2774 164 | 2854 165 | 2985 166 | 3472 167 | 3587 168 | 4844 169 | 4877 170 | 4975 171 | 4994 172 | 5238 173 | 5319 174 | 5720 175 | 5888 176 | 5941 177 | 5946 178 | 6278 179 | 6590 180 | 7229 181 | 7273 182 | 7724 183 | 7933 184 | 7935 185 | 8032 186 | 8632 187 | 2025 188 | 2181 189 | 2236 190 | 3330 191 | 3696 192 | 4120 193 | 2171 194 | 3408 195 | 3658 196 | 2476 197 | 2515 198 | 4265 199 | 2036 200 | 2099 201 | 2174 202 | 2338 203 | 2416 204 | 2496 205 | 2582 206 | 2694 207 | 3253 208 | 3776 209 | 2848 210 | 3591 211 | 4586 212 | 4612 213 | 4771 214 | 4823 215 | 4901 216 | 5031 217 | 5074 218 | 6528 219 | 6784 220 | 7128 221 | 7334 222 | 8113 223 | 8723 224 | 8724 225 | 9396 226 | 2051 227 | 2529 228 | 3391 229 | 2221 230 | 2289 231 | 2456 232 | 2517 233 | 2578 234 | 3026 235 | 3079 236 | 2046 237 | 2157 238 | 2685 239 | 3158 240 | 3462 
241 | 2337 242 | 2410 243 | 2975 244 | 3148 245 | 3952 246 | 4430 247 | 10056 248 | 10142 249 | 10212 250 | 10283 251 | 10576 252 | 10696 253 | 10707 254 | 10824 255 | 10857 256 | 10919 257 | 11023 258 | 11066 259 | 11159 260 | 11230 261 | 11232 262 | 11475 263 | 11539 264 | 11747 265 | 12057 266 | 12328 267 | 12428 268 | 12429 269 | 12549 270 | 12598 271 | 12729 272 | 12896 273 | 12928 274 | 1865 275 | 1866 276 | 1929 277 | 1932 278 | 1937 279 | 1943 280 | 1967 281 | 1968 282 | 1974 283 | 1981 284 | 1994 285 | 2006 286 | 2085 287 | 2108 288 | 2262 289 | 2681 290 | 3699 291 | 3895 292 | 3989 293 | 4457 294 | 4694 295 | 4896 296 | 5188 297 | 5307 298 | 5405 299 | 5680 300 | 5879 301 | 5889 302 | 6153 303 | 6178 304 | 6397 305 | 6710 306 | 6950 307 | 7012 308 | 7063 309 | 7191 310 | 7384 311 | 7785 312 | 8341 313 | 8627 314 | 8628 315 | 9165 316 | 9713 317 | 9779 318 | 9805 319 | 2303 320 | 2547 321 | 2620 322 | 2736 323 | 3358 324 | 3586 325 | 3785 326 | 4143 327 | 4162 328 | 4193 329 | 4342 330 | 2116 331 | 2256 332 | 2379 333 | 2568 334 | 2795 335 | 3302 336 | 3503 337 | 2158 338 | 2185 339 | 2437 340 | 4553 341 | 2356 342 | 2671 343 | 3388 344 | 3970 345 | 4012 346 | 4396 347 | 2079 348 | 2229 349 | 2417 350 | 2508 351 | 2623 352 | 3612 353 | 3940 354 | 4465 355 | 4714 356 | 4824 357 | 5028 358 | 5193 359 | 5194 360 | 5537 361 | 5548 362 | 5554 363 | 5574 364 | 5678 365 | 5699 366 | 5802 367 | 5887 368 | 5895 369 | 5973 370 | 6154 371 | 6155 372 | 6218 373 | 6441 374 | 6906 375 | 6951 376 | 7013 377 | 7234 378 | 7267 379 | 7307 380 | 7346 381 | 7492 382 | 7525 383 | 8419 384 | 8790 385 | 9054 386 | 9164 387 | 2292 388 | 2430 389 | 2581 390 | 2907 391 | 3269 392 | 3618 393 | 4165 394 | 4453 395 | 4460 396 | 2143 397 | 2455 398 | 2698 399 | 2880 400 | 3023 401 | 3281 402 | 3711 403 | 3861 404 | 3919 405 | 4163 406 | 2089 407 | 2182 408 | 2319 409 | 2428 410 | 2605 411 | 2845 412 | 2993 413 | 3121 414 | 3290 415 | 3707 416 | 4064 417 | 4119 418 | 4225 419 | 4434 420 | 2098 421 | 2244 422 | 2394 423 | 2489 424 | 2828 425 | 3182 426 | 3609 427 | 3649 428 | 3701 429 | 3862 430 | 3988 431 | 10432 432 | 10858 433 | 10923 434 | 11380 435 | 11383 436 | 11477 437 | 11587 438 | 11808 439 | 11874 440 | 11986 441 | 12149 442 | 12150 443 | 12175 444 | 12346 445 | 12358 446 | 12548 447 | 12648 448 | 12700 449 | 12793 450 | 13022 451 | 1889 452 | 1891 453 | 1908 454 | 1910 455 | 1912 456 | 1921 457 | 1925 458 | 1961 459 | 1975 460 | 1984 461 | 1985 462 | 2007 463 | 2009 464 | 2080 465 | 2497 466 | 2522 467 | 2540 468 | 2682 469 | 2941 470 | 3029 471 | 3098 472 | 3137 473 | 3208 474 | 3304 475 | 3629 476 | 3786 477 | 4084 478 | 4286 479 | 4369 480 | 4464 481 | 4589 482 | 4738 483 | 4754 484 | 4767 485 | 4768 486 | 4860 487 | 4948 488 | 5072 489 | 5073 490 | 5113 491 | 5134 492 | 5233 493 | 5240 494 | 5324 495 | 5551 496 | 5553 497 | 5697 498 | 5750 499 | 5799 500 | 5840 501 | 5943 502 | 5945 503 | 6030 504 | 6056 505 | 6058 506 | 6101 507 | 6102 508 | 6156 509 | 6200 510 | 6276 511 | 6357 512 | 6358 513 | 6484 514 | 6485 515 | 6529 516 | 6555 517 | 6592 518 | 6706 519 | 6746 520 | 7035 521 | 7095 522 | 7126 523 | 7192 524 | 7194 525 | 7516 526 | 7588 527 | 7597 528 | 7762 529 | 7811 530 | 7838 531 | 7858 532 | 7959 533 | 8226 534 | 8227 535 | 8263 536 | 8342 537 | 8433 538 | 8434 539 | 8497 540 | 8729 541 | 9035 542 | 9099 543 | 9130 544 | 9440 545 | 9493 546 | 9575 547 | 9833 548 | 9901 549 | 2095 550 | 2267 551 | 2374 552 | 2481 553 | 2608 554 | 2781 555 | 2919 556 | 3217 557 | 3639 558 | 4022 559 | 
4247 560 | 4376 561 | 2031 562 | 2179 563 | 2748 564 | 2812 565 | 3176 566 | 3336 567 | 4149 568 | 2035 569 | 2121 570 | 2233 571 | 2378 572 | 2624 573 | 2788 574 | 3147 575 | 3293 576 | 3582 577 | 3619 578 | 3632 579 | 3813 580 | 3908 581 | 4187 582 | 4372 583 | 4554 584 | 2315 585 | 2405 586 | 2894 587 | 3489 588 | 3751 589 | 4237 590 | 2266 591 | 2562 592 | 2667 593 | 3272 594 | 3323 595 | 3660 596 | 3822 597 | 4198 598 | 4375 599 | 4769 600 | 4822 601 | 4839 602 | 4875 603 | 4898 604 | 5172 605 | 5185 606 | 5195 607 | 5305 608 | 5426 609 | 5437 610 | 5528 611 | 5651 612 | 5843 613 | 6015 614 | 6016 615 | 6114 616 | 6123 617 | 6124 618 | 6131 619 | 6230 620 | 6486 621 | 6783 622 | 6907 623 | 7046 624 | 7169 625 | 7231 626 | 7344 627 | 7444 628 | 7482 629 | 7518 630 | 7598 631 | 7679 632 | 7763 633 | 8264 634 | 8420 635 | 8498 636 | 9296 637 | 9397 638 | 2173 639 | 2457 640 | 2468 641 | 2565 642 | 2725 643 | 2905 644 | 2973 645 | 3068 646 | 3521 647 | 3836 648 | 3941 649 | 4049 650 | 2183 651 | 2650 652 | 2678 653 | 3204 654 | 3695 655 | 3816 656 | 3969 657 | 4074 658 | 4125 659 | 2172 660 | 2296 661 | 2896 662 | 3246 663 | 3432 664 | 3698 665 | 3703 666 | 4285 667 | 2128 668 | 2176 669 | 2402 670 | 2449 671 | 2543 672 | 2761 673 | 2978 674 | 3089 675 | 3346 676 | 3526 677 | 4299 678 | 10019 679 | 10094 680 | 10143 681 | 10175 682 | 10211 683 | 10219 684 | 10228 685 | 10248 686 | 10326 687 | 10364 688 | 10365 689 | 10433 690 | 10451 691 | 10534 692 | 10615 693 | 10825 694 | 10912 695 | 10979 696 | 11022 697 | 11067 698 | 11134 699 | 11135 700 | 11305 701 | 11381 702 | 11427 703 | 11474 704 | 11476 705 | 11586 706 | 11641 707 | 11674 708 | 11746 709 | 11946 710 | 12033 711 | 12056 712 | 12221 713 | 12223 714 | 12263 715 | 12264 716 | 12357 717 | 12487 718 | 12489 719 | 12491 720 | 12728 721 | 12791 722 | 12792 723 | 12897 724 | 12948 725 | 12949 726 | 12995 727 | 1858 728 | 1944 729 | 1945 730 | 1979 731 | 2005 732 | 2091 733 | 2159 734 | 2178 735 | 2268 736 | 2370 737 | 2564 738 | 2566 739 | 2977 740 | 2979 741 | 3129 742 | 3261 743 | 3294 744 | 3342 745 | 3433 746 | 3625 747 | 3844 748 | 3879 749 | 4167 750 | 4194 751 | 4292 752 | 4576 753 | 4617 754 | 4628 755 | 4634 756 | 4687 757 | 4710 758 | 4742 759 | 4766 760 | 4995 761 | 5002 762 | 5156 763 | 5171 764 | 5191 765 | 5237 766 | 5239 767 | 5241 768 | 5256 769 | 5269 770 | 5341 771 | 5513 772 | 5516 773 | 5715 774 | 5717 775 | 5801 776 | 5844 777 | 5944 778 | 5974 779 | 6099 780 | 6103 781 | 6130 782 | 6172 783 | 6272 784 | 6274 785 | 6307 786 | 6327 787 | 6444 788 | 6556 789 | 6652 790 | 6785 791 | 6786 792 | 6952 793 | 7064 794 | 7065 795 | 7109 796 | 7125 797 | 7173 798 | 7268 799 | 7333 800 | 7414 801 | 7494 802 | 7519 803 | 7557 804 | 7583 805 | 7587 806 | 7590 807 | 7677 808 | 7960 809 | 7961 810 | 8224 811 | 8225 812 | 8408 813 | 8492 814 | 8631 815 | 8791 816 | 8855 817 | 8941 818 | 9036 819 | 9037 820 | 9079 821 | 9100 822 | 9205 823 | 9247 824 | 9264 825 | 9441 826 | 9442 827 | 9576 828 | 9654 829 | 9718 830 | 9750 831 | 9778 832 | 9852 833 | 9900 834 | 9902 835 | 2062 836 | 2180 837 | 2218 838 | 2255 839 | 2493 840 | 2541 841 | 3361 842 | 3611 843 | 4122 844 | 2392 845 | 2418 846 | 2558 847 | 2754 848 | 2986 849 | 3037 850 | 3110 851 | 3111 852 | 3451 853 | 3896 854 | 2110 855 | 2471 856 | 2691 857 | 3297 858 | 3395 859 | 3527 860 | 3641 861 | 3667 862 | 4043 863 | 4238 864 | 4296 865 | 4398 866 | 2043 867 | 2087 868 | 2447 869 | 2653 870 | 2758 871 | 2775 872 | 3055 873 | 3096 874 | 3627 875 | 3954 876 | 4011 877 | 4588 
878 | 2408 879 | 2570 880 | 3319 881 | 3442 882 | 3628 883 | 4635 884 | 4684 885 | 4695 886 | 4874 887 | 4876 888 | 4878 889 | 4879 890 | 4886 891 | 4895 892 | 4897 893 | 4956 894 | 5024 895 | 5038 896 | 5050 897 | 5075 898 | 5118 899 | 5220 900 | 5242 901 | 5254 902 | 5255 903 | 5308 904 | 5344 905 | 5409 906 | 5438 907 | 5635 908 | 5657 909 | 5756 910 | 5800 911 | 6032 912 | 6037 913 | 6059 914 | 6097 915 | 6136 916 | 6140 917 | 6142 918 | 6159 919 | 6198 920 | 6277 921 | 6290 922 | 6335 923 | 6398 924 | 6513 925 | 6540 926 | 6546 927 | 6593 928 | 6818 929 | 6908 930 | 6953 931 | 7029 932 | 7081 933 | 7097 934 | 7127 935 | 7230 936 | 7279 937 | 7280 938 | 7415 939 | 7443 940 | 7495 941 | 7517 942 | 7523 943 | 7524 944 | 7526 945 | 7561 946 | 7562 947 | 7564 948 | 7565 949 | 7600 950 | 7675 951 | 7680 952 | 7786 953 | 7934 954 | 8031 955 | 8046 956 | 8047 957 | 8626 958 | 8725 959 | 9204 960 | 9386 961 | 9388 962 | 2279 963 | 2484 964 | 2656 965 | 2737 966 | 2916 967 | 3163 968 | 3603 969 | 4085 970 | 4109 971 | 4281 972 | 4529 973 | 2055 974 | 2220 975 | 2258 976 | 2435 977 | 2466 978 | 2549 979 | 2756 980 | 2807 981 | 2872 982 | 3050 983 | 3077 984 | 3216 985 | 3244 986 | 3457 987 | 3630 988 | 4056 989 | 2299 990 | 2463 991 | 2550 992 | 2627 993 | 2739 994 | 2924 995 | 3152 996 | 3309 997 | 3372 998 | 3837 999 | 3971 1000 | 4024 1001 | 4047 1002 | 4358 1003 | 4451 1004 | 2076 1005 | 2254 1006 | 2316 1007 | 2498 1008 | 2551 1009 | 2811 1010 | 3017 1011 | 3085 1012 | 3167 1013 | 3284 1014 | 3363 1015 | 3702 1016 | 3742 1017 | 3752 1018 | 3831 1019 | 3916 1020 | 3953 1021 | 4113 1022 | 4153 1023 | 4199 1024 | 4420 1025 | 4435 1026 | 4466 1027 | 10115 1028 | 10116 1029 | 10328 1030 | 10732 1031 | 11024 1032 | 11027 1033 | 11231 1034 | 11540 1035 | 11875 1036 | 11987 1037 | 12327 1038 | 12345 1039 | 12347 1040 | 12488 1041 | 12699 1042 | 12757 1043 | 12950 1044 | 1859 1045 | 1928 1046 | 1998 1047 | 2532 1048 | 2553 1049 | 2669 1050 | 2777 1051 | 3141 1052 | 3171 1053 | 3207 1054 | 3352 1055 | 3460 1056 | 3528 1057 | 3697 1058 | 3835 1059 | 3917 1060 | 4141 1061 | 4233 1062 | 4633 1063 | 4685 1064 | 4686 1065 | 4713 1066 | 4846 1067 | 4917 1068 | 4934 1069 | 4989 1070 | 5025 1071 | 5099 1072 | 5121 1073 | 5176 1074 | 5177 1075 | 5186 1076 | 5192 1077 | 5236 1078 | 5323 1079 | 5402 1080 | 5510 1081 | 5645 1082 | 5716 1083 | 5940 1084 | 5942 1085 | 5975 1086 | 6033 1087 | 6080 1088 | 6082 1089 | 6098 1090 | 6121 1091 | 6129 1092 | 6158 1093 | 6179 1094 | 6197 1095 | 6228 1096 | 6326 1097 | 6380 1098 | 6469 1099 | 6471 1100 | 6512 1101 | 6707 1102 | 6709 1103 | 6792 1104 | 6984 1105 | 7011 1106 | 7015 1107 | 7047 1108 | 7193 1109 | 7345 1110 | 7483 1111 | 7520 1112 | 7521 1113 | 7559 1114 | 7584 1115 | 7585 1116 | 7586 1117 | 7599 1118 | 7857 1119 | 7881 1120 | 8280 1121 | 8340 1122 | 8630 1123 | 8984 1124 | 8986 1125 | 8988 1126 | 9081 1127 | 9129 1128 | 9248 1129 | 9280 1130 | 9283 1131 | 9542 1132 | 9653 1133 | 9834 1134 | 9837 1135 | 9853 1136 | 2576 1137 | 2702 1138 | 2755 1139 | 2847 1140 | 3516 1141 | 3635 1142 | 4431 1143 | 4518 1144 | 2186 1145 | 2407 1146 | 2573 1147 | 2718 1148 | 3010 1149 | 3060 1150 | 3317 1151 | 3613 1152 | 3768 1153 | 4083 1154 | 4336 1155 | 4511 1156 | 2115 1157 | 2548 1158 | 3610 1159 | 3646 1160 | 3886 1161 | 2067 1162 | 2151 1163 | 2349 1164 | 2720 1165 | 2821 1166 | 3034 1167 | 3114 1168 | 3334 1169 | 3634 1170 | 3644 1171 | 3955 1172 | 4097 1173 | 4524 1174 | 2030 1175 | 2263 1176 | 2421 1177 | 2439 1178 | 2563 1179 | 2888 1180 | 3224 1181 | 3393 1182 | 3645 
1183 | 4542 1184 | 4632 1185 | 4683 1186 | 4996 1187 | 5173 1188 | 5234 1189 | 5235 1190 | 5265 1191 | 5435 1192 | 5550 1193 | 5597 1194 | 5598 1195 | 5714 1196 | 5817 1197 | 5955 1198 | 6031 1199 | 6275 1200 | 6325 1201 | 6395 1202 | 6440 1203 | 6443 1204 | 6514 1205 | 6708 1206 | 7014 1207 | 7096 1208 | 7142 1209 | 7269 1210 | 7522 1211 | 7558 1212 | 7624 1213 | 7678 1214 | 7765 1215 | 8987 1216 | 9190 1217 | 2113 1218 | 2140 1219 | 2474 1220 | 2661 1221 | 3093 1222 | 3704 1223 | 3956 1224 | 4262 1225 | 4314 1226 | 2205 1227 | 2966 1228 | 3598 1229 | 3897 1230 | 4142 1231 | 2434 1232 | 2626 1233 | 2772 1234 | 2859 1235 | 3097 1236 | 3446 1237 | 3633 1238 | 3671 1239 | 4168 1240 | 4335 1241 | 2184 1242 | 2475 1243 | 2794 1244 | 3221 1245 | 3311 1246 | 3918 1247 | 10020 1248 | 10090 1249 | 10176 1250 | 11025 1251 | 11306 1252 | 11772 1253 | 12090 1254 | 12220 1255 | 1917 1256 | 1976 1257 | 2526 1258 | 2689 1259 | 3354 1260 | 3648 1261 | 3719 1262 | 5257 1263 | 5616 1264 | 6081 1265 | 6100 1266 | 6113 1267 | 6157 1268 | 6782 1269 | 6949 1270 | 7589 1271 | 7676 1272 | 8030 1273 | 8114 1274 | 9249 1275 | 9507 1276 | 9751 1277 | 2170 1278 | 3155 1279 | 3262 1280 | 3834 1281 | 3177 1282 | 3790 1283 | 4159 1284 | 4166 1285 | 4958 1286 | 2331 1287 | 2817 1288 | 3434 1289 | 3664 1290 | 4391 1291 | 4548 1292 | 2119 1293 | 4170 1294 | 2163 1295 | 2816 1296 | 3804 1297 | 4236 1298 | 5853 1299 | 5927 1300 | 6591 1301 | 7382 1302 | 2281 1303 | 3708 1304 | 4359 1305 | 2354 1306 | 2757 1307 | 3022 1308 | -------------------------------------------------------------------------------- /data/scaledata/James+Berardinelli/label.3class.James+Berardinelli: -------------------------------------------------------------------------------- 1 | 0 2 | 0 3 | 0 4 | 0 5 | 0 6 | 0 7 | 0 8 | 0 9 | 0 10 | 0 11 | 0 12 | 0 13 | 0 14 | 0 15 | 0 16 | 0 17 | 0 18 | 0 19 | 0 20 | 0 21 | 0 22 | 0 23 | 0 24 | 0 25 | 0 26 | 0 27 | 0 28 | 0 29 | 0 30 | 0 31 | 0 32 | 0 33 | 0 34 | 0 35 | 0 36 | 0 37 | 0 38 | 0 39 | 0 40 | 0 41 | 0 42 | 0 43 | 0 44 | 0 45 | 0 46 | 0 47 | 0 48 | 0 49 | 0 50 | 0 51 | 0 52 | 0 53 | 0 54 | 0 55 | 0 56 | 0 57 | 0 58 | 0 59 | 0 60 | 0 61 | 0 62 | 0 63 | 0 64 | 0 65 | 0 66 | 0 67 | 0 68 | 0 69 | 0 70 | 0 71 | 0 72 | 0 73 | 0 74 | 0 75 | 0 76 | 0 77 | 0 78 | 0 79 | 0 80 | 0 81 | 0 82 | 0 83 | 0 84 | 0 85 | 0 86 | 0 87 | 0 88 | 0 89 | 0 90 | 0 91 | 0 92 | 0 93 | 0 94 | 0 95 | 0 96 | 0 97 | 0 98 | 0 99 | 0 100 | 0 101 | 0 102 | 0 103 | 0 104 | 0 105 | 0 106 | 0 107 | 0 108 | 0 109 | 0 110 | 0 111 | 0 112 | 0 113 | 0 114 | 0 115 | 0 116 | 0 117 | 0 118 | 0 119 | 0 120 | 0 121 | 0 122 | 0 123 | 0 124 | 0 125 | 0 126 | 0 127 | 0 128 | 0 129 | 0 130 | 0 131 | 0 132 | 0 133 | 0 134 | 0 135 | 0 136 | 0 137 | 0 138 | 0 139 | 0 140 | 0 141 | 0 142 | 0 143 | 0 144 | 0 145 | 0 146 | 0 147 | 0 148 | 0 149 | 0 150 | 0 151 | 0 152 | 0 153 | 0 154 | 0 155 | 0 156 | 0 157 | 0 158 | 0 159 | 0 160 | 0 161 | 0 162 | 0 163 | 0 164 | 0 165 | 0 166 | 0 167 | 0 168 | 0 169 | 0 170 | 0 171 | 0 172 | 0 173 | 0 174 | 0 175 | 0 176 | 0 177 | 0 178 | 0 179 | 0 180 | 0 181 | 0 182 | 0 183 | 0 184 | 0 185 | 0 186 | 0 187 | 1 188 | 1 189 | 1 190 | 1 191 | 1 192 | 1 193 | 1 194 | 1 195 | 1 196 | 1 197 | 1 198 | 1 199 | 1 200 | 1 201 | 1 202 | 1 203 | 1 204 | 1 205 | 1 206 | 1 207 | 1 208 | 1 209 | 1 210 | 1 211 | 1 212 | 1 213 | 1 214 | 1 215 | 1 216 | 1 217 | 1 218 | 1 219 | 1 220 | 1 221 | 1 222 | 1 223 | 1 224 | 1 225 | 1 226 | 1 227 | 1 228 | 1 229 | 1 230 | 1 231 | 1 232 | 1 233 | 1 234 | 1 235 | 1 236 | 1 237 | 1 238 | 1 239 | 1 240 | 
1 241 | 1 242 | 1 243 | 1 244 | 1 245 | 1 246 | 1 247 | 1 248 | 1 249 | 1 250 | 1 251 | 1 252 | 1 253 | 1 254 | 1 255 | 1 256 | 1 257 | 1 258 | 1 259 | 1 260 | 1 261 | 1 262 | 1 263 | 1 264 | 1 265 | 1 266 | 1 267 | 1 268 | 1 269 | 1 270 | 1 271 | 1 272 | 1 273 | 1 274 | 1 275 | 1 276 | 1 277 | 1 278 | 1 279 | 1 280 | 1 281 | 1 282 | 1 283 | 1 284 | 1 285 | 1 286 | 1 287 | 1 288 | 1 289 | 1 290 | 1 291 | 1 292 | 1 293 | 1 294 | 1 295 | 1 296 | 1 297 | 1 298 | 1 299 | 1 300 | 1 301 | 1 302 | 1 303 | 1 304 | 1 305 | 1 306 | 1 307 | 1 308 | 1 309 | 1 310 | 1 311 | 1 312 | 1 313 | 1 314 | 1 315 | 1 316 | 1 317 | 1 318 | 1 319 | 1 320 | 1 321 | 1 322 | 1 323 | 1 324 | 1 325 | 1 326 | 1 327 | 1 328 | 1 329 | 1 330 | 1 331 | 1 332 | 1 333 | 1 334 | 1 335 | 1 336 | 1 337 | 1 338 | 1 339 | 1 340 | 1 341 | 1 342 | 1 343 | 1 344 | 1 345 | 1 346 | 1 347 | 1 348 | 1 349 | 1 350 | 1 351 | 1 352 | 1 353 | 1 354 | 1 355 | 1 356 | 1 357 | 1 358 | 1 359 | 1 360 | 1 361 | 1 362 | 1 363 | 1 364 | 1 365 | 1 366 | 1 367 | 1 368 | 1 369 | 1 370 | 1 371 | 1 372 | 1 373 | 1 374 | 1 375 | 1 376 | 1 377 | 1 378 | 1 379 | 1 380 | 1 381 | 1 382 | 1 383 | 1 384 | 1 385 | 1 386 | 1 387 | 1 388 | 1 389 | 1 390 | 1 391 | 1 392 | 1 393 | 1 394 | 1 395 | 1 396 | 1 397 | 1 398 | 1 399 | 1 400 | 1 401 | 1 402 | 1 403 | 1 404 | 1 405 | 1 406 | 1 407 | 1 408 | 1 409 | 1 410 | 1 411 | 1 412 | 1 413 | 1 414 | 1 415 | 1 416 | 1 417 | 1 418 | 1 419 | 1 420 | 1 421 | 1 422 | 1 423 | 1 424 | 1 425 | 1 426 | 1 427 | 1 428 | 1 429 | 1 430 | 1 431 | 1 432 | 1 433 | 1 434 | 1 435 | 1 436 | 1 437 | 1 438 | 1 439 | 1 440 | 1 441 | 1 442 | 1 443 | 1 444 | 1 445 | 1 446 | 1 447 | 1 448 | 1 449 | 1 450 | 1 451 | 1 452 | 1 453 | 1 454 | 1 455 | 1 456 | 1 457 | 1 458 | 1 459 | 1 460 | 1 461 | 1 462 | 1 463 | 1 464 | 1 465 | 1 466 | 1 467 | 1 468 | 1 469 | 1 470 | 1 471 | 1 472 | 1 473 | 1 474 | 1 475 | 1 476 | 1 477 | 1 478 | 1 479 | 1 480 | 1 481 | 1 482 | 1 483 | 1 484 | 1 485 | 1 486 | 1 487 | 1 488 | 1 489 | 1 490 | 1 491 | 1 492 | 1 493 | 1 494 | 1 495 | 1 496 | 1 497 | 1 498 | 1 499 | 1 500 | 1 501 | 1 502 | 1 503 | 1 504 | 1 505 | 1 506 | 1 507 | 1 508 | 1 509 | 1 510 | 1 511 | 1 512 | 1 513 | 1 514 | 1 515 | 1 516 | 1 517 | 1 518 | 1 519 | 1 520 | 1 521 | 1 522 | 1 523 | 1 524 | 1 525 | 1 526 | 1 527 | 1 528 | 1 529 | 1 530 | 1 531 | 1 532 | 1 533 | 1 534 | 1 535 | 1 536 | 1 537 | 1 538 | 1 539 | 1 540 | 1 541 | 1 542 | 1 543 | 1 544 | 1 545 | 1 546 | 1 547 | 1 548 | 1 549 | 1 550 | 1 551 | 1 552 | 1 553 | 1 554 | 1 555 | 1 556 | 1 557 | 1 558 | 1 559 | 1 560 | 1 561 | 1 562 | 1 563 | 1 564 | 1 565 | 1 566 | 1 567 | 1 568 | 1 569 | 1 570 | 1 571 | 1 572 | 1 573 | 1 574 | 1 575 | 1 576 | 1 577 | 1 578 | 1 579 | 1 580 | 1 581 | 1 582 | 1 583 | 1 584 | 1 585 | 1 586 | 1 587 | 1 588 | 1 589 | 1 590 | 1 591 | 1 592 | 1 593 | 1 594 | 1 595 | 1 596 | 1 597 | 1 598 | 1 599 | 1 600 | 1 601 | 1 602 | 1 603 | 1 604 | 1 605 | 1 606 | 1 607 | 1 608 | 1 609 | 1 610 | 1 611 | 1 612 | 1 613 | 1 614 | 1 615 | 1 616 | 1 617 | 1 618 | 1 619 | 1 620 | 1 621 | 1 622 | 1 623 | 1 624 | 1 625 | 1 626 | 1 627 | 1 628 | 1 629 | 1 630 | 1 631 | 1 632 | 1 633 | 1 634 | 1 635 | 1 636 | 1 637 | 1 638 | 1 639 | 1 640 | 1 641 | 1 642 | 1 643 | 1 644 | 1 645 | 1 646 | 1 647 | 1 648 | 1 649 | 1 650 | 1 651 | 1 652 | 1 653 | 1 654 | 1 655 | 1 656 | 1 657 | 1 658 | 1 659 | 1 660 | 1 661 | 1 662 | 1 663 | 1 664 | 1 665 | 1 666 | 1 667 | 1 668 | 1 669 | 1 670 | 1 671 | 1 672 | 1 673 | 1 674 | 1 675 | 1 676 | 1 677 | 1 678 | 2 679 | 2 680 | 2 681 | 2 682 | 2 683 | 2 684 | 2 
685 | 2 686 | 2 687 | 2 688 | 2 689 | 2 690 | 2 691 | 2 692 | 2 693 | 2 694 | 2 695 | 2 696 | 2 697 | 2 698 | 2 699 | 2 700 | 2 701 | 2 702 | 2 703 | 2 704 | 2 705 | 2 706 | 2 707 | 2 708 | 2 709 | 2 710 | 2 711 | 2 712 | 2 713 | 2 714 | 2 715 | 2 716 | 2 717 | 2 718 | 2 719 | 2 720 | 2 721 | 2 722 | 2 723 | 2 724 | 2 725 | 2 726 | 2 727 | 2 728 | 2 729 | 2 730 | 2 731 | 2 732 | 2 733 | 2 734 | 2 735 | 2 736 | 2 737 | 2 738 | 2 739 | 2 740 | 2 741 | 2 742 | 2 743 | 2 744 | 2 745 | 2 746 | 2 747 | 2 748 | 2 749 | 2 750 | 2 751 | 2 752 | 2 753 | 2 754 | 2 755 | 2 756 | 2 757 | 2 758 | 2 759 | 2 760 | 2 761 | 2 762 | 2 763 | 2 764 | 2 765 | 2 766 | 2 767 | 2 768 | 2 769 | 2 770 | 2 771 | 2 772 | 2 773 | 2 774 | 2 775 | 2 776 | 2 777 | 2 778 | 2 779 | 2 780 | 2 781 | 2 782 | 2 783 | 2 784 | 2 785 | 2 786 | 2 787 | 2 788 | 2 789 | 2 790 | 2 791 | 2 792 | 2 793 | 2 794 | 2 795 | 2 796 | 2 797 | 2 798 | 2 799 | 2 800 | 2 801 | 2 802 | 2 803 | 2 804 | 2 805 | 2 806 | 2 807 | 2 808 | 2 809 | 2 810 | 2 811 | 2 812 | 2 813 | 2 814 | 2 815 | 2 816 | 2 817 | 2 818 | 2 819 | 2 820 | 2 821 | 2 822 | 2 823 | 2 824 | 2 825 | 2 826 | 2 827 | 2 828 | 2 829 | 2 830 | 2 831 | 2 832 | 2 833 | 2 834 | 2 835 | 2 836 | 2 837 | 2 838 | 2 839 | 2 840 | 2 841 | 2 842 | 2 843 | 2 844 | 2 845 | 2 846 | 2 847 | 2 848 | 2 849 | 2 850 | 2 851 | 2 852 | 2 853 | 2 854 | 2 855 | 2 856 | 2 857 | 2 858 | 2 859 | 2 860 | 2 861 | 2 862 | 2 863 | 2 864 | 2 865 | 2 866 | 2 867 | 2 868 | 2 869 | 2 870 | 2 871 | 2 872 | 2 873 | 2 874 | 2 875 | 2 876 | 2 877 | 2 878 | 2 879 | 2 880 | 2 881 | 2 882 | 2 883 | 2 884 | 2 885 | 2 886 | 2 887 | 2 888 | 2 889 | 2 890 | 2 891 | 2 892 | 2 893 | 2 894 | 2 895 | 2 896 | 2 897 | 2 898 | 2 899 | 2 900 | 2 901 | 2 902 | 2 903 | 2 904 | 2 905 | 2 906 | 2 907 | 2 908 | 2 909 | 2 910 | 2 911 | 2 912 | 2 913 | 2 914 | 2 915 | 2 916 | 2 917 | 2 918 | 2 919 | 2 920 | 2 921 | 2 922 | 2 923 | 2 924 | 2 925 | 2 926 | 2 927 | 2 928 | 2 929 | 2 930 | 2 931 | 2 932 | 2 933 | 2 934 | 2 935 | 2 936 | 2 937 | 2 938 | 2 939 | 2 940 | 2 941 | 2 942 | 2 943 | 2 944 | 2 945 | 2 946 | 2 947 | 2 948 | 2 949 | 2 950 | 2 951 | 2 952 | 2 953 | 2 954 | 2 955 | 2 956 | 2 957 | 2 958 | 2 959 | 2 960 | 2 961 | 2 962 | 2 963 | 2 964 | 2 965 | 2 966 | 2 967 | 2 968 | 2 969 | 2 970 | 2 971 | 2 972 | 2 973 | 2 974 | 2 975 | 2 976 | 2 977 | 2 978 | 2 979 | 2 980 | 2 981 | 2 982 | 2 983 | 2 984 | 2 985 | 2 986 | 2 987 | 2 988 | 2 989 | 2 990 | 2 991 | 2 992 | 2 993 | 2 994 | 2 995 | 2 996 | 2 997 | 2 998 | 2 999 | 2 1000 | 2 1001 | 2 1002 | 2 1003 | 2 1004 | 2 1005 | 2 1006 | 2 1007 | 2 1008 | 2 1009 | 2 1010 | 2 1011 | 2 1012 | 2 1013 | 2 1014 | 2 1015 | 2 1016 | 2 1017 | 2 1018 | 2 1019 | 2 1020 | 2 1021 | 2 1022 | 2 1023 | 2 1024 | 2 1025 | 2 1026 | 2 1027 | 2 1028 | 2 1029 | 2 1030 | 2 1031 | 2 1032 | 2 1033 | 2 1034 | 2 1035 | 2 1036 | 2 1037 | 2 1038 | 2 1039 | 2 1040 | 2 1041 | 2 1042 | 2 1043 | 2 1044 | 2 1045 | 2 1046 | 2 1047 | 2 1048 | 2 1049 | 2 1050 | 2 1051 | 2 1052 | 2 1053 | 2 1054 | 2 1055 | 2 1056 | 2 1057 | 2 1058 | 2 1059 | 2 1060 | 2 1061 | 2 1062 | 2 1063 | 2 1064 | 2 1065 | 2 1066 | 2 1067 | 2 1068 | 2 1069 | 2 1070 | 2 1071 | 2 1072 | 2 1073 | 2 1074 | 2 1075 | 2 1076 | 2 1077 | 2 1078 | 2 1079 | 2 1080 | 2 1081 | 2 1082 | 2 1083 | 2 1084 | 2 1085 | 2 1086 | 2 1087 | 2 1088 | 2 1089 | 2 1090 | 2 1091 | 2 1092 | 2 1093 | 2 1094 | 2 1095 | 2 1096 | 2 1097 | 2 1098 | 2 1099 | 2 1100 | 2 1101 | 2 1102 | 2 1103 | 2 1104 | 2 1105 | 2 1106 | 2 1107 | 2 1108 | 2 1109 | 2 1110 | 2 1111 | 2 1112 | 2 1113 | 2 1114 | 2 
1115 | 2 1116 | 2 1117 | 2 1118 | 2 1119 | 2 1120 | 2 1121 | 2 1122 | 2 1123 | 2 1124 | 2 1125 | 2 1126 | 2 1127 | 2 1128 | 2 1129 | 2 1130 | 2 1131 | 2 1132 | 2 1133 | 2 1134 | 2 1135 | 2 1136 | 2 1137 | 2 1138 | 2 1139 | 2 1140 | 2 1141 | 2 1142 | 2 1143 | 2 1144 | 2 1145 | 2 1146 | 2 1147 | 2 1148 | 2 1149 | 2 1150 | 2 1151 | 2 1152 | 2 1153 | 2 1154 | 2 1155 | 2 1156 | 2 1157 | 2 1158 | 2 1159 | 2 1160 | 2 1161 | 2 1162 | 2 1163 | 2 1164 | 2 1165 | 2 1166 | 2 1167 | 2 1168 | 2 1169 | 2 1170 | 2 1171 | 2 1172 | 2 1173 | 2 1174 | 2 1175 | 2 1176 | 2 1177 | 2 1178 | 2 1179 | 2 1180 | 2 1181 | 2 1182 | 2 1183 | 2 1184 | 2 1185 | 2 1186 | 2 1187 | 2 1188 | 2 1189 | 2 1190 | 2 1191 | 2 1192 | 2 1193 | 2 1194 | 2 1195 | 2 1196 | 2 1197 | 2 1198 | 2 1199 | 2 1200 | 2 1201 | 2 1202 | 2 1203 | 2 1204 | 2 1205 | 2 1206 | 2 1207 | 2 1208 | 2 1209 | 2 1210 | 2 1211 | 2 1212 | 2 1213 | 2 1214 | 2 1215 | 2 1216 | 2 1217 | 2 1218 | 2 1219 | 2 1220 | 2 1221 | 2 1222 | 2 1223 | 2 1224 | 2 1225 | 2 1226 | 2 1227 | 2 1228 | 2 1229 | 2 1230 | 2 1231 | 2 1232 | 2 1233 | 2 1234 | 2 1235 | 2 1236 | 2 1237 | 2 1238 | 2 1239 | 2 1240 | 2 1241 | 2 1242 | 2 1243 | 2 1244 | 2 1245 | 2 1246 | 2 1247 | 2 1248 | 2 1249 | 2 1250 | 2 1251 | 2 1252 | 2 1253 | 2 1254 | 2 1255 | 2 1256 | 2 1257 | 2 1258 | 2 1259 | 2 1260 | 2 1261 | 2 1262 | 2 1263 | 2 1264 | 2 1265 | 2 1266 | 2 1267 | 2 1268 | 2 1269 | 2 1270 | 2 1271 | 2 1272 | 2 1273 | 2 1274 | 2 1275 | 2 1276 | 2 1277 | 2 1278 | 2 1279 | 2 1280 | 2 1281 | 2 1282 | 2 1283 | 2 1284 | 2 1285 | 2 1286 | 2 1287 | 2 1288 | 2 1289 | 2 1290 | 2 1291 | 2 1292 | 2 1293 | 2 1294 | 2 1295 | 2 1296 | 2 1297 | 2 1298 | 2 1299 | 2 1300 | 2 1301 | 2 1302 | 2 1303 | 2 1304 | 2 1305 | 2 1306 | 2 1307 | 2 1308 | -------------------------------------------------------------------------------- /data/scaledata/James+Berardinelli/label.4class.James+Berardinelli: -------------------------------------------------------------------------------- 1 | 0 2 | 0 3 | 0 4 | 0 5 | 0 6 | 0 7 | 0 8 | 0 9 | 0 10 | 0 11 | 0 12 | 0 13 | 0 14 | 0 15 | 0 16 | 0 17 | 0 18 | 0 19 | 0 20 | 0 21 | 0 22 | 0 23 | 0 24 | 0 25 | 0 26 | 0 27 | 0 28 | 0 29 | 0 30 | 0 31 | 0 32 | 0 33 | 0 34 | 0 35 | 0 36 | 0 37 | 0 38 | 0 39 | 0 40 | 0 41 | 0 42 | 0 43 | 0 44 | 0 45 | 0 46 | 0 47 | 0 48 | 0 49 | 0 50 | 0 51 | 0 52 | 0 53 | 0 54 | 0 55 | 0 56 | 0 57 | 0 58 | 0 59 | 0 60 | 0 61 | 0 62 | 0 63 | 0 64 | 0 65 | 0 66 | 0 67 | 0 68 | 0 69 | 0 70 | 0 71 | 0 72 | 0 73 | 0 74 | 0 75 | 0 76 | 0 77 | 0 78 | 0 79 | 0 80 | 0 81 | 0 82 | 0 83 | 0 84 | 0 85 | 0 86 | 0 87 | 0 88 | 0 89 | 0 90 | 0 91 | 0 92 | 0 93 | 0 94 | 0 95 | 0 96 | 0 97 | 0 98 | 0 99 | 0 100 | 0 101 | 0 102 | 0 103 | 0 104 | 0 105 | 0 106 | 0 107 | 0 108 | 0 109 | 0 110 | 0 111 | 0 112 | 0 113 | 0 114 | 0 115 | 0 116 | 0 117 | 0 118 | 0 119 | 0 120 | 0 121 | 0 122 | 0 123 | 0 124 | 0 125 | 0 126 | 0 127 | 0 128 | 0 129 | 0 130 | 0 131 | 0 132 | 0 133 | 0 134 | 0 135 | 0 136 | 0 137 | 0 138 | 0 139 | 1 140 | 1 141 | 1 142 | 1 143 | 1 144 | 1 145 | 1 146 | 1 147 | 1 148 | 1 149 | 1 150 | 1 151 | 1 152 | 1 153 | 1 154 | 1 155 | 1 156 | 1 157 | 1 158 | 1 159 | 1 160 | 1 161 | 1 162 | 1 163 | 1 164 | 1 165 | 1 166 | 1 167 | 1 168 | 1 169 | 1 170 | 1 171 | 1 172 | 1 173 | 1 174 | 1 175 | 1 176 | 1 177 | 1 178 | 1 179 | 1 180 | 1 181 | 1 182 | 1 183 | 1 184 | 1 185 | 1 186 | 1 187 | 1 188 | 1 189 | 1 190 | 1 191 | 1 192 | 1 193 | 1 194 | 1 195 | 1 196 | 1 197 | 1 198 | 1 199 | 1 200 | 1 201 | 1 202 | 1 203 | 1 204 | 1 205 | 1 206 | 1 207 | 1 208 | 1 209 | 1 210 | 1 211 | 1 
212 | 1 213 | 1 214 | 1 215 | 1 216 | 1 217 | 1 218 | 1 219 | 1 220 | 1 221 | 1 222 | 1 223 | 1 224 | 1 225 | 1 226 | 1 227 | 1 228 | 1 229 | 1 230 | 1 231 | 1 232 | 1 233 | 1 234 | 1 235 | 1 236 | 1 237 | 1 238 | 1 239 | 1 240 | 1 241 | 1 242 | 1 243 | 1 244 | 1 245 | 1 246 | 1 247 | 1 248 | 1 249 | 1 250 | 1 251 | 1 252 | 1 253 | 1 254 | 1 255 | 1 256 | 1 257 | 1 258 | 1 259 | 1 260 | 1 261 | 1 262 | 1 263 | 1 264 | 1 265 | 1 266 | 1 267 | 1 268 | 1 269 | 1 270 | 1 271 | 1 272 | 1 273 | 1 274 | 1 275 | 1 276 | 1 277 | 1 278 | 1 279 | 1 280 | 1 281 | 1 282 | 1 283 | 1 284 | 1 285 | 1 286 | 1 287 | 1 288 | 1 289 | 1 290 | 1 291 | 1 292 | 1 293 | 1 294 | 1 295 | 1 296 | 1 297 | 1 298 | 1 299 | 1 300 | 1 301 | 1 302 | 1 303 | 1 304 | 1 305 | 1 306 | 1 307 | 1 308 | 1 309 | 1 310 | 1 311 | 1 312 | 1 313 | 1 314 | 1 315 | 1 316 | 1 317 | 1 318 | 1 319 | 1 320 | 1 321 | 1 322 | 1 323 | 1 324 | 1 325 | 1 326 | 1 327 | 1 328 | 1 329 | 1 330 | 1 331 | 1 332 | 1 333 | 1 334 | 1 335 | 1 336 | 1 337 | 1 338 | 1 339 | 1 340 | 1 341 | 1 342 | 1 343 | 1 344 | 1 345 | 1 346 | 1 347 | 1 348 | 1 349 | 1 350 | 1 351 | 1 352 | 1 353 | 1 354 | 1 355 | 1 356 | 1 357 | 1 358 | 1 359 | 1 360 | 1 361 | 1 362 | 1 363 | 1 364 | 1 365 | 1 366 | 1 367 | 1 368 | 1 369 | 1 370 | 1 371 | 1 372 | 1 373 | 1 374 | 1 375 | 1 376 | 1 377 | 1 378 | 1 379 | 1 380 | 1 381 | 1 382 | 1 383 | 1 384 | 1 385 | 1 386 | 1 387 | 1 388 | 1 389 | 1 390 | 1 391 | 1 392 | 1 393 | 1 394 | 1 395 | 1 396 | 1 397 | 1 398 | 1 399 | 1 400 | 1 401 | 1 402 | 1 403 | 1 404 | 1 405 | 1 406 | 1 407 | 1 408 | 1 409 | 1 410 | 1 411 | 1 412 | 1 413 | 1 414 | 1 415 | 1 416 | 1 417 | 1 418 | 1 419 | 1 420 | 1 421 | 1 422 | 1 423 | 1 424 | 1 425 | 1 426 | 1 427 | 1 428 | 1 429 | 1 430 | 1 431 | 2 432 | 2 433 | 2 434 | 2 435 | 2 436 | 2 437 | 2 438 | 2 439 | 2 440 | 2 441 | 2 442 | 2 443 | 2 444 | 2 445 | 2 446 | 2 447 | 2 448 | 2 449 | 2 450 | 2 451 | 2 452 | 2 453 | 2 454 | 2 455 | 2 456 | 2 457 | 2 458 | 2 459 | 2 460 | 2 461 | 2 462 | 2 463 | 2 464 | 2 465 | 2 466 | 2 467 | 2 468 | 2 469 | 2 470 | 2 471 | 2 472 | 2 473 | 2 474 | 2 475 | 2 476 | 2 477 | 2 478 | 2 479 | 2 480 | 2 481 | 2 482 | 2 483 | 2 484 | 2 485 | 2 486 | 2 487 | 2 488 | 2 489 | 2 490 | 2 491 | 2 492 | 2 493 | 2 494 | 2 495 | 2 496 | 2 497 | 2 498 | 2 499 | 2 500 | 2 501 | 2 502 | 2 503 | 2 504 | 2 505 | 2 506 | 2 507 | 2 508 | 2 509 | 2 510 | 2 511 | 2 512 | 2 513 | 2 514 | 2 515 | 2 516 | 2 517 | 2 518 | 2 519 | 2 520 | 2 521 | 2 522 | 2 523 | 2 524 | 2 525 | 2 526 | 2 527 | 2 528 | 2 529 | 2 530 | 2 531 | 2 532 | 2 533 | 2 534 | 2 535 | 2 536 | 2 537 | 2 538 | 2 539 | 2 540 | 2 541 | 2 542 | 2 543 | 2 544 | 2 545 | 2 546 | 2 547 | 2 548 | 2 549 | 2 550 | 2 551 | 2 552 | 2 553 | 2 554 | 2 555 | 2 556 | 2 557 | 2 558 | 2 559 | 2 560 | 2 561 | 2 562 | 2 563 | 2 564 | 2 565 | 2 566 | 2 567 | 2 568 | 2 569 | 2 570 | 2 571 | 2 572 | 2 573 | 2 574 | 2 575 | 2 576 | 2 577 | 2 578 | 2 579 | 2 580 | 2 581 | 2 582 | 2 583 | 2 584 | 2 585 | 2 586 | 2 587 | 2 588 | 2 589 | 2 590 | 2 591 | 2 592 | 2 593 | 2 594 | 2 595 | 2 596 | 2 597 | 2 598 | 2 599 | 2 600 | 2 601 | 2 602 | 2 603 | 2 604 | 2 605 | 2 606 | 2 607 | 2 608 | 2 609 | 2 610 | 2 611 | 2 612 | 2 613 | 2 614 | 2 615 | 2 616 | 2 617 | 2 618 | 2 619 | 2 620 | 2 621 | 2 622 | 2 623 | 2 624 | 2 625 | 2 626 | 2 627 | 2 628 | 2 629 | 2 630 | 2 631 | 2 632 | 2 633 | 2 634 | 2 635 | 2 636 | 2 637 | 2 638 | 2 639 | 2 640 | 2 641 | 2 642 | 2 643 | 2 644 | 2 645 | 2 646 | 2 647 | 2 648 | 2 649 | 2 650 | 2 651 | 2 652 | 2 653 | 2 654 | 2 655 | 2 
656 | 2 657 | 2 658 | 2 659 | 2 660 | 2 661 | 2 662 | 2 663 | 2 664 | 2 665 | 2 666 | 2 667 | 2 668 | 2 669 | 2 670 | 2 671 | 2 672 | 2 673 | 2 674 | 2 675 | 2 676 | 2 677 | 2 678 | 2 679 | 2 680 | 2 681 | 2 682 | 2 683 | 2 684 | 2 685 | 2 686 | 2 687 | 2 688 | 2 689 | 2 690 | 2 691 | 2 692 | 2 693 | 2 694 | 2 695 | 2 696 | 2 697 | 2 698 | 2 699 | 2 700 | 2 701 | 2 702 | 2 703 | 2 704 | 2 705 | 2 706 | 2 707 | 2 708 | 2 709 | 2 710 | 2 711 | 2 712 | 2 713 | 2 714 | 2 715 | 2 716 | 2 717 | 2 718 | 2 719 | 2 720 | 2 721 | 2 722 | 2 723 | 2 724 | 2 725 | 2 726 | 2 727 | 2 728 | 2 729 | 2 730 | 2 731 | 2 732 | 2 733 | 2 734 | 2 735 | 2 736 | 2 737 | 2 738 | 2 739 | 2 740 | 2 741 | 2 742 | 2 743 | 2 744 | 2 745 | 2 746 | 2 747 | 2 748 | 2 749 | 2 750 | 2 751 | 2 752 | 2 753 | 2 754 | 2 755 | 2 756 | 2 757 | 2 758 | 2 759 | 2 760 | 2 761 | 2 762 | 2 763 | 2 764 | 2 765 | 2 766 | 2 767 | 2 768 | 2 769 | 2 770 | 2 771 | 2 772 | 2 773 | 2 774 | 2 775 | 2 776 | 2 777 | 2 778 | 2 779 | 2 780 | 2 781 | 2 782 | 2 783 | 2 784 | 2 785 | 2 786 | 2 787 | 2 788 | 2 789 | 2 790 | 2 791 | 2 792 | 2 793 | 2 794 | 2 795 | 2 796 | 2 797 | 2 798 | 2 799 | 2 800 | 2 801 | 2 802 | 2 803 | 2 804 | 2 805 | 2 806 | 2 807 | 2 808 | 2 809 | 2 810 | 2 811 | 2 812 | 2 813 | 2 814 | 2 815 | 2 816 | 2 817 | 2 818 | 2 819 | 2 820 | 2 821 | 2 822 | 2 823 | 2 824 | 2 825 | 2 826 | 2 827 | 2 828 | 2 829 | 2 830 | 2 831 | 2 832 | 2 833 | 2 834 | 2 835 | 2 836 | 2 837 | 2 838 | 2 839 | 2 840 | 2 841 | 2 842 | 2 843 | 2 844 | 2 845 | 2 846 | 2 847 | 2 848 | 2 849 | 2 850 | 2 851 | 2 852 | 2 853 | 2 854 | 2 855 | 2 856 | 2 857 | 2 858 | 2 859 | 2 860 | 2 861 | 2 862 | 2 863 | 2 864 | 2 865 | 2 866 | 2 867 | 2 868 | 2 869 | 2 870 | 2 871 | 2 872 | 2 873 | 2 874 | 2 875 | 2 876 | 2 877 | 2 878 | 2 879 | 2 880 | 2 881 | 2 882 | 2 883 | 2 884 | 2 885 | 2 886 | 2 887 | 2 888 | 2 889 | 2 890 | 2 891 | 2 892 | 2 893 | 2 894 | 2 895 | 2 896 | 2 897 | 2 898 | 2 899 | 2 900 | 2 901 | 2 902 | 2 903 | 2 904 | 2 905 | 2 906 | 2 907 | 2 908 | 2 909 | 2 910 | 2 911 | 2 912 | 2 913 | 2 914 | 2 915 | 2 916 | 2 917 | 2 918 | 2 919 | 2 920 | 2 921 | 2 922 | 2 923 | 2 924 | 2 925 | 2 926 | 2 927 | 2 928 | 2 929 | 2 930 | 2 931 | 2 932 | 2 933 | 2 934 | 2 935 | 2 936 | 2 937 | 2 938 | 2 939 | 2 940 | 2 941 | 2 942 | 2 943 | 2 944 | 2 945 | 2 946 | 2 947 | 2 948 | 2 949 | 2 950 | 2 951 | 2 952 | 2 953 | 2 954 | 2 955 | 2 956 | 2 957 | 2 958 | 2 959 | 2 960 | 2 961 | 2 962 | 2 963 | 2 964 | 2 965 | 2 966 | 2 967 | 2 968 | 2 969 | 2 970 | 2 971 | 2 972 | 2 973 | 2 974 | 2 975 | 2 976 | 2 977 | 2 978 | 2 979 | 2 980 | 2 981 | 2 982 | 2 983 | 2 984 | 2 985 | 2 986 | 2 987 | 2 988 | 2 989 | 2 990 | 2 991 | 2 992 | 2 993 | 2 994 | 2 995 | 2 996 | 2 997 | 2 998 | 2 999 | 2 1000 | 2 1001 | 2 1002 | 2 1003 | 2 1004 | 2 1005 | 2 1006 | 2 1007 | 2 1008 | 2 1009 | 2 1010 | 2 1011 | 2 1012 | 2 1013 | 2 1014 | 2 1015 | 2 1016 | 2 1017 | 2 1018 | 2 1019 | 2 1020 | 2 1021 | 2 1022 | 2 1023 | 2 1024 | 2 1025 | 2 1026 | 2 1027 | 3 1028 | 3 1029 | 3 1030 | 3 1031 | 3 1032 | 3 1033 | 3 1034 | 3 1035 | 3 1036 | 3 1037 | 3 1038 | 3 1039 | 3 1040 | 3 1041 | 3 1042 | 3 1043 | 3 1044 | 3 1045 | 3 1046 | 3 1047 | 3 1048 | 3 1049 | 3 1050 | 3 1051 | 3 1052 | 3 1053 | 3 1054 | 3 1055 | 3 1056 | 3 1057 | 3 1058 | 3 1059 | 3 1060 | 3 1061 | 3 1062 | 3 1063 | 3 1064 | 3 1065 | 3 1066 | 3 1067 | 3 1068 | 3 1069 | 3 1070 | 3 1071 | 3 1072 | 3 1073 | 3 1074 | 3 1075 | 3 1076 | 3 1077 | 3 1078 | 3 1079 | 3 1080 | 3 1081 | 3 1082 | 3 1083 | 3 1084 | 3 1085 | 3 1086 | 3 1087 | 3 1088 | 3 
1089 | 3 1090 | 3 1091 | 3 1092 | 3 1093 | 3 1094 | 3 1095 | 3 1096 | 3 1097 | 3 1098 | 3 1099 | 3 1100 | 3 1101 | 3 1102 | 3 1103 | 3 1104 | 3 1105 | 3 1106 | 3 1107 | 3 1108 | 3 1109 | 3 1110 | 3 1111 | 3 1112 | 3 1113 | 3 1114 | 3 1115 | 3 1116 | 3 1117 | 3 1118 | 3 1119 | 3 1120 | 3 1121 | 3 1122 | 3 1123 | 3 1124 | 3 1125 | 3 1126 | 3 1127 | 3 1128 | 3 1129 | 3 1130 | 3 1131 | 3 1132 | 3 1133 | 3 1134 | 3 1135 | 3 1136 | 3 1137 | 3 1138 | 3 1139 | 3 1140 | 3 1141 | 3 1142 | 3 1143 | 3 1144 | 3 1145 | 3 1146 | 3 1147 | 3 1148 | 3 1149 | 3 1150 | 3 1151 | 3 1152 | 3 1153 | 3 1154 | 3 1155 | 3 1156 | 3 1157 | 3 1158 | 3 1159 | 3 1160 | 3 1161 | 3 1162 | 3 1163 | 3 1164 | 3 1165 | 3 1166 | 3 1167 | 3 1168 | 3 1169 | 3 1170 | 3 1171 | 3 1172 | 3 1173 | 3 1174 | 3 1175 | 3 1176 | 3 1177 | 3 1178 | 3 1179 | 3 1180 | 3 1181 | 3 1182 | 3 1183 | 3 1184 | 3 1185 | 3 1186 | 3 1187 | 3 1188 | 3 1189 | 3 1190 | 3 1191 | 3 1192 | 3 1193 | 3 1194 | 3 1195 | 3 1196 | 3 1197 | 3 1198 | 3 1199 | 3 1200 | 3 1201 | 3 1202 | 3 1203 | 3 1204 | 3 1205 | 3 1206 | 3 1207 | 3 1208 | 3 1209 | 3 1210 | 3 1211 | 3 1212 | 3 1213 | 3 1214 | 3 1215 | 3 1216 | 3 1217 | 3 1218 | 3 1219 | 3 1220 | 3 1221 | 3 1222 | 3 1223 | 3 1224 | 3 1225 | 3 1226 | 3 1227 | 3 1228 | 3 1229 | 3 1230 | 3 1231 | 3 1232 | 3 1233 | 3 1234 | 3 1235 | 3 1236 | 3 1237 | 3 1238 | 3 1239 | 3 1240 | 3 1241 | 3 1242 | 3 1243 | 3 1244 | 3 1245 | 3 1246 | 3 1247 | 3 1248 | 3 1249 | 3 1250 | 3 1251 | 3 1252 | 3 1253 | 3 1254 | 3 1255 | 3 1256 | 3 1257 | 3 1258 | 3 1259 | 3 1260 | 3 1261 | 3 1262 | 3 1263 | 3 1264 | 3 1265 | 3 1266 | 3 1267 | 3 1268 | 3 1269 | 3 1270 | 3 1271 | 3 1272 | 3 1273 | 3 1274 | 3 1275 | 3 1276 | 3 1277 | 3 1278 | 3 1279 | 3 1280 | 3 1281 | 3 1282 | 3 1283 | 3 1284 | 3 1285 | 3 1286 | 3 1287 | 3 1288 | 3 1289 | 3 1290 | 3 1291 | 3 1292 | 3 1293 | 3 1294 | 3 1295 | 3 1296 | 3 1297 | 3 1298 | 3 1299 | 3 1300 | 3 1301 | 3 1302 | 3 1303 | 3 1304 | 3 1305 | 3 1306 | 3 1307 | 3 1308 | -------------------------------------------------------------------------------- /data/scaledata/James+Berardinelli/rating.James+Berardinelli: -------------------------------------------------------------------------------- 1 | 0.05 2 | 0.05 3 | 0.05 4 | 0.05 5 | 0.09 6 | 0.09 7 | 0.1 8 | 0.1 9 | 0.1 10 | 0.1 11 | 0.1 12 | 0.1 13 | 0.11 14 | 0.14 15 | 0.15 16 | 0.15 17 | 0.15 18 | 0.15 19 | 0.15 20 | 0.16 21 | 0.17 22 | 0.18 23 | 0.18 24 | 0.2 25 | 0.2 26 | 0.2 27 | 0.2 28 | 0.2 29 | 0.2 30 | 0.2 31 | 0.2 32 | 0.2 33 | 0.2 34 | 0.2 35 | 0.22 36 | 0.25 37 | 0.25 38 | 0.25 39 | 0.25 40 | 0.25 41 | 0.25 42 | 0.25 43 | 0.25 44 | 0.25 45 | 0.25 46 | 0.25 47 | 0.26 48 | 0.26 49 | 0.27 50 | 0.28 51 | 0.28 52 | 0.28 53 | 0.28 54 | 0.29 55 | 0.29 56 | 0.29 57 | 0.29 58 | 0.3 59 | 0.3 60 | 0.3 61 | 0.3 62 | 0.3 63 | 0.3 64 | 0.3 65 | 0.3 66 | 0.3 67 | 0.3 68 | 0.3 69 | 0.3 70 | 0.3 71 | 0.3 72 | 0.3 73 | 0.3 74 | 0.3 75 | 0.3 76 | 0.3 77 | 0.3 78 | 0.3 79 | 0.3 80 | 0.3 81 | 0.3 82 | 0.3 83 | 0.3 84 | 0.3 85 | 0.3 86 | 0.3 87 | 0.3 88 | 0.3 89 | 0.3 90 | 0.3 91 | 0.31 92 | 0.31 93 | 0.31 94 | 0.31 95 | 0.31 96 | 0.32 97 | 0.32 98 | 0.33 99 | 0.33 100 | 0.33 101 | 0.33 102 | 0.33 103 | 0.34 104 | 0.34 105 | 0.34 106 | 0.34 107 | 0.35 108 | 0.35 109 | 0.35 110 | 0.35 111 | 0.35 112 | 0.35 113 | 0.35 114 | 0.35 115 | 0.35 116 | 0.35 117 | 0.35 118 | 0.35 119 | 0.35 120 | 0.35 121 | 0.35 122 | 0.35 123 | 0.36 124 | 0.36 125 | 0.36 126 | 0.37 127 | 0.37 128 | 0.38 129 | 0.38 130 | 0.38 131 | 0.38 132 | 0.38 133 | 0.38 134 | 0.38 135 | 0.39 136 | 0.39 137 | 0.39 138 
| 0.39 139 | 0.4 140 | 0.4 141 | 0.4 142 | 0.4 143 | 0.4 144 | 0.4 145 | 0.4 146 | 0.4 147 | 0.4 148 | 0.4 149 | 0.4 150 | 0.4 151 | 0.4 152 | 0.4 153 | 0.4 154 | 0.4 155 | 0.4 156 | 0.4 157 | 0.4 158 | 0.4 159 | 0.4 160 | 0.4 161 | 0.4 162 | 0.4 163 | 0.4 164 | 0.4 165 | 0.4 166 | 0.4 167 | 0.4 168 | 0.4 169 | 0.4 170 | 0.4 171 | 0.4 172 | 0.4 173 | 0.4 174 | 0.4 175 | 0.4 176 | 0.4 177 | 0.4 178 | 0.4 179 | 0.4 180 | 0.4 181 | 0.4 182 | 0.4 183 | 0.4 184 | 0.4 185 | 0.4 186 | 0.4 187 | 0.41 188 | 0.41 189 | 0.41 190 | 0.41 191 | 0.41 192 | 0.41 193 | 0.42 194 | 0.42 195 | 0.42 196 | 0.43 197 | 0.43 198 | 0.43 199 | 0.44 200 | 0.44 201 | 0.44 202 | 0.44 203 | 0.44 204 | 0.44 205 | 0.44 206 | 0.44 207 | 0.44 208 | 0.44 209 | 0.45 210 | 0.45 211 | 0.45 212 | 0.45 213 | 0.45 214 | 0.45 215 | 0.45 216 | 0.45 217 | 0.45 218 | 0.45 219 | 0.45 220 | 0.45 221 | 0.45 222 | 0.45 223 | 0.45 224 | 0.45 225 | 0.45 226 | 0.46 227 | 0.46 228 | 0.46 229 | 0.47 230 | 0.47 231 | 0.47 232 | 0.47 233 | 0.47 234 | 0.47 235 | 0.47 236 | 0.48 237 | 0.48 238 | 0.48 239 | 0.48 240 | 0.48 241 | 0.49 242 | 0.49 243 | 0.49 244 | 0.49 245 | 0.49 246 | 0.49 247 | 0.5 248 | 0.5 249 | 0.5 250 | 0.5 251 | 0.5 252 | 0.5 253 | 0.5 254 | 0.5 255 | 0.5 256 | 0.5 257 | 0.5 258 | 0.5 259 | 0.5 260 | 0.5 261 | 0.5 262 | 0.5 263 | 0.5 264 | 0.5 265 | 0.5 266 | 0.5 267 | 0.5 268 | 0.5 269 | 0.5 270 | 0.5 271 | 0.5 272 | 0.5 273 | 0.5 274 | 0.5 275 | 0.5 276 | 0.5 277 | 0.5 278 | 0.5 279 | 0.5 280 | 0.5 281 | 0.5 282 | 0.5 283 | 0.5 284 | 0.5 285 | 0.5 286 | 0.5 287 | 0.5 288 | 0.5 289 | 0.5 290 | 0.5 291 | 0.5 292 | 0.5 293 | 0.5 294 | 0.5 295 | 0.5 296 | 0.5 297 | 0.5 298 | 0.5 299 | 0.5 300 | 0.5 301 | 0.5 302 | 0.5 303 | 0.5 304 | 0.5 305 | 0.5 306 | 0.5 307 | 0.5 308 | 0.5 309 | 0.5 310 | 0.5 311 | 0.5 312 | 0.5 313 | 0.5 314 | 0.5 315 | 0.5 316 | 0.5 317 | 0.5 318 | 0.5 319 | 0.51 320 | 0.51 321 | 0.51 322 | 0.51 323 | 0.51 324 | 0.51 325 | 0.51 326 | 0.51 327 | 0.51 328 | 0.51 329 | 0.51 330 | 0.52 331 | 0.52 332 | 0.52 333 | 0.52 334 | 0.52 335 | 0.52 336 | 0.52 337 | 0.53 338 | 0.53 339 | 0.53 340 | 0.53 341 | 0.54 342 | 0.54 343 | 0.54 344 | 0.54 345 | 0.54 346 | 0.54 347 | 0.55 348 | 0.55 349 | 0.55 350 | 0.55 351 | 0.55 352 | 0.55 353 | 0.55 354 | 0.55 355 | 0.55 356 | 0.55 357 | 0.55 358 | 0.55 359 | 0.55 360 | 0.55 361 | 0.55 362 | 0.55 363 | 0.55 364 | 0.55 365 | 0.55 366 | 0.55 367 | 0.55 368 | 0.55 369 | 0.55 370 | 0.55 371 | 0.55 372 | 0.55 373 | 0.55 374 | 0.55 375 | 0.55 376 | 0.55 377 | 0.55 378 | 0.55 379 | 0.55 380 | 0.55 381 | 0.55 382 | 0.55 383 | 0.55 384 | 0.55 385 | 0.55 386 | 0.55 387 | 0.56 388 | 0.56 389 | 0.56 390 | 0.56 391 | 0.56 392 | 0.56 393 | 0.56 394 | 0.56 395 | 0.56 396 | 0.57 397 | 0.57 398 | 0.57 399 | 0.57 400 | 0.57 401 | 0.57 402 | 0.57 403 | 0.57 404 | 0.57 405 | 0.57 406 | 0.58 407 | 0.58 408 | 0.58 409 | 0.58 410 | 0.58 411 | 0.58 412 | 0.58 413 | 0.58 414 | 0.58 415 | 0.58 416 | 0.58 417 | 0.58 418 | 0.58 419 | 0.58 420 | 0.59 421 | 0.59 422 | 0.59 423 | 0.59 424 | 0.59 425 | 0.59 426 | 0.59 427 | 0.59 428 | 0.59 429 | 0.59 430 | 0.59 431 | 0.6 432 | 0.6 433 | 0.6 434 | 0.6 435 | 0.6 436 | 0.6 437 | 0.6 438 | 0.6 439 | 0.6 440 | 0.6 441 | 0.6 442 | 0.6 443 | 0.6 444 | 0.6 445 | 0.6 446 | 0.6 447 | 0.6 448 | 0.6 449 | 0.6 450 | 0.6 451 | 0.6 452 | 0.6 453 | 0.6 454 | 0.6 455 | 0.6 456 | 0.6 457 | 0.6 458 | 0.6 459 | 0.6 460 | 0.6 461 | 0.6 462 | 0.6 463 | 0.6 464 | 0.6 465 | 0.6 466 | 0.6 467 | 0.6 468 | 0.6 469 | 0.6 470 | 0.6 471 | 0.6 472 | 0.6 473 | 0.6 474 | 0.6 475 | 0.6 476 | 
0.6 477 | 0.6 478 | 0.6 479 | 0.6 480 | 0.6 481 | 0.6 482 | 0.6 483 | 0.6 484 | 0.6 485 | 0.6 486 | 0.6 487 | 0.6 488 | 0.6 489 | 0.6 490 | 0.6 491 | 0.6 492 | 0.6 493 | 0.6 494 | 0.6 495 | 0.6 496 | 0.6 497 | 0.6 498 | 0.6 499 | 0.6 500 | 0.6 501 | 0.6 502 | 0.6 503 | 0.6 504 | 0.6 505 | 0.6 506 | 0.6 507 | 0.6 508 | 0.6 509 | 0.6 510 | 0.6 511 | 0.6 512 | 0.6 513 | 0.6 514 | 0.6 515 | 0.6 516 | 0.6 517 | 0.6 518 | 0.6 519 | 0.6 520 | 0.6 521 | 0.6 522 | 0.6 523 | 0.6 524 | 0.6 525 | 0.6 526 | 0.6 527 | 0.6 528 | 0.6 529 | 0.6 530 | 0.6 531 | 0.6 532 | 0.6 533 | 0.6 534 | 0.6 535 | 0.6 536 | 0.6 537 | 0.6 538 | 0.6 539 | 0.6 540 | 0.6 541 | 0.6 542 | 0.6 543 | 0.6 544 | 0.6 545 | 0.6 546 | 0.6 547 | 0.6 548 | 0.6 549 | 0.61 550 | 0.61 551 | 0.61 552 | 0.61 553 | 0.61 554 | 0.61 555 | 0.61 556 | 0.61 557 | 0.61 558 | 0.61 559 | 0.61 560 | 0.61 561 | 0.62 562 | 0.62 563 | 0.62 564 | 0.62 565 | 0.62 566 | 0.62 567 | 0.62 568 | 0.63 569 | 0.63 570 | 0.63 571 | 0.63 572 | 0.63 573 | 0.63 574 | 0.63 575 | 0.63 576 | 0.63 577 | 0.63 578 | 0.63 579 | 0.63 580 | 0.63 581 | 0.63 582 | 0.63 583 | 0.63 584 | 0.64 585 | 0.64 586 | 0.64 587 | 0.64 588 | 0.64 589 | 0.64 590 | 0.65 591 | 0.65 592 | 0.65 593 | 0.65 594 | 0.65 595 | 0.65 596 | 0.65 597 | 0.65 598 | 0.65 599 | 0.65 600 | 0.65 601 | 0.65 602 | 0.65 603 | 0.65 604 | 0.65 605 | 0.65 606 | 0.65 607 | 0.65 608 | 0.65 609 | 0.65 610 | 0.65 611 | 0.65 612 | 0.65 613 | 0.65 614 | 0.65 615 | 0.65 616 | 0.65 617 | 0.65 618 | 0.65 619 | 0.65 620 | 0.65 621 | 0.65 622 | 0.65 623 | 0.65 624 | 0.65 625 | 0.65 626 | 0.65 627 | 0.65 628 | 0.65 629 | 0.65 630 | 0.65 631 | 0.65 632 | 0.65 633 | 0.65 634 | 0.65 635 | 0.65 636 | 0.65 637 | 0.65 638 | 0.66 639 | 0.66 640 | 0.66 641 | 0.66 642 | 0.66 643 | 0.66 644 | 0.66 645 | 0.66 646 | 0.66 647 | 0.66 648 | 0.66 649 | 0.66 650 | 0.67 651 | 0.67 652 | 0.67 653 | 0.67 654 | 0.67 655 | 0.67 656 | 0.67 657 | 0.67 658 | 0.67 659 | 0.68 660 | 0.68 661 | 0.68 662 | 0.68 663 | 0.68 664 | 0.68 665 | 0.68 666 | 0.68 667 | 0.69 668 | 0.69 669 | 0.69 670 | 0.69 671 | 0.69 672 | 0.69 673 | 0.69 674 | 0.69 675 | 0.69 676 | 0.69 677 | 0.69 678 | 0.7 679 | 0.7 680 | 0.7 681 | 0.7 682 | 0.7 683 | 0.7 684 | 0.7 685 | 0.7 686 | 0.7 687 | 0.7 688 | 0.7 689 | 0.7 690 | 0.7 691 | 0.7 692 | 0.7 693 | 0.7 694 | 0.7 695 | 0.7 696 | 0.7 697 | 0.7 698 | 0.7 699 | 0.7 700 | 0.7 701 | 0.7 702 | 0.7 703 | 0.7 704 | 0.7 705 | 0.7 706 | 0.7 707 | 0.7 708 | 0.7 709 | 0.7 710 | 0.7 711 | 0.7 712 | 0.7 713 | 0.7 714 | 0.7 715 | 0.7 716 | 0.7 717 | 0.7 718 | 0.7 719 | 0.7 720 | 0.7 721 | 0.7 722 | 0.7 723 | 0.7 724 | 0.7 725 | 0.7 726 | 0.7 727 | 0.7 728 | 0.7 729 | 0.7 730 | 0.7 731 | 0.7 732 | 0.7 733 | 0.7 734 | 0.7 735 | 0.7 736 | 0.7 737 | 0.7 738 | 0.7 739 | 0.7 740 | 0.7 741 | 0.7 742 | 0.7 743 | 0.7 744 | 0.7 745 | 0.7 746 | 0.7 747 | 0.7 748 | 0.7 749 | 0.7 750 | 0.7 751 | 0.7 752 | 0.7 753 | 0.7 754 | 0.7 755 | 0.7 756 | 0.7 757 | 0.7 758 | 0.7 759 | 0.7 760 | 0.7 761 | 0.7 762 | 0.7 763 | 0.7 764 | 0.7 765 | 0.7 766 | 0.7 767 | 0.7 768 | 0.7 769 | 0.7 770 | 0.7 771 | 0.7 772 | 0.7 773 | 0.7 774 | 0.7 775 | 0.7 776 | 0.7 777 | 0.7 778 | 0.7 779 | 0.7 780 | 0.7 781 | 0.7 782 | 0.7 783 | 0.7 784 | 0.7 785 | 0.7 786 | 0.7 787 | 0.7 788 | 0.7 789 | 0.7 790 | 0.7 791 | 0.7 792 | 0.7 793 | 0.7 794 | 0.7 795 | 0.7 796 | 0.7 797 | 0.7 798 | 0.7 799 | 0.7 800 | 0.7 801 | 0.7 802 | 0.7 803 | 0.7 804 | 0.7 805 | 0.7 806 | 0.7 807 | 0.7 808 | 0.7 809 | 0.7 810 | 0.7 811 | 0.7 812 | 0.7 813 | 0.7 814 | 0.7 815 | 0.7 816 | 0.7 817 | 0.7 818 | 0.7 
819 | 0.7 820 | 0.7 821 | 0.7 822 | 0.7 823 | 0.7 824 | 0.7 825 | 0.7 826 | 0.7 827 | 0.7 828 | 0.7 829 | 0.7 830 | 0.7 831 | 0.7 832 | 0.7 833 | 0.7 834 | 0.7 835 | 0.71 836 | 0.71 837 | 0.71 838 | 0.71 839 | 0.71 840 | 0.71 841 | 0.71 842 | 0.71 843 | 0.71 844 | 0.72 845 | 0.72 846 | 0.72 847 | 0.72 848 | 0.72 849 | 0.72 850 | 0.72 851 | 0.72 852 | 0.72 853 | 0.72 854 | 0.73 855 | 0.73 856 | 0.73 857 | 0.73 858 | 0.73 859 | 0.73 860 | 0.73 861 | 0.73 862 | 0.73 863 | 0.73 864 | 0.73 865 | 0.73 866 | 0.74 867 | 0.74 868 | 0.74 869 | 0.74 870 | 0.74 871 | 0.74 872 | 0.74 873 | 0.74 874 | 0.74 875 | 0.74 876 | 0.74 877 | 0.74 878 | 0.75 879 | 0.75 880 | 0.75 881 | 0.75 882 | 0.75 883 | 0.75 884 | 0.75 885 | 0.75 886 | 0.75 887 | 0.75 888 | 0.75 889 | 0.75 890 | 0.75 891 | 0.75 892 | 0.75 893 | 0.75 894 | 0.75 895 | 0.75 896 | 0.75 897 | 0.75 898 | 0.75 899 | 0.75 900 | 0.75 901 | 0.75 902 | 0.75 903 | 0.75 904 | 0.75 905 | 0.75 906 | 0.75 907 | 0.75 908 | 0.75 909 | 0.75 910 | 0.75 911 | 0.75 912 | 0.75 913 | 0.75 914 | 0.75 915 | 0.75 916 | 0.75 917 | 0.75 918 | 0.75 919 | 0.75 920 | 0.75 921 | 0.75 922 | 0.75 923 | 0.75 924 | 0.75 925 | 0.75 926 | 0.75 927 | 0.75 928 | 0.75 929 | 0.75 930 | 0.75 931 | 0.75 932 | 0.75 933 | 0.75 934 | 0.75 935 | 0.75 936 | 0.75 937 | 0.75 938 | 0.75 939 | 0.75 940 | 0.75 941 | 0.75 942 | 0.75 943 | 0.75 944 | 0.75 945 | 0.75 946 | 0.75 947 | 0.75 948 | 0.75 949 | 0.75 950 | 0.75 951 | 0.75 952 | 0.75 953 | 0.75 954 | 0.75 955 | 0.75 956 | 0.75 957 | 0.75 958 | 0.75 959 | 0.75 960 | 0.75 961 | 0.75 962 | 0.76 963 | 0.76 964 | 0.76 965 | 0.76 966 | 0.76 967 | 0.76 968 | 0.76 969 | 0.76 970 | 0.76 971 | 0.76 972 | 0.76 973 | 0.77 974 | 0.77 975 | 0.77 976 | 0.77 977 | 0.77 978 | 0.77 979 | 0.77 980 | 0.77 981 | 0.77 982 | 0.77 983 | 0.77 984 | 0.77 985 | 0.77 986 | 0.77 987 | 0.77 988 | 0.77 989 | 0.78 990 | 0.78 991 | 0.78 992 | 0.78 993 | 0.78 994 | 0.78 995 | 0.78 996 | 0.78 997 | 0.78 998 | 0.78 999 | 0.78 1000 | 0.78 1001 | 0.78 1002 | 0.78 1003 | 0.78 1004 | 0.79 1005 | 0.79 1006 | 0.79 1007 | 0.79 1008 | 0.79 1009 | 0.79 1010 | 0.79 1011 | 0.79 1012 | 0.79 1013 | 0.79 1014 | 0.79 1015 | 0.79 1016 | 0.79 1017 | 0.79 1018 | 0.79 1019 | 0.79 1020 | 0.79 1021 | 0.79 1022 | 0.79 1023 | 0.79 1024 | 0.79 1025 | 0.79 1026 | 0.79 1027 | 0.8 1028 | 0.8 1029 | 0.8 1030 | 0.8 1031 | 0.8 1032 | 0.8 1033 | 0.8 1034 | 0.8 1035 | 0.8 1036 | 0.8 1037 | 0.8 1038 | 0.8 1039 | 0.8 1040 | 0.8 1041 | 0.8 1042 | 0.8 1043 | 0.8 1044 | 0.8 1045 | 0.8 1046 | 0.8 1047 | 0.8 1048 | 0.8 1049 | 0.8 1050 | 0.8 1051 | 0.8 1052 | 0.8 1053 | 0.8 1054 | 0.8 1055 | 0.8 1056 | 0.8 1057 | 0.8 1058 | 0.8 1059 | 0.8 1060 | 0.8 1061 | 0.8 1062 | 0.8 1063 | 0.8 1064 | 0.8 1065 | 0.8 1066 | 0.8 1067 | 0.8 1068 | 0.8 1069 | 0.8 1070 | 0.8 1071 | 0.8 1072 | 0.8 1073 | 0.8 1074 | 0.8 1075 | 0.8 1076 | 0.8 1077 | 0.8 1078 | 0.8 1079 | 0.8 1080 | 0.8 1081 | 0.8 1082 | 0.8 1083 | 0.8 1084 | 0.8 1085 | 0.8 1086 | 0.8 1087 | 0.8 1088 | 0.8 1089 | 0.8 1090 | 0.8 1091 | 0.8 1092 | 0.8 1093 | 0.8 1094 | 0.8 1095 | 0.8 1096 | 0.8 1097 | 0.8 1098 | 0.8 1099 | 0.8 1100 | 0.8 1101 | 0.8 1102 | 0.8 1103 | 0.8 1104 | 0.8 1105 | 0.8 1106 | 0.8 1107 | 0.8 1108 | 0.8 1109 | 0.8 1110 | 0.8 1111 | 0.8 1112 | 0.8 1113 | 0.8 1114 | 0.8 1115 | 0.8 1116 | 0.8 1117 | 0.8 1118 | 0.8 1119 | 0.8 1120 | 0.8 1121 | 0.8 1122 | 0.8 1123 | 0.8 1124 | 0.8 1125 | 0.8 1126 | 0.8 1127 | 0.8 1128 | 0.8 1129 | 0.8 1130 | 0.8 1131 | 0.8 1132 | 0.8 1133 | 0.8 1134 | 0.8 1135 | 0.8 1136 | 0.81 1137 | 0.81 1138 | 0.81 1139 | 0.81 1140 | 
0.81 1141 | 0.81 1142 | 0.81 1143 | 0.81 1144 | 0.82 1145 | 0.82 1146 | 0.82 1147 | 0.82 1148 | 0.82 1149 | 0.82 1150 | 0.82 1151 | 0.82 1152 | 0.82 1153 | 0.82 1154 | 0.82 1155 | 0.82 1156 | 0.83 1157 | 0.83 1158 | 0.83 1159 | 0.83 1160 | 0.83 1161 | 0.84 1162 | 0.84 1163 | 0.84 1164 | 0.84 1165 | 0.84 1166 | 0.84 1167 | 0.84 1168 | 0.84 1169 | 0.84 1170 | 0.84 1171 | 0.84 1172 | 0.84 1173 | 0.84 1174 | 0.85 1175 | 0.85 1176 | 0.85 1177 | 0.85 1178 | 0.85 1179 | 0.85 1180 | 0.85 1181 | 0.85 1182 | 0.85 1183 | 0.85 1184 | 0.85 1185 | 0.85 1186 | 0.85 1187 | 0.85 1188 | 0.85 1189 | 0.85 1190 | 0.85 1191 | 0.85 1192 | 0.85 1193 | 0.85 1194 | 0.85 1195 | 0.85 1196 | 0.85 1197 | 0.85 1198 | 0.85 1199 | 0.85 1200 | 0.85 1201 | 0.85 1202 | 0.85 1203 | 0.85 1204 | 0.85 1205 | 0.85 1206 | 0.85 1207 | 0.85 1208 | 0.85 1209 | 0.85 1210 | 0.85 1211 | 0.85 1212 | 0.85 1213 | 0.85 1214 | 0.85 1215 | 0.85 1216 | 0.85 1217 | 0.86 1218 | 0.86 1219 | 0.86 1220 | 0.86 1221 | 0.86 1222 | 0.86 1223 | 0.86 1224 | 0.86 1225 | 0.86 1226 | 0.87 1227 | 0.87 1228 | 0.87 1229 | 0.87 1230 | 0.87 1231 | 0.88 1232 | 0.88 1233 | 0.88 1234 | 0.88 1235 | 0.88 1236 | 0.88 1237 | 0.88 1238 | 0.88 1239 | 0.88 1240 | 0.88 1241 | 0.89 1242 | 0.89 1243 | 0.89 1244 | 0.89 1245 | 0.89 1246 | 0.89 1247 | 0.9 1248 | 0.9 1249 | 0.9 1250 | 0.9 1251 | 0.9 1252 | 0.9 1253 | 0.9 1254 | 0.9 1255 | 0.9 1256 | 0.9 1257 | 0.9 1258 | 0.9 1259 | 0.9 1260 | 0.9 1261 | 0.9 1262 | 0.9 1263 | 0.9 1264 | 0.9 1265 | 0.9 1266 | 0.9 1267 | 0.9 1268 | 0.9 1269 | 0.9 1270 | 0.9 1271 | 0.9 1272 | 0.9 1273 | 0.9 1274 | 0.9 1275 | 0.9 1276 | 0.9 1277 | 0.91 1278 | 0.91 1279 | 0.91 1280 | 0.91 1281 | 0.92 1282 | 0.92 1283 | 0.92 1284 | 0.92 1285 | 0.92 1286 | 0.93 1287 | 0.93 1288 | 0.93 1289 | 0.93 1290 | 0.93 1291 | 0.93 1292 | 0.94 1293 | 0.94 1294 | 0.95 1295 | 0.95 1296 | 0.95 1297 | 0.95 1298 | 0.95 1299 | 0.95 1300 | 0.95 1301 | 0.95 1302 | 0.96 1303 | 0.96 1304 | 0.96 1305 | 0.97 1306 | 0.97 1307 | 0.99 1308 | -------------------------------------------------------------------------------- /data/scaledata/Scott+Renshaw/id.Scott+Renshaw: -------------------------------------------------------------------------------- 1 | 11961 2 | 13915 3 | 2790 4 | 3285 5 | 10264 6 | 11679 7 | 16619 8 | 18008 9 | 2531 10 | 25920 11 | 2950 12 | 5450 13 | 5820 14 | 7391 15 | 8622 16 | 12390 17 | 13548 18 | 14039 19 | 17010 20 | 17434 21 | 2107 22 | 21249 23 | 2193 24 | 2426 25 | 24553 26 | 2518 27 | 26544 28 | 26790 29 | 2750 30 | 3031 31 | 3455 32 | 3493 33 | 5931 34 | 6317 35 | 6685 36 | 6823 37 | 7541 38 | 7743 39 | 8184 40 | 8668 41 | 8901 42 | 9060 43 | 10083 44 | 11564 45 | 14453 46 | 14516 47 | 15765 48 | 15818 49 | 16503 50 | 16884 51 | 18020 52 | 18438 53 | 18538 54 | 19320 55 | 20210 56 | 20302 57 | 20811 58 | 2103 59 | 21483 60 | 2166 61 | 2194 62 | 2196 63 | 2261 64 | 2291 65 | 23156 66 | 23381 67 | 2339 68 | 2364 69 | 24059 70 | 24190 71 | 2419 72 | 2483 73 | 24977 74 | 2504 75 | 25739 76 | 2585 77 | 26100 78 | 2631 79 | 26698 80 | 2677 81 | 26786 82 | 26794 83 | 2760 84 | 2803 85 | 3007 86 | 3081 87 | 3087 88 | 3154 89 | 3206 90 | 3306 91 | 3349 92 | 3367 93 | 3509 94 | 5444 95 | 5455 96 | 5740 97 | 5763 98 | 5852 99 | 5988 100 | 6096 101 | 6208 102 | 6217 103 | 6412 104 | 6545 105 | 6567 106 | 6721 107 | 6929 108 | 7066 109 | 7359 110 | 7502 111 | 7715 112 | 7791 113 | 8423 114 | 8590 115 | 8617 116 | 10225 117 | 10800 118 | 10871 119 | 11207 120 | 11222 121 | 11463 122 | 12050 123 | 12360 124 | 12650 125 | 12859 126 | 13841 127 | 13878 128 | 14097 129 
| 14486 130 | 14669 131 | 14953 132 | 15205 133 | 15261 134 | 15394 135 | 15753 136 | 16090 137 | 16115 138 | 16977 139 | 17198 140 | 17643 141 | 17774 142 | 18146 143 | 18499 144 | 18659 145 | 18944 146 | 19040 147 | 19475 148 | 19978 149 | 20033 150 | 20184 151 | 20635 152 | 21238 153 | 21329 154 | 21567 155 | 21850 156 | 22028 157 | 2217 158 | 22729 159 | 2272 160 | 22918 161 | 2304 162 | 23261 163 | 2329 164 | 2336 165 | 23582 166 | 23652 167 | 2375 168 | 2395 169 | 24020 170 | 24023 171 | 24309 172 | 2453 173 | 24674 174 | 24889 175 | 2495 176 | 25020 177 | 25021 178 | 25257 179 | 25412 180 | 25413 181 | 25448 182 | 2559 183 | 25738 184 | 26229 185 | 26546 186 | 2676 187 | 2684 188 | 26930 189 | 2738 190 | 2768 191 | 2951 192 | 3027 193 | 3065 194 | 3070 195 | 3092 196 | 3193 197 | 3231 198 | 3247 199 | 3339 200 | 3377 201 | 3440 202 | 3515 203 | 3595 204 | 5497 205 | 5542 206 | 5663 207 | 5749 208 | 5784 209 | 5881 210 | 5921 211 | 5972 212 | 6035 213 | 6086 214 | 6201 215 | 6257 216 | 6337 217 | 6748 218 | 6764 219 | 6830 220 | 6894 221 | 6925 222 | 7041 223 | 7076 224 | 7156 225 | 7218 226 | 7260 227 | 7300 228 | 7342 229 | 8161 230 | 8217 231 | 8239 232 | 8412 233 | 8714 234 | 8801 235 | 8891 236 | 9049 237 | 9181 238 | 9372 239 | 9561 240 | 10201 241 | 10290 242 | 10611 243 | 10913 244 | 11065 245 | 11343 246 | 11632 247 | 11813 248 | 11912 249 | 12135 250 | 12249 251 | 12265 252 | 12564 253 | 12689 254 | 12763 255 | 13131 256 | 13145 257 | 13470 258 | 13600 259 | 13672 260 | 13800 261 | 13974 262 | 14081 263 | 14220 264 | 14283 265 | 14339 266 | 14596 267 | 14852 268 | 14928 269 | 14959 270 | 15088 271 | 15107 272 | 15234 273 | 15378 274 | 15564 275 | 15745 276 | 16025 277 | 16074 278 | 16209 279 | 16640 280 | 16674 281 | 16749 282 | 17284 283 | 17388 284 | 17539 285 | 17785 286 | 17864 287 | 17892 288 | 18207 289 | 18296 290 | 18433 291 | 18767 292 | 19126 293 | 19242 294 | 19602 295 | 19815 296 | 20215 297 | 20252 298 | 20341 299 | 20622 300 | 2066 301 | 20995 302 | 21411 303 | 2142 304 | 21591 305 | 21713 306 | 2198 307 | 2201 308 | 22069 309 | 22083 310 | 2216 311 | 22223 312 | 2235 313 | 2257 314 | 22615 315 | 22652 316 | 22946 317 | 23026 318 | 2310 319 | 2323 320 | 23430 321 | 23473 322 | 2351 323 | 23700 324 | 2387 325 | 24021 326 | 24061 327 | 2415 328 | 2446 329 | 24673 330 | 2494 331 | 24979 332 | 24981 333 | 25018 334 | 25131 335 | 25150 336 | 2520 337 | 2555 338 | 25575 339 | 25755 340 | 25922 341 | 2604 342 | 2619 343 | 26230 344 | 26607 345 | 26699 346 | 26723 347 | 27066 348 | 2706 349 | 27276 350 | 2830 351 | 2883 352 | 2947 353 | 3054 354 | 3074 355 | 3188 356 | 3218 357 | 3232 358 | 3256 359 | 3287 360 | 3292 361 | 3463 362 | 3518 363 | 3597 364 | 3604 365 | 5454 366 | 5456 367 | 5496 368 | 5691 369 | 5703 370 | 5810 371 | 5833 372 | 5848 373 | 5961 374 | 6034 375 | 6195 376 | 6252 377 | 6386 378 | 6488 379 | 6496 380 | 6499 381 | 6516 382 | 6581 383 | 6646 384 | 6719 385 | 6767 386 | 6877 387 | 6932 388 | 6935 389 | 6966 390 | 7106 391 | 7178 392 | 7190 393 | 7266 394 | 7378 395 | 7403 396 | 7476 397 | 7477 398 | 7594 399 | 7655 400 | 7779 401 | 8311 402 | 8435 403 | 8480 404 | 8507 405 | 8638 406 | 9103 407 | 9368 408 | 9762 409 | 9780 410 | 9943 411 | 10436 412 | 10485 413 | 10988 414 | 11043 415 | 11395 416 | 11786 417 | 12029 418 | 12154 419 | 12228 420 | 12460 421 | 12553 422 | 12883 423 | 12984 424 | 13195 425 | 13339 426 | 13653 427 | 13688 428 | 13729 429 | 14008 430 | 14287 431 | 14358 432 | 14577 433 | 14940 434 | 15040 435 | 15110 436 | 15117 437 | 
15323 438 | 15518 439 | 15528 440 | 15551 441 | 15609 442 | 15698 443 | 15700 444 | 15808 445 | 16126 446 | 16273 447 | 16736 448 | 16918 449 | 17498 450 | 17565 451 | 17836 452 | 17991 453 | 18228 454 | 18584 455 | 18923 456 | 19062 457 | 19182 458 | 19294 459 | 19458 460 | 19735 461 | 19802 462 | 19936 463 | 20115 464 | 20478 465 | 20786 466 | 20805 467 | 21147 468 | 21287 469 | 2162 470 | 21922 471 | 2197 472 | 2204 473 | 22056 474 | 2222 475 | 2225 476 | 22281 477 | 22306 478 | 2247 479 | 22531 480 | 23003 481 | 23083 482 | 23199 483 | 2325 484 | 2330 485 | 23366 486 | 23396 487 | 2371 488 | 2390 489 | 24022 490 | 24187 491 | 24188 492 | 24189 493 | 24308 494 | 24310 495 | 24533 496 | 2465 497 | 24675 498 | 24685 499 | 2480 500 | 2491 501 | 25019 502 | 2511 503 | 2524 504 | 25449 505 | 25490 506 | 25571 507 | 25573 508 | 25574 509 | 2560 510 | 2575 511 | 25921 512 | 2638 513 | 26425 514 | 26545 515 | 26722 516 | 2675 517 | 26873 518 | 2692 519 | 27067 520 | 27068 521 | 27169 522 | 27170 523 | 27262 524 | 2726 525 | 2778 526 | 2959 527 | 3016 528 | 3040 529 | 3125 530 | 3143 531 | 3195 532 | 3212 533 | 3236 534 | 3301 535 | 3341 536 | 3347 537 | 3362 538 | 3538 539 | 5449 540 | 5539 541 | 5555 542 | 5664 543 | 5948 544 | 5954 545 | 5997 546 | 6007 547 | 6085 548 | 6193 549 | 6227 550 | 6253 551 | 6330 552 | 6342 553 | 6481 554 | 6524 555 | 6569 556 | 6660 557 | 6667 558 | 6675 559 | 6921 560 | 7003 561 | 7114 562 | 7116 563 | 7171 564 | 7217 565 | 7238 566 | 7277 567 | 7311 568 | 7408 569 | 7533 570 | 7591 571 | 7604 572 | 7689 573 | 7726 574 | 7896 575 | 8109 576 | 8395 577 | 8543 578 | 8611 579 | 8702 580 | 8779 581 | 8811 582 | 9156 583 | 9208 584 | 9292 585 | 9453 586 | 9614 587 | 9637 588 | 9870 589 | 10121 590 | 10145 591 | 10458 592 | 10535 593 | 10841 594 | 10957 595 | 10990 596 | 11334 597 | 11721 598 | 11985 599 | 12104 600 | 12821 601 | 12936 602 | 12979 603 | 13322 604 | 13786 605 | 13904 606 | 14176 607 | 14241 608 | 14559 609 | 14626 610 | 14935 611 | 15207 612 | 15316 613 | 15621 614 | 15916 615 | 15982 616 | 16554 617 | 16602 618 | 16722 619 | 17258 620 | 17302 621 | 17349 622 | 17516 623 | 18268 624 | 18304 625 | 18858 626 | 18866 627 | 19011 628 | 19137 629 | 19339 630 | 19700 631 | 19921 632 | 20439 633 | 20582 634 | 20673 635 | 20674 636 | 20696 637 | 2114 638 | 2130 639 | 21421 640 | 2144 641 | 21507 642 | 2161 643 | 21687 644 | 21798 645 | 21995 646 | 2199 647 | 22126 648 | 22189 649 | 2230 650 | 2241 651 | 22467 652 | 2269 653 | 22743 654 | 2288 655 | 23242 656 | 23308 657 | 23344 658 | 2342 659 | 2352 660 | 23560 661 | 23701 662 | 2377 663 | 23804 664 | 23805 665 | 2393 666 | 24058 667 | 24060 668 | 24204 669 | 2423 670 | 24311 671 | 2482 672 | 24978 673 | 25129 674 | 25130 675 | 2571 676 | 25923 677 | 25991 678 | 25992 679 | 26426 680 | 26427 681 | 26547 682 | 2670 683 | 26875 684 | 26929 685 | 2696 686 | 2749 687 | 2808 688 | 3018 689 | 3064 690 | 3095 691 | 3139 692 | 3198 693 | 3276 694 | 3320 695 | 3404 696 | 3414 697 | 3491 698 | 3593 699 | 5447 700 | 5607 701 | 5615 702 | 5637 703 | 5683 704 | 5688 705 | 5694 706 | 5781 707 | 5896 708 | 5971 709 | 6038 710 | 6068 711 | 6106 712 | 6175 713 | 6194 714 | 6255 715 | 6289 716 | 6515 717 | 6538 718 | 6549 719 | 6579 720 | 6662 721 | 6717 722 | 6772 723 | 6778 724 | 6817 725 | 6879 726 | 7010 727 | 7048 728 | 7187 729 | 7321 730 | 7380 731 | 7512 732 | 7637 733 | 7648 734 | 7836 735 | 8453 736 | 8495 737 | 8569 738 | 8884 739 | 8978 740 | 9007 741 | 9122 742 | 9277 743 | 9383 744 | 9671 745 | 10101 746 | 10184 
747 | 10873 748 | 11585 749 | 11771 750 | 11849 751 | 11923 752 | 12225 753 | 13922 754 | 14421 755 | 14521 756 | 14687 757 | 15010 758 | 15590 759 | 15735 760 | 16694 761 | 17700 762 | 17725 763 | 18096 764 | 18596 765 | 18607 766 | 19529 767 | 2064 768 | 2133 769 | 2150 770 | 22142 771 | 2215 772 | 22233 773 | 2224 774 | 2242 775 | 2265 776 | 2309 777 | 2359 778 | 2383 779 | 24019 780 | 2467 781 | 24980 782 | 2501 783 | 25153 784 | 2516 785 | 25260 786 | 25314 787 | 2546 788 | 25572 789 | 2602 790 | 26104 791 | 26608 792 | 2680 793 | 2719 794 | 27261 795 | 27274 796 | 27275 797 | 2769 798 | 2771 799 | 2895 800 | 2955 801 | 3046 802 | 3118 803 | 3210 804 | 3251 805 | 3265 806 | 3360 807 | 3443 808 | 3448 809 | 5453 810 | 5783 811 | 5795 812 | 6048 813 | 6084 814 | 6241 815 | 6305 816 | 6381 817 | 6473 818 | 6504 819 | 6617 820 | 6976 821 | 7036 822 | 7060 823 | 7306 824 | 7464 825 | 7567 826 | 7823 827 | 8162 828 | 8262 829 | 8754 830 | 9916 831 | 10065 832 | 10109 833 | 10329 834 | 10943 835 | 13120 836 | 13375 837 | 14809 838 | 15295 839 | 15477 840 | 15783 841 | 16539 842 | 18054 843 | 21135 844 | 2149 845 | 21533 846 | 21671 847 | 22372 848 | 2260 849 | 2332 850 | 24433 851 | 2450 852 | 2579 853 | 26095 854 | 2632 855 | 26789 856 | 26795 857 | 27069 858 | 27273 859 | 2793 860 | 2988 861 | 3146 862 | 3192 863 | 3267 864 | 3375 865 | 5448 866 | 5452 867 | 5512 868 | 5687 869 | 5831 870 | 6108 871 | 6354 872 | 6401 873 | 6509 874 | 6829 875 | 6979 876 | 7547 877 | 8333 878 | 8935 879 | 9518 880 | 9703 881 | 9823 882 | 10203 883 | 12733 884 | 13579 885 | 15811 886 | 19282 887 | 21452 888 | 2203 889 | 2206 890 | 2298 891 | 27260 892 | 2820 893 | 2885 894 | 3013 895 | 3134 896 | 3283 897 | 5460 898 | 5754 899 | 6072 900 | 6371 901 | 6892 902 | 8645 903 | -------------------------------------------------------------------------------- /data/scaledata/Scott+Renshaw/label.3class.Scott+Renshaw: -------------------------------------------------------------------------------- 1 | 0 2 | 0 3 | 0 4 | 0 5 | 0 6 | 0 7 | 0 8 | 0 9 | 0 10 | 0 11 | 0 12 | 0 13 | 0 14 | 0 15 | 0 16 | 0 17 | 0 18 | 0 19 | 0 20 | 0 21 | 0 22 | 0 23 | 0 24 | 0 25 | 0 26 | 0 27 | 0 28 | 0 29 | 0 30 | 0 31 | 0 32 | 0 33 | 0 34 | 0 35 | 0 36 | 0 37 | 0 38 | 0 39 | 0 40 | 0 41 | 0 42 | 0 43 | 0 44 | 0 45 | 0 46 | 0 47 | 0 48 | 0 49 | 0 50 | 0 51 | 0 52 | 0 53 | 0 54 | 0 55 | 0 56 | 0 57 | 0 58 | 0 59 | 0 60 | 0 61 | 0 62 | 0 63 | 0 64 | 0 65 | 0 66 | 0 67 | 0 68 | 0 69 | 0 70 | 0 71 | 0 72 | 0 73 | 0 74 | 0 75 | 0 76 | 0 77 | 0 78 | 0 79 | 0 80 | 0 81 | 0 82 | 0 83 | 0 84 | 0 85 | 0 86 | 0 87 | 0 88 | 0 89 | 0 90 | 0 91 | 0 92 | 0 93 | 0 94 | 0 95 | 0 96 | 0 97 | 0 98 | 0 99 | 0 100 | 0 101 | 0 102 | 0 103 | 0 104 | 0 105 | 0 106 | 0 107 | 0 108 | 0 109 | 0 110 | 0 111 | 0 112 | 0 113 | 0 114 | 0 115 | 0 116 | 0 117 | 0 118 | 0 119 | 0 120 | 0 121 | 0 122 | 0 123 | 0 124 | 0 125 | 0 126 | 0 127 | 0 128 | 0 129 | 0 130 | 0 131 | 0 132 | 0 133 | 0 134 | 0 135 | 0 136 | 0 137 | 0 138 | 0 139 | 0 140 | 0 141 | 0 142 | 0 143 | 0 144 | 0 145 | 0 146 | 0 147 | 0 148 | 0 149 | 0 150 | 0 151 | 0 152 | 0 153 | 0 154 | 0 155 | 0 156 | 0 157 | 0 158 | 0 159 | 0 160 | 0 161 | 0 162 | 0 163 | 0 164 | 0 165 | 0 166 | 0 167 | 0 168 | 0 169 | 0 170 | 0 171 | 0 172 | 0 173 | 0 174 | 0 175 | 0 176 | 0 177 | 0 178 | 0 179 | 0 180 | 0 181 | 0 182 | 0 183 | 0 184 | 0 185 | 0 186 | 0 187 | 0 188 | 0 189 | 0 190 | 0 191 | 0 192 | 0 193 | 0 194 | 0 195 | 0 196 | 0 197 | 0 198 | 0 199 | 0 200 | 0 201 | 0 202 | 0 203 | 0 204 | 0 205 | 0 206 | 0 207 | 0 
208 | 0 209 | 0 210 | 0 211 | 0 212 | 0 213 | 0 214 | 0 215 | 0 216 | 0 217 | 0 218 | 0 219 | 0 220 | 0 221 | 0 222 | 0 223 | 0 224 | 0 225 | 0 226 | 0 227 | 0 228 | 0 229 | 0 230 | 0 231 | 0 232 | 0 233 | 0 234 | 0 235 | 0 236 | 0 237 | 0 238 | 0 239 | 0 240 | 1 241 | 1 242 | 1 243 | 1 244 | 1 245 | 1 246 | 1 247 | 1 248 | 1 249 | 1 250 | 1 251 | 1 252 | 1 253 | 1 254 | 1 255 | 1 256 | 1 257 | 1 258 | 1 259 | 1 260 | 1 261 | 1 262 | 1 263 | 1 264 | 1 265 | 1 266 | 1 267 | 1 268 | 1 269 | 1 270 | 1 271 | 1 272 | 1 273 | 1 274 | 1 275 | 1 276 | 1 277 | 1 278 | 1 279 | 1 280 | 1 281 | 1 282 | 1 283 | 1 284 | 1 285 | 1 286 | 1 287 | 1 288 | 1 289 | 1 290 | 1 291 | 1 292 | 1 293 | 1 294 | 1 295 | 1 296 | 1 297 | 1 298 | 1 299 | 1 300 | 1 301 | 1 302 | 1 303 | 1 304 | 1 305 | 1 306 | 1 307 | 1 308 | 1 309 | 1 310 | 1 311 | 1 312 | 1 313 | 1 314 | 1 315 | 1 316 | 1 317 | 1 318 | 1 319 | 1 320 | 1 321 | 1 322 | 1 323 | 1 324 | 1 325 | 1 326 | 1 327 | 1 328 | 1 329 | 1 330 | 1 331 | 1 332 | 1 333 | 1 334 | 1 335 | 1 336 | 1 337 | 1 338 | 1 339 | 1 340 | 1 341 | 1 342 | 1 343 | 1 344 | 1 345 | 1 346 | 1 347 | 1 348 | 1 349 | 1 350 | 1 351 | 1 352 | 1 353 | 1 354 | 1 355 | 1 356 | 1 357 | 1 358 | 1 359 | 1 360 | 1 361 | 1 362 | 1 363 | 1 364 | 1 365 | 1 366 | 1 367 | 1 368 | 1 369 | 1 370 | 1 371 | 1 372 | 1 373 | 1 374 | 1 375 | 1 376 | 1 377 | 1 378 | 1 379 | 1 380 | 1 381 | 1 382 | 1 383 | 1 384 | 1 385 | 1 386 | 1 387 | 1 388 | 1 389 | 1 390 | 1 391 | 1 392 | 1 393 | 1 394 | 1 395 | 1 396 | 1 397 | 1 398 | 1 399 | 1 400 | 1 401 | 1 402 | 1 403 | 1 404 | 1 405 | 1 406 | 1 407 | 1 408 | 1 409 | 1 410 | 1 411 | 1 412 | 1 413 | 1 414 | 1 415 | 1 416 | 1 417 | 1 418 | 1 419 | 1 420 | 1 421 | 1 422 | 1 423 | 1 424 | 1 425 | 1 426 | 1 427 | 1 428 | 1 429 | 1 430 | 1 431 | 1 432 | 1 433 | 1 434 | 1 435 | 1 436 | 1 437 | 1 438 | 1 439 | 1 440 | 1 441 | 1 442 | 1 443 | 1 444 | 1 445 | 1 446 | 1 447 | 1 448 | 1 449 | 1 450 | 1 451 | 1 452 | 1 453 | 1 454 | 1 455 | 1 456 | 1 457 | 1 458 | 1 459 | 1 460 | 1 461 | 1 462 | 1 463 | 1 464 | 1 465 | 1 466 | 1 467 | 1 468 | 1 469 | 1 470 | 1 471 | 1 472 | 1 473 | 1 474 | 1 475 | 1 476 | 1 477 | 1 478 | 1 479 | 1 480 | 1 481 | 1 482 | 1 483 | 1 484 | 1 485 | 1 486 | 1 487 | 1 488 | 1 489 | 1 490 | 1 491 | 1 492 | 1 493 | 1 494 | 1 495 | 1 496 | 1 497 | 1 498 | 1 499 | 1 500 | 1 501 | 1 502 | 1 503 | 1 504 | 1 505 | 1 506 | 1 507 | 1 508 | 1 509 | 1 510 | 1 511 | 1 512 | 1 513 | 1 514 | 1 515 | 1 516 | 1 517 | 1 518 | 1 519 | 1 520 | 1 521 | 1 522 | 1 523 | 1 524 | 1 525 | 1 526 | 1 527 | 1 528 | 1 529 | 1 530 | 1 531 | 1 532 | 1 533 | 1 534 | 1 535 | 1 536 | 1 537 | 1 538 | 1 539 | 1 540 | 1 541 | 1 542 | 1 543 | 1 544 | 1 545 | 1 546 | 1 547 | 1 548 | 1 549 | 1 550 | 1 551 | 1 552 | 1 553 | 1 554 | 1 555 | 1 556 | 1 557 | 1 558 | 1 559 | 1 560 | 1 561 | 1 562 | 1 563 | 1 564 | 1 565 | 1 566 | 1 567 | 1 568 | 1 569 | 1 570 | 1 571 | 1 572 | 1 573 | 1 574 | 1 575 | 1 576 | 1 577 | 1 578 | 1 579 | 1 580 | 1 581 | 1 582 | 1 583 | 1 584 | 1 585 | 1 586 | 1 587 | 1 588 | 1 589 | 2 590 | 2 591 | 2 592 | 2 593 | 2 594 | 2 595 | 2 596 | 2 597 | 2 598 | 2 599 | 2 600 | 2 601 | 2 602 | 2 603 | 2 604 | 2 605 | 2 606 | 2 607 | 2 608 | 2 609 | 2 610 | 2 611 | 2 612 | 2 613 | 2 614 | 2 615 | 2 616 | 2 617 | 2 618 | 2 619 | 2 620 | 2 621 | 2 622 | 2 623 | 2 624 | 2 625 | 2 626 | 2 627 | 2 628 | 2 629 | 2 630 | 2 631 | 2 632 | 2 633 | 2 634 | 2 635 | 2 636 | 2 637 | 2 638 | 2 639 | 2 640 | 2 641 | 2 642 | 2 643 | 2 644 | 2 645 | 2 646 | 2 647 | 2 648 | 2 649 | 2 650 | 2 651 | 2 
652 | 2 653 | 2 654 | 2 655 | 2 656 | 2 657 | 2 658 | 2 659 | 2 660 | 2 661 | 2 662 | 2 663 | 2 664 | 2 665 | 2 666 | 2 667 | 2 668 | 2 669 | 2 670 | 2 671 | 2 672 | 2 673 | 2 674 | 2 675 | 2 676 | 2 677 | 2 678 | 2 679 | 2 680 | 2 681 | 2 682 | 2 683 | 2 684 | 2 685 | 2 686 | 2 687 | 2 688 | 2 689 | 2 690 | 2 691 | 2 692 | 2 693 | 2 694 | 2 695 | 2 696 | 2 697 | 2 698 | 2 699 | 2 700 | 2 701 | 2 702 | 2 703 | 2 704 | 2 705 | 2 706 | 2 707 | 2 708 | 2 709 | 2 710 | 2 711 | 2 712 | 2 713 | 2 714 | 2 715 | 2 716 | 2 717 | 2 718 | 2 719 | 2 720 | 2 721 | 2 722 | 2 723 | 2 724 | 2 725 | 2 726 | 2 727 | 2 728 | 2 729 | 2 730 | 2 731 | 2 732 | 2 733 | 2 734 | 2 735 | 2 736 | 2 737 | 2 738 | 2 739 | 2 740 | 2 741 | 2 742 | 2 743 | 2 744 | 2 745 | 2 746 | 2 747 | 2 748 | 2 749 | 2 750 | 2 751 | 2 752 | 2 753 | 2 754 | 2 755 | 2 756 | 2 757 | 2 758 | 2 759 | 2 760 | 2 761 | 2 762 | 2 763 | 2 764 | 2 765 | 2 766 | 2 767 | 2 768 | 2 769 | 2 770 | 2 771 | 2 772 | 2 773 | 2 774 | 2 775 | 2 776 | 2 777 | 2 778 | 2 779 | 2 780 | 2 781 | 2 782 | 2 783 | 2 784 | 2 785 | 2 786 | 2 787 | 2 788 | 2 789 | 2 790 | 2 791 | 2 792 | 2 793 | 2 794 | 2 795 | 2 796 | 2 797 | 2 798 | 2 799 | 2 800 | 2 801 | 2 802 | 2 803 | 2 804 | 2 805 | 2 806 | 2 807 | 2 808 | 2 809 | 2 810 | 2 811 | 2 812 | 2 813 | 2 814 | 2 815 | 2 816 | 2 817 | 2 818 | 2 819 | 2 820 | 2 821 | 2 822 | 2 823 | 2 824 | 2 825 | 2 826 | 2 827 | 2 828 | 2 829 | 2 830 | 2 831 | 2 832 | 2 833 | 2 834 | 2 835 | 2 836 | 2 837 | 2 838 | 2 839 | 2 840 | 2 841 | 2 842 | 2 843 | 2 844 | 2 845 | 2 846 | 2 847 | 2 848 | 2 849 | 2 850 | 2 851 | 2 852 | 2 853 | 2 854 | 2 855 | 2 856 | 2 857 | 2 858 | 2 859 | 2 860 | 2 861 | 2 862 | 2 863 | 2 864 | 2 865 | 2 866 | 2 867 | 2 868 | 2 869 | 2 870 | 2 871 | 2 872 | 2 873 | 2 874 | 2 875 | 2 876 | 2 877 | 2 878 | 2 879 | 2 880 | 2 881 | 2 882 | 2 883 | 2 884 | 2 885 | 2 886 | 2 887 | 2 888 | 2 889 | 2 890 | 2 891 | 2 892 | 2 893 | 2 894 | 2 895 | 2 896 | 2 897 | 2 898 | 2 899 | 2 900 | 2 901 | 2 902 | 2 903 | -------------------------------------------------------------------------------- /data/scaledata/Scott+Renshaw/label.4class.Scott+Renshaw: -------------------------------------------------------------------------------- 1 | 0 2 | 0 3 | 0 4 | 0 5 | 0 6 | 0 7 | 0 8 | 0 9 | 0 10 | 0 11 | 0 12 | 0 13 | 0 14 | 0 15 | 0 16 | 0 17 | 0 18 | 0 19 | 0 20 | 0 21 | 0 22 | 0 23 | 0 24 | 0 25 | 0 26 | 0 27 | 0 28 | 0 29 | 0 30 | 0 31 | 0 32 | 0 33 | 0 34 | 0 35 | 0 36 | 0 37 | 0 38 | 0 39 | 0 40 | 0 41 | 0 42 | 0 43 | 0 44 | 0 45 | 0 46 | 0 47 | 0 48 | 0 49 | 0 50 | 0 51 | 0 52 | 0 53 | 0 54 | 0 55 | 0 56 | 0 57 | 0 58 | 0 59 | 0 60 | 0 61 | 0 62 | 0 63 | 0 64 | 0 65 | 0 66 | 0 67 | 0 68 | 0 69 | 0 70 | 0 71 | 0 72 | 0 73 | 0 74 | 0 75 | 0 76 | 0 77 | 0 78 | 0 79 | 0 80 | 0 81 | 0 82 | 0 83 | 0 84 | 0 85 | 0 86 | 0 87 | 0 88 | 0 89 | 0 90 | 0 91 | 0 92 | 0 93 | 0 94 | 0 95 | 0 96 | 0 97 | 0 98 | 0 99 | 0 100 | 0 101 | 0 102 | 0 103 | 0 104 | 0 105 | 0 106 | 0 107 | 0 108 | 0 109 | 0 110 | 0 111 | 0 112 | 0 113 | 0 114 | 0 115 | 0 116 | 1 117 | 1 118 | 1 119 | 1 120 | 1 121 | 1 122 | 1 123 | 1 124 | 1 125 | 1 126 | 1 127 | 1 128 | 1 129 | 1 130 | 1 131 | 1 132 | 1 133 | 1 134 | 1 135 | 1 136 | 1 137 | 1 138 | 1 139 | 1 140 | 1 141 | 1 142 | 1 143 | 1 144 | 1 145 | 1 146 | 1 147 | 1 148 | 1 149 | 1 150 | 1 151 | 1 152 | 1 153 | 1 154 | 1 155 | 1 156 | 1 157 | 1 158 | 1 159 | 1 160 | 1 161 | 1 162 | 1 163 | 1 164 | 1 165 | 1 166 | 1 167 | 1 168 | 1 169 | 1 170 | 1 171 | 1 172 | 1 173 | 1 174 | 1 175 | 1 176 | 1 177 | 1 178 | 1 179 
| 1 180 | 1 181 | 1 182 | 1 183 | 1 184 | 1 185 | 1 186 | 1 187 | 1 188 | 1 189 | 1 190 | 1 191 | 1 192 | 1 193 | 1 194 | 1 195 | 1 196 | 1 197 | 1 198 | 1 199 | 1 200 | 1 201 | 1 202 | 1 203 | 1 204 | 1 205 | 1 206 | 1 207 | 1 208 | 1 209 | 1 210 | 1 211 | 1 212 | 1 213 | 1 214 | 1 215 | 1 216 | 1 217 | 1 218 | 1 219 | 1 220 | 1 221 | 1 222 | 1 223 | 1 224 | 1 225 | 1 226 | 1 227 | 1 228 | 1 229 | 1 230 | 1 231 | 1 232 | 1 233 | 1 234 | 1 235 | 1 236 | 1 237 | 1 238 | 1 239 | 1 240 | 1 241 | 1 242 | 1 243 | 1 244 | 1 245 | 1 246 | 1 247 | 1 248 | 1 249 | 1 250 | 1 251 | 1 252 | 1 253 | 1 254 | 1 255 | 1 256 | 1 257 | 1 258 | 1 259 | 1 260 | 1 261 | 1 262 | 1 263 | 1 264 | 1 265 | 1 266 | 1 267 | 1 268 | 1 269 | 1 270 | 1 271 | 1 272 | 1 273 | 1 274 | 1 275 | 1 276 | 1 277 | 1 278 | 1 279 | 1 280 | 1 281 | 1 282 | 1 283 | 1 284 | 1 285 | 1 286 | 1 287 | 1 288 | 1 289 | 1 290 | 1 291 | 1 292 | 1 293 | 1 294 | 1 295 | 1 296 | 1 297 | 1 298 | 1 299 | 1 300 | 1 301 | 1 302 | 1 303 | 1 304 | 1 305 | 1 306 | 1 307 | 1 308 | 1 309 | 1 310 | 1 311 | 1 312 | 1 313 | 1 314 | 1 315 | 1 316 | 1 317 | 1 318 | 1 319 | 1 320 | 1 321 | 1 322 | 1 323 | 1 324 | 1 325 | 1 326 | 1 327 | 1 328 | 1 329 | 1 330 | 1 331 | 1 332 | 1 333 | 1 334 | 1 335 | 1 336 | 1 337 | 1 338 | 1 339 | 1 340 | 1 341 | 1 342 | 1 343 | 1 344 | 1 345 | 1 346 | 1 347 | 1 348 | 1 349 | 1 350 | 1 351 | 1 352 | 1 353 | 1 354 | 1 355 | 1 356 | 1 357 | 1 358 | 1 359 | 1 360 | 1 361 | 1 362 | 1 363 | 1 364 | 1 365 | 1 366 | 1 367 | 1 368 | 1 369 | 1 370 | 1 371 | 1 372 | 1 373 | 1 374 | 1 375 | 1 376 | 1 377 | 1 378 | 1 379 | 1 380 | 1 381 | 1 382 | 1 383 | 1 384 | 1 385 | 1 386 | 1 387 | 1 388 | 1 389 | 1 390 | 1 391 | 1 392 | 1 393 | 1 394 | 1 395 | 1 396 | 1 397 | 1 398 | 1 399 | 1 400 | 1 401 | 1 402 | 1 403 | 1 404 | 1 405 | 1 406 | 1 407 | 1 408 | 1 409 | 1 410 | 1 411 | 2 412 | 2 413 | 2 414 | 2 415 | 2 416 | 2 417 | 2 418 | 2 419 | 2 420 | 2 421 | 2 422 | 2 423 | 2 424 | 2 425 | 2 426 | 2 427 | 2 428 | 2 429 | 2 430 | 2 431 | 2 432 | 2 433 | 2 434 | 2 435 | 2 436 | 2 437 | 2 438 | 2 439 | 2 440 | 2 441 | 2 442 | 2 443 | 2 444 | 2 445 | 2 446 | 2 447 | 2 448 | 2 449 | 2 450 | 2 451 | 2 452 | 2 453 | 2 454 | 2 455 | 2 456 | 2 457 | 2 458 | 2 459 | 2 460 | 2 461 | 2 462 | 2 463 | 2 464 | 2 465 | 2 466 | 2 467 | 2 468 | 2 469 | 2 470 | 2 471 | 2 472 | 2 473 | 2 474 | 2 475 | 2 476 | 2 477 | 2 478 | 2 479 | 2 480 | 2 481 | 2 482 | 2 483 | 2 484 | 2 485 | 2 486 | 2 487 | 2 488 | 2 489 | 2 490 | 2 491 | 2 492 | 2 493 | 2 494 | 2 495 | 2 496 | 2 497 | 2 498 | 2 499 | 2 500 | 2 501 | 2 502 | 2 503 | 2 504 | 2 505 | 2 506 | 2 507 | 2 508 | 2 509 | 2 510 | 2 511 | 2 512 | 2 513 | 2 514 | 2 515 | 2 516 | 2 517 | 2 518 | 2 519 | 2 520 | 2 521 | 2 522 | 2 523 | 2 524 | 2 525 | 2 526 | 2 527 | 2 528 | 2 529 | 2 530 | 2 531 | 2 532 | 2 533 | 2 534 | 2 535 | 2 536 | 2 537 | 2 538 | 2 539 | 2 540 | 2 541 | 2 542 | 2 543 | 2 544 | 2 545 | 2 546 | 2 547 | 2 548 | 2 549 | 2 550 | 2 551 | 2 552 | 2 553 | 2 554 | 2 555 | 2 556 | 2 557 | 2 558 | 2 559 | 2 560 | 2 561 | 2 562 | 2 563 | 2 564 | 2 565 | 2 566 | 2 567 | 2 568 | 2 569 | 2 570 | 2 571 | 2 572 | 2 573 | 2 574 | 2 575 | 2 576 | 2 577 | 2 578 | 2 579 | 2 580 | 2 581 | 2 582 | 2 583 | 2 584 | 2 585 | 2 586 | 2 587 | 2 588 | 2 589 | 2 590 | 2 591 | 2 592 | 2 593 | 2 594 | 2 595 | 2 596 | 2 597 | 2 598 | 2 599 | 2 600 | 2 601 | 2 602 | 2 603 | 2 604 | 2 605 | 2 606 | 2 607 | 2 608 | 2 609 | 2 610 | 2 611 | 2 612 | 2 613 | 2 614 | 2 615 | 2 616 | 2 617 | 2 618 | 2 619 | 2 620 | 2 621 | 2 622 | 2 623 | 
2 624 | 2 625 | 2 626 | 2 627 | 2 628 | 2 629 | 2 630 | 2 631 | 2 632 | 2 633 | 2 634 | 2 635 | 2 636 | 2 637 | 2 638 | 2 639 | 2 640 | 2 641 | 2 642 | 2 643 | 2 644 | 2 645 | 2 646 | 2 647 | 2 648 | 2 649 | 2 650 | 2 651 | 2 652 | 2 653 | 2 654 | 2 655 | 2 656 | 2 657 | 2 658 | 2 659 | 2 660 | 2 661 | 2 662 | 2 663 | 2 664 | 2 665 | 2 666 | 2 667 | 2 668 | 2 669 | 2 670 | 2 671 | 2 672 | 2 673 | 2 674 | 2 675 | 2 676 | 2 677 | 2 678 | 2 679 | 2 680 | 2 681 | 2 682 | 2 683 | 2 684 | 2 685 | 2 686 | 2 687 | 2 688 | 2 689 | 2 690 | 2 691 | 2 692 | 2 693 | 2 694 | 2 695 | 2 696 | 2 697 | 2 698 | 2 699 | 2 700 | 2 701 | 2 702 | 2 703 | 2 704 | 2 705 | 2 706 | 2 707 | 2 708 | 2 709 | 2 710 | 2 711 | 2 712 | 2 713 | 2 714 | 2 715 | 2 716 | 2 717 | 2 718 | 2 719 | 2 720 | 2 721 | 2 722 | 2 723 | 2 724 | 2 725 | 2 726 | 2 727 | 2 728 | 2 729 | 2 730 | 2 731 | 2 732 | 2 733 | 2 734 | 2 735 | 2 736 | 2 737 | 2 738 | 2 739 | 2 740 | 2 741 | 2 742 | 2 743 | 2 744 | 2 745 | 3 746 | 3 747 | 3 748 | 3 749 | 3 750 | 3 751 | 3 752 | 3 753 | 3 754 | 3 755 | 3 756 | 3 757 | 3 758 | 3 759 | 3 760 | 3 761 | 3 762 | 3 763 | 3 764 | 3 765 | 3 766 | 3 767 | 3 768 | 3 769 | 3 770 | 3 771 | 3 772 | 3 773 | 3 774 | 3 775 | 3 776 | 3 777 | 3 778 | 3 779 | 3 780 | 3 781 | 3 782 | 3 783 | 3 784 | 3 785 | 3 786 | 3 787 | 3 788 | 3 789 | 3 790 | 3 791 | 3 792 | 3 793 | 3 794 | 3 795 | 3 796 | 3 797 | 3 798 | 3 799 | 3 800 | 3 801 | 3 802 | 3 803 | 3 804 | 3 805 | 3 806 | 3 807 | 3 808 | 3 809 | 3 810 | 3 811 | 3 812 | 3 813 | 3 814 | 3 815 | 3 816 | 3 817 | 3 818 | 3 819 | 3 820 | 3 821 | 3 822 | 3 823 | 3 824 | 3 825 | 3 826 | 3 827 | 3 828 | 3 829 | 3 830 | 3 831 | 3 832 | 3 833 | 3 834 | 3 835 | 3 836 | 3 837 | 3 838 | 3 839 | 3 840 | 3 841 | 3 842 | 3 843 | 3 844 | 3 845 | 3 846 | 3 847 | 3 848 | 3 849 | 3 850 | 3 851 | 3 852 | 3 853 | 3 854 | 3 855 | 3 856 | 3 857 | 3 858 | 3 859 | 3 860 | 3 861 | 3 862 | 3 863 | 3 864 | 3 865 | 3 866 | 3 867 | 3 868 | 3 869 | 3 870 | 3 871 | 3 872 | 3 873 | 3 874 | 3 875 | 3 876 | 3 877 | 3 878 | 3 879 | 3 880 | 3 881 | 3 882 | 3 883 | 3 884 | 3 885 | 3 886 | 3 887 | 3 888 | 3 889 | 3 890 | 3 891 | 3 892 | 3 893 | 3 894 | 3 895 | 3 896 | 3 897 | 3 898 | 3 899 | 3 900 | 3 901 | 3 902 | 3 903 | -------------------------------------------------------------------------------- /data/scaledata/Scott+Renshaw/rating.Scott+Renshaw: -------------------------------------------------------------------------------- 1 | 0 2 | 0 3 | 0 4 | 0 5 | 0.1 6 | 0.1 7 | 0.1 8 | 0.1 9 | 0.1 10 | 0.1 11 | 0.1 12 | 0.1 13 | 0.1 14 | 0.1 15 | 0.1 16 | 0.2 17 | 0.2 18 | 0.2 19 | 0.2 20 | 0.2 21 | 0.2 22 | 0.2 23 | 0.2 24 | 0.2 25 | 0.2 26 | 0.2 27 | 0.2 28 | 0.2 29 | 0.2 30 | 0.2 31 | 0.2 32 | 0.2 33 | 0.2 34 | 0.2 35 | 0.2 36 | 0.2 37 | 0.2 38 | 0.2 39 | 0.2 40 | 0.2 41 | 0.2 42 | 0.2 43 | 0.3 44 | 0.3 45 | 0.3 46 | 0.3 47 | 0.3 48 | 0.3 49 | 0.3 50 | 0.3 51 | 0.3 52 | 0.3 53 | 0.3 54 | 0.3 55 | 0.3 56 | 0.3 57 | 0.3 58 | 0.3 59 | 0.3 60 | 0.3 61 | 0.3 62 | 0.3 63 | 0.3 64 | 0.3 65 | 0.3 66 | 0.3 67 | 0.3 68 | 0.3 69 | 0.3 70 | 0.3 71 | 0.3 72 | 0.3 73 | 0.3 74 | 0.3 75 | 0.3 76 | 0.3 77 | 0.3 78 | 0.3 79 | 0.3 80 | 0.3 81 | 0.3 82 | 0.3 83 | 0.3 84 | 0.3 85 | 0.3 86 | 0.3 87 | 0.3 88 | 0.3 89 | 0.3 90 | 0.3 91 | 0.3 92 | 0.3 93 | 0.3 94 | 0.3 95 | 0.3 96 | 0.3 97 | 0.3 98 | 0.3 99 | 0.3 100 | 0.3 101 | 0.3 102 | 0.3 103 | 0.3 104 | 0.3 105 | 0.3 106 | 0.3 107 | 0.3 108 | 0.3 109 | 0.3 110 | 0.3 111 | 0.3 112 | 0.3 113 | 0.3 114 | 0.3 115 | 0.3 116 | 0.4 117 | 0.4 118 | 0.4 119 | 0.4 120 | 0.4 121 | 0.4 122 | 
0.4 123 | 0.4 124 | 0.4 125 | 0.4 126 | 0.4 127 | 0.4 128 | 0.4 129 | 0.4 130 | 0.4 131 | 0.4 132 | 0.4 133 | 0.4 134 | 0.4 135 | 0.4 136 | 0.4 137 | 0.4 138 | 0.4 139 | 0.4 140 | 0.4 141 | 0.4 142 | 0.4 143 | 0.4 144 | 0.4 145 | 0.4 146 | 0.4 147 | 0.4 148 | 0.4 149 | 0.4 150 | 0.4 151 | 0.4 152 | 0.4 153 | 0.4 154 | 0.4 155 | 0.4 156 | 0.4 157 | 0.4 158 | 0.4 159 | 0.4 160 | 0.4 161 | 0.4 162 | 0.4 163 | 0.4 164 | 0.4 165 | 0.4 166 | 0.4 167 | 0.4 168 | 0.4 169 | 0.4 170 | 0.4 171 | 0.4 172 | 0.4 173 | 0.4 174 | 0.4 175 | 0.4 176 | 0.4 177 | 0.4 178 | 0.4 179 | 0.4 180 | 0.4 181 | 0.4 182 | 0.4 183 | 0.4 184 | 0.4 185 | 0.4 186 | 0.4 187 | 0.4 188 | 0.4 189 | 0.4 190 | 0.4 191 | 0.4 192 | 0.4 193 | 0.4 194 | 0.4 195 | 0.4 196 | 0.4 197 | 0.4 198 | 0.4 199 | 0.4 200 | 0.4 201 | 0.4 202 | 0.4 203 | 0.4 204 | 0.4 205 | 0.4 206 | 0.4 207 | 0.4 208 | 0.4 209 | 0.4 210 | 0.4 211 | 0.4 212 | 0.4 213 | 0.4 214 | 0.4 215 | 0.4 216 | 0.4 217 | 0.4 218 | 0.4 219 | 0.4 220 | 0.4 221 | 0.4 222 | 0.4 223 | 0.4 224 | 0.4 225 | 0.4 226 | 0.4 227 | 0.4 228 | 0.4 229 | 0.4 230 | 0.4 231 | 0.4 232 | 0.4 233 | 0.4 234 | 0.4 235 | 0.4 236 | 0.4 237 | 0.4 238 | 0.4 239 | 0.4 240 | 0.5 241 | 0.5 242 | 0.5 243 | 0.5 244 | 0.5 245 | 0.5 246 | 0.5 247 | 0.5 248 | 0.5 249 | 0.5 250 | 0.5 251 | 0.5 252 | 0.5 253 | 0.5 254 | 0.5 255 | 0.5 256 | 0.5 257 | 0.5 258 | 0.5 259 | 0.5 260 | 0.5 261 | 0.5 262 | 0.5 263 | 0.5 264 | 0.5 265 | 0.5 266 | 0.5 267 | 0.5 268 | 0.5 269 | 0.5 270 | 0.5 271 | 0.5 272 | 0.5 273 | 0.5 274 | 0.5 275 | 0.5 276 | 0.5 277 | 0.5 278 | 0.5 279 | 0.5 280 | 0.5 281 | 0.5 282 | 0.5 283 | 0.5 284 | 0.5 285 | 0.5 286 | 0.5 287 | 0.5 288 | 0.5 289 | 0.5 290 | 0.5 291 | 0.5 292 | 0.5 293 | 0.5 294 | 0.5 295 | 0.5 296 | 0.5 297 | 0.5 298 | 0.5 299 | 0.5 300 | 0.5 301 | 0.5 302 | 0.5 303 | 0.5 304 | 0.5 305 | 0.5 306 | 0.5 307 | 0.5 308 | 0.5 309 | 0.5 310 | 0.5 311 | 0.5 312 | 0.5 313 | 0.5 314 | 0.5 315 | 0.5 316 | 0.5 317 | 0.5 318 | 0.5 319 | 0.5 320 | 0.5 321 | 0.5 322 | 0.5 323 | 0.5 324 | 0.5 325 | 0.5 326 | 0.5 327 | 0.5 328 | 0.5 329 | 0.5 330 | 0.5 331 | 0.5 332 | 0.5 333 | 0.5 334 | 0.5 335 | 0.5 336 | 0.5 337 | 0.5 338 | 0.5 339 | 0.5 340 | 0.5 341 | 0.5 342 | 0.5 343 | 0.5 344 | 0.5 345 | 0.5 346 | 0.5 347 | 0.5 348 | 0.5 349 | 0.5 350 | 0.5 351 | 0.5 352 | 0.5 353 | 0.5 354 | 0.5 355 | 0.5 356 | 0.5 357 | 0.5 358 | 0.5 359 | 0.5 360 | 0.5 361 | 0.5 362 | 0.5 363 | 0.5 364 | 0.5 365 | 0.5 366 | 0.5 367 | 0.5 368 | 0.5 369 | 0.5 370 | 0.5 371 | 0.5 372 | 0.5 373 | 0.5 374 | 0.5 375 | 0.5 376 | 0.5 377 | 0.5 378 | 0.5 379 | 0.5 380 | 0.5 381 | 0.5 382 | 0.5 383 | 0.5 384 | 0.5 385 | 0.5 386 | 0.5 387 | 0.5 388 | 0.5 389 | 0.5 390 | 0.5 391 | 0.5 392 | 0.5 393 | 0.5 394 | 0.5 395 | 0.5 396 | 0.5 397 | 0.5 398 | 0.5 399 | 0.5 400 | 0.5 401 | 0.5 402 | 0.5 403 | 0.5 404 | 0.5 405 | 0.5 406 | 0.5 407 | 0.5 408 | 0.5 409 | 0.5 410 | 0.5 411 | 0.6 412 | 0.6 413 | 0.6 414 | 0.6 415 | 0.6 416 | 0.6 417 | 0.6 418 | 0.6 419 | 0.6 420 | 0.6 421 | 0.6 422 | 0.6 423 | 0.6 424 | 0.6 425 | 0.6 426 | 0.6 427 | 0.6 428 | 0.6 429 | 0.6 430 | 0.6 431 | 0.6 432 | 0.6 433 | 0.6 434 | 0.6 435 | 0.6 436 | 0.6 437 | 0.6 438 | 0.6 439 | 0.6 440 | 0.6 441 | 0.6 442 | 0.6 443 | 0.6 444 | 0.6 445 | 0.6 446 | 0.6 447 | 0.6 448 | 0.6 449 | 0.6 450 | 0.6 451 | 0.6 452 | 0.6 453 | 0.6 454 | 0.6 455 | 0.6 456 | 0.6 457 | 0.6 458 | 0.6 459 | 0.6 460 | 0.6 461 | 0.6 462 | 0.6 463 | 0.6 464 | 0.6 465 | 0.6 466 | 0.6 467 | 0.6 468 | 0.6 469 | 0.6 470 | 0.6 471 | 0.6 472 | 0.6 473 | 0.6 474 | 0.6 475 | 0.6 476 | 0.6 477 | 0.6 
478 | 0.6 479 | 0.6 480 | 0.6 481 | 0.6 482 | 0.6 483 | 0.6 484 | 0.6 485 | 0.6 486 | 0.6 487 | 0.6 488 | 0.6 489 | 0.6 490 | 0.6 491 | 0.6 492 | 0.6 493 | 0.6 494 | 0.6 495 | 0.6 496 | 0.6 497 | 0.6 498 | 0.6 499 | 0.6 500 | 0.6 501 | 0.6 502 | 0.6 503 | 0.6 504 | 0.6 505 | 0.6 506 | 0.6 507 | 0.6 508 | 0.6 509 | 0.6 510 | 0.6 511 | 0.6 512 | 0.6 513 | 0.6 514 | 0.6 515 | 0.6 516 | 0.6 517 | 0.6 518 | 0.6 519 | 0.6 520 | 0.6 521 | 0.6 522 | 0.6 523 | 0.6 524 | 0.6 525 | 0.6 526 | 0.6 527 | 0.6 528 | 0.6 529 | 0.6 530 | 0.6 531 | 0.6 532 | 0.6 533 | 0.6 534 | 0.6 535 | 0.6 536 | 0.6 537 | 0.6 538 | 0.6 539 | 0.6 540 | 0.6 541 | 0.6 542 | 0.6 543 | 0.6 544 | 0.6 545 | 0.6 546 | 0.6 547 | 0.6 548 | 0.6 549 | 0.6 550 | 0.6 551 | 0.6 552 | 0.6 553 | 0.6 554 | 0.6 555 | 0.6 556 | 0.6 557 | 0.6 558 | 0.6 559 | 0.6 560 | 0.6 561 | 0.6 562 | 0.6 563 | 0.6 564 | 0.6 565 | 0.6 566 | 0.6 567 | 0.6 568 | 0.6 569 | 0.6 570 | 0.6 571 | 0.6 572 | 0.6 573 | 0.6 574 | 0.6 575 | 0.6 576 | 0.6 577 | 0.6 578 | 0.6 579 | 0.6 580 | 0.6 581 | 0.6 582 | 0.6 583 | 0.6 584 | 0.6 585 | 0.6 586 | 0.6 587 | 0.6 588 | 0.6 589 | 0.7 590 | 0.7 591 | 0.7 592 | 0.7 593 | 0.7 594 | 0.7 595 | 0.7 596 | 0.7 597 | 0.7 598 | 0.7 599 | 0.7 600 | 0.7 601 | 0.7 602 | 0.7 603 | 0.7 604 | 0.7 605 | 0.7 606 | 0.7 607 | 0.7 608 | 0.7 609 | 0.7 610 | 0.7 611 | 0.7 612 | 0.7 613 | 0.7 614 | 0.7 615 | 0.7 616 | 0.7 617 | 0.7 618 | 0.7 619 | 0.7 620 | 0.7 621 | 0.7 622 | 0.7 623 | 0.7 624 | 0.7 625 | 0.7 626 | 0.7 627 | 0.7 628 | 0.7 629 | 0.7 630 | 0.7 631 | 0.7 632 | 0.7 633 | 0.7 634 | 0.7 635 | 0.7 636 | 0.7 637 | 0.7 638 | 0.7 639 | 0.7 640 | 0.7 641 | 0.7 642 | 0.7 643 | 0.7 644 | 0.7 645 | 0.7 646 | 0.7 647 | 0.7 648 | 0.7 649 | 0.7 650 | 0.7 651 | 0.7 652 | 0.7 653 | 0.7 654 | 0.7 655 | 0.7 656 | 0.7 657 | 0.7 658 | 0.7 659 | 0.7 660 | 0.7 661 | 0.7 662 | 0.7 663 | 0.7 664 | 0.7 665 | 0.7 666 | 0.7 667 | 0.7 668 | 0.7 669 | 0.7 670 | 0.7 671 | 0.7 672 | 0.7 673 | 0.7 674 | 0.7 675 | 0.7 676 | 0.7 677 | 0.7 678 | 0.7 679 | 0.7 680 | 0.7 681 | 0.7 682 | 0.7 683 | 0.7 684 | 0.7 685 | 0.7 686 | 0.7 687 | 0.7 688 | 0.7 689 | 0.7 690 | 0.7 691 | 0.7 692 | 0.7 693 | 0.7 694 | 0.7 695 | 0.7 696 | 0.7 697 | 0.7 698 | 0.7 699 | 0.7 700 | 0.7 701 | 0.7 702 | 0.7 703 | 0.7 704 | 0.7 705 | 0.7 706 | 0.7 707 | 0.7 708 | 0.7 709 | 0.7 710 | 0.7 711 | 0.7 712 | 0.7 713 | 0.7 714 | 0.7 715 | 0.7 716 | 0.7 717 | 0.7 718 | 0.7 719 | 0.7 720 | 0.7 721 | 0.7 722 | 0.7 723 | 0.7 724 | 0.7 725 | 0.7 726 | 0.7 727 | 0.7 728 | 0.7 729 | 0.7 730 | 0.7 731 | 0.7 732 | 0.7 733 | 0.7 734 | 0.7 735 | 0.7 736 | 0.7 737 | 0.7 738 | 0.7 739 | 0.7 740 | 0.7 741 | 0.7 742 | 0.7 743 | 0.7 744 | 0.7 745 | 0.8 746 | 0.8 747 | 0.8 748 | 0.8 749 | 0.8 750 | 0.8 751 | 0.8 752 | 0.8 753 | 0.8 754 | 0.8 755 | 0.8 756 | 0.8 757 | 0.8 758 | 0.8 759 | 0.8 760 | 0.8 761 | 0.8 762 | 0.8 763 | 0.8 764 | 0.8 765 | 0.8 766 | 0.8 767 | 0.8 768 | 0.8 769 | 0.8 770 | 0.8 771 | 0.8 772 | 0.8 773 | 0.8 774 | 0.8 775 | 0.8 776 | 0.8 777 | 0.8 778 | 0.8 779 | 0.8 780 | 0.8 781 | 0.8 782 | 0.8 783 | 0.8 784 | 0.8 785 | 0.8 786 | 0.8 787 | 0.8 788 | 0.8 789 | 0.8 790 | 0.8 791 | 0.8 792 | 0.8 793 | 0.8 794 | 0.8 795 | 0.8 796 | 0.8 797 | 0.8 798 | 0.8 799 | 0.8 800 | 0.8 801 | 0.8 802 | 0.8 803 | 0.8 804 | 0.8 805 | 0.8 806 | 0.8 807 | 0.8 808 | 0.8 809 | 0.8 810 | 0.8 811 | 0.8 812 | 0.8 813 | 0.8 814 | 0.8 815 | 0.8 816 | 0.8 817 | 0.8 818 | 0.8 819 | 0.8 820 | 0.8 821 | 0.8 822 | 0.8 823 | 0.8 824 | 0.8 825 | 0.8 826 | 0.8 827 | 0.8 828 | 0.8 829 | 0.8 830 | 0.8 831 | 0.9 832 | 0.9 833 
| 0.9 … [file continues: 0.9 on each line through line 881, then 1 on lines 882–902]
-------------------------------------------------------------------------------- /data/scaledata/Steve+Rhodes/label.3class.Steve+Rhodes: --------------------------------------------------------------------------------
[1,770 class labels, one per line: 0 on lines 1–413, 1 on lines 414–1061, 2 on lines 1062–1770]
-------------------------------------------------------------------------------- /data/scaledata/Steve+Rhodes/label.4class.Steve+Rhodes: --------------------------------------------------------------------------------
[1,770 class labels, one per line: 0 on lines 1–191, 1 on lines 192–717, 2 on lines 718–1483, 3 on lines 1484–1770]
-------------------------------------------------------------------------------- /embed_regularize.py: --------------------------------------------------------------------------------

import numpy as np

import torch
from torch.autograd import Variable

def embedded_dropout(embed, words, dropout=0.1, scale=None):
    if dropout:
        # Sample one Bernoulli per vocabulary row and rescale, so a dropped
        # word is zeroed everywhere it occurs in the batch.
        mask = embed.weight.data.new().resize_((embed.weight.size(0), 1)).bernoulli_(1 - dropout).expand_as(embed.weight) / (1 - dropout)
        mask = Variable(mask)
        masked_embed_weight = mask * embed.weight
    else:
        masked_embed_weight = embed.weight
    if scale:
        masked_embed_weight = scale.expand_as(masked_embed_weight) * masked_embed_weight

    padding_idx = embed.padding_idx
    if padding_idx is None:
        padding_idx = -1
    # Look up embeddings through the (pre-0.4) internal functional backend so
    # the masked weight is used instead of embed.weight.
    X = embed._backend.Embedding.apply(
        words, masked_embed_weight,
        padding_idx, embed.max_norm, embed.norm_type,
        embed.scale_grad_by_freq, embed.sparse
    )
    return X

if __name__ == '__main__':
    V = 50
    h = 4
    bptt = 10
    batch_size = 2

    embed = torch.nn.Embedding(V, h)

    words = np.random.random_integers(low=0, high=V - 1, size=(batch_size, bptt))
    words = torch.LongTensor(words)
    words = Variable(words)

    origX = embed(words)
    X = embedded_dropout(embed, words)

    print(origX)
    print(X)
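A minimal usage sketch for embedded_dropout, assuming the same pre-0.4 PyTorch the file above targets; the encode wrapper and the dropoute name are illustrative, not part of this repository:

import torch
from torch.autograd import Variable
from embed_regularize import embedded_dropout

encoder = torch.nn.Embedding(20000, 128)

def encode(words, training=True, dropoute=0.1):
    # While training, whole rows of the embedding matrix are zeroed, so every
    # occurrence of a dropped word disappears; at eval time, plain lookup.
    return embedded_dropout(encoder, words, dropout=dropoute if training else 0)

tokens = Variable(torch.LongTensor([[1, 5, 42], [7, 7, 3]]))
emb = encode(tokens)  # shape (2, 3, 128), with some word rows zeroed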
-------------------------------------------------------------------------------- /jupyter_notebook_config.py: --------------------------------------------------------------------------------

# Reference: https://svds.com/jupyter-notebook-best-practices-for-data-science/
import os
from subprocess import check_call

def post_save(model, os_path, contents_manager):
    """post-save hook for converting notebooks to .py scripts and HTML"""
    if model['type'] != 'notebook':
        return  # only do this for notebooks
    d, fname = os.path.split(os_path)
    check_call(['jupyter', 'nbconvert', '--to', 'script', fname], cwd=d)
    check_call(['jupyter', 'nbconvert', '--to', 'html', fname], cwd=d)

# `c` is the config object Jupyter injects into configuration files
c.FileContentsManager.post_save_hook = post_save

-------------------------------------------------------------------------------- /locked_dropout.py: --------------------------------------------------------------------------------

import torch
import torch.nn as nn
from torch.autograd import Variable

class LockedDropout(nn.Module):
    """Variational ("locked") dropout: one mask sampled per sequence and
    reused at every timestep."""

    def __init__(self, batch_first=False):
        super().__init__()
        self.batch_first = batch_first

    def forward(self, x, dropout=0.5):
        if not self.training or not dropout:
            return x
        # Sample the mask once, with a singleton time dimension, then
        # broadcast it over the whole sequence.
        if self.batch_first:
            m = x.data.new(x.size(0), 1, x.size(2)).bernoulli_(1 - dropout)
        else:
            m = x.data.new(1, x.size(1), x.size(2)).bernoulli_(1 - dropout)
        mask = Variable(m, requires_grad=False) / (1 - dropout)
        mask = mask.expand_as(x)
        return mask * x
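Because LockedDropout samples its mask once per call and broadcasts it over time, the zero pattern is identical at every timestep. A small sketch (shapes illustrative, not from this repository):

import torch
from torch.autograd import Variable
from locked_dropout import LockedDropout

lockdrop = LockedDropout()          # expects (seq_len, batch, features)
lockdrop.train()                    # the mask is only applied in training mode

x = Variable(torch.ones(5, 3, 8))   # 5 timesteps, batch of 3, 8 features
out = lockdrop(x, dropout=0.5)

# One shared mask: every timestep has the same zeroed feature positions,
# so out[0] and out[4] are elementwise equal here (entries are 0 or 2).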
-------------------------------------------------------------------------------- /weight_drop.py: --------------------------------------------------------------------------------

import torch
from torch.nn import Parameter
from functools import wraps

class WeightDrop(torch.nn.Module):
    def __init__(self, module, weights, dropout=0, variational=False):
        super(WeightDrop, self).__init__()
        self.module = module
        self.weights = weights
        self.dropout = dropout
        self.variational = variational
        self._setup()

    def widget_demagnetizer_y2k_edition(*args, **kwargs):
        # We need to replace flatten_parameters with a nothing function
        # It must be a function rather than a lambda as otherwise pickling explodes
        # We can't write boring code though, so ... WIDGET DEMAGNETIZER Y2K EDITION!
        # (╯°□°)╯︵ ┻━┻
        return

    def _setup(self):
        # Terrible temporary solution to an issue regarding compacting weights re: CUDNN RNN
        if issubclass(type(self.module), torch.nn.RNNBase):
            self.module.flatten_parameters = self.widget_demagnetizer_y2k_edition

        for name_w in self.weights:
            print('Applying weight drop of {} to {}'.format(self.dropout, name_w))
            w = getattr(self.module, name_w)
            del self.module._parameters[name_w]
            self.module.register_parameter(name_w + '_raw', Parameter(w.data))

    def _setweights(self):
        for name_w in self.weights:
            raw_w = getattr(self.module, name_w + '_raw')
            w = None
            if self.variational:
                mask = torch.autograd.Variable(torch.ones(raw_w.size(0), 1))
                if raw_w.is_cuda: mask = mask.cuda()
                mask = torch.nn.functional.dropout(mask, p=self.dropout, training=True)
                w = mask.expand_as(raw_w) * raw_w
            else:
                w = torch.nn.functional.dropout(raw_w, p=self.dropout, training=self.training)
            setattr(self.module, name_w, w)

    def forward(self, *args):
        self._setweights()
        return self.module.forward(*args)

if __name__ == '__main__':
    import torch
    from weight_drop import WeightDrop

    # Input is (seq, batch, input)
    x = torch.autograd.Variable(torch.randn(2, 1, 10)).cuda()
    h0 = None

    ###

    print('Testing WeightDrop')
    print('=-=-=-=-=-=-=-=-=-=')

    ###

    print('Testing WeightDrop with Linear')

    lin = WeightDrop(torch.nn.Linear(10, 10), ['weight'], dropout=0.9)
    lin.cuda()
    run1 = [x.sum() for x in lin(x).data]
    run2 = [x.sum() for x in lin(x).data]

    print('All items should be different')
    print('Run 1:', run1)
    print('Run 2:', run2)

    assert run1[0] != run2[0]
    assert run1[1] != run2[1]

    print('---')

    ###

    print('Testing WeightDrop with LSTM')

    wdrnn = WeightDrop(torch.nn.LSTM(10, 10), ['weight_hh_l0'], dropout=0.9)
    wdrnn.cuda()

    run1 = [x.sum() for x in wdrnn(x, h0)[0].data]
    run2 = [x.sum() for x in wdrnn(x, h0)[0].data]

    print('First timesteps should be equal, all others should differ')
    print('Run 1:', run1)
    print('Run 2:', run2)

    # First time step, not influenced by hidden to hidden weights, should be equal
    assert run1[0] == run2[0]
    # Second step should not
    assert run1[1] != run2[1]

    print('---')
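The __main__ block above exercises the standard DropConnect path; the variational=True branch instead samples one Bernoulli per row of the weight matrix and expands it across the row, so all hidden-to-hidden weights feeding a given unit are dropped together. A sketch of that mode (not part of this repository):

import torch
from weight_drop import WeightDrop

# One mask entry per row of weight_hh_l0, expanded across its columns.
wdrnn = WeightDrop(torch.nn.LSTM(10, 10), ['weight_hh_l0'],
                   dropout=0.5, variational=True)

x = torch.autograd.Variable(torch.randn(2, 1, 10))
output, hidden = wdrnn(x, None)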
-------------------------------------------------------------------------------- /yaringal_callbacks.py: --------------------------------------------------------------------------------

import numpy as np
from tensorflow.contrib.keras.python.keras.callbacks import Callback
from tensorflow.contrib.keras.python.keras import backend as K
from tensorflow.contrib.keras.python.keras import models
from tensorflow.contrib.keras.python.keras.engine.training import _standardize_input_data
from sklearn.metrics import log_loss, mean_squared_error


class ModelTest(Callback):
    ''' Test model at the end of every X epochs.
    The model is tested using both MC dropout and the dropout
    approximation. Output metrics for various losses are supported.
    # Arguments
        Xt: model inputs to test.
        Yt: model outputs to get accuracy / error (ground truth).
        T: number of samples to use in MC dropout.
        test_every_X_epochs: test every test_every_X_epochs epochs.
        batch_size: number of data points to put in each batch
            (often larger than training batch size).
        verbose: verbosity mode, 0 or 1.
        loss: a string from ['binary', 'categorical', 'euclidean']
            used to calculate the testing metric.
        mean_y_train: mean of outputs in regression cases to add back
            to model output ('euclidean' loss).
        std_y_train: std of outputs in regression cases to add back
            to model output ('euclidean' loss).
    # References
        - [Dropout: A simple way to prevent neural networks from overfitting](http://jmlr.org/papers/v15/srivastava14a.html)
        - [Dropout as a Bayesian Approximation: Representing Model Uncertainty in Deep Learning](http://arxiv.org/abs/1506.02142)
    '''

    def __init__(self, Xt, Yt, T=10, test_every_X_epochs=1, batch_size=500, verbose=1,
                 loss=None, mean_y_train=None, std_y_train=None):
        super(ModelTest, self).__init__()
        self.Xt = Xt
        self.Yt = np.array(Yt)
        self.T = T
        self.test_every_X_epochs = test_every_X_epochs
        self.batch_size = batch_size
        self.verbose = verbose
        self.loss = loss
        self.mean_y_train = mean_y_train
        self.std_y_train = std_y_train
        self._predict_stochastic = None
        self.history = []

    def predict_stochastic(self, X, batch_size=128, verbose=0):
        '''Generate output predictions for the input samples
        batch by batch, using stochastic forward passes. If
        dropout is used at training, during prediction network
        units will be dropped at random as well. This procedure
        can be used for MC dropout (see [ModelTest callbacks](callbacks.md)).
        # Arguments
            X: the input data, as a numpy array.
            batch_size: integer.
            verbose: verbosity mode, 0 or 1.
        # Returns
            A numpy array of predictions.
        # References
            - [Dropout: A simple way to prevent neural networks from overfitting](http://jmlr.org/papers/v15/srivastava14a.html)
            - [Dropout as a Bayesian Approximation: Representing Model Uncertainty in Deep Learning](http://arxiv.org/abs/1506.02142)
        '''
        # https://stackoverflow.com/questions/44351054/keras-forward-pass-with-dropout
        X = _standardize_input_data(
            X, self.model.model._feed_input_names, self.model.model._feed_input_shapes,
            check_batch_axis=False
        )
        if self._predict_stochastic is None:  # we only get self.model after init
            self._predict_stochastic = K.function(
                [self.model.layers[0].input, K.learning_phase()], [self.model.layers[-1].output])
        return self.model._predict_loop(
            self._predict_stochastic, X + [1.], batch_size, verbose)[:, 0]

    def on_epoch_end(self, epoch, logs=None):
        if epoch % self.test_every_X_epochs != 0:
            return
        model_output = self.model.predict(self.Xt, batch_size=self.batch_size,
                                          verbose=self.verbose)[:, 0]
        MC_model_output = []
        for _ in range(self.T):
            MC_model_output += [self.predict_stochastic(self.Xt,
                                                        batch_size=self.batch_size,
                                                        verbose=self.verbose)]

        MC_model_output = np.array(MC_model_output)
        MC_model_output_mean = np.mean(MC_model_output, 0)

        if self.loss == 'binary':
            standard_acc = np.mean(self.Yt == np.round(model_output.flatten()))
            MC_acc = np.mean(self.Yt == np.round(
                MC_model_output_mean.flatten()))
            standard_loss = log_loss(self.Yt, model_output)
            MC_loss = log_loss(self.Yt, MC_model_output_mean)
            print("Standard logloss/acc at epoch %05d: %0.4f/%.2f%%" %
                  (epoch, float(standard_loss), standard_acc * 100))
            print("MC logloss/acc at epoch %05d: %0.4f/%.2f%%" %
                  (epoch, float(MC_loss), MC_acc * 100))
            self.history.append(
                [float(standard_loss), float(MC_loss), standard_acc, MC_acc])
        elif self.loss == 'categorical':
            standard_acc = np.mean(
                np.argmax(self.Yt, axis=-1) == np.argmax(model_output, axis=-1))
            MC_acc = np.mean(np.argmax(self.Yt, axis=-1) ==
                             np.argmax(MC_model_output_mean, axis=-1))
            print("Standard accuracy at epoch %05d: %0.5f" %
                  (epoch, float(standard_acc)))
            print("MC accuracy at epoch %05d: %0.5f" % (epoch, float(MC_acc)))
        elif self.loss == 'euclidean':
            Yt_inverse_transformed = self.Yt * self.std_y_train + self.mean_y_train
            standard_err_raw = mean_squared_error(self.Yt, model_output)
            model_output = model_output * self.std_y_train + self.mean_y_train
            standard_err = mean_squared_error(
                Yt_inverse_transformed, model_output)
            MC_err_raw = mean_squared_error(self.Yt, MC_model_output_mean)
            MC_model_output_mean = MC_model_output_mean * self.std_y_train + self.mean_y_train
            MC_err = mean_squared_error(
                Yt_inverse_transformed, MC_model_output_mean)
            # Printed as raw-scale MSE / original-scale RMSE
            print("Standard error at epoch %03d: %0.4f/%0.4f" %
                  (epoch, float(standard_err_raw), float(standard_err)**0.5))
            print("MC error at epoch %03d: %0.4f/%0.4f" %
                  (epoch, float(MC_err_raw), float(MC_err)**0.5))
            self.history.append([float(standard_err), float(MC_err)])
        else:
            raise ValueError('Unsupported loss: %s' % self.loss)
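The T stochastic passes that on_epoch_end averages can also provide a per-example uncertainty estimate. A hedged sketch — mc_uncertainty is not part of this repository, and it assumes a ModelTest instance that Keras has already bound to a model (set_model is called when the callback is passed to fit):

import numpy as np

def mc_uncertainty(modeltest, X, T=50):
    # Each call is one forward pass with dropout active (learning_phase=1);
    # the spread over T samples approximates predictive uncertainty.
    samples = np.array([modeltest.predict_stochastic(X) for _ in range(T)])
    return samples.mean(axis=0), samples.std(axis=0)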
-------------------------------------------------------------------------------- /yaringal_dataset.py: --------------------------------------------------------------------------------

import numpy as np
from tensorflow.contrib.keras.python.keras.preprocessing import sequence
from tensorflow.contrib.keras.python.keras.preprocessing import text


class loader(object):
    def __init__(self, init_seed, maxlen, nb_words, skip_top, test_split):
        self.start_char = 1
        self.oov_char = 2
        self.index_from = 3

        files = ["Dennis+Schwartz", "James+Berardinelli",
                 "Scott+Renshaw", "Steve+Rhodes"]
        texts, ratings = [], []
        for file in files:
            with open("data/scaledata/" + file + "/subj." + file, "r") as f:
                texts += list(f)
            with open("data/scaledata/" + file + "/rating." + file, "r") as f:
                ratings += list(f)
        tokenizer = text.Tokenizer(filters='')
        tokenizer.fit_on_texts(texts)
        X = tokenizer.texts_to_sequences(texts)
        Y = [float(rating) for rating in ratings]

        # Shuffle data:
        np.random.seed(init_seed)
        np.random.shuffle(X)
        np.random.seed(init_seed)
        np.random.shuffle(Y)

        # Parse data
        X = [[self.start_char] + [w + self.index_from for w in x] for x in X]

        new_X = []
        new_Y = []
        for x, y in zip(X, Y):
            for i in range(0, len(x), maxlen):
                new_X.append(x[i:i + maxlen])
                new_Y.append(y)
        X = np.array(new_X)
        Y = np.array(new_Y)
        # by convention, use 2 as OOV word
        # reserve 'index_from' (=3 by default) characters: 0 (padding), 1 (start), 2 (OOV)
        X = [[self.oov_char if (w >= nb_words or w < skip_top)
              else w for w in x] for x in X]

        self.X_train = X[:int(len(X) * (1 - test_split))]
        self.Y_train = Y[:int(len(X) * (1 - test_split))]
        self.mean_y_train = np.mean(self.Y_train)
        self.std_y_train = np.std(self.Y_train)
        self.Y_train = (self.Y_train - self.mean_y_train) / self.std_y_train

        self.X_test = X[int(len(X) * (1 - test_split)):]
        self.Y_test = Y[int(len(X) * (1 - test_split)):]
        self.Y_test = (self.Y_test - self.mean_y_train) / self.std_y_train

        print(len(self.X_train), 'train sequences')
        print(len(self.X_test), 'test sequences')

        print("Pad sequences (samples x time)")
        self.X_train = sequence.pad_sequences(self.X_train, maxlen=maxlen)
        self.X_test = sequence.pad_sequences(self.X_test, maxlen=maxlen)
        print('X_train shape:', self.X_train.shape)
        print('X_test shape:', self.X_test.shape)
--------------------------------------------------------------------------------
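A quick sketch of what loader produces, assuming the data files above are present under data/scaledata/ (argument values are illustrative):

from yaringal_dataset import loader

dataset = loader(init_seed=2017, maxlen=200, nb_words=20000,
                 skip_top=0, test_split=0.2)

# Targets are standardized with the *training* mean/std, so predictions must
# be mapped back: rating = y * dataset.std_y_train + dataset.mean_y_train
print(dataset.X_train.shape)   # (n_train, 200): padded word-id sequences
print(dataset.Y_train[:3])     # standardized ratings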