├── runner.py
├── utils
│   ├── misc.py
│   ├── results_plotter.py
│   ├── parser.py
│   ├── dataloader.py
│   └── logger.py
├── LICENSE
├── .gitignore
├── README.md
├── models
│   ├── perceptron.py
│   └── nn.py
├── run_svm.py
├── run_pam.py
├── run_nn.py
└── uci
    ├── heart.csv
    ├── thyroid.csv
    ├── breast-cancer.data
    ├── diabetes.csv
    └── flare-solar.csv

/runner.py:
--------------------------------------------------------------------------------
1 | from utils.parser import parse_args
2 | 
3 | 
4 | def run(args):
5 |     from run_nn import run as run_nn
6 |     from run_svm import run as run_svm
7 |     from run_pam import run as run_pam
8 | 
9 |     run_nn(args)
10 |     run_svm(args)
11 |     run_pam(args)
12 | 
13 | 
14 | if __name__ == '__main__':
15 |     run(parse_args().__dict__)
16 | 
--------------------------------------------------------------------------------
/utils/misc.py:
--------------------------------------------------------------------------------
1 | import random
2 | import numpy as np
3 | import torch
4 | 
5 | 
6 | def set_global_seeds(i):
7 |     random.seed(i)
8 |     np.random.seed(i)
9 |     torch.manual_seed(i)
10 |     if torch.cuda.is_available():
11 |         torch.cuda.manual_seed_all(i)
12 | 
13 | 
14 | def make_arg_list(arg):
15 |     arg = arg.copy()
16 |     args = []
17 |     for seed in range(arg['seeds']):
18 |         arg['seed'] = seed
19 |         args.append(arg.copy())
20 |     return args
21 | 
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 | 
3 | Copyright (c) 2019 Henry Guo
4 | 
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 | 
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 | 
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 | 
6 | # C extensions
7 | *.so
8 | 
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 | MANIFEST
27 | 
28 | # PyInstaller
29 | # Usually these files are written by a python script from a template
30 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
31 | *.manifest
32 | *.spec
33 | 
34 | # Installer logs
35 | pip-log.txt
36 | pip-delete-this-directory.txt
37 | 
38 | # Unit test / coverage reports
39 | htmlcov/
40 | .tox/
41 | .coverage
42 | .coverage.*
43 | .cache
44 | nosetests.xml
45 | coverage.xml
46 | *.cover
47 | .hypothesis/
48 | .pytest_cache/
49 | 
50 | # Translations
51 | *.mo
52 | *.pot
53 | 
54 | # Django stuff:
55 | *.log
56 | local_settings.py
57 | db.sqlite3
58 | 
59 | # Flask stuff:
60 | instance/
61 | .webassets-cache
62 | 
63 | # Scrapy stuff:
64 | .scrapy
65 | 
66 | # Sphinx documentation
67 | docs/_build/
68 | 
69 | # PyBuilder
70 | target/
71 | 
72 | # Jupyter Notebook
73 | .ipynb_checkpoints
74 | 
75 | # pyenv
76 | .python-version
77 | 
78 | # celery beat schedule file
79 | celerybeat-schedule
80 | 
81 | # SageMath parsed files
82 | *.sage.py
83 | 
84 | # Environments
85 | .env
86 | .venv
87 | env/
88 | venv/
89 | ENV/
90 | env.bak/
91 | venv.bak/
92 | 
93 | # Spyder project settings
94 | .spyderproject
95 | .spyproject
96 | 
97 | # Rope project settings
98 | .ropeproject
99 | 
100 | # mkdocs documentation
101 | /site
102 | 
103 | # mypy
104 | .mypy_cache/
105 | 
106 | .idea/
107 | .DS_Store
108 | uci/large/
109 | logs/
110 | *.ipynb
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # learning-with-noisy-labels
2 | Learning with Noisy Labels by adopting a peer prediction loss function.
3 | 
4 | ## Requirements
5 | * Python 3
6 | * PyTorch
7 | * pandas
8 | * NumPy
9 | * SciPy
10 | * scikit-learn
11 | 
12 | ## Example
13 | Run the following command to replicate our experiment on the UCI Heart dataset.
14 | ```
15 | python runner.py heart --seeds 8 --test-size 0.15 --val-size 0.1 --dropout 0 --loss bce --activation relu --normalize --verbose --e0 0.1 --e1 0.3 --episodes 1000 --batchsize 64 --batchsize-peer 64 --hidsize 8 --lr 0.0007 --alpha 1
16 | ```
17 | 
18 | To equalize the class prior by pre-sampling, add the `--equalize-prior` flag.
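
The peer loss weighted by `--alpha` pairs each standard loss term with a "peer" term evaluated on an independently sampled input and an independently sampled label. A minimal sketch of the idea, assuming a BCE base loss (the repo's actual implementation is the `PeerBinaryClassifier` in `models/nn.py`; `peer_loss` below is only an illustrative name):

```
import torch
import torch.nn.functional as F

def peer_loss(logits, y, peer_logits, peer_y, alpha=1.0):
    """Peer loss sketch: fit the (possibly noisy) labels, but subtract alpha
    times the loss on randomly mismatched prediction/label pairs, which
    penalizes a classifier that blindly agrees with the label noise.
    Labels are float tensors in {0., 1.}."""
    base = F.binary_cross_entropy_with_logits(logits, y)
    peer = F.binary_cross_entropy_with_logits(peer_logits, peer_y)
    return base - alpha * peer
```

The `--e0`/`--e1` arguments inject asymmetric label noise before training: a fraction `e0` of the class-0 labels and a fraction `e1` of the class-1 labels are flipped, as done by `make_noisy_data` in `utils/dataloader.py`:

```
import numpy as np

def make_noisy_data(y, e0, e1):
    # flip e0 of the 0-labels and e1 of the 1-labels, chosen uniformly at random
    flip0 = np.random.choice(np.where(y == 0)[0], int((y == 0).sum() * e0), replace=False)
    flip1 = np.random.choice(np.where(y == 1)[0], int((y == 1).sum() * e1), replace=False)
    y_noisy = y.copy()
    flipped = np.concatenate([flip0, flip1])
    y_noisy[flipped] = 1 - y_noisy[flipped]
    return y_noisy

y = np.array([0, 0, 0, 0, 1, 1, 1, 1])
print(make_noisy_data(y, e0=0.25, e1=0.5))  # flips one 0-label and two 1-labels
```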
19 | 
20 | ### Details of the arguments:
21 | 
22 | * dataset (positional): name of the dataset; one of 'heart', 'breast', 'breast2', 'german', 'banana', 'image', 'thyroid', 'titanic', 'splice', 'twonorm', 'waveform', 'flare-solar', 'diabetes', 'susy', 'higgs'
23 | * --e0: error rate for class 0 (default: 0)
24 | * --e1: error rate for class 1 (default: 0)
25 | * --hidsize: sizes of the hidden layers
26 | * --lr: learning rate
27 | * --batchsize: batch size for training
28 | * --batchsize-peer: batch size for sampling the peer term
29 | * --alpha: weight of the peer term in the peer loss
30 | * --margin: margin for PAM (the perceptron algorithm with margin)
31 | * --C1: weight of class 1 for the C-SVM
32 | * --dropout: dropout rate for the neural network (deprecated; results are better without dropout)
33 | * --activation: activation function; one of relu, sigmoid, tanh, elu, relu6 (default: relu)
34 | * --loss: loss function; one of bce, mse, logistic, l1, huber (default: bce)
35 | * --seeds: number of random seeds to repeat each experiment over (default: 1)
36 | * --episodes: number of training episodes
37 | * --val-size: validation set proportion
38 | * --test-size: test set proportion
39 | * --equalize-prior: equalize P(Y=1) and P(Y=0) by pre-sampling (store_true)
40 | * --normalize: standardize the features (store_true)
41 | * --verbose: output more information (store_true)
42 | 
--------------------------------------------------------------------------------
/models/perceptron.py:
--------------------------------------------------------------------------------
1 | """https://github.com/lightgan/numpy-perceptron/blob/develop/perceptron.ipynb"""
2 | import numpy as np
3 | from sklearn.utils import shuffle
4 | 
5 | 
6 | class Perceptron(object):
7 |     """
8 |     A perceptron class that covers the different perceptron variants,
9 |     depending on how it is initialized.
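
    With margin=0 this is the standard perceptron; with margin > 0 it becomes
    the perceptron algorithm with margin (PAM), which also updates on examples
    that are classified correctly but whose confidence y * (Wx + b) falls
    below the margin.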
10 | """ 11 | def __init__(self, feature_dim, learning_rate=0.001, margin=0.0): 12 | # Initialize needed variables 13 | self.W = np.random.uniform(-0.1, 0.1, size=(1, feature_dim)) 14 | self.b = np.random.uniform(-0.01, 0.01) 15 | self.lr = learning_rate 16 | self.margin = margin 17 | 18 | def fit(self, X_train, y_train, epochs=100): 19 | pred_acc = [] 20 | updates_list = [] 21 | y_train[y_train == 0] = -1 22 | 23 | for i in range(epochs): 24 | X_train, y_train = shuffle(X_train, y_train) 25 | # Predict and update if needed 26 | y_pred, updates = self._predict_and_update(X_train, y_train) 27 | # Update number of updates 28 | updates_list.append(updates) 29 | pred_acc.append(np.mean(y_train == y_pred)) 30 | 31 | return pred_acc 32 | 33 | def _predict_and_update(self, X, y): 34 | num_updates = 0 35 | y_pred = np.zeros((len(y))) 36 | for i, x in enumerate(X): 37 | single_pred = self._predict(x) 38 | # Set label depending on sign of pred 39 | y_pred[i] = 1 if single_pred >= 0 else -1 40 | # Check if a mistake was made or we are below the margin 41 | if (y_pred[i] != y[i]) or (y[i] * single_pred < self.margin): 42 | num_updates += 1 43 | # sign of y determines if we add or subtract 44 | self.W = self.W + (self.lr * X[i] * y[i]) 45 | self.b = self.b + (self.lr * y[i]) 46 | return y_pred, num_updates 47 | 48 | def _predict(self, x): 49 | return np.dot(self.W, x.T).flatten() + self.b 50 | 51 | def predict(self, x): 52 | res = self._predict(x) 53 | res[res >= 0] = 1 54 | res[res < 0] = 0 55 | return res 56 | 57 | def score(self, X_test, y_test): 58 | y_pred = self.predict(X_test) 59 | return np.mean(y_pred == y_test) 60 | -------------------------------------------------------------------------------- /run_svm.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import numpy as np 3 | import multiprocessing as mp 4 | 5 | from sklearn.svm import SVC 6 | 7 | from utils import logger 8 | from utils.dataloader import DataLoader 9 | from utils.misc import set_global_seeds, make_arg_list 10 | 11 | CLASS_WEIGHTS = [0.1, 0.2, 0.25, 0.33, 0.5, 1.0, 2.0, 3.0, 4.0, 5.0, 10.0] 12 | 13 | 14 | def find_best_c1(args): 15 | set_global_seeds(args['seed']) 16 | dataset = DataLoader(args['dataset']) 17 | X_train, X_test, X_val, y_train, y_test, y_val = dataset.prepare_train_test_val(args) 18 | results = [] 19 | for c1 in CLASS_WEIGHTS: 20 | model = SVC(gamma='auto', class_weight={0: 1., 1: c1}) 21 | model.fit(X_train, y_train) 22 | results.append(model.score(X_val, y_val)) 23 | return results 24 | 25 | 26 | def run_c_svm(args): 27 | set_global_seeds(args['seed']) 28 | dataset = DataLoader(args['dataset']) 29 | X_train, X_test, X_val, y_train, y_test, y_val = dataset.prepare_train_test_val(args) 30 | model = SVC(gamma='auto', class_weight={0: 1., 1: args['C1']}) 31 | model.fit(X_train, y_train) 32 | return model.score(X_test, y_test) 33 | 34 | 35 | def run(args): 36 | logger.configure(f'logs/{args["dataset"]}/svm/{datetime.datetime.now().strftime("%y-%m-%d-%H-%M-%S")}') 37 | logger.info(args) 38 | 39 | pool = mp.Pool(mp.cpu_count()) 40 | svm_arg = args.copy() 41 | 42 | if 'C1' not in svm_arg.keys(): 43 | best_c1 = pool.map(find_best_c1, make_arg_list(svm_arg)) 44 | best_c1 = np.mean(best_c1, 0) 45 | if 'verbose' in svm_arg.keys() and svm_arg['verbose']: 46 | for i in range(len(best_c1)): 47 | logger.record_tabular(f'[C-SVM] C1 = {CLASS_WEIGHTS[i]}', best_c1[i]) 48 | logger.dump_tabular() 49 | best_c1 = CLASS_WEIGHTS[best_c1.argmax()] 50 | 
        logger.record_tabular('[C-SVM] best C1', best_c1)
51 |         svm_arg['C1'] = best_c1
52 | 
53 |     results_svm = pool.map(run_c_svm, make_arg_list(svm_arg))
54 | 
55 |     logger.record_tabular('[C-SVM] accuracy mean', np.mean(results_svm))
56 |     logger.record_tabular('[C-SVM] accuracy max', np.max(results_svm))
57 |     logger.record_tabular('[C-SVM] accuracy min', np.min(results_svm))
58 |     logger.record_tabular('[C-SVM] accuracy std', np.std(results_svm))
59 |     logger.dump_tabular()
60 | 
61 | 
62 | if __name__ == '__main__':
63 |     from utils.parser import parse_args
64 |     run(parse_args().__dict__)
65 | 
--------------------------------------------------------------------------------
/run_pam.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | import numpy as np
3 | import multiprocessing as mp
4 | 
5 | from models.perceptron import Perceptron
6 | 
7 | from utils import logger
8 | from utils.dataloader import DataLoader
9 | from utils.misc import set_global_seeds, make_arg_list
10 | 
11 | MARGINS = [0, 0.125, 0.25, 0.5, 1, 2, 3, 4, 5]
12 | 
13 | 
14 | def find_best_margin(args):
15 |     """Return the validation accuracy for each candidate margin in MARGINS."""
16 |     set_global_seeds(args['seed'])
17 |     dataset = DataLoader(args['dataset'])
18 |     X_train, X_test, X_val, y_train, y_test, y_val = dataset.prepare_train_test_val(args)
19 | 
20 |     results = []
21 |     for margin in MARGINS:
22 |         model = Perceptron(feature_dim=X_train.shape[-1], margin=margin)
23 |         model.fit(X_train, y_train)
24 |         results.append(model.score(X_val, y_val))
25 |     return results
26 | 
27 | 
28 | def run_pam(args):
29 |     set_global_seeds(args['seed'])
30 |     dataset = DataLoader(args['dataset'])
31 |     X_train, X_test, X_val, y_train, y_test, y_val = dataset.prepare_train_test_val(args)
32 |     model = Perceptron(feature_dim=X_train.shape[-1], margin=args['margin'])
33 |     model.fit(X_train, y_train)
34 |     return model.score(X_test, y_test)
35 | 
36 | 
37 | def run(args):
38 |     logger.configure(f'logs/{args["dataset"]}/pam/{datetime.datetime.now().strftime("%y-%m-%d-%H-%M-%S")}')
39 |     logger.info(args)
40 | 
41 |     pool = mp.Pool(mp.cpu_count())
42 |     pam_arg = args.copy()
43 | 
44 |     if 'margin' not in pam_arg.keys():
45 |         best_margin = pool.map(find_best_margin, make_arg_list(pam_arg))
46 |         best_margin = np.mean(best_margin, 0)
47 |         if 'verbose' in pam_arg.keys() and pam_arg['verbose']:
48 |             for i in range(len(best_margin)):
49 |                 logger.record_tabular(f'[PAM] margin = {MARGINS[i]}', best_margin[i])
50 |             logger.dump_tabular()
51 |         best_margin = MARGINS[best_margin.argmax()]
52 |         logger.record_tabular('[PAM] best margin', best_margin)
53 |         pam_arg['margin'] = best_margin
54 | 
55 |     results_pam = pool.map(run_pam, make_arg_list(pam_arg))
56 | 
57 |     logger.record_tabular('[PAM] accuracy mean', np.mean(results_pam))
58 |     logger.record_tabular('[PAM] accuracy max', np.max(results_pam))
59 |     logger.record_tabular('[PAM] accuracy min', np.min(results_pam))
60 |     logger.record_tabular('[PAM] accuracy std', np.std(results_pam))
61 |     logger.dump_tabular()
62 | 
63 | 
64 | if __name__ == '__main__':
65 |     from utils.parser import parse_args
66 |     run(parse_args().__dict__)
67 | 
--------------------------------------------------------------------------------
/utils/results_plotter.py:
--------------------------------------------------------------------------------
1 | import os
2 | import os.path as osp
3 | import argparse
4 | 
5 | import numpy as np
6 | import pandas as pd
7 | 
8 | import matplotlib
9 | matplotlib.use('Agg')
10 | import matplotlib.pyplot as plt
11 | from matplotlib.backends.backend_pdf import PdfPages
12 | from scipy.ndimage import gaussian_filter1d
13 | 
14 | 
15 | def parse_args():
16 |     parser = argparse.ArgumentParser()
17 |     parser.add_argument('log', type=str, nargs='?', default='test')
18 |     args = parser.parse_args()
19 |     return args
20 | 
21 | 
22 | def plot_(folder):
23 |     results = {'training acc': [], 'testing acc': []}
24 |     for root, dirs, files in os.walk(folder):
25 |         for dir in dirs:
26 |             csv = osp.join(root, dir, 'progress.csv')
27 |             df = pd.read_csv(csv)
28 |             for key in results.keys():
29 |                 results[key].append(df[key].values)
30 | 
31 |     for i, key in enumerate(results.keys()):
32 |         mean = np.mean(results[key], axis=0)
33 |         std = np.std(results[key], axis=0)
34 |         results[key] = {'mean': mean, 'std': std}
35 |         _mean = gaussian_filter1d(mean, sigma=3)
36 |         _min = gaussian_filter1d(mean - std, sigma=3)
37 |         _max = gaussian_filter1d(mean + std, sigma=3)
38 |         plt.plot(_mean, label=key, color=f'C{i}')
39 |         plt.fill_between(range(mean.size), _min, _max, color=f'C{i}', alpha=0.3)
40 | 
41 |     plt.xlabel('episodes')
42 |     plt.legend()
43 |     plt.grid()
44 |     plt.show()
45 | 
46 |     mean = results['testing acc']['mean']
47 |     max_mean = max([mean[i:i + 10].mean() for i in range(len(mean) - 9)])
48 |     print(f'The maximal mean test accuracy over 10 episodes is {max_mean}')
49 | 
50 | 
51 | def plot(results, labels, title=None, path=None):
52 |     with PdfPages(f'{path}/{title.replace(" ", "_")}.pdf') as pdf:
53 |         for i, (result, label) in enumerate(zip(results, labels)):
54 |             mean = np.mean(result, axis=0)
55 |             std = np.std(result, axis=0)
56 |             _mean = gaussian_filter1d(mean, sigma=3)
57 |             _min = gaussian_filter1d(mean - std, sigma=3)
58 |             _max = gaussian_filter1d(mean + std, sigma=3)
59 |             plt.plot(_mean, label=label, color=f'C{i}')
60 |             plt.fill_between(range(mean.size), _min, _max, color=f'C{i}', alpha=0.3)
61 | 
62 |         if title:
63 |             plt.title(title)
64 |         plt.xlabel('episodes (x20)')
65 |         plt.legend()
66 |         plt.grid()
67 |         if path:
68 |             pdf.savefig()
69 |         else:
70 |             plt.show()
71 |         plt.cla()
72 | 
73 | 
74 | def plot__(results, labels, title=None):
75 |     for i, (result, label) in enumerate(zip(results, labels)):
76 |         plt.plot(result[0], label=label, color=f'C{i}')
77 |         for res in result[1:]:
78 |             plt.plot(res, color=f'C{i}')
79 |         _min = gaussian_filter1d(np.min(result, 0), sigma=3)
80 |         _max = gaussian_filter1d(np.max(result, 0), sigma=3)
81 |         plt.fill_between(range(len(_min)), _min, _max, color=f'C{i}', alpha=0.3)
82 | 
83 |     if title:
84 |         plt.title(title)
85 |     plt.xlabel('episodes')
86 |     # plt.legend()
87 |     plt.grid()
88 |     plt.show()
89 | 
90 | 
91 | def main():
92 |     args = parse_args()
93 |     folder = f'logs/{args.log}'
94 |     plot_(folder)
95 | 
96 | 
97 | if __name__ == '__main__':
98 |     main()
99 | 
--------------------------------------------------------------------------------
/utils/parser.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | 
3 | 
4 | def parse_args():
5 |     parser = argparse.ArgumentParser()
6 |     parser.add_argument('dataset', type=str, nargs='?', default='heart', choices=[
7 |         'heart', 'breast', 'breast2', 'german', 'banana', 'image', 'thyroid',
8 |         'titanic', 'splice', 'twonorm', 'waveform', 'flare-solar', 'diabetes',
9 |         'susy', 'higgs',
10 |     ])
11 | 
12 |     # error rate
13 |     parser.add_argument('--e0', type=float, default=0,
14 |                         help='error rate for class 0 (default: 0)')
15 |     parser.add_argument('--e1', type=float, default=0,
16 |                         help='error rate for class 1 (default: 0)')
17 | 
18 |     # neural network hyper-parameters
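    # The list-valued arguments below define the grid that run_nn searches over;
    # when every list holds a single value, the grid search is skipped entirely.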
19 |     parser.add_argument('--hidsize', nargs='+', type=int,
20 |                         default=[8, 16, 32, 64],
21 |                         help='sizes of hidden layers for grid search')
22 |     parser.add_argument('--lr', nargs='+', type=float,
23 |                         default=[0.0007, 0.001, 0.005, 0.01, 0.05],
24 |                         help='learning rates for grid search')
25 |     parser.add_argument('--batchsize', nargs='+', type=int,
26 |                         default=[1, 4, 16, 32, 64],
27 |                         help='batch sizes for the neural networks')
28 |     parser.add_argument('--batchsize-peer', nargs='+', type=int,
29 |                         default=[1, 4, 16, 32, 64],
30 |                         help='batch sizes for sampling the peer term')
31 | 
32 |     # hyper-parameters
33 |     parser.add_argument('--alpha', nargs='+', type=float,
34 |                         default=[-5, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 2.0, 3.0, 4.0, 5.0],
35 |                         help='coefficient of the peer loss for grid search')
36 |     parser.add_argument('--margin', type=float, default=0.,
37 |                         help='margin for PAM (default: 0.)')
38 |     parser.add_argument('--C1', type=float, default=1.,
39 |                         help='C1 for C-SVM (default: 1.0)')
40 |     parser.add_argument('--dropout', type=float, default=0.,
41 |                         help='dropout for neural networks (default: 0)')
42 | 
43 |     # functions
44 |     parser.add_argument('--activation', type=str, default='relu',
45 |                         choices=['relu', 'sigmoid', 'tanh', 'elu', 'relu6'],
46 |                         help='activation function (default: relu)')
47 |     parser.add_argument('--loss', type=str, default='bce',
48 |                         choices=['bce', 'mse', 'logistic', 'l1', 'huber'],
49 |                         help='loss function (default: bce)')
50 | 
51 |     # experiment
52 |     parser.add_argument('--seeds', type=int, default=1,
53 |                         help='number of seeds to repeat the experiments over (default: 1)')
54 |     parser.add_argument('--episodes', type=int, default=1000,
55 |                         help='number of training episodes (default: 1000)')
56 |     parser.add_argument('--val-size', type=float, default=0.15,
57 |                         help='proportion of validation set (default: 0.15)')
58 |     parser.add_argument('--test-size', type=float, default=0.15,
59 |                         help='proportion of test set (default: 0.15)')
60 |     parser.add_argument('--equalize-prior', action='store_true', default=False,
61 |                         help='whether to equalize P(y=1) and P(y=0) (default: False)')
62 |     parser.add_argument('--normalize', action='store_true', default=False,
63 |                         help='whether to normalize the data (default: False)')
64 |     parser.add_argument('--verbose', action='store_true', default=False,
65 |                         help='whether to output more information (default: False)')
66 | 
67 |     args = parser.parse_args()
68 | 
69 |     args.hidsize = [[hdim, hdim] for hdim in args.hidsize]  # two hidden layers per size
70 | 
71 |     return args
72 | 
--------------------------------------------------------------------------------
/utils/dataloader.py:
--------------------------------------------------------------------------------
1 | import numpy as np
2 | import pandas as pd
3 | 
4 | from sklearn.model_selection import train_test_split
5 | from sklearn.preprocessing import StandardScaler
6 | 
7 | 
8 | class DataLoader(object):
9 |     def __init__(self, name):
10 |         self.name = name
11 |         if name == 'susy':
12 |             self.df = pd.read_csv('uci/large/SUSY.csv', header=None)
13 |             self.preprocess_susy()
14 |         elif name == 'higgs':
15 |             self.df = pd.read_csv('uci/large/HIGGS.csv', header=None)
16 |             self.preprocess_susy()  # HIGGS shares the SUSY layout: label in column 0
17 |         elif name == 'heart':
18 |             self.df = pd.read_csv('uci/heart.csv')
19 |             self.preprocess_heart()
20 |         elif name == 'breast':
21 |             self.df = pd.read_csv('uci/breast-cancer.data', header=None, sep=',')
22 |             self.preprocess_breast()
23 |         elif name == 'breast2':
24 |             self.df = pd.read_csv('uci/breast.csv')
25 | 
self.preprocess_breast2() 26 | elif name == 'german': 27 | self.df = pd.read_csv('uci/german.data', header=None, sep=' ') 28 | self.preprocess_german() 29 | elif name == 'banana': 30 | self.df = pd.read_csv('uci/banana.csv') 31 | elif name == 'image': 32 | self.df = pd.read_csv('uci/image.csv') 33 | elif name == 'titanic': 34 | self.df = pd.read_csv('uci/titanic.csv') 35 | elif name == 'thyroid': 36 | self.df = pd.read_csv('uci/thyroid.csv') 37 | elif name == 'twonorm': 38 | self.df = pd.read_csv('uci/twonorm.csv') 39 | elif name == 'waveform': 40 | self.df = pd.read_csv('uci/waveform.csv') 41 | elif name == 'flare-solar': 42 | self.df = pd.read_csv('uci/flare-solar.csv') 43 | self.categorical() 44 | elif name == 'waveform': 45 | self.df = pd.read_csv('uci/waveform.csv') 46 | elif name == 'splice': 47 | self.df = pd.read_csv('uci/splice.csv') 48 | self.categorical() 49 | elif name == 'diabetes': 50 | self.df = pd.read_csv('uci/diabetes.csv') 51 | self.preprocess_diabetes() 52 | 53 | def load(self, path): 54 | df = open(path).readlines() 55 | df = list(map(lambda line: list(map(float, line.split())), df)) 56 | self.df = pd.DataFrame(df) 57 | return self 58 | 59 | def categorical(self): 60 | self.df = onehot(self.df, [col for col in self.df.columns if col != 'target']) 61 | 62 | def preprocess_susy(self): 63 | self.df.rename(columns={0: 'target'}, inplace=True) 64 | 65 | def preprocess_heart(self): 66 | self.df = onehot(self.df, ['cp', 'slope', 'thal', 'restecg']) 67 | 68 | def preprocess_german(self): 69 | self.df.rename(columns={20: 'target'}, inplace=True) 70 | self.df.target.replace({2: 0}, inplace=True) 71 | cate_cols = [i for i in self.df.columns if self.df[i].dtype == 'object'] 72 | self.df = onehot(self.df, cate_cols) 73 | 74 | def preprocess_breast(self): 75 | self.df.rename(columns={0: 'target'}, inplace=True) 76 | self.df.target.replace({'no-recurrence-events': 0, 'recurrence-events': 1}, inplace=True) 77 | self.categorical() 78 | 79 | def preprocess_breast2(self): 80 | self.df.replace({'M': 1, 'B': 0}, inplace=True) 81 | self.df.rename(columns={'diagnosis': 'target'}, inplace=True) 82 | self.df.drop(['id', 'Unnamed: 32'], axis=1, inplace=True) 83 | 84 | def preprocess_diabetes(self): 85 | self.df.rename(columns={'Outcome': 'target'}, inplace=True) 86 | 87 | def equalize_prior(self, target='target'): 88 | pos = self.df.loc[self.df[target] == 1] 89 | neg = self.df.loc[self.df[target] == 0] 90 | n = min(pos.shape[0], neg.shape[0]) 91 | pos = pos.sample(n=n) 92 | neg = neg.sample(n=n) 93 | self.df = pd.concat([pos, neg], axis=0) 94 | return self 95 | 96 | def train_test_split(self, test_size=0.25, normalize=True): 97 | X = self.df.drop(['target'], axis=1).values 98 | y = self.df.target.values 99 | self.X_train, self.X_test, self.y_train, self.y_test = train_test_split(X, y, test_size=test_size) 100 | sc = StandardScaler() 101 | if normalize: 102 | self.X_train = sc.fit_transform(self.X_train) 103 | self.X_test = sc.transform(self.X_test) 104 | return self.X_train, self.X_test, self.y_train, self.y_test 105 | 106 | def train_test_val_split(self, e0, e1, test_size=0.2, val_size=0.1, normalize=True): 107 | X = self.df.drop(['target'], axis=1).values 108 | y = self.df.target.values 109 | self.X_train, self.X_test, self.y_train, self.y_test = \ 110 | train_test_split(X, y, test_size=test_size) 111 | if normalize: 112 | sc = StandardScaler() 113 | self.X_train = sc.fit_transform(self.X_train) 114 | self.X_test = sc.transform(self.X_test) 115 | self.X_train, self.X_val, self.y_train, 
self.y_val = \ 116 | train_test_split(self.X_train, self.y_train, test_size=val_size/(1-test_size)) 117 | self.y_train = make_noisy_data(self.y_train, e0, e1) 118 | self.y_val = make_noisy_data(self.y_val, e0, e1) 119 | return self.X_train, self.X_test, self.X_val, self.y_train, self.y_test, self.y_val 120 | 121 | def prepare_train_test(self, kargs): 122 | if kargs['equalize_prior']: 123 | self.equalize_prior() 124 | X_train, X_test, y_train, y_test = self.train_test_split(kargs['test_size'], kargs['normalize']) 125 | y_noisy = make_noisy_data(y_train, kargs['e0'], kargs['e1']) 126 | return X_train, X_test, y_noisy, y_test 127 | 128 | def prepare_train_test_val(self, kargs): 129 | if kargs['equalize_prior']: 130 | self.equalize_prior() 131 | X_train, X_test, X_val, y_train, y_test, y_val = self.train_test_val_split( 132 | e0=kargs['e0'], e1=kargs['e1'], 133 | test_size=kargs['test_size'], 134 | val_size=kargs['val_size'], 135 | normalize=kargs['normalize'], 136 | ) 137 | return X_train, X_test, X_val, y_train, y_test, y_val 138 | 139 | 140 | def onehot(df, cols): 141 | dummies = [pd.get_dummies(df[col]) for col in cols] 142 | df.drop(cols, axis=1, inplace=True) 143 | df = pd.concat([df] + dummies, axis=1) 144 | return df 145 | 146 | 147 | def make_noisy_data(y, e0, e1): 148 | num_neg = np.count_nonzero(y == 0) 149 | num_pos = np.count_nonzero(y == 1) 150 | flip0 = np.random.choice(np.where(y == 0)[0], int(num_neg * e0), replace=False) 151 | flip1 = np.random.choice(np.where(y == 1)[0], int(num_pos * e1), replace=False) 152 | flipped_idxes = np.concatenate([flip0, flip1]) 153 | y_noisy = y.copy() 154 | y_noisy[flipped_idxes] = 1 - y_noisy[flipped_idxes] 155 | return y_noisy 156 | -------------------------------------------------------------------------------- /run_nn.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import numpy as np 3 | import multiprocessing as mp 4 | 5 | from utils import logger 6 | from utils.dataloader import DataLoader 7 | from utils.results_plotter import plot 8 | from utils.misc import set_global_seeds, make_arg_list 9 | 10 | from models.nn import MLP, BinaryClassifier, PeerBinaryClassifier, SurrogateBinaryClassifier, DMIClassifier 11 | 12 | ALPHAS = [-5, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0, 2.0, 3.0, 4.0, 5.0] 13 | 14 | 15 | def find_best_alpha(kargs): 16 | if len(kargs['alpha']) == 1: 17 | return { 18 | 'alpha': kargs['alpha'][0] 19 | } 20 | pool = mp.Pool(mp.cpu_count()) 21 | results = [] 22 | args = kargs.copy() 23 | for alpha in kargs['alpha']: 24 | args['alpha'] = alpha 25 | res = [res['val_acc'] for res in pool.map(run_nn_peer, make_arg_list(args))] 26 | res = np.mean(res, axis=0)[-1] 27 | if 'verbose' in args.keys() and args['verbose']: 28 | logger.record_tabular(f'[PEER] alpha = {alpha}', res) 29 | results.append(res) 30 | pool.close() 31 | pool.join() 32 | logger.dump_tabular() 33 | best_alpha = kargs['alpha'][np.argmax(results)] 34 | return { 35 | 'alpha': best_alpha 36 | } 37 | 38 | 39 | def find_best_alpha_val(kargs): 40 | if len(kargs['alpha']) == 1: 41 | return { 42 | 'alpha': kargs['alpha'][0] 43 | } 44 | args = kargs.copy() 45 | pool = mp.Pool(mp.cpu_count()) 46 | results = [] 47 | for alpha in kargs['alpha']: 48 | args['alpha'] = alpha 49 | res = [res['val_acc'] for res in pool.map(run_nn_peer_val, make_arg_list(args))] 50 | res = np.mean(res, axis=0)[-1] 51 | if 'verbose' in args.keys() and args['verbose']: 52 | logger.record_tabular(f'[PEER] alpha = {alpha}', res) 53 | results.append(res) 
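    # Each candidate alpha above is scored by its mean final validation accuracy
    # across seeds; the argmax below becomes the alpha used for the final runs.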
54 | pool.close() 55 | pool.join() 56 | logger.dump_tabular() 57 | best_alpha = kargs['alpha'][np.argmax(results)] 58 | return { 59 | 'alpha': best_alpha 60 | } 61 | 62 | 63 | def find_best_params(kargs): 64 | args = kargs.copy() 65 | args['alpha'] = 1.0 66 | pool = mp.Pool(mp.cpu_count()) 67 | results = np.empty((len(kargs['batchsize']), len(kargs['lr']), len(kargs['hidsize']))) 68 | 69 | if len(kargs['batchsize']) == 1 and len(kargs['lr']) == 1 and len(kargs['hidsize']) == 1: 70 | return { 71 | 'batchsize': kargs['batchsize'][0], 72 | 'batchsize_peer': kargs['batchsize_peer'][0], 73 | 'hidsize': kargs['hidsize'][0], 74 | 'lr': kargs['lr'][0], 75 | } 76 | 77 | for k, hidsize in enumerate(kargs['hidsize']): 78 | for i, batchsize in enumerate(kargs['batchsize']): 79 | for j, lr in enumerate(kargs['lr']): 80 | args.update({ 81 | 'batchsize': batchsize, 82 | 'hidsize': hidsize, 83 | 'lr': lr, 84 | }) 85 | res = [res['val_acc'] for res in pool.map(run_nn_peer, make_arg_list(args))] 86 | results[i, j, k] = np.mean(res, axis=0)[-1] 87 | if 'verbose' in args.keys() and args['verbose']: 88 | logger.info( 89 | f'acc:{results[i, j, k]:4.3}\t' 90 | f'hidsize:{str(hidsize):8}\t' 91 | f'batchsize:{batchsize:2}\t' 92 | f'lr:{lr:6}\t' 93 | ) 94 | pool.close() 95 | pool.join() 96 | best_batchsize, best_lr, best_hidsize = np.unravel_index(results.reshape(-1).argmax(), results.shape) 97 | best_acc = results.max() 98 | best_batchsize = kargs['batchsize'][best_batchsize] 99 | best_lr = kargs['lr'][best_lr] 100 | best_hidsize = kargs['hidsize'][best_hidsize] 101 | 102 | return { 103 | 'batchsize': best_batchsize, 104 | 'hidsize': best_hidsize, 105 | 'lr': best_lr, 106 | 'acc': best_acc, 107 | } 108 | 109 | 110 | def run_nn(args): 111 | set_global_seeds(args['seed']) 112 | dataset = DataLoader(args['dataset']) 113 | X_train, X_test, X_val, y_train, y_test, y_val = dataset.prepare_train_test_val(args) 114 | mlp = MLP( 115 | feature_dim=X_train.shape[-1], 116 | hidsizes=args['hidsize'], 117 | dropout=args['dropout'], 118 | ) 119 | classifier = BinaryClassifier( 120 | model=mlp, 121 | learning_rate=args['lr'], 122 | loss_func=args['loss'], 123 | ) 124 | results = classifier.fit( 125 | X_train, y_train, X_test, y_test, 126 | batchsize=args['batchsize'], 127 | episodes=args['episodes'], 128 | logger=logger if args['seeds'] == 1 else None, 129 | ) 130 | return results 131 | 132 | 133 | def run_nn_symm(args): 134 | set_global_seeds(args['seed']) 135 | dataset = DataLoader(args['dataset']) 136 | X_train, X_test, X_val, y_train, y_test, y_val = dataset.prepare_train_test_val(args) 137 | mlp = MLP( 138 | feature_dim=X_train.shape[-1], 139 | hidsizes=args['hidsize'], 140 | dropout=args['dropout'], 141 | ) 142 | classifier = BinaryClassifier( 143 | model=mlp, 144 | learning_rate=args['lr'], 145 | loss_func='sigmoid', # symmetric loss 146 | ) 147 | results = classifier.fit( 148 | X_train, y_train, X_test, y_test, 149 | batchsize=args['batchsize'], 150 | episodes=args['episodes'], 151 | logger=logger if args['seeds'] == 1 else None, 152 | ) 153 | return results 154 | 155 | 156 | def run_nn_dmi(args): 157 | set_global_seeds(args['seed']) 158 | dataset = DataLoader(args['dataset']) 159 | X_train, X_test, X_val, y_train, y_test, y_val = dataset.prepare_train_test_val(args) 160 | mlp = MLP( 161 | feature_dim=X_train.shape[-1], 162 | hidsizes=args['hidsize'], 163 | dropout=args['dropout'], 164 | outputs=2, 165 | ) 166 | classifier = DMIClassifier( 167 | model=mlp, 168 | learning_rate=args['lr'], 169 | ) 170 | results = 
classifier.fit( 171 | X_train, y_train, X_test, y_test, 172 | batchsize=args['batchsize'], 173 | episodes=args['episodes'], 174 | logger=logger if args['seeds'] == 1 else None, 175 | ) 176 | return results 177 | 178 | 179 | def run_nn_surr(args): 180 | set_global_seeds(args['seed']) 181 | dataset = DataLoader(args['dataset']) 182 | X_train, X_test, X_val, y_train, y_test, y_val = dataset.prepare_train_test_val(args) 183 | mlp = MLP( 184 | feature_dim=X_train.shape[-1], 185 | hidsizes=args['hidsize'], 186 | dropout=args['dropout'] 187 | ) 188 | classifier = SurrogateBinaryClassifier( 189 | model=mlp, 190 | learning_rate=args['lr'], 191 | loss_func=args['loss'], 192 | e0=args['e0'], 193 | e1=args['e1'], 194 | ) 195 | results = classifier.fit( 196 | X_train, y_train, X_test, y_test, 197 | batchsize=args['batchsize'], 198 | episodes=args['episodes'], 199 | logger=logger if args['seeds'] == 1 else None 200 | ) 201 | return results 202 | 203 | 204 | def run_nn_peer(args): 205 | set_global_seeds(args['seed']) 206 | dataset = DataLoader(args['dataset']) 207 | X_train, X_test, X_val, y_train, y_test, y_val = dataset.prepare_train_test_val(args) 208 | mlp = MLP( 209 | feature_dim=X_train.shape[-1], 210 | hidsizes=args['hidsize'], 211 | dropout=args['dropout'] 212 | ) 213 | classifier = PeerBinaryClassifier( 214 | model=mlp, 215 | learning_rate=args['lr'], 216 | loss_func=args['loss'], 217 | alpha=args['alpha'], 218 | ) 219 | results = classifier.fit( 220 | X_train, y_train, X_test, y_test, 221 | batchsize=args['batchsize'], 222 | batchsize_=args['batchsize_peer'], 223 | episodes=args['episodes'], 224 | logger=logger if args['seeds'] == 1 else None 225 | ) 226 | return results 227 | 228 | 229 | def run_nn_peer_val(args): 230 | set_global_seeds(args['seed']) 231 | dataset = DataLoader(args['dataset']) 232 | X_train, X_test, X_val, y_train, y_test, y_val = dataset.prepare_train_test_val(args) 233 | mlp = MLP( 234 | feature_dim=X_train.shape[-1], 235 | hidsizes=args['hidsize'], 236 | dropout=args['dropout'] 237 | ) 238 | classifier = PeerBinaryClassifier( 239 | model=mlp, 240 | learning_rate=args['lr'], 241 | loss_func=args['loss'], 242 | alpha=args['alpha'], 243 | ) 244 | results = classifier.fit( 245 | X_train, y_train, X_val, y_val, 246 | batchsize=args['batchsize'], 247 | episodes=args['episodes'], 248 | logger=logger if args['seeds'] == 1 else None 249 | ) 250 | return results 251 | 252 | 253 | def get_max_mean(result, interval=100): 254 | return max([np.mean(result[-i-interval:-i-1]) for i in range(0, len(result)-interval)]) 255 | 256 | 257 | def run(args): 258 | prefix = datetime.datetime.now().strftime("%y-%m-%d-%H-%M-%S") 259 | logger.configure(f'logs/{args["dataset"]}/nn/{prefix}') 260 | logger.info(args) 261 | 262 | pool = mp.Pool(mp.cpu_count()) 263 | 264 | nn_arg = args.copy() 265 | nn_arg.update(find_best_params(nn_arg)) 266 | nn_arg.update(find_best_alpha_val(nn_arg)) 267 | logger.record_tabular('[PEER] batchsize', nn_arg['batchsize']) 268 | logger.record_tabular('[PEER] learning rate', nn_arg['lr']) 269 | logger.record_tabular('[PEER] hidsize', nn_arg['hidsize']) 270 | logger.record_tabular('[PEER] alpha', nn_arg['alpha']) 271 | logger.dump_tabular() 272 | 273 | nn_arg['seed'] = 1 274 | run_nn_dmi(nn_arg) 275 | results_dmi = pool.map(run_nn_dmi, make_arg_list(nn_arg)) 276 | results_surr = pool.map(run_nn_surr, make_arg_list(nn_arg)) 277 | results_nn = pool.map(run_nn, make_arg_list(nn_arg)) 278 | results_peer = pool.map(run_nn_peer, make_arg_list(nn_arg)) 279 | results_symm = 
pool.map(run_nn_symm, make_arg_list(nn_arg))
280 |     pool.close()
281 |     pool.join()
282 | 
283 |     test_acc_bce = [res['val_acc'] for res in results_nn]
284 |     test_acc_peer = [res['val_acc'] for res in results_peer]
285 |     test_acc_surr = [res['val_acc'] for res in results_surr]
286 |     test_acc_symm = [res['val_acc'] for res in results_symm]
287 |     test_acc_dmi = [res['val_acc'] for res in results_dmi]
288 | 
289 |     plot([test_acc_bce, test_acc_peer, test_acc_surr, test_acc_symm, test_acc_dmi],
290 |          ['cross entropy loss', 'peer loss', 'surrogate loss', 'symmetric loss', 'dmi loss'],
291 |          title='Accuracy During Testing',
292 |          path=f'logs/{args["dataset"]}/nn/{prefix}')
293 | 
294 |     train_acc_bce = [res['train_acc'] for res in results_nn]
295 |     train_acc_peer = [res['train_acc'] for res in results_peer]
296 |     train_acc_surr = [res['train_acc'] for res in results_surr]
297 |     train_acc_symm = [res['train_acc'] for res in results_symm]
298 |     train_acc_dmi = [res['train_acc'] for res in results_dmi]
299 | 
300 |     plot([train_acc_bce, train_acc_peer, train_acc_surr, train_acc_symm, train_acc_dmi],
301 |          ['cross entropy loss', 'peer loss', 'surrogate loss', 'symmetric loss', 'dmi loss'],
302 |          title='Accuracy During Training',
303 |          path=f'logs/{args["dataset"]}/nn/{prefix}')
304 | 
305 |     loss_acc_surr = [res['loss'] for res in results_surr]
306 |     loss_acc_bce = [res['loss'] for res in results_nn]
307 |     loss_acc_peer = [res['loss'] for res in results_peer]
308 |     loss_acc_symm = [res['loss'] for res in results_symm]
309 |     loss_acc_dmi = [res['loss'] for res in results_dmi]
310 | 
311 |     plot([loss_acc_bce, loss_acc_peer, loss_acc_surr, loss_acc_symm, loss_acc_dmi],
312 |          ['cross entropy loss', 'peer loss', 'surrogate loss', 'symmetric loss', 'dmi loss'],
313 |          title='Loss',
314 |          path=f'logs/{args["dataset"]}/nn/{prefix}')
315 | 
316 |     logger.record_tabular('[NN] with peer loss', np.mean(test_acc_peer, 0)[-1])
317 |     logger.record_tabular('[NN] with surrogate loss', np.mean(test_acc_surr, 0)[-1])
318 |     logger.record_tabular('[NN] with symmetric loss', np.mean(test_acc_symm, 0)[-1])
319 |     logger.record_tabular('[NN] with dmi loss', np.mean(test_acc_dmi, 0)[-1])
320 |     logger.record_tabular(f'[NN] with {args["loss"]} loss', np.mean(test_acc_bce, 0)[-1])
321 |     logger.dump_tabular()
322 | 
323 | 
324 | if __name__ == '__main__':
325 |     from utils.parser import parse_args
326 |     run(parse_args().__dict__)
327 | 
--------------------------------------------------------------------------------
/uci/heart.csv:
--------------------------------------------------------------------------------
1 | age,sex,cp,trestbps,chol,fbs,restecg,thalach,exang,oldpeak,slope,ca,thal,target
2 | 63,1,3,145,233,1,0,150,0,2.3,0,0,1,1
3 | 37,1,2,130,250,0,1,187,0,3.5,0,0,2,1
4 | 41,0,1,130,204,0,0,172,0,1.4,2,0,2,1
5 | 56,1,1,120,236,0,1,178,0,0.8,2,0,2,1
6 | 57,0,0,120,354,0,1,163,1,0.6,2,0,2,1
7 | 57,1,0,140,192,0,1,148,0,0.4,1,0,1,1
8 | 56,0,1,140,294,0,0,153,0,1.3,1,0,2,1
9 | 44,1,1,120,263,0,1,173,0,0,2,0,3,1
10 | 52,1,2,172,199,1,1,162,0,0.5,2,0,3,1
11 | 57,1,2,150,168,0,1,174,0,1.6,2,0,2,1
12 | 54,1,0,140,239,0,1,160,0,1.2,2,0,2,1
13 | 48,0,2,130,275,0,1,139,0,0.2,2,0,2,1
14 | 49,1,1,130,266,0,1,171,0,0.6,2,0,2,1
15 | 64,1,3,110,211,0,0,144,1,1.8,1,0,2,1
16 | 58,0,3,150,283,1,0,162,0,1,2,0,2,1
17 | 50,0,2,120,219,0,1,158,0,1.6,1,0,2,1
18 | 58,0,2,120,340,0,1,172,0,0,2,0,2,1
19 | 66,0,3,150,226,0,1,114,0,2.6,0,0,2,1
20 | 43,1,0,150,247,0,1,171,0,1.5,2,0,2,1
21 | 69,0,3,140,239,0,1,151,0,1.8,2,2,2,1
22 | 59,1,0,135,234,0,1,161,0,0.5,1,0,3,1
23 | 
44,1,2,130,233,0,1,179,1,0.4,2,0,2,1 24 | 42,1,0,140,226,0,1,178,0,0,2,0,2,1 25 | 61,1,2,150,243,1,1,137,1,1,1,0,2,1 26 | 40,1,3,140,199,0,1,178,1,1.4,2,0,3,1 27 | 71,0,1,160,302,0,1,162,0,0.4,2,2,2,1 28 | 59,1,2,150,212,1,1,157,0,1.6,2,0,2,1 29 | 51,1,2,110,175,0,1,123,0,0.6,2,0,2,1 30 | 65,0,2,140,417,1,0,157,0,0.8,2,1,2,1 31 | 53,1,2,130,197,1,0,152,0,1.2,0,0,2,1 32 | 41,0,1,105,198,0,1,168,0,0,2,1,2,1 33 | 65,1,0,120,177,0,1,140,0,0.4,2,0,3,1 34 | 44,1,1,130,219,0,0,188,0,0,2,0,2,1 35 | 54,1,2,125,273,0,0,152,0,0.5,0,1,2,1 36 | 51,1,3,125,213,0,0,125,1,1.4,2,1,2,1 37 | 46,0,2,142,177,0,0,160,1,1.4,0,0,2,1 38 | 54,0,2,135,304,1,1,170,0,0,2,0,2,1 39 | 54,1,2,150,232,0,0,165,0,1.6,2,0,3,1 40 | 65,0,2,155,269,0,1,148,0,0.8,2,0,2,1 41 | 65,0,2,160,360,0,0,151,0,0.8,2,0,2,1 42 | 51,0,2,140,308,0,0,142,0,1.5,2,1,2,1 43 | 48,1,1,130,245,0,0,180,0,0.2,1,0,2,1 44 | 45,1,0,104,208,0,0,148,1,3,1,0,2,1 45 | 53,0,0,130,264,0,0,143,0,0.4,1,0,2,1 46 | 39,1,2,140,321,0,0,182,0,0,2,0,2,1 47 | 52,1,1,120,325,0,1,172,0,0.2,2,0,2,1 48 | 44,1,2,140,235,0,0,180,0,0,2,0,2,1 49 | 47,1,2,138,257,0,0,156,0,0,2,0,2,1 50 | 53,0,2,128,216,0,0,115,0,0,2,0,0,1 51 | 53,0,0,138,234,0,0,160,0,0,2,0,2,1 52 | 51,0,2,130,256,0,0,149,0,0.5,2,0,2,1 53 | 66,1,0,120,302,0,0,151,0,0.4,1,0,2,1 54 | 62,1,2,130,231,0,1,146,0,1.8,1,3,3,1 55 | 44,0,2,108,141,0,1,175,0,0.6,1,0,2,1 56 | 63,0,2,135,252,0,0,172,0,0,2,0,2,1 57 | 52,1,1,134,201,0,1,158,0,0.8,2,1,2,1 58 | 48,1,0,122,222,0,0,186,0,0,2,0,2,1 59 | 45,1,0,115,260,0,0,185,0,0,2,0,2,1 60 | 34,1,3,118,182,0,0,174,0,0,2,0,2,1 61 | 57,0,0,128,303,0,0,159,0,0,2,1,2,1 62 | 71,0,2,110,265,1,0,130,0,0,2,1,2,1 63 | 54,1,1,108,309,0,1,156,0,0,2,0,3,1 64 | 52,1,3,118,186,0,0,190,0,0,1,0,1,1 65 | 41,1,1,135,203,0,1,132,0,0,1,0,1,1 66 | 58,1,2,140,211,1,0,165,0,0,2,0,2,1 67 | 35,0,0,138,183,0,1,182,0,1.4,2,0,2,1 68 | 51,1,2,100,222,0,1,143,1,1.2,1,0,2,1 69 | 45,0,1,130,234,0,0,175,0,0.6,1,0,2,1 70 | 44,1,1,120,220,0,1,170,0,0,2,0,2,1 71 | 62,0,0,124,209,0,1,163,0,0,2,0,2,1 72 | 54,1,2,120,258,0,0,147,0,0.4,1,0,3,1 73 | 51,1,2,94,227,0,1,154,1,0,2,1,3,1 74 | 29,1,1,130,204,0,0,202,0,0,2,0,2,1 75 | 51,1,0,140,261,0,0,186,1,0,2,0,2,1 76 | 43,0,2,122,213,0,1,165,0,0.2,1,0,2,1 77 | 55,0,1,135,250,0,0,161,0,1.4,1,0,2,1 78 | 51,1,2,125,245,1,0,166,0,2.4,1,0,2,1 79 | 59,1,1,140,221,0,1,164,1,0,2,0,2,1 80 | 52,1,1,128,205,1,1,184,0,0,2,0,2,1 81 | 58,1,2,105,240,0,0,154,1,0.6,1,0,3,1 82 | 41,1,2,112,250,0,1,179,0,0,2,0,2,1 83 | 45,1,1,128,308,0,0,170,0,0,2,0,2,1 84 | 60,0,2,102,318,0,1,160,0,0,2,1,2,1 85 | 52,1,3,152,298,1,1,178,0,1.2,1,0,3,1 86 | 42,0,0,102,265,0,0,122,0,0.6,1,0,2,1 87 | 67,0,2,115,564,0,0,160,0,1.6,1,0,3,1 88 | 68,1,2,118,277,0,1,151,0,1,2,1,3,1 89 | 46,1,1,101,197,1,1,156,0,0,2,0,3,1 90 | 54,0,2,110,214,0,1,158,0,1.6,1,0,2,1 91 | 58,0,0,100,248,0,0,122,0,1,1,0,2,1 92 | 48,1,2,124,255,1,1,175,0,0,2,2,2,1 93 | 57,1,0,132,207,0,1,168,1,0,2,0,3,1 94 | 52,1,2,138,223,0,1,169,0,0,2,4,2,1 95 | 54,0,1,132,288,1,0,159,1,0,2,1,2,1 96 | 45,0,1,112,160,0,1,138,0,0,1,0,2,1 97 | 53,1,0,142,226,0,0,111,1,0,2,0,3,1 98 | 62,0,0,140,394,0,0,157,0,1.2,1,0,2,1 99 | 52,1,0,108,233,1,1,147,0,0.1,2,3,3,1 100 | 43,1,2,130,315,0,1,162,0,1.9,2,1,2,1 101 | 53,1,2,130,246,1,0,173,0,0,2,3,2,1 102 | 42,1,3,148,244,0,0,178,0,0.8,2,2,2,1 103 | 59,1,3,178,270,0,0,145,0,4.2,0,0,3,1 104 | 63,0,1,140,195,0,1,179,0,0,2,2,2,1 105 | 42,1,2,120,240,1,1,194,0,0.8,0,0,3,1 106 | 50,1,2,129,196,0,1,163,0,0,2,0,2,1 107 | 68,0,2,120,211,0,0,115,0,1.5,1,0,2,1 108 | 69,1,3,160,234,1,0,131,0,0.1,1,1,2,1 109 | 
45,0,0,138,236,0,0,152,1,0.2,1,0,2,1 110 | 50,0,1,120,244,0,1,162,0,1.1,2,0,2,1 111 | 50,0,0,110,254,0,0,159,0,0,2,0,2,1 112 | 64,0,0,180,325,0,1,154,1,0,2,0,2,1 113 | 57,1,2,150,126,1,1,173,0,0.2,2,1,3,1 114 | 64,0,2,140,313,0,1,133,0,0.2,2,0,3,1 115 | 43,1,0,110,211,0,1,161,0,0,2,0,3,1 116 | 55,1,1,130,262,0,1,155,0,0,2,0,2,1 117 | 37,0,2,120,215,0,1,170,0,0,2,0,2,1 118 | 41,1,2,130,214,0,0,168,0,2,1,0,2,1 119 | 56,1,3,120,193,0,0,162,0,1.9,1,0,3,1 120 | 46,0,1,105,204,0,1,172,0,0,2,0,2,1 121 | 46,0,0,138,243,0,0,152,1,0,1,0,2,1 122 | 64,0,0,130,303,0,1,122,0,2,1,2,2,1 123 | 59,1,0,138,271,0,0,182,0,0,2,0,2,1 124 | 41,0,2,112,268,0,0,172,1,0,2,0,2,1 125 | 54,0,2,108,267,0,0,167,0,0,2,0,2,1 126 | 39,0,2,94,199,0,1,179,0,0,2,0,2,1 127 | 34,0,1,118,210,0,1,192,0,0.7,2,0,2,1 128 | 47,1,0,112,204,0,1,143,0,0.1,2,0,2,1 129 | 67,0,2,152,277,0,1,172,0,0,2,1,2,1 130 | 52,0,2,136,196,0,0,169,0,0.1,1,0,2,1 131 | 74,0,1,120,269,0,0,121,1,0.2,2,1,2,1 132 | 54,0,2,160,201,0,1,163,0,0,2,1,2,1 133 | 49,0,1,134,271,0,1,162,0,0,1,0,2,1 134 | 42,1,1,120,295,0,1,162,0,0,2,0,2,1 135 | 41,1,1,110,235,0,1,153,0,0,2,0,2,1 136 | 41,0,1,126,306,0,1,163,0,0,2,0,2,1 137 | 49,0,0,130,269,0,1,163,0,0,2,0,2,1 138 | 60,0,2,120,178,1,1,96,0,0,2,0,2,1 139 | 62,1,1,128,208,1,0,140,0,0,2,0,2,1 140 | 57,1,0,110,201,0,1,126,1,1.5,1,0,1,1 141 | 64,1,0,128,263,0,1,105,1,0.2,1,1,3,1 142 | 51,0,2,120,295,0,0,157,0,0.6,2,0,2,1 143 | 43,1,0,115,303,0,1,181,0,1.2,1,0,2,1 144 | 42,0,2,120,209,0,1,173,0,0,1,0,2,1 145 | 67,0,0,106,223,0,1,142,0,0.3,2,2,2,1 146 | 76,0,2,140,197,0,2,116,0,1.1,1,0,2,1 147 | 70,1,1,156,245,0,0,143,0,0,2,0,2,1 148 | 44,0,2,118,242,0,1,149,0,0.3,1,1,2,1 149 | 60,0,3,150,240,0,1,171,0,0.9,2,0,2,1 150 | 44,1,2,120,226,0,1,169,0,0,2,0,2,1 151 | 42,1,2,130,180,0,1,150,0,0,2,0,2,1 152 | 66,1,0,160,228,0,0,138,0,2.3,2,0,1,1 153 | 71,0,0,112,149,0,1,125,0,1.6,1,0,2,1 154 | 64,1,3,170,227,0,0,155,0,0.6,1,0,3,1 155 | 66,0,2,146,278,0,0,152,0,0,1,1,2,1 156 | 39,0,2,138,220,0,1,152,0,0,1,0,2,1 157 | 58,0,0,130,197,0,1,131,0,0.6,1,0,2,1 158 | 47,1,2,130,253,0,1,179,0,0,2,0,2,1 159 | 35,1,1,122,192,0,1,174,0,0,2,0,2,1 160 | 58,1,1,125,220,0,1,144,0,0.4,1,4,3,1 161 | 56,1,1,130,221,0,0,163,0,0,2,0,3,1 162 | 56,1,1,120,240,0,1,169,0,0,0,0,2,1 163 | 55,0,1,132,342,0,1,166,0,1.2,2,0,2,1 164 | 41,1,1,120,157,0,1,182,0,0,2,0,2,1 165 | 38,1,2,138,175,0,1,173,0,0,2,4,2,1 166 | 38,1,2,138,175,0,1,173,0,0,2,4,2,1 167 | 67,1,0,160,286,0,0,108,1,1.5,1,3,2,0 168 | 67,1,0,120,229,0,0,129,1,2.6,1,2,3,0 169 | 62,0,0,140,268,0,0,160,0,3.6,0,2,2,0 170 | 63,1,0,130,254,0,0,147,0,1.4,1,1,3,0 171 | 53,1,0,140,203,1,0,155,1,3.1,0,0,3,0 172 | 56,1,2,130,256,1,0,142,1,0.6,1,1,1,0 173 | 48,1,1,110,229,0,1,168,0,1,0,0,3,0 174 | 58,1,1,120,284,0,0,160,0,1.8,1,0,2,0 175 | 58,1,2,132,224,0,0,173,0,3.2,2,2,3,0 176 | 60,1,0,130,206,0,0,132,1,2.4,1,2,3,0 177 | 40,1,0,110,167,0,0,114,1,2,1,0,3,0 178 | 60,1,0,117,230,1,1,160,1,1.4,2,2,3,0 179 | 64,1,2,140,335,0,1,158,0,0,2,0,2,0 180 | 43,1,0,120,177,0,0,120,1,2.5,1,0,3,0 181 | 57,1,0,150,276,0,0,112,1,0.6,1,1,1,0 182 | 55,1,0,132,353,0,1,132,1,1.2,1,1,3,0 183 | 65,0,0,150,225,0,0,114,0,1,1,3,3,0 184 | 61,0,0,130,330,0,0,169,0,0,2,0,2,0 185 | 58,1,2,112,230,0,0,165,0,2.5,1,1,3,0 186 | 50,1,0,150,243,0,0,128,0,2.6,1,0,3,0 187 | 44,1,0,112,290,0,0,153,0,0,2,1,2,0 188 | 60,1,0,130,253,0,1,144,1,1.4,2,1,3,0 189 | 54,1,0,124,266,0,0,109,1,2.2,1,1,3,0 190 | 50,1,2,140,233,0,1,163,0,0.6,1,1,3,0 191 | 41,1,0,110,172,0,0,158,0,0,2,0,3,0 192 | 51,0,0,130,305,0,1,142,1,1.2,1,0,3,0 193 | 
58,1,0,128,216,0,0,131,1,2.2,1,3,3,0 194 | 54,1,0,120,188,0,1,113,0,1.4,1,1,3,0 195 | 60,1,0,145,282,0,0,142,1,2.8,1,2,3,0 196 | 60,1,2,140,185,0,0,155,0,3,1,0,2,0 197 | 59,1,0,170,326,0,0,140,1,3.4,0,0,3,0 198 | 46,1,2,150,231,0,1,147,0,3.6,1,0,2,0 199 | 67,1,0,125,254,1,1,163,0,0.2,1,2,3,0 200 | 62,1,0,120,267,0,1,99,1,1.8,1,2,3,0 201 | 65,1,0,110,248,0,0,158,0,0.6,2,2,1,0 202 | 44,1,0,110,197,0,0,177,0,0,2,1,2,0 203 | 60,1,0,125,258,0,0,141,1,2.8,1,1,3,0 204 | 58,1,0,150,270,0,0,111,1,0.8,2,0,3,0 205 | 68,1,2,180,274,1,0,150,1,1.6,1,0,3,0 206 | 62,0,0,160,164,0,0,145,0,6.2,0,3,3,0 207 | 52,1,0,128,255,0,1,161,1,0,2,1,3,0 208 | 59,1,0,110,239,0,0,142,1,1.2,1,1,3,0 209 | 60,0,0,150,258,0,0,157,0,2.6,1,2,3,0 210 | 49,1,2,120,188,0,1,139,0,2,1,3,3,0 211 | 59,1,0,140,177,0,1,162,1,0,2,1,3,0 212 | 57,1,2,128,229,0,0,150,0,0.4,1,1,3,0 213 | 61,1,0,120,260,0,1,140,1,3.6,1,1,3,0 214 | 39,1,0,118,219,0,1,140,0,1.2,1,0,3,0 215 | 61,0,0,145,307,0,0,146,1,1,1,0,3,0 216 | 56,1,0,125,249,1,0,144,1,1.2,1,1,2,0 217 | 43,0,0,132,341,1,0,136,1,3,1,0,3,0 218 | 62,0,2,130,263,0,1,97,0,1.2,1,1,3,0 219 | 63,1,0,130,330,1,0,132,1,1.8,2,3,3,0 220 | 65,1,0,135,254,0,0,127,0,2.8,1,1,3,0 221 | 48,1,0,130,256,1,0,150,1,0,2,2,3,0 222 | 63,0,0,150,407,0,0,154,0,4,1,3,3,0 223 | 55,1,0,140,217,0,1,111,1,5.6,0,0,3,0 224 | 65,1,3,138,282,1,0,174,0,1.4,1,1,2,0 225 | 56,0,0,200,288,1,0,133,1,4,0,2,3,0 226 | 54,1,0,110,239,0,1,126,1,2.8,1,1,3,0 227 | 70,1,0,145,174,0,1,125,1,2.6,0,0,3,0 228 | 62,1,1,120,281,0,0,103,0,1.4,1,1,3,0 229 | 35,1,0,120,198,0,1,130,1,1.6,1,0,3,0 230 | 59,1,3,170,288,0,0,159,0,0.2,1,0,3,0 231 | 64,1,2,125,309,0,1,131,1,1.8,1,0,3,0 232 | 47,1,2,108,243,0,1,152,0,0,2,0,2,0 233 | 57,1,0,165,289,1,0,124,0,1,1,3,3,0 234 | 55,1,0,160,289,0,0,145,1,0.8,1,1,3,0 235 | 64,1,0,120,246,0,0,96,1,2.2,0,1,2,0 236 | 70,1,0,130,322,0,0,109,0,2.4,1,3,2,0 237 | 51,1,0,140,299,0,1,173,1,1.6,2,0,3,0 238 | 58,1,0,125,300,0,0,171,0,0,2,2,3,0 239 | 60,1,0,140,293,0,0,170,0,1.2,1,2,3,0 240 | 77,1,0,125,304,0,0,162,1,0,2,3,2,0 241 | 35,1,0,126,282,0,0,156,1,0,2,0,3,0 242 | 70,1,2,160,269,0,1,112,1,2.9,1,1,3,0 243 | 59,0,0,174,249,0,1,143,1,0,1,0,2,0 244 | 64,1,0,145,212,0,0,132,0,2,1,2,1,0 245 | 57,1,0,152,274,0,1,88,1,1.2,1,1,3,0 246 | 56,1,0,132,184,0,0,105,1,2.1,1,1,1,0 247 | 48,1,0,124,274,0,0,166,0,0.5,1,0,3,0 248 | 56,0,0,134,409,0,0,150,1,1.9,1,2,3,0 249 | 66,1,1,160,246,0,1,120,1,0,1,3,1,0 250 | 54,1,1,192,283,0,0,195,0,0,2,1,3,0 251 | 69,1,2,140,254,0,0,146,0,2,1,3,3,0 252 | 51,1,0,140,298,0,1,122,1,4.2,1,3,3,0 253 | 43,1,0,132,247,1,0,143,1,0.1,1,4,3,0 254 | 62,0,0,138,294,1,1,106,0,1.9,1,3,2,0 255 | 67,1,0,100,299,0,0,125,1,0.9,1,2,2,0 256 | 59,1,3,160,273,0,0,125,0,0,2,0,2,0 257 | 45,1,0,142,309,0,0,147,1,0,1,3,3,0 258 | 58,1,0,128,259,0,0,130,1,3,1,2,3,0 259 | 50,1,0,144,200,0,0,126,1,0.9,1,0,3,0 260 | 62,0,0,150,244,0,1,154,1,1.4,1,0,2,0 261 | 38,1,3,120,231,0,1,182,1,3.8,1,0,3,0 262 | 66,0,0,178,228,1,1,165,1,1,1,2,3,0 263 | 52,1,0,112,230,0,1,160,0,0,2,1,2,0 264 | 53,1,0,123,282,0,1,95,1,2,1,2,3,0 265 | 63,0,0,108,269,0,1,169,1,1.8,1,2,2,0 266 | 54,1,0,110,206,0,0,108,1,0,1,1,2,0 267 | 66,1,0,112,212,0,0,132,1,0.1,2,1,2,0 268 | 55,0,0,180,327,0,2,117,1,3.4,1,0,2,0 269 | 49,1,2,118,149,0,0,126,0,0.8,2,3,2,0 270 | 54,1,0,122,286,0,0,116,1,3.2,1,2,2,0 271 | 56,1,0,130,283,1,0,103,1,1.6,0,0,3,0 272 | 46,1,0,120,249,0,0,144,0,0.8,2,0,3,0 273 | 61,1,3,134,234,0,1,145,0,2.6,1,2,2,0 274 | 67,1,0,120,237,0,1,71,0,1,1,0,2,0 275 | 58,1,0,100,234,0,1,156,0,0.1,2,1,3,0 276 | 47,1,0,110,275,0,0,118,1,1,1,1,2,0 277 | 
52,1,0,125,212,0,1,168,0,1,2,2,3,0 278 | 58,1,0,146,218,0,1,105,0,2,1,1,3,0 279 | 57,1,1,124,261,0,1,141,0,0.3,2,0,3,0 280 | 58,0,1,136,319,1,0,152,0,0,2,2,2,0 281 | 61,1,0,138,166,0,0,125,1,3.6,1,1,2,0 282 | 42,1,0,136,315,0,1,125,1,1.8,1,0,1,0 283 | 52,1,0,128,204,1,1,156,1,1,1,0,0,0 284 | 59,1,2,126,218,1,1,134,0,2.2,1,1,1,0 285 | 40,1,0,152,223,0,1,181,0,0,2,0,3,0 286 | 61,1,0,140,207,0,0,138,1,1.9,2,1,3,0 287 | 46,1,0,140,311,0,1,120,1,1.8,1,2,3,0 288 | 59,1,3,134,204,0,1,162,0,0.8,2,2,2,0 289 | 57,1,1,154,232,0,0,164,0,0,2,1,2,0 290 | 57,1,0,110,335,0,1,143,1,3,1,1,3,0 291 | 55,0,0,128,205,0,2,130,1,2,1,1,3,0 292 | 61,1,0,148,203,0,1,161,0,0,2,1,3,0 293 | 58,1,0,114,318,0,2,140,0,4.4,0,3,1,0 294 | 58,0,0,170,225,1,0,146,1,2.8,1,2,1,0 295 | 67,1,2,152,212,0,0,150,0,0.8,1,0,3,0 296 | 44,1,0,120,169,0,1,144,1,2.8,0,0,1,0 297 | 63,1,0,140,187,0,0,144,1,4,2,2,3,0 298 | 63,0,0,124,197,0,1,136,1,0,1,0,2,0 299 | 59,1,0,164,176,1,0,90,0,1,1,2,1,0 300 | 57,0,0,140,241,0,1,123,1,0.2,1,0,3,0 301 | 45,1,3,110,264,0,1,132,0,1.2,1,0,3,0 302 | 68,1,0,144,193,1,1,141,0,3.4,1,2,3,0 303 | 57,1,0,130,131,0,1,115,1,1.2,1,1,3,0 304 | 57,0,1,130,236,0,0,174,0,0,1,1,2,0 305 | -------------------------------------------------------------------------------- /uci/thyroid.csv: -------------------------------------------------------------------------------- 1 | target,1,2,3,4,5 2 | 0.0,-0.98645055,-0.27670772,-0.27412289,-0.18314766,0.22357116 3 | 1.0,-0.55303642,-1.0327397,-0.45566123,-0.24147495,0.10160108 4 | 0.0,0.66052313,-0.68712509,0.27049212,-0.15398402,0.30895022 5 | 1.0,1.3539857,-1.6375653,-0.90950708,1.9895436,0.65046645 6 | 0.0,1.7873998,-0.082299485,-0.18335372,-0.35812951,-0.11794507 7 | 1.0,1.1806201,-1.723969,-1.4541221,2.9081983,2.1994865 8 | 1.0,2.5675453,-1.227148,-1.0910454,0.21056149,0.26016219 9 | 0.0,0.227109,-0.38471229,-0.0018153834,0.12307056,-0.26430917 10 | 0.0,1.1806201,-0.38471229,0.36126129,-0.27063859,0.11379809 11 | 1.0,-2.6334242,1.9697874,3.2658747,-0.28522041,-0.47165831 12 | 1.0,-0.89976772,1.6457737,-0.0018153834,-0.21231131,-0.47165831 13 | 0.0,0.40047465,-0.68712509,-0.36489206,-0.27063859,-0.081354046 14 | 1.0,0.48715748,-0.77352875,-0.63719957,0.26888877,1.2481199 15 | 0.0,0.40047465,-0.51431778,-0.72796874,-0.18314766,0.028419029 16 | 0.0,-0.4663536,-0.77352875,0.088953786,-0.24147495,-0.020369004 17 | 0.0,-0.29298795,-0.06069857,0.45203046,-0.27063859,-0.34968823 18 | 0.0,0.83388877,-0.4063132,0.088953786,-0.32896587,-0.11794507 19 | 0.0,-0.032939473,0.54412704,0.36126129,-0.16856584,-0.41067327 20 | 1.0,-0.37967077,1.6241728,-0.27412289,-0.37271133,-0.4594613 21 | 1.0,2.7409109,-0.92473515,-0.092584553,0.92507069,1.2481199 22 | 0.0,-0.8130849,-0.81673058,0.45203046,-0.15398402,-0.044763021 23 | 0.0,0.5738403,0.025705088,-0.18335372,-0.19772949,0.016222021 24 | 1.0,-3.6736181,2.8986267,4.9904889,-0.15398402,-0.58143139 25 | 0.0,0.053743351,-0.14710223,-0.27412289,-0.19772949,-0.47165831 26 | 1.0,-0.1196223,-1.3783544,-1.1818146,-0.16856584,-0.33749122 27 | 0.0,-0.55303642,0.50092521,-0.63719957,-0.22689313,-0.17893011 28 | 0.0,0.31379183,0.34971881,-0.36489206,-0.1394022,-0.1545361 29 | 0.0,0.227109,-0.082299485,0.088953786,-0.24147495,-0.42287028 30 | 0.0,-0.37967077,-0.55751961,-0.36489206,-0.19772949,-0.52044635 31 | 0.0,-0.37967077,0.26331515,0.088953786,-0.27063859,-0.38627925 32 | 0.0,-0.37967077,-0.38471229,0.088953786,-0.1394022,-0.56923438 33 | 1.0,3.0009594,2.6826176,1.268953,-0.22689313,-0.43506729 34 | 
1.0,-2.2000101,2.5098103,0.72433797,-0.25605677,-0.58143139 35 | 0.0,0.227109,-0.85993241,-0.18335372,-0.12482038,-0.34968823 36 | 0.0,-0.72640207,-0.60072143,0.27049212,-0.29980223,-0.23991515 37 | 0.0,-0.032939473,-0.21190497,-0.18335372,-0.27063859,-0.39847626 38 | 0.0,-0.20630512,0.047306003,0.27049212,-0.28522041,-0.17893011 39 | 1.0,-2.5467414,2.833824,7.3504873,-0.28522041,-0.52044635 40 | 0.0,-0.29298795,0.30651698,-0.092584553,-0.28522041,-0.38627925 41 | 0.0,0.9205716,-0.31990955,-0.72796874,-0.21231131,-0.33749122 42 | 1.0,0.83388877,-1.9615791,-1.0910454,7.8076899,2.1263045 43 | 0.0,1.5273514,-0.47111595,-0.092584553,-0.1394022,0.2723592 44 | 0.0,0.227109,-0.66552418,-0.18335372,-0.21231131,-0.10574806 45 | 0.0,0.5738403,1.3433609,-0.90950708,-0.22689313,-0.32529421 46 | 1.0,-1.8532788,1.4297645,2.7212597,-0.29980223,-0.49605233 47 | 1.0,-1.0731334,0.9329435,1.5412605,-0.19772949,-0.47165831 48 | 0.0,0.40047465,-0.38471229,-0.27412289,-0.18314766,-0.4472643 49 | 0.0,-0.37967077,-0.90313424,-0.81873791,-0.28522041,-0.39847626 50 | 0.0,-0.37967077,0.26331515,-0.72796874,-0.29980223,-0.36188524 51 | 0.0,-1.0731334,-0.44951503,-0.5464304,-0.24147495,-0.39847626 52 | 0.0,0.31379183,-0.29830863,-0.092584553,-0.29980223,-0.4472643 53 | 1.0,0.9205716,-1.3999553,-0.092584553,0.67717974,2.1141075 54 | 0.0,1.5273514,0.65213161,0.45203046,-0.21231131,-0.43506729 55 | 0.0,-0.032939473,-0.14710223,-0.092584553,-0.25605677,0.028419029 56 | 0.0,1.0072544,0.047306003,0.45203046,-0.29980223,-0.14233909 57 | 0.0,-0.4663536,-0.06069857,-0.72796874,-0.22689313,-0.41067327 58 | 0.0,-0.29298795,-0.1039004,0.27049212,-0.19772949,-0.50824934 59 | 0.0,-0.63971925,-0.49271686,-0.092584553,-0.12482038,-0.20332413 60 | 0.0,0.053743351,-0.62232235,-0.81873791,-0.18314766,0.016222021 61 | 0.0,0.40047465,-0.16870314,0.6335688,-0.19772949,-0.32529421 62 | 1.0,-1.8532788,0.65213161,0.72433797,-0.40187497,-0.48385532 63 | 1.0,2.1341311,-1.723969,-1.1818146,2.2665982,0.49190535 64 | 0.0,-0.29298795,-1.227148,-0.63719957,-0.18314766,-0.33749122 65 | 0.0,0.5738403,0.047306003,0.27049212,-0.18314766,-0.41067327 66 | 0.0,-0.1196223,-0.2551068,-0.63719957,-0.095656742,-0.20332413 67 | 0.0,0.14042618,-0.44951503,0.088953786,-0.15398402,-0.0081719956 68 | 0.0,0.9205716,0.11210875,0.17972295,-0.25605677,-0.28870319 69 | 0.0,0.48715748,-0.60072143,-0.5464304,-0.22689313,-0.26430917 70 | 1.0,2.1341311,-1.7023681,-1.2725838,1.3625253,-0.23991515 71 | 1.0,-2.2000101,1.8617828,2.2674138,-0.25605677,-0.54484036 72 | 0.0,0.227109,0.15531058,-0.27412289,-0.28522041,-0.52044635 73 | 0.0,0.14042618,-0.16870314,-0.18335372,-0.24147495,-0.0081719956 74 | 0.0,-0.20630512,-0.31990955,-0.092584553,-0.19772949,-0.41067327 75 | 0.0,-0.72640207,-0.60072143,-0.27412289,-0.19772949,-0.3130972 76 | 0.0,-0.1196223,-0.60072143,-0.5464304,-0.18314766,-0.23991515 77 | 0.0,-1.1598162,-0.1039004,-0.36489206,-0.27063859,-0.13014208 78 | 1.0,-3.5869353,1.0409481,5.3535656,-0.32896587,-0.53264335 79 | 1.0,-1.5065475,0.26331515,0.088953786,-0.31438405,-0.53264335 80 | 0.0,-1.5932303,-0.4063132,-0.18335372,-0.11023856,0.052813046 81 | 0.0,0.31379183,-0.44951503,0.088953786,-0.25605677,-0.14233909 82 | 0.0,-0.8130849,-0.082299485,0.54279963,-0.22689313,-0.53264335 83 | 0.0,-1.3331818,-0.51431778,-0.63719957,-0.22689313,0.028419029 84 | 1.0,-0.89976772,0.67373253,1.5412605,-0.31438405,-0.52044635 85 | 0.0,0.48715748,0.15531058,-1.0002762,-0.11023856,0.052813046 86 | 0.0,-0.37967077,-0.2551068,-0.36489206,-0.25605677,-0.32529421 87 | 
0.0,0.74720595,-0.38471229,-0.0018153834,-0.19772949,1.1627408 88 | 0.0,0.9205716,0.54412704,0.45203046,-0.29980223,-0.27650618 89 | 0.0,-0.63971925,-0.082299485,-0.45566123,-0.25605677,-0.3130972 90 | 1.0,0.9205716,-1.723969,-1.0910454,2.28118,2.4190327 91 | 0.0,0.5738403,0.34971881,-0.092584553,-0.21231131,0.15038911 92 | 0.0,0.053743351,-0.77352875,-0.81873791,-0.29980223,-0.38627925 93 | 0.0,-0.72640207,-0.68712509,-0.5464304,-0.27063859,0.18698014 94 | 0.0,0.66052313,0.24171423,-0.45566123,-0.19772949,-0.25211216 95 | 1.0,-1.0731334,-1.1191434,-0.72796874,-0.11023856,1.0285737 96 | 0.0,0.14042618,-0.31990955,-0.36489206,-0.29980223,-0.36188524 97 | 1.0,-1.8532788,3.071434,3.2658747,-0.29980223,-0.49605233 98 | 0.0,-0.98645055,-0.16870314,-0.45566123,-0.1394022,-0.54484036 99 | 0.0,0.9205716,-0.66552418,-0.0018153834,-0.22689313,-0.27650618 100 | 0.0,-0.63971925,-0.70872601,-0.63719957,-0.21231131,-0.34968823 101 | 0.0,0.5738403,-0.44951503,-0.45566123,-0.25605677,-0.056960029 102 | 1.0,-1.0731334,1.1273517,-0.092584553,-0.24147495,-0.53264335 103 | 1.0,1.4406685,-2.0263818,-1.5448913,1.3625253,0.5650874 104 | 1.0,-0.37967077,0.45772338,1.268953,-0.25605677,-0.50824934 105 | 0.0,-0.55303642,-0.55751961,-0.81873791,-0.31438405,-0.4472643 106 | 1.0,1.700717,-1.8103727,-1.1818146,1.4062708,-0.1545361 107 | 0.0,0.14042618,-0.29830863,-0.27412289,-0.25605677,-0.032566012 108 | 0.0,0.66052313,-0.44951503,0.088953786,-0.27063859,-0.032566012 109 | 1.0,0.31379183,1.5809709,-0.092584553,-0.27063859,-0.50824934 110 | 1.0,0.9205716,-0.66552418,0.17972295,1.1000525,4.1997959 111 | 0.0,0.14042618,0.43612247,0.36126129,-0.28522041,-0.044763021 112 | 0.0,0.053743351,0.11210875,-0.092584553,-0.27063859,-0.22771815 113 | 0.0,-0.20630512,0.84653984,-0.36489206,-0.27063859,-0.27650618 114 | 0.0,-0.8130849,0.30651698,0.54279963,-0.31438405,-0.54484036 115 | 1.0,0.9205716,-1.4863589,0.54279963,-0.24147495,0.040616038 116 | 0.0,-0.37967077,-0.81673058,0.17972295,-0.21231131,0.34554125 117 | 0.0,-1.4198647,-0.21190497,-0.36489206,-0.29980223,-0.17893011 118 | 0.0,-0.032939473,-0.039097655,-0.45566123,-0.25605677,-0.25211216 119 | 0.0,-0.1196223,0.11210875,0.17972295,-0.22689313,-0.21552114 120 | 0.0,0.74720595,0.13370966,0.17972295,-0.31438405,-0.081354046 121 | 0.0,-0.72640207,-0.44951503,-0.63719957,-0.27063859,-0.3009002 122 | 0.0,-0.37967077,-0.082299485,-0.092584553,-0.18314766,-0.069157037 123 | 0.0,-0.20630512,0.67373253,-0.36489206,-0.0081658194,-0.3009002 124 | 0.0,0.66052313,-0.14710223,-0.0018153834,-0.19772949,0.32114723 125 | 0.0,-0.032939473,-0.49271686,-0.5464304,-0.095656742,-0.27650618 126 | 1.0,1.0072544,-1.1191434,-0.092584553,1.2167071,5.9561651 127 | 1.0,0.227109,-1.5727626,-1.0910454,5.5620896,1.8091822 128 | 0.0,0.74720595,0.50092521,-0.36489206,-0.27063859,-0.22771815 129 | 0.0,0.31379183,-0.19030406,0.088953786,-0.15398402,-0.3130972 130 | 0.0,-0.8130849,0.13370966,0.45203046,-0.28522041,-0.27650618 131 | 1.0,-1.766596,2.574613,4.7181814,-0.31438405,-0.52044635 132 | 0.0,-0.032939473,0.025705088,-0.5464304,-0.15398402,0.016222021 133 | 0.0,0.5738403,0.26331515,0.088953786,-0.24147495,-0.22771815 134 | 0.0,0.48715748,0.11210875,-0.092584553,-0.18314766,-0.26430917 135 | 0.0,0.66052313,0.50092521,-0.0018153834,-0.24147495,-0.032566012 136 | 1.0,0.74720595,-0.73032692,-0.5464304,-0.16856584,0.89440662 137 | 0.0,-0.29298795,-0.68712509,-0.36489206,-0.24147495,-0.032566012 138 | 0.0,0.14042618,-0.29830863,-0.27412289,-0.24147495,0.4309203 139 | 
0.0,0.5738403,-0.14710223,0.72433797,-0.27063859,0.0040250127 140 | 0.0,0.053743351,0.30651698,0.36126129,-0.28522041,-0.10574806 141 | 0.0,-0.032939473,0.11210875,-0.0018153834,-0.35812951,-0.52044635 142 | 1.0,1.8740827,-1.5511617,-1.0002762,1.0271434,0.065010054 143 | 0.0,-0.032939473,-0.31990955,0.17972295,-0.25605677,-0.069157037 144 | 0.0,0.83388877,0.65213161,-0.36489206,-0.22689313,-0.069157037 145 | 1.0,-3.8469838,1.7969801,7.3504873,-0.22689313,-0.49605233 146 | 0.0,1.4406685,-0.1039004,0.36126129,-0.27063859,-0.020369004 147 | 1.0,2.1341311,1.4081636,2.6304905,-0.32896587,-0.49605233 148 | 0.0,-0.63971925,-0.31990955,-0.36489206,-0.29980223,-0.21552114 149 | 0.0,0.227109,-0.73032692,-0.63719957,-0.24147495,-0.26430917 150 | 1.0,-0.29298795,0.76013618,0.99664548,-0.25605677,-0.50824934 151 | 0.0,-0.29298795,-0.21190497,-1.0910454,-0.27063859,-0.22771815 152 | 0.0,0.40047465,0.26331515,0.45203046,-0.12482038,-0.54484036 153 | 0.0,0.40047465,-0.31990955,-0.27412289,-0.18314766,-0.53264335 154 | 0.0,0.83388877,0.15531058,0.17972295,-0.22689313,-0.37408225 155 | 0.0,-0.98645055,-0.90313424,-1.3633529,-0.22689313,-0.1667331 156 | 0.0,-0.55303642,0.047306003,-0.5464304,-0.31438405,-0.49605233 157 | 0.0,-0.20630512,0.67373253,-0.72796874,-0.28522041,-0.13014208 158 | 1.0,-0.63971925,-0.9895379,-0.45566123,-0.22689313,0.30895022 159 | 0.0,-0.29298795,-0.1039004,-0.18335372,-0.28522041,-0.13014208 160 | 1.0,0.83388877,-1.3135516,-0.72796874,2.9373619,0.18698014 161 | 0.0,1.0939372,-0.039097655,-0.27412289,-0.28522041,-0.23991515 162 | 1.0,0.053743351,2.2505993,1.6320297,-0.32896587,-0.48385532 163 | 1.0,1.3539857,-1.3351525,-0.72796874,0.82299795,2.6507758 164 | 1.0,-2.8934727,3.330645,2.358183,-0.24147495,-0.52044635 165 | 1.0,0.74720595,-1.3567534,-0.36489206,1.2750344,5.4438907 166 | 0.0,-0.98645055,0.11210875,-0.27412289,-0.081074922,-0.5936284 167 | 1.0,-1.766596,2.2073975,4.8997198,-0.25605677,-0.53264335 168 | 0.0,-0.37967077,-0.62232235,-0.36489206,-0.02274764,0.016222021 169 | 0.0,-0.63971925,-0.29830863,-0.63719957,-0.22689313,-0.33749122 170 | 1.0,-0.98645055,1.4729664,2.1766447,-0.16856584,-0.48385532 171 | 0.0,0.053743351,-0.31990955,-0.45566123,-0.27063859,-0.27650618 172 | 0.0,1.0072544,0.7817371,-0.36489206,-0.18314766,-0.4472643 173 | 1.0,2.7409109,-1.5943635,-0.5464304,0.82299795,0.40652629 174 | 0.0,0.31379183,0.0041041738,1.0874146,-0.12482038,0.21137415 175 | 1.0,0.31379183,-1.0327397,-1.0910454,0.42928879,1.8823643 176 | 1.0,0.14042618,1.32176,0.17972295,-0.28522041,-0.52044635 177 | 0.0,-0.55303642,-0.29830863,-0.092584553,-0.1394022,-0.37408225 178 | 0.0,-0.4663536,-0.81673058,-0.092584553,-0.34354769,-0.41067327 179 | 1.0,1.1806201,-0.92473515,-0.72796874,1.5812526,6.3586664 180 | 1.0,-1.766596,3.0066313,3.1751055,-0.34354769,-0.49605233 181 | 0.0,0.053743351,-0.2551068,-0.0018153834,-0.18314766,0.028419029 182 | 0.0,-0.55303642,-0.38471229,-0.45566123,-0.34354769,-0.044763021 183 | 1.0,2.3074968,-1.8319736,-1.4541221,4.3372167,0.51629936 184 | 0.0,-0.29298795,-0.1039004,-0.36489206,-0.29980223,-0.4472643 185 | 0.0,0.48715748,1.1705536,0.36126129,-0.12482038,-0.26430917 186 | 0.0,-1.6799131,-0.38471229,-0.27412289,-0.21231131,-0.37408225 187 | 1.0,-1.246499,0.26331515,0.72433797,-0.18314766,-0.54484036 188 | 1.0,-3.8469838,3.330645,3.5381822,-0.22689313,-0.48385532 189 | 0.0,0.74720595,0.15531058,-0.092584553,-0.21231131,-0.14233909 190 | 0.0,0.9205716,-0.60072143,-0.63719957,-0.19772949,0.016222021 191 | 
1.0,0.053743351,1.1489526,-0.0018153834,-0.31438405,-0.53264335 192 | 0.0,-0.55303642,-0.082299485,0.90587631,-0.21231131,-0.52044635 193 | 1.0,-1.3331818,2.2938011,-0.092584553,-0.21231131,-0.56923438 194 | 1.0,0.83388877,-1.0327397,-0.72796874,0.60427064,4.4681301 195 | 0.0,-0.032939473,-0.9895379,-0.27412289,-0.21231131,-0.32529421 196 | 0.0,0.5738403,0.43612247,-0.092584553,-0.1394022,-0.32529421 197 | 0.0,1.7873998,0.025705088,-0.27412289,-0.28522041,0.052813046 198 | 0.0,2.0474483,-0.039097655,0.90587631,-0.29980223,-0.27650618 199 | 0.0,-0.1196223,0.22011332,-0.63719957,-0.1394022,-0.38627925 200 | 0.0,1.700717,0.43612247,0.72433797,-0.24147495,-0.081354046 201 | 0.0,0.83388877,0.32811789,0.36126129,-0.095656742,-0.3130972 202 | 0.0,0.74720595,-0.73032692,-0.63719957,-0.24147495,-0.3009002 203 | 0.0,-0.72640207,-0.77352875,-0.36489206,-0.28522041,-0.1545361 204 | 0.0,1.0939372,0.41452155,0.72433797,-0.16856584,-0.22771815 205 | 1.0,2.5675453,1.4081636,1.7227988,-0.25605677,-0.53264335 206 | 0.0,1.0072544,0.047306003,-0.18335372,-0.22689313,-0.49605233 207 | 0.0,0.48715748,-0.38471229,-0.18335372,-0.32896587,-0.23991515 208 | 0.0,-0.1196223,-0.73032692,-0.81873791,-0.28522041,-0.32529421 209 | 0.0,1.4406685,0.11210875,-0.18335372,-0.24147495,-0.081354046 210 | 0.0,0.40047465,0.0041041738,0.45203046,-0.19772949,0.18698014 211 | 1.0,-1.766596,0.95454442,1.9951063,-0.34354769,-0.48385532 212 | 0.0,0.31379183,0.26331515,-0.18335372,-0.29980223,-0.22771815 213 | 0.0,0.053743351,-0.44951503,-0.0018153834,-0.11023856,0.2723592 214 | 0.0,0.40047465,0.22011332,0.17972295,-0.37271133,-0.33749122 215 | 0.0,0.053743351,0.11210875,-0.27412289,-0.18314766,-0.17893011 216 | 1.0,-0.89976772,0.67373253,1.0874146,-0.34354769,-0.52044635 217 | -------------------------------------------------------------------------------- /models/nn.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import torch 3 | import torch.nn as nn 4 | import torch.nn.functional as F 5 | from sklearn.metrics import accuracy_score 6 | 7 | 8 | class DMILoss: 9 | def __call__(self, *args, **kwargs): 10 | return self.forward(*args, **kwargs) 11 | 12 | def forward(self, output, target): 13 | outputs = torch.softmax(output, dim=-1) 14 | targets = target.reshape(-1, 1).type(torch.int64).to(output.device) 15 | y_onehot = torch.zeros(target.size(0), 2, device=output.device) 16 | y_onehot.scatter_(1, targets, 1) 17 | y_onehot = y_onehot.transpose(0, 1) 18 | mat = y_onehot @ outputs 19 | mat = mat / target.size(0) 20 | det = torch.det(mat.float()) 21 | if det < 0: 22 | return torch.log(torch.abs(det) + 0.0001) 23 | else: 24 | return -torch.log(torch.abs(det) + 0.0001) 25 | 26 | 27 | class SigmoidLoss: 28 | def __call__(self, *args, **kwargs): 29 | return self.forward(*args, **kwargs) 30 | 31 | def forward(self, y_pred, y): 32 | return torch.mean(1. / (1.
+ torch.exp(y_pred * y))) 33 | 34 | class MLP(nn.Module): 35 | def __init__(self, feature_dim, hidsizes, outputs=1, dropout=0., activation='relu'): 36 | super(MLP, self).__init__() 37 | 38 | if activation == 'relu': 39 | self.ac_fn = torch.nn.ReLU 40 | elif activation == 'tanh': 41 | self.ac_fn = torch.nn.Tanh 42 | elif activation == 'sigmoid': 43 | self.ac_fn = torch.nn.Sigmoid 44 | elif activation == 'leaky': 45 | self.ac_fn = torch.nn.LeakyReLU 46 | elif activation == 'elu': 47 | self.ac_fn = torch.nn.ELU 48 | elif activation == 'relu6': 49 | self.ac_fn = torch.nn.ReLU6 50 | 51 | self.mlp = [] 52 | hidsizes = [feature_dim] + hidsizes 53 | for i in range(1, len(hidsizes)): 54 | self.mlp.append(nn.Linear(hidsizes[i-1], hidsizes[i])) 55 | self.mlp.append(nn.Dropout(dropout)) 56 | self.mlp.append(self.ac_fn()) 57 | self.mlp = nn.Sequential(*self.mlp, nn.Linear(hidsizes[-1], outputs)) 58 | 59 | def forward(self, x): 60 | if not isinstance(x, torch.Tensor): 61 | x = torch.tensor(x, dtype=torch.float) 62 | if x.dim() < 2: 63 | x = x.unsqueeze(0) 64 | if torch.cuda.is_available(): 65 | x = x.cuda() 66 | return self.mlp(x).squeeze(-1) 67 | 68 | @staticmethod 69 | def layer_init(layer, w_scale=1.0): 70 | nn.init.orthogonal_(layer.weight.data) 71 | layer.weight.data.mul_(w_scale) 72 | nn.init.constant_(layer.bias.data, 0) 73 | return layer 74 | 75 | 76 | class BinaryClassifier(object): 77 | def __init__(self, model, learning_rate, loss_func='bce'): 78 | self.model = model 79 | if torch.cuda.is_available(): 80 | self.model.cuda() 81 | 82 | if loss_func == 'bce': 83 | self.transform_y = False 84 | self.ac_fn = None 85 | self.loss_func = torch.nn.BCEWithLogitsLoss 86 | elif loss_func == 'mse': 87 | self.transform_y = True 88 | self.ac_fn = torch.tanh 89 | self.loss_func = torch.nn.MSELoss 90 | elif loss_func == 'l1': 91 | self.transform_y = True 92 | self.ac_fn = torch.tanh 93 | self.loss_func = torch.nn.L1Loss 94 | elif loss_func == 'huber': 95 | self.transform_y = True 96 | self.ac_fn = torch.tanh 97 | self.loss_func = torch.nn.SmoothL1Loss 98 | elif loss_func == 'logistic': 99 | self.transform_y = True 100 | self.ac_fn = torch.tanh 101 | self.loss_func = torch.nn.SoftMarginLoss 102 | elif loss_func == 'sigmoid': 103 | self.transform_y = True 104 | self.ac_fn = torch.tanh 105 | self.loss_func = SigmoidLoss 106 | elif loss_func == 'dmi': 107 | self.transform_y = False 108 | self.ac_fn = torch.sigmoid 109 | self.loss_func = DMILoss 110 | else: 111 | raise NotImplementedError(loss_func) 112 | 113 | self.loss = self.loss_func() 114 | self.optimizer = torch.optim.Adam(self.model.parameters(), learning_rate) 115 | 116 | def predict(self, X): 117 | with torch.no_grad(): 118 | if not self.ac_fn: 119 | y_pred = torch.sigmoid(self.model(X)).cpu().numpy() # bce with logits 120 | else: 121 | y_pred = self.ac_fn(self.model(X)).cpu().numpy() 122 | if self.transform_y: 123 | y_pred[y_pred < 0] = -1 124 | y_pred[y_pred >= 0] = 1 125 | else: 126 | y_pred = y_pred.round() 127 | return y_pred 128 | 129 | def train(self, X, y): 130 | self.model.train() 131 | 132 | y_pred = self.model(X) 133 | if self.ac_fn: 134 | y_pred = self.ac_fn(y_pred) 135 | y = torch.tensor(y, dtype=torch.float) 136 | 137 | loss = self.loss(y_pred, y) 138 | self.optimizer.zero_grad() 139 | loss.backward() 140 | # torch.nn.utils.clip_grad_norm_(self.model.parameters(), 0.5) 141 | self.optimizer.step() 142 | return loss.item() 143 | 144 | def val(self, X, y): 145 | self.model.eval() 146 | y_pred = self.predict(torch.tensor(X, dtype=torch.float)) 147 |
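# ------------------------------------------------------------------
# Usage note (added commentary, not part of the original nn.py): a minimal
# sketch of wiring MLP and BinaryClassifier together; the sizes and
# hyperparameters below are illustrative placeholders only:
#
#   model = MLP(feature_dim=X_train.shape[1], hidsizes=[8], outputs=1)
#   clf = BinaryClassifier(model, learning_rate=1e-3, loss_func='bce')
#   history = clf.fit(X_train, y_train, X_val, y_val, episodes=1000, batchsize=64)
#
# With loss_func='bce' the raw logits feed BCEWithLogitsLoss and predict()
# applies a sigmoid and rounds to {0, 1}; the margin-style losses ('mse',
# 'l1', 'huber', 'logistic', 'sigmoid') squash the output with tanh and
# threshold at 0 instead, which is why fit() first remaps labels to {-1, +1}.
# ------------------------------------------------------------------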
acc = accuracy_score(y, y_pred) 148 | return acc 149 | 150 | def fit(self, X_train, y_train, X_val=None, y_val=None, episodes=100, batchsize=None, 151 | val_interval=20, log_interval=100, logger=None): 152 | if self.transform_y: 153 | y_train[y_train == 0] = -1 154 | if y_val is not None: 155 | y_val[y_val == 0] = -1 156 | 157 | train_acc, val_acc, losses = [], [], [] 158 | batchsize = batchsize if batchsize and batchsize < len(X_train) else len(X_train) 159 | m = X_train.shape[0] 160 | 161 | for ep in range(episodes): 162 | mb_idxes = np.random.choice(m, batchsize, replace=False) 163 | mb_X_train, mb_y_train = X_train[mb_idxes], y_train[mb_idxes] 164 | loss = self.train(mb_X_train, mb_y_train) 165 | losses.append(loss) 166 | 167 | if ep % val_interval == 0 and X_val is not None and y_val is not None: 168 | train_acc.append(self.val(X_train, y_train)) 169 | val_acc.append(self.val(X_val, y_val)) 170 | if logger is not None and ep % log_interval == 0: 171 | logger.record_tabular('ep', ep) 172 | logger.record_tabular('loss', np.mean(losses[-log_interval:])) 173 | logger.record_tabular('train_acc', np.mean(train_acc[-log_interval//val_interval:])) 174 | if X_val is not None and y_val is not None: 175 | logger.record_tabular('val_acc', np.mean(val_acc[-log_interval//val_interval:])) 176 | logger.dump_tabular() 177 | 178 | return {'loss': losses, 'train_acc': train_acc, 'val_acc': val_acc} 179 | 180 | 181 | class DMIClassifier(object): 182 | def __init__(self, model, learning_rate): 183 | self.model = model 184 | if torch.cuda.is_available(): 185 | self.model.cuda() 186 | 187 | self.loss = DMILoss() 188 | self.optimizer = torch.optim.Adam(self.model.parameters(), learning_rate) 189 | 190 | def predict(self, X): 191 | with torch.no_grad(): 192 | y_pred = self.model(X).cpu().numpy() 193 | y_pred = y_pred.argmax(-1) 194 | return y_pred 195 | 196 | def train(self, X, y): 197 | self.model.train() 198 | 199 | y_pred = self.model(X) 200 | y = torch.tensor(y, dtype=torch.float) 201 | 202 | loss = self.loss(y_pred, y) 203 | self.optimizer.zero_grad() 204 | loss.backward() 205 | # torch.nn.utils.clip_grad_norm_(self.model.parameters(), 0.5) 206 | self.optimizer.step() 207 | return loss.item() 208 | 209 | def val(self, X, y): 210 | self.model.eval() 211 | y_pred = self.predict(torch.tensor(X, dtype=torch.float)) 212 | acc = accuracy_score(y, y_pred) 213 | return acc 214 | 215 | def fit(self, X_train, y_train, X_val=None, y_val=None, episodes=100, batchsize=None, 216 | val_interval=20, log_interval=100, logger=None): 217 | train_acc, val_acc, losses = [], [], [] 218 | batchsize = batchsize if batchsize and batchsize < len(X_train) else len(X_train) 219 | m = X_train.shape[0] 220 | 221 | for ep in range(episodes): 222 | mb_idxes = np.random.choice(m, batchsize, replace=False) 223 | mb_X_train, mb_y_train = X_train[mb_idxes], y_train[mb_idxes] 224 | loss = self.train(mb_X_train, mb_y_train) 225 | losses.append(loss) 226 | 227 | if ep % val_interval == 0 and X_val is not None and y_val is not None: 228 | train_acc.append(self.val(X_train, y_train)) 229 | val_acc.append(self.val(X_val, y_val)) 230 | if logger is not None and ep % log_interval == 0: 231 | logger.record_tabular('ep', ep) 232 | logger.record_tabular('loss', np.mean(losses[-log_interval:])) 233 | logger.record_tabular('train_acc', np.mean(train_acc[-log_interval//val_interval:])) 234 | if X_val is not None and y_val is not None: 235 | logger.record_tabular('val_acc', np.mean(val_acc[-log_interval//val_interval:])) 236 | logger.dump_tabular() 237 | 
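# ------------------------------------------------------------------
# Note (added commentary, not part of the original nn.py): what DMILoss is
# measuring, informally. For a batch of N binary labels, mat =
# (y_onehot^T @ softmax_outputs) / N is the 2x2 empirical joint distribution
# between labels and predicted class probabilities. If predictions are
# confident and correct, mat approaches diag(p0, p1) with p0, p1 the label
# frequencies, so |det(mat)| = p0 * p1 is large; if predictions are
# independent of the labels, the rows of mat become proportional and
# det(mat) -> 0. Minimizing -log|det(mat)| therefore keeps the outputs
# informative about the (noisy) labels, which is the determinant-based
# mutual information idea behind L_DMI.
# ------------------------------------------------------------------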
238 | return {'loss': losses, 'train_acc': train_acc, 'val_acc': val_acc} 239 | 240 | 241 | class SurrogateBinaryClassifier(BinaryClassifier): 242 | def __init__(self, model, learning_rate, loss_func, e0, e1): 243 | super(SurrogateBinaryClassifier, self).__init__(model, learning_rate, loss_func) 244 | self.e = np.array([e0, e1], dtype=float) 245 | self.loss = self.loss_func(reduction='none') 246 | 247 | def train(self, X, y): 248 | """ The original surrogate function is: 249 | 250 | (1 - \rho_{-y}) * l(t,y) - \rho_{y} * l(t,-y) 251 | loss = --------------------------------------------- 252 | 1 - \rho_{+1} - \rho_{-1} 253 | 254 | where y \in {-1, +1}. 255 | 256 | Since we use {0, 1} as the label, the loss becomes: 257 | 258 | (1 - e_{1-y}) * l(t,y) - e_{y} * l(t,1-y) 259 | loss = ----------------------------------------- 260 | 1 - e_0 - e_1 261 | """ 262 | self.model.train() 263 | 264 | y_pred = self.model(X) 265 | if self.ac_fn: 266 | y_pred = self.ac_fn(y_pred) 267 | if self.transform_y: 268 | y[y == -1] = 0 269 | c1 = torch.tensor(1 - self.e[np.int32(1-y)], dtype=torch.float) 270 | c2 = torch.tensor(self.e[np.int32(y)], dtype=torch.float) 271 | if self.transform_y: 272 | y[y == 0] = -1 273 | y = torch.tensor(y, dtype=torch.float) 274 | 275 | loss1 = c1 * self.loss(y_pred, y) 276 | loss2 = c2 * self.loss(y_pred, -y if self.transform_y else 1 - y) 277 | loss = torch.mean(loss1 - loss2) / (1 - self.e.sum()) 278 | self.optimizer.zero_grad() 279 | loss.backward() 280 | # torch.nn.utils.clip_grad_norm_(self.model.parameters(), 0.5) 281 | self.optimizer.step() 282 | return loss.item() 283 | 284 | 285 | class PeerBinaryClassifier(BinaryClassifier): 286 | def __init__(self, model, learning_rate, loss_func, alpha=1.): 287 | super(PeerBinaryClassifier, self).__init__(model, learning_rate, loss_func) 288 | self.alpha = alpha 289 | 290 | def train(self, X, y, X_, y_): 291 | self.model.train() 292 | 293 | y_pred = self.model(X) 294 | if self.ac_fn: 295 | y_pred = self.ac_fn(y_pred) 296 | y = torch.tensor(y, dtype=torch.float) 297 | 298 | y_pred_ = self.model(X_) 299 | if self.ac_fn: 300 | y_pred_ = self.ac_fn(y_pred_) 301 | y_ = torch.tensor(y_, dtype=torch.float) 302 | 303 | loss = self.loss(y_pred, y) - self.alpha * self.loss(y_pred_, y_) 304 | self.optimizer.zero_grad() 305 | loss.backward() 306 | # torch.nn.utils.clip_grad_norm_(self.model.parameters(), 0.5) 307 | self.optimizer.step() 308 | return loss.item() 309 | 310 | def fit(self, X_train, y_train, X_val=None, y_val=None, episodes=100, batchsize=None, batchsize_=None, 311 | val_interval=20, log_interval=100, logger=None): 312 | if self.transform_y: 313 | y_train[y_train == 0] = -1 314 | if y_val is not None: 315 | y_val[y_val == 0] = -1 316 | 317 | losses, train_acc, val_acc = [], [], [] 318 | batchsize = batchsize or len(X_train) 319 | batchsize_ = batchsize_ or len(X_train) 320 | m = X_train.shape[0] 321 | 322 | for ep in range(episodes): 323 | mb_idxes = np.random.choice(m, batchsize, replace=False) 324 | mb_X_train, mb_y_train = X_train[mb_idxes], y_train[mb_idxes] 325 | mb_X_train_ = X_train[np.random.choice(m, batchsize_, replace=False)] 326 | mb_y_train_ = y_train[np.random.choice(m, batchsize_, replace=False)] 327 | loss = self.train(mb_X_train, mb_y_train, mb_X_train_, mb_y_train_) 328 | losses.append(loss) 329 | 330 | if ep % val_interval == 0 and X_val is not None and y_val is not None: 331 | train_acc.append(self.val(X_train, y_train)) 332 | val_acc.append(self.val(X_val, y_val)) 333 | if logger is not None and ep %
log_interval == 0: 334 | logger.record_tabular('ep', ep) 335 | logger.record_tabular('loss', np.mean(losses[-log_interval:])) 336 | logger.record_tabular('train_acc', np.mean(train_acc[-log_interval//val_interval:])) 337 | if X_val is not None and y_val is not None: 338 | logger.record_tabular('val_acc', np.mean(val_acc[-log_interval//val_interval:])) 339 | logger.dump_tabular() 340 | 341 | return { 342 | 'loss': losses, 343 | 'train_acc': train_acc, 344 | 'val_acc': val_acc 345 | } 346 | -------------------------------------------------------------------------------- /utils/logger.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | import shutil 4 | import os.path as osp 5 | import json 6 | import time 7 | import datetime 8 | import tempfile 9 | from collections import defaultdict 10 | 11 | LOG_OUTPUT_FORMATS = ['stdout', 'log', 'csv'] 12 | # Also valid: json, tensorboard 13 | 14 | DEBUG = 10 15 | INFO = 20 16 | WARN = 30 17 | ERROR = 40 18 | 19 | DISABLED = 50 20 | 21 | class KVWriter(object): 22 | def writekvs(self, kvs): 23 | raise NotImplementedError 24 | 25 | class SeqWriter(object): 26 | def writeseq(self, seq): 27 | raise NotImplementedError 28 | 29 | class HumanOutputFormat(KVWriter, SeqWriter): 30 | def __init__(self, filename_or_file): 31 | if isinstance(filename_or_file, str): 32 | self.file = open(filename_or_file, 'wt') 33 | self.own_file = True 34 | else: 35 | assert hasattr(filename_or_file, 'read'), 'expected file or str, got %s'%filename_or_file 36 | self.file = filename_or_file 37 | self.own_file = False 38 | 39 | def writekvs(self, kvs): 40 | # Create strings for printing 41 | key2str = {} 42 | for (key, val) in sorted(kvs.items()): 43 | if isinstance(val, float): 44 | valstr = '%-8.3g' % (val,) 45 | else: 46 | valstr = str(val) 47 | key2str[self._truncate(key)] = self._truncate(valstr) 48 | 49 | # Find max widths 50 | if len(key2str) == 0: 51 | print('WARNING: tried to write empty key-value dict') 52 | return 53 | else: 54 | keywidth = max(map(len, key2str.keys())) 55 | valwidth = max(map(len, key2str.values())) 56 | 57 | # Write out the data 58 | dashes = '-' * (keywidth + valwidth + 7) 59 | lines = [dashes] 60 | for (key, val) in sorted(key2str.items()): 61 | lines.append('| %s%s | %s%s |' % ( 62 | key, 63 | ' ' * (keywidth - len(key)), 64 | val, 65 | ' ' * (valwidth - len(val)), 66 | )) 67 | lines.append(dashes) 68 | self.file.write('\n'.join(lines) + '\n') 69 | 70 | # Flush the output to the file 71 | self.file.flush() 72 | 73 | def _truncate(self, s): 74 | return s[:20] + '...' 
if len(s) > 23 else s 75 | 76 | def writeseq(self, seq): 77 | for arg in seq: 78 | self.file.write(arg) 79 | self.file.write('\n') 80 | self.file.flush() 81 | 82 | def close(self): 83 | if self.own_file: 84 | self.file.close() 85 | 86 | class JSONOutputFormat(KVWriter): 87 | def __init__(self, filename): 88 | self.file = open(filename, 'wt') 89 | 90 | def writekvs(self, kvs): 91 | for k, v in sorted(kvs.items()): 92 | if hasattr(v, 'dtype'): 93 | v = v.tolist() 94 | kvs[k] = float(v) 95 | self.file.write(json.dumps(kvs) + '\n') 96 | self.file.flush() 97 | 98 | def close(self): 99 | self.file.close() 100 | 101 | class CSVOutputFormat(KVWriter): 102 | def __init__(self, filename): 103 | self.file = open(filename, 'w+t') 104 | self.keys = [] 105 | self.sep = ',' 106 | 107 | def writekvs(self, kvs): 108 | # Add our current row to the history 109 | extra_keys = kvs.keys() - self.keys 110 | if extra_keys: 111 | self.keys.extend(extra_keys) 112 | self.file.seek(0) 113 | lines = self.file.readlines() 114 | self.file.seek(0) 115 | for (i, k) in enumerate(self.keys): 116 | if i > 0: 117 | self.file.write(',') 118 | self.file.write(k) 119 | self.file.write('\n') 120 | for line in lines[1:]: 121 | self.file.write(line[:-1]) 122 | self.file.write(self.sep * len(extra_keys)) 123 | self.file.write('\n') 124 | for (i, k) in enumerate(self.keys): 125 | if i > 0: 126 | self.file.write(',') 127 | v = kvs.get(k) 128 | if v is not None: 129 | self.file.write(str(v)) 130 | self.file.write('\n') 131 | self.file.flush() 132 | 133 | def close(self): 134 | self.file.close() 135 | 136 | 137 | class TensorBoardOutputFormat(KVWriter): 138 | """ 139 | Dumps key/value pairs into TensorBoard's numeric format. 140 | """ 141 | def __init__(self, dir): 142 | os.makedirs(dir, exist_ok=True) 143 | self.dir = dir 144 | self.step = 1 145 | prefix = 'events' 146 | path = osp.join(osp.abspath(dir), prefix) 147 | import tensorflow as tf 148 | from tensorflow.python import pywrap_tensorflow 149 | from tensorflow.core.util import event_pb2 150 | from tensorflow.python.util import compat 151 | self.tf = tf 152 | self.event_pb2 = event_pb2 153 | self.pywrap_tensorflow = pywrap_tensorflow 154 | self.writer = pywrap_tensorflow.EventsWriter(compat.as_bytes(path)) 155 | 156 | def writekvs(self, kvs): 157 | def summary_val(k, v): 158 | kwargs = {'tag': k, 'simple_value': float(v)} 159 | return self.tf.Summary.Value(**kwargs) 160 | summary = self.tf.Summary(value=[summary_val(k, v) for k, v in kvs.items()]) 161 | event = self.event_pb2.Event(wall_time=time.time(), summary=summary) 162 | event.step = self.step # is there any reason why you'd want to specify the step? 
163 | self.writer.WriteEvent(event) 164 | self.writer.Flush() 165 | self.step += 1 166 | 167 | def close(self): 168 | if self.writer: 169 | self.writer.Close() 170 | self.writer = None 171 | 172 | def make_output_format(format, ev_dir, log_suffix=''): 173 | os.makedirs(ev_dir, exist_ok=True) 174 | if format == 'stdout': 175 | return HumanOutputFormat(sys.stdout) 176 | elif format == 'log': 177 | return HumanOutputFormat(osp.join(ev_dir, 'log%s.txt' % log_suffix)) 178 | elif format == 'json': 179 | return JSONOutputFormat(osp.join(ev_dir, 'progress%s.json' % log_suffix)) 180 | elif format == 'csv': 181 | return CSVOutputFormat(osp.join(ev_dir, 'progress%s.csv' % log_suffix)) 182 | elif format == 'tensorboard': 183 | return TensorBoardOutputFormat(osp.join(ev_dir, 'tb%s' % log_suffix)) 184 | else: 185 | raise ValueError('Unknown format specified: %s' % (format,)) 186 | 187 | # ================================================================ 188 | # API 189 | # ================================================================ 190 | 191 | def logkv(key, val): 192 | """ 193 | Log a value of some diagnostic 194 | Call this once for each diagnostic quantity, each iteration 195 | If called many times, last value will be used. 196 | """ 197 | Logger.CURRENT.logkv(key, val) 198 | 199 | def logkv_mean(key, val): 200 | """ 201 | The same as logkv(), but if called many times, values averaged. 202 | """ 203 | Logger.CURRENT.logkv_mean(key, val) 204 | 205 | def logkvs(d): 206 | """ 207 | Log a dictionary of key-value pairs 208 | """ 209 | for (k, v) in d.items(): 210 | logkv(k, v) 211 | 212 | def dumpkvs(): 213 | """ 214 | Write all of the diagnostics from the current iteration 215 | 216 | level: int. (see logger.py docs) If the global logger level is higher than 217 | the level argument here, don't print to stdout. 218 | """ 219 | Logger.CURRENT.dumpkvs() 220 | 221 | def getkvs(): 222 | return Logger.CURRENT.name2val 223 | 224 | 225 | def log(*args, level=INFO): 226 | """ 227 | Write the sequence of args, with no separators, to the console and output files (if you've configured an output file). 228 | """ 229 | Logger.CURRENT.log(*args, level=level) 230 | 231 | def debug(*args): 232 | log(*args, level=DEBUG) 233 | 234 | def info(*args): 235 | log(*args, level=INFO) 236 | 237 | def warn(*args): 238 | log(*args, level=WARN) 239 | 240 | def error(*args): 241 | log(*args, level=ERROR) 242 | 243 | 244 | def set_level(level): 245 | """ 246 | Set logging threshold on current logger. 247 | """ 248 | Logger.CURRENT.set_level(level) 249 | 250 | def get_dir(): 251 | """ 252 | Get directory that log files are being written to. 
253 | will be None if there is no output directory (i.e., if you didn't call start) 254 | """ 255 | return Logger.CURRENT.get_dir() 256 | 257 | record_tabular = logkv 258 | dump_tabular = dumpkvs 259 | 260 | class ProfileKV: 261 | """ 262 | Usage: 263 | with logger.ProfileKV("interesting_scope"): 264 | code 265 | """ 266 | def __init__(self, n): 267 | self.n = "wait_" + n 268 | def __enter__(self): 269 | self.t1 = time.time() 270 | def __exit__(self ,type, value, traceback): 271 | Logger.CURRENT.name2val[self.n] += time.time() - self.t1 272 | 273 | def profile(n): 274 | """ 275 | Usage: 276 | @profile("my_func") 277 | def my_func(): code 278 | """ 279 | def decorator_with_name(func): 280 | def func_wrapper(*args, **kwargs): 281 | with ProfileKV(n): 282 | return func(*args, **kwargs) 283 | return func_wrapper 284 | return decorator_with_name 285 | 286 | 287 | # ================================================================ 288 | # Backend 289 | # ================================================================ 290 | 291 | class Logger(object): 292 | DEFAULT = None # A logger with no output files. (See right below class definition) 293 | # So that you can still log to the terminal without setting up any output files 294 | CURRENT = None # Current logger being used by the free functions above 295 | 296 | def __init__(self, dir, output_formats): 297 | self.name2val = defaultdict(float) # values this iteration 298 | self.name2cnt = defaultdict(int) 299 | self.level = INFO 300 | self.dir = dir 301 | self.output_formats = output_formats 302 | 303 | # Logging API, forwarded 304 | # ---------------------------------------- 305 | def logkv(self, key, val): 306 | self.name2val[key] = val 307 | 308 | def logkv_mean(self, key, val): 309 | if val is None: 310 | self.name2val[key] = None 311 | return 312 | oldval, cnt = self.name2val[key], self.name2cnt[key] 313 | self.name2val[key] = oldval*cnt/(cnt+1) + val/(cnt+1) 314 | self.name2cnt[key] = cnt + 1 315 | 316 | def dumpkvs(self): 317 | if self.level == DISABLED: return 318 | for fmt in self.output_formats: 319 | if isinstance(fmt, KVWriter): 320 | fmt.writekvs(self.name2val) 321 | self.name2val.clear() 322 | self.name2cnt.clear() 323 | 324 | def log(self, *args, level=INFO): 325 | if self.level <= level: 326 | self._do_log(args) 327 | 328 | # Configuration 329 | # ---------------------------------------- 330 | def set_level(self, level): 331 | self.level = level 332 | 333 | def get_dir(self): 334 | return self.dir 335 | 336 | def close(self): 337 | for fmt in self.output_formats: 338 | fmt.close() 339 | 340 | # Misc 341 | # ---------------------------------------- 342 | def _do_log(self, args): 343 | for fmt in self.output_formats: 344 | if isinstance(fmt, SeqWriter): 345 | fmt.writeseq(map(str, args)) 346 | 347 | Logger.DEFAULT = Logger.CURRENT = Logger(dir=None, output_formats=[HumanOutputFormat(sys.stdout)]) 348 | 349 | def configure(dir=None, format_strs=None): 350 | if dir is None: 351 | dir = os.getenv('OPENAI_LOGDIR') 352 | if dir is None: 353 | dir = osp.join(tempfile.gettempdir(), 354 | datetime.datetime.now().strftime("openai-%Y-%m-%d-%H-%M-%S-%f")) 355 | assert isinstance(dir, str) 356 | os.makedirs(dir, exist_ok=True) 357 | 358 | if format_strs is None: 359 | strs = os.getenv('OPENAI_LOG_FORMAT') 360 | format_strs = strs.split(',') if strs else LOG_OUTPUT_FORMATS 361 | output_formats = [make_output_format(f, dir) for f in format_strs] 362 | 363 | Logger.CURRENT = Logger(dir=dir, output_formats=output_formats) 364 | log('Logging to 
%s'%dir) 365 | 366 | def reset(): 367 | if Logger.CURRENT is not Logger.DEFAULT: 368 | Logger.CURRENT.close() 369 | Logger.CURRENT = Logger.DEFAULT 370 | log('Reset logger') 371 | 372 | class scoped_configure(object): 373 | def __init__(self, dir=None, format_strs=None): 374 | self.dir = dir 375 | self.format_strs = format_strs 376 | self.prevlogger = None 377 | def __enter__(self): 378 | self.prevlogger = Logger.CURRENT 379 | configure(dir=self.dir, format_strs=self.format_strs) 380 | def __exit__(self, *args): 381 | Logger.CURRENT.close() 382 | Logger.CURRENT = self.prevlogger 383 | 384 | # ================================================================ 385 | 386 | def _demo(): 387 | info("hi") 388 | debug("shouldn't appear") 389 | set_level(DEBUG) 390 | debug("should appear") 391 | dir = "/tmp/testlogging" 392 | if os.path.exists(dir): 393 | shutil.rmtree(dir) 394 | configure(dir=dir) 395 | logkv("a", 3) 396 | logkv("b", 2.5) 397 | dumpkvs() 398 | logkv("b", -2.5) 399 | logkv("a", 5.5) 400 | dumpkvs() 401 | info("^^^ should see a = 5.5") 402 | logkv_mean("b", -22.5) 403 | logkv_mean("b", -44.4) 404 | logkv("a", 5.5) 405 | dumpkvs() 406 | info("^^^ should see b = 33.3") 407 | 408 | logkv("b", -2.5) 409 | dumpkvs() 410 | 411 | logkv("a", "longasslongasslongasslongasslongasslongassvalue") 412 | dumpkvs() 413 | 414 | 415 | # ================================================================ 416 | # Readers 417 | # ================================================================ 418 | 419 | def read_json(fname): 420 | import pandas 421 | ds = [] 422 | with open(fname, 'rt') as fh: 423 | for line in fh: 424 | ds.append(json.loads(line)) 425 | return pandas.DataFrame(ds) 426 | 427 | def read_csv(fname): 428 | import pandas 429 | return pandas.read_csv(fname, index_col=None, comment='#') 430 | 431 | def read_tb(path): 432 | """ 433 | path : a tensorboard file OR a directory, where we will find all TB files 434 | of the form events.* 435 | """ 436 | import pandas 437 | import numpy as np 438 | from glob import glob 439 | from collections import defaultdict 440 | import tensorflow as tf 441 | if osp.isdir(path): 442 | fnames = glob(osp.join(path, "events.*")) 443 | elif osp.basename(path).startswith("events."): 444 | fnames = [path] 445 | else: 446 | raise NotImplementedError("Expected tensorboard file or directory containing them. 
Got %s"%path) 447 | tag2pairs = defaultdict(list) 448 | maxstep = 0 449 | for fname in fnames: 450 | for summary in tf.train.summary_iterator(fname): 451 | if summary.step > 0: 452 | for v in summary.summary.value: 453 | pair = (summary.step, v.simple_value) 454 | tag2pairs[v.tag].append(pair) 455 | maxstep = max(summary.step, maxstep) 456 | data = np.empty((maxstep, len(tag2pairs))) 457 | data[:] = np.nan 458 | tags = sorted(tag2pairs.keys()) 459 | for (colidx,tag) in enumerate(tags): 460 | pairs = tag2pairs[tag] 461 | for (step, value) in pairs: 462 | data[step-1, colidx] = value 463 | return pandas.DataFrame(data, columns=tags) 464 | 465 | if __name__ == "__main__": 466 | _demo() 467 | -------------------------------------------------------------------------------- /uci/breast-cancer.data: -------------------------------------------------------------------------------- 1 | no-recurrence-events,30-39,premeno,30-34,0-2,no,3,left,left_low,no 2 | no-recurrence-events,40-49,premeno,20-24,0-2,no,2,right,right_up,no 3 | no-recurrence-events,40-49,premeno,20-24,0-2,no,2,left,left_low,no 4 | no-recurrence-events,60-69,ge40,15-19,0-2,no,2,right,left_up,no 5 | no-recurrence-events,40-49,premeno,0-4,0-2,no,2,right,right_low,no 6 | no-recurrence-events,60-69,ge40,15-19,0-2,no,2,left,left_low,no 7 | no-recurrence-events,50-59,premeno,25-29,0-2,no,2,left,left_low,no 8 | no-recurrence-events,60-69,ge40,20-24,0-2,no,1,left,left_low,no 9 | no-recurrence-events,40-49,premeno,50-54,0-2,no,2,left,left_low,no 10 | no-recurrence-events,40-49,premeno,20-24,0-2,no,2,right,left_up,no 11 | no-recurrence-events,40-49,premeno,0-4,0-2,no,3,left,central,no 12 | no-recurrence-events,50-59,ge40,25-29,0-2,no,2,left,left_low,no 13 | no-recurrence-events,60-69,lt40,10-14,0-2,no,1,left,right_up,no 14 | no-recurrence-events,50-59,ge40,25-29,0-2,no,3,left,right_up,no 15 | no-recurrence-events,40-49,premeno,30-34,0-2,no,3,left,left_up,no 16 | no-recurrence-events,60-69,lt40,30-34,0-2,no,1,left,left_low,no 17 | no-recurrence-events,40-49,premeno,15-19,0-2,no,2,left,left_low,no 18 | no-recurrence-events,50-59,premeno,30-34,0-2,no,3,left,left_low,no 19 | no-recurrence-events,60-69,ge40,30-34,0-2,no,3,left,left_low,no 20 | no-recurrence-events,50-59,ge40,30-34,0-2,no,1,right,right_up,no 21 | no-recurrence-events,50-59,ge40,40-44,0-2,no,2,left,left_low,no 22 | no-recurrence-events,60-69,ge40,15-19,0-2,no,2,left,left_low,no 23 | no-recurrence-events,30-39,premeno,25-29,0-2,no,2,right,left_low,no 24 | no-recurrence-events,50-59,premeno,40-44,0-2,no,2,left,left_up,no 25 | no-recurrence-events,50-59,premeno,35-39,0-2,no,2,right,left_up,no 26 | no-recurrence-events,40-49,premeno,25-29,0-2,no,2,left,left_up,no 27 | no-recurrence-events,50-59,premeno,20-24,0-2,no,1,left,left_low,no 28 | no-recurrence-events,60-69,ge40,25-29,0-2,no,3,right,left_up,no 29 | no-recurrence-events,40-49,premeno,40-44,0-2,no,2,right,left_low,no 30 | no-recurrence-events,60-69,ge40,30-34,0-2,no,2,left,left_low,no 31 | no-recurrence-events,50-59,ge40,40-44,0-2,no,3,right,left_up,no 32 | no-recurrence-events,50-59,premeno,15-19,0-2,no,2,right,left_low,no 33 | no-recurrence-events,50-59,premeno,10-14,0-2,no,3,left,left_low,no 34 | no-recurrence-events,50-59,ge40,10-14,0-2,no,1,right,left_up,no 35 | no-recurrence-events,50-59,ge40,10-14,0-2,no,1,left,left_up,no 36 | no-recurrence-events,30-39,premeno,30-34,0-2,no,2,left,left_up,no 37 | no-recurrence-events,50-59,ge40,0-4,0-2,no,2,left,central,no 38 | no-recurrence-events,50-59,ge40,15-19,0-2,no,1,right,central,no 39 
| no-recurrence-events,40-49,premeno,10-14,0-2,no,2,left,left_low,no 40 | no-recurrence-events,40-49,premeno,30-34,0-2,no,1,left,left_low,no 41 | no-recurrence-events,50-59,ge40,20-24,0-2,no,1,right,left_low,no 42 | no-recurrence-events,60-69,ge40,25-29,0-2,no,2,left,left_low,no 43 | no-recurrence-events,60-69,ge40,5-9,0-2,no,1,left,central,no 44 | no-recurrence-events,40-49,premeno,10-14,0-2,no,2,left,left_up,no 45 | no-recurrence-events,50-59,ge40,50-54,0-2,no,1,right,right_up,no 46 | no-recurrence-events,50-59,ge40,30-34,0-2,no,1,left,left_up,no 47 | no-recurrence-events,40-49,premeno,25-29,0-2,no,2,right,left_low,no 48 | no-recurrence-events,50-59,premeno,25-29,0-2,no,1,right,left_up,no 49 | no-recurrence-events,40-49,premeno,20-24,0-2,no,1,right,right_up,no 50 | no-recurrence-events,40-49,premeno,20-24,0-2,no,1,right,left_low,no 51 | no-recurrence-events,50-59,lt40,15-19,0-2,no,2,left,left_low,no 52 | no-recurrence-events,30-39,premeno,20-24,0-2,no,2,left,right_low,no 53 | no-recurrence-events,50-59,premeno,15-19,0-2,no,1,left,left_low,no 54 | no-recurrence-events,70-79,ge40,20-24,0-2,no,3,left,left_up,no 55 | no-recurrence-events,70-79,ge40,40-44,0-2,no,1,right,left_up,no 56 | no-recurrence-events,70-79,ge40,40-44,0-2,no,1,right,right_up,no 57 | no-recurrence-events,50-59,ge40,0-4,0-2,no,1,right,central,no 58 | no-recurrence-events,50-59,ge40,5-9,0-2,no,2,right,right_up,no 59 | no-recurrence-events,60-69,ge40,30-34,0-2,no,1,left,left_up,no 60 | no-recurrence-events,60-69,ge40,15-19,0-2,no,1,right,left_up,no 61 | no-recurrence-events,40-49,premeno,20-24,0-2,no,2,left,central,no 62 | no-recurrence-events,40-49,premeno,10-14,0-2,no,1,right,right_low,no 63 | no-recurrence-events,50-59,ge40,0-4,0-2,no,1,left,left_low,no 64 | no-recurrence-events,20-29,premeno,35-39,0-2,no,2,right,right_up,no 65 | no-recurrence-events,40-49,premeno,25-29,0-2,no,1,left,right_low,no 66 | no-recurrence-events,40-49,premeno,10-14,0-2,no,1,right,left_up,no 67 | no-recurrence-events,40-49,premeno,25-29,0-2,no,1,right,right_low,no 68 | no-recurrence-events,50-59,ge40,20-24,0-2,no,3,left,left_up,no 69 | no-recurrence-events,50-59,ge40,35-39,0-2,no,3,left,left_low,no 70 | no-recurrence-events,60-69,ge40,50-54,0-2,no,2,left,left_low,no 71 | no-recurrence-events,60-69,ge40,10-14,0-2,no,1,left,left_low,no 72 | no-recurrence-events,40-49,premeno,25-29,0-2,no,2,right,left_up,no 73 | no-recurrence-events,60-69,ge40,20-24,0-2,no,2,left,left_up,no 74 | no-recurrence-events,50-59,premeno,15-19,0-2,no,2,right,right_low,no 75 | no-recurrence-events,30-39,premeno,5-9,0-2,no,2,left,right_low,no 76 | no-recurrence-events,50-59,ge40,10-14,0-2,no,1,left,left_low,no 77 | no-recurrence-events,50-59,ge40,10-14,0-2,no,2,left,left_low,no 78 | no-recurrence-events,30-39,premeno,25-29,0-2,no,1,left,central,no 79 | no-recurrence-events,50-59,premeno,25-29,0-2,no,2,left,left_low,no 80 | no-recurrence-events,40-49,premeno,25-29,0-2,no,2,right,central,no 81 | no-recurrence-events,50-59,ge40,10-14,0-2,no,2,right,left_low,no 82 | no-recurrence-events,60-69,ge40,10-14,0-2,no,1,left,left_up,no 83 | no-recurrence-events,60-69,ge40,15-19,0-2,no,2,right,left_low,no 84 | no-recurrence-events,50-59,ge40,15-19,0-2,no,2,right,left_low,no 85 | no-recurrence-events,40-49,premeno,20-24,0-2,no,1,left,right_low,no 86 | no-recurrence-events,50-59,ge40,35-39,0-2,no,3,left,left_up,no 87 | no-recurrence-events,60-69,ge40,25-29,0-2,no,2,right,left_low,no 88 | no-recurrence-events,70-79,ge40,0-4,0-2,no,1,left,right_low,no 89 | 
no-recurrence-events,50-59,ge40,20-24,0-2,no,3,right,left_up,no 90 | no-recurrence-events,40-49,premeno,40-44,0-2,no,1,right,left_up,no 91 | no-recurrence-events,30-39,premeno,0-4,0-2,no,2,right,central,no 92 | no-recurrence-events,50-59,ge40,20-24,0-2,no,3,left,left_up,no 93 | no-recurrence-events,50-59,ge40,25-29,0-2,no,2,right,left_up,no 94 | no-recurrence-events,60-69,ge40,20-24,0-2,no,2,right,left_up,no 95 | no-recurrence-events,50-59,premeno,10-14,0-2,no,1,left,left_low,no 96 | no-recurrence-events,40-49,premeno,30-34,0-2,no,2,right,right_low,no 97 | no-recurrence-events,60-69,ge40,30-34,0-2,no,2,left,left_up,no 98 | no-recurrence-events,60-69,ge40,15-19,0-2,no,2,right,left_up,no 99 | no-recurrence-events,40-49,premeno,30-34,0-2,no,1,left,right_up,no 100 | no-recurrence-events,30-39,premeno,25-29,0-2,no,2,left,left_low,no 101 | no-recurrence-events,40-49,ge40,20-24,0-2,no,3,left,left_low,no 102 | no-recurrence-events,50-59,ge40,30-34,0-2,no,3,right,left_low,no 103 | no-recurrence-events,50-59,premeno,25-29,0-2,no,2,right,right_low,no 104 | no-recurrence-events,40-49,premeno,20-24,0-2,no,2,left,right_low,no 105 | no-recurrence-events,40-49,premeno,10-14,0-2,no,2,right,left_low,no 106 | no-recurrence-events,40-49,premeno,30-34,0-2,no,1,right,left_up,no 107 | no-recurrence-events,40-49,premeno,20-24,0-2,no,2,left,left_up,no 108 | no-recurrence-events,30-39,premeno,40-44,0-2,no,2,right,right_up,no 109 | no-recurrence-events,40-49,premeno,30-34,0-2,no,3,right,right_up,no 110 | no-recurrence-events,60-69,ge40,30-34,0-2,no,1,right,left_up,no 111 | no-recurrence-events,50-59,ge40,25-29,0-2,no,1,left,left_low,no 112 | no-recurrence-events,50-59,ge40,15-19,0-2,no,1,right,central,no 113 | no-recurrence-events,40-49,premeno,20-24,0-2,no,2,right,left_up,no 114 | no-recurrence-events,40-49,premeno,10-14,0-2,no,1,right,left_up,no 115 | no-recurrence-events,40-49,premeno,35-39,0-2,no,2,right,right_up,no 116 | no-recurrence-events,50-59,ge40,20-24,0-2,no,2,right,left_up,no 117 | no-recurrence-events,30-39,premeno,15-19,0-2,no,1,left,left_low,no 118 | no-recurrence-events,40-49,ge40,20-24,0-2,no,3,left,left_up,no 119 | no-recurrence-events,30-39,premeno,10-14,0-2,no,1,right,left_low,no 120 | no-recurrence-events,60-69,ge40,15-19,0-2,no,1,left,right_low,no 121 | no-recurrence-events,60-69,ge40,20-24,0-2,no,1,left,left_low,no 122 | no-recurrence-events,50-59,ge40,15-19,0-2,no,2,right,right_up,no 123 | no-recurrence-events,50-59,ge40,40-44,0-2,no,3,left,left_up,no 124 | no-recurrence-events,50-59,ge40,30-34,0-2,no,1,right,left_low,no 125 | no-recurrence-events,60-69,ge40,10-14,0-2,no,1,right,left_low,no 126 | no-recurrence-events,70-79,ge40,10-14,0-2,no,2,left,central,no 127 | no-recurrence-events,30-39,premeno,30-34,6-8,yes,2,right,right_up,no 128 | no-recurrence-events,30-39,premeno,25-29,6-8,yes,2,right,left_up,yes 129 | no-recurrence-events,50-59,premeno,25-29,0-2,yes,2,left,left_up,no 130 | no-recurrence-events,40-49,premeno,35-39,9-11,yes,2,right,left_up,yes 131 | no-recurrence-events,40-49,premeno,35-39,9-11,yes,2,right,right_up,yes 132 | no-recurrence-events,40-49,premeno,40-44,3-5,yes,3,right,left_up,yes 133 | no-recurrence-events,40-49,premeno,30-34,6-8,no,2,left,left_up,no 134 | no-recurrence-events,50-59,ge40,40-44,0-2,no,3,left,right_up,no 135 | no-recurrence-events,60-69,ge40,30-34,0-2,no,2,left,left_low,yes 136 | no-recurrence-events,30-39,premeno,20-24,3-5,no,2,right,central,no 137 | no-recurrence-events,30-39,premeno,40-44,3-5,no,3,right,right_up,yes 138 | 
no-recurrence-events,40-49,premeno,5-9,0-2,no,1,left,left_low,yes 139 | no-recurrence-events,30-39,premeno,40-44,0-2,no,2,left,left_low,yes 140 | no-recurrence-events,40-49,premeno,30-34,0-2,no,2,left,right_low,no 141 | no-recurrence-events,50-59,ge40,40-44,3-5,yes,2,left,left_low,no 142 | no-recurrence-events,50-59,premeno,20-24,3-5,yes,2,left,left_low,no 143 | no-recurrence-events,60-69,ge40,10-14,0-2,no,1,left,left_up,no 144 | no-recurrence-events,40-49,premeno,45-49,0-2,no,2,left,left_low,yes 145 | no-recurrence-events,60-69,ge40,45-49,6-8,yes,3,left,central,no 146 | no-recurrence-events,40-49,premeno,25-29,0-2,?,2,left,right_low,yes 147 | no-recurrence-events,60-69,ge40,50-54,0-2,no,2,right,left_up,yes 148 | no-recurrence-events,50-59,premeno,30-34,3-5,yes,2,left,left_low,yes 149 | no-recurrence-events,30-39,premeno,20-24,0-2,no,3,left,central,no 150 | no-recurrence-events,50-59,lt40,30-34,0-2,no,3,right,left_up,no 151 | no-recurrence-events,50-59,ge40,25-29,15-17,yes,3,right,left_up,no 152 | no-recurrence-events,60-69,ge40,30-34,3-5,yes,3,left,left_low,no 153 | no-recurrence-events,50-59,ge40,35-39,15-17,no,3,left,left_low,no 154 | no-recurrence-events,60-69,ge40,15-19,0-2,no,3,right,left_up,yes 155 | no-recurrence-events,30-39,lt40,15-19,0-2,no,3,right,left_up,no 156 | no-recurrence-events,60-69,ge40,40-44,3-5,no,2,right,left_up,yes 157 | no-recurrence-events,50-59,ge40,25-29,3-5,yes,3,right,left_up,no 158 | no-recurrence-events,50-59,premeno,30-34,0-2,no,1,left,central,no 159 | no-recurrence-events,50-59,ge40,30-34,0-2,no,1,right,central,no 160 | no-recurrence-events,40-49,premeno,35-39,0-2,no,1,left,left_low,no 161 | no-recurrence-events,40-49,premeno,25-29,0-2,no,3,right,left_up,yes 162 | no-recurrence-events,40-49,premeno,30-34,3-5,yes,2,right,left_low,no 163 | no-recurrence-events,60-69,ge40,10-14,0-2,no,2,right,left_up,yes 164 | no-recurrence-events,60-69,ge40,25-29,3-5,?,1,right,left_up,yes 165 | no-recurrence-events,60-69,ge40,25-29,3-5,?,1,right,left_low,yes 166 | no-recurrence-events,40-49,premeno,20-24,3-5,no,2,right,left_up,no 167 | no-recurrence-events,40-49,premeno,20-24,3-5,no,2,right,left_low,no 168 | no-recurrence-events,40-49,ge40,40-44,15-17,yes,2,right,left_up,yes 169 | no-recurrence-events,50-59,premeno,10-14,0-2,no,2,right,left_up,no 170 | no-recurrence-events,40-49,ge40,30-34,0-2,no,2,left,left_up,yes 171 | no-recurrence-events,30-39,premeno,20-24,3-5,yes,2,right,left_up,yes 172 | no-recurrence-events,30-39,premeno,15-19,0-2,no,1,left,left_low,no 173 | no-recurrence-events,60-69,ge40,30-34,6-8,yes,2,right,right_up,no 174 | no-recurrence-events,50-59,ge40,20-24,3-5,yes,2,right,left_up,no 175 | no-recurrence-events,50-59,premeno,25-29,3-5,yes,2,left,left_low,yes 176 | no-recurrence-events,40-49,premeno,30-34,0-2,no,2,right,right_up,yes 177 | no-recurrence-events,40-49,ge40,25-29,0-2,no,2,left,left_low,no 178 | no-recurrence-events,60-69,ge40,10-14,0-2,no,2,left,left_low,no 179 | no-recurrence-events,50-59,premeno,25-29,3-5,no,2,right,left_up,yes 180 | no-recurrence-events,40-49,premeno,20-24,0-2,no,3,right,left_low,yes 181 | no-recurrence-events,40-49,premeno,35-39,0-2,yes,3,right,left_up,yes 182 | no-recurrence-events,40-49,premeno,35-39,0-2,yes,3,right,left_low,yes 183 | no-recurrence-events,40-49,premeno,25-29,0-2,no,1,right,left_low,yes 184 | no-recurrence-events,50-59,ge40,30-34,9-11,?,3,left,left_up,yes 185 | no-recurrence-events,50-59,ge40,30-34,9-11,?,3,left,left_low,yes 186 | no-recurrence-events,40-49,premeno,20-24,6-8,no,2,right,left_low,yes 187 | 
no-recurrence-events,50-59,ge40,25-29,0-2,no,1,left,right_low,no 188 | no-recurrence-events,60-69,ge40,15-19,0-2,no,2,left,left_up,yes 189 | no-recurrence-events,40-49,premeno,10-14,0-2,no,2,right,left_up,no 190 | no-recurrence-events,50-59,ge40,20-24,0-2,yes,2,right,left_up,no 191 | no-recurrence-events,40-49,premeno,15-19,12-14,no,3,right,right_low,yes 192 | no-recurrence-events,40-49,premeno,25-29,0-2,no,2,left,left_up,yes 193 | no-recurrence-events,50-59,ge40,30-34,6-8,yes,2,left,left_low,no 194 | no-recurrence-events,30-39,premeno,10-14,0-2,no,2,left,right_low,no 195 | no-recurrence-events,50-59,premeno,50-54,0-2,yes,2,right,left_up,yes 196 | no-recurrence-events,50-59,ge40,35-39,0-2,no,2,left,left_up,no 197 | no-recurrence-events,50-59,premeno,10-14,3-5,no,1,right,left_up,no 198 | no-recurrence-events,40-49,premeno,10-14,0-2,no,2,left,left_low,yes 199 | no-recurrence-events,50-59,ge40,15-19,0-2,yes,2,left,central,yes 200 | no-recurrence-events,50-59,premeno,25-29,0-2,no,1,left,left_low,no 201 | no-recurrence-events,60-69,ge40,25-29,0-2,no,3,right,left_low,no 202 | recurrence-events,50-59,premeno,15-19,0-2,no,2,left,left_low,no 203 | recurrence-events,40-49,premeno,40-44,0-2,no,1,left,left_low,no 204 | recurrence-events,50-59,ge40,35-39,0-2,no,2,left,left_low,no 205 | recurrence-events,50-59,premeno,25-29,0-2,no,2,left,right_up,no 206 | recurrence-events,30-39,premeno,0-4,0-2,no,2,right,central,no 207 | recurrence-events,50-59,ge40,30-34,0-2,no,3,left,?,no 208 | recurrence-events,50-59,premeno,25-29,0-2,no,2,left,right_up,no 209 | recurrence-events,50-59,premeno,30-34,0-2,no,3,left,right_up,no 210 | recurrence-events,40-49,premeno,35-39,0-2,no,1,right,left_up,no 211 | recurrence-events,40-49,premeno,20-24,0-2,no,2,left,left_low,no 212 | recurrence-events,50-59,ge40,20-24,0-2,no,2,right,central,no 213 | recurrence-events,40-49,premeno,30-34,0-2,no,3,right,right_up,no 214 | recurrence-events,50-59,premeno,25-29,0-2,no,1,right,left_up,no 215 | recurrence-events,60-69,ge40,40-44,0-2,no,2,right,left_low,no 216 | recurrence-events,40-49,ge40,20-24,0-2,no,2,right,left_up,no 217 | recurrence-events,50-59,ge40,20-24,0-2,no,2,left,left_up,no 218 | recurrence-events,40-49,premeno,15-19,0-2,no,2,left,left_up,no 219 | recurrence-events,60-69,ge40,30-34,0-2,no,3,right,central,no 220 | recurrence-events,30-39,premeno,15-19,0-2,no,1,right,left_low,no 221 | recurrence-events,40-49,premeno,25-29,0-2,no,3,left,right_up,no 222 | recurrence-events,30-39,premeno,30-34,0-2,no,1,right,left_up,no 223 | recurrence-events,60-69,ge40,25-29,0-2,no,3,left,right_low,yes 224 | recurrence-events,60-69,ge40,20-24,0-2,no,3,right,left_low,no 225 | recurrence-events,30-39,premeno,25-29,3-5,yes,3,left,left_low,yes 226 | recurrence-events,40-49,ge40,20-24,3-5,no,3,right,left_low,yes 227 | recurrence-events,40-49,premeno,30-34,15-17,yes,3,left,left_low,no 228 | recurrence-events,50-59,premeno,30-34,0-2,no,3,right,left_up,yes 229 | recurrence-events,60-69,ge40,40-44,3-5,yes,3,right,left_low,no 230 | recurrence-events,60-69,ge40,45-49,0-2,no,1,right,right_up,yes 231 | recurrence-events,50-59,premeno,50-54,9-11,yes,2,right,left_up,no 232 | recurrence-events,40-49,premeno,30-34,3-5,no,2,right,left_up,no 233 | recurrence-events,30-39,premeno,30-34,3-5,no,3,right,left_up,yes 234 | recurrence-events,70-79,ge40,15-19,9-11,?,1,left,left_low,yes 235 | recurrence-events,60-69,ge40,30-34,0-2,no,3,right,left_up,yes 236 | recurrence-events,50-59,premeno,25-29,3-5,yes,3,left,left_low,yes 237 | 
recurrence-events,40-49,premeno,25-29,0-2,no,2,right,left_low,no 238 | recurrence-events,40-49,premeno,25-29,0-2,no,2,right,left_low,no 239 | recurrence-events,30-39,premeno,35-39,0-2,no,3,left,left_low,no 240 | recurrence-events,40-49,premeno,20-24,3-5,yes,2,right,right_up,yes 241 | recurrence-events,60-69,ge40,20-24,3-5,no,2,left,left_low,yes 242 | recurrence-events,40-49,premeno,15-19,15-17,yes,3,left,left_low,no 243 | recurrence-events,50-59,ge40,25-29,6-8,no,3,left,left_low,yes 244 | recurrence-events,50-59,ge40,20-24,3-5,yes,3,right,right_up,no 245 | recurrence-events,40-49,premeno,30-34,12-14,yes,3,left,left_up,yes 246 | recurrence-events,30-39,premeno,30-34,9-11,no,2,right,left_up,yes 247 | recurrence-events,30-39,premeno,15-19,6-8,yes,3,left,left_low,yes 248 | recurrence-events,50-59,ge40,30-34,9-11,yes,3,left,right_low,yes 249 | recurrence-events,60-69,ge40,35-39,6-8,yes,3,left,left_low,no 250 | recurrence-events,30-39,premeno,20-24,3-5,yes,2,left,left_low,no 251 | recurrence-events,40-49,premeno,25-29,0-2,no,3,left,left_up,no 252 | recurrence-events,40-49,premeno,50-54,0-2,no,2,right,left_low,yes 253 | recurrence-events,30-39,premeno,40-44,0-2,no,1,left,left_up,no 254 | recurrence-events,60-69,ge40,50-54,0-2,no,3,right,left_up,no 255 | recurrence-events,40-49,premeno,30-34,0-2,yes,3,right,right_up,no 256 | recurrence-events,40-49,premeno,30-34,6-8,yes,3,right,left_up,no 257 | recurrence-events,40-49,premeno,30-34,0-2,no,1,left,left_low,yes 258 | recurrence-events,40-49,premeno,20-24,3-5,yes,2,left,left_low,yes 259 | recurrence-events,50-59,ge40,30-34,6-8,yes,2,left,right_low,yes 260 | recurrence-events,50-59,ge40,30-34,3-5,no,3,right,left_up,no 261 | recurrence-events,60-69,ge40,25-29,3-5,no,2,right,right_up,no 262 | recurrence-events,40-49,ge40,25-29,12-14,yes,3,left,right_low,yes 263 | recurrence-events,60-69,ge40,25-29,0-2,no,3,left,left_up,no 264 | recurrence-events,50-59,lt40,20-24,0-2,?,1,left,left_up,no 265 | recurrence-events,50-59,lt40,20-24,0-2,?,1,left,left_low,no 266 | recurrence-events,30-39,premeno,35-39,9-11,yes,3,left,left_low,no 267 | recurrence-events,40-49,premeno,30-34,3-5,yes,2,left,right_up,no 268 | recurrence-events,60-69,ge40,20-24,24-26,yes,3,left,left_low,yes 269 | recurrence-events,30-39,premeno,35-39,0-2,no,3,left,left_low,no 270 | recurrence-events,40-49,premeno,25-29,0-2,no,2,left,left_low,yes 271 | recurrence-events,50-59,ge40,30-34,6-8,yes,3,left,right_low,no 272 | recurrence-events,50-59,premeno,25-29,0-2,no,3,right,left_low,yes 273 | recurrence-events,40-49,premeno,15-19,0-2,yes,3,right,left_up,no 274 | recurrence-events,60-69,ge40,30-34,0-2,yes,2,right,right_up,yes 275 | recurrence-events,60-69,ge40,30-34,3-5,yes,2,left,central,yes 276 | recurrence-events,40-49,premeno,25-29,9-11,yes,3,right,left_up,no 277 | recurrence-events,30-39,premeno,25-29,6-8,yes,3,left,right_low,yes 278 | recurrence-events,60-69,ge40,10-14,6-8,yes,3,left,left_up,yes 279 | recurrence-events,50-59,premeno,35-39,15-17,yes,3,right,right_up,no 280 | recurrence-events,50-59,ge40,40-44,6-8,yes,3,left,left_low,yes 281 | recurrence-events,50-59,ge40,40-44,6-8,yes,3,left,left_low,yes 282 | recurrence-events,30-39,premeno,30-34,0-2,no,2,left,left_up,no 283 | recurrence-events,30-39,premeno,20-24,0-2,no,3,left,left_up,yes 284 | recurrence-events,60-69,ge40,20-24,0-2,no,1,right,left_up,no 285 | recurrence-events,40-49,ge40,30-34,3-5,no,3,left,left_low,no 286 | recurrence-events,50-59,ge40,30-34,3-5,no,3,left,left_low,no 287 | 
-------------------------------------------------------------------------------- /uci/diabetes.csv: -------------------------------------------------------------------------------- 1 | Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome 2 | 6,148,72,35,0,33.6,0.627,50,1 3 | 1,85,66,29,0,26.6,0.351,31,0 4 | 8,183,64,0,0,23.3,0.672,32,1 5 | 1,89,66,23,94,28.1,0.167,21,0 6 | 0,137,40,35,168,43.1,2.288,33,1 7 | 5,116,74,0,0,25.6,0.201,30,0 8 | 3,78,50,32,88,31,0.248,26,1 9 | 10,115,0,0,0,35.3,0.134,29,0 10 | 2,197,70,45,543,30.5,0.158,53,1 11 | 8,125,96,0,0,0,0.232,54,1 12 | 4,110,92,0,0,37.6,0.191,30,0 13 | 10,168,74,0,0,38,0.537,34,1 14 | 10,139,80,0,0,27.1,1.441,57,0 15 | 1,189,60,23,846,30.1,0.398,59,1 16 | 5,166,72,19,175,25.8,0.587,51,1 17 | 7,100,0,0,0,30,0.484,32,1 18 | 0,118,84,47,230,45.8,0.551,31,1 19 | 7,107,74,0,0,29.6,0.254,31,1 20 | 1,103,30,38,83,43.3,0.183,33,0 21 | 1,115,70,30,96,34.6,0.529,32,1 22 | 3,126,88,41,235,39.3,0.704,27,0 23 | 8,99,84,0,0,35.4,0.388,50,0 24 | 7,196,90,0,0,39.8,0.451,41,1 25 | 9,119,80,35,0,29,0.263,29,1 26 | 11,143,94,33,146,36.6,0.254,51,1 27 | 10,125,70,26,115,31.1,0.205,41,1 28 | 7,147,76,0,0,39.4,0.257,43,1 29 | 1,97,66,15,140,23.2,0.487,22,0 30 | 13,145,82,19,110,22.2,0.245,57,0 31 | 5,117,92,0,0,34.1,0.337,38,0 32 | 5,109,75,26,0,36,0.546,60,0 33 | 3,158,76,36,245,31.6,0.851,28,1 34 | 3,88,58,11,54,24.8,0.267,22,0 35 | 6,92,92,0,0,19.9,0.188,28,0 36 | 10,122,78,31,0,27.6,0.512,45,0 37 | 4,103,60,33,192,24,0.966,33,0 38 | 11,138,76,0,0,33.2,0.42,35,0 39 | 9,102,76,37,0,32.9,0.665,46,1 40 | 2,90,68,42,0,38.2,0.503,27,1 41 | 4,111,72,47,207,37.1,1.39,56,1 42 | 3,180,64,25,70,34,0.271,26,0 43 | 7,133,84,0,0,40.2,0.696,37,0 44 | 7,106,92,18,0,22.7,0.235,48,0 45 | 9,171,110,24,240,45.4,0.721,54,1 46 | 7,159,64,0,0,27.4,0.294,40,0 47 | 0,180,66,39,0,42,1.893,25,1 48 | 1,146,56,0,0,29.7,0.564,29,0 49 | 2,71,70,27,0,28,0.586,22,0 50 | 7,103,66,32,0,39.1,0.344,31,1 51 | 7,105,0,0,0,0,0.305,24,0 52 | 1,103,80,11,82,19.4,0.491,22,0 53 | 1,101,50,15,36,24.2,0.526,26,0 54 | 5,88,66,21,23,24.4,0.342,30,0 55 | 8,176,90,34,300,33.7,0.467,58,1 56 | 7,150,66,42,342,34.7,0.718,42,0 57 | 1,73,50,10,0,23,0.248,21,0 58 | 7,187,68,39,304,37.7,0.254,41,1 59 | 0,100,88,60,110,46.8,0.962,31,0 60 | 0,146,82,0,0,40.5,1.781,44,0 61 | 0,105,64,41,142,41.5,0.173,22,0 62 | 2,84,0,0,0,0,0.304,21,0 63 | 8,133,72,0,0,32.9,0.27,39,1 64 | 5,44,62,0,0,25,0.587,36,0 65 | 2,141,58,34,128,25.4,0.699,24,0 66 | 7,114,66,0,0,32.8,0.258,42,1 67 | 5,99,74,27,0,29,0.203,32,0 68 | 0,109,88,30,0,32.5,0.855,38,1 69 | 2,109,92,0,0,42.7,0.845,54,0 70 | 1,95,66,13,38,19.6,0.334,25,0 71 | 4,146,85,27,100,28.9,0.189,27,0 72 | 2,100,66,20,90,32.9,0.867,28,1 73 | 5,139,64,35,140,28.6,0.411,26,0 74 | 13,126,90,0,0,43.4,0.583,42,1 75 | 4,129,86,20,270,35.1,0.231,23,0 76 | 1,79,75,30,0,32,0.396,22,0 77 | 1,0,48,20,0,24.7,0.14,22,0 78 | 7,62,78,0,0,32.6,0.391,41,0 79 | 5,95,72,33,0,37.7,0.37,27,0 80 | 0,131,0,0,0,43.2,0.27,26,1 81 | 2,112,66,22,0,25,0.307,24,0 82 | 3,113,44,13,0,22.4,0.14,22,0 83 | 2,74,0,0,0,0,0.102,22,0 84 | 7,83,78,26,71,29.3,0.767,36,0 85 | 0,101,65,28,0,24.6,0.237,22,0 86 | 5,137,108,0,0,48.8,0.227,37,1 87 | 2,110,74,29,125,32.4,0.698,27,0 88 | 13,106,72,54,0,36.6,0.178,45,0 89 | 2,100,68,25,71,38.5,0.324,26,0 90 | 15,136,70,32,110,37.1,0.153,43,1 91 | 1,107,68,19,0,26.5,0.165,24,0 92 | 1,80,55,0,0,19.1,0.258,21,0 93 | 4,123,80,15,176,32,0.443,34,0 94 | 7,81,78,40,48,46.7,0.261,42,0 95 | 4,134,72,0,0,23.8,0.277,60,1 96 | 
2,142,82,18,64,24.7,0.761,21,0 97 | 6,144,72,27,228,33.9,0.255,40,0 98 | 2,92,62,28,0,31.6,0.13,24,0 99 | 1,71,48,18,76,20.4,0.323,22,0 100 | 6,93,50,30,64,28.7,0.356,23,0 101 | 1,122,90,51,220,49.7,0.325,31,1 102 | 1,163,72,0,0,39,1.222,33,1 103 | 1,151,60,0,0,26.1,0.179,22,0 104 | 0,125,96,0,0,22.5,0.262,21,0 105 | 1,81,72,18,40,26.6,0.283,24,0 106 | 2,85,65,0,0,39.6,0.93,27,0 107 | 1,126,56,29,152,28.7,0.801,21,0 108 | 1,96,122,0,0,22.4,0.207,27,0 109 | 4,144,58,28,140,29.5,0.287,37,0 110 | 3,83,58,31,18,34.3,0.336,25,0 111 | 0,95,85,25,36,37.4,0.247,24,1 112 | 3,171,72,33,135,33.3,0.199,24,1 113 | 8,155,62,26,495,34,0.543,46,1 114 | 1,89,76,34,37,31.2,0.192,23,0 115 | 4,76,62,0,0,34,0.391,25,0 116 | 7,160,54,32,175,30.5,0.588,39,1 117 | 4,146,92,0,0,31.2,0.539,61,1 118 | 5,124,74,0,0,34,0.22,38,1 119 | 5,78,48,0,0,33.7,0.654,25,0 120 | 4,97,60,23,0,28.2,0.443,22,0 121 | 4,99,76,15,51,23.2,0.223,21,0 122 | 0,162,76,56,100,53.2,0.759,25,1 123 | 6,111,64,39,0,34.2,0.26,24,0 124 | 2,107,74,30,100,33.6,0.404,23,0 125 | 5,132,80,0,0,26.8,0.186,69,0 126 | 0,113,76,0,0,33.3,0.278,23,1 127 | 1,88,30,42,99,55,0.496,26,1 128 | 3,120,70,30,135,42.9,0.452,30,0 129 | 1,118,58,36,94,33.3,0.261,23,0 130 | 1,117,88,24,145,34.5,0.403,40,1 131 | 0,105,84,0,0,27.9,0.741,62,1 132 | 4,173,70,14,168,29.7,0.361,33,1 133 | 9,122,56,0,0,33.3,1.114,33,1 134 | 3,170,64,37,225,34.5,0.356,30,1 135 | 8,84,74,31,0,38.3,0.457,39,0 136 | 2,96,68,13,49,21.1,0.647,26,0 137 | 2,125,60,20,140,33.8,0.088,31,0 138 | 0,100,70,26,50,30.8,0.597,21,0 139 | 0,93,60,25,92,28.7,0.532,22,0 140 | 0,129,80,0,0,31.2,0.703,29,0 141 | 5,105,72,29,325,36.9,0.159,28,0 142 | 3,128,78,0,0,21.1,0.268,55,0 143 | 5,106,82,30,0,39.5,0.286,38,0 144 | 2,108,52,26,63,32.5,0.318,22,0 145 | 10,108,66,0,0,32.4,0.272,42,1 146 | 4,154,62,31,284,32.8,0.237,23,0 147 | 0,102,75,23,0,0,0.572,21,0 148 | 9,57,80,37,0,32.8,0.096,41,0 149 | 2,106,64,35,119,30.5,1.4,34,0 150 | 5,147,78,0,0,33.7,0.218,65,0 151 | 2,90,70,17,0,27.3,0.085,22,0 152 | 1,136,74,50,204,37.4,0.399,24,0 153 | 4,114,65,0,0,21.9,0.432,37,0 154 | 9,156,86,28,155,34.3,1.189,42,1 155 | 1,153,82,42,485,40.6,0.687,23,0 156 | 8,188,78,0,0,47.9,0.137,43,1 157 | 7,152,88,44,0,50,0.337,36,1 158 | 2,99,52,15,94,24.6,0.637,21,0 159 | 1,109,56,21,135,25.2,0.833,23,0 160 | 2,88,74,19,53,29,0.229,22,0 161 | 17,163,72,41,114,40.9,0.817,47,1 162 | 4,151,90,38,0,29.7,0.294,36,0 163 | 7,102,74,40,105,37.2,0.204,45,0 164 | 0,114,80,34,285,44.2,0.167,27,0 165 | 2,100,64,23,0,29.7,0.368,21,0 166 | 0,131,88,0,0,31.6,0.743,32,1 167 | 6,104,74,18,156,29.9,0.722,41,1 168 | 3,148,66,25,0,32.5,0.256,22,0 169 | 4,120,68,0,0,29.6,0.709,34,0 170 | 4,110,66,0,0,31.9,0.471,29,0 171 | 3,111,90,12,78,28.4,0.495,29,0 172 | 6,102,82,0,0,30.8,0.18,36,1 173 | 6,134,70,23,130,35.4,0.542,29,1 174 | 2,87,0,23,0,28.9,0.773,25,0 175 | 1,79,60,42,48,43.5,0.678,23,0 176 | 2,75,64,24,55,29.7,0.37,33,0 177 | 8,179,72,42,130,32.7,0.719,36,1 178 | 6,85,78,0,0,31.2,0.382,42,0 179 | 0,129,110,46,130,67.1,0.319,26,1 180 | 5,143,78,0,0,45,0.19,47,0 181 | 5,130,82,0,0,39.1,0.956,37,1 182 | 6,87,80,0,0,23.2,0.084,32,0 183 | 0,119,64,18,92,34.9,0.725,23,0 184 | 1,0,74,20,23,27.7,0.299,21,0 185 | 5,73,60,0,0,26.8,0.268,27,0 186 | 4,141,74,0,0,27.6,0.244,40,0 187 | 7,194,68,28,0,35.9,0.745,41,1 188 | 8,181,68,36,495,30.1,0.615,60,1 189 | 1,128,98,41,58,32,1.321,33,1 190 | 8,109,76,39,114,27.9,0.64,31,1 191 | 5,139,80,35,160,31.6,0.361,25,1 192 | 3,111,62,0,0,22.6,0.142,21,0 193 | 9,123,70,44,94,33.1,0.374,40,0 194 | 7,159,66,0,0,30.4,0.383,36,1 
195 | 11,135,0,0,0,52.3,0.578,40,1 196 | 8,85,55,20,0,24.4,0.136,42,0 197 | 5,158,84,41,210,39.4,0.395,29,1 198 | 1,105,58,0,0,24.3,0.187,21,0 199 | 3,107,62,13,48,22.9,0.678,23,1 200 | 4,109,64,44,99,34.8,0.905,26,1 201 | 4,148,60,27,318,30.9,0.15,29,1 202 | 0,113,80,16,0,31,0.874,21,0 203 | 1,138,82,0,0,40.1,0.236,28,0 204 | 0,108,68,20,0,27.3,0.787,32,0 205 | 2,99,70,16,44,20.4,0.235,27,0 206 | 6,103,72,32,190,37.7,0.324,55,0 207 | 5,111,72,28,0,23.9,0.407,27,0 208 | 8,196,76,29,280,37.5,0.605,57,1 209 | 5,162,104,0,0,37.7,0.151,52,1 210 | 1,96,64,27,87,33.2,0.289,21,0 211 | 7,184,84,33,0,35.5,0.355,41,1 212 | 2,81,60,22,0,27.7,0.29,25,0 213 | 0,147,85,54,0,42.8,0.375,24,0 214 | 7,179,95,31,0,34.2,0.164,60,0 215 | 0,140,65,26,130,42.6,0.431,24,1 216 | 9,112,82,32,175,34.2,0.26,36,1 217 | 12,151,70,40,271,41.8,0.742,38,1 218 | 5,109,62,41,129,35.8,0.514,25,1 219 | 6,125,68,30,120,30,0.464,32,0 220 | 5,85,74,22,0,29,1.224,32,1 221 | 5,112,66,0,0,37.8,0.261,41,1 222 | 0,177,60,29,478,34.6,1.072,21,1 223 | 2,158,90,0,0,31.6,0.805,66,1 224 | 7,119,0,0,0,25.2,0.209,37,0 225 | 7,142,60,33,190,28.8,0.687,61,0 226 | 1,100,66,15,56,23.6,0.666,26,0 227 | 1,87,78,27,32,34.6,0.101,22,0 228 | 0,101,76,0,0,35.7,0.198,26,0 229 | 3,162,52,38,0,37.2,0.652,24,1 230 | 4,197,70,39,744,36.7,2.329,31,0 231 | 0,117,80,31,53,45.2,0.089,24,0 232 | 4,142,86,0,0,44,0.645,22,1 233 | 6,134,80,37,370,46.2,0.238,46,1 234 | 1,79,80,25,37,25.4,0.583,22,0 235 | 4,122,68,0,0,35,0.394,29,0 236 | 3,74,68,28,45,29.7,0.293,23,0 237 | 4,171,72,0,0,43.6,0.479,26,1 238 | 7,181,84,21,192,35.9,0.586,51,1 239 | 0,179,90,27,0,44.1,0.686,23,1 240 | 9,164,84,21,0,30.8,0.831,32,1 241 | 0,104,76,0,0,18.4,0.582,27,0 242 | 1,91,64,24,0,29.2,0.192,21,0 243 | 4,91,70,32,88,33.1,0.446,22,0 244 | 3,139,54,0,0,25.6,0.402,22,1 245 | 6,119,50,22,176,27.1,1.318,33,1 246 | 2,146,76,35,194,38.2,0.329,29,0 247 | 9,184,85,15,0,30,1.213,49,1 248 | 10,122,68,0,0,31.2,0.258,41,0 249 | 0,165,90,33,680,52.3,0.427,23,0 250 | 9,124,70,33,402,35.4,0.282,34,0 251 | 1,111,86,19,0,30.1,0.143,23,0 252 | 9,106,52,0,0,31.2,0.38,42,0 253 | 2,129,84,0,0,28,0.284,27,0 254 | 2,90,80,14,55,24.4,0.249,24,0 255 | 0,86,68,32,0,35.8,0.238,25,0 256 | 12,92,62,7,258,27.6,0.926,44,1 257 | 1,113,64,35,0,33.6,0.543,21,1 258 | 3,111,56,39,0,30.1,0.557,30,0 259 | 2,114,68,22,0,28.7,0.092,25,0 260 | 1,193,50,16,375,25.9,0.655,24,0 261 | 11,155,76,28,150,33.3,1.353,51,1 262 | 3,191,68,15,130,30.9,0.299,34,0 263 | 3,141,0,0,0,30,0.761,27,1 264 | 4,95,70,32,0,32.1,0.612,24,0 265 | 3,142,80,15,0,32.4,0.2,63,0 266 | 4,123,62,0,0,32,0.226,35,1 267 | 5,96,74,18,67,33.6,0.997,43,0 268 | 0,138,0,0,0,36.3,0.933,25,1 269 | 2,128,64,42,0,40,1.101,24,0 270 | 0,102,52,0,0,25.1,0.078,21,0 271 | 2,146,0,0,0,27.5,0.24,28,1 272 | 10,101,86,37,0,45.6,1.136,38,1 273 | 2,108,62,32,56,25.2,0.128,21,0 274 | 3,122,78,0,0,23,0.254,40,0 275 | 1,71,78,50,45,33.2,0.422,21,0 276 | 13,106,70,0,0,34.2,0.251,52,0 277 | 2,100,70,52,57,40.5,0.677,25,0 278 | 7,106,60,24,0,26.5,0.296,29,1 279 | 0,104,64,23,116,27.8,0.454,23,0 280 | 5,114,74,0,0,24.9,0.744,57,0 281 | 2,108,62,10,278,25.3,0.881,22,0 282 | 0,146,70,0,0,37.9,0.334,28,1 283 | 10,129,76,28,122,35.9,0.28,39,0 284 | 7,133,88,15,155,32.4,0.262,37,0 285 | 7,161,86,0,0,30.4,0.165,47,1 286 | 2,108,80,0,0,27,0.259,52,1 287 | 7,136,74,26,135,26,0.647,51,0 288 | 5,155,84,44,545,38.7,0.619,34,0 289 | 1,119,86,39,220,45.6,0.808,29,1 290 | 4,96,56,17,49,20.8,0.34,26,0 291 | 5,108,72,43,75,36.1,0.263,33,0 292 | 0,78,88,29,40,36.9,0.434,21,0 293 | 
0,107,62,30,74,36.6,0.757,25,1 294 | 2,128,78,37,182,43.3,1.224,31,1 295 | 1,128,48,45,194,40.5,0.613,24,1 296 | 0,161,50,0,0,21.9,0.254,65,0 297 | 6,151,62,31,120,35.5,0.692,28,0 298 | 2,146,70,38,360,28,0.337,29,1 299 | 0,126,84,29,215,30.7,0.52,24,0 300 | 14,100,78,25,184,36.6,0.412,46,1 301 | 8,112,72,0,0,23.6,0.84,58,0 302 | 0,167,0,0,0,32.3,0.839,30,1 303 | 2,144,58,33,135,31.6,0.422,25,1 304 | 5,77,82,41,42,35.8,0.156,35,0 305 | 5,115,98,0,0,52.9,0.209,28,1 306 | 3,150,76,0,0,21,0.207,37,0 307 | 2,120,76,37,105,39.7,0.215,29,0 308 | 10,161,68,23,132,25.5,0.326,47,1 309 | 0,137,68,14,148,24.8,0.143,21,0 310 | 0,128,68,19,180,30.5,1.391,25,1 311 | 2,124,68,28,205,32.9,0.875,30,1 312 | 6,80,66,30,0,26.2,0.313,41,0 313 | 0,106,70,37,148,39.4,0.605,22,0 314 | 2,155,74,17,96,26.6,0.433,27,1 315 | 3,113,50,10,85,29.5,0.626,25,0 316 | 7,109,80,31,0,35.9,1.127,43,1 317 | 2,112,68,22,94,34.1,0.315,26,0 318 | 3,99,80,11,64,19.3,0.284,30,0 319 | 3,182,74,0,0,30.5,0.345,29,1 320 | 3,115,66,39,140,38.1,0.15,28,0 321 | 6,194,78,0,0,23.5,0.129,59,1 322 | 4,129,60,12,231,27.5,0.527,31,0 323 | 3,112,74,30,0,31.6,0.197,25,1 324 | 0,124,70,20,0,27.4,0.254,36,1 325 | 13,152,90,33,29,26.8,0.731,43,1 326 | 2,112,75,32,0,35.7,0.148,21,0 327 | 1,157,72,21,168,25.6,0.123,24,0 328 | 1,122,64,32,156,35.1,0.692,30,1 329 | 10,179,70,0,0,35.1,0.2,37,0 330 | 2,102,86,36,120,45.5,0.127,23,1 331 | 6,105,70,32,68,30.8,0.122,37,0 332 | 8,118,72,19,0,23.1,1.476,46,0 333 | 2,87,58,16,52,32.7,0.166,25,0 334 | 1,180,0,0,0,43.3,0.282,41,1 335 | 12,106,80,0,0,23.6,0.137,44,0 336 | 1,95,60,18,58,23.9,0.26,22,0 337 | 0,165,76,43,255,47.9,0.259,26,0 338 | 0,117,0,0,0,33.8,0.932,44,0 339 | 5,115,76,0,0,31.2,0.343,44,1 340 | 9,152,78,34,171,34.2,0.893,33,1 341 | 7,178,84,0,0,39.9,0.331,41,1 342 | 1,130,70,13,105,25.9,0.472,22,0 343 | 1,95,74,21,73,25.9,0.673,36,0 344 | 1,0,68,35,0,32,0.389,22,0 345 | 5,122,86,0,0,34.7,0.29,33,0 346 | 8,95,72,0,0,36.8,0.485,57,0 347 | 8,126,88,36,108,38.5,0.349,49,0 348 | 1,139,46,19,83,28.7,0.654,22,0 349 | 3,116,0,0,0,23.5,0.187,23,0 350 | 3,99,62,19,74,21.8,0.279,26,0 351 | 5,0,80,32,0,41,0.346,37,1 352 | 4,92,80,0,0,42.2,0.237,29,0 353 | 4,137,84,0,0,31.2,0.252,30,0 354 | 3,61,82,28,0,34.4,0.243,46,0 355 | 1,90,62,12,43,27.2,0.58,24,0 356 | 3,90,78,0,0,42.7,0.559,21,0 357 | 9,165,88,0,0,30.4,0.302,49,1 358 | 1,125,50,40,167,33.3,0.962,28,1 359 | 13,129,0,30,0,39.9,0.569,44,1 360 | 12,88,74,40,54,35.3,0.378,48,0 361 | 1,196,76,36,249,36.5,0.875,29,1 362 | 5,189,64,33,325,31.2,0.583,29,1 363 | 5,158,70,0,0,29.8,0.207,63,0 364 | 5,103,108,37,0,39.2,0.305,65,0 365 | 4,146,78,0,0,38.5,0.52,67,1 366 | 4,147,74,25,293,34.9,0.385,30,0 367 | 5,99,54,28,83,34,0.499,30,0 368 | 6,124,72,0,0,27.6,0.368,29,1 369 | 0,101,64,17,0,21,0.252,21,0 370 | 3,81,86,16,66,27.5,0.306,22,0 371 | 1,133,102,28,140,32.8,0.234,45,1 372 | 3,173,82,48,465,38.4,2.137,25,1 373 | 0,118,64,23,89,0,1.731,21,0 374 | 0,84,64,22,66,35.8,0.545,21,0 375 | 2,105,58,40,94,34.9,0.225,25,0 376 | 2,122,52,43,158,36.2,0.816,28,0 377 | 12,140,82,43,325,39.2,0.528,58,1 378 | 0,98,82,15,84,25.2,0.299,22,0 379 | 1,87,60,37,75,37.2,0.509,22,0 380 | 4,156,75,0,0,48.3,0.238,32,1 381 | 0,93,100,39,72,43.4,1.021,35,0 382 | 1,107,72,30,82,30.8,0.821,24,0 383 | 0,105,68,22,0,20,0.236,22,0 384 | 1,109,60,8,182,25.4,0.947,21,0 385 | 1,90,62,18,59,25.1,1.268,25,0 386 | 1,125,70,24,110,24.3,0.221,25,0 387 | 1,119,54,13,50,22.3,0.205,24,0 388 | 5,116,74,29,0,32.3,0.66,35,1 389 | 8,105,100,36,0,43.3,0.239,45,1 390 | 5,144,82,26,285,32,0.452,58,1 391 | 
3,100,68,23,81,31.6,0.949,28,0 392 | 1,100,66,29,196,32,0.444,42,0 393 | 5,166,76,0,0,45.7,0.34,27,1 394 | 1,131,64,14,415,23.7,0.389,21,0 395 | 4,116,72,12,87,22.1,0.463,37,0 396 | 4,158,78,0,0,32.9,0.803,31,1 397 | 2,127,58,24,275,27.7,1.6,25,0 398 | 3,96,56,34,115,24.7,0.944,39,0 399 | 0,131,66,40,0,34.3,0.196,22,1 400 | 3,82,70,0,0,21.1,0.389,25,0 401 | 3,193,70,31,0,34.9,0.241,25,1 402 | 4,95,64,0,0,32,0.161,31,1 403 | 6,137,61,0,0,24.2,0.151,55,0 404 | 5,136,84,41,88,35,0.286,35,1 405 | 9,72,78,25,0,31.6,0.28,38,0 406 | 5,168,64,0,0,32.9,0.135,41,1 407 | 2,123,48,32,165,42.1,0.52,26,0 408 | 4,115,72,0,0,28.9,0.376,46,1 409 | 0,101,62,0,0,21.9,0.336,25,0 410 | 8,197,74,0,0,25.9,1.191,39,1 411 | 1,172,68,49,579,42.4,0.702,28,1 412 | 6,102,90,39,0,35.7,0.674,28,0 413 | 1,112,72,30,176,34.4,0.528,25,0 414 | 1,143,84,23,310,42.4,1.076,22,0 415 | 1,143,74,22,61,26.2,0.256,21,0 416 | 0,138,60,35,167,34.6,0.534,21,1 417 | 3,173,84,33,474,35.7,0.258,22,1 418 | 1,97,68,21,0,27.2,1.095,22,0 419 | 4,144,82,32,0,38.5,0.554,37,1 420 | 1,83,68,0,0,18.2,0.624,27,0 421 | 3,129,64,29,115,26.4,0.219,28,1 422 | 1,119,88,41,170,45.3,0.507,26,0 423 | 2,94,68,18,76,26,0.561,21,0 424 | 0,102,64,46,78,40.6,0.496,21,0 425 | 2,115,64,22,0,30.8,0.421,21,0 426 | 8,151,78,32,210,42.9,0.516,36,1 427 | 4,184,78,39,277,37,0.264,31,1 428 | 0,94,0,0,0,0,0.256,25,0 429 | 1,181,64,30,180,34.1,0.328,38,1 430 | 0,135,94,46,145,40.6,0.284,26,0 431 | 1,95,82,25,180,35,0.233,43,1 432 | 2,99,0,0,0,22.2,0.108,23,0 433 | 3,89,74,16,85,30.4,0.551,38,0 434 | 1,80,74,11,60,30,0.527,22,0 435 | 2,139,75,0,0,25.6,0.167,29,0 436 | 1,90,68,8,0,24.5,1.138,36,0 437 | 0,141,0,0,0,42.4,0.205,29,1 438 | 12,140,85,33,0,37.4,0.244,41,0 439 | 5,147,75,0,0,29.9,0.434,28,0 440 | 1,97,70,15,0,18.2,0.147,21,0 441 | 6,107,88,0,0,36.8,0.727,31,0 442 | 0,189,104,25,0,34.3,0.435,41,1 443 | 2,83,66,23,50,32.2,0.497,22,0 444 | 4,117,64,27,120,33.2,0.23,24,0 445 | 8,108,70,0,0,30.5,0.955,33,1 446 | 4,117,62,12,0,29.7,0.38,30,1 447 | 0,180,78,63,14,59.4,2.42,25,1 448 | 1,100,72,12,70,25.3,0.658,28,0 449 | 0,95,80,45,92,36.5,0.33,26,0 450 | 0,104,64,37,64,33.6,0.51,22,1 451 | 0,120,74,18,63,30.5,0.285,26,0 452 | 1,82,64,13,95,21.2,0.415,23,0 453 | 2,134,70,0,0,28.9,0.542,23,1 454 | 0,91,68,32,210,39.9,0.381,25,0 455 | 2,119,0,0,0,19.6,0.832,72,0 456 | 2,100,54,28,105,37.8,0.498,24,0 457 | 14,175,62,30,0,33.6,0.212,38,1 458 | 1,135,54,0,0,26.7,0.687,62,0 459 | 5,86,68,28,71,30.2,0.364,24,0 460 | 10,148,84,48,237,37.6,1.001,51,1 461 | 9,134,74,33,60,25.9,0.46,81,0 462 | 9,120,72,22,56,20.8,0.733,48,0 463 | 1,71,62,0,0,21.8,0.416,26,0 464 | 8,74,70,40,49,35.3,0.705,39,0 465 | 5,88,78,30,0,27.6,0.258,37,0 466 | 10,115,98,0,0,24,1.022,34,0 467 | 0,124,56,13,105,21.8,0.452,21,0 468 | 0,74,52,10,36,27.8,0.269,22,0 469 | 0,97,64,36,100,36.8,0.6,25,0 470 | 8,120,0,0,0,30,0.183,38,1 471 | 6,154,78,41,140,46.1,0.571,27,0 472 | 1,144,82,40,0,41.3,0.607,28,0 473 | 0,137,70,38,0,33.2,0.17,22,0 474 | 0,119,66,27,0,38.8,0.259,22,0 475 | 7,136,90,0,0,29.9,0.21,50,0 476 | 4,114,64,0,0,28.9,0.126,24,0 477 | 0,137,84,27,0,27.3,0.231,59,0 478 | 2,105,80,45,191,33.7,0.711,29,1 479 | 7,114,76,17,110,23.8,0.466,31,0 480 | 8,126,74,38,75,25.9,0.162,39,0 481 | 4,132,86,31,0,28,0.419,63,0 482 | 3,158,70,30,328,35.5,0.344,35,1 483 | 0,123,88,37,0,35.2,0.197,29,0 484 | 4,85,58,22,49,27.8,0.306,28,0 485 | 0,84,82,31,125,38.2,0.233,23,0 486 | 0,145,0,0,0,44.2,0.63,31,1 487 | 0,135,68,42,250,42.3,0.365,24,1 488 | 1,139,62,41,480,40.7,0.536,21,0 489 | 0,173,78,32,265,46.5,1.159,58,0 490 | 
4,99,72,17,0,25.6,0.294,28,0 491 | 8,194,80,0,0,26.1,0.551,67,0 492 | 2,83,65,28,66,36.8,0.629,24,0 493 | 2,89,90,30,0,33.5,0.292,42,0 494 | 4,99,68,38,0,32.8,0.145,33,0 495 | 4,125,70,18,122,28.9,1.144,45,1 496 | 3,80,0,0,0,0,0.174,22,0 497 | 6,166,74,0,0,26.6,0.304,66,0 498 | 5,110,68,0,0,26,0.292,30,0 499 | 2,81,72,15,76,30.1,0.547,25,0 500 | 7,195,70,33,145,25.1,0.163,55,1 501 | 6,154,74,32,193,29.3,0.839,39,0 502 | 2,117,90,19,71,25.2,0.313,21,0 503 | 3,84,72,32,0,37.2,0.267,28,0 504 | 6,0,68,41,0,39,0.727,41,1 505 | 7,94,64,25,79,33.3,0.738,41,0 506 | 3,96,78,39,0,37.3,0.238,40,0 507 | 10,75,82,0,0,33.3,0.263,38,0 508 | 0,180,90,26,90,36.5,0.314,35,1 509 | 1,130,60,23,170,28.6,0.692,21,0 510 | 2,84,50,23,76,30.4,0.968,21,0 511 | 8,120,78,0,0,25,0.409,64,0 512 | 12,84,72,31,0,29.7,0.297,46,1 513 | 0,139,62,17,210,22.1,0.207,21,0 514 | 9,91,68,0,0,24.2,0.2,58,0 515 | 2,91,62,0,0,27.3,0.525,22,0 516 | 3,99,54,19,86,25.6,0.154,24,0 517 | 3,163,70,18,105,31.6,0.268,28,1 518 | 9,145,88,34,165,30.3,0.771,53,1 519 | 7,125,86,0,0,37.6,0.304,51,0 520 | 13,76,60,0,0,32.8,0.18,41,0 521 | 6,129,90,7,326,19.6,0.582,60,0 522 | 2,68,70,32,66,25,0.187,25,0 523 | 3,124,80,33,130,33.2,0.305,26,0 524 | 6,114,0,0,0,0,0.189,26,0 525 | 9,130,70,0,0,34.2,0.652,45,1 526 | 3,125,58,0,0,31.6,0.151,24,0 527 | 3,87,60,18,0,21.8,0.444,21,0 528 | 1,97,64,19,82,18.2,0.299,21,0 529 | 3,116,74,15,105,26.3,0.107,24,0 530 | 0,117,66,31,188,30.8,0.493,22,0 531 | 0,111,65,0,0,24.6,0.66,31,0 532 | 2,122,60,18,106,29.8,0.717,22,0 533 | 0,107,76,0,0,45.3,0.686,24,0 534 | 1,86,66,52,65,41.3,0.917,29,0 535 | 6,91,0,0,0,29.8,0.501,31,0 536 | 1,77,56,30,56,33.3,1.251,24,0 537 | 4,132,0,0,0,32.9,0.302,23,1 538 | 0,105,90,0,0,29.6,0.197,46,0 539 | 0,57,60,0,0,21.7,0.735,67,0 540 | 0,127,80,37,210,36.3,0.804,23,0 541 | 3,129,92,49,155,36.4,0.968,32,1 542 | 8,100,74,40,215,39.4,0.661,43,1 543 | 3,128,72,25,190,32.4,0.549,27,1 544 | 10,90,85,32,0,34.9,0.825,56,1 545 | 4,84,90,23,56,39.5,0.159,25,0 546 | 1,88,78,29,76,32,0.365,29,0 547 | 8,186,90,35,225,34.5,0.423,37,1 548 | 5,187,76,27,207,43.6,1.034,53,1 549 | 4,131,68,21,166,33.1,0.16,28,0 550 | 1,164,82,43,67,32.8,0.341,50,0 551 | 4,189,110,31,0,28.5,0.68,37,0 552 | 1,116,70,28,0,27.4,0.204,21,0 553 | 3,84,68,30,106,31.9,0.591,25,0 554 | 6,114,88,0,0,27.8,0.247,66,0 555 | 1,88,62,24,44,29.9,0.422,23,0 556 | 1,84,64,23,115,36.9,0.471,28,0 557 | 7,124,70,33,215,25.5,0.161,37,0 558 | 1,97,70,40,0,38.1,0.218,30,0 559 | 8,110,76,0,0,27.8,0.237,58,0 560 | 11,103,68,40,0,46.2,0.126,42,0 561 | 11,85,74,0,0,30.1,0.3,35,0 562 | 6,125,76,0,0,33.8,0.121,54,1 563 | 0,198,66,32,274,41.3,0.502,28,1 564 | 1,87,68,34,77,37.6,0.401,24,0 565 | 6,99,60,19,54,26.9,0.497,32,0 566 | 0,91,80,0,0,32.4,0.601,27,0 567 | 2,95,54,14,88,26.1,0.748,22,0 568 | 1,99,72,30,18,38.6,0.412,21,0 569 | 6,92,62,32,126,32,0.085,46,0 570 | 4,154,72,29,126,31.3,0.338,37,0 571 | 0,121,66,30,165,34.3,0.203,33,1 572 | 3,78,70,0,0,32.5,0.27,39,0 573 | 2,130,96,0,0,22.6,0.268,21,0 574 | 3,111,58,31,44,29.5,0.43,22,0 575 | 2,98,60,17,120,34.7,0.198,22,0 576 | 1,143,86,30,330,30.1,0.892,23,0 577 | 1,119,44,47,63,35.5,0.28,25,0 578 | 6,108,44,20,130,24,0.813,35,0 579 | 2,118,80,0,0,42.9,0.693,21,1 580 | 10,133,68,0,0,27,0.245,36,0 581 | 2,197,70,99,0,34.7,0.575,62,1 582 | 0,151,90,46,0,42.1,0.371,21,1 583 | 6,109,60,27,0,25,0.206,27,0 584 | 12,121,78,17,0,26.5,0.259,62,0 585 | 8,100,76,0,0,38.7,0.19,42,0 586 | 8,124,76,24,600,28.7,0.687,52,1 587 | 1,93,56,11,0,22.5,0.417,22,0 588 | 8,143,66,0,0,34.9,0.129,41,1 589 | 
6,103,66,0,0,24.3,0.249,29,0 590 | 3,176,86,27,156,33.3,1.154,52,1 591 | 0,73,0,0,0,21.1,0.342,25,0 592 | 11,111,84,40,0,46.8,0.925,45,1 593 | 2,112,78,50,140,39.4,0.175,24,0 594 | 3,132,80,0,0,34.4,0.402,44,1 595 | 2,82,52,22,115,28.5,1.699,25,0 596 | 6,123,72,45,230,33.6,0.733,34,0 597 | 0,188,82,14,185,32,0.682,22,1 598 | 0,67,76,0,0,45.3,0.194,46,0 599 | 1,89,24,19,25,27.8,0.559,21,0 600 | 1,173,74,0,0,36.8,0.088,38,1 601 | 1,109,38,18,120,23.1,0.407,26,0 602 | 1,108,88,19,0,27.1,0.4,24,0 603 | 6,96,0,0,0,23.7,0.19,28,0 604 | 1,124,74,36,0,27.8,0.1,30,0 605 | 7,150,78,29,126,35.2,0.692,54,1 606 | 4,183,0,0,0,28.4,0.212,36,1 607 | 1,124,60,32,0,35.8,0.514,21,0 608 | 1,181,78,42,293,40,1.258,22,1 609 | 1,92,62,25,41,19.5,0.482,25,0 610 | 0,152,82,39,272,41.5,0.27,27,0 611 | 1,111,62,13,182,24,0.138,23,0 612 | 3,106,54,21,158,30.9,0.292,24,0 613 | 3,174,58,22,194,32.9,0.593,36,1 614 | 7,168,88,42,321,38.2,0.787,40,1 615 | 6,105,80,28,0,32.5,0.878,26,0 616 | 11,138,74,26,144,36.1,0.557,50,1 617 | 3,106,72,0,0,25.8,0.207,27,0 618 | 6,117,96,0,0,28.7,0.157,30,0 619 | 2,68,62,13,15,20.1,0.257,23,0 620 | 9,112,82,24,0,28.2,1.282,50,1 621 | 0,119,0,0,0,32.4,0.141,24,1 622 | 2,112,86,42,160,38.4,0.246,28,0 623 | 2,92,76,20,0,24.2,1.698,28,0 624 | 6,183,94,0,0,40.8,1.461,45,0 625 | 0,94,70,27,115,43.5,0.347,21,0 626 | 2,108,64,0,0,30.8,0.158,21,0 627 | 4,90,88,47,54,37.7,0.362,29,0 628 | 0,125,68,0,0,24.7,0.206,21,0 629 | 0,132,78,0,0,32.4,0.393,21,0 630 | 5,128,80,0,0,34.6,0.144,45,0 631 | 4,94,65,22,0,24.7,0.148,21,0 632 | 7,114,64,0,0,27.4,0.732,34,1 633 | 0,102,78,40,90,34.5,0.238,24,0 634 | 2,111,60,0,0,26.2,0.343,23,0 635 | 1,128,82,17,183,27.5,0.115,22,0 636 | 10,92,62,0,0,25.9,0.167,31,0 637 | 13,104,72,0,0,31.2,0.465,38,1 638 | 5,104,74,0,0,28.8,0.153,48,0 639 | 2,94,76,18,66,31.6,0.649,23,0 640 | 7,97,76,32,91,40.9,0.871,32,1 641 | 1,100,74,12,46,19.5,0.149,28,0 642 | 0,102,86,17,105,29.3,0.695,27,0 643 | 4,128,70,0,0,34.3,0.303,24,0 644 | 6,147,80,0,0,29.5,0.178,50,1 645 | 4,90,0,0,0,28,0.61,31,0 646 | 3,103,72,30,152,27.6,0.73,27,0 647 | 2,157,74,35,440,39.4,0.134,30,0 648 | 1,167,74,17,144,23.4,0.447,33,1 649 | 0,179,50,36,159,37.8,0.455,22,1 650 | 11,136,84,35,130,28.3,0.26,42,1 651 | 0,107,60,25,0,26.4,0.133,23,0 652 | 1,91,54,25,100,25.2,0.234,23,0 653 | 1,117,60,23,106,33.8,0.466,27,0 654 | 5,123,74,40,77,34.1,0.269,28,0 655 | 2,120,54,0,0,26.8,0.455,27,0 656 | 1,106,70,28,135,34.2,0.142,22,0 657 | 2,155,52,27,540,38.7,0.24,25,1 658 | 2,101,58,35,90,21.8,0.155,22,0 659 | 1,120,80,48,200,38.9,1.162,41,0 660 | 11,127,106,0,0,39,0.19,51,0 661 | 3,80,82,31,70,34.2,1.292,27,1 662 | 10,162,84,0,0,27.7,0.182,54,0 663 | 1,199,76,43,0,42.9,1.394,22,1 664 | 8,167,106,46,231,37.6,0.165,43,1 665 | 9,145,80,46,130,37.9,0.637,40,1 666 | 6,115,60,39,0,33.7,0.245,40,1 667 | 1,112,80,45,132,34.8,0.217,24,0 668 | 4,145,82,18,0,32.5,0.235,70,1 669 | 10,111,70,27,0,27.5,0.141,40,1 670 | 6,98,58,33,190,34,0.43,43,0 671 | 9,154,78,30,100,30.9,0.164,45,0 672 | 6,165,68,26,168,33.6,0.631,49,0 673 | 1,99,58,10,0,25.4,0.551,21,0 674 | 10,68,106,23,49,35.5,0.285,47,0 675 | 3,123,100,35,240,57.3,0.88,22,0 676 | 8,91,82,0,0,35.6,0.587,68,0 677 | 6,195,70,0,0,30.9,0.328,31,1 678 | 9,156,86,0,0,24.8,0.23,53,1 679 | 0,93,60,0,0,35.3,0.263,25,0 680 | 3,121,52,0,0,36,0.127,25,1 681 | 2,101,58,17,265,24.2,0.614,23,0 682 | 2,56,56,28,45,24.2,0.332,22,0 683 | 0,162,76,36,0,49.6,0.364,26,1 684 | 0,95,64,39,105,44.6,0.366,22,0 685 | 4,125,80,0,0,32.3,0.536,27,1 686 | 5,136,82,0,0,0,0.64,69,0 687 | 
2,129,74,26,205,33.2,0.591,25,0 688 | 3,130,64,0,0,23.1,0.314,22,0 689 | 1,107,50,19,0,28.3,0.181,29,0 690 | 1,140,74,26,180,24.1,0.828,23,0 691 | 1,144,82,46,180,46.1,0.335,46,1 692 | 8,107,80,0,0,24.6,0.856,34,0 693 | 13,158,114,0,0,42.3,0.257,44,1 694 | 2,121,70,32,95,39.1,0.886,23,0 695 | 7,129,68,49,125,38.5,0.439,43,1 696 | 2,90,60,0,0,23.5,0.191,25,0 697 | 7,142,90,24,480,30.4,0.128,43,1 698 | 3,169,74,19,125,29.9,0.268,31,1 699 | 0,99,0,0,0,25,0.253,22,0 700 | 4,127,88,11,155,34.5,0.598,28,0 701 | 4,118,70,0,0,44.5,0.904,26,0 702 | 2,122,76,27,200,35.9,0.483,26,0 703 | 6,125,78,31,0,27.6,0.565,49,1 704 | 1,168,88,29,0,35,0.905,52,1 705 | 2,129,0,0,0,38.5,0.304,41,0 706 | 4,110,76,20,100,28.4,0.118,27,0 707 | 6,80,80,36,0,39.8,0.177,28,0 708 | 10,115,0,0,0,0,0.261,30,1 709 | 2,127,46,21,335,34.4,0.176,22,0 710 | 9,164,78,0,0,32.8,0.148,45,1 711 | 2,93,64,32,160,38,0.674,23,1 712 | 3,158,64,13,387,31.2,0.295,24,0 713 | 5,126,78,27,22,29.6,0.439,40,0 714 | 10,129,62,36,0,41.2,0.441,38,1 715 | 0,134,58,20,291,26.4,0.352,21,0 716 | 3,102,74,0,0,29.5,0.121,32,0 717 | 7,187,50,33,392,33.9,0.826,34,1 718 | 3,173,78,39,185,33.8,0.97,31,1 719 | 10,94,72,18,0,23.1,0.595,56,0 720 | 1,108,60,46,178,35.5,0.415,24,0 721 | 5,97,76,27,0,35.6,0.378,52,1 722 | 4,83,86,19,0,29.3,0.317,34,0 723 | 1,114,66,36,200,38.1,0.289,21,0 724 | 1,149,68,29,127,29.3,0.349,42,1 725 | 5,117,86,30,105,39.1,0.251,42,0 726 | 1,111,94,0,0,32.8,0.265,45,0 727 | 4,112,78,40,0,39.4,0.236,38,0 728 | 1,116,78,29,180,36.1,0.496,25,0 729 | 0,141,84,26,0,32.4,0.433,22,0 730 | 2,175,88,0,0,22.9,0.326,22,0 731 | 2,92,52,0,0,30.1,0.141,22,0 732 | 3,130,78,23,79,28.4,0.323,34,1 733 | 8,120,86,0,0,28.4,0.259,22,1 734 | 2,174,88,37,120,44.5,0.646,24,1 735 | 2,106,56,27,165,29,0.426,22,0 736 | 2,105,75,0,0,23.3,0.56,53,0 737 | 4,95,60,32,0,35.4,0.284,28,0 738 | 0,126,86,27,120,27.4,0.515,21,0 739 | 8,65,72,23,0,32,0.6,42,0 740 | 2,99,60,17,160,36.6,0.453,21,0 741 | 1,102,74,0,0,39.5,0.293,42,1 742 | 11,120,80,37,150,42.3,0.785,48,1 743 | 3,102,44,20,94,30.8,0.4,26,0 744 | 1,109,58,18,116,28.5,0.219,22,0 745 | 9,140,94,0,0,32.7,0.734,45,1 746 | 13,153,88,37,140,40.6,1.174,39,0 747 | 12,100,84,33,105,30,0.488,46,0 748 | 1,147,94,41,0,49.3,0.358,27,1 749 | 1,81,74,41,57,46.3,1.096,32,0 750 | 3,187,70,22,200,36.4,0.408,36,1 751 | 6,162,62,0,0,24.3,0.178,50,1 752 | 4,136,70,0,0,31.2,1.182,22,1 753 | 1,121,78,39,74,39,0.261,28,0 754 | 3,108,62,24,0,26,0.223,25,0 755 | 0,181,88,44,510,43.3,0.222,26,1 756 | 8,154,78,32,0,32.4,0.443,45,1 757 | 1,128,88,39,110,36.5,1.057,37,1 758 | 7,137,90,41,0,32,0.391,39,0 759 | 0,123,72,0,0,36.3,0.258,52,1 760 | 1,106,76,0,0,37.5,0.197,26,0 761 | 6,190,92,0,0,35.5,0.278,66,1 762 | 2,88,58,26,16,28.4,0.766,22,0 763 | 9,170,74,31,0,44,0.403,43,1 764 | 9,89,62,0,0,22.5,0.142,33,0 765 | 10,101,76,48,180,32.9,0.171,63,0 766 | 2,122,70,27,0,36.8,0.34,27,0 767 | 5,121,72,23,112,26.2,0.245,30,0 768 | 1,126,60,0,0,30.1,0.349,47,1 769 | 1,93,70,31,0,30.4,0.315,23,0 -------------------------------------------------------------------------------- /uci/flare-solar.csv: -------------------------------------------------------------------------------- 1 | target,1,2,3,4,5,6,7,8,9 2 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 3 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 4 | 1.0,2.0,2.0,3.0,2.0,2.0,1.0,0.0,0.0,0.0 5 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,3.0,0.0,0.0 6 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 7 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 8 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 9 | 
0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 10 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 11 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 12 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 13 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 14 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 15 | 0.0,2.0,2.0,2.0,1.0,2.0,1.0,0.0,0.0,0.0 16 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 17 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 18 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 19 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 20 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 21 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 22 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 23 | 1.0,1.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 24 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 25 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,1.0,0.0 26 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 27 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 28 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 29 | 1.0,2.0,3.0,1.0,2.0,2.0,2.0,1.0,3.0,2.0 30 | 1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 31 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 32 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 33 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 34 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 35 | 1.0,2.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 36 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 37 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 38 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 39 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 40 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 41 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,2.0,0.0,0.0 42 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 43 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 44 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 45 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 46 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 47 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 48 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 49 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 50 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 51 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 52 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 53 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 54 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 55 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 56 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 57 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 58 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 59 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 60 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 61 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,3.0,0.0,0.0 62 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 63 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 64 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,2.0,1.0,0.0 65 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 66 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 67 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 68 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 69 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 70 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 71 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 72 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 73 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 74 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 75 | 0.0,2.0,2.0,3.0,2.0,2.0,1.0,0.0,0.0,0.0 76 | 0.0,2.0,2.0,3.0,2.0,2.0,1.0,0.0,0.0,0.0 77 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 78 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 79 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 80 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 81 | 1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 82 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 83 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 84 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 85 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 86 | 0.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 87 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 88 | 
0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 89 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 90 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 91 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 92 | 1.0,1.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 93 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 94 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 95 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 96 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 97 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 98 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 99 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,1.0,0.0 100 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 101 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 102 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 103 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 104 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 105 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 106 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 107 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 108 | 1.0,1.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 109 | 1.0,1.0,3.0,1.0,2.0,2.0,2.0,2.0,1.0,0.0 110 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 111 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 112 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 113 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 114 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 115 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 116 | 1.0,2.0,3.0,1.0,2.0,2.0,2.0,2.0,0.0,0.0 117 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 118 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 119 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 120 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 121 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 122 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 123 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 124 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 125 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 126 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 127 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 128 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 129 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 130 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 131 | 0.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 132 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 133 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 134 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 135 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 136 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 137 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 138 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 139 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 140 | 1.0,2.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 141 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 142 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 143 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 144 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 145 | 1.0,2.0,2.0,3.0,2.0,2.0,1.0,0.0,0.0,0.0 146 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 147 | 1.0,2.0,3.0,2.0,1.0,2.0,1.0,0.0,0.0,0.0 148 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 149 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 150 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 151 | 1.0,1.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 152 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 153 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 154 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 155 | 1.0,2.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 156 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 157 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 158 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 159 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 160 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 161 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 162 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 163 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 164 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 165 | 
0.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 166 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 167 | 1.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 168 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 169 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 170 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 171 | 1.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 172 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 173 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 174 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 175 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 176 | 0.0,2.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 177 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 178 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 179 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 180 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 181 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 182 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 183 | 1.0,2.0,2.0,3.0,2.0,2.0,2.0,0.0,0.0,0.0 184 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 185 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 186 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 187 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 188 | 1.0,2.0,3.0,3.0,2.0,2.0,1.0,6.0,0.0,0.0 189 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 190 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 191 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 192 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 193 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 194 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,1.0,0.0 195 | 0.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 196 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 197 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 198 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 199 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 200 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 201 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 202 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 203 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,1.0,1.0,0.0 204 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 205 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 206 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 207 | 1.0,2.0,3.0,1.0,2.0,2.0,2.0,3.0,0.0,0.0 208 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 209 | 1.0,2.0,3.0,1.0,2.0,2.0,2.0,2.0,1.0,0.0 210 | 1.0,2.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 211 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 212 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 213 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 214 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 215 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 216 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 217 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 218 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 219 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,2.0,0.0,0.0 220 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 221 | 0.0,2.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 222 | 1.0,2.0,3.0,1.0,2.0,2.0,2.0,1.0,0.0,0.0 223 | 1.0,2.0,3.0,3.0,2.0,2.0,2.0,1.0,0.0,0.0 224 | 0.0,1.0,1.0,3.0,2.0,2.0,1.0,0.0,0.0,0.0 225 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 226 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 227 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 228 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 229 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 230 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 231 | 0.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 232 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 233 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 234 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 235 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 236 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 237 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 238 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 239 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,4.0,1.0,0.0 240 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 241 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,2.0,0.0 242 | 
0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 243 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 244 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 245 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 246 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 247 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 248 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 249 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 250 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 251 | 1.0,1.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 252 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 253 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 254 | 0.0,2.0,3.0,1.0,2.0,2.0,1.0,5.0,0.0,0.0 255 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 256 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 257 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 258 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 259 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 260 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 261 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 262 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 263 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 264 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 265 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 266 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 267 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 268 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 269 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 270 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 271 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 272 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 273 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,3.0,0.0,0.0 274 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 275 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 276 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,1.0,0.0 277 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 278 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 279 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 280 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 281 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 282 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 283 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 284 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 285 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 286 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 287 | 1.0,2.0,3.0,1.0,2.0,2.0,2.0,0.0,0.0,0.0 288 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 289 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 290 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 291 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 292 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 293 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 294 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,1.0,0.0 295 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 296 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 297 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,1.0,0.0 298 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 299 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0 300 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 301 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 302 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,3.0,0.0,0.0 303 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 304 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 305 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 306 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 307 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 308 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 309 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 310 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 311 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 312 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 313 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 314 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 315 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 316 | 1.0,2.0,3.0,3.0,2.0,2.0,2.0,0.0,5.0,1.0 317 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 318 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,6.0,0.0,0.0 319 | 
0.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 320 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 321 | 1.0,1.0,2.0,1.0,2.0,2.0,2.0,0.0,1.0,1.0 322 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0 323 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 324 | 1.0,2.0,3.0,3.0,2.0,2.0,1.0,2.0,0.0,0.0 325 | 0.0,2.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 326 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 327 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 328 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,1.0,0.0 329 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 330 | 1.0,2.0,3.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0 331 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 332 | 1.0,2.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 333 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,3.0,0.0 334 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 335 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 336 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 337 | 1.0,2.0,2.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0 338 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 339 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 340 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 341 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 342 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 343 | 1.0,1.0,2.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0 344 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 345 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 346 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 347 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 348 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 349 | 0.0,2.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 350 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 351 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 352 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 353 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,3.0,0.0,0.0 354 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 355 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 356 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 357 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 358 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 359 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 360 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 361 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 362 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 363 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 364 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 365 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 366 | 0.0,2.0,2.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0 367 | 1.0,2.0,3.0,1.0,2.0,2.0,2.0,0.0,0.0,0.0 368 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 369 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 370 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 371 | 0.0,2.0,2.0,3.0,2.0,2.0,1.0,0.0,0.0,0.0 372 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 373 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 374 | 1.0,2.0,2.0,3.0,2.0,2.0,2.0,2.0,2.0,0.0 375 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 376 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 377 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 378 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 379 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 380 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 381 | 0.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 382 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 383 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 384 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 385 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 386 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 387 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 388 | 1.0,2.0,3.0,3.0,2.0,2.0,2.0,6.0,0.0,0.0 389 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 390 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 391 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 392 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,3.0,0.0,0.0 393 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 394 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 395 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 396 | 
0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 397 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 398 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 399 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,3.0,0.0,0.0 400 | 0.0,2.0,3.0,3.0,1.0,2.0,1.0,0.0,0.0,0.0 401 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 402 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 403 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 404 | 1.0,2.0,2.0,1.0,1.0,2.0,1.0,8.0,0.0,0.0 405 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 406 | 0.0,2.0,2.0,3.0,2.0,2.0,1.0,1.0,0.0,0.0 407 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 408 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 409 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,3.0,0.0,0.0 410 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 411 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 412 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 413 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 414 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 415 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 416 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 417 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 418 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,4.0,4.0,0.0 419 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 420 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,1.0,0.0 421 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 422 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 423 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 424 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 425 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 426 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 427 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 428 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 429 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 430 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 431 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 432 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 433 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 434 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,3.0,0.0,0.0 435 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 436 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 437 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,4.0,0.0,0.0 438 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 439 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 440 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 441 | 0.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 442 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 443 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 444 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 445 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 446 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 447 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 448 | 1.0,2.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 449 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 450 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 451 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 452 | 0.0,2.0,3.0,3.0,2.0,2.0,1.0,2.0,0.0,0.0 453 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 454 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 455 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 456 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 457 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 458 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,5.0,1.0,0.0 459 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 460 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 461 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 462 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 463 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 464 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 465 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 466 | 1.0,2.0,3.0,3.0,2.0,2.0,2.0,1.0,0.0,1.0 467 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 468 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0 469 | 0.0,2.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 470 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 471 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 472 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 473 | 
1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 474 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 475 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 476 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0 477 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 478 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 479 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 480 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 481 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 482 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 483 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 484 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 485 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 486 | 0.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 487 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 488 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 489 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 490 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 491 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 492 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 493 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 494 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 495 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 496 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 497 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 498 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 499 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 500 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 501 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 502 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 503 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,1.0,0.0 504 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 505 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 506 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 507 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 508 | 1.0,2.0,3.0,2.0,2.0,2.0,1.0,2.0,0.0,0.0 509 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 510 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 511 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 512 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 513 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 514 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 515 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 516 | 0.0,2.0,3.0,3.0,1.0,2.0,1.0,0.0,1.0,0.0 517 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 518 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 519 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 520 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 521 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 522 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 523 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 524 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 525 | 0.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 526 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 527 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 528 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 529 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 530 | 1.0,1.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 531 | 0.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 532 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 533 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 534 | 1.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 535 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,4.0,0.0,0.0 536 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 537 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 538 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 539 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 540 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 541 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 542 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 543 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 544 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 545 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,3.0,0.0,0.0 546 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 547 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 548 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 549 | 0.0,2.0,2.0,1.0,2.0,2.0,1.0,4.0,0.0,0.0 550 | 
0.0,2.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 551 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 552 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 553 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 554 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 555 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 556 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 557 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 558 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 559 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 560 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 561 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 562 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 563 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 564 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 565 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 566 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 567 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 568 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 569 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 570 | 1.0,2.0,2.0,3.0,2.0,2.0,1.0,0.0,0.0,0.0 571 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,1.0,0.0 572 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 573 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,3.0,0.0,0.0 574 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 575 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 576 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 577 | 1.0,2.0,2.0,2.0,2.0,2.0,2.0,0.0,0.0,0.0 578 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 579 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 580 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 581 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 582 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 583 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 584 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 585 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 586 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0 587 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 588 | 0.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 589 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 590 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 591 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 592 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 593 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 594 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 595 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 596 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 597 | 1.0,2.0,3.0,1.0,1.0,2.0,1.0,2.0,0.0,0.0 598 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,3.0,0.0,0.0 599 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 600 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 601 | 1.0,2.0,1.0,3.0,2.0,2.0,1.0,5.0,0.0,0.0 602 | 1.0,2.0,3.0,1.0,2.0,2.0,2.0,1.0,1.0,0.0 603 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 604 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 605 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0 606 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,1.0,1.0,0.0 607 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 608 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 609 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 610 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 611 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 612 | 0.0,2.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 613 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 614 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 615 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 616 | 0.0,1.0,3.0,1.0,2.0,2.0,2.0,0.0,0.0,0.0 617 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 618 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 619 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 620 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 621 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 622 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 623 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 624 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 625 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 626 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 627 | 
1.0,2.0,3.0,1.0,2.0,2.0,2.0,0.0,0.0,0.0 628 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0 629 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 630 | 0.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 631 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 632 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 633 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 634 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 635 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,3.0,0.0,0.0 636 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 637 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 638 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 639 | 1.0,1.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 640 | 1.0,1.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 641 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 642 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 643 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 644 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 645 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 646 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 647 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 648 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 649 | 0.0,2.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 650 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 651 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 652 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 653 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 654 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 655 | 0.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 656 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 657 | 1.0,2.0,2.0,3.0,2.0,2.0,1.0,0.0,0.0,0.0 658 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 659 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 660 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 661 | 1.0,1.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 662 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 663 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 664 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 665 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 666 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 667 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 668 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 669 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 670 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 671 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 672 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 673 | 0.0,2.0,2.0,2.0,2.0,2.0,1.0,1.0,1.0,0.0 674 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 675 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 676 | 1.0,1.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 677 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 678 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 679 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 680 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 681 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 682 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 683 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 684 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 685 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 686 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,2.0,0.0,0.0 687 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 688 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 689 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 690 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 691 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 692 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,2.0,0.0,0.0 693 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 694 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 695 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 696 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 697 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 698 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 699 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 700 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 701 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 702 | 1.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 703 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 704 | 
0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 705 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 706 | 0.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 707 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 708 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 709 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 710 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 711 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 712 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 713 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 714 | 0.0,2.0,3.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 715 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 716 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 717 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 718 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 719 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 720 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 721 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 722 | 0.0,2.0,2.0,3.0,2.0,2.0,1.0,0.0,0.0,0.0 723 | 1.0,2.0,2.0,2.0,2.0,2.0,2.0,4.0,0.0,0.0 724 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 725 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 726 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 727 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 728 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 729 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 730 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 731 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 732 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,1.0,0.0 733 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 734 | 1.0,2.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 735 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 736 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 737 | 1.0,1.0,2.0,1.0,2.0,2.0,2.0,0.0,0.0,0.0 738 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0 739 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 740 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 741 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 742 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 743 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 744 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 745 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0 746 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 747 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 748 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 749 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 750 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 751 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 752 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 753 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 754 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 755 | 0.0,2.0,2.0,2.0,1.0,2.0,1.0,0.0,0.0,0.0 756 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 757 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 758 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 759 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 760 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 761 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 762 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 763 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 764 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 765 | 1.0,2.0,3.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0 766 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 767 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 768 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 769 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 770 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 771 | 1.0,1.0,1.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0 772 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 773 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,2.0,0.0,0.0 774 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 775 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 776 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 777 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 778 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 779 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 780 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 781 | 
0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 782 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 783 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 784 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 785 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,4.0,0.0,0.0 786 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,3.0,1.0,0.0 787 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 788 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,5.0,0.0,0.0 789 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 790 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 791 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 792 | 1.0,2.0,3.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0 793 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 794 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 795 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 796 | 1.0,1.0,2.0,1.0,2.0,2.0,2.0,1.0,0.0,0.0 797 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 798 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 799 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0 800 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,4.0,0.0,0.0 801 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 802 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 803 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 804 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 805 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 806 | 1.0,1.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 807 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 808 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 809 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 810 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 811 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 812 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 813 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 814 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 815 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,1.0,0.0 816 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 817 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 818 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 819 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 820 | 0.0,2.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 821 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 822 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 823 | 0.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 824 | 1.0,1.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 825 | 1.0,2.0,3.0,1.0,1.0,2.0,1.0,2.0,0.0,0.0 826 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 827 | 1.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 828 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 829 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 830 | 1.0,2.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 831 | 0.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 832 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 833 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 834 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 835 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 836 | 1.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 837 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 838 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 839 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 840 | 0.0,2.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 841 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,3.0,0.0,0.0 842 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 843 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 844 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 845 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 846 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 847 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 848 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 849 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 850 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 851 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 852 | 0.0,2.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 853 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 854 | 0.0,2.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 855 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 856 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 857 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 858 | 
1.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 859 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 860 | 0.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 861 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 862 | 1.0,1.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 863 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 864 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 865 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 866 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 867 | 0.0,2.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 868 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 869 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 870 | 1.0,2.0,3.0,3.0,2.0,2.0,1.0,1.0,0.0,0.0 871 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 872 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 873 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 874 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0 875 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 876 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 877 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 878 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 879 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 880 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 881 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 882 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0 883 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 884 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 885 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 886 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 887 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 888 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 889 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 890 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0 891 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 892 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 893 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 894 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 895 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 896 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 897 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 898 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 899 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 900 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 901 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 902 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,2.0,0.0,0.0 903 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 904 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 905 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 906 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 907 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 908 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 909 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,1.0,0.0 910 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 911 | 0.0,2.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 912 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 913 | 1.0,2.0,2.0,1.0,2.0,2.0,2.0,1.0,0.0,0.0 914 | 1.0,1.0,3.0,1.0,2.0,2.0,2.0,0.0,0.0,0.0 915 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 916 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 917 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 918 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 919 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 920 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 921 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 922 | 0.0,2.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 923 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,3.0,1.0,0.0 924 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 925 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 926 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 927 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 928 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 929 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 930 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 931 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0 932 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 933 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 934 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 935 | 
1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 936 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 937 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 938 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 939 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 940 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 941 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 942 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 943 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 944 | 0.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 945 | 0.0,2.0,3.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0 946 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,3.0,0.0,0.0 947 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 948 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 949 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,3.0,0.0,0.0 950 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 951 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 952 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 953 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,1.0,0.0 954 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 955 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 956 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 957 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 958 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 959 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 960 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 961 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 962 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 963 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 964 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 965 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 966 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 967 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 968 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 969 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 970 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 971 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,4.0,0.0,0.0 972 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 973 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 974 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 975 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 976 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 977 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 978 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 979 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 980 | 1.0,2.0,2.0,3.0,2.0,2.0,2.0,0.0,0.0,0.0 981 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 982 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 983 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 984 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 985 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 986 | 1.0,1.0,3.0,1.0,2.0,2.0,2.0,1.0,0.0,0.0 987 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 988 | 1.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 989 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 990 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 991 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 992 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 993 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 994 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 995 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 996 | 1.0,2.0,3.0,1.0,1.0,2.0,1.0,2.0,0.0,0.0 997 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,2.0,1.0,0.0 998 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 999 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 1000 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 1001 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1002 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1003 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,2.0,0.0,0.0 1004 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 1005 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1006 | 1.0,1.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 1007 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1008 | 1.0,1.0,3.0,1.0,2.0,2.0,2.0,0.0,0.0,0.0 1009 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 1010 | 1.0,1.0,3.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 1011 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 1012 | 
0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 1013 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1014 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 1015 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1016 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1017 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1018 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1019 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 1020 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1021 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,2.0,2.0,0.0 1022 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1023 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1024 | 0.0,1.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 1025 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1026 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1027 | 1.0,2.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,1.0 1028 | 1.0,1.0,2.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 1029 | 1.0,1.0,3.0,1.0,2.0,1.0,1.0,0.0,0.0,0.0 1030 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1031 | 0.0,2.0,2.0,2.0,2.0,2.0,1.0,0.0,0.0,0.0 1032 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 1033 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 1034 | 0.0,2.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 1035 | 0.0,1.0,1.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1036 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1037 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1038 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,1.0,0.0,0.0 1039 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 1040 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 1041 | 0.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 1042 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 1043 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1044 | 1.0,2.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 1045 | 0.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 1046 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1047 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 1048 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1049 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1050 | 1.0,1.0,2.0,1.0,2.0,2.0,1.0,1.0,0.0,0.0 1051 | 1.0,1.0,2.0,1.0,1.0,1.0,1.0,0.0,0.0,0.0 1052 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1053 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 1054 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1055 | 1.0,2.0,3.0,1.0,2.0,2.0,1.0,2.0,0.0,0.0 1056 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1057 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,1.0,0.0,0.0 1058 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,1.0,0.0 1059 | 1.0,2.0,3.0,1.0,1.0,2.0,1.0,3.0,0.0,0.0 1060 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1061 | 1.0,1.0,1.0,1.0,2.0,2.0,1.0,0.0,0.0,0.0 1062 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1063 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1064 | 0.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1065 | 1.0,1.0,3.0,1.0,1.0,2.0,1.0,2.0,0.0,0.0 1066 | 1.0,1.0,2.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1067 | 0.0,1.0,3.0,1.0,1.0,2.0,1.0,0.0,0.0,0.0 1068 | --------------------------------------------------------------------------------
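The block above is the raw contents of one of the `uci/` benchmark CSVs, ending at row 1068. For orientation only, here is a minimal loading sketch; it is not the repository's own loader (`utils/dataloader.py` is the authoritative one). The filename, the absence of a header row, and the choice of the first column as the binary label are all assumptions — the last based only on the first column being the sole strictly {0, 1} column in the rows shown.

```python
import pandas as pd

# Minimal sketch (assumptions marked in comments): read one of the uci/
# CSVs into a feature matrix X and a binary label vector y.
df = pd.read_csv('uci/flare-solar.csv', header=None)  # assumed: no header row; filename illustrative

y = df.iloc[:, 0].astype(int).to_numpy()  # assumed: first column holds the 0/1 label
X = df.iloc[:, 1:].to_numpy()             # remaining nine columns treated as features

print(X.shape, y.mean())  # (n_samples, n_features) and the empirical label prior
```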