├── README.md ├── data ├── Processed_BreastCancer.data ├── Processed_Cancer.data ├── Processed_Concrete.data ├── Processed_Diabetis.data ├── Processed_Flare.data ├── Processed_German.data ├── Processed_Heart.data ├── Processed_Image.data ├── Processed_Splice.data ├── Processed_Student_Prob.data ├── Processed_Thyroid.data ├── Processed_Titanic.data ├── Processed_Waveform.data ├── USPS.csv ├── banana_data.csv ├── crabs.csv ├── ionosphere.data ├── pima-indians-diabetes.data ├── ringnorm.data ├── sonar.data └── twonorm.data ├── src ├── AFKMC2.jl ├── BSVM.jl ├── DataAccess.jl ├── ECM.jl ├── KernelFunctions.jl ├── paper_experiment_functions.jl └── test_functions.jl └── tests ├── paper_experiments.jl └── run_test.jl /README.md: -------------------------------------------------------------------------------- 1 | # !!!DISCLAIMER!!! # 2 | ## A better and more recent Julia implementation exists now at : [AugmentedGaussianProcesses.jl](https://github.com/theogf/AugmentedGaussianProcesses.jl) , this repository only contains an older version of the algorithm as well the experiments presented in our paper, it is relying on an outdated version of Julia 3 | 4 | ## README ## 5 | 6 | 7 | ### Objective ### 8 | 9 | * This repository contains the updated source code for the ***Bayesian Nonlinear Support Vector Machine (BSVM)*** both in its **stochastic (and with inducing points)** and its **batch version** 10 | * It relates to the paper published at the conference track of ECML 17' __"Bayesian Nonlinear Support Vector Machines for Big Data"__ by Florian Wenzel, Théo Galy-Fajou, Matthäus Deutsch and Marius Kloft. Paper is available at [https://arxiv.org/abs/1707.05532][arxiv] 11 | 12 | ### How do I install the package? 
### 13 | 14 | * First clone this repository (`git clone https://github.com/theogf/BayesianSVM.git`) 15 | * If you simply want to try out the package you need to install the **Julia** dependencies : 16 | - [Distributions][dist] 17 | - [PyPlot][pyplot] 18 | - [StatsBase][statsbase] 19 | - [GaussianMixtures][gaussm] 20 | - [Clustering][clustering] 21 | - [ScikitLearn][scikitjl] 22 | 23 | *Note: to install new packages use the Pkg.add("ModuleName") function in Julia* 24 | * If you want to try the competitors as well you will need to install these **Julia** and **Python** dependencies (as well as Python, of course): 25 | * (Julia)[PyCall][pycall] 26 | * (Python)[ScikitLearn][scikit] 27 | * (Python)[Tensorflow][tflow] 28 | * (Python)[GPflow][gpflow] 29 | 30 | *Note: to use Tensorflow and GPflow, they must be included in the search path of PyCall; to do this use `unshift!(PyVector(pyimport("sys")["path"]), "path_to_add")` and call `Pkg.build("PyCall")`. Also note that they are much more complicated to install* 31 | * Both tests and source files are written in Julia (v0.5), one first needs to install Julia to run them; however, a Python or Matlab user should be able to read easily through the code as the syntax is quite similar 32 | * Some light datasets are included (especially the **Rätsch Benchmark dataset**), the SUSY dataset can be found on UCI 33 | ### How to run tests? ### 34 | 35 | * Go to the "test" folder, open "run_test.jl", choose the dataset and change the parameters (more is explained in the file) and simply run the file. (*for example change the type of BSVM: linear/nonlinear, sparse, use of stochasticity, etc.*) 36 | * If you want to also use the competitors, open "paper_experiments.jl", choose the dataset, choose the methods you want to test and adapt the parameters (more details in the file). 37 | * For more custom usage of the BSVM method, look at the source code of src/BSVM.jl, where all the options are explained. More documentation will be there soon. 
38 | 39 | ### Who to contact ### 40 | 41 | **For any queries please contact theo.galyfajou at gmail.com** 42 | 43 | [arxiv]: 44 | [dist]: 45 | [pyplot]: 46 | [pycall]: 47 | [statsbase]: 48 | [gaussm]: 49 | [clustering]: 50 | [scikitjl]: 51 | [scikit]: 52 | [tflow]: 53 | [gpflow]: 54 | -------------------------------------------------------------------------------- /data/Processed_BreastCancer.data: -------------------------------------------------------------------------------- 1 | -2.6935114,0.90363699,1.059912,-0.45921259,0.5612411,-0.13800562,0.97475967,0.62937942,0.51629307,-1 2 | -1.6961842,-2.6847186,-0.79578056,-0.45921259,0.5612411,1.2124779,0.97475967,-0.98230246,0.51629307,-1 3 | -1.6961842,0.90363699,-2.18755,-0.45921259,0.5612411,-0.13800562,0.97475967,2.2410613,0.51629307,-1 4 | -1.6961842,0.90363699,-2.18755,-0.45921259,0.5612411,-0.13800562,0.97475967,2.2410613,0.51629307,1 5 | -1.6961842,0.90363699,-1.7236268,-0.45921259,0.5612411,-0.13800562,-1.0184056,1.4352204,0.51629307,-1 6 | -1.6961842,0.90363699,-1.2597037,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.17646152,0.51629307,-1 7 | -1.6961842,0.90363699,-1.2597037,-0.45921259,0.5612411,-0.13800562,-1.0184056,1.4352204,0.51629307,-1 8 | -1.6961842,0.90363699,-0.79578056,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,0.51629307,-1 9 | -1.6961842,0.90363699,-0.79578056,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.17646152,0.51629307,1 10 | -1.6961842,0.90363699,-0.79578056,1.0567422,-1.7687598,1.2124779,-1.0184056,-0.17646152,-1.9227466,1 11 | -1.6961842,0.90363699,-0.33185742,-0.45921259,0.5612411,-0.13800562,-1.0184056,1.4352204,0.51629307,-1 12 | -1.6961842,0.90363699,-0.33185742,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.98230246,-1.9227466,1 13 | -1.6961842,0.90363699,-0.33185742,-0.45921259,0.5612411,1.2124779,-1.0184056,2.2410613,0.51629307,-1 14 | -1.6961842,0.90363699,-0.33185742,0.29876482,-1.7687598,-0.13800562,-1.0184056,-0.17646152,0.51629307,1 15 | 
-1.6961842,0.90363699,-0.33185742,0.29876482,-1.7687598,-0.13800562,0.97475967,-0.98230246,-1.9227466,-1 16 | -1.6961842,0.90363699,-0.33185742,0.29876482,0.5612411,-0.13800562,0.97475967,2.2410613,0.51629307,-1 17 | -1.6961842,0.90363699,0.13206571,-0.45921259,0.5612411,-1.4884892,-1.0184056,2.2410613,0.51629307,-1 18 | -1.6961842,0.90363699,0.13206571,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 19 | -1.6961842,0.90363699,0.13206571,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.17646152,0.51629307,-1 20 | -1.6961842,0.90363699,0.13206571,0.29876482,-1.7687598,1.2124779,-1.0184056,-0.17646152,-1.9227466,1 21 | -1.6961842,0.90363699,0.13206571,1.0567422,-1.7687598,-0.13800562,0.97475967,-0.98230246,-1.9227466,-1 22 | -1.6961842,0.90363699,0.13206571,1.0567422,-1.7687598,1.2124779,-1.0184056,1.4352204,-1.9227466,1 23 | -1.6961842,0.90363699,0.59598884,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.98230246,0.51629307,1 24 | -1.6961842,0.90363699,0.59598884,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.98230246,0.51629307,-1 25 | -1.6961842,0.90363699,0.59598884,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.98230246,0.51629307,1 26 | -1.6961842,0.90363699,0.59598884,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.17646152,0.51629307,-1 27 | -1.6961842,0.90363699,0.59598884,0.29876482,0.5612411,1.2124779,0.97475967,-0.98230246,-1.9227466,1 28 | -1.6961842,0.90363699,0.59598884,1.0567422,-1.7687598,-0.13800562,0.97475967,0.62937942,0.51629307,-1 29 | -1.6961842,0.90363699,0.59598884,1.8147196,0.5612411,-0.13800562,0.97475967,-0.98230246,-1.9227466,1 30 | -1.6961842,0.90363699,1.059912,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.17646152,0.51629307,1 31 | -1.6961842,0.90363699,1.059912,1.8147196,-1.7687598,1.2124779,-1.0184056,-0.17646152,0.51629307,1 32 | -1.6961842,0.90363699,1.5238351,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.98230246,0.51629307,1 33 | 
-1.6961842,0.90363699,1.5238351,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,-1.9227466,-1 34 | -1.6961842,0.90363699,1.5238351,-0.45921259,0.5612411,-0.13800562,0.97475967,0.62937942,0.51629307,-1 35 | -1.6961842,0.90363699,1.5238351,0.29876482,0.5612411,1.2124779,0.97475967,0.62937942,-1.9227466,-1 36 | -0.6988570200000001,-0.8905408,-0.33185742,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.98230246,0.51629307,1 37 | -0.6988570200000001,-0.8905408,-0.33185742,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.98230246,0.51629307,-1 38 | -0.6988570200000001,-0.8905408,-0.33185742,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.17646152,0.51629307,-1 39 | -0.6988570200000001,-0.8905408,-0.33185742,0.29876482,0.5612411,1.2124779,0.97475967,-0.17646152,-1.9227466,1 40 | -0.6988570200000001,-0.8905408,0.13206571,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 41 | -0.6988570200000001,-0.8905408,0.13206571,2.5726971,-1.7687598,1.2124779,-1.0184056,1.4352204,-1.9227466,1 42 | -0.6988570200000001,-0.8905408,0.59598884,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.98230246,-1.9227466,-1 43 | -0.6988570200000001,-0.8905408,0.59598884,0.29876482,0.5612411,1.2124779,-1.0184056,-0.17646152,0.51629307,1 44 | -0.6988570200000001,-0.8905408,1.5238351,3.3306745,-1.7687598,-0.13800562,0.97475967,-0.98230246,-1.9227466,-1 45 | -0.6988570200000001,0.90363699,-2.18755,-0.45921259,0.5612411,-0.13800562,0.97475967,1.4352204,0.51629307,-1 46 | -0.6988570200000001,0.90363699,-2.18755,-0.45921259,0.5612411,1.2124779,-1.0184056,2.2410613,0.51629307,-1 47 | -0.6988570200000001,0.90363699,-1.7236268,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,-1.9227466,-1 48 | -0.6988570200000001,0.90363699,-1.2597037,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.98230246,0.51629307,-1 49 | -0.6988570200000001,0.90363699,-1.2597037,-0.45921259,0.5612411,-1.4884892,0.97475967,1.4352204,0.51629307,-1 50 | 
-0.6988570200000001,0.90363699,-1.2597037,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.98230246,0.51629307,-1 51 | -0.6988570200000001,0.90363699,-1.2597037,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,-1.9227466,-1 52 | -0.6988570200000001,0.90363699,-1.2597037,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 53 | -0.6988570200000001,0.90363699,-1.2597037,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.98230246,0.51629307,-1 54 | -0.6988570200000001,0.90363699,-1.2597037,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.17646152,0.51629307,-1 55 | -0.6988570200000001,0.90363699,-0.79578056,-0.45921259,-1.7687598,1.2124779,0.97475967,-0.98230246,0.51629307,1 56 | -0.6988570200000001,0.90363699,-0.79578056,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.98230246,0.51629307,1 57 | -0.6988570200000001,0.90363699,-0.79578056,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 58 | -0.6988570200000001,0.90363699,-0.79578056,2.5726971,0.5612411,1.2124779,0.97475967,1.4352204,-1.9227466,-1 59 | -0.6988570200000001,0.90363699,-0.79578056,3.3306745,-1.7687598,1.2124779,-1.0184056,-0.17646152,0.51629307,1 60 | -0.6988570200000001,0.90363699,-0.33185742,-0.45921259,0.5612411,-1.4884892,-1.0184056,1.4352204,0.51629307,-1 61 | -0.6988570200000001,0.90363699,-0.33185742,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.17646152,0.51629307,-1 62 | -0.6988570200000001,0.90363699,-0.33185742,-0.45921259,0.5612411,-1.4884892,0.97475967,0.62937942,0.51629307,-1 63 | -0.6988570200000001,0.90363699,-0.33185742,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.98230246,0.51629307,-1 64 | -0.6988570200000001,0.90363699,-0.33185742,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 65 | -0.6988570200000001,0.90363699,-0.33185742,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,1 66 | -0.6988570200000001,0.90363699,-0.33185742,-0.45921259,0.5612411,-0.13800562,-1.0184056,1.4352204,0.51629307,-1 67 | 
-0.6988570200000001,0.90363699,-0.33185742,-0.45921259,0.5612411,-0.13800562,-1.0184056,2.2410613,0.51629307,-1 68 | -0.6988570200000001,0.90363699,-0.33185742,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.98230246,0.51629307,-1 69 | -0.6988570200000001,0.90363699,-0.33185742,-0.45921259,0.5612411,-0.13800562,0.97475967,0.62937942,0.51629307,-1 70 | -0.6988570200000001,0.90363699,-0.33185742,-0.45921259,0.5612411,1.2124779,0.97475967,-0.17646152,-1.9227466,-1 71 | -0.6988570200000001,0.90363699,-0.33185742,0.29876482,-1.7687598,-0.13800562,-1.0184056,-0.17646152,-1.9227466,1 72 | -0.6988570200000001,0.90363699,-0.33185742,0.29876482,-1.7687598,-0.13800562,0.97475967,0.62937942,-1.9227466,1 73 | -0.6988570200000001,0.90363699,-0.33185742,0.29876482,0.5612411,-0.13800562,0.97475967,-0.98230246,0.51629307,-1 74 | -0.6988570200000001,0.90363699,-0.33185742,0.29876482,0.5612411,-0.13800562,0.97475967,-0.17646152,0.51629307,-1 75 | -0.6988570200000001,0.90363699,-0.33185742,1.0567422,0.5612411,-0.13800562,0.97475967,-0.17646152,-1.9227466,-1 76 | -0.6988570200000001,0.90363699,0.13206571,-0.45921259,0.5612411,-1.4884892,-1.0184056,1.4352204,0.51629307,-1 77 | -0.6988570200000001,0.90363699,0.13206571,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.17646152,-1.9227466,-1 78 | -0.6988570200000001,0.90363699,0.13206571,-0.45921259,0.5612411,-1.4884892,0.97475967,1.4352204,0.51629307,-1 79 | -0.6988570200000001,0.90363699,0.13206571,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.98230246,-1.9227466,-1 80 | -0.6988570200000001,0.90363699,0.13206571,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.98230246,0.51629307,-1 81 | -0.6988570200000001,0.90363699,0.13206571,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,-1.9227466,1 82 | -0.6988570200000001,0.90363699,0.13206571,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.98230246,0.51629307,-1 83 | -0.6988570200000001,0.90363699,0.13206571,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.17646152,0.51629307,-1 84 | 
-0.6988570200000001,0.90363699,0.13206571,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.17646152,0.51629307,1 85 | -0.6988570200000001,0.90363699,0.13206571,-0.45921259,0.5612411,-0.13800562,0.97475967,2.2410613,0.51629307,-1 86 | -0.6988570200000001,0.90363699,0.13206571,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.98230246,0.51629307,1 87 | -0.6988570200000001,0.90363699,0.13206571,-0.45921259,0.5612411,1.2124779,-1.0184056,0.62937942,0.51629307,1 88 | -0.6988570200000001,0.90363699,0.13206571,-0.45921259,0.5612411,1.2124779,0.97475967,-0.98230246,-1.9227466,-1 89 | -0.6988570200000001,0.90363699,0.13206571,1.8147196,-1.7687598,1.2124779,0.97475967,-0.98230246,0.51629307,1 90 | -0.6988570200000001,0.90363699,0.59598884,-0.45921259,-1.7687598,1.2124779,0.97475967,0.62937942,0.51629307,1 91 | -0.6988570200000001,0.90363699,0.59598884,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,-1.9227466,1 92 | -0.6988570200000001,0.90363699,0.59598884,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,0.51629307,-1 93 | -0.6988570200000001,0.90363699,0.59598884,-0.45921259,0.5612411,-1.4884892,-1.0184056,0.62937942,0.51629307,-1 94 | -0.6988570200000001,0.90363699,0.59598884,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.98230246,0.51629307,-1 95 | -0.6988570200000001,0.90363699,0.59598884,-0.45921259,0.5612411,-0.13800562,-1.0184056,1.4352204,0.51629307,-1 96 | -0.6988570200000001,0.90363699,0.59598884,-0.45921259,0.5612411,-0.13800562,0.97475967,0.62937942,-1.9227466,-1 97 | -0.6988570200000001,0.90363699,0.59598884,-0.45921259,0.5612411,-0.13800562,0.97475967,1.4352204,0.51629307,-1 98 | -0.6988570200000001,0.90363699,0.59598884,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.98230246,0.51629307,-1 99 | -0.6988570200000001,0.90363699,0.59598884,-0.45921259,0.5612411,1.2124779,0.97475967,0.62937942,0.51629307,-1 100 | -0.6988570200000001,0.90363699,0.59598884,-0.45921259,0.5612411,1.2124779,0.97475967,0.62937942,0.51629307,1 101 | 
-0.6988570200000001,0.90363699,0.59598884,0.29876482,-1.7687598,-0.13800562,-1.0184056,0.62937942,0.51629307,1 102 | -0.6988570200000001,0.90363699,0.59598884,0.29876482,-1.7687598,-0.13800562,0.97475967,-0.17646152,0.51629307,-1 103 | -0.6988570200000001,0.90363699,0.59598884,0.29876482,0.5612411,-0.13800562,0.97475967,-0.98230246,0.51629307,1 104 | -0.6988570200000001,0.90363699,0.59598884,1.0567422,-1.7687598,1.2124779,0.97475967,-0.98230246,0.51629307,1 105 | -0.6988570200000001,0.90363699,0.59598884,1.0567422,0.5612411,-0.13800562,-1.0184056,-0.98230246,0.51629307,-1 106 | -0.6988570200000001,0.90363699,0.59598884,2.5726971,-1.7687598,1.2124779,-1.0184056,-0.98230246,-1.9227466,1 107 | -0.6988570200000001,0.90363699,0.59598884,3.3306745,-1.7687598,1.2124779,-1.0184056,-0.17646152,0.51629307,1 108 | -0.6988570200000001,0.90363699,1.059912,-0.45921259,-1.7687598,1.2124779,0.97475967,-0.98230246,-1.9227466,-1 109 | -0.6988570200000001,0.90363699,1.059912,-0.45921259,-1.7687598,1.2124779,0.97475967,-0.17646152,-1.9227466,-1 110 | -0.6988570200000001,0.90363699,1.059912,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,0.51629307,-1 111 | -0.6988570200000001,0.90363699,1.059912,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.98230246,0.51629307,1 112 | -0.6988570200000001,0.90363699,1.059912,-0.45921259,0.5612411,-0.13800562,0.97475967,0.62937942,0.51629307,-1 113 | -0.6988570200000001,0.90363699,1.059912,1.8147196,-1.7687598,-0.13800562,0.97475967,-0.98230246,-1.9227466,-1 114 | -0.6988570200000001,0.90363699,1.059912,1.8147196,-1.7687598,-0.13800562,0.97475967,0.62937942,-1.9227466,-1 115 | -0.6988570200000001,0.90363699,1.5238351,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,0.51629307,1 116 | -0.6988570200000001,0.90363699,1.5238351,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.98230246,0.51629307,-1 117 | -0.6988570200000001,0.90363699,1.5238351,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.17646152,0.51629307,-1 118 | 
-0.6988570200000001,0.90363699,1.5238351,0.29876482,-1.7687598,1.2124779,0.97475967,-0.98230246,-1.9227466,-1 119 | -0.6988570200000001,0.90363699,1.9877582,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,-1.9227466,-1 120 | -0.6988570200000001,0.90363699,2.4516814,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 121 | -0.6988570200000001,0.90363699,2.4516814,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.17646152,-1.9227466,1 122 | 0.29847018,-2.6847186,-0.79578056,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 123 | 0.29847018,-2.6847186,0.59598884,-0.45921259,0.5612411,1.2124779,0.97475967,-0.98230246,0.51629307,-1 124 | 0.29847018,-0.8905408,-2.18755,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,0.51629307,-1 125 | 0.29847018,-0.8905408,-2.18755,-0.45921259,0.5612411,-1.4884892,0.97475967,2.2410613,0.51629307,-1 126 | 0.29847018,-0.8905408,-2.18755,-0.45921259,0.5612411,-0.13800562,-1.0184056,2.2410613,0.51629307,-1 127 | 0.29847018,-0.8905408,-1.7236268,-0.45921259,0.5612411,-0.13800562,0.97475967,0.62937942,0.51629307,-1 128 | 0.29847018,-0.8905408,-1.2597037,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.98230246,0.51629307,-1 129 | 0.29847018,-0.8905408,-1.2597037,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,0.51629307,-1 130 | 0.29847018,-0.8905408,-1.2597037,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.98230246,0.51629307,-1 131 | 0.29847018,-0.8905408,-1.2597037,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 132 | 0.29847018,-0.8905408,-1.2597037,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.17646152,0.51629307,-1 133 | 0.29847018,-0.8905408,-0.79578056,-0.45921259,-1.7687598,-0.13800562,-1.0184056,2.2410613,-1.9227466,-1 134 | 0.29847018,-0.8905408,-0.79578056,-0.45921259,0.5612411,-1.4884892,0.97475967,2.2410613,0.51629307,-1 135 | 0.29847018,-0.8905408,-0.79578056,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.17646152,0.51629307,-1 136 | 
0.29847018,-0.8905408,-0.79578056,-0.45921259,0.5612411,-0.13800562,0.97475967,0.62937942,0.51629307,-1 137 | 0.29847018,-0.8905408,-0.33185742,-0.45921259,-1.7687598,-0.13800562,0.97475967,-0.98230246,0.51629307,-1 138 | 0.29847018,-0.8905408,-0.33185742,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.17646152,0.51629307,-1 139 | 0.29847018,-0.8905408,-0.33185742,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.98230246,0.51629307,1 140 | 0.29847018,-0.8905408,-0.33185742,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.98230246,0.51629307,-1 141 | 0.29847018,-0.8905408,-0.33185742,-0.45921259,0.5612411,-0.13800562,0.97475967,2.2410613,0.51629307,1 142 | 0.29847018,-0.8905408,-0.33185742,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.98230246,0.51629307,-1 143 | 0.29847018,-0.8905408,-0.33185742,-0.45921259,0.5612411,1.2124779,0.97475967,-0.98230246,0.51629307,-1 144 | 0.29847018,-0.8905408,-0.33185742,0.29876482,-1.7687598,-0.13800562,0.97475967,-0.98230246,0.51629307,-1 145 | 0.29847018,-0.8905408,-0.33185742,0.29876482,-1.7687598,1.2124779,0.97475967,0.62937942,0.51629307,1 146 | 0.29847018,-0.8905408,0.13206571,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,0.51629307,-1 147 | 0.29847018,-0.8905408,0.13206571,-0.45921259,0.5612411,-1.4884892,-1.0184056,1.4352204,0.51629307,-1 148 | 0.29847018,-0.8905408,0.13206571,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 149 | 0.29847018,-0.8905408,0.13206571,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.98230246,0.51629307,-1 150 | 0.29847018,-0.8905408,0.13206571,-0.45921259,0.5612411,1.2124779,-1.0184056,0.62937942,0.51629307,-1 151 | 0.29847018,-0.8905408,0.13206571,0.29876482,-1.7687598,1.2124779,0.97475967,-0.98230246,0.51629307,-1 152 | 0.29847018,-0.8905408,0.13206571,1.0567422,0.5612411,1.2124779,-1.0184056,-0.17646152,-1.9227466,1 153 | 0.29847018,-0.8905408,0.13206571,3.3306745,-1.7687598,1.2124779,0.97475967,-0.98230246,0.51629307,-1 154 | 
0.29847018,-0.8905408,0.59598884,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.98230246,0.51629307,-1 155 | 0.29847018,-0.8905408,0.59598884,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.17646152,0.51629307,-1 156 | 0.29847018,-0.8905408,0.59598884,-0.45921259,0.5612411,-1.4884892,0.97475967,0.62937942,0.51629307,-1 157 | 0.29847018,-0.8905408,0.59598884,-0.45921259,0.5612411,-1.4884892,0.97475967,2.2410613,0.51629307,-1 158 | 0.29847018,-0.8905408,0.59598884,-0.45921259,0.5612411,1.2124779,0.97475967,-0.17646152,0.51629307,-1 159 | 0.29847018,-0.8905408,0.59598884,0.29876482,0.5612411,1.2124779,-1.0184056,-0.17646152,0.51629307,1 160 | 0.29847018,-0.8905408,0.59598884,0.29876482,0.5612411,1.2124779,0.97475967,-0.98230246,0.51629307,1 161 | 0.29847018,-0.8905408,0.59598884,1.0567422,-1.7687598,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 162 | 0.29847018,-0.8905408,0.59598884,1.0567422,-1.7687598,-0.13800562,-1.0184056,1.4352204,-1.9227466,1 163 | 0.29847018,-0.8905408,0.59598884,1.0567422,-1.7687598,1.2124779,-1.0184056,1.4352204,0.51629307,1 164 | 0.29847018,-0.8905408,0.59598884,1.8147196,-1.7687598,1.2124779,-1.0184056,1.4352204,-1.9227466,1 165 | 0.29847018,-0.8905408,1.059912,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.98230246,0.51629307,-1 166 | 0.29847018,-0.8905408,1.059912,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,1 167 | 0.29847018,-0.8905408,1.059912,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.98230246,0.51629307,-1 168 | 0.29847018,-0.8905408,1.059912,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.17646152,0.51629307,-1 169 | 0.29847018,-0.8905408,1.059912,3.3306745,0.5612411,1.2124779,-1.0184056,-0.17646152,0.51629307,-1 170 | 0.29847018,-0.8905408,1.5238351,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 171 | 0.29847018,-0.8905408,1.5238351,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.98230246,0.51629307,-1 172 | 
0.29847018,-0.8905408,1.5238351,-0.45921259,0.5612411,1.2124779,-1.0184056,0.62937942,0.51629307,-1 173 | 0.29847018,-0.8905408,1.5238351,-0.45921259,0.5612411,1.2124779,0.97475967,-0.98230246,0.51629307,-1 174 | 0.29847018,-0.8905408,1.5238351,0.29876482,-1.7687598,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 175 | 0.29847018,-0.8905408,1.5238351,1.0567422,-1.7687598,1.2124779,-1.0184056,-0.17646152,-1.9227466,1 176 | 0.29847018,-0.8905408,2.4516814,-0.45921259,0.5612411,-1.4884892,0.97475967,0.62937942,0.51629307,-1 177 | 0.29847018,0.90363699,-1.2597037,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,0.51629307,-1 178 | 0.29847018,0.90363699,-1.2597037,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.98230246,0.51629307,-1 179 | 0.29847018,0.90363699,-1.2597037,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.17646152,0.51629307,-1 180 | 0.29847018,0.90363699,-1.2597037,0.29876482,0.5612411,-1.4884892,0.97475967,-0.98230246,0.51629307,-1 181 | 0.29847018,0.90363699,-0.79578056,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,0.51629307,-1 182 | 0.29847018,0.90363699,-0.79578056,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,1 183 | 0.29847018,0.90363699,-0.79578056,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.17646152,0.51629307,-1 184 | 0.29847018,0.90363699,-0.79578056,-0.45921259,0.5612411,-0.13800562,0.97475967,1.4352204,0.51629307,-1 185 | 0.29847018,0.90363699,-0.33185742,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,0.51629307,-1 186 | 0.29847018,0.90363699,-0.33185742,0.29876482,-1.7687598,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 187 | 0.29847018,0.90363699,0.13206571,-0.45921259,-1.7687598,-0.13800562,-1.0184056,-0.98230246,0.51629307,-1 188 | 0.29847018,0.90363699,0.13206571,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,0.51629307,-1 189 | 0.29847018,0.90363699,0.13206571,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.98230246,0.51629307,-1 190 | 
0.29847018,0.90363699,0.13206571,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.98230246,0.51629307,1 191 | 0.29847018,0.90363699,0.13206571,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 192 | 0.29847018,0.90363699,0.13206571,-0.45921259,0.5612411,-0.13800562,-1.0184056,0.62937942,0.51629307,1 193 | 0.29847018,0.90363699,0.13206571,-0.45921259,0.5612411,-0.13800562,0.97475967,1.4352204,0.51629307,-1 194 | 0.29847018,0.90363699,0.13206571,-0.45921259,0.5612411,1.2124779,0.97475967,-0.17646152,-1.9227466,1 195 | 0.29847018,0.90363699,0.13206571,0.29876482,-1.7687598,-0.13800562,-1.0184056,-0.17646152,-1.9227466,-1 196 | 0.29847018,0.90363699,0.13206571,0.29876482,-1.7687598,1.2124779,-1.0184056,-0.17646152,-1.9227466,1 197 | 0.29847018,0.90363699,0.13206571,0.29876482,0.5612411,-0.13800562,0.97475967,-0.98230246,-1.9227466,-1 198 | 0.29847018,0.90363699,0.59598884,-0.45921259,0.5612411,-1.4884892,-1.0184056,2.2410613,0.51629307,-1 199 | 0.29847018,0.90363699,0.59598884,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.17646152,0.51629307,-1 200 | 0.29847018,0.90363699,0.59598884,-0.45921259,0.5612411,1.2124779,-1.0184056,0.62937942,0.51629307,1 201 | 0.29847018,0.90363699,0.59598884,-0.45921259,0.5612411,1.2124779,0.97475967,-0.98230246,-1.9227466,1 202 | 0.29847018,0.90363699,0.59598884,0.29876482,-1.7687598,-0.13800562,-1.0184056,-0.17646152,-1.9227466,-1 203 | 0.29847018,0.90363699,1.059912,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.98230246,0.51629307,-1 204 | 0.29847018,0.90363699,1.059912,3.3306745,-1.7687598,1.2124779,0.97475967,0.62937942,0.51629307,1 205 | 0.29847018,0.90363699,1.5238351,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.98230246,0.51629307,-1 206 | 0.29847018,0.90363699,2.4516814,-0.45921259,-1.7687598,-0.13800562,0.97475967,-0.98230246,-1.9227466,-1 207 | 0.29847018,0.90363699,2.4516814,1.8147196,-1.7687598,-0.13800562,0.97475967,-0.98230246,0.51629307,1 208 | 
1.2957974,-2.6847186,-1.2597037,-0.45921259,0.5612411,-1.4884892,-1.0184056,0.62937942,0.51629307,-1 209 | 1.2957974,-2.6847186,0.59598884,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,0.51629307,-1 210 | 1.2957974,-0.8905408,-1.7236268,-0.45921259,0.5612411,-1.4884892,-1.0184056,2.2410613,0.51629307,-1 211 | 1.2957974,-0.8905408,-1.2597037,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.98230246,0.51629307,-1 212 | 1.2957974,-0.8905408,-1.2597037,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,0.51629307,-1 213 | 1.2957974,-0.8905408,-1.2597037,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.17646152,0.51629307,-1 214 | 1.2957974,-0.8905408,-1.2597037,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 215 | 1.2957974,-0.8905408,-1.2597037,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.98230246,-1.9227466,-1 216 | 1.2957974,-0.8905408,-1.2597037,1.0567422,-1.7687598,1.2124779,-1.0184056,-0.98230246,-1.9227466,1 217 | 1.2957974,-0.8905408,-0.79578056,-0.45921259,0.5612411,-1.4884892,-1.0184056,1.4352204,0.51629307,-1 218 | 1.2957974,-0.8905408,-0.79578056,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.98230246,0.51629307,-1 219 | 1.2957974,-0.8905408,-0.79578056,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.98230246,-1.9227466,-1 220 | 1.2957974,-0.8905408,-0.79578056,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 221 | 1.2957974,-0.8905408,-0.79578056,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.98230246,0.51629307,-1 222 | 1.2957974,-0.8905408,-0.79578056,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.17646152,0.51629307,-1 223 | 1.2957974,-0.8905408,-0.79578056,-0.45921259,0.5612411,1.2124779,0.97475967,-0.98230246,-1.9227466,-1 224 | 1.2957974,-0.8905408,-0.33185742,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.17646152,0.51629307,-1 225 | 1.2957974,-0.8905408,-0.33185742,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.98230246,0.51629307,1 226 | 
1.2957974,-0.8905408,-0.33185742,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.98230246,0.51629307,-1 227 | 1.2957974,-0.8905408,-0.33185742,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.98230246,0.51629307,-1 228 | 1.2957974,-0.8905408,-0.33185742,-0.45921259,0.5612411,1.2124779,0.97475967,-0.17646152,0.51629307,1 229 | 1.2957974,-0.8905408,-0.33185742,0.29876482,0.5612411,-0.13800562,-1.0184056,-0.17646152,-1.9227466,1 230 | 1.2957974,-0.8905408,-0.33185742,5.6046067,-1.7687598,1.2124779,-1.0184056,-0.17646152,-1.9227466,1 231 | 1.2957974,-0.8905408,0.13206571,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 232 | 1.2957974,-0.8905408,0.13206571,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.17646152,0.51629307,-1 233 | 1.2957974,-0.8905408,0.13206571,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.98230246,0.51629307,1 234 | 1.2957974,-0.8905408,0.13206571,-0.45921259,0.5612411,1.2124779,-1.0184056,1.4352204,-1.9227466,1 235 | 1.2957974,-0.8905408,0.13206571,-0.45921259,0.5612411,1.2124779,0.97475967,-0.98230246,0.51629307,-1 236 | 1.2957974,-0.8905408,0.13206571,-0.45921259,0.5612411,1.2124779,0.97475967,-0.17646152,0.51629307,-1 237 | 1.2957974,-0.8905408,0.13206571,0.29876482,0.5612411,-0.13800562,0.97475967,0.62937942,0.51629307,1 238 | 1.2957974,-0.8905408,0.59598884,-0.45921259,-1.7687598,-0.13800562,0.97475967,0.62937942,-1.9227466,1 239 | 1.2957974,-0.8905408,0.59598884,-0.45921259,0.5612411,-1.4884892,-1.0184056,-0.98230246,0.51629307,-1 240 | 1.2957974,-0.8905408,0.59598884,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.98230246,0.51629307,-1 241 | 1.2957974,-0.8905408,0.59598884,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.98230246,0.51629307,-1 242 | 1.2957974,-0.8905408,0.59598884,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,-1.9227466,-1 243 | 1.2957974,-0.8905408,0.59598884,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 244 | 
1.2957974,-0.8905408,0.59598884,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.17646152,0.51629307,-1 245 | 1.2957974,-0.8905408,0.59598884,-0.45921259,0.5612411,1.2124779,0.97475967,-0.98230246,-1.9227466,1 246 | 1.2957974,-0.8905408,0.59598884,-0.45921259,0.5612411,1.2124779,0.97475967,2.2410613,0.51629307,1 247 | 1.2957974,-0.8905408,0.59598884,0.29876482,-1.7687598,-0.13800562,-1.0184056,2.2410613,-1.9227466,1 248 | 1.2957974,-0.8905408,0.59598884,0.29876482,-1.7687598,1.2124779,-1.0184056,-0.17646152,0.51629307,-1 249 | 1.2957974,-0.8905408,0.59598884,1.0567422,-1.7687598,-0.13800562,0.97475967,0.62937942,0.51629307,-1 250 | 1.2957974,-0.8905408,1.059912,1.0567422,-1.7687598,1.2124779,-1.0184056,-0.17646152,0.51629307,1 251 | 1.2957974,-0.8905408,1.5238351,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.17646152,0.51629307,1 252 | 1.2957974,-0.8905408,1.5238351,0.29876482,-1.7687598,1.2124779,0.97475967,-0.17646152,0.51629307,1 253 | 1.2957974,-0.8905408,1.5238351,0.29876482,0.5612411,-0.13800562,0.97475967,-0.98230246,-1.9227466,-1 254 | 1.2957974,-0.8905408,1.9877582,-0.45921259,0.5612411,-1.4884892,0.97475967,0.62937942,-1.9227466,1 255 | 1.2957974,-0.8905408,1.9877582,1.0567422,-1.7687598,1.2124779,-1.0184056,2.2410613,0.51629307,-1 256 | 1.2957974,-0.8905408,2.4516814,-0.45921259,0.5612411,-0.13800562,-1.0184056,-0.17646152,0.51629307,-1 257 | 1.2957974,-0.8905408,2.4516814,-0.45921259,0.5612411,-0.13800562,0.97475967,-0.98230246,-1.9227466,-1 258 | 1.2957974,-0.8905408,2.4516814,-0.45921259,0.5612411,1.2124779,0.97475967,-0.98230246,0.51629307,1 259 | 2.2931246,-0.8905408,-2.18755,-0.45921259,0.5612411,-1.4884892,-1.0184056,1.4352204,0.51629307,-1 260 | 2.2931246,-0.8905408,-1.2597037,-0.45921259,0.5612411,-0.13800562,-1.0184056,2.2410613,0.51629307,-1 261 | 2.2931246,-0.8905408,-0.33185742,-0.45921259,0.5612411,1.2124779,-1.0184056,-0.98230246,0.51629307,-1 262 | 
2.2931246,-0.8905408,1.5238351,-0.45921259,0.5612411,-1.4884892,0.97475967,-0.98230246,0.51629307,-1 263 | 2.2931246,-0.8905408,1.5238351,-0.45921259,0.5612411,-1.4884892,0.97475967,0.62937942,0.51629307,-1 264 | -------------------------------------------------------------------------------- /data/Processed_Flare.data: -------------------------------------------------------------------------------- 1 | 1,1,1,1,1,1,0,0,0,1 2 | 1,1,1,1,2,1,0,0,0,-1 3 | 1,1,1,1,2,1,0,0,0,1 4 | 1,1,1,1,2,1,1,0,0,1 5 | 1,1,1,2,2,1,0,0,0,-1 6 | 1,1,1,2,2,1,0,0,0,1 7 | 1,1,1,2,2,1,1,0,0,1 8 | 1,1,1,2,2,1,1,1,0,1 9 | 1,1,1,2,2,1,3,0,0,-1 10 | 1,1,2,2,2,1,0,0,0,1 11 | 1,1,3,2,2,1,0,0,0,-1 12 | 1,2,1,1,1,1,0,0,0,1 13 | 1,2,1,1,1,1,0,1,0,1 14 | 1,2,1,1,1,1,1,0,0,1 15 | 1,2,1,1,2,1,0,0,0,-1 16 | 1,2,1,1,2,1,0,0,0,1 17 | 1,2,1,1,2,1,1,0,0,-1 18 | 1,2,1,1,2,1,1,0,0,1 19 | 1,2,1,1,2,1,2,0,0,-1 20 | 1,2,1,1,2,1,2,0,0,1 21 | 1,2,1,1,2,1,3,0,0,-1 22 | 1,2,1,2,1,1,0,0,0,1 23 | 1,2,1,2,2,1,0,0,0,-1 24 | 1,2,1,2,2,1,0,0,0,1 25 | 1,2,1,2,2,1,0,1,0,-1 26 | 1,2,1,2,2,1,0,1,0,1 27 | 1,2,1,2,2,1,0,3,0,1 28 | 1,2,1,2,2,1,1,0,0,-1 29 | 1,2,1,2,2,1,1,0,0,1 30 | 1,2,1,2,2,1,2,0,0,-1 31 | 1,2,1,2,2,1,2,0,0,1 32 | 1,2,1,2,2,1,3,0,0,-1 33 | 1,2,1,2,2,1,3,0,0,1 34 | 1,2,1,2,2,1,4,0,0,1 35 | 1,2,1,2,2,1,4,1,0,-1 36 | 1,2,1,2,2,2,0,0,0,1 37 | 1,2,1,2,2,2,0,1,1,1 38 | 1,2,1,2,2,2,1,0,0,1 39 | 1,2,2,2,2,1,0,0,0,1 40 | 1,3,1,1,1,1,0,0,0,1 41 | 1,3,1,1,1,1,1,0,0,1 42 | 1,3,1,1,1,1,2,0,0,1 43 | 1,3,1,1,2,1,0,0,0,-1 44 | 1,3,1,1,2,1,0,0,0,1 45 | 1,3,1,1,2,1,0,1,0,-1 46 | 1,3,1,1,2,1,0,1,0,1 47 | 1,3,1,1,2,1,0,2,0,1 48 | 1,3,1,1,2,1,1,0,0,-1 49 | 1,3,1,1,2,1,1,0,0,1 50 | 1,3,1,1,2,1,1,1,0,-1 51 | 1,3,1,1,2,1,2,0,0,-1 52 | 1,3,1,1,2,1,2,0,0,1 53 | 1,3,1,1,2,1,2,1,0,-1 54 | 1,3,1,1,2,1,2,2,0,-1 55 | 1,3,1,1,2,1,3,0,0,-1 56 | 1,3,1,1,2,1,3,1,0,1 57 | 1,3,1,1,2,1,5,0,0,-1 58 | 1,3,1,1,2,1,6,0,0,-1 59 | 1,3,1,2,1,1,0,0,0,1 60 | 1,3,1,2,2,1,0,0,0,-1 61 | 1,3,1,2,2,1,0,0,0,1 62 | 1,3,1,2,2,1,0,1,0,1 63 | 1,3,1,2,2,1,1,0,0,-1 64 | 
1,3,1,2,2,1,1,0,0,1 65 | 1,3,1,2,2,1,1,1,0,1 66 | 1,3,1,2,2,1,2,0,0,1 67 | 1,3,1,2,2,1,2,1,0,-1 68 | 1,3,1,2,2,1,3,0,0,1 69 | 1,3,1,2,2,1,4,0,0,1 70 | 1,3,1,2,2,1,5,1,0,1 71 | 1,3,1,2,2,2,0,0,0,-1 72 | 1,3,1,2,2,2,0,0,0,1 73 | 1,3,1,2,2,2,1,0,0,1 74 | 1,3,1,2,2,2,2,1,0,1 75 | 2,1,1,1,2,1,0,0,0,-1 76 | 2,1,1,2,2,1,0,0,0,-1 77 | 2,1,1,2,2,1,0,0,0,1 78 | 2,1,3,2,2,1,5,0,0,1 79 | 2,2,1,1,1,1,0,0,0,1 80 | 2,2,1,1,1,1,1,0,0,1 81 | 2,2,1,1,2,1,0,0,0,-1 82 | 2,2,1,1,2,1,0,0,0,1 83 | 2,2,1,1,2,1,1,0,0,-1 84 | 2,2,1,1,2,1,8,0,0,1 85 | 2,2,1,2,2,1,0,0,0,-1 86 | 2,2,1,2,2,1,0,0,0,1 87 | 2,2,1,2,2,1,0,1,0,1 88 | 2,2,1,2,2,1,1,0,0,-1 89 | 2,2,1,2,2,1,1,0,0,1 90 | 2,2,1,2,2,1,1,0,1,1 91 | 2,2,1,2,2,1,2,0,0,-1 92 | 2,2,1,2,2,1,2,0,0,1 93 | 2,2,1,2,2,1,3,0,0,1 94 | 2,2,1,2,2,1,4,0,0,-1 95 | 2,2,1,2,2,1,4,0,0,1 96 | 2,2,1,2,2,2,1,0,0,1 97 | 2,2,2,1,2,1,0,0,0,-1 98 | 2,2,2,2,2,1,0,0,0,-1 99 | 2,2,2,2,2,1,1,1,0,-1 100 | 2,2,2,2,2,2,0,0,0,1 101 | 2,2,2,2,2,2,4,0,0,1 102 | 2,2,3,2,2,1,0,0,0,-1 103 | 2,2,3,2,2,1,0,0,0,1 104 | 2,2,3,2,2,1,1,0,0,-1 105 | 2,2,3,2,2,2,0,0,0,1 106 | 2,2,3,2,2,2,2,2,0,1 107 | 2,3,1,1,2,1,0,0,0,-1 108 | 2,3,1,1,2,1,0,0,0,1 109 | 2,3,1,1,2,1,1,0,0,-1 110 | 2,3,1,1,2,1,1,0,0,1 111 | 2,3,1,1,2,1,2,0,0,1 112 | 2,3,1,1,2,1,3,0,0,1 113 | 2,3,1,2,2,1,0,0,0,-1 114 | 2,3,1,2,2,1,0,0,0,1 115 | 2,3,1,2,2,1,0,1,0,1 116 | 2,3,1,2,2,1,1,0,0,-1 117 | 2,3,1,2,2,1,1,0,0,1 118 | 2,3,1,2,2,1,2,0,0,1 119 | 2,3,1,2,2,1,3,0,0,1 120 | 2,3,1,2,2,1,3,1,0,1 121 | 2,3,1,2,2,1,4,4,0,1 122 | 2,3,1,2,2,1,5,0,0,-1 123 | 2,3,1,2,2,2,0,0,0,1 124 | 2,3,1,2,2,2,1,0,0,1 125 | 2,3,1,2,2,2,1,1,0,1 126 | 2,3,1,2,2,2,1,3,2,1 127 | 2,3,1,2,2,2,2,0,0,1 128 | 2,3,1,2,2,2,2,1,0,1 129 | 2,3,1,2,2,2,3,0,0,1 130 | 2,3,2,1,2,1,0,0,0,1 131 | 2,3,2,1,2,1,1,0,0,-1 132 | 2,3,2,2,2,1,0,0,0,1 133 | 2,3,2,2,2,1,2,0,0,1 134 | 2,3,3,1,2,1,0,0,0,-1 135 | 2,3,3,1,2,1,0,1,0,-1 136 | 2,3,3,1,2,1,1,0,0,1 137 | 2,3,3,2,2,1,1,0,0,1 138 | 2,3,3,2,2,1,2,0,0,-1 139 | 2,3,3,2,2,1,2,0,0,1 140 | 2,3,3,2,2,1,6,0,0,1 141 | 
2,3,3,2,2,2,0,5,1,1 142 | 2,3,3,2,2,2,1,0,0,1 143 | 2,3,3,2,2,2,1,0,1,1 144 | 2,3,3,2,2,2,6,0,0,1 145 | -------------------------------------------------------------------------------- /data/Processed_Heart.data: -------------------------------------------------------------------------------- 1 | -2.79209,0.68822166,-1.2357503,-0.075270067,-0.88339298,-0.41625584,0.97984406,2.2586058,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 2 | -2.2431863,-1.4476387,-1.2357503,-0.74710207,-0.76730792,-0.41625584,-1.0243824,1.8269334,-0.69992253,-0.30562085,-0.9524656,-0.71021606,-0.87408263,-1 3 | -2.2431863,0.68822166,-2.2882822,-0.74710207,-1.3090382,-0.41625584,0.97984406,1.0499231,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 4 | -2.1334056,-1.4476387,0.8693133199999999,0.37261793,-1.2896907,-0.41625584,-1.0243824,1.395261,-0.69992253,0.30562085,-0.9524656,-0.71021606,-0.87408263,-1 5 | -2.1334056,0.68822166,0.8693133199999999,-0.63513007,-0.99947805,-0.41625584,-1.0243824,-0.8494353100000001,1.423438,0.48026133,0.67516549,-0.71021606,1.1870729,1 6 | -2.1334056,0.68822166,0.8693133199999999,-0.29921407,0.62571281,-0.41625584,0.97984406,0.27291287,1.423438,-0.91686254,-0.9524656,-0.71021606,1.1870729,1 7 | -1.9138441,-1.4476387,-0.1832185,-0.63513007,-0.67057037,-0.41625584,-1.0243824,0.8772542,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 8 | -1.9138441,0.68822166,-0.1832185,-0.075270067,0.006592485,-0.41625584,-1.0243824,1.6110972,-0.69992253,2.1393459,2.3027966,-0.71021606,-0.87408263,-1 9 | -1.8040634,0.68822166,-2.2882822,-0.63513007,-0.36101021,-0.41625584,-1.0243824,1.395261,1.423438,2.4013066,0.67516549,-0.71021606,1.1870729,1 10 | -1.6942826,-1.4476387,-0.1832185,-2.0907661,-0.98013054,-0.41625584,-1.0243824,1.2657593,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 11 | 
-1.6942826,0.68822166,-0.1832185,0.48458993,1.3802657,-0.41625584,0.97984406,1.395261,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 12 | -1.6942826,0.68822166,0.8693133199999999,-0.74710207,-0.59318033,-0.41625584,-1.0243824,-0.41776293,-0.69992253,0.13098036,0.67516549,-0.71021606,1.1870729,1 13 | -1.5845019,0.68822166,-2.2882822,0.48458993,-0.98013054,-0.41625584,-1.0243824,1.2225921,1.423438,0.30562085,-0.9524656,-0.71021606,1.1870729,-1 14 | -1.5845019,0.68822166,0.8693133199999999,-1.1949901,-1.5992509,-0.41625584,0.97984406,-1.5401111,1.423438,0.8295423,0.67516549,-0.71021606,1.1870729,1 15 | -1.5845019,0.68822166,0.8693133199999999,1.1564219,-0.51579029,-0.41625584,-1.0243824,1.3520938,-0.69992253,-0.91686254,-0.9524656,-0.71021606,1.1870729,1 16 | -1.4747212,-1.4476387,-1.2357503,-1.4749201,-0.99947805,-0.41625584,-1.0243824,0.79091972,-0.69992253,-0.91686254,-0.9524656,0.34922226,-0.87408263,-1 17 | -1.4747212,-1.4476387,-1.2357503,-0.29921407,1.0900531,-0.41625584,-1.0243824,0.57508353,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 18 | -1.4747212,-1.4476387,-1.2357503,-0.075270067,-0.88339298,-0.41625584,0.97984406,0.96358867,-0.69992253,0.30562085,-0.9524656,-0.71021606,-0.87408263,-1 19 | -1.4747212,-1.4476387,-0.1832185,-1.0830181,0.35484767,-0.41625584,0.97984406,0.96358867,1.423438,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 20 | -1.4747212,0.68822166,-1.2357503,-1.1949901,-0.28362017,-0.41625584,-1.0243824,0.14341116,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 21 | -1.4747212,0.68822166,-1.2357503,0.20465993,-0.90274049,-0.41625584,-1.0243824,-0.76310083,-0.69992253,-0.91686254,0.67516549,-0.71021606,0.67178403,-1 22 | -1.4747212,0.68822166,-0.1832185,-1.0830181,0.006592485,-0.41625584,-1.0243824,1.2657593,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 23 | 
-1.4747212,0.68822166,-0.1832185,-0.075270067,-0.68991788,-0.41625584,0.97984406,0.79091972,-0.69992253,0.8295423,0.67516549,-0.71021606,-0.87408263,-1 24 | -1.4747212,0.68822166,0.8693133199999999,-1.1949901,-1.5025133,-0.41625584,0.97984406,0.35924734,-0.69992253,-0.91686254,-0.9524656,-0.71021606,1.1870729,1 25 | -1.3649404,-1.4476387,-0.1832185,-0.63513007,-0.78665543,-0.41625584,-1.0243824,1.0067559,-0.69992253,-0.91686254,0.67516549,-0.71021606,-0.87408263,-1 26 | -1.3649404,-1.4476387,0.8693133199999999,-1.6428781,0.29680514,-0.41625584,0.97984406,-1.1947732,-0.69992253,-0.39294109,0.67516549,-0.71021606,-0.87408263,-1 27 | -1.3649404,0.68822166,-2.2882822,0.93247793,-0.10949258,-0.41625584,0.97984406,1.2225921,-0.69992253,-0.2183006,-0.9524656,1.4086606,-0.87408263,-1 28 | -1.3649404,0.68822166,-1.2357503,-0.63513007,0.8772304399999999,-0.41625584,-1.0243824,0.53191629,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 29 | -1.3649404,0.68822166,-0.1832185,-0.63513007,-0.18688262,2.3934711,-1.0243824,1.9132679,-0.69992253,-0.2183006,2.3027966,-0.71021606,1.1870729,-1 30 | -1.3649404,0.68822166,-0.1832185,-0.075270067,-1.3477332,-0.41625584,-1.0243824,0.013909443,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 31 | -1.3649404,0.68822166,0.8693133199999999,0.26064593,1.2641806,-0.41625584,-1.0243824,-1.0652715,1.423438,0.65490181,0.67516549,-0.71021606,0.67178403,1 32 | -1.3649404,0.68822166,0.8693133199999999,0.48458993,-0.45774776,-0.41625584,-1.0243824,1.2225921,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 33 | -1.2551597,-1.4476387,-0.1832185,-0.52315807,-0.70926539,-0.41625584,-1.0243824,0.66141801,-0.69992253,-0.74222205,0.67516549,-0.71021606,-0.87408263,-1 34 | -1.2551597,-1.4476387,0.8693133199999999,0.036701933,1.7672159,2.3934711,0.97984406,-0.59043188,1.423438,1.7027447,0.67516549,-0.71021606,1.1870729,1 35 | 
-1.2551597,0.68822166,-0.1832185,-0.075270067,1.2641806,-0.41625584,-1.0243824,0.53191629,-0.69992253,0.74222205,-0.9524656,0.34922226,-0.87408263,-1 36 | -1.2551597,0.68822166,0.8693133199999999,-1.1949901,-0.74796041,-0.41625584,-1.0243824,0.48874906,-0.69992253,-0.91686254,-0.9524656,-0.71021606,1.1870729,-1 37 | -1.2551597,0.68822166,0.8693133199999999,-0.91506006,1.0320105,-0.41625584,-1.0243824,1.3520938,-0.69992253,0.13098036,0.67516549,-0.71021606,-0.87408263,-1 38 | -1.2551597,0.68822166,0.8693133199999999,-0.63513007,-1.4057758,-0.41625584,0.97984406,-1.2811077,1.423438,1.2661435,0.67516549,-0.71021606,1.1870729,1 39 | -1.2551597,0.68822166,0.8693133199999999,1.0444499,-0.051450046,-0.41625584,-1.0243824,0.92042143,-0.69992253,0.39294109,-0.9524656,-0.71021606,-0.87408263,-1 40 | -1.145379,-1.4476387,-0.1832185,-1.3069621,-2.1022861,-0.41625584,-1.0243824,1.0930904,-0.69992253,-0.39294109,0.67516549,-0.71021606,-0.87408263,-1 41 | -1.145379,-1.4476387,-0.1832185,-0.74710207,-0.1481876,-0.41625584,-1.0243824,-0.029257794,-0.69992253,-0.65490181,0.67516549,0.34922226,-0.87408263,-1 42 | -1.145379,0.68822166,-1.2357503,-0.63513007,-0.57383282,-0.41625584,-1.0243824,0.8772542,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 43 | -1.145379,0.68822166,-1.2357503,-0.63513007,0.25811012,-0.41625584,-1.0243824,1.0067559,-0.69992253,-0.91686254,-0.9524656,-0.71021606,1.1870729,-1 44 | -1.145379,0.68822166,-1.2357503,-0.075270067,-0.59318033,-0.41625584,0.97984406,1.6542645,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 45 | -1.145379,0.68822166,-0.1832185,-0.63513007,-0.45774776,-0.41625584,-1.0243824,0.83408696,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 46 | -1.145379,0.68822166,-0.1832185,-0.075270067,-0.32231519,-0.41625584,-1.0243824,1.2657593,1.423438,-0.56758157,-0.9524656,-0.71021606,-0.87408263,-1 47 | 
-1.145379,0.68822166,-0.1832185,0.48458993,-0.28362017,-0.41625584,0.97984406,1.3089266,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 48 | -1.145379,0.68822166,0.8693133199999999,-1.1949901,-1.0188256,-0.41625584,0.97984406,1.1794249,-0.69992253,-0.91686254,-0.9524656,0.34922226,-0.87408263,1 49 | -1.145379,0.68822166,0.8693133199999999,-1.0830181,0.78049289,-0.41625584,0.97984406,0.14341116,-0.69992253,-0.91686254,-0.9524656,0.34922226,-0.87408263,1 50 | -1.0355982,-1.4476387,-1.2357503,-1.0830181,-1.7346834,-0.41625584,-1.0243824,-0.50409741,-0.69992253,-0.91686254,0.67516549,-0.71021606,-0.87408263,-1 51 | -1.0355982,-1.4476387,-1.2357503,-0.075270067,-0.30296768,-0.41625584,0.97984406,1.0930904,-0.69992253,-0.39294109,0.67516549,-0.71021606,-0.87408263,-1 52 | -1.0355982,-1.4476387,0.8693133199999999,0.37261793,-0.26427266,-0.41625584,0.97984406,0.10024392,1.423438,-0.74222205,0.67516549,-0.71021606,-0.87408263,-1 53 | -1.0355982,0.68822166,-1.2357503,-0.18724207,1.1287481,-0.41625584,0.97984406,0.8772542,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 54 | -1.0355982,0.68822166,0.8693133199999999,-1.5309061,-0.8060029399999999,-0.41625584,0.97984406,-0.072425032,1.423438,1.7027447,0.67516549,-0.71021606,-0.87408263,-1 55 | -1.0355982,0.68822166,0.8693133199999999,-0.91506006,0.20006759,-0.41625584,0.97984406,1.5247628,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 56 | -1.0355982,0.68822166,0.8693133199999999,0.59656193,1.1480956,-0.41625584,0.97984406,-0.11559227,1.423438,-0.91686254,0.67516549,2.4680989,1.1870729,1 57 | -0.9258175199999999,-1.4476387,-1.2357503,-1.4749201,-0.88339298,-0.41625584,-1.0243824,0.96358867,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 58 | -0.9258175199999999,-1.4476387,-0.1832185,0.59656193,-1.4057758,-0.41625584,0.97984406,0.44558182,1.423438,0.30562085,2.3027966,-0.71021606,-0.87408263,-1 59 | 
-0.9258175199999999,-1.4476387,0.8693133199999999,0.37261793,-0.12884009,-0.41625584,0.97984406,0.10024392,1.423438,-0.91686254,0.67516549,-0.71021606,-0.87408263,-1 60 | -0.9258175199999999,0.68822166,-1.2357503,-1.6988641,-1.0188256,2.3934711,-1.0243824,0.27291287,-0.69992253,-0.91686254,-0.9524656,-0.71021606,1.1870729,-1 61 | -0.9258175199999999,0.68822166,-0.1832185,1.0444499,-0.36101021,-0.41625584,-1.0243824,-0.11559227,-0.69992253,2.2266662,0.67516549,-0.71021606,-0.87408263,1 62 | -0.9258175199999999,0.68822166,0.8693133199999999,-0.63513007,-0.012755025,-0.41625584,0.97984406,-0.24509398,-0.69992253,-0.2183006,-0.9524656,-0.71021606,1.1870729,1 63 | -0.9258175199999999,0.68822166,0.8693133199999999,0.48458993,1.1867906,-0.41625584,-1.0243824,-1.2811077,1.423438,0.65490181,0.67516549,1.4086606,1.1870729,1 64 | -0.81603678,0.68822166,-0.1832185,-1.3069621,-0.12884009,-0.41625584,-1.0243824,0.10024392,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,1 65 | -0.81603678,0.68822166,-0.1832185,0.37261793,0.14202506,-0.41625584,0.97984406,0.27291287,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 66 | -0.81603678,0.68822166,0.8693133199999999,-1.1949901,0.49028024,-0.41625584,0.97984406,-1.3674422,1.423438,-0.043660121,0.67516549,0.34922226,-0.87408263,1 67 | -0.81603678,0.68822166,0.8693133199999999,-1.0830181,-0.88339298,-0.41625584,-1.0243824,-0.28826122,-0.69992253,-0.8295423,-0.9524656,-0.71021606,-0.87408263,-1 68 | -0.70625605,-1.4476387,-0.1832185,-0.075270067,0.49028024,-0.41625584,-1.0243824,-0.46093017,-0.69992253,-0.74222205,-0.9524656,-0.71021606,-0.87408263,-1 69 | -0.70625605,0.68822166,-1.2357503,-1.1949901,-0.39970523,-0.41625584,-1.0243824,0.79091972,-0.69992253,-0.043660121,2.3027966,-0.71021606,1.1870729,1 70 | -0.70625605,0.68822166,-1.2357503,-0.075270067,-0.090145066,-0.41625584,0.97984406,1.3089266,-0.69992253,-0.74222205,0.67516549,-0.71021606,-0.87408263,-1 71 | 
-0.70625605,0.68822166,-0.1832185,-0.41118607,0.10333004,2.3934711,-1.0243824,1.0930904,-0.69992253,-0.91686254,-0.9524656,1.4086606,-0.87408263,-1 72 | -0.70625605,0.68822166,0.8693133199999999,-0.52315807,-0.5351378,-0.41625584,0.97984406,1.56793,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 73 | -0.70625605,0.68822166,0.8693133199999999,-0.41118607,0.47093273,-0.41625584,0.97984406,0.70458524,-0.69992253,-0.48026133,0.67516549,-0.71021606,1.1870729,1 74 | -0.70625605,0.68822166,0.8693133199999999,-0.075270067,0.12267755,2.3934711,0.97984406,0.013909443,1.423438,-0.91686254,-0.9524656,1.4086606,1.1870729,1 75 | -0.59647532,-1.4476387,-1.2357503,0.14867393,0.4128902,-0.41625584,-1.0243824,0.53191629,-0.69992253,-0.91686254,0.67516549,-0.71021606,-0.87408263,-1 76 | -0.59647532,-1.4476387,0.8693133199999999,-0.075270067,0.37419518,-0.41625584,-1.0243824,0.57508353,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 77 | -0.59647532,0.68822166,-1.2357503,-0.075270067,0.31615265,-0.41625584,-1.0243824,0.92042143,-0.69992253,-0.39294109,-0.9524656,-0.71021606,-0.87408263,-1 78 | -0.59647532,0.68822166,-0.1832185,-0.74710207,-1.947506,-0.41625584,0.97984406,-1.0221043,-0.69992253,-0.2183006,-0.9524656,2.4680989,-0.87408263,1 79 | -0.59647532,0.68822166,-0.1832185,-0.63513007,-1.1929531,-0.41625584,-1.0243824,-0.46093017,-0.69992253,0.8295423,0.67516549,2.4680989,1.1870729,1 80 | -0.48669458,-1.4476387,-1.2357503,-0.63513007,-0.10949258,-0.41625584,-1.0243824,0.53191629,-0.69992253,0.043660121,-0.9524656,-0.71021606,-0.87408263,-1 81 | -0.48669458,-1.4476387,-0.1832185,-0.63513007,-0.59318033,-0.41625584,-1.0243824,0.35924734,-0.69992253,0.48026133,0.67516549,-0.71021606,-0.87408263,-1 82 | -0.48669458,-1.4476387,0.8693133199999999,-1.1949901,0.083982526,-0.41625584,0.97984406,0.40241458,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 83 | 
-0.48669458,0.68822166,-0.1832185,-0.13125607,-1.0381731,-0.41625584,-1.0243824,0.57508353,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 84 | -0.48669458,0.68822166,-0.1832185,0.48458993,-0.32231519,-0.41625584,-1.0243824,0.57508353,-0.69992253,-0.39294109,0.67516549,0.34922226,1.1870729,1 85 | -0.48669458,0.68822166,0.8693133199999999,0.70853393,-0.96078303,-0.41625584,0.97984406,-1.0221043,1.423438,-0.13098036,0.67516549,-0.71021606,1.1870729,1 86 | -0.48669458,0.68822166,0.8693133199999999,1.0444499,-0.12884009,-0.41625584,0.97984406,-0.93576978,-0.69992253,1.3534637,0.67516549,-0.71021606,1.1870729,1 87 | -0.37691385,-1.4476387,-0.1832185,-0.63513007,0.8772304399999999,-0.41625584,0.97984406,0.31608011,-0.69992253,-0.39294109,-0.9524656,-0.71021606,-0.87408263,-1 88 | -0.37691385,-1.4476387,-0.1832185,-0.075270067,0.12267755,-0.41625584,0.97984406,-0.029257794,-0.69992253,-0.48026133,-0.9524656,-0.71021606,-0.87408263,-1 89 | -0.37691385,-1.4476387,-0.1832185,0.48458993,1.1287481,-0.41625584,0.97984406,-0.33142846,-0.69992253,0.39294109,-0.9524656,0.34922226,-0.87408263,-1 90 | -0.37691385,-1.4476387,0.8693133199999999,-0.075270067,1.0707055,-0.41625584,-1.0243824,-0.33142846,1.423438,0.13098036,0.67516549,-0.71021606,1.1870729,1 91 | -0.37691385,0.68822166,-2.2882822,-0.35520007,-0.70926539,-0.41625584,0.97984406,-1.0652715,1.423438,0.30562085,-0.9524656,0.34922226,-0.87408263,-1 92 | -0.37691385,0.68822166,-0.1832185,-2.0907661,-0.43840025,-0.41625584,-1.0243824,0.18657839,1.423438,-0.91686254,-0.9524656,0.34922226,1.1870729,-1 93 | -0.37691385,0.68822166,-0.1832185,-1.7548501,-0.5351378,-0.41625584,-1.0243824,-0.28826122,1.423438,0.13098036,0.67516549,-0.71021606,-0.87408263,-1 94 | -0.37691385,0.68822166,-0.1832185,-1.1949901,-1.4444708,-0.41625584,-1.0243824,-1.151606,-0.69992253,-0.39294109,-0.9524656,-0.71021606,-0.87408263,-1 95 | 
-0.37691385,0.68822166,-0.1832185,-0.35520007,-0.090145066,2.3934711,0.97984406,0.70458524,-0.69992253,1.1788233,0.67516549,-0.71021606,-0.87408263,-1 96 | -0.37691385,0.68822166,0.8693133199999999,0.48458993,0.2194151,-0.41625584,0.97984406,1.56793,1.423438,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 97 | -0.37691385,0.68822166,0.8693133199999999,0.48458993,0.93527297,-0.41625584,-1.0243824,-1.1947732,1.423438,2.7505876,0.67516549,2.4680989,1.1870729,1 98 | -0.37691385,0.68822166,0.8693133199999999,0.48458993,0.95462048,-0.41625584,-1.0243824,1.0067559,1.423438,0.48026133,-0.9524656,-0.71021606,1.1870729,1 99 | -0.26713312,-1.4476387,-0.1832185,0.26064593,-1.0381731,-0.41625584,0.97984406,0.83408696,-0.69992253,-0.8295423,0.67516549,-0.71021606,-0.87408263,-1 100 | -0.26713312,0.68822166,-2.2882822,-0.74710207,-1.2316482,-0.41625584,0.97984406,1.7405989,-0.69992253,-0.91686254,0.67516549,-0.71021606,0.67178403,-1 101 | -0.26713312,0.68822166,-2.2882822,1.1564219,0.93527297,2.3934711,-1.0243824,1.2225921,-0.69992253,0.13098036,0.67516549,-0.71021606,1.1870729,-1 102 | -0.26713312,0.68822166,-1.2357503,-0.63513007,1.4576558,-0.41625584,-1.0243824,0.96358867,-0.69992253,-0.74222205,-0.9524656,-0.71021606,-0.87408263,-1 103 | -0.26713312,0.68822166,-1.2357503,-0.18724207,-0.86404547,2.3934711,-1.0243824,1.4815955,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 104 | -0.26713312,0.68822166,-1.2357503,0.14867393,-0.94143552,-0.41625584,-1.0243824,0.35924734,-0.69992253,-0.2183006,-0.9524656,0.34922226,-0.87408263,-1 105 | -0.26713312,0.68822166,-0.1832185,2.2761419,-0.98013054,2.3934711,-1.0243824,0.53191629,-0.69992253,-0.48026133,-0.9524656,-0.71021606,1.1870729,-1 106 | -0.26713312,0.68822166,0.8693133199999999,-1.3069621,-0.32231519,2.3934711,-1.0243824,-0.11559227,-0.69992253,-0.8295423,-0.9524656,2.4680989,1.1870729,-1 107 | 
-0.26713312,0.68822166,0.8693133199999999,-1.0830181,-0.38035772,-0.41625584,-1.0243824,0.44558182,-0.69992253,-0.91686254,-0.9524656,0.34922226,-0.87408263,1 108 | -0.26713312,0.68822166,0.8693133199999999,-0.35520007,-0.7286129,-0.41625584,-1.0243824,0.79091972,-0.69992253,-0.043660121,-0.9524656,1.4086606,1.1870729,1 109 | -0.26713312,0.68822166,0.8693133199999999,-0.18724207,0.10333004,-0.41625584,-1.0243824,0.48874906,1.423438,-0.91686254,-0.9524656,0.34922226,1.1870729,1 110 | -0.15735238,-1.4476387,0.8693133199999999,-0.075270067,0.27745763,-0.41625584,0.97984406,-0.28826122,-0.69992253,-0.56758157,0.67516549,-0.71021606,-0.87408263,-1 111 | -0.15735238,-1.4476387,0.8693133199999999,0.37261793,-0.30296768,-0.41625584,0.97984406,0.44558182,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 112 | -0.15735238,0.68822166,-0.1832185,-0.075270067,-1.0188256,2.3934711,0.97984406,0.10024392,-0.69992253,0.13098036,2.3027966,-0.71021606,-0.87408263,-1 113 | -0.15735238,0.68822166,-0.1832185,-0.075270067,-0.070797556,2.3934711,0.97984406,1.0067559,-0.69992253,-0.91686254,-0.9524656,2.4680989,-0.87408263,-1 114 | -0.15735238,0.68822166,0.8693133199999999,-0.46717207,0.62571281,-0.41625584,-1.0243824,-2.3602886,1.423438,0.8295423,0.67516549,1.4086606,1.1870729,1 115 | -0.15735238,0.68822166,0.8693133199999999,0.48458993,-0.90274049,2.3934711,0.97984406,0.22974563,1.423438,1.790065,2.3027966,-0.71021606,1.1870729,1 116 | -0.15735238,0.68822166,0.8693133199999999,0.59656193,-0.45774776,-0.41625584,0.97984406,-1.6696128,1.423438,-0.91686254,-0.9524656,-0.71021606,1.1870729,-1 117 | -0.047571651,-1.4476387,-1.2357503,0.036701933,0.74179787,2.3934711,0.97984406,0.40241458,1.423438,-0.91686254,-0.9524656,0.34922226,-0.87408263,-1 118 | -0.047571651,-1.4476387,-0.1832185,-1.3069621,0.33550016,-0.41625584,0.97984406,0.7477524800000001,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 119 | 
-0.047571651,-1.4476387,-0.1832185,-1.1949901,-0.68991788,-0.41625584,-1.0243824,0.35924734,-0.69992253,0.48026133,0.67516549,-0.71021606,-0.87408263,-1 120 | -0.047571651,-1.4476387,-0.1832185,0.20465993,1.051358,2.3934711,-1.0243824,0.8772542,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 121 | -0.047571651,-1.4476387,-0.1832185,1.6043099,-0.94143552,-0.41625584,-1.0243824,0.57508353,-0.69992253,-0.91686254,-0.9524656,0.34922226,-0.87408263,-1 122 | -0.047571651,0.68822166,-1.2357503,-1.3069621,1.1480956,-0.41625584,-1.0243824,0.27291287,-0.69992253,-0.91686254,-0.9524656,-0.71021606,1.1870729,-1 123 | -0.047571651,0.68822166,-1.2357503,3.3958619,0.64506032,-0.41625584,0.97984406,1.9564351,-0.69992253,-0.91686254,-0.9524656,0.34922226,1.1870729,1 124 | -0.047571651,0.68822166,-0.1832185,-0.63513007,0.16137257,-0.41625584,0.97984406,-0.11559227,-0.69992253,-0.56758157,0.67516549,-0.71021606,1.1870729,-1 125 | -0.047571651,0.68822166,-0.1832185,-0.35520007,0.45158522,-0.41625584,0.97984406,0.10024392,-0.69992253,-0.48026133,2.3027966,0.34922226,-0.87408263,-1 126 | -0.047571651,0.68822166,-0.1832185,1.0444499,-0.3416627,-0.41625584,0.97984406,0.66141801,-0.69992253,0.48026133,-0.9524656,-0.71021606,1.1870729,-1 127 | -0.047571651,0.68822166,0.8693133199999999,-1.1949901,-0.84469796,-0.41625584,0.97984406,-1.7991145,1.423438,-0.91686254,0.67516549,0.34922226,-0.87408263,1 128 | -0.047571651,0.68822166,0.8693133199999999,-1.1949901,-0.20623013,-0.41625584,-1.0243824,-1.0221043,1.423438,1.5281042,0.67516549,0.34922226,1.1870729,1 129 | -0.047571651,0.68822166,0.8693133199999999,-0.63513007,-1.1929531,-0.41625584,-1.0243824,-1.5832783,-0.69992253,0.30562085,0.67516549,0.34922226,1.1870729,1 130 | -0.047571651,0.68822166,0.8693133199999999,-0.52315807,0.7031028499999999,-0.41625584,0.97984406,-1.4537766,1.423438,1.8773852,0.67516549,1.4086606,-0.87408263,1 131 | 
-0.047571651,0.68822166,0.8693133199999999,-0.41118607,0.31615265,-0.41625584,0.97984406,-1.7559473,1.423438,1.0041828,0.67516549,0.34922226,1.1870729,1 132 | -0.047571651,0.68822166,0.8693133199999999,0.48458993,-0.20623013,-0.41625584,-1.0243824,0.44558182,-0.69992253,0.13098036,-0.9524656,-0.71021606,-0.87408263,-1 133 | 0.062209082,-1.4476387,-1.2357503,0.20465993,0.006592485,-0.41625584,0.97984406,0.48874906,-0.69992253,0.30562085,0.67516549,-0.71021606,-0.87408263,-1 134 | 0.062209082,-1.4476387,0.8693133199999999,2.7240299,1.4963508,-0.41625584,-0.022269183,-1.4106094,1.423438,2.0520257,0.67516549,-0.71021606,-0.87408263,1 135 | 0.062209082,0.68822166,-1.2357503,-0.075270067,0.23876261,-0.41625584,-1.0243824,0.22974563,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 136 | 0.062209082,0.68822166,0.8693133199999999,0.036701933,1.999386,-0.41625584,-1.0243824,-0.76310083,1.423438,0.13098036,0.67516549,0.34922226,1.1870729,1 137 | 0.062209082,0.68822166,0.8693133199999999,0.48458993,-0.6318753499999999,-0.41625584,-1.0243824,-1.6696128,1.423438,3.973071,2.3027966,-0.71021606,1.1870729,1 138 | 0.062209082,0.68822166,0.8693133199999999,1.6043099,0.76114538,-0.41625584,0.97984406,-0.20192674,1.423438,-0.2183006,0.67516549,0.34922226,1.1870729,1 139 | 0.17198982,-1.4476387,-1.2357503,0.48458993,0.85788293,-0.41625584,0.97984406,0.14341116,-0.69992253,0.2183006,0.67516549,-0.71021606,-0.87408263,-1 140 | 0.17198982,-1.4476387,0.8693133199999999,0.14867393,3.0828466,-0.41625584,0.97984406,0.013909443,1.423438,0.74222205,0.67516549,1.4086606,1.1870729,1 141 | 0.17198982,-1.4476387,0.8693133199999999,3.8437499,0.74179787,2.3934711,0.97984406,-0.7199336,1.423438,2.5759471,2.3027966,1.4086606,1.1870729,1 142 | 0.17198982,0.68822166,-2.2882822,-0.63513007,-1.0962156,-0.41625584,0.97984406,0.53191629,-0.69992253,0.74222205,0.67516549,-0.71021606,1.1870729,-1 143 | 
0.17198982,0.68822166,-1.2357503,-0.63513007,-0.26427266,-0.41625584,-1.0243824,1.2225921,-0.69992253,-0.2183006,-0.9524656,-0.71021606,-0.87408263,-1 144 | 0.17198982,0.68822166,-0.1832185,-0.075270067,0.12267755,2.3934711,0.97984406,-0.33142846,1.423438,-0.39294109,0.67516549,0.34922226,0.67178403,1 145 | 0.17198982,0.68822166,0.8693133199999999,-0.35520007,-0.012755025,2.3934711,0.97984406,-0.24509398,1.423438,0.13098036,0.67516549,0.34922226,-0.87408263,1 146 | 0.17198982,0.68822166,0.8693133199999999,-0.075270067,0.64506032,2.3934711,0.97984406,-2.0149507,1.423438,0.48026133,2.3027966,-0.71021606,1.1870729,1 147 | 0.17198982,0.68822166,0.8693133199999999,0.036701933,-1.2703432,-0.41625584,0.97984406,-1.9286162,1.423438,0.91686254,0.67516549,0.34922226,0.67178403,1 148 | 0.28177055,-1.4476387,0.8693133199999999,-0.63513007,2.0187335,-0.41625584,-1.0243824,0.57508353,1.423438,-0.39294109,-0.9524656,-0.71021606,-0.87408263,-1 149 | 0.28177055,-1.4476387,0.8693133199999999,-0.18724207,1.0320105,-0.41625584,0.97984406,0.40241458,-0.69992253,-0.91686254,-0.9524656,0.34922226,-0.87408263,-1 150 | 0.28177055,0.68822166,-1.2357503,-0.41118607,0.2194151,-0.41625584,-1.0243824,-0.3745957,-0.69992253,-0.65490181,-0.9524656,-0.71021606,1.1870729,1 151 | 0.28177055,0.68822166,-0.1832185,-0.18724207,-0.39970523,-0.41625584,0.97984406,0.013909443,-0.69992253,-0.56758157,0.67516549,0.34922226,1.1870729,1 152 | 0.28177055,0.68822166,-0.1832185,1.0444499,-2.3924988,2.3934711,-1.0243824,1.0067559,-0.69992253,-0.74222205,-0.9524656,0.34922226,1.1870729,-1 153 | 0.28177055,0.68822166,-0.1832185,1.0444499,-1.5799034,-0.41625584,-1.0243824,1.0499231,-0.69992253,0.48026133,-0.9524656,-0.71021606,-0.87408263,-1 154 | 0.28177055,0.68822166,0.8693133199999999,-1.1949901,-0.94143552,-0.41625584,-1.0243824,-1.0221043,1.423438,0.39294109,0.67516549,-0.71021606,0.67178403,-1 155 | 
0.28177055,0.68822166,0.8693133199999999,0.036701933,-0.82535045,-0.41625584,-1.0243824,0.79091972,1.423438,-0.91686254,-0.9524656,-0.71021606,1.1870729,-1 156 | 0.28177055,0.68822166,0.8693133199999999,0.48458993,-1.1155631,-0.41625584,-1.0243824,-0.072425032,-0.69992253,-0.56758157,0.67516549,-0.71021606,0.67178403,-1 157 | 0.28177055,0.68822166,0.8693133199999999,1.0444499,0.50962775,-0.41625584,0.97984406,-1.6264456,1.423438,-0.39294109,0.67516549,0.34922226,0.67178403,1 158 | 0.28177055,0.68822166,0.8693133199999999,1.1564219,0.47093273,-0.41625584,-1.0243824,-2.6624593,1.423438,0.13098036,0.67516549,0.34922226,1.1870729,1 159 | 0.28177055,0.68822166,0.8693133199999999,1.8842399,0.76114538,2.3934711,0.97984406,-1.1084387,-0.69992253,-0.043660121,0.67516549,2.4680989,1.1870729,1 160 | 0.39155128,-1.4476387,-2.2882822,1.0444499,0.64506032,2.3934711,0.97984406,0.53191629,-0.69992253,-0.043660121,-0.9524656,-0.71021606,-0.87408263,-1 161 | 0.39155128,-1.4476387,-1.2357503,0.26064593,1.3415707,2.3934711,0.97984406,0.10024392,-0.69992253,-0.91686254,-0.9524656,1.4086606,-0.87408263,1 162 | 0.39155128,-1.4476387,-0.1832185,-0.63513007,1.7478684,-0.41625584,-1.0243824,0.96358867,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 163 | 0.39155128,-1.4476387,0.8693133199999999,-1.7548501,-0.032102535,-0.41625584,0.97984406,-1.1947732,-0.69992253,-0.043660121,0.67516549,-0.71021606,-0.87408263,-1 164 | 0.39155128,0.68822166,-1.2357503,-0.63513007,0.66440783,-0.41625584,0.97984406,0.44558182,-0.69992253,0.65490181,0.67516549,-0.71021606,-0.87408263,1 165 | 0.39155128,0.68822166,-0.1832185,-1.4749201,-0.18688262,-0.41625584,0.97984406,0.18657839,1.423438,-0.39294109,0.67516549,-0.71021606,1.1870729,-1 166 | 0.39155128,0.68822166,-0.1832185,-1.0830181,-0.38035772,-0.41625584,0.97984406,0.66141801,-0.69992253,1.2661435,0.67516549,0.34922226,1.1870729,1 167 | 
0.39155128,0.68822166,-0.1832185,0.036701933,-0.49644278,-0.41625584,0.97984406,1.0067559,-0.69992253,1.8773852,-0.9524656,1.4086606,1.1870729,1 168 | 0.39155128,0.68822166,-0.1832185,0.48458993,-0.74796041,2.3934711,0.97984406,0.66141801,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 169 | 0.39155128,0.68822166,0.8693133199999999,-1.7548501,-0.30296768,-0.41625584,-1.0243824,0.27291287,-0.69992253,-0.8295423,-0.9524656,0.34922226,1.1870729,1 170 | 0.39155128,0.68822166,0.8693133199999999,-0.35520007,0.9739679999999999,-0.41625584,0.97984406,0.92042143,-0.69992253,-0.91686254,-0.9524656,1.4086606,1.1870729,1 171 | 0.39155128,0.68822166,0.8693133199999999,-0.18724207,-0.65122286,-0.41625584,0.97984406,-0.80626807,1.423438,1.0041828,0.67516549,2.4680989,1.1870729,1 172 | 0.39155128,0.68822166,0.8693133199999999,-0.18724207,0.18072008,-0.41625584,0.97984406,-0.8494353100000001,1.423438,1.7027447,0.67516549,1.4086606,1.1870729,1 173 | 0.39155128,0.68822166,0.8693133199999999,0.82050593,-0.61252784,-0.41625584,-1.0243824,-1.9286162,-0.69992253,0.8295423,0.67516549,0.34922226,1.1870729,1 174 | 0.39155128,0.68822166,0.8693133199999999,1.0444499,0.39354269,-0.41625584,0.97984406,-1.6696128,1.423438,-0.2183006,-0.9524656,-0.71021606,1.1870729,1 175 | 0.5013320100000001,-1.4476387,0.8693133199999999,2.3881139,-0.012755025,-0.41625584,-1.0243824,-0.28826122,1.423438,-0.91686254,0.67516549,-0.71021606,-0.87408263,1 176 | 0.5013320100000001,0.68822166,-2.2882822,1.6043099,0.45158522,-0.41625584,0.97984406,-1.0652715,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,1 177 | 0.5013320100000001,0.68822166,-2.2882822,2.1641699,0.74179787,-0.41625584,0.97984406,0.40241458,-0.69992253,-0.74222205,0.67516549,-0.71021606,1.1870729,1 178 | 0.5013320100000001,0.68822166,-2.2882822,2.6120579,0.39354269,-0.41625584,0.97984406,-0.20192674,-0.69992253,2.7505876,2.3027966,-0.71021606,1.1870729,-1 179 | 
0.5013320100000001,0.68822166,-1.2357503,0.48458993,-0.55448531,-0.41625584,-1.0243824,0.6182507699999999,1.423438,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 180 | 0.5013320100000001,0.68822166,-0.1832185,-0.29921407,-0.61252784,2.3934711,-1.0243824,-0.67676636,-0.69992253,1.0041828,0.67516549,0.34922226,0.67178403,1 181 | 0.5013320100000001,0.68822166,-0.1832185,1.0444499,-0.7286129,2.3934711,-1.0243824,0.31608011,-0.69992253,0.48026133,-0.9524656,-0.71021606,-0.87408263,-1 182 | 0.5013320100000001,0.68822166,0.8693133199999999,-1.1949901,-0.20623013,-0.41625584,0.97984406,-0.33142846,1.423438,0.13098036,0.67516549,0.34922226,1.1870729,1 183 | 0.5013320100000001,0.68822166,0.8693133199999999,0.20465993,-0.30296768,-0.41625584,-1.0243824,0.48874906,-0.69992253,-0.48026133,0.67516549,-0.71021606,1.1870729,-1 184 | 0.5013320100000001,0.68822166,0.8693133199999999,0.37261793,0.4128902,-0.41625584,0.97984406,1.395261,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 185 | 0.5013320100000001,0.68822166,0.8693133199999999,0.48458993,-1.4057758,-0.41625584,-1.0243824,0.53191629,1.423438,-0.91686254,-0.9524656,0.34922226,1.1870729,1 186 | 0.5013320100000001,0.68822166,0.8693133199999999,2.1641699,1.4770033,-0.41625584,0.97984406,-0.41776293,1.423438,2.0520257,2.3027966,-0.71021606,1.1870729,1 187 | 0.61111275,-1.4476387,-2.2882822,1.0444499,-0.18688262,-0.41625584,-1.0243824,0.92042143,-0.69992253,-0.13098036,-0.9524656,-0.71021606,-0.87408263,-1 188 | 0.61111275,-1.4476387,-0.1832185,-1.6428781,1.3222232,-0.41625584,-1.0243824,0.44558182,-0.69992253,-0.91686254,-0.9524656,0.34922226,-0.87408263,-1 189 | 0.61111275,-1.4476387,-0.1832185,-0.63513007,-1.3864282,2.3934711,-1.0243824,-2.3171214,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 190 | 0.61111275,-1.4476387,0.8693133199999999,1.0444499,0.16137257,-0.41625584,0.97984406,0.31608011,-0.69992253,1.3534637,0.67516549,1.4086606,1.1870729,1 191 | 
0.61111275,-1.4476387,0.8693133199999999,1.4923379,1.0707055,-0.41625584,0.97984406,0.48874906,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,1 192 | 0.61111275,0.68822166,-0.1832185,0.48458993,-1.2509957,-0.41625584,0.97984406,0.22974563,-0.69992253,1.7027447,0.67516549,-0.71021606,-0.87408263,1 193 | 0.61111275,0.68822166,0.8693133199999999,-0.80308807,-0.38035772,2.3934711,-1.0243824,0.44558182,1.423438,0.30562085,-0.9524656,1.4086606,1.1870729,1 194 | 0.61111275,0.68822166,0.8693133199999999,-0.35520007,0.16137257,-0.41625584,0.97984406,-0.3745957,1.423438,1.5281042,0.67516549,0.34922226,1.1870729,1 195 | 0.61111275,0.68822166,0.8693133199999999,-0.075270067,-0.84469796,-0.41625584,0.97984406,-0.76310083,1.423438,1.1788233,0.67516549,1.4086606,1.1870729,1 196 | 0.61111275,0.68822166,0.8693133199999999,-0.075270067,0.064635016,-0.41625584,-1.0243824,-0.24509398,1.423438,0.30562085,-0.9524656,0.34922226,1.1870729,1 197 | 0.61111275,0.68822166,0.8693133199999999,0.48458993,0.83853542,-0.41625584,0.97984406,0.8772542,-0.69992253,0.13098036,0.67516549,1.4086606,1.1870729,1 198 | 0.61111275,0.68822166,0.8693133199999999,0.76451993,0.62571281,-0.41625584,0.97984406,-0.33142846,1.423438,1.5281042,0.67516549,1.4086606,1.1870729,1 199 | 0.72089348,-1.4476387,0.8693133199999999,-0.075270067,1.5543933,-0.41625584,0.97984406,0.83408696,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,1 200 | 0.72089348,-1.4476387,0.8693133199999999,0.76451993,1.1094006,-0.41625584,0.97984406,-0.15875951,1.423438,-0.043660121,0.67516549,-0.71021606,1.1870729,1 201 | 0.72089348,0.68822166,-2.2882822,0.14867393,-0.30296768,-0.41625584,-1.0243824,-0.20192674,-0.69992253,1.3534637,0.67516549,1.4086606,-0.87408263,1 202 | 0.72089348,0.68822166,-0.1832185,1.0444499,-0.12884009,2.3934711,-1.0243824,-0.54726465,1.423438,-0.043660121,0.67516549,-0.71021606,-0.87408263,-1 203 | 
0.72089348,0.68822166,0.8693133199999999,-0.63513007,0.20006759,-0.41625584,-1.0243824,-0.41776293,1.423438,2.2266662,0.67516549,0.34922226,1.1870729,1 204 | 0.72089348,0.68822166,0.8693133199999999,0.37261793,-1.6185984,-0.41625584,0.97984406,-1.0652715,1.423438,2.2266662,0.67516549,0.34922226,-0.87408263,1 205 | 0.72089348,0.68822166,0.8693133199999999,0.48458993,-0.82535045,-0.41625584,0.97984406,-0.50409741,1.423438,0.74222205,-0.9524656,0.34922226,1.1870729,1 206 | 0.83067421,-1.4476387,-0.1832185,-0.075270067,0.25811012,-0.41625584,-1.0243824,-2.2739541,-0.69992253,0.13098036,0.67516549,0.34922226,1.1870729,1 207 | 0.83067421,-1.4476387,0.8693133199999999,-0.41118607,-0.78665543,-0.41625584,-1.0243824,0.57508353,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 208 | 0.83067421,-1.4476387,0.8693133199999999,0.37261793,0.85788293,2.3934711,-1.0243824,-1.885449,-0.69992253,0.74222205,0.67516549,2.4680989,-0.87408263,1 209 | 0.83067421,-1.4476387,0.8693133199999999,0.48458993,0.35484767,-0.41625584,0.97984406,0.44558182,-0.69992253,2.2266662,2.3027966,1.4086606,-0.87408263,1 210 | 0.83067421,-1.4476387,0.8693133199999999,0.48458993,2.792634,-0.41625584,0.97984406,0.31608011,-0.69992253,0.13098036,0.67516549,-0.71021606,-0.87408263,-1 211 | 0.83067421,-1.4476387,0.8693133199999999,1.0444499,-0.10949258,-0.41625584,-1.0243824,0.18657839,1.423438,0.30562085,0.67516549,-0.71021606,-0.87408263,1 212 | 0.83067421,-1.4476387,0.8693133199999999,1.6043099,-1.6572934,-0.41625584,0.97984406,-0.20192674,-0.69992253,4.4969924,2.3027966,2.4680989,1.1870729,1 213 | 0.83067421,0.68822166,-1.2357503,-0.63513007,0.6063653,-0.41625584,0.97984406,-2.0149507,-0.69992253,0.30562085,0.67516549,0.34922226,1.1870729,1 214 | 0.83067421,0.68822166,-1.2357503,-0.18724207,-0.8060029399999999,2.3934711,0.97984406,-0.41776293,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 215 | 
0.83067421,0.68822166,-0.1832185,-0.075270067,-0.36101021,-0.41625584,-1.0243824,-0.15875951,-0.69992253,0.65490181,0.67516549,2.4680989,1.1870729,-1 216 | 0.83067421,0.68822166,0.8693133199999999,-0.63513007,0.33550016,-0.41625584,-1.0243824,-2.1876197,1.423438,0.65490181,0.67516549,1.4086606,1.1870729,1 217 | 0.94045495,-1.4476387,-1.2357503,0.48458993,-1.0575206,-0.41625584,-1.0243824,1.2657593,-0.69992253,-0.91686254,-0.9524656,1.4086606,-0.87408263,-1 218 | 0.94045495,-1.4476387,-0.1832185,0.20465993,0.045287505,-0.41625584,0.97984406,0.96358867,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 219 | 0.94045495,-1.4476387,0.8693133199999999,-1.3069621,0.37419518,-0.41625584,-1.0243824,0.83408696,1.423438,0.65490181,0.67516549,1.4086606,-0.87408263,1 220 | 0.94045495,-1.4476387,0.8693133199999999,1.0444499,3.0441516,-0.41625584,0.97984406,0.18657839,-0.69992253,2.5759471,0.67516549,2.4680989,1.1870729,1 221 | 0.94045495,0.68822166,-2.2882822,0.76451993,-0.32231519,2.3934711,0.97984406,0.013909443,-0.69992253,1.091503,2.3027966,-0.71021606,0.67178403,-1 222 | 0.94045495,0.68822166,0.8693133199999999,-0.075270067,0.083982526,-0.41625584,0.97984406,-0.11559227,-0.69992253,0.30562085,0.67516549,0.34922226,1.1870729,1 223 | 0.94045495,0.68822166,0.8693133199999999,-0.075270067,1.5543933,2.3934711,0.97984406,-0.76310083,1.423438,0.65490181,-0.9524656,2.4680989,1.1870729,1 224 | 1.0502357,-1.4476387,-0.1832185,0.48458993,1.2254856,-0.41625584,-1.0243824,-0.7199336,-0.69992253,-0.74222205,-0.9524656,-0.71021606,1.1870729,-1 225 | 1.0502357,-1.4476387,0.8693133199999999,-0.075270067,1.0320105,-0.41625584,-1.0243824,-1.1947732,-0.69992253,0.8295423,0.67516549,1.4086606,-0.87408263,-1 226 | 1.0502357,-1.4476387,0.8693133199999999,2.7240299,1.4576558,-0.41625584,-1.0243824,0.18657839,1.423438,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 227 | 
1.0502357,0.68822166,-2.2882822,-1.1949901,-0.74796041,-0.41625584,0.97984406,-0.24509398,1.423438,0.65490181,0.67516549,-0.71021606,-0.87408263,-1 228 | 1.0502357,0.68822166,-0.1832185,-0.35520007,1.1480956,-0.41625584,-1.0243824,-0.80626807,1.423438,0.65490181,0.67516549,-0.71021606,1.1870729,1 229 | 1.0502357,0.68822166,-0.1832185,0.48458993,1.6511309,-0.41625584,-1.0243824,0.35924734,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,1 230 | 1.0502357,0.68822166,0.8693133199999999,-0.63513007,-0.070797556,-0.41625584,0.97984406,-2.3171214,1.423438,1.0041828,2.3027966,0.34922226,-0.87408263,1 231 | 1.0502357,0.68822166,0.8693133199999999,-0.18724207,0.25811012,-0.41625584,-1.0243824,-1.9286162,1.423438,-0.74222205,0.67516549,0.34922226,1.1870729,-1 232 | 1.0502357,0.68822166,0.8693133199999999,0.76451993,-0.7286129,-0.41625584,0.97984406,-0.76310083,-0.69992253,0.8295423,0.67516549,1.4086606,0.67178403,1 233 | 1.1600164,-1.4476387,-0.1832185,0.48458993,3.2376267,2.3934711,0.97984406,0.31608011,-0.69992253,-0.2183006,-0.9524656,0.34922226,-0.87408263,-1 234 | 1.1600164,-1.4476387,-0.1832185,1.3243799,0.37419518,-0.41625584,-1.0243824,-0.072425032,-0.69992253,-0.2183006,-0.9524656,-0.71021606,-0.87408263,-1 235 | 1.1600164,-1.4476387,-0.1832185,1.6043099,2.1348186,-0.41625584,0.97984406,0.057076681,-0.69992253,-0.2183006,-0.9524656,-0.71021606,-0.87408263,-1 236 | 1.1600164,-1.4476387,0.8693133199999999,1.0444499,-0.47709527,-0.41625584,0.97984406,-1.5401111,-0.69992253,-0.043660121,0.67516549,2.4680989,1.1870729,1 237 | 1.1600164,0.68822166,-2.2882822,0.37261793,0.62571281,2.3934711,0.97984406,1.0499231,-0.69992253,0.30562085,0.67516549,0.34922226,-0.87408263,1 238 | 1.1600164,0.68822166,0.8693133199999999,-1.1949901,-0.032102535,-0.41625584,0.97984406,0.35924734,-0.69992253,-0.39294109,-0.9524656,1.4086606,0.67178403,1 239 | 
1.1600164,0.68822166,0.8693133199999999,-0.63513007,-1.4057758,-0.41625584,-1.0243824,-0.41776293,-0.69992253,-0.56758157,-0.9524656,-0.71021606,1.1870729,-1 240 | 1.1600164,0.68822166,0.8693133199999999,0.20465993,0.083982526,-0.41625584,0.97984406,-0.97893702,-0.69992253,1.5281042,0.67516549,0.34922226,1.1870729,1 241 | 1.2697971,-1.4476387,-2.2882822,1.0444499,-0.45774776,-0.41625584,-1.0243824,-1.5401111,-0.69992253,1.3534637,2.3027966,-0.71021606,-0.87408263,-1 242 | 1.2697971,-1.4476387,0.8693133199999999,2.6120579,-0.41905274,2.3934711,-1.0243824,0.66141801,1.423438,-0.043660121,0.67516549,1.4086606,1.1870729,1 243 | 1.2697971,0.68822166,-1.2357503,1.6043099,-0.070797556,-0.41625584,-1.0243824,-1.2811077,1.423438,-0.91686254,0.67516549,2.4680989,0.67178403,1 244 | 1.2697971,0.68822166,0.8693133199999999,-1.0830181,-0.7286129,-0.41625584,0.97984406,-0.76310083,1.423438,-0.8295423,-0.9524656,0.34922226,-0.87408263,1 245 | 1.2697971,0.68822166,0.8693133199999999,-0.63513007,1.012663,-0.41625584,0.97984406,0.057076681,-0.69992253,-0.56758157,0.67516549,-0.71021606,-0.87408263,-1 246 | 1.2697971,0.68822166,0.8693133199999999,1.6043099,-0.41905274,-0.41625584,0.97984406,-0.50409741,-0.69992253,1.091503,-0.9524656,-0.71021606,0.67178403,-1 247 | 1.3795779,-1.4476387,-0.1832185,-0.91506006,6.0817107,-0.41625584,0.97984406,0.44558182,-0.69992253,0.48026133,0.67516549,-0.71021606,1.1870729,-1 248 | 1.3795779,-1.4476387,-0.1832185,1.1564219,0.5289752599999999,-0.41625584,-1.0243824,0.96358867,-0.69992253,-0.91686254,-0.9524656,0.34922226,-0.87408263,-1 249 | 1.3795779,-1.4476387,0.8693133199999999,-1.4189341,-0.51579029,-0.41625584,-1.0243824,-0.33142846,-0.69992253,-0.65490181,-0.9524656,1.4086606,-0.87408263,-1 250 | 1.3795779,0.68822166,0.8693133199999999,-1.7548501,0.95462048,-0.41625584,0.97984406,-1.0652715,1.423438,-0.13098036,0.67516549,1.4086606,-0.87408263,1 251 | 
1.3795779,0.68822166,0.8693133199999999,-0.63513007,-0.39970523,-0.41625584,0.97984406,-0.89260255,1.423438,1.3534637,0.67516549,1.4086606,1.1870729,1 252 | 1.3795779,0.68822166,0.8693133199999999,-0.63513007,-0.24492515,-0.41625584,-1.0243824,-3.3963023,-0.69992253,-0.043660121,0.67516549,-0.71021606,-0.87408263,1 253 | 1.3795779,0.68822166,0.8693133199999999,-0.35520007,0.083982526,2.3934711,-1.0243824,0.57508353,-0.69992253,-0.74222205,0.67516549,1.4086606,1.1870729,1 254 | 1.3795779,0.68822166,0.8693133199999999,1.6043099,0.7031028499999999,-0.41625584,0.97984406,-1.7991145,1.423438,0.39294109,0.67516549,2.4680989,-0.87408263,1 255 | 1.4893586,-1.4476387,-0.1832185,-0.63513007,-0.74796041,-0.41625584,0.97984406,-1.4969439,-0.69992253,0.39294109,0.67516549,-0.71021606,-0.87408263,-1 256 | 1.4893586,0.68822166,-0.1832185,-0.74710207,0.5289752599999999,-0.41625584,-1.0243824,0.057076681,-0.69992253,-0.043660121,-0.9524656,0.34922226,1.1870729,-1 257 | 1.4893586,0.68822166,-0.1832185,2.7240299,0.47093273,2.3934711,0.97984406,0.013909443,1.423438,0.48026133,0.67516549,-0.71021606,1.1870729,1 258 | 1.5991393,-1.4476387,-2.2882822,0.48458993,-0.20623013,-0.41625584,-1.0243824,0.057076681,-0.69992253,0.65490181,-0.9524656,1.4086606,-0.87408263,-1 259 | 1.5991393,0.68822166,-2.2882822,1.6043099,-0.30296768,2.3934711,0.97984406,-0.80626807,-0.69992253,-0.8295423,0.67516549,0.34922226,-0.87408263,-1 260 | 1.5991393,0.68822166,-0.1832185,0.48458993,0.083982526,-0.41625584,0.97984406,-0.15875951,-0.69992253,0.8295423,0.67516549,2.4680989,1.1870729,1 261 | 1.7089201,0.68822166,-1.2357503,1.3803659,-0.090145066,-0.41625584,0.97984406,-0.28826122,-0.69992253,-0.91686254,-0.9524656,-0.71021606,-0.87408263,-1 262 | 1.7089201,0.68822166,-0.1832185,1.6043099,0.37419518,-0.41625584,-1.0243824,-1.6264456,1.423438,1.6154245,0.67516549,0.34922226,1.1870729,1 263 | 
1.7089201,0.68822166,0.8693133199999999,-0.075270067,1.3996132,-0.41625584,0.97984406,-1.7559473,-0.69992253,1.1788233,0.67516549,2.4680989,-0.87408263,1 264 | 1.7089201,0.68822166,0.8693133199999999,0.76451993,-1.4638183,-0.41625584,-1.0243824,-1.0652715,1.423438,1.3534637,2.3027966,-0.71021606,1.1870729,1 265 | 1.8187008,-1.4476387,-1.2357503,1.6043099,1.012663,-0.41625584,-1.0243824,0.53191629,-0.69992253,-0.56758157,-0.9524656,1.4086606,-0.87408263,-1 266 | 1.8187008,-1.4476387,-0.1832185,-1.1949901,0.29680514,2.3934711,0.97984406,-0.8494353100000001,-0.69992253,-0.91686254,-0.9524656,0.34922226,-0.87408263,-1 267 | 1.8187008,-1.4476387,0.8693133199999999,-1.0830181,-1.947506,-0.41625584,-1.0243824,-1.0652715,-0.69992253,0.48026133,0.67516549,-0.71021606,-0.87408263,-1 268 | 2.148043,-1.4476387,-1.2357503,-0.63513007,0.37419518,-0.41625584,0.97984406,-1.2379404,1.423438,-0.74222205,-0.9524656,0.34922226,-0.87408263,-1 269 | 2.3676045,-1.4476387,-0.1832185,0.48458993,-1.0188256,-0.41625584,-0.022269183,-1.4537766,-0.69992253,0.043660121,0.67516549,-0.71021606,-0.87408263,-1 270 | 2.4773852,0.68822166,0.8693133199999999,-0.35520007,1.051358,-0.41625584,0.97984406,0.53191629,1.423438,-0.91686254,-0.9524656,2.4680989,-0.87408263,1 271 | -------------------------------------------------------------------------------- /data/Processed_Thyroid.data: -------------------------------------------------------------------------------- 1 | -3.8469838,1.7969801,7.3504873,-0.22689313,-0.49605233,1 2 | -3.8469838,3.330645,3.5381822,-0.22689313,-0.48385532,1 3 | -3.6736181,2.8986267,4.9904889,-0.15398402,-0.58143139,1 4 | -3.5869353,1.0409481,5.3535656,-0.32896587,-0.53264335,1 5 | -2.8934727,3.330645,2.358183,-0.24147495,-0.5204463499999999,1 6 | -2.6334242,1.9697874,3.2658747,-0.28522041,-0.47165831,1 7 | -2.5467414,2.833824,7.3504873,-0.28522041,-0.5204463499999999,1 8 | -2.2000101,1.8617828,2.2674138,-0.25605677,-0.54484036,1 9 | 
-2.2000101,2.5098103,0.72433797,-0.25605677,-0.58143139,1 10 | -1.8532788,0.65213161,0.72433797,-0.40187497,-0.48385532,1 11 | -1.8532788,1.4297645,2.7212597,-0.29980223,-0.49605233,1 12 | -1.8532788,3.071434,3.2658747,-0.29980223,-0.49605233,1 13 | -1.766596,0.9545444199999999,1.9951063,-0.34354769,-0.48385532,1 14 | -1.766596,2.2073975,4.8997198,-0.25605677,-0.53264335,1 15 | -1.766596,2.574613,4.7181814,-0.31438405,-0.5204463499999999,1 16 | -1.766596,3.0066313,3.1751055,-0.34354769,-0.49605233,1 17 | -1.6799131,-0.38471229,-0.27412289,-0.21231131,-0.37408225,-1 18 | -1.5932303,-0.4063132,-0.18335372,-0.11023856,0.052813046,-1 19 | -1.5065475,0.26331515,0.08895378600000001,-0.31438405,-0.53264335,1 20 | -1.4198647,-0.21190497,-0.36489206,-0.29980223,-0.17893011,-1 21 | -1.3331818,-0.5143177799999999,-0.63719957,-0.22689313,0.028419029,-1 22 | -1.3331818,2.2938011,-0.092584553,-0.21231131,-0.56923438,1 23 | -1.246499,0.26331515,0.72433797,-0.18314766,-0.54484036,1 24 | -1.1598162,-0.1039004,-0.36489206,-0.27063859,-0.13014208,-1 25 | -1.0731334,-1.1191434,-0.72796874,-0.11023856,1.0285737,1 26 | -1.0731334,-0.44951503,-0.5464304,-0.24147495,-0.39847626,-1 27 | -1.0731334,0.9329435,1.5412605,-0.19772949,-0.47165831,1 28 | -1.0731334,1.1273517,-0.092584553,-0.24147495,-0.53264335,1 29 | -0.98645055,-0.90313424,-1.3633529,-0.22689313,-0.1667331,-1 30 | -0.98645055,-0.27670772,-0.27412289,-0.18314766,0.22357116,-1 31 | -0.98645055,-0.16870314,-0.45566123,-0.1394022,-0.54484036,-1 32 | -0.98645055,0.11210875,-0.27412289,-0.08107492199999999,-0.5936283999999999,-1 33 | -0.98645055,1.4729664,2.1766447,-0.16856584,-0.48385532,1 34 | -0.89976772,0.67373253,1.0874146,-0.34354769,-0.5204463499999999,1 35 | -0.89976772,0.67373253,1.5412605,-0.31438405,-0.5204463499999999,1 36 | -0.89976772,1.6457737,-0.0018153834,-0.21231131,-0.47165831,1 37 | -0.8130849,-0.81673058,0.45203046,-0.15398402,-0.044763021,-1 38 | 
-0.8130849,-0.08229948500000001,0.54279963,-0.22689313,-0.53264335,-1 39 | -0.8130849,0.13370966,0.45203046,-0.28522041,-0.27650618,-1 40 | -0.8130849,0.30651698,0.54279963,-0.31438405,-0.54484036,-1 41 | -0.72640207,-0.77352875,-0.36489206,-0.28522041,-0.1545361,-1 42 | -0.72640207,-0.68712509,-0.5464304,-0.27063859,0.18698014,-1 43 | -0.72640207,-0.60072143,-0.27412289,-0.19772949,-0.3130972,-1 44 | -0.72640207,-0.60072143,0.27049212,-0.29980223,-0.23991515,-1 45 | -0.72640207,-0.44951503,-0.63719957,-0.27063859,-0.3009002,-1 46 | -0.63971925,-0.9895379,-0.45566123,-0.22689313,0.30895022,1 47 | -0.63971925,-0.70872601,-0.63719957,-0.21231131,-0.34968823,-1 48 | -0.63971925,-0.49271686,-0.092584553,-0.12482038,-0.20332413,-1 49 | -0.63971925,-0.31990955,-0.36489206,-0.29980223,-0.21552114,-1 50 | -0.63971925,-0.29830863,-0.63719957,-0.22689313,-0.33749122,-1 51 | -0.63971925,-0.08229948500000001,-0.45566123,-0.25605677,-0.3130972,-1 52 | -0.55303642,-1.0327397,-0.45566123,-0.24147495,0.10160108,1 53 | -0.55303642,-0.5575196100000001,-0.81873791,-0.31438405,-0.4472643,-1 54 | -0.55303642,-0.38471229,-0.45566123,-0.34354769,-0.044763021,-1 55 | -0.55303642,-0.29830863,-0.092584553,-0.1394022,-0.37408225,-1 56 | -0.55303642,-0.08229948500000001,0.90587631,-0.21231131,-0.5204463499999999,-1 57 | -0.55303642,0.047306003,-0.5464304,-0.31438405,-0.49605233,-1 58 | -0.55303642,0.50092521,-0.63719957,-0.22689313,-0.17893011,-1 59 | -0.4663536,-0.81673058,-0.092584553,-0.34354769,-0.41067327,-1 60 | -0.4663536,-0.77352875,0.08895378600000001,-0.24147495,-0.020369004,-1 61 | -0.4663536,-0.06069857,-0.72796874,-0.22689313,-0.41067327,-1 62 | -0.37967077,-0.90313424,-0.81873791,-0.28522041,-0.39847626,-1 63 | -0.37967077,-0.81673058,0.17972295,-0.21231131,0.34554125,-1 64 | -0.37967077,-0.62232235,-0.36489206,-0.02274764,0.016222021,-1 65 | -0.37967077,-0.5575196100000001,-0.36489206,-0.19772949,-0.5204463499999999,-1 66 | 
-0.37967077,-0.38471229,0.08895378600000001,-0.1394022,-0.56923438,-1 67 | -0.37967077,-0.2551068,-0.36489206,-0.25605677,-0.32529421,-1 68 | -0.37967077,-0.08229948500000001,-0.092584553,-0.18314766,-0.069157037,-1 69 | -0.37967077,0.26331515,-0.72796874,-0.29980223,-0.36188524,-1 70 | -0.37967077,0.26331515,0.08895378600000001,-0.27063859,-0.38627925,-1 71 | -0.37967077,0.45772338,1.268953,-0.25605677,-0.50824934,1 72 | -0.37967077,1.6241728,-0.27412289,-0.37271133,-0.4594613,1 73 | -0.29298795,-1.227148,-0.63719957,-0.18314766,-0.33749122,-1 74 | -0.29298795,-0.68712509,-0.36489206,-0.24147495,-0.032566012,-1 75 | -0.29298795,-0.21190497,-1.0910454,-0.27063859,-0.22771815,-1 76 | -0.29298795,-0.1039004,-0.36489206,-0.29980223,-0.4472643,-1 77 | -0.29298795,-0.1039004,-0.18335372,-0.28522041,-0.13014208,-1 78 | -0.29298795,-0.1039004,0.27049212,-0.19772949,-0.50824934,-1 79 | -0.29298795,-0.06069857,0.45203046,-0.27063859,-0.34968823,-1 80 | -0.29298795,0.30651698,-0.092584553,-0.28522041,-0.38627925,-1 81 | -0.29298795,0.7601361800000001,0.99664548,-0.25605677,-0.50824934,1 82 | -0.20630512,-0.31990955,-0.092584553,-0.19772949,-0.41067327,-1 83 | -0.20630512,0.047306003,0.27049212,-0.28522041,-0.17893011,-1 84 | -0.20630512,0.67373253,-0.72796874,-0.28522041,-0.13014208,-1 85 | -0.20630512,0.67373253,-0.36489206,-0.0081658194,-0.3009002,-1 86 | -0.20630512,0.84653984,-0.36489206,-0.27063859,-0.27650618,-1 87 | -0.1196223,-1.3783544,-1.1818146,-0.16856584,-0.33749122,1 88 | -0.1196223,-0.73032692,-0.81873791,-0.28522041,-0.32529421,-1 89 | -0.1196223,-0.60072143,-0.5464304,-0.18314766,-0.23991515,-1 90 | -0.1196223,-0.2551068,-0.63719957,-0.095656742,-0.20332413,-1 91 | -0.1196223,0.11210875,0.17972295,-0.22689313,-0.21552114,-1 92 | -0.1196223,0.22011332,-0.63719957,-0.1394022,-0.38627925,-1 93 | -0.032939473,-0.9895379,-0.27412289,-0.21231131,-0.32529421,-1 94 | -0.032939473,-0.49271686,-0.5464304,-0.095656742,-0.27650618,-1 95 | 
-0.032939473,-0.31990955,0.17972295,-0.25605677,-0.069157037,-1 96 | -0.032939473,-0.21190497,-0.18335372,-0.27063859,-0.39847626,-1 97 | -0.032939473,-0.14710223,-0.092584553,-0.25605677,0.028419029,-1 98 | -0.032939473,-0.039097655,-0.45566123,-0.25605677,-0.25211216,-1 99 | -0.032939473,0.025705088,-0.5464304,-0.15398402,0.016222021,-1 100 | -0.032939473,0.11210875,-0.0018153834,-0.35812951,-0.5204463499999999,-1 101 | -0.032939473,0.54412704,0.36126129,-0.16856584,-0.41067327,-1 102 | 0.053743351,-0.77352875,-0.81873791,-0.29980223,-0.38627925,-1 103 | 0.053743351,-0.62232235,-0.81873791,-0.18314766,0.016222021,-1 104 | 0.053743351,-0.44951503,-0.0018153834,-0.11023856,0.2723592,-1 105 | 0.053743351,-0.31990955,-0.45566123,-0.27063859,-0.27650618,-1 106 | 0.053743351,-0.2551068,-0.0018153834,-0.18314766,0.028419029,-1 107 | 0.053743351,-0.14710223,-0.27412289,-0.19772949,-0.47165831,-1 108 | 0.053743351,0.11210875,-0.27412289,-0.18314766,-0.17893011,-1 109 | 0.053743351,0.11210875,-0.092584553,-0.27063859,-0.22771815,-1 110 | 0.053743351,0.30651698,0.36126129,-0.28522041,-0.10574806,-1 111 | 0.053743351,1.1489526,-0.0018153834,-0.31438405,-0.53264335,1 112 | 0.053743351,2.2505993,1.6320297,-0.32896587,-0.48385532,1 113 | 0.14042618,-0.44951503,0.08895378600000001,-0.15398402,-0.008171995600000001,-1 114 | 0.14042618,-0.31990955,-0.36489206,-0.29980223,-0.36188524,-1 115 | 0.14042618,-0.29830863,-0.27412289,-0.25605677,-0.032566012,-1 116 | 0.14042618,-0.29830863,-0.27412289,-0.24147495,0.4309203,-1 117 | 0.14042618,-0.16870314,-0.18335372,-0.24147495,-0.008171995600000001,-1 118 | 0.14042618,0.43612247,0.36126129,-0.28522041,-0.044763021,-1 119 | 0.14042618,1.32176,0.17972295,-0.28522041,-0.5204463499999999,1 120 | 0.227109,-1.5727626,-1.0910454,5.5620896,1.8091822,1 121 | 0.227109,-0.85993241,-0.18335372,-0.12482038,-0.34968823,-1 122 | 0.227109,-0.73032692,-0.63719957,-0.24147495,-0.26430917,-1 123 | 0.227109,-0.66552418,-0.18335372,-0.21231131,-0.10574806,-1 
124 | 0.227109,-0.38471229,-0.0018153834,0.12307056,-0.26430917,-1 125 | 0.227109,-0.08229948500000001,0.08895378600000001,-0.24147495,-0.42287028,-1 126 | 0.227109,0.15531058,-0.27412289,-0.28522041,-0.5204463499999999,-1 127 | 0.31379183,-1.0327397,-1.0910454,0.42928879,1.8823643,1 128 | 0.31379183,-0.44951503,0.08895378600000001,-0.25605677,-0.14233909,-1 129 | 0.31379183,-0.29830863,-0.092584553,-0.29980223,-0.4472643,-1 130 | 0.31379183,-0.19030406,0.08895378600000001,-0.15398402,-0.3130972,-1 131 | 0.31379183,0.0041041738,1.0874146,-0.12482038,0.21137415,-1 132 | 0.31379183,0.26331515,-0.18335372,-0.29980223,-0.22771815,-1 133 | 0.31379183,0.34971881,-0.36489206,-0.1394022,-0.1545361,-1 134 | 0.31379183,1.5809709,-0.092584553,-0.27063859,-0.50824934,1 135 | 0.40047465,-0.68712509,-0.36489206,-0.27063859,-0.081354046,-1 136 | 0.40047465,-0.5143177799999999,-0.72796874,-0.18314766,0.028419029,-1 137 | 0.40047465,-0.38471229,-0.27412289,-0.18314766,-0.4472643,-1 138 | 0.40047465,-0.31990955,-0.27412289,-0.18314766,-0.53264335,-1 139 | 0.40047465,-0.16870314,0.6335688,-0.19772949,-0.32529421,-1 140 | 0.40047465,0.0041041738,0.45203046,-0.19772949,0.18698014,-1 141 | 0.40047465,0.22011332,0.17972295,-0.37271133,-0.33749122,-1 142 | 0.40047465,0.26331515,0.45203046,-0.12482038,-0.54484036,-1 143 | 0.48715748,-0.77352875,-0.63719957,0.26888877,1.2481199,1 144 | 0.48715748,-0.60072143,-0.5464304,-0.22689313,-0.26430917,-1 145 | 0.48715748,-0.38471229,-0.18335372,-0.32896587,-0.23991515,-1 146 | 0.48715748,0.11210875,-0.092584553,-0.18314766,-0.26430917,-1 147 | 0.48715748,0.15531058,-1.0002762,-0.11023856,0.052813046,-1 148 | 0.48715748,1.1705536,0.36126129,-0.12482038,-0.26430917,-1 149 | 0.5738403,-0.44951503,-0.45566123,-0.25605677,-0.056960029,-1 150 | 0.5738403,-0.14710223,0.72433797,-0.27063859,0.0040250127,-1 151 | 0.5738403,0.025705088,-0.18335372,-0.19772949,0.016222021,-1 152 | 0.5738403,0.047306003,0.27049212,-0.18314766,-0.41067327,-1 153 | 
0.5738403,0.26331515,0.08895378600000001,-0.24147495,-0.22771815,-1 154 | 0.5738403,0.34971881,-0.092584553,-0.21231131,0.15038911,-1 155 | 0.5738403,0.43612247,-0.092584553,-0.1394022,-0.32529421,-1 156 | 0.5738403,1.3433609,-0.90950708,-0.22689313,-0.32529421,-1 157 | 0.66052313,-0.68712509,0.27049212,-0.15398402,0.30895022,-1 158 | 0.66052313,-0.44951503,0.08895378600000001,-0.27063859,-0.032566012,-1 159 | 0.66052313,-0.14710223,-0.0018153834,-0.19772949,0.32114723,-1 160 | 0.66052313,0.24171423,-0.45566123,-0.19772949,-0.25211216,-1 161 | 0.66052313,0.50092521,-0.0018153834,-0.24147495,-0.032566012,-1 162 | 0.74720595,-1.3567534,-0.36489206,1.2750344,5.4438907,1 163 | 0.74720595,-0.73032692,-0.63719957,-0.24147495,-0.3009002,-1 164 | 0.74720595,-0.73032692,-0.5464304,-0.16856584,0.89440662,1 165 | 0.74720595,-0.38471229,-0.0018153834,-0.19772949,1.1627408,-1 166 | 0.74720595,0.13370966,0.17972295,-0.31438405,-0.081354046,-1 167 | 0.74720595,0.15531058,-0.092584553,-0.21231131,-0.14233909,-1 168 | 0.74720595,0.50092521,-0.36489206,-0.27063859,-0.22771815,-1 169 | 0.8338887699999999,-1.9615791,-1.0910454,7.8076899,2.1263045,1 170 | 0.8338887699999999,-1.3135516,-0.72796874,2.9373619,0.18698014,1 171 | 0.8338887699999999,-1.0327397,-0.72796874,0.60427064,4.4681301,1 172 | 0.8338887699999999,-0.4063132,0.08895378600000001,-0.32896587,-0.11794507,-1 173 | 0.8338887699999999,0.15531058,0.17972295,-0.22689313,-0.37408225,-1 174 | 0.8338887699999999,0.32811789,0.36126129,-0.095656742,-0.3130972,-1 175 | 0.8338887699999999,0.65213161,-0.36489206,-0.22689313,-0.069157037,-1 176 | 0.9205716,-1.723969,-1.0910454,2.28118,2.4190327,1 177 | 0.9205716,-1.4863589,0.54279963,-0.24147495,0.040616038,1 178 | 0.9205716,-1.3999553,-0.092584553,0.67717974,2.1141075,1 179 | 0.9205716,-0.66552418,-0.0018153834,-0.22689313,-0.27650618,-1 180 | 0.9205716,-0.66552418,0.17972295,1.1000525,4.1997959,1 181 | 0.9205716,-0.60072143,-0.63719957,-0.19772949,0.016222021,-1 182 | 
0.9205716,-0.31990955,-0.72796874,-0.21231131,-0.33749122,-1 183 | 0.9205716,0.11210875,0.17972295,-0.25605677,-0.28870319,-1 184 | 0.9205716,0.54412704,0.45203046,-0.29980223,-0.27650618,-1 185 | 1.0072544,-1.1191434,-0.092584553,1.2167071,5.9561651,1 186 | 1.0072544,0.047306003,-0.18335372,-0.22689313,-0.49605233,-1 187 | 1.0072544,0.047306003,0.45203046,-0.29980223,-0.14233909,-1 188 | 1.0072544,0.7817371,-0.36489206,-0.18314766,-0.4472643,-1 189 | 1.0939372,-0.039097655,-0.27412289,-0.28522041,-0.23991515,-1 190 | 1.0939372,0.41452155,0.72433797,-0.16856584,-0.22771815,-1 191 | 1.1806201,-1.723969,-1.4541221,2.9081983,2.1994865,1 192 | 1.1806201,-0.92473515,-0.72796874,1.5812526,6.3586664,1 193 | 1.1806201,-0.38471229,0.36126129,-0.27063859,0.11379809,-1 194 | 1.3539857,-1.6375653,-0.90950708,1.9895436,0.65046645,1 195 | 1.3539857,-1.3351525,-0.72796874,0.82299795,2.6507758,1 196 | 1.4406685,-2.0263818,-1.5448913,1.3625253,0.5650874,1 197 | 1.4406685,-0.1039004,0.36126129,-0.27063859,-0.020369004,-1 198 | 1.4406685,0.11210875,-0.18335372,-0.24147495,-0.081354046,-1 199 | 1.5273514,-0.47111595,-0.092584553,-0.1394022,0.2723592,-1 200 | 1.5273514,0.65213161,0.45203046,-0.21231131,-0.43506729,-1 201 | 1.700717,-1.8103727,-1.1818146,1.4062708,-0.1545361,1 202 | 1.700717,0.43612247,0.72433797,-0.24147495,-0.081354046,-1 203 | 1.7873998,-0.08229948500000001,-0.18335372,-0.35812951,-0.11794507,-1 204 | 1.7873998,0.025705088,-0.27412289,-0.28522041,0.052813046,-1 205 | 1.8740827,-1.5511617,-1.0002762,1.0271434,0.065010054,1 206 | 2.0474483,-0.039097655,0.90587631,-0.29980223,-0.27650618,-1 207 | 2.1341311,-1.723969,-1.1818146,2.2665982,0.49190535,1 208 | 2.1341311,-1.7023681,-1.2725838,1.3625253,-0.23991515,1 209 | 2.1341311,1.4081636,2.6304905,-0.32896587,-0.49605233,1 210 | 2.3074968,-1.8319736,-1.4541221,4.3372167,0.51629936,1 211 | 2.5675453,-1.227148,-1.0910454,0.21056149,0.26016219,1 212 | 2.5675453,1.4081636,1.7227988,-0.25605677,-0.53264335,1 213 | 
2.7409109,-1.5943635,-0.5464304,0.82299795,0.40652629,1 214 | 2.7409109,-0.92473515,-0.092584553,0.92507069,1.2481199,1 215 | 3.0009594,2.6826176,1.268953,-0.22689313,-0.43506729,1 216 | -------------------------------------------------------------------------------- /data/Processed_Titanic.data: -------------------------------------------------------------------------------- 1 | -1.8665154,-0.2282095,-1.9186728,-1 2 | -1.8665154,-0.2282095,-1.9186728,1 3 | -1.8665154,-0.2282095,0.5209568,-1 4 | -1.8665154,-0.2282095,0.5209568,1 5 | -1.8665154,4.3799476,-1.9186728,1 6 | -1.8665154,4.3799476,0.5209568,1 7 | -0.92253555,-0.2282095,-1.9186728,-1 8 | -0.92253555,-0.2282095,-1.9186728,1 9 | -0.92253555,-0.2282095,0.5209568,-1 10 | -0.92253555,-0.2282095,0.5209568,1 11 | -0.92253555,4.3799476,-1.9186728,1 12 | -0.92253555,4.3799476,0.5209568,1 13 | 0.021444341,-0.2282095,-1.9186728,-1 14 | 0.021444341,-0.2282095,-1.9186728,1 15 | 0.021444341,-0.2282095,0.5209568,-1 16 | 0.021444341,-0.2282095,0.5209568,1 17 | 0.021444341,4.3799476,-1.9186728,-1 18 | 0.021444341,4.3799476,-1.9186728,1 19 | 0.021444341,4.3799476,0.5209568,-1 20 | 0.021444341,4.3799476,0.5209568,1 21 | 0.96542423,-0.2282095,-1.9186728,-1 22 | 0.96542423,-0.2282095,-1.9186728,1 23 | 0.96542423,-0.2282095,0.5209568,-1 24 | 0.96542423,-0.2282095,0.5209568,1 25 | -------------------------------------------------------------------------------- /data/crabs.csv: -------------------------------------------------------------------------------- 1 | "","sp","sex","index","FL","RW","CL","CW","BD" 2 | "1","B","M",1,8.1,6.7,16.1,19,7 3 | "2","B","M",2,8.8,7.7,18.1,20.8,7.4 4 | "3","B","M",3,9.2,7.8,19,22.4,7.7 5 | "4","B","M",4,9.6,7.9,20.1,23.1,8.2 6 | "5","B","M",5,9.8,8,20.3,23,8.2 7 | "6","B","M",6,10.8,9,23,26.5,9.8 8 | "7","B","M",7,11.1,9.9,23.8,27.1,9.8 9 | "8","B","M",8,11.6,9.1,24.5,28.4,10.4 10 | "9","B","M",9,11.8,9.6,24.2,27.8,9.7 11 | "10","B","M",10,11.8,10.5,25.2,29.3,10.3 12 | 
"11","B","M",11,12.2,10.8,27.3,31.6,10.9 13 | "12","B","M",12,12.3,11,26.8,31.5,11.4 14 | "13","B","M",13,12.6,10,27.7,31.7,11.4 15 | "14","B","M",14,12.8,10.2,27.2,31.8,10.9 16 | "15","B","M",15,12.8,10.9,27.4,31.5,11 17 | "16","B","M",16,12.9,11,26.8,30.9,11.4 18 | "17","B","M",17,13.1,10.6,28.2,32.3,11 19 | "18","B","M",18,13.1,10.9,28.3,32.4,11.2 20 | "19","B","M",19,13.3,11.1,27.8,32.3,11.3 21 | "20","B","M",20,13.9,11.1,29.2,33.3,12.1 22 | "21","B","M",21,14.3,11.6,31.3,35.5,12.7 23 | "22","B","M",22,14.6,11.3,31.9,36.4,13.7 24 | "23","B","M",23,15,10.9,31.4,36.4,13.2 25 | "24","B","M",24,15,11.5,32.4,37,13.4 26 | "25","B","M",25,15,11.9,32.5,37.2,13.6 27 | "26","B","M",26,15.2,12.1,32.3,36.7,13.6 28 | "27","B","M",27,15.4,11.8,33,37.5,13.6 29 | "28","B","M",28,15.7,12.6,35.8,40.3,14.5 30 | "29","B","M",29,15.9,12.7,34,38.9,14.2 31 | "30","B","M",30,16.1,11.6,33.8,39,14.4 32 | "31","B","M",31,16.1,12.8,34.9,40.7,15.7 33 | "32","B","M",32,16.2,13.3,36,41.7,15.4 34 | "33","B","M",33,16.3,12.7,35.6,40.9,14.9 35 | "34","B","M",34,16.4,13,35.7,41.8,15.2 36 | "35","B","M",35,16.6,13.5,38.1,43.4,14.9 37 | "36","B","M",36,16.8,12.8,36.2,41.8,14.9 38 | "37","B","M",37,16.9,13.2,37.3,42.7,15.6 39 | "38","B","M",38,17.1,12.6,36.4,42,15.1 40 | "39","B","M",39,17.1,12.7,36.7,41.9,15.6 41 | "40","B","M",40,17.2,13.5,37.6,43.9,16.1 42 | "41","B","M",41,17.7,13.6,38.7,44.5,16 43 | "42","B","M",42,17.9,14.1,39.7,44.6,16.8 44 | "43","B","M",43,18,13.7,39.2,44.4,16.2 45 | "44","B","M",44,18.8,15.8,42.1,49,17.8 46 | "45","B","M",45,19.3,13.5,41.6,47.4,17.8 47 | "46","B","M",46,19.3,13.8,40.9,46.5,16.8 48 | "47","B","M",47,19.7,15.3,41.9,48.5,17.8 49 | "48","B","M",48,19.8,14.2,43.2,49.7,18.6 50 | "49","B","M",49,19.8,14.3,42.4,48.9,18.3 51 | "50","B","M",50,21.3,15.7,47.1,54.6,20 52 | "51","B","F",1,7.2,6.5,14.7,17.1,6.1 53 | "52","B","F",2,9,8.5,19.3,22.7,7.7 54 | "53","B","F",3,9.1,8.1,18.5,21.6,7.7 55 | "54","B","F",4,9.1,8.2,19.2,22.2,7.7 56 | 
"55","B","F",5,9.5,8.2,19.6,22.4,7.8 57 | "56","B","F",6,9.8,8.9,20.4,23.9,8.8 58 | "57","B","F",7,10.1,9.3,20.9,24.4,8.4 59 | "58","B","F",8,10.3,9.5,21.3,24.7,8.9 60 | "59","B","F",9,10.4,9.7,21.7,25.4,8.3 61 | "60","B","F",10,10.8,9.5,22.5,26.3,9.1 62 | "61","B","F",11,11,9.8,22.5,25.7,8.2 63 | "62","B","F",12,11.2,10,22.8,26.9,9.4 64 | "63","B","F",13,11.5,11,24.7,29.2,10.1 65 | "64","B","F",14,11.6,11,24.6,28.5,10.4 66 | "65","B","F",15,11.6,11.4,23.7,27.7,10 67 | "66","B","F",16,11.7,10.6,24.9,28.5,10.4 68 | "67","B","F",17,11.9,11.4,26,30.1,10.9 69 | "68","B","F",18,12,10.7,24.6,28.9,10.5 70 | "69","B","F",19,12,11.1,25.4,29.2,11 71 | "70","B","F",20,12.6,12.2,26.1,31.6,11.2 72 | "71","B","F",21,12.8,11.7,27.1,31.2,11.9 73 | "72","B","F",22,12.8,12.2,26.7,31.1,11.1 74 | "73","B","F",23,12.8,12.2,27.9,31.9,11.5 75 | "74","B","F",24,13,11.4,27.3,31.8,11.3 76 | "75","B","F",25,13.1,11.5,27.6,32.6,11.1 77 | "76","B","F",26,13.2,12.2,27.9,32.1,11.5 78 | "77","B","F",27,13.4,11.8,28.4,32.7,11.7 79 | "78","B","F",28,13.7,12.5,28.6,33.8,11.9 80 | "79","B","F",29,13.9,13,30,34.9,13.1 81 | "80","B","F",30,14.7,12.5,30.1,34.7,12.5 82 | "81","B","F",31,14.9,13.2,30.1,35.6,12 83 | "82","B","F",32,15,13.8,31.7,36.9,14 84 | "83","B","F",33,15,14.2,32.8,37.4,14 85 | "84","B","F",34,15.1,13.3,31.8,36.3,13.5 86 | "85","B","F",35,15.1,13.5,31.9,37,13.8 87 | "86","B","F",36,15.1,13.8,31.7,36.6,13 88 | "87","B","F",37,15.2,14.3,33.9,38.5,14.7 89 | "88","B","F",38,15.3,14.2,32.6,38.3,13.8 90 | "89","B","F",39,15.4,13.3,32.4,37.6,13.8 91 | "90","B","F",40,15.5,13.8,33.4,38.7,14.7 92 | "91","B","F",41,15.6,13.9,32.8,37.9,13.4 93 | "92","B","F",42,15.6,14.7,33.9,39.5,14.3 94 | "93","B","F",43,15.7,13.9,33.6,38.5,14.1 95 | "94","B","F",44,15.8,15,34.5,40.3,15.3 96 | "95","B","F",45,16.2,15.2,34.5,40.1,13.9 97 | "96","B","F",46,16.4,14,34.2,39.8,15.2 98 | "97","B","F",47,16.7,16.1,36.6,41.9,15.4 99 | "98","B","F",48,17.4,16.9,38.2,44.1,16.6 100 | "99","B","F",49,17.5,16.7,38.6,44.5,17 
101 | "100","B","F",50,19.2,16.5,40.9,47.9,18.1 102 | "101","O","M",1,9.1,6.9,16.7,18.6,7.4 103 | "102","O","M",2,10.2,8.2,20.2,22.2,9 104 | "103","O","M",3,10.7,8.6,20.7,22.7,9.2 105 | "104","O","M",4,11.4,9,22.7,24.8,10.1 106 | "105","O","M",5,12.5,9.4,23.2,26,10.8 107 | "106","O","M",6,12.5,9.4,24.2,27,11.2 108 | "107","O","M",7,12.7,10.4,26,28.8,12.1 109 | "108","O","M",8,13.2,11,27.1,30.4,12.2 110 | "109","O","M",9,13.4,10.1,26.6,29.6,12 111 | "110","O","M",10,13.7,11,27.5,30.5,12.2 112 | "111","O","M",11,14,11.5,29.2,32.2,13.1 113 | "112","O","M",12,14.1,10.4,28.9,31.8,13.5 114 | "113","O","M",13,14.1,10.5,29.1,31.6,13.1 115 | "114","O","M",14,14.1,10.7,28.7,31.9,13.3 116 | "115","O","M",15,14.2,10.6,28.7,31.7,12.9 117 | "116","O","M",16,14.2,10.7,27.8,30.9,12.7 118 | "117","O","M",17,14.2,11.3,29.2,32.2,13.5 119 | "118","O","M",18,14.6,11.3,29.9,33.5,12.8 120 | "119","O","M",19,14.7,11.1,29,32.1,13.1 121 | "120","O","M",20,15.1,11.4,30.2,33.3,14 122 | "121","O","M",21,15.1,11.5,30.9,34,13.9 123 | "122","O","M",22,15.4,11.1,30.2,33.6,13.5 124 | "123","O","M",23,15.7,12.2,31.7,34.2,14.2 125 | "124","O","M",24,16.2,11.8,32.3,35.3,14.7 126 | "125","O","M",25,16.3,11.6,31.6,34.2,14.5 127 | "126","O","M",26,17.1,12.6,35,38.9,15.7 128 | "127","O","M",27,17.4,12.8,36.1,39.5,16.2 129 | "128","O","M",28,17.5,12,34.4,37.3,15.3 130 | "129","O","M",29,17.5,12.7,34.6,38.4,16.1 131 | "130","O","M",30,17.8,12.5,36,39.8,16.7 132 | "131","O","M",31,17.9,12.9,36.9,40.9,16.5 133 | "132","O","M",32,18,13.4,36.7,41.3,17.1 134 | "133","O","M",33,18.2,13.7,38.8,42.7,17.2 135 | "134","O","M",34,18.4,13.4,37.9,42.2,17.7 136 | "135","O","M",35,18.6,13.4,37.8,41.9,17.3 137 | "136","O","M",36,18.6,13.5,36.9,40.2,17 138 | "137","O","M",37,18.8,13.4,37.2,41.1,17.5 139 | "138","O","M",38,18.8,13.8,39.2,43.3,17.9 140 | "139","O","M",39,19.4,14.1,39.1,43.2,17.8 141 | "140","O","M",40,19.4,14.4,39.8,44.3,17.9 142 | "141","O","M",41,20.1,13.7,40.6,44.5,18 143 | 
"142","O","M",42,20.6,14.4,42.8,46.5,19.6 144 | "143","O","M",43,21,15,42.9,47.2,19.4 145 | "144","O","M",44,21.5,15.5,45.5,49.7,20.9 146 | "145","O","M",45,21.6,15.4,45.7,49.7,20.6 147 | "146","O","M",46,21.6,14.8,43.4,48.2,20.1 148 | "147","O","M",47,21.9,15.7,45.4,51,21.1 149 | "148","O","M",48,22.1,15.8,44.6,49.6,20.5 150 | "149","O","M",49,23,16.8,47.2,52.1,21.5 151 | "150","O","M",50,23.1,15.7,47.6,52.8,21.6 152 | "151","O","F",1,10.7,9.7,21.4,24,9.8 153 | "152","O","F",2,11.4,9.2,21.7,24.1,9.7 154 | "153","O","F",3,12.5,10,24.1,27,10.9 155 | "154","O","F",4,12.6,11.5,25,28.1,11.5 156 | "155","O","F",5,12.9,11.2,25.8,29.1,11.9 157 | "156","O","F",6,14,11.9,27,31.4,12.6 158 | "157","O","F",7,14,12.8,28.8,32.4,12.7 159 | "158","O","F",8,14.3,12.2,28.1,31.8,12.5 160 | "159","O","F",9,14.7,13.2,29.6,33.4,12.9 161 | "160","O","F",10,14.9,13,30,33.7,13.3 162 | "161","O","F",11,15,12.3,30.1,33.3,14 163 | "162","O","F",12,15.6,13.5,31.2,35.1,14.1 164 | "163","O","F",13,15.6,14,31.6,35.3,13.8 165 | "164","O","F",14,15.6,14.1,31,34.5,13.8 166 | "165","O","F",15,15.7,13.6,31,34.8,13.8 167 | "166","O","F",16,16.1,13.6,31.6,36,14 168 | "167","O","F",17,16.1,13.7,31.4,36.1,13.9 169 | "168","O","F",18,16.2,14,31.6,35.6,13.7 170 | "169","O","F",19,16.7,14.3,32.3,37,14.7 171 | "170","O","F",20,17.1,14.5,33.1,37.2,14.6 172 | "171","O","F",21,17.5,14.3,34.5,39.6,15.6 173 | "172","O","F",22,17.5,14.4,34.5,39,16 174 | "173","O","F",23,17.5,14.7,33.3,37.6,14.6 175 | "174","O","F",24,17.6,14,34,38.6,15.5 176 | "175","O","F",25,18,14.9,34.7,39.5,15.7 177 | "176","O","F",26,18,16.3,37.9,43,17.2 178 | "177","O","F",27,18.3,15.7,35.1,40.5,16.1 179 | "178","O","F",28,18.4,15.5,35.6,40,15.9 180 | "179","O","F",29,18.4,15.7,36.5,41.6,16.4 181 | "180","O","F",30,18.5,14.6,37,42,16.6 182 | "181","O","F",31,18.6,14.5,34.7,39.4,15 183 | "182","O","F",32,18.8,15.2,35.8,40.5,16.6 184 | "183","O","F",33,18.9,16.7,36.3,41.7,15.3 185 | "184","O","F",34,19.1,16,37.8,42.3,16.8 186 | 
"185","O","F",35,19.1,16.3,37.9,42.6,17.2 187 | "186","O","F",36,19.7,16.7,39.9,43.6,18.2 188 | "187","O","F",37,19.9,16.6,39.4,43.9,17.9 189 | "188","O","F",38,19.9,17.9,40.1,46.4,17.9 190 | "189","O","F",39,20,16.7,40.4,45.1,17.7 191 | "190","O","F",40,20.1,17.2,39.8,44.1,18.6 192 | "191","O","F",41,20.3,16,39.4,44.1,18 193 | "192","O","F",42,20.5,17.5,40,45.5,19.2 194 | "193","O","F",43,20.6,17.5,41.5,46.2,19.2 195 | "194","O","F",44,20.9,16.5,39.9,44.7,17.5 196 | "195","O","F",45,21.3,18.4,43.8,48.4,20 197 | "196","O","F",46,21.4,18,41.2,46.2,18.7 198 | "197","O","F",47,21.7,17.1,41.7,47.2,19.6 199 | "198","O","F",48,21.9,17.2,42.6,47.4,19.5 200 | "199","O","F",49,22.5,17.2,43,48.7,19.8 201 | "200","O","F",50,23.1,20.2,46.2,52.5,21.1 202 | -------------------------------------------------------------------------------- /data/pima-indians-diabetes.data: -------------------------------------------------------------------------------- 1 | 6,148,72,35,0,33.6,0.627,50,1 2 | 1,85,66,29,0,26.6,0.351,31,0 3 | 8,183,64,0,0,23.3,0.672,32,1 4 | 1,89,66,23,94,28.1,0.167,21,0 5 | 0,137,40,35,168,43.1,2.288,33,1 6 | 5,116,74,0,0,25.6,0.201,30,0 7 | 3,78,50,32,88,31.0,0.248,26,1 8 | 10,115,0,0,0,35.3,0.134,29,0 9 | 2,197,70,45,543,30.5,0.158,53,1 10 | 8,125,96,0,0,0.0,0.232,54,1 11 | 4,110,92,0,0,37.6,0.191,30,0 12 | 10,168,74,0,0,38.0,0.537,34,1 13 | 10,139,80,0,0,27.1,1.441,57,0 14 | 1,189,60,23,846,30.1,0.398,59,1 15 | 5,166,72,19,175,25.8,0.587,51,1 16 | 7,100,0,0,0,30.0,0.484,32,1 17 | 0,118,84,47,230,45.8,0.551,31,1 18 | 7,107,74,0,0,29.6,0.254,31,1 19 | 1,103,30,38,83,43.3,0.183,33,0 20 | 1,115,70,30,96,34.6,0.529,32,1 21 | 3,126,88,41,235,39.3,0.704,27,0 22 | 8,99,84,0,0,35.4,0.388,50,0 23 | 7,196,90,0,0,39.8,0.451,41,1 24 | 9,119,80,35,0,29.0,0.263,29,1 25 | 11,143,94,33,146,36.6,0.254,51,1 26 | 10,125,70,26,115,31.1,0.205,41,1 27 | 7,147,76,0,0,39.4,0.257,43,1 28 | 1,97,66,15,140,23.2,0.487,22,0 29 | 13,145,82,19,110,22.2,0.245,57,0 30 | 5,117,92,0,0,34.1,0.337,38,0 
31 | 5,109,75,26,0,36.0,0.546,60,0 32 | 3,158,76,36,245,31.6,0.851,28,1 33 | 3,88,58,11,54,24.8,0.267,22,0 34 | 6,92,92,0,0,19.9,0.188,28,0 35 | 10,122,78,31,0,27.6,0.512,45,0 36 | 4,103,60,33,192,24.0,0.966,33,0 37 | 11,138,76,0,0,33.2,0.420,35,0 38 | 9,102,76,37,0,32.9,0.665,46,1 39 | 2,90,68,42,0,38.2,0.503,27,1 40 | 4,111,72,47,207,37.1,1.390,56,1 41 | 3,180,64,25,70,34.0,0.271,26,0 42 | 7,133,84,0,0,40.2,0.696,37,0 43 | 7,106,92,18,0,22.7,0.235,48,0 44 | 9,171,110,24,240,45.4,0.721,54,1 45 | 7,159,64,0,0,27.4,0.294,40,0 46 | 0,180,66,39,0,42.0,1.893,25,1 47 | 1,146,56,0,0,29.7,0.564,29,0 48 | 2,71,70,27,0,28.0,0.586,22,0 49 | 7,103,66,32,0,39.1,0.344,31,1 50 | 7,105,0,0,0,0.0,0.305,24,0 51 | 1,103,80,11,82,19.4,0.491,22,0 52 | 1,101,50,15,36,24.2,0.526,26,0 53 | 5,88,66,21,23,24.4,0.342,30,0 54 | 8,176,90,34,300,33.7,0.467,58,1 55 | 7,150,66,42,342,34.7,0.718,42,0 56 | 1,73,50,10,0,23.0,0.248,21,0 57 | 7,187,68,39,304,37.7,0.254,41,1 58 | 0,100,88,60,110,46.8,0.962,31,0 59 | 0,146,82,0,0,40.5,1.781,44,0 60 | 0,105,64,41,142,41.5,0.173,22,0 61 | 2,84,0,0,0,0.0,0.304,21,0 62 | 8,133,72,0,0,32.9,0.270,39,1 63 | 5,44,62,0,0,25.0,0.587,36,0 64 | 2,141,58,34,128,25.4,0.699,24,0 65 | 7,114,66,0,0,32.8,0.258,42,1 66 | 5,99,74,27,0,29.0,0.203,32,0 67 | 0,109,88,30,0,32.5,0.855,38,1 68 | 2,109,92,0,0,42.7,0.845,54,0 69 | 1,95,66,13,38,19.6,0.334,25,0 70 | 4,146,85,27,100,28.9,0.189,27,0 71 | 2,100,66,20,90,32.9,0.867,28,1 72 | 5,139,64,35,140,28.6,0.411,26,0 73 | 13,126,90,0,0,43.4,0.583,42,1 74 | 4,129,86,20,270,35.1,0.231,23,0 75 | 1,79,75,30,0,32.0,0.396,22,0 76 | 1,0,48,20,0,24.7,0.140,22,0 77 | 7,62,78,0,0,32.6,0.391,41,0 78 | 5,95,72,33,0,37.7,0.370,27,0 79 | 0,131,0,0,0,43.2,0.270,26,1 80 | 2,112,66,22,0,25.0,0.307,24,0 81 | 3,113,44,13,0,22.4,0.140,22,0 82 | 2,74,0,0,0,0.0,0.102,22,0 83 | 7,83,78,26,71,29.3,0.767,36,0 84 | 0,101,65,28,0,24.6,0.237,22,0 85 | 5,137,108,0,0,48.8,0.227,37,1 86 | 2,110,74,29,125,32.4,0.698,27,0 87 | 13,106,72,54,0,36.6,0.178,45,0 88 
| 2,100,68,25,71,38.5,0.324,26,0 89 | 15,136,70,32,110,37.1,0.153,43,1 90 | 1,107,68,19,0,26.5,0.165,24,0 91 | 1,80,55,0,0,19.1,0.258,21,0 92 | 4,123,80,15,176,32.0,0.443,34,0 93 | 7,81,78,40,48,46.7,0.261,42,0 94 | 4,134,72,0,0,23.8,0.277,60,1 95 | 2,142,82,18,64,24.7,0.761,21,0 96 | 6,144,72,27,228,33.9,0.255,40,0 97 | 2,92,62,28,0,31.6,0.130,24,0 98 | 1,71,48,18,76,20.4,0.323,22,0 99 | 6,93,50,30,64,28.7,0.356,23,0 100 | 1,122,90,51,220,49.7,0.325,31,1 101 | 1,163,72,0,0,39.0,1.222,33,1 102 | 1,151,60,0,0,26.1,0.179,22,0 103 | 0,125,96,0,0,22.5,0.262,21,0 104 | 1,81,72,18,40,26.6,0.283,24,0 105 | 2,85,65,0,0,39.6,0.930,27,0 106 | 1,126,56,29,152,28.7,0.801,21,0 107 | 1,96,122,0,0,22.4,0.207,27,0 108 | 4,144,58,28,140,29.5,0.287,37,0 109 | 3,83,58,31,18,34.3,0.336,25,0 110 | 0,95,85,25,36,37.4,0.247,24,1 111 | 3,171,72,33,135,33.3,0.199,24,1 112 | 8,155,62,26,495,34.0,0.543,46,1 113 | 1,89,76,34,37,31.2,0.192,23,0 114 | 4,76,62,0,0,34.0,0.391,25,0 115 | 7,160,54,32,175,30.5,0.588,39,1 116 | 4,146,92,0,0,31.2,0.539,61,1 117 | 5,124,74,0,0,34.0,0.220,38,1 118 | 5,78,48,0,0,33.7,0.654,25,0 119 | 4,97,60,23,0,28.2,0.443,22,0 120 | 4,99,76,15,51,23.2,0.223,21,0 121 | 0,162,76,56,100,53.2,0.759,25,1 122 | 6,111,64,39,0,34.2,0.260,24,0 123 | 2,107,74,30,100,33.6,0.404,23,0 124 | 5,132,80,0,0,26.8,0.186,69,0 125 | 0,113,76,0,0,33.3,0.278,23,1 126 | 1,88,30,42,99,55.0,0.496,26,1 127 | 3,120,70,30,135,42.9,0.452,30,0 128 | 1,118,58,36,94,33.3,0.261,23,0 129 | 1,117,88,24,145,34.5,0.403,40,1 130 | 0,105,84,0,0,27.9,0.741,62,1 131 | 4,173,70,14,168,29.7,0.361,33,1 132 | 9,122,56,0,0,33.3,1.114,33,1 133 | 3,170,64,37,225,34.5,0.356,30,1 134 | 8,84,74,31,0,38.3,0.457,39,0 135 | 2,96,68,13,49,21.1,0.647,26,0 136 | 2,125,60,20,140,33.8,0.088,31,0 137 | 0,100,70,26,50,30.8,0.597,21,0 138 | 0,93,60,25,92,28.7,0.532,22,0 139 | 0,129,80,0,0,31.2,0.703,29,0 140 | 5,105,72,29,325,36.9,0.159,28,0 141 | 3,128,78,0,0,21.1,0.268,55,0 142 | 5,106,82,30,0,39.5,0.286,38,0 143 | 
2,108,52,26,63,32.5,0.318,22,0 144 | 10,108,66,0,0,32.4,0.272,42,1 145 | 4,154,62,31,284,32.8,0.237,23,0 146 | 0,102,75,23,0,0.0,0.572,21,0 147 | 9,57,80,37,0,32.8,0.096,41,0 148 | 2,106,64,35,119,30.5,1.400,34,0 149 | 5,147,78,0,0,33.7,0.218,65,0 150 | 2,90,70,17,0,27.3,0.085,22,0 151 | 1,136,74,50,204,37.4,0.399,24,0 152 | 4,114,65,0,0,21.9,0.432,37,0 153 | 9,156,86,28,155,34.3,1.189,42,1 154 | 1,153,82,42,485,40.6,0.687,23,0 155 | 8,188,78,0,0,47.9,0.137,43,1 156 | 7,152,88,44,0,50.0,0.337,36,1 157 | 2,99,52,15,94,24.6,0.637,21,0 158 | 1,109,56,21,135,25.2,0.833,23,0 159 | 2,88,74,19,53,29.0,0.229,22,0 160 | 17,163,72,41,114,40.9,0.817,47,1 161 | 4,151,90,38,0,29.7,0.294,36,0 162 | 7,102,74,40,105,37.2,0.204,45,0 163 | 0,114,80,34,285,44.2,0.167,27,0 164 | 2,100,64,23,0,29.7,0.368,21,0 165 | 0,131,88,0,0,31.6,0.743,32,1 166 | 6,104,74,18,156,29.9,0.722,41,1 167 | 3,148,66,25,0,32.5,0.256,22,0 168 | 4,120,68,0,0,29.6,0.709,34,0 169 | 4,110,66,0,0,31.9,0.471,29,0 170 | 3,111,90,12,78,28.4,0.495,29,0 171 | 6,102,82,0,0,30.8,0.180,36,1 172 | 6,134,70,23,130,35.4,0.542,29,1 173 | 2,87,0,23,0,28.9,0.773,25,0 174 | 1,79,60,42,48,43.5,0.678,23,0 175 | 2,75,64,24,55,29.7,0.370,33,0 176 | 8,179,72,42,130,32.7,0.719,36,1 177 | 6,85,78,0,0,31.2,0.382,42,0 178 | 0,129,110,46,130,67.1,0.319,26,1 179 | 5,143,78,0,0,45.0,0.190,47,0 180 | 5,130,82,0,0,39.1,0.956,37,1 181 | 6,87,80,0,0,23.2,0.084,32,0 182 | 0,119,64,18,92,34.9,0.725,23,0 183 | 1,0,74,20,23,27.7,0.299,21,0 184 | 5,73,60,0,0,26.8,0.268,27,0 185 | 4,141,74,0,0,27.6,0.244,40,0 186 | 7,194,68,28,0,35.9,0.745,41,1 187 | 8,181,68,36,495,30.1,0.615,60,1 188 | 1,128,98,41,58,32.0,1.321,33,1 189 | 8,109,76,39,114,27.9,0.640,31,1 190 | 5,139,80,35,160,31.6,0.361,25,1 191 | 3,111,62,0,0,22.6,0.142,21,0 192 | 9,123,70,44,94,33.1,0.374,40,0 193 | 7,159,66,0,0,30.4,0.383,36,1 194 | 11,135,0,0,0,52.3,0.578,40,1 195 | 8,85,55,20,0,24.4,0.136,42,0 196 | 5,158,84,41,210,39.4,0.395,29,1 197 | 1,105,58,0,0,24.3,0.187,21,0 198 | 
3,107,62,13,48,22.9,0.678,23,1 199 | 4,109,64,44,99,34.8,0.905,26,1 200 | 4,148,60,27,318,30.9,0.150,29,1 201 | 0,113,80,16,0,31.0,0.874,21,0 202 | 1,138,82,0,0,40.1,0.236,28,0 203 | 0,108,68,20,0,27.3,0.787,32,0 204 | 2,99,70,16,44,20.4,0.235,27,0 205 | 6,103,72,32,190,37.7,0.324,55,0 206 | 5,111,72,28,0,23.9,0.407,27,0 207 | 8,196,76,29,280,37.5,0.605,57,1 208 | 5,162,104,0,0,37.7,0.151,52,1 209 | 1,96,64,27,87,33.2,0.289,21,0 210 | 7,184,84,33,0,35.5,0.355,41,1 211 | 2,81,60,22,0,27.7,0.290,25,0 212 | 0,147,85,54,0,42.8,0.375,24,0 213 | 7,179,95,31,0,34.2,0.164,60,0 214 | 0,140,65,26,130,42.6,0.431,24,1 215 | 9,112,82,32,175,34.2,0.260,36,1 216 | 12,151,70,40,271,41.8,0.742,38,1 217 | 5,109,62,41,129,35.8,0.514,25,1 218 | 6,125,68,30,120,30.0,0.464,32,0 219 | 5,85,74,22,0,29.0,1.224,32,1 220 | 5,112,66,0,0,37.8,0.261,41,1 221 | 0,177,60,29,478,34.6,1.072,21,1 222 | 2,158,90,0,0,31.6,0.805,66,1 223 | 7,119,0,0,0,25.2,0.209,37,0 224 | 7,142,60,33,190,28.8,0.687,61,0 225 | 1,100,66,15,56,23.6,0.666,26,0 226 | 1,87,78,27,32,34.6,0.101,22,0 227 | 0,101,76,0,0,35.7,0.198,26,0 228 | 3,162,52,38,0,37.2,0.652,24,1 229 | 4,197,70,39,744,36.7,2.329,31,0 230 | 0,117,80,31,53,45.2,0.089,24,0 231 | 4,142,86,0,0,44.0,0.645,22,1 232 | 6,134,80,37,370,46.2,0.238,46,1 233 | 1,79,80,25,37,25.4,0.583,22,0 234 | 4,122,68,0,0,35.0,0.394,29,0 235 | 3,74,68,28,45,29.7,0.293,23,0 236 | 4,171,72,0,0,43.6,0.479,26,1 237 | 7,181,84,21,192,35.9,0.586,51,1 238 | 0,179,90,27,0,44.1,0.686,23,1 239 | 9,164,84,21,0,30.8,0.831,32,1 240 | 0,104,76,0,0,18.4,0.582,27,0 241 | 1,91,64,24,0,29.2,0.192,21,0 242 | 4,91,70,32,88,33.1,0.446,22,0 243 | 3,139,54,0,0,25.6,0.402,22,1 244 | 6,119,50,22,176,27.1,1.318,33,1 245 | 2,146,76,35,194,38.2,0.329,29,0 246 | 9,184,85,15,0,30.0,1.213,49,1 247 | 10,122,68,0,0,31.2,0.258,41,0 248 | 0,165,90,33,680,52.3,0.427,23,0 249 | 9,124,70,33,402,35.4,0.282,34,0 250 | 1,111,86,19,0,30.1,0.143,23,0 251 | 9,106,52,0,0,31.2,0.380,42,0 252 | 2,129,84,0,0,28.0,0.284,27,0 
253 | 2,90,80,14,55,24.4,0.249,24,0 254 | 0,86,68,32,0,35.8,0.238,25,0 255 | 12,92,62,7,258,27.6,0.926,44,1 256 | 1,113,64,35,0,33.6,0.543,21,1 257 | 3,111,56,39,0,30.1,0.557,30,0 258 | 2,114,68,22,0,28.7,0.092,25,0 259 | 1,193,50,16,375,25.9,0.655,24,0 260 | 11,155,76,28,150,33.3,1.353,51,1 261 | 3,191,68,15,130,30.9,0.299,34,0 262 | 3,141,0,0,0,30.0,0.761,27,1 263 | 4,95,70,32,0,32.1,0.612,24,0 264 | 3,142,80,15,0,32.4,0.200,63,0 265 | 4,123,62,0,0,32.0,0.226,35,1 266 | 5,96,74,18,67,33.6,0.997,43,0 267 | 0,138,0,0,0,36.3,0.933,25,1 268 | 2,128,64,42,0,40.0,1.101,24,0 269 | 0,102,52,0,0,25.1,0.078,21,0 270 | 2,146,0,0,0,27.5,0.240,28,1 271 | 10,101,86,37,0,45.6,1.136,38,1 272 | 2,108,62,32,56,25.2,0.128,21,0 273 | 3,122,78,0,0,23.0,0.254,40,0 274 | 1,71,78,50,45,33.2,0.422,21,0 275 | 13,106,70,0,0,34.2,0.251,52,0 276 | 2,100,70,52,57,40.5,0.677,25,0 277 | 7,106,60,24,0,26.5,0.296,29,1 278 | 0,104,64,23,116,27.8,0.454,23,0 279 | 5,114,74,0,0,24.9,0.744,57,0 280 | 2,108,62,10,278,25.3,0.881,22,0 281 | 0,146,70,0,0,37.9,0.334,28,1 282 | 10,129,76,28,122,35.9,0.280,39,0 283 | 7,133,88,15,155,32.4,0.262,37,0 284 | 7,161,86,0,0,30.4,0.165,47,1 285 | 2,108,80,0,0,27.0,0.259,52,1 286 | 7,136,74,26,135,26.0,0.647,51,0 287 | 5,155,84,44,545,38.7,0.619,34,0 288 | 1,119,86,39,220,45.6,0.808,29,1 289 | 4,96,56,17,49,20.8,0.340,26,0 290 | 5,108,72,43,75,36.1,0.263,33,0 291 | 0,78,88,29,40,36.9,0.434,21,0 292 | 0,107,62,30,74,36.6,0.757,25,1 293 | 2,128,78,37,182,43.3,1.224,31,1 294 | 1,128,48,45,194,40.5,0.613,24,1 295 | 0,161,50,0,0,21.9,0.254,65,0 296 | 6,151,62,31,120,35.5,0.692,28,0 297 | 2,146,70,38,360,28.0,0.337,29,1 298 | 0,126,84,29,215,30.7,0.520,24,0 299 | 14,100,78,25,184,36.6,0.412,46,1 300 | 8,112,72,0,0,23.6,0.840,58,0 301 | 0,167,0,0,0,32.3,0.839,30,1 302 | 2,144,58,33,135,31.6,0.422,25,1 303 | 5,77,82,41,42,35.8,0.156,35,0 304 | 5,115,98,0,0,52.9,0.209,28,1 305 | 3,150,76,0,0,21.0,0.207,37,0 306 | 2,120,76,37,105,39.7,0.215,29,0 307 | 
10,161,68,23,132,25.5,0.326,47,1 308 | 0,137,68,14,148,24.8,0.143,21,0 309 | 0,128,68,19,180,30.5,1.391,25,1 310 | 2,124,68,28,205,32.9,0.875,30,1 311 | 6,80,66,30,0,26.2,0.313,41,0 312 | 0,106,70,37,148,39.4,0.605,22,0 313 | 2,155,74,17,96,26.6,0.433,27,1 314 | 3,113,50,10,85,29.5,0.626,25,0 315 | 7,109,80,31,0,35.9,1.127,43,1 316 | 2,112,68,22,94,34.1,0.315,26,0 317 | 3,99,80,11,64,19.3,0.284,30,0 318 | 3,182,74,0,0,30.5,0.345,29,1 319 | 3,115,66,39,140,38.1,0.150,28,0 320 | 6,194,78,0,0,23.5,0.129,59,1 321 | 4,129,60,12,231,27.5,0.527,31,0 322 | 3,112,74,30,0,31.6,0.197,25,1 323 | 0,124,70,20,0,27.4,0.254,36,1 324 | 13,152,90,33,29,26.8,0.731,43,1 325 | 2,112,75,32,0,35.7,0.148,21,0 326 | 1,157,72,21,168,25.6,0.123,24,0 327 | 1,122,64,32,156,35.1,0.692,30,1 328 | 10,179,70,0,0,35.1,0.200,37,0 329 | 2,102,86,36,120,45.5,0.127,23,1 330 | 6,105,70,32,68,30.8,0.122,37,0 331 | 8,118,72,19,0,23.1,1.476,46,0 332 | 2,87,58,16,52,32.7,0.166,25,0 333 | 1,180,0,0,0,43.3,0.282,41,1 334 | 12,106,80,0,0,23.6,0.137,44,0 335 | 1,95,60,18,58,23.9,0.260,22,0 336 | 0,165,76,43,255,47.9,0.259,26,0 337 | 0,117,0,0,0,33.8,0.932,44,0 338 | 5,115,76,0,0,31.2,0.343,44,1 339 | 9,152,78,34,171,34.2,0.893,33,1 340 | 7,178,84,0,0,39.9,0.331,41,1 341 | 1,130,70,13,105,25.9,0.472,22,0 342 | 1,95,74,21,73,25.9,0.673,36,0 343 | 1,0,68,35,0,32.0,0.389,22,0 344 | 5,122,86,0,0,34.7,0.290,33,0 345 | 8,95,72,0,0,36.8,0.485,57,0 346 | 8,126,88,36,108,38.5,0.349,49,0 347 | 1,139,46,19,83,28.7,0.654,22,0 348 | 3,116,0,0,0,23.5,0.187,23,0 349 | 3,99,62,19,74,21.8,0.279,26,0 350 | 5,0,80,32,0,41.0,0.346,37,1 351 | 4,92,80,0,0,42.2,0.237,29,0 352 | 4,137,84,0,0,31.2,0.252,30,0 353 | 3,61,82,28,0,34.4,0.243,46,0 354 | 1,90,62,12,43,27.2,0.580,24,0 355 | 3,90,78,0,0,42.7,0.559,21,0 356 | 9,165,88,0,0,30.4,0.302,49,1 357 | 1,125,50,40,167,33.3,0.962,28,1 358 | 13,129,0,30,0,39.9,0.569,44,1 359 | 12,88,74,40,54,35.3,0.378,48,0 360 | 1,196,76,36,249,36.5,0.875,29,1 361 | 5,189,64,33,325,31.2,0.583,29,1 362 | 
5,158,70,0,0,29.8,0.207,63,0 363 | 5,103,108,37,0,39.2,0.305,65,0 364 | 4,146,78,0,0,38.5,0.520,67,1 365 | 4,147,74,25,293,34.9,0.385,30,0 366 | 5,99,54,28,83,34.0,0.499,30,0 367 | 6,124,72,0,0,27.6,0.368,29,1 368 | 0,101,64,17,0,21.0,0.252,21,0 369 | 3,81,86,16,66,27.5,0.306,22,0 370 | 1,133,102,28,140,32.8,0.234,45,1 371 | 3,173,82,48,465,38.4,2.137,25,1 372 | 0,118,64,23,89,0.0,1.731,21,0 373 | 0,84,64,22,66,35.8,0.545,21,0 374 | 2,105,58,40,94,34.9,0.225,25,0 375 | 2,122,52,43,158,36.2,0.816,28,0 376 | 12,140,82,43,325,39.2,0.528,58,1 377 | 0,98,82,15,84,25.2,0.299,22,0 378 | 1,87,60,37,75,37.2,0.509,22,0 379 | 4,156,75,0,0,48.3,0.238,32,1 380 | 0,93,100,39,72,43.4,1.021,35,0 381 | 1,107,72,30,82,30.8,0.821,24,0 382 | 0,105,68,22,0,20.0,0.236,22,0 383 | 1,109,60,8,182,25.4,0.947,21,0 384 | 1,90,62,18,59,25.1,1.268,25,0 385 | 1,125,70,24,110,24.3,0.221,25,0 386 | 1,119,54,13,50,22.3,0.205,24,0 387 | 5,116,74,29,0,32.3,0.660,35,1 388 | 8,105,100,36,0,43.3,0.239,45,1 389 | 5,144,82,26,285,32.0,0.452,58,1 390 | 3,100,68,23,81,31.6,0.949,28,0 391 | 1,100,66,29,196,32.0,0.444,42,0 392 | 5,166,76,0,0,45.7,0.340,27,1 393 | 1,131,64,14,415,23.7,0.389,21,0 394 | 4,116,72,12,87,22.1,0.463,37,0 395 | 4,158,78,0,0,32.9,0.803,31,1 396 | 2,127,58,24,275,27.7,1.600,25,0 397 | 3,96,56,34,115,24.7,0.944,39,0 398 | 0,131,66,40,0,34.3,0.196,22,1 399 | 3,82,70,0,0,21.1,0.389,25,0 400 | 3,193,70,31,0,34.9,0.241,25,1 401 | 4,95,64,0,0,32.0,0.161,31,1 402 | 6,137,61,0,0,24.2,0.151,55,0 403 | 5,136,84,41,88,35.0,0.286,35,1 404 | 9,72,78,25,0,31.6,0.280,38,0 405 | 5,168,64,0,0,32.9,0.135,41,1 406 | 2,123,48,32,165,42.1,0.520,26,0 407 | 4,115,72,0,0,28.9,0.376,46,1 408 | 0,101,62,0,0,21.9,0.336,25,0 409 | 8,197,74,0,0,25.9,1.191,39,1 410 | 1,172,68,49,579,42.4,0.702,28,1 411 | 6,102,90,39,0,35.7,0.674,28,0 412 | 1,112,72,30,176,34.4,0.528,25,0 413 | 1,143,84,23,310,42.4,1.076,22,0 414 | 1,143,74,22,61,26.2,0.256,21,0 415 | 0,138,60,35,167,34.6,0.534,21,1 416 | 
3,173,84,33,474,35.7,0.258,22,1 417 | 1,97,68,21,0,27.2,1.095,22,0 418 | 4,144,82,32,0,38.5,0.554,37,1 419 | 1,83,68,0,0,18.2,0.624,27,0 420 | 3,129,64,29,115,26.4,0.219,28,1 421 | 1,119,88,41,170,45.3,0.507,26,0 422 | 2,94,68,18,76,26.0,0.561,21,0 423 | 0,102,64,46,78,40.6,0.496,21,0 424 | 2,115,64,22,0,30.8,0.421,21,0 425 | 8,151,78,32,210,42.9,0.516,36,1 426 | 4,184,78,39,277,37.0,0.264,31,1 427 | 0,94,0,0,0,0.0,0.256,25,0 428 | 1,181,64,30,180,34.1,0.328,38,1 429 | 0,135,94,46,145,40.6,0.284,26,0 430 | 1,95,82,25,180,35.0,0.233,43,1 431 | 2,99,0,0,0,22.2,0.108,23,0 432 | 3,89,74,16,85,30.4,0.551,38,0 433 | 1,80,74,11,60,30.0,0.527,22,0 434 | 2,139,75,0,0,25.6,0.167,29,0 435 | 1,90,68,8,0,24.5,1.138,36,0 436 | 0,141,0,0,0,42.4,0.205,29,1 437 | 12,140,85,33,0,37.4,0.244,41,0 438 | 5,147,75,0,0,29.9,0.434,28,0 439 | 1,97,70,15,0,18.2,0.147,21,0 440 | 6,107,88,0,0,36.8,0.727,31,0 441 | 0,189,104,25,0,34.3,0.435,41,1 442 | 2,83,66,23,50,32.2,0.497,22,0 443 | 4,117,64,27,120,33.2,0.230,24,0 444 | 8,108,70,0,0,30.5,0.955,33,1 445 | 4,117,62,12,0,29.7,0.380,30,1 446 | 0,180,78,63,14,59.4,2.420,25,1 447 | 1,100,72,12,70,25.3,0.658,28,0 448 | 0,95,80,45,92,36.5,0.330,26,0 449 | 0,104,64,37,64,33.6,0.510,22,1 450 | 0,120,74,18,63,30.5,0.285,26,0 451 | 1,82,64,13,95,21.2,0.415,23,0 452 | 2,134,70,0,0,28.9,0.542,23,1 453 | 0,91,68,32,210,39.9,0.381,25,0 454 | 2,119,0,0,0,19.6,0.832,72,0 455 | 2,100,54,28,105,37.8,0.498,24,0 456 | 14,175,62,30,0,33.6,0.212,38,1 457 | 1,135,54,0,0,26.7,0.687,62,0 458 | 5,86,68,28,71,30.2,0.364,24,0 459 | 10,148,84,48,237,37.6,1.001,51,1 460 | 9,134,74,33,60,25.9,0.460,81,0 461 | 9,120,72,22,56,20.8,0.733,48,0 462 | 1,71,62,0,0,21.8,0.416,26,0 463 | 8,74,70,40,49,35.3,0.705,39,0 464 | 5,88,78,30,0,27.6,0.258,37,0 465 | 10,115,98,0,0,24.0,1.022,34,0 466 | 0,124,56,13,105,21.8,0.452,21,0 467 | 0,74,52,10,36,27.8,0.269,22,0 468 | 0,97,64,36,100,36.8,0.600,25,0 469 | 8,120,0,0,0,30.0,0.183,38,1 470 | 6,154,78,41,140,46.1,0.571,27,0 471 | 
1,144,82,40,0,41.3,0.607,28,0 472 | 0,137,70,38,0,33.2,0.170,22,0 473 | 0,119,66,27,0,38.8,0.259,22,0 474 | 7,136,90,0,0,29.9,0.210,50,0 475 | 4,114,64,0,0,28.9,0.126,24,0 476 | 0,137,84,27,0,27.3,0.231,59,0 477 | 2,105,80,45,191,33.7,0.711,29,1 478 | 7,114,76,17,110,23.8,0.466,31,0 479 | 8,126,74,38,75,25.9,0.162,39,0 480 | 4,132,86,31,0,28.0,0.419,63,0 481 | 3,158,70,30,328,35.5,0.344,35,1 482 | 0,123,88,37,0,35.2,0.197,29,0 483 | 4,85,58,22,49,27.8,0.306,28,0 484 | 0,84,82,31,125,38.2,0.233,23,0 485 | 0,145,0,0,0,44.2,0.630,31,1 486 | 0,135,68,42,250,42.3,0.365,24,1 487 | 1,139,62,41,480,40.7,0.536,21,0 488 | 0,173,78,32,265,46.5,1.159,58,0 489 | 4,99,72,17,0,25.6,0.294,28,0 490 | 8,194,80,0,0,26.1,0.551,67,0 491 | 2,83,65,28,66,36.8,0.629,24,0 492 | 2,89,90,30,0,33.5,0.292,42,0 493 | 4,99,68,38,0,32.8,0.145,33,0 494 | 4,125,70,18,122,28.9,1.144,45,1 495 | 3,80,0,0,0,0.0,0.174,22,0 496 | 6,166,74,0,0,26.6,0.304,66,0 497 | 5,110,68,0,0,26.0,0.292,30,0 498 | 2,81,72,15,76,30.1,0.547,25,0 499 | 7,195,70,33,145,25.1,0.163,55,1 500 | 6,154,74,32,193,29.3,0.839,39,0 501 | 2,117,90,19,71,25.2,0.313,21,0 502 | 3,84,72,32,0,37.2,0.267,28,0 503 | 6,0,68,41,0,39.0,0.727,41,1 504 | 7,94,64,25,79,33.3,0.738,41,0 505 | 3,96,78,39,0,37.3,0.238,40,0 506 | 10,75,82,0,0,33.3,0.263,38,0 507 | 0,180,90,26,90,36.5,0.314,35,1 508 | 1,130,60,23,170,28.6,0.692,21,0 509 | 2,84,50,23,76,30.4,0.968,21,0 510 | 8,120,78,0,0,25.0,0.409,64,0 511 | 12,84,72,31,0,29.7,0.297,46,1 512 | 0,139,62,17,210,22.1,0.207,21,0 513 | 9,91,68,0,0,24.2,0.200,58,0 514 | 2,91,62,0,0,27.3,0.525,22,0 515 | 3,99,54,19,86,25.6,0.154,24,0 516 | 3,163,70,18,105,31.6,0.268,28,1 517 | 9,145,88,34,165,30.3,0.771,53,1 518 | 7,125,86,0,0,37.6,0.304,51,0 519 | 13,76,60,0,0,32.8,0.180,41,0 520 | 6,129,90,7,326,19.6,0.582,60,0 521 | 2,68,70,32,66,25.0,0.187,25,0 522 | 3,124,80,33,130,33.2,0.305,26,0 523 | 6,114,0,0,0,0.0,0.189,26,0 524 | 9,130,70,0,0,34.2,0.652,45,1 525 | 3,125,58,0,0,31.6,0.151,24,0 526 | 
3,87,60,18,0,21.8,0.444,21,0 527 | 1,97,64,19,82,18.2,0.299,21,0 528 | 3,116,74,15,105,26.3,0.107,24,0 529 | 0,117,66,31,188,30.8,0.493,22,0 530 | 0,111,65,0,0,24.6,0.660,31,0 531 | 2,122,60,18,106,29.8,0.717,22,0 532 | 0,107,76,0,0,45.3,0.686,24,0 533 | 1,86,66,52,65,41.3,0.917,29,0 534 | 6,91,0,0,0,29.8,0.501,31,0 535 | 1,77,56,30,56,33.3,1.251,24,0 536 | 4,132,0,0,0,32.9,0.302,23,1 537 | 0,105,90,0,0,29.6,0.197,46,0 538 | 0,57,60,0,0,21.7,0.735,67,0 539 | 0,127,80,37,210,36.3,0.804,23,0 540 | 3,129,92,49,155,36.4,0.968,32,1 541 | 8,100,74,40,215,39.4,0.661,43,1 542 | 3,128,72,25,190,32.4,0.549,27,1 543 | 10,90,85,32,0,34.9,0.825,56,1 544 | 4,84,90,23,56,39.5,0.159,25,0 545 | 1,88,78,29,76,32.0,0.365,29,0 546 | 8,186,90,35,225,34.5,0.423,37,1 547 | 5,187,76,27,207,43.6,1.034,53,1 548 | 4,131,68,21,166,33.1,0.160,28,0 549 | 1,164,82,43,67,32.8,0.341,50,0 550 | 4,189,110,31,0,28.5,0.680,37,0 551 | 1,116,70,28,0,27.4,0.204,21,0 552 | 3,84,68,30,106,31.9,0.591,25,0 553 | 6,114,88,0,0,27.8,0.247,66,0 554 | 1,88,62,24,44,29.9,0.422,23,0 555 | 1,84,64,23,115,36.9,0.471,28,0 556 | 7,124,70,33,215,25.5,0.161,37,0 557 | 1,97,70,40,0,38.1,0.218,30,0 558 | 8,110,76,0,0,27.8,0.237,58,0 559 | 11,103,68,40,0,46.2,0.126,42,0 560 | 11,85,74,0,0,30.1,0.300,35,0 561 | 6,125,76,0,0,33.8,0.121,54,1 562 | 0,198,66,32,274,41.3,0.502,28,1 563 | 1,87,68,34,77,37.6,0.401,24,0 564 | 6,99,60,19,54,26.9,0.497,32,0 565 | 0,91,80,0,0,32.4,0.601,27,0 566 | 2,95,54,14,88,26.1,0.748,22,0 567 | 1,99,72,30,18,38.6,0.412,21,0 568 | 6,92,62,32,126,32.0,0.085,46,0 569 | 4,154,72,29,126,31.3,0.338,37,0 570 | 0,121,66,30,165,34.3,0.203,33,1 571 | 3,78,70,0,0,32.5,0.270,39,0 572 | 2,130,96,0,0,22.6,0.268,21,0 573 | 3,111,58,31,44,29.5,0.430,22,0 574 | 2,98,60,17,120,34.7,0.198,22,0 575 | 1,143,86,30,330,30.1,0.892,23,0 576 | 1,119,44,47,63,35.5,0.280,25,0 577 | 6,108,44,20,130,24.0,0.813,35,0 578 | 2,118,80,0,0,42.9,0.693,21,1 579 | 10,133,68,0,0,27.0,0.245,36,0 580 | 2,197,70,99,0,34.7,0.575,62,1 581 | 
0,151,90,46,0,42.1,0.371,21,1 582 | 6,109,60,27,0,25.0,0.206,27,0 583 | 12,121,78,17,0,26.5,0.259,62,0 584 | 8,100,76,0,0,38.7,0.190,42,0 585 | 8,124,76,24,600,28.7,0.687,52,1 586 | 1,93,56,11,0,22.5,0.417,22,0 587 | 8,143,66,0,0,34.9,0.129,41,1 588 | 6,103,66,0,0,24.3,0.249,29,0 589 | 3,176,86,27,156,33.3,1.154,52,1 590 | 0,73,0,0,0,21.1,0.342,25,0 591 | 11,111,84,40,0,46.8,0.925,45,1 592 | 2,112,78,50,140,39.4,0.175,24,0 593 | 3,132,80,0,0,34.4,0.402,44,1 594 | 2,82,52,22,115,28.5,1.699,25,0 595 | 6,123,72,45,230,33.6,0.733,34,0 596 | 0,188,82,14,185,32.0,0.682,22,1 597 | 0,67,76,0,0,45.3,0.194,46,0 598 | 1,89,24,19,25,27.8,0.559,21,0 599 | 1,173,74,0,0,36.8,0.088,38,1 600 | 1,109,38,18,120,23.1,0.407,26,0 601 | 1,108,88,19,0,27.1,0.400,24,0 602 | 6,96,0,0,0,23.7,0.190,28,0 603 | 1,124,74,36,0,27.8,0.100,30,0 604 | 7,150,78,29,126,35.2,0.692,54,1 605 | 4,183,0,0,0,28.4,0.212,36,1 606 | 1,124,60,32,0,35.8,0.514,21,0 607 | 1,181,78,42,293,40.0,1.258,22,1 608 | 1,92,62,25,41,19.5,0.482,25,0 609 | 0,152,82,39,272,41.5,0.270,27,0 610 | 1,111,62,13,182,24.0,0.138,23,0 611 | 3,106,54,21,158,30.9,0.292,24,0 612 | 3,174,58,22,194,32.9,0.593,36,1 613 | 7,168,88,42,321,38.2,0.787,40,1 614 | 6,105,80,28,0,32.5,0.878,26,0 615 | 11,138,74,26,144,36.1,0.557,50,1 616 | 3,106,72,0,0,25.8,0.207,27,0 617 | 6,117,96,0,0,28.7,0.157,30,0 618 | 2,68,62,13,15,20.1,0.257,23,0 619 | 9,112,82,24,0,28.2,1.282,50,1 620 | 0,119,0,0,0,32.4,0.141,24,1 621 | 2,112,86,42,160,38.4,0.246,28,0 622 | 2,92,76,20,0,24.2,1.698,28,0 623 | 6,183,94,0,0,40.8,1.461,45,0 624 | 0,94,70,27,115,43.5,0.347,21,0 625 | 2,108,64,0,0,30.8,0.158,21,0 626 | 4,90,88,47,54,37.7,0.362,29,0 627 | 0,125,68,0,0,24.7,0.206,21,0 628 | 0,132,78,0,0,32.4,0.393,21,0 629 | 5,128,80,0,0,34.6,0.144,45,0 630 | 4,94,65,22,0,24.7,0.148,21,0 631 | 7,114,64,0,0,27.4,0.732,34,1 632 | 0,102,78,40,90,34.5,0.238,24,0 633 | 2,111,60,0,0,26.2,0.343,23,0 634 | 1,128,82,17,183,27.5,0.115,22,0 635 | 10,92,62,0,0,25.9,0.167,31,0 636 | 
13,104,72,0,0,31.2,0.465,38,1 637 | 5,104,74,0,0,28.8,0.153,48,0 638 | 2,94,76,18,66,31.6,0.649,23,0 639 | 7,97,76,32,91,40.9,0.871,32,1 640 | 1,100,74,12,46,19.5,0.149,28,0 641 | 0,102,86,17,105,29.3,0.695,27,0 642 | 4,128,70,0,0,34.3,0.303,24,0 643 | 6,147,80,0,0,29.5,0.178,50,1 644 | 4,90,0,0,0,28.0,0.610,31,0 645 | 3,103,72,30,152,27.6,0.730,27,0 646 | 2,157,74,35,440,39.4,0.134,30,0 647 | 1,167,74,17,144,23.4,0.447,33,1 648 | 0,179,50,36,159,37.8,0.455,22,1 649 | 11,136,84,35,130,28.3,0.260,42,1 650 | 0,107,60,25,0,26.4,0.133,23,0 651 | 1,91,54,25,100,25.2,0.234,23,0 652 | 1,117,60,23,106,33.8,0.466,27,0 653 | 5,123,74,40,77,34.1,0.269,28,0 654 | 2,120,54,0,0,26.8,0.455,27,0 655 | 1,106,70,28,135,34.2,0.142,22,0 656 | 2,155,52,27,540,38.7,0.240,25,1 657 | 2,101,58,35,90,21.8,0.155,22,0 658 | 1,120,80,48,200,38.9,1.162,41,0 659 | 11,127,106,0,0,39.0,0.190,51,0 660 | 3,80,82,31,70,34.2,1.292,27,1 661 | 10,162,84,0,0,27.7,0.182,54,0 662 | 1,199,76,43,0,42.9,1.394,22,1 663 | 8,167,106,46,231,37.6,0.165,43,1 664 | 9,145,80,46,130,37.9,0.637,40,1 665 | 6,115,60,39,0,33.7,0.245,40,1 666 | 1,112,80,45,132,34.8,0.217,24,0 667 | 4,145,82,18,0,32.5,0.235,70,1 668 | 10,111,70,27,0,27.5,0.141,40,1 669 | 6,98,58,33,190,34.0,0.430,43,0 670 | 9,154,78,30,100,30.9,0.164,45,0 671 | 6,165,68,26,168,33.6,0.631,49,0 672 | 1,99,58,10,0,25.4,0.551,21,0 673 | 10,68,106,23,49,35.5,0.285,47,0 674 | 3,123,100,35,240,57.3,0.880,22,0 675 | 8,91,82,0,0,35.6,0.587,68,0 676 | 6,195,70,0,0,30.9,0.328,31,1 677 | 9,156,86,0,0,24.8,0.230,53,1 678 | 0,93,60,0,0,35.3,0.263,25,0 679 | 3,121,52,0,0,36.0,0.127,25,1 680 | 2,101,58,17,265,24.2,0.614,23,0 681 | 2,56,56,28,45,24.2,0.332,22,0 682 | 0,162,76,36,0,49.6,0.364,26,1 683 | 0,95,64,39,105,44.6,0.366,22,0 684 | 4,125,80,0,0,32.3,0.536,27,1 685 | 5,136,82,0,0,0.0,0.640,69,0 686 | 2,129,74,26,205,33.2,0.591,25,0 687 | 3,130,64,0,0,23.1,0.314,22,0 688 | 1,107,50,19,0,28.3,0.181,29,0 689 | 1,140,74,26,180,24.1,0.828,23,0 690 | 
1,144,82,46,180,46.1,0.335,46,1 691 | 8,107,80,0,0,24.6,0.856,34,0 692 | 13,158,114,0,0,42.3,0.257,44,1 693 | 2,121,70,32,95,39.1,0.886,23,0 694 | 7,129,68,49,125,38.5,0.439,43,1 695 | 2,90,60,0,0,23.5,0.191,25,0 696 | 7,142,90,24,480,30.4,0.128,43,1 697 | 3,169,74,19,125,29.9,0.268,31,1 698 | 0,99,0,0,0,25.0,0.253,22,0 699 | 4,127,88,11,155,34.5,0.598,28,0 700 | 4,118,70,0,0,44.5,0.904,26,0 701 | 2,122,76,27,200,35.9,0.483,26,0 702 | 6,125,78,31,0,27.6,0.565,49,1 703 | 1,168,88,29,0,35.0,0.905,52,1 704 | 2,129,0,0,0,38.5,0.304,41,0 705 | 4,110,76,20,100,28.4,0.118,27,0 706 | 6,80,80,36,0,39.8,0.177,28,0 707 | 10,115,0,0,0,0.0,0.261,30,1 708 | 2,127,46,21,335,34.4,0.176,22,0 709 | 9,164,78,0,0,32.8,0.148,45,1 710 | 2,93,64,32,160,38.0,0.674,23,1 711 | 3,158,64,13,387,31.2,0.295,24,0 712 | 5,126,78,27,22,29.6,0.439,40,0 713 | 10,129,62,36,0,41.2,0.441,38,1 714 | 0,134,58,20,291,26.4,0.352,21,0 715 | 3,102,74,0,0,29.5,0.121,32,0 716 | 7,187,50,33,392,33.9,0.826,34,1 717 | 3,173,78,39,185,33.8,0.970,31,1 718 | 10,94,72,18,0,23.1,0.595,56,0 719 | 1,108,60,46,178,35.5,0.415,24,0 720 | 5,97,76,27,0,35.6,0.378,52,1 721 | 4,83,86,19,0,29.3,0.317,34,0 722 | 1,114,66,36,200,38.1,0.289,21,0 723 | 1,149,68,29,127,29.3,0.349,42,1 724 | 5,117,86,30,105,39.1,0.251,42,0 725 | 1,111,94,0,0,32.8,0.265,45,0 726 | 4,112,78,40,0,39.4,0.236,38,0 727 | 1,116,78,29,180,36.1,0.496,25,0 728 | 0,141,84,26,0,32.4,0.433,22,0 729 | 2,175,88,0,0,22.9,0.326,22,0 730 | 2,92,52,0,0,30.1,0.141,22,0 731 | 3,130,78,23,79,28.4,0.323,34,1 732 | 8,120,86,0,0,28.4,0.259,22,1 733 | 2,174,88,37,120,44.5,0.646,24,1 734 | 2,106,56,27,165,29.0,0.426,22,0 735 | 2,105,75,0,0,23.3,0.560,53,0 736 | 4,95,60,32,0,35.4,0.284,28,0 737 | 0,126,86,27,120,27.4,0.515,21,0 738 | 8,65,72,23,0,32.0,0.600,42,0 739 | 2,99,60,17,160,36.6,0.453,21,0 740 | 1,102,74,0,0,39.5,0.293,42,1 741 | 11,120,80,37,150,42.3,0.785,48,1 742 | 3,102,44,20,94,30.8,0.400,26,0 743 | 1,109,58,18,116,28.5,0.219,22,0 744 | 
9,140,94,0,0,32.7,0.734,45,1 745 | 13,153,88,37,140,40.6,1.174,39,0 746 | 12,100,84,33,105,30.0,0.488,46,0 747 | 1,147,94,41,0,49.3,0.358,27,1 748 | 1,81,74,41,57,46.3,1.096,32,0 749 | 3,187,70,22,200,36.4,0.408,36,1 750 | 6,162,62,0,0,24.3,0.178,50,1 751 | 4,136,70,0,0,31.2,1.182,22,1 752 | 1,121,78,39,74,39.0,0.261,28,0 753 | 3,108,62,24,0,26.0,0.223,25,0 754 | 0,181,88,44,510,43.3,0.222,26,1 755 | 8,154,78,32,0,32.4,0.443,45,1 756 | 1,128,88,39,110,36.5,1.057,37,1 757 | 7,137,90,41,0,32.0,0.391,39,0 758 | 0,123,72,0,0,36.3,0.258,52,1 759 | 1,106,76,0,0,37.5,0.197,26,0 760 | 6,190,92,0,0,35.5,0.278,66,1 761 | 2,88,58,26,16,28.4,0.766,22,0 762 | 9,170,74,31,0,44.0,0.403,43,1 763 | 9,89,62,0,0,22.5,0.142,33,0 764 | 10,101,76,48,180,32.9,0.171,63,0 765 | 2,122,70,27,0,36.8,0.340,27,0 766 | 5,121,72,23,112,26.2,0.245,30,0 767 | 1,126,60,0,0,30.1,0.349,47,1 768 | 1,93,70,31,0,30.4,0.315,23,0 769 | -------------------------------------------------------------------------------- /src/AFKMC2.jl: -------------------------------------------------------------------------------- 1 | #File for the Assumption Free K MC2 algorithm (KMeans) 2 | module CustomKMeans 3 | 4 | using Distributions 5 | using StatsBase 6 | using Clustering 7 | export KMeansInducingPoints 8 | 9 | #Return K inducing points from X, m being the number of Markov iterations for the seeding 10 | function KMeansInducingPoints(X,K,m) 11 | C = (KmeansSeed(X,K,m))' 12 | kmeans!(X',C) 13 | return C' 14 | end 15 | #Fast and efficient seeding for KMeans 16 | function KmeansSeed(X,K,m) #X is the data, K the number of centers wanted, m the number of Markov iterations 17 | N = size(X,1) 18 | #Preprocessing, sample first random center 19 | init = StatsBase.sample(1:N,1) 20 | C = zeros(K,size(X,2)) 21 | C[1,:] = X[init,:] 22 | q = zeros(N) 23 | for i in 1:N 24 | q[i] = 0.5*norm(X[i,:]-C[1])^2 25 | end 26 | sumq = sum(q) 27 | q = WeightVec(q/sumq + 1.0/(2*N),1) 28 | uniform = Distributions.Uniform(0,1) 29 | for i in 2:K 30 
| x = X[StatsBase.sample(1:N,q,1),:] #weighted sampling, 31 | mindist = mindistance(x,C,i-1) 32 | for j in 2:m 33 | y = X[StatsBase.sample(q),:] #weighted sampling 34 | dist = mindistance(y,C,i-1) 35 | if (dist/mindist > rand(uniform)) 36 | x = y; mindist = dist 37 | end 38 | end 39 | C[i,:]=x 40 | end 41 | return C 42 | end 43 | 44 | #Compue the minimum distance 45 | function mindistance(x,C,K) #Point to look for, collection of centers, number of centers computed 46 | mindist = Inf 47 | for i in 1:K 48 | mindist = min(norm(x-C[i])^2,mindist) 49 | end 50 | return mindist 51 | end 52 | 53 | end #module 54 | -------------------------------------------------------------------------------- /src/BSVM.jl: -------------------------------------------------------------------------------- 1 | if !isdefined(:KernelFunctions); include("KernelFunctions.jl"); end; 2 | if !isdefined(:CustomKMeans); include("AFKMC2.jl"); end; 3 | 4 | module BayesianSVM 5 | 6 | using KernelFunctions 7 | using CustomKMeans 8 | using Distributions 9 | using StatsBase 10 | using PyPlot 11 | using GaussianMixtures 12 | using ScikitLearn 13 | using ScikitLearn: fit! 
14 | 15 | export BSVM 16 | export TrainBSVM 17 | 18 | #Corresponds to the BSVM model 19 | type BSVM 20 | #Stochastic parameters 21 | Stochastic::Bool 22 | nSamplesUsed::Int64 #Size of the minibatch used 23 | κ_s::Float64 #Parameters for decay of learning rate (iter + κ)^-τ in case adaptative learning rate is not used 24 | τ_s::Float64 25 | #Non linear parameters 26 | NonLinear::Bool 27 | Sparse::Bool 28 | kernels::Array{Kernel,1} #Kernels function used 29 | γ::Float64 # Regularization parameter of the noise 30 | m::Int64 #Number of inducing points 31 | inducingPointsSelectionMethod::String #Way to select the inducing points ("Random","KMeans","GMM") 32 | #Autotuning parameters 33 | Autotuning::Bool 34 | κ_Θ::Float64 #Parameters for decay of learning rate for the hyperparameter (iter + κ)^-τ 35 | τ_Θ::Int64 36 | autotuningFrequency::Int64 #Frequency of update of the hyperparameter 37 | #Flag for adaptative learning rate for the SVI 38 | AdaptativeLearningRate::Bool 39 | #General Parameters for training 40 | Intercept::Bool 41 | ϵ::Float64 #Desired precision (on ||β(t)-β(t-1)||) 42 | nEpochs::Int64 #Maximum number of iterations 43 | β_init::Array{Float64,1} #Initial value for β 44 | smoothingWindow::Int64 45 | VerboseLevel::Int64 46 | Storing::Bool #Store values for debugging purposes 47 | StoringFrequency::Int64 #Every X steps 48 | StoredValues::Array{Float64,2} 49 | StoreddELBO::Array{Float64,2} 50 | 51 | #Functions 52 | Kernel_function::Function #kernel function associated with the model 53 | Predict::Function 54 | PredictProba::Function 55 | ELBO::Function 56 | dELBO::Function 57 | Plotting::Function 58 | Update::Function 59 | 60 | #Parameters learned with training 61 | nSamples::Int64 # Number of data points 62 | nFeatures::Int64 # Number of features 63 | μ::Array{Float64,1} # Mean for variational distribution 64 | η_1::Array{Float64,1} #Natural Parameter #1 65 | ζ::Array{Float64,2} # Covariance matrix of variational distribution 66 | η_2::Array{Float64,2} 
#Natural Parameter #2 67 | α::Array{Float64,1} # Distribution parameter of the GIG distribution of the latent variables 68 | invΣ::Array{Float64,2} #Inverse Prior Matrix for the linear case 69 | invK::Array{Float64,2} #Inverse Kernel Matrix for the nonlinear case 70 | invKmm::Array{Float64,2} #Inverse Kernel matrix of inducing points 71 | Ktilde::Array{Float64,1} #Diagonal of the covariance matrix between inducing points and generative points 72 | κ::Array{Float64,2} #Kmn*invKmm 73 | inducingPoints::Array{Float64,2} #Inducing points coordinates for the Big Data GP 74 | top #Storing matrices for repeated predictions (top and down are numerator and discriminator) 75 | down 76 | MatricesPrecomputed::Bool #Flag to know if matrices needed for predictions are already computed or not 77 | ρ_s::Float64 #Learning rate for CAVI 78 | g::Array{Float64,1} # g & h are expected gradient value for computing the adaptive learning rate 79 | h::Float64 80 | ρ_Θ::Float64 # learning rate for auto tuning 81 | initialized::Bool 82 | evol_β::Array{Float64,2} #Store the betas for smooth convergence criterium 83 | 84 | 85 | #Constructor 86 | function BSVM(;Stochastic::Bool=false, 87 | Sparse::Bool=false,NonLinear::Bool=true,AdaptativeLearningRate::Bool=true,Autotuning::Bool=false, 88 | nEpochs::Int64 = 2000, 89 | batchSize::Int64=-1,κ_s::Float64=1.0,τ_s::Int64=100, 90 | kernels=0,γ::Float64=1e-3,m::Int64=100,inducingPointsSelectionMethod::String="Random",κ_Θ::Float64=1.0,τ_Θ::Int64=100,autotuningfrequency::Int64=10, 91 | Intercept::Bool=false,ϵ::Float64=1e-5,β_init=[0.0],smoothingWindow::Int64=10, 92 | Storing::Bool=false,StoringFrequency::Int64=1,VerboseLevel::Int64=0) 93 | iter = 1 94 | if kernels == 0 && NonLinear 95 | warn("No kernel indicated, a rbf kernel function with lengthscale 1 is used") 96 | kernels = [Kernel("rbf",1.0,params=1.0)] 97 | end 98 | this = 
new(Stochastic,batchSize,κ_s,τ_s,NonLinear,Sparse,kernels,γ,m,inducingPointsSelectionMethod,Autotuning,κ_Θ,τ_Θ,autotuningfrequency,AdaptativeLearningRate,Intercept,ϵ,nEpochs,β_init,smoothingWindow,VerboseLevel,Storing,StoringFrequency) 99 | this.initialized = false 100 | if NonLinear 101 | this.top = 0 102 | this.down = 0 103 | MatricesPrecomputed = false 104 | this.Kernel_function = function(X1,X2) 105 | dist = 0 106 | for i in 1:size(this.kernels,1) 107 | dist += this.kernels[i].coeff*this.kernels[i].compute(X1,X2) 108 | end 109 | return dist 110 | end 111 | 112 | if Sparse 113 | this.Predict = function(X,X_test) 114 | SparsePredict(X_test,this) 115 | end 116 | this.PredictProba = function(X,X_test) 117 | SparsePredictProb(X_test,this) 118 | end 119 | this.ELBO = function(X,y) 120 | SparseELBO(this,y) 121 | end 122 | this.dELBO = function(X,y) #Not correct to change later 123 | return 0 124 | end 125 | else 126 | this.Predict = function(X,X_test) 127 | NonLinearPredict(X,X_test,this) 128 | end 129 | this.PredictProba = function(X,X_test) 130 | NonLinearPredictProb(X,X_test,this) 131 | end 132 | this.ELBO = function(X,y) 133 | ELBO_NL(this,y) 134 | end 135 | this.dELBO = function(X,y) 136 | dELBO_NL(y,this.μ,this.ζ,this.α,this.invK,this.Autotuning ? 
this.J : eye(size(X,1))) 137 | end 138 | end 139 | 140 | else 141 | this.Predict = function(X) 142 | LinearPredict(X,this.μ) 143 | end 144 | this.PredictProba = function(X) 145 | LinearPredictProb(X,this.μ,this.ζ) 146 | end 147 | this.ELBO = function(X,y;precomputed::Bool=true) 148 | ELBO(Diagonal(y)*X,this.μ,this.ζ,this.α,inv(this.invΣ)) 149 | end 150 | this.dELBO = function(X,y;precomputed::Bool=true) 151 | dELBO(Diagonal(y)*X,this.μ,this.ζ,this.α,inv(this.invΣ)) 152 | end 153 | end 154 | this.Plotting = function(s::String) 155 | Plotting(s,this) 156 | end 157 | this.Update = function(X::Array{Float64,2},y::Array{Float64,1},iter) 158 | Update(this,X,y,iter) 159 | end 160 | return this 161 | end 162 | #end of constructor 163 | end 164 | 165 | #Function to check consistency of the different parameters and the possible correction of some of them in some cases 166 | function ModelVerification(model::BSVM,XSize,ySize) 167 | if model.Intercept && model.NonLinear 168 | warn("Not possible to have intercept for the non linear case, removing automatically this option") 169 | model.Intercept = false 170 | end 171 | if model.Sparse && !model.NonLinear 172 | warn("Model cannot be sparse and linear at the same time, assuming linear model") 173 | end 174 | if model.NonLinear && model.Sparse 175 | if model.m > XSize[1] 176 | warn("There are more inducing points than actual points, setting it to 10%") 177 | model.m = XSize[1]÷10 178 | end 179 | end 180 | if XSize[1] != ySize[1] 181 | warn("There is a dimension problem with the data size(y) != size(X)") 182 | return false 183 | end 184 | if model.γ <= 0 185 | warn("Gamma should be strictly positive, setting it to default value 1.0e-3") 186 | model.γ = 1e-3 187 | end 188 | if model.nSamplesUsed == -1 && model.Stochastic 189 | warn("No batch size has been given, stochastic option has been removed") 190 | model.Stochastic = false 191 | end 192 | if model.m > XSize[1] && model.Sparse 193 | warn("Number of inducing points bigger then 
number of data points, setting it back to non sparse configuration") 194 | model.Sparse = false 195 | end 196 | return true 197 | end 198 | 199 | function TrainBSVM(model::BSVM,X::Array{Float64,2},y::Array{Float64,1}) 200 | #Verification of consistency of the model 201 | if !ModelVerification(model,size(X),size(y)) 202 | return 203 | end 204 | model.nSamples = length(y) 205 | model.nFeatures = model.NonLinear ? (model.Sparse ? model.m : length(y)) : size(X,2) 206 | 207 | if model.VerboseLevel > 0 208 | println("Starting training of data of size $((model.nSamples,size(X,2))), using the"*(model.Autotuning ? " autotuned" : "")*(model.Stochastic ? " stochastic" : "")*(model.NonLinear ? " kernel" : " linear")*" method" 209 | *(model.AdaptativeLearningRate ? " with adaptative learning rate" : "")*(model.Sparse ? " with inducing points" : "")) 210 | end 211 | 212 | #Initialization of the variables 213 | if !model.initialized 214 | if model.β_init[1] == 0 || length(model.β_init) != nFeatures 215 | if model.VerboseLevel > 1 216 | warn("Initial vector is sampled from a multinormal distribution") 217 | end 218 | model.μ = randn(model.nFeatures) 219 | else 220 | model.μ = model.β_init 221 | end 222 | if model.Intercept 223 | model.nFeatures += 1 224 | X = [ones(Float64,model.nFeatures) X] 225 | end 226 | #Necessary to initialize only for first computation of the ELBO 227 | model.α = abs(rand(model.nSamples)) 228 | model.ζ = eye(model.nFeatures) 229 | #Creation of the Kernel Matrix and its inverse in the different cases as well as the prior 230 | if model.NonLinear 231 | if !model.Sparse 232 | model.invK = inv(Symmetric(CreateKernelMatrix(X,model.Kernel_function) + model.γ*eye(model.nFeatures),:U)) 233 | end 234 | if model.Sparse 235 | if model.inducingPointsSelectionMethod == "Random" 236 | model.inducingPoints = X[StatsBase.sample(1:model.nSamples,model.m,replace=false),:] 237 | elseif model.inducingPointsSelectionMethod == "KMeans" 238 | model.inducingPoints = 
KMeansInducingPoints(X,model.m,10) 239 | elseif model.inducingPointsSelectionMethod == "GMM" 240 | model.inducingPoints = (ScikitLearn.fit!(GMM(n_components = model.m),X)).μ 241 | end 242 | 243 | model.invKmm = Matrix(Symmetric(inv(CreateKernelMatrix(model.inducingPoints,model.Kernel_function)+model.γ*eye(model.nFeatures)))) 244 | Knm = CreateKernelMatrix(X,model.Kernel_function,X2=model.inducingPoints) 245 | model.κ = Knm*model.invKmm 246 | model.Ktilde = CreateDiagonalKernelMatrix(X,model.Kernel_function) + model.γ*ones(size(X,1)) - squeeze(sum(model.κ.*Knm,2),2) #diag(model.κ*transpose(Knm)) 247 | end 248 | elseif !model.NonLinear 249 | model.invΣ = (1.0/model.γ)*eye(model.nFeatures) 250 | end 251 | if (model.nSamplesUsed <= 0 || model.nSamplesUsed > model.nSamples) 252 | model.nSamplesUsed = model.nSamples 253 | end 254 | #Initialization of the natural parameters 255 | model.η_2 = -0.5*inv(model.ζ) 256 | model.η_1 = 2*model.η_2*model.μ 257 | if model.AdaptativeLearningRate && model.Stochastic 258 | batchindices = StatsBase.sample(1:model.nSamples,model.nSamplesUsed,replace=false) 259 | Z = model.NonLinear ? Diagonal(y) : Diagonal(y)*X 260 | (grad_1,grad_2) = NaturalGradientELBO(model.α,(model.NonLinear && model.Sparse) ? Z*model.κ : Z, model.NonLinear ? (model.Sparse ? model.invKmm : model.invK) : model.invΣ,model.nSamples/model.nSamplesUsed) 261 | model.τ_s = model.nSamplesUsed 262 | model.g = vcat(grad_1,reshape(grad_2,size(grad_2,1)^2)) 263 | model.h = norm(vcat(grad_1,reshape(grad_2,size(grad_2,1)^2)))^2 264 | end 265 | model.ρ_Θ = model.Autotuning? (1+model.τ_Θ)^(-model.κ_Θ) : 1.0; 266 | model.ρ_s = model.Stochastic ? (model.AdaptativeLearningRate ? 
dot(model.g,model.g)/model.h : (1+model.τ_s)^(-model.κ_s)) : 1.0 267 | if model.Storing 268 | # Storing trace(ζ),ELBO,max(|α|),ρ_s,ρ_Θ/ρ_γ,||Θ||/γ 269 | model.StoredValues = zeros(model.nEpochs,6) 270 | model.StoreddELBO = zeros(model.nEpochs,4) 271 | model.StoredValues[1,:] = [trace(model.ζ),model.ELBO(X,y),maximum(abs(model.α)),model.ρ_s,model.γ,model.Autotuning ? model.ρ_Θ : 0.0] 272 | model.StoreddELBO[1,:] = model.dELBO(X,y) 273 | end 274 | model.initialized = true 275 | model.down = 0 276 | model.top = 0 277 | model.MatricesPrecomputed = false 278 | end 279 | evol_β = zeros(model.nEpochs,model.nFeatures) 280 | evol_β[1,:] = model.μ 281 | 282 | batchindices = collect(1:model.nSamples) 283 | prev = 0 284 | current = 0 285 | if model.VerboseLevel > 2 || (model.VerboseLevel > 1) 286 | current = model.ELBO(X,y) 287 | end 288 | conv = Inf #Initialization of the Convergence value 289 | iter::Int64 = 1 290 | ##End of Initialization of the parameters 291 | if model.VerboseLevel > 1 292 | println("Iteration $iter / $(model.nEpochs) (max)") 293 | println("Convergence : $conv, ELBO : $current") 294 | end 295 | #Two criterions for stopping, number of iterations or convergence 296 | while iter < model.nEpochs && conv > model.ϵ 297 | #Print some of the parameters 298 | model.Update(X,y,iter) 299 | iter += 1 300 | evol_β[iter,:] = model.μ 301 | #smooth_1 = mean(evol_β[max(1,iter-2*model.smoothingWindow):iter-1,:],1);smooth_2 = mean(evol_β[max(2,iter-2*model.smoothingWindow+1):iter,:],1); 302 | smooth_1 = mean(evol_β[max(1,iter-2*model.smoothingWindow):iter-1,:],1);smooth_2 = mean(evol_β[max(2,iter-2*model.smoothingWindow+1):iter,:],1); 303 | conv = norm(smooth_1/norm(smooth_1)-smooth_2/norm(smooth_2)) 304 | #prev = current 305 | if model.VerboseLevel > 2 || (model.VerboseLevel > 1 && iter%100==0) 306 | current = model.ELBO(X,y) 307 | end 308 | #conv = abs(current-prev) 309 | if model.Storing && iter%model.StoringFrequency == 0 310 | if model.NonLinear && model.Stochastic && 
((!model.Autotuning && iter<=2) || (model.Autotuning && ((iter-1)%model.autotuningFrequency == 0))) 311 | println("Recomputing Kernel matrices") 312 | model.invK = Matrix(Symmetric(inv(CreateKernelMatrix(X,model.Kernel_function)+model.γ*eye(model.nSamples)),:U)) 313 | if model.Autotuning 314 | model.J = CreateKernelMatrix(X,deriv_rbf,model.Θ) 315 | end 316 | end 317 | model.StoredValues[iter÷model.StoringFrequency,:] = [trace(model.ζ),model.ELBO(X,y),maximum(abs(model.α)),model.ρ_s,model.γ,model.Autotuning ? model.ρ_Θ : 0.0,] 318 | model.StoreddELBO[iter÷model.StoringFrequency,:] = model.dELBO(X,y) 319 | #println(model.StoreddELBO[iter÷model.StoringFrequency,:]) 320 | end 321 | if model.VerboseLevel > 2 || (model.VerboseLevel > 1 && iter%100==0) 322 | println("Iteration $iter / $(model.nEpochs) (max)") 323 | println("Convergence : $conv, ELBO : $current") 324 | if model.Autotuning 325 | println("Gamma : $(model.γ)") 326 | for i in 1:size(model.kernels,1) 327 | println("(Coeff,Parameter) for kernel $i : $((model.kernels[i].coeff,(model.kernels[i].Nparams > 0)? 
model.kernels[i].param : 0))") 328 | end 329 | println("rho theta : $(model.ρ_Θ)") 330 | end 331 | end 332 | end 333 | if model.VerboseLevel > 0 334 | println("Training ended after $iter iterations") 335 | end 336 | if model.Storing 337 | model.StoredValues = model.StoredValues[1:iter÷model.StoringFrequency,:]; 338 | model.StoreddELBO = model.StoreddELBO[1:iter÷model.StoringFrequency,:]; 339 | model.evol_β = evol_β[1:iter,:] 340 | end 341 | return model 342 | end 343 | 344 | 345 | 346 | function Update(model::BSVM,X::Array{Float64,2},y::Array{Float64,1},iter::Int64) #Coordinates ascent of the parameters 347 | if model.Stochastic 348 | batchindices = StatsBase.sample(1:model.nSamples,model.nSamplesUsed,replace=false) 349 | else 350 | batchindices = collect(1:model.nSamples) 351 | end 352 | model.top = 0; model.down = 0; model.MatricesPrecomputed = false;#Need to recompute the matrices 353 | #Definition of the Z matrix, different for everycase 354 | Z = model.NonLinear ? (model.Sparse ? Diagonal(y[batchindices])*model.κ[batchindices,:] : Diagonal(y[batchindices]) ) : Diagonal(y[batchindices])*X[batchindices,:] 355 | #Computation of latent variables 356 | model.α[batchindices] = (1 - Z*model.μ).^2 + squeeze(sum((Z*model.ζ).*Z,2),2) 357 | if model.Sparse && model.NonLinear 358 | model.α[batchindices] += model.Ktilde[batchindices] #Cf derivation of updates 359 | end 360 | 361 | #Compute the natural gradient 362 | (grad_η_1,grad_η_2) = NaturalGradientELBO(model.α[batchindices],Z, model.NonLinear ? (model.Sparse ? model.invKmm : model.invK) : model.invΣ, model.Stochastic ? 
model.nSamples/model.nSamplesUsed : 1.0) 363 | 364 | #Compute the learning rate 365 | if model.AdaptativeLearningRate && model.Stochastic 366 | #Using the paper on the adaptive learning rate for the SVI (update from the natural gradients) 367 | model.g = (1-1/model.τ_s)*model.g + vcat(grad_η_1-model.η_1,reshape(grad_η_2-model.η_2,size(grad_η_2,1)^2))./model.τ_s 368 | model.h = (1-1/model.τ_s)*model.h +norm(vcat(grad_η_1-model.η_1,reshape(grad_η_2-model.η_2,size(grad_η_2,1)^2)))^2/model.τ_s 369 | model.ρ_s = norm(model.g)^2/model.h 370 | #if iter%1==0 371 | # println("g : $(norm(model.g)^2), h : $(model.h), rho : $(model.ρ_s), tau : $(model.τ_s)") 372 | #end 373 | model.τ_s = (1.0 - model.ρ_s)*model.τ_s + 1.0 374 | elseif model.Stochastic 375 | #Simple model of learning rate 376 | model.ρ_s = (iter+model.τ_s)^(-model.κ_s) 377 | else 378 | #Non-Stochastic case 379 | model.ρ_s = 1.0 380 | end 381 | model.η_1 = (1.0-model.ρ_s)*model.η_1 + model.ρ_s*grad_η_1; model.η_2 = (1.0-model.ρ_s)*model.η_2 + model.ρ_s*grad_η_2 #Update of the natural parameters with noisy/full natural gradient 382 | model.ζ = -0.5*inv(model.η_2); model.μ = model.ζ*model.η_1 #Back to the distribution parameters (needed for α updates) 383 | 384 | #Autotuning part, only happens every $autotuningFrequency iterations 385 | if model.Autotuning && (iter%model.autotuningFrequency == 0) 386 | if model.VerboseLevel > 2 || (model.VerboseLevel > 1 && iter%100==0) 387 | println("Before hyperparameter optimization ELBO = $(model.ELBO(X,y))") 388 | end 389 | model.ρ_Θ = (iter+model.τ_Θ)^(-model.κ_Θ) 390 | if model.NonLinear 391 | if model.Sparse 392 | update_hyperparameter_Sparse!(model,X,y) 393 | model.invKmm = Matrix(Symmetric(inv(CreateKernelMatrix(model.inducingPoints,model.Kernel_function)+model.γ*eye(model.m)),:U)) 394 | model.κ = CreateKernelMatrix(X,model.Kernel_function,X2=model.inducingPoints)*model.invKmm 395 | else 396 | update_hyperparameter_NL!(model,X,y) 397 | model.invK = 
Matrix(Symmetric(inv(CreateKernelMatrix(X,model.Kernel_function)+model.γ*eye(size(X,1))),:U)) 398 | end 399 | else 400 | model.γ = update_hyperparameter(model) 401 | model.invΣ = (1/model.γ)*eye(model.nFeatures) 402 | end 403 | if model.VerboseLevel > 2 || (model.VerboseLevel > 1 && iter%100==0) 404 | println("After hyperparameter optimization ELBO = $(model.ELBO(X,y))") 405 | end 406 | end 407 | end; 408 | 409 | function NaturalGradientELBO(α,Z,invPrior,stoch_coef) 410 | grad_1 = stoch_coef*transpose(Z)*(1./sqrt(α)+1) 411 | grad_2 = -0.5*(stoch_coef*transpose(Z)*Diagonal(1./sqrt(α))*Z + invPrior) 412 | (grad_1,grad_2) 413 | end 414 | 415 | function update_hyperparameter!(model) #Gradient ascent for γ, noise 416 | model.γ = model.γ + model.ρ_Θ*0.5*((trace(model.ζ)+norm(model.μ))/(model.γ^2.0)-model.nFeatures/model.γ) 417 | end 418 | 419 | function update_hyperparameter_NL!(model,X,y)#Gradient ascent for Θ , kernel parameters 420 | if model.invK == 0 421 | model.invK = Matrix(Symmetric(inv(CreateKernelMatrix(X,model.Kernel_function)+model.γ*eye(size(X,1))),:U)) 422 | end 423 | NKernels = size(model.kernels,1) 424 | A = model.invK*model.ζ-eye(model.nFeatures) 425 | grad_γ = 0.5*(sum(model.invK.*A)+dot(model.μ,model.invK*model.invK*model.μ)) 426 | if model.VerboseLevel > 2 427 | println("Grad gamma : $grad_γ") 428 | end 429 | model.γ = ((model.γ + model.ρ_Θ*grad_γ) < 0 ) ? model.γ/2 : (model.γ + model.ρ_Θ*grad_γ) 430 | #Update of both the coefficients and hyperparameters of the kernels 431 | if NKernels > 1 #If multiple kernels only update the kernel weight 432 | for i in 1:NKernels 433 | V = model.invK.*CreateKernelMatrix(X,model.kernels[i].compute) 434 | grad = 0.5*(sum(V.*A)+dot(model.μ,V*model.invK*model.μ))#update of the coeff 435 | if model.VerboseLevel > 2 436 | println("Grad kernel $i: $grad") 437 | end 438 | model.kernels[i].coeff = ((model.kernels[i].coeff + model.ρ_Θ*grad) < 0 ) ? 
model.kernels[i].coeff/2 : (model.kernels[i].coeff + model.ρ_Θ*grad) 439 | end 440 | elseif model.kernels[1].Nparams > 0 #If only one update the kernel lengthscale 441 | V = model.invK*model.kernels[1].coeff*CreateKernelMatrix(X,model.kernels[1].compute_deriv) 442 | grad = 0.5*(sum(V.*A)+dot(model.μ,V*model.invK*model.μ))#update of the hyperparameter 443 | model.kernels[1].param = ((model.kernels[1].param + model.ρ_Θ*grad) < 0 ) ? model.kernels[1].param/2 : (model.kernels[1].param + model.ρ_Θ*grad) 444 | if model.VerboseLevel > 2 445 | println("Grad kernel: $grad") 446 | end 447 | end 448 | end 449 | 450 | function update_hyperparameter_Sparse!(model,X,y)#Gradient ascent for Θ , kernel parameters #Not finished !!!!!!!!!!!!!!!!!!!!!!!!!! 451 | NKernels = size(model.kernels,1) 452 | A = eye(model.nFeatures)-model.invKmm*model.ζ 453 | B = model.μ*transpose(model.μ) + model.ζ 454 | Kmn = CreateKernelMatrix(model.inducingPoints,model.Kernel_function;X2=X) 455 | #Computation of noise constant 456 | if model.inducingPointsSelectionMethod == "Random" 457 | Jnm = CreateKernelMatrix(X,delta_kroenecker,X2=model.inducingPoints) 458 | else 459 | Jnm = 0 460 | end 461 | ι = (Jnm-model.κ)*model.invKmm 462 | grad_γ = -0.5*(sum(model.invKmm.*A) - dot(model.μ, transpose(model.μ)*model.invKmm*model.invKmm + 2*transpose(ones(size(X,1))+1./sqrt(model.α))*diagm(y)*ι)+ 463 | dot(1./sqrt(model.α),diag(model.κ*(B*transpose(ι)-transpose(Jnm)) + ι*(B*transpose(model.κ)-Kmn) )+ ones(size(X,1)))) 464 | if model.VerboseLevel > 2 465 | println("Grad gamma : $grad_γ") 466 | end 467 | #model.γ = ((model.γ + model.ρ_Θ*grad_γ) < 0 ) ? (model.γ < 1e-7 ? 
model.γ : model.γ/2) : (model.γ + model.ρ_Θ*grad_γ) 468 | if NKernels > 1 469 | for i in 1:NKernels 470 | Jnm = CreateKernelMatrix(X,model.kernels[i].compute,X2=model.inducingPoints) 471 | Jnn = CreateDiagonalKernelMatrix(X,model.kernels[i].compute) 472 | Jmm = CreateKernelMatrix(model.inducingPoints,model.kernels[i].compute) 473 | ι = (Jnm-model.κ*Jmm)*model.invKmm 474 | V = model.invKmm*Jmm 475 | grad = -0.5*(sum(V.*A) - dot(model.μ, transpose(model.μ)*V*model.invKmm + 2*transpose(ones(size(X,1))+1./sqrt(model.α))*diagm(y)*ι) + 476 | dot(1./sqrt(model.α),diag(model.κ*(B*transpose(ι)-transpose(Jnm)) + ι*(B*transpose(model.κ)-Kmn))+ Jnn)) 477 | model.kernels[i].coeff = ((model.kernels[i].coeff + model.ρ_Θ*grad) < 0 ) ? model.kernels[i].coeff/2 : (model.kernels[i].coeff + model.ρ_Θ*grad) 478 | if model.VerboseLevel > 2 479 | println("Grad kernel $i: $grad") 480 | end 481 | end 482 | elseif model.kernels[1].Nparams > 0 #Update of the hyperparameters of the KernelMatrix 483 | Jnm = model.kernels[1].coeff*CreateKernelMatrix(X,model.kernels[1].compute_deriv,X2=model.inducingPoints) 484 | Jnn = model.kernels[1].coeff*CreateDiagonalKernelMatrix(X,model.kernels[1].compute_deriv) 485 | Jmm = model.kernels[1].coeff*CreateKernelMatrix(model.inducingPoints,model.kernels[1].compute_deriv) 486 | ι = (Jnm-model.κ*Jmm)*model.invKmm 487 | V = model.invKmm*Jmm 488 | grad = -0.5*(sum(V.*A) - dot(model.μ, transpose(model.μ)*V*model.invKmm + 2*transpose(ones(size(X,1))+1./sqrt(model.α))*diagm(y)*ι) + 489 | dot(1./sqrt(model.α),diag(model.κ*(B*transpose(ι)-transpose(Jnm)) + ι*(B*transpose(model.κ)-Kmn))+Jnn)) 490 | model.kernels[1].param = ((model.kernels[1].param + model.ρ_Θ*grad) < 0 ) ? 
model.kernels[1].param/2 : (model.kernels[1].param + model.ρ_Θ*grad) 491 | if model.VerboseLevel > 2 492 | println("Grad kernel: $grad, new param is $(model.kernels[1].param)") 493 | end 494 | end 495 | end 496 | 497 | function ELBO(Z,μ,ζ,α,Σ) #Compute the loglikelihood of the training data, ####-----Could be improved in algebraic form---#### 498 | n = size(Z,1) 499 | likelihood = getindex(0.5*(logdet(ζ)-logdet(Σ)-trace(inv(Σ)*ζ)-transpose(μ)*ζ*μ),1) + n*(log(2)-0.5*log(2*pi)-1); 500 | for i in 1:n 501 | likelihood += 1.0/2.0*log(α[i]) + log(besselk(0.5,α[i])) + dot(vec(Z[i,:]),μ) + getindex(0.5/α[i]*(α[i]^2-(1-dot(vec(Z[i,:]),μ))^2 - transpose(vec(Z[i,:]))*ζ*vec(Z[i,:]))) 502 | end 503 | return likelihood 504 | end; 505 | 506 | function dELBO(Z,μ,ζ,α,Σ) 507 | (n,p) = size(Z) 508 | dζ = 0.5*(inv(ζ)-inv(Σ)-transpose(Z)*Diagonal(1./sqrt(α))*Z) 509 | dμ = -inv(ζ)*μ + transpose(Z)*(1./sqrt(α)+1) 510 | dα = zeros(n) 511 | for i in 1:n 512 | dα[i] = ((1-dot(Z[i,:],μ))^2 + dot(Z[i,:],ζ*Z[i,:]))/(2*(α[i])) - 0.5 513 | end 514 | γ = Σ[1,1] 515 | dγ = 0.5*((trace(ζ)+norm(μ))/(γ^2.0)-p/γ) 516 | return [trace(dζ),norm(dμ),norm(dα),dγ] 517 | end 518 | 519 | 520 | function ELBO_NL(model,y) 521 | n = size(y,1) 522 | ELBO = 0.5*(logdet(model.ζ)+logdet(model.invK)-trace(model.invK*model.ζ)-dot(model.μ,model.invK*model.μ)) 523 | for i in 1:n 524 | ELBO += 0.25*log(model.α[i])+log(besselk(0.5,sqrt(model.α[i])))+y[i]*model.μ[i]+(model.α[i]-(1-model.y[i]*model.μ[i])^2-model.ζ[i,i])/(2*sqrt(model.α[i])) 525 | end 526 | return ELBO 527 | end 528 | 529 | function dELBO_NL(y,μ,ζ,α,invK,J) 530 | n = size(X,1) 531 | dζ = 0.5*(inv(ζ)-invK-Diagonal(1./sqrt(α))) 532 | dμ = -inv(ζ)*μ + Diagonal(y)*(1./sqrt(α)+1) 533 | dα = zeros(n) 534 | for i in 1:n 535 | dα[i] = ((1-y[i]*μ[i])^2+ζ[i,i])/(2*α[i])-0.5 536 | end 537 | dΘ = 0.5*(trace(invK*J*(invK*ζ-1))+dot(μ,invK*J*invK*μ)) 538 | return [trace(dζ),norm(dμ),norm(dα),norm(dΘ)] 539 | end 540 | 541 | function SparseELBO(model,y) 542 | ELBO = 0.0 543 
| ELBO += 0.5*(logdet(model.ζ)+logdet(model.invKmm)) 544 | ELBO += -0.5*(sum(model.invKmm.*model.ζ)+dot(model.μ,model.invKmm*model.μ)) #trace replaced by sum 545 | ELBO += dot(y,model.κ*model.μ) 546 | ELBO += sum(0.25*log(model.α) + log(besselk(0.5,sqrt(model.α)))) 547 | ζtilde = model.κ*model.ζ*transpose(model.κ) 548 | for i in 1:length(y) 549 | ELBO += 0.5/sqrt(model.α[i])*(model.α[i]-(1-y[i]*dot(model.κ[i,:],model.μ))^2-(ζtilde[i,i]+model.Ktilde[i])) 550 | end 551 | return ELBO 552 | end 553 | 554 | 555 | function Plotting(option::String,model::BSVM) 556 | if !model.Storing 557 | warn("Data was not saved during training, please rerun training with option Storing=true") 558 | return 559 | elseif isempty(model.StoredValues ) 560 | warn("Model was not trained yet, please run a dataset before"); 561 | return 562 | end 563 | figure("Evolution of model properties over time"); 564 | iterations = collect(1:size(model.evol_β,1)) 565 | sparseiterations = collect(linspace(1,size(model.evol_β,1),size(model.StoredValues,1))) 566 | if option == "All" 567 | nFeatures = model.Autotuning ? 6 : 4; 568 | subplot(nFeatures÷2,2,1) 569 | plot(sparseiterations,model.StoredValues[:,1]) 570 | ylabel(L"Trace($\zeta$)") 571 | subplot(nFeatures÷2,2,2) 572 | plot(iterations,sqrt(sumabs2(model.evol_β,2))) 573 | ylabel(L"Normalized $\beta$") 574 | subplot(nFeatures÷2,2,3) 575 | plot(sparseiterations,model.StoredValues[:,2]) 576 | ylabel("ELBO") 577 | subplot(nFeatures÷2,2,4) 578 | semilogy(sparseiterations,model.StoredValues[:,4]) 579 | ylabel(L"\rho_s") 580 | if model.Autotuning 581 | subplot(nFeatures÷2,2,5) 582 | semilogy(sparseiterations,model.StoredValues[:,5]) 583 | ylabel(model.NonLinear ? 
L"||\theta||" : L"\gamma") 584 | subplot(nFeatures÷2,2,6) 585 | semilogy(sparseiterations,model.StoredValues[:,6]) 586 | ylabel(L"\rho_\theta") 587 | end 588 | elseif option == "dELBO" 589 | (nIterations,nFeatures) = size(model.StoreddELBO) 590 | DerivativesLabels = ["d\zeta" "d\mu" "d\alpha" "d\theta"] 591 | for i in 1:4 592 | if i <= 3 || (i==4 && model.Autotuning) 593 | subplot(2,2,i) 594 | plot(sparseiterations,model.StoreddELBO[:,i]) 595 | ylabel(DerivativesLabels[i]) 596 | end 597 | end 598 | elseif option == "Beta" 599 | plot(iterations,sqrt(sumabs2(model.evol_β))) 600 | ylabel(L"Normalized $\beta$") 601 | xlabel("Iterations") 602 | elseif option == "ELBO" 603 | plot(iterations,model.StoredValues[:,2]) 604 | ylabel("ELBO") 605 | xlabel("Iterations") 606 | else 607 | warn("Option not available, chose among those : All, dELBO, Beta, Autotuning, ELBO") 608 | end 609 | return; 610 | end 611 | 612 | 613 | function LinearPredict(X,β::Array{Float64,1}) 614 | return X*β 615 | end 616 | 617 | function LinearPredictProb(X,β::Array{Float64,1},ζ::Array{Float64,2}) 618 | n = size(X,1) 619 | predic = zeros(n) 620 | for i in 1:n 621 | predic[i] = cdf(Normal(),(dot(X[i,:],β))/(dot(X[i,:],ζ*X[i,:])+1)) 622 | end 623 | return predic 624 | end 625 | 626 | function NonLinearPredictProb(X,X_test,model) 627 | n = size(X_test,1) 628 | if model.down == 0 629 | if model.invK == 0 630 | model.invK = Matrix(Symmetric(inv(CreateKernelMatrix(X,model.kernelfunction,model.Θ)+model.γ*eye(size(X,1))),:U)) 631 | end 632 | model.top = model.invK*model.μ 633 | model.down = -(model.invK+model.invK*model.ζ*model.invK) 634 | end 635 | ksize = size(X,1) 636 | predic = zeros(n) 637 | k_star = zeros(ksize) 638 | k_starstar = 0 639 | for i in 1:n 640 | for j in 1:ksize 641 | k_star[j] = model.Kernel_function(X[j,:],X_test[i,:]) 642 | end 643 | k_starstar = model.Kernel_function(X_test[i,:],X_test[i,:]) 644 | predic[i] = cdf(Normal(),(dot(k_star,model.top))/(k_starstar + dot(k_star,model.down*k_star) 
+ 1)) 645 | end 646 | predic 647 | end 648 | 649 | function NonLinearPredict(X,X_test,model) 650 | n = size(X_test,1) 651 | if model.top == 0 652 | model.top = model.invK*model.μ 653 | end 654 | k_star = CreateKernelMatrix(X_test,model.Kernel_function,X2=X) 655 | return k_star*model.top 656 | end 657 | 658 | 659 | function SparsePredictProb(X_test,model) 660 | n = size(X_test,1) 661 | ksize = model.m 662 | if model.down == 0 663 | if model.top == 0 664 | model.top = model.invKmm*model.μ 665 | end 666 | model.down = model.invKmm*(eye(ksize)+model.ζ*model.invKmm) 667 | model.MatricesPrecomputed = true 668 | end 669 | predic = zeros(n) 670 | k_star = zeros(ksize) 671 | k_starstar = 0 672 | for i in 1:n 673 | for j in 1:ksize 674 | k_star[j] = model.Kernel_function(model.inducingPoints[j,:],X_test[i,:]) 675 | end 676 | k_starstar = model.Kernel_function(X_test[i,:],X_test[i,:]) 677 | 678 | predic[i] = cdf(Normal(),(dot(k_star,model.top))/(k_starstar - dot(k_star,model.down*k_star) + 1)) 679 | end 680 | predic 681 | end 682 | 683 | 684 | function SparsePredict(X_test,model) 685 | n = size(X_test,1) 686 | if model.top == 0 687 | model.top = model.invKmm*model.μ 688 | end 689 | k_star = CreateKernelMatrix(X_test,model.Kernel_function,X2=model.inducingPoints) 690 | return k_star*model.top 691 | end 692 | 693 | end #End Module 694 | -------------------------------------------------------------------------------- /src/DataAccess.jl: -------------------------------------------------------------------------------- 1 | #Module for either generating data or exporting from an existing dataset 2 | 3 | module DataAccess 4 | 5 | using Distributions 6 | 7 | export generate_uniform_data, generate_normal_data, generate_two_multivariate_data 8 | export get_Ionosphere, get_Sonar, get_Crabs, get_USPS 9 | export get_SUSY, get_Banana, get_Image, get_RingNorm 10 | export get_BinaryMNIST, get_3vs5MNIST, get_BreastCancer 11 | export get_Titanic, get_Splice, get_Diabetis, get_Thyroid 12 | 
export get_Heart, get_Waveform, get_Flare
export get_Pima, get_German #FIX: these accessors exist below but were never exported


#Uniform X in a box of half-width boxsize_, labels from a noisy random hyperplane
function generate_uniform_data(nFeatures_::Int64,nSamples_::Int64; y_neg_::Bool = true, boxsize_::Float64 = 1.0, noise_::Float64 = 0.3)
  generate_data(2, nFeatures_, nSamples_, y_neg = y_neg_, range = boxsize_, noise = noise_)
end

#Two Gaussian clusters shifted by ±sep_
function generate_two_multivariate_data(nFeatures_::Int64,nSamples_::Int64; sep_::Float64 = 2.0, y_neg_::Bool = true, noise_::Float64 = 1.0)
  #FIX: sep_ was accepted but never forwarded, making the keyword a no-op
  generate_data(1, nFeatures_, nSamples_, sep = sep_, y_neg = y_neg_, noise = noise_)
end

#Isotropic normal X, labels from a noisy random hyperplane
function generate_normal_data(nFeatures_::Int64,nSamples_::Int64; sep_::Float64 = 2.0, y_neg_::Bool = true,noise_::Float64 = 0.3, σ_::Float64 = 1.0)
  generate_data(3, nFeatures_, nSamples_, y_neg = y_neg_, noise = noise_, σ=σ_)
end

#Mixture of nβ_ linear models on isotropic normal X
function generate_multi_beta_normal_data(nFeatures_::Int64,nSamples_::Int64; sep_::Float64 = 2.0, y_neg_::Bool = true,noise_::Float64 = 0.3, σ_::Float64 = 1.0,nβ_::Int64 = 4)
  generate_data(4, nFeatures_, nSamples_, y_neg = y_neg_, noise = noise_, σ=σ_, nβ=nβ_)
end

#Dataset accessors: each wraps generate_data with the matching datatype code
function get_Ionosphere()
  generate_data(5,0,0)
end

function get_Sonar()
  generate_data(6,0,0)
end

function get_Crabs()
  generate_data(7,0,0)
end

function get_Pima()
  generate_data(8,0,0)
end

function get_USPS()
  generate_data(9,0,0)
end

function get_SUSY()
  generate_data(10,0,0)
end

function get_Banana()
  generate_data(11,0,0)
end

function get_Image()
  generate_data(12,0,0)
end

function get_RingNorm()
  generate_data(17,0,0)
end

function get_BinaryMNIST()
  generate_data(22,0,0)
end

function get_3vs5MNIST()
  generate_data(23,0,0)
end


function get_BreastCancer()
  generate_data(25,0,0)
end

function get_Titanic()
  generate_data(26,0,0)
end

function get_Splice()
  generate_data(27,0,0)
end

function get_Diabetis()
  generate_data(28,0,0)
end

function get_Thyroid()
  generate_data(30,0,0)
end

function get_Heart()
  generate_data(31,0,0)
end

function get_German()
  generate_data(32,0,0)
end
function get_Waveform()
  generate_data(33,0,0)
end

function get_Flare()
  generate_data(34,0,0)
end

#Central dispatcher: synthetic generators (datatype 1-4) and file-backed datasets
#(datatype >= 5). Returns (X, y, DatasetName, β_true, accuracy).
function generate_data(datatype, nFeatures, nSamples; sep = 2, y_neg::Bool = true, range::Float64 = 1.0, noise::Float64=1.0, σ::Float64=1.0, nβ::Int64=4)
  β_true = zeros(nFeatures)
  accuracy = 1
  shuffling = true
  seed = 123
  DatasetName = "None"
  if datatype == 1 # Multivariate normal distributed mixture X, easy separable
    X = randn(MersenneTwister(seed),(nSamples,nFeatures))
    X[1:nSamples÷2,:] += sep
    X[nSamples÷2+1:end,:] -= sep
    y = zeros((nSamples,1))
    y[1:nSamples÷2] += 1
    β_true[:] = sep
  elseif datatype == 2 #Generate points in a box (range) uniformly and separate them through a true hyperplane, still allowing mistakes with a normal (noise)
    X = rand(Uniform(-range,range),(nSamples,nFeatures))
    β_true = rand(Normal(0,1),nFeatures)
    y = sign(X*β_true+rand(Normal(0,noise),nSamples))
    accuracy= 1-countnz(y-sign(X*β_true))/nSamples
    y_neg = false
  elseif datatype == 3
    X = rand(IsoNormal(zeros(nFeatures),PDMats.ScalMat(nFeatures,σ)),nSamples)'
    β_true = rand(Normal(0,1),nFeatures)
    y = sign(X*β_true+rand(Normal(0,noise),nSamples))
    accuracy= 1-countnz(y-sign(X*β_true))/nSamples
    y_neg = false
  elseif datatype == 4 #Mixture of nβ linear models
    X = rand(IsoNormal(zeros(nFeatures),PDMats.ScalMat(nFeatures,σ)),nSamples)'
    β_true = rand(Normal(0,1),(nFeatures,nβ))
    y = zeros((nSamples,1))
    for i in 1:nSamples
      #FIX: the model index was the constant (nSamples%nβ)+1, β_true was indexed
      #as a scalar instead of a column, and an nSamples-long noise vector was
      #added to the scalar y[i]; cycle through the nβ models and draw one noise sample
      y[i] = sign(dot(X[i,:],β_true[:,(i%nβ)+1])+rand(Normal(0,noise)))
    end
    β_true = vec(mean(β_true,2)) #FIX: average over the nβ models, keeping a vector
    accuracy = 1-countnz(y-sign(X*β_true))/nSamples
    y_neg = false
  elseif datatype == 5
    data = readdlm("../data/ionosphere.data",',')
    X = convert(Array{Float64,2},data[:,1:(end-1)])
    y = convert(Array{Float64,1},collect(data[:,end].=="g"))
    nSamples = size(X,1)
    shuffling = true
    DatasetName = "Ionosphere"
  elseif datatype == 6
    data = readdlm("../data/sonar.data",',')
    X = convert(Array{Float64,2},data[:,1:(end-1)])
    y = convert(Array{Float64,1},collect(data[:,end].=="M"))
    nSamples = size(X,1)
    shuffling = true
    DatasetName = "Sonar"
  elseif datatype == 7
    data = readdlm("../data/crabs.csv",',')
    X = data[2:end,3:end]
    X[:,1] = convert(Array{Float64,1},collect(X[:,1].=="M")) #Encode the sex column
    X = convert(Array{Float64,2},X)
    y = convert(Array{Float64,1},collect(data[2:end,2].=="B"))
    nSamples = size(X,1)
    shuffling = true
    DatasetName = "Crabs"
  elseif datatype == 8
    data = readdlm("../data/pima-indians-diabetes.data",',')
    X = convert(Array{Float64,2},data[:,1:(end-1)])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    shuffling = true
    DatasetName = "Pima"
  elseif datatype == 9
    data = readdlm("../data/USPS.csv",';')
    X = convert(Array{Float64,2},data[:,2:end])
    y = convert(Array{Float64,1},collect(data[:,1].==3)) #Binary task: digit 3 vs rest
    nSamples = size(X,1)
    shuffling = true
    DatasetName = "USPS"
  elseif datatype == 10
    data = readdlm("../data/Processed_SUSY.data",',')
    X = convert(Array{Float64,2},data[:,1:end-1])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    shuffling = false
    DatasetName = "SUSY"
  elseif datatype == 11
    data = readdlm("../data/banana_data.csv",',')
    X = convert(Array{Float64,2},data[:,2:end])
    y = convert(Array{Float64,1},data[:,1])
    nSamples = size(X,1)
    DatasetName = "Banana"
    y_neg = false
  elseif datatype == 12
    data = readdlm("../data/Processed_Image.data",',')
    X = convert(Array{Float64,2},data[:,1:end-1])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    DatasetName = "Image"
    shuffling = false
    y_neg = false
  elseif datatype == 17
    data = readdlm("../data/ringnorm.data") #whitespace-delimited
    X = convert(Array{Float64,2},data[:,1:end-1])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    shuffling = false
    DatasetName = "RingNorm"
  elseif datatype == 22
    data = readdlm("../data/Processed_BinaryMNIST.data",',')
    X = convert(Array{Float64,2},data[:,1:end-1])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    shuffling = false
    y_neg = false
    DatasetName = "BinaryMNIST"
  elseif datatype == 23
    data = readdlm("../data/Processed_3vs5MNIST.data",',')
    X = convert(Array{Float64,2},data[:,1:end-1])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    shuffling = false
    y_neg = false
    DatasetName = "3vs5MNIST"
  elseif datatype == 25
    data = readdlm("../data/Processed_BreastCancer.data",',')
    X = convert(Array{Float64,2},data[:,1:end-1])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    shuffling = false
    y_neg = false
    DatasetName = "BreastCancer"
  elseif datatype == 26
    data = readdlm("../data/Processed_Titanic.data",',')
    X = convert(Array{Float64,2},data[:,1:end-1])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    shuffling = false
    y_neg = false
    DatasetName = "Titanic"
  elseif datatype == 27
    data = readdlm("../data/Processed_Splice.data",',')
    X = convert(Array{Float64,2},data[:,1:end-1])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    shuffling = false
    y_neg = false
    DatasetName = "Splice"
  elseif datatype == 28
    data = readdlm("../data/Processed_Diabetis.data",',')
    X = convert(Array{Float64,2},data[:,1:end-1])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    shuffling = false
    y_neg = false
    DatasetName = "Diabetis"
  elseif datatype == 30
    data = readdlm("../data/Processed_Thyroid.data",',')
    X = convert(Array{Float64,2},data[:,1:end-1])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    shuffling = false
    y_neg = false
    DatasetName = "Thyroid"
  elseif datatype == 31
    data = readdlm("../data/Processed_Heart.data",',')
    X = convert(Array{Float64,2},data[:,1:end-1])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    shuffling = false
    y_neg = false
    DatasetName = "Heart"
  elseif datatype == 32
    data = readdlm("../data/Processed_German.data",',')
    X = convert(Array{Float64,2},data[:,1:end-1])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    shuffling = false
    y_neg = false
    DatasetName = "German"
  elseif datatype == 33
    data = readdlm("../data/Processed_Waveform.data",',')
    X = convert(Array{Float64,2},data[:,1:end-1])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    shuffling = false
    y_neg = false
    DatasetName = "Waveform"
  elseif datatype == 34
    data = readdlm("../data/Processed_Flare.data",',')
    X = convert(Array{Float64,2},data[:,1:end-1])
    y = convert(Array{Float64,1},data[:,end])
    nSamples = size(X,1)
    shuffling = false
    y_neg = false
    DatasetName = "Flare"
  end

  #Go from y [0,1] to y [-1,1]
  if y_neg
    y = 2*y-1
  end
  Z = hcat(X,y)
  if shuffling
    Z = Z[shuffle(collect(1:nSamples)),:] #Shuffle the data
  end
  (Z[:,1:end-1],Z[:,end],DatasetName,β_true,accuracy) #Send the data and the parameters separately
end;

end #end of module
--------------------------------------------------------------------------------
/src/ECM.jl:
if !isdefined(:KernelFunctions); include("KernelFunctions.jl"); end;

module ECM

using Distributions
using KernelFunctions

export ECMTraining, PredicECM, PredictProbaECM


#Expectation Conditional Maximization training for the kernel SVM (Henao et al.).
#Returns (invλ, K, kernel, y, f): latent scales, kernel matrix, kernel object,
#labels and fitted latent function.
function ECMTraining(X::Array{Float64,2}, y::Array{Float64,1}; kernel=0,γ::Float64=1.0, nepochs::Int64=100,ϵ::Float64 = 1e-5,Θ=[1.0],verbose=false)
  #initialization of parameters
  n = size(X,1)
  k = size(X,1)
  Y = Diagonal(y)
  f = randn(k)
  invλ = abs(rand(k))
  if kernel == 0
    #FIX: with the default Θ=[1.0], Θ[2] was a BoundsError; fall back to coeff 1.0
    kernel = length(Θ) > 1 ? Kernel("rbf",Θ[1],params=Θ[2]) : Kernel("rbf",1.0,params=Θ[1])
  elseif isa(kernel,AbstractString) #FIX: typeof(kernel)==AbstractString is never true for a concrete string type
    if length(Θ)>1
      kernel = Kernel(kernel,Θ[1],params=Θ[2])
    else
      kernel = Kernel(kernel,Θ[1])
    end
  end
  #Initialize a vector using the prior information
  K = CreateKernelMatrix(X,kernel.compute)
  i = 1
  diff = Inf;
  while i < nepochs && diff > ϵ
    prev_f = f; #convergence is monitored on f only
    #Expectation Step
    invλ = sqrt(1.0+2.0/γ)./(abs(1.0-y.*f))
    #Maximization Step
    f = K*inv((K+1.0/γ*diagm(1./invλ)))*Y*(1+1./invλ)
    diff = norm(f-prev_f);
    i += 1
    if verbose
      println("$i : diff = $diff")
    end
  end
  if verbose
    println("Henao stopped after $i iterations")
  end
  return (invλ,K,kernel,y,f)
end

#Latent mean prediction on X_test from an ECM-trained model
function PredicECM(X,y,X_test,invλ,K,γ,kernel)
  n = size(X,1)
  n_t = size(X_test,1)
  predic = zeros(n_t)
  Σ = inv(K+1/γ*diagm(1./invλ))
  top = Σ*diagm(y)*(1+1./invλ)
  for i in 1:n_t
    k_star = zeros(n)
    for j in 1:n
      k_star[j] = kernel.compute(X[j,:],X_test[i,:])
    end
    predic[i] = dot(k_star,top)
  end
  return predic
end

#Class-1 probability on X_test under the probit link.
#NOTE(review): the denominator uses the variance (plus 1) rather than its
#square root — kept as in the original, confirm against the reference.
function PredictProbaECM(X,y,X_test,invλ,K,γ,kernel)
  n = size(X,1)
  n_t = size(X_test,1)
  predic = zeros(n_t)
  Σ = inv(K+1/γ*diagm(1./invλ))
  top = Σ*diagm(y)*(1+1./invλ)
  for i in 1:n_t
    k_star = zeros(n)
    for j in 1:n
      k_star[j] = kernel.compute(X[j,:],X_test[i,:])
    end
    k_starstar = kernel.compute(X_test[i,:],X_test[i,:])
    predic[i] = cdf(Normal(),dot(k_star,top)/(1+k_starstar-dot(k_star,Σ*k_star)))
  end
  return predic
end
end
--------------------------------------------------------------------------------
/src/KernelFunctions.jl:
#Module for the kernel type
#

module KernelFunctions

export Kernel
export CreateKernelMatrix, CreateDiagonalKernelMatrix
export delta_kroenecker


type Kernel
  kernel_function::Function # Kernel function
  coeff::Float64 #Weight for the kernel
  derivative_kernel::Function # Derivative of the kernel function (used for hyperparameter optimization)
  param::Float64 #Hyperparameter for the kernel function (depends of the function)
  Nparams::Int64 #Number of hyperparameters
  compute::Function #General computational function
  compute_deriv::Function #General derivative function
  #Constructor
  function Kernel(kernel, coeff::Float64; params=0)
    this = new()
    this.coeff = coeff
    this.Nparams = 1
    if kernel=="rbf"
      this.kernel_function = rbf
      this.derivative_kernel = deriv_rbf
      this.param = params
    elseif kernel=="quadra"
      this.kernel_function = quadratic
      this.derivative_kernel = deriv_quadratic
      this.param = params
    elseif kernel=="linear"
      this.kernel_function = linear
      this.Nparams = 0
    elseif kernel=="laplace"
      this.kernel_function = laplace
      this.derivative_kernel = deriv_laplace
      this.param = params
    elseif kernel=="abel"
      this.kernel_function = abel
      this.derivative_kernel = deriv_abel
      this.param = params
    elseif kernel=="imk"
      this.kernel_function = imk
      this.derivative_kernel = deriv_imk
      this.param = params
    else
      #FIX: the message referenced the undefined kernel_list[i] (UndefVarError)
      error("Kernel function $(kernel) not available, options are : rbf, quadra, linear, laplace, abel, imk")
    end
    if this.Nparams > 0
      this.compute = function(X1,X2)
          this.kernel_function(X1,X2,this.param)
        end
      this.compute_deriv = function(X1,X2)
          this.derivative_kernel(X1,X2,this.param)
        end
    else
      this.compute = function(X1,X2)
          this.kernel_function(X1,X2)
        end
    end
    return this
  end
end

function CreateKernelMatrix(X1,kernel_function;X2=0) #Create the kernel matrix from the training data or the correlation matrix between two set of data
  if X2 == 0
    #Symmetric case: only compute the lower triangle and mirror it
    ksize = size(X1,1)
    K = zeros(ksize,ksize)
    for i in 1:ksize
      for j in 1:i
        K[i,j] = kernel_function(X1[i,:],X1[j,:])
        if i != j
          K[j,i] = K[i,j]
        end
      end
    end
    return K
  else
    ksize1 = size(X1,1)
    ksize2 = size(X2,1)
    K = zeros(ksize1,ksize2)
    for i in 1:ksize1
      for j in 1:ksize2
        K[i,j] = kernel_function(X1[i,:],X2[j,:])
      end
    end
    return K
  end
end


#Diagonal k(x_i,x_i) of the kernel matrix, as a vector or a diagonal matrix
function CreateDiagonalKernelMatrix(X,kernel_function;MatrixFormat=false)
  n = size(X,1)
  kermatrix = zeros(n)
  for i in 1:n
    kermatrix[i] = kernel_function(X[i,:],X[i,:])
  end
  if MatrixFormat
    return diagm(kermatrix)
  else
    return kermatrix
  end
end

function delta_kroenecker(X1::Array{Float64,1},X2::Array{Float64,1})
  return X1==X2 ? 1 : 0
end

#Gaussian (RBF) Kernel
function rbf(X1::Array{Float64,1},X2::Array{Float64,1},Θ)
  if X1 == X2
    return 1
  end
  exp(-(norm(X1-X2))^2/(Θ^2))
end


function deriv_rbf(X1::Array{Float64,1},X2::Array{Float64,1},Θ)
  a = norm(X1-X2)
  if a != 0
    return 2*a^2/(Θ^3)*exp(-a^2/(Θ^2))
  else
    return 0
  end
end

#Laplace Kernel
function laplace(X1::Array{Float64,1},X2::Array{Float64,1},Θ)
  if X1 == X2
    return 1
  end
  exp(-Θ*norm(X1-X2,2))
end

function deriv_laplace(X1::Array{Float64,1},X2::Array{Float64,1},Θ)
  a = norm(X1-X2,2)
  -a*exp(-Θ*a)
end

#Abel Kernel
function abel(X1::Array{Float64,1},X2::Array{Float64,1},Θ)
  if X1==X2
    return 1
  end
  exp(-Θ*norm(X1-X2,1))
end

function deriv_abel(X1::Array{Float64,1},X2::Array{Float64,1},Θ)
  a = norm(X1-X2,1)
  -a*exp(-Θ*a)
end

#Inverse multiquadratic kernel: (||x1-x2||² + Θ)^(-1/2)
function imk(X1::Array{Float64,1},X2::Array{Float64,1},Θ)
  1/(sqrt(norm(X1-X2)^2+Θ))
end

function deriv_imk(X1::Array{Float64,1},X2::Array{Float64,1},Θ)
  #FIX: derivative of (||x1-x2||²+Θ)^(-1/2) w.r.t. Θ; the square on the norm was missing
  -1/(2*(sqrt(norm(X1-X2)^2+Θ))^3)
end

#Linear Kernel
function linear(X1::Array{Float64,1},X2::Array{Float64,1})
  dot(X1,X2)
end

#Quadratic Kernel (special case of polynomial kernel)
function quadratic(X1::Array{Float64,1},X2::Array{Float64,1},Θ)
  (dot(X1,X2)+Θ)^2
end

function deriv_quadratic(X1::Array{Float64,1},X2::Array{Float64,1},Θ)
  2*(dot(X1,X2)+Θ)
end

end #end of module
--------------------------------------------------------------------------------
/src/paper_experiment_functions.jl:
# Paper_Experiment_Functions.jl
#= ---------------- #
Set of datatype and functions for
efficient testing.
# ---------------- =#


if !isdefined(:ECM); include("../src/ECM.jl"); end;
if !isdefined(:BayesianSVM); include("../src/BSVM.jl"); end;

module TestFunctions

using ScikitLearn
using PyCall
using Distributions
using KernelFunctions
using ECM
using BayesianSVM
@sk_import svm: SVC;
@pyimport GPflow

export TestingModel
export DefaultParameters, BSVMParameters, GPCParameters, ECMParameters, SVMParameters
export CreateModel, TrainModel, RunTests, ProcessResults, PrintResults, WriteResults
export ComputePrediction, ComputePredictionAccuracy

#Datatype for containing the model, its results and its parameters
type TestingModel
  MethodName::String #Name of the method
  DatasetName::String #Name of the dataset
  ExperimentType::String #Type of experiment
  MethodType::String #Type of method used ("SVM","BSVM","ECM","GPC")
  Param::Dict{String,Any} #Some paramters to run the method
  Results::Dict{String,Any} #Saved results
  Model::Any
  TestingModel(methname,dataset,exp,methtype) = new(methname,dataset,exp,methtype,Dict{String,Any}(),Dict{String,Any}())
  TestingModel(methname,dataset,exp,methtype,params) = new(methname,dataset,exp,methtype,params,Dict{String,Any}())
end

#Create a default dictionary
function DefaultParameters()
  param = Dict{String,Any}()
  param["ϵ"]= 1e-8 #Convergence criteria
  param["BatchSize"] = 10 #Number of points used for stochasticity
  param["Kernel"] = "rbf" # Kernel function
  param["Θ"] = 1.0 # Hyperparameter for the kernel function
  param["γ"] = 1.0 #Variance of introduced noise
  param["M"] = 32 #Number of inducing points
  param["Window"] = 5 #Number of points used to check convergence (smoothing effect)
  param["Verbose"] = 0 #Verbose
  return param
end

#Create a default parameters dictionary for BSVM
function BSVMParameters(;Stochastic=true,NonLinear=true,Sparse=true,ALR=true,Autotuning=false,main_param=DefaultParameters())
  param = Dict{String,Any}()
  param["Stochastic"] = Stochastic #Is the method stochastic
  param["Sparse"] = Sparse #Is the method using inducing points
  param["NonLinear"] = NonLinear #Is the method using kernels
  param["ALR"] = ALR #Is the method using adpative learning rate (in case of the stochastic case)
  param["AutoTuning"] = Autotuning #Is hyperoptimization performed
  param["ATFrequency"] = 10 #How even autotuning is performed
  param["κ"] = 1.0; param["τ"] = 100; #Parameters for learning rate of autotuning
  param["κ_s"] = 1.0; param["τ_s"] = 100; #Parameters for learning rate of Stochastic gradient descent when ALR is not used
  param["ϵ"] = main_param["ϵ"]; param["Window"] = main_param["Window"]; #Convergence criteria (checking parameters norm variation on a window)
  param["Kernels"] = [Kernel(main_param["Kernel"],1.0,params=main_param["Θ"])] #Kernel creation (standardized for now)
  #FIX: main_param["Verbose"] is an Int (default 0) and a bare Int is not a valid
  #Bool condition in Julia; compare explicitly
  param["Verbose"] = main_param["Verbose"] > 0 ? 2 : 0 #Verbose
  param["BatchSize"] = main_param["BatchSize"] #Number of points used for stochasticity
  param["M"] = main_param["M"] #Number of inducing points
  param["γ"] = main_param["γ"] #Variance of introduced noise
  return param
end

#Create a default parameters dictionary for GPC (similar to BSVM)
#NOTE(review): requires main_param["nFeatures"], which DefaultParameters does not
#set — callers must add it before calling this function.
function GPCParameters(;Sparse=true,Stochastic=false,main_param=DefaultParameters())
  param = Dict{String,Any}()
  param["Sparse"] = Sparse
  if Sparse
    param["Stochastic"] = Stochastic
  else
    param["Stochastic"] = false
  end
  param["ϵ"] = main_param["ϵ"]
  param["Kernel"] = GPflow.kernels[:Add]([GPflow.kernels[:RBF](main_param["nFeatures"]),GPflow.kernels[:White](input_dim=main_param["nFeatures"],variance=main_param["γ"])])
  param["BatchSize"] = main_param["BatchSize"]
  param["M"] = main_param["M"]
  return param
end

#Create a default parameters dictionary for ECM (similar to BSVM)
function ECMParameters(;main_param=DefaultParameters())
  param = Dict{String,Any}()
  param["ϵ"] = main_param["ϵ"]
  param["Kernel"] = Kernel(main_param["Kernel"],1.0,params=main_param["Θ"])
  param["γ"] = main_param["γ"]
  param["Verbose"] = main_param["Verbose"]
  return param
end


#Create a default parameters dictionary for SVM (similar to BSVM)
function SVMParameters(;probability = true,main_param=DefaultParameters())
  param = Dict{String,Any}()
  param["probability"] = probability
  param["ϵ"] = main_param["ϵ"]
  param["Kernel"] = main_param["Kernel"]
  param["gamma"] = 1/(main_param["Θ"]^2)
  param["C"] = 2*main_param["γ"]
  param["Verbose"] = main_param["Verbose"]
  return param
end

#Create a model given the parameters passed in p
function CreateModel(tm::TestingModel,X,y) #tm testing_model, p parameters
  if tm.MethodType == "BSVM"
    #FIX: pass Stochastic as a keyword argument, consistent with test_functions.jl
    tm.Model = BSVM(Stochastic=tm.Param["Stochastic"],batchSize=tm.Param["BatchSize"],Sparse=tm.Param["Sparse"],m=tm.Param["M"],NonLinear=tm.Param["NonLinear"],
      kernels=tm.Param["Kernels"],Autotuning=tm.Param["AutoTuning"],autotuningfrequency=tm.Param["ATFrequency"],AdaptativeLearningRate=tm.Param["ALR"],κ_s=tm.Param["κ_s"],τ_s = tm.Param["τ_s"],ϵ=tm.Param["ϵ"],γ=tm.Param["γ"],
      κ_Θ=tm.Param["κ"],τ_Θ=tm.Param["τ"],smoothingWindow=tm.Param["Window"],VerboseLevel=tm.Param["Verbose"])
  elseif tm.MethodType == "GPC"
    if tm.Param["Sparse"]
      if tm.Param["Stochastic"]
        #Stochastic Sparse GPC model
        #FIX: the constructed model was discarded; assign it to tm.Model
        tm.Model = GPflow.svgp[:SVGP](X, reshape((y+1)./2,(length(y),1)),kern=tm.Param["Kernel"], likelihood=GPflow.likelihoods[:Bernoulli](), Z=KMeansInducingPoints(X,tm.Param["M"],10), minibatch_size=tm.Param["BatchSize"])
      else
        #Sparse GPC model
        #FIX: the constructed model was discarded; assign it to tm.Model
        tm.Model = GPflow.svgp[:SVGP](X, reshape((y+1)./2,(size(y,1),1)),kern=tm.Param["Kernel"],likelihood=GPflow.likelihoods[:Bernoulli](), Z=KMeansInducingPoints(X,tm.Param["M"],10))
      end
    else
      #Basic GPC model
      tm.Model = GPflow.vgp[:VGP](X, reshape((y+1)./2,(size(y,1),1)),kern=tm.Param["Kernel"],likelihood=GPflow.likelihoods[:Bernoulli]())
    end
  elseif tm.MethodType == "SVM"
    tm.Model = SVC(C=tm.Param["C"],gamma=tm.Param["gamma"], kernel=tm.Param["Kernel"],probability=tm.Param["probability"],tol=tm.Param["ϵ"],verbose=tm.Param["Verbose"])
  end
end

#Train the model on trainin set (X,y) for #iterations
function TrainModel(tm::TestingModel,X,y,iterations)
  time_training = 0;
  if tm.MethodType == "BSVM"
    tm.Model.nEpochs = iterations
    time_training = @elapsed TrainBSVM(tm.Model,X,y)
  elseif tm.MethodType == "GPC"
    time_training = @elapsed tm.Model[:optimize](maxiter=iterations)
  elseif tm.MethodType == "SVM"
    tm.Model[:max_iter] = iterations
    time_training = @elapsed tm.Model[:fit](X,y)
  elseif tm.MethodType == "ECM"
    time_training = @elapsed tm.Model = ECMTraining(X,y,γ=tm.Param["γ"],nepochs=iterations,ϵ=tm.Param["ϵ"],kernel=tm.Param["Kernel"],verbose=tm.Param["Verbose"])
  end
  return time_training;
end

#Run tests accordingly to the arguments and save them
function RunTests(tm::TestingModel,X,X_test,y_test;accuracy::Bool=false,brierscore::Bool=false,logscore::Bool=false)
  if accuracy
    push!(tm.Results["accuracy"],TestAccuracy(y_test,ComputePrediction(tm,X,X_test)))
  end
  y_predic_acc = 0 #sentinel: probabilities not computed yet
  if brierscore
    y_predic_acc = ComputePredictionAccuracy(tm::TestingModel, X, X_test)
    push!(tm.Results["brierscore"],TestBrierScore(y_test,y_predic_acc))
  end
  if logscore
    if y_predic_acc == 0
      y_predic_acc = ComputePredictionAccuracy(tm::TestingModel, X, X_test)
    end
    push!(tm.Results["logscore"],TestLogScore(y_test,y_predic_acc))
  end
end


#Compute the mean and the standard deviation and assemble in one result
function ProcessResults(tm::TestingModel,writing_order)
  all_results = Array{Float64,1}()
  names = Array{String,1}()
  for name in writing_order
    result = [mean(tm.Results[name]), std(tm.Results[name])]
    all_results = vcat(all_results,result)
    names = vcat(names,name)
  end
  if haskey(tm.Results,"allresults")
    tm.Results["allresults"] = vcat(tm.Results["allresults"],all_results')
  else
    tm.Results["allresults"] = all_results'
  end
  if !haskey(tm.Results,"names")
    tm.Results["names"] = names
  end
end

function PrintResults(results,method_name,writing_order)
  println("Model $(method_name) : ")
  i = 1
  for category in writing_order
    println("$category : $(results[i*2-1]) ± $(results[i*2])")
    i+=1
  end
end

function WriteResults(tm::TestingModel,location)
  fold = String(location*"/"*tm.ExperimentType*"Experiment_"*tm.DatasetName*"Dataset")
  if !isdir(fold); mkdir(fold); end;
  writedlm(String(fold*"/Results_"*tm.MethodName*".txt"),tm.Results["allresults"])
end

#Return predicted labels (-1,1) for test set X_test
function ComputePrediction(tm::TestingModel, X, X_test)
  y_predic = []
  if tm.MethodType == "BSVM"
    if tm.Model.NonLinear
      y_predic = sign(tm.Model.Predict(X,X_test))
    else
      y_predic = sign(tm.Model.Predict(X_test))
    end
  elseif tm.MethodType == "GPC"
    y_predic = sign(tm.Model[:predict_y](X_test)[1]*2-1)
  elseif tm.MethodType == "SVM"
    y_predic = sign(tm.Model[:predict](X_test))
  elseif tm.MethodType == "ECM"
    y_predic = sign(PredicECM(X,tm.Model[4],X_test,tm.Model[1],tm.Model[2],tm.Param["γ"],tm.Model[3]))
  end
  return y_predic
end

#Return prediction certainty for class 1 on test set X_test
function ComputePredictionAccuracy(tm::TestingModel, X, X_test)
  y_predic = []
  if tm.MethodType == "BSVM"
    if tm.Model.NonLinear
      y_predic = tm.Model.PredictProba(X,X_test)
    else
      y_predic = tm.Model.PredictProba(X_test)
    end
  elseif tm.MethodType == "GPC"
    y_predic = tm.Model[:predict_y](X_test)[1]
  elseif tm.MethodType == "SVM"
    y_predic = tm.Model[:predict_proba](X_test)[:,2]
  elseif tm.MethodType == "ECM"
    y_predic = PredictProbaECM(X,tm.Model[4],X_test,tm.Model[1],tm.Model[2],tm.Param["γ"],tm.Model[3])
  end
  return y_predic
end

#Return Accuracy on test set
function TestAccuracy(y_test, y_predic)
  return 1-sum(1-y_test.*y_predic)/(2*length(y_test))
end
#Return Brier Score
function TestBrierScore(y_test, y_predic)
  return sum(((y_test+1)./2 - y_predic).^2)/length(y_test)
end
#Return Log Score
function TestLogScore(y_test, y_predic)
  return sum((y_test+1)./2.*log(y_predic)+(1-(y_test+1)./2).*log(1-y_predic))/length(y_test)
end
#Return ROC curve (true positive rate, false positive rate) over npoints thresholds
function TestROC(y_test,y_predic;npoints::Int64=100)
  #FIX: npoints and yp were undefined variables; npoints is now a keyword and
  #the probability vector argument y_predic is used throughout
  nt = length(y_test)
  truepositive = zeros(npoints); falsepositive = zeros(npoints)
  truenegative = zeros(npoints); falsenegative = zeros(npoints)
  thresh = collect(linspace(0,1,npoints))
  for i in 1:npoints
    for j in 1:nt
      truepositive[i] += (y_predic[j]>=thresh[i] && y_test[j]>=0.9) ? 1 : 0;
      truenegative[i] += (y_predic[j]<=thresh[i] && y_test[j]<=-0.9) ? 1 : 0;
      falsepositive[i] += (y_predic[j]>=thresh[i] && y_test[j]<=-0.9) ? 1 : 0;
      falsenegative[i] += (y_predic[j]<=thresh[i] && y_test[j]>=0.9) ? 1 : 0;
    end
  end
  return (truepositive./(truepositive+falsenegative),falsepositive./(truenegative+falsepositive))
end

end #end of module
--------------------------------------------------------------------------------
/src/test_functions.jl:
# Paper_Experiment_Functions.jl
#= ---------------- #
Set of datatype and functions for efficient testing.
# ---------------- =#

if !isdefined(:BayesianSVM); include("BSVM.jl"); end;

module TestFunctions

using BayesianSVM
using ScikitLearn
using Distributions
using KernelFunctions

export TestingModel
export DefaultParameters, BSVMParameters
export CreateModel, TrainModel, RunTests, ProcessResults, PrintResults, WriteResults
export ComputePrediction, ComputePredictionAccuracy

#Datatype for containing the model, its results and its parameters
type TestingModel
  MethodName::String #Name of the method
  DatasetName::String #Name of the dataset
  ExperimentType::String #Type of experiment
  MethodType::String #Type of method used ("SVM","BSVM","ECM","GPC")
  Param::Dict{String,Any} #Some paramters to run the method
  Results::Dict{String,Any} #Saved results
  Model::Any
  TestingModel(methname,dataset,exp,methtype) = new(methname,dataset,exp,methtype,Dict{String,Any}(),Dict{String,Any}())
  TestingModel(methname,dataset,exp,methtype,params) = new(methname,dataset,exp,methtype,params,Dict{String,Any}())
end

#Create a default dictionary
function DefaultParameters()
  param = Dict{String,Any}()
  param["ϵ"]= 1e-8 #Convergence criteria
  param["BatchSize"] = 10 #Number of points used for stochasticity
  param["Kernel"] = "rbf" # Kernel function
  param["Θ"] = 1.0 # Hyperparameter for the kernel function
  param["γ"] = 1.0 #Variance of introduced noise
  param["M"] = 32 #Number of inducing points
  param["Window"] = 5 #Number of points used to check convergence (smoothing effect)
  param["Verbose"] = 0 #Verbose
  return param
end

#Create a default parameters dictionary for BSVM
function BSVMParameters(;Stochastic=true,NonLinear=true,Sparse=true,ALR=true,Autotuning=false,main_param=DefaultParameters())
  param = Dict{String,Any}()
  param["Stochastic"] = Stochastic #Is the method stochastic
  param["Sparse"] = Sparse #Is the method using inducing points
  param["NonLinear"] = NonLinear #Is the method using kernels
  param["ALR"] = ALR #Is the method using adpative learning rate (in case of the stochastic case)
  param["AutoTuning"] = Autotuning #Is hyperoptimization performed
  param["ATFrequency"] = 10 #How even autotuning is performed
  param["κ"] = 1.0; param["τ"] = 100; #Parameters for learning rate of autotuning
  param["κ_s"] = 1.0; param["τ_s"] = 100; #Parameters for learning rate of Stochastic gradient descent when ALR is not used
  param["ϵ"] = main_param["ϵ"]; param["Window"] = main_param["Window"]; #Convergence criteria (checking parameters norm variation on a window)
  param["Kernels"] = [Kernel(main_param["Kernel"],1.0,params=main_param["Θ"])] #Kernel creation (standardized for now)
  #FIX: main_param["Verbose"] is an Int (default 0) and a bare Int is not a valid
  #Bool condition in Julia; compare explicitly
  param["Verbose"] = main_param["Verbose"] > 0 ? 2 : 0 #Verbose
  param["BatchSize"] = main_param["BatchSize"] #Number of points used for stochasticity
  param["M"] = main_param["M"] #Number of inducing points
  param["γ"] = main_param["γ"] #Variance of introduced noise
  return param
end

#Create a model given the parameters passed in p
function CreateModel(tm::TestingModel,X,y) #tm testing_model, p parameters
  tm.Model = BSVM(Stochastic=tm.Param["Stochastic"],batchSize=tm.Param["BatchSize"],Sparse=tm.Param["Sparse"],m=tm.Param["M"],NonLinear=tm.Param["NonLinear"],
    kernels=tm.Param["Kernels"],Autotuning=tm.Param["AutoTuning"],autotuningfrequency=tm.Param["ATFrequency"],AdaptativeLearningRate=tm.Param["ALR"],κ_s=tm.Param["κ_s"],τ_s = tm.Param["τ_s"],ϵ=tm.Param["ϵ"],γ=tm.Param["γ"],
    κ_Θ=tm.Param["κ"],τ_Θ=tm.Param["τ"],smoothingWindow=tm.Param["Window"],VerboseLevel=tm.Param["Verbose"])
end

#Train the model on trainin set (X,y) for #iterations
function TrainModel(tm::TestingModel,X,y,iterations)
  time_training = 0;
  tm.Model.nEpochs = iterations
  time_training = @elapsed TrainBSVM(tm.Model,X,y)
  return time_training;
end

#Run tests accordingly to the arguments and save them
function RunTests(tm::TestingModel,X,X_test,y_test;accuracy::Bool=false,brierscore::Bool=false,logscore::Bool=false)
  if accuracy
    push!(tm.Results["accuracy"],TestAccuracy(y_test,ComputePrediction(tm,X,X_test)))
  end
  y_predic_acc = 0 #sentinel: probabilities not computed yet
  if brierscore
    y_predic_acc = ComputePredictionAccuracy(tm::TestingModel, X, X_test)
    push!(tm.Results["brierscore"],TestBrierScore(y_test,y_predic_acc))
  end
  if logscore
    if y_predic_acc == 0
      y_predic_acc = ComputePredictionAccuracy(tm::TestingModel, X, X_test)
    end
    push!(tm.Results["logscore"],TestLogScore(y_test,y_predic_acc))
  end
end


#Compute the mean and the standard deviation and assemble in one result
function ProcessResults(tm::TestingModel,writing_order)
  all_results = Array{Float64,1}()
  names = Array{String,1}()
  for name in writing_order
    result = [mean(tm.Results[name]), std(tm.Results[name])]
    all_results = vcat(all_results,result)
    names = vcat(names,name)
  end
  if haskey(tm.Results,"allresults")
    tm.Results["allresults"] = vcat(tm.Results["allresults"],all_results')
  else
    tm.Results["allresults"] = all_results'
  end
  if !haskey(tm.Results,"names")
    tm.Results["names"] = names
  end
end

function PrintResults(results,method_name,writing_order)
  #Header line added for consistency with paper_experiment_functions.jl
  println("Model $(method_name) : ")
  i = 1
  for category in writing_order
    println("$category : $(results[i*2-1]) ± $(results[i*2])")
    i+=1
  end
end

function WriteResults(tm::TestingModel,location)
  fold = String(location*"/"*tm.ExperimentType*"Experiment_"*tm.DatasetName*"Dataset")
  if !isdir(fold); mkdir(fold); end;
  writedlm(String(fold*"/Results_"*tm.MethodName*".txt"),tm.Results["allresults"])
end

#Return predicted labels
(-1,1) for test set X_test 135 | function ComputePrediction(tm::TestingModel, X, X_test) 136 | y_predic = [] 137 | if tm.Model.NonLinear 138 | y_predic = sign(tm.Model.Predict(X,X_test)) 139 | else 140 | y_predic = sign(tm.Model.Predict(X_test)) 141 | end 142 | return y_predic 143 | end 144 | 145 | #Return prediction certainty for class 1 on test set X_test 146 | function ComputePredictionAccuracy(tm::TestingModel, X, X_test) 147 | y_predic = [] 148 | if tm.Model.NonLinear 149 | y_predic = tm.Model.PredictProba(X,X_test) 150 | else 151 | y_predic = tm.Model.PredictProba(X_test) 152 | end 153 | return y_predic 154 | end 155 | 156 | #Return Accuracy on test set 157 | function TestAccuracy(y_test, y_predic) 158 | return 1-sum(1-y_test.*y_predic)/(2*length(y_test)) 159 | end 160 | #Return Brier Score 161 | function TestBrierScore(y_test, y_predic) 162 | return sum(((y_test+1)./2 - y_predic).^2)/length(y_test) 163 | end 164 | #Return Log Score 165 | function TestLogScore(y_test, y_predic) 166 | return sum((y_test+1)./2.*log(y_predic)+(1-(y_test+1)./2).*log(1-y_predic))/length(y_test) 167 | end 168 | #Return ROC 169 | function TestROC(y_test,y_predic) 170 | nt = length(y_test) 171 | truepositive = zeros(npoints); falsepositive = zeros(npoints) 172 | truenegative = zeros(npoints); falsenegative = zeros(npoints) 173 | thresh = collect(linspace(0,1,npoints)) 174 | for i in 1:npoints 175 | for j in 1:nt 176 | truepositive[i] += (yp[j]>=thresh[i] && y_test[j]>=0.9) ? 1 : 0; 177 | truenegative[i] += (yp[j]<=thresh[i] && y_test[j]<=-0.9) ? 1 : 0; 178 | falsepositive[i] += (yp[j]>=thresh[i] && y_test[j]<=-0.9) ? 1 : 0; 179 | falsenegative[i] += (yp[j]<=thresh[i] && y_test[j]>=0.9) ? 
1 : 0; 180 | end 181 | end 182 | return (truepositive./(truepositive+falsenegative),falsepositive./(truenegative+falsepositive)) 183 | end 184 | 185 | end #end of module 186 | -------------------------------------------------------------------------------- /tests/paper_experiments.jl: -------------------------------------------------------------------------------- 1 | #### Paper_Experiment_Predictions #### 2 | # Run on a file and compute accuracy on a nFold cross validation 3 | # Compute also the brier score and the logscore 4 | 5 | if !isdefined(:DataAccess); include("../src/DataAccess.jl"); end; 6 | if !isdefined(:TestFunctions); include("../src/paper_experiment_functions.jl");end; 7 | using TestFunctions 8 | using PyPlot 9 | using DataAccess 10 | #Compare Platt, B-BSVM, ECM and GPC 11 | 12 | #Methods and scores to test 13 | doBBSVM = false 14 | doSBSVM = false 15 | doPlatt = true 16 | doGPC = false 17 | doECM = true 18 | 19 | doTime = true #Return time needed for training 20 | doAccuracy = true #Return Accuracy 21 | doBrierScore = true # Return BrierScore 22 | doLogScore = false #Return LogScore 23 | 24 | doWrite = false #Write results in approprate folder 25 | ShowIntResults = false #Show intermediate time, and results for each fold 26 | #Testing Parameters 27 | #= Datasets available are get_X : 28 | Ionosphere,Sonar,Crabs,USPS, Banana, Image, RingNorm 29 | BreastCancer, Titanic, Splice, Diabetis, Thyroid, Heart, Waveform, Flare 30 | =# 31 | (X_data,y_data,DatasetName) = get_BreastCancer() 32 | MaxIter = 100 #Maximum number of iterations for every algorithm 33 | (nSamples,nFeatures) = size(X_data); 34 | nFold = 10; #Chose the number of folds 35 | fold_separation = collect(1:nSamples÷nFold:nSamples+1) #Separate the data in nFold 36 | 37 | 38 | #Main Parameters 39 | main_param = DefaultParameters() 40 | main_param["nFeatures"] = nFeatures 41 | main_param["nSamples"] = nSamples 42 | main_param["ϵ"] = 1e-5 #Convergence criterium 43 | main_param["M"] = 
min(100,floor(Int64,0.2*nSamples)) 44 | main_param["Kernel"] = "rbf" 45 | main_param["Θ"] = 5.0 #Hyperparameter of the kernel 46 | main_param["BatchSize"] = 10 47 | main_param["Verbose"] = false 48 | main_param["Window"] = 30 49 | #BSVM and GPC Parameters 50 | BBSVMParam = BSVMParameters(Stochastic=false,Sparse=false,ALR=false,main_param=main_param) 51 | SBSVMParam = BSVMParameters(Stochastic=true,Sparse=true,ALR=false,main_param=main_param) 52 | GPCParam = GPCParameters(Stochastic=false,Sparse=false,main_param=main_param) 53 | ECMParam = ECMParameters(main_param=main_param) 54 | SVMParam = SVMParameters(main_param=main_param) 55 | 56 | #Global variables for debugging 57 | X = []; y = []; X_test = []; y_test = []; 58 | 59 | #Set of all models 60 | TestModels = Dict{String,TestingModel}() 61 | 62 | if doBBSVM; TestModels["BBSVM"] = TestingModel("BBSVM",DatasetName,"Prediction","BSVM",BBSVMParam); end; 63 | if doSBSVM; TestModels["SBSVM"] = TestingModel("SBSVM",DatasetName,"Prediction","BSVM",SBSVMParam); end; 64 | if doPlatt; TestModels["Platt"] = TestingModel("SVM",DatasetName,"Prediction","SVM",SVMParam); end; 65 | if doGPC; TestModels["GPC"] = TestingModel("GPC",DatasetName,"Prediction","GPC",GPCParam); end; 66 | if doECM; TestModels["ECM"] = TestingModel("ECM",DatasetName,"Prediction","ECM",ECMParam); end; 67 | 68 | writing_order = Array{String,1}(); if doTime; push!(writing_order,"time"); end; 69 | if doAccuracy; push!(writing_order,"accuracy"); end; if doBrierScore; push!(writing_order,"brierscore"); end; 70 | if doLogScore; push!(writing_order,"logscore"); end; 71 | 72 | #conv_BSVM = falses(nFold); conv_SBSVM = falses(nFold); conv_SSBSVM = falses(nFold); conv_GPC = falses(nFold); conv_SGPC = falses(nFold); conv_SSGPC = falses(nFold); conv_EM = falses(nFold); conv_FITCEM = falses(nFold); conv_SVM = falses(nFold) 73 | for (name,testmodel) in TestModels 74 | println("Running $(testmodel.MethodName) on $(testmodel.DatasetName) dataset") 75 | #Initialize the 
results storage 76 | if doTime; testmodel.Results["time"] = Array{Float64,1}();end; 77 | if doAccuracy; testmodel.Results["accuracy"] = Array{Float64,1}();end; 78 | if doBrierScore; testmodel.Results["brierscore"] = Array{Float64,1}();end; 79 | if doLogScore; testmodel.Results["logscore"] = Array{Float64,1}();end; 80 | for i in 1:nFold #Run over all folds of the data 81 | if ShowIntResults 82 | println("#### Fold number $i/$nFold ###") 83 | end 84 | X_test = X_data[fold_separation[i]:(fold_separation[i+1])-1,:] 85 | y_test = y_data[fold_separation[i]:(fold_separation[i+1])-1] 86 | X = X_data[vcat(collect(1:fold_separation[i]-1),collect(fold_separation[i+1]:nSamples)),:] 87 | y = y_data[vcat(collect(1:fold_separation[i]-1),collect(fold_separation[i+1]:nSamples))] 88 | CreateModel(testmodel,X,y) 89 | time = TrainModel(testmodel,X,y,MaxIter) 90 | if ShowIntResults 91 | println("$(testmodel.MethodName) : Time = $time") 92 | end 93 | if doTime; push!(testmodel.Results["time"],time); end; 94 | RunTests(testmodel,X,X_test,y_test,accuracy=doAccuracy,brierscore=doBrierScore,logscore=doLogScore) 95 | end 96 | ProcessResults(testmodel,writing_order) #Compute mean and std deviation 97 | PrintResults(testmodel.Results["allresults"],testmodel.MethodName,writing_order) #Print the Results in the end 98 | if doWrite 99 | top_fold = "data"; 100 | if !isdir(top_fold); mkdir(top_fold); end; 101 | WriteResults(testmodel,top_fold) #Write the results in an adapted format into a folder 102 | end 103 | end 104 | -------------------------------------------------------------------------------- /tests/run_test.jl: -------------------------------------------------------------------------------- 1 | #### Paper_Experiment_Predictions #### 2 | # Run on a file and compute accuracy on a nFold cross validation 3 | # Compute also the brier score and the logscore 4 | 5 | if !isdefined(:DataAccess); include("../src/DataAccess.jl"); end; 6 | if !isdefined(:TestFunctions); 
include("../src/test_functions.jl");end; 7 | using TestFunctions 8 | using DataAccess 9 | 10 | ####### Data and Training Parameters ####### 11 | ### Setting the Dataset ### 12 | #= Datasets available with get_X , replace X with : 13 | Ionosphere,Sonar,Crabs,USPS, Banana, Image, RingNorm 14 | BreastCancer, Titanic, Splice, Diabetis, Thyroid, Heart, Waveform, Flare (a package will be soon created to deal with user's datasets) 15 | =# 16 | (X_data,y_data,DatasetName) = get_USPS() 17 | (nSamples,nFeatures) = size(X_data); 18 | nFold = 10; #Chose the number of folds 19 | fold_separation = collect(1:nSamples÷nFold:nSamples+1) #Separate the data in nFold 20 | 21 | ### Setting results output ### 22 | doTime = true #Compute time needed for training 23 | doAccuracy = true #Compute Accuracy 24 | doBrierScore = true #Compute BrierScore 25 | doLogScore = false #Compute LogScore 26 | doWrite = false #Write results in appropriate folder 27 | ShowIntResults = false #Show intermediate time, and results for each fold 28 | 29 | 30 | ### Training Parameters and Hyperparameters### 31 | MaxIter = 100 #Maximum number of iterations for every algorithm 32 | main_param = DefaultParameters(); main_param["nFeatures"] = nFeatures; main_param["nSamples"] = nSamples 33 | main_param["ϵ"] = 1e-5 #Convergence criterium; 34 | main_param["M"] = min(100,floor(Int64,0.2*nSamples));main_param["Kernel"] = "rbf" 35 | main_param["Θ"] = 5.0 #Hyperparameter of the kernel 36 | main_param["BatchSize"] = 10;main_param["Verbose"] = false;main_param["Window"] = 30 37 | 38 | #### Creating the Model #### 39 | #BSVM Parameters (Stochastic is for the Stochastic version, Sparse is with induing points and ALR is adaptative learning rate) 40 | SBSVMParam = BSVMParameters(Stochastic=true,Sparse=true,ALR=false,main_param=main_param) 41 | Model = TestingModel("SBSVM",DatasetName,"Prediction","BSVM",SBSVMParam) 42 | 43 | writing_order = Array{String,1}(); if doTime; push!(writing_order,"time"); end; 44 | if doAccuracy; 
push!(writing_order,"accuracy"); end; if doBrierScore; push!(writing_order,"brierscore"); end; 45 | if doLogScore; push!(writing_order,"logscore"); end; 46 | #Initialize the results storage 47 | if doTime; Model.Results["time"] = Array{Float64,1}();end; if doAccuracy; Model.Results["accuracy"] = Array{Float64,1}();end; 48 | if doBrierScore; Model.Results["brierscore"] = Array{Float64,1}();end; if doLogScore; Model.Results["logscore"] = Array{Float64,1}();end; 49 | 50 | 51 | 52 | ###### Training Model ###### 53 | println("Running BSVM on $(Model.DatasetName) dataset") 54 | for i in 1:nFold #Run over all folds of the data 55 | if ShowIntResults 56 | println("#### Fold number $i/$nFold ###") 57 | end 58 | #Separating Data 59 | X_test = X_data[fold_separation[i]:(fold_separation[i+1])-1,:] 60 | y_test = y_data[fold_separation[i]:(fold_separation[i+1])-1] 61 | X = X_data[vcat(collect(1:fold_separation[i]-1),collect(fold_separation[i+1]:nSamples)),:] 62 | y = y_data[vcat(collect(1:fold_separation[i]-1),collect(fold_separation[i+1]:nSamples))] 63 | #Add Data to the Model 64 | CreateModel(Model,X,y) 65 | #Train the Model 66 | time = TrainModel(Model,X,y,MaxIter) 67 | if ShowIntResults 68 | println("$(Model.MethodName) : Time = $time") 69 | end 70 | if doTime; push!(Model.Results["time"],time); end; 71 | #Run the tests on the last fold of data 72 | RunTests(Model,X,X_test,y_test,accuracy=doAccuracy,brierscore=doBrierScore,logscore=doLogScore) 73 | end 74 | 75 | ##### Process the results from the k-fold #### 76 | ProcessResults(Model,writing_order) #Compute mean and std deviation 77 | PrintResults(Model.Results["allresults"],Model.MethodName,writing_order) #Print the Results in the end 78 | if doWrite 79 | top_fold = "data"; 80 | if !isdir(top_fold); mkdir(top_fold); end; 81 | WriteResults(Model,top_fold) #Write the results in an adapted format into a folder 82 | end 83 | --------------------------------------------------------------------------------