├── MANIFEST.in
├── .coveragerc
├── .landscape.yaml
├── _config.yml
├── few
│   ├── _version.py
│   ├── lib
│   │   ├── Makefile
│   │   ├── few_lib.pyx
│   │   ├── evaluation.h
│   │   └── epsilon_lexicase.h
│   ├── __init__.py
│   ├── tests
│   │   ├── __init__.py
│   │   ├── test_variation.py
│   │   ├── test_population.py
│   │   ├── test_selection.py
│   │   ├── test_few.py
│   │   └── test_evaluation.py
│   ├── selection.py
│   ├── variation.py
│   ├── population.py
│   └── evaluation.py
├── .gitignore
├── .travis.yml
├── docs
│   ├── few_example.py
│   └── data
│       ├── d_yacht.txt
│       ├── d_enc.txt
│       └── d_housing.txt
├── ci
│   ├── .travis_test.sh
│   └── .travis_install.sh
├── README.md
├── setup.py
└── LICENSE

--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | global-include *.pyx
2 | global-include *.h

--------------------------------------------------------------------------------
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | branch = True
3 | source = few
4 | include = few/*.py

--------------------------------------------------------------------------------
/.landscape.yaml:
--------------------------------------------------------------------------------
1 | doc-warnings: yes
2 | 
3 | ignore-patterns:
4 |   - __init__.py

--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
1 | theme: jekyll-theme-minimal
2 | title: FEW
3 | description: A general feature engineering wrapper for sklearn estimators

--------------------------------------------------------------------------------
/few/_version.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Copyright 2016 William La Cava
4 | 
5 | license: GNU/GPLv3
6 | 
7 | """
8 | 
9 | __version__ = '0.0.51'

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | *.pyo
3 | __pycache__/
4 | FEW.egg-info/
5 | *.cfg
6 | *.txt
7 | dist/
8 | analysis/
9 | *.dropbox
10 | *.attr
11 | *.ipynb
12 | *.so
13 | *.o
14 | *.cpp

--------------------------------------------------------------------------------
/few/lib/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: all clean
2 | 
3 | all:
4 | 	g++ -fPIC -shared -O3 -I /usr/include/eigen3 -I /usr/include/python3.5 -std=c++0x epsilon_lexicase.cpp -lpython3.5m -o few_lib.so
5 | #	g++ -std=c++0x -shared epsilon_lexicase.o -lpython3.5m -o few_lib.so
6 | clean:
7 | 	rm few_lib.so
8 | 	rm epsilon_lexicase.o

--------------------------------------------------------------------------------
/few/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | 
3 | """
4 | Copyright 2016 William La Cava
5 | 
6 | This file is part of the FEW library.
7 | 
8 | License: GPLv3
9 | """
10 | 
11 | from ._version import __version__
12 | from .few import FEW, main
13 | # from .few import evaluation
14 | # from .few import selection
15 | # from .few import variation

--------------------------------------------------------------------------------
/few/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2016 William La Cava
3 | 
4 | This file is part of the FEW library.
5 | 
6 | License: GPLv3
7 | """
8 | from .test_variation import test_cross_makes_valid_program, test_mutate_makes_valid_program
9 | from .test_population import test_pop_shape, test_pop_init
10 | from .test_selection import *
11 | from .test_evaluation import test_out_shapes, test_out_is_correct, test_calc_fitness_shape, test_inertia, test_separation
12 | from .test_few import test_few_fit_shapes, test_few_at_least_as_good_as_default, test_few_classification

--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: python
2 | virtualenv:
3 |   system_site_packages: true
4 | env:
5 |   matrix:
6 |     # let's start simple:
7 |     # - PYTHON_VERSION="2.7" LATEST="true" "XGBOOST_VERSION=0.4a30"
8 |     # - PYTHON_VERSION="3.4" LATEST="true" "XGBOOST_VERSION=0.4a30"
9 |     - PYTHON_VERSION="3.5" COVERAGE="true" LATEST="true"
10 |     # - PYTHON_VERSION="3.5" LATEST="true" "XGBOOST_VERSION=0.4a30"
11 | install: source ./ci/.travis_install.sh
12 | script: bash ./ci/.travis_test.sh
13 | after_success:
14 |   # Ignore coveralls failures as the coveralls server is not very reliable
15 |   # but we don't want travis to report a failure in the github UI just
16 |   # because the coverage report failed to be published.
17 |   - if [[ "$COVERAGE" == "true" ]]; then coveralls || echo "failed"; fi
18 | cache: apt
19 | sudo: required

--------------------------------------------------------------------------------
/few/lib/few_lib.pyx:
--------------------------------------------------------------------------------
1 | # distutils: language=c++
2 | from eigency.core cimport *
3 | cimport numpy as np
4 | from libcpp.vector cimport vector
5 | from libcpp cimport bool
6 | 
7 | cdef extern from "epsilon_lexicase.h":
8 |     cdef void _epsilon_lexicase "epsilon_lexicase"(Map[ArrayXXd] & F, int n,
9 |                                                    int d, int num_selections,
10 |                                                    Map[ArrayXi] & locs, bool lex_size,
11 |                                                    Map[ArrayXi] &sizes)
12 | 
13 | # This will be exposed to Python
14 | def ep_lex(np.ndarray F, int n, int d, int num_selections, np.ndarray locs, bool lex_size,
15 |            np.ndarray sizes):
16 |     return _epsilon_lexicase(Map[ArrayXXd](F), n, d, num_selections,
17 |                              Map[ArrayXi](locs), lex_size, Map[ArrayXi](sizes))

--------------------------------------------------------------------------------
/docs/few_example.py:
--------------------------------------------------------------------------------
1 | from few import FEW
2 | import pandas as pd
3 | from sklearn.model_selection import train_test_split
4 | 
5 | dataset = 'data/d_enc.txt'
6 | 
7 | input_data = pd.read_csv(dataset, sep=None, engine='python')
8 | 
9 | # generate train/test split
10 | train_i, test_i = train_test_split(input_data.index, train_size=0.75, test_size=0.25,
11 |                                    random_state=10)
12 | 
13 | # training data
14 | X_train = input_data.loc[train_i].drop('label', axis=1).values
15 | Y_train = input_data.loc[train_i, 'label'].values
16 | 
17 | # testing data
18 | X_test = input_data.loc[test_i].drop('label', axis=1).values
19 | Y_test = input_data.loc[test_i, 'label'].values
20 | 
21 | few = FEW(random_state=10, verbosity=1)
22 | few.fit(X_train, Y_train)
23 | 
24 | print('\nTraining accuracy: {}'.format(few.score(X_train, Y_train)))
25 | print('Holdout accuracy: {}'.format(few.score(X_test, Y_test)))
26 | print('\nModel: {}'.format(few.print_model()))

--------------------------------------------------------------------------------
/ci/.travis_test.sh:
--------------------------------------------------------------------------------
1 | # modified from https://github.com/trevorstephens/gplearn
2 | 
3 | # This script is meant to be called by the "script" step defined in
4 | # .travis.yml. See http://docs.travis-ci.com/ for more details.
5 | # The behavior of the script is controlled by environment variables defined
6 | # in the .travis.yml in the top level folder of the project.
7 | 
8 | # License: GNU/GPLv3
9 | 
10 | set -e
11 | 
12 | python --version
13 | python -c "import numpy; print('numpy %s' % numpy.__version__)"
14 | python -c "import scipy; print('scipy %s' % scipy.__version__)"
15 | python -c "import sklearn; print('sklearn %s' % sklearn.__version__)"
16 | python -c "import pandas; print('pandas %s' % pandas.__version__)"
17 | python -c "import update_checker; print('update_checker %s ' % update_checker.__version__)"
18 | python -c "import tqdm; print('tqdm %s' % tqdm.__version__)"
19 | 
20 | if [[ "$COVERAGE" == "true" ]]; then
21 |     nosetests -s -v --with-coverage
22 | else
23 |     nosetests -s -v
24 | fi
25 | # make test-doc test-sphinxext

--------------------------------------------------------------------------------
/few/lib/evaluation.h:
--------------------------------------------------------------------------------
1 | /* evaluation c++ code
2 | Copyright 2016 William La Cava
3 | 
4 | This file is part of the FEW library.
5 | 
6 | The FEW library is free software: you can redistribute it and/or
7 | modify it under the terms of the GNU General Public License as published by the
8 | Free Software Foundation, either version 3 of the License, or (at your option)
9 | any later version.
10 | 
11 | The FEW library is distributed in the hope that it will be useful, but
12 | WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
14 | You should have received a copy of the GNU General Public License along with
15 | the FEW library. If not, see http://www.gnu.org/licenses/.
16 | */
17 | #include <vector>
18 | #include "Eigen/Dense"
19 | #include <iostream>
20 | using namespace Eigen;
21 | using namespace std;
22 | 
23 | /* DEFINE Custom Type Names to make code more readable
24 | ExtMat : 2-dim matrix/array externally defined (in Python)
25 | */
26 | typedef Map<ArrayXXd> ExtMat;
27 | typedef ArrayXXd Mat;
28 | typedef ArrayXd Vec;
29 | // boolean array type for the boolean output stack
30 | typedef Array<bool,Dynamic,1> ArrayXb;
31 | 
32 | // evaluate a single program node on a given set of data (stub; node is the
33 | // program element type used by FEW's population).
34 | void evaluate(node n, ExtMat& features, vector<Vec>& stack_float,
35 |               vector<ArrayXb>& stack_bool)
36 | {
37 |     // ExtMat F (features, n, d);
38 | }
39 | 
40 | // evaluate program output by running each node of the program on the stacks.
41 | void out(vector<node> program, ExtMat& features, char otype){
42 |     vector<Vec> stack_float;
43 |     vector<ArrayXb> stack_bool;
44 |     for (auto n: program){
45 |         // evaluate program nodes on stack
46 |         evaluate(n, features, stack_float, stack_bool);
47 |     }
48 | }

--------------------------------------------------------------------------------
/ci/.travis_install.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | # modified from https://github.com/trevorstephens/gplearn
4 | 
5 | # This script is meant to be called by the "install" step defined in
6 | # .travis.yml. See http://docs.travis-ci.com/ for more details.
7 | # The behavior of the script is controlled by environment variables defined
8 | # in the .travis.yml in the top level folder of the project.
9 | 
10 | 
11 | # License: GNU/GPLv3
12 | 
13 | set -e
14 | 
15 | # Fix the compilers to work around the Python 3.4 build
16 | # looking up g++44 unexpectedly.
17 | export CC=gcc
18 | export CXX=g++
19 | 
20 | # Deactivate the travis-provided virtual environment and setup a
21 | # conda-based environment instead
22 | deactivate
23 | 
24 | # Use the miniconda installer for faster download / install of conda
25 | # itself
26 | wget http://repo.continuum.io/miniconda/Miniconda-3.9.1-Linux-x86_64.sh \
27 |     -O miniconda.sh
28 | chmod +x miniconda.sh && ./miniconda.sh -b
29 | export PATH=/home/travis/miniconda/bin:$PATH
30 | conda update --yes conda
31 | 
32 | # Configure the conda environment and put it in the path using the
33 | # provided versions
34 | if [[ "$LATEST" == "true" ]]; then
35 |     conda create -n testenv --yes python=$PYTHON_VERSION pip nose \
36 |         numpy scipy scikit-learn cython pandas
37 | else
38 |     conda create -n testenv --yes python=$PYTHON_VERSION pip nose \
39 |         numpy=$NUMPY_VERSION scipy=$SCIPY_VERSION \
40 |         scikit-learn=$SKLEARN_VERSION \
41 |         pandas=$PANDAS_VERSION \
42 |         cython
43 | fi
44 | 
45 | source activate testenv
46 | 
47 | if [[ "$COVERAGE" == "true" ]]; then
48 |     pip install coverage coveralls
49 | fi
50 | 
51 | # print the python version in the travis output when the build succeeds.
52 | python --version
53 | 
54 | python setup.py install

--------------------------------------------------------------------------------
/few/tests/test_variation.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2016 William La Cava
3 | 
4 | This file is part of the FEW library.
5 | 
6 | The FEW library is free software: you can redistribute it and/or
7 | modify it under the terms of the GNU General Public License as published by the
8 | Free Software Foundation, either version 3 of the License, or (at your option)
9 | any later version.
10 | 
11 | The FEW library is distributed in the hope that it will be useful, but
12 | WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
14 | You should have received a copy of the GNU General Public License along with
15 | the FEW library. If not, see http://www.gnu.org/licenses/.
16 | 
17 | """
18 | # unit tests for variation methods.
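# FEW represents each candidate feature as a postfix stack of nodes: for
# example, the stack [node('x',loc=1), node('x',loc=2), node('+'),
# node('x',loc=3), node('*')] encodes the tree (x_1 + x_2) * x_3. The tests
# below check that crossover and mutation always leave such stacks
# syntactically valid (see is_valid_program in test_population.py).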
19 | import numpy as np
20 | from few import FEW
21 | from few.tests.test_population import is_valid_program, node
22 | 
23 | def test_cross_makes_valid_program():
24 |     """test_variation.py: crossover makes valid programs """
25 |     # np.random.seed(65)
26 |     # p1 encodes (x_1 + x_2) * x_3
27 |     p1 = [node('x',loc=1),node('x',loc=2),node('+'),node('x',loc=3),node('*')]
28 |     # p2 encodes (x_1 / x_2) - (3.7 * x_4)
29 |     p2 = [node('x',loc=1), node('x',loc=2), node('/'), node('k',value=3.7), node('x',loc=4), node('*'), node('-')]
30 |     # test 1000 crossover events
31 |     few = FEW()
32 |     for i in np.arange(1000):
33 |         few.cross(p1,p2)
34 |         assert is_valid_program(p1) and is_valid_program(p2)
35 | 
36 | def test_mutate_makes_valid_program():
37 |     """test_variation.py: mutation makes valid programs """
38 |     func_set = [node('+'), node('-'), node('*'), node('/'), node('sin'),
39 |                 node('cos'), node('exp'), node('log'), node('^2'),
40 |                 node('^3'), node('sqrt')]
41 |     # terminal set: numbers represent column indices of features
42 |     term_set = [node('x',loc=i) for i in np.arange(10)]
43 |     term_set += [node('k',value=np.random.rand()) for i in np.arange(10)]
44 |     # program
45 |     p = [node('k',value=5),node('x',loc=6),node('/'),node('k',value=7),node('x',loc=8),node('*'),node('-')]
46 |     # test 1000 mutation events
47 |     few = FEW()
48 |     for i in np.arange(1000):
49 |         few.mutate(p,func_set,term_set)
50 |         assert is_valid_program(p)

--------------------------------------------------------------------------------
/few/tests/test_population.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2016 William La Cava
3 | 
4 | This file is part of the FEW library.
5 | 
6 | The FEW library is free software: you can redistribute it and/or
7 | modify it under the terms of the GNU General Public License as published by the
8 | Free Software Foundation, either version 3 of the License, or (at your option)
9 | any later version.
10 | 
11 | The FEW library is distributed in the hope that it will be useful, but
12 | WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
14 | You should have received a copy of the GNU General Public License along with
15 | the FEW library. If not, see http://www.gnu.org/licenses/.
16 | 
17 | """
18 | import numpy as np
19 | from few import FEW
20 | from few.population import *
21 | from itertools import accumulate
22 | # unit tests for population methods.
23 | def test_pop_shape():
24 |     """test_population.py: population class returns correct sizes """
25 |     # pop = Pop(0)
26 |     # assert len(pop) == 0
27 |     pop = Pop(10)
28 |     assert len(pop.individuals) == 10
29 | 
30 |     pop = Pop(73)
31 |     assert len(pop.individuals) == 73
32 | 
33 |     pop = Pop(73,5)
34 |     assert len(pop.individuals) == 73
35 | 
36 | # NOTE: population initialization is done in the FEW class now; this test needs to be rewritten.
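# test_pop_init below builds a terminal set of three feature nodes (x_0..x_2),
# hands it to a FEW instance with ML seeding disabled, and asserts that every
# program stack produced by init_pop() is a syntactically valid tree.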
37 | def test_pop_init():
38 |     """test_population.py: population initialization makes valid trees """
39 |     # function set
40 |     # func_set = [('+',2),('-',2),('*',2),('/',2),('sin',1),('cos',1),('exp',1),('log',1)]
41 |     # terminal set
42 |     term_set = []
43 |     n_features = 3
44 |     # numbers represent column indices of features
45 |     term_set = [node('x',loc=i) for i in np.arange(n_features)]
46 |     # term_set.append(('erc',0,np.random.rand())) # ephemeral random constants
47 |     few = FEW(seed_with_ml=False)
48 |     few.term_set = term_set
49 |     few.n_features = n_features
50 |     pop = few.init_pop()
51 | 
52 |     for I in pop.individuals:
53 |         assert is_valid_program(I.stack)
54 | 
55 | def is_valid_program(p):
56 |     """ checks that the accumulated program length is always greater than the
57 |     accumulated arities, indicating that the appropriate number of arguments is
58 |     always present for functions. It then checks that the total arity accounts
59 |     for every element of the stack except the root, indicating that there are
60 |     no missing arguments. """
61 |     # print("p:",p)
62 |     arities = list(a.arity[a.in_type] for a in p)
63 |     accu_arities = list(accumulate(arities))
64 |     accu_len = list(np.arange(len(p))+1)
65 |     check = list(a < b for a,b in zip(accu_arities,accu_len))
66 |     # print("accu_arities:",accu_arities)
67 |     # print("accu_len:",accu_len)
68 |     # print("accu_arities < accu_len:",check)
69 |     return all(check) and len(p) - sum(arities) > 0

--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [![Build Status](https://travis-ci.org/lacava/few.svg?branch=master)](https://travis-ci.org/lacava/few)
2 | [![Code Health](https://landscape.io/github/lacava/few/master/landscape.svg?style=flat)](https://landscape.io/github/lacava/few/master)
3 | [![Coverage Status](https://coveralls.io/repos/github/lacava/few/badge.svg?branch=master)](https://coveralls.io/github/lacava/few?branch=master)
4 | [![DOI](https://zenodo.org/badge/65411376.svg)](https://zenodo.org/badge/latestdoi/65411376)
5 | 
6 | # Few
7 | 
8 | **Few** is a **Feature Engineering Wrapper** for scikit-learn. Few looks for a set of feature transformations that work best with a specified machine learning algorithm in order to improve model estimation and prediction. In doing so, Few is able to provide the user with a set of concise, engineered features that describe their data.
9 | 
10 | Few uses genetic programming to generate, search, and update engineered features. It incorporates feedback from the ML process to select important features, while also scoring them internally.
11 | 
12 | ## Install
13 | 
14 | You can use pip to install FEW from [PyPi](https://pypi.python.org/pypi/FEW) as:
15 | 
16 | ```pip install few```
17 | 
18 | or you can clone the git repo and add it to your Python path. Then, from the repo, run
19 | 
20 | ```python setup.py install```
21 | 
22 | ### Mac users
23 | 
24 | Some Mac users have reported issues when installing with old versions of gcc (like gcc-4.2) because the random.h library is not included (basically [this issue](https://stackoverflow.com/questions/5967065/python-distutils-not-using-correct-version-of-gcc)). I recommend installing gcc-4.8 or greater for use with Few. After updating the compiler, you can reinstall with
25 | 
26 | ```bash
27 | CC=gcc-4.8 python setup.py install
28 | ```
29 | 
30 | ## Usage
31 | 
32 | Few uses the same nomenclature as [sklearn](http://scikit-learn.org/) supervised learning modules.
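Every FEW instance exposes the familiar `fit`, `predict`, and `score` methods, along with a `transform` method that returns the matrix of engineered features for new data.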
Here is a simple example script:
33 | 
34 | ```python
35 | # import few
36 | from few import FEW
37 | from sklearn.linear_model import LassoLarsCV
38 | # initialize
39 | learner = FEW(generations=100, population_size=25, ml=LassoLarsCV())
40 | # fit model
41 | learner.fit(X, y)
42 | # generate prediction
43 | y_pred = learner.predict(X_unseen)
44 | # get feature transformation
45 | Phi = learner.transform(X_unseen)
46 | ```
47 | 
48 | You can also call Few from the terminal as
49 | 
50 | ```bash
51 | python -m few.few data_file_name
52 | ```
53 | 
54 | try ```python -m few.few --help``` to see the command-line options.
55 | 
56 | ## Examples
57 | 
58 | Check out [few_example.py](http://github.com/lacava/few/tree/master/docs/few_example.py) to see how to apply FEW to a regression dataset.
59 | 
60 | ## Publications
61 | 
62 | If you use Few, please reference our publications:
63 | 
64 | La Cava, W., and Moore, J.H. A general feature engineering wrapper for machine learning using epsilon-lexicase survival. *Proceedings of the 20th European Conference on Genetic Programming (EuroGP 2017)*, Amsterdam, Netherlands. [preprint](http://williamlacava.com/pubs/evostar_few_lacava.pdf)
65 | 
66 | La Cava, W., and Moore, J.H. Ensemble representation learning: an analysis of fitness and survival for wrapper-based genetic programming methods. *GECCO '17: Proceedings of the 2017 Genetic and Evolutionary Computation Conference*. Berlin, Germany. [arxiv](https://arxiv.org/abs/1703.06934)
67 | 
68 | ## Acknowledgments
69 | 
70 | This method is being developed to study the genetic causes of human disease in the [Epistasis Lab at UPenn](http://epistasis.org). Work is partially supported by the [Warren Center for Network and Data Science](http://warrencenter.upenn.edu). Thanks to Randy Olson and [TPOT](http://github.com/rhiever/tpot) for Python guidance.

--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | from setuptools import setup, find_packages
4 | from setuptools.extension import Extension
5 | from Cython.Build import cythonize
6 | 
7 | # the setup file relies on eigency to import its include paths for the
8 | # extension modules. however eigency isn't known as a dependency until after
9 | # setup is parsed; so we need to check for and install eigency before setup.
10 | import importlib
11 | try:
12 |     importlib.import_module('eigency')
13 | except (ImportError, AttributeError):
14 |     try:
15 |         from pip._internal import main
16 |         main(['install', 'eigency'])
17 |     except ImportError:
18 |         raise ImportError('The eigency library must be installed before FEW. '
19 |                           'Automatic install with pip failed.')
20 | finally:
21 |     globals()['eigency'] = importlib.import_module('eigency')
22 | 
23 | def calculate_version():
24 |     initpy = open('few/_version.py').read().split('\n')
25 |     version = list(filter(lambda x: '__version__' in x, initpy))[0].split('\'')[1]
26 |     return version
27 | 
28 | package_version = calculate_version()
29 | 
30 | # few_lib = Extension(name='few_lib',
31 | #                     sources=['few/lib/epsilon_lexicase.cpp'],
32 | #                     include_dirs = ['/usr/include/eigen3'],
33 | #                     depends = ['Eigen/Dense.h'],
34 | #                     extra_compile_args = ['-std=c++0x']
35 | #                     )
36 | 
37 | # check if windows or *nix
38 | from sys import platform
39 | print('platform:', platform)
40 | if platform == 'win32':
41 |     eca = ''
42 | else:
43 |     eca = '-std=c++0x'
44 | 
45 | setup(
46 |     name='FEW',
47 |     version=package_version,
48 |     author='William La Cava',
49 |     author_email='lacava@upenn.edu',
50 |     packages=find_packages(),
51 |     url='https://github.com/lacava/few',
52 |     download_url='https://github.com/lacava/few/releases/tag/'+package_version,
53 |     license='GNU/GPLv3',
54 |     entry_points={'console_scripts': ['few=few:main', ]},
55 |     test_suite='nose.collector',
56 |     tests_require=['nose'],
57 |     description=('Feature Engineering Wrapper'),
58 |     long_description='''
59 | A feature engineering wrapper for scikit-learn based on genetic programming.
60 | 
61 | Contact:
62 | ===
63 | e-mail: lacava@upenn.edu
64 | 
65 | This project is hosted at https://github.com/lacava/few
66 | ''',
67 |     zip_safe=True,
68 |     install_requires=['numpy', 'scipy', 'pandas', 'scikit-learn',
69 |                       'update_checker', 'tqdm', 'joblib', 'DistanceClassifier',
70 |                       'scikit-mdr', 'Cython', 'eigency'],
71 |     setup_requires=['numpy', 'scipy', 'pandas', 'scikit-learn',
72 |                     'update_checker', 'tqdm', 'joblib', 'DistanceClassifier',
73 |                     'scikit-mdr', 'Cython', 'eigency'],
74 |     classifiers=[
75 |         'Intended Audience :: Science/Research',
76 |         'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
77 |         # 'Programming Language :: Python :: 2.7',
78 |         # 'Programming Language :: Python :: 3',
79 |         # 'Programming Language :: Python :: 3.4',
80 |         'Programming Language :: Python :: 3.5',
81 |         'Topic :: Scientific/Engineering :: Artificial Intelligence'
82 |     ],
83 |     keywords=['data science', 'machine learning', 'classification'],
84 |     ext_modules=cythonize([Extension(name="few_lib",
85 |                                      sources=["few/lib/few_lib.pyx"],
86 |                                      include_dirs=[".", "./few/lib"] +
87 |                                                   eigency.get_includes(),
88 |                                      extra_compile_args=[eca])],
89 |                           language="c++")
90 | )

--------------------------------------------------------------------------------
/few/tests/test_selection.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2016 William La Cava
3 | 
4 | This file is part of the FEW library.
5 | 
6 | The FEW library is free software: you can redistribute it and/or
7 | modify it under the terms of the GNU General Public License as published by the
8 | Free Software Foundation, either version 3 of the License, or (at your option)
9 | any later version.
10 | 
11 | The FEW library is distributed in the hope that it will be useful, but
12 | WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
14 | You should have received a copy of the GNU General Public License along with
15 | the FEW library. If not, see http://www.gnu.org/licenses/.
16 | 17 | """ 18 | from few.population import * 19 | # from few.selection import * 20 | from few import FEW 21 | import numpy as np 22 | # unit tests for selection methods. 23 | def test_tournament_shapes(): 24 | """test_selection.py: tournament selection returns correct shape""" 25 | 26 | few = FEW(seed_with_ml=False,population_size=257) 27 | few.term_set = [node('x',loc=0)] 28 | pop = few.init_pop() 29 | offspring,locs = few.tournament(pop.individuals,2) 30 | assert len(offspring) == 257 31 | 32 | offspring,locs = few.tournament(pop.individuals,5) 33 | assert len(offspring) == 257 34 | 35 | # smaller popsize than tournament size 36 | few = FEW(seed_with_ml=False,population_size=2) 37 | few.term_set = [node('x',loc=0)] 38 | pop = few.init_pop() 39 | offspring,locs = few.tournament(pop.individuals,5) 40 | assert len(offspring) == 2; 41 | 42 | def test_lexicase_shapes(): 43 | """test_selection.py: lexicase selection returns correct shape""" 44 | np.random.seed(42) 45 | few = FEW(seed_with_ml=False,population_size=257) 46 | few.term_set = [node('x',loc=0)] 47 | pop = few.init_pop() 48 | offspring = few.lexicase(np.random.rand(257,100)) 49 | assert len(offspring) == 257 50 | 51 | # smaller popsize than tournament size 52 | np.random.seed(42) 53 | few = FEW(seed_with_ml=False,population_size=2) 54 | few.term_set = [node('x',loc=0)] 55 | pop = few.init_pop() 56 | offspring = few.lexicase(np.random.rand(2,100)) 57 | assert len(offspring) == 2; 58 | 59 | def test_epsilon_lexicase_shapes(): 60 | """test_selection.py: epsilon lexicase selection returns correct shape""" 61 | np.random.seed(42) 62 | few = FEW(seed_with_ml=False,population_size=257,lex_size=False) 63 | few.term_set = [node('x',loc=0)] 64 | pop = few.init_pop() 65 | offspring = few.epsilon_lexicase(np.random.rand(257,100),[]) 66 | assert len(offspring) == 257 67 | 68 | # smaller popsize than tournament size 69 | few = FEW(seed_with_ml=False,population_size=2,lex_size=False) 70 | few.term_set = [node('x',loc=0)] 71 | pop = few.init_pop() 72 | offspring = few.epsilon_lexicase(np.random.rand(2,100),[]) 73 | assert len(offspring) == 2; 74 | 75 | def test_lex_size(): 76 | """test_selection.py: lex_size flag on/off""" 77 | 78 | few = FEW(seed_with_ml=False,population_size=257, lex_size=True) 79 | 80 | Fitness_mat = np.random.rand(257,10) 81 | size_mat = np.random.randint(1,100,size=257) 82 | 83 | locs = few.epsilon_lexicase(Fitness_mat,size_mat,num_selections=100, 84 | survival=True) 85 | assert len(locs) == 100 86 | 87 | few = FEW(seed_with_ml=False,population_size=257, lex_size=False) 88 | 89 | Fitness_mat = np.random.rand(257,10) 90 | size_mat = np.random.rand(257,1) 91 | 92 | locs = few.epsilon_lexicase(Fitness_mat,size_mat,num_selections=100, 93 | survival=True) 94 | assert len(locs) == 100 95 | -------------------------------------------------------------------------------- /few/tests/test_few.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright 2016 William La Cava 3 | 4 | This file is part of the FEW library. 5 | 6 | The FEW library is free software: you can redistribute it and/or 7 | modify it under the terms of the GNU General Public License as published by the 8 | Free Software Foundation, either version 3 of the License, or (at your option) 9 | any later version. 10 | 11 | The FEW library is distributed in the hope that it will be useful, but 12 | WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 13 | FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU General Public License for more details. 14 | You should have received a copy of the GNU General Public License along with 15 | the FEW library. If not, see http://www.gnu.org/licenses/. 16 | 17 | """ 18 | # test FEW methods 19 | from few.few import FEW 20 | from sklearn.datasets import load_boston, load_iris 21 | from sklearn.metrics import r2_score 22 | from sklearn.linear_model import LassoLarsCV 23 | from sklearn.model_selection import train_test_split 24 | import pandas as pd 25 | import numpy as np 26 | 27 | def test_few_fit_shapes(): 28 | """test_few.py: fit and predict return correct shapes """ 29 | np.random.seed(202) 30 | # load example data 31 | boston = load_boston() 32 | d = pd.DataFrame(data=boston.data) 33 | print("feature shape:",boston.data.shape) 34 | 35 | learner = FEW(generations=1, population_size=5, 36 | mutation_rate=0.2, crossover_rate=0.8, 37 | ml = LassoLarsCV(), min_depth = 1, max_depth = 3, 38 | sel = 'epsilon_lexicase', tourn_size = 2, 39 | random_state=0, verbosity=0, 40 | disable_update_check=False, fit_choice = 'mse') 41 | 42 | score = learner.fit(boston.data[:300], boston.target[:300]) 43 | print("learner:",learner._best_estimator) 44 | yhat_test = learner.predict(boston.data[300:]) 45 | test_score = learner.score(boston.data[300:],boston.target[300:]) 46 | print("train score:",score,"test score:",test_score, 47 | "test r2:",r2_score(boston.target[300:],yhat_test)) 48 | assert yhat_test.shape == boston.target[300:].shape 49 | 50 | def test_few_with_parents_weight(): 51 | """test_few.py: few performs without error with parent pressure for selection""" 52 | np.random.seed(1006987) 53 | boston = load_boston() 54 | d = np.column_stack((boston.data,boston.target)) 55 | np.random.shuffle(d) 56 | features = d[:,0:-1] 57 | target = d[:,-1] 58 | 59 | print("feature shape:",boston.data.shape) 60 | 61 | learner = FEW(generations=1, population_size=5, 62 | mutation_rate=1, crossover_rate=1, 63 | ml = LassoLarsCV(), min_depth = 1, max_depth = 3, 64 | sel = 'tournament', fit_choice = 'r2',tourn_size = 2, random_state=0, verbosity=0, 65 | disable_update_check=False, weight_parents=True) 66 | 67 | learner.fit(features[:300], target[:300]) 68 | few_score = learner.score(features[:300], target[:300]) 69 | test_score = learner.score(features[300:],target[300:]) 70 | 71 | print("few score:",few_score) 72 | print("few test score:",test_score) 73 | 74 | 75 | def test_few_at_least_as_good_as_default(): 76 | """test_few.py: few performs at least as well as the default ML """ 77 | np.random.seed(1006987) 78 | boston = load_boston() 79 | d = np.column_stack((boston.data,boston.target)) 80 | np.random.shuffle(d) 81 | features = d[:,0:-1] 82 | target = d[:,-1] 83 | 84 | print("feature shape:",boston.data.shape) 85 | 86 | learner = FEW(generations=1, population_size=5, 87 | ml = LassoLarsCV(), min_depth = 1, max_depth = 3, 88 | sel = 'tournament') 89 | 90 | learner.fit(features[:300], target[:300]) 91 | few_score = learner.score(features[:300], target[:300]) 92 | few_test_score = learner.score(features[300:],target[300:]) 93 | 94 | lasso = LassoLarsCV() 95 | lasso.fit(features[:300], target[:300]) 96 | lasso_score = lasso.score(features[:300], target[:300]) 97 | lasso_test_score = lasso.score(features[300:],target[300:]) 98 | print("few score:",few_score,"lasso score:",lasso_score) 99 | print("few test score:",few_test_score,"lasso test score:", 100 | lasso_test_score) 101 | assert round(few_score,8) >= round(lasso_score,8) 102 | 103 | print("lasso 
coefficients:",lasso.coef_) 104 | 105 | # assert False 106 | def test_few_classification(): 107 | """test_few.py: tests default classification settings""" 108 | np.random.seed(42) 109 | X, y = load_iris(return_X_y=True) 110 | train,test = train_test_split(np.arange(X.shape[0]), train_size=0.75, 111 | test_size=0.25) 112 | few = FEW(classification=True,population_size='1x',generations=10) 113 | few.fit(X[train],y[train]) 114 | 115 | print('train score:', few.score(X[train],y[train])) 116 | print('test score:', few.score(X[test],y[test])) 117 | 118 | # test boolean output 119 | few = FEW(classification=True,otype='b',population_size='2x', 120 | seed_with_ml=False,generations=10) 121 | np.random.seed(42) 122 | few.fit(X[train],y[train]) 123 | 124 | print('train score:', few.score(X[train],y[train])) 125 | print('test score:', few.score(X[test],y[test])) 126 | few.print_model() 127 | -------------------------------------------------------------------------------- /few/lib/epsilon_lexicase.h: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright 2017 William La Cava 3 | 4 | This file is part of the FEW library. 5 | 6 | The FEW library is free software: you can redistribute it and/or 7 | modify it under the terms of the GNU General Public License as published by the 8 | Free Software Foundation, either version 3 of the License, or (at your option) 9 | any later version. 10 | 11 | The FEW library is distributed in the hope that it will be useful, but 12 | WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or 13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 14 | You should have received a copy of the GNU General Public License along with 15 | the FEW library. If not, see http://www.gnu.org/licenses/. 
16 | 
17 | */
18 | #include <algorithm>
19 | // #include <iostream>
20 | #include <vector>
21 | #include <numeric>
22 | #include "Eigen/Dense"
23 | #include <random>
24 | #include <cstdlib>
25 | #include <iterator>
26 | 
27 | using namespace Eigen;
28 | using namespace std;
29 | 
30 | /* DEFINE Custom Type Names to make code more readable
31 | ExtMat : 2-dim matrix/array externally defined (in Python)
32 | */
33 | typedef Map<ArrayXXd> ExtMat;
34 | typedef Map<ArrayXi> ExtVec;
35 | // typedef ArrayXd Vec;
36 | 
37 | double median(const ArrayXd& v) {
38 |     // instantiate a vector
39 |     vector<double> x(v.size());
40 |     x.assign(v.data(),v.data()+v.size());
41 |     // middle element
42 |     size_t n = x.size()/2;
43 |     // sort nth element of array
44 |     nth_element(x.begin(),x.begin()+n,x.end());
45 |     // if evenly sized, return average of middle two elements
46 |     if (x.size() % 2 == 0) {
47 |         nth_element(x.begin(),x.begin()+n-1,x.end());
48 |         return (x[n] + x[n-1]) / 2;
49 |     }
50 |     // otherwise return middle element
51 |     else
52 |         return x[n];
53 | }
54 | double mad(const ArrayXd& x) {
55 |     // returns median absolute deviation (MAD)
56 |     // get median of x
57 |     double x_median = median(x);
58 |     //calculate absolute deviation from median
59 |     ArrayXd dev(x.size());
60 |     for (int i =0; i < x.size(); ++i)
61 |         dev(i) = abs(x(i) - x_median);
62 |     // return median of the absolute deviation
63 |     return median(dev);
64 | }
65 | // random functions
66 | 
67 | 
68 | template<typename Iter, typename RandomGenerator>
69 | Iter select_randomly(Iter start, Iter end, RandomGenerator& g) {
70 |     uniform_int_distribution<> dis(0, distance(start, end) - 1);
71 |     advance(start, dis(g));
72 |     return start;
73 | }
74 | 
75 | // template<typename Iter>
76 | // Iter select_randomly(Iter start, Iter end) {
77 | //     static random_device rd;
78 | //     static mt19937 gen(rd());
79 | //     return select_randomly(start, end, gen);
80 | // }
81 | // random number generator
82 | static random_device rd;
83 | static mt19937 gen(rd());
84 | //extern "C"
85 | void epsilon_lexicase(const ExtMat & F, int n, int d,
86 |                       int num_selections, ExtVec& locs, bool lex_size, ExtVec& sizes)
87 | {
88 |     // training cases
89 |     // ExtMat T (F, n, d);
90 |     // cout << "F size: " << F.rows() << "x" << F.cols() << "\n";
91 |     // cout << "locs size: " << locs.size() << "\n";
92 |     // parent locations
93 |     // ExtVec L (locs, num_selections);
94 |     // get epsilon via median absolute deviations
95 |     ArrayXd epsilon(d);
96 |     //cout << "calculating epsilon\n";
97 |     //for columns of T, calculate epsilon
98 |     for (int i = 0; i < d; ++i)
99 |         epsilon(i) = mad(F.col(i));
100 |     // individual locations
101 |     vector<int> ind_locs;
102 |     if(lex_size){
103 |         //randomly select a size from sizes
104 |         int max_index = sizes.size();
105 |         int random_index = rand() % max_index;
106 | 
107 |         // individual locations
108 |         int j=0;
109 |         for(int i=0;i < n; ++i){
110 |             // keep individuals whose size does not exceed the sampled size
111 |             if (sizes(i) <= sizes(random_index)){
112 |                 ind_locs.push_back(i);
113 |                 ++j;
114 |             }
115 |         }
116 |     }
117 |     else{
118 |         // all individuals are candidates
119 |         ind_locs.resize(n);
120 |         iota(ind_locs.begin(),ind_locs.end(),0);
121 |     }
122 |     vector<int> winner;
123 |     for (int i = 0; i < num_selections; ++i){
124 |         // perform selection
125 | 
126 |         // candidate locations
127 |         vector<int> can_locs = ind_locs;
128 |         // set cases
129 |         vector<int> cases(d);
130 |         iota(cases.begin(),cases.end(),0);
131 |         // shuffle cases
132 |         random_shuffle(cases.begin(),cases.end());
133 |         //main loop
134 |         while(can_locs.size()>1 && cases.size() > 0){
135 |             // winner pool
136 |             winner.resize(0);
137 |             // minimum error on case
138 |             double minfit;
139 |             for (int j = 0; j < can_locs.size(); ++j){
140 |                 if (j == 0 || F(can_locs[j],cases.back()) < minfit)
141 |                     minfit = F(can_locs[j],cases.back());
142 |             }
143 |             // keep the candidates within epsilon of the best error on this case
144 |             for (int j = 0; j < can_locs.size(); ++j){
145 |                 if (F(can_locs[j],cases.back()) <= minfit + epsilon(cases.back()))
146 |                     winner.push_back(can_locs[j]);
147 |             }
148 |             can_locs = winner;
149 |             cases.pop_back();
150 |         }
151 |         // pick a parent at random from the remaining pool
152 |         locs(i) = *select_randomly(can_locs.begin(),can_locs.end(),gen);
153 |     }
154 | }

--------------------------------------------------------------------------------
/few/selection.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Copyright 2016 William La Cava
4 | 
5 | license: GNU/GPLv3
6 | 
7 | """
8 | import numpy as np
9 | from sklearn.metrics import r2_score
10 | import copy
11 | import pdb
12 | from few_lib import ep_lex
13 | 
14 | class SurvivalMixin(object):
15 |     """Defines survival methods for choosing the next population."""
16 |     def survival(self, parents, offspring, elite=None, elite_index=None,
17 |                  X_parents=None, X_offspring=None, F=None, F_offspring=None):
18 |         """routes to the chosen survival method and returns the survivors"""
19 |         if self.sel == 'tournament':
20 |             survivors, survivor_index = self.tournament(
21 |                 parents + offspring, self.tourn_size,
22 |                 num_selections=len(parents))
23 |         elif self.sel == 'lexicase':
24 |             survivor_index = self.lexicase(np.vstack((F, F_offspring)),
25 |                                            num_selections=len(parents),
26 |                                            survival=True)
27 |             survivors = [(parents + offspring)[s] for s in survivor_index]
28 |         elif self.sel == 'epsilon_lexicase':
29 |             sizes = [len(x.stack) for x in parents + offspring]
30 |             survivor_index = self.epsilon_lexicase(np.vstack((F, F_offspring)),
31 |                                                    sizes,
32 |                                                    num_selections=len(parents),
33 |                                                    survival=True)
34 |             survivors = [(parents + offspring)[s] for s in survivor_index]
35 |         elif self.sel == 'deterministic_crowding':
36 |             survivors, survivor_index = self.deterministic_crowding(
37 |                 parents, offspring, X_parents, X_offspring)
38 | 
39 |         # elitism
40 |         if self.elitism:
41 |             # check whether the elite individual survived; fitness is
42 |             # minimized, so the elite has the lowest fitness of the parents
43 |             if min([x.fitness for x in survivors]) > elite.fitness:
44 |                 # if the elite individual did not survive and elitism is on, replace worst individual with elite
45 |                 rep_index = np.argmax([x.fitness for x in survivors])
46 |                 survivors[rep_index] = elite
47 |                 survivor_index[rep_index] = elite_index
48 |                 # return survivors
49 | 
50 |         return survivors, survivor_index
51 | 
52 |     def tournament(self, individuals, tourn_size, num_selections=None):
53 |         """conducts tournament selection of size tourn_size"""
54 |         winners = []
55 |         locs = []
56 |         if 
num_selections is None: 57 | num_selections = len(individuals) 58 | 59 | for i in np.arange(num_selections): 60 | # sample pool with replacement 61 | pool_i = self.random_state.choice(len(individuals),size=tourn_size) 62 | pool = [] 63 | for i in pool_i: 64 | pool.append(np.mean(individuals[i].fitness)) 65 | # winner 66 | locs.append(pool_i[np.argmin(pool)]) 67 | winners.append(copy.deepcopy(individuals[locs[-1]])) 68 | 69 | return winners,locs 70 | 71 | def lexicase(self,F, num_selections=None, survival = False): 72 | """conducts lexicase selection for de-aggregated fitness vectors""" 73 | if num_selections is None: 74 | num_selections = F.shape[0] 75 | winners = [] 76 | locs = [] 77 | 78 | individual_locs = np.arange(F.shape[0]) 79 | 80 | for i in np.arange(num_selections): 81 | can_locs = individual_locs 82 | cases = list(np.arange(F.shape[1])) 83 | self.random_state.shuffle(cases) 84 | # pdb.set_trace() 85 | while len(cases) > 0 and len(can_locs) > 1: 86 | # get best fitness for case among candidates 87 | best_val_for_case = np.min(F[can_locs,cases[0]]) 88 | # filter individuals without an elite fitness on this case 89 | can_locs = [l for l in can_locs if F[l,cases[0]] <= best_val_for_case ] 90 | cases.pop(0) 91 | 92 | choice = self.random_state.randint(len(can_locs)) 93 | locs.append(can_locs[choice]) 94 | if survival: # filter out winners from remaining selection pool 95 | individual_locs = [i for i in individual_locs if i != can_locs[choice]] 96 | 97 | while len(locs) < num_selections: 98 | locs.append(individual_locs[0]) 99 | 100 | return locs 101 | 102 | # for i in np.arange(num_selections): 103 | # print('num inds:',len(individuals)) 104 | # candidates = individuals 105 | # can_locs = range(len(individuals)) 106 | # cases = list(np.arange(len(individuals[0].fitness_vec))) 107 | # self.random_state.shuffle(cases) 108 | # # pdb.set_trace() 109 | # while len(cases) > 0 and len(candidates) > 1: 110 | # # get best fitness for case among candidates 111 | # # print("candidates:",stacks_2_eqns(candidates),"locations:",can_locs) 112 | # # print("fitnesses for case "+str(cases[0])+":",[x.fitness_vec[cases[0]] for x in candidates]) 113 | # best_val_for_case = min([x.fitness_vec[cases[0]] for x in candidates]) 114 | # # print("best_val_for_case:",best_val_for_case) 115 | # # filter individuals without an elite fitness on this case 116 | # # tmp_c,tmp_l = zip(*((x,l) for x,l in zip(candidates,can_locs) if x.fitness_vec[cases[0]] == best_val_for_case)) 117 | # candidates,can_locs = zip(*((x,l) for x,l in zip(candidates,can_locs) if x.fitness_vec[cases[0]] == best_val_for_case)) 118 | # cases.pop(0) 119 | # 120 | # choice = self.random_state.randint(len(candidates)) 121 | # winners.append(copy.deepcopy(candidates[choice])) 122 | # locs.append(can_locs[choice]) 123 | # if survival: # filter out winners from remaining selection pool 124 | # individuals = list(filter(lambda x: x.stack != candidates[choice].stack, individuals)) 125 | # 126 | # return winners, locs 127 | 128 | def epsilon_lexicase(self, F, sizes, num_selections=None, survival = False): 129 | """conducts epsilon lexicase selection for de-aggregated fitness vectors""" 130 | # pdb.set_trace() 131 | if num_selections is None: 132 | num_selections = F.shape[0] 133 | 134 | if self.c: # use c library 135 | # define c types 136 | locs = np.empty(num_selections,dtype='int32',order='F') 137 | # self.lib.epsilon_lexicase(F,F.shape[0],F.shape[1],num_selections,locs) 138 | if self.lex_size: 139 | 
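# ep_lex is the Cython-wrapped C++ routine built from few/lib/few_lib.pyx;
# it fills the preallocated int32 array `locs` in place with the indices of
# the selected individuals. `sizes` is only consulted when lex_size is True.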
ep_lex(F,F.shape[0],F.shape[1],num_selections,locs,self.lex_size,np.array(sizes)) 140 | else: 141 | ep_lex(F,F.shape[0],F.shape[1],num_selections,locs,self.lex_size,np.array([])) 142 | return locs 143 | else: # use python version 144 | 145 | locs = [] 146 | individual_locs = np.arange(F.shape[0]) 147 | # calculate epsilon thresholds based on median absolute deviation (MAD) 148 | mad_for_case = np.array([self.mad(f) for f in F.transpose()]) 149 | for i in np.arange(num_selections): 150 | 151 | can_locs = individual_locs 152 | cases = list(np.arange(F.shape[1])) 153 | self.random_state.shuffle(cases) 154 | # pdb.set_trace() 155 | while len(cases) > 0 and len(can_locs) > 1: 156 | # get best fitness for case among candidates 157 | best_val_for_case = np.min(F[can_locs,cases[0]]) 158 | # filter individuals without an elite fitness on this case 159 | can_locs = [l for l in can_locs if F[l,cases[0]] <= best_val_for_case + mad_for_case[cases[0]]] 160 | cases.pop(0) 161 | 162 | choice = self.random_state.randint(len(can_locs)) 163 | locs.append(can_locs[choice]) 164 | if survival: # filter out winners from remaining selection pool 165 | individual_locs = [i for i in individual_locs if i != can_locs[choice]] 166 | 167 | while len(locs) < num_selections: 168 | locs.append(individual_locs[0]) 169 | 170 | return locs 171 | 172 | 173 | def mad(self,x, axis=None): 174 | """median absolute deviation statistic""" 175 | return np.median(np.abs(x - np.median(x, axis)), axis) 176 | 177 | def deterministic_crowding(self,parents,offspring,X_parents,X_offspring): 178 | """deterministic crowding implementation (for non-steady state). 179 | offspring compete against the parent they are most similar to, here defined as 180 | the parent they are most correlated with. 181 | the offspring only replace their parent if they are more fit. 
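        Similarity is measured with r2_score between the transformed feature
        outputs of parents and offspring, so each crossover child competes
        against the parent whose engineered feature it reproduces most closely.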
182 | """ 183 | # get children locations produced from crossover 184 | cross_children = [i for i,o in enumerate(offspring) if len(o.parentid) > 1] 185 | # order offspring so that they are lined up with their most similar parent 186 | for c1,c2 in zip(cross_children[::2], cross_children[1::2]): 187 | # get parent locations 188 | p_loc = [j for j,p in enumerate(parents) if p.id in offspring[c1].parentid] 189 | if len(p_loc) != 2: 190 | continue 191 | # if child is more correlated with its non-root parent 192 | if r2_score(X_parents[p_loc[0]],X_offspring[c1]) + r2_score(X_parents[p_loc[1]],X_offspring[c2]) < r2_score(X_parents[p_loc[0]],X_offspring[c2]) + r2_score(X_parents[p_loc[1]],X_offspring[c1]): 193 | # swap offspring 194 | offspring[c1],offspring[c2] = offspring[c2],offspring[c1] 195 | 196 | survivors = [] 197 | survivor_index = [] 198 | 199 | for i,(p,o) in enumerate(zip(parents,offspring)): 200 | if p.fitness >= o.fitness: 201 | survivors.append(copy.deepcopy(p)) 202 | survivor_index.append(i) 203 | else: 204 | survivors.append(copy.deepcopy(o)) 205 | survivor_index.append(i+len(parents)) 206 | 207 | # return survivors along with their indices 208 | return survivors, survivor_index 209 | -------------------------------------------------------------------------------- /few/variation.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Copyright 2016 William La Cava 4 | 5 | license: GNU/GPLv3 6 | 7 | """ 8 | import numpy as np 9 | from .population import Ind 10 | from itertools import accumulate 11 | import pdb 12 | import copy 13 | import itertools as it 14 | import uuid 15 | 16 | # from few.tests.test_population import is_valid_program 17 | class VariationMixin(object): 18 | """ Defines crossover and mutation operator methods.""" 19 | def variation(self,parents): 20 | """performs variation operators on parents.""" 21 | # downselect to features that are important 22 | if (self.valid(parents) and 23 | self.ml_type != 'SVC' and self.ml_type != 'SVR'): 24 | # this is needed because svm has a bug that throws valueerror on 25 | # attribute check 26 | if hasattr(self.pipeline.named_steps['ml'],'coef_'): 27 | # for l1 regularization, filter individuals with 0 coefficients 28 | if self.weight_parents: 29 | weights = self.pipeline.named_steps['ml'].coef_ 30 | if len(weights.shape)>1: # handle multi-coefficient models 31 | weights = [np.mean(abs(c)) for c in weights.transpose()] 32 | # softmax transformation of the weights 33 | weights = np.exp(weights)/np.sum(np.exp(weights)) 34 | offspring = copy.deepcopy( 35 | list(self.random_state.choice(self.valid(parents), 36 | self.population_size, p=weights))) 37 | else: 38 | offspring = copy.deepcopy(list( 39 | x for i,x in zip(self.pipeline.named_steps['ml'].coef_, 40 | self.valid(parents)) if (i != 0).any())) 41 | elif hasattr(self.pipeline.named_steps['ml'],'feature_importances_'): 42 | # for tree methods, filter our individuals with 0 feature importance 43 | if self.weight_parents: 44 | weights = self.pipeline.named_steps['ml'].feature_importances_ 45 | # softmax transformation of the weights 46 | weights = np.exp(weights)/np.sum(np.exp(weights)) 47 | offspring = copy.deepcopy(list( 48 | self.random_state.choice(self.valid(parents), 49 | self.population_size, p=weights))) 50 | else: 51 | offspring = copy.deepcopy(list( 52 | x for i,x in zip(self.pipeline.named_steps['ml'].feature_importances_, 53 | self.valid(parents)) if i != 0)) 54 | else: 55 | offspring = 
56 |         else:
57 |             offspring = copy.deepcopy(self.valid(parents))
58 | 
59 |         if self.elitism:  # keep a copy of the elite individual
60 |             elite_index = np.argmin([x.fitness for x in parents])
61 |             elite = copy.deepcopy(parents[elite_index])
62 | 
63 |         # Apply crossover and mutation on the offspring
64 |         if self.verbosity > 2: print("variation...")
65 |         for child1, child2 in it.zip_longest(offspring[::2], offspring[1::2],
66 |                                              fillvalue=None):
67 | 
68 |             if self.random_state.rand() < self.crossover_rate and child2 is not None:
69 |                 # crossover
70 |                 self.cross(child1.stack, child2.stack, self.max_depth)
71 |                 # update ids: record both parents' ids before assigning new ones
72 |                 child1.parentid = [child1.id, child2.id]
73 |                 child2.parentid = [child1.id, child2.id]
74 |                 child1.id = uuid.uuid4()
75 |                 child2.id = uuid.uuid4()
76 |                 # set default fitness
77 |                 child1.fitness = -1
78 |                 child2.fitness = -1
79 |             elif child2 is None:
80 |                 # single mutation
81 |                 self.mutate(child1.stack, self.func_set, self.term_set)
82 |                 # update ids
83 |                 child1.parentid = [child1.id]
84 |                 child1.id = uuid.uuid4()
85 |                 # set default fitness
86 |                 child1.fitness = -1
87 |             else:
88 |                 # double mutation
89 |                 self.mutate(child1.stack, self.func_set, self.term_set)
90 |                 self.mutate(child2.stack, self.func_set, self.term_set)
91 |                 # update ids
92 |                 child1.parentid = [child1.id]
93 |                 child1.id = uuid.uuid4()
94 |                 child2.parentid = [child2.id]
95 |                 child2.id = uuid.uuid4()
96 |                 # set default fitness
97 |                 child1.fitness = -1
98 |                 child2.fitness = -1
99 | 
100 |         while len(offspring) < self.population_size:
101 |             # make new offspring to replace the invalid ones
102 |             offspring.append(Ind())
103 |             self.make_program(offspring[-1].stack, self.func_set, self.term_set,
104 |                               self.random_state.randint(self.min_depth,
105 |                                                         self.max_depth+1),
106 |                               self.otype)
107 |             offspring[-1].stack = list(reversed(offspring[-1].stack))
108 | 
109 |         return offspring, elite, elite_index
110 | 
111 |     def cross(self, p_i, p_j, max_depth=2):
112 |         """subtree-like swap crossover between programs p_i and p_j."""
113 |         # only choose crossover points for out_types available in both programs
114 |         # pdb.set_trace()
115 |         # determine possible out_types
116 |         types_p_i = [p.out_type for p in p_i]
117 |         types_p_j = [p.out_type for p in p_j]
118 |         types = set(types_p_i).intersection(types_p_j)
119 | 
120 |         # grab subtree of p_i
121 |         p_i_sub = [i for i, n in enumerate(p_i) if n.out_type in types]
122 |         x_i_end = self.random_state.choice(p_i_sub)
123 |         x_i_begin = x_i_end
124 |         arity_sum = p_i[x_i_end].arity[p_i[x_i_end].in_type]
125 |         # print("x_i_end:",x_i_end)
126 |         # i = 0
127 |         while (arity_sum > 0):  # and i < 1000:
128 |             if x_i_begin == 0:
129 |                 print("arity_sum:", arity_sum, "x_i_begin:", x_i_begin, "x_i_end:", x_i_end)
130 |             x_i_begin -= 1
131 |             arity_sum += p_i[x_i_begin].arity[p_i[x_i_begin].in_type]-1
132 |             # i += 1
133 |         # if i == 1000:
151 | # print("in variation") 152 | # pdb.set_trace() 153 | #swap subtrees 154 | tmpi = p_i[:] 155 | tmpj = p_j[:] 156 | tmpi[x_i_begin:x_i_end+1:],tmpj[x_j_begin:x_j_end+1:] = \ 157 | tmpj[x_j_begin:x_j_end+1:],tmpi[x_i_begin:x_i_end+1:] 158 | 159 | if not self.is_valid_program(p_i) or not self.is_valid_program(p_j): 160 | # pdb.set_trace() 161 | print("parent 1:",p_i,"x_i_begin:",x_i_begin,"x_i_end:",x_i_end) 162 | print("parent 2:",p_j,"x_j_begin:",x_j_begin,"x_j_end:",x_j_end) 163 | print("child 1:",tmpi) 164 | print("child 2:",tmpj) 165 | raise ValueError('Crossover produced an invalid program.') 166 | 167 | # size check, then assignment 168 | if len(tmpi) <= 2**max_depth-1: 169 | p_i[:] = tmpi 170 | if len(tmpj) <= 2**max_depth-1: 171 | p_j[:] = tmpj 172 | 173 | 174 | def mutate(self,p_i,func_set,term_set): #, max_depth=2 175 | """point mutation, addition, removal""" 176 | self.point_mutate(p_i,func_set,term_set) 177 | 178 | def point_mutate(self,p_i,func_set,term_set): 179 | """point mutation on individual p_i""" 180 | # point mutation 181 | x = self.random_state.randint(len(p_i)) 182 | arity = p_i[x].arity[p_i[x].in_type] 183 | # find eligible replacements based on arity and type 184 | reps = [n for n in func_set+term_set 185 | if n.arity[n.in_type]==arity and n.out_type==p_i[x].out_type 186 | and n.in_type==p_i[x].in_type] 187 | 188 | tmp = reps[self.random_state.randint(len(reps))] 189 | tmp_p = p_i[:] 190 | p_i[x] = tmp 191 | if not self.is_valid_program(p_i): 192 | print("old:",tmp_p) 193 | print("new:",p_i) 194 | raise ValueError('Mutation produced an invalid program.') 195 | # 196 | # def add_mutate(p_i,func_set,term_set, max_depth=2): 197 | # """ mutation that adds operation to program""" 198 | # #choose node. move it down, pick an operator to put before it, with another leaf if 199 | # #new operator arity requires it. 200 | # #make sure size requirements are not invalidated (if they are, discard changes) 201 | # 202 | # def sub_mutate(p_i,func_set,term_set, max_depth=2): 203 | # """ mutation that removes operation from program""" 204 | # #choose a node with arity>0. replace it and its subtree with a node with lower arity. 205 | 206 | def is_valid_program(self,p): 207 | """checks whether program p makes a syntactically valid tree. 208 | 209 | checks that the accumulated program length is always greater than the 210 | accumulated arities, indicating that the appropriate number of arguments is 211 | alway present for functions. It then checks that the sum of arties +1 212 | exactly equals the length of the stack, indicating that there are no 213 | missing arguments. 
214 | """ 215 | # print("p:",p) 216 | arities = list(a.arity[a.in_type] for a in p) 217 | accu_arities = list(accumulate(arities)) 218 | accu_len = list(np.arange(len(p))+1) 219 | check = list(a < b for a,b in zip(accu_arities,accu_len)) 220 | # print("accu_arities:",accu_arities) 221 | # print("accu_len:",accu_len) 222 | # print("accu_arities < accu_len:",accu_arities0 224 | -------------------------------------------------------------------------------- /docs/data/d_yacht.txt: -------------------------------------------------------------------------------- 1 | x1 x2 x3 x4 x5 x6 label 2 | -2.3 0.568 4.78 3.99 3.17 0.125 0.11 3 | -2.3 0.568 4.78 3.99 3.17 0.150 0.27 4 | -2.3 0.568 4.78 3.99 3.17 0.175 0.47 5 | -2.3 0.568 4.78 3.99 3.17 0.200 0.78 6 | -2.3 0.568 4.78 3.99 3.17 0.225 1.18 7 | -2.3 0.568 4.78 3.99 3.17 0.250 1.82 8 | -2.3 0.568 4.78 3.99 3.17 0.275 2.61 9 | -2.3 0.568 4.78 3.99 3.17 0.300 3.76 10 | -2.3 0.568 4.78 3.99 3.17 0.325 4.99 11 | -2.3 0.568 4.78 3.99 3.17 0.350 7.16 12 | -2.3 0.568 4.78 3.99 3.17 0.375 11.93 13 | -2.3 0.568 4.78 3.99 3.17 0.400 20.11 14 | -2.3 0.568 4.78 3.99 3.17 0.425 32.75 15 | -2.3 0.568 4.78 3.99 3.17 0.450 49.49 16 | -2.3 0.569 4.78 3.04 3.64 0.125 0.04 17 | -2.3 0.569 4.78 3.04 3.64 0.150 0.17 18 | -2.3 0.569 4.78 3.04 3.64 0.175 0.37 19 | -2.3 0.569 4.78 3.04 3.64 0.200 0.66 20 | -2.3 0.569 4.78 3.04 3.64 0.225 1.06 21 | -2.3 0.569 4.78 3.04 3.64 0.250 1.59 22 | -2.3 0.569 4.78 3.04 3.64 0.275 2.33 23 | -2.3 0.569 4.78 3.04 3.64 0.300 3.29 24 | -2.3 0.569 4.78 3.04 3.64 0.325 4.61 25 | -2.3 0.569 4.78 3.04 3.64 0.350 7.11 26 | -2.3 0.569 4.78 3.04 3.64 0.375 11.99 27 | -2.3 0.569 4.78 3.04 3.64 0.400 21.09 28 | -2.3 0.569 4.78 3.04 3.64 0.425 35.01 29 | -2.3 0.569 4.78 3.04 3.64 0.450 51.80 30 | -2.3 0.565 4.78 5.35 2.76 0.125 0.09 31 | -2.3 0.565 4.78 5.35 2.76 0.150 0.29 32 | -2.3 0.565 4.78 5.35 2.76 0.175 0.56 33 | -2.3 0.565 4.78 5.35 2.76 0.200 0.86 34 | -2.3 0.565 4.78 5.35 2.76 0.225 1.31 35 | -2.3 0.565 4.78 5.35 2.76 0.250 1.99 36 | -2.3 0.565 4.78 5.35 2.76 0.275 2.94 37 | -2.3 0.565 4.78 5.35 2.76 0.300 4.21 38 | -2.3 0.565 4.78 5.35 2.76 0.325 5.54 39 | -2.3 0.565 4.78 5.35 2.76 0.350 8.25 40 | -2.3 0.565 4.78 5.35 2.76 0.375 13.08 41 | -2.3 0.565 4.78 5.35 2.76 0.400 21.40 42 | -2.3 0.565 4.78 5.35 2.76 0.425 33.14 43 | -2.3 0.565 4.78 5.35 2.76 0.450 50.14 44 | -2.3 0.564 5.10 3.95 3.53 0.125 0.20 45 | -2.3 0.564 5.10 3.95 3.53 0.150 0.35 46 | -2.3 0.564 5.10 3.95 3.53 0.175 0.65 47 | -2.3 0.564 5.10 3.95 3.53 0.200 0.93 48 | -2.3 0.564 5.10 3.95 3.53 0.225 1.37 49 | -2.3 0.564 5.10 3.95 3.53 0.250 1.97 50 | -2.3 0.564 5.10 3.95 3.53 0.275 2.83 51 | -2.3 0.564 5.10 3.95 3.53 0.300 3.99 52 | -2.3 0.564 5.10 3.95 3.53 0.325 5.19 53 | -2.3 0.564 5.10 3.95 3.53 0.350 8.03 54 | -2.3 0.564 5.10 3.95 3.53 0.375 12.86 55 | -2.3 0.564 5.10 3.95 3.53 0.400 21.51 56 | -2.3 0.564 5.10 3.95 3.53 0.425 33.97 57 | -2.3 0.564 5.10 3.95 3.53 0.450 50.36 58 | -2.4 0.574 4.36 3.96 2.76 0.125 0.20 59 | -2.4 0.574 4.36 3.96 2.76 0.150 0.35 60 | -2.4 0.574 4.36 3.96 2.76 0.175 0.65 61 | -2.4 0.574 4.36 3.96 2.76 0.200 0.93 62 | -2.4 0.574 4.36 3.96 2.76 0.225 1.37 63 | -2.4 0.574 4.36 3.96 2.76 0.250 1.97 64 | -2.4 0.574 4.36 3.96 2.76 0.275 2.83 65 | -2.4 0.574 4.36 3.96 2.76 0.300 3.99 66 | -2.4 0.574 4.36 3.96 2.76 0.325 5.19 67 | -2.4 0.574 4.36 3.96 2.76 0.350 8.03 68 | -2.4 0.574 4.36 3.96 2.76 0.375 12.86 69 | -2.4 0.574 4.36 3.96 2.76 0.400 21.51 70 | -2.4 0.574 4.36 3.96 2.76 0.425 33.97 71 | -2.4 0.574 4.36 3.96 2.76 0.450 50.36 72 | -2.4 
0.568 4.34 2.98 3.15 0.125 0.12 73 | -2.4 0.568 4.34 2.98 3.15 0.150 0.26 74 | -2.4 0.568 4.34 2.98 3.15 0.175 0.43 75 | -2.4 0.568 4.34 2.98 3.15 0.200 0.69 76 | -2.4 0.568 4.34 2.98 3.15 0.225 1.09 77 | -2.4 0.568 4.34 2.98 3.15 0.250 1.67 78 | -2.4 0.568 4.34 2.98 3.15 0.275 2.46 79 | -2.4 0.568 4.34 2.98 3.15 0.300 3.43 80 | -2.4 0.568 4.34 2.98 3.15 0.325 4.62 81 | -2.4 0.568 4.34 2.98 3.15 0.350 6.86 82 | -2.4 0.568 4.34 2.98 3.15 0.375 11.56 83 | -2.4 0.568 4.34 2.98 3.15 0.400 20.63 84 | -2.4 0.568 4.34 2.98 3.15 0.425 34.50 85 | -2.4 0.568 4.34 2.98 3.15 0.450 54.23 86 | -2.3 0.562 5.14 4.95 3.17 0.125 0.28 87 | -2.3 0.562 5.14 4.95 3.17 0.150 0.44 88 | -2.3 0.562 5.14 4.95 3.17 0.175 0.70 89 | -2.3 0.562 5.14 4.95 3.17 0.200 1.07 90 | -2.3 0.562 5.14 4.95 3.17 0.225 1.57 91 | -2.3 0.562 5.14 4.95 3.17 0.250 2.23 92 | -2.3 0.562 5.14 4.95 3.17 0.275 3.09 93 | -2.3 0.562 5.14 4.95 3.17 0.300 4.09 94 | -2.3 0.562 5.14 4.95 3.17 0.325 5.82 95 | -2.3 0.562 5.14 4.95 3.17 0.350 8.28 96 | -2.3 0.562 5.14 4.95 3.17 0.375 12.80 97 | -2.3 0.562 5.14 4.95 3.17 0.400 20.41 98 | -2.3 0.562 5.14 4.95 3.17 0.425 32.34 99 | -2.3 0.562 5.14 4.95 3.17 0.450 47.29 100 | -2.4 0.585 4.78 3.84 3.32 0.125 0.20 101 | -2.4 0.585 4.78 3.84 3.32 0.150 0.38 102 | -2.4 0.585 4.78 3.84 3.32 0.175 0.64 103 | -2.4 0.585 4.78 3.84 3.32 0.200 0.97 104 | -2.4 0.585 4.78 3.84 3.32 0.225 1.36 105 | -2.4 0.585 4.78 3.84 3.32 0.250 1.98 106 | -2.4 0.585 4.78 3.84 3.32 0.275 2.91 107 | -2.4 0.585 4.78 3.84 3.32 0.300 4.35 108 | -2.4 0.585 4.78 3.84 3.32 0.325 5.79 109 | -2.4 0.585 4.78 3.84 3.32 0.350 8.04 110 | -2.4 0.585 4.78 3.84 3.32 0.375 12.15 111 | -2.4 0.585 4.78 3.84 3.32 0.400 19.18 112 | -2.4 0.585 4.78 3.84 3.32 0.425 30.09 113 | -2.4 0.585 4.78 3.84 3.32 0.450 44.38 114 | -2.2 0.546 4.78 4.13 3.07 0.125 0.15 115 | -2.2 0.546 4.78 4.13 3.07 0.150 0.32 116 | -2.2 0.546 4.78 4.13 3.07 0.175 0.55 117 | -2.2 0.546 4.78 4.13 3.07 0.200 0.86 118 | -2.2 0.546 4.78 4.13 3.07 0.225 1.24 119 | -2.2 0.546 4.78 4.13 3.07 0.250 1.76 120 | -2.2 0.546 4.78 4.13 3.07 0.275 2.49 121 | -2.2 0.546 4.78 4.13 3.07 0.300 3.45 122 | -2.2 0.546 4.78 4.13 3.07 0.325 4.83 123 | -2.2 0.546 4.78 4.13 3.07 0.350 7.37 124 | -2.2 0.546 4.78 4.13 3.07 0.375 12.76 125 | -2.2 0.546 4.78 4.13 3.07 0.400 21.99 126 | -2.2 0.546 4.78 4.13 3.07 0.425 35.64 127 | -2.2 0.546 4.78 4.13 3.07 0.450 53.07 128 | 0.0 0.565 4.77 3.99 3.15 0.125 0.11 129 | 0.0 0.565 4.77 3.99 3.15 0.150 0.24 130 | 0.0 0.565 4.77 3.99 3.15 0.175 0.49 131 | 0.0 0.565 4.77 3.99 3.15 0.200 0.79 132 | 0.0 0.565 4.77 3.99 3.15 0.225 1.28 133 | 0.0 0.565 4.77 3.99 3.15 0.250 1.96 134 | 0.0 0.565 4.77 3.99 3.15 0.275 2.88 135 | 0.0 0.565 4.77 3.99 3.15 0.300 4.14 136 | 0.0 0.565 4.77 3.99 3.15 0.325 5.96 137 | 0.0 0.565 4.77 3.99 3.15 0.350 9.07 138 | 0.0 0.565 4.77 3.99 3.15 0.375 14.93 139 | 0.0 0.565 4.77 3.99 3.15 0.400 24.13 140 | 0.0 0.565 4.77 3.99 3.15 0.425 38.12 141 | 0.0 0.565 4.77 3.99 3.15 0.450 55.44 142 | -5.0 0.565 4.77 3.99 3.15 0.125 0.07 143 | -5.0 0.565 4.77 3.99 3.15 0.150 0.18 144 | -5.0 0.565 4.77 3.99 3.15 0.175 0.40 145 | -5.0 0.565 4.77 3.99 3.15 0.200 0.70 146 | -5.0 0.565 4.77 3.99 3.15 0.225 1.14 147 | -5.0 0.565 4.77 3.99 3.15 0.250 1.83 148 | -5.0 0.565 4.77 3.99 3.15 0.275 2.77 149 | -5.0 0.565 4.77 3.99 3.15 0.300 4.12 150 | -5.0 0.565 4.77 3.99 3.15 0.325 5.41 151 | -5.0 0.565 4.77 3.99 3.15 0.350 7.87 152 | -5.0 0.565 4.77 3.99 3.15 0.375 12.71 153 | -5.0 0.565 4.77 3.99 3.15 0.400 21.02 154 | -5.0 0.565 4.77 3.99 3.15 0.425 34.58 155 | -5.0 
0.565 4.77 3.99 3.15 0.450 51.77 156 | 0.0 0.565 5.10 3.94 3.51 0.125 0.08 157 | 0.0 0.565 5.10 3.94 3.51 0.150 0.26 158 | 0.0 0.565 5.10 3.94 3.51 0.175 0.50 159 | 0.0 0.565 5.10 3.94 3.51 0.200 0.83 160 | 0.0 0.565 5.10 3.94 3.51 0.225 1.28 161 | 0.0 0.565 5.10 3.94 3.51 0.250 1.90 162 | 0.0 0.565 5.10 3.94 3.51 0.275 2.68 163 | 0.0 0.565 5.10 3.94 3.51 0.300 3.76 164 | 0.0 0.565 5.10 3.94 3.51 0.325 5.57 165 | 0.0 0.565 5.10 3.94 3.51 0.350 8.76 166 | 0.0 0.565 5.10 3.94 3.51 0.375 14.24 167 | 0.0 0.565 5.10 3.94 3.51 0.400 23.05 168 | 0.0 0.565 5.10 3.94 3.51 0.425 35.46 169 | 0.0 0.565 5.10 3.94 3.51 0.450 51.99 170 | -5.0 0.565 5.10 3.94 3.51 0.125 0.08 171 | -5.0 0.565 5.10 3.94 3.51 0.150 0.24 172 | -5.0 0.565 5.10 3.94 3.51 0.175 0.45 173 | -5.0 0.565 5.10 3.94 3.51 0.200 0.77 174 | -5.0 0.565 5.10 3.94 3.51 0.225 1.19 175 | -5.0 0.565 5.10 3.94 3.51 0.250 1.76 176 | -5.0 0.565 5.10 3.94 3.51 0.275 2.59 177 | -5.0 0.565 5.10 3.94 3.51 0.300 3.85 178 | -5.0 0.565 5.10 3.94 3.51 0.325 5.27 179 | -5.0 0.565 5.10 3.94 3.51 0.350 7.74 180 | -5.0 0.565 5.10 3.94 3.51 0.375 12.40 181 | -5.0 0.565 5.10 3.94 3.51 0.400 20.91 182 | -5.0 0.565 5.10 3.94 3.51 0.425 33.23 183 | -5.0 0.565 5.10 3.94 3.51 0.450 49.14 184 | -2.3 0.530 5.11 3.69 3.51 0.125 0.08 185 | -2.3 0.530 5.11 3.69 3.51 0.150 0.25 186 | -2.3 0.530 5.11 3.69 3.51 0.175 0.46 187 | -2.3 0.530 5.11 3.69 3.51 0.200 0.75 188 | -2.3 0.530 5.11 3.69 3.51 0.225 1.11 189 | -2.3 0.530 5.11 3.69 3.51 0.250 1.57 190 | -2.3 0.530 5.11 3.69 3.51 0.275 2.17 191 | -2.3 0.530 5.11 3.69 3.51 0.300 2.98 192 | -2.3 0.530 5.11 3.69 3.51 0.325 4.42 193 | -2.3 0.530 5.11 3.69 3.51 0.350 7.84 194 | -2.3 0.530 5.11 3.69 3.51 0.375 14.11 195 | -2.3 0.530 5.11 3.69 3.51 0.400 24.14 196 | -2.3 0.530 5.11 3.69 3.51 0.425 37.95 197 | -2.3 0.530 5.11 3.69 3.51 0.450 55.17 198 | -2.3 0.530 4.76 3.68 3.16 0.125 0.10 199 | -2.3 0.530 4.76 3.68 3.16 0.150 0.23 200 | -2.3 0.530 4.76 3.68 3.16 0.175 0.47 201 | -2.3 0.530 4.76 3.68 3.16 0.200 0.76 202 | -2.3 0.530 4.76 3.68 3.16 0.225 1.15 203 | -2.3 0.530 4.76 3.68 3.16 0.250 1.65 204 | -2.3 0.530 4.76 3.68 3.16 0.275 2.28 205 | -2.3 0.530 4.76 3.68 3.16 0.300 3.09 206 | -2.3 0.530 4.76 3.68 3.16 0.325 4.41 207 | -2.3 0.530 4.76 3.68 3.16 0.350 7.51 208 | -2.3 0.530 4.76 3.68 3.16 0.375 13.77 209 | -2.3 0.530 4.76 3.68 3.16 0.400 23.96 210 | -2.3 0.530 4.76 3.68 3.16 0.425 37.38 211 | -2.3 0.530 4.76 3.68 3.16 0.450 56.46 212 | -2.3 0.530 4.34 2.81 3.15 0.125 0.05 213 | -2.3 0.530 4.34 2.81 3.15 0.150 0.17 214 | -2.3 0.530 4.34 2.81 3.15 0.175 0.35 215 | -2.3 0.530 4.34 2.81 3.15 0.200 0.63 216 | -2.3 0.530 4.34 2.81 3.15 0.225 1.01 217 | -2.3 0.530 4.34 2.81 3.15 0.250 1.43 218 | -2.3 0.530 4.34 2.81 3.15 0.275 2.05 219 | -2.3 0.530 4.34 2.81 3.15 0.300 2.73 220 | -2.3 0.530 4.34 2.81 3.15 0.325 3.87 221 | -2.3 0.530 4.34 2.81 3.15 0.350 7.19 222 | -2.3 0.530 4.34 2.81 3.15 0.375 13.96 223 | -2.3 0.530 4.34 2.81 3.15 0.400 25.18 224 | -2.3 0.530 4.34 2.81 3.15 0.425 41.34 225 | -2.3 0.530 4.34 2.81 3.15 0.450 62.42 226 | 0.0 0.600 4.78 4.24 3.15 0.125 0.03 227 | 0.0 0.600 4.78 4.24 3.15 0.150 0.18 228 | 0.0 0.600 4.78 4.24 3.15 0.175 0.40 229 | 0.0 0.600 4.78 4.24 3.15 0.200 0.73 230 | 0.0 0.600 4.78 4.24 3.15 0.225 1.30 231 | 0.0 0.600 4.78 4.24 3.15 0.250 2.16 232 | 0.0 0.600 4.78 4.24 3.15 0.275 3.35 233 | 0.0 0.600 4.78 4.24 3.15 0.300 5.06 234 | 0.0 0.600 4.78 4.24 3.15 0.325 7.14 235 | 0.0 0.600 4.78 4.24 3.15 0.350 10.36 236 | 0.0 0.600 4.78 4.24 3.15 0.375 15.25 237 | 0.0 0.600 4.78 4.24 3.15 0.400 
23.15 238 | 0.0 0.600 4.78 4.24 3.15 0.425 34.62 239 | 0.0 0.600 4.78 4.24 3.15 0.450 51.50 240 | -5.0 0.600 4.78 4.24 3.15 0.125 0.06 241 | -5.0 0.600 4.78 4.24 3.15 0.150 0.15 242 | -5.0 0.600 4.78 4.24 3.15 0.175 0.34 243 | -5.0 0.600 4.78 4.24 3.15 0.200 0.63 244 | -5.0 0.600 4.78 4.24 3.15 0.225 1.13 245 | -5.0 0.600 4.78 4.24 3.15 0.250 1.85 246 | -5.0 0.600 4.78 4.24 3.15 0.275 2.84 247 | -5.0 0.600 4.78 4.24 3.15 0.300 4.34 248 | -5.0 0.600 4.78 4.24 3.15 0.325 6.20 249 | -5.0 0.600 4.78 4.24 3.15 0.350 8.62 250 | -5.0 0.600 4.78 4.24 3.15 0.375 12.49 251 | -5.0 0.600 4.78 4.24 3.15 0.400 20.41 252 | -5.0 0.600 4.78 4.24 3.15 0.425 32.46 253 | -5.0 0.600 4.78 4.24 3.15 0.450 50.94 254 | 0.0 0.530 4.78 3.75 3.15 0.125 0.16 255 | 0.0 0.530 4.78 3.75 3.15 0.150 0.32 256 | 0.0 0.530 4.78 3.75 3.15 0.175 0.59 257 | 0.0 0.530 4.78 3.75 3.15 0.200 0.92 258 | 0.0 0.530 4.78 3.75 3.15 0.225 1.37 259 | 0.0 0.530 4.78 3.75 3.15 0.250 1.94 260 | 0.0 0.530 4.78 3.75 3.15 0.275 2.62 261 | 0.0 0.530 4.78 3.75 3.15 0.300 3.70 262 | 0.0 0.530 4.78 3.75 3.15 0.325 5.45 263 | 0.0 0.530 4.78 3.75 3.15 0.350 9.45 264 | 0.0 0.530 4.78 3.75 3.15 0.375 16.31 265 | 0.0 0.530 4.78 3.75 3.15 0.400 27.34 266 | 0.0 0.530 4.78 3.75 3.15 0.425 41.77 267 | 0.0 0.530 4.78 3.75 3.15 0.450 60.85 268 | -5.0 0.530 4.78 3.75 3.15 0.125 0.09 269 | -5.0 0.530 4.78 3.75 3.15 0.150 0.24 270 | -5.0 0.530 4.78 3.75 3.15 0.175 0.47 271 | -5.0 0.530 4.78 3.75 3.15 0.200 0.78 272 | -5.0 0.530 4.78 3.75 3.15 0.225 1.21 273 | -5.0 0.530 4.78 3.75 3.15 0.250 1.85 274 | -5.0 0.530 4.78 3.75 3.15 0.275 2.62 275 | -5.0 0.530 4.78 3.75 3.15 0.300 3.69 276 | -5.0 0.530 4.78 3.75 3.15 0.325 5.07 277 | -5.0 0.530 4.78 3.75 3.15 0.350 7.95 278 | -5.0 0.530 4.78 3.75 3.15 0.375 13.73 279 | -5.0 0.530 4.78 3.75 3.15 0.400 23.55 280 | -5.0 0.530 4.78 3.75 3.15 0.425 37.14 281 | -5.0 0.530 4.78 3.75 3.15 0.450 55.87 282 | -2.3 0.600 5.10 4.17 3.51 0.125 0.01 283 | -2.3 0.600 5.10 4.17 3.51 0.150 0.16 284 | -2.3 0.600 5.10 4.17 3.51 0.175 0.39 285 | -2.3 0.600 5.10 4.17 3.51 0.200 0.73 286 | -2.3 0.600 5.10 4.17 3.51 0.225 1.24 287 | -2.3 0.600 5.10 4.17 3.51 0.250 1.96 288 | -2.3 0.600 5.10 4.17 3.51 0.275 3.04 289 | -2.3 0.600 5.10 4.17 3.51 0.300 4.46 290 | -2.3 0.600 5.10 4.17 3.51 0.325 6.31 291 | -2.3 0.600 5.10 4.17 3.51 0.350 8.68 292 | -2.3 0.600 5.10 4.17 3.51 0.375 12.39 293 | -2.3 0.600 5.10 4.17 3.51 0.400 20.14 294 | -2.3 0.600 5.10 4.17 3.51 0.425 31.77 295 | -2.3 0.600 5.10 4.17 3.51 0.450 47.13 296 | -2.3 0.600 4.34 4.23 2.73 0.125 0.04 297 | -2.3 0.600 4.34 4.23 2.73 0.150 0.17 298 | -2.3 0.600 4.34 4.23 2.73 0.175 0.36 299 | -2.3 0.600 4.34 4.23 2.73 0.200 0.64 300 | -2.3 0.600 4.34 4.23 2.73 0.225 1.02 301 | -2.3 0.600 4.34 4.23 2.73 0.250 1.62 302 | -2.3 0.600 4.34 4.23 2.73 0.275 2.63 303 | -2.3 0.600 4.34 4.23 2.73 0.300 4.15 304 | -2.3 0.600 4.34 4.23 2.73 0.325 6.00 305 | -2.3 0.600 4.34 4.23 2.73 0.350 8.47 306 | -2.3 0.600 4.34 4.23 2.73 0.375 12.27 307 | -2.3 0.600 4.34 4.23 2.73 0.400 19.59 308 | -2.3 0.600 4.34 4.23 2.73 0.425 30.48 309 | -2.3 0.600 4.34 4.23 2.73 0.450 46.66 310 | 311 | -------------------------------------------------------------------------------- /few/population.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Copyright 2016 William La Cava 4 | 5 | license: GNU/GPLv3 6 | 7 | """ 8 | import numpy as np 9 | import copy 10 | import pdb 11 | import uuid 12 | from mdr import MDR 13 | from collections import defaultdict 14 | 
import itertools as it 15 | 16 | eqn_dict = { 17 | '+': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '+' + stack_eqn.pop() + ')', 18 | '-': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '-' + stack_eqn.pop()+ ')', 19 | '*': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '*' + stack_eqn.pop()+ ')', 20 | '/': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '/' + stack_eqn.pop()+ ')', 21 | 'sin': lambda n,stack_eqn,names: 'sin(' + stack_eqn.pop() + ')', 22 | 'cos': lambda n,stack_eqn,names: 'cos(' + stack_eqn.pop() + ')', 23 | 'exp': lambda n,stack_eqn,names: 'exp(' + stack_eqn.pop() + ')', 24 | 'log': lambda n,stack_eqn,names: 'log(' + stack_eqn.pop() + ')', 25 | '^2': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '^2)', 26 | '^3': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '^3)', 27 | 'sqrt': lambda n,stack_eqn,names: 'sqrt(|' + stack_eqn.pop() + '|)', 28 | # 'rbf': lambda n,stack_eqn,names: 'exp(-||' + stack_eqn.pop()-stack_eqn.pop() '||^2/2)', 29 | 'x': lambda n,stack_eqn,names: names[n.loc], 30 | 'k': lambda n,stack_eqn,names: str(n.value), 31 | 'mdr2': lambda n,stack_eqn,names: 'MDR(' + stack_eqn.pop() + ',' + stack_eqn.pop() + ')', 32 | # bool operations 33 | '!': lambda n,stack_eqn,names: '(!' + stack_eqn.pop() + ')', 34 | '&': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '&' + stack_eqn.pop() + ')', 35 | '|': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '|' + stack_eqn.pop() + ')', 36 | '==': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '==' + stack_eqn.pop() + ')', 37 | '>_f': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '>' + stack_eqn.pop() + ')', 38 | '<_f': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '<' + stack_eqn.pop() + ')', 39 | '>=_f': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '>=' + stack_eqn.pop() + ')', 40 | '<=_f': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '<=' + stack_eqn.pop() + ')', 41 | '>_b': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '>' + stack_eqn.pop() + ')', 42 | '<_b': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '<' + stack_eqn.pop() + ')', 43 | '>=_b': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '>=' + stack_eqn.pop() + ')', 44 | '<=_b': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + '<=' + stack_eqn.pop() + ')', 45 | 'xor_b': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + ' XOR ' + stack_eqn.pop() + ')', 46 | 'xor_f': lambda n,stack_eqn,names: '(' + stack_eqn.pop() + ' XOR ' + stack_eqn.pop() + ')' 47 | } 48 | 49 | def run_MDR(n,stack_float,labels=None): 50 | """run utility function for MDR nodes.""" 51 | # need to check that tmp is categorical 52 | 53 | x1 = stack_float.pop() 54 | x2 = stack_float.pop() 55 | # check data is categorical 56 | if len(np.unique(x1))<=3 and len(np.unique(x2))<=3: 57 | tmp = np.vstack((x1,x2)).transpose() 58 | 59 | if labels is None: # prediction 60 | return n.model.transform(tmp)[:,0] 61 | else: # training 62 | out = n.model.fit_transform(tmp,labels)[:,0] 63 | 64 | return out 65 | else: 66 | return np.zeros(x1.shape[0]) 67 | 68 | class node(object): 69 | """node in program""" 70 | def __init__(self,name,loc=None,value=None,otype=None): 71 | """defines properties of a node given its name""" 72 | self.name = name 73 | self.arity = {None:0} 74 | self.arity['f'] = defaultdict(lambda: 0, { 75 | 'sin':1,'cos':1,'exp':1,'log':1,'^2':1,'^3':1, 76 | 'sqrt':1,'if':1, 77 | 'ife':2,'+':2,'-':2,'*':2,'/':2,'>_f':2,'<_f':2, 78 | '>=_f':2,'<=_f':2,'xor_f':2,'mdr2':2})[name] 79 | 80 | self.arity['b'] = defaultdict(lambda: 0, { 81 | 
'!':1,'if':1,'ife':1, 82 | '&':2,'|':2,'==':2,'>_b':2,'<_b':2,'>=_b':2, 83 | '<=_b':2,'xor_b':2})[name] 84 | self.in_type = { 85 | # float operations 86 | '+':'f', '-':'f', '*':'f', '/':'f', 'sin':'f', 'cos':'f', 'exp': 'f', 87 | 'log':'f', 'x':None, 'k':None, '^2':'f', '^3':'f', 'sqrt': 'f', 88 | # 'rbf': , 89 | # bool operations 90 | '!':'b', '&':'b', '|':'b', '==':'b', '>_f':'f', '<_f':'f', '>=_f':'f', 91 | '<=_f':'f', '>_b':'b', '<_b':'b', '>=_b':'b', '<=_b':'b','xor_b':'b', 92 | 'xor_f':'f', 93 | # mixed 94 | 'mdr2':'f','if':('f','b'),'ife':('f','b') 95 | }[name] 96 | if otype is None: 97 | self.out_type = { 98 | # float operations 99 | '+': 'f','-': 'f','*': 'f','/': 'f','sin': 'f','cos': 'f','exp': 'f', 100 | 'log': 'f','x':'f','k': 'f','^2': 'f','^3': 'f','sqrt': 'f', 101 | # 'rbf': , 102 | # bool operations 103 | '!': 'b', '&': 'b','|': 'b','==': 'b','>_f': 'b','<_f': 'b','>=_f': 'b', 104 | '<=_f': 'b','>_b': 'b','<_b': 'b','>=_b': 'b','<=_b': 'b','xor_f':'b', 105 | 'xor_b':'b', 106 | # mixed 107 | 'mdr2':'b','if':'f','ife':'f' 108 | }[name] 109 | else: 110 | self.out_type = otype 111 | 112 | if 'mdr' in self.name: 113 | self.model = MDR() 114 | self.evaluate = run_MDR 115 | 116 | self.loc = loc 117 | self.value = value 118 | 119 | 120 | class Ind(object): 121 | """class for features, represented as GP stacks.""" 122 | 123 | def __init__(self,fitness = -1.0,stack = None): 124 | """initializes empty individual with invalid fitness.""" 125 | self.fitness = fitness 126 | self.fitness_vec = [] 127 | self.fitness_bool = [] 128 | self.parentid = [] 129 | self.id = uuid.uuid4() 130 | if stack is None: 131 | self.stack = [] 132 | else: 133 | self.stack = copy.deepcopy(stack) 134 | 135 | class Pop(object): 136 | """class representing population""" 137 | def __init__(self,pop_size=100, fit = None): 138 | """initializes population of inds of size pop_size""" 139 | 140 | self.individuals = [] 141 | # initialize population programs 142 | for i in np.arange(pop_size): 143 | if fit is None: 144 | self.individuals.append(Ind()) 145 | else: 146 | self.individuals.append(Ind(fitness = fit)) 147 | 148 | class PopMixin(object): 149 | """methods for constructing features.""" 150 | ######################################################## printing equations 151 | def get_eqn_dict(self): 152 | return { 153 | '+': lambda n,stack_eqn: '(' + stack_eqn.pop() + '+' + stack_eqn.pop() + ')', 154 | '-': lambda n,stack_eqn: '(' + stack_eqn.pop() + '-' + stack_eqn.pop()+ ')', 155 | '*': lambda n,stack_eqn: '(' + stack_eqn.pop() + '*' + stack_eqn.pop()+ ')', 156 | '/': lambda n,stack_eqn: '(' + stack_eqn.pop() + '/' + stack_eqn.pop()+ ')', 157 | 'sin': lambda n,stack_eqn: 'sin(' + stack_eqn.pop() + ')', 158 | 'cos': lambda n,stack_eqn: 'cos(' + stack_eqn.pop() + ')', 159 | 'exp': lambda n,stack_eqn: 'exp(' + stack_eqn.pop() + ')', 160 | 'log': lambda n,stack_eqn: 'log(' + stack_eqn.pop() + ')', 161 | '^2': lambda n,stack_eqn: '(' + stack_eqn.pop() + '^2)', 162 | '^3': lambda n,stack_eqn: '(' + stack_eqn.pop() + '^3)', 163 | 'sqrt': lambda n,stack_eqn: 'sqrt(|' + stack_eqn.pop() + '|)', 164 | # 'rbf': lambda n,stack_eqn: 'exp(-||' + stack_eqn.pop()-stack_eqn.pop() '||^2/2)', 165 | 'x': lambda n,stack_eqn: self.names[n.loc], 166 | 'k': lambda n,stack_eqn: str(n.value), 167 | 'mdr2': lambda n,stack_eqn: 'MDR(' + stack_eqn.pop() + ',' + stack_eqn.pop() + ')', 168 | # bool operations 169 | '!': lambda n,stack_eqn: '(!' 
+ stack_eqn.pop() + ')', 170 | '&': lambda n,stack_eqn: '(' + stack_eqn.pop() + '&' + stack_eqn.pop() + ')', 171 | '|': lambda n,stack_eqn: '(' + stack_eqn.pop() + '|' + stack_eqn.pop() + ')', 172 | '==': lambda n,stack_eqn: '(' + stack_eqn.pop() + '==' + stack_eqn.pop() + ')', 173 | '>_f': lambda n,stack_eqn: '(' + stack_eqn.pop() + '>' + stack_eqn.pop() + ')', 174 | '<_f': lambda n,stack_eqn: '(' + stack_eqn.pop() + '<' + stack_eqn.pop() + ')', 175 | '>=_f': lambda n,stack_eqn: '(' + stack_eqn.pop() + '>=' + stack_eqn.pop() + ')', 176 | '<=_f': lambda n,stack_eqn: '(' + stack_eqn.pop() + '<=' + stack_eqn.pop() + ')', 177 | '>_b': lambda n,stack_eqn: '(' + stack_eqn.pop() + '>' + stack_eqn.pop() + ')', 178 | '<_b': lambda n,stack_eqn: '(' + stack_eqn.pop() + '<' + stack_eqn.pop() + ')', 179 | '>=_b': lambda n,stack_eqn: '(' + stack_eqn.pop() + '>=' + stack_eqn.pop() + ')', 180 | '<=_b': lambda n,stack_eqn: '(' + stack_eqn.pop() + '<=' + stack_eqn.pop() + ')', 181 | 'xor_b': lambda n,stack_eqn: '(' + stack_eqn.pop() + ' XOR ' + stack_eqn.pop() + ')', 182 | 'xor_f': lambda n,stack_eqn: '(' + stack_eqn.pop() + ' XOR ' + stack_eqn.pop() + ')', 183 | 184 | } 185 | 186 | def eval_eqn(self,n,stack_eqn): 187 | if len(stack_eqn) >= n.arity['f']+n.arity['b']: 188 | stack_eqn.append(eqn_dict[n.name](n,stack_eqn,self.names)) 189 | 190 | def stack_2_eqn(self,p): 191 | """returns equation string for program stack""" 192 | stack_eqn = [] 193 | if p: # if stack is not empty 194 | for n in p.stack: 195 | self.eval_eqn(n,stack_eqn) 196 | return stack_eqn[-1] 197 | return [] 198 | 199 | def stacks_2_eqns(self,stacks): 200 | """returns equation strings from stacks""" 201 | if stacks: 202 | return list(map(lambda p: self.stack_2_eqn(p), stacks)) 203 | else: 204 | return [] 205 | 206 | ########################################################### making programs 207 | def make_program(self,stack,func_set,term_set,max_d,ntype): 208 | """makes a program stack""" 209 | # print("stack:",stack,"max d:",max_d) 210 | if max_d == 0: 211 | ts = [t for t in term_set if t.out_type==ntype] 212 | 213 | if not ts: 214 | raise ValueError('no ts. ntype:'+ntype+'. 
term_set out_types:'+ 215 | ','.join([t.out_type for t in term_set])) 216 | 217 | stack.append(ts[self.random_state.choice(len(ts))]) 218 | else: 219 | fs = [f for f in func_set if (f.out_type==ntype 220 | and (f.in_type=='f' or max_d>1))] 221 | if len(fs)==0: 222 | raise ValueError('no fs. ntype:'+ntype+'. func_set:'+','.join([f.name for f in func_set])) 223 | stack.append(fs[self.random_state.choice(len(fs))]) 224 | tmp = copy.copy(stack[-1]) 225 | 226 | for i in np.arange(tmp.arity['f']): 227 | self.make_program(stack,func_set,term_set,max_d-1,'f') 228 | for i in np.arange(tmp.arity['b']): 229 | self.make_program(stack,func_set,term_set,max_d-1,'b') 230 | 231 | def init_pop(self): 232 | """initializes population of features as GP stacks.""" 233 | pop = Pop(self.population_size) 234 | seed_with_raw_features = False 235 | # make programs 236 | if self.seed_with_ml: 237 | # initial population is the components of the default ml model 238 | if (self.ml_type == 'SVC' or self.ml_type == 'SVR'): 239 | # this is needed because svm has a bug that throws a ValueError 240 | # on attribute check 241 | seed_with_raw_features=True 242 | elif (hasattr(self.pipeline.named_steps['ml'],'coef_') or 243 | hasattr(self.pipeline.named_steps['ml'],'feature_importances_')): 244 | # add model components with non-zero coefficients to initial 245 | # population, in order of coefficient size 246 | coef = (self.pipeline.named_steps['ml'].coef_ if 247 | hasattr(self.pipeline.named_steps['ml'],'coef_') else 248 | self.pipeline.named_steps['ml'].feature_importances_) 249 | # compress multiple coefficients for each feature into single 250 | # numbers (occurs with multiclass classification) 251 | if len(coef.shape)>1: 252 | coef = [np.mean(abs(c)) for c in coef.transpose()] 253 | 254 | # remove zeros 255 | coef = [c for c in coef if c!=0] 256 | # sort feature locations based on importance/coefficient 257 | locs = np.arange(len(coef)) 258 | locs = locs[np.argsort(np.abs(coef))[::-1]] 259 | for i,p in enumerate(pop.individuals): 260 | if i < len(locs): 261 | p.stack = [node('x',loc=locs[i])] 262 | else: 263 | # make program if pop is bigger than n_features 264 | self.make_program(p.stack,self.func_set,self.term_set, 265 | self.random_state.randint(self.min_depth, 266 | self.max_depth+1), 267 | self.otype) 268 | p.stack = list(reversed(p.stack)) 269 | else: 270 | seed_with_raw_features = True 271 | # seed with random features if no importance info available 272 | if seed_with_raw_features: 273 | for i,p in enumerate(pop.individuals): 274 | if i < self.n_features: 275 | p.stack = [node('x', 276 | loc=self.random_state.randint(self.n_features))] 277 | else: 278 | # make program if pop is bigger than n_features 279 | self.make_program(p.stack,self.func_set,self.term_set, 280 | self.random_state.randint(self.min_depth, 281 | self.max_depth+1), 282 | self.otype) 283 | p.stack = list(reversed(p.stack)) 284 | 285 | # print initial population 286 | if self.verbosity > 2: 287 | print("seeded initial population:", 288 | self.stacks_2_eqns(pop.individuals)) 289 | 290 | else: # don't seed with ML 291 | for I in pop.individuals: 292 | depth = self.random_state.randint(self.min_depth,self.max_depth_init) 293 | self.make_program(I.stack,self.func_set,self.term_set,depth, 294 | self.otype) 295 | #print(I.stack) 296 | I.stack = list(reversed(I.stack)) 297 | 298 | return pop 299 | -------------------------------------------------------------------------------- /few/evaluation.py: -------------------------------------------------------------------------------- 1 | # -*- 
coding: utf-8 -*- 2 | """ 3 | Copyright 2016 William La Cava 4 | 5 | license: GNU/GPLv3 6 | 7 | """ 8 | import numpy as np 9 | from sklearn.metrics import explained_variance_score, mean_absolute_error, mean_squared_error, median_absolute_error, r2_score 10 | import pdb 11 | from sklearn.metrics import silhouette_samples, silhouette_score, accuracy_score, roc_auc_score 12 | import itertools as it 13 | import sys 14 | from sklearn.metrics.pairwise import pairwise_distances 15 | # from profilehooks import profile 16 | from sklearn.externals.joblib import Parallel, delayed 17 | from sklearn.feature_selection import f_classif, f_regression 18 | # safe division 19 | def divs(x,y): 20 | """safe division""" 21 | tmp = np.ones(x.shape) 22 | nonzero_y = y != 0 23 | tmp[nonzero_y] = x[nonzero_y]/y[nonzero_y] 24 | return tmp 25 | 26 | # safe log 27 | def logs(x): 28 | """safe log""" 29 | tmp = np.ones(x.shape) 30 | nonzero_x = x != 0 31 | tmp[nonzero_x] = np.log(np.abs(x[nonzero_x])) 32 | return tmp 33 | 34 | # vectorized r2 score 35 | def r2_score_vec(y_true,y_pred): 36 | """ returns non-aggregate version of r2 score. 37 | 38 | based on r2_score() function from sklearn (http://sklearn.org) 39 | """ 40 | 41 | numerator = (y_true - y_pred) ** 2 42 | denominator = (y_true - np.average(y_true)) ** 2 43 | 44 | nonzero_denominator = denominator != 0 45 | nonzero_numerator = numerator != 0 46 | valid_score = nonzero_denominator & nonzero_numerator 47 | output_scores = np.ones([y_true.shape[0]]) 48 | output_scores[valid_score] = 1 - (numerator[valid_score] / 49 | denominator[valid_score]) 50 | # arbitrarily set to zero to avoid -inf scores; having a constant 51 | # y_true is not interesting for scoring a regression anyway 52 | output_scores[nonzero_numerator & ~nonzero_denominator] = 0.
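# Worked example (illustrative): for y_true = [1, 2, 3] and y_pred = [1, 2, 4],
# numerator = [0, 0, 1] and denominator = [1, 0, 1], so the vector returned
# below is [1, 1, 0] -- zero-residual samples keep the default score of 1
# (including the zero-denominator case), and the last sample scores 1 - 1/1 = 0.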
53 | 54 | return output_scores 55 | 56 | class EvaluationMixin(object): 57 | """methods for evaluation.""" 58 | 59 | #evaluation functions 60 | eval_dict = { 61 | # float operations 62 | '+': lambda n,features,stack_float,stack_bool,labels: stack_float.pop() + stack_float.pop(), 63 | '-': lambda n,features,stack_float,stack_bool,labels: stack_float.pop() - stack_float.pop(), 64 | '*': lambda n,features,stack_float,stack_bool,labels: stack_float.pop() * stack_float.pop(), 65 | '/': lambda n,features,stack_float,stack_bool,labels: divs(stack_float.pop(),stack_float.pop()), 66 | 'sin': lambda n,features,stack_float,stack_bool,labels: np.sin(stack_float.pop()), 67 | 'cos': lambda n,features,stack_float,stack_bool,labels: np.cos(stack_float.pop()), 68 | 'exp': lambda n,features,stack_float,stack_bool,labels: np.exp(stack_float.pop()), 69 | 'log': lambda n,features,stack_float,stack_bool,labels: logs(stack_float.pop()),#np.log(np.abs(stack_float.pop())), 70 | 'x': lambda n,features,stack_float,stack_bool,labels: features[:,n.loc], 71 | 'k': lambda n,features,stack_float,stack_bool,labels: np.ones(features.shape[0])*n.value, 72 | '^2': lambda n,features,stack_float,stack_bool,labels: stack_float.pop()**2, 73 | '^3': lambda n,features,stack_float,stack_bool,labels: stack_float.pop()**3, 74 | 'sqrt': lambda n,features,stack_float,stack_bool,labels: np.sqrt(np.abs(stack_float.pop())), 75 | #'gauss': lambda n,features,stack_float,stack_bool,labels: np.exp(-stack_float.pop()**2), 76 | #'gauss2': lambda n,features,stack_float,stack_bool,labels: np.exp(-(stack_float.pop()**2+stack_float.pop()**2)), 77 | # 'rbf': lambda n,features,stack_float,stack_bool,labels: np.exp(-(np.norm(stack_float.pop()-stack_float.pop())**2)/2) 78 | # bool operations 79 | '!': lambda n,features,stack_float,stack_bool,labels: np.logical_not(stack_bool.pop()), 80 | '&': lambda n,features,stack_float,stack_bool,labels: np.logical_and(stack_bool.pop(), stack_bool.pop()), 81 | '|': lambda n,features,stack_float,stack_bool,labels: np.logical_or(stack_bool.pop(), stack_bool.pop()), 82 | '==': lambda n,features,stack_float,stack_bool,labels: stack_bool.pop() == stack_bool.pop(), 83 | '>_f': lambda n,features,stack_float,stack_bool,labels: stack_float.pop() > stack_float.pop(), 84 | '<_f': lambda n,features,stack_float,stack_bool,labels: stack_float.pop() < stack_float.pop(), 85 | '>=_f': lambda n,features,stack_float,stack_bool,labels: stack_float.pop() >= stack_float.pop(), 86 | '<=_f': lambda n,features,stack_float,stack_bool,labels: stack_float.pop() <= stack_float.pop(), 87 | '>_b': lambda n,features,stack_float,stack_bool,labels: stack_bool.pop() > stack_bool.pop(), 88 | '<_b': lambda n,features,stack_float,stack_bool,labels: stack_bool.pop() < stack_bool.pop(), 89 | '>=_b': lambda n,features,stack_float,stack_bool,labels: stack_bool.pop() >= stack_bool.pop(), 90 | '<=_b': lambda n,features,stack_float,stack_bool,labels: stack_bool.pop() <= stack_bool.pop(), 91 | 'xor_b': lambda n,features,stack_float,stack_bool,labels: np.logical_xor(stack_bool.pop(),stack_bool.pop()), 92 | 'xor_f': lambda n,features,stack_float,stack_bool,labels: np.logical_xor(stack_float.pop().astype(bool), stack_float.pop().astype(bool)), 93 | # MDR 94 | 'mdr2': lambda n,features,stack_float,stack_bool,labels: n.evaluate(n,stack_float,labels), 95 | # control flow: 96 | # 'if': lambda n,features,stack_float,stack_bool,labels: stack_float.pop() if stack_bool.pop(), 97 | # 'ife': lambda n,features,stack_float,stack_bool,labels: stack_float.pop() if 
stack_bool.pop() else stack_float.pop(), 98 | } 99 | 100 | f = { # available fitness metrics 101 | 'mse': lambda y,yhat: mean_squared_error(y,yhat), 102 | 'mae': lambda y,yhat: mean_absolute_error(y,yhat), 103 | 'mdae': lambda y,yhat: median_absolute_error(y,yhat), 104 | 'r2': lambda y,yhat: 1-r2_score(y,yhat), 105 | 'vaf': lambda y,yhat: 1-explained_variance_score(y,yhat), 106 | 'silhouette': lambda y,yhat: 1 - silhouette_score(yhat.reshape(-1,1),y), 107 | 'inertia': lambda y,yhat: inertia(yhat,y), 108 | 'separation': lambda y,yhat: 1 - separation(yhat,y), 109 | 'fisher': lambda y,yhat: 1 - fisher(yhat,y), 110 | 'accuracy': lambda y,yhat: 1 - accuracy_score(yhat,y), 111 | 'random': lambda y,yhat: self.random_state.rand(), 112 | 'roc_auc': lambda y,yhat: 1 - roc_auc_score(y,yhat), 113 | 'pclass': lambda y,yhat: f_classif(yhat.reshape(-1,1),y)[1], 114 | 'preg': lambda y,yhat: f_regression(yhat.reshape(-1,1),y)[1], 115 | # 'relief': lambda y,yhat: 1-ReliefF(n_jobs=-1).fit(yhat.reshape(-1,1),y).feature_importances_ 116 | } 117 | # 118 | f_vec = {# non-aggregated fitness calculations 119 | 'mse': lambda y,yhat: (y - yhat) ** 2, #mean_squared_error(y,yhat,multioutput = 'raw_values'), 120 | 'mae': lambda y,yhat: np.abs(y-yhat), #mean_absolute_error(y,yhat,multioutput = 'raw_values'), 121 | # 'mdae_vec': lambda y,yhat: median_absolute_error(y,yhat,multioutput = 'raw_values'), 122 | 'r2': lambda y,yhat: 1-r2_score_vec(y,yhat), 123 | 'vaf': lambda y,yhat: 1-explained_variance_score(y,yhat,multioutput = 'raw_values'), 124 | 'silhouette': lambda y,yhat: 1 - silhouette_samples(yhat.reshape(-1,1),y), 125 | 'inertia': lambda y,yhat: inertia(yhat,y,samples=True), 126 | 'separation': lambda y,yhat: 1 - separation(yhat,y,samples=True), 127 | 'fisher': lambda y,yhat: 1 - fisher(yhat,y,samples=True), 128 | 'accuracy': lambda y,yhat: 1 - np.sum(yhat==y)/y.shape[0], # this looks wrong, CHECK 129 | 'random': lambda y,yhat: self.random_state.rand(len(y)), 130 | # 'relief': lambda y,yhat: 1-ReliefF(n_jobs=-1,sample_scores=True).fit(yhat.reshape(-1,1),y).feature_importances_ 131 | } 132 | 133 | # f_vec = {# non-aggregated fitness calculations 134 | # 'mse': (y - yhat) ** 2, #mean_squared_error(y,yhat,multioutput = 'raw_values'), 135 | # 'mae': np.abs(y-yhat), #mean_absolute_error(y,yhat,multioutput = 'raw_values'), 136 | # # 'mdae_vec': median_absolute_error(y,yhat,multioutput = 'raw_values'), 137 | # 'r2': 1-r2_score_vec(y,yhat), 138 | # 'vaf': 1-explained_variance_score(y,yhat,multioutput = 'raw_values'), 139 | # 'silhouette': 1 - silhouette_samples(yhat.reshape(-1,1),y), 140 | # 'inertia': inertia(yhat,y,samples=True), 141 | # 'separation': 1 - separation(yhat,y,samples=True), 142 | # 'fisher': 1 - fisher(yhat,y,samples=True), 143 | # 'accuracy': 1 - np.sum(yhat==y)/y.shape[0], 144 | # 'random': self.random_state.rand(len(y)), 145 | # # 'relief': 1-ReliefF(n_jobs=-1,sample_scores=True).fit(yhat.reshape(-1,1),y).feature_importances_ 146 | # } 147 | 148 | 149 | def proper(self,x): 150 | """cleans fitness vector""" 151 | x[x < 0] = self.max_fit 152 | x[np.isnan(x)] = self.max_fit 153 | x[np.isinf(x)] = self.max_fit 154 | return x 155 | def safe(self,x): 156 | """removes nans and infs from outputs.""" 157 | x[np.isinf(x)] = 1 158 | x[np.isnan(x)] = 1 159 | return x 160 | 161 | def evaluate(self,n, features, stack_float, stack_bool,labels=None): 162 | """evaluate node in program""" 163 | np.seterr(all='ignore') 164 | if len(stack_float) >= n.arity['f'] and len(stack_bool) >= n.arity['b']: 165 | if n.out_type == 'f': 
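# nodes are routed by their output type: float-valued nodes push their
# sanitized result onto stack_float (this branch), while bool-valued nodes
# take the else branch below and push onto stack_bool.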
166 | stack_float.append( 167 | self.safe(self.eval_dict[n.name](n,features,stack_float, 168 | stack_bool,labels))) 169 | if (np.isnan(stack_float[-1]).any() or 170 | np.isinf(stack_float[-1]).any()): 171 | print("problem operator:",n) 172 | else: 173 | stack_bool.append(self.safe(self.eval_dict[n.name](n,features, 174 | stack_float, 175 | stack_bool, 176 | labels))) 177 | if np.isnan(stack_bool[-1]).any() or np.isinf(stack_bool[-1]).any(): 178 | print("problem operator:",n) 179 | 180 | def all_finite(self,X): 181 | """returns True if X is finite, False otherwise""" 182 | # Adapted from sklearn utils: _assert_all_finite(X) 183 | # First try an O(n) time, O(1) space solution for the common case that 184 | # everything is finite; fall back to O(n) space np.isfinite to prevent 185 | # false positives from overflow in sum method. 186 | # Note: this is basically here because sklearn tree.py uses float32 internally, 187 | # and float64's that are finite are not finite in float32. 188 | if (X.dtype.char in np.typecodes['AllFloat'] 189 | and not np.isfinite(np.asarray(X,dtype='float32').sum()) 190 | and not np.isfinite(np.asarray(X,dtype='float32')).all()): 191 | return False 192 | return True 193 | 194 | def out(self,I,features,labels=None,otype='f'): 195 | """computes the output for individual I""" 196 | stack_float = [] 197 | stack_bool = [] 198 | # print("stack:",I.stack) 199 | # evaluate stack over rows of features,labels 200 | # pdb.set_trace() 201 | for n in I.stack: 202 | self.evaluate(n,features,stack_float,stack_bool,labels) 203 | # print("stack_float:",stack_float) 204 | if otype=='f': 205 | return (stack_float[-1] if self.all_finite(stack_float[-1]) 206 | else np.zeros(len(features))) 207 | else: 208 | return (stack_bool[-1].astype(float) if self.all_finite(stack_bool[-1]) 209 | else np.zeros(len(features))) 210 | 211 | def calc_fitness(self,X,labels,fit_choice,sel): 212 | """computes fitness of individual output yhat. 213 | yhat: output of a program. 
214 | labels: correct outputs 215 | fit_choice: choice of fitness function 216 | """ 217 | 218 | if 'lexicase' in sel: 219 | # return list(map(lambda yhat: self.f_vec[fit_choice](labels,yhat),X)) 220 | return np.asarray( 221 | [self.proper(self.f_vec[fit_choice](labels, 222 | yhat)) for yhat in X], 223 | order='F') 224 | # return list(Parallel(n_jobs=-1)(delayed(self.f_vec[fit_choice])(labels,yhat) for yhat in X)) 225 | else: 226 | # return list(map(lambda yhat: self.f[fit_choice](labels,yhat),X)) 227 | return np.asarray([self.f[fit_choice](labels,yhat) for yhat in X], 228 | order='F').reshape(-1) 229 | 230 | # return list(Parallel(n_jobs=-1)(delayed(self.f[fit_choice])(labels,yhat) for yhat in X)) 231 | 232 | def inertia(X,y,samples=False): 233 | """ return the within-class squared distance from the centroid""" 234 | # pdb.set_trace() 235 | if samples: 236 | # return within-class distance for each sample 237 | inertia = np.zeros(y.shape) 238 | for label in np.unique(y): 239 | inertia[y==label] = (X[y==label] - np.mean(X[y==label])) ** 2 240 | 241 | else: # return aggregate score 242 | inertia = 0 243 | for i,label in enumerate(np.unique(y)): 244 | inertia += np.sum((X[y==label] - np.mean(X[y==label])) ** 2)/len(y[y==label]) 245 | inertia = inertia/len(np.unique(y)) 246 | 247 | return inertia 248 | 249 | def separation(X,y,samples=False): 250 | """ return the sum of the between-class squared distance""" 251 | # pdb.set_trace() 252 | num_classes = len(np.unique(y)) 253 | total_dist = (X.max()-X.min())**2 254 | if samples: 255 | # return between-class distance for each sample 256 | separation = np.zeros(y.shape) 257 | for label in np.unique(y): 258 | for outsider in np.unique(y[y!=label]): 259 | separation[y==label] += (X[y==label] - np.mean(X[y==outsider])) ** 2 260 | 261 | # normalization between 0 and 1 is currently disabled: 262 | # print('separation:',separation) 263 | # print('num_classes:',num_classes) 264 | # print('total_dist:',total_dist) 265 | # separation = separation/separation.max() 266 | 267 | # print('separation after normalization:',separation) 268 | 269 | else: 270 | # return aggregate score 271 | separation = 0 272 | for i,label in enumerate(np.unique(y)): 273 | for outsider in np.unique(y[y!=label]): 274 | separation += np.sum((X[y==label] - np.mean(X[y==outsider])) ** 2)/len(y[y==label]) 275 | separation = separation/len(np.unique(y)) 276 | 277 | return separation 278 | 279 | def pairwise(iterable): 280 | "s -> (s0,s1), (s1,s2), (s2, s3), ..." 
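# usage example (illustrative): list(pairwise([1, 2, 3, 4])) -> [(1, 2), (2, 3), (3, 4)]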
281 | a, b = it.tee(iterable) 282 | next(b, None) 283 | return zip(a, b) 284 | 285 | 286 | def fisher(yhat,y,samples=False): 287 | """Fisher criterion""" 288 | classes = np.unique(y) 289 | mu = np.zeros(len(classes)) 290 | v = np.zeros(len(classes)) 291 | # pdb.set_trace() 292 | for c in classes.astype(int): 293 | mu[c] = np.mean(yhat[y==c]) 294 | v[c] = np.var(yhat[y==c]) 295 | 296 | if not samples: 297 | fisher = 0 298 | for c1,c2 in pairwise(classes.astype(int)): 299 | fisher += np.abs(mu[c1] - mu[c2])/np.sqrt(v[c1]+v[c2]) 300 | else: 301 | # lexicase version 302 | fisher = np.zeros(len(yhat)) 303 | # get closests classes to each class (min mu distance) 304 | mu_d = pairwise_distances(mu.reshape(-1,1)) 305 | min_mu=np.zeros(len(classes),dtype=int) 306 | for i in np.arange(len(min_mu)): 307 | min_mu[i] = np.argsort(mu_d[i])[1] 308 | # for c1, pairwise(classes.astype(int)): 309 | # min_mu[c1] = np.argmin() 310 | for i,l in enumerate(yhat.astype(int)): 311 | fisher[i] = np.abs(l - mu[min_mu[y[i]]])/np.sqrt(v[y[i]]+v[min_mu[y[i]]]) 312 | 313 | # pdb.set_trace() 314 | return fisher 315 | -------------------------------------------------------------------------------- /docs/data/d_enc.txt: -------------------------------------------------------------------------------- 1 | x0,x1,x2,x3,x4,x5,x6,x7,label 2 | 0.98,514.5,294.0,110.25,7.0,2,0.0,0,21.33 3 | 0.98,514.5,294.0,110.25,7.0,3,0.0,0,21.33 4 | 0.98,514.5,294.0,110.25,7.0,4,0.0,0,21.33 5 | 0.98,514.5,294.0,110.25,7.0,5,0.0,0,21.33 6 | 0.9,563.5,318.5,122.5,7.0,2,0.0,0,28.28 7 | 0.9,563.5,318.5,122.5,7.0,3,0.0,0,25.38 8 | 0.9,563.5,318.5,122.5,7.0,4,0.0,0,25.16 9 | 0.9,563.5,318.5,122.5,7.0,5,0.0,0,29.6 10 | 0.86,588.0,294.0,147.0,7.0,2,0.0,0,27.3 11 | 0.86,588.0,294.0,147.0,7.0,3,0.0,0,21.97 12 | 0.86,588.0,294.0,147.0,7.0,4,0.0,0,23.49 13 | 0.86,588.0,294.0,147.0,7.0,5,0.0,0,27.87 14 | 0.82,612.5,318.5,147.0,7.0,2,0.0,0,23.77 15 | 0.82,612.5,318.5,147.0,7.0,3,0.0,0,21.46 16 | 0.82,612.5,318.5,147.0,7.0,4,0.0,0,21.16 17 | 0.82,612.5,318.5,147.0,7.0,5,0.0,0,24.93 18 | 0.79,637.0,343.0,147.0,7.0,2,0.0,0,37.73 19 | 0.79,637.0,343.0,147.0,7.0,3,0.0,0,31.27 20 | 0.79,637.0,343.0,147.0,7.0,4,0.0,0,30.93 21 | 0.79,637.0,343.0,147.0,7.0,5,0.0,0,39.44 22 | 0.76,661.5,416.5,122.5,7.0,2,0.0,0,29.79 23 | 0.76,661.5,416.5,122.5,7.0,3,0.0,0,29.68 24 | 0.76,661.5,416.5,122.5,7.0,4,0.0,0,29.79 25 | 0.76,661.5,416.5,122.5,7.0,5,0.0,0,29.4 26 | 0.74,686.0,245.0,220.5,3.5,2,0.0,0,10.9 27 | 0.74,686.0,245.0,220.5,3.5,3,0.0,0,11.19 28 | 0.74,686.0,245.0,220.5,3.5,4,0.0,0,10.94 29 | 0.74,686.0,245.0,220.5,3.5,5,0.0,0,11.17 30 | 0.71,710.5,269.5,220.5,3.5,2,0.0,0,11.27 31 | 0.71,710.5,269.5,220.5,3.5,3,0.0,0,11.72 32 | 0.71,710.5,269.5,220.5,3.5,4,0.0,0,11.29 33 | 0.71,710.5,269.5,220.5,3.5,5,0.0,0,11.67 34 | 0.69,735.0,294.0,220.5,3.5,2,0.0,0,11.74 35 | 0.69,735.0,294.0,220.5,3.5,3,0.0,0,12.05 36 | 0.69,735.0,294.0,220.5,3.5,4,0.0,0,11.73 37 | 0.69,735.0,294.0,220.5,3.5,5,0.0,0,11.93 38 | 0.66,759.5,318.5,220.5,3.5,2,0.0,0,12.4 39 | 0.66,759.5,318.5,220.5,3.5,3,0.0,0,12.23 40 | 0.66,759.5,318.5,220.5,3.5,4,0.0,0,12.4 41 | 0.66,759.5,318.5,220.5,3.5,5,0.0,0,12.14 42 | 0.64,784.0,343.0,220.5,3.5,2,0.0,0,16.78 43 | 0.64,784.0,343.0,220.5,3.5,3,0.0,0,16.8 44 | 0.64,784.0,343.0,220.5,3.5,4,0.0,0,16.75 45 | 0.64,784.0,343.0,220.5,3.5,5,0.0,0,16.67 46 | 0.62,808.5,367.5,220.5,3.5,2,0.0,0,12.07 47 | 0.62,808.5,367.5,220.5,3.5,3,0.0,0,12.22 48 | 0.62,808.5,367.5,220.5,3.5,4,0.0,0,12.08 49 | 0.62,808.5,367.5,220.5,3.5,5,0.0,0,12.04 50 | 
0.98,514.5,294.0,110.25,7.0,2,0.1,1,26.47 51 | 0.98,514.5,294.0,110.25,7.0,3,0.1,1,26.37 52 | 0.98,514.5,294.0,110.25,7.0,4,0.1,1,26.44 53 | 0.98,514.5,294.0,110.25,7.0,5,0.1,1,26.29 54 | 0.9,563.5,318.5,122.5,7.0,2,0.1,1,32.92 55 | 0.9,563.5,318.5,122.5,7.0,3,0.1,1,29.87 56 | 0.9,563.5,318.5,122.5,7.0,4,0.1,1,29.58 57 | 0.9,563.5,318.5,122.5,7.0,5,0.1,1,34.33 58 | 0.86,588.0,294.0,147.0,7.0,2,0.1,1,30.89 59 | 0.86,588.0,294.0,147.0,7.0,3,0.1,1,25.6 60 | 0.86,588.0,294.0,147.0,7.0,4,0.1,1,27.03 61 | 0.86,588.0,294.0,147.0,7.0,5,0.1,1,31.73 62 | 0.82,612.5,318.5,147.0,7.0,2,0.1,1,27.31 63 | 0.82,612.5,318.5,147.0,7.0,3,0.1,1,24.91 64 | 0.82,612.5,318.5,147.0,7.0,4,0.1,1,24.61 65 | 0.82,612.5,318.5,147.0,7.0,5,0.1,1,28.51 66 | 0.79,637.0,343.0,147.0,7.0,2,0.1,1,41.68 67 | 0.79,637.0,343.0,147.0,7.0,3,0.1,1,35.28 68 | 0.79,637.0,343.0,147.0,7.0,4,0.1,1,34.43 69 | 0.79,637.0,343.0,147.0,7.0,5,0.1,1,43.33 70 | 0.76,661.5,416.5,122.5,7.0,2,0.1,1,33.87 71 | 0.76,661.5,416.5,122.5,7.0,3,0.1,1,34.07 72 | 0.76,661.5,416.5,122.5,7.0,4,0.1,1,34.14 73 | 0.76,661.5,416.5,122.5,7.0,5,0.1,1,33.67 74 | 0.74,686.0,245.0,220.5,3.5,2,0.1,1,13.43 75 | 0.74,686.0,245.0,220.5,3.5,3,0.1,1,13.71 76 | 0.74,686.0,245.0,220.5,3.5,4,0.1,1,13.48 77 | 0.74,686.0,245.0,220.5,3.5,5,0.1,1,13.7 78 | 0.71,710.5,269.5,220.5,3.5,2,0.1,1,13.8 79 | 0.71,710.5,269.5,220.5,3.5,3,0.1,1,14.28 80 | 0.71,710.5,269.5,220.5,3.5,4,0.1,1,13.87 81 | 0.71,710.5,269.5,220.5,3.5,5,0.1,1,14.27 82 | 0.69,735.0,294.0,220.5,3.5,2,0.1,1,14.28 83 | 0.69,735.0,294.0,220.5,3.5,3,0.1,1,14.61 84 | 0.69,735.0,294.0,220.5,3.5,4,0.1,1,14.3 85 | 0.69,735.0,294.0,220.5,3.5,5,0.1,1,14.45 86 | 0.66,759.5,318.5,220.5,3.5,2,0.1,1,13.9 87 | 0.66,759.5,318.5,220.5,3.5,3,0.1,1,13.72 88 | 0.66,759.5,318.5,220.5,3.5,4,0.1,1,13.88 89 | 0.66,759.5,318.5,220.5,3.5,5,0.1,1,13.65 90 | 0.64,784.0,343.0,220.5,3.5,2,0.1,1,19.37 91 | 0.64,784.0,343.0,220.5,3.5,3,0.1,1,19.43 92 | 0.64,784.0,343.0,220.5,3.5,4,0.1,1,19.34 93 | 0.64,784.0,343.0,220.5,3.5,5,0.1,1,19.32 94 | 0.62,808.5,367.5,220.5,3.5,2,0.1,1,14.34 95 | 0.62,808.5,367.5,220.5,3.5,3,0.1,1,14.5 96 | 0.62,808.5,367.5,220.5,3.5,4,0.1,1,14.33 97 | 0.62,808.5,367.5,220.5,3.5,5,0.1,1,14.27 98 | 0.98,514.5,294.0,110.25,7.0,2,0.1,2,25.95 99 | 0.98,514.5,294.0,110.25,7.0,3,0.1,2,25.63 100 | 0.98,514.5,294.0,110.25,7.0,4,0.1,2,26.13 101 | 0.98,514.5,294.0,110.25,7.0,5,0.1,2,25.89 102 | 0.9,563.5,318.5,122.5,7.0,2,0.1,2,32.54 103 | 0.9,563.5,318.5,122.5,7.0,3,0.1,2,29.44 104 | 0.9,563.5,318.5,122.5,7.0,4,0.1,2,29.36 105 | 0.9,563.5,318.5,122.5,7.0,5,0.1,2,34.2 106 | 0.86,588.0,294.0,147.0,7.0,2,0.1,2,30.91 107 | 0.86,588.0,294.0,147.0,7.0,3,0.1,2,25.63 108 | 0.86,588.0,294.0,147.0,7.0,4,0.1,2,27.36 109 | 0.86,588.0,294.0,147.0,7.0,5,0.1,2,31.9 110 | 0.82,612.5,318.5,147.0,7.0,2,0.1,2,27.38 111 | 0.82,612.5,318.5,147.0,7.0,3,0.1,2,25.02 112 | 0.82,612.5,318.5,147.0,7.0,4,0.1,2,24.8 113 | 0.82,612.5,318.5,147.0,7.0,5,0.1,2,28.79 114 | 0.79,637.0,343.0,147.0,7.0,2,0.1,2,41.07 115 | 0.79,637.0,343.0,147.0,7.0,3,0.1,2,34.62 116 | 0.79,637.0,343.0,147.0,7.0,4,0.1,2,33.87 117 | 0.79,637.0,343.0,147.0,7.0,5,0.1,2,42.86 118 | 0.76,661.5,416.5,122.5,7.0,2,0.1,2,33.91 119 | 0.76,661.5,416.5,122.5,7.0,3,0.1,2,34.07 120 | 0.76,661.5,416.5,122.5,7.0,4,0.1,2,34.17 121 | 0.76,661.5,416.5,122.5,7.0,5,0.1,2,33.78 122 | 0.74,686.0,245.0,220.5,3.5,2,0.1,2,13.39 123 | 0.74,686.0,245.0,220.5,3.5,3,0.1,2,13.72 124 | 0.74,686.0,245.0,220.5,3.5,4,0.1,2,13.57 125 | 0.74,686.0,245.0,220.5,3.5,5,0.1,2,13.79 126 | 0.71,710.5,269.5,220.5,3.5,2,0.1,2,13.67 
127 | 0.71,710.5,269.5,220.5,3.5,3,0.1,2,14.11 128 | 0.71,710.5,269.5,220.5,3.5,4,0.1,2,13.8 129 | 0.71,710.5,269.5,220.5,3.5,5,0.1,2,14.21 130 | 0.69,735.0,294.0,220.5,3.5,2,0.1,2,13.2 131 | 0.69,735.0,294.0,220.5,3.5,3,0.1,2,13.54 132 | 0.69,735.0,294.0,220.5,3.5,4,0.1,2,13.32 133 | 0.69,735.0,294.0,220.5,3.5,5,0.1,2,13.51 134 | 0.66,759.5,318.5,220.5,3.5,2,0.1,2,14.86 135 | 0.66,759.5,318.5,220.5,3.5,3,0.1,2,14.75 136 | 0.66,759.5,318.5,220.5,3.5,4,0.1,2,15.0 137 | 0.66,759.5,318.5,220.5,3.5,5,0.1,2,14.74 138 | 0.64,784.0,343.0,220.5,3.5,2,0.1,2,19.23 139 | 0.64,784.0,343.0,220.5,3.5,3,0.1,2,19.34 140 | 0.64,784.0,343.0,220.5,3.5,4,0.1,2,19.32 141 | 0.64,784.0,343.0,220.5,3.5,5,0.1,2,19.3 142 | 0.62,808.5,367.5,220.5,3.5,2,0.1,2,14.37 143 | 0.62,808.5,367.5,220.5,3.5,3,0.1,2,14.57 144 | 0.62,808.5,367.5,220.5,3.5,4,0.1,2,14.27 145 | 0.62,808.5,367.5,220.5,3.5,5,0.1,2,14.24 146 | 0.98,514.5,294.0,110.25,7.0,2,0.1,3,25.68 147 | 0.98,514.5,294.0,110.25,7.0,3,0.1,3,26.02 148 | 0.98,514.5,294.0,110.25,7.0,4,0.1,3,25.84 149 | 0.98,514.5,294.0,110.25,7.0,5,0.1,3,26.14 150 | 0.9,563.5,318.5,122.5,7.0,2,0.1,3,34.14 151 | 0.9,563.5,318.5,122.5,7.0,3,0.1,3,32.85 152 | 0.9,563.5,318.5,122.5,7.0,4,0.1,3,30.08 153 | 0.9,563.5,318.5,122.5,7.0,5,0.1,3,29.67 154 | 0.86,588.0,294.0,147.0,7.0,2,0.1,3,31.73 155 | 0.86,588.0,294.0,147.0,7.0,3,0.1,3,31.01 156 | 0.86,588.0,294.0,147.0,7.0,4,0.1,3,25.9 157 | 0.86,588.0,294.0,147.0,7.0,5,0.1,3,27.4 158 | 0.82,612.5,318.5,147.0,7.0,2,0.1,3,28.68 159 | 0.82,612.5,318.5,147.0,7.0,3,0.1,3,27.54 160 | 0.82,612.5,318.5,147.0,7.0,4,0.1,3,25.35 161 | 0.82,612.5,318.5,147.0,7.0,5,0.1,3,24.93 162 | 0.79,637.0,343.0,147.0,7.0,2,0.1,3,43.12 163 | 0.79,637.0,343.0,147.0,7.0,3,0.1,3,41.22 164 | 0.79,637.0,343.0,147.0,7.0,4,0.1,3,35.1 165 | 0.79,637.0,343.0,147.0,7.0,5,0.1,3,34.29 166 | 0.76,661.5,416.5,122.5,7.0,2,0.1,3,33.85 167 | 0.76,661.5,416.5,122.5,7.0,3,0.1,3,34.11 168 | 0.76,661.5,416.5,122.5,7.0,4,0.1,3,34.48 169 | 0.76,661.5,416.5,122.5,7.0,5,0.1,3,34.5 170 | 0.74,686.0,245.0,220.5,3.5,2,0.1,3,13.6 171 | 0.74,686.0,245.0,220.5,3.5,3,0.1,3,13.36 172 | 0.74,686.0,245.0,220.5,3.5,4,0.1,3,13.65 173 | 0.74,686.0,245.0,220.5,3.5,5,0.1,3,13.49 174 | 0.71,710.5,269.5,220.5,3.5,2,0.1,3,14.14 175 | 0.71,710.5,269.5,220.5,3.5,3,0.1,3,13.77 176 | 0.71,710.5,269.5,220.5,3.5,4,0.1,3,14.3 177 | 0.71,710.5,269.5,220.5,3.5,5,0.1,3,13.87 178 | 0.69,735.0,294.0,220.5,3.5,2,0.1,3,14.44 179 | 0.69,735.0,294.0,220.5,3.5,3,0.1,3,14.27 180 | 0.69,735.0,294.0,220.5,3.5,4,0.1,3,14.67 181 | 0.69,735.0,294.0,220.5,3.5,5,0.1,3,14.4 182 | 0.66,759.5,318.5,220.5,3.5,2,0.1,3,13.46 183 | 0.66,759.5,318.5,220.5,3.5,3,0.1,3,13.7 184 | 0.66,759.5,318.5,220.5,3.5,4,0.1,3,13.59 185 | 0.66,759.5,318.5,220.5,3.5,5,0.1,3,13.83 186 | 0.64,784.0,343.0,220.5,3.5,2,0.1,3,19.14 187 | 0.64,784.0,343.0,220.5,3.5,3,0.1,3,19.18 188 | 0.64,784.0,343.0,220.5,3.5,4,0.1,3,19.37 189 | 0.64,784.0,343.0,220.5,3.5,5,0.1,3,19.29 190 | 0.62,808.5,367.5,220.5,3.5,2,0.1,3,14.09 191 | 0.62,808.5,367.5,220.5,3.5,3,0.1,3,14.23 192 | 0.62,808.5,367.5,220.5,3.5,4,0.1,3,14.14 193 | 0.62,808.5,367.5,220.5,3.5,5,0.1,3,13.89 194 | 0.98,514.5,294.0,110.25,7.0,2,0.1,4,25.91 195 | 0.98,514.5,294.0,110.25,7.0,3,0.1,4,25.72 196 | 0.98,514.5,294.0,110.25,7.0,4,0.1,4,26.18 197 | 0.98,514.5,294.0,110.25,7.0,5,0.1,4,25.87 198 | 0.9,563.5,318.5,122.5,7.0,2,0.1,4,29.34 199 | 0.9,563.5,318.5,122.5,7.0,3,0.1,4,33.91 200 | 0.9,563.5,318.5,122.5,7.0,4,0.1,4,32.83 201 | 0.9,563.5,318.5,122.5,7.0,5,0.1,4,29.92 202 | 
0.86,588.0,294.0,147.0,7.0,2,0.1,4,27.17 203 | 0.86,588.0,294.0,147.0,7.0,3,0.1,4,31.76 204 | 0.86,588.0,294.0,147.0,7.0,4,0.1,4,31.06 205 | 0.86,588.0,294.0,147.0,7.0,5,0.1,4,25.81 206 | 0.82,612.5,318.5,147.0,7.0,2,0.1,4,24.61 207 | 0.82,612.5,318.5,147.0,7.0,3,0.1,4,28.61 208 | 0.82,612.5,318.5,147.0,7.0,4,0.1,4,27.57 209 | 0.82,612.5,318.5,147.0,7.0,5,0.1,4,25.16 210 | 0.79,637.0,343.0,147.0,7.0,2,0.1,4,34.25 211 | 0.79,637.0,343.0,147.0,7.0,3,0.1,4,43.3 212 | 0.79,637.0,343.0,147.0,7.0,4,0.1,4,41.86 213 | 0.79,637.0,343.0,147.0,7.0,5,0.1,4,35.29 214 | 0.76,661.5,416.5,122.5,7.0,2,0.1,4,34.11 215 | 0.76,661.5,416.5,122.5,7.0,3,0.1,4,33.62 216 | 0.76,661.5,416.5,122.5,7.0,4,0.1,4,33.89 217 | 0.76,661.5,416.5,122.5,7.0,5,0.1,4,34.05 218 | 0.74,686.0,245.0,220.5,3.5,2,0.1,4,13.2 219 | 0.74,686.0,245.0,220.5,3.5,3,0.1,4,13.36 220 | 0.74,686.0,245.0,220.5,3.5,4,0.1,4,13.21 221 | 0.74,686.0,245.0,220.5,3.5,5,0.1,4,13.53 222 | 0.71,710.5,269.5,220.5,3.5,2,0.1,4,13.67 223 | 0.71,710.5,269.5,220.5,3.5,3,0.1,4,14.12 224 | 0.71,710.5,269.5,220.5,3.5,4,0.1,4,13.79 225 | 0.71,710.5,269.5,220.5,3.5,5,0.1,4,14.2 226 | 0.69,735.0,294.0,220.5,3.5,2,0.1,4,14.29 227 | 0.69,735.0,294.0,220.5,3.5,3,0.1,4,14.49 228 | 0.69,735.0,294.0,220.5,3.5,4,0.1,4,14.42 229 | 0.69,735.0,294.0,220.5,3.5,5,0.1,4,14.73 230 | 0.66,759.5,318.5,220.5,3.5,2,0.1,4,14.86 231 | 0.66,759.5,318.5,220.5,3.5,3,0.1,4,14.67 232 | 0.66,759.5,318.5,220.5,3.5,4,0.1,4,15.0 233 | 0.66,759.5,318.5,220.5,3.5,5,0.1,4,14.83 234 | 0.64,784.0,343.0,220.5,3.5,2,0.1,4,19.24 235 | 0.64,784.0,343.0,220.5,3.5,3,0.1,4,19.25 236 | 0.64,784.0,343.0,220.5,3.5,4,0.1,4,19.42 237 | 0.64,784.0,343.0,220.5,3.5,5,0.1,4,19.48 238 | 0.62,808.5,367.5,220.5,3.5,2,0.1,4,14.37 239 | 0.62,808.5,367.5,220.5,3.5,3,0.1,4,14.34 240 | 0.62,808.5,367.5,220.5,3.5,4,0.1,4,14.28 241 | 0.62,808.5,367.5,220.5,3.5,5,0.1,4,14.47 242 | 0.98,514.5,294.0,110.25,7.0,2,0.1,5,25.64 243 | 0.98,514.5,294.0,110.25,7.0,3,0.1,5,25.98 244 | 0.98,514.5,294.0,110.25,7.0,4,0.1,5,25.88 245 | 0.98,514.5,294.0,110.25,7.0,5,0.1,5,26.18 246 | 0.9,563.5,318.5,122.5,7.0,2,0.1,5,29.82 247 | 0.9,563.5,318.5,122.5,7.0,3,0.1,5,29.52 248 | 0.9,563.5,318.5,122.5,7.0,4,0.1,5,34.45 249 | 0.9,563.5,318.5,122.5,7.0,5,0.1,5,33.01 250 | 0.86,588.0,294.0,147.0,7.0,2,0.1,5,25.82 251 | 0.86,588.0,294.0,147.0,7.0,3,0.1,5,27.33 252 | 0.86,588.0,294.0,147.0,7.0,4,0.1,5,32.04 253 | 0.86,588.0,294.0,147.0,7.0,5,0.1,5,31.28 254 | 0.82,612.5,318.5,147.0,7.0,2,0.1,5,25.11 255 | 0.82,612.5,318.5,147.0,7.0,3,0.1,5,24.77 256 | 0.82,612.5,318.5,147.0,7.0,4,0.1,5,28.88 257 | 0.82,612.5,318.5,147.0,7.0,5,0.1,5,27.69 258 | 0.79,637.0,343.0,147.0,7.0,2,0.1,5,34.99 259 | 0.79,637.0,343.0,147.0,7.0,3,0.1,5,34.18 260 | 0.79,637.0,343.0,147.0,7.0,4,0.1,5,43.14 261 | 0.79,637.0,343.0,147.0,7.0,5,0.1,5,41.26 262 | 0.76,661.5,416.5,122.5,7.0,2,0.1,5,34.25 263 | 0.76,661.5,416.5,122.5,7.0,3,0.1,5,34.35 264 | 0.76,661.5,416.5,122.5,7.0,4,0.1,5,33.64 265 | 0.76,661.5,416.5,122.5,7.0,5,0.1,5,33.88 266 | 0.74,686.0,245.0,220.5,3.5,2,0.1,5,13.65 267 | 0.74,686.0,245.0,220.5,3.5,3,0.1,5,13.44 268 | 0.74,686.0,245.0,220.5,3.5,4,0.1,5,13.72 269 | 0.74,686.0,245.0,220.5,3.5,5,0.1,5,13.5 270 | 0.71,710.5,269.5,220.5,3.5,2,0.1,5,14.18 271 | 0.71,710.5,269.5,220.5,3.5,3,0.1,5,13.75 272 | 0.71,710.5,269.5,220.5,3.5,4,0.1,5,14.26 273 | 0.71,710.5,269.5,220.5,3.5,5,0.1,5,13.89 274 | 0.69,735.0,294.0,220.5,3.5,2,0.1,5,14.55 275 | 0.69,735.0,294.0,220.5,3.5,3,0.1,5,14.28 276 | 0.69,735.0,294.0,220.5,3.5,4,0.1,5,14.46 277 | 
0.69,735.0,294.0,220.5,3.5,5,0.1,5,14.39 278 | 0.66,759.5,318.5,220.5,3.5,2,0.1,5,14.54 279 | 0.66,759.5,318.5,220.5,3.5,3,0.1,5,14.81 280 | 0.66,759.5,318.5,220.5,3.5,4,0.1,5,14.65 281 | 0.66,759.5,318.5,220.5,3.5,5,0.1,5,14.87 282 | 0.64,784.0,343.0,220.5,3.5,2,0.1,5,19.24 283 | 0.64,784.0,343.0,220.5,3.5,3,0.1,5,19.18 284 | 0.64,784.0,343.0,220.5,3.5,4,0.1,5,19.26 285 | 0.64,784.0,343.0,220.5,3.5,5,0.1,5,19.29 286 | 0.62,808.5,367.5,220.5,3.5,2,0.1,5,14.24 287 | 0.62,808.5,367.5,220.5,3.5,3,0.1,5,13.97 288 | 0.62,808.5,367.5,220.5,3.5,4,0.1,5,13.99 289 | 0.62,808.5,367.5,220.5,3.5,5,0.1,5,14.15 290 | 0.98,514.5,294.0,110.25,7.0,2,0.25,1,29.79 291 | 0.98,514.5,294.0,110.25,7.0,3,0.25,1,29.79 292 | 0.98,514.5,294.0,110.25,7.0,4,0.25,1,29.28 293 | 0.98,514.5,294.0,110.25,7.0,5,0.25,1,29.49 294 | 0.9,563.5,318.5,122.5,7.0,2,0.25,1,36.12 295 | 0.9,563.5,318.5,122.5,7.0,3,0.25,1,33.17 296 | 0.9,563.5,318.5,122.5,7.0,4,0.25,1,32.71 297 | 0.9,563.5,318.5,122.5,7.0,5,0.25,1,37.58 298 | 0.86,588.0,294.0,147.0,7.0,2,0.25,1,33.98 299 | 0.86,588.0,294.0,147.0,7.0,3,0.25,1,28.61 300 | 0.86,588.0,294.0,147.0,7.0,4,0.25,1,30.12 301 | 0.86,588.0,294.0,147.0,7.0,5,0.25,1,34.73 302 | 0.82,612.5,318.5,147.0,7.0,2,0.25,1,30.17 303 | 0.82,612.5,318.5,147.0,7.0,3,0.25,1,27.84 304 | 0.82,612.5,318.5,147.0,7.0,4,0.25,1,27.25 305 | 0.82,612.5,318.5,147.0,7.0,5,0.25,1,31.39 306 | 0.79,637.0,343.0,147.0,7.0,2,0.25,1,43.8 307 | 0.79,637.0,343.0,147.0,7.0,3,0.25,1,37.81 308 | 0.79,637.0,343.0,147.0,7.0,4,0.25,1,36.85 309 | 0.79,637.0,343.0,147.0,7.0,5,0.25,1,45.52 310 | 0.76,661.5,416.5,122.5,7.0,2,0.25,1,36.85 311 | 0.76,661.5,416.5,122.5,7.0,3,0.25,1,37.58 312 | 0.76,661.5,416.5,122.5,7.0,4,0.25,1,37.45 313 | 0.76,661.5,416.5,122.5,7.0,5,0.25,1,36.62 314 | 0.74,686.0,245.0,220.5,3.5,2,0.25,1,15.19 315 | 0.74,686.0,245.0,220.5,3.5,3,0.25,1,15.5 316 | 0.74,686.0,245.0,220.5,3.5,4,0.25,1,15.28 317 | 0.74,686.0,245.0,220.5,3.5,5,0.25,1,15.5 318 | 0.71,710.5,269.5,220.5,3.5,2,0.25,1,15.42 319 | 0.71,710.5,269.5,220.5,3.5,3,0.25,1,15.85 320 | 0.71,710.5,269.5,220.5,3.5,4,0.25,1,15.44 321 | 0.71,710.5,269.5,220.5,3.5,5,0.25,1,15.81 322 | 0.69,735.0,294.0,220.5,3.5,2,0.25,1,15.21 323 | 0.69,735.0,294.0,220.5,3.5,3,0.25,1,15.63 324 | 0.69,735.0,294.0,220.5,3.5,4,0.25,1,15.48 325 | 0.69,735.0,294.0,220.5,3.5,5,0.25,1,15.78 326 | 0.66,759.5,318.5,220.5,3.5,2,0.25,1,16.39 327 | 0.66,759.5,318.5,220.5,3.5,3,0.25,1,16.27 328 | 0.66,759.5,318.5,220.5,3.5,4,0.25,1,16.39 329 | 0.66,759.5,318.5,220.5,3.5,5,0.25,1,16.19 330 | 0.64,784.0,343.0,220.5,3.5,2,0.25,1,21.13 331 | 0.64,784.0,343.0,220.5,3.5,3,0.25,1,21.19 332 | 0.64,784.0,343.0,220.5,3.5,4,0.25,1,21.09 333 | 0.64,784.0,343.0,220.5,3.5,5,0.25,1,21.08 334 | 0.62,808.5,367.5,220.5,3.5,2,0.25,1,15.77 335 | 0.62,808.5,367.5,220.5,3.5,3,0.25,1,15.95 336 | 0.62,808.5,367.5,220.5,3.5,4,0.25,1,15.77 337 | 0.62,808.5,367.5,220.5,3.5,5,0.25,1,15.76 338 | 0.98,514.5,294.0,110.25,7.0,2,0.25,2,29.62 339 | 0.98,514.5,294.0,110.25,7.0,3,0.25,2,29.69 340 | 0.98,514.5,294.0,110.25,7.0,4,0.25,2,30.18 341 | 0.98,514.5,294.0,110.25,7.0,5,0.25,2,30.02 342 | 0.9,563.5,318.5,122.5,7.0,2,0.25,2,35.56 343 | 0.9,563.5,318.5,122.5,7.0,3,0.25,2,32.64 344 | 0.9,563.5,318.5,122.5,7.0,4,0.25,2,32.77 345 | 0.9,563.5,318.5,122.5,7.0,5,0.25,2,37.72 346 | 0.86,588.0,294.0,147.0,7.0,2,0.25,2,33.37 347 | 0.86,588.0,294.0,147.0,7.0,3,0.25,2,27.89 348 | 0.86,588.0,294.0,147.0,7.0,4,0.25,2,29.9 349 | 0.86,588.0,294.0,147.0,7.0,5,0.25,2,34.52 350 | 0.82,612.5,318.5,147.0,7.0,2,0.25,2,28.27 351 | 
0.82,612.5,318.5,147.0,7.0,3,0.25,2,26.96 352 | 0.82,612.5,318.5,147.0,7.0,4,0.25,2,26.72 353 | 0.82,612.5,318.5,147.0,7.0,5,0.25,2,29.88 354 | 0.79,637.0,343.0,147.0,7.0,2,0.25,2,43.86 355 | 0.79,637.0,343.0,147.0,7.0,3,0.25,2,37.41 356 | 0.79,637.0,343.0,147.0,7.0,4,0.25,2,36.77 357 | 0.79,637.0,343.0,147.0,7.0,5,0.25,2,45.97 358 | 0.76,661.5,416.5,122.5,7.0,2,0.25,2,36.87 359 | 0.76,661.5,416.5,122.5,7.0,3,0.25,2,37.35 360 | 0.76,661.5,416.5,122.5,7.0,4,0.25,2,37.28 361 | 0.76,661.5,416.5,122.5,7.0,5,0.25,2,36.81 362 | 0.74,686.0,245.0,220.5,3.5,2,0.25,2,14.73 363 | 0.74,686.0,245.0,220.5,3.5,3,0.25,2,15.1 364 | 0.74,686.0,245.0,220.5,3.5,4,0.25,2,15.18 365 | 0.74,686.0,245.0,220.5,3.5,5,0.25,2,15.44 366 | 0.71,710.5,269.5,220.5,3.5,2,0.25,2,14.91 367 | 0.71,710.5,269.5,220.5,3.5,3,0.25,2,15.4 368 | 0.71,710.5,269.5,220.5,3.5,4,0.25,2,14.94 369 | 0.71,710.5,269.5,220.5,3.5,5,0.25,2,15.32 370 | 0.69,735.0,294.0,220.5,3.5,2,0.25,2,15.52 371 | 0.69,735.0,294.0,220.5,3.5,3,0.25,2,15.85 372 | 0.69,735.0,294.0,220.5,3.5,4,0.25,2,15.66 373 | 0.69,735.0,294.0,220.5,3.5,5,0.25,2,15.99 374 | 0.66,759.5,318.5,220.5,3.5,2,0.25,2,15.89 375 | 0.66,759.5,318.5,220.5,3.5,3,0.25,2,15.85 376 | 0.66,759.5,318.5,220.5,3.5,4,0.25,2,16.22 377 | 0.66,759.5,318.5,220.5,3.5,5,0.25,2,15.87 378 | 0.64,784.0,343.0,220.5,3.5,2,0.25,2,20.47 379 | 0.64,784.0,343.0,220.5,3.5,3,0.25,2,20.56 380 | 0.64,784.0,343.0,220.5,3.5,4,0.25,2,20.48 381 | 0.64,784.0,343.0,220.5,3.5,5,0.25,2,20.43 382 | 0.62,808.5,367.5,220.5,3.5,2,0.25,2,15.32 383 | 0.62,808.5,367.5,220.5,3.5,3,0.25,2,15.64 384 | 0.62,808.5,367.5,220.5,3.5,4,0.25,2,15.14 385 | 0.62,808.5,367.5,220.5,3.5,5,0.25,2,15.3 386 | 0.98,514.5,294.0,110.25,7.0,2,0.25,3,29.43 387 | 0.98,514.5,294.0,110.25,7.0,3,0.25,3,29.78 388 | 0.98,514.5,294.0,110.25,7.0,4,0.25,3,30.1 389 | 0.98,514.5,294.0,110.25,7.0,5,0.25,3,30.19 390 | 0.9,563.5,318.5,122.5,7.0,2,0.25,3,36.35 391 | 0.9,563.5,318.5,122.5,7.0,3,0.25,3,35.1 392 | 0.9,563.5,318.5,122.5,7.0,4,0.25,3,32.83 393 | 0.9,563.5,318.5,122.5,7.0,5,0.25,3,32.46 394 | 0.86,588.0,294.0,147.0,7.0,2,0.25,3,33.52 395 | 0.86,588.0,294.0,147.0,7.0,3,0.25,3,32.93 396 | 0.86,588.0,294.0,147.0,7.0,4,0.25,3,28.38 397 | 0.86,588.0,294.0,147.0,7.0,5,0.25,3,29.82 398 | 0.82,612.5,318.5,147.0,7.0,2,0.25,3,28.77 399 | 0.82,612.5,318.5,147.0,7.0,3,0.25,3,27.76 400 | 0.82,612.5,318.5,147.0,7.0,4,0.25,3,26.95 401 | 0.82,612.5,318.5,147.0,7.0,5,0.25,3,26.41 402 | 0.79,637.0,343.0,147.0,7.0,2,0.25,3,45.13 403 | 0.79,637.0,343.0,147.0,7.0,3,0.25,3,43.66 404 | 0.79,637.0,343.0,147.0,7.0,4,0.25,3,37.76 405 | 0.79,637.0,343.0,147.0,7.0,5,0.25,3,36.87 406 | 0.76,661.5,416.5,122.5,7.0,2,0.25,3,36.07 407 | 0.76,661.5,416.5,122.5,7.0,3,0.25,3,36.44 408 | 0.76,661.5,416.5,122.5,7.0,4,0.25,3,37.28 409 | 0.76,661.5,416.5,122.5,7.0,5,0.25,3,37.29 410 | 0.74,686.0,245.0,220.5,3.5,2,0.25,3,14.49 411 | 0.74,686.0,245.0,220.5,3.5,3,0.25,3,13.79 412 | 0.74,686.0,245.0,220.5,3.5,4,0.25,3,14.72 413 | 0.74,686.0,245.0,220.5,3.5,5,0.25,3,14.76 414 | 0.71,710.5,269.5,220.5,3.5,2,0.25,3,14.92 415 | 0.71,710.5,269.5,220.5,3.5,3,0.25,3,14.74 416 | 0.71,710.5,269.5,220.5,3.5,4,0.25,3,15.57 417 | 0.71,710.5,269.5,220.5,3.5,5,0.25,3,14.94 418 | 0.69,735.0,294.0,220.5,3.5,2,0.25,3,14.92 419 | 0.69,735.0,294.0,220.5,3.5,3,0.25,3,14.38 420 | 0.69,735.0,294.0,220.5,3.5,4,0.25,3,15.44 421 | 0.69,735.0,294.0,220.5,3.5,5,0.25,3,15.17 422 | 0.66,759.5,318.5,220.5,3.5,2,0.25,3,15.53 423 | 0.66,759.5,318.5,220.5,3.5,3,0.25,3,15.8 424 | 0.66,759.5,318.5,220.5,3.5,4,0.25,3,16.14 425 | 
0.66,759.5,318.5,220.5,3.5,5,0.25,3,16.26 426 | 0.64,784.0,343.0,220.5,3.5,2,0.25,3,19.87 427 | 0.64,784.0,343.0,220.5,3.5,3,0.25,3,20.03 428 | 0.64,784.0,343.0,220.5,3.5,4,0.25,3,20.46 429 | 0.64,784.0,343.0,220.5,3.5,5,0.25,3,20.28 430 | 0.62,808.5,367.5,220.5,3.5,2,0.25,3,14.89 431 | 0.62,808.5,367.5,220.5,3.5,3,0.25,3,14.96 432 | 0.62,808.5,367.5,220.5,3.5,4,0.25,3,14.89 433 | 0.62,808.5,367.5,220.5,3.5,5,0.25,3,14.35 434 | 0.98,514.5,294.0,110.25,7.0,2,0.25,4,29.61 435 | 0.98,514.5,294.0,110.25,7.0,3,0.25,4,29.59 436 | 0.98,514.5,294.0,110.25,7.0,4,0.25,4,30.19 437 | 0.98,514.5,294.0,110.25,7.0,5,0.25,4,30.12 438 | 0.9,563.5,318.5,122.5,7.0,2,0.25,4,32.12 439 | 0.9,563.5,318.5,122.5,7.0,3,0.25,4,37.12 440 | 0.9,563.5,318.5,122.5,7.0,4,0.25,4,36.16 441 | 0.9,563.5,318.5,122.5,7.0,5,0.25,4,33.16 442 | 0.86,588.0,294.0,147.0,7.0,2,0.25,4,29.45 443 | 0.86,588.0,294.0,147.0,7.0,3,0.25,4,34.19 444 | 0.86,588.0,294.0,147.0,7.0,4,0.25,4,33.93 445 | 0.86,588.0,294.0,147.0,7.0,5,0.25,4,28.31 446 | 0.82,612.5,318.5,147.0,7.0,2,0.25,4,26.3 447 | 0.82,612.5,318.5,147.0,7.0,3,0.25,4,29.43 448 | 0.82,612.5,318.5,147.0,7.0,4,0.25,4,28.76 449 | 0.82,612.5,318.5,147.0,7.0,5,0.25,4,27.34 450 | 0.79,637.0,343.0,147.0,7.0,2,0.25,4,36.26 451 | 0.79,637.0,343.0,147.0,7.0,3,0.25,4,45.48 452 | 0.79,637.0,343.0,147.0,7.0,4,0.25,4,44.16 453 | 0.79,637.0,343.0,147.0,7.0,5,0.25,4,37.26 454 | 0.76,661.5,416.5,122.5,7.0,2,0.25,4,37.2 455 | 0.76,661.5,416.5,122.5,7.0,3,0.25,4,36.76 456 | 0.76,661.5,416.5,122.5,7.0,4,0.25,4,37.05 457 | 0.76,661.5,416.5,122.5,7.0,5,0.25,4,37.51 458 | 0.74,686.0,245.0,220.5,3.5,2,0.25,4,14.92 459 | 0.74,686.0,245.0,220.5,3.5,3,0.25,4,15.24 460 | 0.74,686.0,245.0,220.5,3.5,4,0.25,4,15.03 461 | 0.74,686.0,245.0,220.5,3.5,5,0.25,4,15.35 462 | 0.71,710.5,269.5,220.5,3.5,2,0.25,4,14.67 463 | 0.71,710.5,269.5,220.5,3.5,3,0.25,4,15.09 464 | 0.71,710.5,269.5,220.5,3.5,4,0.25,4,15.2 465 | 0.71,710.5,269.5,220.5,3.5,5,0.25,4,15.64 466 | 0.69,735.0,294.0,220.5,3.5,2,0.25,4,15.37 467 | 0.69,735.0,294.0,220.5,3.5,3,0.25,4,15.73 468 | 0.69,735.0,294.0,220.5,3.5,4,0.25,4,15.83 469 | 0.69,735.0,294.0,220.5,3.5,5,0.25,4,16.13 470 | 0.66,759.5,318.5,220.5,3.5,2,0.25,4,15.95 471 | 0.66,759.5,318.5,220.5,3.5,3,0.25,4,15.59 472 | 0.66,759.5,318.5,220.5,3.5,4,0.25,4,16.17 473 | 0.66,759.5,318.5,220.5,3.5,5,0.25,4,16.14 474 | 0.64,784.0,343.0,220.5,3.5,2,0.25,4,19.65 475 | 0.64,784.0,343.0,220.5,3.5,3,0.25,4,19.76 476 | 0.64,784.0,343.0,220.5,3.5,4,0.25,4,20.37 477 | 0.64,784.0,343.0,220.5,3.5,5,0.25,4,19.9 478 | 0.62,808.5,367.5,220.5,3.5,2,0.25,4,15.41 479 | 0.62,808.5,367.5,220.5,3.5,3,0.25,4,15.56 480 | 0.62,808.5,367.5,220.5,3.5,4,0.25,4,15.07 481 | 0.62,808.5,367.5,220.5,3.5,5,0.25,4,15.38 482 | 0.98,514.5,294.0,110.25,7.0,2,0.25,5,29.53 483 | 0.98,514.5,294.0,110.25,7.0,3,0.25,5,29.77 484 | 0.98,514.5,294.0,110.25,7.0,4,0.25,5,30.0 485 | 0.98,514.5,294.0,110.25,7.0,5,0.25,5,30.2 486 | 0.9,563.5,318.5,122.5,7.0,2,0.25,5,32.25 487 | 0.9,563.5,318.5,122.5,7.0,3,0.25,5,32.0 488 | 0.9,563.5,318.5,122.5,7.0,4,0.25,5,37.19 489 | 0.9,563.5,318.5,122.5,7.0,5,0.25,5,35.62 490 | 0.86,588.0,294.0,147.0,7.0,2,0.25,5,28.02 491 | 0.86,588.0,294.0,147.0,7.0,3,0.25,5,29.43 492 | 0.86,588.0,294.0,147.0,7.0,4,0.25,5,34.15 493 | 0.86,588.0,294.0,147.0,7.0,5,0.25,5,33.47 494 | 0.82,612.5,318.5,147.0,7.0,2,0.25,5,26.53 495 | 0.82,612.5,318.5,147.0,7.0,3,0.25,5,26.08 496 | 0.82,612.5,318.5,147.0,7.0,4,0.25,5,29.31 497 | 0.82,612.5,318.5,147.0,7.0,5,0.25,5,28.14 498 | 0.79,637.0,343.0,147.0,7.0,2,0.25,5,37.54 499 | 
0.79,637.0,343.0,147.0,7.0,3,0.25,5,36.66 500 | 0.79,637.0,343.0,147.0,7.0,4,0.25,5,45.28 501 | 0.79,637.0,343.0,147.0,7.0,5,0.25,5,43.73 502 | 0.76,661.5,416.5,122.5,7.0,2,0.25,5,36.93 503 | 0.76,661.5,416.5,122.5,7.0,3,0.25,5,37.01 504 | 0.76,661.5,416.5,122.5,7.0,4,0.25,5,35.73 505 | 0.76,661.5,416.5,122.5,7.0,5,0.25,5,36.15 506 | 0.74,686.0,245.0,220.5,3.5,2,0.25,5,14.48 507 | 0.74,686.0,245.0,220.5,3.5,3,0.25,5,14.58 508 | 0.74,686.0,245.0,220.5,3.5,4,0.25,5,14.81 509 | 0.74,686.0,245.0,220.5,3.5,5,0.25,5,14.03 510 | 0.71,710.5,269.5,220.5,3.5,2,0.25,5,15.27 511 | 0.71,710.5,269.5,220.5,3.5,3,0.25,5,14.71 512 | 0.71,710.5,269.5,220.5,3.5,4,0.25,5,15.23 513 | 0.71,710.5,269.5,220.5,3.5,5,0.25,5,14.97 514 | 0.69,735.0,294.0,220.5,3.5,2,0.25,5,15.14 515 | 0.69,735.0,294.0,220.5,3.5,3,0.25,5,14.97 516 | 0.69,735.0,294.0,220.5,3.5,4,0.25,5,15.22 517 | 0.69,735.0,294.0,220.5,3.5,5,0.25,5,14.6 518 | 0.66,759.5,318.5,220.5,3.5,2,0.25,5,15.83 519 | 0.66,759.5,318.5,220.5,3.5,3,0.25,5,16.03 520 | 0.66,759.5,318.5,220.5,3.5,4,0.25,5,15.8 521 | 0.66,759.5,318.5,220.5,3.5,5,0.25,5,16.06 522 | 0.64,784.0,343.0,220.5,3.5,2,0.25,5,20.13 523 | 0.64,784.0,343.0,220.5,3.5,3,0.25,5,20.01 524 | 0.64,784.0,343.0,220.5,3.5,4,0.25,5,20.19 525 | 0.64,784.0,343.0,220.5,3.5,5,0.25,5,20.29 526 | 0.62,808.5,367.5,220.5,3.5,2,0.25,5,15.19 527 | 0.62,808.5,367.5,220.5,3.5,3,0.25,5,14.61 528 | 0.62,808.5,367.5,220.5,3.5,4,0.25,5,14.61 529 | 0.62,808.5,367.5,220.5,3.5,5,0.25,5,14.75 530 | 0.98,514.5,294.0,110.25,7.0,2,0.4,1,33.37 531 | 0.98,514.5,294.0,110.25,7.0,3,0.4,1,33.34 532 | 0.98,514.5,294.0,110.25,7.0,4,0.4,1,32.83 533 | 0.98,514.5,294.0,110.25,7.0,5,0.4,1,33.04 534 | 0.9,563.5,318.5,122.5,7.0,2,0.4,1,39.28 535 | 0.9,563.5,318.5,122.5,7.0,3,0.4,1,36.38 536 | 0.9,563.5,318.5,122.5,7.0,4,0.4,1,35.92 537 | 0.9,563.5,318.5,122.5,7.0,5,0.4,1,40.99 538 | 0.86,588.0,294.0,147.0,7.0,2,0.4,1,35.99 539 | 0.86,588.0,294.0,147.0,7.0,3,0.4,1,30.66 540 | 0.86,588.0,294.0,147.0,7.0,4,0.4,1,31.7 541 | 0.86,588.0,294.0,147.0,7.0,5,0.4,1,36.73 542 | 0.82,612.5,318.5,147.0,7.0,2,0.4,1,31.71 543 | 0.82,612.5,318.5,147.0,7.0,3,0.4,1,29.13 544 | 0.82,612.5,318.5,147.0,7.0,4,0.4,1,28.99 545 | 0.82,612.5,318.5,147.0,7.0,5,0.4,1,33.54 546 | 0.79,637.0,343.0,147.0,7.0,2,0.4,1,45.29 547 | 0.79,637.0,343.0,147.0,7.0,3,0.4,1,39.07 548 | 0.79,637.0,343.0,147.0,7.0,4,0.4,1,38.35 549 | 0.79,637.0,343.0,147.0,7.0,5,0.4,1,46.94 550 | 0.76,661.5,416.5,122.5,7.0,2,0.4,1,39.55 551 | 0.76,661.5,416.5,122.5,7.0,3,0.4,1,40.85 552 | 0.76,661.5,416.5,122.5,7.0,4,0.4,1,40.63 553 | 0.76,661.5,416.5,122.5,7.0,5,0.4,1,39.48 554 | 0.74,686.0,245.0,220.5,3.5,2,0.4,1,16.94 555 | 0.74,686.0,245.0,220.5,3.5,3,0.4,1,17.25 556 | 0.74,686.0,245.0,220.5,3.5,4,0.4,1,17.03 557 | 0.74,686.0,245.0,220.5,3.5,5,0.4,1,17.25 558 | 0.71,710.5,269.5,220.5,3.5,2,0.4,1,17.1 559 | 0.71,710.5,269.5,220.5,3.5,3,0.4,1,17.51 560 | 0.71,710.5,269.5,220.5,3.5,4,0.4,1,17.12 561 | 0.71,710.5,269.5,220.5,3.5,5,0.4,1,17.47 562 | 0.69,735.0,294.0,220.5,3.5,2,0.4,1,16.5 563 | 0.69,735.0,294.0,220.5,3.5,3,0.4,1,17.0 564 | 0.69,735.0,294.0,220.5,3.5,4,0.4,1,16.87 565 | 0.69,735.0,294.0,220.5,3.5,5,0.4,1,17.2 566 | 0.66,759.5,318.5,220.5,3.5,2,0.4,1,18.14 567 | 0.66,759.5,318.5,220.5,3.5,3,0.4,1,18.03 568 | 0.66,759.5,318.5,220.5,3.5,4,0.4,1,18.14 569 | 0.66,759.5,318.5,220.5,3.5,5,0.4,1,17.95 570 | 0.64,784.0,343.0,220.5,3.5,2,0.4,1,22.72 571 | 0.64,784.0,343.0,220.5,3.5,3,0.4,1,22.73 572 | 0.64,784.0,343.0,220.5,3.5,4,0.4,1,22.72 573 | 0.64,784.0,343.0,220.5,3.5,5,0.4,1,22.53 574 | 
0.62,808.5,367.5,220.5,3.5,2,0.4,1,17.2 575 | 0.62,808.5,367.5,220.5,3.5,3,0.4,1,17.21 576 | 0.62,808.5,367.5,220.5,3.5,4,0.4,1,17.15 577 | 0.62,808.5,367.5,220.5,3.5,5,0.4,1,17.2 578 | 0.98,514.5,294.0,110.25,7.0,2,0.4,2,32.96 579 | 0.98,514.5,294.0,110.25,7.0,3,0.4,2,33.13 580 | 0.98,514.5,294.0,110.25,7.0,4,0.4,2,33.94 581 | 0.98,514.5,294.0,110.25,7.0,5,0.4,2,33.78 582 | 0.9,563.5,318.5,122.5,7.0,2,0.4,2,38.35 583 | 0.9,563.5,318.5,122.5,7.0,3,0.4,2,35.39 584 | 0.9,563.5,318.5,122.5,7.0,4,0.4,2,34.94 585 | 0.9,563.5,318.5,122.5,7.0,5,0.4,2,40.66 586 | 0.86,588.0,294.0,147.0,7.0,2,0.4,2,35.48 587 | 0.86,588.0,294.0,147.0,7.0,3,0.4,2,30.53 588 | 0.86,588.0,294.0,147.0,7.0,4,0.4,2,32.28 589 | 0.86,588.0,294.0,147.0,7.0,5,0.4,2,36.86 590 | 0.82,612.5,318.5,147.0,7.0,2,0.4,2,30.34 591 | 0.82,612.5,318.5,147.0,7.0,3,0.4,2,27.93 592 | 0.82,612.5,318.5,147.0,7.0,4,0.4,2,28.95 593 | 0.82,612.5,318.5,147.0,7.0,5,0.4,2,32.92 594 | 0.79,637.0,343.0,147.0,7.0,2,0.4,2,45.59 595 | 0.79,637.0,343.0,147.0,7.0,3,0.4,2,39.41 596 | 0.79,637.0,343.0,147.0,7.0,4,0.4,2,38.84 597 | 0.79,637.0,343.0,147.0,7.0,5,0.4,2,48.03 598 | 0.76,661.5,416.5,122.5,7.0,2,0.4,2,39.48 599 | 0.76,661.5,416.5,122.5,7.0,3,0.4,2,40.4 600 | 0.76,661.5,416.5,122.5,7.0,4,0.4,2,40.47 601 | 0.76,661.5,416.5,122.5,7.0,5,0.4,2,39.7 602 | 0.74,686.0,245.0,220.5,3.5,2,0.4,2,16.43 603 | 0.74,686.0,245.0,220.5,3.5,3,0.4,2,16.93 604 | 0.74,686.0,245.0,220.5,3.5,4,0.4,2,16.99 605 | 0.74,686.0,245.0,220.5,3.5,5,0.4,2,17.03 606 | 0.71,710.5,269.5,220.5,3.5,2,0.4,2,16.77 607 | 0.71,710.5,269.5,220.5,3.5,3,0.4,2,17.37 608 | 0.71,710.5,269.5,220.5,3.5,4,0.4,2,17.27 609 | 0.71,710.5,269.5,220.5,3.5,5,0.4,2,17.51 610 | 0.69,735.0,294.0,220.5,3.5,2,0.4,2,16.44 611 | 0.69,735.0,294.0,220.5,3.5,3,0.4,2,17.01 612 | 0.69,735.0,294.0,220.5,3.5,4,0.4,2,17.23 613 | 0.69,735.0,294.0,220.5,3.5,5,0.4,2,17.22 614 | 0.66,759.5,318.5,220.5,3.5,2,0.4,2,17.85 615 | 0.66,759.5,318.5,220.5,3.5,3,0.4,2,17.89 616 | 0.66,759.5,318.5,220.5,3.5,4,0.4,2,18.36 617 | 0.66,759.5,318.5,220.5,3.5,5,0.4,2,18.15 618 | 0.64,784.0,343.0,220.5,3.5,2,0.4,2,21.72 619 | 0.64,784.0,343.0,220.5,3.5,3,0.4,2,22.07 620 | 0.64,784.0,343.0,220.5,3.5,4,0.4,2,22.09 621 | 0.64,784.0,343.0,220.5,3.5,5,0.4,2,21.93 622 | 0.62,808.5,367.5,220.5,3.5,2,0.4,2,17.36 623 | 0.62,808.5,367.5,220.5,3.5,3,0.4,2,17.38 624 | 0.62,808.5,367.5,220.5,3.5,4,0.4,2,16.86 625 | 0.62,808.5,367.5,220.5,3.5,5,0.4,2,16.99 626 | 0.98,514.5,294.0,110.25,7.0,2,0.4,3,32.78 627 | 0.98,514.5,294.0,110.25,7.0,3,0.4,3,33.24 628 | 0.98,514.5,294.0,110.25,7.0,4,0.4,3,33.86 629 | 0.98,514.5,294.0,110.25,7.0,5,0.4,3,34.0 630 | 0.9,563.5,318.5,122.5,7.0,2,0.4,3,37.26 631 | 0.9,563.5,318.5,122.5,7.0,3,0.4,3,35.04 632 | 0.9,563.5,318.5,122.5,7.0,4,0.4,3,33.82 633 | 0.9,563.5,318.5,122.5,7.0,5,0.4,3,33.31 634 | 0.86,588.0,294.0,147.0,7.0,2,0.4,3,35.22 635 | 0.86,588.0,294.0,147.0,7.0,3,0.4,3,34.7 636 | 0.86,588.0,294.0,147.0,7.0,4,0.4,3,30.11 637 | 0.86,588.0,294.0,147.0,7.0,5,0.4,3,31.6 638 | 0.82,612.5,318.5,147.0,7.0,2,0.4,3,32.43 639 | 0.82,612.5,318.5,147.0,7.0,3,0.4,3,30.65 640 | 0.82,612.5,318.5,147.0,7.0,4,0.4,3,29.77 641 | 0.82,612.5,318.5,147.0,7.0,5,0.4,3,29.64 642 | 0.79,637.0,343.0,147.0,7.0,2,0.4,3,46.44 643 | 0.79,637.0,343.0,147.0,7.0,3,0.4,3,44.18 644 | 0.79,637.0,343.0,147.0,7.0,4,0.4,3,38.81 645 | 0.79,637.0,343.0,147.0,7.0,5,0.4,3,38.23 646 | 0.76,661.5,416.5,122.5,7.0,2,0.4,3,38.17 647 | 0.76,661.5,416.5,122.5,7.0,3,0.4,3,38.48 648 | 0.76,661.5,416.5,122.5,7.0,4,0.4,3,39.66 649 | 
0.76,661.5,416.5,122.5,7.0,5,0.4,3,40.1 650 | 0.74,686.0,245.0,220.5,3.5,2,0.4,3,16.08 651 | 0.74,686.0,245.0,220.5,3.5,3,0.4,3,15.39 652 | 0.74,686.0,245.0,220.5,3.5,4,0.4,3,16.57 653 | 0.74,686.0,245.0,220.5,3.5,5,0.4,3,16.6 654 | 0.71,710.5,269.5,220.5,3.5,2,0.4,3,16.11 655 | 0.71,710.5,269.5,220.5,3.5,3,0.4,3,15.47 656 | 0.71,710.5,269.5,220.5,3.5,4,0.4,3,16.7 657 | 0.71,710.5,269.5,220.5,3.5,5,0.4,3,16.1 658 | 0.69,735.0,294.0,220.5,3.5,2,0.4,3,16.35 659 | 0.69,735.0,294.0,220.5,3.5,3,0.4,3,15.84 660 | 0.69,735.0,294.0,220.5,3.5,4,0.4,3,16.99 661 | 0.69,735.0,294.0,220.5,3.5,5,0.4,3,17.02 662 | 0.66,759.5,318.5,220.5,3.5,2,0.4,3,17.04 663 | 0.66,759.5,318.5,220.5,3.5,3,0.4,3,17.63 664 | 0.66,759.5,318.5,220.5,3.5,4,0.4,3,18.1 665 | 0.66,759.5,318.5,220.5,3.5,5,0.4,3,18.22 666 | 0.64,784.0,343.0,220.5,3.5,2,0.4,3,20.78 667 | 0.64,784.0,343.0,220.5,3.5,3,0.4,3,20.72 668 | 0.64,784.0,343.0,220.5,3.5,4,0.4,3,21.54 669 | 0.64,784.0,343.0,220.5,3.5,5,0.4,3,21.53 670 | 0.62,808.5,367.5,220.5,3.5,2,0.4,3,16.9 671 | 0.62,808.5,367.5,220.5,3.5,3,0.4,3,17.14 672 | 0.62,808.5,367.5,220.5,3.5,4,0.4,3,16.56 673 | 0.62,808.5,367.5,220.5,3.5,5,0.4,3,16.0 674 | 0.98,514.5,294.0,110.25,7.0,2,0.4,4,32.95 675 | 0.98,514.5,294.0,110.25,7.0,3,0.4,4,33.06 676 | 0.98,514.5,294.0,110.25,7.0,4,0.4,4,33.95 677 | 0.98,514.5,294.0,110.25,7.0,5,0.4,4,33.88 678 | 0.9,563.5,318.5,122.5,7.0,2,0.4,4,33.98 679 | 0.9,563.5,318.5,122.5,7.0,3,0.4,4,39.92 680 | 0.9,563.5,318.5,122.5,7.0,4,0.4,4,39.22 681 | 0.9,563.5,318.5,122.5,7.0,5,0.4,4,36.1 682 | 0.86,588.0,294.0,147.0,7.0,2,0.4,4,31.53 683 | 0.86,588.0,294.0,147.0,7.0,3,0.4,4,36.2 684 | 0.86,588.0,294.0,147.0,7.0,4,0.4,4,36.21 685 | 0.86,588.0,294.0,147.0,7.0,5,0.4,4,31.0 686 | 0.82,612.5,318.5,147.0,7.0,2,0.4,4,28.2 687 | 0.82,612.5,318.5,147.0,7.0,3,0.4,4,32.35 688 | 0.82,612.5,318.5,147.0,7.0,4,0.4,4,31.14 689 | 0.82,612.5,318.5,147.0,7.0,5,0.4,4,28.43 690 | 0.79,637.0,343.0,147.0,7.0,2,0.4,4,38.33 691 | 0.79,637.0,343.0,147.0,7.0,3,0.4,4,47.59 692 | 0.79,637.0,343.0,147.0,7.0,4,0.4,4,46.23 693 | 0.79,637.0,343.0,147.0,7.0,5,0.4,4,39.56 694 | 0.76,661.5,416.5,122.5,7.0,2,0.4,4,40.36 695 | 0.76,661.5,416.5,122.5,7.0,3,0.4,4,39.67 696 | 0.76,661.5,416.5,122.5,7.0,4,0.4,4,39.85 697 | 0.76,661.5,416.5,122.5,7.0,5,0.4,4,40.77 698 | 0.74,686.0,245.0,220.5,3.5,2,0.4,4,16.61 699 | 0.74,686.0,245.0,220.5,3.5,3,0.4,4,16.74 700 | 0.74,686.0,245.0,220.5,3.5,4,0.4,4,16.9 701 | 0.74,686.0,245.0,220.5,3.5,5,0.4,4,17.32 702 | 0.71,710.5,269.5,220.5,3.5,2,0.4,4,16.85 703 | 0.71,710.5,269.5,220.5,3.5,3,0.4,4,17.2 704 | 0.71,710.5,269.5,220.5,3.5,4,0.4,4,17.23 705 | 0.71,710.5,269.5,220.5,3.5,5,0.4,4,17.74 706 | 0.69,735.0,294.0,220.5,3.5,2,0.4,4,16.81 707 | 0.69,735.0,294.0,220.5,3.5,3,0.4,4,16.88 708 | 0.69,735.0,294.0,220.5,3.5,4,0.4,4,16.9 709 | 0.69,735.0,294.0,220.5,3.5,5,0.4,4,17.39 710 | 0.66,759.5,318.5,220.5,3.5,2,0.4,4,17.86 711 | 0.66,759.5,318.5,220.5,3.5,3,0.4,4,17.82 712 | 0.66,759.5,318.5,220.5,3.5,4,0.4,4,18.36 713 | 0.66,759.5,318.5,220.5,3.5,5,0.4,4,18.24 714 | 0.64,784.0,343.0,220.5,3.5,2,0.4,4,21.68 715 | 0.64,784.0,343.0,220.5,3.5,3,0.4,4,21.54 716 | 0.64,784.0,343.0,220.5,3.5,4,0.4,4,22.25 717 | 0.64,784.0,343.0,220.5,3.5,5,0.4,4,22.49 718 | 0.62,808.5,367.5,220.5,3.5,2,0.4,4,17.1 719 | 0.62,808.5,367.5,220.5,3.5,3,0.4,4,16.79 720 | 0.62,808.5,367.5,220.5,3.5,4,0.4,4,16.58 721 | 0.62,808.5,367.5,220.5,3.5,5,0.4,4,16.79 722 | 0.98,514.5,294.0,110.25,7.0,2,0.4,5,32.88 723 | 0.98,514.5,294.0,110.25,7.0,3,0.4,5,33.23 724 | 0.98,514.5,294.0,110.25,7.0,4,0.4,5,33.76 
725 | 0.98,514.5,294.0,110.25,7.0,5,0.4,5,34.01 726 | 0.9,563.5,318.5,122.5,7.0,2,0.4,5,33.94 727 | 0.9,563.5,318.5,122.5,7.0,3,0.4,5,33.14 728 | 0.9,563.5,318.5,122.5,7.0,4,0.4,5,38.79 729 | 0.9,563.5,318.5,122.5,7.0,5,0.4,5,37.27 730 | 0.86,588.0,294.0,147.0,7.0,2,0.4,5,29.69 731 | 0.86,588.0,294.0,147.0,7.0,3,0.4,5,31.2 732 | 0.86,588.0,294.0,147.0,7.0,4,0.4,5,36.26 733 | 0.86,588.0,294.0,147.0,7.0,5,0.4,5,35.71 734 | 0.82,612.5,318.5,147.0,7.0,2,0.4,5,29.93 735 | 0.82,612.5,318.5,147.0,7.0,3,0.4,5,29.56 736 | 0.82,612.5,318.5,147.0,7.0,4,0.4,5,33.84 737 | 0.82,612.5,318.5,147.0,7.0,5,0.4,5,32.54 738 | 0.79,637.0,343.0,147.0,7.0,2,0.4,5,38.56 739 | 0.79,637.0,343.0,147.0,7.0,3,0.4,5,37.7 740 | 0.79,637.0,343.0,147.0,7.0,4,0.4,5,47.01 741 | 0.79,637.0,343.0,147.0,7.0,5,0.4,5,44.87 742 | 0.76,661.5,416.5,122.5,7.0,2,0.4,5,39.37 743 | 0.76,661.5,416.5,122.5,7.0,3,0.4,5,39.8 744 | 0.76,661.5,416.5,122.5,7.0,4,0.4,5,37.79 745 | 0.76,661.5,416.5,122.5,7.0,5,0.4,5,38.18 746 | 0.74,686.0,245.0,220.5,3.5,2,0.4,5,16.69 747 | 0.74,686.0,245.0,220.5,3.5,3,0.4,5,16.62 748 | 0.74,686.0,245.0,220.5,3.5,4,0.4,5,16.94 749 | 0.74,686.0,245.0,220.5,3.5,5,0.4,5,16.7 750 | 0.71,710.5,269.5,220.5,3.5,2,0.4,5,15.59 751 | 0.71,710.5,269.5,220.5,3.5,3,0.4,5,14.58 752 | 0.71,710.5,269.5,220.5,3.5,4,0.4,5,15.33 753 | 0.71,710.5,269.5,220.5,3.5,5,0.4,5,15.31 754 | 0.69,735.0,294.0,220.5,3.5,2,0.4,5,16.63 755 | 0.69,735.0,294.0,220.5,3.5,3,0.4,5,15.87 756 | 0.69,735.0,294.0,220.5,3.5,4,0.4,5,16.54 757 | 0.69,735.0,294.0,220.5,3.5,5,0.4,5,16.74 758 | 0.66,759.5,318.5,220.5,3.5,2,0.4,5,17.64 759 | 0.66,759.5,318.5,220.5,3.5,3,0.4,5,17.79 760 | 0.66,759.5,318.5,220.5,3.5,4,0.4,5,17.55 761 | 0.66,759.5,318.5,220.5,3.5,5,0.4,5,18.06 762 | 0.64,784.0,343.0,220.5,3.5,2,0.4,5,20.82 763 | 0.64,784.0,343.0,220.5,3.5,3,0.4,5,20.21 764 | 0.64,784.0,343.0,220.5,3.5,4,0.4,5,20.71 765 | 0.64,784.0,343.0,220.5,3.5,5,0.4,5,21.4 766 | 0.62,808.5,367.5,220.5,3.5,2,0.4,5,16.88 767 | 0.62,808.5,367.5,220.5,3.5,3,0.4,5,17.11 768 | 0.62,808.5,367.5,220.5,3.5,4,0.4,5,16.61 769 | 0.62,808.5,367.5,220.5,3.5,5,0.4,5,16.03 770 | -------------------------------------------------------------------------------- /docs/data/d_housing.txt: -------------------------------------------------------------------------------- 1 | x0 x1 x2 x3 x4 x5 x6 x7 x8 x9 x10 x11 x12 label 2 | 0.00632 18 2.31 0 0.538 6.575 65.2 4.09 1 296 15.3 396.9 4.98 24 3 | 0.02731 0 7.07 0 0.469 6.421 78.9 4.9671 2 242 17.8 396.9 9.14 21.6 4 | 0.02729 0 7.07 0 0.469 7.185 61.1 4.9671 2 242 17.8 392.83 4.03 34.7 5 | 0.03237 0 2.18 0 0.458 6.998 45.8 6.0622 3 222 18.7 394.63 2.94 33.4 6 | 0.06905 0 2.18 0 0.458 7.147 54.2 6.0622 3 222 18.7 396.9 5.33 36.2 7 | 0.02985 0 2.18 0 0.458 6.43 58.7 6.0622 3 222 18.7 394.12 5.21 28.7 8 | 0.08829 12.5 7.87 0 0.524 6.012 66.6 5.5605 5 311 15.2 395.6 12.43 22.9 9 | 0.14455 12.5 7.87 0 0.524 6.172 96.1 5.9505 5 311 15.2 396.9 19.15 27.1 10 | 0.21124 12.5 7.87 0 0.524 5.631 100 6.0821 5 311 15.2 386.63 29.93 16.5 11 | 0.17004 12.5 7.87 0 0.524 6.004 85.9 6.5921 5 311 15.2 386.71 17.1 18.9 12 | 0.22489 12.5 7.87 0 0.524 6.377 94.3 6.3467 5 311 15.2 392.52 20.45 15 13 | 0.11747 12.5 7.87 0 0.524 6.009 82.9 6.2267 5 311 15.2 396.9 13.27 18.9 14 | 0.09378 12.5 7.87 0 0.524 5.889 39 5.4509 5 311 15.2 390.5 15.71 21.7 15 | 0.62976 0 8.14 0 0.538 5.949 61.8 4.7075 4 307 21 396.9 8.26 20.4 16 | 0.63796 0 8.14 0 0.538 6.096 84.5 4.4619 4 307 21 380.02 10.26 18.2 17 | 0.62739 0 8.14 0 0.538 5.834 56.5 4.4986 4 307 21 395.62 8.47 19.9 18 | 1.05393 0 8.14 
0 0.538 5.935 29.3 4.4986 4 307 21 386.85 6.58 23.1 19 | 0.7842 0 8.14 0 0.538 5.99 81.7 4.2579 4 307 21 386.75 14.67 17.5 20 | 0.80271 0 8.14 0 0.538 5.456 36.6 3.7965 4 307 21 288.99 11.69 20.2 21 | 0.7258 0 8.14 0 0.538 5.727 69.5 3.7965 4 307 21 390.95 11.28 18.2 22 | 1.25179 0 8.14 0 0.538 5.57 98.1 3.7979 4 307 21 376.57 21.02 13.6 23 | 0.85204 0 8.14 0 0.538 5.965 89.2 4.0123 4 307 21 392.53 13.83 19.6 24 | 1.23247 0 8.14 0 0.538 6.142 91.7 3.9769 4 307 21 396.9 18.72 15.2 25 | 0.98843 0 8.14 0 0.538 5.813 100 4.0952 4 307 21 394.54 19.88 14.5 26 | 0.75026 0 8.14 0 0.538 5.924 94.1 4.3996 4 307 21 394.33 16.3 15.6 27 | 0.84054 0 8.14 0 0.538 5.599 85.7 4.4546 4 307 21 303.42 16.51 13.9 28 | 0.67191 0 8.14 0 0.538 5.813 90.3 4.682 4 307 21 376.88 14.81 16.6 29 | 0.95577 0 8.14 0 0.538 6.047 88.8 4.4534 4 307 21 306.38 17.28 14.8 30 | 0.77299 0 8.14 0 0.538 6.495 94.4 4.4547 4 307 21 387.94 12.8 18.4 31 | 1.00245 0 8.14 0 0.538 6.674 87.3 4.239 4 307 21 380.23 11.98 21 32 | 1.13081 0 8.14 0 0.538 5.713 94.1 4.233 4 307 21 360.17 22.6 12.7 33 | 1.35472 0 8.14 0 0.538 6.072 100 4.175 4 307 21 376.73 13.04 14.5 34 | 1.38799 0 8.14 0 0.538 5.95 82 3.99 4 307 21 232.6 27.71 13.2 35 | 1.15172 0 8.14 0 0.538 5.701 95 3.7872 4 307 21 358.77 18.35 13.1 36 | 1.61282 0 8.14 0 0.538 6.096 96.9 3.7598 4 307 21 248.31 20.34 13.5 37 | 0.06417 0 5.96 0 0.499 5.933 68.2 3.3603 5 279 19.2 396.9 9.68 18.9 38 | 0.09744 0 5.96 0 0.499 5.841 61.4 3.3779 5 279 19.2 377.56 11.41 20 39 | 0.08014 0 5.96 0 0.499 5.85 41.5 3.9342 5 279 19.2 396.9 8.77 21 40 | 0.17505 0 5.96 0 0.499 5.966 30.2 3.8473 5 279 19.2 393.43 10.13 24.7 41 | 0.02763 75 2.95 0 0.428 6.595 21.8 5.4011 3 252 18.3 395.63 4.32 30.8 42 | 0.03359 75 2.95 0 0.428 7.024 15.8 5.4011 3 252 18.3 395.62 1.98 34.9 43 | 0.12744 0 6.91 0 0.448 6.77 2.9 5.7209 3 233 17.9 385.41 4.84 26.6 44 | 0.1415 0 6.91 0 0.448 6.169 6.6 5.7209 3 233 17.9 383.37 5.81 25.3 45 | 0.15936 0 6.91 0 0.448 6.211 6.5 5.7209 3 233 17.9 394.46 7.44 24.7 46 | 0.12269 0 6.91 0 0.448 6.069 40 5.7209 3 233 17.9 389.39 9.55 21.2 47 | 0.17142 0 6.91 0 0.448 5.682 33.8 5.1004 3 233 17.9 396.9 10.21 19.3 48 | 0.18836 0 6.91 0 0.448 5.786 33.3 5.1004 3 233 17.9 396.9 14.15 20 49 | 0.22927 0 6.91 0 0.448 6.03 85.5 5.6894 3 233 17.9 392.74 18.8 16.6 50 | 0.25387 0 6.91 0 0.448 5.399 95.3 5.87 3 233 17.9 396.9 30.81 14.4 51 | 0.21977 0 6.91 0 0.448 5.602 62 6.0877 3 233 17.9 396.9 16.2 19.4 52 | 0.08873 21 5.64 0 0.439 5.963 45.7 6.8147 4 243 16.8 395.56 13.45 19.7 53 | 0.04337 21 5.64 0 0.439 6.115 63 6.8147 4 243 16.8 393.97 9.43 20.5 54 | 0.0536 21 5.64 0 0.439 6.511 21.1 6.8147 4 243 16.8 396.9 5.28 25 55 | 0.04981 21 5.64 0 0.439 5.998 21.4 6.8147 4 243 16.8 396.9 8.43 23.4 56 | 0.0136 75 4 0 0.41 5.888 47.6 7.3197 3 469 21.1 396.9 14.8 18.9 57 | 0.01311 90 1.22 0 0.403 7.249 21.9 8.6966 5 226 17.9 395.93 4.81 35.4 58 | 0.02055 85 0.74 0 0.41 6.383 35.7 9.1876 2 313 17.3 396.9 5.77 24.7 59 | 0.01432 100 1.32 0 0.411 6.816 40.5 8.3248 5 256 15.1 392.9 3.95 31.6 60 | 0.15445 25 5.13 0 0.453 6.145 29.2 7.8148 8 284 19.7 390.68 6.86 23.3 61 | 0.10328 25 5.13 0 0.453 5.927 47.2 6.932 8 284 19.7 396.9 9.22 19.6 62 | 0.14932 25 5.13 0 0.453 5.741 66.2 7.2254 8 284 19.7 395.11 13.15 18.7 63 | 0.17171 25 5.13 0 0.453 5.966 93.4 6.8185 8 284 19.7 378.08 14.44 16 64 | 0.11027 25 5.13 0 0.453 6.456 67.8 7.2255 8 284 19.7 396.9 6.73 22.2 65 | 0.1265 25 5.13 0 0.453 6.762 43.4 7.9809 8 284 19.7 395.58 9.5 25 66 | 0.01951 17.5 1.38 0 0.4161 7.104 59.5 9.2229 3 216 18.6 393.24 8.05 33 67 | 0.03584 80 
3.37 0 0.398 6.29 17.8 6.6115 4 337 16.1 396.9 4.67 23.5 68 | 0.04379 80 3.37 0 0.398 5.787 31.1 6.6115 4 337 16.1 396.9 10.24 19.4 69 | 0.05789 12.5 6.07 0 0.409 5.878 21.4 6.498 4 345 18.9 396.21 8.1 22 70 | 0.13554 12.5 6.07 0 0.409 5.594 36.8 6.498 4 345 18.9 396.9 13.09 17.4 71 | 0.12816 12.5 6.07 0 0.409 5.885 33 6.498 4 345 18.9 396.9 8.79 20.9 72 | 0.08826 0 10.81 0 0.413 6.417 6.6 5.2873 4 305 19.2 383.73 6.72 24.2 73 | 0.15876 0 10.81 0 0.413 5.961 17.5 5.2873 4 305 19.2 376.94 9.88 21.7 74 | 0.09164 0 10.81 0 0.413 6.065 7.8 5.2873 4 305 19.2 390.91 5.52 22.8 75 | 0.19539 0 10.81 0 0.413 6.245 6.2 5.2873 4 305 19.2 377.17 7.54 23.4 76 | 0.07896 0 12.83 0 0.437 6.273 6 4.2515 5 398 18.7 394.92 6.78 24.1 77 | 0.09512 0 12.83 0 0.437 6.286 45 4.5026 5 398 18.7 383.23 8.94 21.4 78 | 0.10153 0 12.83 0 0.437 6.279 74.5 4.0522 5 398 18.7 373.66 11.97 20 79 | 0.08707 0 12.83 0 0.437 6.14 45.8 4.0905 5 398 18.7 386.96 10.27 20.8 80 | 0.05646 0 12.83 0 0.437 6.232 53.7 5.0141 5 398 18.7 386.4 12.34 21.2 81 | 0.08387 0 12.83 0 0.437 5.874 36.6 4.5026 5 398 18.7 396.06 9.1 20.3 82 | 0.04113 25 4.86 0 0.426 6.727 33.5 5.4007 4 281 19 396.9 5.29 28 83 | 0.04462 25 4.86 0 0.426 6.619 70.4 5.4007 4 281 19 395.63 7.22 23.9 84 | 0.03659 25 4.86 0 0.426 6.302 32.2 5.4007 4 281 19 396.9 6.72 24.8 85 | 0.03551 25 4.86 0 0.426 6.167 46.7 5.4007 4 281 19 390.64 7.51 22.9 86 | 0.05059 0 4.49 0 0.449 6.389 48 4.7794 3 247 18.5 396.9 9.62 23.9 87 | 0.05735 0 4.49 0 0.449 6.63 56.1 4.4377 3 247 18.5 392.3 6.53 26.6 88 | 0.05188 0 4.49 0 0.449 6.015 45.1 4.4272 3 247 18.5 395.99 12.86 22.5 89 | 0.07151 0 4.49 0 0.449 6.121 56.8 3.7476 3 247 18.5 395.15 8.44 22.2 90 | 0.0566 0 3.41 0 0.489 7.007 86.3 3.4217 2 270 17.8 396.9 5.5 23.6 91 | 0.05302 0 3.41 0 0.489 7.079 63.1 3.4145 2 270 17.8 396.06 5.7 28.7 92 | 0.04684 0 3.41 0 0.489 6.417 66.1 3.0923 2 270 17.8 392.18 8.81 22.6 93 | 0.03932 0 3.41 0 0.489 6.405 73.9 3.0921 2 270 17.8 393.55 8.2 22 94 | 0.04203 28 15.04 0 0.464 6.442 53.6 3.6659 4 270 18.2 395.01 8.16 22.9 95 | 0.02875 28 15.04 0 0.464 6.211 28.9 3.6659 4 270 18.2 396.33 6.21 25 96 | 0.04294 28 15.04 0 0.464 6.249 77.3 3.615 4 270 18.2 396.9 10.59 20.6 97 | 0.12204 0 2.89 0 0.445 6.625 57.8 3.4952 2 276 18 357.98 6.65 28.4 98 | 0.11504 0 2.89 0 0.445 6.163 69.6 3.4952 2 276 18 391.83 11.34 21.4 99 | 0.12083 0 2.89 0 0.445 8.069 76 3.4952 2 276 18 396.9 4.21 38.7 100 | 0.08187 0 2.89 0 0.445 7.82 36.9 3.4952 2 276 18 393.53 3.57 43.8 101 | 0.0686 0 2.89 0 0.445 7.416 62.5 3.4952 2 276 18 396.9 6.19 33.2 102 | 0.14866 0 8.56 0 0.52 6.727 79.9 2.7778 5 384 20.9 394.76 9.42 27.5 103 | 0.11432 0 8.56 0 0.52 6.781 71.3 2.8561 5 384 20.9 395.58 7.67 26.5 104 | 0.22876 0 8.56 0 0.52 6.405 85.4 2.7147 5 384 20.9 70.8 10.63 18.6 105 | 0.21161 0 8.56 0 0.52 6.137 87.4 2.7147 5 384 20.9 394.47 13.44 19.3 106 | 0.1396 0 8.56 0 0.52 6.167 90 2.421 5 384 20.9 392.69 12.33 20.1 107 | 0.13262 0 8.56 0 0.52 5.851 96.7 2.1069 5 384 20.9 394.05 16.47 19.5 108 | 0.1712 0 8.56 0 0.52 5.836 91.9 2.211 5 384 20.9 395.67 18.66 19.5 109 | 0.13117 0 8.56 0 0.52 6.127 85.2 2.1224 5 384 20.9 387.69 14.09 20.4 110 | 0.12802 0 8.56 0 0.52 6.474 97.1 2.4329 5 384 20.9 395.24 12.27 19.8 111 | 0.26363 0 8.56 0 0.52 6.229 91.2 2.5451 5 384 20.9 391.23 15.55 19.4 112 | 0.10793 0 8.56 0 0.52 6.195 54.4 2.7778 5 384 20.9 393.49 13 21.7 113 | 0.10084 0 10.01 0 0.547 6.715 81.6 2.6775 6 432 17.8 395.59 10.16 22.8 114 | 0.12329 0 10.01 0 0.547 5.913 92.9 2.3534 6 432 17.8 394.95 16.21 18.8 115 | 0.22212 0 10.01 0 0.547 6.092 95.4 
2.548 6 432 17.8 396.9 17.09 18.7 116 | 0.14231 0 10.01 0 0.547 6.254 84.2 2.2565 6 432 17.8 388.74 10.45 18.5 117 | 0.17134 0 10.01 0 0.547 5.928 88.2 2.4631 6 432 17.8 344.91 15.76 18.3 118 | 0.13158 0 10.01 0 0.547 6.176 72.5 2.7301 6 432 17.8 393.3 12.04 21.2 119 | 0.15098 0 10.01 0 0.547 6.021 82.6 2.7474 6 432 17.8 394.51 10.3 19.2 120 | 0.13058 0 10.01 0 0.547 5.872 73.1 2.4775 6 432 17.8 338.63 15.37 20.4 121 | 0.14476 0 10.01 0 0.547 5.731 65.2 2.7592 6 432 17.8 391.5 13.61 19.3 122 | 0.06899 0 25.65 0 0.581 5.87 69.7 2.2577 2 188 19.1 389.15 14.37 22 123 | 0.07165 0 25.65 0 0.581 6.004 84.1 2.1974 2 188 19.1 377.67 14.27 20.3 124 | 0.09299 0 25.65 0 0.581 5.961 92.9 2.0869 2 188 19.1 378.09 17.93 20.5 125 | 0.15038 0 25.65 0 0.581 5.856 97 1.9444 2 188 19.1 370.31 25.41 17.3 126 | 0.09849 0 25.65 0 0.581 5.879 95.8 2.0063 2 188 19.1 379.38 17.58 18.8 127 | 0.16902 0 25.65 0 0.581 5.986 88.4 1.9929 2 188 19.1 385.02 14.81 21.4 128 | 0.38735 0 25.65 0 0.581 5.613 95.6 1.7572 2 188 19.1 359.29 27.26 15.7 129 | 0.25915 0 21.89 0 0.624 5.693 96 1.7883 4 437 21.2 392.11 17.19 16.2 130 | 0.32543 0 21.89 0 0.624 6.431 98.8 1.8125 4 437 21.2 396.9 15.39 18 131 | 0.88125 0 21.89 0 0.624 5.637 94.7 1.9799 4 437 21.2 396.9 18.34 14.3 132 | 0.34006 0 21.89 0 0.624 6.458 98.9 2.1185 4 437 21.2 395.04 12.6 19.2 133 | 1.19294 0 21.89 0 0.624 6.326 97.7 2.271 4 437 21.2 396.9 12.26 19.6 134 | 0.59005 0 21.89 0 0.624 6.372 97.9 2.3274 4 437 21.2 385.76 11.12 23 135 | 0.32982 0 21.89 0 0.624 5.822 95.4 2.4699 4 437 21.2 388.69 15.03 18.4 136 | 0.97617 0 21.89 0 0.624 5.757 98.4 2.346 4 437 21.2 262.76 17.31 15.6 137 | 0.55778 0 21.89 0 0.624 6.335 98.2 2.1107 4 437 21.2 394.67 16.96 18.1 138 | 0.32264 0 21.89 0 0.624 5.942 93.5 1.9669 4 437 21.2 378.25 16.9 17.4 139 | 0.35233 0 21.89 0 0.624 6.454 98.4 1.8498 4 437 21.2 394.08 14.59 17.1 140 | 0.2498 0 21.89 0 0.624 5.857 98.2 1.6686 4 437 21.2 392.04 21.32 13.3 141 | 0.54452 0 21.89 0 0.624 6.151 97.9 1.6687 4 437 21.2 396.9 18.46 17.8 142 | 0.2909 0 21.89 0 0.624 6.174 93.6 1.6119 4 437 21.2 388.08 24.16 14 143 | 1.62864 0 21.89 0 0.624 5.019 100 1.4394 4 437 21.2 396.9 34.41 14.4 144 | 3.32105 0 19.58 1 0.871 5.403 100 1.3216 5 403 14.7 396.9 26.82 13.4 145 | 4.0974 0 19.58 0 0.871 5.468 100 1.4118 5 403 14.7 396.9 26.42 15.6 146 | 2.77974 0 19.58 0 0.871 4.903 97.8 1.3459 5 403 14.7 396.9 29.29 11.8 147 | 2.37934 0 19.58 0 0.871 6.13 100 1.4191 5 403 14.7 172.91 27.8 13.8 148 | 2.15505 0 19.58 0 0.871 5.628 100 1.5166 5 403 14.7 169.27 16.65 15.6 149 | 2.36862 0 19.58 0 0.871 4.926 95.7 1.4608 5 403 14.7 391.71 29.53 14.6 150 | 2.33099 0 19.58 0 0.871 5.186 93.8 1.5296 5 403 14.7 356.99 28.32 17.8 151 | 2.73397 0 19.58 0 0.871 5.597 94.9 1.5257 5 403 14.7 351.85 21.45 15.4 152 | 1.6566 0 19.58 0 0.871 6.122 97.3 1.618 5 403 14.7 372.8 14.1 21.5 153 | 1.49632 0 19.58 0 0.871 5.404 100 1.5916 5 403 14.7 341.6 13.28 19.6 154 | 1.12658 0 19.58 1 0.871 5.012 88 1.6102 5 403 14.7 343.28 12.12 15.3 155 | 2.14918 0 19.58 0 0.871 5.709 98.5 1.6232 5 403 14.7 261.95 15.79 19.4 156 | 1.41385 0 19.58 1 0.871 6.129 96 1.7494 5 403 14.7 321.02 15.12 17 157 | 3.53501 0 19.58 1 0.871 6.152 82.6 1.7455 5 403 14.7 88.01 15.02 15.6 158 | 2.44668 0 19.58 0 0.871 5.272 94 1.7364 5 403 14.7 88.63 16.14 13.1 159 | 1.22358 0 19.58 0 0.605 6.943 97.4 1.8773 5 403 14.7 363.43 4.59 41.3 160 | 1.34284 0 19.58 0 0.605 6.066 100 1.7573 5 403 14.7 353.89 6.43 24.3 161 | 1.42502 0 19.58 0 0.871 6.51 100 1.7659 5 403 14.7 364.31 7.39 23.3 162 | 1.27346 0 19.58 1 0.605 6.25 
92.6 1.7984 5 403 14.7 338.92 5.5 27 163 | 1.46336 0 19.58 0 0.605 7.489 90.8 1.9709 5 403 14.7 374.43 1.73 50 164 | 1.83377 0 19.58 1 0.605 7.802 98.2 2.0407 5 403 14.7 389.61 1.92 50 165 | 1.51902 0 19.58 1 0.605 8.375 93.9 2.162 5 403 14.7 388.45 3.32 50 166 | 2.24236 0 19.58 0 0.605 5.854 91.8 2.422 5 403 14.7 395.11 11.64 22.7 167 | 2.924 0 19.58 0 0.605 6.101 93 2.2834 5 403 14.7 240.16 9.81 25 168 | 2.01019 0 19.58 0 0.605 7.929 96.2 2.0459 5 403 14.7 369.3 3.7 50 169 | 1.80028 0 19.58 0 0.605 5.877 79.2 2.4259 5 403 14.7 227.61 12.14 23.8 170 | 2.3004 0 19.58 0 0.605 6.319 96.1 2.1 5 403 14.7 297.09 11.1 23.8 171 | 2.44953 0 19.58 0 0.605 6.402 95.2 2.2625 5 403 14.7 330.04 11.32 22.3 172 | 1.20742 0 19.58 0 0.605 5.875 94.6 2.4259 5 403 14.7 292.29 14.43 17.4 173 | 2.3139 0 19.58 0 0.605 5.88 97.3 2.3887 5 403 14.7 348.13 12.03 19.1 174 | 0.13914 0 4.05 0 0.51 5.572 88.5 2.5961 5 296 16.6 396.9 14.69 23.1 175 | 0.09178 0 4.05 0 0.51 6.416 84.1 2.6463 5 296 16.6 395.5 9.04 23.6 176 | 0.08447 0 4.05 0 0.51 5.859 68.7 2.7019 5 296 16.6 393.23 9.64 22.6 177 | 0.06664 0 4.05 0 0.51 6.546 33.1 3.1323 5 296 16.6 390.96 5.33 29.4 178 | 0.07022 0 4.05 0 0.51 6.02 47.2 3.5549 5 296 16.6 393.23 10.11 23.2 179 | 0.05425 0 4.05 0 0.51 6.315 73.4 3.3175 5 296 16.6 395.6 6.29 24.6 180 | 0.06642 0 4.05 0 0.51 6.86 74.4 2.9153 5 296 16.6 391.27 6.92 29.9 181 | 0.0578 0 2.46 0 0.488 6.98 58.4 2.829 3 193 17.8 396.9 5.04 37.2 182 | 0.06588 0 2.46 0 0.488 7.765 83.3 2.741 3 193 17.8 395.56 7.56 39.8 183 | 0.06888 0 2.46 0 0.488 6.144 62.2 2.5979 3 193 17.8 396.9 9.45 36.2 184 | 0.09103 0 2.46 0 0.488 7.155 92.2 2.7006 3 193 17.8 394.12 4.82 37.9 185 | 0.10008 0 2.46 0 0.488 6.563 95.6 2.847 3 193 17.8 396.9 5.68 32.5 186 | 0.08308 0 2.46 0 0.488 5.604 89.8 2.9879 3 193 17.8 391 13.98 26.4 187 | 0.06047 0 2.46 0 0.488 6.153 68.8 3.2797 3 193 17.8 387.11 13.15 29.6 188 | 0.05602 0 2.46 0 0.488 7.831 53.6 3.1992 3 193 17.8 392.63 4.45 50 189 | 0.07875 45 3.44 0 0.437 6.782 41.1 3.7886 5 398 15.2 393.87 6.68 32 190 | 0.12579 45 3.44 0 0.437 6.556 29.1 4.5667 5 398 15.2 382.84 4.56 29.8 191 | 0.0837 45 3.44 0 0.437 7.185 38.9 4.5667 5 398 15.2 396.9 5.39 34.9 192 | 0.09068 45 3.44 0 0.437 6.951 21.5 6.4798 5 398 15.2 377.68 5.1 37 193 | 0.06911 45 3.44 0 0.437 6.739 30.8 6.4798 5 398 15.2 389.71 4.69 30.5 194 | 0.08664 45 3.44 0 0.437 7.178 26.3 6.4798 5 398 15.2 390.49 2.87 36.4 195 | 0.02187 60 2.93 0 0.401 6.8 9.9 6.2196 1 265 15.6 393.37 5.03 31.1 196 | 0.01439 60 2.93 0 0.401 6.604 18.8 6.2196 1 265 15.6 376.7 4.38 29.1 197 | 0.01381 80 0.46 0 0.422 7.875 32 5.6484 4 255 14.4 394.23 2.97 50 198 | 0.04011 80 1.52 0 0.404 7.287 34.1 7.309 2 329 12.6 396.9 4.08 33.3 199 | 0.04666 80 1.52 0 0.404 7.107 36.6 7.309 2 329 12.6 354.31 8.61 30.3 200 | 0.03768 80 1.52 0 0.404 7.274 38.3 7.309 2 329 12.6 392.2 6.62 34.6 201 | 0.0315 95 1.47 0 0.403 6.975 15.3 7.6534 3 402 17 396.9 4.56 34.9 202 | 0.01778 95 1.47 0 0.403 7.135 13.9 7.6534 3 402 17 384.3 4.45 32.9 203 | 0.03445 82.5 2.03 0 0.415 6.162 38.4 6.27 2 348 14.7 393.77 7.43 24.1 204 | 0.02177 82.5 2.03 0 0.415 7.61 15.7 6.27 2 348 14.7 395.38 3.11 42.3 205 | 0.0351 95 2.68 0 0.4161 7.853 33.2 5.118 4 224 14.7 392.78 3.81 48.5 206 | 0.02009 95 2.68 0 0.4161 8.034 31.9 5.118 4 224 14.7 390.55 2.88 50 207 | 0.13642 0 10.59 0 0.489 5.891 22.3 3.9454 4 277 18.6 396.9 10.87 22.6 208 | 0.22969 0 10.59 0 0.489 6.326 52.5 4.3549 4 277 18.6 394.87 10.97 24.4 209 | 0.25199 0 10.59 0 0.489 5.783 72.7 4.3549 4 277 18.6 389.43 18.06 22.5 210 | 0.13587 0 10.59 1 0.489 
6.064 59.1 4.2392 4 277 18.6 381.32 14.66 24.4 211 | 0.43571 0 10.59 1 0.489 5.344 100 3.875 4 277 18.6 396.9 23.09 20 212 | 0.17446 0 10.59 1 0.489 5.96 92.1 3.8771 4 277 18.6 393.25 17.27 21.7 213 | 0.37578 0 10.59 1 0.489 5.404 88.6 3.665 4 277 18.6 395.24 23.98 19.3 214 | 0.21719 0 10.59 1 0.489 5.807 53.8 3.6526 4 277 18.6 390.94 16.03 22.4 215 | 0.14052 0 10.59 0 0.489 6.375 32.3 3.9454 4 277 18.6 385.81 9.38 28.1 216 | 0.28955 0 10.59 0 0.489 5.412 9.8 3.5875 4 277 18.6 348.93 29.55 23.7 217 | 0.19802 0 10.59 0 0.489 6.182 42.4 3.9454 4 277 18.6 393.63 9.47 25 218 | 0.0456 0 13.89 1 0.55 5.888 56 3.1121 5 276 16.4 392.8 13.51 23.3 219 | 0.07013 0 13.89 0 0.55 6.642 85.1 3.4211 5 276 16.4 392.78 9.69 28.7 220 | 0.11069 0 13.89 1 0.55 5.951 93.8 2.8893 5 276 16.4 396.9 17.92 21.5 221 | 0.11425 0 13.89 1 0.55 6.373 92.4 3.3633 5 276 16.4 393.74 10.5 23 222 | 0.35809 0 6.2 1 0.507 6.951 88.5 2.8617 8 307 17.4 391.7 9.71 26.7 223 | 0.40771 0 6.2 1 0.507 6.164 91.3 3.048 8 307 17.4 395.24 21.46 21.7 224 | 0.62356 0 6.2 1 0.507 6.879 77.7 3.2721 8 307 17.4 390.39 9.93 27.5 225 | 0.6147 0 6.2 0 0.507 6.618 80.8 3.2721 8 307 17.4 396.9 7.6 30.1 226 | 0.31533 0 6.2 0 0.504 8.266 78.3 2.8944 8 307 17.4 385.05 4.14 44.8 227 | 0.52693 0 6.2 0 0.504 8.725 83 2.8944 8 307 17.4 382 4.63 50 228 | 0.38214 0 6.2 0 0.504 8.04 86.5 3.2157 8 307 17.4 387.38 3.13 37.6 229 | 0.41238 0 6.2 0 0.504 7.163 79.9 3.2157 8 307 17.4 372.08 6.36 31.6 230 | 0.29819 0 6.2 0 0.504 7.686 17 3.3751 8 307 17.4 377.51 3.92 46.7 231 | 0.44178 0 6.2 0 0.504 6.552 21.4 3.3751 8 307 17.4 380.34 3.76 31.5 232 | 0.537 0 6.2 0 0.504 5.981 68.1 3.6715 8 307 17.4 378.35 11.65 24.3 233 | 0.46296 0 6.2 0 0.504 7.412 76.9 3.6715 8 307 17.4 376.14 5.25 31.7 234 | 0.57529 0 6.2 0 0.507 8.337 73.3 3.8384 8 307 17.4 385.91 2.47 41.7 235 | 0.33147 0 6.2 0 0.507 8.247 70.4 3.6519 8 307 17.4 378.95 3.95 48.3 236 | 0.44791 0 6.2 1 0.507 6.726 66.5 3.6519 8 307 17.4 360.2 8.05 29 237 | 0.33045 0 6.2 0 0.507 6.086 61.5 3.6519 8 307 17.4 376.75 10.88 24 238 | 0.52058 0 6.2 1 0.507 6.631 76.5 4.148 8 307 17.4 388.45 9.54 25.1 239 | 0.51183 0 6.2 0 0.507 7.358 71.6 4.148 8 307 17.4 390.07 4.73 31.5 240 | 0.08244 30 4.93 0 0.428 6.481 18.5 6.1899 6 300 16.6 379.41 6.36 23.7 241 | 0.09252 30 4.93 0 0.428 6.606 42.2 6.1899 6 300 16.6 383.78 7.37 23.3 242 | 0.11329 30 4.93 0 0.428 6.897 54.3 6.3361 6 300 16.6 391.25 11.38 22 243 | 0.10612 30 4.93 0 0.428 6.095 65.1 6.3361 6 300 16.6 394.62 12.4 20.1 244 | 0.1029 30 4.93 0 0.428 6.358 52.9 7.0355 6 300 16.6 372.75 11.22 22.2 245 | 0.12757 30 4.93 0 0.428 6.393 7.8 7.0355 6 300 16.6 374.71 5.19 23.7 246 | 0.20608 22 5.86 0 0.431 5.593 76.5 7.9549 7 330 19.1 372.49 12.5 17.6 247 | 0.19133 22 5.86 0 0.431 5.605 70.2 7.9549 7 330 19.1 389.13 18.46 18.5 248 | 0.33983 22 5.86 0 0.431 6.108 34.9 8.0555 7 330 19.1 390.18 9.16 24.3 249 | 0.19657 22 5.86 0 0.431 6.226 79.2 8.0555 7 330 19.1 376.14 10.15 20.5 250 | 0.16439 22 5.86 0 0.431 6.433 49.1 7.8265 7 330 19.1 374.71 9.52 24.5 251 | 0.19073 22 5.86 0 0.431 6.718 17.5 7.8265 7 330 19.1 393.74 6.56 26.2 252 | 0.1403 22 5.86 0 0.431 6.487 13 7.3967 7 330 19.1 396.28 5.9 24.4 253 | 0.21409 22 5.86 0 0.431 6.438 8.9 7.3967 7 330 19.1 377.07 3.59 24.8 254 | 0.08221 22 5.86 0 0.431 6.957 6.8 8.9067 7 330 19.1 386.09 3.53 29.6 255 | 0.36894 22 5.86 0 0.431 8.259 8.4 8.9067 7 330 19.1 396.9 3.54 42.8 256 | 0.04819 80 3.64 0 0.392 6.108 32 9.2203 1 315 16.4 392.89 6.57 21.9 257 | 0.03548 80 3.64 0 0.392 5.876 19.1 9.2203 1 315 16.4 395.18 9.25 20.9 258 | 0.01538 90 
3.75 0 0.394 7.454 34.2 6.3361 3 244 15.9 386.34 3.11 44 259 | 0.61154 20 3.97 0 0.647 8.704 86.9 1.801 5 264 13 389.7 5.12 50 260 | 0.66351 20 3.97 0 0.647 7.333 100 1.8946 5 264 13 383.29 7.79 36 261 | 0.65665 20 3.97 0 0.647 6.842 100 2.0107 5 264 13 391.93 6.9 30.1 262 | 0.54011 20 3.97 0 0.647 7.203 81.8 2.1121 5 264 13 392.8 9.59 33.8 263 | 0.53412 20 3.97 0 0.647 7.52 89.4 2.1398 5 264 13 388.37 7.26 43.1 264 | 0.52014 20 3.97 0 0.647 8.398 91.5 2.2885 5 264 13 386.86 5.91 48.8 265 | 0.82526 20 3.97 0 0.647 7.327 94.5 2.0788 5 264 13 393.42 11.25 31 266 | 0.55007 20 3.97 0 0.647 7.206 91.6 1.9301 5 264 13 387.89 8.1 36.5 267 | 0.76162 20 3.97 0 0.647 5.56 62.8 1.9865 5 264 13 392.4 10.45 22.8 268 | 0.7857 20 3.97 0 0.647 7.014 84.6 2.1329 5 264 13 384.07 14.79 30.7 269 | 0.57834 20 3.97 0 0.575 8.297 67 2.4216 5 264 13 384.54 7.44 50 270 | 0.5405 20 3.97 0 0.575 7.47 52.6 2.872 5 264 13 390.3 3.16 43.5 271 | 0.09065 20 6.96 1 0.464 5.92 61.5 3.9175 3 223 18.6 391.34 13.65 20.7 272 | 0.29916 20 6.96 0 0.464 5.856 42.1 4.429 3 223 18.6 388.65 13 21.1 273 | 0.16211 20 6.96 0 0.464 6.24 16.3 4.429 3 223 18.6 396.9 6.59 25.2 274 | 0.1146 20 6.96 0 0.464 6.538 58.7 3.9175 3 223 18.6 394.96 7.73 24.4 275 | 0.22188 20 6.96 1 0.464 7.691 51.8 4.3665 3 223 18.6 390.77 6.58 35.2 276 | 0.05644 40 6.41 1 0.447 6.758 32.9 4.0776 4 254 17.6 396.9 3.53 32.4 277 | 0.09604 40 6.41 0 0.447 6.854 42.8 4.2673 4 254 17.6 396.9 2.98 32 278 | 0.10469 40 6.41 1 0.447 7.267 49 4.7872 4 254 17.6 389.25 6.05 33.2 279 | 0.06127 40 6.41 1 0.447 6.826 27.6 4.8628 4 254 17.6 393.45 4.16 33.1 280 | 0.07978 40 6.41 0 0.447 6.482 32.1 4.1403 4 254 17.6 396.9 7.19 29.1 281 | 0.21038 20 3.33 0 0.4429 6.812 32.2 4.1007 5 216 14.9 396.9 4.85 35.1 282 | 0.03578 20 3.33 0 0.4429 7.82 64.5 4.6947 5 216 14.9 387.31 3.76 45.4 283 | 0.03705 20 3.33 0 0.4429 6.968 37.2 5.2447 5 216 14.9 392.23 4.59 35.4 284 | 0.06129 20 3.33 1 0.4429 7.645 49.7 5.2119 5 216 14.9 377.07 3.01 46 285 | 0.01501 90 1.21 1 0.401 7.923 24.8 5.885 1 198 13.6 395.52 3.16 50 286 | 0.00906 90 2.97 0 0.4 7.088 20.8 7.3073 1 285 15.3 394.72 7.85 32.2 287 | 0.01096 55 2.25 0 0.389 6.453 31.9 7.3073 1 300 15.3 394.72 8.23 22 288 | 0.01965 80 1.76 0 0.385 6.23 31.5 9.0892 1 241 18.2 341.6 12.93 20.1 289 | 0.03871 52.5 5.32 0 0.405 6.209 31.3 7.3172 6 293 16.6 396.9 7.14 23.2 290 | 0.0459 52.5 5.32 0 0.405 6.315 45.6 7.3172 6 293 16.6 396.9 7.6 22.3 291 | 0.04297 52.5 5.32 0 0.405 6.565 22.9 7.3172 6 293 16.6 371.72 9.51 24.8 292 | 0.03502 80 4.95 0 0.411 6.861 27.9 5.1167 4 245 19.2 396.9 3.33 28.5 293 | 0.07886 80 4.95 0 0.411 7.148 27.7 5.1167 4 245 19.2 396.9 3.56 37.3 294 | 0.03615 80 4.95 0 0.411 6.63 23.4 5.1167 4 245 19.2 396.9 4.7 27.9 295 | 0.08265 0 13.92 0 0.437 6.127 18.4 5.5027 4 289 16 396.9 8.58 23.9 296 | 0.08199 0 13.92 0 0.437 6.009 42.3 5.5027 4 289 16 396.9 10.4 21.7 297 | 0.12932 0 13.92 0 0.437 6.678 31.1 5.9604 4 289 16 396.9 6.27 28.6 298 | 0.05372 0 13.92 0 0.437 6.549 51 5.9604 4 289 16 392.85 7.39 27.1 299 | 0.14103 0 13.92 0 0.437 5.79 58 6.32 4 289 16 396.9 15.84 20.3 300 | 0.06466 70 2.24 0 0.4 6.345 20.1 7.8278 5 358 14.8 368.24 4.97 22.5 301 | 0.05561 70 2.24 0 0.4 7.041 10 7.8278 5 358 14.8 371.58 4.74 29 302 | 0.04417 70 2.24 0 0.4 6.871 47.4 7.8278 5 358 14.8 390.86 6.07 24.8 303 | 0.03537 34 6.09 0 0.433 6.59 40.4 5.4917 7 329 16.1 395.75 9.5 22 304 | 0.09266 34 6.09 0 0.433 6.495 18.4 5.4917 7 329 16.1 383.61 8.67 26.4 305 | 0.1 34 6.09 0 0.433 6.982 17.7 5.4917 7 329 16.1 390.43 4.86 33.1 306 | 0.05515 33 2.18 0 0.472 
7.236 41.1 4.022 7 222 18.4 393.68 6.93 36.1 307 | 0.05479 33 2.18 0 0.472 6.616 58.1 3.37 7 222 18.4 393.36 8.93 28.4 308 | 0.07503 33 2.18 0 0.472 7.42 71.9 3.0992 7 222 18.4 396.9 6.47 33.4 309 | 0.04932 33 2.18 0 0.472 6.849 70.3 3.1827 7 222 18.4 396.9 7.53 28.2 310 | 0.49298 0 9.9 0 0.544 6.635 82.5 3.3175 4 304 18.4 396.9 4.54 22.8 311 | 0.3494 0 9.9 0 0.544 5.972 76.7 3.1025 4 304 18.4 396.24 9.97 20.3 312 | 2.63548 0 9.9 0 0.544 4.973 37.8 2.5194 4 304 18.4 350.45 12.64 16.1 313 | 0.79041 0 9.9 0 0.544 6.122 52.8 2.6403 4 304 18.4 396.9 5.98 22.1 314 | 0.26169 0 9.9 0 0.544 6.023 90.4 2.834 4 304 18.4 396.3 11.72 19.4 315 | 0.26938 0 9.9 0 0.544 6.266 82.8 3.2628 4 304 18.4 393.39 7.9 21.6 316 | 0.3692 0 9.9 0 0.544 6.567 87.3 3.6023 4 304 18.4 395.69 9.28 23.8 317 | 0.25356 0 9.9 0 0.544 5.705 77.7 3.945 4 304 18.4 396.42 11.5 16.2 318 | 0.31827 0 9.9 0 0.544 5.914 83.2 3.9986 4 304 18.4 390.7 18.33 17.8 319 | 0.24522 0 9.9 0 0.544 5.782 71.7 4.0317 4 304 18.4 396.9 15.94 19.8 320 | 0.40202 0 9.9 0 0.544 6.382 67.2 3.5325 4 304 18.4 395.21 10.36 23.1 321 | 0.47547 0 9.9 0 0.544 6.113 58.8 4.0019 4 304 18.4 396.23 12.73 21 322 | 0.1676 0 7.38 0 0.493 6.426 52.3 4.5404 5 287 19.6 396.9 7.2 23.8 323 | 0.18159 0 7.38 0 0.493 6.376 54.3 4.5404 5 287 19.6 396.9 6.87 23.1 324 | 0.35114 0 7.38 0 0.493 6.041 49.9 4.7211 5 287 19.6 396.9 7.7 20.4 325 | 0.28392 0 7.38 0 0.493 5.708 74.3 4.7211 5 287 19.6 391.13 11.74 18.5 326 | 0.34109 0 7.38 0 0.493 6.415 40.1 4.7211 5 287 19.6 396.9 6.12 25 327 | 0.19186 0 7.38 0 0.493 6.431 14.7 5.4159 5 287 19.6 393.68 5.08 24.6 328 | 0.30347 0 7.38 0 0.493 6.312 28.9 5.4159 5 287 19.6 396.9 6.15 23 329 | 0.24103 0 7.38 0 0.493 6.083 43.7 5.4159 5 287 19.6 396.9 12.79 22.2 330 | 0.06617 0 3.24 0 0.46 5.868 25.8 5.2146 4 430 16.9 382.44 9.97 19.3 331 | 0.06724 0 3.24 0 0.46 6.333 17.2 5.2146 4 430 16.9 375.21 7.34 22.6 332 | 0.04544 0 3.24 0 0.46 6.144 32.2 5.8736 4 430 16.9 368.57 9.09 19.8 333 | 0.05023 35 6.06 0 0.4379 5.706 28.4 6.6407 1 304 16.9 394.02 12.43 17.1 334 | 0.03466 35 6.06 0 0.4379 6.031 23.3 6.6407 1 304 16.9 362.25 7.83 19.4 335 | 0.05083 0 5.19 0 0.515 6.316 38.1 6.4584 5 224 20.2 389.71 5.68 22.2 336 | 0.03738 0 5.19 0 0.515 6.31 38.5 6.4584 5 224 20.2 389.4 6.75 20.7 337 | 0.03961 0 5.19 0 0.515 6.037 34.5 5.9853 5 224 20.2 396.9 8.01 21.1 338 | 0.03427 0 5.19 0 0.515 5.869 46.3 5.2311 5 224 20.2 396.9 9.8 19.5 339 | 0.03041 0 5.19 0 0.515 5.895 59.6 5.615 5 224 20.2 394.81 10.56 18.5 340 | 0.03306 0 5.19 0 0.515 6.059 37.3 4.8122 5 224 20.2 396.14 8.51 20.6 341 | 0.05497 0 5.19 0 0.515 5.985 45.4 4.8122 5 224 20.2 396.9 9.74 19 342 | 0.06151 0 5.19 0 0.515 5.968 58.5 4.8122 5 224 20.2 396.9 9.29 18.7 343 | 0.01301 35 1.52 0 0.442 7.241 49.3 7.0379 1 284 15.5 394.74 5.49 32.7 344 | 0.02498 0 1.89 0 0.518 6.54 59.7 6.2669 1 422 15.9 389.96 8.65 16.5 345 | 0.02543 55 3.78 0 0.484 6.696 56.4 5.7321 5 370 17.6 396.9 7.18 23.9 346 | 0.03049 55 3.78 0 0.484 6.874 28.1 6.4654 5 370 17.6 387.97 4.61 31.2 347 | 0.03113 0 4.39 0 0.442 6.014 48.5 8.0136 3 352 18.8 385.64 10.53 17.5 348 | 0.06162 0 4.39 0 0.442 5.898 52.3 8.0136 3 352 18.8 364.61 12.67 17.2 349 | 0.0187 85 4.15 0 0.429 6.516 27.7 8.5353 4 351 17.9 392.43 6.36 23.1 350 | 0.01501 80 2.01 0 0.435 6.635 29.7 8.344 4 280 17 390.94 5.99 24.5 351 | 0.02899 40 1.25 0 0.429 6.939 34.5 8.7921 1 335 19.7 389.85 5.89 26.6 352 | 0.06211 40 1.25 0 0.429 6.49 44.4 8.7921 1 335 19.7 396.9 5.98 22.9 353 | 0.0795 60 1.69 0 0.411 6.579 35.9 10.7103 4 411 18.3 370.78 5.49 24.1 354 | 0.07244 60 
1.69 0 0.411 5.884 18.5 10.7103 4 411 18.3 392.33 7.79 18.6 355 | 0.01709 90 2.02 0 0.41 6.728 36.1 12.1265 5 187 17 384.46 4.5 30.1 356 | 0.04301 80 1.91 0 0.413 5.663 21.9 10.5857 4 334 22 382.8 8.05 18.2 357 | 0.10659 80 1.91 0 0.413 5.936 19.5 10.5857 4 334 22 376.04 5.57 20.6 358 | 8.98296 0 18.1 1 0.77 6.212 97.4 2.1222 24 666 20.2 377.73 17.6 17.8 359 | 3.8497 0 18.1 1 0.77 6.395 91 2.5052 24 666 20.2 391.34 13.27 21.7 360 | 5.20177 0 18.1 1 0.77 6.127 83.4 2.7227 24 666 20.2 395.43 11.48 22.7 361 | 4.26131 0 18.1 0 0.77 6.112 81.3 2.5091 24 666 20.2 390.74 12.67 22.6 362 | 4.54192 0 18.1 0 0.77 6.398 88 2.5182 24 666 20.2 374.56 7.79 25 363 | 3.83684 0 18.1 0 0.77 6.251 91.1 2.2955 24 666 20.2 350.65 14.19 19.9 364 | 3.67822 0 18.1 0 0.77 5.362 96.2 2.1036 24 666 20.2 380.79 10.19 20.8 365 | 4.22239 0 18.1 1 0.77 5.803 89 1.9047 24 666 20.2 353.04 14.64 16.8 366 | 3.47428 0 18.1 1 0.718 8.78 82.9 1.9047 24 666 20.2 354.55 5.29 21.9 367 | 4.55587 0 18.1 0 0.718 3.561 87.9 1.6132 24 666 20.2 354.7 7.12 27.5 368 | 3.69695 0 18.1 0 0.718 4.963 91.4 1.7523 24 666 20.2 316.03 14 21.9 369 | 13.5222 0 18.1 0 0.631 3.863 100 1.5106 24 666 20.2 131.42 13.33 23.1 370 | 4.89822 0 18.1 0 0.631 4.97 100 1.3325 24 666 20.2 375.52 3.26 50 371 | 5.66998 0 18.1 1 0.631 6.683 96.8 1.3567 24 666 20.2 375.33 3.73 50 372 | 6.53876 0 18.1 1 0.631 7.016 97.5 1.2024 24 666 20.2 392.05 2.96 50 373 | 9.2323 0 18.1 0 0.631 6.216 100 1.1691 24 666 20.2 366.15 9.53 50 374 | 8.26725 0 18.1 1 0.668 5.875 89.6 1.1296 24 666 20.2 347.88 8.88 50 375 | 11.1081 0 18.1 0 0.668 4.906 100 1.1742 24 666 20.2 396.9 34.77 13.8 376 | 18.4982 0 18.1 0 0.668 4.138 100 1.137 24 666 20.2 396.9 37.97 13.8 377 | 19.6091 0 18.1 0 0.671 7.313 97.9 1.3163 24 666 20.2 396.9 13.44 15 378 | 15.288 0 18.1 0 0.671 6.649 93.3 1.3449 24 666 20.2 363.02 23.24 13.9 379 | 9.82349 0 18.1 0 0.671 6.794 98.8 1.358 24 666 20.2 396.9 21.24 13.3 380 | 23.6482 0 18.1 0 0.671 6.38 96.2 1.3861 24 666 20.2 396.9 23.69 13.1 381 | 17.8667 0 18.1 0 0.671 6.223 100 1.3861 24 666 20.2 393.74 21.78 10.2 382 | 88.9762 0 18.1 0 0.671 6.968 91.9 1.4165 24 666 20.2 396.9 17.21 10.4 383 | 15.8744 0 18.1 0 0.671 6.545 99.1 1.5192 24 666 20.2 396.9 21.08 10.9 384 | 9.18702 0 18.1 0 0.7 5.536 100 1.5804 24 666 20.2 396.9 23.6 11.3 385 | 7.99248 0 18.1 0 0.7 5.52 100 1.5331 24 666 20.2 396.9 24.56 12.3 386 | 20.0849 0 18.1 0 0.7 4.368 91.2 1.4395 24 666 20.2 285.83 30.63 8.8 387 | 16.8118 0 18.1 0 0.7 5.277 98.1 1.4261 24 666 20.2 396.9 30.81 7.2 388 | 24.3938 0 18.1 0 0.7 4.652 100 1.4672 24 666 20.2 396.9 28.28 10.5 389 | 22.5971 0 18.1 0 0.7 5 89.5 1.5184 24 666 20.2 396.9 31.99 7.4 390 | 14.3337 0 18.1 0 0.7 4.88 100 1.5895 24 666 20.2 372.92 30.62 10.2 391 | 8.15174 0 18.1 0 0.7 5.39 98.9 1.7281 24 666 20.2 396.9 20.85 11.5 392 | 6.96215 0 18.1 0 0.7 5.713 97 1.9265 24 666 20.2 394.43 17.11 15.1 393 | 5.29305 0 18.1 0 0.7 6.051 82.5 2.1678 24 666 20.2 378.38 18.76 23.2 394 | 11.5779 0 18.1 0 0.7 5.036 97 1.77 24 666 20.2 396.9 25.68 9.7 395 | 8.64476 0 18.1 0 0.693 6.193 92.6 1.7912 24 666 20.2 396.9 15.17 13.8 396 | 13.3598 0 18.1 0 0.693 5.887 94.7 1.7821 24 666 20.2 396.9 16.35 12.7 397 | 8.71675 0 18.1 0 0.693 6.471 98.8 1.7257 24 666 20.2 391.98 17.12 13.1 398 | 5.87205 0 18.1 0 0.693 6.405 96 1.6768 24 666 20.2 396.9 19.37 12.5 399 | 7.67202 0 18.1 0 0.693 5.747 98.9 1.6334 24 666 20.2 393.1 19.92 8.5 400 | 38.3518 0 18.1 0 0.693 5.453 100 1.4896 24 666 20.2 396.9 30.59 5 401 | 9.91655 0 18.1 0 0.693 5.852 77.8 1.5004 24 666 20.2 338.16 29.97 6.3 402 | 
25.0461 0 18.1 0 0.693 5.987 100 1.5888 24 666 20.2 396.9 26.77 5.6 403 | 14.2362 0 18.1 0 0.693 6.343 100 1.5741 24 666 20.2 396.9 20.32 7.2 404 | 9.59571 0 18.1 0 0.693 6.404 100 1.639 24 666 20.2 376.11 20.31 12.1 405 | 24.8017 0 18.1 0 0.693 5.349 96 1.7028 24 666 20.2 396.9 19.77 8.3 406 | 41.5292 0 18.1 0 0.693 5.531 85.4 1.6074 24 666 20.2 329.46 27.38 8.5 407 | 67.9208 0 18.1 0 0.693 5.683 100 1.4254 24 666 20.2 384.97 22.98 5 408 | 20.7162 0 18.1 0 0.659 4.138 100 1.1781 24 666 20.2 370.22 23.34 11.9 409 | 11.9511 0 18.1 0 0.659 5.608 100 1.2852 24 666 20.2 332.09 12.13 27.9 410 | 7.40389 0 18.1 0 0.597 5.617 97.9 1.4547 24 666 20.2 314.64 26.4 17.2 411 | 14.4383 0 18.1 0 0.597 6.852 100 1.4655 24 666 20.2 179.36 19.78 27.5 412 | 51.1358 0 18.1 0 0.597 5.757 100 1.413 24 666 20.2 2.6 10.11 15 413 | 14.0507 0 18.1 0 0.597 6.657 100 1.5275 24 666 20.2 35.05 21.22 17.2 414 | 18.811 0 18.1 0 0.597 4.628 100 1.5539 24 666 20.2 28.79 34.37 17.9 415 | 28.6558 0 18.1 0 0.597 5.155 100 1.5894 24 666 20.2 210.97 20.08 16.3 416 | 45.7461 0 18.1 0 0.693 4.519 100 1.6582 24 666 20.2 88.27 36.98 7 417 | 18.0846 0 18.1 0 0.679 6.434 100 1.8347 24 666 20.2 27.25 29.05 7.2 418 | 10.8342 0 18.1 0 0.679 6.782 90.8 1.8195 24 666 20.2 21.57 25.79 7.5 419 | 25.9406 0 18.1 0 0.679 5.304 89.1 1.6475 24 666 20.2 127.36 26.64 10.4 420 | 73.5341 0 18.1 0 0.679 5.957 100 1.8026 24 666 20.2 16.45 20.62 8.8 421 | 11.8123 0 18.1 0 0.718 6.824 76.5 1.794 24 666 20.2 48.45 22.74 8.4 422 | 11.0874 0 18.1 0 0.718 6.411 100 1.8589 24 666 20.2 318.75 15.02 16.7 423 | 7.02259 0 18.1 0 0.718 6.006 95.3 1.8746 24 666 20.2 319.98 15.7 14.2 424 | 12.0482 0 18.1 0 0.614 5.648 87.6 1.9512 24 666 20.2 291.55 14.1 20.8 425 | 7.05042 0 18.1 0 0.614 6.103 85.1 2.0218 24 666 20.2 2.52 23.29 13.4 426 | 8.79212 0 18.1 0 0.584 5.565 70.6 2.0635 24 666 20.2 3.65 17.16 11.7 427 | 15.8603 0 18.1 0 0.679 5.896 95.4 1.9096 24 666 20.2 7.68 24.39 8.3 428 | 12.2472 0 18.1 0 0.584 5.837 59.7 1.9976 24 666 20.2 24.65 15.69 10.2 429 | 37.6619 0 18.1 0 0.679 6.202 78.7 1.8629 24 666 20.2 18.82 14.52 10.9 430 | 7.36711 0 18.1 0 0.679 6.193 78.1 1.9356 24 666 20.2 96.73 21.52 11 431 | 9.33889 0 18.1 0 0.679 6.38 95.6 1.9682 24 666 20.2 60.72 24.08 9.5 432 | 8.49213 0 18.1 0 0.584 6.348 86.1 2.0527 24 666 20.2 83.45 17.64 14.5 433 | 10.0623 0 18.1 0 0.584 6.833 94.3 2.0882 24 666 20.2 81.33 19.69 14.1 434 | 6.44405 0 18.1 0 0.584 6.425 74.8 2.2004 24 666 20.2 97.95 12.03 16.1 435 | 5.58107 0 18.1 0 0.713 6.436 87.9 2.3158 24 666 20.2 100.19 16.22 14.3 436 | 13.9134 0 18.1 0 0.713 6.208 95 2.2222 24 666 20.2 100.63 15.17 11.7 437 | 11.1604 0 18.1 0 0.74 6.629 94.6 2.1247 24 666 20.2 109.85 23.27 13.4 438 | 14.4208 0 18.1 0 0.74 6.461 93.3 2.0026 24 666 20.2 27.49 18.05 9.6 439 | 15.1772 0 18.1 0 0.74 6.152 100 1.9142 24 666 20.2 9.32 26.45 8.7 440 | 13.6781 0 18.1 0 0.74 5.935 87.9 1.8206 24 666 20.2 68.95 34.02 8.4 441 | 9.39063 0 18.1 0 0.74 5.627 93.9 1.8172 24 666 20.2 396.9 22.88 12.8 442 | 22.0511 0 18.1 0 0.74 5.818 92.4 1.8662 24 666 20.2 391.45 22.11 10.5 443 | 9.72418 0 18.1 0 0.74 6.406 97.2 2.0651 24 666 20.2 385.96 19.52 17.1 444 | 5.66637 0 18.1 0 0.74 6.219 100 2.0048 24 666 20.2 395.69 16.59 18.4 445 | 9.96654 0 18.1 0 0.74 6.485 100 1.9784 24 666 20.2 386.73 18.85 15.4 446 | 12.8023 0 18.1 0 0.74 5.854 96.6 1.8956 24 666 20.2 240.52 23.79 10.8 447 | 10.6718 0 18.1 0 0.74 6.459 94.8 1.9879 24 666 20.2 43.06 23.98 11.8 448 | 6.28807 0 18.1 0 0.74 6.341 96.4 2.072 24 666 20.2 318.01 17.79 14.9 449 | 9.92485 0 18.1 0 0.74 6.251 96.6 
2.198 24 666 20.2 388.52 16.44 12.6 450 | 9.32909 0 18.1 0 0.713 6.185 98.7 2.2616 24 666 20.2 396.9 18.13 14.1 451 | 7.52601 0 18.1 0 0.713 6.417 98.3 2.185 24 666 20.2 304.21 19.31 13 452 | 6.71772 0 18.1 0 0.713 6.749 92.6 2.3236 24 666 20.2 0.32 17.44 13.4 453 | 5.44114 0 18.1 0 0.713 6.655 98.2 2.3552 24 666 20.2 355.29 17.73 15.2 454 | 5.09017 0 18.1 0 0.713 6.297 91.8 2.3682 24 666 20.2 385.09 17.27 16.1 455 | 8.24809 0 18.1 0 0.713 7.393 99.3 2.4527 24 666 20.2 375.87 16.74 17.8 456 | 9.51363 0 18.1 0 0.713 6.728 94.1 2.4961 24 666 20.2 6.68 18.71 14.9 457 | 4.75237 0 18.1 0 0.713 6.525 86.5 2.4358 24 666 20.2 50.92 18.13 14.1 458 | 4.66883 0 18.1 0 0.713 5.976 87.9 2.5806 24 666 20.2 10.48 19.01 12.7 459 | 8.20058 0 18.1 0 0.713 5.936 80.3 2.7792 24 666 20.2 3.5 16.94 13.5 460 | 7.75223 0 18.1 0 0.713 6.301 83.7 2.7831 24 666 20.2 272.21 16.23 14.9 461 | 6.80117 0 18.1 0 0.713 6.081 84.4 2.7175 24 666 20.2 396.9 14.7 20 462 | 4.81213 0 18.1 0 0.713 6.701 90 2.5975 24 666 20.2 255.23 16.42 16.4 463 | 3.69311 0 18.1 0 0.713 6.376 88.4 2.5671 24 666 20.2 391.43 14.65 17.7 464 | 6.65492 0 18.1 0 0.713 6.317 83 2.7344 24 666 20.2 396.9 13.99 19.5 465 | 5.82115 0 18.1 0 0.713 6.513 89.9 2.8016 24 666 20.2 393.82 10.29 20.2 466 | 7.83932 0 18.1 0 0.655 6.209 65.4 2.9634 24 666 20.2 396.9 13.22 21.4 467 | 3.1636 0 18.1 0 0.655 5.759 48.2 3.0665 24 666 20.2 334.4 14.13 19.9 468 | 3.77498 0 18.1 0 0.655 5.952 84.7 2.8715 24 666 20.2 22.01 17.15 19 469 | 4.42228 0 18.1 0 0.584 6.003 94.5 2.5403 24 666 20.2 331.29 21.32 19.1 470 | 15.5757 0 18.1 0 0.58 5.926 71 2.9084 24 666 20.2 368.74 18.13 19.1 471 | 13.0751 0 18.1 0 0.58 5.713 56.7 2.8237 24 666 20.2 396.9 14.76 20.1 472 | 4.34879 0 18.1 0 0.58 6.167 84 3.0334 24 666 20.2 396.9 16.29 19.9 473 | 4.03841 0 18.1 0 0.532 6.229 90.7 3.0993 24 666 20.2 395.33 12.87 19.6 474 | 3.56868 0 18.1 0 0.58 6.437 75 2.8965 24 666 20.2 393.37 14.36 23.2 475 | 4.64689 0 18.1 0 0.614 6.98 67.6 2.5329 24 666 20.2 374.68 11.66 29.8 476 | 8.05579 0 18.1 0 0.584 5.427 95.4 2.4298 24 666 20.2 352.58 18.14 13.8 477 | 6.39312 0 18.1 0 0.584 6.162 97.4 2.206 24 666 20.2 302.76 24.1 13.3 478 | 4.87141 0 18.1 0 0.614 6.484 93.6 2.3053 24 666 20.2 396.21 18.68 16.7 479 | 15.0234 0 18.1 0 0.614 5.304 97.3 2.1007 24 666 20.2 349.48 24.91 12 480 | 10.233 0 18.1 0 0.614 6.185 96.7 2.1705 24 666 20.2 379.7 18.03 14.6 481 | 14.3337 0 18.1 0 0.614 6.229 88 1.9512 24 666 20.2 383.32 13.11 21.4 482 | 5.82401 0 18.1 0 0.532 6.242 64.7 3.4242 24 666 20.2 396.9 10.74 23 483 | 5.70818 0 18.1 0 0.532 6.75 74.9 3.3317 24 666 20.2 393.07 7.74 23.7 484 | 5.73116 0 18.1 0 0.532 7.061 77 3.4106 24 666 20.2 395.28 7.01 25 485 | 2.81838 0 18.1 0 0.532 5.762 40.3 4.0983 24 666 20.2 392.92 10.42 21.8 486 | 2.37857 0 18.1 0 0.583 5.871 41.9 3.724 24 666 20.2 370.73 13.34 20.6 487 | 3.67367 0 18.1 0 0.583 6.312 51.9 3.9917 24 666 20.2 388.62 10.58 21.2 488 | 5.69175 0 18.1 0 0.583 6.114 79.8 3.5459 24 666 20.2 392.68 14.98 19.1 489 | 4.83567 0 18.1 0 0.583 5.905 53.2 3.1523 24 666 20.2 388.22 11.45 20.6 490 | 0.15086 0 27.74 0 0.609 5.454 92.7 1.8209 4 711 20.1 395.09 18.06 15.2 491 | 0.18337 0 27.74 0 0.609 5.414 98.3 1.7554 4 711 20.1 344.05 23.97 7 492 | 0.20746 0 27.74 0 0.609 5.093 98 1.8226 4 711 20.1 318.43 29.68 8.1 493 | 0.10574 0 27.74 0 0.609 5.983 98.8 1.8681 4 711 20.1 390.11 18.07 13.6 494 | 0.11132 0 27.74 0 0.609 5.983 83.5 2.1099 4 711 20.1 396.9 13.35 20.1 495 | 0.17331 0 9.69 0 0.585 5.707 54 2.3817 6 391 19.2 396.9 12.01 21.8 496 | 0.27957 0 9.69 0 0.585 5.926 42.6 2.3817 
6 391 19.2 396.9 13.59 24.5 497 | 0.17899 0 9.69 0 0.585 5.67 28.8 2.7986 6 391 19.2 393.29 17.6 23.1 498 | 0.2896 0 9.69 0 0.585 5.39 72.9 2.7986 6 391 19.2 396.9 21.14 19.7 499 | 0.26838 0 9.69 0 0.585 5.794 70.6 2.8927 6 391 19.2 396.9 14.1 18.3 500 | 0.23912 0 9.69 0 0.585 6.019 65.3 2.4091 6 391 19.2 396.9 12.92 21.2 501 | 0.17783 0 9.69 0 0.585 5.569 73.5 2.3999 6 391 19.2 395.77 15.1 17.5 502 | 0.22438 0 9.69 0 0.585 6.027 79.7 2.4982 6 391 19.2 396.9 14.33 16.8 503 | 0.06263 0 11.93 0 0.573 6.593 69.1 2.4786 1 273 21 391.99 9.67 22.4 504 | 0.04527 0 11.93 0 0.573 6.12 76.7 2.2875 1 273 21 396.9 9.08 20.6 505 | 0.06076 0 11.93 0 0.573 6.976 91 2.1675 1 273 21 396.9 5.64 23.9 506 | 0.10959 0 11.93 0 0.573 6.794 89.3 2.3889 1 273 21 393.45 6.48 22 507 | 0.04741 0 11.93 0 0.573 6.03 80.8 2.505 1 273 21 396.9 7.88 11.9 508 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | GNU GENERAL PUBLIC LICENSE 2 | Version 3, 29 June 2007 3 | 4 | Copyright (C) 2007 Free Software Foundation, Inc. 5 | Everyone is permitted to copy and distribute verbatim copies 6 | of this license document, but changing it is not allowed. 7 | 8 | Preamble 9 | 10 | The GNU General Public License is a free, copyleft license for 11 | software and other kinds of works. 12 | 13 | The licenses for most software and other practical works are designed 14 | to take away your freedom to share and change the works. By contrast, 15 | the GNU General Public License is intended to guarantee your freedom to 16 | share and change all versions of a program--to make sure it remains free 17 | software for all its users. We, the Free Software Foundation, use the 18 | GNU General Public License for most of our software; it applies also to 19 | any other work released this way by its authors. You can apply it to 20 | your programs, too. 21 | 22 | When we speak of free software, we are referring to freedom, not 23 | price. Our General Public Licenses are designed to make sure that you 24 | have the freedom to distribute copies of free software (and charge for 25 | them if you wish), that you receive source code or can get it if you 26 | want it, that you can change the software or use pieces of it in new 27 | free programs, and that you know you can do these things. 28 | 29 | To protect your rights, we need to prevent others from denying you 30 | these rights or asking you to surrender the rights. Therefore, you have 31 | certain responsibilities if you distribute copies of the software, or if 32 | you modify it: responsibilities to respect the freedom of others. 33 | 34 | For example, if you distribute copies of such a program, whether 35 | gratis or for a fee, you must pass on to the recipients the same 36 | freedoms that you received. You must make sure that they, too, receive 37 | or can get the source code. And you must show them these terms so they 38 | know their rights. 39 | 40 | Developers that use the GNU GPL protect your rights with two steps: 41 | (1) assert copyright on the software, and (2) offer you this License 42 | giving you legal permission to copy, distribute and/or modify it. 43 | 44 | For the developers' and authors' protection, the GPL clearly explains 45 | that there is no warranty for this free software. 
For both users' and 46 | authors' sake, the GPL requires that modified versions be marked as 47 | changed, so that their problems will not be attributed erroneously to 48 | authors of previous versions. 49 | 50 | Some devices are designed to deny users access to install or run 51 | modified versions of the software inside them, although the manufacturer 52 | can do so. This is fundamentally incompatible with the aim of 53 | protecting users' freedom to change the software. The systematic 54 | pattern of such abuse occurs in the area of products for individuals to 55 | use, which is precisely where it is most unacceptable. Therefore, we 56 | have designed this version of the GPL to prohibit the practice for those 57 | products. If such problems arise substantially in other domains, we 58 | stand ready to extend this provision to those domains in future versions 59 | of the GPL, as needed to protect the freedom of users. 60 | 61 | Finally, every program is threatened constantly by software patents. 62 | States should not allow patents to restrict development and use of 63 | software on general-purpose computers, but in those that do, we wish to 64 | avoid the special danger that patents applied to a free program could 65 | make it effectively proprietary. To prevent this, the GPL assures that 66 | patents cannot be used to render the program non-free. 67 | 68 | The precise terms and conditions for copying, distribution and 69 | modification follow. 70 | 71 | TERMS AND CONDITIONS 72 | 73 | 0. Definitions. 74 | 75 | "This License" refers to version 3 of the GNU General Public License. 76 | 77 | "Copyright" also means copyright-like laws that apply to other kinds of 78 | works, such as semiconductor masks. 79 | 80 | "The Program" refers to any copyrightable work licensed under this 81 | License. Each licensee is addressed as "you". "Licensees" and 82 | "recipients" may be individuals or organizations. 83 | 84 | To "modify" a work means to copy from or adapt all or part of the work 85 | in a fashion requiring copyright permission, other than the making of an 86 | exact copy. The resulting work is called a "modified version" of the 87 | earlier work or a work "based on" the earlier work. 88 | 89 | A "covered work" means either the unmodified Program or a work based 90 | on the Program. 91 | 92 | To "propagate" a work means to do anything with it that, without 93 | permission, would make you directly or secondarily liable for 94 | infringement under applicable copyright law, except executing it on a 95 | computer or modifying a private copy. Propagation includes copying, 96 | distribution (with or without modification), making available to the 97 | public, and in some countries other activities as well. 98 | 99 | To "convey" a work means any kind of propagation that enables other 100 | parties to make or receive copies. Mere interaction with a user through 101 | a computer network, with no transfer of a copy, is not conveying. 102 | 103 | An interactive user interface displays "Appropriate Legal Notices" 104 | to the extent that it includes a convenient and prominently visible 105 | feature that (1) displays an appropriate copyright notice, and (2) 106 | tells the user that there is no warranty for the work (except to the 107 | extent that warranties are provided), that licensees may convey the 108 | work under this License, and how to view a copy of this License. 
If 109 | the interface presents a list of user commands or options, such as a 110 | menu, a prominent item in the list meets this criterion. 111 | 112 | 1. Source Code. 113 | 114 | The "source code" for a work means the preferred form of the work 115 | for making modifications to it. "Object code" means any non-source 116 | form of a work. 117 | 118 | A "Standard Interface" means an interface that either is an official 119 | standard defined by a recognized standards body, or, in the case of 120 | interfaces specified for a particular programming language, one that 121 | is widely used among developers working in that language. 122 | 123 | The "System Libraries" of an executable work include anything, other 124 | than the work as a whole, that (a) is included in the normal form of 125 | packaging a Major Component, but which is not part of that Major 126 | Component, and (b) serves only to enable use of the work with that 127 | Major Component, or to implement a Standard Interface for which an 128 | implementation is available to the public in source code form. A 129 | "Major Component", in this context, means a major essential component 130 | (kernel, window system, and so on) of the specific operating system 131 | (if any) on which the executable work runs, or a compiler used to 132 | produce the work, or an object code interpreter used to run it. 133 | 134 | The "Corresponding Source" for a work in object code form means all 135 | the source code needed to generate, install, and (for an executable 136 | work) run the object code and to modify the work, including scripts to 137 | control those activities. However, it does not include the work's 138 | System Libraries, or general-purpose tools or generally available free 139 | programs which are used unmodified in performing those activities but 140 | which are not part of the work. For example, Corresponding Source 141 | includes interface definition files associated with source files for 142 | the work, and the source code for shared libraries and dynamically 143 | linked subprograms that the work is specifically designed to require, 144 | such as by intimate data communication or control flow between those 145 | subprograms and other parts of the work. 146 | 147 | The Corresponding Source need not include anything that users 148 | can regenerate automatically from other parts of the Corresponding 149 | Source. 150 | 151 | The Corresponding Source for a work in source code form is that 152 | same work. 153 | 154 | 2. Basic Permissions. 155 | 156 | All rights granted under this License are granted for the term of 157 | copyright on the Program, and are irrevocable provided the stated 158 | conditions are met. This License explicitly affirms your unlimited 159 | permission to run the unmodified Program. The output from running a 160 | covered work is covered by this License only if the output, given its 161 | content, constitutes a covered work. This License acknowledges your 162 | rights of fair use or other equivalent, as provided by copyright law. 163 | 164 | You may make, run and propagate covered works that you do not 165 | convey, without conditions so long as your license otherwise remains 166 | in force. You may convey covered works to others for the sole purpose 167 | of having them make modifications exclusively for you, or provide you 168 | with facilities for running those works, provided that you comply with 169 | the terms of this License in conveying all material for which you do 170 | not control copyright. 
Those thus making or running the covered works 171 | for you must do so exclusively on your behalf, under your direction 172 | and control, on terms that prohibit them from making any copies of 173 | your copyrighted material outside their relationship with you. 174 | 175 | Conveying under any other circumstances is permitted solely under 176 | the conditions stated below. Sublicensing is not allowed; section 10 177 | makes it unnecessary. 178 | 179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law. 180 | 181 | No covered work shall be deemed part of an effective technological 182 | measure under any applicable law fulfilling obligations under article 183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or 184 | similar laws prohibiting or restricting circumvention of such 185 | measures. 186 | 187 | When you convey a covered work, you waive any legal power to forbid 188 | circumvention of technological measures to the extent such circumvention 189 | is effected by exercising rights under this License with respect to 190 | the covered work, and you disclaim any intention to limit operation or 191 | modification of the work as a means of enforcing, against the work's 192 | users, your or third parties' legal rights to forbid circumvention of 193 | technological measures. 194 | 195 | 4. Conveying Verbatim Copies. 196 | 197 | You may convey verbatim copies of the Program's source code as you 198 | receive it, in any medium, provided that you conspicuously and 199 | appropriately publish on each copy an appropriate copyright notice; 200 | keep intact all notices stating that this License and any 201 | non-permissive terms added in accord with section 7 apply to the code; 202 | keep intact all notices of the absence of any warranty; and give all 203 | recipients a copy of this License along with the Program. 204 | 205 | You may charge any price or no price for each copy that you convey, 206 | and you may offer support or warranty protection for a fee. 207 | 208 | 5. Conveying Modified Source Versions. 209 | 210 | You may convey a work based on the Program, or the modifications to 211 | produce it from the Program, in the form of source code under the 212 | terms of section 4, provided that you also meet all of these conditions: 213 | 214 | a) The work must carry prominent notices stating that you modified 215 | it, and giving a relevant date. 216 | 217 | b) The work must carry prominent notices stating that it is 218 | released under this License and any conditions added under section 219 | 7. This requirement modifies the requirement in section 4 to 220 | "keep intact all notices". 221 | 222 | c) You must license the entire work, as a whole, under this 223 | License to anyone who comes into possession of a copy. This 224 | License will therefore apply, along with any applicable section 7 225 | additional terms, to the whole of the work, and all its parts, 226 | regardless of how they are packaged. This License gives no 227 | permission to license the work in any other way, but it does not 228 | invalidate such permission if you have separately received it. 229 | 230 | d) If the work has interactive user interfaces, each must display 231 | Appropriate Legal Notices; however, if the Program has interactive 232 | interfaces that do not display Appropriate Legal Notices, your 233 | work need not make them do so. 
234 | 235 | A compilation of a covered work with other separate and independent 236 | works, which are not by their nature extensions of the covered work, 237 | and which are not combined with it such as to form a larger program, 238 | in or on a volume of a storage or distribution medium, is called an 239 | "aggregate" if the compilation and its resulting copyright are not 240 | used to limit the access or legal rights of the compilation's users 241 | beyond what the individual works permit. Inclusion of a covered work 242 | in an aggregate does not cause this License to apply to the other 243 | parts of the aggregate. 244 | 245 | 6. Conveying Non-Source Forms. 246 | 247 | You may convey a covered work in object code form under the terms 248 | of sections 4 and 5, provided that you also convey the 249 | machine-readable Corresponding Source under the terms of this License, 250 | in one of these ways: 251 | 252 | a) Convey the object code in, or embodied in, a physical product 253 | (including a physical distribution medium), accompanied by the 254 | Corresponding Source fixed on a durable physical medium 255 | customarily used for software interchange. 256 | 257 | b) Convey the object code in, or embodied in, a physical product 258 | (including a physical distribution medium), accompanied by a 259 | written offer, valid for at least three years and valid for as 260 | long as you offer spare parts or customer support for that product 261 | model, to give anyone who possesses the object code either (1) a 262 | copy of the Corresponding Source for all the software in the 263 | product that is covered by this License, on a durable physical 264 | medium customarily used for software interchange, for a price no 265 | more than your reasonable cost of physically performing this 266 | conveying of source, or (2) access to copy the 267 | Corresponding Source from a network server at no charge. 268 | 269 | c) Convey individual copies of the object code with a copy of the 270 | written offer to provide the Corresponding Source. This 271 | alternative is allowed only occasionally and noncommercially, and 272 | only if you received the object code with such an offer, in accord 273 | with subsection 6b. 274 | 275 | d) Convey the object code by offering access from a designated 276 | place (gratis or for a charge), and offer equivalent access to the 277 | Corresponding Source in the same way through the same place at no 278 | further charge. You need not require recipients to copy the 279 | Corresponding Source along with the object code. If the place to 280 | copy the object code is a network server, the Corresponding Source 281 | may be on a different server (operated by you or a third party) 282 | that supports equivalent copying facilities, provided you maintain 283 | clear directions next to the object code saying where to find the 284 | Corresponding Source. Regardless of what server hosts the 285 | Corresponding Source, you remain obligated to ensure that it is 286 | available for as long as needed to satisfy these requirements. 287 | 288 | e) Convey the object code using peer-to-peer transmission, provided 289 | you inform other peers where the object code and Corresponding 290 | Source of the work are being offered to the general public at no 291 | charge under subsection 6d. 292 | 293 | A separable portion of the object code, whose source code is excluded 294 | from the Corresponding Source as a System Library, need not be 295 | included in conveying the object code work. 
296 | 297 | A "User Product" is either (1) a "consumer product", which means any 298 | tangible personal property which is normally used for personal, family, 299 | or household purposes, or (2) anything designed or sold for incorporation 300 | into a dwelling. In determining whether a product is a consumer product, 301 | doubtful cases shall be resolved in favor of coverage. For a particular 302 | product received by a particular user, "normally used" refers to a 303 | typical or common use of that class of product, regardless of the status 304 | of the particular user or of the way in which the particular user 305 | actually uses, or expects or is expected to use, the product. A product 306 | is a consumer product regardless of whether the product has substantial 307 | commercial, industrial or non-consumer uses, unless such uses represent 308 | the only significant mode of use of the product. 309 | 310 | "Installation Information" for a User Product means any methods, 311 | procedures, authorization keys, or other information required to install 312 | and execute modified versions of a covered work in that User Product from 313 | a modified version of its Corresponding Source. The information must 314 | suffice to ensure that the continued functioning of the modified object 315 | code is in no case prevented or interfered with solely because 316 | modification has been made. 317 | 318 | If you convey an object code work under this section in, or with, or 319 | specifically for use in, a User Product, and the conveying occurs as 320 | part of a transaction in which the right of possession and use of the 321 | User Product is transferred to the recipient in perpetuity or for a 322 | fixed term (regardless of how the transaction is characterized), the 323 | Corresponding Source conveyed under this section must be accompanied 324 | by the Installation Information. But this requirement does not apply 325 | if neither you nor any third party retains the ability to install 326 | modified object code on the User Product (for example, the work has 327 | been installed in ROM). 328 | 329 | The requirement to provide Installation Information does not include a 330 | requirement to continue to provide support service, warranty, or updates 331 | for a work that has been modified or installed by the recipient, or for 332 | the User Product in which it has been modified or installed. Access to a 333 | network may be denied when the modification itself materially and 334 | adversely affects the operation of the network or violates the rules and 335 | protocols for communication across the network. 336 | 337 | Corresponding Source conveyed, and Installation Information provided, 338 | in accord with this section must be in a format that is publicly 339 | documented (and with an implementation available to the public in 340 | source code form), and must require no special password or key for 341 | unpacking, reading or copying. 342 | 343 | 7. Additional Terms. 344 | 345 | "Additional permissions" are terms that supplement the terms of this 346 | License by making exceptions from one or more of its conditions. 347 | Additional permissions that are applicable to the entire Program shall 348 | be treated as though they were included in this License, to the extent 349 | that they are valid under applicable law. 
If additional permissions 350 | apply only to part of the Program, that part may be used separately 351 | under those permissions, but the entire Program remains governed by 352 | this License without regard to the additional permissions. 353 | 354 | When you convey a copy of a covered work, you may at your option 355 | remove any additional permissions from that copy, or from any part of 356 | it. (Additional permissions may be written to require their own 357 | removal in certain cases when you modify the work.) You may place 358 | additional permissions on material, added by you to a covered work, 359 | for which you have or can give appropriate copyright permission. 360 | 361 | Notwithstanding any other provision of this License, for material you 362 | add to a covered work, you may (if authorized by the copyright holders of 363 | that material) supplement the terms of this License with terms: 364 | 365 | a) Disclaiming warranty or limiting liability differently from the 366 | terms of sections 15 and 16 of this License; or 367 | 368 | b) Requiring preservation of specified reasonable legal notices or 369 | author attributions in that material or in the Appropriate Legal 370 | Notices displayed by works containing it; or 371 | 372 | c) Prohibiting misrepresentation of the origin of that material, or 373 | requiring that modified versions of such material be marked in 374 | reasonable ways as different from the original version; or 375 | 376 | d) Limiting the use for publicity purposes of names of licensors or 377 | authors of the material; or 378 | 379 | e) Declining to grant rights under trademark law for use of some 380 | trade names, trademarks, or service marks; or 381 | 382 | f) Requiring indemnification of licensors and authors of that 383 | material by anyone who conveys the material (or modified versions of 384 | it) with contractual assumptions of liability to the recipient, for 385 | any liability that these contractual assumptions directly impose on 386 | those licensors and authors. 387 | 388 | All other non-permissive additional terms are considered "further 389 | restrictions" within the meaning of section 10. If the Program as you 390 | received it, or any part of it, contains a notice stating that it is 391 | governed by this License along with a term that is a further 392 | restriction, you may remove that term. If a license document contains 393 | a further restriction but permits relicensing or conveying under this 394 | License, you may add to a covered work material governed by the terms 395 | of that license document, provided that the further restriction does 396 | not survive such relicensing or conveying. 397 | 398 | If you add terms to a covered work in accord with this section, you 399 | must place, in the relevant source files, a statement of the 400 | additional terms that apply to those files, or a notice indicating 401 | where to find the applicable terms. 402 | 403 | Additional terms, permissive or non-permissive, may be stated in the 404 | form of a separately written license, or stated as exceptions; 405 | the above requirements apply either way. 406 | 407 | 8. Termination. 408 | 409 | You may not propagate or modify a covered work except as expressly 410 | provided under this License. Any attempt otherwise to propagate or 411 | modify it is void, and will automatically terminate your rights under 412 | this License (including any patent licenses granted under the third 413 | paragraph of section 11). 
414 | 415 | However, if you cease all violation of this License, then your 416 | license from a particular copyright holder is reinstated (a) 417 | provisionally, unless and until the copyright holder explicitly and 418 | finally terminates your license, and (b) permanently, if the copyright 419 | holder fails to notify you of the violation by some reasonable means 420 | prior to 60 days after the cessation. 421 | 422 | Moreover, your license from a particular copyright holder is 423 | reinstated permanently if the copyright holder notifies you of the 424 | violation by some reasonable means, this is the first time you have 425 | received notice of violation of this License (for any work) from that 426 | copyright holder, and you cure the violation prior to 30 days after 427 | your receipt of the notice. 428 | 429 | Termination of your rights under this section does not terminate the 430 | licenses of parties who have received copies or rights from you under 431 | this License. If your rights have been terminated and not permanently 432 | reinstated, you do not qualify to receive new licenses for the same 433 | material under section 10. 434 | 435 | 9. Acceptance Not Required for Having Copies. 436 | 437 | You are not required to accept this License in order to receive or 438 | run a copy of the Program. Ancillary propagation of a covered work 439 | occurring solely as a consequence of using peer-to-peer transmission 440 | to receive a copy likewise does not require acceptance. However, 441 | nothing other than this License grants you permission to propagate or 442 | modify any covered work. These actions infringe copyright if you do 443 | not accept this License. Therefore, by modifying or propagating a 444 | covered work, you indicate your acceptance of this License to do so. 445 | 446 | 10. Automatic Licensing of Downstream Recipients. 447 | 448 | Each time you convey a covered work, the recipient automatically 449 | receives a license from the original licensors, to run, modify and 450 | propagate that work, subject to this License. You are not responsible 451 | for enforcing compliance by third parties with this License. 452 | 453 | An "entity transaction" is a transaction transferring control of an 454 | organization, or substantially all assets of one, or subdividing an 455 | organization, or merging organizations. If propagation of a covered 456 | work results from an entity transaction, each party to that 457 | transaction who receives a copy of the work also receives whatever 458 | licenses to the work the party's predecessor in interest had or could 459 | give under the previous paragraph, plus a right to possession of the 460 | Corresponding Source of the work from the predecessor in interest, if 461 | the predecessor has it or can get it with reasonable efforts. 462 | 463 | You may not impose any further restrictions on the exercise of the 464 | rights granted or affirmed under this License. For example, you may 465 | not impose a license fee, royalty, or other charge for exercise of 466 | rights granted under this License, and you may not initiate litigation 467 | (including a cross-claim or counterclaim in a lawsuit) alleging that 468 | any patent claim is infringed by making, using, selling, offering for 469 | sale, or importing the Program or any portion of it. 470 | 471 | 11. Patents. 472 | 473 | A "contributor" is a copyright holder who authorizes use under this 474 | License of the Program or a work on which the Program is based. 
The 475 | work thus licensed is called the contributor's "contributor version". 476 | 477 | A contributor's "essential patent claims" are all patent claims 478 | owned or controlled by the contributor, whether already acquired or 479 | hereafter acquired, that would be infringed by some manner, permitted 480 | by this License, of making, using, or selling its contributor version, 481 | but do not include claims that would be infringed only as a 482 | consequence of further modification of the contributor version. For 483 | purposes of this definition, "control" includes the right to grant 484 | patent sublicenses in a manner consistent with the requirements of 485 | this License. 486 | 487 | Each contributor grants you a non-exclusive, worldwide, royalty-free 488 | patent license under the contributor's essential patent claims, to 489 | make, use, sell, offer for sale, import and otherwise run, modify and 490 | propagate the contents of its contributor version. 491 | 492 | In the following three paragraphs, a "patent license" is any express 493 | agreement or commitment, however denominated, not to enforce a patent 494 | (such as an express permission to practice a patent or covenant not to 495 | sue for patent infringement). To "grant" such a patent license to a 496 | party means to make such an agreement or commitment not to enforce a 497 | patent against the party. 498 | 499 | If you convey a covered work, knowingly relying on a patent license, 500 | and the Corresponding Source of the work is not available for anyone 501 | to copy, free of charge and under the terms of this License, through a 502 | publicly available network server or other readily accessible means, 503 | then you must either (1) cause the Corresponding Source to be so 504 | available, or (2) arrange to deprive yourself of the benefit of the 505 | patent license for this particular work, or (3) arrange, in a manner 506 | consistent with the requirements of this License, to extend the patent 507 | license to downstream recipients. "Knowingly relying" means you have 508 | actual knowledge that, but for the patent license, your conveying the 509 | covered work in a country, or your recipient's use of the covered work 510 | in a country, would infringe one or more identifiable patents in that 511 | country that you have reason to believe are valid. 512 | 513 | If, pursuant to or in connection with a single transaction or 514 | arrangement, you convey, or propagate by procuring conveyance of, a 515 | covered work, and grant a patent license to some of the parties 516 | receiving the covered work authorizing them to use, propagate, modify 517 | or convey a specific copy of the covered work, then the patent license 518 | you grant is automatically extended to all recipients of the covered 519 | work and works based on it. 520 | 521 | A patent license is "discriminatory" if it does not include within 522 | the scope of its coverage, prohibits the exercise of, or is 523 | conditioned on the non-exercise of one or more of the rights that are 524 | specifically granted under this License. 
You may not convey a covered 525 | work if you are a party to an arrangement with a third party that is 526 | in the business of distributing software, under which you make payment 527 | to the third party based on the extent of your activity of conveying 528 | the work, and under which the third party grants, to any of the 529 | parties who would receive the covered work from you, a discriminatory 530 | patent license (a) in connection with copies of the covered work 531 | conveyed by you (or copies made from those copies), or (b) primarily 532 | for and in connection with specific products or compilations that 533 | contain the covered work, unless you entered into that arrangement, 534 | or that patent license was granted, prior to 28 March 2007. 535 | 536 | Nothing in this License shall be construed as excluding or limiting 537 | any implied license or other defenses to infringement that may 538 | otherwise be available to you under applicable patent law. 539 | 540 | 12. No Surrender of Others' Freedom. 541 | 542 | If conditions are imposed on you (whether by court order, agreement or 543 | otherwise) that contradict the conditions of this License, they do not 544 | excuse you from the conditions of this License. If you cannot convey a 545 | covered work so as to satisfy simultaneously your obligations under this 546 | License and any other pertinent obligations, then as a consequence you may 547 | not convey it at all. For example, if you agree to terms that obligate you 548 | to collect a royalty for further conveying from those to whom you convey 549 | the Program, the only way you could satisfy both those terms and this 550 | License would be to refrain entirely from conveying the Program. 551 | 552 | 13. Use with the GNU Affero General Public License. 553 | 554 | Notwithstanding any other provision of this License, you have 555 | permission to link or combine any covered work with a work licensed 556 | under version 3 of the GNU Affero General Public License into a single 557 | combined work, and to convey the resulting work. The terms of this 558 | License will continue to apply to the part which is the covered work, 559 | but the special requirements of the GNU Affero General Public License, 560 | section 13, concerning interaction through a network will apply to the 561 | combination as such. 562 | 563 | 14. Revised Versions of this License. 564 | 565 | The Free Software Foundation may publish revised and/or new versions of 566 | the GNU General Public License from time to time. Such new versions will 567 | be similar in spirit to the present version, but may differ in detail to 568 | address new problems or concerns. 569 | 570 | Each version is given a distinguishing version number. If the 571 | Program specifies that a certain numbered version of the GNU General 572 | Public License "or any later version" applies to it, you have the 573 | option of following the terms and conditions either of that numbered 574 | version or of any later version published by the Free Software 575 | Foundation. If the Program does not specify a version number of the 576 | GNU General Public License, you may choose any version ever published 577 | by the Free Software Foundation. 578 | 579 | If the Program specifies that a proxy can decide which future 580 | versions of the GNU General Public License can be used, that proxy's 581 | public statement of acceptance of a version permanently authorizes you 582 | to choose that version for the Program. 
583 | 
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 | 
589 | 15. Disclaimer of Warranty.
590 | 
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 | 
600 | 16. Limitation of Liability.
601 | 
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 | 
612 | 17. Interpretation of Sections 15 and 16.
613 | 
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 | 
621 | END OF TERMS AND CONDITIONS
622 | 
623 | How to Apply These Terms to Your New Programs
624 | 
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 | 
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 | 
634 | {one line to give the program's name and a brief idea of what it does.}
635 | Copyright (C) {year} {name of author}
636 | 
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 | 
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 | 
647 | You should have received a copy of the GNU General Public License
648 | along with this program. If not, see <http://www.gnu.org/licenses/>.
649 | 
650 | Also add information on how to contact you by electronic and paper mail.
651 | 
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 | 
655 | {project} Copyright (C) {year} {fullname}
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 | 
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 | 
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <http://www.gnu.org/licenses/>.
668 | 
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <http://www.gnu.org/philosophy/why-not-lgpl.html>.
675 | 
--------------------------------------------------------------------------------