├── .github └── workflows │ └── publish-to-pypi.yaml ├── .gitignore ├── Arguments.py ├── EULA.txt ├── Main_M3GP_classification_example.py ├── Main_M3GP_regression_example.py ├── Main_M3GP_standalone.py ├── README.txt ├── datasets ├── boom_bikes.csv └── heart.csv ├── m3gp ├── GeneticOperators.py ├── Individual.py ├── M3GP.py ├── MahalanobisDistanceClassifier.py ├── Node.py └── __init__.py ├── mypy.ini ├── pyproject.toml ├── requirements.txt ├── setup.cfg ├── setup.py └── setup_dev.sh /.github/workflows/publish-to-pypi.yaml: -------------------------------------------------------------------------------- 1 | name: Publish Python distributions to PyPI 2 | 3 | permissions: 4 | id-token: write # Required for OIDC token generation 5 | contents: read # Needed to read repository contents 6 | 7 | on: push 8 | jobs: 9 | build-n-publish: 10 | name: Build and publish Python distributions to PyPI 11 | runs-on: ubuntu-20.04 12 | steps: 13 | - uses: actions/checkout@master 14 | - name: Debug OIDC Token 15 | run: | 16 | echo "Checking OIDC Token..." 17 | env | grep ACTIONS_ID_TOKEN_REQUEST 18 | - name: Set up Python 3.10 19 | uses: actions/setup-python@v4 20 | with: 21 | python-version: '3.10' 22 | cache: 'pip' 23 | - name: Install pypa/build 24 | run: python -m pip install build --user 25 | - name: Build a binary wheel and a source tarball 26 | run: python -m build --sdist --wheel --outdir dist . 27 | - name: Publish distribution 📦 to PyPI 28 | if: startsWith(github.ref, 'refs/tags') 29 | uses: pypa/gh-action-pypi-publish@release/v1 30 | with: 31 | password: ${{ secrets.PYPI_API_TOKEN }} 32 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Does not track compiled python files 2 | *pyc 3 | 4 | # Does not track the default folder for the results 5 | results/* -------------------------------------------------------------------------------- /Arguments.py: -------------------------------------------------------------------------------- 1 | from sys import argv 2 | 3 | from m3gp.MahalanobisDistanceClassifier import MahalanobisDistanceClassifier 4 | from sklearn.ensemble import RandomForestClassifier 5 | from sklearn.tree import DecisionTreeRegressor 6 | 7 | # 8 | # By using this file, you are agreeing to this product's EULA 9 | # 10 | # This product can be obtained in https://github.com/jespb/Python-M3GP 11 | # 12 | # Copyright ©2019-2025 J. E. Batista 13 | # 14 | 15 | 16 | # Operators to be used by the models 17 | # Only these operators are available. 
To add mode, edit m3gp.Node.calculate(self, sample) 18 | 19 | #OPERATORS = [("+",2),("-",2),("*",2),("/",2),("log2",1), ("max", 3)] # Example 20 | OPERATORS = [("+",2),("-",2),("*",2),("/",2)] # Default 21 | 22 | # Initial Maximum depth 23 | MAX_DEPTH = 6 24 | 25 | # Number of models in the population 26 | POPULATION_SIZE = 500 27 | 28 | # Maximum number of iterations 29 | MAX_GENERATION = 100 30 | 31 | # Fraction of the dataset to be used as training (used by Main_M3GP_standalone.py) 32 | TRAIN_FRACTION = 0.70 33 | 34 | # Number of individuals to be used in the tournament 35 | TOURNAMENT_SIZE = 5 36 | 37 | # Number of best individuals to be automatically moved to the next generation 38 | ELITISM_SIZE = 1 39 | 40 | # Shuffle the dataset (used by Main_M3GP_standalone.py) 41 | SHUFFLE = True 42 | 43 | # Dimensions maximum depth 44 | LIMIT_DEPTH=17 45 | 46 | # Number of runs (used by Main_M3GP_standalone.py) 47 | RUNS = 30 48 | 49 | # Verbose 50 | VERBOSE = True 51 | 52 | # Number of CPU Threads to be used 53 | THREADS = 1 54 | 55 | # Minimum number of dimensions 56 | DIM_MIN = 1 57 | 58 | # An unreachable number of dimensions 59 | DIM_MAX = 9999 60 | 61 | # Random state 62 | RANDOM_STATE = 42 63 | 64 | # Models wrapped by the M3GP models 65 | MODEL = [MahalanobisDistanceClassifier(), RandomForestClassifier(max_depth=6), DecisionTreeRegressor(max_depth=6)][0] 66 | MODEL_NAME = MODEL.__class__.__name__ 67 | 68 | # Fitness used by the M3GP models 69 | FITNESS_TYPE = ["Accuracy", "MSE", "WAF", "2FOLD"][0] 70 | 71 | 72 | 73 | DATASETS_DIR = "datasets/" 74 | OUTPUT_DIR = "results/" 75 | 76 | DATASETS = ["heart.csv"] 77 | OUTPUT = "Classification" 78 | 79 | 80 | 81 | 82 | if "-dsdir" in argv: 83 | DATASETS_DIR = argv[argv.index("-dsdir")+1] 84 | 85 | if "-odir" in argv: 86 | OUTPUT_DIR = argv[argv.index("-odir")+1] 87 | 88 | if "-d" in argv: 89 | DATASETS = argv[argv.index("-d")+1].split(";") 90 | 91 | if "-runs" in argv: 92 | RUNS = int(argv[argv.index("-runs")+1]) 93 | 94 | if "-op" in argv: 95 | OPERATORS = argv[argv.index("-op")+1].split(";") 96 | for i in range(len(OPERATORS)): 97 | OPERATORS[i] = OPERATORS[i].split(",") 98 | OPERATORS[i][1] = int(OPERATORS[i][1]) 99 | 100 | if "-md" in argv: 101 | MAX_DEPTH = int(argv[argv.index("-md")+1]) 102 | 103 | if "-ps" in argv: 104 | POPULATION_SIZE = int(argv[argv.index("-ps")+1]) 105 | 106 | if "-mg" in argv: 107 | MAX_GENERATION = int(argv[argv.index("-mg")+1]) 108 | 109 | if "-tf" in argv: 110 | TRAIN_FRACTION = float(argv[argv.index("-tf")+1]) 111 | 112 | if "-ts" in argv: 113 | TOURNAMENT_SIZE = int(argv[argv.index("-ts")+1]) 114 | 115 | if "-es" in argv: 116 | ELITISM_SIZE = int(argv[argv.index("-es")+1]) 117 | 118 | if "-dontshuffle" in argv: 119 | SHUFFLE = False 120 | 121 | if "-s" in argv: 122 | VERBOSE = False 123 | 124 | if "-t" in argv: 125 | THREADS = int(argv[argv.index("-t")+1]) 126 | 127 | if "-dmin" in argv: 128 | DIM_MIN = int(argv[argv.index("-dmin")+1]) 129 | 130 | if "-dmax" in argv: 131 | DIM_MAX = int(argv[argv.index("-dmax")+1]) 132 | 133 | 134 | if "-rs" in argv: 135 | RANDOM_STATE = int(argv[argv.index("-rs")+1]) 136 | 137 | 138 | -------------------------------------------------------------------------------- /EULA.txt: -------------------------------------------------------------------------------- 1 | END-USER LICENSE AGREEMENT FOR J. E. 
Batista's MULTIDIMENSIONAL MULTICLASS GENETIC PROGRAMMING WITH MULTIDIMENSIONAL POPULATIONS (M3GP) IMPLEMENTATION 2 | 3 | IMPORTANT: PLEASE READ THE TERMS AND CONDITIONS OF THIS LICENSE AGREEMENT CAREFULLY BEFORE CONTINUING WITH THIS PROGRAM INSTALL. 4 | 5 | This End-User License Agreement ("EULA") is a legal agreement between you (either an individual or a single entity) and J. E. Batista (hereinafter referred to as "Licensor"), for the software product(s) identified above which may include associated software components, media, printed materials, and "online" or electronic documentation ("SOFTWARE PRODUCT"). By installing, copying, or otherwise using the SOFTWARE PRODUCT, you agree to be bound by the terms of this EULA. This license agreement represents the entire agreement concerning the program between You and the Licensor, and it supersedes any prior proposal, representation, or understanding between the parties. If you do not agree to the terms of this EULA, do not install or use the SOFTWARE PRODUCT. 6 | 7 | The SOFTWARE PRODUCT is protected by copyright laws and international copyright treaties, as well as other intellectual property laws and treaties. The SOFTWARE PRODUCT is licensed, not sold. 8 | 9 | 1. GRANT OF LICENSE. 10 | The SOFTWARE PRODUCT is licensed as follows: 11 | (a) Installation and Use. 12 | The Licensor grants you a revocable, non-exclusive, non-transferable, limited right to install and use copies of the SOFTWARE PRODUCT on your computer running a validly licensed copy of the operating system for which the SOFTWARE PRODUCT was designed. 13 | (b) Backup Copies. 14 | You may also make copies of the SOFTWARE PRODUCT as may be necessary for backup and archival purposes. 15 | 16 | 2. DESCRIPTION OF OTHER RIGHTS AND LIMITATIONS. 17 | (a) Maintenance of Copyright Notices. 18 | You must not remove or alter any copyright notices on any and all copies of the SOFTWARE PRODUCT. 19 | (b) Distribution. 20 | You may not distribute copies of the SOFTWARE PRODUCT to third parties. Evaluation versions available for download from the Licensor's websites may be freely distributed. 21 | (c) Prohibition on Reverse Engineering, Decompilation, and Disassembly. 22 | You may not reverse engineer, decompile, or disassemble the SOFTWARE PRODUCT, except and only to the extent that such activity is expressly permitted by applicable law notwithstanding this limitation. 23 | (d) Rental. 24 | You may not rent, lease, or lend the SOFTWARE PRODUCT. 25 | (e) Support Services. 26 | The Licensor may provide you with support services related to the SOFTWARE PRODUCT ("Support Services"). Any supplemental software code provided to you as part of the Support Services shall be considered part of the SOFTWARE PRODUCT and subject to the terms and conditions of this EULA. 27 | (f) Compliance with Applicable Laws. 28 | You must comply with all applicable laws regarding use of the SOFTWARE PRODUCT. 29 | (g) Source Code. 30 | You may only compile and modify the source code of the SOFTWARE PRODUCT for your own personal purpose, research purpose, or to propose a contribution to the SOFTWARE PRODUCT. You are explicitly required to refer the SOFTWARE PRODUCT in publications containing results that derived from using SOFTWARE PRODUCT. 31 | 32 | 3. TERMINATION 33 | Without prejudice to any other rights, the Licensor may terminate this EULA if you fail to comply with the terms and conditions of this EULA. In such event, you must destroy all copies of the SOFTWARE PRODUCT in your possession. 34 | 35 | 4. 
COPYRIGHT 36 | All title, including but not limited to copyrights, in and to the SOFTWARE PRODUCT and any copies thereof are owned by the Licensor or its suppliers. All title and intellectual property rights in and to the content which may be accessed through use of the SOFTWARE PRODUCT is the property of the respective content owner and may be protected by applicable copyright or other intellectual property laws and treaties. This EULA grants you no rights to use such content. All rights not expressly granted are reserved by the Licensor. 37 | 38 | 5. NO WARRANTIES 39 | The Licensor expressly disclaims any warranty for the SOFTWARE PRODUCT. The SOFTWARE PRODUCT is provided 'As Is' without any express or implied warranty of any kind, including but not limited to any warranties of merchantability, noninfringement, or fitness of a particular purpose. The Licensor does not warrant or assume responsibility for the accuracy or completeness of any information, text, graphics, links or other items contained within the SOFTWARE PRODUCT. The Licensor makes no warranties respecting any harm that may be caused by the transmission of a computer virus, worm, time bomb, logic bomb, or other such computer program. The Licensor further expressly disclaims any warranty or representation to Authorized Users or to any third party. 40 | 41 | 6. LIMITATION OF LIABILITY 42 | In no event shall the Licensor be liable for any damages (including, without limitation, lost profits, business interruption, or lost information) rising out of 'Authorized Users' use of or inability to use the SOFTWARE PRODUCT, even if the Licensor has been advised of the possibility of such damages. In no event will the Licensor be liable for loss of data or for indirect, special, incidental, consequential (including lost profit), or other damages based in contract, tort or otherwise. The Licensor shall have no liability with respect to the content of the SOFTWARE PRODUCT or any part thereof, including but not limited to errors or omissions contained therein, libel, infringements of rights of publicity, privacy, trademark rights, business interruption, personal injury, loss of privacy, moral rights or the disclosure of confidential information. -------------------------------------------------------------------------------- /Main_M3GP_classification_example.py: -------------------------------------------------------------------------------- 1 | import pandas 2 | 3 | from m3gp.M3GP import M3GP 4 | from sklearn.ensemble import RandomForestClassifier 5 | 6 | from sklearn.model_selection import train_test_split 7 | 8 | from sklearn.metrics import accuracy_score 9 | 10 | import warnings 11 | 12 | warnings.filterwarnings("ignore", category=FutureWarning, 13 | message="From version 0.21, test_size will always complement", 14 | module="sklearn") 15 | 16 | # 17 | # By using this file, you are agreeing to this product's EULA 18 | # 19 | # This product can be obtained in https://github.com/jespb/Python-M3GP 20 | # 21 | # Copyright ©2019-2025 J. E. 
Batista 22 | # 23 | 24 | 25 | 26 | filename= "heart.csv" 27 | 28 | # Open the dataset 29 | ds = pandas.read_csv("datasets/"+filename) 30 | class_header = ds.columns[-1] 31 | 32 | # Split the dataset 33 | Tr_X, Te_X, Tr_Y, Te_Y = train_test_split(ds.drop(columns=[class_header]), ds[class_header], 34 | train_size=0.7, random_state = 42, stratify = ds[class_header]) 35 | 36 | # Train a model 37 | m3gp = M3GP(model_class=RandomForestClassifier(max_depth=6), fitnessType="2FOLD") 38 | m3gp.fit(Tr_X, Tr_Y) 39 | 40 | # Predict test results 41 | pred = m3gp.predict(Te_X) 42 | 43 | # Obtain test accuracy 44 | print( accuracy_score(pred, Te_Y) ) 45 | 46 | -------------------------------------------------------------------------------- /Main_M3GP_regression_example.py: -------------------------------------------------------------------------------- 1 | import pandas 2 | 3 | from m3gp.M3GP import M3GP 4 | 5 | from sklearn.model_selection import train_test_split 6 | 7 | from sklearn.metrics import mean_squared_error 8 | 9 | 10 | from sklearn.tree import DecisionTreeRegressor 11 | 12 | import warnings 13 | 14 | warnings.filterwarnings("ignore", category=FutureWarning, 15 | message="From version 0.21, test_size will always complement", 16 | module="sklearn") 17 | 18 | # 19 | # By using this file, you are agreeing to this product's EULA 20 | # 21 | # This product can be obtained in https://github.com/jespb/Python-M3GP 22 | # 23 | # Copyright ©2019-2025 J. E. Batista 24 | # 25 | 26 | 27 | 28 | filename= "heart.csv" 29 | 30 | # Open the dataset 31 | ds = pandas.read_csv("datasets/"+filename) 32 | class_header = ds.columns[-1] 33 | 34 | # Split the dataset 35 | Tr_X, Te_X, Tr_Y, Te_Y = train_test_split(ds.drop(columns=[class_header]), ds[class_header], 36 | train_size=0.7, random_state = 42) 37 | 38 | # Train a model 39 | m3gp = M3GP(max_generation=20, model_class=DecisionTreeRegressor(max_depth=5), fitnessType="MSE",random_state=21) 40 | m3gp.fit(Tr_X, Tr_Y, Te_X, Te_Y) 41 | 42 | # Predict test results 43 | pred = m3gp.predict(Te_X) 44 | 45 | # Obtain test accuracy 46 | print( mean_squared_error(pred, Te_Y) ) 47 | 48 | -------------------------------------------------------------------------------- /Main_M3GP_standalone.py: -------------------------------------------------------------------------------- 1 | import pandas 2 | 3 | from m3gp.M3GP import M3GP 4 | from sys import argv 5 | from Arguments import * 6 | import os 7 | 8 | from sklearn.model_selection import train_test_split 9 | 10 | import numpy as np 11 | 12 | import warnings 13 | 14 | warnings.filterwarnings("ignore", category=FutureWarning, 15 | message="From version 0.21, test_size will always complement", 16 | module="sklearn") 17 | 18 | 19 | # 20 | # By using this file, you are agreeing to this product's EULA 21 | # 22 | # This product can be obtained in https://github.com/jespb/Python-M3GP 23 | # 24 | # Copyright ©2019-2025 J. E. 
Batista 25 | # 26 | 27 | 28 | 29 | 30 | def openAndSplitDatasets(which,seed): 31 | if VERBOSE: 32 | print( "> Opening: ", which ) 33 | 34 | # Open dataset 35 | ds = pandas.read_csv(DATASETS_DIR+which) 36 | 37 | # Read header 38 | class_header = ds.columns[-1] 39 | 40 | return train_test_split(ds.drop(columns=[class_header]), ds[class_header], 41 | train_size=TRAIN_FRACTION, random_state=seed, 42 | stratify = ds[class_header]) 43 | 44 | 45 | def run(r,dataset): 46 | if VERBOSE: 47 | print("> Starting run:") 48 | print(" > ID:", r) 49 | print(" > Dataset: "+dataset) 50 | print() 51 | 52 | Tr_X, Te_X, Tr_Y, Te_Y = openAndSplitDatasets(dataset,r) 53 | 54 | # Train a model 55 | m3gp = M3GP(OPERATORS, MAX_DEPTH, POPULATION_SIZE, MAX_GENERATION, TOURNAMENT_SIZE, 56 | ELITISM_SIZE, LIMIT_DEPTH, DIM_MIN, DIM_MAX, THREADS, r, VERBOSE, MODEL, FITNESS_TYPE) 57 | m3gp.fit(Tr_X, Tr_Y, Te_X, Te_Y) 58 | 59 | 60 | # Obtain training results 61 | accuracy = m3gp.getAccuracyOverTime() 62 | waf = m3gp.getWaFOverTime() 63 | kappa = m3gp.getKappaOverTime() 64 | mse = m3gp.getMSEOverTime() 65 | sizes = m3gp.getSizesOverTime() 66 | model_str = str(m3gp.getBestIndividual()) 67 | times = m3gp.getGenerationTimes() 68 | 69 | tr_acc = accuracy[0] 70 | te_acc = accuracy[1] 71 | tr_waf = waf[0] 72 | te_waf = waf[1] 73 | tr_kappa = kappa[0] 74 | te_kappa = kappa[1] 75 | tr_mse = mse[0] 76 | te_mse = mse[1] 77 | size = sizes[0] 78 | dimensions = sizes[1] 79 | 80 | if VERBOSE: 81 | print("> Ending run:") 82 | print(" > ID:", r) 83 | print(" > Dataset:", dataset) 84 | print(" > Final model:", model_str) 85 | print(" > Training accuracy:", tr_acc[-1]) 86 | print(" > Test accuracy:", te_acc[-1]) 87 | print() 88 | 89 | return (tr_acc,te_acc, 90 | tr_waf,te_waf, 91 | tr_kappa,te_kappa, 92 | tr_mse,te_mse, 93 | size,dimensions, 94 | times, 95 | model_str) 96 | 97 | 98 | def callm3gp(): 99 | try: 100 | os.makedirs(OUTPUT_DIR) 101 | except: 102 | pass 103 | 104 | for dataset in DATASETS: 105 | outputFilename = OUTPUT_DIR+"m3gp_"+ dataset 106 | if not os.path.exists(outputFilename): 107 | results = [] 108 | 109 | # Run the algorithm several times 110 | for r in range(RUNS): 111 | results.append(run(r,dataset)) 112 | 113 | # Write output header 114 | file = open(outputFilename , "w") 115 | file.write("Attribute,Run,") 116 | for i in range(MAX_GENERATION): 117 | file.write(str(i)+",") 118 | file.write("\n") 119 | 120 | attributes= ["Training-Accuracy","Test-Accuracy", 121 | "Training-WaF", "Test-WaF", 122 | "Training-Kappa", "Test-Kappa", 123 | "Training-MSE", "Test-MSE", 124 | "Size","Dimensions", 125 | "Time", 126 | "Final_Model"] 127 | 128 | # Write attributes with value over time 129 | for ai in range(len(attributes)-1): 130 | for i in range(len(results)): 131 | file.write("\n"+attributes[ai]+","+str(i)+",") 132 | file.write( ",".join([str(val) for val in results[i][ai]])) 133 | file.write("\n") 134 | 135 | # Write the final models 136 | for i in range(len(results)): 137 | file.write("\n"+attributes[-1]+","+str(i)+",") 138 | file.write(results[i][-1]) 139 | file.write("\n") 140 | 141 | # Write some parameters 142 | file.write("\n\nParameters") 143 | file.write("\nOperators,"+str(OPERATORS)) 144 | file.write("\nMax Initial Depth,"+str(MAX_DEPTH)) 145 | file.write("\nPopulation Size,"+str(POPULATION_SIZE)) 146 | file.write("\nMax Generation,"+str(MAX_GENERATION)) 147 | file.write("\nTournament Size,"+str(TOURNAMENT_SIZE)) 148 | file.write("\nElitism Size,"+str(ELITISM_SIZE)) 149 | file.write("\nDepth Limit,"+str(LIMIT_DEPTH)) 150 | 
file.write("\nMinimum Dimensions,"+str(DIM_MIN)) 151 | file.write("\nMaximum Dimensions,"+str(DIM_MAX)) 152 | file.write("\nWrapped Model,"+MODEL_NAME) 153 | file.write("\nFitness Type,"+FITNESS_TYPE) 154 | file.write("\nThreads,"+str(THREADS)) 155 | file.write("\nRandom State,"+str(list(range(RUNS)))) 156 | file.write("\nDataset,"+dataset) 157 | 158 | 159 | file.close() 160 | else: 161 | print("Filename: " + outputFilename +" already exists.") 162 | 163 | 164 | if __name__ == '__main__': 165 | callm3gp() 166 | -------------------------------------------------------------------------------- /README.txt: -------------------------------------------------------------------------------- 1 | This is a, easy-to-use, scikit-learn inspired version of the M3GP algorithm. 2 | 3 | 4 | By using this file, you are agreeing to this product's EULA 5 | This product can be obtained in https://github.com/jespb/Python-M3GP 6 | Copyright ©2019-2025 J. E. Batista 7 | 8 | 9 | This file contains information about the command and flags used in the stand-alone version of this implementation and an explanation on how to import, use and edit this implementation. 10 | 11 | 12 | 13 | 14 | This implementation of M3GP can be used in a stand-alone fashion using the following command and flags: 15 | 16 | $ python Main_M3GP_standalone.py 17 | 18 | [-d datasets] 19 | - This flag expects a set of csv dataset names separated by ";" (e.g., "a.csv;b.csv") 20 | - By default, the heart.csv dataset is used 21 | 22 | [-dsdir dir] 23 | - States the dataset directory. 24 | - By default "datasets/" is used 25 | - Use "-dsdir ./" for the root directory 26 | 27 | [-es elite_size] 28 | - This flag expects an integer with the elite size; 29 | - By default, the elite has size 1. 30 | 31 | [-md max_depth] 32 | - This flag expects an integer with the maximum initial depth for the trees; 33 | - By default, this value is set to 6. 34 | 35 | [-mg max_generation] 36 | - This flag expects an integer with the maximum number of generations; 37 | - By default, this value is set to 100. 38 | 39 | [-odir dir] 40 | - States the output directory. 41 | - By default "results/" is used 42 | - Use "-odir ./" for the root directory 43 | 44 | [-op operators] 45 | - This flag excepts a set of operators and their number of arguments, separated by ";" 46 | - Allowed operators: +,2 ; -,2 ; *,2 ; /,2 47 | - By default, the used operators are the sum, subtraction, multiplication and protected division: "+,2;-,2;*,2;/,2" 48 | 49 | [-ps population_size] 50 | - This flag expects an integer with the size of the population; 51 | - By default, this value is set to 500. 52 | 53 | [-runs number_of_runs] 54 | - This flag expects an integer with the number of runs to be made; 55 | - By default, this values is set to 30 56 | 57 | [-tf train_fraction] 58 | - This flag expects a float [0;1] with the fraction of the dataset to be used in training; 59 | - By default, this value is set to 0.70 60 | 61 | [-ts tournament_size] 62 | - This flag expects an integer with the tournament size; 63 | - By default, this value is set to 10. 64 | 65 | [-t number_of_threads] 66 | - This flag expects an integer with the number of threads to use while evaluating the population; 67 | - If the value is set to 1, the multiprocessing library will not be used 68 | - By default, this value is set to 1. 
69 | 70 | [-dmin minimum_number_of_dimensions] 71 | - This flag expects an integer with the minimum number of dimensions in each individual; 72 | - This flag affects the number of dimensions in the initial individuals; 73 | - By default, this value is set to 1 74 | 75 | [-dmax maximum_number_of_dimensions] 76 | - This flag expects an integer with the maximum number of dimensions in each individual; 77 | - By default, this value is set to 9999 78 | 79 | [-rs random_state] 80 | - This flag expects an integer with the seed to be used by the M3GP algorithm; 81 | - By default, this value is set to 42 82 | 83 | 84 | 85 | 86 | How to import this implementation to your project: 87 | - Download this repository; 88 | - Copy the "m3gp/" directory to your project directory; 89 | - Import the M3GP class using "from m3gp.M3GP import M3GP". 90 | 91 | Alternatively, m3gp can be installed using: 92 | - pip install m3gp 93 | 94 | 95 | How to use this implementation: 96 | $ from m3gp.M3GP import M3GP 97 | $ model = M3GP() 98 | $ model.fit( training_x, training_y, test_x (optional), test_y (optional) ) 99 | 100 | 101 | Arguments for M3GP(): 102 | operators -> Operators used by the individual (default: [("+",2),("-",2),("*",2),("/",2)] ) 103 | max_depth -> Max initial depth of the individuals (default: 6) 104 | population_size -> Population size (default: 500) 105 | max_generation -> Maximum number of generations (default: 100) 106 | tournament_size -> Tournament size (default: 5) 107 | elitism_size -> Elitism selection size (default: 1) 108 | limit_depth -> Maximum individual depth (default: 17) 109 | threads -> Number of CPU threads to be used (default: 1) 110 | random_state -> Random state (default: 42) 111 | model_class -> Model to be used as the inner classifier/regressor (default: MahalanobisDistanceClassifier() ) 112 | fitnessType -> Fitness to be used (Accuracy, WAF, 2FOLD - Classification, MSE - Regression) (default: "Accuracy") # "2FOLD" means 2-fold cross-validation on the training data, using WAF 113 | dim_min -> Minimum number of dimensions (default: 1) 114 | dim_max -> Maximum number of dimensions (default: 9999) # The algorithm will not reach this value 115 | 116 | Arguments for model.fit(): 117 | Tr_X -> Training samples 118 | Tr_Y -> Training labels 119 | Te_X -> Test samples, used in the standalone version (default: None) 120 | Te_Y -> Test labels, used in the standalone version (default: None) 121 | 122 | Useful methods: 123 | $ model = M3GP() -> starts the model; the model will be optimized for the MahalanobisDistanceClassifier, a cluster-based algorithm; 124 | $ model = M3GP(model_class = RandomForestClassifier(max_depth=6), fitnessType="2FOLD") 125 | -> the model will be optimized for the RF classifier 126 | -> Since RF models with no depth limit will memorize the training data, the models will obtain perfect accuracy in early generations; 127 | -> to prevent this, either limit the depth of the RF trees (e.g., max_depth=6) or use "2FOLD" as fitnessType; 128 | $ model.fit(X, Y) -> fits the model to the dataset; 129 | $ model.predict(X) -> Returns a list with the predictions for the given dataset. 130 | 131 | 132 | 133 | 134 | How to edit this implementation: 135 | Fitness Function ( m3gp.Individual ): 136 | - Change the getFitness() method to use your own fitness function; 137 | - This implementation assumes that a higher fitness is always better.
To change this, edit the __gt__ method in this class; 138 | - Warning: Since M3GP is a slow method, a fitness function that scales well with the number of features is recommended. 139 | 140 | 141 | 142 | 143 | Citation: 144 | If you use this implementation, please cite one of the works below, where the implementation is also used: 145 | 146 | @inproceedings{Batista2022, 147 | doi = {10.1109/cec55065.2022.9870343}, 148 | url = {https://doi.org/10.1109/cec55065.2022.9870343}, 149 | year = {2022}, 150 | month = jul, 151 | publisher = {{IEEE}}, 152 | author = {Joao E. Batista and Sara Silva}, 153 | title = {Comparative study of classifier performance using automatic feature construction by M3GP}, 154 | booktitle = {2022 {IEEE} Congress on Evolutionary Computation ({CEC})} 155 | } 156 | 157 | @Article{rs13091623, 158 | AUTHOR = {Batista, João E. and Cabral, Ana I. R. and Vasconcelos, Maria J. P. and Vanneschi, Leonardo and Silva, Sara}, 159 | TITLE = {Improving Land Cover Classification Using Genetic Programming for Feature Construction}, 160 | JOURNAL = {Remote Sensing}, 161 | VOLUME = {13}, 162 | YEAR = {2021}, 163 | NUMBER = {9}, 164 | ARTICLE-NUMBER = {1623}, 165 | URL = {https://www.mdpi.com/2072-4292/13/9/1623}, 166 | ISSN = {2072-4292}, 167 | DOI = {10.3390/rs13091623} 168 | } 169 | 170 | @INPROCEEDINGS{9185630, 171 | author={Batista, João E. and Silva, Sara}, 172 | booktitle={2020 IEEE Congress on Evolutionary Computation (CEC)}, 173 | title={Improving the Detection of Burnt Areas in Remote Sensing using Hyper-features Evolved by M3GP}, 174 | year={2020}, 175 | pages={1-8}, 176 | doi={10.1109/CEC48606.2020.9185630} 177 | } 178 | 179 | 180 | 181 | 182 | Reference: 183 | Muñoz, L., Trujillo, L., & Silva, S. (2015). M3GP – multiclass classification with GP. In Genetic Programming - 18th European Conference, EuroGP 2015, Proceedings (Vol. 9025, pp. 78-91). (Lecture Notes in Computer Science (including subseries Lecture Notes in Artificial Intelligence and Lecture Notes in Bioinformatics); Vol. 9025). Springer-Verlag.
https://doi.org/10.1007/978-3-319-16501-1_7 184 | -------------------------------------------------------------------------------- /datasets/boom_bikes.csv: -------------------------------------------------------------------------------- 1 | instant,season,yr,mnth,holiday,weekday,workingday,weathersit,temp,atemp,hum,windspeed,cnt 2 | 1,1,0,1,0,6,0,2,14.110847,18.18125,80.5833,10.749882,985 3 | 2,1,0,1,0,0,0,2,14.902598,17.68695,69.6087,16.652113,801 4 | 3,1,0,1,0,1,1,1,8.050924,9.47025,43.7273,16.636703,1349 5 | 4,1,0,1,0,2,1,1,8.2,10.6061,59.0435,10.739832,1562 6 | 5,1,0,1,0,3,1,1,9.305237,11.4635,43.6957,12.5223,1600 7 | 6,1,0,1,0,4,1,1,8.378268,11.66045,51.8261,6.0008684,1606 8 | 7,1,0,1,0,5,1,2,8.057402,10.44195,49.8696,11.304642,1510 9 | 8,1,0,1,0,6,0,2,6.765,8.1127,53.5833,17.875868,959 10 | 9,1,0,1,0,0,0,1,5.671653,5.80875,43.4167,24.25065,822 11 | 10,1,0,1,0,1,1,1,6.184153,7.5444,48.2917,14.958889,1321 12 | 11,1,0,1,0,2,1,2,6.932731,9.5732,68.6364,8.182844,1263 13 | 12,1,0,1,0,3,1,1,7.081807,8.02365,59.9545,20.410009,1162 14 | 13,1,0,1,0,4,1,1,6.765,7.54415,47.0417,20.167,1406 15 | 14,1,0,1,0,5,1,1,6.59567,9.42065,53.7826,8.478716,1421 16 | 15,1,0,1,0,6,0,2,9.566653,12.4056,49.875,10.583521,1248 17 | 16,1,0,1,0,0,0,1,9.498347,11.71085,48.375,12.625011,1204 18 | 17,1,0,1,1,1,0,2,7.209153,8.83855,53.75,12.999139,1000 19 | 18,1,0,1,0,2,1,2,8.883347,11.61665,86.1667,9.833925,683 20 | 19,1,0,1,0,3,1,2,11.979134,14.9211,74.1739,13.957239,1650 21 | 20,1,0,1,0,4,1,2,10.728347,12.7525,53.8333,13.125568,1927 22 | 21,1,0,1,0,5,1,1,7.2775,7.89165,45.7083,23.667214,1543 23 | 22,1,0,1,0,6,0,1,2.4243464,3.95348,40,11.52199,981 24 | 23,1,0,1,0,0,0,1,3.9573897,4.941955,43.6522,16.5222,986 25 | 24,1,0,1,0,1,1,1,3.9930433,5.8965,49.1739,10.60811,1416 26 | 25,1,0,1,0,2,1,2,9.162598,11.7263,61.6957,8.696332,1985 27 | 26,1,0,1,0,3,1,3,8.9175,10.18,86.25,19.68795,506 28 | 27,1,0,1,0,4,1,1,7.995,10.985,68.75,7.627079,431 29 | 28,1,0,1,0,5,1,2,8.342598,11.16585,79.3043,8.2611,1167 30 | 29,1,0,1,0,6,0,1,8.057402,10.6063,65.1739,9.739455,1098 31 | 30,1,0,1,0,0,0,1,8.877402,12.5161,72.2174,4.9568342,1096 32 | 31,1,0,1,0,1,1,2,7.414153,9.3125,60.375,12.541864,1501 33 | 32,1,0,2,0,2,1,2,7.879134,11.7265,82.9565,3.565271,1360 34 | 33,1,0,2,0,3,1,2,10.66,12.72085,77.5417,17.708636,1526 35 | 34,1,0,2,0,4,1,1,7.665237,8.8939,43.7826,18.609384,1550 36 | 35,1,0,2,0,5,1,2,8.663464,11.42935,58.5217,8.565213,1708 37 | 36,1,0,2,0,6,0,2,9.566653,12.1529,92.9167,10.792293,1005 38 | 37,1,0,2,0,0,0,1,11.719153,14.58355,56.8333,9.5006,1623 39 | 38,1,0,2,0,1,1,1,11.138347,15.1829,73.8333,3.0423561,1712 40 | 39,1,0,2,0,2,1,1,9.054153,9.9123,53.7917,24.25065,1530 41 | 40,1,0,2,0,3,1,2,5.526103,7.21415,49.4783,12.652213,1605 42 | 41,1,0,2,0,4,1,1,5.918268,7.4774,43.7391,14.869645,1538 43 | 42,1,0,2,0,5,1,1,7.752731,10.67545,50.6364,7.27285,1746 44 | 43,1,0,2,0,6,0,1,9.1225,11.6477,54.4167,13.625589,1472 45 | 44,1,0,2,0,0,0,1,12.977402,16.20565,45.7391,17.479161,1589 46 | 45,1,0,2,0,1,1,1,17.015,19.9175,37.5833,27.999836,1913 47 | 46,1,0,2,0,2,1,1,10.909567,12.7137,31.4348,19.522058,1815 48 | 47,1,0,2,0,3,1,1,13.048701,15.81,42.3478,16.869997,2115 49 | 48,1,0,2,0,4,1,1,17.869153,21.4329,50.5,15.416968,2475 50 | 49,1,0,2,0,5,1,1,21.388347,25.59915,51.6667,17.749975,2927 51 | 50,1,0,2,0,6,0,1,16.365847,19.5702,18.7917,34.000021,1635 52 | 51,1,0,2,0,0,0,1,11.693897,13.8665,40.7826,14.956745,1812 53 | 52,1,0,2,1,1,0,2,12.436653,14.20375,60.5,20.625682,1107 54 | 53,1,0,2,0,2,1,1,7.471102,9.30165,57.7778,13.110761,1450 55 | 
54,1,0,2,0,3,1,1,9.091299,12.28585,42.3043,6.305571,1917 56 | 55,1,0,2,0,4,1,2,12.121732,14.45955,69.7391,16.783232,1807 57 | 56,1,0,2,0,5,1,2,14.938268,17.52305,71.2174,23.218113,1461 58 | 57,1,0,2,0,6,0,1,11.5825,14.1096,53.7917,12.500257,1969 59 | 58,1,0,2,0,0,0,1,14.082598,17.55545,68,8.391616,2402 60 | 59,1,0,2,0,1,1,2,16.698193,20.0059,87.6364,19.408962,1446 61 | 60,1,0,3,0,2,1,1,10.933347,13.19395,53.5,14.500475,1851 62 | 61,1,0,3,0,3,1,1,13.735,16.00355,44.9583,20.624811,2134 63 | 62,1,0,3,0,4,1,1,8.131653,10.00665,31.8333,15.125518,1685 64 | 63,1,0,3,0,5,1,2,10.728347,12.78395,61.0417,13.624182,1944 65 | 64,1,0,3,0,6,0,2,15.750847,18.93895,78.9167,16.875357,2077 66 | 65,1,0,3,0,0,0,2,15.437402,18.3126,94.8261,23.000229,605 67 | 66,1,0,3,0,1,1,1,10.731299,11.92305,55.1304,22.870584,1872 68 | 67,1,0,3,0,2,1,1,11.9925,15.12,42.0833,8.08355,2133 69 | 68,1,0,3,0,3,1,2,12.129153,14.3304,77.5417,14.75005,1891 70 | 69,1,0,3,0,4,1,3,15.952731,19.2834,0,17.545759,623 71 | 70,1,0,3,0,5,1,2,12.977402,15.25,64.9565,15.60899,1977 72 | 71,1,0,3,0,6,0,1,13.495847,16.2875,59.4583,14.791925,2132 73 | 72,1,0,3,0,0,0,1,15.758268,19.00455,52.7391,18.130468,2417 74 | 73,1,0,3,0,1,1,1,13.333897,16.6,49.6957,9.174042,2046 75 | 74,1,0,3,0,2,1,2,13.013031,15.9089,65.5652,12.348703,2056 76 | 75,1,0,3,0,3,1,2,14.973897,18.3465,77.6522,13.608839,2192 77 | 76,1,0,3,0,4,1,1,17.015,20.51665,60.2917,14.041793,2744 78 | 77,1,0,3,0,5,1,1,22.14,26.35045,52.5217,15.478139,3239 79 | 78,1,0,3,0,6,0,1,19.3725,23.32625,37.9167,24.667189,3117 80 | 79,1,0,3,0,0,0,1,13.6325,16.2875,47.375,13.917307,2471 81 | 80,2,0,3,0,1,1,2,17.647835,20.48675,73.7391,19.348461,2077 82 | 81,2,0,3,0,2,1,1,18.108347,22.0321,62.4583,15.12525,2703 83 | 82,2,0,3,0,3,1,2,14.225237,16.89695,83.9565,15.695487,2121 84 | 83,2,0,3,0,4,1,2,11.685,13.54165,80.5833,16.333729,1865 85 | 84,2,0,3,0,5,1,1,10.830847,12.8156,49.5,15.458575,2210 86 | 85,2,0,3,0,6,0,1,10.899153,12.87855,39.4167,14.041257,2496 87 | 86,2,0,3,0,0,0,2,10.374763,12.51695,49.3913,12.3481,1693 88 | 87,2,0,3,0,1,1,1,10.838268,12.8787,30.2174,14.217668,2028 89 | 88,2,0,3,0,2,1,1,12.4025,14.6454,31.4167,15.208732,2425 90 | 89,2,0,3,0,3,1,2,12.3,14.8675,64.6667,11.583496,1536 91 | 90,2,0,3,0,4,1,3,11.001653,12.87875,91.8333,14.582282,1685 92 | 91,2,0,4,0,5,1,2,12.3,14.1727,68.625,17.333436,2227 93 | 92,2,0,4,0,6,0,2,12.915,15.78185,65.375,13.208782,2252 94 | 93,2,0,4,0,0,0,1,15.511653,18.93835,48,12.208271,3249 95 | 94,2,0,4,0,1,1,1,23.506653,27.14645,42.625,25.833257,3115 96 | 95,2,0,4,0,2,1,2,16.980847,19.9175,64.2083,26.000489,1795 97 | 96,2,0,4,0,3,1,1,16.024153,19.3804,47.0833,17.625221,2808 98 | 97,2,0,4,0,4,1,1,17.9375,21.6848,60.2917,10.874904,3141 99 | 98,2,0,4,0,5,1,2,13.769153,16.22395,83.625,15.208464,1471 100 | 99,2,0,4,0,6,0,2,14.0425,17.07645,87.75,8.916561,2455 101 | 100,2,0,4,0,0,0,2,17.493347,21.33685,85.75,9.833389,2895 102 | 101,2,0,4,0,1,1,2,24.421732,28.26085,71.6956,21.739758,3348 103 | 102,2,0,4,0,2,1,2,20.6025,24.6527,73.9167,18.416893,2034 104 | 103,2,0,4,0,3,1,2,16.9125,20.86415,81.9167,16.791339,2162 105 | 104,2,0,4,0,4,1,1,19.1675,23.1371,54.0417,7.4169,3267 106 | 105,2,0,4,1,5,0,1,18.313347,22.09565,67.125,15.167125,3126 107 | 106,2,0,4,0,6,0,3,17.664153,21.2746,88.8333,22.834136,795 108 | 107,2,0,4,0,0,0,1,18.723347,22.2848,47.9583,20.334232,3744 109 | 108,2,0,4,0,1,1,1,21.0125,25.1573,54.25,10.958989,3429 110 | 109,2,0,4,0,2,1,2,20.739153,24.4629,66.5833,10.584057,3204 111 | 110,2,0,4,0,3,1,1,24.395,28.2196,61.4167,16.208975,3944 112 | 
111,2,0,4,0,4,1,1,18.825847,22.6946,40.7083,21.792286,4189 113 | 112,2,0,4,0,5,1,2,13.803347,16.0977,72.9583,14.707907,1683 114 | 113,2,0,4,0,6,0,2,18.86,22.50605,88.7917,15.458575,4036 115 | 114,2,0,4,0,0,0,2,23.848347,27.58815,81.0833,12.875725,4191 116 | 115,2,0,4,0,1,1,1,24.873347,28.725,77.6667,12.417311,4073 117 | 116,2,0,4,0,2,1,1,25.898347,29.70415,72.9167,21.8755,4400 118 | 117,2,0,4,0,3,1,2,25.42,28.7571,83.5417,20.9174,3872 119 | 118,2,0,4,0,4,1,2,25.3175,28.94645,70.0833,21.500836,4058 120 | 119,2,0,4,0,5,1,1,20.91,24.87315,45.7083,16.084221,4595 121 | 120,2,0,4,0,6,0,1,19.3725,23.20105,50.3333,15.750025,5312 122 | 121,2,0,5,0,0,0,2,18.518347,22.4102,76.2083,7.125718,3351 123 | 122,2,0,5,0,1,1,2,22.515847,26.64165,73,12.291418,4401 124 | 123,2,0,5,0,2,1,2,25.283347,29.10395,69.7083,22.958689,4451 125 | 124,2,0,5,0,3,1,2,16.980847,20.2325,73.7083,22.042732,2633 126 | 125,2,0,5,0,4,1,1,18.825847,22.09585,44.4167,19.791264,4433 127 | 126,2,0,5,0,5,1,1,19.645847,23.70585,59,15.292482,4608 128 | 127,2,0,5,0,6,0,1,21.32,25.63105,54.125,10.75015,4714 129 | 128,2,0,5,0,0,0,1,21.661653,25.94665,63.1667,5.0007125,4333 130 | 129,2,0,5,0,1,1,1,21.8325,26.2623,58.875,11.792,4362 131 | 130,2,0,5,0,2,1,1,21.8325,26.13605,48.9167,7.749957,4803 132 | 131,2,0,5,0,3,1,1,22.2425,26.42,63.2917,8.083014,4182 133 | 132,2,0,5,0,4,1,1,21.935,26.16815,74.75,12.707689,4864 134 | 133,2,0,5,0,5,1,2,21.0125,24.715,86.3333,12.041575,4105 135 | 134,2,0,5,0,6,0,2,21.354153,25.03145,92.25,9.04165,3409 136 | 135,2,0,5,0,0,0,2,23.0625,26.8,86.7083,10.249593,4553 137 | 136,2,0,5,0,1,1,1,23.6775,27.5256,78.7917,8.500357,3958 138 | 137,2,0,5,0,2,1,2,23.028347,26.92645,83.7917,18.582718,4123 139 | 138,2,0,5,0,3,1,2,22.55,26.3579,87,13.499964,3855 140 | 139,2,0,5,0,4,1,2,21.764153,25.5371,82.9583,7.250271,4575 141 | 140,2,0,5,0,5,1,1,22.003347,26.4521,71.9583,8.375871,4917 142 | 141,2,0,5,0,6,0,1,24.7025,28.59875,62.6667,8.08355,5805 143 | 142,2,0,5,0,0,0,1,24.770847,28.725,74.9583,9.916536,4660 144 | 143,2,0,5,0,1,1,2,25.898347,29.5148,81,15.667414,4274 145 | 144,2,0,5,0,2,1,2,27.06,30.24065,74.0833,13.875164,4492 146 | 145,2,0,5,0,3,1,1,27.094153,30.7771,69.625,10.333611,4978 147 | 146,2,0,5,0,4,1,1,29.041653,32.7344,67.75,13.376014,4677 148 | 147,2,0,5,0,5,1,1,27.948347,31.8504,65.375,16.125493,4679 149 | 148,2,0,5,0,6,0,1,26.889153,30.61895,72.9583,15.416164,4758 150 | 149,2,0,5,0,0,0,1,27.3675,30.7775,81.875,14.333846,4788 151 | 150,2,0,5,1,1,0,1,30.066653,33.5546,68.5,8.792075,4098 152 | 151,2,0,5,0,2,1,1,31.775,36.26915,63.6667,7.459043,3982 153 | 152,2,0,6,0,3,1,2,31.330847,36.04835,67.7083,13.875164,3974 154 | 153,2,0,6,0,4,1,1,29.315,32.1971,30.5,19.583229,4968 155 | 154,2,0,6,0,5,1,1,25.42,29.35665,35.4167,16.959107,5312 156 | 155,2,0,6,0,6,0,1,26.035,29.7348,45.625,8.250514,5342 157 | 156,2,0,6,0,0,0,2,26.581653,30.8402,65.25,9.292364,4906 158 | 157,2,0,6,0,1,1,1,27.811653,31.0929,60,8.167032,4548 159 | 158,2,0,6,0,2,1,1,29.0075,32.7975,59.7917,12.583136,4833 160 | 159,2,0,6,0,3,1,1,31.809153,36.36395,62.2083,9.166739,4401 161 | 160,2,0,6,0,4,1,2,33.141653,37.87895,56.8333,10.042161,3915 162 | 161,2,0,6,0,5,1,1,30.955,35.1646,60.5,9.417118,4586 163 | 162,2,0,6,0,6,0,1,29.725,33.9019,65.4583,10.37495,4966 164 | 163,2,0,6,0,0,0,1,28.3925,32.16625,74.7917,10.958989,4460 165 | 164,2,0,6,0,1,1,1,26.035,30.0827,49.4583,20.45845,5020 166 | 165,2,0,6,0,2,1,1,24.770847,29.5773,50.7083,18.041961,4891 167 | 166,2,0,6,0,3,1,1,25.693347,29.3877,47.1667,11.250104,5180 168 | 
167,2,0,6,0,4,1,2,25.761653,29.7673,68.8333,13.833557,3767 169 | 168,2,0,6,0,5,1,1,26.615847,30.01915,73.5833,9.582943,4844 170 | 169,2,0,6,0,6,0,1,28.563347,32.1977,67.0417,8.000336,5119 171 | 170,2,0,6,0,0,0,2,28.665847,32.2923,66.6667,6.834,4744 172 | 171,2,0,6,0,1,1,2,26.035,29.7673,74.625,10.416825,4010 173 | 172,3,0,6,0,2,1,2,27.914153,31.8823,77.0417,11.458675,4835 174 | 173,3,0,6,0,3,1,1,30.066653,34.69145,70.75,11.541554,4507 175 | 174,3,0,6,0,4,1,2,29.861653,34.69165,70.3333,15.999868,4790 176 | 175,3,0,6,0,5,1,1,29.690847,32.82915,57.3333,14.875675,4991 177 | 176,3,0,6,0,6,0,1,28.495,32.16565,48.3333,14.041257,5202 178 | 177,3,0,6,0,0,0,1,27.88,31.88145,51.3333,6.3337311,5305 179 | 178,3,0,6,0,1,1,2,27.9825,31.8502,65.8333,7.208396,4708 180 | 179,3,0,6,0,2,1,1,30.510847,34.6279,63.4167,9.666961,4648 181 | 180,3,0,6,0,3,1,1,29.861653,32.7344,49.7917,17.542007,5225 182 | 181,3,0,6,0,4,1,1,28.563347,31.8504,43.4167,12.415904,5515 183 | 182,3,0,7,0,5,1,1,29.6225,32.6081,39.625,6.874736,5362 184 | 183,3,0,7,0,6,0,1,30.271653,33.3654,44.4583,7.709154,5119 185 | 184,3,0,7,0,0,0,2,29.383347,33.42875,68.25,15.333486,4649 186 | 185,3,0,7,1,1,0,2,29.793347,33.27085,63.7917,5.4591064,6043 187 | 186,3,0,7,0,2,1,1,30.613347,34.8169,59.0417,8.459286,4665 188 | 187,3,0,7,0,3,1,1,29.52,34.28165,74.3333,10.042161,4629 189 | 188,3,0,7,0,4,1,1,30.75,34.34355,65.125,10.6664,4592 190 | 189,3,0,7,0,5,1,2,29.075847,33.52415,75.7917,15.083643,4040 191 | 190,3,0,7,0,6,0,1,30.066653,33.2079,60.9167,11.250104,5336 192 | 191,3,0,7,0,0,0,1,30.6475,34.50125,57.8333,12.292557,4881 193 | 192,3,0,7,0,1,1,1,31.2625,36.4902,63.5833,18.916579,4086 194 | 193,3,0,7,0,2,1,1,32.560847,36.96375,55.9167,13.417018,4258 195 | 194,3,0,7,0,3,1,1,30.613347,34.4702,63.1667,9.790911,4342 196 | 195,3,0,7,0,4,1,1,27.914153,31.7552,47.625,16.124689,5084 197 | 196,3,0,7,0,5,1,1,27.196653,31.21855,59.125,12.249811,5538 198 | 197,3,0,7,0,6,0,1,28.153347,31.91315,58.5,13.958914,5923 199 | 198,3,0,7,0,0,0,1,29.485847,33.49165,60.4167,16.417211,5302 200 | 199,3,0,7,0,1,1,1,30.613347,35.19625,65.125,14.458868,4458 201 | 200,3,0,7,0,2,1,1,31.843347,37.37395,65.0417,8.7502,4541 202 | 201,3,0,7,0,3,1,1,31.501653,37.3425,70.7083,7.625739,4332 203 | 202,3,0,7,0,4,1,2,33.415,41.31855,69.125,14.875407,3784 204 | 203,3,0,7,0,5,1,1,34.781653,42.0448,58.0417,8.9177,3387 205 | 204,3,0,7,0,6,0,1,34.815847,40.21435,50,8.791807,3285 206 | 205,3,0,7,0,0,0,1,34.03,39.74145,55.0833,11.334457,3606 207 | 206,3,0,7,0,1,1,1,30.476653,36.0479,75.7083,6.0841561,3840 208 | 207,3,0,7,0,2,1,1,31.638347,34.84895,54.0833,13.417286,4590 209 | 208,3,0,7,0,3,1,1,31.775,34.53335,40.2917,12.292021,4656 210 | 209,3,0,7,0,4,1,1,31.945847,36.995,58.3333,11.958093,4390 211 | 210,3,0,7,0,5,1,1,34.371653,39.29835,54.25,11.667246,3846 212 | 211,3,0,7,0,6,0,1,32.970847,36.42685,46.5833,11.291979,4475 213 | 212,3,0,7,0,0,0,1,33.039153,36.4898,48.0833,11.042471,4302 214 | 213,3,0,8,0,1,1,1,31.638347,35.1646,55.0833,10.500039,4266 215 | 214,3,0,8,0,2,1,1,32.116653,35.35355,49.125,13.79195,4845 216 | 215,3,0,8,0,3,1,2,29.998347,33.99685,65.75,9.084061,3574 217 | 216,3,0,8,0,4,1,2,29.11,33.2394,75.75,13.20905,4576 218 | 217,3,0,8,0,5,1,1,29.144153,32.82835,63.0833,12.374632,4866 219 | 218,3,0,8,0,6,0,2,29.383347,33.8077,75.5,15.29275,4294 220 | 219,3,0,8,0,0,0,1,30.4425,35.7646,75.2917,13.499629,3785 221 | 220,3,0,8,0,1,1,1,31.365,35.16415,59.2083,12.875725,4326 222 | 221,3,0,8,0,2,1,1,31.775,36.20605,57.0417,10.125107,4602 223 | 
222,3,0,8,0,3,1,1,31.433347,34.24915,42.4167,13.417286,4780 224 | 223,3,0,8,0,4,1,1,29.4175,32.57605,42.375,11.041332,4792 225 | 224,3,0,8,0,5,1,1,29.041653,32.7021,41.5,8.416607,4905 226 | 225,3,0,8,0,6,0,2,28.119153,32.2929,72.9583,14.167418,4150 227 | 226,3,0,8,0,0,0,2,27.743347,31.2194,81.75,14.916411,3820 228 | 227,3,0,8,0,1,1,1,27.299153,30.80835,71.2083,13.999918,4338 229 | 228,3,0,8,0,2,1,1,28.734153,32.29185,57.8333,15.834043,4725 230 | 229,3,0,8,0,3,1,1,29.656653,33.33355,57.5417,9.625689,4694 231 | 230,3,0,8,0,4,1,1,29.178347,33.1129,65.4583,15.624936,3805 232 | 231,3,0,8,0,5,1,2,28.085,31.66105,72.2917,9.333636,4153 233 | 232,3,0,8,0,6,0,1,28.5975,32.4498,67.4167,6.999289,5191 234 | 233,3,0,8,0,0,0,1,29.144153,33.77625,77,16.666518,3873 235 | 234,3,0,8,0,1,1,1,28.358347,31.9127,47,18.54225,4758 236 | 235,3,0,8,0,2,1,1,26.274153,30.30335,45.5417,9.833121,5895 237 | 236,3,0,8,0,3,1,1,27.606653,31.5346,60.5,16.958236,5130 238 | 237,3,0,8,0,4,1,2,28.050847,32.2927,77.1667,14.125811,3542 239 | 238,3,0,8,0,5,1,1,28.7,32.98665,76.125,5.6254875,4661 240 | 239,3,0,8,0,6,0,2,27.88,31.7778,85,25.166339,1115 241 | 240,3,0,8,0,0,0,1,28.989419,32.39795,56.1765,20.412153,4334 242 | 241,3,0,8,0,1,1,1,26.103347,30.3979,55.4583,10.708275,4634 243 | 242,3,0,8,0,2,1,1,26.205847,29.7352,54.8333,8.375536,5204 244 | 243,3,0,8,0,3,1,1,26.923347,30.55605,59.7917,5.5833311,5058 245 | 244,3,0,9,0,4,1,1,26.855,30.74605,63.9167,9.500332,5115 246 | 245,3,0,9,0,5,1,2,26.376653,30.2404,72.7083,9.375243,4727 247 | 246,3,0,9,0,6,0,1,27.435847,31.66065,71.6667,12.416775,4484 248 | 247,3,0,9,0,0,0,1,29.075847,33.27145,74.2083,13.833289,4940 249 | 248,3,0,9,1,1,0,2,27.606653,31.2823,79.0417,14.250632,3351 250 | 249,3,0,9,0,2,1,3,22.14,25.76,88.6957,23.044181,2710 251 | 250,3,0,9,0,3,1,3,24.565847,27.21145,91.7083,6.5003936,1996 252 | 251,3,0,9,0,4,1,3,25.990433,27.76805,93.9565,12.914116,1842 253 | 252,3,0,9,0,5,1,2,26.65,28.9473,89.7917,8.333393,3544 254 | 253,3,0,9,0,6,0,1,27.06,30.3981,75.375,10.291736,5345 255 | 254,3,0,9,0,0,0,1,26.786653,30.46145,71.375,7.708618,5046 256 | 255,3,0,9,0,1,1,1,26.418268,30.1065,69.2174,5.957171,4713 257 | 256,3,0,9,0,2,1,1,26.684153,30.1777,71.25,9.500868,4763 258 | 257,3,0,9,0,3,1,1,27.606653,31.345,69.7083,11.2091,4785 259 | 258,3,0,9,0,4,1,2,23.6775,27.68355,70.9167,18.166782,3659 260 | 259,3,0,9,0,5,1,2,19.235847,23.07375,59.0417,11.000261,4760 261 | 260,3,0,9,0,6,0,2,20.158347,23.9256,71.8333,12.708225,4511 262 | 261,3,0,9,0,0,0,1,20.8075,24.52685,69.5,11.958361,4274 263 | 262,3,0,9,0,1,1,2,22.515847,26.48375,69,10.166714,4539 264 | 263,3,0,9,0,2,1,2,23.028347,26.61085,88.125,9.041918,3641 265 | 264,3,0,9,0,3,1,2,24.395,27.52665,90,6.4590814,4352 266 | 265,3,0,9,0,4,1,2,25.761653,27.74815,90.2083,8.584375,4795 267 | 266,4,0,9,0,5,1,2,24.975847,26.10625,97.25,5.2505689,2395 268 | 267,4,0,9,0,6,0,2,24.873347,28.2206,86.25,5.2516811,5423 269 | 268,4,0,9,0,0,0,2,26.000847,28.63185,84.5,3.3754064,5010 270 | 269,4,0,9,0,1,1,2,26.615847,29.4521,84.8333,7.4169,4630 271 | 270,4,0,9,0,2,1,2,26.103347,28.72625,88.5417,7.917457,4120 272 | 271,4,0,9,0,3,1,2,26.035,28.7579,84.875,9.958143,3907 273 | 272,4,0,9,0,4,1,1,25.283347,28.7256,69.9167,11.583161,4839 274 | 273,4,0,9,0,5,1,1,23.130847,27.24145,64.75,13.833825,5202 275 | 274,4,0,10,0,6,0,2,16.81,20.64315,75.375,19.583832,2429 276 | 275,4,0,10,0,0,0,2,14.623347,17.26585,79.1667,14.874871,2918 277 | 276,4,0,10,0,1,1,2,15.750847,19.6023,76.0833,5.5841686,3570 278 | 277,4,0,10,0,2,1,1,19.850847,23.6429,71,13.792218,4456 279 | 
278,4,0,10,0,3,1,1,22.071653,26.3569,64.7917,11.87575,4826 280 | 279,4,0,10,0,4,1,1,20.260847,24.02125,62.0833,9.041918,4765 281 | 280,4,0,10,0,5,1,1,20.944153,25.2202,68.4167,1.5002439,4985 282 | 281,4,0,10,0,6,0,1,21.388347,25.6621,70.125,3.0420814,5409 283 | 282,4,0,10,0,0,0,1,22.174153,26.19915,72.75,4.25115,5511 284 | 283,4,0,10,1,1,0,1,23.404153,27.14625,73.375,2.8343814,5117 285 | 284,4,0,10,0,2,1,2,23.233347,27.3048,80.875,9.583814,4563 286 | 285,4,0,10,0,3,1,3,22.276653,25.88585,90.625,16.62605,2416 287 | 286,4,0,10,0,4,1,2,24.155847,27.5902,89.6667,9.499729,2913 288 | 287,4,0,10,0,5,1,2,22.584153,26.48375,71.625,15.000161,3644 289 | 288,4,0,10,0,6,0,1,20.773347,24.93625,48.3333,17.291561,5217 290 | 289,4,0,10,0,0,0,1,20.978347,25.1577,48.6667,18.875039,5041 291 | 290,4,0,10,0,1,1,1,21.900847,25.53625,57.9583,11.750393,4570 292 | 291,4,0,10,0,2,1,2,21.8325,26.13605,70.1667,7.375829,4748 293 | 292,4,0,10,0,3,1,3,22.211299,25.6924,89.5217,16.303713,2424 294 | 293,4,0,10,0,4,1,1,19.509153,23.32625,63.625,28.292425,4195 295 | 294,4,0,10,0,5,1,1,17.5275,21.1798,57.4167,14.833532,4304 296 | 295,4,0,10,0,6,0,1,17.3225,21.2746,62.9167,6.2086689,4308 297 | 296,4,0,10,0,0,0,1,17.288347,21.11665,74.125,6.6673375,4381 298 | 297,4,0,10,0,1,1,1,18.996653,22.85335,77.2083,7.959064,4187 299 | 298,4,0,10,0,2,1,1,19.338347,23.16875,62.2917,11.166086,4687 300 | 299,4,0,10,0,3,1,2,19.850847,23.6423,72.0417,9.959014,3894 301 | 300,4,0,10,0,4,1,2,19.27,22.8523,81.2917,13.250121,2659 302 | 301,4,0,10,0,5,1,2,13.564153,15.9406,58.5833,15.375093,3747 303 | 302,4,0,10,0,6,0,3,10.420847,11.39565,88.25,23.541857,627 304 | 303,4,0,10,0,0,0,1,13.085847,16.06645,62.375,11.833339,3331 305 | 304,4,0,10,0,1,1,1,13.94,17.80315,70.3333,7.12545,3669 306 | 305,4,0,11,0,2,1,1,16.434153,19.8544,68.375,9.083257,4068 307 | 306,4,0,11,0,3,1,1,15.4775,19.50665,71.875,5.5001439,4186 308 | 307,4,0,11,0,4,1,1,16.741653,20.29605,70.2083,9.166739,3974 309 | 308,4,0,11,0,5,1,2,16.536653,20.1696,62.25,18.209193,4046 310 | 309,4,0,11,0,6,0,1,13.393347,16.1927,51.9167,12.667154,3926 311 | 310,4,0,11,0,0,0,1,14.281653,18.1179,73.4583,6.1676314,3649 312 | 311,4,0,11,0,1,1,1,16.195,20.04355,75.875,3.834075,4035 313 | 312,4,0,11,0,2,1,1,16.741653,20.6123,72.1667,4.6255125,4205 314 | 313,4,0,11,0,3,1,1,16.4,20.45395,75.8333,4.1671186,4109 315 | 314,4,0,11,0,4,1,2,15.58,18.68605,81.3333,12.667489,2933 316 | 315,4,0,11,1,5,0,1,13.290847,15.34085,44.625,21.083225,3368 317 | 316,4,0,11,0,6,0,1,14.623347,17.8971,55.2917,14.208154,4067 318 | 317,4,0,11,0,0,0,1,18.074153,21.5275,45.8333,18.875307,3717 319 | 318,4,0,11,0,1,1,1,21.73,26.2306,58.7083,20.541932,4486 320 | 319,4,0,11,0,2,1,2,21.73,25.37895,68.875,13.375411,4195 321 | 320,4,0,11,0,3,1,3,18.723347,22.5994,93,9.167543,1817 322 | 321,4,0,11,0,4,1,2,14.008347,16.16105,57.5833,20.459254,3053 323 | 322,4,0,11,0,5,1,1,11.240847,13.63605,41,11.291711,3392 324 | 323,4,0,11,0,6,0,1,13.495847,16.22415,50.2083,15.041232,3663 325 | 324,4,0,11,0,0,0,2,18.996653,22.8529,68.4583,12.45865,3520 326 | 325,4,0,11,0,1,1,3,18.3475,22.2531,91,9.249618,2765 327 | 326,4,0,11,0,2,1,3,17.083347,21.0848,96.25,7.959064,1607 328 | 327,4,0,11,0,3,1,2,18.074153,21.52685,75.7917,22.500275,2566 329 | 328,4,0,11,1,4,0,1,15.306653,18.62355,54.9167,11.209368,1495 330 | 329,4,0,11,0,5,1,1,15.375,19.03355,64.375,6.6260186,2792 331 | 330,4,0,11,0,6,0,1,15.409153,19.25435,68.1667,4.5841936,3068 332 | 331,4,0,11,0,0,0,1,18.825847,22.79,69.8333,13.999918,3071 333 | 332,4,0,11,0,1,1,1,20.642598,24.5061,74.3043,9.522174,3867 
334 | 333,4,0,11,0,2,1,2,18.791653,22.56875,83.0833,17.292164,2914 335 | 334,4,0,11,0,3,1,1,13.325,15.56105,61.3333,18.167586,3613 336 | 335,4,0,12,0,4,1,1,12.8125,15.2777,52.4583,14.750586,3727 337 | 336,4,0,12,0,5,1,1,12.880847,16.57165,62.5833,6.750518,3940 338 | 337,4,0,12,0,6,0,1,12.265847,15.5302,61.2917,6.4174811,3614 339 | 338,4,0,12,0,0,0,1,13.564153,17.455,77.5833,5.6252061,3485 340 | 339,4,0,12,0,1,1,2,15.819153,19.69625,82.7083,4.1679561,3811 341 | 340,4,0,12,0,2,1,3,18.9625,22.82,94.9583,15.583061,2594 342 | 341,4,0,12,0,3,1,3,16.81,20.0123,97.0417,17.833725,705 343 | 342,4,0,12,0,4,1,1,10.899153,12.8469,58,16.083886,3322 344 | 343,4,0,12,0,5,1,1,11.924153,15.8771,69.5833,5.5420189,3620 345 | 344,4,0,12,0,6,0,1,11.275,13.3206,50.75,15.625807,3190 346 | 345,4,0,12,0,0,0,1,9.054153,12.6577,49,4.4582939,2743 347 | 346,4,0,12,0,1,1,1,9.771653,13.5098,67.0833,4.25115,3310 348 | 347,4,0,12,0,2,1,1,11.5825,15.0569,59,9.41685,3523 349 | 348,4,0,12,0,3,1,2,13.0175,16.9181,66.375,4.0842061,3740 350 | 349,4,0,12,0,4,1,2,17.3225,20.61185,63.4167,17.958814,3709 351 | 350,4,0,12,0,5,1,2,15.375,17.99125,50.0417,17.458525,3577 352 | 351,4,0,12,0,6,0,2,10.591653,12.46855,56.0833,16.292189,2739 353 | 352,4,0,12,0,0,0,1,9.771653,12.27895,58.625,11.375193,2431 354 | 353,4,0,12,0,1,1,1,11.343347,14.04665,63.75,11.584032,3403 355 | 354,4,0,12,0,2,1,2,15.819153,19.8227,59.5417,4.1252436,3750 356 | 355,1,0,12,0,3,1,2,17.561653,21.40085,85.8333,14.8338,2660 357 | 356,1,0,12,0,4,1,2,17.356653,21.30605,75.75,3.167425,3068 358 | 357,1,0,12,0,5,1,1,15.306653,18.87565,68.625,18.374482,2209 359 | 358,1,0,12,0,6,0,1,12.4025,14.9621,54.25,12.750368,1011 360 | 359,1,0,12,0,0,0,1,11.266103,13.99805,68.1304,10.391097,754 361 | 360,1,0,12,1,1,0,1,13.191299,15.77675,50.6957,16.044155,1317 362 | 361,1,0,12,0,2,1,2,13.325,16.38165,76.25,12.62615,1162 363 | 362,1,0,12,0,3,1,1,12.26433,13.9987,50.3913,19.695387,2302 364 | 363,1,0,12,0,4,1,1,10.181653,13.1946,57.4167,8.000604,2423 365 | 364,1,0,12,0,5,1,1,12.778347,15.9406,63.6667,9.000579,2999 366 | 365,1,0,12,0,6,0,1,16.81,20.70605,61.5833,14.750318,2485 367 | 366,1,1,1,0,0,0,1,15.17,18.78105,69.25,12.875189,2294 368 | 367,1,1,1,1,1,0,1,11.194763,12.6152,38.1304,22.087555,1951 369 | 368,1,1,1,0,2,1,1,6.15,6.31375,44.125,24.499957,2236 370 | 369,1,1,1,0,3,1,2,4.4075,5.96685,41.4583,12.3749,2368 371 | 370,1,1,1,0,4,1,1,10.899153,13.9206,52.4167,8.709129,3272 372 | 371,1,1,1,0,5,1,1,13.700847,17.01335,54.2083,11.249836,4098 373 | 372,1,1,1,0,6,0,1,16.126653,19.53895,53.1667,11.708786,4521 374 | 373,1,1,1,0,0,0,1,13.8375,17.0129,46.5,12.833314,3425 375 | 374,1,1,1,0,1,1,2,9.190847,12.37395,70.1667,6.6263,2376 376 | 375,1,1,1,0,2,1,1,12.656536,15.9413,64.6522,12.565984,3598 377 | 376,1,1,1,0,3,1,2,11.240847,14.14105,84.75,8.791807,2177 378 | 377,1,1,1,0,4,1,2,15.6825,19.0969,80.2917,12.124789,4097 379 | 378,1,1,1,0,5,1,1,11.240847,12.4681,50.75,25.333236,3214 380 | 379,1,1,1,0,6,0,1,7.38,9.15435,45.75,12.541261,2493 381 | 380,1,1,1,0,0,0,1,6.833347,8.08125,41.9167,16.834286,2311 382 | 381,1,1,1,1,1,0,1,7.79,9.53315,52.25,15.500986,2298 383 | 382,1,1,1,0,2,1,2,15.294763,18.2139,71.6087,23.39171,2935 384 | 383,1,1,1,0,3,1,1,12.436653,13.7627,44.3333,27.833743,3376 385 | 384,1,1,1,0,4,1,1,7.79,9.5019,49.75,14.750586,3292 386 | 385,1,1,1,0,5,1,2,8.9175,11.0479,45,13.58425,3163 387 | 386,1,1,1,0,6,0,2,7.106653,8.74375,83.125,14.917014,1301 388 | 387,1,1,1,0,0,0,2,6.6625,8.1125,79.625,13.375746,1977 389 | 388,1,1,1,0,1,1,2,8.951653,12.1529,91.125,7.417436,2432 390 | 
389,1,1,1,0,2,1,1,14.0425,17.4554,83.5833,8.292389,4339 391 | 390,1,1,1,0,3,1,1,12.060847,14.74105,64.375,10.791757,4270 392 | 391,1,1,1,0,4,1,2,14.008347,17.8025,76.9583,4.9175186,4075 393 | 392,1,1,1,0,5,1,2,17.425,20.76915,74.125,22.958689,3456 394 | 393,1,1,1,0,6,0,1,12.949153,16.31895,54.3333,14.125543,4023 395 | 394,1,1,1,0,0,0,1,11.5825,13.63605,31.125,16.08335,3243 396 | 395,1,1,1,0,1,1,1,11.035847,13.13125,40.0833,14.458064,3624 397 | 396,1,1,1,0,2,1,1,15.99,19.06585,41.6667,17.541739,4509 398 | 397,1,1,2,0,3,1,1,19.235847,23.3269,50.7917,12.667489,4579 399 | 398,1,1,2,0,4,1,2,16.365847,19.94855,67.2917,12.541529,3761 400 | 399,1,1,2,0,5,1,1,12.846653,15.4673,52.6667,11.959232,4151 401 | 400,1,1,2,0,6,0,2,10.830847,13.63625,77.9583,8.167032,2832 402 | 401,1,1,2,0,0,0,2,10.899153,13.22605,68.7917,11.791732,2947 403 | 402,1,1,2,0,1,1,1,11.586969,14.8213,62.2174,10.3046,3784 404 | 403,1,1,2,0,2,1,1,14.520847,18.0552,49.625,9.874393,4375 405 | 404,1,1,2,0,3,1,2,10.523347,13.32105,72.2917,8.959307,2802 406 | 405,1,1,2,0,4,1,1,10.865,13.0994,56.2083,13.000479,3830 407 | 406,1,1,2,0,5,1,2,11.514153,14.6779,54,7.834243,3831 408 | 407,1,1,2,0,6,0,3,9.190847,10.54335,73.125,19.416332,2169 409 | 408,1,1,2,0,0,0,1,5.2275,5.0829,46.4583,27.417204,1529 410 | 409,1,1,2,0,1,1,1,9.1225,11.39565,41.125,11.207961,3422 411 | 410,1,1,2,0,2,1,2,13.085847,16.6973,50.875,9.458993,3922 412 | 411,1,1,2,0,3,1,1,14.281653,17.58145,53.125,12.1672,4169 413 | 412,1,1,2,0,4,1,2,12.983347,16.5081,75.2917,6.125475,3005 414 | 413,1,1,2,0,5,1,1,14.076653,17.58145,63.4583,13.791682,4154 415 | 414,1,1,2,0,6,0,1,14.213347,17.77125,53.4583,12.792243,4318 416 | 415,1,1,2,0,0,0,2,11.48,13.2894,51.5833,16.958504,2689 417 | 416,1,1,2,1,1,0,1,11.48,13.66955,50.7826,15.348561,3129 418 | 417,1,1,2,0,2,1,1,11.800866,14.75565,59.4348,13.783039,3777 419 | 418,1,1,2,0,3,1,1,16.229153,19.63335,56.7917,15.709557,4773 420 | 419,1,1,2,0,4,1,1,18.620847,22.2223,55.4583,12.791171,5062 421 | 420,1,1,2,0,5,1,2,16.7075,20.54855,73.75,15.916989,3487 422 | 421,1,1,2,0,6,0,1,11.924153,12.78375,39.5833,28.250014,2732 423 | 422,1,1,2,0,0,0,1,11.445847,13.4154,41,13.750343,3389 424 | 423,1,1,2,0,1,1,1,15.033347,17.8977,49.0833,17.958211,4322 425 | 424,1,1,2,0,2,1,1,14.725847,17.67625,39.5833,12.958939,4363 426 | 425,1,1,3,0,4,1,1,19.919153,23.76855,61.5417,15.208129,4990 427 | 426,1,1,3,0,5,1,2,14.486653,17.9921,65.7083,9.708568,3194 428 | 427,1,1,3,0,6,0,2,16.980847,20.6746,62.125,10.792293,4066 429 | 428,1,1,3,0,0,0,1,13.359153,15.15105,40.3333,22.416257,3423 430 | 429,1,1,3,0,1,1,1,9.976653,12.05855,50.625,15.333486,3333 431 | 430,1,1,3,0,2,1,1,10.591653,12.7521,45.6667,13.458625,3956 432 | 431,1,1,3,0,3,1,1,16.570847,19.255,51.3333,23.167193,4916 433 | 432,1,1,3,0,4,1,1,21.6275,26.2302,56.75,29.584721,5382 434 | 433,1,1,3,0,5,1,2,16.844153,19.85415,40.7083,27.7916,4569 435 | 434,1,1,3,0,6,0,1,11.7875,13.88835,35.0417,15.12525,4118 436 | 435,1,1,3,0,0,0,1,14.831299,17.9835,47.6957,14.913329,4911 437 | 436,1,1,3,0,1,1,1,19.133347,22.9796,48.9167,13.916771,5298 438 | 437,1,1,3,0,2,1,1,23.165,27.14645,61.75,15.87565,5847 439 | 438,1,1,3,0,3,1,1,23.4725,27.43085,50.7083,7.709154,6312 440 | 439,1,1,3,0,4,1,1,22.8575,26.64125,57.9583,10.042161,6192 441 | 440,1,1,3,0,5,1,2,17.869153,21.81145,84.2083,7.583864,4378 442 | 441,1,1,3,0,6,0,2,21.080847,25.2523,75.5833,7.417168,7836 443 | 442,1,1,3,0,0,0,2,19.3725,23.2,81,8.501161,5892 444 | 443,1,1,3,0,1,1,1,22.345,26.64105,72.875,10.875239,6153 445 | 
444,1,1,3,0,2,1,1,22.994153,26.92665,80.7917,8.125157,6093 446 | 445,2,1,3,0,3,1,2,21.798347,25.6629,82.125,6.0004061,6230 447 | 446,2,1,3,0,4,1,1,22.720847,26.57835,83.125,7.876654,6871 448 | 447,2,1,3,0,5,1,2,24.668347,28.50335,69.4167,7.7921,8362 449 | 448,2,1,3,0,6,0,2,20.6025,24.33665,88.5417,12.916461,3372 450 | 449,2,1,3,0,0,0,2,17.9375,21.8744,88.0833,14.791925,4996 451 | 450,2,1,3,0,1,1,1,18.279153,21.9375,47.7917,25.917007,5558 452 | 451,2,1,3,0,2,1,1,13.256653,15.7827,29,12.541864,5102 453 | 452,2,1,3,0,3,1,1,19.850847,23.5475,48.125,19.541957,5698 454 | 453,2,1,3,0,4,1,1,20.260847,24.1152,43.9167,21.41655,6133 455 | 454,2,1,3,0,5,1,2,15.17,18.78105,58.0833,9.250489,5459 456 | 455,2,1,3,0,6,0,2,17.390847,21.0854,73.8333,16.791339,6235 457 | 456,2,1,4,0,0,0,2,17.459153,20.86435,67.625,11.541889,6041 458 | 457,2,1,4,0,1,1,1,17.790433,21.37565,50.4348,20.913313,5936 459 | 458,2,1,4,0,2,1,1,19.133347,23.07415,39.6667,6.708911,6772 460 | 459,2,1,4,0,3,1,1,22.208347,26.6725,46.9583,12.125325,6436 461 | 460,2,1,4,0,4,1,1,17.835,21.55815,37.4167,14.708443,6457 462 | 461,2,1,4,0,5,1,1,16.536653,19.53835,37.7083,20.125996,6460 463 | 462,2,1,4,0,6,0,1,17.9375,21.30645,25.4167,18.416357,6857 464 | 463,2,1,4,0,0,0,1,20.5,24.62125,27.5833,15.583932,5169 465 | 464,2,1,4,0,1,1,1,20.055847,23.8319,31.75,23.999132,5585 466 | 465,2,1,4,0,2,1,1,18.313347,21.81165,43.5,16.708125,5918 467 | 466,2,1,4,0,3,1,1,14.296536,16.8637,46.9565,19.783358,4862 468 | 467,2,1,4,0,4,1,1,16.2975,19.3802,46.625,19.458743,5409 469 | 468,2,1,4,0,5,1,1,18.1425,21.5904,40.8333,10.416557,6398 470 | 469,2,1,4,0,6,0,1,20.295,24.3998,50.2917,12.791439,7460 471 | 470,2,1,4,0,0,0,1,24.873347,28.69375,50.7917,15.083643,7132 472 | 471,2,1,4,1,1,0,1,27.230847,30.74625,56.1667,19.083543,6370 473 | 472,2,1,4,0,2,1,1,24.941653,29.92435,39.0417,18.333143,6691 474 | 473,2,1,4,0,3,1,2,18.996653,22.8519,56.9167,11.250104,4367 475 | 474,2,1,4,0,4,1,1,20.431653,24.6523,61.25,4.4172564,6565 476 | 475,2,1,4,0,5,1,1,21.593347,25.78875,69.4583,10.041357,7290 477 | 476,2,1,4,0,6,0,1,23.37,27.14605,68.2917,19.000329,6624 478 | 477,2,1,4,0,0,0,3,16.263347,19.4752,83.5417,23.084582,1027 479 | 478,2,1,4,0,1,1,2,13.188347,15.05625,76.6667,20.334232,3214 480 | 479,2,1,4,0,2,1,1,16.946653,20.26415,45.4167,16.708661,5633 481 | 480,2,1,4,0,3,1,1,19.543347,23.51585,42.7917,7.959064,6196 482 | 481,2,1,4,0,4,1,2,20.431653,24.17915,75.6667,11.833875,5026 483 | 482,2,1,4,0,5,1,1,18.7575,22.63185,40.0833,23.291411,6233 484 | 483,2,1,4,0,6,0,2,15.443347,18.8752,48.9583,8.708325,4220 485 | 484,2,1,4,0,0,0,1,18.791653,22.50605,58.7083,7.832836,6304 486 | 485,2,1,4,0,1,1,2,19.030847,22.8848,57,11.499746,5572 487 | 486,2,1,5,0,2,1,2,25.146653,28.85105,65.9583,10.458432,5740 488 | 487,2,1,5,0,3,1,1,23.130847,26.8948,79.7083,9.249886,6169 489 | 488,2,1,5,0,4,1,2,22.96,26.8621,76.8333,8.957632,6421 490 | 489,2,1,5,0,5,1,1,25.7275,29.54585,73.5417,10.916846,6296 491 | 490,2,1,5,0,6,0,2,25.488347,29.2304,75.6667,10.250464,6883 492 | 491,2,1,5,0,0,0,2,23.0625,27.33685,74,10.041893,6359 493 | 492,2,1,5,0,1,1,2,22.0375,26.3571,66.4167,15.458307,6273 494 | 493,2,1,5,0,2,1,2,23.848347,27.87355,68.5833,19.833943,5728 495 | 494,2,1,5,0,3,1,2,23.575,27.65125,74.4167,14.499604,4717 496 | 495,2,1,5,0,4,1,1,20.739153,24.58915,55.2083,21.042221,6572 497 | 496,2,1,5,0,5,1,1,21.866653,26.04165,36.0417,15.874779,7030 498 | 497,2,1,5,0,6,0,1,23.130847,27.24085,48.0417,8.249911,7429 499 | 498,2,1,5,0,0,0,1,25.1125,29.2619,57.625,15.082839,6118 500 | 
499,2,1,5,0,1,1,2,23.506653,27.495,78.9583,14.250364,2843 501 | 500,2,1,5,0,2,1,2,25.078347,28.8202,79.4583,9.875264,5115 502 | 501,2,1,5,0,3,1,1,26.103347,29.79875,69.7917,8.208304,7424 503 | 502,2,1,5,0,4,1,1,24.326653,28.63065,52,15.374825,7384 504 | 503,2,1,5,0,5,1,1,23.130847,27.55605,52.3333,9.166739,7639 505 | 504,2,1,5,0,6,0,1,24.6,28.3454,45.625,5.626325,8294 506 | 505,2,1,5,0,0,0,1,25.454153,29.19835,53.0417,17.042589,7129 507 | 506,2,1,5,0,1,1,2,24.531653,28.28335,81.125,15.624668,4359 508 | 507,2,1,5,0,2,1,2,25.215,29.04125,76.5833,7.917189,6073 509 | 508,2,1,5,0,3,1,2,25.488347,29.2306,77.4583,6.834,5260 510 | 509,2,1,5,0,4,1,1,26.855,30.335,71.6667,11.584032,6770 511 | 510,2,1,5,0,5,1,1,27.88,31.37645,74.7083,9.41685,6734 512 | 511,2,1,5,0,6,0,1,28.3925,32.1348,73.25,13.332464,6536 513 | 512,2,1,5,0,0,0,1,28.29,32.07125,69.7083,14.416457,6591 514 | 513,2,1,5,1,1,0,1,29.2125,33.965,67.625,13.166907,6043 515 | 514,2,1,5,0,2,1,1,29.6225,33.6496,68.4583,19.7918,5743 516 | 515,2,1,5,0,3,1,2,26.923347,30.55645,67,9.000043,6855 517 | 516,2,1,5,0,4,1,1,27.88,31.56645,49.2917,13.083693,7338 518 | 517,2,1,6,0,5,1,2,26.820847,30.3981,75.5417,15.916721,4127 519 | 518,2,1,6,0,6,0,1,23.916653,28.3144,54.9167,12.499654,8120 520 | 519,2,1,6,0,0,0,1,24.7025,28.75665,49.3333,12.333829,7641 521 | 520,2,1,6,0,1,1,1,24.4975,28.91415,48.7083,19.083811,6998 522 | 521,2,1,6,0,2,1,2,22.174153,26.2946,61.3333,14.041525,7001 523 | 522,2,1,6,0,3,1,1,22.720847,27.1146,61.125,5.167375,7055 524 | 523,2,1,6,0,4,1,1,24.7025,28.4721,56.7083,10.54245,7494 525 | 524,2,1,6,0,5,1,1,26.615847,29.8931,46.7917,11.750661,7736 526 | 525,2,1,6,0,6,0,1,29.144153,32.41835,43.7083,9.667229,7498 527 | 526,2,1,6,0,0,0,1,29.793347,33.17585,53.8333,8.959307,6598 528 | 527,2,1,6,0,1,1,2,29.554153,32.98605,58.7917,13.916771,6664 529 | 528,2,1,6,0,2,1,2,26.786653,29.89375,83.3333,14.374582,4972 530 | 529,2,1,6,0,3,1,1,26.889153,30.55585,58.2083,22.999693,7421 531 | 530,2,1,6,0,4,1,1,26.581653,31.21915,56.9583,17.000111,7363 532 | 531,2,1,6,0,5,1,1,26.205847,29.9877,58.9583,11.833339,7665 533 | 532,2,1,6,0,6,0,1,25.898347,29.7354,50.4167,11.166689,7702 534 | 533,2,1,6,0,0,0,1,24.2925,28.59875,59.875,9.708568,6978 535 | 534,2,1,6,0,1,1,2,23.301653,27.2421,77.7917,11.707982,5099 536 | 535,2,1,6,0,2,1,1,28.221653,32.7346,69,9.917139,6825 537 | 536,2,1,6,0,3,1,1,32.0825,36.04875,59.2083,7.625404,6211 538 | 537,3,1,6,0,4,1,1,33.039153,37.6271,56.7917,7.958729,5905 539 | 538,3,1,6,0,5,1,1,31.8775,36.20605,57.375,12.250414,5823 540 | 539,3,1,6,0,6,0,1,29.998347,32.6396,53.4583,12.041307,7458 541 | 540,3,1,6,0,0,0,1,30.476653,33.7127,47.9167,9.750175,6891 542 | 541,3,1,6,0,1,1,1,29.349153,32.7021,50.4167,20.125661,6779 543 | 542,3,1,6,0,2,1,1,25.864153,29.7352,37.3333,23.292014,7442 544 | 543,3,1,6,0,3,1,1,28.5975,32.0396,36,18.208925,7335 545 | 544,3,1,6,0,4,1,1,30.715847,33.7756,42.25,11.50055,6879 546 | 545,3,1,6,0,5,1,1,34.200847,39.33065,48.875,11.082939,5463 547 | 546,3,1,6,0,6,0,1,31.365,34.3754,60.125,10.791757,5687 548 | 547,3,1,7,0,0,0,1,33.449153,37.53145,51.875,11.291443,5531 549 | 548,3,1,7,0,1,1,1,32.048347,35.1019,44.7083,13.082889,6227 550 | 549,3,1,7,0,2,1,1,32.014153,35.1325,49.2083,8.457879,6660 551 | 550,3,1,7,1,3,0,1,32.355847,36.61685,53.875,9.04165,7403 552 | 551,3,1,7,0,4,1,1,33.9275,38.06835,45.7917,12.999943,6241 553 | 552,3,1,7,0,5,1,1,33.961653,37.62665,45.0833,9.791514,6207 554 | 553,3,1,7,0,6,0,1,35.328347,40.24565,49.2083,10.958118,4840 555 | 554,3,1,7,0,0,0,1,33.7225,39.5198,57.375,8.417143,4672 556 | 
555,3,1,7,0,1,1,2,29.144153,32.7027,68.3333,12.125325,6569 557 | 556,3,1,7,0,2,1,2,29.554153,33.2398,66.75,10.166379,6290 558 | 557,3,1,7,0,3,1,1,29.383347,32.51355,63.3333,10.166111,7264 559 | 558,3,1,7,0,4,1,1,29.349153,32.73415,52.9583,9.833925,7446 560 | 559,3,1,7,0,5,1,2,29.998347,33.39665,48.5833,5.41695,7499 561 | 560,3,1,7,0,6,0,2,28.836653,33.3021,69.9167,9.626493,6969 562 | 561,3,1,7,0,0,0,1,30.579153,35.2598,71.7917,11.166689,6031 563 | 562,3,1,7,0,1,1,1,31.296653,36.20625,64.5,11.000529,6830 564 | 563,3,1,7,0,2,1,1,33.551653,37.78415,50.5833,7.666743,6786 565 | 564,3,1,7,0,3,1,1,32.526653,37.27915,57.7083,9.208614,5713 566 | 565,3,1,7,0,4,1,1,31.57,35.7321,60.0417,11.083743,6591 567 | 566,3,1,7,0,5,1,2,27.299153,30.65125,84.4167,14.000789,5870 568 | 567,3,1,7,0,6,0,3,24.429153,27.4956,86.5417,14.2911,4459 569 | 568,3,1,7,0,0,0,2,27.3675,31.15625,76.25,6.2926936,7410 570 | 569,3,1,7,0,1,1,1,30.408347,34.50085,69.4167,9.291761,6966 571 | 570,3,1,7,0,2,1,1,30.784153,35.3225,65.5,14.167418,7592 572 | 571,3,1,7,0,3,1,1,29.690847,32.7027,45,11.0416,8173 573 | 572,3,1,7,0,4,1,1,31.843347,36.96315,59.6667,19.082471,6861 574 | 573,3,1,7,0,5,1,1,32.048347,36.71085,59.4583,10.250464,6904 575 | 574,3,1,7,0,6,0,1,30.989153,34.8802,61.3333,10.54245,6685 576 | 575,3,1,7,0,0,0,1,29.588347,33.39665,62.375,11.416532,6597 577 | 576,3,1,7,0,1,1,1,29.964153,34.24935,66.875,10.292339,7105 578 | 577,3,1,7,0,2,1,1,29.246653,33.1448,70.4167,11.083475,7216 579 | 578,3,1,8,0,3,1,1,29.4175,33.3654,67.75,9.458993,7580 580 | 579,3,1,8,0,4,1,1,30.8525,35.3544,65.9583,8.666718,7261 581 | 580,3,1,8,0,5,1,2,31.399153,36.14335,64.25,14.458064,7175 582 | 581,3,1,8,0,6,0,1,32.526653,37.56335,61.3333,17.249686,6824 583 | 582,3,1,8,0,0,0,1,31.535847,36.55395,65.25,19.458207,5464 584 | 583,3,1,8,0,1,1,2,30.8525,35.5123,65.4167,8.666718,7013 585 | 584,3,1,8,0,2,1,2,30.169153,34.88105,70.375,7.832836,7273 586 | 585,3,1,8,0,3,1,2,30.75,35.38585,67.2917,7.4169,7534 587 | 586,3,1,8,0,4,1,1,30.989153,34.9754,62.0417,10.4587,7286 588 | 587,3,1,8,0,5,1,2,29.349153,33.3971,71.5833,16.000471,5786 589 | 588,3,1,8,0,6,0,2,28.3925,31.91335,73.2917,13.834093,6299 590 | 589,3,1,8,0,0,0,1,28.734153,32.22895,53.0417,8.208304,6544 591 | 590,3,1,8,0,1,1,1,29.554153,33.1127,54.5417,9.126204,6883 592 | 591,3,1,8,0,2,1,1,29.793347,33.83895,68.6667,11.333586,6784 593 | 592,3,1,8,0,3,1,1,28.973347,32.70185,61.9583,11.374657,7347 594 | 593,3,1,8,0,4,1,1,29.485847,32.7344,51.9167,9.500332,7605 595 | 594,3,1,8,0,5,1,1,29.656653,12.12,57.0833,15.500718,7148 596 | 595,3,1,8,0,6,0,1,27.811653,30.90355,60.3333,11.917089,7865 597 | 596,3,1,8,0,0,0,2,26.069153,30.1777,71.1667,5.79215,4549 598 | 597,3,1,8,0,1,1,2,26.069153,29.79835,73.4167,8.708593,6530 599 | 598,3,1,8,0,2,1,1,26.615847,30.05125,67.375,4.8756436,7006 600 | 599,3,1,8,0,3,1,1,27.3675,31.0927,67.7083,4.7089811,7375 601 | 600,3,1,8,0,4,1,1,28.529153,31.8504,63.5833,5.6679186,7765 602 | 601,3,1,8,0,5,1,2,28.8025,32.355,61.5,4.8337686,7582 603 | 602,3,1,8,0,6,0,2,27.128347,30.9348,71.2917,16.375336,6053 604 | 603,3,1,8,0,0,0,2,26.786653,29.7998,84.5833,15.333486,5255 605 | 604,3,1,8,0,1,1,1,28.836653,32.7344,73.0417,8.625111,6917 606 | 605,3,1,8,0,2,1,1,29.861653,33.3025,62,12.791975,7040 607 | 606,3,1,8,0,3,1,1,28.085,31.78665,55.2083,7.541654,7697 608 | 607,3,1,8,0,4,1,1,28.973347,32.63895,59.0417,5.1668189,7713 609 | 608,3,1,8,0,5,1,1,31.330847,34.47,58.75,11.291711,7350 610 | 609,3,1,9,0,6,0,2,30.886653,35.1327,63.8333,7.583529,6140 611 | 
610,3,1,9,0,0,0,2,28.563347,32.45,81.5,4.2927436,5810 612 | 611,3,1,9,1,1,0,1,29.0075,33.08145,79.0833,10.125107,6034 613 | 612,3,1,9,0,2,1,1,29.759153,34.3444,75.5,15.833507,6864 614 | 613,3,1,9,0,3,1,1,30.203347,35.44915,74.125,12.583136,7112 615 | 614,3,1,9,0,4,1,2,28.563347,32.76645,81.0417,9.542207,6203 616 | 615,3,1,9,0,5,1,1,28.836653,32.8602,73.625,11.500282,7504 617 | 616,3,1,9,0,6,0,2,27.025847,30.55605,79.9167,18.833968,5976 618 | 617,3,1,9,0,0,0,1,25.01,28.94625,54.75,15.041232,8227 619 | 618,3,1,9,0,1,1,1,23.916653,28.2827,50.375,17.333771,7525 620 | 619,3,1,9,0,2,1,1,23.6775,27.7146,52,6.1676314,7767 621 | 620,3,1,9,0,3,1,1,24.565847,28.50375,57.7083,8.833682,7870 622 | 621,3,1,9,0,4,1,1,25.1125,28.9779,63.7083,5.5422936,7804 623 | 622,3,1,9,0,5,1,1,25.966653,29.70415,67.25,6.958821,8009 624 | 623,3,1,9,0,6,0,1,24.941653,29.29335,50.1667,16.583907,8714 625 | 624,3,1,9,0,0,0,1,23.78,28.15625,57,6.0422811,7333 626 | 625,3,1,9,0,1,1,2,23.814153,27.6525,73.4583,10.166714,6869 627 | 626,3,1,9,0,2,1,2,25.556653,28.25335,87.25,23.958329,4073 628 | 627,3,1,9,0,3,1,1,22.6525,27.0202,53.6667,14.416725,7591 629 | 628,3,1,9,0,4,1,1,22.413347,26.6096,61.8333,7.917189,7720 630 | 629,3,1,9,0,5,1,1,24.565847,28.59855,66.875,10.333343,8167 631 | 630,3,1,9,0,6,0,1,26.65,30.5244,64.6667,19.000061,8395 632 | 631,4,1,9,0,0,0,1,21.695847,25.94665,46.7083,14.958286,7907 633 | 632,4,1,9,0,1,1,1,21.080847,25.12565,49.2917,9.541068,7436 634 | 633,4,1,9,0,2,1,1,22.55,27.20895,57,15.833507,7538 635 | 634,4,1,9,0,3,1,1,26.035,29.83065,63.0833,16.3748,7733 636 | 635,4,1,9,0,4,1,2,26.65,30.39875,69.0833,9.000914,7393 637 | 636,4,1,9,0,5,1,2,25.385847,29.29315,69,10.999993,7415 638 | 637,4,1,9,0,6,0,1,22.2425,26.5148,54.2917,15.249468,8555 639 | 638,4,1,9,0,0,0,1,21.593347,25.88315,58.3333,9.042186,6889 640 | 639,4,1,10,0,1,1,2,21.354153,25.6,64.9167,6.0838814,6778 641 | 640,4,1,10,0,2,1,3,24.224153,27.11665,87.1667,6.999825,4639 642 | 641,4,1,10,0,3,1,2,26.9575,29.95665,79.375,4.4585686,7572 643 | 642,4,1,10,0,4,1,2,26.9575,30.39875,72.2917,7.875582,7328 644 | 643,4,1,10,0,5,1,1,25.215,29.00935,62.75,7.12545,8156 645 | 644,4,1,10,0,6,0,1,22.720847,26.92605,66.4167,17.957675,7965 646 | 645,4,1,10,0,0,0,2,17.049153,20.99065,70.8333,9.457854,3510 647 | 646,4,1,10,1,1,0,2,15.716653,19.3804,70.9583,12.708493,5478 648 | 647,4,1,10,0,2,1,2,18.313347,21.9056,76.1667,12.7501,6392 649 | 648,4,1,10,0,3,1,1,21.080847,25.1571,63.0833,12.584007,7691 650 | 649,4,1,10,0,4,1,1,17.835,21.55835,46.3333,12.166932,7570 651 | 650,4,1,10,0,5,1,1,17.9375,21.65355,53.9167,15.751164,7282 652 | 651,4,1,10,0,6,0,1,16.126653,19.5698,49.4583,9.791514,7109 653 | 652,4,1,10,0,0,0,1,21.388347,25.4102,64.0417,18.667004,6639 654 | 653,4,1,10,0,1,1,2,23.028347,26.9575,70.75,19.834479,5875 655 | 654,4,1,10,0,2,1,1,19.201653,23.0423,55.8333,12.208807,7534 656 | 655,4,1,10,0,3,1,1,18.689153,22.5054,69.2917,6.791857,7461 657 | 656,4,1,10,0,4,1,2,21.4225,25.63125,72.8333,15.874779,7509 658 | 657,4,1,10,0,5,1,2,23.096653,26.8948,81.5,9.041918,5424 659 | 658,4,1,10,0,6,0,1,19.850847,23.6421,57.2917,7.874979,8090 660 | 659,4,1,10,0,0,0,1,19.030847,22.82145,51,11.125618,6824 661 | 660,4,1,10,0,1,1,1,19.9875,24.1471,56.8333,5.4593811,7058 662 | 661,4,1,10,0,2,1,1,22.310847,26.5152,64.1667,6.3345686,7466 663 | 662,4,1,10,0,3,1,1,24.0875,27.93605,63.625,4.8762064,7693 664 | 663,4,1,10,0,4,1,2,22.55,26.4844,80.0417,8.333125,7359 665 | 664,4,1,10,0,5,1,2,22.379153,26.1375,80.7083,8.875289,7444 666 | 665,4,1,10,0,6,0,2,21.73,25.75665,72,15.791364,7852 667 
| 666,4,1,10,0,0,0,2,19.5775,23.38855,69.4583,26.666536,4459 668 | 667,4,1,10,0,1,1,3,18.04,21.97,88,23.9994,22 669 | 668,4,1,10,0,2,1,2,13.045462,15.49545,82.5455,14.271603,1096 670 | 669,4,1,10,0,3,1,2,14.6575,18.055,66.6667,11.166689,5566 671 | 670,4,1,11,0,4,1,2,14.999153,18.4971,58.1667,10.542182,5986 672 | 671,4,1,11,0,5,1,1,14.555,17.8021,52.2083,17.833725,5847 673 | 672,4,1,11,0,6,0,2,14.076653,16.1923,49.125,18.125443,5138 674 | 673,4,1,11,0,0,0,1,13.359153,16.4769,53.2917,12.000236,5107 675 | 674,4,1,11,0,1,1,1,13.085847,15.40375,49.4167,15.833775,5259 676 | 675,4,1,11,0,2,1,1,11.514153,14.07835,56.7083,11.625371,5686 677 | 676,4,1,11,0,3,1,2,12.129153,13.73105,54.75,20.375236,5035 678 | 677,4,1,11,0,4,1,1,14.439134,17.09455,33.3478,23.304945,5315 679 | 678,4,1,11,0,5,1,1,14.828347,17.77065,54.0833,14.375386,5992 680 | 679,4,1,11,0,6,0,1,15.955847,19.69685,64.5417,3.8756686,6536 681 | 680,4,1,11,0,0,0,1,17.254153,21.08565,65.9167,8.5425,6852 682 | 681,4,1,11,1,1,0,1,19.885,23.76915,74.1667,11.625639,6269 683 | 682,4,1,11,0,2,1,2,14.076653,16.16125,66.2917,22.917082,4094 684 | 683,4,1,11,0,3,1,1,11.855847,14.07815,55.2083,13.374875,5495 685 | 684,4,1,11,0,4,1,2,13.188347,16.2246,62.0417,10.250129,5445 686 | 685,4,1,11,0,5,1,1,14.145,17.3602,52.4583,11.458675,5698 687 | 686,4,1,11,0,6,0,1,13.325,16.31915,54.5417,12.041843,5629 688 | 687,4,1,11,0,0,0,1,14.0425,16.8873,69.2917,15.250004,4669 689 | 688,4,1,11,0,1,1,2,15.614153,18.78105,62.3333,15.749489,5499 690 | 689,4,1,11,0,2,1,2,15.340847,19.03335,68.5,5.542575,5634 691 | 690,4,1,11,0,3,1,1,14.486653,18.2446,61.375,6.917482,5146 692 | 691,4,1,11,1,4,0,1,13.94,17.51855,58.0417,3.5423436,2425 693 | 692,4,1,11,0,5,1,1,15.101653,18.93895,56.875,9.917407,3910 694 | 693,4,1,11,0,6,0,1,11.411653,12.4371,40.4583,25.250357,2277 695 | 694,4,1,11,0,0,0,1,10.079153,12.87915,46.8333,10.0835,2424 696 | 695,4,1,11,0,1,1,1,12.846653,16.9502,53.5417,3.12555,5087 697 | 696,4,1,11,0,2,1,2,11.958347,14.0779,78.6667,15.916654,3959 698 | 697,4,1,11,0,3,1,1,12.163347,14.4881,50.625,14.125007,5260 699 | 698,4,1,11,0,4,1,1,11.51567,14.9211,55.5652,7.739974,5323 700 | 699,4,1,11,0,5,1,1,12.231653,16.19335,64.9583,3.9175436,5668 701 | 700,4,1,12,0,6,0,2,12.231653,15.8452,80.6667,4.0001814,5191 702 | 701,4,1,12,0,0,0,2,14.2475,17.9604,82.3333,8.333393,4649 703 | 702,4,1,12,0,1,1,1,18.5525,22.7898,76.75,5.5422936,6234 704 | 703,4,1,12,0,2,1,1,19.509153,23.4527,73.375,11.666643,6606 705 | 704,4,1,12,0,3,1,1,17.971653,21.4006,48.5,21.709407,5729 706 | 705,4,1,12,0,4,1,1,10.489153,12.9102,50.875,11.708518,5375 707 | 706,4,1,12,0,5,1,2,13.154153,16.0979,76.4167,8.7502,5008 708 | 707,4,1,12,0,6,0,2,15.648347,19.4754,91.125,6.792393,5582 709 | 708,4,1,12,0,0,0,2,15.750847,19.5073,90.5417,10.584325,3228 710 | 709,4,1,12,0,1,1,2,17.869153,21.77875,92.5,12.750636,5170 711 | 710,4,1,12,0,2,1,2,14.486653,16.91815,59.6667,19.834479,5501 712 | 711,4,1,12,0,3,1,2,12.1975,14.8669,53.8333,10.916779,5319 713 | 712,4,1,12,0,4,1,1,12.129153,14.7094,48.5833,11.666643,5532 714 | 713,4,1,12,0,5,1,1,11.548347,14.7096,64.2917,8.792343,5611 715 | 714,4,1,12,0,6,0,1,13.290847,16.91915,65.0417,7.12545,5047 716 | 715,4,1,12,0,0,0,2,14.8625,18.4969,83.875,6.749714,3786 717 | 716,4,1,12,0,1,1,2,16.126653,20.075,90.7083,6.5833061,4585 718 | 717,4,1,12,0,2,1,1,16.844153,20.4854,66.625,14.834068,5557 719 | 718,4,1,12,0,3,1,1,13.6325,17.1081,62.5417,12.334164,5267 720 | 719,4,1,12,0,4,1,2,13.53,16.76085,66.7917,8.875021,4128 721 | 720,1,1,12,0,5,1,2,13.393347,15.08835,55.6667,25.083661,3623 
722 | 721,1,1,12,0,6,0,1,10.899153,11.80565,44.125,27.292182,1749 723 | 722,1,1,12,0,0,0,1,10.079153,12.97355,51.5417,8.916561,1787 724 | 723,1,1,12,0,1,1,2,9.483464,12.945,79.1304,5.1744368,920 725 | 724,1,1,12,1,2,0,2,11.943464,14.72325,73.4783,11.304642,1013 726 | 725,1,1,12,0,3,1,3,9.976653,11.01665,82.3333,21.208582,441 727 | 726,1,1,12,0,4,1,2,10.420847,11.3321,65.2917,23.458911,2114 728 | 727,1,1,12,0,5,1,2,10.386653,12.7523,59,10.416557,3095 729 | 728,1,1,12,0,6,0,2,10.386653,12.12,75.2917,8.333661,1341 730 | 729,1,1,12,0,0,0,1,10.489153,11.585,48.3333,23.500518,1796 731 | 730,1,1,12,0,1,1,2,8.849153,11.17435,57.75,10.374682,2729 732 | -------------------------------------------------------------------------------- /datasets/heart.csv: -------------------------------------------------------------------------------- 1 | X0,X1,X2,X3,X4,X5,X6,X7,X8,X9,X10,X11,X12,Y 2 | 70,1,4,130,322,0,2,109,0,24,2,3,3,1 3 | 67,0,3,115,564,0,2,160,0,16,2,0,7,0 4 | 57,1,2,124,261,0,0,141,0,3,1,0,7,1 5 | 64,1,4,128,263,0,0,105,1,2,2,1,7,0 6 | 74,0,2,120,269,0,2,121,1,2,1,1,3,0 7 | 65,1,4,120,177,0,0,140,0,4,1,0,7,0 8 | 56,1,3,130,256,1,2,142,1,6,2,1,6,1 9 | 59,1,4,110,239,0,2,142,1,12,2,1,7,1 10 | 60,1,4,140,293,0,2,170,0,12,2,2,7,1 11 | 63,0,4,150,407,0,2,154,0,4,2,3,7,1 12 | 59,1,4,135,234,0,0,161,0,5,2,0,7,0 13 | 53,1,4,142,226,0,2,111,1,0,1,0,7,0 14 | 44,1,3,140,235,0,2,180,0,0,1,0,3,0 15 | 61,1,1,134,234,0,0,145,0,26,2,2,3,1 16 | 57,0,4,128,303,0,2,159,0,0,1,1,3,0 17 | 71,0,4,112,149,0,0,125,0,16,2,0,3,0 18 | 46,1,4,140,311,0,0,120,1,18,2,2,7,1 19 | 53,1,4,140,203,1,2,155,1,31,3,0,7,1 20 | 64,1,1,110,211,0,2,144,1,18,2,0,3,0 21 | 40,1,1,140,199,0,0,178,1,14,1,0,7,0 22 | 67,1,4,120,229,0,2,129,1,26,2,2,7,1 23 | 48,1,2,130,245,0,2,180,0,2,2,0,3,0 24 | 43,1,4,115,303,0,0,181,0,12,2,0,3,0 25 | 47,1,4,112,204,0,0,143,0,1,1,0,3,0 26 | 54,0,2,132,288,1,2,159,1,0,1,1,3,0 27 | 48,0,3,130,275,0,0,139,0,2,1,0,3,0 28 | 46,0,4,138,243,0,2,152,1,0,2,0,3,0 29 | 51,0,3,120,295,0,2,157,0,6,1,0,3,0 30 | 58,1,3,112,230,0,2,165,0,25,2,1,7,1 31 | 71,0,3,110,265,1,2,130,0,0,1,1,3,0 32 | 57,1,3,128,229,0,2,150,0,4,2,1,7,1 33 | 66,1,4,160,228,0,2,138,0,23,1,0,6,0 34 | 37,0,3,120,215,0,0,170,0,0,1,0,3,0 35 | 59,1,4,170,326,0,2,140,1,34,3,0,7,1 36 | 50,1,4,144,200,0,2,126,1,9,2,0,7,1 37 | 48,1,4,130,256,1,2,150,1,0,1,2,7,1 38 | 61,1,4,140,207,0,2,138,1,19,1,1,7,1 39 | 59,1,1,160,273,0,2,125,0,0,1,0,3,1 40 | 42,1,3,130,180,0,0,150,0,0,1,0,3,0 41 | 48,1,4,122,222,0,2,186,0,0,1,0,3,0 42 | 40,1,4,152,223,0,0,181,0,0,1,0,7,1 43 | 62,0,4,124,209,0,0,163,0,0,1,0,3,0 44 | 44,1,3,130,233,0,0,179,1,4,1,0,3,0 45 | 46,1,2,101,197,1,0,156,0,0,1,0,7,0 46 | 59,1,3,126,218,1,0,134,0,22,2,1,6,1 47 | 58,1,3,140,211,1,2,165,0,0,1,0,3,0 48 | 49,1,3,118,149,0,2,126,0,8,1,3,3,1 49 | 44,1,4,110,197,0,2,177,0,0,1,1,3,1 50 | 66,1,2,160,246,0,0,120,1,0,2,3,6,1 51 | 65,0,4,150,225,0,2,114,0,1,2,3,7,1 52 | 42,1,4,136,315,0,0,125,1,18,2,0,6,1 53 | 52,1,2,128,205,1,0,184,0,0,1,0,3,0 54 | 65,0,3,140,417,1,2,157,0,8,1,1,3,0 55 | 63,0,2,140,195,0,0,179,0,0,1,2,3,0 56 | 45,0,2,130,234,0,2,175,0,6,2,0,3,0 57 | 41,0,2,105,198,0,0,168,0,0,1,1,3,0 58 | 61,1,4,138,166,0,2,125,1,36,2,1,3,1 59 | 60,0,3,120,178,1,0,96,0,0,1,0,3,0 60 | 59,0,4,174,249,0,0,143,1,0,2,0,3,1 61 | 62,1,2,120,281,0,2,103,0,14,2,1,7,1 62 | 57,1,3,150,126,1,0,173,0,2,1,1,7,0 63 | 51,0,4,130,305,0,0,142,1,12,2,0,7,1 64 | 44,1,3,120,226,0,0,169,0,0,1,0,3,0 65 | 60,0,1,150,240,0,0,171,0,9,1,0,3,0 66 | 63,1,1,145,233,1,2,150,0,23,3,0,6,0 67 | 57,1,4,150,276,0,2,112,1,6,2,1,6,1 68 | 
51,1,4,140,261,0,2,186,1,0,1,0,3,0 69 | 58,0,2,136,319,1,2,152,0,0,1,2,3,1 70 | 44,0,3,118,242,0,0,149,0,3,2,1,3,0 71 | 47,1,3,108,243,0,0,152,0,0,1,0,3,1 72 | 61,1,4,120,260,0,0,140,1,36,2,1,7,1 73 | 57,0,4,120,354,0,0,163,1,6,1,0,3,0 74 | 70,1,2,156,245,0,2,143,0,0,1,0,3,0 75 | 76,0,3,140,197,0,1,116,0,11,2,0,3,0 76 | 67,0,4,106,223,0,0,142,0,3,1,2,3,0 77 | 45,1,4,142,309,0,2,147,1,0,2,3,7,1 78 | 45,1,4,104,208,0,2,148,1,3,2,0,3,0 79 | 39,0,3,94,199,0,0,179,0,0,1,0,3,0 80 | 42,0,3,120,209,0,0,173,0,0,2,0,3,0 81 | 56,1,2,120,236,0,0,178,0,8,1,0,3,0 82 | 58,1,4,146,218,0,0,105,0,2,2,1,7,1 83 | 35,1,4,120,198,0,0,130,1,16,2,0,7,1 84 | 58,1,4,150,270,0,2,111,1,8,1,0,7,1 85 | 41,1,3,130,214,0,2,168,0,2,2,0,3,0 86 | 57,1,4,110,201,0,0,126,1,15,2,0,6,0 87 | 42,1,1,148,244,0,2,178,0,8,1,2,3,0 88 | 62,1,2,128,208,1,2,140,0,0,1,0,3,0 89 | 59,1,1,178,270,0,2,145,0,42,3,0,7,0 90 | 41,0,2,126,306,0,0,163,0,0,1,0,3,0 91 | 50,1,4,150,243,0,2,128,0,26,2,0,7,1 92 | 59,1,2,140,221,0,0,164,1,0,1,0,3,0 93 | 61,0,4,130,330,0,2,169,0,0,1,0,3,1 94 | 54,1,4,124,266,0,2,109,1,22,2,1,7,1 95 | 54,1,4,110,206,0,2,108,1,0,2,1,3,1 96 | 52,1,4,125,212,0,0,168,0,1,1,2,7,1 97 | 47,1,4,110,275,0,2,118,1,1,2,1,3,1 98 | 66,1,4,120,302,0,2,151,0,4,2,0,3,0 99 | 58,1,4,100,234,0,0,156,0,1,1,1,7,1 100 | 64,0,3,140,313,0,0,133,0,2,1,0,7,0 101 | 50,0,2,120,244,0,0,162,0,11,1,0,3,0 102 | 44,0,3,108,141,0,0,175,0,6,2,0,3,0 103 | 67,1,4,120,237,0,0,71,0,1,2,0,3,1 104 | 49,0,4,130,269,0,0,163,0,0,1,0,3,0 105 | 57,1,4,165,289,1,2,124,0,1,2,3,7,1 106 | 63,1,4,130,254,0,2,147,0,14,2,1,7,1 107 | 48,1,4,124,274,0,2,166,0,5,2,0,7,1 108 | 51,1,3,100,222,0,0,143,1,12,2,0,3,0 109 | 60,0,4,150,258,0,2,157,0,26,2,2,7,1 110 | 59,1,4,140,177,0,0,162,1,0,1,1,7,1 111 | 45,0,2,112,160,0,0,138,0,0,2,0,3,0 112 | 55,0,4,180,327,0,1,117,1,34,2,0,3,1 113 | 41,1,2,110,235,0,0,153,0,0,1,0,3,0 114 | 60,0,4,158,305,0,2,161,0,0,1,0,3,1 115 | 54,0,3,135,304,1,0,170,0,0,1,0,3,0 116 | 42,1,2,120,295,0,0,162,0,0,1,0,3,0 117 | 49,0,2,134,271,0,0,162,0,0,2,0,3,0 118 | 46,1,4,120,249,0,2,144,0,8,1,0,7,1 119 | 56,0,4,200,288,1,2,133,1,4,3,2,7,1 120 | 66,0,1,150,226,0,0,114,0,26,3,0,3,0 121 | 56,1,4,130,283,1,2,103,1,16,3,0,7,1 122 | 49,1,3,120,188,0,0,139,0,2,2,3,7,1 123 | 54,1,4,122,286,0,2,116,1,32,2,2,3,1 124 | 57,1,4,152,274,0,0,88,1,12,2,1,7,1 125 | 65,0,3,160,360,0,2,151,0,8,1,0,3,0 126 | 54,1,3,125,273,0,2,152,0,5,3,1,3,0 127 | 54,0,3,160,201,0,0,163,0,0,1,1,3,0 128 | 62,1,4,120,267,0,0,99,1,18,2,2,7,1 129 | 52,0,3,136,196,0,2,169,0,1,2,0,3,0 130 | 52,1,2,134,201,0,0,158,0,8,1,1,3,0 131 | 60,1,4,117,230,1,0,160,1,14,1,2,7,1 132 | 63,0,4,108,269,0,0,169,1,18,2,2,3,1 133 | 66,1,4,112,212,0,2,132,1,1,1,1,3,1 134 | 42,1,4,140,226,0,0,178,0,0,1,0,3,0 135 | 64,1,4,120,246,0,2,96,1,22,3,1,3,1 136 | 54,1,3,150,232,0,2,165,0,16,1,0,7,0 137 | 46,0,3,142,177,0,2,160,1,14,3,0,3,0 138 | 67,0,3,152,277,0,0,172,0,0,1,1,3,0 139 | 56,1,4,125,249,1,2,144,1,12,2,1,3,1 140 | 34,0,2,118,210,0,0,192,0,7,1,0,3,0 141 | 57,1,4,132,207,0,0,168,1,0,1,0,7,0 142 | 64,1,4,145,212,0,2,132,0,2,2,2,6,1 143 | 59,1,4,138,271,0,2,182,0,0,1,0,3,0 144 | 50,1,3,140,233,0,0,163,0,6,2,1,7,1 145 | 51,1,1,125,213,0,2,125,1,14,1,1,3,0 146 | 54,1,2,192,283,0,2,195,0,0,1,1,7,1 147 | 53,1,4,123,282,0,0,95,1,2,2,2,7,1 148 | 52,1,4,112,230,0,0,160,0,0,1,1,3,1 149 | 40,1,4,110,167,0,2,114,1,2,2,0,7,1 150 | 58,1,3,132,224,0,2,173,0,32,1,2,7,1 151 | 41,0,3,112,268,0,2,172,1,0,1,0,3,0 152 | 41,1,3,112,250,0,0,179,0,0,1,0,3,0 153 | 50,0,3,120,219,0,0,158,0,16,2,0,3,0 154 | 54,0,3,108,267,0,2,167,0,0,1,0,3,0 155 
| 64,0,4,130,303,0,0,122,0,2,2,2,3,0 156 | 51,0,3,130,256,0,2,149,0,5,1,0,3,0 157 | 46,0,2,105,204,0,0,172,0,0,1,0,3,0 158 | 55,1,4,140,217,0,0,111,1,56,3,0,7,1 159 | 45,1,2,128,308,0,2,170,0,0,1,0,3,0 160 | 56,1,1,120,193,0,2,162,0,19,2,0,7,0 161 | 66,0,4,178,228,1,0,165,1,1,2,2,7,1 162 | 38,1,1,120,231,0,0,182,1,38,2,0,7,1 163 | 62,0,4,150,244,0,0,154,1,14,2,0,3,1 164 | 55,1,2,130,262,0,0,155,0,0,1,0,3,0 165 | 58,1,4,128,259,0,2,130,1,3,2,2,7,1 166 | 43,1,4,110,211,0,0,161,0,0,1,0,7,0 167 | 64,0,4,180,325,0,0,154,1,0,1,0,3,0 168 | 50,0,4,110,254,0,2,159,0,0,1,0,3,0 169 | 53,1,3,130,197,1,2,152,0,12,3,0,3,0 170 | 45,0,4,138,236,0,2,152,1,2,2,0,3,0 171 | 65,1,1,138,282,1,2,174,0,14,2,1,3,1 172 | 69,1,1,160,234,1,2,131,0,1,2,1,3,0 173 | 69,1,3,140,254,0,2,146,0,2,2,3,7,1 174 | 67,1,4,100,299,0,2,125,1,9,2,2,3,1 175 | 68,0,3,120,211,0,2,115,0,15,2,0,3,0 176 | 34,1,1,118,182,0,2,174,0,0,1,0,3,0 177 | 62,0,4,138,294,1,0,106,0,19,2,3,3,1 178 | 51,1,4,140,298,0,0,122,1,42,2,3,7,1 179 | 46,1,3,150,231,0,0,147,0,36,2,0,3,1 180 | 67,1,4,125,254,1,0,163,0,2,2,2,7,1 181 | 50,1,3,129,196,0,0,163,0,0,1,0,3,0 182 | 42,1,3,120,240,1,0,194,0,8,3,0,7,0 183 | 56,0,4,134,409,0,2,150,1,19,2,2,7,1 184 | 41,1,4,110,172,0,2,158,0,0,1,0,7,1 185 | 42,0,4,102,265,0,2,122,0,6,2,0,3,0 186 | 53,1,3,130,246,1,2,173,0,0,1,3,3,0 187 | 43,1,3,130,315,0,0,162,0,19,1,1,3,0 188 | 56,1,4,132,184,0,2,105,1,21,2,1,6,1 189 | 52,1,4,108,233,1,0,147,0,1,1,3,7,0 190 | 62,0,4,140,394,0,2,157,0,12,2,0,3,0 191 | 70,1,3,160,269,0,0,112,1,29,2,1,7,1 192 | 54,1,4,140,239,0,0,160,0,12,1,0,3,0 193 | 70,1,4,145,174,0,0,125,1,26,3,0,7,1 194 | 54,1,2,108,309,0,0,156,0,0,1,0,7,0 195 | 35,1,4,126,282,0,2,156,1,0,1,0,7,1 196 | 48,1,3,124,255,1,0,175,0,0,1,2,3,0 197 | 55,0,2,135,250,0,2,161,0,14,2,0,3,0 198 | 58,0,4,100,248,0,2,122,0,1,2,0,3,0 199 | 54,0,3,110,214,0,0,158,0,16,2,0,3,0 200 | 69,0,1,140,239,0,0,151,0,18,1,2,3,0 201 | 77,1,4,125,304,0,2,162,1,0,1,3,3,1 202 | 68,1,3,118,277,0,0,151,0,1,1,1,7,0 203 | 58,1,4,125,300,0,2,171,0,0,1,2,7,1 204 | 60,1,4,125,258,0,2,141,1,28,2,1,7,1 205 | 51,1,4,140,299,0,0,173,1,16,1,0,7,1 206 | 55,1,4,160,289,0,2,145,1,8,2,1,7,1 207 | 52,1,1,152,298,1,0,178,0,12,2,0,7,0 208 | 60,0,3,102,318,0,0,160,0,0,1,1,3,0 209 | 58,1,3,105,240,0,2,154,1,6,2,0,7,0 210 | 64,1,3,125,309,0,0,131,1,18,2,0,7,1 211 | 37,1,3,130,250,0,0,187,0,35,3,0,3,0 212 | 59,1,1,170,288,0,2,159,0,2,2,0,7,1 213 | 51,1,3,125,245,1,2,166,0,24,2,0,3,0 214 | 43,0,3,122,213,0,0,165,0,2,2,0,3,0 215 | 58,1,4,128,216,0,2,131,1,22,2,3,7,1 216 | 29,1,2,130,204,0,2,202,0,0,1,0,3,0 217 | 41,0,2,130,204,0,2,172,0,14,1,0,3,0 218 | 63,0,3,135,252,0,2,172,0,0,1,0,3,0 219 | 51,1,3,94,227,0,0,154,1,0,1,1,7,0 220 | 54,1,3,120,258,0,2,147,0,4,2,0,7,0 221 | 44,1,2,120,220,0,0,170,0,0,1,0,3,0 222 | 54,1,4,110,239,0,0,126,1,28,2,1,7,1 223 | 65,1,4,135,254,0,2,127,0,28,2,1,7,1 224 | 57,1,3,150,168,0,0,174,0,16,1,0,3,0 225 | 63,1,4,130,330,1,2,132,1,18,1,3,7,1 226 | 35,0,4,138,183,0,0,182,0,14,1,0,3,0 227 | 41,1,2,135,203,0,0,132,0,0,2,0,6,0 228 | 62,0,3,130,263,0,0,97,0,12,2,1,7,1 229 | 43,0,4,132,341,1,2,136,1,3,2,0,7,1 230 | 58,0,1,150,283,1,2,162,0,1,1,0,3,0 231 | 52,1,1,118,186,0,2,190,0,0,2,0,6,0 232 | 61,0,4,145,307,0,2,146,1,1,2,0,7,1 233 | 39,1,4,118,219,0,0,140,0,12,2,0,7,1 234 | 45,1,4,115,260,0,2,185,0,0,1,0,3,0 235 | 52,1,4,128,255,0,0,161,1,0,1,1,7,1 236 | 62,1,3,130,231,0,0,146,0,18,2,3,7,0 237 | 62,0,4,160,164,0,2,145,0,62,3,3,7,1 238 | 53,0,4,138,234,0,2,160,0,0,1,0,3,0 239 | 43,1,4,120,177,0,2,120,1,25,2,0,7,1 240 | 
47,1,3,138,257,0,2,156,0,0,1,0,3,0 241 | 52,1,2,120,325,0,0,172,0,2,1,0,3,0 242 | 68,1,3,180,274,1,2,150,1,16,2,0,7,1 243 | 39,1,3,140,321,0,2,182,0,0,1,0,3,0 244 | 53,0,4,130,264,0,2,143,0,4,2,0,3,0 245 | 62,0,4,140,268,0,2,160,0,36,3,2,3,1 246 | 51,0,3,140,308,0,2,142,0,15,1,1,3,0 247 | 60,1,4,130,253,0,0,144,1,14,1,1,7,1 248 | 65,1,4,110,248,0,2,158,0,6,1,2,6,1 249 | 65,0,3,155,269,0,0,148,0,8,1,0,3,0 250 | 60,1,3,140,185,0,2,155,0,3,2,0,3,1 251 | 60,1,4,145,282,0,2,142,1,28,2,2,7,1 252 | 54,1,4,120,188,0,0,113,0,14,2,1,7,1 253 | 44,1,2,130,219,0,2,188,0,0,1,0,3,0 254 | 44,1,4,112,290,0,2,153,0,0,1,1,3,1 255 | 51,1,3,110,175,0,0,123,0,6,1,0,3,0 256 | 59,1,3,150,212,1,0,157,0,16,1,0,3,0 257 | 71,0,2,160,302,0,0,162,0,4,1,2,3,0 258 | 61,1,3,150,243,1,0,137,1,1,2,0,3,0 259 | 55,1,4,132,353,0,0,132,1,12,2,1,7,1 260 | 64,1,3,140,335,0,0,158,0,0,1,0,3,1 261 | 43,1,4,150,247,0,0,171,0,15,1,0,3,0 262 | 58,0,3,120,340,0,0,172,0,0,1,0,3,0 263 | 60,1,4,130,206,0,2,132,1,24,2,2,7,1 264 | 58,1,2,120,284,0,2,160,0,18,2,0,3,1 265 | 49,1,2,130,266,0,0,171,0,6,1,0,3,0 266 | 48,1,2,110,229,0,0,168,0,1,3,0,7,1 267 | 52,1,3,172,199,1,0,162,0,5,1,0,7,0 268 | 44,1,2,120,263,0,0,173,0,0,1,0,7,0 269 | 56,0,2,140,294,0,2,153,0,13,2,0,3,0 270 | 57,1,4,140,192,0,0,148,0,4,2,0,6,0 271 | 67,1,4,160,286,0,2,108,1,15,2,3,3,1 272 | -------------------------------------------------------------------------------- /m3gp/GeneticOperators.py: -------------------------------------------------------------------------------- 1 | from .Individual import Individual 2 | from .Node import Node 3 | 4 | # 5 | # By using this file, you are agreeing to this product's EULA 6 | # 7 | # This product can be obtained in https://github.com/jespb/Python-M3GP 8 | # 9 | # Copyright ©2019-2025 J. E. Batista 10 | # 11 | 12 | 13 | def tournament(rng, population,n): 14 | ''' 15 | Selects "n" Individuals from the population and return a 16 | single Individual. 17 | 18 | Parameters: 19 | population (list): A list of Individuals, sorted from best to worse. 20 | ''' 21 | candidates = [rng.randint(0,len(population)-1) for i in range(n)] 22 | return population[min(candidates)] 23 | 24 | 25 | def getElite(population,n): 26 | ''' 27 | Returns the "n" best Individuals in the population. 28 | 29 | Parameters: 30 | population (list): A list of Individuals, sorted from best to worse. 31 | ''' 32 | return population[:n] 33 | 34 | 35 | def getOffspring(rng, population, tournament_size, dim_min, dim_max): 36 | ''' 37 | Genetic Operator: Selects a genetic operator and returns a list with the 38 | offspring Individuals. The crossover GOs return two Individuals and the 39 | mutation GO returns one individual. Individuals over the LIMIT_DEPTH are 40 | then excluded, making it possible for this method to return an empty list. 41 | 42 | Parameters: 43 | population (list): A list of Individuals, sorted from best to worse. 
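Note (summary of the selection logic below): crossover is applied with probability 0.5, choosing uniformly between STXO and M3XO; otherwise one of the three mutation operators (STMUT, M3ADD, M3REM) is chosen uniformly at random.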
44 | ''' 45 | isCross = rng.random()<0.5 46 | desc = None 47 | 48 | availableXO = [0,1] 49 | availableMT = [0,1,2] 50 | 51 | if isCross: 52 | whichXO = availableXO[ rng.randint(0,len(availableXO)-1 ) ] 53 | if whichXO == 0: 54 | desc = STXO(rng, population, tournament_size) 55 | elif whichXO == 1: 56 | desc = M3XO(rng, population, tournament_size) 57 | else: 58 | whichMut = availableMT[ rng.randint(0,len(availableMT)-1 ) ] 59 | if whichMut == 0: 60 | desc = STMUT(rng, population, tournament_size) 61 | elif whichMut == 1: 62 | desc = M3ADD(rng, population, tournament_size, dim_max) 63 | elif whichMut == 2: 64 | desc = M3REM(rng, population, tournament_size, dim_min) 65 | return desc 66 | 67 | 68 | def discardDeep(population, limit): 69 | ret = [] 70 | for ind in population: 71 | if ind.getDepth() <= limit: 72 | ret.append(ind) 73 | return ret 74 | 75 | 76 | def STXO(rng, population, tournament_size): 77 | ''' 78 | Randomly selects one node from each of two individuals; swaps the node and 79 | sub-nodes; and returns the two new Individuals as the offspring. 80 | 81 | Parameters: 82 | population (list): A list of Individuals, sorted from best to worse. 83 | ''' 84 | ind1 = tournament(rng, population, tournament_size) 85 | ind2 = tournament(rng, population, tournament_size) 86 | 87 | d1 = ind1.getDimensions() 88 | d2 = ind2.getDimensions() 89 | 90 | r1 = rng.randint(0,len(d1)-1) 91 | r2 = rng.randint(0,len(d2)-1) 92 | 93 | n1 = d1[r1].getRandomNode(rng) 94 | n2 = d2[r2].getRandomNode(rng) 95 | 96 | n1.swap(n2) 97 | 98 | ret = [] 99 | for d in [d1,d2]: 100 | i = Individual(ind1.operators, ind1.terminals, ind1.max_depth, ind1.model_class, ind1.fitnessType) 101 | i.copy(d) 102 | ret.append(i) 103 | return ret 104 | 105 | def M3XO(rng, population, tournament_size): 106 | ''' 107 | Randomly selects one dimension from each of two individuals; swaps the 108 | dimensions; and returns the two new Individuals as the offspring. 109 | 110 | Parameters: 111 | population (list): A list of Individuals, sorted from best to worse. 112 | ''' 113 | ind1 = tournament(rng, population, tournament_size) 114 | ind2 = tournament(rng, population, tournament_size) 115 | 116 | d1 = ind1.getDimensions() 117 | d2 = ind2.getDimensions() 118 | 119 | r1 = rng.randint(0,len(d1)-1) 120 | r2 = rng.randint(0,len(d2)-1) 121 | 122 | d1.append(d2[r2]) 123 | d2.append(d1[r1]) 124 | d1.pop(r1) 125 | d2.pop(r2) 126 | 127 | ret = [] 128 | for d in [d1,d2]: 129 | i = Individual(ind1.operators, ind1.terminals, ind1.max_depth, ind1.model_class, ind1.fitnessType) 130 | i.copy(d) 131 | ret.append(i) 132 | return ret 133 | 134 | def STMUT(rng, population, tournament_size): 135 | ''' 136 | Randomly selects one node from a single individual; swaps the node with a 137 | new, node generated using Grow; and returns the new Individual as the offspring. 138 | 139 | Parameters: 140 | population (list): A list of Individuals, sorted from best to worse. 
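Note: unlike the crossover operators, which return two offspring, this operator returns a single new Individual inside a one-element list.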
141 | ''' 142 | ind1 = tournament(rng, population, tournament_size) 143 | d1 = ind1.getDimensions() 144 | r1 = rng.randint(0,len(d1)-1) 145 | n1 = d1[r1].getRandomNode(rng) 146 | n = Node() 147 | n.create(rng, ind1.operators, ind1.terminals, ind1.max_depth) 148 | n1.swap(n) 149 | 150 | 151 | ret = [] 152 | i = Individual(ind1.operators, ind1.terminals, ind1.max_depth, ind1.model_class, ind1.fitnessType) 153 | i.copy(d1) 154 | ret.append(i) 155 | return ret 156 | 157 | def M3ADD(rng, population, tournament_size, dim_max): 158 | ''' 159 | Randomly generates a new node using Grow; this node is added to the list of 160 | dimensions; the new Individual is returned as the offspring. 161 | 162 | Parameters: 163 | population (list): A list of Individuals, sorted from best to worse. 164 | ''' 165 | ind1 = tournament(rng, population, tournament_size) 166 | ret = [] 167 | 168 | if ind1.getNumberOfDimensions() < dim_max: 169 | d1 = ind1.getDimensions() 170 | n = Node() 171 | n.create(rng, ind1.operators, ind1.terminals, ind1.max_depth) 172 | d1.append(n) 173 | 174 | i = Individual(ind1.operators, ind1.terminals, ind1.max_depth, ind1.model_class, ind1.fitnessType) 175 | i.copy(d1) 176 | ret.append(i) 177 | 178 | return ret 179 | 180 | def M3REM(rng, population, tournament_size, dim_min): 181 | ''' 182 | Randomly selects one dimensions from a single individual; that dimensions is 183 | removed; the new Individual is returned as the offspring. 184 | 185 | Parameters: 186 | population (list): A list of Individuals, sorted from best to worse. 187 | ''' 188 | ind1 = tournament(rng, population, tournament_size) 189 | ret = [] 190 | 191 | if ind1.getNumberOfDimensions() > dim_min: 192 | d1 = ind1.getDimensions() 193 | r1 = rng.randint(0,len(d1)-1) 194 | d1.pop(r1) 195 | 196 | i = Individual(ind1.operators, ind1.terminals, ind1.max_depth, ind1.model_class, ind1.fitnessType) 197 | i.copy(d1) 198 | ret.append(i) 199 | 200 | return ret 201 | -------------------------------------------------------------------------------- /m3gp/Individual.py: -------------------------------------------------------------------------------- 1 | from .Node import Node 2 | 3 | import pandas as pd 4 | 5 | from copy import deepcopy 6 | 7 | from sklearn.metrics import accuracy_score, f1_score, cohen_kappa_score, mean_squared_error 8 | 9 | 10 | # 11 | # By using this file, you are agreeing to this product's EULA 12 | # 13 | # This product can be obtained in https://github.com/jespb/Python-M3GP 14 | # 15 | # Copyright ©2019-2025 J. E. 
Batista 16 | # 17 | 18 | class Individual: 19 | training_X = None 20 | training_Y = None 21 | 22 | operators = None 23 | terminals = None 24 | max_depth = None 25 | 26 | dimensions = None 27 | size = 0 28 | depth = 0 29 | 30 | trainingPredictions = None 31 | testPredictions = None 32 | fitness = None 33 | 34 | model = None 35 | 36 | def __init__(self, operators, terminals, max_depth, model_class=None, fitnessType="Accuracy"): 37 | self.operators = operators 38 | self.terminals = terminals 39 | self.max_depth = max_depth 40 | self.model_class = model_class 41 | self.fitnessType = fitnessType 42 | 43 | def create(self,rng, n_dims=1): 44 | self.dimensions = [] 45 | for i in range(n_dims): 46 | n = Node() 47 | n.create(rng, self.operators, self.terminals, self.max_depth, full=True) 48 | self.dimensions.append(n) 49 | 50 | def copy(self, dim): 51 | self.dimensions = dim 52 | 53 | 54 | 55 | def __gt__(self, other): 56 | sf = self.getFitness() 57 | sd = self.getNumberOfDimensions() 58 | ss = self.getSize() 59 | 60 | of = other.getFitness() 61 | od = other.getNumberOfDimensions() 62 | os = other.getSize() 63 | 64 | return (sf > of) or \ 65 | (sf == of and sd < od) or \ 66 | (sf == of and sd == od and ss < os) 67 | 68 | def __ge__(self, other): 69 | return self.getFitness() >= other.getFitness() 70 | 71 | def __str__(self): 72 | return ",".join([str(d) for d in self.dimensions]) 73 | 74 | 75 | def createModel(self): 76 | return deepcopy(self.model_class) 77 | 78 | def fit(self, Tr_x, Tr_y): 79 | ''' 80 | Trains the classifier which will be used in the fitness function 81 | ''' 82 | if self.model is None: 83 | self.training_X = Tr_x 84 | self.training_Y = Tr_y 85 | 86 | self.model = self.createModel() 87 | 88 | hyper_X = self.convert(Tr_x) 89 | 90 | self.model.fit(hyper_X,Tr_y) 91 | 92 | 93 | def getSize(self): 94 | ''' 95 | Returns the total number of nodes within an individual. 96 | ''' 97 | if not self.size: 98 | self.size = sum(n.getSize() for n in self.dimensions) 99 | return self.size 100 | 101 | def getDepth(self): 102 | ''' 103 | Returns the depth of individual. 104 | ''' 105 | if not self.depth: 106 | self.depth = max([dimension.getDepth() for dimension in self.dimensions]) 107 | return self.depth 108 | 109 | def getDimensions(self): 110 | ''' 111 | Returns a deep clone of the individual's list of dimensions. 112 | ''' 113 | ret = [] 114 | for dim in self.dimensions: 115 | ret.append(dim.clone()) 116 | return ret 117 | 118 | 119 | def getNumberOfDimensions(self): 120 | ''' 121 | Returns the total number of dimensions within the individual. 122 | ''' 123 | return len(self.dimensions) 124 | 125 | 126 | 127 | def getFitness(self, tr_x = None, tr_y = None): 128 | ''' 129 | Returns the individual's fitness. 
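Supported fitness types: "Accuracy", "MSE" (stored negated, so that higher is always better), "WAF" (weighted-average F-measure) and "2FOLD" (mean accuracy over a two-fold split of the training data).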
130 | ''' 131 | if self.fitness is None: 132 | if not tr_x is None: 133 | self.training_X = tr_x 134 | if not tr_y is None: 135 | self.training_Y = tr_y 136 | 137 | 138 | if self.fitnessType == "Accuracy": 139 | self.fit(self.training_X, self.training_Y) 140 | self.getTrainingPredictions() 141 | acc = accuracy_score(self.trainingPredictions, self.training_Y) 142 | self.fitness = acc 143 | 144 | if self.fitnessType == "MSE": 145 | self.fit(self.training_X, self.training_Y) 146 | self.getTrainingPredictions() 147 | mse = float(-1 * mean_squared_error(self.trainingPredictions, self.training_Y)) 148 | self.fitness = mse 149 | 150 | if self.fitnessType == "WAF": 151 | self.fit(self.training_X, self.training_Y) 152 | self.getTrainingPredictions() 153 | waf = f1_score(self.trainingPredictions, self.training_Y, average="weighted") 154 | self.fitness = waf 155 | 156 | if self.fitnessType == "2FOLD": 157 | hyper_X = self.convert(self.training_X) 158 | 159 | X1 = hyper_X.iloc[:len(hyper_X)//2] 160 | Y1 = self.training_Y[:len(self.training_Y)//2] 161 | X2 = hyper_X.iloc[len(hyper_X)//2:] 162 | Y2 = self.training_Y[len(self.training_Y)//2:] 163 | 164 | M1 = self.createModel() 165 | M1.fit(X1,Y1) 166 | P1 = M1.predict(X2) 167 | 168 | M2 = self.createModel() 169 | M2.fit(X2,Y2) 170 | P2 = M2.predict(X1) 171 | 172 | f1 = accuracy_score(P1, Y2) 173 | f2 = accuracy_score(P2, Y1) 174 | self.fitness = (f1+f2)/2 175 | 176 | return self.fitness 177 | 178 | 179 | def getTrainingMeasure(self): 180 | if self.fitnessType in ["Accuracy", "2FOLD"]: 181 | self.getTrainingPredictions() 182 | return accuracy_score(self.trainingPredictions, self.training_Y) 183 | 184 | if self.fitnessType == "MSE": 185 | self.getTrainingPredictions() 186 | return -1 * mean_squared_error(self.trainingPredictions, self.training_Y) 187 | 188 | if self.fitnessType == "WAF": 189 | self.getTrainingPredictions() 190 | return f1_score(self.trainingPredictions, self.training_Y, average="weighted") 191 | 192 | 193 | def getTestMeasure(self, test_X, test_Y): 194 | if self.fitnessType in ["Accuracy", "2FOLD"]: 195 | self.getTestPredictions(test_X) 196 | return accuracy_score(self.testPredictions, test_Y) 197 | 198 | if self.fitnessType == "MSE": 199 | self.getTestPredictions(test_X) 200 | return -1 * mean_squared_error(self.testPredictions, test_Y) 201 | 202 | if self.fitnessType == "WAF": 203 | self.getTestPredictions(test_X) 204 | return f1_score(self.testPredictions, test_Y, average="weighted") 205 | 206 | 207 | 208 | def getTrainingPredictions(self): 209 | if self.trainingPredictions is None: 210 | self.trainingPredictions = self.predict(self.training_X) 211 | 212 | return self.trainingPredictions 213 | 214 | def getTestPredictions(self, X): 215 | if self.testPredictions is None: 216 | self.testPredictions = self.predict(X) 217 | 218 | return self.testPredictions 219 | 220 | 221 | 222 | def getMSE(self, X,Y,pred=None): 223 | ''' 224 | Returns the individual's accuracy. 225 | ''' 226 | if pred == "Tr": 227 | pred = self.getTrainingPredictions() 228 | elif pred == "Te": 229 | pred = self.getTestPredictions(X) 230 | else: 231 | pred = self.predict(X) 232 | 233 | return -1 * mean_squared_error(pred, Y) 234 | 235 | 236 | def getAccuracy(self, X,Y,pred=None): 237 | ''' 238 | Returns the individual's accuracy. 
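With pred="Tr" the cached training predictions are reused, with pred="Te" the cached test predictions are reused; any other value recomputes the predictions for X.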
239 | ''' 240 | if pred == "Tr": 241 | pred = self.getTrainingPredictions() 242 | elif pred == "Te": 243 | pred = self.getTestPredictions(X) 244 | else: 245 | pred = self.predict(X) 246 | 247 | return accuracy_score(pred, Y) 248 | 249 | 250 | def getWaF(self, X, Y,pred=None): 251 | ''' 252 | Returns the individual's WAF. 253 | ''' 254 | if pred == "Tr": 255 | pred = self.getTrainingPredictions() 256 | elif pred == "Te": 257 | pred = self.getTestPredictions(X) 258 | else: 259 | pred = self.predict(X) 260 | 261 | return f1_score(pred, Y, average="weighted") 262 | 263 | 264 | def getKappa(self, X, Y,pred=None): 265 | ''' 266 | Returns the individual's kappa value. 267 | ''' 268 | if pred == "Tr": 269 | pred = self.getTrainingPredictions() 270 | elif pred == "Te": 271 | pred = self.getTestPredictions(X) 272 | else: 273 | pred = self.predict(X) 274 | 275 | return cohen_kappa_score(pred, Y) 276 | 277 | 278 | 279 | def calculate(self, sample): 280 | ''' 281 | Return the position of a sample in the output space. 282 | ''' 283 | return [self.dimensions[i].calculate(sample) for i in range(len(self.dimensions))] 284 | 285 | 286 | def convert(self, X): 287 | ''' 288 | Returns the converted input space. 289 | ''' 290 | ret = pd.DataFrame() 291 | for i in range(len(self.dimensions)): 292 | a = self.dimensions[i].calculate(X) 293 | ret["#"+str(i)] = a 294 | return ret 295 | 296 | 297 | def predict(self, X): 298 | ''' 299 | Returns the class prediction of a sample. 300 | ''' 301 | hyper_X = self.convert(X) 302 | predictions = self.model.predict(hyper_X) 303 | 304 | return predictions 305 | 306 | 307 | 308 | def prun(self,min_dim=1,simp=False): 309 | ''' 310 | Remove the dimensions that degrade the fitness. 311 | If simp==True, also simplifies each dimension. 312 | ''' 313 | 314 | dup = self.dimensions[:] 315 | i = 0 316 | ind = Individual(self.operators, self.terminals, self.max_depth, self.model_class, self.fitnessType) 317 | ind.copy(dup) 318 | 319 | ind.fit(self.training_X, self.training_Y) 320 | 321 | while i < len(dup) and len(dup) > min_dim: 322 | dup2 = dup[:] 323 | dup2.pop(i) 324 | ind2 = Individual(self.operators, self.terminals, self.max_depth, self.model_class, self.fitnessType) 325 | ind2.copy(dup2) 326 | ind2.fit(self.training_X, self.training_Y) 327 | 328 | if ind2 >= ind: 329 | ind = ind2 330 | dup = dup2 331 | i-=1 332 | i+=1 333 | 334 | self.dimensions = dup 335 | self.trainingAccuracy = None 336 | self.testAccuracy = None 337 | self.size = None 338 | self.depth = None 339 | self.model = None 340 | self.fit(self.training_X, self.training_Y) 341 | 342 | 343 | if simp: 344 | # Simplify dimensions 345 | for d in self.dimensions: 346 | done = False 347 | while not done: 348 | state = str(d) 349 | d.prun(self.training_X) 350 | done = state == str(d) 351 | 352 | 353 | 354 | -------------------------------------------------------------------------------- /m3gp/M3GP.py: -------------------------------------------------------------------------------- 1 | from .Individual import Individual 2 | from .GeneticOperators import getElite, getOffspring, discardDeep 3 | import multiprocessing as mp 4 | import time 5 | 6 | from .MahalanobisDistanceClassifier import MahalanobisDistanceClassifier 7 | 8 | from random import Random 9 | 10 | # 11 | # By using this file, you are agreeing to this product's EULA 12 | # 13 | # This product can be obtained in https://github.com/jespb/Python-M3GP 14 | # 15 | # Copyright ©2019-2025 J. E. 
Batista 16 | # 17 | 18 | class ClassifierNotTrainedError(Exception): 19 | """ You tried to use the classifier before training it. """ 20 | 21 | def __init__(self, expression, message = ""): 22 | self.expression = expression 23 | self.message = message 24 | 25 | 26 | class M3GP: 27 | 28 | ## __INIT__ arguments 29 | operators = None 30 | max_initial_depth = None 31 | population_size = None 32 | threads = None 33 | random_state = 42 34 | rng = None # random number generator 35 | 36 | max_depth = None 37 | max_generation = None 38 | tournament_size = None 39 | elitism_size = None 40 | dim_min = None 41 | dim_max = None 42 | 43 | model_class = None 44 | fitnessType = None 45 | 46 | verbose = None 47 | 48 | 49 | ## FIT arguments 50 | terminals = None 51 | 52 | population = None 53 | currentGeneration = 0 54 | bestIndividual: Individual = None 55 | 56 | trainingAccuracyOverTime = None 57 | testAccuracyOverTime = None 58 | trainingWaFOverTime = None 59 | testWaFOverTime = None 60 | trainingKappaOverTime = None 61 | testKappaOverTime = None 62 | trainingMSEOverTime = None 63 | testMSEOverTime = None 64 | sizeOverTime = None 65 | dimensionsOverTime = None 66 | generationTimes = None 67 | 68 | 69 | 70 | def checkIfTrained(self): 71 | if self.population == None: 72 | raise ClassifierNotTrainedError("The classifier must be trained using the fit(Tr_X, Tr_Y) method before being used.") 73 | 74 | 75 | 76 | def __init__(self, operators=[("+",2),("-",2),("*",2),("/",2)], max_initial_depth = 6, population_size = 500, 77 | max_generation = 100, tournament_size = 5, elitism_size = 1, max_depth = 17, 78 | dim_min = 1, dim_max = 9999, threads=1, random_state = 42, verbose = True, model_class=None, fitnessType="Accuracy"): 79 | 80 | if sum( [0 if op in [("+",2),("-",2),("*",2),("/",2)] else 0 for op in operators ] ) > 0: 81 | print( "[Warning] Some of the following operators may not be supported:", operators) 82 | 83 | self.operators = operators 84 | 85 | self.max_initial_depth = max_initial_depth 86 | self.population_size = population_size 87 | self.threads = max(1, threads) 88 | self.random_state = random_state 89 | self.rng = Random(random_state) 90 | 91 | self.max_depth = max_depth 92 | self.max_generation = max_generation 93 | self.tournament_size = tournament_size 94 | self.elitism_size = elitism_size 95 | self.dim_min = max(1, dim_min) 96 | self.dim_max = max(1, dim_max) 97 | 98 | self.model_class = model_class 99 | if self.model_class is None: 100 | self.model_class = MahalanobisDistanceClassifier() 101 | self.fitnessType = fitnessType 102 | 103 | self.verbose = verbose 104 | 105 | 106 | 107 | 108 | 109 | def __str__(self): 110 | self.checkIfTrained() 111 | return str(self.getBestIndividual()) 112 | 113 | 114 | 115 | 116 | def getCurrentGeneration(self): 117 | return self.currentGeneration 118 | 119 | 120 | def getBestIndividual(self): 121 | ''' 122 | Returns the final M3GP model. 123 | ''' 124 | self.checkIfTrained() 125 | 126 | return self.bestIndividual 127 | 128 | def getAccuracyOverTime(self): 129 | ''' 130 | Returns the training and test accuracy of the best model in each generation. 131 | ''' 132 | self.checkIfTrained() 133 | 134 | return [self.trainingAccuracyOverTime, self.testAccuracyOverTime] 135 | 136 | def getWaFOverTime(self): 137 | ''' 138 | Returns the training and test WAF of the best model in each generation. 
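WAF is the weighted-average F-measure (scikit-learn's f1_score with average="weighted").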
139 | ''' 140 | self.checkIfTrained() 141 | 142 | return [self.trainingWaFOverTime, self.testWaFOverTime] 143 | 144 | def getKappaOverTime(self): 145 | ''' 146 | Returns the training and test kappa values of the best model in each generation. 147 | ''' 148 | self.checkIfTrained() 149 | 150 | return [self.trainingKappaOverTime, self.testKappaOverTime] 151 | 152 | def getMSEOverTime(self): 153 | ''' 154 | Returns the training and test mean squared error values of the best model in each generation. 155 | ''' 156 | self.checkIfTrained() 157 | 158 | return [self.trainingMSEOverTime, self.testMSEOverTime] 159 | 160 | def getSizesOverTime(self): 161 | ''' 162 | Returns the size and number of dimensions of the best model in each generation. 163 | ''' 164 | self.checkIfTrained() 165 | 166 | return [self.sizeOverTime, self.dimensionsOverTime] 167 | 168 | def getGenerationTimes(self): 169 | ''' 170 | Returns the time spent in each generation. 171 | ''' 172 | self.checkIfTrained() 173 | 174 | return self.generationTimes 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | def fit(self,Tr_x, Tr_y, Te_x = None, Te_y = None): 186 | if self.verbose: 187 | print(" > Parameters") 188 | print(" > Random State: "+str(self.random_state)) 189 | print(" > Operators: "+str(self.operators)) 190 | print(" > Population Size: "+str(self.population_size)) 191 | print(" > Max Generation: "+str(self.max_generation)) 192 | print(" > Tournament Size: "+str(self.tournament_size)) 193 | print(" > Elitism Size: "+str(self.elitism_size)) 194 | print(" > Max Initial Depth: "+str(self.max_initial_depth)) 195 | print(" > Max Depth: "+str(self.max_depth)) 196 | print(" > Minimum Dimensions: "+str(self.dim_min)) 197 | print(" > Maximum Dimensions: "+str(self.dim_max)) 198 | print(" > Wrapped Model: "+self.model_class.__class__.__name__) 199 | print(" > Fitness Type: "+self.fitnessType) 200 | print(" > Threads: "+str(self.threads)) 201 | print() 202 | 203 | self.Tr_x = Tr_x 204 | self.Tr_y = Tr_y 205 | self.Te_x = Te_x 206 | self.Te_y = Te_y 207 | self.terminals = list(Tr_x.columns) 208 | 209 | 210 | self.population = [] 211 | 212 | while len(self.population) < self.population_size: 213 | ind = Individual(self.operators, self.terminals, self.max_depth, self.model_class, self.fitnessType) 214 | ind.create(self.rng, n_dims = self.dim_min) 215 | self.population.append(ind) 216 | 217 | self.bestIndividual = self.population[0] 218 | self.bestIndividual.fit(self.Tr_x, self.Tr_y) 219 | self.bestIndividual.getFitness() 220 | 221 | if not self.Te_x is None: 222 | self.trainingAccuracyOverTime = [] 223 | self.testAccuracyOverTime = [] 224 | self.trainingWaFOverTime = [] 225 | self.testWaFOverTime = [] 226 | self.trainingKappaOverTime = [] 227 | self.testKappaOverTime = [] 228 | self.trainingMSEOverTime = [] 229 | self.testMSEOverTime = [] 230 | self.sizeOverTime = [] 231 | self.dimensionsOverTime = [] 232 | self.generationTimes = [] 233 | 234 | 235 | 236 | ''' 237 | Training loop for the algorithm. 
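In each generation the population's fitness is evaluated, the population is sorted from best to worst, the best-so-far individual is updated and pruned, the elite is carried over, and the remaining slots are filled with offspring from the genetic operators (offspring above the depth limit are discarded). Once stoppingCriteria() is met, no further evolution takes place, although the per-generation metrics continue to be recorded.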
238 | ''' 239 | if self.verbose: 240 | print(" > Running log:") 241 | 242 | while self.currentGeneration < self.max_generation: 243 | if not self.stoppingCriteria(): 244 | t1 = time.time() 245 | self.nextGeneration() 246 | t2 = time.time() 247 | duration = t2-t1 248 | else: 249 | duration = 0 250 | self.currentGeneration += 1 251 | 252 | if not self.Te_x is None: 253 | if self.fitnessType in ["Accuracy", "2FOLD", "WAF"]: 254 | self.trainingAccuracyOverTime.append(self.bestIndividual.getAccuracy(self.Tr_x, self.Tr_y, pred="Tr")) 255 | self.testAccuracyOverTime.append(self.bestIndividual.getAccuracy(self.Te_x, self.Te_y, pred="Te")) 256 | self.trainingWaFOverTime.append(self.bestIndividual.getWaF(self.Tr_x, self.Tr_y, pred="Tr")) 257 | self.testWaFOverTime.append(self.bestIndividual.getWaF(self.Te_x, self.Te_y, pred="Te")) 258 | self.trainingKappaOverTime.append(self.bestIndividual.getKappa(self.Tr_x, self.Tr_y, pred="Tr")) 259 | self.testKappaOverTime.append(self.bestIndividual.getKappa(self.Te_x, self.Te_y, pred="Te")) 260 | self.trainingMSEOverTime.append(0) 261 | self.testMSEOverTime.append(0) 262 | elif self.fitnessType in ["MSE"]: 263 | self.trainingAccuracyOverTime.append(0) 264 | self.testAccuracyOverTime.append(0) 265 | self.trainingWaFOverTime.append(0) 266 | self.testWaFOverTime.append(0) 267 | self.trainingKappaOverTime.append(0) 268 | self.testKappaOverTime.append(0) 269 | self.trainingMSEOverTime.append(self.bestIndividual.getMSE(self.Tr_x, self.Tr_y, pred="Tr")) 270 | self.testMSEOverTime.append(self.bestIndividual.getMSE(self.Te_x, self.Te_y, pred="Te")) 271 | self.sizeOverTime.append(self.bestIndividual.getSize()) 272 | self.dimensionsOverTime.append(self.bestIndividual.getNumberOfDimensions()) 273 | self.generationTimes.append(duration) 274 | 275 | 276 | # prun the final individual 277 | self.getBestIndividual().prun(min_dim = self.dim_min, simp=True) 278 | 279 | 280 | 281 | 282 | def stoppingCriteria(self): 283 | ''' 284 | Returns True if the stopping criteria was reached. 285 | ''' 286 | genLimit = self.currentGeneration >= self.max_generation 287 | perfectTraining = self.bestIndividual.getFitness() == 1 288 | 289 | return genLimit or perfectTraining 290 | 291 | 292 | 293 | 294 | def nextGeneration(self): 295 | ''' 296 | Generation algorithm: the population is sorted; the best individual is pruned; 297 | the elite is selected; and the offspring are created. 
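When more than one thread is configured, the fitness of the population is evaluated in parallel through a multiprocessing Pool; otherwise individuals are evaluated sequentially.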
298 | ''' 299 | begin = time.time() 300 | 301 | # Calculates the accuracy of the population using multiprocessing 302 | if self.threads > 1: 303 | with mp.Pool(processes= self.threads) as pool: 304 | results = pool.map(fitIndividuals, [(ind, self.Tr_x, self.Tr_y) for ind in self.population] ) 305 | for i in range(len(self.population)): 306 | self.population[i].trainingPredictions = results[i][0] 307 | self.population[i].fitness = results[i][1] 308 | self.population[i].training_X = self.Tr_x 309 | self.population[i].training_Y = self.Tr_y 310 | else: 311 | [ ind.fit(self.Tr_x, self.Tr_y) for ind in self.population] 312 | [ ind.getFitness() for ind in self.population ] 313 | 314 | # Sort the population from best to worse 315 | self.population.sort(reverse=True) 316 | 317 | 318 | # Update best individual 319 | if self.population[0] > self.bestIndividual: 320 | self.bestIndividual = self.population[0] 321 | self.bestIndividual.prun(min_dim = self.dim_min) 322 | 323 | # Generating Next Generation 324 | newPopulation = [] 325 | newPopulation.extend(getElite(self.population, self.elitism_size)) 326 | while len(newPopulation) < self.population_size: 327 | offspring = getOffspring(self.rng, self.population, self.tournament_size, self.dim_min, self.dim_max) 328 | offspring = discardDeep(offspring, self.max_depth) 329 | newPopulation.extend(offspring) 330 | self.population = newPopulation[:self.population_size] 331 | 332 | 333 | end = time.time() 334 | 335 | 336 | # Debug 337 | if self.verbose and self.currentGeneration%5==0: 338 | if not self.Te_x is None: 339 | print(" > Gen #%2d: Fitness: %.6f // Tr-Score: %.6f // Te-Score: %.6f // Time: %.4f" % (self.currentGeneration, self.bestIndividual.getFitness(), self.bestIndividual.getTrainingMeasure(), self.bestIndividual.getTestMeasure(self.Te_x, self.Te_y), end- begin ) ) 340 | else: 341 | print(" > Gen #%2d: Fitness: %.6f // Tr-Score: %.6f // Time: %.4f" % (self.currentGeneration, self.bestIndividual.getFitness(), self.bestIndividual.getTrainingMeasure(), end- begin ) ) 342 | 343 | 344 | 345 | 346 | 347 | 348 | def predict(self, dataset): 349 | ''' 350 | Returns the predictions for the samples in a dataset. 351 | ''' 352 | self.checkIfTrained() 353 | 354 | return self.getBestIndividual().predict(dataset) 355 | 356 | return "Population Not Trained" if self.bestIndividual == None else self.bestIndividual.predict(sample) 357 | 358 | 359 | def fitIndividuals(a): 360 | ind,x,y = a 361 | ind.getFitness(x,y) 362 | 363 | ret = [] 364 | if "FOLD" in ind.fitnessType: 365 | ret.append(None) 366 | else: 367 | ret.append(ind.getTrainingPredictions()) 368 | ret.append(ind.getFitness()) 369 | 370 | 371 | return ret 372 | 373 | 374 | 375 | 376 | 377 | 378 | -------------------------------------------------------------------------------- /m3gp/MahalanobisDistanceClassifier.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | # 4 | # By using this file, you are agreeing to this product's EULA 5 | # 6 | # This product can be obtained in https://github.com/jespb/Python-M3GP 7 | # 8 | # Copyright ©2019-2025 J. E. Batista 9 | # 10 | 11 | # 12 | # This classifier receives panda DataFrames and converts it to standard types 13 | # to avoid "Infinite" values in the matrices. 
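# Illustrative usage sketch (hypothetical variable names; assumes a pandas
# DataFrame of features and a list-like of class labels):
#
#   clf = MahalanobisDistanceClassifier()
#   clf.fit(X_train, Y_train)          # builds one centroid and one inverse covariance matrix per class
#   predictions = clf.predict(X_test)  # nearest-centroid assignment by Mahalanobis distance
#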
14 | # 15 | 16 | def getInverseCovarianceMatrix(cluster): 17 | ''' 18 | Returns the inverse covariance matrix, obtained from a cluster 19 | ''' 20 | ret = [] 21 | for i in range(len(cluster[0])): 22 | ret.append([0]*len(cluster[0])) 23 | for d1 in range(len(cluster[0])): 24 | for d2 in range(len(cluster[0])): 25 | for x in range(len(cluster)): 26 | ret[d1][d2] += cluster[x][d1]*cluster[x][d2]/len(cluster) 27 | return inverseMatrix(ret) 28 | 29 | def mahalanobisDistance(v1,v2,invCovarianceMatrix): 30 | ''' 31 | Returns the mahalanobis distance between two points 32 | ''' 33 | if invCovarianceMatrix is None: 34 | return euclideanDistance(v1,v2) 35 | 36 | x = np.array(v1) 37 | y = np.array(v2) 38 | 39 | sub = np.subtract(x,y) 40 | mult = np.matmul(sub,invCovarianceMatrix) 41 | if (len(invCovarianceMatrix)==1): 42 | mult = [mult] 43 | mult2 = np.matmul(mult,sub.transpose()) 44 | 45 | if(mult2 < 0): 46 | #print("mult2 < 0") 47 | return euclideanDistance(v1,v2) 48 | 49 | return mult2**0.5 50 | 51 | def euclideanDistance(v1,v2): 52 | return sum([(v1[i]-v2[i])**2 for i in range(len(v1))])**0.5 53 | 54 | 55 | def inverseMatrix(m): 56 | ''' 57 | Returns the inverse of the matrix m, if possible, 58 | otherwise returns the diagonal matrix 59 | ''' 60 | try: 61 | return np.linalg.inv(np.array(m)) 62 | except: 63 | return None 64 | 65 | 66 | 67 | class MahalanobisDistanceClassifier: 68 | 69 | invCovarianceMatrix = None 70 | classCentroids = None 71 | classes = None 72 | 73 | def __init__(self): 74 | pass 75 | 76 | def fit(self,X,Y): 77 | ''' 78 | Calculates the class clusters in the output space. 79 | ''' 80 | 81 | X = [ list(sample) for sample in X.iloc ] 82 | Y = list(Y) 83 | 84 | self.classes = list(set(Y)) 85 | clusters = [] 86 | for i in self.classes: 87 | clusters.append([]) 88 | 89 | for sample_index in range(len(X)): 90 | index = self.classes.index(Y[sample_index]) 91 | coor = X[sample_index] 92 | clusters[index].append(coor) 93 | 94 | self.invCovarianceMatrix = [] 95 | for cluster in clusters: 96 | m = getInverseCovarianceMatrix(cluster) 97 | self.invCovarianceMatrix.append(m) 98 | 99 | self.classCentroids = [] 100 | for cluster in clusters: 101 | self.classCentroids.append([0 for i in range(len(cluster[0]))]) 102 | for sample in cluster: 103 | self.classCentroids[-1] = [self.classCentroids[-1][i] + sample[i]/len(cluster) for i in range(len(self.classCentroids[-1]))] 104 | 105 | 106 | 107 | def predict(self, X): 108 | predictions = [] 109 | X = [ list(sample) for sample in X.iloc ] 110 | 111 | for sample in X: 112 | pick_d = mahalanobisDistance(sample, self.classCentroids[0],self.invCovarianceMatrix[0]) 113 | pick = self.classes[0] 114 | 115 | for i in range(len(self.classes)): 116 | d = mahalanobisDistance(sample, self.classCentroids[i],self.invCovarianceMatrix[i]) 117 | if d < pick_d: 118 | pick_d = d 119 | pick = self.classes[i] 120 | 121 | predictions.append(pick) 122 | return predictions -------------------------------------------------------------------------------- /m3gp/Node.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from math import log 4 | 5 | import warnings 6 | warnings.filterwarnings("ignore") 7 | 8 | # 9 | # By using this file, you are agreeing to this product's EULA 10 | # 11 | # This product can be obtained in https://github.com/jespb/Python-M3GP 12 | # 13 | # Copyright ©2019-2022 J. E. 
14 | #
15 | 
16 | class Node:
17 | branches = None
18 | value = None
19 | 
20 | 
21 | def __init__(self):
22 | pass
23 | 
24 | 
25 | def create(self, rng, operators=None, terminals=None, depth=None,full=False):
26 | if depth>1 and (rng.random()<0.5 or full ==True ):
27 | op, n_args = operators[rng.randint(0,len(operators)-1)]
28 | self.value = op
29 | 
30 | self.branches = []
31 | for i in range(n_args):
32 | n = Node()
33 | n.create(rng, operators, terminals, depth-1)
34 | self.branches.append(n)
35 | else:
36 | self.value = terminals[rng.randint(0,len(terminals)-1)] # no literals (terminals are dataset features only)
37 | 
38 | 
39 | def copy(self,value=None, branches=None):
40 | self.branches = branches
41 | self.value=value
42 | 
43 | 
44 | def __str__(self):
45 | if self.branches == None:
46 | return str(self.value)
47 | else:
48 | if len(self.branches) == 2:
49 | return "( " + str(self.branches[0]) + " " + str(self.value) + " " + str(self.branches[1]) + " )"
50 | else:
51 | return str(self.value) + " ( " + " ".join( [str(b) for b in self.branches] ) + " )"
52 | 
53 | 
54 | def getSize(self):
55 | '''
56 | Returns the total number of nodes within this Node.
57 | '''
58 | if self.branches == None:
59 | return 1
60 | else:
61 | 
62 | return 1 + sum( [b.getSize() for b in self.branches] )
63 | 
64 | 
65 | def getDepth(self):
66 | '''
67 | Returns the depth of this Node.
68 | '''
69 | if self.branches == None:
70 | return 1
71 | else:
72 | return 1 + max( [b.getDepth() for b in self.branches] )
73 | 
74 | 
75 | def getRandomNode(self, rng, value=None):
76 | '''
77 | Returns a random Node within this Node.
78 | '''
79 | if value == None:
80 | value = rng.randint(0,self.getSize()-1)
81 | if value == 0:
82 | #print(self)
83 | return self
84 | 
85 | #print(value, self)
86 | for i in range(len(self.branches)):
87 | size = self.branches[i].getSize()
88 | if value-1 < size:
89 | return self.branches[i].getRandomNode(rng, value-1)
90 | value -= size
91 | 
92 | 
93 | def swap(self, other):
94 | '''
95 | Swaps the content of two nodes.
96 | '''
97 | b = self.branches
98 | v = self.value
99 | 
100 | self.branches = other.branches
101 | self.value = other.value
102 | 
103 | other.branches = b
104 | other.value = v
105 | 
106 | 
107 | def clone(self):
108 | '''
109 | Returns a clone of this node.
110 | '''
111 | if self.branches == None:
112 | n = Node()
113 | n.copy(value=self.value, branches = None)
114 | return n
115 | else:
116 | n = Node()
117 | n.copy(value=self.value, branches=[b.clone() for b in self.branches])
118 | return n
119 | 
120 | 
121 | 
122 | def calculate(self, sample):
123 | '''
124 | Returns the calculated value of a sample.
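Division is protected (a zero divisor is treated as 1), log2 is only applied
to strictly positive values (non-positive values are passed through unchanged),
and terminals that are not column names are interpreted as numeric constants.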
125 | '''
126 | if self.branches == None:
127 | try:
128 | return np.array( sample[self.value] )#.astype("float64")
129 | except:
130 | return np.array( [float(self.value)]*sample.shape[0] )
131 | 
132 | 
133 | else:
134 | if self.value == "+": #+
135 | return self.branches[0].calculate(sample) + self.branches[1].calculate(sample)
136 | if self.value == "-": #-
137 | return self.branches[0].calculate(sample) - self.branches[1].calculate(sample)
138 | if self.value == "*": #*
139 | return self.branches[0].calculate(sample) * self.branches[1].calculate(sample)
140 | if self.value == "/": #/
141 | right = self.branches[1].calculate(sample)
142 | right = np.where(right==0, 1, right)
143 | return self.branches[0].calculate(sample) / right
144 | if self.value == "log2": # log2(X)
145 | res = self.branches[0].calculate(sample)
146 | res = np.where(res<=0, res, np.log2(res))
147 | return res
148 | if self.value == "max": # max( X0, X1, ... Xn)
149 | calc = [b.calculate(sample) for b in self.branches]
150 | a = []
151 | for i in range(len(calc[0])):
152 | a.append( max([calc[k][i] for k in range(len(calc))]) )
153 | return np.array(a)
154 | 
155 | 
156 | def isLeaf(self):
157 | '''
158 | Returns True if the Node has no sub-nodes.
159 | '''
160 | return self.branches == None
161 | 
162 | def getSemantics(self,tr_x):
163 | '''
164 | Returns the semantics of a Node.
165 | '''
166 | return self.calculate(tr_x)
167 | 
168 | def redirect(self, other):
169 | '''
170 | Assigns the content of another Node to this Node.
171 | '''
172 | self.value = other.value
173 | self.branches = other.branches
174 | 
175 | def prun(self, tr_x):
176 | '''
177 | Simplifies this Node
178 | '''
179 | semantics = self.getSemantics(tr_x)
180 | semantics.sort()
181 | if semantics[0]== semantics[-1] and len(semantics)>1: # the subtree is constant on the training data; replace it with that literal
182 | self.value = str(semantics[0])
183 | self.branches = None
184 | 
185 | 
186 | 
187 | if self.branches!=None and len(self.branches)==1: # [log2]
188 | pass
189 | 
190 | 
191 | 
192 | if self.branches!=None and len(self.branches)==2: # [+, -, *, /]
193 | # +
194 | if self.value == "+":
195 | # 0 + X == X
196 | if not self.isLeaf() and ( self.branches[0].isLeaf() and self.branches[0].value == "0.0" ):
197 | self.redirect(self.branches[1])
198 | 
199 | # X + 0 == X
200 | if not self.isLeaf() and ( self.branches[1].isLeaf() and self.branches[1].value == "0.0" ):
201 | self.redirect(self.branches[0])
202 | 
203 | # X + X == 2 * X
204 | if not self.isLeaf() and ( str(self.branches[1]) == str(self.branches[0]) ):
205 | self.value = "*"
206 | n = Node()
207 | n.copy(value = "2.0")
208 | self.branches[0].redirect( n )
209 | 
210 | # -
211 | if self.value == "-":
212 | # X - 0 == X
213 | if not self.isLeaf() and ( self.branches[1].isLeaf() and self.branches[1].value == "0.0" ):
214 | self.redirect(self.branches[0])
215 | 
216 | # X - X == 0
217 | if not self.isLeaf() and ( str(self.branches[1]) == str(self.branches[0]) ):
218 | n = Node()
219 | n.copy(value = "0.0")
220 | self.redirect( n )
221 | 
222 | # *
223 | if self.value == "*":
224 | # X * 0 == 0, 0 * X == 0
225 | if not self.isLeaf() and ( (self.branches[0].isLeaf() and self.branches[0].value=="0.0") or (self.branches[1].isLeaf() and self.branches[1].value=="0.0") ):
226 | n = Node()
227 | n.copy(value = "0.0")
228 | self.redirect( n )
229 | 
230 | # 1 * X == X
231 | if not self.isLeaf() and ( self.branches[0].isLeaf() and self.branches[0].value == "1.0" ):
232 | self.redirect(self.branches[1])
233 | 
234 | # X * 1 == X
235 | if not self.isLeaf() and ( self.branches[1].isLeaf() and self.branches[1].value == "1.0" ):
236 | self.redirect(self.branches[0])
237 | 
238 | # /
239 | if self.value == "/":
240 | # X / 0 == 1
241 | if not self.isLeaf() and ( self.branches[1].isLeaf() and self.branches[1].value=="0.0" ):
242 | n = Node()
243 | n.copy(value = "1.0")
244 | self.redirect( n )
245 | 
246 | # X / 1 == X
247 | if not self.isLeaf() and ( self.branches[1].isLeaf() and self.branches[1].value=="1.0" ):
248 | self.redirect(self.branches[0])
249 | 
250 | # X / X == 1
251 | if not self.isLeaf() and ( str(self.branches[1]) == str(self.branches[0]) ):
252 | n = Node()
253 | n.copy(value = "1.0")
254 | self.redirect( n )
255 | 
256 | 
257 | if self.branches!=None and len(self.branches)==3: # [max]
258 | pass
259 | 
260 | 
261 | 
262 | 
263 | if self.branches != None:
264 | for branch in self.branches:
265 | branch.prun(tr_x)
--------------------------------------------------------------------------------
/m3gp/__init__.py:
--------------------------------------------------------------------------------
1 | from .M3GP import M3GP
2 | 
--------------------------------------------------------------------------------
/mypy.ini:
--------------------------------------------------------------------------------
1 | # MyPy config file
2 | # File reference here - http://mypy.readthedocs.io/en/latest/config_file.html#config-file
3 | 
4 | [mypy]
5 | warn_redundant_casts = True
6 | warn_unused_ignores = True
7 | 
8 | # Needed because of bug in MyPy
9 | disallow_subclassing_any = False
10 | #ignore_missing_imports = True
11 | 
12 | mypy_path = stubs
13 | 
14 | [mypy-*]
15 | disallow_untyped_calls = True
16 | disallow_untyped_defs = True
17 | check_untyped_defs = True
18 | warn_return_any = True
19 | no_implicit_optional = True
20 | strict_optional = True
21 | #ignore_missing_imports = True
22 | 
23 | [mypy-z3]
24 | ignore_missing_imports = True
25 | 
26 | [mypy-numpy]
27 | ignore_missing_imports = True
28 | 
29 | [mypy-lark]
30 | ignore_missing_imports = True
31 | 
32 | [mypy-pandas]
33 | ignore_missing_imports = True
34 | 
35 | [mypy-pylab]
36 | ignore_missing_imports = True
37 | 
38 | [mypy-palettable.colorbrewer.qualitative]
39 | ignore_missing_imports = True
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "M3GP"
3 | version = "1.2.0"
4 | description = "Python implementation of M3GP"
5 | authors = [
6 | { name = "João Batista", email = "joao.batista@riken.jp" }
7 | ]
8 | readme = "README.md"
9 | requires-python = "~=3.8"
10 | 
11 | [tool.pypi-publish]
12 | repository = "pypi"
13 | 
14 | [project.urls]
15 | homepage = "https://github.com/jespb/Python-M3GP"
16 | repository = "https://github.com/jespb/Python-M3GP"
17 | documentation = "https://github.com/jespb/Python-M3GP"
18 | 
19 | [project.license]
20 | text = "https://opensource.org/licenses/MIT"
21 | 
22 | [build-system]
23 | build-backend = 'setuptools.build_meta'
24 | requires = [
25 | 'setuptools >= 43.0.0',
26 | ]
27 | 
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | pandas
2 | scikit-learn
3 | numpy
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | name = M3GP
3 | version = 0.0.1
4 | author = João Batista
5 | author_email = jebatista@ciencias.ulisboa.pt
6 | description = Python implementation of M3GP.
7 | long_description = file: README.md
8 | long_description_content_type = text/markdown
9 | url = https://github.com/jespb/Python-M3GP
10 | project_urls =
11 | Bug Tracker = https://github.com/jespb/Python-M3GP/issues
12 | classifiers =
13 | Programming Language :: Python :: 3
14 | License :: OSI Approved :: MIT License
15 | Operating System :: OS Independent
16 | 
17 | [options]
18 | packages = find:
19 | python_requires = >=3.8
20 | 
21 | [options.packages.find]
22 | where = .
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import setuptools
2 | setuptools.setup()
--------------------------------------------------------------------------------
/setup_dev.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | printf '#!/bin/sh\nblack .\n' > .git/hooks/pre-commit # write a pre-commit hook that runs black
3 | chmod +x .git/hooks/pre-commit
--------------------------------------------------------------------------------
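A minimal usage sketch (not a file from the repository): it assumes M3GP exposes a scikit-learn-style interface — a default constructor, a fit call on pandas training data, and the predict method shown in M3GP.py — and that the label is the last column of a user-supplied CSV; the Main_M3GP_*_example.py scripts show the exact calls.

import pandas as pd
from sklearn.model_selection import train_test_split
from m3gp import M3GP

# Illustrative dataset path and label column; adjust to your data.
df = pd.read_csv("datasets/your_dataset.csv")
X, y = df.iloc[:, :-1], df.iloc[:, -1]
Tr_x, Te_x, Tr_y, Te_y = train_test_split(X, y, train_size=0.7, random_state=42)

model = M3GP()           # default hyperparameters (operators, population size, ...)
model.fit(Tr_x, Tr_y)    # assumed signature; see the example scripts for the exact call
print(model.predict(Te_x)[:10])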