├── rce ├── __init__.py ├── utils.py └── generate_ce.py ├── slides ├── Figures │ ├── git.gif │ ├── RCE_iterative.jpg │ └── RCE_iterative_vs_reform.jpg └── Finding_Regions_of_Counterfactual_Explanations_via_Robust_Optimization.pdf ├── pyproject.toml ├── experiments ├── results_banknote │ ├── linear │ │ └── model.csv │ ├── banknote_results_linf_01.txt │ ├── banknote_results_linf_05.txt │ ├── banknote_results_l2_01.txt │ ├── banknote_results_l2_05.txt │ ├── cart │ │ └── model.csv │ └── mlp │ │ └── model.csv ├── results_diabetes │ ├── linear │ │ └── model.csv │ ├── diabetes_results_linf_01.txt │ ├── diabetes_results_linf_05.txt │ ├── diabetes_results_l2_01.txt │ ├── diabetes_results_l2.txt │ └── mlp │ │ └── model.csv ├── results_ionosphere │ ├── linear │ │ └── model.csv │ ├── ionosphere_results_linf_01.txt │ ├── ionosphere_results_linf_05.txt │ ├── ionosphere_results_l2_05.txt │ ├── ionosphere_results_l2_01.txt │ ├── mlp │ │ └── model.csv │ └── cart │ │ └── model.csv ├── Datasets.py └── experiments.ipynb ├── setup.cfg ├── LICENSE ├── README.md └── data └── diabetes.csv /rce/__init__.py: -------------------------------------------------------------------------------- 1 | from .utils import * 2 | from .generate_ce import * -------------------------------------------------------------------------------- /slides/Figures/git.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/donato-maragno/robust-CE/HEAD/slides/Figures/git.gif -------------------------------------------------------------------------------- /slides/Figures/RCE_iterative.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/donato-maragno/robust-CE/HEAD/slides/Figures/RCE_iterative.jpg -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "setuptools>=42", 4 | "wheel" 5 | ] 6 | build-backend = "setuptools.build_meta" -------------------------------------------------------------------------------- /slides/Figures/RCE_iterative_vs_reform.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/donato-maragno/robust-CE/HEAD/slides/Figures/RCE_iterative_vs_reform.jpg -------------------------------------------------------------------------------- /experiments/results_banknote/linear/model.csv: -------------------------------------------------------------------------------- 1 | X_0,X_1,X_2,X_3,intercept 2 | -10.932824123816841,-8.218128539993497,-7.79453851734345,0.3592203347660593,12.248011797758247 3 | -------------------------------------------------------------------------------- /slides/Finding_Regions_of_Counterfactual_Explanations_via_Robust_Optimization.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/donato-maragno/robust-CE/HEAD/slides/Finding_Regions_of_Counterfactual_Explanations_via_Robust_Optimization.pdf -------------------------------------------------------------------------------- /experiments/results_diabetes/linear/model.csv: -------------------------------------------------------------------------------- 1 | X_0,X_1,X_2,X_3,X_4,X_5,X_6,X_7,intercept 2 | 
1.550859052766202,4.846531804791059,-0.7584636974888731,0.18402503411059276,-0.10588792438909057,3.2866616374068935,1.506082852525294,0.9887786209196648,-5.683740342506136 3 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = rce 3 | version = 0.0.1 4 | description = Robust Counterfactual Explanations 5 | long_description = file: README.md 6 | long_description_content_type = text/markdown 7 | url = https://github.com/donato-maragno/robust-CE 8 | classifiers = 9 | Programming Language :: Python :: 3 10 | License :: OSI Approved :: MIT License 11 | 12 | [options] 13 | packages = find: 14 | python_requires = >=3.7 15 | include_package_data = True 16 | install_requires = 17 | pandas 18 | scikit-learn 19 | pyomo 20 | matplotlib 21 | openpyxl -------------------------------------------------------------------------------- /experiments/results_ionosphere/linear/model.csv: -------------------------------------------------------------------------------- 1 | X_0,X_1,X_2,X_3,X_4,X_5,X_6,X_7,X_8,X_9,X_10,X_11,X_12,X_13,X_14,X_15,X_16,X_17,X_18,X_19,X_20,X_21,X_22,X_23,X_24,X_25,X_26,X_27,X_28,X_29,X_30,X_31,X_32,X_33,intercept 2 | 2.9515930793406997,0.0,2.068352516330561,0.7753727286238673,2.170245674751096,1.2213189837703882,1.4548100821556607,1.7328652976587693,1.0059586314011173,0.7586964577910538,-0.31679710887663964,0.18335011182317082,-0.18985094058293428,0.6150956694837133,0.4271612912075341,0.025178726618434087,-0.12682622758788187,0.7329663182939211,-0.420398349880174,0.1314538266389412,0.4325186181909189,-1.9617296488975475,0.8163210576384985,0.2391513994375461,0.8210612375727947,0.37102125508416867,-1.810475348612432,-0.14475762245976947,0.528681948704784,0.8108180002829258,0.8810559834078248,-0.2777312017112253,-0.2215068985248781,-1.2118430690224922,-9.998664117218455 3 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Donato Maragno 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /experiments/results_banknote/banknote_results_linf_01.txt: -------------------------------------------------------------------------------- 1 | linear: 0 0.25 (0.01) 0.00 (0.00) 0 nan (nan) nan (nan) 2 | cart: 3 1.53 (0.04) 1.00 (0.00) 0 nan (nan) nan (nan) 3 | cart: 5 1.86 (0.09) 1.10 (0.07) 0 nan (nan) nan (nan) 4 | cart: 10 3.90 (0.88) 2.00 (0.58) 0 nan (nan) nan (nan) 5 | rf: 5 3.50 (0.36) 1.75 (0.22) 0 nan (nan) nan (nan) 6 | rf: 10 6.74 (1.03) 2.60 (0.40) 0 nan (nan) nan (nan) 7 | rf: 20 21.21 (4.61) 4.55 (0.99) 0 nan (nan) nan (nan) 8 | rf: 50 115.79 (34.35) 7.80 (1.65) 0 nan (nan) nan (nan) 9 | rf: 100 214.38 (65.07) 6.44 (1.31) 2 0.009 (0.000) 21.50 (1.50) 10 | gbm: 5 2.70 (0.22) 1.20 (0.14) 0 nan (nan) nan (nan) 11 | gbm: 10 3.20 (0.30) 1.45 (0.15) 0 nan (nan) nan (nan) 12 | gbm: 20 5.94 (0.50) 2.60 (0.23) 0 nan (nan) nan (nan) 13 | gbm: 50 18.38 (1.62) 4.05 (0.35) 0 nan (nan) nan (nan) 14 | gbm: 100 87.11 (26.24) 7.28 (0.77) 2 0.006 (0.003) 37.50 (8.50) 15 | mlp: (10,) 1.63 (0.04) 1.00 (0.00) 0 nan (nan) nan (nan) 16 | mlp: (10, 10, 10) 2.98 (0.15) 1.15 (0.08) 0 nan (nan) nan (nan) 17 | mlp: (50,) 2.60 (0.14) 1.00 (0.00) 0 nan (nan) nan (nan) 18 | mlp: (100,) 3.53 (0.15) 1.00 (0.00) 0 nan (nan) nan (nan) 19 | -------------------------------------------------------------------------------- /experiments/results_diabetes/diabetes_results_linf_01.txt: -------------------------------------------------------------------------------- 1 | linear: 0 0.23 (0.01) 0.00 (0.00) 0 nan (nan) nan (nan) 2 | cart: 3 1.66 (0.07) 1.20 (0.09) 0 nan (nan) nan (nan) 3 | cart: 5 2.29 (0.11) 1.20 (0.09) 0 nan (nan) nan (nan) 4 | cart: 10 5.77 (0.48) 1.40 (0.13) 0 nan (nan) nan (nan) 5 | rf: 5 5.89 (1.98) 2.60 (0.83) 0 nan (nan) nan (nan) 6 | rf: 10 7.20 (0.94) 2.35 (0.29) 0 nan (nan) nan (nan) 7 | rf: 20 33.55 (7.48) 6.15 (0.79) 0 nan (nan) nan (nan) 8 | rf: 50 110.24 (32.43) 6.47 (1.39) 3 0.007 (0.001) 13.67 (0.88) 9 | rf: 100 274.09 (71.93) 8.87 (1.49) 5 0.004 (0.002) 10.60 (3.56) 10 | gbm: 5 2.76 (0.22) 1.85 (0.17) 0 nan (nan) nan (nan) 11 | gbm: 10 2.72 (0.29) 1.50 (0.24) 0 nan (nan) nan (nan) 12 | gbm: 20 4.25 (0.45) 2.15 (0.28) 0 nan (nan) nan (nan) 13 | gbm: 50 24.60 (8.21) 5.85 (1.41) 0 nan (nan) nan (nan) 14 | gbm: 100 164.32 (42.12) 11.63 (2.00) 1 0.004 (nan) 29.00 (nan) 15 | mlp: (10,) 1.55 (0.06) 1.00 (0.00) 0 nan (nan) nan (nan) 16 | mlp: (10, 10, 10) 2.40 (0.12) 1.15 (0.08) 0 nan (nan) nan (nan) 17 | mlp: (50,) 2.09 (0.12) 1.05 (0.05) 0 nan (nan) nan (nan) 18 | mlp: (100,) 4.36 (0.62) 1.10 (0.07) 0 nan (nan) nan (nan) 19 | -------------------------------------------------------------------------------- /experiments/results_ionosphere/ionosphere_results_linf_01.txt: -------------------------------------------------------------------------------- 1 | linear: 0 0.25 (0.01) 0.00 (0.00) 0 nan (nan) nan (nan) 2 | cart: 3 1.66 (0.07) 1.10 (0.07) 0 nan (nan) nan (nan) 3 | cart: 5 1.96 (0.08) 1.10 (0.07) 0 nan (nan) nan (nan) 4 | cart: 10 2.86 (0.16) 1.20 (0.09) 0 nan (nan) nan (nan) 5 | rf: 5 3.79 (0.24) 1.70 (0.13) 0 nan (nan) nan (nan) 6 | rf: 10 8.76 (0.89) 2.85 (0.33) 0 nan (nan) nan (nan) 7 | rf: 20 22.33 (2.83) 4.40 (0.51) 0 nan (nan) nan (nan) 8 | rf: 50 137.26 (33.37) 8.20 (1.20) 0 nan (nan) nan (nan) 9 | rf: 100 285.62 (95.57) 8.27 (2.02) 9 0.004 (0.001) 17.33 (2.49) 10 | gbm: 5 2.37 (0.15) 1.60 (0.13) 0 nan (nan) nan (nan) 11 | gbm: 10 4.35 (0.44) 2.75 (0.30) 0 nan (nan) nan (nan) 12 | gbm: 20 9.01 
(1.11) 3.85 (0.50) 0 nan (nan) nan (nan) 13 | gbm: 50 81.33 (28.39) 8.90 (1.36) 0 nan (nan) nan (nan) 14 | gbm: 100 137.98 (22.74) 10.33 (0.97) 2 0.007 (0.000) 18.00 (4.00) 15 | mlp: (10,) 2.22 (0.19) 1.80 (0.21) 0 nan (nan) nan (nan) 16 | mlp: (10, 10, 10) 13.01 (3.57) 2.30 (0.40) 0 nan (nan) nan (nan) 17 | mlp: (50,) 5.75 (0.75) 1.20 (0.12) 0 nan (nan) nan (nan) 18 | mlp: (100,) 61.31 (33.11) 1.50 (0.22) 10 0.000 (0.000) 0.60 (0.16) 19 | -------------------------------------------------------------------------------- /experiments/results_banknote/banknote_results_linf_05.txt: -------------------------------------------------------------------------------- 1 | linear: 0 0.13 (0.00) 0.00 (0.00) 0 nan (nan) nan (nan) 2 | cart: 3 1.00 (0.06) 1.70 (0.15) 0 nan (nan) nan (nan) 3 | cart: 5 1.18 (0.11) 1.80 (0.22) 0 nan (nan) nan (nan) 4 | cart: 10 2.22 (0.41) 2.95 (0.61) 0 nan (nan) nan (nan) 5 | rf: 5 2.85 (0.47) 3.25 (0.54) 0 nan (nan) nan (nan) 6 | rf: 10 13.51 (3.03) 8.95 (1.60) 0 nan (nan) nan (nan) 7 | rf: 20 13.86 (3.58) 5.53 (0.92) 1 0.041 (nan) 3.00 (nan) 8 | rf: 50 101.21 (24.90) 11.37 (1.53) 1 0.048 (nan) 40.00 (nan) 9 | rf: 100 156.28 (33.21) 8.70 (1.02) 0 nan (nan) nan (nan) 10 | gbm: 5 1.57 (0.15) 1.75 (0.24) 0 nan (nan) nan (nan) 11 | gbm: 10 4.84 (0.58) 3.55 (0.46) 0 nan (nan) nan (nan) 12 | gbm: 20 12.72 (2.22) 8.28 (0.85) 2 0.038 (0.001) 10.00 (2.00) 13 | gbm: 50 73.23 (27.00) 13.76 (1.82) 3 0.039 (0.006) 32.67 (17.07) 14 | gbm: 100 274.22 (51.40) 17.25 (1.57) 4 0.040 (0.004) 36.00 (3.56) 15 | mlp: (10,) 0.90 (0.01) 1.00 (0.00) 0 nan (nan) nan (nan) 16 | mlp: (10, 10, 10) 1.48 (0.03) 1.00 (0.00) 0 nan (nan) nan (nan) 17 | mlp: (50,) 1.39 (0.06) 1.00 (0.00) 0 nan (nan) nan (nan) 18 | mlp: (100,) 1.83 (0.05) 1.00 (0.00) 0 nan (nan) nan (nan) 19 | -------------------------------------------------------------------------------- /experiments/results_banknote/banknote_results_l2_01.txt: -------------------------------------------------------------------------------- 1 | linear: 0 0.24 (0.01) 0.00 (0.00) 0 nan (nan) nan (nan) 2 | cart: 3 2.09 (0.11) 1.45 (0.11) 0 nan (nan) nan (nan) 3 | cart: 5 2.64 (0.14) 1.53 (0.12) 1 0.000 (nan) 0.00 (nan) 4 | cart: 10 4.61 (0.88) 2.70 (0.58) 0 nan (nan) nan (nan) 5 | rf: 5 5.20 (0.62) 2.35 (0.34) 0 nan (nan) nan (nan) 6 | rf: 10 10.83 (2.68) 3.41 (0.78) 3 0.006 (0.001) 16.67 (5.93) 7 | rf: 20 22.59 (7.10) 4.15 (1.22) 7 0.004 (0.002) 16.29 (4.47) 8 | rf: 50 61.93 (14.29) 3.89 (1.22) 11 0.004 (0.001) 17.36 (3.10) 9 | rf: 100 103.33 (31.75) 3.20 (0.89) 10 0.004 (0.001) 17.10 (1.27) 10 | gbm: 5 4.50 (0.50) 2.50 (0.34) 0 nan (nan) nan (nan) 11 | gbm: 10 6.87 (0.88) 3.15 (0.42) 0 nan (nan) nan (nan) 12 | gbm: 20 14.02 (1.09) 4.40 (0.34) 0 nan (nan) nan (nan) 13 | gbm: 50 34.03 (3.15) 5.21 (0.44) 1 0.010 (nan) 11.00 (nan) 14 | gbm: 100 133.53 (24.40) 8.50 (0.98) 6 0.007 (0.001) 22.50 (4.67) 15 | mlp: (10,) 1.64 (0.09) 1.00 (0.00) 0 nan (nan) nan (nan) 16 | mlp: (10, 10, 10) 2.54 (0.15) 1.00 (0.00) 0 nan (nan) nan (nan) 17 | mlp: (50,) 2.42 (0.14) 1.00 (0.00) 0 nan (nan) nan (nan) 18 | mlp: (100,) 3.57 (0.14) 1.00 (0.00) 0 nan (nan) nan (nan) 19 | -------------------------------------------------------------------------------- /experiments/results_diabetes/diabetes_results_linf_05.txt: -------------------------------------------------------------------------------- 1 | linear: 0 0.15 (0.01) 0.00 (0.00) 0 nan (nan) nan (nan) 2 | cart: 3 1.09 (0.07) 1.60 (0.15) 0 nan (nan) nan (nan) 3 | cart: 5 1.63 (0.13) 2.05 (0.22) 0 nan (nan) nan (nan) 4 | cart: 10 
8.84 (1.49) 4.45 (0.59) 0 nan (nan) nan (nan) 5 | rf: 5 4.29 (0.73) 5.25 (0.86) 0 nan (nan) nan (nan) 6 | rf: 10 14.71 (3.53) 8.35 (1.24) 0 nan (nan) nan (nan) 7 | rf: 20 89.00 (28.17) 14.00 (2.28) 2 0.045 (0.002) 36.00 (0.00) 8 | rf: 50 303.27 (93.95) 16.73 (2.55) 9 0.034 (0.004) 28.56 (4.60) 9 | rf: 100 453.67 (111.27) 15.43 (2.19) 13 0.032 (0.004) 19.23 (2.01) 10 | gbm: 5 1.50 (0.10) 2.05 (0.20) 0 nan (nan) nan (nan) 11 | gbm: 10 8.87 (4.44) 8.55 (3.21) 0 nan (nan) nan (nan) 12 | gbm: 20 41.81 (17.86) 17.05 (4.37) 1 0.025 (nan) 184.00 (nan) 13 | gbm: 50 223.87 (125.98) 19.86 (4.23) 13 0.027 (0.004) 63.38 (7.44) 14 | gbm: 100 nan (nan) nan (nan) 20 0.022 (0.003) 34.60 (3.00) 15 | mlp: (10,) 1.00 (0.04) 1.15 (0.08) 0 nan (nan) nan (nan) 16 | mlp: (10, 10, 10) 2.02 (0.26) 1.65 (0.21) 0 nan (nan) nan (nan) 17 | mlp: (50,) 1.75 (0.13) 1.35 (0.11) 0 nan (nan) nan (nan) 18 | mlp: (100,) 5.88 (1.19) 1.80 (0.16) 0 nan (nan) nan (nan) -------------------------------------------------------------------------------- /experiments/results_diabetes/diabetes_results_l2_01.txt: -------------------------------------------------------------------------------- 1 | linear: 0 0.24 (0.01) 0.00 (0.00) 0 nan (nan) nan (nan) 2 | cart: 3 2.18 (0.13) 1.70 (0.15) 0 nan (nan) nan (nan) 3 | cart: 5 4.21 (0.39) 2.00 (0.23) 0 nan (nan) nan (nan) 4 | cart: 10 14.17 (2.64) 2.63 (0.50) 1 0.000 (nan) 18.00 (nan) 5 | rf: 5 7.96 (1.65) 3.17 (0.55) 2 0.004 (0.004) 29.00 (4.00) 6 | rf: 10 15.20 (3.17) 4.20 (0.68) 0 nan (nan) nan (nan) 7 | rf: 20 104.42 (27.35) 9.81 (1.78) 4 0.007 (0.002) 12.25 (3.75) 8 | rf: 50 137.37 (39.96) 6.50 (1.51) 10 0.004 (0.001) 14.70 (2.51) 9 | rf: 100 177.47 (41.01) 3.80 (0.86) 15 0.002 (0.001) 11.00 (1.59) 10 | gbm: 5 4.67 (0.47) 2.45 (0.26) 0 nan (nan) nan (nan) 11 | gbm: 10 6.11 (1.04) 2.70 (0.48) 0 nan (nan) nan (nan) 12 | gbm: 20 10.97 (1.45) 3.65 (0.49) 0 nan (nan) nan (nan) 13 | gbm: 50 76.04 (33.09) 9.35 (2.21) 0 nan (nan) nan (nan) 14 | gbm: 100 201.13 (77.68) 12.08 (2.92) 8 0.005 (0.001) 31.88 (4.89) 15 | mlp: (10,) 1.73 (0.05) 1.00 (0.00) 0 nan (nan) nan (nan) 16 | mlp: (10, 10, 10) 2.04 (0.10) 1.00 (0.00) 6 0.000 (0.000) 0.00 (0.00) 17 | mlp: (50,) 2.00 (0.08) 1.00 (0.00) 1 0.000 (nan) 0.00 (nan) 18 | mlp: (100,) 5.28 (0.83) 1.15 (0.11) 0 nan (nan) nan (nan) 19 | -------------------------------------------------------------------------------- /experiments/results_banknote/banknote_results_l2_05.txt: -------------------------------------------------------------------------------- 1 | linear: 0 0.15 (0.00) 0.00 (0.00) 0 nan (nan) nan (nan) 2 | cart: 3 1.72 (0.17) 2.25 (0.19) 0 nan (nan) nan (nan) 3 | cart: 5 2.69 (0.52) 3.95 (0.74) 1 0.026 (nan) 1.00 (nan) 4 | cart: 10 4.44 (0.80) 4.95 (0.93) 0 nan (nan) nan (nan) 5 | rf: 5 4.49 (0.64) 4.40 (0.64) 0 nan (nan) nan (nan) 6 | rf: 10 12.39 (2.57) 6.82 (1.14) 3 0.048 (0.000) 13.33 (1.20) 7 | rf: 20 51.29 (18.03) 10.17 (2.32) 8 0.049 (0.000) 7.38 (1.93) 8 | rf: 50 93.18 (51.21) 7.78 (2.17) 11 0.048 (0.001) 16.36 (3.55) 9 | rf: 100 108.47 (29.76) 5.25 (0.80) 8 0.047 (0.001) 14.00 (2.96) 10 | gbm: 5 2.67 (0.33) 3.55 (0.47) 0 nan (nan) nan (nan) 11 | gbm: 10 5.77 (0.45) 5.55 (0.44) 0 nan (nan) nan (nan) 12 | gbm: 20 23.89 (3.55) 10.41 (1.00) 3 0.046 (0.001) 61.67 (18.67) 13 | gbm: 50 123.75 (51.10) 18.14 (4.47) 6 0.044 (0.004) 41.00 (6.84) 14 | gbm: 100 389.06 (124.83) 20.25 (3.32) 12 0.044 (0.001) 26.75 (2.70) 15 | mlp: (10,) 0.91 (0.00) 1.00 (0.00) 0 nan (nan) nan (nan) 16 | mlp: (10, 10, 10) 1.45 (0.03) 1.00 (0.00) 0 nan (nan) nan (nan) 17 | 
mlp: (50,) 1.36 (0.03) 1.00 (0.00) 0 nan (nan) nan (nan) 18 | mlp: (100,) 2.26 (0.04) 1.00 (0.00) 0 nan (nan) nan (nan) 19 | -------------------------------------------------------------------------------- /experiments/results_ionosphere/ionosphere_results_linf_05.txt: -------------------------------------------------------------------------------- 1 | linear: 0 0.14 (0.00) 0.00 (0.00) 0 nan (nan) nan (nan) 2 | cart: 3 1.02 (0.07) 1.30 (0.15) 0 nan (nan) nan (nan) 3 | cart: 5 1.33 (0.10) 1.60 (0.18) 0 nan (nan) nan (nan) 4 | cart: 10 3.68 (2.14) 2.95 (1.49) 0 nan (nan) nan (nan) 5 | rf: 5 2.27 (0.19) 2.35 (0.23) 0 nan (nan) nan (nan) 6 | rf: 10 7.41 (1.22) 5.15 (0.67) 0 nan (nan) nan (nan) 7 | rf: 20 47.44 (26.35) 9.50 (2.52) 0 nan (nan) nan (nan) 8 | rf: 50 307.72 (72.52) 19.31 (3.15) 7 0.044 (0.002) 34.57 (4.54) 9 | rf: 100 156.45 (116.11) 5.75 (2.29) 16 0.029 (0.004) 20.69 (1.64) 10 | gbm: 5 1.97 (0.32) 2.65 (0.50) 0 nan (nan) nan (nan) 11 | gbm: 10 11.76 (6.51) 8.85 (3.27) 0 nan (nan) nan (nan) 12 | gbm: 20 19.20 (6.32) 9.45 (1.80) 0 nan (nan) nan (nan) 13 | gbm: 50 139.18 (56.80) 16.31 (3.03) 7 0.023 (0.005) 41.43 (9.01) 14 | gbm: 100 537.13 (295.04) 15.00 (2.08) 17 0.022 (0.003) 22.29 (1.79) 15 | mlp: (10,) 2.96 (0.23) 3.00 (0.25) 0 nan (nan) nan (nan) 16 | mlp: (50,) 19.06 (6.63) 2.37 (0.24) 1 0.049 (nan) 3.00 (nan) 17 | mlp: (10, 10, 10) 229.69 (104.80) 4.90 (0.67) 10 0.039 (0.005) 10.30 (2.68) 18 | mlp: (100,) 289.62 (125.61) 2.70 (0.30) 10 0.000 (0.000) 0.60 (0.16) 19 | -------------------------------------------------------------------------------- /experiments/results_diabetes/diabetes_results_l2.txt: -------------------------------------------------------------------------------- 1 | linear: 0 0.34 (0.02) 0.00 (0.00) 0 nan (nan) nan (nan) 2 | cart: 3 6.71 (0.65) 2.40 (0.24) 0 nan (nan) nan (nan) 3 | cart: 5 20.28 (2.88) 5.00 (0.68) 1 0.000 (nan) 0.00 (nan) 4 | cart: 10 178.35 (34.48) 9.11 (1.30) 1 0.045 (nan) 21.00 (nan) 5 | rf: 5 117.95 (28.96) 9.63 (1.82) 1 0.049 (nan) 34.00 (nan) 6 | rf: 10 200.39 (69.46) 12.92 (2.93) 7 0.043 (0.003) 29.29 (5.04) 7 | rf: 20 149.39 (46.57) 12.75 (2.78) 12 0.042 (0.002) 33.42 (4.34) 8 | rf: 50 560.78 (nan) 6.00 (nan) 19 0.031 (0.003) 12.21 (0.99) 9 | rf: 100 868.10 (nan) 4.00 (nan) 19 0.024 (0.004) 7.89 (0.72) 10 | gbm: 5 8.89 (1.29) 3.68 (0.53) 1 0.000 (nan) 0.00 (nan) 11 | gbm: 10 46.46 (14.33) 12.00 (3.12) 0 nan (nan) nan (nan) 12 | gbm: 20 153.43 (43.11) 18.76 (3.73) 3 0.036 (0.009) 75.33 (8.41) 13 | gbm: 50 243.24 (80.72) 24.50 (6.31) 14 0.029 (0.003) 53.07 (5.33) 14 | gbm: 100 nan (nan) nan (nan) 20 0.020 (0.003) 23.90 (2.03) 15 | mlp: (10,) 4.13 (0.13) 1.00 (0.00) 0 nan (nan) nan (nan) 16 | mlp: (10, 10, 10) 8.02 (0.86) 1.31 (0.18) 4 0.024 (0.014) 0.75 (0.48) 17 | mlp: (50,) 11.27 (1.10) 1.50 (0.15) 0 nan (nan) nan (nan) 18 | mlp: (100,) 26.82 (3.39) 1.30 (0.13) 0 nan (nan) nan (nan) 19 | -------------------------------------------------------------------------------- /experiments/results_ionosphere/ionosphere_results_l2_05.txt: -------------------------------------------------------------------------------- 1 | linear: 0 0.26 (0.01) 0.00 (0.00) 0 nan (nan) nan (nan) 2 | cart: 3 4.40 (1.35) 2.60 (0.90) 0 nan (nan) nan (nan) 3 | cart: 5 5.91 (1.09) 2.26 (0.40) 1 0.000 (nan) 4.00 (nan) 4 | cart: 10 20.91 (10.20) 5.05 (1.51) 0 nan (nan) nan (nan) 5 | rf: 5 28.79 (2.76) 6.35 (0.51) 0 nan (nan) nan (nan) 6 | rf: 10 232.63 (36.46) 10.79 (1.36) 1 0.042 (nan) 40.00 (nan) 7 | rf: 20 450.46 (68.06) 8.50 (1.06) 10 0.027 (0.005) 16.00 (0.73) 8 | 
rf: 50 510.69 (341.34) 7.50 (3.50) 18 0.021 (0.004) 13.94 (1.04) 9 | rf: 100 495.03 (268.88) 9.00 (2.08) 17 0.014 (0.004) 16.53 (0.93) 10 | gbm: 5 11.38 (1.83) 5.74 (0.68) 1 0.029 (nan) 4.00 (nan) 11 | gbm: 10 72.15 (15.31) 15.19 (2.62) 4 0.005 (0.005) 23.00 (3.24) 12 | gbm: 20 156.17 (19.16) 16.42 (1.71) 8 0.005 (0.003) 31.62 (5.35) 13 | gbm: 50 894.23 (100.35) 32.50 (6.50) 18 0.013 (0.004) 18.89 (1.17) 14 | gbm: 100 nan (nan) nan (nan) 20 0.010 (0.002) 17.25 (1.46) 15 | mlp: (10,) 36.34 (5.22) 1.68 (0.19) 1 0.000 (nan) 12.00 (nan) 16 | mlp: (10, 10, 10) 328.70 (72.59) 2.38 (0.35) 4 0.012 (0.007) 1.50 (0.65) 17 | mlp: (50,) 57.07 (7.28) 1.22 (0.10) 2 0.000 (0.000) 17.50 (12.50) 18 | mlp: (100,) 111.85 (26.18) 1.44 (0.24) 11 0.001 (0.001) 1.09 (0.41) 19 | -------------------------------------------------------------------------------- /experiments/results_ionosphere/ionosphere_results_l2_01.txt: -------------------------------------------------------------------------------- 1 | linear: 0 0.26 (0.01) 0.00 (0.00) 0 nan (nan) nan (nan) 2 | cart: 3 5.54 (0.38) 1.20 (0.09) 0 nan (nan) nan (nan) 3 | cart: 5 10.17 (0.78) 1.56 (0.16) 4 0.000 (0.000) 0.25 (0.25) 4 | cart: 10 21.75 (2.58) 2.06 (0.26) 2 0.000 (0.000) 2.00 (1.00) 5 | rf: 5 69.94 (10.77) 4.41 (0.54) 3 0.006 (0.003) 4.67 (0.33) 6 | rf: 10 237.03 (41.12) 5.50 (0.70) 4 0.003 (0.003) 10.50 (1.85) 7 | rf: 20 370.21 (41.84) 7.33 (0.73) 5 0.004 (0.002) 10.80 (1.98) 8 | rf: 50 570.88 (111.94) 9.70 (1.61) 10 0.001 (0.001) 11.60 (1.10) 9 | rf: 100 531.13 (121.00) 6.17 (0.98) 14 0.001 (0.001) 12.79 (0.43) 10 | gbm: 5 11.47 (1.48) 4.42 (0.66) 8 0.002 (0.001) 4.62 (1.99) 11 | gbm: 10 42.68 (6.67) 6.88 (0.86) 3 0.001 (0.001) 1.67 (0.67) 12 | gbm: 20 58.16 (7.01) 9.62 (1.00) 4 0.004 (0.002) 24.75 (9.71) 13 | gbm: 50 192.70 (42.52) 14.77 (2.47) 7 0.006 (0.001) 31.14 (4.21) 14 | gbm: 100 415.96 (80.09) 17.29 (3.36) 13 0.003 (0.001) 25.08 (1.20) 15 | mlp: (10,) 4.73 (0.57) 1.27 (0.19) 9 0.004 (0.001) 1.00 (0.41) 16 | mlp: (10, 10, 10) 282.24 (193.65) 1.50 (0.50) 18 0.003 (0.001) 0.61 (0.20) 17 | mlp: (50,) 48.13 (12.76) 2.40 (0.51) 15 0.008 (0.001) 1.07 (0.15) 18 | mlp: (100,) 352.64 (153.96) 1.09 (0.09) 9 0.000 (0.000) 0.44 (0.18) 19 | -------------------------------------------------------------------------------- /experiments/Datasets.py: -------------------------------------------------------------------------------- 1 | # # -*- coding: utf-8 -*- 2 | 3 | import pandas as pd 4 | 5 | ### CLASSIFICATION 6 | 7 | def banknote(wd): # Two classes 8 | """ 9 | Attribute Information: 10 | 1. variance of Wavelet Transformed image (continuous) 11 | 2. skewness of Wavelet Transformed image (continuous) 12 | 3. curtosis of Wavelet Transformed image (continuous) 13 | 4. entropy of image (continuous) 14 | 5. class (integer) 15 | """ 16 | df = pd.read_csv(wd+'data_banknote_authentication.csv', header = None) 17 | df.columns = ['X_' + str(i) for i in range(len(df.columns)-1)] + ['Outcome'] 18 | return df 19 | 20 | 21 | def ionosphere(wd): # Two classes 22 | """ 23 | Attribute Information: 24 | 25 | -- All 34 are continuous 26 | -- The 35th attribute is either "good" or "bad" 27 | according to the definition summarized above. 28 | This is a binary classification task. 
29 | """ 30 | df = pd.read_csv(wd+'ionosphere.csv', header = None) 31 | df.iloc[:,-1] = (df.iloc[:,-1] == 'g')*1 32 | df.columns = ['X_' + str(i) for i in range(len(df.columns)-1)] + ['Outcome'] 33 | return df 34 | 35 | def wdbc(wd): # Two classes 36 | """ 37 | 1) ID number 38 | 2) Diagnosis (M = malignant, B = benign) 39 | 3-32) 40 | Ten real-valued features are computed for each cell nucleus: 41 | a) radius (mean of distances from center to points on the perimeter) 42 | b) texture (standard deviation of gray-scale values) 43 | c) perimeter 44 | d) area 45 | e) smoothness (local variation in radius lengths) 46 | f) compactness (perimeter^2 / area - 1.0) 47 | g) concavity (severity of concave portions of the contour) 48 | h) concave points (number of concave portions of the contour) 49 | i) symmetry 50 | j) fractal dimension ("coastline approximation" - 1) 51 | """ 52 | df = pd.read_csv(wd+'wdbc.csv', header = None, index_col = 0) 53 | df.columns = ['y'] + ['X_' + str(i) for i in range(len(df.columns)-1)] 54 | y = (df['y'] == 'M')*1 55 | df.drop('y', axis=1, inplace = True) 56 | df['Outcome'] = y 57 | return df 58 | 59 | 60 | def diabetes(wd): # Two classes - Imbalanced 61 | """ 62 | Attribute Information: 63 | 1. Number of times pregnant 64 | 2. Plasma glucose concentration a 2 hours in an oral glucose tolerance test 65 | 3. Diastolic blood pressure (mm Hg) 66 | 4. Triceps skin fold thickness (mm) 67 | 5. 2-Hour serum insulin (mu U/ml) 68 | 6. Body mass index (weight in kg/(height in m)^2) 69 | 7. Diabetes pedigree function 70 | 8. Age (years) 71 | 9. Class variable (0 or 1) 72 | """ 73 | import pandas as pd 74 | df = pd.read_csv(wd+'diabetes.csv') 75 | df.columns = ['X_' + str(i) for i in range(len(df.columns)-1)] + ['Outcome'] 76 | return df 77 | 78 | 79 | def phoneme(wd): # Two classes - Imbalanced 80 | """ 81 | Attribute Information: 82 | Five different attributes were chosen to 83 | characterize each vowel: they are the amplitudes of the five first 84 | harmonics AHi, normalised by the total energy Ene (integrated on all the 85 | frequencies): AHi/Ene. Each harmonic is signed: positive when it 86 | corresponds to a local maximum of the spectrum and negative otherwise. 87 | 6. Class (0 and 1) 88 | """ 89 | import pandas as pd 90 | df = pd.read_csv(wd+'phoneme.csv', header = None) 91 | df.columns = ['X_' + str(i) for i in range(len(df.columns)-1)] + ['Outcome'] 92 | return df 93 | 94 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Robust Counterfactual Explanations 2 | RCE is a Python package for calculating robust counterfactual explanations (CE) for data-driven classification models. CEs play a crucial role in detecting biases and improving the explainability of these models, but most known methods can only provide a single CE which may not be practical for real-world use. Our package uses algorithmic ideas from robust optimization to provide a whole region of optimal CEs, so that the user can select the most reasonable one. Our method is proven to converge for popular ML methods such as logistic regression, decision trees, random forests, and neural networks. 3 | 4 | The full methodology is detailed in [our manuscript](https://arxiv.org/pdf/2301.11113v1.pdf) __Finding Regions of Counterfactual Explanations via Robust Optimization__. 
See the [slides](https://github.com/donato-maragno/robust-CE/blob/main/slides/Finding_Regions_of_Counterfactual_Explanations_via_Robust_Optimization.pdf) for a more visual explanation of our approach.
5 | 
6 | ![](https://github.com/donato-maragno/robust-CE/blob/main/slides/Figures/git.gif)
7 | ## How to install RCE
8 | You can install the RCE package locally by cloning the repository and running ```pip install .``` within the home directory of the repo. This will allow you to load `rce` in Python; see the example notebooks for specific usage of the functions.
9 | 
10 | ## How to use RCE
11 | RCE can generate robust counterfactual explanations for logistic regression, decision trees, random forests, gradient boosting machines, and neural networks with ReLU activation functions. The predictive models must be trained using the ```sklearn``` library.
12 | 
13 | ```python
14 | import rce
15 | # train the classifier
16 | clf_type = 'cart' # supported clf types: ['cart', 'linear', 'rf', 'gbm', 'mlp']
17 | clf = DecisionTreeClassifier(max_depth=5).fit(X_train, y_train)
18 | # define the factual instance
19 | u = pd.DataFrame([X_test.iloc[0, :]])
20 | # use rce to generate robust counterfactual explanations. rce_sol is the robust counterfactual explanation.
21 | '''
22 | save_path: (str) path where the clf tables will be saved
23 | task: (str) task of the ML model; binary or continuous (only binary is supported at the moment)
24 | u: (DataFrame) factual instance
25 | F: (list) features
26 | F_b: (list) binary features
27 | F_int: (list) integer features
28 | F_coh: (dict) categorical features (one hot encoded)
29 | I: (list) immutable features
30 | L: (list) 'larger than' features
31 | P: (list) positive features
32 | rho: (float) dimension of the uncertainty set
33 | unc_type: (str) shape of the robust CE; supported: 'l2' (ball) or 'linf' (box)
34 | iterative: (bool) if true, the robust CE can overlap multiple leaves; otherwise it is fully contained in one leaf. It must be true for 'mlp'
35 | '''
36 | final_model, num_iterations, comp_time, rce_sol, solutions_master = rce.generate(clf, X_train, y_train, save_path, clf_type, task, u, F, F_b, F_int, F_coh, I, L, P, rho, unc_type=unc_type, iterative=True)
37 | ```
38 | A fully runnable version of this example is sketched in the quick-start section at the end of this README.
39 | ## Citation
40 | Our software can be cited as:
41 | ````
42 | @misc{Maragno.2023,
43 | doi = {10.48550/ARXIV.2301.11113},
44 | 
45 | url = {https://arxiv.org/abs/2301.11113},
46 | 
47 | author = {Maragno, Donato and Kurtz, Jannis and Röber, Tabea E. and Goedhart, Rob and Birbil, Ş. Ilker and den Hertog, Dick},
48 | 
49 | title = {Finding Regions of Counterfactual Explanations via Robust Optimization},
50 | 
51 | publisher = {arXiv},
52 | 
53 | year = {2023}
54 | }
55 | ````
56 | 
57 | ## Get in touch!
58 | Our package is under active development. We welcome any questions or suggestions. Please submit an issue on GitHub, or reach us at d.maragno@uva.nl.
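## Quick start
As a complement to the usage section above, here is a minimal end-to-end sketch. It is illustrative rather than part of the package: the use of the bundled `data/diabetes.csv`, the column renaming, the train/test split, and the output directory are assumptions of this example, while the `rce.generate` call and its argument order follow the usage section above.

```python
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
from sklearn.tree import DecisionTreeClassifier
import rce

# Load and min-max scale the bundled diabetes dataset (illustrative path).
df = pd.read_csv('data/diabetes.csv')
df.columns = ['X_%d' % i for i in range(df.shape[1] - 1)] + ['Outcome']
df = pd.DataFrame(MinMaxScaler().fit_transform(df), columns=df.columns)
X, y = df.iloc[:, :-1], df['Outcome']
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)

# Train a supported classifier and pick a factual instance to explain.
clf = DecisionTreeClassifier(max_depth=5).fit(X_train, y_train)
u = pd.DataFrame([X_test.iloc[0, :]])

# All features treated as continuous and mutable; box-shaped ('linf') region of radius rho = 0.05.
final_model, num_iterations, comp_time, rce_sol, solutions_master = rce.generate(
    clf, X_train, y_train, './results_diabetes', 'cart', 'binary', u,
    list(u.columns), [], [], {}, [], [], [], 0.05,
    unc_type='linf', iterative=True)
print(rce_sol)  # the robust counterfactual explanation
```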
59 | -------------------------------------------------------------------------------- /experiments/results_banknote/cart/model.csv: -------------------------------------------------------------------------------- 1 | ID,node_ID,X_0,X_1,X_2,X_3,threshold,prediction 2 | 1,0,1,0.0,0.0,0.0,0.5309236347675323,1.0 3 | 1,1,0.0,1.0,0.0,0.0,0.7984523773193359,1.0 4 | 1,2,1,0.0,0.0,0.0,0.47876597940921783,1.0 5 | 1,3,0.0,0.0,1.0,0.0,0.495606005191803,1.0 6 | 1,4,0.0,1.0,0.0,0.0,0.7882688939571381,1.0 7 | 2,0,1,0.0,0.0,0.0,0.5309236347675323,0.0 8 | 2,1,0.0,1.0,0.0,0.0,0.7984523773193359,0.0 9 | 2,2,1,0.0,0.0,0.0,0.47876597940921783,0.0 10 | 2,3,0.0,0.0,1.0,0.0,0.495606005191803,0.0 11 | 2,4,0.0,-1.0,0.0,0.0,-0.788278893957138,0.0 12 | 2,6,0.0,1.0,0.0,0.0,0.7903605997562408,0.0 13 | 3,0,1,0.0,0.0,0.0,0.5309236347675323,1.0 14 | 3,1,0.0,1.0,0.0,0.0,0.7984523773193359,1.0 15 | 3,2,1,0.0,0.0,0.0,0.47876597940921783,1.0 16 | 3,3,0.0,0.0,1.0,0.0,0.495606005191803,1.0 17 | 3,4,0.0,-1.0,0.0,0.0,-0.788278893957138,1.0 18 | 3,6,0.0,-1.0,0.0,0.0,-0.7903705997562408,1.0 19 | 4,0,1,0.0,0.0,0.0,0.5309236347675323,1.0 20 | 4,1,0.0,1.0,0.0,0.0,0.7984523773193359,1.0 21 | 4,2,1,0.0,0.0,0.0,0.47876597940921783,1.0 22 | 4,3,0.0,0.0,-1.0,0.0,-0.495616005191803,1.0 23 | 4,9,0.0,1.0,0.0,0.0,0.3404565900564194,1.0 24 | 5,0,1,0.0,0.0,0.0,0.5309236347675323,1.0 25 | 5,1,0.0,1.0,0.0,0.0,0.7984523773193359,1.0 26 | 5,2,1,0.0,0.0,0.0,0.47876597940921783,1.0 27 | 5,3,0.0,0.0,-1.0,0.0,-0.495616005191803,1.0 28 | 5,9,0.0,-1.0,0.0,0.0,-0.3404665900564194,1.0 29 | 5,11,0.0,1.0,0.0,0.0,0.4045283943414688,1.0 30 | 5,12,0.0,0.0,1.0,0.0,0.5465052723884583,1.0 31 | 6,0,1,0.0,0.0,0.0,0.5309236347675323,0.0 32 | 6,1,0.0,1.0,0.0,0.0,0.7984523773193359,0.0 33 | 6,2,1,0.0,0.0,0.0,0.47876597940921783,0.0 34 | 6,3,0.0,0.0,-1.0,0.0,-0.495616005191803,0.0 35 | 6,9,0.0,-1.0,0.0,0.0,-0.3404665900564194,0.0 36 | 6,11,0.0,1.0,0.0,0.0,0.4045283943414688,0.0 37 | 6,12,0.0,0.0,-1.0,0.0,-0.5465152723884582,0.0 38 | 7,0,1,0.0,0.0,0.0,0.5309236347675323,0.0 39 | 7,1,0.0,1.0,0.0,0.0,0.7984523773193359,0.0 40 | 7,2,1,0.0,0.0,0.0,0.47876597940921783,0.0 41 | 7,3,0.0,0.0,-1.0,0.0,-0.495616005191803,0.0 42 | 7,9,0.0,-1.0,0.0,0.0,-0.3404665900564194,0.0 43 | 7,11,0.0,-1.0,0.0,0.0,-0.4045383943414688,0.0 44 | 8,0,1,0.0,0.0,0.0,0.5309236347675323,1.0 45 | 8,1,0.0,1.0,0.0,0.0,0.7984523773193359,1.0 46 | 8,2,-1,0.0,0.0,0.0,-0.47877597940921784,1.0 47 | 8,16,0.0,1.0,0.0,0.0,0.7194337248802185,1.0 48 | 8,17,0.0,0.0,1.0,0.0,0.3407822996377945,1.0 49 | 9,0,1,0.0,0.0,0.0,0.5309236347675323,0.0 50 | 9,1,0.0,1.0,0.0,0.0,0.7984523773193359,0.0 51 | 9,2,-1,0.0,0.0,0.0,-0.47877597940921784,0.0 52 | 9,16,0.0,1.0,0.0,0.0,0.7194337248802185,0.0 53 | 9,17,0.0,0.0,-1.0,0.0,-0.3407922996377945,0.0 54 | 9,19,0.0,0.0,0.0,1.0,0.8889522552490234,0.0 55 | 10,0,1,0.0,0.0,0.0,0.5309236347675323,1.0 56 | 10,1,0.0,1.0,0.0,0.0,0.7984523773193359,1.0 57 | 10,2,-1,0.0,0.0,0.0,-0.47877597940921784,1.0 58 | 10,16,0.0,1.0,0.0,0.0,0.7194337248802185,1.0 59 | 10,17,0.0,0.0,-1.0,0.0,-0.3407922996377945,1.0 60 | 10,19,0.0,0.0,0.0,-1.0,-0.8889622552490234,1.0 61 | 11,0,1,0.0,0.0,0.0,0.5309236347675323,0.0 62 | 11,1,0.0,1.0,0.0,0.0,0.7984523773193359,0.0 63 | 11,2,-1,0.0,0.0,0.0,-0.47877597940921784,0.0 64 | 11,16,0.0,-1.0,0.0,0.0,-0.7194437248802185,0.0 65 | 12,0,1,0.0,0.0,0.0,0.5309236347675323,1.0 66 | 12,1,0.0,-1.0,0.0,0.0,-0.7984623773193359,1.0 67 | 12,23,1,0.0,0.0,0.0,0.16702362895011902,1.0 68 | 13,0,1,0.0,0.0,0.0,0.5309236347675323,0.0 69 | 13,1,0.0,-1.0,0.0,0.0,-0.7984623773193359,0.0 70 | 
13,23,-1,0.0,0.0,0.0,-0.16703362895011903,0.0 71 | 14,0,-1,0.0,0.0,0.0,-0.5309336347675323,1.0 72 | 14,26,0.0,0.0,1.0,0.0,0.0387726966291666,1.0 73 | 14,27,1,0.0,0.0,0.0,0.7461040318012238,1.0 74 | 15,0,-1,0.0,0.0,0.0,-0.5309336347675323,0.0 75 | 15,26,0.0,0.0,1.0,0.0,0.0387726966291666,0.0 76 | 15,27,-1,0.0,0.0,0.0,-0.7461140318012237,0.0 77 | 16,0,-1,0.0,0.0,0.0,-0.5309336347675323,1.0 78 | 16,26,0.0,0.0,-1.0,0.0,-0.038782696629166606,1.0 79 | 16,30,1,0.0,0.0,0.0,0.6226553916931152,1.0 80 | 16,31,0.0,0.0,1.0,0.0,0.12983392924070358,1.0 81 | 16,32,0.0,1.0,0.0,0.0,0.7274094820022583,1.0 82 | 17,0,-1,0.0,0.0,0.0,-0.5309336347675323,0.0 83 | 17,26,0.0,0.0,-1.0,0.0,-0.038782696629166606,0.0 84 | 17,30,1,0.0,0.0,0.0,0.6226553916931152,0.0 85 | 17,31,0.0,0.0,1.0,0.0,0.12983392924070358,0.0 86 | 17,32,0.0,-1.0,0.0,0.0,-0.7274194820022583,0.0 87 | 18,0,-1,0.0,0.0,0.0,-0.5309336347675323,0.0 88 | 18,26,0.0,0.0,-1.0,0.0,-0.038782696629166606,0.0 89 | 18,30,1,0.0,0.0,0.0,0.6226553916931152,0.0 90 | 18,31,0.0,0.0,-1.0,0.0,-0.1298439292407036,0.0 91 | 18,35,0.0,0.0,0.0,1.0,0.7847169935703278,0.0 92 | 18,36,1,0.0,0.0,0.0,0.5381246209144592,0.0 93 | 18,37,1,0.0,0.0,0.0,0.5367432534694672,0.0 94 | 19,0,-1,0.0,0.0,0.0,-0.5309336347675323,1.0 95 | 19,26,0.0,0.0,-1.0,0.0,-0.038782696629166606,1.0 96 | 19,30,1,0.0,0.0,0.0,0.6226553916931152,1.0 97 | 19,31,0.0,0.0,-1.0,0.0,-0.1298439292407036,1.0 98 | 19,35,0.0,0.0,0.0,1.0,0.7847169935703278,1.0 99 | 19,36,1,0.0,0.0,0.0,0.5381246209144592,1.0 100 | 19,37,-1,0.0,0.0,0.0,-0.5367532534694671,1.0 101 | 20,0,-1,0.0,0.0,0.0,-0.5309336347675323,0.0 102 | 20,26,0.0,0.0,-1.0,0.0,-0.038782696629166606,0.0 103 | 20,30,1,0.0,0.0,0.0,0.6226553916931152,0.0 104 | 20,31,0.0,0.0,-1.0,0.0,-0.1298439292407036,0.0 105 | 20,35,0.0,0.0,0.0,1.0,0.7847169935703278,0.0 106 | 20,36,-1,0.0,0.0,0.0,-0.5381346209144592,0.0 107 | 21,0,-1,0.0,0.0,0.0,-0.5309336347675323,1.0 108 | 21,26,0.0,0.0,-1.0,0.0,-0.038782696629166606,1.0 109 | 21,30,1,0.0,0.0,0.0,0.6226553916931152,1.0 110 | 21,31,0.0,0.0,-1.0,0.0,-0.1298439292407036,1.0 111 | 21,35,0.0,0.0,0.0,-1.0,-0.7847269935703277,1.0 112 | 21,41,0.0,0.0,1.0,0.0,0.3075430244207382,1.0 113 | 21,42,0.0,1.0,0.0,0.0,0.6485489308834076,1.0 114 | 22,0,-1,0.0,0.0,0.0,-0.5309336347675323,0.0 115 | 22,26,0.0,0.0,-1.0,0.0,-0.038782696629166606,0.0 116 | 22,30,1,0.0,0.0,0.0,0.6226553916931152,0.0 117 | 22,31,0.0,0.0,-1.0,0.0,-0.1298439292407036,0.0 118 | 22,35,0.0,0.0,0.0,-1.0,-0.7847269935703277,0.0 119 | 22,41,0.0,0.0,1.0,0.0,0.3075430244207382,0.0 120 | 22,42,0.0,-1.0,0.0,0.0,-0.6485589308834075,0.0 121 | 23,0,-1,0.0,0.0,0.0,-0.5309336347675323,0.0 122 | 23,26,0.0,0.0,-1.0,0.0,-0.038782696629166606,0.0 123 | 23,30,1,0.0,0.0,0.0,0.6226553916931152,0.0 124 | 23,31,0.0,0.0,-1.0,0.0,-0.1298439292407036,0.0 125 | 23,35,0.0,0.0,0.0,-1.0,-0.7847269935703277,0.0 126 | 23,41,0.0,0.0,-1.0,0.0,-0.30755302442073823,0.0 127 | 24,0,-1,0.0,0.0,0.0,-0.5309336347675323,0.0 128 | 24,26,0.0,0.0,-1.0,0.0,-0.038782696629166606,0.0 129 | 24,30,-1,0.0,0.0,0.0,-0.6226653916931152,0.0 130 | 24,46,1,0.0,0.0,0.0,0.6546993255615234,0.0 131 | 24,47,0.0,0.0,1.0,0.0,0.11362999677658081,0.0 132 | 24,48,0.0,0.0,0.0,1.0,0.6139329075813293,0.0 133 | 25,0,-1,0.0,0.0,0.0,-0.5309336347675323,1.0 134 | 25,26,0.0,0.0,-1.0,0.0,-0.038782696629166606,1.0 135 | 25,30,-1,0.0,0.0,0.0,-0.6226653916931152,1.0 136 | 25,46,1,0.0,0.0,0.0,0.6546993255615234,1.0 137 | 25,47,0.0,0.0,1.0,0.0,0.11362999677658081,1.0 138 | 25,48,0.0,0.0,0.0,-1.0,-0.6139429075813293,1.0 139 | 
26,0,-1,0.0,0.0,0.0,-0.5309336347675323,0.0
140 | 26,26,0.0,0.0,-1.0,0.0,-0.038782696629166606,0.0
141 | 26,30,-1,0.0,0.0,0.0,-0.6226653916931152,0.0
142 | 26,46,1,0.0,0.0,0.0,0.6546993255615234,0.0
143 | 26,47,0.0,0.0,-1.0,0.0,-0.1136399967765808,0.0
144 | 27,0,-1,0.0,0.0,0.0,-0.5309336347675323,0.0
145 | 27,26,0.0,0.0,-1.0,0.0,-0.038782696629166606,0.0
146 | 27,30,-1,0.0,0.0,0.0,-0.6226653916931152,0.0
147 | 27,46,-1,0.0,0.0,0.0,-0.6547093255615234,0.0
148 | 
--------------------------------------------------------------------------------
/experiments/experiments.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "id": "661d3e50",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "import pandas as pd\n",
11 | "import numpy as np\n",
12 | "import os\n",
13 | "from sklearn.tree import DecisionTreeClassifier\n",
14 | "from sklearn.preprocessing import MinMaxScaler\n",
15 | "from sklearn.linear_model import LogisticRegression\n",
16 | "from sklearn.neural_network import MLPClassifier\n",
17 | "from sklearn.ensemble import RandomForestClassifier, GradientBoostingClassifier\n",
18 | "import rce\n",
19 | "from scipy.stats import sem\n",
20 | "import Datasets as DS"
21 | ]
22 | },
23 | {
24 | "cell_type": "markdown",
25 | "id": "00605510",
26 | "metadata": {},
27 | "source": [
28 | "## User preferences\n",
29 | "\n",
30 | "Here, the user can choose the dataset (`'banknote'`, `'diabetes'`, or `'ionosphere'`), the number of factual instances, the time limit (in seconds), as well as the uncertainty set (`'l2'` or `'linf'`) and rho. Furthermore, the models that should be fit to the data can be specified in a dictionary."
31 | ]
32 | },
33 | {
34 | "cell_type": "code",
35 | "execution_count": null,
36 | "id": "4a3e804b",
37 | "metadata": {
38 | "scrolled": true
39 | },
40 | "outputs": [],
41 | "source": [
42 | "dataset_name = 'banknote' # or 'diabetes' or 'ionosphere'\n",
43 | "num_instances = 20\n",
44 | "time_limit = 1000\n",
45 | "rho = 0.05\n",
46 | "unc_type = 'linf' # or 'l2'\n",
47 | "\n",
48 | "clf_dict = {'linear': [0], 'cart': [3, 5, 10], 'rf': [5, 10, 20, 50, 100],\n",
49 | " 'gbm': [5, 10, 20, 50, 100], 'mlp': [(10,), (10, 10, 10), (50,), (100,)]}"
50 | ]
51 | },
52 | {
53 | "cell_type": "markdown",
54 | "id": "5fc4d32c",
55 | "metadata": {},
56 | "source": [
57 | "## Load dataset"
58 | ]
59 | },
60 | {
61 | "cell_type": "code",
62 | "execution_count": null,
63 | "id": "cbf02000",
64 | "metadata": {},
65 | "outputs": [],
66 | "source": [
67 | "data = getattr(DS,dataset_name)\n",
68 | "df = data('../data/')\n",
69 | "\n",
70 | "scaler = MinMaxScaler()\n",
71 | "df = pd.DataFrame(scaler.fit_transform(df), columns = df.columns)\n",
72 | "X = df.iloc[:,:-1]\n",
73 | "y = df['Outcome']\n",
74 | "df.head()"
75 | ]
76 | },
77 | {
78 | "cell_type": "markdown",
79 | "id": "b2b07454",
80 | "metadata": {},
81 | "source": [
82 | "## Robust counterfactual explanation"
83 | ]
84 | },
85 | {
86 | "cell_type": "code",
87 | "execution_count": null,
88 | "id": "36a26b00",
89 | "metadata": {
90 | "scrolled": true
91 | },
92 | "outputs": [],
93 | "source": [
94 | "fnamefull = './results_%s/%s_results_%s.txt' % (dataset_name,dataset_name,unc_type)\n",
95 | "\n",
96 | "num_iterations_dict = {(i, j):[] for i in clf_dict.keys() for j in clf_dict[i]}\n",
97 | "comp_time_dict = {(i, j):[] for i in clf_dict.keys() for j in clf_dict[i]}\n",
98 | "dist_early_stops = {(i, j):[] for i in 
clf_dict.keys() for j in clf_dict[i]}\n", 99 | "early_stops_iter = {(i, j):[] for i in clf_dict.keys() for j in clf_dict[i]}\n", 100 | "\n", 101 | "\n", 102 | "for clf_type in clf_dict.keys():\n", 103 | " for param in clf_dict[clf_type]:\n", 104 | "\n", 105 | " if clf_type == 'cart':\n", 106 | " clf = DecisionTreeClassifier(max_depth=param).fit(X, y)\n", 107 | " elif clf_type == 'rf':\n", 108 | " clf = RandomForestClassifier(max_depth=3, random_state=0, n_estimators=param).fit(X, y)\n", 109 | " elif clf_type == 'mlp':\n", 110 | " clf = MLPClassifier(hidden_layer_sizes=param, activation='relu', random_state=0, max_iter=10000).fit(X, y)\n", 111 | " elif clf_type == 'gbm':\n", 112 | " clf = GradientBoostingClassifier(n_estimators=param, learning_rate=1.0, max_depth=2, random_state=0).fit(X, y)\n", 113 | " elif clf_type == 'linear':\n", 114 | " clf = LogisticRegression(random_state=param).fit(X, y)\n", 115 | "\n", 116 | " for i in range(num_instances):\n", 117 | " print(f'######## Iteration number: {i} ########')\n", 118 | " np.random.seed(i)\n", 119 | " u = pd.DataFrame([X.iloc[i, :]])\n", 120 | " \n", 121 | " \n", 122 | " if clf_type == 'linear':\n", 123 | " it = False\n", 124 | " else:\n", 125 | " it = True\n", 126 | " \n", 127 | " final_model, num_iterations, comp_time, x_, solutions_master_dict = rce.generate(clf, X, y, '../experiments/results_%s' % dataset_name, clf_type, 'binary', u, list(u.columns), [], [], {}, [], [], [], rho,\n", 128 | " unc_type=unc_type, iterative=it, time_limit=time_limit)\n", 129 | " \n", 130 | " if x_ is not None:\n", 131 | " solution_subopt, dist = rce.find_maxrad(x_, clf_type, 'results_%s' % dataset_name, x_.columns, [], [], {}, [], [], [], clf.predict(u)[0], unc_type)\n", 132 | " if x_ is None or dist + rho/100 < rho:\n", 133 | " best_dist = 0\n", 134 | " for i in range(len(solutions_master_dict)):\n", 135 | " x_ = solutions_master_dict[i]['sol']\n", 136 | " solution_subopt_i, dist_i = rce.find_maxrad(x_, clf_type, 'results_%s' % dataset_name, x_.columns, [], [], {}, [], [], [], clf.predict(u)[0], unc_type)\n", 137 | " if dist_i >= best_dist:\n", 138 | " best_dist = dist_i\n", 139 | " print(best_dist)\n", 140 | " dist_early_stops[(clf_type, param)].append(best_dist)\n", 141 | " early_stops_iter[(clf_type, param)].append(num_iterations)\n", 142 | " print('\\n\\n@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@ ERROR @@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@\\n\\n')\n", 143 | "\n", 144 | " else:\n", 145 | " num_iterations_dict[(clf_type, param)].append(num_iterations)\n", 146 | " comp_time_dict[(clf_type, param)].append(comp_time)\n", 147 | "\n", 148 | " # write results to .txt file\n", 149 | " num_iterations_list = num_iterations_dict[(clf_type, param)] \n", 150 | " comp_time_list = comp_time_dict[(clf_type, param)] \n", 151 | " dist_early_stops_list = dist_early_stops[(clf_type,param)]\n", 152 | " early_stops_iter_list = early_stops_iter[(clf_type, param)]\n", 153 | "\n", 154 | " txt = '{0}: \\t {1} \\t {2:.2f} ({3:.2f}) \\t {4:.2f} ({5:.2f}) \\t {6} \\t {7:.3f} ({8:.3f}) \\t {9:.2f} ({10:.2f}) '.format(clf_type,\n", 155 | " param,\n", 156 | " np.mean(comp_time_list),\n", 157 | " sem(comp_time_list),\n", 158 | " np.mean(num_iterations_list), \n", 159 | " sem(num_iterations_list), \n", 160 | " len(dist_early_stops_list),\n", 161 | " np.mean(dist_early_stops_list),\n", 162 | " sem(dist_early_stops_list), \n", 163 | " np.mean(early_stops_iter_list),\n", 164 | " sem(early_stops_iter_list))\n", 165 | " with 
open(fnamefull, 'a') as f: \n", 166 | " print(txt,file=f)" 167 | ] 168 | } 169 | ], 170 | "metadata": { 171 | "kernelspec": { 172 | "display_name": "Python 3 (ipykernel)", 173 | "language": "python", 174 | "name": "python3" 175 | }, 176 | "language_info": { 177 | "codemirror_mode": { 178 | "name": "ipython", 179 | "version": 3 180 | }, 181 | "file_extension": ".py", 182 | "mimetype": "text/x-python", 183 | "name": "python", 184 | "nbconvert_exporter": "python", 185 | "pygments_lexer": "ipython3", 186 | "version": "3.9.7" 187 | } 188 | }, 189 | "nbformat": 4, 190 | "nbformat_minor": 5 191 | } 192 | -------------------------------------------------------------------------------- /experiments/results_ionosphere/mlp/model.csv: -------------------------------------------------------------------------------- 1 | intercept,layer,node,node_0,node_1,node_2,node_3,node_4,node_5,node_6,node_7,node_8,node_9,node_10,node_11,node_12,node_13,node_14,node_15,node_16,node_17,node_18,node_19,node_20,node_21,node_22,node_23,node_24,node_25,node_26,node_27,node_28,node_29,node_30,node_31,node_32,node_33 2 | -0.3492614220510625,0,0,0.22152489342827106,5.367930038246563e-07,0.5526859507405251,-0.09759879722795932,0.05554777274822498,0.027718663579639268,-0.11343604031530134,0.5388635980931376,-0.059713292662451796,-0.08762865526580574,-0.019575654941969688,0.2081596428626872,0.21823790202592966,0.17670446991708183,0.4472900330320132,-0.06115718306745897,0.26459636111049434,0.07976123747140586,-0.2833227139489252,0.060277343293831026,-0.11656448824602489,0.06716397310668644,0.0941791543073835,-0.2270027494729092,0.4495566105731034,0.04330271233867513,-0.2044680217531918,0.47139058593048955,-0.004666270561323912,0.0054386266284281405,0.32908389956134665,0.09086703199849346,-0.24172155183320523,-0.27476777473808134 3 | -0.2387099966366218,0,1,0.2971328074256647,-2.491414612801029e-27,0.4091526539908604,0.35037078356389467,0.2712403328095193,0.3147010800637672,-0.20183187514163184,0.35252692622948684,-0.15585254084093306,0.4872928189997455,-0.3560231826591652,0.5940502191127394,-0.41810058499364994,0.17421723342510437,-0.45718365075731665,0.4083694699543255,-0.27507995925022544,-0.09261875156453038,-0.16481674459844084,-0.041575738113891605,-0.2854369575345596,-0.2875979407329759,0.14148405580130669,-0.3790399817533474,-0.39394210164253457,0.22777991386434512,-0.28424574922378015,0.19435120388285887,0.18217088632035813,-0.0943993645537182,0.028592467471564464,-0.2934511717085555,-0.32420607809185265,-0.06819154946699864 4 | -0.07187133665357384,0,2,0.05909338307504338,3.533217622487415e-23,-0.057722137085061354,-0.07284791510539877,0.11478757297410326,0.3189550888789518,0.07241980141480384,0.3075667759634396,-0.3485307648571352,-0.25795168628695286,0.13431194381166808,0.10885850764015155,0.30179734835399596,-0.25560248403751,-0.06442285174194622,-0.271557983779684,0.13844799243421338,-0.2413727283008107,0.01828274369550389,-0.21663802907879964,-0.1305467442408648,0.09637909405230359,-0.2095290964304652,-0.019300249260860405,-0.025313248284299667,0.1699590146058274,-0.3841985943651747,-0.13606000262995865,0.04946030868018403,0.19881288750431786,-0.1614234987305123,-0.33056243237918176,-0.36409377973015994,-0.36807207087602567 5 | 
-0.05332943431541637,0,3,-0.011921300084778704,8.802575592538885e-05,0.049342487047791,0.3528807628308599,-0.358806259065564,-0.16128833929252537,-0.35132324708572393,0.41202944057250274,-0.04660481846837979,0.3963207578318786,0.5385577724599127,-0.10412119384932755,-0.0108452866020299,-0.018374877772383686,0.13398422079181554,0.2895899313696161,0.07247252135919799,-0.38515005186348245,-0.30261116960910867,0.4022940111559396,-0.10651709957361952,-0.01787883717572172,0.42013115931685313,-0.3381176982212176,-0.3960218477130397,0.04114943298571373,0.508085583356846,0.34628258879951185,-0.5615111054048485,-0.2872881184576752,-0.29353406985224684,-0.32820277205917264,0.06251654498171372,0.3675997441726662 6 | 0.5484328948321721,0,4,-0.28689093176497044,-9.615959545145068e-05,-0.517874002688377,-0.42152923776167484,-0.13562912586621337,0.05908671365036254,-0.3372551427305097,0.05178936473395921,0.02704435595577273,-0.06134808605795063,-0.16745881555861428,0.21382858100031618,-0.08029562039603647,0.0925014003388395,0.14637413473883928,-0.37088503670875017,0.21781154326669147,0.5392930810289017,0.37458691898851587,0.3216227982848345,-0.3429837826772922,-0.023639730532580637,-0.3886089770887749,-0.3849623343533923,0.20133477615415435,0.3024775191554723,0.28025230824439734,-0.22956826700523894,-0.26938982744116635,-0.11755001391996875,0.008703564426319366,0.3806972242128644,0.06012981304430409,0.2891229700868799 7 | -0.09171177044128287,0,5,0.03302790739413891,-6.222663358694608e-05,0.17610311441454007,0.2937690380086506,0.2514973186428581,-0.33859369850189935,-0.20460489166815138,-0.4482821224637207,-0.282340617579511,-0.2875155910655325,0.18669046006037454,0.01651603499072911,-0.012458024218296355,0.40532911890367684,0.0021888090813951015,0.05098182922200907,0.12009428973641652,-0.3849275019728815,0.09966895807088988,-0.07587178586103878,-0.5238069421752775,0.49369909622032077,0.5708054222831922,-0.031094571245118295,-0.11894974226398518,-0.4232294575420308,0.4766507186809737,0.38818524116000463,-0.3750143382904944,-0.15692077843939853,-0.2276465709442049,-0.2297812116428089,0.06755233861250115,0.35757066983011904 8 | 0.17328089842978228,0,6,-0.03727902863089048,-0.00031421645978083155,-0.34141946974547704,0.04734906544914084,-0.22738011039829192,0.21486660056655107,0.46742850377241923,0.048692773477179145,-0.018875074981794207,-0.19119267402619747,0.14078704206879555,0.24240334472985148,-0.09966231648269458,-0.40025348310342324,-0.14355844019231606,0.35847226008412164,-0.35381694619327936,-0.1456393331416723,0.19089177326814674,-0.3186746970592865,0.3271284272269551,0.013183281900421362,0.10513131311392328,0.013483762348842973,0.09778007587647636,-0.12247978755844975,-0.5192683811124497,0.3541756986966934,0.4230698776480769,-0.6017455867582366,-0.27195867893685144,0.229194110562507,0.2933734599192354,-0.06934503886494593 9 | 
0.15575154280793094,0,7,0.5040753827011818,3.861817974411201e-06,0.24306186927677484,0.4712757913939542,-0.47392883949211656,-0.29285557737661083,-0.47744626161207876,-0.540682151845019,-0.1896454113240479,-0.49700023487105516,0.09485353459000292,0.1457136068688453,-0.20942807492330257,0.3839699725859458,0.08466590757586538,-0.20576373268687584,-0.10908073985606481,0.3062396313236801,0.2534472972338742,-0.17237172368963444,-0.07859561537126321,-0.012987799521091204,-0.3280786022560089,0.12822911751336658,-0.16271929575729682,0.023576853880061314,0.33111118582259436,0.19494952172315325,0.037325025624835875,0.2927344559134391,-0.342308044192761,-0.030634632565585587,0.19745301028039855,0.01454224856509502 10 | 0.2973292774047412,0,8,0.49625698419054665,2.4756043835391475e-07,0.18402835345587204,0.5026012026497336,0.0007643542984082318,0.15735085272860685,0.42197689410980044,-0.04432635228899662,0.205942908041488,0.4898612485975493,-0.02223329218161769,0.3986492119498244,0.21068027267604614,0.3503662010488124,0.2423475903110685,0.07727852840813189,0.24686212484155767,0.2773556572552682,0.312062369375998,-0.1464375215034213,0.13695828203438104,0.22531347296897403,0.4453058094294586,-0.14495083637798026,-0.20946271502985261,0.31334071172456485,-0.18130139274605486,0.04411929697387851,0.06584400320697184,0.40522132461249544,0.3299783315826644,0.23882656990926537,0.030942942531892136,0.1230009543777851 11 | 0.16183326028462006,0,9,0.04663590345742758,1.630114263951291e-05,0.055629466108498914,0.06990576118707739,-0.028268495881840515,-0.3171913484554876,-0.19263568943449547,-0.30466520198066405,0.4055195233410886,-0.48213819709342043,0.25021714637730397,0.18490259114851268,-0.03837397424132596,-0.06643899169699227,0.5625302541551074,-0.18493410952243772,-0.03895878737607586,-0.33477990307336175,-0.05449917069913398,0.020404681434427658,0.35531732293711926,0.0807528615487488,0.19467457638765018,0.056765929361900676,0.11717744060169999,-0.005205971850979057,-0.03219097975678267,0.3490101012985932,0.21503026496890032,0.2835812009431951,-0.27993252573388294,0.08504244074429393,-0.10813533056867453,-0.19184433080601238 12 | -0.22286144561506532,1,0,2.3039152023372317e-09,-0.00022394380588515824,6.78857392408234e-07,0.0008176744404336318,-0.0041877086542315995,-1.818198061026569e-13,-0.008490763763433775,0.007383164036014664,-2.5673455759427756e-05,-9.426889468610225e-10,,,,,,,,,,,,,,,,,,,,,,,, 13 | 0.8832847737579786,1,1,-0.5408612143141271,-0.12284775777865432,-0.22112372956434445,0.4071442681278774,0.2677885770211811,1.0312196638050788,0.4332801125518264,1.1426193010441608,0.3601602820535746,0.513779115353379,,,,,,,,,,,,,,,,,,,,,,,, 14 | -0.2942966573003127,1,2,7.751182922116987e-19,5.119147873932055e-24,0.2561991117716004,0.005298518967415414,-1.0564091624817626e-05,-0.002133003206411287,-0.0038249313047823176,-1.4121343136590424e-14,-0.2945837140151527,0.4406068454163814,,,,,,,,,,,,,,,,,,,,,,,, 15 | -0.1323990941017116,1,3,-0.15192237612574894,-0.7306050277976962,5.56356038307001e-10,0.6619028722852307,0.885926213107873,1.200412611417852,0.3836375775376812,0.3366990508525333,0.17059953978611997,-0.564362221744346,,,,,,,,,,,,,,,,,,,,,,,, 16 | 0.6659488634095471,1,4,-0.19781393624331003,-0.11337319753737342,-0.251857737905538,0.7547883745888199,-0.1948963618235417,0.9064865287180615,0.3118619285575091,0.7022821642685528,-0.03298833085215161,-0.16426773715031828,,,,,,,,,,,,,,,,,,,,,,,, 17 | 
-0.011598948328543022,1,5,0.1909760987050853,-0.6559910016926671,-0.0005031423623922555,1.0521536678053116,1.035961240411136,0.7658765956775556,0.3559554053649292,0.7033840441495435,-0.055210669174402635,-0.1795546452371646,,,,,,,,,,,,,,,,,,,,,,,, 18 | -0.03351814567175712,1,6,0.10016064233479265,0.9447792710946752,0.28323244279586623,-0.17910966617034554,-0.6238743788175519,-0.8781352403050042,-1.0608854065342208,-0.1639587582025466,0.1465052489222786,0.25305711120126717,,,,,,,,,,,,,,,,,,,,,,,, 19 | -0.4967036327795312,1,7,-0.001132638270674132,-3.474918389885282e-13,0.46334577846791203,-0.0005192029850202421,-1.6652846849519598e-09,-3.845752362281743e-05,-0.008906283259857982,-0.004009416877226483,-0.23719576415495167,0.50562969251568,,,,,,,,,,,,,,,,,,,,,,,, 20 | 0.10627349782154466,1,8,0.09857193691506966,0.9852369091417955,-0.08170601846082448,-0.2536325857850612,-0.8935227612409861,-0.3737792144856293,-1.0169954953957379,-0.23552562659686685,0.3003279285381299,0.37433875497222713,,,,,,,,,,,,,,,,,,,,,,,, 21 | 0.2147076199569061,1,9,0.7647981153216614,0.6876956914975737,0.06637259209862484,-0.5202792244848088,-0.7154421085058643,-0.9981679477059356,-0.46651220257951015,-0.7544896992549449,-0.011311359247140041,0.28850083330248427,,,,,,,,,,,,,,,,,,,,,,,, 22 | 0.04645711504651256,2,0,-4.74763803625941e-06,-0.10921954326189794,-0.003645001708484596,-0.3131403460987818,-0.2711763943872912,-0.4834535599802384,0.001579471850964254,-0.007533076944910692,-0.6133271122228819,0.21809634287360416,,,,,,,,,,,,,,,,,,,,,,,, 23 | -0.14906605936257164,2,1,-3.82473775532013e-11,0.8465517087976915,-1.2084368197114882e-06,0.7376537666297784,0.38229629008404803,0.5736552860422557,-0.3848071302329716,-1.0821139130237188e-08,0.03808253364263295,-0.7670183017721612,,,,,,,,,,,,,,,,,,,,,,,, 24 | 0.669728888269211,2,2,-9.476337790340993e-11,0.4570062229981738,-0.33630562363013244,1.054759276212077,0.3608276515546925,0.9472470079240007,-0.7883867688372785,0.10761215923657122,-0.09089629449381162,-0.37087621699816464,,,,,,,,,,,,,,,,,,,,,,,, 25 | 0.49519337754097453,2,3,1.574293192751256e-05,0.6458542322639542,-0.05826332003034783,0.6005572900730582,0.6508486504631102,0.9580627997404118,-0.2273507669633989,-0.09955818601041903,-0.2512028784253603,-0.41135664259704235,,,,,,,,,,,,,,,,,,,,,,,, 26 | -0.06532091721300647,2,4,-3.0473454522801705e-05,0.28060962831322633,-0.39787370567067093,-0.5063898701311875,-0.4560639553326911,-0.501675792336726,-0.16410543626922258,-0.2661870319004278,0.8000295625199857,0.5753855402330655,,,,,,,,,,,,,,,,,,,,,,,, 27 | 0.7031927549945787,2,5,-0.0003623396695690039,0.6447171279683072,8.215928024705963e-13,0.38961335734564445,0.8485136902636324,0.940977616590433,-0.17398729722288722,-0.001524313511057651,-0.4720164354933778,-0.5907037460952573,,,,,,,,,,,,,,,,,,,,,,,, 28 | 0.16810760090940466,2,6,9.4705780941654e-23,0.5117159304045454,-0.10331873233823248,0.5864646150647664,0.7167161888757383,0.4144274985553953,-0.2474847185699724,0.07172541587929461,0.015290979771312307,-0.6149115396533333,,,,,,,,,,,,,,,,,,,,,,,, 29 | 0.39847832916373865,2,7,-4.888544896928494e-07,0.3137443921483277,0.38980192536909614,-0.6771211365436698,-0.7641214582914546,-0.8219502353101978,0.8371079710153634,0.15907086018427552,0.6338824972771268,0.26348190486358886,,,,,,,,,,,,,,,,,,,,,,,, 30 | 
0.5249429899101665,2,8,0.0004993157025206056,0.8843464012893417,0.32146760043118083,0.6220398013056042,0.30119120752481743,1.1180110886729802,-0.941122216861734,0.03362196766234202,-0.3588353459672084,-0.35947746462386754,,,,,,,,,,,,,,,,,,,,,,,,
31 | 0.4453246750980641,2,9,-3.0566917973580225e-05,-0.2540960947970624,0.03225988844298233,-0.5685004791315669,-0.33836625791518443,-0.23166202348231854,0.282107485522994,0.38213112834425267,0.11651420230112758,0.7920950420951954,,,,,,,,,,,,,,,,,,,,,,,,
32 | -0.5593066490964479,3,0,0.5529837750119421,-1.3552603468173037,-0.9618887209730068,-0.485017226729866,1.0006012214608446,-0.8050120486301467,-0.9846917311479523,0.8699752832119633,-0.5613531952682903,0.9840688522446944,,,,,,,,,,,,,,,,,,,,,,,,
33 | 
--------------------------------------------------------------------------------
/rce/utils.py:
--------------------------------------------------------------------------------
import pandas as pd
import numpy as np
# Pyomo components used by opt_data_reduction()
from pyomo.environ import (ConcreteModel, RangeSet, Param, Var, NonNegativeReals,
                           Objective, Constraint, SolverFactory, minimize)


'''
The ConstraintLearning class contains a set of functions for the
extrapolation of constraints from a trained model: Optimal Classification
Trees (with and without hyperplanes), CART, random forests, gradient
boosting, linear models, SVMs, and MLPs.
'''
class ConstraintLearning(object):
    '''
    The initialization requires:
        X: dataset without the target column
        y: target column
        learner: trained model
        algorithm: model type, one of 'rf', 'cart', 'linear', 'svm', 'mlp',
                   'gbm', 'iai', 'iai-single'.
    '''
    def __init__(self, X, y, learner, algorithm):
        if algorithm not in ['rf', 'cart', 'linear', 'svm', 'mlp', 'gbm', 'iai', 'iai-single']:
            raise ValueError("invalid algorithm!")

        self.__data = X
        self.__label = y
        self.__learner = learner
        self.__algorithm = algorithm

    def get_features_list(self):
        '''
        Returns the list of features that describe the dataset.
        '''
        return list(self.__data.columns)

    def opt_data_reduction(self, datapoint, data):
        '''
        Called by data_reduction(). It checks whether the datapoint lies in
        the convex hull of `data` by solving a feasibility LP.
        '''
        model = ConcreteModel()
        model.I = RangeSet(0, data.shape[0] - 1)  # samples
        model.J = RangeSet(0, data.shape[1] - 1)  # features

        def x_init(model, i, j):
            return data[i][j]

        def d_init(model, j):
            return datapoint[j]

        model.x = Param(model.I, model.J, initialize=x_init)
        model.d = Param(model.J, initialize=d_init)
        model.l = Var(model.I, domain=NonNegativeReals)  # convex multipliers

        def obj_function(model):
            return 1  # constant objective: this is a pure feasibility problem

        model.OBJ = Objective(rule=obj_function, sense=minimize)

        # the multipliers must form a convex combination ...
        def constraint_rule1(model):
            return sum(model.l[i] for i in model.I) == 1

        # ... that reproduces the datapoint in every coordinate
        def constraint_rule2(model, j):
            return sum(model.l[i] * model.x[i, j] for i in model.I) == model.d[j]

        model.Constraint1 = Constraint(rule=constraint_rule1)
        model.Constraint2 = Constraint(model.J, rule=constraint_rule2)

        opt = SolverFactory('cplex')
        results = opt.solve(model)

        # print('Termination condition:', results['Solver']._list[0]['Termination condition'].key)
        if results['Solver']._list[0]['Termination condition'].key != 'optimal':
            return True   # infeasible: the datapoint is NOT in the convex hull
        else:
            return False  # feasible: the datapoint is in the convex hull
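    # --- Illustrative sketch (not part of the original file): the feasibility
    # LP above decides whether a point d lies in conv{x_1, ..., x_n}, i.e.
    # whether there exist lambda_i >= 0 with sum_i lambda_i = 1 and
    # sum_i lambda_i * x_i = d. A minimal standalone version on a toy
    # triangle; 'glpk' is an assumed stand-in for any installed LP solver
    # (the class itself calls 'cplex'):
    #
    #   import numpy as np
    #   from pyomo.environ import (ConcreteModel, RangeSet, Var, Objective,
    #                              Constraint, SolverFactory, NonNegativeReals)
    #   X = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0]])  # hull vertices
    #   d = np.array([0.25, 0.25])                          # interior point
    #   m = ConcreteModel()
    #   m.I = RangeSet(0, X.shape[0] - 1)
    #   m.J = RangeSet(0, X.shape[1] - 1)
    #   m.lam = Var(m.I, domain=NonNegativeReals)
    #   m.obj = Objective(expr=sum(m.lam[i] for i in m.I))
    #   m.convex = Constraint(expr=sum(m.lam[i] for i in m.I) == 1)
    #   m.match = Constraint(m.J, rule=lambda m, j: sum(m.lam[i] * X[i, j] for i in m.I) == d[j])
    #   res = SolverFactory('glpk').solve(m)
    #   # feasible -> d is inside the hull; infeasible -> d is a hull vertex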
    def data_reduction(self):
        '''
        Deletes the samples that are not vertices of the convex hull, i.e.
        keeps only the hull's vertices.
        '''
        data_reduced = pd.DataFrame(columns=self.__data.columns)
        c = 0
        while c < self.__data.shape[0]:
            datapoint = self.__data[c:c + 1]
            # drop row c and test it against the hull of the remaining rows
            data = np.delete(self.__data.values, c, axis=0)
            result = self.opt_data_reduction(datapoint.values[0], data)
            if result:
                data_reduced = pd.concat([data_reduced, datapoint], ignore_index=True)
            c += 1
        return data_reduced

    def __leaf_nodes_iai(self, class_c):
        '''
        Returns the list of nodes corresponding to the leaves of the OCT(-H).
        '''
        leaf_nodes = []
        num_nodes = self.__learner.get_num_nodes()
        '''
        find the node_indices of the leaves
        '''
        if class_c in ['continuous', 'binary']:
            for node in range(1, num_nodes + 1):
                if self.__learner.is_leaf(node_index=node):
                    leaf_nodes.append(node)
        # else:
        #     for node in range(1, num_nodes + 1):
        #         if self.__learner.is_leaf(node_index=node):
        #             if self.__learner.get_classification_label(node) == class_c:
        #                 leaf_nodes.append(node)
        return leaf_nodes

    def constraint_extrapolation_iai(self, class_c):
        '''
        Returns a matrix with all the constraints that describe the tree,
        one set of constraints per leaf; the ID column identifies the
        reference leaf. IMPORTANT: every constraint is written in <= form.
        '''

        columns = ['ID'] + self.get_features_list() + ['threshold', 'prediction']
        leaf_nodes = self.__leaf_nodes_iai(class_c)

        '''
        Initialize the dataframe where:
            ID: identifies the set of constraints that describes one leaf
            feature columns: coefficient of each feature; a coefficient of 0
                             means the feature does not appear in the constraint
            threshold: right-hand side of the constraint.
        '''
        constraints = pd.DataFrame(columns=columns)
        ID = 1
        for leaf in leaf_nodes:
            node = leaf
            stop = False
            parent_node = self.__learner.get_parent(node_index=node)
            while not stop:
                constraint = pd.DataFrame(data=np.zeros(len(columns)).reshape(1, -1), columns=columns)
                # sign True means the inequality constraint is Ax <= b, otherwise it is Ax > b
                sign = True if self.__learner.get_lower_child(node_index=parent_node) == node else False
                threshold = self.__learner.get_split_threshold(node_index=parent_node)
                constraint['threshold'] = threshold - 0.00001 if sign else -threshold

                # .is_hyperplane_split is True if the split is a non-orthogonal hyperplane
                if self.__learner.is_hyperplane_split(node_index=parent_node):
                    dict_weights = self.__learner.get_split_weights(node_index=parent_node)
                    for key in dict_weights[0]:
                        if sign:
                            constraint[key] = dict_weights[0][key]
                        else:
                            constraint[key] = -dict_weights[0][key]
                else:
                    if sign:
                        constraint[self.__learner.get_split_feature(node_index=parent_node)] = 1
                    else:
                        constraint[self.__learner.get_split_feature(node_index=parent_node)] = -1
                constraint['ID'] = ID
                if class_c == 'continuous':
                    constraint['prediction'] = self.__learner.get_regression_constant(leaf)
                elif class_c == 'binary':
                    constraint['prediction'] = self.__learner.get_classification_proba(leaf)[1]  # assume label '1' is the class of interest
                elif class_c == 'multiclass':
                    print('Under Development')

                constraints = pd.concat([constraints, constraint], ignore_index=True)

                node = parent_node
                # stop once the root node has been reached
                if node == 1:
                    stop = True
                else:
                    parent_node = self.__learner.get_parent(node_index=node)
            ID += 1

        return constraints
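    # --- Worked mini-example (illustrative, hypothetical split): with the
    # <=-only encoding above, a single orthogonal split "X_2 <= 0.5" yields,
    # for the lower (left) leaf, a row with coefficient +1 on X_2 and
    # threshold 0.5 - 0.00001 = 0.49999, and, for the upper (right) leaf, a
    # row with coefficient -1 on X_2 and threshold -0.5 (i.e. X_2 > 0.5
    # rewritten as -X_2 <= -0.5). Rows sharing the same ID jointly describe
    # one leaf's polyhedron.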
    def __find_path_skTree(self, node_numb, path, leaf, children_left, children_right):
        '''
        Finds the path of nodes that is visited before reaching a leaf.
        '''
        path.append(node_numb)
        if node_numb == leaf:
            return True
        left = False
        right = False
        if children_left[node_numb] != -1:
            left = self.__find_path_skTree(children_left[node_numb], path, leaf, children_left, children_right)
        if children_right[node_numb] != -1:
            right = self.__find_path_skTree(children_right[node_numb], path, leaf, children_left, children_right)
        if left or right:
            return True
        path.remove(node_numb)
        return False

    def __get_rule_skTree(self, leaf, path, column_names, columns, ID, type_tree, children_left, feature, threshold):
        '''
        Transforms the list of nodes composing a path into a set of constraints.
        '''
        constraints_leaf = pd.DataFrame(columns=columns)
        for index, node in enumerate(path):
            constraint = pd.DataFrame(data=np.zeros(len(columns)).reshape(1, -1), columns=columns)
            # we check that we are not in the leaf
            if node != leaf:
                # do we go under or over the threshold?
                if children_left[node] == path[index + 1]:
                    constraint[column_names[feature[node]]] = 1
                    constraint['threshold'] = threshold[node]
                else:
                    constraint[column_names[feature[node]]] = -1
                    constraint['threshold'] = -(threshold[node] + 0.00001)
                constraint['ID'] = ID
                constraint['node_ID'] = node
                constraints_leaf = pd.concat([constraints_leaf, constraint], ignore_index=True)
        return constraints_leaf

    def constraint_extrapolation_skTree(self, class_c):
        '''
        :param class_c: 'continuous' (regression), 'binary', or 'multiclass'
        :return: set of constraints that describe each leaf of the tree.
                 Constraints with the same ID describe the same leaf and must
                 be satisfied together.
        '''
        children_left = self.__learner.tree_.children_left
        children_right = self.__learner.tree_.children_right
        feature = self.__learner.tree_.feature
        threshold = self.__learner.tree_.threshold
        # Leaves
        leave_id = self.__learner.apply(self.__data)
        if class_c == 'multiclass':
            columns_classes = [f'prediction_class_{i}' for i in range(len(self.__learner.tree_.value[leave_id[0]][0]))]
            columns = ['ID', 'node_ID'] + self.get_features_list() + ['threshold'] + columns_classes
        else:
            columns = ['ID', 'node_ID'] + self.get_features_list() + ['threshold', 'prediction']
        constraints = pd.DataFrame(columns=columns)
        for i, leaf in enumerate(np.unique(leave_id)):
            path_leaf = []
            self.__find_path_skTree(0, path_leaf, leaf, children_left, children_right)
            constraints_leaf = self.__get_rule_skTree(leaf, path_leaf, self.get_features_list(), columns, i + 1, class_c, children_left, feature, threshold)
            if class_c == 'continuous':
                constraints_leaf['prediction'] = self.__learner.tree_.value[leaf].item()
            elif class_c == 'binary':
                constraints_leaf['prediction'] = self.__learner.tree_.value[leaf][0, 1] / sum(self.__learner.tree_.value[leaf][0])
                # constraints_leaf['prediction'] = np.round(self.__learner.tree_.value[leaf].item())
            elif class_c == 'multiclass':
                # for i, class_name in enumerate(columns_classes):
                #     constraints_leaf[class_name] = self.__learner.tree_.value[leaf][0, i]/sum(self.__learner.tree_.value[leaf][0])
                print('Under Development')
            constraints = pd.concat([constraints, constraints_leaf], ignore_index=True)

        return constraints
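    # --- Usage sketch (illustrative; toy data, hypothetical names): extracting
    # the leaf polyhedra of a fitted scikit-learn CART. Assumes the class is
    # importable, e.g. `from rce.utils import ConstraintLearning`.
    #
    #   import pandas as pd
    #   from sklearn.datasets import make_classification
    #   from sklearn.tree import DecisionTreeClassifier
    #
    #   X_arr, y = make_classification(n_samples=200, n_features=4, random_state=0)
    #   X = pd.DataFrame(X_arr, columns=[f'X_{i}' for i in range(4)])
    #   clf = DecisionTreeClassifier(max_depth=3, random_state=0).fit(X, y)
    #   cl = ConstraintLearning(X, y, clf, 'cart')
    #   constraints = cl.constraint_extrapolation('binary')
    #   # each block of rows sharing an ID must hold jointly; each row reads
    #   #   sum_j coeff_j * x_j <= threshold
    #   print(constraints.head())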
    def constraint_extrapolation_SVM(self, class_c):
        '''
        :return: constraint with the structure Coeff*x + intercept; for
                 binary classification, Coeff*x + intercept >= 0 selects the
                 positive class.
        '''
        if class_c == "continuous":
            columns = self.get_features_list()
            constraint = pd.DataFrame(data=[self.__learner.coef_], columns=columns)
            constraint['intercept'] = self.__learner.intercept_
        elif class_c == "binary":
            columns = self.get_features_list()
            constraint = pd.DataFrame(data=[self.__learner.coef_[0]], columns=columns)  # only one element of the coefficient array of arrays
            constraint['intercept'] = self.__learner.intercept_[0]
        return constraint
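    # --- Usage sketch (illustrative; reuses the toy X, y from the CART sketch
    # above): for a linear separator the whole classifier collapses to a
    # single halfspace, Coeff*x + intercept >= 0 for the positive class.
    #
    #   from sklearn.svm import LinearSVC
    #   svm = LinearSVC(random_state=0).fit(X, y)
    #   halfspace = ConstraintLearning(X, y, svm, 'svm').constraint_extrapolation('binary')
    #   # a single row: one coefficient per feature plus the intercept column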
    def constraint_extrapolation_skRF(self, class_c):
        columns = ['Tree_id', 'ID'] + self.get_features_list() + ['threshold', 'prediction']
        constraints = pd.DataFrame(columns=columns)
        for tree_id, tree in enumerate(self.__learner):
            children_left = tree.tree_.children_left
            children_right = tree.tree_.children_right
            feature = tree.tree_.feature
            threshold = tree.tree_.threshold

            # Leaves
            leave_id = tree.apply(self.__data)

            for i, leaf in enumerate(np.unique(leave_id)):
                path_leaf = []
                self.__find_path_skTree(0, path_leaf, leaf, children_left, children_right)
                constraints_leaf = self.__get_rule_skTree(leaf, path_leaf, self.get_features_list(), columns[:-1], i + 1, class_c, children_left, feature, threshold)
                constraints_leaf['Tree_id'] = tree_id
                if class_c == 'continuous':
                    constraints_leaf['prediction'] = tree.tree_.value[leaf].item()
                elif class_c == 'binary':
                    constraints_leaf['prediction'] = float(tree.tree_.value[leaf][0, 1] / sum(tree.tree_.value[leaf][0]))
                    # constraints_leaf['prediction'] = np.round(self.__learner.tree_.value[leaf].item())
                elif class_c == 'multiclass':
                    # for i, class_name in enumerate(columns_classes):
                    #     constraints_leaf[class_name] = self.__learner.tree_.value[leaf][0, i]/sum(self.__learner.tree_.value[leaf][0])
                    print('Under Development')
                constraints = pd.concat([constraints, constraints_leaf], ignore_index=True)

        return constraints

    def constraint_extrapolation_skGBM(self, class_c):
        columns = ['Tree_id', 'ID'] + self.get_features_list() + ['threshold', 'prediction', 'initial_prediction', 'learning_rate']
        constraints = pd.DataFrame(columns=columns)
        for tree_id, tree_array in enumerate(self.__learner.estimators_):
            tree = tree_array.item()
            children_left = tree.tree_.children_left
            children_right = tree.tree_.children_right
            feature = tree.tree_.feature
            threshold = tree.tree_.threshold

            # Leaves
            leave_id = tree.apply(self.__data)

            for i, leaf in enumerate(np.unique(leave_id)):
                path_leaf = []
                self.__find_path_skTree(0, path_leaf, leaf, children_left, children_right)
                constraints_leaf = self.__get_rule_skTree(leaf, path_leaf, self.get_features_list(), columns[:-1], i + 1, class_c, children_left, feature, threshold)
                constraints_leaf['Tree_id'] = tree_id
                if class_c == 'continuous':
                    # print(tree.tree_.value[leaf])
                    constraints_leaf['prediction'] = tree.tree_.value[leaf].item()
                    constraints_leaf['initial_prediction'] = self.__learner.init_.constant_.item()
                    constraints_leaf['learning_rate'] = self.__learner.learning_rate
                else:
                    constraints_leaf['prediction'] = tree.tree_.value[leaf].item()
                    constraints_leaf['initial_prediction'] = 0
                    constraints_leaf['learning_rate'] = self.__learner.learning_rate
                constraints = pd.concat([constraints, constraints_leaf], ignore_index=True)
        return constraints
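    # --- Note (illustrative): in the GBM encoding above, the ensemble output
    # at a point x is recovered additively as
    #   initial_prediction + learning_rate * sum_t prediction_t(x)
    # where prediction_t(x) is the 'prediction' value of the unique leaf of
    # tree t whose ID-block of constraints x satisfies. E.g. with
    # initial_prediction = 0.5, learning_rate = 0.1 and two trees whose active
    # leaves carry 2.0 and -1.0, the output is 0.5 + 0.1 * (2.0 - 1.0) = 0.6.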
    def constraint_extrapolation_skEN(self, class_c):
        '''
        :return: constraint: the prediction follows the structure Coeff*x + intercept
        '''
        ## regression or binary classification; multiclass is not supported
        assert class_c != 'multiclass', 'sorry, multiclass is under development'
        columns = self.get_features_list()
        if class_c == 'continuous':
            constraint = pd.DataFrame(data=[self.__learner.coef_], columns=columns)
        else:
            constraint = pd.DataFrame(data=[self.__learner.coef_[0]], columns=columns)
        constraint['intercept'] = self.__learner.intercept_

        return constraint

    def __extract_layer(self, l):
        df_sub = pd.DataFrame(self.__learner.coefs_[l].transpose()).add_prefix('node_')
        df_sub['intercept'] = self.__learner.intercepts_[l]
        df_sub['layer'] = l
        df_sub['node'] = range(len(df_sub))
        return df_sub

    def constraint_extrapolation_MLP(self, class_c):
        assert class_c != 'multiclass', 'sorry, multiclass is under development'
        n_layers = len(self.__learner.coefs_)
        constraints = pd.concat([self.__extract_layer(l) for l in range(n_layers)], axis=0)
        cols_to_move = ['intercept', 'layer', 'node']
        constraints = constraints[cols_to_move + [col for col in constraints.columns if col not in cols_to_move]]
        return constraints

    def constraint_extrapolation(self, class_c):
        if self.__algorithm in ["iai", "iai-single"]:
            constraints = self.constraint_extrapolation_iai(class_c)
        elif self.__algorithm == "cart":
            constraints = self.constraint_extrapolation_skTree(class_c)
        elif self.__algorithm == "rf":
            constraints = self.constraint_extrapolation_skRF(class_c)
        elif self.__algorithm == "gbm":
            constraints = self.constraint_extrapolation_skGBM(class_c)
        elif self.__algorithm == "linear":
            constraints = self.constraint_extrapolation_skEN(class_c)
        elif self.__algorithm == "svm":
            constraints = self.constraint_extrapolation_SVM(class_c)
        elif self.__algorithm == "mlp":
            constraints = self.constraint_extrapolation_MLP(class_c)
        return constraints
--------------------------------------------------------------------------------
/experiments/results_ionosphere/cart/model.csv:
--------------------------------------------------------------------------------
1 | ID,node_ID,X_0,X_1,X_2,X_3,X_4,X_5,X_6,X_7,X_8,X_9,X_10,X_11,X_12,X_13,X_14,X_15,X_16,X_17,X_18,X_19,X_20,X_21,X_22,X_23,X_24,X_25,X_26,X_27,X_28,X_29,X_30,X_31,X_32,X_33,threshold,prediction
2 | 1,0,0.0,0.0,0.0,0.0,1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.615770012140274,0.0
3 | 1,1,0.0,0.0,0.0,0.0,1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.5207200050354004,0.0
4 | 2,0,0.0,0.0,0.0,0.0,1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.615770012140274,0.0
5 | 2,1,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.5207300050354003,0.0
6 | 2,3,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.591869980096817,0.0
7 | 3,0,0.0,0.0,0.0,0.0,1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.615770012140274,1.0
8 | 3,1,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.5207300050354003,1.0
9 | 3,3,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.591879980096817,1.0
10 | 3,5,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.8662849962711334,1.0
11 | 
4,0,0.0,0.0,0.0,0.0,1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.615770012140274,0.0 12 | 4,1,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.5207300050354003,0.0 13 | 4,3,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.591879980096817,0.0 14 | 4,5,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.8662949962711334,0.0 15 | 5,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,1.0 16 | 5,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.9999724924564362,1.0 17 | 5,9,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0516550000756979,1.0 18 | 5,10,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.2234250083565712,1.0 19 | 6,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,0.0 20 | 6,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.9999724924564362,0.0 21 | 6,9,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0516550000756979,0.0 22 | 6,10,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.2234350083565712,0.0 23 | 7,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,0.0 24 | 7,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.9999724924564362,0.0 25 | 7,9,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.0516650000756979,0.0 26 | 7,13,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.8656249940395355,0.0 27 | 7,14,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.585515022277832,0.0 28 | 7,15,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.8166325092315674,0.0 29 | 7,16,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.35584500432014465,0.0 30 | 8,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,1.0 31 | 8,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.9999724924564362,1.0 32 | 
8,9,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.0516650000756979,1.0 33 | 8,13,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.8656249940395355,1.0 34 | 8,14,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.585515022277832,1.0 35 | 8,15,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.8166325092315674,1.0 36 | 8,16,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,-0.35585500432014466,1.0 37 | 9,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,0.0 38 | 9,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.9999724924564362,0.0 39 | 9,9,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.0516650000756979,0.0 40 | 9,13,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.8656249940395355,0.0 41 | 9,14,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.585515022277832,0.0 42 | 9,15,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.8166425092315673,0.0 43 | 10,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,0.0 44 | 10,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.9999724924564362,0.0 45 | 10,9,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.0516650000756979,0.0 46 | 10,13,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.8656249940395355,0.0 47 | 10,14,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.585525022277832,0.0 48 | 10,20,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.6159949898719788,0.0 49 | 10,21,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.870647519826889,0.0 50 | 11,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,1.0 51 | 11,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.9999724924564362,1.0 52 | 11,9,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.0516650000756979,1.0 53 
| 11,13,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.8656249940395355,1.0 54 | 11,14,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.585525022277832,1.0 55 | 11,20,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.6159949898719788,1.0 56 | 11,21,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.870657519826889,1.0 57 | 12,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,1.0 58 | 12,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.9999724924564362,1.0 59 | 12,9,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.0516650000756979,1.0 60 | 12,13,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.8656249940395355,1.0 61 | 12,14,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.585525022277832,1.0 62 | 12,20,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.6160049898719787,1.0 63 | 13,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,0.0 64 | 13,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.9999724924564362,0.0 65 | 13,9,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.0516650000756979,0.0 66 | 13,13,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.8656349940395355,0.0 67 | 13,25,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.1220725029706955,0.0 68 | 14,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,1.0 69 | 14,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.9999724924564362,1.0 70 | 14,9,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.0516650000756979,1.0 71 | 14,13,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.8656349940395355,1.0 72 | 14,25,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.12208250297069549,1.0 73 | 
14,27,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.4265175014734268,1.0 74 | 14,28,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.422325000166893,1.0 75 | 15,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,0.0 76 | 15,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.9999724924564362,0.0 77 | 15,9,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.0516650000756979,0.0 78 | 15,13,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.8656349940395355,0.0 79 | 15,25,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.12208250297069549,0.0 80 | 15,27,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.4265175014734268,0.0 81 | 15,28,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.422335000166893,0.0 82 | 16,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,1.0 83 | 16,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.9999724924564362,1.0 84 | 16,9,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.0516650000756979,1.0 85 | 16,13,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.8656349940395355,1.0 86 | 16,25,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.12208250297069549,1.0 87 | 16,27,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.42652750147342683,1.0 88 | 17,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,0.0 89 | 17,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.9999824924564361,0.0 90 | 17,32,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.5,0.0 91 | 18,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,0.0 92 | 18,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.9999824924564361,0.0 93 | 18,32,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.50001,0.0 94 | 
18,34,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.24119749665260315,0.0 95 | 19,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,0.0 96 | 19,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.9999824924564361,0.0 97 | 19,32,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.50001,0.0 98 | 19,34,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.24120749665260316,0.0 99 | 19,36,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.8464899957180023,0.0 100 | 20,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,0.0 101 | 20,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.9999824924564361,0.0 102 | 20,32,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.50001,0.0 103 | 20,34,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.24120749665260316,0.0 104 | 20,36,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,-0.8464999957180023,0.0 105 | 20,38,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.8607949912548065,0.0 106 | 21,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,0.0 107 | 21,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.9999824924564361,0.0 108 | 21,32,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.50001,0.0 109 | 21,34,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.24120749665260316,0.0 110 | 21,36,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,-0.8464999957180023,0.0 111 | 21,38,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.8608049912548065,0.0 112 | 21,40,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.10560499876737595,0.0 113 | 22,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,1.0 114 | 
22,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.9999824924564361,1.0 115 | 22,32,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.50001,1.0 116 | 22,34,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.24120749665260316,1.0 117 | 22,36,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,-0.8464999957180023,1.0 118 | 22,38,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.8608049912548065,1.0 119 | 22,40,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,-0.10561499876737594,1.0 120 | 22,42,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.931315004825592,1.0 121 | 23,0,0.0,0.0,0.0,0.0,-1,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.615780012140274,0.0 122 | 23,8,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.9999824924564361,0.0 123 | 23,32,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.50001,0.0 124 | 23,34,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.24120749665260316,0.0 125 | 23,36,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,-0.8464999957180023,0.0 126 | 23,38,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.8608049912548065,0.0 127 | 23,40,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,-0.10561499876737594,0.0 128 | 23,42,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-1.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,-0.931325004825592,0.0 129 | -------------------------------------------------------------------------------- /experiments/results_banknote/mlp/model.csv: -------------------------------------------------------------------------------- 1 | 
intercept,layer,node,node_0,node_1,node_2,node_3,node_4,node_5,node_6,node_7,node_8,node_9,node_10,node_11,node_12,node_13,node_14,node_15,node_16,node_17,node_18,node_19,node_20,node_21,node_22,node_23,node_24,node_25,node_26,node_27,node_28,node_29,node_30,node_31,node_32,node_33,node_34,node_35,node_36,node_37,node_38,node_39,node_40,node_41,node_42,node_43,node_44,node_45,node_46,node_47,node_48,node_49,node_50,node_51,node_52,node_53,node_54,node_55,node_56,node_57,node_58,node_59,node_60,node_61,node_62,node_63,node_64,node_65,node_66,node_67,node_68,node_69,node_70,node_71,node_72,node_73,node_74,node_75,node_76,node_77,node_78,node_79,node_80,node_81,node_82,node_83,node_84,node_85,node_86,node_87,node_88,node_89,node_90,node_91,node_92,node_93,node_94,node_95,node_96,node_97,node_98,node_99 2 | -0.6719085100309372,0,0,0.6032341479946219,0.6520761482485006,0.6467568594485422,0.09932684064073757,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 3 | -0.5022050322050906,0,1,0.7081830676096488,0.4707696034485476,0.8098665031527357,-0.006987795858739653,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4 | -0.19233876610436643,0,2,4.251325808703224e-54,9.146740499774495e-42,-1.7072226066296185e-52,3.687345076804844e-49,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 5 | 0.8669743168703516,0,3,-0.173008321153706,0.09965669916542957,-1.7567187476235262,-0.09469605334555586,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 6 | 0.15169466092680012,0,4,-0.06464835752400083,-0.1468104283257406,-0.2424640351311433,-0.06713348056647271,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 7 | -0.03319333657228412,0,5,0.057559373332014316,0.02475022828339297,-0.18044836127197078,-0.13599797427885832,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8 | -0.08325020357549409,0,6,4.1700554677513957e-57,-3.3249317721711464e-55,1.6110748640033447e-47,-3.6112997554531936e-28,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 9 | -0.128385567516347,0,7,5.685465850119548e-26,1.2728489759819936e-56,3.462795633191769e-58,-2.557952779070356e-22,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 10 | -0.5182762624498938,0,8,0.767203606869362,0.44595262332469277,0.6327850279873842,-0.004136023510179158,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 11 | -0.7452194032888386,0,9,0.6607663359291193,0.861662379200699,1.058660249877005,-0.1996452248994881,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 12 | -0.6757689916954388,0,10,0.6561064917489311,0.5397562344180674,0.8771292066160273,0.08649398181237404,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 13 | -0.5747411986070637,0,11,0.6208651547803337,0.7355480104275138,0.7208338213223215,-0.18212612191464786,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 14 | -0.20749050334494207,0,12,8.090296637059726e-57,-7.847268179385814e-46,6.121620038164975e-49,-6.647451053258721e-24,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 15 | 
-0.11916369476060266,0,13,1.4200326562340977e-23,-2.175062085114109e-45,-1.160360786300642e-41,-1.387751994365544e-24,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 16 | -0.13394996350299598,0,14,-2.3111082367898898e-23,5.87318598711337e-34,-1.319721438684456e-20,-5.4168457000750806e-48,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 17 | -0.11856311547599137,0,15,-2.057423613005217e-24,-5.0255452385609475e-54,2.4444875429728733e-40,5.0122420503217144e-40,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 18 | -0.17723533404463657,0,16,-1.3847666325974878e-20,7.242815309891723e-27,-2.221684907165658e-47,6.059144705413136e-56,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 19 | -0.23441021625346703,0,17,-9.334980751265932e-32,-3.1615302916097326e-56,6.706550037513005e-54,3.825874112228401e-58,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 20 | -0.2445193946138108,0,18,0.061049081318274336,0.10991524865502576,0.01657855138069304,0.12419457489767048,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 21 | 0.6053413292528779,0,19,-0.22576524924699573,-0.3458218234176501,-0.3424638719448072,0.2119069458732916,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 22 | -0.5642269589233739,0,20,0.9442816911575335,0.7143840317746664,0.890807753089141,-0.29755743222912306,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 23 | -0.4019929128391507,0,21,0.6999685804529174,0.5343978574851309,0.7909039740649403,-0.31390476804188233,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 24 | -0.5084720008465033,0,22,0.508540739306255,0.7165045868468785,0.6690816979803929,-0.20305760759159713,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 25 | -0.16191145651599914,0,23,-3.0976636156794056e-37,6.112140784109308e-51,1.069052587240095e-35,1.0873398879700919e-45,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 26 | 0.4409915854081364,0,24,0.30903272562663936,0.06256256521895127,-1.5869765504780657,-0.0882355865175004,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 27 | -0.667460723195247,0,25,0.706070006427547,0.6581833171685335,0.8888433437490805,-0.1386583703586893,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 28 | 0.9981109170368409,0,26,-0.8308757990880294,-0.6546165479746818,-0.5457347624189044,0.3430948033915479,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 29 | -0.20882110432687212,0,27,1.993779332500481e-22,1.083459543928998e-45,1.0380903352790485e-36,-9.956269875639208e-46,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 30 | -0.4775555721015561,0,28,0.5230732070304455,0.584371567477369,0.8038630054649972,-0.22860882520123368,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 31 | 
-0.1016500738940703,0,29,-1.8168273232828223e-55,-2.045929301799963e-44,2.430321643145709e-42,-1.0341127107010134e-52,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 32 | -0.1242184769265301,0,30,-9.594325886939027e-42,2.9985630687192027e-53,8.938658936491389e-41,2.0635313014800853e-34,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 33 | -0.5528535435855498,0,31,0.7120450872978331,0.5325908160709789,0.54810978880027,0.024509561529356846,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 34 | -0.12198730011162241,0,32,1.4171022785959545e-58,-1.7151458186524828e-28,-2.67753717220538e-60,-1.6318622104701667e-22,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 35 | -0.5222602269813392,0,33,0.6076264020818103,0.4858115955751237,0.5824813323488901,0.019001617743744353,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 36 | -0.01498603985124325,0,34,-0.24555888402849554,0.008668980044794312,-0.16088156882927845,0.11104308457735171,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 37 | -0.6055154666779119,0,35,0.5685044249726492,0.565325723847306,0.616408414612675,0.042790219460942934,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 38 | 0.740795394300548,0,36,0.1237748021900574,0.062019904810148116,-2.0832047300378647,-0.11592994672182227,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 39 | -0.10229714091260719,0,37,1.4318297422056448e-54,-6.353870119605948e-50,-5.208201405172267e-59,1.1147647791219174e-53,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 40 | 0.6621661358044574,0,38,-0.0983551176026337,-0.38039240742863495,-0.6474182799337246,0.09474681316618302,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 41 | -0.040901436968405064,0,39,4.516815075759578e-47,-1.0202234907373738e-56,2.4700715868116618e-55,9.734198602250799e-45,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 42 | -0.61919047681856,0,40,0.46323701298515124,0.7080245629945,0.8249418079581624,-0.11176710607621755,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 43 | 0.8978211926060602,0,41,-0.6257993739588581,-0.5188035001316806,-0.7717103217624466,0.12234754199533475,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 44 | 0.9647921102935363,0,42,-0.6062229589498355,-0.6498725421334021,-0.7469286053647398,0.14750633347977904,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 45 | -0.21809103344587208,0,43,-1.0387059007798489e-22,5.844915676098342e-26,-3.9967109390640065e-28,-7.860939455690564e-22,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 46 | 0.4176430902985753,0,44,-0.3198712322533694,-0.30778732872390024,-0.40511802232420735,0.23716963358438034,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 47 | 
-0.5003071262369694,0,45,0.5937138556795757,0.5801508203615059,0.6650590507187498,-0.19049493231540537,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 48 | 0.800970758765855,0,46,-0.6702424586760668,-0.5007754776281297,-0.5319819935814649,0.2258328553254026,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 49 | 0.8707050245237316,0,47,-0.7014481389156575,-0.29957990876766755,-0.7095785935192951,0.08338365937013387,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 50 | 1.0084011529563746,0,48,-0.8192767027952035,-0.5234315943716912,-0.9049708010442716,0.23806355936665216,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 51 | -0.4093715136995253,0,49,0.40576632247559874,0.7421824059694657,0.5682628104985177,0.10418585813646472,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 52 | -0.09773120903552521,0,50,1.4988566336624607e-56,-1.3999925675795184e-30,-4.868439040845436e-51,-9.173069047807299e-52,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 53 | -0.35905017808893436,0,51,0.4515501311339557,0.6660057637129543,0.773108717214691,-0.3439544878317156,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 54 | -0.6214736712828132,0,52,0.8819626190324784,0.5089362732059861,0.8750728920928704,-0.06206452747864255,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 55 | -0.18931655953396834,0,53,-1.7252128595143523e-25,-2.124143073379834e-53,-2.14275957480264e-40,8.923985745508228e-61,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 56 | 0.8877584344289183,0,54,-0.7061866555892989,-0.586138553059868,-0.37044073109103887,0.23352081980366915,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 57 | -0.4765071407451738,0,55,-0.14398561166394191,0.7717948980990176,0.36813807615808325,0.09530344519605671,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 58 | -0.5122787772262761,0,56,0.6052725101719839,0.6682755390229907,0.6600379522321519,-0.211393835757715,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 59 | -0.21215874262858497,0,57,1.1503454772587409e-40,1.2088017719449407e-56,1.29001920569828e-55,-8.48754364723749e-28,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 60 | 0.8170553648229933,0,58,-0.60918080371495,-0.5275059621570284,-0.4078014640375386,0.14692285259994423,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 61 | 0.8035390728173365,0,59,-0.6361268797075891,-0.5801114226463806,-0.5984675453095846,0.397778534483671,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 62 | -0.10931777940956844,0,60,4.159463149714066e-31,-1.1605290970636829e-45,-6.73502992864942e-40,-5.546879897238892e-34,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 63 | -0.05809918783973447,0,61,-3.814792088614946e-26,3.524399822309343e-58,-2.3399868688098802e-25,4.847794014426478e-25,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 64 | 
-0.09423468971450867,0,62,0.04042523188614919,0.07192552080559535,-0.2575548417377976,-0.010461587791866515,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 65 | 0.7075056039896142,0,63,-0.6621229854060073,-0.2910921088154496,-0.3259240224075556,0.11992827837359742,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 66 | -0.9317199535023477,0,64,0.7025292208876052,0.9230769149487,1.0334253024048103,0.024640738899084125,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 67 | -0.17441036318655623,0,65,-0.077827399538734,0.15112501152030605,0.11362007219484452,-0.03294781509427678,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 68 | -0.04787578717139287,0,66,0.12680906936323552,-0.2413215769128566,-0.11451237466858095,0.08078917202656077,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 69 | -0.09393033881171467,0,67,-4.0384863156145243e-25,-6.135481241303708e-51,2.3473237783663288e-55,-1.9714100776631782e-54,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 70 | 0.7836596604748046,0,68,-0.31685151531792294,-0.3854282856218506,-0.8095948558277057,0.30945945458035834,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 71 | -0.1259847516612273,0,69,-4.781291380871751e-25,3.1475544583136887e-32,1.9889911826176822e-60,-1.0438438801306688e-46,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 72 | 0.4639698323055473,0,70,-0.09918799852020403,-0.3563383398478797,-0.15649199964182559,0.1849786444914923,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 73 | 0.09731102569852289,0,71,-0.021114732538814577,-0.24843756352816398,0.0028846319642335154,-0.19596765048790493,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 74 | -0.46666566828914047,0,72,0.736917714075224,0.402670280410334,0.5159494511414008,0.02368318068952791,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 75 | 0.9937082214773896,0,73,-0.5543013610635228,-0.7583385967393494,-0.7786893030414217,0.20454824163027996,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 76 | 0.8252832032836995,0,74,-0.3645640761580043,-0.35032352660520466,-0.718694798940824,-0.007986969421885797,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 77 | 0.04506554425024546,0,75,-0.2870740611782044,-0.16541200239411954,0.12650697928933782,-0.2317552017572923,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 78 | -0.46043088915725816,0,76,0.304588789340286,0.42840452291360404,0.7712747977193314,0.06525684618494745,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 79 | 0.7918622279275922,0,77,-0.6656114323719102,-0.26415781136560856,-0.37661714350299946,0.3456962834898883,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 80 | 1.020890798597375,0,78,-0.7629272041025342,-0.4503721471850837,-0.9260840053378893,0.12571173136384883,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 81 | 
0.4426367748996328,0,79,-1.0369743339534452,-0.3496027009581303,0.3527625873930411,0.28297802584217024,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 82 | -0.20614749048399073,0,80,-4.782516355638962e-47,1.662887561398809e-31,-1.3585467588793613e-44,2.5502663483538995e-29,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 83 | 0.47497033499447766,0,81,-1.046373881519637,-0.37946112470811344,0.2813701146698889,0.2941458181223338,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 84 | -0.17814418000185844,0,82,-0.10253302206528146,0.011657395604163387,0.03420146452664457,0.16443938051146162,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 85 | -0.03963826340303861,0,83,-4.285832377364412e-46,-3.75098072463832e-39,-2.972251045550314e-20,-2.460171155427983e-56,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 86 | -0.21951266076452058,0,84,-0.007294041504804992,0.16626528573501315,-0.10947187867112007,0.11259814011409602,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 87 | -0.5583160852626543,0,85,0.31608157909724927,0.5747734525587284,0.5770383071955182,0.14010212972261127,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 88 | -0.6489540728694579,0,86,0.5329520173624065,0.5624433626410635,0.835287842670032,0.03248369140873782,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 89 | 0.10403632918303199,0,87,-0.23028583919697934,-0.07583736472140894,-0.07441305158123594,-0.21450026297672775,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 90 | -0.31019142091756247,0,88,0.4654306446074524,0.547514586574693,0.47823950274737487,-0.05181265124180998,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 91 | 0.01397637686946879,0,89,-0.4848728943181483,-0.7519348637198853,1.4309336277158866,0.038150338814597784,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 92 | -0.10817125811256587,0,90,-3.995762824241117e-47,-3.0084534918636764e-54,1.0648188324721794e-51,-7.408341693984202e-23,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 93 | 0.7686022276281042,0,91,-0.36501253878914386,-0.27605626906130354,-1.4878842398159033,0.22222633543858006,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 94 | 0.8511485317612582,0,92,-0.7064419358026717,-0.46287501142685555,-0.40567808923941573,0.1166445302951116,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 95 | -0.7737079979717119,0,93,0.8094397315822816,0.8406584858657739,0.7475169623059688,0.04534241388060618,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 96 | -0.6028227436493775,0,94,0.5448901757821221,0.7678343753860041,0.9532469392867665,-0.17833322772008062,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 97 | -0.10969446224918716,0,95,-4.256901371073217e-34,-9.144358588275883e-61,7.748985397035219e-39,1.2253762862886983e-21,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 98 | 
-0.021403938269347667,0,96,2.3850871146877065e-55,-2.7964117548163977e-38,-3.5607155911577643e-25,-3.9387039318260943e-51,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 99 | 0.5731571222012055,0,97,-1.699697903228378,-0.348002007871409,1.4129528350317557,0.20267279190538842,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 100 | -0.13714120164040017,0,98,0.06349675536795109,-3.273598472034008e-06,0.17547274336715743,-0.1749849943962896,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 101 | 0.635626985486411,0,99,-0.6661713131998426,-0.2547243567594619,-0.20181351514231272,0.11978592769751788,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 102 | 0.2390155161874857,1,0,-1.1255711438195823,-0.7954543171400547,9.801291709543566e-60,1.330078979753542,-0.05848046234159844,0.15095286049777445,3.7429312881695943e-28,-1.0812898642341508e-21,-0.9706505070035959,-1.2395058763823885,-1.218160704254537,-1.1400789594383645,-2.3910947986048443e-41,6.309021594603085e-47,1.5539673757874484e-52,-9.545355326470067e-46,-2.0990808492277396e-35,-4.635240659049605e-49,0.011631039222203177,0.6113643340612825,-0.8288983703877363,-0.9565216579053905,-1.2907755021374432,1.8512702320556408e-45,1.8023148190154854,-1.1602480076083832,0.8226401665944566,2.111524932100384e-23,-1.2232043839066855,1.1290794141791878e-54,-1.9110512369415424e-20,-0.9509725198901943,1.610713982707227e-52,-1.1951642573105414,-0.11699257929395981,-1.2103829481070465,1.875829238220125,-1.1217831673133216e-45,0.4442559295542414,7.950338076455273e-22,-1.4621061682523022,1.2145743547328056,1.1043874112723462,8.133286838468453e-56,0.8645869408792715,-1.2398997097039575,1.0035686786844766,1.091335495224376,1.0106580366321416,-0.5424306440275057,-1.6650842712962496e-38,-1.0531050905160138,-1.0117177129372223,1.7346656433362923e-60,0.88037660226057,-2.043550724311035,-1.0513156929723846,9.257617140247679e-56,1.0445856609154691,1.014608993343149,1.2232542604431306e-52,-1.0191310245466115e-27,0.2069390270249271,1.0040283598396966,-1.2579131292131707,0.15397806519161114,-0.0015496239585488405,2.5119993376123871e-23,0.7598116077261867,8.270449894475775e-21,0.46379350237027384,-0.137619490954705,-0.9508185858116062,1.0517000964914687,1.0163885046948815,-0.02797398288903704,-0.7487944664490532,0.8353387496460564,0.9798730719840129,1.2743670855577993,4.877570205782587e-31,1.3784518391342493,-0.00703205625129263,-2.58848831902834e-23,0.06446028966961923,-0.9511187045457151,-1.3243078466040181,-0.12809336515005312,-0.7649205229069833,1.742265970410457,-2.006687232670559e-41,1.6061779446803945,0.9553711283277919,-0.9243826806267851,-1.8090201196531233,2.0274426560236124e-19,-5.855391259606669e-51,2.316892393614669,-0.014823364626942358,1.1720668019637162 103 | -------------------------------------------------------------------------------- /data/diabetes.csv: -------------------------------------------------------------------------------- 1 | Pregnancies,Glucose,BloodPressure,SkinThickness,Insulin,BMI,DiabetesPedigreeFunction,Age,Outcome 2 | 6,148,72,35,0,33.6,0.627,50,1 3 | 1,85,66,29,0,26.6,0.351,31,0 4 | 8,183,64,0,0,23.3,0.672,32,1 5 | 1,89,66,23,94,28.1,0.167,21,0 6 | 0,137,40,35,168,43.1,2.288,33,1 7 | 5,116,74,0,0,25.6,0.201,30,0 8 | 3,78,50,32,88,31,0.248,26,1 9 | 10,115,0,0,0,35.3,0.134,29,0 10 | 2,197,70,45,543,30.5,0.158,53,1 11 | 8,125,96,0,0,0,0.232,54,1 12 | 
4,110,92,0,0,37.6,0.191,30,0 13 | 10,168,74,0,0,38,0.537,34,1 14 | 10,139,80,0,0,27.1,1.441,57,0 15 | 1,189,60,23,846,30.1,0.398,59,1 16 | 5,166,72,19,175,25.8,0.587,51,1 17 | 7,100,0,0,0,30,0.484,32,1 18 | 0,118,84,47,230,45.8,0.551,31,1 19 | 7,107,74,0,0,29.6,0.254,31,1 20 | 1,103,30,38,83,43.3,0.183,33,0 21 | 1,115,70,30,96,34.6,0.529,32,1 22 | 3,126,88,41,235,39.3,0.704,27,0 23 | 8,99,84,0,0,35.4,0.388,50,0 24 | 7,196,90,0,0,39.8,0.451,41,1 25 | 9,119,80,35,0,29,0.263,29,1 26 | 11,143,94,33,146,36.6,0.254,51,1 27 | 10,125,70,26,115,31.1,0.205,41,1 28 | 7,147,76,0,0,39.4,0.257,43,1 29 | 1,97,66,15,140,23.2,0.487,22,0 30 | 13,145,82,19,110,22.2,0.245,57,0 31 | 5,117,92,0,0,34.1,0.337,38,0 32 | 5,109,75,26,0,36,0.546,60,0 33 | 3,158,76,36,245,31.6,0.851,28,1 34 | 3,88,58,11,54,24.8,0.267,22,0 35 | 6,92,92,0,0,19.9,0.188,28,0 36 | 10,122,78,31,0,27.6,0.512,45,0 37 | 4,103,60,33,192,24,0.966,33,0 38 | 11,138,76,0,0,33.2,0.42,35,0 39 | 9,102,76,37,0,32.9,0.665,46,1 40 | 2,90,68,42,0,38.2,0.503,27,1 41 | 4,111,72,47,207,37.1,1.39,56,1 42 | 3,180,64,25,70,34,0.271,26,0 43 | 7,133,84,0,0,40.2,0.696,37,0 44 | 7,106,92,18,0,22.7,0.235,48,0 45 | 9,171,110,24,240,45.4,0.721,54,1 46 | 7,159,64,0,0,27.4,0.294,40,0 47 | 0,180,66,39,0,42,1.893,25,1 48 | 1,146,56,0,0,29.7,0.564,29,0 49 | 2,71,70,27,0,28,0.586,22,0 50 | 7,103,66,32,0,39.1,0.344,31,1 51 | 7,105,0,0,0,0,0.305,24,0 52 | 1,103,80,11,82,19.4,0.491,22,0 53 | 1,101,50,15,36,24.2,0.526,26,0 54 | 5,88,66,21,23,24.4,0.342,30,0 55 | 8,176,90,34,300,33.7,0.467,58,1 56 | 7,150,66,42,342,34.7,0.718,42,0 57 | 1,73,50,10,0,23,0.248,21,0 58 | 7,187,68,39,304,37.7,0.254,41,1 59 | 0,100,88,60,110,46.8,0.962,31,0 60 | 0,146,82,0,0,40.5,1.781,44,0 61 | 0,105,64,41,142,41.5,0.173,22,0 62 | 2,84,0,0,0,0,0.304,21,0 63 | 8,133,72,0,0,32.9,0.27,39,1 64 | 5,44,62,0,0,25,0.587,36,0 65 | 2,141,58,34,128,25.4,0.699,24,0 66 | 7,114,66,0,0,32.8,0.258,42,1 67 | 5,99,74,27,0,29,0.203,32,0 68 | 0,109,88,30,0,32.5,0.855,38,1 69 | 2,109,92,0,0,42.7,0.845,54,0 70 | 1,95,66,13,38,19.6,0.334,25,0 71 | 4,146,85,27,100,28.9,0.189,27,0 72 | 2,100,66,20,90,32.9,0.867,28,1 73 | 5,139,64,35,140,28.6,0.411,26,0 74 | 13,126,90,0,0,43.4,0.583,42,1 75 | 4,129,86,20,270,35.1,0.231,23,0 76 | 1,79,75,30,0,32,0.396,22,0 77 | 1,0,48,20,0,24.7,0.14,22,0 78 | 7,62,78,0,0,32.6,0.391,41,0 79 | 5,95,72,33,0,37.7,0.37,27,0 80 | 0,131,0,0,0,43.2,0.27,26,1 81 | 2,112,66,22,0,25,0.307,24,0 82 | 3,113,44,13,0,22.4,0.14,22,0 83 | 2,74,0,0,0,0,0.102,22,0 84 | 7,83,78,26,71,29.3,0.767,36,0 85 | 0,101,65,28,0,24.6,0.237,22,0 86 | 5,137,108,0,0,48.8,0.227,37,1 87 | 2,110,74,29,125,32.4,0.698,27,0 88 | 13,106,72,54,0,36.6,0.178,45,0 89 | 2,100,68,25,71,38.5,0.324,26,0 90 | 15,136,70,32,110,37.1,0.153,43,1 91 | 1,107,68,19,0,26.5,0.165,24,0 92 | 1,80,55,0,0,19.1,0.258,21,0 93 | 4,123,80,15,176,32,0.443,34,0 94 | 7,81,78,40,48,46.7,0.261,42,0 95 | 4,134,72,0,0,23.8,0.277,60,1 96 | 2,142,82,18,64,24.7,0.761,21,0 97 | 6,144,72,27,228,33.9,0.255,40,0 98 | 2,92,62,28,0,31.6,0.13,24,0 99 | 1,71,48,18,76,20.4,0.323,22,0 100 | 6,93,50,30,64,28.7,0.356,23,0 101 | 1,122,90,51,220,49.7,0.325,31,1 102 | 1,163,72,0,0,39,1.222,33,1 103 | 1,151,60,0,0,26.1,0.179,22,0 104 | 0,125,96,0,0,22.5,0.262,21,0 105 | 1,81,72,18,40,26.6,0.283,24,0 106 | 2,85,65,0,0,39.6,0.93,27,0 107 | 1,126,56,29,152,28.7,0.801,21,0 108 | 1,96,122,0,0,22.4,0.207,27,0 109 | 4,144,58,28,140,29.5,0.287,37,0 110 | 3,83,58,31,18,34.3,0.336,25,0 111 | 0,95,85,25,36,37.4,0.247,24,1 112 | 3,171,72,33,135,33.3,0.199,24,1 113 | 8,155,62,26,495,34,0.543,46,1 
114 | 1,89,76,34,37,31.2,0.192,23,0 115 | 4,76,62,0,0,34,0.391,25,0 116 | 7,160,54,32,175,30.5,0.588,39,1 117 | 4,146,92,0,0,31.2,0.539,61,1 118 | 5,124,74,0,0,34,0.22,38,1 119 | 5,78,48,0,0,33.7,0.654,25,0 120 | 4,97,60,23,0,28.2,0.443,22,0 121 | 4,99,76,15,51,23.2,0.223,21,0 122 | 0,162,76,56,100,53.2,0.759,25,1 123 | 6,111,64,39,0,34.2,0.26,24,0 124 | 2,107,74,30,100,33.6,0.404,23,0 125 | 5,132,80,0,0,26.8,0.186,69,0 126 | 0,113,76,0,0,33.3,0.278,23,1 127 | 1,88,30,42,99,55,0.496,26,1 128 | 3,120,70,30,135,42.9,0.452,30,0 129 | 1,118,58,36,94,33.3,0.261,23,0 130 | 1,117,88,24,145,34.5,0.403,40,1 131 | 0,105,84,0,0,27.9,0.741,62,1 132 | 4,173,70,14,168,29.7,0.361,33,1 133 | 9,122,56,0,0,33.3,1.114,33,1 134 | 3,170,64,37,225,34.5,0.356,30,1 135 | 8,84,74,31,0,38.3,0.457,39,0 136 | 2,96,68,13,49,21.1,0.647,26,0 137 | 2,125,60,20,140,33.8,0.088,31,0 138 | 0,100,70,26,50,30.8,0.597,21,0 139 | 0,93,60,25,92,28.7,0.532,22,0 140 | 0,129,80,0,0,31.2,0.703,29,0 141 | 5,105,72,29,325,36.9,0.159,28,0 142 | 3,128,78,0,0,21.1,0.268,55,0 143 | 5,106,82,30,0,39.5,0.286,38,0 144 | 2,108,52,26,63,32.5,0.318,22,0 145 | 10,108,66,0,0,32.4,0.272,42,1 146 | 4,154,62,31,284,32.8,0.237,23,0 147 | 0,102,75,23,0,0,0.572,21,0 148 | 9,57,80,37,0,32.8,0.096,41,0 149 | 2,106,64,35,119,30.5,1.4,34,0 150 | 5,147,78,0,0,33.7,0.218,65,0 151 | 2,90,70,17,0,27.3,0.085,22,0 152 | 1,136,74,50,204,37.4,0.399,24,0 153 | 4,114,65,0,0,21.9,0.432,37,0 154 | 9,156,86,28,155,34.3,1.189,42,1 155 | 1,153,82,42,485,40.6,0.687,23,0 156 | 8,188,78,0,0,47.9,0.137,43,1 157 | 7,152,88,44,0,50,0.337,36,1 158 | 2,99,52,15,94,24.6,0.637,21,0 159 | 1,109,56,21,135,25.2,0.833,23,0 160 | 2,88,74,19,53,29,0.229,22,0 161 | 17,163,72,41,114,40.9,0.817,47,1 162 | 4,151,90,38,0,29.7,0.294,36,0 163 | 7,102,74,40,105,37.2,0.204,45,0 164 | 0,114,80,34,285,44.2,0.167,27,0 165 | 2,100,64,23,0,29.7,0.368,21,0 166 | 0,131,88,0,0,31.6,0.743,32,1 167 | 6,104,74,18,156,29.9,0.722,41,1 168 | 3,148,66,25,0,32.5,0.256,22,0 169 | 4,120,68,0,0,29.6,0.709,34,0 170 | 4,110,66,0,0,31.9,0.471,29,0 171 | 3,111,90,12,78,28.4,0.495,29,0 172 | 6,102,82,0,0,30.8,0.18,36,1 173 | 6,134,70,23,130,35.4,0.542,29,1 174 | 2,87,0,23,0,28.9,0.773,25,0 175 | 1,79,60,42,48,43.5,0.678,23,0 176 | 2,75,64,24,55,29.7,0.37,33,0 177 | 8,179,72,42,130,32.7,0.719,36,1 178 | 6,85,78,0,0,31.2,0.382,42,0 179 | 0,129,110,46,130,67.1,0.319,26,1 180 | 5,143,78,0,0,45,0.19,47,0 181 | 5,130,82,0,0,39.1,0.956,37,1 182 | 6,87,80,0,0,23.2,0.084,32,0 183 | 0,119,64,18,92,34.9,0.725,23,0 184 | 1,0,74,20,23,27.7,0.299,21,0 185 | 5,73,60,0,0,26.8,0.268,27,0 186 | 4,141,74,0,0,27.6,0.244,40,0 187 | 7,194,68,28,0,35.9,0.745,41,1 188 | 8,181,68,36,495,30.1,0.615,60,1 189 | 1,128,98,41,58,32,1.321,33,1 190 | 8,109,76,39,114,27.9,0.64,31,1 191 | 5,139,80,35,160,31.6,0.361,25,1 192 | 3,111,62,0,0,22.6,0.142,21,0 193 | 9,123,70,44,94,33.1,0.374,40,0 194 | 7,159,66,0,0,30.4,0.383,36,1 195 | 11,135,0,0,0,52.3,0.578,40,1 196 | 8,85,55,20,0,24.4,0.136,42,0 197 | 5,158,84,41,210,39.4,0.395,29,1 198 | 1,105,58,0,0,24.3,0.187,21,0 199 | 3,107,62,13,48,22.9,0.678,23,1 200 | 4,109,64,44,99,34.8,0.905,26,1 201 | 4,148,60,27,318,30.9,0.15,29,1 202 | 0,113,80,16,0,31,0.874,21,0 203 | 1,138,82,0,0,40.1,0.236,28,0 204 | 0,108,68,20,0,27.3,0.787,32,0 205 | 2,99,70,16,44,20.4,0.235,27,0 206 | 6,103,72,32,190,37.7,0.324,55,0 207 | 5,111,72,28,0,23.9,0.407,27,0 208 | 8,196,76,29,280,37.5,0.605,57,1 209 | 5,162,104,0,0,37.7,0.151,52,1 210 | 1,96,64,27,87,33.2,0.289,21,0 211 | 7,184,84,33,0,35.5,0.355,41,1 212 | 
2,81,60,22,0,27.7,0.29,25,0 213 | 0,147,85,54,0,42.8,0.375,24,0 214 | 7,179,95,31,0,34.2,0.164,60,0 215 | 0,140,65,26,130,42.6,0.431,24,1 216 | 9,112,82,32,175,34.2,0.26,36,1 217 | 12,151,70,40,271,41.8,0.742,38,1 218 | 5,109,62,41,129,35.8,0.514,25,1 219 | 6,125,68,30,120,30,0.464,32,0 220 | 5,85,74,22,0,29,1.224,32,1 221 | 5,112,66,0,0,37.8,0.261,41,1 222 | 0,177,60,29,478,34.6,1.072,21,1 223 | 2,158,90,0,0,31.6,0.805,66,1 224 | 7,119,0,0,0,25.2,0.209,37,0 225 | 7,142,60,33,190,28.8,0.687,61,0 226 | 1,100,66,15,56,23.6,0.666,26,0 227 | 1,87,78,27,32,34.6,0.101,22,0 228 | 0,101,76,0,0,35.7,0.198,26,0 229 | 3,162,52,38,0,37.2,0.652,24,1 230 | 4,197,70,39,744,36.7,2.329,31,0 231 | 0,117,80,31,53,45.2,0.089,24,0 232 | 4,142,86,0,0,44,0.645,22,1 233 | 6,134,80,37,370,46.2,0.238,46,1 234 | 1,79,80,25,37,25.4,0.583,22,0 235 | 4,122,68,0,0,35,0.394,29,0 236 | 3,74,68,28,45,29.7,0.293,23,0 237 | 4,171,72,0,0,43.6,0.479,26,1 238 | 7,181,84,21,192,35.9,0.586,51,1 239 | 0,179,90,27,0,44.1,0.686,23,1 240 | 9,164,84,21,0,30.8,0.831,32,1 241 | 0,104,76,0,0,18.4,0.582,27,0 242 | 1,91,64,24,0,29.2,0.192,21,0 243 | 4,91,70,32,88,33.1,0.446,22,0 244 | 3,139,54,0,0,25.6,0.402,22,1 245 | 6,119,50,22,176,27.1,1.318,33,1 246 | 2,146,76,35,194,38.2,0.329,29,0 247 | 9,184,85,15,0,30,1.213,49,1 248 | 10,122,68,0,0,31.2,0.258,41,0 249 | 0,165,90,33,680,52.3,0.427,23,0 250 | 9,124,70,33,402,35.4,0.282,34,0 251 | 1,111,86,19,0,30.1,0.143,23,0 252 | 9,106,52,0,0,31.2,0.38,42,0 253 | 2,129,84,0,0,28,0.284,27,0 254 | 2,90,80,14,55,24.4,0.249,24,0 255 | 0,86,68,32,0,35.8,0.238,25,0 256 | 12,92,62,7,258,27.6,0.926,44,1 257 | 1,113,64,35,0,33.6,0.543,21,1 258 | 3,111,56,39,0,30.1,0.557,30,0 259 | 2,114,68,22,0,28.7,0.092,25,0 260 | 1,193,50,16,375,25.9,0.655,24,0 261 | 11,155,76,28,150,33.3,1.353,51,1 262 | 3,191,68,15,130,30.9,0.299,34,0 263 | 3,141,0,0,0,30,0.761,27,1 264 | 4,95,70,32,0,32.1,0.612,24,0 265 | 3,142,80,15,0,32.4,0.2,63,0 266 | 4,123,62,0,0,32,0.226,35,1 267 | 5,96,74,18,67,33.6,0.997,43,0 268 | 0,138,0,0,0,36.3,0.933,25,1 269 | 2,128,64,42,0,40,1.101,24,0 270 | 0,102,52,0,0,25.1,0.078,21,0 271 | 2,146,0,0,0,27.5,0.24,28,1 272 | 10,101,86,37,0,45.6,1.136,38,1 273 | 2,108,62,32,56,25.2,0.128,21,0 274 | 3,122,78,0,0,23,0.254,40,0 275 | 1,71,78,50,45,33.2,0.422,21,0 276 | 13,106,70,0,0,34.2,0.251,52,0 277 | 2,100,70,52,57,40.5,0.677,25,0 278 | 7,106,60,24,0,26.5,0.296,29,1 279 | 0,104,64,23,116,27.8,0.454,23,0 280 | 5,114,74,0,0,24.9,0.744,57,0 281 | 2,108,62,10,278,25.3,0.881,22,0 282 | 0,146,70,0,0,37.9,0.334,28,1 283 | 10,129,76,28,122,35.9,0.28,39,0 284 | 7,133,88,15,155,32.4,0.262,37,0 285 | 7,161,86,0,0,30.4,0.165,47,1 286 | 2,108,80,0,0,27,0.259,52,1 287 | 7,136,74,26,135,26,0.647,51,0 288 | 5,155,84,44,545,38.7,0.619,34,0 289 | 1,119,86,39,220,45.6,0.808,29,1 290 | 4,96,56,17,49,20.8,0.34,26,0 291 | 5,108,72,43,75,36.1,0.263,33,0 292 | 0,78,88,29,40,36.9,0.434,21,0 293 | 0,107,62,30,74,36.6,0.757,25,1 294 | 2,128,78,37,182,43.3,1.224,31,1 295 | 1,128,48,45,194,40.5,0.613,24,1 296 | 0,161,50,0,0,21.9,0.254,65,0 297 | 6,151,62,31,120,35.5,0.692,28,0 298 | 2,146,70,38,360,28,0.337,29,1 299 | 0,126,84,29,215,30.7,0.52,24,0 300 | 14,100,78,25,184,36.6,0.412,46,1 301 | 8,112,72,0,0,23.6,0.84,58,0 302 | 0,167,0,0,0,32.3,0.839,30,1 303 | 2,144,58,33,135,31.6,0.422,25,1 304 | 5,77,82,41,42,35.8,0.156,35,0 305 | 5,115,98,0,0,52.9,0.209,28,1 306 | 3,150,76,0,0,21,0.207,37,0 307 | 2,120,76,37,105,39.7,0.215,29,0 308 | 10,161,68,23,132,25.5,0.326,47,1 309 | 0,137,68,14,148,24.8,0.143,21,0 310 | 
0,128,68,19,180,30.5,1.391,25,1 311 | 2,124,68,28,205,32.9,0.875,30,1 312 | 6,80,66,30,0,26.2,0.313,41,0 313 | 0,106,70,37,148,39.4,0.605,22,0 314 | 2,155,74,17,96,26.6,0.433,27,1 315 | 3,113,50,10,85,29.5,0.626,25,0 316 | 7,109,80,31,0,35.9,1.127,43,1 317 | 2,112,68,22,94,34.1,0.315,26,0 318 | 3,99,80,11,64,19.3,0.284,30,0 319 | 3,182,74,0,0,30.5,0.345,29,1 320 | 3,115,66,39,140,38.1,0.15,28,0 321 | 6,194,78,0,0,23.5,0.129,59,1 322 | 4,129,60,12,231,27.5,0.527,31,0 323 | 3,112,74,30,0,31.6,0.197,25,1 324 | 0,124,70,20,0,27.4,0.254,36,1 325 | 13,152,90,33,29,26.8,0.731,43,1 326 | 2,112,75,32,0,35.7,0.148,21,0 327 | 1,157,72,21,168,25.6,0.123,24,0 328 | 1,122,64,32,156,35.1,0.692,30,1 329 | 10,179,70,0,0,35.1,0.2,37,0 330 | 2,102,86,36,120,45.5,0.127,23,1 331 | 6,105,70,32,68,30.8,0.122,37,0 332 | 8,118,72,19,0,23.1,1.476,46,0 333 | 2,87,58,16,52,32.7,0.166,25,0 334 | 1,180,0,0,0,43.3,0.282,41,1 335 | 12,106,80,0,0,23.6,0.137,44,0 336 | 1,95,60,18,58,23.9,0.26,22,0 337 | 0,165,76,43,255,47.9,0.259,26,0 338 | 0,117,0,0,0,33.8,0.932,44,0 339 | 5,115,76,0,0,31.2,0.343,44,1 340 | 9,152,78,34,171,34.2,0.893,33,1 341 | 7,178,84,0,0,39.9,0.331,41,1 342 | 1,130,70,13,105,25.9,0.472,22,0 343 | 1,95,74,21,73,25.9,0.673,36,0 344 | 1,0,68,35,0,32,0.389,22,0 345 | 5,122,86,0,0,34.7,0.29,33,0 346 | 8,95,72,0,0,36.8,0.485,57,0 347 | 8,126,88,36,108,38.5,0.349,49,0 348 | 1,139,46,19,83,28.7,0.654,22,0 349 | 3,116,0,0,0,23.5,0.187,23,0 350 | 3,99,62,19,74,21.8,0.279,26,0 351 | 5,0,80,32,0,41,0.346,37,1 352 | 4,92,80,0,0,42.2,0.237,29,0 353 | 4,137,84,0,0,31.2,0.252,30,0 354 | 3,61,82,28,0,34.4,0.243,46,0 355 | 1,90,62,12,43,27.2,0.58,24,0 356 | 3,90,78,0,0,42.7,0.559,21,0 357 | 9,165,88,0,0,30.4,0.302,49,1 358 | 1,125,50,40,167,33.3,0.962,28,1 359 | 13,129,0,30,0,39.9,0.569,44,1 360 | 12,88,74,40,54,35.3,0.378,48,0 361 | 1,196,76,36,249,36.5,0.875,29,1 362 | 5,189,64,33,325,31.2,0.583,29,1 363 | 5,158,70,0,0,29.8,0.207,63,0 364 | 5,103,108,37,0,39.2,0.305,65,0 365 | 4,146,78,0,0,38.5,0.52,67,1 366 | 4,147,74,25,293,34.9,0.385,30,0 367 | 5,99,54,28,83,34,0.499,30,0 368 | 6,124,72,0,0,27.6,0.368,29,1 369 | 0,101,64,17,0,21,0.252,21,0 370 | 3,81,86,16,66,27.5,0.306,22,0 371 | 1,133,102,28,140,32.8,0.234,45,1 372 | 3,173,82,48,465,38.4,2.137,25,1 373 | 0,118,64,23,89,0,1.731,21,0 374 | 0,84,64,22,66,35.8,0.545,21,0 375 | 2,105,58,40,94,34.9,0.225,25,0 376 | 2,122,52,43,158,36.2,0.816,28,0 377 | 12,140,82,43,325,39.2,0.528,58,1 378 | 0,98,82,15,84,25.2,0.299,22,0 379 | 1,87,60,37,75,37.2,0.509,22,0 380 | 4,156,75,0,0,48.3,0.238,32,1 381 | 0,93,100,39,72,43.4,1.021,35,0 382 | 1,107,72,30,82,30.8,0.821,24,0 383 | 0,105,68,22,0,20,0.236,22,0 384 | 1,109,60,8,182,25.4,0.947,21,0 385 | 1,90,62,18,59,25.1,1.268,25,0 386 | 1,125,70,24,110,24.3,0.221,25,0 387 | 1,119,54,13,50,22.3,0.205,24,0 388 | 5,116,74,29,0,32.3,0.66,35,1 389 | 8,105,100,36,0,43.3,0.239,45,1 390 | 5,144,82,26,285,32,0.452,58,1 391 | 3,100,68,23,81,31.6,0.949,28,0 392 | 1,100,66,29,196,32,0.444,42,0 393 | 5,166,76,0,0,45.7,0.34,27,1 394 | 1,131,64,14,415,23.7,0.389,21,0 395 | 4,116,72,12,87,22.1,0.463,37,0 396 | 4,158,78,0,0,32.9,0.803,31,1 397 | 2,127,58,24,275,27.7,1.6,25,0 398 | 3,96,56,34,115,24.7,0.944,39,0 399 | 0,131,66,40,0,34.3,0.196,22,1 400 | 3,82,70,0,0,21.1,0.389,25,0 401 | 3,193,70,31,0,34.9,0.241,25,1 402 | 4,95,64,0,0,32,0.161,31,1 403 | 6,137,61,0,0,24.2,0.151,55,0 404 | 5,136,84,41,88,35,0.286,35,1 405 | 9,72,78,25,0,31.6,0.28,38,0 406 | 5,168,64,0,0,32.9,0.135,41,1 407 | 2,123,48,32,165,42.1,0.52,26,0 408 | 
4,115,72,0,0,28.9,0.376,46,1 409 | 0,101,62,0,0,21.9,0.336,25,0 410 | 8,197,74,0,0,25.9,1.191,39,1 411 | 1,172,68,49,579,42.4,0.702,28,1 412 | 6,102,90,39,0,35.7,0.674,28,0 413 | 1,112,72,30,176,34.4,0.528,25,0 414 | 1,143,84,23,310,42.4,1.076,22,0 415 | 1,143,74,22,61,26.2,0.256,21,0 416 | 0,138,60,35,167,34.6,0.534,21,1 417 | 3,173,84,33,474,35.7,0.258,22,1 418 | 1,97,68,21,0,27.2,1.095,22,0 419 | 4,144,82,32,0,38.5,0.554,37,1 420 | 1,83,68,0,0,18.2,0.624,27,0 421 | 3,129,64,29,115,26.4,0.219,28,1 422 | 1,119,88,41,170,45.3,0.507,26,0 423 | 2,94,68,18,76,26,0.561,21,0 424 | 0,102,64,46,78,40.6,0.496,21,0 425 | 2,115,64,22,0,30.8,0.421,21,0 426 | 8,151,78,32,210,42.9,0.516,36,1 427 | 4,184,78,39,277,37,0.264,31,1 428 | 0,94,0,0,0,0,0.256,25,0 429 | 1,181,64,30,180,34.1,0.328,38,1 430 | 0,135,94,46,145,40.6,0.284,26,0 431 | 1,95,82,25,180,35,0.233,43,1 432 | 2,99,0,0,0,22.2,0.108,23,0 433 | 3,89,74,16,85,30.4,0.551,38,0 434 | 1,80,74,11,60,30,0.527,22,0 435 | 2,139,75,0,0,25.6,0.167,29,0 436 | 1,90,68,8,0,24.5,1.138,36,0 437 | 0,141,0,0,0,42.4,0.205,29,1 438 | 12,140,85,33,0,37.4,0.244,41,0 439 | 5,147,75,0,0,29.9,0.434,28,0 440 | 1,97,70,15,0,18.2,0.147,21,0 441 | 6,107,88,0,0,36.8,0.727,31,0 442 | 0,189,104,25,0,34.3,0.435,41,1 443 | 2,83,66,23,50,32.2,0.497,22,0 444 | 4,117,64,27,120,33.2,0.23,24,0 445 | 8,108,70,0,0,30.5,0.955,33,1 446 | 4,117,62,12,0,29.7,0.38,30,1 447 | 0,180,78,63,14,59.4,2.42,25,1 448 | 1,100,72,12,70,25.3,0.658,28,0 449 | 0,95,80,45,92,36.5,0.33,26,0 450 | 0,104,64,37,64,33.6,0.51,22,1 451 | 0,120,74,18,63,30.5,0.285,26,0 452 | 1,82,64,13,95,21.2,0.415,23,0 453 | 2,134,70,0,0,28.9,0.542,23,1 454 | 0,91,68,32,210,39.9,0.381,25,0 455 | 2,119,0,0,0,19.6,0.832,72,0 456 | 2,100,54,28,105,37.8,0.498,24,0 457 | 14,175,62,30,0,33.6,0.212,38,1 458 | 1,135,54,0,0,26.7,0.687,62,0 459 | 5,86,68,28,71,30.2,0.364,24,0 460 | 10,148,84,48,237,37.6,1.001,51,1 461 | 9,134,74,33,60,25.9,0.46,81,0 462 | 9,120,72,22,56,20.8,0.733,48,0 463 | 1,71,62,0,0,21.8,0.416,26,0 464 | 8,74,70,40,49,35.3,0.705,39,0 465 | 5,88,78,30,0,27.6,0.258,37,0 466 | 10,115,98,0,0,24,1.022,34,0 467 | 0,124,56,13,105,21.8,0.452,21,0 468 | 0,74,52,10,36,27.8,0.269,22,0 469 | 0,97,64,36,100,36.8,0.6,25,0 470 | 8,120,0,0,0,30,0.183,38,1 471 | 6,154,78,41,140,46.1,0.571,27,0 472 | 1,144,82,40,0,41.3,0.607,28,0 473 | 0,137,70,38,0,33.2,0.17,22,0 474 | 0,119,66,27,0,38.8,0.259,22,0 475 | 7,136,90,0,0,29.9,0.21,50,0 476 | 4,114,64,0,0,28.9,0.126,24,0 477 | 0,137,84,27,0,27.3,0.231,59,0 478 | 2,105,80,45,191,33.7,0.711,29,1 479 | 7,114,76,17,110,23.8,0.466,31,0 480 | 8,126,74,38,75,25.9,0.162,39,0 481 | 4,132,86,31,0,28,0.419,63,0 482 | 3,158,70,30,328,35.5,0.344,35,1 483 | 0,123,88,37,0,35.2,0.197,29,0 484 | 4,85,58,22,49,27.8,0.306,28,0 485 | 0,84,82,31,125,38.2,0.233,23,0 486 | 0,145,0,0,0,44.2,0.63,31,1 487 | 0,135,68,42,250,42.3,0.365,24,1 488 | 1,139,62,41,480,40.7,0.536,21,0 489 | 0,173,78,32,265,46.5,1.159,58,0 490 | 4,99,72,17,0,25.6,0.294,28,0 491 | 8,194,80,0,0,26.1,0.551,67,0 492 | 2,83,65,28,66,36.8,0.629,24,0 493 | 2,89,90,30,0,33.5,0.292,42,0 494 | 4,99,68,38,0,32.8,0.145,33,0 495 | 4,125,70,18,122,28.9,1.144,45,1 496 | 3,80,0,0,0,0,0.174,22,0 497 | 6,166,74,0,0,26.6,0.304,66,0 498 | 5,110,68,0,0,26,0.292,30,0 499 | 2,81,72,15,76,30.1,0.547,25,0 500 | 7,195,70,33,145,25.1,0.163,55,1 501 | 6,154,74,32,193,29.3,0.839,39,0 502 | 2,117,90,19,71,25.2,0.313,21,0 503 | 3,84,72,32,0,37.2,0.267,28,0 504 | 6,0,68,41,0,39,0.727,41,1 505 | 7,94,64,25,79,33.3,0.738,41,0 506 | 3,96,78,39,0,37.3,0.238,40,0 507 | 
10,75,82,0,0,33.3,0.263,38,0 508 | 0,180,90,26,90,36.5,0.314,35,1 509 | 1,130,60,23,170,28.6,0.692,21,0 510 | 2,84,50,23,76,30.4,0.968,21,0 511 | 8,120,78,0,0,25,0.409,64,0 512 | 12,84,72,31,0,29.7,0.297,46,1 513 | 0,139,62,17,210,22.1,0.207,21,0 514 | 9,91,68,0,0,24.2,0.2,58,0 515 | 2,91,62,0,0,27.3,0.525,22,0 516 | 3,99,54,19,86,25.6,0.154,24,0 517 | 3,163,70,18,105,31.6,0.268,28,1 518 | 9,145,88,34,165,30.3,0.771,53,1 519 | 7,125,86,0,0,37.6,0.304,51,0 520 | 13,76,60,0,0,32.8,0.18,41,0 521 | 6,129,90,7,326,19.6,0.582,60,0 522 | 2,68,70,32,66,25,0.187,25,0 523 | 3,124,80,33,130,33.2,0.305,26,0 524 | 6,114,0,0,0,0,0.189,26,0 525 | 9,130,70,0,0,34.2,0.652,45,1 526 | 3,125,58,0,0,31.6,0.151,24,0 527 | 3,87,60,18,0,21.8,0.444,21,0 528 | 1,97,64,19,82,18.2,0.299,21,0 529 | 3,116,74,15,105,26.3,0.107,24,0 530 | 0,117,66,31,188,30.8,0.493,22,0 531 | 0,111,65,0,0,24.6,0.66,31,0 532 | 2,122,60,18,106,29.8,0.717,22,0 533 | 0,107,76,0,0,45.3,0.686,24,0 534 | 1,86,66,52,65,41.3,0.917,29,0 535 | 6,91,0,0,0,29.8,0.501,31,0 536 | 1,77,56,30,56,33.3,1.251,24,0 537 | 4,132,0,0,0,32.9,0.302,23,1 538 | 0,105,90,0,0,29.6,0.197,46,0 539 | 0,57,60,0,0,21.7,0.735,67,0 540 | 0,127,80,37,210,36.3,0.804,23,0 541 | 3,129,92,49,155,36.4,0.968,32,1 542 | 8,100,74,40,215,39.4,0.661,43,1 543 | 3,128,72,25,190,32.4,0.549,27,1 544 | 10,90,85,32,0,34.9,0.825,56,1 545 | 4,84,90,23,56,39.5,0.159,25,0 546 | 1,88,78,29,76,32,0.365,29,0 547 | 8,186,90,35,225,34.5,0.423,37,1 548 | 5,187,76,27,207,43.6,1.034,53,1 549 | 4,131,68,21,166,33.1,0.16,28,0 550 | 1,164,82,43,67,32.8,0.341,50,0 551 | 4,189,110,31,0,28.5,0.68,37,0 552 | 1,116,70,28,0,27.4,0.204,21,0 553 | 3,84,68,30,106,31.9,0.591,25,0 554 | 6,114,88,0,0,27.8,0.247,66,0 555 | 1,88,62,24,44,29.9,0.422,23,0 556 | 1,84,64,23,115,36.9,0.471,28,0 557 | 7,124,70,33,215,25.5,0.161,37,0 558 | 1,97,70,40,0,38.1,0.218,30,0 559 | 8,110,76,0,0,27.8,0.237,58,0 560 | 11,103,68,40,0,46.2,0.126,42,0 561 | 11,85,74,0,0,30.1,0.3,35,0 562 | 6,125,76,0,0,33.8,0.121,54,1 563 | 0,198,66,32,274,41.3,0.502,28,1 564 | 1,87,68,34,77,37.6,0.401,24,0 565 | 6,99,60,19,54,26.9,0.497,32,0 566 | 0,91,80,0,0,32.4,0.601,27,0 567 | 2,95,54,14,88,26.1,0.748,22,0 568 | 1,99,72,30,18,38.6,0.412,21,0 569 | 6,92,62,32,126,32,0.085,46,0 570 | 4,154,72,29,126,31.3,0.338,37,0 571 | 0,121,66,30,165,34.3,0.203,33,1 572 | 3,78,70,0,0,32.5,0.27,39,0 573 | 2,130,96,0,0,22.6,0.268,21,0 574 | 3,111,58,31,44,29.5,0.43,22,0 575 | 2,98,60,17,120,34.7,0.198,22,0 576 | 1,143,86,30,330,30.1,0.892,23,0 577 | 1,119,44,47,63,35.5,0.28,25,0 578 | 6,108,44,20,130,24,0.813,35,0 579 | 2,118,80,0,0,42.9,0.693,21,1 580 | 10,133,68,0,0,27,0.245,36,0 581 | 2,197,70,99,0,34.7,0.575,62,1 582 | 0,151,90,46,0,42.1,0.371,21,1 583 | 6,109,60,27,0,25,0.206,27,0 584 | 12,121,78,17,0,26.5,0.259,62,0 585 | 8,100,76,0,0,38.7,0.19,42,0 586 | 8,124,76,24,600,28.7,0.687,52,1 587 | 1,93,56,11,0,22.5,0.417,22,0 588 | 8,143,66,0,0,34.9,0.129,41,1 589 | 6,103,66,0,0,24.3,0.249,29,0 590 | 3,176,86,27,156,33.3,1.154,52,1 591 | 0,73,0,0,0,21.1,0.342,25,0 592 | 11,111,84,40,0,46.8,0.925,45,1 593 | 2,112,78,50,140,39.4,0.175,24,0 594 | 3,132,80,0,0,34.4,0.402,44,1 595 | 2,82,52,22,115,28.5,1.699,25,0 596 | 6,123,72,45,230,33.6,0.733,34,0 597 | 0,188,82,14,185,32,0.682,22,1 598 | 0,67,76,0,0,45.3,0.194,46,0 599 | 1,89,24,19,25,27.8,0.559,21,0 600 | 1,173,74,0,0,36.8,0.088,38,1 601 | 1,109,38,18,120,23.1,0.407,26,0 602 | 1,108,88,19,0,27.1,0.4,24,0 603 | 6,96,0,0,0,23.7,0.19,28,0 604 | 1,124,74,36,0,27.8,0.1,30,0 605 | 7,150,78,29,126,35.2,0.692,54,1 606 | 
4,183,0,0,0,28.4,0.212,36,1 607 | 1,124,60,32,0,35.8,0.514,21,0 608 | 1,181,78,42,293,40,1.258,22,1 609 | 1,92,62,25,41,19.5,0.482,25,0 610 | 0,152,82,39,272,41.5,0.27,27,0 611 | 1,111,62,13,182,24,0.138,23,0 612 | 3,106,54,21,158,30.9,0.292,24,0 613 | 3,174,58,22,194,32.9,0.593,36,1 614 | 7,168,88,42,321,38.2,0.787,40,1 615 | 6,105,80,28,0,32.5,0.878,26,0 616 | 11,138,74,26,144,36.1,0.557,50,1 617 | 3,106,72,0,0,25.8,0.207,27,0 618 | 6,117,96,0,0,28.7,0.157,30,0 619 | 2,68,62,13,15,20.1,0.257,23,0 620 | 9,112,82,24,0,28.2,1.282,50,1 621 | 0,119,0,0,0,32.4,0.141,24,1 622 | 2,112,86,42,160,38.4,0.246,28,0 623 | 2,92,76,20,0,24.2,1.698,28,0 624 | 6,183,94,0,0,40.8,1.461,45,0 625 | 0,94,70,27,115,43.5,0.347,21,0 626 | 2,108,64,0,0,30.8,0.158,21,0 627 | 4,90,88,47,54,37.7,0.362,29,0 628 | 0,125,68,0,0,24.7,0.206,21,0 629 | 0,132,78,0,0,32.4,0.393,21,0 630 | 5,128,80,0,0,34.6,0.144,45,0 631 | 4,94,65,22,0,24.7,0.148,21,0 632 | 7,114,64,0,0,27.4,0.732,34,1 633 | 0,102,78,40,90,34.5,0.238,24,0 634 | 2,111,60,0,0,26.2,0.343,23,0 635 | 1,128,82,17,183,27.5,0.115,22,0 636 | 10,92,62,0,0,25.9,0.167,31,0 637 | 13,104,72,0,0,31.2,0.465,38,1 638 | 5,104,74,0,0,28.8,0.153,48,0 639 | 2,94,76,18,66,31.6,0.649,23,0 640 | 7,97,76,32,91,40.9,0.871,32,1 641 | 1,100,74,12,46,19.5,0.149,28,0 642 | 0,102,86,17,105,29.3,0.695,27,0 643 | 4,128,70,0,0,34.3,0.303,24,0 644 | 6,147,80,0,0,29.5,0.178,50,1 645 | 4,90,0,0,0,28,0.61,31,0 646 | 3,103,72,30,152,27.6,0.73,27,0 647 | 2,157,74,35,440,39.4,0.134,30,0 648 | 1,167,74,17,144,23.4,0.447,33,1 649 | 0,179,50,36,159,37.8,0.455,22,1 650 | 11,136,84,35,130,28.3,0.26,42,1 651 | 0,107,60,25,0,26.4,0.133,23,0 652 | 1,91,54,25,100,25.2,0.234,23,0 653 | 1,117,60,23,106,33.8,0.466,27,0 654 | 5,123,74,40,77,34.1,0.269,28,0 655 | 2,120,54,0,0,26.8,0.455,27,0 656 | 1,106,70,28,135,34.2,0.142,22,0 657 | 2,155,52,27,540,38.7,0.24,25,1 658 | 2,101,58,35,90,21.8,0.155,22,0 659 | 1,120,80,48,200,38.9,1.162,41,0 660 | 11,127,106,0,0,39,0.19,51,0 661 | 3,80,82,31,70,34.2,1.292,27,1 662 | 10,162,84,0,0,27.7,0.182,54,0 663 | 1,199,76,43,0,42.9,1.394,22,1 664 | 8,167,106,46,231,37.6,0.165,43,1 665 | 9,145,80,46,130,37.9,0.637,40,1 666 | 6,115,60,39,0,33.7,0.245,40,1 667 | 1,112,80,45,132,34.8,0.217,24,0 668 | 4,145,82,18,0,32.5,0.235,70,1 669 | 10,111,70,27,0,27.5,0.141,40,1 670 | 6,98,58,33,190,34,0.43,43,0 671 | 9,154,78,30,100,30.9,0.164,45,0 672 | 6,165,68,26,168,33.6,0.631,49,0 673 | 1,99,58,10,0,25.4,0.551,21,0 674 | 10,68,106,23,49,35.5,0.285,47,0 675 | 3,123,100,35,240,57.3,0.88,22,0 676 | 8,91,82,0,0,35.6,0.587,68,0 677 | 6,195,70,0,0,30.9,0.328,31,1 678 | 9,156,86,0,0,24.8,0.23,53,1 679 | 0,93,60,0,0,35.3,0.263,25,0 680 | 3,121,52,0,0,36,0.127,25,1 681 | 2,101,58,17,265,24.2,0.614,23,0 682 | 2,56,56,28,45,24.2,0.332,22,0 683 | 0,162,76,36,0,49.6,0.364,26,1 684 | 0,95,64,39,105,44.6,0.366,22,0 685 | 4,125,80,0,0,32.3,0.536,27,1 686 | 5,136,82,0,0,0,0.64,69,0 687 | 2,129,74,26,205,33.2,0.591,25,0 688 | 3,130,64,0,0,23.1,0.314,22,0 689 | 1,107,50,19,0,28.3,0.181,29,0 690 | 1,140,74,26,180,24.1,0.828,23,0 691 | 1,144,82,46,180,46.1,0.335,46,1 692 | 8,107,80,0,0,24.6,0.856,34,0 693 | 13,158,114,0,0,42.3,0.257,44,1 694 | 2,121,70,32,95,39.1,0.886,23,0 695 | 7,129,68,49,125,38.5,0.439,43,1 696 | 2,90,60,0,0,23.5,0.191,25,0 697 | 7,142,90,24,480,30.4,0.128,43,1 698 | 3,169,74,19,125,29.9,0.268,31,1 699 | 0,99,0,0,0,25,0.253,22,0 700 | 4,127,88,11,155,34.5,0.598,28,0 701 | 4,118,70,0,0,44.5,0.904,26,0 702 | 2,122,76,27,200,35.9,0.483,26,0 703 | 6,125,78,31,0,27.6,0.565,49,1 704 | 
1,168,88,29,0,35,0.905,52,1 705 | 2,129,0,0,0,38.5,0.304,41,0 706 | 4,110,76,20,100,28.4,0.118,27,0 707 | 6,80,80,36,0,39.8,0.177,28,0 708 | 10,115,0,0,0,0,0.261,30,1 709 | 2,127,46,21,335,34.4,0.176,22,0 710 | 9,164,78,0,0,32.8,0.148,45,1 711 | 2,93,64,32,160,38,0.674,23,1 712 | 3,158,64,13,387,31.2,0.295,24,0 713 | 5,126,78,27,22,29.6,0.439,40,0 714 | 10,129,62,36,0,41.2,0.441,38,1 715 | 0,134,58,20,291,26.4,0.352,21,0 716 | 3,102,74,0,0,29.5,0.121,32,0 717 | 7,187,50,33,392,33.9,0.826,34,1 718 | 3,173,78,39,185,33.8,0.97,31,1 719 | 10,94,72,18,0,23.1,0.595,56,0 720 | 1,108,60,46,178,35.5,0.415,24,0 721 | 5,97,76,27,0,35.6,0.378,52,1 722 | 4,83,86,19,0,29.3,0.317,34,0 723 | 1,114,66,36,200,38.1,0.289,21,0 724 | 1,149,68,29,127,29.3,0.349,42,1 725 | 5,117,86,30,105,39.1,0.251,42,0 726 | 1,111,94,0,0,32.8,0.265,45,0 727 | 4,112,78,40,0,39.4,0.236,38,0 728 | 1,116,78,29,180,36.1,0.496,25,0 729 | 0,141,84,26,0,32.4,0.433,22,0 730 | 2,175,88,0,0,22.9,0.326,22,0 731 | 2,92,52,0,0,30.1,0.141,22,0 732 | 3,130,78,23,79,28.4,0.323,34,1 733 | 8,120,86,0,0,28.4,0.259,22,1 734 | 2,174,88,37,120,44.5,0.646,24,1 735 | 2,106,56,27,165,29,0.426,22,0 736 | 2,105,75,0,0,23.3,0.56,53,0 737 | 4,95,60,32,0,35.4,0.284,28,0 738 | 0,126,86,27,120,27.4,0.515,21,0 739 | 8,65,72,23,0,32,0.6,42,0 740 | 2,99,60,17,160,36.6,0.453,21,0 741 | 1,102,74,0,0,39.5,0.293,42,1 742 | 11,120,80,37,150,42.3,0.785,48,1 743 | 3,102,44,20,94,30.8,0.4,26,0 744 | 1,109,58,18,116,28.5,0.219,22,0 745 | 9,140,94,0,0,32.7,0.734,45,1 746 | 13,153,88,37,140,40.6,1.174,39,0 747 | 12,100,84,33,105,30,0.488,46,0 748 | 1,147,94,41,0,49.3,0.358,27,1 749 | 1,81,74,41,57,46.3,1.096,32,0 750 | 3,187,70,22,200,36.4,0.408,36,1 751 | 6,162,62,0,0,24.3,0.178,50,1 752 | 4,136,70,0,0,31.2,1.182,22,1 753 | 1,121,78,39,74,39,0.261,28,0 754 | 3,108,62,24,0,26,0.223,25,0 755 | 0,181,88,44,510,43.3,0.222,26,1 756 | 8,154,78,32,0,32.4,0.443,45,1 757 | 1,128,88,39,110,36.5,1.057,37,1 758 | 7,137,90,41,0,32,0.391,39,0 759 | 0,123,72,0,0,36.3,0.258,52,1 760 | 1,106,76,0,0,37.5,0.197,26,0 761 | 6,190,92,0,0,35.5,0.278,66,1 762 | 2,88,58,26,16,28.4,0.766,22,0 763 | 9,170,74,31,0,44,0.403,43,1 764 | 9,89,62,0,0,22.5,0.142,33,0 765 | 10,101,76,48,180,32.9,0.171,63,0 766 | 2,122,70,27,0,36.8,0.34,27,0 767 | 5,121,72,23,112,26.2,0.245,30,0 768 | 1,126,60,0,0,30.1,0.349,47,1 769 | 1,93,70,31,0,30.4,0.315,23,0 -------------------------------------------------------------------------------- /experiments/results_diabetes/mlp/model.csv: -------------------------------------------------------------------------------- 1 | intercept,layer,node,node_0,node_1,node_2,node_3,node_4,node_5,node_6,node_7,node_8,node_9,node_10,node_11,node_12,node_13,node_14,node_15,node_16,node_17,node_18,node_19,node_20,node_21,node_22,node_23,node_24,node_25,node_26,node_27,node_28,node_29,node_30,node_31,node_32,node_33,node_34,node_35,node_36,node_37,node_38,node_39,node_40,node_41,node_42,node_43,node_44,node_45,node_46,node_47,node_48,node_49,node_50,node_51,node_52,node_53,node_54,node_55,node_56,node_57,node_58,node_59,node_60,node_61,node_62,node_63,node_64,node_65,node_66,node_67,node_68,node_69,node_70,node_71,node_72,node_73,node_74,node_75,node_76,node_77,node_78,node_79,node_80,node_81,node_82,node_83,node_84,node_85,node_86,node_87,node_88,node_89,node_90,node_91,node_92,node_93,node_94,node_95,node_96,node_97,node_98,node_99 2 | 
-0.2168494727804177,0,0,1.3502880247194057e-61,-3.186265303230712e-51,-3.051001372638506e-50,1.2282132237659842e-28,2.7136162949445054e-58,-3.4121731923185005e-50,4.369077040337422e-37,-1.0623235710817353e-55,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 3 | 0.3857081207958196,0,1,-0.08368657303425048,-0.23657597474565092,0.26041747299497253,0.24205491393214773,0.2726479097720116,-0.1568264698205712,-0.22058251974671908,-0.7072537948277506,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 4 | -0.06609576469808634,0,2,0.029184956533051525,0.08532995728914367,-0.07221747540160146,-0.08571172416959956,-0.04792569684210472,-0.013621739930973837,0.057358789171324115,-0.20557525247029856,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 5 | -0.11029127308918153,0,3,-0.014349258642472676,0.15844733445006,-0.16451890602408245,-0.22003515943460009,0.18797013535599114,0.05193002849635226,-0.24757609083082166,-0.25110604572696477,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 6 | 0.0760104668645433,0,4,-0.0773443862441187,-0.17424647882925154,-0.21386402522544146,-0.05394636455993133,0.1464883898339884,-0.09458369083642036,0.018035631734092245,-0.14052255838962222,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 7 | -0.04069959636207438,0,5,0.1499547698812781,0.43508385556168094,-0.39859229204149205,-0.11172396604970548,-0.25366681373791616,0.3578588427109637,-0.06315189403685616,-0.3807260286160163,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 8 | 0.37885535493612915,0,6,-0.10714447777582181,-0.1616006905974136,0.11659928335679072,-0.16903031651917827,-0.22078872151746848,0.04821964161889891,-0.19800875149082328,-0.036071603120903316,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 9 | -0.08442168572178352,0,7,0.13544508892662802,0.013219368800696973,-0.010255510637173526,-0.005351008648177608,-1.3211692595223412e-42,-0.01178709741492191,-0.037865749948045486,-0.10719578288041273,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 10 | 0.4881109196451743,0,8,0.03604133211027319,-0.3494670545493643,0.1190446586582914,0.09698453636255494,0.038575606723289586,-0.1833795983299535,-0.3476112553712565,-0.6503408713040179,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 11 | 0.28303290826060873,0,9,-0.2634457062265703,0.05095273151970325,0.1378802844291079,-0.3051243057801809,-0.3264393311581058,-0.1576107586510655,-0.2255584272477018,0.20724829327420666,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 12 | 0.1424403966585765,0,10,0.554515369821733,0.09890570759189463,0.09980524426219547,0.25777135427186787,-0.34853151281008593,-0.9384095592442641,-0.3850691452280528,-0.415777328095057,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 13 | 0.33778091075511774,0,11,0.05427965478302607,-0.02272852399485216,-0.09663034597125193,0.05871903546162328,-0.15122658255992294,-0.31147425152992647,-0.3853320235694037,0.010956082678811476,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 14 | 
-0.2213107738776456,0,12,0.13619787692234836,0.3599785467650885,0.09242614759312041,-0.08740389205469634,-0.18538690050509601,0.3185635713497621,0.2527037931629114,0.09498066466657362,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 15 | 0.3923118646123839,0,13,0.12259155421468981,-0.2531327277775889,-0.05040834587214693,-0.04550536111091425,-0.10414512876886135,-0.014420821838273878,-0.14259771142973762,-0.10480773899127595,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 16 | -0.18270351480122493,0,14,-1.0952924176071583e-26,-6.080691647326532e-38,-2.4154816907990836e-23,-2.0535475137308106e-52,2.1764935784519895e-41,-2.0187968360194418e-56,-5.601106031884508e-29,-9.536113549990514e-35,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 17 | -0.13555974211014954,0,15,-4.8691984444189115e-28,1.3723982585643679e-57,6.639427512692075e-44,3.9416794511841554e-44,1.2054096500359934e-44,4.144195112223423e-48,4.1012235856505076e-53,-3.0278971519728224e-39,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 18 | -0.1494195390537757,0,16,-2.5570332105859393e-23,-1.0291290856496319e-31,2.9110580674895205e-51,-1.919865105914959e-59,2.2799832313044982e-32,-8.464521788070954e-40,1.0891906830729018e-58,2.230974366438451e-60,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 19 | -0.07729254506285342,0,17,0.11783837283525725,0.001081632965512505,-0.06723420794321641,-0.003967016265828164,-0.14561110315538228,-0.09821335124817517,0.03821208224163593,-0.00200122670997692,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 20 | -0.029188979783779982,0,18,0.2005149015441603,0.38455537180893445,0.033960717423544534,0.19779808448920208,-0.21259148322338578,0.21625552264423478,-0.0662114153607698,-0.12662137479833727,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 21 | 0.14232275480207351,0,19,0.1008902121239879,-0.08910755867583,0.17035196550385395,0.08208712363501242,0.06352230851155469,0.06578323915342753,0.2382556323035613,0.1621315805135154,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 22 | -0.1768230691909377,0,20,0.31844756436201554,0.3730698392918826,0.04810940380652599,-0.03998989532169989,0.20632444530413518,0.21153851435475796,-0.05117115059272327,0.07771075241073955,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 23 | -0.3316744974326085,0,21,0.3260670715341744,0.33969576368140303,0.2699384046075577,0.010993810401314244,0.2721971065804624,0.1568222474355282,0.21006658583994972,0.1906728454449007,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 24 | 0.42779362995328657,0,22,-0.2621783816424524,-0.21140916348355376,0.3298346586675413,-0.28086786861748103,0.43864978584773423,-0.31381918148566484,0.021087964198033226,-1.224419893590803,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 25 | -0.2834137756137287,0,23,0.2506111344591838,0.36131655852055816,0.17772487159458522,0.02618096258585456,-0.13731182760691954,0.33724594090602833,0.21070156688563046,0.16082022202169194,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 26 | 
-0.13753073587601386,0,24,9.628264543279111e-32,-1.4828485349451942e-59,7.297013405283629e-38,-1.679162025975097e-45,-1.0017085963702365e-54,-1.23316766379768e-22,1.5951739321850932e-23,-1.5296155112345257e-57,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 27 | 0.08956971706496032,0,25,0.13287900644424339,0.2043210754289776,0.19877225190384232,-0.003307184058433296,-0.011835842397063432,0.027792067431193362,0.1745746778678715,0.14919034849258078,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 28 | -0.11270292106795561,0,26,-0.07061342084492594,-0.08280115330896183,-0.26454295404435696,0.09888865876301672,0.6947309457527078,-0.16942538925764677,-0.28961848447218963,0.3734920038066134,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 29 | 0.3307650795939037,0,27,0.1168084919054914,-0.28044451450448993,-0.080470032289033,0.13766808068040795,-0.2644476927289576,0.09275957754428996,-0.053112528816808766,0.2631581003105321,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 30 | 0.03835114828026617,0,28,0.072540121448154,0.19871313104747654,0.1914894127414462,-0.10706919141716982,0.12698929375041376,-0.1426019835187698,-0.10192638376162194,0.09757723360793356,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 31 | -0.16068000483406186,0,29,7.443207876219326e-59,-3.608885410008221e-48,1.6215571758726292e-46,3.126926325564179e-56,-4.9755336886418216e-48,-3.1658194570410185e-58,-3.9303055684272294e-35,5.1524399563654266e-55,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 32 | 0.00778903720323271,0,30,-0.12466526685610187,0.014272065426744513,-0.020188968575107023,2.2872705443498953e-37,-7.820446964190014e-44,-0.023410781137938097,-0.1772117672940896,-0.022252226324458658,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 33 | -0.18117873109633936,0,31,-3.0005332022484526e-42,5.773119063570309e-60,5.596653400442781e-41,-1.6848579390580645e-41,5.524091911040955e-53,4.519227961973913e-53,-5.135430550302771e-44,-1.7342683733746673e-57,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 34 | 0.372428872281523,0,32,-0.05294953979704725,-0.3286174825915197,0.11863297866159894,-0.07522199001947025,-0.1065939251231235,-0.020065254301402075,-0.07195976556139569,0.07065639303531757,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 35 | 0.4210353522941027,0,33,0.01691371859895659,-0.21546726428387372,0.1322130784870344,0.22163186479707306,0.22275282603379282,-0.2053882405707514,-0.17835031348970543,-0.6947027102431212,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 36 | 0.48530809961577115,0,34,-0.6516138731036316,-0.13249802749869838,0.011529301787620926,0.308822003495961,0.43134707921619997,-0.2876122621180093,0.16160656717091254,-0.2298696978746157,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 37 | -0.14533985127617985,0,35,0.1716581691907036,0.33205367560940147,0.008075328675544282,0.22754450506841284,-0.022174011018231486,0.0610883976609826,0.2608099998901015,0.06384147725643384,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 38 | 
0.4134548804377309,0,36,-0.05467034637610061,-0.20719980805717467,0.07229195466692695,0.1343057746682141,0.0018480319771910355,-0.07705245215443024,-0.5577153978491384,-0.23983849426256562,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 39 | -0.2124389437831693,0,37,-1.3240061715219408e-56,-1.1627715047608625e-53,2.202899121877515e-62,-8.380768371257152e-56,-5.51079968696567e-48,-8.872544101285909e-50,-3.503195050022138e-30,-7.389049872361646e-63,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 40 | -0.052538965843864714,0,38,0.5078341939645634,-0.19852595113600655,-0.14464203763009348,-0.39066909019795754,-0.13685959903278008,0.2708135771899338,-0.9010237100864681,-0.42022615166989813,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 41 | -0.028584781314526537,0,39,0.15254729450369361,0.15584264681284815,0.03908944336914631,-0.09111982530756242,-0.07491564818491259,0.329133578577229,0.09299826115540298,-0.2147033443891298,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 42 | 0.24299464081112723,0,40,-0.2355522642154348,-0.0465454218712097,0.36602847439206,-0.1920191801511284,0.22120283703184618,-0.34272109236603393,0.5872867728486029,-0.7752302587190206,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 43 | -0.17155833620875474,0,41,-0.18152906359744891,-0.030202784430993015,-0.3759500360499424,-0.0860200180099821,0.28495049563238833,-0.06578836121010208,0.6812945199920664,0.9525236874456021,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 44 | -0.33241572761729404,0,42,0.4385800816320946,-0.09882509380367258,0.11501009372834177,-0.05482947261798267,0.7658211221115501,0.23224422293174393,0.30263085594803535,-1.071970839857593,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 45 | 0.020375564102289707,0,43,-0.18170752731764261,0.6105419279700438,-0.26927227842032087,-0.6480264300364089,-0.9161917946024251,0.3205707722302091,-0.3761199713910849,-0.19203468747104227,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 46 | 0.12162216667473114,0,44,0.0017458061657869809,-0.03586978298395259,0.13371865229789845,0.1187091703618055,-0.1333644222675825,0.10353092821948205,-0.0730158481937753,0.05521796367691736,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 47 | 0.43563490977894154,0,45,0.10404185890227255,-0.06730082451871386,0.057760092243414836,0.09729973570728889,0.048075969607985033,-0.42150872888301316,-0.6634616982833577,-0.13939639073687582,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 48 | 0.11880087422926541,0,46,0.08175883895920527,-0.9796409041783546,0.0987116434784441,0.013988661185583225,0.5850608625066941,0.06747275821819868,0.1602899106404661,0.43015516309300567,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 49 | -0.14538101170103507,0,47,-0.07881030361204532,0.5494191340776083,-0.13823774404048803,-0.18863252917004575,0.22312597499389972,0.28636144768490984,-0.23565518236743424,-0.12725808471943983,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 50 | 
-0.1708975058442249,0,48,0.04252597418672054,0.4006526818473673,-0.102205165597013,0.14984646192543838,0.2292905535378894,0.2592383397078547,0.22310692195820112,0.20595457004574927,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 51 | -0.14821157299175733,0,49,-0.12357273513997931,0.6492165832133794,-0.038657291334737304,-0.10396567012488439,0.2126702806951004,0.06088751521967577,-0.28105143471905397,-0.11173462825150185,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 52 | 0.42130874021600007,0,50,0.028260573336678717,-0.40019789788478255,0.10121438223138918,0.12285466763601134,0.05261078941060604,-0.03273136733228823,-0.40302999841375386,-0.4464591592497199,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 53 | -0.07359465274305126,0,51,0.019536984053940144,0.4785767919203675,0.13462890517790438,-0.2606627106193084,0.28113202463282094,-0.19061422628879338,0.012101943632255686,-0.0713792163338014,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 54 | -0.3201983786590414,0,52,0.35391343991104635,0.0370045113895631,0.32950066641281406,0.0044951475222970985,-0.6733359861632299,-0.20607646371806168,-0.30307376013503823,0.7617503932363408,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 55 | 0.31641868536417794,0,53,-0.2745991086778885,-0.14206933640963632,0.1270297357058703,0.002479372284849708,-0.1774806193882791,-0.08869090838083592,-0.2177632999237762,0.12431734100871149,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 56 | 0.39442034794358843,0,54,-0.17379960492506508,-0.362670968403701,0.21547764034632821,-0.0030820546565336173,0.22791152052070465,-0.021698872640015618,0.18650116662630264,-0.0407191408146153,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 57 | 0.47058668576504775,0,55,-0.014229902029584041,-0.08611761667488284,-0.12908514140509536,0.31964487006162684,-0.2930038900000799,-0.36876619338709216,-0.5646546603354368,0.06442430179338206,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 58 | -0.1522609347584484,0,56,-1.0906320827345939e-53,4.55094532353818e-40,-3.1454916025667776e-61,5.849644794414389e-47,3.4091738112827794e-50,9.79348347741335e-45,-9.899787912372207e-63,-1.4939761271693926e-55,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 59 | -0.02101537500958744,0,57,-1.9854799624093926e-11,0.0012236833526162593,0.0021876270477845,3.5528949741140445e-32,-9.641002927871974e-26,-0.0004569953944531523,-7.648633191363384e-11,-1.5525399612301867e-20,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 60 | -0.2694696125579701,0,58,0.017074433374763,0.2742428099453403,0.28601055003241976,0.0845600302782535,0.05088926213485768,0.36946235072770406,-0.0032998411674735034,-0.06836019956571081,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 61 | -0.19639966095113812,0,59,-0.09986097201461622,0.11254582903403729,-0.011897999889318667,0.34886496765280395,0.17697286433031845,0.5464869826832328,0.2630523664388102,-0.08861278341153335,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 62 | 
0.39255107292793534,0,60,-0.28593915142454956,-0.02657572360022915,-0.11950140705681692,-0.1832615806517193,-0.35001769691106394,-0.14791350067000134,-0.2080023750763819,0.38217256435035724,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 63 | 0.28527269159094976,0,61,-0.19550876676125997,-0.07196181691811773,-0.17392078213968906,0.15012125114277394,0.06850132767767499,-0.1340059911782387,0.356200679198848,0.11286928892952415,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 64 | -0.14671815202307353,0,62,0.03745090845666684,0.05915069925147357,-0.21842052147955185,0.005169452351905793,-0.039028385640727074,0.1775534517534598,-0.20872560572386117,-0.06823399166813686,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 65 | 0.23247490840411172,0,63,-0.4576996250987966,-0.20513199045133934,0.4179285377749694,-0.21047835498142337,0.048729602214243446,0.027326104114451168,-0.3812315025548564,-1.043487185454576,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 66 | 0.1927047796626717,0,64,-0.21926267598780386,0.02675324113870361,0.09994553954818251,0.15270410831887418,-0.11416457014116506,-0.12632670642596386,-0.045007086929694934,0.21019650448330596,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 67 | 0.3722391752602512,0,65,-0.1409081813499944,-0.038410694799330326,0.15749292570383366,-0.047816008816185036,-0.14474653967567433,0.06503123388579612,-0.0489708850604643,0.24154620036647415,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 68 | -0.16602804333814528,0,66,-3.338561286523155e-37,-7.642692660604131e-23,1.0559841705481102e-47,-8.1091135603355885e-47,7.714814951635633e-62,-4.275352330284902e-62,-3.3961237187214744e-54,1.1991972143557983e-22,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 69 | -0.24287739721961227,0,67,-0.18746434884631705,0.25621174872962893,0.08973554308713592,0.12830053940015348,-0.43239461711168925,0.5650729833749837,0.022429707399297494,-0.11541667177858941,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 70 | 0.30479125276814795,0,68,0.08671895083807306,-0.07042307415251839,-0.1936491830087603,0.16571124234931317,0.17037233266367213,0.04441089878550372,0.10827246491356175,0.25443778968894587,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 71 | -0.07501053221424686,0,69,-0.49560378228427965,0.05578074435871748,-0.020322986806325237,0.03207902740773381,-0.5739337747541715,0.5724554994495201,0.3227911299593823,0.10053512232586116,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 72 | 0.07300321917417388,0,70,0.28946239669901835,0.17048033817672764,0.21828512007851875,0.11383763229286097,-0.015094755543685516,0.29216037161098046,-0.06493964863311603,0.04145768371484799,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 73 | -0.14824828481544322,0,71,-2.054527831463552e-62,-1.8464458385036637e-25,-8.291890665116297e-32,2.8766206265337523e-52,1.1241230477588718e-25,-2.9254177255656837e-24,4.2470984432876445e-60,3.4851053523746306e-44,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 74 | 
-0.08063309284639104,0,72,0.6285642596917584,-0.14649986485597627,-0.1877072865257849,0.6663911128926496,0.3305868880136911,-0.13579483825456512,0.6556970516658274,-0.2868718063660527,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 75 | 0.3764669321135153,0,73,-0.0030100834027650953,-0.2615836895589865,0.3769434928113632,0.14867003225925565,0.13714498495491706,-0.040727818569093716,-0.5028200165304837,-0.7757556950468423,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 76 | -0.1987675282439072,0,74,0.20949450288294633,0.38774998300804087,-0.0934618167311762,-0.03009474345196803,0.20157842883018984,0.4054781797519352,0.18007327429331207,0.1674777866816553,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 77 | 0.3470149305409285,0,75,-0.3238596987093253,-0.2782649825702905,0.35046957658244804,-0.6123678175235531,0.3515235766983698,-0.27966575834638585,0.1323457492359837,-0.5837093641056104,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 78 | 0.38731034248842694,0,76,-0.28568817971908267,-0.23115259258546159,0.32315378462729755,0.08371105584098418,0.21439769620572202,-0.28208182677332494,0.1268098008966595,-0.6531438238892782,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 79 | 0.0781043019787277,0,77,-0.19947923387826458,0.06596302075074545,0.17348273834040875,0.19896356009024246,0.20595845613791786,0.08894296989020986,0.008102855837504232,0.006078218343728333,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 80 | 0.37012565456306795,0,78,-0.17673095875834352,-0.1296229965716929,0.088941219526412,-0.04588213264656781,0.25155332012975185,-0.17493708753160636,-0.1726579366578208,0.17336810104993094,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 81 | 0.31537410292695955,0,79,-0.11452922998216657,-0.41693782449546196,0.16423781562374334,0.04527462798861551,-0.16678605794888396,0.07511771374176035,-0.0441865700215643,0.3372870757640816,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 82 | 0.07712416234456225,0,80,-0.09301949295272205,-0.17336628923594588,-0.10332151596007635,0.11881760307241679,-0.00047907507369610513,-0.15585969671106653,-0.046488967986779164,-0.0757506023324438,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 83 | -0.2700886228152316,0,81,-0.12926020691539022,0.4733972510745089,0.07566719836406581,-0.06997192255454396,-0.14832281658005705,0.28232831809347975,-0.017435858950307644,-0.10613957972292637,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 84 | -0.09536152302812526,0,82,-1.378376278343208,0.27603519807622245,0.13754686739161945,0.2908702654337574,0.6036731824182997,-0.056411867883207144,-0.4171310199694113,0.45124330848023564,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 85 | 0.44344832197575473,0,83,0.1007943195755701,-0.29778971386886766,-0.08005248896157556,0.12552841949869287,-0.05578411109792775,-0.28486302729053214,0.22234959311122054,0.3934756771352057,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 86 | 
0.007185662851315398,0,84,0.09984080780846968,0.40462652608502536,-0.07213413582183179,0.1781908460174823,-0.20349642250395997,0.22498328656948685,-0.19068708292862827,-0.22070953906140917,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 87 | -0.07328904364571592,0,85,-8.87487411510114e-46,2.9649458461463265e-57,-5.981256776572348e-35,2.0501485221920194e-28,-1.254300583078435e-57,4.322138777967063e-44,1.7420877244764112e-58,-6.293390811514223e-35,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 88 | 0.020046650692903303,0,86,0.08068637147686379,0.09717320186535812,0.21740966768084397,0.1595872264433986,-0.053933935702521335,-0.20963697404842127,0.12141522896192782,-0.04163366704016053,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 89 | 0.03420121623925157,0,87,-0.25360227063150703,-0.02889641876719196,-0.047193631645836065,-0.1974154146202062,0.14518819589833112,-0.2357056345954936,-0.0025473071778596237,-0.24355489214851322,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 90 | -0.15266274552490078,0,88,0.11945746620806452,0.21952880764980845,-0.03574691160668527,-0.4226204623384704,0.6802004489376775,-0.5676904745708937,0.467729600545593,-0.5402576243272451,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 91 | 0.09257004103554077,0,89,0.26213505838847057,0.03739100887552246,0.07034343187132708,-0.0377516493984168,0.01548539462129453,0.2512687842498687,-0.1953687447592168,-0.21236830272740914,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 92 | -0.08987869293065856,0,90,-0.0900610424674326,-0.14021111520394183,-0.15869902002937875,-0.5574306017864781,-0.17436253864043935,0.07466125966474042,0.43060236914403044,0.8383819667761233,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 93 | 0.0695469708345577,0,91,0.46242745061686497,-0.3421276041567705,-0.2551144879046615,-0.15772780971012845,-1.0790383074832837,0.3813770410137433,-1.474409745940752,-0.2009275736926686,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 94 | 0.2226337115054295,0,92,-0.14991210112429654,-0.28902265003440425,0.22041626992420932,-0.05965861387570822,0.21783016914141554,-0.001447355894596224,0.1137473848159499,0.16882378072242726,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 95 | 0.4632273097370594,0,93,0.0858801360082036,-0.14053737486428122,0.20160409675621138,0.13222253961686847,-0.04152423246852679,-0.17720207601667842,-0.6397471365689718,-0.9580280985492741,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 96 | -0.17023253661106372,0,94,-0.09622679937999752,0.5246973639280262,-0.02545596673063644,-0.1639609131414685,0.09277873579895715,0.059047211525854244,-0.10706732003993628,0.042994519647365996,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 97 | 0.24287722273956125,0,95,-0.1028501164024293,-0.2100851539607608,-0.14277836270786673,0.2721250973072182,-0.16193595066258584,0.11662468231394714,-0.3214889059846333,0.12839305031944434,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 98 | 
0.5410393383912668,0,96,-0.057106967175902024,-0.2804330519349583,-0.07595293365921456,0.13570411783000988,-0.23250683793668778,-0.22335562131153128,-0.4107047986523467,-0.06720412943042973,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 99 | 0.01579129572418472,0,97,-0.25737343559225356,-0.15234044235889396,0.12105029043214094,-0.10094432333881317,-0.06872944716620862,-0.055795902817356406,-0.2592961885697153,-0.15720705421187936,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 100 | 0.27253034065394466,0,98,0.11071705705997595,-0.28725172233618174,0.3319427575132676,-0.1401837523146519,0.042158212345102114,-0.05750240109337755,0.09726444810980797,-0.03647969891005341,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 101 | -0.1780272770737326,0,99,-0.32190429094117584,0.17797665488118705,0.21678105421727004,-0.025142220515042852,-0.26465715428938946,0.4874197702494441,0.011334084528049475,-0.07079959139806169,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 102 | -0.0803904865464769,1,0,-3.5531577541028493e-37,-0.7051228975481068,-0.11284882562238446,-0.16567486417457727,0.12749732194493232,1.0836314426028957,-0.3109350756930361,-0.03319157685929565,-0.7765658937866393,-0.31848210425905443,2.459829922459594,-0.8411820175231377,0.5440509859392672,-0.48980587333413245,-7.651333500215969e-32,-5.019557290952507e-60,7.51157546464481e-55,-0.04465530810502111,0.23745372007755494,-0.10137591249010826,0.3019761197923758,0.29382180111326706,-1.279200053557614,0.5023054438038888,-4.012641190891013e-61,0.12396033823275004,2.392606819339173,-0.4028382636938931,0.08068627960232636,-3.5271161146444193e-40,0.148951639720405,-9.83776890723419e-24,-0.43278491901533445,-1.0363055958882024,-0.6856431553455206,0.4256141686563395,-0.5156575055490651,2.8926535351027075e-33,-1.5000089287678005,0.23671612667248704,-1.0677554522395671,-1.6392887019940598,-1.793326732962398,1.1271709864839337,-0.02035785729797462,-0.6949578108735455,1.3675797108957333,0.4727314930374952,0.36450181733508924,0.7180034643600784,-0.6623165374593789,0.4735284237160418,-1.276164494974983,-0.306228529754649,-0.3383260216319462,-0.8194349662797318,3.453219725084271e-64,7.197962591648547e-06,0.6728600519943988,0.6163747999845851,-0.5172152035999741,-0.2592998798713083,-0.14747440237786916,-0.7497566736100804,-0.1165459436356709,-0.15096775727875134,-2.445850027592095e-63,0.8948035002885684,-0.2765352869536071,1.0361389376606795,0.11836918418826825,-6.402097212419761e-36,1.5293671510945044,-0.7814152186982444,0.4964610715508715,-1.227353393310042,-0.814111177337202,-0.035287318680104686,-0.23965592055862567,-0.24336614196170026,0.14365373425091252,0.8172772400193357,1.4456590524616897,-0.5645209599340439,0.3552370320170194,3.4481558530679944e-64,0.13129442787249868,0.09290513148862367,-1.280669049332226,0.11042463224926166,-1.6945483662567478,-2.5717997830461656,-0.45072586092274425,-0.8894533928309312,0.5717384409326675,-0.529758080265975,-0.7062662697982454,0.17690606775387968,-0.4411076617136925,0.4492876799387198 103 | -------------------------------------------------------------------------------- /rce/generate_ce.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import os 4 | import time 5 | 6 | # optimization 7 | from pyomo import environ 8 | from pyomo.environ import * 9 | 10 | # 
utils
import rce


def write_tables(save_path, X_train, y_train, clf, clf_type, task_type):
    """
    This function uses constraint_learning to write the ML model in
    a tabular format and saves the resulting CSV file in 'save_path'.
    """
    if not os.path.exists(save_path + '/' + clf_type):
        os.makedirs(save_path + '/' + clf_type)
    constraintL = rce.ConstraintLearning(X_train, y_train, clf, clf_type)
    constraint_add = constraintL.constraint_extrapolation(task_type)
    constraint_add.to_csv(save_path + '/%s/model.csv' % clf_type, index=False)
    print(f'{clf_type} tables saved.')


def base_model(u, F, F_b, F_int, F_coh, I, L, P):
    """
    This function returns a model with the base constraints (coherence, immutability, etc.)
    needed to generate counterfactual explanations.
    """
    model = ConcreteModel('RCE')
    model.x = Var(F, domain=Reals, name=['ce_%s' % str(ce) for ce in F])  # counterfactual features

    for i in F_b:
        model.x[i].domain = Binary

    for i in F_int:
        model.x[i].domain = NonNegativeIntegers

    # one-hot encoded categories must keep exactly one active level
    for cat in F_coh.keys():
        model.add_component('coherence_' + cat, Constraint(expr=sum(model.x[i] for i in F_coh[cat]) == 1))

    def immutability(model, i):
        return model.x[i] == u[i].item()

    model.add_component('immutability', Constraint(I, rule=immutability))

    def larger(model, i):
        return model.x[i] >= u[i].item()

    model.add_component('larger_than', Constraint(L, rule=larger))

    def positive(model, i):
        return model.x[i] >= 0

    model.add_component('positive', Constraint(P, rule=positive))

    return model


def find_maxrad(x_, clf_type, save_path, F, F_b, F_int, F_coh, I, L, P, class_, obj_type):
    """
    This function finds the solution closest to the (robust) counterfactual explanation 'x_'
    that is classified as 'class_', namely the class of the factual instance. The returned
    distance is therefore the distance of 'x_' to the decision boundary.
    """
    clf_path = save_path + '/' + clf_type + '/model.csv'
    mfile = pd.read_csv(clf_path)
    model = base_model(x_, F, F_b, F_int, F_coh, I, L, P)

    # Define the objective function. It must use the same norm as the uncertainty set
    # used for the robust counterfactual explanation.
    model = add_objective(model, x_, obj_type, maxrad=True)
    # Upper and lower bounds for the validity constraints
    lb, ub = None, None
    if clf_type in ['cart', 'gbm', 'rf']:
        if class_ == 1:
            lb, ub = 0.499999, None
        else:
            lb, ub = None, 0.500001
    elif clf_type in ['linear', 'svm', 'mlp']:
        if class_ == 1:
            lb, ub = 0., None
        else:
            lb, ub = None, 0.

    # Definition of variables for the validity constraints
    model.y = Var(Any, dense=False, domain=Reals)
    model.l = Var(Any, dense=False, domain=Binary)
    model.y_viol = Var(Any, dense=False, domain=Binary)
    model.v = Var(Any, dense=False, domain=NonNegativeReals)
    model.v_ind = Var(Any, dense=False, domain=Binary)
    model.lam = Var(Any, dense=False, domain=Reals, bounds=(0, 1))

    # Adding the validity constraints
    if clf_type in ['linear', 'svm']:
        model = constraints_linear(model, 'prediction', class_, mfile, F, 0, None, lb, ub)
    elif clf_type == 'cart':
        model = constraints_tree(model, 'prediction', mfile, F, 0, None, lb, ub,
                                 adversarial_algorithm_bool=False, S=[], adv_prob=False)
    elif clf_type == 'rf':
        model = constraints_rf(model, 'prediction', mfile, F, 0, None, lb, ub,
                               adversarial_algorithm_bool=False, S=[], adv_prob=False)
    elif clf_type == 'gbm':
        model = constraints_gbm(model, 'prediction', mfile, F, 0, None, lb, ub,
                                adversarial_algorithm_bool=False, S=[], adv_prob=False)
    elif clf_type == 'mlp':
        model = constraints_mlp(model, 'prediction', mfile, F, lb, ub, adv_prob=True, S=[])

    # Solving the optimization model
    opt = SolverFactory('gurobi')
    opt.solve(model)
    sol = [value(model.x[i]) for i in x_.columns]
    distance = value(model.OBJ)

    if obj_type == 'l2':
        distance = np.sqrt(distance)

    return sol, np.round(distance, 5)


def constraints_linear(model, outcome, class_, linear_table, F, rho, unc_type, lb, ub):
    """
    This function generates the constraints for a linear predictive model.
    """
    intercept = linear_table['intercept'][0]
    coeff = linear_table.drop(['intercept'], axis=1, inplace=False).loc[0, :]
    sign = 1 if class_ == 1 else -1
    # Robust counterpart of the linear validity constraint: the classification
    # threshold is tightened by rho times the dual norm of the coefficients.
    if unc_type == 'l2':
        robust_term = sign * rho * np.linalg.norm(coeff, 2)
    elif unc_type == 'linf':
        robust_term = sign * rho * np.linalg.norm(coeff, 1)
    else:
        robust_term = 0

    model.add_component('linear_model', Constraint(
        expr=model.y[outcome] == sum(model.x[i] * coeff.loc[i] for i in F) + intercept + robust_term))

    if not pd.isna(ub):
        model.add_component('ub', Constraint(expr=model.y['prediction'] <= ub))
    if not pd.isna(lb):
        model.add_component('lb', Constraint(expr=model.y['prediction'] >= lb))

    return model


def constraints_tree(model, outcome, tree_table, F, rho, unc_type, lb, ub, adversarial_algorithm_bool, S, adv_prob):
    """
    This function generates the constraints for a decision tree and for each decision tree of a random forest or a
    gradient boosting model.
158 | """ 159 | M = 1000 160 | leaf_values = tree_table.loc[:, ['ID', 'prediction']].drop_duplicates().set_index('ID') 161 | # Row-level information: 162 | intercept = tree_table['threshold'] 163 | coeff = tree_table.drop(['ID', 'threshold', 'prediction', 'node_ID'], axis=1, inplace=False).reset_index( 164 | drop=True) 165 | l_ids = tree_table['ID'] 166 | node_ids = tree_table['node_ID'] 167 | n_constr = coeff.shape[0] 168 | L = np.unique(tree_table['ID']) 169 | 170 | if not adversarial_algorithm_bool: # the robust counterfactual explanations will be in one of the leaves 171 | def constraintsTree_1(model, j): 172 | if unc_type == 'l2': 173 | robust_term = rho * np.linalg.norm(coeff.loc[j, :], 2) 174 | elif unc_type == 'linf': 175 | robust_term = rho * np.linalg.norm(coeff.loc[j, :], 1) 176 | elif unc_type == 'l1': 177 | print('not supported yet!') 178 | elif pd.isna(unc_type): 179 | robust_term = 0 180 | return sum(model.x[i] * coeff.loc[j, i] for i in F) + robust_term <= intercept.iloc[j] + M * ( 181 | 1 - model.l[(outcome, str(l_ids.iloc[j]))]) 182 | 183 | model.add_component(outcome + '_splits', Constraint(range(n_constr), rule=constraintsTree_1)) 184 | 185 | def constraintsTree_2(model): 186 | return sum(model.l[(outcome, str(i))] for i in L) == 1 187 | 188 | model.add_component(outcome + '_oneleaf', Constraint(rule=constraintsTree_2)) 189 | 190 | def constraintTree_3(model): 191 | return model.y[outcome] == sum(leaf_values.loc[i, 'prediction'] * model.l[(outcome, str(i))] for i in L) 192 | 193 | model.add_component('DT_' + outcome, Constraint(rule=constraintTree_3)) 194 | 195 | if not pd.isna(ub): 196 | model.add_component('ub', Constraint(expr=model.y[outcome] <= ub)) 197 | if not pd.isna(lb): 198 | model.add_component('lb', Constraint(expr=model.y[outcome] >= lb)) 199 | else: # adversarial algorithm 200 | if adv_prob: # adversarial problem 201 | def constraintsTree_1(model, j): 202 | return sum(model.x[i] * coeff.loc[j, i] for i in F) + model.w[(outcome, node_ids[j])] <= intercept.iloc[ 203 | j] + M * (1 - model.l[(outcome, str(l_ids.iloc[j]))]) 204 | model.add_component(outcome + '_splits', Constraint(range(n_constr), rule=constraintsTree_1)) 205 | def constraintsTree_2(model): 206 | return sum(model.l[(outcome, str(i))] for i in L) == 1 207 | model.add_component(outcome + '_oneleaf', Constraint(rule=constraintsTree_2)) 208 | def constraintTree_3(model): 209 | return model.y[outcome] == sum( 210 | leaf_values.loc[i, 'prediction'] * model.l[(outcome, str(i))] for i in L) 211 | 212 | model.add_component('DT' + outcome, Constraint(rule=constraintTree_3)) 213 | # 214 | # def constraintsTree_4(model, i): 215 | # print(M) 216 | # print(tree_table[tree_table['node_ID'] == i]['ID']) 217 | # return model.w[(outcome, i)] <= M * sum( 218 | # model.l[(outcome, str(j))] for j in tree_table[tree_table['node_ID'] == i]['ID']) 219 | def constraintsTree_5(model, i): 220 | return model.mu[(outcome)] <= model.w[(outcome, i)] 221 | for i in np.unique(node_ids): 222 | # model.add_component(outcome + f'contr_4_{i}', Constraint([i], rule=constraintsTree_4)) 223 | model.add_component(outcome + f'contr_5_{i}', Constraint([i], rule=constraintsTree_5)) 224 | 225 | if not pd.isna(ub): 226 | model.add_component('ub', Constraint(expr=model.y[outcome] <= ub)) 227 | if not pd.isna(lb): 228 | model.add_component('lb', Constraint(expr=model.y[outcome] >= lb)) 229 | 230 | else: 231 | def constraintsTree_1(model, j, s): 232 | return sum((model.x[i] + S[s][i]) * coeff.loc[j, i] for i in F) <= intercept.iloc[j] + M * ( 
233 | 1 - model.l[(outcome, str(l_ids.iloc[j]), s)]) 234 | model.add_component(outcome + '_splits', 235 | Constraint(range(n_constr), range(len(S)), rule=constraintsTree_1)) 236 | 237 | def constraintsTree_2(model, s): 238 | return sum(model.l[(outcome, str(i), s)] for i in L) == 1 239 | 240 | model.add_component(outcome + '_oneleaf', Constraint(range(len(S)), rule=constraintsTree_2)) 241 | 242 | def constraintTree_3(model, s): 243 | return model.y[(outcome, s)] == sum( 244 | leaf_values.loc[i, 'prediction'] * model.l[(outcome, str(i), s)] for i in L) 245 | 246 | model.add_component('DT' + outcome, Constraint(range(len(S)), rule=constraintTree_3)) 247 | if not pd.isna(ub): 248 | def constr_ub(model, s): 249 | return model.y[(outcome, s)] <= ub 250 | 251 | model.add_component('ub', Constraint(range(len(S)), rule=constr_ub)) 252 | 253 | if not pd.isna(lb): 254 | def constr_lb(model, s): 255 | return model.y[(outcome, s)] >= lb 256 | 257 | model.add_component('lb', Constraint(range(len(S)), rule=constr_lb)) 258 | 259 | return model 260 | 261 | 262 | def constraints_rf(model, outcome, forest_table, F, rho, unc_type, lb, ub, adversarial_algorithm_bool, S, adv_prob): 263 | """ 264 | This function generates the constraints for a random forest. 265 | """ 266 | forest_table_temp = forest_table.copy() 267 | forest_table_temp['Tree_id'] = [outcome + '_' + str(i) for i in forest_table_temp['Tree_id']] 268 | T = np.unique(forest_table_temp['Tree_id']) 269 | if not adversarial_algorithm_bool: 270 | for i, t in enumerate(T): 271 | tree_table = forest_table_temp.loc[forest_table_temp['Tree_id'] == t, :].drop('Tree_id', axis=1) 272 | constraints_tree(model, t, tree_table, F, rho, unc_type, lb=None, ub=None, 273 | adversarial_algorithm_bool=False, S=[], adv_prob=False) 274 | 275 | model.add_component('RF' + outcome, 276 | Constraint(rule=model.y[outcome] == 1 / len(T) * quicksum(model.y[j] for j in T))) 277 | if not pd.isna(ub): 278 | model.add_component('ub_' + outcome, Constraint(expr=model.y[outcome] <= ub)) 279 | if not pd.isna(lb): 280 | model.add_component('lb_' + outcome, Constraint(expr=model.y[outcome] >= lb)) 281 | else: 282 | if adv_prob: 283 | for i, t in enumerate(T): 284 | tree_table = forest_table_temp.loc[forest_table_temp['Tree_id'] == t, :].drop( 285 | 'Tree_id', axis=1).reset_index(drop=True, inplace=False) 286 | constraints_tree(model, t, tree_table, F, rho, unc_type, lb=None, ub=None, 287 | adversarial_algorithm_bool=True, S=S, adv_prob=True) 288 | model.add_component('RF' + outcome, 289 | Constraint(rule=model.y[outcome] == 1 / len(T) * quicksum(model.y[j] for j in T))) 290 | 291 | def aux_mu(model, i): 292 | return model.mu_e[outcome] <= model.mu[i] 293 | model.add_component('RF_aux_mu', Constraint(T, rule=aux_mu)) 294 | 295 | if not pd.isna(ub): 296 | model.add_component('ub_' + outcome, Constraint(expr=model.y[outcome] <= ub)) 297 | if not pd.isna(lb): 298 | model.add_component('lb_' + outcome, Constraint(expr=model.y[outcome] >= lb)) 299 | else: 300 | for i, t in enumerate(T): 301 | tree_table = forest_table_temp.loc[forest_table_temp['Tree_id'] == t, :].drop( 302 | 'Tree_id', axis=1).reset_index(drop=True, inplace=False) 303 | constraints_tree(model, t, tree_table, F, rho, unc_type, lb=None, ub=None, 304 | adversarial_algorithm_bool=True, S=S, adv_prob=False) 305 | 306 | def constr_RF_output(model, s): 307 | return model.y[(outcome, s)] == 1 / len(T) * quicksum(model.y[(j, s)] for j in T) 308 | model.add_component('RF' + outcome, Constraint(range(len(S)), rule=constr_RF_output)) 
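            # In this scenario-indexed variant (used by the master problem), every
            # perturbation s in S gets its own copy of each tree's leaf indicators
            # and output y[(t, s)], and the forest output y[(outcome, s)] above is
            # the unweighted average over the |T| trees, so the validity bounds
            # below must hold for every scenario in S.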
309 | 310 | if not pd.isna(ub): 311 | def constr_ub(model, s): 312 | return model.y[(outcome, s)] <= ub 313 | model.add_component('ub', Constraint(range(len(S)), rule=constr_ub)) 314 | if not pd.isna(lb): 315 | def constr_lb(model, s): 316 | return model.y[(outcome, s)] >= lb 317 | model.add_component('lb', Constraint(range(len(S)), rule=constr_lb)) 318 | 319 | return model 320 | 321 | 322 | def constraints_gbm(model, outcome, gbm_table, F, rho, unc_type, lb, ub, adversarial_algorithm_bool, S, adv_prob): 323 | """ 324 | This function generates the constraints for a gradient boosting. 325 | """ 326 | gbm_table_temp = gbm_table.copy() 327 | gbm_table_temp['Tree_id'] = [outcome + '_' + str(i) for i in gbm_table_temp['Tree_id']] 328 | T = np.unique(gbm_table_temp['Tree_id']) 329 | if not adversarial_algorithm_bool: 330 | for i, t in enumerate(T): 331 | tree_table = gbm_table_temp.loc[gbm_table_temp['Tree_id'] == t, :].drop(['Tree_id', 'initial_prediction', 'learning_rate'], axis=1, inplace=False) 332 | constraints_tree(model, t, tree_table, F, rho, unc_type, lb=None, ub=None, 333 | adversarial_algorithm_bool=False, S=[], adv_prob=False) 334 | 335 | def constraint_gbm(model): 336 | return model.y[outcome] == np.unique(gbm_table_temp['initial_prediction']).item() + np.unique(gbm_table_temp['learning_rate']).item() * quicksum(model.y[j] for j in T) 337 | 338 | model.add_component('GBM'+outcome, Constraint(rule=constraint_gbm)) 339 | if not pd.isna(ub): 340 | model.add_component('ub_' + outcome, Constraint(expr=model.y[outcome] <= ub)) 341 | if not pd.isna(lb): 342 | model.add_component('lb_' + outcome, Constraint(expr=model.y[outcome] >= lb)) 343 | else: 344 | if adv_prob: 345 | for i, t in enumerate(T): 346 | tree_table = gbm_table_temp.loc[gbm_table_temp['Tree_id'] == t, :].drop(['Tree_id', 'initial_prediction', 'learning_rate'], axis=1, inplace=False).reset_index(drop=True, inplace=False) 347 | constraints_tree(model, t, tree_table, F, rho, unc_type, lb=None, ub=None, 348 | adversarial_algorithm_bool=True, S=S, adv_prob=True) 349 | 350 | def constraint_gbm(model): 351 | return model.y[outcome] == np.unique(gbm_table_temp['initial_prediction']).item() + np.unique(gbm_table_temp['learning_rate']).item() * quicksum(model.y[j] for j in T) 352 | 353 | model.add_component('GBM' + outcome, Constraint(rule=constraint_gbm)) 354 | 355 | def aux_mu(model, i): 356 | return model.mu_e[outcome] <= model.mu[i] 357 | model.add_component('GBM_aux_mu', Constraint(T, rule=aux_mu)) 358 | 359 | if not pd.isna(ub): 360 | model.add_component('ub_' + outcome, Constraint(expr=model.y[outcome] <= ub)) 361 | if not pd.isna(lb): 362 | model.add_component('lb_' + outcome, Constraint(expr=model.y[outcome] >= lb)) 363 | else: 364 | for i, t in enumerate(T): 365 | tree_table = gbm_table_temp.loc[gbm_table_temp['Tree_id'] == t, :].drop(['Tree_id', 'initial_prediction', 'learning_rate'], axis=1, inplace=False).reset_index(drop=True, inplace=False) 366 | constraints_tree(model, t, tree_table, F, rho, unc_type, lb=None, ub=None, 367 | adversarial_algorithm_bool=True, S=S, adv_prob=False) 368 | 369 | def constraint_gbm(model, s): 370 | return model.y[(outcome, s)] == np.unique(gbm_table_temp['initial_prediction']).item() + np.unique(gbm_table_temp['learning_rate']).item() * quicksum(model.y[(j, s)] for j in T) 371 | 372 | model.add_component('GBM' + outcome, Constraint(range(len(S)), rule=constraint_gbm)) 373 | 374 | if not pd.isna(ub): 375 | def constr_ub(model, s): 376 | return model.y[(outcome, s)] <= ub 377 | 
model.add_component('ub', Constraint(range(len(S)), rule=constr_ub)) 378 | if not pd.isna(lb): 379 | def constr_lb(model, s): 380 | return model.y[(outcome, s)] >= lb 381 | model.add_component('lb', Constraint(range(len(S)), rule=constr_lb)) 382 | 383 | return model 384 | 385 | 386 | def constraints_mlp(model, outcome, weights, F, lb, ub, adv_prob, S, M_l=-1e2, M_u=1e2): 387 | """ 388 | This function generates the constraints for a neural network. 389 | """ 390 | if adv_prob: 391 | nodes_input = range(len(F)) 392 | v_input = [model.x[i] for i in F] 393 | max_layer = max(weights['layer']) 394 | for l in range(max_layer + 1): 395 | df_layer = weights.query('layer == %d' % l) 396 | max_nodes = [k for k in df_layer.columns if 'node_' in k] 397 | coeffs_layer = np.array(df_layer.loc[:, max_nodes].dropna(axis=1)) 398 | intercepts_layer = np.array(df_layer['intercept']) 399 | nodes = df_layer['node'] 400 | 401 | if l == max_layer: 402 | node = nodes.iloc[0] # only one node in last layer 403 | model.add_component('MLP' + outcome, Constraint( 404 | rule=model.y[outcome] == sum(v_input[i] * coeffs_layer[node, i] for i in nodes_input) + 405 | intercepts_layer[node] 406 | ) 407 | ) 408 | else: 409 | # Save v_pos for input to next layer 410 | v_pos_list = [] 411 | for node in nodes: 412 | ## Initialize variables 413 | v_pos_list.append(model.v[(outcome, l, node)]) 414 | model.add_component('constraint_1_' + str(l) + '_' + str(node) + outcome, 415 | Constraint(rule=model.v[(outcome, l, node)] >= sum( 416 | v_input[i] * coeffs_layer[node, i] for i in nodes_input) + 417 | intercepts_layer[ 418 | node] 419 | ) 420 | ) 421 | model.add_component('constraint_2_' + str(l) + '_' + str(node) + outcome, 422 | Constraint(rule=model.v[(outcome, l, node)] <= M_u * ( 423 | model.v_ind[(outcome, l, node)]))) 424 | model.add_component('constraint_3_' + str(l) + '_' + str(node) + outcome, 425 | Constraint(rule=model.v[(outcome, l, node)] <= sum( 426 | v_input[i] * coeffs_layer[node, i] for i in nodes_input) + 427 | intercepts_layer[ 428 | node] - M_l * (1 - model.v_ind[(outcome, l, node)]) 429 | ) 430 | ) 431 | 432 | ## Prepare nodes_input for next layer 433 | nodes_input = nodes 434 | v_input = v_pos_list 435 | 436 | if not pd.isna(ub): 437 | model.add_component('ub_' + outcome, Constraint(expr=model.y[outcome] <= ub)) 438 | if not pd.isna(lb): 439 | model.add_component('lb_' + outcome, Constraint(expr=model.y[outcome] >= lb)) 440 | else: 441 | nodes_input = range(len(F)) 442 | v_input = [[model.x[i] + S[s][i] for i in F] for s in range(len(S))] 443 | max_layer = max(weights['layer']) 444 | for l in range(max_layer + 1): 445 | df_layer = weights.query('layer == %d' % l) 446 | max_nodes = [k for k in df_layer.columns if 'node_' in k] 447 | coeffs_layer = np.array(df_layer.loc[:, max_nodes].dropna(axis=1)) 448 | intercepts_layer = np.array(df_layer['intercept']) 449 | nodes = df_layer['node'] 450 | if l == max_layer: 451 | node = nodes.iloc[0] # only one node in last layer 452 | for s in range(len(S)): 453 | model.add_component('MLP' + outcome + '_' + str(s), 454 | Constraint(rule=model.y[(outcome, s)] == sum( 455 | v_input[s][i] * coeffs_layer[node, i] for i in nodes_input) + 456 | intercepts_layer[node] 457 | ) 458 | ) 459 | else: 460 | # Save v_pos for input to next layer 461 | v_pos_list = [] 462 | for s in range(len(S)): 463 | v_pos_list_s = [] 464 | for node in nodes: 465 | ## Initialize variables 466 | v_pos_list_s.append(model.v[(outcome, l, node), s]) 467 | model.add_component('constraint_1_' + str(l) + '_' + 
str(node) + outcome + '_' + str(s), 468 | Constraint(rule=model.v[(outcome, l, node), s] >= sum( 469 | v_input[s][i] * coeffs_layer[node, i] for i in nodes_input) + 470 | intercepts_layer[node] 471 | ) 472 | ) 473 | model.add_component('constraint_2_' + str(l) + '_' + str(node) + outcome + '_' + str(s), 474 | Constraint(rule=model.v[(outcome, l, node), s] <= M_u * ( 475 | model.v_ind[(outcome, l, node), s]))) 476 | model.add_component('constraint_3_' + str(l) + '_' + str(node) + outcome + '_' + str(s), 477 | Constraint(rule=model.v[(outcome, l, node), s] <= sum( 478 | v_input[s][i] * coeffs_layer[node, i] for i in nodes_input) + 479 | intercepts_layer[node] - M_l * ( 480 | 1 - model.v_ind[(outcome, l, node), s]) 481 | ) 482 | ) 483 | v_pos_list.append(v_pos_list_s) 484 | 485 | ## Prepare nodes_input for next layer 486 | nodes_input = nodes 487 | v_input = v_pos_list 488 | 489 | if not pd.isna(ub): 490 | for s in range(len(S)): 491 | model.add_component('ub_' + outcome + '_' + str(s), Constraint(expr=model.y[(outcome, s)] <= ub)) 492 | if not pd.isna(lb): 493 | for s in range(len(S)): 494 | model.add_component('lb_' + outcome + '_' + str(s), Constraint(expr=model.y[(outcome, s)] >= lb)) 495 | 496 | return model 497 | 498 | 499 | def adv_problem(model, outcome, clf_type, mfile, F, x_, lb, ub, rho, unc_type): 500 | """ 501 | This function defines and solves the adversarial problem. 502 | """ 503 | model.w = Var(Any, dense=False, domain=NonNegativeReals, 504 | bounds=(0, 10)) # What is a better ub? 505 | model.mu = Var(Any, dense=False, domain=NonNegativeReals) 506 | 507 | if clf_type in ['rf', 'gbm']: 508 | model.mu_e = Var(Any, dense=False, domain=NonNegativeReals) 509 | 510 | def obj(model): 511 | if clf_type == 'mlp': 512 | if not pd.isna(lb): 513 | lam = 1 514 | else: 515 | lam = -1 516 | return lam * model.y['prediction'] 517 | elif clf_type == 'cart': 518 | return model.mu[(outcome)] 519 | elif clf_type in ['rf', 'gbm']: 520 | # return sum([model.mu[f"{outcome}_{i}"] for i in np.unique(mfile['Tree_id'])]) 521 | return model.mu_e[outcome] 522 | else: 523 | print('Not implemented yet') 524 | 525 | model.OBJ = Objective(rule=obj, sense=maximize) 526 | 527 | if clf_type == 'cart': 528 | model = constraints_tree(model, 'prediction', mfile, F, rho, unc_type, lb, ub, adversarial_algorithm_bool=True, 529 | S=[], adv_prob=True) 530 | elif clf_type == 'mlp': 531 | model = constraints_mlp(model, 'prediction', mfile, F, lb, ub, adv_prob=True, S=[]) 532 | elif clf_type == 'rf': 533 | model = constraints_rf(model, 'prediction', mfile, F, rho, unc_type, lb, ub, adversarial_algorithm_bool=True, 534 | S=[], adv_prob=True) 535 | elif clf_type == 'gbm': 536 | model = constraints_gbm(model, 'prediction', mfile, F, rho, unc_type, lb, ub, adversarial_algorithm_bool=True, S=[], adv_prob=True) 537 | if unc_type == 'l2': 538 | def aux_uncert(model): 539 | return sum((x_[i].item() - model.x[i]) ** 2 for i in F) <= rho ** 2 540 | 541 | model.add_component(outcome + '_aux_uncert', Constraint(rule=aux_uncert)) 542 | 543 | elif unc_type == 'linf': 544 | def aux_unc_1(model, i): 545 | return model.x[i] <= x_[i].item() + rho 546 | 547 | model.add_component(outcome + '_aux_unc_1', Constraint(F, rule=aux_unc_1)) 548 | 549 | def aux_unc_2(model, i): 550 | return model.x[i] >= x_[i].item() - rho 551 | 552 | model.add_component(outcome + 'aux_unc_2', Constraint(F, rule=aux_unc_2)) 553 | 554 | print('Optimizing the adversarial problem...') 555 | 556 | opt = SolverFactory('gurobi_persistent') 557 | opt.set_instance(model) 
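    # The persistent Gurobi interface keeps the built model attached to the solver,
    # so solver attributes can be queried after optimization. The pool parameters
    # below ask Gurobi to retain up to 10 (sub)optimal solutions from its search
    # (PoolSearchMode=1 makes it actively look for additional ones); all pooled
    # adversarial perturbations are later read back via SolCount/Xn and offered
    # to the master problem as new scenarios.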
558 | opt.set_gurobi_param('PoolSolutions', 10) 559 | opt.set_gurobi_param('PoolSearchMode', 1) 560 | 561 | # opt.options["NonConvex"] = 2 562 | # opt.options["DualReductions"] = 0 ######### 563 | 564 | start_time_pp = time.time() 565 | results = opt.solve(model, load_solutions=True, tee=False) 566 | subopt_solutions = [] 567 | print("Status:", results.solver.termination_condition) 568 | if (results.solver.status == SolverStatus.ok) and ( 569 | results.solver.termination_condition == TerminationCondition.optimal): 570 | solution = [value(model.x[i]) for i in F] 571 | print('solution adv problem', solution, 'generated in ', np.round(time.time() - start_time_pp, 1), 's') 572 | 573 | number_of_solutions = opt.get_model_attr('SolCount') 574 | for i in range(number_of_solutions): 575 | opt.set_gurobi_param('SolutionNumber', i) 576 | suboptimal_solutions = opt.get_model_attr('Xn') 577 | 578 | vars_name_x = [opt.get_var_attr(model.x[i], 'VarName') for i in F] 579 | vars_name_ix = [int(vars_name_x[i].replace('x', '')) for i in range(len(vars_name_x))] 580 | vars_val_x = [suboptimal_solutions[i - 1] for i in vars_name_ix] 581 | solution_i = [vars_val_x[i] for i in range(len(vars_val_x))] 582 | subopt_solutions.append(solution_i) 583 | status = True 584 | else: 585 | solution = [] 586 | status = False 587 | 588 | return model, solution, subopt_solutions, status 589 | 590 | 591 | def master_problem(model, u, outcome, clf_type, mfile, F, rho, unc_type, lb, ub, S, time_limit = 1000): 592 | """ 593 | This function defines and solves the master problem. 594 | """ 595 | model_temp = add_objective(model, u, 'l1') 596 | if clf_type == 'cart': 597 | master_model = constraints_tree(model_temp, outcome, mfile, F, rho, unc_type, lb, ub, 598 | adversarial_algorithm_bool=True, S=S, adv_prob=False) 599 | elif clf_type == 'mlp': 600 | master_model = constraints_mlp(model_temp, outcome, mfile, F, lb, ub, adv_prob=False, S=S) 601 | elif clf_type == 'rf': 602 | master_model = constraints_rf(model_temp, outcome, mfile, F, rho, unc_type, lb, ub, 603 | adversarial_algorithm_bool=True, S=S, adv_prob=False) 604 | elif clf_type == 'gbm': 605 | master_model = constraints_gbm(model_temp, outcome, mfile, F, rho, unc_type, lb, ub, 606 | adversarial_algorithm_bool=True, S=S, adv_prob=False) 607 | 608 | print('Optimizing the master problem...') 609 | start_time_master = time.time() 610 | opt = SolverFactory('gurobi') 611 | try: opt.solve(master_model, timelimit=time_limit) 612 | except: 613 | return None, None 614 | # opt.solve(master_model) 615 | end_time_master = time.time() 616 | sol_master = pd.DataFrame([[value(master_model.x[i]) for i in F]], columns=F) 617 | print('solution master', [value(master_model.x[i]) for i in F], 'generated in ', 618 | np.round(end_time_master - start_time_master, 1), 's') 619 | # print([value(master_model.y[(outcome, s)]) for s in range(len(S))]) 620 | return master_model, sol_master 621 | 622 | 623 | def adversarial_algorithm(model, outcome, clf_type, save_path, u, mfile, F, rho, unc_type, lb, ub, time_limit): 624 | """ 625 | Adversarial algorithm with the iteration between the master problem and the adversarial problem 626 | """ 627 | print('time limit: %i' % time_limit) 628 | if not pd.isna(ub): 629 | ub_price, lb_price = None, ub 630 | pred_ = 1 631 | else: 632 | ub_price, lb_price = lb, None 633 | pred_ = 0 634 | 635 | S = [{j: 0 for j in F}] 636 | iterations = 0 637 | # eps = 0.00001 638 | eps = 0 639 | iteration_condition = True 640 | comp_time = time.time() 641 | time_find_maxrad_list = [] 
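    # Iterative scheme: alternate between the master problem (propose a candidate
    # robust CE given the current scenario set S) and the adversarial problem
    # (search the rho-ball around the candidate for a perturbation that flips the
    # prediction). Violating perturbations are appended to S, and the loop stops
    # when the candidate's distance to the decision boundary reaches rho, the
    # adversarial objective drops to eps, or the time limit is hit.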
    solutions_master_dict = {}
    while True:
        if time.time() - comp_time > time_limit:
            break
        print(f'\n\n------------------------ Iteration: {iterations} ------------------------')

        master_model, sol_master = master_problem(model.clone(), u, outcome, clf_type, mfile, F, rho, unc_type, lb,
                                                  ub, S, time_limit=time_limit)

        if master_model is None:
            print('master_model is None -- MP not solved within time limit')
            break

        print('--> Distance to the factual instance:', value(master_model.OBJ))
        solutions_master_dict[iterations] = {'sol': sol_master, 'obj': value(master_model.OBJ)}
        # The time spent certifying the candidate (find_maxrad) is not counted
        # against the time limit of the iterative scheme.
        time_find_max_start = time.time()
        _, dist_border = find_maxrad(sol_master, clf_type, save_path, F, [], [], {}, [], [], [],
                                     pred_, unc_type)
        comp_time -= time.time() - time_find_max_start
        print('--> Distance to the border:', dist_border)
        if dist_border + rho / 100 >= rho:
            print("Stopping because the distance to the border is >= rho")
            break

        adv_model, sol_adv, set_sol_adv, status_adv = adv_problem(model.clone(), outcome, clf_type, mfile, F,
                                                                  sol_master, lb_price, ub_price,
                                                                  rho, unc_type)
        print('Status adversarial problem:', status_adv)
        if status_adv:
            if value(adv_model.OBJ) <= eps:
                print(f'Stopping because the ADV obj value ({value(adv_model.OBJ)}) is <= eps ({eps})')
                break

            # Add the first pooled perturbation that is not yet in S
            for temp_sol_adv in set_sol_adv:
                temp_sol_adv = pd.DataFrame([temp_sol_adv], columns=F)
                S_df = np.subtract(temp_sol_adv, sol_master)
                if {j: S_df[j].item() for j in F} not in S:
                    S.append({j: S_df[j].item() for j in F})
                    break
        else:
            print('Stopping because the ADV problem is infeasible')
            break
        iterations += 1

    print(f'### Iterative approach completed in {np.round(time.time() - comp_time, 1)} s ###\n')
    comp_time_final = time.time() - comp_time - sum(time_find_maxrad_list)
    model_temp = model.clone()
    if clf_type == 'cart':
        model = constraints_tree(model_temp, outcome, mfile, F, rho, unc_type, lb, ub, True, S=S, adv_prob=False)
    elif clf_type == 'rf':
        model = constraints_rf(model_temp, outcome, mfile, F, rho, unc_type, lb, ub, True, S, False)
    elif clf_type == 'gbm':
        model = constraints_gbm(model_temp, outcome, mfile, F, rho, unc_type, lb, ub, True, S, False)
    elif clf_type == 'mlp':
        model = constraints_mlp(model_temp, outcome, mfile, F, lb, ub, adv_prob=False, S=S)
    return model, iterations, comp_time_final, sol_master, solutions_master_dict


def add_objective(model, u, obj_type, maxrad=False):
    F = list(u.columns)

    if obj_type == 'l2':
        def l2norm(model):
            return sum((u[i].item() - model.x[i]) ** 2 for i in F)

        model.OBJ = Objective(rule=l2norm, sense=minimize)

    elif obj_type == 'linf':
        model.g = Var(['aux_g'], domain=NonNegativeReals, name=['aux_g_%s' % str(i) for i in F])

        def obj_aux1(model, i):
            return model.g['aux_g'] >= (u[i].item() - model.x[i])

        model.add_component('obj_aux1', Constraint(F, rule=obj_aux1))

        def obj_aux2(model, i):
            return model.g['aux_g'] >= -(u[i].item() - model.x[i])

        model.add_component('obj_aux2', Constraint(F, rule=obj_aux2))

        def obj_aux3(model):
            return model.g['aux_g']

        model.OBJ = Objective(rule=obj_aux3, sense=minimize)

    elif obj_type == 'l1':
        model.g = Var(F, domain=NonNegativeReals, name=['aux_g_%s' % str(i) for i in F])

        def obj_aux1(model, i):
            return model.g[i] >= (u[i].item() - model.x[i])

        model.add_component('obj_aux1', Constraint(F, rule=obj_aux1))

        def obj_aux2(model, i):
            return model.g[i] >= -(u[i].item() - model.x[i])

        model.add_component('obj_aux2', Constraint(F, rule=obj_aux2))

        def obj_aux3(model):
            return sum(model.g[i] for i in F)

        model.OBJ = Objective(rule=obj_aux3, sense=minimize)

    else:
        # Fail fast instead of returning a model without an objective
        raise ValueError("Unsupported objective type '%s' (use 'l1', 'l2', or 'linf')" % obj_type)

    return model


def add_validity_constraint(model, clf, clf_type, save_path, u, F, rho, unc_type, adversarial, time_limit):
    """
    The validity constraints require an adversarial approach in the case of an MLP and, optionally,
    in the case of tree-based models.
    """
    model.y = Var(Any, dense=False, domain=Reals)
    model.l = Var(Any, dense=False, domain=Binary)
    model.y_viol = Var(Any, dense=False, domain=Binary)
    model.v = Var(Any, dense=False, domain=NonNegativeReals)
    model.v_ind = Var(Any, dense=False, domain=Binary)
    model.lam = Var(Any, dense=False, domain=Reals, bounds=(0, 1))

    clf_path = save_path + '/' + clf_type + '/model.csv'
    mfile = pd.read_csv(clf_path)

    num_iterations, comp_time = 0, 0
    # Output bound that flips the predicted class of 'u' (0.5 threshold for
    # tree ensembles, 0 for margin-based models)
    lb, ub = None, None
    if clf_type in ['cart', 'gbm', 'rf']:
        if clf.predict(u) == 1:
            ub, lb = 0.4999, None
        else:
            ub, lb = None, 0.5001
    elif clf_type in ['linear', 'svm', 'mlp']:
        if clf.predict(u) == 1:
            ub, lb = 0.0, None
        else:
            ub, lb = None, 0.0

    solutions_master_dict = {}
    if clf_type in ['linear', 'svm']:
        print('embedding validity constraints...')
        model = constraints_linear(model, 'prediction', clf.predict(u), mfile, F, rho, unc_type, lb, ub)
        model = add_objective(model, u, unc_type)
        print('validity constraint embedded.')
        opt = SolverFactory('gurobi')
        start_opt = time.time()
        opt.solve(model)
        end_opt = time.time()
        solution_ = [value(model.x[i]) for i in u.columns]
        solution_ = pd.DataFrame([solution_], columns=u.columns)
        comp_time = end_opt - start_opt
    elif clf_type == 'cart':
        if not adversarial:
            print('embedding validity constraints...')
            model = constraints_tree(model, 'prediction', mfile, F, rho, unc_type, lb, ub,
                                     adversarial_algorithm_bool=False, S=[], adv_prob=False)
            model = add_objective(model, u, unc_type)
            print('validity constraint embedded.')
            opt = SolverFactory('gurobi')
            start_opt = time.time()
            opt.solve(model)
            end_opt = time.time()
            solution_ = [value(model.x[i]) for i in u.columns]
            solution_ = pd.DataFrame([solution_], columns=u.columns)
            comp_time = end_opt - start_opt
        else:
            print('\n### Starting the CART iterative approach ###')
            model, num_iterations, comp_time, solution_, solutions_master_dict = adversarial_algorithm(
                model, 'prediction', clf_type, save_path, u, mfile, F, rho, unc_type, lb, ub, time_limit)
    elif clf_type == 'rf':
        if not adversarial:
            print('embedding validity constraints...')
            model = constraints_rf(model, 'prediction', mfile, F, rho, unc_type, lb, ub,
                                   adversarial_algorithm_bool=False, S=[], adv_prob=False)
            model = add_objective(model, u, unc_type)
            opt = SolverFactory('gurobi')
            start_opt = time.time()
            opt.solve(model)
            end_opt = time.time()
            solution_ = [value(model.x[i]) for i in u.columns]
            solution_ = pd.DataFrame([solution_], columns=u.columns)
            comp_time = end_opt - start_opt
            print('validity constraint embedded.')
        else:
            print('\n### Starting the RANDOM FOREST iterative approach ###')
            model, num_iterations, comp_time, solution_, solutions_master_dict = adversarial_algorithm(
                model, 'prediction', clf_type, save_path, u, mfile, F, rho, unc_type, lb, ub, time_limit)
    elif clf_type == 'gbm':
        if not adversarial:
            print('embedding validity constraints...')
            model = constraints_gbm(model, 'prediction', mfile, F, rho, unc_type, lb, ub,
                                    adversarial_algorithm_bool=False, S=[], adv_prob=False)
            model = add_objective(model, u, unc_type)
            opt = SolverFactory('gurobi')
            start_opt = time.time()
            opt.solve(model)
            end_opt = time.time()
            solution_ = [value(model.x[i]) for i in u.columns]
            solution_ = pd.DataFrame([solution_], columns=u.columns)
            comp_time = end_opt - start_opt
            print('validity constraint embedded.')
        else:
            print('\n### Starting the GRADIENT BOOSTING iterative approach ###')
            model, num_iterations, comp_time, solution_, solutions_master_dict = adversarial_algorithm(
                model, 'prediction', clf_type, save_path, u, mfile, F, rho, unc_type, lb, ub, time_limit)
    elif clf_type == 'mlp':
        print('\n### Starting the NN iterative approach ###')
        model, num_iterations, comp_time, solution_, solutions_master_dict = adversarial_algorithm(
            model, 'prediction', clf_type, save_path, u, mfile, F, rho, unc_type, lb, ub, time_limit)

    return model, num_iterations, comp_time, solution_, solutions_master_dict


def generate(clf, X_train, y_train, save_path, clf_type, task_type, u, F, F_b, F_int, F_coh, I, L, P, rho,
             unc_type='l2', iterative=False, time_limit=100):
    """
    This function generates the robust counterfactual explanation for the factual instance 'u'.
    """
    write_tables(save_path, X_train, y_train, clf, clf_type, task_type)
    model = base_model(u, F, F_b, F_int, F_coh, I, L, P)
    model, num_iterations, comp_time, solution_, solutions_master_dict = add_validity_constraint(
        model, clf, clf_type, save_path, u, F, rho, unc_type, iterative, time_limit)

    return model, num_iterations, comp_time, solution_, solutions_master_dict
--------------------------------------------------------------------------------
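
Example usage (a minimal sketch, not part of the repository: the 'Outcome' column name follows the bundled data/diabetes.csv, the task_type value is an assumption passed through to ConstraintLearning, and the Pyomo solver calls above require a working Gurobi installation; the actual experiment setup lives in experiments/experiments.ipynb):

from sklearn.linear_model import LogisticRegression
import pandas as pd
import rce

# assumed: the Pima diabetes dataset shipped in data/, with an 'Outcome' label column
df = pd.read_csv('data/diabetes.csv')
X, y = df.drop('Outcome', axis=1), df['Outcome']
clf = LogisticRegression(max_iter=1000).fit(X, y)

u = X.iloc[[0]]      # factual instance as a one-row DataFrame
F = list(X.columns)  # here: all features mutable, continuous, and non-negative
model, n_iter, comp_time, x_ce, _ = rce.generate(
    clf, X, y, save_path='experiments/results_diabetes', clf_type='linear',
    task_type='binary',  # assumed value
    u=u, F=F, F_b=[], F_int=[], F_coh={}, I=[], L=[], P=F,
    rho=0.1, unc_type='l2', iterative=False, time_limit=100)
print(x_ce)  # counterfactual that stays valid for every perturbation with ||delta||_2 <= rho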