├── .deepsource.toml ├── .gitattributes ├── .gitignore ├── LICENSE ├── README.md ├── Report.pdf ├── data ├── balance-scale │ ├── Index │ ├── balance-scale.data │ ├── balance-scale.names │ └── balance-scale_enc.csv ├── breast-cancer │ ├── Index │ ├── breast-cancer.data │ ├── breast-cancer.names │ └── breast-cancer_enc.csv ├── car-evaluation │ ├── car.c45-names │ ├── car.data │ ├── car.names │ └── car_eval_enc.csv ├── hayes-roth │ ├── Index │ ├── hayes-roth.data │ ├── hayes-roth.names │ ├── hayes-roth.test │ └── hayes-roth_enc.csv ├── house-votes-84 │ ├── Index │ ├── house-votes-84.data │ ├── house-votes-84.names │ └── house-votes-84_enc.csv ├── monks │ ├── Index.txt │ ├── monk1_enc.csv │ ├── monk2_enc.csv │ ├── monk3_enc.csv │ ├── monks-1.test │ ├── monks-1.train │ ├── monks-2.test │ ├── monks-2.train │ ├── monks-3.test │ ├── monks-3.train │ └── monks.names ├── soybean-small │ ├── Index │ ├── backup-large.data │ ├── backup-large.test │ ├── fisher-order │ ├── soybean-explanation │ ├── soybean-large.names │ ├── soybean-small.data │ ├── soybean-small.names │ ├── soybean-small_enc.csv │ ├── stepp-order │ └── why-various-soybean-databases ├── spect │ ├── DonorNote.txt │ ├── spect.names │ ├── spect.test │ ├── spect.train │ └── spect_enc.csv └── tic-tac-toe │ ├── Index │ ├── tic-tac-toe.data │ ├── tic-tac-toe.names │ └── tic-tac-toe_enc.csv ├── dataset.py ├── experiments ├── Experiments.ipynb ├── Result.ipynb └── Test.ipynb ├── fig ├── dt.jpg └── maxflow.png ├── res ├── 600 │ ├── boct.csv │ ├── mfoct.csv │ ├── oct.csv │ └── soct.csv ├── 1200 │ ├── boct.csv │ ├── mfoct.csv │ ├── oct.csv │ └── soct.csv └── sk.csv └── tree ├── __init__.py ├── binoct.py ├── mfoct.py └── oct.py /.deepsource.toml: -------------------------------------------------------------------------------- 1 | version = 1 2 | 3 | [[analyzers]] 4 | name = "python" 5 | enabled = true 6 | 7 | [analyzers.meta] 8 | runtime_version = "3.x.x" -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | *.egg-info/ 24 | .installed.cfg 25 | *.egg 26 | MANIFEST 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .nox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *.cover 48 | .hypothesis/ 49 | .pytest_cache/ 50 | 51 | # Translations 52 | *.mo 53 | *.pot 54 | 55 | # Django stuff: 56 | *.log 57 | local_settings.py 58 | db.sqlite3 59 | 60 | # Flask stuff: 61 | instance/ 62 | .webassets-cache 63 | 64 | # Scrapy stuff: 65 | .scrapy 66 | 67 | # Sphinx documentation 68 | docs/_build/ 69 | 70 | # PyBuilder 71 | target/ 72 | 73 | # Jupyter Notebook 74 | .ipynb_checkpoints 75 | 76 | # IPython 77 | profile_default/ 78 | ipython_config.py 79 | 80 | # pyenv 81 | .python-version 82 | 83 | # celery beat schedule file 84 | celerybeat-schedule 85 | 86 | # SageMath parsed files 87 | *.sage.py 88 | 89 | # Environments 90 | .env 91 | .venv 92 | env/ 93 | venv/ 94 | ENV/ 95 | env.bak/ 96 | venv.bak/ 97 | 98 | # Spyder project settings 99 | .spyderproject 100 | .spyproject 101 | 102 | # Rope project settings 103 | .ropeproject 104 | 105 | # mkdocs documentation 106 | /site 107 | 108 | # mypy 109 | .mypy_cache/ 110 | .dmypy.json 111 | dmypy.json 112 | 113 | # Pyre type checker 114 | .pyre/ 115 | fig/maxflow.pptx 116 | .idea/* 117 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 LucasBoTang 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /Report.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LucasBoTang/Optimal_Classification_Trees/9a5714db7cb3a60eea4758c8143dc3144f96b3d2/Report.pdf -------------------------------------------------------------------------------- /data/balance-scale/Index: -------------------------------------------------------------------------------- 1 | Index of balance-scale 2 | 3 | 02 Dec 1996 132 Index 4 | 13 May 1994 6250 balance-scale.data 5 | 13 May 1994 2222 balance-scale.names 6 | -------------------------------------------------------------------------------- /data/balance-scale/balance-scale.data: -------------------------------------------------------------------------------- 1 | B,1,1,1,1 2 | R,1,1,1,2 3 | R,1,1,1,3 4 | R,1,1,1,4 5 | R,1,1,1,5 6 | R,1,1,2,1 7 | R,1,1,2,2 8 | R,1,1,2,3 9 | R,1,1,2,4 10 | R,1,1,2,5 11 | R,1,1,3,1 12 | R,1,1,3,2 13 | R,1,1,3,3 14 | R,1,1,3,4 15 | R,1,1,3,5 16 | R,1,1,4,1 17 | R,1,1,4,2 18 | R,1,1,4,3 19 | R,1,1,4,4 20 | R,1,1,4,5 21 | R,1,1,5,1 22 | R,1,1,5,2 23 | R,1,1,5,3 24 | R,1,1,5,4 25 | R,1,1,5,5 26 | L,1,2,1,1 27 | B,1,2,1,2 28 | R,1,2,1,3 29 | R,1,2,1,4 30 | R,1,2,1,5 31 | B,1,2,2,1 32 | R,1,2,2,2 33 | R,1,2,2,3 34 | R,1,2,2,4 35 | R,1,2,2,5 36 | R,1,2,3,1 37 | R,1,2,3,2 38 | R,1,2,3,3 39 | R,1,2,3,4 40 | R,1,2,3,5 41 | R,1,2,4,1 42 | R,1,2,4,2 43 | R,1,2,4,3 44 | R,1,2,4,4 45 | R,1,2,4,5 46 | R,1,2,5,1 47 | R,1,2,5,2 48 | R,1,2,5,3 49 | R,1,2,5,4 50 | R,1,2,5,5 51 | L,1,3,1,1 52 | L,1,3,1,2 53 | B,1,3,1,3 54 | R,1,3,1,4 55 | R,1,3,1,5 56 | L,1,3,2,1 57 | R,1,3,2,2 58 | R,1,3,2,3 59 | R,1,3,2,4 60 | R,1,3,2,5 61 | B,1,3,3,1 62 | R,1,3,3,2 63 | R,1,3,3,3 64 | R,1,3,3,4 65 | R,1,3,3,5 66 | R,1,3,4,1 67 | R,1,3,4,2 68 | R,1,3,4,3 69 | R,1,3,4,4 70 | R,1,3,4,5 71 | R,1,3,5,1 72 | R,1,3,5,2 73 | R,1,3,5,3 74 | R,1,3,5,4 75 | R,1,3,5,5 76 | L,1,4,1,1 77 | L,1,4,1,2 78 | L,1,4,1,3 79 | B,1,4,1,4 80 | R,1,4,1,5 81 | L,1,4,2,1 82 | B,1,4,2,2 83 | R,1,4,2,3 84 | R,1,4,2,4 85 | R,1,4,2,5 86 | L,1,4,3,1 87 | R,1,4,3,2 88 | R,1,4,3,3 89 | R,1,4,3,4 90 | R,1,4,3,5 91 | B,1,4,4,1 92 | R,1,4,4,2 93 | R,1,4,4,3 94 | R,1,4,4,4 95 | R,1,4,4,5 96 | R,1,4,5,1 97 | R,1,4,5,2 98 | R,1,4,5,3 99 | R,1,4,5,4 100 | R,1,4,5,5 101 | L,1,5,1,1 102 | L,1,5,1,2 103 | L,1,5,1,3 104 | L,1,5,1,4 105 | B,1,5,1,5 106 | L,1,5,2,1 107 | L,1,5,2,2 108 | R,1,5,2,3 109 | R,1,5,2,4 110 | R,1,5,2,5 111 | L,1,5,3,1 112 | R,1,5,3,2 113 | R,1,5,3,3 114 | R,1,5,3,4 115 | R,1,5,3,5 116 | L,1,5,4,1 117 | R,1,5,4,2 118 | R,1,5,4,3 119 | R,1,5,4,4 120 | R,1,5,4,5 121 | B,1,5,5,1 122 | R,1,5,5,2 123 | R,1,5,5,3 124 | R,1,5,5,4 125 | R,1,5,5,5 126 | L,2,1,1,1 127 | B,2,1,1,2 128 | R,2,1,1,3 129 | R,2,1,1,4 130 | R,2,1,1,5 131 | B,2,1,2,1 132 | R,2,1,2,2 133 | R,2,1,2,3 134 | R,2,1,2,4 135 | R,2,1,2,5 136 | R,2,1,3,1 137 | R,2,1,3,2 138 | R,2,1,3,3 139 | R,2,1,3,4 140 | R,2,1,3,5 141 | R,2,1,4,1 142 | R,2,1,4,2 143 | R,2,1,4,3 144 | R,2,1,4,4 145 | R,2,1,4,5 146 | R,2,1,5,1 147 | R,2,1,5,2 148 | R,2,1,5,3 149 | R,2,1,5,4 150 | R,2,1,5,5 151 | L,2,2,1,1 152 | L,2,2,1,2 153 | L,2,2,1,3 154 | B,2,2,1,4 155 | R,2,2,1,5 156 | L,2,2,2,1 157 | B,2,2,2,2 158 | R,2,2,2,3 159 | R,2,2,2,4 160 | R,2,2,2,5 161 | L,2,2,3,1 162 | R,2,2,3,2 163 | R,2,2,3,3 164 | R,2,2,3,4 165 | R,2,2,3,5 166 | B,2,2,4,1 167 | R,2,2,4,2 168 | R,2,2,4,3 169 | R,2,2,4,4 170 | R,2,2,4,5 171 | R,2,2,5,1 172 | R,2,2,5,2 173 | R,2,2,5,3 174 | R,2,2,5,4 175 | R,2,2,5,5 176 | L,2,3,1,1 177 | 
L,2,3,1,2 178 | L,2,3,1,3 179 | L,2,3,1,4 180 | L,2,3,1,5 181 | L,2,3,2,1 182 | L,2,3,2,2 183 | B,2,3,2,3 184 | R,2,3,2,4 185 | R,2,3,2,5 186 | L,2,3,3,1 187 | B,2,3,3,2 188 | R,2,3,3,3 189 | R,2,3,3,4 190 | R,2,3,3,5 191 | L,2,3,4,1 192 | R,2,3,4,2 193 | R,2,3,4,3 194 | R,2,3,4,4 195 | R,2,3,4,5 196 | L,2,3,5,1 197 | R,2,3,5,2 198 | R,2,3,5,3 199 | R,2,3,5,4 200 | R,2,3,5,5 201 | L,2,4,1,1 202 | L,2,4,1,2 203 | L,2,4,1,3 204 | L,2,4,1,4 205 | L,2,4,1,5 206 | L,2,4,2,1 207 | L,2,4,2,2 208 | L,2,4,2,3 209 | B,2,4,2,4 210 | R,2,4,2,5 211 | L,2,4,3,1 212 | L,2,4,3,2 213 | R,2,4,3,3 214 | R,2,4,3,4 215 | R,2,4,3,5 216 | L,2,4,4,1 217 | B,2,4,4,2 218 | R,2,4,4,3 219 | R,2,4,4,4 220 | R,2,4,4,5 221 | L,2,4,5,1 222 | R,2,4,5,2 223 | R,2,4,5,3 224 | R,2,4,5,4 225 | R,2,4,5,5 226 | L,2,5,1,1 227 | L,2,5,1,2 228 | L,2,5,1,3 229 | L,2,5,1,4 230 | L,2,5,1,5 231 | L,2,5,2,1 232 | L,2,5,2,2 233 | L,2,5,2,3 234 | L,2,5,2,4 235 | B,2,5,2,5 236 | L,2,5,3,1 237 | L,2,5,3,2 238 | L,2,5,3,3 239 | R,2,5,3,4 240 | R,2,5,3,5 241 | L,2,5,4,1 242 | L,2,5,4,2 243 | R,2,5,4,3 244 | R,2,5,4,4 245 | R,2,5,4,5 246 | L,2,5,5,1 247 | B,2,5,5,2 248 | R,2,5,5,3 249 | R,2,5,5,4 250 | R,2,5,5,5 251 | L,3,1,1,1 252 | L,3,1,1,2 253 | B,3,1,1,3 254 | R,3,1,1,4 255 | R,3,1,1,5 256 | L,3,1,2,1 257 | R,3,1,2,2 258 | R,3,1,2,3 259 | R,3,1,2,4 260 | R,3,1,2,5 261 | B,3,1,3,1 262 | R,3,1,3,2 263 | R,3,1,3,3 264 | R,3,1,3,4 265 | R,3,1,3,5 266 | R,3,1,4,1 267 | R,3,1,4,2 268 | R,3,1,4,3 269 | R,3,1,4,4 270 | R,3,1,4,5 271 | R,3,1,5,1 272 | R,3,1,5,2 273 | R,3,1,5,3 274 | R,3,1,5,4 275 | R,3,1,5,5 276 | L,3,2,1,1 277 | L,3,2,1,2 278 | L,3,2,1,3 279 | L,3,2,1,4 280 | L,3,2,1,5 281 | L,3,2,2,1 282 | L,3,2,2,2 283 | B,3,2,2,3 284 | R,3,2,2,4 285 | R,3,2,2,5 286 | L,3,2,3,1 287 | B,3,2,3,2 288 | R,3,2,3,3 289 | R,3,2,3,4 290 | R,3,2,3,5 291 | L,3,2,4,1 292 | R,3,2,4,2 293 | R,3,2,4,3 294 | R,3,2,4,4 295 | R,3,2,4,5 296 | L,3,2,5,1 297 | R,3,2,5,2 298 | R,3,2,5,3 299 | R,3,2,5,4 300 | R,3,2,5,5 301 | L,3,3,1,1 302 | L,3,3,1,2 303 | L,3,3,1,3 304 | L,3,3,1,4 305 | L,3,3,1,5 306 | L,3,3,2,1 307 | L,3,3,2,2 308 | L,3,3,2,3 309 | L,3,3,2,4 310 | R,3,3,2,5 311 | L,3,3,3,1 312 | L,3,3,3,2 313 | B,3,3,3,3 314 | R,3,3,3,4 315 | R,3,3,3,5 316 | L,3,3,4,1 317 | L,3,3,4,2 318 | R,3,3,4,3 319 | R,3,3,4,4 320 | R,3,3,4,5 321 | L,3,3,5,1 322 | R,3,3,5,2 323 | R,3,3,5,3 324 | R,3,3,5,4 325 | R,3,3,5,5 326 | L,3,4,1,1 327 | L,3,4,1,2 328 | L,3,4,1,3 329 | L,3,4,1,4 330 | L,3,4,1,5 331 | L,3,4,2,1 332 | L,3,4,2,2 333 | L,3,4,2,3 334 | L,3,4,2,4 335 | L,3,4,2,5 336 | L,3,4,3,1 337 | L,3,4,3,2 338 | L,3,4,3,3 339 | B,3,4,3,4 340 | R,3,4,3,5 341 | L,3,4,4,1 342 | L,3,4,4,2 343 | B,3,4,4,3 344 | R,3,4,4,4 345 | R,3,4,4,5 346 | L,3,4,5,1 347 | L,3,4,5,2 348 | R,3,4,5,3 349 | R,3,4,5,4 350 | R,3,4,5,5 351 | L,3,5,1,1 352 | L,3,5,1,2 353 | L,3,5,1,3 354 | L,3,5,1,4 355 | L,3,5,1,5 356 | L,3,5,2,1 357 | L,3,5,2,2 358 | L,3,5,2,3 359 | L,3,5,2,4 360 | L,3,5,2,5 361 | L,3,5,3,1 362 | L,3,5,3,2 363 | L,3,5,3,3 364 | L,3,5,3,4 365 | B,3,5,3,5 366 | L,3,5,4,1 367 | L,3,5,4,2 368 | L,3,5,4,3 369 | R,3,5,4,4 370 | R,3,5,4,5 371 | L,3,5,5,1 372 | L,3,5,5,2 373 | B,3,5,5,3 374 | R,3,5,5,4 375 | R,3,5,5,5 376 | L,4,1,1,1 377 | L,4,1,1,2 378 | L,4,1,1,3 379 | B,4,1,1,4 380 | R,4,1,1,5 381 | L,4,1,2,1 382 | B,4,1,2,2 383 | R,4,1,2,3 384 | R,4,1,2,4 385 | R,4,1,2,5 386 | L,4,1,3,1 387 | R,4,1,3,2 388 | R,4,1,3,3 389 | R,4,1,3,4 390 | R,4,1,3,5 391 | B,4,1,4,1 392 | R,4,1,4,2 393 | R,4,1,4,3 394 | R,4,1,4,4 395 | R,4,1,4,5 396 | R,4,1,5,1 397 | R,4,1,5,2 398 | R,4,1,5,3 399 | 
R,4,1,5,4 400 | R,4,1,5,5 401 | L,4,2,1,1 402 | L,4,2,1,2 403 | L,4,2,1,3 404 | L,4,2,1,4 405 | L,4,2,1,5 406 | L,4,2,2,1 407 | L,4,2,2,2 408 | L,4,2,2,3 409 | B,4,2,2,4 410 | R,4,2,2,5 411 | L,4,2,3,1 412 | L,4,2,3,2 413 | R,4,2,3,3 414 | R,4,2,3,4 415 | R,4,2,3,5 416 | L,4,2,4,1 417 | B,4,2,4,2 418 | R,4,2,4,3 419 | R,4,2,4,4 420 | R,4,2,4,5 421 | L,4,2,5,1 422 | R,4,2,5,2 423 | R,4,2,5,3 424 | R,4,2,5,4 425 | R,4,2,5,5 426 | L,4,3,1,1 427 | L,4,3,1,2 428 | L,4,3,1,3 429 | L,4,3,1,4 430 | L,4,3,1,5 431 | L,4,3,2,1 432 | L,4,3,2,2 433 | L,4,3,2,3 434 | L,4,3,2,4 435 | L,4,3,2,5 436 | L,4,3,3,1 437 | L,4,3,3,2 438 | L,4,3,3,3 439 | B,4,3,3,4 440 | R,4,3,3,5 441 | L,4,3,4,1 442 | L,4,3,4,2 443 | B,4,3,4,3 444 | R,4,3,4,4 445 | R,4,3,4,5 446 | L,4,3,5,1 447 | L,4,3,5,2 448 | R,4,3,5,3 449 | R,4,3,5,4 450 | R,4,3,5,5 451 | L,4,4,1,1 452 | L,4,4,1,2 453 | L,4,4,1,3 454 | L,4,4,1,4 455 | L,4,4,1,5 456 | L,4,4,2,1 457 | L,4,4,2,2 458 | L,4,4,2,3 459 | L,4,4,2,4 460 | L,4,4,2,5 461 | L,4,4,3,1 462 | L,4,4,3,2 463 | L,4,4,3,3 464 | L,4,4,3,4 465 | L,4,4,3,5 466 | L,4,4,4,1 467 | L,4,4,4,2 468 | L,4,4,4,3 469 | B,4,4,4,4 470 | R,4,4,4,5 471 | L,4,4,5,1 472 | L,4,4,5,2 473 | L,4,4,5,3 474 | R,4,4,5,4 475 | R,4,4,5,5 476 | L,4,5,1,1 477 | L,4,5,1,2 478 | L,4,5,1,3 479 | L,4,5,1,4 480 | L,4,5,1,5 481 | L,4,5,2,1 482 | L,4,5,2,2 483 | L,4,5,2,3 484 | L,4,5,2,4 485 | L,4,5,2,5 486 | L,4,5,3,1 487 | L,4,5,3,2 488 | L,4,5,3,3 489 | L,4,5,3,4 490 | L,4,5,3,5 491 | L,4,5,4,1 492 | L,4,5,4,2 493 | L,4,5,4,3 494 | L,4,5,4,4 495 | B,4,5,4,5 496 | L,4,5,5,1 497 | L,4,5,5,2 498 | L,4,5,5,3 499 | B,4,5,5,4 500 | R,4,5,5,5 501 | L,5,1,1,1 502 | L,5,1,1,2 503 | L,5,1,1,3 504 | L,5,1,1,4 505 | B,5,1,1,5 506 | L,5,1,2,1 507 | L,5,1,2,2 508 | R,5,1,2,3 509 | R,5,1,2,4 510 | R,5,1,2,5 511 | L,5,1,3,1 512 | R,5,1,3,2 513 | R,5,1,3,3 514 | R,5,1,3,4 515 | R,5,1,3,5 516 | L,5,1,4,1 517 | R,5,1,4,2 518 | R,5,1,4,3 519 | R,5,1,4,4 520 | R,5,1,4,5 521 | B,5,1,5,1 522 | R,5,1,5,2 523 | R,5,1,5,3 524 | R,5,1,5,4 525 | R,5,1,5,5 526 | L,5,2,1,1 527 | L,5,2,1,2 528 | L,5,2,1,3 529 | L,5,2,1,4 530 | L,5,2,1,5 531 | L,5,2,2,1 532 | L,5,2,2,2 533 | L,5,2,2,3 534 | L,5,2,2,4 535 | B,5,2,2,5 536 | L,5,2,3,1 537 | L,5,2,3,2 538 | L,5,2,3,3 539 | R,5,2,3,4 540 | R,5,2,3,5 541 | L,5,2,4,1 542 | L,5,2,4,2 543 | R,5,2,4,3 544 | R,5,2,4,4 545 | R,5,2,4,5 546 | L,5,2,5,1 547 | B,5,2,5,2 548 | R,5,2,5,3 549 | R,5,2,5,4 550 | R,5,2,5,5 551 | L,5,3,1,1 552 | L,5,3,1,2 553 | L,5,3,1,3 554 | L,5,3,1,4 555 | L,5,3,1,5 556 | L,5,3,2,1 557 | L,5,3,2,2 558 | L,5,3,2,3 559 | L,5,3,2,4 560 | L,5,3,2,5 561 | L,5,3,3,1 562 | L,5,3,3,2 563 | L,5,3,3,3 564 | L,5,3,3,4 565 | B,5,3,3,5 566 | L,5,3,4,1 567 | L,5,3,4,2 568 | L,5,3,4,3 569 | R,5,3,4,4 570 | R,5,3,4,5 571 | L,5,3,5,1 572 | L,5,3,5,2 573 | B,5,3,5,3 574 | R,5,3,5,4 575 | R,5,3,5,5 576 | L,5,4,1,1 577 | L,5,4,1,2 578 | L,5,4,1,3 579 | L,5,4,1,4 580 | L,5,4,1,5 581 | L,5,4,2,1 582 | L,5,4,2,2 583 | L,5,4,2,3 584 | L,5,4,2,4 585 | L,5,4,2,5 586 | L,5,4,3,1 587 | L,5,4,3,2 588 | L,5,4,3,3 589 | L,5,4,3,4 590 | L,5,4,3,5 591 | L,5,4,4,1 592 | L,5,4,4,2 593 | L,5,4,4,3 594 | L,5,4,4,4 595 | B,5,4,4,5 596 | L,5,4,5,1 597 | L,5,4,5,2 598 | L,5,4,5,3 599 | B,5,4,5,4 600 | R,5,4,5,5 601 | L,5,5,1,1 602 | L,5,5,1,2 603 | L,5,5,1,3 604 | L,5,5,1,4 605 | L,5,5,1,5 606 | L,5,5,2,1 607 | L,5,5,2,2 608 | L,5,5,2,3 609 | L,5,5,2,4 610 | L,5,5,2,5 611 | L,5,5,3,1 612 | L,5,5,3,2 613 | L,5,5,3,3 614 | L,5,5,3,4 615 | L,5,5,3,5 616 | L,5,5,4,1 617 | L,5,5,4,2 618 | L,5,5,4,3 619 | L,5,5,4,4 620 | L,5,5,4,5 621 | 
L,5,5,5,1 622 | L,5,5,5,2 623 | L,5,5,5,3 624 | L,5,5,5,4 625 | B,5,5,5,5 626 | -------------------------------------------------------------------------------- /data/balance-scale/balance-scale.names: -------------------------------------------------------------------------------- 1 | 1. Title: Balance Scale Weight & Distance Database 2 | 3 | 2. Source Information: 4 | (a) Source: Generated to model psychological experiments reported 5 | by Siegler, R. S. (1976). Three Aspects of Cognitive 6 | Development. Cognitive Psychology, 8, 481-520. 7 | (b) Donor: Tim Hume (hume@ics.uci.edu) 8 | (c) Date: 22 April 1994 9 | 10 | 3. Past Usage: (possibly different formats of this data) 11 | - Publications 12 | 1. Klahr, D., & Siegler, R.S. (1978). The Representation of 13 | Children's Knowledge. In H. W. Reese & L. P. Lipsitt (Eds.), 14 | Advances in Child Development and Behavior, pp. 61-116. New 15 | York: Academic Press 16 | 2. Langley,P. (1987). A General Theory of Discrimination 17 | Learning. In D. Klahr, P. Langley, & R. Neches (Eds.), 18 | Production System Models of Learning and Development, pp. 19 | 99-161. Cambridge, MA: MIT Press 20 | 3. Newell, A. (1990). Unified Theories of Cognition. 21 | Cambridge, MA: Harvard University Press 22 | 4. McClelland, J.L. (1988). Parallel Distibuted Processing: 23 | Implications for Cognition and Development. Technical 24 | Report AIP-47, Department of Psychology, Carnegie-Mellon 25 | University 26 | 5. Shultz, T., Mareschal, D., & Schmidt, W. (1994). Modeling 27 | Cognitive Development on Balance Scale Phenomena. Machine 28 | Learning, Vol. 16, pp. 59-88. 29 | 30 | 4. Relevant Information: 31 | This data set was generated to model psychological 32 | experimental results. Each example is classified as having the 33 | balance scale tip to the right, tip to the left, or be 34 | balanced. The attributes are the left weight, the left 35 | distance, the right weight, and the right distance. The 36 | correct way to find the class is the greater of 37 | (left-distance * left-weight) and (right-distance * 38 | right-weight). If they are equal, it is balanced. 39 | 40 | 5. Number of Instances: 625 (49 balanced, 288 left, 288 right) 41 | 42 | 6. Number of Attributes: 4 (numeric) + class name = 5 43 | 44 | 7. Attribute Information: 45 | 1. Class Name: 3 (L, B, R) 46 | 2. Left-Weight: 5 (1, 2, 3, 4, 5) 47 | 3. Left-Distance: 5 (1, 2, 3, 4, 5) 48 | 4. Right-Weight: 5 (1, 2, 3, 4, 5) 49 | 5. Right-Distance: 5 (1, 2, 3, 4, 5) 50 | 51 | 8. Missing Attribute Values: 52 | none 53 | 54 | 9. Class Distribution: 55 | 1. 46.08 percent are L 56 | 2. 07.84 percent are B 57 | 3. 46.08 percent are R 58 | -------------------------------------------------------------------------------- /data/breast-cancer/Index: -------------------------------------------------------------------------------- 1 | Index of breast-cancer 2 | 3 | 02 Dec 1996 132 Index 4 | 05 Mar 1990 3172 breast-cancer.names 5 | 30 May 1989 18654 breast-cancer.data 6 | -------------------------------------------------------------------------------- /data/breast-cancer/breast-cancer.names: -------------------------------------------------------------------------------- 1 | Citation Request: 2 | This breast cancer domain was obtained from the University Medical Centre, 3 | Institute of Oncology, Ljubljana, Yugoslavia. Thanks go to M. Zwitter and 4 | M. Soklic for providing the data. Please include this citation if you plan 5 | to use this database. 6 | 7 | 1. Title: Breast cancer data (Michalski has used this) 8 | 9 | 2. 
Sources: 10 | -- Matjaz Zwitter & Milan Soklic (physicians) 11 | Institute of Oncology 12 | University Medical Center 13 | Ljubljana, Yugoslavia 14 | -- Donors: Ming Tan and Jeff Schlimmer (Jeffrey.Schlimmer@a.gp.cs.cmu.edu) 15 | -- Date: 11 July 1988 16 | 17 | 3. Past Usage: (Several: here are some) 18 | -- Michalski,R.S., Mozetic,I., Hong,J., & Lavrac,N. (1986). The 19 | Multi-Purpose Incremental Learning System AQ15 and its Testing 20 | Application to Three Medical Domains. In Proceedings of the 21 | Fifth National Conference on Artificial Intelligence, 1041-1045, 22 | Philadelphia, PA: Morgan Kaufmann. 23 | -- accuracy range: 66%-72% 24 | -- Clark,P. & Niblett,T. (1987). Induction in Noisy Domains. In 25 | Progress in Machine Learning (from the Proceedings of the 2nd 26 | European Working Session on Learning), 11-30, Bled, 27 | Yugoslavia: Sigma Press. 28 | -- 8 test results given: 65%-72% accuracy range 29 | -- Tan, M., & Eshelman, L. (1988). Using weighted networks to 30 | represent classification knowledge in noisy domains. Proceedings 31 | of the Fifth International Conference on Machine Learning, 121-134, 32 | Ann Arbor, MI. 33 | -- 4 systems tested: accuracy range was 68%-73.5% 34 | -- Cestnik,G., Konenenko,I, & Bratko,I. (1987). Assistant-86: A 35 | Knowledge-Elicitation Tool for Sophisticated Users. In I.Bratko 36 | & N.Lavrac (Eds.) Progress in Machine Learning, 31-45, Sigma Press. 37 | -- Assistant-86: 78% accuracy 38 | 39 | 4. Relevant Information: 40 | This is one of three domains provided by the Oncology Institute 41 | that has repeatedly appeared in the machine learning literature. 42 | (See also lymphography and primary-tumor.) 43 | 44 | This data set includes 201 instances of one class and 85 instances of 45 | another class. The instances are described by 9 attributes, some of 46 | which are linear and some are nominal. 47 | 48 | 5. Number of Instances: 286 49 | 50 | 6. Number of Attributes: 9 + the class attribute 51 | 52 | 7. Attribute Information: 53 | 1. Class: no-recurrence-events, recurrence-events 54 | 2. age: 10-19, 20-29, 30-39, 40-49, 50-59, 60-69, 70-79, 80-89, 90-99. 55 | 3. menopause: lt40, ge40, premeno. 56 | 4. tumor-size: 0-4, 5-9, 10-14, 15-19, 20-24, 25-29, 30-34, 35-39, 40-44, 57 | 45-49, 50-54, 55-59. 58 | 5. inv-nodes: 0-2, 3-5, 6-8, 9-11, 12-14, 15-17, 18-20, 21-23, 24-26, 59 | 27-29, 30-32, 33-35, 36-39. 60 | 6. node-caps: yes, no. 61 | 7. deg-malig: 1, 2, 3. 62 | 8. breast: left, right. 63 | 9. breast-quad: left-up, left-low, right-up, right-low, central. 64 | 10. irradiat: yes, no. 65 | 66 | 8. Missing Attribute Values: (denoted by "?") 67 | Attribute #: Number of instances with missing values: 68 | 6. 8 69 | 9. 1. 70 | 71 | 9. Class Distribution: 72 | 1. no-recurrence-events: 201 instances 73 | 2. recurrence-events: 85 instances -------------------------------------------------------------------------------- /data/car-evaluation/car.c45-names: -------------------------------------------------------------------------------- 1 | | names file (C4.5 format) for car evaluation domain 2 | 3 | | class values 4 | 5 | unacc, acc, good, vgood 6 | 7 | | attributes 8 | 9 | buying: vhigh, high, med, low. 10 | maint: vhigh, high, med, low. 11 | doors: 2, 3, 4, 5more. 12 | persons: 2, 4, more. 13 | lug_boot: small, med, big. 14 | safety: low, med, high. 15 | -------------------------------------------------------------------------------- /data/car-evaluation/car.names: -------------------------------------------------------------------------------- 1 | 1. 
Title: Car Evaluation Database 2 | 3 | 2. Sources: 4 | (a) Creator: Marko Bohanec 5 | (b) Donors: Marko Bohanec (marko.bohanec@ijs.si) 6 | Blaz Zupan (blaz.zupan@ijs.si) 7 | (c) Date: June, 1997 8 | 9 | 3. Past Usage: 10 | 11 | The hierarchical decision model, from which this dataset is 12 | derived, was first presented in 13 | 14 | M. Bohanec and V. Rajkovic: Knowledge acquisition and explanation for 15 | multi-attribute decision making. In 8th Intl Workshop on Expert 16 | Systems and their Applications, Avignon, France. pages 59-78, 1988. 17 | 18 | Within machine-learning, this dataset was used for the evaluation 19 | of HINT (Hierarchy INduction Tool), which was proved to be able to 20 | completely reconstruct the original hierarchical model. This, 21 | together with a comparison with C4.5, is presented in 22 | 23 | B. Zupan, M. Bohanec, I. Bratko, J. Demsar: Machine learning by 24 | function decomposition. ICML-97, Nashville, TN. 1997 (to appear) 25 | 26 | 4. Relevant Information Paragraph: 27 | 28 | Car Evaluation Database was derived from a simple hierarchical 29 | decision model originally developed for the demonstration of DEX 30 | (M. Bohanec, V. Rajkovic: Expert system for decision 31 | making. Sistemica 1(1), pp. 145-157, 1990.). The model evaluates 32 | cars according to the following concept structure: 33 | 34 | CAR car acceptability 35 | . PRICE overall price 36 | . . buying buying price 37 | . . maint price of the maintenance 38 | . TECH technical characteristics 39 | . . COMFORT comfort 40 | . . . doors number of doors 41 | . . . persons capacity in terms of persons to carry 42 | . . . lug_boot the size of luggage boot 43 | . . safety estimated safety of the car 44 | 45 | Input attributes are printed in lowercase. Besides the target 46 | concept (CAR), the model includes three intermediate concepts: 47 | PRICE, TECH, COMFORT. Every concept is in the original model 48 | related to its lower level descendants by a set of examples (for 49 | these examples sets see http://www-ai.ijs.si/BlazZupan/car.html). 50 | 51 | The Car Evaluation Database contains examples with the structural 52 | information removed, i.e., directly relates CAR to the six input 53 | attributes: buying, maint, doors, persons, lug_boot, safety. 54 | 55 | Because of known underlying concept structure, this database may be 56 | particularly useful for testing constructive induction and 57 | structure discovery methods. 58 | 59 | 5. Number of Instances: 1728 60 | (instances completely cover the attribute space) 61 | 62 | 6. Number of Attributes: 6 63 | 64 | 7. Attribute Values: 65 | 66 | buying v-high, high, med, low 67 | maint v-high, high, med, low 68 | doors 2, 3, 4, 5-more 69 | persons 2, 4, more 70 | lug_boot small, med, big 71 | safety low, med, high 72 | 73 | 8. Missing Attribute Values: none 74 | 75 | 9. 
Class Distribution (number of instances per class) 76 | 77 | class N N[%] 78 | ----------------------------- 79 | unacc 1210 (70.023 %) 80 | acc 384 (22.222 %) 81 | good 69 ( 3.993 %) 82 | v-good 65 ( 3.762 %) 83 | -------------------------------------------------------------------------------- /data/hayes-roth/Index: -------------------------------------------------------------------------------- 1 | Index of hayes-roth 2 | 3 | 02 Dec 1996 160 Index 4 | 26 Feb 1990 1740 hayes-roth.data 5 | 26 Feb 1990 280 hayes-roth.test 6 | 31 Aug 1989 6430 hayes-roth.names 7 | -------------------------------------------------------------------------------- /data/hayes-roth/hayes-roth.data: -------------------------------------------------------------------------------- 1 | 92,2,1,1,2,1 2 | 10,2,1,3,2,2 3 | 83,3,1,4,1,3 4 | 61,2,4,2,2,3 5 | 107,1,1,3,4,3 6 | 113,1,1,3,2,2 7 | 80,3,1,3,2,2 8 | 125,3,4,2,4,3 9 | 36,2,2,1,1,1 10 | 105,3,2,1,1,1 11 | 81,1,2,1,1,1 12 | 122,2,2,3,4,3 13 | 94,1,1,2,1,1 14 | 60,2,1,2,2,2 15 | 8,2,4,1,4,3 16 | 20,1,1,3,3,1 17 | 85,3,2,1,2,2 18 | 50,1,2,1,1,1 19 | 68,3,3,2,1,1 20 | 89,3,1,3,2,1 21 | 52,1,2,2,1,2 22 | 19,3,2,1,3,1 23 | 118,2,1,2,1,1 24 | 16,3,2,1,3,1 25 | 91,2,3,2,1,1 26 | 79,3,2,2,1,2 27 | 23,3,2,1,3,2 28 | 25,2,1,2,2,2 29 | 30,1,1,3,2,1 30 | 57,3,2,1,1,1 31 | 3,1,4,1,1,3 32 | 114,2,2,1,3,1 33 | 37,1,2,1,3,2 34 | 66,1,1,1,2,1 35 | 110,2,4,3,1,3 36 | 116,3,1,2,2,2 37 | 88,1,1,2,2,2 38 | 77,3,2,2,1,2 39 | 82,1,2,1,2,2 40 | 64,3,4,3,2,3 41 | 84,2,2,2,1,2 42 | 86,2,2,1,2,2 43 | 6,3,2,1,3,2 44 | 74,3,2,1,1,1 45 | 106,3,1,2,1,1 46 | 115,1,2,1,3,2 47 | 130,2,1,1,2,1 48 | 54,1,1,1,2,1 49 | 33,1,2,2,3,2 50 | 67,3,3,1,1,1 51 | 69,3,3,3,1,1 52 | 39,3,2,1,2,2 53 | 53,3,2,1,2,2 54 | 127,3,1,2,1,1 55 | 96,1,1,1,2,1 56 | 121,2,1,3,2,1 57 | 70,2,2,2,1,2 58 | 123,2,1,2,1,1 59 | 42,2,2,1,3,1 60 | 78,2,1,2,2,2 61 | 11,1,2,4,2,3 62 | 129,2,2,1,2,2 63 | 128,1,1,2,4,3 64 | 5,1,3,2,1,1 65 | 4,2,4,4,2,3 66 | 95,2,3,2,1,1 67 | 73,3,1,2,2,2 68 | 26,1,1,2,2,2 69 | 48,1,3,2,4,3 70 | 104,1,1,2,2,2 71 | 102,3,1,4,2,3 72 | 2,2,1,3,2,2 73 | 41,1,1,3,2,2 74 | 119,3,1,3,2,1 75 | 75,1,2,4,4,3 76 | 47,1,4,2,1,3 77 | 93,2,1,2,1,1 78 | 46,3,4,1,2,3 79 | 132,2,2,1,1,1 80 | 108,1,1,2,1,1 81 | 18,2,2,4,3,3 82 | 62,3,1,2,2,2 83 | 120,1,1,3,2,1 84 | 35,1,2,1,3,1 85 | 27,1,4,4,1,3 86 | 98,3,3,3,2,2 87 | 109,2,2,1,3,2 88 | 31,3,3,2,1,2 89 | 112,1,1,1,3,1 90 | 34,2,2,1,2,2 91 | 63,2,2,2,1,2 92 | 65,2,3,2,3,2 93 | 117,1,3,2,1,2 94 | 56,2,2,1,2,2 95 | 59,1,1,1,2,1 96 | 76,3,2,2,1,2 97 | 1,3,2,1,1,1 98 | 28,1,1,2,1,1 99 | 22,3,1,4,4,3 100 | 29,3,3,2,1,2 101 | 111,2,3,2,1,2 102 | 97,2,1,3,1,1 103 | 49,1,2,1,2,2 104 | 51,3,1,1,2,1 105 | 87,2,2,4,1,3 106 | 58,1,2,2,1,2 107 | 32,2,3,2,1,2 108 | 72,2,2,1,4,3 109 | 55,1,4,2,3,3 110 | 103,2,2,1,1,1 111 | 7,1,2,1,1,1 112 | 99,2,2,3,2,2 113 | 15,1,3,2,1,1 114 | 126,3,1,2,1,1 115 | 45,3,1,1,2,1 116 | 101,3,3,1,4,3 117 | 100,2,3,4,1,3 118 | 24,1,2,3,3,2 119 | 124,3,3,2,2,2 120 | 13,3,3,4,2,3 121 | 14,1,2,2,1,2 122 | 38,2,1,1,4,3 123 | 71,3,1,2,2,2 124 | 43,3,2,2,4,3 125 | 131,2,3,1,3,1 126 | 17,2,1,1,2,1 127 | 12,3,4,1,3,3 128 | 44,1,1,4,3,3 129 | 40,2,1,2,1,1 130 | 90,1,2,1,2,2 131 | 21,1,2,2,1,2 132 | 9,3,1,1,2,1 133 | -------------------------------------------------------------------------------- /data/hayes-roth/hayes-roth.names: -------------------------------------------------------------------------------- 1 | 1. Title: Hayes-Roth & Hayes-Roth (1977) Database 2 | 3 | 2. Source Information: 4 | (a) Creators: Barbara and Frederick Hayes-Roth 5 | (b) Donor: David W. 
Aha (aha@ics.uci.edu) (714) 856-8779 6 | (c) Date: March, 1989 7 | 8 | 3. Past Usage: 9 | 1. Hayes-Roth, B., & Hayes-Roth, F. (1977). Concept learning and the 10 | recognition and classification of exemplars. Journal of Verbal Learning 11 | and Verbal Behavior, 16, 321-338. 12 | -- Results: 13 | -- Human subjects classification and recognition performance: 14 | 1. decreases with distance from the prototype, 15 | 2. is better on unseen prototypes than old instances, and 16 | 3. improves with presentation frequency during learning. 17 | 2. Anderson, J.R., & Kline, P.J. (1979). A learning system and its 18 | psychological implications. In Proceedings of the Sixth International 19 | Joint Conference on Artificial Intelligence (pp. 16-21). Tokyo, Japan: 20 | Morgan Kaufmann. 21 | -- Partitioned the results into 4 classes: 22 | 1. prototypes 23 | 2. near-prototypes with high presentation frequency during learning 24 | 3. near-prototypes with low presentation frequency during learning 25 | 4. instances that are far from protoypes 26 | -- Described evidence that ACT's classification confidence and 27 | recognition behaviors closely simulated human subjects' behaviors. 28 | 3. Aha, D.W. (1989). Incremental learning of independent, overlapping, and 29 | graded concept descriptions with an instance-based process framework. 30 | Manuscript submitted for publication. 31 | -- Used same partition as Anderson & Kline 32 | -- Described evidence that Bloom's classification confidence behavior 33 | is similar to the human subjects' behavior. Bloom fitted the data 34 | more closely than did ACT. 35 | 36 | 4. Relevant Information: 37 | This database contains 5 numeric-valued attributes. Only a subset of 38 | 3 are used during testing (the latter 3). Furthermore, only 2 of the 39 | 3 concepts are "used" during testing (i.e., those with the prototypes 40 | 000 and 111). I've mapped all values to their zero-indexing equivalents. 41 | 42 | Some instances could be placed in either category 0 or 1. I've followed 43 | the authors' suggestion, placing them in each category with equal 44 | probability. 45 | 46 | I've replaced the actual values of the attributes (i.e., hobby has values 47 | chess, sports and stamps) with numeric values. I think this is how 48 | the authors' did this when testing the categorization models described 49 | in the paper. I find this unfair. While the subjects were able to bring 50 | background knowledge to bear on the attribute values and their 51 | relationships, the algorithms were provided with no such knowledge. I'm 52 | uncertain whether the 2 distractor attributes (name and hobby) are 53 | presented to the authors' algorithms during testing. However, it is clear 54 | that only the age, educational status, and marital status attributes are 55 | given during the human subjects' transfer tests. 56 | 57 | 5. Number of Instances: 132 training instances, 28 test instances 58 | 59 | 6. Number of Attributes: 5 plus the class membership attribute. 3 concepts. 60 | 61 | 7. Attribute Information: 62 | -- 1. name: distinct for each instance and represented numerically 63 | -- 2. hobby: nominal values ranging between 1 and 3 64 | -- 3. age: nominal values ranging between 1 and 4 65 | -- 4. educational level: nominal values ranging between 1 and 4 66 | -- 5. marital status: nominal values ranging between 1 and 4 67 | -- 6. class: nominal value between 1 and 3 68 | 69 | 9. Missing Attribute Values: none 70 | 71 | 10. Class Distribution: see below 72 | 73 | 11. 
Detailed description of the experiment: 74 | 1. 3 categories (1, 2, and neither -- which I call 3) 75 | -- some of the instances could be classified in either class 1 or 2, and 76 | they have been evenly distributed between the two classes 77 | 2. 5 Attributes 78 | -- A. name (a randomly-generated number between 1 and 132) 79 | -- B. hobby (a randomly-generated number between 1 and 3) 80 | -- C. age (a number between 1 and 4) 81 | -- D. education level (a number between 1 and 4) 82 | -- E. marital status (a number between 1 and 4) 83 | 3. Classification: 84 | -- only attributes C-E are diagnostic; values for A and B are ignored 85 | -- Class Neither: if a 4 occurs for any attribute C-E 86 | -- Class 1: Otherwise, if (# of 1's)>(# of 2's) for attributes C-E 87 | -- Class 2: Otherwise, if (# of 2's)>(# of 1's) for attributes C-E 88 | -- Either 1 or 2: Otherwise, if (# of 2's)=(# of 1's) for attributes C-E 89 | 4. Prototypes: 90 | -- Class 1: 111 91 | -- Class 2: 222 92 | -- Class Either: 333 93 | -- Class Neither: 444 94 | 5. Number of training instances: 132 95 | -- Each instance presented 0, 1, or 10 times 96 | -- None of the prototypes seen during training 97 | -- 3 instances from each of categories 1, 2, and either are repeated 98 | 10 times each 99 | -- 3 additional instances from the Either category are shown during 100 | learning 101 | 5. Number of test instances: 28 102 | -- All 9 class 1 103 | -- All 9 class 2 104 | -- All 6 class Either 105 | -- All 4 prototypes 106 | -------------------- 107 | -- 28 total 108 | 109 | Observations of interest: 110 | 1. Relative classification confidence of 111 | -- prototypes for classes 1 and 2 (2 instances) 112 | (Anderson calls these Class 1 instances) 113 | -- instances of class 1 with frequency 10 during training and 114 | instances of class 2 with frequency 10 during training that 115 | are 1 value away from their respective prototypes (6 instances) 116 | (Anderson calls these Class 2 instances) 117 | -- instances of class 1 with frequency 1 during training and 118 | instances of class 2 with frequency 1 during training that 119 | are 1 value away from their respective prototypes (6 instances) 120 | (Anderson calls these Class 3 instances) 121 | -- instances of class 1 with frequency 1 during training and 122 | instances of class 2 with frequency 1 during training that 123 | are 2 values away from their respective prototypes (6 instances) 124 | (Anderson calls these Class 4 instances) 125 | 2. Relative classification recognition of them also 126 | 127 | Some Expected results: 128 | Both frequency and distance from prototype will effect the classification 129 | accuracy of instances. Greater the frequency, higher the classification 130 | confidence. Closer to prototype, higher the classification confidence. 
131 | -------------------------------------------------------------------------------- /data/hayes-roth/hayes-roth.test: -------------------------------------------------------------------------------- 1 | 1,1,1,2,1 2 | 1,1,2,1,1 3 | 1,2,1,1,1 4 | 1,1,1,3,1 5 | 1,1,3,1,1 6 | 1,3,1,1,1 7 | 1,1,3,3,1 8 | 1,3,1,3,1 9 | 1,3,3,1,1 10 | 1,2,2,1,2 11 | 1,2,1,2,2 12 | 1,1,2,2,2 13 | 1,2,2,3,2 14 | 1,2,3,2,2 15 | 1,3,2,2,2 16 | 1,2,3,3,2 17 | 1,3,2,3,2 18 | 1,3,3,2,2 19 | 1,1,3,2,1 20 | 1,3,2,1,2 21 | 1,2,1,3,1 22 | 1,2,3,1,2 23 | 1,1,2,3,1 24 | 1,3,1,2,2 25 | 1,1,1,1,1 26 | 1,2,2,2,2 27 | 1,3,3,3,1 28 | 1,4,4,4,3 29 | -------------------------------------------------------------------------------- /data/hayes-roth/hayes-roth_enc.csv: -------------------------------------------------------------------------------- 1 | "V2.1","V2.2","V2.3","V3.1","V3.2","V3.3","V3.4","V4.1","V4.2","V4.3","V4.4","V5.1","V5.2","V5.3","V5.4","target" 2 | "0","1","0","1","0","0","0","1","0","0","0","0","1","0","0","1" 3 | "0","1","0","1","0","0","0","0","0","1","0","0","1","0","0","2" 4 | "0","0","1","1","0","0","0","0","0","0","1","1","0","0","0","3" 5 | "0","1","0","0","0","0","1","0","1","0","0","0","1","0","0","3" 6 | "1","0","0","1","0","0","0","0","0","1","0","0","0","0","1","3" 7 | "1","0","0","1","0","0","0","0","0","1","0","0","1","0","0","2" 8 | "0","0","1","1","0","0","0","0","0","1","0","0","1","0","0","2" 9 | "0","0","1","0","0","0","1","0","1","0","0","0","0","0","1","3" 10 | "0","1","0","0","1","0","0","1","0","0","0","1","0","0","0","1" 11 | "0","0","1","0","1","0","0","1","0","0","0","1","0","0","0","1" 12 | "1","0","0","0","1","0","0","1","0","0","0","1","0","0","0","1" 13 | "0","1","0","0","1","0","0","0","0","1","0","0","0","0","1","3" 14 | "1","0","0","1","0","0","0","0","1","0","0","1","0","0","0","1" 15 | "0","1","0","1","0","0","0","0","1","0","0","0","1","0","0","2" 16 | "0","1","0","0","0","0","1","1","0","0","0","0","0","0","1","3" 17 | "1","0","0","1","0","0","0","0","0","1","0","0","0","1","0","1" 18 | "0","0","1","0","1","0","0","1","0","0","0","0","1","0","0","2" 19 | "1","0","0","0","1","0","0","1","0","0","0","1","0","0","0","1" 20 | "0","0","1","0","0","1","0","0","1","0","0","1","0","0","0","1" 21 | "0","0","1","1","0","0","0","0","0","1","0","0","1","0","0","1" 22 | "1","0","0","0","1","0","0","0","1","0","0","1","0","0","0","2" 23 | "0","0","1","0","1","0","0","1","0","0","0","0","0","1","0","1" 24 | "0","1","0","1","0","0","0","0","1","0","0","1","0","0","0","1" 25 | "0","0","1","0","1","0","0","1","0","0","0","0","0","1","0","1" 26 | "0","1","0","0","0","1","0","0","1","0","0","1","0","0","0","1" 27 | "0","0","1","0","1","0","0","0","1","0","0","1","0","0","0","2" 28 | "0","0","1","0","1","0","0","1","0","0","0","0","0","1","0","2" 29 | "0","1","0","1","0","0","0","0","1","0","0","0","1","0","0","2" 30 | "1","0","0","1","0","0","0","0","0","1","0","0","1","0","0","1" 31 | "0","0","1","0","1","0","0","1","0","0","0","1","0","0","0","1" 32 | "1","0","0","0","0","0","1","1","0","0","0","1","0","0","0","3" 33 | "0","1","0","0","1","0","0","1","0","0","0","0","0","1","0","1" 34 | "1","0","0","0","1","0","0","1","0","0","0","0","0","1","0","2" 35 | "1","0","0","1","0","0","0","1","0","0","0","0","1","0","0","1" 36 | "0","1","0","0","0","0","1","0","0","1","0","1","0","0","0","3" 37 | "0","0","1","1","0","0","0","0","1","0","0","0","1","0","0","2" 38 | "1","0","0","1","0","0","0","0","1","0","0","0","1","0","0","2" 39 | "0","0","1","0","1","0","0","0","1","0","0","1","0","0","0","2" 40 | 
"1","0","0","0","1","0","0","1","0","0","0","0","1","0","0","2" 41 | "0","0","1","0","0","0","1","0","0","1","0","0","1","0","0","3" 42 | "0","1","0","0","1","0","0","0","1","0","0","1","0","0","0","2" 43 | "0","1","0","0","1","0","0","1","0","0","0","0","1","0","0","2" 44 | "0","0","1","0","1","0","0","1","0","0","0","0","0","1","0","2" 45 | "0","0","1","0","1","0","0","1","0","0","0","1","0","0","0","1" 46 | "0","0","1","1","0","0","0","0","1","0","0","1","0","0","0","1" 47 | "1","0","0","0","1","0","0","1","0","0","0","0","0","1","0","2" 48 | "0","1","0","1","0","0","0","1","0","0","0","0","1","0","0","1" 49 | "1","0","0","1","0","0","0","1","0","0","0","0","1","0","0","1" 50 | "1","0","0","0","1","0","0","0","1","0","0","0","0","1","0","2" 51 | "0","0","1","0","0","1","0","1","0","0","0","1","0","0","0","1" 52 | "0","0","1","0","0","1","0","0","0","1","0","1","0","0","0","1" 53 | "0","0","1","0","1","0","0","1","0","0","0","0","1","0","0","2" 54 | "0","0","1","0","1","0","0","1","0","0","0","0","1","0","0","2" 55 | "0","0","1","1","0","0","0","0","1","0","0","1","0","0","0","1" 56 | "1","0","0","1","0","0","0","1","0","0","0","0","1","0","0","1" 57 | "0","1","0","1","0","0","0","0","0","1","0","0","1","0","0","1" 58 | "0","1","0","0","1","0","0","0","1","0","0","1","0","0","0","2" 59 | "0","1","0","1","0","0","0","0","1","0","0","1","0","0","0","1" 60 | "0","1","0","0","1","0","0","1","0","0","0","0","0","1","0","1" 61 | "0","1","0","1","0","0","0","0","1","0","0","0","1","0","0","2" 62 | "1","0","0","0","1","0","0","0","0","0","1","0","1","0","0","3" 63 | "0","1","0","0","1","0","0","1","0","0","0","0","1","0","0","2" 64 | "1","0","0","1","0","0","0","0","1","0","0","0","0","0","1","3" 65 | "1","0","0","0","0","1","0","0","1","0","0","1","0","0","0","1" 66 | "0","1","0","0","0","0","1","0","0","0","1","0","1","0","0","3" 67 | "0","1","0","0","0","1","0","0","1","0","0","1","0","0","0","1" 68 | "0","0","1","1","0","0","0","0","1","0","0","0","1","0","0","2" 69 | "1","0","0","1","0","0","0","0","1","0","0","0","1","0","0","2" 70 | "1","0","0","0","0","1","0","0","1","0","0","0","0","0","1","3" 71 | "1","0","0","1","0","0","0","0","1","0","0","0","1","0","0","2" 72 | "0","0","1","1","0","0","0","0","0","0","1","0","1","0","0","3" 73 | "0","1","0","1","0","0","0","0","0","1","0","0","1","0","0","2" 74 | "1","0","0","1","0","0","0","0","0","1","0","0","1","0","0","2" 75 | "0","0","1","1","0","0","0","0","0","1","0","0","1","0","0","1" 76 | "1","0","0","0","1","0","0","0","0","0","1","0","0","0","1","3" 77 | "1","0","0","0","0","0","1","0","1","0","0","1","0","0","0","3" 78 | "0","1","0","1","0","0","0","0","1","0","0","1","0","0","0","1" 79 | "0","0","1","0","0","0","1","1","0","0","0","0","1","0","0","3" 80 | "0","1","0","0","1","0","0","1","0","0","0","1","0","0","0","1" 81 | "1","0","0","1","0","0","0","0","1","0","0","1","0","0","0","1" 82 | "0","1","0","0","1","0","0","0","0","0","1","0","0","1","0","3" 83 | "0","0","1","1","0","0","0","0","1","0","0","0","1","0","0","2" 84 | "1","0","0","1","0","0","0","0","0","1","0","0","1","0","0","1" 85 | "1","0","0","0","1","0","0","1","0","0","0","0","0","1","0","1" 86 | "1","0","0","0","0","0","1","0","0","0","1","1","0","0","0","3" 87 | "0","0","1","0","0","1","0","0","0","1","0","0","1","0","0","2" 88 | "0","1","0","0","1","0","0","1","0","0","0","0","0","1","0","2" 89 | "0","0","1","0","0","1","0","0","1","0","0","1","0","0","0","2" 90 | "1","0","0","1","0","0","0","1","0","0","0","0","0","1","0","1" 91 | 
"0","1","0","0","1","0","0","1","0","0","0","0","1","0","0","2" 92 | "0","1","0","0","1","0","0","0","1","0","0","1","0","0","0","2" 93 | "0","1","0","0","0","1","0","0","1","0","0","0","0","1","0","2" 94 | "1","0","0","0","0","1","0","0","1","0","0","1","0","0","0","2" 95 | "0","1","0","0","1","0","0","1","0","0","0","0","1","0","0","2" 96 | "1","0","0","1","0","0","0","1","0","0","0","0","1","0","0","1" 97 | "0","0","1","0","1","0","0","0","1","0","0","1","0","0","0","2" 98 | "0","0","1","0","1","0","0","1","0","0","0","1","0","0","0","1" 99 | "1","0","0","1","0","0","0","0","1","0","0","1","0","0","0","1" 100 | "0","0","1","1","0","0","0","0","0","0","1","0","0","0","1","3" 101 | "0","0","1","0","0","1","0","0","1","0","0","1","0","0","0","2" 102 | "0","1","0","0","0","1","0","0","1","0","0","1","0","0","0","2" 103 | "0","1","0","1","0","0","0","0","0","1","0","1","0","0","0","1" 104 | "1","0","0","0","1","0","0","1","0","0","0","0","1","0","0","2" 105 | "0","0","1","1","0","0","0","1","0","0","0","0","1","0","0","1" 106 | "0","1","0","0","1","0","0","0","0","0","1","1","0","0","0","3" 107 | "1","0","0","0","1","0","0","0","1","0","0","1","0","0","0","2" 108 | "0","1","0","0","0","1","0","0","1","0","0","1","0","0","0","2" 109 | "0","1","0","0","1","0","0","1","0","0","0","0","0","0","1","3" 110 | "1","0","0","0","0","0","1","0","1","0","0","0","0","1","0","3" 111 | "0","1","0","0","1","0","0","1","0","0","0","1","0","0","0","1" 112 | "1","0","0","0","1","0","0","1","0","0","0","1","0","0","0","1" 113 | "0","1","0","0","1","0","0","0","0","1","0","0","1","0","0","2" 114 | "1","0","0","0","0","1","0","0","1","0","0","1","0","0","0","1" 115 | "0","0","1","1","0","0","0","0","1","0","0","1","0","0","0","1" 116 | "0","0","1","1","0","0","0","1","0","0","0","0","1","0","0","1" 117 | "0","0","1","0","0","1","0","1","0","0","0","0","0","0","1","3" 118 | "0","1","0","0","0","1","0","0","0","0","1","1","0","0","0","3" 119 | "1","0","0","0","1","0","0","0","0","1","0","0","0","1","0","2" 120 | "0","0","1","0","0","1","0","0","1","0","0","0","1","0","0","2" 121 | "0","0","1","0","0","1","0","0","0","0","1","0","1","0","0","3" 122 | "1","0","0","0","1","0","0","0","1","0","0","1","0","0","0","2" 123 | "0","1","0","1","0","0","0","1","0","0","0","0","0","0","1","3" 124 | "0","0","1","1","0","0","0","0","1","0","0","0","1","0","0","2" 125 | "0","0","1","0","1","0","0","0","1","0","0","0","0","0","1","3" 126 | "0","1","0","0","0","1","0","1","0","0","0","0","0","1","0","1" 127 | "0","1","0","1","0","0","0","1","0","0","0","0","1","0","0","1" 128 | "0","0","1","0","0","0","1","1","0","0","0","0","0","1","0","3" 129 | "1","0","0","1","0","0","0","0","0","0","1","0","0","1","0","3" 130 | "0","1","0","1","0","0","0","0","1","0","0","1","0","0","0","1" 131 | "1","0","0","0","1","0","0","1","0","0","0","0","1","0","0","2" 132 | "1","0","0","0","1","0","0","0","1","0","0","1","0","0","0","2" 133 | "0","0","1","1","0","0","0","1","0","0","0","0","1","0","0","1" 134 | -------------------------------------------------------------------------------- /data/house-votes-84/Index: -------------------------------------------------------------------------------- 1 | Index of voting-records 2 | 3 | 02 Dec 1996 135 Index 4 | 30 Jun 1993 6868 house-votes-84.names 5 | 30 May 1989 18171 house-votes-84.data 6 | -------------------------------------------------------------------------------- /data/house-votes-84/house-votes-84.names: -------------------------------------------------------------------------------- 1 | 1. 
Title: 1984 United States Congressional Voting Records Database 2 | 3 | 2. Source Information: 4 | (a) Source: Congressional Quarterly Almanac, 98th Congress, 5 | 2nd session 1984, Volume XL: Congressional Quarterly Inc. 6 | Washington, D.C., 1985. 7 | (b) Donor: Jeff Schlimmer (Jeffrey.Schlimmer@a.gp.cs.cmu.edu) 8 | (c) Date: 27 April 1987 9 | 10 | 3. Past Usage 11 | - Publications 12 | 1. Schlimmer, J. C. (1987). Concept acquisition through 13 | representational adjustment. Doctoral dissertation, Department of 14 | Information and Computer Science, University of California, Irvine, CA. 15 | -- Results: about 90%-95% accuracy appears to be STAGGER's asymptote 16 | - Predicted attribute: party affiliation (2 classes) 17 | 18 | 4. Relevant Information: 19 | This data set includes votes for each of the U.S. House of 20 | Representatives Congressmen on the 16 key votes identified by the 21 | CQA. The CQA lists nine different types of votes: voted for, paired 22 | for, and announced for (these three simplified to yea), voted 23 | against, paired against, and announced against (these three 24 | simplified to nay), voted present, voted present to avoid conflict 25 | of interest, and did not vote or otherwise make a position known 26 | (these three simplified to an unknown disposition). 27 | 28 | 5. Number of Instances: 435 (267 democrats, 168 republicans) 29 | 30 | 6. Number of Attributes: 16 + class name = 17 (all Boolean valued) 31 | 32 | 7. Attribute Information: 33 | 1. Class Name: 2 (democrat, republican) 34 | 2. handicapped-infants: 2 (y,n) 35 | 3. water-project-cost-sharing: 2 (y,n) 36 | 4. adoption-of-the-budget-resolution: 2 (y,n) 37 | 5. physician-fee-freeze: 2 (y,n) 38 | 6. el-salvador-aid: 2 (y,n) 39 | 7. religious-groups-in-schools: 2 (y,n) 40 | 8. anti-satellite-test-ban: 2 (y,n) 41 | 9. aid-to-nicaraguan-contras: 2 (y,n) 42 | 10. mx-missile: 2 (y,n) 43 | 11. immigration: 2 (y,n) 44 | 12. synfuels-corporation-cutback: 2 (y,n) 45 | 13. education-spending: 2 (y,n) 46 | 14. superfund-right-to-sue: 2 (y,n) 47 | 15. crime: 2 (y,n) 48 | 16. duty-free-exports: 2 (y,n) 49 | 17. export-administration-act-south-africa: 2 (y,n) 50 | 51 | 8. Missing Attribute Values: Denoted by "?" 52 | 53 | NOTE: It is important to recognize that "?" in this database does 54 | not mean that the value of the attribute is unknown. It 55 | means simply, that the value is not "yea" or "nay" (see 56 | "Relevant Information" section above). 57 | 58 | Attribute: #Missing Values: 59 | 1: 0 60 | 2: 0 61 | 3: 12 62 | 4: 48 63 | 5: 11 64 | 6: 11 65 | 7: 15 66 | 8: 11 67 | 9: 14 68 | 10: 15 69 | 11: 22 70 | 12: 7 71 | 13: 21 72 | 14: 31 73 | 15: 25 74 | 16: 17 75 | 17: 28 76 | 77 | 9. Class Distribution: (2 classes) 78 | 1. 45.2 percent are democrat 79 | 2. 54.8 percent are republican 80 | 81 | Class predictiveness and predictability: Pr(C|A=V) and Pr(A=V|C) 82 | Attribute 1: (A = handicapped-infants) 83 | 0.91; 1.21 (C=democrat; V=y) 84 | 0.09; 0.10 (C=republican; V=y) 85 | 0.43; 0.38 (C=democrat; V=n) 86 | 0.57; 0.41 (C=republican; V=n) 87 | 0.75; 0.03 (C=democrat; V=?) 88 | 0.25; 0.01 (C=republican; V=?) 89 | Attribute 2: (A = water-project-cost-sharing) 90 | 0.62; 0.45 (C=democrat; V=y) 91 | 0.38; 0.23 (C=republican; V=y) 92 | 0.62; 0.45 (C=democrat; V=n) 93 | 0.38; 0.23 (C=republican; V=n) 94 | 0.58; 0.10 (C=democrat; V=?) 95 | 0.42; 0.06 (C=republican; V=?) 
96 | Attribute 3: (A = adoption-of-the-budget-resolution) 97 | 0.91; 0.87 (C=democrat; V=y) 98 | 0.09; 0.07 (C=republican; V=y) 99 | 0.17; 0.11 (C=democrat; V=n) 100 | 0.83; 0.44 (C=republican; V=n) 101 | 0.64; 0.03 (C=democrat; V=?) 102 | 0.36; 0.01 (C=republican; V=?) 103 | Attribute 4: (A = physician-fee-freeze) 104 | 0.08; 0.05 (C=democrat; V=y) 105 | 0.92; 0.50 (C=republican; V=y) 106 | 0.99; 0.92 (C=democrat; V=n) 107 | 0.01; 0.01 (C=republican; V=n) 108 | 0.73; 0.03 (C=democrat; V=?) 109 | 0.27; 0.01 (C=republican; V=?) 110 | Attribute 5: (A = el-salvador-aid) 111 | 0.26; 0.21 (C=democrat; V=y) 112 | 0.74; 0.48 (C=republican; V=y) 113 | 0.96; 0.75 (C=democrat; V=n) 114 | 0.04; 0.02 (C=republican; V=n) 115 | 0.80; 0.04 (C=democrat; V=?) 116 | 0.20; 0.01 (C=republican; V=?) 117 | Attribute 6: (A = religious-groups-in-schools) 118 | 0.45; 0.46 (C=democrat; V=y) 119 | 0.55; 0.46 (C=republican; V=y) 120 | 0.89; 0.51 (C=democrat; V=n) 121 | 0.11; 0.05 (C=republican; V=n) 122 | 0.82; 0.03 (C=democrat; V=?) 123 | 0.18; 0.01 (C=republican; V=?) 124 | Attribute 7: (A = anti-satellite-test-ban) 125 | 0.84; 0.75 (C=democrat; V=y) 126 | 0.16; 0.12 (C=republican; V=y) 127 | 0.32; 0.22 (C=democrat; V=n) 128 | 0.68; 0.38 (C=republican; V=n) 129 | 0.57; 0.03 (C=democrat; V=?) 130 | 0.43; 0.02 (C=republican; V=?) 131 | Attribute 8: (A = aid-to-nicaraguan-contras) 132 | 0.90; 0.82 (C=democrat; V=y) 133 | 0.10; 0.07 (C=republican; V=y) 134 | 0.25; 0.17 (C=democrat; V=n) 135 | 0.75; 0.41 (C=republican; V=n) 136 | 0.27; 0.01 (C=democrat; V=?) 137 | 0.73; 0.03 (C=republican; V=?) 138 | Attribute 9: (A = mx-missile) 139 | 0.91; 0.70 (C=democrat; V=y) 140 | 0.09; 0.06 (C=republican; V=y) 141 | 0.29; 0.22 (C=democrat; V=n) 142 | 0.71; 0.45 (C=republican; V=n) 143 | 0.86; 0.07 (C=democrat; V=?) 144 | 0.14; 0.01 (C=republican; V=?) 145 | Attribute 10: (A = immigration) 146 | 0.57; 0.46 (C=democrat; V=y) 147 | 0.43; 0.28 (C=republican; V=y) 148 | 0.66; 0.52 (C=democrat; V=n) 149 | 0.34; 0.23 (C=republican; V=n) 150 | 0.57; 0.01 (C=democrat; V=?) 151 | 0.43; 0.01 (C=republican; V=?) 152 | Attribute 11: (A = synfuels-corporation-cutback) 153 | 0.86; 0.48 (C=democrat; V=y) 154 | 0.14; 0.06 (C=republican; V=y) 155 | 0.48; 0.47 (C=democrat; V=n) 156 | 0.52; 0.43 (C=republican; V=n) 157 | 0.57; 0.04 (C=democrat; V=?) 158 | 0.43; 0.03 (C=republican; V=?) 159 | Attribute 12: (A = education-spending) 160 | 0.21; 0.13 (C=democrat; V=y) 161 | 0.79; 0.42 (C=republican; V=y) 162 | 0.91; 0.80 (C=democrat; V=n) 163 | 0.09; 0.06 (C=republican; V=n) 164 | 0.58; 0.07 (C=democrat; V=?) 165 | 0.42; 0.04 (C=republican; V=?) 166 | Attribute 13: (A = superfund-right-to-sue) 167 | 0.35; 0.27 (C=democrat; V=y) 168 | 0.65; 0.42 (C=republican; V=y) 169 | 0.89; 0.67 (C=democrat; V=n) 170 | 0.11; 0.07 (C=republican; V=n) 171 | 0.60; 0.06 (C=democrat; V=?) 172 | 0.40; 0.03 (C=republican; V=?) 173 | Attribute 14: (A = crime) 174 | 0.36; 0.34 (C=democrat; V=y) 175 | 0.64; 0.49 (C=republican; V=y) 176 | 0.98; 0.63 (C=democrat; V=n) 177 | 0.02; 0.01 (C=republican; V=n) 178 | 0.59; 0.04 (C=democrat; V=?) 179 | 0.41; 0.02 (C=republican; V=?) 180 | Attribute 15: (A = duty-free-exports) 181 | 0.92; 0.60 (C=democrat; V=y) 182 | 0.08; 0.04 (C=republican; V=y) 183 | 0.39; 0.34 (C=democrat; V=n) 184 | 0.61; 0.44 (C=republican; V=n) 185 | 0.57; 0.06 (C=democrat; V=?) 186 | 0.43; 0.04 (C=republican; V=?) 
187 | Attribute 16: (A = export-administration-act-south-africa) 188 | 0.64; 0.65 (C=democrat; V=y) 189 | 0.36; 0.30 (C=republican; V=y) 190 | 0.19; 0.04 (C=democrat; V=n) 191 | 0.81; 0.15 (C=republican; V=n) 192 | 0.79; 0.31 (C=democrat; V=?) 193 | 0.21; 0.07 (C=republican; V=?) 194 | -------------------------------------------------------------------------------- /data/monks/Index.txt: -------------------------------------------------------------------------------- 1 | Index of monks-problems 2 | 3 | 02 Dec 1996 405 Index 4 | 29 Jun 1993 2852 monks.names 5 | 12 Feb 1993 10260 monks-1.test 6 | 12 Feb 1993 2947 monks-1.train 7 | 12 Feb 1993 10260 monks-2.test 8 | 12 Feb 1993 4013 monks-2.train 9 | 12 Feb 1993 10260 monks-3.test 10 | 12 Feb 1993 2886 monks-3.train 11 | 12 Feb 1993 57073 thrun.comparison.dat 12 | 12 Feb 1993 588760 thrun.comparison.ps.Z 13 | 12 Feb 1993 10227 update 14 | -------------------------------------------------------------------------------- /data/monks/monk1_enc.csv: -------------------------------------------------------------------------------- 1 | "Feat_0.1","Feat_0.2","Feat_0.3","Feat_1.1","Feat_1.2","Feat_1.3","Feat_2.1","Feat_3.1","Feat_3.2","Feat_3.3","Feat_4.1","Feat_4.2","Feat_4.3","Feat_4.4","Feat_5.1","target" 2 | 1,0,0,1,0,0,1,1,0,0,0,0,1,0,1,"1" 3 | 1,0,0,1,0,0,1,1,0,0,0,0,1,0,0,"1" 4 | 1,0,0,1,0,0,1,0,0,1,0,1,0,0,1,"1" 5 | 1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,"1" 6 | 1,0,0,1,0,0,0,1,0,0,0,1,0,0,1,"1" 7 | 1,0,0,1,0,0,0,1,0,0,0,1,0,0,0,"1" 8 | 1,0,0,1,0,0,0,0,1,0,0,0,1,0,1,"1" 9 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,1,"1" 10 | 1,0,0,1,0,0,0,0,0,1,1,0,0,0,0,"1" 11 | 1,0,0,0,1,0,1,1,0,0,1,0,0,0,0,"1" 12 | 1,0,0,0,1,0,1,1,0,0,0,1,0,0,1,"0" 13 | 1,0,0,0,1,0,1,1,0,0,0,0,1,0,1,"0" 14 | 1,0,0,0,1,0,1,1,0,0,0,0,0,1,0,"0" 15 | 1,0,0,0,1,0,1,0,1,0,1,0,0,0,1,"1" 16 | 1,0,0,0,1,0,1,0,1,0,0,0,1,0,1,"0" 17 | 1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,"0" 18 | 1,0,0,0,1,0,1,0,1,0,0,0,0,1,0,"0" 19 | 1,0,0,0,1,0,1,0,0,1,0,1,0,0,1,"0" 20 | 1,0,0,0,1,0,1,0,0,1,0,0,0,1,0,"0" 21 | 1,0,0,0,1,0,0,1,0,0,0,1,0,0,0,"0" 22 | 1,0,0,0,1,0,0,0,1,0,0,0,1,0,0,"0" 23 | 1,0,0,0,1,0,0,0,1,0,0,0,0,1,1,"0" 24 | 1,0,0,0,1,0,0,0,1,0,0,0,0,1,0,"0" 25 | 1,0,0,0,1,0,0,0,0,1,0,1,0,0,0,"0" 26 | 1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,"0" 27 | 1,0,0,0,1,0,0,0,0,1,0,0,1,0,0,"0" 28 | 1,0,0,0,0,1,1,1,0,0,0,1,0,0,1,"0" 29 | 1,0,0,0,0,1,1,1,0,0,0,0,0,1,1,"0" 30 | 1,0,0,0,0,1,1,0,1,0,0,1,0,0,1,"0" 31 | 1,0,0,0,0,1,1,0,1,0,0,0,0,1,1,"0" 32 | 1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,"1" 33 | 1,0,0,0,0,1,1,0,0,1,0,1,0,0,0,"0" 34 | 1,0,0,0,0,1,1,0,0,1,0,0,1,0,1,"0" 35 | 1,0,0,0,0,1,1,0,0,1,0,0,0,1,1,"0" 36 | 1,0,0,0,0,1,1,0,0,1,0,0,0,1,0,"0" 37 | 1,0,0,0,0,1,0,1,0,0,0,1,0,0,0,"0" 38 | 1,0,0,0,0,1,0,0,1,0,1,0,0,0,0,"1" 39 | 1,0,0,0,0,1,0,0,1,0,0,1,0,0,0,"0" 40 | 1,0,0,0,0,1,0,0,1,0,0,0,1,0,0,"0" 41 | 1,0,0,0,0,1,0,0,1,0,0,0,0,1,1,"0" 42 | 1,0,0,0,0,1,0,0,1,0,0,0,0,1,0,"0" 43 | 1,0,0,0,0,1,0,0,0,1,1,0,0,0,1,"1" 44 | 1,0,0,0,0,1,0,0,0,1,0,1,0,0,1,"0" 45 | 1,0,0,0,0,1,0,0,0,1,0,0,0,1,1,"0" 46 | 1,0,0,0,0,1,0,0,0,1,0,0,0,1,0,"0" 47 | 0,1,0,1,0,0,1,1,0,0,0,0,1,0,1,"0" 48 | 0,1,0,1,0,0,1,1,0,0,0,0,1,0,0,"0" 49 | 0,1,0,1,0,0,1,0,1,0,1,0,0,0,1,"1" 50 | 0,1,0,1,0,0,1,0,1,0,1,0,0,0,0,"1" 51 | 0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,"0" 52 | 0,1,0,1,0,0,1,0,1,0,0,0,1,0,1,"0" 53 | 0,1,0,1,0,0,1,0,1,0,0,0,0,1,1,"0" 54 | 0,1,0,1,0,0,1,0,1,0,0,0,0,1,0,"0" 55 | 0,1,0,1,0,0,1,0,0,1,0,0,0,1,1,"0" 56 | 0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,"0" 57 | 0,1,0,1,0,0,0,1,0,0,0,0,1,0,1,"0" 58 | 0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,"0" 59 | 0,1,0,1,0,0,0,0,1,0,0,0,1,0,1,"0" 60 | 
0,1,0,1,0,0,0,0,1,0,0,0,0,1,0,"0" 61 | 0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,"0" 62 | 0,1,0,1,0,0,0,0,0,1,0,0,0,1,1,"0" 63 | 0,1,0,0,1,0,1,1,0,0,0,1,0,0,1,"1" 64 | 0,1,0,0,1,0,1,1,0,0,0,1,0,0,0,"1" 65 | 0,1,0,0,1,0,1,1,0,0,0,0,1,0,1,"1" 66 | 0,1,0,0,1,0,1,0,1,0,0,0,1,0,0,"1" 67 | 0,1,0,0,1,0,1,0,0,1,1,0,0,0,1,"1" 68 | 0,1,0,0,1,0,1,0,0,1,1,0,0,0,0,"1" 69 | 0,1,0,0,1,0,1,0,0,1,0,1,0,0,0,"1" 70 | 0,1,0,0,1,0,1,0,0,1,0,0,1,0,0,"1" 71 | 0,1,0,0,1,0,1,0,0,1,0,0,0,1,0,"1" 72 | 0,1,0,0,1,0,0,1,0,0,1,0,0,0,1,"1" 73 | 0,1,0,0,1,0,0,1,0,0,0,0,1,0,0,"1" 74 | 0,1,0,0,1,0,0,1,0,0,0,0,0,1,1,"1" 75 | 0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,"1" 76 | 0,1,0,0,1,0,0,0,1,0,0,1,0,0,1,"1" 77 | 0,1,0,0,1,0,0,0,0,1,0,0,0,1,1,"1" 78 | 0,1,0,0,0,1,1,1,0,0,1,0,0,0,1,"1" 79 | 0,1,0,0,0,1,1,0,1,0,1,0,0,0,1,"1" 80 | 0,1,0,0,0,1,1,0,1,0,0,0,1,0,1,"0" 81 | 0,1,0,0,0,1,1,0,0,1,1,0,0,0,0,"1" 82 | 0,1,0,0,0,1,1,0,0,1,0,0,1,0,1,"0" 83 | 0,1,0,0,0,1,1,0,0,1,0,0,0,1,0,"0" 84 | 0,1,0,0,0,1,0,1,0,0,0,0,1,0,0,"0" 85 | 0,1,0,0,0,1,0,0,1,0,1,0,0,0,1,"1" 86 | 0,1,0,0,0,1,0,0,1,0,1,0,0,0,0,"1" 87 | 0,1,0,0,0,1,0,0,1,0,0,1,0,0,1,"0" 88 | 0,1,0,0,0,1,0,0,0,1,0,0,1,0,0,"0" 89 | 0,0,1,1,0,0,1,1,0,0,1,0,0,0,1,"1" 90 | 0,0,1,1,0,0,1,1,0,0,1,0,0,0,0,"1" 91 | 0,0,1,1,0,0,1,0,1,0,1,0,0,0,1,"1" 92 | 0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,"0" 93 | 0,0,1,1,0,0,1,0,0,1,0,1,0,0,0,"0" 94 | 0,0,1,1,0,0,0,1,0,0,1,0,0,0,1,"1" 95 | 0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,"0" 96 | 0,0,1,1,0,0,0,0,1,0,0,1,0,0,0,"0" 97 | 0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,"0" 98 | 0,0,1,1,0,0,0,0,0,1,0,1,0,0,0,"0" 99 | 0,0,1,0,1,0,1,1,0,0,1,0,0,0,1,"1" 100 | 0,0,1,0,1,0,1,1,0,0,0,0,0,1,0,"0" 101 | 0,0,1,0,1,0,1,0,1,0,1,0,0,0,0,"1" 102 | 0,0,1,0,1,0,1,0,1,0,0,0,0,1,0,"0" 103 | 0,0,1,0,1,0,0,1,0,0,1,0,0,0,1,"1" 104 | 0,0,1,0,1,0,0,1,0,0,1,0,0,0,0,"1" 105 | 0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,"0" 106 | 0,0,1,0,1,0,0,0,0,1,1,0,0,0,1,"1" 107 | 0,0,1,0,1,0,0,0,0,1,0,1,0,0,1,"0" 108 | 0,0,1,0,1,0,0,0,0,1,0,0,0,1,1,"0" 109 | 0,0,1,0,0,1,1,1,0,0,1,0,0,0,1,"1" 110 | 0,0,1,0,0,1,1,1,0,0,0,1,0,0,1,"1" 111 | 0,0,1,0,0,1,1,1,0,0,0,0,0,1,0,"1" 112 | 0,0,1,0,0,1,1,0,1,0,0,0,1,0,0,"1" 113 | 0,0,1,0,0,1,1,0,1,0,0,0,0,1,0,"1" 114 | 0,0,1,0,0,1,1,0,0,1,1,0,0,0,0,"1" 115 | 0,0,1,0,0,1,1,0,0,1,0,1,0,0,1,"1" 116 | 0,0,1,0,0,1,1,0,0,1,0,1,0,0,0,"1" 117 | 0,0,1,0,0,1,1,0,0,1,0,0,0,1,0,"1" 118 | 0,0,1,0,0,1,0,1,0,0,1,0,0,0,1,"1" 119 | 0,0,1,0,0,1,0,1,0,0,0,0,1,0,0,"1" 120 | 0,0,1,0,0,1,0,1,0,0,0,0,0,1,1,"1" 121 | 0,0,1,0,0,1,0,1,0,0,0,0,0,1,0,"1" 122 | 0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,"1" 123 | 0,0,1,0,0,1,0,0,0,1,0,1,0,0,0,"1" 124 | 0,0,1,0,0,1,0,0,0,1,0,0,1,0,0,"1" 125 | 0,0,1,0,0,1,0,0,0,1,0,0,0,1,0,"1" 126 | -------------------------------------------------------------------------------- /data/monks/monk2_enc.csv: -------------------------------------------------------------------------------- 1 | "Feat0.1","Feat0.2","Feat0.3","Feat1.1","Feat1.2","Feat1.3","Feat2.1","Feat3.1","Feat3.2","Feat3.3","Feat4.1","Feat4.2","Feat4.3","Feat4.4","Feat5.1","target" 2 | 1,0,0,1,0,0,1,1,0,0,0,1,0,0,0,"0" 3 | 1,0,0,1,0,0,1,1,0,0,0,0,0,1,1,"0" 4 | 1,0,0,1,0,0,1,0,1,0,1,0,0,0,1,"0" 5 | 1,0,0,1,0,0,1,0,1,0,1,0,0,0,0,"0" 6 | 1,0,0,1,0,0,1,0,1,0,0,1,0,0,1,"0" 7 | 1,0,0,1,0,0,1,0,1,0,0,0,1,0,1,"0" 8 | 1,0,0,1,0,0,1,0,1,0,0,0,0,1,1,"0" 9 | 1,0,0,1,0,0,1,0,0,1,0,1,0,0,1,"0" 10 | 1,0,0,1,0,0,1,0,0,1,0,0,0,1,1,"0" 11 | 1,0,0,1,0,0,0,1,0,0,1,0,0,0,1,"0" 12 | 1,0,0,1,0,0,0,1,0,0,1,0,0,0,0,"0" 13 | 1,0,0,1,0,0,0,0,1,0,0,0,1,0,1,"0" 14 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,1,"0" 15 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,"1" 16 | 1,0,0,1,0,0,0,0,0,1,1,0,0,0,0,"0" 17 | 
1,0,0,1,0,0,0,0,0,1,0,1,0,0,0,"1" 18 | 1,0,0,0,1,0,1,1,0,0,1,0,0,0,0,"0" 19 | 1,0,0,0,1,0,1,0,1,0,1,0,0,0,0,"0" 20 | 1,0,0,0,1,0,1,0,1,0,0,1,0,0,0,"1" 21 | 1,0,0,0,1,0,1,0,1,0,0,0,1,0,1,"0" 22 | 1,0,0,0,1,0,1,0,1,0,0,0,1,0,0,"1" 23 | 1,0,0,0,1,0,1,0,1,0,0,0,0,1,1,"0" 24 | 1,0,0,0,1,0,1,0,0,1,1,0,0,0,1,"0" 25 | 1,0,0,0,1,0,1,0,0,1,1,0,0,0,0,"0" 26 | 1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,"1" 27 | 1,0,0,0,1,0,1,0,0,1,0,0,1,0,1,"0" 28 | 1,0,0,0,1,0,1,0,0,1,0,0,1,0,0,"1" 29 | 1,0,0,0,1,0,1,0,0,1,0,0,0,1,1,"0" 30 | 1,0,0,0,1,0,1,0,0,1,0,0,0,1,0,"1" 31 | 1,0,0,0,1,0,0,1,0,0,0,1,0,0,1,"0" 32 | 1,0,0,0,1,0,0,1,0,0,0,0,0,1,1,"0" 33 | 1,0,0,0,1,0,0,0,1,0,0,0,1,0,1,"1" 34 | 1,0,0,0,1,0,0,0,1,0,0,0,0,1,1,"1" 35 | 1,0,0,0,1,0,0,0,0,1,1,0,0,0,1,"0" 36 | 1,0,0,0,1,0,0,0,0,1,1,0,0,0,0,"1" 37 | 1,0,0,0,1,0,0,0,0,1,0,0,1,0,1,"1" 38 | 1,0,0,0,1,0,0,0,0,1,0,0,1,0,0,"0" 39 | 1,0,0,0,1,0,0,0,0,1,0,0,0,1,1,"1" 40 | 1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,"0" 41 | 1,0,0,0,0,1,1,1,0,0,1,0,0,0,0,"0" 42 | 1,0,0,0,0,1,1,1,0,0,0,1,0,0,0,"0" 43 | 1,0,0,0,0,1,1,1,0,0,0,0,1,0,1,"0" 44 | 1,0,0,0,0,1,1,1,0,0,0,0,1,0,0,"0" 45 | 1,0,0,0,0,1,1,0,1,0,0,1,0,0,1,"0" 46 | 1,0,0,0,0,1,1,0,1,0,0,1,0,0,0,"1" 47 | 1,0,0,0,0,1,1,0,1,0,0,0,1,0,0,"1" 48 | 1,0,0,0,0,1,1,0,1,0,0,0,0,1,1,"0" 49 | 1,0,0,0,0,1,1,0,0,1,0,1,0,0,0,"1" 50 | 1,0,0,0,0,1,1,0,0,1,0,0,1,0,1,"0" 51 | 1,0,0,0,0,1,1,0,0,1,0,0,0,1,0,"1" 52 | 1,0,0,0,0,1,0,1,0,0,0,0,1,0,1,"0" 53 | 1,0,0,0,0,1,0,1,0,0,0,0,1,0,0,"1" 54 | 1,0,0,0,0,1,0,1,0,0,0,0,0,1,1,"0" 55 | 1,0,0,0,0,1,0,0,1,0,1,0,0,0,0,"1" 56 | 1,0,0,0,0,1,0,0,1,0,0,0,1,0,0,"0" 57 | 1,0,0,0,0,1,0,0,1,0,0,0,0,1,0,"0" 58 | 1,0,0,0,0,1,0,0,0,1,0,1,0,0,1,"1" 59 | 0,1,0,1,0,0,1,1,0,0,1,0,0,0,1,"0" 60 | 0,1,0,1,0,0,1,1,0,0,0,1,0,0,0,"0" 61 | 0,1,0,1,0,0,1,1,0,0,0,0,1,0,1,"0" 62 | 0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,"1" 63 | 0,1,0,1,0,0,1,0,0,1,1,0,0,0,0,"0" 64 | 0,1,0,1,0,0,1,0,0,1,0,1,0,0,0,"1" 65 | 0,1,0,1,0,0,1,0,0,1,0,0,1,0,0,"1" 66 | 0,1,0,1,0,0,1,0,0,1,0,0,0,1,1,"0" 67 | 0,1,0,1,0,0,0,1,0,0,1,0,0,0,1,"0" 68 | 0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,"1" 69 | 0,1,0,1,0,0,0,1,0,0,0,0,0,1,1,"0" 70 | 0,1,0,1,0,0,0,0,1,0,0,1,0,0,1,"1" 71 | 0,1,0,1,0,0,0,0,1,0,0,0,0,1,0,"0" 72 | 0,1,0,1,0,0,0,0,0,1,1,0,0,0,1,"0" 73 | 0,1,0,1,0,0,0,0,0,1,1,0,0,0,0,"1" 74 | 0,1,0,1,0,0,0,0,0,1,0,1,0,0,0,"0" 75 | 0,1,0,1,0,0,0,0,0,1,0,0,1,0,0,"0" 76 | 0,1,0,1,0,0,0,0,0,1,0,0,0,1,0,"0" 77 | 0,1,0,0,1,0,1,1,0,0,0,0,1,0,1,"0" 78 | 0,1,0,0,1,0,1,1,0,0,0,0,0,1,0,"1" 79 | 0,1,0,0,1,0,1,0,1,0,1,0,0,0,1,"0" 80 | 0,1,0,0,1,0,1,0,1,0,0,0,1,0,1,"1" 81 | 0,1,0,0,1,0,1,0,0,1,0,0,1,0,1,"1" 82 | 0,1,0,0,1,0,1,0,0,1,0,0,1,0,0,"0" 83 | 0,1,0,0,1,0,1,0,0,1,0,0,0,1,1,"1" 84 | 0,1,0,0,1,0,0,1,0,0,1,0,0,0,1,"0" 85 | 0,1,0,0,1,0,0,1,0,0,0,1,0,0,0,"0" 86 | 0,1,0,0,1,0,0,1,0,0,0,0,1,0,0,"0" 87 | 0,1,0,0,1,0,0,1,0,0,0,0,0,1,1,"1" 88 | 0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,"0" 89 | 0,1,0,0,1,0,0,0,1,0,1,0,0,0,1,"1" 90 | 0,1,0,0,1,0,0,0,1,0,0,1,0,0,0,"0" 91 | 0,1,0,0,1,0,0,0,1,0,0,0,1,0,1,"0" 92 | 0,1,0,0,1,0,0,0,0,1,1,0,0,0,1,"1" 93 | 0,1,0,0,1,0,0,0,0,1,0,1,0,0,1,"0" 94 | 0,1,0,0,1,0,0,0,0,1,0,1,0,0,0,"0" 95 | 0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,"0" 96 | 0,1,0,0,0,1,1,1,0,0,1,0,0,0,1,"0" 97 | 0,1,0,0,0,1,1,1,0,0,1,0,0,0,0,"0" 98 | 0,1,0,0,0,1,1,1,0,0,0,0,1,0,0,"1" 99 | 0,1,0,0,0,1,1,0,1,0,1,0,0,0,1,"0" 100 | 0,1,0,0,0,1,1,0,1,0,0,0,1,0,1,"1" 101 | 0,1,0,0,0,1,1,0,1,0,0,0,1,0,0,"0" 102 | 0,1,0,0,0,1,1,0,1,0,0,0,0,1,0,"0" 103 | 0,1,0,0,0,1,1,0,0,1,1,0,0,0,0,"1" 104 | 0,1,0,0,0,1,1,0,0,1,0,1,0,0,1,"1" 105 | 0,1,0,0,0,1,1,0,0,1,0,0,0,1,1,"1" 106 | 0,1,0,0,0,1,0,1,0,0,1,0,0,0,0,"1" 107 | 0,1,0,0,0,1,0,1,0,0,0,1,0,0,1,"1" 
108 | 0,1,0,0,0,1,0,1,0,0,0,0,1,0,1,"1" 109 | 0,1,0,0,0,1,0,1,0,0,0,0,0,1,0,"0" 110 | 0,1,0,0,0,1,0,0,1,0,1,0,0,0,1,"1" 111 | 0,1,0,0,0,1,0,0,1,0,0,1,0,0,1,"0" 112 | 0,1,0,0,0,1,0,0,1,0,0,0,1,0,0,"0" 113 | 0,1,0,0,0,1,0,0,0,1,0,0,1,0,1,"0" 114 | 0,1,0,0,0,1,0,0,0,1,0,0,1,0,0,"0" 115 | 0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,"0" 116 | 0,0,1,1,0,0,1,1,0,0,0,0,0,1,1,"0" 117 | 0,0,1,1,0,0,1,0,1,0,1,0,0,0,0,"0" 118 | 0,0,1,1,0,0,1,0,1,0,0,1,0,0,0,"1" 119 | 0,0,1,1,0,0,1,0,1,0,0,0,1,0,0,"1" 120 | 0,0,1,1,0,0,1,0,1,0,0,0,0,1,1,"0" 121 | 0,0,1,1,0,0,1,0,1,0,0,0,0,1,0,"1" 122 | 0,0,1,1,0,0,1,0,0,1,1,0,0,0,1,"0" 123 | 0,0,1,1,0,0,1,0,0,1,1,0,0,0,0,"0" 124 | 0,0,1,1,0,0,1,0,0,1,0,1,0,0,0,"1" 125 | 0,0,1,1,0,0,1,0,0,1,0,0,1,0,0,"1" 126 | 0,0,1,1,0,0,0,1,0,0,1,0,0,0,1,"0" 127 | 0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,"1" 128 | 0,0,1,1,0,0,0,1,0,0,0,0,1,0,1,"0" 129 | 0,0,1,1,0,0,0,1,0,0,0,0,1,0,0,"1" 130 | 0,0,1,1,0,0,0,1,0,0,0,0,0,1,1,"0" 131 | 0,0,1,1,0,0,0,1,0,0,0,0,0,1,0,"1" 132 | 0,0,1,1,0,0,0,0,1,0,0,1,0,0,1,"1" 133 | 0,0,1,1,0,0,0,0,0,1,1,0,0,0,0,"1" 134 | 0,0,1,1,0,0,0,0,0,1,0,1,0,0,1,"1" 135 | 0,0,1,1,0,0,0,0,0,1,0,1,0,0,0,"0" 136 | 0,0,1,1,0,0,0,0,0,1,0,0,0,1,0,"0" 137 | 0,0,1,0,1,0,1,1,0,0,1,0,0,0,0,"0" 138 | 0,0,1,0,1,0,1,1,0,0,0,1,0,0,0,"1" 139 | 0,0,1,0,1,0,1,1,0,0,0,0,1,0,1,"0" 140 | 0,0,1,0,1,0,1,1,0,0,0,0,1,0,0,"1" 141 | 0,0,1,0,1,0,1,0,1,0,1,0,0,0,0,"1" 142 | 0,0,1,0,1,0,1,0,1,0,0,1,0,0,1,"1" 143 | 0,0,1,0,1,0,1,0,0,1,1,0,0,0,1,"0" 144 | 0,0,1,0,1,0,1,0,0,1,0,1,0,0,1,"1" 145 | 0,0,1,0,1,0,1,0,0,1,0,0,1,0,1,"1" 146 | 0,0,1,0,1,0,1,0,0,1,0,0,1,0,0,"0" 147 | 0,0,1,0,1,0,0,1,0,0,1,0,0,0,1,"0" 148 | 0,0,1,0,1,0,0,1,0,0,0,1,0,0,0,"0" 149 | 0,0,1,0,1,0,0,1,0,0,0,0,1,0,1,"1" 150 | 0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,"0" 151 | 0,0,1,0,1,0,0,0,1,0,1,0,0,0,1,"1" 152 | 0,0,1,0,1,0,0,0,1,0,0,1,0,0,1,"0" 153 | 0,0,1,0,1,0,0,0,1,0,0,1,0,0,0,"0" 154 | 0,0,1,0,1,0,0,0,1,0,0,0,1,0,0,"0" 155 | 0,0,1,0,1,0,0,0,0,1,1,0,0,0,1,"1" 156 | 0,0,1,0,1,0,0,0,0,1,0,0,1,0,0,"0" 157 | 0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,"0" 158 | 0,0,1,0,0,1,1,1,0,0,1,0,0,0,1,"0" 159 | 0,0,1,0,0,1,1,1,0,0,0,1,0,0,1,"0" 160 | 0,0,1,0,0,1,1,1,0,0,0,0,1,0,1,"0" 161 | 0,0,1,0,0,1,1,1,0,0,0,0,1,0,0,"1" 162 | 0,0,1,0,0,1,1,0,1,0,0,0,1,0,0,"0" 163 | 0,0,1,0,0,1,0,1,0,0,1,0,0,0,1,"0" 164 | 0,0,1,0,0,1,0,0,1,0,1,0,0,0,1,"1" 165 | 0,0,1,0,0,1,0,0,1,0,0,1,0,0,1,"0" 166 | 0,0,1,0,0,1,0,0,1,0,0,0,1,0,1,"0" 167 | 0,0,1,0,0,1,0,0,1,0,0,0,1,0,0,"0" 168 | 0,0,1,0,0,1,0,0,0,1,1,0,0,0,1,"1" 169 | 0,0,1,0,0,1,0,0,0,1,0,1,0,0,1,"0" 170 | 0,0,1,0,0,1,0,0,0,1,0,0,0,1,0,"0" 171 | -------------------------------------------------------------------------------- /data/monks/monk3_enc.csv: -------------------------------------------------------------------------------- 1 | "Feat0.1","Feat0.2","Feat0.3","Feat1.1","Feat1.2","Feat1.3","Feat2.1","Feat3.1","Feat3.2","Feat3.3","Feat4.1","Feat4.2","Feat4.3","Feat4.4","Feat5.1","target" 2 | 1,0,0,1,0,0,1,1,0,0,1,0,0,0,0,"1" 3 | 1,0,0,1,0,0,1,1,0,0,0,1,0,0,1,"1" 4 | 1,0,0,1,0,0,1,1,0,0,0,1,0,0,0,"1" 5 | 1,0,0,1,0,0,1,1,0,0,0,0,1,0,1,"0" 6 | 1,0,0,1,0,0,1,1,0,0,0,0,0,1,1,"0" 7 | 1,0,0,1,0,0,1,0,1,0,1,0,0,0,1,"1" 8 | 1,0,0,1,0,0,1,0,1,0,0,1,0,0,0,"1" 9 | 1,0,0,1,0,0,1,0,1,0,0,0,0,1,0,"0" 10 | 1,0,0,1,0,0,0,1,0,0,0,1,0,0,0,"1" 11 | 1,0,0,1,0,0,0,1,0,0,0,0,0,1,0,"0" 12 | 1,0,0,1,0,0,0,0,1,0,0,1,0,0,0,"1" 13 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,1,"0" 14 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,"0" 15 | 1,0,0,1,0,0,0,0,0,1,1,0,0,0,1,"1" 16 | 1,0,0,1,0,0,0,0,0,1,1,0,0,0,0,"1" 17 | 1,0,0,1,0,0,0,0,0,1,0,0,1,0,1,"1" 18 | 1,0,0,1,0,0,0,0,0,1,0,0,1,0,0,"1" 19 | 
1,0,0,0,1,0,1,1,0,0,0,0,1,0,1,"1" 20 | 1,0,0,0,1,0,1,0,1,0,0,1,0,0,1,"1" 21 | 1,0,0,0,1,0,1,0,1,0,0,1,0,0,0,"1" 22 | 1,0,0,0,1,0,1,0,1,0,0,0,1,0,1,"0" 23 | 1,0,0,0,1,0,1,0,0,1,1,0,0,0,1,"1" 24 | 1,0,0,0,1,0,1,0,0,1,1,0,0,0,0,"1" 25 | 1,0,0,0,1,0,1,0,0,1,0,1,0,0,1,"1" 26 | 1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,"1" 27 | 1,0,0,0,1,0,1,0,0,1,0,0,1,0,0,"1" 28 | 1,0,0,0,1,0,1,0,0,1,0,0,0,1,1,"0" 29 | 1,0,0,0,1,0,0,1,0,0,0,0,1,0,1,"1" 30 | 1,0,0,0,1,0,0,1,0,0,0,0,0,1,0,"0" 31 | 1,0,0,0,1,0,0,0,1,0,1,0,0,0,1,"1" 32 | 1,0,0,0,1,0,0,0,1,0,0,1,0,0,1,"1" 33 | 1,0,0,0,1,0,0,0,1,0,0,1,0,0,0,"1" 34 | 1,0,0,0,1,0,0,0,0,1,1,0,0,0,1,"1" 35 | 1,0,0,0,1,0,0,0,0,1,0,1,0,0,1,"1" 36 | 1,0,0,0,1,0,0,0,0,1,0,1,0,0,0,"1" 37 | 1,0,0,0,0,1,1,1,0,0,0,1,0,0,1,"0" 38 | 1,0,0,0,0,1,1,1,0,0,0,0,0,1,1,"0" 39 | 1,0,0,0,0,1,1,0,1,0,0,0,1,0,0,"0" 40 | 1,0,0,0,0,1,1,0,1,0,0,0,0,1,1,"0" 41 | 1,0,0,0,0,1,1,0,0,1,1,0,0,0,1,"0" 42 | 1,0,0,0,0,1,1,0,0,1,0,0,1,0,1,"0" 43 | 1,0,0,0,0,1,0,1,0,0,1,0,0,0,1,"0" 44 | 1,0,0,0,0,1,0,1,0,0,1,0,0,0,0,"0" 45 | 1,0,0,0,0,1,0,1,0,0,0,1,0,0,1,"0" 46 | 1,0,0,0,0,1,0,1,0,0,0,0,0,1,0,"0" 47 | 1,0,0,0,0,1,0,0,1,0,0,0,1,0,0,"0" 48 | 1,0,0,0,0,1,0,0,1,0,0,0,0,1,0,"0" 49 | 1,0,0,0,0,1,0,0,0,1,0,0,0,1,1,"0" 50 | 0,1,0,1,0,0,1,1,0,0,1,0,0,0,1,"1" 51 | 0,1,0,1,0,0,1,1,0,0,1,0,0,0,0,"1" 52 | 0,1,0,1,0,0,1,1,0,0,0,0,0,1,1,"0" 53 | 0,1,0,1,0,0,1,1,0,0,0,0,0,1,0,"0" 54 | 0,1,0,1,0,0,1,0,1,0,1,0,0,0,1,"1" 55 | 0,1,0,1,0,0,1,0,1,0,1,0,0,0,0,"1" 56 | 0,1,0,1,0,0,1,0,0,1,0,1,0,0,0,"1" 57 | 0,1,0,1,0,0,1,0,0,1,0,0,1,0,0,"1" 58 | 0,1,0,1,0,0,1,0,0,1,0,0,0,1,1,"0" 59 | 0,1,0,1,0,0,0,1,0,0,0,1,0,0,0,"1" 60 | 0,1,0,1,0,0,0,0,1,0,0,0,0,1,1,"0" 61 | 0,1,0,1,0,0,0,0,0,1,1,0,0,0,0,"1" 62 | 0,1,0,0,1,0,1,1,0,0,0,0,1,0,0,"1" 63 | 0,1,0,0,1,0,1,1,0,0,0,0,0,1,0,"0" 64 | 0,1,0,0,1,0,1,0,1,0,1,0,0,0,0,"1" 65 | 0,1,0,0,1,0,1,0,1,0,0,1,0,0,1,"0" 66 | 0,1,0,0,1,0,1,0,0,1,1,0,0,0,1,"1" 67 | 0,1,0,0,1,0,1,0,0,1,0,1,0,0,0,"1" 68 | 0,1,0,0,1,0,1,0,0,1,0,0,1,0,1,"0" 69 | 0,1,0,0,1,0,1,0,0,1,0,0,1,0,0,"0" 70 | 0,1,0,0,1,0,1,0,0,1,0,0,0,1,0,"0" 71 | 0,1,0,0,1,0,0,1,0,0,0,1,0,0,0,"1" 72 | 0,1,0,0,1,0,0,0,1,0,1,0,0,0,0,"1" 73 | 0,1,0,0,1,0,0,0,1,0,0,0,1,0,1,"1" 74 | 0,1,0,0,1,0,0,0,1,0,0,0,1,0,0,"1" 75 | 0,1,0,0,1,0,0,0,0,1,0,0,0,1,1,"0" 76 | 0,1,0,0,0,1,1,1,0,0,0,0,1,0,1,"1" 77 | 0,1,0,0,0,1,1,0,1,0,1,0,0,0,1,"0" 78 | 0,1,0,0,0,1,1,0,1,0,0,1,0,0,1,"0" 79 | 0,1,0,0,0,1,1,0,1,0,0,1,0,0,0,"0" 80 | 0,1,0,0,0,1,1,0,1,0,0,0,1,0,0,"0" 81 | 0,1,0,0,0,1,1,0,0,1,0,0,1,0,1,"0" 82 | 0,1,0,0,0,1,0,1,0,0,1,0,0,0,0,"0" 83 | 0,1,0,0,0,1,0,1,0,0,0,1,0,0,0,"0" 84 | 0,1,0,0,0,1,0,1,0,0,0,0,0,1,1,"0" 85 | 0,1,0,0,0,1,0,0,1,0,0,0,1,0,1,"0" 86 | 0,1,0,0,0,1,0,0,1,0,0,0,0,1,0,"0" 87 | 0,1,0,0,0,1,0,0,0,1,1,0,0,0,1,"0" 88 | 0,1,0,0,0,1,0,0,0,1,0,1,0,0,1,"0" 89 | 0,1,0,0,0,1,0,0,0,1,0,0,0,1,0,"0" 90 | 0,0,1,1,0,0,1,1,0,0,1,0,0,0,1,"1" 91 | 0,0,1,1,0,0,1,1,0,0,0,1,0,0,1,"1" 92 | 0,0,1,1,0,0,1,1,0,0,0,0,1,0,1,"1" 93 | 0,0,1,1,0,0,1,0,1,0,0,0,0,1,0,"0" 94 | 0,0,1,1,0,0,1,0,0,1,1,0,0,0,0,"1" 95 | 0,0,1,1,0,0,1,0,0,1,0,0,0,1,0,"0" 96 | 0,0,1,1,0,0,0,1,0,0,0,1,0,0,1,"1" 97 | 0,0,1,1,0,0,0,0,1,0,0,0,1,0,0,"1" 98 | 0,0,1,1,0,0,0,0,1,0,0,0,0,1,0,"0" 99 | 0,0,1,1,0,0,0,0,0,1,1,0,0,0,1,"1" 100 | 0,0,1,0,1,0,1,1,0,0,0,1,0,0,0,"1" 101 | 0,0,1,0,1,0,1,1,0,0,0,0,0,1,1,"0" 102 | 0,0,1,0,1,0,1,0,1,0,0,0,1,0,1,"1" 103 | 0,0,1,0,1,0,1,0,0,1,1,0,0,0,0,"1" 104 | 0,0,1,0,1,0,0,1,0,0,0,1,0,0,0,"1" 105 | 0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,"1" 106 | 0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,"1" 107 | 0,0,1,0,1,0,0,0,0,1,1,0,0,0,1,"1" 108 | 0,0,1,0,1,0,0,0,0,1,0,0,1,0,0,"1" 109 | 0,0,1,0,1,0,0,0,0,1,0,0,0,1,1,"0" 
110 | 0,0,1,0,0,1,1,1,0,0,0,0,1,0,0,"1" 111 | 0,0,1,0,0,1,1,1,0,0,0,0,0,1,1,"1" 112 | 0,0,1,0,0,1,1,0,1,0,0,0,0,1,0,"0" 113 | 0,0,1,0,0,1,1,0,0,1,1,0,0,0,1,"0" 114 | 0,0,1,0,0,1,1,0,0,1,0,1,0,0,1,"0" 115 | 0,0,1,0,0,1,1,0,0,1,0,1,0,0,0,"0" 116 | 0,0,1,0,0,1,1,0,0,1,0,0,0,1,1,"0" 117 | 0,0,1,0,0,1,0,1,0,0,1,0,0,0,1,"0" 118 | 0,0,1,0,0,1,0,1,0,0,1,0,0,0,0,"0" 119 | 0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,"0" 120 | 0,0,1,0,0,1,0,0,1,0,0,0,1,0,0,"0" 121 | 0,0,1,0,0,1,0,0,0,1,1,0,0,0,1,"0" 122 | 0,0,1,0,0,1,0,0,0,1,0,0,1,0,0,"0" 123 | 0,0,1,0,0,1,0,0,0,1,0,0,0,1,0,"0" 124 | -------------------------------------------------------------------------------- /data/monks/monks-1.test: -------------------------------------------------------------------------------- 1 | 1 1 1 1 1 1 1 data_1 2 | 1 1 1 1 1 1 2 data_2 3 | 1 1 1 1 1 2 1 data_3 4 | 1 1 1 1 1 2 2 data_4 5 | 1 1 1 1 1 3 1 data_5 6 | 1 1 1 1 1 3 2 data_6 7 | 1 1 1 1 1 4 1 data_7 8 | 1 1 1 1 1 4 2 data_8 9 | 1 1 1 1 2 1 1 data_9 10 | 1 1 1 1 2 1 2 data_10 11 | 1 1 1 1 2 2 1 data_11 12 | 1 1 1 1 2 2 2 data_12 13 | 1 1 1 1 2 3 1 data_13 14 | 1 1 1 1 2 3 2 data_14 15 | 1 1 1 1 2 4 1 data_15 16 | 1 1 1 1 2 4 2 data_16 17 | 1 1 1 1 3 1 1 data_17 18 | 1 1 1 1 3 1 2 data_18 19 | 1 1 1 1 3 2 1 data_19 20 | 1 1 1 1 3 2 2 data_20 21 | 1 1 1 1 3 3 1 data_21 22 | 1 1 1 1 3 3 2 data_22 23 | 1 1 1 1 3 4 1 data_23 24 | 1 1 1 1 3 4 2 data_24 25 | 1 1 1 2 1 1 1 data_25 26 | 1 1 1 2 1 1 2 data_26 27 | 1 1 1 2 1 2 1 data_27 28 | 1 1 1 2 1 2 2 data_28 29 | 1 1 1 2 1 3 1 data_29 30 | 1 1 1 2 1 3 2 data_30 31 | 1 1 1 2 1 4 1 data_31 32 | 1 1 1 2 1 4 2 data_32 33 | 1 1 1 2 2 1 1 data_33 34 | 1 1 1 2 2 1 2 data_34 35 | 1 1 1 2 2 2 1 data_35 36 | 1 1 1 2 2 2 2 data_36 37 | 1 1 1 2 2 3 1 data_37 38 | 1 1 1 2 2 3 2 data_38 39 | 1 1 1 2 2 4 1 data_39 40 | 1 1 1 2 2 4 2 data_40 41 | 1 1 1 2 3 1 1 data_41 42 | 1 1 1 2 3 1 2 data_42 43 | 1 1 1 2 3 2 1 data_43 44 | 1 1 1 2 3 2 2 data_44 45 | 1 1 1 2 3 3 1 data_45 46 | 1 1 1 2 3 3 2 data_46 47 | 1 1 1 2 3 4 1 data_47 48 | 1 1 1 2 3 4 2 data_48 49 | 1 1 2 1 1 1 1 data_49 50 | 1 1 2 1 1 1 2 data_50 51 | 0 1 2 1 1 2 1 data_51 52 | 0 1 2 1 1 2 2 data_52 53 | 0 1 2 1 1 3 1 data_53 54 | 0 1 2 1 1 3 2 data_54 55 | 0 1 2 1 1 4 1 data_55 56 | 0 1 2 1 1 4 2 data_56 57 | 1 1 2 1 2 1 1 data_57 58 | 1 1 2 1 2 1 2 data_58 59 | 0 1 2 1 2 2 1 data_59 60 | 0 1 2 1 2 2 2 data_60 61 | 0 1 2 1 2 3 1 data_61 62 | 0 1 2 1 2 3 2 data_62 63 | 0 1 2 1 2 4 1 data_63 64 | 0 1 2 1 2 4 2 data_64 65 | 1 1 2 1 3 1 1 data_65 66 | 1 1 2 1 3 1 2 data_66 67 | 0 1 2 1 3 2 1 data_67 68 | 0 1 2 1 3 2 2 data_68 69 | 0 1 2 1 3 3 1 data_69 70 | 0 1 2 1 3 3 2 data_70 71 | 0 1 2 1 3 4 1 data_71 72 | 0 1 2 1 3 4 2 data_72 73 | 1 1 2 2 1 1 1 data_73 74 | 1 1 2 2 1 1 2 data_74 75 | 0 1 2 2 1 2 1 data_75 76 | 0 1 2 2 1 2 2 data_76 77 | 0 1 2 2 1 3 1 data_77 78 | 0 1 2 2 1 3 2 data_78 79 | 0 1 2 2 1 4 1 data_79 80 | 0 1 2 2 1 4 2 data_80 81 | 1 1 2 2 2 1 1 data_81 82 | 1 1 2 2 2 1 2 data_82 83 | 0 1 2 2 2 2 1 data_83 84 | 0 1 2 2 2 2 2 data_84 85 | 0 1 2 2 2 3 1 data_85 86 | 0 1 2 2 2 3 2 data_86 87 | 0 1 2 2 2 4 1 data_87 88 | 0 1 2 2 2 4 2 data_88 89 | 1 1 2 2 3 1 1 data_89 90 | 1 1 2 2 3 1 2 data_90 91 | 0 1 2 2 3 2 1 data_91 92 | 0 1 2 2 3 2 2 data_92 93 | 0 1 2 2 3 3 1 data_93 94 | 0 1 2 2 3 3 2 data_94 95 | 0 1 2 2 3 4 1 data_95 96 | 0 1 2 2 3 4 2 data_96 97 | 1 1 3 1 1 1 1 data_97 98 | 1 1 3 1 1 1 2 data_98 99 | 0 1 3 1 1 2 1 data_99 100 | 0 1 3 1 1 2 2 data_100 101 | 0 1 3 1 1 3 1 data_101 102 | 0 1 3 1 1 3 2 data_102 103 | 0 1 3 1 1 4 1 data_103 104 | 0 1 3 1 1 4 2 data_104 
105 | 1 1 3 1 2 1 1 data_105 106 | 1 1 3 1 2 1 2 data_106 107 | 0 1 3 1 2 2 1 data_107 108 | 0 1 3 1 2 2 2 data_108 109 | 0 1 3 1 2 3 1 data_109 110 | 0 1 3 1 2 3 2 data_110 111 | 0 1 3 1 2 4 1 data_111 112 | 0 1 3 1 2 4 2 data_112 113 | 1 1 3 1 3 1 1 data_113 114 | 1 1 3 1 3 1 2 data_114 115 | 0 1 3 1 3 2 1 data_115 116 | 0 1 3 1 3 2 2 data_116 117 | 0 1 3 1 3 3 1 data_117 118 | 0 1 3 1 3 3 2 data_118 119 | 0 1 3 1 3 4 1 data_119 120 | 0 1 3 1 3 4 2 data_120 121 | 1 1 3 2 1 1 1 data_121 122 | 1 1 3 2 1 1 2 data_122 123 | 0 1 3 2 1 2 1 data_123 124 | 0 1 3 2 1 2 2 data_124 125 | 0 1 3 2 1 3 1 data_125 126 | 0 1 3 2 1 3 2 data_126 127 | 0 1 3 2 1 4 1 data_127 128 | 0 1 3 2 1 4 2 data_128 129 | 1 1 3 2 2 1 1 data_129 130 | 1 1 3 2 2 1 2 data_130 131 | 0 1 3 2 2 2 1 data_131 132 | 0 1 3 2 2 2 2 data_132 133 | 0 1 3 2 2 3 1 data_133 134 | 0 1 3 2 2 3 2 data_134 135 | 0 1 3 2 2 4 1 data_135 136 | 0 1 3 2 2 4 2 data_136 137 | 1 1 3 2 3 1 1 data_137 138 | 1 1 3 2 3 1 2 data_138 139 | 0 1 3 2 3 2 1 data_139 140 | 0 1 3 2 3 2 2 data_140 141 | 0 1 3 2 3 3 1 data_141 142 | 0 1 3 2 3 3 2 data_142 143 | 0 1 3 2 3 4 1 data_143 144 | 0 1 3 2 3 4 2 data_144 145 | 1 2 1 1 1 1 1 data_145 146 | 1 2 1 1 1 1 2 data_146 147 | 0 2 1 1 1 2 1 data_147 148 | 0 2 1 1 1 2 2 data_148 149 | 0 2 1 1 1 3 1 data_149 150 | 0 2 1 1 1 3 2 data_150 151 | 0 2 1 1 1 4 1 data_151 152 | 0 2 1 1 1 4 2 data_152 153 | 1 2 1 1 2 1 1 data_153 154 | 1 2 1 1 2 1 2 data_154 155 | 0 2 1 1 2 2 1 data_155 156 | 0 2 1 1 2 2 2 data_156 157 | 0 2 1 1 2 3 1 data_157 158 | 0 2 1 1 2 3 2 data_158 159 | 0 2 1 1 2 4 1 data_159 160 | 0 2 1 1 2 4 2 data_160 161 | 1 2 1 1 3 1 1 data_161 162 | 1 2 1 1 3 1 2 data_162 163 | 0 2 1 1 3 2 1 data_163 164 | 0 2 1 1 3 2 2 data_164 165 | 0 2 1 1 3 3 1 data_165 166 | 0 2 1 1 3 3 2 data_166 167 | 0 2 1 1 3 4 1 data_167 168 | 0 2 1 1 3 4 2 data_168 169 | 1 2 1 2 1 1 1 data_169 170 | 1 2 1 2 1 1 2 data_170 171 | 0 2 1 2 1 2 1 data_171 172 | 0 2 1 2 1 2 2 data_172 173 | 0 2 1 2 1 3 1 data_173 174 | 0 2 1 2 1 3 2 data_174 175 | 0 2 1 2 1 4 1 data_175 176 | 0 2 1 2 1 4 2 data_176 177 | 1 2 1 2 2 1 1 data_177 178 | 1 2 1 2 2 1 2 data_178 179 | 0 2 1 2 2 2 1 data_179 180 | 0 2 1 2 2 2 2 data_180 181 | 0 2 1 2 2 3 1 data_181 182 | 0 2 1 2 2 3 2 data_182 183 | 0 2 1 2 2 4 1 data_183 184 | 0 2 1 2 2 4 2 data_184 185 | 1 2 1 2 3 1 1 data_185 186 | 1 2 1 2 3 1 2 data_186 187 | 0 2 1 2 3 2 1 data_187 188 | 0 2 1 2 3 2 2 data_188 189 | 0 2 1 2 3 3 1 data_189 190 | 0 2 1 2 3 3 2 data_190 191 | 0 2 1 2 3 4 1 data_191 192 | 0 2 1 2 3 4 2 data_192 193 | 1 2 2 1 1 1 1 data_193 194 | 1 2 2 1 1 1 2 data_194 195 | 1 2 2 1 1 2 1 data_195 196 | 1 2 2 1 1 2 2 data_196 197 | 1 2 2 1 1 3 1 data_197 198 | 1 2 2 1 1 3 2 data_198 199 | 1 2 2 1 1 4 1 data_199 200 | 1 2 2 1 1 4 2 data_200 201 | 1 2 2 1 2 1 1 data_201 202 | 1 2 2 1 2 1 2 data_202 203 | 1 2 2 1 2 2 1 data_203 204 | 1 2 2 1 2 2 2 data_204 205 | 1 2 2 1 2 3 1 data_205 206 | 1 2 2 1 2 3 2 data_206 207 | 1 2 2 1 2 4 1 data_207 208 | 1 2 2 1 2 4 2 data_208 209 | 1 2 2 1 3 1 1 data_209 210 | 1 2 2 1 3 1 2 data_210 211 | 1 2 2 1 3 2 1 data_211 212 | 1 2 2 1 3 2 2 data_212 213 | 1 2 2 1 3 3 1 data_213 214 | 1 2 2 1 3 3 2 data_214 215 | 1 2 2 1 3 4 1 data_215 216 | 1 2 2 1 3 4 2 data_216 217 | 1 2 2 2 1 1 1 data_217 218 | 1 2 2 2 1 1 2 data_218 219 | 1 2 2 2 1 2 1 data_219 220 | 1 2 2 2 1 2 2 data_220 221 | 1 2 2 2 1 3 1 data_221 222 | 1 2 2 2 1 3 2 data_222 223 | 1 2 2 2 1 4 1 data_223 224 | 1 2 2 2 1 4 2 data_224 225 | 1 2 2 2 2 1 1 data_225 226 | 1 2 2 2 2 1 2 data_226 227 | 1 2 2 2 2 
2 1 data_227 228 | 1 2 2 2 2 2 2 data_228 229 | 1 2 2 2 2 3 1 data_229 230 | 1 2 2 2 2 3 2 data_230 231 | 1 2 2 2 2 4 1 data_231 232 | 1 2 2 2 2 4 2 data_232 233 | 1 2 2 2 3 1 1 data_233 234 | 1 2 2 2 3 1 2 data_234 235 | 1 2 2 2 3 2 1 data_235 236 | 1 2 2 2 3 2 2 data_236 237 | 1 2 2 2 3 3 1 data_237 238 | 1 2 2 2 3 3 2 data_238 239 | 1 2 2 2 3 4 1 data_239 240 | 1 2 2 2 3 4 2 data_240 241 | 1 2 3 1 1 1 1 data_241 242 | 1 2 3 1 1 1 2 data_242 243 | 0 2 3 1 1 2 1 data_243 244 | 0 2 3 1 1 2 2 data_244 245 | 0 2 3 1 1 3 1 data_245 246 | 0 2 3 1 1 3 2 data_246 247 | 0 2 3 1 1 4 1 data_247 248 | 0 2 3 1 1 4 2 data_248 249 | 1 2 3 1 2 1 1 data_249 250 | 1 2 3 1 2 1 2 data_250 251 | 0 2 3 1 2 2 1 data_251 252 | 0 2 3 1 2 2 2 data_252 253 | 0 2 3 1 2 3 1 data_253 254 | 0 2 3 1 2 3 2 data_254 255 | 0 2 3 1 2 4 1 data_255 256 | 0 2 3 1 2 4 2 data_256 257 | 1 2 3 1 3 1 1 data_257 258 | 1 2 3 1 3 1 2 data_258 259 | 0 2 3 1 3 2 1 data_259 260 | 0 2 3 1 3 2 2 data_260 261 | 0 2 3 1 3 3 1 data_261 262 | 0 2 3 1 3 3 2 data_262 263 | 0 2 3 1 3 4 1 data_263 264 | 0 2 3 1 3 4 2 data_264 265 | 1 2 3 2 1 1 1 data_265 266 | 1 2 3 2 1 1 2 data_266 267 | 0 2 3 2 1 2 1 data_267 268 | 0 2 3 2 1 2 2 data_268 269 | 0 2 3 2 1 3 1 data_269 270 | 0 2 3 2 1 3 2 data_270 271 | 0 2 3 2 1 4 1 data_271 272 | 0 2 3 2 1 4 2 data_272 273 | 1 2 3 2 2 1 1 data_273 274 | 1 2 3 2 2 1 2 data_274 275 | 0 2 3 2 2 2 1 data_275 276 | 0 2 3 2 2 2 2 data_276 277 | 0 2 3 2 2 3 1 data_277 278 | 0 2 3 2 2 3 2 data_278 279 | 0 2 3 2 2 4 1 data_279 280 | 0 2 3 2 2 4 2 data_280 281 | 1 2 3 2 3 1 1 data_281 282 | 1 2 3 2 3 1 2 data_282 283 | 0 2 3 2 3 2 1 data_283 284 | 0 2 3 2 3 2 2 data_284 285 | 0 2 3 2 3 3 1 data_285 286 | 0 2 3 2 3 3 2 data_286 287 | 0 2 3 2 3 4 1 data_287 288 | 0 2 3 2 3 4 2 data_288 289 | 1 3 1 1 1 1 1 data_289 290 | 1 3 1 1 1 1 2 data_290 291 | 0 3 1 1 1 2 1 data_291 292 | 0 3 1 1 1 2 2 data_292 293 | 0 3 1 1 1 3 1 data_293 294 | 0 3 1 1 1 3 2 data_294 295 | 0 3 1 1 1 4 1 data_295 296 | 0 3 1 1 1 4 2 data_296 297 | 1 3 1 1 2 1 1 data_297 298 | 1 3 1 1 2 1 2 data_298 299 | 0 3 1 1 2 2 1 data_299 300 | 0 3 1 1 2 2 2 data_300 301 | 0 3 1 1 2 3 1 data_301 302 | 0 3 1 1 2 3 2 data_302 303 | 0 3 1 1 2 4 1 data_303 304 | 0 3 1 1 2 4 2 data_304 305 | 1 3 1 1 3 1 1 data_305 306 | 1 3 1 1 3 1 2 data_306 307 | 0 3 1 1 3 2 1 data_307 308 | 0 3 1 1 3 2 2 data_308 309 | 0 3 1 1 3 3 1 data_309 310 | 0 3 1 1 3 3 2 data_310 311 | 0 3 1 1 3 4 1 data_311 312 | 0 3 1 1 3 4 2 data_312 313 | 1 3 1 2 1 1 1 data_313 314 | 1 3 1 2 1 1 2 data_314 315 | 0 3 1 2 1 2 1 data_315 316 | 0 3 1 2 1 2 2 data_316 317 | 0 3 1 2 1 3 1 data_317 318 | 0 3 1 2 1 3 2 data_318 319 | 0 3 1 2 1 4 1 data_319 320 | 0 3 1 2 1 4 2 data_320 321 | 1 3 1 2 2 1 1 data_321 322 | 1 3 1 2 2 1 2 data_322 323 | 0 3 1 2 2 2 1 data_323 324 | 0 3 1 2 2 2 2 data_324 325 | 0 3 1 2 2 3 1 data_325 326 | 0 3 1 2 2 3 2 data_326 327 | 0 3 1 2 2 4 1 data_327 328 | 0 3 1 2 2 4 2 data_328 329 | 1 3 1 2 3 1 1 data_329 330 | 1 3 1 2 3 1 2 data_330 331 | 0 3 1 2 3 2 1 data_331 332 | 0 3 1 2 3 2 2 data_332 333 | 0 3 1 2 3 3 1 data_333 334 | 0 3 1 2 3 3 2 data_334 335 | 0 3 1 2 3 4 1 data_335 336 | 0 3 1 2 3 4 2 data_336 337 | 1 3 2 1 1 1 1 data_337 338 | 1 3 2 1 1 1 2 data_338 339 | 0 3 2 1 1 2 1 data_339 340 | 0 3 2 1 1 2 2 data_340 341 | 0 3 2 1 1 3 1 data_341 342 | 0 3 2 1 1 3 2 data_342 343 | 0 3 2 1 1 4 1 data_343 344 | 0 3 2 1 1 4 2 data_344 345 | 1 3 2 1 2 1 1 data_345 346 | 1 3 2 1 2 1 2 data_346 347 | 0 3 2 1 2 2 1 data_347 348 | 0 3 2 1 2 2 2 data_348 349 | 0 3 2 1 2 3 1 data_349 350 
| 0 3 2 1 2 3 2 data_350 351 | 0 3 2 1 2 4 1 data_351 352 | 0 3 2 1 2 4 2 data_352 353 | 1 3 2 1 3 1 1 data_353 354 | 1 3 2 1 3 1 2 data_354 355 | 0 3 2 1 3 2 1 data_355 356 | 0 3 2 1 3 2 2 data_356 357 | 0 3 2 1 3 3 1 data_357 358 | 0 3 2 1 3 3 2 data_358 359 | 0 3 2 1 3 4 1 data_359 360 | 0 3 2 1 3 4 2 data_360 361 | 1 3 2 2 1 1 1 data_361 362 | 1 3 2 2 1 1 2 data_362 363 | 0 3 2 2 1 2 1 data_363 364 | 0 3 2 2 1 2 2 data_364 365 | 0 3 2 2 1 3 1 data_365 366 | 0 3 2 2 1 3 2 data_366 367 | 0 3 2 2 1 4 1 data_367 368 | 0 3 2 2 1 4 2 data_368 369 | 1 3 2 2 2 1 1 data_369 370 | 1 3 2 2 2 1 2 data_370 371 | 0 3 2 2 2 2 1 data_371 372 | 0 3 2 2 2 2 2 data_372 373 | 0 3 2 2 2 3 1 data_373 374 | 0 3 2 2 2 3 2 data_374 375 | 0 3 2 2 2 4 1 data_375 376 | 0 3 2 2 2 4 2 data_376 377 | 1 3 2 2 3 1 1 data_377 378 | 1 3 2 2 3 1 2 data_378 379 | 0 3 2 2 3 2 1 data_379 380 | 0 3 2 2 3 2 2 data_380 381 | 0 3 2 2 3 3 1 data_381 382 | 0 3 2 2 3 3 2 data_382 383 | 0 3 2 2 3 4 1 data_383 384 | 0 3 2 2 3 4 2 data_384 385 | 1 3 3 1 1 1 1 data_385 386 | 1 3 3 1 1 1 2 data_386 387 | 1 3 3 1 1 2 1 data_387 388 | 1 3 3 1 1 2 2 data_388 389 | 1 3 3 1 1 3 1 data_389 390 | 1 3 3 1 1 3 2 data_390 391 | 1 3 3 1 1 4 1 data_391 392 | 1 3 3 1 1 4 2 data_392 393 | 1 3 3 1 2 1 1 data_393 394 | 1 3 3 1 2 1 2 data_394 395 | 1 3 3 1 2 2 1 data_395 396 | 1 3 3 1 2 2 2 data_396 397 | 1 3 3 1 2 3 1 data_397 398 | 1 3 3 1 2 3 2 data_398 399 | 1 3 3 1 2 4 1 data_399 400 | 1 3 3 1 2 4 2 data_400 401 | 1 3 3 1 3 1 1 data_401 402 | 1 3 3 1 3 1 2 data_402 403 | 1 3 3 1 3 2 1 data_403 404 | 1 3 3 1 3 2 2 data_404 405 | 1 3 3 1 3 3 1 data_405 406 | 1 3 3 1 3 3 2 data_406 407 | 1 3 3 1 3 4 1 data_407 408 | 1 3 3 1 3 4 2 data_408 409 | 1 3 3 2 1 1 1 data_409 410 | 1 3 3 2 1 1 2 data_410 411 | 1 3 3 2 1 2 1 data_411 412 | 1 3 3 2 1 2 2 data_412 413 | 1 3 3 2 1 3 1 data_413 414 | 1 3 3 2 1 3 2 data_414 415 | 1 3 3 2 1 4 1 data_415 416 | 1 3 3 2 1 4 2 data_416 417 | 1 3 3 2 2 1 1 data_417 418 | 1 3 3 2 2 1 2 data_418 419 | 1 3 3 2 2 2 1 data_419 420 | 1 3 3 2 2 2 2 data_420 421 | 1 3 3 2 2 3 1 data_421 422 | 1 3 3 2 2 3 2 data_422 423 | 1 3 3 2 2 4 1 data_423 424 | 1 3 3 2 2 4 2 data_424 425 | 1 3 3 2 3 1 1 data_425 426 | 1 3 3 2 3 1 2 data_426 427 | 1 3 3 2 3 2 1 data_427 428 | 1 3 3 2 3 2 2 data_428 429 | 1 3 3 2 3 3 1 data_429 430 | 1 3 3 2 3 3 2 data_430 431 | 1 3 3 2 3 4 1 data_431 432 | 1 3 3 2 3 4 2 data_432 433 | -------------------------------------------------------------------------------- /data/monks/monks-1.train: -------------------------------------------------------------------------------- 1 | 1 1 1 1 1 3 1 data_5 2 | 1 1 1 1 1 3 2 data_6 3 | 1 1 1 1 3 2 1 data_19 4 | 1 1 1 1 3 3 2 data_22 5 | 1 1 1 2 1 2 1 data_27 6 | 1 1 1 2 1 2 2 data_28 7 | 1 1 1 2 2 3 1 data_37 8 | 1 1 1 2 2 4 1 data_39 9 | 1 1 1 2 3 1 2 data_42 10 | 1 1 2 1 1 1 2 data_50 11 | 0 1 2 1 1 2 1 data_51 12 | 0 1 2 1 1 3 1 data_53 13 | 0 1 2 1 1 4 2 data_56 14 | 1 1 2 1 2 1 1 data_57 15 | 0 1 2 1 2 3 1 data_61 16 | 0 1 2 1 2 3 2 data_62 17 | 0 1 2 1 2 4 2 data_64 18 | 0 1 2 1 3 2 1 data_67 19 | 0 1 2 1 3 4 2 data_72 20 | 0 1 2 2 1 2 2 data_76 21 | 0 1 2 2 2 3 2 data_86 22 | 0 1 2 2 2 4 1 data_87 23 | 0 1 2 2 2 4 2 data_88 24 | 0 1 2 2 3 2 2 data_92 25 | 0 1 2 2 3 3 1 data_93 26 | 0 1 2 2 3 3 2 data_94 27 | 0 1 3 1 1 2 1 data_99 28 | 0 1 3 1 1 4 1 data_103 29 | 0 1 3 1 2 2 1 data_107 30 | 0 1 3 1 2 4 1 data_111 31 | 1 1 3 1 3 1 2 data_114 32 | 0 1 3 1 3 2 2 data_116 33 | 0 1 3 1 3 3 1 data_117 34 | 0 1 3 1 3 4 1 data_119 35 | 0 1 3 1 3 4 2 data_120 36 | 0 1 3 2 1 
2 2 data_124 37 | 1 1 3 2 2 1 2 data_130 38 | 0 1 3 2 2 2 2 data_132 39 | 0 1 3 2 2 3 2 data_134 40 | 0 1 3 2 2 4 1 data_135 41 | 0 1 3 2 2 4 2 data_136 42 | 1 1 3 2 3 1 1 data_137 43 | 0 1 3 2 3 2 1 data_139 44 | 0 1 3 2 3 4 1 data_143 45 | 0 1 3 2 3 4 2 data_144 46 | 0 2 1 1 1 3 1 data_149 47 | 0 2 1 1 1 3 2 data_150 48 | 1 2 1 1 2 1 1 data_153 49 | 1 2 1 1 2 1 2 data_154 50 | 0 2 1 1 2 2 2 data_156 51 | 0 2 1 1 2 3 1 data_157 52 | 0 2 1 1 2 4 1 data_159 53 | 0 2 1 1 2 4 2 data_160 54 | 0 2 1 1 3 4 1 data_167 55 | 0 2 1 2 1 2 2 data_172 56 | 0 2 1 2 1 3 1 data_173 57 | 0 2 1 2 1 4 2 data_176 58 | 0 2 1 2 2 3 1 data_181 59 | 0 2 1 2 2 4 2 data_184 60 | 0 2 1 2 3 2 2 data_188 61 | 0 2 1 2 3 4 1 data_191 62 | 1 2 2 1 1 2 1 data_195 63 | 1 2 2 1 1 2 2 data_196 64 | 1 2 2 1 1 3 1 data_197 65 | 1 2 2 1 2 3 2 data_206 66 | 1 2 2 1 3 1 1 data_209 67 | 1 2 2 1 3 1 2 data_210 68 | 1 2 2 1 3 2 2 data_212 69 | 1 2 2 1 3 3 2 data_214 70 | 1 2 2 1 3 4 2 data_216 71 | 1 2 2 2 1 1 1 data_217 72 | 1 2 2 2 1 3 2 data_222 73 | 1 2 2 2 1 4 1 data_223 74 | 1 2 2 2 1 4 2 data_224 75 | 1 2 2 2 2 2 1 data_227 76 | 1 2 2 2 3 4 1 data_239 77 | 1 2 3 1 1 1 1 data_241 78 | 1 2 3 1 2 1 1 data_249 79 | 0 2 3 1 2 3 1 data_253 80 | 1 2 3 1 3 1 2 data_258 81 | 0 2 3 1 3 3 1 data_261 82 | 0 2 3 1 3 4 2 data_264 83 | 0 2 3 2 1 3 2 data_270 84 | 1 2 3 2 2 1 1 data_273 85 | 1 2 3 2 2 1 2 data_274 86 | 0 2 3 2 2 2 1 data_275 87 | 0 2 3 2 3 3 2 data_286 88 | 1 3 1 1 1 1 1 data_289 89 | 1 3 1 1 1 1 2 data_290 90 | 1 3 1 1 2 1 1 data_297 91 | 0 3 1 1 2 2 2 data_300 92 | 0 3 1 1 3 2 2 data_308 93 | 1 3 1 2 1 1 1 data_313 94 | 0 3 1 2 1 2 2 data_316 95 | 0 3 1 2 2 2 2 data_324 96 | 0 3 1 2 2 3 2 data_326 97 | 0 3 1 2 3 2 2 data_332 98 | 1 3 2 1 1 1 1 data_337 99 | 0 3 2 1 1 4 2 data_344 100 | 1 3 2 1 2 1 2 data_346 101 | 0 3 2 1 2 4 2 data_352 102 | 1 3 2 2 1 1 1 data_361 103 | 1 3 2 2 1 1 2 data_362 104 | 0 3 2 2 1 3 2 data_366 105 | 1 3 2 2 3 1 1 data_377 106 | 0 3 2 2 3 2 1 data_379 107 | 0 3 2 2 3 4 1 data_383 108 | 1 3 3 1 1 1 1 data_385 109 | 1 3 3 1 1 2 1 data_387 110 | 1 3 3 1 1 4 2 data_392 111 | 1 3 3 1 2 3 2 data_398 112 | 1 3 3 1 2 4 2 data_400 113 | 1 3 3 1 3 1 2 data_402 114 | 1 3 3 1 3 2 1 data_403 115 | 1 3 3 1 3 2 2 data_404 116 | 1 3 3 1 3 4 2 data_408 117 | 1 3 3 2 1 1 1 data_409 118 | 1 3 3 2 1 3 2 data_414 119 | 1 3 3 2 1 4 1 data_415 120 | 1 3 3 2 1 4 2 data_416 121 | 1 3 3 2 3 1 2 data_426 122 | 1 3 3 2 3 2 2 data_428 123 | 1 3 3 2 3 3 2 data_430 124 | 1 3 3 2 3 4 2 data_432 125 | -------------------------------------------------------------------------------- /data/monks/monks-2.test: -------------------------------------------------------------------------------- 1 | 0 1 1 1 1 1 1 data_1 2 | 0 1 1 1 1 1 2 data_2 3 | 0 1 1 1 1 2 1 data_3 4 | 0 1 1 1 1 2 2 data_4 5 | 0 1 1 1 1 3 1 data_5 6 | 0 1 1 1 1 3 2 data_6 7 | 0 1 1 1 1 4 1 data_7 8 | 0 1 1 1 1 4 2 data_8 9 | 0 1 1 1 2 1 1 data_9 10 | 0 1 1 1 2 1 2 data_10 11 | 0 1 1 1 2 2 1 data_11 12 | 0 1 1 1 2 2 2 data_12 13 | 0 1 1 1 2 3 1 data_13 14 | 0 1 1 1 2 3 2 data_14 15 | 0 1 1 1 2 4 1 data_15 16 | 0 1 1 1 2 4 2 data_16 17 | 0 1 1 1 3 1 1 data_17 18 | 0 1 1 1 3 1 2 data_18 19 | 0 1 1 1 3 2 1 data_19 20 | 0 1 1 1 3 2 2 data_20 21 | 0 1 1 1 3 3 1 data_21 22 | 0 1 1 1 3 3 2 data_22 23 | 0 1 1 1 3 4 1 data_23 24 | 0 1 1 1 3 4 2 data_24 25 | 0 1 1 2 1 1 1 data_25 26 | 0 1 1 2 1 1 2 data_26 27 | 0 1 1 2 1 2 1 data_27 28 | 0 1 1 2 1 2 2 data_28 29 | 0 1 1 2 1 3 1 data_29 30 | 0 1 1 2 1 3 2 data_30 31 | 0 1 1 2 1 4 1 data_31 32 | 0 1 1 2 1 4 2 data_32 33 | 0 1 1 2 
2 1 1 data_33 34 | 0 1 1 2 2 1 2 data_34 35 | 0 1 1 2 2 2 1 data_35 36 | 1 1 1 2 2 2 2 data_36 37 | 0 1 1 2 2 3 1 data_37 38 | 1 1 1 2 2 3 2 data_38 39 | 0 1 1 2 2 4 1 data_39 40 | 1 1 1 2 2 4 2 data_40 41 | 0 1 1 2 3 1 1 data_41 42 | 0 1 1 2 3 1 2 data_42 43 | 0 1 1 2 3 2 1 data_43 44 | 1 1 1 2 3 2 2 data_44 45 | 0 1 1 2 3 3 1 data_45 46 | 1 1 1 2 3 3 2 data_46 47 | 0 1 1 2 3 4 1 data_47 48 | 1 1 1 2 3 4 2 data_48 49 | 0 1 2 1 1 1 1 data_49 50 | 0 1 2 1 1 1 2 data_50 51 | 0 1 2 1 1 2 1 data_51 52 | 0 1 2 1 1 2 2 data_52 53 | 0 1 2 1 1 3 1 data_53 54 | 0 1 2 1 1 3 2 data_54 55 | 0 1 2 1 1 4 1 data_55 56 | 0 1 2 1 1 4 2 data_56 57 | 0 1 2 1 2 1 1 data_57 58 | 0 1 2 1 2 1 2 data_58 59 | 0 1 2 1 2 2 1 data_59 60 | 1 1 2 1 2 2 2 data_60 61 | 0 1 2 1 2 3 1 data_61 62 | 1 1 2 1 2 3 2 data_62 63 | 0 1 2 1 2 4 1 data_63 64 | 1 1 2 1 2 4 2 data_64 65 | 0 1 2 1 3 1 1 data_65 66 | 0 1 2 1 3 1 2 data_66 67 | 0 1 2 1 3 2 1 data_67 68 | 1 1 2 1 3 2 2 data_68 69 | 0 1 2 1 3 3 1 data_69 70 | 1 1 2 1 3 3 2 data_70 71 | 0 1 2 1 3 4 1 data_71 72 | 1 1 2 1 3 4 2 data_72 73 | 0 1 2 2 1 1 1 data_73 74 | 0 1 2 2 1 1 2 data_74 75 | 0 1 2 2 1 2 1 data_75 76 | 1 1 2 2 1 2 2 data_76 77 | 0 1 2 2 1 3 1 data_77 78 | 1 1 2 2 1 3 2 data_78 79 | 0 1 2 2 1 4 1 data_79 80 | 1 1 2 2 1 4 2 data_80 81 | 0 1 2 2 2 1 1 data_81 82 | 1 1 2 2 2 1 2 data_82 83 | 1 1 2 2 2 2 1 data_83 84 | 0 1 2 2 2 2 2 data_84 85 | 1 1 2 2 2 3 1 data_85 86 | 0 1 2 2 2 3 2 data_86 87 | 1 1 2 2 2 4 1 data_87 88 | 0 1 2 2 2 4 2 data_88 89 | 0 1 2 2 3 1 1 data_89 90 | 1 1 2 2 3 1 2 data_90 91 | 1 1 2 2 3 2 1 data_91 92 | 0 1 2 2 3 2 2 data_92 93 | 1 1 2 2 3 3 1 data_93 94 | 0 1 2 2 3 3 2 data_94 95 | 1 1 2 2 3 4 1 data_95 96 | 0 1 2 2 3 4 2 data_96 97 | 0 1 3 1 1 1 1 data_97 98 | 0 1 3 1 1 1 2 data_98 99 | 0 1 3 1 1 2 1 data_99 100 | 0 1 3 1 1 2 2 data_100 101 | 0 1 3 1 1 3 1 data_101 102 | 0 1 3 1 1 3 2 data_102 103 | 0 1 3 1 1 4 1 data_103 104 | 0 1 3 1 1 4 2 data_104 105 | 0 1 3 1 2 1 1 data_105 106 | 0 1 3 1 2 1 2 data_106 107 | 0 1 3 1 2 2 1 data_107 108 | 1 1 3 1 2 2 2 data_108 109 | 0 1 3 1 2 3 1 data_109 110 | 1 1 3 1 2 3 2 data_110 111 | 0 1 3 1 2 4 1 data_111 112 | 1 1 3 1 2 4 2 data_112 113 | 0 1 3 1 3 1 1 data_113 114 | 0 1 3 1 3 1 2 data_114 115 | 0 1 3 1 3 2 1 data_115 116 | 1 1 3 1 3 2 2 data_116 117 | 0 1 3 1 3 3 1 data_117 118 | 1 1 3 1 3 3 2 data_118 119 | 0 1 3 1 3 4 1 data_119 120 | 1 1 3 1 3 4 2 data_120 121 | 0 1 3 2 1 1 1 data_121 122 | 0 1 3 2 1 1 2 data_122 123 | 0 1 3 2 1 2 1 data_123 124 | 1 1 3 2 1 2 2 data_124 125 | 0 1 3 2 1 3 1 data_125 126 | 1 1 3 2 1 3 2 data_126 127 | 0 1 3 2 1 4 1 data_127 128 | 1 1 3 2 1 4 2 data_128 129 | 0 1 3 2 2 1 1 data_129 130 | 1 1 3 2 2 1 2 data_130 131 | 1 1 3 2 2 2 1 data_131 132 | 0 1 3 2 2 2 2 data_132 133 | 1 1 3 2 2 3 1 data_133 134 | 0 1 3 2 2 3 2 data_134 135 | 1 1 3 2 2 4 1 data_135 136 | 0 1 3 2 2 4 2 data_136 137 | 0 1 3 2 3 1 1 data_137 138 | 1 1 3 2 3 1 2 data_138 139 | 1 1 3 2 3 2 1 data_139 140 | 0 1 3 2 3 2 2 data_140 141 | 1 1 3 2 3 3 1 data_141 142 | 0 1 3 2 3 3 2 data_142 143 | 1 1 3 2 3 4 1 data_143 144 | 0 1 3 2 3 4 2 data_144 145 | 0 2 1 1 1 1 1 data_145 146 | 0 2 1 1 1 1 2 data_146 147 | 0 2 1 1 1 2 1 data_147 148 | 0 2 1 1 1 2 2 data_148 149 | 0 2 1 1 1 3 1 data_149 150 | 0 2 1 1 1 3 2 data_150 151 | 0 2 1 1 1 4 1 data_151 152 | 0 2 1 1 1 4 2 data_152 153 | 0 2 1 1 2 1 1 data_153 154 | 0 2 1 1 2 1 2 data_154 155 | 0 2 1 1 2 2 1 data_155 156 | 1 2 1 1 2 2 2 data_156 157 | 0 2 1 1 2 3 1 data_157 158 | 1 2 1 1 2 3 2 data_158 159 | 0 2 1 1 2 4 1 data_159 160 | 1 2 1 1 2 4 
2 data_160 161 | 0 2 1 1 3 1 1 data_161 162 | 0 2 1 1 3 1 2 data_162 163 | 0 2 1 1 3 2 1 data_163 164 | 1 2 1 1 3 2 2 data_164 165 | 0 2 1 1 3 3 1 data_165 166 | 1 2 1 1 3 3 2 data_166 167 | 0 2 1 1 3 4 1 data_167 168 | 1 2 1 1 3 4 2 data_168 169 | 0 2 1 2 1 1 1 data_169 170 | 0 2 1 2 1 1 2 data_170 171 | 0 2 1 2 1 2 1 data_171 172 | 1 2 1 2 1 2 2 data_172 173 | 0 2 1 2 1 3 1 data_173 174 | 1 2 1 2 1 3 2 data_174 175 | 0 2 1 2 1 4 1 data_175 176 | 1 2 1 2 1 4 2 data_176 177 | 0 2 1 2 2 1 1 data_177 178 | 1 2 1 2 2 1 2 data_178 179 | 1 2 1 2 2 2 1 data_179 180 | 0 2 1 2 2 2 2 data_180 181 | 1 2 1 2 2 3 1 data_181 182 | 0 2 1 2 2 3 2 data_182 183 | 1 2 1 2 2 4 1 data_183 184 | 0 2 1 2 2 4 2 data_184 185 | 0 2 1 2 3 1 1 data_185 186 | 1 2 1 2 3 1 2 data_186 187 | 1 2 1 2 3 2 1 data_187 188 | 0 2 1 2 3 2 2 data_188 189 | 1 2 1 2 3 3 1 data_189 190 | 0 2 1 2 3 3 2 data_190 191 | 1 2 1 2 3 4 1 data_191 192 | 0 2 1 2 3 4 2 data_192 193 | 0 2 2 1 1 1 1 data_193 194 | 0 2 2 1 1 1 2 data_194 195 | 0 2 2 1 1 2 1 data_195 196 | 1 2 2 1 1 2 2 data_196 197 | 0 2 2 1 1 3 1 data_197 198 | 1 2 2 1 1 3 2 data_198 199 | 0 2 2 1 1 4 1 data_199 200 | 1 2 2 1 1 4 2 data_200 201 | 0 2 2 1 2 1 1 data_201 202 | 1 2 2 1 2 1 2 data_202 203 | 1 2 2 1 2 2 1 data_203 204 | 0 2 2 1 2 2 2 data_204 205 | 1 2 2 1 2 3 1 data_205 206 | 0 2 2 1 2 3 2 data_206 207 | 1 2 2 1 2 4 1 data_207 208 | 0 2 2 1 2 4 2 data_208 209 | 0 2 2 1 3 1 1 data_209 210 | 1 2 2 1 3 1 2 data_210 211 | 1 2 2 1 3 2 1 data_211 212 | 0 2 2 1 3 2 2 data_212 213 | 1 2 2 1 3 3 1 data_213 214 | 0 2 2 1 3 3 2 data_214 215 | 1 2 2 1 3 4 1 data_215 216 | 0 2 2 1 3 4 2 data_216 217 | 0 2 2 2 1 1 1 data_217 218 | 1 2 2 2 1 1 2 data_218 219 | 1 2 2 2 1 2 1 data_219 220 | 0 2 2 2 1 2 2 data_220 221 | 1 2 2 2 1 3 1 data_221 222 | 0 2 2 2 1 3 2 data_222 223 | 1 2 2 2 1 4 1 data_223 224 | 0 2 2 2 1 4 2 data_224 225 | 1 2 2 2 2 1 1 data_225 226 | 0 2 2 2 2 1 2 data_226 227 | 0 2 2 2 2 2 1 data_227 228 | 0 2 2 2 2 2 2 data_228 229 | 0 2 2 2 2 3 1 data_229 230 | 0 2 2 2 2 3 2 data_230 231 | 0 2 2 2 2 4 1 data_231 232 | 0 2 2 2 2 4 2 data_232 233 | 1 2 2 2 3 1 1 data_233 234 | 0 2 2 2 3 1 2 data_234 235 | 0 2 2 2 3 2 1 data_235 236 | 0 2 2 2 3 2 2 data_236 237 | 0 2 2 2 3 3 1 data_237 238 | 0 2 2 2 3 3 2 data_238 239 | 0 2 2 2 3 4 1 data_239 240 | 0 2 2 2 3 4 2 data_240 241 | 0 2 3 1 1 1 1 data_241 242 | 0 2 3 1 1 1 2 data_242 243 | 0 2 3 1 1 2 1 data_243 244 | 1 2 3 1 1 2 2 data_244 245 | 0 2 3 1 1 3 1 data_245 246 | 1 2 3 1 1 3 2 data_246 247 | 0 2 3 1 1 4 1 data_247 248 | 1 2 3 1 1 4 2 data_248 249 | 0 2 3 1 2 1 1 data_249 250 | 1 2 3 1 2 1 2 data_250 251 | 1 2 3 1 2 2 1 data_251 252 | 0 2 3 1 2 2 2 data_252 253 | 1 2 3 1 2 3 1 data_253 254 | 0 2 3 1 2 3 2 data_254 255 | 1 2 3 1 2 4 1 data_255 256 | 0 2 3 1 2 4 2 data_256 257 | 0 2 3 1 3 1 1 data_257 258 | 1 2 3 1 3 1 2 data_258 259 | 1 2 3 1 3 2 1 data_259 260 | 0 2 3 1 3 2 2 data_260 261 | 1 2 3 1 3 3 1 data_261 262 | 0 2 3 1 3 3 2 data_262 263 | 1 2 3 1 3 4 1 data_263 264 | 0 2 3 1 3 4 2 data_264 265 | 0 2 3 2 1 1 1 data_265 266 | 1 2 3 2 1 1 2 data_266 267 | 1 2 3 2 1 2 1 data_267 268 | 0 2 3 2 1 2 2 data_268 269 | 1 2 3 2 1 3 1 data_269 270 | 0 2 3 2 1 3 2 data_270 271 | 1 2 3 2 1 4 1 data_271 272 | 0 2 3 2 1 4 2 data_272 273 | 1 2 3 2 2 1 1 data_273 274 | 0 2 3 2 2 1 2 data_274 275 | 0 2 3 2 2 2 1 data_275 276 | 0 2 3 2 2 2 2 data_276 277 | 0 2 3 2 2 3 1 data_277 278 | 0 2 3 2 2 3 2 data_278 279 | 0 2 3 2 2 4 1 data_279 280 | 0 2 3 2 2 4 2 data_280 281 | 1 2 3 2 3 1 1 data_281 282 | 0 2 3 2 3 1 2 data_282 283 | 
0 2 3 2 3 2 1 data_283 284 | 0 2 3 2 3 2 2 data_284 285 | 0 2 3 2 3 3 1 data_285 286 | 0 2 3 2 3 3 2 data_286 287 | 0 2 3 2 3 4 1 data_287 288 | 0 2 3 2 3 4 2 data_288 289 | 0 3 1 1 1 1 1 data_289 290 | 0 3 1 1 1 1 2 data_290 291 | 0 3 1 1 1 2 1 data_291 292 | 0 3 1 1 1 2 2 data_292 293 | 0 3 1 1 1 3 1 data_293 294 | 0 3 1 1 1 3 2 data_294 295 | 0 3 1 1 1 4 1 data_295 296 | 0 3 1 1 1 4 2 data_296 297 | 0 3 1 1 2 1 1 data_297 298 | 0 3 1 1 2 1 2 data_298 299 | 0 3 1 1 2 2 1 data_299 300 | 1 3 1 1 2 2 2 data_300 301 | 0 3 1 1 2 3 1 data_301 302 | 1 3 1 1 2 3 2 data_302 303 | 0 3 1 1 2 4 1 data_303 304 | 1 3 1 1 2 4 2 data_304 305 | 0 3 1 1 3 1 1 data_305 306 | 0 3 1 1 3 1 2 data_306 307 | 0 3 1 1 3 2 1 data_307 308 | 1 3 1 1 3 2 2 data_308 309 | 0 3 1 1 3 3 1 data_309 310 | 1 3 1 1 3 3 2 data_310 311 | 0 3 1 1 3 4 1 data_311 312 | 1 3 1 1 3 4 2 data_312 313 | 0 3 1 2 1 1 1 data_313 314 | 0 3 1 2 1 1 2 data_314 315 | 0 3 1 2 1 2 1 data_315 316 | 1 3 1 2 1 2 2 data_316 317 | 0 3 1 2 1 3 1 data_317 318 | 1 3 1 2 1 3 2 data_318 319 | 0 3 1 2 1 4 1 data_319 320 | 1 3 1 2 1 4 2 data_320 321 | 0 3 1 2 2 1 1 data_321 322 | 1 3 1 2 2 1 2 data_322 323 | 1 3 1 2 2 2 1 data_323 324 | 0 3 1 2 2 2 2 data_324 325 | 1 3 1 2 2 3 1 data_325 326 | 0 3 1 2 2 3 2 data_326 327 | 1 3 1 2 2 4 1 data_327 328 | 0 3 1 2 2 4 2 data_328 329 | 0 3 1 2 3 1 1 data_329 330 | 1 3 1 2 3 1 2 data_330 331 | 1 3 1 2 3 2 1 data_331 332 | 0 3 1 2 3 2 2 data_332 333 | 1 3 1 2 3 3 1 data_333 334 | 0 3 1 2 3 3 2 data_334 335 | 1 3 1 2 3 4 1 data_335 336 | 0 3 1 2 3 4 2 data_336 337 | 0 3 2 1 1 1 1 data_337 338 | 0 3 2 1 1 1 2 data_338 339 | 0 3 2 1 1 2 1 data_339 340 | 1 3 2 1 1 2 2 data_340 341 | 0 3 2 1 1 3 1 data_341 342 | 1 3 2 1 1 3 2 data_342 343 | 0 3 2 1 1 4 1 data_343 344 | 1 3 2 1 1 4 2 data_344 345 | 0 3 2 1 2 1 1 data_345 346 | 1 3 2 1 2 1 2 data_346 347 | 1 3 2 1 2 2 1 data_347 348 | 0 3 2 1 2 2 2 data_348 349 | 1 3 2 1 2 3 1 data_349 350 | 0 3 2 1 2 3 2 data_350 351 | 1 3 2 1 2 4 1 data_351 352 | 0 3 2 1 2 4 2 data_352 353 | 0 3 2 1 3 1 1 data_353 354 | 1 3 2 1 3 1 2 data_354 355 | 1 3 2 1 3 2 1 data_355 356 | 0 3 2 1 3 2 2 data_356 357 | 1 3 2 1 3 3 1 data_357 358 | 0 3 2 1 3 3 2 data_358 359 | 1 3 2 1 3 4 1 data_359 360 | 0 3 2 1 3 4 2 data_360 361 | 0 3 2 2 1 1 1 data_361 362 | 1 3 2 2 1 1 2 data_362 363 | 1 3 2 2 1 2 1 data_363 364 | 0 3 2 2 1 2 2 data_364 365 | 1 3 2 2 1 3 1 data_365 366 | 0 3 2 2 1 3 2 data_366 367 | 1 3 2 2 1 4 1 data_367 368 | 0 3 2 2 1 4 2 data_368 369 | 1 3 2 2 2 1 1 data_369 370 | 0 3 2 2 2 1 2 data_370 371 | 0 3 2 2 2 2 1 data_371 372 | 0 3 2 2 2 2 2 data_372 373 | 0 3 2 2 2 3 1 data_373 374 | 0 3 2 2 2 3 2 data_374 375 | 0 3 2 2 2 4 1 data_375 376 | 0 3 2 2 2 4 2 data_376 377 | 1 3 2 2 3 1 1 data_377 378 | 0 3 2 2 3 1 2 data_378 379 | 0 3 2 2 3 2 1 data_379 380 | 0 3 2 2 3 2 2 data_380 381 | 0 3 2 2 3 3 1 data_381 382 | 0 3 2 2 3 3 2 data_382 383 | 0 3 2 2 3 4 1 data_383 384 | 0 3 2 2 3 4 2 data_384 385 | 0 3 3 1 1 1 1 data_385 386 | 0 3 3 1 1 1 2 data_386 387 | 0 3 3 1 1 2 1 data_387 388 | 1 3 3 1 1 2 2 data_388 389 | 0 3 3 1 1 3 1 data_389 390 | 1 3 3 1 1 3 2 data_390 391 | 0 3 3 1 1 4 1 data_391 392 | 1 3 3 1 1 4 2 data_392 393 | 0 3 3 1 2 1 1 data_393 394 | 1 3 3 1 2 1 2 data_394 395 | 1 3 3 1 2 2 1 data_395 396 | 0 3 3 1 2 2 2 data_396 397 | 1 3 3 1 2 3 1 data_397 398 | 0 3 3 1 2 3 2 data_398 399 | 1 3 3 1 2 4 1 data_399 400 | 0 3 3 1 2 4 2 data_400 401 | 0 3 3 1 3 1 1 data_401 402 | 1 3 3 1 3 1 2 data_402 403 | 1 3 3 1 3 2 1 data_403 404 | 0 3 3 1 3 2 2 data_404 405 | 1 3 3 1 3 3 1 
data_405 406 | 0 3 3 1 3 3 2 data_406 407 | 1 3 3 1 3 4 1 data_407 408 | 0 3 3 1 3 4 2 data_408 409 | 0 3 3 2 1 1 1 data_409 410 | 1 3 3 2 1 1 2 data_410 411 | 1 3 3 2 1 2 1 data_411 412 | 0 3 3 2 1 2 2 data_412 413 | 1 3 3 2 1 3 1 data_413 414 | 0 3 3 2 1 3 2 data_414 415 | 1 3 3 2 1 4 1 data_415 416 | 0 3 3 2 1 4 2 data_416 417 | 1 3 3 2 2 1 1 data_417 418 | 0 3 3 2 2 1 2 data_418 419 | 0 3 3 2 2 2 1 data_419 420 | 0 3 3 2 2 2 2 data_420 421 | 0 3 3 2 2 3 1 data_421 422 | 0 3 3 2 2 3 2 data_422 423 | 0 3 3 2 2 4 1 data_423 424 | 0 3 3 2 2 4 2 data_424 425 | 1 3 3 2 3 1 1 data_425 426 | 0 3 3 2 3 1 2 data_426 427 | 0 3 3 2 3 2 1 data_427 428 | 0 3 3 2 3 2 2 data_428 429 | 0 3 3 2 3 3 1 data_429 430 | 0 3 3 2 3 3 2 data_430 431 | 0 3 3 2 3 4 1 data_431 432 | 0 3 3 2 3 4 2 data_432 433 | -------------------------------------------------------------------------------- /data/monks/monks-2.train: -------------------------------------------------------------------------------- 1 | 0 1 1 1 1 2 2 data_4 2 | 0 1 1 1 1 4 1 data_7 3 | 0 1 1 1 2 1 1 data_9 4 | 0 1 1 1 2 1 2 data_10 5 | 0 1 1 1 2 2 1 data_11 6 | 0 1 1 1 2 3 1 data_13 7 | 0 1 1 1 2 4 1 data_15 8 | 0 1 1 1 3 2 1 data_19 9 | 0 1 1 1 3 4 1 data_23 10 | 0 1 1 2 1 1 1 data_25 11 | 0 1 1 2 1 1 2 data_26 12 | 0 1 1 2 2 3 1 data_37 13 | 0 1 1 2 2 4 1 data_39 14 | 1 1 1 2 2 4 2 data_40 15 | 0 1 1 2 3 1 2 data_42 16 | 1 1 1 2 3 2 2 data_44 17 | 0 1 2 1 1 1 2 data_50 18 | 0 1 2 1 2 1 2 data_58 19 | 1 1 2 1 2 2 2 data_60 20 | 0 1 2 1 2 3 1 data_61 21 | 1 1 2 1 2 3 2 data_62 22 | 0 1 2 1 2 4 1 data_63 23 | 0 1 2 1 3 1 1 data_65 24 | 0 1 2 1 3 1 2 data_66 25 | 1 1 2 1 3 2 2 data_68 26 | 0 1 2 1 3 3 1 data_69 27 | 1 1 2 1 3 3 2 data_70 28 | 0 1 2 1 3 4 1 data_71 29 | 1 1 2 1 3 4 2 data_72 30 | 0 1 2 2 1 2 1 data_75 31 | 0 1 2 2 1 4 1 data_79 32 | 1 1 2 2 2 3 1 data_85 33 | 1 1 2 2 2 4 1 data_87 34 | 0 1 2 2 3 1 1 data_89 35 | 1 1 2 2 3 1 2 data_90 36 | 1 1 2 2 3 3 1 data_93 37 | 0 1 2 2 3 3 2 data_94 38 | 1 1 2 2 3 4 1 data_95 39 | 0 1 2 2 3 4 2 data_96 40 | 0 1 3 1 1 1 2 data_98 41 | 0 1 3 1 1 2 2 data_100 42 | 0 1 3 1 1 3 1 data_101 43 | 0 1 3 1 1 3 2 data_102 44 | 0 1 3 1 2 2 1 data_107 45 | 1 1 3 1 2 2 2 data_108 46 | 1 1 3 1 2 3 2 data_110 47 | 0 1 3 1 2 4 1 data_111 48 | 1 1 3 1 3 2 2 data_116 49 | 0 1 3 1 3 3 1 data_117 50 | 1 1 3 1 3 4 2 data_120 51 | 0 1 3 2 1 3 1 data_125 52 | 1 1 3 2 1 3 2 data_126 53 | 0 1 3 2 1 4 1 data_127 54 | 1 1 3 2 2 1 2 data_130 55 | 0 1 3 2 2 3 2 data_134 56 | 0 1 3 2 2 4 2 data_136 57 | 1 1 3 2 3 2 1 data_139 58 | 0 2 1 1 1 1 1 data_145 59 | 0 2 1 1 1 2 2 data_148 60 | 0 2 1 1 1 3 1 data_149 61 | 1 2 1 1 2 2 2 data_156 62 | 0 2 1 1 3 1 2 data_162 63 | 1 2 1 1 3 2 2 data_164 64 | 1 2 1 1 3 3 2 data_166 65 | 0 2 1 1 3 4 1 data_167 66 | 0 2 1 2 1 1 1 data_169 67 | 1 2 1 2 1 2 2 data_172 68 | 0 2 1 2 1 4 1 data_175 69 | 1 2 1 2 2 2 1 data_179 70 | 0 2 1 2 2 4 2 data_184 71 | 0 2 1 2 3 1 1 data_185 72 | 1 2 1 2 3 1 2 data_186 73 | 0 2 1 2 3 2 2 data_188 74 | 0 2 1 2 3 3 2 data_190 75 | 0 2 1 2 3 4 2 data_192 76 | 0 2 2 1 1 3 1 data_197 77 | 1 2 2 1 1 4 2 data_200 78 | 0 2 2 1 2 1 1 data_201 79 | 1 2 2 1 2 3 1 data_205 80 | 1 2 2 1 3 3 1 data_213 81 | 0 2 2 1 3 3 2 data_214 82 | 1 2 2 1 3 4 1 data_215 83 | 0 2 2 2 1 1 1 data_217 84 | 0 2 2 2 1 2 2 data_220 85 | 0 2 2 2 1 3 2 data_222 86 | 1 2 2 2 1 4 1 data_223 87 | 0 2 2 2 1 4 2 data_224 88 | 1 2 2 2 2 1 1 data_225 89 | 0 2 2 2 2 2 2 data_228 90 | 0 2 2 2 2 3 1 data_229 91 | 1 2 2 2 3 1 1 data_233 92 | 0 2 2 2 3 2 1 data_235 93 | 0 2 2 2 3 2 2 data_236 94 | 0 2 2 2 3 
4 2 data_240 95 | 0 2 3 1 1 1 1 data_241 96 | 0 2 3 1 1 1 2 data_242 97 | 1 2 3 1 1 3 2 data_246 98 | 0 2 3 1 2 1 1 data_249 99 | 1 2 3 1 2 3 1 data_253 100 | 0 2 3 1 2 3 2 data_254 101 | 0 2 3 1 2 4 2 data_256 102 | 1 2 3 1 3 1 2 data_258 103 | 1 2 3 1 3 2 1 data_259 104 | 1 2 3 1 3 4 1 data_263 105 | 1 2 3 2 1 1 2 data_266 106 | 1 2 3 2 1 2 1 data_267 107 | 1 2 3 2 1 3 1 data_269 108 | 0 2 3 2 1 4 2 data_272 109 | 1 2 3 2 2 1 1 data_273 110 | 0 2 3 2 2 2 1 data_275 111 | 0 2 3 2 2 3 2 data_278 112 | 0 2 3 2 3 3 1 data_285 113 | 0 2 3 2 3 3 2 data_286 114 | 0 2 3 2 3 4 2 data_288 115 | 0 3 1 1 1 4 1 data_295 116 | 0 3 1 1 2 1 2 data_298 117 | 1 3 1 1 2 2 2 data_300 118 | 1 3 1 1 2 3 2 data_302 119 | 0 3 1 1 2 4 1 data_303 120 | 1 3 1 1 2 4 2 data_304 121 | 0 3 1 1 3 1 1 data_305 122 | 0 3 1 1 3 1 2 data_306 123 | 1 3 1 1 3 2 2 data_308 124 | 1 3 1 1 3 3 2 data_310 125 | 0 3 1 2 1 1 1 data_313 126 | 1 3 1 2 1 2 2 data_316 127 | 0 3 1 2 1 3 1 data_317 128 | 1 3 1 2 1 3 2 data_318 129 | 0 3 1 2 1 4 1 data_319 130 | 1 3 1 2 1 4 2 data_320 131 | 1 3 1 2 2 2 1 data_323 132 | 1 3 1 2 3 1 2 data_330 133 | 1 3 1 2 3 2 1 data_331 134 | 0 3 1 2 3 2 2 data_332 135 | 0 3 1 2 3 4 2 data_336 136 | 0 3 2 1 1 1 2 data_338 137 | 1 3 2 1 1 2 2 data_340 138 | 0 3 2 1 1 3 1 data_341 139 | 1 3 2 1 1 3 2 data_342 140 | 1 3 2 1 2 1 2 data_346 141 | 1 3 2 1 2 2 1 data_347 142 | 0 3 2 1 3 1 1 data_353 143 | 1 3 2 1 3 2 1 data_355 144 | 1 3 2 1 3 3 1 data_357 145 | 0 3 2 1 3 3 2 data_358 146 | 0 3 2 2 1 1 1 data_361 147 | 0 3 2 2 1 2 2 data_364 148 | 1 3 2 2 1 3 1 data_365 149 | 0 3 2 2 1 3 2 data_366 150 | 1 3 2 2 2 1 1 data_369 151 | 0 3 2 2 2 2 1 data_371 152 | 0 3 2 2 2 2 2 data_372 153 | 0 3 2 2 2 3 2 data_374 154 | 1 3 2 2 3 1 1 data_377 155 | 0 3 2 2 3 3 2 data_382 156 | 0 3 2 2 3 4 2 data_384 157 | 0 3 3 1 1 1 1 data_385 158 | 0 3 3 1 1 2 1 data_387 159 | 0 3 3 1 1 3 1 data_389 160 | 1 3 3 1 1 3 2 data_390 161 | 0 3 3 1 2 3 2 data_398 162 | 0 3 3 2 1 1 1 data_409 163 | 1 3 3 2 2 1 1 data_417 164 | 0 3 3 2 2 2 1 data_419 165 | 0 3 3 2 2 3 1 data_421 166 | 0 3 3 2 2 3 2 data_422 167 | 1 3 3 2 3 1 1 data_425 168 | 0 3 3 2 3 2 1 data_427 169 | 0 3 3 2 3 4 2 data_432 170 | -------------------------------------------------------------------------------- /data/monks/monks-3.test: -------------------------------------------------------------------------------- 1 | 1 1 1 1 1 1 1 data_1 2 | 1 1 1 1 1 1 2 data_2 3 | 1 1 1 1 1 2 1 data_3 4 | 1 1 1 1 1 2 2 data_4 5 | 1 1 1 1 1 3 1 data_5 6 | 1 1 1 1 1 3 2 data_6 7 | 0 1 1 1 1 4 1 data_7 8 | 0 1 1 1 1 4 2 data_8 9 | 1 1 1 1 2 1 1 data_9 10 | 1 1 1 1 2 1 2 data_10 11 | 1 1 1 1 2 2 1 data_11 12 | 1 1 1 1 2 2 2 data_12 13 | 1 1 1 1 2 3 1 data_13 14 | 1 1 1 1 2 3 2 data_14 15 | 0 1 1 1 2 4 1 data_15 16 | 0 1 1 1 2 4 2 data_16 17 | 1 1 1 1 3 1 1 data_17 18 | 1 1 1 1 3 1 2 data_18 19 | 1 1 1 1 3 2 1 data_19 20 | 1 1 1 1 3 2 2 data_20 21 | 1 1 1 1 3 3 1 data_21 22 | 1 1 1 1 3 3 2 data_22 23 | 0 1 1 1 3 4 1 data_23 24 | 0 1 1 1 3 4 2 data_24 25 | 1 1 1 2 1 1 1 data_25 26 | 1 1 1 2 1 1 2 data_26 27 | 1 1 1 2 1 2 1 data_27 28 | 1 1 1 2 1 2 2 data_28 29 | 1 1 1 2 1 3 1 data_29 30 | 1 1 1 2 1 3 2 data_30 31 | 0 1 1 2 1 4 1 data_31 32 | 0 1 1 2 1 4 2 data_32 33 | 1 1 1 2 2 1 1 data_33 34 | 1 1 1 2 2 1 2 data_34 35 | 1 1 1 2 2 2 1 data_35 36 | 1 1 1 2 2 2 2 data_36 37 | 1 1 1 2 2 3 1 data_37 38 | 1 1 1 2 2 3 2 data_38 39 | 0 1 1 2 2 4 1 data_39 40 | 0 1 1 2 2 4 2 data_40 41 | 1 1 1 2 3 1 1 data_41 42 | 1 1 1 2 3 1 2 data_42 43 | 1 1 1 2 3 2 1 data_43 44 | 1 1 1 2 3 2 2 data_44 45 | 1 
1 1 2 3 3 1 data_45 46 | 1 1 1 2 3 3 2 data_46 47 | 0 1 1 2 3 4 1 data_47 48 | 0 1 1 2 3 4 2 data_48 49 | 1 1 2 1 1 1 1 data_49 50 | 1 1 2 1 1 1 2 data_50 51 | 1 1 2 1 1 2 1 data_51 52 | 1 1 2 1 1 2 2 data_52 53 | 1 1 2 1 1 3 1 data_53 54 | 1 1 2 1 1 3 2 data_54 55 | 0 1 2 1 1 4 1 data_55 56 | 0 1 2 1 1 4 2 data_56 57 | 1 1 2 1 2 1 1 data_57 58 | 1 1 2 1 2 1 2 data_58 59 | 1 1 2 1 2 2 1 data_59 60 | 1 1 2 1 2 2 2 data_60 61 | 1 1 2 1 2 3 1 data_61 62 | 1 1 2 1 2 3 2 data_62 63 | 0 1 2 1 2 4 1 data_63 64 | 0 1 2 1 2 4 2 data_64 65 | 1 1 2 1 3 1 1 data_65 66 | 1 1 2 1 3 1 2 data_66 67 | 1 1 2 1 3 2 1 data_67 68 | 1 1 2 1 3 2 2 data_68 69 | 1 1 2 1 3 3 1 data_69 70 | 1 1 2 1 3 3 2 data_70 71 | 0 1 2 1 3 4 1 data_71 72 | 0 1 2 1 3 4 2 data_72 73 | 1 1 2 2 1 1 1 data_73 74 | 1 1 2 2 1 1 2 data_74 75 | 1 1 2 2 1 2 1 data_75 76 | 1 1 2 2 1 2 2 data_76 77 | 1 1 2 2 1 3 1 data_77 78 | 1 1 2 2 1 3 2 data_78 79 | 0 1 2 2 1 4 1 data_79 80 | 0 1 2 2 1 4 2 data_80 81 | 1 1 2 2 2 1 1 data_81 82 | 1 1 2 2 2 1 2 data_82 83 | 1 1 2 2 2 2 1 data_83 84 | 1 1 2 2 2 2 2 data_84 85 | 1 1 2 2 2 3 1 data_85 86 | 1 1 2 2 2 3 2 data_86 87 | 0 1 2 2 2 4 1 data_87 88 | 0 1 2 2 2 4 2 data_88 89 | 1 1 2 2 3 1 1 data_89 90 | 1 1 2 2 3 1 2 data_90 91 | 1 1 2 2 3 2 1 data_91 92 | 1 1 2 2 3 2 2 data_92 93 | 1 1 2 2 3 3 1 data_93 94 | 1 1 2 2 3 3 2 data_94 95 | 0 1 2 2 3 4 1 data_95 96 | 0 1 2 2 3 4 2 data_96 97 | 0 1 3 1 1 1 1 data_97 98 | 0 1 3 1 1 1 2 data_98 99 | 0 1 3 1 1 2 1 data_99 100 | 0 1 3 1 1 2 2 data_100 101 | 1 1 3 1 1 3 1 data_101 102 | 1 1 3 1 1 3 2 data_102 103 | 0 1 3 1 1 4 1 data_103 104 | 0 1 3 1 1 4 2 data_104 105 | 0 1 3 1 2 1 1 data_105 106 | 0 1 3 1 2 1 2 data_106 107 | 0 1 3 1 2 2 1 data_107 108 | 0 1 3 1 2 2 2 data_108 109 | 0 1 3 1 2 3 1 data_109 110 | 0 1 3 1 2 3 2 data_110 111 | 0 1 3 1 2 4 1 data_111 112 | 0 1 3 1 2 4 2 data_112 113 | 0 1 3 1 3 1 1 data_113 114 | 0 1 3 1 3 1 2 data_114 115 | 0 1 3 1 3 2 1 data_115 116 | 0 1 3 1 3 2 2 data_116 117 | 0 1 3 1 3 3 1 data_117 118 | 0 1 3 1 3 3 2 data_118 119 | 0 1 3 1 3 4 1 data_119 120 | 0 1 3 1 3 4 2 data_120 121 | 0 1 3 2 1 1 1 data_121 122 | 0 1 3 2 1 1 2 data_122 123 | 0 1 3 2 1 2 1 data_123 124 | 0 1 3 2 1 2 2 data_124 125 | 1 1 3 2 1 3 1 data_125 126 | 1 1 3 2 1 3 2 data_126 127 | 0 1 3 2 1 4 1 data_127 128 | 0 1 3 2 1 4 2 data_128 129 | 0 1 3 2 2 1 1 data_129 130 | 0 1 3 2 2 1 2 data_130 131 | 0 1 3 2 2 2 1 data_131 132 | 0 1 3 2 2 2 2 data_132 133 | 0 1 3 2 2 3 1 data_133 134 | 0 1 3 2 2 3 2 data_134 135 | 0 1 3 2 2 4 1 data_135 136 | 0 1 3 2 2 4 2 data_136 137 | 0 1 3 2 3 1 1 data_137 138 | 0 1 3 2 3 1 2 data_138 139 | 0 1 3 2 3 2 1 data_139 140 | 0 1 3 2 3 2 2 data_140 141 | 0 1 3 2 3 3 1 data_141 142 | 0 1 3 2 3 3 2 data_142 143 | 0 1 3 2 3 4 1 data_143 144 | 0 1 3 2 3 4 2 data_144 145 | 1 2 1 1 1 1 1 data_145 146 | 1 2 1 1 1 1 2 data_146 147 | 1 2 1 1 1 2 1 data_147 148 | 1 2 1 1 1 2 2 data_148 149 | 1 2 1 1 1 3 1 data_149 150 | 1 2 1 1 1 3 2 data_150 151 | 0 2 1 1 1 4 1 data_151 152 | 0 2 1 1 1 4 2 data_152 153 | 1 2 1 1 2 1 1 data_153 154 | 1 2 1 1 2 1 2 data_154 155 | 1 2 1 1 2 2 1 data_155 156 | 1 2 1 1 2 2 2 data_156 157 | 1 2 1 1 2 3 1 data_157 158 | 1 2 1 1 2 3 2 data_158 159 | 0 2 1 1 2 4 1 data_159 160 | 0 2 1 1 2 4 2 data_160 161 | 1 2 1 1 3 1 1 data_161 162 | 1 2 1 1 3 1 2 data_162 163 | 1 2 1 1 3 2 1 data_163 164 | 1 2 1 1 3 2 2 data_164 165 | 1 2 1 1 3 3 1 data_165 166 | 1 2 1 1 3 3 2 data_166 167 | 0 2 1 1 3 4 1 data_167 168 | 0 2 1 1 3 4 2 data_168 169 | 1 2 1 2 1 1 1 data_169 170 | 1 2 1 2 1 1 2 data_170 171 | 1 2 1 2 1 2 
1 data_171 172 | 1 2 1 2 1 2 2 data_172 173 | 1 2 1 2 1 3 1 data_173 174 | 1 2 1 2 1 3 2 data_174 175 | 0 2 1 2 1 4 1 data_175 176 | 0 2 1 2 1 4 2 data_176 177 | 1 2 1 2 2 1 1 data_177 178 | 1 2 1 2 2 1 2 data_178 179 | 1 2 1 2 2 2 1 data_179 180 | 1 2 1 2 2 2 2 data_180 181 | 1 2 1 2 2 3 1 data_181 182 | 1 2 1 2 2 3 2 data_182 183 | 0 2 1 2 2 4 1 data_183 184 | 0 2 1 2 2 4 2 data_184 185 | 1 2 1 2 3 1 1 data_185 186 | 1 2 1 2 3 1 2 data_186 187 | 1 2 1 2 3 2 1 data_187 188 | 1 2 1 2 3 2 2 data_188 189 | 1 2 1 2 3 3 1 data_189 190 | 1 2 1 2 3 3 2 data_190 191 | 0 2 1 2 3 4 1 data_191 192 | 0 2 1 2 3 4 2 data_192 193 | 1 2 2 1 1 1 1 data_193 194 | 1 2 2 1 1 1 2 data_194 195 | 1 2 2 1 1 2 1 data_195 196 | 1 2 2 1 1 2 2 data_196 197 | 1 2 2 1 1 3 1 data_197 198 | 1 2 2 1 1 3 2 data_198 199 | 0 2 2 1 1 4 1 data_199 200 | 0 2 2 1 1 4 2 data_200 201 | 1 2 2 1 2 1 1 data_201 202 | 1 2 2 1 2 1 2 data_202 203 | 1 2 2 1 2 2 1 data_203 204 | 1 2 2 1 2 2 2 data_204 205 | 1 2 2 1 2 3 1 data_205 206 | 1 2 2 1 2 3 2 data_206 207 | 0 2 2 1 2 4 1 data_207 208 | 0 2 2 1 2 4 2 data_208 209 | 1 2 2 1 3 1 1 data_209 210 | 1 2 2 1 3 1 2 data_210 211 | 1 2 2 1 3 2 1 data_211 212 | 1 2 2 1 3 2 2 data_212 213 | 1 2 2 1 3 3 1 data_213 214 | 1 2 2 1 3 3 2 data_214 215 | 0 2 2 1 3 4 1 data_215 216 | 0 2 2 1 3 4 2 data_216 217 | 1 2 2 2 1 1 1 data_217 218 | 1 2 2 2 1 1 2 data_218 219 | 1 2 2 2 1 2 1 data_219 220 | 1 2 2 2 1 2 2 data_220 221 | 1 2 2 2 1 3 1 data_221 222 | 1 2 2 2 1 3 2 data_222 223 | 0 2 2 2 1 4 1 data_223 224 | 0 2 2 2 1 4 2 data_224 225 | 1 2 2 2 2 1 1 data_225 226 | 1 2 2 2 2 1 2 data_226 227 | 1 2 2 2 2 2 1 data_227 228 | 1 2 2 2 2 2 2 data_228 229 | 1 2 2 2 2 3 1 data_229 230 | 1 2 2 2 2 3 2 data_230 231 | 0 2 2 2 2 4 1 data_231 232 | 0 2 2 2 2 4 2 data_232 233 | 1 2 2 2 3 1 1 data_233 234 | 1 2 2 2 3 1 2 data_234 235 | 1 2 2 2 3 2 1 data_235 236 | 1 2 2 2 3 2 2 data_236 237 | 1 2 2 2 3 3 1 data_237 238 | 1 2 2 2 3 3 2 data_238 239 | 0 2 2 2 3 4 1 data_239 240 | 0 2 2 2 3 4 2 data_240 241 | 0 2 3 1 1 1 1 data_241 242 | 0 2 3 1 1 1 2 data_242 243 | 0 2 3 1 1 2 1 data_243 244 | 0 2 3 1 1 2 2 data_244 245 | 1 2 3 1 1 3 1 data_245 246 | 1 2 3 1 1 3 2 data_246 247 | 0 2 3 1 1 4 1 data_247 248 | 0 2 3 1 1 4 2 data_248 249 | 0 2 3 1 2 1 1 data_249 250 | 0 2 3 1 2 1 2 data_250 251 | 0 2 3 1 2 2 1 data_251 252 | 0 2 3 1 2 2 2 data_252 253 | 0 2 3 1 2 3 1 data_253 254 | 0 2 3 1 2 3 2 data_254 255 | 0 2 3 1 2 4 1 data_255 256 | 0 2 3 1 2 4 2 data_256 257 | 0 2 3 1 3 1 1 data_257 258 | 0 2 3 1 3 1 2 data_258 259 | 0 2 3 1 3 2 1 data_259 260 | 0 2 3 1 3 2 2 data_260 261 | 0 2 3 1 3 3 1 data_261 262 | 0 2 3 1 3 3 2 data_262 263 | 0 2 3 1 3 4 1 data_263 264 | 0 2 3 1 3 4 2 data_264 265 | 0 2 3 2 1 1 1 data_265 266 | 0 2 3 2 1 1 2 data_266 267 | 0 2 3 2 1 2 1 data_267 268 | 0 2 3 2 1 2 2 data_268 269 | 1 2 3 2 1 3 1 data_269 270 | 1 2 3 2 1 3 2 data_270 271 | 0 2 3 2 1 4 1 data_271 272 | 0 2 3 2 1 4 2 data_272 273 | 0 2 3 2 2 1 1 data_273 274 | 0 2 3 2 2 1 2 data_274 275 | 0 2 3 2 2 2 1 data_275 276 | 0 2 3 2 2 2 2 data_276 277 | 0 2 3 2 2 3 1 data_277 278 | 0 2 3 2 2 3 2 data_278 279 | 0 2 3 2 2 4 1 data_279 280 | 0 2 3 2 2 4 2 data_280 281 | 0 2 3 2 3 1 1 data_281 282 | 0 2 3 2 3 1 2 data_282 283 | 0 2 3 2 3 2 1 data_283 284 | 0 2 3 2 3 2 2 data_284 285 | 0 2 3 2 3 3 1 data_285 286 | 0 2 3 2 3 3 2 data_286 287 | 0 2 3 2 3 4 1 data_287 288 | 0 2 3 2 3 4 2 data_288 289 | 1 3 1 1 1 1 1 data_289 290 | 1 3 1 1 1 1 2 data_290 291 | 1 3 1 1 1 2 1 data_291 292 | 1 3 1 1 1 2 2 data_292 293 | 1 3 1 1 1 3 1 data_293 294 | 
1 3 1 1 1 3 2 data_294 295 | 0 3 1 1 1 4 1 data_295 296 | 0 3 1 1 1 4 2 data_296 297 | 1 3 1 1 2 1 1 data_297 298 | 1 3 1 1 2 1 2 data_298 299 | 1 3 1 1 2 2 1 data_299 300 | 1 3 1 1 2 2 2 data_300 301 | 1 3 1 1 2 3 1 data_301 302 | 1 3 1 1 2 3 2 data_302 303 | 0 3 1 1 2 4 1 data_303 304 | 0 3 1 1 2 4 2 data_304 305 | 1 3 1 1 3 1 1 data_305 306 | 1 3 1 1 3 1 2 data_306 307 | 1 3 1 1 3 2 1 data_307 308 | 1 3 1 1 3 2 2 data_308 309 | 1 3 1 1 3 3 1 data_309 310 | 1 3 1 1 3 3 2 data_310 311 | 0 3 1 1 3 4 1 data_311 312 | 0 3 1 1 3 4 2 data_312 313 | 1 3 1 2 1 1 1 data_313 314 | 1 3 1 2 1 1 2 data_314 315 | 1 3 1 2 1 2 1 data_315 316 | 1 3 1 2 1 2 2 data_316 317 | 1 3 1 2 1 3 1 data_317 318 | 1 3 1 2 1 3 2 data_318 319 | 0 3 1 2 1 4 1 data_319 320 | 0 3 1 2 1 4 2 data_320 321 | 1 3 1 2 2 1 1 data_321 322 | 1 3 1 2 2 1 2 data_322 323 | 1 3 1 2 2 2 1 data_323 324 | 1 3 1 2 2 2 2 data_324 325 | 1 3 1 2 2 3 1 data_325 326 | 1 3 1 2 2 3 2 data_326 327 | 0 3 1 2 2 4 1 data_327 328 | 0 3 1 2 2 4 2 data_328 329 | 1 3 1 2 3 1 1 data_329 330 | 1 3 1 2 3 1 2 data_330 331 | 1 3 1 2 3 2 1 data_331 332 | 1 3 1 2 3 2 2 data_332 333 | 1 3 1 2 3 3 1 data_333 334 | 1 3 1 2 3 3 2 data_334 335 | 0 3 1 2 3 4 1 data_335 336 | 0 3 1 2 3 4 2 data_336 337 | 1 3 2 1 1 1 1 data_337 338 | 1 3 2 1 1 1 2 data_338 339 | 1 3 2 1 1 2 1 data_339 340 | 1 3 2 1 1 2 2 data_340 341 | 1 3 2 1 1 3 1 data_341 342 | 1 3 2 1 1 3 2 data_342 343 | 0 3 2 1 1 4 1 data_343 344 | 0 3 2 1 1 4 2 data_344 345 | 1 3 2 1 2 1 1 data_345 346 | 1 3 2 1 2 1 2 data_346 347 | 1 3 2 1 2 2 1 data_347 348 | 1 3 2 1 2 2 2 data_348 349 | 1 3 2 1 2 3 1 data_349 350 | 1 3 2 1 2 3 2 data_350 351 | 0 3 2 1 2 4 1 data_351 352 | 0 3 2 1 2 4 2 data_352 353 | 1 3 2 1 3 1 1 data_353 354 | 1 3 2 1 3 1 2 data_354 355 | 1 3 2 1 3 2 1 data_355 356 | 1 3 2 1 3 2 2 data_356 357 | 1 3 2 1 3 3 1 data_357 358 | 1 3 2 1 3 3 2 data_358 359 | 0 3 2 1 3 4 1 data_359 360 | 0 3 2 1 3 4 2 data_360 361 | 1 3 2 2 1 1 1 data_361 362 | 1 3 2 2 1 1 2 data_362 363 | 1 3 2 2 1 2 1 data_363 364 | 1 3 2 2 1 2 2 data_364 365 | 1 3 2 2 1 3 1 data_365 366 | 1 3 2 2 1 3 2 data_366 367 | 0 3 2 2 1 4 1 data_367 368 | 0 3 2 2 1 4 2 data_368 369 | 1 3 2 2 2 1 1 data_369 370 | 1 3 2 2 2 1 2 data_370 371 | 1 3 2 2 2 2 1 data_371 372 | 1 3 2 2 2 2 2 data_372 373 | 1 3 2 2 2 3 1 data_373 374 | 1 3 2 2 2 3 2 data_374 375 | 0 3 2 2 2 4 1 data_375 376 | 0 3 2 2 2 4 2 data_376 377 | 1 3 2 2 3 1 1 data_377 378 | 1 3 2 2 3 1 2 data_378 379 | 1 3 2 2 3 2 1 data_379 380 | 1 3 2 2 3 2 2 data_380 381 | 1 3 2 2 3 3 1 data_381 382 | 1 3 2 2 3 3 2 data_382 383 | 0 3 2 2 3 4 1 data_383 384 | 0 3 2 2 3 4 2 data_384 385 | 0 3 3 1 1 1 1 data_385 386 | 0 3 3 1 1 1 2 data_386 387 | 0 3 3 1 1 2 1 data_387 388 | 0 3 3 1 1 2 2 data_388 389 | 1 3 3 1 1 3 1 data_389 390 | 1 3 3 1 1 3 2 data_390 391 | 0 3 3 1 1 4 1 data_391 392 | 0 3 3 1 1 4 2 data_392 393 | 0 3 3 1 2 1 1 data_393 394 | 0 3 3 1 2 1 2 data_394 395 | 0 3 3 1 2 2 1 data_395 396 | 0 3 3 1 2 2 2 data_396 397 | 0 3 3 1 2 3 1 data_397 398 | 0 3 3 1 2 3 2 data_398 399 | 0 3 3 1 2 4 1 data_399 400 | 0 3 3 1 2 4 2 data_400 401 | 0 3 3 1 3 1 1 data_401 402 | 0 3 3 1 3 1 2 data_402 403 | 0 3 3 1 3 2 1 data_403 404 | 0 3 3 1 3 2 2 data_404 405 | 0 3 3 1 3 3 1 data_405 406 | 0 3 3 1 3 3 2 data_406 407 | 0 3 3 1 3 4 1 data_407 408 | 0 3 3 1 3 4 2 data_408 409 | 0 3 3 2 1 1 1 data_409 410 | 0 3 3 2 1 1 2 data_410 411 | 0 3 3 2 1 2 1 data_411 412 | 0 3 3 2 1 2 2 data_412 413 | 1 3 3 2 1 3 1 data_413 414 | 1 3 3 2 1 3 2 data_414 415 | 0 3 3 2 1 4 1 data_415 416 | 0 3 3 2 1 4 2 
data_416 417 | 0 3 3 2 2 1 1 data_417 418 | 0 3 3 2 2 1 2 data_418 419 | 0 3 3 2 2 2 1 data_419 420 | 0 3 3 2 2 2 2 data_420 421 | 0 3 3 2 2 3 1 data_421 422 | 0 3 3 2 2 3 2 data_422 423 | 0 3 3 2 2 4 1 data_423 424 | 0 3 3 2 2 4 2 data_424 425 | 0 3 3 2 3 1 1 data_425 426 | 0 3 3 2 3 1 2 data_426 427 | 0 3 3 2 3 2 1 data_427 428 | 0 3 3 2 3 2 2 data_428 429 | 0 3 3 2 3 3 1 data_429 430 | 0 3 3 2 3 3 2 data_430 431 | 0 3 3 2 3 4 1 data_431 432 | 0 3 3 2 3 4 2 data_432 433 | -------------------------------------------------------------------------------- /data/monks/monks-3.train: -------------------------------------------------------------------------------- 1 | 1 1 1 1 1 1 2 data_2 2 | 1 1 1 1 1 2 1 data_3 3 | 1 1 1 1 1 2 2 data_4 4 | 0 1 1 1 1 3 1 data_5 5 | 0 1 1 1 1 4 1 data_7 6 | 1 1 1 1 2 1 1 data_9 7 | 1 1 1 1 2 2 2 data_12 8 | 0 1 1 1 2 4 2 data_16 9 | 1 1 1 2 1 2 2 data_28 10 | 0 1 1 2 1 4 2 data_32 11 | 1 1 1 2 2 2 2 data_36 12 | 0 1 1 2 2 4 1 data_39 13 | 0 1 1 2 2 4 2 data_40 14 | 1 1 1 2 3 1 1 data_41 15 | 1 1 1 2 3 1 2 data_42 16 | 1 1 1 2 3 3 1 data_45 17 | 1 1 1 2 3 3 2 data_46 18 | 1 1 2 1 1 3 1 data_53 19 | 1 1 2 1 2 2 1 data_59 20 | 1 1 2 1 2 2 2 data_60 21 | 0 1 2 1 2 3 1 data_61 22 | 1 1 2 1 3 1 1 data_65 23 | 1 1 2 1 3 1 2 data_66 24 | 1 1 2 1 3 2 1 data_67 25 | 1 1 2 1 3 2 2 data_68 26 | 1 1 2 1 3 3 2 data_70 27 | 0 1 2 1 3 4 1 data_71 28 | 1 1 2 2 1 3 1 data_77 29 | 0 1 2 2 1 4 2 data_80 30 | 1 1 2 2 2 1 1 data_81 31 | 1 1 2 2 2 2 1 data_83 32 | 1 1 2 2 2 2 2 data_84 33 | 1 1 2 2 3 1 1 data_89 34 | 1 1 2 2 3 2 1 data_91 35 | 1 1 2 2 3 2 2 data_92 36 | 0 1 3 1 1 2 1 data_99 37 | 0 1 3 1 1 4 1 data_103 38 | 0 1 3 1 2 3 2 data_110 39 | 0 1 3 1 2 4 1 data_111 40 | 0 1 3 1 3 1 1 data_113 41 | 0 1 3 1 3 3 1 data_117 42 | 0 1 3 2 1 1 1 data_121 43 | 0 1 3 2 1 1 2 data_122 44 | 0 1 3 2 1 2 1 data_123 45 | 0 1 3 2 1 4 2 data_128 46 | 0 1 3 2 2 3 2 data_134 47 | 0 1 3 2 2 4 2 data_136 48 | 0 1 3 2 3 4 1 data_143 49 | 1 2 1 1 1 1 1 data_145 50 | 1 2 1 1 1 1 2 data_146 51 | 0 2 1 1 1 4 1 data_151 52 | 0 2 1 1 1 4 2 data_152 53 | 1 2 1 1 2 1 1 data_153 54 | 1 2 1 1 2 1 2 data_154 55 | 1 2 1 1 3 2 2 data_164 56 | 1 2 1 1 3 3 2 data_166 57 | 0 2 1 1 3 4 1 data_167 58 | 1 2 1 2 1 2 2 data_172 59 | 0 2 1 2 2 4 1 data_183 60 | 1 2 1 2 3 1 2 data_186 61 | 1 2 2 1 1 3 2 data_198 62 | 0 2 2 1 1 4 2 data_200 63 | 1 2 2 1 2 1 2 data_202 64 | 0 2 2 1 2 2 1 data_203 65 | 1 2 2 1 3 1 1 data_209 66 | 1 2 2 1 3 2 2 data_212 67 | 0 2 2 1 3 3 1 data_213 68 | 0 2 2 1 3 3 2 data_214 69 | 0 2 2 1 3 4 2 data_216 70 | 1 2 2 2 1 2 2 data_220 71 | 1 2 2 2 2 1 2 data_226 72 | 1 2 2 2 2 3 1 data_229 73 | 1 2 2 2 2 3 2 data_230 74 | 0 2 2 2 3 4 1 data_239 75 | 1 2 3 1 1 3 1 data_245 76 | 0 2 3 1 2 1 1 data_249 77 | 0 2 3 1 2 2 1 data_251 78 | 0 2 3 1 2 2 2 data_252 79 | 0 2 3 1 2 3 2 data_254 80 | 0 2 3 1 3 3 1 data_261 81 | 0 2 3 2 1 1 2 data_266 82 | 0 2 3 2 1 2 2 data_268 83 | 0 2 3 2 1 4 1 data_271 84 | 0 2 3 2 2 3 1 data_277 85 | 0 2 3 2 2 4 2 data_280 86 | 0 2 3 2 3 1 1 data_281 87 | 0 2 3 2 3 2 1 data_283 88 | 0 2 3 2 3 4 2 data_288 89 | 1 3 1 1 1 1 1 data_289 90 | 1 3 1 1 1 2 1 data_291 91 | 1 3 1 1 1 3 1 data_293 92 | 0 3 1 1 2 4 2 data_304 93 | 1 3 1 1 3 1 2 data_306 94 | 0 3 1 1 3 4 2 data_312 95 | 1 3 1 2 1 2 1 data_315 96 | 1 3 1 2 2 3 2 data_326 97 | 0 3 1 2 2 4 2 data_328 98 | 1 3 1 2 3 1 1 data_329 99 | 1 3 2 1 1 2 2 data_340 100 | 0 3 2 1 1 4 1 data_343 101 | 1 3 2 1 2 3 1 data_349 102 | 1 3 2 1 3 1 2 data_354 103 | 1 3 2 2 1 2 2 data_364 104 | 1 3 2 2 1 3 2 data_366 105 | 1 3 2 2 2 1 2 
data_370 106 | 1 3 2 2 3 1 1 data_377 107 | 1 3 2 2 3 3 2 data_382 108 | 0 3 2 2 3 4 1 data_383 109 | 1 3 3 1 1 3 2 data_390 110 | 1 3 3 1 1 4 1 data_391 111 | 0 3 3 1 2 4 2 data_400 112 | 0 3 3 1 3 1 1 data_401 113 | 0 3 3 1 3 2 1 data_403 114 | 0 3 3 1 3 2 2 data_404 115 | 0 3 3 1 3 4 1 data_407 116 | 0 3 3 2 1 1 1 data_409 117 | 0 3 3 2 1 1 2 data_410 118 | 0 3 3 2 2 2 2 data_420 119 | 0 3 3 2 2 3 2 data_422 120 | 0 3 3 2 3 1 1 data_425 121 | 0 3 3 2 3 3 2 data_430 122 | 0 3 3 2 3 4 2 data_432 123 | -------------------------------------------------------------------------------- /data/monks/monks.names: -------------------------------------------------------------------------------- 1 | 2 | 1. Title: The Monk's Problems 3 | 4 | 2. Sources: 5 | (a) Donor: Sebastian Thrun 6 | School of Computer Science 7 | Carnegie Mellon University 8 | Pittsburgh, PA 15213, USA 9 | 10 | E-mail: thrun@cs.cmu.edu 11 | 12 | (b) Date: October 1992 13 | 14 | 3. Past Usage: 15 | 16 | - See File: thrun.comparison.ps.Z 17 | 18 | - Wnek, J., "Hypothesis-driven Constructive Induction," PhD dissertation, 19 | School of Information Technology and Engineering, Reports of Machine 20 | Learning and Inference Laboratory, MLI 93-2, Center for Artificial 21 | Intelligence, George Mason University, March 1993. 22 | 23 | - Wnek, J. and Michalski, R.S., "Comparing Symbolic and 24 | Subsymbolic Learning: Three Studies," in Machine Learning: A 25 | Multistrategy Approach, Vol. 4., R.S. Michalski and G. Tecuci (Eds.), 26 | Morgan Kaufmann, San Mateo, CA, 1993. 27 | 28 | 4. Relevant Information: 29 | 30 | The MONK's problem were the basis of a first international comparison 31 | of learning algorithms. The result of this comparison is summarized in 32 | "The MONK's Problems - A Performance Comparison of Different Learning 33 | algorithms" by S.B. Thrun, J. Bala, E. Bloedorn, I. Bratko, B. 34 | Cestnik, J. Cheng, K. De Jong, S. Dzeroski, S.E. Fahlman, D. Fisher, 35 | R. Hamann, K. Kaufman, S. Keller, I. Kononenko, J. Kreuziger, R.S. 36 | Michalski, T. Mitchell, P. Pachowicz, Y. Reich H. Vafaie, W. Van de 37 | Welde, W. Wenzel, J. Wnek, and J. Zhang has been published as 38 | Technical Report CS-CMU-91-197, Carnegie Mellon University in Dec. 39 | 1991. 40 | 41 | One significant characteristic of this comparison is that it was 42 | performed by a collection of researchers, each of whom was an advocate 43 | of the technique they tested (often they were the creators of the 44 | various methods). In this sense, the results are less biased than in 45 | comparisons performed by a single person advocating a specific 46 | learning method, and more accurately reflect the generalization 47 | behavior of the learning techniques as applied by knowledgeable users. 48 | 49 | There are three MONK's problems. The domains for all MONK's problems 50 | are the same (described below). One of the MONK's problems has noise 51 | added. For each problem, the domain has been partitioned into a train 52 | and test set. 53 | 54 | 5. Number of Instances: 432 55 | 56 | 6. Number of Attributes: 8 (including class attribute) 57 | 58 | 7. Attribute information: 59 | 1. class: 0, 1 60 | 2. a1: 1, 2, 3 61 | 3. a2: 1, 2, 3 62 | 4. a3: 1, 2 63 | 5. a4: 1, 2, 3 64 | 6. a5: 1, 2, 3, 4 65 | 7. a6: 1, 2 66 | 8. Id: (A unique symbol for each instance) 67 | 68 | 8. Missing Attribute Values: None 69 | 70 | 9. 
Target Concepts associated to the MONK's problem: 71 | 72 | MONK-1: (a1 = a2) or (a5 = 1) 73 | 74 | MONK-2: EXACTLY TWO of {a1 = 1, a2 = 1, a3 = 1, a4 = 1, a5 = 1, a6 = 1} 75 | 76 | MONK-3: (a5 = 3 and a4 = 1) or (a5 /= 4 and a2 /= 3) 77 | (5% class noise added to the training set) 78 | 79 | -------------------------------------------------------------------------------- /data/soybean-small/Index: -------------------------------------------------------------------------------- 1 | Index of soybean 2 | 3 | 02 Dec 1996 484 Index 4 | 16 Jul 1992 26619 soybean-explanation 5 | 09 Jul 1990 5242 soybean-large.names 6 | 23 May 1990 2528 soybean-small.names 7 | 22 May 1990 3431 fisher-order 8 | 22 May 1990 3431 stepp-order 9 | 26 Mar 1990 942 why-various-soybean-databases 10 | 26 Feb 1990 34197 backup-large.test 11 | 26 Feb 1990 26643 soybean-large.data 12 | 26 Feb 1990 32572 soybean-large.test 13 | 26 Feb 1990 3431 soybean-small.data 14 | 30 May 1989 27355 backup-large.data 15 | -------------------------------------------------------------------------------- /data/soybean-small/fisher-order: -------------------------------------------------------------------------------- 1 | 4,0,2,1,1,1,0,1,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 2 | 5,0,2,1,0,3,1,1,1,2,1,1,0,2,2,0,0,0,1,1,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 3 | 3,0,2,1,0,2,0,2,1,1,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 4 | 6,0,2,1,0,1,1,1,0,0,1,1,0,2,2,0,0,0,1,1,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 5 | 4,0,2,1,0,3,0,2,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 6 | 5,0,2,1,0,2,0,1,1,0,1,1,0,2,2,0,0,0,1,1,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 7 | 3,0,2,1,0,2,1,1,0,1,1,1,0,2,2,0,0,0,1,1,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 8 | 3,0,2,1,0,1,0,2,1,2,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 9 | 6,0,2,1,0,3,0,1,1,1,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 10 | 6,0,2,1,0,1,0,1,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 11 | 6,0,0,2,1,0,2,1,0,0,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 12 | 4,0,0,1,0,2,3,1,1,1,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 13 | 5,0,0,2,0,3,2,1,0,2,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 14 | 6,0,0,1,1,3,3,1,1,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 15 | 3,0,0,2,1,0,2,1,0,1,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 16 | 4,0,0,1,1,1,3,1,1,1,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 17 | 3,0,0,1,0,1,2,1,0,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 18 | 5,0,0,2,1,2,2,1,0,2,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 19 | 6,0,0,2,0,1,3,1,1,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 20 | 5,0,0,2,1,3,3,1,1,2,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 21 | 0,1,2,0,0,1,1,1,1,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 22 | 2,1,2,0,0,3,1,2,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,D3 23 | 2,1,2,0,0,2,1,1,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 24 | 0,1,2,0,0,0,1,1,1,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,D3 25 | 0,1,2,0,0,2,1,1,1,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,D3 26 | 4,0,2,0,1,0,1,2,0,2,1,1,0,2,2,0,0,0,1,1,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 27 | 2,1,2,0,0,3,1,2,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 28 | 0,1,2,0,0,0,1,1,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,1,D3 29 | 3,0,2,0,1,3,1,2,0,1,1,0,0,2,2,0,0,0,1,1,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 30 | 0,1,2,0,0,1,1,2,1,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,D3 31 | 
2,1,2,1,1,3,1,2,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 32 | 0,1,1,1,0,1,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 33 | 3,1,2,0,0,1,1,2,1,0,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 34 | 2,1,2,1,1,1,1,2,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 35 | 1,1,2,0,0,3,1,1,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 36 | 1,1,2,1,0,0,1,2,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 37 | 0,1,2,1,0,3,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 38 | 2,1,2,0,0,1,1,2,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 39 | 3,1,2,0,0,2,1,2,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 40 | 3,1,1,0,0,2,1,2,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 41 | 0,1,2,1,1,1,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 42 | 1,1,2,1,1,3,1,2,0,1,1,1,0,2,2,0,0,0,1,1,1,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 43 | 1,1,2,0,0,0,1,2,1,0,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 44 | 1,1,2,1,1,2,3,1,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 45 | 2,1,1,0,0,3,1,2,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 46 | 0,1,1,1,1,2,1,2,1,0,1,1,0,2,2,0,0,0,1,1,2,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 47 | 0,1,2,1,0,3,1,1,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 48 | -------------------------------------------------------------------------------- /data/soybean-small/soybean-large.names: -------------------------------------------------------------------------------- 1 | 1. Title: Large Soybean Database 2 | 3 | 2. Sources: 4 | (a) R.S. Michalski and R.L. Chilausky "Learning by Being Told and 5 | Learning from Examples: An Experimental Comparison of the Two 6 | Methods of Knowledge Acquisition in the Context of Developing 7 | an Expert System for Soybean Disease Diagnosis", International 8 | Journal of Policy Analysis and Information Systems, Vol. 4, 9 | No. 2, 1980. 10 | (b) Donor: Ming Tan & Jeff Schlimmer (Jeff.Schlimmer%cs.cmu.edu) 11 | (c) Date: 11 July 1988 12 | 13 | 3. Past Usage: 14 | 1. See above. 15 | 2. Tan, M., & Eshelman, L. (1988). Using weighted networks to represent 16 | classification knowledge in noisy domains. Proceedings of the Fifth 17 | International Conference on Machine Learning (pp. 121-134). Ann Arbor, 18 | Michigan: Morgan Kaufmann. 19 | -- IWN recorded a 97.1% classification accuracy 20 | -- 290 training and 340 test instances 21 | 3. Fisher,D.H. & Schlimmer,J.C. (1988). Concept Simplification and 22 | Predictive Accuracy. Proceedings of the Fifth 23 | International Conference on Machine Learning (pp. 22-28). Ann Arbor, 24 | Michigan: Morgan Kaufmann. 25 | -- Notes why this database is highly predictable 26 | 27 | 4. Relevant Information Paragraph: 28 | There are 19 classes, only the first 15 of which have been used in prior 29 | work. The folklore seems to be that the last four classes are 30 | unjustified by the data since they have so few examples. 31 | There are 35 categorical attributes, some nominal and some ordered. The 32 | value ``dna'' means does not apply. The values for attributes are 33 | encoded numerically, with the first value encoded as ``0,'' the second as 34 | ``1,'' and so forth. An unknown values is encoded as ``?''. 35 | 36 | 5. Number of Instances: 307 37 | 38 | 6. Number of Attributes: 35 (all have been nominalized) 39 | 40 | 7. 
Attribute Information: 41 | -- 19 Classes 42 | diaporthe-stem-canker, charcoal-rot, rhizoctonia-root-rot, 43 | phytophthora-rot, brown-stem-rot, powdery-mildew, 44 | downy-mildew, brown-spot, bacterial-blight, 45 | bacterial-pustule, purple-seed-stain, anthracnose, 46 | phyllosticta-leaf-spot, alternarialeaf-spot, 47 | frog-eye-leaf-spot, diaporthe-pod-&-stem-blight, 48 | cyst-nematode, 2-4-d-injury, herbicide-injury. 49 | 50 | 1. date: april,may,june,july,august,september,october,?. 51 | 2. plant-stand: normal,lt-normal,?. 52 | 3. precip: lt-norm,norm,gt-norm,?. 53 | 4. temp: lt-norm,norm,gt-norm,?. 54 | 5. hail: yes,no,?. 55 | 6. crop-hist: diff-lst-year,same-lst-yr,same-lst-two-yrs, 56 | same-lst-sev-yrs,?. 57 | 7. area-damaged: scattered,low-areas,upper-areas,whole-field,?. 58 | 8. severity: minor,pot-severe,severe,?. 59 | 9. seed-tmt: none,fungicide,other,?. 60 | 10. germination: 90-100%,80-89%,lt-80%,?. 61 | 11. plant-growth: norm,abnorm,?. 62 | 12. leaves: norm,abnorm. 63 | 13. leafspots-halo: absent,yellow-halos,no-yellow-halos,?. 64 | 14. leafspots-marg: w-s-marg,no-w-s-marg,dna,?. 65 | 15. leafspot-size: lt-1/8,gt-1/8,dna,?. 66 | 16. leaf-shread: absent,present,?. 67 | 17. leaf-malf: absent,present,?. 68 | 18. leaf-mild: absent,upper-surf,lower-surf,?. 69 | 19. stem: norm,abnorm,?. 70 | 20. lodging: yes,no,?. 71 | 21. stem-cankers: absent,below-soil,above-soil,above-sec-nde,?. 72 | 22. canker-lesion: dna,brown,dk-brown-blk,tan,?. 73 | 23. fruiting-bodies: absent,present,?. 74 | 24. external decay: absent,firm-and-dry,watery,?. 75 | 25. mycelium: absent,present,?. 76 | 26. int-discolor: none,brown,black,?. 77 | 27. sclerotia: absent,present,?. 78 | 28. fruit-pods: norm,diseased,few-present,dna,?. 79 | 29. fruit spots: absent,colored,brown-w/blk-specks,distort,dna,?. 80 | 30. seed: norm,abnorm,?. 81 | 31. mold-growth: absent,present,?. 82 | 32. seed-discolor: absent,present,?. 83 | 33. seed-size: norm,lt-norm,?. 84 | 34. shriveling: absent,present,?. 85 | 35. roots: norm,rotted,galls-cysts,?. 86 | 87 | 8. Number of Missing Attribute Values: (denoted by "?") 88 | (Problem: these don't appear to be correct! Needs to be updated.) 89 | 1. date: 0 90 | 2. plant-stand: 1 91 | 3. precip: 8 92 | 4. temp: 11 93 | 5. hail: 7 94 | 6. crop-hist: 41 95 | 7. area-damaged: 1 96 | 8. severity: 1 97 | 9. seed-tmt: 41 98 | 10. germination: 41 99 | 11. plant-growth: 36 100 | 12. leaves: 1 101 | 13. leafspots-halo: 0 102 | 14. leafspots-marg: 25 103 | 15. leafspot-size: 25 104 | 16. leaf-shread: 25 105 | 17. leaf-malf: 26 106 | 18. leaf-mild: 25 107 | 19. stem: 30 108 | 20. lodging: 1 109 | 21. stem-cankers: 41 110 | 22. canker-lesion: 11 111 | 23. fruiting-bodies: 11 112 | 24. external decay: 35 113 | 25. mycelium: 11 114 | 26. int-discolor: 11 115 | 27. sclerotia: 11 116 | 28. fruit-pods: 11 117 | 29. fruit spots: 25 118 | 30. seed: 35 119 | 31. mold-growth: 29 120 | 32. seed-discolor: 29 121 | 33. seed-size: 35 122 | 34. shriveling: 29 123 | 35. roots: 35 124 | 125 | 9. Class Distribution: 126 | 1. diaporthe-stem-canker: 10 127 | 2. charcoal-rot: 10 128 | 3. rhizoctonia-root-rot: 10 129 | 4. phytophthora-rot: 40 130 | 5. brown-stem-rot: 20 131 | 6. powdery-mildew: 10 132 | 7. downy-mildew: 10 133 | 8. brown-spot: 40 134 | 9. bacterial-blight: 10 135 | 10. bacterial-pustule: 10 136 | 11. purple-seed-stain: 10 137 | 12. anthracnose: 20 138 | 13. phyllosticta-leaf-spot: 10 139 | 14. alternarialeaf-spot: 40 140 | 15. frog-eye-leaf-spot: 40 141 | 16. diaporthe-pod-&-stem-blight: 6 142 | 17. 
cyst-nematode: 6 143 | 18. 2-4-d-injury: 1 144 | 19. herbicide-injury: 4 145 | -------------------------------------------------------------------------------- /data/soybean-small/soybean-small.data: -------------------------------------------------------------------------------- 1 | 4,0,2,1,1,1,0,1,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 2 | 5,0,2,1,0,3,1,1,1,2,1,1,0,2,2,0,0,0,1,1,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 3 | 3,0,2,1,0,2,0,2,1,1,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 4 | 6,0,2,1,0,1,1,1,0,0,1,1,0,2,2,0,0,0,1,1,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 5 | 4,0,2,1,0,3,0,2,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 6 | 5,0,2,1,0,2,0,1,1,0,1,1,0,2,2,0,0,0,1,1,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 7 | 3,0,2,1,0,2,1,1,0,1,1,1,0,2,2,0,0,0,1,1,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 8 | 3,0,2,1,0,1,0,2,1,2,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 9 | 6,0,2,1,0,3,0,1,1,1,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 10 | 6,0,2,1,0,1,0,1,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 11 | 6,0,0,2,1,0,2,1,0,0,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 12 | 4,0,0,1,0,2,3,1,1,1,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 13 | 5,0,0,2,0,3,2,1,0,2,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 14 | 6,0,0,1,1,3,3,1,1,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 15 | 3,0,0,2,1,0,2,1,0,1,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 16 | 4,0,0,1,1,1,3,1,1,1,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 17 | 3,0,0,1,0,1,2,1,0,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 18 | 5,0,0,2,1,2,2,1,0,2,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 19 | 6,0,0,2,0,1,3,1,1,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 20 | 5,0,0,2,1,3,3,1,1,2,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 21 | 0,1,2,0,0,1,1,1,1,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 22 | 2,1,2,0,0,3,1,2,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,D3 23 | 2,1,2,0,0,2,1,1,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 24 | 0,1,2,0,0,0,1,1,1,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,D3 25 | 0,1,2,0,0,2,1,1,1,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,D3 26 | 4,0,2,0,1,0,1,2,0,2,1,1,0,2,2,0,0,0,1,1,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 27 | 2,1,2,0,0,3,1,2,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 28 | 0,1,2,0,0,0,1,1,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,1,D3 29 | 3,0,2,0,1,3,1,2,0,1,1,0,0,2,2,0,0,0,1,1,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 30 | 0,1,2,0,0,1,1,2,1,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,D3 31 | 2,1,2,1,1,3,1,2,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 32 | 0,1,1,1,0,1,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 33 | 3,1,2,0,0,1,1,2,1,0,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 34 | 2,1,2,1,1,1,1,2,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 35 | 1,1,2,0,0,3,1,1,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 36 | 1,1,2,1,0,0,1,2,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 37 | 0,1,2,1,0,3,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 38 | 2,1,2,0,0,1,1,2,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 39 | 3,1,2,0,0,2,1,2,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 40 | 3,1,1,0,0,2,1,2,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 41 | 0,1,2,1,1,1,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 42 | 1,1,2,1,1,3,1,2,0,1,1,1,0,2,2,0,0,0,1,1,1,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 43 | 
1,1,2,0,0,0,1,2,1,0,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 44 | 1,1,2,1,1,2,3,1,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 45 | 2,1,1,0,0,3,1,2,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 46 | 0,1,1,1,1,2,1,2,1,0,1,1,0,2,2,0,0,0,1,1,2,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 47 | 0,1,2,1,0,3,1,1,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 48 | -------------------------------------------------------------------------------- /data/soybean-small/soybean-small.names: -------------------------------------------------------------------------------- 1 | 1. Title: Small Soybean Database 2 | 3 | 2. Sources: 4 | (a) Michalski,R.S. Learning by being told and learning from 5 | examples: an experimental comparison of the two methodes of knowledge 6 | acquisition in the context of developing an expert system for soybean 7 | desease diagnoiss", International Journal of Policy Analysis and 8 | Information Systems, 1980, 4(2), 125-161. 9 | (b) Donor: Doug Fisher (dfisher%vuse@uunet.uucp) 10 | (c) Date: 1987 11 | 12 | 3. Past Usage: 13 | See the soybean-large.names 14 | 15 | 4. Relevant Information Paragraph: 16 | A small subset of the original soybean database. See the reference 17 | for Fisher and Schlimmer in soybean-large.names for more information. 18 | 19 | Steven Souders wrote: 20 | 21 | > Figure 15 in the Michalski and Stepp paper (PAMI-82) says that the 22 | > discriminant values for the attribute CONDITION OF FRUIT PODS for the 23 | > classes Rhizoctonia Root Rot and Phytophthora Rot are "few or none" 24 | > and "irrelevant" respectively. However, in the SOYBEAN-SMALL dataset 25 | > I got from UCI, the value for this attribute is "dna" (does not apply) 26 | > for both classes. I show the actual data below for cases D3 27 | > (Rhizoctonia Root Rot) and D4 (Phytophthora Rot). According to the 28 | > attribute names given in soybean-large.names, FRUIT-PODS is attribute 29 | > #28. If you look at column 28 in the data below (marked with arrows) 30 | > you'll notice that all cases of D3 and D4 have the same value. Thus, 31 | > the SOYBEAN-SMALL dataset from UCI could NOT have produced the results 32 | > in the Michalski and Stepp paper. 33 | 34 | I do not have that paper, but have found what is probably a later 35 | variation of that figure in Stepp's dissertation, which lists the 36 | value "normal" for the first 2 classes and "irrelevant" for the latter 37 | 2 classes. I believe that "irrelevant" is used here as a synonym for 38 | "not-applicable", "dna", and "does-not-apply". I believe that there 39 | is a mis-print in the figure he read in their PAMI-83 article. 40 | 41 | I have checked over each attribute value in this database. It 42 | corresponds exactly with the copies listed in both Stepp's and Fisher's 43 | dissertations. 44 | 45 | 5. Number of Instances: 47 46 | 47 | 6. Number of Attributes: 35 (all have been nominalized) 48 | -- All attributes here appear with numeric values 49 | 50 | 7. Attribute Information: 51 | -- derivable from soybean-large.names 52 | 53 | 8. Number of Missing Attribute Values: 0 54 | 55 | 9. Class Distribution: 56 | 1. D1: 10 57 | 2. D2: 10 58 | 3. D3: 10 59 | 4. 
D4: 17 60 | 61 | -------------------------------------------------------------------------------- /data/soybean-small/soybean-small_enc.csv: -------------------------------------------------------------------------------- 1 | "V1.1","V1.2","V1.3","V1.4","V1.5","V1.6","V1.7","V2.1","V3.1","V3.2","V3.3","V4.1","V4.2","V4.3","V5.1","V6.1","V6.2","V6.3","V6.4","V7.1","V7.2","V7.3","V7.4","V8.1","V9.1","V10.1","V10.2","V10.3","V12.1","V20.1","V21.1","V21.2","V21.3","V21.4","V22.1","V22.2","V22.3","V22.4","V23.1","V24.1","V25.1","V26.1","V27.1","V28.1","V35.1","target" 2 | "0","0","0","0","1","0","0","1","0","0","1","0","1","0","0","0","1","0","0","1","0","0","0","1","1","0","0","1","0","1","0","0","0","1","0","1","0","0","0","0","1","1","1","1","1","1" 3 | "0","0","0","0","0","1","0","1","0","0","1","0","1","0","1","0","0","0","1","0","1","0","0","1","0","0","0","1","0","0","0","0","0","1","1","0","0","0","0","0","1","1","1","1","1","1" 4 | "0","0","0","1","0","0","0","1","0","0","1","0","1","0","1","0","0","1","0","1","0","0","0","0","0","0","1","0","0","1","0","0","0","1","1","0","0","0","0","0","1","1","1","1","1","1" 5 | "0","0","0","0","0","0","1","1","0","0","1","0","1","0","1","0","1","0","0","0","1","0","0","1","1","1","0","0","0","0","0","0","0","1","0","1","0","0","0","0","1","1","1","1","1","1" 6 | "0","0","0","0","1","0","0","1","0","0","1","0","1","0","1","0","0","0","1","1","0","0","0","0","1","0","0","1","0","1","0","0","0","1","0","1","0","0","0","0","1","1","1","1","1","1" 7 | "0","0","0","0","0","1","0","1","0","0","1","0","1","0","1","0","0","1","0","1","0","0","0","1","0","1","0","0","0","0","0","0","0","1","0","1","0","0","0","0","1","1","1","1","1","1" 8 | "0","0","0","1","0","0","0","1","0","0","1","0","1","0","1","0","0","1","0","0","1","0","0","1","1","0","1","0","0","0","0","0","0","1","1","0","0","0","0","0","1","1","1","1","1","1" 9 | "0","0","0","1","0","0","0","1","0","0","1","0","1","0","1","0","1","0","0","1","0","0","0","0","0","0","0","1","0","1","0","0","0","1","1","0","0","0","0","0","1","1","1","1","1","1" 10 | "0","0","0","0","0","0","1","1","0","0","1","0","1","0","1","0","0","0","1","1","0","0","0","1","0","0","1","0","0","1","0","0","0","1","0","1","0","0","0","0","1","1","1","1","1","1" 11 | "0","0","0","0","0","0","1","1","0","0","1","0","1","0","1","0","1","0","0","1","0","0","0","1","1","0","0","1","0","1","0","0","0","1","0","1","0","0","0","0","1","1","1","1","1","1" 12 | "0","0","0","0","0","0","1","1","1","0","0","0","0","1","0","1","0","0","0","0","0","1","0","1","1","1","0","0","0","0","1","0","0","0","0","0","0","1","1","1","1","0","0","1","1","2" 13 | "0","0","0","0","1","0","0","1","1","0","0","0","1","0","1","0","0","1","0","0","0","0","1","1","0","0","1","0","0","1","1","0","0","0","0","0","0","1","1","1","1","0","0","1","1","2" 14 | "0","0","0","0","0","1","0","1","1","0","0","0","0","1","1","0","0","0","1","0","0","1","0","1","1","0","0","1","0","1","1","0","0","0","0","0","0","1","1","1","1","0","0","1","1","2" 15 | "0","0","0","0","0","0","1","1","1","0","0","0","1","0","0","0","0","0","1","0","0","0","1","1","0","1","0","0","0","1","1","0","0","0","0","0","0","1","1","1","1","0","0","1","1","2" 16 | "0","0","0","1","0","0","0","1","1","0","0","0","0","1","0","1","0","0","0","0","0","1","0","1","1","0","1","0","0","1","1","0","0","0","0","0","0","1","1","1","1","0","0","1","1","2" 17 | 
"0","0","0","0","1","0","0","1","1","0","0","0","1","0","0","0","1","0","0","0","0","0","1","1","0","0","1","0","0","0","1","0","0","0","0","0","0","1","1","1","1","0","0","1","1","2" 18 | "0","0","0","1","0","0","0","1","1","0","0","0","1","0","1","0","1","0","0","0","0","1","0","1","1","1","0","0","0","1","1","0","0","0","0","0","0","1","1","1","1","0","0","1","1","2" 19 | "0","0","0","0","0","1","0","1","1","0","0","0","0","1","0","0","0","1","0","0","0","1","0","1","1","0","0","1","0","0","1","0","0","0","0","0","0","1","1","1","1","0","0","1","1","2" 20 | "0","0","0","0","0","0","1","1","1","0","0","0","0","1","1","0","1","0","0","0","0","0","1","1","0","1","0","0","0","1","1","0","0","0","0","0","0","1","1","1","1","0","0","1","1","2" 21 | "0","0","0","0","0","1","0","1","1","0","0","0","0","1","0","0","0","0","1","0","0","0","1","1","0","0","0","1","0","1","1","0","0","0","0","0","0","1","1","1","1","0","0","1","1","2" 22 | "1","0","0","0","0","0","0","0","0","0","1","1","0","0","1","0","1","0","0","0","1","0","0","1","0","0","1","0","1","1","0","1","0","0","0","1","0","0","1","0","0","1","1","0","1","3" 23 | "0","0","1","0","0","0","0","0","0","0","1","1","0","0","1","0","0","0","1","0","1","0","0","0","1","0","1","0","1","1","0","1","0","0","0","1","0","0","1","0","1","1","1","0","1","3" 24 | "0","0","1","0","0","0","0","0","0","0","1","1","0","0","1","0","0","1","0","0","1","0","0","1","1","0","0","1","1","1","0","1","0","0","0","1","0","0","1","0","0","1","1","0","1","3" 25 | "1","0","0","0","0","0","0","0","0","0","1","1","0","0","1","1","0","0","0","0","1","0","0","1","0","0","0","1","1","1","0","1","0","0","0","1","0","0","1","0","1","1","1","0","1","3" 26 | "1","0","0","0","0","0","0","0","0","0","1","1","0","0","1","0","0","1","0","0","1","0","0","1","0","0","1","0","1","1","0","1","0","0","0","1","0","0","1","0","1","1","1","0","1","3" 27 | "0","0","0","0","1","0","0","1","0","0","1","1","0","0","0","1","0","0","0","0","1","0","0","0","1","0","0","1","0","0","0","1","0","0","0","1","0","0","1","0","0","1","1","0","1","3" 28 | "0","0","1","0","0","0","0","0","0","0","1","1","0","0","1","0","0","0","1","0","1","0","0","0","1","0","0","1","1","1","0","1","0","0","0","1","0","0","1","0","0","1","1","0","1","3" 29 | "1","0","0","0","0","0","0","0","0","0","1","1","0","0","1","1","0","0","0","0","1","0","0","1","1","0","1","0","1","1","0","1","0","0","0","1","0","0","1","0","1","1","1","0","0","3" 30 | "0","0","0","1","0","0","0","1","0","0","1","1","0","0","0","0","0","0","1","0","1","0","0","0","1","0","1","0","1","0","0","1","0","0","0","1","0","0","1","0","0","1","1","0","1","3" 31 | "1","0","0","0","0","0","0","0","0","0","1","1","0","0","1","0","1","0","0","0","1","0","0","0","0","0","0","1","1","1","0","1","0","0","0","1","0","0","1","0","1","1","1","0","1","3" 32 | "0","0","1","0","0","0","0","0","0","0","1","0","1","0","0","0","0","0","1","0","1","0","0","0","0","0","0","1","0","1","0","0","1","0","0","0","1","0","1","0","1","1","1","0","0","4" 33 | "1","0","0","0","0","0","0","0","0","1","0","0","1","0","1","0","1","0","0","0","1","0","0","1","1","1","0","0","0","1","0","1","0","0","0","0","1","0","1","1","1","1","1","0","0","4" 34 | "0","0","0","1","0","0","0","0","0","0","1","1","0","0","1","0","1","0","0","0","1","0","0","0","0","1","0","0","0","1","0","0","1","0","0","0","1","0","1","1","1","1","1","0","0","4" 35 | 
"0","0","1","0","0","0","0","0","0","0","1","0","1","0","0","0","1","0","0","0","1","0","0","0","1","0","0","1","0","1","0","1","0","0","0","0","1","0","1","0","1","1","1","0","0","4" 36 | "0","1","0","0","0","0","0","0","0","0","1","1","0","0","1","0","0","0","1","0","1","0","0","1","0","0","0","1","0","1","0","0","1","0","0","0","1","0","1","1","1","1","1","0","0","4" 37 | "0","1","0","0","0","0","0","0","0","0","1","0","1","0","1","1","0","0","0","0","1","0","0","0","0","0","1","0","0","1","0","0","1","0","0","0","1","0","1","1","1","1","1","0","0","4" 38 | "1","0","0","0","0","0","0","0","0","0","1","0","1","0","1","0","0","0","1","0","1","0","0","1","1","1","0","0","0","1","0","1","0","0","0","0","1","0","1","1","1","1","1","0","0","4" 39 | "0","0","1","0","0","0","0","0","0","0","1","1","0","0","1","0","1","0","0","0","1","0","0","0","1","1","0","0","0","1","0","1","0","0","0","0","1","0","1","1","1","1","1","0","0","4" 40 | "0","0","0","1","0","0","0","0","0","0","1","1","0","0","1","0","0","1","0","0","1","0","0","0","0","0","1","0","0","1","0","0","1","0","0","0","1","0","1","1","1","1","1","0","0","4" 41 | "0","0","0","1","0","0","0","0","0","1","0","1","0","0","1","0","0","1","0","0","1","0","0","0","0","0","0","1","0","1","0","0","1","0","0","0","1","0","1","1","1","1","1","0","0","4" 42 | "1","0","0","0","0","0","0","0","0","0","1","0","1","0","0","0","1","0","0","0","1","0","0","1","1","1","0","0","0","1","0","1","0","0","0","0","1","0","1","0","1","1","1","0","0","4" 43 | "0","1","0","0","0","0","0","0","0","0","1","0","1","0","0","0","0","0","1","0","1","0","0","0","1","0","1","0","0","0","0","1","0","0","0","0","1","0","1","0","1","1","1","0","0","4" 44 | "0","1","0","0","0","0","0","0","0","0","1","1","0","0","1","1","0","0","0","0","1","0","0","0","0","1","0","0","0","1","0","0","1","0","0","0","1","0","1","1","1","1","1","0","0","4" 45 | "0","1","0","0","0","0","0","0","0","0","1","0","1","0","0","0","0","1","0","0","0","0","1","1","0","0","1","0","0","1","0","0","1","0","0","0","1","0","1","0","1","1","1","0","0","4" 46 | "0","0","1","0","0","0","0","0","0","1","0","1","0","0","1","0","0","0","1","0","1","0","0","0","1","0","0","1","0","1","0","1","0","0","0","0","1","0","1","1","1","1","1","0","0","4" 47 | "1","0","0","0","0","0","0","0","0","1","0","0","1","0","0","0","0","1","0","0","1","0","0","0","0","1","0","0","0","0","0","0","1","0","0","0","1","0","1","0","1","1","1","0","0","4" 48 | "1","0","0","0","0","0","0","0","0","0","1","0","1","0","1","0","0","0","1","0","1","0","0","1","1","0","0","1","0","1","0","1","0","0","0","0","1","0","1","1","1","1","1","0","0","4" 49 | -------------------------------------------------------------------------------- /data/soybean-small/stepp-order: -------------------------------------------------------------------------------- 1 | 6,0,2,1,0,1,0,1,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 2 | 6,0,2,1,0,3,0,1,1,1,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 3 | 3,0,2,1,0,1,0,2,1,2,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 4 | 3,0,2,1,0,2,1,1,0,1,1,1,0,2,2,0,0,0,1,1,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 5 | 5,0,2,1,0,2,0,1,1,0,1,1,0,2,2,0,0,0,1,1,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 6 | 4,0,2,1,0,3,0,2,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 7 | 6,0,2,1,0,1,1,1,0,0,1,1,0,2,2,0,0,0,1,1,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 8 | 3,0,2,1,0,2,0,2,1,1,1,1,0,2,2,0,0,0,1,0,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 9 | 5,0,2,1,0,3,1,1,1,2,1,1,0,2,2,0,0,0,1,1,3,0,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 10 | 
4,0,2,1,1,1,0,1,0,2,1,1,0,2,2,0,0,0,1,0,3,1,1,1,0,0,0,0,4,0,0,0,0,0,0,D1 11 | 5,0,0,2,1,3,3,1,1,2,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 12 | 6,0,0,2,0,1,3,1,1,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 13 | 5,0,0,2,1,2,2,1,0,2,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 14 | 3,0,0,1,0,1,2,1,0,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 15 | 4,0,0,1,1,1,3,1,1,1,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 16 | 3,0,0,2,1,0,2,1,0,1,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 17 | 6,0,0,1,1,3,3,1,1,0,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 18 | 5,0,0,2,0,3,2,1,0,2,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 19 | 4,0,0,1,0,2,3,1,1,1,1,1,0,2,2,0,0,0,1,0,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 20 | 6,0,0,2,1,0,2,1,0,0,1,1,0,2,2,0,0,0,1,1,0,3,0,0,0,2,1,0,4,0,0,0,0,0,0,D2 21 | 0,1,2,0,0,0,1,1,1,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,D3 22 | 2,1,2,0,0,2,1,1,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 23 | 2,1,2,0,0,3,1,2,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,D3 24 | 0,1,2,0,0,1,1,1,1,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 25 | 0,1,2,0,0,1,1,2,1,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,D3 26 | 3,0,2,0,1,3,1,2,0,1,1,0,0,2,2,0,0,0,1,1,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 27 | 0,1,2,0,0,0,1,1,0,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,1,D3 28 | 2,1,2,0,0,3,1,2,0,2,1,0,0,2,2,0,0,0,1,0,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 29 | 4,0,2,0,1,0,1,2,0,2,1,1,0,2,2,0,0,0,1,1,1,1,0,1,1,0,0,3,4,0,0,0,0,0,0,D3 30 | 0,1,2,0,0,2,1,1,1,1,1,0,0,2,2,0,0,0,1,0,1,1,0,1,0,0,0,3,4,0,0,0,0,0,0,D3 31 | 3,1,2,0,0,2,1,2,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 32 | 2,1,2,0,0,1,1,2,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 33 | 0,1,2,1,0,3,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 34 | 1,1,2,1,0,0,1,2,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 35 | 1,1,2,0,0,3,1,1,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 36 | 2,1,2,1,1,1,1,2,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 37 | 3,1,2,0,0,1,1,2,1,0,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 38 | 2,1,2,1,1,3,1,2,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 39 | 0,1,1,1,0,1,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 40 | 0,1,2,1,0,3,1,1,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 41 | 0,1,1,1,1,2,1,2,1,0,1,1,0,2,2,0,0,0,1,1,2,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 42 | 2,1,1,0,0,3,1,2,0,2,1,1,0,2,2,0,0,0,1,0,1,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 43 | 1,1,2,1,1,2,3,1,1,1,1,1,0,2,2,0,0,0,1,0,2,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 44 | 1,1,2,0,0,0,1,2,1,0,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 45 | 1,1,2,1,1,3,1,2,0,1,1,1,0,2,2,0,0,0,1,1,1,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 46 | 0,1,2,1,1,1,1,1,0,0,1,1,0,2,2,0,0,0,1,0,1,2,0,1,0,0,0,3,4,0,0,0,0,0,1,D4 47 | 3,1,1,0,0,2,1,2,1,2,1,1,0,2,2,0,0,0,1,0,2,2,0,0,0,0,0,3,4,0,0,0,0,0,1,D4 48 | -------------------------------------------------------------------------------- /data/soybean-small/why-various-soybean-databases: -------------------------------------------------------------------------------- 1 | From: Raymond Mooney 2 | Date: Mon, 12 Mar 90 19:19:58 CST 3 | 4 | Regarding the data we used. We used a later version of the soybean 5 | data sent to us by Bob Stepp which I believe to be the version used by 6 | Reinke in his MS thesis on the GEM system (this has 17 diseases as 7 | referenced by Spackman, ML88). 
I also got the version you have (19 8 | diseases, 35 features) from Bob Stepp which I believe is the version 9 | used in the original experiments from Michalski & Chilausky (this has 10 | missing features which is why we decided to use the 17 disease version 11 | in our experiments). The M&C paper references only 15 diseases (35 12 | features) and the first 15 diseases in my 19 disease set match those 13 | in the M&C paper. I guess the last 4 diseases just weren't reported 14 | in the paper (wonder why??). The existence of the 4 disease 15 | clustering set adds even more confusion to any reference to using 16 | soybean data. 17 | 18 | Hope this helps some. 19 | 20 | -Ray 21 | 22 | 23 | 24 | -------------------------------------------------------------------------------- /data/spect/DonorNote.txt: -------------------------------------------------------------------------------- 1 | September 1, 2006 2 | 3 | SPECTF.test has been modified (see Donor Note below for justification). 4 | The incorrect test set has been renamed to SPECTFincorrect.test 5 | 6 | --Librarian 7 | 8 | 9 | Donor Note: 10 | 11 | > The incorrect file is the SPECTF.test file in the SPECT dataset. It contains 12 | > too many examples (duplicates the training file) and some of the data points are repeated. 13 | -------------------------------------------------------------------------------- /data/spect/spect.names: -------------------------------------------------------------------------------- 1 | 1. Title of Database: SPECT heart data 2 | 3 | 2. Sources: 4 | -- Original owners: Krzysztof J. Cios, Lukasz A. Kurgan 5 | University of Colorado at Denver, Denver, CO 80217, U.S.A. 6 | Krys.Cios@cudenver.edu 7 | Lucy S. Goodenday 8 | Medical College of Ohio, OH, U.S.A. 9 | 10 | -- Donors: Lukasz A.Kurgan, Krzysztof J. Cios 11 | -- Date: 10/01/01 12 | 13 | 3. Past Usage: 14 | 1. Kurgan, L.A., Cios, K.J., Tadeusiewicz, R., Ogiela, M. & Goodenday, L.S. 15 | "Knowledge Discovery Approach to Automated Cardiac SPECT Diagnosis" 16 | Artificial Intelligence in Medicine, vol. 23:2, pp 149-169, Oct 2001 17 | 18 | Results: The CLIP3 machine learning algorithm achieved 84.0% accuracy 19 | References: 20 | Cios, K.J., Wedding, D.K. & Liu, N. 21 | CLIP3: cover learning using integer programming. 22 | Kybernetes, 26:4-5, pp 513-536, 1997 23 | 24 | Cios, K.J. & Kurgan, L. 25 | Hybrid Inductive Machine Learning: An Overview of CLIP Algorithms, 26 | In: Jain, L.C., and Kacprzyk, J. (Eds.) 27 | New Learning Paradigms in Soft Computing, 28 | Physica-Verlag (Springer), 2001 29 | 30 | SPECT is a good data set for testing ML algorithms; it has 267 instances 31 | that are descibed by 23 binary attributes 32 | 33 | Other results (in press): 34 | -- CLIP4 algorithm achieved 86.1% accuracy 35 | -- ensemble of CLIP4 classifiers achieved 90.4% accuracy 36 | -- Predicted attribute: OVERALL_DIAGNOSIS (binary) 37 | 38 | 4. Relevant Information: 39 | The dataset describes diagnosing of cardiac Single Proton Emission Computed Tomography (SPECT) images. 40 | Each of the patients is classified into two categories: normal and abnormal. 41 | The database of 267 SPECT image sets (patients) was processed to extract features that summarize the original SPECT images. 42 | As a result, 44 continuous feature pattern was created for each patient. 43 | The pattern was further processed to obtain 22 binary feature patterns. 44 | The CLIP3 algorithm was used to generate classification rules from these patterns. 
45 | The CLIP3 algorithm generated rules that were 84.0% accurate (as compared with cardilogists' diagnoses). 46 | 47 | 5. Number of Instances: 267 48 | 6. Number of Attributes: 23 (22 binary + 1 binary class) 49 | 7. Attribute Information: 50 | 1. OVERALL_DIAGNOSIS: 0,1 (class attribute, binary) 51 | 2. F1: 0,1 (the partial diagnosis 1, binary) 52 | 3. F2: 0,1 (the partial diagnosis 2, binary) 53 | 4. F3: 0,1 (the partial diagnosis 3, binary) 54 | 5. F4: 0,1 (the partial diagnosis 4, binary) 55 | 6. F5: 0,1 (the partial diagnosis 5, binary) 56 | 7. F6: 0,1 (the partial diagnosis 6, binary) 57 | 8. F7: 0,1 (the partial diagnosis 7, binary) 58 | 9. F8: 0,1 (the partial diagnosis 8, binary) 59 | 10. F9: 0,1 (the partial diagnosis 9, binary) 60 | 11. F10: 0,1 (the partial diagnosis 10, binary) 61 | 12. F11: 0,1 (the partial diagnosis 11, binary) 62 | 13. F12: 0,1 (the partial diagnosis 12, binary) 63 | 14. F13: 0,1 (the partial diagnosis 13, binary) 64 | 15. F14: 0,1 (the partial diagnosis 14, binary) 65 | 16. F15: 0,1 (the partial diagnosis 15, binary) 66 | 17. F16: 0,1 (the partial diagnosis 16, binary) 67 | 18. F17: 0,1 (the partial diagnosis 17, binary) 68 | 19. F18: 0,1 (the partial diagnosis 18, binary) 69 | 20. F19: 0,1 (the partial diagnosis 19, binary) 70 | 21. F20: 0,1 (the partial diagnosis 20, binary) 71 | 22. F21: 0,1 (the partial diagnosis 21, binary) 72 | 23. F22: 0,1 (the partial diagnosis 22, binary) 73 | -- dataset is divided into: 74 | -- training data ("SPECT.train" 80 instances) 75 | -- testing data ("SPECT.test" 187 instances) 76 | 8. Missing Attribute Values: None 77 | 9. Class Distribution: 78 | -- entire data 79 | Class # examples 80 | 0 55 81 | 1 212 82 | -- training dataset 83 | Class # examples 84 | 0 40 85 | 1 40 86 | -- testing dataset 87 | Class # examples 88 | 0 15 89 | 1 172 90 | -------------------------------------------------------------------------------- /data/spect/spect.test: -------------------------------------------------------------------------------- 1 | 1,1,0,0,1,1,0,0,0,1,1,0,0,0,1,1,1,0,0,1,1,0,0 2 | 1,1,0,0,1,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0 3 | 1,0,0,0,1,0,1,0,0,1,0,1,0,0,1,1,0,0,0,0,0,0,1 4 | 1,0,1,1,1,0,0,1,0,1,0,0,1,1,1,0,1,0,0,0,0,1,0 5 | 1,0,0,1,0,0,0,0,1,0,0,1,0,1,1,0,1,0,0,0,0,0,1 6 | 1,0,0,1,1,0,1,0,0,1,0,1,0,1,0,0,1,0,0,0,0,1,1 7 | 1,1,0,0,1,0,0,1,1,1,1,0,1,1,1,0,1,0,0,0,1,0,1 8 | 1,1,0,0,1,0,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0 9 | 1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0 10 | 1,1,0,0,1,1,1,0,0,1,1,1,0,0,1,0,1,1,0,1,0,0,0 11 | 1,1,0,0,0,1,0,0,0,1,1,0,0,1,1,1,0,0,0,1,0,0,0 12 | 1,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0 13 | 1,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,1 14 | 1,1,0,0,0,1,0,0,0,1,1,0,0,0,1,0,0,0,1,1,0,0,0 15 | 1,1,0,0,1,1,0,0,0,1,1,0,0,0,0,0,1,0,0,1,1,0,0 16 | 1,1,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0,1,0 17 | 1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,1,0,0,0,1,1 18 | 1,1,0,0,0,0,1,1,0,0,1,1,1,0,0,0,0,1,0,0,0,0,1 19 | 1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0 20 | 1,1,1,1,0,1,0,1,1,0,1,0,1,1,0,0,1,0,0,0,1,1,0 21 | 1,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,1,0,1,0,0 22 | 1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,1,1,1 23 | 1,0,0,1,1,1,0,0,1,1,1,0,0,1,1,1,1,0,1,0,1,1,0 24 | 1,1,1,0,1,1,1,1,0,0,0,1,1,0,0,0,1,1,0,0,1,0,0 25 | 1,1,1,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0 26 | 1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0 27 | 1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,1,0 28 | 1,1,1,1,1,0,1,1,1,0,1,0,0,1,1,1,1,0,0,1,1,0,0 29 | 1,1,1,0,0,1,1,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0 30 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0 31 | 
1,1,1,0,0,1,0,0,1,1,1,1,0,1,1,1,1,0,0,1,1,0,0 32 | 1,1,1,0,0,0,1,1,0,0,1,1,0,0,0,1,1,0,0,1,1,1,0 33 | 1,0,0,0,0,0,0,1,1,0,0,0,1,1,0,0,1,0,0,0,0,1,1 34 | 1,1,0,0,0,1,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1 35 | 1,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0 36 | 1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1 37 | 1,0,1,1,0,0,0,1,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0 38 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0 39 | 1,1,0,1,1,1,0,0,1,1,1,0,0,1,0,1,1,0,0,0,1,1,1 40 | 1,0,1,1,1,0,0,1,1,1,0,0,1,1,1,0,0,0,0,0,0,1,1 41 | 1,0,1,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,1,1 42 | 1,0,0,0,0,0,1,1,0,0,0,1,1,1,0,0,0,1,1,0,0,0,0 43 | 1,0,0,1,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,1,1,1 44 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 45 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0 46 | 1,0,0,1,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1 47 | 1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1 48 | 1,0,0,1,0,1,0,0,1,1,1,0,0,1,1,0,0,1,1,0,0,1,1 49 | 1,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0 50 | 1,1,0,1,1,1,0,0,1,1,0,0,0,0,1,1,1,0,0,0,1,1,0 51 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1 52 | 1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1 53 | 1,0,1,1,0,0,0,1,1,0,0,0,1,1,0,0,1,1,1,0,0,1,1 54 | 1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1 55 | 1,1,0,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,1 56 | 1,1,0,1,0,1,1,0,1,1,0,1,1,1,1,1,1,0,0,1,0,1,1 57 | 1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1 58 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 59 | 1,0,1,1,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,1,0,0 60 | 1,0,1,1,0,0,0,1,0,0,0,0,1,1,0,0,0,1,0,0,0,0,1 61 | 1,1,1,1,1,1,1,0,0,1,0,1,0,1,0,1,1,0,0,1,1,0,1 62 | 1,0,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,1,1,0,0,0,0 63 | 1,0,1,0,0,1,1,0,0,0,1,1,0,1,0,0,0,0,0,0,0,1,1 64 | 1,1,0,1,1,1,0,0,1,1,0,0,0,1,1,1,1,0,0,0,1,1,1 65 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0 66 | 1,1,1,1,0,1,1,1,0,0,1,1,1,1,0,0,0,0,1,1,1,1,0 67 | 1,0,0,0,0,0,0,0,1,0,0,0,0,1,1,0,1,0,0,0,1,1,1 68 | 1,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,1,1,0,0 69 | 1,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,1,0,0,1,1,0,0 70 | 1,1,1,1,0,1,0,1,1,0,1,0,1,1,0,0,0,0,1,1,0,1,1 71 | 1,1,1,0,0,1,1,1,1,0,0,0,1,1,0,0,1,1,0,0,1,1,1 72 | 1,0,1,1,0,0,1,1,1,0,0,0,1,1,0,0,0,0,0,0,0,1,1 73 | 1,1,0,1,1,1,0,0,1,1,1,0,0,1,1,1,1,0,1,1,1,1,1 74 | 1,0,0,1,0,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,1 75 | 1,0,0,0,1,0,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,0 76 | 1,0,0,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,1,1,0,1,1 77 | 1,0,1,1,0,0,0,1,1,0,0,0,1,1,0,0,1,1,1,0,1,1,1 78 | 1,1,0,1,1,1,0,0,1,1,1,0,0,1,1,0,0,0,0,0,0,1,0 79 | 1,0,1,1,1,1,1,1,1,1,1,1,0,1,1,1,0,0,0,1,0,1,1 80 | 1,0,0,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,1,1,1 81 | 1,1,0,0,0,1,0,0,1,0,0,0,0,1,1,1,1,1,0,0,1,1,1 82 | 1,0,1,1,1,0,1,0,0,1,0,1,1,1,1,0,1,1,1,0,0,1,1 83 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1 84 | 1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1 85 | 1,1,1,1,1,1,1,0,0,1,1,0,1,0,1,1,1,0,0,1,1,0,0 86 | 1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1 87 | 1,1,1,1,1,1,0,1,1,1,1,0,1,1,1,1,0,0,0,0,1,0,0 88 | 1,0,1,0,0,1,1,1,0,0,0,1,1,0,0,0,1,1,0,1,1,0,1 89 | 1,1,0,1,0,1,0,0,1,0,0,0,0,1,1,0,1,0,0,0,1,1,1 90 | 1,0,0,0,0,0,0,0,1,0,0,1,0,1,0,0,0,0,0,0,0,1,0 91 | 1,0,0,0,1,0,0,0,0,1,0,0,0,1,1,1,1,0,0,1,1,0,0 92 | 1,1,0,1,1,0,0,0,1,1,0,0,0,1,1,1,1,0,0,1,1,1,1 93 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0 94 | 1,0,0,1,0,0,1,0,1,1,0,1,0,1,1,1,0,1,0,0,0,0,0 95 | 1,1,0,0,0,1,1,0,0,0,1,0,0,0,0,0,0,1,1,1,0,0,1 96 | 1,0,0,0,1,0,0,1,1,1,0,1,1,1,1,0,0,0,0,0,1,0,0 97 | 1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,0,1 98 | 1,1,1,1,0,1,0,1,0,0,1,1,1,1,0,0,0,0,0,0,1,1,1 99 | 1,0,0,1,1,0,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,1,1 100 | 
1,1,0,1,0,1,1,1,1,0,1,1,1,1,0,0,1,0,1,0,1,1,1 101 | 1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0 102 | 1,0,0,1,1,0,0,0,1,1,0,0,0,1,1,1,0,0,0,0,0,1,1 103 | 1,1,0,0,1,0,0,0,0,0,1,1,0,0,1,0,0,1,1,1,1,1,0 104 | 1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,1,0,0 105 | 1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0 106 | 1,1,0,0,0,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,1 107 | 1,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0 108 | 1,1,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0 109 | 1,1,0,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0 110 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 111 | 1,1,0,0,0,1,0,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0 112 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 113 | 1,1,0,0,0,1,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0 114 | 1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0 115 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 116 | 1,0,0,1,0,1,0,0,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0 117 | 1,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1 118 | 1,1,1,0,0,1,1,1,0,0,1,1,0,0,0,0,0,0,0,1,0,0,0 119 | 1,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0 120 | 1,1,1,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,1,1 121 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 122 | 1,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0 123 | 1,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0 124 | 1,1,0,1,1,1,0,0,1,1,1,0,0,1,1,1,1,0,0,1,1,1,1 125 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1 126 | 1,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0 127 | 1,0,0,0,1,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0 128 | 1,0,0,1,1,0,1,0,0,0,0,1,0,0,1,0,0,0,1,1,1,0,0 129 | 1,1,0,1,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0 130 | 1,1,1,1,0,1,1,1,1,0,1,1,1,0,0,0,0,0,0,0,1,0,0 131 | 1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 132 | 1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1 133 | 1,0,0,1,1,0,0,1,1,1,0,0,1,0,0,1,1,0,0,0,0,1,0 134 | 1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1 135 | 1,1,0,0,0,1,1,0,0,1,1,1,0,0,1,0,0,0,0,0,0,0,0 136 | 1,1,1,1,0,0,0,0,0,0,0,0,1,1,0,0,1,1,1,0,0,1,1 137 | 1,1,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,1,0,1,1,1,0 138 | 1,1,1,0,1,1,0,0,1,1,1,0,1,0,0,1,1,0,0,0,1,1,0 139 | 1,0,0,1,1,0,0,0,1,0,0,0,0,1,1,0,1,0,0,0,1,1,1 140 | 1,0,0,1,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,1,0 141 | 1,0,1,1,1,0,1,1,1,1,0,1,0,1,1,1,1,0,0,0,1,1,1 142 | 1,0,1,1,0,0,1,0,1,0,0,1,1,1,0,0,1,0,0,1,1,0,1 143 | 1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,1,1,1,1 144 | 1,1,0,1,0,1,0,1,0,1,1,0,0,1,1,1,1,0,0,1,1,0,1 145 | 1,1,0,0,1,1,0,0,0,1,1,0,0,0,1,1,1,0,0,1,1,0,1 146 | 1,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1 147 | 1,1,1,1,0,0,1,1,0,1,0,0,1,0,1,0,1,0,0,0,1,1,1 148 | 1,0,0,1,0,0,0,0,1,0,0,1,0,1,0,0,1,0,0,0,1,1,1 149 | 1,1,1,1,1,1,1,1,0,1,1,0,0,1,0,1,1,0,0,1,1,1,1 150 | 1,1,0,0,0,1,0,0,1,1,1,0,0,1,1,0,1,0,0,1,1,0,0 151 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,1,1 152 | 1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0 153 | 1,1,1,1,0,1,0,1,1,0,1,0,1,1,0,0,0,0,0,0,0,1,0 154 | 1,1,1,1,0,0,1,1,1,0,1,1,1,1,0,0,0,1,1,1,1,0,0 155 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,1,0,0,1,0,1,1 156 | 1,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1 157 | 1,0,0,0,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,1,0 158 | 1,1,1,0,0,1,1,0,0,1,1,1,0,0,0,0,0,0,0,1,0,0,0 159 | 1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1 160 | 1,1,1,1,1,1,1,1,1,0,1,1,1,1,0,1,1,1,1,1,1,1,1 161 | 1,0,1,0,0,0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,1,0,0 162 | 1,1,0,1,1,1,1,1,1,0,0,1,1,1,1,1,0,1,0,0,0,1,1 163 | 1,0,0,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,0,0,0,1,0 164 | 1,0,0,1,1,0,0,0,1,1,1,0,1,1,1,1,0,0,0,0,0,1,1 165 | 1,1,0,0,0,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0 166 | 1,0,0,1,0,1,0,0,1,0,1,0,0,1,0,1,0,0,1,0,0,1,0 167 | 1,0,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,1,1,0,0,1,1 168 | 
1,0,1,1,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,1 169 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 170 | 1,1,1,1,0,1,1,0,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0 171 | 1,1,0,1,1,1,0,0,1,1,1,0,0,1,1,0,1,0,1,1,1,0,0 172 | 1,1,0,0,0,1,0,0,0,1,1,0,0,0,1,0,0,0,0,0,1,1,0 173 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 174 | 0,0,0,1,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,1,0 175 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 176 | 0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,1 177 | 0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0 178 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 179 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 180 | 0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0 181 | 0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,1,0,0 182 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 183 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 184 | 0,1,1,0,0,0,1,0,0,0,1,1,1,0,0,0,1,0,0,0,0,0,0 185 | 0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0 186 | 0,1,0,1,0,1,0,0,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0 187 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 -------------------------------------------------------------------------------- /data/spect/spect.train: -------------------------------------------------------------------------------- 1 | 1,0,0,0,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0 2 | 1,0,0,1,1,0,0,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,1 3 | 1,1,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0 4 | 1,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,1,1 5 | 1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,1,1,0,0,0,0,0,0 6 | 1,0,0,0,1,0,0,0,0,1,0,0,0,1,1,0,1,0,0,0,1,0,1 7 | 1,1,0,1,1,0,0,0,1,0,1,0,1,1,0,0,0,0,0,0,0,1,1 8 | 1,0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1 9 | 1,0,0,1,0,0,0,1,1,0,0,0,0,1,0,1,0,0,0,0,0,1,1 10 | 1,0,1,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0 11 | 1,1,1,0,0,1,0,1,0,0,1,1,1,1,0,0,1,1,1,1,1,0,1 12 | 1,1,1,0,0,1,1,1,0,1,1,1,1,0,1,0,0,1,0,1,1,0,0 13 | 1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,1,1 14 | 1,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,1,0,1,0,0,1,1 15 | 1,1,0,1,1,0,0,1,1,1,0,1,1,1,1,1,1,0,1,1,0,1,1 16 | 1,0,1,1,0,0,1,1,1,0,0,0,1,1,0,0,1,1,1,0,1,1,1 17 | 1,0,0,1,1,0,0,0,1,1,0,0,0,1,1,0,1,0,0,0,0,1,0 18 | 1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 19 | 1,1,0,1,0,1,0,1,1,0,1,0,1,1,0,0,0,1,0,0,1,1,0 20 | 1,1,0,0,0,1,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,0 21 | 1,0,0,0,0,0,0,1,0,0,0,1,1,1,0,0,0,0,0,0,0,1,1 22 | 1,1,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,1,1,0,0 23 | 1,1,1,0,0,1,1,1,0,0,1,1,1,0,0,0,0,0,0,1,0,0,0 24 | 1,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0 25 | 1,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,1,0,0,0 26 | 1,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0 27 | 1,0,0,0,1,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,1,0 28 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0 29 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 30 | 1,1,0,1,1,1,0,0,0,0,1,0,0,1,1,0,1,0,0,0,1,1,1 31 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 32 | 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 33 | 1,1,1,0,0,1,1,1,0,0,1,1,1,0,0,0,0,1,0,1,0,0,1 34 | 1,0,1,1,1,0,0,1,1,1,0,1,1,1,0,0,1,1,1,0,0,1,1 35 | 1,1,0,1,1,1,0,0,1,1,1,0,0,1,1,1,0,0,0,0,0,1,0 36 | 1,1,1,1,1,1,1,0,0,0,0,1,1,1,1,0,1,1,1,1,1,0,1 37 | 1,1,1,1,0,1,0,1,1,1,1,0,1,1,1,0,1,0,0,0,1,1,1 38 | 1,1,0,0,1,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0 39 | 1,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0 40 | 1,1,0,1,1,0,0,0,1,1,1,0,0,1,1,1,1,0,0,1,1,0,0 41 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 42 | 0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 43 | 0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0 44 | 0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 45 | 0,0,1,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0 46 | 
0,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,1 47 | 0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,0,0 48 | 0,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 49 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 50 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0 51 | 0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,1,0,1 52 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 53 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 54 | 0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0 55 | 0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,0,0,0,1 56 | 0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 57 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0 58 | 0,1,1,1,0,1,0,1,1,1,1,1,0,0,1,0,1,0,0,1,0,1,0 59 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 60 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 61 | 0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0 62 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 63 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 64 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 65 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 66 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 67 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1 68 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 69 | 0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 70 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 71 | 0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0 72 | 0,0,0,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0 73 | 0,1,0,0,0,1,0,1,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0 74 | 0,1,0,1,0,0,0,0,1,0,1,0,0,1,0,0,0,0,0,0,0,1,0 75 | 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 76 | 0,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0 77 | 0,1,0,0,0,1,1,0,0,1,1,0,0,0,1,0,0,0,0,1,1,0,0 78 | 0,1,0,0,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0 79 | 0,0,0,1,1,0,0,1,0,0,0,0,1,1,1,0,0,0,0,0,0,1,1 80 | 0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0 -------------------------------------------------------------------------------- /data/tic-tac-toe/Index: -------------------------------------------------------------------------------- 1 | Index of tic-tac-toe 2 | 3 | 02 Dec 1996 126 Index 4 | 19 Aug 1991 25866 tic-tac-toe.data 5 | 19 Aug 1991 3244 tic-tac-toe.names 6 | -------------------------------------------------------------------------------- /data/tic-tac-toe/tic-tac-toe.names: -------------------------------------------------------------------------------- 1 | 1. Title: Tic-Tac-Toe Endgame database 2 | 3 | 2. Source Information 4 | -- Creator: David W. Aha (aha@cs.jhu.edu) 5 | -- Donor: David W. Aha (aha@cs.jhu.edu) 6 | -- Date: 19 August 1991 7 | 8 | 3. Known Past Usage: 9 | 1. Matheus,~C.~J., \& Rendell,~L.~A. (1989). Constructive 10 | induction on decision trees. In {\it Proceedings of the 11 | Eleventh International Joint Conference on Artificial Intelligence} 12 | (pp. 645--650). Detroit, MI: Morgan Kaufmann. 13 | -- CITRE was applied to 100-instance training and 200-instance test 14 | sets. In a study using various amounts of domain-specific 15 | knowledge, its highest average accuracy was 76.7% (using the 16 | final decision tree created for testing). 17 | 18 | 2. Matheus,~C.~J. (1990). Adding domain knowledge to SBL through 19 | feature construction. In {\it Proceedings of the Eighth National 20 | Conference on Artificial Intelligence} (pp. 803--808). 21 | Boston, MA: AAAI Press. 22 | -- Similar experiments with CITRE, includes learning curves up 23 | to 500-instance training sets but used _all_ instances in the 24 | database for testing. Accuracies reached above 90%, but specific 25 | values are not given (see Chris's dissertation for more details). 26 | 27 | 3. Aha,~D.~W. 
(1991). Incremental constructive induction: An instance-based 28 | approach. In {\it Proceedings of the Eighth International Workshop 29 | on Machine Learning} (pp. 117--121). Evanston, ILL: Morgan Kaufmann. 30 | -- Used 70% for training, 30% of the instances for testing, evaluated 31 | over 10 trials. Results reported for six algorithms: 32 | -- NewID: 84.0% 33 | -- CN2: 98.1% 34 | -- MBRtalk: 88.4% 35 | -- IB1: 98.1% 36 | -- IB3: 82.0% 37 | -- IB3-CI: 99.1% 38 | -- Results also reported when adding an additional 10 irrelevant 39 | ternary-valued attributes; similar _relative_ results except that 40 | IB1's performance degraded more quickly than the others. 41 | 42 | 4. Relevant Information: 43 | 44 | This database encodes the complete set of possible board configurations 45 | at the end of tic-tac-toe games, where "x" is assumed to have played 46 | first. The target concept is "win for x" (i.e., true when "x" has one 47 | of 8 possible ways to create a "three-in-a-row"). 48 | 49 | Interestingly, this raw database gives a stripped-down decision tree 50 | algorithm (e.g., ID3) fits. However, the rule-based CN2 algorithm, the 51 | simple IB1 instance-based learning algorithm, and the CITRE 52 | feature-constructing decision tree algorithm perform well on it. 53 | 54 | 5. Number of Instances: 958 (legal tic-tac-toe endgame boards) 55 | 56 | 6. Number of Attributes: 9, each corresponding to one tic-tac-toe square 57 | 58 | 7. Attribute Information: (x=player x has taken, o=player o has taken, b=blank) 59 | 60 | 1. top-left-square: {x,o,b} 61 | 2. top-middle-square: {x,o,b} 62 | 3. top-right-square: {x,o,b} 63 | 4. middle-left-square: {x,o,b} 64 | 5. middle-middle-square: {x,o,b} 65 | 6. middle-right-square: {x,o,b} 66 | 7. bottom-left-square: {x,o,b} 67 | 8. bottom-middle-square: {x,o,b} 68 | 9. bottom-right-square: {x,o,b} 69 | 10. Class: {positive,negative} 70 | 71 | 8. Missing Attribute Values: None 72 | 73 | 9. 
Class Distribution: About 65.3% are positive (i.e., wins for "x") 74 | -------------------------------------------------------------------------------- /dataset.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | # author: Bo Tang 4 | 5 | import pandas as pd 6 | import numpy as np 7 | 8 | from sklearn.preprocessing import LabelBinarizer 9 | 10 | def loadData(dataname): 11 | """ 12 | load training and testing data from different dataset 13 | """ 14 | # balance-scale 15 | if dataname == 'balance-scale': 16 | x, y = loadBalanceScale() 17 | return x, y 18 | # breast-cancer 19 | if dataname == 'breast-cancer': 20 | x, y = loadBreastCancer() 21 | return x, y 22 | # car-evaluation 23 | if dataname == 'car-evaluation': 24 | x, y = loadCarEvaluation() 25 | return x, y 26 | # hayes-roth 27 | if dataname == 'hayes-roth': 28 | x, y = loadHayesRoth() 29 | return x, y 30 | # house-votes-84 31 | if dataname == 'house-votes-84': 32 | x, y = loadHouseVotes84() 33 | return x, y 34 | # soybean-small 35 | if dataname == 'soybean-small': 36 | x, y = loadSoybean() 37 | return x, y 38 | # spect 39 | if dataname == 'spect': 40 | x, y = loadSpect() 41 | return x, y 42 | # tic-tac-toe 43 | if dataname == 'tic-tac-toe': 44 | x, y = loadTicTacToe() 45 | return x, y 46 | # monks 47 | if dataname[:5] == 'monks': 48 | x, y = loadMonks(dataname) 49 | return x, y 50 | raise NameError('No dataset "{}".'.format(dataname)) 51 | 52 | def oneHot(x): 53 | """ 54 | one-hot encoding 55 | """ 56 | x_enc = np.zeros((x.shape[0], 0)) 57 | for j in range(x.shape[1]): 58 | lb = LabelBinarizer() 59 | lb.fit(np.unique(x[:,j])) 60 | x_enc = np.concatenate((x_enc, lb.transform(x[:,j])), axis=1) 61 | return x_enc 62 | 63 | def loadBalanceScale(): 64 | """ 65 | load balance-scale dataset 66 | """ 67 | df = pd.read_csv('./data/balance-scale/balance-scale.data', header=None, delimiter=',') 68 | x, y = df[[1,2,3,4]], df[0] 69 | y = pd.factorize(y) 70 | return np.array(x), np.array(y, dtype=object)[0] 71 | 72 | def loadBreastCancer(): 73 | """ 74 | load breast-cancer dataset 75 | """ 76 | df = pd.read_csv('./data/breast-cancer/breast-cancer.data', header=None, delimiter=',') 77 | for i in range(9): 78 | df = df[df[i] != '?'] 79 | df = df.apply(lambda x: pd.factorize(x)[0]) 80 | x, y = df[[1,2,3,4,5,6,7,8,9]], df[0] 81 | return np.array(x), np.array(y) 82 | 83 | def loadCarEvaluation(): 84 | """ 85 | load car-evaluation dataset 86 | """ 87 | df = pd.read_csv('./data/car-evaluation/car.data', header=None, delimiter=',') 88 | df = df.apply(lambda x: pd.factorize(x)[0]) 89 | x, y = df[[0,1,2,3,4,5]], df[6] 90 | return np.array(x), np.array(y) 91 | 92 | def loadHayesRoth(): 93 | """ 94 | load hayes-roth dataset 95 | """ 96 | df_train = pd.read_csv('./data/hayes-roth/hayes-roth.data', header=None, delimiter=',') 97 | df_test = pd.read_csv('./data/hayes-roth/hayes-roth.test', header=None, delimiter=',') 98 | x_train, y_train = df_train[[1,2,3,4]], df_train[5] 99 | x_test, y_test = df_test[[0,1,2,3]], df_test[4] 100 | x, y = np.concatenate((x_train, x_test), axis=0), np.concatenate((y_train, y_test), axis=0) 101 | x = pd.DataFrame(x) 102 | x1, x2 = np.array(x[[0,3]]), np.array(x[[1,2]]) 103 | x1 = oneHot(x1) 104 | x = np.concatenate((x1, x2), axis=1) 105 | return x, y 106 | 107 | def loadHouseVotes84(): 108 | """ 109 | load house-votes-84 dataset 110 | """ 111 | df = pd.read_csv('./data/house-votes-84/house-votes-84.data', header=None, delimiter=',') 112 | for i in 
range(1,17): 113 | df = df[df[i] != '?'] 114 | df = df.apply(lambda x: pd.factorize(x)[0]) 115 | x, y = df[[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16]], df[0] 116 | return np.array(x), np.array(y) 117 | 118 | def loadSoybean(): 119 | """ 120 | load soybean dataset 121 | """ 122 | df = pd.read_csv('./data/soybean-small/soybean-small.data', header=None, delimiter=',') 123 | for i in range(35): 124 | df = df[df[i] != '?'] 125 | x, y = df[range(35)], df[35] 126 | y = pd.factorize(y) 127 | x = pd.DataFrame(x) 128 | x1 = np.array(x[[0,5,8,12,13,14,17,20,21,23,25,27,28,34]]) 129 | x2 = np.array(x[[1,2,3,4,6,7,9,10,11,15,16,18,19,22,24,26,29,30,31,32,33]]) 130 | x1 = oneHot(x1) 131 | x = np.concatenate((x1, x2), axis=1) 132 | return np.array(x), np.array(y, dtype=object)[0] 133 | 134 | def loadSpect(): 135 | """ 136 | load spect dataset 137 | """ 138 | df_train = pd.read_csv('./data/spect/spect.train', header=None, delimiter=',') 139 | df_test = pd.read_csv('./data/spect/spect.test', header=None, delimiter=',') 140 | x_train, y_train = df_train[range(1,23)], df_train[0] 141 | x_test, y_test = df_test[range(1,23)], df_test[0] 142 | return np.concatenate((x_train, x_test), axis=0), np.concatenate((y_train, y_test), axis=0) 143 | 144 | def loadTicTacToe(): 145 | """ 146 | load tic-tac-toe dataset 147 | """ 148 | df = pd.read_csv('./data/tic-tac-toe/tic-tac-toe.data', header=None, delimiter=',') 149 | x, y = df[[0,1,2,3,4,5,6,7]], df[9] 150 | x = oneHot(np.array(x)) 151 | y = pd.factorize(y) 152 | return x, np.array(y, dtype=object)[0] 153 | 154 | def loadMonks(dataname): 155 | """ 156 | load Monks dataset 157 | """ 158 | _, index = dataname.split('-') 159 | if index not in ['1', '2', '3']: 160 | raise AssertionError('No dataset ' + dataname) 161 | df_train = pd.read_csv('./data/monks/monks-{}.train'.format(index), header=None, delimiter=' ') 162 | df_test = pd.read_csv('./data/monks/monks-{}.test'.format(index), header=None, delimiter=' ') 163 | x_train, y_train = df_train[[2,3,4,5,6,7]], df_train[1] 164 | x_test, y_test = df_test[[2,3,4,5,6,7]], df_test[1] 165 | x = np.concatenate((x_train, x_test), axis=0) 166 | y = np.concatenate((y_train, y_test), axis=0) 167 | x = oneHot(x) 168 | return x, y 169 | -------------------------------------------------------------------------------- /experiments/Result.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": 1, 6 | "id": "architectural-privacy", 7 | "metadata": {}, 8 | "outputs": [], 9 | "source": [ 10 | "import pandas as pd" 11 | ] 12 | }, 13 | { 14 | "cell_type": "code", 15 | "execution_count": 2, 16 | "id": "informal-feelings", 17 | "metadata": {}, 18 | "outputs": [], 19 | "source": [ 20 | "res_oct = pd.read_csv('./res/oct.csv')\n", 21 | "res_boct = pd.read_csv('./res/boct.csv')\n", 22 | "res_mfoct = pd.read_csv('./res/mfoct.csv')\n", 23 | "res_sk = pd.read_csv('./res/sk.csv')\n", 24 | "res = {'OCT':res_oct, 'binOCT':res_boct, 'flowOCT':res_mfoct, 'CART':res_sk}" 25 | ] 26 | }, 27 | { 28 | "cell_type": "code", 29 | "execution_count": 3, 30 | "id": "smooth-watch", 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "def resProcess(res):\n", 35 | " \"\"\"\n", 36 | " Calculate the average results from random data split\n", 37 | " Select alpha with highest validation accuracy\n", 38 | " \"\"\"\n", 39 | " if 'alpha' in res.columns:\n", 40 | " res_group = res.groupby(['instance', 'depth', 'alpha']).mean().reset_index()\n", 41 | " res_group = 
res_group.groupby(['instance', 'depth']) \\\n", 42 | " .apply(lambda group: group[group['val_acc'] == group['val_acc'].max()]) \\\n", 43 | " .drop_duplicates(subset=['instance', 'depth']) \\\n", 44 | " .reset_index(drop=True)\n", 45 | " else:\n", 46 | " res_group = res.groupby(['instance', 'depth']).mean().reset_index()\n", 47 | " return res_group" 48 | ] 49 | }, 50 | { 51 | "cell_type": "code", 52 | "execution_count": 4, 53 | "id": "federal-jersey", 54 | "metadata": {}, 55 | "outputs": [], 56 | "source": [ 57 | "def resCombine(res, feat):\n", 58 | " df = pd.DataFrame(columns=['instance', 'depth'])\n", 59 | " for r in res:\n", 60 | " df_cur = resProcess(res[r])[['instance', 'depth', feat]]\n", 61 | " df_cur = df_cur.rename(columns={feat:r})\n", 62 | " df = df.merge(df_cur, how='outer', on=['instance', 'depth'])\n", 63 | " return df" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": 5, 69 | "id": "crazy-poison", 70 | "metadata": {}, 71 | "outputs": [], 72 | "source": [ 73 | "resCombine(res, 'test_acc').to_csv('./res/res.csv')" 74 | ] 75 | }, 76 | { 77 | "cell_type": "code", 78 | "execution_count": null, 79 | "id": "accepted-washer", 80 | "metadata": {}, 81 | "outputs": [], 82 | "source": [] 83 | } 84 | ], 85 | "metadata": { 86 | "kernelspec": { 87 | "display_name": "Python 3", 88 | "language": "python", 89 | "name": "python3" 90 | }, 91 | "language_info": { 92 | "codemirror_mode": { 93 | "name": "ipython", 94 | "version": 3 95 | }, 96 | "file_extension": ".py", 97 | "mimetype": "text/x-python", 98 | "name": "python", 99 | "nbconvert_exporter": "python", 100 | "pygments_lexer": "ipython3", 101 | "version": "3.7.10" 102 | } 103 | }, 104 | "nbformat": 4, 105 | "nbformat_minor": 5 106 | } 107 | -------------------------------------------------------------------------------- /fig/dt.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LucasBoTang/Optimal_Classification_Trees/9a5714db7cb3a60eea4758c8143dc3144f96b3d2/fig/dt.jpg -------------------------------------------------------------------------------- /fig/maxflow.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/LucasBoTang/Optimal_Classification_Trees/9a5714db7cb3a60eea4758c8143dc3144f96b3d2/fig/maxflow.png -------------------------------------------------------------------------------- /res/1200/boct.csv: -------------------------------------------------------------------------------- 1 | instance,depth,seed,train_acc,val_acc,test_acc,train_time,gap 2 | balance-scale,2,37,0.6762820512820513,0.5705128205128205,0.6496815286624203,6.49409556388855,0.0 3 | balance-scale,2,42,0.717948717948718,0.6410256410256411,0.6624203821656051,5.691070079803467,0.0 4 | balance-scale,2,53,0.6730769230769231,0.5641025641025641,0.6369426751592356,4.699124336242676,0.0 5 | balance-scale,3,37,0.7147435897435898,0.6602564102564102,0.7133757961783439,1202.0272710323334,0.746031746031746 6 | balance-scale,3,42,0.7019230769230769,0.6794871794871795,0.643312101910828,1201.7590672969818,0.7213114726217793 7 | balance-scale,3,53,0.6602564102564102,0.5384615384615384,0.6369426751592356,1201.7603561878204,0.7564367313284133 8 | balance-scale,4,37,0.7115384615384616,0.6923076923076923,0.7133757961783439,1202.4304239749908,1.0 9 | balance-scale,4,42,0.7724358974358975,0.7435897435897436,0.7006369426751592,1202.7033512592316,1.0 10 | 
balance-scale,4,53,0.7467948717948718,0.6923076923076923,0.7070063694267515,1202.5300314426422,1.0 11 | balance-scale,5,37,0.5256410256410257,0.5064102564102564,0.4585987261146497,1204.3992640972137,1.0 12 | balance-scale,5,42,0.7435897435897436,0.7051282051282052,0.6560509554140127,1204.1075296401978,1.0 13 | balance-scale,5,53,0.5961538461538461,0.5961538461538461,0.5796178343949044,1203.9429712295532,1.0 14 | breast-cancer,2,37,0.6884057971014492,0.782608695652174,0.6714285714285714,10.569740295410156,0.0 15 | breast-cancer,2,42,0.7898550724637681,0.6811594202898551,0.7285714285714285,11.668315410614014,0.0 16 | breast-cancer,2,53,0.8260869565217391,0.7101449275362319,0.6571428571428571,12.541383266448975,0.0 17 | breast-cancer,3,37,0.8043478260869565,0.7681159420289855,0.6857142857142857,1202.569682598114,0.9583332678025808 18 | breast-cancer,3,42,0.8043478260869565,0.6666666666666666,0.7714285714285715,1201.8535821437836,0.9984638984546473 19 | breast-cancer,3,53,0.7971014492753623,0.6956521739130435,0.6857142857142857,1202.07471036911,1.0 20 | breast-cancer,4,37,0.8043478260869565,0.7391304347826086,0.7142857142857143,1201.7621977329254,1.0 21 | breast-cancer,4,42,0.8260869565217391,0.7246376811594203,0.7142857142857143,1202.2093675136566,1.0 22 | breast-cancer,4,53,0.7681159420289855,0.6376811594202898,0.5571428571428572,1201.949079990387,1.0 23 | breast-cancer,5,37,0.7101449275362319,0.6666666666666666,0.6571428571428571,1203.0921263694763,1.0 24 | breast-cancer,5,42,0.8188405797101449,0.6811594202898551,0.6571428571428571,1203.437982082367,1.0 25 | breast-cancer,5,53,0.5,0.34782608695652173,0.38571428571428573,1203.5128684043884,1.0 26 | car-evaluation,2,37,0.7951388888888888,0.7662037037037037,0.7546296296296297,10.15673828125,0.0 27 | car-evaluation,2,42,0.7511574074074074,0.8009259259259259,0.7731481481481481,25.813984632492065,0.0 28 | car-evaluation,2,53,0.78125,0.7453703703703703,0.7685185185185185,14.829872608184814,0.0 29 | car-evaluation,3,37,0.8252314814814815,0.7962962962962963,0.7754629629629629,1202.2350552082062,0.814558349740469 30 | car-evaluation,3,42,0.7546296296296297,0.7824074074074074,0.7638888888888888,1201.4477128982544,0.8972118902451541 31 | car-evaluation,3,53,0.8171296296296297,0.8148148148148148,0.8194444444444444,1201.8295838832855,0.7648092256407947 32 | car-evaluation,4,37,0.8645833333333334,0.8726851851851852,0.8472222222222222,1203.6653242111206,1.0 33 | car-evaluation,4,42,0.8321759259259259,0.8611111111111112,0.8379629629629629,1203.2259788513184,1.0 34 | car-evaluation,4,53,0.8645833333333334,0.8657407407407407,0.8587962962962963,1203.488644361496,1.0 35 | car-evaluation,5,37,0.8912037037037037,0.8703703703703703,0.8472222222222222,1208.3941612243652,1.0 36 | car-evaluation,5,42,0.8402777777777778,0.8541666666666666,0.8425925925925926,1209.214187860489,1.0 37 | car-evaluation,5,53,0.7650462962962963,0.7569444444444444,0.7546296296296297,1211.351671218872,1.0 38 | hayes-roth,2,37,0.65,0.475,0.65,2.103224515914917,0.0 39 | hayes-roth,2,42,0.4875,0.4,0.45,2.4111292362213135,0.0 40 | hayes-roth,2,53,0.675,0.625,0.55,2.6695516109466553,0.0 41 | hayes-roth,3,37,0.7,0.7,0.575,1201.8161759376526,0.4736841400150401 42 | hayes-roth,3,42,0.85,0.575,0.8,281.03241205215454,0.0 43 | hayes-roth,3,53,0.8,0.675,0.725,747.8261659145355,0.0 44 | hayes-roth,4,37,0.4875,0.275,0.55,1201.8807849884033,1.0 45 | hayes-roth,4,42,0.9,0.575,0.8,1200.8314754962921,1.0 46 | hayes-roth,4,53,0.6,0.65,0.575,1201.2610342502594,1.0 47 | 
hayes-roth,5,37,0.4875,0.35,0.525,1202.8683636188507,1.0 48 | hayes-roth,5,42,0.725,0.5,0.65,1201.4049129486084,1.0 49 | hayes-roth,5,53,0.675,0.575,0.55,1201.6097180843353,1.0 50 | house-votes-84,2,37,0.9741379310344828,0.9482758620689655,0.9827586206896551,0.796579122543335,0.0 51 | house-votes-84,2,42,0.9655172413793104,0.9655172413793104,0.9482758620689655,0.7205848693847656,0.0 52 | house-votes-84,2,53,0.9741379310344828,0.9655172413793104,0.9655172413793104,0.714623212814331,0.0 53 | house-votes-84,3,37,0.9913793103448276,0.9310344827586207,0.9827586206896551,206.0470631122589,0.0 54 | house-votes-84,3,42,0.9827586206896551,0.896551724137931,0.9655172413793104,727.7878918647766,0.0 55 | house-votes-84,3,53,0.9913793103448276,0.9655172413793104,0.9655172413793104,426.9277603626251,0.0 56 | house-votes-84,4,37,1.0,0.896551724137931,0.9310344827586207,7.982578992843628,0.0 57 | house-votes-84,4,42,0.9913793103448276,0.9137931034482759,0.9310344827586207,1200.702674627304,1.0 58 | house-votes-84,4,53,1.0,0.8620689655172413,0.8793103448275862,2.3118181228637695,0.0 59 | house-votes-84,5,37,1.0,0.9137931034482759,0.9655172413793104,3.7609503269195557,0.0 60 | house-votes-84,5,42,1.0,0.896551724137931,0.9482758620689655,4.737844467163086,0.0 61 | house-votes-84,5,53,1.0,0.896551724137931,0.9655172413793104,4.973699569702148,0.0 62 | soybean-small,2,37,1.0,0.6666666666666666,0.9166666666666666,0.09275197982788086,0.0 63 | soybean-small,2,42,1.0,1.0,1.0,0.4248645305633545,0.0 64 | soybean-small,2,53,1.0,1.0,1.0,0.4577763080596924,0.0 65 | soybean-small,3,37,1.0,0.3333333333333333,0.8333333333333334,0.22240543365478516,0.0 66 | soybean-small,3,42,1.0,1.0,1.0,0.18949317932128906,0.0 67 | soybean-small,3,53,0.391304347826087,0.5,0.4166666666666667,0.17752432823181152,0.0 68 | soybean-small,4,37,0.782608695652174,1.0,0.5833333333333334,0.45378828048706055,0.0 69 | soybean-small,4,42,1.0,1.0,1.0,0.47074341773986816,0.0 70 | soybean-small,4,53,0.9565217391304348,0.8333333333333334,0.9166666666666666,0.4398233890533447,0.0 71 | soybean-small,5,37,1.0,1.0,0.9166666666666666,1.133969783782959,0.0 72 | soybean-small,5,42,0.6086956521739131,0.5833333333333334,0.75,1.1698739528656006,0.0 73 | soybean-small,5,53,0.6956521739130435,0.5833333333333334,0.5,1.1499519348144531,0.0 74 | spect,2,37,0.7894736842105263,0.7611940298507462,0.835820895522388,10.050130605697632,0.0 75 | spect,2,42,0.8571428571428571,0.7014925373134329,0.7014925373134329,1.0142879486083984,0.0 76 | spect,2,53,0.8120300751879699,0.746268656716418,0.8059701492537313,7.728850364685059,0.0 77 | spect,3,37,0.8195488721804511,0.7761194029850746,0.8208955223880597,681.1462171077728,4.1254125584394557e-08 78 | spect,3,42,0.9022556390977443,0.7014925373134329,0.7014925373134329,76.13746500015259,0.0 79 | spect,3,53,0.8270676691729323,0.746268656716418,0.746268656716418,1200.7778549194336,0.17391307320834046 80 | spect,4,37,0.8872180451127819,0.7014925373134329,0.7910447761194029,1201.8891637325287,0.3999926062651573 81 | spect,4,42,0.9398496240601504,0.7014925373134329,0.6716417910447762,1201.2569341659546,0.6249998557756127 82 | spect,4,53,0.8796992481203008,0.7761194029850746,0.7761194029850746,1200.939608335495,0.3749999571699871 83 | spect,5,37,0.924812030075188,0.6865671641791045,0.7014925373134329,1202.9508533477783,0.2999995917533409 84 | spect,5,42,0.9398496240601504,0.7313432835820896,0.6865671641791045,1204.665539264679,0.7499999705880918 85 | 
spect,5,53,0.9172932330827067,0.7164179104477612,0.7761194029850746,1203.2616856098175,0.36363636294793356 86 | tic-tac-toe,2,37,0.7202505219206681,0.694560669456067,0.675,126.21291851997375,0.0 87 | tic-tac-toe,2,42,0.7077244258872651,0.6736401673640168,0.7125,99.85691738128662,0.0 88 | tic-tac-toe,2,53,0.7077244258872651,0.7071129707112971,0.6791666666666667,135.1524350643158,0.0 89 | tic-tac-toe,3,37,0.7536534446764092,0.7447698744769874,0.7083333333333334,1201.9514586925507,1.0 90 | tic-tac-toe,3,42,0.7766179540709812,0.7238493723849372,0.7708333333333334,1201.8222696781158,1.0 91 | tic-tac-toe,3,53,0.7745302713987474,0.7322175732217573,0.7708333333333334,1201.6999847888947,0.9999999999999999 92 | tic-tac-toe,4,37,0.8162839248434238,0.7907949790794979,0.8041666666666667,1203.262772321701,1.0 93 | tic-tac-toe,4,42,0.824634655532359,0.7866108786610879,0.7916666666666666,1203.0945386886597,1.0 94 | tic-tac-toe,4,53,0.7995824634655533,0.7489539748953975,0.7625,1203.3237614631653,1.0 95 | tic-tac-toe,5,37,0.8810020876826722,0.8200836820083682,0.8416666666666667,1210.6531698703766,0.9999999999999991 96 | tic-tac-toe,5,42,0.8768267223382046,0.8117154811715481,0.8125,1210.1827635765076,0.9999999999999999 97 | tic-tac-toe,5,53,0.8580375782881002,0.7824267782426778,0.7833333333333333,1210.1819670200348,1.0 98 | monks-1,2,37,0.8093525179856115,0.7050359712230215,0.7841726618705036,13.814148426055908,0.0 99 | monks-1,2,42,0.7913669064748201,0.7482014388489209,0.762589928057554,3.615893602371216,0.0 100 | monks-1,2,53,0.8093525179856115,0.7841726618705036,0.7050359712230215,3.0934271812438965,0.0 101 | monks-1,3,37,0.9172661870503597,0.8776978417266187,0.8705035971223022,104.25982689857483,0.0 102 | monks-1,3,42,0.9064748201438849,0.8489208633093526,0.8633093525179856,247.9573073387146,0.0 103 | monks-1,3,53,0.920863309352518,0.8705035971223022,0.841726618705036,149.2303466796875,0.0 104 | monks-1,4,37,1.0,1.0,1.0,3.800654172897339,0.0 105 | monks-1,4,42,1.0,1.0,1.0,11.262216567993164,0.0 106 | monks-1,4,53,1.0,1.0,1.0,8.850003480911255,0.0 107 | monks-1,5,37,1.0,1.0,1.0,7.287518739700317,0.0 108 | monks-1,5,42,1.0,1.0,1.0,8.98398208618164,0.0 109 | monks-1,5,53,1.0,1.0,1.0,6.359995603561401,0.0 110 | monks-2,2,37,0.63,0.6,0.5960264900662252,18.357926845550537,0.0 111 | monks-2,2,42,0.6433333333333333,0.64,0.6291390728476821,21.05271601676941,0.0 112 | monks-2,2,53,0.66,0.6,0.5960264900662252,20.68569779396057,0.0 113 | monks-2,3,37,0.6866666666666666,0.6,0.5629139072847682,1202.5145230293274,0.9574467764096007 114 | monks-2,3,42,0.6933333333333334,0.64,0.5562913907284768,1201.7115063667297,0.9999999999999996 115 | monks-2,3,53,0.6933333333333334,0.6266666666666667,0.6357615894039735,1202.976598739624,0.858695652173913 116 | monks-2,4,37,0.7233333333333334,0.6066666666666667,0.5960264900662252,1202.0436625480652,1.0 117 | monks-2,4,42,0.74,0.6,0.5496688741721855,1201.9409601688385,0.9999999999999996 118 | monks-2,4,53,0.7566666666666667,0.5866666666666667,0.5960264900662252,1202.6809391975403,1.0 119 | monks-2,5,37,0.7733333333333333,0.6266666666666667,0.5761589403973509,1204.547600030899,1.0 120 | monks-2,5,42,0.81,0.6266666666666667,0.609271523178808,1204.833270072937,0.9999999999999989 121 | monks-2,5,53,0.8033333333333333,0.66,0.6357615894039735,1204.4382603168488,1.0 122 | monks-3,2,37,0.9711191335740073,0.9492753623188406,0.9640287769784173,1.0053126811981201,0.0 123 | monks-3,2,42,0.9711191335740073,0.9492753623188406,0.9640287769784173,0.9694066047668457,0.0 124 | 
monks-3,2,53,0.9675090252707581,0.9492753623188406,0.9712230215827338,1.266613483428955,0.0 125 | monks-3,3,37,0.9963898916967509,0.9637681159420289,1.0,95.83753561973572,0.0 126 | monks-3,3,42,0.9963898916967509,0.9855072463768116,0.9784172661870504,35.87109994888306,0.0 127 | monks-3,3,53,0.9927797833935018,0.9782608695652174,0.9928057553956835,288.6725392341614,0.0 128 | monks-3,4,37,0.9963898916967509,0.9637681159420289,1.0,1201.1374018192291,1.0 129 | monks-3,4,42,0.9963898916967509,0.9855072463768116,0.9784172661870504,1201.2396914958954,1.0 130 | monks-3,4,53,0.9927797833935018,0.9637681159420289,0.9856115107913669,1201.8195173740387,0.9999999999999846 131 | monks-3,5,37,0.9963898916967509,0.9637681159420289,1.0,1203.4502074718475,1.0 132 | monks-3,5,42,0.9963898916967509,0.9855072463768116,0.9784172661870504,1203.3447997570038,1.0 133 | monks-3,5,53,0.9963898916967509,0.9565217391304348,0.9712230215827338,1203.450241804123,1.0 134 | -------------------------------------------------------------------------------- /res/600/boct.csv: -------------------------------------------------------------------------------- 1 | instance,depth,seed,train_acc,val_acc,test_acc,train_time,gap 2 | balance-scale,2,37,0.6762820512820513,0.5705128205128205,0.6496815286624203,7.862054109573364,0.0 3 | balance-scale,2,42,0.717948717948718,0.6410256410256411,0.6624203821656051,6.624675512313843,0.0 4 | balance-scale,2,53,0.6730769230769231,0.5641025641025641,0.6369426751592356,5.622634649276733,0.0 5 | balance-scale,3,37,0.6955128205128205,0.6346153846153846,0.7006369426751592,601.2039694786072,0.8461538461538445 6 | balance-scale,3,42,0.7019230769230769,0.6794871794871795,0.643312101910828,601.0618939399719,0.8360655726922692 7 | balance-scale,3,53,0.6602564102564102,0.5384615384615384,0.6369426751592356,601.048951625824,0.8548375658240375 8 | balance-scale,4,37,0.7307692307692307,0.6923076923076923,0.7133757961783439,601.9935750961304,1.0 9 | balance-scale,4,42,0.7724358974358975,0.7435897435897436,0.7006369426751592,601.7378787994385,1.0 10 | balance-scale,4,53,0.6410256410256411,0.5769230769230769,0.5732484076433121,601.5925099849701,1.0 11 | balance-scale,5,37,0.6602564102564102,0.6346153846153846,0.5796178343949044,603.7505631446838,1.0 12 | balance-scale,5,42,0.7692307692307693,0.75,0.6878980891719745,603.9125680923462,1.0 13 | balance-scale,5,53,0.5961538461538461,0.5961538461538461,0.5796178343949044,604.1991484165192,1.0 14 | breast-cancer,2,37,0.6884057971014492,0.782608695652174,0.6714285714285714,11.192744255065918,0.0 15 | breast-cancer,2,42,0.7898550724637681,0.6811594202898551,0.7285714285714285,12.462610721588137,0.0 16 | breast-cancer,2,53,0.8260869565217391,0.7101449275362319,0.6571428571428571,13.177868843078612,0.0 17 | breast-cancer,3,37,0.8043478260869565,0.7681159420289855,0.6857142857142857,601.1264808177948,0.9988095219372164 18 | breast-cancer,3,42,0.8043478260869565,0.6666666666666666,0.7714285714285715,600.8310377597809,1.0 19 | breast-cancer,3,53,0.7971014492753623,0.6956521739130435,0.6857142857142857,600.9173622131348,1.0 20 | breast-cancer,4,37,0.8043478260869565,0.7391304347826086,0.7142857142857143,601.3978729248047,1.0 21 | breast-cancer,4,42,0.8115942028985508,0.7536231884057971,0.7,601.7337622642517,1.0 22 | breast-cancer,4,53,0.8695652173913043,0.7101449275362319,0.6857142857142857,601.66601395607,1.0 23 | breast-cancer,5,37,0.6376811594202898,0.5797101449275363,0.6285714285714286,603.6280953884125,1.0 24 | 
breast-cancer,5,42,0.8188405797101449,0.6811594202898551,0.6571428571428571,603.2902035713196,1.0 25 | breast-cancer,5,53,0.8043478260869565,0.7246376811594203,0.5857142857142857,604.0874724388123,1.0 26 | car-evaluation,2,37,0.7951388888888888,0.7662037037037037,0.7546296296296297,12.050049543380736,0.0 27 | car-evaluation,2,42,0.7511574074074074,0.8009259259259259,0.7731481481481481,34.00459599494934,0.0 28 | car-evaluation,2,53,0.78125,0.7453703703703703,0.7685185185185185,19.25566601753235,0.0 29 | car-evaluation,3,37,0.8252314814814815,0.7962962962962963,0.7754629629629629,602.0302274227142,0.9462479863969928 30 | car-evaluation,3,42,0.7546296296296297,0.7824074074074074,0.7638888888888888,601.5215156078339,0.9651190306550392 31 | car-evaluation,3,53,0.8171296296296297,0.8148148148148148,0.8194444444444444,601.7814176082611,0.9177215189873418 32 | car-evaluation,4,37,0.8067129629629629,0.8240740740740741,0.7986111111111112,608.4482736587524,1.0 33 | car-evaluation,4,42,0.8321759259259259,0.8611111111111112,0.8379629629629629,604.4570188522339,1.0 34 | car-evaluation,4,53,0.8043981481481481,0.8032407407407407,0.8148148148148148,604.3772075176239,1.0 35 | car-evaluation,5,37,0.8298611111111112,0.8402777777777778,0.8032407407407407,615.3691148757935,1.0 36 | car-evaluation,5,42,0.8344907407407407,0.8425925925925926,0.8287037037037037,611.5830554962158,1.0 37 | car-evaluation,5,53,0.8541666666666666,0.8263888888888888,0.8379629629629629,612.4775185585022,1.0 38 | hayes-roth,2,37,0.65,0.475,0.65,2.20510482788086,0.0 39 | hayes-roth,2,42,0.4875,0.4,0.45,2.546194076538086,0.0 40 | hayes-roth,2,53,0.675,0.625,0.55,2.5451955795288086,0.0 41 | hayes-roth,3,37,0.7,0.7,0.575,601.1109611988068,0.5789473387628651 42 | hayes-roth,3,42,0.85,0.575,0.8,416.7763319015503,0.0 43 | hayes-roth,3,53,0.8,0.675,0.725,600.7151877880096,0.3124999892578122 44 | hayes-roth,4,37,0.5,0.275,0.55,601.2501971721649,1.0 45 | hayes-roth,4,42,0.9375,0.575,0.8,600.5481021404266,1.0 46 | hayes-roth,4,53,0.6,0.65,0.575,600.7737891674042,1.0 47 | hayes-roth,5,37,0.4875,0.35,0.525,602.1622724533081,1.0 48 | hayes-roth,5,42,0.725,0.5,0.65,601.408326625824,1.0 49 | hayes-roth,5,53,0.725,0.625,0.45,601.9275100231171,1.0 50 | house-votes-84,2,37,0.9741379310344828,0.9482758620689656,0.9827586206896552,1.0033190250396729,0.0 51 | house-votes-84,2,42,0.9655172413793104,0.9655172413793104,0.9482758620689656,1.85512924194336,0.0 52 | house-votes-84,2,53,0.9741379310344828,0.9655172413793104,0.9655172413793104,1.111111402511597,0.0 53 | house-votes-84,3,37,0.9913793103448276,0.9310344827586208,0.9827586206896552,343.5392863750458,0.0 54 | house-votes-84,3,42,0.9827586206896552,0.896551724137931,0.9655172413793104,600.8548917770386,0.9999999999999988 55 | house-votes-84,3,53,0.9913793103448276,0.9655172413793104,0.9655172413793104,600.5593709945679,0.9813084110990652 56 | house-votes-84,4,37,1.0,0.896551724137931,0.9310344827586208,8.93610405921936,0.0 57 | house-votes-84,4,42,0.9913793103448276,1.0,0.9655172413793104,601.065819978714,1.0 58 | house-votes-84,4,53,1.0,0.8620689655172413,0.8793103448275862,3.6000921726226807,0.0 59 | house-votes-84,5,37,1.0,0.913793103448276,0.9655172413793104,5.534389495849609,0.0 60 | house-votes-84,5,42,1.0,0.896551724137931,0.9482758620689656,7.55802583694458,0.0 61 | house-votes-84,5,53,1.0,0.896551724137931,0.9655172413793104,6.7112109661102295,0.0 62 | soybean-small,2,37,1.0,0.6666666666666666,0.9166666666666666,0.1848185062408447,0.0 63 | soybean-small,2,42,1.0,1.0,1.0,0.3569512367248535,0.0 64 | 
soybean-small,2,53,1.0,1.0,1.0,0.4449937343597412,0.0 65 | soybean-small,3,37,1.0,0.3333333333333333,0.8333333333333334,0.2797079086303711,0.0 66 | soybean-small,3,42,1.0,1.0,1.0,0.2880237102508545,0.0 67 | soybean-small,3,53,0.391304347826087,0.5,0.4166666666666667,0.3224079608917236,0.0 68 | soybean-small,4,37,0.782608695652174,1.0,0.5833333333333334,0.7889747619628906,0.0 69 | soybean-small,4,42,1.0,1.0,1.0,0.7882423400878906,0.0 70 | soybean-small,4,53,0.9565217391304348,0.8333333333333334,0.9166666666666666,0.6703813076019287,0.0 71 | soybean-small,5,37,1.0,1.0,0.9166666666666666,1.6034832000732422,0.0 72 | soybean-small,5,42,0.6086956521739131,0.5833333333333334,0.75,2.1194751262664795,0.0 73 | soybean-small,5,53,0.6956521739130435,0.5833333333333334,0.5,1.8016624450683596,0.0 74 | spect,2,37,0.7894736842105263,0.7611940298507462,0.835820895522388,13.05594563484192,0.0 75 | spect,2,42,0.8571428571428571,0.7014925373134329,0.7014925373134329,1.32100510597229,0.0 76 | spect,2,53,0.8120300751879699,0.746268656716418,0.8059701492537313,9.929855585098268,0.0 77 | spect,3,37,0.8195488721804511,0.7761194029850746,0.8208955223880597,600.5585608482361,0.083333374587455 78 | spect,3,42,0.9022556390977444,0.7014925373134329,0.7014925373134329,94.24910044670104,0.0 79 | spect,3,53,0.8270676691729323,0.746268656716418,0.746268656716418,600.6704833507538,0.3294970722698564 80 | spect,4,37,0.8872180451127819,0.7164179104477612,0.7910447761194029,601.895712852478,0.3999999999999997 81 | spect,4,42,0.9398496240601504,0.7164179104477612,0.6567164179104478,602.0687654018402,0.6249999273586844 82 | spect,4,53,0.8796992481203008,0.7761194029850746,0.7761194029850746,601.6434552669525,0.4314903456565458 83 | spect,5,37,0.924812030075188,0.6865671641791045,0.7014925373134329,605.2380752563477,0.2999995917533409 84 | spect,5,42,0.9398496240601504,0.746268656716418,0.7164179104477612,605.5421595573425,0.7499999710374966 85 | spect,5,53,0.9022556390977444,0.746268656716418,0.7761194029850746,605.9256126880646,0.5595212923415105 86 | tic-tac-toe,2,37,0.7202505219206681,0.694560669456067,0.675,159.2136504650116,0.0 87 | tic-tac-toe,2,42,0.7077244258872651,0.6736401673640168,0.7125,138.4157738685608,0.0 88 | tic-tac-toe,2,53,0.7077244258872651,0.7071129707112971,0.6791666666666667,180.1628725528717,0.0 89 | tic-tac-toe,3,37,0.7536534446764092,0.7447698744769874,0.7083333333333334,601.5572957992554,1.0 90 | tic-tac-toe,3,42,0.7453027139874739,0.7322175732217573,0.7583333333333333,602.3250176906586,1.0 91 | tic-tac-toe,3,53,0.7745302713987474,0.7322175732217573,0.7708333333333334,602.297905921936,1.0 92 | tic-tac-toe,4,37,0.81419624217119,0.799163179916318,0.8166666666666667,605.6317763328552,1.0 93 | tic-tac-toe,4,42,0.7954070981210856,0.7531380753138075,0.825,605.8481559753418,1.0 94 | tic-tac-toe,4,53,0.7995824634655533,0.7489539748953975,0.7625,605.0438885688782,1.0 95 | tic-tac-toe,5,37,0.8559498956158664,0.8326359832635983,0.8125,629.4825010299683,1.0 96 | tic-tac-toe,5,42,0.8580375782881002,0.8200836820083682,0.7916666666666666,618.8465006351471,1.0 97 | tic-tac-toe,5,53,0.8580375782881002,0.7824267782426778,0.7833333333333333,617.0963134765625,1.0 98 | monks-1,2,37,0.6906474820143885,0.5755395683453237,0.6474820143884892,3.4014892578125,0.0 99 | monks-1,2,42,0.7913669064748201,0.7482014388489209,0.762589928057554,4.54936671257019,0.0 100 | monks-1,2,53,0.8093525179856115,0.7841726618705036,0.7050359712230215,2.9206364154815674,0.0 101 | 
monks-1,3,37,0.8525179856115108,0.7985611510791367,0.8561151079136691,600.5185284614563,0.7465966693488422 102 | monks-1,3,42,0.8525179856115108,0.8705035971223022,0.8273381294964028,539.6730034351349,0.0 103 | monks-1,3,53,0.8525179856115108,0.8633093525179856,0.8345323741007195,286.8803479671478,0.0 104 | monks-1,4,37,0.8345323741007195,0.8489208633093526,0.8848920863309353,601.4544382095337,1.0 105 | monks-1,4,42,0.8525179856115108,0.8705035971223022,0.8273381294964028,601.5815010070801,1.0 106 | monks-1,4,53,1.0,1.0,1.0,10.074071407318115,0.0 107 | monks-1,5,37,1.0,1.0,1.0,11.426413536071777,0.0 108 | monks-1,5,42,1.0,1.0,1.0,14.264965057373047,0.0 109 | monks-1,5,53,1.0,1.0,1.0,10.543958187103271,0.0 110 | monks-2,2,37,0.63,0.6,0.5960264900662252,26.242021799087524,0.0 111 | monks-2,2,42,0.6433333333333333,0.64,0.6291390728476821,28.43013048171997,0.0 112 | monks-2,2,53,0.66,0.6,0.5960264900662252,25.529518365859985,0.0 113 | monks-2,3,37,0.6866666666666666,0.6,0.5629139072847682,601.4213998317719,1.0 114 | monks-2,3,42,0.6933333333333334,0.64,0.5562913907284768,601.4784550666809,1.0 115 | monks-2,3,53,0.6933333333333334,0.6266666666666667,0.6357615894039735,601.5561256408691,0.982484076433121 116 | monks-2,4,37,0.7233333333333334,0.6066666666666667,0.5960264900662252,602.491738319397,1.0 117 | monks-2,4,42,0.74,0.6,0.5496688741721855,602.670923948288,0.9999999999999996 118 | monks-2,4,53,0.7533333333333333,0.5933333333333334,0.5960264900662252,602.2184751033783,1.0 119 | monks-2,5,37,0.77,0.6,0.5629139072847682,606.5714340209961,1.0 120 | monks-2,5,42,0.7933333333333333,0.6066666666666667,0.5695364238410596,606.7011761665344,1.0 121 | monks-2,5,53,0.8033333333333333,0.66,0.6357615894039735,607.4473748207092,1.0 122 | monks-3,2,37,0.9711191335740073,0.9492753623188406,0.9640287769784173,1.1229984760284424,0.0 123 | monks-3,2,42,0.9711191335740073,0.9492753623188406,0.9640287769784173,1.1828384399414062,0.0 124 | monks-3,2,53,0.9675090252707581,0.9492753623188406,0.9712230215827338,1.5708012580871582,0.0 125 | monks-3,3,37,0.9963898916967509,0.9637681159420289,1.0,139.1701533794403,0.0 126 | monks-3,3,42,0.9963898916967509,0.9855072463768116,0.9784172661870504,47.74843883514404,0.0 127 | monks-3,3,53,0.9927797833935018,0.9782608695652174,0.9928057553956835,412.6961364746094,0.0 128 | monks-3,4,37,0.9963898916967509,0.9637681159420289,1.0,602.3403358459473,1.0 129 | monks-3,4,42,0.9963898916967509,0.9855072463768116,0.9784172661870504,601.8910541534424,1.0 130 | monks-3,4,53,0.9927797833935018,0.9637681159420289,0.9856115107913669,602.215008020401,1.0 131 | monks-3,5,37,0.9963898916967509,0.9637681159420289,1.0,605.92489361763,1.0 132 | monks-3,5,42,0.9963898916967509,0.9855072463768116,0.9784172661870504,607.5114023685455,1.0 133 | monks-3,5,53,0.9963898916967509,0.9565217391304348,0.9712230215827338,606.7485764026642,1.0 134 | -------------------------------------------------------------------------------- /res/sk.csv: -------------------------------------------------------------------------------- 1 | instance,depth,seed,train_acc,val_acc,test_acc,train_time 2 | balance-scale,2,37,0.7275641025641025,0.6089743589743589,0.6751592356687898,0.04706859588623047 3 | balance-scale,2,42,0.6730769230769231,0.6217948717948718,0.5796178343949044,0.0 4 | balance-scale,2,53,0.6923076923076923,0.6217948717948718,0.6305732484076433,0.0009980201721191406 5 | balance-scale,3,37,0.782051282051282,0.6666666666666666,0.7515923566878981,0.0 6 | 
balance-scale,3,42,0.8044871794871795,0.6794871794871795,0.6942675159235668,0.0009975433349609375 7 | balance-scale,3,53,0.8012820512820513,0.6923076923076923,0.7133757961783439,0.0009970664978027344 8 | balance-scale,4,37,0.8365384615384616,0.7756410256410257,0.7643312101910829,0.0009961128234863281 9 | balance-scale,4,42,0.8301282051282052,0.782051282051282,0.7452229299363057,0.000997781753540039 10 | balance-scale,4,53,0.842948717948718,0.7756410256410257,0.802547770700637,0.0009965896606445312 11 | balance-scale,5,37,0.875,0.7948717948717948,0.7834394904458599,0.000997304916381836 12 | balance-scale,5,42,0.8685897435897436,0.7564102564102564,0.7388535031847133,0.0 13 | balance-scale,5,53,0.8814102564102564,0.7756410256410257,0.7898089171974523,0.0 14 | breast-cancer,2,37,0.7753623188405797,0.8260869565217391,0.7285714285714285,0.0009963512420654297 15 | breast-cancer,2,42,0.7898550724637681,0.6811594202898551,0.7285714285714285,0.0 16 | breast-cancer,2,53,0.7971014492753623,0.7971014492753623,0.6714285714285714,0.0 17 | breast-cancer,3,37,0.7971014492753623,0.782608695652174,0.7428571428571429,0.0 18 | breast-cancer,3,42,0.8260869565217391,0.6956521739130435,0.7285714285714285,0.0 19 | breast-cancer,3,53,0.8405797101449275,0.6811594202898551,0.7142857142857143,0.0 20 | breast-cancer,4,37,0.8260869565217391,0.7681159420289855,0.7142857142857143,0.000997304916381836 21 | breast-cancer,4,42,0.8695652173913043,0.7101449275362319,0.7428571428571429,0.0009810924530029297 22 | breast-cancer,4,53,0.855072463768116,0.7246376811594203,0.7,0.0009953975677490234 23 | breast-cancer,5,37,0.8623188405797102,0.7536231884057971,0.6571428571428571,0.0009899139404296875 24 | breast-cancer,5,42,0.9057971014492754,0.7101449275362319,0.7285714285714285,0.0009992122650146484 25 | breast-cancer,5,53,0.8840579710144928,0.6956521739130435,0.7,0.000997304916381836 26 | car-evaluation,2,37,0.7951388888888888,0.7662037037037037,0.7546296296296297,0.0 27 | car-evaluation,2,42,0.7488425925925926,0.8240740740740741,0.7893518518518519,0.0009958744049072266 28 | car-evaluation,2,53,0.7743055555555556,0.7847222222222222,0.7777777777777778,0.0009975433349609375 29 | car-evaluation,3,37,0.7951388888888888,0.7662037037037037,0.7546296296296297,0.0 30 | car-evaluation,3,42,0.7835648148148148,0.7939814814814815,0.8055555555555556,0.0009975433349609375 31 | car-evaluation,3,53,0.8020833333333334,0.7893518518518519,0.7569444444444444,0.0009961128234863281 32 | car-evaluation,4,37,0.8495370370370371,0.8240740740740741,0.8240740740740741,0.000997781753540039 33 | car-evaluation,4,42,0.8599537037037037,0.8819444444444444,0.8472222222222222,0.0009975433349609375 34 | car-evaluation,4,53,0.8368055555555556,0.8356481481481481,0.8379629629629629,0.0009970664978027344 35 | car-evaluation,5,37,0.875,0.8703703703703703,0.8425925925925926,0.0 36 | car-evaluation,5,42,0.8692129629629629,0.8680555555555556,0.8564814814814815,0.0 37 | car-evaluation,5,53,0.8680555555555556,0.8564814814814815,0.8032407407407407,0.0009987354278564453 38 | hayes-roth,2,37,0.625,0.4,0.55,0.0 39 | hayes-roth,2,42,0.5625,0.575,0.475,0.0 40 | hayes-roth,2,53,0.5625,0.525,0.55,0.0 41 | hayes-roth,3,37,0.675,0.425,0.625,0.0 42 | hayes-roth,3,42,0.7125,0.6,0.6,0.0009970664978027344 43 | hayes-roth,3,53,0.7,0.675,0.6,0.0 44 | hayes-roth,4,37,0.7375,0.6,0.8,0.0 45 | hayes-roth,4,42,0.775,0.675,0.625,0.0 46 | hayes-roth,4,53,0.7,0.675,0.6,0.0 47 | hayes-roth,5,37,0.7875,0.525,0.775,0.0009937286376953125 48 | hayes-roth,5,42,0.85,0.75,0.825,0.0009965896606445312 49 | 
hayes-roth,5,53,0.8,0.775,0.75,0.0 50 | house-votes-84,2,37,0.9741379310344828,0.9482758620689655,0.9827586206896551,0.0 51 | house-votes-84,2,42,0.9655172413793104,0.9827586206896551,0.9655172413793104,0.000997781753540039 52 | house-votes-84,2,53,0.9741379310344828,0.9655172413793104,0.9655172413793104,0.0 53 | house-votes-84,3,37,0.9741379310344828,0.9482758620689655,0.9827586206896551,0.0 54 | house-votes-84,3,42,0.9827586206896551,0.896551724137931,0.9655172413793104,0.0 55 | house-votes-84,3,53,0.9741379310344828,0.9655172413793104,0.9655172413793104,0.0 56 | house-votes-84,4,37,1.0,0.9655172413793104,0.9482758620689655,0.0 57 | house-votes-84,4,42,0.9827586206896551,0.896551724137931,0.9655172413793104,0.0009970664978027344 58 | house-votes-84,4,53,0.9827586206896551,0.9655172413793104,0.9655172413793104,0.0 59 | house-votes-84,5,37,1.0,0.9655172413793104,0.9827586206896551,0.0009970664978027344 60 | house-votes-84,5,42,0.9913793103448276,0.896551724137931,0.9655172413793104,0.0 61 | house-votes-84,5,53,0.9913793103448276,0.9655172413793104,0.9655172413793104,0.0 62 | soybean-small,2,37,0.9130434782608695,0.4166666666666667,0.8333333333333334,0.0009965896606445312 63 | soybean-small,2,42,0.8260869565217391,0.6666666666666666,0.75,0.0009951591491699219 64 | soybean-small,2,53,0.8260869565217391,0.8333333333333334,0.6666666666666666,0.0 65 | soybean-small,3,37,1.0,0.9166666666666666,1.0,0.000997304916381836 66 | soybean-small,3,42,1.0,1.0,0.9166666666666666,0.0009982585906982422 67 | soybean-small,3,53,1.0,1.0,1.0,0.0 68 | soybean-small,4,37,1.0,1.0,1.0,0.0009970664978027344 69 | soybean-small,4,42,1.0,1.0,0.9166666666666666,0.0 70 | soybean-small,4,53,1.0,1.0,1.0,0.000997304916381836 71 | soybean-small,5,37,1.0,1.0,1.0,0.0 72 | soybean-small,5,42,1.0,1.0,0.9166666666666666,0.0 73 | soybean-small,5,53,1.0,1.0,1.0,0.000997304916381836 74 | spect,2,37,0.7894736842105263,0.7611940298507462,0.835820895522388,0.0 75 | spect,2,42,0.8571428571428571,0.7014925373134329,0.7014925373134329,0.0 76 | spect,2,53,0.8120300751879699,0.746268656716418,0.8059701492537313,0.0009961128234863281 77 | spect,3,37,0.7894736842105263,0.7761194029850746,0.8805970149253731,0.0 78 | spect,3,42,0.8872180451127819,0.7611940298507462,0.7313432835820896,0.0 79 | spect,3,53,0.8195488721804511,0.7761194029850746,0.8059701492537313,0.0009968280792236328 80 | spect,4,37,0.8571428571428571,0.746268656716418,0.7910447761194029,0.000997781753540039 81 | spect,4,42,0.9323308270676691,0.7014925373134329,0.7014925373134329,0.00099945068359375 82 | spect,4,53,0.8421052631578947,0.7761194029850746,0.835820895522388,0.0009965896606445312 83 | spect,5,37,0.8796992481203008,0.746268656716418,0.8208955223880597,0.0009989738464355469 84 | spect,5,42,0.9398496240601504,0.6865671641791045,0.6865671641791045,0.0 85 | spect,5,53,0.8646616541353384,0.746268656716418,0.835820895522388,0.0009982585906982422 86 | tic-tac-toe,2,37,0.7202505219206681,0.694560669456067,0.675,0.0 87 | tic-tac-toe,2,42,0.6910229645093946,0.6778242677824268,0.7375,0.000988006591796875 88 | tic-tac-toe,2,53,0.7035490605427975,0.698744769874477,0.7166666666666667,0.0009975433349609375 89 | tic-tac-toe,3,37,0.7286012526096033,0.702928870292887,0.6791666666666667,0.0009970664978027344 90 | tic-tac-toe,3,42,0.7411273486430062,0.7573221757322176,0.775,0.0009984970092773438 91 | tic-tac-toe,3,53,0.7599164926931107,0.7280334728033473,0.6833333333333333,0.0 92 | tic-tac-toe,4,37,0.8037578288100209,0.7573221757322176,0.7916666666666666,0.000997304916381836 93 | 
tic-tac-toe,4,42,0.8162839248434238,0.7824267782426778,0.8458333333333333,0.0009975433349609375 94 | tic-tac-toe,4,53,0.81419624217119,0.8075313807531381,0.7625,0.000997781753540039 95 | tic-tac-toe,5,37,0.8643006263048016,0.7907949790794979,0.8,0.000997304916381836 96 | tic-tac-toe,5,42,0.8559498956158664,0.8451882845188284,0.8416666666666667,0.0009975433349609375 97 | tic-tac-toe,5,53,0.8622129436325678,0.803347280334728,0.7875,0.001993894577026367 98 | monks-1,2,37,0.7733812949640287,0.697841726618705,0.7410071942446043,0.0009975433349609375 99 | monks-1,2,42,0.7482014388489209,0.7338129496402878,0.7553956834532374,0.0 100 | monks-1,2,53,0.7769784172661871,0.7338129496402878,0.697841726618705,0.000997781753540039 101 | monks-1,3,37,0.8633093525179856,0.7841726618705036,0.8273381294964028,0.0 102 | monks-1,3,42,0.7482014388489209,0.7338129496402878,0.7553956834532374,0.0009975433349609375 103 | monks-1,3,53,0.8633093525179856,0.8273381294964028,0.7841726618705036,0.0009968280792236328 104 | monks-1,4,37,0.8633093525179856,0.7841726618705036,0.8273381294964028,0.0 105 | monks-1,4,42,0.7553956834532374,0.6546762589928058,0.7194244604316546,0.0009980201721191406 106 | monks-1,4,53,0.8633093525179856,0.8273381294964028,0.7841726618705036,0.000997781753540039 107 | monks-1,5,37,0.8669064748201439,0.8057553956834532,0.7553956834532374,0.0009965896606445312 108 | monks-1,5,42,0.7841726618705036,0.6834532374100719,0.7338129496402878,0.0 109 | monks-1,5,53,0.8812949640287769,0.8201438848920863,0.762589928057554,0.000997304916381836 110 | monks-2,2,37,0.6266666666666667,0.6733333333333333,0.7019867549668874,0.000997304916381836 111 | monks-2,2,42,0.64,0.68,0.6688741721854304,0.0 112 | monks-2,2,53,0.6466666666666666,0.72,0.6158940397350994,0.0 113 | monks-2,3,37,0.6766666666666666,0.6266666666666667,0.4966887417218543,0.000997304916381836 114 | monks-2,3,42,0.6566666666666666,0.66,0.6556291390728477,0.0009970664978027344 115 | monks-2,3,53,0.6866666666666666,0.66,0.5629139072847682,0.0 116 | monks-2,4,37,0.7166666666666667,0.6266666666666667,0.5827814569536424,0.0009980201721191406 117 | monks-2,4,42,0.7033333333333334,0.64,0.6291390728476821,0.0009984970092773438 118 | monks-2,4,53,0.7133333333333334,0.5866666666666667,0.5960264900662252,0.0009953975677490234 119 | monks-2,5,37,0.8,0.7666666666666667,0.6688741721854304,0.000995635986328125 120 | monks-2,5,42,0.7966666666666666,0.6666666666666666,0.6556291390728477,0.0009984970092773438 121 | monks-2,5,53,0.83,0.8,0.6821192052980133,0.0010004043579101562 122 | monks-3,2,37,0.9711191335740073,0.9492753623188406,0.9640287769784173,0.0 123 | monks-3,2,42,0.9711191335740073,0.9492753623188406,0.9640287769784173,0.0 124 | monks-3,2,53,0.9675090252707581,0.9492753623188406,0.9712230215827338,0.0 125 | monks-3,3,37,0.9963898916967509,0.9637681159420289,1.0,0.0009975433349609375 126 | monks-3,3,42,0.9711191335740073,0.9492753623188406,0.9640287769784173,0.0 127 | monks-3,3,53,0.9675090252707581,0.9492753623188406,0.9712230215827338,0.0 128 | monks-3,4,37,0.9963898916967509,0.9637681159420289,1.0,0.0009982585906982422 129 | monks-3,4,42,0.9963898916967509,0.9855072463768116,0.9784172661870504,0.0009961128234863281 130 | monks-3,4,53,0.9927797833935018,0.9782608695652174,0.9928057553956835,0.000997781753540039 131 | monks-3,5,37,0.9963898916967509,0.9637681159420289,1.0,0.0009970664978027344 132 | monks-3,5,42,0.9963898916967509,0.9855072463768116,0.9784172661870504,0.0009970664978027344 133 | 
monks-3,5,53,0.9963898916967509,0.9637681159420289,0.9928057553956835,0.0009984970092773438 134 | -------------------------------------------------------------------------------- /tree/__init__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | # author: Bo Tang 4 | 5 | from .oct import optimalDecisionTreeClassifier 6 | from .binoct import binOptimalDecisionTreeClassifier 7 | from .mfoct import maxFlowOptimalDecisionTreeClassifier 8 | -------------------------------------------------------------------------------- /tree/oct.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding: utf-8 3 | # author: Bo Tang 4 | 5 | from collections import namedtuple 6 | import numpy as np 7 | from scipy import stats 8 | import gurobipy as gp 9 | from gurobipy import GRB 10 | from sklearn import tree 11 | 12 | class optimalDecisionTreeClassifier: 13 | """ 14 | optimal classification tree 15 | """ 16 | def __init__(self, max_depth=3, min_samples_split=2, alpha=0, warmstart=True, timelimit=600, output=True): 17 | self.max_depth = max_depth 18 | self.min_samples_split = min_samples_split 19 | self.alpha = alpha 20 | self.warmstart = warmstart 21 | self.timelimit = timelimit 22 | self.output = output 23 | self.trained = False 24 | self.optgap = None 25 | 26 | # node index 27 | self.n_index = [i+1 for i in range(2 ** (self.max_depth + 1) - 1)] 28 | self.b_index = self.n_index[:-2**self.max_depth] # branch nodes 29 | self.l_index = self.n_index[-2**self.max_depth:] # leaf nodes 30 | 31 | def fit(self, x, y): 32 | """ 33 | fit training data 34 | """ 35 | # data size 36 | self.n, self.p = x.shape 37 | if self.output: 38 | print('Training data include {} instances, {} features.'.format(self.n,self.p)) 39 | 40 | # labels 41 | self.labels = np.unique(y) 42 | 43 | # scale data 44 | self.scales = np.max(x, axis=0) 45 | self.scales[self.scales == 0] = 1 46 | 47 | # solve MIP 48 | m, a, b, c, d, l = self._buildMIP(x/self.scales, y) 49 | if self.warmstart: 50 | self._setStart(x, y, a, c, d, l) 51 | m.optimize() 52 | self.optgap = m.MIPGap 53 | 54 | # get parameters 55 | self._a = {ind:a[ind].x for ind in a} 56 | self._b = {ind:b[ind].x for ind in b} 57 | self._c = {ind:c[ind].x for ind in c} 58 | self._d = {ind:d[ind].x for ind in d} 59 | 60 | self.trained = True 61 | 62 | def predict(self, x): 63 | """ 64 | model prediction 65 | """ 66 | if not self.trained: 67 | raise AssertionError('This optimalDecisionTreeClassifier instance is not fitted yet.') 68 | 69 | # leaf label 70 | labelmap = {} 71 | for t in self.l_index: 72 | for k in self.labels: 73 | if self._c[k,t] >= 1e-2: 74 | labelmap[t] = k 75 | 76 | y_pred = [] 77 | for xi in x/self.scales: 78 | t = 1 79 | while t not in self.l_index: 80 | right = (sum([self._a[j,t] * xi[j] for j in range(self.p)]) + 1e-9 >= self._b[t]) 81 | if right: 82 | t = 2 * t + 1 83 | else: 84 | t = 2 * t 85 | # label 86 | y_pred.append(labelmap[t]) 87 | 88 | return np.array(y_pred) 89 | 90 | def _buildMIP(self, x, y): 91 | """ 92 | build MIP formulation for Optimal Decision Tree 93 | """ 94 | # create a model 95 | m = gp.Model('m') 96 | 97 | # output 98 | m.Params.outputFlag = self.output 99 | m.Params.LogToConsole = self.output 100 | # time limit 101 | m.Params.timelimit = self.timelimit 102 | # parallel 103 | m.params.threads = 0 104 | 105 | # model sense 106 | m.modelSense = GRB.MINIMIZE 107 | 108 | # variables 109 | a = m.addVars(self.p, 
self.b_index, vtype=GRB.BINARY, name='a') # splitting feature 110 | b = m.addVars(self.b_index, vtype=GRB.CONTINUOUS, name='b') # splitting threshold 111 | c = m.addVars(self.labels, self.l_index, vtype=GRB.BINARY, name='c') # node prediction 112 | d = m.addVars(self.b_index, vtype=GRB.BINARY, name='d') # splitting option 113 | z = m.addVars(self.n, self.l_index, vtype=GRB.BINARY, name='z') # leaf node assignment 114 | l = m.addVars(self.l_index, vtype=GRB.BINARY, name='l') # leaf node activation 115 | L = m.addVars(self.l_index, vtype=GRB.CONTINUOUS, name='L') # leaf node misclassified 116 | M = m.addVars(self.labels, self.l_index, vtype=GRB.CONTINUOUS, name='M') # leaf node samples with label 117 | N = m.addVars(self.l_index, vtype=GRB.CONTINUOUS, name='N') # leaf node samples 118 | 119 | # calculate baseline accuracy 120 | baseline = self._calBaseline(y) 121 | 122 | # calculate minimum distance 123 | min_dis = self._calMinDist(x) 124 | 125 | # objective function 126 | obj = L.sum() / baseline + self.alpha * d.sum() 127 | m.setObjective(obj) 128 | 129 | # constraints 130 | # (20) 131 | m.addConstrs(L[t] >= N[t] - M[k,t] - self.n * (1 - c[k,t]) for t in self.l_index for k in self.labels) 132 | # (21) 133 | m.addConstrs(L[t] <= N[t] - M[k,t] + self.n * c[k,t] for t in self.l_index for k in self.labels) 134 | # (17) 135 | m.addConstrs(gp.quicksum((y[i] == k) * z[i,t] for i in range(self.n)) == M[k,t] 136 | for t in self.l_index for k in self.labels) 137 | # (16) 138 | m.addConstrs(z.sum('*', t) == N[t] for t in self.l_index) 139 | # (18) 140 | m.addConstrs(c.sum('*', t) == l[t] for t in self.l_index) 141 | # (13) and (14) 142 | for t in self.l_index: 143 | left = (t % 2 == 0) 144 | ta = t // 2 145 | while ta != 0: 146 | if left: 147 | m.addConstrs(gp.quicksum(a[j,ta] * (x[i,j] + min_dis[j]) for j in range(self.p)) 148 | + 149 | (1 + np.max(min_dis)) * (1 - d[ta]) 150 | <= 151 | b[ta] + (1 + np.max(min_dis)) * (1 - z[i,t]) 152 | for i in range(self.n)) 153 | else: 154 | m.addConstrs(gp.quicksum(a[j,ta] * x[i,j] for j in range(self.p)) 155 | >= 156 | b[ta] - (1 - z[i,t]) 157 | for i in range(self.n)) 158 | left = (ta % 2 == 0) 159 | ta //= 2 160 | # (8) 161 | m.addConstrs(z.sum(i, '*') == 1 for i in range(self.n)) 162 | # (6) 163 | m.addConstrs(z[i,t] <= l[t] for t in self.l_index for i in range(self.n)) 164 | # (7) 165 | m.addConstrs(z.sum('*', t) >= self.min_samples_split * l[t] for t in self.l_index) 166 | # (2) 167 | m.addConstrs(a.sum('*', t) == d[t] for t in self.b_index) 168 | # (3) 169 | m.addConstrs(b[t] <= d[t] for t in self.b_index) 170 | # (5) 171 | m.addConstrs(d[t] <= d[t//2] for t in self.b_index if t != 1) 172 | 173 | return m, a, b, c, d, l 174 | 175 | @staticmethod 176 | def _calBaseline(y): 177 | """ 178 | obtain baseline accuracy by simply predicting the most popular class 179 | """ 180 | mode = stats.mode(y)[0][0] 181 | return np.sum(y == mode) 182 | 183 | @staticmethod 184 | def _calMinDist(x): 185 | """ 186 | get the smallest non-zero distance of features 187 | """ 188 | min_dis = [] 189 | for j in range(x.shape[1]): 190 | xj = x[:,j] 191 | # drop duplicates 192 | xj = np.unique(xj) 193 | # sort 194 | xj = np.sort(xj)[::-1] 195 | # distance 196 | dis = [1] 197 | for i in range(len(xj)-1): 198 | dis.append(xj[i] - xj[i+1]) 199 | # min distance 200 | min_dis.append(np.min(dis) if np.min(dis) else 1) 201 | return min_dis 202 | 203 | def _setStart(self, x, y, a, c, d, l): 204 | """ 205 | set warm start from CART 206 | """ 207 | # train with CART 208 | if 
self.min_samples_split > 1: 209 | clf = tree.DecisionTreeClassifier(max_depth=self.max_depth, min_samples_split=self.min_samples_split) 210 | else: 211 | clf = tree.DecisionTreeClassifier(max_depth=self.max_depth) 212 | clf.fit(x, y) 213 | 214 | # get splitting rules 215 | rules = self._getRules(clf) 216 | 217 | # fix branch node 218 | for t in self.b_index: 219 | # not split 220 | if rules[t].feat is None or rules[t].feat == tree._tree.TREE_UNDEFINED: 221 | d[t].start = 0 222 | for f in range(self.p): 223 | a[f,t].start = 0 224 | # split 225 | else: 226 | d[t].start = 1 227 | for f in range(self.p): 228 | if f == int(rules[t].feat): 229 | a[f,t].start = 1 230 | else: 231 | a[f,t].start = 0 232 | 233 | # fix leaf nodes 234 | for t in self.l_index: 235 | # terminate early 236 | if rules[t].value is None: 237 | l[t].start = int(t % 2) 238 | # flows go to right 239 | if t % 2: 240 | t_leaf = t 241 | while rules[t].value is None: 242 | t //= 2 243 | for k in self.labels: 244 | if k == np.argmax(rules[t].value): 245 | c[k, t_leaf].start = 1 246 | else: 247 | c[k, t_leaf].start = 0 248 | # nothing in left 249 | else: 250 | for k in self.labels: 251 | c[k, t].start = 0 252 | # terminate at leaf node 253 | else: 254 | l[t].start = 1 255 | for k in self.labels: 256 | if k == np.argmax(rules[t].value): 257 | c[k, t].start = 1 258 | else: 259 | c[k, t].start = 0 260 | 261 | def _getRules(self, clf): 262 | """ 263 | get splitting rules 264 | """ 265 | # node index map 266 | node_map = {1:0} 267 | for t in self.b_index: 268 | # terminal 269 | node_map[2*t] = -1 270 | node_map[2*t+1] = -1 271 | # left 272 | l = clf.tree_.children_left[node_map[t]] 273 | node_map[2*t] = l 274 | # right 275 | r = clf.tree_.children_right[node_map[t]] 276 | node_map[2*t+1] = r 277 | 278 | # rules 279 | rule = namedtuple('Rules', ('feat', 'threshold', 'value')) 280 | rules = {} 281 | # branch nodes 282 | for t in self.b_index: 283 | i = node_map[t] 284 | if i == -1: 285 | r = rule(None, None, None) 286 | else: 287 | r = rule(clf.tree_.feature[i], clf.tree_.threshold[i], clf.tree_.value[i,0]) 288 | rules[t] = r 289 | # leaf nodes 290 | for t in self.l_index: 291 | i = node_map[t] 292 | if i == -1: 293 | r = rule(None, None, None) 294 | else: 295 | r = rule(None, None, clf.tree_.value[i,0]) 296 | rules[t] = r 297 | 298 | return rules 299 | --------------------------------------------------------------------------------
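Putting the pieces above together: dataset.loadData(name) returns the feature matrix x and label vector y for a named UCI dataset, and tree.optimalDecisionTreeClassifier wraps the Gurobi MIP behind a scikit-learn-style fit/predict interface, exposing the solver's optimality gap as optgap. The sketch below is a minimal, illustrative end-to-end run, not a file from this repository: it assumes execution from the repository root and a licensed Gurobi installation, and the 50/25/25 split via sklearn's train_test_split is only an assumption for illustration (the splits actually used to produce the res/*.csv files come from experiments/Experiments.ipynb and may differ).

# minimal usage sketch (illustrative; not a file in this repository)
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
from sklearn.tree import DecisionTreeClassifier

from dataset import loadData
from tree import optimalDecisionTreeClassifier

# load one of the supported datasets (see dataset.loadData for the valid names)
x, y = loadData('monks-1')

# illustrative 50/25/25 train/validation/test split; the repository's own
# experiments may split differently. Seed 42 matches one of the seeds
# (37/42/53) that appear in res/*.csv.
x_train, x_rest, y_train, y_rest = train_test_split(x, y, test_size=0.5, random_state=42)
x_val, x_test, y_val, y_test = train_test_split(x_rest, y_rest, test_size=0.5, random_state=42)

# optimal classification tree: depth 3, no complexity penalty, 600-second time limit
oct_clf = optimalDecisionTreeClassifier(max_depth=3, min_samples_split=2, alpha=0,
                                        warmstart=True, timelimit=600, output=False)
oct_clf.fit(x_train, y_train)
print('OCT  MIP gap : {:.4f}'.format(oct_clf.optgap))
print('OCT  val acc : {:.4f}'.format(accuracy_score(y_val, oct_clf.predict(x_val))))
print('OCT  test acc: {:.4f}'.format(accuracy_score(y_test, oct_clf.predict(x_test))))

# CART baseline at the same depth (the comparison labeled 'CART' in Result.ipynb, read from res/sk.csv)
cart = DecisionTreeClassifier(max_depth=3, random_state=42)
cart.fit(x_train, y_train)
print('CART test acc: {:.4f}'.format(accuracy_score(y_test, cart.predict(x_test))))

In the result files, the gap column is the MIP optimality gap reported by Gurobi at termination (the optgap attribute above), the seed column indexes the three random splits (37, 42, 53), and the res/600 and res/1200 folders appear to hold runs with 600-second and 1200-second solver time limits respectively, as their train_time columns suggest.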