├── .github
│   └── workflows
│       └── python-publish.yml
├── .gitignore
├── README.md
├── data
│   ├── dictionaries
│   │   ├── dictionary-fixedraddist_scheme-HCPMGH.mat
│   │   ├── dictionary-fixedraddist_scheme-StLucGE.mat
│   │   ├── dictionary-fixedraddist_scheme-StLucGE_v2.mat
│   │   ├── dictionary-hexagpack_scheme-HCPMGH.mat
│   │   └── dictionary-hexagpack_scheme-StLucGE.mat
│   └── schemes
│       ├── scheme-HCPMGH_bvals.txt
│       ├── scheme-HCPMGH_bvecs.txt
│       ├── scheme-HCPMGH_scheme.txt
│       ├── scheme-HCPMGH_schemeWithB0.txt
│       ├── scheme-StLucGE260_bvals.txt
│       ├── scheme-StLucGE260_bvecs.txt
│       ├── scheme-StLucGE260_scheme.txt
│       ├── scheme-StLucGE260_schemeWithB0.txt
│       ├── scheme-StLucGE260v2_bvals.txt
│       ├── scheme-StLucGE260v2_bvecs.txt
│       ├── scheme-StLucGE260v2_scheme.txt
│       ├── scheme-StLucGE260v2_schemeWithB0.txt
│       ├── scheme-StLucGE260v3_bvals.txt
│       ├── scheme-StLucGE260v3_bvecs.txt
│       ├── scheme-StLucGE260v3_scheme.txt
│       ├── scheme-StLucGE260v3_schemeWithB0.txt
│       ├── scheme-StLucGE260v4_bvals.txt
│       ├── scheme-StLucGE260v4_bvecs.txt
│       ├── scheme-StLucGE260v4_scheme.txt
│       ├── scheme-StLucGE260v4_schemeWithB0.txt
│       ├── scheme-StLucGE534_bvals.txt
│       ├── scheme-StLucGE534_bvecs.txt
│       ├── scheme-StLucGE534_scheme.txt
│       ├── scheme-StLucGE534_schemeWithB0.txt
│       ├── scheme-StLucGE_bvals.txt
│       ├── scheme-StLucGE_bvecs.txt
│       ├── scheme-StLucGE_scheme.txt
│       └── scheme-StLucGE_schemeWithB0.txt
├── examples
│   ├── genTrainAndTestData.py
│   ├── invivo_fullylearned.py
│   ├── invivo_hybrid.py
│   ├── prepare_report.py
│   ├── report.py
│   ├── slurm-synthetizer.py
│   ├── testStLucGE.py
│   ├── train_FullyLearned_MLP.py
│   └── train_Hybrid_MLP.py
├── fastmf
│   ├── __init__.py
│   ├── generation
│   │   ├── __init__.py
│   │   ├── formatter.py
│   │   ├── generator.py
│   │   └── synthetizer.py
│   ├── inference
│   │   ├── __init__.py
│   │   ├── fullylearned_inference.py
│   │   └── hybrid_inference.py
│   ├── models
│   │   ├── MLP_FullyLearned.py
│   │   ├── MLP_Split.py
│   │   └── __init__.py
│   ├── reports
│   │   ├── __init__.py
│   │   └── evaluator.py
│   └── utils
│       ├── NN_utils.py
│       ├── __init__.py
│       ├── mf_estimator.py
│       ├── mf_utils.py
│       └── tripwire.py
└── pyproject.toml
/.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries 3 | 4 | # This workflow uses actions that are not certified by GitHub. 5 | # They are provided by a third-party and are governed by 6 | # separate terms of service, privacy policy, and support 7 | # documentation.
8 | 9 | name: Upload Python Package 10 | 11 | on: 12 | release: 13 | types: [published] 14 | 15 | permissions: 16 | contents: read 17 | 18 | jobs: 19 | deploy: 20 | 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - uses: actions/checkout@v3 25 | - name: Set up Python 26 | uses: actions/setup-python@v3 27 | with: 28 | python-version: '3.x' 29 | - name: Install dependencies 30 | run: | 31 | python -m pip install --upgrade pip 32 | pip install build 33 | - name: Build package 34 | run: python -m build 35 | - name: Publish package 36 | uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29 37 | with: 38 | user: __token__ 39 | password: ${{ secrets.PYPI_API_TOKEN }} 40 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | 53 | # Translations 54 | *.mo 55 | *.pot 56 | 57 | # Django stuff: 58 | *.log 59 | local_settings.py 60 | db.sqlite3 61 | db.sqlite3-journal 62 | 63 | # Flask stuff: 64 | instance/ 65 | .webassets-cache 66 | 67 | # Scrapy stuff: 68 | .scrapy 69 | 70 | # Sphinx documentation 71 | docs/_build/ 72 | 73 | # PyBuilder 74 | target/ 75 | 76 | # Jupyter Notebook 77 | .ipynb_checkpoints 78 | 79 | # IPython 80 | profile_default/ 81 | ipython_config.py 82 | 83 | # pyenv 84 | .python-version 85 | 86 | # pipenv 87 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 88 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 89 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 90 | # install all needed dependencies. 
91 | #Pipfile.lock 92 | 93 | # celery beat schedule file 94 | celerybeat-schedule 95 | 96 | # SageMath parsed files 97 | *.sage.py 98 | 99 | # Environments 100 | .env 101 | .venv 102 | env/ 103 | venv/ 104 | ENV/ 105 | env.bak/ 106 | venv.bak/ 107 | 108 | # Spyder project settings 109 | .spyderproject 110 | .spyproject 111 | 112 | # Rope project settings 113 | .ropeproject 114 | 115 | # mkdocs documentation 116 | /site 117 | 118 | # mypy 119 | .mypy_cache/ 120 | .dmypy.json 121 | dmypy.json 122 | 123 | # Pyre type checker 124 | .pyre/ 125 | 126 | .idea/ 127 | tests/ 128 | 129 | poetry.lock 130 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # FastMF_public [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.10643411.svg)](https://doi.org/10.5281/zenodo.10643411) [![PyPI](https://img.shields.io/pypi/v/FastMF?label=pypi%20package)](https://pypi.org/project/FastMF/) 2 | 3 | FastMF is a Python library for matching diffusion MRI data against a precomputed dictionary of microstructural features, leveraging deep learning for improved accuracy and efficiency. 4 | ## Installation 5 | ### Prerequisites 6 | FastMF requires [Python](https://www.python.org/) v3.8+. 7 | ### Installation Steps 8 | To get started with FastMF, clone the repository and install the package: 9 | 10 | ```sh 11 | git clone https://github.com/Hyedryn/FastMF_public.git 12 | cd FastMF_public 13 | python -m pip install . 14 | ``` 15 | ## Usage 16 | To familiarize yourself with its functionality, we recommend exploring the examples provided in the `examples` folder. 17 | 18 | -------------------------------------------------------------------------------- /data/dictionaries/dictionary-fixedraddist_scheme-HCPMGH.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hyedryn/FastMF_public/cb0775bff53f320ccb0a6a3bc507c924a9f30868/data/dictionaries/dictionary-fixedraddist_scheme-HCPMGH.mat -------------------------------------------------------------------------------- /data/dictionaries/dictionary-fixedraddist_scheme-StLucGE.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hyedryn/FastMF_public/cb0775bff53f320ccb0a6a3bc507c924a9f30868/data/dictionaries/dictionary-fixedraddist_scheme-StLucGE.mat -------------------------------------------------------------------------------- /data/dictionaries/dictionary-fixedraddist_scheme-StLucGE_v2.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hyedryn/FastMF_public/cb0775bff53f320ccb0a6a3bc507c924a9f30868/data/dictionaries/dictionary-fixedraddist_scheme-StLucGE_v2.mat -------------------------------------------------------------------------------- /data/dictionaries/dictionary-hexagpack_scheme-HCPMGH.mat: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hyedryn/FastMF_public/cb0775bff53f320ccb0a6a3bc507c924a9f30868/data/dictionaries/dictionary-hexagpack_scheme-HCPMGH.mat -------------------------------------------------------------------------------- /data/dictionaries/dictionary-hexagpack_scheme-StLucGE.mat: --------------------------------------------------------------------------------
https://raw.githubusercontent.com/Hyedryn/FastMF_public/cb0775bff53f320ccb0a6a3bc507c924a9f30868/data/dictionaries/dictionary-hexagpack_scheme-StLucGE.mat -------------------------------------------------------------------------------- /data/schemes/scheme-HCPMGH_bvals.txt: -------------------------------------------------------------------------------- 1 | 0 2 | 1000 3 | 1000 4 | 1000 5 | 1000 6 | 1000 7 | 1000 8 | 1000 9 | 1000 10 | 1000 11 | 1000 12 | 1000 13 | 1000 14 | 1000 15 | 0 16 | 1000 17 | 1000 18 | 1000 19 | 1000 20 | 1000 21 | 1000 22 | 1000 23 | 1000 24 | 1000 25 | 1000 26 | 1000 27 | 1000 28 | 1000 29 | 0 30 | 1000 31 | 1000 32 | 1000 33 | 1000 34 | 1000 35 | 1000 36 | 1000 37 | 1000 38 | 1000 39 | 1000 40 | 1000 41 | 1000 42 | 1000 43 | 0 44 | 1000 45 | 1000 46 | 1000 47 | 1000 48 | 1000 49 | 1000 50 | 1000 51 | 1000 52 | 1000 53 | 1000 54 | 1000 55 | 1000 56 | 1000 57 | 0 58 | 1000 59 | 1000 60 | 1000 61 | 1000 62 | 1000 63 | 1000 64 | 1000 65 | 1000 66 | 1000 67 | 1000 68 | 1000 69 | 1000 70 | 0 71 | 3000 72 | 3000 73 | 3000 74 | 3000 75 | 3000 76 | 3000 77 | 3000 78 | 3000 79 | 3000 80 | 3000 81 | 3000 82 | 3000 83 | 3000 84 | 0 85 | 3000 86 | 3000 87 | 3000 88 | 3000 89 | 3000 90 | 3000 91 | 3000 92 | 3000 93 | 3000 94 | 3000 95 | 3000 96 | 3000 97 | 3000 98 | 0 99 | 3000 100 | 3000 101 | 3000 102 | 3000 103 | 3000 104 | 3000 105 | 3000 106 | 3000 107 | 3000 108 | 3000 109 | 3000 110 | 3000 111 | 3000 112 | 0 113 | 3000 114 | 3000 115 | 3000 116 | 3000 117 | 3000 118 | 3000 119 | 3000 120 | 3000 121 | 3000 122 | 3000 123 | 3000 124 | 3000 125 | 3000 126 | 0 127 | 3000 128 | 3000 129 | 3000 130 | 3000 131 | 3000 132 | 3000 133 | 3000 134 | 3000 135 | 3000 136 | 3000 137 | 3000 138 | 3000 139 | 0 140 | 5000 141 | 5000 142 | 5000 143 | 5000 144 | 5000 145 | 5000 146 | 5000 147 | 5000 148 | 5000 149 | 5000 150 | 5000 151 | 5000 152 | 5000 153 | 0 154 | 5000 155 | 5000 156 | 5000 157 | 5000 158 | 5000 159 | 5000 160 | 5000 161 | 5000 162 | 5000 163 | 5000 164 | 5000 165 | 5000 166 | 5000 167 | 0 168 | 5000 169 | 5000 170 | 5000 171 | 5000 172 | 5000 173 | 5000 174 | 5000 175 | 5000 176 | 5000 177 | 5000 178 | 5000 179 | 5000 180 | 5000 181 | 0 182 | 5000 183 | 5000 184 | 5000 185 | 5000 186 | 5000 187 | 5000 188 | 5000 189 | 5000 190 | 5000 191 | 5000 192 | 5000 193 | 5000 194 | 5000 195 | 0 196 | 5000 197 | 5000 198 | 5000 199 | 5000 200 | 5000 201 | 5000 202 | 5000 203 | 5000 204 | 5000 205 | 5000 206 | 5000 207 | 5000 208 | 5000 209 | 0 210 | 5000 211 | 5000 212 | 5000 213 | 5000 214 | 5000 215 | 5000 216 | 5000 217 | 5000 218 | 5000 219 | 5000 220 | 5000 221 | 5000 222 | 5000 223 | 0 224 | 5000 225 | 5000 226 | 5000 227 | 5000 228 | 5000 229 | 5000 230 | 5000 231 | 5000 232 | 5000 233 | 5000 234 | 5000 235 | 5000 236 | 5000 237 | 0 238 | 5000 239 | 5000 240 | 5000 241 | 5000 242 | 5000 243 | 5000 244 | 5000 245 | 5000 246 | 5000 247 | 5000 248 | 5000 249 | 5000 250 | 5000 251 | 0 252 | 5000 253 | 5000 254 | 5000 255 | 5000 256 | 5000 257 | 5000 258 | 5000 259 | 5000 260 | 5000 261 | 5000 262 | 5000 263 | 5000 264 | 5000 265 | 0 266 | 5000 267 | 5000 268 | 5000 269 | 5000 270 | 5000 271 | 5000 272 | 5000 273 | 5000 274 | 5000 275 | 5000 276 | 5000 277 | 0 278 | 10000 279 | 10000 280 | 10000 281 | 10000 282 | 10000 283 | 10000 284 | 10000 285 | 10000 286 | 10000 287 | 10000 288 | 10000 289 | 10000 290 | 10000 291 | 0 292 | 10000 293 | 10000 294 | 10000 295 | 10000 296 | 10000 297 | 10000 298 | 10000 299 | 10000 300 | 10000 301 | 10000 302 | 10000 303 | 10000 304 | 10000 
305 | 0 306 | 9950 307 | 10050 308 | 10000 309 | 10000 310 | 10000 311 | 10000 312 | 10000 313 | 10000 314 | 10000 315 | 10000 316 | 10000 317 | 10000 318 | 10000 319 | 0 320 | 10000 321 | 10000 322 | 10000 323 | 10000 324 | 10000 325 | 10000 326 | 10000 327 | 10000 328 | 10000 329 | 10000 330 | 10000 331 | 10000 332 | 10000 333 | 0 334 | 10000 335 | 10000 336 | 10050 337 | 9950 338 | 10000 339 | 10000 340 | 10000 341 | 10000 342 | 10000 343 | 10000 344 | 10000 345 | 10000 346 | 10000 347 | 0 348 | 10000 349 | 10000 350 | 10050 351 | 10000 352 | 10000 353 | 10000 354 | 10000 355 | 10000 356 | 10000 357 | 10050 358 | 9950 359 | 10000 360 | 10000 361 | 0 362 | 10050 363 | 9950 364 | 10000 365 | 10000 366 | 10000 367 | 10000 368 | 10000 369 | 10000 370 | 10000 371 | 10000 372 | 10000 373 | 10000 374 | 10000 375 | 0 376 | 10000 377 | 10000 378 | 10000 379 | 10000 380 | 10000 381 | 10000 382 | 10000 383 | 10000 384 | 10000 385 | 10000 386 | 10000 387 | 10000 388 | 10000 389 | 0 390 | 10000 391 | 10000 392 | 10000 393 | 10000 394 | 10000 395 | 10000 396 | 10000 397 | 10000 398 | 10000 399 | 10000 400 | 10000 401 | 10000 402 | 10000 403 | 0 404 | 10000 405 | 10000 406 | 10000 407 | 10000 408 | 10000 409 | 10000 410 | 10000 411 | 10000 412 | 10000 413 | 10000 414 | 10000 415 | 0 416 | 10000 417 | 10000 418 | 10000 419 | 10000 420 | 10000 421 | 10000 422 | 10000 423 | 10000 424 | 10000 425 | 10000 426 | 10000 427 | 10000 428 | 10000 429 | 0 430 | 10000 431 | 10000 432 | 10000 433 | 10000 434 | 10000 435 | 10000 436 | 10000 437 | 10000 438 | 10000 439 | 10000 440 | 10000 441 | 10000 442 | 10000 443 | 0 444 | 10000 445 | 10000 446 | 10000 447 | 10000 448 | 10000 449 | 10000 450 | 10000 451 | 10000 452 | 10000 453 | 10000 454 | 10000 455 | 10000 456 | 10000 457 | 0 458 | 10000 459 | 10000 460 | 10000 461 | 10000 462 | 10000 463 | 10000 464 | 10000 465 | 10000 466 | 10000 467 | 10000 468 | 10000 469 | 9950 470 | 10050 471 | 0 472 | 10000 473 | 10000 474 | 10000 475 | 10000 476 | 10000 477 | 10000 478 | 10000 479 | 10000 480 | 9950 481 | 10050 482 | 10000 483 | 10000 484 | 10000 485 | 0 486 | 10000 487 | 10000 488 | 10000 489 | 10000 490 | 10000 491 | 10050 492 | 9950 493 | 10000 494 | 10000 495 | 10000 496 | 10000 497 | 10000 498 | 10000 499 | 0 500 | 10000 501 | 10000 502 | 10050 503 | 9950 504 | 10000 505 | 10000 506 | 10000 507 | 10000 508 | 10050 509 | 10000 510 | 10000 511 | 10000 512 | 10000 513 | 0 514 | 10000 515 | 10000 516 | 10000 517 | 10000 518 | 10000 519 | 10000 520 | 10000 521 | 10000 522 | 10000 523 | 10000 524 | 10000 525 | 10000 526 | 9950 527 | 0 528 | 10000 529 | 10000 530 | 10000 531 | 10000 532 | 10000 533 | 10000 534 | 10000 535 | 10000 536 | 10000 537 | 10000 538 | 10000 539 | 10000 540 | 10000 541 | 0 542 | 10000 543 | 10000 544 | 10000 545 | 10000 546 | 10000 547 | 10050 548 | 10000 549 | 10000 550 | 10000 551 | 10000 552 | 10000 553 | -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE260_bvals.txt: -------------------------------------------------------------------------------- 1 | 0.000000 2 | 1000.000000 3 | 1000.000000 4 | 1000.000000 5 | 1000.000000 6 | 1000.000000 7 | 1000.000000 8 | 1000.000000 9 | 1000.000000 10 | 1000.000000 11 | 1000.000000 12 | 1000.000000 13 | 1000.000000 14 | 1000.000000 15 | 1000.000000 16 | 1000.000000 17 | 1000.000000 18 | 1000.000000 19 | 1000.000000 20 | 1000.000000 21 | 1000.000000 22 | 1000.000000 23 | 1000.000000 24 | 1000.000000 25 | 1000.000000 26 | 1000.000000 27 | 1000.000000 
28 | 1000.000000 29 | 1000.000000 30 | 1000.000000 31 | 1000.000000 32 | 1000.000000 33 | 1000.000000 34 | 1000.000000 35 | 1000.000000 36 | 1000.000000 37 | 1000.000000 38 | 1000.000000 39 | 1000.000000 40 | 1000.000000 41 | 1000.000000 42 | 1000.000000 43 | 1000.000000 44 | 1000.000000 45 | 1000.000000 46 | 1000.000000 47 | 1000.000000 48 | 1000.000000 49 | 1000.000000 50 | 1000.000000 51 | 1000.000000 52 | 1000.000000 53 | 1000.000000 54 | 1000.000000 55 | 1000.000000 56 | 1000.000000 57 | 1000.000000 58 | 1000.000000 59 | 1000.000000 60 | 1000.000000 61 | 1000.000000 62 | 1000.000000 63 | 1000.000000 64 | 1000.000000 65 | 1000.000000 66 | 0.000000 67 | 3000.000000 68 | 3000.000000 69 | 3000.000000 70 | 3000.000000 71 | 3000.000000 72 | 3000.000000 73 | 3000.000000 74 | 3000.000000 75 | 3000.000000 76 | 3000.000000 77 | 3000.000000 78 | 3000.000000 79 | 3000.000000 80 | 3000.000000 81 | 3000.000000 82 | 3000.000000 83 | 3000.000000 84 | 3000.000000 85 | 3000.000000 86 | 3000.000000 87 | 3000.000000 88 | 3000.000000 89 | 3000.000000 90 | 3000.000000 91 | 3000.000000 92 | 3000.000000 93 | 3000.000000 94 | 3000.000000 95 | 3000.000000 96 | 3000.000000 97 | 3000.000000 98 | 3000.000000 99 | 3000.000000 100 | 3000.000000 101 | 3000.000000 102 | 3000.000000 103 | 3000.000000 104 | 3000.000000 105 | 3000.000000 106 | 3000.000000 107 | 3000.000000 108 | 3000.000000 109 | 3000.000000 110 | 3000.000000 111 | 3000.000000 112 | 3000.000000 113 | 3000.000000 114 | 3000.000000 115 | 3000.000000 116 | 3000.000000 117 | 3000.000000 118 | 3000.000000 119 | 3000.000000 120 | 3000.000000 121 | 3000.000000 122 | 3000.000000 123 | 3000.000000 124 | 3000.000000 125 | 3000.000000 126 | 3000.000000 127 | 3000.000000 128 | 3000.000000 129 | 3000.000000 130 | 3000.000000 131 | 0.000000 132 | 5000.000000 133 | 5000.000000 134 | 5000.000000 135 | 5000.000000 136 | 5000.000000 137 | 5000.000000 138 | 5000.000000 139 | 5000.000000 140 | 5000.000000 141 | 5000.000000 142 | 5000.000000 143 | 5000.000000 144 | 5000.000000 145 | 5000.000000 146 | 5000.000000 147 | 5000.000000 148 | 5000.000000 149 | 5000.000000 150 | 5000.000000 151 | 5000.000000 152 | 5000.000000 153 | 5000.000000 154 | 5000.000000 155 | 5000.000000 156 | 5000.000000 157 | 5000.000000 158 | 5000.000000 159 | 5000.000000 160 | 5000.000000 161 | 5000.000000 162 | 5000.000000 163 | 5000.000000 164 | 5000.000000 165 | 5000.000000 166 | 5000.000000 167 | 5000.000000 168 | 5000.000000 169 | 5000.000000 170 | 5000.000000 171 | 5000.000000 172 | 5000.000000 173 | 5000.000000 174 | 5000.000000 175 | 5000.000000 176 | 5000.000000 177 | 5000.000000 178 | 5000.000000 179 | 5000.000000 180 | 5000.000000 181 | 5000.000000 182 | 5000.000000 183 | 5000.000000 184 | 5000.000000 185 | 5000.000000 186 | 5000.000000 187 | 5000.000000 188 | 5000.000000 189 | 5000.000000 190 | 5000.000000 191 | 5000.000000 192 | 5000.000000 193 | 5000.000000 194 | 5000.000000 195 | 5000.000000 196 | 5000.000000 197 | 5000.000000 198 | 5000.000000 199 | 5000.000000 200 | 5000.000000 201 | 5000.000000 202 | 5000.000000 203 | 5000.000000 204 | 5000.000000 205 | 5000.000000 206 | 5000.000000 207 | 5000.000000 208 | 5000.000000 209 | 5000.000000 210 | 5000.000000 211 | 5000.000000 212 | 5000.000000 213 | 5000.000000 214 | 5000.000000 215 | 5000.000000 216 | 5000.000000 217 | 5000.000000 218 | 5000.000000 219 | 5000.000000 220 | 5000.000000 221 | 5000.000000 222 | 5000.000000 223 | 5000.000000 224 | 5000.000000 225 | 5000.000000 226 | 5000.000000 227 | 5000.000000 228 | 5000.000000 229 | 
5000.000000 230 | 5000.000000 231 | 5000.000000 232 | 5000.000000 233 | 5000.000000 234 | 5000.000000 235 | 5000.000000 236 | 5000.000000 237 | 5000.000000 238 | 5000.000000 239 | 5000.000000 240 | 5000.000000 241 | 5000.000000 242 | 5000.000000 243 | 5000.000000 244 | 5000.000000 245 | 5000.000000 246 | 5000.000000 247 | 5000.000000 248 | 5000.000000 249 | 5000.000000 250 | 5000.000000 251 | 5000.000000 252 | 5000.000000 253 | 5000.000000 254 | 5000.000000 255 | 5000.000000 256 | 5000.000000 257 | 5000.000000 258 | 5000.000000 259 | 5000.000000 260 | 0.000000 -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE260_bvecs.txt: -------------------------------------------------------------------------------- 1 | 0.000000 0.000000 0.000000 2 | 0.506659 0.400302 0.763580 3 | 0.157900 0.072222 0.984810 4 | -0.218510 -0.852977 0.474008 5 | -0.365721 0.720282 0.589443 6 | 0.704623 0.467065 0.534187 7 | -0.593909 0.267921 0.758611 8 | 0.432124 0.710808 0.554996 9 | -0.371309 -0.928490 0.005993 10 | 0.295611 0.910524 0.289067 11 | 0.619266 -0.628691 0.470380 12 | 0.724778 0.175052 0.666374 13 | 0.033005 0.991648 0.124681 14 | -0.781988 -0.447054 0.434323 15 | 0.695943 -0.356921 0.623114 16 | 0.905458 0.395704 0.153508 17 | -0.944708 -0.227593 0.236070 18 | 0.223540 -0.968191 0.112406 19 | -0.811879 0.575981 0.095392 20 | -0.769694 0.073603 0.634156 21 | 0.464724 -0.358097 0.809814 22 | -0.210316 0.881590 0.422571 23 | 0.370207 -0.660392 0.653322 24 | 0.254340 0.580075 0.773837 25 | -0.512483 -0.441690 0.736390 26 | -0.239806 0.537204 0.808644 27 | -0.583671 0.793415 0.172685 28 | 0.770125 -0.602436 0.209710 29 | -0.956379 0.146149 0.252942 30 | -0.688910 -0.690736 0.219742 31 | -0.183398 -0.404022 0.896176 32 | 0.594203 -0.085277 0.799782 33 | 0.945027 -0.326622 0.015535 34 | 0.020028 -0.676546 0.736128 35 | -0.845253 -0.530449 0.064580 36 | 0.601240 0.789747 0.121694 37 | 0.119419 0.858841 0.498128 38 | 0.892122 0.242131 0.381434 39 | -0.686004 0.615727 0.387658 40 | -0.013039 0.710436 0.703641 41 | 0.087481 -0.876949 0.472554 42 | 0.427293 0.135118 0.893960 43 | -0.444646 -0.851496 0.277929 44 | 0.368869 -0.840662 0.396514 45 | -0.086657 -0.976336 0.198137 46 | -0.997872 -0.064877 0.006484 47 | 0.677837 0.654369 0.335169 48 | 0.968321 -0.061125 0.242114 49 | -0.072838 -0.101980 0.992116 50 | 0.211364 -0.224388 0.951302 51 | -0.834101 0.323656 0.446680 52 | 0.186653 -0.490612 0.851153 53 | -0.312565 0.934105 0.172488 54 | 0.866947 -0.346737 0.358017 55 | -0.544233 -0.654414 0.524931 56 | 0.142318 0.365086 0.920031 57 | -0.822785 -0.181488 0.538597 58 | 0.535998 -0.832896 0.137807 59 | -0.593110 -0.166013 0.787820 60 | -0.571310 0.516642 0.637719 61 | 0.844634 -0.067728 0.531042 62 | -0.345579 -0.122150 0.930406 63 | -0.267045 -0.655804 0.706122 64 | -0.135770 0.297339 0.945069 65 | -0.383242 0.170978 0.907685 66 | 0.000000 0.000000 0.000000 67 | 0.544726 0.377455 0.748867 68 | 0.609632 0.587324 0.532353 69 | -0.776927 -0.331725 0.535110 70 | 0.148609 0.982462 0.112619 71 | 0.656418 -0.312286 0.686727 72 | -0.404975 0.911955 0.065826 73 | 0.868520 -0.355322 0.345570 74 | 0.289850 -0.928916 0.230439 75 | -0.239456 -0.783871 0.572894 76 | -0.844074 0.167208 0.509490 77 | 0.340963 0.705851 0.620902 78 | 0.396979 -0.769599 0.500124 79 | -0.603516 0.194891 0.773166 80 | 0.061356 0.678213 0.732300 81 | 0.991308 0.130820 0.013946 82 | 0.888235 0.458057 0.034949 83 | -0.475096 -0.068728 0.877246 84 | -0.962977 0.119082 0.241855 85 | 0.428766 -0.197945 
0.881463 86 | 0.385631 0.854039 0.349150 87 | -0.813985 0.467230 0.345145 88 | 0.833035 -0.114242 0.541296 89 | -0.499832 -0.344578 0.794628 90 | -0.128004 0.970726 0.203238 91 | -0.652096 -0.609117 0.451384 92 | -0.655402 0.442914 0.611780 93 | 0.265337 0.445947 0.854826 94 | 0.421367 0.905972 0.040812 95 | -0.936738 -0.192107 0.292603 96 | 0.132731 -0.140303 0.981171 97 | 0.885328 0.355894 0.299221 98 | 0.788495 -0.593081 0.162880 99 | -0.667806 0.732923 0.129842 100 | -0.390876 0.533586 0.750001 101 | 0.089877 0.182579 0.979074 102 | -0.679457 -0.733690 0.006100 103 | -0.576719 0.689802 0.437686 104 | 0.977845 -0.191832 0.083786 105 | 0.695364 0.663640 0.275773 106 | -0.174705 -0.000574 0.984621 107 | 0.661548 0.044982 0.748553 108 | 0.062135 0.885488 0.460490 109 | -0.829967 -0.504657 0.237645 110 | 0.572778 -0.792219 0.210509 111 | -0.260494 -0.920981 0.289718 112 | 0.946762 0.038072 0.319674 113 | 0.782384 0.276996 0.557807 114 | -0.458819 -0.593757 0.661013 115 | 0.048482 -0.881620 0.469463 116 | 0.656893 -0.574331 0.488503 117 | 0.144417 -0.701699 0.697684 118 | -0.895446 0.439007 0.073818 119 | -0.152355 -0.579408 0.800671 120 | -0.748166 -0.080234 0.658642 121 | -0.341165 0.856815 0.386619 122 | -0.180254 -0.306281 0.934719 123 | -0.214911 0.744225 0.632410 124 | 0.424149 -0.519307 0.741902 125 | -0.529012 -0.804593 0.269772 126 | 0.144461 -0.439333 0.886633 127 | -0.320648 0.261317 0.910438 128 | -0.075792 0.446651 0.891492 129 | 0.395944 0.123399 0.909946 130 | 0.009233 -0.988246 0.152594 131 | 0.000000 0.000000 0.000000 132 | 0.682493 -0.435984 0.586618 133 | -0.147704 -0.964043 0.220917 134 | -0.438106 -0.886335 0.149914 135 | -0.660883 -0.042704 0.749273 136 | 0.134860 0.222122 0.965647 137 | -0.198969 0.013096 0.979918 138 | -0.801989 -0.342390 0.489472 139 | -0.061620 -0.913820 0.401418 140 | 0.530749 0.362922 0.765894 141 | -0.240426 0.420100 0.875049 142 | -0.309412 0.196522 0.930400 143 | -0.548454 0.705722 0.448502 144 | 0.378367 -0.390897 0.839070 145 | -0.842551 0.306185 0.443123 146 | -0.722094 -0.677761 0.138638 147 | 0.209104 0.807998 0.550831 148 | 0.884260 0.019329 0.466595 149 | 0.029162 0.946757 0.320625 150 | -0.672723 0.474347 0.567837 151 | -0.214108 0.746299 0.630234 152 | 0.705159 0.691898 0.155010 153 | -0.024569 0.413747 0.910060 154 | 0.695187 0.242023 0.676860 155 | -0.008691 0.827933 0.560760 156 | 0.976751 0.167954 0.133225 157 | 0.817267 -0.361517 0.448754 158 | 0.874882 -0.472092 0.108217 159 | -0.146820 0.898783 0.413077 160 | 0.558461 -0.754455 0.344847 161 | 0.007949 0.999695 0.023386 162 | -0.839099 0.061940 0.540441 163 | -0.992551 0.057597 0.107356 164 | -0.863780 0.454221 0.218099 165 | -0.851899 -0.523691 0.004044 166 | -0.951551 0.305693 0.033199 167 | -0.144824 -0.685584 0.713443 168 | -0.584145 -0.674634 0.451269 169 | -0.471689 -0.435258 0.766851 170 | 0.205586 -0.065247 0.976462 171 | 0.274919 0.886508 0.372187 172 | 0.612225 -0.619178 0.491731 173 | -0.937846 0.050695 0.343330 174 | 0.646799 0.561461 0.516152 175 | -0.554293 0.788791 0.265648 176 | 0.786681 -0.523676 0.326949 177 | -0.598012 -0.515564 0.613658 178 | -0.744543 0.243819 0.621456 179 | 0.544630 0.747155 0.380969 180 | 0.963355 0.024872 0.267073 181 | 0.929921 -0.298441 0.214894 182 | 0.990351 -0.101509 0.094349 183 | -0.570219 -0.775024 0.272376 184 | 0.376515 0.024215 0.926094 185 | -0.731484 -0.527570 0.431974 186 | 0.306430 -0.913771 0.266690 187 | -0.180501 0.980396 0.079006 188 | -0.349768 0.819075 0.454728 189 | 0.611610 0.791103 0.009406 190 | -0.787094 -0.135100 
0.601856 191 | -0.330954 -0.879372 0.342309 192 | -0.721389 0.656104 0.221643 193 | -0.461415 0.639631 0.614791 194 | -0.104168 0.616156 0.780705 195 | 0.727773 0.598039 0.335703 196 | -0.229836 -0.972595 0.035120 197 | 0.054595 -0.220595 0.973836 198 | 0.596194 -0.037389 0.801969 199 | -0.199789 -0.516483 0.832664 200 | 0.164011 -0.889514 0.426457 201 | -0.138888 -0.249037 0.958484 202 | 0.090130 -0.977427 0.191084 203 | 0.534268 -0.431777 0.726723 204 | 0.337520 -0.939530 0.057991 205 | 0.314666 0.617841 0.720595 206 | 0.755979 0.032312 0.653798 207 | 0.033493 -0.808259 0.587874 208 | 0.780348 -0.183893 0.597697 209 | 0.520031 0.535568 0.665383 210 | -0.646341 0.763035 0.004633 211 | -0.812803 -0.525098 0.252236 212 | -0.661193 -0.302506 0.686523 213 | -0.085964 0.221178 0.971437 214 | 0.385603 -0.800067 0.459568 215 | -0.803180 0.594787 0.033630 216 | 0.762541 0.394768 0.512532 217 | 0.245213 -0.557692 0.793001 218 | 0.454541 0.856848 0.243318 219 | 0.329770 0.234272 0.914532 220 | 0.415662 0.907479 0.060893 221 | -0.338424 -0.128025 0.932244 222 | 0.851337 0.217429 0.477441 223 | -0.370701 -0.619560 0.691900 224 | -0.407524 -0.750719 0.519947 225 | -0.620971 0.172696 0.764573 226 | -0.961797 -0.273243 0.016850 227 | 0.179408 0.966951 0.181161 228 | 0.516062 -0.842252 0.155856 229 | 0.320022 0.427597 0.845427 230 | 0.853200 0.483316 0.196102 231 | 0.903653 0.314444 0.290752 232 | -0.281072 0.926315 0.250877 233 | 0.534041 0.158965 0.830379 234 | -0.551716 0.430355 0.714425 235 | -0.910509 -0.365329 0.193670 236 | -0.333608 0.566454 0.753549 237 | 0.623271 -0.233531 0.746322 238 | 0.418219 0.722410 0.550652 239 | -0.205415 -0.813942 0.543418 240 | 0.413613 -0.187154 0.891009 241 | -0.970129 -0.140845 0.197513 242 | 0.129941 0.517041 0.846040 243 | -0.312861 -0.349682 0.883086 244 | -0.899476 -0.178210 0.398978 245 | 0.715387 -0.669896 0.198647 246 | -0.741107 0.540193 0.398688 247 | -0.934636 0.250376 0.252521 248 | 0.260162 -0.736155 0.624813 249 | -0.449799 0.314130 0.836064 250 | 0.011069 0.010701 0.999881 251 | 0.001828 -0.466765 0.884380 252 | -0.463042 0.879094 0.113079 253 | 0.067238 -0.656494 0.751329 254 | 0.194959 -0.349950 0.916257 255 | -0.475905 0.043381 0.878426 256 | 0.906411 -0.175757 0.384094 257 | 0.094326 0.695330 0.712473 258 | 0.444794 -0.618631 0.647653 259 | -0.516371 -0.206293 0.831146 260 | 0.000000 0.000000 0.000000 -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE260_scheme.txt: -------------------------------------------------------------------------------- 1 | VERSION: 1 2 | 0.506659 0.400302 0.763580 0.030796 0.035660 0.022932 0.077400 3 | 0.157900 0.072222 0.984810 0.030796 0.035660 0.022932 0.077400 4 | -0.218510 -0.852977 0.474008 0.030796 0.035660 0.022932 0.077400 5 | -0.365721 0.720282 0.589443 0.030796 0.035660 0.022932 0.077400 6 | 0.704623 0.467065 0.534187 0.030796 0.035660 0.022932 0.077400 7 | -0.593909 0.267921 0.758611 0.030796 0.035660 0.022932 0.077400 8 | 0.432124 0.710808 0.554996 0.030796 0.035660 0.022932 0.077400 9 | -0.371309 -0.928490 0.005993 0.030796 0.035660 0.022932 0.077400 10 | 0.295611 0.910524 0.289067 0.030796 0.035660 0.022932 0.077400 11 | 0.619266 -0.628691 0.470380 0.030796 0.035660 0.022932 0.077400 12 | 0.724778 0.175052 0.666374 0.030796 0.035660 0.022932 0.077400 13 | 0.033005 0.991648 0.124681 0.030796 0.035660 0.022932 0.077400 14 | -0.781988 -0.447054 0.434323 0.030796 0.035660 0.022932 0.077400 15 | 0.695943 -0.356921 0.623114 0.030796 0.035660 0.022932 
0.077400 16 | 0.905458 0.395704 0.153508 0.030796 0.035660 0.022932 0.077400 17 | -0.944708 -0.227593 0.236070 0.030796 0.035660 0.022932 0.077400 18 | 0.223540 -0.968191 0.112406 0.030796 0.035660 0.022932 0.077400 19 | -0.811879 0.575981 0.095392 0.030796 0.035660 0.022932 0.077400 20 | -0.769694 0.073603 0.634156 0.030796 0.035660 0.022932 0.077400 21 | 0.464724 -0.358097 0.809814 0.030796 0.035660 0.022932 0.077400 22 | -0.210316 0.881590 0.422571 0.030796 0.035660 0.022932 0.077400 23 | 0.370207 -0.660392 0.653322 0.030796 0.035660 0.022932 0.077400 24 | 0.254340 0.580075 0.773837 0.030796 0.035660 0.022932 0.077400 25 | -0.512483 -0.441690 0.736390 0.030796 0.035660 0.022932 0.077400 26 | -0.239806 0.537204 0.808644 0.030796 0.035660 0.022932 0.077400 27 | -0.583671 0.793415 0.172685 0.030796 0.035660 0.022932 0.077400 28 | 0.770125 -0.602436 0.209710 0.030796 0.035660 0.022932 0.077400 29 | -0.956379 0.146149 0.252942 0.030796 0.035660 0.022932 0.077400 30 | -0.688910 -0.690736 0.219742 0.030796 0.035660 0.022932 0.077400 31 | -0.183398 -0.404022 0.896176 0.030796 0.035660 0.022932 0.077400 32 | 0.594203 -0.085277 0.799782 0.030796 0.035660 0.022932 0.077400 33 | 0.945027 -0.326622 0.015535 0.030796 0.035660 0.022932 0.077400 34 | 0.020028 -0.676546 0.736128 0.030796 0.035660 0.022932 0.077400 35 | -0.845253 -0.530449 0.064580 0.030796 0.035660 0.022932 0.077400 36 | 0.601240 0.789747 0.121694 0.030796 0.035660 0.022932 0.077400 37 | 0.119419 0.858841 0.498128 0.030796 0.035660 0.022932 0.077400 38 | 0.892122 0.242131 0.381434 0.030796 0.035660 0.022932 0.077400 39 | -0.686004 0.615727 0.387658 0.030796 0.035660 0.022932 0.077400 40 | -0.013039 0.710436 0.703641 0.030796 0.035660 0.022932 0.077400 41 | 0.087481 -0.876949 0.472554 0.030796 0.035660 0.022932 0.077400 42 | 0.427293 0.135118 0.893960 0.030796 0.035660 0.022932 0.077400 43 | -0.444646 -0.851496 0.277929 0.030796 0.035660 0.022932 0.077400 44 | 0.368869 -0.840662 0.396514 0.030796 0.035660 0.022932 0.077400 45 | -0.086657 -0.976336 0.198137 0.030796 0.035660 0.022932 0.077400 46 | -0.997872 -0.064877 0.006484 0.030796 0.035660 0.022932 0.077400 47 | 0.677837 0.654369 0.335169 0.030796 0.035660 0.022932 0.077400 48 | 0.968321 -0.061125 0.242114 0.030796 0.035660 0.022932 0.077400 49 | -0.072838 -0.101980 0.992116 0.030796 0.035660 0.022932 0.077400 50 | 0.211364 -0.224388 0.951302 0.030796 0.035660 0.022932 0.077400 51 | -0.834101 0.323656 0.446680 0.030796 0.035660 0.022932 0.077400 52 | 0.186653 -0.490612 0.851153 0.030796 0.035660 0.022932 0.077400 53 | -0.312565 0.934105 0.172488 0.030796 0.035660 0.022932 0.077400 54 | 0.866947 -0.346737 0.358017 0.030796 0.035660 0.022932 0.077400 55 | -0.544233 -0.654414 0.524931 0.030796 0.035660 0.022932 0.077400 56 | 0.142318 0.365086 0.920031 0.030796 0.035660 0.022932 0.077400 57 | -0.822785 -0.181488 0.538597 0.030796 0.035660 0.022932 0.077400 58 | 0.535998 -0.832896 0.137807 0.030796 0.035660 0.022932 0.077400 59 | -0.593110 -0.166013 0.787820 0.030796 0.035660 0.022932 0.077400 60 | -0.571310 0.516642 0.637719 0.030796 0.035660 0.022932 0.077400 61 | 0.844634 -0.067728 0.531042 0.030796 0.035660 0.022932 0.077400 62 | -0.345579 -0.122150 0.930406 0.030796 0.035660 0.022932 0.077400 63 | -0.267045 -0.655804 0.706122 0.030796 0.035660 0.022932 0.077400 64 | -0.135770 0.297339 0.945069 0.030796 0.035660 0.022932 0.077400 65 | -0.383242 0.170978 0.907685 0.030796 0.035660 0.022932 0.077400 66 | 0.544726 0.377455 0.748867 0.053340 0.035660 0.022932 0.077400 67 | 0.609632 
0.587324 0.532353 0.053340 0.035660 0.022932 0.077400 68 | -0.776927 -0.331725 0.535110 0.053340 0.035660 0.022932 0.077400 69 | 0.148609 0.982462 0.112619 0.053340 0.035660 0.022932 0.077400 70 | 0.656418 -0.312286 0.686727 0.053340 0.035660 0.022932 0.077400 71 | -0.404975 0.911955 0.065826 0.053340 0.035660 0.022932 0.077400 72 | 0.868520 -0.355322 0.345570 0.053340 0.035660 0.022932 0.077400 73 | 0.289850 -0.928916 0.230439 0.053340 0.035660 0.022932 0.077400 74 | -0.239456 -0.783871 0.572894 0.053340 0.035660 0.022932 0.077400 75 | -0.844074 0.167208 0.509490 0.053340 0.035660 0.022932 0.077400 76 | 0.340963 0.705851 0.620902 0.053340 0.035660 0.022932 0.077400 77 | 0.396979 -0.769599 0.500124 0.053340 0.035660 0.022932 0.077400 78 | -0.603516 0.194891 0.773166 0.053340 0.035660 0.022932 0.077400 79 | 0.061356 0.678213 0.732300 0.053340 0.035660 0.022932 0.077400 80 | 0.991308 0.130820 0.013946 0.053340 0.035660 0.022932 0.077400 81 | 0.888235 0.458057 0.034949 0.053340 0.035660 0.022932 0.077400 82 | -0.475096 -0.068728 0.877246 0.053340 0.035660 0.022932 0.077400 83 | -0.962977 0.119082 0.241855 0.053340 0.035660 0.022932 0.077400 84 | 0.428766 -0.197945 0.881463 0.053340 0.035660 0.022932 0.077400 85 | 0.385631 0.854039 0.349150 0.053340 0.035660 0.022932 0.077400 86 | -0.813985 0.467230 0.345145 0.053340 0.035660 0.022932 0.077400 87 | 0.833035 -0.114242 0.541296 0.053340 0.035660 0.022932 0.077400 88 | -0.499832 -0.344578 0.794628 0.053340 0.035660 0.022932 0.077400 89 | -0.128004 0.970726 0.203238 0.053340 0.035660 0.022932 0.077400 90 | -0.652096 -0.609117 0.451384 0.053340 0.035660 0.022932 0.077400 91 | -0.655402 0.442914 0.611780 0.053340 0.035660 0.022932 0.077400 92 | 0.265337 0.445947 0.854826 0.053340 0.035660 0.022932 0.077400 93 | 0.421367 0.905972 0.040812 0.053340 0.035660 0.022932 0.077400 94 | -0.936738 -0.192107 0.292603 0.053340 0.035660 0.022932 0.077400 95 | 0.132731 -0.140303 0.981171 0.053340 0.035660 0.022932 0.077400 96 | 0.885328 0.355894 0.299221 0.053340 0.035660 0.022932 0.077400 97 | 0.788495 -0.593081 0.162880 0.053340 0.035660 0.022932 0.077400 98 | -0.667806 0.732923 0.129842 0.053340 0.035660 0.022932 0.077400 99 | -0.390876 0.533586 0.750001 0.053340 0.035660 0.022932 0.077400 100 | 0.089877 0.182579 0.979074 0.053340 0.035660 0.022932 0.077400 101 | -0.679457 -0.733690 0.006100 0.053340 0.035660 0.022932 0.077400 102 | -0.576719 0.689802 0.437686 0.053340 0.035660 0.022932 0.077400 103 | 0.977845 -0.191832 0.083786 0.053340 0.035660 0.022932 0.077400 104 | 0.695364 0.663640 0.275773 0.053340 0.035660 0.022932 0.077400 105 | -0.174705 -0.000574 0.984621 0.053340 0.035660 0.022932 0.077400 106 | 0.661548 0.044982 0.748553 0.053340 0.035660 0.022932 0.077400 107 | 0.062135 0.885488 0.460490 0.053340 0.035660 0.022932 0.077400 108 | -0.829967 -0.504657 0.237645 0.053340 0.035660 0.022932 0.077400 109 | 0.572778 -0.792219 0.210509 0.053340 0.035660 0.022932 0.077400 110 | -0.260494 -0.920981 0.289718 0.053340 0.035660 0.022932 0.077400 111 | 0.946762 0.038072 0.319674 0.053340 0.035660 0.022932 0.077400 112 | 0.782384 0.276996 0.557807 0.053340 0.035660 0.022932 0.077400 113 | -0.458819 -0.593757 0.661013 0.053340 0.035660 0.022932 0.077400 114 | 0.048482 -0.881620 0.469463 0.053340 0.035660 0.022932 0.077400 115 | 0.656893 -0.574331 0.488503 0.053340 0.035660 0.022932 0.077400 116 | 0.144417 -0.701699 0.697684 0.053340 0.035660 0.022932 0.077400 117 | -0.895446 0.439007 0.073818 0.053340 0.035660 0.022932 0.077400 118 | -0.152355 -0.579408 0.800671 
0.053340 0.035660 0.022932 0.077400 119 | -0.748166 -0.080234 0.658642 0.053340 0.035660 0.022932 0.077400 120 | -0.341165 0.856815 0.386619 0.053340 0.035660 0.022932 0.077400 121 | -0.180254 -0.306281 0.934719 0.053340 0.035660 0.022932 0.077400 122 | -0.214911 0.744225 0.632410 0.053340 0.035660 0.022932 0.077400 123 | 0.424149 -0.519307 0.741902 0.053340 0.035660 0.022932 0.077400 124 | -0.529012 -0.804593 0.269772 0.053340 0.035660 0.022932 0.077400 125 | 0.144461 -0.439333 0.886633 0.053340 0.035660 0.022932 0.077400 126 | -0.320648 0.261317 0.910438 0.053340 0.035660 0.022932 0.077400 127 | -0.075792 0.446651 0.891492 0.053340 0.035660 0.022932 0.077400 128 | 0.395944 0.123399 0.909946 0.053340 0.035660 0.022932 0.077400 129 | 0.009233 -0.988246 0.152594 0.053340 0.035660 0.022932 0.077400 130 | 0.682493 -0.435984 0.586618 0.068862 0.035660 0.022932 0.077400 131 | -0.147704 -0.964043 0.220917 0.068862 0.035660 0.022932 0.077400 132 | -0.438106 -0.886335 0.149914 0.068862 0.035660 0.022932 0.077400 133 | -0.660883 -0.042704 0.749273 0.068862 0.035660 0.022932 0.077400 134 | 0.134860 0.222122 0.965647 0.068862 0.035660 0.022932 0.077400 135 | -0.198969 0.013096 0.979918 0.068862 0.035660 0.022932 0.077400 136 | -0.801989 -0.342390 0.489472 0.068862 0.035660 0.022932 0.077400 137 | -0.061620 -0.913820 0.401418 0.068862 0.035660 0.022932 0.077400 138 | 0.530749 0.362922 0.765894 0.068862 0.035660 0.022932 0.077400 139 | -0.240426 0.420100 0.875049 0.068862 0.035660 0.022932 0.077400 140 | -0.309412 0.196522 0.930400 0.068862 0.035660 0.022932 0.077400 141 | -0.548454 0.705722 0.448502 0.068862 0.035660 0.022932 0.077400 142 | 0.378367 -0.390897 0.839070 0.068862 0.035660 0.022932 0.077400 143 | -0.842551 0.306185 0.443123 0.068862 0.035660 0.022932 0.077400 144 | -0.722094 -0.677761 0.138638 0.068862 0.035660 0.022932 0.077400 145 | 0.209104 0.807998 0.550831 0.068862 0.035660 0.022932 0.077400 146 | 0.884260 0.019329 0.466595 0.068862 0.035660 0.022932 0.077400 147 | 0.029162 0.946757 0.320625 0.068862 0.035660 0.022932 0.077400 148 | -0.672723 0.474347 0.567837 0.068862 0.035660 0.022932 0.077400 149 | -0.214108 0.746299 0.630234 0.068862 0.035660 0.022932 0.077400 150 | 0.705159 0.691898 0.155010 0.068862 0.035660 0.022932 0.077400 151 | -0.024569 0.413747 0.910060 0.068862 0.035660 0.022932 0.077400 152 | 0.695187 0.242023 0.676860 0.068862 0.035660 0.022932 0.077400 153 | -0.008691 0.827933 0.560760 0.068862 0.035660 0.022932 0.077400 154 | 0.976751 0.167954 0.133225 0.068862 0.035660 0.022932 0.077400 155 | 0.817267 -0.361517 0.448754 0.068862 0.035660 0.022932 0.077400 156 | 0.874882 -0.472092 0.108217 0.068862 0.035660 0.022932 0.077400 157 | -0.146820 0.898783 0.413077 0.068862 0.035660 0.022932 0.077400 158 | 0.558461 -0.754455 0.344847 0.068862 0.035660 0.022932 0.077400 159 | 0.007949 0.999695 0.023386 0.068862 0.035660 0.022932 0.077400 160 | -0.839099 0.061940 0.540441 0.068862 0.035660 0.022932 0.077400 161 | -0.992551 0.057597 0.107356 0.068862 0.035660 0.022932 0.077400 162 | -0.863780 0.454221 0.218099 0.068862 0.035660 0.022932 0.077400 163 | -0.851899 -0.523691 0.004044 0.068862 0.035660 0.022932 0.077400 164 | -0.951551 0.305693 0.033199 0.068862 0.035660 0.022932 0.077400 165 | -0.144824 -0.685584 0.713443 0.068862 0.035660 0.022932 0.077400 166 | -0.584145 -0.674634 0.451269 0.068862 0.035660 0.022932 0.077400 167 | -0.471689 -0.435258 0.766851 0.068862 0.035660 0.022932 0.077400 168 | 0.205586 -0.065247 0.976462 0.068862 0.035660 0.022932 0.077400 169 | 0.274919 
0.886508 0.372187 0.068862 0.035660 0.022932 0.077400 170 | 0.612225 -0.619178 0.491731 0.068862 0.035660 0.022932 0.077400 171 | -0.937846 0.050695 0.343330 0.068862 0.035660 0.022932 0.077400 172 | 0.646799 0.561461 0.516152 0.068862 0.035660 0.022932 0.077400 173 | -0.554293 0.788791 0.265648 0.068862 0.035660 0.022932 0.077400 174 | 0.786681 -0.523676 0.326949 0.068862 0.035660 0.022932 0.077400 175 | -0.598012 -0.515564 0.613658 0.068862 0.035660 0.022932 0.077400 176 | -0.744543 0.243819 0.621456 0.068862 0.035660 0.022932 0.077400 177 | 0.544630 0.747155 0.380969 0.068862 0.035660 0.022932 0.077400 178 | 0.963355 0.024872 0.267073 0.068862 0.035660 0.022932 0.077400 179 | 0.929921 -0.298441 0.214894 0.068862 0.035660 0.022932 0.077400 180 | 0.990351 -0.101509 0.094349 0.068862 0.035660 0.022932 0.077400 181 | -0.570219 -0.775024 0.272376 0.068862 0.035660 0.022932 0.077400 182 | 0.376515 0.024215 0.926094 0.068862 0.035660 0.022932 0.077400 183 | -0.731484 -0.527570 0.431974 0.068862 0.035660 0.022932 0.077400 184 | 0.306430 -0.913771 0.266690 0.068862 0.035660 0.022932 0.077400 185 | -0.180501 0.980396 0.079006 0.068862 0.035660 0.022932 0.077400 186 | -0.349768 0.819075 0.454728 0.068862 0.035660 0.022932 0.077400 187 | 0.611610 0.791103 0.009406 0.068862 0.035660 0.022932 0.077400 188 | -0.787094 -0.135100 0.601856 0.068862 0.035660 0.022932 0.077400 189 | -0.330954 -0.879372 0.342309 0.068862 0.035660 0.022932 0.077400 190 | -0.721389 0.656104 0.221643 0.068862 0.035660 0.022932 0.077400 191 | -0.461415 0.639631 0.614791 0.068862 0.035660 0.022932 0.077400 192 | -0.104168 0.616156 0.780705 0.068862 0.035660 0.022932 0.077400 193 | 0.727773 0.598039 0.335703 0.068862 0.035660 0.022932 0.077400 194 | -0.229836 -0.972595 0.035120 0.068862 0.035660 0.022932 0.077400 195 | 0.054595 -0.220595 0.973836 0.068862 0.035660 0.022932 0.077400 196 | 0.596194 -0.037389 0.801969 0.068862 0.035660 0.022932 0.077400 197 | -0.199789 -0.516483 0.832664 0.068862 0.035660 0.022932 0.077400 198 | 0.164011 -0.889514 0.426457 0.068862 0.035660 0.022932 0.077400 199 | -0.138888 -0.249037 0.958484 0.068862 0.035660 0.022932 0.077400 200 | 0.090130 -0.977427 0.191084 0.068862 0.035660 0.022932 0.077400 201 | 0.534268 -0.431777 0.726723 0.068862 0.035660 0.022932 0.077400 202 | 0.337520 -0.939530 0.057991 0.068862 0.035660 0.022932 0.077400 203 | 0.314666 0.617841 0.720595 0.068862 0.035660 0.022932 0.077400 204 | 0.755979 0.032312 0.653798 0.068862 0.035660 0.022932 0.077400 205 | 0.033493 -0.808259 0.587874 0.068862 0.035660 0.022932 0.077400 206 | 0.780348 -0.183893 0.597697 0.068862 0.035660 0.022932 0.077400 207 | 0.520031 0.535568 0.665383 0.068862 0.035660 0.022932 0.077400 208 | -0.646341 0.763035 0.004633 0.068862 0.035660 0.022932 0.077400 209 | -0.812803 -0.525098 0.252236 0.068862 0.035660 0.022932 0.077400 210 | -0.661193 -0.302506 0.686523 0.068862 0.035660 0.022932 0.077400 211 | -0.085964 0.221178 0.971437 0.068862 0.035660 0.022932 0.077400 212 | 0.385603 -0.800067 0.459568 0.068862 0.035660 0.022932 0.077400 213 | -0.803180 0.594787 0.033630 0.068862 0.035660 0.022932 0.077400 214 | 0.762541 0.394768 0.512532 0.068862 0.035660 0.022932 0.077400 215 | 0.245213 -0.557692 0.793001 0.068862 0.035660 0.022932 0.077400 216 | 0.454541 0.856848 0.243318 0.068862 0.035660 0.022932 0.077400 217 | 0.329770 0.234272 0.914532 0.068862 0.035660 0.022932 0.077400 218 | 0.415662 0.907479 0.060893 0.068862 0.035660 0.022932 0.077400 219 | -0.338424 -0.128025 0.932244 0.068862 0.035660 0.022932 0.077400 
220 | 0.851337 0.217429 0.477441 0.068862 0.035660 0.022932 0.077400 221 | -0.370701 -0.619560 0.691900 0.068862 0.035660 0.022932 0.077400 222 | -0.407524 -0.750719 0.519947 0.068862 0.035660 0.022932 0.077400 223 | -0.620971 0.172696 0.764573 0.068862 0.035660 0.022932 0.077400 224 | -0.961797 -0.273243 0.016850 0.068862 0.035660 0.022932 0.077400 225 | 0.179408 0.966951 0.181161 0.068862 0.035660 0.022932 0.077400 226 | 0.516062 -0.842252 0.155856 0.068862 0.035660 0.022932 0.077400 227 | 0.320022 0.427597 0.845427 0.068862 0.035660 0.022932 0.077400 228 | 0.853200 0.483316 0.196102 0.068862 0.035660 0.022932 0.077400 229 | 0.903653 0.314444 0.290752 0.068862 0.035660 0.022932 0.077400 230 | -0.281072 0.926315 0.250877 0.068862 0.035660 0.022932 0.077400 231 | 0.534041 0.158965 0.830379 0.068862 0.035660 0.022932 0.077400 232 | -0.551716 0.430355 0.714425 0.068862 0.035660 0.022932 0.077400 233 | -0.910509 -0.365329 0.193670 0.068862 0.035660 0.022932 0.077400 234 | -0.333608 0.566454 0.753549 0.068862 0.035660 0.022932 0.077400 235 | 0.623271 -0.233531 0.746322 0.068862 0.035660 0.022932 0.077400 236 | 0.418219 0.722410 0.550652 0.068862 0.035660 0.022932 0.077400 237 | -0.205415 -0.813942 0.543418 0.068862 0.035660 0.022932 0.077400 238 | 0.413613 -0.187154 0.891009 0.068862 0.035660 0.022932 0.077400 239 | -0.970129 -0.140845 0.197513 0.068862 0.035660 0.022932 0.077400 240 | 0.129941 0.517041 0.846040 0.068862 0.035660 0.022932 0.077400 241 | -0.312861 -0.349682 0.883086 0.068862 0.035660 0.022932 0.077400 242 | -0.899476 -0.178210 0.398978 0.068862 0.035660 0.022932 0.077400 243 | 0.715387 -0.669896 0.198647 0.068862 0.035660 0.022932 0.077400 244 | -0.741107 0.540193 0.398688 0.068862 0.035660 0.022932 0.077400 245 | -0.934636 0.250376 0.252521 0.068862 0.035660 0.022932 0.077400 246 | 0.260162 -0.736155 0.624813 0.068862 0.035660 0.022932 0.077400 247 | -0.449799 0.314130 0.836064 0.068862 0.035660 0.022932 0.077400 248 | 0.011069 0.010701 0.999881 0.068862 0.035660 0.022932 0.077400 249 | 0.001828 -0.466765 0.884380 0.068862 0.035660 0.022932 0.077400 250 | -0.463042 0.879094 0.113079 0.068862 0.035660 0.022932 0.077400 251 | 0.067238 -0.656494 0.751329 0.068862 0.035660 0.022932 0.077400 252 | 0.194959 -0.349950 0.916257 0.068862 0.035660 0.022932 0.077400 253 | -0.475905 0.043381 0.878426 0.068862 0.035660 0.022932 0.077400 254 | 0.906411 -0.175757 0.384094 0.068862 0.035660 0.022932 0.077400 255 | 0.094326 0.695330 0.712473 0.068862 0.035660 0.022932 0.077400 256 | 0.444794 -0.618631 0.647653 0.068862 0.035660 0.022932 0.077400 257 | -0.516371 -0.206293 0.831146 0.068862 0.035660 0.022932 0.077400 -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE260v2_bvals.txt: -------------------------------------------------------------------------------- 1 | 0.000000 2 | 1000.000000 3 | 1000.000000 4 | 1000.000000 5 | 1000.000000 6 | 1000.000000 7 | 1000.000000 8 | 1000.000000 9 | 1000.000000 10 | 1000.000000 11 | 1000.000000 12 | 1000.000000 13 | 1000.000000 14 | 1000.000000 15 | 1000.000000 16 | 1000.000000 17 | 1000.000000 18 | 1000.000000 19 | 1000.000000 20 | 1000.000000 21 | 1000.000000 22 | 1000.000000 23 | 1000.000000 24 | 1000.000000 25 | 1000.000000 26 | 1000.000000 27 | 1000.000000 28 | 1000.000000 29 | 1000.000000 30 | 1000.000000 31 | 1000.000000 32 | 1000.000000 33 | 1000.000000 34 | 1000.000000 35 | 1000.000000 36 | 1000.000000 37 | 1000.000000 38 | 1000.000000 39 | 1000.000000 40 | 1000.000000 41 | 1000.000000 42 
| 1000.000000 43 | 1000.000000 44 | 1000.000000 45 | 1000.000000 46 | 1000.000000 47 | 1000.000000 48 | 1000.000000 49 | 1000.000000 50 | 1000.000000 51 | 1000.000000 52 | 1000.000000 53 | 1000.000000 54 | 1000.000000 55 | 1000.000000 56 | 1000.000000 57 | 1000.000000 58 | 1000.000000 59 | 1000.000000 60 | 1000.000000 61 | 1000.000000 62 | 1000.000000 63 | 1000.000000 64 | 1000.000000 65 | 1000.000000 66 | 0.000000 67 | 2000.000000 68 | 2000.000000 69 | 2000.000000 70 | 2000.000000 71 | 2000.000000 72 | 2000.000000 73 | 2000.000000 74 | 2000.000000 75 | 2000.000000 76 | 2000.000000 77 | 2000.000000 78 | 2000.000000 79 | 2000.000000 80 | 2000.000000 81 | 2000.000000 82 | 2000.000000 83 | 2000.000000 84 | 2000.000000 85 | 2000.000000 86 | 2000.000000 87 | 2000.000000 88 | 2000.000000 89 | 2000.000000 90 | 2000.000000 91 | 2000.000000 92 | 2000.000000 93 | 2000.000000 94 | 2000.000000 95 | 2000.000000 96 | 2000.000000 97 | 2000.000000 98 | 2000.000000 99 | 2000.000000 100 | 2000.000000 101 | 2000.000000 102 | 2000.000000 103 | 2000.000000 104 | 2000.000000 105 | 2000.000000 106 | 2000.000000 107 | 2000.000000 108 | 2000.000000 109 | 2000.000000 110 | 2000.000000 111 | 2000.000000 112 | 2000.000000 113 | 2000.000000 114 | 2000.000000 115 | 2000.000000 116 | 2000.000000 117 | 2000.000000 118 | 2000.000000 119 | 2000.000000 120 | 2000.000000 121 | 2000.000000 122 | 2000.000000 123 | 2000.000000 124 | 2000.000000 125 | 2000.000000 126 | 2000.000000 127 | 2000.000000 128 | 2000.000000 129 | 2000.000000 130 | 2000.000000 131 | 0.000000 132 | 5000.000000 133 | 5000.000000 134 | 5000.000000 135 | 5000.000000 136 | 5000.000000 137 | 5000.000000 138 | 5000.000000 139 | 5000.000000 140 | 5000.000000 141 | 5000.000000 142 | 5000.000000 143 | 5000.000000 144 | 5000.000000 145 | 5000.000000 146 | 5000.000000 147 | 5000.000000 148 | 5000.000000 149 | 5000.000000 150 | 5000.000000 151 | 5000.000000 152 | 5000.000000 153 | 5000.000000 154 | 5000.000000 155 | 5000.000000 156 | 5000.000000 157 | 5000.000000 158 | 5000.000000 159 | 5000.000000 160 | 5000.000000 161 | 5000.000000 162 | 5000.000000 163 | 5000.000000 164 | 5000.000000 165 | 5000.000000 166 | 5000.000000 167 | 5000.000000 168 | 5000.000000 169 | 5000.000000 170 | 5000.000000 171 | 5000.000000 172 | 5000.000000 173 | 5000.000000 174 | 5000.000000 175 | 5000.000000 176 | 5000.000000 177 | 5000.000000 178 | 5000.000000 179 | 5000.000000 180 | 5000.000000 181 | 5000.000000 182 | 5000.000000 183 | 5000.000000 184 | 5000.000000 185 | 5000.000000 186 | 5000.000000 187 | 5000.000000 188 | 5000.000000 189 | 5000.000000 190 | 5000.000000 191 | 5000.000000 192 | 5000.000000 193 | 5000.000000 194 | 5000.000000 195 | 5000.000000 196 | 5000.000000 197 | 5000.000000 198 | 5000.000000 199 | 5000.000000 200 | 5000.000000 201 | 5000.000000 202 | 5000.000000 203 | 5000.000000 204 | 5000.000000 205 | 5000.000000 206 | 5000.000000 207 | 5000.000000 208 | 5000.000000 209 | 5000.000000 210 | 5000.000000 211 | 5000.000000 212 | 5000.000000 213 | 5000.000000 214 | 5000.000000 215 | 5000.000000 216 | 5000.000000 217 | 5000.000000 218 | 5000.000000 219 | 5000.000000 220 | 5000.000000 221 | 5000.000000 222 | 5000.000000 223 | 5000.000000 224 | 5000.000000 225 | 5000.000000 226 | 5000.000000 227 | 5000.000000 228 | 5000.000000 229 | 5000.000000 230 | 5000.000000 231 | 5000.000000 232 | 5000.000000 233 | 5000.000000 234 | 5000.000000 235 | 5000.000000 236 | 5000.000000 237 | 5000.000000 238 | 5000.000000 239 | 5000.000000 240 | 5000.000000 241 | 5000.000000 242 | 5000.000000 243 
| 5000.000000 244 | 5000.000000 245 | 5000.000000 246 | 5000.000000 247 | 5000.000000 248 | 5000.000000 249 | 5000.000000 250 | 5000.000000 251 | 5000.000000 252 | 5000.000000 253 | 5000.000000 254 | 5000.000000 255 | 5000.000000 256 | 5000.000000 257 | 5000.000000 258 | 5000.000000 259 | 5000.000000 260 | 0.000000 261 | -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE260v2_bvecs.txt: -------------------------------------------------------------------------------- 1 | 0.000000 0.000000 0.000000 2 | -0.460728 0.370036 0.806723 3 | -0.851590 -0.446171 0.275183 4 | -0.424062 -0.889992 0.167589 5 | 0.564603 -0.800032 0.202911 6 | -0.179371 0.497872 0.848499 7 | 0.957196 0.289338 0.007649 8 | -0.582547 -0.228998 0.779871 9 | -0.186863 0.184211 0.964960 10 | 0.936576 -0.335168 0.102405 11 | -0.263249 -0.865082 0.427005 12 | -0.901371 0.385736 0.196821 13 | -0.534349 0.781382 0.322357 14 | -0.890749 0.163139 0.424208 15 | 0.057218 0.937025 0.344543 16 | -0.706893 0.094790 0.700940 17 | 0.248936 0.964245 0.090901 18 | 0.805982 0.590969 0.033882 19 | 0.108793 0.275300 0.955183 20 | -0.735691 0.570442 0.365178 21 | -0.172757 -0.694000 0.698941 22 | -0.069223 -0.980376 0.184581 23 | -0.073597 -0.435209 0.897316 24 | 0.143125 -0.638456 0.756234 25 | 0.321978 0.034970 0.946101 26 | 0.865220 0.402916 0.298418 27 | 0.396508 0.842577 0.364479 28 | -0.398714 -0.483401 0.779327 29 | 0.974474 0.048641 0.219166 30 | 0.129279 0.568459 0.812491 31 | 0.709183 -0.385424 0.590346 32 | 0.592152 -0.646124 0.481538 33 | -0.692232 -0.461101 0.555159 34 | 0.219569 -0.314709 0.923443 35 | -0.155604 0.746832 0.646552 36 | 0.679158 0.410820 0.608253 37 | 0.041495 -0.867742 0.495280 38 | -0.456767 0.642336 0.615442 39 | 0.259773 -0.937491 0.231581 40 | 0.853117 0.160377 0.496457 41 | 0.580239 0.808929 0.094638 42 | 0.397343 0.383298 0.833787 43 | 0.158946 0.788207 0.594531 44 | -0.700800 0.389492 0.597641 45 | -0.815652 -0.166530 0.554057 46 | -0.433255 0.899267 0.060078 47 | -0.492842 -0.690378 0.529608 48 | 0.430157 -0.508504 0.745915 49 | -0.456346 0.055279 0.888084 50 | 0.897472 -0.211143 0.387250 51 | -0.273766 -0.198573 0.941074 52 | 0.667896 0.646414 0.368868 53 | 0.445389 0.635863 0.630323 54 | 0.512554 -0.202063 0.834541 55 | -0.743448 0.667379 0.043481 56 | -0.949251 -0.138669 0.282299 57 | -0.665596 -0.702591 0.251689 58 | 0.795537 -0.546099 0.262482 59 | 0.752910 -0.081138 0.653102 60 | -0.252612 0.889137 0.381605 61 | -0.993956 0.079585 0.075620 62 | 0.015965 -0.073777 0.997147 63 | -0.120350 0.989733 0.077099 64 | 0.594019 0.147931 0.790732 65 | 0.333182 -0.786930 0.519356 66 | 0.000000 0.000000 0.000000 67 | -0.799032 0.311220 0.514481 68 | -0.243019 -0.438067 0.865470 69 | -0.678912 -0.607210 0.412764 70 | 0.729899 0.658882 0.181993 71 | -0.381221 -0.910448 0.160486 72 | 0.896786 -0.434831 0.081835 73 | 0.712126 -0.666574 0.220354 74 | -0.714721 0.027761 0.698859 75 | -0.270014 0.365703 0.890704 76 | 0.435136 -0.795054 0.422546 77 | -0.697516 0.709050 0.103539 78 | 0.990990 -0.130134 0.031687 79 | 0.484183 -0.869751 0.095396 80 | -0.075925 0.132782 0.988233 81 | -0.590464 0.350171 0.727140 82 | 0.487405 0.861371 0.143098 83 | 0.547205 0.704183 0.452429 84 | -0.454308 0.102870 0.884885 85 | 0.958482 0.228712 0.170302 86 | -0.182798 -0.707798 0.682354 87 | -0.877709 -0.479018 0.012986 88 | -0.457389 0.851190 0.257432 89 | -0.063716 -0.991013 0.117619 90 | -0.422962 -0.786853 0.449406 91 | 0.135628 -0.790601 0.597122 92 | 0.187451 0.969699 0.156669 
93 | -0.220319 0.973928 0.054077 94 | -0.257512 -0.127116 0.957877 95 | 0.033036 -0.229023 0.972860 96 | 0.592994 -0.340078 0.729866 97 | 0.321664 -0.303058 0.897044 98 | 0.677215 0.196607 0.709031 99 | -0.487475 -0.547801 0.679913 100 | -0.077254 0.942534 0.325058 101 | -0.979799 -0.146285 0.136364 102 | 0.227184 0.043541 0.972878 103 | -0.947192 0.176878 0.267472 104 | -0.664933 -0.734822 0.133792 105 | -0.848381 0.475814 0.232059 106 | -0.120428 -0.899214 0.420609 107 | 0.869198 0.162114 0.467134 108 | 0.283503 0.623007 0.729033 109 | -0.418379 0.595770 0.685578 110 | 0.775919 -0.121108 0.619098 111 | 0.516813 -0.034193 0.855415 112 | 0.198111 -0.936918 0.287987 113 | 0.830068 -0.385617 0.402848 114 | -0.527267 -0.234451 0.816715 115 | -0.868836 -0.386827 0.309012 116 | -0.732966 -0.321332 0.599589 117 | 0.358659 -0.594426 0.719737 118 | 0.941192 -0.107344 0.320367 119 | 0.033971 0.799812 0.599289 120 | 0.103001 0.395526 0.912661 121 | 0.623922 -0.577621 0.526379 122 | -0.638618 0.625530 0.448195 123 | -0.094650 0.608164 0.788149 124 | 0.573207 0.486126 0.659633 125 | -0.887113 -0.067557 0.456581 126 | -0.293193 0.805494 0.514992 127 | 0.285195 0.851826 0.439382 128 | 0.397002 0.304113 0.865971 129 | 0.065245 -0.540930 0.838533 130 | 0.796859 0.451956 0.400938 131 | 0.000000 0.000000 0.000000 132 | 0.299370 0.873025 0.384975 133 | 0.565033 -0.800635 0.199304 134 | -0.618861 0.141509 0.772648 135 | 0.644436 0.130662 0.753412 136 | 0.274332 0.495463 0.824171 137 | -0.191539 0.702175 0.685757 138 | -0.960963 -0.022388 0.275769 139 | -0.667250 -0.441196 0.600102 140 | -0.002429 -0.535805 0.844338 141 | -0.911705 0.404431 0.072313 142 | 0.368890 0.614250 0.697580 143 | 0.565493 0.823678 0.042092 144 | 0.308848 0.107179 0.945053 145 | 0.455895 -0.057976 0.888143 146 | -0.351093 0.014591 0.936227 147 | 0.252722 -0.866987 0.429494 148 | -0.085989 0.961021 0.262763 149 | -0.126269 0.015174 0.991880 150 | 0.759768 0.641289 0.107248 151 | 0.619437 0.634388 0.462438 152 | 0.057414 0.121187 0.990968 153 | -0.424521 0.654451 0.625680 154 | 0.068347 -0.673844 0.735705 155 | -0.665592 0.731239 0.149256 156 | 0.007364 0.907840 0.419252 157 | -0.292020 -0.950211 0.108733 158 | 0.992446 -0.115159 0.042299 159 | 0.932929 0.271704 0.236266 160 | -0.744099 0.318792 0.587102 161 | 0.031182 -0.878614 0.476514 162 | -0.751359 0.115135 0.649772 163 | -0.675276 -0.582679 0.452202 164 | 0.937776 0.025331 0.346316 165 | -0.238626 -0.360846 0.901581 166 | 0.866778 -0.476047 0.148576 167 | 0.333505 -0.449276 0.828810 168 | -0.771407 -0.632892 0.066179 169 | 0.547019 -0.619776 0.562715 170 | 0.142895 -0.450097 0.881472 171 | -0.834386 -0.503461 0.224337 172 | -0.634728 0.497693 0.591119 173 | 0.200732 -0.285082 0.937249 174 | -0.128971 0.535778 0.834451 175 | -0.471713 0.751193 0.461731 176 | -0.304346 -0.800766 0.515895 177 | -0.079205 -0.985724 0.148575 178 | 0.951805 -0.255318 0.169939 179 | -0.357522 0.377125 0.854374 180 | -0.871150 -0.124417 0.474993 181 | 0.108020 0.713437 0.692343 182 | -0.561581 -0.013561 0.827310 183 | 0.985881 0.065063 0.154288 184 | 0.568846 -0.190486 0.800081 185 | 0.736712 0.306815 0.602595 186 | -0.114132 0.187401 0.975630 187 | 0.555980 -0.436738 0.707210 188 | -0.519811 -0.392612 0.758717 189 | 0.061295 -0.164546 0.984463 190 | 0.790141 0.405318 0.459776 191 | -0.301412 0.560494 0.771360 192 | -0.890445 0.378682 0.252402 193 | -0.351946 -0.681214 0.641936 194 | -0.465855 -0.147248 0.872523 195 | -0.858486 0.272062 0.434722 196 | 0.306351 -0.922183 0.236067 197 | 0.159032 -0.010915 
0.987213 198 | 0.709151 -0.274556 0.649403 199 | 0.161333 0.822968 0.544698 200 | -0.314115 -0.559908 0.766704 201 | -0.318705 0.907369 0.274058 202 | 0.910353 -0.160453 0.381462 203 | -0.098188 -0.756239 0.646887 204 | -0.984285 0.103635 0.142975 205 | -0.472819 0.218328 0.853683 206 | 0.557811 0.555700 0.616477 207 | 0.748405 -0.645722 0.151436 208 | -0.893611 -0.445492 0.054729 209 | -0.644589 -0.178920 0.743298 210 | -0.921168 -0.237985 0.307916 211 | -0.342283 -0.871902 0.350185 212 | -0.013358 0.348741 0.937124 213 | -0.115078 -0.156450 0.980959 214 | 0.808372 -0.290586 0.511952 215 | -0.281501 0.830301 0.480996 216 | 0.423471 -0.297387 0.855706 217 | -0.634804 0.638425 0.435244 218 | -0.191427 0.356534 0.914461 219 | 0.297374 -0.123684 0.946716 220 | 0.405527 0.741663 0.534307 221 | -0.768784 -0.289302 0.570329 222 | 0.512742 0.802020 0.306363 223 | -0.592667 0.338296 0.730959 224 | 0.487488 0.146565 0.860740 225 | -0.485403 -0.851892 0.196633 226 | 0.342207 0.295685 0.891888 227 | -0.284845 -0.163403 0.944544 228 | 0.107477 0.463526 0.879541 229 | -0.823618 -0.422480 0.378371 230 | 0.252435 -0.755160 0.604987 231 | -0.406940 -0.319049 0.855925 232 | 0.688704 -0.053001 0.723103 233 | 0.876595 0.237709 0.418421 234 | -0.518209 -0.556020 0.649847 235 | 0.774198 0.530650 0.345005 236 | 0.293055 0.946475 0.135291 237 | 0.836894 0.059244 0.544149 238 | 0.287406 -0.607527 0.740479 239 | -0.166186 -0.525768 0.834236 240 | 0.718143 -0.548477 0.428303 241 | -0.476344 0.489498 0.730403 242 | -0.045767 -0.336526 0.940561 243 | -0.098107 0.994224 0.043518 244 | -0.885873 0.084643 0.456141 245 | 0.927775 0.362922 0.086723 246 | 0.609503 0.334101 0.718946 247 | -0.071733 0.806745 0.586530 248 | -0.307535 0.180637 0.934234 249 | 0.042493 0.603535 0.796204 250 | -0.987812 -0.145806 0.054481 251 | 0.501093 -0.763869 0.406706 252 | -0.492440 0.850806 0.183390 253 | 0.165908 0.273414 0.947481 254 | 0.853390 -0.391734 0.343903 255 | -0.767435 -0.083261 0.635697 256 | -0.664533 -0.713765 0.221213 257 | -0.774554 0.496091 0.392378 258 | 0.456962 0.408157 0.790313 259 | 0.688523 0.677913 0.257624 260 | 0.000000 0.000000 0.000000 261 | -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE260v3_bvals.txt: -------------------------------------------------------------------------------- 1 | 0.000000 2 | 1000.000000 3 | 1000.000000 4 | 1000.000000 5 | 1000.000000 6 | 1000.000000 7 | 1000.000000 8 | 1000.000000 9 | 1000.000000 10 | 1000.000000 11 | 1000.000000 12 | 1000.000000 13 | 1000.000000 14 | 1000.000000 15 | 1000.000000 16 | 1000.000000 17 | 1000.000000 18 | 1000.000000 19 | 1000.000000 20 | 1000.000000 21 | 1000.000000 22 | 1000.000000 23 | 1000.000000 24 | 1000.000000 25 | 1000.000000 26 | 1000.000000 27 | 1000.000000 28 | 1000.000000 29 | 1000.000000 30 | 1000.000000 31 | 1000.000000 32 | 1000.000000 33 | 1000.000000 34 | 1000.000000 35 | 1000.000000 36 | 1000.000000 37 | 1000.000000 38 | 1000.000000 39 | 1000.000000 40 | 1000.000000 41 | 1000.000000 42 | 1000.000000 43 | 1000.000000 44 | 1000.000000 45 | 1000.000000 46 | 1000.000000 47 | 1000.000000 48 | 1000.000000 49 | 1000.000000 50 | 1000.000000 51 | 1000.000000 52 | 1000.000000 53 | 1000.000000 54 | 1000.000000 55 | 1000.000000 56 | 1000.000000 57 | 1000.000000 58 | 1000.000000 59 | 1000.000000 60 | 1000.000000 61 | 1000.000000 62 | 1000.000000 63 | 1000.000000 64 | 1000.000000 65 | 1000.000000 66 | 1000.000000 67 | 1000.000000 68 | 1000.000000 69 | 1000.000000 70 | 1000.000000 71 | 
1000.000000 72 | 1000.000000 73 | 1000.000000 74 | 1000.000000 75 | 1000.000000 76 | 1000.000000 77 | 1000.000000 78 | 1000.000000 79 | 1000.000000 80 | 1000.000000 81 | 1000.000000 82 | 1000.000000 83 | 1000.000000 84 | 1000.000000 85 | 1000.000000 86 | 1000.000000 87 | 1000.000000 88 | 1000.000000 89 | 1000.000000 90 | 1000.000000 91 | 1000.000000 92 | 1000.000000 93 | 1000.000000 94 | 1000.000000 95 | 1000.000000 96 | 1000.000000 97 | 1000.000000 98 | 1000.000000 99 | 1000.000000 100 | 1000.000000 101 | 1000.000000 102 | 1000.000000 103 | 1000.000000 104 | 1000.000000 105 | 1000.000000 106 | 1000.000000 107 | 1000.000000 108 | 1000.000000 109 | 1000.000000 110 | 1000.000000 111 | 1000.000000 112 | 1000.000000 113 | 1000.000000 114 | 1000.000000 115 | 1000.000000 116 | 1000.000000 117 | 1000.000000 118 | 1000.000000 119 | 1000.000000 120 | 1000.000000 121 | 1000.000000 122 | 1000.000000 123 | 1000.000000 124 | 1000.000000 125 | 1000.000000 126 | 1000.000000 127 | 1000.000000 128 | 1000.000000 129 | 1000.000000 130 | 0.000000 131 | 3000.000000 132 | 3000.000000 133 | 3000.000000 134 | 3000.000000 135 | 3000.000000 136 | 3000.000000 137 | 3000.000000 138 | 3000.000000 139 | 3000.000000 140 | 3000.000000 141 | 3000.000000 142 | 3000.000000 143 | 3000.000000 144 | 3000.000000 145 | 3000.000000 146 | 3000.000000 147 | 3000.000000 148 | 3000.000000 149 | 3000.000000 150 | 3000.000000 151 | 3000.000000 152 | 3000.000000 153 | 3000.000000 154 | 3000.000000 155 | 3000.000000 156 | 3000.000000 157 | 3000.000000 158 | 3000.000000 159 | 3000.000000 160 | 3000.000000 161 | 3000.000000 162 | 3000.000000 163 | 3000.000000 164 | 3000.000000 165 | 3000.000000 166 | 3000.000000 167 | 3000.000000 168 | 3000.000000 169 | 3000.000000 170 | 3000.000000 171 | 3000.000000 172 | 3000.000000 173 | 3000.000000 174 | 3000.000000 175 | 3000.000000 176 | 3000.000000 177 | 3000.000000 178 | 3000.000000 179 | 3000.000000 180 | 3000.000000 181 | 3000.000000 182 | 3000.000000 183 | 3000.000000 184 | 3000.000000 185 | 3000.000000 186 | 3000.000000 187 | 3000.000000 188 | 3000.000000 189 | 3000.000000 190 | 3000.000000 191 | 3000.000000 192 | 3000.000000 193 | 3000.000000 194 | 3000.000000 195 | 0.000000 196 | 5000.000000 197 | 5000.000000 198 | 5000.000000 199 | 5000.000000 200 | 5000.000000 201 | 5000.000000 202 | 5000.000000 203 | 5000.000000 204 | 5000.000000 205 | 5000.000000 206 | 5000.000000 207 | 5000.000000 208 | 5000.000000 209 | 5000.000000 210 | 5000.000000 211 | 5000.000000 212 | 5000.000000 213 | 5000.000000 214 | 5000.000000 215 | 5000.000000 216 | 5000.000000 217 | 5000.000000 218 | 5000.000000 219 | 5000.000000 220 | 5000.000000 221 | 5000.000000 222 | 5000.000000 223 | 5000.000000 224 | 5000.000000 225 | 5000.000000 226 | 5000.000000 227 | 5000.000000 228 | 5000.000000 229 | 5000.000000 230 | 5000.000000 231 | 5000.000000 232 | 5000.000000 233 | 5000.000000 234 | 5000.000000 235 | 5000.000000 236 | 5000.000000 237 | 5000.000000 238 | 5000.000000 239 | 5000.000000 240 | 5000.000000 241 | 5000.000000 242 | 5000.000000 243 | 5000.000000 244 | 5000.000000 245 | 5000.000000 246 | 5000.000000 247 | 5000.000000 248 | 5000.000000 249 | 5000.000000 250 | 5000.000000 251 | 5000.000000 252 | 5000.000000 253 | 5000.000000 254 | 5000.000000 255 | 5000.000000 256 | 5000.000000 257 | 5000.000000 258 | 5000.000000 259 | 5000.000000 260 | 0.000000 261 | -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE260v3_bvecs.txt: 
-------------------------------------------------------------------------------- 1 | 0.000000 0.000000 0.000000 2 | 0.570869 -0.735395 0.365105 3 | 0.137091 0.473075 0.870291 4 | -0.123964 0.991241 0.045551 5 | -0.507353 0.642062 0.574760 6 | -0.068728 -0.816493 0.573250 7 | 0.562031 -0.476288 0.676218 8 | -0.763391 0.055748 0.643527 9 | 0.188018 -0.535983 0.823026 10 | 0.904696 0.216522 0.366938 11 | 0.437572 -0.876898 0.198946 12 | -0.476996 0.282080 0.832409 13 | -0.645732 -0.162004 0.746180 14 | 0.410438 0.401253 0.818863 15 | 0.844000 -0.402174 0.354852 16 | 0.543214 0.681910 0.489813 17 | 0.967390 -0.158731 0.197386 18 | -0.719812 0.693431 0.032007 19 | -0.546861 -0.565839 0.617065 20 | 0.283884 0.941535 0.181446 21 | -0.977415 -0.202910 0.059064 22 | -0.313052 0.677101 0.665982 23 | 0.515318 0.192871 0.835014 24 | -0.921901 -0.297321 0.248393 25 | 0.320743 -0.947114 0.009978 26 | -0.258668 -0.864479 0.431006 27 | -0.691366 -0.558807 0.457982 28 | 0.256239 -0.751125 0.608402 29 | 0.839151 0.045709 0.541974 30 | -0.909090 -0.139610 0.392512 31 | 0.004334 -0.091413 0.995804 32 | -0.489139 -0.735434 0.468915 33 | -0.319417 -0.262569 0.910511 34 | 0.139932 -0.873986 0.465369 35 | 0.092699 0.669128 0.737343 36 | 0.758697 -0.591951 0.271980 37 | -0.973046 0.225552 0.048033 38 | -0.308459 -0.735259 0.603529 39 | -0.999788 0.020606 0.000380 40 | 0.129256 0.812757 0.568084 41 | -0.496863 -0.359895 0.789685 42 | 0.296125 0.584543 0.755394 43 | -0.377417 -0.560495 0.737158 44 | 0.228919 -0.139907 0.963339 45 | 0.638566 -0.752707 0.160202 46 | 0.827048 0.548854 0.121456 47 | -0.815567 0.261791 0.516058 48 | 0.483789 0.868170 0.110585 49 | -0.975490 -0.006630 0.219945 50 | 0.047313 0.278278 0.959334 51 | -0.209461 0.941398 0.264377 52 | -0.595245 0.071692 0.800340 53 | -0.115619 0.609882 0.784013 54 | -0.611466 0.758189 0.226403 55 | -0.037585 -0.930743 0.363738 56 | 0.149698 0.080479 0.985451 57 | 0.930308 0.337703 0.143123 58 | -0.281121 0.831459 0.479214 59 | 0.679100 -0.551930 0.483939 60 | -0.021630 -0.501057 0.865144 61 | 0.559410 0.776867 0.289029 62 | 0.873374 -0.203410 0.442540 63 | -0.934826 0.223279 0.276126 64 | -0.670787 0.620158 0.406753 65 | 0.836856 0.428244 0.340997 66 | -0.896343 -0.438433 0.065925 67 | -0.520158 0.470974 0.712474 68 | 0.681399 0.726012 0.092742 69 | 0.277757 0.269378 0.922110 70 | 0.023102 0.976695 0.213386 71 | 0.218245 -0.945932 0.239962 72 | -0.575658 -0.811315 0.101907 73 | -0.671812 -0.372207 0.640414 74 | 0.023838 -0.988062 0.152199 75 | 0.151147 -0.334871 0.930062 76 | -0.379122 0.088716 0.921084 77 | 0.164144 0.912646 0.374344 78 | -0.081087 -0.288920 0.953913 79 | 0.431443 -0.179226 0.884158 80 | -0.433514 -0.856562 0.279940 81 | -0.062134 0.899146 0.433216 82 | -0.262440 0.283856 0.922253 83 | 0.978756 0.116649 0.168609 84 | -0.795725 -0.158256 0.584617 85 | -0.640157 -0.709030 0.295762 86 | -0.148136 -0.654841 0.741106 87 | 0.372047 0.842680 0.389194 88 | 0.693636 0.497710 0.520725 89 | 0.482230 -0.677886 0.554910 90 | 0.921051 -0.359479 0.149798 91 | 0.701715 0.124628 0.701473 92 | -0.834614 0.418691 0.357936 93 | -0.239195 -0.453576 0.858519 94 | -0.759808 -0.641953 0.102893 95 | 0.359670 -0.838293 0.409758 96 | -0.692649 0.459098 0.556297 97 | -0.813683 -0.356688 0.459014 98 | 0.591113 -0.037926 0.805696 99 | 0.071756 -0.700214 0.710318 100 | 0.598012 -0.273313 0.753447 101 | 0.379213 -0.384951 0.841434 102 | 0.136714 0.990530 0.012595 103 | 0.935747 -0.013072 0.352429 104 | -0.210014 -0.953108 0.217897 105 | 0.621173 0.351307 0.700519 106 | 
-0.361268 -0.930437 0.061419 107 | 0.838520 -0.540532 0.068618 108 | 0.506111 0.550319 0.664078 109 | 0.742490 -0.350258 0.570988 110 | -0.785368 0.581846 0.211312 111 | 0.368553 -0.585443 0.722098 112 | -0.892961 0.070259 0.444616 113 | -0.321512 0.488821 0.810977 114 | -0.112853 0.124931 0.985726 115 | -0.405430 0.888766 0.213824 116 | -0.097201 0.429414 0.897862 117 | -0.666078 0.270043 0.695282 118 | 0.751042 -0.129917 0.647346 119 | 0.336367 0.733967 0.590042 120 | -0.901490 0.416670 0.117057 121 | -0.473386 -0.114193 0.873422 122 | -0.814839 -0.509778 0.275976 123 | 0.377612 0.047065 0.924767 124 | -0.101367 0.775093 0.623663 125 | -0.474163 0.781091 0.406284 126 | -0.225385 -0.070415 0.971722 127 | -0.528150 0.848840 0.022989 128 | 0.786412 0.285964 0.547522 129 | 0.718412 0.618875 0.317611 130 | 0.000000 0.000000 0.000000 131 | -0.070883 0.018301 0.997317 132 | -0.446628 0.656327 0.608078 133 | -0.423954 0.076722 0.902428 134 | -0.958652 -0.275888 0.069807 135 | 0.469283 0.569418 0.674934 136 | -0.202223 -0.849118 0.487959 137 | -0.880117 -0.127074 0.457435 138 | -0.173717 0.813272 0.555348 139 | -0.552398 0.833177 0.025936 140 | -0.726289 0.521853 0.447407 141 | 0.736941 -0.224808 0.637479 142 | -0.978857 0.024593 0.203060 143 | 0.344761 0.919433 0.189162 144 | -0.775026 0.613544 0.151321 145 | 0.904765 0.284499 0.316956 146 | 0.172489 0.756038 0.631391 147 | -0.408709 0.421126 0.809697 148 | 0.458600 0.771986 0.440140 149 | -0.259325 0.927343 0.269787 150 | 0.575694 -0.496960 0.649313 151 | -0.645346 -0.574467 0.503504 152 | -0.162666 0.310612 0.936515 153 | -0.241148 -0.221637 0.944841 154 | 0.421629 -0.317644 0.849312 155 | 0.623729 0.771519 0.125383 156 | -0.002826 -0.739640 0.672996 157 | -0.855715 0.196340 0.478750 158 | 0.918664 -0.065805 0.389519 159 | 0.472915 0.275500 0.836929 160 | -0.529929 0.778640 0.335998 161 | -0.920676 0.338879 0.193692 162 | 0.238000 -0.970094 0.047676 163 | 0.830988 -0.397111 0.389566 164 | 0.087131 0.916514 0.390399 165 | -0.472059 -0.793455 0.384175 166 | 0.948523 -0.297342 0.109051 167 | -0.086674 -0.973451 0.211849 168 | -0.699572 -0.693859 0.170759 169 | 0.780946 -0.608062 0.142772 170 | 0.205484 -0.548346 0.810613 171 | 0.347399 -0.728506 0.590417 172 | -0.682616 0.008919 0.730723 173 | -0.710466 -0.308939 0.632294 174 | 0.279143 -0.028062 0.959839 175 | 0.993365 0.028231 0.111489 176 | -0.850029 -0.427085 0.308300 177 | 0.155835 0.255857 0.954072 178 | 0.147550 -0.901536 0.406769 179 | 0.624371 -0.672611 0.397185 180 | -0.391923 -0.912809 0.114783 181 | -0.491219 -0.287799 0.822117 182 | 0.087263 -0.287602 0.953766 183 | 0.703119 0.380157 0.600920 184 | -0.119292 0.604308 0.787770 185 | -0.653429 0.324209 0.684046 186 | 0.566728 -0.025929 0.823497 187 | -0.393022 -0.606485 0.691165 188 | 0.433675 -0.862035 0.262338 189 | 0.727130 0.578759 0.369215 190 | -0.150877 -0.512608 0.845262 191 | 0.849767 0.522101 0.072848 192 | 0.781464 0.102431 0.615485 193 | 0.043271 0.994695 0.093324 194 | 0.191238 0.520801 0.831982 195 | 0.000000 0.000000 0.000000 196 | -0.123664 0.218682 0.967928 197 | 0.653743 0.718189 0.238380 198 | -0.551559 0.709218 0.439081 199 | 0.132023 0.008395 0.991211 200 | -0.317881 0.701050 0.638342 201 | -0.613270 -0.699234 0.367384 202 | 0.786081 0.121519 0.606061 203 | 0.919558 0.194690 0.341335 204 | 0.865256 0.476267 0.156532 205 | 0.759177 0.455655 0.464789 206 | -0.004564 -0.952174 0.305521 207 | -0.207234 -0.545028 0.812403 208 | -0.716622 0.184801 0.672534 209 | -0.943236 -0.145343 0.298632 210 | 0.548037 -0.770858 
0.324704 211 | 0.906604 -0.387980 0.165956 212 | 0.751401 -0.498059 0.432822 213 | -0.901713 0.159330 0.401906 214 | -0.122995 -0.810419 0.572795 215 | 0.030056 -0.333940 0.942115 216 | -0.584579 -0.414930 0.697209 217 | 0.526398 0.667197 0.527023 218 | 0.396173 0.181576 0.900043 219 | -0.773982 -0.436420 0.458791 220 | 0.320463 -0.478361 0.817603 221 | -0.800719 -0.132456 0.584213 222 | -0.244246 0.472669 0.846716 223 | -0.418904 -0.672645 0.609974 224 | 0.607604 -0.038083 0.793327 225 | 0.016658 0.646116 0.763058 226 | 0.982943 0.181828 0.027589 227 | 0.136760 0.372904 0.917736 228 | -0.713560 0.675433 0.186073 229 | -0.083982 0.854692 0.512298 230 | 0.067740 0.961290 0.267082 231 | -0.718989 -0.690778 0.076680 232 | -0.171900 -0.984397 0.037599 233 | 0.342070 0.548935 0.762665 234 | -0.542476 0.456786 0.705029 235 | -0.579896 -0.086854 0.810048 236 | 0.378266 0.885477 0.269898 237 | 0.612982 -0.349496 0.708594 238 | 0.829694 -0.191978 0.524168 239 | 0.354905 -0.188980 0.915603 240 | -0.358987 -0.296030 0.885152 241 | -0.158191 0.986474 0.042953 242 | -0.435608 0.177871 0.882387 243 | -0.895700 0.414888 0.159967 244 | -0.327646 0.895112 0.302362 245 | 0.228359 -0.817945 0.528032 246 | 0.232739 0.800229 0.552690 247 | -0.989374 0.123138 0.077304 248 | 0.746465 -0.654316 0.121082 249 | 0.592651 0.354356 0.723323 250 | 0.301848 -0.926344 0.225333 251 | 0.081241 -0.646962 0.758182 252 | -0.473303 -0.879977 0.040295 253 | 0.961737 -0.101400 0.254520 254 | -0.501944 0.864327 0.031481 255 | 0.486015 -0.636434 0.598950 256 | -0.187772 -0.080648 0.978896 257 | -0.338682 -0.880009 0.332984 258 | -0.882411 -0.438787 0.169755 259 | -0.762757 0.453221 0.461294 260 | 0.000000 0.000000 0.000000 261 | -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE260v4_bvals.txt: -------------------------------------------------------------------------------- 1 | 0.000000 2 | 1000.000000 3 | 1000.000000 4 | 1000.000000 5 | 1000.000000 6 | 1000.000000 7 | 1000.000000 8 | 1000.000000 9 | 1000.000000 10 | 1000.000000 11 | 1000.000000 12 | 1000.000000 13 | 1000.000000 14 | 1000.000000 15 | 1000.000000 16 | 1000.000000 17 | 1000.000000 18 | 1000.000000 19 | 1000.000000 20 | 1000.000000 21 | 1000.000000 22 | 1000.000000 23 | 1000.000000 24 | 1000.000000 25 | 1000.000000 26 | 1000.000000 27 | 1000.000000 28 | 1000.000000 29 | 1000.000000 30 | 1000.000000 31 | 1000.000000 32 | 1000.000000 33 | 1000.000000 34 | 1000.000000 35 | 1000.000000 36 | 1000.000000 37 | 1000.000000 38 | 1000.000000 39 | 1000.000000 40 | 1000.000000 41 | 1000.000000 42 | 1000.000000 43 | 1000.000000 44 | 1000.000000 45 | 1000.000000 46 | 1000.000000 47 | 1000.000000 48 | 1000.000000 49 | 1000.000000 50 | 1000.000000 51 | 1000.000000 52 | 1000.000000 53 | 1000.000000 54 | 1000.000000 55 | 1000.000000 56 | 1000.000000 57 | 1000.000000 58 | 1000.000000 59 | 1000.000000 60 | 1000.000000 61 | 1000.000000 62 | 1000.000000 63 | 1000.000000 64 | 1000.000000 65 | 1000.000000 66 | 0.000000 67 | 2000.000000 68 | 2000.000000 69 | 2000.000000 70 | 2000.000000 71 | 2000.000000 72 | 2000.000000 73 | 2000.000000 74 | 2000.000000 75 | 2000.000000 76 | 2000.000000 77 | 2000.000000 78 | 2000.000000 79 | 2000.000000 80 | 2000.000000 81 | 2000.000000 82 | 2000.000000 83 | 2000.000000 84 | 2000.000000 85 | 2000.000000 86 | 2000.000000 87 | 2000.000000 88 | 2000.000000 89 | 2000.000000 90 | 2000.000000 91 | 2000.000000 92 | 2000.000000 93 | 2000.000000 94 | 2000.000000 95 | 2000.000000 96 | 2000.000000 97 | 
2000.000000 98 | 2000.000000 99 | 2000.000000 100 | 2000.000000 101 | 2000.000000 102 | 2000.000000 103 | 2000.000000 104 | 2000.000000 105 | 2000.000000 106 | 2000.000000 107 | 2000.000000 108 | 2000.000000 109 | 2000.000000 110 | 2000.000000 111 | 2000.000000 112 | 2000.000000 113 | 2000.000000 114 | 2000.000000 115 | 2000.000000 116 | 2000.000000 117 | 2000.000000 118 | 2000.000000 119 | 2000.000000 120 | 2000.000000 121 | 2000.000000 122 | 2000.000000 123 | 2000.000000 124 | 2000.000000 125 | 2000.000000 126 | 2000.000000 127 | 2000.000000 128 | 2000.000000 129 | 2000.000000 130 | 2000.000000 131 | 0.000000 132 | 5000.000000 133 | 5000.000000 134 | 5000.000000 135 | 5000.000000 136 | 5000.000000 137 | 5000.000000 138 | 5000.000000 139 | 5000.000000 140 | 5000.000000 141 | 5000.000000 142 | 5000.000000 143 | 5000.000000 144 | 5000.000000 145 | 5000.000000 146 | 5000.000000 147 | 5000.000000 148 | 5000.000000 149 | 5000.000000 150 | 5000.000000 151 | 5000.000000 152 | 5000.000000 153 | 5000.000000 154 | 5000.000000 155 | 5000.000000 156 | 5000.000000 157 | 5000.000000 158 | 5000.000000 159 | 5000.000000 160 | 5000.000000 161 | 5000.000000 162 | 5000.000000 163 | 5000.000000 164 | 5000.000000 165 | 5000.000000 166 | 5000.000000 167 | 5000.000000 168 | 5000.000000 169 | 5000.000000 170 | 5000.000000 171 | 5000.000000 172 | 5000.000000 173 | 5000.000000 174 | 5000.000000 175 | 5000.000000 176 | 5000.000000 177 | 5000.000000 178 | 5000.000000 179 | 5000.000000 180 | 5000.000000 181 | 5000.000000 182 | 5000.000000 183 | 5000.000000 184 | 5000.000000 185 | 5000.000000 186 | 5000.000000 187 | 5000.000000 188 | 5000.000000 189 | 5000.000000 190 | 5000.000000 191 | 5000.000000 192 | 5000.000000 193 | 5000.000000 194 | 5000.000000 195 | 5000.000000 196 | 5000.000000 197 | 5000.000000 198 | 5000.000000 199 | 5000.000000 200 | 5000.000000 201 | 5000.000000 202 | 5000.000000 203 | 5000.000000 204 | 5000.000000 205 | 5000.000000 206 | 5000.000000 207 | 5000.000000 208 | 5000.000000 209 | 5000.000000 210 | 5000.000000 211 | 5000.000000 212 | 5000.000000 213 | 5000.000000 214 | 5000.000000 215 | 5000.000000 216 | 5000.000000 217 | 5000.000000 218 | 5000.000000 219 | 5000.000000 220 | 5000.000000 221 | 5000.000000 222 | 5000.000000 223 | 5000.000000 224 | 5000.000000 225 | 5000.000000 226 | 5000.000000 227 | 5000.000000 228 | 5000.000000 229 | 5000.000000 230 | 5000.000000 231 | 5000.000000 232 | 5000.000000 233 | 5000.000000 234 | 5000.000000 235 | 5000.000000 236 | 5000.000000 237 | 5000.000000 238 | 5000.000000 239 | 5000.000000 240 | 5000.000000 241 | 5000.000000 242 | 5000.000000 243 | 5000.000000 244 | 5000.000000 245 | 5000.000000 246 | 5000.000000 247 | 5000.000000 248 | 5000.000000 249 | 5000.000000 250 | 5000.000000 251 | 5000.000000 252 | 5000.000000 253 | 5000.000000 254 | 5000.000000 255 | 5000.000000 256 | 5000.000000 257 | 5000.000000 258 | 5000.000000 259 | 5000.000000 260 | 0.000000 261 | -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE260v4_bvecs.txt: -------------------------------------------------------------------------------- 1 | 0.000000 0.000000 0.000000 2 | 0.753724 0.016936 0.656973 3 | -0.263881 -0.911115 0.316602 4 | -0.147930 -0.988486 0.031809 5 | -0.071212 -0.148481 0.986348 6 | -0.910151 0.399539 0.109519 7 | -0.745893 0.546841 0.380275 8 | -0.372834 0.864766 0.336413 9 | 0.877466 -0.167433 0.449466 10 | 0.220992 -0.098908 0.970247 11 | -0.371832 -0.454406 0.809479 12 | -0.209395 0.976959 0.041284 13 | -0.378376 
-0.156090 0.912397 14 | -0.973145 0.005867 0.230117 15 | 0.039312 -0.649536 0.759314 16 | -0.986641 -0.162852 0.004433 17 | -0.853209 -0.113042 0.509173 18 | 0.556765 0.769545 0.312751 19 | -0.791514 -0.587908 0.166944 20 | 0.261046 -0.413524 0.872269 21 | 0.495448 -0.177518 0.850305 22 | 0.189660 0.580387 0.791947 23 | 0.253528 0.206107 0.945116 24 | 0.773714 0.610564 0.169050 25 | 0.885190 -0.395621 0.244791 26 | 0.201225 0.877285 0.435752 27 | 0.549715 -0.834321 0.041489 28 | -0.070815 0.816991 0.572285 29 | -0.641976 -0.070768 0.763452 30 | -0.062633 0.955934 0.286824 31 | -0.525756 -0.793335 0.306922 32 | -0.137066 0.625054 0.768453 33 | 0.779816 0.311789 0.542839 34 | 0.052864 -0.958966 0.278548 35 | 0.666665 -0.331292 0.667685 36 | -0.381447 -0.684267 0.621511 37 | 0.930343 0.133465 0.341538 38 | -0.573671 -0.818868 0.018911 39 | 0.224291 -0.806923 0.546415 40 | -0.425117 0.693475 0.581694 41 | 0.344688 -0.909779 0.231285 42 | -0.883846 -0.345387 0.315475 43 | -0.109307 -0.817558 0.565376 44 | -0.141863 0.137170 0.980336 45 | -0.076753 -0.429411 0.899842 46 | 0.481821 0.424759 0.766439 47 | 0.540358 0.138827 0.829904 48 | 0.334821 0.927479 0.166364 49 | -0.355187 0.433547 0.828178 50 | 0.984947 -0.140413 0.100815 51 | 0.525507 -0.747832 0.405695 52 | 0.711847 -0.538098 0.451358 53 | -0.631522 0.743805 0.218938 54 | 0.656035 0.558704 0.507413 55 | -0.653276 -0.356728 0.667814 56 | 0.429339 -0.584086 0.688848 57 | -0.616801 0.452855 0.643800 58 | -0.662453 -0.576582 0.478236 59 | -0.747729 0.185786 0.637484 60 | 0.003287 0.379787 0.925068 61 | -0.455577 0.164069 0.874946 62 | 0.375654 0.705437 0.601035 63 | -0.890977 0.252225 0.377549 64 | 0.912680 0.385599 0.135381 65 | 0.774605 -0.626992 0.082874 66 | 0.000000 0.000000 0.000000 67 | 0.216997 -0.956373 0.195607 68 | 0.021717 0.968058 0.249783 69 | -0.480902 -0.278800 0.831266 70 | 0.601956 0.081523 0.794357 71 | 0.048039 0.046767 0.997750 72 | 0.902170 -0.110385 0.417018 73 | -0.460877 0.047847 0.886173 74 | -0.418831 -0.570286 0.706650 75 | 0.421315 -0.819108 0.389301 76 | -0.070732 -0.995531 0.062570 77 | 0.187435 0.736353 0.650117 78 | 0.769678 0.486273 0.413683 79 | 0.788374 -0.614091 0.036871 80 | -0.984070 -0.027010 0.175720 81 | 0.910408 -0.366005 0.192864 82 | -0.726025 -0.153264 0.670371 83 | -0.802566 -0.547560 0.236780 84 | -0.621953 -0.779306 0.076529 85 | 0.098436 -0.287061 0.952841 86 | -0.269967 0.955405 0.119666 87 | 0.057226 0.355573 0.932895 88 | 0.687217 -0.650171 0.324053 89 | -0.881340 -0.269227 0.388274 90 | -0.949305 -0.310821 0.047020 91 | 0.816972 0.167303 0.551875 92 | 0.526118 -0.846615 0.080266 93 | 0.360390 0.928775 0.086583 94 | 0.511451 0.664228 0.545178 95 | 0.820955 0.561502 0.103674 96 | -0.947657 0.287912 0.138030 97 | -0.684135 -0.458195 0.567465 98 | -0.352128 0.843608 0.405378 99 | -0.327322 0.655255 0.680809 100 | -0.176625 -0.427786 0.886455 101 | -0.068569 0.592807 0.802420 102 | 0.934300 0.240986 0.262696 103 | 0.190409 -0.784969 0.589549 104 | 0.609958 0.388079 0.690903 105 | 0.500780 -0.603791 0.620206 106 | -0.243434 0.280235 0.928552 107 | -0.072695 -0.679484 0.730080 108 | -0.027842 -0.917518 0.396719 109 | -0.490922 0.403874 0.771934 110 | 0.760929 -0.413220 0.500236 111 | 0.991150 -0.048395 0.123611 112 | -0.573667 -0.713884 0.401592 113 | -0.345721 -0.908886 0.233244 114 | -0.889213 0.063090 0.453122 115 | 0.345569 0.214263 0.913605 116 | 0.712656 -0.170944 0.680368 117 | 0.214577 -0.541042 0.813161 118 | 0.298474 0.872353 0.387187 119 | 0.355500 -0.107440 0.928481 120 | -0.070804 
0.846047 0.528386 121 | -0.791954 0.354690 0.496995 122 | -0.589680 0.778497 0.214987 123 | 0.325312 0.509142 0.796835 124 | -0.597338 0.611660 0.518709 125 | -0.684573 0.159449 0.711291 126 | -0.210719 -0.110787 0.971249 127 | -0.286036 -0.798334 0.529949 128 | 0.603093 0.756845 0.251922 129 | 0.483924 -0.353772 0.800414 130 | -0.804056 0.536922 0.255361 131 | 0.000000 0.000000 0.000000 132 | 0.408709 -0.221367 0.885412 133 | -0.358885 -0.654593 0.665365 134 | -0.562954 -0.533089 0.631585 135 | -0.009425 0.890079 0.455709 136 | -0.976892 0.133146 0.167196 137 | 0.380826 0.449502 0.808035 138 | -0.998605 -0.038335 0.036307 139 | 0.607101 -0.637736 0.474047 140 | 0.732199 -0.406532 0.546458 141 | -0.740011 0.432792 0.514854 142 | 0.716628 0.605638 0.345900 143 | -0.801804 0.214551 0.557744 144 | 0.580120 0.403028 0.707834 145 | -0.294828 0.467254 0.833517 146 | 0.533704 0.216907 0.817381 147 | -0.677856 -0.734726 0.026250 148 | -0.316091 -0.941307 0.118435 149 | -0.643497 0.317657 0.696424 150 | 0.984484 -0.012869 0.175002 151 | -0.818446 0.491275 0.297985 152 | -0.108579 -0.981390 0.158383 153 | -0.001586 -0.286822 0.957983 154 | -0.888448 0.295129 0.351509 155 | -0.012161 -0.833016 0.553116 156 | 0.926922 -0.064844 0.369609 157 | -0.162390 0.619438 0.768067 158 | 0.824167 0.022873 0.565885 159 | 0.354749 0.774913 0.523127 160 | 0.224027 0.899835 0.374311 161 | -0.832400 -0.017395 0.553903 162 | 0.143278 0.797449 0.586129 163 | -0.265771 0.251276 0.930713 164 | -0.873401 -0.218080 0.435444 165 | 0.753029 0.277338 0.596683 166 | -0.191850 -0.454553 0.869813 167 | -0.368133 -0.056242 0.928070 168 | 0.967479 0.252805 0.008649 169 | -0.594078 -0.334304 0.731650 170 | 0.391991 -0.919944 0.006738 171 | 0.888242 0.173640 0.425295 172 | 0.022685 -0.500214 0.865605 173 | -0.197060 0.976542 0.086794 174 | 0.774756 -0.622068 0.113070 175 | 0.211479 -0.358239 0.909363 176 | -0.395795 -0.847759 0.353059 177 | 0.405459 0.875280 0.263606 178 | -0.489456 0.501759 0.713212 179 | -0.486280 0.756101 0.437998 180 | -0.169124 0.936404 0.307481 181 | 0.161040 0.094631 0.982401 182 | -0.928759 -0.311075 0.201593 183 | -0.491731 -0.716790 0.494381 184 | -0.880620 -0.472361 0.037202 185 | 0.683958 0.499308 0.531877 186 | -0.377468 0.896468 0.232083 187 | 0.246994 0.963366 0.104498 188 | 0.142776 -0.979327 0.143299 189 | -0.290207 0.834727 0.467986 190 | 0.905251 -0.292627 0.308041 191 | -0.920468 0.075990 0.383358 192 | -0.679671 0.645827 0.347785 193 | -0.491403 0.109411 0.864032 194 | -0.462574 0.309484 0.830810 195 | 0.207269 -0.133630 0.969114 196 | -0.040754 0.213174 0.976164 197 | 0.147474 0.330571 0.932188 198 | 0.683857 0.103928 0.722176 199 | 0.504589 0.607367 0.613592 200 | -0.717609 0.687955 0.108420 201 | 0.402567 -0.653492 0.641005 202 | -0.850139 0.520706 0.078287 203 | -0.212006 -0.231463 0.949462 204 | -0.685851 0.088334 0.722361 205 | -0.589422 0.590278 0.551501 206 | -0.073347 0.436001 0.896952 207 | 0.329143 -0.915903 0.229752 208 | -0.234090 -0.800400 0.551871 209 | 0.519949 -0.821981 0.232379 210 | -0.514892 -0.841829 0.161898 211 | -0.937123 0.331498 0.109135 212 | 0.562292 -0.059607 0.824788 213 | 0.476815 0.877467 0.051945 214 | 0.430677 -0.785392 0.444610 215 | 0.719463 -0.144633 0.679304 216 | 0.895011 -0.425260 0.134572 217 | 0.536592 0.735549 0.413565 218 | -0.357816 0.676885 0.643268 219 | -0.780185 -0.600370 0.175690 220 | 0.568351 -0.508928 0.646506 221 | 0.384773 0.022183 0.922745 222 | -0.193074 -0.912616 0.360353 223 | -0.420425 -0.268713 0.866623 224 | 0.410280 -0.437776 0.800014 
225 | -0.746387 -0.376276 0.548929 226 | 0.613897 0.763839 0.199198 227 | 0.783567 0.607546 0.130041 228 | 0.220399 -0.767642 0.601789 229 | 0.792558 -0.498177 0.351671 230 | 0.831105 0.393516 0.392950 231 | 0.974028 -0.209375 0.086204 232 | 0.019512 0.999253 0.033368 233 | 0.593757 -0.303480 0.745219 234 | -0.629739 -0.719759 0.292192 235 | -0.012581 -0.063396 0.997909 236 | 0.235015 -0.570494 0.786959 237 | 0.298656 0.638351 0.709445 238 | 0.053126 -0.688526 0.723264 239 | 0.026799 -0.938072 0.345403 240 | 0.146609 0.520055 0.841457 241 | 0.889132 0.417982 0.186377 242 | 0.342736 0.244214 0.907134 243 | -0.194619 0.044665 0.979861 244 | 0.672919 -0.685744 0.277373 245 | 0.833167 -0.222904 0.506109 246 | -0.154471 -0.654809 0.739840 247 | -0.129671 0.780981 0.610945 248 | -0.559764 0.800177 0.215362 249 | 0.046934 0.674986 0.736336 250 | -0.734620 -0.170957 0.656587 251 | 0.218588 -0.881687 0.418147 252 | -0.580457 -0.098590 0.808301 253 | 0.587263 -0.809029 0.024392 254 | -0.391017 -0.473511 0.789236 255 | -0.961066 -0.101670 0.256935 256 | -0.686041 -0.575863 0.444669 257 | 0.953678 0.200923 0.223895 258 | -0.830658 -0.444580 0.335196 259 | 0.054289 0.968326 0.243718 260 | 0.000000 0.000000 0.000000 261 | -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE534_bvals.txt: -------------------------------------------------------------------------------- 1 | 0.0 2 | 1000.0 3 | 1000.0 4 | 1000.0 5 | 1000.0 6 | 1000.0 7 | 1000.0 8 | 1000.0 9 | 1000.0 10 | 1000.0 11 | 1000.0 12 | 1000.0 13 | 1000.0 14 | 1000.0 15 | 1000.0 16 | 1000.0 17 | 1000.0 18 | 1000.0 19 | 1000.0 20 | 1000.0 21 | 1000.0 22 | 1000.0 23 | 1000.0 24 | 1000.0 25 | 1000.0 26 | 1000.0 27 | 1000.0 28 | 1000.0 29 | 1000.0 30 | 1000.0 31 | 1000.0 32 | 1000.0 33 | 1000.0 34 | 1000.0 35 | 1000.0 36 | 1000.0 37 | 1000.0 38 | 1000.0 39 | 1000.0 40 | 1000.0 41 | 1000.0 42 | 1000.0 43 | 1000.0 44 | 1000.0 45 | 1000.0 46 | 1000.0 47 | 1000.0 48 | 1000.0 49 | 1000.0 50 | 1000.0 51 | 1000.0 52 | 1000.0 53 | 1000.0 54 | 1000.0 55 | 1000.0 56 | 1000.0 57 | 1000.0 58 | 1000.0 59 | 1000.0 60 | 1000.0 61 | 1000.0 62 | 1000.0 63 | 1000.0 64 | 1000.0 65 | 1000.0 66 | 1000.0 67 | 1000.0 68 | 1000.0 69 | 1000.0 70 | 1000.0 71 | 1000.0 72 | 1000.0 73 | 1000.0 74 | 1000.0 75 | 1000.0 76 | 1000.0 77 | 1000.0 78 | 1000.0 79 | 1000.0 80 | 1000.0 81 | 1000.0 82 | 1000.0 83 | 1000.0 84 | 1000.0 85 | 1000.0 86 | 1000.0 87 | 1000.0 88 | 1000.0 89 | 1000.0 90 | 1000.0 91 | 1000.0 92 | 1000.0 93 | 1000.0 94 | 1000.0 95 | 1000.0 96 | 1000.0 97 | 1000.0 98 | 1000.0 99 | 1000.0 100 | 1000.0 101 | 1000.0 102 | 1000.0 103 | 1000.0 104 | 1000.0 105 | 1000.0 106 | 1000.0 107 | 1000.0 108 | 1000.0 109 | 1000.0 110 | 1000.0 111 | 1000.0 112 | 1000.0 113 | 1000.0 114 | 1000.0 115 | 1000.0 116 | 1000.0 117 | 1000.0 118 | 1000.0 119 | 1000.0 120 | 1000.0 121 | 1000.0 122 | 1000.0 123 | 1000.0 124 | 1000.0 125 | 1000.0 126 | 1000.0 127 | 1000.0 128 | 1000.0 129 | 1000.0 130 | 1000.0 131 | 1000.0 132 | 1000.0 133 | 1000.0 134 | 1000.0 135 | 2000.0 136 | 2000.0 137 | 2000.0 138 | 2000.0 139 | 2000.0 140 | 2000.0 141 | 2000.0 142 | 2000.0 143 | 2000.0 144 | 2000.0 145 | 2000.0 146 | 2000.0 147 | 2000.0 148 | 2000.0 149 | 2000.0 150 | 2000.0 151 | 2000.0 152 | 2000.0 153 | 2000.0 154 | 2000.0 155 | 2000.0 156 | 2000.0 157 | 2000.0 158 | 2000.0 159 | 2000.0 160 | 2000.0 161 | 2000.0 162 | 2000.0 163 | 2000.0 164 | 2000.0 165 | 2000.0 166 | 2000.0 167 | 2000.0 168 | 2000.0 169 | 2000.0 170 | 2000.0 171 | 2000.0 172 | 
2000.0 173 | 2000.0 174 | 2000.0 175 | 2000.0 176 | 2000.0 177 | 2000.0 178 | 2000.0 179 | 2000.0 180 | 2000.0 181 | 2000.0 182 | 2000.0 183 | 2000.0 184 | 2000.0 185 | 2000.0 186 | 2000.0 187 | 2000.0 188 | 2000.0 189 | 2000.0 190 | 2000.0 191 | 2000.0 192 | 2000.0 193 | 2000.0 194 | 2000.0 195 | 2000.0 196 | 2000.0 197 | 2000.0 198 | 2000.0 199 | 2000.0 200 | 2000.0 201 | 2000.0 202 | 2000.0 203 | 2000.0 204 | 2000.0 205 | 2000.0 206 | 2000.0 207 | 2000.0 208 | 2000.0 209 | 2000.0 210 | 2000.0 211 | 2000.0 212 | 2000.0 213 | 2000.0 214 | 2000.0 215 | 2000.0 216 | 2000.0 217 | 2000.0 218 | 2000.0 219 | 2000.0 220 | 2000.0 221 | 2000.0 222 | 2000.0 223 | 2000.0 224 | 2000.0 225 | 2000.0 226 | 2000.0 227 | 2000.0 228 | 2000.0 229 | 2000.0 230 | 2000.0 231 | 2000.0 232 | 2000.0 233 | 2000.0 234 | 2000.0 235 | 2000.0 236 | 2000.0 237 | 2000.0 238 | 2000.0 239 | 2000.0 240 | 2000.0 241 | 2000.0 242 | 2000.0 243 | 2000.0 244 | 2000.0 245 | 2000.0 246 | 2000.0 247 | 2000.0 248 | 2000.0 249 | 2000.0 250 | 2000.0 251 | 2000.0 252 | 2000.0 253 | 2000.0 254 | 2000.0 255 | 2000.0 256 | 2000.0 257 | 2000.0 258 | 2000.0 259 | 2000.0 260 | 2000.0 261 | 2000.0 262 | 2000.0 263 | 2000.0 264 | 2000.0 265 | 2000.0 266 | 2000.0 267 | 2000.0 268 | 3000.0 269 | 3000.0 270 | 3000.0 271 | 3000.0 272 | 3000.0 273 | 3000.0 274 | 3000.0 275 | 3000.0 276 | 3000.0 277 | 3000.0 278 | 3000.0 279 | 3000.0 280 | 3000.0 281 | 3000.0 282 | 3000.0 283 | 3000.0 284 | 3000.0 285 | 3000.0 286 | 3000.0 287 | 3000.0 288 | 3000.0 289 | 3000.0 290 | 3000.0 291 | 3000.0 292 | 3000.0 293 | 3000.0 294 | 3000.0 295 | 3000.0 296 | 3000.0 297 | 3000.0 298 | 3000.0 299 | 3000.0 300 | 3000.0 301 | 3000.0 302 | 3000.0 303 | 3000.0 304 | 3000.0 305 | 3000.0 306 | 3000.0 307 | 3000.0 308 | 3000.0 309 | 3000.0 310 | 3000.0 311 | 3000.0 312 | 3000.0 313 | 3000.0 314 | 3000.0 315 | 3000.0 316 | 3000.0 317 | 3000.0 318 | 3000.0 319 | 3000.0 320 | 3000.0 321 | 3000.0 322 | 3000.0 323 | 3000.0 324 | 3000.0 325 | 3000.0 326 | 3000.0 327 | 3000.0 328 | 3000.0 329 | 3000.0 330 | 3000.0 331 | 3000.0 332 | 3000.0 333 | 3000.0 334 | 3000.0 335 | 3000.0 336 | 3000.0 337 | 3000.0 338 | 3000.0 339 | 3000.0 340 | 3000.0 341 | 3000.0 342 | 3000.0 343 | 3000.0 344 | 3000.0 345 | 3000.0 346 | 3000.0 347 | 3000.0 348 | 3000.0 349 | 3000.0 350 | 3000.0 351 | 3000.0 352 | 3000.0 353 | 3000.0 354 | 3000.0 355 | 3000.0 356 | 3000.0 357 | 3000.0 358 | 3000.0 359 | 3000.0 360 | 3000.0 361 | 3000.0 362 | 3000.0 363 | 3000.0 364 | 3000.0 365 | 3000.0 366 | 3000.0 367 | 3000.0 368 | 3000.0 369 | 3000.0 370 | 3000.0 371 | 3000.0 372 | 3000.0 373 | 3000.0 374 | 3000.0 375 | 3000.0 376 | 3000.0 377 | 3000.0 378 | 3000.0 379 | 3000.0 380 | 3000.0 381 | 3000.0 382 | 3000.0 383 | 3000.0 384 | 3000.0 385 | 3000.0 386 | 3000.0 387 | 3000.0 388 | 3000.0 389 | 3000.0 390 | 3000.0 391 | 3000.0 392 | 3000.0 393 | 3000.0 394 | 3000.0 395 | 3000.0 396 | 3000.0 397 | 3000.0 398 | 3000.0 399 | 3000.0 400 | 3000.0 401 | 3000.0 402 | 5000.0 403 | 5000.0 404 | 5000.0 405 | 5000.0 406 | 5000.0 407 | 5000.0 408 | 5000.0 409 | 5000.0 410 | 5000.0 411 | 5000.0 412 | 5000.0 413 | 5000.0 414 | 5000.0 415 | 5000.0 416 | 5000.0 417 | 5000.0 418 | 5000.0 419 | 5000.0 420 | 5000.0 421 | 5000.0 422 | 5000.0 423 | 5000.0 424 | 5000.0 425 | 5000.0 426 | 5000.0 427 | 5000.0 428 | 5000.0 429 | 5000.0 430 | 5000.0 431 | 5000.0 432 | 5000.0 433 | 5000.0 434 | 5000.0 435 | 5000.0 436 | 5000.0 437 | 5000.0 438 | 5000.0 439 | 5000.0 440 | 5000.0 441 | 5000.0 442 | 5000.0 443 | 5000.0 444 | 5000.0 445 | 
5000.0 446 | 5000.0 447 | 5000.0 448 | 5000.0 449 | 5000.0 450 | 5000.0 451 | 5000.0 452 | 5000.0 453 | 5000.0 454 | 5000.0 455 | 5000.0 456 | 5000.0 457 | 5000.0 458 | 5000.0 459 | 5000.0 460 | 5000.0 461 | 5000.0 462 | 5000.0 463 | 5000.0 464 | 5000.0 465 | 5000.0 466 | 5000.0 467 | 5000.0 468 | 5000.0 469 | 5000.0 470 | 5000.0 471 | 5000.0 472 | 5000.0 473 | 5000.0 474 | 5000.0 475 | 5000.0 476 | 5000.0 477 | 5000.0 478 | 5000.0 479 | 5000.0 480 | 5000.0 481 | 5000.0 482 | 5000.0 483 | 5000.0 484 | 5000.0 485 | 5000.0 486 | 5000.0 487 | 5000.0 488 | 5000.0 489 | 5000.0 490 | 5000.0 491 | 5000.0 492 | 5000.0 493 | 5000.0 494 | 5000.0 495 | 5000.0 496 | 5000.0 497 | 5000.0 498 | 5000.0 499 | 5000.0 500 | 5000.0 501 | 5000.0 502 | 5000.0 503 | 5000.0 504 | 5000.0 505 | 5000.0 506 | 5000.0 507 | 5000.0 508 | 5000.0 509 | 5000.0 510 | 5000.0 511 | 5000.0 512 | 5000.0 513 | 5000.0 514 | 5000.0 515 | 5000.0 516 | 5000.0 517 | 5000.0 518 | 5000.0 519 | 5000.0 520 | 5000.0 521 | 5000.0 522 | 5000.0 523 | 5000.0 524 | 5000.0 525 | 5000.0 526 | 5000.0 527 | 5000.0 528 | 5000.0 529 | 5000.0 530 | 5000.0 531 | 5000.0 532 | 5000.0 533 | 5000.0 534 | 5000.0 535 | -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE534_bvecs.txt: -------------------------------------------------------------------------------- 1 | 0.0000 0.0000 0.0000 2 | -0.5841 -0.3812 -0.7166 3 | -0.1508 -0.6113 -0.7769 4 | 0.9762 0.0714 0.2049 5 | 0.1337 -0.9745 -0.1804 6 | -0.1174 -0.8848 0.4510 7 | -0.1776 0.5575 0.8109 8 | -0.1531 0.9723 0.1767 9 | -0.0160 0.5101 0.8600 10 | -0.3116 0.7296 0.6088 11 | 0.7949 -0.4584 -0.3975 12 | 0.3399 -0.7456 0.5732 13 | 0.8508 -0.5199 -0.0757 14 | -0.4567 -0.8048 0.3791 15 | 0.2750 0.4316 0.8591 16 | -0.3407 0.6991 -0.6286 17 | -0.1504 0.9175 0.3682 18 | 0.6937 0.2635 0.6704 19 | -0.7665 0.3038 -0.5658 20 | 0.1657 0.9683 0.1867 21 | 0.6272 0.5549 -0.5465 22 | -0.9976 0.0678 0.0164 23 | -0.9296 0.3687 0.0047 24 | 0.1513 0.6831 -0.7145 25 | -0.8612 0.1678 -0.4797 26 | 0.3063 0.0961 0.9471 27 | -0.4042 -0.5014 0.7650 28 | -0.0974 -0.9868 0.1295 29 | 0.9443 -0.3019 0.1307 30 | 0.1775 0.8243 0.5376 31 | -0.6731 -0.3192 0.6672 32 | -0.8939 -0.3976 0.2071 33 | -0.7941 -0.2980 0.5297 34 | -0.0400 0.2935 -0.9551 35 | -0.7509 -0.1489 0.6434 36 | -0.6444 0.4472 -0.6203 37 | 0.4043 -0.0602 0.9126 38 | -0.9064 -0.2960 -0.3015 39 | -0.0369 -0.5457 0.8372 40 | 0.8414 0.5390 -0.0386 41 | 0.9541 0.2895 -0.0768 42 | -0.1730 0.1619 -0.9715 43 | 0.0616 -0.6734 0.7367 44 | 0.4598 0.7116 -0.5313 45 | -0.0065 0.9491 -0.3150 46 | 0.5293 -0.5060 -0.6811 47 | -0.3699 -0.8971 0.2418 48 | -0.7556 0.4578 -0.4684 49 | 0.4693 0.1257 0.8741 50 | 0.1476 0.9010 -0.4079 51 | -0.4837 0.7117 -0.5094 52 | -0.2241 -0.5353 0.8144 53 | 0.2537 -0.6560 0.7109 54 | -0.1563 0.9652 -0.2095 55 | -0.8611 0.3293 -0.3874 56 | 0.2320 -0.8535 0.4667 57 | 0.0551 -0.4043 0.9130 58 | -0.5127 0.4220 -0.7477 59 | -0.2242 -0.1608 -0.9612 60 | -0.1219 -0.6775 0.7253 61 | 0.6608 0.7009 -0.2683 62 | -0.7092 -0.4653 0.5296 63 | -0.6149 0.3097 0.7253 64 | -0.9447 -0.0851 0.3167 65 | -0.4542 0.7338 0.5052 66 | -0.1695 -0.9843 -0.0488 67 | -0.9747 0.0618 0.2148 68 | -0.6168 -0.6550 -0.4365 69 | -0.4597 -0.8856 0.0666 70 | 0.4382 -0.3679 0.8202 71 | 0.6713 -0.7166 0.1893 72 | -0.9542 -0.2968 0.0382 73 | 0.1590 0.8065 -0.5695 74 | -0.8250 -0.3342 -0.4557 75 | -0.3306 -0.3751 0.8660 76 | -0.3207 0.9406 -0.1119 77 | 0.0007 -0.8579 -0.5139 78 | 0.1936 -0.9302 0.3119 79 | -0.8210 -0.4277 0.3781 80 | 0.6811 
-0.6228 -0.3850 81 | 0.9227 -0.2392 0.3023 82 | -0.0004 0.9935 -0.1138 83 | 0.8149 0.1995 0.5442 84 | 0.5958 0.6811 -0.4257 85 | 0.9324 -0.3583 -0.0473 86 | 0.1549 0.9879 -0.0089 87 | -0.9578 -0.1130 -0.2645 88 | 0.7807 -0.5719 -0.2520 89 | 0.8469 0.4075 -0.3415 90 | -0.8358 -0.4659 -0.2907 91 | 0.1742 0.9126 0.3699 92 | 0.0427 -0.0958 0.9945 93 | -0.5158 -0.3510 0.7815 94 | 0.8653 -0.4134 0.2834 95 | 0.9182 0.1531 0.3654 96 | 0.5056 -0.7145 0.4836 97 | -0.5823 0.1375 0.8013 98 | 0.5752 -0.6158 -0.5385 99 | -0.2048 0.3498 -0.9142 100 | -0.5972 -0.7582 -0.2617 101 | 0.7346 0.5540 -0.3917 102 | 0.0663 -0.9343 -0.3502 103 | 0.8809 -0.2917 -0.3727 104 | 0.7503 -0.5068 0.4246 105 | -0.9503 -0.2327 0.2070 106 | 0.2720 -0.7553 -0.5963 107 | -0.7693 -0.6374 0.0439 108 | 0.3122 0.7099 -0.6314 109 | -0.6771 -0.0072 0.7359 110 | 0.7393 0.3893 0.5494 111 | -0.1337 0.3871 0.9123 112 | -0.0221 -0.1964 -0.9803 113 | 0.8815 -0.4130 -0.2288 114 | 0.7353 0.6717 0.0900 115 | 0.5655 -0.0306 0.8242 116 | -0.7136 0.6009 -0.3602 117 | 0.0986 0.3949 0.9134 118 | -0.9267 0.2067 0.3140 119 | 0.0019 0.9963 0.0859 120 | 0.2656 -0.3670 -0.8915 121 | 0.0959 -0.7619 -0.6405 122 | 0.4001 -0.9161 0.0273 123 | 0.9805 0.1850 0.0656 124 | -0.6914 -0.7134 -0.1141 125 | 0.9844 -0.0597 -0.1657 126 | 0.3522 -0.5198 0.7783 127 | -0.4654 0.0706 -0.8823 128 | -0.3536 -0.7792 -0.5176 129 | 0.7623 0.4073 -0.5030 130 | 0.1582 -0.5402 0.8266 131 | -0.6278 0.7778 0.0306 132 | -0.2231 -0.7829 0.5808 133 | 0.3123 0.4191 -0.8525 134 | 0.1463 -0.5055 -0.8503 135 | -0.3191 0.9448 0.0739 136 | -0.2800 -0.9551 0.0969 137 | 0.9363 0.2697 0.2248 138 | 0.7212 -0.2018 -0.6627 139 | 0.3390 -0.9298 -0.1431 140 | -0.9589 -0.2500 -0.1340 141 | -0.8545 0.5105 -0.0963 142 | 0.4816 0.8016 -0.3543 143 | -0.1511 0.8263 0.5425 144 | 0.1734 0.7067 0.6859 145 | 0.3101 -0.0881 -0.9466 146 | 0.1494 0.5436 -0.8260 147 | -0.4290 -0.8705 -0.2412 148 | -0.4716 0.8685 0.1526 149 | 0.4455 -0.6185 0.6473 150 | 0.6888 -0.4268 0.5861 151 | -0.6128 0.6026 0.5112 152 | 0.8234 -0.3443 0.4510 153 | -0.1601 0.9870 -0.0166 154 | -0.9694 0.2185 0.1123 155 | -0.0865 -0.7455 -0.6609 156 | -0.3328 0.5902 0.7355 157 | 0.2625 -0.9536 0.1476 158 | 0.0111 0.7861 0.6179 159 | -0.1603 0.8996 -0.4061 160 | 0.0966 -0.0632 -0.9933 161 | 0.1857 -0.2182 -0.9581 162 | 0.6143 -0.3080 0.7265 163 | -0.3023 -0.8768 -0.3741 164 | -0.6905 -0.6929 0.2078 165 | -0.2358 -0.4573 -0.8575 166 | 0.6254 0.1243 0.7704 167 | 0.0043 0.0981 -0.9952 168 | -0.7171 -0.3628 -0.5951 169 | 0.4897 0.7085 0.5081 170 | 0.8491 -0.1594 -0.5037 171 | -0.5905 -0.6198 0.5169 172 | 0.9998 0.0016 0.0218 173 | 0.7934 -0.0881 0.6023 174 | 0.4549 0.2656 -0.8500 175 | -0.3106 0.2622 0.9137 176 | -0.5805 -0.4763 0.6604 177 | 0.9482 0.0826 -0.3068 178 | -0.3578 -0.9306 -0.0776 179 | -0.7882 -0.1871 -0.5863 180 | -0.6202 0.4653 0.6315 181 | 0.1488 0.9649 -0.2162 182 | 0.5236 -0.8467 -0.0945 183 | 0.0326 -0.6361 -0.7710 184 | -0.1133 -0.0299 -0.9931 185 | -0.2399 -0.9460 -0.2179 186 | -0.9726 0.2138 -0.0916 187 | -0.4745 0.4540 0.7541 188 | 0.3365 0.7223 0.6042 189 | -0.7450 0.6394 -0.1901 190 | 0.9834 -0.1806 -0.0167 191 | 0.2193 0.0661 -0.9734 192 | -0.3521 0.5553 -0.7534 193 | 0.3953 -0.6228 -0.6751 194 | 0.6563 -0.6549 0.3746 195 | 0.7246 0.6834 -0.0888 196 | 0.3311 0.9031 0.2736 197 | 0.3945 -0.8883 0.2351 198 | -0.3503 -0.0742 0.9337 199 | -0.3425 0.2126 -0.9152 200 | 0.7520 -0.2625 0.6047 201 | 0.7821 -0.0355 -0.6221 202 | 0.6730 -0.1252 0.7290 203 | 0.3232 0.5900 0.7399 204 | 0.9401 -0.0469 0.3378 205 | -0.1856 0.6681 
-0.7206 206 | 0.2156 -0.6405 -0.7370 207 | 0.6375 -0.3622 -0.6800 208 | -0.1185 -0.9215 -0.3698 209 | -0.2735 -0.7059 -0.6534 210 | -0.5367 -0.8380 -0.0984 211 | 0.2980 0.9484 -0.1083 212 | 0.7803 -0.5734 0.2497 213 | -0.9253 0.0285 -0.3781 214 | 0.6276 -0.7458 -0.2234 215 | -0.6070 -0.1820 0.7736 216 | -0.3126 0.4301 0.8469 217 | 0.5994 0.4337 0.6728 218 | 0.2143 0.2538 0.9432 219 | -0.4847 -0.7791 -0.3975 220 | -0.2910 -0.8591 0.4211 221 | -0.2527 0.0723 0.9648 222 | 0.8890 0.0198 -0.4574 223 | 0.9352 -0.1136 -0.3355 224 | -0.8067 0.0270 0.5904 225 | 0.8610 0.4275 0.2756 226 | -0.7562 0.6542 -0.0137 227 | 0.8685 0.2420 -0.4325 228 | -0.0446 -0.4887 -0.8713 229 | -0.3323 -0.5752 -0.7475 230 | 0.3823 0.2851 0.8789 231 | -0.9158 0.3530 -0.1918 232 | 0.5104 -0.2040 0.8354 233 | -0.8102 -0.5722 -0.1270 234 | 0.1820 -0.8596 -0.4774 235 | 0.5428 -0.7829 0.3041 236 | 0.9213 0.3822 0.0712 237 | -0.3064 0.9048 -0.2958 238 | 0.8710 -0.1682 0.4615 239 | -0.3119 0.8181 -0.4832 240 | -0.9815 0.0522 -0.1842 241 | 0.6024 0.2693 -0.7514 242 | -0.4646 -0.6137 0.6384 243 | 0.5836 0.7985 -0.1476 244 | -0.5989 -0.7109 0.3687 245 | -0.4472 0.8317 0.3290 246 | -0.4810 0.8764 -0.0242 247 | -0.1675 0.7952 -0.5827 248 | 0.2517 -0.3946 0.8837 249 | 0.4554 -0.8511 -0.2613 250 | 0.7771 -0.6258 0.0675 251 | 0.0258 0.2453 0.9691 252 | -0.2982 -0.6581 0.6914 253 | 0.8524 0.3190 0.4143 254 | -0.0557 0.0632 0.9964 255 | 0.8385 0.5276 0.1358 256 | 0.7464 0.0673 0.6621 257 | -0.0501 -0.2523 0.9663 258 | -0.0020 -0.9525 0.3046 259 | 0.5454 -0.7457 -0.3828 260 | -0.7229 0.1675 0.6704 261 | -0.3043 0.9166 0.2595 262 | -0.8243 0.4919 0.2803 263 | -0.5428 -0.8130 0.2106 264 | 0.7920 0.5684 -0.2228 265 | -0.5989 0.7294 -0.3308 266 | -0.8390 0.1959 0.5076 267 | 0.0118 0.8886 0.4586 268 | -0.7441 0.3320 0.5797 269 | -0.4238 0.1093 0.8991 270 | 0.1466 -0.7772 0.6119 271 | 0.2997 0.9025 -0.3095 272 | -0.4687 0.2892 0.8347 273 | 0.5685 -0.2275 -0.7906 274 | -0.8493 0.3458 0.3989 275 | -0.4068 -0.1117 -0.9067 276 | 0.3341 -0.2331 0.9133 277 | 0.1387 0.3888 -0.9108 278 | -0.8625 -0.1178 0.4921 279 | 0.4870 -0.0768 -0.8700 280 | 0.9307 0.2556 -0.2617 281 | 0.7352 0.2822 -0.6164 282 | -0.4073 -0.7410 0.5339 283 | 0.2076 -0.9780 -0.0188 284 | -0.7231 -0.5790 0.3768 285 | 0.8699 0.0336 0.4922 286 | -0.6449 0.2816 -0.7105 287 | 0.3603 -0.8335 -0.4190 288 | 0.3375 0.8282 0.4475 289 | 0.3219 0.8217 -0.4703 290 | -0.1904 -0.9409 0.2800 291 | -0.0087 0.7534 -0.6575 292 | 0.3216 0.5732 -0.7537 293 | 0.3880 -0.8310 0.3986 294 | -0.9108 0.3597 0.2024 295 | -0.4900 -0.5379 -0.6860 296 | 0.1573 0.5632 0.8112 297 | 0.6274 0.6708 0.3954 298 | -0.5738 -0.0699 -0.8160 299 | -0.4795 0.6036 0.6370 300 | 0.5337 -0.8371 0.1202 301 | -0.7267 -0.6233 -0.2888 302 | 0.7581 0.5042 0.4136 303 | 0.7447 0.6101 0.2705 304 | -0.9062 0.0560 0.4192 305 | -0.4187 -0.4133 -0.8086 306 | 0.4793 0.5817 -0.6571 307 | 0.1269 0.0822 0.9885 308 | -0.5017 0.2502 -0.8280 309 | -0.0386 -0.7941 0.6066 310 | 0.4669 -0.3805 -0.7983 311 | 0.5370 -0.4719 0.6992 312 | -0.7127 0.6132 0.3406 313 | -0.8762 -0.4800 0.0423 314 | 0.0855 -0.9853 0.1478 315 | 0.6212 0.7513 0.2231 316 | 0.9851 0.1169 -0.1258 317 | 0.0078 0.9601 0.2795 318 | -0.0535 -0.9779 -0.2020 319 | 0.7370 -0.6664 -0.1127 320 | -0.7436 0.4769 0.4686 321 | 0.6206 0.4257 -0.6585 322 | 0.6868 -0.4827 -0.5434 323 | -0.4964 0.5783 -0.6474 324 | -0.7456 0.6458 0.1643 325 | 0.3953 0.0850 -0.9146 326 | -0.8062 -0.5525 0.2117 327 | 0.2959 0.2477 -0.9226 328 | -0.9893 -0.0958 0.1103 329 | 0.4727 0.4335 -0.7672 330 | -0.6210 0.1073 
-0.7765 331 | -0.1958 0.5176 -0.8329 332 | -0.1470 0.2204 0.9643 333 | -0.1784 -0.8321 -0.5252 334 | 0.4829 0.5880 0.6490 335 | -0.9013 -0.4150 -0.1242 336 | -0.2471 -0.2369 0.9396 337 | 0.9792 -0.1180 0.1650 338 | -0.6275 -0.5268 -0.5734 339 | -0.7546 0.1360 -0.6420 340 | -0.8372 -0.0075 -0.5468 341 | 0.1261 0.2206 -0.9672 342 | 0.9500 -0.2424 -0.1968 343 | 0.2274 -0.0809 0.9704 344 | -0.3640 0.3921 -0.8448 345 | -0.7436 -0.5028 -0.4407 346 | 0.6976 0.1118 -0.7077 347 | -0.9366 0.1941 -0.2917 348 | -0.6611 -0.2127 -0.7195 349 | -0.3266 -0.2897 -0.8996 350 | -0.6193 0.5978 -0.5090 351 | 0.3172 0.9443 0.0876 352 | 0.3360 -0.5033 -0.7961 353 | -0.8891 -0.2611 0.3760 354 | 0.3930 -0.2396 -0.8878 355 | -0.4493 0.8141 -0.3679 356 | -0.0224 0.6178 -0.7860 357 | -0.5797 0.7231 0.3756 358 | 0.6061 -0.5787 0.5457 359 | 0.4874 0.8022 0.3449 360 | -0.4387 -0.2164 0.8722 361 | 0.0591 -0.8833 0.4650 362 | 0.6471 -0.7624 0.0026 363 | -0.4778 -0.6674 -0.5712 364 | 0.4380 0.8759 -0.2024 365 | 0.8160 0.1379 -0.5614 366 | -0.4743 0.8576 -0.1990 367 | 0.2552 -0.9179 -0.3039 368 | -0.5057 -0.2455 -0.8270 369 | -0.6294 -0.7755 0.0490 370 | 0.4416 0.4487 0.7769 371 | -0.8292 0.4825 -0.2822 372 | -0.1412 -0.3980 0.9064 373 | 0.8752 -0.4719 0.1067 374 | 0.0668 -0.3550 -0.9325 375 | -0.6238 0.7673 -0.1487 376 | -0.1330 -0.3296 -0.9347 377 | -0.2952 0.0254 -0.9551 378 | 0.6078 0.7932 0.0375 379 | -0.1545 -0.0910 0.9838 380 | -0.6127 0.7630 0.2060 381 | -0.0088 0.8653 -0.5012 382 | -0.0304 0.4640 -0.8853 383 | 0.6302 0.5586 0.5392 384 | 0.4586 0.8885 -0.0172 385 | 0.4777 0.8625 0.1668 386 | -0.3046 0.8415 0.4462 387 | 0.5544 0.0974 -0.8265 388 | -0.8532 0.5134 0.0924 389 | 0.5435 0.2923 0.7869 390 | 0.0024 0.6582 0.7528 391 | -0.1590 0.7038 0.6924 392 | 0.6449 -0.0602 -0.7619 393 | -0.7190 -0.0384 -0.6939 394 | 0.0174 -0.9995 -0.0266 395 | -0.8878 -0.1530 -0.4341 396 | 0.1469 -0.2514 0.9567 397 | -0.9931 -0.0963 -0.0668 398 | -0.5224 -0.0401 0.8518 399 | 0.4389 -0.7240 -0.5321 400 | 0.8908 0.4239 -0.1635 401 | 0.7788 -0.3252 -0.5364 402 | -0.5841 -0.3812 -0.7166 403 | -0.1508 -0.6113 -0.7769 404 | 0.9762 0.0714 0.2049 405 | 0.1337 -0.9745 -0.1804 406 | -0.1174 -0.8848 0.4510 407 | -0.1776 0.5575 0.8109 408 | -0.1531 0.9723 0.1767 409 | -0.0160 0.5101 0.8600 410 | -0.3116 0.7296 0.6088 411 | 0.7949 -0.4584 -0.3975 412 | 0.3399 -0.7456 0.5732 413 | 0.8508 -0.5199 -0.0757 414 | -0.4567 -0.8048 0.3791 415 | 0.2750 0.4316 0.8591 416 | -0.3407 0.6991 -0.6286 417 | -0.1504 0.9175 0.3682 418 | 0.6937 0.2635 0.6704 419 | -0.7665 0.3038 -0.5658 420 | 0.1657 0.9683 0.1867 421 | 0.6272 0.5549 -0.5465 422 | -0.9976 0.0678 0.0164 423 | -0.9296 0.3687 0.0047 424 | 0.1513 0.6831 -0.7145 425 | -0.8612 0.1678 -0.4797 426 | 0.3063 0.0961 0.9471 427 | -0.4042 -0.5014 0.7650 428 | -0.0974 -0.9868 0.1295 429 | 0.9443 -0.3019 0.1307 430 | 0.1775 0.8243 0.5376 431 | -0.6731 -0.3192 0.6672 432 | -0.8939 -0.3976 0.2071 433 | -0.7941 -0.2980 0.5297 434 | -0.0400 0.2935 -0.9551 435 | -0.7509 -0.1489 0.6434 436 | -0.6444 0.4472 -0.6203 437 | 0.4043 -0.0602 0.9126 438 | -0.9064 -0.2960 -0.3015 439 | -0.0369 -0.5457 0.8372 440 | 0.8414 0.5390 -0.0386 441 | 0.9541 0.2895 -0.0768 442 | -0.1730 0.1619 -0.9715 443 | 0.0616 -0.6734 0.7367 444 | 0.4598 0.7116 -0.5313 445 | -0.0065 0.9491 -0.3150 446 | 0.5293 -0.5060 -0.6811 447 | -0.3699 -0.8971 0.2418 448 | -0.7556 0.4578 -0.4684 449 | 0.4693 0.1257 0.8741 450 | 0.1476 0.9010 -0.4079 451 | -0.4837 0.7117 -0.5094 452 | -0.2241 -0.5353 0.8144 453 | 0.2537 -0.6560 0.7109 454 | -0.1563 0.9652 -0.2095 455 
| -0.8611 0.3293 -0.3874 456 | 0.2320 -0.8535 0.4667 457 | 0.0551 -0.4043 0.9130 458 | -0.5127 0.4220 -0.7477 459 | -0.2242 -0.1608 -0.9612 460 | -0.1219 -0.6775 0.7253 461 | 0.6608 0.7009 -0.2683 462 | -0.7092 -0.4653 0.5296 463 | -0.6149 0.3097 0.7253 464 | -0.9447 -0.0851 0.3167 465 | -0.4542 0.7338 0.5052 466 | -0.1695 -0.9843 -0.0488 467 | -0.9747 0.0618 0.2148 468 | -0.6168 -0.6550 -0.4365 469 | -0.4597 -0.8856 0.0666 470 | 0.4382 -0.3679 0.8202 471 | 0.6713 -0.7166 0.1893 472 | -0.9542 -0.2968 0.0382 473 | 0.1590 0.8065 -0.5695 474 | -0.8250 -0.3342 -0.4557 475 | -0.3306 -0.3751 0.8660 476 | -0.3207 0.9406 -0.1119 477 | 0.0007 -0.8579 -0.5139 478 | 0.1936 -0.9302 0.3119 479 | -0.8210 -0.4277 0.3781 480 | 0.6811 -0.6228 -0.3850 481 | 0.9227 -0.2392 0.3023 482 | -0.0004 0.9935 -0.1138 483 | 0.8149 0.1995 0.5442 484 | 0.5958 0.6811 -0.4257 485 | 0.9324 -0.3583 -0.0473 486 | 0.1549 0.9879 -0.0089 487 | -0.9578 -0.1130 -0.2645 488 | 0.7807 -0.5719 -0.2520 489 | 0.8469 0.4075 -0.3415 490 | -0.8358 -0.4659 -0.2907 491 | 0.1742 0.9126 0.3699 492 | 0.0427 -0.0958 0.9945 493 | -0.5158 -0.3510 0.7815 494 | 0.8653 -0.4134 0.2834 495 | 0.9182 0.1531 0.3654 496 | 0.5056 -0.7145 0.4836 497 | -0.5823 0.1375 0.8013 498 | 0.5752 -0.6158 -0.5385 499 | -0.2048 0.3498 -0.9142 500 | -0.5972 -0.7582 -0.2617 501 | 0.7346 0.5540 -0.3917 502 | 0.0663 -0.9343 -0.3502 503 | 0.8809 -0.2917 -0.3727 504 | 0.7503 -0.5068 0.4246 505 | -0.9503 -0.2327 0.2070 506 | 0.2720 -0.7553 -0.5963 507 | -0.7693 -0.6374 0.0439 508 | 0.3122 0.7099 -0.6314 509 | -0.6771 -0.0072 0.7359 510 | 0.7393 0.3893 0.5494 511 | -0.1337 0.3871 0.9123 512 | -0.0221 -0.1964 -0.9803 513 | 0.8815 -0.4130 -0.2288 514 | 0.7353 0.6717 0.0900 515 | 0.5655 -0.0306 0.8242 516 | -0.7136 0.6009 -0.3602 517 | 0.0986 0.3949 0.9134 518 | -0.9267 0.2067 0.3140 519 | 0.0019 0.9963 0.0859 520 | 0.2656 -0.3670 -0.8915 521 | 0.0959 -0.7619 -0.6405 522 | 0.4001 -0.9161 0.0273 523 | 0.9805 0.1850 0.0656 524 | -0.6914 -0.7134 -0.1141 525 | 0.9844 -0.0597 -0.1657 526 | 0.3522 -0.5198 0.7783 527 | -0.4654 0.0706 -0.8823 528 | -0.3536 -0.7792 -0.5176 529 | 0.7623 0.4073 -0.5030 530 | 0.1582 -0.5402 0.8266 531 | -0.6278 0.7778 0.0306 532 | -0.2231 -0.7829 0.5808 533 | 0.3123 0.4191 -0.8525 534 | 0.1463 -0.5055 -0.8503 535 | -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE_bvals.txt: -------------------------------------------------------------------------------- 1 | 0 2 | 1000 3 | 1000 4 | 1000 5 | 1000 6 | 1000 7 | 1000 8 | 1000 9 | 1000 10 | 1000 11 | 1000 12 | 1000 13 | 1000 14 | 1000 15 | 1000 16 | 1000 17 | 1000 18 | 1000 19 | 1000 20 | 1000 21 | 1000 22 | 1000 23 | 1000 24 | 1000 25 | 1000 26 | 1000 27 | 1000 28 | 1000 29 | 1000 30 | 1000 31 | 1000 32 | 1000 33 | 1000 34 | 1000 35 | 1000 36 | 1000 37 | 1000 38 | 1000 39 | 1000 40 | 1000 41 | 1000 42 | 1000 43 | 1000 44 | 1000 45 | 1000 46 | 1000 47 | 1000 48 | 1000 49 | 1000 50 | 1000 51 | 1000 52 | 1000 53 | 1000 54 | 1000 55 | 1000 56 | 1000 57 | 1000 58 | 1000 59 | 1000 60 | 1000 61 | 1000 62 | 1000 63 | 1000 64 | 1000 65 | 1000 66 | 0 67 | 2000 68 | 2000 69 | 2000 70 | 2000 71 | 2000 72 | 2000 73 | 2000 74 | 2000 75 | 2000 76 | 2000 77 | 2000 78 | 2000 79 | 2000 80 | 2000 81 | 2000 82 | 2000 83 | 0 84 | 2000 85 | 2000 86 | 2000 87 | 2000 88 | 2000 89 | 2000 90 | 2000 91 | 2000 92 | 2000 93 | 2000 94 | 2000 95 | 2000 96 | 2000 97 | 2000 98 | 2000 99 | 2000 100 | 0 101 | 3000 102 | 3000 103 | 3000 104 | 3000 105 | 3000 106 | 3000 107 | 3000 108 | 
3000 109 | 3000 110 | 3000 111 | 3000 112 | 3000 113 | 3000 114 | 3000 115 | 3000 116 | 3000 117 | 0 118 | 3000 119 | 3000 120 | 3000 121 | 3000 122 | 3000 123 | 3000 124 | 3000 125 | 3000 126 | 3000 127 | 3000 128 | 3000 129 | 3000 130 | 3000 131 | 3000 132 | 3000 133 | 3000 134 | 0 135 | 5000 136 | 5000 137 | 5000 138 | 5000 139 | 5000 140 | 5000 141 | 5000 142 | 5000 143 | 5000 144 | 5000 145 | 5000 146 | 5000 147 | 5000 148 | 5000 149 | 5000 150 | 5000 151 | 0 152 | 5000 153 | 5000 154 | 5000 155 | 5000 156 | 5000 157 | 5000 158 | 5000 159 | 5000 160 | 5000 161 | 5000 162 | 5000 163 | 5000 164 | 5000 165 | 5000 166 | 5000 167 | 5000 168 | -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE_bvecs.txt: -------------------------------------------------------------------------------- 1 | 0 -1 -0.00199904 0.0260078 -0.591135 0.23607 0.893021 -0.796184 -0.233963 -0.935686 -0.505827 -0.34622 -0.456969 0.486997 0.617845 -0.576984 0.826695 -0.893712 -0.290101 -0.115951 0.800182 -0.513982 0.788549 -0.94928 -0.232965 -0.0209978 -0.216931 -0.774003 0.160927 0.147034 -0.888142 0.56197 0.380808 -0.305999 0.332087 0.963226 0.959502 -0.452965 0.773133 -0.709081 0.692769 -0.681659 0.141995 0.740169 0.103005 -0.584037 0.0880087 0.552263 -0.838158 -0.362996 0.184062 0.720938 -0.4331 -0.502114 -0.170945 -0.462968 -0.38503 0.713102 -0.259924 -0.000999553 -0.0370025 -0.57032 0.282105 -0.721098 -0.266985 0 -1 -0.494198 0.12001 -0.919038 0.11197 0.914305 -0.680843 -0.0669785 -0.917623 -0.443231 -0.215974 -0.480033 0.549943 0.700687 -0.429107 0.698789 0 -0.725825 -0.389831 -0.172056 0.609674 -0.59876 0.727829 -0.882959 -0.32021 -0.0440175 -0.188896 -0.732164 -0.308976 -0.285958 -0.859154 0.106973 0.297951 0 -1 -0.494198 0.12001 -0.919038 0.111971 0.914305 -0.680843 -0.0669779 -0.917623 -0.443231 -0.215975 -0.480032 0.549942 0.700688 -0.429107 0.69879 0 -0.725825 -0.389831 -0.172057 0.609674 -0.598759 0.727829 -0.882959 -0.32021 -0.0440172 -0.188897 -0.732165 -0.308975 -0.285959 -0.859154 0.106973 0.297951 0 -1 -0.494 0.12 -0.919 0.112 0.914 -0.681 -0.067 -0.918 -0.443 -0.216 -0.48 0.55 0.701 -0.429 0.699 0 -0.726 -0.39 -0.172 0.61 -0.599 0.728 -0.883 -0.32 -0.044 -0.189 -0.732 -0.309 -0.286 -0.859 0.107 0.298 2 | 0 0 0.999998 0.649171 -0.766176 -0.524158 -0.259005 0.129031 0.929855 0.139953 -0.84471 -0.847539 -0.630956 -0.388997 0.672831 -0.104996 -0.520807 -0.0399881 -0.541189 -0.962591 0.403091 0.839969 0.152913 -0.233069 0.78288 -0.187989 -0.955701 -0.604003 0.35584 0.731174 0.417066 0.231989 0.142929 -0.199001 -0.130034 -0.265063 0.205108 -0.888931 0.628108 0.408047 0.0239917 0.528735 -0.724977 0.388089 0.822044 -0.596037 -0.335032 -0.792377 -0.458086 -0.560993 0.392134 -0.692941 0.682159 0.690157 -0.508833 0.42297 -0.809064 -0.247036 0.884737 0.0770013 -0.902057 -0.303171 0.145054 0.608083 0.959945 0 0 0.869349 0.64105 -0.331014 -0.630833 -0.23608 0.30293 0.99767 0.152938 -0.890464 -0.926892 -0.671045 -0.47795 0.692692 -0.00700146 -0.639808 0 0.0909773 -0.248891 -0.881289 0.640657 0.688724 0.221948 -0.397981 0.54836 -0.268107 -0.876519 -0.333076 -0.549958 0.857876 0.504091 0.214945 -0.17797 0 0 0.869349 0.641049 -0.331013 -0.630833 -0.236079 0.302931 0.99767 0.152937 -0.890464 -0.926892 -0.671045 -0.47795 0.692691 -0.00700152 -0.639808 0 0.0909778 -0.248892 -0.881289 0.640657 0.688724 0.221947 -0.397981 0.54836 -0.268107 -0.876518 -0.333076 -0.549957 0.857876 0.50409 0.214944 -0.17797 0 0 0.869 0.641 -0.331 -0.631 -0.236 0.303 0.998 0.153 -0.89 
-0.927 -0.671 -0.478 0.693 -0.007 -0.64 0 0.091 -0.249 -0.881 0.641 0.689 0.222 -0.398 0.548 -0.268 -0.877 -0.333 -0.55 0.858 0.504 0.215 -0.178 3 | 0 0 0 0.760198 0.252058 0.818247 0.368009 0.591136 0.283956 0.323891 -0.174939 -0.402256 -0.626956 0.781995 0.406898 -0.809978 0.21292 -0.446855 -0.789275 -0.244895 -0.444101 0.173993 -0.595658 0.211062 0.576911 -0.981947 0.198939 0.190002 0.920587 -0.666158 0.19303 -0.793959 0.913541 -0.931001 0.934243 0.0440094 0.1931 0.0679958 0.0880156 0.575067 0.72076 -0.505747 0.673978 0.549124 0.56003 0.551035 0.938087 0.259123 -0.296055 0.74399 -0.901306 0.0089994 -0.589137 0.521119 -0.843722 0.778946 0.444035 0.656094 -0.386886 -0.99703 0.430027 -0.763428 -0.948354 0.332045 -0.0849948 0 0 0 0.758059 -0.214008 0.767797 0.32911 0.666847 -0.0129958 0.366849 0.103054 -0.306964 -0.565037 0.684928 0.170924 -0.903226 0.319904 0 -0.681836 -0.886614 -0.440144 -0.466751 0.408835 -0.648848 0.248988 0.772507 -0.962383 0.442757 -0.594135 0.775938 -0.426939 0.0880157 -0.97075 0.937844 0 0 0 0.758059 -0.214009 0.767797 0.32911 0.666847 -0.012996 0.366849 0.103053 -0.306964 -0.565038 0.684929 0.170924 -0.903226 0.319904 0 -0.681836 -0.886614 -0.440144 -0.466751 0.408836 -0.648848 0.248989 0.772507 -0.962383 0.442757 -0.594133 0.775939 -0.426938 0.0880164 -0.97075 0.937844 0 0 0 0.758 -0.214 0.768 0.329 0.667 -0.013 0.367 0.103 -0.307 -0.565 0.685 0.171 -0.903 0.32 0 -0.682 -0.887 -0.44 -0.467 0.409 -0.649 0.249 0.772 -0.962 0.443 -0.594 0.776 -0.427 0.088 -0.971 0.938 4 | -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE_scheme.txt: -------------------------------------------------------------------------------- 1 | VERSION: 1 2 | -1.000000 0.000000 0.000000 0.030796 0.035660 0.022932 0.077400 3 | -0.001999 0.999998 0.000000 0.030796 0.035660 0.022932 0.077400 4 | 0.026008 0.649171 0.760198 0.030796 0.035660 0.022932 0.077400 5 | -0.591135 -0.766176 0.252058 0.030796 0.035660 0.022932 0.077400 6 | 0.236070 -0.524158 0.818247 0.030796 0.035660 0.022932 0.077400 7 | 0.893021 -0.259005 0.368009 0.030796 0.035660 0.022932 0.077400 8 | -0.796184 0.129031 0.591136 0.030796 0.035660 0.022932 0.077400 9 | -0.233963 0.929855 0.283956 0.030796 0.035660 0.022932 0.077400 10 | -0.935686 0.139953 0.323891 0.030796 0.035660 0.022932 0.077400 11 | -0.505827 -0.844710 -0.174939 0.030796 0.035660 0.022932 0.077400 12 | -0.346220 -0.847539 -0.402256 0.030796 0.035660 0.022932 0.077400 13 | -0.456969 -0.630956 -0.626956 0.030796 0.035660 0.022932 0.077400 14 | 0.486997 -0.388997 0.781995 0.030796 0.035660 0.022932 0.077400 15 | 0.617845 0.672831 0.406898 0.030796 0.035660 0.022932 0.077400 16 | -0.576984 -0.104996 -0.809978 0.030796 0.035660 0.022932 0.077400 17 | 0.826695 -0.520807 0.212920 0.030796 0.035660 0.022932 0.077400 18 | -0.893712 -0.039988 -0.446855 0.030796 0.035660 0.022932 0.077400 19 | -0.290101 -0.541189 -0.789275 0.030796 0.035660 0.022932 0.077400 20 | -0.115951 -0.962591 -0.244895 0.030796 0.035660 0.022932 0.077400 21 | 0.800182 0.403091 -0.444101 0.030796 0.035660 0.022932 0.077400 22 | -0.513982 0.839969 0.173993 0.030796 0.035660 0.022932 0.077400 23 | 0.788549 0.152913 -0.595658 0.030796 0.035660 0.022932 0.077400 24 | -0.949280 -0.233069 0.211062 0.030796 0.035660 0.022932 0.077400 25 | -0.232965 0.782880 0.576911 0.030796 0.035660 0.022932 0.077400 26 | -0.020998 -0.187989 -0.981947 0.030796 0.035660 0.022932 0.077400 27 | -0.216931 -0.955701 0.198939 0.030796 0.035660 0.022932 0.077400 28 | 
-0.774003 -0.604003 0.190002 0.030796 0.035660 0.022932 0.077400 29 | 0.160927 0.355840 0.920587 0.030796 0.035660 0.022932 0.077400 30 | 0.147034 0.731174 -0.666158 0.030796 0.035660 0.022932 0.077400 31 | -0.888142 0.417066 0.193030 0.030796 0.035660 0.022932 0.077400 32 | 0.561970 0.231989 -0.793959 0.030796 0.035660 0.022932 0.077400 33 | 0.380808 0.142929 0.913541 0.030796 0.035660 0.022932 0.077400 34 | -0.305999 -0.199001 -0.931001 0.030796 0.035660 0.022932 0.077400 35 | 0.332087 -0.130034 0.934243 0.030796 0.035660 0.022932 0.077400 36 | 0.963226 -0.265063 0.044009 0.030796 0.035660 0.022932 0.077400 37 | 0.959502 0.205108 0.193100 0.030796 0.035660 0.022932 0.077400 38 | -0.452965 -0.888931 0.067996 0.030796 0.035660 0.022932 0.077400 39 | 0.773133 0.628108 0.088016 0.030796 0.035660 0.022932 0.077400 40 | -0.709081 0.408047 0.575067 0.030796 0.035660 0.022932 0.077400 41 | 0.692769 0.023992 0.720760 0.030796 0.035660 0.022932 0.077400 42 | -0.681659 0.528735 -0.505747 0.030796 0.035660 0.022932 0.077400 43 | 0.141995 -0.724977 0.673978 0.030796 0.035660 0.022932 0.077400 44 | 0.740169 0.388089 0.549124 0.030796 0.035660 0.022932 0.077400 45 | 0.103005 0.822044 0.560030 0.030796 0.035660 0.022932 0.077400 46 | -0.584037 -0.596037 0.551035 0.030796 0.035660 0.022932 0.077400 47 | 0.088009 -0.335032 0.938087 0.030796 0.035660 0.022932 0.077400 48 | 0.552263 -0.792377 0.259123 0.030796 0.035660 0.022932 0.077400 49 | -0.838158 -0.458086 -0.296055 0.030796 0.035660 0.022932 0.077400 50 | -0.362996 -0.560993 0.743990 0.030796 0.035660 0.022932 0.077400 51 | 0.184062 0.392134 -0.901306 0.030796 0.035660 0.022932 0.077400 52 | 0.720938 -0.692941 0.008999 0.030796 0.035660 0.022932 0.077400 53 | -0.433100 0.682159 -0.589137 0.030796 0.035660 0.022932 0.077400 54 | -0.502114 0.690157 0.521119 0.030796 0.035660 0.022932 0.077400 55 | -0.170945 -0.508833 -0.843722 0.030796 0.035660 0.022932 0.077400 56 | -0.462968 0.422970 0.778946 0.030796 0.035660 0.022932 0.077400 57 | -0.385030 -0.809064 0.444035 0.030796 0.035660 0.022932 0.077400 58 | 0.713102 -0.247036 0.656094 0.030796 0.035660 0.022932 0.077400 59 | -0.259924 0.884737 -0.386886 0.030796 0.035660 0.022932 0.077400 60 | -0.001000 0.077001 -0.997030 0.030796 0.035660 0.022932 0.077400 61 | -0.037003 -0.902057 0.430027 0.030796 0.035660 0.022932 0.077400 62 | -0.570320 -0.303171 -0.763428 0.030796 0.035660 0.022932 0.077400 63 | 0.282105 0.145054 -0.948354 0.030796 0.035660 0.022932 0.077400 64 | -0.721098 0.608083 0.332045 0.030796 0.035660 0.022932 0.077400 65 | -0.266985 0.959945 -0.084995 0.030796 0.035660 0.022932 0.077400 66 | -1.000000 0.000000 0.000000 0.043552 0.035660 0.022932 0.077400 67 | -0.494198 0.869349 0.000000 0.043552 0.035660 0.022932 0.077400 68 | 0.120010 0.641050 0.758059 0.043552 0.035660 0.022932 0.077400 69 | -0.919038 -0.331014 -0.214008 0.043552 0.035660 0.022932 0.077400 70 | 0.111970 -0.630833 0.767797 0.043552 0.035660 0.022932 0.077400 71 | 0.914305 -0.236080 0.329110 0.043552 0.035660 0.022932 0.077400 72 | -0.680843 0.302930 0.666847 0.043552 0.035660 0.022932 0.077400 73 | -0.066978 0.997670 -0.012996 0.043552 0.035660 0.022932 0.077400 74 | -0.917623 0.152938 0.366849 0.043552 0.035660 0.022932 0.077400 75 | -0.443231 -0.890464 0.103054 0.043552 0.035660 0.022932 0.077400 76 | -0.215974 -0.926892 -0.306964 0.043552 0.035660 0.022932 0.077400 77 | -0.480033 -0.671045 -0.565037 0.043552 0.035660 0.022932 0.077400 78 | 0.549943 -0.477950 0.684928 0.043552 0.035660 0.022932 0.077400 79 | 0.700687 
0.692692 0.170924 0.043552 0.035660 0.022932 0.077400 80 | -0.429107 -0.007001 -0.903226 0.043552 0.035660 0.022932 0.077400 81 | 0.698789 -0.639808 0.319904 0.043552 0.035660 0.022932 0.077400 82 | -0.725825 0.090977 -0.681836 0.043552 0.035660 0.022932 0.077400 83 | -0.389831 -0.248891 -0.886614 0.043552 0.035660 0.022932 0.077400 84 | -0.172056 -0.881289 -0.440144 0.043552 0.035660 0.022932 0.077400 85 | 0.609674 0.640657 -0.466751 0.043552 0.035660 0.022932 0.077400 86 | -0.598760 0.688724 0.408835 0.043552 0.035660 0.022932 0.077400 87 | 0.727829 0.221948 -0.648848 0.043552 0.035660 0.022932 0.077400 88 | -0.882959 -0.397981 0.248988 0.043552 0.035660 0.022932 0.077400 89 | -0.320210 0.548360 0.772507 0.043552 0.035660 0.022932 0.077400 90 | -0.044018 -0.268107 -0.962383 0.043552 0.035660 0.022932 0.077400 91 | -0.188896 -0.876519 0.442757 0.043552 0.035660 0.022932 0.077400 92 | -0.732164 -0.333076 -0.594135 0.043552 0.035660 0.022932 0.077400 93 | -0.308976 -0.549958 0.775938 0.043552 0.035660 0.022932 0.077400 94 | -0.285958 0.857876 -0.426939 0.043552 0.035660 0.022932 0.077400 95 | -0.859154 0.504091 0.088016 0.043552 0.035660 0.022932 0.077400 96 | 0.106973 0.214945 -0.970750 0.043552 0.035660 0.022932 0.077400 97 | 0.297951 -0.177970 0.937844 0.043552 0.035660 0.022932 0.077400 98 | -1.000000 0.000000 0.000000 0.053340 0.035660 0.022932 0.077400 99 | -0.494198 0.869349 0.000000 0.053340 0.035660 0.022932 0.077400 100 | 0.120010 0.641049 0.758059 0.053340 0.035660 0.022932 0.077400 101 | -0.919038 -0.331013 -0.214009 0.053340 0.035660 0.022932 0.077400 102 | 0.111971 -0.630833 0.767797 0.053340 0.035660 0.022932 0.077400 103 | 0.914305 -0.236079 0.329110 0.053340 0.035660 0.022932 0.077400 104 | -0.680843 0.302931 0.666847 0.053340 0.035660 0.022932 0.077400 105 | -0.066978 0.997670 -0.012996 0.053340 0.035660 0.022932 0.077400 106 | -0.917623 0.152937 0.366849 0.053340 0.035660 0.022932 0.077400 107 | -0.443231 -0.890464 0.103053 0.053340 0.035660 0.022932 0.077400 108 | -0.215975 -0.926892 -0.306964 0.053340 0.035660 0.022932 0.077400 109 | -0.480032 -0.671045 -0.565038 0.053340 0.035660 0.022932 0.077400 110 | 0.549942 -0.477950 0.684929 0.053340 0.035660 0.022932 0.077400 111 | 0.700688 0.692691 0.170924 0.053340 0.035660 0.022932 0.077400 112 | -0.429107 -0.007002 -0.903226 0.053340 0.035660 0.022932 0.077400 113 | 0.698790 -0.639808 0.319904 0.053340 0.035660 0.022932 0.077400 114 | -0.725825 0.090978 -0.681836 0.053340 0.035660 0.022932 0.077400 115 | -0.389831 -0.248892 -0.886614 0.053340 0.035660 0.022932 0.077400 116 | -0.172057 -0.881289 -0.440144 0.053340 0.035660 0.022932 0.077400 117 | 0.609674 0.640657 -0.466751 0.053340 0.035660 0.022932 0.077400 118 | -0.598759 0.688724 0.408836 0.053340 0.035660 0.022932 0.077400 119 | 0.727829 0.221947 -0.648848 0.053340 0.035660 0.022932 0.077400 120 | -0.882959 -0.397981 0.248989 0.053340 0.035660 0.022932 0.077400 121 | -0.320210 0.548360 0.772507 0.053340 0.035660 0.022932 0.077400 122 | -0.044017 -0.268107 -0.962383 0.053340 0.035660 0.022932 0.077400 123 | -0.188897 -0.876518 0.442757 0.053340 0.035660 0.022932 0.077400 124 | -0.732165 -0.333076 -0.594133 0.053340 0.035660 0.022932 0.077400 125 | -0.308975 -0.549957 0.775939 0.053340 0.035660 0.022932 0.077400 126 | -0.285959 0.857876 -0.426938 0.053340 0.035660 0.022932 0.077400 127 | -0.859154 0.504090 0.088016 0.053340 0.035660 0.022932 0.077400 128 | 0.106973 0.214944 -0.970750 0.053340 0.035660 0.022932 0.077400 129 | 0.297951 -0.177970 0.937844 0.053340 0.035660 
0.022932 0.077400 130 | -1.000000 0.000000 0.000000 0.068862 0.035660 0.022932 0.077400 131 | -0.494198 0.869349 0.000000 0.068862 0.035660 0.022932 0.077400 132 | 0.120009 0.641050 0.758059 0.068862 0.035660 0.022932 0.077400 133 | -0.919038 -0.331014 -0.214009 0.068862 0.035660 0.022932 0.077400 134 | 0.111970 -0.630833 0.767797 0.068862 0.035660 0.022932 0.077400 135 | 0.914305 -0.236079 0.329110 0.068862 0.035660 0.022932 0.077400 136 | -0.680844 0.302930 0.666847 0.068862 0.035660 0.022932 0.077400 137 | -0.066978 0.997670 -0.012996 0.068862 0.035660 0.022932 0.077400 138 | -0.917623 0.152937 0.366849 0.068862 0.035660 0.022932 0.077400 139 | -0.443231 -0.890464 0.103054 0.068862 0.035660 0.022932 0.077400 140 | -0.215975 -0.926892 -0.306964 0.068862 0.035660 0.022932 0.077400 141 | -0.480032 -0.671045 -0.565038 0.068862 0.035660 0.022932 0.077400 142 | 0.549943 -0.477950 0.684928 0.068862 0.035660 0.022932 0.077400 143 | 0.700688 0.692691 0.170924 0.068862 0.035660 0.022932 0.077400 144 | -0.429108 -0.007002 -0.903226 0.068862 0.035660 0.022932 0.077400 145 | 0.698790 -0.639808 0.319904 0.068862 0.035660 0.022932 0.077400 146 | -0.725825 0.090978 -0.681836 0.068862 0.035660 0.022932 0.077400 147 | -0.389830 -0.248892 -0.886614 0.068862 0.035660 0.022932 0.077400 148 | -0.172056 -0.881289 -0.440144 0.068862 0.035660 0.022932 0.077400 149 | 0.609674 0.640657 -0.466750 0.068862 0.035660 0.022932 0.077400 150 | -0.598760 0.688724 0.408836 0.068862 0.035660 0.022932 0.077400 151 | 0.727829 0.221948 -0.648848 0.068862 0.035660 0.022932 0.077400 152 | -0.882959 -0.397981 0.248988 0.068862 0.035660 0.022932 0.077400 153 | -0.320210 0.548360 0.772507 0.068862 0.035660 0.022932 0.077400 154 | -0.044018 -0.268107 -0.962383 0.068862 0.035660 0.022932 0.077400 155 | -0.188896 -0.876518 0.442757 0.068862 0.035660 0.022932 0.077400 156 | -0.732165 -0.333075 -0.594134 0.068862 0.035660 0.022932 0.077400 157 | -0.308976 -0.549957 0.775939 0.068862 0.035660 0.022932 0.077400 158 | -0.285959 0.857876 -0.426938 0.068862 0.035660 0.022932 0.077400 159 | -0.859154 0.504090 0.088016 0.068862 0.035660 0.022932 0.077400 160 | 0.106972 0.214945 -0.970750 0.068862 0.035660 0.022932 0.077400 161 | 0.297951 -0.177970 0.937844 0.068862 0.035660 0.022932 0.077400 162 | -------------------------------------------------------------------------------- /data/schemes/scheme-StLucGE_schemeWithB0.txt: -------------------------------------------------------------------------------- 1 | VERSION: 1 2 | 0.000000 0.000000 0.000000 0.000000 0.035660 0.022932 0.077400 3 | -1.000000 0.000000 0.000000 0.030796 0.035660 0.022932 0.077400 4 | -0.001999 0.999998 0.000000 0.030796 0.035660 0.022932 0.077400 5 | 0.026008 0.649171 0.760198 0.030796 0.035660 0.022932 0.077400 6 | -0.591135 -0.766176 0.252058 0.030796 0.035660 0.022932 0.077400 7 | 0.236070 -0.524158 0.818247 0.030796 0.035660 0.022932 0.077400 8 | 0.893021 -0.259005 0.368009 0.030796 0.035660 0.022932 0.077400 9 | -0.796184 0.129031 0.591136 0.030796 0.035660 0.022932 0.077400 10 | -0.233963 0.929855 0.283956 0.030796 0.035660 0.022932 0.077400 11 | -0.935686 0.139953 0.323891 0.030796 0.035660 0.022932 0.077400 12 | -0.505827 -0.844710 -0.174939 0.030796 0.035660 0.022932 0.077400 13 | -0.346220 -0.847539 -0.402256 0.030796 0.035660 0.022932 0.077400 14 | -0.456969 -0.630956 -0.626956 0.030796 0.035660 0.022932 0.077400 15 | 0.486997 -0.388997 0.781995 0.030796 0.035660 0.022932 0.077400 16 | 0.617845 0.672831 0.406898 0.030796 0.035660 0.022932 0.077400 17 | -0.576984 
-0.104996 -0.809978 0.030796 0.035660 0.022932 0.077400 18 | 0.826695 -0.520807 0.212920 0.030796 0.035660 0.022932 0.077400 19 | -0.893712 -0.039988 -0.446855 0.030796 0.035660 0.022932 0.077400 20 | -0.290101 -0.541189 -0.789275 0.030796 0.035660 0.022932 0.077400 21 | -0.115951 -0.962591 -0.244895 0.030796 0.035660 0.022932 0.077400 22 | 0.800182 0.403091 -0.444101 0.030796 0.035660 0.022932 0.077400 23 | -0.513982 0.839969 0.173993 0.030796 0.035660 0.022932 0.077400 24 | 0.788549 0.152913 -0.595658 0.030796 0.035660 0.022932 0.077400 25 | -0.949280 -0.233069 0.211062 0.030796 0.035660 0.022932 0.077400 26 | -0.232965 0.782880 0.576911 0.030796 0.035660 0.022932 0.077400 27 | -0.020998 -0.187989 -0.981947 0.030796 0.035660 0.022932 0.077400 28 | -0.216931 -0.955701 0.198939 0.030796 0.035660 0.022932 0.077400 29 | -0.774003 -0.604003 0.190002 0.030796 0.035660 0.022932 0.077400 30 | 0.160927 0.355840 0.920587 0.030796 0.035660 0.022932 0.077400 31 | 0.147034 0.731174 -0.666158 0.030796 0.035660 0.022932 0.077400 32 | -0.888142 0.417066 0.193030 0.030796 0.035660 0.022932 0.077400 33 | 0.561970 0.231989 -0.793959 0.030796 0.035660 0.022932 0.077400 34 | 0.380808 0.142929 0.913541 0.030796 0.035660 0.022932 0.077400 35 | -0.305999 -0.199001 -0.931001 0.030796 0.035660 0.022932 0.077400 36 | 0.332087 -0.130034 0.934243 0.030796 0.035660 0.022932 0.077400 37 | 0.963226 -0.265063 0.044009 0.030796 0.035660 0.022932 0.077400 38 | 0.959502 0.205108 0.193100 0.030796 0.035660 0.022932 0.077400 39 | -0.452965 -0.888931 0.067996 0.030796 0.035660 0.022932 0.077400 40 | 0.773133 0.628108 0.088016 0.030796 0.035660 0.022932 0.077400 41 | -0.709081 0.408047 0.575067 0.030796 0.035660 0.022932 0.077400 42 | 0.692769 0.023992 0.720760 0.030796 0.035660 0.022932 0.077400 43 | -0.681659 0.528735 -0.505747 0.030796 0.035660 0.022932 0.077400 44 | 0.141995 -0.724977 0.673978 0.030796 0.035660 0.022932 0.077400 45 | 0.740169 0.388089 0.549124 0.030796 0.035660 0.022932 0.077400 46 | 0.103005 0.822044 0.560030 0.030796 0.035660 0.022932 0.077400 47 | -0.584037 -0.596037 0.551035 0.030796 0.035660 0.022932 0.077400 48 | 0.088009 -0.335032 0.938087 0.030796 0.035660 0.022932 0.077400 49 | 0.552263 -0.792377 0.259123 0.030796 0.035660 0.022932 0.077400 50 | -0.838158 -0.458086 -0.296055 0.030796 0.035660 0.022932 0.077400 51 | -0.362996 -0.560993 0.743990 0.030796 0.035660 0.022932 0.077400 52 | 0.184062 0.392134 -0.901306 0.030796 0.035660 0.022932 0.077400 53 | 0.720938 -0.692941 0.008999 0.030796 0.035660 0.022932 0.077400 54 | -0.433100 0.682159 -0.589137 0.030796 0.035660 0.022932 0.077400 55 | -0.502114 0.690157 0.521119 0.030796 0.035660 0.022932 0.077400 56 | -0.170945 -0.508833 -0.843722 0.030796 0.035660 0.022932 0.077400 57 | -0.462968 0.422970 0.778946 0.030796 0.035660 0.022932 0.077400 58 | -0.385030 -0.809064 0.444035 0.030796 0.035660 0.022932 0.077400 59 | 0.713102 -0.247036 0.656094 0.030796 0.035660 0.022932 0.077400 60 | -0.259924 0.884737 -0.386886 0.030796 0.035660 0.022932 0.077400 61 | -0.001000 0.077001 -0.997030 0.030796 0.035660 0.022932 0.077400 62 | -0.037003 -0.902057 0.430027 0.030796 0.035660 0.022932 0.077400 63 | -0.570320 -0.303171 -0.763428 0.030796 0.035660 0.022932 0.077400 64 | 0.282105 0.145054 -0.948354 0.030796 0.035660 0.022932 0.077400 65 | -0.721098 0.608083 0.332045 0.030796 0.035660 0.022932 0.077400 66 | -0.266985 0.959945 -0.084995 0.030796 0.035660 0.022932 0.077400 67 | 0.000000 0.000000 0.000000 0.000000 0.035660 0.022932 0.077400 68 | -1.000000 0.000000 
0.000000 0.043552 0.035660 0.022932 0.077400 69 | -0.494198 0.869349 0.000000 0.043552 0.035660 0.022932 0.077400 70 | 0.120010 0.641050 0.758059 0.043552 0.035660 0.022932 0.077400 71 | -0.919038 -0.331014 -0.214008 0.043552 0.035660 0.022932 0.077400 72 | 0.111970 -0.630833 0.767797 0.043552 0.035660 0.022932 0.077400 73 | 0.914305 -0.236080 0.329110 0.043552 0.035660 0.022932 0.077400 74 | -0.680843 0.302930 0.666847 0.043552 0.035660 0.022932 0.077400 75 | -0.066978 0.997670 -0.012996 0.043552 0.035660 0.022932 0.077400 76 | -0.917623 0.152938 0.366849 0.043552 0.035660 0.022932 0.077400 77 | -0.443231 -0.890464 0.103054 0.043552 0.035660 0.022932 0.077400 78 | -0.215974 -0.926892 -0.306964 0.043552 0.035660 0.022932 0.077400 79 | -0.480033 -0.671045 -0.565037 0.043552 0.035660 0.022932 0.077400 80 | 0.549943 -0.477950 0.684928 0.043552 0.035660 0.022932 0.077400 81 | 0.700687 0.692692 0.170924 0.043552 0.035660 0.022932 0.077400 82 | -0.429107 -0.007001 -0.903226 0.043552 0.035660 0.022932 0.077400 83 | 0.698789 -0.639808 0.319904 0.043552 0.035660 0.022932 0.077400 84 | 0.000000 0.000000 0.000000 0.000000 0.035660 0.022932 0.077400 85 | -0.725825 0.090977 -0.681836 0.043552 0.035660 0.022932 0.077400 86 | -0.389831 -0.248891 -0.886614 0.043552 0.035660 0.022932 0.077400 87 | -0.172056 -0.881289 -0.440144 0.043552 0.035660 0.022932 0.077400 88 | 0.609674 0.640657 -0.466751 0.043552 0.035660 0.022932 0.077400 89 | -0.598760 0.688724 0.408835 0.043552 0.035660 0.022932 0.077400 90 | 0.727829 0.221948 -0.648848 0.043552 0.035660 0.022932 0.077400 91 | -0.882959 -0.397981 0.248988 0.043552 0.035660 0.022932 0.077400 92 | -0.320210 0.548360 0.772507 0.043552 0.035660 0.022932 0.077400 93 | -0.044018 -0.268107 -0.962383 0.043552 0.035660 0.022932 0.077400 94 | -0.188896 -0.876519 0.442757 0.043552 0.035660 0.022932 0.077400 95 | -0.732164 -0.333076 -0.594135 0.043552 0.035660 0.022932 0.077400 96 | -0.308976 -0.549958 0.775938 0.043552 0.035660 0.022932 0.077400 97 | -0.285958 0.857876 -0.426939 0.043552 0.035660 0.022932 0.077400 98 | -0.859154 0.504091 0.088016 0.043552 0.035660 0.022932 0.077400 99 | 0.106973 0.214945 -0.970750 0.043552 0.035660 0.022932 0.077400 100 | 0.297951 -0.177970 0.937844 0.043552 0.035660 0.022932 0.077400 101 | 0.000000 0.000000 0.000000 0.000000 0.035660 0.022932 0.077400 102 | -1.000000 0.000000 0.000000 0.053340 0.035660 0.022932 0.077400 103 | -0.494198 0.869349 0.000000 0.053340 0.035660 0.022932 0.077400 104 | 0.120010 0.641049 0.758059 0.053340 0.035660 0.022932 0.077400 105 | -0.919038 -0.331013 -0.214009 0.053340 0.035660 0.022932 0.077400 106 | 0.111971 -0.630833 0.767797 0.053340 0.035660 0.022932 0.077400 107 | 0.914305 -0.236079 0.329110 0.053340 0.035660 0.022932 0.077400 108 | -0.680843 0.302931 0.666847 0.053340 0.035660 0.022932 0.077400 109 | -0.066978 0.997670 -0.012996 0.053340 0.035660 0.022932 0.077400 110 | -0.917623 0.152937 0.366849 0.053340 0.035660 0.022932 0.077400 111 | -0.443231 -0.890464 0.103053 0.053340 0.035660 0.022932 0.077400 112 | -0.215975 -0.926892 -0.306964 0.053340 0.035660 0.022932 0.077400 113 | -0.480032 -0.671045 -0.565038 0.053340 0.035660 0.022932 0.077400 114 | 0.549942 -0.477950 0.684929 0.053340 0.035660 0.022932 0.077400 115 | 0.700688 0.692691 0.170924 0.053340 0.035660 0.022932 0.077400 116 | -0.429107 -0.007002 -0.903226 0.053340 0.035660 0.022932 0.077400 117 | 0.698790 -0.639808 0.319904 0.053340 0.035660 0.022932 0.077400 118 | 0.000000 0.000000 0.000000 0.000000 0.035660 0.022932 0.077400 119 | 
-0.725825 0.090978 -0.681836 0.053340 0.035660 0.022932 0.077400 120 | -0.389831 -0.248892 -0.886614 0.053340 0.035660 0.022932 0.077400 121 | -0.172057 -0.881289 -0.440144 0.053340 0.035660 0.022932 0.077400 122 | 0.609674 0.640657 -0.466751 0.053340 0.035660 0.022932 0.077400 123 | -0.598759 0.688724 0.408836 0.053340 0.035660 0.022932 0.077400 124 | 0.727829 0.221947 -0.648848 0.053340 0.035660 0.022932 0.077400 125 | -0.882959 -0.397981 0.248989 0.053340 0.035660 0.022932 0.077400 126 | -0.320210 0.548360 0.772507 0.053340 0.035660 0.022932 0.077400 127 | -0.044017 -0.268107 -0.962383 0.053340 0.035660 0.022932 0.077400 128 | -0.188897 -0.876518 0.442757 0.053340 0.035660 0.022932 0.077400 129 | -0.732165 -0.333076 -0.594133 0.053340 0.035660 0.022932 0.077400 130 | -0.308975 -0.549957 0.775939 0.053340 0.035660 0.022932 0.077400 131 | -0.285959 0.857876 -0.426938 0.053340 0.035660 0.022932 0.077400 132 | -0.859154 0.504090 0.088016 0.053340 0.035660 0.022932 0.077400 133 | 0.106973 0.214944 -0.970750 0.053340 0.035660 0.022932 0.077400 134 | 0.297951 -0.177970 0.937844 0.053340 0.035660 0.022932 0.077400 135 | 0.000000 0.000000 0.000000 0.000000 0.035660 0.022932 0.077400 136 | -1.000000 0.000000 0.000000 0.068862 0.035660 0.022932 0.077400 137 | -0.494198 0.869349 0.000000 0.068862 0.035660 0.022932 0.077400 138 | 0.120009 0.641050 0.758059 0.068862 0.035660 0.022932 0.077400 139 | -0.919038 -0.331014 -0.214009 0.068862 0.035660 0.022932 0.077400 140 | 0.111970 -0.630833 0.767797 0.068862 0.035660 0.022932 0.077400 141 | 0.914305 -0.236079 0.329110 0.068862 0.035660 0.022932 0.077400 142 | -0.680844 0.302930 0.666847 0.068862 0.035660 0.022932 0.077400 143 | -0.066978 0.997670 -0.012996 0.068862 0.035660 0.022932 0.077400 144 | -0.917623 0.152937 0.366849 0.068862 0.035660 0.022932 0.077400 145 | -0.443231 -0.890464 0.103054 0.068862 0.035660 0.022932 0.077400 146 | -0.215975 -0.926892 -0.306964 0.068862 0.035660 0.022932 0.077400 147 | -0.480032 -0.671045 -0.565038 0.068862 0.035660 0.022932 0.077400 148 | 0.549943 -0.477950 0.684928 0.068862 0.035660 0.022932 0.077400 149 | 0.700688 0.692691 0.170924 0.068862 0.035660 0.022932 0.077400 150 | -0.429108 -0.007002 -0.903226 0.068862 0.035660 0.022932 0.077400 151 | 0.698790 -0.639808 0.319904 0.068862 0.035660 0.022932 0.077400 152 | 0.000000 0.000000 0.000000 0.000000 0.035660 0.022932 0.077400 153 | -0.725825 0.090978 -0.681836 0.068862 0.035660 0.022932 0.077400 154 | -0.389830 -0.248892 -0.886614 0.068862 0.035660 0.022932 0.077400 155 | -0.172056 -0.881289 -0.440144 0.068862 0.035660 0.022932 0.077400 156 | 0.609674 0.640657 -0.466750 0.068862 0.035660 0.022932 0.077400 157 | -0.598760 0.688724 0.408836 0.068862 0.035660 0.022932 0.077400 158 | 0.727829 0.221948 -0.648848 0.068862 0.035660 0.022932 0.077400 159 | -0.882959 -0.397981 0.248988 0.068862 0.035660 0.022932 0.077400 160 | -0.320210 0.548360 0.772507 0.068862 0.035660 0.022932 0.077400 161 | -0.044018 -0.268107 -0.962383 0.068862 0.035660 0.022932 0.077400 162 | -0.188896 -0.876518 0.442757 0.068862 0.035660 0.022932 0.077400 163 | -0.732165 -0.333075 -0.594134 0.068862 0.035660 0.022932 0.077400 164 | -0.308976 -0.549957 0.775939 0.068862 0.035660 0.022932 0.077400 165 | -0.285959 0.857876 -0.426938 0.068862 0.035660 0.022932 0.077400 166 | -0.859154 0.504090 0.088016 0.068862 0.035660 0.022932 0.077400 167 | 0.106972 0.214945 -0.970750 0.068862 0.035660 0.022932 0.077400 168 | 0.297951 -0.177970 0.937844 0.068862 0.035660 0.022932 0.077400 169 | 
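The scheme tables above follow a simple plain-text layout: a "VERSION: 1" header line followed by one row per measurement with seven columns. Judging from how they are consumed later in this repository (the Synthetizer class loads them with np.loadtxt(..., skiprows=1) and reads the echo time from column 6), the columns appear to be the unit gradient direction (x, y, z) followed by four acquisition parameters ending with TE in seconds; this column interpretation is an assumption, not documented in the files themselves. A minimal, hypothetical parsing sketch:

import numpy as np

def load_scheme(scheme_path):
    # Skip the "VERSION: 1" header, as fastmf.generation.Synthetizer does.
    scheme = np.loadtxt(scheme_path, skiprows=1)
    directions = scheme[:, :3]  # unit gradient directions; all-zero rows are b0 volumes
    te = scheme[:, 6]           # echo time column (0.0774 s in the StLucGE schemes)
    return scheme, directions, te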
-------------------------------------------------------------------------------- /examples/genTrainAndTestData.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | fastmf_path = os.path.join('D:\\', 'FastMF_python-paper') 5 | if(not(fastmf_path in sys.path)): 6 | sys.path.insert(0,fastmf_path) 7 | 8 | #%% 9 | import fastmf.generation as gen 10 | #%% 11 | # Generate training and test data 12 | scheme_file = "..\\data\\schemes\\scheme-HCPMGH_scheme.txt" 13 | bvals_file = "..\\data\\schemes\\scheme-HCPMGH_bvals.txt" 14 | dic_file = "..\\data\\dictionaries\\dictionary-fixedraddist_scheme-HCPMGH.mat" 15 | synth_HCP_FixRadDist = gen.Synthetizer(scheme_file, bvals_file, dic_file, task_name="testFastMF", 16 | include_csf = True) 17 | 18 | base_path = "..\\tests\\firstDataTest\\" 19 | #%% 20 | genFirstStep = True 21 | if genFirstStep: 22 | # Generate training data 23 | synthStandard = synth_HCP_FixRadDist.generateStandardSet(1000, run_id="01") 24 | synthStandard.save(base_path, force_overwrite=True) 25 | genStandard = gen.Generator(synthStandard, base_path, 26 | orientation_estimate = 'CSD') 27 | genStandard.computeSphericalHarmonics() 28 | genStandard.computeNNLSWeights() 29 | genStandard.computeExhaustiveMF() 30 | 31 | synthStructured = synth_HCP_FixRadDist.generateStructuredSet(repetition=5, run_id="01") 32 | synthStructured.save(base_path) 33 | genStructured = gen.Generator(synthStructured, base_path, orientation_estimate = 'MSMTCSD') 34 | genStructured.computeSphericalHarmonics() 35 | genStructured.computeNNLSWeights() 36 | genStructured.computeExhaustiveMF() 37 | 38 | #%% 39 | formatterStandard = gen.DataFormatter(base_path, "testFastMF", "twoRuns", dic_file, 40 | ["01", "02"], "standard", [1500, 250, 250]) 41 | formatterStructured = gen.DataFormatter(base_path, "testFastMF", "twoRuns", dic_file, 42 | ["01", "02"], "structured", [800, 200, 200]) 43 | 44 | formatterStandard.genNNLSInput(normalization="None", orientation_estimate="CSD") 45 | formatterStandard.genNNLSTarget(min_max_scaling=True) 46 | 47 | formatterStandard.genSphericalHarmonicInput() 48 | formatterStandard.genSphericalHarmonicTarget(min_max_scaling=False) 49 | 50 | 51 | formatterStructured.genNNLSInput(normalization="None", orientation_estimate="MSMTCSD") 52 | formatterStructured.genNNLSTarget(min_max_scaling=True) 53 | 54 | formatterStructured.genSphericalHarmonicInput() 55 | formatterStructured.genSphericalHarmonicTarget(min_max_scaling=False) -------------------------------------------------------------------------------- /examples/invivo_fullylearned.py: -------------------------------------------------------------------------------- 1 | import os 2 | import fastmf.inference as inf 3 | import pickle 4 | 5 | if __name__ == '__main__': 6 | 7 | base_path = r"C:\Users\quent\Documents\Github\FastMF_python\tests\dataV1" 8 | include_csf = False 9 | session_id = "2Mtraining" 10 | run_id = "0" 11 | task_name = "paper" 12 | training_sample = 2000000 13 | 14 | model_path = os.path.join(base_path, 'training', 'type-standard', f'ses-{session_id}') 15 | base_name_Full = f"type-standard_task-{task_name}_ses-{session_id}_trainSamples-{training_sample}" 16 | metadata_Full = os.path.join(model_path, "FullyLearnedSWAP", base_name_Full + '_metadata.json') 17 | model_state_dict_Full = os.path.join(model_path, "FullyLearnedSWAP", base_name_Full + '_modelstatedict.pt') 18 | scaler_path = os.path.join(base_path, 'scaler', 'scaler-minmax_ses-{0}_SH.pickle'.format(session_id)) 19 | 
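    # Dictionary used at inference time: the fixed-radius-distribution dictionary for the
    # HCP-MGH scheme shipped under data/dictionaries (presumably the same one the model was trained on).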
dic_path = "..\\data\\dictionaries\\dictionary-fixedraddist_scheme-HCPMGH.mat" 20 | 21 | invivo_fitter = inf.FullyLearned_Model(metadata_Full, model_state_dict_Full, scaling_fn='MinMax', dictionary_path=dic_path, scaling_fn_path=scaler_path, device='cpu') 22 | 23 | # Define patient data 24 | folder_path = r"C:\Users\quent\Documents\Github\FastMF_python\tests\ElikoPyHCPv2" 25 | patient_path = "sub-1002" 26 | data_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.nii.gz" 27 | bvals = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bval" 28 | bvecs = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bvec" 29 | wm_mask_path = folder_path + '/subjects/' + patient_path + "/masks/" + patient_path + '_wm_mask_FSL_T1.nii.gz' 30 | 31 | # Fit the model 32 | FullyLearnedModelFitted = invivo_fitter.fit(data_path, wm_mask_path, 33 | bvals=bvals, bvecs=bvecs, 34 | verbose=4, M0_estimation=True) 35 | 36 | # Save the model 37 | output_folder = os.path.join(folder_path, 'subjects', patient_path, 'dMRI', 'microstructure', 'FullyLearned') 38 | if not os.path.exists(output_folder): 39 | os.makedirs(output_folder) 40 | with open(os.path.join(output_folder, f"{patient_path}_FL.pickle"), 'wb') as handle: 41 | pickle.dump(FullyLearnedModelFitted, handle, protocol=pickle.HIGHEST_PROTOCOL) 42 | print(FullyLearnedModelFitted.write_nifti(os.path.join(output_folder, f"{patient_path}_FL.nii.gz"))) 43 | -------------------------------------------------------------------------------- /examples/invivo_hybrid.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from dipy.io.image import load_nifti 4 | 5 | import fastmf.inference as inf 6 | import pickle 7 | 8 | import numpy as np 9 | 10 | if __name__ == '__main__': 11 | 12 | base_path = r"C:\Users\quent\Documents\Github\FastMF_python\tests\dataPaper" 13 | include_csf = False 14 | session_id = "2Mtraining" 15 | run_id = "0" 16 | task_name = "paper" 17 | training_sample = 2000000 18 | orientation_estimate = 'CSD' 19 | 20 | model_path = os.path.join(base_path, 'training', 'type-standard', f'ses-{session_id}') 21 | base_name_Hybrid = f"type-standard_task-{task_name}_ses-{session_id}_orientation-{orientation_estimate}_trainSamples-{training_sample}" 22 | 23 | metadata_Hybrid = os.path.join(model_path, "Hybrid", base_name_Hybrid + '_metadata.json') 24 | model_state_dict_Hybrid = os.path.join(model_path, "Hybrid", base_name_Hybrid + '_modelstatedict.pt') 25 | 26 | scaler_path = os.path.join(base_path, 'scaler', 'scaler-minmax_ses-{0}_NNLS.pickle'.format(session_id)) 27 | dic_path = "..\\data\\dictionaries\\dictionary-fixedraddist_scheme-HCPMGH.mat" 28 | 29 | invivo_fitter = inf.Hybrid_Model(metadata_Hybrid, model_state_dict_Hybrid, scaling_fn='MinMax', 30 | dictionary_path=dic_path, scaling_fn_path=scaler_path, device='cpu') 31 | 32 | # Define patient data 33 | folder_path = r"C:\Users\quent\Documents\Github\FastMF_python\tests\ElikoPyHCPv2" 34 | patient_path = "sub-1002" 35 | data_path = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.nii.gz" 36 | bvals = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bval" 37 | bvecs = folder_path + '/subjects/' + patient_path + '/dMRI/preproc/' + patient_path + "_dmri_preproc.bvec" 38 | wm_mask_path = folder_path + '/subjects/' + patient_path + "/masks/" + 
patient_path + '_wm_mask_FSL_T1.nii.gz' 39 | 40 | # Load peaks 41 | odf_csd_path = folder_path + '/subjects/' + patient_path + "/dMRI/ODF/CSD" 42 | peaks_path = odf_csd_path + '/' + patient_path + '_CSD_peaks.nii.gz' 43 | peaks_values_path = odf_csd_path + '/' + patient_path + '_CSD_values.nii.gz' 44 | peaks, _ = load_nifti(peaks_path) 45 | peaks_values, _ = load_nifti(peaks_values_path) 46 | 47 | numfasc = np.sum(peaks_values[:, :, :, 0] > 0.05) + np.sum( 48 | peaks_values[:, :, :, 1] > 0.05) 49 | 50 | # Normalize peaks with numfasc > 2 51 | peaks[numfasc >= 2] = peaks[numfasc >= 2] / np.linalg.norm(peaks[numfasc >= 2], axis=-1)[..., None] 52 | 53 | # Mask with WM mask and only voxels with at least 2 fascicles 54 | wm_mask, _ = load_nifti(wm_mask_path) 55 | mask = np.logical_and(wm_mask, numfasc >= 2) 56 | # Reduce number of TRUE in mask to speedup computation during testing 57 | mask[0:120, 0:110, :] = False 58 | 59 | print("Number of voxels: ", np.sum(mask)) 60 | print("Number of total voxels: ", np.prod(mask.shape), " (", mask.shape, ")") 61 | print("Percentage of voxels to be processed: ", np.sum(mask) / np.prod(mask.shape) * 100) 62 | 63 | # Fit the model 64 | HybridModelFitted = invivo_fitter.fit(data_path, mask, peaks, 65 | bvals=bvals, bvecs=bvecs, 66 | verbose=4, M0_estimation=True) 67 | 68 | # Save the model 69 | output_folder = os.path.join(folder_path, 'subjects', patient_path, 'dMRI', 'microstructure', 'Hybrid') 70 | if not os.path.exists(output_folder): 71 | os.makedirs(output_folder) 72 | with open(os.path.join(output_folder, f"{patient_path}_Hybrid.pickle"), 'wb') as handle: 73 | pickle.dump(HybridModelFitted, handle, protocol=pickle.HIGHEST_PROTOCOL) 74 | print(HybridModelFitted.write_nifti(os.path.join(output_folder, f"{patient_path}_Hybrid.nii.gz"))) 75 | 76 | # Save the model weights 77 | HybridModelFitted.write_weights(os.path.join(output_folder, f"{patient_path}_Hybrid_weights")) 78 | -------------------------------------------------------------------------------- /examples/prepare_report.py: -------------------------------------------------------------------------------- 1 | import os 2 | import fastmf.generation as gen 3 | 4 | if __name__ == "__main__": 5 | eval_dir = r"E:\MF" 6 | scheme_file = os.path.join(eval_dir, "data\\schemes\\scheme-HCPMGH_scheme.txt") 7 | bvals_file = os.path.join(eval_dir, "data\\schemes\\scheme-HCPMGH_bvals.txt") 8 | dic_file = os.path.join(eval_dir, "data\\dictionaries\\dictionary-fixedraddist_scheme-HCPMGH.mat") 9 | 10 | print("0. Synthetizer") 11 | synth_HCP_FixRadDist = gen.Synthetizer(scheme_file, bvals_file, dic_file, task_name="PaperStructuredNoCSF", include_csf=False) 12 | synth_HCP_FixRadDist.generateStructuredSet() 13 | base_path = os.path.join(eval_dir, "basedir") 14 | 15 | synthStructured_path = os.path.join(base_path, "synthetizer", "type-structured", "raw", 16 | "type-structured_task-PaperStructuredNoCSF_run-0_raw.pickle") 17 | 18 | print("1. Generator") 19 | genStructured = gen.Generator(synthStructured_path, base_path, orientation_estimate='MSMTCSD') 20 | genStructured.computeSphericalHarmonics() 21 | genStructured.computeNNLSWeights() 22 | genStructured.computeExhaustiveMF() 23 | 24 | genStructured_2 = gen.Generator(synthStructured_path, base_path, orientation_estimate='GROUNDTRUTH') 25 | genStructured_2.computeExhaustiveMF() 26 | 27 | print("2. 
Formatter") 28 | formatterStructured = gen.DataFormatter(base_path, "PaperStructuredNoCSF", "PaperSmalltest", dic_file, 29 | ["01"], "structured", [7500, 0, 0]) 30 | 31 | formatterStructured.genNNLSTarget(include_csf = False) 32 | formatterStructured.genNNLSInput(include_csf = False) 33 | formatterStructured.genSphericalHarmonicInput(include_csf = False) 34 | formatterStructured.genSphericalHarmonicTarget(include_csf = False) 35 | -------------------------------------------------------------------------------- /examples/report.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pickle 3 | import sys 4 | from sklearn.metrics import d2_absolute_error_score 5 | import numpy as np 6 | import torch 7 | 8 | fastmf_path = os.path.join('D:\\','FastMF_python-paper', 'FastMF_python') 9 | if(not(fastmf_path in sys.path)): 10 | sys.path.insert(0,fastmf_path) 11 | import fastmf.reports.evaluator as evaluator 12 | 13 | include_csf = True 14 | 15 | base_path = ['D:\\', 'Documents\\','DATASIM\\','stage\\','data_MLP\\','dataTestPaper3'] 16 | 17 | model_base_path = ['D:\\', 'Documents', 'DATASIM', 'stage', 'saved_models'] 18 | 19 | hybrid_model_path = os.path.join(*model_base_path, 20 | 'MLP_split_paper_csf') 21 | 22 | full_model_path = os.path.join(*model_base_path, 23 | 'MLP_FullyLearned_csf_4') 24 | 25 | task_name = "PaperCSF" 26 | run_id = "0" 27 | session_id = "PaperCSF" 28 | 29 | 30 | metadata_Hybrid = os.path.join(hybrid_model_path, 'metadata.json') 31 | model_state_dict_Hybrid = os.path.join(hybrid_model_path, 'model_state_dict.pt') 32 | 33 | metadata_Full = os.path.join(full_model_path, 'metadata.json') 34 | model_state_dict_Full = os.path.join(full_model_path, 'model_state_dict.pt') 35 | 36 | 37 | #%% 38 | evaluation = evaluator.Evaluator(os.path.join(*base_path), 39 | task_name, session_id, run_id, 40 | metadata_Hybrid, model_state_dict_Hybrid, 41 | metadata_Full, model_state_dict_Full, 42 | scaling_fn = 'MinMax') 43 | 44 | 45 | #%% Compute basic metrics 46 | 47 | d2_MF_MSMT = d2_absolute_error_score(evaluation.target_NNLS[:,:], 48 | evaluation.scaling_fn_hybrid(evaluation.MF_MSMTCSD_output, 49 | evaluation.minis_hybrid, 50 | evaluation.maxis_hybrid, 51 | inverse = False 52 | ), 53 | multioutput = 'raw_values') 54 | 55 | d2_MF_GT = d2_absolute_error_score(evaluation.target_NNLS[:,:], 56 | evaluation.scaling_fn_hybrid(evaluation.MF_GROUNDTRUTH_output, 57 | evaluation.minis_hybrid, 58 | evaluation.maxis_hybrid, 59 | inverse = False 60 | ), 61 | multioutput = 'raw_values') 62 | 63 | d2_Hybrid = d2_absolute_error_score(evaluation.target_NNLS, 64 | evaluation.pred_hybrid, 65 | multioutput = 'raw_values') 66 | 67 | idx_prop_full = [0,4,5,6,10,11,12] 68 | d2_Full = d2_absolute_error_score(evaluation.target_SH[:,idx_prop_full], 69 | evaluation.pred_full[:,idx_prop_full], 70 | multioutput = 'raw_values') 71 | 72 | 73 | print('D2 MF MSMT : ', d2_MF_MSMT) 74 | print('D2 MF GT :', d2_MF_GT) 75 | print('D2 Hybrid : ', d2_Hybrid) 76 | print('D2 Full : ', d2_Full) 77 | 78 | #%% 79 | evaluation._Evaluator__plot_MAEbyAngularError() 80 | 81 | #%% 82 | evaluation._Evaluator__plot_GTvsPrediction() 83 | 84 | #%% 85 | evaluation._Evaluator__plot_Residuals() 86 | 87 | #%% 88 | evaluation._Evaluator__plot_Distributions() 89 | 90 | #%% 91 | abs_dots_MF, abs_dots_SH = evaluation._Evaluator__plot_AngularError() 92 | 93 | angles_MF = np.arccos(abs_dots_MF)*180/np.pi 94 | med = np.median(angles_MF) 95 | avg = np.mean(angles_MF) 96 | q1 = 0.25 97 | qtile1 = 
np.quantile(angles_MF, q1) 98 | q2 = 0.75 99 | qtile2 = np.quantile(angles_MF,q2) 100 | q3 = 0.95 101 | qtile3 = np.quantile(angles_MF,q3) 102 | print('MSMT Median angular error : ', med) 103 | print('MSMT Average angular error : ', avg) 104 | print('MSMT Quantile \ q = {0} : {1}'.format(q1,qtile1)) 105 | print('MSMT Quantile \ q = {0} : {1}'.format(q2,qtile2)) 106 | print('MSMT Quantile \ q = {0} : {1}'.format(q3,qtile3)) 107 | 108 | 109 | angles_SH = np.arccos(abs_dots_SH)*180/np.pi 110 | med = np.median(angles_SH) 111 | avg = np.mean(angles_SH) 112 | q1 = 0.25 113 | qtile1 = np.quantile(angles_SH, q1) 114 | q2 = 0.75 115 | qtile2 = np.quantile(angles_SH,q2) 116 | q3 = 0.95 117 | qtile3 = np.quantile(angles_SH,q3) 118 | print('SH Median angular error : ', med) 119 | print('SH Average angular error : ', avg) 120 | print('SH Quantile \ q = {0} : {1}'.format(q1,qtile1)) 121 | print('SH Quantile \ q = {0} : {1}'.format(q2,qtile2)) 122 | print('SH Quantile \ q = {0} : {1}'.format(q3,qtile3)) 123 | 124 | 125 | #%% 126 | evaluation._Evaluator__plot_MAE_By_Nu() 127 | 128 | #%% 129 | evaluation._Evaluator__plot_D2_by_nu() 130 | 131 | #%% 132 | evaluation._Evaluator__assess_nuCSF() 133 | 134 | #%% 135 | -------------------------------------------------------------------------------- /examples/slurm-synthetizer.py: -------------------------------------------------------------------------------- 1 | import fastmf.generation as gen 2 | import sys 3 | 4 | if __name__ == "__main__": 5 | scheme_file = "../data/schemes/scheme-HCPMGH_scheme.txt" 6 | bvals_file = "../data/schemes/scheme-HCPMGH_bvals.txt" 7 | dic_file = "../data/dictionaries/dictionary-fixedraddist_scheme-HCPMGH.mat" 8 | synth_HCP_FixRadDist = gen.Synthetizer(scheme_file, bvals_file, dic_file, task_name="fixraddistHCP") 9 | num_samples = 10000 10 | 11 | base_path = "../tests/fixraddistHCP/" 12 | 13 | run_id = str(sys.argv[1]) 14 | 15 | # Generate training data 16 | synthStandard = synth_HCP_FixRadDist.generateStandardSet(num_samples, run_id=run_id) 17 | synthStandard.save(base_path) 18 | -------------------------------------------------------------------------------- /examples/testStLucGE.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | #%% 4 | import fastmf.generation as gen 5 | from fastmf.reports import evaluator 6 | 7 | base_path = r"C:\Users\quent\Documents\Github\FastMF_python\tests\dataPaper" 8 | run_id = 0 9 | orientation_estimate = "CSD" 10 | seed = 111 11 | print("Seed: ", seed) 12 | task_name = "paperStLucGE" 13 | 14 | # Synthetizer Path 15 | synthetizer_file = os.path.join(base_path, "synthetizer", "type-structured", "raw", 16 | f"type-structured_task-{task_name}_run-{run_id}_raw.pickle") 17 | 18 | # MF Generation 19 | genStructured = gen.Generator(synthetizer_file, base_path, orientation_estimate_sh_max_order=12, 20 | orientation_estimate=orientation_estimate, recompute_S0mean=False, compute_vf=False, 21 | compute_swap=False) 22 | genStructured.computeExhaustiveMF(processes=1) 23 | 24 | -------------------------------------------------------------------------------- /examples/train_FullyLearned_MLP.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | import sys 4 | import torch 5 | import pickle 6 | import matplotlib.pyplot as plt 7 | import sklearn 8 | import numpy as np 9 | import sklearn.metrics 10 | import json 11 | from io import StringIO 12 | 13 | 14 | fastmf_path = 
os.path.join('D:\\', 'FastMF_python-paper', 'FastMF_python') 15 | if(not(fastmf_path in sys.path)): 16 | sys.path.insert(0,fastmf_path) 17 | import fastmf.models.MLP_FullyLearned as MLP_Full 18 | import fastmf.utils.NN_utils as nnu 19 | 20 | device = 'cuda' 21 | 22 | #Some parameters 23 | TRAIN = True 24 | SAVE = True 25 | 26 | 27 | #%% Load data 28 | base_path = ['D:\\', 'Documents\\','DATASIM\\','stage\\','data_MLP\\','dataPaperVeryBig'] 29 | task_name = 'PaperCSF' 30 | session_id = 'BigPaperCSF' 31 | type_set = 'standard' 32 | in_normalization = 'None' 33 | target_normalization = 'minmax' 34 | 35 | in_train_cubic, in_valid_cubic, in_test_cubic, target_train, target_valid, target_test = MLP_Full.DataLoader(base_path, task_name, session_id, type_set, 36 | in_normalization, target_normalization) 37 | 38 | ntrain = in_train_cubic.shape[0]#2000000 39 | nvalid = in_valid_cubic.shape[0]#100000 40 | ntest = in_test_cubic.shape[0]#100000 41 | 42 | in_train = in_train_cubic[0:ntrain,:].reshape(ntrain, in_train_cubic.shape[1]*in_train_cubic.shape[2]) 43 | del in_train_cubic 44 | in_valid = in_valid_cubic[0:nvalid,:].reshape(nvalid, in_valid_cubic.shape[1]*in_valid_cubic.shape[2]) 45 | del in_valid_cubic 46 | in_test = in_test_cubic[0:ntest,:].reshape(ntest, in_test_cubic.shape[1]*in_test_cubic.shape[2]) 47 | del in_test_cubic 48 | 49 | target_train = target_train[0:ntrain,:] 50 | target_valid = target_valid[0:nvalid,:] 51 | target_test = target_test[0:ntest,:] 52 | 53 | 54 | 55 | #%% Load scaler and define utility function 56 | 57 | scaler_path = os.path.join(*base_path, 'scaler', 'scaler-minmax_ses-{0}_SH.pickle'.format(session_id)) 58 | with open( scaler_path, 'rb') as file: 59 | scaler = pickle.load(file) 60 | 61 | maxis = scaler.data_max_[:,np.newaxis] 62 | minis = scaler.data_min_[:,np.newaxis] 63 | 64 | def MinMaxScaler(x, minis, maxis, inverse = False): 65 | # if(not(np.any(x.shape == mini.shape) and np.any(x.shape == maxi.shape))): 66 | # raise ValueError('Shape mismatch ! 
x : {0}, mini : {1}, maxi : {2}'.format(x.shape, 67 | # maxi.shape, 68 | # mini.shape)) 69 | if(inverse): 70 | a = maxis - minis 71 | b = minis 72 | else: 73 | a = 1/(maxis - minis) 74 | b = - minis * a 75 | return x*a + b 76 | 77 | 78 | #%% Define model 79 | num_fasc = 2 80 | num_atoms = in_test.shape[1]//num_fasc 81 | num_outputs = target_test.shape[1] 82 | num_inputs = in_train.shape[1] 83 | 84 | p_split = 0.1#Drop out rate in the split layers 85 | p_final = 0.1#Drop out rate in the final layers 86 | 87 | 88 | 89 | architecture = [['FCL', num_inputs,1000], 90 | ['Activation-ReLU'], 91 | ['Dropout', 0.1], 92 | ['FCL', 1000,1000], 93 | ['Activation-ReLU'], 94 | ['Dropout', 0.1], 95 | ['FCL', 1000,1000], 96 | ['Activation-ReLU'], 97 | ['Dropout', 0.1], 98 | ['FCL', 1000,num_outputs], 99 | ['Activation-Sigmoid'], 100 | ] 101 | 102 | 103 | 104 | model = MLP_Full.Network(architecture) 105 | num_parameters = nnu.count_parameters(model) 106 | print('Number of parameters : ', num_parameters) 107 | 108 | base_save_path = 'D:\\Documents\\DATASIM\\stage\\saved_models' 109 | folder = 'MLP_FullyLearned_csf_4' 110 | save_path = os.path.join(base_save_path, folder) 111 | if not os.path.exists(save_path): 112 | # create the directory 113 | os.makedirs(save_path) 114 | 115 | #%% training 116 | if(TRAIN): 117 | ind_fig = 0 118 | num_epochs = 70 119 | learning_rate = 2e-4 120 | num_train_samples = in_train.shape[0] 121 | batch_size = 12000 122 | metric_function = lambda x,y:MLP_Full.D2score(x,y) 123 | out = MLP_Full.Train(model, batch_size, num_epochs, learning_rate, 124 | torch.from_numpy(in_train), torch.from_numpy(in_valid), 125 | torch.from_numpy(target_train), torch.from_numpy(target_valid), 126 | device = 'cuda', 127 | full_train_on_gpu = False, 128 | valid_on_gpu = True, 129 | bavard = 1, 130 | random_seed = 10, 131 | loss_function = torch.nn.L1Loss(), 132 | metric_function = metric_function) 133 | 134 | 135 | try: 136 | validation_acc = [float(x.detach().numpy()) for x in out['validation_accuracy']] 137 | except AttributeError: 138 | validation_acc = [float(x) for x in out['validation_accuracy']] 139 | train_losses = [float(x.detach().numpy()) for x in out['train_losses']] 140 | validation_losses = [float(x.detach().numpy()) for x in out['validation_losses']] 141 | 142 | 143 | #%% Plot training curves 144 | plt.figure() 145 | plt.plot(validation_losses, label = 'validation') 146 | plt.plot(train_losses, label = 'train') 147 | plt.title('MSE Loss') 148 | plt.legend() 149 | if(SAVE): 150 | plt.savefig(str(ind_fig)+'.png', bbox_inches = 'tight') 151 | ind_fig+=1 152 | plt.show() 153 | 154 | plt.figure() 155 | plt.plot([float(x) for x in validation_acc], label = 'validation MAE') 156 | plt.legend() 157 | if(SAVE): 158 | plt.savefig(str(ind_fig)+'.png', bbox_inches = 'tight') 159 | ind_fig+=1 160 | plt.show() 161 | 162 | 163 | 164 | #%% Prediction on test set 165 | 166 | model.eval() #Eval mode : affects dropout 167 | 168 | #Evaluate on the test set 169 | pred_test = model.cpu()(torch.from_numpy(in_test)) 170 | pred_test = pred_test.detach().cpu().numpy() 171 | 172 | MAE_test = sklearn.metrics.mean_absolute_error(target_test,pred_test, multioutput = 'raw_values') 173 | MAE_test_avg = np.array([np.mean(MAE_test[0:num_fasc]), np.mean(MAE_test[num_fasc:-1])]) 174 | 175 | print('MAE Test : {0}'.format(MAE_test)) 176 | print('MAE Test Avg : {0}'.format(MAE_test_avg)) 177 | 178 | D2_MAE = sklearn.metrics.d2_absolute_error_score(target_test,pred_test, multioutput = 'raw_values') 179 | print('D2_MAE Test : 
{0}'.format(D2_MAE)) 180 | 181 | Var_test = np.var(pred_test, axis = 0) 182 | Var_GT = np.var(target_test, axis = 0) 183 | mean_test = np.mean(pred_test, axis = 0) 184 | mean_GT = np.mean(target_test, axis = 0) 185 | 186 | 187 | 188 | 189 | 190 | #%% Save results 191 | 192 | #Scale back to physical values 193 | pred_test_ph = MinMaxScaler(pred_test, minis.T, maxis.T, inverse = True) 194 | target_test_ph = MinMaxScaler(target_test, minis.T, maxis.T, inverse = True) 195 | MAE_test_ph = sklearn.metrics.mean_absolute_error(target_test_ph,pred_test_ph, multioutput = 'raw_values') 196 | 197 | s = StringIO() 198 | print(model.Layers, file=s) 199 | inter = s.getvalue() 200 | all_layers = inter.split('\n') 201 | s.close() 202 | 203 | if(SAVE): 204 | metadata = { 205 | 'device':device, 206 | 'base_path': base_path, 207 | 'task_name':task_name, 208 | 'session_id':session_id, 209 | 'type_set':type_set, 210 | 'in_normalization':in_normalization, 211 | 'target_normalization':target_normalization, 212 | 'num_atoms': num_atoms, 213 | 'num_outputs':num_outputs, 214 | 'num_fasc':num_fasc, 215 | 'architecture':architecture, 216 | 'layers':all_layers, 217 | 'number of parameters': num_parameters, 218 | 'num_epochs':num_epochs, 219 | 'learning_rate':learning_rate, 220 | 'num_train_samples':num_train_samples, 221 | 'batch_size':batch_size, 222 | 'MAE_test':MAE_test.tolist(), 223 | 'MAE_test_ph':MAE_test_ph.tolist(), 224 | 'MAE_test_avg':MAE_test_avg.tolist(), 225 | 'D2_MAE':D2_MAE.tolist(), 226 | 'variance of predictions on test set':Var_test.tolist(), 227 | 'mean of predictions on test set':mean_test.tolist(), 228 | 'variance of ground truth values':Var_GT.tolist(), 229 | 'mean of ground truth values':mean_GT.tolist(), 230 | 'validation_acc':validation_acc, 231 | 'validation_losses':validation_losses, 232 | 'train_losses':train_losses, 233 | 234 | 235 | } 236 | # open the file for writing 237 | with open(os.path.join(save_path,'metadata.json'), 'w') as f: 238 | # write the data to the file 239 | json.dump(metadata, f, indent = 0) 240 | 241 | torch.save(model.state_dict(), os.path.join(save_path, 'model_state_dict.pt')) -------------------------------------------------------------------------------- /examples/train_Hybrid_MLP.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import os 3 | import sys 4 | import torch 5 | import pickle 6 | import matplotlib.pyplot as plt 7 | import sklearn 8 | import numpy as np 9 | import sklearn.metrics 10 | import json 11 | from io import StringIO 12 | 13 | 14 | fastmf_path = os.path.join('D:\\', 'FastMF_python-paper', 'FastMF_python') 15 | if(not(fastmf_path in sys.path)): 16 | sys.path.insert(0,fastmf_path) 17 | import fastmf.models.MLP_Split as MLP_Split 18 | import fastmf.utils.NN_utils as nnu 19 | 20 | 21 | 22 | 23 | device = 'cuda' 24 | 25 | #Some parameters 26 | TRAIN = True 27 | SAVE = True 28 | 29 | 30 | #%% Load data 31 | base_path = ['D:\\', 'Documents\\','DATASIM\\','stage\\','data_MLP\\','dataPaperVeryBig'] 32 | task_name = 'PaperCSF' 33 | session_id = 'PaperCSF' 34 | type_set = 'standard' 35 | in_normalization = 'SumToOne' 36 | target_normalization = 'minmax' 37 | 38 | data = MLP_Split.DataLoader(base_path, task_name, session_id, type_set, 39 | in_normalization, target_normalization) 40 | 41 | idx_start_train = 0 42 | idx_end_train = 1000000 43 | ntrain = idx_start_train - idx_end_train 44 | ntest = 100000 45 | nvalid = 100000 46 | in_train = data[0][idx_start_train:idx_end_train,:] 47 | in_valid 
= data[1][0:nvalid,:] 48 | in_test = data[2][0:ntest,:] 49 | 50 | target_train = data[3][idx_start_train:idx_end_train,:] 51 | target_valid = data[4][0:nvalid,:] 52 | target_test = data[5][0:ntest,:] 53 | del data 54 | 55 | #%% Check target data 56 | # hists = [] 57 | # for k in range(target_train.shape[1]): 58 | # hist = np.histogram(target_train[:,k], bins = 60, range = [0,1] ) 59 | # bin_middles = (hist[1][1:] + hist[1][0:-1])/2 60 | # bin_size = hist[1][1] - hist[1][0] 61 | # fig,ax = plt.subplots(2,1) 62 | # ax[0].plot(bin_middles, hist[0], '-og', label = 'train') 63 | # ax[0].legend() 64 | 65 | # hist = np.histogram(target_valid[:,k], bins = 60, range = [0,1] ) 66 | # bin_middles = (hist[1][1:] + hist[1][0:-1])/2 67 | # bin_size = hist[1][1] - hist[1][0] 68 | # ax[1].plot(bin_middles, hist[0], '-ok', label = 'valid') 69 | # ax[1].legend() 70 | 71 | #%% Load scaler and define utility function 72 | 73 | scaler_path = os.path.join(*base_path, 'scaler', 'scaler-minmax_ses-{0}_NNLS.pickle'.format(session_id)) 74 | with open( scaler_path, 'rb') as file: 75 | scaler = pickle.load(file) 76 | 77 | maxis = scaler.data_max_[:,np.newaxis] 78 | minis = scaler.data_min_[:,np.newaxis] 79 | 80 | def MinMaxScaler(x, minis, maxis, inverse = False): 81 | # if(not(np.any(x.shape == mini.shape) and np.any(x.shape == maxi.shape))): 82 | # raise ValueError('Shape mismatch ! x : {0}, mini : {1}, maxi : {2}'.format(x.shape, 83 | # maxi.shape, 84 | # mini.shape)) 85 | if(inverse): 86 | a = maxis - minis 87 | b = minis 88 | else: 89 | a = 1/(maxis - minis) 90 | b = - minis * a 91 | return x*a + b 92 | 93 | 94 | 95 | #%% Define model 96 | 97 | #Reference model (Louise Adam) without dropout // dropouts are 0.1 for each layer in the ref model 98 | num_fasc = 2 99 | num_atoms = in_test.shape[1]//num_fasc 100 | num_outputs = target_test.shape[1] 101 | 102 | p_split = 0.1#Drop out rate in the split layers 103 | p_final = 0.1#Drop out rate in the final layers 104 | 105 | split_architecture = [['FCL', num_atoms,num_atoms], 106 | ['Activation-ReLU'], 107 | ['Dropout', 0.1], 108 | ['FCL', num_atoms,250], 109 | ['Activation-ReLU'], 110 | ] 111 | 112 | final_architecture = [['Batchnorm', 500], 113 | ['FCL', 500,250], 114 | ['Activation-ReLU'], 115 | ['Dropout', 0.1], 116 | ['FCL', 250,250], 117 | ['Activation-ReLU'], 118 | ['Dropout', 0.1], 119 | ['FCL', 250,100], 120 | ['Activation-ReLU'], 121 | ['Dropout', 0.1], 122 | ['FCL', 100,num_outputs], 123 | ['Activation-Sigmoid'], 124 | ] 125 | 126 | 127 | 128 | model = MLP_Split.Network(split_architecture, final_architecture) 129 | print(model) 130 | num_parameters = nnu.count_parameters(model) 131 | 132 | 133 | base_save_path = 'D:\\Documents\\DATASIM\\stage\\saved_models' 134 | folder = 'MLP_split_no_csf' 135 | save_path = os.path.join(base_save_path, folder) 136 | if not os.path.exists(save_path): 137 | # create the directory 138 | os.makedirs(save_path) 139 | 140 | #%% training 141 | if(TRAIN): 142 | ind_fig = 0 143 | num_epochs = 25 144 | learning_rate = 3.5e-4 145 | num_train_samples = in_train.shape[0] 146 | batch_size = 5000 147 | 148 | loss_fn = torch.nn.L1Loss(reduction = 'mean') 149 | 150 | out = MLP_Split.Train(model, batch_size, num_epochs, learning_rate, 151 | torch.from_numpy(in_train), torch.from_numpy(in_valid), 152 | torch.from_numpy(target_train), torch.from_numpy(target_valid), 153 | device = 'cuda', 154 | full_train_on_gpu = False, 155 | valid_on_gpu = True, 156 | bavard = 1, 157 | random_seed = 10, 158 | loss_function = loss_fn) 159 | 160 | 161 | 162 | 
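    # Unpack the training history returned by Train: detach the torch tensors and cast them
    # to plain floats so they can be plotted and written to the JSON metadata below.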
validation_acc = [float(x.detach().numpy()) for x in out['validation_accuracy']] 163 | train_losses = [float(x.detach().numpy()) for x in out['train_losses']] 164 | validation_losses = [float(x.detach().numpy()) for x in out['validation_losses']] 165 | 166 | 167 | #%% Plot training curves 168 | plt.figure() 169 | plt.plot(validation_losses, label = 'validation') 170 | plt.plot(train_losses, label = 'train') 171 | plt.title('MSE Loss') 172 | plt.legend() 173 | if(SAVE): 174 | plt.savefig(str(ind_fig)+'.png', bbox_inches = 'tight') 175 | ind_fig+=1 176 | plt.show() 177 | 178 | plt.figure() 179 | plt.plot([float(x) for x in validation_acc], label = 'validation MAE') 180 | plt.legend() 181 | if(SAVE): 182 | plt.savefig(str(ind_fig)+'.png', bbox_inches = 'tight') 183 | ind_fig+=1 184 | plt.show() 185 | 186 | 187 | 188 | #%% Prediction on test set 189 | 190 | model.eval() #Eval mode : affects dropout 191 | 192 | #Evaluate on the test set 193 | pred_test = model.cpu()(torch.from_numpy(in_test)) 194 | pred_test = pred_test.detach().cpu().numpy() 195 | 196 | 197 | pred_valid = model.cpu()(torch.from_numpy(in_valid)) 198 | pred_valid = pred_valid.detach().cpu().numpy() 199 | 200 | pred_train = model.cpu()(torch.from_numpy(in_train)) 201 | pred_train = pred_train.detach().cpu().numpy() 202 | 203 | MAE_train = sklearn.metrics.mean_absolute_error(target_train,pred_train, multioutput = 'raw_values') 204 | MAE_valid = sklearn.metrics.mean_absolute_error(target_valid,pred_valid, multioutput = 'raw_values') 205 | 206 | MAE_test = sklearn.metrics.mean_absolute_error(target_test,pred_test, multioutput = 'raw_values') 207 | MAE_test_avg = np.array([np.mean(MAE_test[0:num_fasc]), np.mean(MAE_test[num_fasc:])]) 208 | 209 | print('MAE Test : {0}'.format(MAE_test)) 210 | print('MAE Test Avg : {0}'.format(MAE_test_avg)) 211 | 212 | D2_MAE = sklearn.metrics.d2_absolute_error_score(target_test,pred_test, multioutput = 'raw_values') 213 | print('D2_MAE Test : {0}'.format(D2_MAE)) 214 | 215 | Var_test = np.var(pred_test, axis = 0) 216 | Var_GT = np.var(target_test, axis = 0) 217 | mean_test = np.mean(pred_test, axis = 0) 218 | mean_GT = np.mean(target_test, axis = 0) 219 | 220 | 221 | 222 | 223 | 224 | #%% Save results 225 | 226 | #Scale back to physical values 227 | pred_test_ph = MinMaxScaler(pred_test, minis.T, maxis.T, inverse = True) 228 | target_test_ph = MinMaxScaler(target_test, minis.T, maxis.T, inverse = True) 229 | MAE_test_ph = sklearn.metrics.mean_absolute_error(target_test_ph,pred_test_ph, multioutput = 'raw_values') 230 | 231 | s = StringIO() 232 | print(model.Split_Layers, file=s) 233 | inter = s.getvalue() 234 | all_layers_split = inter.split('\n') 235 | s.close() 236 | 237 | s = StringIO() 238 | print(model.Final_Layers, file=s) 239 | inter = s.getvalue() 240 | all_layers_final = inter.split('\n') 241 | s.close() 242 | 243 | if(SAVE): 244 | metadata = { 245 | 'device':device, 246 | 'base_path': base_path, 247 | 'task_name':task_name, 248 | 'session_id':session_id, 249 | 'type_set':type_set, 250 | 'in_normalization':in_normalization, 251 | 'target_normalization':target_normalization, 252 | 'num_atoms': num_atoms, 253 | 'num_outputs':num_outputs, 254 | 'num_fasc':num_fasc, 255 | 'split_architecture':split_architecture, 256 | 'final_architecture':final_architecture, 257 | 'layers split':all_layers_split, 258 | 'layers final':all_layers_final, 259 | 'number of parameters': num_parameters, 260 | 'num_epochs':num_epochs, 261 | 'learning_rate':learning_rate, 262 | 'num_train_samples':num_train_samples, 263 
| 'batch_size':batch_size, 264 | 'MAE_test':MAE_test.tolist(), 265 | 'MAE_test_ph':MAE_test_ph.tolist(), 266 | 'MAE_test_avg':MAE_test_avg.tolist(), 267 | 'D2_MAE':D2_MAE.tolist(), 268 | 'variance of predictions on test set':Var_test.tolist(), 269 | 'mean of predictions on test set':mean_test.tolist(), 270 | 'variance of ground truth values':Var_GT.tolist(), 271 | 'mean of ground truth values':mean_GT.tolist(), 272 | 'validation_acc':validation_acc, 273 | 'validation_losses':validation_losses, 274 | 'train_losses':train_losses, 275 | 276 | 277 | } 278 | # open the file for writing 279 | with open(os.path.join(save_path,'metadata.json'), 'w') as f: 280 | # write the data to the file 281 | json.dump(metadata, f, indent = 0) 282 | 283 | torch.save(model.state_dict(), os.path.join(save_path, 'model_state_dict.pt')) -------------------------------------------------------------------------------- /fastmf/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hyedryn/FastMF_public/cb0775bff53f320ccb0a6a3bc507c924a9f30868/fastmf/__init__.py -------------------------------------------------------------------------------- /fastmf/generation/__init__.py: -------------------------------------------------------------------------------- 1 | from . import synthetizer 2 | from . import formatter 3 | from . import generator 4 | 5 | Synthetizer = synthetizer.Synthetizer 6 | DataFormatter = formatter.DataFormatter 7 | SynthetizerFit = synthetizer.SynthetizerFit 8 | Generator = generator.Generator 9 | -------------------------------------------------------------------------------- /fastmf/generation/synthetizer.py: -------------------------------------------------------------------------------- 1 | import json 2 | import pickle 3 | import numpy as np 4 | import os 5 | from tqdm import tqdm 6 | 7 | import fastmf.utils.mf_utils as mfu 8 | 9 | from matplotlib import pyplot as plt 10 | 11 | 12 | class Synthetizer: 13 | """Class to generate synthetic data from a given scheme and dictionary.""" 14 | def __init__(self, scheme_path, bvals_path, dictionary_path, 15 | task_name="default", 16 | include_csf = False, M0_random = True): 17 | """Initialize the synthetizer with a given scheme and dictionary.""" 18 | 19 | # Load DW-MRI protocol 20 | bvals = np.loadtxt(bvals_path) # NOT in SI units, in s/mm^2 21 | scheme = np.loadtxt(scheme_path, skiprows=1) # only DWI, no b0s 22 | 23 | # Load MF dictionary 24 | dictionary_structure = mfu.loadmat(dictionary_path) 25 | 26 | self.__init_scheme(scheme, bvals) 27 | self.__init_MF_dictionary(dictionary_structure) 28 | 29 | self.num_fasc = 2 30 | self.task_name = task_name 31 | 32 | self.include_csf = include_csf 33 | 34 | self.M0_random = M0_random 35 | 36 | def __init_MF_dictionary(self, dictionary_structure): 37 | self.MF_dict = dictionary_structure 38 | self.MF_dict["fasc_propnames"] = [s.strip() for s in dictionary_structure['fasc_propnames']] 39 | 40 | self.interpolator = mfu.init_PGSE_multishell_interp( 41 | dictionary_structure['dictionary'], 42 | dictionary_structure['sch_mat'], 43 | dictionary_structure['orientation']) 44 | 45 | def __init_scheme(self, scheme, bvals): 46 | ind_b0 = np.where(bvals <= 1e-16)[0] 47 | ind_b = np.where(bvals > 1e-16)[0] 48 | num_B0 = ind_b0.size 49 | sch_mat_b0 = np.zeros((scheme.shape[0] + num_B0, scheme.shape[1])) 50 | sch_mat_b0[ind_b0, 4:] = scheme[0, 4:] 51 | sch_mat_b0[ind_b, :] = scheme 52 | self.scheme = sch_mat_b0 53 | self.TE = np.mean(self.scheme[:, 6]) 54 | 
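        # Total number of measurements in the protocol, b0 volumes included.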
self.num_mris = sch_mat_b0.shape[0] 55 | 56 | def __generateRandomDirections(self, crossangle_min, dir1='fixed'): 57 | crossangle_min_rad = crossangle_min * np.pi / 180 58 | if dir1 == 'fixed': 59 | # fixed direction (do not take direction in the Z axis orientation) 60 | cyldir_1 = np.array([1 / np.sqrt(2), 1 / np.sqrt(2), 0]) 61 | elif dir1 == 'random': 62 | norm1 = -1 63 | while norm1 <= 0: 64 | cyldir_1 = np.random.randn(3) 65 | norm1 = np.linalg.norm(cyldir_1, 2) 66 | cyldir_1 = cyldir_1 / norm1 # get unit vector 67 | if cyldir_1[2] < 0: 68 | # Force half-sphere with positive z 69 | cyldir_1 = -cyldir_1 70 | else: 71 | raise ValueError('dir1 should be either fixed or random') 72 | 73 | # cyldir2 - Enforce min crossing angle 74 | cyldir_2 = cyldir_1.copy() 75 | while np.abs(np.dot(cyldir_1, cyldir_2)) > np.cos(crossangle_min_rad): 76 | norm2 = -1 77 | while norm2 <= 0: 78 | cyldir_2 = np.random.randn(3) 79 | norm2 = np.linalg.norm(cyldir_2, 2) 80 | cyldir_2 = cyldir_2 / norm2 81 | if cyldir_2[2] < 0: 82 | # Force half-sphere with positive z 83 | cyldir_2 = - cyldir_2 84 | crossang = np.arccos(np.abs(np.dot(cyldir_1, cyldir_2))) * 180 / np.pi 85 | 86 | return cyldir_1, cyldir_2, crossang 87 | 88 | def generateStandardSet(self, num_samples, run_id=0, SNR_min=20, SNR_max=100, 89 | SNR_dist='uniform', nu_min=0.15, nu_csf_max = 0, 90 | crossangle_min=30, nu1_dominant=True, random_seed=None): 91 | np.random.seed(random_seed) 92 | nus1 = [] 93 | nuscsf = [] 94 | SNRs = [] 95 | nu_max = 1 - nu_min 96 | for i in range(num_samples): 97 | if nu1_dominant: 98 | nus1.append(1 - (nu_min + (nu_max - nu_min) / 2 * np.random.rand())) 99 | else: 100 | nus1.append((nu_min + (nu_max - nu_min) * np.random.rand())) 101 | if(self.include_csf): 102 | nuscsf.append(nu_csf_max * np.random.rand()) 103 | 104 | if SNR_dist == 'triangular': 105 | SNR = np.random.triangular(SNR_min, SNR_min, SNR_max, 1) 106 | elif SNR_dist == 'uniform': 107 | SNR = np.random.uniform(SNR_min, SNR_max, 1) 108 | else: 109 | raise ValueError("Unknown SNR distribution %s" % SNR_dist) 110 | SNRs.append(SNR) 111 | 112 | data_dic = self.__generator(num_samples, nus1, nuscsf, SNRs, 113 | crossangle_min, random_seed = random_seed,) 114 | 115 | data_dic["parameters"]["type"] = "standard" 116 | data_dic["parameters"]["run_id"] = run_id 117 | 118 | data_dic["parameters"]["SNR_dist"] = SNR_dist 119 | data_dic["parameters"]["SNR_min"] = SNR_min 120 | data_dic["parameters"]["SNR_max"] = SNR_max 121 | data_dic["parameters"]["nu_min"] = nu_min 122 | data_dic["parameters"]["nu_max"] = 1-nu_min 123 | data_dic["parameters"]["nu1_dominant"] = nu1_dominant 124 | data_dic["parameters"]["random_seed"] = random_seed 125 | data_dic['parameters']['nu_csf_max'] = nu_csf_max 126 | data_dic['parameters']['include_csf'] = self.include_csf 127 | 128 | return SynthetizerFit(data_dic) 129 | 130 | def generateStructuredSet(self, nu1_values=[0.5, 0.6, 0.7, 0.8, 0.9], 131 | nucsf_values = np.linspace(0.05,0.6,8), 132 | include_csf = False, 133 | SNR_values=[30, 50, 100], 134 | repetition=500, 135 | run_id=0, 136 | crossangle_min=30, 137 | random_seed=None): 138 | 139 | if(not(include_csf == (len(nucsf_values)>0))): 140 | raise ValueError('include_csf is {0} but len(nucsf_values) is {1}.'.format(include_csf, len(nucsf_values))) 141 | np.random.seed(random_seed) 142 | num_samples = 0 143 | SNRs = [] 144 | nus1 = [] 145 | nuscsf = [] 146 | if(self.include_csf): 147 | for nu1 in nu1_values: 148 | for nucsf in nucsf_values: 149 | for SNR in SNR_values: 150 | for k in 
range(repetition): 151 | num_samples += 1 152 | nus1.append(nu1) 153 | SNRs.append(SNR) 154 | nuscsf.append(nucsf) 155 | else: 156 | for nu1 in nu1_values: 157 | for SNR in SNR_values: 158 | for k in range(repetition): 159 | num_samples += 1 160 | nus1.append(nu1) 161 | SNRs.append(SNR) 162 | 163 | 164 | data_dic = self.__generator(num_samples, nus1, nuscsf, SNRs, crossangle_min, random_seed = random_seed) 165 | 166 | data_dic["parameters"]["type"] = "structured" 167 | data_dic["parameters"]["run_id"] = run_id 168 | 169 | data_dic["parameters"]["nu1_values"] = nu1_values 170 | if(len(nucsf_values)>0): 171 | data_dic["parameters"]["nucsf_values"] = nucsf_values.tolist() 172 | data_dic["parameters"]["include_csf"] = include_csf 173 | data_dic["parameters"]["SNR_values"] = SNR_values 174 | data_dic["parameters"]["repetition"] = repetition 175 | data_dic["parameters"]["random_seed"] = random_seed 176 | 177 | data_dic["parameters"]["nu_min"] = min(1-np.max(nu1_values), np.min(nu1_values)) 178 | data_dic["parameters"]["nu_max"] = 1 - data_dic["parameters"]["nu_min"] 179 | 180 | return SynthetizerFit(data_dic) 181 | 182 | def __generator(self, num_samples, 183 | nus1, nuscsf, 184 | SNRs, crossangle_min, 185 | random_seed = None): 186 | np.random.seed(random_seed) 187 | assert num_samples == len(nus1), "num_samples should be equal to the length of nus1" 188 | assert num_samples == len(SNRs), "num_samples should be equal to the length of SNRs" 189 | assert (num_samples == len(nuscsf)) or (0 == len(nuscsf)), "the number of nus for csf should be equal to 0 or num_samples" 190 | 191 | M0 = 500 #Only used if not(M0_random) 192 | num_coils = 1 193 | dir1_type = 'random' 194 | S0_max = np.max(self.MF_dict["S0_fasc"]) 195 | 196 | # Prepare output arrays 197 | IDs = np.zeros((num_samples, self.num_fasc), dtype=np.int32) 198 | nus = np.zeros((num_samples, self.num_fasc)) 199 | orientations = np.zeros((num_samples, self.num_fasc, 3)) 200 | crossangles = np.zeros((num_samples)) 201 | M0s = np.zeros(num_samples) 202 | 203 | DWI_image_store = np.zeros((self.num_mris, num_samples)) 204 | DWI_noisy_store = np.zeros((self.num_mris, num_samples)) 205 | 206 | if(len(nuscsf)>0): 207 | sig_csf = self.MF_dict['sig_csf'] 208 | T2_csf = self.MF_dict['T2_csf'] 209 | TE = self.TE 210 | 211 | print('Generating Voxels...') 212 | for i in tqdm(range(num_samples)): 213 | 214 | # 1. Generate random fasciles properties based on dictionary. 215 | 216 | nu1 = nus1[i] 217 | nu2 = 1 - nu1 218 | 219 | 220 | fasc_dir_1, fasc_dir_2, crossangle = self.__generateRandomDirections(crossangle_min, dir1=dir1_type) 221 | 222 | 223 | ID_1 = np.random.randint(0, self.MF_dict["num_atom"]) 224 | ID_2 = np.random.randint(0, self.MF_dict["num_atom"]) 225 | 226 | # 2. 
Generate noise less raw signal 227 | 228 | sig_fasc1 = mfu.interp_PGSE_from_multishell(self.scheme, ordir=self.MF_dict['orientation'], newdir=fasc_dir_1, sig_ms=self.MF_dict["dictionary"][:, ID_1], sch_mat_ms=self.MF_dict["sch_mat"]) 229 | sig_fasc2 = mfu.interp_PGSE_from_multishell(self.scheme, ordir=self.MF_dict['orientation'], newdir=fasc_dir_2, sig_ms=self.MF_dict["dictionary"][:, ID_2], sch_mat_ms=self.MF_dict["sch_mat"]) 230 | 231 | if(len(nuscsf) == 0): 232 | DWI_image = nu1 * sig_fasc1 + nu2 * sig_fasc2 233 | DWI_image_store[:, i] = DWI_image 234 | else: 235 | nucsf = nuscsf[i] 236 | DWI_image = (1-nucsf) * (nu1 * sig_fasc1 + nu2 * sig_fasc2) + nucsf * sig_csf #np.exp(-self.TE/T2_csf) * np.exp(-self.MF_dict['bvals'] * sig_csf) 237 | DWI_image_store[:, i] = DWI_image 238 | 239 | # 3. Add noise to raw signal 240 | 241 | # M0 random 242 | if(self.M0_random): 243 | M0 = float(np.random.randint(500, 5000)) 244 | 245 | sigma_g = S0_max / SNRs[i] 246 | DWI_image_noisy = mfu.gen_SoS_MRI(DWI_image, sigma_g, num_coils) 247 | DWI_noisy_store[:, i] = M0*DWI_image_noisy 248 | 249 | # Store remaining parameters 250 | IDs[i, :] = np.array([ID_1, ID_2]) 251 | nus[i, :] = np.array([nu1, nu2]) 252 | M0s[i] = M0 253 | orientations[i, 0, :] = fasc_dir_1 254 | orientations[i, 1, :] = fasc_dir_2 255 | crossangles[i] = crossangle 256 | 257 | # Create dictionary containing all the information 258 | 259 | DWI_dict = {'DWI_image_store': DWI_image_store, 260 | 'DWI_noisy_store': DWI_noisy_store, 261 | 'M0s': M0s, 262 | 'IDs': IDs, 263 | 'nus': nus, 264 | 'nuscsf' : np.array(nuscsf), #Storing the csf volume fractions 265 | 'orientations': orientations, 266 | 'SNRs': SNRs, 267 | 'crossangles': crossangles, 268 | 'parameters': { 269 | 'task_name': self.task_name, 270 | 'M0_random': self.M0_random, 271 | 'dir1_type': dir1_type, 272 | 'crossangle_min': crossangle_min, 273 | 'num_samples': num_samples, 274 | 'num_coils': num_coils, 275 | 'num_fasc': self.num_fasc, 276 | 'scheme': self.scheme, 277 | 'MF_dict': self.MF_dict, 278 | }, 279 | } 280 | 281 | return DWI_dict 282 | 283 | 284 | class SynthetizerFit: 285 | 286 | def __init__(self, data_dic): 287 | self.data_dic = data_dic 288 | 289 | def save(self, basepath, force_overwrite = False): 290 | type_ = self.data_dic['parameters']['type'] 291 | task_name = self.data_dic['parameters']['task_name'] 292 | run_id = self.data_dic['parameters']['run_id'] 293 | 294 | output_path = os.path.join(basepath, "synthetizer", f"type-{type_}", "raw") 295 | # Create folder if it does not exist 296 | if not os.path.exists(output_path): 297 | os.makedirs(output_path) 298 | 299 | filename = f"type-{type_}_task-{task_name}_run-{run_id}_raw" 300 | 301 | if(not(force_overwrite) and os.path.exists(os.path.join(output_path, filename + ".pickle"))): 302 | raise ValueError('An identical synthetizer already exists. 
If you want to overwrite it, specify force_overwrite = True') 303 | with open(os.path.join(output_path, filename + ".pickle"), 'wb') as handle: 304 | pickle.dump(self.data_dic, handle, protocol=pickle.HIGHEST_PROTOCOL) 305 | 306 | metadata = self.data_dic['parameters'].copy() 307 | 308 | del metadata['MF_dict'] 309 | del metadata['scheme'] 310 | 311 | with open(os.path.join(output_path,filename+'.json'), 'w') as fp: 312 | json.dump(metadata, fp, indent=4) 313 | 314 | def saveQCPlot(self, basepath): 315 | type_ = self.data_dic['parameters']['type'] 316 | task_name = self.data_dic['parameters']['task_name'] 317 | run_id = self.data_dic['parameters']['run_id'] 318 | 319 | output_path = os.path.join(basepath, "synthetizer", f"type-{type_}","raw") 320 | # Create folder if it does not exist 321 | if not os.path.exists(output_path): 322 | os.makedirs(output_path) 323 | 324 | filename = f"type-{type_}_task-{task_name}_run-{run_id}_raw.png" 325 | 326 | self.__plotQC(os.path.join(output_path,filename)) 327 | 328 | def __plotQC(self, filename): 329 | 330 | fig, axs = plt.subplots(2, 2, figsize=(20, 20)) 331 | fig.suptitle('Quality control', fontsize=20) 332 | 333 | # Plot SNR distribution 334 | axs[0, 0].hist(self.data_dic['SNRs'], bins=20, density=True) 335 | axs[0, 0].set_title('SNR distribution', fontsize=16) 336 | axs[0, 0].set_xlabel('SNR', fontsize=16) 337 | axs[0, 0].set_ylabel('Density', fontsize=16) 338 | 339 | # Plot M0 distribution 340 | axs[0, 1].hist(self.data_dic['M0s'], bins=20, density=True) 341 | axs[0, 1].set_title('M0 distribution', fontsize=16) 342 | axs[0, 1].set_xlabel('M0', fontsize=16) 343 | axs[0, 1].set_ylabel('Density', fontsize=16) 344 | 345 | # Plot nu1 distribution 346 | axs[1, 0].hist(self.data_dic['nus'][:, 0], bins=20, density=True) 347 | axs[1, 0].set_title('nu1 distribution', fontsize=16) 348 | axs[1, 0].set_xlabel('nu1', fontsize=16) 349 | axs[1, 0].set_ylabel('Density', fontsize=16) 350 | 351 | # Plot crossing angle distribution 352 | axs[1, 1].hist(self.data_dic['crossangles'], bins=20, density=True) 353 | axs[1, 1].set_title('Crossing angle distribution', fontsize=16) 354 | axs[1, 1].set_xlabel('Crossing angle', fontsize=16) 355 | axs[1, 1].set_ylabel('Density', fontsize=16) 356 | 357 | plt.savefig(filename) 358 | plt.close() 359 | -------------------------------------------------------------------------------- /fastmf/inference/__init__.py: -------------------------------------------------------------------------------- 1 | from . import hybrid_inference 2 | from . 
import fullylearned_inference 3 | 4 | 5 | FullyLearned_Model = fullylearned_inference.FullyLearned_Model 6 | Hybrid_Model = hybrid_inference.Hybrid_Model 7 | -------------------------------------------------------------------------------- /fastmf/models/MLP_FullyLearned.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import torch 3 | import time 4 | import numpy as np 5 | import os 6 | from tqdm import tqdm 7 | import torch.optim as optim 8 | import fastmf.utils.NN_utils as nnu 9 | 10 | 11 | #%% Utility d2 mae score 12 | def D2score(pred,target): 13 | gt_medians = torch.median(target, axis = 0)[0] 14 | num = torch.mean(torch.abs(pred -target)) 15 | den = torch.mean(torch.abs(target - gt_medians.unsqueeze(0))) 16 | return 1 - num/den 17 | 18 | #%% MLP class 19 | class Network(torch.nn.Module): 20 | def __init__(self, architecture): 21 | super(Network, self).__init__() 22 | self.architecture = architecture 23 | self.Layers = torch.nn.Sequential() 24 | 25 | 26 | for i,module_data in enumerate(architecture): #Split layers 27 | name = module_data[0].split('-') 28 | if(name[0]=='FCL'): 29 | 30 | self.Layers.add_module('Layer_0_{0}'.format(i), 31 | torch.nn.Linear(module_data[1], 32 | module_data[2]) 33 | ) 34 | 35 | elif(name[0] == 'Dropout'): 36 | self.Layers.add_module('Dropout_0_{0}'.format(i), 37 | torch.nn.Dropout(p=module_data[1])) 38 | 39 | elif(name[0] == 'Batchnorm'): 40 | self.Layers.add_module('Batchnorm_0_{0}'.format(i), 41 | torch.nn.Batchnorm1d(module_data[1])) 42 | 43 | elif(name[0] == 'Activation'): 44 | nnu.add_activation(self.Layers, name[1], str_name = 'Activation_0_'+str(i)) 45 | 46 | 47 | 48 | def forward(self, x): 49 | for i,module in enumerate(self.Layers): 50 | x = module(x) 51 | 52 | return x 53 | 54 | #%% Data loading function 55 | def DataLoader(base_path, 56 | task_name, session_id, type_set, 57 | input_normalization, 58 | target_normalization): 59 | db_in = os.path.join(base_path, 'formatter', 'type-{0}'.format(type_set), 'SH') 60 | base = "type-{0}_task-{1}_ses-{2}_normalization".format(type_set, task_name, session_id) 61 | 62 | train_input_name = base + '-{0}_set-training_SH.npy'.format(input_normalization) 63 | validation_input_name = base + '-{0}_set-validation_SH.npy'.format(input_normalization) 64 | test_input_name = base + '-{0}_set-testing_SH.npy'.format(input_normalization) 65 | 66 | train_input_path = os.path.join(db_in,train_input_name) 67 | validation_input_path = os.path.join(db_in,validation_input_name) 68 | test_input_path = os.path.join(db_in,test_input_name) 69 | 70 | 71 | base = "type-{0}_task-{1}_ses-{2}_scaler".format(type_set, task_name, session_id) 72 | train_target_name = base + '-{0}_set-training_target.npy'.format(target_normalization) 73 | validation_target_name = base + '-{0}_set-validation_target.npy'.format(target_normalization) 74 | test_target_name = base + '-{0}_set-testing_target.npy'.format(target_normalization) 75 | 76 | train_target_path = os.path.join(db_in,train_target_name) 77 | validation_target_path = os.path.join(db_in,validation_target_name) 78 | test_target_path = os.path.join(db_in,test_target_name) 79 | 80 | 81 | with open(train_input_path, 'rb') as file: 82 | in_train = np.load(file).astype('float32') 83 | 84 | with open(validation_input_path, 'rb') as file: 85 | in_valid = np.load(file).astype('float32') 86 | 87 | with open(test_input_path, 'rb') as file: 88 | in_test = np.load(file).astype('float32') 89 | 90 | with open(train_target_path, 'rb') as 
file: 91 | target_train = np.load(file).astype('float32') 92 | 93 | with open(validation_target_path, 'rb') as file: 94 | target_valid = np.load(file).astype('float32') 95 | 96 | with open(test_target_path, 'rb') as file: 97 | target_test = np.load(file).astype('float32') 98 | 99 | 100 | 101 | return in_train,in_valid,in_test, target_train,target_valid,target_test 102 | #%% Training function 103 | 104 | def Train(model, batch_size, num_epochs, learning_rate, 105 | in_train, in_valid, 106 | target_train,target_valid, 107 | device = 'cpu', 108 | full_train_on_gpu = False, 109 | valid_on_gpu = False, 110 | bavard = 0, 111 | random_seed = 10, 112 | loss_function = None, 113 | metric_function = None, swapping = False): 114 | 115 | max_grad = 0 #Maximum gradient values (over all parameters) : to monitor training 116 | torch.manual_seed(random_seed) 117 | num_train_samples = in_train.shape[0] #Number of samples in the training set 118 | 119 | get_slice = lambda i, size: range(i * size, (i + 1) * size) #Will be used to get the different batches 120 | 121 | optimizer = optim.Adam(model.parameters(), lr=learning_rate) 122 | if(loss_function is None): 123 | print('WARNING : Using default (L1Loss) loss function') 124 | loss_function = torch.nn.L1Loss(reduction = 'mean') 125 | #For 'accuracy' evaluation 126 | #Should be different than loss_fn ideally, 127 | #and can be used to quickly determine the comparative performances of different loss functions 128 | if(metric_function is None): 129 | metric_function = torch.nn.L1Loss(reduction='mean') 130 | 131 | if(device == 'cuda'): 132 | model = model.to(device) 133 | 134 | 135 | print('----------------------- Training --------------------------', flush=True) 136 | start = time.time() 137 | 138 | num_batches_train = num_train_samples // batch_size #Number of training batches 139 | train_losses = [] #Will store the loss on the training set for each epoch 140 | val_losses = [] #Will store the loss on the validation set for each epoch 141 | val_acc = [] #Will store the 'accuracy' on the validation set for each epoch 142 | 143 | if(device == 'cuda'): 144 | torch.cuda.reset_peak_memory_stats(device) 145 | if(full_train_on_gpu): 146 | in_train = in_train.detach().to(device) 147 | target_train = target_train.detach().to(device) 148 | for epoch in tqdm(range(num_epochs)): 149 | #print('\n************ Epoch {0} ************'.format(epoch)) 150 | if(bavard > 0): 151 | print('memory :', torch.cuda.memory_allocated()/1024/1024) 152 | for i in range(num_batches_train): 153 | optimizer.zero_grad() #Reset gradients 154 | batch = get_slice(i,batch_size) #Create indexes of current batch 155 | 156 | if (full_train_on_gpu or device == 'cpu'): 157 | output = model.forward(in_train[batch, :]) # Forward pass 158 | if swapping: 159 | target_train_swapped = target_train[batch, :] 160 | else: 161 | output = model.forward(in_train[batch, :].detach().to(device)) # Forward pass 162 | if swapping: 163 | target_train_swapped = target_train[batch, :].detach().to(device) 164 | 165 | if swapping: 166 | swap = torch.mean(torch.abs(output[:, 0:6] - target_train_swapped[:, 0:6]) + 167 | torch.abs(output[:, 6:] - target_train_swapped[:, 6:]), dim=1) > \ 168 | torch.mean(torch.abs(output[:, 0:6] - target_train_swapped[:, 6:]) + 169 | torch.abs(output[:, 6:] - target_train_swapped[:, 0:6]), dim=1) 170 | target_train_swapped[swap, :] = target_train_swapped[swap, :][:, (6, 7, 8, 9, 10, 11, 0, 1, 2, 3, 4, 5)] 171 | 172 | batch_loss = loss_function(output, target_train_swapped) # Calculate loss 173 | 
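                # Permutation-invariant target matching: for each sample the two
                # 6-column fascicle blocks of the target are swapped whenever the
                # swapped assignment yields a lower L1 error against the prediction,
                # so the loss does not penalise an arbitrary fascicle ordering.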
else: 174 | if (full_train_on_gpu or device == 'cpu'): 175 | batch_loss = loss_function(output, target_train[batch, :]) 176 | else: 177 | batch_loss = loss_function(output, target_train[batch, :].detach().to(device)) 178 | 179 | batch_loss.backward() #Calculate gradients 180 | optimizer.step() #Update based on gradients 181 | param = [x for x in model.parameters()] 182 | for p in param: 183 | if(torch.any(torch.isnan(p))): 184 | print('Nan value encountered in model parameters') 185 | 186 | #Get maximum gradient values to monitor training 187 | if(bavard >5): 188 | full_grads = [x.grad for x in model.parameters()] 189 | maxi = 0 190 | min_grad = 1e12 191 | for grad in full_grads: 192 | if(not(grad is None)): 193 | maxi = torch.max(torch.abs(grad)) 194 | mini = torch.min(torch.abs(grad)) 195 | max_grad = max(max_grad,maxi) 196 | min_grad = min(min_grad,mini) 197 | print('Epoch : {0} / Batch : {1} / Max grad so far : {2}'.format(epoch,i,max_grad)) 198 | print('Epoch : {0} / Batch : {1} / Min grad so far : {2}'.format(epoch,i,min_grad)) 199 | 200 | del output 201 | del batch 202 | 203 | if(i0): 214 | print('Train loss at end of the epoch : ', batch_loss ) 215 | del batch_loss 216 | if(device == 'cuda'): 217 | del batch_loss_cpu 218 | torch.cuda.empty_cache() 219 | #Evaluate on validation set 220 | if(device == 'cuda'): 221 | if(valid_on_gpu): 222 | if(target_valid.shape[0]<10000): 223 | validation_output = model.forward(in_valid.detach().to(device), 224 | ) #Prediction on validation set 225 | else: 226 | validation_output = torch.zeros(target_valid.shape, device = 'cuda', dtype = torch.float32) 227 | idx_start = 0 228 | while(idx_start0): 257 | print('Validation loss at end of the epoch : ', batch_loss_valid) 258 | if(bavard >1): 259 | print('memory before deletions :', torch.cuda.memory_allocated()/1024/1024) 260 | 261 | 262 | del batch_loss_valid 263 | del validation_output 264 | del batch_acc_valid 265 | if(device == 'cuda'): 266 | if(bavard>0): 267 | print('\n\nMax GPU memory used during epoch : ', torch.cuda.max_memory_allocated(device)/1024/1024) 268 | if(valid_on_gpu): 269 | del batch_loss_valid_cpu 270 | del batch_acc_valid_cpu 271 | torch.cuda.empty_cache() 272 | if(bavard>1): 273 | print('memory after deletions :', torch.cuda.memory_allocated()/1024/1024) 274 | 275 | 276 | end = time.time() 277 | total_time = end-start 278 | out = {"train_losses": train_losses, 279 | "validation_losses": val_losses, 280 | "validation_accuracy" : val_acc, 281 | "model": model, 282 | "total_time": total_time, 283 | "optimizer": optimizer, 284 | } 285 | if(device == 'cuda'): 286 | torch.cuda.empty_cache() 287 | return out 288 | 289 | -------------------------------------------------------------------------------- /fastmf/models/MLP_Split.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import torch 3 | import time 4 | from tqdm import tqdm 5 | import torch.optim as optim 6 | import fastmf.utils.NN_utils as nnu 7 | import os 8 | import numpy as np 9 | 10 | 11 | #%% Utility d2 mae score 12 | def D2score(pred,target): 13 | gt_medians = torch.median(target, axis = 0)[0] 14 | num = torch.mean(torch.abs(pred - target)) 15 | den = torch.mean(torch.abs(target - gt_medians.unsqueeze(0))) 16 | return 1 - num/den 17 | 18 | #%% Split MLP class 19 | class Network(torch.nn.Module): 20 | def __init__(self, split_architecture, final_architecture): 21 | super(Network, self).__init__() 22 | self.split_architecture = split_architecture 23 | 
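        # Each architecture is a list of layer descriptors parsed below:
        #   ['FCL', in_features, out_features]  -> torch.nn.Linear
        #   ['Dropout', p]                      -> torch.nn.Dropout
        #   ['Batchnorm', num_features]         -> 1-D batch normalisation
        #   ['Activation-<Name>']               -> activation added via NN_utils.add_activation
        # The split architecture is instantiated twice (one branch per fascicle),
        # the final architecture once on the concatenated branch outputs.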
self.final_architecture = final_architecture 24 | self.Split_Layers = torch.nn.ModuleList([torch.nn.Sequential(), torch.nn.Sequential()]) 25 | self.Final_Layers = torch.nn.Sequential() 26 | 27 | for i,module_data in enumerate(split_architecture): #Split layers 28 | name = module_data[0].split('-') 29 | if(name[0]=='FCL'): 30 | 31 | self.Split_Layers[0].add_module('Layer_0_{0}'.format(i), 32 | torch.nn.Linear(module_data[1], 33 | module_data[2]) 34 | ) 35 | self.Split_Layers[1].add_module('Layer_1_{0}'.format(i), 36 | torch.nn.Linear(module_data[1], 37 | module_data[2]) 38 | ) 39 | elif(name[0] == 'Dropout'): 40 | self.Split_Layers[0].add_module('Dropout_0_{0}'.format(i), 41 | torch.nn.Dropout(p=module_data[1])) 42 | self.Split_Layers[1].add_module('Dropout_1_{0}'.format(i), 43 | torch.nn.Dropout(p=module_data[1])) 44 | elif(name[0] == 'Batchnorm'): 45 | self.Split_Layers[0].add_module('Batchnorm_0_{0}'.format(i), 46 | torch.nn.Batchnorm1d(module_data[1])) 47 | self.Split_Layers[1].add_module('Batchnorm_1_{0}'.format(i), 48 | torch.nn.Batchnorm1d(module_data[1])) 49 | elif(name[0] == 'Activation'): 50 | nnu.add_activation(self.Split_Layers[0], name[1], str_name = 'Activation_0_'+str(i)) 51 | nnu.add_activation(self.Split_Layers[1], name[1], str_name = 'Activation_1_'+str(i)) 52 | 53 | for i,module_data in enumerate(final_architecture): #Final Layers 54 | name = module_data[0].split('-') 55 | if(name[0]=='FCL'): 56 | 57 | self.Final_Layers.add_module('Layer_0_{0}'.format(i), 58 | torch.nn.Linear(module_data[1], 59 | module_data[2]) 60 | ) 61 | 62 | 63 | elif(name[0] == 'Dropout'): 64 | self.Final_Layers.add_module('Dropout_0_{0}'.format(i), 65 | torch.nn.Dropout(p=module_data[1])) 66 | 67 | elif(name[0] == 'Activation'): 68 | nnu.add_activation(self.Final_Layers, name[1], str_name = 'Activation_0_'+str(i)) 69 | 70 | def forward(self, x): 71 | num_atoms = self.split_architecture[0][1] # ['FCL', num_atoms, ?????] 
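        # Input layout: columns [0, num_atoms) hold the NNLS weights of fascicle 1,
        # columns [num_atoms, 2*num_atoms) those of fascicle 2, and an optional last
        # column holds the CSF fraction (extracted as x_csf but not used further
        # here). Both fascicle branches below go through Split_Layers[0], so the
        # two branches share weights and Split_Layers[1] is left unused in this pass.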
72 | x_fasc1 = x[:,0:num_atoms] 73 | x_fasc2 = x[:,num_atoms:2*num_atoms] 74 | if(x.shape[1]==2*num_atoms+1): 75 | x_csf = x[:,-1:] 76 | for i in range(len(self.Split_Layers[0])): 77 | x_fasc1 = self.Split_Layers[0][i](x_fasc1) 78 | x_fasc2 = self.Split_Layers[0][i](x_fasc2) 79 | x = torch.concatenate((x_fasc1,x_fasc2), dim = 1) 80 | for i,module in enumerate(self.Final_Layers): 81 | x = module(x) 82 | 83 | return x 84 | 85 | #%% Data loading function 86 | def DataLoader(base_path, 87 | task_name, session_id, type_set, 88 | input_normalization, 89 | target_normalization, orientation_estimate): 90 | db_in = os.path.join(base_path, 'formatter', 'type-standard', 'NNLS') 91 | base = "type-{0}_task-{1}_ses-{2}_orientation-{3}_normalization".format(type_set, task_name, session_id, orientation_estimate) 92 | 93 | train_input_name = base + '-{0}_set-training_NNLS.npy'.format(input_normalization) 94 | validation_input_name = base + '-{0}_set-validation_NNLS.npy'.format(input_normalization) 95 | test_input_name = base + '-{0}_set-testing_NNLS.npy'.format(input_normalization) 96 | 97 | train_input_path = os.path.join(db_in,train_input_name) 98 | validation_input_path = os.path.join(db_in,validation_input_name) 99 | test_input_path = os.path.join(db_in,test_input_name) 100 | 101 | 102 | base = "type-{0}_task-{1}_ses-{2}_orientation-{3}_scaler".format(type_set, task_name, session_id, orientation_estimate) 103 | train_target_name = base + '-{0}_set-training_target.npy'.format(target_normalization) 104 | validation_target_name = base + '-{0}_set-validation_target.npy'.format(target_normalization) 105 | test_target_name = base + '-{0}_set-testing_target.npy'.format(target_normalization) 106 | 107 | train_target_path = os.path.join(db_in,train_target_name) 108 | validation_target_path = os.path.join(db_in,validation_target_name) 109 | test_target_path = os.path.join(db_in,test_target_name) 110 | 111 | 112 | with open(train_input_path, 'rb') as file: 113 | in_train = np.load(file).astype('float32') 114 | 115 | with open(validation_input_path, 'rb') as file: 116 | in_valid = np.load(file).astype('float32') 117 | 118 | with open(test_input_path, 'rb') as file: 119 | in_test = np.load(file).astype('float32') 120 | 121 | with open(train_target_path, 'rb') as file: 122 | target_train = np.load(file).astype('float32') 123 | 124 | with open(validation_target_path, 'rb') as file: 125 | target_valid = np.load(file).astype('float32') 126 | 127 | with open(test_target_path, 'rb') as file: 128 | target_test = np.load(file).astype('float32') 129 | 130 | 131 | 132 | return in_train,in_valid,in_test, target_train,target_valid,target_test 133 | 134 | 135 | 136 | #%% Training function 137 | #%% Training function 138 | 139 | def Train(model, batch_size, num_epochs, learning_rate, 140 | in_train, in_valid, 141 | target_train,target_valid, 142 | device = 'cpu', 143 | full_train_on_gpu = False, 144 | valid_on_gpu = False, 145 | bavard = 0, 146 | random_seed = 10, 147 | loss_function = None, 148 | metric_function = None): 149 | 150 | max_grad = 0 #Maximum gradient values (over all parameters) : to monitor training 151 | torch.manual_seed(random_seed) 152 | num_train_samples = in_train.shape[0] #Number of samples in the training set 153 | 154 | get_slice = lambda i, size: range(i * size, (i + 1) * size) #Will be used to get the different batches 155 | 156 | optimizer = optim.Adam(model.parameters(), lr=learning_rate) 157 | if(loss_function is None): 158 | print('WARNING : Using default (L1Loss) loss function') 159 | loss_function 
= torch.nn.L1Loss(reduction = 'mean') 160 | #For 'accuracy' evaluation 161 | #Should be different than loss_fn ideally, 162 | #and can be used to quickly determine the comparative performances of different loss functions 163 | if(metric_function is None): 164 | metric_function = torch.nn.L1Loss(reduction='mean') 165 | 166 | if(device == 'cuda'): 167 | model = model.to(device) 168 | 169 | 170 | print('----------------------- Training --------------------------', flush=True) 171 | start = time.time() 172 | 173 | num_batches_train = num_train_samples // batch_size #Number of training batches 174 | train_losses = [] #Will store the loss on the training set for each epoch 175 | val_losses = [] #Will store the loss on the validation set for each epoch 176 | val_acc = [] #Will store the 'accuracy' on the validation set for each epoch 177 | 178 | if(device == 'cuda'): 179 | torch.cuda.reset_peak_memory_stats(device) 180 | if(full_train_on_gpu): 181 | in_train = in_train.detach().to(device) 182 | target_train = target_train.detach().to(device) 183 | for epoch in tqdm(range(num_epochs)): 184 | #print('\n************ Epoch {0} ************'.format(epoch)) 185 | if(bavard > 0): 186 | print('memory :', torch.cuda.memory_allocated()/1024/1024) 187 | for i in range(num_batches_train): 188 | optimizer.zero_grad() #Reset gradients 189 | batch = get_slice(i,batch_size) #Create indexes of current batch 190 | 191 | if(full_train_on_gpu or device == 'cpu'): 192 | output = model.forward(in_train[batch, :]) #Forward pass 193 | batch_loss = loss_function(output, target_train[batch, :])#, batch)#Calculate loss 194 | else: 195 | output = model.forward(in_train[batch, :].detach().to(device)) #Forward pass 196 | batch_loss = loss_function(output, target_train[batch, :].detach().to(device)) #Calculate loss 197 | 198 | batch_loss.backward() #Calculate gradients 199 | optimizer.step() #Update based on gradients 200 | param = [x for x in model.parameters()] 201 | for p in param: 202 | if(torch.any(torch.isnan(p))): 203 | print('Nan value encountered in model parameters') 204 | 205 | #Get maximum gradient values to monitor training 206 | if(bavard >5): 207 | full_grads = [x.grad for x in model.parameters()] 208 | maxi = 0 209 | min_grad = 1e12 210 | for grad in full_grads: 211 | if(not(grad is None)): 212 | maxi = torch.max(torch.abs(grad)) 213 | mini = torch.min(torch.abs(grad)) 214 | max_grad = max(max_grad,maxi) 215 | min_grad = min(min_grad,mini) 216 | print('Epoch : {0} / Batch : {1} / Max grad so far : {2}'.format(epoch,i,max_grad)) 217 | print('Epoch : {0} / Batch : {1} / Min grad so far : {2}'.format(epoch,i,min_grad)) 218 | 219 | del output 220 | del batch 221 | 222 | if(i0): 233 | print('Train loss at end of the epoch : ', batch_loss ) 234 | del batch_loss 235 | if(device == 'cuda'): 236 | del batch_loss_cpu 237 | torch.cuda.empty_cache() 238 | #Evaluate on validation set 239 | if(device == 'cuda'): 240 | if(valid_on_gpu): 241 | if(target_valid.shape[0]<10000): 242 | validation_output = model.forward(in_valid.detach().to(device), 243 | ) #Prediction on validation set 244 | else: 245 | validation_output = torch.zeros(target_valid.shape, device = 'cuda', dtype = torch.float32) 246 | idx_start = 0 247 | while(idx_start0): 276 | print('Validation loss at end of the epoch : ', batch_loss_valid) 277 | if(bavard >1): 278 | print('memory before deletions :', torch.cuda.memory_allocated()/1024/1024) 279 | 280 | 281 | del batch_loss_valid 282 | del validation_output 283 | del batch_acc_valid 284 | if(device == 'cuda'): 
285 | if(bavard>0): 286 | print('\n\nMax GPU memory used during epoch : ', torch.cuda.max_memory_allocated(device)/1024/1024) 287 | if(valid_on_gpu): 288 | del batch_loss_valid_cpu 289 | del batch_acc_valid_cpu 290 | torch.cuda.empty_cache() 291 | if(bavard>1): 292 | print('memory after deletions :', torch.cuda.memory_allocated()/1024/1024) 293 | 294 | 295 | end = time.time() 296 | total_time = end-start 297 | out = {"train_losses": train_losses, 298 | "validation_losses": val_losses, 299 | "validation_accuracy" : val_acc, 300 | "model": model, 301 | "total_time": total_time, 302 | "optimizer": optimizer, 303 | } 304 | if(device == 'cuda'): 305 | torch.cuda.empty_cache() 306 | return out -------------------------------------------------------------------------------- /fastmf/models/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hyedryn/FastMF_public/cb0775bff53f320ccb0a6a3bc507c924a9f30868/fastmf/models/__init__.py -------------------------------------------------------------------------------- /fastmf/reports/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hyedryn/FastMF_public/cb0775bff53f320ccb0a6a3bc507c924a9f30868/fastmf/reports/__init__.py -------------------------------------------------------------------------------- /fastmf/utils/NN_utils.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | import torch 3 | 4 | 5 | #%% Utility function : add_activation 6 | def add_activation(layers, activation_name, str_name = 'Activation'): 7 | if(activation_name == 'LeakyReLU'): 8 | layers.add_module(str_name,torch.nn.LeakyReLU()) 9 | elif(activation_name == 'ReLU'): 10 | layers.add_module(str_name,torch.nn.ReLU()) 11 | elif(activation_name == 'SELU'): 12 | layers.add_module(str_name,torch.nn.SELU()) 13 | elif(activation_name == 'ELU'): 14 | layers.add_module(str_name,torch.nn.ELU()) 15 | elif(activation_name == 'SiLU'): 16 | layers.add_module(str_name,torch.nn.SiLU()) 17 | elif(activation_name == 'Mish'): 18 | layers.add_module(str_name,torch.nn.Mish()) 19 | elif(activation_name == 'Softsign'): 20 | layers.add_module(str_name,torch.nn.Softsign()) 21 | elif(activation_name == 'Tanhshrink'): 22 | layers.add_module(str_name,torch.nn.Tanhshrink()) 23 | elif(activation_name == 'Tanh'): 24 | layers.add_module(str_name,torch.nn.Tanh()) 25 | elif(activation_name == 'ELU'): 26 | layers.add_module(str_name,torch.nn.ELU()) 27 | elif(activation_name == 'Sigmoid'): 28 | layers.add_module(str_name,torch.nn.Sigmoid()) 29 | else: 30 | print('Warning : Putting ReLU (default) as activation') 31 | layers.add_module(str_name,torch.nn.ReLU()) 32 | #%% utility function to get the number of trainable parameters of the model 33 | def count_parameters(model): 34 | return sum(p.numel() for p in model.parameters() if p.requires_grad) -------------------------------------------------------------------------------- /fastmf/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Hyedryn/FastMF_public/cb0775bff53f320ccb0a6a3bc507c924a9f30868/fastmf/utils/__init__.py -------------------------------------------------------------------------------- /fastmf/utils/mf_estimator.py: -------------------------------------------------------------------------------- 1 | import os 2 | import time 3 | import tempfile 4 | import numpy as np 
5 | import scipy 6 | from itertools import product 7 | import scipy.optimize 8 | from tqdm import tqdm 9 | 10 | # %% objective function 11 | def L(Asmall, y, x): 12 | # y : measurement 13 | # x : w1,w2,sigma 14 | # Asmall : Sub dic 15 | 16 | u = np.sum(x[:Asmall.shape[1]] * Asmall, axis=1) 17 | u = u**2 + x[-1]**2 18 | return np.sum((y - np.sqrt(u))**2) 19 | 20 | def solve_one_comb(Asmall, y): 21 | sigma_g = 1 22 | m, n = Asmall.shape 23 | x0 = np.zeros(n+1) 24 | x0[:-1] = np.ones(n) / n 25 | x0[-1] = sigma_g 26 | resu = scipy.optimize.minimize(lambda x: L(Asmall, y, x), x0=x0, 27 | method = 'Nelder-Mead', 28 | options = {'maxiter': 1000}) 29 | 30 | return resu.x, resu.fun 31 | 32 | def solve_exhaustive_posweights_sigma(A, y, dicsizes, printmsg=None, verbose=False, cMode=True): 33 | """ 34 | Args: 35 | A: 2-D NumPy array. 36 | y: 1-D NumPy array of length A.shape[0]. 37 | dicsizes: 1-D NumPy array containing strictly positive integers 38 | representing the size of each sub-dictionary in A. 39 | Their sum must equal A.shape[1]. 40 | printmsg: str to be printed at the start of the execution of the 41 | function. Useful on computing clusters or in parallel computing. 42 | Default: None. 43 | 44 | Returns: 45 | w_nneg: 1-D NumPy array containing the K non-negative weights assigned 46 | to the one optimal column in each sub-dictionary. To get the full 47 | optimal w_opt do: 48 | w_opt = numpy.zeros(A.shape[1]) 49 | w_opt[ind_atoms_totdic] = w_nneg. 50 | ind_atoms_subdic: 1-D numy array of size K containing the index of the 51 | column selected (having a non-zero weight) within each sub-dictionary 52 | Ak, i.e. ind_atoms_subdic[k] is in [0, dicsizes[k][. 53 | ind_atoms_totdic: 1-D NumPy array of size K containing the indices of 54 | all columns with non-zero weight in A, i.e. ind_atoms_totdic[k] is in 55 | [0, A.shape[1][. 56 | min_obj: floating-point scalar equal to ||Aw_opt-y||_2^2. 57 | y_recons: 1-D NumPy array equal to Aw_opt, i.e. the model prediction. 
58 | """ 59 | # Print message (can be useful on computing clusters or in // computing) 60 | if printmsg is not None: 61 | print(printmsg, end="") 62 | 63 | # --- Check inputs --- 64 | # A should be a 2D NumPy array 65 | assert isinstance(A, np.ndarray), "A should be a NumPy ndarray" 66 | assert A.ndim == 2, "A should be a 2D array" 67 | # A should not have zero columns 68 | assert not np.any(np.all(A == 0, axis=0)), "All-zero columns detected in A" 69 | # A should contain floating-point numbers 70 | if A.dtype is not np.float64: 71 | A = A.astype(np.float64) 72 | # y should be a NumPy float64 array 73 | assert isinstance(y, np.ndarray), "y should be a NumPy ndarray" 74 | if y.dtype is not np.float64: 75 | y = y.astype(np.float64) 76 | # Refuse empty data 77 | assert A.size > 0 and y.size > 0, "A and y should not be empty arrays" 78 | # A.shape[0] should match y 79 | msg = ("Number of rows in A (%d) should match number of elements in y (%d)" 80 | % (A.shape[0], y.size)) 81 | assert A.shape[0] == y.size, msg 82 | 83 | # diclengths should be a NumPy int32 array with strictly positive entries 84 | assert isinstance(dicsizes, np.ndarray), ("dicsizes should be a " 85 | "NumPy ndarray") 86 | assert np.all(dicsizes > 0), "All entries of dicsizes should be > 0" 87 | if dicsizes.dtype is not np.int64: 88 | dicsizes = dicsizes.astype(np.int64) 89 | 90 | # Sum of subsizes should match total size of A 91 | msg = ("Number of columns of A (%d) does not equal sum of size of " 92 | "sub-matrices in diclengths array (%d)" 93 | % (A.shape[1], np.sum(dicsizes))) 94 | assert A.shape[1] == np.sum(dicsizes), msg 95 | 96 | 97 | # y is often read-only when passed by multiprocessing functions such as 98 | # multiprocessing.Pool.starmap/map, ipyparallel.Client.map/map_async, etc. 99 | # This made for Numba compilation errors in lsqnonneg_1var, lsqnonneg_2var 100 | if y.flags['WRITEABLE'] is False: 101 | y = y.copy() 102 | y.flags.writeable = True 103 | 104 | N_LSC = dicsizes.size # number of large-scale compartments in voxel 105 | end_ind = np.cumsum(dicsizes) # indices excluded in Python 106 | st_ind = np.zeros(dicsizes.size, dtype=np.int64) 107 | st_ind[1:] = end_ind[:-1] 108 | Nsubprob = np.prod(dicsizes) 109 | 110 | # Compute all the combinations of atoms from each fascicle sub-dictionary 111 | # atom_indices = arrangements(dicsizes) # too much memory used 112 | idx_range = tuple([np.arange(dicsizes[i]) 113 | for i in range(len(dicsizes))]) 114 | 115 | # Prepare output 116 | w_nneg = np.zeros(N_LSC) 117 | ind_atoms_subdic = np.zeros(N_LSC, dtype=np.int64) 118 | y_sq = np.sum(y**2) 119 | min_obj = y_sq 120 | 121 | # Solve all subproblems. Note: do not create list of all index 122 | # combinations because way too memory expensive. Use itertools.product. 
123 | if cMode: 124 | tmp = tempfile.mkdtemp() 125 | print("tmp: ", tmp) 126 | # Call a C function then extract results: 127 | np.savetxt(os.path.join(tmp, r'dicA.txt'), A, fmt='%f') 128 | np.savetxt(os.path.join(tmp, r'y.txt'), y, fmt='%f') 129 | 130 | time.sleep(1) 131 | 132 | # Delete 133 | csf = 1 in dicsizes 134 | # System call 135 | if csf and len(dicsizes)==3: 136 | cmd = f'cd {tmp}; neldermead 0.4 0.4 0.2 0.1 {dicsizes[0]} {len(y)}' 137 | elif len(dicsizes)==2: 138 | cmd = f'cd {tmp}; neldermead 0.5 0.5 0.1 {dicsizes[0]} {len(y)}' 139 | else: 140 | raise NotImplementedError(f"Fitting for dicsizes {dicsizes} not implemented."); 141 | #print(cmd) 142 | import subprocess 143 | result = subprocess.check_output(cmd, shell=True) 144 | 145 | # Load results 146 | w_nneg = np.loadtxt(os.path.join(tmp, r'w.txt')) 147 | sigma = w_nneg[-1] 148 | print("sigma: ", sigma) 149 | w_nneg = w_nneg[:-1] 150 | ind_atoms_subdic = np.loadtxt(os.path.join(tmp, r'ind.txt'), dtype=int) 151 | min_obj = np.loadtxt(os.path.join(tmp, r'obj.txt')) 152 | 153 | 154 | #w_nneg = w_nneg / np.sum(w_nneg) 155 | else: 156 | cnt = 0 157 | for idx in tqdm(product(*idx_range),desc="Solving subproblems",total=Nsubprob, mininterval=2, disable=not(verbose)): 158 | cnt += 1 159 | Asmall = A[:, st_ind + idx] 160 | print("Asmall.shape: ", Asmall.shape) 161 | print("stdind: ", st_ind) 162 | print("idx: ", idx) 163 | 164 | w, obj_fun = solve_one_comb(Asmall, y) 165 | 166 | if obj_fun < min_obj: 167 | w_nneg = w 168 | min_obj = obj_fun 169 | ind_atoms_subdic = np.atleast_1d(idx) 170 | 171 | assert Nsubprob == cnt, "Problem with number of subproblems solved" 172 | # absolute index within complete dictionary A 173 | ind_atoms_totdic = st_ind + ind_atoms_subdic 174 | # reconstructed data vector 175 | y_recons = np.dot(A[:, ind_atoms_totdic], w_nneg) 176 | return (w_nneg, ind_atoms_subdic, ind_atoms_totdic, min_obj, y_recons) 177 | 178 | 179 | 180 | 181 | -------------------------------------------------------------------------------- /fastmf/utils/tripwire.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Wed Mar 3 16:49:28 2021 4 | 5 | @author: rensonnetg 6 | """ 7 | 8 | """ Class to raise error for missing modules or other misfortunes 9 | from DIPY code 10 | https://github.com/dipy/dipy/blob/master/dipy/utils/tripwire.py 11 | """ 12 | 13 | 14 | class TripWireError(AttributeError): 15 | """ Exception if trying to use TripWire object """ 16 | 17 | 18 | def is_tripwire(obj): 19 | """ Returns True if `obj` appears to be a TripWire object 20 | Examples 21 | -------- 22 | >>> is_tripwire(object()) 23 | False 24 | >>> is_tripwire(TripWire('some message')) 25 | True 26 | """ 27 | try: 28 | obj.any_attribute 29 | except TripWireError: 30 | return True 31 | except Exception: 32 | pass 33 | return False 34 | 35 | 36 | class TripWire(object): 37 | """ Class raising error if used 38 | Standard use is to proxy modules that we could not import 39 | Examples 40 | -------- 41 | >>> try: 42 | ... import silly_module_name 43 | ... except ImportError: 44 | ... silly_module_name = TripWire('We do not have silly_module_name') 45 | >>> silly_module_name.do_silly_thing('with silly string') #doctest: +IGNORE_EXCEPTION_DETAIL 46 | Traceback (most recent call last): 47 | ... 
48 | TripWireError: We do not have silly_module_name 49 | """ 50 | 51 | def __init__(self, msg): 52 | self._msg = msg 53 | 54 | def __getattr__(self, attr_name): 55 | """ Raise informative error accessing attributes """ 56 | raise TripWireError(self._msg) 57 | 58 | def __call__(self, *args, **kwargs): 59 | """ Raise informative error while calling """ 60 | raise TripWireError(self._msg) 61 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "FastMF" 3 | version = "0.1.0" 4 | description = "FastMF is a python package for microstructural property characterization using deep learning based on diffusion MRI data." 5 | authors = ["Quentin Dessain "] 6 | readme = "README.md" 7 | 8 | [tool.poetry.dependencies] 9 | python = ">=3.8,<3.11" 10 | numpy = "^1.22" 11 | dipy = "^1.6" 12 | matplotlib = "^3.5" 13 | cvxpy = "<=1.3.1" 14 | numba = "<=0.57.0" 15 | tqdm = "<=4.48" 16 | pathos = "<=0.2.8" 17 | scipy = "<=1.7.3" 18 | nibabel = "<=5.0" 19 | torch = "<=2.0.0" 20 | scikit-learn = "<=1.0" 21 | 22 | [build-system] 23 | requires = ["poetry-core"] 24 | build-backend = "poetry.core.masonry.api" 25 | --------------------------------------------------------------------------------
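A minimal end-to-end sketch of the synthetic-data generation entry point, not part of the repository sources above: the scheme, b-values and dictionary paths are placeholders to be replaced by real files, and the keyword values simply echo the defaults documented in fastmf/generation/synthetizer.py.

from fastmf.generation import Synthetizer

# Placeholder inputs -- point these at a real acquisition scheme (DWI only),
# the matching b-values file and a pre-computed MF dictionary (.mat).
synth = Synthetizer("my_scheme.txt", "my_bvals.txt", "my_dictionary.mat",
                    task_name="demo", include_csf=False, M0_random=True)

# Draw 1000 synthetic two-fascicle voxels with SNR sampled uniformly in [20, 100]
# and a minimum crossing angle of 30 degrees between fascicles.
fit = synth.generateStandardSet(1000, run_id=0, SNR_min=20, SNR_max=100,
                                SNR_dist="uniform", nu_min=0.15,
                                crossangle_min=30, random_seed=42)

# Persist the raw signal store plus its JSON metadata, and a quality-control figure.
fit.save("./out", force_overwrite=True)
fit.saveQCPlot("./out")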